diff --git a/.circleci/config.yml b/.circleci/config.yml index 7c69a0432e..82492e724f 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,258 +1,293 @@ --- -version: 2 -jobs: - python3_test: - # Run Python 3 tests - working_directory: /python3_test - docker: - - image: continuumio/miniconda3 +version: 2.1 + +orbs: + coverage-reporter: codacy/coverage-reporter@13.13.0 + +commands: + + check_changes: steps: - - checkout - run: + name: Check whether or not installation tests are needed command: | - apt update && apt -y install build-essential gfortran - # Create a file to checksum as cache key - date --rfc-3339 date > cache_key.txt - cat environment.yml >> cache_key.txt - - restore_cache: - key: deps3-{{ .Branch }}-{{ checksum "cache_key.txt" }} + if (test "$CIRCLE_BRANCH" = main || + git --no-pager diff --name-only origin/main... | + grep -q -E -f .circleci/install_triggers) + then + echo Running installation tests + else + echo Skipping installation tests + circleci step halt + fi + + log_versions: + steps: - run: - # Update/Create Conda environment and run tests + name: Log versions command: | . /opt/conda/etc/profile.d/conda.sh - conda update -y conda - conda env update + conda env export --name base > /logs/base_environment.yml conda activate esmvaltool - conda install -yS r-lintr - python setup.py test - - save_cache: - key: deps3-{{ .Branch }}-{{ checksum "cache_key.txt" }} - paths: - - "/opt/conda/envs/esmvaltool" - - ".eggs" - - store_test_results: - path: test-reports/ - - store_artifacts: - path: test-reports/ + esmvaltool version + dpkg -l > /logs/versions.txt + conda env export > /logs/environment.yml + pip freeze > /logs/requirements.txt - python3_install: - # Test Python 3 installation - working_directory: /python3_install - docker: - - image: continuumio/miniconda3 + test_and_report: steps: - - checkout - - restore_cache: - key: python3-install-{{ .Branch }} - run: + name: Run tests command: | . /opt/conda/etc/profile.d/conda.sh - set -x - mkdir /logs - # Install - apt update > /logs/apt.txt 2>&1 - apt -y install build-essential gfortran >> /logs/apt.txt 2>&1 - wget https://julialang-s3.julialang.org/bin/linux/x64/1.0/julia-1.0.3-linux-x86_64.tar.gz - tar xfz julia-*-linux-x86_64.tar.gz - ln -s $(pwd)/julia-*/bin/julia /usr/bin/julia - conda update -y conda > /logs/conda.txt 2>&1 - conda env update >> /logs/conda.txt 2>&1 - set +x; conda activate esmvaltool; set -x - pip install . > /logs/install.txt 2>&1 - Rscript esmvaltool/install/R/setup.R > /logs/R_install.txt 2>&1 - julia esmvaltool/install/Julia/setup.jl > /logs/julia_install.txt 2>&1 - # Log versions - dpkg -l > /logs/versions.txt - conda env export > /logs/environment.yml - pip freeze > /logs/requirements.txt - # Test installation - python setup.py test --installation - esmvaltool -h + conda activate esmvaltool + mamba --version + pytest -n 4 --junitxml=test-reports/report.xml + esmvaltool version + esmvaltool -- --help ncl -V - # cdo test, check that it supports hdf5 cdo --version - echo 0 | cdo -f nc input,r1x1 tmp.nc - ncdump tmp.nc | ncgen -k hdf5 -o tmp.nc - cdo -f nc copy tmp.nc tmp2.nc - - save_cache: - key: python3-install-{{ .Branch }} - paths: - - "/opt/conda/pkgs" - - ".eggs" + - store_test_results: + path: test-reports/report.xml - store_artifacts: path: /logs + - run: + name: Compress pytest artifacts + command: tar -cvzf pytest.tar.gz -C /tmp/pytest-of-root/ . 
+ when: always - store_artifacts: - path: test-reports/ - - store_test_results: - path: test-reports/ + path: pytest.tar.gz - run: + name: Compress test-report artifacts + command: tar -cvzf test-reports.tar.gz test-reports/ when: always - command: | - pip install codacy-coverage - python-codacy-coverage -r test-reports/python3/coverage.xml + - store_artifacts: + path: test-reports.tar.gz - develop: - # Test development installation - working_directory: /develop - docker: - - image: continuumio/miniconda3 + test_installation_from_source: + parameters: + extra: + type: string + flags: + type: string + default: "" steps: + - run: + name: Install git+ssh + environment: + DEBIAN_FRONTEND: noninteractive # needed to install tzdata + command: apt update && apt install -y git ssh - checkout + - check_changes + - run: + name: Generate cache key + command: date '+%Y-%V' | tee cache_key.txt + - restore_cache: + key: install-<< parameters.extra >>-{{ .Branch }}-{{ checksum "cache_key.txt" }} - run: + name: Install dependencies command: | + # Install . /opt/conda/etc/profile.d/conda.sh - set -x mkdir /logs - # Install - apt update > /logs/apt.txt 2>&1 - apt -y install build-essential gfortran >> /logs/apt.txt 2>&1 - wget https://julialang-s3.julialang.org/bin/linux/x64/1.0/julia-1.0.3-linux-x86_64.tar.gz - tar xfz julia-*-linux-x86_64.tar.gz - ln -s $(pwd)/julia-*/bin/julia /usr/bin/julia - conda update -y conda > /logs/conda.txt 2>&1 - conda env update >> /logs/conda.txt 2>&1 - set +x; conda activate esmvaltool; set -x - pip install -e .[develop] > /logs/install.txt 2>&1 - Rscript esmvaltool/install/R/setup.R > /logs/R_install.txt 2>&1 - julia esmvaltool/install/Julia/setup.jl > /logs/julia_install.txt 2>&1 - # Log versions - dpkg -l > /logs/versions.txt - conda env export > /logs/environment.yml - pip freeze > /logs/requirements.txt - # Test installation - esmvaltool -h - python setup.py test --installation - ncl -V - cdo --version - - store_artifacts: - path: /logs + mamba env create >> /logs/conda.txt 2>&1 + conda activate esmvaltool + pip install << parameters.flags >> ".[<< parameters.extra >>]" > /logs/install.txt 2>&1 + esmvaltool install Julia > /logs/install_julia.txt 2>&1 + if [[ "<< parameters.flags >>" != *'--editable'* ]] + then + rm -r esmvaltool + fi + - log_versions + - run: + name: Lint source code + command: | + . /opt/conda/etc/profile.d/conda.sh + conda activate esmvaltool + flake8 -j 4 + - test_and_report + - save_cache: + key: install-<< parameters.extra >>-{{ .Branch }}-{{ checksum "cache_key.txt" }} + paths: + - /opt/conda/pkgs + - /root/.cache/pip + - .pytest_cache - doc: - # Test building documentation - working_directory: /doc +jobs: + run_tests: + # Run tests docker: - - image: continuumio/miniconda3 + - image: esmvalgroup/esmvaltool:development + resource_class: large steps: - checkout - run: + name: Generate cache key + command: date '+%Y-%V' | tee cache_key.txt + - restore_cache: + key: test-{{ .Branch }}-{{ checksum "cache_key.txt" }} + - run: + name: Install dependencies command: | - . /opt/conda/etc/profile.d/conda.sh set -x + .
/opt/conda/etc/profile.d/conda.sh + conda activate esmvaltool mkdir /logs - # Install - apt update > /logs/apt.txt 2>&1 - apt -y install build-essential gfortran >> /logs/apt.txt 2>&1 - conda update -y conda > /logs/conda.txt 2>&1 - conda env update >> /logs/conda.txt 2>&1 - set +x; conda activate esmvaltool; set -x - pip install -e .[develop] > /logs/install.txt 2>&1 - # Log versions - dpkg -l > /logs/versions.txt - conda env export > /logs/environment.yml - pip freeze > /logs/requirements.txt - # Test building documentation - python setup.py build_sphinx - - store_artifacts: - path: /logs + pip install .[test] > /logs/install.txt 2>&1 + esmvaltool install Julia > /logs/install_julia.txt 2>&1 + - run: + name: Check Python code style and mistakes + command: | + . /opt/conda/etc/profile.d/conda.sh + conda activate esmvaltool + flake8 -j 4 + - run: + name: Remove source code to test the installed software + command: rm -r esmvaltool + - test_and_report + - save_cache: + key: test-{{ .Branch }}-{{ checksum "cache_key.txt" }} + paths: + - /root/.cache/pip + - .pytest_cache + - coverage-reporter/send_report: + coverage-reports: 'test-reports/coverage.xml' + project-token: $CODACY_PROJECT_TOKEN + skip: true # skip if project-token is not defined (i.e. on a fork) + + test_installation_from_source_test_mode: + # Test installation from source + docker: + - image: condaforge/miniforge3:latest + resource_class: large + steps: + - test_installation_from_source: + extra: test + + test_installation_from_source_develop_mode: + # Test development installation + docker: + - image: condaforge/miniforge3:latest + resource_class: large + steps: + - test_installation_from_source: + extra: develop + flags: "--editable" - conda_build: - # Test conda build - working_directory: /esmvaltool + test_upstream_development: + # Test running recipes with the development version of ESMValCore. The + # purpose of this test is to discover backward-incompatible changes early on in + # the development cycle. docker: - - image: continuumio/miniconda3 + - image: condaforge/miniforge3:latest + resource_class: large steps: + - run: + name: Install git and ssh + environment: + DEBIAN_FRONTEND: noninteractive # needed to install tzdata + command: apt update && apt install -y git ssh - checkout - run: + name: Generate cache key + command: echo $(date '+%Y')-$(expr $(date '+%V') / 2) | tee cache_key.txt + - restore_cache: + key: test-upstream-{{ .Branch }}-{{ checksum "cache_key.txt" }} + - run: + name: Install command: | + # Install according to instructions on readthedocs with the + # development version of ESMValTool and ESMValCore: + # https://docs.esmvaltool.org/en/latest/quickstart/installation.html#install-from-source . /opt/conda/etc/profile.d/conda.sh - set -x - # Install prerequisites mkdir /logs - apt update > /logs/apt.txt 2>&1 - apt -y install build-essential gfortran >> /logs/apt.txt 2>&1 - wget https://julialang-s3.julialang.org/bin/linux/x64/1.0/julia-1.0.3-linux-x86_64.tar.gz - tar xfz julia-*-linux-x86_64.tar.gz - ln -s $(pwd)/julia-*/bin/julia /usr/bin/julia - conda update -y conda > /logs/conda_base.txt 2>&1 - conda install -y conda-build conda-verify >> /logs/conda_base.txt 2>&1 - # Log versions - dpkg -l > /logs/versions.txt - conda env export -n base > /logs/build_environment.yml - # Build conda package - conda build .
-c conda-forge/label/cf201901 -c birdhouse > /logs/build_log.txt - # Install Python 3 conda package - conda create -y --name esmvaltool3 > /logs/conda_esmvaltool3.txt 2>&1 - set +x; conda activate esmvaltool3; set -x - conda install -y esmvaltool --use-local -c conda-forge/label/cf201901 -c birdhouse - conda env export > /logs/test_environment3.yml - esmvaltool -h + mamba env create >> /logs/conda.txt 2>&1 + conda activate esmvaltool + pip install --editable .[develop] + esmvaltool install Julia > /logs/install_julia.txt 2>&1 + git clone https://github.com/ESMValGroup/ESMValCore $HOME/ESMValCore + pip install --editable $HOME/ESMValCore[develop] + - log_versions + - test_and_report + - run: + name: Run recipes + command: | + . /opt/conda/etc/profile.d/conda.sh + conda activate esmvaltool + mkdir -p ~/climate_data + esmvaltool config get_config_user + echo "search_esgf: when_missing" >> ~/.config/esmvaltool/config-user.yml + cat ~/.config/esmvaltool/config-user.yml + for recipe in esmvaltool/recipes/testing/recipe_*.yml; do + esmvaltool run "$recipe" + done - store_artifacts: - path: /logs + path: /root/esmvaltool_output + - save_cache: + key: test-upstream-{{ .Branch }}-{{ checksum "cache_key.txt" }} + paths: + - /opt/conda/pkgs + - /root/.cache/pip + - /root/climate_data - conda_install: - # Test conda package installation - working_directory: /esmvaltool + build_documentation: + # Test building documentation docker: - - image: continuumio/miniconda3 + - image: condaforge/miniforge3:latest + resource_class: medium steps: + - checkout - run: command: | - . /opt/conda/etc/profile.d/conda.sh - set -x - # Install prerequisites mkdir /logs - apt update > /logs/apt.txt 2>&1 - apt -y install build-essential gfortran >> /logs/apt.txt 2>&1 - wget https://julialang-s3.julialang.org/bin/linux/x64/1.0/julia-1.0.3-linux-x86_64.tar.gz - tar xfz julia-*-linux-x86_64.tar.gz - ln -s $(pwd)/julia-*/bin/julia /usr/bin/julia - conda update -y conda > /logs/conda.txt 2>&1 - # Create and activate conda environment - conda create -y --name esmvaltool - set +x; conda activate esmvaltool; set -x + . /opt/conda/etc/profile.d/conda.sh # Install - conda install -y esmvaltool -c esmvalgroup -c conda-forge -c birdhouse + mamba env create + conda activate esmvaltool + pip install .[doc] # Log versions + dpkg -l > /logs/versions.txt conda env export > /logs/environment.yml - # Test installation - esmvaltool -h - ncl -V - cdo --version + pip freeze > /logs/requirements.txt + # Test building documentation + MPLBACKEND=Agg sphinx-build -W doc/sphinx/source doc/sphinx/build + - store_artifacts: + path: /logs - ncl_cdo_test: - # Test ncl and cdo conda packages - working_directory: /ncl + test_installation_from_conda: + # Test conda package installation docker: - - image: continuumio/miniconda3 + - image: condaforge/miniforge3:latest + resource_class: large steps: - - checkout - run: command: | .
/opt/conda/etc/profile.d/conda.sh set -x + # Install prerequisites mkdir /logs - # Install - apt update > /logs/apt.txt 2>&1 - apt -y install build-essential gfortran >> /logs/apt.txt 2>&1 - conda update -y conda > /logs/conda.txt 2>&1 - conda create -y --name ncl > /logs/conda.txt 2>&1 - set +x; conda activate ncl; set -x - conda install -y --channel conda-forge ncl cdo >> /logs/conda.txt 2>&1 + # Install ESMValTool in a new conda environment + mamba create -y --name esmvaltool -c conda-forge esmvaltool julia 'python=3.11' >> /logs/conda.txt 2>&1 + # Activate the environment + set +x; conda activate esmvaltool; set -x + # install the Julia dependencies + esmvaltool install Julia > /logs/install_Julia.txt 2>&1 # Log versions - dpkg -l > /logs/versions.txt - conda env export > /logs/environment.yml - # Test if NCL installed successfully + mamba env export > /logs/environment.yml + # Test installation + esmvaltool -- --help + esmvaltool version ncl -V cdo --version - store_artifacts: path: /logs workflows: - version: 2 commit: jobs: - - python3_test - - python3_install + - run_tests + - test_installation_from_source_test_mode + - test_installation_from_source_develop_mode nightly: triggers: - schedule: @@ -260,13 +295,11 @@ workflows: filters: branches: only: - - version2_development - - version2_master + - main jobs: - - python3_test - - python3_install - - develop - - doc - - conda_build - - conda_install - - ncl_cdo_test + - run_tests + - test_installation_from_source_test_mode + - test_installation_from_source_develop_mode + - test_upstream_development + - build_documentation + - test_installation_from_conda diff --git a/.circleci/install_triggers b/.circleci/install_triggers new file mode 100644 index 0000000000..79cdc890f7 --- /dev/null +++ b/.circleci/install_triggers @@ -0,0 +1,7 @@ +^\.circleci/ +^environment\.yml$ +^esmvaltool/install/ +^pyproject\.toml$ +^setup\.py$ +^setup\.cfg$ +^MANIFEST\.in$ diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000000..7168bcfade --- /dev/null +++ b/.dockerignore @@ -0,0 +1,10 @@ + +**/__pycache__ +.* +doc +tests +ESMValTool.egg-info + +!.git +!.zenodo.json + diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000000..2086d60173 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,2 @@ +esmvaltool/cmorizers @ESMValGroup/obs-maintainers +.github/workflows @valeriupredoi diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000000..5041b22420 --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,76 @@ + + +## Description + + +- Closes #issue_number +- Link to documentation: + +* * * + +## Before you get started + + + +- [ ] [☝ Create an issue](https://docs.esmvaltool.org/en/latest/community/code_documentation.html#contributing-code-and-documentation) to discuss what you are going to do + +## Checklist + +It is the responsibility of the author to make sure the pull request is ready to review. The icons indicate whether the item will be subject to the [🛠 Technical][1] or [🧪 Scientific][2] review.
+ + +[1]: https://docs.esmvaltool.org/en/latest/community/review.html#technical-review +[2]: https://docs.esmvaltool.org/en/latest/community/review.html#scientific-review + +- [ ] [🛠][1] This pull request has a [descriptive title](https://docs.esmvaltool.org/en/latest/community/code_documentation.html#pull-request-title) +- [ ] [🛠][1] Code is written according to the [code quality guidelines](https://docs.esmvaltool.org/en/latest/community/code_documentation.html#code-quality) +- [ ] [🛠][1] [Documentation](https://docs.esmvaltool.org/en/latest/community/code_documentation.html#documentation) is available +- [ ] [🛠][1] [Tests](https://docs.esmvaltool.org/en/latest/community/code_documentation.html#tests) run successfully +- [ ] [🛠][1] The [list of authors](https://docs.esmvaltool.org/en/latest/community/code_documentation.html#list-of-authors) is up to date +- [ ] [🛠][1] Any changed dependencies have been [added or removed correctly](https://docs.esmvaltool.org/en/latest/community/code_documentation.html#dependencies) +- [ ] [🛠][1] All [checks below this pull request](https://docs.esmvaltool.org/en/latest/community/code_documentation.html#pull-request-checks) were successful + +### [New or updated recipe/diagnostic](https://docs.esmvaltool.org/en/latest/community/diagnostic.html) + +- [ ] [🧪][2] [Recipe runs successfully](https://docs.esmvaltool.org/en/latest/community/diagnostic.html#testing-recipes) +- [ ] [🧪][2] [Recipe is well documented](https://docs.esmvaltool.org/en/latest/community/diagnostic.html#recipe-and-diagnostic-documentation) +- [ ] [🧪][2] [Figure(s) and data](https://docs.esmvaltool.org/en/latest/community/diagnostic.html#diagnostic-output) look as expected from literature +- [ ] [🛠][1] [Provenance information](https://docs.esmvaltool.org/en/latest/community/diagnostic.html#recording-provenance) has been added + +### [New or updated data reformatting script](https://docs.esmvaltool.org/en/latest/develop/dataset.html) + +- [ ] [🛠][1] [Documentation](https://docs.esmvaltool.org/en/latest/community/dataset.html#dataset-documentation) is available +- [ ] [🛠][1] The dataset has been [added to the CMOR check recipe](https://docs.esmvaltool.org/en/latest/community/dataset.html#testing) +- [ ] [🛠][1] The dataset has been added to the shared [data pools](https://docs.esmvaltool.org/en/latest/community/dataset.html#cmorized-data) of DKRZ and Jasmin by the @ESMValGroup/OBS-maintainers team +- [ ] [🧪][2] Numbers and units of the data look [physically meaningful](https://docs.esmvaltool.org/en/latest/community/dataset.html#scientific-sanity-check) + +*** + +To help with the number of pull requests: + +- 🙏 We kindly ask you to [review](https://docs.esmvaltool.org/en/latest/community/review.html#review-of-pull-requests) two other [open pull requests](https://github.com/ESMValGroup/ESMValTool/pulls) in this repository + + diff --git a/.github/workflows/citation_file_validator.yml b/.github/workflows/citation_file_validator.yml new file mode 100644 index 0000000000..e957d40f86 --- /dev/null +++ b/.github/workflows/citation_file_validator.yml @@ -0,0 +1,24 @@ +# workflow that performs a validity check of CITATION.cff file +# authors: Abel S. 
Siqueira, Faruk Diblen, Jurriaan Spaaks GH: @abelsiqueira, @fdiblen, @jspaaks + +name: CFF File Validator + +on: + push: + paths: + - CITATION.cff + schedule: + - cron: '0 0 1 * *' + +jobs: + validate: + name: "validate" + runs-on: ubuntu-latest + steps: + - name: Check out a copy of the repository + uses: actions/checkout@v4 + + - name: Check whether the citation metadata from CITATION.cff is valid + uses: citation-file-format/cffconvert-github-action@2.0.0 + with: + args: "--validate" diff --git a/.github/workflows/create-condalock-file.yml b/.github/workflows/create-condalock-file.yml new file mode 100644 index 0000000000..7babd2a456 --- /dev/null +++ b/.github/workflows/create-condalock-file.yml @@ -0,0 +1,101 @@ +name: Conda lock file creation + +on: + # Trigger on push on main or other branch for testing + # NOTE that push: main will create the file very often + # and hence lots of automated PRs + # push: + # branches: + # - main + schedule: + - cron: '0 4 */10 * *' + +# Required shell entrypoint to have properly configured bash shell +defaults: + run: + shell: bash -l {0} + +jobs: + create-lock-file: + name: Create conda lock file for latest Python + runs-on: 'ubuntu-latest' + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + activate-environment: esmvaltool-fromlock + python-version: "3.12" + miniforge-version: "latest" + use-mamba: true + - name: Show conda config + run: | + conda update -n base -c conda-forge conda + conda --version + # setup-miniconda@v3 installs an old conda and mamba + # forcing a modern mamba updates both mamba and conda + # unpin mamba after conda-lock=3 release + # see github.com/ESMValGroup/ESMValTool/issues/3782 + conda install -c conda-forge "mamba>=1.4.8,<2" + conda config --show-sources + conda config --show + conda --version + mamba --version + - name: Python info + run: | + which python + python --version + - name: Install conda-lock + run: mamba install -y -c conda-forge conda-lock + - name: Check version of conda-lock + run: conda-lock --version + - name: Create conda lock file for linux-64 + run: conda-lock lock --platform linux-64 -f environment.yml --mamba --kind explicit + - name: Show conda version again + run: conda --version + - name: Show Python exec and version again + run: | + which python + python -V + - name: Create conda-lock environment + run: conda create --name esmvaltool-fromlock --file conda-linux-64.lock + - name: Install pip + run: mamba install -y pip + - name: Check Python and pip versions post pip-install + run: | + which python + pip --version + - name: Install ESMValTool + run: pip install -e .[develop] + - name: Check ESMValTool help + run: esmvaltool --help + - name: Check ESMValTool version + run: esmvaltool version + - name: Run flake8 + run: flake8 + - name: Run tests + run: pytest -n 2 -m "not installation" + # Automated PR + # see https://github.com/marketplace/actions/create-pull-request + - name: Create Automated PR if conda lock file has changed + uses: peter-evans/create-pull-request@v3 + with: + token: ${{ secrets.GITHUB_TOKEN }} + commit-message: Updating Linux condalock file + author: ${{ github.actor }} <${{ github.actor }}@users.noreply.github.com> + signoff: false + branch: condalock-update + delete-branch: true + title: '[Condalock] Update Linux condalock file' + body: | + Update condalock file + Automatic Pull Request.
+ labels: | + testing + condalock + automatedPR + assignees: valeriupredoi + reviewers: valeriupredoi + draft: false diff --git a/.github/workflows/install-from-conda.yml b/.github/workflows/install-from-conda.yml new file mode 100644 index 0000000000..185add02a8 --- /dev/null +++ b/.github/workflows/install-from-conda.yml @@ -0,0 +1,87 @@ +--- +name: Install from Conda + +# runs on a push on main and at the end of every day +on: + push: + branches: + - main + schedule: + - cron: '0 4 * * *' + +# Required shell entrypoint to have properly configured bash shell +defaults: + run: + shell: bash -l {0} + +jobs: + linux: + runs-on: "ubuntu-latest" + strategy: + fail-fast: false + matrix: + python-version: ["3.10", "3.11", "3.12"] + name: Linux Python ${{ matrix.python-version }} + steps: + - uses: conda-incubator/setup-miniconda@v3 + with: + python-version: ${{ matrix.python-version }} + miniforge-version: "latest" + use-mamba: true + - run: mkdir -p conda_install_linux_artifacts_python_${{ matrix.python-version }} + - name: Record versions + run: | + mamba --version 2>&1 | tee conda_install_linux_artifacts_python_${{ matrix.python-version }}/conda_version.txt + which conda 2>&1 | tee conda_install_linux_artifacts_python_${{ matrix.python-version }}/conda_path.txt + which mamba 2>&1 | tee -a conda_install_linux_artifacts_python_${{ matrix.python-version }}/conda_path.txt + python -V 2>&1 | tee conda_install_linux_artifacts_python_${{ matrix.python-version }}/python_version.txt + - name: Install ESMValTool + run: mamba install esmvaltool 2>&1 | tee conda_install_linux_artifacts_python_${{ matrix.python-version }}/install.txt + - name: Verify installation + run: | + esmvaltool --help + esmvaltool version 2>&1 | tee conda_install_linux_artifacts_python_${{ matrix.python-version }}/version.txt + - name: Upload artifacts + if: ${{ always() }} # upload artifacts even if fail + uses: actions/upload-artifact@v4 + with: + name: Conda_Install_Linux_python_${{ matrix.python-version }} + path: conda_install_linux_artifacts_python_${{ matrix.python-version }} + +# uncomment from here when we have a testing environment on an OSX machine +# and we know that this should work +# +# osx: +# runs-on: "macos-latest" +# strategy: +# matrix: +# python-version: ["3.10", "3.11"] +# fail-fast: false +# name: OSX Python ${{ matrix.python-version }} +# steps: +# - uses: actions/checkout@v2 +# - uses: conda-incubator/setup-miniconda@v3 +# with: +# python-version: ${{ matrix.python-version }} +# miniconda-version: "latest" +# channels: conda-forge +# - run: mkdir -p conda_install_osx_artifacts_python_${{ matrix.python-version }} +# - run: conda --version 2>&1 | tee conda_install_osx_artifacts_python_${{ matrix.python-version }}/conda_version.txt +# - run: which conda 2>&1 | tee conda_install_osx_artifacts_python_${{ matrix.python-version }}/conda_path.txt +# - run: python -V 2>&1 | tee conda_install_osx_artifacts_python_${{ matrix.python-version }}/python_version.txt +# # ncurses needs to be from conda-forge and not main channel +# # for now it's turned off since we're not testing R/Julia installs +# # - run: conda uninstall -y ncurses +# # - run: conda list ncurses +# # - run: conda install -y conda-forge::ncurses +# # - run: conda list ncurses +# - run: conda install esmvaltool --no-update-deps 2>&1 | tee conda_install_osx_artifacts_python_${{ matrix.python-version }}/install.txt +# - run: conda install esmvaltool-python esmvaltool-ncl 2>&1 | tee conda_install_osx_artifacts_python_${{ matrix.python-version }}/install.txt 
+# - run: esmvaltool --help +# - run: esmvaltool version 2>&1 | tee conda_install_osx_artifacts_python_${{ matrix.python-version }}/version.txt +# - name: Upload artifacts +# if: ${{ always() }} # upload artifacts even if fail +# uses: actions/upload-artifact@v4 +# with: +# name: Conda_Install_OSX_python_${{ matrix.python-version }} +# path: conda_install_osx_artifacts_python_${{ matrix.python-version }} diff --git a/.github/workflows/install-from-condalock-file.yml b/.github/workflows/install-from-condalock-file.yml new file mode 100644 index 0000000000..0f11cddc6e --- /dev/null +++ b/.github/workflows/install-from-condalock-file.yml @@ -0,0 +1,63 @@ +# Install esmvaltool from a conda lock file +# To build a conda lock file install conda-lock first then +# run conda-lock lock --platform linux-64 -f environment.yml --mamba +# (mamba activated for speed). Change platform for osx-64 or win-64. +# Env creation then happens as per normal use with +# conda create --name esmvaltool-fromlock --file conda-linux-64.lock +# note that pip and conda are NOT installed. + +name: Conda-lock Install +on: + push: + branches: + - main + # - condalock-update + # run the test only if the PR is to main + # turn it on if required + #pull_request: + # branches: + # - main + schedule: + - cron: '0 0 * * *' + +# Required shell entrypoint to have properly configured bash shell +defaults: + run: + shell: bash -l {0} + +jobs: + linux: + runs-on: "ubuntu-latest" + strategy: + matrix: + python-version: ["3.10", "3.11", "3.12"] + fail-fast: false + name: Linux Python ${{ matrix.python-version }} + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: conda-incubator/setup-miniconda@v3 + with: + activate-environment: esmvaltool-fromlock + python-version: ${{ matrix.python-version }} + miniconda-version: "latest" + channels: conda-forge + - run: mkdir -p source_install_linux_artifacts_python_${{ matrix.python-version }} + - run: conda --version 2>&1 | tee source_install_linux_artifacts_python_${{ matrix.python-version }}/conda_version.txt + - run: which python + - run: python -V 2>&1 | tee source_install_linux_artifacts_python_${{ matrix.python-version }}/python_version.txt + - run: conda create --name esmvaltool-fromlock --file conda-linux-64.lock + - run: which python + - run: pip --version + - run: pip install -e .[develop] + - run: esmvaltool --help + - run: esmvaltool version 2>&1 | tee source_install_linux_artifacts_python_${{ matrix.python-version }}/version.txt + - run: flake8 + - run: pytest -n 2 -m "not installation" + - name: Upload artifacts + if: ${{ always() }} # upload artifacts even if fail + uses: actions/upload-artifact@v4 + with: + name: Source_Install_Linux_python_${{ matrix.python-version }} + path: source_install_linux_artifacts_python_${{ matrix.python-version }} diff --git a/.github/workflows/install-from-source.yml b/.github/workflows/install-from-source.yml new file mode 100644 index 0000000000..018fcb2a0a --- /dev/null +++ b/.github/workflows/install-from-source.yml @@ -0,0 +1,84 @@ +--- +name: Install from Source + +# runs on a push on main and at the end of every day +on: + push: + branches: + - main + schedule: + - cron: '0 0 * * *' + +# Required shell entrypoint to have properly configured bash shell +defaults: + run: + shell: bash -l {0} + +jobs: + linux: + runs-on: "ubuntu-latest" + strategy: + matrix: + python-version: ["3.10", "3.11", "3.12"] + fail-fast: false + name: Linux Python ${{ matrix.python-version }} + steps: + - uses: actions/checkout@v4 + with: + 
fetch-depth: 0 + - uses: conda-incubator/setup-miniconda@v3 + with: + activate-environment: esmvaltool + environment-file: environment.yml + python-version: ${{ matrix.python-version }} + miniforge-version: "latest" + use-mamba: true + - run: mkdir -p source_install_linux_artifacts_python_${{ matrix.python-version }} + - name: Record versions + run: | + mamba --version 2>&1 | tee source_install_linux_artifacts_python_${{ matrix.python-version }}/conda_version.txt + python -V 2>&1 | tee source_install_linux_artifacts_python_${{ matrix.python-version }}/python_version.txt + - name: Install + run: pip install -e .[develop] 2>&1 | tee source_install_linux_artifacts_python_${{ matrix.python-version }}/install.txt + - name: Verify installation + run: | + esmvaltool --help + esmvaltool version 2>&1 | tee source_install_linux_artifacts_python_${{ matrix.python-version }}/version.txt + - name: Upload artifacts + if: ${{ always() }} # upload artifacts even if fail + uses: actions/upload-artifact@v4 + with: + name: Source_Install_Linux_python_${{ matrix.python-version }} + path: source_install_linux_artifacts_python_${{ matrix.python-version }} + +# uncomment from here when we have a testing environment on an OSX machine +# and we know that this should work +# +# osx: +# runs-on: "macos-latest" +# strategy: +# matrix: +# python-version: ["3.10", "3.11"] +# fail-fast: false +# name: OSX Python ${{ matrix.python-version }} +# steps: +# - uses: actions/checkout@v2 +# - uses: conda-incubator/setup-miniconda@v3 +# with: +# activate-environment: esmvaltool +# environment-file: environment.yml +# python-version: ${{ matrix.python-version }} +# miniconda-version: "latest" +# channels: conda-forge +# - run: mkdir -p source_install_osx_artifacts_python_${{ matrix.python-version }} +# - run: conda --version 2>&1 | tee source_install_osx_artifacts_python_${{ matrix.python-version }}/conda_version.txt +# - run: python -V 2>&1 | tee source_install_osx_artifacts_python_${{ matrix.python-version }}/python_version.txt +# - run: pip install -e .[develop] 2>&1 | tee source_install_osx_artifacts_python_${{ matrix.python-version }}/install.txt +# - run: esmvaltool --help +# - run: esmvaltool version 2>&1 | tee source_install_osx_artifacts_python_${{ matrix.python-version }}/version.txt +# - name: Upload artifacts +# if: ${{ always() }} # upload artifacts even if fail +# uses: actions/upload-artifact@v4 +# with: +# name: Source_Install_OSX_python_${{ matrix.python-version }} +# path: source_install_osx_artifacts_python_${{ matrix.python-version }} diff --git a/.github/workflows/pypi-build-and-deploy.yml b/.github/workflows/pypi-build-and-deploy.yml new file mode 100644 index 0000000000..d6df3626e6 --- /dev/null +++ b/.github/workflows/pypi-build-and-deploy.yml @@ -0,0 +1,52 @@ +--- +name: PyPi Build and Deploy 🐍📦 + +on: + release: + types: [published] + # use this for testing + push: + branches: + - main + +jobs: + build-n-publish: + name: Build and publish ESMValTool on PyPi + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Set up Python 3.12 + uses: actions/setup-python@v1 + with: + python-version: "3.12" + - name: Install pep517 + run: >- + python -m + pip install + pep517 + --user + - name: Build a binary wheel and a source tarball + run: >- + python -m + pep517.build + --source + --binary + --out-dir dist/ + . 
+ # Publish on Test PyPi; uncomment to test + # and remember to adjust the triggers above + # - name: Publish distribution 📦 to Test PyPI + # uses: pypa/gh-action-pypi-publish@master + # with: + # password: ${{ secrets.test_pypi_password }} + # repository_url: https://test.pypi.org/legacy/ + + # Publish on PyPi + - name: Publish distribution 📦 to PyPI + if: startsWith(github.ref, 'refs/tags') + uses: pypa/gh-action-pypi-publish@v1.5.0 + with: + user: __token__ + password: ${{ secrets.pypi_password }} diff --git a/.github/workflows/run-tests-monitor.yml b/.github/workflows/run-tests-monitor.yml new file mode 100644 index 0000000000..1fc657e387 --- /dev/null +++ b/.github/workflows/run-tests-monitor.yml @@ -0,0 +1,113 @@ +--- +name: Monitor Tests +on: + push: + branches: + - main + # run the test only if the PR is to main + # turn it on if required + # pull_request: + # branches: + # - main + schedule: + - cron: '0 0 * * *' # nightly + +# Required shell entrypoint to have properly configured bash shell +defaults: + run: + shell: bash -l {0} + +jobs: + linux: + runs-on: "ubuntu-latest" + strategy: + fail-fast: false + matrix: + python-version: ["3.10", "3.11", "3.12"] + name: Linux Python ${{ matrix.python-version }} + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: conda-incubator/setup-miniconda@v3 + with: + activate-environment: esmvaltool + environment-file: environment.yml + python-version: ${{ matrix.python-version }} + miniforge-version: "latest" + use-mamba: true + - run: mkdir -p test_linux_artifacts_python_${{ matrix.python-version }} + - name: Record versions + run: | + mamba --version 2>&1 | tee test_linux_artifacts_python_${{ matrix.python-version }}/conda_version.txt + python -V 2>&1 | tee test_linux_artifacts_python_${{ matrix.python-version }}/python_version.txt + - name: Inspect environment + run: conda list + - name: Install pytest-monitor + run: pip install pytest-monitor + - name: Install ESMValTool + run: pip install -e .[develop] 2>&1 | tee test_linux_artifacts_python_${{ matrix.python-version }}/install.txt + - name: Install Julia dependencies + run: esmvaltool install Julia + - name: Run tests + run: > + pytest -n 2 -m "not installation" --db ../.pymon 2>&1 + | tee test_linux_artifacts_python_${{ matrix.python-version }}/test_report.txt + - name: Parse monitor information + run: python tests/parse_pymon.py + - name: Upload artifacts + if: ${{ always() }} # upload artifacts even if fail + uses: actions/upload-artifact@v4 + with: + name: Test_Linux_python_${{ matrix.python-version }} + path: test_linux_artifacts_python_${{ matrix.python-version }} + + osx: + runs-on: "macos-latest" + strategy: + matrix: + python-version: ["3.10", "3.11", "3.12"] + architecture: ["x64"] # need to force Intel, arm64 builds have issues + fail-fast: false + name: OSX Python ${{ matrix.python-version }} + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: conda-incubator/setup-miniconda@v3 + with: + architecture: ${{ matrix.architecture }} + activate-environment: esmvaltool + environment-file: environment_osx.yml + python-version: ${{ matrix.python-version }} + miniforge-version: "latest" + use-mamba: true + # - name: Install libomp with homebrew + # run: brew install libomp + - run: mkdir -p test_osx_artifacts_python_${{ matrix.python-version }} + - name: Record versions + run: | + mamba --version 2>&1 | tee test_osx_artifacts_python_${{ matrix.python-version }}/conda_version.txt + python -V 2>&1 | tee test_osx_artifacts_python_${{ 
matrix.python-version }}/python_version.txt + - name: Inspect environment + run: conda list + - name: Install git + run: mamba install -c conda-forge git + - name: Install pytest-monitor + run: pip install pytest-monitor + - name: Install ESMValTool + run: > + pip install -e .[develop] 2>&1 + | tee test_osx_artifacts_python_${{ matrix.python-version }}/install.txt + - name: Run tests + run: > + pytest -n 2 -m "not installation" --db ../.pymon 2>&1 + | tee test_osx_artifacts_python_${{ matrix.python-version }}/test_report.txt + - name: Parse monitor information + run: python tests/parse_pymon.py + - name: Upload artifacts + if: ${{ always() }} # upload artifacts even if fail + uses: actions/upload-artifact@v4 + with: + name: Test_OSX_python_${{ matrix.python-version }} + path: test_osx_artifacts_python_${{ matrix.python-version }} diff --git a/.github/workflows/test-development.yml b/.github/workflows/test-development.yml new file mode 100644 index 0000000000..f6718a866e --- /dev/null +++ b/.github/workflows/test-development.yml @@ -0,0 +1,66 @@ +# Action that runs the full development stack: +# Steps (Python-only): +# - creates esmvaltool conda environment and pip-installs esmvaltool +# - downloads latest main of esmvalcore and installs it in development mode +# - runs tests of esmvaltool +# Triggered by a push to main and nightly +--- +name: Test in Full Development Mode + +# runs on a push on main and at the end of every day +on: + push: + branches: + - main + schedule: + - cron: '0 0 * * *' + +# Required shell entrypoint to have properly configured bash shell +defaults: + run: + shell: bash -l {0} + +jobs: + linux: + runs-on: "ubuntu-latest" + strategy: + fail-fast: false + matrix: + python-version: ["3.10", "3.11", "3.12"] + name: Linux Python ${{ matrix.python-version }} + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: conda-incubator/setup-miniconda@v3 + with: + activate-environment: esmvaltool + environment-file: environment.yml + python-version: ${{ matrix.python-version }} + miniforge-version: "latest" + use-mamba: true + - run: mkdir -p develop_test_linux_artifacts_python_${{ matrix.python-version }} + - name: Record versions + run: | + mamba --version 2>&1 | tee develop_test_linux_artifacts_python_${{ matrix.python-version }}/conda_version.txt + python -V 2>&1 | tee develop_test_linux_artifacts_python_${{ matrix.python-version }}/python_version.txt + - name: Install ESMValTool + run: pip install -e .[develop] 2>&1 | tee develop_test_linux_artifacts_python_${{ matrix.python-version }}/install.txt + - name: Install Julia dependencies + run: esmvaltool install Julia + - name: Install development version of ESMValCore + run: | + cd .. 
+ git clone https://github.com/ESMValGroup/ESMValCore.git + cd ESMValCore + pip install -e .[develop] + - name: Run flake8 + run: flake8 + - name: Run tests + run: pytest -n 2 -m "not installation" 2>&1 | tee develop_test_linux_artifacts_python_${{ matrix.python-version }}/test_report.txt + - name: Upload artifacts + if: ${{ always() }} # upload artifacts even if fail + uses: actions/upload-artifact@v4 + with: + name: Develop_Test_Linux_python_${{ matrix.python-version }} + path: develop_test_linux_artifacts_python_${{ matrix.python-version }} diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 0000000000..8b3c9ceb39 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,120 @@ +--- +name: Test + +# runs on a push on main and at the end of every day +on: + push: + branches: + - main + schedule: + - cron: '0 0 * * *' + +# Required shell entrypoint to have properly configured bash shell +defaults: + run: + shell: bash -l {0} + +jobs: + linux: + runs-on: "ubuntu-latest" + strategy: + fail-fast: false + matrix: + python-version: ["3.10", "3.11", "3.12"] + name: Linux Python ${{ matrix.python-version }} + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: conda-incubator/setup-miniconda@v3 + with: + activate-environment: esmvaltool + environment-file: environment.yml + python-version: ${{ matrix.python-version }} + miniforge-version: "latest" + use-mamba: true + - run: mkdir -p test_linux_artifacts_python_${{ matrix.python-version }} + - name: Record versions + run: | + mamba --version 2>&1 | tee test_linux_artifacts_python_${{ matrix.python-version }}/conda_version.txt + python -V 2>&1 | tee test_linux_artifacts_python_${{ matrix.python-version }}/python_version.txt + # this is how to export variables to the GITHUB var environment + echo "pver0=$(python -V)" >> $GITHUB_ENV + - name: Inspect environment + run: conda list + - name: Install ESMValTool + run: pip install -e .[develop] 2>&1 | tee test_linux_artifacts_python_${{ matrix.python-version }}/install.txt + - name: Examine conda environment + run: conda list + - name: Install Julia dependencies + run: esmvaltool install Julia + - name: Export Python minor version + run: echo "pver1=$(python -V)" >> $GITHUB_ENV + - name: Exit if Python minor version changed + if: ${{ env.pver1 != env.pver0}} + run: | + echo "Python minor version changed after Julia install" + python -V + exit 1 + - name: Inspect environment + run: conda list + - name: Run flake8 + run: flake8 + - name: Run tests + run: pytest -n 2 -m "not installation" 2>&1 | tee test_linux_artifacts_python_${{ matrix.python-version }}/test_report.txt + - name: Upload artifacts + if: ${{ always() }} # upload artifacts even if fail + uses: actions/upload-artifact@v4 + with: + name: Test_Linux_python_${{ matrix.python-version }} + path: test_linux_artifacts_python_${{ matrix.python-version }} + + osx: + runs-on: "macos-latest" + strategy: + matrix: + python-version: ["3.10", "3.11", "3.12"] + architecture: ["x64"] # need to force Intel, arm64 builds have issues + fail-fast: false + name: OSX Python ${{ matrix.python-version }} + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: conda-incubator/setup-miniconda@v3 + with: + architecture: ${{ matrix.architecture }} + activate-environment: esmvaltool + environment-file: environment_osx.yml + python-version: ${{ matrix.python-version }} + miniforge-version: "latest" + use-mamba: true + # - name: Install libomp with homebrew + # run: brew install libomp + - 
run: mkdir -p test_osx_artifacts_python_${{ matrix.python-version }} + - name: Record versions + run: | + mamba --version 2>&1 | tee test_osx_artifacts_python_${{ matrix.python-version }}/conda_version.txt + python -V 2>&1 | tee test_osx_artifacts_python_${{ matrix.python-version }}/python_version.txt + - name: Inspect environment + run: conda list + - name: Determine if git + run: | + which git + git --version + - name: Install git + run: mamba install -c conda-forge git + - name: Install ESMValTool + run: pip install -e .[develop] 2>&1 | tee test_osx_artifacts_python_${{ matrix.python-version }}/install.txt + - name: Inspect environment + run: conda list + - name: Run flake8 + run: flake8 + - name: Run tests + run: pytest -n 2 -m "not installation" 2>&1 | tee test_osx_artifacts_python_${{ matrix.python-version }}/test_report.txt + - name: Upload artifacts + if: ${{ always() }} # upload artifacts even if fail + uses: actions/upload-artifact@v4 + with: + name: Test_OSX_python_${{ matrix.python-version }} + path: test_osx_artifacts_python_${{ matrix.python-version }} diff --git a/.gitignore b/.gitignore index c0f1f23055..15818062c4 100644 --- a/.gitignore +++ b/.gitignore @@ -45,6 +45,7 @@ __pycache__/ #Create by VSCode .vscode +*.code-workspace #pytest .cache @@ -83,9 +84,10 @@ test-reports/ /python_test_out.txt # Build folder -doc/sphinx/source/diag_scripts/** -doc/sphinx/source/plot_scripts/** doc/sphinx/build +doc/sphinx/_build +doc/sphinx/source/_sidebar.rst.inc +doc/sphinx/source/gallery.rst # Data files *.nc @@ -100,3 +102,13 @@ doc/sphinx/build # ESMF log files *.ESMF_LogFile + +# Rstudio files +.Rproj.user +esmvaltool.Rproj + +# Julia installation +esmvaltool/install/Julia/Manifest.toml + +# Cylc suite +esmvaltool/utils/testing/regression/.service/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000000..f3ac440f05 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,58 @@ +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +--- +exclude: | + (?x) + ^doc/sphinx/source/conf.py| + ^esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/| + ^esmvaltool/diag_scripts/cvdp/ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.6.0 + hooks: + - id: check-added-large-files + - id: check-ast + - id: check-case-conflict + - id: check-merge-conflict + - id: debug-statements + - id: end-of-file-fixer + - id: trailing-whitespace + args: [--markdown-linebreak-ext=md] + - repo: https://github.com/adrienverge/yamllint + rev: 'v1.35.1' + hooks: + - id: yamllint + - repo: local # nclcodestyle is installed alongside ESMValTool + hooks: + - id: nclcodestyle + name: nclcodestyle + entry: nclcodestyle + language: system + files: '\.(ncl|NCL)$' + - repo: https://github.com/lorenzwalthert/precommit/ # Checks for R + rev: 'v0.4.2' + hooks: + - id: style-files # styler + - id: lintr + - repo: https://github.com/codespell-project/codespell + rev: 'v2.3.0' + hooks: + - id: codespell + - repo: https://github.com/PyCQA/isort + rev: '5.13.2' + hooks: + - id: isort + - repo: https://github.com/pre-commit/mirrors-yapf + rev: 'v0.32.0' + hooks: + - id: yapf + additional_dependencies: + - 'toml' + - repo: https://github.com/myint/docformatter + rev: 'v1.7.5' + hooks: + - id: docformatter + - repo: https://github.com/pycqa/flake8 + rev: '5.0.4' + hooks: + - id: flake8 diff --git a/.prospector.yml b/.prospector.yml index 15fc8b71e9..dbc62018eb 100644 --- a/.prospector.yml +++ b/.prospector.yml 
@@ -19,4 +19,4 @@ pep257: # disable rules that are allowed by the numpy convention # see https://github.com/PyCQA/pydocstyle/blob/master/src/pydocstyle/violations.py # and http://pydocstyle.readthedocs.io/en/latest/error_codes.html - disable: ['D107', 'D203', 'D212', 'D213', 'D402', 'D413'] + disable: ['D107', 'D203', 'D212', 'D213', 'D402', 'D413', 'D416'] diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 0000000000..974ac2ee78 --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,31 @@ +# .readthedocs.yaml +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +# Set the version of Python and other tools you might need +build: + os: ubuntu-lts-latest + tools: + # try miniforge3 when available? see github.com/ESMValGroup/ESMValTool/issues/3779 + # DO NOT use mambaforge-*; that is currently sunsetted + python: "miniconda-latest" + jobs: + post_create_environment: + - conda run -n ${CONDA_DEFAULT_ENV} pip install . --no-deps + +# Declare the requirements required to build your docs +conda: + environment: + environment.yml + +# Build documentation in the doc directory with Sphinx +sphinx: + configuration: doc/sphinx/source/conf.py + fail_on_warning: true + +# If using Sphinx, optionally build your docs in additional formats such as PDF +formats: + - pdf diff --git a/.stickler.yml b/.stickler.yml deleted file mode 100644 index 173ac052bd..0000000000 --- a/.stickler.yml +++ /dev/null @@ -1,24 +0,0 @@ -# stickler-ci configuration - ---- - -linters: - flake8: - pep8: - yamllint: - shellcheck: -files: - ignore: [ - 'doc/sphinx', - 'esmvaltool/doc/sphinx', - # ignore old stuff, recent versions are in 'esmvaltool' dir - 'backend', - 'diag_scripts', - 'interface_data', - 'interface_scripts', - 'main.py', - 'nml', - 'plot_scripts', - 'reformat_scripts', - 'variable_defs', - ] diff --git a/.yamllint b/.yamllint new file mode 100644 index 0000000000..aca491af2f --- /dev/null +++ b/.yamllint @@ -0,0 +1,8 @@ +--- + +extends: default + +rules: + line-length: + max: 120 + octal-values: enable diff --git a/.zenodo.json b/.zenodo.json new file mode 100644 index 0000000000..c087c4ae21 --- /dev/null +++ b/.zenodo.json @@ -0,0 +1,436 @@ +{ + "creators": [ + { + "affiliation": "NLeSC, Netherlands", + "name": "Andela, Bouwe", + "orcid": "0000-0001-9005-8940" + }, + { + "affiliation": "DLR, Germany", + "name": "Broetz, Bjoern" + }, + { + "affiliation": "PML, UK", + "name": "de Mora, Lee", + "orcid": "0000-0002-5080-3149" + }, + { + "affiliation": "NLeSC, Netherlands", + "name": "Drost, Niels", + "orcid": "0000-0001-9795-7981" + }, + { + "affiliation": "DLR, Germany", + "name": "Eyring, Veronika", + "orcid": "0000-0002-6887-4885" + }, + { + "affiliation": "AWI, Germany", + "name": "Koldunov, Nikolay", + "orcid": "0000-0002-3365-8146" + }, + { + "affiliation": "DLR, Germany", + "name": "Lauer, Axel", + "orcid": "0000-0002-9270-1044" + }, + { + "affiliation": "LMU, Germany", + "name": "Mueller, Benjamin" + }, + { + "affiliation": "URead, UK", + "name": "Predoi, Valeriu", + "orcid": "0000-0002-9729-6578" + }, + { + "affiliation": "DLR, Germany", + "name": "Righi, Mattia", + "orcid": "0000-0003-3827-5950" + }, + { + "affiliation": "DLR, Germany", + "name": "Schlund, Manuel", + "orcid": "0000-0001-5251-0158" + }, + { + "affiliation": "BSC, Spain", + "name": "Vegas-Regidor, Javier", + "orcid": "0000-0003-0096-4291" + }, + { + "affiliation": "SMHI, Sweden", + "name": "Zimmermann, Klaus" + }, + { + "affiliation": 
"University of Bremen, Germany", + "name": "Adeniyi, Kemisola" + }, + { + "affiliation": "ISAC-CNR, Italy", + "name": "Arnone, Enrico", + "orcid": "0000-0001-6740-5051" + }, + { + "affiliation": "BSC, Spain", + "name": "Bellprat, Omar", + "orcid": "0000-0001-6434-1793" + }, + { + "affiliation": "SMHI, Sweden", + "name": "Berg, Peter", + "orcid": "0000-0002-1469-2568" + }, + { + "affiliation": "DLR, Germany", + "name": "Bock, Lisa", + "orcid": "0000-0001-7058-5938" + }, + { + "affiliation": "MetOffice, UK", + "name": "Bodas-Salcedo, Alejandro", + "orcid": "0000-0002-7890-2536" + }, + { + "affiliation": "BSC, Spain", + "name": "Caron, Louis-Philippe", + "orcid": "0000-0001-5221-0147" + }, + { + "affiliation": "MPI for Biogeochemistry, Germany", + "name": "Carvalhais, Nuno" + }, + { + "affiliation": "ENEA, Italy", + "name": "Cionni, Irene", + "orcid": "0000-0002-0591-9193" + }, + { + "affiliation": "BSC, Spain", + "name": "Cortesi, Nicola", + "orcid": "0000-0002-1442-9225" + }, + { + "affiliation": "ISAC-CNR, Italy", + "name": "Corti, Susanna" + }, + { + "affiliation": "ETH Zurich, Switzerland", + "name": "Crezee, Bas", + "orcid": "0000-0002-1774-1126" + }, + { + "affiliation": "ETH Zurich, Switzerland", + "name": "Davin, Edouard Leopold", + "orcid": "0000-0003-3322-9330" + }, + { + "affiliation": "ISAC-CNR, Italy", + "name": "Davini, Paolo", + "orcid": "0000-0003-3389-7849" + }, + { + "affiliation": "NCAR, USA", + "name": "Deser, Clara" + }, + { + "affiliation": "NLeSC, Netherlands", + "name": "Diblen, Faruk" + }, + { + "affiliation": "UCLouvain, Belgium", + "name": "Docquier, David" + }, + { + "affiliation": "MetOffice, UK", + "name": "Dreyer, Laura" + }, + { + "affiliation": "DKRZ, Germany", + "name": "Ehbrecht, Carsten" + }, + { + "affiliation": "MetOffice, UK", + "name": "Earnshaw, Paul" + }, + { + "affiliation": "University of Bremen, Germany", + "name": "Gier, Bettina" + }, + { + "affiliation": "BSC, Spain", + "name": "Gonzalez-Reviriego, Nube", + "orcid": "0000-0002-5919-6701" + }, + { + "affiliation": "University of Arizona, USA", + "name": "Goodman, Paul" + }, + { + "affiliation": "HZG, Germany", + "name": "Hagemann, Stefan", + "orcid": "0000-0001-5444-2945" + }, + { + "affiliation": "University of Canterbury, New Zealand", + "name": "Hardacre, Catherine", + "orcid": "0000-0001-9093-4656" + }, + { + "affiliation": "ISAC-CNR, Italy", + "name": "von Hardenberg, Jost", + "orcid": "0000-0002-5312-8070" + }, + { + "affiliation": "DLR, Germany", + "name": "Hassler, Birgit", + "orcid": "0000-0003-2724-709X" + }, + { + "affiliation": "DLR, Germany", + "name": "Heuer, Helge", + "orcid": "0000-0003-2411-7150" + }, + { + "affiliation": "BSC, Spain", + "name": "Hunter, Alasdair", + "orcid": "0000-0001-8365-3709" + }, + { + "affiliation": "FUB, Germany", + "name": "Kadow, Christopher" + }, + { + "affiliation": "DKRZ, Germany", + "name": "Kindermann, Stephan", + "orcid": "0000-0001-9335-1093" + }, + { + "affiliation": "MPI for Biogeochemistry, Germany", + "name": "Koirala, Sujan" + }, + { + "affiliation": "DLR, Germany", + "name": "Kuehbacher, Birgit" + }, + { + "affiliation": "BSC, Spain", + "name": "Lledó, Llorenç" + }, + { + "affiliation": "ETH Zurich, Switzerland", + "name": "Lejeune, Quentin" + }, + { + "affiliation": "University of Hamburg, German", + "name": "Lembo, Valerio", + "orcid": "0000-0001-6085-5914" + }, + { + "affiliation": "MetOffice, UK", + "name": "Little, Bill" + }, + { + "affiliation": "BSC, Spain", + "name": "Loosveldt-Tomas, Saskia" + }, + { + "affiliation": "ETH Zurich, 
Switzerland", + "name": "Lorenz, Ruth", + "orcid": "0000-0002-3986-1268" + }, + { + "affiliation": "CMCC, Italy", + "name": "Lovato, Tomas", + "orcid": "0000-0002-5188-6767" + }, + { + "affiliation": "University of Hamburg, German", + "name": "Lucarini, Valerio" + }, + { + "affiliation": "UCLouvain, Belgium", + "name": "Massonnet, François" + }, + { + "affiliation": "NIBIO, Norway", + "name": "Mohr, Christian Wilhelm", + "orcid": "0000-0003-2656-1802" + }, + { + "affiliation": "University of Arizona, USA", + "name": "Amarjiit, Pandde" + }, + { + "affiliation": "BSC, Spain", + "name": "Pérez-Zanón, Núria" + }, + { + "affiliation": "NCAR, USA", + "name": "Phillips, Adam", + "orcid": "0000-0003-4859-8585" + }, + { + "affiliation": "University of Arizona, USA", + "name": "Russell, Joellen" + }, + { + "affiliation": "CICERO, Norway", + "name": "Sandstad, Marit" + }, + { + "affiliation": "MetOffice, UK", + "name": "Sellar, Alistair" + }, + { + "affiliation": "DLR, Germany", + "name": "Senftleben, Daniel" + }, + { + "affiliation": "ISMAR-CNR, Italy", + "name": "Serva, Federico", + "orcid": "0000-0002-7118-0817" + }, + { + "affiliation": "CICERO, Norway", + "name": "Sillmann, Jana" + }, + { + "affiliation": "MPI-M, Germany", + "name": "Stacke, Tobias", + "orcid": "0000-0003-4637-5337" + }, + { + "affiliation": "URead, UK", + "name": "Swaminathan, Ranjini", + "orcid": "0000-0001-5853-2673" + }, + { + "affiliation": "BSC, Spain", + "name": "Torralba, Verónica" + }, + { + "affiliation": "University of Bremen, Germany", + "name": "Weigel, Katja", + "orcid": "0000-0001-6133-7801" + }, + { + "affiliation": "DLR, Germany", + "name": "Sarauer, Ellen" + }, + { + "affiliation": "University of Reading, UK", + "name": "Roberts, Charles", + "orcid": "0000-0002-1147-8961" + }, + { + "affiliation": "Netherlands eScience Center", + "name": "Kalverla, Peter", + "orcid": "0000-0002-5025-7862" + }, + { + "affiliation": "Netherlands eScience Center", + "name": "Alidoost, Sarah", + "orcid": "0000-0001-8407-6472" + }, + { + "affiliation": "Netherlands eScience Center", + "name": "Verhoeven, Stefan", + "orcid": "0000-0002-5821-2060" + }, + { + "affiliation": "Netherlands eScience Center", + "name": "Vreede, Barbara", + "orcid": "0000-0002-5023-4601" + }, + { + "affiliation": "Netherlands eScience Center", + "name": "Smeets, Stef", + "orcid": "0000-0002-5413-9038" + }, + { + "affiliation": "Netherlands eScience Center", + "name": "Soares Siqueira, Abel", + "orcid": "0000-0003-4451-281X" + }, + { + "affiliation": "DLR, Germany", + "name": "Kazeroni, Rémi", + "orcid": "0000-0001-7205-9528" + }, + { + "affiliation": "NASA, USA", + "name": "Potter, Jerry" + }, + { + "affiliation": "DLR, Germany", + "name": "Winterstein, Franziska", + "orcid": "0000-0002-2406-4936" + }, + { + "affiliation": "ACCESS-NRI, Australia", + "name": "Beucher, Romain", + "orcid": "0000-0003-3891-5444" + }, + { + "affiliation": "DLR, Germany", + "name": "Kraft, Jeremy" + }, + { + "affiliation": "University of Bremen, Germany", + "name": "Ruhe, Lukas", + "orcid": "0000-0001-6349-9118" + }, + { + "affiliation": "DLR, Germany", + "name": "Bonnet, Pauline", + "orcid": "0000-0003-3780-0784" + }, + { + "affiliation": "MetOffice, UK", + "name": "Munday, Gregory", + "orcid": "0000-0003-4750-9923" + } + ], + "description": "ESMValTool: A community diagnostic and performance metrics tool for routine evaluation of Earth system models in CMIP.", + "license": { + "id": "Apache-2.0" + }, + "publication_date": "2024-07-04", + "title": "ESMValTool", + "version": 
"v2.11.0", + "communities": [ + { + "identifier": "is-enes3" + }, + { + "identifier": "dlr_de" + }, + { + "identifier": "ecfunded" + }, + { + "identifier": "nlesc" + } + ], + "grants": [ + { + "id": "10.13039/501100000780::282672" + }, + { + "id": "10.13039/501100000780::641727" + }, + { + "id": "10.13039/501100000780::641816" + }, + { + "id": "10.13039/501100000780::727862" + }, + { + "id": "10.13039/501100000780::776613" + }, + { + "id": "10.13039/501100000780::824084" + } + ] +} diff --git a/CITATION.cff b/CITATION.cff new file mode 100644 index 0000000000..1934c36ef1 --- /dev/null +++ b/CITATION.cff @@ -0,0 +1,413 @@ +# YAML 1.2 +--- +abstract: "ESMValTool: A community diagnostic and performance metrics tool for routine evaluation of Earth system models in CMIP." + +authors: + - + affiliation: "NLeSC, Netherlands" + family-names: Andela + given-names: Bouwe + orcid: "https://orcid.org/0000-0001-9005-8940" + - + affiliation: "DLR, Germany" + family-names: Broetz + given-names: Bjoern + - + affiliation: "PML, UK" + name-particle: de + family-names: Mora + given-names: Lee + orcid: "https://orcid.org/0000-0002-5080-3149" + - + affiliation: "NLeSC, Netherlands" + family-names: Drost + given-names: Niels + orcid: "https://orcid.org/0000-0001-9795-7981" + - + affiliation: "DLR, Germany" + family-names: Eyring + given-names: Veronika + orcid: "https://orcid.org/0000-0002-6887-4885" + - + affiliation: "AWI, Germany" + family-names: Koldunov + given-names: Nikolay + orcid: "https://orcid.org/0000-0002-3365-8146" + - + affiliation: "DLR, Germany" + family-names: Lauer + given-names: Axel + orcid: "https://orcid.org/0000-0002-9270-1044" + - + affiliation: "LMU, Germany" + family-names: Mueller + given-names: Benjamin + - + affiliation: "URead, UK" + family-names: Predoi + given-names: Valeriu + orcid: "https://orcid.org/0000-0002-9729-6578" + - + affiliation: "DLR, Germany" + family-names: Righi + given-names: Mattia + orcid: "https://orcid.org/0000-0003-3827-5950" + - + affiliation: "DLR, Germany" + family-names: Schlund + given-names: Manuel + orcid: "https://orcid.org/0000-0001-5251-0158" + - + affiliation: "BSC, Spain" + family-names: Vegas-Regidor + given-names: Javier + orcid: "https://orcid.org/0000-0003-0096-4291" + - + affiliation: "SMHI, Sweden" + family-names: Zimmermann + given-names: Klaus + - + affiliation: "University of Bremen, Germany" + family-names: Adeniyi + given-names: Kemisola + - + affiliation: "ISAC-CNR, Italy" + family-names: Arnone + given-names: Enrico + orcid: "https://orcid.org/0000-0001-6740-5051" + - + affiliation: "BSC, Spain" + family-names: Bellprat + given-names: Omar + orcid: "https://orcid.org/0000-0001-6434-1793" + - + affiliation: "SMHI, Sweden" + family-names: Berg + given-names: Peter + orcid: "https://orcid.org/0000-0002-1469-2568" + - + affiliation: "DLR, Germany" + family-names: Bock + given-names: Lisa + orcid: "https://orcid.org/0000-0001-7058-5938" + - + affiliation: "MetOffice, UK" + family-names: Bodas-Salcedo + given-names: Alejandro + orcid: "https://orcid.org/0000-0002-7890-2536" + - + affiliation: "BSC, Spain" + family-names: Caron + given-names: Louis-Philippe + orcid: "https://orcid.org/0000-0001-5221-0147" + - + affiliation: "MPI for Biogeochemistry, Germany" + family-names: Carvalhais + given-names: Nuno + - + affiliation: "ENEA, Italy" + family-names: Cionni + given-names: Irene + orcid: "https://orcid.org/0000-0002-0591-9193" + - + affiliation: "BSC, Spain" + family-names: Cortesi + given-names: Nicola + orcid: 
"https://orcid.org/0000-0002-1442-9225" + - + affiliation: "ISAC-CNR, Italy" + family-names: Corti + given-names: Susanna + - + affiliation: "ETH Zurich, Switzerland" + family-names: Crezee + given-names: Bas + orcid: "https://orcid.org/0000-0002-1774-1126" + - + affiliation: "ETH Zurich, Switzerland" + family-names: Davin + given-names: Edouard Leopold + orcid: "https://orcid.org/0000-0003-3322-9330" + - + affiliation: "ISAC-CNR, Italy" + family-names: Davini + given-names: Paolo + orcid: "https://orcid.org/0000-0003-3389-7849" + - + affiliation: "NCAR, USA" + family-names: Deser + given-names: Clara + - + affiliation: "NLeSC, Netherlands" + family-names: Diblen + given-names: Faruk + - + affiliation: "UCLouvain, Belgium" + family-names: Docquier + given-names: David + - + affiliation: "MetOffice, UK" + family-names: Dreyer + given-names: Laura + - + affiliation: "DKRZ, Germany" + family-names: Ehbrecht + given-names: Carsten + - + affiliation: "MetOffice, UK" + family-names: Earnshaw + given-names: Paul + - + affiliation: "University of Bremen, Germany" + family-names: Gier + given-names: Bettina + - + affiliation: "BSC, Spain" + family-names: Gonzalez-Reviriego + given-names: Nube + orcid: "https://orcid.org/0000-0002-5919-6701" + - + affiliation: "University of Arizona, USA" + family-names: Goodman + given-names: Paul + - + affiliation: "HZG, Germany" + family-names: Hagemann + given-names: Stefan + orcid: "https://orcid.org/0000-0001-5444-2945" + - + affiliation: "University of Canterbury, New Zealand" + family-names: Hardacre + given-names: Catherine + orcid: "https://orcid.org/0000-0001-9093-4656" + - + affiliation: "ISAC-CNR, Italy" + name-particle: von + family-names: Hardenberg + given-names: Jost + orcid: "https://orcid.org/0000-0002-5312-8070" + - + affiliation: "DLR, Germany" + family-names: Hassler + given-names: Birgit + orcid: "https://orcid.org/0000-0003-2724-709X" + - + affiliation: "DLR, Germany" + family-names: Heuer + given-names: Helge + orcid: "https://orcid.org/0000-0003-2411-7150" + - + affiliation: "BSC, Spain" + family-names: Hunter + given-names: Alasdair + orcid: "https://orcid.org/0000-0001-8365-3709" + - + affiliation: "FUB, Germany" + family-names: Kadow + given-names: Christopher + - + affiliation: "DKRZ, Germany" + family-names: Kindermann + given-names: Stephan + orcid: "https://orcid.org/0000-0001-9335-1093" + - + affiliation: "MPI for Biogeochemistry, Germany" + family-names: Koirala + given-names: Sujan + - + affiliation: "DLR, Germany" + family-names: Kuehbacher + given-names: Birgit + - + affiliation: "BSC, Spain" + family-names: Lledó + given-names: Llorenç + - + affiliation: "ETH Zurich, Switzerland" + family-names: Lejeune + given-names: Quentin + - + affiliation: "University of Hamburg, German" + family-names: Lembo + given-names: Valerio + orcid: "https://orcid.org/0000-0001-6085-5914" + - + affiliation: "MetOffice, UK" + family-names: Little + given-names: Bill + - + affiliation: "BSC, Spain" + family-names: Loosveldt-Tomas + given-names: Saskia + - + affiliation: "ETH Zurich, Switzerland" + family-names: Lorenz + given-names: Ruth + orcid: "https://orcid.org/0000-0002-3986-1268" + - + affiliation: "CMCC, Italy" + family-names: Lovato + given-names: Tomas + orcid: "https://orcid.org/0000-0002-5188-6767" + - + affiliation: "University of Hamburg, German" + family-names: Lucarini + given-names: Valerio + - + affiliation: "UCLouvain, Belgium" + family-names: Massonnet + given-names: François + - + affiliation: "NIBIO, Norway" + family-names: Mohr 
+ given-names: Christian Wilhelm + orcid: "https://orcid.org/0000-0003-2656-1802" + - + affiliation: "University of Arizona, USA" + family-names: Amarjiit + given-names: Pandde + - + affiliation: "BSC, Spain" + family-names: Pérez-Zanón + given-names: Núria + - + affiliation: "NCAR, USA" + family-names: Phillips + given-names: Adam + orcid: "https://orcid.org/0000-0003-4859-8585" + - + affiliation: "ACCESS-NRI, Australia" + family-names: Proft + given-names: Max + orcid: "https://orcid.org/0009-0003-1611-9516" + - + affiliation: "University of Arizona, USA" + family-names: Russell + given-names: Joellen + - + affiliation: "CICERO, Norway" + family-names: Sandstad + given-names: Marit + - + affiliation: "MetOffice, UK" + family-names: Sellar + given-names: Alistair + - + affiliation: "DLR, Germany" + family-names: Senftleben + given-names: Daniel + - + affiliation: "ISMAR-CNR, Italy" + family-names: Serva + given-names: Federico + orcid: "https://orcid.org/0000-0002-7118-0817" + - + affiliation: "CICERO, Norway" + family-names: Sillmann + given-names: Jana + - + affiliation: "MPI-M, Germany" + family-names: Stacke + given-names: Tobias + orcid: "https://orcid.org/0000-0003-4637-5337" + - + affiliation: "URead, UK" + family-names: Swaminathan + given-names: Ranjini + orcid: "https://orcid.org/0000-0001-5853-2673" + - + affiliation: "BSC, Spain" + family-names: Torralba + given-names: Verónica + - + affiliation: "University of Bremen, Germany" + family-names: Weigel + given-names: Katja + orcid: "https://orcid.org/0000-0001-6133-7801" + - + affiliation: "DLR, Germany" + family-names: Sarauer + given-names: Ellen + - + affiliation: "University of Reading, UK" + family-names: Roberts + given-names: Charles + orcid: "https://orcid.org/0000-0002-1147-8961" + - + affiliation: "Netherlands eScience Center" + family-names: Kalverla + given-names: Peter + orcid: "https://orcid.org/0000-0002-5025-7862" + - + affiliation: "Netherlands eScience Center" + family-names: Alidoost + given-names: Sarah + orcid: "https://orcid.org/0000-0001-8407-6472" + - + affiliation: "Netherlands eScience Center" + family-names: Verhoeven + given-names: Stefan + orcid: "https://orcid.org/0000-0002-5821-2060" + - + affiliation: "Netherlands eScience Center" + family-names: Vreede + given-names: Barbara + orcid: "https://orcid.org/0000-0002-5023-4601" + - + affiliation: "Netherlands eScience Center" + family-names: Smeets + given-names: Stef + orcid: "https://orcid.org/0000-0002-5413-9038" + - + affiliation: "Netherlands eScience Center" + family-names: Soares Siqueira + given-names: Abel + orcid: "https://orcid.org/0000-0003-4451-281X" + - + affiliation: "DLR, Germany" + family-names: Kazeroni + given-names: Rémi + orcid: "https://orcid.org/0000-0001-7205-9528" + - + affiliation: "NASA, USA" + family-names: Potter + given-names: Jerry + - + affiliation: "DLR, Germany" + family-names: Winterstein + given-names: Franziska + orcid: "https://orcid.org/0000-0002-2406-4936" + - + affiliation: "ACCESS-NRI, Australia" + family-names: Beucher + given-names: Romain + orcid: "https://orcid.org/0000-0003-3891-5444" + - + affiliation: "DLR, Germany" + family-names: Kraft + given-names: Jeremy + - + affiliation: "University of Bremen, Germany" + family-names: Ruhe + given-names: Lukas + orcid: "https://orcid.org/0000-0001-6349-9118" + - + affiliation: "DLR, Germany" + family-names: Bonnet + given-names: Pauline + orcid: "https://orcid.org/0000-0003-3780-0784" + - + affiliation: "MetOffice, UK" + family-names: Munday + given-names: 
Gregory + orcid: "https://orcid.org/0000-0003-4750-9923" + +cff-version: 1.2.0 +date-released: 2024-07-04 +doi: "10.5281/zenodo.3401363" +license: "Apache-2.0" +message: "If you use this software, please cite it using these metadata." +repository-code: "https://github.com/ESMValGroup/ESMValTool/" +title: ESMValTool +version: "v2.11.0" +... diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index 170e9131c0..76efeb2eca 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -55,7 +55,8 @@ further defined and clarified by project maintainers. ## Enforcement Instances of abusive, harassing, or otherwise unacceptable behavior may be -reported by contacting the project team at veronika.eyring@dlr.de. All +reported by contacting the project team at birgit.hassler@dlr.de or +alistair.sellar@metoffice.gov.uk. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 18db8020be..9cc637dc6c 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,95 +1,3 @@ # Contributions are very welcome -If you would like to contribute a new diagnostic and recipe or a new feature, please discuss your idea with the development team before getting started, to avoid double work and/or disappointment later. A good way to do this is to open an [issue on GitHub](https://github.com/ESMValGroup/ESMValTool/issues). This is also a good way to get help. - -If you have a bug to report, please do so using the [issues tab on the ESMValTool github repository](https://github.com/ESMValGroup/ESMValTool/issues). - -To get started developing, follow the instructions below. More detailed instructions can be found in the [manual](https://esmvaltool.readthedocs.io) under Developer's Guide. - -## Getting started -To install in development mode, follow these instructions. - - Install gcc, g++ and gfortran if these are not available on your system. On Debian based systems, this can be done by running `apt install build-essential gfortran`, on managed systems you can often use the `module avail` command to see what compilers are available (note on gcc version: gcc 7.3.0 works well; gcc 8.2.0 is reported to have issues installing the R packages; example of loading gcc7 on the CEDA Jasmin cluster: `module load contrib/gnu/gcc/7.3.0`, similar module loading functionality should be present at your local cluster as well). - - [Download and install conda](https://conda.io/projects/conda/en/latest/user-guide/install/linux.html) (this should be done even if the system in use already has a preinstalled version of conda, as problems have been reported with NCL when using such a version) - - To make the `conda` command availble, add `source /etc/profile.d/conda.sh` to your `.bashrc` file and restart your shell. If using (t)csh shell, add `source /etc/profile.d/conda.csh` to your `.cshrc`/`.tcshrc` file instead. - - Update conda: `conda update -y conda` - - Create a conda environment: `conda create -y -n esmvaltool python=3` - - Activate the esmvaltool environment: `conda activate esmvaltool` - - Clone the ESMValTool public github repository: `git clone git@github.com:ESMValGroup/ESMValTool.git`, or one of the private github repositories (e.g. 
`git clone git@github.com:ESMValGroup/ESMValTool-private.git`) - - Go to the esmvaltool directory: `cd ESMValTool` - - Update the esmvaltool conda environment `conda env update` - - Install in development mode: `pip install -e '.[develop]'`. If you are installing behind a proxy that does not trust the usual pip-urls you can declare them with the option `--trusted-host`, e.g. `pip install --trusted-host=pypi.python.org --trusted-host=pypi.org --trusted-host=files.pythonhosted.org -e .[develop]` - - If you want to use R diagnostics, run `Rscript esmvaltool/install/R/setup.R` to install the R dependences. - - If you want to use Julia diagnostics, run `julia esmvaltool/install/Julia/setup.jl` to install the Julia dependences. - - Test that your installation was succesful by running `esmvaltool -h`. - - If you log into a cluster or other device via `ssh` and your origin machine sends the `locale` environment via the `ssh` connection, make sure the environment is set correctly, specifically `LANG` and `LC_ALL` are set correctly (for GB English UTF-8 encoding these variables must be set to `en_GB.UTF-8`; you can set them by adding `export LANG=en_GB.UTF-8` and `export LC_ALL=en_GB.UTF-8` in your origin or login machines' `.profile`) - -## Running tests -Go to the directory where the repository is cloned and run `python setup.py test --installation`. Tests will also be run automatically by [CircleCI](https://circleci.com/gh/ESMValGroup/ESMValTool). - -## Code style -To increase the readability and maintainability or the ESMValTool source code, we aim to adhere to best practices and coding standards. All pull requests are reviewed and tested by one or more members of the core development team. For code in all languages, it is highly recommended that you split your code up in functions that are short enough to view without scrolling. - -### Python -The standard document on best practices for Python code is [PEP8](https://www.python.org/dev/peps/pep-0008/) and there is [PEP257](https://www.python.org/dev/peps/pep-0257/) for documentation. We make use of [numpy style docstrings](https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_numpy.html) to document Python functions that are visible on [readthedocs](https://esmvaltool.readthedocs.io). - -Most formatting issues in Python code can be fixed automatically by running the commands -``` -isort some_file.py -``` -to sort the imports in the standard way and -``` -yapf -i some_file.py -``` -to add/remove whitespace as required by the standard. - -To check if your code adheres to the standard, go to the directory where the repository is cloned, e.g. `cd ESMValTool`. -and run -``` -prospector esmvaltool/diag_scripts/your_diagnostic/your_script.py -``` -Run -``` -python setup.py lint -``` -to see the warnings about the code style of the entire project. - -We use `pycodestyle` on CircleCI to automatically check that there are no formatting mistakes and Codacy for monitoring (Python) code quality. Running prospector locally will give you quicker and sometimes more accurate results. - -### NCL -Because there is no standard best practices document for NCL, we use [PEP8](https://www.python.org/dev/peps/pep-0008/) for NCL code as well, with some minor adjustments to accomodate for differences in the languages. The most important difference is that for NCL code the indentation should be 2 spaces instead of 4. - -### R -A document on best practices for R is [Hadley Wickham's R Style Guide](http://r-pkgs.had.co.nz/style.html). 
We partially check adherence to this style guide by using [lintr](https://cran.r-project.org/web/packages/lintr/index.html) on CircleCI. In the future we would also like to make use of [goodpractice](https://cran.r-project.org/web/packages/goodpractice/index.html) to assess the quality of R code. - -### YAML -Please use `yamllint` to check that your YAML files do not contain mistakes. - -## Documentation - -### What should be documented - -Any code documentation that is visible on [readthedocs](https://esmvaltool.readthedocs.io) should be well written and adhere to the standards for documentation for the respective language. Recipes should have a page in the *Recipes* section on readthedocs. This is also the place to document recipe options for the diagnostic scripts used in those recipes. Note that there is no need to write extensive documentation for functions that are not visible on readthedocs. However, adding a one line docstring describing what a function does is always a good idea. - -### How to build the documentation locally -Go to the directory where the repository is cloned and run -``` -python setup.py build_sphinx -Ea -``` -Make sure that your newly added documentation builds without warnings or errors. - -## Pull requests and code review -New development should preferably be done in a new git branch in the main ESMValTool github repository. However, for scientists requiring confidentiality, private repositories are available. It is recommended that you open a pull request early, as this will cause CircleCI to run the unit tests and Codacy to analyse your code. It's also easier to get help from other developers if your code is visible in a pull request. - -You can view the results of the automatic checks below your pull request. If one of the tests shows a red cross instead of a green approval sign, please click the link and try to solve the issue. Note that this kind of automated checks make it easier to review code, but they are not flawless, so occasionally Codacy will report false positives. - -### Diagnostic script contributions -A pull request with diagnostic code should preferably not introduce new Codacy issues. However, we understand that there is a limit to how much time can be spend on polishing code, so up to 10 new (non-trivial) issues is still an acceptable amount. - -Never make changes to the esmvaltool core, e.g. a new preprocessor function, in diagnostic script pull requests. If you need to make this kind of change, create a separate pull request for it in the public repository. - -### Contributing to the core of ESMValTool -Contributions to the core of ESMValTool should - - Go into the public repository. - - Preferably be covered by unit tests. Unit tests are mandatory for new preprocessor functions or modifications to existing functions. If you do not know how to start with writing unit tests, let us know in a comment on the pull request and a core development team member will try to help you get started. - - Be accompanied by appropriate documentation. - - Introduce no new issues on Codacy (but note that style issues reported in unit test code are not worth the effort of fixing). +Please read our [contribution guidelines](https://docs.esmvaltool.org/en/latest/community/index.html). 
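This change ships the same author metadata in two parallel formats: `.zenodo.json` and the new `CITATION.cff` above. The two files can drift apart over time, so a local consistency check is useful before release. A minimal sketch, assuming the third-party `cffconvert` tool is installed (it is not part of this change):
```
# Validate CITATION.cff against the CFF schema, then render it in Zenodo's
# JSON format for a manual comparison with the checked-in .zenodo.json.
pip install cffconvert
cffconvert --validate -i CITATION.cff
cffconvert -f zenodo -i CITATION.cff > /tmp/zenodo_from_cff.json
python -m json.tool .zenodo.json > /dev/null   # confirms the JSON parses
```
The rendered Zenodo JSON will not match `.zenodo.json` byte for byte (field order and optional keys differ), so compare the two by eye rather than with a strict diff.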
diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000000..378e60c3d2 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,4 @@ +exclude .* +prune .* +prune doc +include .zenodo.json diff --git a/NOTICE b/NOTICE index 5fa313af38..bb3324b08b 100644 --- a/NOTICE +++ b/NOTICE @@ -14,12 +14,14 @@ Copyright 2008- Deutsches Zentrum für Luft- und Raumfahrt e.V. (DLR) and partne ========================================== (1) ESMValTool VERSION 2.0 CORE DEVELOPMENT TEAM ========================================== -Copyright 2008- Deutsches Zentrum für Luft- und Raumfahrt e.V. (DLR), Germany - ESMValTool Principal Investigator (PI) +Copyright 2008- Deutsches Zentrum für Luft- und Raumfahrt e.V. (DLR), Germany - ESMValTool Co-PI +Copyright 2020- Met Office, UK - ESMValTool Co-PI Copyright 2017- Alfred-Wegener-Institute (AWI), Germany Copyright 2017- Barcelona Supercomputing Center (BSC), Spain Copyright 2016- Ludwig Maximilian University, Germany -Copyright 2017- Netherlands e-Science Center (NLeSC), Netherlands +Copyright 2017- Netherlands e-Science Center (NLeSC), The Netherlands Copyright 2019- Plymouth Marine Laboratory, UK +Copyright 2011- Swedish Meteorological and Hydrological Institute (SMHI), Sweden Copyright 2017- University of Reading, UK ========================================== @@ -41,7 +43,7 @@ recipe_autoassess_radiation_rms_Amon_obs.yml recipe_autoassess_radiation_rms_cfMon_all.yml recipe_autoassess_stratosphere.yml Copyright 2017- University of Reading, UK -Copyright 2017- MetOffice, UK +Copyright 2017- Met Office, UK recipe_my_personal_diagnostic.yml recipe_validation.yml @@ -100,10 +102,15 @@ Unless required by applicable law or agreed to in writing, software distributed ========================================== -Users who apply the Software resulting in presentations or papers are kindly asked to cite the following “Software Documentation Paper” alongside with the Software doi (doi:10.17874/ac8548f0315) and version number: -Eyring et al., ESMValTool (v1.0) – a community diagnostic and performance metrics tool for routine evaluation of Earth System Models in CMIP, Geosci. Model Dev., 2016. +Users who apply the Software resulting in presentations or papers are kindly asked to cite the following “Software Documentation Papers” along with the Software doi (ESMValCore doi:10.5281/zenodo.3952695, +ESMValTool doi:10.5281/zenodo.3970975) and version number: -Besides the above citation, users are kindly asked to register any journal articles (or other scientific documents) that use the Software at ESMValTool webpage (see http://www.esmvaltool.org/). +- Righi et al., Earth System Model Evaluation Tool (ESMValTool) v2.0 - technical overview, Geosci. Model Dev., 2020. +- Eyring et al., Earth System Model Evaluation Tool (ESMValTool) v2.0 - an extended set of large-scale diagnostics for quasi-operational and comprehensive evaluation of Earth system models in CMIP, Geosci. Model Dev., 2020. +- Lauer et al., Earth System Model Evaluation Tool (ESMValTool) v2.0 - diagnostics for emergent constraints and future projections from Earth system models in CMIP, Geosci. Model Dev., 2020. +- Weigel et al., Earth System Model Evaluation Tool (ESMValTool) v2.0 - diagnostics for extreme events, regional and impact evaluation and analysis of Earth system models in CMIP, Geosci. Model Dev. Discuss., in review, 2020. 
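The new `MANIFEST.in` above keeps the source distribution lean (hidden files and `doc/` are pruned) while making sure `.zenodo.json` ships with it. A minimal sketch of checking the effect locally; `check-manifest` and the `build` package are third-party tools assumed here, not part of this change:
```
# Report discrepancies between the repository and the sdist; paths pruned on
# purpose (such as doc/) will be listed, which is expected with these rules.
pip install check-manifest build
check-manifest --verbose
# Build an sdist and confirm .zenodo.json is included.
python -m build --sdist
tar tzf dist/*.tar.gz | grep zenodo
```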
+ +Besides the above citation, users are kindly asked to register any journal articles (or other scientific documents) that use the Software at the ESMValTool webpage (see https://www.esmvaltool.org/). Citing the Software Documentation Paper and registering your paper(s) will serve to document the scientific impact of the Software, which is of vital importance for securing future funding. You should consider this an obligation if you have taken advantage of the Software, which represents the end product of considerable effort by the development team. ========================================== @@ -112,7 +119,7 @@ In addition to using the Software, we encourage the community to join the Softwa ========================================== -To join the ESMValTool Development Team, please contact Prof. Veronika Eyring (veronika.eyring@dlr.de) and Dr. Axel Lauer (axel.lauer@dlr.de). +To join the ESMValTool Development Team, please contact Dr. Birgit Hassler (birgit.hassler@dlr.de) and Dr. Axel Lauer (axel.lauer@dlr.de). ========================================== diff --git a/README.md b/README.md index c37fd0e6ad..4ac7d694ee 100644 --- a/README.md +++ b/README.md @@ -1,52 +1,63 @@ -# ESMValTool -[![Documentation Status](https://readthedocs.org/projects/esmvaltool/badge/?version=version2_development)](https://esmvaltool.readthedocs.io/en/version2_development/?badge=version2_development) -[![DOIBadge](https://img.shields.io/badge/DOI-10.17874%2Fac8548f0315-blue.svg)](https://doi.org/10.17874/ac8548f0315) -[![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/ESMValGroup?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) -[![CircleCI](https://circleci.com/gh/ESMValGroup/ESMValTool.svg?style=svg)](https://circleci.com/gh/ESMValGroup/ESMValTool) -[![Codacy Coverage Badge](https://api.codacy.com/project/badge/Coverage/79bf6932c2e844eea15d0fb1ed7e415c)](https://www.codacy.com/app/ESMValGroup/ESMValTool?utm_source=github.com&utm_medium=referral&utm_content=ESMValGroup/ESMValTool&utm_campaign=Badge_Coverage) -[![Codacy Badge](https://api.codacy.com/project/badge/Grade/79bf6932c2e844eea15d0fb1ed7e415c)](https://www.codacy.com/app/ESMValGroup/ESMValTool?utm_source=github.com&utm_medium=referral&utm_content=ESMValGroup/ESMValTool&utm_campaign=Badge_Grade) -[![Docker Build Status](https://img.shields.io/docker/build/esmvalgroup/esmvaltool.svg)](https://hub.docker.com/r/esmvalgroup/esmvaltool/) -[![Anaconda-Server Badge](https://anaconda.org/esmvalgroup/esmvaltool/badges/installer/conda.svg)](https://conda.anaconda.org/esmvalgroup) - - -ESMValTool: A community diagnostic and performance metrics tool for routine evaluation of Earth system models in CMIP +[![Maintenance](https://img.shields.io/badge/Maintained%3F-yes-green.svg)](https://GitHub.com/Naereen/StrapDown.js/graphs/commit-activity) +[![made-with-python](https://img.shields.io/badge/Made%20with-Python-1f425f.svg)](https://www.python.org/) +[![Documentation Status](https://readthedocs.org/projects/esmvaltool/badge/?version=latest)](https://esmvaltool.readthedocs.io/en/latest/?badge=latest) +[![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.3401363.svg)](https://doi.org/10.5281/zenodo.3401363) +[![Chat on Matrix](https://matrix.to/img/matrix-badge.svg)](https://matrix.to/#/#ESMValGroup_Lobby:gitter.im) +[![CircleCI](https://circleci.com/gh/ESMValGroup/ESMValTool/tree/main.svg?style=svg)](https://circleci.com/gh/ESMValGroup/ESMValTool/tree/main) +[![Test in Full Development 
Mode](https://github.com/ESMValGroup/ESMValTool/actions/workflows/test-development.yml/badge.svg)](https://github.com/ESMValGroup/ESMValTool/actions/workflows/test-development.yml) +[![Codacy Badge](https://app.codacy.com/project/badge/Grade/79bf6932c2e844eea15d0fb1ed7e415c)](https://app.codacy.com/gh/ESMValGroup/ESMValTool/dashboard?utm_source=gh&utm_medium=referral&utm_content=&utm_campaign=Badge_grade) +[![Docker Build Status](https://img.shields.io/docker/automated/esmvalgroup/esmvaltool)](https://hub.docker.com/r/esmvalgroup/esmvaltool/) +[![Anaconda-Server Badge](https://img.shields.io/conda/vn/conda-forge/ESMValTool?color=blue&label=conda-forge&logo=conda-forge&logoColor=white)](https://anaconda.org/conda-forge/esmvaltool) +![stand with Ukraine](https://badgen.net/badge/stand%20with/UKRAINE/?color=0057B8&labelColor=FFD700) + +![esmvaltoollogo](https://raw.githubusercontent.com/ESMValGroup/ESMValTool/main/doc/sphinx/source/figures/ESMValTool-logo-2-glow.png) + +- [**Documentation**](https://docs.esmvaltool.org/en/latest/) +- [**ESMValTool Website**](https://www.esmvaltool.org/) +- [**ESMValTool Tutorial**](https://tutorial.esmvaltool.org/index.html) +- [**ESMValGroup Project on GitHub**](https://github.com/ESMValGroup) +- [**Gallery**](https://docs.esmvaltool.org/en/latest/gallery.html) +- [**`conda-forge` package feedstock**](https://github.com/conda-forge/esmvaltool-suite-feedstock) + +# Introduction + +ESMValTool is a community-developed climate model diagnostics and evaluation software package, driven +by computational performance as well as scientific accuracy and reproducibility. ESMValTool is open to both +users and developers, encouraging open exchange of diagnostic source code and evaluation results from the +Coupled Model Intercomparison Project [CMIP](https://www.wcrp-climate.org/wgcm-cmip) ensemble. For a +comprehensive introduction to ESMValTool, please visit our +[documentation](https://docs.esmvaltool.org/en/latest/introduction.html) page. + +# Running esmvaltool + +Diagnostics from ESMValTool are run using [recipe](https://docs.esmvaltool.org/en/latest/recipes/index.html) +files that contain pointers to the requested data types, directives for the preprocessing steps that data +will be subject to, and directives for the diagnostics that will be run on the preprocessed data. +Data preprocessing is done via the [ESMValCore](https://docs.esmvaltool.org/projects/ESMValCore/en/latest/quickstart/index.html) package, a pure Python, highly optimized scientific library developed by the ESMValTool core developers +that performs a number of common analysis tasks +such as regridding, masking, and level extraction. [Diagnostics](https://docs.esmvaltool.org/en/latest/develop/diagnostic.html) are written in a variety of programming languages (Python, NCL, R, Julia) and are developed by the wider +scientific community and included after a scientific and technical review process. A minimal run command is sketched below. + +# Input data + +ESMValTool can run with the following types of [data as input](https://docs.esmvaltool.org/en/latest/input.html): + +- CMIP6 +- CMIP5 +- CMIP3 +- [observational and re-analysis datasets](https://docs.esmvaltool.org/en/latest/input.html#supported-datasets-for-which-a-cmorizer-script-is-available) +- obs4MIPs +- ana4mips +- CORDEX ([work in progress](https://docs.esmvaltool.org/en/latest/input.html#cordex-note)) # Getting started -This is the development branch for version 2 of ESMValTool. 
ESMValTool version 2 is under rapid development, an installation from source is recommended at the moment. - -## Installing from source [recommended] -Please see [CONTRIBUTING.md](https://github.com/ESMValGroup/ESMValTool/blob/version2_development/CONTRIBUTING.md) for instructions on installing ESMValTool from source. - -## Installing from Anaconda -The Anaconda package can be found on [ESMValGroup Anaconda Channel.](https://anaconda.org/ESMValGroup) - -First install gcc, g++ and gfortran if these are not available on your system. On Debian based systems, this can be done by -running `apt install build-essential gfortran`. In order to run -Julia diagnostics, you will also need to [install Julia](https://julialang.org/downloads/) version 1 or greater. - -If you already installed Anaconda, you can install ESMValTool by running: -``` -conda install -c esmvalgroup esmvaltool -c conda-forge -c birdhouse -``` - -## Using Docker -ESMValTool Docker images are automatically generated by Docker Hub. Run the command below to pull the most recent ESMValTool image. -``` -docker pull esmvalgroup/esmvaltool -``` -Example use: -``` -[sudo] docker run -ti esmvalgroup/esmvaltool -``` - -## Running ESMValTool -- Review `config-user.yml`. To customize for your system, create a copy, edit and use the command line option `-c` to instruct `esmvaltool` to use your custom configuration. -- Available recipes are located in the directory `esmvaltool/recipes`. -- Run e.g. `esmvaltool -c ~/config-user.yml examples/recipe_python.yml + +Please see [getting started](https://docs.esmvaltool.org/en/latest/quickstart/index.html) on our instance of Read the Docs as well as the [ESMValTool tutorial](https://tutorial.esmvaltool.org/index.html). The tutorial is a set of lessons that together teach skills needed to work with ESMValTool in climate-related domains. ## Getting help -The easiest way to get help if you cannot find the answer in the documentation on [readthedocs](https://esmvaltool.readthedocs.io), is to open an [issue on GitHub](https://github.com/ESMValGroup/ESMValTool/issues). + +The easiest way to get help, if you cannot find the answer in our [documentation](https://docs.esmvaltool.org), is to open an [issue on GitHub](https://github.com/ESMValGroup/ESMValTool/issues). ## Contributing -If you would like to contribute a new diagnostic or feature, please have a look at [CONTRIBUTING.md](https://github.com/ESMValGroup/ESMValTool/blob/version2_development/CONTRIBUTING.md). + +If you would like to contribute a new diagnostic or feature, please have a look at our [contribution guidelines](https://docs.esmvaltool.org/en/latest/community/index.html). diff --git a/RELEASE_NOTES b/RELEASE_NOTES deleted file mode 100644 index 4ddd688a88..0000000000 --- a/RELEASE_NOTES +++ /dev/null @@ -1,86 +0,0 @@ -############################################################################### -####### ESMValTool - Earth System Model Evaluation Tool - RELEASE NOTES ####### -############################################################################### - -=============================================================================== -Version 2.0aX - June 2018 -=============================================================================== -Completely revised version of ESMValTool. This are alpha releases, intended -to be used solely as a preview of the upcoming version 2 release. 
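For the recipe workflow the new README describes, the run command referenced above looks roughly like the following sketch; the example recipe name is taken from the removed README text, and the exact flags may differ between v2 releases:
```
# List the recipes bundled with the installation, then run a shipped example.
# Settings such as output directories come from the user configuration file
# (~/.esmvaltool/config-user.yml by default in the v2 series).
esmvaltool recipes list
esmvaltool run examples/recipe_python.yml
```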
- -=============================================================================== -Version 1.1.0 - January 2017 -=============================================================================== -GENERAL -- new OBS class definition (includes time range specification _YYY1M1-YYY2M2) -- updated ESMValTool user's and developer's guide -- updated ESMValTool Software License - -DIAGNOSTIC AND PLOT ROUTINES -- added sea-ice, xco2 and soil moisture to perfmetrics -- added optional uncertainty estimates to Taylor diagrams (recipe_lauer17rse.yml) -- new ESA CCI diagnostics for SST, soil moisture, land cover, xco2 - (recipe_lauer17rse.yml) -- added IPCC ch.9 figs. 9.28 and 9.30 (recipe_flato13ipcc.yml) -- new variables including error estimates for extisting variables (Stderr) -- added summary plot (various campaigns) for aerosol vertical profiles - -OBSERVATIONS -- new observations: ESACCI-OZONE, ESACCI-SIC, ESACCI-CLOUD, ESACCI-GHG, - ESACCI-SOILMOISTURE, ESACCI-SST, ESACCI-LANDCOVER, PATMOS, CLARA-A2, Asmi11 - -MINOR CHANGES AND BUG FIXES -- adjustments of aerosol, cloud, sea ice, ozone, perfmetrics diagnostics -- perfmetrics: stippling of significant values in differences plots - (zonal means + lat-lon plots) instead of masking out non-significant values - in gray -- remove inappropriate conversion (C-->K) from recognized_units.dat - (only multiplicative factors are allowed) -- added depth info for mrsos in reformat -- added diagnostic for calculating multi-model means (recipe_mmm.yml) - -=============================================================================== -Version 1.0.1 - June 2016 -=============================================================================== -GENERAL -- paths to workdir, climodir, plotdir, model and observational data can now be - set in a single configuration file (config_private.yml) and included in the - main recipes using the @{} syntax (e.g., @{MODELPATH}/CMIP5/MPI-ESM-LR/...) -- reformat scripts for the observations can now be defined in a main recipe - (recipe_reformat_obs.yml) and passed to main.py with the -r option: - python main.py -r nml/recipe_reformat_obs.yml - -DIAGNOSTIC AND PLOT ROUTINES -- new diagnostics for precipitation over soil moisture anomalies in - recipe_sm_pr.yml -- enabled detrending in nml/cfg_GO/spco2compmapDiag.ncl -- added od870aer, abs550aer, od550lt1aer, toz to recipe_perfmetrics_CMIP5 -- added display name wrapper to all(?) 
model entries for the Tropical - Variability diagnostics -- added option to alternative panelling of Tropical variability scatter plots -- added more options for plotting in recipe_aerosol_EMAC.yml - -OBSERVATIONS -- added reformat_obs_ACCESS-2 and reformat_obs_HadCRUT4 -- extended reformat_obs_MODIS to include od550aer -- extended reformat_obs_ESACCI-AEROSOL to include od870aer, abs550aer and - od550lt1aer - -MINOR CHANGES AND BUG FIXES -- cleaned up config files for recipe_perfmetrics_CMIP5 -- added fix file for CMIP5_CNRM-CM5 -- added dpi option to recipe for Python based diagnostics -- added routines to use external file for model display names in Python -- added functionality to switch display names in Python plots -- small generalization and bugfix in logging routine in esmval_lib -- added functionality for mapping model name onto display model names for - figure headings -- added explanatory comment how to run the pybot easytest recipe test cases -- added recipe switch to control PNG figure resolution from recipe -- generalized handling of styles with Python diagnostics - - -=============================================================================== -Version 1.0 - December 2015 -=============================================================================== -FIRST RELEASE! diff --git a/conda-linux-64.lock b/conda-linux-64.lock new file mode 100644 index 0000000000..a3ad9b680c --- /dev/null +++ b/conda-linux-64.lock @@ -0,0 +1,693 @@ +# Generated by conda-lock. +# platform: linux-64 +# input_hash: fafc256cb40a5d6ebcbc180cb08e91d1bd9ca77a04c258188faad5c05c60f1b9 +@EXPLICIT +https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 +https://conda.anaconda.org/conda-forge/linux-64/_py-xgboost-mutex-2.0-gpu_0.tar.bz2#7702188077361f43a4d61e64c694f850 +https://conda.anaconda.org/conda-forge/noarch/_r-mutex-1.0.1-anacondar_1.tar.bz2#19f9db5f4f1b7f5ef5f6d67207f25f38 +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.8.30-hbcca054_0.conda#c27d1c142233b5bc9ca570c6e2e0c244 +https://conda.anaconda.org/conda-forge/noarch/cuda-version-11.8-h70ddcb2_3.conda#670f0e1593b8c1d84f57ad5fe5256799 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb +https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda#49023d73832ef61042f6a237cb2687e7 +https://conda.anaconda.org/conda-forge/noarch/kernel-headers_linux-64-3.10.0-he073ed8_18.conda#ad8527bf134a90e1c9ed35fa0b64318c +https://conda.anaconda.org/conda-forge/linux-64/pandoc-3.5-ha770c72_0.conda#2889e6b9c666c3a564ab90cedc5832fd +https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.12-hd8ed1ab_0.conda#d8d7293c5b37f39b2ac32940621c6592 +https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.12-5_cp312.conda#0424ae29b104430108f5218a66db7260 +https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda#8ac3367aafb1cc0a068483c580af8015 +https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 +https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.43-h712a8e2_2.conda#048b02e3962f066da18efe3a21b77672 
+https://conda.anaconda.org/conda-forge/noarch/libgcc-devel_linux-64-14.2.0-h41c2201_101.conda#fb126e22f5350c15fec6ddbd062f4871 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.2.0-h77fa898_1.conda#cc3573974587f12dda90d96e3e55a702 +https://conda.anaconda.org/conda-forge/noarch/libstdcxx-devel_linux-64-14.2.0-h41c2201_101.conda#60b9a16fd147f7184b5a964aa08f3b0f +https://conda.anaconda.org/conda-forge/noarch/sysroot_linux-64-2.17-h4a8ded7_18.conda#0ea96f90a10838f58412aa84fdd9df09 +https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d +https://conda.anaconda.org/conda-forge/linux-64/binutils_impl_linux-64-2.43-h4bf12b8_2.conda#cf0c5521ac2a20dfa6c662a4009eeef6 +https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab +https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.2.0-h77fa898_1.conda#3cb76c3f10d3bc7f1105b2fc9db984df +https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.9.28-hb9d3cd8_0.conda#1b53af320b24547ce0fb8196d2604542 +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.3-heb4867d_0.conda#09a6c610d002e54e18353c06ef61a253 +https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hb9d3cd8_2.conda#41b599ed2b02abcfdd84302bff174b23 +https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.4-h5888daf_0.conda#db833e03127376d461e1e13e76f09b6c +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.2.0-h69a702a_1.conda#e39480b9ca41323497b05492a63bc35b +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.2.0-hd5240d6_1.conda#9822b874ea29af082e5d36098d25427d +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.2.0-hc0a3c3a_1.conda#234a5554c53625688d51062645337328 +https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda#edb0dca6bc32e4f4789199455a1dbeb8 +https://conda.anaconda.org/conda-forge/linux-64/make-4.4.1-hb9d3cd8_2.conda#33405d2a66b1411db9f7242c8b97c9e7 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.2-hb9d3cd8_0.conda#4d638782050ab6faa27275bed57e9b4e +https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda#b3c17d95b5a10c6e64a21fa17573e70e +https://conda.anaconda.org/conda-forge/linux-64/tzcode-2024b-hb9d3cd8_0.conda#db124840386e1f842f93372897d1b857 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.1-hb9d3cd8_1.conda#19608a9656912805b2b9a2f6bd257b04 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hb9d3cd8_1.conda#77cbc488235ebbaab2b6e912d3934bae +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.5-hb9d3cd8_0.conda#8035c64cb77ed555e3f150b7b3972480 +https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-hb9d3cd8_1004.conda#bc4cd53a083b6720d61a1519a1900878 +https://conda.anaconda.org/conda-forge/linux-64/xorg-xorgproto-2024.1-hb9d3cd8_1.conda#7c21106b851ec72c037b162c216d8f05 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.7.4-hfd43aa1_1.conda#f301eb944d297fc879c441fffe461d8a +https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.19-h756ea98_1.conda#5e08c385a1b8a79b52012b74653bbb99 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.19-h756ea98_3.conda#bfe6623096906d2502c78ccdbfc3bc7a +https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.18-h756ea98_11.conda#eadcc12bedac44f13223a2909c0e5bcc +https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda#62ee74e96c5ebb0af99386de58cf9553 
+https://conda.anaconda.org/conda-forge/linux-64/dav1d-1.2.1-hd590300_0.conda#418c6ca5929a611cbd69204907a83995 +https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.4-h5888daf_0.conda#1d6afef758879ef5ee78127eb4cd2c4a +https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 +https://conda.anaconda.org/conda-forge/linux-64/gettext-tools-0.22.5-he02047a_3.conda#fcd2016d1d299f654f81021e27496818 +https://conda.anaconda.org/conda-forge/linux-64/gflags-2.2.2-h5888daf_1005.conda#d411fc29e338efb48c5fd4576d71d881 +https://conda.anaconda.org/conda-forge/linux-64/ghostscript-10.04.0-h5888daf_0.conda#3b8d7a2df810ad5109a51472b23dbd8e +https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.2-hd590300_0.conda#3bf7b9fd5a7136126e0234db4b87c8b6 +https://conda.anaconda.org/conda-forge/linux-64/jbig-2.1-h7f98852_2003.tar.bz2#1aa0cee79792fa97b7ff4545110b60bf +https://conda.anaconda.org/conda-forge/linux-64/json-c-0.17-h1220068_1.conda#f8f0f0c4338bad5c34a4e9e11460481d +https://conda.anaconda.org/conda-forge/linux-64/jxrlib-1.1-hd590300_3.conda#5aeabe88534ea4169d4c49998f293d6c +https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 +https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb9d3cd8_2.conda#9566f0bd264fbd463002e759b8a82401 +https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb9d3cd8_2.conda#06f70867945ea6a84d35836af780f1de +https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.21-h4bc722e_0.conda#36ce76665bf67f5aac36be7a0d21b7f3 +https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 +https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d +https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 +https://conda.anaconda.org/conda-forge/linux-64/libgettextpo-0.22.5-he02047a_3.conda#efab66b82ec976930b96d62a976de8e7 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.2.0-h69a702a_1.conda#f1fd30127802683586f768875127a987 +https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_2.conda#d66573916ffcf376178462f1b61c941e +https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 +https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 +https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.4-h7f98852_1002.tar.bz2#e728e874159b042d92b90238a3cb0dc2 +https://conda.anaconda.org/conda-forge/linux-64/libopenlibm4-0.8.1-hd590300_1.conda#e6af610e01d04927a5060c95ce4e0875 +https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.44-hadc24fc_0.conda#f4cc49d7aa68316213e4b12be35308d1 +https://conda.anaconda.org/conda-forge/linux-64/libsanitizer-14.2.0-h2a3dede_1.conda#160623b9425f5c04941586da43bd1a9c +https://conda.anaconda.org/conda-forge/linux-64/libsodium-1.0.20-h4ab18f5_0.conda#a587892d3c13b6621a6091be690dbca2 +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.47.0-hadc24fc_1.conda#b6f02b52a174e612e89548f4663ce56a +https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.2.0-h4852527_1.conda#8371ac6457591af2cf6159439c1fd051 
+https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 +https://conda.anaconda.org/conda-forge/linux-64/libutf8proc-2.8.0-h166bdaf_0.tar.bz2#ede4266dc02e875fe1ea77b25dd43747 +https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b +https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.4.0-hd590300_0.conda#b26e8aa824079e1be0294e7152ca4559 +https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda#92ed62436b625154323d40d5f2f11dd7 +https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda#5aa797f8787fe7a17d1b0821485b5adc +https://conda.anaconda.org/conda-forge/linux-64/lzo-2.10-hd590300_1001.conda#ec7398d21e2651e0dcb0044d03b9a339 +https://conda.anaconda.org/conda-forge/linux-64/metis-5.1.0-hd0bcaf9_1007.conda#28eb714416de4eb83e2cbc47e99a1b45 +https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda#70caf8bb6cf39a0b6b7efc885f51c0fe +https://conda.anaconda.org/conda-forge/linux-64/nspr-4.36-h5888daf_0.conda#de9cd5bca9e4918527b9b72b6e2e1409 +https://conda.anaconda.org/conda-forge/linux-64/pkg-config-0.29.2-h4bc722e_1009.conda#1bee70681f504ea424fb07cdb090c001 +https://conda.anaconda.org/conda-forge/linux-64/rav1e-0.6.6-he8a937b_2.conda#77d9955b4abddb811cb8ab1aa7d743e4 +https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.5-h3931f03_0.conda#334dba9982ab9f5d62033c61698a8683 +https://conda.anaconda.org/conda-forge/linux-64/sed-4.8-he412f7d_0.tar.bz2#7362f0042e95681f5d371c46c83ebd08 +https://conda.anaconda.org/conda-forge/linux-64/svt-av1-2.3.0-h5888daf_0.conda#355898d24394b2af353eb96358db9fdd +https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda#d453b98d9c83e71da0741bb0ff4d76bc +https://conda.anaconda.org/conda-forge/linux-64/xorg-imake-1.0.10-h5888daf_0.conda#040f0ca9f518151897759ad09ea98b2d +https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.2-hd590300_0.conda#f08fb5c89edfc4aadee1c81d4cfb1fa1 +https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 +https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae +https://conda.anaconda.org/conda-forge/linux-64/zfp-1.0.1-h5888daf_2.conda#e0409515c467b87176b070bff5d9442e +https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-hb9d3cd8_2.conda#c9f075ab2f33b3bbee9e62d4ad0a6cd8 +https://conda.anaconda.org/conda-forge/linux-64/zlib-ng-2.2.2-h5888daf_0.conda#135fd3c66bccad3d2254f50f9809e86a +https://conda.anaconda.org/conda-forge/linux-64/aom-3.9.1-hac33072_0.conda#346722a0be40f6edc53f12640d301338 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.14.18-h2af50b2_12.conda#700f1883f5a0a28c30fd98c43d4d946f +https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hb9d3cd8_2.conda#c63b5e52939e795ba8d26e35d767a843 +https://conda.anaconda.org/conda-forge/linux-64/bwidget-1.9.14-ha770c72_1.tar.bz2#5746d6202ba2abad4a4707f2a2462795 +https://conda.anaconda.org/conda-forge/linux-64/charls-2.4.2-h59595ed_0.conda#4336bd67920dd504cd8c6761d6a99645 +https://conda.anaconda.org/conda-forge/linux-64/fmt-11.0.2-h434a139_0.conda#995f7e13598497691c1dc476d889bc04 +https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb +https://conda.anaconda.org/conda-forge/linux-64/gcc_impl_linux-64-14.2.0-h6b349bd_1.conda#0551d01d65027359bf011c049f9c6401 
+https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.2-he02047a_1.conda#aab9195bc018b82dc77a84584b36cce9 +https://conda.anaconda.org/conda-forge/linux-64/glog-0.7.1-hbabe93e_0.conda#ff862eebdfeb2fd048ae9dc92510baca +https://conda.anaconda.org/conda-forge/linux-64/gmp-6.3.0-hac33072_2.conda#c94a5994ef49749880a8139cf9afcbe1 +https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h59595ed_1003.conda#f87c7b7c2cb45f323ffbce941c78ab7c +https://conda.anaconda.org/conda-forge/linux-64/gtest-1.14.0-h434a139_2.conda#89971b339bb4dfbf3759f1f2528d81b1 +https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd77f8da987968ec3927990495dc22e4 +https://conda.anaconda.org/conda-forge/linux-64/icu-75.1-he02047a_0.conda#8b189310083baabfb622af68fd9d3ae3 +https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f +https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240116.2-cxx17_he02047a_1.conda#c48fc56ec03229f294176923c3265c05 +https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.3-h59595ed_0.conda#5e97e271911b8b2001a8b71860c32faa +https://conda.anaconda.org/conda-forge/linux-64/libasprintf-0.22.5-he8f35ee_3.conda#4fab9799da9571266d05ca5503330655 +https://conda.anaconda.org/conda-forge/linux-64/libcrc32c-1.1.2-h9c3ff4c_0.tar.bz2#c965a5aa0d5c1c37ffc62dff36e28400 +https://conda.anaconda.org/conda-forge/linux-64/libde265-1.0.15-h00ab1b0_0.conda#407fee7a5d7ab2dca12c9ca7f62310ad +https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 +https://conda.anaconda.org/conda-forge/linux-64/libgettextpo-devel-0.22.5-he02047a_3.conda#9aba7960731e6b4547b3a52f812ed801 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-14.2.0-h69a702a_1.conda#0a7f4cd238267c88e5d69f7826a407eb +https://conda.anaconda.org/conda-forge/linux-64/libhwy-1.1.0-h00ab1b0_0.conda#88928158ccfe797eac29ef5e03f7d23d +https://conda.anaconda.org/conda-forge/linux-64/libllvm14-14.0.6-hcd5def8_4.conda#73301c133ded2bf71906aa2104edae8b +https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.64.0-h161d5f1_0.conda#19e57602824042dfd0446292ef90488b +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.28-pthreads_h94d23a6_1.conda#62857b389e42b36b686331bec0922050 +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-ilp64-0.3.28-pthreads_h3e26593_1.conda#9d5c316d93ee4c5effd9afda8e8af823 +https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.20.0-h0e7cc3e_1.conda#d0ed81c4591775b70384f4cc78e05cd1 +https://conda.anaconda.org/conda-forge/linux-64/libunwind-1.6.2-h9c3ff4c_0.tar.bz2#a730b2badd586580c5752cc73842e068 +https://conda.anaconda.org/conda-forge/linux-64/libzip-1.11.2-h6991a6a_0.conda#a7b27c075c9b7f459f1c022090697cba +https://conda.anaconda.org/conda-forge/linux-64/libzopfli-1.0.3-h9c3ff4c_0.tar.bz2#c66fe2d123249af7651ebde8984c51c2 +https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 +https://conda.anaconda.org/conda-forge/linux-64/mbedtls-3.5.1-h59595ed_0.conda#a7b444a6e008b804b35521895e3440e2 +https://conda.anaconda.org/conda-forge/linux-64/nccl-2.23.4.1-h03a54cd_2.conda#a08604ac3f9c3dbd128bb24e089dee5f +https://conda.anaconda.org/conda-forge/linux-64/nss-3.106-hdf54f9c_0.conda#efe735c7dc47dddbb14b3433d11c6feb +https://conda.anaconda.org/conda-forge/linux-64/openlibm-0.8.1-hd590300_1.conda#6eba22eb06d69e53d0ca01eef42bc675 
+https://conda.anaconda.org/conda-forge/linux-64/p7zip-16.02-h9c3ff4c_1001.tar.bz2#941066943c0cac69d5aa52189451aa5f +https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.44-hba22ea6_2.conda#df359c09c41cd186fffb93a2d87aa6f5 +https://conda.anaconda.org/conda-forge/linux-64/perl-5.32.1-7_hd590300_perl5.conda#f2cfec9406850991f4e3d960cc9e3321 +https://conda.anaconda.org/conda-forge/linux-64/pixman-0.43.2-h59595ed_0.conda#71004cbf7924e19c02746ccde9fd7123 +https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda#353823361b1d27eb3960efb076dfcaf6 +https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 +https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.1-ha2e4443_0.conda#6b7dcc7349efd123d493d2dbe85a045f +https://conda.anaconda.org/conda-forge/linux-64/tktable-2.10-h8bc8fbc_6.conda#dff3627fec2c0584ded391205295abf0 +https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 +https://conda.anaconda.org/conda-forge/linux-64/uriparser-0.9.8-hac33072_0.conda#d71d3a66528853c0a1ac2c02d79a0284 +https://conda.anaconda.org/conda-forge/linux-64/x265-3.5-h924138e_3.tar.bz2#e7f6ed84d4623d52ee581325c1587a6b +https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-he73a12e_1.conda#05a8ea5f446de33006171a7afe6ae857 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.10-h4f16b4b_0.conda#0b666058a179b744a622d0a4a0c56353 +https://conda.anaconda.org/conda-forge/linux-64/xorg-makedepend-1.0.9-h59595ed_0.conda#71c756cfcc6649ed7614eb07712bfce0 +https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda#4d056880988120e29d75bfff282e0f45 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.4.3-h235a6dd_1.conda#c05358e3a231195f7f0b3f592078bb0c +https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.8.9-h5e77a74_0.conda#d7714013c40363f45850a25113e2cb05 +https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-hef167b5_0.conda#54fe76ab3d0189acaef95156874db7f9 +https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda#98514fe74548d768907ce7a13f680e8f +https://conda.anaconda.org/conda-forge/linux-64/c-blosc2-2.15.1-hc57e6cf_0.conda#5f84961d86d0ef78851cb34f9d5e31fe +https://conda.anaconda.org/conda-forge/linux-64/fftw-3.3.10-nompi_hf1063bd_110.conda#ee3e687b78b778db7b304e5b00a4dca6 +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee +https://conda.anaconda.org/conda-forge/linux-64/gfortran_impl_linux-64-14.2.0-hc73f493_1.conda#131a59b3bb1dbbfc63ec0f21eb0e8c65 +https://conda.anaconda.org/conda-forge/linux-64/gxx_impl_linux-64-14.2.0-h2c03514_1.conda#41664acd4c99ef4d192e12950ff68ca6 +https://conda.anaconda.org/conda-forge/linux-64/hdfeos2-2.20-h3e53b52_1004.conda#c21dc684e0e8efa507aba61a030f65e7 +https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 +https://conda.anaconda.org/conda-forge/linux-64/libasprintf-devel-0.22.5-he8f35ee_3.conda#1091193789bb830127ed067a9e01ac57 +https://conda.anaconda.org/conda-forge/linux-64/libavif16-1.1.1-h1909e37_2.conda#21e468ed3786ebcb2124b123aa2484b7 +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-25_linux64_openblas.conda#8ea26d42ca88ec5258802715fe1ee10b +https://conda.anaconda.org/conda-forge/linux-64/libgit2-1.8.4-hd24f944_0.conda#94887b4deb460378a34e1533beaacfd5 
+https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.2-h2ff4ddf_0.conda#13e8e54035ddd2b91875ba399f0f7c04 +https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.0-hdb8da77_2.conda#9c4554fafc94db681543804037e65de2 +https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-hf539b9f_1021.conda#e8c7620cc49de0c6a2349b6dd6e39beb +https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-4.25.3-hd5b35b9_1.conda#06def97690ef90781a91b786cb48a0a9 +https://conda.anaconda.org/conda-forge/linux-64/libre2-11-2023.09.01-h5a48ba9_2.conda#41c69fba59d495e8cf5ffda48a607e35 +https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-hc670b87_16.conda#3d9f3a2e5d7213c34997e4464d2f938c +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-h6565414_0.conda#80eaf80d84668fa5620ac9ec1b4bf56f +https://conda.anaconda.org/conda-forge/linux-64/libxgboost-2.1.2-cuda118_h09a87be_0.conda#d59c3f95f80071f24ebce434494ead0a +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.4-hb346dea_2.conda#69b90b70c434b916abf5a1d5ee5d55fb +https://conda.anaconda.org/conda-forge/linux-64/minizip-4.0.7-h401b404_0.conda#4474532a312b2245c5c77f1176989b46 +https://conda.anaconda.org/conda-forge/linux-64/mpfr-4.2.1-h90cbb55_3.conda#2eeb50cab6652538eee8fc0bc3340c81 +https://conda.anaconda.org/conda-forge/linux-64/openblas-ilp64-0.3.28-pthreads_h3d04fff_1.conda#fdaa89df7b34f5c904f8f1348e5a62a5 +https://conda.anaconda.org/conda-forge/linux-64/python-3.12.7-hc5c86c4_0_cpython.conda#0515111a9cdf69f83278f7c197db9807 +https://conda.anaconda.org/conda-forge/linux-64/s2geometry-0.10.0-h8413349_4.conda#d19f88cf8812836e6a4a2a7902ed0e77 +https://conda.anaconda.org/conda-forge/linux-64/spdlog-1.14.1-hed91bc2_1.conda#909188c8979846bac8e586908cf1ca6a +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.47.0-h9eae976_1.conda#53abf1ef70b9ae213b22caa5350f97a9 +https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.2.5-h988505b_2.conda#9dda9667feba914e0e80b95b82f7402b +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.6-hb9d3cd8_0.conda#febbab7d15033c913d53c7a2c102309d +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.1-hb9d3cd8_0.conda#4bdb303603e9821baf5fe5fdff1dc8f8 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hb9d3cd8_1.conda#a7a49a8b85122b49214798321e2e96b4 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxt-1.3.0-hb9d3cd8_2.conda#d8602724ac0d276c380b97e9eb0f814b +https://conda.anaconda.org/conda-forge/noarch/affine-2.4.0-pyhd8ed1ab_0.conda#ae5f4ad87126c55ba3f690ef07f81d64 +https://conda.anaconda.org/conda-forge/noarch/aiohappyeyeballs-2.4.3-pyhd8ed1ab_0.conda#ec763b0a58960558ca0ad7255a51a237 +https://conda.anaconda.org/conda-forge/noarch/alabaster-1.0.0-pyhd8ed1ab_0.conda#7d78a232029458d0077ede6cda30ed0c +https://conda.anaconda.org/conda-forge/noarch/asciitree-0.3.3-py_2.tar.bz2#c0481c9de49f040272556e2cedf42816 +https://conda.anaconda.org/conda-forge/linux-64/astroid-3.3.5-py312h7900ff3_0.conda#e1ed4d572a4a16b97368ab00fd646487 +https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-h04ea711_2.conda#f730d54ba9cd543666d7220c9f7ed563 +https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_0.conda#6732fa52eb8e66e5afeb32db8701a791 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.30-hec5e740_0.conda#bc1b9f70ea7fa533aefa6a8b6fbe8da7 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.10.5-h0009854_0.conda#d393d0a6c9b993771fbc67a998fccf6c 
+https://conda.anaconda.org/conda-forge/linux-64/backports.zoneinfo-0.2.1-py312h7900ff3_9.conda#6df4f61b215587c40ec93810734778ca +https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py312h2ec8cdc_2.conda#b0b867af6fc74b2a0aa206da29c0f3cf +https://conda.anaconda.org/conda-forge/linux-64/brunsli-0.1-h9c3ff4c_0.tar.bz2#c1ac6229d0bfd14f8354ff9ad2a26cad +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-hebfffa5_3.conda#fceaedf1cdbcb02df9699a0d9b005292 +https://conda.anaconda.org/conda-forge/noarch/certifi-2024.8.30-pyhd8ed1ab_0.conda#12f7d00853807b0531775e9be891cb11 +https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_0.conda#a374efa97290b8799046df7c5ca17164 +https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda#f3ad426304898027fc619827ff428eca +https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_1.conda#c88ca2bb7099167912e3b26463fff079 +https://conda.anaconda.org/conda-forge/noarch/codespell-2.3.0-pyhd8ed1ab_0.conda#6e67fa19bedafa7eb7d6ea91de53e03d +https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 +https://conda.anaconda.org/conda-forge/noarch/config-0.5.1-pyhd8ed1ab_0.tar.bz2#97275d4898af65967b1ad57923cef770 +https://conda.anaconda.org/conda-forge/noarch/configargparse-1.7-pyhd8ed1ab_0.conda#0d07dc29b1c1cc973f76b74beb44915f +https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda#5cd86562580f274031ede6aa6aa24441 +https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.27-h54b06d7_7.conda#dce22f70b4e5a407ce88f2be046f4ceb +https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.11-py312h8fd2918_3.conda#21e433caf1bb1e4c95832f8bb731d64c +https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2#961b3a227b437d82ad7054484cfa71b2 +https://conda.anaconda.org/conda-forge/noarch/dill-0.3.9-pyhd8ed1ab_0.conda#27faec84454995f6774786c7e5833cd6 +https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_0.conda#fe521c1608280cc2803ebd26dc252212 +https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_0.conda#e8cd5d629f65bdf0f3bb312cde14659e +https://conda.anaconda.org/conda-forge/noarch/dodgy-0.2.1-py_0.tar.bz2#62a69d073f7446c90f417b0787122f5b +https://conda.anaconda.org/conda-forge/noarch/ecmwf-api-client-1.6.3-pyhd8ed1ab_0.tar.bz2#15621abf59053e184114d3e1d4f9d01e +https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_0.tar.bz2#3cf04868fee0a029769bd41f4b2fbf2d +https://conda.anaconda.org/conda-forge/noarch/et_xmlfile-2.0.0-pyhd8ed1ab_0.conda#cdcdbe90dfab4075fc1f3c4cf2e4b4e5 +https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_0.conda#d02ae936e42063ca46af6cdad2dbd1e0 +https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_0.conda#15dda3cdbf330abfe9f555d22f66db46 +https://conda.anaconda.org/conda-forge/noarch/fasteners-0.17.3-pyhd8ed1ab_0.tar.bz2#348e27e78a5e39090031448c72f66d5e +https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_0.conda#916f8ec5dd4128cd5f207a3c4c07b2c6 +https://conda.anaconda.org/conda-forge/noarch/findlibs-0.0.5-pyhd8ed1ab_0.conda#8f325f63020af6f7acbe2c4cb4c920db +https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h743c826_0.conda#12e6988845706b2cfbc3bc35c9a61a95 
+https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.5.0-py312h66e93f0_0.conda#f98e36c96b2c66d9043187179ddb04f4 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.10.0-pyhff2d567_0.conda#816dbc4679a64e4417cd1385d661bb31 +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.12-hb9ae30d_0.conda#201db6c2d9a3c5e46573ac4cb2e92f4f +https://conda.anaconda.org/conda-forge/noarch/geographiclib-2.0-pyhd8ed1ab_0.tar.bz2#6b1f32359fc5d2ab7b491d0029bfffeb +https://conda.anaconda.org/conda-forge/linux-64/gettext-0.22.5-he02047a_3.conda#c7f243bbaea97cd6ea1edd693270100e +https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe +https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyh9f0ad1d_0.tar.bz2#914d6646c4dbb1fd3ff539830a12fd71 +https://conda.anaconda.org/conda-forge/noarch/humanfriendly-10.0-pyhd81877a_7.conda#74fbff91ca7c1b9a36b15903f2242f86 +https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_0.tar.bz2#9f765cbfab6870c8435b9eefecd7a1f4 +https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_0.conda#7ba2ede0e7c795ff95088daf0dc59753 +https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 +https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 +https://conda.anaconda.org/conda-forge/noarch/isodate-0.7.2-pyhd8ed1ab_0.conda#d68d25aca67d1a06bf6f5b43aea9430d +https://conda.anaconda.org/conda-forge/noarch/itsdangerous-2.2.0-pyhd8ed1ab_0.conda#ff7ca04134ee8dde1d7cf491a78ef7c7 +https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.7-py312h68727a3_0.conda#444266743652a4f1538145e9362f6d3b +https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda#51bb7010fc86f70eee639b4bb7a894f5 +https://conda.anaconda.org/conda-forge/noarch/legacy-cgi-2.6.1-pyh5b84bb0_3.conda#f258b7f54b5d9ddd02441f10c4dca2ac +https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.7.4-hfca40fe_0.conda#32ddb97f897740641d8d46a829ce1704 +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-25_linux64_openblas.conda#5dbd1b0fc0d01ec5e0e1fbe667281a11 +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.9.1-hdb1bdb2_0.conda#7da1d242ca3591e174a3c7d82230d3c0 +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hd3e95f3_10.conda#30ee3a29c84cf7b842a8c5828c4b7c13 +https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.0-ha6d2627_1004.conda#df069bea331c8486ac21814969301c1f +https://conda.anaconda.org/conda-forge/linux-64/libheif-1.18.2-gpl_hffcb242_100.conda#76ac2c07b62d45c192940f010eea11fa +https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.11.2-default_he43201b_1000.conda#36247217c4e1018085bd9db41eb3526a +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-25_linux64_openblas.conda#4dc03a53fc69371a6158d0ed37214cd3 +https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.4.0-h2c329e2_0.conda#80030debaa84cfc31755d53742df3ca6 +https://conda.anaconda.org/conda-forge/linux-64/libxslt-1.1.39-h76b75d6_0.conda#e71f31f8cfb0a91439f2086fc8aa0461 +https://conda.anaconda.org/conda-forge/linux-64/llvmlite-0.43.0-py312h374181b_1.conda#ed6ead7e9ab9469629c6cfb363b5c6e2 +https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 +https://conda.anaconda.org/conda-forge/linux-64/lz4-4.3.3-py312hb3f7f12_1.conda#b99d90ef4e77acdab74828f79705a919 
+https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.2-py312h178313f_0.conda#a755704ea0e2503f8c227d84829a8e81 +https://conda.anaconda.org/conda-forge/noarch/mccabe-0.7.0-pyhd8ed1ab_0.tar.bz2#34fc335fc50eef0b5ea708f2b5f54e0c +https://conda.anaconda.org/conda-forge/noarch/mistune-3.0.2-pyhd8ed1ab_0.conda#5cbee699846772cc939bef23a0d524ed +https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.0-py312h68727a3_0.conda#5c9b020a3f86799cdc6115e55df06146 +https://conda.anaconda.org/conda-forge/linux-64/multidict-6.1.0-py312h178313f_1.conda#e397d9b841c37fc3180b73275ce7e990 +https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 +https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_0.conda#4eccaeba205f0aed9ac3a9ea58568ca3 +https://conda.anaconda.org/conda-forge/noarch/natsort-8.4.0-pyhd8ed1ab_0.conda#70959cd1db3cf77b2a27a0836cfd08a7 +https://conda.anaconda.org/conda-forge/noarch/networkx-3.4.2-pyhd8ed1ab_1.conda#1d4c088869f206413c59acdd309908b7 +https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.2-h488ebb8_0.conda#7f2e286780f072ed750df46dc2631138 +https://conda.anaconda.org/conda-forge/linux-64/orc-2.0.2-h669347b_0.conda#1e6c10f7d749a490612404efeb179eb8 +https://conda.anaconda.org/conda-forge/noarch/packaging-24.1-pyhd8ed1ab_0.conda#cbe1bb1f21567018ce595d9c2be0f0db +https://conda.anaconda.org/conda-forge/noarch/pandocfilters-1.5.0-pyhd8ed1ab_0.tar.bz2#457c2c8c08e54905d6954e79cb5b5db9 +https://conda.anaconda.org/conda-forge/noarch/pathspec-0.12.1-pyhd8ed1ab_0.conda#17064acba08d3686f1135b5ec1b32b12 +https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_1.conda#405678b942f2481cecdb3e010f4925d9 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_0.conda#fd8f2b18b65bbf62e8f653100690c8d2 +https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_0.conda#d3483c8fc2dc2cc3f5cf43e26d60cabf +https://conda.anaconda.org/conda-forge/linux-64/propcache-0.2.0-py312h66e93f0_2.conda#2c6c0c68f310bc33972e7c83264d7786 +https://conda.anaconda.org/conda-forge/linux-64/psutil-6.1.0-py312h66e93f0_0.conda#0524eb91d3d78d76d671c6e3cd7cee82 +https://conda.anaconda.org/conda-forge/noarch/pycodestyle-2.12.1-pyhd8ed1ab_0.conda#72453e39709f38d0494d096bb5f678b7 +https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda#844d9eb3b43095b031874477f7d70088 +https://conda.anaconda.org/conda-forge/noarch/pyflakes-3.2.0-pyhd8ed1ab_0.conda#0cf7fef6aa123df28adb21a590065e3d +https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_0.conda#b7f5c092b8f9800150d998a71b76d5a1 +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.0-pyhd8ed1ab_1.conda#035c17fbf099f50ff60bf2eb303b0a83 +https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 +https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 +https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.20.0-pyhd8ed1ab_0.conda#b98d2018c01ce9980c03ee2850690fab +https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.2-pyhd8ed1ab_0.conda#986287f89929b2d629bd6ef6497dc307 +https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.5.0-py312h66e93f0_1.conda#39aed2afe4d0cf76ab3d6b09eecdbea7 +https://conda.anaconda.org/conda-forge/noarch/pytz-2024.2-pyhd8ed1ab_0.conda#260009d03c9d5c0f111904d851f053dc 
+https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py312h66e93f0_1.conda#549e5930e768548a89c23f595dac5a95 +https://conda.anaconda.org/conda-forge/linux-64/re2-2023.09.01-h7f4b329_2.conda#8f70e36268dea8eb666ef14c29bd3cda +https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.21.0-py312h12e396e_0.conda#37f4ad7cb4214c799f32e5f411c6c69f +https://conda.anaconda.org/conda-forge/linux-64/ruamel.yaml.clib-0.2.8-py312h66e93f0_1.conda#532c3e5d0280be4fea52396ec1fa7d5d +https://conda.anaconda.org/conda-forge/noarch/semver-3.0.2-pyhd8ed1ab_0.conda#5efb3fccda53974aed800b6d575f72ed +https://conda.anaconda.org/conda-forge/noarch/setoptconf-tmp-0.3.1-pyhd8ed1ab_0.tar.bz2#af3e36d4effb85b9b9f93cd1db0963df +https://conda.anaconda.org/conda-forge/noarch/setuptools-75.3.0-pyhd8ed1ab_0.conda#2ce9825396daf72baabaade36cee16da +https://conda.anaconda.org/conda-forge/linux-64/simplejson-3.19.3-py312h66e93f0_1.conda#c8d1a609d5f3358d715c2273011d9f4d +https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 +https://conda.anaconda.org/conda-forge/noarch/smmap-5.0.0-pyhd8ed1ab_0.tar.bz2#62f26a3d1387acee31322208f0cfa3e0 +https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e +https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d +https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda#3f144b2c34f8cb5a9abd9ed23a39c561 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_0.conda#da1d979339e2714c30a8e806a33ec087 +https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda#04eedddeb68ad39871c8127dd1c21f4f +https://conda.anaconda.org/conda-forge/noarch/tenacity-9.0.0-pyhd8ed1ab_0.conda#42af51ad3b654ece73572628ad2882ae +https://conda.anaconda.org/conda-forge/noarch/termcolor-2.5.0-pyhd8ed1ab_0.conda#29a5d22565b850099cd9959862d1b154 +https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.5.0-pyhc1e730c_0.conda#df68d78237980a159bd7149f33c0e8fd +https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 +https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.2-pyhd8ed1ab_0.conda#e977934e00b355ff55ed154904044727 +https://conda.anaconda.org/conda-forge/noarch/tomlkit-0.13.2-pyha770c72_0.conda#0062a5f3347733f67b0f33ca48cc21dd +https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_0.conda#34feccdd4177f2d3d53c73fc44fd9a37 +https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.1-py312h66e93f0_1.conda#af648b62462794649066366af4ecd5b0 +https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_0.conda#3df84416a021220d8b5700c613af2dc5 +https://conda.anaconda.org/conda-forge/noarch/trove-classifiers-2024.10.21.16-pyhd8ed1ab_0.conda#501f6d3288160a31d99a2f1321e77393 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda#ebe6952715e1d5eb567eeebf25250fa7 +https://conda.anaconda.org/conda-forge/linux-64/ujson-5.10.0-py312h2ec8cdc_1.conda#96226f62dddc63226472b7477d783967 +https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py312h66e93f0_1.conda#588486a61153f94c7c13816f7069e440 +https://conda.anaconda.org/conda-forge/noarch/untokenize-0.1.1-pyhd8ed1ab_1.conda#6042b782b893029aa40335782584a092 +https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_2.conda#daf5160ff9cde3a468556965329085b9 
+https://conda.anaconda.org/conda-forge/noarch/wheel-0.45.0-pyhd8ed1ab_0.conda#f9751d7c71df27b2d29f5cab3378982e +https://conda.anaconda.org/conda-forge/noarch/xlsxwriter-3.2.0-pyhd8ed1ab_0.conda#a1f7264726115a2f8eac9773b1f27eba +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda#17dcc85db3c7886650b8908b183d6876 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxmu-1.2.1-hb9d3cd8_1.conda#f35a9a2da717ade815ffa70c0e8bdfbd +https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.9.0-pyhd8ed1ab_0.conda#156c91e778c1d4d57b709f8c5333fd06 +https://conda.anaconda.org/conda-forge/noarch/yapf-0.32.0-pyhd8ed1ab_0.tar.bz2#177cba0b4bdfacad5c5fbb0ed31504c4 +https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-h3b0a872_6.conda#113506c8d2d558e733f5c38f6bf08c50 +https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 +https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_0.conda#fee389bf8a4843bd7a2248ce11b7f188 +https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_0.conda#1bb1ef9806a9a20872434f58b3e7fc1a +https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.3.1-pyhd8ed1ab_0.tar.bz2#d1e1eb7e21a9e2c74279d87dafb68156 +https://conda.anaconda.org/conda-forge/linux-64/arpack-3.9.1-nompi_h77f6705_101.conda#ff39030debb47f6b53b45bada38e0903 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.6.5-hbaf354b_4.conda#2cefeb144de7712995d1b52cc6a3864c +https://conda.anaconda.org/conda-forge/linux-64/azure-core-cpp-1.13.0-h935415a_0.conda#debd1677c2fea41eb2233a260f48a298 +https://conda.anaconda.org/conda-forge/noarch/babel-2.16.0-pyhd8ed1ab_0.conda#6d4e9ecca8d88977147e109fc7053184 +https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda#332493000404d8411859539a5a630865 +https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyhd8ed1ab_0.conda#461bcfab8e65c166e297222ae919a2d4 +https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py312h06ac9bb_0.conda#a861504bbea4161a9170b85d4d2be840 +https://conda.anaconda.org/conda-forge/linux-64/cfitsio-4.4.1-hf8ad068_0.conda#1b7a01fd02d11efe0eb5a676842a7b7d +https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-py_0.tar.bz2#4fd2c6b53934bd7d96d1f3fdaf99b79f +https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_1.tar.bz2#a29b7c141d6b2de4bb67788a5f107734 +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.4-py312h178313f_0.conda#a32fbd2322865ac80c7db74c553f5306 +https://conda.anaconda.org/conda-forge/linux-64/curl-8.9.1-h18eb788_0.conda#2e7dedf73dfbfcee662e2a0f6175e4bb +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.0.0-py312h66e93f0_1.conda#a921e2fe122e7f38417b9b17c7a13343 +https://conda.anaconda.org/conda-forge/noarch/docformatter-1.7.5-pyhd8ed1ab_0.conda#3a941b6083e945aa87e739a9b85c82e9 +https://conda.anaconda.org/conda-forge/noarch/docrep-0.3.2-pyh44b312d_0.tar.bz2#235523955bc1bfb019d7ec8a2bb58f9a +https://conda.anaconda.org/conda-forge/noarch/fire-0.7.0-pyhd8ed1ab_0.conda#c8eefdf1e822c56a6034602e67bc92a5 +https://conda.anaconda.org/conda-forge/noarch/flake8-7.1.1-pyhd8ed1ab_0.conda#a25e5df6b26be3c2d64be307c1ef0b37 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.54.1-py312h178313f_1.conda#bbbf5fa5cab622c33907bc8d7eeea9f7 +https://conda.anaconda.org/conda-forge/linux-64/freeglut-3.2.2-ha6d2627_3.conda#84ec3f5b46f3076be49f2cf3f1cfbf02 
+https://conda.anaconda.org/conda-forge/noarch/geopy-2.4.1-pyhd8ed1ab_1.conda#358c17429c97883b2cb9ab5f64bc161b +https://conda.anaconda.org/conda-forge/linux-64/git-2.46.0-pl5321hb5640b7_0.conda#825d146359bc8b85083d92259d0a0e1b +https://conda.anaconda.org/conda-forge/noarch/gitdb-4.0.11-pyhd8ed1ab_0.conda#623b19f616f2ca0c261441067e18ae40 +https://conda.anaconda.org/conda-forge/linux-64/gsl-2.7-he838d99_0.tar.bz2#fec079ba39c9cca093bf4c00001825de +https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_0.tar.bz2#b748fbf7060927a6e82df7cb5ee8f097 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-9.0.0-hda332d3_1.conda#76b32dcf243444aea9c6b804bcfa40b8 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.3-nompi_hdf9ad27_105.conda#7e1729554e209627636a0f6fabcdd115 +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-7.2.1-pyha770c72_0.conda#b9f5330c0853ccabc39a9878c6f1a2ab +https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.5-pyhd8ed1ab_0.conda#c808991d29b9838fb4d96ce8267ec9ec +https://conda.anaconda.org/conda-forge/noarch/isort-5.13.2-pyhd8ed1ab_0.conda#1d25ed2b95b92b026aaa795eabec8d91 +https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda#7b86ecb7d3557821c649b3c31e3eb9f2 +https://conda.anaconda.org/conda-forge/noarch/joblib-1.4.2-pyhd8ed1ab_0.conda#25df261d4523d9f9783bcdb7208d872f +https://conda.anaconda.org/conda-forge/noarch/jupyter_core-5.7.2-pyh31011fe_1.conda#0a2980dada0dd7fd0998f0342308b1b1 +https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_1.conda#afcd1b53bcac8844540358e33f33d28f +https://conda.anaconda.org/conda-forge/noarch/latexcodec-2.0.1-pyh9f0ad1d_0.tar.bz2#8d67904973263afd2985ba56aa2d6bb4 +https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.62.2-h15f2491_0.conda#8dabe607748cb3d7002ad73cd06f1325 +https://conda.anaconda.org/conda-forge/noarch/logilab-common-1.7.3-py_0.tar.bz2#6eafcdf39a7eb90b6d951cfff59e8d3b +https://conda.anaconda.org/conda-forge/linux-64/lxml-5.3.0-py312he28fd5a_2.conda#3acf38086326f49afed094df4ba7c9d9 +https://conda.anaconda.org/conda-forge/noarch/nested-lookup-0.2.25-pyhd8ed1ab_1.tar.bz2#2f59daeb14581d41b1e2dda0895933b2 +https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda#dfe0528d0f1c16c1f7c528ea5536ab30 +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.4-py312heda63a1_0.conda#d8285bea2a350f63fab23bf460221f3f +https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.8-hedd0468_0.conda#dcd0ed5147d8876b0848a552b416ce76 +https://conda.anaconda.org/conda-forge/linux-64/openpyxl-3.1.5-py312h710cb58_1.conda#69a8838436435f59d72ddcb8dfd24a28 +https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 +https://conda.anaconda.org/conda-forge/linux-64/pillow-11.0.0-py312h7b63e92_0.conda#385f46a4df6f97892503a841121a9acf +https://conda.anaconda.org/conda-forge/noarch/pip-24.3.1-pyh8b19718_0.conda#5dd546fe99b44fda83963d15f84263b7 +https://conda.anaconda.org/conda-forge/noarch/plotly-5.24.1-pyhd8ed1ab_0.conda#81bb643d6c3ab4cbeaf724e9d68d0a6a +https://conda.anaconda.org/conda-forge/linux-64/poppler-24.08.0-h47131b8_1.conda#0854b9ff0cc10a1f6f67b0f352b8e75a +https://conda.anaconda.org/conda-forge/linux-64/proj-9.4.1-h54d7996_1.conda#e479d1991c725e1a355f33c0e40dbc66 +https://conda.anaconda.org/conda-forge/noarch/pydocstyle-6.3.0-pyhd8ed1ab_0.conda#7e23a61a7fbaedfef6eb0e1ac775c8e5 
+https://conda.anaconda.org/conda-forge/noarch/pyproject_hooks-1.2.0-pyh7850678_0.conda#5003da197661e40a2509e9c4651f1eea +https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda#c03d61f31f38fdb9facf70c29958bf7a +https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda#2cf4264fffb9e6eff6031c5b6884d61c +https://conda.anaconda.org/conda-forge/noarch/python-utils-3.9.0-pyhff2d567_0.conda#ae8d4e318695c0d3e3464ed95cc8b385 +https://conda.anaconda.org/conda-forge/linux-64/pyzmq-26.2.0-py312hbf22597_3.conda#746ce19f0829ec3e19c93007b1a224d3 +https://conda.anaconda.org/conda-forge/noarch/rdflib-7.1.1-pyh0610db2_0.conda#325219de79481bcf5b6446d327e3d492 +https://conda.anaconda.org/conda-forge/noarch/referencing-0.35.1-pyhd8ed1ab_0.conda#0fc8b52192a8898627c3efae1003e9f6 +https://conda.anaconda.org/conda-forge/noarch/requirements-detector-1.3.1-pyhd8ed1ab_0.conda#f921ea6a1138cc7edee77de8ed12b226 +https://conda.anaconda.org/conda-forge/noarch/retrying-1.3.3-pyhd8ed1ab_3.conda#1f7482562f2082f1b2abf8a3e2a41b63 +https://conda.anaconda.org/conda-forge/linux-64/ruamel.yaml-0.18.6-py312h66e93f0_1.conda#28ed869ade5601ee374934a31c9d628e +https://conda.anaconda.org/conda-forge/linux-64/tbb-2022.0.0-hceb3a55_0.conda#79f0161f3ca73804315ca980f65d9c60 +https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.4.0-pyhd8ed1ab_0.conda#f1acf5fdefa8300de697982bcb1761c9 +https://conda.anaconda.org/conda-forge/noarch/tqdm-4.67.0-pyhd8ed1ab_0.conda#196a9e6ab4e036ceafa516ea036619b0 +https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda#52d648bd608f5737b123f510bb5514b5 +https://conda.anaconda.org/conda-forge/noarch/url-normalize-1.4.3-pyhd8ed1ab_0.tar.bz2#7c4076e494f0efe76705154ac9302ba6 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.27.1-pyhd8ed1ab_0.conda#dae21509d62aa7bf676279ced3edcb3f +https://conda.anaconda.org/conda-forge/noarch/webob-1.8.9-pyhd8ed1ab_0.conda#ff98f23ad74d2a3256debcd9df65d37d +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxpm-3.5.17-hb9d3cd8_1.conda#f35bec7fface97f67f44ca952fc740b7 +https://conda.anaconda.org/conda-forge/noarch/yamale-5.2.1-pyhca7485f_0.conda#c089f90a086b6214c5606368d0d3bad0 +https://conda.anaconda.org/conda-forge/noarch/yamllint-1.35.1-pyhd8ed1ab_0.conda#a1240b99a7ccd953879dc63111823986 +https://conda.anaconda.org/conda-forge/linux-64/yarl-1.16.0-py312h66e93f0_0.conda#c3f4a6b56026c22319bf31514662b283 +https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.10.10-py312h178313f_0.conda#d2f9e490ab2eae3e661b281346618a82 +https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.28.2-h6c0439f_6.conda#4e472c316d08af60faeb71f86d7563e1 +https://conda.anaconda.org/conda-forge/linux-64/azure-identity-cpp-1.8.0-hd126650_2.conda#36df3cf05459de5d0a41c77c4329634b +https://conda.anaconda.org/conda-forge/linux-64/azure-storage-common-cpp-12.7.0-h10ac4d7_1.conda#ab6d507ad16dbe2157920451d662e4a1 +https://conda.anaconda.org/conda-forge/noarch/cattrs-24.1.2-pyhd8ed1ab_0.conda#ac582de2324988b79870b50c89c91c75 +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.4-py312hc0a28a1_1.conda#990033147b0a998e756eaaed6b28f48d +https://conda.anaconda.org/conda-forge/noarch/colorspacious-1.1.2-pyh24bf2e0_0.tar.bz2#b73afa0d009a51cabd3ec99c4d2ef4f3 +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.0-py312h68727a3_2.conda#ff28f374b31937c048107521c814791e +https://conda.anaconda.org/conda-forge/linux-64/cryptography-43.0.3-py312hda17c39_0.conda#2abada8c216dd6e32514535a3fa245d4 
+https://conda.anaconda.org/conda-forge/noarch/eofs-1.4.1-pyhd8ed1ab_1.conda#5fc43108dee4106f23050acc7a101233 +https://conda.anaconda.org/conda-forge/noarch/flake8-polyfill-1.0.2-py_0.tar.bz2#a53db35e3d07f0af2eccd59c2a00bffe +https://conda.anaconda.org/conda-forge/noarch/funcargparse-0.2.5-pyhd8ed1ab_0.tar.bz2#e557b70d736251fa0bbb7c4497852a92 +https://conda.anaconda.org/conda-forge/linux-64/geotiff-1.7.3-hf7fa9e8_2.conda#1d6bdc6b2c62c8cc90c67b50142d7b7f +https://conda.anaconda.org/conda-forge/noarch/gitpython-3.1.43-pyhd8ed1ab_0.conda#0b2154c1818111e17381b1df5b4b0176 +https://conda.anaconda.org/conda-forge/linux-64/hdfeos5-5.1.16-hf1a501a_15.conda#d2e16a32f41d67c7d280da11b2846328 +https://conda.anaconda.org/conda-forge/linux-64/imagecodecs-2024.6.1-py312h6d9a048_4.conda#a810fadedc4edc06b4282d1222467837 +https://conda.anaconda.org/conda-forge/noarch/imageio-2.36.0-pyh12aca89_1.conda#36349844ff73fcd0140ee7f30745f0bf +https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-7.2.1-hd8ed1ab_0.conda#d6c936d009aa63e5f82d216c95cdcaee +https://conda.anaconda.org/conda-forge/linux-64/jasper-4.2.4-h536e39c_0.conda#9518ab7016cf4564778aef08b6bd8792 +https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2024.10.1-pyhd8ed1ab_0.conda#720745920222587ef942acfbc578b584 +https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.3-pyhd8ed1ab_0.conda#a14218cfb29662b4a19ceb04e93e298e +https://conda.anaconda.org/conda-forge/linux-64/kealib-1.5.3-hf8d3e68_2.conda#ffe68c611ae0ccfda4e7a605195e22b3 +https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.4-pyhd8ed1ab_1.conda#4809b9f4c6ce106d443c3f90b8e10db2 +https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.28.0-h26d7fe4_0.conda#2c51703b4d775f8943c08a361788131b +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h135f659_114.conda#a908e463c710bd6b10a9eaa89fdf003c +https://conda.anaconda.org/conda-forge/linux-64/libpq-17.0-h04577a9_4.conda#392cae2a58fbcb9db8c2147c6d6d1620 +https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-h15fa968_9.conda#4957a903bd6a68cc2e53e47476f9c6f4 +https://conda.anaconda.org/conda-forge/noarch/magics-python-1.5.8-pyhd8ed1ab_1.conda#3fd7e3db129f12362642108f23fde521 +https://conda.anaconda.org/conda-forge/linux-64/numba-0.60.0-py312h83e6fd3_0.conda#e064ca33edf91ac117236c4b5dee207a +https://conda.anaconda.org/conda-forge/linux-64/numcodecs-0.13.1-py312hf9745cd_0.conda#33c27209bfd7af6766211facd24839ce +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.4-py312hfb8ada1_0.conda#d0745ae74c2b26571b692ddde112eebb +https://conda.anaconda.org/conda-forge/linux-64/pango-1.54.0-h4c5309f_1.conda#7df02e445367703cd87a574046e3a6f0 +https://conda.anaconda.org/conda-forge/noarch/patsy-0.5.6-pyhd8ed1ab_0.conda#a5b55d1cb110cdcedc748b5c3e16e687 +https://conda.anaconda.org/conda-forge/noarch/progressbar2-4.5.0-pyhd8ed1ab_0.conda#6f9eb38d0a87898cf5a7c91adaccd691 +https://conda.anaconda.org/conda-forge/noarch/pybtex-0.24.0-pyhd8ed1ab_2.tar.bz2#2099b86a7399c44c0c61cdb6de6915ba +https://conda.anaconda.org/conda-forge/noarch/pylint-3.3.1-pyhd8ed1ab_0.conda#2a3426f75e2172c932131f4e3d51bcf4 +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py312h9211aeb_9.conda#173afeb0d112c854fd1a9fcac4b5cce3 +https://conda.anaconda.org/conda-forge/linux-64/pys2index-0.1.5-py312hfb10629_0.conda#325cc5f0e0dc36562f3de2a4dbded572 +https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.0.0-pyhd8ed1ab_0.conda#cb8a11b6d209e3d85e5094bdbd9ebd9c 
+https://conda.anaconda.org/conda-forge/noarch/pytest-env-1.1.5-pyhd8ed1ab_0.conda#ecd5e850bcd3eca02143e7df030ee50f +https://conda.anaconda.org/conda-forge/noarch/pytest-metadata-3.1.1-pyhd8ed1ab_0.conda#52b91ecba854d55b28ad916a8b10da24 +https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.14.0-pyhd8ed1ab_0.conda#4b9b5e086812283c052a9105ab1e254e +https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda#b39568655c127a9c4a44d178ac99b6d0 +https://conda.anaconda.org/conda-forge/noarch/python-build-1.2.2.post1-pyhff2d567_0.conda#bd5ae3c630d5eed353badb091fd3e603 +https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.7.0-py312hc0a28a1_2.conda#8300d634adec4a6aed35a87e90e9cb07 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py312h62794b6_1.conda#b43233a9e2f62fb94affe5607ea79473 +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py312h6cab151_1.conda#5be02e05e1adaa42826cc6800ce399bc +https://conda.anaconda.org/conda-forge/noarch/snuggs-1.4.7-pyhd8ed1ab_1.conda#5abeaa41ec50d4d1421a8bc8fbc93054 +https://conda.anaconda.org/conda-forge/linux-64/suitesparse-7.8.3-hb42a789_0.conda#216922e19843f5662a2b260f905640cb +https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py312h68727a3_5.conda#f9664ee31aed96c85b7319ab0a693341 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxaw-1.0.16-hb9d3cd8_0.conda#7c0a9bf62d573409d12ad14b362a96e5 +https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py312hef9b889_1.conda#8b7069e9792ee4e5b4919a7a306d2e67 +https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.379-h5a9005d_9.conda#5dc18b385893b7991a3bbeb135ad7c3e +https://conda.anaconda.org/conda-forge/linux-64/azure-storage-blobs-cpp-12.12.0-hd2e3451_0.conda#61f1c193452f0daa582f39634627ea33 +https://conda.anaconda.org/conda-forge/noarch/bokeh-3.6.1-pyhd8ed1ab_0.conda#e88d74bb7b9b89d4c9764286ceb94cc9 +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.0-py312hc0a28a1_0.conda#8b5b812d4c18cb37bda7a7c8d3a6acb3 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.11.0-pyhd8ed1ab_0.conda#75c96f0655908f596a57be60251b78d4 +https://conda.anaconda.org/conda-forge/linux-64/eccodes-2.38.3-h8bb6dbc_1.conda#73265d4acc551063cc5c5beab37f33c5 +https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h6470451_5.conda#1483ba046164be27df7f6eddbcec3a12 +https://conda.anaconda.org/conda-forge/noarch/identify-2.6.2-pyhd8ed1ab_0.conda#636950f839e065401e2031624a414f0b +https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a +https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_0.conda#da304c192ad59975202859b367d0f6a2 +https://conda.anaconda.org/conda-forge/linux-64/julia-1.10.4-hf18f99d_1.conda#cc0ef9c191bab16211970a29b6787d69 +https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.4-pyhd8ed1ab_1.conda#ec6f70b8a5242936567d4f886726a372 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.9.2-h353785f_1.conda#c363d0b330b4b21b4c1b10e0981d3a99 +https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.28.0-ha262f82_0.conda#9e7960f0b9ab3895ef73d92477c47dae +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.58.4-hc0ffecb_0.conda#83f045969988f5c7a65f3950b95a8b35 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.9.2-py312hd3ec401_2.conda#2380c9ba933ffaac9ad16d8eac8e3318 
+https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_h22f9119_106.conda#5b911bfe75855326bae6857451268e59 +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.1-nompi_py312h21d6d8e_102.conda#9049ba34261ce7106220711d313fcf61 +https://conda.anaconda.org/conda-forge/noarch/pep8-naming-0.10.0-pyh9f0ad1d_0.tar.bz2#b3c5536e4f9f58a4b16adb6f1e11732d +https://conda.anaconda.org/conda-forge/linux-64/postgresql-17.0-h1122569_4.conda#028ea131f116f13bb2a4a382b5863a04 +https://conda.anaconda.org/conda-forge/noarch/pylint-plugin-utils-0.8.2-pyhd8ed1ab_0.conda#84377261c09c02182d76fbe79e69c9bf +https://conda.anaconda.org/conda-forge/noarch/pyopenssl-24.2.1-pyhd8ed1ab_2.conda#85fa2fdd26d5a38792eb57bc72463f07 +https://conda.anaconda.org/conda-forge/noarch/pytest-html-4.1.1-pyhd8ed1ab_0.conda#4d2040212307d18392a2687772b3a96d +https://conda.anaconda.org/conda-forge/linux-64/r-base-4.2.3-h32f4cee_16.conda#feee98a221344be7a447b80b410df867 +https://conda.anaconda.org/conda-forge/linux-64/scikit-learn-1.5.2-py312h7a48858_1.conda#6b5f4c68483bd0c22bca9094dafc606b +https://conda.anaconda.org/conda-forge/noarch/seawater-3.3.5-pyhd8ed1ab_0.conda#8e1b01f05e8f97b0fcc284f957175903 +https://conda.anaconda.org/conda-forge/noarch/sparse-0.15.4-pyh267e887_1.conda#40d80cd9fa4cc759c6dba19ea96642db +https://conda.anaconda.org/conda-forge/linux-64/statsmodels-0.14.4-py312hc0a28a1_0.conda#97dc960f3d9911964d73c2cf240baea5 +https://conda.anaconda.org/conda-forge/linux-64/tempest-remap-2.2.0-h13910d2_3.conda#7f10762cd62c8ad03323c4dc3ee544b1 +https://conda.anaconda.org/conda-forge/noarch/tifffile-2024.9.20-pyhd8ed1ab_0.conda#6de55c7859ed314159eaf2b7b4f19cc7 +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_0.conda#6b55867f385dd762ed99ea687af32a69 +https://conda.anaconda.org/conda-forge/noarch/xarray-2024.10.0-pyhd8ed1ab_0.conda#53e365732dfa053c4d19fc6b927392c4 +https://conda.anaconda.org/conda-forge/noarch/zarr-2.18.3-pyhd8ed1ab_0.conda#41abde21508578e02e3fd492e82a05cd +https://conda.anaconda.org/conda-forge/linux-64/azure-storage-files-datalake-cpp-12.11.0-h325d260_1.conda#11d926d1f4a75a1b03d1c053ca20424b +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.23.0-py312hf9745cd_2.conda#cc3ecff140731b46b970a7c4787b1823 +https://conda.anaconda.org/conda-forge/noarch/cf_xarray-0.10.0-pyhd8ed1ab_0.conda#9437cfe346eab83b011b4def99f0e879 +https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_0.conda#53df00540de0348ed1b2a62684dd912b +https://conda.anaconda.org/conda-forge/noarch/distributed-2024.11.0-pyhd8ed1ab_0.conda#497f3535cbb69cd2f02158e2e18ee0bb +https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 +https://conda.anaconda.org/conda-forge/linux-64/gdal-3.9.2-py312h1299960_7.conda#9cf27e3f9d97ea13f250db9253a25dc8 +https://conda.anaconda.org/conda-forge/linux-64/graphviz-12.0.0-hba01fac_0.conda#953e31ea00d46beb7e64a79fc291ec44 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-fits-3.9.2-h2db6552_7.conda#524e64f1aa0ebc87230109e684f392f4 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-grib-3.9.2-hc3b29a1_7.conda#56a7436a66a1a4636001ce4b621a3a33 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf4-3.9.2-hd5ecb85_7.conda#9c8431dc0b83d5fe9c12a2c0b6861a72 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf5-3.9.2-h6283f77_7.conda#c8c82df3aece4e23804d178a8a8b308a 
+https://conda.anaconda.org/conda-forge/linux-64/libgdal-jp2openjpeg-3.9.2-h1b2c38e_7.conda#f0f86f8cb8835bb91acb8c7fa2c350b0 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-pdf-3.9.2-h600f43f_7.conda#567066db0820f4983a6741e429c651d1 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-pg-3.9.2-h5e77dd0_7.conda#e86b26f53ae868565e95fde5b10753d3 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-postgisraster-3.9.2-h5e77dd0_7.conda#3392965ffc4e8b7c66a532750ce0e91f +https://conda.anaconda.org/conda-forge/linux-64/libgdal-xls-3.9.2-h03c987c_7.conda#165f12373452e8d17889e9c877431acf +https://conda.anaconda.org/conda-forge/linux-64/magics-4.15.4-h24e9adf_1.conda#9731bb0d2a3917cab718fd7c90dea857 +https://conda.anaconda.org/conda-forge/noarch/myproxyclient-2.1.1-pyhd8ed1ab_0.conda#bcdbeb2b693eba886583a907840c6421 +https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_0.conda#0b57b5368ab7fc7cdc9e3511fa867214 +https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 +https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_0.conda#5971cc64048943605f352f7f8612de6c +https://conda.anaconda.org/conda-forge/linux-64/psyplot-1.5.1-py312h7900ff3_1.conda#f110e71421e5c86e50232cc027c6d85c +https://conda.anaconda.org/conda-forge/noarch/py-xgboost-2.1.2-cuda118_pyh40095f8_0.conda#aa5881b02bd9555a7b06c709aa33bd20 +https://conda.anaconda.org/conda-forge/noarch/pylint-celery-0.3-py_1.tar.bz2#e29456a611a62d3f26105a2f9c68f759 +https://conda.anaconda.org/conda-forge/noarch/pylint-django-2.6.1-pyhd8ed1ab_0.conda#d1023ccf92d8235cd4808ef53e274a5e +https://conda.anaconda.org/conda-forge/noarch/pylint-flask-0.6-py_0.tar.bz2#5a9afd3d0a61b08d59eed70fab859c1b +https://conda.anaconda.org/conda-forge/linux-64/python-eccodes-2.37.0-py312hc0a28a1_0.conda#476b0357e207e10d2b7b13ed82156e6d +https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py312hc0a28a1_3.conda#81bbcb20ea4a53b05a8cf51f31496038 +https://conda.anaconda.org/conda-forge/noarch/r-abind-1.4_5-r42hc72bb7e_1005.conda#f2744985b083b1bbffd4df19437cf1e8 +https://conda.anaconda.org/conda-forge/linux-64/r-backports-1.5.0-r42hb1dbf0f_0.conda#d879e1fbd80113312364a5db3682c789 +https://conda.anaconda.org/conda-forge/noarch/r-bigmemory.sri-0.1.8-r42hc72bb7e_0.conda#383f36b5a0b7dd7c467aa1b6b5fe7307 +https://conda.anaconda.org/conda-forge/linux-64/r-cli-3.6.3-r42ha18555a_0.conda#93fc8055b8aee751e201604a02d7d06f +https://conda.anaconda.org/conda-forge/noarch/r-codetools-0.2_20-r42hc72bb7e_0.conda#a9e9276ab95d053b9db56159cfeda2c9 +https://conda.anaconda.org/conda-forge/linux-64/r-colorspace-2.1_0-r42h57805ef_1.conda#68ec691b072953b496ca1a4d83b2bc3d +https://conda.anaconda.org/conda-forge/linux-64/r-contfrac-1.1_12-r42h57805ef_1004.conda#bc308888aa4b4fb4e37a7a17fdc911c9 +https://conda.anaconda.org/conda-forge/noarch/r-cpp11-0.4.7-r42hc72bb7e_0.conda#941d7bcf2b94a682419ea1fbf6789d1f +https://conda.anaconda.org/conda-forge/noarch/r-crayon-1.5.3-r42hc72bb7e_0.conda#4a74a6114bbea1ad8d488e99b83df3da +https://conda.anaconda.org/conda-forge/linux-64/r-desolve-1.40-r42hd9ac46e_0.conda#7232f8b5707fc9739cb2f8fdc5b4b64d +https://conda.anaconda.org/conda-forge/linux-64/r-digest-0.6.36-r42ha18555a_0.conda#332551d9a37018826d528cf16701bd2b +https://conda.anaconda.org/conda-forge/noarch/r-docopt-0.7.1-r42hc72bb7e_3.conda#99be998b67c40ef6eb1a5af90e307c1d +https://conda.anaconda.org/conda-forge/linux-64/r-dotcall64-1.1_1-r42h61816a4_0.conda#d83332ff8d9912151d9a4b4972fd1da0 
+https://conda.anaconda.org/conda-forge/noarch/r-evaluate-0.24.0-r42hc72bb7e_0.conda#c2a50c427d0febc367122a875239e771 +https://conda.anaconda.org/conda-forge/linux-64/r-fansi-1.0.6-r42h57805ef_0.conda#c12524190662098e2e8a245a3d1bc7dc +https://conda.anaconda.org/conda-forge/linux-64/r-farver-2.1.2-r42ha18555a_0.conda#475d7bcc6de3c5851b112675eb55f497 +https://conda.anaconda.org/conda-forge/noarch/r-functional-0.6-r42ha770c72_1004.conda#9e27c34589b883accd340d651bdeaa02 +https://conda.anaconda.org/conda-forge/noarch/r-generics-0.1.3-r42hc72bb7e_2.conda#c492355d73e184353c82b62f5087a601 +https://conda.anaconda.org/conda-forge/noarch/r-geomapdata-2.0_2-r42hc72bb7e_0.conda#799a671bad7a89ac1d9da5cb98f75367 +https://conda.anaconda.org/conda-forge/linux-64/r-git2r-0.30.1-r42hf72769b_1.tar.bz2#f64adeea481006f0cb22bdcc066680df +https://conda.anaconda.org/conda-forge/linux-64/r-glue-1.7.0-r42h57805ef_0.conda#eab803a28d66337ae3732b04c5f5604f +https://conda.anaconda.org/conda-forge/linux-64/r-goftest-1.2_3-r42h57805ef_2.conda#4210e40893bbac7533714429ac4d0fe9 +https://conda.anaconda.org/conda-forge/linux-64/r-isoband-0.2.7-r42ha503ecb_2.conda#44979df954a15195470f336cd18b5eb6 +https://conda.anaconda.org/conda-forge/noarch/r-iterators-1.0.14-r42hc72bb7e_2.conda#616ab7b008326d3d76d59ba35b3fb592 +https://conda.anaconda.org/conda-forge/linux-64/r-jsonlite-1.8.8-r42h57805ef_0.conda#d0b27ba963de139270a7b53f897afdf6 +https://conda.anaconda.org/conda-forge/noarch/r-labeling-0.4.3-r42hc72bb7e_0.conda#b9b940011dd81d8b60859fcd0d9775f4 +https://conda.anaconda.org/conda-forge/linux-64/r-lattice-0.22_6-r42h57805ef_0.conda#93cee3961cc5277443a3e437f6991010 +https://conda.anaconda.org/conda-forge/linux-64/r-lazyeval-0.2.2-r42h57805ef_4.conda#109112b1c26d932414daa139a45d3a69 +https://conda.anaconda.org/conda-forge/linux-64/r-lmom-3.0-r42h61816a4_0.conda#0cffcf07f72a3be278b236e3b2f451c9 +https://conda.anaconda.org/conda-forge/noarch/r-logging-0.10_108-r42ha770c72_4.conda#d9980750f18496909aa8327037a43f8b +https://conda.anaconda.org/conda-forge/linux-64/r-magrittr-2.0.3-r42h57805ef_2.conda#ea3b13247660dd534a745a26f8d02365 +https://conda.anaconda.org/conda-forge/linux-64/r-maps-3.4.2-r42h57805ef_0.conda#a0367e4720045d5d17cb841a415ada1e +https://conda.anaconda.org/conda-forge/linux-64/r-mass-7.3_60.0.1-r42h57805ef_0.conda#0427fa6c4da6a4b2e43d8dfd022e933b +https://conda.anaconda.org/conda-forge/linux-64/r-mba-0.1_0-r42ha503ecb_1.conda#ab0ffee07ebd556b0e0119017439218a +https://conda.anaconda.org/conda-forge/noarch/r-nbclust-3.0.1-r42hc72bb7e_2.conda#fffd3a5ced3a6949fe7a20af1ff4b2c6 +https://conda.anaconda.org/conda-forge/linux-64/r-ncdf4-1.22-r42h5647f33_0.conda#d23e6cd8fe41079eb1421b6a6d1f1c67 +https://conda.anaconda.org/conda-forge/linux-64/r-pcict-0.5_4.4-r42h57805ef_1.conda#6e5770da5c174a2617096cbc2b8d96f4 +https://conda.anaconda.org/conda-forge/noarch/r-pkgconfig-2.0.3-r42hc72bb7e_3.conda#469b66f84a5d234689b423c9821b188c +https://conda.anaconda.org/conda-forge/linux-64/r-ps-1.7.6-r42h57805ef_0.conda#3a592c79e0fade3a0c3574696fa143a3 +https://conda.anaconda.org/conda-forge/noarch/r-r.methodss3-1.8.2-r42hc72bb7e_2.conda#305fe9f97f7598d9722c76d6be7bf794 +https://conda.anaconda.org/conda-forge/noarch/r-r6-2.5.1-r42hc72bb7e_2.conda#1473a12b55128f8ac776ae5595a4d0cb +https://conda.anaconda.org/conda-forge/noarch/r-rcolorbrewer-1.1_3-r42h785f33e_2.conda#b7b475c73493f70cbbb9d7213b94aed1 +https://conda.anaconda.org/conda-forge/linux-64/r-rcpp-1.0.12-r42h7df8631_0.conda#096448d673973c0e45b9d803da251971 
+https://conda.anaconda.org/conda-forge/noarch/r-remotes-2.5.0-r42hc72bb7e_0.conda#c595028f27588c6ff242fcb0dab79363 +https://conda.anaconda.org/conda-forge/linux-64/r-rlang-1.1.4-r42ha18555a_0.conda#ab6364a17b32268b82c46f09695a9cc9 +https://conda.anaconda.org/conda-forge/noarch/r-rpmg-2.2_7-r42hc72bb7e_0.conda#9e34ca8c73b895781e13b1d399105f35 +https://conda.anaconda.org/conda-forge/noarch/r-rprojroot-2.0.4-r42hc72bb7e_0.conda#c2bb0aa15018f8d9a4bc7b9e459dc94f +https://conda.anaconda.org/conda-forge/noarch/r-snow-0.4_4-r42hc72bb7e_2.conda#97cc50b630391cbc89ea70425ebb6ade +https://conda.anaconda.org/conda-forge/linux-64/r-udunits2-0.13.2.1-r42h57805ef_3.conda#56d551dc25582293fed533026356a79e +https://conda.anaconda.org/conda-forge/linux-64/r-utf8-1.2.4-r42h57805ef_0.conda#1da2e3bcbf75c6ddc3466941d88ff93f +https://conda.anaconda.org/conda-forge/linux-64/r-uuid-1.2_0-r42h57805ef_0.conda#f7585e68687b274880bbd68f34c0524d +https://conda.anaconda.org/conda-forge/noarch/r-viridislite-0.4.2-r42hc72bb7e_1.conda#e7a6483f639fb958747100bd17550ed6 +https://conda.anaconda.org/conda-forge/noarch/r-withr-3.0.0-r42hc72bb7e_0.conda#972eaab581c25fff9ea6986aa6ab281a +https://conda.anaconda.org/conda-forge/linux-64/r-xfun-0.45-r42ha18555a_0.conda#9e13c392bfcee4a261e4b513d6d862e7 +https://conda.anaconda.org/conda-forge/noarch/r-xmlparsedata-1.0.5-r42hc72bb7e_2.conda#2f3614450b54f222c1eff786ec2a45ec +https://conda.anaconda.org/conda-forge/linux-64/r-yaml-2.3.8-r42h57805ef_0.conda#97f60a93ca12f4fdd5f44049dcee4345 +https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda#5ede4753180c7a550a443c430dc8ab52 +https://conda.anaconda.org/conda-forge/linux-64/scikit-image-0.24.0-py312hf9745cd_3.conda#3612f99c589d51c363c8b90c0bcf3a18 +https://conda.anaconda.org/conda-forge/noarch/seaborn-base-0.13.2-pyhd8ed1ab_2.conda#b713b116feaf98acdba93ad4d7f90ca1 +https://conda.anaconda.org/conda-forge/linux-64/tiledb-2.26.0-h86fa3b2_0.conda#061175d9d4c046a1cf8bffe95a359fab +https://conda.anaconda.org/conda-forge/linux-64/cdo-2.4.1-h9fe33b1_1.conda#a326dab3d2a1a8e32c2a6f792fac3161 +https://conda.anaconda.org/conda-forge/noarch/cfgrib-0.9.14.1-pyhd8ed1ab_0.conda#1870fe8c9bd8967429e227be28ab94d2 +https://conda.anaconda.org/conda-forge/noarch/chart-studio-1.1.0-pyh9f0ad1d_0.tar.bz2#acd9a12a35e5a0221bdf39eb6e4811dc +https://conda.anaconda.org/conda-forge/noarch/dask-jobqueue-0.9.0-pyhd8ed1ab_0.conda#a201de7d36907f2355426e019168d337 +https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 +https://conda.anaconda.org/conda-forge/linux-64/imagemagick-7.1.1_39-imagemagick_hcfc5581_1.conda#1144fe07cf76921ec664b868453027d3 +https://conda.anaconda.org/conda-forge/noarch/iris-3.10.0-pyha770c72_2.conda#5d8984ceb5fdf85110ca7108114ecc18 +https://conda.anaconda.org/conda-forge/linux-64/libarrow-17.0.0-h8d2e343_13_cpu.conda#dc379f362829d5df5ce6722565110029 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-kea-3.9.2-h1df15e4_7.conda#c693e703649051ee9db0fabd4fcd0483 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-netcdf-3.9.2-hf2d2f32_7.conda#4015ef020928219acc0b5c9edbce8d30 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-tiledb-3.9.2-h4a3bace_2.conda#c3fac34ecba2fcf9d5d31a03b975d5a1 +https://conda.anaconda.org/conda-forge/noarch/lime-0.2.0.1-pyhd8ed1ab_1.tar.bz2#789ce01416721a5533fb74aa4361fd13 +https://conda.anaconda.org/conda-forge/noarch/multiurl-0.3.2-pyhd8ed1ab_0.conda#9b6cf42ef472b332970282ec87d2e5d4 
+https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.0-pyhd8ed1ab_0.conda#15b51397e0fe8ea7d7da60d83eb76ebc +https://conda.anaconda.org/conda-forge/linux-64/nco-5.2.8-hf7c1f58_0.conda#6cd18a9c6b8269b0cd101ba9cc3d02ab +https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda#8dab97d8a9616e07d779782995710aed +https://conda.anaconda.org/conda-forge/noarch/prospector-1.12.1-pyhd8ed1ab_0.conda#8621ba9cf057da26d371b87cd2264259 +https://conda.anaconda.org/conda-forge/linux-64/psy-simple-1.5.1-py312h7900ff3_0.conda#683ec8787a523de54b02c885e2c2aefa +https://conda.anaconda.org/conda-forge/linux-64/pydot-3.0.2-py312h7900ff3_0.conda#a972ba77217a2cac592c41dd3cc56dfd +https://conda.anaconda.org/conda-forge/noarch/pyroma-4.2-pyhd8ed1ab_0.conda#fe2aca9a5d4cb08105aefc451ef96950 +https://conda.anaconda.org/conda-forge/linux-64/r-bigmemory-4.6.4-r42ha503ecb_0.conda#12b6fa8fe80a6494a948c6ea2f34340d +https://conda.anaconda.org/conda-forge/linux-64/r-checkmate-2.3.1-r42h57805ef_0.conda#9febce7369c72d991e2399d7d28f3390 +https://conda.anaconda.org/conda-forge/linux-64/r-climdex.pcic-1.1_11-r42ha503ecb_2.conda#cff1d95fe315f109a1f01a7ef112fdd6 +https://conda.anaconda.org/conda-forge/noarch/r-desc-1.4.3-r42hc72bb7e_0.conda#8c535581a9a3a1e2a0f5ef6d7e4d6a7f +https://conda.anaconda.org/conda-forge/linux-64/r-ellipsis-0.3.2-r42h57805ef_2.conda#1673236a1895ca5cce15c888435ad2f9 +https://conda.anaconda.org/conda-forge/noarch/r-elliptic-1.4_0-r42hc72bb7e_4.conda#8388c500125813b91332f9d3720f3471 +https://conda.anaconda.org/conda-forge/noarch/r-foreach-1.5.2-r42hc72bb7e_2.conda#16f5453742f10816f2964a2b05bc20d3 +https://conda.anaconda.org/conda-forge/noarch/r-highr-0.11-r42hc72bb7e_0.conda#2edda9394885683f7cad76673eeb2025 +https://conda.anaconda.org/conda-forge/noarch/r-lifecycle-1.0.4-r42hc72bb7e_0.conda#7d00a412d44005a8714c192589143b81 +https://conda.anaconda.org/conda-forge/linux-64/r-mapproj-1.2.11-r42h57805ef_1.conda#af943f7f8db88fddf340980fa53ed3e5 +https://conda.anaconda.org/conda-forge/linux-64/r-matrix-1.6_5-r42h316c678_0.conda#4f52f9c56146c8692503a7d2057ba7ba +https://conda.anaconda.org/conda-forge/noarch/r-munsell-0.5.1-r42hc72bb7e_0.conda#e7036a0b18f2ee6b108fe12b908b18f3 +https://conda.anaconda.org/conda-forge/noarch/r-ncdf4.helpers-0.3_6-r42hc72bb7e_2.conda#c7180e87be344175853f1ebfcdce04d0 +https://conda.anaconda.org/conda-forge/linux-64/r-nlme-3.1_165-r42hbcb9c34_0.conda#a83fa459c53c4674ee394b80b7b2fbd5 +https://conda.anaconda.org/conda-forge/linux-64/r-plyr-1.8.9-r42ha503ecb_0.conda#9b63113ec0c6f1a69c53f77d8f8fa4dc +https://conda.anaconda.org/conda-forge/linux-64/r-processx-3.8.4-r42h57805ef_0.conda#97f10f23ade292cb369d5635b119befa +https://conda.anaconda.org/conda-forge/noarch/r-r.oo-1.26.0-r42hc72bb7e_0.conda#8d5929eebbe7d431fa3f989874b090eb +https://conda.anaconda.org/conda-forge/linux-64/r-rcpparmadillo-0.12.8.4.0-r42h58a4165_0.conda#49973fea110c814e316d8277bb08e516 +https://conda.anaconda.org/conda-forge/noarch/r-rex-1.2.1-r42hc72bb7e_2.conda#b45f1b94fd106c19eb79303b24dc9a7c +https://conda.anaconda.org/conda-forge/linux-64/r-sp-2.1_4-r42hb1dbf0f_0.conda#681bb0a7290d86f9f8bf8dc816f114c0 +https://conda.anaconda.org/conda-forge/linux-64/r-spam-2.10_0-r42h9f9f741_0.conda#159d8ab59a2777a26a739f8090b5a80c +https://conda.anaconda.org/conda-forge/linux-64/r-timechange-0.3.0-r42ha503ecb_0.conda#3d62906e9c1fecf61370a3ad6e808e5e +https://conda.anaconda.org/conda-forge/linux-64/r-xml2-1.3.6-r42hbfba7a4_1.conda#5c3d7a89a2d5e1c0885f92d1aa6fde30 
+https://conda.anaconda.org/conda-forge/linux-64/r-zoo-1.8_12-r42h57805ef_1.conda#5367d265c0c9c151dea85f1ccb515ec1 +https://conda.anaconda.org/conda-forge/noarch/requests-cache-1.2.1-pyhd8ed1ab_0.conda#c6089540fed51a9a829aa19590fa925b +https://conda.anaconda.org/conda-forge/noarch/seaborn-0.13.2-hd8ed1ab_2.conda#a79d8797f62715255308d92d3a91ef2e +https://conda.anaconda.org/conda-forge/noarch/xgboost-2.1.2-cuda118_pyh256f914_0.conda#2dcf3e60ef65fd4cb95048f2491f6a89 +https://conda.anaconda.org/conda-forge/noarch/cads-api-client-1.5.2-pyhd8ed1ab_0.conda#e7005effa79f1493a51404873d6eb5a0 +https://conda.anaconda.org/conda-forge/noarch/esgf-pyclient-0.3.1-pyhd8ed1ab_4.conda#f481c17430f801e68ee3b57cc30ecd2e +https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-17.0.0-h5888daf_13_cpu.conda#b654d072b8d5da807495e49b28a0b884 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-3.9.2-ha770c72_7.conda#63779711c7afd4fcf9cea67538baa67a +https://conda.anaconda.org/conda-forge/linux-64/libparquet-17.0.0-h39682fd_13_cpu.conda#49c60a8dc089d8127b9368e9eb6c1a77 +https://conda.anaconda.org/conda-forge/noarch/mapgenerator-1.0.7-pyhd8ed1ab_0.conda#d18db96ef2a920b0ecefe30282b0aecf +https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.4-pyhd8ed1ab_1.conda#e2d2abb421c13456a9a9f80272fdf543 +https://conda.anaconda.org/conda-forge/noarch/prov-2.0.0-pyhd3deb0d_0.tar.bz2#aa9b3ad140f6c0668c646f32e20ccf82 +https://conda.anaconda.org/conda-forge/linux-64/psy-maps-1.5.0-py312h7900ff3_1.conda#080bc8f34a9cb0ab81ae0369fd43b7ab +https://conda.anaconda.org/conda-forge/linux-64/psy-reg-1.5.0-py312h7900ff3_1.conda#ea719cfcc2e5b815b137b7082ece8aeb +https://conda.anaconda.org/conda-forge/noarch/py-cordex-0.8.0-pyhd8ed1ab_0.conda#fba377622e74ee0bbeb8ccae9fa593d3 +https://conda.anaconda.org/conda-forge/linux-64/pyarrow-core-17.0.0-py312h01725c0_2_cpu.conda#add603bfa43d9bf3f06783f780e1a817 +https://conda.anaconda.org/conda-forge/noarch/python-cdo-1.6.0-pyhd8ed1ab_0.conda#3fd1a0b063c1fbbe4b7bd5a5a7601e84 +https://conda.anaconda.org/conda-forge/linux-64/r-akima-0.6_3.4-r42h61816a4_2.conda#8536251313f441c4d70ff11ad976d294 +https://conda.anaconda.org/conda-forge/noarch/r-callr-3.7.6-r42hc72bb7e_0.conda#4fb1765d6dc531936db81af3f6be316a +https://conda.anaconda.org/conda-forge/noarch/r-doparallel-1.0.17-r42hc72bb7e_2.conda#1cddfbaade4416f0234670391bb31ba2 +https://conda.anaconda.org/conda-forge/noarch/r-gtable-0.3.5-r42hc72bb7e_0.conda#b5cff9c0564c9fcd8b62632430a0cee5 +https://conda.anaconda.org/conda-forge/noarch/r-hypergeo-1.2_13-r42hc72bb7e_1004.conda#7a207a992c606168044d13dcffd80ad4 +https://conda.anaconda.org/conda-forge/noarch/r-knitr-1.47-r42hc72bb7e_0.conda#0a20a2f6546bc0cde246c53a92a7964d +https://conda.anaconda.org/conda-forge/linux-64/r-lmoments-1.3_1-r42h7ce84a7_5.conda#e727f948785d9aad6426e912e135f935 +https://conda.anaconda.org/conda-forge/linux-64/r-lubridate-1.9.3-r42h57805ef_0.conda#01fd816e4231ae7cf2833e5661a92611 +https://conda.anaconda.org/conda-forge/linux-64/r-mgcv-1.9_1-r42h316c678_0.conda#5c3d738118f5948f6cc29ccb63d6e2ff +https://conda.anaconda.org/conda-forge/noarch/r-r.utils-2.12.3-r42hc72bb7e_0.conda#81f505dec8850e227d9b2a7e88fa505f +https://conda.anaconda.org/conda-forge/linux-64/r-reshape-0.8.9-r42hc72bb7e_2.conda#17e75917161bf824248cc54a412b4394 +https://conda.anaconda.org/conda-forge/noarch/r-scales-1.3.0-r42hc72bb7e_0.conda#0af4021fe6d0047bbf7a34bf21c50bdd +https://conda.anaconda.org/conda-forge/linux-64/r-specsverification-0.5_3-r42h7525677_2.tar.bz2#1521b8a303852af0496245e368d3c61c 
+https://conda.anaconda.org/conda-forge/linux-64/r-splancs-2.01_45-r42hbcb9c34_0.conda#bcd96dc088f54514a54d57e6b8ed51b6 +https://conda.anaconda.org/conda-forge/linux-64/r-vctrs-0.6.5-r42ha503ecb_0.conda#5689030c60302fb5bb7a48b54c11dbe8 +https://conda.anaconda.org/conda-forge/noarch/xesmf-0.8.7-pyhd8ed1ab_0.conda#42301f78a4c6d2500f891b9723160d5c +https://conda.anaconda.org/conda-forge/noarch/cdsapi-0.7.4-pyhd8ed1ab_0.conda#67a29b663023b8c0e3d8a73013ea3e23 +https://conda.anaconda.org/conda-forge/linux-64/fiona-1.10.1-py312h5aa26c2_1.conda#4a30f4277a1894928a7057d0e14c1c95 +https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-17.0.0-h5888daf_13_cpu.conda#cd2c36e8865b158b82f61c6aac28b7e1 +https://conda.anaconda.org/conda-forge/noarch/nbconvert-pandoc-7.16.4-hd8ed1ab_1.conda#37cec2cf68f4c09563d8bc833791096b +https://conda.anaconda.org/conda-forge/linux-64/ncl-6.6.2-h7cb714c_54.conda#7363202c15302898deb49e82ca3e5f58 +https://conda.anaconda.org/conda-forge/noarch/r-cyclocomp-1.1.1-r42hc72bb7e_0.conda#6bd41a85dc43541400311eca03d4e2d4 +https://conda.anaconda.org/conda-forge/noarch/r-gridextra-2.3-r42hc72bb7e_1005.conda#da116b29105a8d48571975a185e9bb94 +https://conda.anaconda.org/conda-forge/noarch/r-lmomco-2.5.1-r42hc72bb7e_0.conda#6efbdfe5d41b3ef5652be1ea2e0a6e3c +https://conda.anaconda.org/conda-forge/noarch/r-multiapply-2.1.4-r42hc72bb7e_1.conda#7aa5a8ca336904418caeb7395fd867e6 +https://conda.anaconda.org/conda-forge/noarch/r-pillar-1.9.0-r42hc72bb7e_1.conda#07d5ce8e710897745f14c951ff947cdd +https://conda.anaconda.org/conda-forge/linux-64/r-purrr-1.0.2-r42h57805ef_0.conda#7985dada48799b7814ca069794d0b1a3 +https://conda.anaconda.org/conda-forge/noarch/r-r.cache-0.16.0-r42hc72bb7e_2.conda#34daac4e8faee056f15abdee858fc721 +https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.3.11-py312hd177ed6_1.conda#246c5f31c607ecfe1ece1e8cc6ecc9c5 +https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-17.0.0-hf54134d_13_cpu.conda#46f41533959eee8826c09e55976b8c06 +https://conda.anaconda.org/conda-forge/noarch/nbconvert-7.16.4-hd8ed1ab_1.conda#ab83e3b9ca2b111d8f332e9dc8b2170f +https://conda.anaconda.org/conda-forge/noarch/r-climprojdiags-0.3.3-r42hc72bb7e_0.conda#f34d40a3f0f9160fdd2bccaae8e185d1 +https://conda.anaconda.org/conda-forge/noarch/r-lintr-3.1.2-r42hc72bb7e_0.conda#ef49cc606b94a9d5f30b9c48f5f68848 +https://conda.anaconda.org/conda-forge/linux-64/r-tibble-3.2.1-r42h57805ef_2.conda#b1278a5148c9e52679bb72112770cdc3 +https://conda.anaconda.org/conda-forge/linux-64/pyarrow-17.0.0-py312h9cebb41_2.conda#5f7d505626cb057e1320bbd46dd02ef2 +https://conda.anaconda.org/conda-forge/noarch/r-ggplot2-3.5.1-r42hc72bb7e_0.conda#77cc0254e0dc92e5e7791ce20a170f74 +https://conda.anaconda.org/conda-forge/noarch/r-rematch2-2.1.2-r42hc72bb7e_3.conda#5ccfee6f3b94e6b247c7e1929b24f1cc +https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.17-pyhd8ed1ab_0.conda#4f75a3a76e9f693fc33be59485f46fcf +https://conda.anaconda.org/conda-forge/noarch/r-styler-1.10.3-r42hc72bb7e_0.conda#1b2b8fa85a9d0556773abac4763d8ef9 +https://conda.anaconda.org/conda-forge/linux-64/r-tlmoments-0.7.5.3-r42ha503ecb_1.conda#6aa1414e06dfffc39d3b5ca78b60b377 +https://conda.anaconda.org/conda-forge/noarch/r-viridis-0.6.5-r42hc72bb7e_0.conda#959f69b6dfd4b620a15489975fa27670 +https://conda.anaconda.org/conda-forge/noarch/dask-2024.11.0-pyhd8ed1ab_0.conda#9a25bf7e2a910e85209218896f2adeb9 +https://conda.anaconda.org/conda-forge/linux-64/r-fields-15.2-r42h61816a4_0.conda#d84fe2f9e893e92089370b195e2263a0 
+https://conda.anaconda.org/conda-forge/noarch/r-spei-1.8.1-r42hc72bb7e_1.conda#7fe060235dac0fc0b3d387f98e79d128 +https://conda.anaconda.org/conda-forge/noarch/iris-esmf-regrid-0.11.0-pyhd8ed1ab_1.conda#86286b197e33e3b034416c18ba0f574c +https://conda.anaconda.org/conda-forge/linux-64/r-geomap-2.5_0-r42h57805ef_2.conda#020534c6abdee4f1253c221e926a5341 +https://conda.anaconda.org/conda-forge/noarch/esmvalcore-2.11.0-pyhd8ed1ab_0.conda#ae2c9a927475f5519d0164c542cde378 +https://conda.anaconda.org/conda-forge/noarch/r-s2dverification-2.10.3-r42hc72bb7e_2.conda#8079a86a913155fe2589ec0b76dc9f5e +https://conda.anaconda.org/conda-forge/noarch/autodocsumm-0.2.14-pyhd8ed1ab_0.conda#351a11ac1215eb4f6c5b82e30070277a +https://conda.anaconda.org/conda-forge/noarch/nbsphinx-0.9.5-pyhd8ed1ab_0.conda#b808b8a0494c5cca76200c73e260a060 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.16.0-pyhd8ed1ab_0.conda#344261b0e77f5d2faaffb4eac225eeb7 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_0.conda#9075bd8c033f0257122300db914e49c9 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_0.conda#b3bcc38c471ebb738854f52a36059b48 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_0.conda#e25640d692c02e8acfff0372f547e940 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_0.conda#d6e5ea5fe00164ac6c2dcc5d76a42192 +https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_0.conda#05706dd5a145a9c91861495cd435409a +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_0.conda#e507335cb4ca9cff4c3d0fa9cdab255e diff --git a/conda_build_config.yaml b/conda_build_config.yaml deleted file mode 100644 index 8bd2e57f24..0000000000 --- a/conda_build_config.yaml +++ /dev/null @@ -1,6 +0,0 @@ -# Conda build configuration ---- - -# Python versions -python: - - 3.6 diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.emergent_constraints.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.emergent_constraints.rst new file mode 100644 index 0000000000..24c5031788 --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.emergent_constraints.rst @@ -0,0 +1,34 @@ +.. _api.esmvaltool.diag_scripts.emergent_constraints: + +Emergent constraints diagnostics +================================ + +This module provides various tools to evaluate emergent constraints for +arbitrary input variables. + + +Examples +-------- + +* :ref:`recipe_ecs_scatter` +* :ref:`recipes_cox18nature` +* :ref:`recipes_schlund20esd` + + +Diagnostic scripts +------------------ +.. toctree:: + :maxdepth: 1 + + esmvaltool.diag_scripts.emergent_constraints/cox18nature + esmvaltool.diag_scripts.emergent_constraints/ecs_scatter + esmvaltool.diag_scripts.emergent_constraints/multiple_constraints + esmvaltool.diag_scripts.emergent_constraints/single_constraint + + +Auxiliary scripts +----------------- +.. toctree:: + :maxdepth: 1 + + esmvaltool.diag_scripts.emergent_constraints/init diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.emergent_constraints/cox18nature.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.emergent_constraints/cox18nature.rst new file mode 100644 index 0000000000..d513f8d310 --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.emergent_constraints/cox18nature.rst @@ -0,0 +1,9 @@ +.. 
_api.esmvaltool.diag_scripts.emergent_constraints.cox18nature: + +Emergent constraint on ECS from global temperature variability +============================================================== + +.. automodule:: esmvaltool.diag_scripts.emergent_constraints.cox18nature + :no-members: + :no-inherited-members: + :no-show-inheritance: diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.emergent_constraints/ecs_scatter.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.emergent_constraints/ecs_scatter.rst new file mode 100644 index 0000000000..da7e5dcf9c --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.emergent_constraints/ecs_scatter.rst @@ -0,0 +1,9 @@ +.. _api.esmvaltool.diag_scripts.emergent_constraints.ecs_scatter: + +Calculation of emergent constraints on ECS +========================================== + +.. automodule:: esmvaltool.diag_scripts.emergent_constraints.ecs_scatter + :no-members: + :no-inherited-members: + :no-show-inheritance: diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.emergent_constraints/init.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.emergent_constraints/init.rst new file mode 100644 index 0000000000..7e5b86e816 --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.emergent_constraints/init.rst @@ -0,0 +1,6 @@ +.. _api.esmvaltool.diag_scripts.emergent_constraints.init: + +Auxiliary functions for emergent constraints scripts +==================================================== + +.. automodule:: esmvaltool.diag_scripts.emergent_constraints diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.emergent_constraints/multiple_constraints.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.emergent_constraints/multiple_constraints.rst new file mode 100644 index 0000000000..e007d4b0df --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.emergent_constraints/multiple_constraints.rst @@ -0,0 +1,9 @@ +.. _api.esmvaltool.diag_scripts.emergent_constraints.multiple_constraints: + +Evaluate multiple emergent constraints simultaneously +===================================================== + +.. automodule:: esmvaltool.diag_scripts.emergent_constraints.multiple_constraints + :no-members: + :no-inherited-members: + :no-show-inheritance: diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.emergent_constraints/single_constraint.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.emergent_constraints/single_constraint.rst new file mode 100644 index 0000000000..9fd4741181 --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.emergent_constraints/single_constraint.rst @@ -0,0 +1,9 @@ +.. _api.esmvaltool.diag_scripts.emergent_constraints.single_constraint: + +Evaluate single emergent constraint +=================================== + +.. automodule:: esmvaltool.diag_scripts.emergent_constraints.single_constraint + :no-members: + :no-inherited-members: + :no-show-inheritance: diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr.rst new file mode 100644 index 0000000000..65959a659f --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr.rst @@ -0,0 +1,63 @@ +.. _api.esmvaltool.diag_scripts.mlr: + +Machine Learning Regression (MLR) diagnostics +============================================= + +This module provides various tools to create and evaluate MLR models for +arbitrary input variables. 
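+
+As an illustration of the underlying idea, the following is a minimal,
+self-contained sketch written directly against scikit-learn and NumPy. It is
+not an example of this module's own API, and all variable names are
+hypothetical: an MLR-based constraint boils down to fitting a regressor
+across a multi-model ensemble and applying it to observed predictors.
+
+.. code-block:: python
+
+    import numpy as np
+    from sklearn.ensemble import GradientBoostingRegressor
+
+    # Hypothetical training data: one row per climate model, one column per
+    # process-based predictor diagnosed from the historical period.
+    model_predictors = np.random.rand(30, 5)
+    # Hypothetical target: e.g. each model's projected change in a variable.
+    model_target = np.random.rand(30)
+
+    # Learn the statistical relationship across the multi-model ensemble.
+    gbrt = GradientBoostingRegressor(n_estimators=100, max_depth=3)
+    gbrt.fit(model_predictors, model_target)
+
+    # Constrain the projection by feeding in observed predictor values.
+    obs_predictors = np.random.rand(1, 5)
+    constrained_estimate = gbrt.predict(obs_predictors)
+
+In the real diagnostics, predictors and targets are of course read from the
+preprocessed input files rather than generated randomly.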
+ + +Examples +-------- + +* :ref:`recipes_schlund20jgr`: Use Gradient Boosted Regression Tree (GBRT) + algorithm to constrain projected Gross Primary Production (GPP) in RCP 8.5 + scenario using observations of process-based predictors. + + +Diagnostic scripts +------------------ +.. toctree:: + :maxdepth: 1 + + esmvaltool.diag_scripts.mlr/evaluate_residuals + esmvaltool.diag_scripts.mlr/main + esmvaltool.diag_scripts.mlr/mmm + esmvaltool.diag_scripts.mlr/plot + esmvaltool.diag_scripts.mlr/postprocess + esmvaltool.diag_scripts.mlr/preprocess + esmvaltool.diag_scripts.mlr/rescale_with_emergent_constraint + + +Auxiliary scripts +----------------- +.. toctree:: + :maxdepth: 1 + + esmvaltool.diag_scripts.mlr/init + esmvaltool.diag_scripts.mlr/custom_sklearn + esmvaltool.diag_scripts.mlr/models + esmvaltool.diag_scripts.mlr/models.gbr_base + esmvaltool.diag_scripts.mlr/models.linear_base + + +.. _availableMLRModels: + +Available MLR models +-------------------- +.. toctree:: + :maxdepth: 1 + + esmvaltool.diag_scripts.mlr/models.gbr_sklearn + esmvaltool.diag_scripts.mlr/models.gbr_xgboost + esmvaltool.diag_scripts.mlr/models.gpr_sklearn + esmvaltool.diag_scripts.mlr/models.huber + esmvaltool.diag_scripts.mlr/models.krr + esmvaltool.diag_scripts.mlr/models.lasso + esmvaltool.diag_scripts.mlr/models.lasso_cv + esmvaltool.diag_scripts.mlr/models.lasso_lars_cv + esmvaltool.diag_scripts.mlr/models.linear + esmvaltool.diag_scripts.mlr/models.rfr + esmvaltool.diag_scripts.mlr/models.ridge + esmvaltool.diag_scripts.mlr/models.ridge_cv + esmvaltool.diag_scripts.mlr/models.svr diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/custom_sklearn.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/custom_sklearn.rst new file mode 100644 index 0000000000..48078b9a39 --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/custom_sklearn.rst @@ -0,0 +1,10 @@ +.. _api.esmvaltool.diag_scripts.mlr.custom_sklearn: + +Custom extensions of sklearn functionalities +============================================ + +.. automodule:: esmvaltool.diag_scripts.mlr.custom_sklearn + :no-autosummary: +.. + enabling autosummary with autodocsumm 0.2.8 gives a warning message: + WARNING: autosummary: failed to import AdvancedPipeline.steps. diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/evaluate_residuals.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/evaluate_residuals.rst new file mode 100644 index 0000000000..72327819ff --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/evaluate_residuals.rst @@ -0,0 +1,9 @@ +.. _api.esmvaltool.diag_scripts.mlr.evaluate_residuals: + +Evaluate residuals +================== + +.. automodule:: esmvaltool.diag_scripts.mlr.evaluate_residuals + :no-members: + :no-inherited-members: + :no-show-inheritance: diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/init.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/init.rst new file mode 100644 index 0000000000..681fb512e2 --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/init.rst @@ -0,0 +1,6 @@ +.. _api.esmvaltool.diag_scripts.mlr.init: + +Auxiliary functions for MLR scripts +=================================== + +.. automodule:: esmvaltool.diag_scripts.mlr diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/main.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/main.rst new file mode 100644 index 0000000000..5dc174794d --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/main.rst @@ -0,0 +1,9 @@ +.. 
_api.esmvaltool.diag_scripts.mlr.main: + +MLR main diagnostic +=================== + +.. automodule:: esmvaltool.diag_scripts.mlr.main + :no-members: + :no-inherited-members: + :no-show-inheritance: diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/mmm.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/mmm.rst new file mode 100644 index 0000000000..466171a70e --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/mmm.rst @@ -0,0 +1,9 @@ +.. _api.esmvaltool.diag_scripts.mlr.mmm: + +Multi-model means (MMM) +======================= + +.. automodule:: esmvaltool.diag_scripts.mlr.mmm + :no-members: + :no-inherited-members: + :no-show-inheritance: diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.gbr_base.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.gbr_base.rst new file mode 100644 index 0000000000..62d5cd038f --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.gbr_base.rst @@ -0,0 +1,6 @@ +.. _api.esmvaltool.diag_scripts.mlr.models.gbr_base: + +Base class for Gradient Boosted Regression models +================================================= + +.. automodule:: esmvaltool.diag_scripts.mlr.models.gbr_base diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.gbr_sklearn.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.gbr_sklearn.rst new file mode 100644 index 0000000000..6a2be70b12 --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.gbr_sklearn.rst @@ -0,0 +1,6 @@ +.. _api.esmvaltool.diag_scripts.mlr.models.gbr_sklearn: + +Gradient Boosted Regression Trees (sklearn implementation) +========================================================== + +.. automodule:: esmvaltool.diag_scripts.mlr.models.gbr_sklearn diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.gbr_xgboost.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.gbr_xgboost.rst new file mode 100644 index 0000000000..0e1abef836 --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.gbr_xgboost.rst @@ -0,0 +1,7 @@ +.. _api.esmvaltool.diag_scripts.mlr.models.gbr_xgboost: + + +Gradient Boosted Regression Trees (xgboost implementation) +========================================================== + +.. automodule:: esmvaltool.diag_scripts.mlr.models.gbr_xgboost diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.gpr_sklearn.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.gpr_sklearn.rst new file mode 100644 index 0000000000..6d5253f031 --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.gpr_sklearn.rst @@ -0,0 +1,6 @@ +.. _api.esmvaltool.diag_scripts.mlr.models.gpr_sklearn: + +Gaussian Process Regression (sklearn implementation) +==================================================== + +.. automodule:: esmvaltool.diag_scripts.mlr.models.gpr_sklearn diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.huber.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.huber.rst new file mode 100644 index 0000000000..6617dc31fa --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.huber.rst @@ -0,0 +1,6 @@ +.. _api.esmvaltool.diag_scripts.mlr.models.huber: + +Huber Regression +================ + +.. 
automodule:: esmvaltool.diag_scripts.mlr.models.huber diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.krr.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.krr.rst new file mode 100644 index 0000000000..2ef3437e05 --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.krr.rst @@ -0,0 +1,6 @@ +.. _api.esmvaltool.diag_scripts.mlr.models.krr: + +Kernel Ridge Regression +======================= + +.. automodule:: esmvaltool.diag_scripts.mlr.models.krr diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.lasso.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.lasso.rst new file mode 100644 index 0000000000..594e1fb6f8 --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.lasso.rst @@ -0,0 +1,6 @@ +.. _api.esmvaltool.diag_scripts.mlr.models.lasso: + +LASSO Regression +================ + +.. automodule:: esmvaltool.diag_scripts.mlr.models.lasso diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.lasso_cv.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.lasso_cv.rst new file mode 100644 index 0000000000..5613bcae9b --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.lasso_cv.rst @@ -0,0 +1,6 @@ +.. _api.esmvaltool.diag_scripts.mlr.models.lasso_cv: + +LASSO Regression with built-in CV +================================= + +.. automodule:: esmvaltool.diag_scripts.mlr.models.lasso_cv diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.lasso_lars_cv.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.lasso_lars_cv.rst new file mode 100644 index 0000000000..495c130c6c --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.lasso_lars_cv.rst @@ -0,0 +1,6 @@ +.. _api.esmvaltool.diag_scripts.mlr.models.lasso_lars_cv: + +LASSO Regression (using Least-angle Regression algorithm) with built-in CV +========================================================================== + +.. automodule:: esmvaltool.diag_scripts.mlr.models.lasso_lars_cv diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.linear.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.linear.rst new file mode 100644 index 0000000000..16afeeb7ae --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.linear.rst @@ -0,0 +1,6 @@ +.. _api.esmvaltool.diag_scripts.mlr.models.linear: + +Linear Regression +================= + +.. automodule:: esmvaltool.diag_scripts.mlr.models.linear diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.linear_base.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.linear_base.rst new file mode 100644 index 0000000000..a1b3513b9a --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.linear_base.rst @@ -0,0 +1,6 @@ +.. _api.esmvaltool.diag_scripts.mlr.models.linear_base: + +Base class for Linear models +============================ + +.. automodule:: esmvaltool.diag_scripts.mlr.models.linear_base diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.rfr.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.rfr.rst new file mode 100644 index 0000000000..4723ec4ffd --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.rfr.rst @@ -0,0 +1,6 @@ +.. _api.esmvaltool.diag_scripts.mlr.models.rfr: + +Random Forest Regression +======================== + +.. 
automodule:: esmvaltool.diag_scripts.mlr.models.rfr diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.ridge.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.ridge.rst new file mode 100644 index 0000000000..c9f2ca4baf --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.ridge.rst @@ -0,0 +1,6 @@ +.. _api.esmvaltool.diag_scripts.mlr.models.ridge: + +Ridge Regression +================ + +.. automodule:: esmvaltool.diag_scripts.mlr.models.ridge diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.ridge_cv.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.ridge_cv.rst new file mode 100644 index 0000000000..9a5ea0f378 --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.ridge_cv.rst @@ -0,0 +1,6 @@ +.. _api.esmvaltool.diag_scripts.mlr.models.ridge_cv: + +Ridge Regression with built-in CV +================================= + +.. automodule:: esmvaltool.diag_scripts.mlr.models.ridge_cv diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.rst new file mode 100644 index 0000000000..f3c0ca6969 --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.rst @@ -0,0 +1,6 @@ +.. _api.esmvaltool.diag_scripts.mlr.models: + +MLRModel base class +=================== + +.. automodule:: esmvaltool.diag_scripts.mlr.models diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.svr.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.svr.rst new file mode 100644 index 0000000000..5486707e59 --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/models.svr.rst @@ -0,0 +1,6 @@ +.. _api.esmvaltool.diag_scripts.mlr.models.svr: + +Support Vector Regression +========================= + +.. automodule:: esmvaltool.diag_scripts.mlr.models.svr diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/plot.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/plot.rst new file mode 100644 index 0000000000..c7f4b52338 --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/plot.rst @@ -0,0 +1,9 @@ +.. _api.esmvaltool.diag_scripts.mlr.plot: + +Plotting functionalities +======================== + +.. automodule:: esmvaltool.diag_scripts.mlr.plot + :no-members: + :no-inherited-members: + :no-show-inheritance: diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/postprocess.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/postprocess.rst new file mode 100644 index 0000000000..af23643706 --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/postprocess.rst @@ -0,0 +1,9 @@ +.. _api.esmvaltool.diag_scripts.mlr.postprocess: + +Postprocessing functionalities +============================== + +.. automodule:: esmvaltool.diag_scripts.mlr.postprocess + :no-members: + :no-inherited-members: + :no-show-inheritance: diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/preprocess.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/preprocess.rst new file mode 100644 index 0000000000..5990d2cd6c --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/preprocess.rst @@ -0,0 +1,9 @@ +.. _api.esmvaltool.diag_scripts.mlr.preprocess: + +Preprocessing functionalities +============================= + +.. 
automodule:: esmvaltool.diag_scripts.mlr.preprocess + :no-members: + :no-inherited-members: + :no-show-inheritance: diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/rescale_with_emergent_constraint.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/rescale_with_emergent_constraint.rst new file mode 100644 index 0000000000..f8e933825e --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.mlr/rescale_with_emergent_constraint.rst @@ -0,0 +1,9 @@ +.. _api.esmvaltool.diag_scripts.mlr.rescale_with_emergent_constraint: + +Rescale data with emergent constraints +======================================= + +.. automodule:: esmvaltool.diag_scripts.mlr.rescale_with_emergent_constraint + :no-members: + :no-inherited-members: + :no-show-inheritance: diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.monitor.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.monitor.rst new file mode 100644 index 0000000000..3b1e3e6548 --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.monitor.rst @@ -0,0 +1,32 @@ +.. _api.esmvaltool.diag_scripts.monitor: + +Monitor Diagnostic +================== + +This module provides various tools to monitor climate model simulations. +It can be used to plot arbitrary variables from arbitrary datasets. + + +Examples +-------- + +* :ref:`recipe_monitor` +* :ref:`recipe_model_evaluation` + + +Diagnostic scripts +------------------ +.. toctree:: + :maxdepth: 1 + + esmvaltool.diag_scripts.monitor/monitor + esmvaltool.diag_scripts.monitor/compute_eofs + esmvaltool.diag_scripts.monitor/multi_datasets + + +Base class for monitoring diagnostics +------------------------------------- +.. toctree:: + :maxdepth: 1 + + esmvaltool.diag_scripts.monitor/monitor_base diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.monitor/compute_eofs.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.monitor/compute_eofs.rst new file mode 100644 index 0000000000..0997ac1e7a --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.monitor/compute_eofs.rst @@ -0,0 +1,9 @@ +.. _api.esmvaltool.diag_scripts.monitor.compute_eofs: + +Monitoring diagnostic to plot EOF maps and associated PC timeseries +=================================================================== + +.. automodule:: esmvaltool.diag_scripts.monitor.compute_eofs + :no-members: + :no-inherited-members: + :no-show-inheritance: diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.monitor/monitor.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.monitor/monitor.rst new file mode 100644 index 0000000000..25d4a7da9b --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.monitor/monitor.rst @@ -0,0 +1,9 @@ +.. _api.esmvaltool.diag_scripts.monitor.monitor: + +Monitoring diagnostic to plot arbitrary preprocessor output +=========================================================== + +.. automodule:: esmvaltool.diag_scripts.monitor.monitor + :no-members: + :no-inherited-members: + :no-show-inheritance: diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.monitor/monitor_base.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.monitor/monitor_base.rst new file mode 100644 index 0000000000..0127a94963 --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.monitor/monitor_base.rst @@ -0,0 +1,6 @@ +.. _api.esmvaltool.diag_scripts.monitor.monitor_base: + +Base class for monitoring diagnostics +===================================== + +.. 
automodule:: esmvaltool.diag_scripts.monitor.monitor_base diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.monitor/multi_datasets.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.monitor/multi_datasets.rst new file mode 100644 index 0000000000..dab4f07989 --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.monitor/multi_datasets.rst @@ -0,0 +1,9 @@ +.. _api.esmvaltool.diag_scripts.monitor.multi_datasets: + +Monitoring diagnostic to show multiple datasets in one plot (incl. biases) +========================================================================== + +.. automodule:: esmvaltool.diag_scripts.monitor.multi_datasets + :no-members: + :no-inherited-members: + :no-show-inheritance: diff --git a/doc/sphinx/source/codedoc2/esmvaltool.diag_scripts.ocean.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.ocean.rst similarity index 100% rename from doc/sphinx/source/codedoc2/esmvaltool.diag_scripts.ocean.rst rename to doc/sphinx/source/api/esmvaltool.diag_scripts.ocean.rst diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.psyplot_diag.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.psyplot_diag.rst new file mode 100644 index 0000000000..2dc463cc9f --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.psyplot_diag.rst @@ -0,0 +1,9 @@ +.. _api.esmvaltool.diag_scripts.psyplot_diag: + +Psyplot Diagnostic +================== + +.. automodule:: esmvaltool.diag_scripts.psyplot_diag + :no-members: + :no-inherited-members: + :no-show-inheritance: diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.seaborn_diag.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.seaborn_diag.rst new file mode 100644 index 0000000000..d250d0aaaf --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.seaborn_diag.rst @@ -0,0 +1,9 @@ +.. _api.esmvaltool.diag_scripts.seaborn_diag: + +Seaborn Diagnostic +================== + +.. automodule:: esmvaltool.diag_scripts.seaborn_diag + :no-members: + :no-inherited-members: + :no-show-inheritance: diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.shared.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.shared.rst new file mode 100644 index 0000000000..fcf50cf17c --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.shared.rst @@ -0,0 +1,18 @@ +.. _api_shared: + +Shared diagnostic script code +============================= + +.. automodule:: esmvaltool.diag_scripts.shared + + +Iris helper functions +--------------------- + +.. automodule:: esmvaltool.diag_scripts.shared.iris_helpers + + +Plotting +-------- + +.. automodule:: esmvaltool.diag_scripts.shared.plot diff --git a/doc/sphinx/source/api/esmvaltool.rst b/doc/sphinx/source/api/esmvaltool.rst new file mode 100644 index 0000000000..b080b81ac8 --- /dev/null +++ b/doc/sphinx/source/api/esmvaltool.rst @@ -0,0 +1,31 @@ +.. _api: + +ESMValTool Code API Documentation +================================= + +ESMValTool is mostly used as a command line tool. However, it is also possible +to use (parts of) ESMValTool as a library. This section documents the public +API of ESMValTool. + + +Shared Diagnostic Code +---------------------- + +.. toctree:: + :maxdepth: 1 + + esmvaltool.diag_scripts.shared + + +Diagnostic Scripts +------------------ + +.. 
toctree::
+   :maxdepth: 1
+
+   esmvaltool.diag_scripts.emergent_constraints
+   esmvaltool.diag_scripts.mlr
+   esmvaltool.diag_scripts.monitor
+   esmvaltool.diag_scripts.ocean
+   esmvaltool.diag_scripts.psyplot_diag
+   esmvaltool.diag_scripts.seaborn_diag
diff --git a/doc/sphinx/source/changelog.rst b/doc/sphinx/source/changelog.rst
new file mode 100644
index 0000000000..76c0a86da5
--- /dev/null
+++ b/doc/sphinx/source/changelog.rst
@@ -0,0 +1,1480 @@
+.. _changelog:
+
+Changelog
+=========
+
+.. _changelog-v2-11-0:
+
+v2.11.0
+-------
+Highlights
+
+- Two new recipes have been added:
+
+  - Recipe :ref:`recipe_aod_aeronet_assess.yml `
+    evaluates model aerosol optical depth (AOD) climatologies against
+    ground-based observations from the AeroNET measurement network.
+  - Recipe :ref:`recipe_climate_patterns.yml `
+    generates climate patterns from CMIP6 model datasets.
+
+- The ESACCI-WATERVAPOUR CMORizer now includes daily data and uses the
+  officially released CDR2 data.
+- Support for 5 new datasets has been added:
+
+  - AeroNET
+  - ANU Climate 2.0 Australian data
+  - Australian Gridded Climate Data (AGCD) precipitation
+  - NOAA-ERSST
+  - NSIDC-G02202-sh sea ice fraction
+
+- NEW TREND: First-time release manager shout-outs!
+
+  - This is the first ESMValTool release managed by the Met Office! We want to
+    shout this out - and we want all future first-time release managers to
+    shout out too - to celebrate the growing, thriving ESMValTool community.
+
+This release includes
+
+Bug fixes
+~~~~~~~~~
+
+- Recipe_ocean_quadmap: Update ATSR to match ESGF name (:pull:`3443`) by :user:`rbeucher`
+- Fix recipe_bock20jgr_fig_8-10.yml (:pull:`3665`) by :user:`LisaBock`
+- Update the list of datasets used in ``recipe_easy_ipcc.yml`` (:pull:`3710`) by :user:`bouweandela`
+
+Documentation
+~~~~~~~~~~~~~
+
+- Improve release tools and documentation (:pull:`3462`) by :user:`bouweandela`
+- Fix a typo in the references file (:pull:`3499`) by :user:`bouweandela`
+- Fix recipe path in ``recipe_perfmetrics.rst`` (:pull:`3532`) by :user:`TomasTorsvik`
+- Improved description of model evaluation recipes (:pull:`3541`) by :user:`schlunma`
+- Remove double word in cmorizer documentation (:pull:`3553`) by :user:`bettina-gier`
+- Fix Codacy badge (:pull:`3558`) by :user:`bouweandela`
+- Update the release schedule for v2.11.0 (:pull:`3573`) by :user:`ehogan`
+- Improve the formatting of the recipe documentation template (:pull:`3652`) by :user:`mo-gill`
+- Add introduction material on the main documentation page (:pull:`3628`) by :user:`bouweandela`
+- Avoid warning in documentation build (:pull:`3675`) by :user:`bouweandela`
+- Update the list of broken recipes for ``v2.11.0`` (:pull:`3706`) by :user:`ehogan`
+
+Diagnostics
+~~~~~~~~~~~
+
+- ``monitor/multi_dataset.py`` improvements: allow data w/o ``timerange`` and improve text formatting (:pull:`3528`) by :user:`schlunma`
+- Allow datasets without ``project`` in multi_datasets.py (:pull:`3552`) by :user:`schlunma`
+- Prevent overlapping time axis tick labels in monitoring recipe (:pull:`3682`) by :user:`schlunma`
+
+New recipe
+~~~~~~~~~~
+
+- Add support for aerosol optical depth climatology metrics to the AutoAssess replacement (:pull:`3048`) by :user:`catherinehardacre`
+- CMIP6 climate patterns (:pull:`2785`) by :user:`mo-gregmunday`
+
+Observational and re-analysis dataset support
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Add cmorizer scripts for NOAA-ERSST.
(:pull:`1799`) by :user:`bjoernbroetz` +- Update OceanSODA-ETHZ CMORizer with new source file (:pull:`3535`) by :user:`TomasTorsvik` +- Add CMORizer script for NSIDC-G02202-sh sea ice fraction (:pull:`3512`) by :user:`flicj191` +- CMORizer Australian Gridded Climate Data(AGCD) precipitation (:pull:`3445`) by :user:`flicj191` +- Extend CMORizer NCEP-DOE-R2 (:pull:`3469`) by :user:`axel-lauer` +- Add comment to recipe_lauer13jclim regarding UWisc being superseded by MAC-LWP (:pull:`3537`) by :user:`rbeucher` +- Recipe_autoassess_landsurface_surfrad: Remove CERES-EBAF version to fix ESGF search (:pull:`3438`) by :user:`rbeucher` +- Updating ESACCI-WATERVAPOUR cmorizer (:pull:`3282`) by :user:`malininae` +- CMORiser for ANU Climate 2.0 Australian data (:pull:`3511`) by :user:`flicj191` +- Add AERONET cmorizer (:pull:`3227`) by :user:`zklaus` +- Update CRU CMORizer (:pull:`3381`) by :user:`lukruh` +- Fix recipe_check_obs to be aligned with DKRZ (:pull:`3673`) by :user:`LisaBock` +- Update AERONET data version (:pull:`3692`) by :user:`ehogan` + +Automatic testing +~~~~~~~~~~~~~~~~~ + +- Move code into function in batch job generation script (:pull:`3491`) by :user:`bouweandela` +- Fix sklearn tests (:pull:`3506`) by :user:`schlunma` +- Pinned sklearn>=1.4.0 (:pull:`3508`) by :user:`schlunma` +- Update sklearn tests to be compatible with current pytest version (pytest >=8.0.0) (:pull:`3517`) by :user:`schlunma` +- Update sklearn tests to be compatible with current pytest version (pytest >=8.0.0) Part 2 (:pull:`3518`) by :user:`schlunma` +- [Circle/CI]Fix `test_installation_from_conda` Circle CI tests (:pull:`3538`) by :user:`valeriupredoi` +- [Github Actions] install git in OSX and add environment inspection (:pull:`3581`) by :user:`valeriupredoi` +- [CI Github Actions] Update (outdated) actions versions that produce Node.js warnings (:pull:`3586`) by :user:`valeriupredoi` +- Fix ``flake8==7`` linting issues (:pull:`3634`) by :user:`valeriupredoi` +- Use ``importlib`` as the import mode for ``pytest`` (:pull:`3672`) by :user:`ehogan` + +Installation +~~~~~~~~~~~~ + +- Update dependencies (:pull:`3487`) by :user:`bouweandela` +- Merge v2.10.x into main (:pull:`3489`) by :user:`schlunma` +- Add imagehash package as an ESMValTool dependency (:pull:`3557`) by :user:`alistairsellar` +- Unpin ``r-akima`` (:pull:`3564`) by :user:`valeriupredoi` +- Adding pys2index dependency (:pull:`3577`) by :user:`ljoakim` +- Pin esmpy <8.6.0 (:pull:`3585`) by :user:`valeriupredoi` +- Pin R <4.3.0 (:pull:`3689`) by :user:`ehogan` +- Pin importlib_metadata <8 (:pull:`3700`) by :user:`ehogan` +- Pin matplotlib <3.9.0 on ESMValTool release branch (:pull:`3712`) by :user:`ehogan` + +Dependency updates +~~~~~~~~~~~~~~~~~~ + +- Fix for ``recipe_seaice_drift.yml``: fix CRS transformer for "North Pole Stereographic" (:pull:`3531`) by :user:`flicj191` +- Fixed attribute handling in austral_jet/main.ncl for iris>=3.8 (:pull:`3603`) by :user:`schlunma` +- Fixed attribute handling in emergent constraint diagnostic for iris>=3.8 (:pull:`3605`) by :user:`schlunma` +- Update the name of the remapcon2 operator in R recipes (:pull:`3611`) by :user:`ehogan` +- Use ``iris.FUTURE.save_split_attrs = True`` to remove iris warning in many diagnostics (:pull:`3651`) by :user:`schlunma` +- Avoid concatenation error in recipe_pcrglobwb.yml (:pull:`3645`) by :user:`bouweandela` +- Update `scipy.integrate.simps` import (:pull:`3704`) by :user:`ehogan` + +Improvements +~~~~~~~~~~~~ + +- Add native6, OBS6 and RAWOBS rootpaths to metoffice template 
in config-user-example.yml and remove temporary dir (:pull:`3613`) by :user:`alistairsellar` + +.. _changelog-v2-10-0: + +v2.10.0 +------- +Highlights + +- Add a realistic IPCC example recipe that reproduces figure 9.3 from AR6. It + computes the mean sea-surface temperature anomaly between 1850-2100 over all + available CMIP6 models. See the :ref:`recipe documentation ` + or read the `blog post `__ + for more information. + +- Added more plot types to monitoring diagnostic: Hovmoeller Z vs. time, + Hovmoeller time vs latlon, variable vs. latitude are now available. See the + :ref:`recipe documentation ` for more information. + +- Add support for 4 new datasets: + + - NOAA-CIRES-20CR v3 reanalysis + - NASA MERRA reanalysis + - NOAA marine boundary layer data for CH4 + - MOBO-DIC2004-2019 + + See :ref:`supported_datasets` and :ref:`inputdata_observations` for more + information. + +- Many recipes now have up-to-date obs4MIPs dataset names so required data can + automatically be downloaded from ESGF. + +This release includes + +Bug fixes +~~~~~~~~~ + +- Update recipe shapeselect to work with shapely v2 (:pull:`3283`) :user:`lukruh` +- Correctly handle ``~`` when reading ``plot_folder`` option of monitoring diagnostic (:pull:`3449`) :user:`schlunma` +- Fixed provenance tracking for NCL multipanel PNGs (:pull:`3332`) :user:`schlunma` +- Fixed plot paths in NCL provenance tracking (:pull:`3422`) :user:`schlunma` +- Fix erroneous file_type handling in certain NCL diagnostics (:pull:`3474`) :user:`zklaus` +- Fix NCL provenance tracking (:pull:`3477`) :user:`schlunma` +- Fix plots and provenance in Russell diagnostics (:pull:`3479`) :user:`schlunma` + +Documentation +~~~~~~~~~~~~~ + +- Add merge instructions to release instructions (:pull:`3292`) :user:`remi-kazeroni` +- Update release schedule after release of v2.9.0 (:pull:`3289`) :user:`remi-kazeroni` +- Add list of failing recipes for v2.9.0 release (:pull:`3294`) :user:`remi-kazeroni` +- Update ``mamba`` version in readthedocs configuration docs builds (:pull:`3310`) :user:`valeriupredoi` +- Add Romain Beucher to citation file as contributor (:pull:`3318`) :user:`valeriupredoi` +- Removed recipe_carvalhais14nat from list of broken recipes (:pull:`3319`) :user:`remi-kazeroni` +- Add `OBS-maintainers `__ team to documentation on OBS data maintenance and CMORizer reviews (:pull:`3335`) :user:`remi-kazeroni` +- Add Pauline Bonnet to citation file (:pull:`3347`) :user:`Paulinebonnet111` +- Ensure compatible zstandard and zstd in readthedocs builds (:pull:`3362`) :user:`zklaus` +- Fix documentation build (:pull:`3397`) :user:`bouweandela` +- Minor updates to release tools (:pull:`3216`) :user:`bouweandela` +- Enhance provenance documentation (:pull:`3305`) :user:`alistairsellar` +- Re-add communities and grants in zenodo file (:pull:`3416`) :user:`valeriupredoi` +- Update Anconda badge in README (:pull:`3375`, :pull:`3453`) :user:`valeriupredoi` + +Diagnostics +~~~~~~~~~~~ + +- Slight refactoring of diagnostic script ``galytska23/select_variables_for_tigramite.py`` for generality and portability (:pull:`3298`) :user:`valeriupredoi` and :user:`egalytska` +- Allow custom variable grouping in diagnostic script ``monitor/multi_datasets.py`` (:pull:`3343`) :user:`schlunma` +- Extended monitor diagnostic with plot type variable vs. latitude (:pull:`3340`) :user:`ellensarauer` +- Add Hovmoeller Z vs. 
time plot to monitoring diagnostic (:pull:`3345`) :user:`cubeme` and :user:`helgehr` +- Adding Hovmoeller time vs latlon plots to monitoring recipes (:pull:`3341`) :user:`lukruh` and :user:`jeremykraftdlr` +- Implied heat transport new diagnostic (:pull:`3177`) :user:`mo-abodas` +- Recipe changes for new statistics preprocessors (percentiles) (:pull:`3351`) :user:`schlunma` +- Add a realistic example recipe (:pull:`3356`) :user:`Peter9191` and :user:`bouweandela` +- Support ``CenteredNorm`` in diagnostic monitor/multidatasets.py (:pull:`3415`) :user:`schlunma` +- Use new preprocessor statistics calling convention for recipe_easy_ipcc.yml (:pull:`3418`) :user:`bouweandela` +- Adapt to changed style scheme name in matplotlib (:pull:`3475`) :user:`zklaus` +- Add version to dataset in python example recipe to avoid "Unknown file format" issue on JASMIN (:pull:`3322`) :user:`ehogan` +- Add the dataset version in the heatwaves_coldwaves recipe to avoid the "Unknown file format" issue on JASMIN (:pull:`3373`) :user:`ehogan` + +Observational and re-analysis dataset support +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- Cmorizer for NOAA-CIRES-20CR v3 reanalysis (clt, clwvi, hus, prw, rlut, rlutcs, rsut, rsutcs) (:pull:`3137`) :user:`LisaBock` +- CMORizer for NASA MERRA reanalysis (:pull:`3039`) :user:`axel-lauer` +- Download and formatting of NOAA marine boundary layer data for CH4 (NOAA-MBL-CH4) (:pull:`3301`) :user:`FranziskaWinterstein` +- Added CMORizer for MOBO-DIC2004-2019 (:pull:`3297`) :user:`schlunma` +- Update obs4MIPs dataset names in quantilebias recipe (:pull:`3330`) :user:`rbeucher` +- Update obs4MIPs dataset names in Schlund20esd recipe (:pull:`3329`) :user:`rbeucher` +- Update obs4MIPs dataset names in flatoipcc recipes (:pull:`3328`) :user:`rbeucher` +- Update obs4mips dataset names in clouds recipes (:pull:`3326`) :user:`rbeucher` +- Update Obs4MIPs dataset names in ECS recipes (:pull:`3327`) :user:`rbeucher` +- Update obs4mips dataset names in Bock et al recipes (:pull:`3324`, :pull:`3389` and :pull:`3473`) :user:`rbeucher` and :user:`bouweandela` +- Update obs4mips dataset names in radiation budget recipe (:pull:`3323`) :user:`rbeucher` +- Update Obs4MIPs dataset names in perfmetrics CMIP5 recipe (:pull:`3325`) :user:`rbeucher` + +Automatic testing +~~~~~~~~~~~~~~~~~ + +- Made sklearn test backwards-compatible with sklearn < 1.3 (:pull:`3285`) :user:`schlunma` +- Update conda lock creation Github Action workflow and ship updated conda-lock file (:pull:`3307`, :pull:`3407`) :user:`valeriupredoi` +- Compress all bash shell setters into one default option per GitHub Action workflow (:pull:`3315`) :user:`valeriupredoi` +- Remove deprecated option ``offline`` from CI configuration (:pull:`3367`) :user:`schlunma` + +Installation +~~~~~~~~~~~~ + +- Use ESMValCore v2.10 (:pull:`3486`) :user:`bouweandela` + +Improvements +~~~~~~~~~~~~ + +- Merge v2.9.x into main (:pull:`3286`) :user:`schlunma` +- Allow NCL unit conversion `kg s-1` -> `GtC y-1` (:pull:`3300`) :user:`schlunma` + +.. _changelog-v2-9-0: + +v2.9.0 +------ + +Highlights +~~~~~~~~~~ + +- A new :ref:`diagnostic ` has been + added to provide a high-level interface to + `seaborn `__, + a Python data visualization library based on + `matplotlib `__. + See the :ref:`recipe documentation ` for more + information. 
+ +- We have included a new recipe and diagnostic that represent the major + physical processes that describe Arctic-midlatitude teleconnections and + provide the basis for the CMIP6 model evaluation for the further application + of causal discovery. + The results are discussed in the article + `"Causal model evaluation of Arctic-midlatitude teleconnections in CMIP6" `__ + by Galytska et al. (in review in Journal of Geophysical Research: Atmospheres). + +- It is now possible to use the + `Dask distributed scheduler `__, + which can + `significantly reduce the run-time of recipes `__. + Configuration examples and advice are available in the + :ref:`ESMValCore documentation `. + If configured, the Dask distributed scheduler will also be used by diagnostic + scripts written in Python, so make sure to use + `lazy data `__ + wherever it is possible in your (new) diagnostics. + More work on improving the computational performance is planned, so please + share your experiences, good and bad, with this new feature in + `ESMValGroup/ESMValCore#1763 `__. + +This release includes + +Bug fixes +~~~~~~~~~ + +- Fixed usage of ``work_dir`` in some CMORizer scripts (:pull:`3192`) :user:`remi-kazeroni` +- Realize data for scalar cube in `recipe_carvalhais14nat` to avert issue from dask latest (2023.6.0) (:pull:`3265`) :user:`valeriupredoi` +- Fix failing ``mlr`` diagnostic test by adding new scikit-learn default tag (:pull:`3273`) :user:`remi-kazeroni` +- Fix ordering of models in perfmetrics diagnostic script (:pull:`3275`) :user:`LisaBock` + +Documentation +~~~~~~~~~~~~~ + +- Update release schedule after v2.8.0 (:pull:`3138`) :user:`remi-kazeroni` +- Added reference entry for Winterstein (:pull:`3154`) :user:`FranziskaWinterstein` +- Show logo on PyPI (:pull:`3185`) :user:`valeriupredoi` +- Add Release Managers for v2.9.0 and v2.10.0 (:pull:`3184`) :user:`remi-kazeroni` +- Fix readthedocs build with esmpy>=8.4.0 and missing ESMFMKFILE variable (:pull:`3205`) :user:`valeriupredoi` +- Add ESMValCore release v2.8.1 into the documentation (:pull:`3235`) :user:`remi-kazeroni` +- Modified links to the tutorial (:pull:`3236`) :user:`remi-kazeroni` +- Fix gitter badge in README (:pull:`3258`) :user:`remi-kazeroni` +- Add release notes for v2.9.0 (:pull:`3266`) :user:`bouweandela` + +Diagnostics +~~~~~~~~~~~ + +- New plot_type 1d_profile in monitor (:pull:`3178`) :user:`FranziskaWinterstein` +- Add Seaborn diagnostic (:pull:`3155`) :user:`schlunma` +- New recipe and diagnostic for Arctic-midlatitude research (:pull:`3021`) :user:`egalytska` +- Generate climatology on the fly for AutoAssess soil moisture (:pull:`3197`) :user:`alistairsellar` +- Remove "fx_variables" from recipe_tebaldi21esd.yml (:pull:`3211`) :user:`hb326` +- Remove "fx_variables" from ipccwg1ar5ch9 recipes (:pull:`3215`) :user:`katjaweigel` +- Remove "fx_variables" from recipe_wenzel14jgr.yml (:pull:`3212`) :user:`hb326` +- Update obs4MIPs dataset to the current naming scheme in recipe_smpi.yml (:pull:`2991`) :user:`bouweandela` +- Fixed pandas diagnostics for pandas>=2.0.0 (:pull:`3209`) :user:`schlunma` +- Update recipe_impact.yml to work with newer versions of `pandas` (:pull:`3220`) :user:`bouweandela` +- Add variable long names to provenance record in monitoring diagnostics (:pull:`3222`) :user:`bsolino` + +Observational and re-analysis dataset support +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- Add CMORizer for GPCP-SG (pr) (:pull:`3150`) :user:`FranziskaWinterstein` +- Extension of NASA MERRA2 CMORizer (cl, cli, clivi, clw, clwvi) 
(:pull:`3167`) :user:`axel-lauer`
+
+Automatic testing
+~~~~~~~~~~~~~~~~~
+
+- Add a CircleCI-testing-specific ``recipe_python_for_CI.yml`` to avoid calling geolocator/Nominatim over CI (:pull:`3159`) :user:`valeriupredoi`
+- Check if Python minor version changed after Julia install in development installation test (:pull:`3213`) :user:`valeriupredoi`
+- Fix tests using deprecated ``esmvalcore._config`` module that has been removed in ESMValCore v2.9 (:pull:`3204`) :user:`valeriupredoi`
+
+Installation
+~~~~~~~~~~~~
+
+- Add support for Python=3.11 (:pull:`3173`) :user:`valeriupredoi`
+- Drop python=3.8 support (:pull:`3193`) :user:`valeriupredoi`
+- Repair generation of conda lock files (:pull:`3148`) :user:`valeriupredoi`
+- Modernize lock creation script and repair lock generation (:pull:`3174`) :user:`valeriupredoi`
+- Pin numpy !=1.24.3 due to severe masking bug (:pull:`3182`) :user:`valeriupredoi`
+- Update xesmf to versions >= 0.4.0 (:pull:`2728`) :user:`zklaus`
+- Update esmpy import for ESMF version 8.4.0 or larger (:pull:`3188`) :user:`valeriupredoi`
+- Relax the pin on iris to allow the use of older versions for performance reasons (:pull:`3270`) :user:`bouweandela`
+- Use ESMValCore v2.9.0 (:pull:`3274`) :user:`bouweandela`
+
+Improvements
+~~~~~~~~~~~~
+
+- Update pre-commit hooks (:pull:`3189`) :user:`bouweandela`
+- Add support for using a dask distributed scheduler (:pull:`3151`) :user:`bouweandela`
+
+.. _changelog-v2-8-0:
+
+v2.8.0
+------
+
+Highlights
+~~~~~~~~~~
+
+- This release includes the diagnostics for reproducing figures 3.9, 3.19,
+  3.42 and 3.43 of the IPCC AR6 WG1 report.
+  See :ref:`recipe documentation ` about the added recipes.
+- A new set of recipes and diagnostics has been included to evaluate cloud
+  climatologies from CMIP models as used in `Lauer et al. (2023), J. Climate
+  `__.
+  See :ref:`recipe documentation ` about the added recipes.
+- Addition of a set of recipes for extreme events, regional and impact
+  evaluation as used in `Weigel et al. (2021), J. Climate
+  `__ and in IPCC AR5.
+  See :ref:`recipe documentation ` about the added recipes.
+
+Highlights from ESMValCore v2.8.0 :ref:`here`:
+
+- ESMValCore now supports wildcards in recipes and offers improved support
+  for ancillary variables and dataset versioning.
+- Support for CORDEX datasets in a rotated pole coordinate system has been added.
+- Native :ref:`ICON ` output is now made UGRID-compliant
+  on the fly.
+- The Python API has been extended with the addition of three modules:
+  :mod:`esmvalcore.config`, :mod:`esmvalcore.dataset`, and
+  :mod:`esmvalcore.local`.
+- The preprocessor :func:`~esmvalcore.preprocessor.multi_model_statistics`
+  has been extended to support more use cases.
+
+This release includes:
+
+Backwards incompatible changes
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Please read the descriptions of the linked pull requests for detailed upgrade instructions.
+
+- Deprecated features scheduled for removal in v2.8.0 or earlier have now been removed
+  (:pull:`2941`)
+  :user:`schlunma`.
+  Removed ``esmvaltool.iris_helpers.var_name_constraint`` (has been deprecated
+  in v2.6.0; please use :class:`iris.NameConstraint` with the keyword argument
+  ``var_name`` instead).
+  Removed `write_netcdf` and `write_plots` from `recipe_filler.py`.
+- No files from the ``native6`` project will be found if a non-existent version
+  of a dataset is specified (`#3041 `_)
+  :user:`remi-kazeroni`.
+  The tool now searches for the exact ``version`` of ``native6`` datasets.
+ Therefore, it is necessary to make sure that the version number in the + directory tree matches with the version number in the recipe to find the files. +- The conversion of precipitation units from monitoring diagnostic is now done + at the preprocessor stage + (`#3049 `_) + :user:`schlunma`. + To use the unit conversion for precipitation in the new version of this + diagnostic, add it as a preprocessor for the precipitation dataset to the + recipe. + +Bug fixes +~~~~~~~~~ + +- Fix for provenance records from `seaice_tsline.ncl` (:pull:`2938`) :user:`axel-lauer` +- Fix in `validation.py` for resolving datasets with identical names by using distinct aliases (:pull:`2955`) :user:`FranziskaWinterstein` +- Bugfix: masking of non-significant differences in `zonal.ncl` (perfmetrics) (:pull:`2957`) :user:`axel-lauer` +- Fix typo in `perfmetrics/main.ncl` to add tropopause (:pull:`2966`) :user:`FranziskaWinterstein` +- Fix .png bug in `wenzel16nat` diagnostics (:pull:`2976`) :user:`axel-lauer` +- `Recipe_ocean_Landschuetzer2016`: Fix typo in filename to run model vs OBS diagnostics (:pull:`2997`) :user:`TomasTorsvik` +- Fix read_cmor in NCL utilities (:pull:`3007`) :user:`axel-lauer` +- Removed usages of deprecated features that cause diagnostic crashes (:pull:`3009`) :user:`schlunma` +- Replace removed `matplotlib.pyplot.savefig` option `additional_artists` (:pull:`3075`) :user:`schlunma` +- Added missing comma to `sommer17joss.bibtex` (:pull:`3078`) :user:`schlunma` +- Fix call of output_type in `aux_plotting.ncl` (:pull:`3083`) :user:`LisaBock` +- Remove colorbar from `bbox_extra_artists` (:pull:`3087`) :user:`schlunma` +- Fix `MPI-ESM1-2-HR` entries in `recipe_tebaldi21esd` (:pull:`3093`) :user:`remi-kazeroni` +- Fix bug in provenance writing of `perfmetrics` recipes v2.8.0 (:pull:`3098`) :user:`axel-lauer` +- Fix `recipe_sea_surface_salinity` for v2.8 (:pull:`3102`) :user:`sloosvel` +- Fix variable `short_name` and metadata for ESACCI-LST CMORizer (:pull:`3104`) :user:`remi-kazeroni` +- Fix `recipe_carvalhais14`: replace outline patch with splines (:pull:`3111`) :user:`valeriupredoi` +- Replace deprecated function `cm.register_cmap` with `mpl.colormaps.register` for `recipe_ arctic_ocean` (:pull:`3112`) :user:`TomasTorsvik` +- Fix `recipe_extract_shape.yml` (lacking caption for provenance) (:pull:`3126`) :user:`valeriupredoi` + +Community +~~~~~~~~~ + +- Update documentation on pre-installed versions on HPC clusters (:pull:`2934`) :user:`remi-kazeroni` + +Deprecations +~~~~~~~~~~~~ + +- Remove radiation recipes that have been superseded by :ref:`recipe_radiation_budget ` along with associated diagnostic scripts (`#3115 `_) :user:`alistairsellar` + +Documentation +~~~~~~~~~~~~~ + +- Backward compatibility policy (:pull:`2879`) :user:`alistairsellar` +- Suppress installing and reinstalling dependencies with pip during readthedocs builds (:pull:`2913`) :user:`valeriupredoi` +- Update installation instructions (:pull:`2939`) :user:`bouweandela` +- Update documentation for `recipe_extreme_index` (:pull:`2951`) :user:`katjaweigel` +- Update documentation and `recipe_check_obs` (ERA5) (:pull:`2952`) :user:`axel-lauer` +- Updated ICON dataset entry in documentation (:pull:`2954`) :user:`schlunma` +- Add Franziska Winterstein as collaborator in CITATION file (:pull:`3001`) :user:`valeriupredoi` +- Update release schedule for v2.7.0 and v2.8.0 (:pull:`3010`) :user:`remi-kazeroni` +- Add ESMValCore Bugfix release v2.7.1 to the release overview table (:pull:`3028`) :user:`valeriupredoi` +- Detailed 
instructions for release procedure: running recipes and analyzing the output (:pull:`3032`) :user:`valeriupredoi` +- Link backward compatibility policy to top level of ESMValCore changelog (:pull:`3052`) :user:`alistairsellar` +- Update release instructions (:pull:`3066`) :user:`remi-kazeroni` +- Updated docs and tests regarding new `search_esgf` option (:pull:`3069`) :user:`schlunma` +- Update script to draft release notes (:pull:`3070`) :user:`remi-kazeroni` +- Synchronize documentation table of contents with ESMValCore (:pull:`3073`) :user:`bouweandela` +- Update environment handling in release documentation (:pull:`3096`) :user:`remi-kazeroni` +- Clarify use (or not) of Jasmin climatology files by soil moisture & permafrost recipes (:pull:`3103`) :user:`alistairsellar` +- Add link to recipe portal in the gallery page (:pull:`3113`) :user:`remi-kazeroni` +- Improve stratosphere documentation (:pull:`3114`) :user:`alistairsellar` +- Added note to documentation that not all datasets used in `schlund20jgr` recipes are available on ESGF (:pull:`3121`) :user:`schlunma` +- Draft changelog for `v2.8.0` (:pull:`3124`) :user:`remi-kazeroni` +- Documenting broken recipes after recipe testing for releases (:pull:`3129`) :user:`remi-kazeroni` +- Increase ESMValTool version to 2.8.0 and update release dates (:pull:`3136`) :user:`remi-kazeroni` + +Diagnostics +~~~~~~~~~~~ + +- Cloud diagnostics for Lauer et al. (2023) (:pull:`2750`) :user:`axel-lauer` +- Splitting of `flato13ipcc.yml` into separate recipes and adding recipes for regional Figures (:pull:`2156`) :user:`katjaweigel` +- Adding IPCC AR6 Chapter 3 Figure 3.43 - Pattern Correlation (:pull:`2772`) :user:`LisaBock` +- Adding IPCC AR6 Chapter 3 Fig. 3.42 - Perfmetrics (:pull:`2856`) :user:`LisaBock` +- Comment missing datasets and remove deprecated argument in `recipe_climate_change_hotspot` (:pull:`2920`) :user:`sloosvel` +- Add plot type `annual_cycle` to multi-dataset monitoring diagnostic (:pull:`2922`) :user:`schlunma` +- Adding IPCC AR6 Chapter 3 Fig. 3.19 - Speed-Up Of Zonal Mean Wind (:pull:`2984`) :user:`LisaBock` +- Adding IPCC AR6 Chapter 3 Fig. 3.9 - Attribution (:pull:`2986`) :user:`LisaBock` +- Obs4mips CERES-EBAF: update version to latest available through esgf in `recipe_validation.yml` (:pull:`3002`) :user:`valeriupredoi` +- Improve flexibility of cloud diagnostics (:pull:`3016`) :user:`axel-lauer` +- Let `recipe_impact.yml` write a CSV file that can directly be used in C4I portal (:pull:`2258`) :user:`Peter9192` +- Fix version numbers of native6 datasets in recipes (`#3041`_) :user:`remi-kazeroni` +- Removed automatic conversion of precipitation units from monitoring diagnostic (`#3049`_) :user:`schlunma`. 
+- Updated recipes for ESMValCore v2.8 (:pull:`3064`) :user:`schlunma` +- Fix `cos22esd` for release of 2.8 (:pull:`3097`) :user:`sloosvel` +- Diagnostic for `recipe_autoassess_stratosphere.yml`: remove unused feature incompatible with Matplotlib=3.7.1 (:pull:`3089`) :user:`valeriupredoi` +- Fix numpy deprecation in `hype` diagnostic (:pull:`3101`) :user:`Peter9192` +- Remove superseded radiation recipes (`#3115`_) :user:`alistairsellar` +- Removed `fx_variables` in `recipe_mpqb_xch4` and `recipe_lauer22jclim_fig8` (:pull:`3117`) :user:`axel-lauer` +- Update Python example recipe (:pull:`3119`) :user:`bouweandela` +- Updated figure settings to account for newer matplotlib version (:pull:`3133`) :user:`katjaweigel` + +Observational and re-analysis dataset support +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- Earth System Data Cube (ESDC) cmorizer (:pull:`2799`) :user:`bsolino` +- Added CMORizer for Landschützer2020 (spco2) (:pull:`2908`) :user:`schlunma` +- Added CMORizer for MOBO-DIC_MPIM (dissic) (:pull:`2909`) :user:`schlunma` +- Added CMORizer for OceanSODA-ETHZ (areacello, co3os, dissicos, fgco2, phos, spco2, talkos) (:pull:`2915`) :user:`schlunma` +- Extension of ERA-Interim CMORizer (cl, cli, clw, lwp, rlut, rlutcs, rsut, rsutcs) (:pull:`2923`) :user:`axel-lauer` +- Add JRA-25 cmorizer (clt, hus, prw, rlut, rlutcs, rsut, rsutcs) (:pull:`2927`) :user:`LisaBock` +- New CMORizers for datasets from the NCEP family (NCEP-DOE-R2, NCEP-NCAR-R1, NOAA-CIRES-20CR) (:pull:`2931`) :user:`hb326` +- Updates to the recipes that use the NCEP reanalysis dataset (:pull:`2932`) :user:`hb326` +- MERRA2 cmorizer convert vertical level coordinate units from hPa to Pa (:pull:`3003`) :user:`valeriupredoi` +- MERRA2 cmorizer set UNLIMITED time coordinate (:pull:`3006`) :user:`valeriupredoi` +- Added CMORizers for TCOM-CH4 (CH4) and TCOM-N2O (N2O) (:pull:`3014`) :user:`schlunma` +- Update HadISST cmorizer to include recent years (:pull:`3027`) :user:`remi-kazeroni` + +Automatic testing +~~~~~~~~~~~~~~~~~ + +- Add DKRZ/Levante batch scripts for release recipe running (:pull:`2883`) :user:`valeriupredoi` +- Remove `pytest-flake8` and call the use of `flake8` straight (:pull:`2904`) :user:`valeriupredoi` +- Unpin `flake8` (:pull:`2937`) :user:`valeriupredoi` +- Fix failing tests that use deprecated feature of `sklearn` (:pull:`2961`) :user:`schlunma` +- Fix recipe loading tests for esmvalcore before and after version 2.8 (:pull:`3020`) :user:`valeriupredoi` +- Update recipe load test for v2.8 (:pull:`3040`) :user:`bouweandela` +- Test running recipes with the development version of ESMValCore (:pull:`3072`) :user:`bouweandela` +- Fix `test_naming.py` so it doesn't let through directories that need be ignored (:pull:`3082`) :user:`valeriupredoi` +- Conda environment files for interim use of `esmvalcore=2.8.0rc1` (:pull:`3090`) :user:`valeriupredoi` +- Move `flake8` check to a step separate from installation on CircleCI (:pull:`3105`) :user:`bouweandela` +- Recreate conda lock file to harpoon esmvalcore=2.8.0rc1 (:pull:`3108`) :user:`valeriupredoi` +- Update batch script generation to run all recipes in one command (:pull:`3130`) :user:`remi-kazeroni` + +Installation +~~~~~~~~~~~~ + +- Merge release branch `release_270stable` in main so we pick up unsquashed commits and set the correct version 2.7.0 for main (and up version in CITATION.cff) (:pull:`2896`) :user:`valeriupredoi` +- Unpin `NetCDF4` (:pull:`2929`) :user:`valeriupredoi` +- Unpin `cf-units` (:pull:`2930`) :user:`bouweandela` +- Set the version 
number on the development branches to one minor version more than the last release (:pull:`2964`) :user:`bouweandela` +- Pin `shapely<2.0.0` for linux64 (:pull:`2970`) :user:`valeriupredoi` +- Unpin `matplotlib` (:pull:`3068`) :user:`valeriupredoi` +- Add `packaging` as direct dependency to ESMValTool (:pull:`3099`) :user:`valeriupredoi` +- Re-pin sphinx to latest (6.1.3) and add nbsphinx to the environment (:pull:`3118`) :user:`valeriupredoi` +- Conda environment files for esmvalcore=2.8.0rc2 (:pull:`3120`) :user:`remi-kazeroni` +- Remove rc (release candidates) conda channel and re-pin esmvalcore to new stable 2.8 (:pull:`3131`) :user:`valeriupredoi` + +Improvements +~~~~~~~~~~~~ + +- Read `config-user.yml` using `esmvalcore.config` module (:pull:`2736`) :user:`bouweandela` +- Make results of recipes `schlund20jgr_*.yml` deterministic (:pull:`2900`) :user:`schlunma` +- `Recipe_gier2020bg.yml`: add sorting to SA barplot (:pull:`2905`) :user:`bettina-gier` +- Add the outline of a climatological tropopause to the zonalmean_profile plots (:pull:`2947`) :user:`FranziskaWinterstein` +- Update data finder imports (:pull:`2958`) :user:`bouweandela` +- Add support for the upcoming ESMValCore v2.8 release to the recipe filler tool (:pull:`2995`) :user:`bouweandela` +- Updated monitoring diagnostics with netCDF output and additional logging (:pull:`3029`) :user:`schlunma` +- Use aliases in perfmetrics (:pull:`3058`) :user:`FranziskaWinterstein` + + +.. _changelog-v2-7-0: + +v2.7.0 +------ + +Highlights +~~~~~~~~~~ + +- This release has seen the inclusion of the code for figures 3.3, 3.4, 3.5, 3.13 and 3.15 of the IPCC AR6 WG1 report, see them in the `new documentation `__ +- We have also included new diagnostics and recipes necessary to produce the plots and tables for the journal article "Climate model projections from the Scenario Model Intercomparison Project (ScenarioMIP) of CMIP6" by `Tebaldi et al. in ESD 2020-68 `__ from 2021; also see the `recipe entry `__ +- We have also extended the support for the MERRA2 observational dataset, by adding support for a large number of variables, including 3D variables, see the `table of supported obs datasets `__ + +Backwards incompatible changes +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- Remove installation of R dependencies from the help message (:pull:`2761`) :user:`remi-kazeroni` + +Bug fixes +~~~~~~~~~ + +- Fix misplaced provenance records from IPCC AR5 Ch.12 diags (:pull:`2758`) :user:`axel-lauer` +- Fix `esmvaltool.utils.testing.regression.compare` module to run with Python<3.10 too (:pull:`2778`) :user:`valeriupredoi` +- Fixed small bug that could lead to wrong pr units in `monitor/multi_datasets.py` (:pull:`2788`) :user:`schlunma` +- Pin `xgboost>1.6.1` so we avert documentation failing to build with `1.6.1` (:pull:`2780`) :user:`valeriupredoi` +- Pin `matplotlib-base<3.6.0` to avoid conflict from `mapgenerator` that fails doc builds (:pull:`2830`) :user:`valeriupredoi` +- Fixed wrong latitudes in NDP CMORizer (:pull:`2832`) :user:`schlunma` +- Fix indexer in Autoassess supermeans module to use a tuple of `(slice(), idx, idx)` (:pull:`2838`) :user:`valeriupredoi` +- Replace xarray ufuncs with bogstandard numpy in weighting/climwip/calibrate_sigmas.py (:pull:`2848`) :user:`valeriupredoi` +- Fix units MERRA2 CMORizer (:pull:`2850`) :user:`axel-lauer` +- Fix bug when using log-scale y-axis for ocean transects. 
(:pull:`2862`) :user:`TomasTorsvik` + +Community +~~~~~~~~~ + +- Add MO-paths to config file (:pull:`2784`) `mo-tgeddes `__ + +Deprecations +~~~~~~~~~~~~ + +- Recipe `recipe_esacci_oc.yml` replace with new regrid scheme `nearest_extrapolate` (:pull:`2841`) :user:`valeriupredoi` + +Documentation +~~~~~~~~~~~~~ + +- Update release schedule for v2.7 (:pull:`2747`) :user:`bouweandela` +- Add Met Office installation method (:pull:`2751`) `mo-tgeddes `__ +- Add release dates for 2023 (:pull:`2769`) :user:`remi-kazeroni` +- Made `maintainer` entry mandatory for published recipes (:pull:`2703`) :user:`schlunma` +- Use command with current command line opts for `cffconvert` in documentation (:pull:`2791`) :user:`valeriupredoi` +- Update CMORizer documentation with command options (:pull:`2795`) :user:`remi-kazeroni` +- Fixed broken link for monthly meetings (:pull:`2806`) :user:`remi-kazeroni` +- Update MO obs4MIPs paths in the user configuration file (:pull:`2813`) `mo-tgeddes `__ +- Fix Windows incompatible file names in documentation of recipe_climate_change_hotspot.yml (:pull:`2823`) :user:`ledm` +- Update documentation for the Landschuetzer 2016 recipe. (:pull:`2801`) :user:`TomasTorsvik` +- Fixed anaconda badge in README (:pull:`2866`) :user:`valeriupredoi` +- Update release strategy notes (:pull:`2734`) :user:`sloosvel` +- Add documentation on how to handle CMORizers for multiple dataset versions (:pull:`2730`) :user:`remi-kazeroni` +- Extending documentation: recipe maintainer + broken recipe policy (:pull:`2719`) :user:`axel-lauer` + +Diagnostics +~~~~~~~~~~~ + +- Recipe and diagnostics for Tebaldi et al., ESD, 2021 (:pull:`2052`) `debe-kevin `__ +- Figures for IPCC AR6 WG1 Chapter 3 (Atmosphere) (:pull:`2533`) :user:`LisaBock` + +Observational and re-analysis dataset support +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- Update CERES-EBAF to Ed4.1 (:pull:`2752`) :user:`axel-lauer` +- New CMORizer for CALIPSO-ICECLOUD (:pull:`2753`) :user:`axel-lauer` +- New CMORizer for CLOUDSAT-L2 (:pull:`2754`) :user:`axel-lauer` +- Update MERRA2 cmorizer with extra 2D and 3D variables (:pull:`2774`) :user:`valeriupredoi` + +Automatic testing +~~~~~~~~~~~~~~~~~ + +- Pin `netcdf4 != 1.6.1` since that is spitting large numbers of SegFaults (:pull:`2796`) :user:`valeriupredoi` + +Installation +~~~~~~~~~~~~ + +- Increase esmvalcore version to 2.7.0 in environment files (:pull:`2860`) :user:`valeriupredoi` +- Add iris-esmf-regrid as a dependency (:pull:`2880`) :user:`zklaus` + +Improvements +~~~~~~~~~~~~ + +- Fix tebaldi21esd (:pull:`2749`) :user:`axel-lauer` +- Added option to show basic statistics in plots of `monitor/multi_datasets.py` (:pull:`2790`) :user:`schlunma` +- Remove retracted datasets from `recipe_climate_change_hotspot` (:pull:`2854`) :user:`sloosvel` + + +.. _changelog-v2-6-0: + +v2.6.0 +------ + +Highlights +~~~~~~~~~~ + +- A new monitoring diagnostic has been added to allow the comparison of model runs against reference datasets. For details, see :ref:`Monitoring diagnostic to show multiple datasets in one plot (incl. biases) `. +- A tool has been developed to compare the output of recipe runs against previous runs, in order to detect breaking changes between releases in an automated way. Find more information in :ref:`Comparing recipe runs `. +- The recipe :ref:`Climate Change Hotspot ` allows computing hotspots in any rectangular region. 
+Thanks to that ESMValTool has gained the following features: + +- A new set of CMOR fixes is now available in order to load native EMAC model output and CMORize it on the fly. +- The version number of ESMValCore is now automatically generated using `setuptools_scm `__, which extracts Python package versions from git metadata. + +This release includes + +Bug fixes +~~~~~~~~~ + +- Fix dtype for Marrmot recipe results (:pull:`2646`) :user:`SarahAlidoost` +- Adapt test_fix_coords to new version of cf-units (:pull:`2707`) :user:`zklaus` +- Fix nested axes in `recipe_martin18_grl` and `recipe_li17natcc` (:pull:`2712`) :user:`lukruh` +- Update common_climdex_preprocessing_for_plots.R (:pull:`2727`) :user:`earnone` + +Community +~~~~~~~~~ + +- Collecting github user names for config-references (:pull:`2677`) :user:`lukruh` + +Deprecations +~~~~~~~~~~~~ + +- Deprecate the function `esmvaltool.diag_scripts.shared.var_name_constraint`. This function is scheduled for removal in v2.8.0. Please use :class:`iris.NameConstraint` with the keyword argument var_name instead: this is an exact replacement. (:pull:`2655`) :user:`schlunma` + +Documentation +~~~~~~~~~~~~~ + +- Documentation Improvements (:pull:`2580`) :user:`stacristo` +- Fixed broken label in the documentation (:pull:`2616`) :user:`remi-kazeroni` +- Add readthedocs configuration file (:pull:`2627`) :user:`bouweandela` +- Update the command for building the documentation (:pull:`2622`) :user:`bouweandela` +- Added DKRZ-Levante to `config-user-example.yml` (:pull:`2632`) :user:`remi-kazeroni` +- Improved documentation on native dataset support (:pull:`2635`) :user:`schlunma` +- Add documentation on building and uploading Docker images (:pull:`2662`) :user:`bouweandela` +- Remove support for Mistral in `config-user-example.yml` (:pull:`2667`) :user:`remi-kazeroni` +- Add note to clarify that CORDEX support is work in progress (:pull:`2682`) :user:`bouweandela` +- Restore accidentally deleted text from input data docs (:pull:`2683`) :user:`bouweandela` +- Add running settings note in `recipe_wenzel16nat.yml` documentation (:pull:`2692`) :user:`sloosvel` +- Add a note on transferring permissions to the release manager (:pull:`2688`) :user:`bouweandela` +- Update documentation on ESMValTool module at DKRZ (:pull:`2696`) :user:`remi-kazeroni` +- Add note on how to run recipe_wenzel14jgr.yml (:pull:`2717`) :user:`sloosvel` +- Added conda forge feedstock repo link in README (:pull:`2555`) :user:`valeriupredoi` + +Diagnostics +~~~~~~~~~~~ + +- Compute bias instead of correlation in `compare_salinity.py` (:pull:`2642`) :user:`sloosvel` +- Update monitor diagnostics (:pull:`2608`) :user:`schlunma` +- Add new Psyplot diagnostic (:pull:`2653`) :user:`schlunma` +- Reduce memory usage of lisflood recipe (:pull:`2634`) :user:`sverhoeven` +- Provenance in ocean diagnostics (:pull:`2651`) :user:`tomaslovato` +- Extend monitor diagnostics with multi-dataset plots (:pull:`2657`) :user:`schlunma` +- Recipe and diagnostics to plot climate change hotspots: Cos et al., ESD 2022 (:pull:`2614`) :user:`pepcos` +- Update plots of consecutive dry days recipe (:pull:`2671`) :user:`bouweandela` +- Fix the format of ids in Hype forcing files (:pull:`2679`) :user:`SarahAlidoost` +- WFlow diagnostic script: remove manual rechunking (:pull:`2680`) :user:`Peter9192` + +Observational and re-analysis dataset support +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- Extending the HadCRUT5 cmorizer (:pull:`2509`) :user:`LisaBock` +- Cmorize Kadow2020 dataset (:pull:`2513`) 
:user:`LisaBock` +- Cmorize NOAAGlobalTemp dataset (:pull:`2515`) :user:`LisaBock` +- Add option to CMORize ts as tos in ESACCI data (:pull:`2731`) :user:`sloosvel` + +Automatic testing +~~~~~~~~~~~~~~~~~ + +- Add a tool for comparing recipe runs to previous runs (:pull:`2613`) :user:`bouweandela` +- Ignore NCL interface files when comparing recipe runs (:pull:`2673`) :user:`bouweandela` +- Add a short version of recipe deangelis15nat for testing (:pull:`2685`) :user:`katjaweigel` +- Expanded recipe output comparison tool to better handle absolute paths in output (:pull:`2709`) :user:`schlunma` +- Update development infrastructure (:pull:`2663`) :user:`bouweandela` + +Installation +~~~~~~~~~~~~ + +- Removed `package/meta.yaml` and all references to it (:pull:`2612`) :user:`schlunma` + +Improvements +~~~~~~~~~~~~ + +- Improved handling of weights in MLR diagnostics (:pull:`2625`) :user:`schlunma` +- Fixed order of variables in perfmetrics plot of Anav13jclim recipe (:pull:`2706`) :user:`schlunma` +- Added input file sorting to many diagnostics to make output exactly reproducible (:pull:`2710`) :user:`schlunma` +- Removed 'ancestors' attributes before saving netcdf files in emergent constraints diagnostics (:pull:`2713`) :user:`schlunma` + +.. _changelog-v2-5-0: + +v2.5.0 +------ + +Highlights +~~~~~~~~~~ + +- A new recipe to plot generic preprocessor output is now available. For details, see :ref:`recipe_monitor`. +- The CMORization of observational and other datasets has been overhauled. For many datasets, an automatic download script is now available. For details, see :ref:`inputdata_observations` and :ref:`new-cmorizer`. + +Please also note the highlights from the corresponding ESMValCore release :ref:`here`. +Thanks to that ESMValTool has gained the following features: + +- The new preprocessor ``extract_location`` can extract arbitrary locations on the Earth. +- Time ranges can now be extracted using the `ISO 8601 format `_. +- The new preprocessor ``ensemble_statistics`` can calculate arbitrary statistics over all ensemble members of a simulation. + + +This release includes + +Backwards incompatible changes +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- Streamline observations download (:pull:`1657`) `Javier Vegas-Regidor `__. This change removes the ``cmorize_obs`` command which has previously been used to CMORize observations and other datasets. The new command ``esmvaltool data`` provides many new features apart from the CMORization (``esmvaltool data format``), for example, automatic downloading of observational datasets (``esmvaltool data download``). More details on this can be found :ref:`here` and :ref:`here`. See also the usage sketch after this list. +- Dropped Python 3.7 (:pull:`2585`) :user:`schlunma`. ESMValTool v2.5.0 dropped support for Python 3.7. From now on Python >=3.8 is required to install ESMValTool. The main reason for this is that conda-forge dropped support for Python 3.7 for OSX and arm64 (more details are given `here `__). 
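+
+For the ``esmvaltool data`` command described above, a minimal usage sketch
+follows. Only the ``download`` and ``format`` subcommands are named in the
+entry above; the ``list`` subcommand and the dataset name are assumptions for
+illustration, so check ``esmvaltool data --help`` for the exact interface:
+
+.. code-block:: bash
+
+   # List the datasets known to the tool (assumed subcommand; see --help)
+   esmvaltool data list
+
+   # Download the raw observational data (dataset name is an example)
+   esmvaltool data download HadCRUT5
+
+   # CMORize the downloaded data; this replaces the removed cmorize_obs command
+   esmvaltool data format HadCRUT5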
+ +Bug fixes +~~~~~~~~~ + +- Remove the use of `esmvalgroup` channel from the conda install Github Action workflow (:pull:`2420`) :user:`valeriupredoi` +- Ignore .pymon-journal file in test discovery (:pull:`2491`) :user:`zklaus` +- Relocate pytest-monitor outputted database `.pymon` so `.pymon-journal` file should not be looked for by `pytest` (:pull:`2501`) :user:`valeriupredoi` +- Re-establish Python 3.7 compatibility (:pull:`2506`) :user:`zklaus` +- Update intersphinx mapping (:pull:`2531`) :user:`zklaus` +- Fixed `KeyError` in `recipe_ocean_bgc.yml` (:pull:`2540`) :user:`schlunma` +- Corrected ESACCI-SEA-SURFACE-SALINITY from OBS to OBS6 (:pull:`2542`) :user:`axel-lauer` +- Fixed `recipe_kcs.yml` (:pull:`2541`) :user:`schlunma` +- Fix MDER diagnostic regression_stepwise (:pull:`2545`) :user:`axel-lauer` +- Fix for recipe_wenzel16nat (:pull:`2547`) :user:`axel-lauer` +- Fixed `recipe_carvalhais14nat` and removed deprecated use of np.float (:pull:`2558`) :user:`schlunma` +- Fix `recipe_wenzel14jgr` (:pull:`2577`) :user:`remi-kazeroni` +- Fixed various recipes by removing faulty or non-available datasets (:pull:`2563`) :user:`schlunma` +- Remove missing CMIP5 data from 2 recipes (:pull:`2579`) :user:`remi-kazeroni` +- Fix `recipe_seaice` (:pull:`2578`) :user:`remi-kazeroni` +- Fix `recipe_climwip_brunner20esd` (:pull:`2581`) :user:`remi-kazeroni` + +Deprecations +~~~~~~~~~~~~ + +- Remove `--use-feature=2020-resolver` command line option for obsolete pip 2020 solver (:pull:`2493`) :user:`valeriupredoi` +- Renamed vertical regridding schemes in affected recipes (:pull:`2487`) :user:`schlunma` + +Documentation +~~~~~~~~~~~~~ + +- Update release manager for v2.5 (:pull:`2429`) :user:`axel-lauer` +- Mention ENES Climate Analytics service (:pull:`2438`) :user:`bouweandela` +- Add recipe overview page (:pull:`2439`) :user:`bouweandela` +- Fix pointer to Tutorial lesson on preprocessor from 05 to 06 (:pull:`2473`) :user:`valeriupredoi` +- Removed obsolete option `synda-download` from documentation (:pull:`2485`) :user:`schlunma` +- Update CMUG XCH4 docu figure (:pull:`2502`) :user:`axel-lauer` +- Add Python=3.10 to package info, update Circle CI auto install and documentation for Python=3.10 (:pull:`2503`) :user:`schlunma` +- Unify user configuration file (:pull:`2507`) :user:`schlunma` +- Synchronized `config-user.yml` with version from ESMValCore (:pull:`2516`) :user:`schlunma` +- CITATION.cff fix and automatic validation of your citation metadata (:pull:`2517`) :user:`abelsiqueira` +- Add backwards incompatible changes at the top of the release notes draft (:pull:`2431`) :user:`bouweandela` +- Fixed intersphinx mapping of `scipy` (:pull:`2523`) :user:`schlunma` +- Add authors to citation cff (:pull:`2525`) :user:`SarahAlidoost` +- Update documentation on running a recipe (:pull:`2432`) :user:`bouweandela` +- Fix recipe `hydrology/recipe_wflow.yml` (:pull:`2549`) :user:`remi-kazeroni` +- Update `draft_release_notes.py` for new release (:pull:`2553`) :user:`schlunma` +- Added stand with Ukraine badge (:pull:`2565`) :user:`valeriupredoi` +- Updated CREM docu (recipe_williams09climdyn.yml) (:pull:`2567`) :user:`axel-lauer` +- First draft for v2.5.0 changelog (:pull:`2554`) :user:`schlunma` +- Replace nonfunctional Github Actions badge with cool one in README (:pull:`2582`) :user:`valeriupredoi` +- Updated changelog (:pull:`2589`) :user:`schlunma` +- Updated release strategy with current release and upcoming release (:pull:`2597`) :user:`schlunma` +- Increased ESMValTool version to 2.5.0 
(:pull:`2600`) :user:`schlunma` + +Diagnostics +~~~~~~~~~~~ + +- AutoAssess: Add new diagnostic for radiation budget (:pull:`2282`) :user:`Jon-Lillis` +- CMUG Sea Surface Salinity dataset and diagnostic (:pull:`1832`) `Javier Vegas-Regidor `__ +- Recipe with new diagnostics for ESA-CMUG H2O (:pull:`1834`) :user:`katjaweigel` +- Cleaned Schlund et al. (2020) recipe and fixed small bugs in corresponding diagnostic (:pull:`2484`) :user:`schlunma` +- Add ESA CCI LST cmorizer and diagnostic (:pull:`1897`) :user:`morobking` +- XCH4 ESA CMUG diagnostics (subset of the MPQB diagnostics) (:pull:`1960`) :user:`hb326` +- Add support for ESACCI Ocean Color (Chlorophyll) observations (:pull:`2055`) `ulrikaw-cloud `__ +- Updated `recipe_zmnam.yml` with hemisphere selection (:pull:`2230`) :user:`fserva` +- Add recipe and diagnostic scripts to compute figures of D9.4 of ISENES3 (:pull:`2441`) :user:`sloosvel` +- Save resampled climates from KCS diagnostic local_resampling.py (:pull:`2221`) :user:`Emmadd` +- Use years from KCS recipe (:pull:`2223`) :user:`Emmadd` +- Recipe to plot generic output from the preprocessor (:pull:`2184`) `Javier Vegas-Regidor `__ +- Fixed provenance tracking for emergent constraint diagnostics (:pull:`2573`) :user:`schlunma` + +Observational and re-analysis dataset support +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- Ensure dummy data for cmorize_obs_woa test are written to the correct directory (:pull:`2451`) :user:`ehogan` +- Add ESA CCI LST cmorizer and diagnostic (see previous section `Diagnostics`) + +Automatic testing +~~~~~~~~~~~~~~~~~ + +- Run a nightly Github Actions workflow to monitor tests memory per test (configurable for other metrics too) and lists the slowest 100 tests (:pull:`2449`) :user:`valeriupredoi` +- Fix individual pytest runs broken due to missing explicit imports from `iris` and adding a couple missing package markers (:pull:`2455`) :user:`valeriupredoi` +- Add Python=3.10 to Github Actions and switch to Python=3.10 for the Github Action that builds the PyPi package (:pull:`2488`) :user:`valeriupredoi` +- Switch all github actions from miniconda to mambaforge (:pull:`2498`) :user:`zklaus` +- Pin `flake8<4` to have actual FLAKE8 error printed if tests fail and not garbage (:pull:`2492`) :user:`valeriupredoi` +- Implementing conda lock (:pull:`2193`) :user:`valeriupredoi` +- [Docker] Update Docker container builds with correct installations of Julia (:pull:`2530`) :user:`valeriupredoi` +- Update Linux condalock file (various pull requests) github-actions[bot] + +Installation +~~~~~~~~~~~~ + +- Comment out release candidate channel in environment.yml (:pull:`2417`) :user:`zklaus` +- Comment out rc channel in osx environment file (:pull:`2421`) :user:`valeriupredoi` +- Add `python-cdo` as conda-forge dependency in environment files to ensure `cdo` gets used from conda-forge and not pip (:pull:`2469`) :user:`valeriupredoi` +- Install rasterio from conda-forge and avoid issues from python=3.10 (:pull:`2479`) :user:`valeriupredoi` +- Updated dependencies with new ESMValCore version (:pull:`2599`) :user:`schlunma` + +Improvements +~~~~~~~~~~~~ + +- Remove use of OBS and use CMIP instead in `examples/recipe_ncl.yml` (:pull:`2494`) :user:`valeriupredoi` +- Expanded `recipe_preprocessor_test.yml` to account for new `multi_model_statistics` features (:pull:`2519`) :user:`schlunma` +- Updated piControl periods for recipes that use KACE-1-0-G (:pull:`2537`) :user:`schlunma` +- Reduced time range in `recipe_globwat.yml` (:pull:`2548`) :user:`schlunma` +- 
Removed models with missing data from recipe_williams09climdyn.yml (:pull:`2566`) :user:`axel-lauer` +- Restored original versions of `recipe_schlund20esd.yml` and `recipe_meehl20sciadv.yml` (:pull:`2583`) :user:`schlunma` + + +.. _changelog-v2-4-0: + +v2.4.0 +------ + +Highlights +~~~~~~~~~~ + +- ESMValTool is moving from Conda to Mamba as the preferred installation method. This will speed up the + installation and comes with some improvements behind the scenes. + Read more about it at :ref:`Move to Mamba` and in :ref:`the installation guide`. + +Please also note the highlights from the corresponding ESMValCore release :ref:`here`. +Thanks to that ESMValTool has gained the following features: + +- Download any missing data that is available on the ESGF automatically. +- Resume previous runs, reusing expensive pre-processing results. + + +This release includes + +Bug fixes +~~~~~~~~~ + +- Fixed `recipe_meehl20sciadv.yml` for ESMValCore 2.3 (:pull:`2253`) :user:`schlunma` +- Fix provenance of NCL figures created using the log_provenance function (:pull:`2279`) :user:`bouweandela` +- Fix bug in ClimWIP brunner19 recipe when plotting (:pull:`2226`) :user:`lukasbrunner` +- Pin docutils <0.17 to fix sphinx build with rtd theme (:pull:`2312`) :user:`zklaus` +- Fix example recipes (:pull:`2338`) :user:`valeriupredoi` +- Do not add bounds to plev (plev19) in era interim cmorizer (:pull:`2328`) :user:`valeriupredoi` +- Fix problem with pip 21.3 that prevents installation from source (:pull:`2344`) :user:`zklaus` +- Add title to recipe embedded in test_diagnostic_run.py (:pull:`2353`) :user:`zklaus` +- Fix capitalization of obs4MIPs (:pull:`2368`) :user:`bouweandela` +- Specify that areacella is needed for area statistics in the Python example recipe (:pull:`2371`) :user:`bouweandela` +- Enabling variable `obs550lt1aer` in recipes (:pull:`2388`) :user:`remi-kazeroni` +- Update a diagnostic to new Iris version (:pull:`2390`) :user:`katjaweigel` +- Fixed bug in provenance tracking of ecs_scatter.ncl (:pull:`2391`) :user:`schlunma` +- Fix provenance issue in pv_capacity_factor.R (:pull:`2392`) :user:`katjaweigel` +- Remove obsolete write_plots option from R diagnostics (:pull:`2395`) :user:`zklaus` +- Fix arctic ocean diagnostic (:pull:`2397`) :user:`zklaus` +- Fix sea ice drift recipe and script (:pull:`2404`) :user:`sloosvel` +- Adapt diagnostic script to new version of iris (:pull:`2403`) :user:`zklaus` +- Fix ocean multimap (:pull:`2406`) :user:`zklaus` +- Fix diagnostic that uses `xarray`: `dtype` correctly set and harmonize `xarray` and `matplotlib` (:pull:`2409`) :user:`zklaus` +- Deactivate provenance logging for plots in thermodyn toolbox (:pull:`2414`) :user:`zklaus` + +Deprecations +~~~~~~~~~~~~ + +- Removed write_plots and write_netcdf from some NCL diagnostics (:pull:`2293`) :user:`schlunma` +- Fixed provenance logging of all python diagnostics by removing 'plot_file' entry (:pull:`2296`) :user:`schlunma` +- Do not deprecate classes Variable, Variables and Datasets on a specific version (:pull:`2286`) :user:`schlunma` +- Remove obsolete write_netcdf option from ncl diagnostic scripts (:pull:`2387`) :user:`zklaus` +- Remove write plots from ocean diagnostics (:pull:`2393`) :user:`valeriupredoi` +- More removals of instances of `write_plots` from Python diagnostics (appears to be the final removal from Py diags) (:pull:`2394`) :user:`valeriupredoi` + +Documentation +~~~~~~~~~~~~~ + +- List Manuel Schlund as release manager for v2.5 (:pull:`2268`) :user:`bouweandela` +- GlobWat fix download 
links and gdal command (:pull:`2334`) :user:`babdollahi` +- Add titles to recipes authored by `predoi_valeriu` (:pull:`2333`) :user:`valeriupredoi` +- Added titles to recipes maintained by lauer_axel (:pull:`2332`) :user:`axel-lauer` +- Update the documentation of the GRACE CMORizer (:pull:`2349`) :user:`remi-kazeroni` +- Add titles in BSC recipes (:pull:`2351`) :user:`sloosvel` +- Update esmvalcore dependency to 2.4.0rc1 (:pull:`2348`) :user:`zklaus` +- Add titles to recipes maintained by Peter Kalverla (:pull:`2356`) :user:`Peter9192` +- Adding titles to the recipes with maintainer hb326 (:pull:`2358`) :user:`hb326` +- Add title for zmnam as for #2354 (:pull:`2363`) :user:`fserva` +- Added recipe titles to the ocean recipes. (:pull:`2364`) :user:`ledm` +- Update recipe_thermodyn_diagtool.yml - add title (:pull:`2365`) :user:`ValerioLembo` +- Fix provenance of figures of several R diagnostics (:pull:`2300`) :user:`bouweandela` +- Adding titles to Mattia's recipes (:pull:`2367`) :user:`remi-kazeroni` +- Adding titles to wenzel recipes (:pull:`2366`) :user:`hb326` +- Fix formatting of some recipe titles merged from PR 2364 (:pull:`2372`) :user:`zklaus` +- Adding titles to Bjoern's recipes (:pull:`2369`) :user:`remi-kazeroni` +- Add titles to ocean recipes (maintainer Lovato) (:pull:`2375`) :user:`tomaslovato` +- Add titles for three c3s-magic recipes (:pull:`2378`) :user:`zklaus` +- Add title for recipe maintained by Ruth Lorenz (:pull:`2379`) :user:`zklaus` +- Fix toymodel recipe (:pull:`2381`) `Javier Vegas-Regidor `__ +- Added titles for recipes of maintainer `schlund_manuel` (:pull:`2377`) :user:`schlunma` +- Write_plots and titles for deangelis15nat, li17natcc, martin18grl, pv_capacity_factor (:pull:`2382`) :user:`katjaweigel` +- Add titles for some recipes (:pull:`2383`) :user:`zklaus` +- Adding titles for recipes by von Hardenberg and Arnone (:pull:`2384`) :user:`zklaus` +- Last two missing titles (:pull:`2386`) :user:`valeriupredoi` +- Update documentation on downloading data (:pull:`2370`) :user:`bouweandela` +- Fix installation instructions for Julia (:pull:`2335`) :user:`zklaus` +- Fix provenance of Julia example diagnostic (:pull:`2289`) :user:`bouweandela` +- Added notes on use of mamba in the installation documentation chapter (:pull:`2236`) :user:`valeriupredoi` +- Update version number for 2.4.0 release (:pull:`2410`) :user:`zklaus` +- Update release schedule for 2.4.0 (:pull:`2412`) :user:`zklaus` +- Update changelog for 2.4.0 release (:pull:`2411`) :user:`zklaus` + +Diagnostics +~~~~~~~~~~~ + +- Add all available CMIP5 and CMIP6 models to recipe_impact.yml (:pull:`2251`) :user:`bouweandela` +- Add Fig. 
6, 7 and 9 of Bock20jgr (:pull:`2252`) :user:`LisaBock` +- Generalize `recipe_validation*` diagnostic to work with identical control and experiment dataset names (:pull:`2284`) :user:`valeriupredoi` +- Add missing preprocessor to recipe_gier2020bg and adapt to available data (:pull:`2399`) :user:`bettina-gier` +- Removed custom version of `AtmosphereSigmaFactory` in diagnostics (:pull:`2405`) :user:`schlunma` + +Observational and re-analysis dataset support +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- Replace recipe_era5.yml with recipe_daily_era5.yml (:pull:`2182`) :user:`SarahAlidoost` +- Update WOA cmorizer for WOA18 and WOA13v2 (:pull:`1812`) :user:`LisaBock` +- GLODAP v2.2016 ocean data cmorizer (:pull:`2185`) :user:`tomaslovato` +- Updated GCP CMORizer (:pull:`2295`) :user:`schlunma` + +Automatic testing +~~~~~~~~~~~~~~~~~ + +- Add a cylc suite to run all recipes (:pull:`2219`) :user:`bouweandela` +- Retire test with Python 3.6 from full development Github Actions test (:pull:`2229`) :user:`valeriupredoi` +- Remove Python 3.6 tests from GitHub Actions (:pull:`2264`) :user:`valeriupredoi` +- Unpin upper bound for iris (previously was at <3.0.4) (:pull:`2266`) :user:`valeriupredoi` +- Pin latest esmvalcore to allow use of the bugfix release 2.3.1 always (:pull:`2269`) :user:`valeriupredoi` +- Add apt update so Julia gets found and installed by Docker (:pull:`2290`) :user:`valeriupredoi` +- Use mamba for environment update and creation in the Docker container build on DockerHub (:pull:`2297`) :user:`valeriupredoi` +- Docker container experimental - run a full env solve with mamba instead of a conda update (:pull:`2306`) :user:`valeriupredoi` +- Full use of mamba in Github Actions source install test and use generic Python 3.7 (removing the very specific 3.7.10) (:pull:`2287`) :user:`valeriupredoi` +- Replace use of conda with mamba for conda_install test on Circle CI (:pull:`2237`) :user:`valeriupredoi` +- Update circleci configuration (:pull:`2357`) :user:`zklaus` + +Installation +~~~~~~~~~~~~ + +- Remove `mpich` from conda dependencies list (:pull:`2343`) :user:`valeriupredoi` + +Improvements +~~~~~~~~~~~~ + +- Add script for extracting a list of input files from the provenance (:pull:`2278`) :user:`bouweandela` +- Update github actions (:pull:`2360`) :user:`zklaus` +- Removed 'write_plots' from all NCL diagnostics (:pull:`2331`) :user:`axel-lauer` +- Update and modernize `config-user-example.yml` (:pull:`2374`) :user:`valeriupredoi` + + +.. _changelog-v2-3-0: + +v2.3.0 +------ + +This release includes + +Bug fixes +~~~~~~~~~ + +- Indent block to pick up and raise exception if cmorizer data not found (TierX dir is not there) (:pull:`1877`) :user:`valeriupredoi` +- Skip recipe filler tests until we have a new release since GA tests are failing (:pull:`2089`) :user:`valeriupredoi` +- Fixed broken link to contributions in README (:pull:`2102`) :user:`schlunma` +- Fix recipe filler for the case the variable doesn't contain short_name (:pull:`2104`) :user:`valeriupredoi` +- Add fix for iris longitude bug to ClimWIP (:pull:`2107`) :user:`lukasbrunner` +- Update for outdated link to reference Déandreis et al. (2014). 
(:pull:`2076`) :user:`katjaweigel` +- Fixed recipes for ESMValCore 2.3.0 (:pull:`2203`) :user:`schlunma` +- Fix the WFDE5 cmorizer (:pull:`2211`) :user:`remi-kazeroni` +- Fix broken CMORizer log message if no Tier directory exists (:pull:`2207`) :user:`jmrgonza` +- Fix bug in ClimWIP basic test recipe when plotting (:pull:`2225`) :user:`lukasbrunner` +- Fix bug in ClimWIP advanced test recipe when plotting (:pull:`2227`) :user:`lukasbrunner` +- Adjust time range for the `WFDE5` dataset in the `recipe_check_obs.yml` (:pull:`2232`) :user:`remi-kazeroni` +- Fix plot and provenance of recipe_consecdrydays (:pull:`2244`) :user:`bouweandela` + +Documentation +~~~~~~~~~~~~~ + +- Improving the README.md file with a more appealing look and a bit more info (:pull:`2065`) :user:`valeriupredoi` +- Update plot title martin18grl (:pull:`2080`) :user:`katjaweigel` +- Update contribution guidelines (:pull:`2031`) :user:`bouweandela` +- Update links in pull request template to point to latest documentation (:pull:`2083`) :user:`bouweandela` +- Update release schedule (:pull:`2081`) :user:`bouweandela` +- Updates to contribution guidelines (:pull:`2092`) :user:`bouweandela` +- Update documentation for ERA5 with new variables (:pull:`2111`) :user:`lukasbrunner` +- Add OSX installation instructions to docs (:pull:`2115`) :user:`bvreede` +- Instructions to use pre-installed versions on HPC clusters (:pull:`2197`) :user:`remi-kazeroni` +- Add functional Autoassess diagnostics: land surface metrics: permafrost, soil moisture, surface radiation (:pull:`2170`) :user:`valeriupredoi` +- Add citation info in `recipe_eady_growth_rate.yml` (:pull:`2188`) :user:`sloosvel` +- Update version number to 2.3.0 (:pull:`2213`) :user:`zklaus` +- Update release schedule for 2.3.0 (:pull:`2247`) :user:`zklaus` +- Changelog update to v2.3.0 (:pull:`2214`) :user:`zklaus` + +Diagnostics +~~~~~~~~~~~ + +- Added figures 8 and 10 to recipe_bock20jgr.yml (:pull:`2074`) :user:`schlunma` +- Add hydrological forcing comparison recipe (:pull:`2013`) :user:`stefsmeets` +- Added recipe for Meehl et al., Sci. Adv. (2020) (:pull:`2094`) :user:`schlunma` +- Add GlobWat recipe and diagnostic (:pull:`1808`) :user:`babdollahi` +- Add ClimWIP recipe to reproduce Brunner et al. 
2019 (:pull:`2109`) :user:`lukasbrunner` +- Update Climwip recipe to reproduce brunner2020esd (:pull:`1859`) :user:`ruthlorenz` +- Update recipe_thermodyn_diagtool.yml: code improvements and more user options (:pull:`1391`) :user:`ValerioLembo` +- Remove model AWI-CM-1-1-MR from recipe_impact.yml (:pull:`2238`) :user:`bouweandela` +- PV capacity factor for ESMValTool GMD paper (:pull:`2153`) :user:`katjaweigel` + +Observational and re-analysis dataset support +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- Cmorize wfde5 (:pull:`1991`) :user:`mwjury` +- Make cmorizer utils funcs public in utilities.py and add some numpy style docstrings (:pull:`2206`) :user:`valeriupredoi` +- CMORizer for CLARA-AVHRR cloud data (:pull:`2101`) :user:`axel-lauer` +- Update of ESACCI-CLOUD CMORizer (:pull:`2144`) :user:`axel-lauer` + +Automatic testing +~~~~~~~~~~~~~~~~~ + +- Force latest Python in empty environment in conda install CI test (:pull:`2069`) :user:`valeriupredoi` +- Removed imports from private sklearn modules and improved test coverage of custom_sklearn.py (:pull:`2078`) :user:`schlunma` +- Move private _(global)_stock_cube from esmvalcore.preprocessor._regrid to cmorizer (:pull:`2087`) :user:`valeriupredoi` +- Try mamba install esmvaltool (:pull:`2125`) :user:`valeriupredoi` +- Reinstate OSX Github Action tests (:pull:`2110`) :user:`valeriupredoi` +- Pin mpich to avoid default install of 3.4.1 and 3.4.2 with external_0 builds (:pull:`2220`) :user:`valeriupredoi` +- Include test sources in distribution (:pull:`2234`) :user:`zklaus` +- Pin `iris<3.0.4` to ensure we still (sort of) support Python 3.6 (:pull:`2246`) :user:`valeriupredoi` + +Installation +~~~~~~~~~~~~ + +- Fix conda build by skipping documentation test (:pull:`2058`) `Javier Vegas-Regidor `__ +- Update pin on esmvalcore to pick up esmvalcore=2.3.0 (:pull:`2200`) :user:`valeriupredoi` +- Pin Python to 3.9 for development installation (:pull:`2208`) :user:`bouweandela` + +Improvements +~~~~~~~~~~~~ + +- Add EUCP and IS-ENES3 projects to config-references (:pull:`2066`) :user:`Peter9192` +- Fix flake8 tests on CircleCI (:pull:`2070`) :user:`bouweandela` +- Added recipe filler. (:pull:`1707`) :user:`ledm` +- Update use of fx vars to new syntax (:pull:`2145`) :user:`sloosvel` +- Add recipe for climate impact research (:pull:`2072`) :user:`Peter9192` +- Update references "master" to "main" (:pull:`2172`) :user:`axel-lauer` +- Force git to ignore VSCode workspace files (:pull:`2186`) `Javier Vegas-Regidor `__ +- Update to new ESMValTool logo (:pull:`2168`) :user:`axel-lauer` +- Python cmorizers for CDR1 and CDR2 ESACCI H2O (TCWV=prw) data. (:pull:`2152`) :user:`katjaweigel` +- Remove obsolete conda package (closes #2100) (:pull:`2103`) :user:`zklaus` + +.. _changelog-v2-2-0: + +v2.2.0 +------ + +Highlights +~~~~~~~~~~ + +ESMValTool is now using the recently released `Iris 3 `__. +We acknowledge that this change may impact your work, as Iris 3 introduces +several changes that are not backward-compatible, but we think that moving forward is the best +decision for the tool in the long term. 
+ + +This release includes + +Bug fixes +~~~~~~~~~ + +- Bugfix: time weights in time_operations (:pull:`1956`) :user:`axel-lauer` +- Fix issues with bibtex references (:pull:`1955`) :user:`stefsmeets` +- Fix ImportError for `configure_logging` (:pull:`1976`) :user:`stefsmeets` +- Add required functional parameters for extract time in recipe_er5.yml (:pull:`1978`) :user:`valeriupredoi` +- Revert "Fix ImportError for `configure_logging`" (:pull:`1992`) :user:`bouweandela` +- Fix import of esmvalcore _logging module in cmorize_obs.py (:pull:`2020`) :user:`valeriupredoi` +- Fix logging import in cmorize_obs again since last merge was nulled by pre-commit hooks (:pull:`2022`) :user:`valeriupredoi` +- Refactor the functions in derive_evspsblpot due to new iris (:pull:`2023`) :user:`SarahAlidoost` +- Avoid importing private ESMValCore functions in CMORizer (:pull:`2027`) :user:`bouweandela` +- Fix extract_seasons in validation recipe (:pull:`2054`) `Javier Vegas-Regidor `__ + +Deprecations +~~~~~~~~~~~~ + +- Deprecate classes Variable, Variables and Datasets (:pull:`1944`) :user:`schlunma` +- Python 3.9: remove pynio as dependency and replace with rasterio and pin Matplotlib>3.3.1 and pin cartopy>=0.18 (:pull:`1997`) :user:`valeriupredoi` +- Removed write_plots and write_netcdf in some python diagnostics (:pull:`2036`) :user:`schlunma` + +Documentation +~~~~~~~~~~~~~ + +- Update instructions on making a release (:pull:`1867`) :user:`bouweandela` +- Update review.rst (:pull:`1917`) :user:`axel-lauer` +- Add guidance on how to review a pull request (:pull:`1872`) :user:`bouweandela` +- Adding tutorial links to documentation (:pull:`1927`) :user:`hb326` +- Added bibtex file for schlund20jgr (:pull:`1928`) :user:`schlunma` +- Documentation contact added the actual email for the mailing list (:pull:`1938`) :user:`valeriupredoi` +- Make CircleCI badge specific to main branch (:pull:`1831`) :user:`bouweandela` +- Documentation on how to move code from a private repository to a public repository (:pull:`1920`) :user:`hb326` +- Refine pull request review guidelines (:pull:`1924`) :user:`stefsmeets` +- Update release schedule (:pull:`1948`) :user:`zklaus` +- Improve contact info and move to more prominent location (:pull:`1950`) :user:`bouweandela` +- Add some maintainers to some recipes that are missing them (:pull:`1970`) :user:`valeriupredoi` +- Update core team info (:pull:`1973`) :user:`axel-lauer` +- Combine installation from source instructions and add common issues (:pull:`1971`) :user:`bouweandela` +- Update iris documentation URL for sphinx (:pull:`2003`) :user:`bouweandela` +- Fix iris documentation link(s) with new iris3 location on readthedocs (:pull:`2012`) :user:`valeriupredoi` +- Document how to run tests for installation verification (:pull:`1847`) :user:`valeriupredoi` +- List Remi Kazeroni as a code owner and sole merger of CMORizers (:pull:`2017`) :user:`bouweandela` +- Install documentation: mention that we build conda package with python>=3.7 (:pull:`2030`) :user:`valeriupredoi` +- Recipe and documentation update for ERA5-Land. 
(:pull:`1906`) :user:`katjaweigel` +- Update changelog and changelog tool for v2.2.0 (:pull:`2043`) `Javier Vegas-Regidor `__ +- Final update to the changelog for v2.2.0 (:pull:`2056`) `Javier Vegas-Regidor `__ + +Diagnostics +~~~~~~~~~~~ + +- Add mapplot diagnostic to ClimWIP (:pull:`1864`) :user:`lukasbrunner` +- Add the option to weight variable groups in ClimWIP (:pull:`1856`) :user:`lukasbrunner` +- Implementation of ensemble member recognition to the ClimWIP diagnostic (:pull:`1852`) :user:`lukasbrunner` +- Restructure ClimWIP (:pull:`1919`) :user:`lukasbrunner` +- Diagnostic for recipe_eyring13jgr.yml Fig. 12 (:pull:`1922`) :user:`LisaBock` +- Added changes in shared functions necessary for schlund20esd (:pull:`1967`) :user:`schlunma` +- Adding recipe and diagnostics for Gier et al 2020 (:pull:`1914`) :user:`bettina-gier` +- Added recipe, diagnostics and documentation for Schlund et al., ESD (2020) (:pull:`2015`) :user:`schlunma` +- Add PRIMAVERA Eady Growth Rate diagnostic (:pull:`1285`) :user:`sloosvel` +- Implement shape parameter calibration for ClimWIP (:pull:`1905`) :user:`lukasbrunner` + +Observational and re-analysis dataset support +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- Extended ESRL cmorizer (:pull:`1937`) :user:`bettina-gier` +- Cmorizer for GRACE data (:pull:`1694`) :user:`bascrezee` +- Cmorizer for latest ESACCI-SST data (:pull:`1895`) :user:`valeriupredoi` +- Fix longitude in ESRL cmorizer (:pull:`1988`) :user:`bettina-gier` +- Selectively turn off fixing bounds for coordinates during cmorization with utilities.py (:pull:`2014`) :user:`valeriupredoi` +- Cmorize hadcrut5 (:pull:`1977`) :user:`mwjury` +- Cmorize gpcc masking (:pull:`1995`) :user:`mwjury` +- Cmorize_utils_save_1mon_Amon (:pull:`1990`) :user:`mwjury` +- Cmorize gpcc fix (:pull:`1982`) :user:`mwjury` +- Fix flake8 raised by develop test in cmorize_obs_gpcc.py (:pull:`2038`) :user:`valeriupredoi` + +Automatic testing +~~~~~~~~~~~~~~~~~ + +- Switched miniconda conda setup hooks for Github Actions workflows (:pull:`1913`) :user:`valeriupredoi` +- Fix style issue (:pull:`1929`) :user:`bouweandela` +- Fix mlr test with solution that works for CentOS too (:pull:`1936`) :user:`valeriupredoi` +- Temporary deactivation Github Actions on OSX (:pull:`1939`) :user:`valeriupredoi` +- Fix conda installation test on CircleCI (:pull:`1952`) :user:`bouweandela` +- Github Actions: change time for cron job that installs from conda (:pull:`1969`) :user:`valeriupredoi` +- CI upload relevant artifacts for test job (:pull:`1999`) :user:`valeriupredoi` +- Github Actions test that runs with the latest ESMValCore main (:pull:`1989`) :user:`valeriupredoi` +- Introduce python 39 in Github Actions tests (:pull:`2029`) :user:`valeriupredoi` +- Remove test for conda package installation on Python 3.6 (:pull:`2033`) :user:`valeriupredoi` +- Update codacy coverage reporter to fix coverage (:pull:`2039`) :user:`bouweandela` + +Installation +~~~~~~~~~~~~ + +- Simplify installation of R development dependencies (:pull:`1930`) :user:`bouweandela` +- Fix docker build (:pull:`1934`) :user:`bouweandela` +- Use new conda environment for installing ESMValTool in Docker containers (:pull:`1993`) :user:`bouweandela` +- Fix conda build (:pull:`2026`) :user:`bouweandela` + +Improvements +~~~~~~~~~~~~ + +- Allow multiple references for a cmorizer script (:pull:`1953`) :user:`SarahAlidoost` +- Add GRACE to the recipe check_obs (:pull:`1963`) :user:`remi-kazeroni` +- Align ESMValTool to ESMValCore=2.2.0 (adopt iris3, fix environment for new 
Core release) (:pull:`1874`) :user:`stefsmeets` +- Make it possible to use write_plots and write_netcdf from recipe instead of config-user.yml (:pull:`2018`) :user:`bouweandela` +- Revise lisflood and hype recipes (:pull:`2035`) :user:`SarahAlidoost` +- Set version to 2.2.0 (:pull:`2042`) `Javier Vegas-Regidor `__ + +.. _changelog-v2-1-1: + +v2.1.1 +------ + +This release includes + +Improvements +~~~~~~~~~~~~ + +- Fix the conda build on CircleCI (:pull:`1883`) :user:`bouweandela` +- Pin matplotlib to <3.3 and add compilers (:pull:`1898`) :user:`bouweandela` +- Pin esmvaltool subpackages to the same version and build as the esmvaltool conda package (:pull:`1899`) :user:`bouweandela` + +Documentation +~~~~~~~~~~~~~ + +- Release notes v2.1.1 (:pull:`1932`) :user:`valeriupredoi` + +.. _changelog-v2-1-0: + +v2.1.0 +------ + +This release includes + +Diagnostics +~~~~~~~~~~~ + +- Add extra steps to diagnostic to make output of hydrology/recipe_lisflood.yml usable by the LISFLOOD model (:pull:`1737`) :user:`JaroCamphuijsen` +- Recipe to reproduce the 2014 KNMI Climate Scenarios (kcs). (:pull:`1667`) :user:`Peter9192` +- Implement the climwip weighting scheme in a recipe and diagnostic (:pull:`1648`) :user:`JaroCamphuijsen` +- Remove unreviewed autoassess recipes (:pull:`1840`) :user:`valeriupredoi` +- Changes in shared scripts for Schlund et al., JGR: Biogeosciences, 2020 (:pull:`1845`) :user:`schlunma` +- Updated derivation test recipe (:pull:`1790`) :user:`schlunma` +- Support for multiple model occurrence in perf main (:pull:`1649`) :user:`bettina-gier` +- Add recipe and diagnostics for Schlund et al., JGR: Biogeosciences, 2020 (:pull:`1860`) :user:`schlunma` +- Adjust recipe_extract_shape.yml to recent changes in the example diagnostic.py (:pull:`1880`) :user:`bouweandela` + +Documentation +~~~~~~~~~~~~~ + +- Add pip installation instructions (:pull:`1783`) :user:`bouweandela` +- Add installation instruction for R and Julia dependencies to pip install (:pull:`1787`) :user:`bouweandela` +- Avoid autodocsumm 0.2.0 and update documentation build dependencies (:pull:`1794`) :user:`bouweandela` +- Add more information on working on cluster attached to ESGF node (:pull:`1821`) :user:`bouweandela` +- Add release strategy to community documentation (:pull:`1809`) :user:`zklaus` +- Update esmvaltool run command everywhere in documentation (:pull:`1820`) :user:`bouweandela` +- Add more info on documenting a recipe (:pull:`1795`) :user:`bouweandela` +- Improve the Python example diagnostic and documentation (:pull:`1827`) :user:`bouweandela` +- Improve description of how to use draft_release_notes.py (:pull:`1848`) :user:`bouweandela` +- Update changelog for release 2.1 (:pull:`1886`) :user:`valeriupredoi` + +Improvements +~~~~~~~~~~~~ + +- Fix R installation in WSL (:pull:`1789`) `Javier Vegas-Regidor `__ +- Add pre-commit for linting/formatting (:pull:`1796`) :user:`stefsmeets` +- Speed up tests on CircleCI and use pytest to run them (:pull:`1804`) :user:`bouweandela` +- Move pre-commit excludes to top-level and correct order of lintr and styler (:pull:`1805`) :user:`stefsmeets` +- Remove isort setup to fix formatting conflict with yapf (:pull:`1815`) :user:`stefsmeets` +- GitHub Actions (:pull:`1806`) :user:`valeriupredoi` +- Fix yapf-isort import formatting conflict (:pull:`1822`) :user:`stefsmeets` +- Replace vmprof with vprof as the default profiler (:pull:`1829`) :user:`bouweandela` +- Update ESMValCore v2.1.0 requirement (:pull:`1839`) `Javier Vegas-Regidor `__ +- Pin iris to version 2 
(:pull:`1881`) :user:`bouweandela` +- Pin eccodes to not use eccodes=2.19.0 for cdo to work fine (:pull:`1869`) :user:`valeriupredoi` +- Increase version to 2.1.0 and add release notes (:pull:`1868`) :user:`valeriupredoi` +- Github Actions Build Packages and Deploy tests (conda and PyPi) (:pull:`1858`) :user:`valeriupredoi` + +Observational and re-analysis dataset support +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- Added CMORizer for Scripps-CO2-KUM (:pull:`1857`) :user:`schlunma` + +.. _changelog-v2-0-0: + +v2.0.0 +------ + +This release includes + +Bug fixes +~~~~~~~~~ + +- Fix pep8-naming errors and fix zmnam diagnostic (:pull:`1702`) :user:`bouweandela` +- Fix keyword argument in cmorize_obs (:pull:`1721`) :user:`mattiarighi` +- Fixed JMA-TRANSCOM CMORizer (:pull:`1735`) :user:`schlunma` +- Fix bug in extract_doi_value (:pull:`1734`) :user:`bascrezee` +- Fix small errors in the arctic_ocean diagnostic (:pull:`1722`) :user:`koldunovn` +- Flatten ancestor lists for diag_spei.R and diag_spi.R. (:pull:`1745`) :user:`katjaweigel` +- Fix for recipe_ocean_ice_extent.yml (:pull:`1744`) :user:`mattiarighi` +- Fix recipe_combined_indices.yml provenance (:pull:`1746`) `Javier Vegas-Regidor `__ +- Fix provenance in recipe_multimodel_products (:pull:`1747`) `Javier Vegas-Regidor `__ +- Exclude FGOALS-g2 due to ESMValCore issue #728 (:pull:`1749`) :user:`mattiarighi` +- Fix recipe_modes_of_variability (:pull:`1753`) `Javier Vegas-Regidor `__ +- Flatten lists for ancestors for hyint to prevent nested lists. (:pull:`1752`) :user:`katjaweigel` +- Fix bug in cmorize_obs_eppley_vgpm_modis.py (#1729) (:pull:`1759`) :user:`tomaslovato` +- Correct mip for clltkisccp in example derive preprocessor recipe (:pull:`1768`) :user:`bouweandela` +- Update date conversion in recipe_hype.yml (:pull:`1769`) :user:`bouweandela` +- Fix recipe_correlation.yml (:pull:`1767`) :user:`bouweandela` +- Add attribute positive: down to plev coordinate in ERA-Interim CMORizer (:pull:`1771`) :user:`bouweandela` +- Fix sispeed in recipe_preprocessor_derive_test (:pull:`1772`) `Javier Vegas-Regidor `__ +- Fix extreme events and extreme index ancestors (:pull:`1774`) :user:`katjaweigel` +- Correct date in output filenames of ERA5 CMORizer recipe (:pull:`1773`) :user:`bouweandela` +- Exclude WOA from multi-model stats in recipe_ocean_bgc (:pull:`1778`) :user:`mattiarighi` + +Diagnostics +~~~~~~~~~~~ + +- Enhancement of the hyint recipe to include etccdi indices (:pull:`1133`) :user:`earnone` +- Add lazy regridding for wflow diagnostic (:pull:`1630`) :user:`bouweandela` +- Miles default domains to include lat=0 (:pull:`1626`) :user:`jhardenberg` +- Miles: selection of reference dataset based on experiment (:pull:`1632`) :user:`jhardenberg` +- New recipe/diagnostic: recipe_li17natcc.yml for Axels GMD Paper (:pull:`1567`) :user:`katjaweigel` +- New recipe/diagnostics: recipe_deangelis_for_gmdpart4.yml for Axels GMD Paper (:pull:`1576`) :user:`katjaweigel` +- EWaterCycle: Add recipe to prepare input for LISFLOOD (:pull:`1298`) :user:`sverhoeven` +- Use area weighted regridding in wflow diagnostic (:pull:`1643`) :user:`bouweandela` +- Workaround for permetrics recipe until Iris3 (:pull:`1674`) :user:`mattiarighi` +- C3S_511_MPQB_bas-features (:pull:`1465`) :user:`bascrezee` +- Additional Land perfmetrics (:pull:`1641`) :user:`bettina-gier` +- Necessary diagnostic from eyring06jgr for the release of version2 (:pull:`1686`) :user:`hb326` +- Drought characteristics based on Martin2018 and SPI for gmd paper (:pull:`1689`) 
:user:`katjaweigel` +- Additional features and bugfixes for recipe anav13clim (:pull:`1723`) :user:`bettina-gier` +- Gmd laueretal2020 revisions (:pull:`1725`) :user:`axel-lauer` +- Wenzel16nature (:pull:`1692`) :user:`zechlau` +- Add mask albedolandcover (:pull:`1673`) :user:`bascrezee` +- IPCC AR5 fig. 9.3 (seasonality) (:pull:`1726`) :user:`axel-lauer` +- Added additional emergent constraints on ECS (:pull:`1585`) :user:`schlunma` +- A diagnostic to evaluate the turnover times of land ecosystem carbon (:pull:`1395`) `koir-su `__ +- Removed multi_model_statistics step in recipe_oceans_example.yml as a workaround (:pull:`1779`) :user:`valeriupredoi` + +Documentation +~~~~~~~~~~~~~ + +- Extend getting started instructions to obtain config-user.yml (:pull:`1642`) :user:`Peter9192` +- Extend information about native6 support on RTD (:pull:`1652`) :user:`Peter9192` +- Update citation of ESMValTool paper in the doc (:pull:`1664`) :user:`mattiarighi` +- Updated references to documentation (now docs.esmvaltool.org) (:pull:`1679`) :user:`axel-lauer` +- Replace dead link with ESGF link. (:pull:`1681`) :user:`mattiarighi` +- Add all European grants to Zenodo (:pull:`1682`) :user:`bouweandela` +- Update Sphinx to v3 or later (:pull:`1685`) :user:`bouweandela` +- Small fix to number of models in ensclus documentation (:pull:`1691`) :user:`jhardenberg` +- Move draft_release_notes.py from ESMValCore to here and update (:pull:`1701`) :user:`bouweandela` +- Improve the installation instructions (:pull:`1634`) :user:`valeriupredoi` +- Improve description of how to implement provenance in diagnostic (:pull:`1750`) :user:`SarahAlidoost` +- Update command line interface documentation and add links to ESMValCore configuration documentation (:pull:`1776`) :user:`bouweandela` +- Documentation on how to find shapefiles for hydrology recipes (:pull:`1777`) :user:`JaroCamphuijsen` + +Improvements +~~~~~~~~~~~~ + +- Pin flake8<3.8.0 (:pull:`1635`) :user:`valeriupredoi` +- Update conda package path in more places (:pull:`1636`) :user:`bouweandela` +- Remove curly brackets around issue number in pull request template (:pull:`1637`) :user:`bouweandela` +- Fix style issue in test (:pull:`1639`) :user:`bouweandela` +- Update Codacy badges (:pull:`1662`) :user:`bouweandela` +- Support extra installation methods in R (:pull:`1360`) `Javier Vegas-Regidor `__ +- Add ncdf4.helpers package as a dependency again (:pull:`1678`) :user:`bouweandela` +- Speed up conda installation (:pull:`1677`) :user:`bouweandela` +- Update CMORizers and recipes for ESMValCore v2.0.0 (:pull:`1699`) :user:`SarahAlidoost` +- Update setup.py for PyPI package (:pull:`1700`) :user:`bouweandela` +- Cleanup recipe headers before the release (:pull:`1740`) :user:`mattiarighi` +- Add colortables as esmvaltool subcommand (:pull:`1666`) `Javier Vegas-Regidor `__ +- Increase version to v2.0.0 (:pull:`1756`) :user:`bouweandela` +- Update job script (:pull:`1757`) :user:`mattiarighi` +- Read authors and description from .zenodo.json (:pull:`1758`) :user:`bouweandela` +- Update docker recipe to install from source (:pull:`1651`) `Javier Vegas-Regidor `__ + +Observational and re-analysis dataset support +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- Cmorize aphro ma (:pull:`1555`) :user:`mwjury` +- Respectable testing for cmorizers/obs/utilities.py and cmorizers/obs/cmorize_obs.py (:pull:`1517`) :user:`valeriupredoi` +- Fix start year in recipe_check_obs (:pull:`1638`) :user:`mattiarighi` +- Cmorizer for the PERSIANN-CDR precipitation data (:pull:`1633`) 
:user:`hb326` +- Cmorize eobs (:pull:`1554`) :user:`mwjury` +- Update download cds satellite lai fapar (:pull:`1654`) :user:`bascrezee` +- Added monthly mean vars (ta, va, zg) to era5 cmorizer via recipe (:pull:`1644`) :user:`egalytska` +- Make format time check more flexible (:pull:`1661`) :user:`mattiarighi` +- Exclude od550lt1aer from recipe_check_obs.yml (:pull:`1720`) :user:`mattiarighi` +- PERSIANN-CDR cmorizer update: adding the capability to save monthly mean files (:pull:`1728`) :user:`hb326` +- Add standard_name attribute to lon and lat in cmorize_obs_esacci_oc.py (:pull:`1760`) :user:`tomaslovato` +- Allow for incomplete months on daily frequency in cmorizer ncl utilities (:pull:`1754`) :user:`mattiarighi` +- Fix AURA-TES cmorizer (:pull:`1766`) :user:`mattiarighi` + +.. _changelog-v2-0-0b4: + +v2.0.0b4 +-------- + +This release includes + +Bug fixes +~~~~~~~~~ + +- Fix HALOE plev coordinate (:pull:`1590`) :user:`mattiarighi` +- Fix tro3 units in HALOE (:pull:`1591`) :user:`mattiarighi` + +Diagnostics +~~~~~~~~~~~ + +- Applicate sea ice negative feedback (:pull:`1299`) `Javier Vegas-Regidor `__ +- Add Russell18jgr ocean diagnostics (:pull:`1592`) :user:`bouweandela` +- Refactor marrmot recipe and diagnostic to use ERA5 daily data made by new cmorizer (:pull:`1600`) :user:`SarahAlidoost` +- In recipe_wflow, use daily ERA5 data from the new cmorizer. (:pull:`1599`) :user:`Peter9192` +- In wflow diagnostic, calculate PET after(!) interpolation and lapse rate correction (:pull:`1618`) :user:`jeromaerts` +- Fixed wenz14jgr (:pull:`1562`) :user:`zechlau` +- Update portrait_plot.ncl (:pull:`1625`) :user:`bettina-gier` + +Documentation +~~~~~~~~~~~~~ + +- Restructure documentation (:pull:`1587`) :user:`bouweandela` +- Add more links to documentation (:pull:`1595`) :user:`bouweandela` +- Update links in readme (:pull:`1598`) :user:`bouweandela` +- Minor improvements to installation documentation (:pull:`1608`) :user:`bouweandela` +- Add info for new mailing list to documentation. (:pull:`1607`) :user:`bjoernbroetz` +- Update making a release documentation (:pull:`1627`) :user:`bouweandela` + +Improvements +~~~~~~~~~~~~ + +- Avoid broken pytest-html plugin (:pull:`1583`) :user:`bouweandela` +- Remove reference section in config-references.yml (:pull:`1545`) :user:`SarahAlidoost` +- Various improvements to development infrastructure (:pull:`1570`) :user:`bouweandela` +- Install scikit-learn from conda, remove libunwind as a direct dependency (:pull:`1611`) :user:`valeriupredoi` +- Create conda subpackages and enable tests (:pull:`1624`) :user:`bouweandela` + +Observational and re-analysis dataset support +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- Cmorizer for HALOE (:pull:`1581`) :user:`mattiarighi` +- Add CMORizer for CT2019 (:pull:`1604`) :user:`schlunma` + +For older releases, see the release notes on https://github.com/ESMValGroup/ESMValTool/releases. diff --git a/doc/sphinx/source/codedoc2/esmvaltool.cmor.rst b/doc/sphinx/source/codedoc2/esmvaltool.cmor.rst deleted file mode 100644 index 6b23b5e1d9..0000000000 --- a/doc/sphinx/source/codedoc2/esmvaltool.cmor.rst +++ /dev/null @@ -1,19 +0,0 @@ -CMOR functions -============== - -.. automodule:: esmvaltool.cmor - -Checking compliance -------------------- - -.. automodule:: esmvaltool.cmor.check - -Fixing issues -------------- - -.. automodule:: esmvaltool.cmor.fix - -Using CMOR tables ------------------ - -.. 
automodule:: esmvaltool.cmor.table diff --git a/doc/sphinx/source/codedoc2/esmvaltool.diag_scripts.rst b/doc/sphinx/source/codedoc2/esmvaltool.diag_scripts.rst deleted file mode 100644 index 70c1c0526a..0000000000 --- a/doc/sphinx/source/codedoc2/esmvaltool.diag_scripts.rst +++ /dev/null @@ -1,8 +0,0 @@ -Diagnostic scripts -================== - -Various diagnostic packages exist as part of ESMValTool. - -.. automodule:: esmvaltool.diag_scripts - -.. automodule:: esmvaltool.diag_scripts.ocean diff --git a/doc/sphinx/source/codedoc2/esmvaltool.diag_scripts.shared.rst b/doc/sphinx/source/codedoc2/esmvaltool.diag_scripts.shared.rst deleted file mode 100644 index 4069e0f6bd..0000000000 --- a/doc/sphinx/source/codedoc2/esmvaltool.diag_scripts.shared.rst +++ /dev/null @@ -1,9 +0,0 @@ -Shared diagnostic script code -============================= - -.. automodule:: esmvaltool.diag_scripts.shared - -Plotting --------- - -.. automodule:: esmvaltool.diag_scripts.shared.plot diff --git a/doc/sphinx/source/codedoc2/esmvaltool.preprocessor.rst b/doc/sphinx/source/codedoc2/esmvaltool.preprocessor.rst deleted file mode 100644 index aec8fda022..0000000000 --- a/doc/sphinx/source/codedoc2/esmvaltool.preprocessor.rst +++ /dev/null @@ -1,4 +0,0 @@ -Preprocessor functions -====================== - -.. automodule:: esmvaltool.preprocessor diff --git a/doc/sphinx/source/codedoc2/esmvaltool.rst b/doc/sphinx/source/codedoc2/esmvaltool.rst deleted file mode 100644 index a2c392d4be..0000000000 --- a/doc/sphinx/source/codedoc2/esmvaltool.rst +++ /dev/null @@ -1,13 +0,0 @@ -ESMValTool Code API Documentation -================================= - -ESMValTool is mostly used as a commandline tool. However, it is also possibly to use (parts of) EsmValTool as a -library. This section documents the public API of ESMValTool. - -.. toctree:: - - esmvaltool.preprocessor - esmvaltool.cmor - esmvaltool.diag_scripts.shared - esmvaltool.diag_scripts - esmvaltool.diag_scripts.ocean diff --git a/doc/sphinx/source/community/backward_compatibility.rst b/doc/sphinx/source/community/backward_compatibility.rst new file mode 100644 index 0000000000..8ea68acaf0 --- /dev/null +++ b/doc/sphinx/source/community/backward_compatibility.rst @@ -0,0 +1,275 @@ +.. _backward-compatibility-policy: + +ESMValTool policy on backward compatibility +=========================================== + +Motivation +---------- + +Development of recipes or conducting project-related work may require a +rather long period of time during which new versions of the ESMValTool +might become available. For a good user experience and a seamless +workflow, users and developers need to know before upgrading to a new +version if and how their work might be affected (backward +compatibility). This includes, for instance, information about changes +to technical features such as syntax of recipes and configuration files, +and interfaces of shared functions, but also changes that affect the +results of an ESMValTool run, e.g. modification of algorithms or changes +to the order of operators. It is therefore essential that users and +developers have the best advice on how and when to upgrade to new +versions. + +While trying to minimise the impact of new code developments on users +and developers by maintaining backward compatibility where possible, +this cannot always be guaranteed. A very restrictive policy might delay +the ESMValTool development and make it more complex for developers to +contribute. 
+ +This document outlines the key principles of an updated ESMValTool policy +on backward compatibility. + +Definitions +----------- + +**Release:** A numbered version of ESMValCore / ESMValTool that has been +released to the community, e.g. 2.4.0. This policy relates only to +backward compatibility of releases, not to interim revisions of the main +branch. Release numbers are of the format x.y.z, where: + +- x indicates a major release +- y indicates a minor release +- z indicates a patch release + +**Backward-incompatible change:** A change in ESMValCore or ESMValTool that causes a +recipe to no longer run successfully (a *breaking change*), or which +results in scientifically significant changes in results (a *science +change*). + +**Breaking change:** A change which causes a previously working recipe +to no longer run successfully. + +**Science change:** A change that alters scientific results. We do not +formally distinguish between trivial science changes (e.g. from changes +in the order of calculations) and more significant changes that would +affect interpretation, although the detail that we communicate will +share any understanding that we have regarding expected impact. + +**Benign third-party dependency changes:** A change over which we have +no control, but which we believe will only have trivial technical +impacts (such as a change in font). Such changes are outside of the +scope of this policy, though we will communicate about those we are +aware of. + +**Developer of backward-incompatible change:** For the purpose of this +policy, developer is the individual that is responsible for the pull +request (PR) that is not backward compatible. + +**Recipe developer:** Someone who is developing a recipe that is not +(yet) integrated into the repository. + +**Recipe user:** For the purpose of this policy, a *recipe user* is +anyone who runs a recipe using a *release* of ESMValTool. In this +context, someone can be both a *recipe developer* and a *recipe user*, +but they perform different activities in each capacity. + +**Recipe maintainer:** First contact point for *integrated recipes* in +case of problems with that recipe (see also :ref:`Maintaining a recipe`). + +**Integrated recipes:** Recipes that are contained within the main +branch of the ESMValTool repository, and can therefore be updated by any +developer in line with the above guidance. Note that the recipe can be +updated by someone other than the original author. + +**User recipes:** Recipes developed by any developer outside of the main +branch of the repository (i.e. on a dev/feature branch or outside the +repository completely), and therefore cannot be updated by anyone else. + +Scope +----- + +The ESMValTool and ESMValCore policy on backward compatibility aims at balancing two +competing needs: the occasional need of improvements or maintenance to +break backward compatibility and the need for stability for existing +users and developers. The following aspects are covered by this policy: + +- Key principles and approaches to backward compatibility +- Guidelines and requirements for *developers of backward-incompatible + changes* +- Communication with users and developers about *backward-incompatible + changes* + +Not within the scope of this policy are: + +- Versioning scheme of ESMValTool +- Breakage of recipes due to changes in input data or dependencies. + This is covered by the :ref:`broken recipe policy`. 
+ +Expectations of developers, users & funders +------------------------------------------- + +Stakeholders and their expectations and aims: + +Projects / Funders + +- Aim to facilitate scientific discovery +- Expect deliverables, e.g. new features/recipes +- Expect reproducible results + +*Recipe users* + +- Expect the recipe to work +- Expect the recipe to be easy to run +- Expect reproducible results +- Expect easy installation of ESMValTool + +*Recipe developers* + +- Develop recipes +- Expect their recipe to keep working with every new *release* of + ESMValCore +- Expect ESMValCore bugfixes and new features to become available + quickly +- Expect reproducible results +- Expect easy installation of ESMValTool + +Core developers and *recipe maintainers* + +- Fix bugs +- Add ESMValCore features requested by *recipe developers* +- Try to accommodate ESMValCore features contributed to by *recipe + developers* +- Maintain existing recipes +- Add new recipes +- Try to help (other) *recipe developers* with contributing their + recipe +- Try to make installation as easy as possible + +There is a tension between making new features available and keeping +everything as is. New features facilitate scientific discovery because +they enable *recipe developers* to do new research (e.g. analyse more +data, new data, or perform a different analysis). Ensuring that every +recipe ever made works with every new feature is technically a lot of +work, more than we have funding for. Therefore we need to make sure that +new features are added regularly, but we respect the timescale on which +*recipe developers* work when removing outdated features. Writing a +paper and getting it published may take up to a year, so this seems a +good timescale for larger changes. For changes that only affect a few +users, shorter timescales could be acceptable. It is also good to note +that we are part of a large software ecosystem (ESMValTool currently +depends on over 500 different software packages), so we may not always +be able to control at what pace changes are made to the software that we +depend upon. + +Two-way communication about new and removed features is needed to make +this work. This requires active involvement from both the people +developing the new features and the *recipe developers*. ESMValTool core +developers and ESMValCore core developers need to make sure they clearly +communicate changes. In the first place, this is done by writing good +descriptions in issues and pull requests on GitHub, but some of this +material also makes it to the changelog (where the GitHub pull requests +are linked). It is highly recommended to communicate a relevant +selection (e.g. important new, scheduled for removal, and removed +features) also by other means, to ensure we reach as many people +potentially affected as possible (see :ref:`Guidance on handling +*backward-incompatible changes*` +section below). +We organize :ref:`monthly community ` meetings where +*recipe developers* can learn about the latest developments and everyone is +welcome to join, ask questions, and provide feedback. + +To meet the needs of users and funders, we should take reproducibility +of older results seriously, but this should not hold us back from +advancing our tools. We can support this by uploading a well tested +container image to an archive that provides a DOI and by providing clear +instructions on how to use such containers. 
+ +Helping developers to upgrade +----------------------------- + +*Recipe users* of ESMValTool should be able to successfully run +*integrated recipes* using a *release*, since all +*backward-incompatible changes* introduced between *releases* will have +been fixed before the *release* is created. Please note the +:ref:`broken recipe policy`. + +However, *recipe developers* working on *user recipes* must be provided +with information to enable them to adapt their code to resolve issues +related to *backward-incompatible changes* when *backward-incompatible +changes* are introduced to the main branch / when a *release* of +ESMValTool is created. + +.. _guidance-on-backward-incompatiable-changes: + +Guidance on handling *backward-incompatible changes* +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +As well as helping users to handle *backward-incompatible changes*, the +policy and surrounding tools must help developers avoid making +*backward-incompatible changes*. Not many ideas are developed on this yet, +but components should include: + +- Testing; *backward-incompatible changes* should be discovered as + early in the development process as possible. This motivates + continued investment in automated testing. + To discover *backward-incompatible changes* early on in the development cycle, + every night a selection of recipes is run on + `CircleCI `__. + A recipe can be added to the test suite by adding it to the directory + `esmvaltool/recipes/testing `__. + Only add recipes that require a small amount of data, i.e. considerably less + than a gigabyte. +- Guidance on how to minimise the likelihood of introducing + *backward-incompatible changes* and how to use deprecation warnings + when needed (see :ref:`developer guidance `). +- :ref:`Instructions on how to provide text for the release notes ` + to assist *recipe developers* to adapt their recipe in light of the + *backward-incompatible change* +- General instructions for *recipe developers* working on *user + recipes* to enable them to adapt their code related to + *backward-incompatible changes* (see `ESMValTool_Tutorial: issue + #263 `__). +- The developer or reviewer must tag the core development team to + notify them of the *backward-incompatible change*, and give at least + 2 weeks for objections to be raised before merging to the main + branch. If a strong objection is raised the backward-incompatible + change should not be merged until the objection is resolved. + + +.. _guidance-on-releasing-backward-incompatible-changes: + +Guidance on releasing *backward-incompatible changes* +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +During the *release* process, the following information must be +provided: + +- **Release notes:** The *release* notes are already documented in the + :ref:`ESMValTool Changelog ` and + :ref:`ESMValCore Changelog `, and + “*backward-incompatible changes*” is the first section after + “Highlights”. + + - **backward-incompatible changes:** This section must include + clear instructions detailing how a *recipe developer* should adapt + their code for each item in this section, whether the adapted code + would introduce a *science change*, and the list of affected or + fixed *integrated recipes* that had to be updated due to the + *backward-incompatible changes*, if applicable (to provide + further examples to *recipe developers* working on *user recipes* + of how to adapt code). 
+ - **Developer guidance:** *Developers* *of backward-incompatible + changes* must: + + - write and include the information required for the + “*backward-incompatible changes*” section in the PR that + introduces the *backward-incompatible change* + - share details of the *backward-incompatible change* at the + next monthly ESMValTool community meeting + + - **Communication:** The *release* notes must be shared with the + community (for example, via the :ref:`mailing-list` and the + `Community `__ + repository) at the point the first *release* candidate is made, + highlighting the “*backward-incompatible changes*” section. The + User Engagement Team should organise the communication of new + *releases* together with the :ref:`release_manager`. diff --git a/doc/sphinx/source/community/broken_recipe_policy.rst b/doc/sphinx/source/community/broken_recipe_policy.rst new file mode 100644 index 0000000000..f9872a4ece --- /dev/null +++ b/doc/sphinx/source/community/broken_recipe_policy.rst @@ -0,0 +1,22 @@ +.. _broken-recipe-policy: + +Broken recipe policy +==================== + +Recipes might stop working for different reasons. Among those are, for instance, withdrawal of datasets +used by the recipe (i.e. the recipe contains data that are no longer publicly available), backward incompatible development +of the ESMValTool including new features or retiring old ones as well as +changes to Python or used dependencies such as library functions. +In such cases, the :ref:`Maintaining a recipe` is contacted by the technical lead development team (`@ESMValGroup/technical-lead-development-team`_) to find +a solution, fixing the affected recipe and checking the scientific output after applying the fix. If no recipe maintainer is +available, such recipes will be flagged by the release manager during the +:ref:`Release schedule and procedure` as "broken". +For this, the affected recipe will be added to the :ref:`list of broken recipes `, together with the version +number of the last known release in which the recipe was still working. +If a recipe continues to be broken for three releases of the ESMValTool (about one year) and no recipe maintainer could be found +during this time, the affected recipe and diagnostics will be retired. This means the recipe and diagnostic code are +removed from the ESMValTool main branch by the release manager and thus will not be included in future releases. +Only the scientific documentation of the recipe (and diagnostics) will be kept in the user and developer guide with an +additional note and link to the last release in which the recipe was still fully functional. + +.. _`@ESMValGroup/technical-lead-development-team`: https://github.com/orgs/ESMValGroup/teams/technical-lead-development-team diff --git a/doc/sphinx/source/community/code_documentation.rst b/doc/sphinx/source/community/code_documentation.rst new file mode 100644 index 0000000000..1c211daf39 --- /dev/null +++ b/doc/sphinx/source/community/code_documentation.rst @@ -0,0 +1,532 @@ +.. _contributing_code_docs: + +Contributing code and documentation +=================================== + +If you would like to contribute a new diagnostic and recipe or a new feature, +please discuss your idea with the development team before getting started, to +avoid double work and/or disappointment later. +A good way to do this is to open an +`issue on GitHub `__. +This is also a good way to get help with the implementation. + +We value the time you invest in contributing and strive to make the process as +easy as possible. 
+ +If you have suggestions for improving the process of contributing, please do +not hesitate to propose them, for example by starting a discussion on our +`discussions page `__. + +Getting started +--------------- + +See :ref:`install_from_source` for instructions on how to set up a development +installation. + +New development should preferably be done in the +`ESMValTool `__ +GitHub repository. +However, for scientists requiring confidentiality, private repositories are +available; see :ref:`private_repository` for more information. +The default git branch is ``main``. Use +this branch to create a new feature branch from and make a pull request +against. +This +`page `__ +offers a good introduction to git branches, but it was written for +BitBucket while we use GitHub, so replace the word BitBucket with GitHub +whenever you read it. + +It is recommended that you open a `draft pull +request `__ +early, as this will cause :ref:`CircleCI to run the unit tests `, +:ref:`Codacy to analyse your code `, and +:ref:`readthedocs to build the documentation `. +It’s also easier to get help from other developers if +your code is visible in a pull request. + +Please review the results of the automatic checks below your pull request. +If one of the tests shows a red cross instead of a green checkmark, please click +the ``Details`` link behind the failing check and try to solve the issue. +Ask `@ESMValGroup/tech-reviewers`_ for help if you do not know how to fix the +failing check. +Note that these automated checks make it easier to +:ref:`review code `, but they are not flawless. +Preferably, the Codacy code quality checks pass; however, a few remaining +hard-to-solve Codacy issues are still acceptable. +If you suspect Codacy may be wrong, please ask by commenting on your pull +request. + +.. _pull_request_checklist: + +Checklist for pull requests +--------------------------- + +To clearly communicate up front what is expected from a pull request, we have +the following checklist. +Please try to do everything on the list before requesting a review. +If you are unsure about something on the list, please ask the +`@ESMValGroup/tech-reviewers`_ or `@ESMValGroup/science-reviewers`_ for help +by commenting on your (draft) pull request or by starting a new +`discussion `__. + +In the ESMValTool community we use +:ref:`pull request reviews ` to ensure all code and +documentation contributions are of good quality. +The icons indicate whether the item will be checked during the +:ref:`🛠 Technical review ` or +:ref:`🧪 Scientific review `. + +All pull requests +~~~~~~~~~~~~~~~~~ + +- 🛠 :ref:`The pull request has a descriptive title ` +- 🛠 Code is written according to the :ref:`code quality guidelines ` +- 🛠 Documentation_ is available +- 🛠 Tests_ run successfully +- 🛠 The :ref:`list of authors ` is up to date +- 🛠 Changed dependencies are :ref:`added or removed correctly ` +- 🛠 The :ref:`checks shown below the pull request ` are successful + +If a pull request introduces a change that causes a recipe to +no longer run successfully (*breaking change*), or which results in scientifically +significant changes in results (*science change*), additional requirements +defined in the :ref:`backward compatibility policy` apply. +These include in particular: + +- 🛠 Instructions for the release notes to assist *recipe + developers* to adapt their recipe in light of the *backward-incompatible change* + available. 
+ +- 🛠 If applicable, instructions for *recipe developers* working on *user + recipes* to enable them to adapt their code related to + *backward-incompatible changes* available (see `ESMValTool_Tutorial: issue + #263 `__). +- 🛠 Core development team tagged to notify them of the + *backward-incompatible change*, and given at least + 2 weeks for objections to be raised before merging to the main + branch. If a strong objection is raised, the backward-incompatible + change should not be merged until the objection is resolved. +- 🛠 Information required for the “*backward-incompatible changes*” + section in the PR that introduces the *backward-incompatible change* + available. + +New or updated recipe and/or diagnostic +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +See :ref:`new-diagnostic` for detailed instructions. + +- 🧪 :ref:`Recipe runs successfully ` +- 🧪 :ref:`recipe_documentation` is available +- 🧪 :ref:`Figure(s) and data ` look as expected from literature +- 🛠 :ref:`Provenance information ` has been added + +New or updated data reformatting script +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +See :ref:`new dataset ` for detailed instructions. + +- 🛠 :ref:`dataset-documentation` is available +- 🛠 The dataset has been :ref:`added to the CMOR check recipe ` +- 🧪 Numbers and units of the data look :ref:`physically meaningful ` + +.. _descriptive_pr_title: + +Pull request title +------------------ + +The title of a pull request should clearly describe what the pull request changes. +If you need more text to describe what the pull request does, please add it in +the description. +The titles of pull requests are used to compile the :ref:`changelog`; therefore, +it is important that they are easy to understand for people who are not +familiar with the code or people in the project. +Descriptive pull request titles also make it easier to find out what was +changed when, which is useful in case a bug was introduced. + +.. _code_quality: + +Code quality +------------ + +To increase the readability and maintainability of the ESMValTool source +code, we aim to adhere to best practices and coding standards. +For code in all languages, it is highly recommended that you split your code up +in functions that are short enough to view without scrolling, e.g. no more than +50 lines long. + +We include checks for Python, R, NCL, and YAML files, most of which are +described in more detail in the sections below. +This includes checks for invalid syntax and formatting errors. +:ref:`pre-commit` is a handy tool that can run all of these checks automatically +just before you commit your code. +It knows which tool to run for each filetype, and therefore provides +a convenient way to check your code. + +Python +~~~~~~ + +The standard document on best practices for Python code is +`PEP8 `__ and there is +`PEP257 `__ for code documentation. +We make use of +`numpy style docstrings `__ +to document Python functions that are visible on +`readthedocs `__. + +To check if your code adheres to the standard, go to the directory where +the repository is cloned, e.g. ``cd ESMValTool``, and run `prospector `_ + +:: + + prospector esmvaltool/diag_scripts/your_diagnostic/your_script.py + +In addition to prospector, we also use `flake8 `_ +to automatically check for obvious bugs and formatting mistakes. + +When you make a pull request, adherence to the Python development best practices +is checked in two ways: + +#. 
As part of the unit tests, flake8_ is run by + `CircleCI `_, + see the section on Tests_ for more information. +#. `Codacy `_ + is a service that runs prospector (and other code quality tools) on changed + files and reports the results. + Click the 'Details' link behind the Codacy check entry and then click + 'View more details on Codacy Production' to see the results of the static + code analysis done by Codacy_. + If you need to log in, you can do so using your GitHub account. + +A pull request should preferably not introduce any new prospector issues. +However, we understand that there is a limit to how much time can be spent on +polishing code, so up to 10 new (non-trivial) issues is still an acceptable +amount. +Formatting issues are considered trivial and need to be addressed. +Note that the automatic code quality checks by prospector are really helpful to +improve the quality of your code, but they are not flawless. +If you suspect prospector or Codacy may be wrong, please ask the +`@ESMValGroup/tech-reviewers`_ by commenting on your pull request. + +Note that running prospector locally will give you quicker and sometimes more +accurate results than waiting for Codacy. + +Most formatting issues in Python code can be fixed automatically by +running the commands + +:: + + isort some_file.py + +to sort the imports in `the standard way `__ +using `isort `__ and + +:: + + yapf -i some_file.py + +to add/remove whitespace as required by the standard using `yapf `__, and + +:: + + docformatter -i some_file.py + +to run `docformatter `__ which helps +format the docstrings (such as line length, spaces). + +NCL +~~~ + +Because there is no standard best practices document for NCL, we use +`PEP8 `__ for NCL code as +well, with some minor adjustments to accommodate differences in the +languages. The most important difference is that for NCL code the +indentation should be 2 spaces instead of 4. +Use the command ``nclcodestyle /path/to/file.ncl`` to check if your code +follows the style guide. +More information on the ``nclcodestyle`` command can be found +:ref:`here `. + +R +~ + +Best practices for R code are described in `The tidyverse style +guide `__. We check adherence to this +style guide by using +`lintr `__ on +CircleCI. Please use `styler `__ to +automatically format your code according to this style guide. In the +future we would also like to make use of +`goodpractice `__ +to assess the quality of R code. + +YAML +~~~~ + +Please use `yamllint `_ to check that your +YAML files do not contain mistakes. +``yamllint`` checks for valid syntax, common mistakes like key repetition and +cosmetic problems such as line length, trailing spaces, wrong indentation, etc. +When the tool complains about the maximum line length or too many spaces, please +use your own best judgement about whether solving the issue will make your +recipe more readable. + +Any text file +~~~~~~~~~~~~~ + +A generic tool to check for common spelling mistakes is +`codespell `__. + +.. _documentation: + +Documentation +------------- + +The documentation lives on `docs.esmvaltool.org `_ +and is built using `Sphinx `_. +There are two main ways of adding documentation: + +#. As written text in the directory + `doc/sphinx/source `__. + When writing + `reStructuredText `_ + (``.rst``) files, please try to limit the line length to 80 characters and + always start a sentence on a new line. + This makes it easier to review changes to documentation on GitHub. + +#. As docstrings or comments in code. 
+ For Python code, the + `docstrings `__ + of Python modules, classes, and functions + that are mentioned in + `doc/sphinx/source/api `__ + are used to generate documentation. + This results in the :ref:`api`. + +.. _doc_howto: + +What should be documented +~~~~~~~~~~~~~~~~~~~~~~~~~ + +See also :ref:`recipe_documentation` and :ref:`dataset-documentation`. + +Any code documentation that is visible on `docs.esmvaltool.org`_ +should be well written and adhere to the standards for documentation for the +respective language. +Note that there is no need to write extensive documentation for functions that +are not visible in the online documentation. +However, a short description in the docstring helps other contributors to +understand what a function is intended to do and what its capabilities are. +For short functions, a one-line docstring is usually sufficient, but more +complex functions might require slightly more extensive documentation. + +How to build and view the documentation +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Whenever you make a pull request or push new commits to an existing pull +request, readthedocs will automatically build the documentation. +The link to the documentation will be shown in the list of checks below your +pull request; click 'Details' behind the check +``docs/readthedocs.org:esmvaltool`` to preview the documentation. +If all checks were successful, you may need to click 'Show all checks' to see +the individual checks. + +To build the documentation on your own computer, go to the directory where the +repository was cloned and run + +:: + + sphinx-build doc/sphinx/source/ doc/sphinx/build/ + +or + +:: + + sphinx-build -Ea doc/sphinx/source/ doc/sphinx/build/ + +to build it from scratch. +Make sure that your newly added documentation builds without warnings or +errors and looks correctly formatted. +CircleCI_ will build the documentation with the command + +.. code-block:: bash + + sphinx-build -W doc/sphinx/source/ doc/sphinx/build/ + +to catch mistakes that can be detected automatically. + +The configuration file for Sphinx_ is +`doc/sphinx/source/conf.py `_ +and the configuration file for ReadTheDocs is +`.readthedocs.yaml `_. + +When reviewing a pull request, always check that the documentation checks +shown below the pull request were successful. +Successful checks have a green ✓ in front, a ❌ means the test job failed. + +.. _esmvalcore-documentation-integration: + +Integration with the ESMValCore documentation +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +The `ESMValCore documentation `_ +is hosted as a +`subproject `_ +of the ESMValTool documentation on readthedocs. +To link to a section from the ESMValCore documentation from the reStructuredText +(``.rst``) files, use the usual ``:ref:`` but prefix the reference with +``esmvalcore:``. +For example, ``:ref:`esmvalcore:recipe``` to link to +:ref:`esmvalcore:recipe`. + +There is a script that generates the navigation menu shown on the left when +you view the documentation. +This script is called +`doc/sphinx/source/gensidebar.py `_ +in the ESMValTool repository and it should be identical to +`doc/gensidebar.py `_ +in the ESMValCore repository, or the sidebar will change when navigating from +the ESMValTool documentation to the ESMValCore documentation and vice versa. + +.. _tests: + +Tests +----- + +To check various aspects of the recipes and code, there are tests available in the +`tests `__ +directory. 
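+
+As a sketch of what such a test can look like, unit tests follow the usual
+``pytest`` conventions; the test below is a hypothetical example (not a file
+from the ``tests`` directory) built around the shared helper
+``group_metadata``:
+
+.. code-block:: python
+
+    """Example unit test (sketch)."""
+    from esmvaltool.diag_scripts.shared import group_metadata
+
+
+    def test_group_metadata_by_dataset():
+        """Check that metadata dictionaries are grouped by dataset name."""
+        metadata = [
+            {'dataset': 'EXAMPLE-A', 'filename': 'a.nc'},
+            {'dataset': 'EXAMPLE-B', 'filename': 'b.nc'},
+        ]
+        result = group_metadata(metadata, 'dataset')
+        assert set(result) == {'EXAMPLE-A', 'EXAMPLE-B'}
+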
+ +Whenever you make a pull request or push new commits to an existing pull +request, these tests will be run automatically on CircleCI_. +The results appear at the bottom of the pull request. +Click on 'Details' for more information on a specific test job. +To see some of the results on CircleCI, you may need to log in. +You can do so using your GitHub account. + +To run the tests on your own computer, go to the directory where the repository +is cloned and run the command ``pytest``. + +Have a look at :ref:`testing_recipes` for information on testing recipes. + +Every night, more extensive tests are run to make sure that problems with the +installation of the tool are discovered by the development team before users +encounter them. +These nightly tests have been designed to mimic the installation procedures +described in the documentation, e.g. in the :ref:`install` chapter. +The nightly tests are run using both CircleCI and GitHub Actions; the +result of the tests run by CircleCI can be seen on the +`CircleCI project page `__ +and the result of the tests run by GitHub Actions can be viewed on the +`Actions tab `__ +of the repository. + +The configuration of the tests run by CircleCI can be found in the directory +`.circleci `__, +while the configuration of the tests run by GitHub Actions can be found in the +directory +`.github/workflows `__. + +When reviewing a pull request, always check that all test jobs on CircleCI_ were +successful. +Successful test jobs have a green ✓ in front, a ❌ means the test job failed. + +.. _authors: + +List of authors +--------------- + +If you make a contribution to ESMValTool and you would like to be listed as an +author (e.g. on `Zenodo `__), please add your +name to the list of authors in ``CITATION.cff`` and generate the entry for the +``.zenodo.json`` file by running the commands + +:: + + pip install cffconvert + cffconvert --infile CITATION.cff --format zenodo --outfile .zenodo.json + +Presently, this method unfortunately discards entries `communities` +and `grants` from that file; please restore them manually. + +Note that authors of recipes and/or diagnostics also need to be added to the file +`esmvaltool/config-references.yml `__; +see :ref:`recording-provenance` for more information. + +.. _dependencies: + +Dependencies +------------ + +Before considering adding a new dependency, carefully check that the +`license `__ +of the dependency you want to add and any of its dependencies are +`compatible `__ +with the +`Apache 2.0 `_ +license that applies to the ESMValTool. +Note that the GPL version 2 license is considered incompatible with the Apache 2.0 +license, while the compatibility of the GPL version 3 license with the Apache 2.0 +license is questionable. +See this `statement `__ +by the authors of the Apache 2.0 license for more information. + +When adding or removing dependencies, please consider applying the changes in +the following files: + +- ``environment.yml`` + contains dependencies that cannot be installed from + `PyPI `__/`Julia package registry `__ +- ``environment_osx.yml`` + contains development dependencies for MacOSX. Should be the same as ``environment.yml``, + but currently without multi-language support. +- ``esmvaltool/install/Julia/Project.toml`` + contains Julia dependencies that can be installed from the default Julia + package registry +- ``setup.py`` + contains all Python dependencies, regardless of their installation source + +Note that packages may have a different name on +`conda-forge `__ than on PyPI or CRAN. 
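+
+For example (a sketch; the exact entries in the ESMValTool files may differ),
+``matplotlib`` is published as ``matplotlib-base`` on conda-forge, so the
+conda environment file and ``setup.py`` would name the same dependency
+differently:
+
+.. code-block:: yaml
+
+    # environment.yml: conda-forge package name
+    dependencies:
+      - matplotlib-base
+
+while ``setup.py`` would list the PyPI name ``matplotlib``.
+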
+ +Several test jobs on CircleCI_ related to the installation of the tool will only +run if you change the dependencies. +These will be skipped for most pull requests. + +When reviewing a pull request where dependencies are added or removed, always +check that the changes have been applied in all relevant files. + +.. _pull_request_checks: + +Pull request checks +------------------- + +To check that a pull request is up to standard, several automatic checks are +run when you make a pull request. +Read more about it in the Tests_ and Documentation_ sections. +Successful checks have a green ✓ in front, a ❌ means the check failed. + +If you need help with the checks, please ask the technical reviewer of your pull +request for help. +Ask `@ESMValGroup/tech-reviewers`_ if you do not have a technical reviewer yet. + +If the checks are broken because of something unrelated to the current +pull request, please check if there is an open issue that reports the problem +and create one if there is no issue yet. +You can attract the attention of the `@ESMValGroup/esmvaltool-coreteam`_ by +mentioning them in the issue if it looks like no-one is working on solving the +problem yet. +The issue needs to be fixed in a separate pull request first. +After that has been merged into the ``main`` branch and all checks are green +again on the ``main`` branch, merge it into your own branch to get the tests +to pass. + +When reviewing a pull request, always make sure that all checks were successful. +If the Codacy check keeps failing, please run prospector locally. +If necessary, ask the pull request author to do the same and to address the +reported issues. +See the section on code_quality_ for more information. +Never merge a pull request with failing CircleCI or readthedocs checks. + +.. _`@ESMValGroup/esmvaltool-coreteam`: https://github.com/orgs/ESMValGroup/teams/esmvaltool-coreteam +.. _`@ESMValGroup/esmvaltool-developmentteam`: https://github.com/orgs/ESMValGroup/teams/esmvaltool-developmentteam +.. _`@ESMValGroup/tech-reviewers`: https://github.com/orgs/ESMValGroup/teams/tech-reviewers +.. _`@ESMValGroup/science-reviewers`: https://github.com/orgs/ESMValGroup/teams/science-reviewers diff --git a/doc/sphinx/source/community/dataset.rst b/doc/sphinx/source/community/dataset.rst new file mode 100644 index 0000000000..7a24e7c923 --- /dev/null +++ b/doc/sphinx/source/community/dataset.rst @@ -0,0 +1,181 @@ +.. _new-dataset: + +Making a new dataset +******************** + +If you are contributing a new dataset, please have a look at +:ref:`new-cmorizer` for how to do so. +Please always create separate pull requests for CMORizer scripts, even when +introducing a new dataset or updating an existing dataset with a new recipe. + +If you are updating a CMORizer script to support a different dataset version, +please have a look at :ref:`dataset-versions` for how to handle multiple +dataset versions. + +.. _dataset-documentation: + +Dataset documentation +===================== + +The documentation required for a CMORizer script is the following: + +- Make sure that the new dataset is added to the list of + :ref:`supported_datasets` and to the file datasets.yml_. +- The code documentation should contain clear instructions on how to obtain + the data. +- A BibTeX file named ``.bibtex`` defining the reference for the new + dataset should be placed in the directory ``esmvaltool/references/``, see + :ref:`adding_references` for detailed instructions. + +.. 
_datasets.yml: https://github.com/ESMValGroup/ESMValTool/blob/main/esmvaltool/cmorizers/data/datasets.yml + +For more general information on writing documentation, see :ref:`documentation`. + +.. _dataset-test: + +Testing +======= + +When contributing a new script, add an entry for the CMORized data to +`recipes/examples/recipe_check_obs.yml `__ +and run the recipe to make sure the CMOR checks pass without warnings or errors. + +To test a pull request for a new CMORizer script: + +#. Download the data following the instructions included in the script and + place it in the ``RAWOBS`` ``rootpath`` specified in your + :ref:`configuration ` +#. If available, use the downloading script by running + ``esmvaltool data download --config_file `` +#. Run the CMORization by running ``esmvaltool data format `` +#. Copy the resulting data to the ``OBS`` (for CMIP5 compliant data) or ``OBS6`` + (for CMIP6 compliant data) ``rootpath`` specified in your + :ref:`configuration ` +#. Run ``recipes/examples/recipe_check_obs.yml`` with the new dataset to check that + the data can be used + +.. _dataset-sanity-check: + +Scientific sanity check +======================= + +When contributing a new dataset, we expect that the numbers and units of the +dataset look physically meaningful. +The scientific reviewer needs to check this. + +Data availability +================= + +Once your pull request has been approved by the reviewers, ask a member of +`@OBS-maintainers `_ +to add the new dataset to the data pool at DKRZ and CEDA-Jasmin. +This team is in charge of merging CMORizer pull requests. + +.. _dataset_checklist: + +Detailed checklist for reviews +============================== + +This (non-exhaustive) checklist provides ideas for things to check when +reviewing pull requests for new or updated CMORizer scripts. + +Dataset description +------------------- + +Check that the new dataset has been added to the table of observations defined in +the ESMValTool user’s guide in section :ref:`inputdata` +(generated from ``doc/sphinx/source/input.rst``). +Check that the new dataset has also been added to the file `datasets.yml +`__. + +BibTeX info file +---------------- + +Check that a BibTeX file, i.e. ``.bibtex``, defining the reference for +the new dataset has been created in ``esmvaltool/references/``. + +recipe_check_obs.yml +-------------------- + +Check that the new dataset has been added to the testing recipe +``esmvaltool/recipes/examples/recipe_check_obs.yml``. + +Downloader script +----------------- + +If present, check that the new downloader script +``esmvaltool/cmorizers/data/downloaders/datasets/.py`` +meets standards. +This includes the following items: + +* Code quality checks + + 1. Code quality + 2. No Codacy errors reported + +CMORizer script +--------------- + +Check that the new CMORizer script +``esmvaltool/cmorizers/data/formatters/datasets/.{py,ncl}`` +meets standards. +This includes the following items: + +* In-code documentation (header) contains + + 1. Download instructions + 2. Reference(s) + +* Code quality checks + + 1. Code quality (e.g. no hardcoded pathnames) + 2. No Codacy errors reported + + +Config file +----------- + +If present, check config file ``.yml`` in +``esmvaltool/cmorizers/data/cmor_config/`` for correctness. +Use ``yamllint`` to check for syntax errors and common mistakes. 
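+
+As an illustration only (all keys and values below are hypothetical and the
+exact structure depends on the dataset), such a configuration file typically
+defines global attributes and the variables to be CMORized:
+
+.. code-block:: yaml
+
+    ---
+    # Global attributes written to the CMORized files
+    attributes:
+      dataset_id: MYDATA
+      version: '1.0'
+      tier: 2
+      project_id: OBS6
+      source: 'https://example.org/mydata'
+      reference: 'mydata'  # BibTeX tag in esmvaltool/references
+
+    # Variables to CMORize
+    variables:
+      tas:
+        mip: Amon
+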
+ +Run downloader script +--------------------- + +If available, make sure the downloader script is working by running +``esmvaltool data download --config_file `` + + +Run CMORizer +------------ + +Make sure the CMORizer is working by running +``esmvaltool data format --config_file `` + +Check output of CMORizer +------------------------ + +After successfully running the new CMORizer, check that: + +* Output contains (some) valid values (e.g. not only nan or zeros) +* Metadata is defined properly + +Run ``esmvaltool/recipes/examples/recipe_check_obs.yml`` for the new dataset. + + +RAW data +-------- + +Contact the team in charge of the ESMValTool data pool (`@OBS-maintainers`_) and +request to copy RAW data to RAWOBS/Tier2 (Tier3). + + +CMORized data +------------- + +Contact the team in charge of the ESMValTool data pool (`@OBS-maintainers`_) and +request to + +* Merge the pull request +* Copy the CMORized dataset to OBS/Tier2 (Tier3) +* Set file access rights for the new dataset diff --git a/doc/sphinx/source/community/diagnostic.rst b/doc/sphinx/source/community/diagnostic.rst new file mode 100644 index 0000000000..1be820f7b8 --- /dev/null +++ b/doc/sphinx/source/community/diagnostic.rst @@ -0,0 +1,567 @@ +.. _new-diagnostic: + +Making a new diagnostic or recipe +********************************* + +Getting started +=============== + +Please discuss your idea for a new diagnostic or recipe with the development team before getting started, +to avoid disappointment later. A good way to do this is to open an +`issue on GitHub `_. +This is also a good way to get help. + +.. _diagnostic_from_example: + +Creating a recipe and diagnostic script(s) +========================================== +First create a recipe in esmvaltool/recipes to define the input data your analysis script needs +and, optionally, preprocessing and other settings. +Also create a script in the +`esmvaltool/diag_scripts `_ +directory and make sure it is referenced from your recipe. +The easiest way to do this is probably to copy the example recipe and diagnostic +script and adjust those to your needs. + +If you have no preferred programming language yet, Python 3 is highly recommended, because it is the best supported. +However, NCL, R, and Julia scripts are also supported. + +Good example recipes for the different languages are: + +- python: `esmvaltool/recipes/examples/recipe_python.yml `_ +- R: `esmvaltool/recipes/examples/recipe_r.yml `_ +- julia: `esmvaltool/recipes/examples/recipe_julia.yml `_ +- ncl: `esmvaltool/recipes/examples/recipe_ncl.yml `_ + +Good example diagnostics are: + +- python: `esmvaltool/diag_scripts/examples/diagnostic.py `_ +- R: `esmvaltool/diag_scripts/examples/diagnostic.R `_ +- julia: `esmvaltool/diag_scripts/examples/diagnostic.jl `_ +- ncl: `esmvaltool/diag_scripts/examples/diagnostic.ncl `_ + +For an explanation of the recipe format, you might want to read about the +:ref:`ESMValTool recipe ` and have a look at the +available :ref:`preprocessor functions `. +For further inspiration, check out the already +:ref:`available recipes and diagnostics `. + +There is a directory +`esmvaltool/diag_scripts/shared `_ +for code that is shared by many diagnostics. +This directory contains code for creating common plot types, generating output +file names, selecting input data, and other commonly needed functions. +See :ref:`api_shared` for the documentation of the shared Python code. 
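+
+For instance, a minimal Python diagnostic built on these shared helpers could
+look like the following sketch (the actual analysis and plotting are left
+out):
+
+.. code-block:: python
+
+    """Minimal diagnostic skeleton (sketch)."""
+    from esmvaltool.diag_scripts.shared import group_metadata, run_diagnostic
+
+
+    def main(cfg):
+        """Loop over the input files, grouped by dataset."""
+        # 'input_data' maps filenames to metadata dictionaries.
+        input_data = cfg['input_data'].values()
+        for dataset, metadata in group_metadata(input_data, 'dataset').items():
+            filenames = [item['filename'] for item in metadata]
+            print(dataset, filenames)
+
+
+    if __name__ == '__main__':
+        with run_diagnostic() as config:
+            main(config)
+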
+ +Re-using existing code +====================== +Always make sure your code is or can be released under a license that is compatible with the Apache 2.0 license. + +If you have existing code in a supported scripting language, you have two options for re-using it. If it is fairly +mature and comprises a large amount of code, the preferred way is to package and publish it on the +official package repository for that language and add it as a dependency of ESMValTool. +If it is just a few simple scripts or packaging is not possible (i.e. for NCL), you can simply copy +and paste the source code into the ``esmvaltool/diag_scripts`` directory. + +If you have existing code in a compiled language like +C, C++, or Fortran that you want to reuse, the recommended way to proceed is to add Python bindings and publish +the package on PyPI so it can be installed as a Python dependency. You can then call the functions it provides +using a Python diagnostic. + +.. _recipe_documentation: + +Recipe and diagnostic documentation +=================================== + +This section describes how to document a recipe. +For more general information on writing documentation, see :ref:`documentation`. + +On readthedocs +-------------- + +Recipes should have a page in the :ref:`recipes` chapter which describes what +the recipe/diagnostic calculates. + +When adding a completely new recipe, please start by copying +`doc/sphinx/source/recipes/recipe_template.rst.template `_ +to a new file ``doc/sphinx/source/recipes/recipe_.rst`` +and do not forget to add your recipe to the +`index `_. + +Fill all sections from the template: + +- Add a brief description of the method +- Add references +- Document recipe options for the diagnostic scripts +- Fill in the list of variables required to run the recipe +- Add example images + +An example image for each type of plot produced by the recipe should be added +to the documentation page to show the kind of output the recipe produces. +The '.png' files can be stored in a subdirectory specific to the recipe under +`doc/sphinx/source/recipes/figures `_ +and linked from the recipe documentation page. +A resolution of 150 `dpi `_ is +recommended for these image files, as this is high enough for the images to look +good on the documentation webpage, but not so high that the files become large. + +In the recipe +------------- +Fill in the ``documentation`` section of the recipe as described in +:ref:`esmvalcore:recipe_documentation` and add a ``description`` to each +diagnostic entry. +Please note that the ``maintainer`` entry is not strictly necessary to run a +recipe, but it is mandatory for recipes within the ESMValTool repository (enforced by +a unit test). +If no maintainer is available, use the single entry ``unmaintained``. +When reviewing a recipe, check that these entries have been filled with +descriptive content. + +In the diagnostic scripts +------------------------- +Functions implementing scientific formulas should contain comments with +references to the source paper(s) and formula number(s). + +When reviewing diagnostic code, check that formulas are implemented according +to the referenced paper(s) and/or other resources and that the computed numbers +look as expected from literature. + +.. _diagnostic_output: + +Diagnostic output +================= + +Typically, diagnostic scripts create plots, but any other output, such as +text files or tables, is also possible. 
+Figures should be saved in the ``plot_dir``, either in both ``.pdf`` and +``.png`` format (preferred), or respect the :ref:`configuration option +` ``output_file_type`` . +Data should be saved in the ``work_dir``, preferably as a ``.nc`` +(`NetCDF `__) file, following the +`CF-Conventions `__ as much as possible. + +Have a look at the :ref:`example scripts ` for how to +access the value of ``work_dir``, ``plot_dir``, and ``output_file_type`` from +the diagnostic script code. +More information on the interface between ESMValCore and the diagnostic script +is available :ref:`here ` and +the description of the :ref:`outputdata` may also help to understand this. + +If a diagnostic script creates plots, it should save the data used to create +those plots also to a NetCDF file. +If at all possible, there will be one NetCDF file for each plot the diagnostic +script creates. +There are several reasons why it is useful to have the plotted data available +in a NetCDF file: + +- for interactive visualization of the recipe on a website +- for automated regression tests, e.g. checking that the numbers are still the + same with newer versions of libraries + +If the output data is prohibitively large, diagnostics authors can choose to +implement a ``write_netcdf: false`` diagnostic script option, so writing the +NetCDF files can be disabled from the recipe. + +When doing a scientific review, please check that the figures and data look as +expected from the literature and that appropriate references have been added. + +.. _recording-provenance: + +Recording provenance +==================== + +When ESMValCore (the ``esmvaltool`` command) runs a recipe, +it will first find all data and run the default preprocessor steps plus any +additional preprocessing steps defined in the recipe. Next it will run the diagnostic script defined in the recipe +and finally it will store provenance information. Provenance information is stored in the +`W3C PROV XML format `_ +and provided that the provenance tree is small, also plotted in an SVG file for +human inspection. +In addition to provenance information, a caption is also added to the plots. + +Provenance information from the recipe is automatically recorded by ESMValCore, whereas +diagnostic scripts must include code specifically to record provenance. See below for +documentation of provenance attributes that can be included in a recipe. +When contributing a diagnostic, please make sure it records the provenance, +and that no warnings related to provenance are generated when running the recipe. +To allow the ESMValCore to keep track of provenance (e.g. which input files +were used to create what plots by the diagnostic script), it needs the +:ref:`esmvalcore:interface_diagnostic_esmvalcore`. + +.. note:: + + Provenance is recorded by the ``esmvaltool`` command provided by the + ESMValCore package. + No ``*_provenance.xml`` files will be generated when re-running just + the diagnostic script with the command that is displayed on the screen + during a recipe run, because that will only run the diagnostic script. + +Provenance items provided by the recipe +--------------------------------------- +Provenance tags can be added in several places in the recipe. +The :ref:`esmvalcore:recipe_documentation` section provides information about +the entire recipe. 
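+
+As a sketch, such a ``documentation`` section can look like this (the author,
+maintainer, and reference tags must exist in ``config-references.yml``; the
+values below are examples):
+
+.. code-block:: yaml
+
+    documentation:
+      title: Example recipe title
+      description: Brief description of what the recipe computes.
+      authors:
+        - righi_mattia
+      maintainer:
+        - righi_mattia
+      references:
+        - acknow_project
+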
+ +For each diagnostic in the recipe, ESMValCore supports the following additional information: + +- :code:`realms` a list of high-level modeling components +- :code:`themes` a list of themes + +Please see the (installed version of the) file +`esmvaltool/config-references.yml `_ +for all available information on each item. + +Provenance items provided by the diagnostic script +-------------------------------------------------- +For each output file produced by the diagnostic script, ESMValCore supports the following additional information: + +- :code:`ancestors` a list of input files used to create the plot. +- :code:`caption` a caption text for the plot + +Note that the level of detail is limited, the only valid choices for ``ancestors`` are files produced by +:ref:`ancestor tasks`. + +It is also possible to add more information for the implemented diagnostics using the following items: + +- :code:`authors` a list of authors +- :code:`references` a list of references, see :ref:`adding_references` below +- :code:`projects` a list of projects +- :code:`domains` a list of spatial coverage of the dataset +- :code:`plot_types` a list of plot types if the diagnostic created a plot, e.g. error bar +- :code:`statistics` a list of types of the statistic, e.g. anomaly +- :code:`long_names` a list of long names of used variables, e.g. Air Temperature + +Arbitrarily named other items are also supported. + +Please see the (installed version of the) file +`esmvaltool/config-references.yml `_ +for all available information on each item, see :ref:`esmvalcore:config-ref` for +an introduction. It is also possible to add custom provenance information by adding items to each category in this file. +In this file, the information is written in the form + +.. code-block:: console + + key: + value: description + +for example + +.. code-block:: console + + plot_types: + errorbar: error bar plot + +To use these items, include them in the provenance record dictionary in the form +:code:`key: [value]` +i.e. for the example above as +:code:`'plot_types': ['errorbar']`. + +In order to communicate with the diagnostic script, two interfaces have been defined, +which are described in the `ESMValCore documentation `_. +Note that for Python and NCL diagnostics much more convenient methods are available than +directly reading and writing the interface files. For other languages these are not implemented (yet). + +Depending on your preferred programming language for developing a diagnostic, +see the instructions and examples below on how to add provenance information: + +Recording provenance in a Python diagnostic script +-------------------------------------------------- +Always use :func:`esmvaltool.diag_scripts.shared.run_diagnostic` at the end of your script: + +.. code-block:: python + + if __name__ == '__main__': + with run_diagnostic() as config: + main(config) + +Create a ``provenance_record`` for each diagnostic file (i.e. image or data +file) that the diagnostic script outputs. The ``provenance_record`` is a +dictionary of provenance items, for example: + +.. code-block:: python + + provenance_record = { + 'ancestors': ancestor_files, + 'authors': [ + 'andela_bouwe', + 'righi_mattia', + ], + 'caption': caption, + 'domains': ['global'], + 'plot_types': ['zonal'], + 'references': [ + 'acknow_project', + ], + 'statistics': ['mean'], + } + +To save a matplotlib figure, use the convenience function +:func:`esmvaltool.diag_scripts.shared.save_figure`. 
Similarly, to save Iris cubes use +:func:`esmvaltool.diag_scripts.shared.save_data`. Both of these functions take +``provenance_record`` as an argument and log the provenance accordingly. +Have a look at the example Python diagnostic in +`esmvaltool/diag_scripts/examples/diagnostic.py `_ +for a complete example. + +For any other files created, you will need to make use of a +:class:`esmvaltool.diag_scripts.shared.ProvenanceLogger` to log provenance. Include the +following code directly after the file is saved: + +.. code-block:: python + + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(diagnostic_file, provenance_record) + +The full path of a ``diagnostic_file`` can be obtained using :class:`esmvaltool.diag_scripts.shared.get_diagnostic_filename`. + +Recording provenance in an NCL diagnostic script +------------------------------------------------ +Always call the ``log_provenance`` procedure after plotting from your NCL diag_script: + +.. code-block:: console + + log_provenance(nc-file,plot_file,caption,statistics,domain,plottype,authors,references,input-files) + +For example: + +.. code-block:: console + + log_provenance(ncdf_outfile, \ + map@outfile, \ + "Mean of variable: " + var0, \ + "mean", \ + "global", \ + "geo", \ + (/"righi_mattia", "gottschaldt_klaus-dirk"/), \ + (/"acknow_author"/), \ + metadata_att_as_array(info0, "filename")) + +Have a look at the example NCL diagnostic in +`esmvaltool/diag_scripts/examples/diagnostic.ncl `_ +for a complete example. + +Recording provenance in a Julia diagnostic script +------------------------------------------------- +The provenance information is written in a ``diagnostic_provenance.yml`` that will be located in ``run_dir``. +For example a ``provenance_record`` can be stored in a yaml file as: + +.. code-block:: julia + + provenance_file = string(run_dir, "/diagnostic_provenance.yml") + + open(provenance_file, "w") do io + JSON.print(io, provenance_records, 4) + end + +The ``provenance_records`` can be defined as a dictionary of provenance items. +For example: + +.. code-block:: julia + + provenance_records = Dict() + + provenance_record = Dict( + "ancestors" => [input_file], + "authors" => ["vonhardenberg_jost", "arnone_enrico"], + "caption" => "Example diagnostic in Julia", + "domains" => ["global"], + "projects" => ["crescendo", "c3s-magic"], + "references" => ["zhang11wcc"], + "statistics" => ["other"], + ) + + provenance_records[output_file] = provenance_record + +Have a look at the example Julia diagnostic in +`esmvaltool/diag_scripts/examples/diagnostic.jl `_ +for a complete example. + +Recording provenance in an R diagnostic script +---------------------------------------------- +The provenance information is written in a ``diagnostic_provenance.yml`` that will be located in ``run_dir``. +For example a ``provenance_record`` can be stored in a yaml file as: + +.. code-block:: R + + provenance_file <- paste0(run_dir, "/", "diagnostic_provenance.yml") + write_yaml(provenance_records, provenance_file) + +The ``provenance_records`` can be defined as a list of provenance items. +For example: + +.. code-block:: R + + provenance_records <- list() + + provenance_record <- list( + ancestors = input_filenames, + authors = list("hunter_alasdair", "perez-zanon_nuria"), + caption = title, + projects = list("c3s-magic"), + statistics = list("other"), + ) + + provenance_records[[output_file]] <- provenance_record + +.. 
_adding_references: + +Adding references +================= +Recipes and diagnostic scripts can include references. +When a recipe is run, citation information is stored in `BibTeX `__ format. +Follow the steps below to add a reference to a recipe (or a diagnostic): + +- make a ``tag`` that is representative of the reference entry. + For example, ``righi15gmd`` shows the last name of the first author, the year, and the journal abbreviation. +- add the ``tag`` to the ``references`` section in the recipe (or the diagnostic script provenance, see recording-provenance_). +- make a BibTeX file for the reference entry. There are some online tools to convert a DOI to BibTeX format, such as https://doi2bib.org/ +- rename the file to the ``tag``, keeping the ``.bibtex`` extension. +- add the file to the folder ``esmvaltool/references``. + +Note: the ``references`` section in ``config-references.yaml`` has been replaced by the folder ``esmvaltool/references``. + +.. _testing_recipes: + +Testing recipes +=============== + +To test a recipe, you can run it yourself on your local infrastructure or you +can ask the `@esmvalbot `_ to run it for you. +To request a run of ``recipe_xyz.yml``, write the following comment below a pull +request: + +:: + + @esmvalbot Please run recipe_xyz.yml + +Note that only members of the `@ESMValGroup/esmvaltool-developmentteam`_ +can request runs. The memory of the `@esmvalbot`_ is limited to 16 GB and it only +has access to data available at DKRZ. + +When reviewing a pull request, at the very least check that the recipe runs +without any modifications. +For a more thorough check, you might want to try out different datasets or +change some settings if the diagnostic scripts support those. +A simple :ref:`tool ` is available for testing recipes +with various settings. + +.. _diagnostic_checklist: + +Detailed checklist for reviews +============================== + +This (non-exhaustive) checklist provides ideas for things to check when reviewing +pull requests for new or updated recipes and/or diagnostic scripts. + +Technical reviews +----------------- + +Documentation +~~~~~~~~~~~~~ + +Check that the scientific documentation of the new diagnostic has been added to +the user’s guide: + +* A file ``doc/sphinx/source/recipes/recipe_.rst`` exists +* New documentation is included in ``doc/sphinx/source/recipes/index.rst`` +* Documentation follows template `doc/sphinx/source/recipes/recipe_template.rst.template`_ +* Description of configuration options +* Description of variables +* Valid image files +* Resolution of image files (~150 dpi is usually enough; file size should be + kept small) + +Recipe +~~~~~~ + +Check YAML syntax (with ``yamllint``) and that the new recipe contains: + +* Documentation: description, authors, maintainer, references, projects +* Provenance tags: themes, realms + +Diagnostic script +~~~~~~~~~~~~~~~~~ + +Check that the new diagnostic script(s) meet(s) standards. +This includes the following items: + +* In-code documentation (comments, docstrings) +* Code quality (e.g. no hardcoded pathnames) +* No Codacy errors reported +* Reuse of existing functions whenever possible +* Provenance implemented + +Run recipe +~~~~~~~~~~ + +Make sure the new diagnostic(s) work by running the ESMValTool with the recipe. + +Check output of diagnostic +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +After successfully running the new recipe, check that: + +* NetCDF output has been written +* Output contains (some) valid values (e.g. 
+
+Diagnostic script
+~~~~~~~~~~~~~~~~~
+
+Check that the new diagnostic script(s) meet(s) standards.
+This includes the following items:
+
+* In-code documentation (comments, docstrings)
+* Code quality (e.g. no hardcoded pathnames)
+* No Codacy errors reported
+* Reuse of existing functions whenever possible
+* Provenance implemented
+
+Run recipe
+~~~~~~~~~~
+
+Make sure the new diagnostic(s) work by running the ESMValTool with the
+recipe, as shown below.
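+
+A sketch of such a test run (the recipe name is hypothetical):
+
+.. code-block:: bash
+
+   # Run the recipe under review with an unmodified configuration.
+   esmvaltool run recipe_xyz.yml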
+
+Check output of diagnostic
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+After successfully running the new recipe, check that (see the sketch below):
+
+* NetCDF output has been written
+* Output contains (some) valid values (e.g. not only NaN or zeros)
+* Provenance information has been written
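+
+A quick way to inspect the output is sketched below; the output paths are
+hypothetical, as they depend on the recipe and the run:
+
+.. code-block:: bash
+
+   # List the NetCDF files written by the diagnostic ...
+   ls esmvaltool_output/recipe_xyz_*/work/
+   # ... and dump the header of one of them to check metadata and variables.
+   ncdump -h esmvaltool_output/recipe_xyz_*/work/some_diagnostic/output_file.nc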
+
+Check automated tests
+~~~~~~~~~~~~~~~~~~~~~
+
+Check for errors reported by the automated tests:
+
+* Codacy
+* CircleCI
+* Documentation build
+
+Scientific reviews
+------------------
+
+Documentation added to user’s guide
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Check that the scientific documentation of the new diagnostic
+in ``doc/sphinx/source/recipes/recipe_.rst``:
+
+* Meets the scientific documentation standard
+* Contains a brief description of the method
+* Contains complete references
+* Contains no typos / broken text
+* Is complete and written in an understandable language
+
+Recipe
+~~~~~~
+
+Check that the new recipe contains valid:
+
+* Documentation: description, references
+* Provenance tags: themes, realms
+
+Diagnostic script
+~~~~~~~~~~~~~~~~~
+
+Check that the new diagnostic script(s) meet(s) scientific standards.
+This can include the following items:
+
+* Clear and understandable in-code documentation including a brief
+  description of the diagnostic
+* References
+* Method / equations match the reference(s) given
+
+Run recipe
+~~~~~~~~~~
+
+Make sure the new diagnostic(s) work by running the ESMValTool.
+
+Check output of diagnostic
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+After successfully running the new recipe, check that:
+
+* Output contains (some) valid values (e.g. not only NaN or zeros)
+* If applicable, check plots and compare with corresponding plots in the
+  paper(s) cited
+
+
+.. _`@ESMValGroup/esmvaltool-developmentteam`: https://github.com/orgs/ESMValGroup/teams/esmvaltool-developmentteam
diff --git a/doc/sphinx/source/community/index.rst b/doc/sphinx/source/community/index.rst
new file mode 100644
index 0000000000..470fe7ae02
--- /dev/null
+++ b/doc/sphinx/source/community/index.rst
@@ -0,0 +1,37 @@
+.. _community:
+
+Contributing to the community
+=============================
+
+**Contributions are very welcome!**
+
+This chapter explains how to contribute to ESMValTool.
+We greatly value contributions of any kind.
+Contributions could include, but are not limited to, documentation
+improvements, bug reports, new or improved diagnostic code, scientific and
+technical code reviews, infrastructure improvements, maintenance of recipes,
+mailing list and chat participation, community help/building, education and
+outreach.
+
+If you have a bug or other issue to report, please open an issue on the
+`issues tab on the ESMValTool GitHub
+repository `__.
+
+In case anything is unclear, feel free to contact us for more information and
+help, e.g. on our
+`GitHub Discussions page `__.
+
+.. toctree::
+   :maxdepth: 1
+
+   Contributing code and documentation
+   Contributing a diagnostic or recipe
+   Policy on backward compatibility
+   Broken recipe policy
+   Contributing a dataset
+   Supporting multiple dataset versions
+   Contributing a review
+   Maintaining a recipe
+   Upgrading a namelist to a recipe
+   GitHub workflow
+   Moving work from the private to the public repository
+   Release strategy and procedures
diff --git a/doc/sphinx/source/community/maintainer.rst b/doc/sphinx/source/community/maintainer.rst
new file mode 100644
index 0000000000..7e73748be9
--- /dev/null
+++ b/doc/sphinx/source/community/maintainer.rst
@@ -0,0 +1,28 @@
+.. _recipe-maintainer:
+
+Maintaining a recipe
+====================
+
+As development of the ESMValTool continues, new features may be added, old
+ones replaced or retired, or the interface of library functions may change
+when updating to new versions.
+This, or for example the withdrawal of datasets used by a recipe, can cause
+an existing recipe to stop working.
+Such "broken" recipes might require some work to fix these problems and make
+the recipe fully functional again.
+
+A first **contact point** for the technical lead development team
+(:team:`technical-lead-development-team`) in such cases is the recipe
+"maintainer". The recipe maintainer is then asked to check the affected
+recipe and, if possible, fix the problems or work with the technical lead
+development team to find a solution. Ideally, a recipe maintainer is able to
+tell whether the results of a fixed recipe are scientifically valid and look
+as expected. Being a recipe maintainer consists of the following tasks:
+
+* responding in a timely manner to requests from the technical lead
+  development team, e.g. if a recipe is broken
+* if needed, checking and trying to fix their recipe(s) / working with the
+  technical lead development team (e.g. fixing a recipe might include
+  updating the actual recipe, diagnostic code or documentation)
+* if needed, checking the output of the fixed recipe for scientific validity
+  (asking the science lead development team for advice if needed)
+* if needed, changing the documentation to reflect that some differences from
+  the original results might appear (for reproducibility reasons, e.g. some
+  missing datasets in the fixed recipe produce slight differences in the
+  results but do not modify the conclusions)
+* informing the core development team when no longer available as maintainer
+
+Ideally, a recipe maintainer is named when contributing a new recipe to the
+ESMValTool. Recipe maintainers are asked to inform the core development team
+(:team:`esmvaltool-coreteam`) when they are no longer able to act as
+maintainer or when they would like to step down from this duty for any
+reason. The core development team will then try to find a successor. If no
+recipe maintainer can be found, the
+:ref:`policy on unmaintained broken (non-working) recipes` might apply,
+eventually leading to the retirement of the affected recipe.
diff --git a/doc/sphinx/source/community/multiple_dataset_versions.rst b/doc/sphinx/source/community/multiple_dataset_versions.rst
new file mode 100644
index 0000000000..7cfa059a79
--- /dev/null
+++ b/doc/sphinx/source/community/multiple_dataset_versions.rst
@@ -0,0 +1,38 @@
+.. _dataset-versions:
+
+Support for multiple versions of a dataset
+******************************************
+
+If you plan to update a CMORizer script to support a newer version of an
+existing dataset, indicate in the issue or pull request if support for
+previous versions should be kept.
+If the dataset is used in recipes, please also indicate if the recipes should
+be updated with the newest dataset version.
+
+Policy for dropping support for older dataset versions
+======================================================
+
+Support for older versions should preferably be kept as long as the data are
+publicly available.
+This ensures reproducibility and eases comparison of results of recipes using
+this dataset.
+
+Even when previous dataset versions are no longer available or data issues
+have been fixed in a newer dataset version, it is preferable to keep support
+for the previous version in addition to supporting the newer version.
+In such cases, it is recommended to ask the recipe maintainers of recipes
+using the older version of the dataset to update to the newer version if
+possible, so that support for the old version can be dropped in the future.
+
+Naming conventions
+==================
+
+If the data structure is rather similar between versions, a single CMORizer
+script (e.g. woa.py) and config file (e.g. WOA.yml) should be favored to
+handle multiple versions and avoid code duplication.
+Version-dependent data fixes can be applied based on the ``version`` keys
+defined in the config file.
+
+In some cases, it can be simpler to use different names for different dataset
+versions (e.g. GCP2018 and GCP2020).
+CMORizer scripts and config files should be named accordingly.
diff --git a/doc/sphinx/source/community/private_repository.rst b/doc/sphinx/source/community/private_repository.rst
new file mode 100644
index 0000000000..349036c38c
--- /dev/null
+++ b/doc/sphinx/source/community/private_repository.rst
@@ -0,0 +1,65 @@
+.. _private_repository:
+
+Moving work from the private to the public repository
+*****************************************************
+
+If you develop a new diagnostic with the ESMValTool and plan to publish the
+results of the diagnostic in a peer-reviewed paper, you might want to develop
+the diagnostic in a slightly less open setting than the public ESMValTool
+repository. That is what the ESMValTool-private repository is for.
+It would be great, though, if you would make the diagnostic available for the
+whole community after your paper has been accepted.
+The steps that you need to take to develop a diagnostic in the private
+repository and then open a pull request for it in the public repository are
+described in the following:
+
+1. Clone the private repository
+===============================
+For example, to clone a repository called esmvaltool-private, you would run:
+
+``git clone git@github.com:esmvalgroup/esmvaltool-private``
+
+or
+
+``git clone https://github.com/esmvalgroup/esmvaltool-private``
+
+
+2. Make a branch to develop your recipe and diagnostic
+======================================================
+``git checkout main``
+
+``git pull``
+
+``git checkout -b my-awesome-diagnostic``
+
+
+3. Develop your diagnostic in that branch and push it to the private repository
+================================================================================
+``git push -u origin my-awesome-diagnostic``
+
+the first time and
+
+``git push``
+
+any other time
+
+
+4. Write and submit your paper
+==============================
+
+5. Push your branch to the public repository
+============================================
+Add the public repository as a remote
+
+``git remote add public git@github.com:esmvalgroup/esmvaltool``
+
+or
+
+``git remote add public https://github.com/esmvalgroup/esmvaltool``
+
+and push your branch to the public repository
+
+``git push -u public my-awesome-diagnostic``
+
+
+6. Make a pull request in the public repository
+===============================================
+Go to https://github.com/esmvalgroup/esmvaltool/pulls and click the
+'New pull request' button.
+Process reviewer comments and get it merged as described in :ref:`reviewing`.
+
+7. Obtain a DOI for your code and add it to your paper
+======================================================
+Wait for a new release of ESMValTool. Releases are normally scheduled every
+four months. Find the release schedule here: :ref:`release_schedule`.
+With the next release, your diagnostic recipe and source code will
+automatically be included in the archive on Zenodo and you can add the DOI
+from Zenodo to your paper: https://zenodo.org/record/3698045
diff --git a/doc/sphinx/source/community/release_strategy/detailed_release_procedure.rst b/doc/sphinx/source/community/release_strategy/detailed_release_procedure.rst
new file mode 100644
index 0000000000..d0d7f74672
--- /dev/null
+++ b/doc/sphinx/source/community/release_strategy/detailed_release_procedure.rst
@@ -0,0 +1,225 @@
+.. _detailed_release_procedure:
+
+Release: recipe runs and comparison
+====================================
+
+The release procedure for ESMValTool is (at the moment) a fairly involved
+process, so it is important to be very well organized and to document each
+procedural step, so that the next release manager can follow these steps and
+finalize the release without any delays.
+
+The workflow below assumes an ESMValCore release candidate, or a completed
+stable release, has been released and deployed on conda-forge and PyPI; it
+also assumes the release manager has access to accounts on `DKRZ/Levante
+`_.
+
+Below is a list of steps that the release manager, together with the previous
+release manager, should go through before the actual release; these include
+testing the new code by running all available recipes in the ``main`` branch,
+and comparing the output against the previous release.
+
+Open an issue on GitHub
+-----------------------
+
+First, open an issue on GitHub where the testing workflow before the release
+is documented (see example https://github.com/ESMValGroup/ESMValTool/issues/2881).
+Name it something relevant like "Recipe testing and comparison for release
+2.x.x", and populate the issue description with information about where the
+testing is taking place and which tools and versions are used; here are some
+suggestions:
+
+
+- path to the output directories on DKRZ/Levante
+
+We should document the versions of various utilities so that the work can be
+reproduced in case there is an issue, or in case the release work needs to be
+picked up mid-release by another release manager:
+
+- documenting `conda`/`mamba` versions:
+
+.. code-block:: bash
+
+   mamba --version
+
+- documenting the `git` branch and its state:
+
+.. code-block:: bash
+
+   git status
+
+Furthermore, the runtime environment needs to be documented: make a copy of
+the environment file and attach it in the release testing issue; to record
+the environment in a file use e.g.
+
+.. code-block:: bash
+
+   conda env export > ToolEnv_v2xx_Test.txt
+
+Modifications to configuration files need to be documented as well.
+To test recipes, it is recommended to only use the default options and DKRZ
+data directories, simply by uncommenting the DKRZ-Levante block of a
+:ref:`newly generated configuration file `.
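+
+A sketch of generating that configuration file (``esmvaltool config
+get_config_user`` is assumed to be the relevant subcommand here; check
+``esmvaltool config --help`` for the exact interface):
+
+.. code-block:: bash
+
+   # Write a fresh default configuration file, then uncomment the
+   # DKRZ-Levante block in it before running the recipes.
+   esmvaltool config get_config_user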
+
+Submit run scripts - test recipe runs
+-------------------------------------
+
+We are now ready to start running all the available recipes, to compare the
+output against the previous release. Running is currently done via batch
+scripts submitted to a scheduler (SLURM). Generate the submission scripts
+using the ``generate.py`` :ref:`utility Python script `.
+
+You will have to set the name of your environment, your email address (if
+you want to get email notifications for successful/failed jobs) and the name
+of the directory you want to store the log files of the jobs in. A compute
+project from which resources are billed needs to be set as well; the default
+partition is set to `interactive`.
+More information on running jobs with SLURM on DKRZ/Levante can be found in
+the DKRZ `documentation
+`_.
+
+You can also specify the path to your configuration directory where
+``max_parallel_tasks`` can be set in a YAML file. The script was found to
+work well with ``max_parallel_tasks=8``. Some recipes need to be run with
+``max_parallel_tasks=1`` (large memory requirements, CMIP3 data, diagnostic
+issues, ...). These recipes are listed in `ONE_TASK_RECIPES`.
+
+Some recipes need other job requirements; you can add their headers in the
+`SPECIAL_RECIPES` dictionary. Otherwise the header will be written following
+the template defined in the lines below that dictionary. If you want to
+exclude recipes, you can do so by uncommenting the `exclude` lines.
+
+Before submitting all jobs, it is recommended to try the batch script
+generation with ``submit = False`` and check the generated files. If recipes
+with special runtime requirements have been added to ESMValTool since the
+previous release, these may need to be added to `SPECIAL_RECIPES` and/or to
+`ONE_TASK_RECIPES`.
+Other recipes should run successfully with the default SLURM settings set in
+this script.
+
+The launch scripts will be saved in the same directory you execute the script
+from. These are named like ``launch_recipe_.sh``.
+To submit these scripts to the SLURM scheduler, use the
+``sbatch launch_recipe_.sh`` command; a sketch for submitting them all at
+once is shown below. You can check the status of your SLURM queue by
+invoking:
+
+.. code-block:: bash
+
+   squeue -u $USER
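+
+A minimal sketch of generating and submitting all launch scripts; the path to
+``generate.py`` is assumed to be ``esmvaltool/utils/batch-jobs`` in a local
+ESMValTool checkout, so adjust it as needed:
+
+.. code-block:: bash
+
+   # Generate the launch scripts ...
+   python ESMValTool/esmvaltool/utils/batch-jobs/generate.py
+   # ... then submit every generated script to SLURM.
+   for script in launch_recipe_*.sh; do
+       sbatch "$script"
+   done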
+
+Also, for computationally heavy recipes, you can request more memory and/or
+time; see e.g. the edited batch header below (note the `compute` partition,
+which is used for such heavy runs):
+
+.. code-block:: bash
+
+   #SBATCH --partition=compute
+   #SBATCH --time=08:00:00
+   #SBATCH --mem=0
+   #SBATCH --constraint=512G
+
+.. note::
+
+   On DKRZ/Levante, a user can't have more than 20 SLURM jobs running at a
+   time. As soon as a job is finished, the next one should start. More
+   information on the job handling at DKRZ `here
+   `_.
+   Also note that the ``--mem=0`` argument needs to be specified if any of
+   the ``--constraint`` arguments are used for memory requests, so that the
+   node's full memory is allocated.
+
+Analyse the results
+-------------------
+
+Once all jobs are completed, assemble some statistics so that issues with
+certain recipes can be followed up, and document this information in the
+release issue, such as:
+
+- number of successfully run recipes
+- number of failed recipes with preprocessor errors (can they be fixed? Can
+  the fixes be included in the release?)
+- number of failed recipes with diagnostic errors (can they be fixed? Can the
+  fixes be included in the release?)
+- number of recipes that are missing data
+- number of recipes that have various other issues (and document them)
+
+To parse the output of all these runs, use the ``parse_recipes_output.py``
+:ref:`utility Python script `, as sketched below.
+It is recommended to run the recipes with `log_level: info` in your config
+file to enable the parsing script to run fast.
+
+Share the results with the community
+------------------------------------
+
+Create the debug.html and index.html overview webpages by running the
+:ref:`utility script ` in the directory containing the recipe runs.
+These two files, together with the recipe output, need to be copied to the
+disk of a virtual machine (VM) used to display recipe output in `webpages
+`_.
+Do not store final release results including `/preproc/` dirs on the VM: the
+total size of the output for all recipes, including `/preproc/` dirs, is in
+the 4.5 TB ballpark, much too large for the VM's storage capacity! Therefore,
+we recommend using the option to remove the preprocessing directories when a
+recipe runs successfully, either with ``--remove-preproc-dir=True`` at
+runtime or by setting ``remove_preproc_dir: true`` in the configuration file.
+
+Results from recipe runs are stored on the DKRZ esmvaltool VM. To log in to
+the VM, log in to the Levante head node and then continue to the VM with:
+
+.. code-block:: bash
+
+   ssh user@esmvaltool.dkrz.de
+
+where `user` is your DKRZ/Levante user name.
+Then create a new subdirectory in ``/shared/esmvaltool/`` that will contain
+the recipe output.
+This should be named like the ESMValCore version used for the testing, e.g.
+``v2.x.xrcx`` or ``v2.x.x``.
+Recipe output can then be copied by running the following from the VM:
+
+.. code-block:: bash
+
+   nohup rsync --exclude preproc/ -rlt /path_to_testing/esmvaltool_output/* /shared/esmvaltool/v2.x.x/
+
+By copying the debug.html and index.html files into
+/shared/esmvaltool/v2.x.x/, the output becomes available online, see for
+`example `_.
+Before copying the recipe output to the VM, you may want to clean up your
+directory containing the results by removing any large ``preproc``
+directories of failed runs and only keeping the last run for each recipe.
+This will help generate a clearer overview webpage.
+Note that the ``summarize.py`` script needs to be rerun if recipe runs were
+added to or deleted from your testing directory.
+
+Link the overview webpage to the release issue.
+This makes it much easier to ask for feedback from recipe developers and to
+analyse failures.
+
+Results produced with the final ESMValCore release candidate should be put in
+a VM directory named after the version number, e.g. ``v2.x.x``.
+Once the release process is over, test results produced with previous release
+candidates can be deleted to save space on the VM.
+
+.. note::
+
+   If you wrote recipe run output to Levante's `/scratch` partition, be aware
+   that the data will be removed after two weeks, so you will have to quickly
+   move the output data to the VM, using the ``nohup`` command above.
+
+Running the comparison
+----------------------
+
+To compare the newly produced output from running all recipes, follow the
+steps below.
+
+First, access the DKRZ esmvaltool VM and install Miniconda on it; if you
+already have a Miniconda installer downloaded in your Levante ``$HOME``, you
+can copy it over with:
+
+.. code-block:: bash
+
+   scp Miniconda3-py39_4.12.0-Linux-x86_64.sh user@esmvaltool.dkrz.de:/mnt/esmvaltool_disk2/work/
+
+.. warning::
+
+   conda environments should not be created in the home directory because it
+   is on a very small disk, but rather in a directory with your username
+   under `/mnt/esmvaltool_disk2/work/`
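+
+A sketch of the installation itself; the target prefix is an assumption that
+follows the warning above:
+
+.. code-block:: bash
+
+   # Install Miniconda non-interactively (-b) into the work disk (-p prefix).
+   bash Miniconda3-py39_4.12.0-Linux-x86_64.sh -b -p /mnt/esmvaltool_disk2/work/$USER/miniconda3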
+
+Next, we need to set up the input files.
+
+The ``/work`` partition is visible from the VM, so you can run the compare
+tool straight on the VM.
+
+The steps for running the compare tool on the VM are the following:
+
+- run date: log the run date here
+- conda env: log the name of the conda environment you are using
+- ESMValTool branch: log the name of the code branch you are using (e.g. `v2.8.x`)
+- prerequisite - install `imagehash`: `pip install imagehash`
+- reference run (v2.7.0; previous stable release): `export reference_dir=/work/bd0854/b382109/v270` (contains `preproc/` dirs too, 122 recipes)
+- current run (v2.8.0): `export current_dir=path_to_current_run`
+- run the :ref:`comparison script` with:
+
+.. code-block:: bash
+
+   nohup python ESMValTool/esmvaltool/utils/testing/regression/compare.py --reference $reference_dir --current $current_dir > compare_v280_output.txt
+
+Copy the comparison txt file to the release issue.
+Some of the recipes will appear as having output identical to that of the
+previous release; others, however, will need human inspection.
+Ask the recipe maintainers (`@ESMValGroup/esmvaltool-recipe-maintainers`_)
+and the ESMValTool Development Team
+(`@ESMValGroup/esmvaltool-developmentteam`_) to provide assistance in
+checking the results.
+Here are some guidelines on how to perform the human inspection:
+
+- look at plots from the current run vs the previous release run: most of
+  them will be identical, but if Matplotlib has changed some plotting
+  feature, images may look slightly different, so the comparison script may
+  report them if the difference is larger than the threshold - but Mark I
+  eyeball inspection will show they are identical
+- other plots will differ due to changes in plot settings (different colours,
+  axes etc.) due to updated settings from the diagnostic developers: if they
+  look similar enough, then it's fine
+- report (and subsequently open issues) if you notice major differences in
+  plots; most times a simple comment on the release issue, in which you tag
+  the diagnostic developers, leads to them having a look at the plots and
+  OK-ing them; if that's not the case, then open a separate issue. You can
+  find examples of release issues containing overview lists and tables of
+  failures and problems in `2881 `_
+  and `3076 `_.
+
+Appendix
+--------
+
+Here you can find a list of utility scripts used to run recipes and analyse
+the results:
+
+- :ref:`Python scripts` that create slurm submission scripts and parse slurm log files.
+- :ref:`Python script` that compares one or more recipe runs to known good previous run(s).
+- :ref:`Python script` that creates the ``index.html`` and ``debug.html`` overview pages.
+
+.. _`@ESMValGroup/esmvaltool-recipe-maintainers`: https://github.com/orgs/ESMValGroup/teams/esmvaltool-recipe-maintainers
+.. _`@ESMValGroup/esmvaltool-developmentteam`: https://github.com/orgs/ESMValGroup/teams/esmvaltool-developmentteam
diff --git a/doc/sphinx/source/community/release_strategy/index.rst b/doc/sphinx/source/community/release_strategy/index.rst
new file mode 100644
index 0000000000..61445a031d
--- /dev/null
+++ b/doc/sphinx/source/community/release_strategy/index.rst
@@ -0,0 +1,17 @@
+.. _release_strategy:
+
+Release strategy and procedure
+==============================
+
+These pages detail the release strategy and the technical procedures used
+during a release; they are very helpful if you are the next Release Manager.
+
+For the release schedule, check the :ref:`release_schedule` to see upcoming
+and past releases.
+
+.. toctree::
+   :maxdepth: 1
+
+   Release strategy and procedure
+   Detailed testing before release
diff --git a/doc/sphinx/source/community/release_strategy/release_strategy.rst b/doc/sphinx/source/community/release_strategy/release_strategy.rst
new file mode 100644
index 0000000000..72c55266dd
--- /dev/null
+++ b/doc/sphinx/source/community/release_strategy/release_strategy.rst
@@ -0,0 +1,676 @@
+.. _preparation-new-release:
+
+Release schedule and procedure for ESMValCore and ESMValTool
+============================================================
+
+This document describes the process for the release of ESMValCore
+and ESMValTool.
+By following a defined process, we streamline the work, reduce uncertainty
+about required actions, and clarify the state of the code for the user.
+
+ESMValTool follows a strategy of timed releases.
+That means that we make releases with a regular frequency, and all features
+that are implemented up to a certain cut-off point can go into the upcoming
+release; those that are not are deferred to the next release.
+This means that generally no release will be delayed due to a pending
+feature.
+Instead, the regular nature of the release guarantees that every feature can
+be released in a timely manner even if a specific target release is missed.
+
+Because of limited resources, only the latest released versions of ESMValTool
+and ESMValCore are maintained.
+If your project requires longer maintenance or you have other concerns about
+the release strategy, please contact the ESMValTool core development team,
+see :ref:`Support-and-Contact`.
+
+
+Overall Procedure
+-----------------
+
+Timeline
+~~~~~~~~
+
+.. figure:: /figures/release-timeline.png
+   :align: center
+
+   Example of a Release Timeline (in this case for 2.1.0)
+
+1. Contributors assign issues (and pull requests) that they intend to finish
+   before the due date; there is a separate milestone for ESMValCore and
+   ESMValTool
+2. The ESMValCore feature freeze takes place on the ESMValCore due date
+3. Some additional testing of ESMValCore takes place
+4. ESMValCore release
+5. The ESMValTool feature freeze takes place
+6. Some additional testing of ESMValTool takes place
+7. ESMValTool release
+8. Soon after the release, the core development team meets to coordinate the
+   content of the milestone for the next release
+
+.. _release_schedule:
+
+Release schedule
+~~~~~~~~~~~~~~~~
+
+With the following release schedule, we strive to have three releases per
+year and to avoid releases too close to holidays, as well as weekends.
+ +Upcoming releases +^^^^^^^^^^^^^^^^^ +- 2.12.0 (Release Manager: `Saskia Loosveldt Tomas`_) + ++------------+------------+----------------------------------------+-------------------------------------+ +| Planned | Done | Event | Changelog | ++============+============+========================================+=====================================+ +| 2025-01-13 | | ESMValCore `Feature Freeze`_ | | ++------------+------------+----------------------------------------+-------------------------------------+ +| 2025-01-20 | | ESMValCore Release 2.12.0 | | ++------------+------------+----------------------------------------+-------------------------------------+ +| 2025-01-27 | | ESMValTool `Feature Freeze`_ | | ++------------+------------+----------------------------------------+-------------------------------------+ +| 2025-02-03 | | ESMValTool Release 2.12.0 | | ++------------+------------+----------------------------------------+-------------------------------------+ + + +Past releases +^^^^^^^^^^^^^ + +- 2.11.0 (Release Manager: Met Office: `Emma Hogan`_, `Chris Billows`_, `Ed Gillett`_) + ++------------+------------+----------------------------------------+-------------------------------------+ +| Planned | Done | Event | Changelog | ++============+============+========================================+=====================================+ +| 2024-04-22 | | ESMValCore `Feature Freeze`_ | | ++------------+------------+----------------------------------------+-------------------------------------+ +| 2023-05-03 | 2024-07-03 | :esmvalcore-release:`v2.11.0` released | :ref:`esmvalcore:changelog-v2-11-0` | ++------------+------------+----------------------------------------+-------------------------------------+ +| 2023-05-06 | | ESMValTool `Feature Freeze`_ | | ++------------+------------+----------------------------------------+-------------------------------------+ +| 2023-05-17 | 2024-07-04 | :release:`v2.11.0` released | :ref:`changelog-v2-11-0` | ++------------+------------+----------------------------------------+-------------------------------------+ + +- 2.10.0 (Release Manager: `Klaus Zimmermann`_) + ++------------+------------+----------------------------------------+-------------------------------------+ +| Planned | Done | Event | Changelog | ++============+============+========================================+=====================================+ +| 2023-10-02 | | ESMValCore `Feature Freeze`_ | | ++------------+------------+----------------------------------------+-------------------------------------+ +| 2023-10-09 | 2023-12-19 | :esmvalcore-release:`v2.10.0` released | :ref:`esmvalcore:changelog-v2-10-0` | ++------------+------------+----------------------------------------+-------------------------------------+ +| 2023-10-16 | | ESMValTool `Feature Freeze`_ | | ++------------+------------+----------------------------------------+-------------------------------------+ +| 2023-10-16 | 2023-12-20 | :release:`v2.10.0` released | :ref:`changelog-v2-10-0` | ++------------+------------+----------------------------------------+-------------------------------------+ + +- 2.9.0 (Release Manager: `Bouwe Andela`_) + ++------------+------------+---------------------------------------+-------------------------------------+ +| Planned | Done | Event | Changelog | ++============+============+=======================================+=====================================+ +| 2023-06-05 | | ESMValCore `Feature Freeze`_ | | 
++------------+------------+---------------------------------------+-------------------------------------+ +| 2023-06-12 | 2023-07-04 | :esmvalcore-release:`v2.9.0` released | :ref:`esmvalcore:changelog-v2-9-0` | ++------------+------------+---------------------------------------+-------------------------------------+ +| 2023-06-19 | | ESMValTool `Feature Freeze`_ | | ++------------+------------+---------------------------------------+-------------------------------------+ +| 2023-06-26 | 2023-07-06 | :release:`v2.9.0` released | :ref:`changelog-v2-9-0` | ++------------+------------+---------------------------------------+-------------------------------------+ + +- 2.8.1 (Bugfix, Release Manager: `Valeriu Predoi`_) + ++------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| Done | Event | Changelog | ++============+=============================================================================================+====================================+ +| 2023-06-02 | `ESMValCore Release 2.8.1 `_ | :ref:`esmvalcore:changelog-v2-8-1` | ++------------+---------------------------------------------------------------------------------------------+------------------------------------+ + +- 2.8.0 (Release Manager: `Rémi Kazeroni`_) + ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| Planned | Done | Event | Changelog | ++============+============+=============================================================================================+====================================+ +| 2023-03-03 | | ESMValCore Feature Freeze | | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| 2023-03-20 | 2023-03-23 | `ESMValCore Release 2.8.0 `_ | :ref:`esmvalcore:changelog-v2-8-0` | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| 2023-03-17 | | ESMValTool Feature Freeze | | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| 2023-03-27 | 2023-03-28 | `ESMValTool Release 2.8.0 `_ | :ref:`changelog-v2-8-0` | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ + +- 2.7.1 (Bugfix, Release Manager: `Valeriu Predoi`_) + ++------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| Done | Event | Changelog | ++============+=============================================================================================+====================================+ +| 2022-12-12 | `ESMValCore Release 2.7.1 `_ | :ref:`esmvalcore:changelog-v2-7-1` | ++------------+---------------------------------------------------------------------------------------------+------------------------------------+ + +- 2.7.0 (Release Manager: `Valeriu Predoi`_) + ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| Planned | Done | Event | Changelog | 
++============+============+=============================================================================================+====================================+ +| 2022-10-03 | | ESMValCore Feature Freeze | | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| 2022-10-10 | 2022-10-13 | `ESMValCore Release 2.7.0 `_ | :ref:`esmvalcore:changelog-v2-7-0` | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| 2022-10-17 | | ESMValTool Feature Freeze | | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| 2022-10-24 | 2022-10-28 | `ESMValTool Release 2.7.0 `_ | :ref:`changelog-v2-7-0` | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ + +- 2.6.0 (Release Manager: `Saskia Loosveldt Tomas`_) + ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| Planned | Done | Event | Changelog | ++============+============+=============================================================================================+====================================+ +| 2022-06-06 | | ESMValCore Feature Freeze | | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| 2022-06-13 | 2022-07-15 | `ESMValCore Release 2.6.0 `_ | :ref:`esmvalcore:changelog-v2-6-0` | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| 2022-06-20 | | ESMValTool Feature Freeze | | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| 2022-06-27 | 2022-07-25 | `ESMValTool Release 2.6.0 `_ | :ref:`changelog-v2-6-0` | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ + +- 2.5.0 (Coordinating Release Manager: `Axel Lauer`_, team members: `Manuel Schlund`_, `Rémi Kazeroni`_) + ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| Planned | Done | Event | Changelog | ++============+============+=============================================================================================+====================================+ +| 2022-02-07 | | ESMValCore Feature Freeze | | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| 2022-02-14 | 2022-03-14 | `ESMValCore Release 2.5.0 `_ | :ref:`esmvalcore:changelog-v2-5-0` | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| 2022-02-21 | | ESMValTool Feature Freeze | | 
++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| 2022-02-28 | 2022-03-15 | `ESMValTool Release 2.5.0 `_ | :ref:`changelog-v2-5-0` | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ + +- 2.4.0 (Release Manager: `Klaus Zimmermann`_) + ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| Planned | Done | Event | Changelog | ++============+============+=============================================================================================+====================================+ +| 2021-10-04 | | ESMValCore Feature Freeze | | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| 2021-10-11 | 2021-11-08 | `ESMValCore Release 2.4.0 `_ | :ref:`esmvalcore:changelog-v2-4-0` | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| 2021-10-18 | | ESMValTool Feature Freeze | | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| 2021-10-25 | 2021-11-09 | `ESMValTool Release 2.4.0 `_ | :ref:`changelog-v2-4-0` | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ + +- 2.3.1 (Bugfix, Release Manager: `Klaus Zimmermann`_) + ++------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| Done | Event | Changelog | ++============+=============================================================================================+====================================+ +| 2021-07-23 | `ESMValCore Release 2.3.1 `_ | :ref:`esmvalcore:changelog-v2-3-1` | ++------------+---------------------------------------------------------------------------------------------+------------------------------------+ + +- 2.3.0 (Release Manager: `Klaus Zimmermann`_) + ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| Planned | Done | Event | Changelog | ++============+============+=============================================================================================+====================================+ +| 2021-06-07 | | ESMValCore Feature Freeze | | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| 2021-06-14 | 2021-06-14 | `ESMValCore Release 2.3.0 `_ | :ref:`esmvalcore:changelog-v2-3-0` | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| 2021-06-21 | | ESMValTool Feature Freeze | | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| 2021-06-28 | 2021-07-27 | `ESMValTool Release 2.3.0 `_ | :ref:`changelog-v2-3-0` | 
++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ + +- 2.2.0 (Release Manager: `Javier Vegas-Regidor`_) + ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| Planned | Done | Event | Changelog | ++============+============+=============================================================================================+====================================+ +| 2021-02-01 | | ESMValCore Feature Freeze | | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| 2021-02-07 | 2021-02-09 | `ESMValCore Release 2.2.0 `_ | :ref:`esmvalcore:changelog-v2-2-0` | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| 2021-02-14 | | ESMValTool Feature Freeze | | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| 2021-02-21 | 2021-02-25 | `ESMValTool Release 2.2.0 `_ | :ref:`changelog-v2-2-0` | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ + +- 2.1.1 (Bugfix, Release Manager: `Valeriu Predoi`_) + ++------------+---------------------------------------------------------------------------------------------+-------------------------+ +| Done | Event | Changelog | ++============+=============================================================================================+=========================+ +| 2020-12-01 | `ESMValTool Release 2.1.1 `_ | :ref:`changelog-v2-1-1` | ++------------+---------------------------------------------------------------------------------------------+-------------------------+ + +- 2.1.0 (Release Manager: `Valeriu Predoi`_) + ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| Planned | Done | Event | Changelog | ++============+============+=============================================================================================+====================================+ +| 2020-10-05 | | ESMValCore Feature Freeze | | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| 2020-10-12 | 2020-10-12 | `ESMValCore Release 2.1.0 `_ | :ref:`esmvalcore:changelog-v2-1-0` | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| 2020-10-19 | | ESMValTool Feature Freeze | | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| 2020-10-26 | 2020-10-26 | `ESMValTool Release 2.1.0 `_ | :ref:`changelog-v2-1-0` | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ + +- 2.0.0 (Release Manager: `Bouwe Andela`_) + 
++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| Planned | Done | Event | Changelog | ++============+============+=============================================================================================+====================================+ +| 2020-07-01 | | ESMValCore Feature Freeze | | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| 2020-07-20 | 2020-07-20 | `ESMValCore Release 2.0.0 `_ | :ref:`esmvalcore:changelog-v2-0-0` | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| 2020-07-22 | | ESMValTool Feature Freeze | | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| 2020-08-03 | 2020-08-03 | `ESMValTool Release 2.0.0 `_ | :ref:`changelog-v2-0-0` | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ + + + +.. _release_steps: + +Detailed timeline steps +~~~~~~~~~~~~~~~~~~~~~~~ + +These are the detailed steps to take to make a release. + +#. Populate the milestone + + - The core development team will make sure it adds issues that it intends to work on as early as possible. + - Any contributor is welcome to add issues or pull requests that they intend to work on themselves to a milestone. + + +#. ESMValCore feature freeze, testing, and release candidates + + - A release branch is created and branch protection rules are set up so only the release manager (i.e. the person in charge of the release branch) can push commits to that branch. + - Make a release candidate with the release branch following the :ref:`ESMValCore release instructions `. + - Uncomment the release candidate channel item (i.e. ``conda-forge/label/esmvalcore_rc``) in the ``environment.yml`` of ESMValTool to add it to the list of channels used. Adjust the pin on ESMValCore after each release candidate (e.g. ``esmvalcore==2.8.0rc1``). Check that the environment creation of ESMValTool works fine and contains the latest release candidate version. + - Run all the recipes (optionally with a reduced amount of data) to check that they still work with the release candidate. + - If a bug is discovered that needs to be fixed before the release, a pull request can be made to the main branch to fix the bug. The person making the pull request can then ask the release manager to cherry-pick that commit into the release branch. + - Make another release candidate including the bugfix(es) and run the affected recipes again to check for further bugs. + - Make as many release candidates for ESMValCore as needed in order to fix all the detected bugs. + + +#. ESMValTool feature freeze + + - A release branch is created and branch protection rules are set up so only the release manager (i.e. the person in charge of the release branch) can push commits to that branch. + - The creation of the release branch is announced to the ESMValTool development team along with the procedures to use the branch for testing and making last-minute changes (see next step). + + +#. 
Some additional testing of ESMValTool
+
+   - :ref:`Run all the recipes to check that they still work and generate the overview HTML pages `.
+   - Upload the results to the webpage at https://esmvaltool.dkrz.de/shared/esmvaltool/.
+   - :ref:`Compare the results to those obtained with the previous release `.
+   - Create a `GitHub discussion `__ to communicate about the results.
+   - If there are differences with the previous release, ask recipe
+     maintainers or authors to review the plots and NetCDF files of their
+     diagnostics, for example by
+     `mentioning `__
+     them in the discussion.
+   - If a bug is discovered that needs to be fixed before the release, a pull
+     request can be made to the main branch to fix the bug. The person making
+     the pull request can then ask the release manager to cherry-pick that
+     commit into the release branch (see the sketch after the bugfix release
+     procedure below).
+   - Update the :ref:`list of broken recipes ` with new
+     recipes that could not be run successfully during the testing.
+     Open a separate GitHub issue for each failing recipe and assign the next
+     milestone.
+     Open an overview issue, see :issue:`3484` for an example, and review
+     past overview issues.
+     Take action to ensure that the broken recipe policy is followed.
+
+
+#. ESMValCore release
+
+   - Make the official ESMValCore release with the last release candidate by
+     following the :ref:`ESMValCore release instructions `.
+
+
+#. ESMValTool release
+
+   - Pin ESMValCore to the same version as ESMValTool in the
+     ``environment.yml`` and on `conda-forge
+     `__.
+     This way, we make sure that ESMValTool uses the ESMValCore version with
+     which it has been tested.
+     Make sure to comment out the release candidate channel again once
+     ESMValCore has been released.
+   - Make the release by following :ref:`How to make a release`.
+
+
+#. Announce the releases
+
+   - Ask the user engagement team to announce the releases to the user
+     mailing list, the development team mailing list, and on Twitter.
+
+
+#. Core development team meets to coordinate the content of the next milestone
+
+   - Create a Doodle poll for the meeting or, even better, have the meeting
+     during an ESMValTool workshop
+   - Prepare the meeting by filling the milestone
+   - At the meeting, discuss
+
+     - whether the proposed issues cover everything we would like to
+       accomplish
+     - whether there are things we need to change about the release process
+     - who will be the release manager(s) for the next release
+
+Bugfix releases
+---------------
+
+Next to the feature releases described above, it is also possible to have
+bugfix releases (2.0.1, 2.0.2, etc.). In general, bugfix releases will only
+be done on the latest release, and may include ESMValCore, ESMValTool, or
+both.
+
+
+Procedure
+~~~~~~~~~
+
+#. One or more issues are resolved that are deemed (by the core development
+   team) to warrant a bugfix release.
+#. A release branch is created from the last release tag and the commit(s)
+   that fix the bug(s) are cherry-picked into it from the main branch (see
+   the sketch below).
+#. Some additional testing of the release branch takes place.
+#. The release takes place.
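+
+A minimal sketch of the cherry-picking step (the branch name and commit hash
+are hypothetical):
+
+.. code-block:: bash
+
+   # Switch to the release branch and apply the bugfix commit from main.
+   git checkout v2.0.x
+   git cherry-pick 1a2b3c4
+   git push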
+
+Compatibility between ESMValTool and ESMValCore is ensured by the appropriate
+version pinning of ESMValCore by ESMValTool.
+
+Glossary
+--------
+
+Feature freeze
+~~~~~~~~~~~~~~
+The date on which no new features may be submitted for the upcoming release.
+After this date, only critical bug fixes can still be included in the
+:ref:`release_branch`.
+Development work can continue in the main branch.
+If you are unsure whether new developments could interfere with the release,
+check with the :ref:`release_manager`.
+
+
+Milestone
+~~~~~~~~~
+A milestone is a list of issues and pull requests on GitHub. It has a due
+date; this date is the date of the feature freeze. Adding an issue or pull
+request indicates the intent to finish the work on this issue before the due
+date of the milestone. If the due date is missed, the issue can be included
+in the next milestone.
+
+.. _release_manager:
+
+Release manager
+~~~~~~~~~~~~~~~
+The person in charge of making the release, both technically and
+organizationally. Appointed for a single release.
+Check the :ref:`release_schedule` to see who is the manager of the next
+release.
+
+.. _release_branch:
+
+Release branch
+~~~~~~~~~~~~~~
+The release branch can be used to do some additional testing before the
+release, while normal development work continues in the main branch. It will
+be branched off from the main branch after the feature freeze and will be
+used to make the release on the release date. The only way to still get
+something included in the release after the feature freeze is to ask the
+release manager to cherry-pick a commit from the main branch into this
+branch.
+
+
+.. _How to make a release:
+
+How to make an ESMValTool release
+---------------------------------
+
+Before the actual release, a number of tests and pre-release steps must be
+performed; a detailed workflow description can be found in
+:ref:`detailed_release_procedure`.
+
+The release manager makes the release, assisted by the release manager of the
+previous release, or, if that person is not available, another previous
+release manager.
+Perform the steps listed below with two people, to reduce the risk of error.
+
+.. note::
+
+   The previous release manager ensures the current release manager has the
+   required administrative permissions to make the release.
+   Consider the following services:
+   `conda-forge `__,
+   `DockerHub `__,
+   `PyPI `__, and
+   `readthedocs `__.
+
+The release of ESMValTool should come after the release of ESMValCore.
+To make a new release of the package, follow these steps:
+
+1. Check that all tests and builds work
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Check that the ``nightly``
+  `test run on CircleCI `__
+  was successful.
+- Check that the
+  `GitHub Actions test runs `__
+  were successful.
+- Check that the documentation builds successfully on
+  `readthedocs `__.
+- Check that the
+  `Docker images `__
+  are building successfully.
+
+All tests should pass before making a release (branch).
+
+2. Increase the version number
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The version number is automatically generated from the information provided
+by git using `setuptools-scm `__, but a
+static version number is stored in ``CITATION.cff``.
+Make sure to update the version number and release date in ``CITATION.cff``,
+as sketched below.
+See https://semver.org for more information on choosing a version number.
+Make sure that the ESMValCore version that is being used is set to the latest
+version. See the :ref:`dependencies ` section for more details on
+how to update the ESMValCore version.
+Make a pull request and get it merged into ``main``.
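+
+A quick way to double-check the static version fields before opening that
+pull request (this assumes the standard ``version`` and ``date-released``
+keys of the Citation File Format):
+
+.. code-block:: bash
+
+   # Show the fields in CITATION.cff that must match the new release.
+   grep -E '^(version|date-released)' CITATION.cff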
+
+.. _add-release-notes:
+
+3. Add release notes
+~~~~~~~~~~~~~~~~~~~~
+Use the script :ref:`draft_release_notes.py` to create a draft of the
+release notes.
+This script uses the titles and labels of merged pull requests since the
+previous release.
+Open a discussion to allow members of the development team to nominate pull
+requests as highlights. Add the most voted pull requests as highlights at
+the beginning of the changelog.
+After the highlights section, list any backward incompatible changes that
+the release may include.
+The :ref:`backward compatibility policy `
+lists the information that should be provided by the developer of any
+backward incompatible change.
+Make sure to also list any deprecations that the release may include, as
+well as a brief description of how to upgrade a deprecated feature.
+Review the results, and if anything needs changing, change it on GitHub and
+re-run the script until the changelog looks acceptable.
+Copy the result to the file ``doc/sphinx/source/changelog.rst``.
+If possible, try to set the script dates to the date of the release
+you are managing.
+Make a pull request and get it merged into ``main``.
+
+4. Create a release branch
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+Create a branch off the ``main`` branch and push it to GitHub, as sketched
+below.
+Ask someone with administrative permissions to set up branch protection
+rules for it so only you and the person helping you with the release can
+push to it.
+Announce the name of the branch in an issue and ask the members of the
+`ESMValTool development team `__
+to run their favourite recipe using this branch.
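+
+A minimal sketch of this step (the branch name follows the ``v2.1.x``
+convention used in the merge example below):
+
+.. code-block:: bash
+
+   # Branch off main and publish the release branch.
+   git checkout main
+   git pull
+   git checkout -b v2.1.x
+   git push -u origin v2.1.x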
+- Make sure your current working directory is clean by checking the output
+  of ``git status`` and by running ``git clean -xdf`` to remove any files
+  ignored by git.
+- Install the required packages:
+  ``python3 -m pip install --upgrade pep517 twine``
+- Build the package:
+  ``python3 -m pep517.build --source --binary --out-dir dist/ .``
+  This command should generate two files in the ``dist`` directory, e.g.
+  ``ESMValTool-2.1.0-py3-none-any.whl`` and ``ESMValTool-2.1.0.tar.gz``.
+- Upload the package:
+  ``python3 -m twine upload dist/*``
+  You will be prompted for an API token if you have not set this up
+  before; see
+  `here `__ for more information.
+
+You can read more about this in
+`Packaging Python Projects `__.
+
+8. Create the Conda package
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The ``esmvaltool`` package is published on the `conda-forge conda channel
+`__.
+This is done via a pull request on the `esmvaltool-suite-feedstock repository
+`__.
+
+After the upload of the PyPI package, this pull request is automatically opened
+by a bot.
+An example pull request can be found `here
+`__.
+Follow the instructions by the bot to finalize the pull request.
+This mostly consists of updating dependencies that have changed during
+the last release cycle.
+Once the pull request is approved by the `feedstock maintainers
+`__,
+they will merge it, which will in turn publish the package on
+conda-forge some time later.
+Contact the feedstock maintainers if you want to become a maintainer yourself.
+
+9. Check the Docker images
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+There are three main Docker container images available for ESMValTool on
+`Dockerhub `_:
+
+- ``esmvalgroup/esmvaltool:stable``, built from `docker/Dockerfile `_,
+  is a tag that is always the same as the latest released version.
+  This image is only built by Dockerhub when a new release is created.
+- ``esmvalgroup/esmvaltool:development``, built from `docker/Dockerfile.dev `_,
+  is a tag that always points to the latest development version of
+  ESMValTool.
+  This image is built by Dockerhub every time there is a new commit to the
+  ``main`` branch on GitHub.
+- ``esmvalgroup/esmvaltool:experimental``, built from `docker/Dockerfile.exp `_,
+  is a tag that always points to the latest development version of
+  ESMValTool with the latest development version of ESMValCore.
+  Note that some recipes may not work as expected with this image because
+  the ESMValTool development version has been designed to work with the latest
+  release of ESMValCore (i.e. not with the development version).
+  This image is built by Dockerhub every time there is a new commit to the
+  ESMValTool ``main`` branch on GitHub.
+
+In addition to the three images mentioned above, there is an image available
+for every release (e.g. ``esmvalgroup/esmvaltool:v2.5.0``).
+When working on the Docker images, always try to follow the
+`best practices `__.
+
+After making the release, check that the Docker image for that release has been
+built correctly by
+
+1. checking that the version tag is available on `Dockerhub`_ and the ``stable``
+   tag has been updated,
+2. running some recipes with the ``stable`` tag Docker container, for example one
+   recipe each for Python, NCL, R, and Julia,
+3. running a recipe with a Singularity container built from the ``stable`` tag.
+
+If there is a problem with the automatically built container image, you can fix
+the problem and build a new image locally.
+For example, to
+`build `__ and
+`upload `__
+the container image for v2.5.0 of the tool, run:
+
+.. code-block:: bash
+
+   git checkout v2.5.0
+   git clean -x
+   docker build -t esmvalgroup/esmvaltool:v2.5.0 . -f docker/Dockerfile
+   docker push esmvalgroup/esmvaltool:v2.5.0
+
+and if it is the latest release that you are updating, also run
+
+.. code-block:: bash
+
+   docker tag esmvalgroup/esmvaltool:v2.5.0 esmvalgroup/esmvaltool:stable
+   docker push esmvalgroup/esmvaltool:stable
+
+Note that the ``docker push`` command will overwrite the existing tags on
+Dockerhub.
+
+If you would like to make a small change to an existing Docker container image,
+it is also possible to do just that using the
+`docker commit `__
+command.
+Note that this is only recommended for very small changes, as it is not
+reproducible and it will add an extra layer, increasing the size of the image.
+To do this, start the container with
+``docker run -it --entrypoint /bin/bash esmvalgroup/esmvaltool:v2.5.0``
+and make your changes.
+Exit the container by pressing ``ctrl+d`` and find it again by running
+``docker ps -a``.
+Find the ``CONTAINER ID`` of the container you would like to save and run
+``docker commit -c 'ENTRYPOINT ["conda", "run", "--name", "esmvaltool", "esmvaltool"]' 633696a8b53a esmvalgroup/esmvaltool:v2.5.0``
+where ``633696a8b53a`` is an example of a container ID; replace it with
+the actual ID.
+
+Changelog
+---------
+- 2020-09-09 Converted to rst and added to repository (future changes tracked by git)
+- 2020-09-03 Update during video conference (present: Bouwe Andela, Niels Drost, Javier Vegas, Valeriu Predoi, Klaus Zimmermann)
+- 2020-07-27 Update including tidying up and Glossary by Klaus Zimmermann and Bouwe Andela
+- 2020-07-23 Update to timeline format by Bouwe Andela and Klaus Zimmermann
+- 2020-06-08 First draft by Klaus Zimmermann and Bouwe Andela
+
+.. _Bouwe Andela: https://github.com/bouweandela
+.. _Rémi Kazeroni: https://github.com/remi-kazeroni
+.. _Axel Lauer: https://github.com/axel-lauer
+.. _Saskia Loosveldt Tomas: https://github.com/sloosvel
+.. _Valeriu Predoi: https://github.com/valeriupredoi
+.. _Manuel Schlund: https://github.com/schlunma
+.. _Javier Vegas-Regidor: https://github.com/jvegasbsc
+.. _Klaus Zimmermann: https://github.com/zklaus
+.. _Emma Hogan: https://github.com/ehogan
+.. _Chris Billows: https://github.com/chrisbillowsMO
+.. _Ed Gillett: https://github.com/mo-gill
diff --git a/doc/sphinx/source/community/repository.rst b/doc/sphinx/source/community/repository.rst
new file mode 100644
index 0000000000..d1e6c4a70a
--- /dev/null
+++ b/doc/sphinx/source/community/repository.rst
@@ -0,0 +1,180 @@
+.. _git-repository:
+
+***************
+GitHub Workflow
+***************
+
+Basics
+======
+
+The source code of the ESMValTool is hosted on GitHub. The following description gives an overview of the typical workflow and usage for implementing new diagnostics or technical changes into the ESMValTool. For general information on Git, see e.g. the online documentation at https://www.git-scm.com/doc.
+
+There are *two* ESMValTool GitHub repositories available:
+
+#. The **PUBLIC** GitHub repository is open to the public. The ESMValTool is released as open-source software under the Apache License 2.0. Use of the software constitutes acceptance of this license and terms. The PUBLIC ESMValTool repository is located at https://github.com/ESMValGroup/ESMValTool
+
+#. The **PRIVATE** GitHub repository is restricted to the ESMValTool Development Team. This repository is only accessible to ESMValTool developers who have accepted the terms of use for the ESMValTool development environment. The use of the ESMValTool software and access to the private ESMValTool GitHub repository constitutes acceptance of these terms. *When you fork or copy this repository, you must ensure that you do not copy the PRIVATE repository into an open domain!* The PRIVATE ESMValTool repository for the ESMValTool development team is located at https://github.com/ESMValGroup/ESMValTool-private
+
+All developments can be made in either of the two repositories. The creation of *FEATURE BRANCHES* (see below), however, is restricted to registered ESMValTool developers in both repositories. We encourage all developers to join the ESMValTool development team. Please contact the :ref:`ESMValTool Core Development Team ` if you want to join the ESMValTool development team.
+The PRIVATE GitHub repository offers a central protected environment for ESMValTool developers who would like to keep their contributions undisclosed (e.g., unpublished scientific work, work in progress by PhD students) while at the same time benefiting from the possibilities of collaborating with other ESMValTool developers and having a backup of their work. *FEATURE BRANCHES* created in the PRIVATE repository are only visible to the ESMValTool development team but not to the public. The concept of a PRIVATE repository has proven to be very useful for efficiently sharing code during development across institutions and projects in a common repository, without making the contributions immediately accessible to the public.
+
+Both the PUBLIC and the PRIVATE repository contain the following kinds of branches:
+
+* *MAIN BRANCH* (official releases),
+* *DEVELOPMENT BRANCH* (includes approved new contributions but the version is not yet fully tested),
+* *FEATURE BRANCH* (development branches for new features and diagnostics created by developers; the naming convention for *FEATURE BRANCHES* is _).
+
+Access rights
+=============
+
+* Write access to the *MAIN* and *DEVELOPMENT BRANCH* in both the PUBLIC and the PRIVATE GitHub repositories is restricted to the :ref:`ESMValTool Core Development Team `.
+* *FEATURE BRANCHES* in both the PUBLIC and the PRIVATE repository can be created by all members of the ESMValTool development team (i.e. members of the GitHub organization "ESMValGroup"). If needed, branches can be individually write-protected within each repository so that other developers cannot accidentally push changes to these branches.
+
+The *MAIN BRANCH* of the PRIVATE repository will be regularly synchronized with the *MAIN BRANCH* of the PUBLIC repository (currently by hand). This ensures that they are identical at all times (see schematic in Figure :numref:`fig-git`). The recommended workflow for members of the ESMValTool development team is to create additional *FEATURE BRANCHES* in either the PUBLIC or the PRIVATE repository, see further instructions below.
+
+.. _fig-git:
+
+.. figure:: /figures/git_diagram.png
+   :width: 10cm
+   :align: center
+
+   Schematic diagram of the ESMValTool GitHub repositories.
+
+Workflow
+========
+
+The following description gives an overview of the typical workflow for implementing new diagnostics or technical changes into the ESMValTool. It assumes that your local development machine is running a Unix-like operating system. For a general introduction to Git, tutorials such as https://www.git-scm.com/docs/gittutorial are recommended.
+
+Getting started
+---------------
+
+First make sure that you have Git installed on your development machine. You can test this with the command:
+
+.. code:: bash
+
+   git --version
+
+On shared machines, software is usually installed using environment modules. Try e.g.
+
+.. code:: bash
+
+   module avail git
+
+to check whether this is the case; if you need help, ask your system administrator for assistance.
+
+In order to properly identify your contributions to the ESMValTool, you need to configure your local Git with some personal data. This can be done with the following commands:
+
+.. code:: bash
+
+   git config --global user.name "YOUR NAME"
+   git config --global user.email "YOUR EMAIL"
+
+.. note:: For working on GitHub you need to create an account and log in to https://github.com/.
+
+Working with the ESMValTool GitHub Repositories
+-----------------------------------------------
+
+As a member of the ESMValTool development team you can create *FEATURE BRANCHES* in the PUBLIC as well as in the PRIVATE repository. We encourage all ESMValTool developers to use the following workflow for long-lived developments (>2 weeks).
+
+* Log in to GitHub.com
+* On GitHub, go to the website of the ESMValTool repository (https://github.com/ESMValGroup/ESMValTool-private or https://github.com/ESMValGroup/ESMValTool)
+* Click on the button create *FEATURE BRANCH*
+* Select the *DEVELOPMENT BRANCH* and create a new *FEATURE BRANCH* for the diagnostic/feature you want to implement. Please use the following naming convention for your new *FEATURE BRANCH*: _.
+
+.. figure:: /figures/git_branch.png
+   :align: center
+   :width: 6cm
+
+* Click the button "Clone or Download" and copy the URL shown there
+* Open a terminal window and go to the folder where you would like to store your local copy of the ESMValTool source
+* Type ``git clone``, and paste the URL:
+
+.. code:: bash
+
+   git clone 
+
+This will clone the ESMValTool repository at GitHub to a local folder. You can now query the status of your local working copy with:
+
+.. code:: bash
+
+   git status
+
+You will see that you are on a branch called main and that your local working copy is up to date with the remote repository. With
+
+.. code:: bash
+
+   git branch --all
+
+you can list all available remote and local branches. Now switch to your feature branch with:
+
+.. code:: bash
+
+   git checkout 
+
+You can now start coding. To check your current developments, you can use the command
+
+.. code:: bash
+
+   git status
+
+You can add new files and folders that you want to have tracked by Git using:
+
+.. code:: bash
+
+   git add 
+
+Commit your tracked changes to your local working copy via:
+
+.. code:: bash
+
+   git commit -m "YOUR COMMIT MESSAGE"
+
+You can inspect your changes with (use ``man git-log`` for all options):
+
+.. code:: bash
+
+   git log
+
+To share your work and to have an online backup, push your local development to your *FEATURE BRANCH* on GitHub:
+
+.. code:: bash
+
+   git push origin 
+
+.. note:: An overview of Git commands and best practices can be found e.g. here: https://zeroturnaround.com/rebellabs/git-commands-and-best-practices-cheat-sheet/
+
+Pull requests
+-------------
+
+Once your development is completely finished, go to the GitHub website of the ESMValTool repository and switch to your *FEATURE BRANCH*. You can then initiate a pull request by clicking on the button "New pull request". Select the *DEVELOPMENT BRANCH* as "base branch" and click on "Create pull request". Your pull request will then be tested, discussed and implemented into the *DEVELOPMENT BRANCH* by the :ref:`ESMValTool Core Development Team `.
+
+.. attention:: When creating a pull request, please carefully review the requirements and recommendations in CONTRIBUTING.md and try to implement those (see also the checklist in the pull request template). It is recommended that you create a draft pull request early in the development process, when it is still possible to implement feedback. Do not wait until shortly before the deadline of the project you are working on. If you are unsure how to implement any of the requirements, please do not hesitate to ask for help in the pull request.
+
+GitHub issues
+-------------
+
+In case you encounter a bug or if you have a feature request or something similar, you can open an issue on the PUBLIC ESMValTool GitHub repository.
+
+General do-s and don't-s
+========================
+
+Do-s
+----
+
+* Create a *FEATURE BRANCH* and use this branch exclusively for developing the ESMValTool. The naming convention for *FEATURE BRANCHES* is _.
+* Comment your code as much as possible and in English.
+* Use short but self-explanatory variable names (e.g., model_input and reference_input instead of xm and xr).
+* Consider a modular/functional programming style. This often makes code easier to read and allows intermediate variables to be freed immediately. If possible, separate diagnostic calculations from plotting routines.
+* Consider reusing or extending existing code. General-purpose code can be found in ``esmvaltool/diag_scripts/shared/``.
+* Comment all switches and parameters including a list of all possible settings/options in the header section of your code (see also ...).
+* Use templates for recipes (see ...) and diagnostics (see ...) to help with proper documentation.
+* Keep your *FEATURE BRANCH* regularly synchronized with the *DEVELOPMENT BRANCH* (``git merge``).
+* Keep developments / modifications of the ESMValTool framework / backend / basic structure separate from developments of diagnostics by creating different *FEATURE BRANCHES* for these two kinds of developments. Create *FEATURE BRANCHES* for changes / modifications of the ESMValTool framework only in the *PUBLIC* repository.
+
+Don't-s
+-------
+
+* Do not use programming languages other than the ones currently supported (Python, R, NCL, Julia). If you are unsure what language to use, Python is probably the best choice, because it has very good libraries available and is supported by a large community. Contact the :ref:`ESMValTool Core Development Team ` if you wish to use another language, but remember that only open-source languages are supported by the ESMValTool.
+* Do not develop without proper version control (see do-s above).
+* Avoid large (memory, disk space) intermediate results. Delete intermediate files/variables or see the modular/functional programming style above.
+* Do not use hard-coded pathnames or filenames.
+* Do not mix developments / modifications of the ESMValTool framework and developments / modifications of diagnostics in the same *FEATURE BRANCH*.
+
diff --git a/doc/sphinx/source/community/review.rst b/doc/sphinx/source/community/review.rst
new file mode 100644
index 0000000000..3429c0ef7a
--- /dev/null
+++ b/doc/sphinx/source/community/review.rst
@@ -0,0 +1,222 @@
+.. _reviewing:
+
+Review of pull requests
+=======================
+
+In the ESMValTool community we use pull request reviews to ensure all code and
+documentation contributions are of good quality.
+An introduction to code reviews can be found in `The Turing Way`_, including
+`why code reviews are important`_ and advice on
+`how to have constructive reviews`_.
+
+Most pull requests will need two reviews before they can be merged.
+First a technical review takes place and then a scientific review.
+Once both reviewers have approved a pull request, it can be merged.
+These three steps are described in more detail below.
+If a pull request contains only technical changes, e.g. a pull request that
+corrects some spelling errors in the documentation or a pull request that
+fixes some installation problem, a scientific review is not needed.
+
+If you are a regular contributor, please try to review a bit more than two
+other pull requests for every pull request you create yourself, to make sure
+that each pull request gets the attention it deserves.
+
+If a pull request contains backward-incompatible changes, the developer or
+reviewer must tag the `@ESMValGroup/esmvaltool-coreteam`_ team to notify them
+of the :ref:`backward-incompatible change `.
+
+.. _technical_review:
+
+1. Technical review
+-------------------
+
+Technical reviews are done by the technical review team.
+This team consists of regular contributors who have a strong interest and
+experience in software engineering.
+
+Technical reviewers use the technical checklist from the
+`pull request template`_ to make sure the pull request follows the standards we
+would like to uphold as a community.
+The technical reviewer also keeps an eye on the design and checks that no major
+design changes are made without approval from the technical lead development
+team.
+If needed, the technical reviewer can help with programming questions, design
+questions, and other technical issues.
+
+The technical review team can be contacted by writing
+`@ESMValGroup/tech-reviewers`_ in a comment on an issue or pull request on
+GitHub.
+
+.. _scientific_review:
+
+2. Scientific review
+--------------------
+
+Scientific reviews are done by the scientific review team.
+This team consists of contributors who have a strong interest and
+experience in climate science or related domains.
+
+Scientific reviewers use the scientific checklist from the
+`pull request template`_ to make sure the pull request follows the standards we
+would like to uphold as a community.
+
+The scientific review team can be contacted by writing
+`@ESMValGroup/science-reviewers`_ in a comment on an issue or pull request on
+GitHub.
+
+3. Merge
+--------
+
+Pull requests are merged by the `@ESMValGroup/esmvaltool-coreteam`_.
+Specifically, pull requests containing a :ref:`CMORizer script` can only be merged by
+a member of `@OBS-maintainers`_, who will then add the CMORized data to the OBS data pool at
+DKRZ and CEDA-Jasmin.
+The team member who does the merge first checks that both the technical and
+scientific reviewer approved the pull request and that the reviews were
+conducted thoroughly.
+He or she looks at the list of files that were changed in the pull request and
+checks that all relevant checkboxes from the checklist in the pull request
+template have been added and ticked.
+Finally, he or she checks that the :ref:`pull_request_checks` passed and
+merges the pull request.
+The person doing the merge edits the merge commit message so it
+contains a concise and meaningful text.
+
+Any issues that were solved by the pull request can be closed after merging.
+It is always a good idea to check with the author of an issue and ask if it is
+completely solved by the related pull request before closing the issue.
+
+If a pull request contains
+:ref:`backward-incompatible changes `,
+the person in charge of merging must give the core development team at least 2
+weeks for objections to be raised before merging to the main branch.
+If a strong objection is raised, the backward-incompatible change should not be
+merged until the objection is resolved.
+
+The core development team can be contacted by writing `@ESMValGroup/esmvaltool-coreteam`_
+in a comment on an issue or pull request on GitHub.
+
+Frequently asked questions
+--------------------------
+
+How do I request a review of my pull request?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If you know a suitable reviewer, e.g. because your pull request fixes an issue
+that they opened or they are otherwise interested in the work you are
+contributing, you can ask them for a review by clicking the cogwheel next to
+'Reviewers' on the pull request 'Conversation' tab and clicking on that person.
+When changing code, it is a good idea to ask the original authors of that code
+for a review.
+An easy way to find out who previously worked on a particular piece of code is
+to use `git blame`_.
+GitHub will also suggest reviewers based on who previously worked on the files
+changed in a pull request.
+All recipes contain a list of the recipe authors (and some of them also
+a list of recipe maintainers).
+It is a good idea to ask these people for a review.
+
+If there is no obvious reviewer, you can attract the attention of the relevant
+team of reviewers by writing to `@ESMValGroup/tech-reviewers`_ or
+`@ESMValGroup/science-reviewers`_ in a comment on your pull request.
+You can also label your pull request with one of the labels
+`looking for technical reviewer `_
+or
+`looking for scientific reviewer `_,
+though asking people for a review directly is probably more effective.
+
+.. _easy_review:
+
+How do I optimize for a fast review?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+When authoring a pull request, please keep in mind that it is easier and
+faster to review a pull request that does not contain many changes.
+Try to add one new feature per pull request and change only a few files.
+For the ESMValTool repository, try to limit changes to a few hundred lines of
+code and new diagnostics to not much more than a thousand lines of code.
+For the ESMValCore repository, a pull request should ideally change no more
+than about a hundred lines of existing code, though adding more lines for unit
+tests and documentation is fine.
+
+If you are a regular contributor, make sure you regularly review other people's
+pull requests; that way they will be more inclined to return the favor by
+reviewing your pull request.
+
+How do I find a pull request to review?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Please pick pull requests to review yourself based on your interest or
+expertise.
+We try to be self-organizing, so there is no central authority that will assign
+you to review anything.
+People may advertise that they are looking for a reviewer by applying the label
+`looking for technical reviewer `_
+or `looking for scientific reviewer `_.
+If someone knows you have expertise on a certain topic, however, they might
+request your review on a pull request.
+If your review is requested, please try to respond within a few days if at all
+possible.
+If you do not have the time to review the pull request, notify the author and
+try to find a replacement reviewer.
+
+How do I actually do a review?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+To do a review, go to the pull request on GitHub and click the pull request
+you would like to review; the list of all pull requests is available at
+https://github.com/ESMValGroup/ESMValCore/pulls for the ESMValCore and at
+https://github.com/ESMValGroup/ESMValTool/pulls for the ESMValTool.
+
+The top comment should contain (a selection of) the checklist available in the
+`pull request template`_.
+If it is not there, copy the relevant items from the `pull request template`_.
+Which items from the checklist are relevant depends on which files are changed
+in the pull request.
+To see which files have changed, click the tab 'Files changed'.
+Please make sure you are familiar with all items from the checklist by reading
+the content linked from :ref:`pull_request_checklist` and check all items
+that are relevant.
+Checklists with some of the items to check are available:
+:ref:`recipe and diagnostic checklist ` and
+:ref:`dataset checklist `.
+
+In addition to the items from the checklist, good questions to start a review
+with are 'Do I understand why these changes improve the tool?' (if not, ask the
+author to improve the documentation contained in the pull request and/or the
+description of the pull request on GitHub) and 'What could possibly go wrong if
+I run this code?'.
+
+To comment on specific lines of code or documentation, click the 'plus' icon
+next to a line of code and write your comment.
+When you are done reviewing, use the 'Review changes' button in the top right
+corner to comment on, request changes to, or approve the pull request.
+
+What if the author and reviewer disagree?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+When the author and the reviewer of a pull request have difficulty agreeing
+on what needs to be done before the pull request can be approved, it is usually
+both more pleasant and more efficient to schedule a meeting or co-working
+session, for example using `Google meet`_ or `Jitsi meet`_.
+
+When reviewing a pull request, try to refrain from making changes to the pull
+request yourself, unless the author specifically agrees to those changes, as
+this could potentially be perceived as offensive.
+
+If talking about the pull request in a meeting still does not resolve the
+disagreement, ask a member of the `@ESMValGroup/esmvaltool-coreteam`_ for
+their opinion and try to find a solution.
+
+
+.. _`The Turing Way`: https://the-turing-way.netlify.app/reproducible-research/reviewing.html
+.. _`why code reviews are important`: https://the-turing-way.netlify.app/reproducible-research/reviewing/reviewing-motivation.html
+.. _`how to have constructive reviews`: https://the-turing-way.netlify.app/reproducible-research/reviewing/reviewing-recommend.html
+.. _`@ESMValGroup/tech-reviewers`: https://github.com/orgs/ESMValGroup/teams/tech-reviewers
+.. _`@ESMValGroup/science-reviewers`: https://github.com/orgs/ESMValGroup/teams/science-reviewers
+.. _`@ESMValGroup/esmvaltool-coreteam`: https://github.com/orgs/ESMValGroup/teams/esmvaltool-coreteam
+.. _`@OBS-maintainers`: https://github.com/orgs/ESMValGroup/teams/obs-maintainers
+.. _`pull request template`: https://raw.githubusercontent.com/ESMValGroup/ESMValTool/main/.github/pull_request_template.md
+.. _`Google meet`: https://meet.google.com
+.. _`Jitsi meet`: https://meet.jit.si
+.. _`git blame`: https://www.freecodecamp.org/news/git-blame-explained-with-examples/
diff --git a/doc/sphinx/source/community/upgrading.rst b/doc/sphinx/source/community/upgrading.rst
new file mode 100644
index 0000000000..9a9b37f178
--- /dev/null
+++ b/doc/sphinx/source/community/upgrading.rst
@@ -0,0 +1,217 @@
+.. _upgrading:
+
+************************************************************
+Upgrading a namelist (recipe) or diagnostic to ESMValTool v2
+************************************************************
+
+This guide summarizes the main steps to be taken in order to port an ESMValTool namelist (now called **recipe**) and the corresponding diagnostic(s) from v1.0 to v2.0, hereafter also referred to as the *"old"* and the *"new version"*, respectively. The new ESMValTool version is being developed in the public git branch ``main``. An identical version of this branch is maintained in the private repository as well and kept synchronized on an hourly basis.
+
+In the following, it is assumed that the user has successfully installed ESMValTool v2 and has a rough overview of its structure (see `Technical Overview `_).
+
+Create a GitHub issue
+=====================
+
+Create an issue in the public repository to keep track of your work and inform other developers. See an example `here `_. Use the following title for the issue: "PORTING into v2.0".
+Do not forget to assign it to yourself.
+
+Create your own branch
+======================
+
+Create your own branch from ``main`` for each namelist (recipe) to be ported:
+
+.. code-block:: bash
+
+   git checkout main
+   git pull
+   git checkout -b 
+
+``main`` contains only v2.0 under the ``./esmvaltool/`` directory.
+
+Convert xml to yml
+==================
+
+In ESMValTool v2.0, the namelist (now recipe) is written in YAML format (`YAML Ain't Markup Language `__). It may be useful to activate YAML syntax highlighting in the editor in use. This improves the readability of the recipe file and facilitates the editing, especially concerning the indentations, which are essential in this format (as in Python). Instructions can be easily found online, for example for `emacs `__ and `vim `__.
+
+An xml2yml converter is available in ``esmvaltool/utils/xml2yml/``; please refer to the corresponding README file for detailed instructions on how to use it.
+
+Once the recipe is converted, a first attempt to run it can be made, possibly starting with a few datasets and one diagnostic and proceeding gradually. The recipe file ``./esmvaltool/recipes/recipe_perfmetrics_CMIP5.yml`` can be used as an example, as it covers most of the common cases.
+
+Do not forget to also rewrite the recipe header in a ``documentation`` section using the YAML syntax and, if possible, to add themes and realms items to each diagnostic section. All keys and tags used for this part must be defined in ``./esmvaltool/config-references.yml``. See ``./esmvaltool/recipes/recipe_perfmetrics_CMIP5.yml`` for an example.
+
+Create a copy of the diag script in v2.0
+========================================
+
+The diagnostic script to be ported goes into the directory ``./esmvaltool/diag_scripts/``. It is recommended to get a copy of the latest version of the script to be ported from the ``version1`` branch (either in the public or in the private repository). Just create a local (offline) copy of this file from the repository and add it to ``./esmvaltool/diag_scripts/`` as a new file, for example as sketched below.
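+
+One possible way to create such a copy, without switching your working copy
+away from your feature branch, is shown here; the script name and paths are
+placeholders, so adapt them to the diagnostic you are porting:
+
+.. code-block:: bash
+
+   # Fetch the version1 branch and extract the old script from it
+   # (illustrative names only, adapt to your diagnostic).
+   git fetch origin version1
+   git show origin/version1:diag_scripts/my_diagnostic.ncl > esmvaltool/diag_scripts/my_diagnostic.ncl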
+
+Note that (in general) this is not necessary for plot scripts and for the libraries in ``./esmvaltool/diag_scripts/ncl/lib/``, which have already been ported. Changes may however still be necessary, especially in the plot scripts, which have not yet been fully tested with all diagnostics.
+
+Check and apply renamings
+=========================
+
+The new ESMValTool version includes a completely revised interface, handling the communication between the Python workflow and the (NCL) scripts. This required several variables and functions to be renamed or removed. These changes are listed in the following table and have to be applied to the diagnostic code before starting with the testing.
+
+.. tabularcolumns:: |p{6cm}|p{6cm}|p{3cm}|
+
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| Name in v1.0                                    | Name in v2.0                                              | Affected code    |
++=================================================+===========================================================+==================+
+| ``getenv("ESMValTool_wrk_dir")``                | ``config_user_info@work_dir``                             | all .ncl scripts |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``getenv(ESMValTool_att)``                      | ``diag_script_info@att`` or                               | all .ncl scripts |
+|                                                 | ``config_user_info@att``                                  |                  |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``xml``                                         | ``yml``                                                   | all scripts      |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``var_attr_ref(0)``                             | ``variable_info@reference_dataset``                       | all .ncl scripts |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``var_attr_ref(1)``                             | ``variable_info@alternative_dataset``                     | all .ncl scripts |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``models``                                      | ``input_file_info``                                       | all .ncl scripts |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``models@name``                                 | ``input_file_info@dataset``                               | all .ncl scripts |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``verbosity``                                   | ``config_user_info@log_level``                            | all .ncl scripts |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``isfilepresent_esmval``                        | ``fileexists``                                            | all .ncl scripts |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``messaging.ncl``                               | ``logging.ncl``                                           | all .ncl scripts |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``info_output(arg1, arg2, arg3)``               | ``log_info(arg1)`` if ``arg3=1``                          | all .ncl scripts |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``info_output(arg1, arg2, arg3)``               | ``log_debug(arg1)`` if ``arg3>1``                         | all .ncl scripts |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``verbosity = config_user_info@verbosity``      | remove this statement                                     | all .ncl scripts |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``enter_msg(arg1, arg2, arg3)``                 | ``enter_msg(arg1, arg2)``                                 | all .ncl scripts |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``leave_msg(arg1, arg2, arg3)``                 | ``leave_msg(arg1, arg2)``                                 | all .ncl scripts |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``noop()``                                      | appropriate ``if-else`` statement                         | all .ncl scripts |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``nooperation()``                               | appropriate ``if-else`` statement                         | all .ncl scripts |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``fullpaths``                                   | ``input_file_info@filename``                              | all .ncl scripts |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``get_output_dir(arg1, arg2)``                  | ``config_user_info@plot_dir``                             | all .ncl scripts |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``get_work_dir``                                | ``config_user_info@work_dir``                             | all .ncl scripts |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``inlist(arg1, arg2)``                          | ``any(arg1.eq.arg2)``                                     | all .ncl scripts |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``load interface_scripts/*.ncl``                | ``load $diag_scripts/../interface_scripts/interface.ncl`` | all .ncl scripts |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``_info.tmp``                                   | ``_info.ncl`` in ``preproc`` dir                          | all .ncl scripts |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``ncl.interface``                               | ``settings.ncl`` in ``run_dir`` and                       | all .ncl scripts |
+|                                                 | ``interface_scripts/interface.ncl``                       |                  |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``load diag_scripts/lib/ncl/``                  | ``load $diag_scripts/shared/``                            | all .ncl scripts |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``load plot_scripts/ncl/``                      | ``load $diag_scripts/shared/plot/``                       | all .ncl scripts |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``load diag_scripts/lib/ncl/rgb/``              | ``load $diag_scripts/shared/plot/rgb/``                   | all .ncl scripts |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``load diag_scripts/lib/ncl/styles/``           | ``load $diag_scripts/shared/plot/styles``                 | all .ncl scripts |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``load diag_scripts/lib/ncl/misc_function.ncl`` | ``load $diag_scripts/shared/plot/misc_function.ncl``      | all .ncl scripts |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``LW_CRE``, ``SW_CRE``                          | ``lwcre``, ``swcre``                                      | some yml recipes |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``check_min_max_models``                        | ``check_min_max_datasets``                                | all .ncl scripts |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``get_ref_model_idx``                           | ``get_ref_dataset_idx``                                   | all .ncl scripts |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+| ``get_model_minus_ref``                         | ``get_dataset_minus_ref``                                 | all .ncl scripts |
++-------------------------------------------------+-----------------------------------------------------------+------------------+
+
+The following changes may also have to be considered:
+
+- namelists are now called recipes and collected in ``esmvaltool/recipes``;
+- models are now called datasets and all files have been updated accordingly, including NCL functions (see table above);
+- ``run_dir`` (previously ``interface_data``), ``plot_dir`` and ``work_dir`` are now unique to each diagnostic script, so it is no longer necessary to define specific paths in the diagnostic scripts to prevent file collisions;
+- ``input_file_info`` is now a list of a list of logicals, where each element describes one dataset and one variable. Convenience functions to extract the required elements (e.g., all datasets of a given variable) are provided in ``esmvaltool/interface_scripts/interface.ncl``;
+- the interface functions ``interface_get_*`` and ``get_figure_filename`` are no longer available: their functionalities can be easily reproduced using ``input_file_info`` and the convenience functions in ``esmvaltool/interface_scripts/interface.ncl`` to access the required attributes;
+- there are now only 4 log levels (``debug``, ``info``, ``warning``, and ``error``) instead of (infinite) numerical values in ``verbosity``;
+- diagnostic scripts are now organized in subdirectories in ``esmvaltool/diag_scripts/``: all scripts belonging to the same diagnostics are to be collected in a single subdirectory (see ``esmvaltool/diag_scripts/perfmetrics/`` for an example). This applies also to the ``aux_`` scripts, unless they are shared among multiple diagnostics (in this case they go in ``shared/``);
+- the relevant ``input_file_info`` items required by a plot routine should be passed as arguments to the routine itself;
+- upper case characters have to be avoided in script names, if possible.
+
+As for the recipe, the diagnostic script ``./esmvaltool/diag_scripts/perfmetrics/main.ncl`` can be followed as a working example.
+
+Move preprocessing from the diagnostic script to the backend
+============================================================
+
+Many operations previously performed by the diagnostic scripts are now included in the backend, including level extraction, regridding, masking, and multi-model statistics. If the diagnostic to be ported contains code performing any of these operations, the corresponding code has to be removed from the diagnostic script and the respective backend functionality can be used instead.
+
+The backend operations are fully controlled by the ``preprocessors`` section in the recipe. Here, a number of preprocessor sets can be defined, with different options for each of the operations. The sets defined in this section are applied in the ``diagnostics`` section to preprocess a given variable.
+
+It is recommended to proceed step by step, porting and testing each operation separately before proceeding with the next one. A useful setting in the configuration called ``write_intermediary_cube`` allows writing out the variable field after each preprocessing step, thus facilitating the comparison with the old version (e.g., after CMORization, after level selection, after regridding, etc.). The CMORization step of the new backend exactly corresponds to the operation performed by the old backend (and stored in the ``climo`` directory, now called ``preproc``): this is the very first step to be checked, by simply comparing the intermediary file produced by the new backend after CMORization with the output of the old backend in the ``climo`` directory (see "Testing" below for instructions).
+
+The new backend also performs variable derivation, replacing the ``calculate`` function in the ``variable_defs`` scripts. If the recipe which is being ported makes use of derived variables, the corresponding calculation must be ported from the ``./variable_defs/.ncl`` file to ``./esmvaltool/preprocessor/_derive.py``.
+
+Note that the Python library ``esmval_lib``, containing the ``ESMValProject`` class, is no longer available in version 2. Most functionalities have been moved to the new preprocessor. If you miss a feature, please open an issue on GitHub (https://github.com/ESMValGroup/ESMValTool/issues).
+
+Move diagnostic- and variable-specific settings to the recipe
+=============================================================
+
+In the new version, all settings are centralized in the recipe, completely replacing the diagnostic-specific settings in ``./nml/cfg_files/`` (passed as ``diag_script_info`` to the diagnostic scripts) and the variable-specific settings in ``variable_defs/.ncl`` (passed as ``variable_info``). There is also no longer a distinction between diagnostic- and variable-specific settings: they are collectively defined in the ``scripts`` dictionary of each diagnostic in the recipe and passed as ``diag_script_info`` attributes by the new ESMValTool interface. Note that the ``variable_info`` logical still exists, but it is used to pass variable information as given in the corresponding dictionary of the recipe.
+
+Make sure the diagnostic script writes NetCDF output
+====================================================
+
+Each diagnostic script is required to write the output of the analysis in one or more NetCDF files. This is to give the user the possibility to further look into the results, besides the plots, but (most importantly) for tagging purposes when publishing the data in a report and/or on a website.
+
+For each of the plots produced by the diagnostic script, a single NetCDF file has to be generated. The variable saved in this file should also contain all the necessary metadata that documents the plot (dataset names, units, statistical methods, etc.).
+The files have to be saved in the work directory (defined in ``cfg['work_dir']`` and ``config_user_info@work_dir`` for the Python and NCL diagnostics, respectively).
+
+Test the recipe/diagnostic in the new version
+=============================================
+
+Once complete, the porting of the diagnostic script can be tested. Most of the diagnostic scripts allow writing the output in a NetCDF file before calling the plotting routine. This output can be used to check whether the results of v1.0 are correctly reproduced. As a reference for v1.0, it is recommended to use the development branch.
+
+There are two methods for comparing NetCDF files: ``cdo`` and ``ncdiff``. The first method is applied with the command:
+
+.. code-block:: bash
+
+   cdo diffv old_output.nc new_output.nc
+
+which will print a log to stdout, reporting how many records of the file differ and the absolute/relative differences.
+
+The second method produces a NetCDF file (e.g., ``diff.nc``) with the difference between two given files:
+
+.. code-block:: bash
+
+   ncdiff old_output.nc new_output.nc diff.nc
+
+This file can be opened with ``ncview`` to visually inspect the differences.
+
+In general, binary identical results cannot be expected, due to the use of different languages and algorithms in the two versions, especially for complex operations such as regridding. However, differences within machine precision are desirable. At this stage, it is essential to test all datasets in the recipe and not just a subset of them.
+
+It is also recommended to compare the graphical output (this may be necessary if the ported diagnostic does not produce NetCDF output). For this comparison, the PostScript format is preferable, since it is easy to directly compare two PostScript files with the standard ``diff`` command in Linux:
+
+.. code-block:: bash
+
+   diff old_graphic.ps new_graphic.ps
+
+but it is very unlikely to produce no differences, so visual inspection of the output may also be required.
+
+Clean the code
+==============
+
+Before submitting a pull request, the code should be cleaned to adhere to the coding standards, which are somewhat stricter in v2.0. This check is performed automatically on GitHub (CircleCI and Codacy) when opening a pull request on the public repository. A code-style checker (``nclcodestyle``) for NCL scripts is available in the tool and installed alongside the tool itself. When checking NCL code style, the following should be considered in addition to the warnings issued by the style checker:
+
+- two-space instead of four-space indentation is now adopted for NCL as per NCL standard;
+- ``load`` statements for NCL standard libraries should be removed: these are automatically loaded since NCL v6.4.0 (see `NCL documentation `_);
+- the description of diagnostic- and variable-specific settings can be moved from the header of the diagnostic script to the recipe, since the settings are now defined there (see above);
+- NCL ``print`` and ``printVarSummary`` statements must be avoided and replaced by the ``log_info`` and ``log_debug`` functions;
+- for error and warning statements, the ``error_msg`` function can be used, which automatically includes an exit statement.
+
+Update the documentation
+========================
+
+If necessary, add or update the documentation for your recipes in the corresponding rst file, which is now in ``doc/sphinx/source/recipes``. Do not forget to also add the documentation file to the list in ``doc/sphinx/source/annex_c`` to make sure it actually appears in the documentation.
+
+Open a pull request
+===================
+
+Create a pull request on GitHub to merge your branch back to ``main``, provide a short description of what has been done and nominate one or more reviewers.
diff --git a/doc/sphinx/source/conf.py b/doc/sphinx/source/conf.py
index b7642b5a6b..de7feb4775 100644
--- a/doc/sphinx/source/conf.py
+++ b/doc/sphinx/source/conf.py
@@ -12,30 +12,50 @@
 # All configuration values have a default; values that are commented out
 # serve to show the default.
 
-import sys
 import os
-
+import sys
+from datetime import datetime
+from pathlib import Path
 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
 # documentation root, use os.path.abspath to make it absolute, like shown here.
-os.chdir(os.path.abspath(os.path.dirname(__file__)))
-sys.path.insert(0, os.path.abspath('./../../..'))
-
-from esmvaltool._version import __version__
-
-#add process_nl_docs in sphinx documentation source folder
-sys.path.insert(0, os.path.abspath('.'))
-import process_ncl_docs2 as process_ncl_docs
-
-
-# add custom extensions directory to python path
-#sys.path.append(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'extensions'))
+root = Path(__file__).absolute().parent.parent.parent.parent
+sys.path.insert(0, str(root))
+
+from esmvaltool import __version__
+
+# -- RTD configuration ------------------------------------------------
+
+# on_rtd is whether we are on readthedocs.org, this line of code grabbed from docs.readthedocs.org
+on_rtd = os.environ.get("READTHEDOCS", None) == "True"
+
+# This is used for linking and such so we link to the thing we're building
+rtd_version = os.environ.get("READTHEDOCS_VERSION", "latest")
+if on_rtd:
+    # On Readthedocs, the conda environment used for building the documentation
+    # is not `activated`. As a consequence, a few critical environment variables
+    # are not set. Here, we hardcode them instead.
+    # In a normal environment, i.e. a local build of the documentation, the
+    # normal environment activation takes care of this.
+    rtd_project = os.environ.get("READTHEDOCS_PROJECT")
+    rtd_conda_prefix = f"/home/docs/checkouts/readthedocs.org/user_builds/{rtd_project}/conda/{rtd_version}"
+    os.environ["ESMFMKFILE"] = f"{rtd_conda_prefix}/lib/esmf.mk"
+    os.environ["PROJ_DATA"] = f"{rtd_conda_prefix}/share/proj"
+    os.environ["PROJ_NETWORK"] = "OFF"
+if rtd_version not in ["latest", "stable", "doc"]:
+    rtd_version = "latest"
+
+# Generate gallery
+sys.path.append(os.path.dirname(__file__))
+import generate_gallery
+
+generate_gallery.main()
 
 # -- General configuration ------------------------------------------------
 
 # If your documentation needs a minimal Sphinx version, state it here.
-#needs_sphinx = '1.0'
+# needs_sphinx = '1.0'
 
 # Add any Sphinx extension module names here, as strings. 
They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom @@ -43,6 +63,7 @@ extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.doctest', + 'sphinx.ext.extlinks', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', @@ -50,18 +71,21 @@ 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode', 'sphinx.ext.napoleon', + 'autodocsumm', ] -process_ncl_docs.create_doc_files_from_ncl() -autodoc_default_flags = [ - 'members', - 'undoc-members', - 'inherited-members', - 'show-inheritance', -] +autodoc_default_options = { + 'members': True, + 'undoc-members': True, + 'inherited-members': True, + 'show-inheritance': True, + 'autosummary': True, +} -#autodoc_mock_imports = ['cf_units', 'iris', 'matplotlib', 'numpy', 'cartopy', 'cftime', 'netCDF4', 'yaml', 'PIL', 'prov', 'scipy', 'psutil', 'shapely', 'stratify', 'ESMF'] -autodoc_mock_imports = ['iris', 'cftime', 'PIL', 'prov', 'scipy', 'stratify', 'ESMF', 'cartopy', 'cf_units'] +# See https://github.com/sphinx-doc/sphinx/issues/12589 +suppress_warnings = [ + 'autosummary.import_cycle', +] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] @@ -70,14 +94,14 @@ source_suffix = '.rst' # The encoding of source files. -#source_encoding = 'utf-8-sig' +# source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'ESMValTool' -copyright = u'2015, Veronika Eyring, Axel Lauer, Mattia Righi, Martin Evaldsson et al.' +copyright = u'{0}, ESMValTool Development Team'.format(datetime.now().year) # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -90,13 +114,13 @@ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. -#language = None +# language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: -#today = '' +# today = '' # Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' +# today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. @@ -104,109 +128,122 @@ # The reST default role (used for this markup: `text`) to use for all # documents. -#default_role = None +# default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). -#add_module_names = True +# add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. -#show_authors = False +# show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] +# modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. -#keep_warnings = False - +# keep_warnings = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. 
-html_theme = 'sphinx_rtd_theme' +html_theme = 'pydata_sphinx_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -#html_theme_options = {} - +# +# Avoid the following warning issued by pydata_sphinx_theme: +# +# "WARNING: The default value for `navigation_with_keys` will change to `False` +# in the next release. If you wish to preserve the old behavior for your site, +# set `navigation_with_keys=True` in the `html_theme_options` dict in your +# `conf.py` file.Be aware that `navigation_with_keys = True` has negative +# accessibility implications: +# https://github.com/pydata/pydata-sphinx-theme/issues/1492" +html_theme_options = { + "navigation_with_keys": False, + "logo": { + "image_light": "figures/ESMValTool-logo-2.png", + "image_dark": "figures/ESMValTool-logo-2-dark.png", + }, +} # Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] +# html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". -#html_title = None +# html_title = None # A shorter title for the navigation bar. Default is the same as html_title. html_short_title = "ESMValTool {0}".format(release) # The name of an image file (relative to this directory) to place at the top # of the sidebar. -html_logo = 'figures/ESMValTool-logo.png' +html_logo = 'figures/ESMValTool-logo-2.png' # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. -#html_favicon = None +# html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = [] +html_static_path = ["figures/ESMValTool-logo-2-dark.png"] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. -#html_extra_path = [] +# html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' +# html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. -#html_use_smartypants = True +# html_use_smartypants = True # Custom sidebar templates, maps document names to template names. -#html_sidebars = {} +# html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. -#html_additional_pages = {} +# html_additional_pages = {} # If false, no module index is generated. -#html_domain_indices = True +# html_domain_indices = True # If false, no index is generated. -#html_use_index = True +# html_use_index = True # If true, the index is split into individual pages for each letter. -#html_split_index = False +# html_split_index = False # If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True +# html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True +# html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
-#html_show_copyright = True +# html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. -#html_use_opensearch = '' +# html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None +# html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'ESMValTooldoc' @@ -214,14 +251,15 @@ # -- Options for LaTeX output --------------------------------------------- latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', -# Additional stuff for the LaTeX preamble. -'preamble': r''' + # Additional stuff for the LaTeX preamble. + 'preamble': + r''' \makeatletter \renewcommand{\maketitle}{ \newcommand{\MONTH}{% @@ -241,7 +279,7 @@ \fi} \begin{titlepage} \begin{center} - \includegraphics[width=\textwidth]{../../source/figures/ESMValTool-logo.pdf}\par + \includegraphics[width=\textwidth]{../../source/figures/ESMValTool-logo-2.pdf}\par \vspace{2cm} {\Huge \bf \sffamily User's and Developer's Guide \par} \vspace{1cm} @@ -263,44 +301,43 @@ # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - ('index', 'ESMValTool_Users_Guide.tex', u'ESMValTool User\'s and Developer\'s Guide', - u'Veronika Eyring, Axel Lauer, Mattia Righi, Martin Evaldsson et al.', 'manual'), + ('index', 'ESMValTool_Users_Guide.tex', + u'ESMValTool User\'s and Developer\'s Guide', + u'ESMValTool Development Team', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. -#latex_logo = None +# latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = True +# latex_use_parts = True latex_toplevel_sectioning = "part" # If true, show page references after internal links. -#latex_show_pagerefs = False +# latex_show_pagerefs = False # If true, show URL addresses after external links. -#latex_show_urls = False +# latex_show_urls = False # Documents to append as an appendix to all manuals. -#latex_appendices = [] +# latex_appendices = [] # If false, no module index is generated. -#latex_domain_indices = True - +# latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -#man_pages = [ +# man_pages = [ # ('index', 'esmvaltool', u'ESMValTool Documentation', # [u'Veronika Eyring, Axel Lauer, Mattia Righi, Martin Evaldsson et al.'], 1) #] # If true, show URL addresses after external links. -#man_show_urls = False - +# man_show_urls = False # -- Options for Texinfo output ------------------------------------------- @@ -314,88 +351,143 @@ #] # Documents to append as an appendix to all manuals. -#texinfo_appendices = [] +# texinfo_appendices = [] # If false, no module index is generated. -#texinfo_domain_indices = True +# texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. 
-#texinfo_show_urls = 'footnote'
+# texinfo_show_urls = 'footnote'

 # If true, do not generate a @detailmenu in the "Top" node's menu.
-#texinfo_no_detailmenu = False
-
+# texinfo_no_detailmenu = False

 # -- Options for Epub output ----------------------------------------------

 # Bibliographic Dublin Core info.
 epub_title = u'ESMValTool'
-epub_author = u'Veronika Eyring, Axel Lauer, Mattia Righi, Martin Evaldsson et al.'
-epub_publisher = u'Veronika Eyring, Axel Lauer, Mattia Righi, Martin Evaldsson et al.'
-epub_copyright = u'2015, Veronika Eyring, Axel Lauer, Mattia Righi, Martin Evaldsson et al.'
+epub_author = u'ESMValTool Development Team'
+epub_publisher = u'ESMValTool Development Team'
+epub_copyright = u'ESMValTool Development Team'

 # The basename for the epub file. It defaults to the project name.
-#epub_basename = u'ESMValTool'
+# epub_basename = u'ESMValTool'

 # The HTML theme for the epub output. Since the default themes are not optimized
 # for small screen space, using the same theme for HTML and epub output is
 # usually not wise. This defaults to 'epub', a theme designed to save visual
 # space.
-#epub_theme = 'epub'
+# epub_theme = 'epub'

 # The language of the text. It defaults to the language option
 # or en if the language is not set.
-#epub_language = ''
+# epub_language = ''

 # The scheme of the identifier. Typical schemes are ISBN or URL.
-#epub_scheme = ''
+# epub_scheme = ''

 # The unique identifier of the text. This can be an ISBN number
 # or the project homepage.
-#epub_identifier = ''
+# epub_identifier = ''

 # A unique identification for the text.
-#epub_uid = ''
+# epub_uid = ''

 # A tuple containing the cover image and cover page html template filenames.
-#epub_cover = ()
+# epub_cover = ()

 # A sequence of (type, uri, title) tuples for the guide element of content.opf.
-#epub_guide = ()
+# epub_guide = ()

 # HTML files that should be inserted before the pages created by sphinx.
 # The format is a list of tuples containing the path and title.
-#epub_pre_files = []
+# epub_pre_files = []

 # HTML files that should be inserted after the pages created by sphinx.
 # The format is a list of tuples containing the path and title.
-#epub_post_files = []
+# epub_post_files = []

 # A list of files that should not be packed into the epub file.
 epub_exclude_files = ['search.html']

 # The depth of the table of contents in toc.ncx.
-#epub_tocdepth = 3
+# epub_tocdepth = 3

 # Allow duplicate toc entries.
-#epub_tocdup = True
+# epub_tocdup = True

 # Choose between 'default' and 'includehidden'.
-#epub_tocscope = 'default'
+# epub_tocscope = 'default'

 # Fix unsupported image types using the PIL.
-#epub_fix_images = False
+# epub_fix_images = False

 # Scale large images.
-#epub_max_image_width = 0
+# epub_max_image_width = 0

 # How to display URL addresses: 'footnote', 'no', or 'inline'.
-#epub_show_urls = 'inline'
+# epub_show_urls = 'inline'

 # If false, no index is generated.
-#epub_use_index = True
+# epub_use_index = True

 numfig = True

-# Example configuration for intersphinx: refer to the Python standard library.
-intersphinx_mapping = {'http://docs.python.org/': None}
+# Configuration for intersphinx
+intersphinx_mapping = {
+    'cartopy': ('https://scitools.org.uk/cartopy/docs/latest/', None),
+    'cf_units': ('https://cf-units.readthedocs.io/en/latest/', None),
+    'esmvalcore':
+    (f'https://docs.esmvaltool.org/projects/esmvalcore/en/{rtd_version}/',
+     None),
+    'esmvaltool': (f'https://docs.esmvaltool.org/en/{rtd_version}/', None),
+    'iris': ('https://scitools-iris.readthedocs.io/en/latest/', None),
+    'lime': ('https://lime-ml.readthedocs.io/en/latest/', None),
+    'matplotlib': ('https://matplotlib.org/stable/', None),
+    'numpy': ('https://numpy.org/doc/stable/', None),
+    'pandas': ('https://pandas.pydata.org/pandas-docs/dev', None),
+    'python': ('https://docs.python.org/3/', None),
+    'scipy': ('https://docs.scipy.org/doc/scipy/', None),
+    'seaborn': ('https://seaborn.pydata.org/', None),
+    'sklearn': ('https://scikit-learn.org/stable', None),
+}
+
+# -- Extlinks extension -------------------------------------------------------
+# See https://www.sphinx-doc.org/en/master/usage/extensions/extlinks.html
+
+extlinks = {
+    "discussion": (
+        "https://github.com/ESMValGroup/ESMValTool/discussions/%s",
+        "Discussion #%s",
+    ),
+    "issue": (
+        "https://github.com/ESMValGroup/ESMValTool/issues/%s",
+        "Issue #%s",
+    ),
+    "pull": (
+        "https://github.com/ESMValGroup/ESMValTool/pull/%s",
+        "Pull request #%s",
+    ),
+    "release": (
+        "https://github.com/ESMValGroup/ESMValTool/releases/tag/%s",
+        "ESMValTool %s",
+    ),
+    "esmvalcore-release": (
+        "https://github.com/ESMValGroup/ESMValCore/releases/tag/%s",
+        "ESMValCore %s",
+    ),
+    "team": (
+        "https://github.com/orgs/ESMValGroup/teams/%s",
+        "@ESMValGroup/%s",
+    ),
+    "user": (
+        "https://github.com/%s",
+        "@%s",
+    ),
+}
+
+# -- Custom Document processing ----------------------------------------------
+
+import gensidebar
+
+gensidebar.generate_sidebar(globals(), "esmvaltool")
diff --git a/doc/sphinx/source/develop/dataset.rst b/doc/sphinx/source/develop/dataset.rst
new file mode 100644
index 0000000000..f624a44feb
--- /dev/null
+++ b/doc/sphinx/source/develop/dataset.rst
@@ -0,0 +1,352 @@
+.. _new-cmorizer:
+
+Writing a CMORizer script for an additional dataset
+***************************************************
+
+ESMValTool is designed to work with `CF compliant
+<https://cfconventions.org/>`_ data and follows the CMOR tables from the
+CMIP data request; therefore, observational datasets need to be CMORized
+for use in ESMValTool.
+The following steps are necessary to prepare an observational
+data set for use in ESMValTool.
+
+| `1. Check if your variable is CMOR standard`_
+| `2. Edit your configuration file`_
+| `3. Store your dataset in the right place`_
+| `3.1 Downloader script (optional)`_
+| `4. Create a cmorizer for the dataset`_
+| `4.1 Cmorizer script written in Python`_
+| `4.2 Cmorizer script written in NCL`_
+| `5. Run the cmorizing script`_
+| `6. Naming convention of the observational data files`_
+| `7. Test the cmorized dataset`_
+
+.. note::
+   **CMORization as a fix.** As of early 2020, we've started implementing
+   cmorization as *fixes*. Compared to the workflow described below, this
+   has the advantage that the user does not need to store a duplicate
+   (CMORized) copy of the data. Instead, the CMORization is performed
+   'on the fly' when running a recipe. **ERA5** is the first dataset for
+   which this 'CMORization on the fly' is supported. For more information,
+   see :ref:`inputdata_native_datasets`.
+
+
+1. Check if your variable is CMOR standard
+==========================================
+
+Most variables are defined in the CMIP data request and can be found in the
+CMOR tables in the folder `/esmvalcore/cmor/tables/cmip6/Tables/
+<https://github.com/ESMValGroup/ESMValCore/tree/main/esmvalcore/cmor/tables/cmip6/Tables>`_,
+differentiated according to the ``MIP`` they belong to. The tables are a
+copy of the `PCMDI <https://pcmdi.llnl.gov/>`_ guidelines. If you find the
+variable in one of these tables, you can proceed to the next section.
+
+If your variable is not available in the standard CMOR tables,
+you need to write a custom CMOR table entry for the variable
+as outlined below and add it to `/esmvalcore/cmor/tables/custom/
+<https://github.com/ESMValGroup/ESMValCore/tree/main/esmvalcore/cmor/tables/custom>`_.
+
+To create a new custom CMOR table you need to follow these
+guidelines:
+
+- Provide the ``variable_entry``;
+- Provide the ``modeling_realm``;
+- Provide the variable attributes, but leave ``standard_name`` blank.
+  Necessary variable attributes are: ``units``, ``cell_methods``,
+  ``cell_measures``, ``long_name``, ``comment``.
+- Provide some additional variable attributes. Necessary additional variable
+  attributes are: ``dimensions``, ``out_name``, ``type``. There are also
+  additional variable attributes that can be defined here (see the already
+  available cmorizers).
+
+It is recommended to use an existing custom table as a template, to edit the
+content and save it as ``CMOR_<short_name>.dat``.
+
+2. Edit your configuration file
+===============================
+
+Make sure that, besides the paths to the model simulations and observations,
+the path to the raw observational data to be cmorized (``RAWOBS``) is also
+present in your configuration file.
+
+3. Store your dataset in the right place
+========================================
+
+The folder ``RAWOBS`` needs the subdirectories ``Tier1``, ``Tier2`` and
+``Tier3``. The different tiers describe the different levels of restrictions
+for downloading (e.g. providing contact information, licence agreements)
+and using the observations. The unformatted (raw) observations
+should then be stored in the appropriate one of these three folders.
+
+For each additional dataset, an entry needs to be added to the file
+`datasets.yml
+<https://github.com/ESMValGroup/ESMValTool/blob/main/esmvaltool/cmorizers/data/datasets.yml>`_.
+The dataset entry should contain:
+
+- the correct ``tier`` information;
+- the ``source`` of the raw data;
+- the ``last_access`` date;
+- the ``info`` that explains how to download the data.
+
+Note that these fields should be identical to the content of the header
+of the cmorizing script (see Section `4. Create a cmorizer for the dataset`_).
+
+3.1 Downloader script (optional)
+--------------------------------
+
+A Python script can be written to download raw observations
+from the source and store the data in the appropriate tier subdirectory of
+the folder ``RAWOBS`` automatically.
+There are many downloading scripts available in
+`/esmvaltool/cmorizers/data/downloaders/datasets/
+<https://github.com/ESMValGroup/ESMValTool/tree/main/esmvaltool/cmorizers/data/downloaders/datasets>`_
+where several data download mechanisms are provided:
+
+- A `wget`-based downloader for http(s) downloads, with a specific derivation
+  for NASA datasets.
+- An `ftp` downloader with a specific derivation for ESACCI datasets
+  available from CEDA.
+- A Climate Data Store downloader based on `cdsapi`.
+
+Note that the name of this downloading script has to be identical to the
+name of the dataset.
+
+Depending on the source server, the downloading script needs to contain
+paths to the raw observations, filename patterns, and the various fields
+necessary to retrieve the data.
+Default ``start_date`` and ``end_date`` can be provided in cases where raw
+data are stored in daily, monthly, and yearly files.
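+
+For orientation, the sketch below shows the typical shape of such a
+downloader script. The dataset name, URL and file pattern are invented, and
+the ``download_dataset`` entry point and ``WGetDownloader`` helper are
+assumptions based on the existing scripts in the folder linked above, so
+please check those scripts for the exact interface:
+
+.. code-block:: python
+
+   """Hypothetical downloader script for a dataset called MYDATA."""
+   from datetime import datetime
+
+   from dateutil import relativedelta
+
+   from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader
+
+
+   def download_dataset(config, dataset, dataset_info, start_date, end_date,
+                        overwrite):
+       """Download MYDATA, assuming one raw file per month."""
+       if start_date is None:
+           start_date = datetime(2003, 1, 1)
+       if end_date is None:
+           end_date = datetime(2018, 12, 1)
+       downloader = WGetDownloader(
+           config=config,
+           dataset=dataset,
+           dataset_info=dataset_info,
+           overwrite=overwrite,
+       )
+       loop_date = start_date
+       while loop_date <= end_date:
+           # Invented monthly file name pattern, e.g. mydata_200301.nc.
+           downloader.download_file(
+               "https://example.org/mydata/"
+               f"mydata_{loop_date.year}{loop_date.month:02d}.nc",
+               wget_options=[],
+           )
+           loop_date += relativedelta.relativedelta(months=1)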
+
+The downloading script for the given dataset can be run with:
+
+.. code-block:: console
+
+   esmvaltool data download --config_dir <config-dir> <dataset-name>
+
+The options ``--start`` and ``--end`` can be added to the command above to
+restrict the download of raw data to a time range. They will be ignored if a
+specific dataset does not support them (e.g. because the data are provided
+as a single file). Valid formats are ``YYYY``, ``YYYYMM`` and ``YYYYMMDD``.
+By default, already downloaded data are not overwritten unless the option
+``--overwrite=True`` is used.
+
+4. Create a cmorizer for the dataset
+====================================
+
+There are many cmorizing scripts available in
+`/esmvaltool/cmorizers/data/formatters/datasets/
+<https://github.com/ESMValGroup/ESMValTool/tree/main/esmvaltool/cmorizers/data/formatters/datasets>`_
+where solutions to many kinds of format issues with observational data are
+addressed. These scripts are either written in Python or in NCL.
+
+.. note::
+   NCL support will terminate soon, so new cmorizer scripts should
+   preferably be written in Python.
+
+How much cmorizing an observational data set needs strongly depends on the
+original NetCDF file and how close the original formatting already is to the
+strict CMOR standard.
+
+In the following two subsections two cmorizing scripts, one written in
+Python and one written in NCL, are explained in more detail.
+
+4.1 Cmorizer script written in Python
+-------------------------------------
+
+Find here an example of a cmorizing script, written for the ``MTE`` dataset
+that is available at the MPI for Biogeochemistry in Jena: `mte.py
+<https://github.com/ESMValGroup/ESMValTool/blob/main/esmvaltool/cmorizers/data/formatters/datasets/mte.py>`_.
+
+All the information about the dataset that is necessary to write the
+filename correctly, as well as which variable is of interest, is stored in a
+separate configuration file: `MTE.yml
+<https://github.com/ESMValGroup/ESMValTool/blob/main/esmvaltool/cmorizers/data/cmor_config/MTE.yml>`_
+in the directory ``ESMValTool/esmvaltool/cmorizers/data/cmor_config/``. Note
+that both the name of this configuration file and the name of the cmorizing
+script have to be identical to the name of your dataset.
+It is recommended that you set ``project`` to ``OBS6`` in the
+configuration file. That way, the variables defined in the CMIP6 CMOR table,
+augmented with the custom variables described above, are available to your
+script.
+
+The first part of this configuration file defines the filename of the raw
+observations file. The second part defines the common global attributes for
+the cmorizer output, e.g. information that is needed to piece together the
+final observations file name in the correct structure (see Section
+`6. Naming convention of the observational data files`_).
+Another global attribute is ``reference``, which includes a ``doi`` related
+to the dataset. Please see the section on adding references in the
+contribution guidelines for how to add reference tags to the ``reference``
+section in the configuration file. If a single dataset has more than one
+reference, it is possible to add tags as a list, e.g.
+``reference: ['tag1', 'tag2']``.
+The third part of the configuration file defines the variables that are
+supposed to be cmorized.
+
+The actual cmorizing script ``mte.py`` consists of a header with
+information on where and how to download the data, and a note of when the
+data webpage was last accessed.
+
+The main body of the CMORizer script must contain a function called
+
+.. code-block:: python
+
+   def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date):
+
+with this exact call signature. Here, ``in_dir`` corresponds to the input
+directory of the raw files, ``out_dir`` to the output directory of the final
+reformatted data set, ``cfg`` to the dataset-specific configuration file,
+``cfg_user`` to the configuration object (which behaves basically like a
+dictionary), ``start_date`` to the start of the period to format, and
+``end_date`` to the end of the period to format.
+If not needed, the last three arguments can be ignored using underscores.
+The return value of this function is ignored. All the work, i.e. loading
+the raw files, processing them and saving the final output, has to be
+performed inside its body. To simplify this process, ESMValTool provides a
+set of predefined utilities.py_, which can be imported into your CMORizer by
+
+.. code-block:: python
+
+   from esmvaltool.cmorizers.data import utilities as utils
+
+Apart from a function to easily save data, this module contains different
+kinds of small fixes to the data attributes, coordinates, and metadata which
+are necessary for the data field to be CMOR-compliant.
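+
+Putting these pieces together, a heavily simplified CMORizer might look like
+the sketch below. The dataset, variable entries and raw file name are
+invented for illustration, and the exact helper calls should be checked
+against utilities.py_ and the existing formatter scripts:
+
+.. code-block:: python
+
+   """Hypothetical CMORizer script for a dataset called MYDATA."""
+   import os
+
+   import iris
+
+   from esmvaltool.cmorizers.data import utilities as utils
+
+
+   def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date):
+       """Cmorize each variable listed in the configuration file."""
+       attributes = cfg['attributes']
+       for short_name, var_info in cfg['variables'].items():
+           attributes['mip'] = var_info['mip']
+           # Look up the CMOR definition of this variable.
+           cmor_info = cfg['cmor_table'].get_variable(var_info['mip'],
+                                                      short_name)
+           # Load the raw variable from the (invented) raw file name.
+           raw_file = os.path.join(in_dir, var_info['filename'])
+           cube = iris.load_cube(
+               raw_file, iris.NameConstraint(var_name=var_info['raw']))
+           # Apply the standard fixes and save the result.
+           utils.fix_var_metadata(cube, cmor_info)
+           utils.fix_coords(cube)
+           utils.set_global_atts(cube, attributes)
+           utils.save_variable(cube, short_name, out_dir, attributes,
+                               unlimited_dimensions=['time'])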
+
+Note that the real ``mte.py`` script contains several subroutines in order
+to make the code clearer and more readable (we strongly recommend following
+this code style). For example, the function ``_get_filepath`` converts the
+raw filepath to the correct one and the function ``_extract_variable``
+extracts and saves a single variable from the raw data.
+
+.. _utilities.py: https://github.com/ESMValGroup/ESMValTool/blob/main/esmvaltool/cmorizers/data/utilities.py
+
+
+4.2 Cmorizer script written in NCL
+----------------------------------
+
+Find here an example of a cmorizing script, written for the ``ESACCI XCH4``
+dataset that is available on the Copernicus Climate Data Store:
+`cds_xch4.ncl
+<https://github.com/ESMValGroup/ESMValTool/blob/main/esmvaltool/cmorizers/data/formatters/datasets/cds_xch4.ncl>`_.
+
+The first part of the script collects all the information about the dataset
+that is necessary to write the filename correctly and to understand which
+variable is of interest here. Please make sure to provide the correct
+information for the following keywords: DIAG_SCRIPT, VAR, NAME, MIP, FREQ,
+CMOR_TABLE.
+
+- **Note:** the fields ``VAR``, ``NAME``, ``MIP`` and ``FREQ`` all ask for
+  one or more entries. If more than one entry is provided, make sure that
+  the order of the entries is the same for all four fields (for example,
+  that the first entry in all four fields describes the variable ``xch4``
+  that you would like to extract);
+- **Note:** some functions in the script are NCL-specific and are available
+  through the loading of the script interface.ncl_. There are similar
+  functions available for Python scripts.
+
+.. _interface.ncl: https://github.com/ESMValGroup/ESMValTool/blob/main/esmvaltool/cmorizers/data/formatters/interface.ncl
+
+.. _utilities.ncl: https://github.com/ESMValGroup/ESMValTool/blob/main/esmvaltool/cmorizers/data/formatters/utilities.ncl
+
+In the second part of the script each variable defined in ``VAR`` is
+separately extracted from the original data file and processed. Most parts
+of the code are commented, and therefore it should be easy to follow.
+ESMValTool provides a set of predefined utilities.ncl_, which are imported
+by default into your CMORizer. This module contains different kinds of small
+fixes to the data attributes, coordinates, and metadata which are necessary
+for the data field to be CMOR-compliant.
+
+5. Run the cmorizing script
+===========================
+
+The cmorizing script for the given dataset can be run with:
+
+.. code-block:: console
+
+   esmvaltool data format --config_dir <config-dir> <dataset-name>
+
+The options ``--start`` and ``--end`` can be added to the command above to
+restrict the formatting of raw data to a time range. They will be ignored if
+a specific dataset does not support them (e.g. because the data are provided
+as a single file). Valid formats are ``YYYY``, ``YYYYMM`` and ``YYYYMMDD``.
+
+.. note::
+
+   The output path given in the configuration file is the path where
+   your cmorized dataset will be stored. The ESMValTool will create a folder
+   with the correct tier information
+   (see Section `2. Edit your configuration file`_) if that tier folder is
+   not already available, and then a folder named after the dataset.
+   In this folder the cmorized data set will be stored as a NetCDF file.
+   The cmorized dataset will be automatically moved to the correct tier
+   subfolder of your OBS or OBS6 directory if the option
+   ``--install=True`` is used in the command above and no such directory
+   was already created.
+
+If your run was successful, one or more NetCDF files are produced in your
+output directory.
+
+If a downloading script is available for the dataset, the downloading and
+the cmorizing scripts can be run in a single command with:
+
+.. code-block:: console
+
+   esmvaltool data prepare --config_dir <config-dir> <dataset-name>
+
+Note that options from the ``esmvaltool data download`` and
+``esmvaltool data format`` commands can be passed to the above command.
+
+6. Naming convention of the observational data files
+====================================================
+
+For the ESMValTool to be able to read the observations from the NetCDF file,
+the file name needs a very specific structure and order of information parts
+(very similar to the naming convention for observations in ESMValTool
+v1.0). The file name will automatically be created correctly if a cmorizing
+script has been used to create the NetCDF file.
+
+The correct structure of an observational data set is defined in
+`config-developer.yml
+<https://github.com/ESMValGroup/ESMValCore/blob/main/esmvalcore/config-developer.yml>`_,
+and looks like the following:
+
+.. code-block:: console
+
+   OBS_[dataset]_[type]_[version]_[mip]_[short_name]_YYYYMM-YYYYMM.nc
+
+For the example of the ``CDS-XCH4`` data set, the correct file name then
+looks like this:
+
+.. code-block:: console
+
+   OBS_CDS-XCH4_sat_L3_Amon_xch4_200301-201612.nc
+
+The different parts of the name are explained in more detail here:
+
+- OBS: describes what kind of data can be expected in the file, in this case
+  ``observations``;
+- CDS-XCH4: the name of the dataset. It has been named this way for
+  illustration purposes (so that everybody understands it is the xch4
+  dataset downloaded from the CDS), but a better name would indeed be
+  ``ESACCI-XCH4``, since it is an ESA-CCI dataset;
+- sat: describes the source of the data, here we are looking at satellite
+  data (therefore ``sat``), could also be ``reanaly`` for reanalyses;
+- L3: describes the version of the dataset;
+- Amon: the information in which ``mip`` the variable is to be expected, and
+  what kind of temporal resolution it has; here we expect ``xch4`` to be
+  part of the atmosphere (``A``) and we have the dataset in a monthly
+  resolution (``mon``);
+- xch4: the name of the variable. Each observational data file is supposed
+  to include only one variable per file;
+- 200301-201612: the period the dataset spans, with ``200301`` being the
+  start year and month, and ``201612`` being the end year and month.
+
+.. note::
+   There is a different naming convention for ``obs4MIPs`` data (see the
+   exact specifications for the obs4MIPs data file naming convention in the
+   ``config-developer.yml`` file).
+
+7. Test the cmorized dataset
+============================
+
+To verify that the cmorized data file is indeed correctly formatted, you can
+run a dedicated test recipe that does not include any diagnostic, but only
+reads in the data file and processes it in the preprocessor. Such a recipe
+is called ``recipes/examples/recipe_check_obs.yml``. You just need to add a
+diagnostic for your dataset following the existing entries.
+Only the diagnostic of interest needs to be run; the others should be
+commented out for testing.
diff --git a/doc/sphinx/source/develop/diagnostic.rst b/doc/sphinx/source/develop/diagnostic.rst
new file mode 100644
index 0000000000..9f2d131a61
--- /dev/null
+++ b/doc/sphinx/source/develop/diagnostic.rst
@@ -0,0 +1,46 @@
+Diagnostic
+**********
+
+Instructions for personal diagnostic
+====================================
+
+Anyone can run a personal diagnostic, regardless of where it is located on
+the file system; there is no need to install ESMValTool in developer mode,
+nor to push to git or, for that matter, perform any git operations. The
+example recipe
+
+.. code-block:: console
+
+   esmvaltool/recipes/recipe_my_personal_diagnostic.yml
+
+shows the use of running a personal diagnostic; the example
+
+.. code-block:: console
+
+   esmvaltool/diag_scripts/examples/my_little_diagnostic.py
+
+and any of its alterations may be used as training wheels for the future
+ESMValTool diagnostic developer. The purpose of this example is to
+familiarize the user with the framework of ESMValTool without the
+constraints of installing and running the tool as a developer.
+
+Functionality
+=============
+
+`my_little_diagnostic` (or whatever the user will call their diagnostic)
+makes full use of ESMValTool's preprocessor output (both physical files and
+run variables); this output comes in the form of a nested dictionary, or
+config dictionary (see the example below).
+It also makes full use of the ability to call any of the preprocessor's
+functions; note that relative imports of modules from the esmvaltool package
+are allowed and work without altering the $PYTHONPATH.
+
+The user may parse this dictionary so that they execute a number of
+operations on the preprocessed data; for example,
+`my_little_diagnostic.plot_time_series` grabs the preprocessed data output,
+computes global area averages for each model, then plots a time series for
+each model. Different manipulation functionalities for grouping, sorting,
+etc. of the data in the config dictionary are available; please consult the
+ESMValTool User Manual.
+
+
+Example of config dictionary
+============================
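+A complete, real example is still to be added here. In the meantime, the
+sketch below illustrates the general shape of this dictionary; all paths and
+attribute values are invented, and the exact keys depend on your recipe and
+ESMValTool version, so inspect the dictionary received by your own
+diagnostic for the authoritative structure:
+
+.. code-block:: python
+
+   config = {
+       # Directories provided by the tool for this diagnostic run.
+       'run_dir': '/path/to/recipe_output/run/diagnostic_name/script_name',
+       'work_dir': '/path/to/recipe_output/work/diagnostic_name/script_name',
+       'plot_dir': '/path/to/recipe_output/plots/diagnostic_name/script_name',
+       # Preprocessed files, keyed by file path, with their metadata.
+       'input_data': {
+           '/path/to/recipe_output/preproc/diagnostic_name/ta/file.nc': {
+               'dataset': 'MPI-ESM-LR',
+               'project': 'CMIP5',
+               'exp': 'historical',
+               'ensemble': 'r1i1p1',
+               'mip': 'Amon',
+               'short_name': 'ta',
+               'units': 'K',
+           },
+       },
+       # Any custom settings from the script section of the recipe.
+       'some_custom_setting': True,
+   }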
diff --git a/doc/sphinx/source/develop/index.rst b/doc/sphinx/source/develop/index.rst
new file mode 100644
index 0000000000..4c206d6d73
--- /dev/null
+++ b/doc/sphinx/source/develop/index.rst
@@ -0,0 +1,10 @@
+Making a recipe or diagnostic
+=============================
+
+.. toctree::
+   :maxdepth: 1
+
+   Introduction <introduction>
+   Recipe <recipe>
+   Diagnostic <diagnostic>
+   Dataset <dataset>
diff --git a/doc/sphinx/source/develop/introduction.rst b/doc/sphinx/source/develop/introduction.rst
new file mode 100644
index 0000000000..806dd0853f
--- /dev/null
+++ b/doc/sphinx/source/develop/introduction.rst
@@ -0,0 +1,8 @@
+Introduction
+============
+
+This chapter contains instructions for developing your own recipes and/or
+diagnostics.
+It also contains a section describing how to use additional datasets with
+ESMValTool.
+While it is possible to use just the ESMValCore package and run any
+recipes/diagnostics you develop with it alone, it is highly recommended that
+you consider contributing the work you do back to the ESMValTool community.
+Among the advantages of contributing to the community are improved
+visibility of your work and support from the community in making and
+maintaining your diagnostic.
+See the :ref:`Community <community>` chapter for a guide on how to
+contribute to the community.
diff --git a/doc/sphinx/source/develop/recipe.rst b/doc/sphinx/source/develop/recipe.rst
new file mode 100644
index 0000000000..98f802505d
--- /dev/null
+++ b/doc/sphinx/source/develop/recipe.rst
@@ -0,0 +1,22 @@
+Recipe
+******
+
+Writing a basic recipe
+======================
+The user will need to write a basic recipe to be able to run their own
+personal diagnostic.
+An example of such a recipe is found in
+`esmvaltool/recipes/recipe_my_personal_diagnostic.yml`.
+For general guidelines with regard to ESMValTool recipes please consult the
+User Guide; the specific parameters needed by a recipe that runs a personal
+diagnostic are:
+
+.. code-block:: yaml
+
+   scripts:
+     my_diagnostic:
+       script: /path/to/your/my_little_diagnostic.py
+
+i.e. the full path to the personal diagnostic that the user needs to run.
+
+There is also a lesson available in the
+`ESMValTool tutorial <https://tutorial.esmvaltool.org>`_
+that describes, in a step-by-step procedure, how to write your own recipe.
diff --git a/doc/sphinx/source/developer_guide2/config_developer.inc b/doc/sphinx/source/developer_guide2/config_developer.inc
deleted file mode 100644
index 630de1ea92..0000000000
--- a/doc/sphinx/source/developer_guide2/config_developer.inc
+++ /dev/null
@@ -1,100 +0,0 @@
-.. _config_developer:
-
-*******************
-Configuration files
-*******************
-
-There are several configuration files in ESMValTool:
-
-  - config-user.yml
-  - config-developer.yml
-  - config-references.yml
-  - environment.yml
-
-
-User config file
-================
-
-The ``config-user.yml`` file is the only user-facing configuration file and is
-described in the user guide section of this document.
-
-
-Developer config file
-=====================
-
-This configuration file describes the file system structure for several
-key projects (CMIP5, CMIP6) on several key machines (BADC, CP4CDS, DKRZ, ETHZ,
-SMHI, BSC).
-
-The data directory structure of the CMIP5 project is set up differently
-at each site. The following code snipper is an example of several paths
-descriptions for the CMIP5 at various sites:
-
-.. code-block:: yml
-
-    CMIP5:
-      input_dir:
-        default: '/'
-        BADC: '[institute]/[dataset]/[exp]/[frequency]/[modeling_realm]/[mip]/[ensemble]/latest/[short_name]'
-        CP4CDS: '[institute]/[dataset]/[exp]/[frequency]/[modeling_realm]/[mip]/[ensemble]/[short_name]/latest/'
-        DKRZ: '[institute]/[dataset]/[exp]/[frequency]/[modeling_realm]/[mip]/[ensemble]/[latestversion]/[short_name]'
-        ETHZ: '[exp]/[mip]/[short_name]/[dataset]/[ensemble]/'
-        SMHI: '[dataset]/[ensemble]/[exp]/[frequency]'
-        BSC: '[project]/[exp]/[dataset.lower]'
-
-As an example, the CMIP5 file path on BADC would be:
-
-.. code-block:: yml
-
-    [institute]/[dataset ]/[exp]/[frequency]/[modeling_realm]/[mip]/[ensemble]/latest/[short_name]
-
-When loading these files, ESMValTool replaces the placeholders with the true
-values. The resulting real path would look something like this:
-
-..
code-block:: yml - - MOHC/HadGEM2-CC/rcp85/mon/ocean/Omon/r1i1p1/latest/tos - - -References config file -================================ - -The ``config-references.yml`` file is the full list of ESMValTool authors, -references and projects. Each author, project and reference in the documentation -section of a recipe needs to be in this file in the relevant section. - -For instance, the recipe ``recipe_ocean_example.yml`` file contains the following -documentation section: - -.. code-block:: yml - - documentation - authors: - - demo_le - - maintainer: - - demo_le - - references: - - demora2018gmd - - projects: - - ukesm - - -All four items here are named people, references and projects listed in the -``config-references.yml`` file. - - - -Environment config file -================================ - -This is the conda envronment which informs conda as to which packages -and which versions are needed in order to install ESMValTool. -It is called in the update and install commands: - -.. code-block:: bash - - conda env install --name esmvaltool --file ESMValTool/environment.yml - conda env update --name esmvaltool --file ESMValTool/environment.yml diff --git a/doc/sphinx/source/developer_guide2/core_team.inc b/doc/sphinx/source/developer_guide2/core_team.inc deleted file mode 100644 index 7a3e0ba9a4..0000000000 --- a/doc/sphinx/source/developer_guide2/core_team.inc +++ /dev/null @@ -1,52 +0,0 @@ -.. _core_team: - -************************************ -The ESMValTool core development team -************************************ - -.. _core_dev_team: - -Main contacts -============= - -A mailing list has been set up for all general and technical questions on the ESMValTool such as, for instance, -questions on installation, application or development. You are encouraged to subscribe to the ESMValTool user -mailing list by sending an email to Listserv@dlr.de with the following text: - -.. centered:: *subscribe ESMValTool-Usr* - -Core development team -===================== - -* Deutsches Zentrum für Luft- und Raumfahrt (DLR), Institut für Physik der Atmosphäre, Germany (PI) - - ESMValTool Core PI and Developer: contact for requests to use the ESMValTool and for collaboration with the - development team, access to the PRIVATE GitHub repository. - -* Alfred-Wegener-Institute Bremerhaven (AWI), Germany -* Barcelona Computing Center (BSC), Spain -* Ludwig Maximilian University of Munich, Germany -* Netherlands E-Science Center, Netherlands -* University of Reading, United Kingdom - -Contacts for specific diagnostic sets are the respective authors, as listed in the corresponding diagnostic -documentation and in the source code. - -Pull requests -============= - -This section describes the general workflow of how new diagnostics are integrated into the ESMValTool and the -responsibilities of the developer contribution to the ESMValTool. To be written for v2. - -Workflow core development team ------------------------------- - -The following workflow followed by the ESMValTool core development team takes place whenever a developer -requests integration of a diagnostics set into the *development branch*. - -To be written for v2. - -Responsibilities of ESMValTool developers ------------------------------------------ - -To be written. diff --git a/doc/sphinx/source/developer_guide2/git_repository.inc b/doc/sphinx/source/developer_guide2/git_repository.inc deleted file mode 100644 index 6e5c832c0b..0000000000 --- a/doc/sphinx/source/developer_guide2/git_repository.inc +++ /dev/null @@ -1,179 +0,0 @@ -.. 
_git_repository: - -************** -Git repository -************** - -Basics -====== - -The source code of the ESMValTool is hosted on GitHub. The following description gives an overview of the typical workflow and usage for implementing new diagnostics or technical changes into the ESMValTool. For general information on Git, see e.g. the online documentation at https://www.git-scm.com/doc. - -There are *two* ESMValTool GitHub repositories available: - -#. The **PUBLIC** GitHub repository is open to the public. The ESMValTool is released as open-source software under the Apache License 2.0. Use of the software constitutes acceptance of this license and terms. The PUBLIC ESMValTool repository is located at https://github.com/ESMValGroup/ESMValTool - -#. The **PRIVATE** GitHub repository is restricted to the ESMValTool Development Team. This repository is only accessible to ESMValTool developers that have accepted the terms of use for the ESMValTool development environment. The use of the ESMValTool software and access to the private ESMValTool GitHub repository constitutes acceptance of these terms. *When you fork or copy this repository, you must ensure that you do not copy the PRIVATE repository into an open domain!* The PRIVATE ESMValTool repository for the ESMValTool development team is located at https://github.com/ESMValGroup/ESMValTool-private - -All developments can be made in either of the two repositories. The creation of *FEATURE BRANCHES* (see below), however, is restricted to registered ESMValTool developers in both repositories. We encourage all developers to join the ESMValTool development team. Please contact the ESMValTool Core Development Team (Section :ref:`core_dev_team`) if you want to join the ESMValTool development team. -The PRIVATE GitHub repository offers a central protected environment for ESMValTool developers who would like to keep their contributions undisclosed (e.g., unpublished scientific work, work in progress by PhD students) while at the same time benefiting from the possibilities of collaborating with other ESMValTool developers and having a backup of their work. *FEATURE BRANCHES* created in the PRIVATE repository are only visible to the ESMValTool development team but not to the public. The concept of a PRIVATE repository has proven to be very useful to efficiently share code during the development across institutions and projects in a common repository without having the contributions immediately accessible to the public. - -Both, the PUBLIC and the PRIVATE repository, contain the following kinds of branches: - -* *MASTER BRANCH* (official releases), -* *DEVELOPMENT BRANCH* (includes approved new contributions but version is not yet fully tested), -* *FEATURE BRANCH* (development branches for new features and diagnostics created by developers, the naming convention for *FEATURE BRANCHES* is _). - -Access rights -============= - -* Write access to the *MASTER* and *DEVELOPMENT BRANCH* in both, the PUBLIC and the PRIVATE GitHub repositories, is restricted to the ESMValTool core development team. -* *FEATURE BRANCHES* in both the PUBLIC and the PRIVATE repository can be created by all members of the ESMValTool development team (i.e. members in the GitHub organization "ESMValGroup"). If needed, branches can be individually write-protected within each repository so that other developers cannot accidently push changes to these branches. 
- -The *MASTER BRANCH* of the PRIVATE repository will be regularly synchronized with the *MASTER BRANCH* of the PUBLIC repository (currently by hand). This ensures that they are identical at all times (see schematic in :ref:`fig_git`). The recommended workflow for members of the ESMValTool development team is to create additional *FEATURE BRANCHES* in either the PUBLIC or the PRIVATE repository, see further instructions below. - -.. _fig_git: -.. figure:: /figures/git_diagram.png - :align: center - :width: 10cm - - Schematic diagram of the ESMValTool GitHub repositories. - -Workflow -======== - -The following description gives an overview of the typical workflow and usage for implementing new diagnostics or technical changes into the ESMValTool. The description assumes that your local development machine is running a Unix-like operating system. For a general introduction to Git tutorials such as, for instance, https://www.git-scm.com/docs/gittutorial are recommended. - -Getting started ---------------- - -First make sure that you have Git installed on your development machine. On shared machines, software is usually installed using the environment modules. Try e.g. - -.. code:: bash - - module avail git - -if this is the case. You can ask your system administrator for assistance. You can test this with the command: - -.. code:: bash - - git --version - -In order to properly identify your contributions to the ESMValTool you need to configure your local Git with some personal data. This can be done with the following commands: - -.. code:: bash - - git config --global user.name "YOUR NAME" - git config --global user.email "YOUR EMAIL" - -.. note:: For working on GitHub you need to create an account and login to https://github.com/. - -Working with the ESMValTool GitHub Repositories ------------------------------------------------ - -As a member of the ESMValTool development team you can create *FEATURE BRANCHES* in the PUBLIC as well as in the PRIVATE repository. We encourage all ESMValTool developers to use the following workflow for long-lived developments (>2 weeks). - -* Login to GitHub.com -* On GitHub, go to the website of the ESMValTool repository (https://github.com/ESMValGroup/ESMValTool-private or https://github.com/ESMValGroup/ESMValTool) -* Click on the button create *FEATURE BRANCH* -* Select the *"DEVELOPMENT" BRANCH* and create a new *FEATURE BRANCH* for the diagnostic/feature you want to implement. Please follow the following naming convention for your new *FEATURE BRANCH*: _. - -.. figure:: /figures/git_branch.png - :align: center - :width: 6cm - -* Click the button "Clone or Download" and copy the URL shown there -* Open a terminal window and go to the folder where you would like to store your local copy of the ESMValTool source -* Type git clone, and paste the URL: - -.. code:: bash - - git clone - -This will clone the ESMValTool repository at GitHub to a local folder. You can now query the status of your local working copy with: - -.. code:: bash - - git status - -You will see that you are on a branch called master and your local working copy is up to date with the remote repository. With - -.. code:: bash - - git branch --all - -you can list all available remote and local branches. Now switch to your feature branch by: - -.. code:: bash - - git checkout - -You can now start coding. To check your current developments you can use the command - -.. code:: bash - - git status - -You can add new files and folders that you want to have tracked by Git using: - -.. 
code:: bash - - git add - -Commit your tracked changes to your local working copy via: - -.. code:: bash - - git commit -m "YOUR COMMIT MESSAGE" - -You can inspect your changes with (use man git-log for all options): - -.. code:: bash - - git log - -To share your work and to have an online backup, push your local development to your *FEATURE BRANCH* on GitHub: - -.. code:: bash - - git push origin - -.. note:: An overview on Git commands and best practices can be found e.g. here: https://zeroturnaround.com/rebellabs/git-commands-and-best-practices-cheat-sheet/ - -Pull requests -------------- - -Once your development is completely finished, go to the GitHub website of the ESMValTool repository and switch to your *FEATURE BRANCH*. You can then initiate a pull request by clicking on the button "New pull request". Select the *DEVELOPMENT BRANCH* as "base branch" and click on "Create pull request". Your pull request will then be tested, discussed and implemented into the *DEVELPOMENT BRANCH* by the ESMValTool Core Development Team. - -.. attention:: Before creating a pull request, please make sure all requirements listed in Sections :ref:`writing` and :ref:`documentation` are fully met (see also checklist in :ref:`tab_checklist`). - -GitHub issues -------------- - -In case you encounter a bug of if you have a feature request or something similar you can open an issue on the PUBLIC ESMValTool GitHub repository. - -General do-s and don't-s -======================== - -Do-s ----- - -* Create a *FEATURE BRANCH* and use exclusively this branch for developing the ESMValTool. The naming convention for *FEATURE BRANCHES* is _. -* Comment your code as much as possible and in English. -* Use short but self-explanatory variable names (e.g., model_input and reference_input instead of xm and xr). -* Consider a modular/functional programming style. This often makes code easier to read and deletes intermediate variables immediately. If possible, separate diagnostic calculations from plotting routines. -* Consider reusing or extending existing code. General-purpose code can be found in esmvaltool/diag_scripts/shared/. -* Comment all switches and parameters including a list of all possible settings/options in the header section of your code (see also Section :ref:`std_diag`). -* Use templates for recipes (Section :ref:`std_recipe`) and diagnostics (Section :ref:`std_diag`) to help with proper documentation. -* Keep your *FEATURE BRANCH* regularly synchronized with the *DEVELOPMENT BRANCH* (git merge). -* Keep developments / modifications of the ESMValTool framework / backend / basic structure separate from developments of diagnostics by creating different *FEATURE BRANCHES* for these two kinds of developments. Create *FEATURE BRANCHES* for changes / modifications of the ESMValTool framework only in the *PUBLIC* repository. - -Don't-s -------- - -* Do not use other programming languages than the ones currently supported (NCL, Python, R). Contact the Core Development Team (Section :ref:`core_dev_team`) if you wish to use another language, but remember that only open-source languages are supported by the ESMValTool. -* Do not develop without proper version control (see do-s above). -* Avoid large (memory, disk space) intermediate results. Delete intermediate files/variables or see modular/functional programming style. -* Do not use hard-coded pathnames or filenames. -* Do not mix developments / modifications of the ESMValTool framework and developments / modifications of diagnostics in the same *FEATURE BRANCH*. 
- diff --git a/doc/sphinx/source/developer_guide2/index.rst b/doc/sphinx/source/developer_guide2/index.rst deleted file mode 100644 index ea65b5dae0..0000000000 --- a/doc/sphinx/source/developer_guide2/index.rst +++ /dev/null @@ -1,9 +0,0 @@ -################# -Developer's Guide -################# - -.. include:: new_diagnostic.inc -.. include:: porting.inc -.. include:: git_repository.inc -.. include:: core_team.inc -.. include:: config_developer.inc diff --git a/doc/sphinx/source/developer_guide2/new_diagnostic.inc b/doc/sphinx/source/developer_guide2/new_diagnostic.inc deleted file mode 100644 index bf98741688..0000000000 --- a/doc/sphinx/source/developer_guide2/new_diagnostic.inc +++ /dev/null @@ -1,202 +0,0 @@ -.. _new_diagnostic: - -*************************************** -Contributing a new diagnostic or recipe -*************************************** - -Getting started -=============== - -Please discuss your idea for a new diagnostic or recipe with the development team before getting started, -to avoid disappointment later. A good way to do this is to open an -`issue on GitHub `_. -This is also a good way to get help. - -Creating a recipe and diagnostic script(s) -========================================== -First create a recipe in esmvaltool/recipes to define the input data your analysis script needs -and optionally preprocessing and other settings. Also create a script in the esmvaltool/diag_scripts directory -and make sure it is referenced from your recipe. The easiest way to do this is probably to copy the example recipe -and diagnostic script and adjust those to your needs. -A good example recipe is esmvaltool/recipes/examples/recipe_python.yml -and a good example diagnostic is esmvaltool/diag_scripts/examples/diagnostic.py. - -If you have no preferred programming language yet, Python 3 is highly recommended, because it is most well supported. -However, NCL, R, and Julia scripts are also supported. - -Unfortunately not much documentation is available at this stage, -so have a look at the other recipes and diagnostics for further inspiration. - -Re-using existing code -====================== -Always make sure your code is or can be released under a license that is compatible with the Apache 2 license. - -If you have existing code in a supported scripting language, you have two options for re-using it. If it is fairly -mature and a large amount of code, the preferred way is to package and publish it on the -official package repository for that language and add it as a dependency of esmvaltool. -If it is just a few simple scripts or packaging is not possible (i.e. for NCL) you can simply copy -and paste the source code into the esmvaltool/diag_scripts directory. - -If you have existing code in a compiled language like -C, C++, or Fortran that you want to re-use, the recommended way to proceed is to add Python bindings and publish -the package on PyPI so it can be installed as a Python dependency. You can then call the functions it provides -using a Python diagnostic. - -Interfaces and provenance -========================= -When ESMValTool runs a recipe, it will first find all data and run the default preprocessor steps plus any -additional preprocessing steps defined in the recipe. Next it will run the diagnostic script defined in the recipe -and finally it will store provenance information. Provenance information is stored in the -`W3C PROV XML format `_ -and also plotted in an SVG file for human inspection. 
In addition to provenance information, a caption is also added -to the plots. - -In order to communicate with the diagnostic script, two interfaces have been defined, which are described below. -Note that for Python and NCL diagnostics much more convenient methods are available than -directly reading and writing the interface files. For other languages these are not implemented yet. - -Using the interfaces from Python --------------------------------- -Always use :meth:`esmvaltool.diag_scripts.shared.run_diagnostic` to start your script and make use of a -:class:`esmvaltool.diag_scripts.shared.ProvenanceLogger` to log provenance. Have a look at the example -Python diagnostic in esmvaltool/diag_scripts/examples/diagnostic.py for a complete example. - -Using the interfaces from NCL ------------------------------ -Always call the ``log_provenance`` procedure after plotting from your NCL diag_script. You could find available shortcuts for -statistics, domain, plottype, authors and references in the ``config-references.yml`` file. - -.. code-block:: bash - - log_provenance(nc-file,plot_file,caption,statistics,domain,plottype,authors,references,input-files) - -Have a look at the example NCL diagnostic in ``esmvaltool/diag_scripts/examples/diagnostic.ncl`` for a complete example. - -Generic interface between backend and diagnostic ------------------------------------------------- -To provide the diagnostic script with the information it needs to run (e.g. location of input data, various settings), -the backend creates a YAML file called settings.yml and provides the path to this file as the first command line -argument to the diagnostic script. - -The most interesting settings provided in this file are - -.. code:: yaml - - run_dir: /path/to/recipe_output/run/diagnostic_name/script_name - work_dir: /path/to/recipe_output/work/diagnostic_name/script_name - plot_dir: /path/to/recipe_output/work/diagnostic_name/script_name - input_files: - - /path/to/recipe_output/preproc/diagnostic_name/ta/metadata.yml - - /path/to/recipe_output/preproc/diagnostic_name/pr/metadata.yml - -Custom settings in the script section of the recipe will also be made available in this file. - -There are three directories defined: - -- :code:`run_dir` use this for storing temporary files -- :code:`work_dir` use this for storing NetCDF files containing the data used to make a plot -- :code:`plot_dir` use this for storing plots - -Finally :code:`input_files` is a list of YAML files, containing a description of the preprocessed data. Each entry in these -YAML files is a path to a preprocessed file in NetCDF format, with a list of various attributes. -An example preprocessor metadata.yml file could look like this - -.. code:: yaml - - ? /path/to/recipe_output/preproc/diagnostic_name/pr/CMIP5_GFDL-ESM2G_Amon_historical_r1i1p1_T2Ms_pr_2000-2002.nc - : cmor_table: CMIP5 - dataset: GFDL-ESM2G - diagnostic: diagnostic_name - end_year: 2002 - ensemble: r1i1p1 - exp: historical - filename: /path/to/recipe_output/preproc/diagnostic_name/pr/CMIP5_GFDL-ESM2G_Amon_historical_r1i1p1_T2Ms_pr_2000-2002.nc - frequency: mon - institute: [NOAA-GFDL] - long_name: Precipitation - mip: Amon - modeling_realm: [atmos] - preprocessor: preprocessor_name - project: CMIP5 - recipe_dataset_index: 1 - reference_dataset: MPI-ESM-LR - short_name: pr - standard_name: precipitation_flux - start_year: 2000 - units: kg m-2 s-1 - variable_group: pr - ? 
/path/to/recipe_output/preproc/diagnostic_name/pr/CMIP5_MPI-ESM-LR_Amon_historical_r1i1p1_T2Ms_pr_2000-2002.nc - : cmor_table: CMIP5 - dataset: MPI-ESM-LR - diagnostic: diagnostic_name - end_year: 2002 - ensemble: r1i1p1 - exp: historical - filename: /path/to/recipe_output/preproc/diagnostic1/pr/CMIP5_MPI-ESM-LR_Amon_historical_r1i1p1_T2Ms_pr_2000-2002.nc - frequency: mon - institute: [MPI-M] - long_name: Precipitation - mip: Amon - modeling_realm: [atmos] - preprocessor: preprocessor_name - project: CMIP5 - recipe_dataset_index: 2 - reference_dataset: MPI-ESM-LR - short_name: pr - standard_name: precipitation_flux - start_year: 2000 - units: kg m-2 s-1 - variable_group: pr - -Generic interface between diagnostic and backend ------------------------------------------------- - -After the diagnostic script has finished running, the backend will try to store provenance information. In order to -link the produced files to input data, the diagnostic script needs to store a YAML file called :code:`diagnostic_provenance.yml` -in it's :code:`run_dir`. - -For output file produced by the diagnostic script, there should be an entry in the :code:`diagnostic_provenance.yml` file. -The name of each entry should be the path to the output file. -Each file entry should at least contain the following items - -- :code:`ancestors` a list of input files used to create the plot -- :code:`caption` a caption text for the plot -- :code:`plot_file` if the diagnostic also created a plot file, e.g. in .png format. - -Each file entry can also contain items from the categories defined in the file :code:`esmvaltool/config_references.yml`. -The short entries will automatically be replaced by their longer equivalent in the final provenance records. -It is possible to add custom provenance information by adding custom items to entries. - -An example :code:`diagnostic_provenance.yml` file could look like this - -.. code:: yaml - - ? /path/to/recipe_output/work/diagnostic_name/script_name/CMIP5_GFDL-ESM2G_Amon_historical_r1i1p1_T2Ms_pr_2000-2002_mean.nc - : ancestors: - - /path/to/recipe_output/preproc/diagnostic_name/pr/CMIP5_GFDL-ESM2G_Amon_historical_r1i1p1_T2Ms_pr_2000-2002.nc - authors: [ande_bo, righ_ma] - caption: Average Precipitation between 2000 and 2002 according to GFDL-ESM2G. - domains: [global] - plot_file: /path/to/recipe_output/plots/diagnostic_name/script_name/CMIP5_GFDL-ESM2G_Amon_historical_r1i1p1_T2Ms_pr_2000-2002_mean.png - plot_type: zonal - references: [acknow_project] - statistics: [mean] - ? /path/to/recipe_output/work/diagnostic_name/script_name/CMIP5_MPI-ESM-LR_Amon_historical_r1i1p1_T2Ms_pr_2000-2002_mean.nc - : ancestors: - - /path/to/recipe_output/preproc/diagnostic_name/pr/CMIP5_MPI-ESM-LR_Amon_historical_r1i1p1_T2Ms_pr_2000-2002.nc - authors: [ande_bo, righ_ma] - caption: Average Precipitation between 2000 and 2002 according to MPI-ESM-LR. 
- domains: [global] - plot_file: /path/to/recipe_output/plots/diagnostic_name/script_name/CMIP5_MPI-ESM-LR_Amon_historical_r1i1p1_T2Ms_pr_2000-2002_mean.png - plot_type: zonal - references: [acknow_project] - statistics: [mean] - -You can check whether your diagnostic script successfully provided the provenance information to the backend by -verifying that - -- for each output file in the :code:`work_dir`, a file with the same name, but ending with _provenance.xml is created -- any NetCDF files created by your diagnostic script contain a 'provenance' global attribute -- any PNG plots created by your diagnostic script contain the provenance information in the 'Image History' attribute - -Note that this is done automatically by the ESMValTool backend. diff --git a/doc/sphinx/source/developer_guide2/porting.inc b/doc/sphinx/source/developer_guide2/porting.inc deleted file mode 100644 index e707c060f3..0000000000 --- a/doc/sphinx/source/developer_guide2/porting.inc +++ /dev/null @@ -1,216 +0,0 @@ -.. _porting: - -************************************************************** -Porting namelists (recipes) and diagnostics to ESMValTool v2.0 -************************************************************** - -This guide summarizes the main steps to be taken in order to port an ESMValTool namelist (now called **recipe**) and the corresponding diagnostic(s) from v1.0 to v2.0, hereafter also referred as the *"old"* and the *"new version"*, respectively. The new ESMValTool version is being developed in the public git branch ``version2_development``. An identical version of this branch is maintained in the private repository as well and kept synchronized on an hourly basis. - -In the following, it is assumed that the user has successfully installed ESMValTool v2 and has a rough overview of its structure (see `Technical Overview `_). - -Create a github issue -===================== - -Create an issue in the public repository to keep track of your work and inform other developers. See an example `here `_. Use the following title for the issue: "PORTING into v2.0". -Do not forget to assign it to yourself. - -Create your own branch -====================== - -Create your own branch from ``version2_development`` for each namelist (recipe) to be ported: - -.. code-block:: bash - - git checkout version2_development - git checkout -b version2_ - -``version2_development`` contains only v2.0 under the ``./esmvaltool/`` directory. - -Convert xml to yml -================== - -In ESMValTool v2.0, the namelist (now recipe) is written in yaml format (`Yet Another Markup Language format `_). It may be useful to activate the yaml syntax highlighting for the editor in use. This improves the readability of the recipe file and facilitates the editing, especially concerning the indentations which are essential in this format (like in python). Instructions can be easily found online, for example for `emacs `_ and `vim `_. - -A xml2yml converter is available in ``esmvaltool/utils/xml2yml/``, please refer to the corresponding README file for detailed instructions on how to use it. - -Once the recipe is converted, a first attempt to run it can be done, possibly starting with a few datasets and one diagnostics and proceed gradually. The recipe file ``./esmvaltool/recipes/recipe_perfmetrics_CMIP5.yml`` can be used as an example, as it covers most of the common cases. 
-
-Do not forget to also rewrite the recipe header in a ``documentation`` section using the yaml syntax and, if possible, to add ``themes`` and ``realms`` items to each diagnostic section. All keys and tags used for this part must be defined in ``./esmvaltool/config-references.yml``. See ``./esmvaltool/recipes/recipe_perfmetrics_CMIP5.yml`` for an example.
-
-Create a copy of the diag script in v2.0
-========================================
-
-The diagnostic script to be ported goes into the directory ./esmvaltool/diag_scripts/. It is recommended to get a copy of the latest version of the script to be ported from the development branch (either in the public or in the private repository). Just create a local (offline) copy of this file from the repository and add it to ./esmvaltool/diag_scripts/ as a new file.
-
-Note that (in general) this is not necessary for plot scripts and for the libraries in ``./esmvaltool/diag_scripts/ncl/lib/``, which have already been ported. Changes may however still be necessary, especially in the plot scripts, which have not yet been fully tested with all diagnostics.
-
-Check and apply renamings
-=========================
-
-The new ESMValTool version includes a completely revised interface, handling the communication between the Python workflow and the (NCL) scripts. This required several variables and functions to be renamed or removed. These changes are listed in the following table and have to be applied to the diagnostic code before starting with testing.
-
-.. tabularcolumns:: |p{6cm}|p{6cm}|p{3cm}|
-
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| Name in v1.0                                    | Name in v2.0                                              | Affected code    |
-+=================================================+===========================================================+==================+
-| ``getenv("ESMValTool_wrk_dir")``                | ``config_user_info@work_dir``                             | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``getenv(ESMValTool_att)``                      | ``diag_script_info@att`` or                               | all .ncl scripts |
-|                                                 | ``config_user_info@att``                                  |                  |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``xml``                                         | ``yml``                                                   | all scripts      |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``var_attr_ref(0)``                             | ``variable_info@reference_dataset``                       | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``var_attr_ref(1)``                             | ``variable_info@alternative_dataset``                     | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``models``                                      | ``input_file_info``                                       | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``models@name``                                 | ``input_file_info@dataset``                               | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``verbosity``                                   | ``config_user_info@log_level``                            | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-|
 ``isfilepresent_esmval``                        | ``fileexists``                                            | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``messaging.ncl``                               | ``logging.ncl``                                           | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``info_output(arg1, arg2, arg3)``               | ``log_info(arg1)`` if ``arg3=1``                          | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``info_output(arg1, arg2, arg3)``               | ``log_debug(arg1)`` if ``arg3>1``                         | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``verbosity = config_user_info@verbosity``      | remove this statement                                     | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``enter_msg(arg1, arg2, arg3)``                 | ``enter_msg(arg1, arg2)``                                 | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``leave_msg(arg1, arg2, arg3)``                 | ``leave_msg(arg1, arg2)``                                 | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``noop()``                                      | appropriate ``if-else`` statement                         | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``nooperation()``                               | appropriate ``if-else`` statement                         | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``fullpaths``                                   | ``input_file_info@filename``                              | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``get_output_dir(arg1, arg2)``                  | ``config_user_info@plot_dir``                             | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``get_work_dir``                                | ``config_user_info@work_dir``                             | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``inlist(arg1, arg2)``                          | ``any(arg1.eq.arg2)``                                     | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``load interface_scripts/*.ncl``                | ``load $diag_scripts/../interface_scripts/interface.ncl``| all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``_info.tmp``                                   | ``_info.ncl`` in ``preproc`` dir                          | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``ncl.interface``                               | ``settings.ncl`` in ``run_dir`` and                       | all .ncl scripts |
-|                                                 | ``interface_scripts/interface.ncl``                       |                  |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``load diag_scripts/lib/ncl/``                  | ``load $diag_scripts/shared/``                            | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``load plot_scripts/ncl/``                      | ``load $diag_scripts/shared/plot/``                       | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``load diag_scripts/lib/ncl/rgb/``              | ``load $diag_scripts/shared/plot/rgb/``                   | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``load diag_scripts/lib/ncl/styles/``           | ``load $diag_scripts/shared/plot/styles``                 | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``load diag_scripts/lib/ncl/misc_function.ncl`` | ``load $diag_scripts/shared/plot/misc_function.ncl``      | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``LW_CRE``, ``SW_CRE``                          | ``lwcre``, ``swcre``                                      | some yml recipes |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``check_min_max_models``                        | ``check_min_max_datasets``                                | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``get_ref_model_idx``                           | ``get_ref_dataset_idx``                                   | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-| ``get_model_minus_ref``                         | ``get_dataset_minus_ref``                                 | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------------+------------------+
-
-The following changes may also have to be considered:
-
-- namelists are now called recipes and collected in ``esmvaltool/recipes``;
-- models are now called datasets and all files have been updated accordingly, including NCL functions (see table above);
-- ``run_dir`` (previously ``interface_data``), ``plot_dir``, ``work_dir`` are now unique to each diagnostic script, so it is no longer necessary to define specific paths in the diagnostic scripts to prevent file collisions;
-- ``input_file_info`` is now a list of logicals, where each element describes one dataset and one variable. Convenience functions to extract the required elements (e.g., all datasets of a given variable) are provided in ``esmvaltool/interface_scripts/interface.ncl``;
-- the interface functions ``interface_get_*`` and ``get_figure_filename`` are no longer available: their functionalities can be easily reproduced using ``input_file_info`` and the convenience functions in ``esmvaltool/interface_scripts/interface.ncl`` to access the required attributes;
-- there are now only 4 log levels (``debug``, ``info``, ``warning``, and ``error``) instead of (infinite) numerical values in ``verbosity``;
-- diagnostic scripts are now organized in subdirectories in ``esmvaltool/diag_scripts/``: all scripts belonging to the same diagnostic are to be collected in a single subdirectory (see ``esmvaltool/diag_scripts/perfmetrics/`` for example).
 This applies also to the ``aux_`` scripts, unless they are shared among multiple diagnostics (in this case they go in ``shared/``);
-- the relevant input_file_info items required by a plot routine should be passed as arguments to the routine itself;
-- upper case characters have to be avoided in script names, if possible.
-
-As for the recipe, the diagnostic script ``./esmvaltool/diag_scripts/perfmetrics/main.ncl`` can be followed as a working example.
-
-Move preprocessing from the diagnostic script to the backend
-============================================================
-
-Many operations previously performed by the diagnostic scripts are now handled by the backend, including level extraction, regridding, masking, and multi-model statistics. If the diagnostic to be ported contains code performing any of these operations, the corresponding code has to be removed from the diagnostic script and the respective backend functionality can be used instead.
-
-The backend operations are fully controlled by the ``preprocessors`` section in the recipe. Here, a number of preprocessor sets can be defined, with different options for each of the operations. The sets defined in this section are applied in the ``diagnostics`` section to preprocess a given variable.
-
-It is recommended to proceed step by step, porting and testing each operation separately before proceeding with the next one. A useful setting in the user configuration file (``config-private.yml``) called ``write_intermediary_cube`` allows writing out the variable field after each preprocessing step, thus facilitating the comparison with the old version (e.g., after CMORization, after level selection, after regridding, etc.). The CMORization step of the new backend exactly corresponds to the operation performed by the old backend (and stored in the ``climo`` directory, now called ``preproc``): this is the very first step to be checked, by simply comparing the intermediary file produced by the new backend after CMORization with the output of the old backend in the ``climo`` directory (see "Testing" below for instructions).
-
-The new backend also performs variable derivation, replacing the ``calculate`` function in the ``variable_defs`` scripts. If the recipe being ported makes use of derived variables, the corresponding calculation must be ported from the ``./variable_defs/.ncl`` file to ``./esmvaltool/preprocessor/_derive.py``.
-
-Note that the Python library ``esmval_lib``, containing the ``ESMValProject`` class, is no longer available in version 2. Most functionalities have been moved to the new preprocessor. If you miss a feature, please open an issue on `GitHub `_.
-
-Move diagnostic- and variable-specific settings to the recipe
-=============================================================
-
-In the new version, all settings are centralized in the recipe, completely replacing the diagnostic-specific settings in ``./nml/cfg_files/`` (passed as ``diag_script_info`` to the diagnostic scripts) and the variable-specific settings in ``variable_defs/.ncl`` (passed as ``variable_info``). There is also no longer a distinction between diagnostic- and variable-specific settings: they are collectively defined in the ``scripts`` dictionary of each diagnostic in the recipe and passed as ``diag_script_info`` attributes by the new ESMValTool interface.
 Note that the ``variable_info`` logical still exists, but it is used to pass variable information as given in the corresponding dictionary of the recipe.
-
-Make sure the diagnostic script writes NetCDF output
-====================================================
-
-Each diagnostic script is required to write the output of the analysis in one or more NetCDF files. This is to give the user the possibility to further look into the results, besides the plots, but (most importantly) for tagging purposes when publishing the data in a report and/or on a website.
-
-For each plot produced by the diagnostic script, a single NetCDF file has to be generated. The variable saved in this file should also contain all the necessary metadata that documents the plot (dataset names, units, statistical methods, etc.).
-The files have to be saved in the work directory (defined in ``cfg['work_dir']`` and ``config_user_info@work_dir``, for Python and NCL diagnostics, respectively).
-
-Test the recipe/diagnostic in the new version
-=============================================
-
-Once complete, the porting of the diagnostic script can be tested. Most diagnostic scripts allow writing the output to a NetCDF file before calling the plotting routine. This output can be used to check whether the results of v1.0 are correctly reproduced. As a reference for v1.0, it is recommended to use the development branch.
-
-There are two methods for comparing NetCDF files: ``cdo`` and ``ncdiff``. The first method is applied with the command:
-
-.. code-block:: bash
-
-    cdo diffv old_output.nc new_output.nc
-
-which will print a log to the stdout, reporting how many records of the file differ and the absolute/relative differences.
-
-The second method produces a NetCDF file (e.g., ``diff.nc``) with the difference between two given files:
-
-.. code-block:: bash
-
-    ncdiff old_output.nc new_output.nc diff.nc
-
-This file can be opened with ``ncview`` to visually inspect the differences.
-
-In general, binary identical results cannot be expected, due to the use of different languages and algorithms in the two versions, especially for complex operations such as regridding. However, differences within machine precision are desirable. At this stage, it is essential to test all datasets in the recipe and not just a subset of them.
-
-It is also recommended to compare the graphical output (this may be necessary if the ported diagnostic does not produce a NetCDF output). For this comparison, the PostScript format is preferable, since it is easy to directly compare two PostScript files with the standard ``diff`` command in Linux:
-
-.. code-block:: bash
-
-    diff old_graphic.ps new_graphic.ps
-
-although it is very unlikely that no differences are found; therefore, visual inspection of the output may also be required.
-
-Clean the code
-==============
-
-Before submitting a pull request, the code should be cleaned to adhere to the coding standards, which are somewhat stricter in v2.0. This check is performed automatically on GitHub (CircleCI and Codacy) when opening a pull request on the public repository. A code-style checker (``nclcodestyle``) is available in the tool to check NCL scripts and is installed alongside the tool itself.
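-
-For example, a hypothetical invocation (the script path is illustrative) could be:
-
-.. code-block:: bash
-
-    nclcodestyle esmvaltool/diag_scripts/perfmetrics/main.ncl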
When checking NCL code style, the following should be considered in addition to the warnings issued by the style checker:
-
-- two-space instead of four-space indentation is now adopted for NCL as per NCL standard;
-- ``load`` statements for NCL standard libraries should be removed: these are automatically loaded since NCL v6.4.0 (see `NCL documentation `_);
-- the description of diagnostic- and variable-specific settings can be moved from the header of the diagnostic script to the recipe, since the settings are now defined there (see above);
-- NCL ``print`` and ``printVarSummary`` statements must be avoided and replaced by the ``log_info`` and ``log_debug`` functions;
-- for error and warning statements, the ``error_msg`` function can be used, which automatically includes an exit statement.
-
-Update the documentation
-========================
-
-If necessary, add or update the documentation for your recipes in the corresponding rst file, which is now in ``doc/sphinx/source/recipes``. Do not forget to also add the documentation file to the list in ``doc/sphinx/source/annex_c`` to make sure it actually appears in the documentation.
-
-Open a pull request
-===================
-
-Create a pull request on GitHub to merge your branch back to ``version2_development``, provide a short description of what has been done and nominate one or more reviewers.
diff --git a/doc/sphinx/source/faq.rst b/doc/sphinx/source/faq.rst
new file mode 100644
index 0000000000..43251a801b
--- /dev/null
+++ b/doc/sphinx/source/faq.rst
@@ -0,0 +1,141 @@
+.. _faq:
+
+Frequently Asked Questions
+**************************
+
+Is there a mailing list?
+========================
+
+Yes, you can subscribe to the ESMValTool user mailing list and join the discussion on general topics (installation, configuration, etc.). See :ref:`mailing-list`.
+
+What is YAML?
+=============
+
+While ``.yaml`` or ``.yml`` is a relatively common format, users may not have
+encountered this language before. The key information about this format is:
+
+- yaml is a human friendly data serialization language;
+- yaml is commonly used for configuration files (gradually replacing the
+  venerable ``.ini``);
+- the syntax is relatively straightforward;
+- indentation matters a lot (like ``Python``)!
+- yaml is case sensitive.
+
+More information can be found in the `yaml tutorial `_ and `yaml quick reference card `_. ESMValTool uses the `yamllint `_ linter tool to check recipe syntax.
+
+
+.. _rerunning:
+
+Re-running diagnostics
+======================
+
+If a diagnostic fails, you will get the message
+
+.. code:: bash
+
+   INFO To re-run this diagnostic script, run:
+
+If you run the command shown in the stdout, you will be able to re-run the
+diagnostic without having to re-run the whole preprocessor. If you add the ``-f``
+argument (available only for Python diagnostics, check your options with ``--help``),
+that will force an overwrite, and it will delete not just the failed diagnostic,
+but the contents of its ``work_dir`` and ``plot_dir`` directories; this is useful
+when the whole work needs to be redone. Adding ``-i`` or ``--ignore-existing`` will not
+delete any existing files, and it can be used to skip work that was already done
+successfully, provided that the diagnostic script supports this.
+
+
+Enter interactive mode with IPython
+===================================
+
+Sometimes it is useful to enter an interactive session to have a look at what's going on.
+
+Insert a single line in the code where you want to enter IPython:
+``import IPython; IPython.embed()``
+
+This is a useful functionality because it allows the user to `fix` things on the fly; after
+quitting the IPython console, code execution continues as normal.
+
+
+Using multiple configuration directories
+========================================
+
+By default, ESMValTool will read YAML configuration files from the user
+configuration directory ``~/.config/esmvaltool``, which can be changed with the
+``ESMVALTOOL_CONFIG_DIR`` environment variable.
+If required, users can specify the command line option ``--config_dir`` to
+select another configuration directory, which is read **in addition** to the
+user configuration directory.
+See the section on configuration :ref:`config_yaml_files` for details on this.
+
+
+Create a symbolic link to the latest output directory
+=====================================================
+
+When running the same recipe multiple times, the tool creates separate output directories
+stamped with the time at which they were created; after quite a few runs,
+it is not straightforward to detect which one is the `latest` output directory, so a symbolic
+link pointing to it makes things clearer, e.g.:
+
+.. code:: bash
+
+   recipe_example_20190905_163431
+   recipe_example_20190905_163519
+   recipe_example_latest -> recipe_example_20190905_163519
+
+
+You can achieve this by running the tool and then creating a symbolic link to the
+most recent output directory, so it gets updated at every re-run iteration:
+
+.. code:: bash
+
+   esmvaltool run recipe_example.yml; \
+   ln -sfT $(ls -1d ~/esmvaltool_output/recipe_example_* | tail -1) ~/esmvaltool_output/recipe_example_latest
+
+
+.. uncomment when the feature is merged into main
+.. # Running a dry run
+.. =================
+
+.. You can run in dry-run mode with
+
+.. .. code:: bash
+
+.. esmvaltool run recipe_xxx.yml --dry-run
+
+
+.. When activated, this mode will run through the data finding and CMOR checks and fixes
+.. and will highlight on screen and in `run/main_log.txt` every time certain data is
+.. missing or there are issues with the CMOR checks; note that no data is written
+.. to disk and no diagnostics are run; you don't have to modify your recipe in any
+.. way to have this mode run. The information provided will help you obtain any data
+.. that is missing and/or create fixes for the datasets and variables that failed the
+.. CMOR checks and could not be fixed on the fly.
+
+
+Can ESMValTool plot arbitrary model output?
+===========================================
+
+:ref:`recipe_model_evaluation` provides a set of recipes that can be used for a
+basic climate model evaluation with observational data.
+This is especially useful to get an overview of the general performance of a
+simulation.
+
+Furthermore, recipe :ref:`recipe_monitor` allows for the plotting of any
+preprocessed model data.
+The plotting parameters are set through a yaml configuration file, and the types
+of plots to be generated are determined in the recipe.
+
+Moreover, recipe :ref:`recipes_psyplot_diag` and the corresponding diagnostic
+:ref:`psyplot_diag.py ` provide a
+high-level interface to the `Psyplot `__ package
+which can be used to create a large variety of different plots.
+
+Similarly, recipe :ref:`recipes_seaborn_diag` and the corresponding diagnostic
+:ref:`seaborn_diag.py ` provide a
+high-level interface to the `Seaborn `__ package
+which can also be used to create a large variety of different plots.
+ +See also :ref:`general_purpose_diags`. diff --git a/doc/sphinx/source/figures/ESMValTool-logo-2-dark.png b/doc/sphinx/source/figures/ESMValTool-logo-2-dark.png new file mode 100644 index 0000000000..e120b2e731 Binary files /dev/null and b/doc/sphinx/source/figures/ESMValTool-logo-2-dark.png differ diff --git a/doc/sphinx/source/figures/ESMValTool-logo-2-glow.png b/doc/sphinx/source/figures/ESMValTool-logo-2-glow.png new file mode 100644 index 0000000000..14aef201ee Binary files /dev/null and b/doc/sphinx/source/figures/ESMValTool-logo-2-glow.png differ diff --git a/doc/sphinx/source/figures/ESMValTool-logo-2.pdf b/doc/sphinx/source/figures/ESMValTool-logo-2.pdf new file mode 100644 index 0000000000..faaa53123b Binary files /dev/null and b/doc/sphinx/source/figures/ESMValTool-logo-2.pdf differ diff --git a/doc/sphinx/source/figures/ESMValTool-logo-2.png b/doc/sphinx/source/figures/ESMValTool-logo-2.png new file mode 100644 index 0000000000..aaaa3578a5 Binary files /dev/null and b/doc/sphinx/source/figures/ESMValTool-logo-2.png differ diff --git a/doc/sphinx/source/figures/release-timeline.png b/doc/sphinx/source/figures/release-timeline.png new file mode 100644 index 0000000000..2b5ef6e0e6 Binary files /dev/null and b/doc/sphinx/source/figures/release-timeline.png differ diff --git a/doc/sphinx/source/figures/schematic.png b/doc/sphinx/source/figures/schematic.png index fce8f16c69..43167b3359 100644 Binary files a/doc/sphinx/source/figures/schematic.png and b/doc/sphinx/source/figures/schematic.png differ diff --git a/doc/sphinx/source/functionalities.rst b/doc/sphinx/source/functionalities.rst new file mode 100644 index 0000000000..0098d95ded --- /dev/null +++ b/doc/sphinx/source/functionalities.rst @@ -0,0 +1,118 @@ +What ESMValTool can do for you +****************************** + +The ESMValTool applies a great variety of standard diagnostics and +metrics, and produces a collection of netCDF and graphical files +(plots). Thus, the tool needs a certain amount of input from the user so +that it can: + +- establish the correct input and output parameters and the structured + workflow; +- acquire the correct data; +- execute the workflow; and +- output the desired collective data and media. + +To facilitate these four steps, the user has control over the tool via the +:ref:`configuration ` and the :ref:`recipe +`. The configuration sets +user and site-specific parameters (like input and output paths, desired +output graphical formats, logging level, etc.), whereas the recipe file +sets data, preprocessing and diagnostic-specific parameters (data +parameters grouped in the datasets sections, preprocessing steps for +various preprocessors sections, variables' parameters and +diagnostic-specific instructions grouped in the diagnostics sections). +The configuration file may be used for a very large number of runs with +very minimal changes since most of the parameters it sets are +recyclable; the recipe file can be used for a large number of +applications, since it may include as many datasets, preprocessors and +diagnostics sections as the user deems useful. + +Once the configuration files and the recipe are at hand, the user +can start the tool. A schematic overview of the ESMValTool workflow is +depicted in the figure below. + +.. container:: + :name: figarch + + .. figure:: figures/schematic.png + :alt: Schematic of the system architecture. + :figclass: align-center + + Schematic of the system architecture. 
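+
+For example, once the configuration and the recipe are in place, a minimal
+invocation that triggers this entire workflow is (using the example recipe
+shipped with the tool):
+
+.. code-block:: bash
+
+   esmvaltool run examples/recipe_python.yml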
+
+For a generalized run scenario, the tool will perform the following
+ordered procedures.
+
+Data finding
+------------
+
+- read the data requirements from the :ref:`datasets section
+  ` of the recipe and assemble the data request to
+  locate the data;
+- find the data using the specified root paths and DRS types in the
+  configuration file (note the flexibility allowed by the
+  :ref:`data finder
+  `);
+
+Data selection
+--------------
+
+- data selection is performed using the parameters specified in the
+  :ref:`datasets section ` (including e.g. type of
+  experiment, type of ensemble, time boundaries, etc.); data will be
+  retrieved and selected for each variable that is specified in the
+  :ref:`diagnostics ` section of the recipe;
+
+Data fixing
+-----------
+
+- the ESMValTool requires data to be in CMOR format; since errors in
+  the data formatting are not uncommon, the ESMValTool performs
+  :ref:`checks against the
+  CMOR library and fixes small irregularities ` (note that the degree of leniency is not
+  very high).
+
+Variable derivation
+-------------------
+
+- :ref:`variable derivation ` (in the
+  case of non CMOR-standard variables, most likely associated with
+  observational datasets) is performed automatically before running the
+  preprocessor;
+- if the variable definitions are already in the database, then the user
+  will just have to specify the variable to be derived in the
+  :ref:`diagnostics
+  ` section (as any other standard variable,
+  just setting ``derive: true``).
+
+Run the preprocessor
+--------------------
+
+- if any :ref:`preprocessor section ` is
+  specified in the recipe file, then data will be loaded in memory as
+  iris cubes and passed through the preprocessing steps required by the
+  user and specified in the preprocessor section, using the specific
+  preprocessing step parameters provided by the user as keys (for the
+  parameter name) and values (for the parameter value); the
+  preprocessing order is very important since a number of steps depend
+  on prior execution of other steps (e.g. :ref:`multimodel
+  statistics ` cannot be computed
+  unless all models are on a common grid, hence a prior
+  :ref:`regridding
+  ` on a common grid is necessary);
+  the order of the preprocessing steps can be customized by the user,
+  or the default order can be used;
+- once preprocessing has finished, the tool writes the data output to
+  disk as netCDF files so that the diagnostics can pick it up and use
+  it; the user will also be provided with a metadata file containing a
+  summary of the preprocessing and pointers to its output. Note that
+  writing data to disk between the preprocessing and the diagnostic
+  phase is required to ensure multi-language support for the latter.
+
+Run the diagnostics
+-------------------
+
+- the last and most important phase can now be run: using output files
+  from the preprocessor, the diagnostic scripts are executed using the
+  provided diagnostic parameters.
diff --git a/doc/sphinx/source/generate_gallery.py b/doc/sphinx/source/generate_gallery.py
new file mode 100644
index 0000000000..ec1aa6034a
--- /dev/null
+++ b/doc/sphinx/source/generate_gallery.py
@@ -0,0 +1,135 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""Create gallery with all available recipes."""
+
+import os
+
+RECIPE_DIR = 'recipes'
+OUT_PATH = os.path.abspath('gallery.rst')
+HEADER = ('.. DO NOT MODIFY! THIS PAGE IS AUTOGENERATED!\n\n'
+          '#######\nGallery\n#######\n\n'
+          'This section shows example plots produced by ESMValTool. 
For more ' + 'information, click on the footnote below the image. ' + 'A website displaying results produced with the latest release of ' + 'ESMValTool for all available recipes can be accessed `here ' + '`_.' + '\n\n') +WIDTH = ':width: 90%' +FIGURE_STR = '.. figure::' +IMAGE_STR = ' image:: ' +TABLE_SEP = ('+---------------------------------------------------' + '+---------------------------------------------------+\n') +EMPTY_TABLE = ('| ' + '| |\n') +CELL_WIDTH = 50 + + +def _get_figure_index(file_content): + """Get index of figure in text.""" + if FIGURE_STR in file_content: + return file_content.index(FIGURE_STR) + len(FIGURE_STR) + if IMAGE_STR in file_content: + return file_content.index(IMAGE_STR) + len(IMAGE_STR) + raise ValueError("File does not contain image") + + +def _get_next_row(filenames, file_contents): + """Get next row.""" + figure_idx = [_get_figure_index(content) for content in file_contents] + figure_paths = [ + file_contents[idx][fig_idx:].split('\n')[0].strip() + for (idx, fig_idx) in enumerate(figure_idx) + ] + subst = [ + '|{}|'.format(os.path.splitext(filename)[0]) for filename in filenames + ] + link = [file_contents[0].split()[1][1:-1]] + if figure_paths[1] == '': + subst[1] = '' + link.append('') + else: + link.append(file_contents[1].split()[1][1:-1]) + + # Build table + row = '' + refs = '' + row += TABLE_SEP + row += '| {}| {}|\n'.format(subst[0].ljust(CELL_WIDTH), + subst[1].ljust(CELL_WIDTH)) + row += EMPTY_TABLE + left_col = '[#]_'.ljust(CELL_WIDTH) + if figure_paths[1] == '': + right_col = ''.ljust(CELL_WIDTH) + else: + right_col = '[#]_'.ljust(CELL_WIDTH) + row += '| {}| {}|\n'.format(left_col, right_col) + + # Build refs + for (idx, path) in enumerate(figure_paths): + if path == '': + continue + refs += f'.. {subst[idx]} image:: {path}\n' + refs += f' {WIDTH}\n' + refs += '\n' + refs += f'.. 
[#] :ref:`{link[idx]}`\n' + refs += '\n' + + return (row, refs) + + +def main(): + """Generate gallery for recipe plots.""" + print(f"Generating gallery at {OUT_PATH}") + left_col = True + table = '' + refs = '' + filenames = [] + file_contents = [] + for filename in sorted(os.listdir(RECIPE_DIR)): + if not filename.startswith('recipe_'): + continue + if not filename.endswith('.rst'): + continue + with open(os.path.join(RECIPE_DIR, filename), 'r') as in_file: + recipe_file = in_file.read() + if (FIGURE_STR not in recipe_file and IMAGE_STR not in recipe_file): + print(f"INFO: {filename} does not contain an image, skipping") + continue + if not recipe_file.startswith('..'): + print(f"INFO: {filename} does not contain reference at top, " + "skipping") + continue + + # Get next row + if left_col: + left_col = False + filenames = [filename] + file_contents = [recipe_file] + continue + else: + left_col = True + filenames.append(filename) + file_contents.append(recipe_file) + new_row = _get_next_row(filenames, file_contents) + table += new_row[0] + refs += new_row[1] + + # Last row + if len(filenames) == 1: + filenames.append('') + file_contents.append(f'{FIGURE_STR}\n') + new_row = _get_next_row(filenames, file_contents) + table += new_row[0] + refs += new_row[1] + table += TABLE_SEP + table += '\n' + + # Write file + whole_file = HEADER + table + refs + with open(OUT_PATH, 'w') as out_file: + print(whole_file, file=out_file) + print(f"Wrote {OUT_PATH}") + + +if __name__ == '__main__': + main() diff --git a/doc/sphinx/source/gensidebar.py b/doc/sphinx/source/gensidebar.py new file mode 100644 index 0000000000..970722ff0a --- /dev/null +++ b/doc/sphinx/source/gensidebar.py @@ -0,0 +1,85 @@ +"""Generates sidebar/toctree. + +Generates the common sidebar/toctree for the sphinx/ReadTheDocs +documentation of the ESMValTool and its subprojects. +""" + +import os + + +def _write_if_changed(fname, contents): + """Write/update file only if changed.""" + try: + with open(fname, "r") as stream: + old_contents = stream.read() + except IOError: + old_contents = "" + + if old_contents != contents: + with open(fname, "w") as stream: + stream.write(contents) + + +def generate_sidebar(conf, conf_api): + """Generate sidebar. + + Generate sidebar for ReadTheDocs with links to subprojects and + superprojects accordingly. + """ + # determine 'latest' or 'stable' + # if not conf.do_gen: + do_gen = os.environ.get("SIDEBAR", None) == "1" or conf["on_rtd"] + + lines = ["", ".. DO NOT MODIFY! THIS PAGE IS AUTOGENERATED!", ""] + + def _toctree(): + lines.extend([".. toctree::", " :maxdepth: 1", ""]) + + def _endl(): + lines.append("") + + def _write(project, desc, link, mapping=conf['intersphinx_mapping']): + if project != conf_api: + if do_gen: + args = desc, mapping[project][0], link + lines.append(" %s <%s%s.html>" % args) + else: + args = desc, link + lines.append(" %s <%s>" % args) + + def _header(project, text): + if project == conf_api or do_gen: + lines.extend([".. 
toctree::", " :maxdepth: 2"]) + lines.extend([" :caption: %s" % text, ""]) + + # + # Specify the sidebar contents here + # + + _header("esmvaltool", "ESMValTool") + _write("esmvaltool", "Introduction", "introduction") + _write("esmvaltool", "ESMValTool Functionalities", "functionalities") + _write("esmvaltool", "Getting started", "quickstart/index") + _write("esmvaltool", "Gallery", "gallery") + _write("esmvaltool", "Available recipes", "recipes/index") + _write("esmvaltool", "Obtaining input data", "input") + _write("esmvaltool", "Making a recipe or diagnostic", "develop/index") + _write("esmvaltool", "Contributing to the community", "community/index") + _write("esmvaltool", "Utilities", "utils") + _write("esmvaltool", "Diagnostics API Reference", "api/esmvaltool") + _write("esmvaltool", "Frequently Asked Questions", "faq") + _write("esmvaltool", "Changelog", "changelog") + _endl() + + _header("esmvalcore", "ESMValCore") + _write("esmvalcore", "Getting started", "quickstart/index") + _write("esmvalcore", "Example notebooks", "example-notebooks") + _write("esmvalcore", "The recipe format", "recipe/index") + _write("esmvalcore", "Diagnostic script interfaces", "interfaces") + _write("esmvalcore", "Development", "develop/index") + _write("esmvalcore", "Contributing", "contributing") + _write("esmvalcore", "ESMValCore API Reference", "api/esmvalcore") + _write("esmvalcore", "Changelog", "changelog") + _endl() + + _write_if_changed("_sidebar.rst.inc", "\n".join(lines)) diff --git a/doc/sphinx/source/index.rst b/doc/sphinx/source/index.rst index 61052b9eea..136c2eba08 100644 --- a/doc/sphinx/source/index.rst +++ b/doc/sphinx/source/index.rst @@ -6,22 +6,31 @@ Welcome to ESMValTool's documentation! ====================================== -.. toctree:: - :maxdepth: 2 +To get a first impression of what ESMValTool and ESMValCore can do for you, +have a look at our blog posts +`Analysis-ready climate data with ESMValCore `_ +and +`ESMValTool: Recipes for solid climate science `_. - preface - known_issues +A tutorial is available on https://tutorial.esmvaltool.org. - recipes/index +A series of video lectures has been created by `ACCESS-NRI `_. +While these are tailored for ACCESS users, they are still very informative. - user_guide2/index - developer_guide2/index +.. raw:: html - codedoc2/esmvaltool + + +| + +For more detailed information, the documentation is available below. + +Get in touch! Contact information is available :ref:`here `. + +.. include:: _sidebar.rst.inc Indices and tables ================== * :ref:`genindex` * :ref:`search` - diff --git a/doc/sphinx/source/input.rst b/doc/sphinx/source/input.rst new file mode 100644 index 0000000000..fbc16b45ec --- /dev/null +++ b/doc/sphinx/source/input.rst @@ -0,0 +1,511 @@ +.. _inputdata: + +******************** +Obtaining input data +******************** + +ESMValTool supports input data from climate models participating in +`CMIP6 `__, +`CMIP5 `__, +`CMIP3 `__, and +`CORDEX `__ +as well as observations, reanalysis, and any other data, provided that it +adheres to the +`CF conventions `__ +and the data is described in a +`CMOR table `__ +as used in the various +`Climate Model Intercomparison Projects `__. + +.. _cordex_note: + +.. note:: + + CORDEX support is still + `work in progress `__. + Contributions, in the form of + :ref:`pull request reviews ` or + :ref:`pull requests ` + are most welcome. We are particularly interested in contributions from + people with good understanding of the CORDEX project and its standards. 
+
+This section provides an introduction to getting (access to) climate data
+for use with ESMValTool.
+
+Because the amount of data required by ESMValTool is typically large, it is
+recommended that you use the tool on a compute cluster where the data is
+already available, for example because it is connected to an
+`ESGF node `__.
+Examples of such compute clusters are
+`Levante `__
+and
+`Jasmin `__,
+but many more exist around the world.
+
+If you do not have access to such a facility through your institute or the
+project you are working on, you can request access by applying for the
+`ENES Climate Analytics Service `__
+or, if you need longer term access or more computational resources, the
+`IS-ENES3 Trans-national Access call `__.
+
+If the options above are not available to you, ESMValTool also offers a feature
+to make it easy to download CMIP6, CMIP5, CMIP3, CORDEX, and obs4MIPs from ESGF.
+ESMValTool also provides support for downloading some observational datasets from their sources.
+
+The chapter in the ESMValCore documentation on
+:ref:`finding data ` explains how to
+configure ESMValTool so it can find locally available data and/or
+download it from ESGF if it isn't available locally yet.
+
+
+.. _inputdata_models:
+
+Models
+======
+
+If you do not have access to a compute cluster with the data already mounted,
+ESMValTool can automatically download any required data that is available on
+ESGF.
+This is the recommended approach for first-time users to obtain some data for
+running ESMValTool.
+For example, run
+
+.. code-block:: bash
+
+   esmvaltool run --search_esgf=when_missing examples/recipe_python.yml
+
+to run the default example recipe and automatically download the required data
+to the directory ``~/climate_data``.
+The data only needs to be downloaded once; every following run will reuse
+previously downloaded data stored in this directory.
+See :ref:`esmvalcore:config-esgf` for a more in-depth explanation and the
+available configuration options.
+
+Alternatively, you can use an external tool called
+`Synda `__
+to maintain your own collection of ESGF data.
+
+
+.. _inputdata_observations:
+
+Observations
+============
+
+Observational and reanalysis products in the standard CF/CMOR format used in
+CMIP and required by ESMValTool are available via the obs4MIPs and ana4mips
+projects at the ESGF (e.g., https://esgf-data.dkrz.de/projects/esgf-dkrz/).
+Their use is strongly recommended when possible.
+
+Other datasets not available in these archives can be obtained by the user from
+the respective sources and reformatted to the CF/CMOR standard.
+ESMValTool currently supports two ways to perform this reformatting (aka
+'CMORization'):
+
+#. Using a CMORizer script: The first is to use a CMORizer script to generate a
+   local pool of reformatted data that can readily be used by ESMValTool. This
+   method is described in detail below.
+
+#. Using fixes for on-the-fly CMORization: The second way is to implement
+   specific :ref:`'fixes' ` for your dataset. In that
+   case, the reformatting is performed 'on the fly' during the execution of an
+   ESMValTool recipe (note that one of the first preprocessor tasks is 'CMOR
+   checks and fixes'). Details on this second method are given at the
+   :ref:`end of this chapter `.
+
+A collection of readily CMORized OBS and OBS6 datasets can be accessed directly on CEDA/JASMIN and DKRZ.
At CEDA/JASMIN,
+the OBS and OBS6 data are stored in the `esmeval` Group Workspace (GWS).
+To be granted read (and execute) permissions to the GWS, one must apply at
+https://accounts.jasmin.ac.uk/services/group_workspaces/esmeval/ .
+After permission has been granted, the user is encouraged to use the data locally,
+and not move it elsewhere, to minimize both data transfers and stale disk usage.
+Note that Tier 3 data is subject to data protection restrictions. For further
+inquiries, contact the GWS administrator,
+`Valeriu Predoi `_.
+
+Using a CMORizer script
+-----------------------
+
+ESMValTool comes with a set of CMORizers readily available.
+The CMORizers are dataset-specific scripts that can be run once to generate a
+local pool of CMOR-compliant data.
+The necessary information to download and process the data is provided in the
+header of each CMORizing script.
+These scripts also serve as templates to create new CMORizers for datasets not
+yet included.
+Note that datasets CMORized for ESMValTool v1 may not work with v2, due
+to the much stronger constraints on metadata set by the iris library.
+
+ESMValTool provides the ``esmvaltool data`` command line tool, which can be
+used to download and format datasets.
+
+To list the available commands, run
+
+.. code-block:: bash
+
+   esmvaltool data --help
+
+It is also possible to get help on specific commands, e.g.
+
+.. code-block:: bash
+
+   esmvaltool data download --help
+
+The list of datasets supported by ESMValTool through a CMORizer script can be
+obtained with:
+
+.. code-block:: bash
+
+   esmvaltool data list
+
+Datasets for which auto-download is supported can be downloaded with:
+
+.. code-block:: bash
+
+   esmvaltool data download --config_file [CONFIG_FILE] [DATASET_LIST]
+
+Note that all Tier3 and some Tier2 datasets for which auto-download is supported
+require authentication. In such cases, enter your credentials in your
+``~/.netrc`` file as explained
+`here `_.
+
+An entry in the ``~/.netrc`` file should look like:
+
+.. code-block:: bash
+
+   machine [server_name] login [user_name] password [password]
+
+Make sure that the permissions of the ``~/.netrc`` file are set so only you and administrators
+can read it, i.e.
+
+.. code-block:: bash
+
+   chmod 600 ~/.netrc
+   ls -l ~/.netrc
+
+The latter command should show ``-rw-------``.
+
+For other datasets, downloading instructions can be obtained with:
+
+.. code-block:: bash
+
+   esmvaltool data info [DATASET]
+
+To CMORize one or more datasets, run:
+
+.. code-block:: bash
+
+   esmvaltool data format --config_file [CONFIG_FILE] [DATASET_LIST]
+
+The ``rootpath`` to the raw data to be CMORized must be specified in the
+:ref:`configuration ` as ``RAWOBS``.
+Within this path, the data are expected to be organized in subdirectories
+corresponding to the data tier: Tier2 for freely-available datasets (other than
+obs4MIPs and ana4mips) and Tier3 for restricted datasets (i.e., datasets which
+require registration to be retrieved, or are provided upon request by the
+respective contact or PI).
+The CMORization follows the `CMIP5 CMOR tables
+`_ or `CMIP6 CMOR tables
+`_ for the OBS and OBS6 projects,
+respectively.
+The resulting output is saved in the ``output_dir``, again following the Tier
+structure.
+The output file names follow the definition given in :ref:`config-developer
+file ` for the ``OBS`` project:
+
+..
code-block:: + + [project]_[dataset]_[type]_[version]_[mip]_[short_name]_YYYYMM_YYYYMM.nc + +where ``project`` may be OBS (CMIP5 format) or OBS6 (CMIP6 format), ``type`` +may be ``sat`` (satellite data), ``reanaly`` (reanalysis data), +``ground`` (ground observations), ``clim`` (derived climatologies), +``campaign`` (aircraft campaign). + +At the moment, ``esmvaltool data format`` supports Python and NCL scripts. + +.. _supported_datasets: + +Supported datasets for which a CMORizer script is available +----------------------------------------------------------- + +A list of the datasets for which a CMORizers is available is provided in the following table. + +.. tabularcolumns:: |p{3cm}|p{6cm}|p{3cm}|p{3cm}| + ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| Dataset | Variables (MIP) | Tier | Script language | ++==============================+======================================================================================================+======+=================+ +| AERONET | od440aer, od550aer, od870aer (AERmon) | 3 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| AGCD | pr (Amon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| ANU Climate | pr, tas, tasmin, tasmax (Amon) | 3 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| APHRO-MA | pr, tas (day), pr, tas (Amon) | 3 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| AURA-TES | tro3 (Amon) | 3 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| BerkelyEarth | tas, tasa (Amon), sftlf (fx) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| CALIPSO-GOCCP | clcalipso (cfMon) | 2 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| CALIPSO-ICECLOUD | cli (AMon) | 3 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| CDS-SATELLITE-ALBEDO | bdalb (Lmon), bhalb (Lmon) | 3 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| CDS-SATELLITE-LAI-FAPAR | fapar (Lmon), lai (Lmon) | 3 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| CDS-SATELLITE-SOIL-MOISTURE | sm (day), sm (Lmon) | 3 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| CDS-UERRA | sm (E6hr) | 3 | Python | 
++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| CDS-XCH4 | xch4 (Amon) | 3 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| CDS-XCO2 | xco2 (Amon) | 3 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| CERES-EBAF | rlut, rlutcs, rsut, rsutcs (Amon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| CERES-SYN1deg | rlds, rldscs, rlus, rluscs, rlut, rlutcs, rsds, rsdscs, rsus, rsuscs, rsut, rsutcs (3hr) | 3 | NCL | +| | rlds, rldscs, rlus, rlut, rlutcs, rsds, rsdt, rsus, rsut, rsutcs (Amon) | | | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| CLARA-AVHRR | clt, clivi, clwvi, lwp (Amon) | 3 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| CLOUDSAT-L2 | clw, clivi, clwvi, lwp (Amon) | 3 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| CMAP | pr (Amon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| CowtanWay | tasa (Amon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| CRU | tas, tasmin, tasmax, pr, clt (Amon), evspsblpot (Emon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| CT2019 | co2s (Amon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| Duveiller2018 | albDiffiTr13 | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| E-OBS | tas, tasmin, tasmax, pr, psl (day, Amon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| Eppley-VGPM-MODIS | intpp (Omon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| ERA5 [#note1]_ | cl, clt, evspsbl, evspsblpot, mrro, pr, prsn, ps, psl, ptype, rls, rlds, rlns, rlus [#note2]_, rsds, | 3 | n/a | +| | rsns, rsus [#note2]_, rsdt, rss, uas, vas, tas, tasmax, tasmin, tdps, ts, tsn (E1hr/Amon), orog (fx) | | | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| ERA5-Land [#note1]_ | pr | 3 | n/a | 
++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| ERA-Interim | cl, cli, clivi, clt, clw, clwvi, evspsbl, hfds, hur, hus, lwp, orog, pr, prsn, prw, ps, psl, rlds, | 3 | Python | +| | rlut, rlutcs, rsds, rsdt, rss, rsut, rsutcs, sftlf, ta, tas, tasmax, tasmin, tauu, tauv, tdps, tos, | | | +| | ts, tsn, ua, uas, va, vas, wap, zg | | | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| ERA-Interim-Land | sm (Lmon) | 3 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| ESACCI-AEROSOL | abs550aer, od550aer, od550aerStderr, od550lt1aer, od870aer, od870aerStderr (aero) | 2 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| ESACCI-CLOUD | clivi, clt, cltStderr, clwvi, lwp, rlut, rlutcs, rsut, rsutcs, rsdt, rlus, rsus, rsuscs (Amon) | 2 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| ESACCI-FIRE | burntArea (Lmon) | 2 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| ESACCI-LANDCOVER v1.6.1 | baresoilFrac, cropFrac, grassFrac, shrubFrac, treeFrac (Lmon) | 2 | NCL | +| | | | (CMORizer | +| | | | available until | +| | | | ESMValTool | +| | | | v2.11.0) | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| ESACCI-LANDCOVER v2.0.8 | baresoilFrac, cropFrac, grassFrac, shrubFrac, treeFrac (Lmon, frequency=yr) | 2 | Python | +| | | | (CMORizer | +| | | | available since | +| | | | ESMValTool | +| | | | v2.12.0) | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| ESACCI-LST | ts (Amon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| ESACCI-OC | chl (Omon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| ESACCI-OZONE | toz, tozStderr, tro3prof, tro3profStderr (Amon) | 2 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| ESACCI-SEA-SURFACE-SALINITY | sos (Omon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| ESACCI-SOILMOISTURE | sm (Eday, Lmon), smStderr (Eday) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| ESACCI-SST | ts, tsStderr (Amon) | 2 | NCL | 
++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| ESACCI-WATERVAPOUR | prw (Amon) | 3 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| ESDC | tas, tasmax, tasmin (Amon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| ESRL | co2s (Amon) | 2 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| FLUXCOM | gpp (Lmon) | 3 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| GCP2018 | fgco2 (Omon [#note3]_), nbp (Lmon [#note3]_) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| GCP2020 | fgco2 (Omon [#note3]_), nbp (Lmon [#note3]_) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| GHCN | pr (Amon) | 2 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| GHCN-CAMS | tas (Amon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| GISTEMP | tasa (Amon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| GLODAP | dissic, ph, talk (Oyr) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| GPCC | pr (Amon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| GPCP-SG | pr (Amon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| GRACE | lweGrace (Lmon) | 3 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| HadCRUT3 | tas, tasa (Amon) | 2 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| HadCRUT4 | tas, tasa (Amon), tasConf5, tasConf95 | 2 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| HadCRUT5 | tas, tasa (Amon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| HadISST | sic (OImon), tos (Omon), ts (Amon) 
| 2 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| HALOE | tro3, hus (Amon) | 2 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| HWSD | cSoil (Lmon), areacella (fx), sftlf (fx) | 3 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| ISCCP-FH | alb, prw, ps, rlds, rlus, rlut, rlutcs, rsds, rsdt, rsus, rsut, rsutcs, tas, ts (Amon) | 2 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| JMA-TRANSCOM | nbp (Lmon), fgco2 (Omon) | 3 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| JRA-25 | clt, hus, prw, rlut, rlutcs, rsut, rsutcs (Amon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| JRA-55 | cli, clivi, clw, clwvi, clt, prw, rlus, rlut, rlutcs, rsus, rsuscs, rsut, rsutcs, ta, tas, wap (Amon)| 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| Kadow2020 | tasa (Amon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| LAI3g | lai (Lmon) | 3 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| LandFlux-EVAL | et, etStderr (Lmon) | 3 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| Landschuetzer2016 | dpco2, fgco2, spco2 (Omon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| Landschuetzer2020 | spco2 (Omon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| MAC-LWP | lwp, lwpStderr (Amon) | 3 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| MERRA | cli, clivi, clt, clw, clwvi, hur, hus, lwp, pr, prw, ps, psl, rlut, rlutcs, rsdt, rsut, rsutcs, ta, | 3 | NCL | +| | tas, ts, ua, va, wap, zg (Amon) | | | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| MERRA2 | sm (Lmon) | 3 | Python | +| | clt, pr, evspsbl, hfss, hfls, huss, prc, prsn, prw, ps, psl, rlds, rldscs, rlus, rlut, rlutcs, rsds, | | | +| | rsdscs, rsdt, tas, tasmin, tasmax, tauu, tauv, ts, uas, vas, rsus, rsuscs, rsut, rsutcs, ta, ua, va, | | | +| | tro3, zg, hus, wap, hur, cl, clw, cli, clwvi, clivi (Amon) | | | 
++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| MLS-AURA | hur, hurStderr (day) | 3 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| MOBO-DIC_MPIM | dissic (Omon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| MOBO-DIC2004-2019 | dissic (Omon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| MODIS | cliwi, clt, clwvi, iwpStderr, lwpStderr (Amon), od550aer (aero) | 3 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| MSWEP [#note1]_ | pr | 3 | n/a | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| MTE | gpp, gppStderr (Lmon) | 3 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| NCEP-NCAR-R1 | clt, hur, hurs, hus, pr, prw, psl, rlut, rlutcs, rsut, rsutcs, sfcWind, ta, tas, | 2 | Python | +| | tasmax, tasmin, ts, ua, va, wap, zg (Amon) | | | +| | pr, rlut, ua, va (day) | | | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| NCEP-DOE-R2 | clt, hur, prw, ta, wap, pr, tauu, tauv, tos (Amon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| NDP | cVeg (Lmon) | 3 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| NIWA-BS | toz, tozStderr (Amon) | 3 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| NOAA-CIRES-20CR-V2 | clt, clwvi, hus, prw, rlut, rsut, pr, tauu, tauv (Amon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| NOAA-CIRES-20CR-V3 | clt, clwvi, hus, prw, rlut, rlutcs, rsut, rsutcs (Amon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| NOAA-ERSSTv3b | tos (Omon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| NOAA-ERSSTv5 | tos (Omon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| NOAA-MBL-CH4 | ch4s (Amon) | 2 | Python | 
++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| NOAAGlobalTemp | tasa (Amon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| NSIDC-0116-[nh|sh] [#note4]_ | usi, vsi (day) | 3 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| NSIDC-g02202-[sh] | siconc (SImon) | 3 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| OceanSODA-ETHZ | areacello (Ofx), co3os, dissicos, fgco2, phos, spco2, talkos (Omon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| OSI-450-[nh|sh] | sic (OImon), sic (day) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| PATMOS-x | clt (Amon) | 2 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| PERSIANN-CDR | pr (Amon), pr (day) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| PHC | thetao, so (Omon [#note3]_) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| PIOMAS | sit (day) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| REGEN | pr (day, Amon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| Scripps-CO2-KUM | co2s (Amon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| TCOM-CH4 | ch4 (Amon [#note3]_) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| TCOM-N2O | n2o (Amon [#note3]_) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| UWisc | clwvi, lwpStderr (Amon) | 3 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| WFDE5 | tas, pr (Amon, day) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| WOA | thetao, so, tos, sos (Omon) | 2 | Python | +| | no3, o2, po4, si (Oyr) | | | 
++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+
+
+.. [#note1] CMORization is built into ESMValTool through the native6 project,
+   so there is no separate CMORizer script.
+
+.. [#note2] Derived on the fly from down & net radiation.
+
+.. [#note3] The frequency of this variable differs from the one specified in
+   the table. The correct entry that needs to be used in the recipe
+   can be found in the corresponding section of `recipe_check_obs.yml
+   `__.
+
+.. [#note4] The CMORizer requires PROJ>=9.3. Previous versions of PROJ will
+   return an error:
+   ``Internal Proj Error: proj_create: unhandled axis direction: UNKNOWN)``
+   You can check the version of PROJ in your conda environment by running:
+   ``conda list PROJ``.
+
+.. _inputdata_native_datasets:
+
+Datasets in native format
+=========================
+
+ESMValCore also provides support for some datasets in their native format.
+In this case, the steps needed to reformat the data are executed as dataset
+fixes during the execution of an ESMValTool recipe, as one of the first
+preprocessor steps; see :ref:`fixing data `.
+Compared to the workflow described above, this has the advantage that the user
+does not need to store a duplicate (CMORized) copy of the data.
+Instead, the CMORization is performed 'on the fly' when running a recipe.
+Native datasets can be hosted either under a dedicated project (usually done
+for native model output) or under project ``native6`` (usually done for native
+reanalysis/observational products).
+These projects are configured in the :ref:`config-developer file
+`.
+
+A list of all currently supported native datasets is :ref:`provided here
+`.
+A detailed description of how to include new native datasets is given
+:ref:`here `.
+
+To use this functionality, users need to provide a ``rootpath`` in the
+:ref:`configuration ` for the ``native6`` project data
+and/or the dedicated project used for the native dataset, e.g., ``ICON``.
+Then, in the recipe, they can refer to those projects.
+For example:
+
+.. code-block:: yaml
+
+    datasets:
+      - {project: native6, dataset: ERA5, type: reanaly, version: v1, tier: 3, start_year: 1990, end_year: 1990}
+      - {project: ICON, dataset: ICON, exp: icon-2.6.1_atm_amip_R2B5_r1i1p1f1, mip: Amon, short_name: tas, start_year: 2000, end_year: 2014}
+
+For project ``native6``, more examples can be found in the diagnostics
+``ERA5_native6`` in the recipe `examples/recipe_check_obs.yml
+`_.
diff --git a/doc/sphinx/source/introduction.rst b/doc/sphinx/source/introduction.rst
new file mode 100644
index 0000000000..5802f1689d
--- /dev/null
+++ b/doc/sphinx/source/introduction.rst
@@ -0,0 +1,156 @@
+Introduction
+************
+
+About
+=====
+
+The Earth System Model Evaluation Tool (ESMValTool) is a
+community-developed tool that aims at improving the diagnosis and
+understanding of the causes and effects of model biases and inter-model
+spread. The ESMValTool is open to both users and developers, encouraging
+open exchange of diagnostic source code and evaluation results from the
+Coupled Model Intercomparison Project (CMIP) ensemble. This will
+facilitate and improve ESM evaluation beyond the state-of-the-art and
+aims at supporting the activities within CMIP and at individual
+modelling centers. We envisage running the ESMValTool routinely on the
+CMIP model output, utilizing observations available through the Earth
+System Grid Federation (ESGF) in standard formats (obs4MIPs) or made
+available at ESGF nodes.
+
+The goal is to develop a benchmarking and evaluation tool that produces
+well-established analyses as soon as model output from CMIP simulations
+becomes available, e.g., at one of the central repositories of the ESGF.
+This is realized through standard recipes that reproduce a certain set
+of diagnostics and performance metrics that have demonstrated their
+importance in benchmarking Earth System Models (ESMs) in a paper or
+assessment report, such as Chapter 9 of the Intergovernmental Panel on
+Climate Change (IPCC) Fifth Assessment Report (AR5) (Flato et al.,
+2013). The expectation is that in this way a routine and systematic
+evaluation of model results can be made more efficient, thereby enabling
+scientists to focus on developing more innovative methods of analysis
+rather than constantly having to "reinvent the wheel".
+
+In parallel to the standardization of model output, the ESGF also hosts
+observations for Model Intercomparison Projects (obs4MIPs) and
+reanalysis data (ana4MIPs). obs4MIPs provides open access data sets of
+satellite data that are comparable in terms of variables, temporal and
+spatial frequency, and periods to CMIP model output (Taylor et al.,
+2012). The ESMValTool utilizes these observations and reanalyses from
+ana4MIPs, plus additionally available observations, in order to evaluate
+the models' performance. In many diagnostics and metrics, more than one
+observational data set or meteorological reanalysis is used to assess
+uncertainties in observations.
+
+The main idea of the ESMValTool is to provide a broad suite of
+diagnostics which can be performed easily when new model simulations are
+run. The suite of diagnostics needs to be broad enough to reflect the
+diversity and complexity of Earth System Models, but must also be robust
+enough to be run routinely or semi-operationally. In order to address
+these challenging objectives, the ESMValTool is conceived as a framework
+that allows community contributions to be bound into a coherent package.
+
+.. _Support-and-Contact:
+
+Support
+=======
+
+Support for ESMValTool can be found on the `ESMValTool Discussions page
+`__,
+where users can open an issue and a member of the `User Engagement Team
+`_ of ESMValTool
+will reply as soon as possible.
+This is open for all general and technical questions on the ESMValTool:
+installation, application, development, or any other question or comment
+you may have.
+
+.. _mailing-list:
+
+User mailing list
+-----------------
+
+Subscribe to the ESMValTool announcements mailing list
+`esmvaltool@listserv.dfn.de `__
+to stay up to date about new releases, monthly online meetings, upcoming
+workshops, and training events.
+
+To subscribe, send an email to
+`sympa@listserv.dfn.de `_
+with the following subject line:
+
+- *subscribe esmvaltool*
+
+or
+
+- *subscribe esmvaltool YOUR_FIRSTNAME YOUR_LASTNAME*
+
+The mailing list also has a `public archive `_ online.
+
+.. _monthly-meetings:
+
+Monthly meetings
+----------------
+
+We have monthly online meetings using `zoom `__; anyone with
+an interest in the ESMValTool is welcome to join these meetings to connect
+with the community.
+These meetings are always announced in a discussion
+on the `ESMValTool Community `_
+repository and on the mailing-list_.
+
+.. _core-team:
+
+Core development team
+---------------------
+
+- Deutsches Zentrum für Luft- und Raumfahrt (DLR), Institut für Physik
+  der Atmosphäre, Germany (Co-PI)
+
+  - ESMValTool Core Co-PI and Developer: contact for requests to use the
+    ESMValTool, for collaboration with the development team, and for access
+    to the PRIVATE GitHub repository.
+
+- Met Office, United Kingdom (Co-PI)
+- Alfred Wegener Institute (AWI) Bremerhaven, Germany
+- Barcelona Supercomputing Center (BSC), Spain
+- Netherlands eScience Center (NLeSC), The Netherlands
+- Ludwig Maximilian University of Munich, Germany
+- Plymouth Marine Laboratory (PML), United Kingdom
+- Swedish Meteorological and Hydrological Institute (SMHI), Sweden
+- University of Bremen, Germany
+- University of Reading, United Kingdom
+
+Recipes and diagnostics
+-----------------------
+
+Contacts for specific diagnostic sets are the respective authors, as
+listed in the corresponding :ref:`recipe and diagnostic documentation`
+and in the source code.
+
+
+License
+=======
+
+The ESMValTool is released under the Apache License, version 2.0.
+Citation of the ESMValTool paper ("Software Documentation Paper") is
+kindly requested upon use, along with the software DOIs for
+ESMValTool
+(`doi:10.5281/zenodo.3401363 `__)
+and ESMValCore
+(`doi:10.5281/zenodo.3387139 `__)
+and the version number:
+
+- Righi, M., Andela, B., Eyring, V., Lauer, A., Predoi, V., Schlund,
+  M., Vegas-Regidor, J., Bock, L., Brötz, B., de Mora, L., Diblen, F.,
+  Dreyer, L., Drost, N., Earnshaw, P., Hassler, B., Koldunov, N.,
+  Little, B., Loosveldt Tomas, S., and Zimmermann, K.: Earth System
+  Model Evaluation Tool (ESMValTool) v2.0 – technical overview, Geosci.
+  Model Dev., 13, 1179–1199, https://doi.org/10.5194/gmd-13-1179-2020,
+  2020.
+
+Besides the above citation, users are kindly asked to register any
+journal articles (or other scientific documents) that use the software
+at the ESMValTool webpage (http://www.esmvaltool.org/). Citing the
+Software Documentation Paper and registering your paper(s) will serve to
+document the scientific impact of the Software, which is of vital
+importance for securing future funding. You should consider this an
+obligation if you have taken advantage of the ESMValTool, which
+represents the end product of considerable effort by the development
+team.
diff --git a/doc/sphinx/source/known_issues.rst b/doc/sphinx/source/known_issues.rst
deleted file mode 100644
index e479a57a07..0000000000
--- a/doc/sphinx/source/known_issues.rst
+++ /dev/null
@@ -1,5 +0,0 @@
-Known issues
-------------
-
-
-Please see our bug and feature request tracker on `github `_.
diff --git a/doc/sphinx/source/preface.rst b/doc/sphinx/source/preface.rst
deleted file mode 100644
index 43ac641ec7..0000000000
--- a/doc/sphinx/source/preface.rst
+++ /dev/null
@@ -1,11 +0,0 @@
-Preface
--------
-
-This user's and developer's guide consists of parts targeting two overlapping categories of scientists working with the Earth System Model Evaluation Tool (ESMValTool):
-
-1. Part I: User's Guide: this part gives an introduction to the ESMValTool including installation, running the ESMValTool, and available user settings of existing diagnostics and performance metrics. The target group would typically consist of scientists mostly interested in running the ESMValTool as provided either on CMIP model simulations or on simulations performed with other Earth system models, and on observations.
An overview on the available diagnostics and metrics packages including a description of the user settings and example plots can be found in Annex C (to be added for v2). -2. Part II: Developer's Guide: this part gives additional technical details on the ESMValTool not necessarily needed to apply the ESMValTool as well as an introduction to implementing new variables and new diagnostics. This part is mostly intended for scientists interested in technical details as well as in contributing to the development of the ESMValTool by adding new nameslists and code for additional diagnostics or performance metrics. - -For the developer's guide (Part II), it is assumed that the user/developer is already familiar with the ESMValTool framework introduced in part I. - -**Please report problems and bugs to the ESMValTool Core Development Team (**:ref:`core_dev_team` **and http://www.esmvaltool.org/). Thank you!** diff --git a/doc/sphinx/source/process_ncl_docs2.py b/doc/sphinx/source/process_ncl_docs2.py deleted file mode 100644 index 006ae4c032..0000000000 --- a/doc/sphinx/source/process_ncl_docs2.py +++ /dev/null @@ -1,282 +0,0 @@ -""" -This script is part of the ESMValTool distribution. It's been added as part of -the incorporation of the Sphinx documentation generator. Sphinx was originally -developed for documenting Python code, and one of its features is that it is -able - using the so-called autodoc extension - to extract documentation strings -from Python source files and use them in the documentation it generates. - -The autodoc feature apparently does not exist for NCL source files (such as -those which are used in ESMValTool), but it has been mimicked -(or - more-or-less - reverse-engineered) here via this script, which walks -through a subset of the ESMValTool NCL scripts, extracts function names, -argument lists and descriptions (from the comments immediately following the -function definition), and assembles them in a subdirectory of -doc/sphinx/source. These output files are in the so-called reStructuredText -format (see, e.g., http://docutils.sourceforge.net/rst.html), which is the -markup language used by Sphinx; running make in doc/sphinx builds the -ESMValTool documentation from them, as noted above. - -Created on July 14, 2015 - -@author: jeremy.walton@metoffice.gov.uk -""" - -import os -import glob -import re -import string -import collections - - -def make_param_details(params): - """ - Create a list of parameter names and types from the params string. - :param params: - :return: - """ - - # We'll store the parameter names and their types in a dictionary. - # Note that it has to be an ordered dictionary, because later on we want - # to pull the entries out in the same order - # that we added them. - param_details = collections.OrderedDict() - for param in params: - - # Extract the type if it's specified, - # otherwise default to integer (say). - if ':' in param: - [pname, ptype] = param.split(':') - else: - pname = param - ptype = 'integer' - - # If the parameter is an array, - # we only want its name in the description. - pname = pname.split('[')[0] - pname = pname.strip() - - # Tie the name and the type of the parameter together. - param_details[pname] = ptype - - return param_details - - -def process_params(params, inp, oup): - """ - Extract the parameter names and types from the params string, pull their - descriptions out from the input file and reformat the lot in the output. - """ - # Get the names and types. 
- param_details = make_param_details(params) - - # We assume we're at the line before the first parameter description. - # Bump it, then check to see if we're really at the right location and - # issue a warning if not. - line = next(inp) - param_keys = list(param_details.keys()) - if param_keys[0] not in line: - print("Warning - parameter " + param_keys[0] + - " not found in this line:\n" + line) - - # Want a blank line just before parameter descriptions. - oup.write('\n') - - # Loop over all parameters in the argument list. - for i, pname in enumerate(param_keys): - - # Now assemble the description from the line(s). - if pname in line: - - # Get the text in the line which follows the first occurrence - # (reading from the left) of the parameter name, then strip - # trailing spaces (including the CR). - pdesc = line.split(pname, 1)[1] - pdesc = pdesc.rstrip() - - # The description could continue on the following lines, which - # need to be concatenated together. For all except the last - # parameter, the end of the description is signaled by the name of - # the next parameter. For the last (or maybe the only) parameter, - # it's signaled by a blank line. - line = next(inp) - if i < len(param_keys)-1: - pnext = param_keys[i + 1] - if pnext not in line: - # Do the concatenation, stripping whitespace - # (including the CR) as we go. - while pnext not in line: - pdesc += " " + line.replace(';;', ' ', 1).strip() - line = next(inp) - else: - while not line.replace(';;', ' ', 1).isspace(): - pdesc += " " + line.replace(';;', ' ', 1).strip() - line = next(inp) - - # Ensure the description starts with a colon. - if pdesc[0] != ':': - pdesc = ':' + pdesc - - # Write out the complete description of this parameter. - oup.write(' :param ' + param_details[pname] + ' ' - + pname + pdesc + '\n') - - # Want a blank line just after parameter descriptions. - oup.write('\n') - - -def find_argument(inp): - """ - Find the start of the Arguments list. - """ - - line = next(inp) - count = 1 - while 'Arguments' not in line: - line = next(inp) - - # We assume we're going to find this within two lines of the original - # location of the input - # - stop looking if we don't. - count += 1 - if count > 2: - return False - - return True - - -def parse_file(in_filename, out_filename): - """ - Processes an ncl file and produces an rst file as output, which contains - documentation of the ncl functions in a form suitable for input to - the Sphinx documentation generator. - :param in_filename: - :param out_filename: - :return: - """ - - # Open the files. - try: - inp = open(in_filename, "r") - except IOError: - print("Couldn't open", in_filename) - return - - try: - oup = open(out_filename, "w") - except IOError: - print("Couldn't open", out_filename) - return - - # We assume the file name has the form /path/to/foo.ncl, and the - # module name is foo. Pull it out, and write it to the output file - # as the title. - mod_name = os.path.splitext(os.path.basename(in_filename))[0] - - oup.write(':mod:' + '`' + mod_name + '`' + '\n') - oup.write("=" * (7+len(mod_name)) + '\n') - - for line in inp: - - # Is this the start of a function? - if re.match('^function', line) or re.match('^procedure', line): - - # The function could have parameters on the following lines. - # Concatenate them up until the closing bracket, stripping - # whitespace (including the CR) as we go. 
- fname = line.rstrip() - while ')' not in fname: - line = next(inp) - fname += " " + line.strip() - - # Some ncl files have backslashes in the function declaration to - # indicate continuation to the next line (even though this isn't - # necessary in ncl). These will mess up our processing of - # the argument list, and don't look good in the doc. so we pull - # them out here. - fname = fname.replace('\\', '') - - # Write the line out from the word 'function' onwards, and suitably - # decorated for rst. Need the CR at the end, as we've been pulling - # that off throughout the assembly of this line. - oup.write('.. function:: ' + fname[len('function')+1:] + '\n') - - # Now extract the list of parameters from the function declaration. - # First, pullout the text between the brackets, then split that - # into individual parameter names. - plist = fname.split('(')[1].split(')')[0] - params = plist.split(',') - - # Position the input just after the line containing 'Arguments'. - if not find_argument(inp): - print("Warning - argument list not found for " + fname) - else: - - # Here's where we check whether this function has any - # parameters. If it doesn't, then we don't need to - # process any. - if len(plist) > 0: - # Read the parameter descriptions and reformat them - # before writing them out. - process_params(params, inp, oup) - - # We assume the first batch of comments immediately following - # the function arepart of the documentation. - line = next(inp) - while re.match('^;;', line): - - # Write out this line, replacing the comments with spaces. - oup.write(line.replace(';;', ' ', 1)) - line = next(inp) - - # Close the files. - inp.close() - oup.close() - - -def create_doc_files_from_ncl(): - # Do some rudimentary checking of where this script is being run from, - # because we're going to be using relative paths below to find the - # directories containing the input & output. - file_path = os.path.dirname(os.path.realpath(__file__)) - esmval_root_folder = os.path.abspath(os.path.join(file_path, '..', '..', - '..')) - - # List the directories containing input files, then loop over them. - ncl_folders = {'diag_scripts': 'esmvaltool/diag_scripts/lib/ncl', - 'plot_scripts': 'esmvaltool/plot_scripts/ncl'} - for ncl_folder in ncl_folders: - in_dir = os.path.join(esmval_root_folder, ncl_folders[ncl_folder]) - # Form the output directory name from the input directory name - # (NB we assume the latter are all named ../../../foo/bar, where foo - # is the useful part of the name. - out_dir = os.path.join(esmval_root_folder, "doc/sphinx/source/", - ncl_folder) - if not os.path.isdir(out_dir): - os.makedirs(out_dir) - - # Find all the ncl files in the input directory, and loop over them. - in_files = glob.glob(os.path.join(in_dir, '*.ncl')) - index_file = open(os.path.join(out_dir, 'index.rst'), 'w') - write_index_header(index_file, ncl_folder) - - for nclFile in in_files: - print("Processing " + nclFile) - rst_filename = os.path.basename(nclFile).replace('.ncl', '.rst') - rst_file = os.path.join(out_dir, rst_filename) - parse_file(nclFile, rst_file) - index_file.write(' ') - index_file.write(os.path.basename(nclFile).replace('.ncl', '')) - index_file.write('\n') - - -def write_index_header(index_file, ncl_folder): - index_file.write(ncl_folder.upper()) - index_file.write('\n') - index_file.write('-' * len(ncl_folder)) - index_file.write('\n') - index_file.write('\n') - index_file.write('.. 
toctree::\n :maxdepth: 2\n\n')
-
-
-if __name__ == '__main__':
-    create_doc_files_from_ncl()
diff --git a/doc/sphinx/source/quickstart/configuration.rst b/doc/sphinx/source/quickstart/configuration.rst
new file mode 100644
index 0000000000..9cea6413b6
--- /dev/null
+++ b/doc/sphinx/source/quickstart/configuration.rst
@@ -0,0 +1,29 @@
+.. _config:
+
+*************
+Configuration
+*************
+
+The ``esmvaltool`` command is provided by the ESMValCore package; the
+documentation on configuring ESMValCore can be found
+:ref:`here `.
+An overview of all configuration options can be found
+:ref:`here `.
+In particular, it is recommended to read the section on how to :ref:`specify
+configuration options ` and the section on
+:ref:`Finding data `.
+
+To install the default configuration in the default location, run
+
+.. code:: bash
+
+    esmvaltool config get_config_user
+
+Note that this configuration file needs to be customized using the
+instructions above before you can run a recipe, so that the ``esmvaltool``
+command can find the data on your system.
+
+There is a lesson available in the
+`ESMValTool tutorial `_
+that describes how to personalize the configuration. It can be found
+`at this site `_.
diff --git a/doc/sphinx/source/quickstart/index.rst b/doc/sphinx/source/quickstart/index.rst
new file mode 100644
index 0000000000..f0ff76febc
--- /dev/null
+++ b/doc/sphinx/source/quickstart/index.rst
@@ -0,0 +1,10 @@
+Getting started
+***************
+
+.. toctree::
+   :maxdepth: 1
+
+   Installation 
+   Configuration 
+   Running 
+   Output 
diff --git a/doc/sphinx/source/quickstart/installation.rst b/doc/sphinx/source/quickstart/installation.rst
new file mode 100644
index 0000000000..9f66c1f670
--- /dev/null
+++ b/doc/sphinx/source/quickstart/installation.rst
@@ -0,0 +1,768 @@
+.. _install:
+
+************
+Installation
+************
+
+.. note::
+   ESMValTool now uses `mamba` instead of `conda` for the recommended
+   installation.
+   For more information about the change, have a look at
+   :ref:`Move to Mamba`.
+
+ESMValTool supports Python 3.10 and later and requires Linux or MacOS.
+Successful usage on Windows has been reported by following the Linux
+installation instructions with
+`WSL `__.
+
+ESMValTool can be installed in multiple ways.
+
+Recommended installation method:
+
+Install the :ref:`mamba package manager ` and then follow
+the instructions for
+
+* :ref:`ESMValTool installation on Linux`
+* :ref:`ESMValTool installation on MacOS`.
+
+Further options for installation are:
+
+* :ref:`From the source code` available at https://github.com/ESMValGroup/ESMValTool;
+* :ref:`From pre-installed versions on HPC clusters`;
+* :ref:`Deployment through a Docker container` (see https://www.docker.com);
+* :ref:`Deployment through a Singularity container` (see https://sylabs.io/guides/latest/user-guide/);
+* :ref:`Installation with pip ` (see https://pypi.org);
+* :ref:`installation_from_the_conda_lock_file`.
+
+The next sections will detail the procedure to install ESMValTool through each
+of these methods.
+
+There is also a lesson available in the
+`ESMValTool tutorial `_
+that describes the installation of the ESMValTool in more detail.
+It can be found
+`here `_.
+
+See `common installation issues`_ if you run into trouble.
+
+.. _install_with_mamba:
+
+Mamba/Conda installation
+========================
+
+In order to install ESMValTool and its dependencies from
+`conda-forge `__, you will first need to install the
+`mamba package manager `__.
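+
+If you are unsure whether a (possibly system-provided) conda or mamba is
+already on your ``PATH``, you can check before installing anything (a
+minimal, non-destructive sketch; the printed paths and versions will differ
+per system):
+
+.. code-block:: bash
+
+    # Report any mamba/conda executables already on the PATH, with versions.
+    command -v mamba && mamba --version
+    command -v conda && conda --version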
+
+We recommend using `mamba `__ as a package manager
+for your conda environments instead of
+`conda `__ because it is
+much faster; see `move-to-mamba`_ for more information.
+
+For a minimal mamba installation (recommended), go to
+https://mamba.readthedocs.io/en/latest/installation.html.
+
+.. note::
+   It is recommended that you always use the latest version of mamba, as
+   problems have been reported when trying to use older versions.
+
+.. note::
+   Some systems provide a pre-installed version of conda or mamba (e.g. via
+   the module environment).
+   However, several users reported problems when installing with such
+   versions. It is therefore preferable to use a local, fully user-controlled
+   mamba installation.
+
+First download the installation file for
+`Linux `_
+or
+`MacOSX `_.
+After downloading the installation file from one of the links above, execute
+it by running (Linux example):
+
+.. code-block:: bash
+
+    bash Miniforge3-Linux-x86_64.sh
+
+and follow the instructions on your screen.
+
+.. note::
+   Make sure to choose an installation location where you have at least 10 GB
+   of disk space available.
+
+During installation, mamba will ask you if you want ``mamba`` to be
+automatically loaded from your ``.bashrc`` or ``.bash-profile`` files.
+It is recommended that you answer yes.
+If you answered no, you can load the correct paths and environment variables
+later by running:
+
+.. code-block:: bash
+
+    source /etc/profile.d/conda.sh
+
+where ```` is the installation location of mamba (e.g.
+``/home/$USER/miniforge3`` if you chose the default installation path).
+
+If you use a shell other than Bash, have a look at the available
+configurations in the ``/etc/profile.d`` directory.
+
+You can check that mamba was installed correctly by running
+
+.. code-block:: bash
+
+    which mamba
+
+This should show the path to your mamba executable, e.g.
+``~/miniforge3/bin/mamba``.
+
+It is recommended to update both mamba and conda after installing:
+
+.. code-block:: bash
+
+    mamba update --name base mamba conda
+
+.. _install_on_linux:
+
+ESMValTool installation on Linux
+--------------------------------
+
+Once you have installed the mamba package manager, you can install
+the entire ESMValTool package by running:
+
+.. code-block:: bash
+
+    mamba create --name esmvaltool esmvaltool
+
+It is also possible to install just a subset of the ESMValTool dependencies
+by installing one or more of the :ref:`subpackages `
+described in the next section.
+
+The command above will create a new
+`conda environment `_
+called ``esmvaltool``, and install ESMValTool in it.
+Of course it is also possible to choose a different name than ``esmvaltool``
+for the environment.
+
+.. note::
+
+   Creating a new conda environment is often much faster and more reliable
+   than trying to update an existing conda environment.
+   Therefore it is recommended that you create a new environment when you
+   want to upgrade to the latest version.
+
+The next step is to check that the installation works properly.
+
+First activate the environment with the command:
+
+.. code-block:: bash
+
+    conda activate esmvaltool
+
+and then run the tool with the command:
+
+.. code-block:: bash
+
+    esmvaltool --help
+
+If everything was installed properly, ESMValTool should have printed a help
+message to the console.
+
+
+.. _conda subpackages:
+
+Installation of subpackages
+---------------------------
+
+The diagnostics bundled in ESMValTool are scripts in four different
+programming languages: Python, NCL, R, and Julia.
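+
+For example, a quick way to see which of these languages a particular recipe
+relies on is to look at the file extensions of the diagnostic scripts it
+references (a rough sketch; ``recipe_example.yml`` is a placeholder for a
+real recipe file):
+
+.. code-block:: bash
+
+    # List the diagnostic scripts referenced by a recipe; the extension
+    # (.py, .ncl, .R, .jl) indicates the language of each diagnostic.
+    grep 'script:' recipe_example.yml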
+
+There are three language-specific packages available:
+
+* ``esmvaltool-ncl``
+* ``esmvaltool-python``
+* ``esmvaltool-r``
+
+The main ``esmvaltool`` package contains all three subpackages listed above.
+For the Julia dependencies, there is no subpackage yet, but there are special
+:ref:`installation instructions `.
+If you only need to run a recipe with diagnostics in some of these languages,
+it is possible to install only the dependencies needed to do just that.
+The diagnostic script(s) used in each recipe are documented in :ref:`recipes`.
+The extension of the diagnostic script can be used to see in which language a
+diagnostic script is written (``.py`` for Python, ``.ncl`` for NCL, ``.R``
+for R, and ``.jl`` for Julia diagnostics).
+
+To install support for diagnostics written in Python and NCL into an existing
+environment, run
+
+.. code-block:: bash
+
+    mamba install esmvaltool-python esmvaltool-ncl
+
+Some of the CMORization scripts are written in Python, while others are
+written in NCL. Therefore, both ``esmvaltool-python`` and ``esmvaltool-ncl``
+need to be installed in order to be able to run all CMORization scripts.
+
+Note that the ESMValTool source code is contained in the ``esmvaltool-python``
+package, so this package will always be installed as a dependency if you
+install one or more of the packages for other languages.
+
+.. _install_julia_dependencies:
+
+Installation of Julia dependencies
+----------------------------------
+
+If you want to use the ESMValTool Julia functionality, you will also need to
+install Julia. If you are just getting started, we suggest that you
+come back to this step later, when and if you need it.
+To perform the Julia installation, make sure that your conda
+environment is activated and then execute
+
+.. code-block:: bash
+
+    mamba install julia
+    esmvaltool install Julia
+
+.. _install_on_macosx:
+
+ESMValTool installation on MacOS
+--------------------------------
+
+The Python diagnostics of the ESMValTool are supported on MacOS, but Julia,
+NCL, and R are not.
+If any of these are needed, deployment through a
+:ref:`Docker`
+container is advised.
+
+The ``esmvaltool-python`` diagnostics can be installed as follows:
+
+First, ensure mamba is installed (see install_with_mamba_ for more details).
+
+Create a new environment with the ``esmvaltool-python`` package:
+
+.. code-block:: bash
+
+    mamba create --name esmvaltool esmvaltool-python
+
+Activate the new environment:
+
+.. code-block:: bash
+
+    conda activate esmvaltool
+
+Confirm that the ESMValTool is working with:
+
+.. code-block:: bash
+
+    esmvaltool --help
+
+Note that some recipes may depend on the OpenMP library, which does not
+install via mamba on MacOS. To install this library with Homebrew, run:
+
+.. code-block:: bash
+
+    brew install libomp
+
+In case you do not have Homebrew, follow the installation instructions
+`here `__.
+
+.. _install_from_source:
+
+Install from source
+===================
+
+Installing the tool from source is recommended if you need the very latest
+features or if you would like to contribute to its development.
+
+*Obtaining the source code*
+
+The ESMValTool source code is available on a public GitHub repository:
+https://github.com/ESMValGroup/ESMValTool
+
+The easiest way to obtain it is to clone the repository using git
+(see https://git-scm.com/). To clone the public repository:
+
+.. code-block:: bash
+
+    git clone https://github.com/ESMValGroup/ESMValTool
+
+or
+
+.. code-block:: bash
+
+    git clone git@github.com:ESMValGroup/ESMValTool
+
+if you prefer to connect to the repository over SSH.
+
+The command above will create a folder called ``ESMValTool``
+containing the source code of the tool in the current working directory.
+
+.. note::
+   Using SSH is much more convenient if you push to the repository regularly
+   (recommended to back up your work), because then you do not need to type
+   your password over and over again.
+   See
+   `this guide `__
+   for information on how to set it up if you have not done so yet.
+   If you are developing ESMValTool on a shared compute cluster, you can set
+   up
+   `SSH agent forwarding `__
+   to use your local SSH keys also from the remote machine.
+
+It is also possible to work in one of the ESMValTool private repositories,
+e.g.:
+
+.. code-block:: bash
+
+    git clone https://github.com/ESMValGroup/ESMValTool-private
+
+GitHub also allows one to download the source code as a ``tar.gz`` or ``zip``
+file.
+If you choose to use this option, download the compressed file and extract
+its contents at the desired location.
+
+*Install dependencies*
+
+It is recommended to use mamba to manage ESMValTool dependencies.
+See the :ref:`mamba installation instructions ` at the top
+of this page for instructions on installing mamba.
+To simplify the installation process, an environment definition file is
+provided in the repository (``environment.yml`` in the root folder).
+
+The ESMValTool conda environment file can also be used as a requirements list
+for those cases in which a mamba installation is not possible or advisable.
+From now on, we will assume that the installation is going to be done through
+mamba.
+
+Ideally, you should create a separate conda environment for ESMValTool, so it
+is independent of any other Python tools present in the system.
+
+To create an environment, go to the directory containing the ESMValTool source
+code that you just downloaded. It is called ``ESMValTool`` if you did not
+choose a different name.
+
+.. code-block:: bash
+
+    cd ESMValTool
+
+and create a new environment called ``esmvaltool`` with the command (when on
+Linux):
+
+.. code-block:: bash
+
+    mamba env create --name esmvaltool --file environment.yml
+
+or (when on MacOS)
+
+.. code-block:: bash
+
+    mamba env create --name esmvaltool --file environment_osx.yml
+
+This will install all of the required development dependencies.
+Note that the MacOS environment file contains only Python dependencies,
+so you will not be able to run NCL, R, or Julia diagnostics with it.
+
+.. note::
+   The environment is called ``esmvaltool`` in the example above, but it is
+   possible to use the option ``--name some_environment_name`` to define a
+   different name.
+   This can be useful when you have an older ESMValTool installation that you
+   would like to keep.
+   It is recommended that you create a new environment when updating
+   ESMValTool.
+
+Next, activate the environment by using the command:
+
+.. code-block:: bash
+
+    conda activate esmvaltool
+
+.. attention::
+   From now on, we assume that the conda environment containing the
+   development dependencies for ESMValTool is activated.
+
+*Install ESMValTool*
+
+Once all dependencies have been installed, ESMValTool itself can be installed
+by running the following command in the directory containing the ESMValTool
+source code (called ``ESMValTool`` if you did not choose a different name):
+
+.. code-block:: bash
+
+    pip install --editable '.[develop]'
+
+Using the ``--editable`` flag will cause the installer to create a symbolic
+link from the installation location to your source code, so any changes you
+make to the source code will immediately be available in the installed
+version of the tool.
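+
+To confirm that the editable install points at your checkout rather than at a
+copy inside the environment, you can print the package location (a minimal
+sketch; the printed path should point into your ``ESMValTool`` checkout,
+e.g. ``.../ESMValTool/esmvaltool``):
+
+.. code-block:: bash
+
+    # Print where Python imports the esmvaltool package from.
+    python -c 'import esmvaltool; print(esmvaltool.__path__[0])'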
+
+If you would like to run Julia diagnostic scripts, you will need to
+install the ESMValTool Julia dependencies:
+
+.. code-block:: bash
+
+    esmvaltool install Julia
+
+The next step is to check that the installation works properly.
+To do this, run the tool with:
+
+.. code-block:: bash
+
+    esmvaltool --help
+
+If everything was installed properly, ESMValTool should have printed a
+help message to the console.
+
+.. note::
+   **MacOS users:** some recipes may depend on the OpenMP library, which
+   does not install via mamba on MacOS. Instead run
+
+   .. code-block:: bash
+
+       brew install libomp
+
+   to install the library with Homebrew. In case you do not have Homebrew,
+   follow the installation instructions `here `__.
+
+For a more complete installation verification, run the automated tests and
+confirm that no errors are reported:
+
+.. code-block:: bash
+
+    pytest -m "not installation"
+
+If you want to run the full test suite, remove the ``-m "not installation"``
+flag. To run the tests on multiple threads, making the run faster, use the
+``-n N`` flag, where N is the number of available threads, e.g.:
+
+.. code-block:: bash
+
+    pytest -n 4
+
+This concludes the installation from source guide. However, if you would
+like to do development work on ESMValCore, please read on.
+
+.. _esmvalcore-development-installation:
+
+Using the development version of the ESMValCore package
+-------------------------------------------------------
+
+If you need the latest developments of the ESMValCore package, you
+can install it from source into the same conda environment.
+
+.. attention::
+   The recipes and diagnostics in the ESMValTool repository are compatible
+   with the latest released version of the ESMValCore.
+   Using the development version of the ESMValCore package is only
+   recommended if you are planning to develop new features for the
+   ESMValCore, e.g. you want to implement a new preprocessor function.
+
+First follow the steps in the section above to
+:ref:`install ESMValTool from source `.
+Next, go to the place where you would like to keep the source code and clone
+the ESMValCore GitHub repository:
+
+.. code-block:: bash
+
+    git clone https://github.com/ESMValGroup/ESMValCore
+
+or
+
+.. code-block:: bash
+
+    git clone git@github.com:ESMValGroup/ESMValCore
+
+The command above will create a folder called ``ESMValCore``
+containing the source code of the tool in the current working directory.
+
+Go into the folder you just downloaded
+
+.. code-block:: bash
+
+    cd ESMValCore
+
+and then install ESMValCore in development mode
+
+.. code-block:: bash
+
+    pip install --editable '.[develop]'
+
+To check that the installation was successful, run
+
+.. code-block:: bash
+
+    python -c 'import esmvalcore; print(esmvalcore.__path__[0])'
+
+This should show the directory of the source code that you just downloaded.
+
+If the command above shows a directory inside your conda environment instead,
+e.g. ``~/miniforge3/envs/esmvaltool/lib/python3.11/site-packages/esmvalcore``,
+you may need to manually remove that directory and run
+``pip install --editable '.[develop]'`` again.
+
+.. _install_on_hpc:
+
+Pre-installed versions on HPC clusters / other servers
+======================================================
+
+ESMValTool is available on the HPC clusters CEDA-JASMIN and DKRZ-Levante, and
+on the Met Office Linux estate, so there is no need to install ESMValTool if
+you are just running recipes:
+
+- CEDA-JASMIN: `esmvaltool` is available on the scientific compute nodes
+  (`sciX.jasmin.ac.uk` where `X = 1, 2, 3, 4, 5`) after login and module
+  loading via `module load esmvaltool`; see the helper page at
+  `CEDA `__.
+- DKRZ-Levante: `esmvaltool` is available on login nodes (`levante.dkrz.de`)
+  after login and module loading via `module load esmvaltool`; the command
+  `module help esmvaltool` provides some information about the module.
+  A Jupyter kernel based on the latest module is available from
+  `DKRZ-JupyterHub `__.
+- Met Office: `esmvaltool` is available on the Linux estate after login and
+  module loading via `module load`; see the ESMValTool Community of Practice
+  SharePoint site for more details.
+
+The ESMValTool Tutorial provides a `quickstart guide `__
+that is particularly suited for new users who have access to a pre-installed
+version of ESMValTool.
+
+Information on how to request an account at CEDA-JASMIN and DKRZ-Levante and
+to get started with these HPC clusters can be found on the setup page of the
+tutorial `here `__.
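+
+Once logged in on one of these systems, a typical session could look like
+this (a minimal sketch, following the module instructions above; exact
+module names may differ per site):
+
+.. code-block:: bash
+
+    # Load the centrally installed ESMValTool module and check the version.
+    module load esmvaltool
+    esmvaltool version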
+
+.. _install_with_docker:
+
+Docker installation
+===================
+
+ESMValTool is also provided through `DockerHub `_
+in the form of Docker containers.
+See https://docs.docker.com for more information about Docker containers and
+how to run them.
+
+You can get the latest release with
+
+.. code-block:: bash
+
+    docker pull esmvalgroup/esmvaltool:stable
+
+If you want to use the current main branch, use
+
+.. code-block:: bash
+
+    docker pull esmvalgroup/esmvaltool:latest
+
+To run a container using those images, use:
+
+.. code-block:: bash
+
+    docker run esmvalgroup/esmvaltool:stable --help
+
+Note that the container does not see the data or environment variables
+available in the host by default. You can make data available with
+``-v /path:/path/in/container`` and environment variables with
+``-e VARNAME``.
+
+For example, the following command would run a recipe
+
+.. code-block:: bash
+
+    docker run -e HOME -v "$HOME":"$HOME" -v /data:/data esmvalgroup/esmvaltool:stable run examples/recipe_python.yml
+
+with the environment variable ``$HOME`` available inside the container and
+the data in the directories ``$HOME`` and ``/data``, so these can be used to
+find the configuration file, recipe, and data.
+
+It might be useful to define a `bash alias
+`_
+or script to abbreviate the above command, for example
+
+.. code-block:: bash
+
+    alias esmvaltool="docker run -e HOME -v $HOME:$HOME -v /data:/data esmvalgroup/esmvaltool:stable"
+
+would allow using the ``esmvaltool`` command without even noticing that the
+tool is running inside a Docker container.
+
+.. _install_with_singularity:
+
+Singularity installation
+========================
+
+Docker is usually forbidden on clusters for security reasons. However,
+there is a more secure alternative for running containers that is usually
+available on them: `Singularity `_.
+
+Singularity can use Docker containers directly from DockerHub with the
+following command
+
+.. code-block:: bash
+
+    singularity run docker://esmvalgroup/esmvaltool:stable run examples/recipe_python.yml
+
+Note that the container does not see the data available in the host by
+default.
+You can make host data available with ``-B /path:/path/in/container``.
+
+It might be useful to define a `bash alias
+`_
+or script to abbreviate the above command, for example
+
+.. code-block:: bash
+
+    alias esmvaltool="singularity run -B $HOME:$HOME -B /data:/data docker://esmvalgroup/esmvaltool:stable"
+
+would allow using the ``esmvaltool`` command without even noticing that the
+tool is running inside a Singularity container.
+
+Some clusters may not allow connections to external services; in those cases,
+you can first create a Singularity image locally:
+
+.. code-block:: bash
+
+    singularity build esmvaltool.sif docker://esmvalgroup/esmvaltool:stable
+
+and then upload the image file ``esmvaltool.sif`` to the cluster.
+To run the container using the image file ``esmvaltool.sif``, use:
+
+.. code-block:: bash
+
+    singularity run esmvaltool.sif run examples/recipe_python.yml
+
+.. _install_with_pip:
+
+Pip installation
+================
+
+It is also possible to install ESMValTool from
+`PyPI `_.
+However, this first requires installing, by some other means, the
+dependencies that are not available on PyPI.
+The list of required dependencies can be found in
+:download:`environment.yml <../../../../environment.yml>`.
+
+.. warning::
+
+   It is recommended to use the installation with mamba instead, as it may
+   not be easy to install the correct versions of all dependencies.
+
+After installing the dependencies that are not available from PyPI_, install
+ESMValTool and any remaining Python dependencies with the command:
+
+.. code-block:: bash
+
+    pip install esmvaltool
+
+If you would like to run Julia diagnostic scripts, you will also need to
+install the Julia dependencies:
+
+.. code-block:: bash
+
+    esmvaltool install Julia
+
+.. _installation_from_the_conda_lock_file:
+
+Installation from the conda lock file
+=====================================
+
+The conda lock file is an alternative to the ``environment.yml`` file used in
+the :ref:`installation from source instructions `.
+All other steps in those installation instructions are the same.
+
+The conda lock file can be used to install the dependencies of ESMValTool
+whenever the conda environment defined by ``environment.yml`` cannot be
+solved for some reason.
+A conda lock file is a reproducible environment file that contains links to
+dependency packages as they are hosted on the Anaconda cloud;
+these have frozen version numbers, build hashes, and channel names.
+These parameters are established at the time the conda lock file is created,
+so they may become outdated after a while.
+Therefore, we regenerate these lock files every 10 days through automatic
+Pull Requests (or more frequently, since the automatic generator runs on
+merges on the ``main`` branch too), to minimize the risk of dependencies
+becoming outdated.
+
+Conda environment creation from a lock file is done with the following
+command:
+
+.. code-block:: bash
+
+    conda create --name esmvaltool --file conda-linux-64.lock
+
+The latest, most up-to-date file can always be downloaded directly from the
+source code repository; a direct download link can be found `here `__.
+
+.. note::
+   For instructions on how to manually create the lock file, see
+   :ref:`these instructions `.
+
+.. _common installation issues:
+
+Common installation problems and their solutions
+================================================
+
+Problems with proxies
+---------------------
+If you are installing ESMValTool from source from behind a proxy that does
+not trust the usual PyPI URLs, you can declare them with the option
+``--trusted-host``, e.g.
+
+.. code-block:: bash
+
+    pip install --trusted-host=pypi.python.org --trusted-host=pypi.org --trusted-host=files.pythonhosted.org -e .[develop]
+
+If R packages fail to download, you might be able to solve this by
+setting the environment variable ``http_proxy`` to the correct value, e.g.
+in bash:
+
+.. code-block:: bash
+
+    export http_proxy=http://user:pass@proxy_server:port
+
+The username and password can be omitted if they are not required. See e.g.
+`here `__
+for more information.
+
+Anaconda servers connection issues
+----------------------------------
+HTTP connection errors (of e.g. type 404) to the Anaconda servers are rather
+common, and usually a retry will solve the problem.
+
+Installation of R packages fails
+--------------------------------
+Problems have been reported if the ``R`` interpreter was made available
+through the ``module load`` command in addition to installation from mamba.
+If your ESMValTool conda environment is called ``esmvaltool`` and you want to
+use the R interpreter installed from mamba, the path to the R interpreter
+should end with ``mamba/envs/esmvaltool/bin/R`` or
+``conda/envs/esmvaltool/bin/R``.
+When the conda environment for ESMValTool is activated, you can check which R
+interpreter is used by running
+
+.. code-block:: bash
+
+    which R
+
+The Modules package is often used by system administrators to make software
+available to users of scientific compute clusters.
+To list any currently loaded modules, run ``module list``; run
+``module help`` or ``man module`` for more information about the Modules
+package.
+
+Problems when using SSH
+-----------------------
+If you log in to a cluster or other device via SSH and your origin
+machine sends the ``locale`` environment via the SSH connection,
+make sure the environment is set correctly, specifically that ``LANG`` and
+``LC_ALL`` are set (for GB English UTF-8 encoding these
+variables must be set to ``en_GB.UTF-8``; you can set them by adding
+``export LANG=en_GB.UTF-8`` and ``export LC_ALL=en_GB.UTF-8`` to the
+``.profile`` of your origin or login machine).
+
+Problems when updating the conda environment
+--------------------------------------------
+Usually mamba is much better at solving new environments than updating older
+environments, so it is often a good idea to create a new environment if
+updating does not work.
+
+Do not run ``mamba update --update-all`` in the ``esmvaltool``
+environment since that will update some packages that are pinned to
+specific versions for the correct functionality of the tool.
+
+
+.. _move-to-mamba:
+
+Move to Mamba
+=============
+
+Mamba is a much faster alternative to `conda`: environment creation and
+updating benefit from its much faster (C++ backend) dependency solver.
+Tests have been performed to verify the integrity of the `esmvaltool`
+environment built with `mamba`, and we are now confident that the change
+will not affect the way ESMValTool is installed and run, whether it be on a
+Linux or MacOS platform.
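+
+As an illustration of the drop-in nature of the change (a minimal sketch,
+using the environment name ``esmvaltool`` as an example):
+
+.. code-block:: bash
+
+    # Package and environment management commands now go through mamba ...
+    mamba create --name esmvaltool esmvaltool
+    mamba install --name esmvaltool esmvaltool-ncl
+
+    # ... while environment activation still uses conda.
+    conda activate esmvaltool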
diff --git a/doc/sphinx/source/quickstart/output.rst b/doc/sphinx/source/quickstart/output.rst
new file mode 100644
index 0000000000..33836f1c9a
--- /dev/null
+++ b/doc/sphinx/source/quickstart/output.rst
@@ -0,0 +1,154 @@
+.. _outputdata:
+
+******
+Output
+******
+
+ESMValTool automatically generates a new output directory with every run. The
+location is determined by the :ref:`configuration option
+` ``output_dir``, the recipe name, and the date and
+time, using the format: YYYYMMDD_HHMMSS.
+
+For instance, a typical output location would be:
+output_directory/recipe_ocean_amoc_20190118_1027/
+
+This is effectively produced by the combination:
+output_dir/recipe_name_YYYYMMDD_HHMMSS/
+
+
+This directory will contain 4 further subdirectories:
+
+1. `Diagnostic output`_ (work): A place for any diagnostic script results
+   that are not plots, e.g. files in NetCDF format (depends on the
+   diagnostics).
+
+2. `Plots`_: The location for all the plots, split by individual diagnostics
+   and fields.
+
+3. `Run`_: This directory includes all log files, a copy of the recipe, a
+   summary of the resource usage, and the `settings.yml`_ interface files and
+   temporary files created by the diagnostic scripts.
+
+4. `Preprocessed datasets`_ (preproc): This directory contains all the
+   preprocessed netCDF data and the `metadata.yml`_ interface files. Note
+   that by default this directory will be deleted after each run, because
+   most users will only need the results from the diagnostic scripts.
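+
+For example, after a run of ``examples/recipe_python.yml`` the top level of
+the output directory might look like this (a sketch; the timestamp is
+hypothetical and depends on when the run was started):
+
+.. code-block:: bash
+
+    $ ls output_directory/recipe_python_20190118_102712
+    plots  preproc  run  work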
+
+
+Preprocessed datasets
+=====================
+
+The preprocessed datasets will be stored in the preproc/ directory.
+Each variable in each diagnostic will have its own `metadata.yml`_
+interface file saved in the preproc directory.
+
+If the :ref:`configuration option `
+``save_intermediary_cubes`` is set to ``true``, then the intermediary cubes
+will also be saved here.
+This option is set to ``false`` by default.
+
+If the :ref:`configuration option `
+``remove_preproc_dir`` is set to ``true``, then the preproc directory will be
+deleted after the run completes.
+This option is set to ``true`` by default.
+
+
+Run
+===
+
+The log files in the run directory are automatically generated by ESMValTool
+and record the output messages produced during the run. They can be helpful
+for debugging or monitoring the job, and they also preserve a record of the
+job's screen output after the job has completed.
+
+The run directory will also contain a copy of the recipe and the
+`settings.yml`_ file, described below.
+The run directory is also where the diagnostics are executed, and may also
+contain several temporary files while diagnostics are running.
+
+Diagnostic output
+=================
+
+The work/ directory will contain all files that are output at the diagnostic
+stage. That is, the model data is preprocessed by ESMValTool and stored in
+the preproc/ directory. These files are opened by the diagnostic script, then
+some processing is applied. Once the diagnostic-level processing has been
+applied, the results should be saved to the work directory.
+
+
+Plots
+=====
+
+The plots directory is where diagnostics save their output figures. These
+plots are saved in the format requested by the :ref:`configuration option
+` ``output_file_type``.
+
+
+Settings.yml
+============
+
+The settings.yml file is automatically generated by ESMValCore. For each
+diagnostic, a unique settings.yml file will be produced.
+
+The settings.yml file passes several global-level keys to diagnostic scripts.
+This includes several flags from the configuration (such as
+``write_netcdf``, ``write_plots``, etc.), several paths which are specific to
+the diagnostic being run (such as ``plot_dir`` and ``run_dir``), and the
+location on disk of the metadata.yml file (described below).
+
+.. code-block:: yaml
+
+    input_files: [[...]recipe_ocean_bgc_20190118_134855/preproc/diag_timeseries_scalars/mfo/metadata.yml]
+    log_level: debug
+    output_file_type: png
+    plot_dir: [...]recipe_ocean_bgc_20190118_134855/plots/diag_timeseries_scalars/Scalar_timeseries
+    profile_diagnostic: false
+    recipe: recipe_ocean_bgc.yml
+    run_dir: [...]recipe_ocean_bgc_20190118_134855/run/diag_timeseries_scalars/Scalar_timeseries
+    script: Scalar_timeseries
+    version: 2.0a1
+    work_dir: [...]recipe_ocean_bgc_20190118_134855/work/diag_timeseries_scalars/Scalar_timeseries
+    write_netcdf: true
+    write_plots: true
+
+The first item in the settings file will be a list of `Metadata.yml`_ files.
+There is a metadata.yml file generated for each field in each diagnostic.
+
+
+Metadata.yml
+============
+
+The metadata.yml files are automatically generated by ESMValTool. Along with
+the settings.yml file, they pass all the paths, boolean flags, and additional
+arguments that your diagnostic needs to know in order to run.
+
+In Python diagnostics, the metadata is loaded from ``cfg`` as a dictionary
+object.
+
+Here is an example metadata.yml file:
+
+.. code-block:: yaml
+
+    ?
+      [...]/recipe_ocean_bgc_20190118_134855/preproc/diag_timeseries_scalars/mfo/CMIP5_HadGEM2-ES_Omon_historical_r1i1p1_TO0M_mfo_2002-2004.nc
+    : cmor_table: CMIP5
+      dataset: HadGEM2-ES
+      diagnostic: diag_timeseries_scalars
+      end_year: 2004
+      ensemble: r1i1p1
+      exp: historical
+      field: TO0M
+      filename: [...]recipe_ocean_bgc_20190118_134855/preproc/diag_timeseries_scalars/mfo/CMIP5_HadGEM2-ES_Omon_historical_r1i1p1_TO0M_mfo_2002-2004.nc
+      frequency: mon
+      institute: [INPE, MOHC]
+      long_name: Sea Water Transport
+      mip: Omon
+      modeling_realm: [ocean]
+      preprocessor: prep_timeseries_scalar
+      project: CMIP5
+      recipe_dataset_index: 0
+      short_name: mfo
+      standard_name: sea_water_transport_across_line
+      start_year: 2002
+      units: kg s-1
+      variable_group: mfo
+
+
+As you can see, this is effectively a dictionary with several items including
+data paths, metadata and other information.
+
+There are several tools available in Python which are built to read and parse
+these files. The tools are available in the shared directory in the
+diagnostics directory.
diff --git a/doc/sphinx/source/quickstart/running.rst b/doc/sphinx/source/quickstart/running.rst
new file mode 100644
index 0000000000..20cb8620b0
--- /dev/null
+++ b/doc/sphinx/source/quickstart/running.rst
@@ -0,0 +1,111 @@
+.. _running:
+
+*******
+Running
+*******
+
+ESMValTool is mostly used as a command line tool.
+Whenever your conda environment for ESMValTool is active, you can run the
+command ``esmvaltool``.
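+
+For example (a quick check that the installation works; this sketch assumes
+the environment is called ``esmvaltool``):
+
+.. code:: bash
+
+    conda activate esmvaltool
+    esmvaltool --help
+    esmvaltool version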
+
+See :ref:`running esmvaltool ` in the ESMValCore
+documentation for an introduction to the ``esmvaltool`` command.
+
+Running your first recipe
+=========================
+
+A step-by-step tutorial on how to run your first recipe is available in the
+`ESMValTool tutorial `_;
+it can be found
+`here `_.
+
+An
+`example recipe `_
+is available in the ESMValTool installation folder as
+``examples/recipe_python.yml``.
+
+This recipe finds data from BCC-ESM1 and CanESM2 and creates two plot types:
+
+- a global map plot that shows the monthly mean 2m surface air temperature in
+  January 2000.
+- a time series plot that shows the globally averaged annual mean 2m surface
+  air temperature and compares it to the one in Amsterdam.
+
+To run this recipe and automatically download the required climate data
+from ESGF to the local directory ``~/climate_data``, run
+
+.. code:: bash
+
+    esmvaltool run examples/recipe_python.yml --search_esgf=when_missing
+
+The ``--search_esgf=when_missing`` option tells ESMValTool to search for and
+download the necessary climate data files if they cannot be found locally.
+The data only needs to be downloaded once; every following run will reuse
+previously downloaded data.
+If you have all required data available locally, you can run the tool with
+the ``--search_esgf=never`` argument (the default).
+Note that in that case the required data should be located in the directories
+specified in the configuration (see :ref:`esmvalcore:config_option_rootpath`).
+A third option, ``--search_esgf=always``, is available.
+With this option, the tool will first check the ESGF for the needed data,
+regardless of any local data availability; if the data found on ESGF is newer
+than the local data (if any), or the user specifies a version of the data
+that is available only from the ESGF, then that data will be downloaded;
+otherwise, local data will be used.
+Recall that the chapter on :ref:`configuring ESMValTool `
+provides an explanation of how to set up the configuration.
+
+See :ref:`running esmvaltool ` in the ESMValCore
+documentation for a more complete introduction to the ``esmvaltool`` command.
+
+.. _recipes_command:
+
+Available diagnostics and metrics
+=================================
+
+Although ESMValTool can be used to download data, analyze it using
+ESMValCore's preprocessing modules, and create your own analysis code, its
+main purpose is the continuously growing set of diagnostics and metrics that
+it directly provides to the user. These metrics and diagnostics are provided
+as a set of preconfigured recipes that users can run or customize for their
+own analysis.
+The latest list of available recipes can be found :ref:`here `.
+
+In order to make the management of these installed recipes easier, ESMValTool
+provides the ``recipes`` command group with utilities that help users
+discover and customize the provided recipes.
+
+The first command in this group allows users to get the complete list of
+installed recipes printed to the console:
+
+.. code:: bash
+
+    esmvaltool recipes list
+
+If the user then wants to explore any one of these recipes, it can be printed
+using the following command:
+
+.. code:: bash
+
+    esmvaltool recipes show recipe_name.yml
+
+Note that there is no ``recipe_name.yml`` shipped with ESMValTool; replace
+this with a recipe that is available, for example
+`examples/recipe_python.yml `_.
+Finally, to get a local copy that can then be customized and run, users can
+run the following command:
+
+.. code:: bash
+
+    esmvaltool recipes get recipe_name.yml
+
+Note that the ``esmvaltool run recipe_name.yml`` command will first check
+whether ``recipe_name.yml`` is the path to an existing file.
+If this is the case, it will run that recipe.
+If not, it will check whether it is a path, relative to the
+`recipes `__
+directory in your ESMValTool installation, to an existing recipe, and run
+that.
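+
+For example (a sketch; it assumes ``recipes get`` placed a copy of the
+example recipe in the current working directory):
+
+.. code:: bash
+
+    # runs the local copy obtained with 'esmvaltool recipes get'
+    esmvaltool run ./recipe_python.yml
+    # runs the recipe shipped with the installation, resolved relative
+    # to the installed recipes directory
+    esmvaltool run examples/recipe_python.yml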
+
+Running multiple recipes
+========================
+
+Have a look at :ref:`running_multiple_recipes` if you are interested in
+running multiple recipes in parallel.
diff --git a/doc/sphinx/source/recipes/broken_recipe_list.rst b/doc/sphinx/source/recipes/broken_recipe_list.rst
new file mode 100644
index 0000000000..f2c25623ac
--- /dev/null
+++ b/doc/sphinx/source/recipes/broken_recipe_list.rst
@@ -0,0 +1,44 @@
+.. _broken-recipe-list:
+
+Broken recipe list
+==================
+
+This table gives an overview of the recipes that are known to have issues.
+The table is always valid for the latest stable release of ESMValTool.
+More details can be found in the :ref:`broken recipe policy
+`.
+
+.. list-table:: Broken recipes
+   :widths: 25 25 25 25 25
+   :header-rows: 1
+
+   * - Broken recipe
+     - Affected diagnostics
+     - Broken since release
+     - Problem
+     - GitHub issue
+   * - :ref:`recipe_julia.yml `
+     - `example`
+     - v2.5.0
+     - Fill values are not interpreted, resulting in an unusable plot
+     - `#2595 `_
+   * - :ref:`recipe_climwip_brunner2019_med.yml `
+     - All (preprocessor issue)
+     - v2.11.0
+     - Failed to run preprocessor function ``fix_metadata`` on the data: Unable to convert units
+     - `#3694 `_
+   * - :ref:`recipe_ocean_amoc.yml `
+     - ``diag_timeseries_amoc``, ``diag_transects``
+     - v2.11.0
+     - CESM1 CMIP5 Omon data no longer available
+     - `#3693 `_
+   * - :ref:`recipe_russell18jgr.yml `
+     - ``Figure_4``
+     - v2.11.0
+     - CESM1 CMIP5 Omon data no longer available
+     - `#3693 `_
+   * - :ref:`recipe_wenzel14jgr.yml `
+     - ``diag_tsline_Fig2d``
+     - v2.11.0
+     - CESM1 CMIP5 Omon data no longer available
+     - `#3693 `_
diff --git a/doc/sphinx/source/recipes/figures/Combined_Indices_Area_Average/Nino3.4_tos_Dec-Feb_running-mean__1950-2005.png b/doc/sphinx/source/recipes/figures/Combined_Indices_Area_Average/Nino3.4_tos_Dec-Feb_running-mean__1950-2005.png
new file mode 100644
index 0000000000..7a96af3b80
Binary files /dev/null and b/doc/sphinx/source/recipes/figures/Combined_Indices_Area_Average/Nino3.4_tos_Dec-Feb_running-mean__1950-2005.png differ
diff --git a/doc/sphinx/source/recipes/figures/albedolandcover/MPI-ESM-LR_albedo_change_from_tree_to_crop-grass.png b/doc/sphinx/source/recipes/figures/albedolandcover/MPI-ESM-LR_albedo_change_from_tree_to_crop-grass.png
new file mode 100644
index 0000000000..8ae7fbed68
Binary files /dev/null and b/doc/sphinx/source/recipes/figures/albedolandcover/MPI-ESM-LR_albedo_change_from_tree_to_crop-grass.png differ
diff --git a/doc/sphinx/source/recipes/figures/anav13jclim/cSoil-cVeg_scatter_global.png b/doc/sphinx/source/recipes/figures/anav13jclim/cSoil-cVeg_scatter_global.png
new file mode 100644
index 0000000000..a8f97803c3
Binary files /dev/null and b/doc/sphinx/source/recipes/figures/anav13jclim/cSoil-cVeg_scatter_global.png differ
diff --git a/doc/sphinx/source/recipes/figures/anav13jclim/diag_grading_pr-global_to_diag_grading_gpp-global_RMSD.png b/doc/sphinx/source/recipes/figures/anav13jclim/diag_grading_pr-global_to_diag_grading_gpp-global_RMSD.png
new file mode 100644
index 0000000000..6610ac5002
Binary files /dev/null and
b/doc/sphinx/source/recipes/figures/anav13jclim/diag_grading_pr-global_to_diag_grading_gpp-global_RMSD.png differ diff --git a/doc/sphinx/source/recipes/figures/anav13jclim/gpp_cycle_nh.png b/doc/sphinx/source/recipes/figures/anav13jclim/gpp_cycle_nh.png new file mode 100644 index 0000000000..638d84342d Binary files /dev/null and b/doc/sphinx/source/recipes/figures/anav13jclim/gpp_cycle_nh.png differ diff --git a/doc/sphinx/source/recipes/figures/anav13jclim/gpp_errorbar_trop.png b/doc/sphinx/source/recipes/figures/anav13jclim/gpp_errorbar_trop.png new file mode 100644 index 0000000000..dbe11e4cb3 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/anav13jclim/gpp_errorbar_trop.png differ diff --git a/doc/sphinx/source/recipes/figures/anav13jclim/nbp_evolution_global.png b/doc/sphinx/source/recipes/figures/anav13jclim/nbp_evolution_global.png new file mode 100644 index 0000000000..5b9595dd9a Binary files /dev/null and b/doc/sphinx/source/recipes/figures/anav13jclim/nbp_evolution_global.png differ diff --git a/doc/sphinx/source/recipes/figures/anav13jclim/tas_global.png b/doc/sphinx/source/recipes/figures/anav13jclim/tas_global.png new file mode 100644 index 0000000000..9ef73d8c7d Binary files /dev/null and b/doc/sphinx/source/recipes/figures/anav13jclim/tas_global.png differ diff --git a/doc/sphinx/source/recipes/figures/anav13jclim/tos_scatter_global.png b/doc/sphinx/source/recipes/figures/anav13jclim/tos_scatter_global.png new file mode 100644 index 0000000000..da0629fe2d Binary files /dev/null and b/doc/sphinx/source/recipes/figures/anav13jclim/tos_scatter_global.png differ diff --git a/doc/sphinx/source/recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1988_2008_DJF.png b/doc/sphinx/source/recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1988_2008_DJF.png new file mode 100644 index 0000000000..ccf5c4b1c8 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1988_2008_DJF.png differ diff --git a/doc/sphinx/source/recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1988_2008_JJA.png b/doc/sphinx/source/recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1988_2008_JJA.png new file mode 100644 index 0000000000..38b09c52f9 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1988_2008_JJA.png differ diff --git a/doc/sphinx/source/recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1988_2008_MAM.png b/doc/sphinx/source/recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1988_2008_MAM.png new file mode 100644 index 0000000000..5e7ca417a1 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1988_2008_MAM.png differ diff --git a/doc/sphinx/source/recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1988_2008_SON.png b/doc/sphinx/source/recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1988_2008_SON.png new file mode 100644 index 0000000000..ffb666eea2 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1988_2008_SON.png differ diff --git 
a/doc/sphinx/source/recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1988_2008_scatter.png b/doc/sphinx/source/recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1988_2008_scatter.png new file mode 100644 index 0000000000..67c64a3fa7 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1988_2008_scatter.png differ diff --git a/doc/sphinx/source/recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1994_2014_DJF.png b/doc/sphinx/source/recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1994_2014_DJF.png new file mode 100644 index 0000000000..f653410e07 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1994_2014_DJF.png differ diff --git a/doc/sphinx/source/recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1994_2014_JJA.png b/doc/sphinx/source/recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1994_2014_JJA.png new file mode 100644 index 0000000000..6474acc856 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1994_2014_JJA.png differ diff --git a/doc/sphinx/source/recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1994_2014_MAM.png b/doc/sphinx/source/recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1994_2014_MAM.png new file mode 100644 index 0000000000..2e2fda4cca Binary files /dev/null and b/doc/sphinx/source/recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1994_2014_MAM.png differ diff --git a/doc/sphinx/source/recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1994_2014_SON.png b/doc/sphinx/source/recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1994_2014_SON.png new file mode 100644 index 0000000000..ce4d3fac08 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1994_2014_SON.png differ diff --git a/doc/sphinx/source/recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1994_2014_scatter.png b/doc/sphinx/source/recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1994_2014_scatter.png new file mode 100644 index 0000000000..9226bca81a Binary files /dev/null and b/doc/sphinx/source/recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1994_2014_scatter.png differ diff --git a/doc/sphinx/source/recipes/figures/arctic_ocean/aw_depth.png b/doc/sphinx/source/recipes/figures/arctic_ocean/aw_depth.png new file mode 100644 index 0000000000..1190dff2bb Binary files /dev/null and b/doc/sphinx/source/recipes/figures/arctic_ocean/aw_depth.png differ diff --git a/doc/sphinx/source/recipes/figures/arctic_ocean/aw_temp.png b/doc/sphinx/source/recipes/figures/arctic_ocean/aw_temp.png new file mode 100644 index 0000000000..55c2b05d9b Binary files /dev/null and b/doc/sphinx/source/recipes/figures/arctic_ocean/aw_temp.png differ diff --git a/doc/sphinx/source/recipes/figures/arctic_ocean/bias.png b/doc/sphinx/source/recipes/figures/arctic_ocean/bias.png new file mode 100644 index 0000000000..9ed98ce107 Binary files /dev/null and 
b/doc/sphinx/source/recipes/figures/arctic_ocean/bias.png differ diff --git a/doc/sphinx/source/recipes/figures/arctic_ocean/hofm.png b/doc/sphinx/source/recipes/figures/arctic_ocean/hofm.png new file mode 100644 index 0000000000..6bcc1798cf Binary files /dev/null and b/doc/sphinx/source/recipes/figures/arctic_ocean/hofm.png differ diff --git a/doc/sphinx/source/recipes/figures/arctic_ocean/spatial.png b/doc/sphinx/source/recipes/figures/arctic_ocean/spatial.png new file mode 100644 index 0000000000..aa878b1320 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/arctic_ocean/spatial.png differ diff --git a/doc/sphinx/source/recipes/figures/arctic_ocean/transect.png b/doc/sphinx/source/recipes/figures/arctic_ocean/transect.png new file mode 100644 index 0000000000..d67091f25d Binary files /dev/null and b/doc/sphinx/source/recipes/figures/arctic_ocean/transect.png differ diff --git a/doc/sphinx/source/recipes/figures/arctic_ocean/ts.png b/doc/sphinx/source/recipes/figures/arctic_ocean/ts.png new file mode 100644 index 0000000000..11645f7409 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/arctic_ocean/ts.png differ diff --git a/doc/sphinx/source/recipes/figures/arctic_ocean/vertical.png b/doc/sphinx/source/recipes/figures/arctic_ocean/vertical.png new file mode 100644 index 0000000000..6b80921cfa Binary files /dev/null and b/doc/sphinx/source/recipes/figures/arctic_ocean/vertical.png differ diff --git a/doc/sphinx/source/recipes/figures/autoassess_landsurface/Permafrost_Metrics.png b/doc/sphinx/source/recipes/figures/autoassess_landsurface/Permafrost_Metrics.png new file mode 100644 index 0000000000..46ce008b08 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/autoassess_landsurface/Permafrost_Metrics.png differ diff --git a/doc/sphinx/source/recipes/figures/autoassess_landsurface/Soilmoisture_Metrics.png b/doc/sphinx/source/recipes/figures/autoassess_landsurface/Soilmoisture_Metrics.png new file mode 100644 index 0000000000..541224a1bc Binary files /dev/null and b/doc/sphinx/source/recipes/figures/autoassess_landsurface/Soilmoisture_Metrics.png differ diff --git a/doc/sphinx/source/recipes/figures/autoassess_landsurface/Surfrad_Metrics.png b/doc/sphinx/source/recipes/figures/autoassess_landsurface/Surfrad_Metrics.png new file mode 100644 index 0000000000..d1713aa80e Binary files /dev/null and b/doc/sphinx/source/recipes/figures/autoassess_landsurface/Surfrad_Metrics.png differ diff --git a/doc/sphinx/source/recipes/figures/autoassess_landsurface/pf_extent_asia_ACCESS-CM2.png b/doc/sphinx/source/recipes/figures/autoassess_landsurface/pf_extent_asia_ACCESS-CM2.png new file mode 100644 index 0000000000..bc1db0b5cb Binary files /dev/null and b/doc/sphinx/source/recipes/figures/autoassess_landsurface/pf_extent_asia_ACCESS-CM2.png differ diff --git a/doc/sphinx/source/recipes/figures/autoassess_landsurface/pf_extent_north_america_ACCESS-CM2.png b/doc/sphinx/source/recipes/figures/autoassess_landsurface/pf_extent_north_america_ACCESS-CM2.png new file mode 100644 index 0000000000..93635e260a Binary files /dev/null and b/doc/sphinx/source/recipes/figures/autoassess_landsurface/pf_extent_north_america_ACCESS-CM2.png differ diff --git a/doc/sphinx/source/recipes/figures/autoassess_stratosphere/HadGEM3-GC31-LL_qbo.png b/doc/sphinx/source/recipes/figures/autoassess_stratosphere/HadGEM3-GC31-LL_qbo.png new file mode 100644 index 0000000000..5a8f0f5607 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/autoassess_stratosphere/HadGEM3-GC31-LL_qbo.png differ 
diff --git a/doc/sphinx/source/recipes/figures/autoassess_stratosphere/HadGEM3-GC31-LL_u_jan.png b/doc/sphinx/source/recipes/figures/autoassess_stratosphere/HadGEM3-GC31-LL_u_jan.png new file mode 100644 index 0000000000..c208be086d Binary files /dev/null and b/doc/sphinx/source/recipes/figures/autoassess_stratosphere/HadGEM3-GC31-LL_u_jan.png differ diff --git a/doc/sphinx/source/recipes/figures/autoassess_stratosphere/UKESM1-0-LL_qbo.png b/doc/sphinx/source/recipes/figures/autoassess_stratosphere/UKESM1-0-LL_qbo.png new file mode 100644 index 0000000000..6a3e4cded4 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/autoassess_stratosphere/UKESM1-0-LL_qbo.png differ diff --git a/doc/sphinx/source/recipes/figures/autoassess_stratosphere/UKESM1-0-LL_u_jan.png b/doc/sphinx/source/recipes/figures/autoassess_stratosphere/UKESM1-0-LL_u_jan.png new file mode 100644 index 0000000000..df6abf4c1c Binary files /dev/null and b/doc/sphinx/source/recipes/figures/autoassess_stratosphere/UKESM1-0-LL_u_jan.png differ diff --git a/doc/sphinx/source/recipes/figures/autoassess_stratosphere/metrics.png b/doc/sphinx/source/recipes/figures/autoassess_stratosphere/metrics.png new file mode 100644 index 0000000000..02eb5fad1b Binary files /dev/null and b/doc/sphinx/source/recipes/figures/autoassess_stratosphere/metrics.png differ diff --git a/doc/sphinx/source/recipes/figures/autoassess_stratosphere/qbo_30hpa.png b/doc/sphinx/source/recipes/figures/autoassess_stratosphere/qbo_30hpa.png new file mode 100644 index 0000000000..2b46c992b2 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/autoassess_stratosphere/qbo_30hpa.png differ diff --git a/doc/sphinx/source/recipes/figures/autoassess_stratosphere/teq_100hpa.png b/doc/sphinx/source/recipes/figures/autoassess_stratosphere/teq_100hpa.png new file mode 100644 index 0000000000..03f4dd7b13 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/autoassess_stratosphere/teq_100hpa.png differ diff --git a/doc/sphinx/source/recipes/figures/bock20jgr/model_bias_tas_annual_CMIP6.png b/doc/sphinx/source/recipes/figures/bock20jgr/model_bias_tas_annual_CMIP6.png new file mode 100644 index 0000000000..8d58031f92 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/bock20jgr/model_bias_tas_annual_CMIP6.png differ diff --git a/doc/sphinx/source/recipes/figures/bock20jgr/patterncor.png b/doc/sphinx/source/recipes/figures/bock20jgr/patterncor.png new file mode 100644 index 0000000000..0f3921721b Binary files /dev/null and b/doc/sphinx/source/recipes/figures/bock20jgr/patterncor.png differ diff --git a/doc/sphinx/source/recipes/figures/bock20jgr/ta850-global_to_swcre-global_RMSD.png b/doc/sphinx/source/recipes/figures/bock20jgr/ta850-global_to_swcre-global_RMSD.png new file mode 100644 index 0000000000..24545cf28d Binary files /dev/null and b/doc/sphinx/source/recipes/figures/bock20jgr/ta850-global_to_swcre-global_RMSD.png differ diff --git a/doc/sphinx/source/recipes/figures/bock20jgr/tas_Global_CMIP6_historical_anom_1850-2014.png b/doc/sphinx/source/recipes/figures/bock20jgr/tas_Global_CMIP6_historical_anom_1850-2014.png new file mode 100644 index 0000000000..0f0ddc11ab Binary files /dev/null and b/doc/sphinx/source/recipes/figures/bock20jgr/tas_Global_CMIP6_historical_anom_1850-2014.png differ diff --git a/doc/sphinx/source/recipes/figures/bock20jgr/tas_Global_multimodel_anom_1850-2017.png b/doc/sphinx/source/recipes/figures/bock20jgr/tas_Global_multimodel_anom_1850-2017.png new file mode 100644 index 0000000000..8b327eba5e Binary files 
/dev/null and b/doc/sphinx/source/recipes/figures/bock20jgr/tas_Global_multimodel_anom_1850-2017.png differ diff --git a/doc/sphinx/source/recipes/figures/capacity_factor/capacity_factor_IPSL-CM5A-LR_1980-2005.png b/doc/sphinx/source/recipes/figures/capacity_factor/capacity_factor_IPSL-CM5A-LR_1980-2005.png deleted file mode 100644 index f65013283b..0000000000 Binary files a/doc/sphinx/source/recipes/figures/capacity_factor/capacity_factor_IPSL-CM5A-LR_1980-2005.png and /dev/null differ diff --git a/doc/sphinx/source/recipes/figures/capacity_factor/capacity_factor_IPSL-CM5A-MR_2021-2050.png b/doc/sphinx/source/recipes/figures/capacity_factor/capacity_factor_IPSL-CM5A-MR_2021-2050.png new file mode 100644 index 0000000000..4cab8e974a Binary files /dev/null and b/doc/sphinx/source/recipes/figures/capacity_factor/capacity_factor_IPSL-CM5A-MR_2021-2050.png differ diff --git a/doc/sphinx/source/recipes/figures/carvalhais14nat/global_matrix_map_ecosystem_carbon_turnover_time_Carvalhais2014_gn.png b/doc/sphinx/source/recipes/figures/carvalhais14nat/global_matrix_map_ecosystem_carbon_turnover_time_Carvalhais2014_gn.png new file mode 100644 index 0000000000..e0c146556f Binary files /dev/null and b/doc/sphinx/source/recipes/figures/carvalhais14nat/global_matrix_map_ecosystem_carbon_turnover_time_Carvalhais2014_gn.png differ diff --git a/doc/sphinx/source/recipes/figures/carvalhais14nat/global_multimodelAgreement_ecosystem_carbon_turnover_time_Carvalhais2014_gn.png b/doc/sphinx/source/recipes/figures/carvalhais14nat/global_multimodelAgreement_ecosystem_carbon_turnover_time_Carvalhais2014_gn.png new file mode 100644 index 0000000000..443d2a945a Binary files /dev/null and b/doc/sphinx/source/recipes/figures/carvalhais14nat/global_multimodelAgreement_ecosystem_carbon_turnover_time_Carvalhais2014_gn.png differ diff --git a/doc/sphinx/source/recipes/figures/carvalhais14nat/r_tau_ctotal_climate_pearson_Carvalhais2014_gnz.png b/doc/sphinx/source/recipes/figures/carvalhais14nat/r_tau_ctotal_climate_pearson_Carvalhais2014_gnz.png new file mode 100644 index 0000000000..1924e58b66 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/carvalhais14nat/r_tau_ctotal_climate_pearson_Carvalhais2014_gnz.png differ diff --git a/doc/sphinx/source/recipes/figures/carvalhais14nat/zonal_mean_ecosystem_carbon_turnover_time_Carvalhais2014_gnz.png b/doc/sphinx/source/recipes/figures/carvalhais14nat/zonal_mean_ecosystem_carbon_turnover_time_Carvalhais2014_gnz.png new file mode 100644 index 0000000000..5873985df8 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/carvalhais14nat/zonal_mean_ecosystem_carbon_turnover_time_Carvalhais2014_gnz.png differ diff --git a/doc/sphinx/source/recipes/figures/climate_patterns/patterns.png b/doc/sphinx/source/recipes/figures/climate_patterns/patterns.png new file mode 100644 index 0000000000..396fd78830 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/climate_patterns/patterns.png differ diff --git a/doc/sphinx/source/recipes/figures/climwip/independence_tas.png b/doc/sphinx/source/recipes/figures/climwip/independence_tas.png new file mode 100644 index 0000000000..a1190fbc82 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/climwip/independence_tas.png differ diff --git a/doc/sphinx/source/recipes/figures/climwip/performance_pr.png b/doc/sphinx/source/recipes/figures/climwip/performance_pr.png new file mode 100644 index 0000000000..6d5c5811bd Binary files /dev/null and b/doc/sphinx/source/recipes/figures/climwip/performance_pr.png differ diff 
--git a/doc/sphinx/source/recipes/figures/climwip/performance_sigma_calibration.png b/doc/sphinx/source/recipes/figures/climwip/performance_sigma_calibration.png new file mode 100644 index 0000000000..56dc600128 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/climwip/performance_sigma_calibration.png differ diff --git a/doc/sphinx/source/recipes/figures/climwip/temperature_anomaly_graph.png b/doc/sphinx/source/recipes/figures/climwip/temperature_anomaly_graph.png new file mode 100644 index 0000000000..b530fe21ba Binary files /dev/null and b/doc/sphinx/source/recipes/figures/climwip/temperature_anomaly_graph.png differ diff --git a/doc/sphinx/source/recipes/figures/climwip/temperature_change_weighted_map.png b/doc/sphinx/source/recipes/figures/climwip/temperature_change_weighted_map.png new file mode 100644 index 0000000000..be817ad0be Binary files /dev/null and b/doc/sphinx/source/recipes/figures/climwip/temperature_change_weighted_map.png differ diff --git a/doc/sphinx/source/recipes/figures/climwip/weights_tas.png b/doc/sphinx/source/recipes/figures/climwip/weights_tas.png new file mode 100644 index 0000000000..265cd8e9c0 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/climwip/weights_tas.png differ diff --git a/doc/sphinx/source/recipes/figures/clouds/cloud_lweffect.png b/doc/sphinx/source/recipes/figures/clouds/cloud_lweffect.png deleted file mode 100644 index 74bb43dab2..0000000000 Binary files a/doc/sphinx/source/recipes/figures/clouds/cloud_lweffect.png and /dev/null differ diff --git a/doc/sphinx/source/recipes/figures/clouds/cloud_sweffect.png b/doc/sphinx/source/recipes/figures/clouds/cloud_sweffect.png deleted file mode 100644 index 52b00c8c45..0000000000 Binary files a/doc/sphinx/source/recipes/figures/clouds/cloud_sweffect.png and /dev/null differ diff --git a/doc/sphinx/source/recipes/figures/clouds/cloud_var_multi.png b/doc/sphinx/source/recipes/figures/clouds/cloud_var_multi.png deleted file mode 100644 index 54a85593ae..0000000000 Binary files a/doc/sphinx/source/recipes/figures/clouds/cloud_var_multi.png and /dev/null differ diff --git a/doc/sphinx/source/recipes/figures/clouds/clouds_dyn_matrix_ts_wap_clt_cmip6_ocean.png b/doc/sphinx/source/recipes/figures/clouds/clouds_dyn_matrix_ts_wap_clt_cmip6_ocean.png new file mode 100644 index 0000000000..b7a1680365 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/clouds/clouds_dyn_matrix_ts_wap_clt_cmip6_ocean.png differ diff --git a/doc/sphinx/source/recipes/figures/clouds/clouds_interannual_pr.png b/doc/sphinx/source/recipes/figures/clouds/clouds_interannual_pr.png new file mode 100644 index 0000000000..49ee986a7f Binary files /dev/null and b/doc/sphinx/source/recipes/figures/clouds/clouds_interannual_pr.png differ diff --git a/doc/sphinx/source/recipes/figures/clouds/clouds_ipcc_lwcre_annual.png b/doc/sphinx/source/recipes/figures/clouds/clouds_ipcc_lwcre_annual.png new file mode 100644 index 0000000000..d8dcd97ecd Binary files /dev/null and b/doc/sphinx/source/recipes/figures/clouds/clouds_ipcc_lwcre_annual.png differ diff --git a/doc/sphinx/source/recipes/figures/clouds/clouds_ipcc_netcre_annual.png b/doc/sphinx/source/recipes/figures/clouds/clouds_ipcc_netcre_annual.png new file mode 100644 index 0000000000..507c4a9d36 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/clouds/clouds_ipcc_netcre_annual.png differ diff --git a/doc/sphinx/source/recipes/figures/clouds/clouds_ipcc_swcre_annual.png b/doc/sphinx/source/recipes/figures/clouds/clouds_ipcc_swcre_annual.png new 
file mode 100644 index 0000000000..0fe217b762 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/clouds/clouds_ipcc_swcre_annual.png differ diff --git a/doc/sphinx/source/recipes/figures/clouds/clouds_lwp_annual.png b/doc/sphinx/source/recipes/figures/clouds/clouds_lwp_annual.png new file mode 100644 index 0000000000..08e1062510 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/clouds/clouds_lwp_annual.png differ diff --git a/doc/sphinx/source/recipes/figures/clouds/clouds_pdf_clt_so_cmip6_line.png b/doc/sphinx/source/recipes/figures/clouds/clouds_pdf_clt_so_cmip6_line.png new file mode 100644 index 0000000000..70fe1e34a3 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/clouds/clouds_pdf_clt_so_cmip6_line.png differ diff --git a/doc/sphinx/source/recipes/figures/clouds/clouds_scatter_clt_swcre_so_cmip6.png b/doc/sphinx/source/recipes/figures/clouds/clouds_scatter_clt_swcre_so_cmip6.png new file mode 100644 index 0000000000..8bfa8a04b4 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/clouds/clouds_scatter_clt_swcre_so_cmip6.png differ diff --git a/doc/sphinx/source/recipes/figures/clouds/clouds_taylor_clt_annual.png b/doc/sphinx/source/recipes/figures/clouds/clouds_taylor_clt_annual.png new file mode 100644 index 0000000000..76d4b41830 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/clouds/clouds_taylor_clt_annual.png differ diff --git a/doc/sphinx/source/recipes/figures/clouds/clouds_zonal_clcalipso_annual_cmip6.png b/doc/sphinx/source/recipes/figures/clouds/clouds_zonal_clcalipso_annual_cmip6.png new file mode 100644 index 0000000000..f32de2a8d6 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/clouds/clouds_zonal_clcalipso_annual_cmip6.png differ diff --git a/doc/sphinx/source/recipes/figures/clouds/liq_h2o_path_multi.png b/doc/sphinx/source/recipes/figures/clouds/liq_h2o_path_multi.png deleted file mode 100644 index 57052e850d..0000000000 Binary files a/doc/sphinx/source/recipes/figures/clouds/liq_h2o_path_multi.png and /dev/null differ diff --git a/doc/sphinx/source/recipes/figures/clouds/liq_h2o_taylor.png b/doc/sphinx/source/recipes/figures/clouds/liq_h2o_taylor.png deleted file mode 100644 index 7e1e27ab1b..0000000000 Binary files a/doc/sphinx/source/recipes/figures/clouds/liq_h2o_taylor.png and /dev/null differ diff --git a/doc/sphinx/source/recipes/figures/cmug_h2o/fig_ERA-Interim_Cold_point_tropopause_Specific_Humidity.png b/doc/sphinx/source/recipes/figures/cmug_h2o/fig_ERA-Interim_Cold_point_tropopause_Specific_Humidity.png new file mode 100644 index 0000000000..3e8ab703fd Binary files /dev/null and b/doc/sphinx/source/recipes/figures/cmug_h2o/fig_ERA-Interim_Cold_point_tropopause_Specific_Humidity.png differ diff --git a/doc/sphinx/source/recipes/figures/cmug_h2o/fig_ERA-Interim_Cold_point_tropopause_Specific_Humidity_map.png b/doc/sphinx/source/recipes/figures/cmug_h2o/fig_ERA-Interim_Cold_point_tropopause_Specific_Humidity_map.png new file mode 100644 index 0000000000..fa71cd4b09 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/cmug_h2o/fig_ERA-Interim_Cold_point_tropopause_Specific_Humidity_map.png differ diff --git a/doc/sphinx/source/recipes/figures/cmug_h2o/fig_ERA-Interim_Zonal_mean_Specific_Humidity.png b/doc/sphinx/source/recipes/figures/cmug_h2o/fig_ERA-Interim_Zonal_mean_Specific_Humidity.png new file mode 100644 index 0000000000..45db2b5664 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/cmug_h2o/fig_ERA-Interim_Zonal_mean_Specific_Humidity.png differ diff 
--git a/doc/sphinx/source/recipes/figures/cmug_h2o/fig_profile_Specific_Humidity.png b/doc/sphinx/source/recipes/figures/cmug_h2o/fig_profile_Specific_Humidity.png new file mode 100644 index 0000000000..55240cfeb3 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/cmug_h2o/fig_profile_Specific_Humidity.png differ diff --git a/doc/sphinx/source/recipes/figures/collins13ipcc/collins_fig_1.png b/doc/sphinx/source/recipes/figures/collins13ipcc/collins_fig_1.png new file mode 100644 index 0000000000..3b4a0ff3c7 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/collins13ipcc/collins_fig_1.png differ diff --git a/doc/sphinx/source/recipes/figures/collins13ipcc/collins_fig_2.png b/doc/sphinx/source/recipes/figures/collins13ipcc/collins_fig_2.png new file mode 100644 index 0000000000..84bdcff60e Binary files /dev/null and b/doc/sphinx/source/recipes/figures/collins13ipcc/collins_fig_2.png differ diff --git a/doc/sphinx/source/recipes/figures/collins13ipcc/collins_fig_3.png b/doc/sphinx/source/recipes/figures/collins13ipcc/collins_fig_3.png new file mode 100644 index 0000000000..14cf181140 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/collins13ipcc/collins_fig_3.png differ diff --git a/doc/sphinx/source/recipes/figures/collins13ipcc/collins_fig_4.png b/doc/sphinx/source/recipes/figures/collins13ipcc/collins_fig_4.png new file mode 100644 index 0000000000..c963855d4b Binary files /dev/null and b/doc/sphinx/source/recipes/figures/collins13ipcc/collins_fig_4.png differ diff --git a/doc/sphinx/source/recipes/figures/combined_climate_extreme_index/t90p_IPSL-CM5A-LR_rcp85_2020_2040.png b/doc/sphinx/source/recipes/figures/combined_climate_extreme_index/t90p_IPSL-CM5A-LR_rcp85_2020_2040.png deleted file mode 100644 index cb48e377ec..0000000000 Binary files a/doc/sphinx/source/recipes/figures/combined_climate_extreme_index/t90p_IPSL-CM5A-LR_rcp85_2020_2040.png and /dev/null differ diff --git a/doc/sphinx/source/recipes/figures/consecdrydays/consec_example_freq.png b/doc/sphinx/source/recipes/figures/consecdrydays/consec_example_freq.png new file mode 100644 index 0000000000..350f3ad11b Binary files /dev/null and b/doc/sphinx/source/recipes/figures/consecdrydays/consec_example_freq.png differ diff --git a/doc/sphinx/source/recipes/figures/cos22esd/pr_45.png b/doc/sphinx/source/recipes/figures/cos22esd/pr_45.png new file mode 100644 index 0000000000..118f6fb004 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/cos22esd/pr_45.png differ diff --git a/doc/sphinx/source/recipes/figures/cos22esd/scenario_combination_tas-tas_jja.png b/doc/sphinx/source/recipes/figures/cos22esd/scenario_combination_tas-tas_jja.png new file mode 100644 index 0000000000..d422d0c684 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/cos22esd/scenario_combination_tas-tas_jja.png differ diff --git a/doc/sphinx/source/recipes/figures/cos22esd/tas_45.png b/doc/sphinx/source/recipes/figures/cos22esd/tas_45.png new file mode 100644 index 0000000000..443da9c10b Binary files /dev/null and b/doc/sphinx/source/recipes/figures/cos22esd/tas_45.png differ diff --git a/doc/sphinx/source/recipes/figures/cox18nature/emergent_relationship_HadCRUT4.png b/doc/sphinx/source/recipes/figures/cox18nature/emergent_relationship_HadCRUT4.png new file mode 100644 index 0000000000..c0e7da1db2 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/cox18nature/emergent_relationship_HadCRUT4.png differ diff --git a/doc/sphinx/source/recipes/figures/cox18nature/pdf_HadCRUT4.png 
b/doc/sphinx/source/recipes/figures/cox18nature/pdf_HadCRUT4.png new file mode 100644 index 0000000000..ab5c2e222a Binary files /dev/null and b/doc/sphinx/source/recipes/figures/cox18nature/pdf_HadCRUT4.png differ diff --git a/doc/sphinx/source/recipes/figures/cox18nature/temperature_anomaly_HadCRUT4.png b/doc/sphinx/source/recipes/figures/cox18nature/temperature_anomaly_HadCRUT4.png new file mode 100644 index 0000000000..a3ece05eed Binary files /dev/null and b/doc/sphinx/source/recipes/figures/cox18nature/temperature_anomaly_HadCRUT4.png differ diff --git a/doc/sphinx/source/recipes/figures/cvdp/nam.prreg.ann.png b/doc/sphinx/source/recipes/figures/cvdp/nam.prreg.ann.png new file mode 100755 index 0000000000..70e788bec6 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/cvdp/nam.prreg.ann.png differ diff --git a/doc/sphinx/source/recipes/figures/deangelis15nat/bar_all.png b/doc/sphinx/source/recipes/figures/deangelis15nat/bar_all.png new file mode 100644 index 0000000000..edbb253e2f Binary files /dev/null and b/doc/sphinx/source/recipes/figures/deangelis15nat/bar_all.png differ diff --git a/doc/sphinx/source/recipes/figures/deangelis15nat/exfig2a.png b/doc/sphinx/source/recipes/figures/deangelis15nat/exfig2a.png new file mode 100644 index 0000000000..554dfb07dd Binary files /dev/null and b/doc/sphinx/source/recipes/figures/deangelis15nat/exfig2a.png differ diff --git a/doc/sphinx/source/recipes/figures/deangelis15nat/fig3b.png b/doc/sphinx/source/recipes/figures/deangelis15nat/fig3b.png new file mode 100644 index 0000000000..bf323365b5 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/deangelis15nat/fig3b.png differ diff --git a/doc/sphinx/source/recipes/figures/deangelis15nat/fig_deangelis_cmug_cdr2.png b/doc/sphinx/source/recipes/figures/deangelis15nat/fig_deangelis_cmug_cdr2.png new file mode 100644 index 0000000000..ad05ffa876 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/deangelis15nat/fig_deangelis_cmug_cdr2.png differ diff --git a/doc/sphinx/source/recipes/figures/diurnal_temp_index/Seasonal_DTRindicator_MPI-ESM-MR_2030_2080_1961_1990.png b/doc/sphinx/source/recipes/figures/diurnal_temp_index/Seasonal_DTRindicator_MPI-ESM-MR_2030_2080_1961_1990.png new file mode 100644 index 0000000000..1f07375a09 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/diurnal_temp_index/Seasonal_DTRindicator_MPI-ESM-MR_2030_2080_1961_1990.png differ diff --git a/doc/sphinx/source/recipes/figures/diurnal_temp_index/rcp85_diurnal.png b/doc/sphinx/source/recipes/figures/diurnal_temp_index/rcp85_diurnal.png deleted file mode 100644 index 0747de51b0..0000000000 Binary files a/doc/sphinx/source/recipes/figures/diurnal_temp_index/rcp85_diurnal.png and /dev/null differ diff --git a/doc/sphinx/source/recipes/figures/droughtindex/martin18grl_fig1.png b/doc/sphinx/source/recipes/figures/droughtindex/martin18grl_fig1.png new file mode 100644 index 0000000000..190570f625 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/droughtindex/martin18grl_fig1.png differ diff --git a/doc/sphinx/source/recipes/figures/droughtindex/martin18grl_fig2.png b/doc/sphinx/source/recipes/figures/droughtindex/martin18grl_fig2.png new file mode 100644 index 0000000000..11f2642e1f Binary files /dev/null and b/doc/sphinx/source/recipes/figures/droughtindex/martin18grl_fig2.png differ diff --git a/doc/sphinx/source/recipes/figures/eady_growth_rate/HadGEM3-GC31-LM_winter_eady_growth_rate_70000.png 
b/doc/sphinx/source/recipes/figures/eady_growth_rate/HadGEM3-GC31-LM_winter_eady_growth_rate_70000.png new file mode 100644 index 0000000000..6c2eb7bf9b Binary files /dev/null and b/doc/sphinx/source/recipes/figures/eady_growth_rate/HadGEM3-GC31-LM_winter_eady_growth_rate_70000.png differ diff --git a/doc/sphinx/source/recipes/figures/ecs/CanESM2.png b/doc/sphinx/source/recipes/figures/ecs/CanESM2.png new file mode 100644 index 0000000000..82b5d7500e Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ecs/CanESM2.png differ diff --git a/doc/sphinx/source/recipes/figures/emergent_constraints/covrefl.png b/doc/sphinx/source/recipes/figures/emergent_constraints/covrefl.png new file mode 100644 index 0000000000..64116d09a1 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/emergent_constraints/covrefl.png differ diff --git a/doc/sphinx/source/recipes/figures/emergent_constraints/humidx.png b/doc/sphinx/source/recipes/figures/emergent_constraints/humidx.png new file mode 100644 index 0000000000..bfbd86891f Binary files /dev/null and b/doc/sphinx/source/recipes/figures/emergent_constraints/humidx.png differ diff --git a/doc/sphinx/source/recipes/figures/emergent_constraints/itczidx.png b/doc/sphinx/source/recipes/figures/emergent_constraints/itczidx.png new file mode 100644 index 0000000000..ce2a019b1f Binary files /dev/null and b/doc/sphinx/source/recipes/figures/emergent_constraints/itczidx.png differ diff --git a/doc/sphinx/source/recipes/figures/emergent_constraints/li17natcc_fig2a.png b/doc/sphinx/source/recipes/figures/emergent_constraints/li17natcc_fig2a.png new file mode 100644 index 0000000000..f55bb4a728 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/emergent_constraints/li17natcc_fig2a.png differ diff --git a/doc/sphinx/source/recipes/figures/emergent_constraints/li17natcc_fig2b.png b/doc/sphinx/source/recipes/figures/emergent_constraints/li17natcc_fig2b.png new file mode 100644 index 0000000000..25c1ebb1ab Binary files /dev/null and b/doc/sphinx/source/recipes/figures/emergent_constraints/li17natcc_fig2b.png differ diff --git a/doc/sphinx/source/recipes/figures/emergent_constraints/li17natcc_fig2c.png b/doc/sphinx/source/recipes/figures/emergent_constraints/li17natcc_fig2c.png new file mode 100644 index 0000000000..df9aa35a91 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/emergent_constraints/li17natcc_fig2c.png differ diff --git a/doc/sphinx/source/recipes/figures/emergent_constraints/li17natcc_fig2d.png b/doc/sphinx/source/recipes/figures/emergent_constraints/li17natcc_fig2d.png new file mode 100644 index 0000000000..27ccb7a5ef Binary files /dev/null and b/doc/sphinx/source/recipes/figures/emergent_constraints/li17natcc_fig2d.png differ diff --git a/doc/sphinx/source/recipes/figures/emergent_constraints/ltmi.png b/doc/sphinx/source/recipes/figures/emergent_constraints/ltmi.png new file mode 100644 index 0000000000..e3a78da58d Binary files /dev/null and b/doc/sphinx/source/recipes/figures/emergent_constraints/ltmi.png differ diff --git a/doc/sphinx/source/recipes/figures/emergent_constraints/shhc.png b/doc/sphinx/source/recipes/figures/emergent_constraints/shhc.png new file mode 100644 index 0000000000..a618a7190b Binary files /dev/null and b/doc/sphinx/source/recipes/figures/emergent_constraints/shhc.png differ diff --git a/doc/sphinx/source/recipes/figures/emergent_constraints/volodin.png b/doc/sphinx/source/recipes/figures/emergent_constraints/volodin.png new file mode 100644 index 0000000000..9d1918cb73 Binary files 
/dev/null and b/doc/sphinx/source/recipes/figures/emergent_constraints/volodin.png differ diff --git a/doc/sphinx/source/recipes/figures/ensclus/ensclus.png b/doc/sphinx/source/recipes/figures/ensclus/ensclus.png index 1335e6d99d..9e721eeea7 100644 Binary files a/doc/sphinx/source/recipes/figures/ensclus/ensclus.png and b/doc/sphinx/source/recipes/figures/ensclus/ensclus.png differ diff --git a/doc/sphinx/source/recipes/figures/examples/IPCC_AR6_figure_9.3a_1850-2100.png b/doc/sphinx/source/recipes/figures/examples/IPCC_AR6_figure_9.3a_1850-2100.png new file mode 100644 index 0000000000..67bccb204e Binary files /dev/null and b/doc/sphinx/source/recipes/figures/examples/IPCC_AR6_figure_9.3a_1850-2100.png differ diff --git a/doc/sphinx/source/recipes/figures/examples/decadal_first_example.png b/doc/sphinx/source/recipes/figures/examples/decadal_first_example.png new file mode 100644 index 0000000000..11a89aabed Binary files /dev/null and b/doc/sphinx/source/recipes/figures/examples/decadal_first_example.png differ diff --git a/doc/sphinx/source/recipes/figures/examples/elbe.png b/doc/sphinx/source/recipes/figures/examples/elbe.png new file mode 100644 index 0000000000..db42d16bad Binary files /dev/null and b/doc/sphinx/source/recipes/figures/examples/elbe.png differ diff --git a/doc/sphinx/source/recipes/figures/examples/map.png b/doc/sphinx/source/recipes/figures/examples/map.png new file mode 100644 index 0000000000..e2240d0bd9 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/examples/map.png differ diff --git a/doc/sphinx/source/recipes/figures/examples/timeseries.png b/doc/sphinx/source/recipes/figures/examples/timeseries.png new file mode 100644 index 0000000000..2292870ca5 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/examples/timeseries.png differ diff --git a/doc/sphinx/source/recipes/figures/extreme_events/cdd_timeseries.png b/doc/sphinx/source/recipes/figures/extreme_events/cdd_timeseries.png new file mode 100644 index 0000000000..e67e0f0180 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/extreme_events/cdd_timeseries.png differ diff --git a/doc/sphinx/source/recipes/figures/extreme_events/gleckler.png b/doc/sphinx/source/recipes/figures/extreme_events/gleckler.png new file mode 100644 index 0000000000..9120303ea0 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/extreme_events/gleckler.png differ diff --git a/doc/sphinx/source/recipes/figures/eyring06jgr/fig_diagn01.png b/doc/sphinx/source/recipes/figures/eyring06jgr/fig_diagn01.png new file mode 100644 index 0000000000..d3a526e71c Binary files /dev/null and b/doc/sphinx/source/recipes/figures/eyring06jgr/fig_diagn01.png differ diff --git a/doc/sphinx/source/recipes/figures/eyring13jgr/fig_eyr13jgr_12.png b/doc/sphinx/source/recipes/figures/eyring13jgr/fig_eyr13jgr_12.png new file mode 100644 index 0000000000..101e690dbd Binary files /dev/null and b/doc/sphinx/source/recipes/figures/eyring13jgr/fig_eyr13jgr_12.png differ diff --git a/doc/sphinx/source/recipes/figures/galytska23jgr/Timeseries_Arctic_temperature_anomalies.png b/doc/sphinx/source/recipes/figures/galytska23jgr/Timeseries_Arctic_temperature_anomalies.png new file mode 100644 index 0000000000..77768d11b7 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/galytska23jgr/Timeseries_Arctic_temperature_anomalies.png differ diff --git a/doc/sphinx/source/recipes/figures/gier20bg/fig01.png b/doc/sphinx/source/recipes/figures/gier20bg/fig01.png new file mode 100644 index 0000000000..87e0d0ac64 Binary files 
/dev/null and b/doc/sphinx/source/recipes/figures/gier20bg/fig01.png differ diff --git a/doc/sphinx/source/recipes/figures/gier20bg/fig02.png b/doc/sphinx/source/recipes/figures/gier20bg/fig02.png new file mode 100644 index 0000000000..03d421c1c5 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/gier20bg/fig02.png differ diff --git a/doc/sphinx/source/recipes/figures/gier20bg/fig03.png b/doc/sphinx/source/recipes/figures/gier20bg/fig03.png new file mode 100644 index 0000000000..50f43c2f54 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/gier20bg/fig03.png differ diff --git a/doc/sphinx/source/recipes/figures/gier20bg/fig04.png b/doc/sphinx/source/recipes/figures/gier20bg/fig04.png new file mode 100644 index 0000000000..99785ad27d Binary files /dev/null and b/doc/sphinx/source/recipes/figures/gier20bg/fig04.png differ diff --git a/doc/sphinx/source/recipes/figures/gier20bg/fig05.png b/doc/sphinx/source/recipes/figures/gier20bg/fig05.png new file mode 100644 index 0000000000..d7f78ac644 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/gier20bg/fig05.png differ diff --git a/doc/sphinx/source/recipes/figures/gier20bg/fig06.png b/doc/sphinx/source/recipes/figures/gier20bg/fig06.png new file mode 100644 index 0000000000..c108c6d525 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/gier20bg/fig06.png differ diff --git a/doc/sphinx/source/recipes/figures/gier20bg/fig07.png b/doc/sphinx/source/recipes/figures/gier20bg/fig07.png new file mode 100644 index 0000000000..cea0cfb500 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/gier20bg/fig07.png differ diff --git a/doc/sphinx/source/recipes/figures/gier20bg/fig08.png b/doc/sphinx/source/recipes/figures/gier20bg/fig08.png new file mode 100644 index 0000000000..5dceec1d94 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/gier20bg/fig08.png differ diff --git a/doc/sphinx/source/recipes/figures/hydrology/Precipitation_climatology_day_of_year_plot.png b/doc/sphinx/source/recipes/figures/hydrology/Precipitation_climatology_day_of_year_plot.png new file mode 100644 index 0000000000..50a6ee2dd7 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/hydrology/Precipitation_climatology_day_of_year_plot.png differ diff --git a/doc/sphinx/source/recipes/figures/hydrology/Precipitation_climatology_month_number_plot.png b/doc/sphinx/source/recipes/figures/hydrology/Precipitation_climatology_month_number_plot.png new file mode 100644 index 0000000000..b062673bed Binary files /dev/null and b/doc/sphinx/source/recipes/figures/hydrology/Precipitation_climatology_month_number_plot.png differ diff --git a/doc/sphinx/source/recipes/figures/hydrology/Precipitation_day_plot.png b/doc/sphinx/source/recipes/figures/hydrology/Precipitation_day_plot.png new file mode 100644 index 0000000000..73430c1ebb Binary files /dev/null and b/doc/sphinx/source/recipes/figures/hydrology/Precipitation_day_plot.png differ diff --git a/doc/sphinx/source/recipes/figures/hydrology/Precipitation_month_plot.png b/doc/sphinx/source/recipes/figures/hydrology/Precipitation_month_plot.png new file mode 100644 index 0000000000..e843d6a674 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/hydrology/Precipitation_month_plot.png differ diff --git a/doc/sphinx/source/recipes/figures/hyint/hyint_maps.png b/doc/sphinx/source/recipes/figures/hyint/hyint_maps.png new file mode 100644 index 0000000000..7d2075514f Binary files /dev/null and b/doc/sphinx/source/recipes/figures/hyint/hyint_maps.png differ diff 
--git a/doc/sphinx/source/recipes/figures/hyint/hyint_timeseries.png b/doc/sphinx/source/recipes/figures/hyint/hyint_timeseries.png new file mode 100644 index 0000000000..549678d667 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/hyint/hyint_timeseries.png differ diff --git a/doc/sphinx/source/recipes/figures/hyint/hyint_trends.png b/doc/sphinx/source/recipes/figures/hyint/hyint_trends.png new file mode 100644 index 0000000000..018c0bc903 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/hyint/hyint_trends.png differ diff --git a/doc/sphinx/source/recipes/figures/iht_toa/figure1_CERES-EBAF_CERES-EBAF.png b/doc/sphinx/source/recipes/figures/iht_toa/figure1_CERES-EBAF_CERES-EBAF.png new file mode 100644 index 0000000000..18aa689123 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/iht_toa/figure1_CERES-EBAF_CERES-EBAF.png differ diff --git a/doc/sphinx/source/recipes/figures/iht_toa/figure2_CERES-EBAF_CERES-EBAF.png b/doc/sphinx/source/recipes/figures/iht_toa/figure2_CERES-EBAF_CERES-EBAF.png new file mode 100644 index 0000000000..ae8afa7d25 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/iht_toa/figure2_CERES-EBAF_CERES-EBAF.png differ diff --git a/doc/sphinx/source/recipes/figures/iht_toa/figure3_CERES-EBAF_CERES-EBAF.png b/doc/sphinx/source/recipes/figures/iht_toa/figure3_CERES-EBAF_CERES-EBAF.png new file mode 100644 index 0000000000..6562693268 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/iht_toa/figure3_CERES-EBAF_CERES-EBAF.png differ diff --git a/doc/sphinx/source/recipes/figures/iht_toa/figure4_CERES-EBAF_CERES-EBAF.png b/doc/sphinx/source/recipes/figures/iht_toa/figure4_CERES-EBAF_CERES-EBAF.png new file mode 100644 index 0000000000..21076317ff Binary files /dev/null and b/doc/sphinx/source/recipes/figures/iht_toa/figure4_CERES-EBAF_CERES-EBAF.png differ diff --git a/doc/sphinx/source/recipes/figures/iht_toa/figure5_CERES-EBAF_CERES-EBAF.png b/doc/sphinx/source/recipes/figures/iht_toa/figure5_CERES-EBAF_CERES-EBAF.png new file mode 100644 index 0000000000..e241ae07e3 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/iht_toa/figure5_CERES-EBAF_CERES-EBAF.png differ diff --git a/doc/sphinx/source/recipes/figures/iht_toa/figure6_CERES-EBAF_CERES-EBAF.png b/doc/sphinx/source/recipes/figures/iht_toa/figure6_CERES-EBAF_CERES-EBAF.png new file mode 100644 index 0000000000..073ccff158 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/iht_toa/figure6_CERES-EBAF_CERES-EBAF.png differ diff --git a/doc/sphinx/source/recipes/figures/impact/bias_vs_change.png b/doc/sphinx/source/recipes/figures/impact/bias_vs_change.png new file mode 100755 index 0000000000..f48266e9fe Binary files /dev/null and b/doc/sphinx/source/recipes/figures/impact/bias_vs_change.png differ diff --git a/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-14.png b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-14.png new file mode 100644 index 0000000000..210d63013b Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-14.png differ diff --git a/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-2.png b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-2.png new file mode 100644 index 0000000000..51ab5b4b99 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-2.png differ diff --git a/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-26.png b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-26.png new file mode 100644 index 
0000000000..9ddd3d8ff8 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-26.png differ diff --git a/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-27.png b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-27.png new file mode 100644 index 0000000000..572682d836 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-27.png differ diff --git a/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-3.png b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-3.png new file mode 100644 index 0000000000..fabb142682 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-3.png differ diff --git a/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-38-pr.png b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-38-pr.png new file mode 100644 index 0000000000..157779d269 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-38-pr.png differ diff --git a/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-38.png b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-38.png new file mode 100644 index 0000000000..2caa4b53e4 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-38.png differ diff --git a/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-38_regions.png b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-38_regions.png new file mode 100644 index 0000000000..4bb2549bea Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-38_regions.png differ diff --git a/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-39-pr.png b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-39-pr.png new file mode 100644 index 0000000000..1f256232ff Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-39-pr.png differ diff --git a/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-39.png b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-39.png new file mode 100644 index 0000000000..05e6318493 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-39.png differ diff --git a/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-39_regions.png b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-39_regions.png new file mode 100644 index 0000000000..58fd4541af Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-39_regions.png differ diff --git a/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-4.png b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-4.png new file mode 100644 index 0000000000..e8bc5a87a9 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-4.png differ diff --git a/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-40-pr.png b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-40-pr.png new file mode 100644 index 0000000000..39afa9670c Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-40-pr.png differ diff --git a/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-40.png b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-40.png new file mode 100644 index 0000000000..d62dc66213 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-40.png differ diff --git a/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-40_regions.png b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-40_regions.png new file mode 100644 index 0000000000..cecc7fdb3f Binary files /dev/null and 
b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-40_regions.png differ diff --git a/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-41b.png b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-41b.png new file mode 100644 index 0000000000..c2ea620059 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-41b.png differ diff --git a/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-42a.png b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-42a.png new file mode 100644 index 0000000000..64d389ab25 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-42a.png differ diff --git a/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-42b.png b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-42b.png new file mode 100644 index 0000000000..28498cf847 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-42b.png differ diff --git a/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-45a.png b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-45a.png new file mode 100644 index 0000000000..943b1c345f Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-45a.png differ diff --git a/doc/sphinx/source/recipes/figures/clouds/cloud_neteffect.png b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-5.png similarity index 100% rename from doc/sphinx/source/recipes/figures/clouds/cloud_neteffect.png rename to doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-5.png diff --git a/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-6.png b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-6.png new file mode 100644 index 0000000000..056ee75f6f Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-6.png differ diff --git a/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-8.png b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-8.png new file mode 100644 index 0000000000..42c13a2d32 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ipccwg1ar5ch9/fig-9-8.png differ diff --git a/doc/sphinx/source/recipes/figures/ipccwg1ar6ch3/fig_3_42_a.png b/doc/sphinx/source/recipes/figures/ipccwg1ar6ch3/fig_3_42_a.png new file mode 100644 index 0000000000..59dbf502b2 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ipccwg1ar6ch3/fig_3_42_a.png differ diff --git a/doc/sphinx/source/recipes/figures/ipccwg1ar6ch3/gsat_Global_CMIP6_historical-ssp245_anom_1850-2020.png b/doc/sphinx/source/recipes/figures/ipccwg1ar6ch3/gsat_Global_CMIP6_historical-ssp245_anom_1850-2020.png new file mode 100644 index 0000000000..3e90ecc49f Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ipccwg1ar6ch3/gsat_Global_CMIP6_historical-ssp245_anom_1850-2020.png differ diff --git a/doc/sphinx/source/recipes/figures/ipccwg1ar6ch3/gsat_Global_multimodel_anom_1850-2020.png b/doc/sphinx/source/recipes/figures/ipccwg1ar6ch3/gsat_Global_multimodel_anom_1850-2020.png new file mode 100644 index 0000000000..83ea186a09 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ipccwg1ar6ch3/gsat_Global_multimodel_anom_1850-2020.png differ diff --git a/doc/sphinx/source/recipes/figures/ipccwg1ar6ch3/model_bias_pr_annualclim_CMIP6.png b/doc/sphinx/source/recipes/figures/ipccwg1ar6ch3/model_bias_pr_annualclim_CMIP6.png new file mode 100644 index 0000000000..d2bb9bbdf9 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ipccwg1ar6ch3/model_bias_pr_annualclim_CMIP6.png differ diff --git 
a/doc/sphinx/source/recipes/figures/ipccwg1ar6ch3/model_bias_tas_annualclim_CMIP6.png b/doc/sphinx/source/recipes/figures/ipccwg1ar6ch3/model_bias_tas_annualclim_CMIP6.png new file mode 100644 index 0000000000..7f04f0d13c Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ipccwg1ar6ch3/model_bias_tas_annualclim_CMIP6.png differ diff --git a/doc/sphinx/source/recipes/figures/ipccwg1ar6ch3/patterncor.png b/doc/sphinx/source/recipes/figures/ipccwg1ar6ch3/patterncor.png new file mode 100644 index 0000000000..059d897e1d Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ipccwg1ar6ch3/patterncor.png differ diff --git a/doc/sphinx/source/recipes/figures/ipccwg1ar6ch3/precip_anom_1950-2014.png b/doc/sphinx/source/recipes/figures/ipccwg1ar6ch3/precip_anom_1950-2014.png new file mode 100644 index 0000000000..63c07e37dc Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ipccwg1ar6ch3/precip_anom_1950-2014.png differ diff --git a/doc/sphinx/source/recipes/figures/ipccwg1ar6ch3/tas_anom_damip_global_1850-2020.png b/doc/sphinx/source/recipes/figures/ipccwg1ar6ch3/tas_anom_damip_global_1850-2020.png new file mode 100644 index 0000000000..86f825c698 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ipccwg1ar6ch3/tas_anom_damip_global_1850-2020.png differ diff --git a/doc/sphinx/source/recipes/figures/ipccwg1ar6ch3/tas_std_dev_zonmean.png b/doc/sphinx/source/recipes/figures/ipccwg1ar6ch3/tas_std_dev_zonmean.png new file mode 100644 index 0000000000..19d0eea097 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ipccwg1ar6ch3/tas_std_dev_zonmean.png differ diff --git a/doc/sphinx/source/recipes/figures/ipccwg1ar6ch3/zonal_westerly_winds.png b/doc/sphinx/source/recipes/figures/ipccwg1ar6ch3/zonal_westerly_winds.png new file mode 100644 index 0000000000..d70723d69f Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ipccwg1ar6ch3/zonal_westerly_winds.png differ diff --git a/doc/sphinx/source/recipes/figures/kcs/global_matching.png b/doc/sphinx/source/recipes/figures/kcs/global_matching.png new file mode 100644 index 0000000000..754aadd6d1 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/kcs/global_matching.png differ diff --git a/doc/sphinx/source/recipes/figures/kcs/local_validation_2085.png b/doc/sphinx/source/recipes/figures/kcs/local_validation_2085.png new file mode 100644 index 0000000000..cd43fe4c85 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/kcs/local_validation_2085.png differ diff --git a/doc/sphinx/source/recipes/figures/lst/lst_example.png b/doc/sphinx/source/recipes/figures/lst/lst_example.png new file mode 100644 index 0000000000..f51c5bc3b0 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/lst/lst_example.png differ diff --git a/doc/sphinx/source/recipes/figures/meehl20sciadv/cmip6_gregory_regression.png b/doc/sphinx/source/recipes/figures/meehl20sciadv/cmip6_gregory_regression.png new file mode 100644 index 0000000000..eeeb874f86 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/meehl20sciadv/cmip6_gregory_regression.png differ diff --git a/doc/sphinx/source/recipes/figures/meehl20sciadv/cmip6_tcr_vs_ecs.png b/doc/sphinx/source/recipes/figures/meehl20sciadv/cmip6_tcr_vs_ecs.png new file mode 100644 index 0000000000..d287a023e0 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/meehl20sciadv/cmip6_tcr_vs_ecs.png differ diff --git a/doc/sphinx/source/recipes/figures/model_evaluation/annual_cycle_clt_southerocean_Amon.jpg 
b/doc/sphinx/source/recipes/figures/model_evaluation/annual_cycle_clt_southerocean_Amon.jpg new file mode 100644 index 0000000000..0e1e8a4531 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/model_evaluation/annual_cycle_clt_southerocean_Amon.jpg differ diff --git a/doc/sphinx/source/recipes/figures/model_evaluation/map_swcre_MPI-ESM1-2-HR_Amon.jpg b/doc/sphinx/source/recipes/figures/model_evaluation/map_swcre_MPI-ESM1-2-HR_Amon.jpg new file mode 100644 index 0000000000..f6abf01516 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/model_evaluation/map_swcre_MPI-ESM1-2-HR_Amon.jpg differ diff --git a/doc/sphinx/source/recipes/figures/model_evaluation/map_tas_MPI-ESM1-2-HR_Amon.jpg b/doc/sphinx/source/recipes/figures/model_evaluation/map_tas_MPI-ESM1-2-HR_Amon.jpg new file mode 100644 index 0000000000..50b5ebbd20 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/model_evaluation/map_tas_MPI-ESM1-2-HR_Amon.jpg differ diff --git a/doc/sphinx/source/recipes/figures/model_evaluation/timeseries_rtnt_ambiguous_dataset_Amon.jpg b/doc/sphinx/source/recipes/figures/model_evaluation/timeseries_rtnt_ambiguous_dataset_Amon.jpg new file mode 100644 index 0000000000..2b65fe97e7 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/model_evaluation/timeseries_rtnt_ambiguous_dataset_Amon.jpg differ diff --git a/doc/sphinx/source/recipes/figures/model_evaluation/variable_vs_lat_pr_Amon.jpg b/doc/sphinx/source/recipes/figures/model_evaluation/variable_vs_lat_pr_Amon.jpg new file mode 100644 index 0000000000..4e252d7904 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/model_evaluation/variable_vs_lat_pr_Amon.jpg differ diff --git a/doc/sphinx/source/recipes/figures/modes_of_variability/DJF-psl_observed_regimes.png b/doc/sphinx/source/recipes/figures/modes_of_variability/DJF-psl_observed_regimes.png deleted file mode 100644 index fb2abe6f5b..0000000000 Binary files a/doc/sphinx/source/recipes/figures/modes_of_variability/DJF-psl_observed_regimes.png and /dev/null differ diff --git a/doc/sphinx/source/recipes/figures/modes_of_variability/SON-psl_predicted_regimes.png b/doc/sphinx/source/recipes/figures/modes_of_variability/SON-psl_predicted_regimes.png new file mode 100644 index 0000000000..ebe9c75217 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/modes_of_variability/SON-psl_predicted_regimes.png differ diff --git a/doc/sphinx/source/recipes/figures/monitor/1d_profile_with_ref.png b/doc/sphinx/source/recipes/figures/monitor/1d_profile_with_ref.png new file mode 100644 index 0000000000..7edcd8e50a Binary files /dev/null and b/doc/sphinx/source/recipes/figures/monitor/1d_profile_with_ref.png differ diff --git a/doc/sphinx/source/recipes/figures/monitor/annualcycle.png b/doc/sphinx/source/recipes/figures/monitor/annualcycle.png new file mode 100644 index 0000000000..429e98e9e8 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/monitor/annualcycle.png differ diff --git a/doc/sphinx/source/recipes/figures/monitor/annualcycle_with_ref.png b/doc/sphinx/source/recipes/figures/monitor/annualcycle_with_ref.png new file mode 100644 index 0000000000..becd1fb29b Binary files /dev/null and b/doc/sphinx/source/recipes/figures/monitor/annualcycle_with_ref.png differ diff --git a/doc/sphinx/source/recipes/figures/monitor/clim.png b/doc/sphinx/source/recipes/figures/monitor/clim.png new file mode 100644 index 0000000000..4b29d4d42b Binary files /dev/null and b/doc/sphinx/source/recipes/figures/monitor/clim.png differ diff --git 
a/doc/sphinx/source/recipes/figures/monitor/hovmoeller_time_vs_lat_with_ref.png b/doc/sphinx/source/recipes/figures/monitor/hovmoeller_time_vs_lat_with_ref.png new file mode 100644 index 0000000000..4abd6df04f Binary files /dev/null and b/doc/sphinx/source/recipes/figures/monitor/hovmoeller_time_vs_lat_with_ref.png differ diff --git a/doc/sphinx/source/recipes/figures/monitor/hovmoeller_z_vs_time_with_ref.png b/doc/sphinx/source/recipes/figures/monitor/hovmoeller_z_vs_time_with_ref.png new file mode 100755 index 0000000000..734913c60b Binary files /dev/null and b/doc/sphinx/source/recipes/figures/monitor/hovmoeller_z_vs_time_with_ref.png differ diff --git a/doc/sphinx/source/recipes/figures/monitor/map_with_ref.png b/doc/sphinx/source/recipes/figures/monitor/map_with_ref.png new file mode 100644 index 0000000000..f8da92b90c Binary files /dev/null and b/doc/sphinx/source/recipes/figures/monitor/map_with_ref.png differ diff --git a/doc/sphinx/source/recipes/figures/monitor/monclim.png b/doc/sphinx/source/recipes/figures/monitor/monclim.png new file mode 100644 index 0000000000..af38ca0e5b Binary files /dev/null and b/doc/sphinx/source/recipes/figures/monitor/monclim.png differ diff --git a/doc/sphinx/source/recipes/figures/monitor/seasonclim.png b/doc/sphinx/source/recipes/figures/monitor/seasonclim.png new file mode 100644 index 0000000000..34ae08cdf0 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/monitor/seasonclim.png differ diff --git a/doc/sphinx/source/recipes/figures/monitor/timeseries.png b/doc/sphinx/source/recipes/figures/monitor/timeseries.png new file mode 100644 index 0000000000..79413e608b Binary files /dev/null and b/doc/sphinx/source/recipes/figures/monitor/timeseries.png differ diff --git a/doc/sphinx/source/recipes/figures/monitor/timeseries_with_ref.png b/doc/sphinx/source/recipes/figures/monitor/timeseries_with_ref.png new file mode 100644 index 0000000000..a7f849f452 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/monitor/timeseries_with_ref.png differ diff --git a/doc/sphinx/source/recipes/figures/monitor/variable_vs_lat_with_ref.png b/doc/sphinx/source/recipes/figures/monitor/variable_vs_lat_with_ref.png new file mode 100644 index 0000000000..31cb81135d Binary files /dev/null and b/doc/sphinx/source/recipes/figures/monitor/variable_vs_lat_with_ref.png differ diff --git a/doc/sphinx/source/recipes/figures/monitor/zonalmean_profile_with_ref.png b/doc/sphinx/source/recipes/figures/monitor/zonalmean_profile_with_ref.png new file mode 100644 index 0000000000..2679b5f88a Binary files /dev/null and b/doc/sphinx/source/recipes/figures/monitor/zonalmean_profile_with_ref.png differ diff --git a/doc/sphinx/source/recipes/figures/mpqb/lineplot_xch4_2003-2014_monmean.png b/doc/sphinx/source/recipes/figures/mpqb/lineplot_xch4_2003-2014_monmean.png new file mode 100644 index 0000000000..6c69759c68 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/mpqb/lineplot_xch4_2003-2014_monmean.png differ diff --git a/doc/sphinx/source/recipes/figures/multimodel_products/tas_JUN_multimodel-anomaly_2006_2099_1961_1990.png b/doc/sphinx/source/recipes/figures/multimodel_products/tas_JUN_multimodel-anomaly_2006_2099_1961_1990.png index 9ca805dd9b..9df37bff68 100644 Binary files a/doc/sphinx/source/recipes/figures/multimodel_products/tas_JUN_multimodel-anomaly_2006_2099_1961_1990.png and b/doc/sphinx/source/recipes/figures/multimodel_products/tas_JUN_multimodel-anomaly_2006_2099_1961_1990.png differ diff --git 
a/doc/sphinx/source/recipes/figures/ocean/model_vs_obs_MassConcentrationofTotalPhytoplanktonExpressedasChlorophyllinSeaWater_NorESM2-LM_ESACCI-OC__maps.png b/doc/sphinx/source/recipes/figures/ocean/model_vs_obs_MassConcentrationofTotalPhytoplanktonExpressedasChlorophyllinSeaWater_NorESM2-LM_ESACCI-OC__maps.png new file mode 100644 index 0000000000..19faf348f9 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ocean/model_vs_obs_MassConcentrationofTotalPhytoplanktonExpressedasChlorophyllinSeaWater_NorESM2-LM_ESACCI-OC__maps.png differ diff --git a/doc/sphinx/source/recipes/figures/ocean/model_vs_obs_MassConcentrationofTotalPhytoplanktonExpressedasChlorophyllinSeaWater_NorESM2-LM_ESACCI-OC__scatter.png b/doc/sphinx/source/recipes/figures/ocean/model_vs_obs_MassConcentrationofTotalPhytoplanktonExpressedasChlorophyllinSeaWater_NorESM2-LM_ESACCI-OC__scatter.png new file mode 100644 index 0000000000..4727174db3 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ocean/model_vs_obs_MassConcentrationofTotalPhytoplanktonExpressedasChlorophyllinSeaWater_NorESM2-LM_ESACCI-OC__scatter.png differ diff --git a/doc/sphinx/source/recipes/figures/psyplot/psyplot_CanESM5.jpg b/doc/sphinx/source/recipes/figures/psyplot/psyplot_CanESM5.jpg new file mode 100644 index 0000000000..cbd7107cc3 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/psyplot/psyplot_CanESM5.jpg differ diff --git a/doc/sphinx/source/recipes/figures/psyplot/psyplot_ICON-ESM-LR.jpg b/doc/sphinx/source/recipes/figures/psyplot/psyplot_ICON-ESM-LR.jpg new file mode 100644 index 0000000000..cdbbcb874e Binary files /dev/null and b/doc/sphinx/source/recipes/figures/psyplot/psyplot_ICON-ESM-LR.jpg differ diff --git a/doc/sphinx/source/recipes/figures/pv_capacity_factor/capacity_factor_IPSL-CM5A-MR_1980-2005_DJF.png b/doc/sphinx/source/recipes/figures/pv_capacity_factor/capacity_factor_IPSL-CM5A-MR_1980-2005_DJF.png new file mode 100644 index 0000000000..362cc2771e Binary files /dev/null and b/doc/sphinx/source/recipes/figures/pv_capacity_factor/capacity_factor_IPSL-CM5A-MR_1980-2005_DJF.png differ diff --git a/doc/sphinx/source/recipes/figures/quantilebias/quantilebias.png b/doc/sphinx/source/recipes/figures/quantilebias/quantilebias.png new file mode 100644 index 0000000000..c15ee07ae7 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/quantilebias/quantilebias.png differ diff --git a/doc/sphinx/source/recipes/figures/radiation_budget/UKESM1-0-LL.png b/doc/sphinx/source/recipes/figures/radiation_budget/UKESM1-0-LL.png new file mode 100644 index 0000000000..5f5fdb5848 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/radiation_budget/UKESM1-0-LL.png differ diff --git a/doc/sphinx/source/recipes/figures/rainfarm/rainfarm.png b/doc/sphinx/source/recipes/figures/rainfarm/rainfarm.png new file mode 100644 index 0000000000..3732aae1f3 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/rainfarm/rainfarm.png differ diff --git a/doc/sphinx/source/recipes/figures/recipe_extreme_index/t90p_IPSL-CM5A-LR_rcp85_2020_2040.png b/doc/sphinx/source/recipes/figures/recipe_extreme_index/t90p_IPSL-CM5A-LR_rcp85_2020_2040.png new file mode 100644 index 0000000000..8528b1a2fe Binary files /dev/null and b/doc/sphinx/source/recipes/figures/recipe_extreme_index/t90p_IPSL-CM5A-LR_rcp85_2020_2040.png differ diff --git a/doc/sphinx/source/recipes/figures/russell18jgr/Fig1_polar-contour_tauu_1986-2005.png b/doc/sphinx/source/recipes/figures/russell18jgr/Fig1_polar-contour_tauu_1986-2005.png new file mode 
100644 index 0000000000..3a3611dcd1 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/russell18jgr/Fig1_polar-contour_tauu_1986-2005.png differ diff --git a/doc/sphinx/source/recipes/figures/russell18jgr/Fig2_1986-2005.png b/doc/sphinx/source/recipes/figures/russell18jgr/Fig2_1986-2005.png new file mode 100644 index 0000000000..b29354c262 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/russell18jgr/Fig2_1986-2005.png differ diff --git a/doc/sphinx/source/recipes/figures/russell18jgr/Fig3_Polar-Front.png b/doc/sphinx/source/recipes/figures/russell18jgr/Fig3_Polar-Front.png new file mode 100644 index 0000000000..67d7297f65 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/russell18jgr/Fig3_Polar-Front.png differ diff --git a/doc/sphinx/source/recipes/figures/russell18jgr/Fig3_Subantarctic-Fronts.png b/doc/sphinx/source/recipes/figures/russell18jgr/Fig3_Subantarctic-Fronts.png new file mode 100644 index 0000000000..c3e6ddca25 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/russell18jgr/Fig3_Subantarctic-Fronts.png differ diff --git a/doc/sphinx/source/recipes/figures/russell18jgr/Fig4_Drake_passage.png b/doc/sphinx/source/recipes/figures/russell18jgr/Fig4_Drake_passage.png new file mode 100644 index 0000000000..9a7f49dbb6 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/russell18jgr/Fig4_Drake_passage.png differ diff --git a/doc/sphinx/source/recipes/figures/russell18jgr/Fig5_sic-max-min.png b/doc/sphinx/source/recipes/figures/russell18jgr/Fig5_sic-max-min.png new file mode 100644 index 0000000000..ad37065d2b Binary files /dev/null and b/doc/sphinx/source/recipes/figures/russell18jgr/Fig5_sic-max-min.png differ diff --git a/doc/sphinx/source/recipes/figures/russell18jgr/Fig5g_sic-line.png b/doc/sphinx/source/recipes/figures/russell18jgr/Fig5g_sic-line.png new file mode 100644 index 0000000000..540966754f Binary files /dev/null and b/doc/sphinx/source/recipes/figures/russell18jgr/Fig5g_sic-line.png differ diff --git a/doc/sphinx/source/recipes/figures/russell18jgr/Fig6a.png b/doc/sphinx/source/recipes/figures/russell18jgr/Fig6a.png new file mode 100644 index 0000000000..03e3e2601e Binary files /dev/null and b/doc/sphinx/source/recipes/figures/russell18jgr/Fig6a.png differ diff --git a/doc/sphinx/source/recipes/figures/russell18jgr/Fig6b.png b/doc/sphinx/source/recipes/figures/russell18jgr/Fig6b.png new file mode 100644 index 0000000000..01c2d3ab4a Binary files /dev/null and b/doc/sphinx/source/recipes/figures/russell18jgr/Fig6b.png differ diff --git a/doc/sphinx/source/recipes/figures/russell18jgr/Fig7_fgco2_polar.png b/doc/sphinx/source/recipes/figures/russell18jgr/Fig7_fgco2_polar.png new file mode 100644 index 0000000000..76f749b154 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/russell18jgr/Fig7_fgco2_polar.png differ diff --git a/doc/sphinx/source/recipes/figures/russell18jgr/Fig7h_fgco2_zonal-flux.png b/doc/sphinx/source/recipes/figures/russell18jgr/Fig7h_fgco2_zonal-flux.png new file mode 100644 index 0000000000..a66beda9cb Binary files /dev/null and b/doc/sphinx/source/recipes/figures/russell18jgr/Fig7h_fgco2_zonal-flux.png differ diff --git a/doc/sphinx/source/recipes/figures/russell18jgr/Fig7i_fgco2_integrated-flux.png b/doc/sphinx/source/recipes/figures/russell18jgr/Fig7i_fgco2_integrated-flux.png new file mode 100644 index 0000000000..95a57a7300 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/russell18jgr/Fig7i_fgco2_integrated-flux.png differ diff --git 
a/doc/sphinx/source/recipes/figures/russell18jgr/Fig8_polar-ph.png b/doc/sphinx/source/recipes/figures/russell18jgr/Fig8_polar-ph.png new file mode 100644 index 0000000000..91e639d6d9 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/russell18jgr/Fig8_polar-ph.png differ diff --git a/doc/sphinx/source/recipes/figures/russell18jgr/Fig9a.png b/doc/sphinx/source/recipes/figures/russell18jgr/Fig9a.png new file mode 100644 index 0000000000..7ee8d64e79 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/russell18jgr/Fig9a.png differ diff --git a/doc/sphinx/source/recipes/figures/russell18jgr/Fig9b.png b/doc/sphinx/source/recipes/figures/russell18jgr/Fig9b.png new file mode 100644 index 0000000000..62a168e120 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/russell18jgr/Fig9b.png differ diff --git a/doc/sphinx/source/recipes/figures/russell18jgr/Fig9c.png b/doc/sphinx/source/recipes/figures/russell18jgr/Fig9c.png new file mode 100644 index 0000000000..c15461adcb Binary files /dev/null and b/doc/sphinx/source/recipes/figures/russell18jgr/Fig9c.png differ diff --git a/doc/sphinx/source/recipes/figures/schlund20esd/SHL_pdf.png b/doc/sphinx/source/recipes/figures/schlund20esd/SHL_pdf.png new file mode 100644 index 0000000000..6019ccf410 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/schlund20esd/SHL_pdf.png differ diff --git a/doc/sphinx/source/recipes/figures/schlund20esd/SHL_scatter.png b/doc/sphinx/source/recipes/figures/schlund20esd/SHL_scatter.png new file mode 100644 index 0000000000..709829c04d Binary files /dev/null and b/doc/sphinx/source/recipes/figures/schlund20esd/SHL_scatter.png differ diff --git a/doc/sphinx/source/recipes/figures/schlund20esd/ZHA_scatter.png b/doc/sphinx/source/recipes/figures/schlund20esd/ZHA_scatter.png new file mode 100644 index 0000000000..2ed84f0ced Binary files /dev/null and b/doc/sphinx/source/recipes/figures/schlund20esd/ZHA_scatter.png differ diff --git a/doc/sphinx/source/recipes/figures/schlund20jgr/feature_importance.png b/doc/sphinx/source/recipes/figures/schlund20jgr/feature_importance.png new file mode 100644 index 0000000000..88ce15b39a Binary files /dev/null and b/doc/sphinx/source/recipes/figures/schlund20jgr/feature_importance.png differ diff --git a/doc/sphinx/source/recipes/figures/schlund20jgr/map_prediction_output___GBRT_abs.png b/doc/sphinx/source/recipes/figures/schlund20jgr/map_prediction_output___GBRT_abs.png new file mode 100644 index 0000000000..cc6b30d01c Binary files /dev/null and b/doc/sphinx/source/recipes/figures/schlund20jgr/map_prediction_output___GBRT_abs.png differ diff --git a/doc/sphinx/source/recipes/figures/schlund20jgr/map_prediction_output___GBRT_change.png b/doc/sphinx/source/recipes/figures/schlund20jgr/map_prediction_output___GBRT_change.png new file mode 100644 index 0000000000..e815d11fdb Binary files /dev/null and b/doc/sphinx/source/recipes/figures/schlund20jgr/map_prediction_output___GBRT_change.png differ diff --git a/doc/sphinx/source/recipes/figures/schlund20jgr/map_prediction_output_error___GBRT_abs.png b/doc/sphinx/source/recipes/figures/schlund20jgr/map_prediction_output_error___GBRT_abs.png new file mode 100644 index 0000000000..1518a9fb21 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/schlund20jgr/map_prediction_output_error___GBRT_abs.png differ diff --git a/doc/sphinx/source/recipes/figures/schlund20jgr/map_prediction_output_error___GBRT_change.png 
b/doc/sphinx/source/recipes/figures/schlund20jgr/map_prediction_output_error___GBRT_change.png new file mode 100644 index 0000000000..d52e3c246a Binary files /dev/null and b/doc/sphinx/source/recipes/figures/schlund20jgr/map_prediction_output_error___GBRT_change.png differ diff --git a/doc/sphinx/source/recipes/figures/schlund20jgr/residuals_distribution.png b/doc/sphinx/source/recipes/figures/schlund20jgr/residuals_distribution.png new file mode 100644 index 0000000000..484ad35e05 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/schlund20jgr/residuals_distribution.png differ diff --git a/doc/sphinx/source/recipes/figures/schlund20jgr/rmse_plot.png b/doc/sphinx/source/recipes/figures/schlund20jgr/rmse_plot.png new file mode 100644 index 0000000000..bfdb30a574 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/schlund20jgr/rmse_plot.png differ diff --git a/doc/sphinx/source/recipes/figures/schlund20jgr/training_progress.png b/doc/sphinx/source/recipes/figures/schlund20jgr/training_progress.png new file mode 100644 index 0000000000..fb9b7903bb Binary files /dev/null and b/doc/sphinx/source/recipes/figures/schlund20jgr/training_progress.png differ diff --git a/doc/sphinx/source/recipes/figures/sea_surface_salinity/radar_bias.png b/doc/sphinx/source/recipes/figures/sea_surface_salinity/radar_bias.png new file mode 100644 index 0000000000..61b312624e Binary files /dev/null and b/doc/sphinx/source/recipes/figures/sea_surface_salinity/radar_bias.png differ diff --git a/doc/sphinx/source/recipes/figures/sea_surface_salinity/radar_std.png b/doc/sphinx/source/recipes/figures/sea_surface_salinity/radar_std.png new file mode 100644 index 0000000000..4eb8a8e2cc Binary files /dev/null and b/doc/sphinx/source/recipes/figures/sea_surface_salinity/radar_std.png differ diff --git a/doc/sphinx/source/recipes/figures/seaborn/regional_pr_hists.jpg b/doc/sphinx/source/recipes/figures/seaborn/regional_pr_hists.jpg new file mode 100644 index 0000000000..da57977859 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/seaborn/regional_pr_hists.jpg differ diff --git a/doc/sphinx/source/recipes/figures/seaborn/ta_vs_lat.jpg b/doc/sphinx/source/recipes/figures/seaborn/ta_vs_lat.jpg new file mode 100644 index 0000000000..c4929c33ca Binary files /dev/null and b/doc/sphinx/source/recipes/figures/seaborn/ta_vs_lat.jpg differ diff --git a/doc/sphinx/source/recipes/figures/seaice/SSIE-MEAN_vs_YOD_sic_extend_Arctic_September_1960-2100.png b/doc/sphinx/source/recipes/figures/seaice/SSIE-MEAN_vs_YOD_sic_extend_Arctic_September_1960-2100.png new file mode 100644 index 0000000000..b53f601802 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/seaice/SSIE-MEAN_vs_YOD_sic_extend_Arctic_September_1960-2100.png differ diff --git a/doc/sphinx/source/recipes/figures/seaice/extent_sic_Arctic_September_1960-2005.png b/doc/sphinx/source/recipes/figures/seaice/extent_sic_Arctic_September_1960-2005.png new file mode 100644 index 0000000000..b24d9db612 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/seaice/extent_sic_Arctic_September_1960-2005.png differ diff --git a/doc/sphinx/source/recipes/figures/seaice/timeseries_rcp85.png b/doc/sphinx/source/recipes/figures/seaice/timeseries_rcp85.png new file mode 100644 index 0000000000..4389e2eb37 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/seaice/timeseries_rcp85.png differ diff --git a/doc/sphinx/source/recipes/figures/seaice/trend_sic_extend_Arctic_September_histogram.png 
b/doc/sphinx/source/recipes/figures/seaice/trend_sic_extend_Arctic_September_histogram.png new file mode 100644 index 0000000000..90516b5b5e Binary files /dev/null and b/doc/sphinx/source/recipes/figures/seaice/trend_sic_extend_Arctic_September_histogram.png differ diff --git a/doc/sphinx/source/recipes/figures/seaice_drift/drift-strength.png b/doc/sphinx/source/recipes/figures/seaice_drift/drift-strength.png new file mode 100644 index 0000000000..ec2af7788f Binary files /dev/null and b/doc/sphinx/source/recipes/figures/seaice_drift/drift-strength.png differ diff --git a/doc/sphinx/source/recipes/figures/seaice_feedback/negative_feedback.png b/doc/sphinx/source/recipes/figures/seaice_feedback/negative_feedback.png new file mode 100644 index 0000000000..623a496299 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/seaice_feedback/negative_feedback.png differ diff --git a/doc/sphinx/source/recipes/figures/shapeselect/shapeselect.png b/doc/sphinx/source/recipes/figures/shapeselect/shapeselect.png new file mode 100644 index 0000000000..4b409f9f66 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/shapeselect/shapeselect.png differ diff --git a/doc/sphinx/source/recipes/figures/smpi/reichlerkim08bams_smpi.png b/doc/sphinx/source/recipes/figures/smpi/reichlerkim08bams_smpi.png new file mode 100644 index 0000000000..e98e69c2e7 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/smpi/reichlerkim08bams_smpi.png differ diff --git a/doc/sphinx/source/recipes/figures/tcr/CanESM2.png b/doc/sphinx/source/recipes/figures/tcr/CanESM2.png new file mode 100644 index 0000000000..5e6b8eedca Binary files /dev/null and b/doc/sphinx/source/recipes/figures/tcr/CanESM2.png differ diff --git a/doc/sphinx/source/recipes/figures/tebaldi21esd/pr_pattern.png b/doc/sphinx/source/recipes/figures/tebaldi21esd/pr_pattern.png new file mode 100644 index 0000000000..f3190ad217 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/tebaldi21esd/pr_pattern.png differ diff --git a/doc/sphinx/source/recipes/figures/tebaldi21esd/tas_timeseries.png b/doc/sphinx/source/recipes/figures/tebaldi21esd/tas_timeseries.png new file mode 100644 index 0000000000..6ed3c09a44 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/tebaldi21esd/tas_timeseries.png differ diff --git a/doc/sphinx/source/recipes/figures/tebaldi21esd/warming_level_table.png b/doc/sphinx/source/recipes/figures/tebaldi21esd/warming_level_table.png new file mode 100644 index 0000000000..b3eeeb92ab Binary files /dev/null and b/doc/sphinx/source/recipes/figures/tebaldi21esd/warming_level_table.png differ diff --git a/doc/sphinx/source/recipes/figures/thermodyn_diagtool/CanESM2_2458_lec_diagram.pdf b/doc/sphinx/source/recipes/figures/thermodyn_diagtool/CanESM2_2458_lec_diagram.pdf new file mode 100644 index 0000000000..b364d37bdd Binary files /dev/null and b/doc/sphinx/source/recipes/figures/thermodyn_diagtool/CanESM2_2458_lec_diagram.pdf differ diff --git a/doc/sphinx/source/recipes/figures/thermodyn_diagtool/CanESM2_wmb_transp.png b/doc/sphinx/source/recipes/figures/thermodyn_diagtool/CanESM2_wmb_transp.png new file mode 100644 index 0000000000..9a2288c73b Binary files /dev/null and b/doc/sphinx/source/recipes/figures/thermodyn_diagtool/CanESM2_wmb_transp.png differ diff --git a/doc/sphinx/source/recipes/figures/thermodyn_diagtool/IPSL-CM5A-MR_energy_climap.png b/doc/sphinx/source/recipes/figures/thermodyn_diagtool/IPSL-CM5A-MR_energy_climap.png new file mode 100644 index 0000000000..e816d727ca Binary files /dev/null and 
b/doc/sphinx/source/recipes/figures/thermodyn_diagtool/IPSL-CM5A-MR_energy_climap.png differ diff --git a/doc/sphinx/source/recipes/figures/thermodyn_diagtool/meridional_transp.png b/doc/sphinx/source/recipes/figures/thermodyn_diagtool/meridional_transp.png new file mode 100644 index 0000000000..7397d070c4 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/thermodyn_diagtool/meridional_transp.png differ diff --git a/doc/sphinx/source/recipes/figures/toymodel/synthetic_CMIP5_IPSL-CM5A-LR_day_historical_r1i1p1_T2M_tasmax_1999-2000.jpg b/doc/sphinx/source/recipes/figures/toymodel/synthetic_CMIP5_IPSL-CM5A-LR_day_historical_r1i1p1_T2M_tasmax_1999-2000.jpg deleted file mode 100644 index 17ad159aa8..0000000000 Binary files a/doc/sphinx/source/recipes/figures/toymodel/synthetic_CMIP5_IPSL-CM5A-LR_day_historical_r1i1p1_T2M_tasmax_1999-2000.jpg and /dev/null differ diff --git a/doc/sphinx/source/recipes/figures/toymodel/synthetic_CMIP5_bcc-csm1-1_Amon_rcp45_r1i1p1_psl_2051-2060.jpg b/doc/sphinx/source/recipes/figures/toymodel/synthetic_CMIP5_bcc-csm1-1_Amon_rcp45_r1i1p1_psl_2051-2060.jpg new file mode 100644 index 0000000000..776a733feb Binary files /dev/null and b/doc/sphinx/source/recipes/figures/toymodel/synthetic_CMIP5_bcc-csm1-1_Amon_rcp45_r1i1p1_psl_2051-2060.jpg differ diff --git a/doc/sphinx/source/recipes/figures/validation/Merid_Mean_DJF_longitude_tas_UKESM1-0-LL_vs_IPSL-CM6A-LR.png b/doc/sphinx/source/recipes/figures/validation/Merid_Mean_DJF_longitude_tas_UKESM1-0-LL_vs_IPSL-CM6A-LR.png new file mode 100644 index 0000000000..4a9d89bc4c Binary files /dev/null and b/doc/sphinx/source/recipes/figures/validation/Merid_Mean_DJF_longitude_tas_UKESM1-0-LL_vs_IPSL-CM6A-LR.png differ diff --git a/doc/sphinx/source/recipes/figures/validation/Zonal_Mean_DJF_latitude_tas_UKESM1-0-LL_vs_IPSL-CM6A-LR.png b/doc/sphinx/source/recipes/figures/validation/Zonal_Mean_DJF_latitude_tas_UKESM1-0-LL_vs_IPSL-CM6A-LR.png new file mode 100644 index 0000000000..e2f96831c4 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/validation/Zonal_Mean_DJF_latitude_tas_UKESM1-0-LL_vs_IPSL-CM6A-LR.png differ diff --git a/doc/sphinx/source/recipes/figures/wenzel14jgr/constr_tas-nbp_30-1960.000001.png b/doc/sphinx/source/recipes/figures/wenzel14jgr/constr_tas-nbp_30-1960.000001.png new file mode 100644 index 0000000000..c819aadaae Binary files /dev/null and b/doc/sphinx/source/recipes/figures/wenzel14jgr/constr_tas-nbp_30-1960.000001.png differ diff --git a/doc/sphinx/source/recipes/figures/wenzel14jgr/constr_tas-nbp_30-1960.000002.png b/doc/sphinx/source/recipes/figures/wenzel14jgr/constr_tas-nbp_30-1960.000002.png new file mode 100644 index 0000000000..878cf223ff Binary files /dev/null and b/doc/sphinx/source/recipes/figures/wenzel14jgr/constr_tas-nbp_30-1960.000002.png differ diff --git a/doc/sphinx/source/recipes/figures/wenzel14jgr/corr_tas-nbp_anom_1960-2005.png b/doc/sphinx/source/recipes/figures/wenzel14jgr/corr_tas-nbp_anom_1960-2005.png new file mode 100644 index 0000000000..9ed6339aa6 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/wenzel14jgr/corr_tas-nbp_anom_1960-2005.png differ diff --git a/doc/sphinx/source/recipes/figures/wenzel14jgr/tas_Global_CMIP5_1pctCO2_anom__1-1999.png b/doc/sphinx/source/recipes/figures/wenzel14jgr/tas_Global_CMIP5_1pctCO2_anom__1-1999.png new file mode 100644 index 0000000000..29639ee3cc Binary files /dev/null and b/doc/sphinx/source/recipes/figures/wenzel14jgr/tas_Global_CMIP5_1pctCO2_anom__1-1999.png differ diff --git 
a/doc/sphinx/source/recipes/figures/wenzel16jclim/CMPI5_uajet-pos_rcp45_20ystep_FIG1.png b/doc/sphinx/source/recipes/figures/wenzel16jclim/CMPI5_uajet-pos_rcp45_20ystep_FIG1.png new file mode 100644 index 0000000000..3790b177ef Binary files /dev/null and b/doc/sphinx/source/recipes/figures/wenzel16jclim/CMPI5_uajet-pos_rcp45_20ystep_FIG1.png differ diff --git a/doc/sphinx/source/recipes/figures/wenzel16jclim/CMPI5_uajet-pos_rcp45_20ystep_FIG2b.png b/doc/sphinx/source/recipes/figures/wenzel16jclim/CMPI5_uajet-pos_rcp45_20ystep_FIG2b.png new file mode 100644 index 0000000000..358c2a8fe5 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/wenzel16jclim/CMPI5_uajet-pos_rcp45_20ystep_FIG2b.png differ diff --git a/doc/sphinx/source/recipes/figures/wenzel16jclim/CMPI5_uajet-pos_rcp45_20ystep_FIG3.png b/doc/sphinx/source/recipes/figures/wenzel16jclim/CMPI5_uajet-pos_rcp45_20ystep_FIG3.png new file mode 100644 index 0000000000..649328ffe6 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/wenzel16jclim/CMPI5_uajet-pos_rcp45_20ystep_FIG3.png differ diff --git a/doc/sphinx/source/recipes/figures/wenzel16jclim/ta_trop250_ta_DJF_trend.png b/doc/sphinx/source/recipes/figures/wenzel16jclim/ta_trop250_ta_DJF_trend.png new file mode 100644 index 0000000000..0ae81d1534 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/wenzel16jclim/ta_trop250_ta_DJF_trend.png differ diff --git a/doc/sphinx/source/recipes/figures/wenzel16jclim/uajet-pos_rcp45_2040-2059.png b/doc/sphinx/source/recipes/figures/wenzel16jclim/uajet-pos_rcp45_2040-2059.png new file mode 100644 index 0000000000..5921ef9a4e Binary files /dev/null and b/doc/sphinx/source/recipes/figures/wenzel16jclim/uajet-pos_rcp45_2040-2059.png differ diff --git a/doc/sphinx/source/recipes/figures/wenzel16jclim/uajet_H-SH_c.png b/doc/sphinx/source/recipes/figures/wenzel16jclim/uajet_H-SH_c.png new file mode 100644 index 0000000000..1cb68b1b5c Binary files /dev/null and b/doc/sphinx/source/recipes/figures/wenzel16jclim/uajet_H-SH_c.png differ diff --git a/doc/sphinx/source/recipes/figures/wenzel16nat/fig_1.png b/doc/sphinx/source/recipes/figures/wenzel16nat/fig_1.png new file mode 100644 index 0000000000..8e2a238e99 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/wenzel16nat/fig_1.png differ diff --git a/doc/sphinx/source/recipes/figures/wenzel16nat/fig_2.png b/doc/sphinx/source/recipes/figures/wenzel16nat/fig_2.png new file mode 100644 index 0000000000..b4d4876827 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/wenzel16nat/fig_2.png differ diff --git a/doc/sphinx/source/recipes/figures/wenzel16nat/fig_3.png b/doc/sphinx/source/recipes/figures/wenzel16nat/fig_3.png new file mode 100644 index 0000000000..ab877cf31b Binary files /dev/null and b/doc/sphinx/source/recipes/figures/wenzel16nat/fig_3.png differ diff --git a/doc/sphinx/source/recipes/index.rst b/doc/sphinx/source/recipes/index.rst index dd034bc8cf..e18ada0fd7 100644 --- a/doc/sphinx/source/recipes/index.rst +++ b/doc/sphinx/source/recipes/index.rst @@ -3,28 +3,159 @@ Recipes ------- +A website displaying results produced with the latest release of +ESMValTool for all available recipes can be accessed `here +`_. + .. toctree:: :maxdepth: 1 - recipe_capacity_factor +.. _general_purpose_diags: + +General-purpose diagnostics +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Recipes that use highly customizable diagnostics which are designed to plot a +large variety of input data. + +.. 
toctree:: + :maxdepth: 1 + + recipe_model_evaluation + recipe_monitor + recipe_psyplot + recipe_seaborn + +Atmosphere +^^^^^^^^^^ +.. toctree:: + :maxdepth: 1 + + recipe_miles + recipe_climate_patterns recipe_clouds - recipe_combined_climate_extreme_index - recipe_consecdrydays + recipe_cmug_h2o recipe_crem - recipe_cvdp + recipe_consecdrydays + recipe_deangelis15nat recipe_diurnal_temperature_index - recipe_ensclus - recipe_flato13ipcc + recipe_eady_growth_rate + recipe_extreme_events + recipe_extreme_index + recipe_eyring06jgr + recipe_eyring13jgr + recipe_gier20bg recipe_heatwaves_coldwaves - recipe_landcover - recipe_miles + recipe_hyint + recipe_iht_toa + recipe_impact recipe_modes_of_variability - recipe_multimodel_products - recipe_oceans - recipe_perfmetrics + recipe_mpqb_xch4 recipe_quantilebias + recipe_bock20jgr + recipe_spei + recipe_martin18grl + recipe_autoassess_stratosphere + recipe_autoassess_landsurface_permafrost + recipe_autoassess_landsurface_surfrad + recipe_autoassess_landsurface_soilmoisture + recipe_zmnam + recipe_thermodyn_diagtool + recipe_validation + recipe_radiation_budget + recipe_aod_aeronet_assess + +Climate metrics +^^^^^^^^^^^^^^^ +.. toctree:: + :maxdepth: 1 + + recipe_perfmetrics + recipe_smpi + +Future projections +^^^^^^^^^^^^^^^^^^ +.. toctree:: + :maxdepth: 1 + + recipe_climwip + recipe_li17natcc + recipe_schlund20jgr + recipe_meehl20sciadv + recipe_emergent_constraints + recipe_wenzel14jgr + recipe_schlund20esd + recipe_cox18nature + recipe_snowalbedo + recipe_ecs + recipe_kcs + recipe_wenzel16jclim + recipe_wenzel16nat + recipe_tcr + recipe_tebaldi21esd + recipe_climate_change_hotspot + +IPCC +^^^^ +.. toctree:: + :maxdepth: 1 + + recipe_ipccwg1ar6ch3 + recipe_ipccwg1ar5ch9 + recipe_collins13ipcc + recipe_examples + +Land +^^^^ +.. toctree:: + :maxdepth: 1 + + recipe_albedolandcover + recipe_carvalhais14nat + recipe_hydrology + recipe_hydro_forcing + recipe_landcover + recipe_anav13jclim recipe_runoff_et + +Ocean +^^^^^ +.. toctree:: + :maxdepth: 1 + + recipe_arctic_ocean + recipe_cvdp + recipe_combined_indices + recipe_esacci_oc + recipe_oceans + recipe_sea_surface_salinity + recipe_russell18jgr + +Other +^^^^^ +.. toctree:: + :maxdepth: 1 + + recipe_capacity_factor + recipe_cmorizers + recipe_ensclus + recipe_esacci_lst + recipe_examples + recipe_galytska23jgr + recipe_multimodel_products + recipe_pv_capacity_factor + recipe_rainfarm + recipe_seaice + recipe_seaice_drift + recipe_seaice_feedback recipe_shapeselect - recipe_spei + recipes_testing recipe_toymodel - recipe_zmnam + +Broken recipe list +^^^^^^^^^^^^^^^^^^ + +.. toctree:: + :maxdepth: 1 + + broken_recipe_list diff --git a/doc/sphinx/source/recipes/recipe_albedolandcover.rst b/doc/sphinx/source/recipes/recipe_albedolandcover.rst new file mode 100644 index 0000000000..ee1efdc137 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_albedolandcover.rst @@ -0,0 +1,66 @@ +.. _recipes_albedolandcover: + +Landcover - Albedo +================== + + +Overview +-------- + +The diagnostic determines the coefficients of multiple linear regressions fitted between the albedo values and the tree, shrub, short vegetation (crops and grasses) fractions of each grid cell within spatially moving windows encompassing 5x5 model grid cells. Solving these regressions provides the albedo values for trees, shrubs and short vegetation (crops and grasses) from which the albedo changes associated with transitions between these three landcover types are derived. 
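As a rough illustration of the regression step described above, the sketch below fits a single 5x5 window by ordinary least squares. This is a simplified sketch, not the code from ``albedolandcover.py``: it assumes 2D NumPy arrays of albedo and cover fractions, the function name is hypothetical, and snow screening, missing data and domain edges are ignored.

.. code-block:: python

    import numpy as np

    def window_albedo_coefficients(albedo, tree, shrub, short_veg, i, j):
        """Fit albedo = a_tree*f_tree + a_shrub*f_shrub + a_short*f_short
        over the 5x5 window of grid cells centred on (i, j).

        Hypothetical sketch: the real diagnostic also handles snow
        cover, missing values and window edges.
        """
        window = np.s_[i - 2:i + 3, j - 2:j + 3]
        # One row per grid cell in the window, one column per cover type.
        design = np.column_stack([tree[window].ravel(),
                                  shrub[window].ravel(),
                                  short_veg[window].ravel()])
        target = albedo[window].ravel()
        coeffs, *_ = np.linalg.lstsq(design, target, rcond=None)
        # The coefficients estimate the albedo of pure tree, shrub and
        # short vegetation cover within this window.
        return dict(zip(("tree", "shrub", "short"), coeffs))

The albedo change associated with a transition, e.g. from tree to short vegetation, is then simply the difference of the two fitted coefficients for that window.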
The diagnostic distinguishes between snow-free and snow-covered grid cells. + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + + * recipe_albedolandcover.yml + +Diagnostics are stored in diag_scripts/landcover/ + + * albedolandcover.py + + +User settings +------------- + +Several parameters can be set in the recipe + + +Variables +--------- + +* rsus (atmos, monthly mean, time latitude longitude) +* rsds (atmos, monthly mean, time latitude longitude) +* snc (landice, monthly mean, time latitude longitude) +* grassFrac (land, monthly mean, time latitude longitude) +* treeFrac (land, monthly mean, time latitude longitude) +* shrubFrac (land, monthly mean, time latitude longitude) +* cropFrac (land, monthly mean, time latitude longitude) +* pastureFrac (land, monthly mean, time latitude longitude) + + +Observations and reformat scripts +--------------------------------- + +A reformatting script for observational data is available here: + * esmvaltool/cmorizers/data/formatters/datasets/duveiller2018.py + + +References +---------- + +* Duveiller, G., Hooker, J. and Cescatti, A., 2018a. A dataset mapping the potential biophysical effects of vegetation cover change. Scientific Data, 5: 180014. + +* Duveiller, G., Hooker, J. and Cescatti, A., 2018b. The mark of vegetation change on Earth’s surface energy balance. Nature Communications, 9(1): 679. + +Example plots +------------- + +.. _fig_landcoveralbedo_CMIP5_MPI-ESM-LR: +.. figure:: /recipes/figures/albedolandcover/MPI-ESM-LR_albedo_change_from_tree_to_crop-grass.png + :align: center + :width: 14cm + + Example of albedo change from tree to crop and grass for the CMIP5 model MPI-ESM-LR, derived for the month of July and averaged over the years 2000 to 2004. diff --git a/doc/sphinx/source/recipes/recipe_anav13jclim.rst b/doc/sphinx/source/recipes/recipe_anav13jclim.rst new file mode 100644 index 0000000000..bd77a71ad5 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_anav13jclim.rst @@ -0,0 +1,237 @@ +.. _recipes_anav13jclim: + +Land and ocean components of the global carbon cycle +==================================================== + +Overview +-------- + +This recipe reproduces most of the figures of `Anav et al. (2013)`_: + +* Timeseries plot for different regions +* Seasonal cycle plot for different regions +* Errorbar plot for different regions showing mean and standard deviation +* Scatterplot for different regions showing mean vs. interannual variability +* 3D-scatterplot for different regions showing mean vs. linear trend and the + model variability index (MVI) as a third dimension (color coded) +* Scatterplot for different regions comparing two variables against each other + (*cSoil* vs. *cVeg*) + +In addition, performance metrics are calculated for all variables using the +performance metric diagnostics (see details in :ref:`nml_perfmetrics`). + + +.. _mvi calculation: + +MVI calculation +--------------- + +The Model variability index (MVI) on a single grid point (calculated in +``carbon_cycle/mvi.ncl``) is defined as + +.. math:: + + MVI = \left( \frac{s^M}{s^O} - \frac{s^O}{s^M} \right)^2 + +where :math:`s^M` and :math:`s^O` are the standard deviations of the annual +time series on a single grid point of a climate model :math:`M` and the +reference observation :math:`O`. In order to get a global or regional result, +this index is simply averaged over the respective domain. + +In its given form, this equation is sensitive to standard deviations close to zero. + For example, values of :math:`s^M = 10^{-5} \mu` and :math:`s^O = 10^{-7} +\mu` (where :math:`\mu` is the mean of :math:`s^O` over all grid cells) result +in an MVI of the order of :math:`10^4` for this single grid cell even +though the two standard deviations are close to zero and negligible compared to +other grid cells. Due to the use of the arithmetic mean, a single high value is +able to distort the overall MVI. + +In the original publication, the maximum MVI is of the order of 10 (for the +variable `gpp`). However, a naive application of the MVI definition yields +values over :math:`10^9` for some models. Unfortunately, `Anav et al. (2013)`_ +do not provide an explanation of how to deal with this problem. Nevertheless, +this script provides two configuration options to avoid high MVI values, but +they are not related to the original paper or any other peer-reviewed study and +should be used with great caution (see :ref:`user settings`). + +.. _`Anav et al. (2013)`: https://journals.ametsoc.org/doi/full/10.1175/JCLI-D-12-00417.1 + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + + * recipe_anav13jclim.yml + + +Diagnostics are stored in diag_scripts/ + + * carbon_cycle/main.ncl + * carbon_cycle/mvi.ncl + * carbon_cycle/two_variables.ncl + * perfmetrics/main.ncl + * perfmetrics/collect.ncl + + +.. _user settings: + +User settings in recipe +----------------------- + +#. Preprocessor + + * ``mask_fillvalues``: Mask common missing values on different datasets. + * ``mask_landsea``: Mask land/ocean. + * ``regrid``: Regridding. + * ``weighting_landsea_fraction``: Land/ocean fraction weighting. + +#. Script carbon_cycle/main.ncl + + * ``region``, *str*: Region to be averaged. + * ``legend_outside``, *bool*: Plot legend in a separate file (does not + affect errorbar plot and evolution plot). + * ``seasonal_cycle_plot``, *bool*: Draw seasonal cycle plot. + * ``errorbar_plot``, *bool*: Draw errorbar plot. + * ``mean_IAV_plot``, *bool*: Draw Mean (x-axis), IAV (y-axis) plot. + * ``evolution_plot``, *bool*: Draw time evolution of a variable comparing + a reference dataset to the multi-dataset mean; requires ref_dataset in recipe. + * ``sort``, *bool*, optional (default: ``False``): Sort datasets in + alphabetical order. + * ``anav_month``, *bool*, optional (default: ``False``): Conversion of + y-axis to PgC/month instead of /year. + * ``evolution_plot_ref_dataset``, *str*, optional: Reference dataset for + evolution_plot. Required when ``evolution_plot`` is ``True``. + * ``evolution_plot_anomaly``, *bool*, optional (default: ``False``): Plot + anomalies in evolution plot. + * ``evolution_plot_ignore``, *list*, optional: Datasets to ignore in + evolution plot. + * ``evolution_plot_volcanoes``, *bool*, optional (default: ``False``): Turns + on/off lines of volcano eruptions in evolution plot. + * ``evolution_plot_color``, *int*, optional (default: ``0``): Hue of the + contours in the evolution plot. + * ``ensemble_name``, *str*, optional: Name of the ensemble for use in the evolution plot legend. + +#. Script carbon_cycle/mvi.ncl + + * ``region``, *str*: Region to be averaged. + * ``reference_dataset``, *str*: Reference dataset for the MVI calculation, + specified for each variable separately. + * ``mean_time_range``, *list*, optional: Time period over which the mean is + calculated (if not given, use whole time span). + * ``trend_time_range``, *list*, optional: Time period over which the trend + is calculated (if not given, use whole time span).
+ * ``mvi_time_range``, *list*, optional: Time period over which the MVI is + calculated (if not given, use whole time span). + * ``stddev_threshold``, *float*, optional (default: ``1e-2``): Threshold to + ignore low standard deviations (relative to the mean) in the MVI + calculations. See also :ref:`mvi calculation`. + * ``mask_below``, *float*, optional: Threshold to mask low absolute values + (relative to the mean) in the input data (not used by default). See also + :ref:`mvi calculation`. + +#. Script carbon_cycle/two_variables.ncl + + * ``region``, *str*: Region to be averaged. + +#. Script perfmetrics/main.ncl + + See :ref:`nml_perfmetrics`. + +#. Script perfmetrics/collect.ncl + + See :ref:`nml_perfmetrics`. + + +Variables +--------- + +* *tas* (atmos, monthly, longitude, latitude, time) +* *pr* (atmos, monthly, longitude, latitude, time) +* *nbp* (land, monthly, longitude, latitude, time) +* *gpp* (land, monthly, longitude, latitude, time) +* *lai* (land, monthly, longitude, latitude, time) +* *cveg* (land, monthly, longitude, latitude, time) +* *csoil* (land, monthly, longitude, latitude, time) +* *tos* (ocean, monthly, longitude, latitude, time) +* *fgco2* (ocean, monthly, longitude, latitude, time) + + +Observations and reformat scripts +--------------------------------- + +* CRU (*tas*, *pr*) +* JMA-TRANSCOM (*nbp*, *fgco2*) +* MTE (*gpp*) +* LAI3g (*lai*) +* NDP (*cveg*) +* HWSD (*csoil*) +* HadISST (*tos*) + + +References +---------- + +* Anav, A. et al.: Evaluating the land and ocean components of the global + carbon cycle in the CMIP5 Earth System Models, J. Climate, 26, 6801-6843, + doi: 10.1175/JCLI-D-12-00417.1, 2013. + + +Example plots +------------- + +.. _fig_anav13jclim_1: +.. figure:: /recipes/figures/anav13jclim/nbp_evolution_global.png + :align: center + :width: 80% + + Time series of global net biome productivity (NBP) over the period + 1901-2005. Similar to Anav et al. (2013), Figure 5. + +.. _fig_anav13jclim_2: +.. figure:: /recipes/figures/anav13jclim/gpp_cycle_nh.png + :align: center + :width: 80% + + Seasonal cycle plot for northern hemisphere gross primary production (GPP) + over the period 1986-2005. Similar to Anav et al. (2013), Figure 9. + +.. _fig_anav13jclim_3: +.. figure:: /recipes/figures/anav13jclim/gpp_errorbar_trop.png + :align: center + :width: 80% + + Errorbar plot for tropical gross primary production (GPP) over the period + 1986-2005. + +.. _fig_anav13jclim_4: +.. figure:: /recipes/figures/anav13jclim/tos_scatter_global.png + :align: center + :width: 80% + + Scatterplot for interannual variability and mean of global sea surface + temperature (TOS) over the period 1986-2005. + +.. _fig_anav13jclim_5: +.. figure:: /recipes/figures/anav13jclim/tas_global.png + :align: center + :width: 80% + + Scatterplot for the multiyear average of 2m surface temperature (TAS) on the x axis, + its linear trend on the y axis, and the MVI (color coded). Similar to Anav et al. (2013), Figure 1 + (bottom). + +.. _fig_anav13jclim_6: +.. figure:: /recipes/figures/anav13jclim/cSoil-cVeg_scatter_global.png + :align: center + :width: 80% + + Scatterplot for vegetation carbon content (cVeg) and soil carbon content + (cSoil) over the period 1986-2005. Similar to Anav et al. (2013), Figure 12. + +.. _fig_anav13jclim_7: +.. figure:: /recipes/figures/anav13jclim/diag_grading_pr-global_to_diag_grading_gpp-global_RMSD.png + :align: center + :width: 80% + + Performance metrics plot for carbon-cycle-relevant diagnostics.
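The effect of ``stddev_threshold`` can be illustrated with a short sketch. This is not the NCL implementation from ``carbon_cycle/mvi.ncl``; it is a minimal NumPy illustration of the masking idea described in :ref:`mvi calculation`, with a hypothetical function name and arrays of grid-point standard deviations as inputs.

.. code-block:: python

    import numpy as np

    def masked_mvi(s_model, s_obs, stddev_threshold=1e-2):
        """Regional MVI with near-zero standard deviations masked out.

        Sketch only; the exact masking rule in the NCL script may differ.
        """
        s_model = np.asarray(s_model, dtype=float)
        s_obs = np.asarray(s_obs, dtype=float)
        # Ignore grid cells whose standard deviations are tiny relative
        # to the domain mean; they would otherwise dominate the average.
        floor = stddev_threshold * s_obs.mean()
        valid = (s_model > floor) & (s_obs > floor)
        mvi = (s_model[valid] / s_obs[valid]
               - s_obs[valid] / s_model[valid]) ** 2
        # Global or regional result: arithmetic mean over valid cells.
        return float(mvi.mean())

With the example values quoted in :ref:`mvi calculation` (:math:`s^M = 10^{-5}\mu`, :math:`s^O = 10^{-7}\mu`), such a grid cell is simply excluded instead of contributing a value of the order of :math:`10^4` to the average.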
diff --git a/doc/sphinx/source/recipes/recipe_aod_aeronet_assess.rst b/doc/sphinx/source/recipes/recipe_aod_aeronet_assess.rst new file mode 100644 index 0000000000..fec1bed761 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_aod_aeronet_assess.rst @@ -0,0 +1,161 @@ +.. _recipe_aod_aeronet_assess: + +AOD AeroNET Assess +================== + +Overview +-------- + +This diagnostic evaluates model aerosol optical depth (AOD) against ground-based observations from the AeroNET measurement network. Monthly mean AOD data is downloaded from the AeroNET website and formatted (CMORized) using the AERONET downloader and formatter within ESMValTool. + +Multiannual seasonal means are calculated from the model output and compared with a multiannual seasonal mean climatology generated from AeroNET observational data. At each AeroNET station the data are screened for validity according to the following default criteria: + + * 1. Monthly means must be generated from at least one AOD observation in that + month. + + * 2. Seasonal means for DJF, MAM, JJA and SON must be calculated from three + monthly means, i.e. monthly means from December, January and February for DJF. + + * 3. For a given year to be valid, there must be a seasonal mean for each climate + season, i.e. DJF, MAM, JJA and SON. + + * 4. For a multiannual seasonal mean there must be at least five seasonal means + over the time range of interest. + +NOTE: The code is designed to be flexible and the default criteria can be changed according to the user's requirements (see the user settings below; a minimal sketch of this screening logic follows the list of required settings). + +The evaluation is visualised by plotting model output as 2D filled contours and overlaying AeroNET observations at model grid cells co-located with the AeroNET measurement stations. Statistical data (root mean square error) is generated using AeroNET observations at model grid cells co-located with the AeroNET measurement stations. + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in esmvaltool/recipes/ + + * recipe_aod_aeronet_assess.yml + +Diagnostics are stored in esmvaltool/diag_scripts/aerosols/ + + * aod_aeronet_assess.py: Plot the AOD evaluation. + * aero_utils.py: Utility functions commonly used by aerosol assessment routines. + + +User settings in recipe +----------------------- + +#. Script aod_aeronet_assess.py + + *Required settings for script* + + * wavel: The wavelength of interest for the evaluation, currently set up for 440 nm + * min_days_per_mon: The minimum number of days used to calculate the AOD monthly mean + * min_mon_per_seas: The minimum number of monthly means used to calculate each + seasonal mean. This must be between 1 and 3. + * min_seas_per_year: The minimum number of seasonal means in each year. This + must be between 1 and 4. + * min_seas_per_clim: The minimum number of seasonal means used to calculate + the multiannual seasonal mean. This must be between 1 and the number of years + of available AeroNET data.
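To make the interplay of these thresholds concrete, here is a minimal sketch of the default screening applied to a single station. It is an illustration only, not the code from ``aod_aeronet_assess.py``: it assumes the station's monthly means are supplied as a ``(year, month) -> AOD`` mapping that already satisfies ``min_days_per_mon``, it counts December towards the DJF season of the following year, and the function name is hypothetical.

.. code-block:: python

    import numpy as np

    SEASONS = {"DJF": (12, 1, 2), "MAM": (3, 4, 5),
               "JJA": (6, 7, 8), "SON": (9, 10, 11)}

    def screen_station(monthly_aod, min_mon_per_seas=3,
                       min_seas_per_year=4, min_seas_per_clim=5):
        """Return the multiannual seasonal AOD means that pass screening."""
        # Group monthly means by (year, season); December is counted
        # towards the DJF season of the following year.
        members = {}
        for (year, month), aod in monthly_aod.items():
            season = next(s for s, m in SEASONS.items() if month in m)
            seas_year = year + 1 if month == 12 else year
            members.setdefault((seas_year, season), []).append(aod)
        # Criterion 2: each seasonal mean needs enough monthly means.
        seasonal = {key: float(np.mean(vals))
                    for key, vals in members.items()
                    if len(vals) >= min_mon_per_seas}
        # Criterion 3: a year is valid only if enough seasons survived.
        valid_years = {year for year, _ in seasonal
                       if sum(y == year for y, _ in seasonal)
                       >= min_seas_per_year}
        # Criterion 4: a climatology needs enough valid seasonal means.
        clim = {}
        for season in SEASONS:
            values = [mean for (year, s), mean in seasonal.items()
                      if s == season and year in valid_years]
            if len(values) >= min_seas_per_clim:
                clim[season] = float(np.mean(values))
        return clim

In this sketch, any season that fails the checks simply drops out of the returned climatology; the station would then not contribute to the comparison for that season.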
+
+   *Optional settings for script*
+
+   * None
+
+   *Required settings for variables*
+
+   * None
+
+   *Optional settings for variables*
+
+   * None
+
+   *Required settings for preprocessor*
+
+   * None
+
+   *Optional settings for preprocessor*
+
+   * None
+
+   *Color tables*
+
+   * brewer_Spectral_11
+
+
+Variables
+---------
+
+* od440aer (atmos, monthly mean, longitude latitude time)
+
+
+Observations and reformat scripts
+---------------------------------
+
+* Note: (1) obs4MIPs data can be used directly without any preprocessing;
+  (2) see headers of reformat scripts for non-obs4MIPs data for download
+  instructions.
+
+* The AeroNET data is downloaded from the AeroNET website using the
+  downloader:
+
+  .. code-block:: bash
+
+     $ esmvaltool data download AERONET
+
+* The AeroNET data is formatted (CMORized) using the formatter:
+
+  .. code-block:: bash
+
+     $ esmvaltool data format AERONET
+
+
+References
+----------
+* Holben, B. N., T. F. Eck, I. Slutsker, D. Tanre, J. P. Buis, A. Setzer,
+  E. Vermote, J. A. Reagan, Y. Kaufman, T. Nakajima, F. Lavenu, I. Jankowiak,
+  and A. Smirnov, 1998: AERONET - A federated instrument network and data
+  archive for aerosol characterization, Rem. Sens. Environ., 66, 1-16.
+
+* Holben, B. N., D. Tanre, A. Smirnov, T. F. Eck, I. Slutsker, N. Abuhassan,
+  W. W. Newcomb, J. Schafer, B. Chatenet, F. Lavenu, Y. J. Kaufman,
+  J. Vande Castle, A. Setzer, B. Markham, D. Clark, R. Frouin, R. Halthore,
+  A. Karnieli, N. T. O'Neill, C. Pietras, R. T. Pinker, K. Voss, and
+  G. Zibordi, 2001: An emerging ground-based aerosol climatology: Aerosol
+  Optical Depth from AERONET, J. Geophys. Res., 106, 12067-12097.
+
+* Mulcahy, J. P., Johnson, C., Jones, C. G., Povey, A. C., Scott, C. E.,
+  Sellar, A., Turnock, S. T., Woodhouse, M. T., Abraham, N. L., Andrews,
+  M. B., Bellouin, N., Browse, J., Carslaw, K. S., Dalvi, M., Folberth,
+  G. A., Glover, M., Grosvenor, D. P., Hardacre, C., Hill, R., Johnson, B.,
+  Jones, A., Kipling, Z., Mann, G., Mollard, J., O’Connor, F. M., Palmiéri,
+  J., Reddington, C., Rumbold, S. T., Richardson, M., Schutgens, N. A. J.,
+  Stier, P., Stringer, M., Tang, Y., Walton, J., Woodward, S., and Yool, A.:
+  Description and evaluation of aerosol in UKESM1 and HadGEM3-GC3.1 CMIP6
+  historical simulations, Geosci. Model Dev., 13, 6383–6423, 2020.
+
+Example plots
+-------------
+
+.. _fig_aod_aeronet_assess_1:
+.. figure:: /recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1994_2014_DJF.png
+   :align: center
+
+   Evaluation of AOD at 440 nm from the UKESM1 historical ensemble member
+   r1i1p1f2 against the AeroNET climatology from ground-based observations
+   for Dec-Jan-Feb. The multiannual seasonal mean is calculated for the model
+   data for the period 1994-2014. The model output is overlaid with the
+   observational climatology.
+
+.. _fig_aod_aeronet_assess_2:
+.. figure:: /recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1994_2014_MAM.png
+   :align: center
+
+   Evaluation of AOD at 440 nm from the UKESM1 historical ensemble member
+   r1i1p1f2 against the AeroNET climatology from ground-based observations
+   for Mar-Apr-May. The multiannual seasonal mean is calculated for the model
+   data for the period 1994-2014. The model output is overlaid with the
+   observational climatology.
+
+.. _fig_aod_aeronet_assess_3:
+.. figure:: /recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1994_2014_JJA.png
+   :align: center
+
+   Evaluation of AOD at 440 nm from the UKESM1 historical ensemble member
+   r1i1p1f2 against the AeroNET climatology from ground-based observations
+   for Jun-Jul-Aug. The multiannual seasonal mean is calculated for the model
+   data for the period 1994-2014. The model output is overlaid with the
+   observational climatology.
+
+.. _fig_aod_aeronet_assess_4:
+.. figure:: /recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1994_2014_SON.png
+   :align: center
+
+   Evaluation of AOD at 440 nm from the UKESM1 historical ensemble member
+   r1i1p1f2 against the AeroNET climatology from ground-based observations
+   for Sep-Oct-Nov. The multiannual seasonal mean is calculated for the model
+   data for the period 1994-2014. The model output is overlaid with the
+   observational climatology.
+
+.. _fig_aod_aeronet_assess_5:
+.. figure:: /recipes/figures/aod_aeronet_assess/UKESM1-0-LL_CMIP_AERmon_historical_od440aer_gn_1994_2014_scatter.png
+   :align: center
+
+   Evaluation of AOD at 440 nm from the UKESM1 historical ensemble member
+   r1i1p1f2 against the AeroNET climatology from ground-based observations
+   for Dec-Jan-Feb, Mar-Apr-May, Jun-Jul-Aug and Sep-Oct-Nov. The multiannual
+   seasonal mean is calculated for the model data for the period 1994-2014.
diff --git a/doc/sphinx/source/recipes/recipe_arctic_ocean.rst b/doc/sphinx/source/recipes/recipe_arctic_ocean.rst
new file mode 100644
index 0000000000..9cbbd91d68
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_arctic_ocean.rst
@@ -0,0 +1,267 @@
+.. _recipes_arctic_ocean:
+
+Recipe for evaluating Arctic Ocean
+==================================
+
+Overview
+........
+
+The Arctic ocean is one of the areas of the Earth where the effects of
+climate change are especially visible today. The two most prominent processes
+are Arctic amplification [e.g. Serreze and Barry, 2011] and the decrease of
+sea ice area and thickness. Both receive good coverage in the literature and
+are already well studied. Much less attention is paid to the interior of the
+Arctic Ocean itself. In order to increase our confidence in projections of
+the Arctic climate future, a proper representation of the Arctic Ocean
+hydrography is necessary.
+
+The main focus of this diagnostics is the evaluation of the ocean components
+of climate models in the Arctic Ocean; however, most of the diagnostics are
+implemented in a way that can be easily expanded to other parts of the World
+Ocean. Most of the diagnostics aim at model comparison to climatological data
+(PHC3), so we target historical CMIP simulations. However, scenario runs can
+also be analysed to get an impression of how the Arctic Ocean hydrography
+will change in the future.
+
+At present only a subset of CMIP models can be used, in particular because
+our analysis is limited to z-coordinate models.
+
+Available recipes
+.................
+Recipe is stored in recipes/
+
+* recipe_arctic_ocean.yml : contains all settings necessary to run
+  diagnostics and metrics.
+
+Currently the workflow does not allow to easily separate diagnostics from
+each other, since some of the diagnostics rely on the results of other
+diagnostics. The recipe currently does not use preprocessor options, so input
+files are CMORised monthly mean 3D ocean variables on the original grid.
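+
+Most of the diagnostics below share one building block: an area-weighted
+horizontal mean over a basin mask, computed per month and per depth level.
+A minimal Python sketch (illustrative only; the names are hypothetical and
+this is not the package's actual code):
+
+.. code-block:: python
+
+   import numpy as np
+
+   def basin_mean_profile(field, basin_mask, cell_area):
+       """Area-weighted horizontal mean for every time step and depth level.
+
+       field: (time, depth, y, x) array; basin_mask: (y, x) booleans;
+       cell_area: (y, x) grid-cell areas. Returns shape (time, depth),
+       i.e. the array behind a Hovmoeller diagram.
+       """
+       weights = np.where(basin_mask, cell_area, 0.0)
+       num = np.nansum(field * weights, axis=(-2, -1))
+       den = np.nansum(np.where(np.isnan(field), 0.0, weights), axis=(-2, -1))
+       return num / den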
+
+The following plots will be produced by the recipe:
+
+Hovmoeller diagrams
+-------------------
+
+The characteristics of the vertical TS distribution can change with time, and
+consequently the vertical TS distribution is an important indicator of the
+behaviour of the coupled ocean-sea ice-atmosphere system in the North
+Atlantic and Arctic Oceans. One way to evaluate these changes is by using
+Hovmoeller diagrams. Hovmoeller diagrams for the two main Arctic Ocean basins
+(Eurasian and Amerasian) with T and S spatially averaged on a monthly basis
+for every vertical level are available. This diagnostic allows the temporal
+evolution of the vertical ocean potential temperature distribution to be
+assessed.
+
+Related settings in the recipe:
+
+  .. code-block:: yaml
+
+    # Define regions, as a list.
+    # 'EB' - Eurasian Basin of the Arctic Ocean
+    # 'AB' - Amerasian Basin of the Arctic Ocean
+    # 'Barents_sea' - Barents Sea
+    # 'North_sea' - North Sea
+    hofm_regions: ["AB" , 'EB']
+    # Define variables to use, should also be in "variables"
+    # entry of your diagnostic
+    hofm_vars: ['thetao', 'so']
+    # Maximum depth of Hovmoeller and vertical profiles
+    hofm_depth: 1500
+    # Define if Hovmoeller diagrams will be plotted.
+    hofm_plot: True
+    # Define colormap (as a list, same size as list with variables)
+    # Only cmaps from matplotlib and cmocean are supported.
+    # Additional cmap - 'custom_salinity1'.
+    hofm_cmap: ['Spectral_r', 'custom_salinity1']
+    # Data limits for plots,
+    # List of the same size as the list of the variables
+    # each entry is [vmin, vmax, number of levels, rounding limit]
+    hofm_limits: [[-2, 2.3, 41, 1], [30.5, 35.1, 47, 2]]
+    # Number of columns in the plot
+    hofm_ncol: 3
+
+.. _fig_hofm:
+.. figure:: /recipes/figures/arctic_ocean/hofm.png
+   :align: center
+   :width: 9cm
+
+   Hovmoeller diagram of monthly spatially averaged potential temperature in
+   the Eurasian Basin of the Arctic Ocean for selected CMIP5 climate models
+   (1970-2005).
+
+Vertical profiles
+-----------------
+
+The vertical structure of temperature and salinity (T and S) in the ocean
+model is a key diagnostic that is used for ocean model evaluation. Realistic
+T and S distributions mean that the model properly represents dynamic and
+thermodynamic processes in the ocean. Different ocean basins have different
+hydrological regimes, so it is important to perform the analysis of the
+vertical TS distribution for different basins separately. The basic
+diagnostic in this sense is the mean vertical profile of temperature and
+salinity over some basin, averaged for a relatively long period of time. In
+addition to individual vertical profiles for every model, we also show the
+mean over all participating models and a similar profile from the
+climatological data (PHC3).
+
+Several settings for vertical profiles (region, variables, maximum depths)
+will be determined by the Hovmoeller diagram settings. The reason is that
+vertical profiles are calculated from Hovmoeller diagram data. The mean
+vertical profile is calculated by linearly interpolating the data on standard
+WOA/PHC depths.
+
+Related settings in the recipe:
+
+  .. code-block:: yaml
+
+    # Define regions, as a list.
+    # 'EB' - Eurasian Basin of the Arctic Ocean
+    # 'AB' - Amerasian Basin of the Arctic Ocean
+    # 'Barents_sea' - Barents Sea
+    # 'North_sea' - North Sea
+    hofm_regions: ["AB" , 'EB']
+    # Define variables to use, should also be in "variables" entry of your diagnostic
+    hofm_vars: ['thetao', 'so']
+    # Maximum depth of Hovmoeller and vertical profiles
+    hofm_depth: 1500
+
+.. _fig_vertical:
+.. figure:: /recipes/figures/arctic_ocean/vertical.png
+   :align: center
+   :width: 9cm
+
+   Mean (1970-2005) vertical potential temperature distribution in the
+   Eurasian basin for participating CMIP5 coupled ocean models, PHC3
+   climatology (dotted red line) and multi-model mean (dotted black line).
+
+Spatial distribution maps of variables
+--------------------------------------
+
+The spatial distribution of basic oceanographic variables characterises the
+properties and spreading of ocean water masses. For the coupled models,
+capturing the spatial distribution of oceanographic variables is especially
+important in order to correctly represent the ocean-ice-atmosphere interface.
+We have implemented plots with spatial maps of temperature and salinity at
+original model levels.
+
+The diagnostic plots the spatial distribution of variables at selected depths
+in a North Polar projection on the original model grid. For plotting, the
+model depths that are closest to the provided `plot2d_depths` will be
+selected. The settings allow the color maps and limits to be defined for each
+variable individually. Color maps should be either part of the standard
+matplotlib set or one of the cmocean color maps. The additional colormap
+`custom_salinity1` is provided.
+
+Related settings in the recipe:
+
+  .. code-block:: yaml
+
+    # Depths for spatial distribution maps
+    plot2d_depths: [10, 100]
+    # Variables to plot spatial distribution maps
+    plot2d_vars: ['thetao', 'so']
+    # Define colormap (as a list, same size as list with variables)
+    # Only cmaps from matplotlib and cmocean are supported.
+    # Additional cmap - 'custom_salinity1'.
+    plot2d_cmap: ['Spectral_r', 'custom_salinity1']
+    # Data limits for plots,
+    # List of the same size as the list of the variables
+    # each entry is [vmin, vmax, number of levels, rounding limit]
+    plot2d_limits: [[-2, 4, 20, 1], [30.5, 35.1, 47, 2]]
+    # number of columns for plots
+    plot2d_ncol: 3
+
+.. _fig_spatial:
+.. figure:: /recipes/figures/arctic_ocean/spatial.png
+   :align: center
+   :width: 9cm
+
+   Mean (1970-2005) salinity distribution at 100 meters.
+
+Spatial distribution maps of biases
+-----------------------------------
+
+For temperature and salinity, we have implemented spatial maps of model
+biases from the observed climatology. For the model biases, values from the
+original model levels are linearly interpolated to the climatology and then
+spatially interpolated from the model grid to the regular PHC (climatology)
+grid. The resulting fields show model performance in simulating the spatial
+distribution of temperature and salinity.
+
+Related settings in the recipe:
+
+  .. code-block:: yaml
+
+    plot2d_bias_depths: [10, 100]
+    # Variables to plot spatial distribution of the bias for.
+    plot2d_bias_vars: ['thetao', 'so']
+    # Color map names for every variable
+    plot2d_bias_cmap: ['balance', 'balance']
+    # Data limits for plots,
+    # List of the same size as the list of the variables
+    # each entry is [vmin, vmax, number of levels, rounding limit]
+    plot2d_bias_limits: [[-3, 3, 20, 1], [-2, 2, 47, 2]]
+    # number of columns in the bias plots
+    plot2d_bias_ncol: 3
+
+.. _fig_bias:
+.. figure:: /recipes/figures/arctic_ocean/bias.png
+   :align: center
+   :width: 9cm
+
+   Mean (1970-2005) salinity bias at 100 m relative to the PHC3 climatology.
+
+Transects
+---------
+Vertical transects through arbitrary sections are important for the analysis
+of the vertical distribution of ocean water properties, and are especially
+useful when the exchange between different ocean basins is evaluated.
+We have implemented diagnostics that allow the definition of an arbitrary
+ocean section by providing a set of points on the ocean surface. For each
+point, a vertical profile on the original model levels is interpolated. All
+profiles are then connected to form a transect. The great-circle distance
+between the points is calculated and used as the along-track distance.
+
+One of the main use cases is to create vertical sections across ocean
+passages, for example the Fram Strait.
+
+The diagnostic plots transect maps for a pre-defined set of transects
+(defined in `regions.py`, see below). The `transects_depth` setting defines
+the maximum depth of the transect. Transects are calculated from data
+averaged over the whole time period.
+
+Related settings in the recipe:
+
+  .. code-block:: yaml
+
+    # Select regions (transects) to plot
+    # Available options are:
+    # AWpath - transect along the path of the Atlantic Water
+    # Fram - Fram strait
+    transects_regions: ["AWpath", "Fram"]
+    # Variables to plot on transects
+    transects_vars: ['thetao', 'so']
+    # Color maps for every variable
+    transects_cmap: ['Spectral_r', 'custom_salinity1']
+    # Data limits for plots,
+    # List of the same size as the list of the variables
+    # each entry is [vmin, vmax, number of levels, rounding limit]
+    transects_limits: [[-2, 4, 20, 1], [30.5, 35.1, 47, 2]]
+    # Maximum depth to plot the data
+    transects_depth: 1500
+    # number of columns
+    transects_ncol: 3
+
+.. _fig_transect:
+.. figure:: /recipes/figures/arctic_ocean/transect.png
+   :align: center
+   :width: 9cm
+
+   Mean (1970-2005) potential temperature across the Fram strait.
+
+Atlantic Water core depth and temperature
+-----------------------------------------
+
+Atlantic water is a key water mass of the Arctic Ocean and its proper
+representation is one of the main challenges in Arctic Ocean modelling. We
+have created two metrics by which models can be easily compared in terms of
+their Atlantic water simulation. The temperature of the Atlantic Water core
+is calculated for every model as the maximum potential temperature between
+200 and 1000 meters depth in the Eurasian Basin. The depth of the Atlantic
+Water core is calculated as the model level depth where this maximum
+temperature is found.
+
+The AW core depth and temperature will be calculated from the data generated
+for the Hovmoeller diagrams for the `EB` region, so `EB` has to be selected
+as one of the `hofm_regions` in the Hovmoeller diagram settings.
+
+In order to evaluate the spatial distribution of Atlantic water in different
+climate models we also provide a diagnostic with maps of the spatial
+temperature distribution at the model’s Atlantic Water core depth.
+
+.. _fig_aw_temp:
+.. figure:: /recipes/figures/arctic_ocean/aw_temp.png
+   :align: center
+   :width: 9cm
+
+   Mean (1970-2005) Atlantic Water core temperature. PHC3 is an observed
+   climatology.
+
+TS-diagrams
+-----------
+
+T-S diagrams combine temperature and salinity, which allows the analysis of
+water masses and their potential for mixing. The lines of constant density
+for specific ranges of temperature and salinity are shown on the background
+of the T-S diagram. The dots on the diagram are individual grid points from
+the specified region at all model levels within the user-specified depth
+range.
+
+Related settings in the recipe:
+
+  .. code-block:: yaml
+
+    tsdiag_regions: ["AB" , 'EB']
+    # Maximum depth to consider data for TS diagrams
+    tsdiag_depth: 1500
+    # Number of columns
+    tsdiag_ncol: 3
+
+.. _fig_ts:
+.. figure:: /recipes/figures/arctic_ocean/ts.png
+   :align: center
+   :width: 9cm
+
+   Mean (1970-2005) T-S diagrams for the Eurasian Basin of the Arctic Ocean.
+
+Available diagnostics
+.....................
+
+The following python modules are included in the diagnostics package:
+
+* arctic_ocean.py : Reads settings from the recipe and calls functions to do
+  the analysis and plots.
+* getdata.py : Deals with data preparation.
+* interpolation.py : Includes horizontal and vertical interpolation functions
+  specific to ocean models.
+* plotting.py : Ocean-specific plotting functions.
+* regions.py : Contains code to select specific regions, and the definition
+  of the regions themselves.
+* utils.py : Helpful utilities.
+
+Diagnostics are stored in diag_scripts/arctic_ocean/
+
+
+Variables
+---------
+
+* thetao (ocean, monthly, longitude, latitude, time)
+* so (ocean, monthly, longitude, latitude, time)
+
+
+Observations and reformat scripts
+---------------------------------
+
+* PHC3 climatology
+
+References
+----------
+
+* Ilıcak, M. et al., An assessment of the Arctic Ocean in a suite of
+  interannual CORE-II simulations. Part III: Hydrography and fluxes, Ocean
+  Modelling, Volume 100, April 2016, Pages 141-161, ISSN 1463-5003,
+  doi.org/10.1016/j.ocemod.2016.02.004
+
+* Steele, M., Morley, R., & Ermold, W. (2001). PHC: A global ocean
+  hydrography with a high-quality Arctic Ocean. Journal of Climate, 14(9),
+  2079-2087.
+
+* Wang, Q., et al., An assessment of the Arctic Ocean in a suite of
+  interannual CORE-II simulations. Part I: Sea ice and solid freshwater,
+  Ocean Modelling, Volume 99, March 2016, Pages 110-132, ISSN 1463-5003,
+  doi.org/10.1016/j.ocemod.2015.12.008
+
+* Wang, Q., Ilicak, M., Gerdes, R., Drange, H., Aksenov, Y., Bailey, D. A.,
+  ... & Cassou, C. (2016). An assessment of the Arctic Ocean in a suite of
+  interannual CORE-II simulations. Part II: Liquid freshwater. Ocean
+  Modelling, 99, 86-109, doi.org/10.1016/j.ocemod.2015.12.009
diff --git a/doc/sphinx/source/recipes/recipe_autoassess_landsurface_permafrost.rst b/doc/sphinx/source/recipes/recipe_autoassess_landsurface_permafrost.rst
new file mode 100644
index 0000000000..4d6f9b3242
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_autoassess_landsurface_permafrost.rst
@@ -0,0 +1,203 @@
+.. _recipe_autoassess_landsurface_permafrost.rst:
+
+Land-surface Permafrost - Autoassess diagnostics
+================================================
+
+Overview
+--------
+
+Permafrost thaw is an important impact of climate change, and is the source
+of a potentially strong Earth system feedback through the release of soil
+carbon into the atmosphere. This recipe provides metrics that evaluate the
+climatological performance of models in simulating soil temperatures that
+control permafrost. Performance metrics (with observation-based estimates in
+brackets):
+
+* permafrost area (17.46 million square km)
+* fractional area of permafrost northwards of zero degree isotherm (0.47)
+* soil temperature at 1 m minus soil temperature at surface (-0.53 degrees C)
+* soil temperature at surface minus air temperature (6.15 degrees C)
+* annual amplitude at 1 m / annual amplitude at the surface (0.40 unitless)
+* annual amplitude at the surface / annual air temperature (0.57 unitless)
+
+
+Plots:
+
+* Maps of permafrost extent and zero degC isotherm
+* Normalised assessment metrics plot comparing control and experiment
+
+The recipe takes as input a control model and an experiment model, and the
+comparisons are made between these two models.
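+
+As an illustration of the first metric, permafrost extent can be diagnosed
+from monthly soil temperatures. The sketch below is indicative only; it is
+not the autoassess algorithm, and the observational comparison additionally
+weights cells by the proportion of permafrost within them (see the
+references):
+
+.. code-block:: python
+
+   import numpy as np
+
+   def permafrost_area_km2(tsl_1m, cell_area_km2):
+       """Sum the area of cells whose ~1 m soil never thaws during the year.
+
+       tsl_1m: (12, y, x) monthly mean soil temperature in K at ~1 m depth;
+       cell_area_km2: (y, x) grid-cell areas in square km.
+       """
+       tsl_1m = np.asarray(tsl_1m, dtype=float)
+       annual_max = tsl_1m.max(axis=0)      # warmest month at each cell
+       frozen_all_year = annual_max < 273.15
+       return float(np.where(frozen_all_year, cell_area_km2, 0.0).sum())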
+
+
+Available recipes and diagnostics
+---------------------------------
+
+Recipes are stored in esmvaltool/recipes/
+
+   * recipe_autoassess_landsurface_permafrost.yml
+
+Diagnostics are stored in esmvaltool/diag_scripts/autoassess/
+
+   * autoassess_area_base.py: wrapper for autoassess scripts
+   * land_surface_permafrost/permafrost.py: script to calculate permafrost
+     metrics
+   * plot_autoassess_metrics.py: plot normalised assessment metrics
+
+
+User settings in recipe
+-----------------------
+
+#. Script autoassess_area_base.py
+
+   *Required settings for script*
+
+   * area: must equal land_surface_permafrost to select this diagnostic
+   * control_model: name of model to be used as control
+   * exp_model: name of model to be used as experiment
+   * start: date (YYYY/MM/DD) at which period begins (see note on time gating)
+   * end: date (YYYY/MM/DD) at which period ends (see note on time gating)
+
+   *Optional settings for script*
+
+   * title: arbitrary string with name of diagnostic
+   * obs_models: unused for this recipe
+
+   *Required settings for variables*
+
+   none
+
+   *Optional settings for variables*
+
+   none
+
+
+#. Script plot_autoassess_metrics.py
+
+   *Required settings for script*
+
+   * area: must equal land_surface_permafrost to select this diagnostic
+   * control_model: name of model to be used as control in metrics plot
+   * exp_model: name of model to be used as experiment in metrics plot
+   * title: string to use as plot title
+
+   *Optional settings for script*
+
+   none
+
+   *Required settings for variables*
+
+   none
+
+   *Optional settings for variables*
+
+   none
+
+
+
+Variables
+---------
+
+* tas (atmos, monthly mean, longitude latitude time)
+* tsl (land, monthly mean, longitude latitude time)
+* mrsos (land, monthly mean, longitude latitude time)
+* sftlf (mask, fixed, longitude latitude)
+
+
+Observations and reformat scripts
+---------------------------------
+
+None
+
+
+References
+----------
+
+* Observed permafrost extent is from http://nsidc.org/data/ggd318.html:
+  Brown, J., O. Ferrians, J. A. Heginbottom, and E. Melnikov. 2002.
+  Circum-Arctic Map of Permafrost and Ground-Ice Conditions, Version 2.
+  Boulder, Colorado USA. NSIDC: National Snow and Ice Data Center. When
+  calculating the global area of permafrost the grid cells are weighted by
+  the proportion of permafrost within them.
+
+* Annual mean air temperature is from: Legates, D. R., and C. J. Willmott,
+  1990: Mean seasonal and spatial variability in global surface air
+  temperature. Theor. Appl. Climatol., 41, 11-21. The annual mean is
+  calculated from the seasonal mean data available at the Met Office.
+
+* The soil temperature metrics are calculated following: Charles D. Koven,
+  William J. Riley, and Alex Stern, 2013: Analysis of Permafrost Thermal
+  Dynamics and Response to Climate Change in the CMIP5 Earth System Models.
+  J. Climate, 26. (Table 3) http://dx.doi.org/10.1175/JCLI-D-12-00228.1
+  The locations used for Table 3 were extracted from the model and the
+  modelled metrics calculated.
+
+
+Example plots
+-------------
+
+.. figure:: /recipes/figures/autoassess_landsurface/pf_extent_north_america_ACCESS-CM2.png
+   :scale: 50 %
+   :alt: pf_extent_north_america_ACCESS-CM2.png
+
+   Permafrost extent and zero degC isotherm, showing North America
+
+.. figure:: /recipes/figures/autoassess_landsurface/pf_extent_asia_ACCESS-CM2.png
+   :scale: 50 %
+   :alt: pf_extent_asia_ACCESS-CM2.png
+
+   Permafrost extent and zero degC isotherm, showing Asia and Europe
+
+.. figure:: /recipes/figures/autoassess_landsurface/Permafrost_Metrics.png
+   :scale: 50 %
+   :alt: Permafrost_Metrics.png
+
+   Normalised metrics plot comparing a control and experiment simulation
+
+
+Additional notes on usage
+-------------------------
+The ``landsurface_permafrost`` area metric is part of the
+``esmvaltool/diag_scripts/autoassess`` diagnostics, and, as any other
+``autoassess`` metric, it uses ``autoassess_area_base.py`` as a general
+purpose wrapper. This wrapper accepts a number of input arguments that are
+read through from the recipe.
+
+This recipe is part of the larger group of Autoassess metrics ported to
+ESMValTool from the native Autoassess package from the UK's Met Office. The
+``diagnostics`` settings are almost the same as for the other Autoassess
+metrics.
+
+.. note::
+
+   **Time gating for autoassess metrics.**
+
+   To preserve the native Autoassess functionalities, data loading and
+   selection on time is done somewhat differently for ESMValTool's
+   autoassess metrics: the time selection is done in the preprocessor as
+   per usual but a further time selection is performed as part of the
+   diagnostic. For this purpose the user will specify a ``start:`` and
+   ``end:`` pair of arguments of ``scripts: autoassess_script`` (see below
+   for example). These are formatted as ``YYYY/MM/DD``; this is necessary
+   since the Autoassess metrics are computed from 1-Dec through 1-Dec
+   rather than 1-Jan through 1-Jan. This is a temporary implementation to
+   fully replicate the native Autoassess functionality and a minor user
+   inconvenience since they need to set an extra pair of ``start`` and
+   ``end`` arguments in the diagnostic; this will be phased out when all
+   the native Autoassess metrics have been ported to ESMValTool and the
+   review has completed.
+
+
+An example of standard inputs as read by ``autoassess_area_base.py`` and
+passed over to the diagnostic/metric is listed below.
+
+.. code-block:: yaml
+
+  scripts:
+    plot_landsurf_permafrost: &plot_landsurf_permafrost_settings
+      <<: *autoassess_landsurf_permafrost_settings
+      control_model: MPI-ESM-LR
+      exp_model: MPI-ESM-MR
+      script: autoassess/plot_autoassess_metrics.py
+      ancestors: ['*/autoassess_landsurf_permafrost']
+      title: "Plot Land-Surface Permafrost Metrics"
+      plot_name: "Permafrost_Metrics"
+      diag_tag: aa_landsurf_permafrost
+      diag_name: autoassess_landsurf_permafrost
diff --git a/doc/sphinx/source/recipes/recipe_autoassess_landsurface_soilmoisture.rst b/doc/sphinx/source/recipes/recipe_autoassess_landsurface_soilmoisture.rst
new file mode 100644
index 0000000000..5ba790b093
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_autoassess_landsurface_soilmoisture.rst
@@ -0,0 +1,121 @@
+.. _recipe_autoassess_landsurface_soilmoisture.rst:
+
+Land-surface Soil Moisture - Autoassess diagnostics
+===================================================
+
+Overview
+--------
+
+Soil moisture is a critical component of the land system, controlling surface
+energy fluxes in many areas of the world. This recipe provides metrics that
+evaluate the skill of models' spatial and seasonal distribution of soil
+moisture against the ESA CCI soil moisture ECV.
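+
+The headline metric listed below is a median absolute error between model
+and observed seasonal climatologies. Schematically (illustrative Python
+only, with hypothetical names; the actual code lives in
+``land_surface_soilmoisture/soilmoisture.py``):
+
+.. code-block:: python
+
+   import numpy as np
+
+   def median_absolute_error(model_clim, obs_clim):
+       """Median over all grid cells of |model - obs| for one season."""
+       return float(np.nanmedian(np.abs(model_clim - obs_clim)))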
+
+Performance metrics:
+
+* median absolute error (model minus observations)
+
+Metrics are calculated using model and observation multi-year climatologies
+(seasonal means) for meteorological seasons:
+
+* December-January-February (djf)
+* March-April-May (mam)
+* June-July-August (jja)
+* September-October-November (son)
+
+Plots:
+
+* Normalised assessment metrics plot comparing control and experiment
+
+The recipe takes as input a control model and an experiment model, and the
+comparisons are made between these two models.
+
+Available recipes and diagnostics
+---------------------------------
+
+Recipes are stored in esmvaltool/recipes/
+
+   * recipe_autoassess_landsurface_soilmoisture.yml
+
+Diagnostics are stored in esmvaltool/diag_scripts/autoassess/
+
+   * land_surface_soilmoisture/soilmoisture.py: script to calculate soil
+     moisture metrics
+   * plot_autoassess_metrics.py: plot normalised assessment metrics
+
+
+User settings in recipe
+-----------------------
+
+#. Script soilmoisture.py
+
+   *Required settings for script*
+
+   * area: must equal land_surface_soilmoisture to select this diagnostic
+   * control_model: name of model to be used as control
+   * exp_model: name of model to be used as experiment
+
+   *Optional settings for script*
+
+   none
+
+   *Required settings for variables*
+
+   none
+
+   *Optional settings for variables*
+
+   none
+
+
+#. Script plot_autoassess_metrics.py
+
+   *Required settings for script*
+
+   * area: must equal land_surface_soilmoisture to select this diagnostic
+   * control_model: name of model to be used as control in metrics plot
+   * exp_model: name of model to be used as experiment in metrics plot
+   * title: string to use as plot title
+
+   *Optional settings for script*
+
+   none
+
+   *Required settings for variables*
+
+   none
+
+   *Optional settings for variables*
+
+   none
+
+
+Variables
+---------
+
+* mrsos (from models: land, monthly mean, longitude latitude time)
+* sm (from observations: land, monthly mean, longitude latitude time)
+
+
+Observations and reformat scripts
+---------------------------------
+
+1999-2008 climatologies (seasonal means) from ESA ECV Soil Moisture Dataset
+v1. Produced by the ESA CCI soil moisture project:
+https://www.esa-soilmoisture-cci.org/node/93
+
+
+References
+----------
+* Dorigo, W. A., Wagner, W., Albergel, C., Albrecht, F., Balsamo, G.,
+  Brocca, L., Chung, D., Ertl, M., Forkel, M., Gruber, A., Haas, E., Hamer,
+  D. P., Hirschi, M., Ikonen, J., De Jeu, R., Kidd, R., Lahoz, W., Liu,
+  Y. Y., Miralles, D., Lecomte, P. (2017). ESA CCI Soil Moisture for
+  improved Earth system understanding: State-of-the art and future
+  directions. In Remote Sensing of Environment, 2017, ISSN 0034-4257,
+  https://doi.org/10.1016/j.rse.2017.07.001.
+
+* Gruber, A., Scanlon, T., van der Schalie, R., Wagner, W., Dorigo, W.
+  (2019). Evolution of the ESA CCI Soil Moisture Climate Data Records and
+  their underlying merging methodology. Earth System Science Data 11,
+  717-739, https://doi.org/10.5194/essd-11-717-2019
+
+
+Example plots
+-------------
+
+.. figure:: /recipes/figures/autoassess_landsurface/Soilmoisture_Metrics.png
+   :scale: 50 %
+   :alt: Soilmoisture_Metrics.png
+
+   Normalised metrics plot comparing a control and experiment simulation
diff --git a/doc/sphinx/source/recipes/recipe_autoassess_landsurface_surfrad.rst b/doc/sphinx/source/recipes/recipe_autoassess_landsurface_surfrad.rst
new file mode 100644
index 0000000000..2b862cbf47
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_autoassess_landsurface_surfrad.rst
@@ -0,0 +1,182 @@
+.. _recipe_autoassess_landsurface_surfrad.rst:
+
+Land-surface Surface Radiation - Autoassess diagnostics
+=======================================================
+
+Overview
+--------
+
+The simulation of surface radiation is central to all aspects of model
+performance, and can often reveal compensating errors which are hidden within
+top of atmosphere fluxes. This recipe provides metrics that evaluate the
+skill of models' spatial and seasonal distribution of surface shortwave and
+longwave radiation against the CERES EBAF satellite dataset.
+
+Performance metrics:
+
+* median absolute error (model minus observations) net surface shortwave (SW)
+  radiation
+* median absolute error (model minus observations) net surface longwave (LW)
+  radiation
+
+Metrics are calculated using model and observation multi-year climatologies
+(seasonal means) for meteorological seasons:
+
+* December-January-February (djf)
+* March-April-May (mam)
+* June-July-August (jja)
+* September-October-November (son)
+* Annual mean (ann)
+
+
+Plots:
+
+* Normalised assessment metrics plot comparing control and experiment
+
+The recipe takes as input a control model and an experiment model, and the
+comparisons are made between these two models.
+
+
+Available recipes and diagnostics
+---------------------------------
+
+Recipes are stored in esmvaltool/recipes/
+
+   * recipe_autoassess_landsurface_surfrad.yml
+
+Diagnostics are stored in esmvaltool/diag_scripts/autoassess/
+
+   * autoassess_area_base.py: wrapper for autoassess scripts
+   * land_surface_surfrad/surfrad.py: script to calculate surface radiation
+     metrics
+   * plot_autoassess_metrics.py: plot normalised assessment metrics
+
+
+User settings in recipe
+-----------------------
+
+#. Script autoassess_area_base.py
+
+   *Required settings for script*
+
+   * area: must equal land_surface_surfrad to select this diagnostic
+   * control_model: name of model to be used as control
+   * exp_model: name of model to be used as experiment
+   * start: date (YYYY/MM/DD) at which period begins (see note on time gating)
+   * end: date (YYYY/MM/DD) at which period ends (see note on time gating)
+   * climfiles_root: path to observation climatologies
+
+   *Optional settings for script*
+
+   * title: arbitrary string with name of diagnostic
+   * obs_models: unused for this recipe
+
+   *Required settings for variables*
+
+   none
+
+   *Optional settings for variables*
+
+   none
+
+
+#. Script plot_autoassess_metrics.py
+
+   *Required settings for script*
+
+   * area: must equal land_surface_surfrad to select this diagnostic
+   * control_model: name of model to be used as control in metrics plot
+   * exp_model: name of model to be used as experiment in metrics plot
+   * title: string to use as plot title
+
+   *Optional settings for script*
+
+   none
+
+   *Required settings for variables*
+
+   none
+
+   *Optional settings for variables*
+
+   none
+
+
+Variables
+---------
+
+* rsns (atmos, monthly mean, longitude latitude time)
+* rlns (atmos, monthly mean, longitude latitude time)
+* sftlf (mask, fixed, longitude latitude)
+
+
+Observations and reformat scripts
+---------------------------------
+
+2001-2012 climatologies (seasonal means) from CERES-EBAF Ed2.7.
+
+
+References
+----------
+* Loeb, N. G., D. R. Doelling, H. Wang, W. Su, C. Nguyen, J. G. Corbett,
+  L. Liang, C. Mitrescu, F. G. Rose, and S. Kato, 2018: Clouds and the
+  Earth's Radiant Energy System (CERES) Energy Balanced and Filled (EBAF)
+  Top-of-Atmosphere (TOA) Edition-4.0 Data Product. J. Climate, 31, 895-918,
+  doi: 10.1175/JCLI-D-17-0208.1.
+
+* Kato, S., F. G. Rose, D. A. Rutan, T. E. Thorsen, N. G. Loeb,
+  D. R. Doelling, X. Huang, W. L. Smith, W. Su, and S.-H. Ham, 2018: Surface
+  irradiances of Edition 4.0 Clouds and the Earth's Radiant Energy System
+  (CERES) Energy Balanced and Filled (EBAF) data product, J. Climate, 31,
+  4501-4527, doi: 10.1175/JCLI-D-17-0523.1
+
+
+
+Example plots
+-------------
+
+.. figure:: /recipes/figures/autoassess_landsurface/Surfrad_Metrics.png
+   :scale: 50 %
+   :alt: Surfrad_Metrics.png
+
+   Normalised metrics plot comparing a control and experiment simulation
+
+
+
+Inputs and usage
+----------------
+The ``landsurface_surfrad`` area metric is part of the
+``esmvaltool/diag_scripts/autoassess`` diagnostics, and, as any other
+``autoassess`` metric, it uses ``autoassess_area_base.py`` as a general
+purpose wrapper. This wrapper accepts a number of input arguments that are
+read through from the recipe.
+
+This recipe is part of the larger group of Autoassess metrics ported to
+ESMValTool from the native Autoassess package from the UK's Met Office. The
+``diagnostics`` settings are almost the same as for the other Autoassess
+metrics.
+
+.. note::
+
+   **Time gating for autoassess metrics.**
+
+   To preserve the native Autoassess functionalities, data loading and
+   selection on time is done somewhat differently for ESMValTool's
+   autoassess metrics: the time selection is done in the preprocessor as
+   per usual but a further time selection is performed as part of the
+   diagnostic. For this purpose the user will specify a ``start:`` and
+   ``end:`` pair of arguments of ``scripts: autoassess_script`` (see below
+   for example). These are formatted as ``YYYY/MM/DD``; this is necessary
+   since the Autoassess metrics are computed from 1-Dec through 1-Dec
+   rather than 1-Jan through 1-Jan. This is a temporary implementation to
+   fully replicate the native Autoassess functionality and a minor user
+   inconvenience since they need to set an extra pair of ``start`` and
+   ``end`` arguments in the diagnostic; this will be phased out when all
+   the native Autoassess metrics have been ported to ESMValTool and the
+   review has completed.
+
+
+An example of standard inputs as read by ``autoassess_area_base.py`` and
+passed over to the diagnostic/metric is listed below.
+
+
+.. code-block:: yaml
+
+  scripts:
+    autoassess_landsurf_surfrad: &autoassess_landsurf_surfrad_settings
+      script: autoassess/autoassess_area_base.py
+      title: "Autoassess Land-Surface Diagnostic Surfrad Metric"
+      area: land_surface_surfrad
+      control_model: UKESM1-0-LL
+      exp_model: UKESM1-0-LL
+      obs_models: [CERES-EBAF]
+      obs_type: obs4MIPs
+      start: 1997/12/01
+      end: 2002/12/01
diff --git a/doc/sphinx/source/recipes/recipe_autoassess_stratosphere.rst b/doc/sphinx/source/recipes/recipe_autoassess_stratosphere.rst
new file mode 100644
index 0000000000..c2e6839dd9
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_autoassess_stratosphere.rst
@@ -0,0 +1,260 @@
+.. _recipe_autoassess_stratosphere.rst:
+
+Stratosphere - Autoassess diagnostics
+=====================================
+
+Overview
+--------
+
+Polar night jet / easterly jet strengths are defined as the maximum /
+minimum wind speed of the climatological zonal mean jet, and measure how
+realistic the zonal wind climatology is in the stratosphere.
+
+Extratropical temperature at 50 hPa (area averaged poleward of 60 degrees) is
+important for polar stratospheric cloud formation (in winter/spring),
+determining the amount of heterogeneous ozone depletion simulated by models
+with interactive chemistry schemes.
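+
+The 50 hPa quantity above is an area-weighted polar-cap average; a minimal
+Python sketch (illustrative only, assuming a zonal-mean temperature array;
+this is not the autoassess code):
+
+.. code-block:: python
+
+   import numpy as np
+
+   def polar_cap_mean(t50, lat, poleward_of=60.0):
+       """Cosine-weighted mean of 50 hPa temperature poleward of a latitude.
+
+       t50: (..., lat) zonal-mean temperature; lat: latitudes in degrees
+       (use lat <= -poleward_of for the southern hemisphere).
+       """
+       sel = lat >= poleward_of
+       weights = np.cos(np.deg2rad(lat[sel]))
+       return np.sum(t50[..., sel] * weights, axis=-1) / np.sum(weights)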
+
+The Quasi-Biennial Oscillation (QBO) is a good measure of tropical
+variability in the stratosphere. Zonal mean zonal wind at 30 hPa is used to
+define the period and amplitude of the QBO.
+
+The tropical tropopause cold point (100 hPa, 10S-10N) temperature is an
+important factor in determining the stratospheric water vapour
+concentrations at entry point (70 hPa, 10S-10N), and this in turn is
+important for the accurate simulation of stratospheric chemistry and
+radiative balance.
+
+
+Performance metrics:
+
+* Polar night jet: northern hem (January) vs. ERA Interim
+* Polar night jet: southern hem (July) vs. ERA Interim
+* Easterly jet: southern hem (January) vs. ERA Interim
+* Easterly jet: northern hem (July) vs. ERA Interim
+* 50 hPa temperature: 60N-90N (DJF) vs. ERA Interim
+* 50 hPa temperature: 60N-90N (MAM) vs. ERA Interim
+* 50 hPa temperature: 90S-60S (JJA) vs. ERA Interim
+* 50 hPa temperature: 90S-60S (SON) vs. ERA Interim
+* QBO period at 30 hPa vs. ERA Interim
+* QBO amplitude at 30 hPa (westward) vs. ERA Interim
+* QBO amplitude at 30 hPa (eastward) vs. ERA Interim
+* 100 hPa equatorial temp (annual mean) vs. ERA Interim
+* 100 hPa equatorial temp (annual cycle strength) vs. ERA Interim
+* 70 hPa 10S-10N water vapour (annual mean) vs. ERA-Interim
+
+Diagnostic plot:
+
+* Age of stratospheric air vs. observations from Andrews et al. (2001) and
+  Engel et al. (2009)
+
+
+Available recipes and diagnostics
+---------------------------------
+
+Recipes are stored in esmvaltool/recipes/
+
+* recipe_autoassess_stratosphere.yml
+
+Diagnostics are stored in esmvaltool/diag_scripts/autoassess/
+
+* autoassess_area_base.py: wrapper for autoassess scripts
+* stratosphere/strat_metrics_1.py: calculation of metrics
+* stratosphere/age_of_air.py: calculate age of stratospheric air
+* stratosphere/plotting.py: zonal mean wind and QBO plots
+* plot_autoassess_metrics.py: plot normalised assessment metrics
+
+
+User settings in recipe
+-----------------------
+
+The ``stratosphere`` area metric is part of the
+``esmvaltool/diag_scripts/autoassess`` diagnostics, and, as any other
+``autoassess`` metric, it uses ``autoassess_area_base.py`` as a general
+purpose wrapper. This wrapper accepts a number of input arguments that are
+read through from the recipe.
+
+This recipe is part of the larger group of Autoassess metrics ported to
+ESMValTool from the native Autoassess package from the UK's Met Office. The
+``diagnostics`` settings are almost the same as for the other Autoassess
+metrics.
+
+.. note::
+
+   **Time gating for autoassess metrics.**
+
+   To preserve the native Autoassess functionalities, data loading and
+   selection on time is done somewhat differently for ESMValTool's
+   autoassess metrics: the time selection is done in the preprocessor as
+   per usual but a further time selection is performed as part of the
+   diagnostic. For this purpose the user will specify a ``start:`` and
+   ``end:`` pair of arguments of ``scripts: autoassess_script`` (see below
+   for example). These are formatted as ``YYYY/MM/DD``; this is necessary
+   since the Autoassess metrics are computed from 1-Dec through 1-Dec
+   rather than 1-Jan through 1-Jan. This is a temporary implementation to
+   fully replicate the native Autoassess functionality and a minor user
+   inconvenience since they need to set an extra pair of ``start`` and
+   ``end`` arguments in the diagnostic; this will be phased out when all
+   the native Autoassess metrics have been ported to ESMValTool and the
+   review has completed.
+
+.. note::
+
+   **Polar Night/Easterly Jets Metrics**
+
+   Polar Night Jet (PNJ) metrics require data available at very low air
+   pressures, i.e. very high altitudes; both the Polar Night Jet and
+   Easterly Jet computations should be performed using ``ta`` and ``ua``
+   data at ``<< 100 Pa``; the lowest air pressure found in atmospheric CMOR
+   mip tables corresponds to the ``plev39`` air pressure table, and is used
+   in the ``AERmonZ`` mip. If the user requires correct calculations of
+   these jets, it is highly advisable to use data from ``AERmonZ``. Note
+   that the standard QBO calculation is exact for the ``plev17`` or
+   ``plev19`` tables.
+
+An example of standard inputs as read by ``autoassess_area_base.py`` and
+passed over to the diagnostic/metric is listed below.
+
+
+.. code-block:: yaml
+
+  scripts:
+    autoassess_strato_test_1: &autoassess_strato_test_1_settings
+      script: autoassess/autoassess_area_base.py  # the base wrapper
+      title: "Autoassess Stratosphere Diagnostic Metric"  # title
+      area: stratosphere  # assessment area
+      control_model: UKESM1-0-LL-hist  # control dataset name
+      exp_model: UKESM1-0-LL-piCont  # experiment dataset name
+      obs_models: [ERA-Interim]  # list to hold models that are NOT for metrics but for obs operations
+      additional_metrics: [ERA-Interim]  # list to hold additional datasets for metrics
+      start: 2004/12/01  # start date in native Autoassess format
+      end: 2014/12/01  # end date in native Autoassess format
+
+
+Variables
+---------
+
+=========================== ================== ============== ==============================================
+Variable/Field name         realm              frequency      Comment
+=========================== ================== ============== ==============================================
+Eastward wind (ua)          Atmosphere         monthly mean   original stash: x-wind, no stash
+Air temperature (ta)        Atmosphere         monthly mean   original stash: m01s30i204
+Specific humidity (hus)     Atmosphere         monthly mean   original stash: m01s30i205
+=========================== ================== ============== ==============================================
+
+The recipe takes as input a control model and an experiment model,
+comparisons being made with these two CMIP models; additionally it can take
+observational data as input, in the current implementation ERA-Interim.
+
+
+Observations and reformat scripts
+---------------------------------
+
+ERA-Interim (ta, ua, hus - cmorizers/data/formatters/datasets/era_interim.py)
+
+
+References
+----------
+* Andrews, A. E., and Coauthors, 2001: Mean ages of stratospheric air derived
+  from in situ observations of CO2, CH4, and N2O. J. Geophys. Res., 106
+  (D23), 32295-32314.
+* Dee, D. P., and Coauthors, 2011: The ERA-Interim reanalysis: configuration
+  and performance of the data assimilation system. Q. J. R. Meteorol. Soc,
+  137, 553-597, doi:10.1002/qj.828.
+* Engel, A., and Coauthors, 2009: Age of stratospheric air unchanged within
+  uncertainties over the past 30 years. Nat. Geosci., 2, 28-31,
+  doi:10.1038/NGEO388.
+
+
+Example metrics and plots
+-------------------------
+Below is a set of metrics for UKESM1-0-LL (historical data); the table
+shows a comparison made between running ESMValTool on CMIP6 CMORized
+netCDF data freely available on ESGF nodes and the run made using native
+Autoassess performed at the Met Office using the pp output of the model.
+
+=============================================== ================ ====================
+Metric name                                     UKESM1-0-LL;     UKESM1-0-LL;
+                                                CMIP6: AERmonZ;  pp files;
+                                                historical, ESGF historical, u-bc179
+=============================================== ================ ====================
+Polar night jet: northern hem (January)         44.86            44.91
+Polar night jet: southern hem (July)            112.09           112.05
+Easterly jet: southern hem (January)            76.12            75.85
+Easterly jet: northern hem (July)               55.68            55.74
+QBO period at 30 hPa                            41.50            41.00
+QBO amplitude at 30 hPa (westward)              27.39            27.39
+QBO amplitude at 30 hPa (eastward)              17.36            17.36
+50 hPa temperature: 60N-90N (DJF)               27.11            26.85
+50 hPa temperature: 60N-90N (MAM)               40.94            40.92
+50 hPa temperature: 90S-60S (JJA)               11.75            11.30
+50 hPa temperature: 90S-60S (SON)               23.88            23.63
+100 hPa equatorial temp (annual mean)           15.29            15.30
+100 hPa equatorial temp (annual cycle strength) 1.67             1.67
+100 hPa 10Sto10N temp (annual mean)             15.48            15.46
+100 hPa 10Sto10N temp (annual cycle strength)   1.62             1.62
+70 hPa 10Sto10N wv (annual mean)                5.75             5.75
+=============================================== ================ ====================
+
+Results from ``u-bc179`` have been obtained by running the native
+Autoassess/stratosphere on ``.pp`` data from the UKESM1 ``u-bc179`` suite and
+are listed here to confirm the compliance between the ported Autoassess
+metric in ESMValTool and the original native metric.
+
+Another reference run comparing UKESM1-0-LL to the physical model
+HadGEM3-GC31-LL can be found `here `_ .
+
+
+.. figure:: /recipes/figures/autoassess_stratosphere/metrics.png
+   :scale: 50 %
+   :alt: metrics.png
+
+   Standard metrics plot comparing standard metrics from UKESM1-0-LL and
+   HadGEM3-GC31-LL.
+
+
+.. figure:: /recipes/figures/autoassess_stratosphere/UKESM1-0-LL_u_jan.png
+   :scale: 50 %
+   :alt: UKESM1-0-LL_u_jan.png
+
+   Zonal mean zonal wind in January for UKESM1-0-LL.
+
+.. figure:: /recipes/figures/autoassess_stratosphere/HadGEM3-GC31-LL_u_jan.png
+   :scale: 50 %
+   :alt: HadGEM3-GC31-LL_u_jan.png
+
+   Zonal mean zonal wind in January for HadGEM3-GC31-LL.
+
+.. figure:: /recipes/figures/autoassess_stratosphere/UKESM1-0-LL_qbo.png
+   :scale: 50 %
+   :alt: UKESM1-0-LL_qbo.png
+
+   QBO for UKESM1-0-LL.
+
+.. figure:: /recipes/figures/autoassess_stratosphere/HadGEM3-GC31-LL_qbo.png
+   :scale: 50 %
+   :alt: HadGEM3-GC31-LL_qbo.png
+
+   QBO for HadGEM3-GC31-LL.
+
+.. figure:: /recipes/figures/autoassess_stratosphere/qbo_30hpa.png
+   :scale: 50 %
+   :alt: qbo_30hpa.png
+
+   QBO at 30 hPa comparison between UKESM1-0-LL and HadGEM3-GC31-LL.
+
+.. figure:: /recipes/figures/autoassess_stratosphere/teq_100hpa.png
+   :scale: 50 %
+   :alt: teq_100hpa.png
+
+   Equatorial temperature at 100 hPa, multi annual means.
+
+
+Prior and current contributors
+------------------------------
+Met Office:
+
+* Prior to May 2008: Neal Butchart
+* May 2008 - May 2016: Steven C Hardiman
+* Since May 2016: Alistair Sellar and Paul Earnshaw
+
+ESMValTool:
+
+* Since April 2018: Porting into ESMValTool by Valeriu Predoi
+
+
+Developers
+----------
+Met Office:
+
+* Prior to May 2008: Neal Butchart
+* May 2008 - May 2016: Steven C Hardiman
+
+ESMValTool:
+
+* Since April 2018: Valeriu Predoi
diff --git a/doc/sphinx/source/recipes/recipe_bock20jgr.rst b/doc/sphinx/source/recipes/recipe_bock20jgr.rst
new file mode 100644
index 0000000000..cb311249f9
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_bock20jgr.rst
@@ -0,0 +1,394 @@
+.. _recipes_bock20jgr:
+
+Quantifying progress across different CMIP phases
+=================================================
+
+Overview
+--------
+
+The recipe recipe_bock20jgr.yml generates figures to quantify the progress
+across different CMIP phases.
+
+.. note::
+   The current recipe uses a horizontal 5x5 grid for figure 10, while the
+   original plot in the paper shows a 2x2 grid. This is solely done for
+   computational reasons (running the recipe with a 2x2 grid for figure 10
+   takes considerably more time than running it with a 5x5 grid) and can be
+   easily changed in the preprocessor section of the recipe if necessary.
+
+
+
+Available recipes and diagnostics
+---------------------------------
+
+Recipes are stored in recipes/bock20jgr
+
+   * recipe_bock20jgr_fig_1-4.yml
+   * recipe_bock20jgr_fig_6-7.yml
+   * recipe_bock20jgr_fig_8-10.yml
+
+Diagnostics are stored in diag_scripts/
+
+   Fig. 1:
+
+   * bock20jgr/tsline.ncl: time series of global mean surface temperature
+     anomalies
+
+   Fig. 2:
+
+   * bock20jgr/tsline_collect.ncl: collect different time series from
+     tsline.ncl to compare different model ensembles
+
+   Fig. 3 and 4:
+
+   * bock20jgr/model_bias.ncl: global maps of the multi-model mean and the
+     multi-model mean bias
+
+   Fig. 6:
+
+   * perfmetrics/main.ncl
+   * perfmetrics/collect.ncl
+
+   Fig. 7:
+
+   * bock20jgr/corr_pattern.ncl: calculate pattern correlation
+   * bock20jgr/corr_pattern_collect.ncl: create pattern correlation plot
+
+   Fig. 8:
+
+   * climate_metrics/ecs.py
+   * climate_metrics/create_barplot.py
+
+   Fig. 9:
+
+   * clouds/clouds_ipcc.ncl
+
+   Fig. 10:
+
+   * climate_metrics/feedback_parameters.py
+
+
+User settings in recipe
+-----------------------
+
+#. Script tsline.ncl
+
+   *Required settings (scripts)*
+
+   * styleset: as in diag_scripts/shared/plot/style.ncl functions
+
+   *Optional settings (scripts)*
+
+   * time_avg: type of time average (currently only "yearly" and "monthly"
+     are available).
+   * ts_anomaly: calculates anomalies with respect to the defined reference
+     period for each grid point by removing the mean of the given calendar
+     month (requiring at least 50% of the data to be non-missing)
+   * ref_start: start year of reference period for anomalies
+   * ref_end: end year of reference period for anomalies
+   * ref_value: if true, right panel with mean values is attached
+   * ref_mask: if true, model fields will be masked by reference fields
+   * region: name of domain
+   * plot_units: variable unit for plotting
+   * y_min: set min of y-axis
+   * y_max: set max of y-axis
+   * mean_nh_sh: if true, calculate first NH and SH mean
+   * volcanoes: if true, lines of main volcanic eruptions will be added
+   * header: if true, use region name as header
+   * write_stat: if true, write multi-model statistics to nc-file
+
+   *Required settings (variables)*
+
+   none
+
+   *Optional settings (variables)*
+
+   none
+
+#. Script tsline_collect.ncl
+
+   *Required settings (scripts)*
+
+   * styleset: as in diag_scripts/shared/plot/style.ncl functions
+
+   *Optional settings (scripts)*
+
+   * time_avg: type of time average (currently only "yearly" and "monthly"
+     are available).
+   * ts_anomaly: calculates anomalies with respect to the defined period
+   * ref_start: start year of reference period for anomalies
+   * ref_end: end year of reference period for anomalies
+   * region: name of domain
+   * plot_units: variable unit for plotting
+   * y_min: set min of y-axis
+   * y_max: set max of y-axis
+   * order: order in which experiments should be plotted
+   * header: if true, region name as header
+   * stat_shading: if true, shading of the statistical range
+   * ref_shading: if true, shading of the reference period
+
+
+   *Required settings (variables)*
+
+   none
+
+   *Optional settings (variables)*
+
+   none
+
+#. Script model_bias.ncl
+
+   *Required settings (scripts)*
+
+   none
+
+   *Optional settings (scripts)*
+
+   * projection: map projection, e.g., Mollweide, Mercator
+   * timemean: time averaging, i.e. "seasonalclim" (DJF, MAM, JJA, SON),
+     "annualclim" (annual mean)
+
+   *Required settings (variables)*
+
+   * reference_dataset: name of reference dataset
+
+   *Optional settings (variables)*
+
+   * long_name: description of variable
+
+   *Color tables*
+
+   * variable "tas": diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_div.rgb,
+   * variable "pr-mmday": diag_scripts/shared/plot/rgb/ipcc-ar6_precipitation_seq.rgb
+     diag_scripts/shared/plot/rgb/ipcc-ar6_precipitation_div.rgb
+
+#. Script perfmetrics_main.ncl
+
+   See :ref:`here`.
+
+#. Script perfmetrics_collect.ncl
+
+   See :ref:`here`.
+
+#. Script corr_pattern.ncl
+
+   *Required settings (scripts)*
+
+   none
+
+   *Optional settings (scripts)*
+
+   * plot_median
+
+   *Required settings (variables)*
+
+   * reference_dataset
+
+   *Optional settings (variables)*
+
+   * alternative_dataset
+
+#. Script corr_pattern_collect.ncl
+
+   *Required settings (scripts)*
+
+   none
+
+   *Optional settings (scripts)*
+
+   * diag_order
+
+   *Color tables*
+
+   * diag_scripts/shared/plot/rgb/ipcc-ar6_line_03.rgb
+
+#. Script ecs.py
+
+   See :ref:`here`.
+
+#. Script create_barplot.py
+
+   See :ref:`here`.
+
+#. Script clouds_ipcc.ncl
+
+   See :ref:`here`.
+
+#. Script feedback_parameters.py
+
+   *Required settings (scripts)*
+
+   none
+
+   *Optional settings (scripts)*
+
+   * calculate_mmm: *bool* (default: ``True``). Calculate multi-model means.
+   * only_consider_mmm: *bool* (default: ``False``). Only consider the
+     multi-model mean dataset. This automatically sets ``calculate_mmm`` to
+     ``True``. For large multi-dimensional datasets, this might significantly
+     reduce the computation time if only the multi-model mean dataset is
+     relevant.
+   * output_attributes: *dict*. Write additional attributes to netcdf files.
+   * seaborn_settings: *dict*. Options for :func:`seaborn.set_theme` (affects
+     all plots).
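+
+Several of the options above implement simple climatological operations. For
+example, the ``ts_anomaly`` option of ``tsline.ncl`` removes a
+reference-period mean per calendar month. A minimal Python illustration (the
+diagnostic itself is NCL; the helper name is hypothetical):
+
+.. code-block:: python
+
+   import numpy as np
+
+   def monthly_anomalies(ts, years, ref_start, ref_end, min_frac=0.5):
+       """Subtract the reference-period mean of each calendar month.
+
+       ts: (time,) monthly series starting in January; years: (time,) year
+       labels for each time step.
+       """
+       ts = np.asarray(ts, dtype=float)
+       years = np.asarray(years)
+       anom = np.empty_like(ts)
+       for month in range(12):
+           idx = np.arange(month, ts.size, 12)
+           ref = idx[(years[idx] >= ref_start) & (years[idx] <= ref_end)]
+           vals = ts[ref]
+           # Require at least 50% of the reference data to be non-missing.
+           if np.count_nonzero(~np.isnan(vals)) < min_frac * vals.size:
+               anom[idx] = np.nan
+           else:
+               anom[idx] = ts[idx] - np.nanmean(vals)
+       return anom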
+
+
+Variables
+---------
+
+* clt (atmos, monthly, longitude latitude time)
+* hus (atmos, monthly, longitude latitude lev time)
+* pr (atmos, monthly, longitude latitude time)
+* psl (atmos, monthly, longitude latitude time)
+* rlut (atmos, monthly, longitude latitude time)
+* rsdt (atmos, monthly, longitude latitude time)
+* rsut (atmos, monthly, longitude latitude time)
+* rtmt (atmos, monthly, longitude latitude time)
+* rlutcs (atmos, monthly, longitude latitude time)
+* rsutcs (atmos, monthly, longitude latitude time)
+* ta (atmos, monthly, longitude latitude lev time)
+* tas (atmos, monthly, longitude latitude time)
+* ts (atmos, monthly, longitude latitude time)
+* ua (atmos, monthly, longitude latitude lev time)
+* va (atmos, monthly, longitude latitude lev time)
+* zg (atmos, monthly, longitude latitude time)
+
+
+Observations and reformat scripts
+---------------------------------
+
+* AIRS (obs4MIPs) - specific humidity
+
+* CERES-EBAF (obs4MIPs) - CERES TOA radiation fluxes (used for calculation of
+  cloud forcing)
+
+* ERA-Interim - reanalysis of surface temperature, sea level pressure
+
+  *Reformat script:* recipes/cmorizers/recipe_era5.yml
+
+* ERA5 - reanalysis of surface temperature
+
+  *Reformat script:* recipes/cmorizers/recipe_era5.yml
+
+* ESACCI-CLOUD - total cloud cover
+
+  *Reformat script:* cmorizers/data/formatters/datasets/esacci_cloud.ncl
+
+* ESACCI-SST - sea surface temperature
+
+  *Reformat script:* cmorizers/data/formatters/datasets/esacci_sst.py
+
+* GHCN - Global Historical Climatology Network-Monthly gridded land
+  precipitation
+
+  *Reformat script:* cmorizers/data/formatters/datasets/ghcn.ncl
+
+* GPCP-SG (obs4MIPs) - Global Precipitation Climatology Project total
+  precipitation
+
+* HadCRUT4 - surface temperature anomalies
+
+  *Reformat script:* cmorizers/data/formatters/datasets/hadcrut4.ncl
+
+* HadISST - surface temperature
+
+  *Reformat script:* cmorizers/data/formatters/datasets/hadisst.ncl
+
+* JRA-55 (ana4mips) - reanalysis of sea level pressure
+
+* NCEP-NCAR-R1 - reanalysis of surface temperature
+
+  *Reformat script:* cmorizers/data/formatters/datasets/ncep_ncar_r1.py
+
+* PATMOS-x - total cloud cover
+
+  *Reformat script:* cmorizers/data/formatters/datasets/patmos_x.ncl
+
+
+References
+----------
+
+* Bock, L., Lauer, A., Schlund, M., Barreiro, M., Bellouin, N., Jones, C.,
+  Predoi, V., Meehl, G., Roberts, M., and Eyring, V.: Quantifying progress
+  across different CMIP phases with the ESMValTool, Journal of Geophysical
+  Research: Atmospheres, 125, e2019JD032321.
+  https://doi.org/10.1029/2019JD032321
+
+* Copernicus Climate Change Service (C3S), 2017: ERA5: Fifth generation of
+  ECMWF atmospheric reanalyses of the global climate, edited, Copernicus
+  Climate Change Service Climate Data Store (CDS).
+  https://cds.climate.copernicus.eu/cdsapp#!/home
+
+* Flato, G., J. Marotzke, B. Abiodun, P. Braconnot, S.C. Chou, W. Collins,
+  P. Cox, F. Driouech, S. Emori, V. Eyring, C. Forest, P. Gleckler,
+  E. Guilyardi, C. Jakob, V. Kattsov, C. Reason and M. Rummukainen, 2013:
+  Evaluation of Climate Models. In: Climate Change 2013: The Physical
+  Science Basis. Contribution of Working Group I to the Fifth Assessment
+  Report of the Intergovernmental Panel on Climate Change [Stocker, T.F.,
+  D. Qin, G.-K. Plattner, M. Tignor, S.K. Allen, J. Boschung, A. Nauels,
+  Y. Xia, V. Bex and P.M. Midgley (eds.)]. Cambridge University Press,
+  Cambridge, United Kingdom and New York, NY, USA.
+
A., & Jones, P., 2012: Quantifying + uncertainties in global and regional temperature change using an ensemble of + observational estimates: The HadCRUT4 data set, Journal of Geophysical + Research, 117, D08101. https://doi.org/10.1029/2011JD017187 + + +Example plots +------------- + +.. _fig_bock20jgr_1: +.. figure:: /recipes/figures/bock20jgr/tas_Global_CMIP6_historical_anom_1850-2014.png + :align: center + + Observed and simulated time series of the anomalies in annual and global mean + surface temperature. All anomalies are differences from the 1850-1900 time + mean of each individual time series (Fig. 1). + +.. _fig_bock20jgr_2: +.. figure:: /recipes/figures/bock20jgr/tas_Global_multimodel_anom_1850-2017.png + :align: center + :width: 7cm + + Observed and simulated time series of the anomalies in annual + and global mean surface temperature as in Figure 1; all anomalies are + calculated by subtracting the 1850-1900 time mean from the time series. + Displayed are the multimodel means of all three CMIP ensembles with + shaded range of the respective standard deviation. In black the HadCRUT4 + data set (HadCRUT4; Morice et al., 2012). Gray shading shows the 5% to + 95% confidence interval of the combined effects of all the uncertainties + described in the HadCRUT4 error model (measurement and sampling, bias, + and coverage uncertainties) (Morice et al., 2012) (Fig. 2). + +.. _fig_bock20jgr_3: +.. figure:: /recipes/figures/bock20jgr/model_bias_tas_annual_CMIP6.png + :align: center + :width: 9cm + + Annual mean near‐surface (2 m) air temperature (°C). (a) Multimodel (ensemble) + mean constructed with one realization of CMIP6 historical experiments for the + period 1995-2014. Multimodel‐mean bias of (b) CMIP6 (1995-2014) compared to + the corresponding time period of the climatology from ERA5 + (Copernicus Climate Change Service (C3S), 2017). (Fig. 3) + +.. _fig_bock20jgr_4: +.. figure:: /recipes/figures/bock20jgr/ta850-global_to_swcre-global_RMSD.png + :align: center + :width: 9cm + + Relative space-time root-mean-square deviation (RMSD) calculated from the + climatological seasonal cycle of the CMIP3, CMIP5, and CMIP6 simulations + (1980-1999) compared to observational data sets (Table 5). A relative + performance is displayed, with blue shading being better and red shading + worse than the median RMSD of all model results of all ensembles. A diagonal + split of a grid square shows the relative error with respect to the reference + data set (lower right triangle) and the alternative data set (upper left + triangle) which are marked in Table 5. White boxes are used when data are not + available for a given model and variable (Fig. 6). + +.. _fig_bock20jgr_5: +.. figure:: /recipes/figures/bock20jgr/patterncor.png + :align: center + :width: 9cm + + Centered pattern correlations between models and observations for the annual + mean climatology over the period 1980–1999 (Fig. 7). diff --git a/doc/sphinx/source/recipes/recipe_capacity_factor.rst b/doc/sphinx/source/recipes/recipe_capacity_factor.rst index 9ad4b667a6..39b277e294 100644 --- a/doc/sphinx/source/recipes/recipe_capacity_factor.rst +++ b/doc/sphinx/source/recipes/recipe_capacity_factor.rst @@ -1,4 +1,4 @@ -.. _yml_capacity_factor: +.. 
 
 Capacity factor of wind power: Ratio of average estimated power to theoretical maximum power
 ============================================================================================
@@ -6,14 +6,13 @@ Capacity factor of wind power: Ratio of average estimated power to theoretical m
 Overview
 --------
 
-The goal of this diagnostic is to compute the wind capacity factor, taking as input the daily instantaneous surface wind speed, which is then extrapolated to obtain the wind speed at a height of 100 m as described in Lledó (2017).
+The goal of this diagnostic is to compute the wind capacity factor, taking as input the daily instantaneous surface wind speed, which is then extrapolated to obtain the wind speed at a height of 100 m as described in Lledó (2017).
 
-The capacity factor is a normalized indicator of the suitability of wind speed conditions to produce electricity, irrespective of the size and number of installed turbines. This indicator is provided for three different classes of wind turbines (IEC, 2005) that are designed specifically for low, medium and high wind speed conditions.
+The capacity factor is a normalized indicator of the suitability of wind speed conditions to produce electricity, irrespective of the size and number of installed turbines. This indicator is provided for three different classes of wind turbines (IEC, 2005) that are designed specifically for low, medium and high wind speed conditions.
 
-The user can select the region, temporal range and season of interest.
+The user can select the region, temporal range and season of interest.
 
 The output of the recipe is a netcdf file containing the capacity factor for each of the three turbine classes.
-.
 
 Available recipes and diagnostics
 ---------------------------------
@@ -25,7 +24,7 @@ Recipes are stored in recipes/
 Diagnostics are stored in diag_scripts/magic_bsc/
 
 * capacity_factor.R: calculates the capacity factor for the three turbine classes.
-* PC.r: calculates the power curves for the three turbine classes.
+* PC.R: calculates the power curves for the three turbine classes.
 
 
 User settings
@@ -48,7 +47,18 @@ Variables
 Observations and reformat scripts
 ---------------------------------
 
-*None*
+Main features of the selected turbines:
+
+================= ================== ================ ================== ================= ===================
+Turbine name      Rotor diameter (m) Rated power (MW) Cut-in speed (m/s) Rated speed (m/s) Cut-out speed (m/s)
+================= ================== ================ ================== ================= ===================
+Enercon E70 2.3MW 70                 2.3              2.0                16.0              25.0
+Gamesa G80 2.0MW  80                 2.0              4.0                17.0              25.0
+Gamesa G87 2.0MW  87                 2.0              4.0                16.0              25.0
+Vestas V100 2.0MW 100                2.0              3.0                15.0              20.0
+Vestas V110 2.0MW 110                2.0              3.0                11.5              20.0
+================= ================== ================ ================== ================= ===================
 
 References
 ----------
@@ -61,7 +71,8 @@ Example plots
 -------------
 
 .. _fig_capfactor1:
-.. figure:: /recipes/figures/capacity_factor/capacity_factor_IPSL-CM5A-LR_1980-2005.png
+.. figure:: /recipes/figures/capacity_factor/capacity_factor_IPSL-CM5A-MR_2021-2050.png
    :align: center
    :width: 14cm
 
+   Wind capacity factor for five turbines: Enercon E70 (top-left), Gamesa G80
+   (top-middle), Gamesa G87 (top-right), Vestas V100 (bottom-left) and Vestas
+   V110 (bottom-middle) using the IPSL-CM5A-MR simulations for the r1i1p1
+   ensemble for the rcp8.5 scenario during the period 2021-2050.
diff --git a/doc/sphinx/source/recipes/recipe_carvalhais14nat.rst b/doc/sphinx/source/recipes/recipe_carvalhais14nat.rst
new file mode 100644
index 0000000000..b551bbbdc5
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_carvalhais14nat.rst
@@ -0,0 +1,347 @@
+.. _recipe_carvalhais14nat:
+
+Turnover time of carbon over land ecosystems
+============================================
+
+Overview
+--------
+
+This recipe evaluates the turnover time of carbon over
+land ecosystems (tau_ctotal) based on the analysis of
+`Carvalhais et al. (2014)`_. In summary, it provides an overview of:
+
+  * Comparisons of global distributions of tau_ctotal from all models against
+    observation and other models
+  * Variation of tau_ctotal across latitude (zonal distributions)
+  * Variation of association of tau_ctotal and climate across latitude
+    (zonal correlations)
+  * Metrics of global tau_ctotal and correlations
+
+
+.. _tau calculation:
+
+Calculation of turnover time
+----------------------------
+
+First, the total carbon content of land ecosystems is calculated as,
+
+.. math::
+
+   ctotal = cSoil + cVeg
+
+where :math:`cSoil` and :math:`cVeg` are the carbon contents in soil and
+vegetation. **Note that this is not fully consistent with `Carvalhais et al.
+(2014)`_, in which `ctotal` includes all carbon storages that respire to the
+atmosphere. Because the available storage components are not consistent
+across models, different carbon storage components end up being included in
+the calculation of ctotal for different models.**
+
+The turnover time of carbon is then calculated as,
+
+.. math::
+
+   \tau_{ctotal} = \frac{ctotal}{gpp}
+
+where `ctotal` and `gpp` are temporal means of total carbon content and
+gross primary productivity, respectively. **The equation
+is valid for steady state, and is only applicable when both ctotal and gpp
+are long-term averages.** Therefore, the recipe should always include the
+``mean`` operator of ``climate_statistics`` in the preprocessor.
+
+
+Available recipes and diagnostics
+---------------------------------
+
+Recipes are stored in recipes/
+
+  * recipe_carvalhais14nat.yml
+
+
+Diagnostics are stored in diag_scripts/
+
+  * land_carbon_cycle/diag_global_turnover.py
+  * land_carbon_cycle/diag_zonal_turnover.py
+  * land_carbon_cycle/diag_zonal_correlation.py
+
+
+User settings in recipe
+-----------------------
+
+Observation-related details
+...........................
+
+The settings needed for loading the observational dataset in all diagnostics
+are provided in the recipe through `obs_info` within the `obs_details`
+section.
+
+  * ``obs_data_subdir``: subdirectory of auxiliary_data_dir (set in
+    configuration) where observation data are stored {e.g.,
+    data_ESMValTool_Carvalhais2014}.
+  * ``source_label``: source data label {'Carvalhais2014'}.
+  * ``variant_label``: variant of the observation {'BE'} for best estimate.
+  * ``grid_label``: label denoting the spatial grid specification {'gn'}.
+  * ``frequency``: temporal frequency of the observation data {'fx'}.
+
+The observation data file used in the recipe should be changed through the
+fields above, as these are used to generate the observation file names and
+locations. For details, see the :ref:`observations` section.
+
+Preprocessor
+............
+
+  * ``climate_statistics``: {mean} - calculate the mean over the full time
+    period.
+  * ``regrid``: {nearest} - nearest neighbor regridding to the selected
+    observation resolution.
+  * ``mask_landsea``: {sea} - mask out all the data points from sea.
+  * ``multi_model_statistics``: {median} - calculate and include the
+    multimodel median.
+
+
+Script land_carbon_cycle/diag_global_turnover.py
+................................................
+
+  * Required settings:
+
+    * ``obs_variable``: {``str``} list of the variable(s) to be read from the
+      observation files
+
+  * Optional settings:
+
+    * ``ax_fs``: {``float``, 7.1} - fontsize in the figure.
+    * ``fill_value``: {``float``, nan} - fill value to be used in analysis
+      and plotting.
+    * ``x0``: {``float``, 0.02} - X - coordinate of the left edge of the
+      figure.
+    * ``y0``: {``float``, 1.0} - Y - coordinate of the upper edge of the
+      figure.
+    * ``wp``: {``float``, 1 / number of models} - width of each map.
+    * ``hp``: {``float``, = wp} - height of each map.
+    * ``xsp``: {``float``, 0} - spacing between maps in X - direction.
+    * ``ysp``: {``float``, -0.03} - spacing between maps in Y - direction.
+      Negative to reduce the spacing below default.
+    * ``aspect_map``: {``float``, 0.5} - aspect of the maps.
+    * ``xsp_sca``: {``float``, wp / 1.5} - spacing between the scatter plots
+      in X - direction.
+    * ``ysp_sca``: {``float``, hp / 1.5} - spacing between the scatter plots
+      in Y - direction.
+    * ``hcolo``: {``float``, 0.0123} - height (thickness for horizontal
+      orientation) of the colorbar.
+    * ``wcolo``: {``float``, 0.25} - width (length) of the colorbar.
+    * ``cb_off_y``: {``float``, 0.06158} - distance of colorbar from top of
+      the maps.
+    * ``x_colo_d``: {``float``, 0.02} - X - coordinate of the colorbar for
+      maps along the diagonal (left).
+    * ``x_colo_r``: {``float``, 0.76} - X - coordinate of the colorbar for
+      ratio maps above the diagonal (right).
+    * ``y_colo_single``: {``float``, 0.1086} - Y - coordinate of the colorbar
+      in the maps per model (separate figures).
+    * ``correlation_method``: {``str``, spearman | pearson} - correlation
+      method to be used while calculating the correlation displayed in the
+      scatter plots.
+    * ``tx_y_corr``: {``float``, 1.075} - Y - coordinate of the inset text of
+      correlation.
+    * ``valrange_sc``: {``tuple``, (2, 256)} - range of turnover times in X -
+      and Y - axes of scatter plots.
+    * ``obs_global``: {``float``, 23} - global turnover time, provided as
+      additional info for the map of the observation. For models, it is
+      calculated within the diagnostic.
+    * ``gpp_threshold``: {``float``, 0.01} - The threshold of gpp in
+      `kg m^{-2} yr^{-1}` below which the grid cells are masked.
+
+
+Script land_carbon_cycle/diag_zonal_turnover.py
+...............................................
+
+  * Required settings:
+
+    * ``obs_variable``: {``str``} list of the variable(s) to be read from the
+      observation files
+
+  * Optional settings:
+
+    * ``ax_fs``: {``float``, 7.1} - fontsize in the figure.
+    * ``fill_value``: {``float``, nan} - fill value to be used in analysis
+      and plotting.
+    * ``valrange_x``: {``tuple``, (2, 1000)} - range of turnover values in
+      the X - axis.
+    * ``valrange_y``: {``tuple``, (-70, 90)} - range of latitudes in the Y -
+      axis.
+    * ``bandsize``: {``float``, 9.5} - size of the latitudinal rolling window
+      in degrees. One latitude row if set to ``None``.
+    * ``gpp_threshold``: {``float``, 0.01} - The threshold of gpp in
+      `kg m^{-2} yr^{-1}` below which the grid cells are masked.
+
+
+Script land_carbon_cycle/diag_zonal_correlation.py
+..................................................
+
+  * Required settings:
+
+    * ``obs_variable``: {``str``} list of the variable(s) to be read from the
+      observation files
+
+  * Optional settings:
+
+    * ``ax_fs``: {``float``, 7.1} - fontsize in the figure.
+    * ``fill_value``: {``float``, nan} - fill value to be used in analysis
+      and plotting.
+    * ``correlation_method``: {``str``, pearson | spearman} - correlation
+      method to be used while calculating the zonal correlation.
+    * ``min_points_frac``: {``float``, 0.125} - minimum fraction of valid
+      points within the latitudinal band for calculation of correlation.
+    * ``valrange_x``: {``tuple``, (-1, 1)} - range of correlation values in
+      the X - axis.
+    * ``valrange_y``: {``tuple``, (-70, 90)} - range of latitudes in the Y -
+      axis.
+    * ``bandsize``: {``float``, 9.5} - size of the latitudinal rolling window
+      in degrees. One latitude row if set to ``None``.
+    * ``gpp_threshold``: {``float``, 0.01} - The threshold of gpp in
+      `kg m^{-2} yr^{-1}` below which the grid cells are masked.
+
+
+Required Variables
+------------------
+
+* *tas* (atmos, monthly, longitude, latitude, time)
+* *pr* (atmos, monthly, longitude, latitude, time)
+* *gpp* (land, monthly, longitude, latitude, time)
+* *cVeg* (land, monthly, longitude, latitude, time)
+* *cSoil* (land, monthly, longitude, latitude, time)
+
+.. _observations:
+
+Observations
+------------
+
+The observations needed in the diagnostics are publicly available for download
+from the `Data Portal of the Max Planck Institute for Biogeochemistry `_
+after registration.
+
+Due to the inherent dependence of the diagnostics on uncertainty estimates in
+the observations, the data needed for each diagnostic script are processed at
+different spatial resolutions (as in Carvalhais et al., 2014), and provided in
+11 different resolutions (see Table 1). Note that the uncertainties were
+estimated at the resolution of the selected models, and, thus, only the
+pre-processed observed data can be used with the recipe.
+It is not possible to use regridding functionalities of ESMValTool to regrid
+the observational data to other spatial resolutions, as the uncertainty
+estimates cannot be regridded.
+
+Table 1. A summary of the observation datasets at different resolutions.
+
++-------------+---------------+-------------+
+| Reference   | target_grid   | grid_label* |
++=============+===============+=============+
+| Observation | 0.5x0.5       | gn          |
++-------------+---------------+-------------+
+| NorESM1-M   | 2.5x1.875     | gr          |
++-------------+---------------+-------------+
+| bcc-csm1-1  | 2.812x2.813   | gr1         |
++-------------+---------------+-------------+
+| CCSM4       | 1.25x0.937    | gr2         |
++-------------+---------------+-------------+
+| CanESM2     | 2.812x2.813   | gr3         |
++-------------+---------------+-------------+
+| GFDL-ESM2G  | 2.5x2.0       | gr4         |
++-------------+---------------+-------------+
+| HadGEM2-ES  | 1.875x1.241   | gr5         |
++-------------+---------------+-------------+
+| inmcm4      | 2.0x1.5       | gr6         |
++-------------+---------------+-------------+
+| IPSL-CM5A-MR| 2.5x1.259     | gr7         |
++-------------+---------------+-------------+
+| MIROC-ESM   | 2.812x2.813   | gr8         |
++-------------+---------------+-------------+
+| MPI-ESM-LR  | 1.875x1.875   | gr9         |
++-------------+---------------+-------------+
+
+\* The grid_label is suffixed with z for data in zonal/latitude coordinates:
+the zonal turnover and zonal correlation.
+
+**To change the spatial resolution of the evaluation, change {grid_label} in
+obs_details and the corresponding {target_grid} in the regrid preprocessor of
+the recipe.**
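+
+A hedged sketch of how these two settings interact (the exact key layout is
+assumed from the descriptions above, not copied from
+recipe_carvalhais14nat.yml); here the evaluation is switched to the CanESM2
+resolution from Table 1:
+
+.. code-block:: yaml
+
+   obs_details:
+     obs_info:
+       obs_data_subdir: data_ESMValTool_Carvalhais2014
+       source_label: Carvalhais2014
+       variant_label: BE
+       grid_label: gr3              # CanESM2 entry in Table 1 (default: gn)
+       frequency: fx
+
+   preprocessors:
+     preproc_tau:                   # preprocessor name assumed
+       climate_statistics:
+         operator: mean
+       regrid:
+         target_grid: 2.812x2.813   # must match the grid_label above
+         scheme: nearest
+       mask_landsea:
+         mask_out: sea
+       multi_model_statistics:
+         statistics: [median]
+         span: overlap              # assumed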
+
+
+At each spatial resolution, four data files are provided:
+
+  * ``tau_ctotal_fx_Carvalhais2014_BE_gn.nc`` - global data of tau_ctotal
+  * ``tau_ctotal_fx_Carvalhais2014_BE_gnz.nc`` - zonal data of tau_ctotal
+  * ``r_tau_ctotal_tas_fx_Carvalhais2014_BE_gnz.nc`` - zonal correlation of
+    tau_ctotal and tas, controlled for pr
+  * ``r_tau_ctotal_pr_fx_Carvalhais2014_BE_gnz.nc`` - zonal correlation of
+    tau_ctotal and pr, controlled for tas.
+
+The data are produced following the obs4MIPs standard and provided in netCDF4
+format. The filenames use the convention:
+
+``{variable}_{frequency}_{source_label}_{variant_label}_{grid_label}.nc``
+
+  * {variable}: variable name, set in every diagnostic script as obs_variable
+  * {frequency}: temporal frequency of data, set from obs_details
+  * {source_label}: observational source, set from obs_details
+  * {variant_label}: observation variant, set from obs_details
+  * {grid_label}: label denoting the spatial grid specification, set from
+    obs_details
+
+Refer to the `Obs4MIPs Data Specifications`_ for details of the definitions
+above.
+
+All data variables have additional variables ({variable}_5 and {variable}_95)
+in the same file. These variables are necessary for a successful execution of
+the diagnostics.
+
+References
+----------
+
+* Carvalhais, N., et al. (2014), Global covariation of carbon turnover times
+  with climate in terrestrial ecosystems, Nature, 514(7521), 213-217,
+  doi: 10.1038/nature13731.
+
+.. _`Carvalhais et al. (2014)`: https://doi.org/10.1038/nature13731
+
+.. _`Obs4MIPs Data Specifications`:
+   https://esgf-node.llnl.gov/site_media/projects/obs4mips/ODSv2p1.pdf
+
+
+Example plots
+-------------
+
+.. _fig_carvalhais14nat_1:
+.. figure:: /recipes/figures/carvalhais14nat/r_tau_ctotal_climate_pearson_Carvalhais2014_gnz.png
+   :align: center
+   :width: 80%
+
+   Comparison of latitudinal (zonal) variations of the Pearson correlation
+   between turnover time and climate: turnover time and precipitation,
+   controlled for temperature (left) and vice-versa (right). Reproduces
+   figures 2c and 2d in `Carvalhais et al. (2014)`_.
+
+.. _fig_carvalhais14nat_2:
+
+.. figure:: /recipes/figures/carvalhais14nat/global_matrix_map_ecosystem_carbon_turnover_time_Carvalhais2014_gn.png
+   :align: center
+   :width: 80%
+
+   Comparison of observation-based and modelled ecosystem carbon turnover
+   time. Along the diagonal, tau_ctotal is plotted; above the diagonal, the
+   bias; and below it, density plots. The inset text in the density plots
+   indicates the correlation.
+
+.. _fig_carvalhais14nat_3:
+
+.. figure:: /recipes/figures/carvalhais14nat/global_multimodelAgreement_ecosystem_carbon_turnover_time_Carvalhais2014_gn.png
+   :align: center
+   :width: 80%
+
+   Global distributions of multimodel bias and model agreement. Multimodel
+   bias is calculated as the ratio of the multimodel median turnover time and
+   that from the observation. Stippling indicates the regions where less than
+   one quarter of the models falls within the range of observational
+   uncertainties (5th and 95th percentiles). Reproduces figure 3 in
+   `Carvalhais et al. (2014)`_.
+
+.. _fig_carvalhais14nat_4:
+
+.. figure:: /recipes/figures/carvalhais14nat/zonal_mean_ecosystem_carbon_turnover_time_Carvalhais2014_gnz.png
+   :align: center
+   :width: 80%
+
+   Comparison of latitudinal (zonal) variations of observation-based and
+   modelled ecosystem carbon turnover time. The zonal turnover time is
+   calculated as the ratio of zonal `ctotal` and `gpp`. Reproduces figures 2a
+   and 2b in `Carvalhais et al. (2014)`_.
diff --git a/doc/sphinx/source/recipes/recipe_climate_change_hotspot.rst b/doc/sphinx/source/recipes/recipe_climate_change_hotspot.rst
new file mode 100644
index 0000000000..9e12140d42
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_climate_change_hotspot.rst
@@ -0,0 +1,212 @@
+.. _recipe_climate_change_hotspot:
+
+Climate Change Hotspot
+======================
+
+Overview
+--------
+
+In a changing climate, not all regions change at the same pace or in the
+same way. The regions that change at a faster rate than the rest of the
+globe are labelled climate change hotspots. Estimating the location and
+magnitude of the hotspots is important for climate change adaptation, and
+it is usually done by comparing the projected changes of climate variables
+between the regional and larger scales.
+
+One issue when trying to evaluate projections of climate change is the
+vast amount of information available from the Coupled Model Intercomparison
+Project (CMIP) exercises. Additionally, results from CMIP phases 5 and 6
+can be quite different, so a comparison between the two multi-model
+ensembles can be made to evaluate their differences and similarities. To
+account for the scenario uncertainty of the projections, the recipe uses
+data from three different end-of-century radiative forcings.
+
+This recipe compares regional surface temperature and precipitation
+against larger-scale means to obtain the hotspot magnitudes for both
+CMIP5 and CMIP6 in the 2.6, 4.5 and 8.5 `Wm^{-2}` radiative forcings
+by the year 2100 against the pre-industrial era
+(RCP2.6, RCP4.5, RCP8.5 for CMIP5 and SSP1-2.6, SSP2-4.5, SSP5-8.5 for CMIP6).
+The recipe is based on the work by `Cos et al. (2022) `_.
+
+Note: This recipe is currently set to evaluate the Mediterranean hotspot
+(with bounds start_longitude: -10, end_longitude: 40, start_latitude: 30,
+end_latitude: 45) but it can be set to any other rectangular region.
+
+Available recipes and diagnostics
+---------------------------------
+
+Recipes are stored in esmvaltool/recipes/
+
+  * ``recipe_climate_change_hotspot.yml``: Loads and ensembles the data,
+    computes the necessary climate change hotspot diagnostics
+    and plots the resulting figures.
+
+Diagnostics are stored in esmvaltool/diag_scripts/cos22esd/
+
+  * ``climate_change_hotspot.py``: Calculates the regional field hotspot
+    for temperature and precipitation and the 10-year rolling mean
+    timeseries for regional and large-scale temperature and precipitation.
+
+  * ``hotspot_plotter.py``: Gathers the data output from the
+    ``climate_change_hotspot.py`` script and plots the hotspot fields and
+    the rolling mean timeseries [Figures 2, 3, S2 and S4 by
+    `Cos et al. (2022) `_.].
+
+User settings in the recipe
+---------------------------
+
+#. Script ``climate_change_hotspot.py``
+
+   *Required settings for script*
+
+   * ``baseline_period``: Historical period that serves as a reference to
+     compute the time anomalies.
+
+   * ``future_periods``: List of the two future periods given in years
+     ("YYYY-YYYY") where the hotspot will be computed.
+     Following the format [future period #1, future period #2].
+
+   * ``region``: List of longitudes and latitudes that enclose a rectangular
+     region. In the form of [start_longitude, end_longitude, start_latitude,
+     end_latitude].
+
+   * ``region_name``: Name of the region to be included in the provenance
+     record.
+
+
+#. Script ``hotspot_plotter.py``
+
+   *Required settings for script*
+
+   * ``baseline_period``: Historical period displayed in the figures' titles.
+
+   * ``future_periods``: List of the two future periods given in years
+     ("YYYY-YYYY"), following the format [future period #1, future period #2],
+     used to identify the ancestor files and in the figure titles.
+
+   * ``region``: List of longitudes and latitudes that enclose a region.
+     In the form of [start_longitude, end_longitude, start_latitude,
+     end_latitude]. Used in the title to identify the precipitation
+     large-scale region.
+
+   * ``region_name``: Name of the region used in the plot titles.
+
+
+Modifying the datasets and scenarios used
+-----------------------------------------
+
+``recipe_climate_change_hotspot.yml`` can be modified to use different
+scenario combinations. The standard recipe uses data from scenarios with the
+radiative forcings 2.6, 4.5 and 8.5 `Wm^{-2}` (referred to as 26, 45 and 85),
+but any combination of three scenarios from the following list can be used:
+
+.. code-block:: yaml
+
+    26: "RCP2.6/SSP1-2.6"
+    45: "RCP4.5/SSP2-4.5"
+    60: "RCP6.0/SSP4-6.0"
+    85: "RCP8.5/SSP5-8.5"
+
+To specify which datasets are available for each scenario, lists of datasets
+can be attributed to a specific CMIP project and scenario between the
+``documentation`` and ``preprocessor`` sections of the recipe as follows:
+
+.. code-block:: yaml
+
+    cmip6_85: &cmip6_85
+      - {...dataset keys...}
+      - {...dataset keys...}
+    cmip5_85: &cmip5_85
+      - {...dataset keys...}
+      - {...dataset keys...}
+    cmip6_45: &cmip6_45
+      - {...dataset keys...}
+      - {...dataset keys...}
+    cmip5_45: &cmip5_45
+      - {...dataset keys...}
+      - {...dataset keys...}
+
+These different dataset sections will be called at each diagnostic as
+``additional_datasets`` using the anchors ``*cmip6_85``, ``*cmip5_85``, etc.
+as in the example:
+
+.. code-block:: yaml
+
+    pr_cmip6_85:
+      variables:
+        pr:
+          mip: Amon
+          short_name: pr
+          preprocessor: ensemble_members
+          additional_datasets: *cmip6_85
+      scripts:
+        pr_cmip6_85:
+          <<: *script_input
+
+To use other scenarios, the datasets and diagnostics must be changed while
+maintaining the format ``cmip{phase}_{scenario}`` and
+``{variable}_cmip{phase}_{scenario}``.
+For example, if we want scenario 60 instead of scenario 85, we would need to
+include the datasets available for ``cmip6_60`` and ``cmip5_60``, and the
+previous diagnostic would change to:
+
+.. code-block:: yaml
+
+    pr_cmip6_60:
+      variables:
+        pr:
+          mip: Amon
+          short_name: pr
+          preprocessor: ensemble_members
+          additional_datasets: *cmip6_60
+      scripts:
+        pr_cmip6_60:
+          <<: *script_input
+
+Finally, if the datasets that need to be included in the multi-model means
+are common to all scenarios, the dataset sections can be simplified to:
+
+.. code-block:: yaml
+
+    cmip6: &cmip6
+      - {...dataset keys...}
+      - {...dataset keys...}
+    cmip5: &cmip5
+      - {...dataset keys...}
+      - {...dataset keys...}
+
+Note that the diagnostics' ``additional_datasets`` will need to be modified
+accordingly.
+
+Variables
+---------
+
+* tas (atmos, monthly mean, longitude latitude time)
+* pr (atmos, monthly mean, longitude latitude time)
+
+References
+----------
+
+* `Cos et al. 2022 `_, Earth Syst. Dynam., 13, 321–340
+
+
+Example plots
+-------------
+
+.. _fig_climate_change_hotspot_1:
+.. figure:: /recipes/figures/cos22esd/tas_45.png
+   :align: center
+
+.. figure:: /recipes/figures/cos22esd/pr_45.png
+   :align: center
+
+   Mediterranean region temperature (upper rows) and precipitation (lower
+   rows) change differences against the mean global temperature change and
+   the mean 30–45° N latitudinal belt precipitation change, respectively.
+   The changes for the periods 2041–2060 (first and third row) and 2081–2100
+   (second and fourth row) are evaluated against the 1986–2005 mean. The
+   differences are shown for the CMIP5 (left) and CMIP6 (right) DJF, JJA and
+   annual mean projections (columns) under the high-emission scenarios
+   RCP8.5 and SSP5-8.5, respectively. N indicates the number of models
+   included in the ensemble mean.
+
+.. _fig_climate_change_hotspot_2:
+.. figure:: /recipes/figures/cos22esd/scenario_combination_tas-tas_jja.png
+   :align: center
+
+   Mediterranean region warming against global warming for the summer
+   2.6, 4.5 and 8.5 `Wm^{-2}` RCP and SSP scenarios
+   for the CMIP5 and CMIP6 ensemble means.
+   Each dot represents a 10-year mean change beginning from the period
+   1960-1969 (light colouring) until 2091-2100 (opaque colouring). The
+   changes are computed with 1986-2005 as the baseline. An ordinary least
+   squares linear regression is computed and the slope and `r` values are
+   shown. N indicates the number of models included in the ensemble mean.
diff --git a/doc/sphinx/source/recipes/recipe_climate_patterns.rst b/doc/sphinx/source/recipes/recipe_climate_patterns.rst
new file mode 100644
index 0000000000..f7336c91c4
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_climate_patterns.rst
@@ -0,0 +1,107 @@
+.. _recipes_climate_patterns:
+
+Generating Climate Patterns from CMIP6 Models
+=============================================
+
+Overview
+--------
+
+The recipe recipe_climate_patterns generates climate patterns from CMIP6
+model datasets.
+
+.. note::
+   The regrid setting in the recipe is set to a 2.5x3.75 grid. This is done
+   to match the current resolution in the IMOGEN-JULES model, but it can be
+   adjusted without issues to a finer or coarser pattern grid.
+
+
+Available recipes and diagnostics
+---------------------------------
+
+Recipes are stored in esmvaltool/recipes/
+
+* recipe_climate_patterns.yml
+
+Diagnostics are stored in esmvaltool/diag_scripts/climate_patterns/
+
+* climate_patterns.py: generates climate patterns from input datasets
+* sub_functions.py: set of sub-functions to assist the driving scripts
+* plotting.py: contains all plotting functions for the driving scripts
+
+
+User settings in recipe
+-----------------------
+
+#. Script climate_patterns.py
+
+   *Required settings for script*
+
+   None
+
+   *Optional settings for script*
+
+   * jules_mode: output JULES-specific variable names + .nc files
+   * parallelise: whether or not to parallelise over models
+   * area: calculate the patterns globally, or over land only
+
+   *Required settings for variables*
+
+   * short_name
+   * additional_datasets
+
+   *Optional settings for variables*
+
+   None
+
+   *Required settings for preprocessor*
+
+   * monthly_statistics: converts data to mean monthly data
+
+   *Optional settings for preprocessor*
+
+   * regrid: regrids data
+
+
+Variables
+---------
+
+#. Script climate_patterns.py
+
+* tasmax (atmos, monthly, longitude latitude time)
+* tasmin (atmos, monthly, longitude latitude time)
+* tas (atmos, monthly, longitude latitude time)
+* huss (atmos, monthly, longitude latitude time)
+* pr (atmos, monthly, longitude latitude time)
+* sfcWind (atmos, monthly, longitude latitude time)
+* ps (atmos, monthly, longitude latitude time)
+* rsds (atmos, monthly, longitude latitude time)
+* rlds (atmos, monthly, longitude latitude time)
+
+
+Observations and reformat scripts
+---------------------------------
+
+None
+
+References
+----------
+
+* Huntingford, C., Cox, P. An analogue model to derive additional climate
+  change scenarios from existing GCM simulations.
+  Climate Dynamics 16, 575–586 (2000). https://doi.org/10.1007/s003820000067
+
+* Mathison, C. T. et al. A rapid application emissions-to-impacts tool
+  for scenario assessment: Probabilistic Regional Impacts from Model patterns
+  and Emissions (PRIME).
+  EGUsphere [preprint], (2024). https://doi.org/10.5194/egusphere-2023-2932
+
+Example plots
+-------------
+
+.. _fig_climate_patterns_2:
+.. figure:: /recipes/figures/climate_patterns/patterns.png
+   :align: center
+   :width: 80%
+
+   Patterns generated for CMIP6 models, gridded view. Patterns are shown per
+   variable, for the month of January.
\ No newline at end of file
diff --git a/doc/sphinx/source/recipes/recipe_climwip.rst b/doc/sphinx/source/recipes/recipe_climwip.rst
new file mode 100644
index 0000000000..900698b85a
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_climwip.rst
@@ -0,0 +1,332 @@
+.. _recipe_climwip:
+
+Climate model Weighting by Independence and Performance (ClimWIP)
+=================================================================
+
+Overview
+--------
+
+Projections of future climate change are often based on multi-model
+ensembles of global climate models such as CMIP6. To condense the
+information from these models, they are often combined into
+probabilistic estimates such as a mean and a related uncertainty range
+(such as the standard deviation). However, not all models in a given
+multi-model ensemble are always equally ‘fit for purpose’ and it can
+make sense to weight models based on their ability to simulate
+observed quantities related to the target. In addition, multi-model
+ensembles such as CMIP can contain several models based on a very
+similar code base (shared components, differences only in
+resolution, etc.), leading to complex inter-dependencies between the
+models. Weighting models according to their independence helps to
+adjust for this.
+
+
+This recipe implements the **Climate model Weighting by Independence and Performance
+(ClimWIP)** method. It is based on work by `Knutti et al. (2017) `_,
+`Lorenz et al. (2018) `_,
+`Brunner et al. (2019) `_,
+`Merrifield et al. (2020) `_,
+`Brunner et al. (2020) `_. Weights are
+calculated based on historical model performance in several metrics (which can be
+defined by the ``performance_contributions`` parameter) as well as by their independence
+from all the other models in the ensemble based on their output fields in several metrics
+(which can be defined by the ``independence_contributions`` parameter). These weights
+can be used in subsequent evaluation scripts (some of which are implemented as part of
+this diagnostic).
+
+**Note**: this recipe is still being developed!
+A more comprehensive (yet older) implementation can be found on GitHub:
+https://github.com/lukasbrunner/ClimWIP
+
+
+Using shapefiles for cutting scientific regions
+-----------------------------------------------
+
+To use shapefiles for selecting SREX or AR6 regions by name, it is necessary
+to download them, e.g., from the sources below, and to reference the file
+using the `shapefile` parameter. This can be either an absolute or a relative
+path. In the example recipes they are stored in a subfolder `shapefiles`
+in the :ref:`configuration option ` ``auxiliary_data_dir``.
+
+SREX regions (AR5 reference regions): http://www.ipcc-data.org/guidelines/pages/ar5_regions.html
+
+AR6 reference regions: https://github.com/SantanderMetGroup/ATLAS/tree/v1.6/reference-regions
+
+Available recipes and diagnostics
+---------------------------------
+
+Recipes are stored in esmvaltool/recipes/
+
+  * ``recipe_climwip_test_basic.yml``: Basic sample recipe using only a few models
+  * ``recipe_climwip_test_performance_sigma.yml``: Advanced sample recipe for testing the perfect model test in particular
+  * ``recipe_climwip_brunner2019_med.yml``: Slightly modified results for one region from `Brunner et al. (2019) `_ (to change regions see below)
+  * ``recipe_climwip_brunner2020esd.yml``: Slightly modified results for `Brunner et al. (2020) `_
+
+Diagnostics are stored in esmvaltool/diag_scripts/weighting/climwip/
+
+  * ``main.py``: Compute weights for each input dataset
+  * ``calibrate_sigmas.py``: Compute the sigma values on the fly
+  * ``core_functions.py``: A collection of core functions used by the scripts
+  * ``io_functions.py``: A collection of input/output functions used by the scripts
+
+Plot scripts are stored in esmvaltool/diag_scripts/weighting/
+
+  * ``weighted_temperature_graph.py``: Show the difference between weighted and non-weighted temperature anomalies as time series.
+  * ``weighted_temperature_map.py``: Show the difference between weighted and non-weighted temperature anomalies on a map.
+  * ``plot_utilities.py``: A collection of functions used by the plot scripts.
+
+
+User settings in recipe
+-----------------------
+
+1. Script ``main.py``
+
+   *Required settings for script*
+
+   * ``performance_sigma`` xor ``calibrate_performance_sigma``: If
+     ``performance_contributions`` is given, exactly one of the two has to be
+     given. Otherwise they can be skipped or not set.
+
+     * ``performance_sigma``: float setting the shape parameter for the
+       performance weights calculation (determined offline).
+     * ``calibrate_performance_sigma``: dictionary setting the performance
+       sigma calibration. Has to contain at least the key-value pair
+       specifying ``target``: ``variable_group``. Optional parameters for
+       adjusting the calibration are not yet implemented. **Warning:** It is
+       highly recommended to visually inspect the graphical output of the
+       calibration to check if everything worked as intended. In case the
+       calibration fails, the best performance sigma will still be indicated
+       in the figure (see example :numref:`fig_climwip_5` below) but not
+       automatically picked - the user can decide to use it anyway by setting
+       it in the recipe (not recommended).
+
+   * ``independence_sigma``: float setting the shape parameter for the
+     independence weights calculation (determined offline). Can be skipped or
+     not set if ``independence_contributions`` is skipped or not set.
+     An on-the-fly calculation of the independence sigma is not yet
+     implemented.
+   * ``performance_contributions``: dictionary where the keys represent the
+     variable groups to be included in the performance calculation. The
+     values give the relative contribution of each group, with 0 being
+     equivalent to not including the group. If skipped or not set, weights
+     will be based purely on model independence (this is mutually exclusive
+     with ``independence_contributions`` being skipped or not set).
+   * ``independence_contributions``: dictionary where the keys represent the
+     variable groups to be included in the independence calculation. The
+     values give the relative contribution of each group, with 0 being
+     equivalent to not including the group. If skipped or not set, weights
+     will be based purely on model performance (this is mutually exclusive
+     with ``performance_contributions`` being skipped or not set).
+   * ``combine_ensemble_members``: set to true if ensemble members of the
+     same model should be combined during the processing (leads to identical
+     weights for all ensemble members of the same model). Recommended if
+     running with many (>10) ensemble members per model. If set to false, the
+     model independence weighting will still (partly) account for the (very
+     high) dependence between members of the same model. The success of this
+     will depend on the case and the selected parameters.
+     See `Merrifield et al. (2020) `_ for an in-depth discussion.
+   * ``obs_data``: list of project names to specify which are the
+     observational data. The rest is assumed to be model data.
+
+   *Required settings for variables*
+
+   * This script takes multiple variables as input as long as they are
+     available for all models
+   * ``start_year``: provide the period for which to compute performance and
+     independence.
+   * ``end_year``: provide the period for which to compute performance and
+     independence.
+   * ``mip``: typically Amon
+   * ``preprocessor``: e.g., climatological_mean
+   * ``additional_datasets``: this should be ``*obs_data`` and is only needed
+     for variables used in ``performance_contributions``.
+
+   *Required settings for preprocessor*
+
+   * Different combinations of preprocessor functions can be used, but the
+     end result should always be aggregated over the time dimension, i.e. the
+     input for the diagnostic script should be 2d (lat/lon).
+
+   *Optional settings for preprocessor*
+
+   * ``extract_region`` or ``extract_shape`` can be used to crop the input
+     data.
+   * ``extract_season`` can be used to focus on a single season.
+   * different climate statistics can be used to calculate mean, (detrended)
+     std_dev, or trend.
+
+2. Script ``weighted_temperature_graph.py``
+
+   *Required settings for script*
+
+   * ``ancestors``: must include weights from previous diagnostic
+   * ``weights``: the filename of the weights: 'weights.nc'
+   * ``settings``: a list of plot settings: ``start_year`` (integer),
+     ``end_year`` (integer), ``central_estimate`` ('mean' or integer between
+     0 and 100 giving the percentile), ``lower_bound`` (integer between 0 and
+     100), ``upper_bound`` (integer between 0 and 100)
+
+   *Required settings for variables*
+
+   * This script only takes temperature (tas) as input
+   * ``start_year``: provide the period for which to plot a temperature
+     change graph.
+   * ``end_year``: provide the period for which to plot a temperature change
+     graph.
+   * ``mip``: typically Amon
+   * ``preprocessor``: temperature_anomalies
+
+   *Required settings for preprocessor*
+
+   * Different combinations of preprocessor functions can be used, but the
+     end result should always be aggregated over the latitude and longitude
+     dimensions, i.e. the input for the diagnostic script should be 1d
+     (time).
+
+   *Optional settings for preprocessor*
+
+   * Can be a global mean or focus on a point, region or shape
+   * Anomalies can be calculated with respect to a custom reference period
+   * Monthly, annual or seasonal average/extraction can be used
+
+3. Script ``weighted_temperature_map.py``
+
+   *Required settings for script*
+
+   * ``ancestors``: must include weights from previous diagnostic
+   * ``weights``: the filename of the weights: 'weights_combined.nc'
+
+   *Optional settings for script*
+
+   * ``model_aggregation``: how to aggregate the models: mean (default),
+     median, integer between 0 and 100 representing a percentile
+   * ``xticks``: positions to draw xticks at
+   * ``yticks``: positions to draw yticks at
+
+   *Required settings for variables*
+
+   * This script takes temperature (tas) as input
+   * ``start_year``: provide the period for which to plot a temperature
+     change graph.
+   * ``end_year``: provide the period for which to plot a temperature change
+     graph.
+   * ``mip``: typically Amon
+   * ``preprocessor``: temperature_anomalies
+
+   *Optional settings for variables*
+
+   * A second variable is optional: temperature reference (tas_reference).
+     If given, maps of temperature change relative to the reference are
+     drawn, otherwise absolute temperatures are drawn.
+   * tas_reference takes the same fields as tas
+
+
+Updating the Brunner et al. (2019) recipe for new regions
+---------------------------------------------------------
+
+``recipe_climwip_brunner2019_med.yml`` demonstrates a very similar setup to
+`Brunner et al. (2019) `_ but only for one region (the Mediterranean). To
+calculate weights for other regions, the recipe needs to be updated in two
+places:
+
+.. code-block:: yaml
+
+    extract_shape:
+      shapefile: shapefiles/srex.shp
+      decomposed: True
+      method: contains
+      crop: true
+      ids:
+        - 'South Europe/Mediterranean [MED:13]'
+
+The ``ids`` field takes any valid `SREX `_ region
+key or any valid `AR6 `_ region key
+(depending on the shapefile). Note that this needs to be the full string here
+(not the abbreviation).
+
+The sigma parameters need to be set according to the selected region. The
+sigma values for the regions used in `Brunner et al. (2019) `_ can be found
+in table 1 of the paper.
+
+.. code-block:: yaml
+
+    performance_sigma: 0.546
+    independence_sigma: 0.643
+
+**Warning:** if a new region is used, the sigma values should be
+recalculated! This can be done by commenting out the sigma values (lines
+above) and enabling the blocks defining the target of the weighting:
+
+.. code-block:: yaml
+
+    CLIM_future:
+      short_name: tas
+      start_year: 2081
+      end_year: 2100
+      mip: Amon
+      preprocessor: region_mean
+
+as well as
+
+.. code-block:: yaml
+
+    calibrate_performance_sigma:
+      target: CLIM_future
+
+In this case ClimWIP will attempt to perform an on-the-fly perfect model test
+to estimate the lowest performance sigma (strongest weighting) which does not
+lead to overconfident weighting. **Important:** the user should always check
+the test output for unusual behaviour. For most cases the performance sigma
+should lie around 0.5.
+In cases where the perfect model test fails (no appropriate performance sigma
+can be found), the test will still produce graphical output before raising a
+ValueError. The user can then decide to manually set the performance sigma to
+the most appropriate value (based on the output) - **this is not
+recommended** and should only be done with care! A failing perfect model test
+can be a hint at one of the following: (1) not enough models in the ensemble
+for a robust distribution (normally >20 models should be used) or (2) the
+performance metrics used are not relevant for the target.
+
+An on-the-fly calibration for the independence sigma is not yet implemented.
+For most cases we recommend using the same setup as in
+`Brunner et al. (2020) `_ or
+`Merrifield et al. (2020) `_ (global or hemispherical
+temperature and sea level pressure climatologies as metrics and independence
+sigma values between 0.2 and 0.5).
+
+**Warning:** if a new region or target is used, the metrics provided to
+establish the weights might no longer be appropriate. Using unrelated metrics
+with no correlation and/or physical relation to the target will reduce the
+skill of the weighting and ultimately render it useless! In such cases the
+perfect model test might fail. This means the performance metrics should be
+updated.
+
+
+Brunner et al. (2020) recipe and example independence weighting
+---------------------------------------------------------------
+
+``recipe_climwip_brunner2020esd.yml`` implements the weighting used in
+`Brunner et al. (2020) `_. Compared to the paper, there are minor differences
+because two models (CAMS-CSM1-0 and MPI-ESM1-2-HR (r2)) had to be excluded
+owing to errors in the ESMValTool pre-processor, and because only one
+observational dataset (ERA5) is used.
+
+The recipe uses an additional step between pre-processor and weight
+calculation to calculate anomalies relative to the global mean (e.g.,
+tas_ANOM = tas_CLIM - global_mean(tas_CLIM)). This means we do not use the
+absolute temperatures of a model as a performance criterion but rather the
+horizontal temperature distribution (see `Brunner et al. 2020 `_ for a
+discussion).
+
+This recipe also implements a somewhat general independence weighting for
+CMIP6. In contrast to model performance (which should be case-specific),
+model independence can largely be seen as dependent only on the multi-model
+ensemble in use, not on the target variable or region. This means that the
+configuration used should be valid for similar subsets of CMIP6 as used in
+this recipe:
+
+
+.. code-block:: yaml
+
+    combine_ensemble_members: true
+    independence_sigma: 0.54
+    independence_contributions:
+      tas_CLIM_i: 1
+      psl_CLIM_i: 1
+
+Note that this approach weights ensemble members of the same model with a 1/N
+independence scaling (combine_ensemble_members: true) as well as different
+models with an output-based independence weighting. Different approaches to
+handle ensemble members are discussed in `Merrifield et al. (2020) `_. Note
+that, unlike for performance, climatologies are used for independence (i.e.,
+the global mean is **not** removed for independence). **Warning:** Using only
+the independence weighting without any performance weighting might not always
+lead to meaningful results! The independence weighting is based on model
+output, which means that if a model is very different from all other models
+as well as the observations, it will get a very high independence weight (and
+also a high total weight in the absence of any performance weighting).
+This might not reflect the actual independence. It is therefore recommended
+to use weights based on both independence and performance for most cases.
+
+
+Variables
+---------
+
+* pr (atmos, monthly mean, longitude latitude time)
+* tas (atmos, monthly mean, longitude latitude time)
+* psl (atmos, monthly mean, longitude latitude time)
+* rsus, rsds, rlus, rlds, rsns, rlns (atmos, monthly mean, longitude latitude time)
+* More variables can be added if available for all datasets.
+
+
+Observations and reformat scripts
+---------------------------------
+
+Observation data is defined in a separate section in the recipe and may
+include multiple datasets.
+
+References
+----------
+
+* `Brunner et al. (2020) `_, Earth Syst. Dynam., 11, 995-1012
+* `Merrifield et al. (2020) `_, Earth Syst. Dynam., 11, 807-834
+* `Brunner et al. (2019) `_, Environ. Res. Lett., 14, 124010
+* `Lorenz et al. (2018) `_, J. Geophys. Res.: Atmos., 123, 4509-4526
+* `Knutti et al. (2017) `_, Geophys. Res. Lett., 44, 1909-1918
+
+Example plots
+-------------
+
+.. _fig_climwip_1:
+.. figure:: /recipes/figures/climwip/independence_tas.png
+   :align: center
+
+   Distance matrix for temperature, providing the independence metric.
+
+.. _fig_climwip_2:
+.. figure:: /recipes/figures/climwip/performance_pr.png
+   :align: center
+
+   Distance of precipitation relative to observations, providing the
+   performance metric.
+
+.. _fig_climwip_3:
+.. figure:: /recipes/figures/climwip/weights_tas.png
+   :align: center
+
+   Weights determined by combining independence and performance metrics for
+   tas.
+
+.. _fig_climwip_4:
+.. figure:: /recipes/figures/climwip/temperature_anomaly_graph.png
+   :align: center
+
+   Interquartile range of temperature anomalies relative to 1981-2010,
+   weighted versus non-weighted.
+
+.. _fig_climwip_5:
+.. figure:: /recipes/figures/climwip/performance_sigma_calibration.png
+   :align: center
+
+   Performance sigma calibration: The thick black line gives the reliability
+   (cf. weather forecast verification) which should reach at least 80%. The
+   thick grey line gives the mean change in spread between the unweighted and
+   weighted 80% ranges as an indication of the weighting strength (if it
+   reaches 1, the weighting has no effect on uncertainty). The smallest sigma
+   (i.e., strongest weighting) which is not overconfident (reliability >=
+   80%) is selected. If the test fails (like in this example) the smallest
+   sigma which comes closest to 80% will be indicated in the legend (but NOT
+   automatically selected).
+
+.. _fig_climwip_6:
+.. figure:: /recipes/figures/climwip/temperature_change_weighted_map.png
+   :align: center
+
+   Map of weighted mean temperature change 2081-2100 relative to 1995-2014.
diff --git a/doc/sphinx/source/recipes/recipe_clouds.rst b/doc/sphinx/source/recipes/recipe_clouds.rst
index ee4e1bc182..d4497a5d4f 100644
--- a/doc/sphinx/source/recipes/recipe_clouds.rst
+++ b/doc/sphinx/source/recipes/recipe_clouds.rst
@@ -1,48 +1,94 @@
+.. _recipes_clouds:
+
 Clouds
 ======
 
 Overview
 --------
 
-The recipe recipe_lauer13jclim.yml computes the climatology and interannual
-variability of climate relevant cloud variables such as cloud radiative forcing
-(CRE), liquid water path (lwp), cloud amount (clt), and total precipitation (pr)
-reproducing some of the evaluation results of Lauer and Hamilton (2013). The
-recipe includes a comparison of the geographical distribution of multi-year
-average cloud parameters from individual models and the multi-model mean with
-satellite observations. Taylor diagrams are generated that show the multi-year
-annual or seasonal average performance of individual models and the multi-model
-mean in reproducing satellite observations. The diagnostic also facilitates the
-assessment of the bias of the multi-model mean and zonal averages of individual
-models compared with satellite observations. Interannual variability is
-estimated as the relative temporal standard deviation from multi-year timeseries
-of data with the temporal standard deviations calculated from monthly anomalies
-after subtracting the climatological mean seasonal cycle.
+Four recipes are available to evaluate cloud climatologies from CMIP models.
+
+1) recipe_clouds_bias.yml computes climatologies and creates map plots of
+   multi-model mean, mean bias, absolute bias and relative bias of a given
+   variable. Similar to IPCC AR5 (ch. 9) fig. 9.2 a/b/c (`Flato et al., 2013`_).
+
+2) recipe_clouds_ipcc.yml computes multi-model mean bias and zonal means of
+   the cloud radiative effect (shortwave, longwave and net). Similar to
+   IPCC AR5 (ch. 9) fig. 9.5 (`Flato et al., 2013`_).
+
+3) Recipe recipe_lauer13jclim.yml computes the climatology and interannual
+   variability of climate relevant cloud variables such as cloud radiative forcing
+   (CRE), liquid water path (lwp), cloud amount (clt), and total precipitation (pr)
+   reproducing some of the evaluation results of `Lauer and Hamilton (2013)`_. The
+   recipe includes a comparison of the geographical distribution of multi-year
+   average cloud parameters from individual models and the multi-model mean with
+   satellite observations. Taylor diagrams are generated that show the multi-year
+   annual or seasonal average performance of individual models and the multi-model
+   mean in reproducing satellite observations. The diagnostic also facilitates the
+   assessment of the bias of the multi-model mean and zonal averages of individual
+   models compared with satellite observations. Interannual variability is
+   estimated as the relative temporal standard deviation from multi-year timeseries
+   of data with the temporal standard deviations calculated from monthly anomalies
+   after subtracting the climatological mean seasonal cycle.
+   Note that the satellite observation dataset used in the original recipe (UWisc)
+   is no longer maintained and has been superseded by MAC-LWP
+   (`Elsaesser et al., 2017`_). We recommend using MAC-LWP.
+
+4) Recipe family recipe_lauer22jclim_*.yml is an extension of
+   recipe_lauer13jclim.yml for evaluation of cloud radiative forcing
+   (CRE), liquid water path (lwp), ice water path (clivi), total cloud amount (clt),
+   cloud liquid water content (clw), cloud ice water content (cli), cloud fraction
+   (cl) and water vapor path (prw) from CMIP6 models in comparison to CMIP5 results
+   and satellite observations. Wherever possible, the diagnostics use
+   multi-observational products as reference datasets. The recipe family
+   reproduces all figures from `Lauer et al. (2023)`_: maps of the geographical
+   distribution of multi-year averages, Taylor diagrams for multi-year annual
+   averages, temporal variability, seasonal cycle amplitude, cloud ice fraction
+   as a function of temperature, zonal means of 3-dim cloud liquid/ice content and
+   cloud fraction, matrices of cloud cover and total cloud water path as a function
+   of SST and 500 hPa vertical velocity, shortwave CRE and total cloud water path
+   binned by total cloud cover and pdfs of total cloud cover for selected regions.
+
+.. _`Flato et al., 2013`: https://www.ipcc.ch/site/assets/uploads/2018/02/WG1AR5_Chapter09_FINAL.pdf
+.. _`Lauer and Hamilton (2013)`: https://journals.ametsoc.org/view/journals/clim/26/11/jcli-d-12-00451.1.xml
+.. _`Lauer et al. (2023)`: https://journals.ametsoc.org/view/journals/clim/36/2/JCLI-D-22-0181.1.xml
+.. _`Elsaesser et al., 2017`: https://journals.ametsoc.org/view/journals/clim/30/24/jcli-d-16-0902.1.xml
 
 Available recipes and diagnostics
 ---------------------------------
 
-Recipes are stored in recipes/
+Recipes are stored in recipes/clouds
 
- * recipe_lauer13jclim.yml
+* recipe_clouds_bias.yml
+* recipe_clouds_ipcc.yml
+* recipe_lauer13jclim.yml
+* recipe_lauer22jclim_*.yml (* = fig1_clim_amip, fig1_clim, fig2_taylor_amip,
+  fig2_taylor, fig3-4_zonal, fig5_lifrac, fig6_interannual, fig7_seas,
+  fig8_dyn, fig9-11ab_scatter, fig9-11c_pdf)
 
 Diagnostics are stored in diag_scripts/clouds/
 
- * clouds.ncl: global maps of (multi-year) annual means including multi-model
-   mean
- * clouds_bias.ncl: global maps of the multi-model mean and the multi-model
-   mean bias
- * clouds_interannual: global maps of the interannual variability
- * clouds_isccp: global maps of multi-model mean minus observations + zonal
-   averages of individual models, multi-model mean and observations
- * clouds_taylor.ncl: taylor diagrams
+* clouds.ncl: global maps of (multi-year) annual means including multi-model mean
+* clouds_bias.ncl: global maps of the multi-model mean and the multi-model mean bias
+* clouds_dyn_matrix.ncl: cloud properties by dynamical regime (SST, omega500)
+* clouds_interannual.ncl: global maps of the interannual variability
+* clouds_ipcc.ncl: global maps of multi-model mean minus observations + zonal
+  averages of individual models, multi-model mean and observations
+* clouds_lifrac_scatter.ncl: cloud liquid water fraction as a function of temperature
+* clouds_lifrac_scatter_postproc.ncl: additional plots and diagnostics using
+  the output of clouds_lifrac_scatter.ncl for given CMIP5/CMIP6 model pairs
+* clouds_pdf.ncl: pdf of cloud parameters
+* clouds_seasonal_cycle.ncl: seasonal cycle amplitude
+* clouds_taylor.ncl: Taylor diagrams as in `Lauer and Hamilton (2013)`_
+* clouds_taylor_double.ncl: Taylor diagrams as in `Lauer et al. (2023)`_
+* clouds_zonal.ncl: zonal means of 3-dim variables
 
 User settings in recipe
 -----------------------
 
-#. Script clouds.ncl
+1. Script clouds.ncl
Script clouds.ncl

   *Required settings (scripts)*

@@ -54,12 +100,21 @@ User settings in recipe
    * explicit_cn_levels: explicit contour levels (array)
    * extralegend: plot legend(s) to extra file(s)
    * filename_add: optionally add this string to plot filesnames
+   * multiobs_exclude: list of *observational* datasets to be excluded when
+     calculating uncertainty estimates from multiple observational datasets
+     (see also multiobs_uncertainty)
+   * multiobs_uncertainty: calculate uncertainty estimates from multiple
+     observational datasets (true, false); by default, all "obs", "obs6",
+     "obs4mips" and "native6" datasets are used; any of these datasets can be
+     explicitly excluded when also specifying "multiobs_exclude"
    * panel_labels: label individual panels (true, false)
    * PanelTop: manual override for "@gnsPanelTop" used by panel plot(s)
    * projection: map projection for plotting (default =
      "CylindricalEquidistant")
    * showdiff: calculate and plot differences model - reference (default =
      false)
+   * showyears: add start and end years to the plot titles
+     (default = false)
    * rel_diff: if showdiff = true, then plot relative differences (%) (default
      = False)
    * ref_diff_min: lower cutoff value in case of calculating relative
@@ -70,12 +125,14 @@ User settings in recipe
      "annual" = annual mean
    * treat_var_as_error: treat variable as error when averaging (true, false);
      true: avg = sqrt(mean(var*var)), false: avg = mean(var)
+   * var: short_name of variable to process (default = "" - use first
+     variable in variable list)

   *Required settings (variables)*

   none

-  * Optional settings (variables)
+  *Optional settings (variables)*

    * long_name: variable description
    * reference_dataset: reference dataset; REQUIRED when calculating
@@ -86,7 +143,7 @@ User settings in recipe

    * variable "lwp": diag_scripts/shared/plot/rgb/qcm3.rgb

-#. Script clouds_bias.ncl
+2. Script clouds_bias.ncl

   *Required settings (scripts)*

@@ -115,7 +172,38 @@ User settings in recipe
    * variable "pr-mmday": diag_scripts/shared/plots/rgb/ipcc-precip.rgb,
      diag_scripts/shared/plot/rgb/ipcc-precip-delta.rgb

-#. Script clouds_interannual.ncl
+3. Script clouds_dyn_matrix.ncl
+
+   *Required settings (scripts)*
+
+   * var_x: short name of variable on x-axis
+   * var_y: short name of variable on y-axis
+   * var_z: short name of variable to be binned
+   * xmin: min x value for generating x bins
+   * xmax: max x value for generating x bins
+   * ymin: min y value for generating y bins
+   * ymax: max y value for generating y bins
+
+   *Optional settings (scripts)*
+
+   * clevels: explicit values for probability labelbar (array)
+   * filename_add: optionally add this string to plot filenames
+   * nbins: number of equally spaced bins (var_x), default = 100
+   * sidepanels: show/hide side panels (default = False)
+   * xlabel: label overriding variable name for x-axis (e.g. SST)
+   * ylabel: label overriding variable name for y-axis (e.g. omega500)
+   * zdmin: min z value for labelbar (difference plots)
+   * zdmax: max z value for labelbar (difference plots)
+   * zmin: min z value for labelbar
+   * zmax: max z value for labelbar
+
+   *Required settings (variables)*
+
+   none
+
+   *Optional settings (variables)*
+
+   * reference_dataset: reference dataset
+
+
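As an illustration, a script entry for clouds_dyn_matrix.ncl could look like
+the following sketch; the variable names and bin ranges below are
+illustrative values, not settings prescribed by the recipes:
+
+.. code-block:: yaml
+
+   scripts:
+     clouds_dyn_matrix:
+       script: clouds/clouds_dyn_matrix.ncl
+       var_x: ts      # variable on the x-axis (e.g. SST)
+       var_y: wap     # variable on the y-axis (e.g. omega500)
+       var_z: clt     # variable to be binned
+       xmin: 270.     # illustrative bin range for var_x
+       xmax: 305.
+       ymin: -100.    # illustrative bin range for var_y
+       ymax: 100.
+       xlabel: SST
+       ylabel: omega500
+
+4. 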
Script clouds_interannual.ncl

   *Required settings (scripts)*

@@ -124,10 +212,12 @@ User settings in recipe

   *Optional settings (scripts)*

    * colormap: e.g., WhiteBlueGreenYellowRed, rainbow
+   * epsilon: "epsilon" value to be replaced with missing values
    * explicit_cn_levels: use these contour levels for plotting
-   * extrafiles: write plots for individual models to separate files
-     (true, false)
+   * filename_add: optionally add this string to plot filenames
    * projection: map projection, e.g., Mollweide, Mercator
+   * var: short_name of variable to process (default = "" - use first
+     variable in variable list)

   *Required settings (variables)*

@@ -138,11 +228,9 @@ User settings in recipe
    * long_name: description of variable
    * reference_dataset: name of reference datatset

-  *Color tables*
-
-  * variable "lwp": diag_scripts/shared/plots/rgb/qcm3.rgb
+.. _clouds_ipcc.ncl:

-#. Script clouds_ipcc.ncl
+5. Script clouds_ipcc.ncl

   *Required settings (scripts)*

@@ -151,6 +239,7 @@ User settings in recipe
   *Optional settings (scripts)*

    * explicit_cn_levels: contour levels
+   * highlight_dataset: name of dataset to highlight (default = "MultiModelMean")
    * mask_ts_sea_ice: true = mask T < 272 K as sea ice (only for variable "ts");
      false = no additional grid cells masked for variable "ts"
    * projection: map projection, e.g., Mollweide, Mercator
@@ -173,7 +262,7 @@ User settings in recipe
    * variables "pr", "pr-mmday": diag_scripts/shared/plot/rgb/ipcc-precip-delta.rgb

-#. Script clouds_taylor.ncl
+6. Script clouds_lifrac_scatter.ncl

   *Required settings (scripts)*

@@ -181,63 +270,240 @@ User settings in recipe

   *Optional settings (scripts)*

-   * embracelegend: false (default) = include legend in plot, max. 2 columns
-     with dataset names in legend; true = write extra file with legend, max. 7
-     dataset names per column in legend, alternative observational dataset(s)
-     will be plotted as a red star and labeled "altern. ref. dataset" in legend
-     (only if dataset is of class "OBS")
-   * estimate_obs_uncertainty: true = estimate observational uncertainties
-     from mean values (assuming fractions of obs. RMSE from documentation of
-     the obs data); only available for "CERES-EBAF", "MODIS", "MODIS-L3";
-     false = do not estimate obs. uncertainties from mean values
-   * filename_add: legacy feature: arbitrary string to be added to all
-     filenames of plots and netcdf output produced (default = "")
-   * mask_ts_sea_ice: true = mask T < 272 K as sea ice (only for variable "ts");
-     false = no additional grid cells masked for variable "ts"
-   * styleset: "CMIP5", "DEFAULT" (if not set, clouds_taylor.ncl will create a
-     color table and symbols for plotting)
-   * timemean: time averaging; annualclim (default) = 1 plot annual mean;
-     seasonalclim = 4 plots (DJF, MAM, JJA, SON)
-   * valid_fraction: used for creating sea ice mask (mask_ts_sea_ice = true):
-     fraction of valid time steps required to mask grid cell as valid data
+   * filename_add: optionally add this string to plot filenames
+   * min_mass: minimum cloud condensate (same units as clw, cli)
+   * mm_mean_median: calculate multi-model mean and median
+   * nbins: number of equally spaced bins (ta (x-axis)), default = 20
+   * panel_labels: label individual panels (true, false)
+   * PanelTop: manual override for "@gnsPanelTop" used by panel plot(s)

   *Required settings (variables)*

-   * reference_dataset: name of reference data set
+   none
+
+   *Optional settings (variables)*
+
+   * reference_dataset: reference dataset
+
+
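As an illustration, a script entry for clouds_lifrac_scatter.ncl could look
+like the following sketch (the min_mass threshold is an illustrative value,
+not a recommendation):
+
+.. code-block:: yaml
+
+   scripts:
+     lifrac_scatter:
+       script: clouds/clouds_lifrac_scatter.ncl
+       min_mass: 1.0e-6   # minimum cloud condensate, same units as clw/cli
+       mm_mean_median: true
+       nbins: 20
+       panel_labels: true
+
+7. 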
Script clouds_lifrac_scatter_postproc.ncl
+
+   *Required settings (scripts)*
+
+   * models: array of CMIP5/CMIP6 model pairs to be compared
+   * refname: name of reference dataset
+
+   *Optional settings (scripts)*
+
+   * nbins: number of bins used by clouds_lifrac_scatter.ncl (default = 20)
+   * reg: region (string) (default = "")
+   * t_int: array of temperatures for printing additional diagnostics
+
+   *Required settings (variables)*
+
+   none
+
+   *Optional settings (variables)*
+
+   none
+
+8. Script clouds_pdf.ncl
+
+   *Required settings (scripts)*
+
+   * xmin: min value for bins (x axis)
+   * xmax: max value for bins (x axis)
+
+   *Optional settings (scripts)*
+
+   * filename_add: optionally add this string to output filenames
+   * plot_average: show average frequency per bin
+   * region: show only selected geographic region given as latmin, latmax,
+     lonmin, lonmax
+   * styleset: "CMIP5", "DEFAULT"
+   * ymin: min value for frequencies (%) (y axis)
+   * ymax: max value for frequencies (%) (y axis)
+
+   *Required settings (variables)*
+
+   none
+
+   *Optional settings (variables)*
+
+   * reference_dataset: reference dataset
+
+9. Script clouds_seasonal_cycle.ncl
+
+   *Required settings (scripts)*
+
+   none
+
+   *Optional settings (scripts)*
+
+   * colormap: e.g., WhiteBlueGreenYellowRed, rainbow
+   * epsilon: "epsilon" value to be replaced with missing values
+   * explicit_cn_levels: use these contour levels for plotting
+   * filename_add: optionally add this string to plot filenames
+   * projection: map projection, e.g., Mollweide, Mercator
+   * showyears: add start and end years to the plot titles
+     (default = false)
+   * var: short_name of variable to process (default = "" i.e. use
+     first variable in variable list)
+
+   *Required settings (variables)*
+
+   none
+
+   *Optional settings (variables)*
+
+   * long_name: description of variable
+   * reference_dataset: name of reference dataset
+
+10. Script clouds_taylor.ncl
+
+   *Required settings (scripts)*
+
+   none
+
+   *Optional settings (scripts)*
+
+   * embracelegend: false (default) = include legend in plot, max. 2 columns
+     with dataset names in legend; true = write extra file with legend, max. 7
+     dataset names per column in legend, alternative observational dataset(s)
+     will be plotted as a red star and labeled "altern. ref. dataset" in legend
+     (only if dataset is of class "OBS")
+   * estimate_obs_uncertainty: true = estimate observational uncertainties
+     from mean values (assuming fractions of obs. RMSE from documentation of
+     the obs data); only available for "CERES-EBAF", "MODIS", "MODIS-L3";
+     false = do not estimate obs. 
uncertainties from mean values
+   * filename_add: legacy feature: arbitrary string to be added to all
+     filenames of plots and netcdf output produced (default = "")
+   * legend_filter: do not show individual datasets in legend that are of
+     project "legend_filter" (default = "")
+   * mask_ts_sea_ice: true = mask T < 272 K as sea ice (only for variable "ts");
+     false = no additional grid cells masked for variable "ts"
+   * multiobs_exclude: list of *observational* datasets to be excluded when
+     calculating uncertainty estimates from multiple observational datasets
+     (see also multiobs_uncertainty)
+   * multiobs_uncertainty: calculate uncertainty estimates from multiple
+     observational datasets (true, false); by default, all "obs", "obs6",
+     "obs4mips" and "native6" datasets are used; any of these datasets can be
+     explicitly excluded when also specifying "multiobs_exclude"
+   * styleset: "CMIP5", "DEFAULT" (if not set, clouds_taylor.ncl will create a
+     color table and symbols for plotting)
+   * timemean: time averaging; annualclim (default) = 1 plot annual mean;
+     seasonalclim = 4 plots (DJF, MAM, JJA, SON)
+   * valid_fraction: used for creating sea ice mask (mask_ts_sea_ice = true):
+     fraction of valid time steps required to mask grid cell as valid data
+   * var: short_name of variable to process (default = "" - use first variable
+     in variable list)
+
+   *Required settings (variables)*
+
+   * reference_dataset: name of reference data set
+
+   *Optional settings (variables)*
+
+   none
+
+11. Script clouds_taylor_double.ncl
+
+   *Required settings (scripts)*
+
+   none
+
+   *Optional settings (scripts)*
+
+   * filename_add: legacy feature: arbitrary string to be added to all
+     filenames of plots and netcdf output produced (default = "")
+   * multiobs_exclude: list of *observational* datasets to be excluded when
+     calculating uncertainty estimates from multiple observational datasets
+     (see also multiobs_uncertainty)
+   * multiobs_uncertainty: calculate uncertainty estimates from multiple
+     observational datasets (true, false); by default, all "obs", "obs6",
+     "obs4mips" and "native6" datasets are used; any of these datasets can be
+     explicitly excluded when also specifying "multiobs_exclude"
+   * projectcolors: colors for the projectgroups
+     (e.g. (/"(/0.0, 0.0, 1.0/)", "(/1.0, 0.0, 0.0/)"/))
+   * projectgroups: calculate mmm per "projectgroup"
+     (e.g. (/"cmip5", "cmip6"/))
+   * styleset: "CMIP5", "DEFAULT" (if not set, CLOUDS_TAYLOR_DOUBLE will
+     create a color table and symbols for plotting)
+   * timemean: time averaging; annualclim (default) = 1 plot annual mean,
+     seasonalclim = 4 plots (DJF, MAM, JJA, SON)
+   * var: short_name of variable to process (default = "" - use first variable
+     in variable list)
+
+   *Required settings (variables)*
+
+   * reference_dataset: name of reference data set
+
+   *Optional settings (variables)*
+
+   none
+
+
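As an illustration, a script entry for clouds_taylor_double.ncl comparing
+CMIP5 and CMIP6 results could look like the following sketch (the colors and
+the excluded dataset are illustrative, not recommendations):
+
+.. code-block:: yaml
+
+   scripts:
+     taylor_double:
+       script: clouds/clouds_taylor_double.ncl
+       var: clt
+       timemean: annualclim
+       projectgroups: ["cmip5", "cmip6"]   # one multi-model mean per group
+       projectcolors: ["(/0.0, 0.0, 1.0/)", "(/1.0, 0.0, 0.0/)"]
+       multiobs_uncertainty: true
+       multiobs_exclude: ["ERA5"]          # illustrative exclusion
+
+12. 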
Script clouds_zonal.ncl
+
+   *Required settings (scripts)*
+
+   none
+
+   *Optional settings (scripts)*
+
+   * embracesetup: True = 2 plots per line, False = 4 plots per line (default)
+   * explicit_cn_levels: explicit contour levels for mean values (array)
+   * explicit_cn_dlevels: explicit contour levels for differences (array)
+   * extralegend: plot legend(s) to extra file(s)
+   * filename_add: optionally add this string to plot filenames
+   * panel_labels: label individual panels (true, false)
+   * PanelTop: manual override for "@gnsPanelTop" used by panel plot(s)
+   * showdiff: calculate and plot differences (default = False)
+   * rel_diff: if showdiff = True, then plot relative differences (%) (default = False)
+   * rel_diff_min: lower cutoff value in case of calculating relative differences
+     (in units of input variable)
+   * t_test: perform t-test when calculating differences (default = False)
+   * timemean: time averaging - "seasonal" = (DJF, MAM, JJA, SON), "annual" = annual mean
+   * units_to: target units (automatic conversion)
+
+   *Required settings (variables)*
+
+   none
+
+   *Optional settings (variables)*
+
+   * long_name: variable description
+   * reference_dataset: reference dataset; REQUIRED when calculating differences (showdiff = True)
+   * units: variable units (for labeling plot only)
+
 Variables
 ---------

+* cl (atmos, monthly mean, longitude latitude time)
+* clcalipso (atmos, monthly mean, longitude latitude time)
+* cli (atmos, monthly mean, longitude latitude time)
+* clw (atmos, monthly mean, longitude latitude time)
 * clwvi (atmos, monthly mean, longitude latitude time)
 * clivi (atmos, monthly mean, longitude latitude time)
 * clt (atmos, monthly mean, longitude latitude time)
 * pr (atmos, monthly mean, longitude latitude time)
+* prw (atmos, monthly mean, longitude latitude time)
 * rlut, rlutcs (atmos, monthly mean, longitude latitude time)
 * rsut, rsutcs (atmos, monthly mean, longitude latitude time)
+* ta (atmos, monthly mean, longitude latitude time)
+* wap (atmos, monthly mean, longitude latitude time)

-Observations and reformat scripts
----------------------------------
-
-*Note: (1) obs4mips data can be used directly without any preprocessing;
-(2) see headers of reformat scripts for non-obs4mips data for download
-instructions.*
+Observations/reanalyses
+-----------------------

-* CERES-EBAF (obs4mips) - CERES TOA radiation fluxes (used for calculation of
-  cloud forcing)
-* GPCP-SG (obs4mips) - Global Precipitation Climatology Project total
-  precipitation
-* MODIS (obs4mips) - MODIS total cloud fraction
-* UWisc - University of Wisconsin-Madison liquid water path climatology, based
-  on satellite observbations from TMI, SSM/I, and AMSR-E, reference: O'Dell et
-  al. (2008), J. Clim.
+* CALIPSO-GOCCP
+* CALIPSO-ICECLOUD
+* CERES-EBAF
+* CLARA-AVHRR
+* CLOUDSAT-L2
+* ERA5
+* ERA-Interim
+* ESACCI-CLOUD
+* ESACCI-WATERVAPOUR
+* GPCP-SG
+* ISCCP-FH
+* MAC-LWP
+* MODIS
+* PATMOS-x
+* UWisc

-  *Reformat script:* reformat_scripts/obs/reformat_obs_UWisc.ncl

 References
 ----------
@@ -256,62 +522,104 @@ References
   models: A comparison of CMIP5 results with CMIP3 and satellite data, J.
   Clim., 26, 3823-3845, doi: 10.1175/JCLI-D-12-00451.1.

-* O’Dell, C.W., F.J. Wentz, and R. Bennartz (2008), Cloud liquid water path
-  from satellite-based passive microwave observations: A new climatology over
-  the global oceans, J. Clim., 21, 1721-1739, doi:10.1175/2007JCLI1958.1.
-
-* Pincus, R., S. Platnick, S.A. Ackerman, R.S. Hemler, Robert J. 
Patrick - Hofmann (2012), Reconciling simulated and observed views of clouds: MODIS, - ISCCP, and the limits of instrument simulators. J. Climate, 25, 4699-4720, - doi: 10.1175/JCLI-D-11-00267.1. +* Lauer, A., L. Bock, B. Hassler, M. Schröder, and M. Stengel, Cloud climatologies + from global climate models - a comparison of CMIP5 and CMIP6 models with satellite + data, J. Climate, 36(2), doi: 10.1175/JCLI-D-22-0181.1, 2023. Example plots ------------- .. _fig_cloud_1: -.. figure:: /recipes/figures/clouds/liq_h2o_path_multi.png +.. figure:: /recipes/figures/clouds/clouds_lwp_annual.png :align: center The 20-yr average LWP (1986-2005) from the CMIP5 historical model runs and the multi-model mean in comparison with the UWisc satellite climatology - (1988-2007) based on SSM/I, TMI, and AMSR-E (O'Dell et al. 2008). + (1988-2007) based on SSM/I, TMI, and AMSR-E (O'Dell et al. 2008). Produced + with recipe_lauer13jclim.yml (diagnostic script clouds.ncl). .. _fig_cloud_2: -.. figure:: /recipes/figures/clouds/liq_h2o_taylor.png +.. figure:: /recipes/figures/clouds/clouds_taylor_clt_annual.png :align: center :width: 7cm Taylor diagram showing the 20-yr annual average performance of CMIP5 models - for total cloud fraction as compared to MODIS satellite observations. + for total cloud fraction as compared to MODIS satellite observations. Produced + with recipe_lauer13jclim.yml (diagnostic script clouds_taylor.ncl). .. _fig_cloud_3: -.. figure:: /recipes/figures/clouds/cloud_sweffect.png +.. figure:: /recipes/figures/clouds/clouds_ipcc_swcre_annual.png :align: center :width: 9cm -.. figure:: /recipes/figures/clouds/cloud_lweffect.png +.. figure:: /recipes/figures/clouds/clouds_ipcc_lwcre_annual.png :align: center :width: 9cm -.. figure:: /recipes/figures/clouds/cloud_neteffect.png +.. figure:: /recipes/figures/clouds/clouds_ipcc_netcre_annual.png :align: center :width: 9cm 20-year average (1986-2005) annual mean cloud radiative effects of CMIP5 - models against the CERES EBAF (2001–2012). Top row shows the shortwave + models against the CERES-EBAF (2001–2012). Top row shows the shortwave effect; middle row the longwave effect, and bottom row the net effect. - Multi-model mean biases against CERES EBAF are shown on the left, whereas the - right panels show zonal averages from CERES EBAF (thick black), the + Multi-model mean biases against CERES-EBAF are shown on the left, whereas the + right panels show zonal averages from CERES-EBAF (thick black), the individual CMIP5 models (thin gray lines) and the multi-model mean (thick - red line). Similar to Figure 9.5 of Flato et al. (2013). + red line). Similar to Figure 9.5 of `Flato et al., 2013`_. Produced + with recipe_clouds_ipcc.yml (diagnostic script clouds_ipcc.ncl). .. _fig_cloud_4: -.. figure:: /recipes/figures/clouds/cloud_var_multi.png +.. figure:: /recipes/figures/clouds/clouds_interannual_pr.png :align: center Interannual variability of modeled and observed (GPCP) precipitation rates estimated as relative temporal standard deviation from 20 years (1986-2005) - of data. The temporal standard devitions are calculated from monthly - anomalies after subtracting the climatological mean seasonal cycle. + of data. The temporal standard deviations are calculated from monthly + anomalies after subtracting the climatological mean seasonal cycle. Produced + with recipe_lauer13jclim.yml (clouds_interannual.ncl). + +.. _fig_cloud_5: +.. 
figure:: /recipes/figures/clouds/clouds_zonal_clcalipso_annual_cmip6.png
+   :align: center
+   :width: 14cm
+
+   Zonal mean of the multi-year annual mean cloud fraction as seen from
+   CALIPSO from CMIP6 models in comparison to CALIPSO-GOCCP data. Produced
+   with recipe_lauer22jclim_fig3-4_zonal.yml (diagnostic script clouds_zonal.ncl).
+
+.. _fig_cloud_6:
+.. figure:: /recipes/figures/clouds/clouds_scatter_clt_swcre_so_cmip6.png
+   :align: center
+   :width: 10cm
+
+   Multi-year seasonal average (December-January-February) of the shortwave
+   cloud radiative effect (W m\ :sup:`-2`) vs. total cloud fraction (clt, %) averaged over the
+   Southern Ocean defined as latitude belt 30°S-65°S (ocean grid cells only).
+   Shown are the CMIP6 multi-model mean (red filled circles and lines) and
+   observational estimates from ESACCI-CLOUD (black circles and lines).
+   The red shaded areas represent the range between the 10th and 90th percentiles
+   of the results from all individual models. Produced with
+   recipe_lauer22jclim_fig9-11ab_scatter.yml (diagnostic script clouds_scatter.ncl).
+
+.. _fig_cloud_7:
+.. figure:: /recipes/figures/clouds/clouds_pdf_clt_so_cmip6_line.png
+   :align: center
+   :width: 8cm
+
+   Frequency distribution of monthly mean total cloud cover from CMIP6 models
+   in comparison to ESACCI-CLOUD data. The red curve shows the multi-model average,
+   the blue curve the ESACCI-CLOUD data and the thin gray lines the individual
+   models. The red shading shows ±1 standard deviation of the inter-model spread.
+   Produced with recipe_lauer22jclim_fig9-11c_pdf.yml (diagnostic script clouds_pdf.ncl).
+
+.. _fig_cloud_8:
+.. figure:: /recipes/figures/clouds/clouds_dyn_matrix_ts_wap_clt_cmip6_ocean.png
+   :align: center
+   :width: 8cm
+
+   2-dimensional distribution of average total cloud cover (clt) binned by sea
+   surface temperature (SST, x-axis) and vertical velocity at 500 hPa (ω\ :sub:`500`, y-axis)
+   averaged over 20 years and all grid cells over the ocean. Produced with
+   recipe_lauer22jclim_fig8_dyn.yml (diagnostic script clouds_dyn_matrix.ncl).
diff --git a/doc/sphinx/source/recipes/recipe_cmorizers.rst b/doc/sphinx/source/recipes/recipe_cmorizers.rst
new file mode 100644
index 0000000000..cfadd7dc53
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_cmorizers.rst
@@ -0,0 +1,100 @@
+.. _recipe_cmorizers:
+
+CMORizer recipes
+=================
+
+Overview
+--------
+
+These are CMORizer recipes calling CMORizer diagnostic scripts.
+
+ESMValCore supports ERA5 hourly and monthly datasets in their native
+format, see :ref:`inputdata_native_datasets`
+and the `ERA5 data documentation `_.
+It may be useful in some cases to create ERA5 daily CMORized data. This can be
+achieved by using a CMORizer *recipe*,
+see `recipe_daily_era5.yml `_.
+This recipe reads native, hourly ERA5 data, applies a daily aggregation
+preprocessor, and then calls a diagnostic that operates on the data. In this
+example, the diagnostic renames the files to the standard OBS6 file names. The
+output is thus daily, CMORized ERA5 data that can be used through the OBS6
+project. As such, this example recipe creates a local pool of CMORized data.
+The advantage, in this case, is that the daily aggregation is performed only
+once, which can save a lot of time and compute if it is used often.
+
+The example CMORizer recipe can be run like any other ESMValTool recipe:
+
+.. code-block:: bash
+
+    esmvaltool run cmorizers/recipe_daily_era5.yml
+
+
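The daily aggregation is done with standard preprocessor functions; a minimal
+sketch of what such a preprocessor section could look like is given below
+(based on the settings documented later on this page; the recipe as
+distributed may differ in detail):
+
+.. code-block:: yaml
+
+   preprocessors:
+     daily_mean:
+       daily_statistics:
+         operator: mean
+     add_one_day:
+       extract_time:
+         start_year: 1990
+         start_month: 1
+         start_day: 1
+         end_year: 1991
+         end_month: 1
+         end_day: 1
+
+Note that the ``recipe_daily_era5.yml`` adds the next day of the new year to
+the input data. 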
This is because one of the fixes needed for the ERA5 data is to
+shift the time axis of non-instantaneous variables half an hour back in time,
+resulting in a missing record on the last day of the year. ERA5 data can be
+downloaded using `era5cli `_.
+
+Available recipes and diagnostics
+---------------------------------
+
+Recipes are stored in esmvaltool/recipes/
+
+ * cmorizers/recipe_daily_era5.yml
+
+Diagnostics are stored in esmvaltool/diag_scripts/
+
+ * cmorizers/era5.py: generates the output filenames
+
+
+User settings in recipe
+-----------------------
+
+#. cmorizers/recipe_daily_era5.yml
+
+   *Required add_one_day preprocessor settings:*
+
+   * start_year: 1990
+   * start_month: 1
+   * start_day: 1
+   * end_year: 1991
+   * end_month: 1
+   * end_day: 1
+
+   The following settings should not be changed:
+
+   * daily_mean:
+       operator: mean
+   * daily_min:
+       operator: min
+   * daily_max:
+       operator: max
+
+Variables
+---------
+
+#. cmorizers/recipe_daily_era5.yml
+
+   * clt
+   * evspsbl
+   * evspsblpot
+   * mrro
+   * pr
+   * prsn
+   * ps
+   * psl
+   * rlds
+   * rls
+   * rsds
+   * rsdt
+   * rss
+   * tas
+   * tasmax
+   * tasmin
+   * tdps
+   * ts
+   * tsn
+   * uas
+   * vas
+
+References
+----------
+
+* Hersbach, H., et al., Quarterly Journal of the Royal Meteorological Society, 730, 1999-2049, doi:10.1002/qj.3803, 2020.
diff --git a/doc/sphinx/source/recipes/recipe_cmug_h2o.rst b/doc/sphinx/source/recipes/recipe_cmug_h2o.rst
new file mode 100644
index 0000000000..b5306fb85d
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_cmug_h2o.rst
@@ -0,0 +1,156 @@
+.. _recipes_cmug_h2o:
+
+Evaluate water vapor short wave radiance absorption schemes of ESMs against observations, including ESACCI data
+==========================================================================================================================
+
+Overview
+--------
+
+The recipe contains several diagnostics to use ESACCI water vapour data to evaluate CMIP models.
+
+The diagnostic deangelisf3f4.py reproduces figures 3 and 4 from `DeAngelis et al. (2015)`_;
+see also doc/sphinx/source/recipes/recipe_deangelis15nat.rst.
+This paper compares models with different schemes for water vapor short wave radiance absorption against observations.
+Schemes using pseudo-k-distributions with more than 20 exponential terms show the best results.
+
+The diagnostic diag_tropopause.py plots a given variable at the cold point tropopause height;
+here, Specific Humidity (hus) is used. This will be calculated from the ESACCI water vapour data CDR-4, which are planned to consist of
+three-dimensional vertically resolved monthly mean water vapour data (in ppmv) with a
+spatial resolution of 100 km, covering the troposphere and lower stratosphere.
+The envisaged coverage is 2010-2014. The calculation of hus from water vapour in ppmv will be part of the cmorizer.
+Here, ERA-Interim data are used.
+
+The diagnostic diag_tropopause_zonalmean.py plots the zonal mean of a given variable at
+all pressure levels between 250 and 1 hPa and at the cold point tropopause height.
+Here, Specific Humidity (hus) is used. This will be calculated from the
+ESACCI water vapour data CDR-3, which are planned to contain
+the vertically resolved water vapour ECV in units of ppmv (volume mixing ratio) and will be provided as
+zonal monthly means on the SPARC Data Initiative latitude/pressure level grid
+(SPARC, 2017; Hegglin et al., 2013). It covers the vertical range between 250 hPa and 1 hPa,
+and the time period 1985 to the end of 2019. The calculation of hus from water vapour in ppmv will be
+part of the cmorizer. 
Here, ERA-Interim data are used.
+
+
+.. _`DeAngelis et al. (2015)`: https://www.nature.com/articles/nature15770
+
+
+Available recipes and diagnostics
+---------------------------------
+
+Recipes are stored in recipes/
+
+ * recipe_cmug_h2o.yml
+
+Diagnostics are stored in diag_scripts/
+
+ * deangelis15nat/deangelisf3f4.py
+
+ * cmug_h2o/diag_tropopause.py
+
+ * cmug_h2o/diag_tropopause_zonalmean.py
+
+
+User settings in recipe
+-----------------------
+
+The recipe can be run with different CMIP5 and CMIP6 models.
+
+deangelisf3f4.py:
+For each model, two experiments must be given:
+a pre-industrial control run and a scenario with 4 times CO\ :sub:`2`\.
+Ideally, 150 years should be given; shorter time series work as well.
+Currently, HOAPS data are included as a placeholder for the expected ESACCI-WV data, type CDR-2:
+gridded monthly time series of TCWV in units of kg/m2 (corresponds to prw)
+that cover the global land and ocean areas with a spatial resolution of 0.05° / 0.5°
+for the period July 2002 to December 2017.
+
+
+Variables
+---------
+
+deangelisf3f4.py:
+
+* *rsnstcs* (atmos, monthly, longitude, latitude, time)
+* *rsnstcsnorm* (atmos, monthly, longitude, latitude, time)
+* *prw* (atmos, monthly, longitude, latitude, time)
+* *tas* (atmos, monthly, longitude, latitude, time)
+
+
+diag_tropopause.py:
+
+* *hus* (atmos, monthly, longitude, latitude, time, plev)
+* *ta* (atmos, monthly, longitude, latitude, time, plev)
+
+
+diag_tropopause_zonalmean.py:
+
+* *hus* (atmos, monthly, longitude, latitude, time, plev)
+* *ta* (atmos, monthly, longitude, latitude, time, plev)
+
+
+Observations and reformat scripts
+---------------------------------
+
+deangelisf3f4.py:
+
+* *rsnstcs*:
+   CERES-EBAF
+
+* *prw*:
+   HOAPS, planned for ESACCI-WV data, type CDR-2
+
+diag_tropopause.py:
+
+* *hus*:
+   ERA-Interim, ESACCI water vapour planned
+
+diag_tropopause_zonalmean.py:
+
+* *hus*:
+   ERA-Interim, ESACCI water vapour planned
+
+
+References
+----------
+
+* DeAngelis, A. M., Qu, X., Zelinka, M. D., and Hall, A.: An observational radiative constraint on hydrologic cycle intensification, Nature, 528, 249, 2015.
+
+
+Example plots
+-------------
+
+
+
+.. _fig_deangelis_cmug_cdr2:
+.. figure:: /recipes/figures/deangelis15nat/fig_deangelis_cmug_cdr2.png
+   :align: center
+   :width: 50%
+
+   Scatter plot and regression line computed between the ratio of the change of net short wave radiation (rsnst) and the change of the Water Vapor Path (prw) against the ratio of the change of net short wave radiation for clear sky (rsnstcs) and the change of surface temperature (tas). The width of horizontal shading for models and the vertical dashed lines for observations (Obs.) represent statistical uncertainties of the ratio, given as the 95% confidence interval (CI) of the regression slope to the rsnst versus prw curve. For the prw observations, ESACCI CDR-2 data from 2003 to 2014 are used.
+
+.. _fig_ERA-Interim_Cold_point_tropopause_Specific_Humidity_map:
+.. figure:: /recipes/figures/cmug_h2o/fig_ERA-Interim_Cold_point_tropopause_Specific_Humidity_map.png
+   :align: center
+   :width: 50%
+
+   Map of the average Specific Humidity (hus) at the cold point tropopause from ERA-Interim data. The diagnostic averages the complete time series, here 2010-2014.
+
+.. _fig_ERA-Interim_Cold_point_tropopause_Specific_Humidity:
+.. 
figure:: /recipes/figures/cmug_h2o/fig_ERA-Interim_Cold_point_tropopause_Specific_Humidity.png
+   :align: center
+   :width: 50%
+
+   Latitude versus time plot of the Specific Humidity (hus) at the cold point tropopause from ERA-Interim data.
+
+.. _fig_ERA-Interim_Zonal_mean_Specific_Humidity:
+.. figure:: /recipes/figures/cmug_h2o/fig_ERA-Interim_Zonal_mean_Specific_Humidity.png
+   :align: center
+   :width: 50%
+
+   Zonal average Specific Humidity (hus) between 250 and 1 hPa from ERA-Interim data. The diagnostic averages the complete time series, here 1985-2014.
+
+.. _fig_profile_Specific_Humidity:
+.. figure:: /recipes/figures/cmug_h2o/fig_profile_Specific_Humidity.png
+   :align: center
+   :width: 50%
+
+   Average Specific Humidity (hus) profile between 250 and 1 hPa from ERA-Interim and CMIP6 model data. The diagnostic averages the complete time series, here 1985-2014.
+
+
diff --git a/doc/sphinx/source/recipes/recipe_collins13ipcc.rst b/doc/sphinx/source/recipes/recipe_collins13ipcc.rst
new file mode 100644
index 0000000000..8d5a8c85ee
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_collins13ipcc.rst
@@ -0,0 +1,464 @@
+.. _nml_collins:
+
+IPCC AR5 Chapter 12 (selected figures)
+======================================
+
+Overview
+--------
+
+The goal is to create a standard recipe for creating selected figures from
+IPCC AR5 Chapter 12 on "Long-term Climate Change: Projections, Commitments
+and Irreversibility". These include figures showing the change in a variable
+between historical and future periods, e.g. maps (2D variables), zonal means
+(3D variables), timeseries showing the change in certain variables from
+historical to future periods for multiple scenarios, and maps visualizing
+change in variables normalized by global mean temperature change (pattern
+scaling) as in Collins et al., 2013.
+
+
+Available recipes and diagnostics
+-----------------------------------
+
+Recipes are stored in recipes/
+
+* recipe_collins13ipcc.yml
+
+Diagnostics are stored in diag_scripts/
+
+* ipcc_ar5/ch12_map_diff_each_model_fig12-9.ncl: calculates the difference between
+  future and historical runs for one scenario for each given model
+  individually on their native grid and plots all of them in one figure.
+  As in Figure 12.9 in AR5.
+* ipcc_ar5/ch12_ts_line_mean_spread.ncl: calculates time series for one variable,
+  change in future relative to base period in historical, multi-model mean as
+  well as spread around it (as standard deviation).
+* ipcc_ar5/ch12_plot_ts_line_mean_spread.ncl: plots the timeseries multi-model mean
+  and spread calculated above. As in Figure 12.5 in AR5.
+* ipcc_ar5/ch12_calc_IAV_for_stippandhatch.ncl: calculates the interannual variability
+  over piControl runs, either over the whole time period or in chunks over
+  some years.
+* ipcc_ar5/ch12_calc_map_diff_mmm_stippandhatch.ncl: calculates the difference between
+  future and historical periods for each given model and then calculates
+  multi-model mean as well as significance. Significant is where the
+  multi-model mean change is greater than two standard deviations of the
+  internal variability and where at least 90% of the models agree on the
+  sign of change. Not significant is where the multi-model mean change is
+  less than one standard deviation of internal variability.
+* ipcc_ar5/ch12_plot_map_diff_mmm_stipp.ncl: plots multi-model mean maps calculated
+  above including stippling where significant and hatching where not
+  significant. As in Figure 12.11 in AR5.
+* ipcc_ar5/ch12_calc_zonal_cont_diff_mmm_stippandhatch.ncl: calculates zonal means
+  and the difference between future and historical periods for each given
+  model and then calculates multi-model mean as well as significance as above.
+* ipcc_ar5/ch12_plot_zonal_diff_mmm_stipp.ncl: plots the multi-model mean zonal plots
+  calculated above including stippling where significant and hatching where
+  not significant. As in Figure 12.12 in AR5.
+* ipcc_ar5/ch12_calc_map_diff_scaleT_mmm_stipp.ncl: calculates the change in variable
+  between future and historical period normalized by global mean temperature
+  change of each given model and scenario. Then averages over all realizations
+  and calculates significance. Significant is where the mean change averaged
+  over all realizations is larger than the 95% percentile of the distribution
+  of models (assumed to be Gaussian). Can be plotted using
+  ipcc_ar5/ch12_plot_map_diff_mmm_stipp.ncl.
+* seaice/seaice_ecs.ncl: scatter plot of historical trend in September
+  Arctic sea ice extent (SSIE) vs historical long-term mean SSIE (similar to
+  Fig. 12.31a in AR5) and historical SSIE trend vs YOD RCP8.5 (similar to Fig. 12.31d
+  in AR5).
+* seaice/seaice_yod.ncl: calculation of year of near disappearance of Arctic sea ice
+  (similar to Fig 12.31e in AR5)
+* ipcc_ar5/ch12_snw_area_change_fig12-32.ncl: calculates snow area extent in a region
+  (e.g. Northern Hemisphere) and season (e.g. Northern Hemisphere spring, March
+  & April) relative to a reference period (e.g. 1986-2005) and spread over
+  models as in Fig. 12.32 of IPCC AR5. Can be plotted using
+  ipcc_ar5/ch12_plot_ts_line_mean_spread.ncl.
+
+User settings
+-------------
+
+#. Script ipcc_ar5/ch12_map_diff_each_model_fig12-9.ncl
+
+   *Required settings (script)*
+
+   * time_avg: time averaging ("annualclim", "seasonalclim")
+   * experiment: IPCC scenario, used to pair historical and rcp runs from
+     same model
+
+   *Optional settings (script)*
+
+   * projection: map projection, any valid ncl projection, default = Robinson
+   * max_vert: maximum number of plots in vertical
+   * max_hori: maximum number of plots in horizontal
+   * title: plot title
+   * colormap: alternative colormap, path to rgb file or ncl name
+   * diff_levs: list with contour levels for plots
+   * span: span whole colormap? (True, False, default = False)
+
+   *Required settings (variables)*
+
+   * project: CMIP5 (or CMIP6)
+   * mip: variable mip, generally Amon or Omon
+
+#. Script ipcc_ar5/ch12_ts_line_mean_spread.ncl
+
+   *Required settings (script)*
+
+   * scenarios: list with scenarios included in figure
+   * syears: list with start years in time periods (e.g. start of historical
+     period and rcps)
+   * eyears: list with end years in time periods (end year of historical runs
+     and rcps)
+   * begin_ref_year: start year of reference period (e.g. 1986)
+   * end_ref_year: end year of reference period (e.g. 2005)
+   * label: list with labels to use in legend depending on scenarios
+
+   *Optional settings (script)*
+
+   * spread: how many standard deviations to calculate the spread with?
+     default is 1.0; IPCC tas used 1.64
+   * model_nr: save number of model runs per period and scenario in netcdf
+     to print in plot? (True, False, default = False)
+   * ts_minlat: minimum latitude if not global
+   * ts_maxlat: maximum latitude if not global
+   * ts_minlon: minimum longitude if not global
+   * ts_maxlon: maximum longitude if not global
+
+   *Required settings (variables)*
+
+   * project: CMIP5 (or CMIP6)
+   * mip: variable mip, generally Amon or Omon
+
+
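As an illustration, a script entry for ch12_ts_line_mean_spread.ncl could
+look like the following sketch (scenarios, years and labels are illustrative
+values, not the settings of the distributed recipe):
+
+.. code-block:: yaml
+
+   scripts:
+     tsline:
+       script: ipcc_ar5/ch12_ts_line_mean_spread.ncl
+       scenarios: ["rcp26", "rcp45", "rcp85"]
+       syears: [1850, 2006]   # start of historical period and rcps
+       eyears: [2005, 2100]   # end of historical period and rcps
+       begin_ref_year: 1986
+       end_ref_year: 2005
+       label: ["Historical", "RCP2.6", "RCP4.5", "RCP8.5"]
+       spread: 1.64
+
+#. 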
Script ipcc_ar5/ch12_plot_ts_line_mean_spread.ncl:
+
+   *Required settings (script)*
+
+   * ancestors: variable and diagnostics that calculated data to be plotted
+
+   *Optional settings (script)*
+
+   * title: specify plot title
+   * yaxis: specify y-axis title
+   * ymin: minimum value on y-axis, default calculated from data
+   * ymax: maximum value on y-axis
+   * colormap: alternative colormap, path to rgb file or ncl name
+
+.. _ch12_calc_IAV_for_stippandhatch.ncl:
+
+#. Script ipcc_ar5/ch12_calc_IAV_for_stippandhatch.ncl:
+
+   *Required settings (script)*
+
+   * time_avg: time averaging ("annualclim", "seasonalclim"), needs to be
+     consistent with calculation in ch12_calc_map_diff_mmm_stippandhatch.ncl
+
+   *Optional settings (script)*
+
+   * periodlength: length of period in years to calculate variability over,
+     default is total time period
+   * iavmode: calculate IAV from multi-model mean or save individual models
+     ("each": save individual models, "mmm": multi-model mean, default),
+     needs to be consistent with ch12_calc_map_diff_mmm_stippandhatch.ncl
+
+   *Required settings (variables)*
+
+   * project: CMIP5 (or CMIP6)
+   * mip: variable mip, generally Amon or Omon
+   * exp: piControl
+   * preprocessor: which preprocessor to use, depends on the dimension of the
+     variable; for 2D variables the preprocessor only needs to regrid, for 3D
+     variables it also needs to extract levels, either based on a
+     reference_dataset or on explicitly specified levels
+
+   *Optional settings (variables)*
+
+   * reference_dataset: the reference dataset for level extraction in case of
+     3D variables.
+
+#. Script ipcc_ar5/ch12_calc_map_diff_mmm_stippandhatch.ncl:
+
+   *Required settings (script)*
+
+   * ancestors: variable and diagnostics that calculated interannual
+     variability for stippling and hatching
+   * time_avg: time averaging ("annualclim", "seasonalclim")
+   * scenarios: list with scenarios to be included
+   * periods: list with start years of periods to be included
+   * label: list with labels to use in legend depending on scenarios
+
+   *Optional settings (script)*
+
+   * seasons: list with seasons index if time_avg "seasonalclim" (then
+     required), DJF:0, MAM:1, JJA:2, SON:3
+   * iavmode: calculate IAV from multi-model mean or save individual models
+     ("each": save individual models, "mmm": multi-model mean, default),
+     needs to be consistent with ch12_calc_IAV_for_stippandhatch.ncl
+   * percent: determines if difference expressed in percent (0, 1, default = 0)
+
+   *Required settings (variables)*
+
+   * project: CMIP5 (or CMIP6)
+   * mip: variable mip, generally Amon or Omon
+   * preprocessor: which preprocessor to use, preprocessor only needs to regrid
+
+#. Script ipcc_ar5/ch12_plot_map_diff_mmm_stipp.ncl:
+
+   *Required settings (script)*
+
+   * ancestors: variable and diagnostics that calculated field to be plotted
+
+   *Optional settings (script)*
+
+   * projection: map projection, any valid ncl projection, default = Robinson
+   * diff_levs: list with explicit levels for all contour plots
+   * max_vert: maximum number of plots in vertical
+   * max_hori: maximum number of plots in horizontal
+   * model_nr: save number of model runs per period and scenario in netcdf to
+     print in plot? (True, False, default = False)
+   * colormap: alternative colormap, path to rgb file or ncl name
+   * span: span whole colormap? (True, False, default = True)
+   * sig: plot stippling for significance? (True, False)
+   * not_sig: plot hatching for uncertainty? 
(True, False)
+   * pltname: alternative name for output plot, default is diagnostic +
+     varname + time_avg
+   * units: units written next to colorbar, e.g (~F35~J~F~C)
+
+#. Script ipcc_ar5/ch12_calc_zonal_cont_diff_mmm_stippandhatch.ncl:
+
+   *Required settings (script)*
+
+   * ancestors: variable and diagnostics that calculated interannual
+     variability for stippling and hatching
+   * time_avg: time averaging ("annualclim", "seasonalclim")
+   * scenarios: list with scenarios to be included
+   * periods: list with start years of periods to be included
+   * label: list with labels to use in legend depending on scenarios
+
+   *Optional settings (script)*
+
+   * base_cn: if contour lines of the base period should be drawn, the base
+     period field needs to be saved (True, False)
+   * seasons: list with seasons index if time_avg "seasonalclim" (then
+     required), DJF:0, MAM:1, JJA:2, SON:3
+   * iavmode: calculate IAV from multi-model mean or save individual models
+     ("each": save individual models, "mmm": multi-model mean, default),
+     needs to be consistent with ch12_calc_IAV_for_stippandhatch.ncl
+   * percent: determines if difference expressed in percent (0, 1, default = 0)
+
+   *Required settings (variables)*
+
+   * project: CMIP5 (or CMIP6)
+   * mip: variable mip, generally Amon or Omon
+   * preprocessor: which preprocessor to use; the preprocessor needs to regrid,
+     extract levels and calculate the zonal mean.
+
+   *Optional settings (variables)*
+
+   * reference_dataset: the reference dataset for level extraction
+
+#. Script ipcc_ar5/ch12_plot_zonal_diff_mmm_stipp.ncl:
+
+   *Required settings (script)*
+
+   * ancestors: variable and diagnostics that calculated field to be plotted
+
+   *Optional settings (script)*
+
+   * diff_levs: list with explicit levels for all contour plots
+   * max_vert: maximum number of plots in vertical
+   * max_hori: maximum number of plots in horizontal
+   * model_nr: save number of model runs per period and scenario in netcdf to
+     print in plot? (True, False, default = False)
+   * colormap: alternative colormap, path to rgb file or ncl name
+   * span: span whole colormap? (True, False, default = True)
+   * sig: plot stippling for significance? (True, False)
+   * not_sig: plot hatching for uncertainty? (True, False)
+   * pltname: alternative name for output plot, default is diagnostic +
+     varname + time_avg
+   * units: units written next to colorbar in ncl strings, e.g (m s~S~-1~N~)
+   * if base_cn: True in ch12_calc_zonal_cont_diff_mmm_stippandhatch.ncl
+     further settings to control contour lines:
+
+     * base_cnLevelSpacing: spacing between contour levels
+     * base_cnMinLevel: minimum contour line
+     * base_cnMaxLevel: maximum contour line
+
+#. Script ipcc_ar5/ch12_calc_map_diff_scaleT_mmm_stipp.ncl:
+
+   *Required settings (script)*
+
+   * time_avg: time averaging ("annualclim", "seasonalclim")
+   * scenarios: list with scenarios to be included
+   * periods: list with start years of periods to be included
+   * label: list with labels to use in legend depending on scenarios
+
+   *Optional settings (script)*
+
+   * seasons: list with seasons index if time_avg "seasonalclim"
+     (then required), DJF:0, MAM:1, JJA:2, SON:3
+   * percent: determines if difference expressed in percent (0, 1, default = 0)
+
+   *Required settings (variables)*
+
+   * project: CMIP5 (or CMIP6)
+   * mip: variable mip, generally Amon or Omon
+   * preprocessor: which preprocessor to use, preprocessor only needs to regrid
+
+
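The interannual-variability script and the difference/plotting scripts are
+chained via the ``ancestors`` mechanism; a minimal sketch (diagnostic names,
+variable, scenario and period are illustrative, and the exact ancestor paths
+depend on how the diagnostics are laid out in the recipe) could look like:
+
+.. code-block:: yaml
+
+   scripts:
+     calc_iav:
+       script: ipcc_ar5/ch12_calc_IAV_for_stippandhatch.ncl
+       time_avg: annualclim
+     calc_diff:
+       script: ipcc_ar5/ch12_calc_map_diff_mmm_stippandhatch.ncl
+       ancestors: [tas, calc_iav]   # variable and the IAV diagnostic
+       time_avg: annualclim
+       scenarios: ["rcp45"]
+       periods: [2081]
+       label: ["RCP4.5"]
+     plot_diff:
+       script: ipcc_ar5/ch12_plot_map_diff_mmm_stipp.ncl
+       ancestors: [calc_diff]
+       sig: true
+       not_sig: true
+
+#. 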
Script ipcc_ar5/ch12_snw_area_change_fig12-32.ncl: + + *Required settings (script)* + + * scenarios: list with scenarios included in figure + * syears: list with start years in time periods (e.g. start of historical + period and rcps) + * eyears: list with end years in time periods (end year of historical runs + and rcps) + * begin_ref_year: start year of reference period (e.g. 1986) + * end_ref_year: end year of reference period (e.g 2005) + * months: first letters of months included in analysis? e.g. for MA + (March + April) for Northern Hemisphere + * label: list with labels to use in legend depending on scenarios + + *Optional settings (script)* + + * spread: how many standard deviations to calculate the spread with? + default is 1., ipcc tas used 1.64 + * model_nr: save number of model runs per period and scenario in netcdf + to print in plot? (True, False, default = False) + * colormap: alternative colormap, path to rgb file or ncl name + * ts_minlat: minimum latitude if not global + * ts_maxlat: maximum latitude if not global + * ts_minlon: minimum longitude if not global + * ts_maxlon: maximum longitude if not global + + *Required settings (variables)* + + * project: CMIP5 (or CMIP6) + * mip: variable mip, LImon + * fx_files: [sftlf, sftgif] + +#. Script seaice/seaice_ecs.ncl + + *Required settings (scripts)* + + * hist_exp: name of historical experiment (string) + * month: selected month (1, 2, ..., 12) or annual mean ("A") + * rcp_exp: name of RCP experiment (string) + * region: region to be analyzed ( "Arctic" or "Antarctic") + + *Optional settings (scripts)* + + * fill_pole_hole: fill observational hole at North pole (default: False) + * styleset: color style (e.g. "CMIP5") + + *Optional settings (variables)* + + * reference_dataset: reference dataset + +#. Script seaice/seaice_yod.ncl + + *Required settings (scripts)* + + * month: selected month (1, 2, ..., 12) or annual mean ("A") + * region: region to be analyzed ( "Arctic" or "Antarctic") + + *Optional settings (scripts)* + + * fill_pole_hole: fill observational hole at North pole, Default: False + * wgt_file: netCDF containing pre-determined model weights + + *Optional settings (variables)* + + * ref_model: array of references plotted as vertical lines + + +Variables +--------- + +*Note: These are the variables tested and used in IPCC AR5. However, the code is flexible and in theory other variables of the same kind can be used.* + +* areacello (fx, longitude latitude) +* clt (atmos, monthly mean, longitude latitude time) +* evspsbl (atmos, monthly mean, longitude latitude time) +* hurs (atmos, monthly mean, longitude latitude time) +* mrro (land, monthly mean, longitude latitude time) +* mrsos (land, monthly mean, longitude latitude time) +* pr (atmos, monthly mean, longitude latitude time) +* psl (atmos, monthly mean, longitude latitude time) +* rlut, rsut, rtmt (atmos, monthly mean, longitude latitude time) +* sic (ocean-ice, monthly mean, longitude latitude time) +* snw (land, monthly mean, longitude latitude time) +* sos (ocean, monthly mean, longitude latitude time) +* ta (atmos, monthly mean, longitude latitude lev time) +* tas (atmos, monthly mean, longitude latitude time) +* thetao (ocean, monthly mean, longitude latitude lev time) +* ua (atmos, monthly mean, longitude latitude lev time) + +Observations and reformat scripts +--------------------------------- + +* HadISST (sic - esmvaltool/cmorizers/data/formatters/datasets/hadisst.ncl) + +Reference +--------- + +* Collins, M., R. Knutti, J. Arblaster, J.-L. 
Dufresne, T. Fichefet, P.
+  Friedlingstein, X. Gao, W.J. Gutowski, T. Johns, G. Krinner, M. Shongwe, C.
+  Tebaldi, A.J. Weaver and M. Wehner, 2013: Long-term Climate Change:
+  Projections, Commitments and Irreversibility. In: Climate Change 2013: The
+  Physical Science Basis. Contribution of Working Group I to the Fifth
+  Assessment Report of the Intergovernmental Panel on Climate Change [Stocker,
+  T.F., D. Qin, G.-K. Plattner, M. Tignor, S.K. Allen, J. Boschung, A. Nauels,
+  Y. Xia, V. Bex and P.M. Midgley (eds.)]. Cambridge University Press,
+  Cambridge, United Kingdom and New York, NY, USA.
+
+
+Example plots
+-------------
+
+.. figure:: /recipes/figures/collins13ipcc/collins_fig_1.png
+   :width: 85%
+   :align: center
+
+   Surface air temperature change in 2081–2100 displayed as anomalies with
+   respect to 1986–2005 for RCP4.5 from individual CMIP5 models.
+
+
+.. figure:: /recipes/figures/collins13ipcc/collins_fig_2.png
+   :width: 50%
+   :align: center
+
+   Time series of global annual mean surface air temperature anomalies
+   (relative to 1986–2005) from CMIP5 concentration-driven experiments.
+
+.. figure:: /recipes/figures/collins13ipcc/collins_fig_4.png
+   :width: 70%
+   :align: center
+
+   Multi-model CMIP5 average percentage change in seasonal mean precipitation
+   relative to the reference period 1986–2005 averaged over the periods
+   2081–2100 and 2181–2200 under the RCP8.5 forcing scenario. Hatching
+   indicates regions where the multi-model mean change is less than one
+   standard deviation of internal variability. Stippling indicates regions
+   where the multi-model mean change is greater than two standard deviations
+   of internal variability and where at least 90% of models agree on the sign
+   of change.
+
+.. figure:: /recipes/figures/collins13ipcc/collins_fig_3.png
+   :width: 70%
+   :align: center
+
+   Temperature change patterns scaled to 1°C of global mean surface
+   temperature change.
+
+.. figure:: /recipes/figures/seaice/SSIE-MEAN_vs_YOD_sic_extend_Arctic_September_1960-2100.png
+   :align: center
+   :width: 9cm
+
+   Scatter plot of mean historical September Arctic sea ice extent vs 1st year of disappearance
+   (RCP8.5) (similar to IPCC AR5 Chapter 12, Fig. 12.31a).
+
+.. figure:: /recipes/figures/seaice/timeseries_rcp85.png
+   :align: center
+   :width: 12cm
+
+   Time series of September Arctic sea ice extent for individual CMIP5 models,
+   multi-model mean and multi-model standard deviation, year of disappearance
+   (similar to IPCC AR5 Chapter 12, Fig. 12.31e).
diff --git a/doc/sphinx/source/recipes/recipe_combined_climate_extreme_index.rst b/doc/sphinx/source/recipes/recipe_combined_climate_extreme_index.rst
deleted file mode 100644
index ddd06e6b8a..0000000000
--- a/doc/sphinx/source/recipes/recipe_combined_climate_extreme_index.rst
+++ /dev/null
@@ -1,100 +0,0 @@
-.. _recipes_insurance_risk_index_wp7:
-
-Combined Climate Extreme Index
-====================================================
-
-Overview
---------
-
-The goal of this diagnostic is to compute time series of a number of extreme events: heatwave, coldwave, heavy precipitation, drought and high wind. Then, the user can combine these different components (with or without weights). The result is an index similar to the Climate Extremes Index (CEI; Karl et al., 1996), the modified CEI (mCEI; Gleason et al., 2008) or the Actuaries Climate Index (ACI; American Academy of Actuaries, 2018). The output consists of a netcdf file containing the area-weighted and multi-model multi-metric index. 
This recipe can be applied to data with any temporal resolution, and the running average is computed based on the user-defined window length (e.g. a window length of 5 would compute the 5-day running mean when applied to monthly data, or 5-month running mean when applied to monthly data). - -In recipe_extreme_index_wp7.yml, after defining the area and reference and projection period, the metric indicating the extreme index is selected. The options are -* t90p to compute the number of days when the maximum temperature exceeds the 90th percentile, -* t10p to compute the number of days when the minimum temperature falls below the 10th percentile, -* Wx to compute the number of days when wind power (third power of wind speed) exceeds the 90th percentile, -* cdd to compute the maximum length of a dry spell, defined as the maximum number of consecutive days when the daily precipitation is lower than 1 mm, and -* rx5day to compute the maximum precipitation accumulated during 5 consecutive days. - -Available recipes and diagnostics ------------------------------------ - -Recipes are stored in recipes/ - -* recipe_combined_indices_wp6.yml - -* recipe_extreme_index_wp7.yml - -Diagnostics are stored in diag_scripts/magic_bsc/ - -* combined_indices_wp6.r : calculates the area-weighted means and multi-model means, with or without weights - -* risk_index.r - - - -User settings -------------- - -User setting files are stored in recipes/ - -#. recipe_combined_indices_wp6.yml - - *Required settings for script* - - * weights: either ‘equal’, for equal weights, ‘null’ for no weights, or a vector of integers the same length as the number of input datasets. - * running_mean: an integer specifying the length of the window to be used for computing the running mean (does not work yet). - * moninf: instead of running_mean an integer can be given to determine the first month of the seasonal mean to be computed (does not work yet). - * monsup: an integer specifying the last month to be computed (does not work yet). - * Multi_year_average: ‘true’ or ‘false’ to specify whether to compute the mean across all input years (does not work yet). - -#. recipe_extreme_index_wp7.yml - - *Required settings for script* - - * metric: the metric to be computed, t90p, t10p, Wx, cdd, rx5day. See overview for a description of the different metrics (cdd does not work yet). - - -Variables ---------- - -* tasmax, tasmin, pr or sfcWind (atmos, daily, longitude, latitude, time) - - -Observations and reformat scripts ---------------------------------- - -*None* - -References ----------- - -* Alexander L.V. and Coauthors (2006). Global observed changes in daily climate extremes of temperature and precipitation. J. Geophys. Res., 111, D05109. https://doi.org/10.1029/2005JD006290 - -* American Academy of Actuaries, Canadian Institute of Actuaries, Casualty Actuarial Society and Society of Actuaries. Actuaries Climate Index. http://actuariesclimateindex.org (2018-10-06). - -* Donat, M., and Coauthors (2013). Updated analyses of temperature and precipitation extreme indices since the beginning of the twentieth century: The HadEX2 dataset. J. Geophys. Res., 118, 2098–2118, https://doi.org/10.1002/jgrd.50150. - -* Fouillet, A., Rey, G., Laurent, F., Pavillon, G. Bellec, S., Guihenneuc-Jouyaux, C., Clavel J., Jougla, E. and Hémon, D. (2006) Excess mortality related to the August 2003 heat wave in France. Int. Arch. Occup. Environ. Health, 80, 16–24. https://doi.org/10.1007/s00420-006-0089-4 - -* Gleason, K.L., J.H. Lawrimore, D.H. Levinson, T.R. 
Karl, and D.J. Karoly (2008). A Revised U.S. Climate Extremes Index. J. Climate, 21, 2124-2137 https://doi.org/10.1175/2007JCLI1883.1
-
-* Meehl, G. A., and Coauthors (2000). An introduction to trends inextreme weather and climate events: Observations, socio-economic impacts, terrestrial ecological impacts, and model projections. Bull. Amer. Meteor. Soc., 81, 413–416. `doi: 10.1175/1520-0477(2000)081<0413:AITTIE>2.3.CO;2 `_
-
-* Whitman, S., G. Good, E. R. Donoghue, N. Benbow, W. Y. Shou and S. X. Mou (1997). Mortality in Chicago attributed to the July 1995 heat wave. Amer. J. Public Health, 87, 1515–1518. https://doi.org/10.2105/AJPH.87.9.1515
-
-* Zhang, Y., M. Nitschke, and P. Bi (2013). Risk factors for direct heat-related hospitalization during the 2009 Adelaide heat-wave: A case crossover study. Sci. Total Environ., 442, 1–5. https://doi.org/10.1016/j.scitotenv.2012.10.042
-
-* Zhang, X. , Alexander, L. , Hegerl, G. C., Jones, P. , Tank, A. K., Peterson, T. C., Trewin, B. and Zwiers, F. W. (2011). Indices for monitoring changes in extremes based on daily temperature and precipitation data. WIREs Clim Change, 2: 851-870. doi:10.1002/wcc.147. https://doi.org/10.1002/wcc.147
-
-
-
-Example plots
--------------
-
-.. _fig_combinedindices1:
-.. figure:: /recipes/figures/combined_climate_extreme_index/t90p_IPSL-CM5A-LR_rcp85_2020_2040.png
-   :align: center
-   :width: 14cm
-
-
-
diff --git a/doc/sphinx/source/recipes/recipe_combined_indices.rst b/doc/sphinx/source/recipes/recipe_combined_indices.rst
new file mode 100644
index 0000000000..e10e11e4ed
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_combined_indices.rst
@@ -0,0 +1,80 @@
+.. _recipes_combined_indices:
+
+Nino indices, North Atlantic Oscillation (NAO), Southern Oscillation Index (SOI)
+====================================================================================
+
+Overview
+--------
+
+The goal of this diagnostic is to compute indices based on area averages.
+
+In recipe_combined_indices.yml, after defining the period (historical or
+future projection), the variable is selected. The predefined areas are:
+
+* Nino 3
+* Nino 3.4
+* Nino 4
+* North Atlantic Oscillation (NAO)
+* Southern Oscillation Index (SOI)
+
+Available recipes and diagnostics
+-----------------------------------
+
+Recipes are stored in recipes/
+
+* recipe_combined_indices.yml
+
+Diagnostics are stored in diag_scripts/magic_bsc/
+
+* combined_indices.R: calculates the area-weighted means and multi-model means, with or without weights
+
+
+
+User settings
+-------------
+
+User setting files are stored in recipes/
+
+#. recipe_combined_indices.yml
+
+   *Required settings for script*
+
+   * region: one of the following strings: Nino3, Nino3.4, Nino4, NAO, SOI
+   * running_mean: an integer specifying the length of the window (in months) to be used for computing the running mean.
+   * moninf: an integer specifying the first month of the seasonal mean to be computed (from 1 to 12, corresponding to January to December).
+   * monsup: an integer specifying the last month of the seasonal mean to be computed (from 1 to 12, corresponding to January to December).
+   * standardized: ‘true’ or ‘false’ to specify whether to compute the standardization of the variable.
+
+
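As an illustration, a script entry for combined_indices.R could look like the
+following sketch, computing a DJF Nino 3.4 index (the values shown are
+illustrative, not the settings of the distributed recipe):
+
+.. code-block:: yaml
+
+   scripts:
+     nino34:
+       script: magic_bsc/combined_indices.R
+       region: "Nino3.4"
+       moninf: 12          # first month of the seasonal mean (December)
+       monsup: 2           # last month of the seasonal mean (February)
+       running_mean: 3
+       standardized: true
+
+   *Required settings for preprocessor (only for 3D variables)*
+
+   extract_levels:
+     * levels: [50000] # e.g. 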
for 500 hPa level + * scheme: nearest + +Variables +--------- + +* all variables (atmos/ocean, monthly, longitude, latitude, time) + + +Observations and reformat scripts +--------------------------------- + +*None* + +References +---------- + +* Trenberth, Kevin & National Center for Atmospheric Research Staff (Eds). Last modified 11 Jan 2019. "The Climate Data Guide: Nino SST Indices (Nino 1+2, 3, 3.4, 4; ONI and TNI)." Retrieved from https://climatedataguide.ucar.edu/climate-data/nino-sst-indices-nino-12-3-34-4-oni-and-tni. + + +Example plots +------------- + +.. _fig_combinedindices1: +.. figure:: /recipes/figures/Combined_Indices_Area_Average/Nino3.4_tos_Dec-Feb_running-mean__1950-2005.png + :align: center + :width: 14cm + +Time series of the standardized sea surface temperature (tos) area averaged over the Nino 3.4 region during the boreal winter (December-January-February). The time series correspond to the MPI-ESM-MR (red) and BCC-CSM1-1 (blue) models and their mean (black) during the period 1950-2005 for the ensemble r1i1p1 of the historical simulations. \ No newline at end of file diff --git a/doc/sphinx/source/recipes/recipe_consecdrydays.rst b/doc/sphinx/source/recipes/recipe_consecdrydays.rst index b5d04b0ab4..8235158bf8 100644 --- a/doc/sphinx/source/recipes/recipe_consecdrydays.rst +++ b/doc/sphinx/source/recipes/recipe_consecdrydays.rst @@ -1,9 +1,19 @@ +.. _recipes_consecdrydays: + Consecutive dry days ==================== Overview -------- -Meteorological drought can in its simplest form be described by a lack of precipitation. First, a wet day threshold is set, which can be either a limit related to measurement accuracy, or more directly process related to an amount that would break the drought. The diagnostic calculates the longest period of consecutive dry days, which is an indicator of the worst drought in the time series. Further, the diagnostic calculates the frequency of dry periods longer than a user defined number of days. +Meteorological drought can in its simplest form be described by a lack of +precipitation. +First, a wet day threshold is set, which can be either a limit related to +measurement accuracy, or, more directly, a process-related amount that +would break the drought. +The diagnostic calculates the longest period of consecutive dry days, which +is an indicator of the worst drought in the time series. +Further, the diagnostic calculates the frequency of dry periods longer than a +user-defined number of days. Available recipes and diagnostics @@ -30,8 +40,26 @@ User settings in recipe * frlim: the shortest number of consecutive dry days for entering statistic on frequency of dry periods. + *Optional settings (script)* + + Under ``plot``: + + * cmap: the name of a colormap. cmocean colormaps are also supported. + + * other keyword arguments to :func:`esmvaltool.diag_scripts.shared.plot.global_pcolormesh` can also be supplied. Variables --------- * pr (atmos, daily mean, time latitude longitude) + + +Example plots +------------- + +.. _fig_consecdrydays: +.. figure:: /recipes/figures/consecdrydays/consec_example_freq.png + :align: center + :width: 14cm + + Example of the number of occurrences of dry periods longer than five days in the period 2001 to 2002 for the CMIP5 model bcc-csm1-1-m. diff --git a/doc/sphinx/source/recipes/recipe_cox18nature.rst b/doc/sphinx/source/recipes/recipe_cox18nature.rst new file mode 100644 index 0000000000..7402ecd140 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_cox18nature.rst @@ -0,0 +1,129 @@ +..
_recipes_cox18nature: + +Emergent constraint on equilibrium climate sensitivity from global temperature variability +========================================================================================== + +Overview +-------- + +This recipe reproduces the emergent constraint proposed by `Cox et al. (2018)`_ +for the equilibrium climate sensitivity (ECS) using global temperature +variability. The latter is defined by a metric which can be calculated from the +global temperature variance (in time) :math:`\sigma_T` and the one-year-lag +autocorrelation of the global temperature :math:`\alpha_{1T}` by + +.. math:: + + \psi = \frac{\sigma_T}{\sqrt{-\ln(\alpha_{1T})}} + +Using the simple `Hasselmann model`_, they show that this quantity is linearly +correlated with the ECS. Since it only depends on the temporal evolution of the +global surface temperature, plenty of observational data is available, which +allows the construction of an emergent relationship. This method predicts an +ECS range of 2.2 K to 3.4 K (66% confidence limit). + +.. _`Cox et al. (2018)`: https://www.nature.com/articles/nature25450 +.. _`Hasselmann model`: https://onlinelibrary.wiley.com/doi/10.1111/j.2153-3490.1976.tb00696.x + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + + * recipe_cox18nature.yml + + +Diagnostics are stored in diag_scripts/ + + * emergent_constraints/cox18nature.py + * climate_metrics/ecs.py + * climate_metrics/psi.py + + +User settings in recipe +----------------------- + +* Preprocessor + + * ``area_statistics`` (*operation: mean*): Calculate global mean. + +* Script emergent_constraints/cox18nature.py + + See + :ref:`here`. + +* Script climate_metrics/ecs.py + + See :ref:`here`. + +.. _psi.py: + +* Script climate_metrics/psi.py + + * ``output_attributes``, *dict*, optional: Write additional attributes to + all output netcdf files. + * ``lag``, *int*, optional (default: 1): Lag (in years) for the + autocorrelation function. + * ``window_length``, *int*, optional (default: 55): Number of years used for + the moving window average. + + +Variables +--------- + +* *tas* (atmos, monthly, longitude, latitude, time) +* *tasa* (atmos, monthly, longitude, latitude, time) + + +Observations and reformat scripts +--------------------------------- + +* HadCRUT4_ (*tasa*) + +.. _HadCRUT4: https://crudata.uea.ac.uk/cru/data/temperature/ + + +References +---------- + +* Cox, Peter M., Chris Huntingford, and Mark S. Williamson. "Emergent + constraint on equilibrium climate sensitivity from global temperature + variability." Nature 553.7688 (2018): 319. + + +Example plots +------------- + +.. _fig_cox18nature_1: +.. figure:: /recipes/figures/cox18nature/temperature_anomaly_HadCRUT4.png + :align: center + :width: 50% + + Simulated change in global temperature from CMIP5 models (coloured lines), + compared to the global temperature anomaly from the HadCRUT4 dataset (black + dots). The anomalies are relative to a baseline period of 1961–1990. The model + lines are colour-coded, with lower-sensitivity models (λ > 1 + Wm\ :sup:`-2`\ K\ :sup:`-1`\ ) shown by green lines and higher-sensitivity + models (λ < 1 Wm\ :sup:`-2`\ K\ :sup:`-1`\ ) shown by magenta lines. + +.. _fig_cox18nature_2: +.. figure:: /recipes/figures/cox18nature/emergent_relationship_HadCRUT4.png + :align: center + :width: 50% + + Emergent relationship between ECS and the ψ metric.
The black dot-dashed + line shows the best-fit linear regression across the model ensemble, with + the prediction error for the fit given by the black dashed lines. The + vertical blue lines show the observational constraint from the HadCRUT4 + observations: the mean (dot-dashed line) and the mean plus and minus one + standard deviation (dashed lines). + +.. _fig_cox18nature_3: +.. figure:: /recipes/figures/cox18nature/pdf_HadCRUT4.png + :align: center + :width: 50% + + The PDF for ECS. The orange histograms (both panels) show the prior + distributions that arise from equal weighting of the CMIP5 models in 0.5 K + bins. diff --git a/doc/sphinx/source/recipes/recipe_crem.rst b/doc/sphinx/source/recipes/recipe_crem.rst index 28838502c0..1ab6b609f7 100644 --- a/doc/sphinx/source/recipes/recipe_crem.rst +++ b/doc/sphinx/source/recipes/recipe_crem.rst @@ -1,3 +1,5 @@ +.. _recipes_crem: + Cloud Regime Error Metric (CREM) ================================ @@ -32,6 +34,11 @@ that the simulated frequency of occurrence of a particular regime is zero, then a NaN will be returned from the code and a bar not plotted on the figure for that model. +The original publication recommends using sea ice fields from one model also +for other models that do not provide daily sea ice concentration. This is +possible as sea ice concentrations are prescribed in the AMIP simulations and +has been done to produce the example figure shown below. + Available recipes and diagnostics --------------------------------- @@ -101,9 +108,11 @@ Example plots .. figure:: /recipes/figures/crem/crem_error_metric.png :width: 10cm - :alt: xxxxx + :alt: CREM Cloud Regime Error Metrics (CREMpd) from Williams and Webb (2009) applied - to those CMIP5 AMIP simulations with the required data in the archive. A + to selected CMIP5 AMIP simulations. A perfect score with respect to ISCCP is zero; the dashed red line is an indication of observational uncertainty. + Note: as daily sea ice concentration (sic) is not available for all models + shown, the regridded fields from CanAM4 have been used for all models. diff --git a/doc/sphinx/source/recipes/recipe_cvdp.rst b/doc/sphinx/source/recipes/recipe_cvdp.rst index 6c7aa8744b..7b07c25b83 100644 --- a/doc/sphinx/source/recipes/recipe_cvdp.rst +++ b/doc/sphinx/source/recipes/recipe_cvdp.rst @@ -1,16 +1,53 @@ -The Climate Variability Diagnostics Package (CVDP) -================================================== +.. _recipes_cvdp: -About ----- +Climate Variability Diagnostics Package (CVDP) +============================================== + +Overview +-------- The Climate Variability Diagnostics Package (CVDP) developed by NCAR's Climate Analysis Section is an analysis tool that documents the major modes of climate variability in models and observations, including ENSO, Pacific Decadal Oscillation, Atlantic Multi-decadal Oscillation, Northern and Southern Annular Modes, North Atlantic Oscillation, Pacific North and South American teleconnection patterns. For details please refer to [1] and [2]. -Requirements ------------ -+ nco (optional for creating netcdf files) +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + + * recipe_cvdp.yml + +Diagnostics are stored in diag_scripts/cvdp/ + + * cvdp_wrapper.py + +User settings in recipe +----------------------- + +The recipe can be run with several data sets including different model ensembles; multi-model mean statistics are currently not supported.
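For orientation, the regression maps shown in the Example plots section below are essentially point-wise linear regressions of a field onto a standardized climate index. A minimal numpy sketch of that operation (the array names are hypothetical and this is not part of the CVDP package itself):

.. code-block:: python

   import numpy as np

   def regress_onto_index(field, index):
       """Point-wise regression of a (time, lat, lon) field onto a 1-D index."""
       idx = (index - index.mean()) / index.std()  # standardize the index
       anom = field - field.mean(axis=0)           # remove the time mean
       # Slope at each grid point: cov(anom, idx) / var(idx); var(idx) = 1 here
       return np.einsum('t,tij->ij', idx, anom) / idx.size

Applied to monthly precipitation anomalies and a NAM index, this would produce a map analogous to the example figure below.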
+ +Variables +--------- + +* ts (atmos, monthly mean, longitude latitude time) +* tas (atmos, monthly mean, longitude latitude time) +* pr (atmos, monthly mean, longitude latitude time) +* psl (atmos, monthly mean, longitude latitude time) + + +Observations and reformat scripts +--------------------------------- + +None. References ---------- [1] http://www.cesm.ucar.edu/working_groups/CVC/cvdp/ [2] https://github.com/NCAR/CVDP-ncl + +Example plots +------------- + +.. figure:: /recipes/figures/cvdp/nam.prreg.ann.png + :align: center + + Regression of the precipitation anomalies (PR) onto the Northern Annular + Mode (NAM) index for the time period 1900-2005 for 30 CMIP5 models and observations (GPCP (pr) / IFS-Cy31r2 (psl); time period 1984-2005). diff --git a/doc/sphinx/source/recipes/recipe_deangelis15nat.rst b/doc/sphinx/source/recipes/recipe_deangelis15nat.rst new file mode 100644 index 0000000000..37bad98a88 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_deangelis15nat.rst @@ -0,0 +1,128 @@ +.. _recipes_deangelis15nat: + +Evaluate water vapor short wave radiance absorption schemes of ESMs with the observations. +========================================================================================================================== + +Overview +-------- + + +The recipe reproduces figures from `DeAngelis et al. (2015)`_: +Figures 1b to 4 from the main part as well as Extended Data Figures 1 and 2. +This paper compares models with different schemes for water vapor short wave radiance absorption with the observations. +Schemes using pseudo-k-distributions with more than 20 exponential terms show the best results. + +.. _`DeAngelis et al. (2015)`: https://www.nature.com/articles/nature15770 + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + + * recipe_deangelis15nat.yml + +Diagnostics are stored in diag_scripts/ + + * deangelis15nat/deangelisf1b.py + * deangelis15nat/deangelisf2ext.py + * deangelis15nat/deangelisf3f4.py + + +User settings in recipe +----------------------- + +The recipe can be run with different CMIP5 and CMIP6 models. + +deangelisf1b.py: +Several flux variables (W m\ :sup:`-2`\) and up to 6 different model experiments can be handled. +Each variable needs to be given for each model experiment. The same experiments must +be given for all models. +In `DeAngelis et al. (2015)`_ +150 year means are used but the recipe can handle any duration. + +deangelisf2ext.py: + +deangelisf3f4.py: +For each model, two experiments must be given: +a pre-industrial control run, and a scenario with 4 times CO\ :sub:`2`\. +Ideally, 150 years should be given, but shorter time series work as well. + + +Variables +--------- + +deangelisf1b.py: +Tested for: + +* *rsnst* (atmos, monthly, longitude, latitude, time) +* *rlnst* (atmos, monthly, longitude, latitude, time) +* *lvp* (atmos, monthly, longitude, latitude, time) +* *hfss* (atmos, monthly, longitude, latitude, time) + +Any flux variable (W m\ :sup:`-2`\) should be possible.
+ +deangelisf2ext.py: + +* *rsnst* (atmos, monthly, longitude, latitude, time) +* *rlnst* (atmos, monthly, longitude, latitude, time) +* *rsnstcs* (atmos, monthly, longitude, latitude, time) +* *rlnstcs* (atmos, monthly, longitude, latitude, time) +* *lvp* (atmos, monthly, longitude, latitude, time) +* *hfss* (atmos, monthly, longitude, latitude, time) +* *tas* (atmos, monthly, longitude, latitude, time) + +deangelisf3f4.py: + +* *rsnstcs* (atmos, monthly, longitude, latitude, time) +* *rsnstcsnorm* (atmos, monthly, longitude, latitude, time) +* *prw* (atmos, monthly, longitude, latitude, time) +* *tas* (atmos, monthly, longitude, latitude, time) + + +Observations and reformat scripts +--------------------------------- + +deangelisf1b.py: +* None + +deangelisf2ext.py: +* None + +deangelisf3f4.py: + +* *rsnstcs*: + CERES-EBAF + +* *prw*: + ERA-Interim, SSMI + + +References +---------- + +* DeAngelis, A. M., Qu, X., Zelinka, M. D., and Hall, A.: An observational radiative constraint on hydrologic cycle intensification, Nature, 528, 249, 2015. + + +Example plots +------------- + + +.. _bar_all: +.. figure:: /recipes/figures/deangelis15nat/bar_all.png + :align: center + :width: 50% + + Global average multi-model mean comparing different model experiments for the sum of upward long wave flux at TOA and net downward long wave flux at the surface (rlnst), heating from short wave absorption (rsnst), latent heat release from precipitation (lvp), and sensible heat flux (hfss). The panel shows three model experiments, namely the pre-industrial control simulation averaged over 150 years (blue), the RCP8.5 scenario averaged over 2091-2100 (orange) and the abrupt quadrupled CO\ :sub:`2`\ scenario averaged over the years 141-150 after CO\ :sub:`2`\ quadrupling in all models except CNRM-CM5-2 and IPSL-CM5A-MR, where the average is calculated over the years 131-140 (gray). The figure shows that energy sources and sinks readjust in response to an increase in greenhouse gases, leading to a decrease in the sensible heat flux and an increase in the other fluxes. + +.. _exfig2a: +.. figure:: /recipes/figures/deangelis15nat/exfig2a.png + :align: center + :width: 50% + + The temperature-mediated response of each atmospheric energy budget term for each model as blue circles and the model mean as a red cross. The numbers above the abscissa are the cross-model correlations between dlvp/dtas and each other temperature-mediated response. + +.. _fig3b: +.. figure:: /recipes/figures/deangelis15nat/fig3b.png + :align: center + :width: 50% + + Scatter plot and regression line between the ratio of the change of net short wave radiation (rsnst) and the change of the Water Vapor Path (prw) against the ratio of the change of net short wave radiation for clear sky (rsnstcs) and the change of surface temperature (tas). The width of horizontal shading for models and the vertical dashed lines for observations (Obs.) represent statistical uncertainties of the ratio, as the 95% confidence interval (CI) of the regression slope to the rsnst versus prw curve. For the observations, the range from the minimum of the lower bounds of all CIs to the maximum of the upper bounds of all CIs is shown. diff --git a/doc/sphinx/source/recipes/recipe_diurnal_temperature_index.rst b/doc/sphinx/source/recipes/recipe_diurnal_temperature_index.rst index 1696ed3325..200255ed50 100644 --- a/doc/sphinx/source/recipes/recipe_diurnal_temperature_index.rst +++ b/doc/sphinx/source/recipes/recipe_diurnal_temperature_index.rst @@ -1,16 +1,16 @@ -..
_recipe_diurnal_temperature_index_wp7: +.. _recipe_diurnal_temperature_index: -Diurnal temperature variation indicator: Difference between Tmax and Tmin for a specific day -============================================================================================ +Diurnal temperature range +========================= Overview -------- The goal of this diagnostic is to compute a vulnerability indicator for the diurnal temperature range (DTR); the maximum variation in temperature within a period of 24 hours at a given location. This indicator was first proposed by the energy sector, to identify locations which may experience increased diurnal temperature variation in the future, which would put additional stress on the operational management of district heating systems. This indicator was defined as the DTR exceeding 5 degrees celsius at a given location and day of the year (Deandreis et al., N.D.). Projections of this indicator currently present high uncertainties, uncertainties associated to both Tmax and Tmin in future climate projections. -As well as being of use to the energy sector, the global‐average DTR has been evaluated using both observations and climate model simulations (Braganza et. al., 2004) and changes in the mean and variability of the DTR have been shown to have a wide range of impacts on society, such as on the transmission of diseases (Lambrechts et al., 2011; Paaijmans et al., 2010) and energy consumption (Deandreis et al., N.D.). +As well as being of use to the energy sector, the global‐average DTR has been evaluated using both observations and climate model simulations (Braganza et al., 2004) and changes in the mean and variability of the DTR have been shown to have a wide range of impacts on society, such as on the transmission of diseases (Lambrechts et al., 2011; Paaijmans et al., 2010). -The recipe recipe_diurnal_temperature_index_wp7.yml computes first a mean DTR for a reference period using historical simulations and then, the number of days when the DTR from the future climate projections exceeds that of the reference period by 5 degrees or more. The user can define both the reference and projection periods, and the region to be considered. The output produced by this recipe consists of a four panel plot showing the maps of the projected mean DTR indicator for each season and a netcdf file containing the corresponding data. +The recipe recipe_diurnal_temperature_index.yml first computes a mean DTR for a reference period using historical simulations and then the number of days when the DTR from the future climate projections exceeds that of the reference period by 5 degrees or more. The user can define both the reference and projection periods, and the region to be considered. The output produced by this recipe consists of a four panel plot showing the maps of the projected mean DTR indicator for each season and a netcdf file containing the corresponding data. @@ -19,11 +19,11 @@ Available recipes and diagnostics Recipes are stored in recipes/ -* recipe_diurnal_temperature_index_wp7.yml +* recipe_diurnal_temperature_index.yml Diagnostics are stored in diag_scripts/magic_bsc/ -* diurnal_temp_index.r : calculates the diaurnal temperature vulnerability index. +* diurnal_temp_index.R : calculates the diurnal temperature vulnerability index. User settings @@ -31,7 +31,7 @@ User settings User setting files are stored in recipes/ -#. recipe_diurnal_temperature_index_wp7.yml +#.
recipe_diurnal_temperature_index.yml *Required settings for script* @@ -55,7 +55,7 @@ References * Braganza, K., Karoly, D. J., & Arblaster, J. M. (2004). Diurnal temperature range as an index of global climate change during the twentieth century. Geophysical Research Letters, 31(13), n/a – n/a. https://doi.org/10.1029/2004GL019998 -* Déandreis C. (IPSL), Braconnot P. (IPSL), Planton S. (CNRMGAME). Study performed for the DALKIA company. http://secif.ipsl.fr/images/SECIF/documents/Communication/fiche_invulnerable/RC_indicateur_EN.pdf +* Déandreis, C. (IPSL), Braconnot, P. (IPSL), and Planton, S. (CNRMGAME) (2014). Impact du changement climatique sur la gestion des réseaux de chaleur. DALKIA, Étude réalisée pour l'entreprise DALKIA. Last access 24.02.2021. https://docplayer.fr/9496504-Impact-du-changement-climatique-sur-la-gestion-des-reseaux-de-chaleur.html * Lambrechts, L., Paaijmans, K. P., Fansiri, T., Carrington, L. B., Kramer, L. D., Thomas, M. B., & Scott, T. W. (2011). Impact of daily temperature fluctuations on dengue virus transmission by Aedes aegypti. Proceedings of the National Academy of Sciences of the United States of America, 108(18), 7460–7465. https://doi.org/10.1073/pnas.1101377108 @@ -69,6 +69,8 @@ Example plots ------------- .. _fig_diurnal: -.. figure:: /recipes/figures/diurnal_temp_index/rcp85_diurnal.png +.. figure:: /recipes/figures/diurnal_temp_index/Seasonal_DTRindicator_MPI-ESM-MR_2030_2080_1961_1990.png :align: center :width: 14cm + +Mean number of days during the period 2030-2080 on which the DTR exceeds the mean DTR of the historical period (1961-1990) by 5 degrees or more. The result is derived from one RCP 8.5 scenario simulated by MPI-ESM-MR. diff --git a/doc/sphinx/source/recipes/recipe_eady_growth_rate.rst b/doc/sphinx/source/recipes/recipe_eady_growth_rate.rst new file mode 100644 index 0000000000..d0364a688f --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_eady_growth_rate.rst @@ -0,0 +1,61 @@ +.. _recipes_eady_growth_rate: + +Eady growth rate +================ + +Overview +-------- + +This recipe computes the maximum Eady growth rate and its annual and seasonal means, storing +the results for each dataset. +For the seasonal means, the results are plotted over the North-Atlantic region for the selected +pressure levels. + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in ``esmvaltool/recipes/`` + + * ``recipe_eady_growth_rate.yml`` + +Diagnostics are stored in ``esmvaltool/diag_scripts/eady_growth_rate/`` + + * ``eady_growth_rate.py``: Computes and stores the Eady growth rate. + Plots can be produced for the seasonal mean over the North Atlantic region. + + +User settings in recipe +----------------------- + +#. Script ``eady_growth_rate.py`` + + *Required settings for script* + + * ``time_statistic``: Set to `'annual'` to compute the annual mean. Set to `'seasonal'` to compute the seasonal mean. + + *Optional settings for script* + + * ``plot_levels``: list of pressure levels to be plotted for the seasonal mean. If not specified, all levels will be plotted. + + +Variables +--------- + +* ta (atmos, monthly mean, longitude latitude level time) +* zg (atmos, monthly mean, longitude latitude level time) +* ua (atmos, monthly mean, longitude latitude level time) + +References +---------- +* Moreno-Chamarro, E., Caron, L-P., Ortega, P., Loosveldt Tomas, S., and Roberts, M. J.: Can we trust CMIP5/6 future projections of European winter precipitation? Environ. Res. Lett., 16, 054063, 2021. +* Hoskins, B. J. and Valdes, P. J.: On the existence of storm-tracks. Journal of the Atmospheric Sciences, 47(15), 1854–1864, 1990. + +Example plots +------------- + +.. _fig_eady_growth_rate: +.. figure:: /recipes/figures/eady_growth_rate/HadGEM3-GC31-LM_winter_eady_growth_rate_70000.png + :align: center + + Eady growth rate values over the North Atlantic region at 70000 Pa. diff --git a/doc/sphinx/source/recipes/recipe_ecs.rst b/doc/sphinx/source/recipes/recipe_ecs.rst new file mode 100644 index 0000000000..d426e45b7c --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_ecs.rst @@ -0,0 +1,138 @@ +.. _recipes_ecs: + +Equilibrium climate sensitivity +=============================== + +Overview +-------- + + +Equilibrium climate sensitivity is defined as the change in global mean +temperature as a result of a doubling of the atmospheric CO\ :sub:`2` +concentration compared to pre-industrial times after the climate system has +reached a new equilibrium. This recipe uses a regression method based on +`Gregory et al. (2004)`_ to calculate it for several CMIP models. + +.. _`Gregory et al. (2004)`: https://agupubs.onlinelibrary.wiley.com/doi/full/10.1029/2003GL018747 + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + + * recipe_ecs.yml + + +Diagnostics are stored in diag_scripts/ + + * climate_metrics/ecs.py + * climate_metrics/create_barplot.py + * climate_metrics/create_scatterplot.py + + +User settings in recipe +----------------------- + +* Preprocessor + + * ``area_statistics`` (*operation: mean*): Calculate global mean. + +.. _ecs.py: + +* Script climate_metrics/ecs.py + + * ``calculate_mmm``, *bool*, optional (default: ``True``): Calculate + multi-model mean ECS. + * ``complex_gregory_plot``, *bool*, optional (default: ``False``): Plot + complex Gregory plot (also add response for first ``sep_year`` years and + last 150 - ``sep_year`` years, default: ``sep_year=20``) if ``True``. + * ``output_attributes``, *dict*, optional: Write additional attributes to + netcdf files. + * ``read_external_file``, *str*, optional: Read ECS and feedback parameters + from external file. The path can be given relative to this diagnostic + script or as absolute path. + * ``savefig_kwargs``, *dict*, optional: Keyword arguments for + :func:`matplotlib.pyplot.savefig`. + * ``seaborn_settings``, *dict*, optional: Options for + :func:`seaborn.set_theme` (affects all plots). + * ``sep_year``, *int*, optional (default: ``20``): Year to separate + regressions of complex Gregory plot. Only effective if + ``complex_gregory_plot`` is ``True``. + * ``x_lim``, *list of float*, optional (default: ``[1.5, 6.0]``): Plot + limits for X axis of Gregory regression plot (T). + * ``y_lim``, *list of float*, optional (default: ``[0.5, 3.5]``): Plot + limits for Y axis of Gregory regression plot (N). + +.. _create_barplot.py: + +* Script climate_metrics/create_barplot.py + + * ``add_mean``, *str*, optional: Add a bar representing the mean for each + class. + * ``label_attribute``, *str*, optional: Cube attribute which is used as + label for different input files. + * ``order``, *list of str*, optional: Specify the order of the different + classes in the barplot by giving the ``label``, makes most sense when + combined with ``label_attribute``. + * ``patterns``, *list of str*, optional: Patterns to filter list of input + data. + * ``savefig_kwargs``, *dict*, optional: Keyword arguments for + :func:`matplotlib.pyplot.savefig`.
+ * ``seaborn_settings``, *dict*, optional: Options for + :func:`seaborn.set_theme` (affects all plots). + * ``sort_ascending``, *bool*, optional (default: ``False``): Sort bars in + ascending order. + * ``sort_descending``, *bool*, optional (default: ``False``): Sort bars in + descending order. + * ``subplots_kwargs``, *dict*, optional: Keyword arguments for + :func:`matplotlib.pyplot.subplots`. + * ``value_labels``, *bool*, optional (default: ``False``): Label bars with + value of that bar. + * ``y_range``, *list of float*, optional: Range for the Y axis of the plot. + +.. _create_scatterplot.py: + +* Script climate_metrics/create_scatterplot.py + + * ``dataset_style``, *str*, optional: Name of the style file (located in + :mod:`esmvaltool.diag_scripts.shared.plot.styles_python`). + * ``pattern``, *str*, optional: Pattern to filter list of input files. + * ``seaborn_settings``, *dict*, optional: Options for + :func:`seaborn.set_theme` (affects all plots). + * ``y_range``, *list of float*, optional: Range for the Y axis of the plot. + + +Variables +--------- + +* *rlut* (atmos, monthly, longitude, latitude, time) +* *rsdt* (atmos, monthly, longitude, latitude, time) +* *rsut* (atmos, monthly, longitude, latitude, time) +* *tas* (atmos, monthly, longitude, latitude, time) + + +Observations and reformat scripts +--------------------------------- + +*None* + + +References +---------- + +* Gregory, Jonathan M., et al. "A new method for diagnosing radiative forcing + and climate sensitivity." Geophysical Research Letters 31.3 (2004). + + +Example plots +------------- + +.. _fig_ecs_1: +.. figure:: /recipes/figures/ecs/CanESM2.png + :align: center + :width: 50% + + Scatterplot between TOA net radiative flux and global mean surface temperature anomaly + for 150 years of the abrupt 4x CO2 experiment including linear regression to + calculate ECS for CanESM2 (CMIP5). diff --git a/doc/sphinx/source/recipes/recipe_emergent_constraints.rst b/doc/sphinx/source/recipes/recipe_emergent_constraints.rst new file mode 100644 index 0000000000..03b0238eaf --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_emergent_constraints.rst @@ -0,0 +1,209 @@ +.. _recipe_ecs_scatter: + +Emergent constraints for equilibrium climate sensitivity +======================================================== + +Overview +-------- + +Calculates equilibrium climate sensitivity (ECS) versus + +1) S index, D index and lower tropospheric mixing index (LTMI); similar to fig. 5 from Sherwood et al. (2014) +2) southern ITCZ index and tropical mid-tropospheric humidity asymmetry index; similar to fig. 2 and 4 from Tian (2015) +3) covariance of shortwave cloud reflection (Brient and Schneider, 2016) +4) climatological Hadley cell extent (Lipat et al., 2017) +5) temperature variability metric; similar to fig. 2 from Cox et al. (2018) +6) total cloud fraction difference between tropics and mid-latitudes; similar to fig. 3 from Volodin (2008) +7) response of marine boundary layer cloud (MBLC) fraction changes to sea surface temperature (SST); similar to fig. 3 of Zhai et al. (2015) +8) Cloud shallowness index (Brient et al., 2016) +9) Error in vertically-resolved tropospheric zonal average relative humidity (Su et al., 2014) + +The results are displayed as scatterplots. + +.. note:: The recipe ``recipe_ecs_scatter.yml`` requires pre-calculation of the + equilibrium climate sensitivities (ECS) for all models. The ECS values are + calculated with recipe_ecs.yml.
The netcdf file containing the ECS values + (path and filename) is specified by diag_script_info@ecs_file. + Alternatively, the netcdf file containing the ECS values can be generated + with the cdl-script + $diag_scripts/emergent_constraints/ecs_cmip.cdl (recommended method): + + 1) save script given at the end of this recipe as ecs_cmip.cdl + 2) run command: ncgen -o ecs_cmip.nc ecs_cmip.cdl + 3) copy ecs_cmip.nc to directory given by diag_script_info@ecs_file + (e.g. $diag_scripts/emergent_constraints/ecs_cmip.nc) + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + + * recipe_ecs_scatter.yml + * recipe_ecs_constraints.yml + +Diagnostics are stored in diag_scripts + + * emergent_constraints/ecs_scatter.ncl: calculate emergent constraints for ECS + * emergent_constraints/ecs_scatter.py: calculate further emergent constraints for ECS + * emergent_constraints/single_constraint.py: create scatterplots for emergent constraints + * climate_metrics/psi.py: calculate temperature variability metric (Cox et al., 2018) + + +User settings in recipe +----------------------- + +.. _ecs_scatter.ncl: + +* Script emergent_constraints/ecs_scatter.ncl + + *Required settings (scripts)* + + * diag: emergent constraint to calculate ("itczidx", "humidx", "ltmi", + "covrefl", "shhc", "sherwood_d", "sherwood_s") + * ecs_file: path and filename of netCDF containing precalculated + ECS values (see note above) + + *Optional settings (scripts)* + + * calcmm: calculate multi-model mean (True, False) + * legend_outside: plot legend outside of scatterplots (True, False) + * output_diag_only: Only write netcdf files for X axis (True) or write all + plots (False) + * output_models_only: Only write models (no reference datasets) to netcdf + files (True, False) + * output_attributes: Additional attributes for all output netcdf files + * predef_minmax: use predefined internal min/max values for axes + (True, False) + * styleset: "CMIP5" (if not set, diagnostic will create a color table + and symbols for plotting) + * suffix: string to add to output filenames (e.g. "cmip3") + + *Required settings (variables)* + + * reference_dataset: name of reference data set + + *Optional settings (variables)* + + none + + *Color tables* + + none + + +* Script emergent_constraints/ecs_scatter.py + + See + :ref:`here`. + + +* Script emergent_constraints/single_constraint.py + + See + :ref:`here`. + + +* Script climate_metrics/psi.py + + See :ref:`here`.
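For orientation, the temperature variability metric computed by climate_metrics/psi.py follows the definition given in the cox18nature section above, :math:`\psi = \sigma_T / \sqrt{-\ln(\alpha_{1T})}`, evaluated in moving windows. A minimal numpy sketch under that definition (not the actual diagnostic code; the real script's detrending details may differ):

.. code-block:: python

   import numpy as np

   def psi_metric(tas, window_length=55, lag=1):
       """Sketch of psi = sigma_T / sqrt(-ln(alpha_1T)) in moving windows.

       tas: 1-D array of annual-mean global temperature anomalies.
       Assumes the lag autocorrelation is between 0 and 1 in each window.
       """
       years = np.arange(window_length)
       psi = []
       for start in range(tas.size - window_length + 1):
           seg = tas[start:start + window_length]
           seg = seg - np.polyval(np.polyfit(years, seg, 1), years)  # detrend
           sigma = seg.std()
           alpha = np.corrcoef(seg[:-lag], seg[lag:])[0, 1]  # lag autocorrelation
           psi.append(sigma / np.sqrt(-np.log(alpha)))
       return np.asarray(psi)

The ``window_length`` and ``lag`` defaults mirror the psi.py options listed in the cox18nature section.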
+ + +Variables +--------- + +* cl (atmos, monthly mean, longitude latitude level time) +* clt (atmos, monthly mean, longitude latitude time) +* pr (atmos, monthly mean, longitude latitude time) +* hur (atmos, monthly mean, longitude latitude level time) +* hus (atmos, monthly mean, longitude latitude level time) +* rsdt (atmos, monthly mean, longitude latitude time) +* rsut (atmos, monthly mean, longitude latitude time) +* rsutcs (atmos, monthly mean, longitude latitude time) +* rtnt or rtmt (atmos, monthly mean, longitude latitude time) +* ta (atmos, monthly mean, longitude latitude level time) +* tas (atmos, monthly mean, longitude latitude time) +* tasa (atmos, monthly mean, longitude latitude time) +* tos (atmos, monthly mean, longitude latitude time) +* ts (atmos, monthly mean, longitude latitude time) +* va (atmos, monthly mean, longitude latitude level time) +* wap (atmos, monthly mean, longitude latitude level time) +* zg (atmos, monthly mean, longitude latitude time) + + +Observations and reformat scripts +--------------------------------- + +.. note:: (1) Obs4mips data can be used directly without any preprocessing. + (2) See headers of reformat scripts for non-obs4MIPs data for download instructions. + +* AIRS (obs4MIPs): hus, husStderr +* AIRS-2-0 (obs4MIPs): hur +* CERES-EBAF (obs4MIPs): rsdt, rsut, rsutcs +* ERA-Interim (OBS6): hur, ta, va, wap +* GPCP-SG (obs4MIPs): pr +* HadCRUT4 (OBS): tasa +* HadISST (OBS): ts +* MLS-AURA (OBS6): hur +* TRMM-L3 (obs4MIPs): pr, prStderr + + +References +---------- + +* Brient, F., and T. Schneider, J. Climate, 29, 5821-5835, doi:10.1175/JCLI-D-15-0897.1, 2016. +* Brient et al., Clim. Dyn., 47, doi:10.1007/s00382-015-2846-0, 2016. +* Cox et al., Nature, 553, doi:10.1038/nature25450, 2018. +* Gregory et al., Geophys. Res. Lett., 31, doi:10.1029/2003GL018747, 2004. +* Lipat et al., Geophys. Res. Lett., 44, 5739-5748, doi:10.1002/2017GL073151, 2017. +* Sherwood et al., Nature, 505, 37-42, doi:10.1038/nature12829, 2014. +* Su et al., J. Geophys. Res. Atmos., 119, doi:10.1002/2014JD021642, 2014. +* Tian, Geophys. Res. Lett., 42, 4133-4141, doi:10.1002/2015GL064119, 2015. +* Volodin, Izvestiya, Atmospheric and Oceanic Physics, 44, 288-299, doi:10.1134/S0001433808030043, 2008. +* Zhai et al., Geophys. Res. Lett., 42, doi:10.1002/2015GL065911, 2015. + +Example plots +------------- + +.. _fig_ec_ecs_1: +.. figure:: /recipes/figures/emergent_constraints/ltmi.png + :align: center + + Lower tropospheric mixing index (LTMI; Sherwood et al., 2014) vs. + equilibrium climate sensitivity from CMIP5 models. + +.. _fig_ec_ecs_2: +.. figure:: /recipes/figures/emergent_constraints/shhc.png + :align: center + + Climatological Hadley cell extent (Lipat et al., 2017) vs. + equilibrium climate sensitivity from CMIP5 models. + +.. _fig_ec_ecs_3: +.. figure:: /recipes/figures/emergent_constraints/humidx.png + :align: center + + Tropical mid-tropospheric humidity asymmetry index (Tian, 2015) vs. + equilibrium climate sensitivity from CMIP5 models. + +.. _fig_ec_ecs_4: +.. figure:: /recipes/figures/emergent_constraints/itczidx.png + :align: center + + Southern ITCZ index (Tian, 2015) vs. + equilibrium climate sensitivity from CMIP5 models. + +.. _fig_ec_ecs_5: +.. figure:: /recipes/figures/emergent_constraints/covrefl.png + :align: center + + Covariance of shortwave cloud reflection (Brient and Schneider, 2016) vs. + equilibrium climate sensitivity from CMIP5 models. + +.. _fig_ec_ecs_6: +..
figure:: /recipes/figures/emergent_constraints/volodin.png + :align: center + + Difference in total cloud fraction between tropics (28°S - 28°N) and + Southern midlatitudes (56°S - 36°S) (Volodin, 2008) vs. equilibrium climate + sensitivity from CMIP5 models. diff --git a/doc/sphinx/source/recipes/recipe_ensclus.rst b/doc/sphinx/source/recipes/recipe_ensclus.rst index cb9e7ec38b..483246f0e5 100644 --- a/doc/sphinx/source/recipes/recipe_ensclus.rst +++ b/doc/sphinx/source/recipes/recipe_ensclus.rst @@ -1,4 +1,6 @@ -EnsClus - Ensemble Clustering - a cluster analysis tool for climate model simulations +.. _recipes_ensclus: + +Ensemble Clustering - a cluster analysis tool for climate model simulations (EnsClus) ===================================================================================== @@ -6,13 +8,13 @@ Overview -------- EnsClus is a cluster analysis tool in Python, based on the k-means algorithm, for ensembles of climate model simulations. -Multi-model studies allow to investigate climate processes beyond the limitations of individual models by means of inter-comparison or averages of several members of an ensemble. With large ensembles, it is often an advantage to be able to group members according to similar characteristics and to select the most representative member for each cluster. +Multi-model studies allow the investigation of climate processes beyond the limitations of individual models by means of inter-comparison or averages of several members of an ensemble. With large ensembles, it is often an advantage to be able to group members according to similar characteristics and to select the most representative member for each cluster. -The user chooses which feature of the data is used to group the ensemble members by clustering: time mean, maximum, a certain percentile (e.g., 75% as in the examples below), standard deviation and trend over the time period. For each ensemble member this value is computed at each grid point, obtaining N lat-lon maps, where N is the number of ensemble members. The anomaly is computed subtracting the ensemble mean of these maps to each of the single maps. The anomaly is therefore computed with respect to the ensemble members (and not with respect to the time) and the Empirical Orthogonal Function (EOF) analysis is applied to these anomaly maps. +The user chooses which feature of the data is used to group the ensemble members by clustering: time mean, maximum, a certain percentile (e.g., 75% as in the examples below), standard deviation and trend over the time period. For each ensemble member this value is computed at each grid point, obtaining N lat-lon maps, where N is the number of ensemble members. The anomaly is computed by subtracting the ensemble mean of these maps from each of the single maps. The anomaly is therefore computed with respect to the ensemble members (and not with respect to the time) and the Empirical Orthogonal Function (EOF) analysis is applied to these anomaly maps. -Regarding the EOF analysis, the user can choose either how many Principal Components (PCs) to retain or the percentage of explained variance to keep. After reducing dimensionality via EOF analysis, k-means analysis is applied using the desired subset of PCs. +Regarding the EOF analysis, the user can choose either how many Principal Components (PCs) to retain or the percentage of explained variance to keep. After reducing dimensionality via EOF analysis, k-means analysis is applied using the desired subset of PCs.
-The major final outputs are the classification in clusters, i.e. which member belongs to which cluster (in k-means analysis the number k of clusters needs to be defined prior to the analysis) and the most representative member for each cluster, which is the closest member to the cluster centroid. +The major final outputs are the classification in clusters, i.e. which member belongs to which cluster (in k-means analysis the number k of clusters needs to be defined prior to the analysis) and the most representative member for each cluster, which is the closest member to the cluster centroid. Other outputs refer to the statistics of clustering: in the PC space, the minimum and the maximum distance between a member in a cluster and the cluster centroid (i.e. the closest and the furthest member), the intra-cluster standard deviation for each cluster (i.e. how much the cluster is compact). @@ -47,8 +49,12 @@ User settings * area: region of interest (EAT=Euro-Atlantic, PNA=Pacific North American, NH=Northern Hemisphere, EU=Europe) * extreme: extreme to consider: XXth_percentile (XX can be set arbitrarily, e.g. 75th_percentile), mean (mean value over the period), maximum (maximum value over the period), std (standard deviation), trend (linear trend over the period) * numclus: number of clusters to be computed -* perc: percentage of variance to be explained by PCs (select either this or numpcs) -* numpcs: number of PCs to retain (select either this or perc) +* perc: percentage of variance to be explained by PCs (select either this or numpcs, default=80) +* numpcs: number of PCs to retain (has priority over perc unless it is set to 0 (default)) + +*Optional settings for script* + +* max_plot_panels: maximum number of panels (datasets) in a plot. When exceeded multiple plots are created. Default: 72 Variables @@ -75,4 +81,4 @@ Example plots .. figure:: /recipes/figures/ensclus/ensclus.png :width: 10cm -Clustering based on historical JJA precipitation rate (mm/day), 75th percentile, CMIP5 models, 3 clusters, 80% variance explained by PCs. + Clustering based on the 75th percentile of historical summer (JJA) precipitation rate for CMIP5 models over 1900-2005. Three clusters are computed, based on the principal components explaining 80% of the variance. The 32 models are grouped in three different clusters. The green cluster is the most populated with 16 ensemble members mostly characterized by a positive anomaly over central-north Europe. The red cluster counts 12 elements that exhibit a negative anomaly centered over southern Europe. The third cluster (labelled in blue) includes only 4 models showing a north-south dipolar precipitation anomaly, with a wetter-than-average Mediterranean counteracting a drier northern Europe. Ensemble members No.9, No.26 and No.19 are the “specimen” of each cluster, i.e. the model simulations that best represent the main features of that cluster. These ensemble members can then be used as representative of the whole possible outcomes of the multi-model ensemble distribution associated with the 32 CMIP5 historical integrations for the summer precipitation rate 75th percentile over Europe when these outcomes are reduced from 32 to 3. The number of ensemble members of each cluster might provide a measure of the probability of occurrence of each cluster.
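As a rough illustration of the EOF-plus-k-means procedure described in the Overview above, here is a minimal scikit-learn sketch (the array and function names are hypothetical; this is not the EnsClus code itself):

.. code-block:: python

   import numpy as np
   from sklearn.cluster import KMeans
   from sklearn.decomposition import PCA

   def cluster_ensemble(member_maps, numclus=3, perc=80):
       """member_maps: (N_members, n_gridpoints) array of flattened maps,
       e.g. the 75th percentile at each grid point, one row per member."""
       # Anomalies with respect to the ensemble mean (not the time mean)
       anomalies = member_maps - member_maps.mean(axis=0)
       # EOF step: keep the PCs explaining `perc` percent of the variance
       pca = PCA(n_components=perc / 100, svd_solver="full")
       pcs = pca.fit_transform(anomalies)
       # k-means in PC space; the number of clusters is fixed beforehand
       km = KMeans(n_clusters=numclus, n_init=10, random_state=0).fit(pcs)
       # Most representative member per cluster: closest to its centroid
       dist = np.linalg.norm(pcs - km.cluster_centers_[km.labels_], axis=1)
       reps = [int(min(np.flatnonzero(km.labels_ == k), key=lambda i: dist[i]))
               for k in range(numclus)]
       return km.labels_, reps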
diff --git a/doc/sphinx/source/recipes/recipe_esacci_lst.rst b/doc/sphinx/source/recipes/recipe_esacci_lst.rst new file mode 100644 index 0000000000..24d68c3eec --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_esacci_lst.rst @@ -0,0 +1,80 @@ +.. _recipes_esacci_lst: + +ESA CCI LST comparison to Historical Models +=========================================== + +Overview +-------- + +This diagnostic compares ESA CCI LST to multiple historical ensemble members of CMIP models. +It does this over a defined region for monthly values of the land surface temperature. +The result is a plot showing the mean difference between CCI LST and the model average LST, with a region of +/- one standard deviation of the model mean LST given as a measure of model variability. + +The recipe and diagnostic need the all-time average monthly LST from the CCI data. +We use the L3C single sensor monthly data. +A CMORizing script calculates the mean of the daytime and night-time overpasses to give the all-time average LST. +This is so that the Amon output from CMIP models can be used. +We created such a dataset from the Aqua MODIS data from CCI. + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in esmvaltool/recipes/ + + * ``recipe_esacci_lst.yml`` + +Diagnostics are stored in esmvaltool/diag_scripts/lst/ + + * ``lst.py`` + + +User settings in recipe +----------------------- + +#. Script ``recipe_esacci_lst.yml`` + + *No required settings for script* + + *No user defined inputs to the diagnostic* + + *Required settings for variables* + + * The diagnostic requires all data sources to have the same start_year and end_year, and hence that the data is available for that period. + + *Required settings for preprocessor* + + * start_longitude, end_longitude: the western and eastern bounds of the region to work with. + * start_latitude, end_latitude: the southern and northern bounds of the region to work with. + * target_grid: this should be one of the model grids. + + +Variables +--------- + +* ts (atmos, monthly mean, longitude latitude time) + + +Observations and reformat scripts +--------------------------------- + +This recipe and diagnostic are written to work with data created from the CMORizer esmvaltool/cmorizers/obs/cmorize_obs_esacci_lst.py. +This takes the original ESA CCI LST L3C files from the Aqua MODIS DAY and NIGHT products and creates the all-time mean data this diagnostic uses. +Advice from the CCI LST team is to use the monthly, not daily, files to create the all-time average, to avoid the possibility of a bias towards night-time LST values, which are more prevalent because of how the cloud screening algorithms work. + +References +---------- + +* ESA CCI LST project https://climate.esa.int/en/projects/land-surface-temperature/ + +Example plots +------------- + +.. _fig_lst_example: +.. figure:: /recipes/figures/lst/lst_example.png + :align: center + + Timeseries of the ESA CCI LST minus mean of CMIP6 ensembles. The selected region is 35E-175E, 55N-70N. + The black line is the mean difference, and the blue shaded area denotes one standard deviation on either side, computed from the individual ensemble members' differences in LST. + Models used for this are UKESM1 members r1i1p1f2 and r2i1p1f2, and CESM members r2i1p1f1 and r3i1p1f1. + We have used the entire timeseries of available CCI data 2004-2014 inclusive, noting we have not written the CMORizer to process the incomplete year of 2003 for the Aqua MODIS data.
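The difference-and-spread construction behind the example plot above reduces to a few lines of numpy (the array names are hypothetical; the actual ``lst.py`` diagnostic operates on iris cubes):

.. code-block:: python

   import numpy as np

   def lst_difference_and_spread(cci_lst, members):
       """cci_lst: (time,) regional-mean CCI LST; members: (n_members, time)
       regional-mean LST from the model ensemble members."""
       diff = cci_lst - members.mean(axis=0)  # black line in the example plot
       spread = members.std(axis=0)           # half-width of the shaded band
       return diff, diff - spread, diff + spread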
diff --git a/doc/sphinx/source/recipes/recipe_esacci_oc.rst b/doc/sphinx/source/recipes/recipe_esacci_oc.rst new file mode 100644 index 0000000000..54b485208e --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_esacci_oc.rst @@ -0,0 +1,74 @@ +.. _recipes_esaccioc: + +Ocean chlorophyll in ESMs compared to ESA-CCI observations +=========================================================== + +Overview +-------- + +This recipe compares monthly surface chlorophyll from CMIP models to ESA CCI ocean colour chlorophyll (ESACCI-OC). The observations are the merged sensor geographic monthly L3S chlor_a data (Sathyendranath et al., 2019). Multiple models and different observational versions can be used by the script. + +The recipe_esacci_oc.yml produces an image showing four maps. Each of these four maps shows latitude vs longitude and the chlorophyll value. The four plots are: ESACCI-OC v5.0 chlorophyll, the CMIP6 model, the model-observation bias and the model/observation ratio. The script also produces a scatter plot for all coordinates with the model on the x-axis and the observations on the y-axis and a line of best fit with the parameter values given in the panel. + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in esmvaltool/recipes/ocean/ + + * recipe_esacci_oc.yml + +Diagnostics are stored in esmvaltool/diag_scripts/ocean/ + + * diagnostic_model_vs_obs.py + + +User settings in recipe +----------------------- + +#. Script diagnostic_model_vs_obs.py + + *Required settings for script* + + * observational_dataset: name of reference dataset (e.g. {dataset: ESACCI-OC,}) + + +Variables +--------- + +* chl (ocean, monthly mean, longitude, latitude, time) + + +Observations and reformat scripts +--------------------------------- + +* ESACCI-OC (chl) + + *Reformat script:* reformat_scripts/obs/reformat_obs_esacci_oc.py + + +References +---------- + +* Sathyendranath, S., et al. (2019), An ocean-colour time series for use in climate studies: the experience of the Ocean-Colour Climate Change Initiative (OC-CCI). Sensors: 19, 4285. doi:10.3390/s19194285. +* ESACCI-OC dataset: http://dx.doi.org/10.5285/00b5fc99f9384782976a4453b0148f49 + +Example plots +------------- + +.. _fig_ocdiag_maps: +.. figure:: /recipes/figures/ocean/model_vs_obs_MassConcentrationofTotalPhytoplanktonExpressedasChlorophyllinSeaWater_NorESM2-LM_ESACCI-OC__maps.png + :align: center + :width: 12cm + + Surface chlorophyll from ESACCI-OC ocean colour data version 5.0 and the + CMIP6 model NorESM2-LM. This model overestimates chlorophyll compared to + the observations. + +.. _fig_ocdiag_scatter: +.. figure:: /recipes/figures/ocean/model_vs_obs_MassConcentrationofTotalPhytoplanktonExpressedasChlorophyllinSeaWater_NorESM2-LM_ESACCI-OC__scatter.png + :align: center + :width: 8cm + + Scatter plot of surface chlorophyll from ESACCI-OC ocean colour data + version 5.0 and the CMIP6 model NorESM2-LM. diff --git a/doc/sphinx/source/recipes/recipe_examples.rst b/doc/sphinx/source/recipes/recipe_examples.rst new file mode 100644 index 0000000000..e3c32c4337 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_examples.rst @@ -0,0 +1,105 @@ +.. _recipe_examples: + +Example recipes +=============== + +Overview +-------- + +These are example recipes calling example diagnostic scripts. + +The recipe ``examples/recipe_python.yml`` produces time series plots of global mean +temperature and of the temperature in Amsterdam. +It also produces a map of global temperature in January 2020.
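The global mean temperature plotted by ``examples/recipe_python.yml`` is an area-weighted average (conceptually what the ``area_statistics`` preprocessor with ``operation: mean``, mentioned elsewhere in these pages, computes). A minimal sketch of the weighting, assuming a regular latitude-longitude grid (the array names are hypothetical):

.. code-block:: python

   import numpy as np

   def global_mean(field, lat):
       """Area-weighted global mean of a (lat, lon) field on a regular grid."""
       weights = np.cos(np.deg2rad(lat))  # grid-cell area scales with cos(lat)
       return np.average(field, axis=0, weights=weights).mean()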
+ +The recipe ``examples/recipe_easy_ipcc.yml`` reproduces part of figure 9.3a from +`IPCC AR6 - Climate Change 2021: The Physical Science Basis `__. +It demonstrates how ESMValTool can be used to conveniently analyze +many models on their native grid and is described in detail in the blog post +`Analysis-ready climate data with ESMValCore `__. + +The recipe ``examples/recipe_extract_shape.yml`` produces a map of the mean +temperature in the Elbe catchment over the years 2000 to 2002. +Some example shapefiles for use with this recipe are available +`here `__; +make sure to download all files with the same name but different extensions. + +The recipe ``examples/recipe_julia.yml`` produces a map plot with the mean temperature +over the year 1997 plus a number that is configurable from the recipe. + +The recipe ``examples/recipe_decadal.yml`` showcases how the ``timerange`` tag +can be used to load datasets belonging to the DCPP activity. It produces timeseries +plots comparing the global mean temperature of a DCPP dataset with an observational +dataset. + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in `esmvaltool/recipes/ `__: + * examples/recipe_python.yml + * examples/recipe_easy_ipcc.yml + * examples/recipe_extract_shape.yml + * examples/recipe_julia.yml + * examples/recipe_decadal.yml + +Diagnostics are stored in `esmvaltool/diag_scripts/ `__: + * examples/diagnostic.py: visualize results and store provenance information + * examples/make_plot.py: create a timeseries plot with likely ranges + * examples/diagnostic.jl: visualize results and store provenance information + * examples/decadal_example.py: visualize results and store provenance information + +User settings in recipe +----------------------- + +#. Script ``examples/diagnostic.py`` + + *Required settings for script* + + * ``quickplot: plot_type``: which of the :py:mod:`iris.quickplot` functions to use. + Arguments that are accepted by these functions can also be specified here, e.g. ``cmap``. + Preprocessors need to be configured such that the resulting data matches the plot type, e.g. a timeseries or a map (see the sketch at the end of this section). + +#. Script ``examples/diagnostic.jl`` + + *Required settings for script* + + * ``parameter1``: example parameter; this number will be added to the mean (over time) value of the input data. + +Variables +--------- + +* tas (atmos, monthly, longitude, latitude, time) +* tos (ocean, monthly, longitude, latitude, time) + +Example plots +------------- + +.. _global_map: +.. figure:: /recipes/figures/examples/map.png + :align: center + + Air temperature in January 2000 (BCC-ESM1 CMIP6). + +.. _timeseries: +.. figure:: /recipes/figures/examples/timeseries.png + :align: center + + Amsterdam air temperature (multimodel mean of CMIP5 CanESM2 and CMIP6 BCC-ESM1). + +.. _easy_ipcc: +.. figure:: /recipes/figures/examples/IPCC_AR6_figure_9.3a_1850-2100.png + :align: center + + Mean sea surface temperature anomaly (part of figure 9.3a from IPCC AR6). + +.. _elbe: +.. figure:: /recipes/figures/examples/elbe.png + :align: center + + Mean air temperature over the Elbe catchment during 2000-2002 according to CMIP5 CanESM2. + +.. _decadal_first_example: +.. figure:: /recipes/figures/examples/decadal_first_example.png + :align: center + + Global mean temperature of CMIP6 dcppA-hindcast EC-Earth3 and OBS ERA-Interim.
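To make the ``quickplot`` setting described above concrete, here is a minimal sketch of what a diagnostic can do with a preprocessed file (the filename is hypothetical; the real ``examples/diagnostic.py`` also records provenance information):

.. code-block:: python

   import iris
   import iris.quickplot as qplt
   import matplotlib.pyplot as plt

   cube = iris.load_cube("tas_amsterdam.nc")  # hypothetical preprocessor output
   qplt.plot(cube)   # 'plot' is the quickplot plot_type suited to a timeseries
   plt.savefig("tas_amsterdam.png")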
diff --git a/doc/sphinx/source/recipes/recipe_extreme_events.rst b/doc/sphinx/source/recipes/recipe_extreme_events.rst new file mode 100644 index 0000000000..8d0dc1cd8c --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_extreme_events.rst @@ -0,0 +1,137 @@ +.. _recipes_extreme_events: + +Extreme Events Indices (ETCCDI) +=============================== + + +Overview +-------- + +This diagnostic uses the standard climdex.pcic.ncdf R library to +compute the 27 climate change indices specified by +the joint CCl/CLIVAR/JCOMM Expert Team (ET) on Climate Change Detection and Indices http://etccdi.pacificclimate.org/. +The needed input fields are daily average precipitation flux and minimum, maximum and average daily surface temperatures. +The recipe reproduces panels of figure 9.37 of the IPCC AR5 report, producing both a Gleckler plot, +with relative error metrics for the CMIP5 temperature and precipitation extreme indices, +and timeseries plots comparing the ensemble spread with observations. +For plotting, 1 to 4 observational reference datasets are supported. If no observational reference datasets are given, the plotting routines do not work; however, index generation without plotting is still possible. +All datasets are regridded to a common grid and considered only over land. + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + +* recipe_extreme_events.yml + +Diagnostics are stored in diag_scripts/extreme_events/ + +* ExtremeEvents.r + +and subroutines + +* common_climdex_preprocessing_for_plots.r +* make_Glecker_plot2.r +* make_timeseries_plot.r +* cfg_climdex.r +* cfg_extreme.r + +User settings +------------- + +*Required settings for script* + +* reference_datasets: list containing the reference datasets to compare with +* timeseries_idx: list of indices to compute for timeseries plot. + The syntax is "XXXETCCDI_TT", where "TT" can be either "yr" or "mon" + (yearly or monthly indices are computed) and "XXX" can be one of the following: + "altcdd", "altcsdi", "altcwd", "altwsdi", "cdd", "csdi", "cwd", + "dtr", "fd", "gsl", "id", "prcptot", "r10mm", "r1mm", "r20mm", + "r95p", "r99p", "rx1day", "rx5day", "sdii", "su", "tn10p", + "tn90p", "tnn", "tnx", "tr", "tx10p", "tx90p", "txn", "txx", "wsdi". + The option "mon" for "TT" can only be used in combination with one of: + "txx", "tnx", "txn", "tnn", "tn10p", "tx10p", "tn90p", "tx90p", "dtr", "rx1day", "rx5day". +* gleckler_idx: list of indices to compute for Gleckler plot. Same syntax as above. + The diagnostic computes all unique indices specified in either ``gleckler_idx`` or ``timeseries_idx``. + If at least one "mon" index is selected, the indices are computed but no plots are produced. +* base_range: a list of two years to specify the range to be used as "base range" for climdex + (the period in which for example reference percentiles are computed) + +*Optional settings for script* + +* regrid_dataset: name of dataset to be used as common target for regridding. If missing, the first reference dataset is used +* mip_name: string containing the name of the model ensemble, used for titles and labels in the plots (default: "CMIP") +* analysis_range: a list of two years to specify the range to be used for the analysis in the plots. + The input data will need to cover both ``analysis_range`` and ``base_range``. If missing, the full period covered by the + input datasets will be used.
+* ts_plt: (logical) whether to produce the timeseries plots (default: true)
+* glc_plt: (logical) whether to produce the Gleckler plot (default: true)
+* climdex_parallel: number of parallel threads to be used for the climdex calculation
+  (default: 4). The logical ``false`` can also be passed to switch off parallel computation.
+* normalize: (logical) whether to detrend the datasets and normalize them with the standard
+  deviation for use in the timeseries plots. When this option is used, the data for the
+  following indices are detrended and normalized in the timeseries plots:
+  "altcdd", "altcsdi", "altcwd", "altwsdi", "cdd", "cwd", "dtr", "fd", "gsl", "id",
+  "prcptot", "r10mm", "r1mm", "r20mm", "r95p", "r99p", "rx1day", "rx5day", "sdii",
+  "su", "tnn", "tnx", "tr", "txn", "txx" (default: false)
+
+Additional optional settings controlling the plots:
+
+* Timeseries plots:
+
+  * ts_png_width: width for png figures (default: 640)
+  * ts_png_height: height for png figures (default: 480)
+  * ts_png_units: units for figure size (default: "px")
+  * ts_png_pointsize: fontsize (default: 12)
+  * ts_png_bg: background color (default: "white")
+  * ts_col_list: list of colors for lines (default: ["dodgerblue2", "darkgreen", "firebrick2", "darkorchid", "aquamarine3"])
+  * ts_lty_list: list of linetypes (default: [1, 4, 2, 3, 5])
+  * ts_lwd_list: list of linewidths (default: [2, 2, 2, 2, 2])
+
+* Gleckler plot:
+
+  * gl_png_res: height for png figures (default: 480).
+    The width of the figure is computed automatically.
+  * gl_png_units: units for figure size (default: "px")
+  * gl_png_pointsize: fontsize (default: 12)
+  * gl_png_bg: background color (default: "white")
+  * gl_mar_par: page margins vector (default: [10, 4, 3, 14])
+  * gl_rmsespacer: spacing of the RMSE column (default: 0.01)
+  * gl_scaling_factor: scaling factor for the colorscale height (default: 0.9)
+  * gl_text_scaling_factor: scaling factor for the text size (default: 1.0)
+  * gl_xscale_spacer_rmse: horizontal position of the coloured colorbar (default: 0.05)
+  * gl_xscale_spacer_rmsestd: horizontal position of the gray colorbar (default: 0.05)
+  * gl_symb_scaling_factor: scaling factor for the white "symbol" square explaining the partition (default: 1.0)
+  * gl_symb_xshift: horizontal position of the symbol box (default: 0.2)
+  * gl_symb_yshift: vertical position of the symbol box (default: 0.275)
+  * gl_text_symb_scaling_factor: scaling factor for the text in the symbol box (default: 0.5)
+
+Variables
+---------
+
+* tas (atmos, daily mean, longitude latitude time)
+* tasmin (atmos, daily minimum, longitude latitude time)
+* tasmax (atmos, daily maximum, longitude latitude time)
+* pr (atmos, daily mean, longitude latitude time)
+
+
+Observations and reformat scripts
+---------------------------------
+
+None.
+
+
+References
+----------
+
+* Zhang, X., Alexander, L., Hegerl, G. C., Jones, P., Klein Tank, A., Peterson, T. C., Trewin, B., and Zwiers, F. W.: Indices for monitoring changes in extremes based on daily temperature and precipitation data, WIREs Clim. Change, doi:10.1002/wcc.147, 2011.
+
+* Sillmann, J., V. V. Kharin, X. Zhang, and F. W. Zwiers: Climate extreme indices in the CMIP5 multi-model ensemble. Part 1: Model evaluation in the present climate, J. Geophys. Res., doi:10.1029/2012JD018390, 2013.
+
+
+Example plots
+-------------
+
+.. figure:: /recipes/figures/extreme_events/gleckler.png
+   :width: 12cm
+
+   Portrait plot of relative error metrics for the CMIP5 temperature and precipitation extreme indices evaluated over 1981-2000. Reproduces Fig. 9.37 of the IPCC AR5 report, Chapter 9.
+
+.. figure:: /recipes/figures/extreme_events/cdd_timeseries.png
+   :width: 10cm
+
+   Timeseries of the Consecutive Dry Days index over 1981-2000 for a selection of CMIP5 models, the CMIP5 multi-model mean (CMIP) and ERA-Interim. Shading is used to show the multi-model spread.
diff --git a/doc/sphinx/source/recipes/recipe_extreme_index.rst b/doc/sphinx/source/recipes/recipe_extreme_index.rst
new file mode 100644
index 0000000000..e4656a3a77
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_extreme_index.rst
@@ -0,0 +1,96 @@
+.. _recipes_extreme_index:
+
+Combined Climate Extreme Index
+==============================
+
+Overview
+--------
+
+The goal of this diagnostic is to compute time series of a number of extreme events: heatwave, coldwave, heavy precipitation, drought and high wind. The user can then combine these different components (with or without weights). The result is an index similar to the Climate Extremes Index (CEI; Karl et al., 1996), the modified CEI (mCEI; Gleason et al., 2008) or the Actuaries Climate Index (ACI; American Academy of Actuaries, 2018). The output consists of a netcdf file containing the area-weighted and multi-model multi-metric index. This recipe expects data of daily temporal resolution, and the running average is computed based on the user-defined window length (e.g. a window length of 5 would compute the 5-day running mean).
+
+In recipe_extreme_index.yml, after defining the area and the reference and projection periods, the weights for each selected metric are set. The options are:
+
+* weight_t90p: the weight of the number of days when the maximum temperature exceeds the 90th percentile,
+
+* weight_t10p: the weight of the number of days when the minimum temperature falls below the 10th percentile,
+
+* weight_Wx: the weight of the number of days when wind power (third power of wind speed) exceeds the 90th percentile,
+
+* weight_cdd: the weight of the maximum length of a dry spell, defined as the maximum number of consecutive days when the daily precipitation is lower than 1 mm, and
+
+* weight_rx5day: the weight of the maximum precipitation accumulated during 5 consecutive days.
+
+Available recipes and diagnostics
+---------------------------------
+
+Recipes are stored in recipes/
+
+* recipe_extreme_index.yml
+
+Diagnostics are stored in diag_scripts/magic_bsc/
+
+* extreme_index.R
+
+
+User settings
+-------------
+
+User setting files are stored in recipes/
+
+#. recipe_extreme_index.yml
+
+   *Required settings for script*
+
+   * weight_t90p: 0.2 (from 0 to 1, the total sum of the weights should be 1)
+   * weight_t10p: 0.2 (from 0 to 1, the total sum of the weights should be 1)
+   * weight_Wx: 0.2 (from 0 to 1, the total sum of the weights should be 1)
+   * weight_rx5day: 0.2 (from 0 to 1, the total sum of the weights should be 1)
+   * weight_cdd: 0.2 (from 0 to 1, the total sum of the weights should be 1)
+   * running_mean: 5 (depends on the length of the future projection period selected, but recommended not greater than 11)
+
+Variables
+---------
+
+* tasmax (atmos, daily, longitude, latitude, time)
+* tasmin (atmos, daily, longitude, latitude, time)
+* sfcWind (atmos, daily, longitude, latitude, time)
+* pr (atmos, daily, longitude, latitude, time)
+
+
+Observations and reformat scripts
+---------------------------------
+
+*None*
+
+References
+----------
+
+* Alexander, L.V. and Coauthors (2006). Global observed changes in daily climate extremes of temperature and precipitation. J. Geophys. Res., 111, D05109. https://doi.org/10.1029/2005JD006290
+
+* American Academy of Actuaries, Canadian Institute of Actuaries, Casualty Actuarial Society and Society of Actuaries. Actuaries Climate Index. http://actuariesclimateindex.org (2018-10-06).
+
+* Donat, M., and Coauthors (2013). Updated analyses of temperature and precipitation extreme indices since the beginning of the twentieth century: The HadEX2 dataset. J. Geophys. Res., 118, 2098–2118. https://doi.org/10.1002/jgrd.50150
+
+* Fouillet, A., Rey, G., Laurent, F., Pavillon, G., Bellec, S., Guihenneuc-Jouyaux, C., Clavel, J., Jougla, E. and Hémon, D. (2006). Excess mortality related to the August 2003 heat wave in France. Int. Arch. Occup. Environ. Health, 80, 16–24. https://doi.org/10.1007/s00420-006-0089-4
+
+* Gleason, K.L., J.H. Lawrimore, D.H. Levinson, T.R. Karl, and D.J. Karoly (2008). A Revised U.S. Climate Extremes Index. J. Climate, 21, 2124-2137. https://doi.org/10.1175/2007JCLI1883.1
+
+* Meehl, G. A., and Coauthors (2000). An introduction to trends in extreme weather and climate events: Observations, socio-economic impacts, terrestrial ecological impacts, and model projections. Bull. Amer. Meteor. Soc., 81, 413–416. `doi: 10.1175/1520-0477(2000)081<0413:AITTIE>2.3.CO;2 `_
+
+* Whitman, S., G. Good, E. R. Donoghue, N. Benbow, W. Y. Shou and S. X. Mou (1997). Mortality in Chicago attributed to the July 1995 heat wave. Amer. J. Public Health, 87, 1515–1518. https://doi.org/10.2105/AJPH.87.9.1515
+
+* Zhang, Y., M. Nitschke, and P. Bi (2013). Risk factors for direct heat-related hospitalization during the 2009 Adelaide heat-wave: A case crossover study. Sci. Total Environ., 442, 1–5. https://doi.org/10.1016/j.scitotenv.2012.10.042
+
+* Zhang, X., Alexander, L., Hegerl, G. C., Jones, P., Tank, A. K., Peterson, T. C., Trewin, B. and Zwiers, F. W. (2011). Indices for monitoring changes in extremes based on daily temperature and precipitation data. WIREs Clim Change, 2, 851-870. https://doi.org/10.1002/wcc.147
+
+
+
+Example plots
+-------------
+
+.. _fig_combinedindices1:
+.. figure:: /recipes/figures/recipe_extreme_index/t90p_IPSL-CM5A-LR_rcp85_2020_2040.png
+   :align: center
+   :width: 14cm
+
+   Average change in the heat component (t90p metric) of the Combined Climate Extreme Index for the period 2020-2040 compared to the 1971-2000 reference period for the RCP 8.5 scenario simulated by MPI-ESM-MR.
diff --git a/doc/sphinx/source/recipes/recipe_eyring06jgr.rst b/doc/sphinx/source/recipes/recipe_eyring06jgr.rst
new file mode 100644
index 0000000000..1f072d0dba
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_eyring06jgr.rst
@@ -0,0 +1,81 @@
+.. _recipe_eyring06jgr:
+
+Diagnostics of stratospheric dynamics and chemistry
+===================================================
+
+Overview
+--------
+
+This recipe reproduces the figures of `Eyring et al. (2006)`_.
+The following plots are reproduced:
+
+* Vertical profile of the climatological mean bias for selected seasons and latitudinal regions.
+* Vertical and latitudinal profile of the climatological mean for selected seasons; this figure and setting is valid for figure 5 (CH4), figure 6 (H2O), figure 11 (HCl) and figure 13 (tro3).
+* Total ozone anomalies at different latitudinal bands and seasons.
+
+.. _`Eyring et al. (2006)`: https://agupubs.onlinelibrary.wiley.com/doi/full/10.1029/2006JD007327
(2006)`: https://agupubs.onlinelibrary.wiley.com/doi/full/10.1029/2006JD007327 + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in esmvaltool/recipes/ + +* recipe_eyring06jgr.yml + +Diagnostics are stored in esmvaltool/diag_scripts/eyring06jgr/ + +* eyring06jgr_fig01.ncl +* eyring06jgr_fig05a.ncl +* eyring06jgr_fig05b.ncl +* eyring06jgr_fig15.ncl + +User settings in recipe +----------------------- +#. Preprocessor + + * ``regrid_interp_lev_zonal``: Regridding and interpolation reference_dataset levels used by eyring06jgr_fig01 and eyring06jgr_fig05 + * ``zonal`` : Regridding and zonal mean used by eyring06jgr_fig15 + + +#. Script + + *Required settings for script* + + * ``latmin``: array of float, min lat where variable is averaged, i.e. [60., 60., -90., -90. ] + * ``latmax``: array of float,and max lat where variable is averaged, i.e. [90., 90., -60., -60. ] + * ``season``: array of string., season when variable is averaged, i.e. ["DJF", "MAM", "JJA", "SON"] + * ``XMin``: array of float, min limit X axis [-30., -30., -30., -30.] + * ``XMax``: array of float, max limit X axis [20., 20., 20., 20.] + * ``levmin``: array of float, min limit Y axis [1., 1., 1., 1.] + * ``levmax``: array of float, max limit Y axis [350., 350., 350., 350.] + + + *Optional settings for script* + + * ``start_year``: int, year when start the climatology calculation [1980] (default max among the models start year). + * ``end_year``:int, year when end the climatology calculation [1999] (default min among the models end year). + * ``multimean``: bool, calculate multi-model mean, (i.e. False/True) (default False). + + *Required settings for variables* + + * ``preprocessor``: regrid_interp_lev. + * ``reference_dataset``: name of the reference model or observation for regridding and bias calculation (e.g. ERA-Interim"). + * ``mip``: Amon. + + + +Variables +--------- + +* ta (atmos, monthly mean, longitude latitude level time) + + + +Example plots +------------- + +.. _fig_eyring06jgr_01: +.. figure:: /recipes/figures/eyring06jgr/fig_diagn01.png + :align: center + + Climatological mean temperature biases for (top) 60–90N and (bottom) 60–90S for the (left) winter and (right) spring seasons. The climatological means for the CCMs and ERA-Interim data from 1980 to 1999 are included. Biases are calculated relative to ERA-Interim reanalyses. The grey area shows ERA-Interim plus and minus 1 standard deviation (s) about the climatological mean. The turquoise area shows plus and minus 1 standard deviation about the multi-model mean. diff --git a/doc/sphinx/source/recipes/recipe_eyring13jgr.rst b/doc/sphinx/source/recipes/recipe_eyring13jgr.rst new file mode 100644 index 0000000000..7e38e1aebd --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_eyring13jgr.rst @@ -0,0 +1,72 @@ +.. _recipe_eyring13jgr: + +Ozone and associated climate impacts +==================================== + +Overview +-------- + +This recipe is implemented into the ESMValTool to evaluate atmospheric chemistry and the climate impact of stratospheric ozone changes. It reproduces selected plots from Eyring et al. (2013). + +The following plots are reproduced: + +* Zonal mean of long-term zonal wind with linear trend + +.. _`Eyring et al. 
(2013)`: https://agupubs.onlinelibrary.wiley.com/doi/full/10.1002/jgrd.50316 + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in esmvaltool/recipes/ + +* recipe_eyring13jgr_12.yml + +Diagnostics are stored in esmvaltool/diag_scripts/eyring13jgr/ + +* eyring13jgr_fig12.ncl + +User settings in recipe +----------------------- +#. Preprocessor + + * ``zonal`` : Regridding and zonal mean used by eyring13jgr_fig12 + +#. Script + + *Required settings for script* + + * ``e13fig12_exp_MMM``: name of the experiments for the MMM + + *Optional settings for script* + + * ``e13fig12_start_year``: year when to start the climatology calculation + * ``e13fig12_end_year``: year when to end the climatology calculation + * ``e13fig12_multimean``: calculate multimodel mean (default: False) + * ``e13fig12_season``: season (default: ANN (annual)) + + *Required settings for variables* + + * ``preprocessor``: zonal + * ``reference_dataset``: name of the reference model or observation for regridding and bias calculation (e.g. ERA5). + * ``mip``: Amon. + +Variables +--------- + +* ua (atmos, monthly mean, longitude latitude level time) + +Observations and reformat scripts +--------------------------------- + +* ERA5 + *Reformatting with:* recipes/cmorizers/recipe_era5.yml + + +Example plots +------------- + +.. _fig_eyring13jgr_12: +.. figure:: /recipes/figures/eyring13jgr/fig_eyr13jgr_12.png + :align: center + + Long-term mean (thin black contour) and linear trend (colour) of zonal mean DJF zonal winds for the multi-model mean CMIP6 over 1995-2014 diff --git a/doc/sphinx/source/recipes/recipe_flato13ipcc.rst b/doc/sphinx/source/recipes/recipe_flato13ipcc.rst deleted file mode 100644 index 80f1d19f11..0000000000 --- a/doc/sphinx/source/recipes/recipe_flato13ipcc.rst +++ /dev/null @@ -1,4 +0,0 @@ -IPCC AR5 Chapter 9 -================== - -Description will be ported from v1 diff --git a/doc/sphinx/source/recipes/recipe_galytska23jgr.rst b/doc/sphinx/source/recipes/recipe_galytska23jgr.rst new file mode 100644 index 0000000000..1bcd4f8126 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_galytska23jgr.rst @@ -0,0 +1,91 @@ +.. _recipe_galytska23jgr: + +Timeseries for Arctic-Midlatitude Teleconnections +================================================= + +Overview +-------- + +The recipe produces the timeseries of selected variables to study Arctic-midlatitude teleconnections for further application of Causal Model Evaluation (CME) described +in Galytska et al. (2023). + +The output of the recipe consists of the .nc files named after the data source (e.g. ERA5, ACCESS-CM2.nc etc.). +Each file contains the area-weighted spatial average of climatological monthly anomalies of selected variables. +The recipe also applies the CVDP package. + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in esmvaltool/recipes/ + +* recipe_galytska23jgr.yml + +Diagnostics are stored in esmvaltool/diag_scripts/ + +* galytska23/select_variables_for_tigramite.py +* cvdp/cvdp_wrapper.py + +User settings in recipe +----------------------- +#. Preprocessor + + * ``anomalies`` (*period: monthly*): Calculate anomalies for selected variables + * ``regrid`` (*target_grid: 1x1*): Linear regridding of all datasets to the uniform grid + * ``area_statistics`` (*operation: mean*): Calculate mean over defined regions + +#. Script + + none + +#. Script + + none + +#. 
+#. Script
+
+   * ``modular`` (*modular: True*): Run the diagnostics that are selected in ``modular_list``
+   * ``modular_list`` (*modular_list: psl.nam_nao*): Calculate only the NAO diagnostics
+
+Variables
+---------
+
+* zg (atmos, monthly mean, longitude latitude time)
+* tas (atmos, monthly mean, longitude latitude time)
+* psl (atmos, monthly mean, longitude latitude time)
+* va (atmos, monthly mean, longitude latitude time)
+* ta (atmos, monthly mean, longitude latitude time)
+* sic/siconc (seaice, monthly mean, longitude latitude time)
+* ts (atmos, monthly mean, longitude latitude time)
+* pr (atmos, monthly mean, longitude latitude time)
+
+Observations and reformat scripts
+---------------------------------
+
+* ERA5 (pr, psl, ta, tas, ts, va, zg - ERA5 data can be used via the native6 project)
+
+* HadISST (sic - esmvaltool/cmorizers/data/formatters/datasets/hadisst.ncl)
+
+References
+----------
+
+* Galytska, E., Weigel, K., Handorf, D., Jaiser, R., Köhler, R. H.,
+  Runge, J., & Eyring, V.: Causal model evaluation of Arctic-midlatitude
+  teleconnections in CMIP6. Authorea Preprints.
+  https://doi.org/10.1002/essoar.10512569.1
+
+* Copernicus Climate Change Service (C3S), 2017: ERA5: Fifth generation of
+  ECMWF atmospheric reanalyses of the global climate, edited, Copernicus
+  Climate Change Service Climate Data Store (CDS).
+  https://cds.climate.copernicus.eu/cdsapp#!/home
+
+* http://www.cesm.ucar.edu/working_groups/CVC/cvdp/
+
+Example plots
+-------------
+
+.. figure:: /recipes/figures/galytska23jgr/Timeseries_Arctic_temperature_anomalies.png
+   :align: center
+
+   Monthly mean temperature anomalies in the Arctic (65°–90°N) from observations and selected CMIP6 models during 1980-2021.
\ No newline at end of file
diff --git a/doc/sphinx/source/recipes/recipe_gier20bg.rst b/doc/sphinx/source/recipes/recipe_gier20bg.rst
new file mode 100644
index 0000000000..b8f8fb9b8e
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_gier20bg.rst
@@ -0,0 +1,225 @@
+.. _recipes_gier20bg:
+
+Spatially resolved evaluation of ESMs with satellite column-averaged CO\ :sub:`2`
+=================================================================================
+
+Overview
+--------
+
+This recipe reproduces the figures of Gier et al. (2020). It uses satellite
+column-averaged CO\ :sub:`2` data to evaluate ESMs by plotting several
+quantities such as timeseries, seasonal cycle and growth rate in different
+areas.
+
+Available recipes and diagnostics
+---------------------------------
+
+Recipes are stored in esmvaltool/recipes/
+
+ * recipe_gier20bg.yml
+
+Diagnostics are stored in esmvaltool/diag_scripts/xco2_analysis/
+
+ * carbon_plots.ncl: plot script for panel plots
+ * delta_T.ncl: IAV of growth rate against growing season temperature - Figure C1
+ * global_maps.ncl: global maps for seasonal cycle amplitude - Figures 5, 6
+ * main.ncl: timeseries and histogram - Figures 3, 4
+ * panel_plots.ncl: scatter plot of SCA/GR vs variable - Figures 7, 9, B1, B2
+ * sat_masks.ncl: data coverage of input data - Figures 1, 8
+ * stat.ncl: auxiliary functions for GR, SCA computation
+ * station_comparison.ncl: comparison of surface and column data - Figure 2
+
+
+User settings in recipe
+-----------------------
+
+#. Preprocessor
+
+   * ``conv_units``: converts units to plot-units
+   * ``mmm_ref``: calculates the multi-model mean and regrids to the reference dataset
+   * ``mmm_2x2``: computes the multi-model mean on a 2x2 grid
+   * ``mmm``: computes the multi-model mean for the 3D variable, 5x5 grid with specific
+     pressure levels
+
+#. Script xco2_analysis/delta_T.ncl
+
+   * Required diag_script_info attributes:
+
+     * ``region``: region to average over
+     * ``masking``: the kind of masking to apply prior to the region average
+       (possible options: obs, land, sciamachy, gosat, none)
+     * ``var_order``: first the main variable, then the temperature variable to compare
+
+   * Optional diag_script_info attributes:
+
+     * ``styleset``: styleset for color coding panels
+     * ``output_file_type``: output file type for plots; default: png
+     * ``var_plotname``: NCL string formatting for how the variable should be named in plots;
+       defaults to short_name if not assigned.
+
+#. Script xco2_analysis/global_maps.ncl:
+
+   * Required diag_script_info attributes:
+
+     * ``contour_max_level``: maximum value displayed for the seasonal cycle
+       amplitude contour plot
+
+   * Optional diag_script_info attributes:
+
+     * ``output_file_type``: output file type for plots; default: png
+
+#. Script xco2_analysis/main.ncl:
+
+   * Required diag_script_info attributes:
+
+     * ``styleset``: styleset to use for plotting colors, linestyles, ...
+     * ``region``: latitude range for averaging
+     * ``masking``: different masking options are available to use on the dataset
+       (possible options: none, obs)
+     * ``ensemble_mean``: if true, calculates the multi-model mean only
+       accounting for the ensemble member named in "ensemble_refs"
+
+   * Optional diag_script_info attributes:
+
+     * ``output_file_type``: output file type for plots; default: png
+     * ``ensemble_refs``: list of model-ensemble pairs to denote which ensemble
+       member to use for calculating the multi-model mean; required if
+       ensemble_mean = true
+     * ``var_plotname``: string formatting for how the variable should be named in plots;
+       defaults to short_name if not assigned
+
+#. Script xco2_analysis/panel_plots.ncl:
+
+   * Required diag_script_info attributes:
+
+     * ``styleset``: styleset to use for plotting colors, linestyles, ...
+     * ``region``: latitude range for averaging
+     * ``masking``: different masking options are available to use on the dataset
+       (possible options: obs, land, sciamachy, gosat, none)
+     * ``obs_in_panel``: True if observations should be included in the plot
+     * ``area_avg``: type of area averaging: "full-area" for a normal area average,
+       "lat-first" to calculate zonal means first, then average these
+     * ``plot_var2_mean``: if True, adds the mean of the seasonal cycle to the panel as a string.
+
+   * Optional diag_script_info attributes:
+
+     * ``output_file_type``: output file type for plots; default: png
+     * ``var_plotname``: string formatting for how the variable should be named in plots;
+       defaults to short_name if not assigned
+
+#. Script xco2_analysis/sat_masks.ncl:
+
+   * Optional diag_script_info attributes:
+
+     * ``output_file_type``: output file type for plots; default: png
+     * ``var_plotname``: string formatting for how the variable should be named in plots;
+       defaults to short_name if not assigned
+     * ``c3s_plots``: missing value plots separated by timeseries of the C3S satellites
+
+#. Script xco2_analysis/station_comparison.ncl:
+
+   * Required diag_script_info attributes:
+
+     * ``var_order``: in this case xco2, co2, co2s - the column-averaged variable with the
+       obs dataset first, then the 2D variable, followed by the surface stations
+
+   * Optional diag_script_info attributes:
+
+     * ``output_file_type``: output file type for plots; default: png
+     * ``var_plotnames``: string formatting for how the variables should be named in plots;
+       defaults to short_name if not assigned
+     * ``overwrite_altitudes``: provide altitude values other than the ones attached to the
+       station data. Useful if the altitude changed and the timeseries spans a range with
+       different sampling altitudes. Caveat: if used, altitude values need to be given for
+       all stations.
+     * ``output_map``: boolean, whether the stations are to be displayed on a map. As this
+       requires fine-tuning, it is currently only implemented for the station set
+       (ASK, CGO, HUN, LEF, WIS) used in the paper. Change the plot inset locations if
+       other stations are desired.
+
+Variables
+---------
+
+* *xco2* (atmos, monthly, longitude, latitude, time)
+* *co2s* (atmos, monthly, longitude, latitude, time)
+* *co2* (atmos, monthly, pressure, longitude, latitude, time)
+* *tas* (atmos, monthly, longitude, latitude, time)
+* *tasa* (atmos, monthly, longitude, latitude, time)
+
+
+Observations and reformat scripts
+---------------------------------
+
+* CDS-XCO2_ (*xco2*)
+* ESRL_ (*co2s*)
+* GISTEMP_ (*tasa*)
+* MODIS_ (land cover map, auxiliary data folder)
+
+.. _ESRL: https://www.esrl.noaa.gov/gmd/dv/data/
+.. _GISTEMP: https://data.giss.nasa.gov/gistemp/
+.. _CDS-XCO2: https://cds.climate.copernicus.eu/cdsapp#!/dataset/satellite-carbon-dioxide?tab=form
+.. _MODIS: https://daac.ornl.gov/cgi-bin/dsviewer.pl?ds_id=968
+
+References
+----------
+
+* Gier, B. K., Buchwitz, M., Reuter, M., Cox, P. M., Friedlingstein, P.,
+  and Eyring, V.: Spatially resolved evaluation of Earth system models with
+  satellite column-averaged CO2, Biogeosciences, 17, 6115–6144,
+  https://doi.org/10.5194/bg-17-6115-2020, 2020.
+
+Example plots
+-------------
+
+.. _fig_gier20bg_1:
+.. figure:: /recipes/figures/gier20bg/fig01.png
+   :align: center
+   :width: 80%
+
+   Mean fractional coverage of monthly satellite data.
+
+.. _fig_gier20bg_2:
+.. figure:: /recipes/figures/gier20bg/fig02.png
+   :align: center
+   :width: 80%
+
+   Comparison of time series from satellite, in situ, and models sampled
+   accordingly. Caveat: inset plot positions are hardcoded.
+
+.. _fig_gier20bg_3:
+.. figure:: /recipes/figures/gier20bg/fig03.png
+   :align: center
+   :width: 70%
+
+   Timeseries with panels depicting growth rate and seasonal cycle.
+
+.. _fig_gier20bg_4:
+.. figure:: /recipes/figures/gier20bg/fig04.png
+   :align: center
+   :width: 50%
+
+   Barplot of the growth rate, averaged over all years, with the standard deviation
+   of the interannual variability.
+
+.. _fig_gier20bg_5:
+.. figure:: /recipes/figures/gier20bg/fig05.png
+   :align: center
+   :width: 80%
+
+   Panel plot of the spatially resolved seasonal cycle amplitude for all models,
+   including a zonal average sidepanel.
+
+.. _fig_gier20bg_6:
+.. figure:: /recipes/figures/gier20bg/fig06.png
+   :align: center
+   :width: 60%
+
+   Seasonal cycle amplitude map comparing the influence of sampling, and the difference
+   to observations.
+
+.. _fig_gier20bg_7:
+.. figure:: /recipes/figures/gier20bg/fig07.png
+   :align: center
+   :width: 50%
+
+   Panel plots showing the seasonal cycle amplitude against XCO\ :sub:`2`, including the
+   regression line and p-value.
+
+.. _fig_gier20bg_8:
+.. figure:: /recipes/figures/gier20bg/fig08.png
+   :align: center
+   :width: 50%
+
+   Mean spatial data coverage for different satellites.
diff --git a/doc/sphinx/source/recipes/recipe_heatwaves_coldwaves.rst b/doc/sphinx/source/recipes/recipe_heatwaves_coldwaves.rst
index 91b372a160..019508ae80 100644
--- a/doc/sphinx/source/recipes/recipe_heatwaves_coldwaves.rst
+++ b/doc/sphinx/source/recipes/recipe_heatwaves_coldwaves.rst
@@ -1,14 +1,14 @@
 .. _recipes_heatwaves_coldwaves:
 
 Heat wave and cold wave duration
-====================================================
+================================
 
 Overview
 --------
 
-The goal of this diagnostic is to estimate the relative change in heat/cold wave characteristics in future climates compared to a reference period using daily maximum or minimum temperatures.
+The goal of this diagnostic is to estimate the relative change in heat/cold wave characteristics in future climates compared to a reference period using daily maximum or minimum temperatures.
 
-The user can select whether to compute the frequency of exceedances or non-exceedances, which corresponds to extreme high or extreme low temperature events, respectively. The user can also select the minimum duration for an event to be classified as a heat/cold wave and the season of interest.
+The user can select whether to compute the frequency of exceedances or non-exceedances, which corresponds to extreme high or extreme low temperature events, respectively. The user can also select the minimum duration for an event to be classified as a heat/cold wave and the season of interest.
 
 The diagnostic calculates the number of days in which the temperature exceeds or does not exceed the necessary threshold for a consecutive number of days in future climate projections. The result is an annual time series of the total number of heat/cold wave days for the selected season at each grid point. The final output is the average number of heat/cold wave days for the selected season in the future climate projections.
 
@@ -21,7 +21,7 @@ Recipes are stored in recipes/
 
 Diagnostics are stored in diag_scripts/magic_bsc/
 
-* extreme_spells.r: calculates the heatwave or coldwave duration.
+* extreme_spells.R: calculates the heatwave or coldwave duration.
 
 
 User settings
@@ -69,3 +69,4 @@ Example plots
    :align: center
    :width: 14cm
 
+   Mean number of summer days during the period 2060-2080 when the daily maximum near-surface air temperature exceeds the 80th quantile of the 1971-2000 reference period. The results are based on one RCP 8.5 scenario simulated by BCC-CSM1-1.
diff --git a/doc/sphinx/source/recipes/recipe_hydro_forcing.rst b/doc/sphinx/source/recipes/recipe_hydro_forcing.rst
new file mode 100644
index 0000000000..fd0007a444
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_hydro_forcing.rst
@@ -0,0 +1,99 @@
+.. _recipes_hydro_forcing:
+
+Hydro forcing comparison
+========================
+
+Overview
+--------
+
+This recipe can be used to assess the agreement between forcing datasets
+(e.g. MSWEP, ERA5, ERA-Interim) for a defined catchment. The recipe can be used
+to:
+
+1. Plot a timeseries of the raw daily data
+2. Plot monthly aggregated data over a defined period
+3. Plot the monthly / daily climatology statistics over a defined period
+
+
+Available recipes and diagnostics
+---------------------------------
+
+Recipes are stored in esmvaltool/recipes/hydrology
+
+ * ``recipe_hydro_forcing.yml``
+
+Diagnostics are stored in esmvaltool/diag_scripts/hydrology/
+
+ * ``hydro_forcing.py``: compares and plots precipitation for the MSWEP / ERA5 / ERA-Interim datasets
+
+
+User settings in recipe
+-----------------------
+
+All hydrological recipes require a shapefile as an input to select the forcing data. This shapefile determines the shape of the basin for which the data will be cut out and processed. All recipes are tested with `the shapefiles `_ from HydroSHEDS that are used for the eWaterCycle project. In principle any shapefile can be used, for example, the freely available basin shapefiles from the `HydroSHEDS project `_.
+
+#. recipe ``hydrology/hydro_forcing.yml``
+
+   *Optional preprocessor settings:*
+
+   * ``extract_shape``: The region specified here should match the catchment
+
+   *Required settings for script:*
+
+   * ``plot_type``: Define which plot function to run. Choices:
+
+     * ``timeseries``: Plot a timeseries of the variable data over the defined period
+     * ``climatology``: Plot the climate statistics over the defined period
+
+   *Required settings for ``timeseries`` plots:*
+
+   * ``time_period``: Defines the period of the output for the correct captions/labels. This value should match the period used for the preprocessor. Choices: ``day``, ``month``.
+
+
+
+Variables
+---------
+
+* pr (atmos, daily or monthly, longitude, latitude, time)
+
+
+Observations
+------------
+
+All data can be used directly without any preprocessing.
+
+* ERA-Interim
+* ERA5
+* MSWEP
+
+.. References
+.. ----------
+
+.. * xxx
+
+Example plots
+-------------
+
+.. _fig_hydro_forcing_1:
+.. figure:: /recipes/figures/hydrology/Precipitation_day_plot.png
+   :align: center
+
+   Precipitation per day for 2015-01-01:2016-12-31.
+
+.. _fig_hydro_forcing_2:
+.. figure:: /recipes/figures/hydrology/Precipitation_month_plot.png
+   :align: center
+
+   Precipitation per month for 2015-01:2016-12.
+
+.. _fig_hydro_forcing_3:
+.. figure:: /recipes/figures/hydrology/Precipitation_climatology_month_number_plot.png
+   :align: center
+
+   Precipitation climatology statistics per month number.
+
+.. _fig_hydro_forcing_4:
+.. figure:: /recipes/figures/hydrology/Precipitation_climatology_day_of_year_plot.png
+   :align: center
+
+   Precipitation climatology statistics per day of year.
diff --git a/doc/sphinx/source/recipes/recipe_hydrology.rst b/doc/sphinx/source/recipes/recipe_hydrology.rst
new file mode 100644
index 0000000000..995a70b3ae
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_hydrology.rst
@@ -0,0 +1,266 @@
+.. _recipes_hydrology:
+
+Hydrological models - data pre-processing
+=========================================
+
+Overview
+--------
+
+We provide a collection of scripts that pre-process environmental data for use in several hydrological models:
+
+PCR-GLOBWB
+**********
+PCR-GLOBWB (PCRaster Global Water Balance) is a large-scale hydrological model intended for global to regional studies and developed at the Department of Physical Geography, Utrecht University (Netherlands). The recipe pre-processes ERA-Interim reanalysis data for use in PCR-GLOBWB; a minimal dataset entry is sketched below.
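+For orientation, the one-year period required by this recipe (see the
+preprocessor settings later in this section) could be requested with a dataset
+entry like the following. This is a hypothetical sketch; the exact
+``project``/``type``/``tier`` keys depend on how the ERA-Interim data were
+CMORized locally:
+
+.. code-block:: yaml
+
+   datasets:
+     - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1,
+        tier: 3, start_year: 1979, end_year: 1979}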
+
+MARRMoT
+*******
+MARRMoT (Modular Assessment of Rainfall-Runoff Models Toolbox) is a rainfall-runoff model comparison framework that allows objective comparison between different conceptual hydrological model structures (https://github.com/wknoben/MARRMoT). The recipe pre-processes ERA-Interim and ERA5 reanalysis data for use in MARRMoT.
+
+MARRMoT requires potential evapotranspiration (evspsblpot). The variable evspsblpot is not available in ERA-Interim. Thus, we use the debruin function (De Bruin et al. 2016; sketched at the end of this overview) to obtain evspsblpot using both ERA-Interim and ERA5. This function needs the variables tas, psl, rsds, and rsdt as input.
+
+wflow_sbm and wflow_topoflex
+****************************
+Forcing data for the `wflow_sbm `_
+and `wflow_topoflex `_
+hydrological models can be prepared using recipe_wflow.yml.
+If PET is not available from the source data (e.g. ERA-Interim), it can be derived from tas, psl, rsds and rsdt using De Bruin's formula (De Bruin et al. 2016). For daily ERA5 data, the time points of these variables are shifted 30 minutes with respect to one another. This is because in ERA5, accumulated variables are recorded over the past hour, and in the process of cmorization, we shift the time coordinates to the middle of the interval over which they are accumulated. However, computing daily statistics then averages the times, which results in 12:00 UTC for accumulated variables and 11:30 UTC for instantaneous variables. Therefore, in this diagnostic, the time coordinates of the daily instantaneous variables are shifted 30 minutes forward in time.
+
+LISFLOOD
+********
+`LISFLOOD `_ is a spatially distributed water resources model, developed by the Joint Research Centre (JRC) of the European Commission since 1997. We provide a recipe to produce meteorological forcing data for the Python 3 version of LISFLOOD.
+
+LISFLOOD has a separate preprocessor, LISVAP, that derives some additional variables. We do not replace LISVAP. Rather, we provide input files that can readily be passed to LISVAP and then to LISFLOOD.
+
+
+HYPE
+****
+
+The hydrological catchment model HYPE simulates water flow and substances on their way from precipitation through soil, river and lakes to the river outlet.
+HYPE is developed at the Swedish Meteorological and Hydrological Institute. The recipe pre-processes ERA-Interim and ERA5 data for use in HYPE.
+
+GlobWat
+*******
+GlobWat is a soil water balance model that has been provided by the Food and Agriculture Organization (FAO) to assess water use in irrigated agriculture (http://www.fao.org/nr/water/aquamaps). The recipe pre-processes ERA-Interim and ERA5 reanalysis data for use in the GlobWat model. GlobWat requires potential evapotranspiration (evspsblpot) as input. The variable evspsblpot is not available in ERA-Interim. Thus, we use the debruin function (De Bruin et al. 2016) or the Langbein method (Langbein, 1949) to obtain evspsblpot using both ERA-Interim and ERA5. The Langbein function needs the variable tas as input; the debruin function additionally needs the variables psl, rsds, and rsdt. To calculate monthly/daily PET with the Langbein method, we assume that tas is constant over time and its average value is equal to the annual average.
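+As referenced above, the De Bruin et al. (2016) reference evaporation takes,
+as we read their Eq. 6, the schematic form
+
+.. math::
+
+   \lambda E_{\mathrm{ref}} = \frac{\Delta}{\Delta + \gamma}
+   \left[ (1 - \alpha)\, K^{\downarrow}
+   - c_s\, \frac{K^{\downarrow}}{K^{\downarrow}_{\mathrm{ext}}} \right] + \beta,
+
+where :math:`\Delta` is the slope of the saturation vapour pressure curve at
+the temperature tas, :math:`\gamma` the psychrometric constant (computed from
+psl), :math:`K^{\downarrow}` the surface downwelling shortwave radiation
+(rsds), :math:`K^{\downarrow}_{\mathrm{ext}}` the top-of-atmosphere incident
+shortwave radiation (rsdt), :math:`\alpha` an albedo and :math:`c_s`,
+:math:`\beta` empirical constants; evspsblpot then follows by dividing by the
+latent heat of vaporization :math:`\lambda`. This is only a sketch of our
+reading of the paper; the hydrology diagnostics contain the authoritative
+implementation.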
+
+
+Available recipes and diagnostics
+---------------------------------
+
+Recipes are stored in esmvaltool/recipes/hydrology
+
+ * recipe_pcrglobwb.yml
+ * recipe_marrmot.yml
+ * recipe_wflow.yml
+ * recipe_lisflood.yml
+ * recipe_hype.yml
+ * recipe_globwat.yml
+
+Diagnostics are stored in esmvaltool/diag_scripts/hydrology
+
+ * pcrglobwb.py
+ * marrmot.py
+ * wflow.py
+ * lisflood.py
+ * hype.py
+ * globwat.py
+
+
+User settings in recipe
+-----------------------
+
+All hydrological recipes require a shapefile as an input to produce forcing data. This shapefile determines the shape of the basin for which the data will be cut out and processed. All recipes are tested with `the shapefiles `_ that are used for the eWaterCycle project. In principle any shapefile can be used, for example, the freely available basin shapefiles from the `HydroSHEDS project `_.
+
+#. recipe_pcrglobwb.yml
+
+   *Required preprocessor settings:*
+
+   * start_year: 1979
+   * end_year: 1979
+
+#. recipe_marrmot.yml
+
+   There is one diagnostic ``diagnostic_daily`` for using daily data.
+
+   *Required preprocessor settings:*
+
+   The settings below should not be changed.
+
+   *extract_shape:*
+
+   * shapefile: Meuse.shp (MARRMoT is a lumped hydrological model that needs catchment-aggregated forcing data. The catchment is provided as a shapefile; the path can be relative to :ref:`configuration option ` ``auxiliary_data_dir``).
+   * method: contains
+   * crop: true
+
+   *Required diagnostic script settings:*
+
+   * basin: Name of the catchment
+
+#. recipe_wflow.yml
+
+   *Optional preprocessor settings:*
+
+   * extract_region: the region specified here should match the catchment
+
+   *Required diagnostic script settings:*
+
+   * basin: name of the catchment
+   * dem_file: netcdf file containing a digital elevation model with
+     elevation in meters and coordinates latitude and longitude.
+     A wflow example dataset is available at: https://github.com/openstreams/wflow/tree/master/examples/wflow_rhine_sbm
+     The example dem_file can be obtained from https://github.com/openstreams/wflow/blob/master/examples/wflow_rhine_sbm/staticmaps/wflow_dem.map
+   * regrid: the regridding scheme for regridding to the digital elevation model. Choose ``area_weighted`` (slow) or ``linear``.
+
+#. recipe_lisflood.yml
+
+   *Required preprocessor settings:*
+
+   * extract_region: A region bounding box slightly larger than the shapefile. This is run prior to regridding, to save memory.
+
+   *extract_shape:*
+
+   * shapefile: A shapefile that specifies the extents of the catchment.
+
+   These settings should not be changed:
+
+   * method: contains
+   * crop: true
+
+   *regrid:*
+
+   * target_grid: Grid of the LISFLOOD input files
+
+   These settings should not be changed:
+
+   * lon_offset: true
+   * lat_offset: true
+   * scheme: linear
+
+   There is one diagnostic ``diagnostic_daily`` for using daily data.
+
+   *Required diagnostic script settings:*
+
+   * catchment: Name of the catchment, used in output filenames
+
+#. recipe_hype.yml
+
+   *Required preprocessor settings:*
+
+   * start_year: 1979
+   * end_year: 1979
+   * shapefile: Meuse_HYPE.shp (expects a shapefile with subcatchments)
+
+   These settings should not be changed:
+
+   * method: contains
+   * decomposed: true
+
+#. recipe_globwat.yml
+
+   *Required preprocessor settings:*
+
+   * start_year: 2004
+   * end_year: 2004
+   * target_grid_file: grid of the GlobWat input files. A target file has been generated from one of the GlobWat model's sample files (prc01wb.asc) for regridding the ERA5 and ERA-Interim datasets.
+     The ASCII file can be found at:
+     https://storage.googleapis.com/fao-maps-catalog-data/geonetwork/aquamaps/GlobWat-InputP1_prec.zip.
+     You can use the GDAL translator to convert the file from ASCII format to NetCDF format
+     by entering the following command into the terminal:
+     ``gdal_translate -of netCDF prc01wb.asc globwat_target_grid.nc``
+
+   *Optional preprocessor settings:*
+
+   * area_selection: A region bounding box to extract the data for a specific region. The area selection preprocessor can be used to process the data for a desired region. The data will be processed at the global scale if this preprocessor is commented out in the recipe.
+   * regrid_scheme: The area-weighted regridding scheme is used as the default regridding scheme to ensure that the total volume of water is consistent before and after regridding.
+   * langbein_pet: Can be set to True to use the Langbein function for calculating evspsblpot (the default is the De Bruin method)
+
+
+Variables
+---------
+
+#. recipe_pcrglobwb.yml
+
+   * tas (atmos, daily, longitude, latitude, time)
+   * pr (atmos, daily, longitude, latitude, time)
+
+#. recipe_marrmot.yml
+
+   * pr (atmos, daily or hourly mean, longitude, latitude, time)
+   * psl (atmos, daily or hourly mean, longitude, latitude, time)
+   * rsds (atmos, daily or hourly mean, longitude, latitude, time)
+   * rsdt (atmos, daily or hourly mean, longitude, latitude, time)
+   * tas (atmos, daily or hourly mean, longitude, latitude, time)
+
+#. recipe_wflow.yml
+
+   * orog (fx, longitude, latitude)
+   * pr (atmos, daily or hourly mean, longitude, latitude, time)
+   * tas (atmos, daily or hourly mean, longitude, latitude, time)
+
+   Either potential evapotranspiration can be provided:
+
+   * evspsblpot (atmos, daily or hourly mean, longitude, latitude, time)
+
+   or it can be derived from tas, psl, rsds, and rsdt using the De Bruin formula; in that case, the following variables need to be provided:
+
+   * psl (atmos, daily or hourly mean, longitude, latitude, time)
+   * rsds (atmos, daily or hourly mean, longitude, latitude, time)
+   * rsdt (atmos, daily or hourly mean, longitude, latitude, time)
+
+#. recipe_lisflood.yml
+
+   * pr (atmos, daily, longitude, latitude, time)
+   * tas (atmos, daily, longitude, latitude, time)
+   * tasmax (atmos, daily, longitude, latitude, time)
+   * tasmin (atmos, daily, longitude, latitude, time)
+   * tdps (atmos, daily, longitude, latitude, time)
+   * uas (atmos, daily, longitude, latitude, time)
+   * vas (atmos, daily, longitude, latitude, time)
+   * rsds (atmos, daily, longitude, latitude, time)
+
+#. recipe_hype.yml
+
+   * tas (atmos, daily or hourly, longitude, latitude, time)
+   * tasmin (atmos, daily or hourly, longitude, latitude, time)
+   * tasmax (atmos, daily or hourly, longitude, latitude, time)
+   * pr (atmos, daily or hourly, longitude, latitude, time)
+
+#. recipe_globwat.yml
+
+   * pr (atmos, daily or monthly, longitude, latitude, time)
+   * tas (atmos, daily or monthly, longitude, latitude, time)
+   * psl (atmos, daily or monthly, longitude, latitude, time)
+   * rsds (atmos, daily or monthly, longitude, latitude, time)
+   * rsdt (atmos, daily or monthly, longitude, latitude, time)
+
+Observations and reformat scripts
+---------------------------------
+
+*Note: download instructions can be obtained with* ``esmvaltool data info DATASET`` *or in the headers of the cmorization scripts.*
+
+* ERA-Interim (esmvaltool/cmorizers/data/formatters/datasets/era_interim.py)
+* ERA5 (esmvaltool/diag_scripts/cmorizers/era5.py)
+
+Output
+------
+
+#. recipe_pcrglobwb.yml
+
+#. recipe_marrmot.yml
+
+   The forcing data, the start and end times of the forcing data, and the latitude and longitude of the catchment are saved in a .mat file as a data structure readable by MATLAB or Octave.
+
+#. recipe_wflow.yml
+
+   The forcing data, stored in a single NetCDF file.
+
+#. recipe_lisflood.yml
+
+   The forcing data, stored in separate files per variable.
+
+#. recipe_globwat.yml
+
+   The forcing data, stored in separate files per timestep and variable.
+
+References
+----------
+
+* Sutanudjaja, E. H., van Beek, R., Wanders, N., Wada, Y., Bosmans, J. H. C., Drost, N., van der Ent, R. J., de Graaf, I. E. M., Hoch, J. M., de Jong, K., Karssenberg, D., López López, P., Peßenteiner, S., Schmitz, O., Straatsma, M. W., Vannametee, E., Wisser, D., and Bierkens, M. F. P.: PCR-GLOBWB 2: a 5 arcmin global hydrological and water resources model, Geosci. Model Dev., 11, 2429-2453, https://doi.org/10.5194/gmd-11-2429-2018, 2018.
+* De Bruin, H. A. R., Trigo, I. F., Bosveld, F. C., and Meirink, J. F.: A Thermodynamically Based Model for Actual Evapotranspiration of an Extensive Grass Field Close to FAO Reference, Suitable for Remote Sensing Application, J. Hydrometeorol., 17, 1373-1382, doi:10.1175/JHM-D-15-0006.1, 2016.
+* Arheimer, B., Lindström, G., Pers, C., Rosberg, J. and J. Strömqvist, 2008. Development and test of a new Swedish water quality model for small-scale and large-scale applications. XXV Nordic Hydrological Conference, Reykjavik, August 11-13, 2008. NHP Report No. 50, pp. 483-492.
+* Lindström, G., Pers, C.P., Rosberg, R., Strömqvist, J., Arheimer, B. 2010. Development and test of the HYPE (Hydrological Predictions for the Environment) model – A water quality model for different spatial scales. Hydrology Research 41.3-4:295-319.
+* van der Knijff, J. M., Younis, J. and de Roo, A. P. J.: LISFLOOD: A GIS-based distributed model for river basin scale water balance and flood simulation, Int. J. Geogr. Inf. Sci., 24(2), 189–212, 2010.
+* Hoogeveen, J., Faurès, J. M., Peiser, L., Burke, J., de Giesen, N. V.: GlobWat - a global water balance model to assess water use in irrigated agriculture, Hydrology and Earth System Sciences Discussions, 12(1), 2015, doi:10.5194/hess-19-3829-2015.
+* Langbein, W.B., 1949. Annual runoff in the United States. US Geol. Surv. (https://pubs.usgs.gov/circ/1949/0052/report.pdf)
diff --git a/doc/sphinx/source/recipes/recipe_hyint.rst b/doc/sphinx/source/recipes/recipe_hyint.rst
new file mode 100644
index 0000000000..ebdc75266e
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_hyint.rst
@@ -0,0 +1,162 @@
+.. _recipes_hyint:
+
+Hydroclimatic intensity and extremes (HyInt)
+============================================
+
+
+Overview
+--------
+The HyInt tool calculates a suite of hydroclimatic and climate extremes indices to perform a multi-index evaluation of climate models. The tool first computes a set of 6 indices that allow evaluation of the response of the hydrological cycle to global warming with a joint view of both wet and dry extremes. The indices were selected following Giorgi et al. (2014) and include the simple precipitation intensity index (SDII), the extreme precipitation index (R95), the maximum dry spell length (DSL) and wet spell length (WSL), the hydroclimatic intensity index (HY-INT), which is a measure of the overall behaviour of the hydroclimatic cycle (Giorgi et al., 2011), and the precipitation area (PA), i.e. the area over which on any given day precipitation occurs (Giorgi et al., 2014).
+Secondly, a selection of the 27 temperature and precipitation-based indices of extremes from the Expert Team on Climate Change Detection and Indices (ETCCDI), produced by the climdex (https://www.climdex.org) library, can be ingested to produce a multi-index analysis. The tool then allows a subsequent analysis of the selected indices, calculating timeseries and trends over predefined continental areas, normalized to a reference period. Trends are calculated using the R ``lm`` function, and significance testing is performed with a Student's t-test on the hypothesis of non-null coefficients. Trend coefficients are stored together with their statistics, which include the standard error, t value and Pr(>|t|). The tool can then produce a variety of plot types, including global and regional maps, maps of comparison between models and a reference dataset, timeseries with their spread, trend lines and summary plots of trend coefficients.
+
+The hydroclimatic indices calculated by recipe_hyint.yml and included in the output are defined as follows:
+
+* PRY = mean annual precipitation
+* INT = mean annual precipitation intensity (intensity during wet days, or simple precipitation intensity index SDII)
+* WSL = mean annual wet spell length (number of consecutive days during each wet spell)
+* DSL = mean annual dry spell length (number of consecutive days during each dry spell)
+* PA = precipitation area (area over which on any given day precipitation occurs)
+* R95 = heavy precipitation index (percent of total precipitation above the 95th percentile of the reference distribution)
+* HY-INT = hydroclimatic intensity. HY-INT = normalized(INT) x normalized(DSL).
+
+The recipe_hyint_extreme_events.yml includes an additional call to the :ref:`recipes_extreme_events` diagnostics, which allows the ETCCDI indices to be calculated and included in the subsequent analysis together with the hydroclimatic indices. All of the selected indices are then stored in output files and figures.
+
+
+Available recipes and diagnostics
+---------------------------------
+
+Recipes are stored in recipes/
+
+* recipe_hyint.yml (evaluating the 6 hydroclimatic indices, performing trend analysis and plotting)
+* recipe_hyint_extreme_events.yml (similar to recipe_hyint.yml but with an additional call to the :ref:`recipes_extreme_events` diagnostic for the calculation of ETCCDI indices and their inclusion in the trend analysis and plotting)
+
+Diagnostics are stored in diag_scripts/hyint/
+
+* hyint.R
+
+and subroutines
+
+* hyint_diagnostic.R
+* hyint_functions.R
+* hyint_parameters.R
+* hyint_plot_trends.R
+* hyint_etccdi_preproc.R
+* hyint_metadata.R
+* hyint_plot_maps.R
+* hyint_preproc.R
+* hyint_trends.R
+
+See details of the extreme_events diagnostics under recipe_extreme_events.yml.
+
+Known issues
+------------
+
+*recipe_hyint_extreme_events.yml*
+
+The call to the :ref:`recipes_extreme_events` diagnostic requires the ncdf4.helpers library, which is currently unavailable on CRAN. Users therefore need to install the library manually, e.g.
+through the following commands, which download the package tarball from the CRAN archive, install it and remove the tarball:
+
+.. code-block:: r
+
+   url <- "https://cran.r-project.org/src/contrib/Archive/ncdf4.helpers/ncdf4.helpers_0.3-3.tar.gz"
+   pkgFile <- "ncdf4.helpers_0.3-3.tar.gz"
+   download.file(url = url, destfile = pkgFile)
+   install.packages(pkgs = pkgFile, type = "source", repos = NULL)
+   unlink(pkgFile)
+
+User settings
+-------------
+
+*Required settings for script*
+
+* norm_years: first and last year of the reference normalization period to be used for the normalized indices
+
+* select_indices: indices to be analysed and plotted. Select one or more fields from the following list (order-sensitive): "pa_norm", "hyint", "int_norm", "r95_norm", "wsl_norm", "dsl_norm", "int", "dsl", "wsl"
+
+* select_regions: Select regions for timeseries and maps from the following list: GL=Globe, GL60=Global 60S/60N, TR=Tropics (30S/30N), SA=South America, AF=Africa, NA=North America, IN=India, EU=Europe, EA=East-Asia, AU=Australia
+
+* plot_type: type of figures to be plotted. Select one or more from: 1=lon/lat maps per individual field/exp/multi-year mean, 2=lon/lat maps per individual field exp-ref-diff/multi-year mean, 3=lon/lat maps multi-field/exp-ref-diff/multi-year mean, 11=timeseries over required individual region/exp, 12=timeseries over multiple regions/exp, 13=timeseries with multiple models, 14=summary trend coefficients multiple regions, 15=summary trend coefficients multiple models
+
+
+*Additional settings for recipe_hyint_extreme_events.yml*
+
+* call to the extreme_events diagnostics: see details in recipe_extreme_events.yml. Make sure that the base_range for extreme_events coincides with the norm_years of hyint and that all ETCCDI indices that are required to be imported in hyint are calculated by the extreme_events diagnostics.
+
+* etccdi_preproc: set to true to pre-process and include ETCCDI indices in hyint
+
+* etccdi_list_import: specify the list of ETCCDI indices to be imported, e.g.: "tn10pETCCDI", "tn90pETCCDI", "tx10pETCCDI", "tx90pETCCDI"
+
+* select_indices: this required setting should be revised here to include the imported indices, e.g.: "pa_norm", "hyint", "tn10pETCCDI", "tn90pETCCDI", "tx10pETCCDI", "tx90pETCCDI"
+
+
+*Optional settings for script (with default setting)*
+
+#. Data
+
+   * rgrid (false): Define whether the model data should be regridded: (a) false to keep the original resolution; (b) set the desired regridding resolution in cdo format, e.g. "r320x160"; (c) "REF" to use the resolution of the reference model
+
+#. Plotting
+
+   * npancol (2): number of columns in timeseries/trends multipanel figures
+   * npanrow (3): number of rows in timeseries/trends multipanel figures
+   * autolevels (true): select automated (true) or pre-set (false) range of values in plots
+   * autolevels_scale (1): factor multiplying the automated range for maps and timeseries
+   * autolevels_scale_t (1.5): factor multiplying the automated range for trend coefficients
+
+#. Maps
+
+   * oplot_grid (false): plot grid points over maps
+   * boxregion (false): if !=0, plot region boxes over global maps with thickness = abs(boxregion); white (>0) or grey (<0).
+   * removedesert (false): remove (flag as NA) grid points with mean annual pr < 0.5 mm/day (deserts, Giorgi et al., 2014). This affects the timeseries and trends calculations too.
+
+#. Timeseries and trends
+
+   * weight_tseries (true): adopt area weights in timeseries
+   * trend_years (false): (a) false = apply the trend to all years in the dataset; (b) [year1, year2] to apply the trend calculation and plotting only to a limited time interval
+   * add_trend (true): add linear trend to plot
+   * add_trend_sd (false): add dashed lines of the stdev range to timeseries
+   * add_trend_sd_shade (false): add a shade of the stdev range to timeseries
+   * add_tseries_lines (true): plot lines connecting timeseries points
+   * add_zeroline (true): plot a dashed line at y=0
+   * trend_years_only (false): limit timeseries plotting to the time interval adopted for the trend calculation (excluding the normalization period)
+   * scale100years (true): plot trends scaled as 1/100 years
+   * scalepercent (false): plot trends as percent change
+
+
+Variables
+---------
+
+* pr (atmos, daily mean, longitude latitude time)
+
+*Additional variables for recipe_hyint_extreme_events.yml*
+
+* tas (atmos, daily mean, longitude latitude time)
+* tasmin (atmos, daily mean, longitude latitude time)
+* tasmax (atmos, daily mean, longitude latitude time)
+
+Observations and reformat scripts
+---------------------------------
+
+None.
+
+
+References
+----------
+
+* Giorgi et al., 2014, J. Geophys. Res. Atmos., 119, 11,695–11,708, doi:10.1002/2014JD022238
+* Giorgi et al., 2011, J. Climate 24, 5309-5324, doi:10.1175/2011JCLI3979.1
+
+
+Example plots
+-------------
+
+.. figure:: /recipes/figures/hyint/hyint_maps.png
+   :width: 10cm
+
+   Mean hydroclimatic intensity for the EC-EARTH model, for the historical + RCP8.5 projection in the period 1976-2099.
+
+.. figure:: /recipes/figures/hyint/hyint_timeseries.png
+   :width: 12cm
+
+   Timeseries for multiple indices and regions for the ACCESS1-0 model, for the historical + RCP8.5 projection in the period 1976-2099, normalized to the 1976-2005 historical period.
+
+.. figure:: /recipes/figures/hyint/hyint_trends.png
+   :width: 12cm
+
+   Multi-model trend coefficients over selected indices for CMIP5 models in the RCP8.5 2006-2099 projection, normalized to the 1976-2005 historical period.
diff --git a/doc/sphinx/source/recipes/recipe_iht_toa.rst b/doc/sphinx/source/recipes/recipe_iht_toa.rst
new file mode 100644
index 0000000000..87e182ef6a
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_iht_toa.rst
@@ -0,0 +1,123 @@
+.. _recipes_iht_toa:
+
+Implied heat transport from Top of Atmosphere fluxes
+====================================================
+
+Overview
+--------
+
+This recipe calculates the implied horizontal heat transport (IHT) due to the
+spatial anomalies of radiative fluxes at the top of the atmosphere (TOA).
+The regional patterns of implied heat transport for different components of
+the TOA fluxes are calculated by solving the Poisson equation with the flux
+components as source terms (see the schematic equation after the diagnostics
+list below).
+It reproduces the plots in `Pearce and Bodas-Salcedo (2023)`_ when the input
+data is CERES EBAF.
+
+
+Available recipes and diagnostics
+---------------------------------
+
+Recipes are stored in esmvaltool/recipes/
+
+* recipe_iht_toa.yml calculates the IHT maps for the following radiative fluxes:
+
+  * Total net, SW net, LW net (Figure 2).
+  * Total CRE, SW CRE, LW CRE (Figure 4).
+  * All-sky and clear-sky reflected SW (Figure 5).
+  * The meridional heat transports (MHT) of the fluxes above (Figures 1 and 3).
+
+Diagnostics are stored in esmvaltool/diag_scripts/iht_toa/
+
+* single_model_diagnostics.py: driver script that produces the plots.
+* poisson_solver.py: solver that calculates the IHTs.
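+
+Schematically, and as described in the overview above, the solver obtains an
+energy flux potential :math:`P` for each TOA flux component :math:`F` by
+solving
+
+.. math::
+
+   \nabla^2 P = F - \overline{F},
+
+where :math:`\overline{F}` is the global mean of the flux (so that the source
+term integrates to zero over the sphere); the implied horizontal heat
+transport is then the gradient field :math:`\mathbf{H} = \nabla P`. This is a
+sketch for orientation only; ``poisson_solver.py`` is the authoritative
+implementation.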
+ +.. _`Pearce and Bodas-Salcedo (2023)`: https://doi.org/10.1175/JCLI-D-22-0149.1 + +User settings in recipe +----------------------- +There are no user settings in this recipe. + +Variables +--------- + +* rlut (atmos, monthly, longitude latitude time) +* rlutcs (atmos, monthly, longitude latitude time) +* rsutcs (atmos, monthly, longitude latitude time) +* rsut (atmos, monthly, longitude latitude time) +* rsdt (atmos, monthly, longitude latitude time) + +Observations and reformat scripts +--------------------------------- + +* CERES-EBAF + +References +---------- + +* Pearce, F. A., and A. Bodas-Salcedo, 2023: Implied Heat Transport from CERES + Data: Direct Radiative Effect of Clouds on Regional Patterns and Hemispheric + Symmetry. J. Climate, 36, 4019–4030, doi: 10.1175/JCLI-D-22-0149.1. + +Example plots +------------- + +.. _fig_iht_toa_1: +.. figure:: /recipes/figures/iht_toa/figure1_CERES-EBAF_CERES-EBAF.png + :align: center + + The implied heat transport due to the total net flux (blue), split into + the contributions from the SW (orange) and LW (green). + +.. _fig_iht_toa_2: +.. figure:: /recipes/figures/iht_toa/figure2_CERES-EBAF_CERES-EBAF.png + :align: center + + The energy flux potentials for (a) TOT, (c) SW, and (e) LW fluxes, + alongside maps of the spatial anomalies of the fluxes [(b),(d),(f) + flux minus global average flux, respectively]. + The implied heat transport is calculated as the gradient of the energy + flux potential, shown by the white vector arrows for a subset of points + to give the overall transport pattern. + Heat is directed from the blue minima of the potential field to + yellow maxima, with the magnitude implied by the density of contours. + All maps of the same type share the same color bar at the bottom + of the column. + +.. _fig_iht_toa_3: +.. figure:: /recipes/figures/iht_toa/figure3_CERES-EBAF_CERES-EBAF.png + :align: center + + Direct radiative effects of clouds on the meridional heat transport. + (a) Contributions from TOT CRE (blue), SW CRE (orange), and LW CRE (green) + fluxes. (b) Contributions from all-sky and clear-sky OSR. + In (b), both curves have been multiplied by −1 such that positive heat + transport is northward. + +.. _fig_iht_toa_4: +.. figure:: /recipes/figures/iht_toa/figure4_CERES-EBAF_CERES-EBAF.png + :align: center + + As in :numref:`fig_iht_toa_2`, but for the implied heat transport associated with + (a),(b) TOT CRE, (c),(d) SW CRE, and (e),(f) LW CRE fluxes. + +.. _fig_iht_toa_5: +.. figure:: /recipes/figures/iht_toa/figure5_CERES-EBAF_CERES-EBAF.png + :align: center + + As in :numref:`fig_iht_toa_2`, but for (a), (b) clear-sky and (c), (d) all-sky reflected + SW flux. + +.. _fig_iht_toa_6: +.. figure:: /recipes/figures/iht_toa/figure6_CERES-EBAF_CERES-EBAF.png + :align: center + + A measure of the symmetry between heat transport in the Northern and + Southern Hemispheres, calculated for the 12-month running mean of TOT MHT + in the regions: (a) the full hemisphere, (b) from the equator to 30°, and + (c) 30° to 90°. + Symmetry values obtained when including (blue) and excluding (orange) + the effect of clouds. The climatological symmetry values for the two cases + are shown as the black lines in each subplot, dashed and dotted, + respectively. + The standard deviations of the time series are shown in each plot. 
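+
+As a rough guide to how such a symmetry measure can be constructed, the sketch
+below compares the northward transport at mirrored latitudes: a perfectly
+antisymmetric MHT curve (equal and opposite transport in the two hemispheres)
+scores zero, and larger values mean larger hemispheric asymmetry. This is an
+illustrative metric under stated assumptions, not necessarily the exact
+definition used in the paper or in the diagnostic.
+
+.. code-block:: python
+
+   import numpy as np
+
+   def hemispheric_asymmetry(lat, mht):
+       """RMS mismatch (W) between MHT(lat) and -MHT(-lat).
+
+       lat is in degrees (ascending) and mht in W.  Zero means the
+       transports in the two hemispheres mirror each other exactly.
+       """
+       nh_lats = lat[lat > 0]
+       nh = np.interp(nh_lats, lat, mht)   # transport at +lat
+       sh = np.interp(-nh_lats, lat, mht)  # transport at -lat
+       return np.sqrt(np.mean((nh + sh) ** 2))
+
+   # A purely antisymmetric transport curve scores (numerically) zero
+   lat = np.linspace(-90.0, 90.0, 181)
+   mht = 5e15 * np.sin(2.0 * np.deg2rad(lat))
+   print(hemispheric_asymmetry(lat, mht))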
diff --git a/doc/sphinx/source/recipes/recipe_impact.rst b/doc/sphinx/source/recipes/recipe_impact.rst new file mode 100644 index 0000000000..b873f227ad --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_impact.rst @@ -0,0 +1,180 @@ +.. _recipes_impact: + +Quick insights for climate impact researchers +============================================= + +Overview +-------- + +Many impact researchers do not have the time and resources to use a large +ensemble of climate model runs for their impact analysis. To get an idea of the +range of impacts of climate change, however, a small number of climate model +runs often suffices. If a system is sensitive only to annual temperature, for +example, one can select one run with a high change and one with a low change in +annual temperature, preferably both with a low bias. + +This recipe calculates the bias with respect to observations, and the change +with respect to a reference period, for a wide range of (CMIP) models. These +metrics are tabulated and also visualized in a diagram. + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in esmvaltool/recipes/ + + * recipe_impact.yml + +Diagnostics are stored in esmvaltool/diag_scripts/ + + * impact/bias_and_change.py: tabulate and visualize bias and change. + + +User settings in recipe +----------------------- + +#. Script ``impact/bias_and_change.py`` + + *Required settings for variables* + + * tag: ``'model'`` or ``'observations'``, so the diagnostic script knows which datasets to use for the bias calculation. This must be specified for each dataset. + + *Optional settings for preprocessor* + + * Region and time settings (both for the future and reference period) can be changed at will. + + +Variables +--------- + +* tas (atmos, mon, longitude latitude time) +* pr (atmos, mon, longitude latitude time) +* any other variables of interest + + +Observations and reformat scripts +--------------------------------- + +* ERA5 data can be used via the native6 project. + +References +---------- + +* None + +Example plots +------------- + +.. _fig_impact_1: +.. figure:: /recipes/figures/impact/bias_vs_change.png + :align: center + + Bias and change for each variable. + +.. raw:: html
+
+   <table border="1">
+     <tr>
+       <th>metric</th>
+       <th colspan="2">Bias (RMSD of all gridpoints)</th>
+       <th colspan="2">Mean change (Future - Reference)</th>
+     </tr>
+     <tr>
+       <th>variable</th>
+       <th>Temperature (K)</th>
+       <th>Precipitation (kg/m2/s)</th>
+       <th>Temperature (K)</th>
+       <th>Precipitation (kg/m2/s)</th>
+     </tr>
+     <tr><th>dataset</th><th></th><th></th><th></th><th></th></tr>
+     <tr><td>CMIP5_ACCESS1-0_r1i1p1</td><td>3.19e+00</td><td>1.96e-05</td><td>2.36e+00</td><td>8.00e-09</td></tr>
+     <tr><td>CMIP5_BNU-ESM_r1i1p1</td><td>4.08e+00</td><td>1.87e-05</td><td>2.44e+00</td><td>2.96e-08</td></tr>
+     <tr><td>CMIP6_ACCESS-CM2_r1i1p1f1</td><td>3.75e+00</td><td>1.77e-05</td><td>2.87e+00</td><td>6.63e-07</td></tr>
+     <tr><td>CMIP6_ACCESS-ESM1-5_r1i1p1f1</td><td>3.01e+00</td><td>1.96e-05</td><td>2.63e+00</td><td>-1.39e-07</td></tr>
+     <tr><td>CMIP6_AWI-CM-1-1-MR_r1i1p1f1</td><td>2.91e+00</td><td>1.80e-05</td><td>2.56e+00</td><td>7.67e-07</td></tr>
+     <tr><td>CMIP6_BCC-CSM2-MR_r1i1p1f1</td><td>4.22e+00</td><td>1.74e-05</td><td>2.64e+00</td><td>5.02e-07</td></tr>
+     <tr><td>CMIP6_CAMS-CSM1-0_r1i1p1f1</td><td>4.43e+00</td><td>1.84e-05</td><td>1.48e+00</td><td>4.89e-07</td></tr>
+     <tr><td>CMIP6_CESM2-WACCM_r1i1p1f1</td><td>2.95e+00</td><td>1.69e-05</td><td>2.33e+00</td><td>-1.91e-07</td></tr>
+     <tr><td>CMIP6_CanESM5_r1i1p1f1</td><td>2.81e+00</td><td>1.69e-05</td><td>3.36e+00</td><td>2.10e-06</td></tr>
+     <tr><td>CMIP6_FGOALS-g3_r1i1p1f1</td><td>6.74e+00</td><td>1.80e-05</td><td>2.13e+00</td><td>5.95e-07</td></tr>
+     <tr><td>CMIP6_FIO-ESM-2-0_r1i1p1f1</td><td>3.02e+00</td><td>1.75e-05</td><td>2.07e+00</td><td>1.89e-07</td></tr>
+     <tr><td>CMIP6_MIROC6_r1i1p1f1</td><td>4.00e+00</td><td>1.74e-05</td><td>2.25e+00</td><td>-2.45e-07</td></tr>
+     <tr><td>CMIP6_MPI-ESM1-2-HR_r1i1p1f1</td><td>2.98e+00</td><td>1.80e-05</td><td>1.84e+00</td><td>1.18e-07</td></tr>
+     <tr><td>CMIP6_MPI-ESM1-2-LR_r1i1p1f1</td><td>2.95e+00</td><td>1.78e-05</td><td>1.82e+00</td><td>2.52e-07</td></tr>
+     <tr><td>CMIP6_MRI-ESM2-0_r1i1p1f1</td><td>2.81e+00</td><td>1.71e-05</td><td>2.36e+00</td><td>5.75e-07</td></tr>
+     <tr><td>CMIP6_NESM3_r1i1p1f1</td><td>3.90e+00</td><td>1.83e-05</td><td>3.22e+00</td><td>3.60e-07</td></tr>
+     <tr><td>CMIP6_NorESM2-LM_r1i1p1f1</td><td>3.08e+00</td><td>1.70e-05</td><td>1.74e+00</td><td>-4.97e-07</td></tr>
+     <tr><td>CMIP6_NorESM2-MM_r1i1p1f1</td><td>2.86e+00</td><td>1.67e-05</td><td>1.76e+00</td><td>-7.65e-07</td></tr>
+   </table>
diff --git a/doc/sphinx/source/recipes/recipe_ipccwg1ar5ch9.rst b/doc/sphinx/source/recipes/recipe_ipccwg1ar5ch9.rst new file mode 100644 index 0000000000..fc1c26464c --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_ipccwg1ar5ch9.rst @@ -0,0 +1,839 @@ +.. _recipes_ipccwg1ar5ch9: + +IPCC AR5 Chapter 9 (selected figures) +===================================== + +Overview +-------- + +The goal of this recipe is to collect diagnostics to reproduce Chapter 9 of AR5, +so that the plots can be readily reproduced and compared to previous CMIP +versions. This way, the next CMIP round can start from what was available in the +previous one, and effort can go into developing more innovative methods of +analysis rather than into constantly re-inventing the wheel. + +.. note:: + + Please note that most recipes have been modified to include only models that are + (still) readily available via ESGF. Plots produced may therefore look different + from the original figures in IPCC AR5. + +The plots are produced by collecting the diagnostics from individual recipes. The +following figures from Flato et al. (2013) can currently be reproduced: + + * Figure 9.2 a,b,c: Annual-mean surface air temperature for the period 1980-2005. a) multi-model mean, b) bias as the difference between the CMIP5 multi-model mean and the climatology from ERA-Interim (Dee et al., 2011), c) mean absolute model error with respect to the climatology from ERA-Interim. + + * Figure 9.3: Seasonality (December-January-February minus June-July-August) of surface (2 m) air temperature (°C) for the period 1980-2005. (a) Multi-model mean for the historical experiment. (b) Multi-model mean of absolute seasonality. (c) Difference between the multi-model mean and the ERA-Interim reanalysis seasonality. (d) Difference between the multi-model mean and the ERA-Interim absolute seasonality. + + * Figure 9.4: Annual-mean precipitation rate (mm day-1) for the period 1980-2005. a) multi-model mean, b) bias as the difference between the CMIP5 multi-model mean and the climatology from the Global Precipitation Climatology Project (Adler et al., 2003), c) multi-model mean absolute error with respect to observations, and d) multi-model mean error relative to the multi-model mean precipitation itself. + + * Figure 9.5: Climatological (1985-2005) annual-mean cloud radiative effects in Wm-2 for the CMIP5 models against CERES EBAF (2001-2011) in Wm-2. The top row shows the shortwave effect, the middle row the longwave effect, and the bottom row the net effect. Multi-model-mean biases against CERES EBAF 2.6 are shown on the left, whereas the right panels show zonal averages from CERES EBAF 2.6 (black), the individual CMIP5 models (thin gray lines), and the multi-model mean (thick red line). + + * Figure 9.6: Centered pattern correlations between models and observations for the annual mean climatology over the period 1980–1999. Results are shown for individual CMIP3 (black) and CMIP5 (blue) models as thin dashes, along with the corresponding ensemble average (thick dash) and median (open circle). The four variables shown are surface air temperature (TAS), top of the atmosphere (TOA) outgoing longwave radiation (RLUT), precipitation (PR) and TOA shortwave cloud radiative effect (SW CRE). The correlations between the reference and alternate observations are also shown (solid green circles). + + * Figure 9.8: Observed and simulated time series of the anomalies in annual and global mean surface temperature.
All anomalies are differences from the 1961-1990 time-mean of each individual time series. The reference period 1961-1990 is indicated by yellow shading; vertical dashed grey lines represent times of major volcanic eruptions. Single simulations for CMIP5 models (thin lines); multi-model mean (thick red line); different observations (thick black lines). Dataset pre-processing as described in Jones et al., 2013. + + * Figure 9.14: Sea surface temperature plots of the zonal mean error, the equatorial (5 deg north to 5 deg south) mean error, and the multi-model mean of the zonal and equatorial mean errors. + + * Figure 9.24: Time series of (a) Arctic and (b) Antarctic sea ice extent; trend distributions of (c) September Arctic and (d) February Antarctic sea ice extent. + + * Figure 9.26: Ensemble-mean global ocean carbon uptake (a) and global land carbon uptake (b) in the CMIP5 ESMs for the historical period 1900–2005. For comparison, the observation-based estimates provided by the Global Carbon Project (GCP) are also shown (thick black line). The confidence limits on the ensemble mean are derived by assuming that the CMIP5 models are drawn from a t-distribution. The grey areas show the range of annual mean fluxes simulated across the model ensemble. This figure includes results from all CMIP5 models that reported land CO2 fluxes, ocean CO2 fluxes, or both (Anav et al., 2013). + + * Figure 9.27: Simulation of global mean (a) atmosphere–ocean CO2 fluxes ("fgCO2") and (b) net atmosphere–land CO2 fluxes ("NBP"), by ESMs for the period 1986–2005. For comparison, the observation-based estimates provided by the Global Carbon Project (GCP) and the Japanese Meteorological Agency (JMA) atmospheric inversion are also shown. The error bars for the ESMs and observations represent interannual variability in the fluxes, calculated as the standard deviation of the annual means over the period 1986–2005. + + * Figure 9.38: Seasonal cycle of surface temperature or precipitation over land within defined regions; either the multi-model mean difference to a reference dataset or the absolute annual cycle can be plotted. + + * Figure 9.39: Seasonal bias box-and-whisker plot for surface temperature or precipitation within SREX (IPCC Special Report on Managing the Risks of Extreme Events and Disasters to Advance Climate Change Adaptation) regions. + + * Figure 9.40: Seasonal bias box-and-whisker plot for surface temperature or precipitation within defined polar and ocean regions. + + * Figure 9.41b: Comparison between observations and models for variable values within defined regions. + + * Figure 9.42a: Equilibrium climate sensitivity (ECS) against the global mean surface air temperature, both for the period 1961-1990 and for the pre-industrial control runs (a sketch of the underlying Gregory regression follows this list). + + * Figure 9.42b: Transient climate response (TCR) against equilibrium climate sensitivity (ECS). + + * Figure 9.45a: Scatterplot of springtime snow-albedo effect values in climate change vs. springtime d(alpha\ :sub:`s`\)/d(T\ :sub:`s`\) values in the seasonal cycle in transient climate change experiments (Hall and Qu, 2006).
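+
+The ECS values used in Figures 9.42a/b come from climate_metrics/ecs.py, which
+estimates ECS with a Gregory-style regression (Gregory et al., 2004): the
+annual-mean TOA net flux imbalance of an abrupt-4xCO2 experiment is regressed
+against the global-mean surface temperature change, and the regression
+intercept (the forcing) divided by the negative of the slope (the feedback),
+halved to go from 4xCO2 to 2xCO2, gives the ECS. The minimal sketch below uses
+synthetic data; the function name and numbers are illustrative, not taken from
+the diagnostic.
+
+.. code-block:: python
+
+   import numpy as np
+
+   def gregory_ecs(tas_anom, rtnt_anom):
+       """ECS (K) from annual-mean GMST change (K) and TOA net flux
+       imbalance (W m-2) of an abrupt-4xCO2 run relative to piControl.
+       """
+       # Fit N = F + lambda * T: slope lambda < 0, intercept F = forcing
+       lam, forcing = np.polyfit(tas_anom, rtnt_anom, 1)
+       return -forcing / (2.0 * lam)  # halved: 4xCO2 forcing, 2xCO2 ECS
+
+   # Synthetic 150-year experiment: F_4x = 7.4 W m-2, lambda = -1.0 W m-2 K-1
+   rng = np.random.default_rng(0)
+   t = np.linspace(0.5, 6.8, 150)
+   n = 7.4 - 1.0 * t + rng.normal(0.0, 0.3, t.size)
+   print(f"ECS = {gregory_ecs(t, n):.1f} K")  # ~3.7 K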
+ +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in esmvaltool/recipes/recipe_ipccwg1ar5ch9 + + * recipe_flato13ipcc_figures_92_95.yml: Figures 9.2, 9.3, 9.4, 9.5 + * recipe_flato13ipcc_figure_96.yml: Figure 9.6 + * recipe_flato13ipcc_figure_98.yml: Figure 9.8 + * recipe_flato13ipcc_figure_914.yml: Figure 9.14 + * recipe_flato13ipcc_figure_924.yml: Figure 9.24 + * recipe_flato13ipcc_figures_926_927.yml: Figures 9.26 and 9.27 + * recipe_flato13ipcc_figure_942.yml: Figure 9.42 + * recipe_flato13ipcc_figure_945a.yml: Figure 9.45a + * recipe_flato13ipcc_figures_938_941_cmip3.yml: Figures 9.38, 9.39, 9.40, and 9.41 + * recipe_flato13ipcc_figures_938_941_cmip6.yml: Figures 9.38, 9.39, 9.40, and 9.41 (with CMIP6 instead of CMIP3) + * recipe_weigel21gmd_figures_13_16.yml: ESMValTool paper version (Weigel et al., 2021) of Figures 9.38, 9.39, 9.40, and 9.41 (CMIP5 only) + +Diagnostics are stored in esmvaltool/diag_scripts/ + + * carbon_cycle/main.ncl: See :ref:`here`. + * climate_metrics/ecs.py: See :ref:`here`. + * clouds/clouds_bias.ncl: global maps of the multi-model mean and the multi-model mean bias (Fig. 9.2, 9.4) + * clouds/clouds_ipcc.ncl: global maps of multi-model mean minus observations + zonal averages of individual models, multi-model mean and observations (Fig. 9.5) + * ipcc_ar5/ch09_fig09_3.ncl: multi-model mean seasonality of near-surface temperature (Fig. 9.3) + * ipcc_ar5/ch09_fig09_6.ncl: calculating pattern correlations of annual mean climatologies for one variable (Fig 9.6 preprocessing) + * ipcc_ar5/ch09_fig09_6_collect.ncl: collecting pattern correlations for each variable and plotting the correlation plot (Fig 9.6) + * ipcc_ar5/tsline.ncl: time series of the global mean (anomaly) (Fig. 9.8) + * ipcc_ar5/ch09_fig09_14.py: zonally averaged and equatorial SST (Fig. 9.14) + * seaice/seaice_tsline.ncl: time series of sea ice extent (Fig. 9.24a/b) + * seaice/seaice_trends.ncl: trend distributions of sea ice extent (Fig 9.24c/d) + * regional_downscaling/Figure9_38.ncl (Fig 9.38a (variable tas) and Fig 9.38b (variable pr)) + * regional_downscaling/Figure9_39.ncl (Fig 9.39a/c/e (variable tas) and Fig 9.39b/d/f (variable pr)) + * regional_downscaling/Figure9_40.ncl (Fig 9.40a/c/e (variable tas) and Fig 9.40b/d/f (variable pr)) + * regional_downscaling/Figure9_41.ncl (Fig 9.41b) + * ipcc_ar5/ch09_fig09_42a.py: ECS vs. surface air temperature (Fig. 9.42a) + * ipcc_ar5/ch09_fig09_42b.py: TCR vs. ECS (Fig. 9.42b) + * emergent_constraints/snowalbedo.ncl: snow-albedo effect (Fig. 9.45a) + +User settings in recipe +----------------------- + +#. Script carbon_cycle/main.ncl + + See :ref:`here`. + +#. Script climate_metrics/ecs.py + + See :ref:`here`. + +#. Script clouds/clouds_bias.ncl + + *Required settings (scripts)* + + none + + *Optional settings (scripts)* + + * plot_abs_diff: additionally plot absolute differences (true, false) + * plot_rel_diff: additionally plot relative differences (true, false) + * projection: map projection, e.g., Mollweide, Mercator + * timemean: time averaging, i.e.
"seasonalclim" (DJF, MAM, JJA, SON), + "annualclim" (annual mean) + + * Required settings (variables)* + + * reference_dataset: name of reference dataset + + *Optional settings (variables)* + + * long_name: description of variable + + *Color tables* + + * variable "tas": diag_scripts/shared/plot/rgb/ipcc-tas.rgb, + diag_scripts/shared/plot/rgb/ipcc-tas-delta.rgb + * variable "pr-mmday": diag_scripts/shared/plots/rgb/ipcc-precip.rgb, + diag_scripts/shared/plot/rgb/ipcc-precip-delta.rgb + +#. Script clouds/clouds_ipcc.ncl + + *Required settings (scripts)* + + none + + *Optional settings (scripts)* + + * explicit_cn_levels: contour levels + * mask_ts_sea_ice: true = mask T < 272 K as sea ice (only for variable "ts"); + false = no additional grid cells masked for variable "ts" + * projection: map projection, e.g., Mollweide, Mercator + * styleset: style set for zonal mean plot ("CMIP5", "DEFAULT") + * timemean: time averaging, i.e. "seasonalclim" (DJF, MAM, JJA, SON), + "annualclim" (annual mean) + * valid_fraction: used for creating sea ice mask (mask_ts_sea_ice = true): + fraction of valid time steps required to mask grid cell as valid data + + *Required settings (variables)* + + * reference_dataset: name of reference data set + + *Optional settings (variables)* + + * long_name: description of variable + * units: variable units + + *Color tables* + + * variables "pr", "pr-mmday": diag_scripts/shared/plot/rgb/ipcc-precip-delta.rgb + +#. Script ipcc_ar5/tsline.ncl + + *Required settings for script* + + * styleset: as in diag_scripts/shared/plot/style.ncl functions + + *Optional settings for script* + + * time_avg: type of time average (currently only "yearly" and "monthly" are + available). + * ts_anomaly: calculates anomalies with respect to the defined period; for + each grid point by removing the mean for the given calendar month + (requiring at least 50% of the data to be non-missing) + * ref_start: start year of reference period for anomalies + * ref_end: end year of reference period for anomalies + * ref_value: if true, right panel with mean values is attached + * ref_mask: if true, model fields will be masked by reference fields + * region: name of domain + * plot_units: variable unit for plotting + * y-min: set min of y-axis + * y-max: set max of y-axis + * mean_nh_sh: if true, calculate first NH and SH mean + * volcanoes: if true, lines of main volcanic eruptions will be added + * run_ave: if not equal 0 than calculate running mean over this number of + years + * header: if true, region name as header + + *Required settings for variables* + + none + + *Optional settings for variables* + + * reference_dataset: reference dataset; REQUIRED when calculating + anomalies + + *Color tables* + + * e.g. diag_scripts/shared/plot/styles/cmip5.style + +#. Script ipcc_ar5/ch09_fig09_3.ncl + + *Required settings for script* + + none + + *Optional settings for script* + + * projection: map projection, e.g., Mollweide, Mercator (default = Robinson) + + *Required settings for variables* + + * reference_dataset: name of reference observation + + *Optional settings for variables* + + * map_diff_levels: explicit contour levels for plotting + +#. Script ipcc_ar5/ch09_fig09_6.ncl + + *Required settings for variables* + + * reference_dataset: name of reference observation + + *Optional settings for variables* + + * alternative_dataset: name of alternative observations + +#. 
Script ipcc_ar5/ch09_fig09_6_collect.ncl + + *Required settings for script* + + none + + *Optional settings for script* + + * diag_order: list of diagnostic names in the order in which the variables should appear on the x-axis + +#. Script seaice/seaice_trends.ncl + + *Required settings (scripts)* + + * month: selected month (1, 2, ..., 12) or annual mean ("A") + * region: region to be analyzed ("Arctic" or "Antarctic") + + *Optional settings (scripts)* + + * fill_pole_hole: fill observational hole at the North Pole, default: False + + *Optional settings (variables)* + + * ref_model: array of references plotted as vertical lines + +#. Script seaice/seaice_tsline.ncl + + *Required settings (scripts)* + + * region: Arctic, Antarctic + * month: annual mean (A), or month number (3 = March, for Antarctic; 9 = September for Arctic) + + *Optional settings (scripts)* + + * styleset: for plot_type cycle only (cmip5, cmip6, default) + * multi_model_mean: plot multi-model mean and standard deviation (default: False) + * EMs_in_lg: create a legend label for individual ensemble members (default: False) + * fill_pole_hole: fill polar hole (typically in satellite data) with sic = 1 (default: False) + +#. Script regional_downscaling/Figure9.38.ncl + + *Required settings for script* + + none + + *Optional settings (scripts)* + + * styleset: for plot_type cycle (e.g. CMIP5, CMIP6), default "CMIP5" + * fig938_region_label: labels of the regions to be included (["WNA", "ENA", "CAM", "TSA", "SSA", "EUM", "NAF","CAF", "SAF", "NAS", "CAS", "EAS", "SAS", "SEA", "AUS"]), default "WNA" + * fig938_project_MMM: projects to average, default "CMIP5" + * fig938_experiment_MMM: experiments to average, default "historical" + * fig938_mip_MMM: mip to average, default "Amon" + * fig938_names_MMM: names in legend, i.e. (["CMIP5","CMIP3"]), default fig938_project_MMM + * fig938_colors_MMM: color for the multi-model mean (e.g. ["red"]), default "red" + * If set, fig938_mip_MMM, fig938_experiment_MMM, fig938_project_MMM, fig938_names_MMM, and fig938_colors_MMM must have the same number of elements + + * fig938_refModel: reference data set for differences, default "ERA-Interim" + * fig938_MMM: plot multi-model mean (true/false), default "true" + * fig938_YMin: minimum of y-axis + * fig938_YMax: maximum of y-axis + * fig938_diff: difference to reference data (true) or absolute annual cycle (false), default "true" + +#. Script regional_downscaling/Figure9.39.ncl + + *Required settings (scripts)* + + * reference_dataset: reference dataset name (set of variables) + + *Optional settings (scripts)* + + * styleset: for plot_type cycle (e.g. CMIP5, CMIP6), default "CMIP5" + * fig939_season: seasons, i.e. (["DJF","JJA","ANN"]), default "DJF" + * fig939_region_label: labels of the regions to be included (["ALAs","CGIs","WNAs","CNAs","ENAs","CAMs","AMZs","NEBs","WSAs","SSAs", "NEUs","CEUs","MEDs","SAHs","WAFs","EAFs","SAFs","NASs","WASs","CASs", "TIBs","EASs","SASs","SEAs","NAUs","SAUs"]), default "ALAs" + * fig939_project_MMM: projects to average, default "CMIP5" + * fig939_experiment_MMM: experiments to average, default "historical" + * fig939_mip_MMM: mip to average, default "Amon" + * fig939_MMM: plot multi-model mean (true/false) + * fig939_names_MMM: names in legend, i.e. (["CMIP5","CMIP3"]) + * fig939_YMin: minimum of y-axis + * fig939_YMax: maximum of y-axis + * fig939_vert_line_pos: i.e. ([6,10,13,17,24,26]) + * fig939_vert_line_label: labels of vertical lines, i.e. (["North America", "South America","Europe","Africa","Asia","Australia"]) + * fig939_mode: True = cumulative mode + +#. Script regional_downscaling/Figure9.40.ncl + + *Required settings (scripts)* + + * reference_dataset: reference dataset name (set of variables) + + *Optional settings (scripts)* + + * styleset: for plot_type cycle (e.g. CMIP5, CMIP6), default "CMIP5" + * fig940_season: seasons, i.e. (["DJF","JJA","ANN"]), default "DJF" + * fig940_region_label: labels of the regions to be included (["Arctic_land","Arctic_sea","Antarctic_land","Antarctic_sea", "Caribbean","WesternIndianOcean","NorthernIndianOcean", "NorthernTropicalPacific","EquatorialTropicalPacific", "SouthernTropicalPacific","World_land","World_sea","World"]), default "Arctic_land" + * fig940_project_MMM: projects to average, default "CMIP5" + * fig940_experiment_MMM: experiments to average, default "historical" + * fig940_mip_MMM: mip to average, default "Amon" + * fig940_MMM: plot multi-model mean (true/false) + * fig940_names_MMM: names in legend, i.e. (["CMIP5","CMIP3"]) + * fig940_YMin: minimum of y-axis + * fig940_YMax: maximum of y-axis + * fig940_vert_line_pos: i.e. ([6,10,13,17,24,26]) + * fig940_vert_line_label: labels of vertical lines, i.e. (["North America", "South America","Europe","Africa","Asia","Australia"]) + * fig940_mode: True = cumulative mode + +#. Script regional_downscaling/Figure9.41.ncl + + *Required settings (scripts)* + + * reference_dataset: reference dataset name (set of variables) + + *Optional settings (scripts)* + + * styleset: for plot_type cycle (e.g. CMIP5, CMIP6), default "CMIP5" + * fig941_region_label: labels of the regions to be included (["ALAs","CGIs","WNAs","CNAs","ENAs","CAMs","AMZs","NEBs","WSAs","SSAs", "NEUs","CEUs","MEDs","SAHs","WAFs","EAFs","SAFs","NASs","WASs","CASs", "TIBs","EASs","SASs","SEAs","NAUs","SAUs"]), default "MEDs" + +#. Script ipcc_ar5/ch09_fig09_42a.py + + *Required settings for script* + + none + + *Optional settings for script* + + * axes_functions: :obj:`dict` containing methods executed for the plot's :class:`matplotlib.axes.Axes` object. + * dataset_style: name of the style file (located in :mod:`esmvaltool.diag_scripts.shared.plot.styles_python`). + * matplotlib_style: name of the matplotlib style file (located in :mod:`esmvaltool.diag_scripts.shared.plot.styles_python.matplotlib`). + * save: :obj:`dict` containing keyword arguments for the function :func:`matplotlib.pyplot.savefig`. + * seaborn_settings: Options for :func:`seaborn.set_theme` (affects all plots). + +.. _ch09_fig09_42b.py: + +#. Script ipcc_ar5/ch09_fig09_42b.py + + *Required settings for script* + + none + + *Optional settings for script* + + * dataset_style: Dataset style file (located in :mod:`esmvaltool.diag_scripts.shared.plot.styles_python`). The entry ``marker`` is ignored when ``marker_file`` is given. + * log_x: Apply logarithm to X axis (ECS). + * log_y: Apply logarithm to Y axis (TCR). + * marker_column: Name of the column to look up markers in ``marker_file``. + * marker_file: CSV file with markers (can also be integers). Must have the columns ``dataset`` and ``marker`` (or the column specified by ``marker_column``). If a relative path is given, assumes that this is a pattern to search for ancestor files. + * savefig_kwargs: Keyword arguments for :func:`matplotlib.pyplot.savefig`. + * seaborn_settings: Options for :func:`seaborn.set_theme` (affects all plots). + * x_lim: Plot limits for X axis (ECS).
+ * y_lim: Plot limits for Y axis (TCR). + +#. Script emergent_constraints/snowalbedo.ncl + + *Required settings for script* + + * exp_presentday: name of present-day experiment (e.g. "historical") + * exp_future: name of climate change experiment (e.g. "rcp45") + + *Optional settings for script* + + * diagminmax: observational uncertainty (min and max) + * legend_outside: create extra file with legend (true, false) + * styleset: e.g. "CMIP5" (if not set, this diagnostic will create its own + color table and symbols for plotting) + * suffix: string to be added to output filenames + * xmax: upper limit of x-axis (default = automatic) + * xmin: lower limit of x-axis (default = automatic) + * ymax: upper limit of y-axis (default = automatic) + * ymin: lower limit of y-axis (default = automatic) + + *Required settings for variables* + + * ref_model: name of reference data set + + *Optional settings for variables* + + none + +Variables +--------- + +* areacello (fx, longitude latitude) +* fgco2 (ocean, monthly mean, longitude latitude time) +* nbp (ocean, monthly mean, longitude latitude time) +* pr (atmos, monthly mean, longitude latitude time) +* rlut, rlutcs (atmos, monthly mean, longitude latitude time) +* rsdt (atmos, monthly mean, longitude latitude time) +* rsuscs, rsdscs (atmos, monthly mean, longitude latitude time) +* rsut, rsutcs (atmos, monthly mean, longitude latitude time) +* sic (ocean-ice, monthly mean, longitude latitude time) +* tas (atmos, monthly mean, longitude latitude time) +* tos (ocean, monthly mean, longitude, latitude, time) + + +Observations and reformat scripts +--------------------------------- + +*Note: (1) obs4MIPs data can be used directly without any preprocessing; +(2) see headers of reformat scripts for non-obs4MIPs data for download +instructions.* + +* CERES-EBAF (rlut, rlutcs, rsut, rsutcs - obs4MIPs) +* ERA-Interim (tas, ta, ua, va, zg, hus - esmvaltool/cmorizers/data/formatters/datasets/era-interim.py) +* GCP2018 (fgco2, nbp - esmvaltool/cmorizers/data/formatters/datasets/gcp2018.py) +* GPCP-SG (pr - obs4MIPs) +* JMA-TRANSCOM (fgco2, nbp - esmvaltool/cmorizers/data/formatters/datasets/jma_transcom.py) +* HadCRUT4 (tas - esmvaltool/cmorizers/data/formatters/datasets/hadcrut4.ncl) +* HadISST (sic, tos - esmvaltool/cmorizers/data/formatters/datasets/hadisst.ncl) +* ISCCP-FH (rsuscs, rsdscs, rsdt - esmvaltool/cmorizers/data/formatters/datasets/isccp_fh.ncl) + + +References +---------- + +* Flato, G., J. Marotzke, B. Abiodun, P. Braconnot, S.C. Chou, W. Collins, P. + Cox, F. Driouech, S. Emori, V. Eyring, C. Forest, P. Gleckler, E. Guilyardi, + C. Jakob, V. Kattsov, C. Reason and M. Rummukainen, 2013: Evaluation of + Climate Models. In: Climate Change 2013: The Physical Science Basis. + Contribution of Working Group I to the Fifth Assessment Report of the + Intergovernmental Panel on Climate Change [Stocker, T.F., D. Qin, G.-K. + Plattner, M. Tignor, S.K. Allen, J. Boschung, A. Nauels, Y. Xia, V. Bex and + P.M. Midgley (eds.)]. Cambridge University Press, Cambridge, United Kingdom + and New York, NY, USA. + +* Hall, A., and X. Qu, 2006: Using the current seasonal cycle to constrain + snow albedo feedback in future climate change, Geophys. Res. Lett., 33, + L03502, doi:10.1029/2005GL025127. + +* Jones et al., 2013: Attribution of observed historical near-surface temperature + variations to anthropogenic and natural causes using CMIP5 simulations. Journal + of Geophysical Research: Atmosphere, 118, 4001-4024, doi:10.1002/jgrd.50239. + +* Seneviratne, S. 
I., Nicholls, N., Easterling, D., Goodess, C. M., Kanae, S., Kossin, J., Luo, Y., Marengo, J., McInnes, K., Rahimi, M., Reichstein, M., Sorteberg, A., Vera, C., and Zhang, X., 2012: Changes in climate extremes and their impacts on the natural physical environment, in: Managing the Risks of Extreme Events and Disasters to Advance Climate Change Adaptation. A Special Report of Working Groups I and II of the Intergovernmental Panel on Climate Change (IPCC), edited by: Field, C. B., Barros, V., Stocker, T. F., Qin, D., Dokken, D. J., Ebi, K. L., Mastrandrea, M. D., Mach, K. J., Plattner, G.-K., Allen, S. K., Tignor, M., and Midgley, P. M., Cambridge University Press, Cambridge, UK, and New York, NY, USA, 109-230. + +* Weigel, K., Bock, L., Gier, B. K., Lauer, A., Righi, M., Schlund, M., Adeniyi, K., Andela, B., Arnone, E., Berg, P., Caron, L.-P., Cionni, I., Corti, S., Drost, N., Hunter, A., Lledó, L., Mohr, C. W., Paçal, A., Pérez-Zanón, N., Predoi, V., Sandstad, M., Sillmann, J., Sterl, A., Vegas-Regidor, J., von Hardenberg, J., and Eyring, V.: Earth System Model Evaluation Tool (ESMValTool) v2.0 - diagnostics for extreme events, regional and impact evaluation, and analysis of Earth system models in CMIP, Geosci. Model Dev., 14, 3159-3184, https://doi.org/10.5194/gmd-14-3159-2021, 2021. + + +Example plots +------------- + +.. figure:: /recipes/figures/ipccwg1ar5ch9/fig-9-2.png + :align: center + + Figure 9.2 a,b,c: Annual-mean surface air temperature for the period 1980-2005. a) multi-model mean, b) bias as the difference between the CMIP5 multi-model mean and the climatology from ERA-Interim (Dee et al., 2011), c) mean absolute model error with respect to the climatology from ERA-Interim. + +.. figure:: /recipes/figures/ipccwg1ar5ch9/fig-9-3.png + :align: center + + Figure 9.3: Multi-model values for seasonality of near-surface temperature, from top left to bottom right: mean, mean of absolute seasonality, mean bias in seasonality, mean bias in absolute seasonality. Reference dataset: ERA-Interim. + +.. figure:: /recipes/figures/ipccwg1ar5ch9/fig-9-4.png + :align: center + + Figure 9.4: Annual-mean precipitation rate (mm day-1) for the period 1980-2005. a) multi-model mean, b) bias as the difference between the CMIP5 multi-model mean and the climatology from the Global Precipitation Climatology Project (Adler et al., 2003), c) multi-model mean absolute error with respect to observations, and d) multi-model mean error relative to the multi-model mean precipitation itself. + +.. figure:: /recipes/figures/ipccwg1ar5ch9/fig-9-5.png + :align: center + + Figure 9.5: Climatological (1985-2005) annual-mean cloud radiative effects in Wm-2 for the CMIP5 models against CERES EBAF (2001-2011) in Wm-2. The top row shows the shortwave effect, the middle row the longwave effect, and the bottom row the net effect. Multi-model-mean biases against CERES EBAF 2.6 are shown on the left, whereas the right panels show zonal averages from CERES EBAF 2.6 (black), the individual CMIP5 models (thin gray lines), and the multi-model mean (thick red line). + +.. figure:: /recipes/figures/ipccwg1ar5ch9/fig-9-6.png + :align: center + + Figure 9.6: Centered pattern correlations between models and observations for the annual mean climatology over the period 1980–1999. Results are shown for individual CMIP3 (black) and CMIP5 (blue) models as thin dashes, along with the corresponding ensemble average (thick dash) and median (open circle).
The four variables shown are surface air temperature (TAS), top of the atmosphere (TOA) outgoing longwave radiation (RLUT), precipitation (PR) and TOA shortwave cloud radiative effect (SW CRE). The correlations between the reference and alternate observations are also shown (solid green circles). + +.. figure:: /recipes/figures/ipccwg1ar5ch9/fig-9-8.png + :align: center + + Figure 9.8: Observed and simulated time series of the anomalies in annual and global mean surface temperature. All anomalies are differences from the 1961-1990 time-mean of each individual time series. The reference period 1961-1990 is indicated by yellow shading; vertical dashed grey lines represent times of major volcanic eruptions. Single simulations for CMIP5 models (thin lines); multi-model mean (thick red line); different observations (thick black lines). Dataset pre-processing as described in Jones et al., 2013. + +.. figure:: /recipes/figures/ipccwg1ar5ch9/fig-9-14.png + :align: center + + Figure 9.14: (a) Zonally averaged sea surface temperature (SST) error in CMIP5 models. (b) Equatorial SST error in CMIP5 models. (c) Zonally averaged multi-model mean SST error for CMIP5 together with inter-model standard deviation (shading). (d) Equatorial multi-model mean SST in CMIP5 together with inter-model standard deviation (shading) and observations (black). Model climatologies are derived from the 1979-1999 mean of the historical simulations. The Hadley Centre Sea Ice and Sea Surface Temperature (HadISST) (Rayner et al., 2003) observational climatology for 1979-1999 is used as a reference for the error calculation in (a), (b), and (c), and for the observations in (d). + +.. figure:: /recipes/figures/seaice/trend_sic_extend_Arctic_September_histogram.png + :align: center + :width: 9cm + + Figure 9.24c: Sea ice extent trend distribution for the Arctic in September. + +.. figure:: /recipes/figures/seaice/extent_sic_Arctic_September_1960-2005.png + :align: center + :width: 12cm + + Figure 9.24a: Time series of total sea ice area and extent (accumulated) for the Arctic in September, including the multi-model mean and standard deviation. + +.. figure:: /recipes/figures/ipccwg1ar5ch9/fig-9-26.png + :align: center + + Figure 9.26 (bottom): Ensemble-mean global land carbon uptake in the CMIP5 ESMs for the historical period 1900–2005. For comparison, the observation-based estimates provided by the Global Carbon Project (GCP) are also shown (black line). The confidence limits on the ensemble mean are derived by assuming that the CMIP5 models come from a t-distribution. The grey areas show the range of annual mean fluxes simulated across the model ensemble. + +.. figure:: /recipes/figures/ipccwg1ar5ch9/fig-9-27.png + :align: center + + Figure 9.27 (top): Simulation of global mean atmosphere–ocean CO2 fluxes ("fgCO2") by ESMs for the period 1986–2005. For comparison, the observation-based estimates provided by the Global Carbon Project (GCP) are also shown. The error bars for the ESMs and observations represent interannual variability in the fluxes, calculated as the standard deviation of the annual means over the period 1986–2005. + +..
figure:: /recipes/figures/ipccwg1ar5ch9/fig-9-38.png + :align: center + + Figure 9.38tas: Mean seasonal cycle for surface temperature (tas) as the multi-model mean of 38 CMIP5 and 22 CMIP6 models, as well as CRU and ERA-Interim reanalysis data, averaged for 1980-2005 over land in different regions: Western North America (WNA), Eastern North America (ENA), Central America (CAM), Tropical South America (TSA), Southern South America (SSA), Europe and Mediterranean (EUM), North Africa (NAF), Central Africa (CAF), South Africa (SAF), North Asia (NAS), Central Asia (CAS), East Asia (EAS), South Asia (SAS), Southeast Asia (SEA), and Australia (AUS). Similar to Fig. 9.38a from Flato et al. (2013), but with CMIP6 instead of CMIP3 and a different set of CMIP5 models. + + +.. figure:: /recipes/figures/ipccwg1ar5ch9/fig-9-38-pr.png + :align: center + + Figure 9.38pr: Mean seasonal cycle for precipitation (pr) as the multi-model mean of 38 CMIP5 and 22 CMIP6 models, as well as CRU and ERA-Interim reanalysis data, averaged for 1980-1999 over land in different regions: Western North America (WNA), Eastern North America (ENA), Central America (CAM), Tropical South America (TSA), Southern South America (SSA), Europe and Mediterranean (EUM), North Africa (NAF), Central Africa (CAF), South Africa (SAF), North Asia (NAS), Central Asia (CAS), East Asia (EAS), South Asia (SAS), Southeast Asia (SEA), and Australia (AUS). Similar to Fig. 9.38b from Flato et al. (2013), but with CMIP6 instead of CMIP3 and a different set of CMIP5 models. + +.. figure:: /recipes/figures/ipccwg1ar5ch9/fig-9-38_regions.png + :align: center + + Figure 9.38reg: Positions of the regions used in Figure 9.38. + +.. figure:: /recipes/figures/ipccwg1ar5ch9/fig-9-39.png + :align: center + + Figure 9.39tas: Box-and-whisker plots showing the 5th, 25th, 50th, 75th and 95th percentiles of the seasonal- and annual-mean biases for surface temperature (tas) for 1980-2005 between 38 CMIP5 models (box and whiskers) or 22 CMIP6 models (crosses) and CRU data. The regions are: Alaska/NW Canada (ALAs), Eastern Canada/Greenland/Iceland (CGIs), Western North America (WNAs), Central North America (CNAs), Eastern North America (ENAs), Central America/Mexico (CAMs), Amazon (AMZs), NE Brazil (NEBs), West Coast South America (WSAs), South-Eastern South America (SSAs), Northern Europe (NEUs), Central Europe (CEUs), Southern Europe/the Mediterranean (MEDs), Sahara (SAHs), Western Africa (WAFs), Eastern Africa (EAFs), Southern Africa (SAFs), Northern Asia (NASs), Western Asia (WASs), Central Asia (CASs), Tibetan Plateau (TIBs), Eastern Asia (EASs), Southern Asia (SASs), Southeast Asia (SEAs), Northern Australia (NAUs) and Southern Australia/New Zealand (SAUs). The positions of these regions are defined following Seneviratne et al. (2012) and differ from the ones in Fig. 9.38. Similar to Fig. 9.39 a,c,e from Flato et al. (2013), but with CMIP6 instead of CMIP3 and a different set of CMIP5 models. + +.. figure:: /recipes/figures/ipccwg1ar5ch9/fig-9-39-pr.png + :align: center + + Figure 9.39pr: Box-and-whisker plots showing the 5th, 25th, 50th, 75th and 95th percentiles of the seasonal- and annual-mean biases for precipitation (pr) for 1980-2005 between 38 CMIP5 models (box and whiskers) or 22 CMIP6 models (crosses) and CRU data. The regions are: Alaska/NW Canada (ALAs), Eastern Canada/Greenland/Iceland (CGIs), Western North America (WNAs), Central North America (CNAs), Eastern North America (ENAs), Central America/Mexico (CAMs), Amazon (AMZs), NE Brazil (NEBs), West Coast South America (WSAs), South-Eastern South America (SSAs), Northern Europe (NEUs), Central Europe (CEUs), Southern Europe/the Mediterranean (MEDs), Sahara (SAHs), Western Africa (WAFs), Eastern Africa (EAFs), Southern Africa (SAFs), Northern Asia (NASs), Western Asia (WASs), Central Asia (CASs), Tibetan Plateau (TIBs), Eastern Asia (EASs), Southern Asia (SASs), Southeast Asia (SEAs), Northern Australia (NAUs) and Southern Australia/New Zealand (SAUs). The positions of these regions are defined following Seneviratne et al. (2012) and differ from the ones in Fig. 9.38. Similar to Fig. 9.39 b,d,f from Flato et al. (2013), but with CMIP6 instead of CMIP3 and a different set of CMIP5 models. + +.. figure:: /recipes/figures/ipccwg1ar5ch9/fig-9-39_regions.png + :align: center + + Figure 9.39reg: Positions of the regions used in Figure 9.39. + +.. figure:: /recipes/figures/ipccwg1ar5ch9/fig-9-40.png + :align: center + + Figure 9.40tas: Box-and-whisker plots showing the 5th, 25th, 50th, 75th and 95th percentiles of the seasonal- and annual-mean biases for surface temperature (tas) for oceanic and polar regions between 38 CMIP5 (box and whiskers) or 22 CMIP6 (crosses) models and ERA-Interim data for 1980–2005. + +.. figure:: /recipes/figures/ipccwg1ar5ch9/fig-9-40-pr.png + :align: center + + Figure 9.40pr: Box-and-whisker plots showing the 5th, 25th, 50th, 75th and 95th percentiles of the seasonal- and annual-mean biases for precipitation (pr) for oceanic and polar regions between 38 CMIP5 (box and whiskers) or 22 CMIP6 (crosses) models and Global Precipitation Climatology Project - Satellite-Gauge (GPCP-SG) data for 1980–2005. + +.. figure:: /recipes/figures/ipccwg1ar5ch9/fig-9-40_regions.png + :align: center + + Figure 9.40reg: Positions of the regions used in Figure 9.40. + +.. figure:: /recipes/figures/ipccwg1ar5ch9/fig-9-41b.png + :align: center + + Figure 9.41b: Ranked modelled versus ERA-Interim mean temperature for 38 CMIP5 models in the Mediterranean region for 1961–2000. + +.. figure:: /recipes/figures/ipccwg1ar5ch9/fig-9-42a.png + :align: center + + Figure 9.42a: Equilibrium climate sensitivity (ECS) against the global mean surface air temperature of CMIP5 models, both for the period 1961-1990 (larger symbols) and for the pre-industrial control runs (smaller symbols). + +.. figure:: /recipes/figures/ipccwg1ar5ch9/fig-9-42b.png + :align: center + + Figure 9.42b: Transient climate response (TCR) against equilibrium climate sensitivity (ECS) for CMIP5 models. + +.. figure:: /recipes/figures/ipccwg1ar5ch9/fig-9-45a.png + :align: center + + Figure 9.45a: Scatterplot of springtime snow-albedo effect values in climate change vs. springtime :math:`\Delta \alpha_s`/:math:`\Delta T_s` values in the seasonal cycle in transient climate change experiments (CMIP5 historical experiments: 1901-2000, RCP4.5 experiments: 2101-2200). diff --git a/doc/sphinx/source/recipes/recipe_ipccwg1ar6ch3.rst b/doc/sphinx/source/recipes/recipe_ipccwg1ar6ch3.rst new file mode 100644 index 0000000000..718c345b19 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_ipccwg1ar6ch3.rst @@ -0,0 +1,562 @@ +..
_recipes_ipccwg1ar6ch3: + +IPCC AR6 Chapter 3 (selected figures) +===================================== + +Overview +-------- + +This recipe collects selected diagnostics used in IPCC AR6 WGI Chapter 3: Human influence on the climate system (`Eyring et al., 2021`_). Plots from IPCC AR6 can be readily reproduced and compared to previous versions. The aim is to be able to start from what is available now the next time, allowing us to focus on developing more innovative analysis methods rather than constantly re-inventing the wheel. + +Processing of CMIP3 models currently works only in serial mode, due to an issue in the input data that is still under investigation. To run the recipe for Fig. 3.42a and Fig. 3.43 set the :ref:`configuration option ` ``max_parallel_tasks: 1``. + +The plots are produced by collecting the diagnostics from individual recipes. The following figures from `Eyring et al. (2021)`_ can currently be reproduced: + + * Figure 3.3 a,b,c,d: Surface Air Temperature - Model Bias + + * Figure 3.4: Anomaly Of Near-Surface Air Temperature + + * Figure 3.5: Temporal Variability Of Near-Surface Air Temperature + + * Figure 3.9: Anomaly Of Near-Surface Air Temperature - Attribution + + * Figure 3.13: Precipitation - Model Bias + + * Figure 3.15: Precipitation Anomaly + + * Figure 3.19: Speed-Up Of Zonal Mean Wind + + * Figure 3.42: Relative Model Performance + + * Figure 3.43: Correlation Pattern + +To reproduce Fig. 3.9 you need the shapefile of the `AR6 reference regions `_ (`Iturbide et al., 2020 `_). Please download the file `IPCC-WGI-reference-regions-v4_shapefile.zip `_, unzip it, and store it in `/IPCC-regions/` (where ``auxiliary_data_dir`` is given as :ref:`configuration option `). + +.. _`Eyring et al., 2021`: https://www.ipcc.ch/report/ar6/wg1/chapter/chapter-3/ +.. _`Eyring et al. (2021)`: https://www.ipcc.ch/report/ar6/wg1/chapter/chapter-3/ + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in esmvaltool/recipes/ipccwg1ar6ch3/ + + * recipe_ipccwg1ar6ch3_atmosphere.yml + * recipe_ipccwg1ar6ch3_fig_3_9.yml + * recipe_ipccwg1ar6ch3_fig_3_19.yml + * recipe_ipccwg1ar6ch3_fig_3_42_a.yml + * recipe_ipccwg1ar6ch3_fig_3_42_b.yml + * recipe_ipccwg1ar6ch3_fig_3_43.yml + +Diagnostics are stored in esmvaltool/diag_scripts/ + + Fig. 3.3: + + * ipcc_ar5/ch12_calc_IAV_for_stippandhatch.ncl: See :ref:`here`. + * ipcc_ar6/model_bias.ncl + + Fig. 3.4: + + * ipcc_ar6/tas_anom.ncl + * ipcc_ar6/tsline_collect.ncl + + Fig. 3.5: + + * ipcc_ar6/zonal_st_dev.ncl + + Fig. 3.9: + + * ipcc_ar6/tas_anom_damip.ncl + + Fig. 3.13: + + * ipcc_ar5/ch12_calc_IAV_for_stippandhatch.ncl: See :ref:`here`. + * ipcc_ar6/model_bias.ncl + + Fig. 3.15: + + * ipcc_ar6/precip_anom.ncl + + Fig. 3.19: + + * ipcc_ar6/zonal_westerly_winds.ncl + + Fig. 3.42: + + * perfmetrics/main.ncl + * perfmetrics/collect.ncl + + Fig. 3.43: + + * ipcc_ar6/corr_pattern.ncl + * ipcc_ar6/corr_pattern_collect.ncl + + +User settings in recipe +----------------------- + +#. Script ipcc_ar5/ch12_calc_IAV_for_stippandhatch.ncl + + See :ref:`here`. + +#. Script ipcc_ar6/model_bias.ncl + + *Optional settings (scripts)* + + * plot_abs_diff: additionally plot absolute differences (true, false) + * plot_rel_diff: additionally plot relative differences (true, false) + * plot_rms_diff: additionally plot root mean square differences (true, false) + * projection: map projection, e.g., Mollweide, Mercator + * timemean: time averaging, i.e.
"seasonalclim" (DJF, MAM, JJA, SON), + "annualclim" (annual mean) + + *Required settings (variables)* + + * reference_dataset: name of reference dataset + + *Color tables* + + * variable "tas" and "tos": + diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_div.rgb, + diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_10.rgb, + diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_seq.rgb + * variable "pr": diag_scripts/shared/plots/rgb/ipcc-ar6_precipitation_seq.rgb, + diag_scripts/shared/plot/rgb/ipcc-ar6_precipitation_10.rgb + * variable "sos": diag_scripts/shared/plot/rgb/ipcc-ar6_misc_seq_1.rgb, + diag_scripts/shared/plot/rgb/ipcc-ar6_misc_div.rgb + + +#. Script ipcc_ar6/tas_anom.ncl + + *Required settings for script* + + * styleset: as in diag_scripts/shared/plot/style.ncl functions + + *Optional settings for script* + + * blending: if true, calculates blended surface temperature + * ref_start: start year of reference period for anomalies + * ref_end: end year of reference period for anomalies + * ref_value: if true, right panel with mean values is attached + * ref_mask: if true, model fields will be masked by reference fields + * region: name of domain + * plot_units: variable unit for plotting + * y-min: set min of y-axis + * y-max: set max of y-axis + * header: if true, region name as header + * volcanoes: if true, adds volcanoes to the plot + * write_stat: if true, write multi model statistics in nc-file + + *Optional settings for variables* + + * reference_dataset: reference dataset; REQUIRED when calculating + anomalies + + *Color tables* + + * e.g. diag_scripts/shared/plot/styles/cmip5.style + + +#. Script ipcc_ar6/tas_anom_damip.ncl + + *Required settings for script* + + * start_year: start year in figure + * end_year: end year in figure + * panels: list of variable blocks for each panel + + *Optional settings for script* + + * ref_start: start year of reference period for anomalies + * ref_end: end year of reference period for anomalies + * ref_mask: if true, model fields will be masked by reference fields + * plot_units: variable unit for plotting + * y-min: set min of y-axis + * y-max: set max of y-axis + * header: title for each panel + * title: name of region as part of filename + * legend: set labels for optional output of a legend in an extra file + + +#. Script ipcc_ar6/tsline_collect.ncl + + *Optional settings for script* + + * blending: if true, then var="gmst" otherwise "gsat" + * ref_start: start year of reference period for anomalies + * ref_end: end year of reference period for anomalies + * region: name of domain + * plot_units: variable unit for plotting + * y-min: set min of y-axis + * y-max: set max of y-axis + * order: order in which experiments should be plotted + * stat_shading: if true: shading of statistic range + * ref_shading: if true: shading of reference period + + *Optional settings for variables* + + * reference_dataset: reference dataset; REQUIRED when calculating + anomalies + + +#. Script ipcc_ar6/zonal_st_dev.ncl + + *Required settings for script* + + * styleset: as in diag_scripts/shared/plot/style.ncl functions + + *Optional settings for script* + + * plot_legend: if true, plot legend will be plotted + * plot_units: variable unit for plotting + * multi_model_mean: if true, multi-model mean and uncertainty will be + plotted + + *Optional settings for variables* + + * reference_dataset: reference dataset; REQUIRED when calculating + anomalies + + +#. 
Script ipcc_ar6/precip_anom.ncl + + *Required settings for script* + + * panels: list of variables plotted in each panel + * start_year: start of time coordinate + * end_year: end of time coordinate + + *Optional settings for script* + + * anomaly: true if anomaly should be calculated + * ref_start: start year of reference period for anomalies + * ref_end: end year of reference period for anomalies + * ref_mask: if true, model fields will be masked by reference fields + * region: name of domain + * plot_units: variable unit for plotting + * header: if true, region name as header + * stat: statistics for multi model nc-file (MinMax,5-95,10-90) + * y_min: set min of y-axis + * y_max: set max of y-axis + + + +#. Script ipcc_ar6/zonal_westerly_winds.ncl + + *Optional settings for variables* + + * reference_dataset: reference dataset; REQUIRED when calculating + anomalies + + *Optional settings for script* + + * e13fig12_start_year: year when the climatology calculation starts + (default: start_year of var) + * e13fig12_end_year: year when the climatology calculation ends + (default: end_year of var) + * e13fig12_multimean: multimodel mean (default: False) + * e13fig12_exp_MMM: name of the experiments for the MMM + (required if @e13fig12_multimean = True) + * e13fig12_season: season (default: ANN) + + + +#. Script perfmetrics/perfmetrics_main.ncl + + See :ref:`here`. + + +#. Script perfmetrics/perfmetrics_collect.ncl + + See :ref:`here`. + +#. Script ipcc_ar6/corr_pattern.ncl + + *Required settings for variables* + + * reference_dataset: name of reference observation + + *Optional settings for variables* + + * alternative_dataset: name of alternative observations + +#. Script ipcc_ar6/corr_pattern_collect.ncl + + *Optional settings for script* + + * diag_order: give order of plotting variables on the x-axis + * labels: List of labels for each variable on the x-axis + * model_spread: if True, model spread is shaded + * plot_median: if True, median is plotted + * project_order: give order of projects + + +Variables +--------- + +* et (land, monthly mean, longitude latitude time) +* fgco2 (ocean, monthly mean, longitude latitude time) +* gpp (land, monthly mean, longitude latitude time) +* hfds (land, monthly mean, longitude latitude time) +* hus (land, monthly mean, longitude latitude level time) +* lai (land, monthly mean, longitude latitude time) +* lwcre (atmos, monthly mean, longitude latitude time) +* nbp (land, monthly mean, longitude latitude time) +* pr (atmos, monthly mean, longitude latitude time) +* psl (atmos, monthly mean, longitude latitude time) +* rlds (atmos, monthly mean, longitude latitude time) +* rlus (atmos, monthly mean, longitude latitude time) +* rlut (atmos, monthly mean, longitude latitude time) +* rsds (atmos, monthly mean, longitude latitude time) +* rsus (atmos, monthly mean, longitude latitude time) +* rsut (atmos, monthly mean, longitude latitude time) +* sm (land, monthly mean, longitude latitude time) +* sic (seaice, monthly mean, longitude latitude time) +* siconc (seaice, monthly mean, longitude latitude time) +* swcre (atmos, monthly mean, longitude latitude time) +* ta (atmos, monthly mean, longitude latitude level time) +* tas (atmos, monthly mean, longitude latitude time) +* tasa (atmos, monthly mean, longitude latitude time) +* tos (atmos, monthly mean, longitude latitude time) +* ts (atmos, monthly mean, longitude latitude time) +* ua (atmos, monthly mean, longitude latitude level time) +* va (atmos, monthly mean, longitude latitude level time) +* zg 
(atmos, monthly mean, longitude latitude level time) + + +Observations and reformat scripts +--------------------------------- + +* AIRS (hus - obs4MIPs) +* ATSR (tos - obs4MIPs) +* BerkeleyEarth (tasa - esmvaltool/cmorizers/data/formatters/datasets/berkeleyearth.py) +* CERES-EBAF (rlds, rlus, rlut, rlutcs, rsds, rsus, rsut, rsutcs - obs4MIPs) +* CRU (pr - esmvaltool/cmorizers/data/formatters/datasets/cru.py) +* ESACCI-SOILMOISTURE (sm - esmvaltool/cmorizers/data/formatters/datasets/esacci_soilmoisture.py) +* ESACCI-SST (ts - esmvaltool/cmorizers/data/formatters/datasets/esacci_sst.py) +* ERA5 (hus, psl, ta, tas, ua, va, zg - ERA5 data can be used via the native6 project) +* ERA-Interim (hfds - cmorizers/data/formatters/datasets/era_interim.py) +* FLUXCOM (gpp - cmorizers/data/formatters/datasets/fluxcom.py) +* GHCN (pr - esmvaltool/cmorizers/data/formatters/datasets/ghcn.ncl) +* GPCP-SG (pr - obs4MIPs) +* HadCRUT5 (tasa - esmvaltool/cmorizers/data/formatters/datasets/hadcrut5.py) +* HadISST (sic, tos, ts - esmvaltool/cmorizers/data/formatters/datasets/hadisst.ncl) +* JMA-TRANSCOM (fgco2, nbp - esmvaltool/cmorizers/data/formatters/datasets/jma_transcom.py) +* JRA-55 (psl - ana4MIPs) +* Kadow2020 (tasa - esmvaltool/cmorizers/data/formatters/datasets/kadow2020.py) +* LandFlux-EVAL (et - esmvaltool/cmorizers/data/formatters/datasets/landflux_eval.py) +* Landschuetzer2016 (fgco2 - esmvaltool/cmorizers/data/formatters/datasets/landschuetzer2016.py) +* LAI3g (lai - esmvaltool/cmorizers/data/formatters/datasets/lai3g.py) +* MTE (gpp - esmvaltool/cmorizers/data/formatters/datasets/mte.py) +* NCEP-NCAR-R1 (ta, tas, ua, va, zg - esmvaltool/cmorizers/data/formatters/datasets/ncep_ncar_r1.py) +* NOAAGlobalTemp (tasa - esmvaltool/cmorizers/data/formatters/datasets/noaaglobaltemp.py) + + +References +---------- + +* Eyring, V., N.P. Gillett, K.M. Achuta Rao, R. Barimalala, M. Barreiro Parrillo, N. Bellouin, C. Cassou, P.J. Durack, Y. Kosaka, S. McGregor, S. Min, O. Morgenstern, and Y. Sun, 2021: Human Influence on the Climate System. In Climate Change 2021: The Physical Science Basis. Contribution of Working Group I to the Sixth Assessment Report of the Intergovernmental Panel on Climate Change [Masson-Delmotte, V., P. Zhai, A. Pirani, S.L. Connors, C. Péan, S. Berger, N. Caud, Y. Chen, L. Goldfarb, M.I. Gomis, M. Huang, K. Leitzell, E. Lonnoy, J.B.R. Matthews, T.K. Maycock, T. Waterfield, O. Yelekçi, R. Yu, and B. Zhou (eds.)]. Cambridge University Press, Cambridge, United Kingdom and New York, NY, USA, pp. 423-552, doi: 10.1017/9781009157896.005. + + +Example plots +------------- + +.. figure:: /recipes/figures/ipccwg1ar6ch3/model_bias_tas_annualclim_CMIP6.png + :align: center + + Figure 3.3: Annual mean near-surface (2 m) air temperature (°C) for the period 1995-2014. (a) Multi-model (ensemble) mean constructed with one realization of the CMIP6 historical experiment from each model. (b) Multi-model mean bias, defined as the difference between the CMIP6 multi-model mean and the climatology of the fifth generation European Centre for Medium-Range Weather Forecasts (ECMWF) atmospheric reanalysis of the global climate (ERA5). (c) Multi-model mean of the root mean square error calculated over all months separately and averaged, with respect to the climatology from ERA5.
Uncertainty is represented using the advanced + approach: No overlay indicates regions with robust signal, where >=66% of + models show change greater than the variability threshold and >=80% of all + models agree on sign of change; diagonal lines indicate regions with no + change or no robust signal, where <66% of models show a change greater + than the variability threshold; crossed lines indicate regions with + conflicting signal, where >=66% of models show change greater than the + variability threshold and <80% of all models agree on sign of change. + +.. figure:: /recipes/figures/ipccwg1ar6ch3/gsat_Global_CMIP6_historical-ssp245_anom_1850-2020.png + :align: center + + Figure 3.4a: Observed and simulated time series of the anomalies in annual + and global mean surface air temperature (GSAT). All anomalies are + differences from the 1850-1900 time-mean of each individual time series. + The reference period 1850-1900 is indicated by grey shading. (a) Single + simulations from CMIP6 models (thin lines) and the multi-model mean (thick + red line). Observational data (thick black lines) are from the Met Office + Hadley Centre/Climatic Research Unit dataset (HadCRUT5), and are blended + surface temperature (2 m air temperature over land and sea surface + temperature over the ocean). All models have been subsampled using the + HadCRUT5 observational data mask. Vertical lines indicate large historical + volcanic eruptions. Inset: GSAT for each model over the reference period, + not masked to any observations. + +.. figure:: /recipes/figures/ipccwg1ar6ch3/gsat_Global_multimodel_anom_1850-2020.png + :align: center + + Figure 3.4b: Observed and simulated time series of the anomalies in annual + and global mean surface air temperature (GSAT). All anomalies are + differences from the 1850-1900 time-mean of each individual time series. + The reference period 1850-1900 is indicated by grey shading. (b) Multi-model + means of CMIP5 (blue line) and CMIP6 (red line) ensembles and associated 5th + to 95th percentile ranges (shaded regions). Observational data are HadCRUT5, + Berkeley Earth, National Oceanic and Atmospheric Administration + NOAAGlobalTemp and Kadow et al. (2020). Masking was done as in (a). CMIP6 + historical simulations were extended with SSP2-4.5 simulations for the + period 2015-2020 and CMIP5 simulations were extended with RCP4.5 simulations + for the period 2006-2020. All available ensemble members were used. The + multi-model means and percentiles were calculated solely from simulations + available for the whole time span (1850-2020). + +.. figure:: /recipes/figures/ipccwg1ar6ch3/tas_std_dev_zonmean.png + :align: center + + Figure 3.5: The standard deviation of annually averaged zonal-mean + near-surface air temperature. This is shown for four detrended observed + temperature datasets (HadCRUT5, Berkeley Earth, NOAAGlobalTemp and Kadow et + al. (2020), for the years 1995-2014) and 59 CMIP6 pre-industrial control + simulations (one ensemble member per model, 65 years) (after Jones et al., + 2013). For line colours see the legend of Figure 3.4. Additionally, the + multi-model mean (red) and standard deviation (grey shading) are shown. + Observational and model datasets were detrended by removing the + least-squares quadratic trend. + +.. figure:: /recipes/figures/ipccwg1ar6ch3/tas_anom_damip_global_1850-2020.png + :align: center + + Figure 3.9: Global, land and ocean annual mean near-surface air + temperature anomalies in CMIP6 models and observations. 
Timeseries are + shown for CMIP6 historical anthropogenic and natural (brown), natural-only + (green), greenhouse gas only (grey) and aerosol only (blue) simulations + (multi-model means shown as thick lines, and shaded ranges between the 5th + and 95th percentiles) and for HadCRUT5 (black). All models have been + subsampled using the HadCRUT5 observational data mask. Temperature + anomalies are shown relative to 1950-2010 for Antarctica and relative to + 1850-1900 for other continents. CMIP6 historical simulations are extended with + the SSP2-4.5 scenario simulations. All available ensemble members were used. + Regions are defined by Iturbide et al. (2020). + +.. figure:: /recipes/figures/ipccwg1ar6ch3/model_bias_pr_annualclim_CMIP6.png + :align: center + + Figure 3.13: Annual-mean precipitation rate (mm day-1) for the period + 1995-2014. (a) Multi-model (ensemble) mean constructed with one realization + of the CMIP6 historical experiment from each model. (b) Multi-model mean + bias, defined as the difference between the CMIP6 multi-model mean and + precipitation analysis from the Global Precipitation Climatology Project + (GPCP) version 2.3 (Adler et al., 2003). (c) Multi-model mean of the root + mean square error calculated over all months separately and averaged with + respect to the precipitation analysis from GPCP version 2.3. Uncertainty is + represented using the advanced approach. No overlay indicates regions with + robust signal, where >=66% of models show change greater than the variability + threshold and >=80% of all models agree on sign of change; diagonal lines + indicate regions with no change or no robust signal, where <66% of models + show a change greater than the variability threshold; crossed lines indicate + regions with conflicting signal, where >=66% of models show change greater + than the variability threshold and <80% of all models agree on the sign of + change. + +.. figure:: /recipes/figures/ipccwg1ar6ch3/precip_anom_1950-2014.png + :align: center + + Figure 3.15: Observed and simulated time series of anomalies in zonal + average annual mean precipitation. (a), (c-f) Evolution of global and zonal + average annual mean precipitation (mm day-1) over areas of land where there + are observations, expressed relative to the base period of 1961-1990, + simulated by CMIP6 models (one ensemble member per model) forced with both + anthropogenic and natural forcings (brown) and natural forcings only + (green). Multi-model means are shown in thick solid lines and shading + shows the 5-95% confidence interval of the individual model simulations. + The data are smoothed using a low-pass filter. Observations from three + different datasets are included: gridded values derived from Global + Historical Climatology Network (GHCN version 2) station data, updated + from Zhang et al. (2007), data from the Global Precipitation Climatology + Project (GPCP L3 version 2.3, Adler et al. (2003)) and from the Climatic + Research Unit (CRU TS4.02, Harris et al. (2014)). Also plotted are + boxplots showing interquartile and 5-95% ranges of simulated trends over + the period for simulations forced with both anthropogenic and natural + forcings (brown) and natural forcings only (blue). Observed trends for each + observational product are shown as horizontal lines. Panel (b) shows annual + mean precipitation rate (mm day-1) of GHCN version 2 for the years 1950-2014 + over land areas used to compute the plots. + +..
figure:: /recipes/figures/ipccwg1ar6ch3/zonal_westerly_winds.png + :align: center + + Figure 3.19: Long-term mean (thin black contours) and linear trend (colour) + of zonal mean December-January-February zonal winds from 1985 to 2014 + in the Southern Hemisphere. The figure shows (a) ERA5 and (b) the CMIP6 + multi-model mean (58 CMIP6 models). The solid contours show positive + (westerly) and zero long-term mean zonal wind, and the dashed contours show + negative (easterly) long-term mean zonal wind. Only one ensemble member per + model is included. Figure is modified from Eyring et al. (2013), their + Figure 12. + +.. figure:: /recipes/figures/ipccwg1ar6ch3/fig_3_42_a.png + :align: center + + Figure 3.42a: Relative space-time root-mean-square deviation (RMSD) + calculated from the climatological seasonal cycle of the CMIP simulations + (1980-1999) compared to observational datasets. A relative performance + measure is displayed, with blue shading indicating better and red shading + indicating worse performance than the median error of all model results. A + diagonal split of a grid square shows the relative error with respect to the + reference data set (lower right triangle) and an additional data set (upper + left triangle). Reference/additional datasets are from top to bottom in (a): + ERA5/NCEP, GPCP-SG/GHCN, CERES-EBAF, CERES-EBAF, CERES-EBAF, CERES-EBAF, + JRA-55/ERA5, ESACCI-SST/HadISST, ERA5/NCEP, ERA5/NCEP, ERA5/NCEP, ERA5/NCEP, + ERA5/NCEP, ERA5/NCEP, AIRS/ERA5, ERA5/NCEP. White boxes are used when data + are not available for a given model and variable. Figure is updated and + expanded from Bock et al. (2020). + +.. figure:: /recipes/figures/ipccwg1ar6ch3/patterncor.png + :align: center + + Figure 3.43 | Centred pattern correlations between models and observations + for the annual mean climatology over the period 1980-1999. Results are + shown for individual CMIP3 (green), CMIP5 (blue) and CMIP6 (red) models (one + ensemble member from each model is used) as short lines, along with the + corresponding multi-model ensemble averages (long lines). Correlations are + shown between the models and the primary reference observational data set + (from left to right: ERA5, GPCP-SG, CERES-EBAF, CERES-EBAF, CERES-EBAF, + CERES-EBAF, JRA-55, ESACCI-SST, ERA5, ERA5, ERA5, ERA5, ERA5, ERA5, AIRS, + ERA5). In addition, the correlations between the primary reference and + additional observational datasets (from left to right: NCEP, GHCN, -, -, -, + -, ERA5, HadISST, NCEP, NCEP, NCEP, NCEP, NCEP, NCEP, NCEP, ERA5) are shown + (solid grey circles) if available. To ensure a fair comparison across a + range of model resolutions, the pattern correlations are computed after + regridding all datasets to a resolution of 4° in longitude and 5° in + latitude. diff --git a/doc/sphinx/source/recipes/recipe_kcs.rst b/doc/sphinx/source/recipes/recipe_kcs.rst new file mode 100644 index 0000000000..1ed117ecb6 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_kcs.rst @@ -0,0 +1,160 @@ +.. _recipe_kcs: + +KNMI Climate Scenarios 2014 +=========================== + +Overview +-------- + +This recipe implements the method described in `Lenderink et al., 2014 `_, to prepare the 2014 KNMI Climate Scenarios (KCS) for the Netherlands. A set of 8 global climate projections from EC-Earth was downscaled with the RACMO regional climate model.
Since the EC-Earth ensemble is not readily representative of the spread in the full CMIP ensemble, this method recombines 5-year segments from the EC-Earth ensemble to obtain a large suite of "resamples". Subsequently, 8 new resamples are selected that cover the spread in CMIP much better than the original set. + +The original method created 8 resampled datasets: + +* 2 main scenarios: Moderate (M) and Warm (W) (Lenderink 2014 uses "G" instead of "M"). +* 2 'sub'scenarios: Relatively high (H) or low (L) changes in seasonal temperature and precipitation +* 2 time horizons: Mid-century (MOC; 2050) and end-of-century (EOC; 2085) +* Each scenario consists of changes calculated between 2 periods: Control (e.g. 1981-2010) and future (variable). + +The configuration settings for these resamples can be found in table 1 of Lenderink 2014's `supplementary data `_. + +Implementation +-------------- + +The implementation is such that application to other datasets, regions, etc. is relatively straightforward. The description below focuses on the reference use case of Lenderink et al., 2014, where the target model was EC-Earth. An external set of EC-Earth data (all RCP85) was used, for which 3D fields for downscaling were available as well. In the recipe shipped with ESMValTool, however, the target model is CCSM4, so that it works out of the box with ESGF data only. + +In the first diagnostic, the spread of the full CMIP ensemble is used to obtain 4 values of a *global* :math:`{\Delta}T_{CMIP}`, corresponding to the 10th and 90th percentiles for the M and W scenarios, respectively, for both MOC and EOC. Subsequently, for each of these 4 *steering parameters*, 30-year periods are selected from the target model ensemble, where :math:`{\Delta}T_{target}{\approx}{\Delta}T_{CMIP}`. + +In the second diagnostic, for both the control and future periods, the N target model ensemble members are split into 6 segments of 5 years each. Out of all :math:`N^6` possible re-combinations of these 5-year segments, eventually M new 'resamples' are selected based on *local* changes in seasonal temperature and precipitation. This is done in the following steps: + +1. Select 1000 samples for the control period, and 2 x 1000 samples for the future period (one for each subscenario). Step 1 poses a constraint on winter precipitation. For the control period, winter precipitation must still closely represent the average of the original ensemble. For the two future periods, the change in winter precipitation with respect to the control period must approximately equal 4% per degree :math:`{\Delta}T` (subscenario L) or 8% per degree :math:`{\Delta}T` (subscenario H). +2. Further constrain the selection by picking samples that represent either high or low changes in summer precipitation and summer and winter temperature, by limiting the remaining samples to certain percentile ranges: relatively wet/cold in the control and dry/warm in the future, or vice versa. The percentile ranges are listed in table 1 of Lenderink 2014's supplement. This should result in approximately 50 remaining samples for each scenario, for both control and future. +3. Use a Monte-Carlo method to make a final selection of 8 resamples with minimal reuse of the same ensemble member/segment. + +Datasets have been split into two parts: the CMIP datasets and the target model datasets. An example use case for this recipe is to compare CMIP5 with CMIP6.
The recipe can work with a target model that is not part of CMIP, provided that the data are CMOR compatible and use the same data reference syntax as the CMIP data. Note that you can specify :ref:`multiple data paths` in the configuration. + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + +- recipe_kcs.yml + +Diagnostics are stored in diag_scripts/kcs/ + +- global_matching.py +- local_resampling.py + +.. note:: + We highly recommend using the options described in :ref:`rerunning`. The speed bottleneck for the first diagnostic is the preprocessor. In the second diagnostic, step 1 is the most time-consuming, whereas steps 2 and 3 are likely to be repeated several times. Therefore, intermediate files are saved after step 1, and the diagnostic will automatically detect and use them if the ``-i`` flag is used. + +User settings +------------- + +1. Script + + *Required settings for script* + + * ``scenario_years``: a list of time horizons. Default: ``[2050, 2085]`` + * ``scenario_percentiles``: a list of percentiles for the steering table. Default: ``[p10, p90]`` + + *Required settings for preprocessor* + This diagnostic needs global mean temperature anomalies for each dataset, both CMIP and the target model. Additionally, the multimodel statistics preprocessor must be used to produce the percentiles specified in the setting for the script above. + +2. Script + + *Required settings for script* + + * ``control_period``: the control period shared between all scenarios. Default: ``[1981, 2010]`` + * ``n_samples``: the final number of recombinations to be selected. Default: ``8`` + * ``scenarios``: a scenario name and list of options. The default setting is a single scenario: + + .. code-block:: yaml + + scenarios: + ML_MOC: # scenario name; can be chosen by the user + description: "Moderate / low changes in seasonal temperature & precipitation" + global_dT: 1.0 + scenario_year: 2050 + resampling_period: [2021, 2050] + dpr_winter: 4 + pr_summer_control: [25, 55] + pr_summer_future: [45, 75] + tas_winter_control: [50, 80] + tas_winter_future: [20, 50] + tas_summer_control: [0, 100] + tas_summer_future: [0, 50] + + These values are taken from table 1 in Lenderink 2014's supplementary material. Multiple scenarios can be processed at once by appending more configurations below the default one. For new applications, ``global_dT``, ``resampling_period`` and ``dpr_winter`` are informed by the output of the first diagnostic. The percentile bounds in the scenario settings (e.g. ``tas_winter_control`` and ``tas_winter_future``) are to be tuned until a satisfactory scenario spread over the full CMIP ensemble is achieved. + + *Required settings for preprocessor* + + This diagnostic requires data on a single point. However, the ``extract_point`` preprocessor can be changed to ``extract_shape`` or ``extract_region``, in conjunction with an area mean. Of course, the coordinates can also be changed to analyze a different region. + +Variables +--------- + +Variables are precipitation and temperature, specified separately for the target model and the CMIP ensemble: + +* pr_target (atmos, monthly mean, longitude latitude time) +* tas_target (atmos, monthly mean, longitude latitude time) +* pr_cmip (atmos, monthly mean, longitude latitude time) +* tas_cmip (atmos, monthly mean, longitude latitude time) + +References +---------- + +* `Lenderink et al. 2014, Environ. Res. Lett., 9, 115008 `_.
+ +Example output +-------------- + +The diagnostic ``global_matching`` produces a scenarios table like the one below + +.. code-block:: python + + year percentile cmip_dt period_bounds target_dt pattern_scaling_factor + 0 2050 P10 0.98 [2019, 2048] 0.99 1.00 + 1 2050 P90 2.01 [2045, 2074] 2.02 0.99 + 2 2085 P10 1.38 [2030, 2059] 1.38 1.00 + 3 2085 P90 3.89 [2071, 2100] 3.28 1.18 + + +which is printed to the log file and also saved as a csv-file ``scenarios.csv``. +Additionally, a figure is created showing the CMIP spread in global temperature change, +AND highlighting the selected steering parameters and resampling periods: + +.. _fig_kcs_global_matching: +.. figure:: /recipes/figures/kcs/global_matching.png + :align: center + +The diagnostic ``local_resampling`` produces a number of output files: + +* ``season_means_.nc``: intermediate results, containing the season means for each segment of the original target model ensemble. +* ``top1000_.csv``: intermediate results, containing the 1000 combinations that have been selected based on winter mean precipitation. +* ``indices_.csv``: showing the final set of resamples as a table: + + .. code-block:: python + + control future + Segment 0 Segment 1 Segment 2 Segment 3 Segment 4 Segment 5 Segment 0 Segment 1 Segment 2 Segment 3 Segment 4 Segment 5 + Combination 0 5 7 6 3 1 3 2 4 2 4 7 7 + Combination 1 0 3 0 4 3 2 4 1 6 1 3 0 + Combination 2 2 4 3 7 4 2 5 4 6 6 4 2 + Combination 3 1 4 7 2 3 6 5 3 1 7 4 1 + Combination 4 5 7 6 3 1 3 2 3 0 6 1 7 + Combination 5 7 2 1 4 5 1 6 0 4 2 3 3 + Combination 6 7 2 2 0 6 6 5 2 1 5 4 2 + Combination 7 6 3 2 1 6 1 2 1 0 2 1 3 + + +* ``resampled_control_.nc``: containing the monthly means for the control period according to the final combinations. +* ``resampled_future_.nc``: containing the monthly means for the future period according to the final combinations. + +* Provenance information: bibtex, xml, and/or text files containing citation information are stored alongside the final result and the final figure. + The final combinations only derive from the target model data, whereas the figure also uses CMIP data. +* A figure used to validate the final result, reproducing figures 5 and 6 from Lenderink et al.: + +.. _fig_kcs_local_validation: +.. figure:: /recipes/figures/kcs/local_validation_2085.png + :align: center diff --git a/doc/sphinx/source/recipes/recipe_landcover.rst b/doc/sphinx/source/recipes/recipe_landcover.rst index 97528f1b46..e22598d69d 100644 --- a/doc/sphinx/source/recipes/recipe_landcover.rst +++ b/doc/sphinx/source/recipes/recipe_landcover.rst @@ -1,5 +1,7 @@ -Landcover -========= +.. _recipes_landcover: + +Landcover diagnostics +===================== Overview @@ -124,4 +126,3 @@ Example plots :width: 14cm Biases in five major land cover fractions for different regions and one experiment. - diff --git a/doc/sphinx/source/recipes/recipe_li17natcc.rst b/doc/sphinx/source/recipes/recipe_li17natcc.rst new file mode 100644 index 0000000000..4b2cf36c7e --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_li17natcc.rst @@ -0,0 +1,93 @@ +.. _recipes_li17natcc: + +Constraining future Indian Summer Monsoon projections with the present-day precipitation over the tropical western Pacific +========================================================================================================================== + +Overview +-------- + + +Following `Li et al. 
(2017)`_ the change between present-day and future Indian Summer Monsoon (ISM) precipitation is constrained +using the precipitation over the tropical western Pacific compared to +a fixed, observed amount of 6 mm d\ :sup:`-1` from the Global Precipitation Climatology Project (GPCP) `(Adler et al., 2003)`_ for 1980-2009. +For CMIP6, historical data for 1980-2009 should be used. For CMIP5, historical data from 1980-2005 should be used, due to the length of the data sets. +At the moment it is not possible to use a combined ``['historical', 'rcp']`` data set, because the diagnostic requires that a historical data set is given. + +.. _`(Adler et al., 2003)`: https://journals.ametsoc.org/doi/abs/10.1175/1525-7541%282003%29004%3C1147%3ATVGPCP%3E2.0.CO%3B2 +.. _`Li et al. (2017)`: https://www.nature.com/articles/nclimate3387 + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + + * recipe_li17natcc.yml + + +Diagnostics are stored in diag_scripts/ + + * emergent_constraints/lif1f2.py + + +User settings in recipe +----------------------- + +The recipe can be run with different CMIP5 and CMIP6 models. For each model, two experiments must be given: +one historical run, preferably covering 1980-2009, and one other model experiment. The user can choose the other model experiment, +but it needs to be the same for all given models. +The start and end year for the second data set can be chosen by the user, but should be consistent for all models +(the same for future scenarios, the same length for other experiments). Different ensemble members are not yet supported. + + +Variables +--------- + +* *pr* (atmos, monthly, longitude, latitude, time) +* *ua* (atmos, monthly, longitude, latitude, plev, time) +* *va* (atmos, monthly, longitude, latitude, plev, time) +* *ts* (atmos, monthly, longitude, latitude, time) + + +Observations and reformat scripts +--------------------------------- + +*None* + + +References +---------- + +* Li, G., Xie, S. P., He, C., and Chen, Z. S.: Western Pacific emergent constraint lowers projected increase in Indian summer monsoon rainfall, Nat Clim Change, 7, 708-712, 2017 + + +Example plots +------------- + +.. _li17natcc_fig2a: +.. figure:: /recipes/figures/emergent_constraints/li17natcc_fig2a.png + :align: center + :width: 50% + + Scatter plot of the simulated tropical western Pacific precipitation (mm d\ :sup:`-1`\ ) versus projected average ISM (Indian Summer Monsoon) rainfall changes under the ssp585 scenario. The red line denotes the observed present-day western Pacific precipitation and the inter-model correlation (r) is shown. (CMIP6). + +.. _li17natcc_fig2b: +.. figure:: /recipes/figures/emergent_constraints/li17natcc_fig2b.png + :align: center + :width: 50% + + Scatter plot of the uncorrected versus corrected average ISM (Indian Summer Monsoon) rainfall change ratios (% per degree Celsius of global SST warming). The error bars for the multi-model mean indicate the standard deviation spread among models, and the 2:1 line (y = 0.5x) is used to illustrate the multi-model mean reduction in projected rainfall increase. (CMIP6). + +.. _li17natcc_fig2c: +.. figure:: /recipes/figures/emergent_constraints/li17natcc_fig2c.png + :align: center + :width: 50% + + Multi-model mean rainfall change due to model error. Box displays the area used to define the average ISM (Indian Summer Monsoon) rainfall. Precipitation changes are normalized by the corresponding global mean SST increase for each model. (CMIP6). + +.. _li17natcc_fig2d: +..
figure:: /recipes/figures/emergent_constraints/li17natcc_fig2d.png + :align: center + :width: 50% + + Corrected multi-model mean rainfall change. Box displays the area used to define the average ISM (Indian Summer Monsoon) rainfall. Precipitation changes are normalized by the corresponding global mean SST increase for each model. (CMIP6). diff --git a/doc/sphinx/source/recipes/recipe_martin18grl.rst b/doc/sphinx/source/recipes/recipe_martin18grl.rst new file mode 100644 index 0000000000..6496afeb55 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_martin18grl.rst @@ -0,0 +1,92 @@ +.. _recipes_martin18grl: + +Drought characteristics following Martin (2018) +=============================================== + +Overview +-------- + + +Following `Martin (2018)`_, drought characteristics are calculated based on the standardized precipitation index (SPI), see `Mckee et al. (1993)`_. These characteristics are frequency, average duration, SPI index and severity index of drought events. + +.. _`Martin (2018)`: https://agupubs.onlinelibrary.wiley.com/doi/abs/10.1029/2018GL079807 +.. _`Mckee et al. (1993)`: https://www.nature.com/articles/nclimate3387 + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + + * recipe_martin18grl.yml + + +Diagnostics are stored in diag_scripts/ + + * droughtindex/diag_save_spi.R + * droughtindex/collect_drought_obs_multi.py + * droughtindex/collect_drought_model.py + * droughtindex/collect_drought_func.py + + +User settings in recipe +----------------------- + +The recipe can be run with different CMIP5 and CMIP6 models and one observational or reanalysis data set. + +The droughtindex/diag_save_spi.R script calculates the SPI index for any given time series. It is based on droughtindex/diag_spi.R but saves the SPI index and does not plot the histogram. The distribution and the representative time scale (smooth_month) can be set by the user; the values used in Martin (2018) are smooth_month: 6 and distribution: 'Gamma' for SPI. + +There are two Python diagnostics, which can use the SPI data to calculate the drought characteristics (frequency, average duration, SPI index and severity index of drought events) based on Martin (2018): + +* To compare these characteristics between model data and observations or reanalysis data, use droughtindex/collect_drought_obs_multi.py + Here, the user can set: + * indexname: Necessary to identify data produced by droughtindex/diag_save_spi.R as well as write captions and filenames. At the moment only indexname: 'SPI' is supported. + * threshold: Threshold for this index below which an event is considered to be a drought. The setting for SPI should usually be threshold: -2.0, but any other value will be accepted. Values should not be < -3.0 or > 3.0 for SPI (else it will identify none/always drought conditions). + +* To compare these characteristics between different time periods in model data, use droughtindex/collect_drought_model.py + Here, the user can set: + * indexname: Necessary to identify data produced by droughtindex/diag_save_spi.R as well as write captions and filenames. At the moment only indexname: 'SPI' is supported. + * threshold: Threshold for this index below which an event is considered to be a drought. The setting for SPI should usually be threshold: -2.0, but any other value will be accepted. Values should not be < -3.0 or > 3.0 for SPI (else it will identify none/always drought conditions). + * start_year: Needs to be equal to or larger than the start_year for droughtindex/diag_save_spi.R. + * end_year: Needs to be equal to or smaller than the end_year for droughtindex/diag_save_spi.R. + * comparison_period: should be < (end_year - start_year)/2 to have non-overlapping time series in the comparison. + +The third diagnostic droughtindex/collect_drought_func.py contains functions used by both of the scripts above.
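As an illustration, the SPI scripts described above could be configured in the recipe's ``scripts`` section roughly as follows (a hypothetical sketch with made-up values, not copied from ``recipe_martin18grl.yml``):

.. code-block:: yaml

   scripts:
     save_spi:
       script: droughtindex/diag_save_spi.R
       smooth_month: 6        # representative time scale used in Martin (2018)
       distribution: 'Gamma'  # distribution used in Martin (2018)
     collect_drought:
       script: droughtindex/collect_drought_model.py
       indexname: 'SPI'       # only 'SPI' is supported at the moment
       threshold: -2.0        # drought threshold for SPI
       start_year: 1950       # illustrative values
       end_year: 2100
       comparison_period: 50  # < (end_year - start_year)/2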
Variables +--------- + +* *pr* (atmos, monthly, longitude, latitude, time) + + +Observations and reformat scripts +--------------------------------- + +*None* + + +References +---------- + +* Martin, E.R. (2018). Future Projections of Global Pluvial and Drought Event Characteristics. Geophysical Research Letters, 45, 11913-11920. + +* McKee, T. B., Doesken, N. J., & Kleist, J. (1993). The relationship of drought frequency and duration to time scales. In Proceedings of the 8th Conference on Applied Climatology (Vol. 17, No. 22, pp. 179-183). Boston, MA: American Meteorological Society. + +Example plots +------------- + +.. _martin18grl_fig1: +.. figure:: /recipes/figures/droughtindex/martin18grl_fig1.png + :align: center + :width: 50% + + Global map of the percentage difference between the multi-model mean of 15 CMIP models and the CRU data for the number of drought events [%] based on SPI. + +.. _martin18grl_fig2: +.. figure:: /recipes/figures/droughtindex/martin18grl_fig2.png + :align: center + :width: 50% + + Global map of the percentage difference between the multi-model mean of RCP8.5 scenario runs (2050-2100) and historical runs (1950-2000) for 15 CMIP models for the number of drought events [%] based on SPI. + + diff --git a/doc/sphinx/source/recipes/recipe_meehl20sciadv.rst b/doc/sphinx/source/recipes/recipe_meehl20sciadv.rst new file mode 100644 index 0000000000..2fd1e1e155 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_meehl20sciadv.rst @@ -0,0 +1,106 @@ +.. _recipes_meehl20sciadv: + +Context for interpreting equilibrium climate sensitivity and transient climate response from the CMIP6 Earth system models +========================================================================================================================== + +Overview +-------- + +This recipe reproduces the analysis of `Meehl et al., Sci. Adv. (2020)`_. In +this paper, the equilibrium climate sensitivity (ECS) and transient climate +response (TCR) are evaluated for the CMIP6 models and put into historical +context. + +.. _`Meehl et al., Sci. Adv. (2020)`: https://advances.sciencemag.org/content/6/26/eaba1981 + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + + * recipe_meehl20sciadv.yml + +Diagnostics are stored in diag_scripts/ + + * climate_metrics/ecs.py + * climate_metrics/tcr.py + * climate_metrics/create_table.py + * ipcc_ar5/ch09_fig09_42b.py + + +User settings in recipe +----------------------- + +* Script climate_metrics/ecs.py + + See :ref:`here`. + + +* Script climate_metrics/tcr.py + + See :ref:`here`. + + +* Script climate_metrics/create_table.py + + * ``calculate_mean``, *bool*, optional (default: ``True``): Calculate + mean over all datasets and add it to table. + * ``calculate_std``, *bool*, optional (default: ``True``): Calculate + standard deviation over all datasets and add it to table. + * ``exclude_datasets``, *list of str*, optional (default: + ``['MultiModelMean']``): Exclude certain datasets when calculating + statistics over all datasets and for assigning an index. + * ``patterns``, *list of str*, optional: Patterns to filter the list of input + data. + * ``round_output``, *int*, optional: If given, round output to given + number of decimals (see the recipe sketch below). + + +* Script ipcc_ar5/ch09_fig09_42b.py + + See :ref:`here`.
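As an illustration, the ``create_table.py`` options listed above might appear in the recipe as follows (a hypothetical sketch; the ``patterns`` values in particular are made up):

.. code-block:: yaml

   scripts:
     table:
       script: climate_metrics/create_table.py
       calculate_mean: true
       calculate_std: true
       exclude_datasets: ['MultiModelMean']
       patterns: ['ecs.nc', 'tcr.nc']  # illustrative filter patterns
       round_output: 2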
Variables +--------- + +* *rlut* (atmos, monthly, longitude, latitude, time) +* *rsdt* (atmos, monthly, longitude, latitude, time) +* *rsut* (atmos, monthly, longitude, latitude, time) +* *tas* (atmos, monthly, longitude, latitude, time) + + + +References +---------- + +* Meehl, G. A., Senior, C. A., Eyring, V., Flato, G., Lamarque, J. F., + Stouffer, R. J., Taylor, K. E. and Schlund, M., *Context for interpreting + equilibrium climate sensitivity and transient climate response from the CMIP6 + Earth system models*, Science Advances, 6(26), eaba1981, + ``_, 2020. + + +Example plots +------------- + +.. _fig_meehl20sciadv_1: +.. figure:: /recipes/figures/meehl20sciadv/cmip6_gregory_regression.png + :align: center + :width: 50% + + ECS calculated for the CMIP6 models using the Gregory method over different + time scales. Using the entire 150-year 4xCO2 experiment (black line), there + is an ECS value of 3.8 K; using only the first 20 years (blue dots and blue + line), there is an ECS of 3.4 K; and using the last 130 years, there is an + ECS of 4.1 K (orange dots and orange line). + +.. _fig_meehl20sciadv_2: +.. figure:: /recipes/figures/meehl20sciadv/cmip6_tcr_vs_ecs.png + :align: center + :width: 50% + + TCR as a function of ECS for the CMIP6 models (black line is a linear fit). + The :math:`R^2` values are given in the upper left parts of each panel. The + numbers denote individual CMIP6 models. diff --git a/doc/sphinx/source/recipes/recipe_miles.rst b/doc/sphinx/source/recipes/recipe_miles.rst index d973bc72b0..a621463222 100644 --- a/doc/sphinx/source/recipes/recipe_miles.rst +++ b/doc/sphinx/source/recipes/recipe_miles.rst @@ -1,51 +1,57 @@ +.. _recipes_miles: + Blocking metrics and indices, teleconnections and weather regimes (MiLES) ========================================================================= - + Overview -------- -Atmospheric blocking is a recurrent mid-latitude weather pattern identified by a large-amplitude, quasi-stationary, long-lasting, high-pressure anomaly that ‘‘blocks’’ the westerly flow forcing the jet stream to split or meander +Atmospheric blocking is a recurrent mid-latitude weather pattern identified by a large-amplitude, quasi-stationary, long-lasting, high-pressure anomaly that ‘‘blocks’’ the westerly flow, forcing the jet stream to split or meander `(Rex, 1950) `_. -It is typically initiated by the breaking of a Rossby wave in a diffluence region at the exit of the storm track, where it amplifies the underlying stationary ridge `(Tibaldi and Molteni, 1990) `_. -Blocking occurs more frequently in the Northern Hemisphere cold season, with larger frequencies observed over the Euro-Atlantic and North Pacific sectors. Its lifetime oscillates from a few days up to several weeks `(Davini et al., 2012) `_ sometimes leading to winter cold spells or summer heat waves. +It is typically initiated by the breaking of a Rossby wave in a diffluence region at the exit of the storm track, where it amplifies the underlying stationary ridge `(Tibaldi and Molteni, 1990) `_. +Blocking occurs more frequently in the Northern Hemisphere cold season, with larger frequencies observed over the Euro-Atlantic and North Pacific sectors.
Its lifetime oscillates from a few days up to several weeks `(Davini et al., 2012) `_, sometimes leading to winter cold spells or summer heat waves. -To this end, the MId-Latitude Evaluation System (MiLES) was developed as stand-alone package (https://github.com/oloapinivad/MiLES) to support analysis of mid-latitude weather patterns in terms of atmospheric blocking, teleconnections and weather regimes. The package was then implemented as recipe for ESMValTool. +To this end, the MId-Latitude Evaluation System (MiLES) was developed as a stand-alone package (https://github.com/oloapinivad/MiLES) to support analysis of mid-latitude weather patterns in terms of atmospheric blocking, teleconnections and weather regimes. The package was then implemented as a recipe for ESMValTool. The tool works on daily 500hPa geopotential height data (with data interpolated on a common 2.5x2.5 grid) and calculates the following diagnostics: - -* **1D Atmospheric Blocking:** -`Tibaldi and Molteni (1990) `_ index for Northern Hemisphere. Computed at fixed latitude of 60N, with delta of -5,-2.5,0,2.5,5 deg, fiN=80N and fiS=40N. Full timeseries and climatologies are provided in NetCDF4 Zip format. -* **2D Atmospheric blocking:** -Following the index by `Davini et al. (2012) `_. It is a 2D version of `Tibaldi and Molteni (1990) `_ for Northern Hemisphere atmospheric blocking evaluating meridional gradient reversal at 500hPa. It computes both Instantaneous Blocking and Blocking Events frequency, where the latter allows the estimation of the each blocking duration. It includes also two blocking intensity indices, i.e. the Meridional Gradient Index and the Blocking Intensity index. In addition the orientation (i.e. cyclonic or anticyclonic) of the Rossby wave breaking is computed. A supplementary Instantaneous Blocking index with the GHGS2 condition (see `Davini et al., 2012 `_) is also evaluated. -Full timeseries and climatologies are provided in NetCDF4 Zip format. +1D Atmospheric Blocking +*********************** +`Tibaldi and Molteni (1990) `_ index for Northern Hemisphere. Computed at fixed latitude of 60N, with delta of -5,-2.5,0,2.5,5 deg, fiN=80N and fiS=40N. Full timeseries and climatologies are provided in NetCDF4 Zip format. + +2D Atmospheric blocking +*********************** +Following the index by `Davini et al. (2012) `_. It is a 2D version of `Tibaldi and Molteni (1990) `_ for Northern Hemisphere atmospheric blocking evaluating meridional gradient reversal at 500hPa. It computes both Instantaneous Blocking and Blocking Events frequency, where the latter allows the estimation of each blocking duration. It also includes two blocking intensity indices, i.e. the Meridional Gradient Index and the Blocking Intensity index. In addition, the orientation (i.e. cyclonic or anticyclonic) of the Rossby wave breaking is computed. A supplementary Instantaneous Blocking index with the GHGS2 condition (see `Davini et al., 2012 `_) is also evaluated. +Full timeseries and climatologies are provided in NetCDF4 Zip format. -* **Z500 Empirical Orthogonal Functions:** -Based on SVD. The first 4 EOFs for North Atlantic (over the 90W-40E 20N-85N box) and Northern Hemisphere (20N-85N) or a custom region are computed. North Atlantic Oscillation, East Atlantic Pattern, and Arctic Oscillation can be evaluated. -Figures showing linear regression of PCs on monthly Z500 are provided. PCs and eigenvectors, as well as the variances explained are provided in NetCDF4 Zip format.
+Z500 Empirical Orthogonal Functions +*********************************** +Based on SVD. The first 4 EOFs for North Atlantic (over the 90W-40E 20N-85N box) and Northern Hemisphere (20N-85N) or a custom region are computed. North Atlantic Oscillation, East Atlantic Pattern, and Arctic Oscillation can be evaluated. +Figures showing linear regression of PCs on monthly Z500 are provided. PCs and eigenvectors, as well as the variances explained are provided in NetCDF4 Zip format. -* **North Atlantic Weather Regimes:** +North Atlantic Weather Regimes +****************************** Following k-means clustering of 500hPa geopotential height. 4 weather regimes over North Atlantic (80W-40E 30N-87.5N) are evaluated using anomalies from the daily seasonal cycle. This is done retaining the first North Atlantic EOFs which explain 80% of the variance to reduce the phase-space dimensions and then applying k-means clustering using the Hartigan-Wong algorithm with k=4. Figures report patterns and frequencies of occurrence. NetCDF4 Zip data are saved. Only 4 regimes and DJF are supported so far. - + Available recipes and diagnostics --------------------------------- - + Recipes are stored in recipes/ - + * recipe_miles_block.yml * recipe_miles_eof.yml * recipe_miles_regimes.yml - + Diagnostics are stored in diag_scripts/miles/ - + * miles_block.R -* miles_eof.R -* miles_regimes.R +* miles_eof.R +* miles_regimes.R -and subroutines +and subroutines * basis_functions.R * block_figures.R @@ -62,11 +68,12 @@ and subroutines User settings ------------- -#. miles_block.R +#. miles_block.R *Required settings for variables* * reference_dataset: reference dataset for comparison + * reference_exp: optional reference experiment for comparison (to use when comparing two experiments of the same dataset) *Required settings for script* @@ -77,6 +84,7 @@ User settings *Required settings for variables* * reference_dataset: reference dataset for comparison + * reference_exp: optional reference experiment for comparison (to use when comparing two experiments of the same dataset) *Required settings for script* @@ -84,10 +92,11 @@ User settings * teles: Select EOFs ('NAO','AO','PNA') or specify custom area as "lon1_lon2_lat1_lat2" #. miles_regimes.R - + *Required settings for variables* * reference_dataset: reference dataset + * reference_exp: optional reference experiment for comparison (to use when comparing two experiments of the same dataset) *Required or optional settings for script* @@ -96,32 +105,32 @@ User settings Variables --------- - + * zg (atmos, daily mean, longitude latitude time) - - + + Observations and reformat scripts --------------------------------- * ERA-INTERIM - - + + References ---------- -* REX, D. F. (1950), Blocking Action in the Middle Troposphere and its Effect upon Regional Climate. Tellus, 2: 196-211. doi: http://doi.org/10.1111/j.2153-3490.1950.tb00331.x +* REX, D. F. (1950), Blocking Action in the Middle Troposphere and its Effect upon Regional Climate. Tellus, 2: 196-211. doi: http://doi.org/10.1111/j.2153-3490.1950.tb00331.x * Davini, P., C. Cagnazzo, S. Gualdi, and A. Navarra (2012): Bidimensional Diagnostics, Variability, and Trends of Northern Hemisphere Blocking. J. Climate, 25, 6496–6509, doi: http://doi.org/10.1175/JCLI-D-12-00032.1. * Tibaldi S, Molteni F.: On the operational predictability of blocking. Tellus A 42(3): 343–365, doi: 10.1034/j.1600-0870.1990.t01-2-00003.x, 1990. https://doi.org/10.1034/j.1600-0870.1990.t01-2-00003.x * Paolo Davini. (2018, April 30).
MiLES - Mid Latitude Evaluation System (Version v0.51). Zenodo. http://doi.org/10.5281/zenodo.1237838 - + Example plots ------------- .. figure:: /recipes/figures/miles/miles_block.png - :width: 10cm - - Blocking events frequency for EC-Earth model 1980-1989, compared to ERA-Interim. - -.. figure:: /recipes/figures/miles/miles_eof1.png - :width: 10cm + :width: 14cm + + Blocking Events frequency for a CMIP5 EC-Earth historical run (DJF 1980-1989), compared to ERA-Interim. Units are percentage of blocked days per season. - Teleconnection indices as Z500 empirical orthogonal functions for the North Atlantic (the figure shows EOF1) +.. figure:: /recipes/figures/miles/miles_eof1.png + :width: 14cm + + North Atlantic Oscillation for a CMIP5 EC-Earth historical run (DJF 1980-1989) compared to ERA-Interim, shown as the linear regression of the monthly Z500 against the first Principal Component (PC1) of the North Atlantic region. diff --git a/doc/sphinx/source/recipes/recipe_model_evaluation.rst b/doc/sphinx/source/recipes/recipe_model_evaluation.rst new file mode 100644 index 0000000000..c61f34aa62 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_model_evaluation.rst @@ -0,0 +1,98 @@ +.. _recipe_model_evaluation: + +General model evaluation +======================== + +Overview +-------- + +These recipes and diagnostics provide a basic climate model evaluation with +observational data. +This is especially useful to get an overview of the performance of a +simulation. +The diagnostics used here allow plotting arbitrary preprocessor output, i.e., +arbitrary variables from arbitrary datasets. + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in `recipes/model_evaluation` + +* recipe_model_evaluation_basics.yml +* recipe_model_evaluation_clouds_clim.yml +* recipe_model_evaluation_clouds_cycles.yml +* recipe_model_evaluation_precip_zonal.yml + +Diagnostics are stored in `diag_scripts/monitor/` + +* :ref:`multi_datasets.py + `: + Monitoring diagnostic to show multiple datasets in one plot (incl. biases). + + +User settings +------------- + +It is recommended to use a vector graphic file type (e.g., SVG) for the output +format when running this recipe, i.e., run the recipe with the +:ref:`configuration options ` ``output_file_type: +svg``. +Note that map and profile plots are rasterized by default. +Use ``rasterize: false`` in the recipe to disable +this. + + +Recipe settings +~~~~~~~~~~~~~~~ + +A list of all possible configuration options that can be specified in the +recipe is given for each diagnostic individually (see links given for the +available diagnostics in the previous section). + + +Variables +--------- + +Any, but the variables' number of dimensions should match the ones expected by +each diagnostic (see links given for the available diagnostics in the previous +section). + + +Example plots +------------- + +.. _fig_1: +.. figure:: /recipes/figures/model_evaluation/map_tas_MPI-ESM1-2-HR_Amon.jpg + :align: center + :width: 14cm + +Global climatology of 2m near-surface air temperature. + +.. _fig_2: +.. figure:: /recipes/figures/model_evaluation/map_swcre_MPI-ESM1-2-HR_Amon.jpg + :align: center + :width: 14cm + +Global climatology of the shortwave cloud radiative effect (SWCRE). + +.. _fig_3: +.. figure:: /recipes/figures/model_evaluation/timeseries_rtnt_ambiguous_dataset_Amon.jpg + :align: center + :width: 14cm + +Time series of the global mean top-of-the-atmosphere net radiative flux. + +.. _fig_4: +.. 
figure:: /recipes/figures/model_evaluation/variable_vs_lat_pr_Amon.jpg + :align: center + :width: 14cm + +Zonal mean precipitation. + +.. _fig_5: +.. figure:: /recipes/figures/model_evaluation/annual_cycle_clt_southerocean_Amon.jpg + :align: center + :width: 14cm + +Annual cycle of Southern Ocean total cloud cover. diff --git a/doc/sphinx/source/recipes/recipe_modes_of_variability.rst b/doc/sphinx/source/recipes/recipe_modes_of_variability.rst index dbe881302d..56c67683e7 100644 --- a/doc/sphinx/source/recipes/recipe_modes_of_variability.rst +++ b/doc/sphinx/source/recipes/recipe_modes_of_variability.rst @@ -6,12 +6,12 @@ Modes of variability Overview -------- -The goal of this recipe is to compute modes of variability from a reference/observational dataset and a set of climate projections and calculate the root-mean-square error between the mean anomalies obtained for the clusters from the reference and projection data sets. -This is done through K-means clustering applied either directly to the spatial data or after computing the EOFs. +The goal of this recipe is to compute modes of variability from a reference or observational dataset and from a set of climate projections and calculate the root-mean-square error between the mean anomalies obtained for the clusters from the reference and projection data sets. +This is done through K-means or hierarchical clustering applied either directly to the spatial data or after computing the EOFs. -The user can specify the number of clusters to be computed. +The user can specify the number of clusters to be computed. -The recipe's output consist of netcdf files of the time series of the cluster occurrences, the mean anomaly corresponding to each cluster at each location and the corresponding p-value, for both the observed and projected weather regimes and the RMSE between them. +The recipe's output consists of three netcdf files for both the observed and projected weather regimes and the RMSE between them. Available recipes and diagnostics @@ -19,14 +19,14 @@ Recipes are stored in recipes/ -* recipe_modes_of_variability_wp4.yml +* recipe_modes_of_variability.yml Diagnostics are stored in diag_scripts/magic_bsc/ -* WeatherRegime.r - function for computing the EOFs and k-means clusters. +* WeatherRegime.R - function for computing the EOFs and k-means and hierarchical clusters. -* weather_regime.r - applies the above weather regimes function to the datasets +* weather_regime.R - applies the above weather regimes function to the datasets @@ -35,25 +35,22 @@ User settings ------------- User setting files are stored in recipes/ -#. recipe_modes_of_variability_wp4.yml +#.
recipe_modes_of_variability.yml *Required settings for script* - * start_historical: start date (YYYY-MM-DD) of the reference dataset to be used (please make sure this matches the available data) - * end_historical: end date (YYYY-MM-DD) of the reference dataset to be used (please make sure this matches the available data) - * start_projection: start date (YYYY-MM-DD) of the projection dataset to be used (please make sure this matches the available data) - * end_projection: end date (YYYY-MM-DD) of the projection dataset to be used (please make sure this matches the available data) - * region: North-Atlantic or Polar - * ncenters: number of centers to be computed by the k-means clustering algorithm (does not work yet) - * detrend_order: the order of the polynomial detrending to be applied + * plot type: rectangular or polar + * ncenters: number of centers to be computed by the clustering algorithm (maximum 4) + * cluster_method: kmeans (only psl variable) or hierarchical clustering (for psl or sic variables) + * detrend_order: the order of the polynomial detrending to be applied (0, 1 or 2) * EOFs: logical indicating whether the k-means clustering algorithm is applied directly to the spatial data ('false') or to the EOFs ('true') + * frequency: select the month (format: JAN, FEB, ...) or season (format: JJA, SON, MAM, DJF) for the diagnostic to be computed for (does not work yet for MAM with daily data).
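For orientation, these options could be set in the recipe roughly as follows (a hypothetical sketch with made-up values; option names follow the list above, with ``plot_type`` assumed for the 'plot type' option):

.. code-block:: yaml

   scripts:
     weather_regimes:
       script: magic_bsc/weather_regime.R
       # Illustrative values only
       plot_type: rectangular
       ncenters: 3
       cluster_method: kmeans
       detrend_order: 2
       EOFs: true
       frequency: JAN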
Variables --------- -* psl or sic (atmos, daily, longitude, latitude, time) +* psl (atmos, monthly/daily, longitude, latitude, time) Observations and reformat scripts @@ -72,9 +69,9 @@ References * Hannachi, A., D. M. Straus, C. L. E. Franzke, S. Corti, and T. Woollings, 2017: Low Frequency Nonlinearity and Regime Behavior in the Northern Hemisphere Extra-Tropical Atmosphere. Reviews of Geophysics, https://doi.org/10.1002/2015RG000509. -* Michelangeli, P.-A., R. Vautard, and B. Legras, 1995: Weather regimes: Recurrence and quasi stationarity. Journal of the atmospheric sciences, 52 (8), 1237-1256, doi: 10.1175/1520-0469(1995)052<1237:WRRAQS>2.0.CO. `link `_ +* Michelangeli, P.-A., R. Vautard, and B. Legras, 1995: Weather regimes: Recurrence and quasi stationarity. Journal of the atmospheric sciences, 52 (8), 1237-1256, doi: `10.1175/1520-0469(1995)052<1237:WRRAQS>2.0.CO `_. -* Vautard, R., 1990: Multiple weather regimes over the North Atlantic: Analysis of precursors and successors. Monthly weather review, 118 (10), 2056-2081, doi: 10.1175/1520-0493(1990)118<2056:MWROTN>2.0.CO;2. `link `_ +* Vautard, R., 1990: Multiple weather regimes over the North Atlantic: Analysis of precursors and successors. Monthly weather review, 118 (10), 2056-2081, doi: `10.1175/1520-0493(1990)118<2056:MWROTN>2.0.CO;2 `_. * Yiou, P., K. Goubanova, Z. X. Li, and M. Nogaj, 2008: Weather regime dependence of extreme value statistics for summer temperature and precipitation. Nonlinear Processes in Geophysics, 15 (3), 365-378, https://doi.org/10.5194/npg-15-365-2008. @@ -85,10 +82,10 @@ Example plots ------------- .. _fig_modesofvar: -.. figure:: /recipes/figures/modes_of_variability/DJF-psl_observed_regimes.png +.. figure:: /recipes/figures/modes_of_variability/SON-psl_predicted_regimes.png :align: center :width: 14cm - +Four modes of variability for autumn (September-October-November) in the North Atlantic European Sector for the RCP 8.5 scenario using the BCC-CSM1-1 future projection during the period 2020-2075. The frequency of occurrence of each variability mode is indicated in the title of each map. diff --git a/doc/sphinx/source/recipes/recipe_monitor.rst b/doc/sphinx/source/recipes/recipe_monitor.rst new file mode 100644 index 0000000000..8f4893fc12 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_monitor.rst @@ -0,0 +1,232 @@ +.. _recipe_monitor: + +Monitor +======= + +Overview +-------- + +These recipes and diagnostics allow plotting arbitrary preprocessor output, +i.e., arbitrary variables from arbitrary datasets. +In addition, a :ref:`base class +` is provided that allows a +convenient interface for all monitoring diagnostics. + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in `recipes/monitor` + +* recipe_monitor.yml +* recipe_monitor_with_refs.yml + +Diagnostics are stored in `diag_scripts/monitor/` + +* :ref:`monitor.py `: + Monitoring diagnostic to plot arbitrary preprocessor output. +* :ref:`compute_eofs.py `: + Monitoring diagnostic to plot EOF maps and associated PC timeseries. +* :ref:`multi_datasets.py + `: + Monitoring diagnostic to show multiple datasets in one plot (incl. biases). + + +User settings +------------- + +It is recommended to use a vector graphic file type (e.g., SVG) for the output +format when running this recipe, i.e., run the recipe with the +:ref:`configuration options ` ``output_file_type: +svg``. +Note that map and profile plots are rasterized by default. +Use ``rasterize_maps: false`` or ``rasterize: false`` (see `Recipe settings`_) +in the recipe to disable this. + +Recipe settings +~~~~~~~~~~~~~~~ + +A list of all possible configuration options that can be specified in the +recipe is given for each diagnostic individually (see previous section). + +.. _monitor_config_file: + +Monitor configuration file +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +In addition, the following diagnostics support the use of a dedicated monitor +configuration file: + + * monitor.py + * compute_eofs.py + +This file is a yaml file that contains map- and variable-specific options in two +dictionaries, ``maps`` and ``variables``. + +Each entry in ``maps`` corresponds to a map definition. +Example: + +.. code-block:: yaml + + maps: + global: # Map name, choose a meaningful one + projection: PlateCarree # Cartopy projection to use + projection_kwargs: # Dictionary with Cartopy's projection keyword arguments. + central_longitude: 285 + smooth: true # If true, interpolate values to get smoother maps. If not, all points in a cell will get the exact same color + lon: [-120, -60, 0, 60, 120, 180] # Set longitude ticks + lat: [-90, -60, -30, 0, 30, 60, 90] # Set latitude ticks + colorbar_location: bottom + extent: null # If defined, restrict the projection to a region. Format [lon1, lon2, lat1, lat2] + suptitle_pos: 0.87 # Title position in the figure. + +Each entry in ``variables`` corresponds to a variable definition. +Use the default entry to apply generic options to all variables. +Example: + +.. code-block:: yaml + + variables: + # Define default. Variable definitions completely override the default + # not just the values defined.
If you want to override only the defined + # values, use yaml anchors as shown + default: &default + colors: RdYlBu_r # Matplotlib colormap to use for the colorbar + N: 20 # Number of map intervals to plot + bad: [0.9, 0.9, 0.9] # Color to use when no data + pr: + <<: *default + colors: gist_earth_r + # Define bounds of the colorbar, as a list of + bounds: 0-10.5,0.5 # Set colorbar bounds, as a list or in the format min-max,interval + extend: max # Set extend parameter of mpl colorbar. See https://matplotlib.org/stable/api/_as_gen/matplotlib.pyplot.colorbar.html + sos: + # If default is defined, entries are treated as map-specific options. + # Missing values in map definitions are taken from variable's default + # definition + default: + <<: *default + bounds: 25-41,1 + extend: both + arctic: + bounds: 25-40,1 + antarctic: + bounds: 30-40,0.5 + nao: &nao + <<: *default + extend: both + # Variable definitions can override map parameters. Use with caution. + bounds: [-0.03, -0.025, -0.02, -0.015, -0.01, -0.005, 0., 0.005, 0.01, 0.015, 0.02, 0.025, 0.03] + projection: PlateCarree + smooth: true + lon: [-90, -60, -30, 0, 30] + lat: [20, 40, 60, 80] + colorbar_location: bottom + suptitle_pos: 0.87 + sam: + <<: *nao + lat: [-90, -80, -70, -60, -50] + projection: SouthPolarStereo + projection_kwargs: + central_longitude: 270 + smooth: true + lon: [-120, -60, 0, 60, 120, 180] + +Variables +--------- + +Any, but the variables' number of dimensions should match the ones expected by each plot. + +Example plots +------------- + +.. _fig_climglobal: +.. figure:: /recipes/figures/monitor/clim.png + :align: center + :width: 14cm + +Global climatology of tas. + +.. _fig_seasonclimglobal: +.. figure:: /recipes/figures/monitor/seasonclim.png + :align: center + :width: 14cm + +Seasonal climatology of pr, with a custom colorbar. + +.. _fig_monthlyclimglobal: +.. figure:: /recipes/figures/monitor/monclim.png + :align: center + :width: 14cm + +Monthly climatology of sivol, only for March and September. + +.. _fig_timeseries: +.. figure:: /recipes/figures/monitor/timeseries.png + :align: center + :width: 14cm + +Timeseries of Niño 3.4 index, computed directly with the preprocessor. + +.. _fig_annual_cycle: +.. figure:: /recipes/figures/monitor/annualcycle.png + :align: center + :width: 14cm + +Annual cycle of tas. + +.. _fig_timeseries_with_ref: +.. figure:: /recipes/figures/monitor/timeseries_with_ref.png + :align: center + :width: 14cm + +Timeseries of tas including a reference dataset. + +.. _fig_annual_cycle_with_ref: +.. figure:: /recipes/figures/monitor/annualcycle_with_ref.png + :align: center + :width: 14cm + +Annual cycle of tas including a reference dataset. + +.. _fig_map_with_ref: +.. figure:: /recipes/figures/monitor/map_with_ref.png + :align: center + :width: 14cm + +Global climatology of tas including a reference dataset. + +.. _fig_zonal_mean_profile_with_ref: +.. figure:: /recipes/figures/monitor/zonalmean_profile_with_ref.png + :align: center + :width: 14cm + +Zonal mean profile of ta including a reference dataset. + +.. _fig_1d_profile_with_ref: +.. figure:: /recipes/figures/monitor/1d_profile_with_ref.png + :align: center + :width: 14cm + +1D profile of ta including a reference dataset. + +.. _fig_variable_vs_lat_with_ref: +.. figure:: /recipes/figures/monitor/variable_vs_lat_with_ref.png + :align: center + :width: 14cm + +Zonal mean pr including a reference dataset. + +.. _fig_hovmoeller_z_vs_time_with_ref: +..
figure:: /recipes/figures/monitor/hovmoeller_z_vs_time_with_ref.png + :align: center + :width: 14cm + +Hovmoeller plot (pressure vs. time) of ta including a reference dataset. + +.. _fig_hovmoeller_time_vs_lat_with_ref: +.. figure:: /recipes/figures/monitor/hovmoeller_time_vs_lat_with_ref.png + :align: center + :width: 14cm + +Hovmoeller plot (time vs. latitude) of tas including a reference dataset. diff --git a/doc/sphinx/source/recipes/recipe_mpqb_xch4.rst b/doc/sphinx/source/recipes/recipe_mpqb_xch4.rst new file mode 100644 index 0000000000..867090ba4f --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_mpqb_xch4.rst @@ -0,0 +1,121 @@ +.. _recipe_mpqb_xch4: + +Diagnostics of integrated atmospheric methane (XCH4) +==================================================== + +Overview +-------- + +This recipe ``recipe_mpqb_xch4.yml`` allows the comparison of integrated atmospheric methane +between CMIP6 model simulations and observations, and produces lineplots of monthly mean +methane values, annual cycles and annual growth rates: + +* Monthly mean time series of XCH4 for pre-defined regions (global, Northern Hemisphere, Southern Hemisphere) +* Annual cycles of XCH4 for pre-defined regions (global, Northern Hemisphere, Southern Hemisphere) +* Annual growth rates of XCH4 for pre-defined regions (global, Northern Hemisphere, Southern Hemisphere) + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in esmvaltool/recipes/mpqb/ + +* recipe_mpqb_xch4.yml + +Diagnostics are stored in esmvaltool/diag_scripts/mpqb/ + +* mpqb_lineplot.py +* mpqb_lineplot_anncyc.py +* mpqb_lineplot_growthrate.py + +Observations and reformat scripts +--------------------------------- + +Observations used in this recipe are: + +* CDS-XCH4 (ESA CCI dataset served on the Copernicus Climate data store) + +A cmorizing script for this dataset is available (``cmorize_obs_cds_xch4.ncl``). + +XCH4 is a derived variable that needs to be calculated from four different variables (ch4, hus, zg, ps). +A derivation script is included in the ESMValCore (see the sketch at the end of this section). + + +User settings in recipe +----------------------- +#. Preprocessor + + * ``pp_lineplots_xx_mon``: Regridding, masking all missing values from all used datasets, area-mean ('xx' can be replaced by 'gl'=global, 'sh'=southern hemisphere, 'nh'=northern hemisphere), units converted to [ppbv] to obtain one time series of monthly mean values for the selected region (global, southern hemisphere, northern hemisphere) + * ``pp_lineplots_xx_ann``: Regridding, masking all missing values from all used datasets, area-mean ('xx' can be replaced by 'gl'=global, 'sh'=southern hemisphere, 'nh'=northern hemisphere), units converted to [ppbv] to obtain one time series of annual mean values for the selected region (global, southern hemisphere, northern hemisphere) + * ``pp_lineplots_anncyc_xx``: Regridding, masking all missing values from all used datasets, area-mean ('xx' can be replaced by 'gl'=global, 'sh'=southern hemisphere, 'nh'=northern hemisphere), units converted to [ppbv], monthly climate statistics applied to one annual cycle for the whole chosen time period and for the selected region (global, southern hemisphere, northern hemisphere) + * ``xch4_def_xx``: defining the time period over which the analysis should be calculated; options are "cmip6", which covers the overlapping period of the observations and the CMIP6 historical simulations, and "future", which covers the time period of CMIP6 scenarios
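   For orientation, one of these preprocessors could be sketched as follows,
   using ESMValCore preprocessor functions; the target grid and threshold
   values are illustrative assumptions, not copied from the recipe:

   .. code-block:: yaml

      preprocessors:
        pp_lineplots_gl_mon:
          regrid:
            target_grid: 2x2        # illustrative resolution
            scheme: linear
          mask_fillvalues:          # mask values missing in any used dataset
            threshold_fraction: 0.95
          area_statistics:          # area mean over the selected region
            operator: mean
          convert_units:
            units: ppbv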
+ +#. Additional needed files + + * ``mpqb_cfg_xch4.yml``: In this file, additional information for the used datasets is defined and stored, e.g., the alias of the dataset name and the color used to display the dataset in the figures + * ``mpqb_utils.yml``: In this file, the preparations for the dataset displays are made. + +#. Script mpqb_lineplot.py + + *Required settings for script* + + * no additional settings required + + *Optional settings for script* + + * no optional settings available + + *Required settings for variables* + + * no settings for the variables required + +#. Script mpqb_lineplot_anncyc.py + + *Required settings for script* + + * no additional settings required + + *Optional settings for script* + + * no optional settings available + + *Required settings for variables* + + * no settings for the variables required + +#. Script mpqb_lineplot_growthrate.py + + *Required settings for script* + + * no additional settings required + + *Optional settings for script* + + * no optional settings available + + *Required settings for variables* + + * no settings for the variables required + + +Variables +--------- + +* ch4 (atmos, monthly mean, longitude latitude level time) +* hus (atmos, monthly mean, longitude latitude level time) +* zg (atmos, monthly mean, longitude latitude level time) +* ps (atmos, monthly mean, longitude latitude time) + +All variables are necessary to calculate the derived variable xch4. + + +Example plots +------------- + +.. _lineplot_xch4_2003-2014_monmean: +.. figure:: /recipes/figures/mpqb/lineplot_xch4_2003-2014_monmean.png + :align: center + + Monthly mean time series of XCH4, calculated over the whole globe, for individual CMIP6 model simulations.
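As noted under "Observations and reformat scripts" above, XCH4 is derived
inside the ESMValCore; a variable entry requesting that derivation could be
sketched as follows (the ``mip`` value is an assumption):

.. code-block:: yaml

   variables:
     xch4:
       mip: Amon
       derive: true            # compute xch4 from ch4, hus, zg and ps
       force_derivation: false # reuse an existing derived file if present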
+ + + diff --git a/doc/sphinx/source/recipes/recipe_multimodel_products.rst b/doc/sphinx/source/recipes/recipe_multimodel_products.rst index 411af48187..9d7bcc651d --- a/doc/sphinx/source/recipes/recipe_multimodel_products.rst +++ b/doc/sphinx/source/recipes/recipe_multimodel_products.rst @@ -1,12 +1,12 @@ .. _recipes_multimodel_products: -Generic multi-model products -==================================================== +Multi-model products +==================== Overview -------- -The goal of this diagnostic is to compute the multi-model ensemble mean for a set of models selected by the user for individual variables and different temporal resolutions (annual, seasonal, monthly). +The goal of this diagnostic is to compute the multi-model ensemble mean for a set of models selected by the user for individual variables and different temporal resolutions (annual, seasonal, monthly). After selecting the region (defined by the lowermost and uppermost longitudes and latitudes), the mean for the selected reference period is subtracted from the projections in order to obtain the anomalies for the desired period. In addition, the recipe computes the percentage of models agreeing on the sign of this anomaly, thus providing some indication on the robustness of the climate signal. @@ -19,12 +19,12 @@ Available recipes and diagnostics Recipes are stored in recipes/ -* recipe_multimodel_products_wp5.yml +* recipe_multimodel_products.yml Diagnostics are stored in diag_scripts/magic_bsc/ -* multimodel_products.r - script for computing multimodel anomalies and their agreement. +* multimodel_products.R - script for computing multimodel anomalies and their agreement. @@ -34,26 +34,23 @@ User settings ------------- User setting files are stored in recipes/ -#. recipe_multimodel_products_wp5.yml +#. recipe_multimodel_products.yml *Required settings for script* - * climatology_class: the class to be used for computing the climatology, e.g. 'historical' or 'rcp26' etc - * climatology_start_year: start year for the climatology - * climatology_end_year: end year for the climatology - * anomaly_start_year: start year for the anomalies - * anomaly_end_year: end year for the anomalies - * moninf: integer specifying the first month of the seasonal mean period to be computed, if left blank the monthly anomalies will be computed - * monsup: integer specifying the last month of the seasonal mean period to be computed + * colorbar_lim: positive number specifying the range (-colorbar_lim ... +colorbar_lim) of the colorbar + (0 = automatic colorbar scaling) + * moninf: integer specifying the first month of the seasonal mean period to be computed + * monsup: integer specifying the last month of the seasonal mean period to be computed; if it is null, the anomaly of the month indicated by moninf will be computed * agreement_threshold: integer between 0 and 100 indicating the threshold in percent for the minimum agreement between models on the sign of the multi-model mean anomaly for the stippling to be plotted * running_mean: integer indicating the length of the window for the running mean to be computed - * time_series_plot: either null or mean or maxmin (does not work yet) + * time_series_plot: either single or maxmin (plot the individual time series, or the mean with shading between the max and min). + A combined sketch of these settings is given at the end of this page. Variables --------- -* tas(atmos, daily, longitude, latitude, time) +* any Amon variable (atmos, monthly mean, longitude latitude time) Observations and reformat scripts @@ -79,5 +76,4 @@ Example plots .. _fig_multimodprod: .. figure:: /recipes/figures/multimodel_products/tas_JUN_multimodel-anomaly_2006_2099_1961_1990.png - - +Multi-model mean anomaly of 2-m air temperature during the future projection 2006-2099 in June, considering the reference period 1961-1990 (colours). Crosses indicate that 80% of the models agree on the sign of the multi-model mean anomaly. The models selected are BCC-CSM1-1, MPI-ESM-MR and MIROC5 in the r1i1p1 ensembles for the RCP 2.6 scenario.
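As referenced above, a script section combining these settings might look
like the following sketch (values are illustrative, chosen to match the
example plot above):

.. code-block:: yaml

   scripts:
     anomaly_agreement:
       script: magic_bsc/multimodel_products.R
       colorbar_lim: 0          # automatic colorbar scaling
       moninf: 6                # June
       monsup: null             # single-month (June) anomaly
       agreement_threshold: 80  # stipple where 80% of models agree
       running_mean: 5
       time_series_plot: single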
diff --git a/doc/sphinx/source/recipes/recipe_oceans.rst b/doc/sphinx/source/recipes/recipe_oceans.rst index d09074c30e..17552b39fa --- a/doc/sphinx/source/recipes/recipe_oceans.rst +++ b/doc/sphinx/source/recipes/recipe_oceans.rst @@ -1,7 +1,7 @@ .. _XML_oceans: -Recipes for evaluating models of the ocean -========================================== +Ocean diagnostics +================= Overview ........ @@ -42,7 +42,8 @@ Available recipes * recipe_ocean_scalar_fields.yml_ * recipe_ocean_bgc.yml_ * recipe_ocean_quadmap.yml_ -* recipe_ocean_Landschutzer2014.yml_ +* recipe_ocean_ice_extent.yml_ +* recipe_ocean_multimap.yml_ recipe_ocean_amoc.yml @@ -162,8 +163,8 @@ This recipe includes the following fields: included it as HadGEM2-ES doesn't include a phosphate field. This recipe uses the World Ocean Atlas data, which can be downloaded from: -https://www.nodc.noaa.gov/OC5/woa13/woa13data.html -(last access 10/25/2018) +https://www.ncei.noaa.gov/products/world-ocean-atlas +(last access 02/08/2021) Instructions: Select the "All fields data links (1° grid)" netCDF file, which contain all fields. @@ -252,6 +253,20 @@ sea ice diagnostic is the only tool that performs a field specific evaluation. The diagnostic_seaice.py_ diagnostic is more fully described below. +recipe_ocean_multimap.yml +------------------------- + +The recipe_ocean_multimap.yml_ is an example recipe showing the +diagnostic_maps_multimodel.py_ diagnostic. +This diagnostic produces an image showing Model vs Observations maps or +only Model fields when observational data are not provided. +Each map shows latitude vs longitude fields and user-defined values are used to set the colour scale. +The plot layout can be adjusted via the `layout_rowcol` argument. + +The figure produced by this recipe compares the ocean surface CO2 fluxes +for 16 different CMIP5 models against Landschuetzer2016 observations. + +The diagnostic_maps_multimodel.py_ diagnostic is documented below. Available diagnostics @@ -270,6 +285,7 @@ Each module is described in more detail both below and inside the module. - diagnostic_timeseries.py - diagnostic_tools.py - diagnostic_transects.py +- diagnostic_maps_multimodel.py diagnostic_maps.py @@ -286,7 +302,7 @@ For this reason, we recommend extracting a small number of specific layers in the preprocessor, using the `extract_layer` preprocessor. This script can not process NetCDFs with multiple time steps. Please use the -`time_average` preprocessor to collapse the time dimension. +`climate_statistics` preprocessor to collapse the time dimension. This diagnostic also includes the optional arguments, `threshold` and `thresholds`. @@ -311,7 +327,7 @@ For a Global 2D field: .. code-block:: yaml prep_map_1: - time_average: + climate_statistics: For a regional 2D field: @@ -324,7 +340,8 @@ For a regional 2D field: end_longitude: 30. start_latitude: -80. end_latitude: 80. - time_average: + climate_statistics: + operator: mean For a Global 3D field at the surface and 10m depth: @@ -335,7 +352,8 @@ extract_levels: levels: [0., 10.] scheme: linear_horizontal_extrapolate_vertical - time_average: + climate_statistics: + operator: mean For a multi-model comparison mean of 2D global fields including contour thresholds. @@ -344,7 +362,8 @@ prep_map_4: custom_order: true - time_average: + climate_statistics: + operator: mean regrid: target_grid: 1x1 scheme: linear @@ -390,7 +409,8 @@ An appropriate preprocessor for a 2D field would be: .. code-block:: yaml prep_quad_map: - time_average: + climate_statistics: + operator: mean and an example of an appropriate diagnostic section of the recipe would be: @@ -405,13 +425,13 @@ and an example of an appropriate diagnostic section of the recipe would be: additional_datasets: # filename: tos_ATSR_L3_ARC-v1.1.1_199701-201112.nc # download from: https://datashare.is.ed.ac.uk/handle/10283/536 - - {dataset: ATSR, project: obs4mips, level: L3, version: ARC-v1.1.1, start_year: 2001, end_year: 2003, tier: 3} + - {dataset: ATSR, project: obs4MIPs, level: L3, version: ARC-v1.1.1, start_year: 2001, end_year: 2003, tier: 3} scripts: Global_Ocean_map: script: ocean/diagnostic_maps_quad.py control_model: {dataset: HadGEM2-CC, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1} exper_model: {dataset: HadGEM2-ES, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1} - observational_dataset: {dataset: ATSR, project: obs4mips,} + observational_dataset: {dataset: ATSR, project: obs4MIPs,} Note that the details about the control model, the experiment models and the observational dataset are all provided in the script section of the @@ -438,9 +458,9 @@ and a latitude and longitude coordinates.
This diagnostic also includes the optional arguments, `maps_range` and `diff_range` to manually define plot ranges. Both arguments are a list of two floats -to set plot range minimun and maximum values respectively for Model and Observations +to set plot range minimum and maximum values respectively for Model and Observations maps (Top panels) and for the Model minus Observations panel (bottom left). -Note that if input data have negative values the Model over Observations map +Note that if input data have negative values the Model over Observations map (bottom right) is not produced. The scatter plots plot the matched model coordinate on the x axis, and the @@ -457,13 +477,52 @@ An appropriate preprocessor for a 3D+time field would be: extract_levels: levels: [100., ] scheme: linear_extrap - time_average: + climate_statistics: + operator: mean regrid: target_grid: 1x1 scheme: linear +diagnostic_maps_multimodel.py +----------------------------- + +The diagnostic_maps_multimodel.py_ diagnostic makes model(s) vs observations maps +and, if observational data are not provided, it draws only the model fields. + +It is always necessary to define the overall layout through the argument `layout_rowcol`, +which is a list of two integers indicating respectively the number of rows and columns +to organize the plot. Observations need not be accounted for here, as they are automatically +added at the top of the figure. + +This diagnostic also includes the optional arguments, `maps_range` and +`diff_range` to manually define plot ranges. Both arguments are a list of two floats +to set plot range minimum and maximum values respectively for variable data and +the Model minus Observations range. + +Note that this diagnostic assumes that the preprocessors do the bulk of the +hard work, and that the cube received by this diagnostic (via the settings.yml +and metadata.yml files) has no time component, a small number of depth layers, +and latitude and longitude coordinates. + +An appropriate preprocessor for a 3D+time field would be: + + .. code-block:: yaml + + preprocessors: + prep_map: + extract_levels: + levels: [100., ] + scheme: linear_extrap + climate_statistics: + operator: mean + regrid: + target_grid: 1x1 + scheme: linear + + + diagnostic_profiles.py -------------------------------- @@ -488,9 +547,8 @@ An appropriate preprocessor for a 3D+time field would be: lat2: 30. z_min: 0. z_max: 3000. - average_region: - coord1: longitude - coord2: latitude + area_statistics: + operator: mean @@ -524,18 +582,15 @@ For a global area-weighted average 2D field: .. code-block:: yaml - average_area: - coord1: longitude - coord2: latitude + area_statistics: + operator: mean For a global volume-weighted average 3D field: .. code-block:: yaml - average_volume: - coord1: longitude - coord2: latitude - coordz: depth + volume_statistics: + operator: mean For a global area-weighted surface of a 3D field: @@ -544,9 +599,8 @@ extract_levels: levels: [0., ] scheme: linear_horizontal_extrapolate_vertical - average_area: - coord1: longitude - coord2: latitude + area_statistics: + operator: mean An example of the multi-model time series plots can be seen here: @@ -578,7 +632,8 @@ An appropriate preprocessor for a 3D+time field would be: .. code-block:: yaml - time_average: + climate_statistics: + operator: mean extract_slice: latitude: [-50.,50.] longitude: 332.
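A diagnostic entry wiring such a preprocessor to the transects script might
be sketched as follows (the preprocessor and variable names are illustrative
assumptions):

.. code-block:: yaml

   diagnostics:
     transect_plots:
       variables:
         thetao:
           preprocessor: prep_transect   # e.g. the block shown above
           mip: Omon
       scripts:
         transects:
           script: ocean/diagnostic_transects.py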
@@ -693,7 +748,7 @@ These tools are: - bgc_units: converts to sensible units where appropriate (i.e. Celsius, mmol/m3) - timecoord_to_float: Converts time series to decimal time, i.e. midnight on January 1st 1970 is 1970.0 - add_legend_outside_right: a plotting tool, which adds a legend outside the axes. -- get_image_format: loads the image format, as defined in the global user config.yml. +- get_image_format: loads the image format, as defined in the global configuration. - get_image_path: creates a path for an image output. - make_cube_layer_dict: makes a dictionary for several layers of a cube. @@ -707,8 +762,8 @@ A note on the auxiliary data directory Some of these diagnostic scripts may not function on machines with no access to the internet, as cartopy may try to download the shape files. The solution to this issue is to put the relevant cartopy shapefiles in a directory which -is visible to esmvaltool, then link that path to ESMValTool via -the `auxiliary_data_dir` variable in your config-user.yml file. +is visible to esmvaltool, then link that path to ESMValTool via the +:ref:`configuration option ` ``auxiliary_data_dir``. The cartopy masking files can be downloaded from: https://www.naturalearthdata.com/downloads/ @@ -745,41 +800,43 @@ The following WOA datasets are used by the ocean diagnostics: - Silicate - Dissolved Oxygen -These files need to be reformatted using the `cmorize_obs_py` script with output name `WOA`. +These files need to be reformatted using the `esmvaltool data format WOA` command. -Landschutzer 2014 ----------------- +Landschuetzer 2016 +------------------ These data can be downloaded from: -ftp://ftp.nodc.noaa.gov/nodc/archive/arc0105/0160558/1.1/data/0-data/spco2_1998-2011_ETH_SOM-FFN_CDIAC_G05.nc -(last access 02/28/2019) +https://www.nodc.noaa.gov/archive/arc0105/0160558/3.3/data/0-data/spco2_1982-2015_MPI_SOM-FFN_v2016.nc +(last access 09/20/2022) The following variables are used by the ocean diagnostics: - fgco2, Surface Downward Flux of Total CO2 - spco2, Surface Aqueous Partial Pressure of CO2 - dpco2, Delta CO2 Partial Pressure -The file needs to be reformatted using the `cmorize_obs_py` script with output name `Landschutzer2014`. +The file needs to be reformatted using the `esmvaltool data format Landschuetzer2016` command. .. Links: .. Recipes: -.. _recipe_ocean_amoc.yml: https://github.com/ESMValGroup/ESMValTool/tree/version2_development/esmvaltool/recipes/recipe_ocean_amoc.yml -.. _recipe_ocean_example.yml: https://github.com/ESMValGroup/ESMValTool/tree/version2_development/esmvaltool/recipes/recipe_ocean_example.yml -.. _recipe_ocean_scalar_fields.yml: https://github.com/ESMValGroup/ESMValTool/tree/version2_development/esmvaltool/recipes/recipe_ocean_scalar_fields.yml -.. _recipe_ocean_bgc.yml: https://github.com/ESMValGroup/ESMValTool/tree/version2_development/esmvaltool/recipes/recipe_ocean_bgc.yml -.. _recipe_ocean_quadmap.yml: https://github.com/ESMValGroup/ESMValTool/tree/version2_development/esmvaltool/recipes/recipe_ocean_quadmap.yml -.. _recipe_ocean_Landschutzer2014.yml: https://github.com/ESMValGroup/ESMValTool/tree/version2_development/esmvaltool/recipes/recipe_ocean_Landschutzer2014.yml +.. _recipe_ocean_amoc.yml: https://github.com/ESMValGroup/ESMValTool/tree/main/esmvaltool/recipes/recipe_ocean_amoc.yml +.. _recipe_ocean_example.yml: https://github.com/ESMValGroup/ESMValTool/tree/main/esmvaltool/recipes/recipe_ocean_example.yml +..
_recipe_ocean_scalar_fields.yml: https://github.com/ESMValGroup/ESMValTool/tree/main/esmvaltool/recipes/recipe_ocean_scalar_fields.yml +.. _recipe_ocean_bgc.yml: https://github.com/ESMValGroup/ESMValTool/tree/main/esmvaltool/recipes/recipe_ocean_bgc.yml +.. _recipe_ocean_quadmap.yml: https://github.com/ESMValGroup/ESMValTool/tree/main/esmvaltool/recipes/recipe_ocean_quadmap.yml +.. _recipe_ocean_Landschuetzer2016.yml: https://github.com/ESMValGroup/ESMValTool/tree/main/esmvaltool/recipes/recipe_ocean_Landschuetzer2016.yml +.. _recipe_ocean_multimap.yml: https://github.com/ESMValGroup/ESMValTool/tree/main/esmvaltool/recipes/recipe_ocean_multimap.yml .. Diagnostics: -.. _ocean: https://github.com/ESMValGroup/ESMValTool/tree/version2_development/esmvaltool/diag_scripts/ocean/: -.. _diagnostic_maps.py: https://github.com/ESMValGroup/ESMValTool/tree/version2_development/esmvaltool/diag_scripts/ocean/diagnostic_maps.py -.. _diagnostic_maps_quad.py: https://github.com/ESMValGroup/ESMValTool/tree/version2_development/esmvaltool/diag_scripts/ocean/diagnostic_maps_quad.py -.. _diagnostic_model_vs_obs.py: https://github.com/ESMValGroup/ESMValTool/tree/version2_development/esmvaltool/diag_scripts/ocean/diagnostic_model_vs_obs.py -.. _diagnostic_profiles.py: https://github.com/ESMValGroup/ESMValTool/tree/version2_development/esmvaltool/diag_scripts/ocean/diagnostic_profiles.py -.. _diagnostic_timeseries.py: https://github.com/ESMValGroup/ESMValTool/tree/version2_development/esmvaltool/diag_scripts/ocean/diagnostic_timeseries.py -.. _diagnostic_transects.py: https://github.com/ESMValGroup/ESMValTool/tree/version2_development/esmvaltool/diag_scripts/ocean/diagnostic_transects.py -.. _diagnostic_seaice.py: https://github.com/ESMValGroup/ESMValTool/tree/version2_development/esmvaltool/diag_scripts/ocean/diagnostic_seaice.py -.. _diagnostic_tools.py: https://github.com/ESMValGroup/ESMValTool/tree/version2_development/esmvaltool/diag_scripts/ocean/diagnostic_tools.py +.. _ocean: https://github.com/ESMValGroup/ESMValTool/tree/main/esmvaltool/diag_scripts/ocean/: +.. _diagnostic_maps.py: https://github.com/ESMValGroup/ESMValTool/tree/main/esmvaltool/diag_scripts/ocean/diagnostic_maps.py +.. _diagnostic_maps_quad.py: https://github.com/ESMValGroup/ESMValTool/tree/main/esmvaltool/diag_scripts/ocean/diagnostic_maps_quad.py +.. _diagnostic_model_vs_obs.py: https://github.com/ESMValGroup/ESMValTool/tree/main/esmvaltool/diag_scripts/ocean/diagnostic_model_vs_obs.py +.. _diagnostic_maps_multimodel.py: https://github.com/ESMValGroup/ESMValTool/tree/main/esmvaltool/diag_scripts/ocean/diagnostic_maps_multimodel.py +.. _diagnostic_profiles.py: https://github.com/ESMValGroup/ESMValTool/tree/main/esmvaltool/diag_scripts/ocean/diagnostic_profiles.py +.. _diagnostic_timeseries.py: https://github.com/ESMValGroup/ESMValTool/tree/main/esmvaltool/diag_scripts/ocean/diagnostic_timeseries.py +.. _diagnostic_transects.py: https://github.com/ESMValGroup/ESMValTool/tree/main/esmvaltool/diag_scripts/ocean/diagnostic_transects.py +.. _diagnostic_seaice.py: https://github.com/ESMValGroup/ESMValTool/tree/main/esmvaltool/diag_scripts/ocean/diagnostic_seaice.py +.. 
_diagnostic_tools.py: https://github.com/ESMValGroup/ESMValTool/tree/main/esmvaltool/diag_scripts/ocean/diagnostic_tools.py diff --git a/doc/sphinx/source/recipes/recipe_perfmetrics.rst b/doc/sphinx/source/recipes/recipe_perfmetrics.rst index 6cb129abbb..067b65af85 --- a/doc/sphinx/source/recipes/recipe_perfmetrics.rst +++ b/doc/sphinx/source/recipes/recipe_perfmetrics.rst @@ -6,9 +6,9 @@ Performance metrics for essential climate parameters Overview -------- -The goal is to create a standard recipe for the calculation of performance metrics to quantify the ability of the models to reproduce the climatological mean annual cycle for selected "Essential Climate Variables" (ECVs) plus some additional corresponding diagnostics and plots to better understand and interpret the results. +The goal is to create a standard recipe for the calculation of performance metrics to quantify the ability of the models to reproduce the climatological mean annual cycle for selected "Essential Climate Variables" (ECVs) plus some additional corresponding diagnostics and plots to better understand and interpret the results. -The recipe can be used to calculate performance metrics at different vertical levels (e.g., 5, 30, 200, 850 hPa as in `Gleckler et al. (2008) `_ and in four regions (global, tropics 20°N-20°S, northern extratropics 20°-90°N, southern extratropics 20°-90°S). As an additional reference, we consider `Righi et al. (2015) `_. +The recipe can be used to calculate performance metrics at different vertical levels (e.g., 5, 30, 200, 850 hPa) as in `Gleckler et al. (2008) `_ and in different regions. As an additional reference, we consider `Righi et al. (2015) `_. Available recipes and diagnostics ----------------------------------- @@ -16,6 +16,8 @@ Available recipes and diagnostics Recipes are stored in recipes/ * recipe_perfmetrics_CMIP5.yml +* recipe_perfmetrics_CMIP5_4cds.yml +* recipe_perfmetrics_land_CMIP5.yml Diagnostics are stored in diag_scripts/perfmetrics/ @@ -29,27 +31,29 @@ User settings in recipe ----------------------- +.. _perf-main.ncl: + +#.
Script main.ncl *Required settings (scripts)* - * plot_type: cycle (time), zonal (plev, lat), latlon (lat, lon), cycle_latlon (time, lat, lon) - * time_avg: type of time average (opt argument of time_operations in diag_scripts/shared/statistics.ncl) - * region: selected region (see select_region in diag_scripts/shared/latlon.ncl) - + * plot_type: cycle (time), zonal (plev, lat), latlon (lat, lon), cycle_latlon (time, lat, lon), cycle_zonal (time, plev, lat) + * time_avg: type of time average (monthlyclim, seasonalclim, annualclim) + * region: selected region (global, trop, nhext, shext, nhtrop, shtrop, nh, sh, nhmidlat, shmidlat, nhpolar, shpolar, eq) + A combined sketch using these settings is given at the end of this page. + *Optional settings (scripts)* - - * styleset: for plot_type cycle only (as in diag_scripts/shared/plot/styles/) + + * styleset: for plot_type cycle only (cmip5, righi15gmd, cmip6, default) * plot_stddev: for plot_type cycle only, plots standard deviation as shading * legend_outside: for plot_type cycle only, plots the legend in a separate file * t_test: for plot_type zonal or latlon, calculates t-test in difference plots (default: False) * conf_level: for plot_type zonal or latlon, adds the confidence level for the t-test to the plot (default: False) * projection: map projection for plot_type latlon (default: CylindricalEquidistant) - * draw_plots: draws plots (default: True) * plot_diff: draws difference plots (default: False) * calc_grading: calculates grading metrics (default: False) * stippling: uses stippling to mark statistically significant differences (default: False = mask out non-significant differences in gray) * show_global_avg: displays the global average of the input field as a string at the top-right of lat-lon plots (default: False) + * annots: choose the annotation style, e.g. ``alias``, which would display the alias of the dataset as the title (applies to plot_type zonal and cycle_zonal) * metric: chosen grading metric(s) (if calc_grading is True) * normalization: metric normalization (for RMSD and BIAS metrics only) * abs_levs: list of contour levels for absolute plot @@ -58,17 +62,47 @@ User settings in recipe * zonal_ymin: for plot_type zonal only, minimum pressure level on the y-axis (default: 5. hPa) * latlon_cmap: for plot_type latlon only, chosen color table (default: "amwg_blueyellowred") * plot_units: plotting units (if different from standard CMOR units) - + * add_tropopause: adds an outline of a climatological tropopause to the zonal plot (default: False) + + *Special optional plot configurations* + + It is possible to make some specific customizations to the plots (zonal + only). + + This includes for example specific tickmark labels of the axes. + + Those special customizations can be done by adding ncl plotting resources + combined with prefix ``res_`` as optional settings of the main script in the + recipe. + + Note that this requires familiarity with the ncl plotting routines for + pressure vs height plots + (https://www.ncl.ucar.edu/Document/Graphics/Interfaces/gsn_csm_pres_hgt.shtml) + and the corresponding resources. + + The following shows an example of customizing the latitude tickmarks so + that a degree sign and an empty space are used for the labels: + + ..
code-block:: yaml + + # copernicus style of latitude tickmarks + res_tmXBMode: "Explicit" + res_tmXBValues: [-60, -30, 0, 30, 60] + res_tmXBLabels: ["60~F35~J~F21~ S", "30~F35~J~F21~ S", "0~F35~J", "30~F35~J~F21~ N", "60~F35~J~F21~ N"] + + *Required settings (variables)* - + * reference_dataset: reference dataset to compare with (usually the observations). - + *Optional settings (variables)* * alternative_dataset: a second dataset to compare with. These settings are passed to the other scripts by main.ncl, depending on the selected plot_type. +.. _perf-collect.ncl: + #. Script collect.ncl *Required settings (scripts)* @@ -77,14 +111,15 @@ User settings in recipe * label_bounds: for RMSD and BIAS metrics, min and max of the labelbar * label_scale: for RMSD and BIAS metrics, bin width of the labelbar * colormap: for RMSD and BIAS metrics, color table of the labelbar - + *Optional settings (scripts)* - + * label_lo: adds lower triangle for values outside range * label_hi: adds upper triangle for values outside range * cm_interval: min and max color of the color table * cm_reverse: reverses the color table * sort: sorts datasets in alphabetic order (excluding MMM) + * diag_order: sorts diagnostics in a specific order (name = 'diagnostic'-'region') + * title: plots title + * scale_font: scaling factor applied to the default font size + * disp_values: switches on/off the grading values on the plot @@ -93,38 +128,64 @@ Variables --------- - -* clt (atmos, monthly mean, longitude latitude time) -* hus (atmos, monthly mean, longitude latitude lev time) -* od550aer, od870aer, od550abs, od550lt1aer (aero, monthly mean, longitude latitude time) -* pr (atmos, monthly mean, longitude latitude time) -* rlut, rlutcs, rsut, rsutcs (atmos, monthly mean, longitude latitude time) -* ta (atmos, monthly mean, longitude latitude lev time) -* tas (atmos, monthly mean, longitude latitude time) -* toz (atmos, monthly mean, longitude latitude time) -* ts (atmos, monthly mean, longitude latitude time) -* ua (atmos, monthly mean, longitude latitude lev time) -* va (atmos, monthly mean, longitude latitude lev time) -* zg (atmos, monthly mean, longitude latitude lev time) +#. recipe_perfmetrics_CMIP5.yml + + * clt (atmos, monthly mean, longitude latitude time) + * hus (atmos, monthly mean, longitude latitude lev time) + * od550aer, od870aer, od550abs, od550lt1aer (aero, monthly mean, longitude latitude time) + * pr (atmos, monthly mean, longitude latitude time) + * rlut, rlutcs, rsut, rsutcs (atmos, monthly mean, longitude latitude time) + * sm (land, monthly mean, longitude latitude time) + * ta (atmos, monthly mean, longitude latitude lev time) + * tas (atmos, monthly mean, longitude latitude time) + * toz (atmos, monthly mean, longitude latitude time) + * ts (atmos, monthly mean, longitude latitude time) + * ua (atmos, monthly mean, longitude latitude lev time) + * va (atmos, monthly mean, longitude latitude lev time) + * zg (atmos, monthly mean, longitude latitude lev time) + +#.
recipe_perfmetrics_land_CMIP5.yml + + * sm (land, monthly mean, longitude latitude time) + * nbp (land, monthly mean, longitude latitude time) + * gpp (land, monthly mean, longitude latitude time) + * lai (land, monthly mean, longitude latitude time) + * fgco2 (ocean, monthly mean, longitude latitude time) + * et (land, monthly mean, longitude latitude time) + * rlus, rlds, rsus, rsds (atmos, monthly mean, longitude latitude time) Observations and reformat scripts --------------------------------- -*Note: (1) obs4mips data can be used directly without any preprocessing; (2) see headers of cmorization scripts (in esmvaltool/utils/cmorizers/obs) for non-obs4mips data for download instructions.* - -* AIRS (hus – obs4mips) -* CERES-EBAF (rlut, rlutcs, rsut, rsutcs – obs4mips) -* ERA-Interim (tas, ta, ua, va, zg, hus – esmvaltool/utils/cmorizers/obs/cmorize_obs_ERA-Interim.ncl) -* ESACCI-AEROSOL (od550aer, od870aer, od550abs, od550lt1aer – esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-AEROSOL.ncl) -* ESACCI-CLOUD (clt – esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-CLOUD.ncl) -* ESACCI-OZONE (toz – esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-OZONE.ncl) -* ESACCI-SST (ts – esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-SST.ncl) -* GPCP-SG (pr – obs4mips) -* HadISST (ts - esmvaltool/utils/cmorizers/obs/cmorize_obs_HadISST.ncl) -* MODIS (od550aer – obs4mips) -* NCEP (tas, ta, ua, va, zg – esmvaltool/utils/cmorizers/obs/cmorize_obs_NCEP.ncl) -* NIWA (toz – esmvaltool/utils/cmorizers/obs/cmorize_obs_NIWA.ncl) -* PATMOS (clt - esmvaltool/utils/cmorizers/obs/cmorize_obs_PATMOS.ncl) +The following list shows the currently used observational data sets for this recipe with their variable names and the reference to their respective reformat scripts in parentheses. Please note that obs4MIPs data can be used directly without any reformatting. For non-obs4MIPs data use `esmvaltool data info DATASET` or see headers of cmorization scripts (in `/esmvaltool/cmorizers/data/formatters/datasets/ +`_) for downloading and processing instructions. +#. recipe_perfmetrics_CMIP5.yml + + * AIRS (hus - obs4MIPs) + * CERES-EBAF (rlut, rlutcs, rsut, rsutcs - obs4MIPs) + * ERA-Interim (tas, ta, ua, va, zg, hus - esmvaltool/cmorizers/data/formatters/datasets/era-interim.py) + * ESACCI-AEROSOL (od550aer, od870aer, od550abs, od550lt1aer - esmvaltool/cmorizers/data/formatters/datasets/esacci-aerosol.ncl) + * ESACCI-CLOUD (clt - esmvaltool/cmorizers/data/formatters/datasets/esacci-cloud.ncl) + * ESACCI-OZONE (toz - esmvaltool/cmorizers/data/formatters/datasets/esacci-ozone.ncl) + * ESACCI-SOILMOISTURE (sm - esmvaltool/cmorizers/data/formatters/datasets/esacci_soilmoisture.ncl) + * ESACCI-SST (ts - esmvaltool/cmorizers/data/formatters/datasets/esacci-sst.py) + * GPCP-SG (pr - obs4MIPs) + * HadISST (ts - esmvaltool/cmorizers/data/formatters/datasets/hadisst.ncl) + * MODIS (od550aer - esmvaltool/cmorizers/data/formatters/datasets/modis.ncl) + * NCEP-NCAR-R1 (tas, ta, ua, va, zg - esmvaltool/cmorizers/data/formatters/datasets/ncep_ncar_r1.py) + * NIWA-BS (toz - esmvaltool/cmorizers/data/formatters/datasets/niwa_bs.ncl) + * PATMOS-x (clt - esmvaltool/cmorizers/data/formatters/datasets/patmos_x.ncl) + +#.
recipe_perfmetrics_land_CMIP5.yml + + * CERES-EBAF (rlus, rlds, rsus, rsds - obs4MIPs) + * ESACCI-SOILMOISTURE (sm - esmvaltool/cmorizers/data/formatters/datasets/esacci_soilmoisture.ncl) + * FLUXCOM (gpp - esmvaltool/cmorizers/data/formatters/datasets/fluxcom.py) + * JMA-TRANSCOM (nbp, fgco2 - esmvaltool/cmorizers/data/formatters/datasets/jma_transcom.py) + * LAI3d (lai - esmvaltool/cmorizers/data/formatters/datasets/lai3g.py) + * LandFlux-EVAL (et - esmvaltool/cmorizers/data/formatters/datasets/landflux_eval.py) + * Landschuetzer2016 (fgco2 - esmvaltool/cmorizers/data/formatters/datasets/landschuetzer2016.py) + * MTE (gpp - esmvaltool/cmorizers/data/formatters/datasets/mte.py) References ---------- @@ -136,23 +197,28 @@ Example plots ------------- -.. centered:: |pic_permetrics1| |pic_permetrics2| +.. figure:: /recipes/figures/perfmetrics/perfmetrics_fig_1.png + :width: 90% + + Annual cycle of globally averaged temperature at 850 hPa (time period 1980-2005) for different CMIP5 models (historical simulation) (thin colored lines) in comparison to ERA-Interim (thick yellow line) and NCEP-NCAR-R1 (thick black dashed line) reanalysis data. + +.. figure:: /recipes/figures/perfmetrics/perfmetrics_fig_2.png + :width: 90% -.. |pic_permetrics1| image:: /recipes/figures/perfmetrics/perfmetrics_fig_1.png - :width: 50% + Taylor diagram of globally averaged temperature at 850 hPa (ta) and longwave cloud radiative effect (lwcre) for different CMIP5 models (historical simulation, 1980-2005). Reference data (REF) are ERA-Interim for temperature (1980-2005) and CERES-EBAF (2001-2012) for longwave cloud radiative effect. -.. |pic_permetrics2| image:: /recipes/figures/perfmetrics/perfmetrics_fig_2.png - :width: 30% +.. figure:: /recipes/figures/perfmetrics/perfmetrics_fig_3.png + :width: 90% -.. centered:: |pic_permetrics3| |pic_permetrics4| + Difference in annual mean of zonally averaged temperature (time period 1980-2005) between the CMIP5 model MPI-ESM-MR (historical simulation) and ERA-Interim. Stippled areas indicate differences that are statistically significant at a 95% confidence level. -.. |pic_permetrics3| image:: /recipes/figures/perfmetrics/perfmetrics_fig_3.png - :width: 30% +.. figure:: /recipes/figures/perfmetrics/perfmetrics_fig_4.png + :width: 90% -.. |pic_permetrics4| image:: /recipes/figures/perfmetrics/perfmetrics_fig_4.png - :width: 52% + Annual mean (2001-2012) of the shortwave cloud radiative effect from CERES-EBAF. .. figure:: /recipes/figures/perfmetrics/perfmetrics_fig_5.png - :width: 75% + :width: 90% :align: center + Relative space-time root-mean-square deviation (RMSD) calculated from the climatological seasonal cycle of CMIP5 simulations. A relative performance is displayed, with blue shading indicating better and red shading indicating worse performance than the median of all model results. A diagonal split of a grid square shows the relative error with respect to the reference data set (lower right triangle) and the alternative data set (upper left triangle). White boxes are used when data are not available for a given model and variable.
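As mentioned under "User settings in recipe" above, a script entry combining
several of the main.ncl settings could be sketched as follows (the values
are illustrative):

.. code-block:: yaml

   scripts:
     cycle_latlon_global:
       script: perfmetrics/main.ncl
       plot_type: cycle_latlon
       time_avg: monthlyclim
       region: global
       calc_grading: true
       metric: [RMSD]
       normalization: [centered_median]  # illustrative normalization name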
diff --git a/doc/sphinx/source/recipes/recipe_psyplot.rst b/doc/sphinx/source/recipes/recipe_psyplot.rst new file mode 100644 index 0000000000..8c04784cd1 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_psyplot.rst @@ -0,0 +1,64 @@ +.. _recipes_psyplot_diag: + +Psyplot Diagnostics +=================== + +Overview +-------- + +These recipes showcase the use of the Psyplot diagnostic that provides a +high-level interface to `Psyplot `__ for ESMValTool +recipes. + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + + * recipe_psyplot.yml + +Diagnostics are stored in diag_scripts/ + + * :ref:`psyplot_diag.py ` + + +Variables +--------- + +Arbitrary variables are supported. + + +Observations and reformat scripts +--------------------------------- + +Arbitrary datasets are supported. + + +References +---------- + +* Sommer, (2017), The psyplot interactive visualization framework, Journal of + Open Source Software, 2(16), 363, doi:10.21105/joss.00363 + + +Example plots +------------- + +.. _fig_psyplot_1: +.. figure:: /recipes/figures/psyplot/psyplot_CanESM5.jpg + :align: center + :width: 50% + + Historical near-surface air temperature climatology over Europe simulated by + CanESM5 between 1995 and 2014. The plot visualizes the individual + rectangular grid cells of the model's regular grid. + +.. _fig_psyplot_2: +.. figure:: /recipes/figures/psyplot/psyplot_ICON-ESM-LR.jpg + :align: center + :width: 50% + + Historical near-surface air temperature climatology over Europe simulated by + ICON-ESM-LR between 1995 and 2014. The plot visualizes the individual + triangular grid cells of the model's unstructured grid. diff --git a/doc/sphinx/source/recipes/recipe_pv_capacity_factor.rst b/doc/sphinx/source/recipes/recipe_pv_capacity_factor.rst new file mode 100644 index 0000000000..2debdf472f --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_pv_capacity_factor.rst @@ -0,0 +1,77 @@ +.. _recipes_pv_capacity_factor: + +Capacity factor for solar photovoltaic (PV) systems +=================================================== + +Overview +-------- + +This diagnostic computes the photovoltaic (PV) capacity factor, +a measure of the fraction of the +maximum possible energy produced per PV grid cell. It uses the daily incoming +surface solar radiation and the surface temperature with a method described +in `Bett and Thornton (2016)`_. The user can select temporal +range, season, and region of interest. + + +.. _`Bett and Thornton (2016)`: https://doi.org/10.1016/j.renene.2015.10.006 + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + + * recipe_pv_capacity_factor.yml + +Diagnostics are stored in diag_scripts/pv_capacityfactor/ + + * pv_capacity_factor.R: prepares data and plots results. + * PV_CF.R: calculates the daily capacity factor. + + +User settings +------------- + +User setting files are stored in recipes/ + +#. recipe_pv_capacity_factor.yml + + *Required settings for script* + + * season: String to include shortcut for season in plot title and name (e.g. "djf"). + It will be converted to upper case. This season should be the one set in the preprocessor, + since it is only used as a string and does not affect the data in the diagnostic. + In the default recipe this is solved through a node anchor. + + *Optional settings for script* + + * maxval_colorbar: Optional upper limit for the colorbar. + +Variables +--------- + +* tas (atmos, daily, longitude, latitude, time) +* rsds (atmos, daily, longitude, latitude, time) + + +Observations and reformat scripts +--------------------------------- + +* ERA-Interim
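For orientation, the quantity plotted by this recipe is, by definition, the
produced energy relative to the maximum possible energy:

.. math::

   \mathrm{CF} = \frac{\overline{P}}{P_{\mathrm{nominal}}},

where :math:`\overline{P}` is the mean power generated over the selected
period and :math:`P_{\mathrm{nominal}}` the rated peak power; the diagnostic
evaluates this from daily ``rsds`` and ``tas`` following Bett and Thornton
(2016).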
References +---------- + +* Bett, P. E. and Thornton, H. E.: The climatological relationships between wind and solar energy supply in Britain, Renew. Energ., 87, 96–110, https://doi.org/10.1016/j.renene.2015.10.006, 2016. + + +Example plots +------------- + +.. _fig_pv_capfactor1: +.. figure:: /recipes/figures/pv_capacity_factor/capacity_factor_IPSL-CM5A-MR_1980-2005_DJF.png + :align: center + :width: 14cm + +PV capacity factor calculated from IPSL-CM5A-MR during the DJF season for 1980–2005. \ No newline at end of file diff --git a/doc/sphinx/source/recipes/recipe_quantilebias.rst b/doc/sphinx/source/recipes/recipe_quantilebias.rst index dba7af1c92..f17498096f --- a/doc/sphinx/source/recipes/recipe_quantilebias.rst +++ b/doc/sphinx/source/recipes/recipe_quantilebias.rst @@ -1,46 +1,56 @@ +.. _recipes_quantilebias: + Precipitation quantile bias =========================== - + Overview -------- - -Precipitation is a dominant component of the hydrological cycle, and as such a main driver of the climate system and human development. The reliability of climate projections and water resources strategies therefore depends on how well precipitation can be reproduced by the models used for simulations. While global circulation models from the CMIP5 project observations can reproduce the main patterns of mean precipitation, they often show shortages and biases in the ability to reproduce the strong precipitation tails of the distribution. Most models underestimate precipitation over arid regions and overestimate it over regions of complex topography, and these shortages are amplified at high quantile precipitation. The quantilebias recipe implements calculation of the quantile bias to allow evaluation of the precipitation bias based on a user defined quantile in models as compared to a reference dataset following Mehran et al. (2014). The quantile bias (QB) is defined as the ratio of monthly precipitation amounts in each simulation to that of the reference dataset (GPCP observations in the example) above a specified threshold t (e.g., the 75th percentile of all the local monthly values). A quantile bias equal to 1 indicates no bias in the simulations, whereas a value above (below) 1 corresponds to a climate model's overestimation (underestimation) of the precipitation amount above the specified threshold t, with respect to that of the reference dataset. - - +Precipitation is a dominant component of the hydrological cycle, and as such a main driver of the climate system and human development. The reliability of climate projections and water resources strategies therefore depends on how well precipitation can be reproduced by the models used for simulations. While global circulation models from the CMIP5 project can reproduce the main patterns of mean precipitation, they often show shortages and biases in the ability to reproduce the strong precipitation tails of the distribution. Most models underestimate precipitation over arid regions and overestimate it over regions of complex topography, and these shortages are amplified at high quantile precipitation. The quantilebias recipe implements calculation of the quantile bias to allow evaluation of the precipitation bias based on a user-defined quantile in models as compared to a reference dataset following Mehran et al. (2014). The quantile bias (QB) is defined as the ratio of monthly precipitation amounts in each simulation to that of the reference dataset (GPCP observations in the example) above a specified threshold t (e.g., the 75th percentile of all the local monthly values). A quantile bias equal to 1 indicates no bias in the simulations, whereas a value above (below) 1 corresponds to a climate model's overestimation (underestimation) of the precipitation amount above the specified threshold t, with respect to that of the reference dataset.
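Written as a formula, the verbal definition above corresponds to

.. math::

   \mathrm{QB} = \frac{\sum_{P_{\mathrm{sim}} \ge t} P_{\mathrm{sim}}}
                      {\sum_{P_{\mathrm{obs}} \ge t} P_{\mathrm{obs}}},

where :math:`P_{\mathrm{sim}}` and :math:`P_{\mathrm{obs}}` are the monthly
precipitation amounts of the simulation and of the reference dataset, and
:math:`t` is the chosen threshold.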
+ + Available recipes and diagnostics --------------------------------- - + Recipes are stored in recipes/ - + * recipe_quantilebias.yml - + Diagnostics are stored in diag_scripts/quantilebias/ - + * quantilebias.R - - + + User settings ------------- - + *Required settings for script* * perc_lev: quantile (in %), e.g. 50 - - + + Variables --------- - -* pr (atmos, daily mean, longitude latitude time) - - + +* pr (atmos, monthly, longitude latitude time) + + Observations and reformat scripts --------------------------------- - -* GPCP-SG observations (accessible via the obs4mips project) - - + +* GPCP-SG observations (accessible via the obs4MIPs project) + + References ---------- - + * Mehran, A. et al.: Journal of Geophysical Research: Atmospheres, Volume 119, Issue 4, pp. 1695-1707, 2014. + +Example plots +------------- + +.. figure:: /recipes/figures/quantilebias/quantilebias.png + :width: 10cm + + Quantile bias, as defined in Mehran et al. 2014, with threshold t=75th percentile, evaluated for the CanESM2 model over the 1979-2005 period, adopting GPCP-SG v2.3 gridded precipitation as a reference dataset. The optimal reference value is 1. Both datasets have been regridded onto a 2° regular grid. diff --git a/doc/sphinx/source/recipes/recipe_radiation_budget.rst b/doc/sphinx/source/recipes/recipe_radiation_budget.rst new file mode 100644 index 0000000000..d67e0a0527 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_radiation_budget.rst @@ -0,0 +1,82 @@ +.. _recipes_radiation_budget: + +Radiation Budget +================ + +Overview +-------- + +The aim of monitoring the energy budget is to understand the (im)balance +of energy flux between the atmosphere and the surface of a model, due to its +link with the hydrological cycle and climate change. + +This diagnostic analyses the radiation budget by separating top-of-atmosphere +fluxes into clear-sky and cloud forcing components, and surface fluxes into +downwelling and upwelling components. Model predictions are compared against +three observational estimates, one of which (Stephens et al. 2012) includes +uncertainty estimates. When the black error bars overlap the zero line, the +model is consistent with observations according to Stephens et al. (2012). + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in esmvaltool/recipes/ + + * recipe_radiation_budget.yml + +Diagnostics are stored in esmvaltool/diag_scripts/radiation_budget/ + + * radiation_budget.py: Plot the global radiation budget. + * seasonal_radiation_budget.py: Write the global climatological seasonal radiation budget to a text file.
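For reference, the cloud forcing (cloud radiative effect) components in such
a top-of-atmosphere decomposition are conventionally obtained from the
all-sky and clear-sky fluxes listed under Variables below, e.g. for the
outgoing components:

.. math::

   \mathrm{CRE}_{\mathrm{LW}} = \mathrm{rlutcs} - \mathrm{rlut}, \qquad
   \mathrm{CRE}_{\mathrm{SW}} = \mathrm{rsutcs} - \mathrm{rsut}.

These are the standard definitions; the exact formulation and sign
convention used by the diagnostic may differ.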
+ + + +User settings in recipe ----------------------- + +None + + +Variables --------- + +* rss (atmos, monthly mean, longitude latitude time) +* rsdt (atmos, monthly mean, longitude latitude time) +* rsut (atmos, monthly mean, longitude latitude time) +* rsutcs (atmos, monthly mean, longitude latitude time) +* rsds (atmos, monthly mean, longitude latitude time) +* rls (atmos, monthly mean, longitude latitude time) +* rlut (atmos, monthly mean, longitude latitude time) +* rlutcs (atmos, monthly mean, longitude latitude time) +* rlds (atmos, monthly mean, longitude latitude time) +* hfss (atmos, monthly mean, longitude latitude time) +* hfls (atmos, monthly mean, longitude latitude time) + + +Observations and reformat scripts --------------------------------- + +*Note: (1) obs4MIPs data can be used directly without any preprocessing; +(2) see headers of reformat scripts for non-obs4MIPs data for download +instructions.* + +* CERES-EBAF (rlut, rlutcs, rsut, rsutcs - obs4MIPs) +* Demory observations can be found in esmvaltool/diag_scripts/radiation_budget/Demory_et_al_2014_obs_Energy_Budget.yml and are from Figure 2 in Demory et al. (2014). +* Stephens observations can be found in esmvaltool/diag_scripts/radiation_budget/Stephens_et_al_2012_obs_Energy_Budget.yml and are from Figure 1b in Stephens et al. (2012). + + +References ---------- + +* Demory, ME., Vidale, P.L., Roberts, M.J. et al. The role of horizontal resolution in simulating drivers of the global hydrological cycle. Clim Dyn 42, 2201–2225 (2014). https://doi.org/10.1007/s00382-013-1924-4 +* Stephens, G., Li, J., Wild, M. et al. An update on Earth's energy balance in light of the latest global observations. Nature Geosci 5, 691–696 (2012). https://doi.org/10.1038/ngeo1580 + + +Example plots ------------- + +.. _fig_radiation_budget_1: +.. figure:: /recipes/figures/radiation_budget/UKESM1-0-LL.png + :align: center + + Radiation budget for UKESM1-0-LL. diff --git a/doc/sphinx/source/recipes/recipe_rainfarm.rst b/doc/sphinx/source/recipes/recipe_rainfarm.rst new file mode 100644 index 0000000000..aeb7cd0638 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_rainfarm.rst @@ -0,0 +1,63 @@ +.. _recipes_rainfarm: + +RainFARM stochastic downscaling +=============================== + + +Overview +-------- + +Precipitation extremes and small-scale variability are essential drivers in many climate change impact studies. However, the spatial resolution currently achieved by global and regional climate models is still insufficient to correctly identify the fine structure of precipitation intensity fields. In the absence of a proper physically based representation, this scale gap can be at least temporarily bridged by adopting a stochastic rainfall downscaling technique (Rebora et al., 2006). With this aim, the Rainfall Filtered Autoregressive Model (RainFARM) was developed to apply the stochastic precipitation downscaling method to climate models. The RainFARM Julia library and command-line tool version (https://github.com/jhardenberg/RainFARM.jl) was implemented as a recipe. The stochastic method makes it possible to predict climate variables at local scale from information simulated by climate models at regional scale: it first evaluates the statistical distribution of precipitation fields at regional scale and then applies the relationship to the boundary conditions of the climate model to produce synthetic fields at the requested higher resolution.
RainFARM exploits the nonlinear transformation of a Gaussian random precipitation field, conserving the information present in the fields at larger scale (Rebora et al., 2006; D’Onofrio et al., 2014). + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + +* recipe_rainfarm.yml + +Diagnostics are stored in diag_scripts/rainfarm/ + +* rainfarm.jl + + +User settings +------------- + +*Required settings for script* + +* slope: spatial spectral slope (set to 0 to compute automatically from large scales) +* nens: number of ensemble members to be calculated +* nf: number of subdivisions for downscaling (e.g. 8 will produce output fields with linear resolution increased by a factor of 8) +* conserv_glob: logical, whether to conserve precipitation over the full domain +* conserv_smooth: logical, whether to conserve precipitation using convolution (if neither conserv_glob nor conserv_smooth is chosen, box conservation is used) +* weights_climo: set to false or omit if no orographic weights are to be used, else set it to the path to a fine-scale precipitation climatology file. If a relative file path is used, ``auxiliary_data_dir`` will be searched for this file. The file is expected to be in NetCDF format and should contain at least one precipitation field. If several fields at different times are provided, a climatology is derived by time averaging. Suitable climatology files could be for example a fine-scale precipitation climatology from a high-resolution regional climate model (see e.g. Terzago et al. 2018), a local high-resolution gridded climatology from observations, or a reconstruction such as those which can be downloaded from the WORLDCLIM (http://www.worldclim.org) or CHELSA (http://chelsa-climate.org) websites. The latter data will need to be converted to NetCDF format before being used (see for example the GDAL tools, https://www.gdal.org). + +A combined sketch of these settings is given under Example plots below. + +Variables +--------- + +* pr (atmos, daily mean, longitude latitude time) + + +Observations and reformat scripts +--------------------------------- + +None. + + +References +---------- + +* Terzago et al. 2018, Nat. Hazards Earth Syst. Sci., 18, 2825-2840 +* D'Onofrio et al. 2014, J of Hydrometeorology 15, 830-843 +* Rebora et al. 2006, JHM 7, 724 + +Example plots +------------- + +.. figure:: /recipes/figures/rainfarm/rainfarm.png + :width: 14cm + + Example of daily cumulated precipitation from the CMIP5 EC-EARTH model on a specific day, downscaled using RainFARM from its original resolution (1.125°) (left panel), increasing spatial resolution by a factor of 8 to 0.14°; Two stochastic realizations are shown (central and right panel). A fixed spectral slope of s=1.7 was used. Notice how the downscaled fields introduce fine scale precipitation structures, while still maintaining on average the original coarse-resolution precipitation. Different stochastic realizations are shown to demonstrate how an ensemble of realizations can be used to reproduce unresolved subgrid variability. (N.B.: this plot was not produced by ESMValTool - the recipe output is netcdf only).
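As referenced above, a script entry combining these settings, chosen here to
match the example plot (a fixed slope of 1.7, two realizations, and an
8-fold resolution increase), might be sketched as:

.. code-block:: yaml

   scripts:
     rainfarm:
       script: rainfarm/rainfarm.jl
       slope: 1.7            # fixed spectral slope (0 = estimate from large scales)
       nens: 2               # number of stochastic realizations
       nf: 8                 # 8-fold increase in linear resolution
       conserv_glob: false
       conserv_smooth: true  # conserve precipitation via convolution
       weights_climo: false  # no orographic weights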
diff --git a/doc/sphinx/source/recipes/recipe_runoff_et.rst b/doc/sphinx/source/recipes/recipe_runoff_et.rst index 3ce3fdc5bb..2ca8d72a99 --- a/doc/sphinx/source/recipes/recipe_runoff_et.rst +++ b/doc/sphinx/source/recipes/recipe_runoff_et.rst @@ -1,5 +1,7 @@ -Runoff_ET -========= +.. _recipes_runoff_et: + +Runoff, Precipitation, Evapotranspiration +========================================= Overview -------- @@ -127,4 +129,3 @@ Example plots Biases in runoff coefficient (runoff/precipitation) and precipitation for major catchments of the globe. The MPI-ESM-LR historical simulation (1970-2000) is used as an example. - diff --git a/doc/sphinx/source/recipes/recipe_russell18jgr.rst b/doc/sphinx/source/recipes/recipe_russell18jgr.rst new file mode 100644 index 0000000000..53b242964e --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_russell18jgr.rst @@ -0,0 +1,378 @@ +.. _nml_oceanmetrics: + +Ocean metrics +============= + +Overview +-------- + +The Southern Ocean is central to the global climate and the global carbon cycle, and to the climate’s response to increasing levels of atmospheric greenhouse gases. Global coupled climate models and earth system models, however, vary widely in their simulations of the Southern Ocean and its role in, and response to, the ongoing anthropogenic trend. Observationally-based metrics are critical for discerning processes and mechanisms, and for validating and comparing climate and earth system models. New observations and understanding have allowed for progress in the creation of observationally-based data/model metrics for the Southern Ocean. + +The metrics presented in this recipe provide a means to assess multiple simulations relative to the best available observations and observational products. Climate models that perform better according to these metrics also better simulate the uptake of heat and carbon by the Southern Ocean. Russell et al. 2018 assessed only a few of the available CMIP5 simulations, but most of the available CMIP5 and CMIP6 climate models can be analyzed with these recipes. + +The goal is to create a recipe for recreation of metrics in Russell, J.L., et al., 2018, J. Geophys. Res. – Oceans, 123, 3120-3143, doi: 10.1002/2017JC013461. + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + +* recipe_russell18jgr.yml + +Diagnostics are stored in diag_scripts/russell18jgr/ + +* russell18jgr-polar.ncl (figures 1, 7, 8): calculates and plots annual-mean variables (tauu, sic, fgco2, pH) as polar contour maps. +* russell18jgr-fig2.ncl: calculates and plots the zonal and annual means of the zonal wind stress (N/m\ :sup:`2`\). +* russell18jgr-fig3b.ncl: calculates and plots the latitudinal position of the Subantarctic Front, using definitions from Orsi et al. (1995). +* russell18jgr-fig3b-2.ncl: calculates and plots the latitudinal position of the Polar Front, using definitions from Orsi et al. (1995). +* russell18jgr-fig4.ncl: calculates and plots the zonal velocity through Drake Passage (at 69W) and total transport through the passage if the volcello file is available. +* russell18jgr-fig5.ncl: calculates and plots the mean extent of sea ice for September (max) in blue and mean extent of sea ice for February (min) in red. +* russell18jgr-fig5g.ncl: calculates and plots the annual cycle of sea ice area in the Southern Ocean. +* russell18jgr-fig6a.ncl: calculates and plots the density-layer-based volume transport (in Sv) across 30S based on the layer definitions in Talley (2008). +* russell18jgr-fig6b.ncl: calculates and plots the density-layer-based heat transport (in PW) across 30S based on the layer definitions in Talley (2008). +* russell18jgr-fig7h.ncl: calculates and plots the zonal mean flux of fgco2 in gC/(yr * m\ :sup:`2`\).
+* russell18jgr-fig7i.ncl: calculates and plots the cumulative integral of the net CO2 flux from 90S to 30S (in PgC/yr). +* russell18jgr-fig9a.ncl: calculates and plots the scatter plot of the width of the Southern Hemisphere westerly wind band against the annual-mean integrated heat uptake south of 30S (in PW), along with the line of best fit. +* russell18jgr-fig9b.ncl: calculates and plots the scatter plot of the width of the Southern Hemisphere westerly wind band against the annual-mean integrated carbon uptake south of 30S (in Pg C/yr), along with the line of best fit. +* russell18jgr-fig9c.ncl: calculates and plots the scatter plot of the net heat uptake south of 30S (in PW) against the annual-mean integrated carbon uptake south of 30S (in Pg C/yr), along with the line of best fit. + +User settings in recipe +----------------------- + +The per-script settings listed below are specified in the recipe; a schematic recipe excerpt is given after the example plots below. + +#. Script russell18jgr-polar.ncl + + *Required settings (scripts)* + + * styleset : CMIP5 (recommended), default, etc. + * ncdf : default (recommended), CMIP5, etc. + * max_lat : -30.0 + + *Optional settings (scripts)* + + * grid_max : 0.4 (figure 1), 30 (figure 7), 8.2 (figure 8) + * grid_min : -0.4 (figure 1), -30 (figure 7), 8.0 (figure 8) + * grid_step : 0.1 (figure 1), 2.5 (figure 7), 0.1 (figure 8) + * colormap : BlWhRe (figure 7) + * colors : [[237.6, 237.6, 0.], [ 255, 255, 66.4], [255, 255, 119.6], [255, 255, 191.8], [223.8, 191.8, 223.8], [192.8, 127.5, 190.8], [161.6, 65.3, 158.6], [129.5, 1.0, 126.5] ] (figure 1) + [[132,12,127], [147,5,153], [172,12,173], [195,33,196], [203,63,209], [215,89,225], [229,117,230], [243,129,238], [253,155,247], [255,178,254], [255,255,255], + [255,255,255], [126,240,138], [134,234,138], [95,219,89], [57,201,54], [39,182,57], [33,161,36], [16,139,22], [0,123,10], [6,96,6], [12,77,9.0] ] (figure 8) + * max_vert : 1 - 4 (user preference) + * max_hori : 1 - 4 (user preference) + * grid_color: blue4 (figure 8) + * labelBar_end_type: ExcludeOuterBoxes (figure 1), both_triangle (figure 7, 8) + * unitCorrectionalFactor: -3.154e+10 (figure 7) + * new_units : "gC/ (m~S~2~N~ * yr)" (figure 7) + + *Required settings (variables)* + + * additional_dataset: datasets to plot. + + *Optional settings (variables)* + + * none + + +#. Script russell18jgr-fig2.ncl + + *Required settings (scripts)* + + * styleset : CMIP5 (recommended), default, etc. + * ncdf : default (recommended), CMIP5, etc. + + *Optional settings (scripts)* + + * none + + +#. Script russell18jgr-fig3b.ncl + + *Required settings (scripts)* + + * styleset : CMIP5 (recommended), default, etc. + * ncdf : default (recommended), CMIP5, etc. + + *Optional settings (scripts)* + + * none + + +#. Script russell18jgr-fig3b-2.ncl + + *Required settings (scripts)* + + * styleset : CMIP5 (recommended), default, etc. + * ncdf : default (recommended), CMIP5, etc. + + *Optional settings (scripts)* + + * none + + +#. Script russell18jgr-fig4.ncl + + *Required settings (scripts)* + + * styleset : CMIP5 (recommended), default, etc. + * ncdf : default (recommended), CMIP5, etc. + + *Optional settings (scripts)* + + * max_vert : 1 - 4 (user preference) + * max_hori : 1 - 4 (user preference) + * unitCorrectionalFactor: 100 (m/s to cm/s) + * new_units : "cm/s" + + +#. Script russell18jgr-fig5.ncl + + *Required settings (scripts)* + + * styleset : CMIP5 (recommended), default, etc. + * ncdf : default (recommended), CMIP5, etc. + * max_lat : -45.0 + + *Optional settings (scripts)* + + * max_vert : 1 - 4 (user preference) + * max_hori : 1 - 4 (user preference) + + +#.
Script russell18jgr-fig5g.ncl + + *Required settings (scripts)* + + * styleset : CMIP5 (recommended), default, etc. + + *Optional settings (scripts)* + + * none + + +#. Script russell18jgr-fig6a.ncl + + *Required settings (scripts)* + + * styleset : CMIP5 (recommended), default, etc. + * ncdf : default (recommended), CMIP5, etc. + + *Optional settings (scripts)* + + * none + + +#. Script russell18jgr-fig6b.ncl + + *Required settings (scripts)* + + * styleset : CMIP5 (recommended), default, etc. + * ncdf : default (recommended), CMIP5, etc. + + *Optional settings (scripts)* + + * none + + +#. Script russell18jgr-fig7h.ncl + + *Required settings (scripts)* + + * styleset : CMIP5 (recommended), default, etc. + * ncdf : default (recommended), CMIP5, etc. + + *Optional settings (scripts)* + + * none + + +#. Script russell18jgr-fig7i.ncl + + *Required settings (scripts)* + + * styleset : CMIP5 (recommended), default, etc. + * ncdf : default (recommended), CMIP5, etc. + + *Optional settings (scripts)* + + * none + +#. Script russell18jgr-fig9a.ncl + + *Required settings (scripts)* + + * styleset : CMIP5 (recommended), default, etc. + * ncdf : default (recommended), CMIP5, etc. + + *Optional settings (scripts)* + + * none + + +#. Script russell18jgr-fig9b.ncl + + *Required settings (scripts)* + + * styleset : CMIP5 (recommended), default, etc. + * ncdf : default (recommended), CMIP5, etc. + + *Optional settings (scripts)* + + * none + + +#. Script russell18jgr-fig9c.ncl + + *Required settings (scripts)* + + * styleset : CMIP5 (recommended), default, etc. + * ncdf : default (recommended), CMIP5, etc. + + *Optional settings (scripts)* + + * none + + + +Variables +--------- + +* tauu (atmos, monthly mean, longitude latitude time) +* tauuo, hfds, fgco2 (ocean, monthly mean, longitude latitude time) +* thetao, so, vo (ocean, monthly mean, longitude latitude lev time) +* pH (ocnBgchem, monthly mean, longitude latitude time) +* uo (ocean, monthly mean, longitude latitude lev time) +* sic (seaIce, monthly mean, longitude latitude time) + +Observations and reformat scripts +--------------------------------- + +Note: WOA data has not been tested with recipe_russell18jgr.yml and + the corresponding diagnostic scripts. + +* WOA (thetao, so - esmvaltool/cmorizers/data/formatters/datasets/woa.py) + +References +---------- + +* Russell, J.L., et al., 2018, J. Geophys. Res. – Oceans, 123, 3120-3143. https://doi.org/10.1002/2017JC013461 + +* Talley, L.D., 2003. Shallow, intermediate and deep overturning components of the global heat budget. Journal of Physical Oceanography, 33, 530–560. + + +Example plots +------------- + +.. _fig_russell_1: +.. figure:: /recipes/figures/russell18jgr/Fig1_polar-contour_tauu_1986-2005.png + :align: center + :width: 50% + + Figure 1: Annual-mean zonal wind stress (tauu - N/m\ :sup:`2`\) with eastward wind stress as positive plotted as a polar contour map. + +.. _fig_russell_2: +.. figure:: /recipes/figures/russell18jgr/Fig2_1986-2005.png + :align: center + :width: 50% + + Figure 2: The zonal and annual means of the zonal wind stress (N/m\ :sup:`2`\) plotted in a line plot. + +.. _fig_russell_3a: +.. figure:: /recipes/figures/russell18jgr/Fig3_Polar-Front.png + :align: center + :width: 50% + + Figure 3a: The latitudinal position of the Subantarctic Front using definitions from Orsi et al. (1995). + +.. _fig_russell_3b: +..
figure:: /recipes/figures/russell18jgr/Fig3_Subantarctic-Fronts.png + :align: center + :width: 50% + + Figure 3b: The latitudinal position of the Polar Front using definitions from Orsi et al. (1995). + +.. _fig_russell_4: +.. figure:: /recipes/figures/russell18jgr/Fig4_Drake_passage.png + :align: center + :width: 50% + + Figure 4: Time-averaged zonal velocity through Drake Passage (at 69W, in cm/s, eastward is positive). The total transport by the ACC is calculated if the volcello file is available. + +.. _fig_russell_5: +.. figure:: /recipes/figures/russell18jgr/Fig5_sic-max-min.png + :align: center + :width: 50% + + Figure 5: Mean extent of sea ice for September (max) in blue and February (min) in red plotted as a polar contour map. + + +.. _fig_russell_5g: +.. figure:: /recipes/figures/russell18jgr/Fig5g_sic-line.png + :align: center + :width: 50% + + Figure 5g: Annual cycle of sea ice area in the Southern Ocean as a line plot (monthly climatology). + +.. _fig_russell_6a: +.. figure:: /recipes/figures/russell18jgr/Fig6a.png + :align: center + :width: 50% + + Figure 6a: Density-layer-based volume transport (in Sv) across 30S based on the layer definitions in Talley (2008). + +.. _fig_russell_6b: +.. figure:: /recipes/figures/russell18jgr/Fig6b.png + :align: center + :width: 50% + + Figure 6b: Density-layer-based heat transport (in PW) across 30S based on the layer definitions in Talley (2008). + + +.. _fig_russell_7: +.. figure:: /recipes/figures/russell18jgr/Fig7_fgco2_polar.png + :align: center + :width: 50% + + Figure 7: Annual mean CO\ :sub:`2`\  flux (sea to air, gC/(yr * m\ :sup:`2`\), positive (red) is out of the ocean) as a polar contour map. + +.. _fig_russell_7h: +.. figure:: /recipes/figures/russell18jgr/Fig7h_fgco2_zonal-flux.png + :align: center + :width: 50% + + Figure 7h: The time and zonal mean flux of CO\ :sub:`2`\  in gC/(yr * m\ :sup:`2`\) plotted as a line plot. + + +.. _fig_russell_7i: +.. figure:: /recipes/figures/russell18jgr/Fig7i_fgco2_integrated-flux.png + :align: center + :width: 50% + + Figure 7i: The cumulative integral of the net CO\ :sub:`2`\  flux from 90S to 30S (in PgC/yr) plotted as a line plot. + +.. _fig_russell_8: +.. figure:: /recipes/figures/russell18jgr/Fig8_polar-ph.png + :align: center + :width: 50% + + Figure 8: Annual-mean surface pH plotted as a polar contour map. + +.. _fig_russell_9a: +.. figure:: /recipes/figures/russell18jgr/Fig9a.png + :align: center + :width: 50% + + Figure 9a: Scatter plot of the width of the Southern Hemisphere westerly wind band (in degrees of latitude) against the annual-mean integrated heat uptake south of 30S (in PW; negative uptake is heat lost from the ocean), along with the best fit line. + +.. _fig_russell_9b: +.. figure:: /recipes/figures/russell18jgr/Fig9b.png + :align: center + :width: 50% + + Figure 9b: Scatter plot of the width of the Southern Hemisphere westerly wind band (in degrees of latitude) against the annual-mean integrated carbon uptake south of 30S (in Pg C/yr), along with the best fit line. + +.. _fig_russell_9c: +.. figure:: /recipes/figures/russell18jgr/Fig9c.png + :align: center + :width: 50% + + Figure 9c: Scatter plot of the net heat uptake south of 30S (in PW) against the annual-mean integrated carbon uptake south of 30S (in Pg C/yr), along with the best fit line.
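+
+For orientation, a schematic excerpt showing how the per-script settings
+documented above are specified under ``scripts`` in the recipe. This is a
+sketch, not a verbatim copy of ``recipe_russell18jgr.yml``: the entry name
+``fig1_polar_tauu`` is made up for this example, while the setting values are
+the figure-1 examples from the tables above:
+
+.. code-block:: yaml
+
+   scripts:
+     fig1_polar_tauu:
+       script: russell18jgr/russell18jgr-polar.ncl
+       styleset: CMIP5
+       ncdf: default
+       max_lat: -30.0
+       grid_min: -0.4
+       grid_max: 0.4
+       grid_step: 0.1
+       labelBar_end_type: ExcludeOuterBoxes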
diff --git a/doc/sphinx/source/recipes/recipe_schlund20esd.rst b/doc/sphinx/source/recipes/recipe_schlund20esd.rst new file mode 100644 index 0000000000..d19496adb9 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_schlund20esd.rst @@ -0,0 +1,134 @@ +.. _recipes_schlund20esd: + +Emergent constraints on equilibrium climate sensitivity in CMIP5: do they hold for CMIP6? +========================================================================================= + +Overview +-------- + +This recipe reproduces the analysis of `Schlund et al., Earth Sys. Dyn. +(2020)`_. In this paper, emergent constraints on the equilibrium climate +sensitivity are evaluated on CMIP5 and CMIP6 models. Since none of the +considered emergent constraints have been developed on the CMIP6 ensemble, this +allows out-of-sample testing of the emergent constraints. Most emergent +constraints show a reduced skill in CMIP6 when compared to CMIP5. + +.. _`Schlund et al., Earth Sys. Dyn. (2020)`: https://doi.org/10.5194/esd-11-1233-2020 + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + + * recipe_schlund20esd.yml + +Diagnostics are stored in diag_scripts/ + + * :ref:`climate_metrics/ecs.py` + * :ref:`climate_metrics/psi.py` + * :ref:`emergent_constraints/ecs_scatter.ncl` + * :ref:`emergent_constraints/ecs_scatter.py` + * :ref:`emergent_constraints/multiple_constraints.py` + +More details on the emergent constraint module are given in the API +documentation, which is available +:ref:`here`. + + +Variables +--------- + +* *cl* (atmos, monthly, longitude, latitude, level, time) +* *clt* (atmos, monthly, longitude, latitude, time) +* *hur* (atmos, monthly, longitude, latitude, level, time) +* *hus* (atmos, monthly, longitude, latitude, level, time) +* *pr* (atmos, monthly, longitude, latitude, time) +* *rsdt* (atmos, monthly, longitude, latitude, time) +* *rsut* (atmos, monthly, longitude, latitude, time) +* *rsutcs* (atmos, monthly, longitude, latitude, time) +* *rtnt* or *rtmt* (atmos, monthly, longitude, latitude, time) +* *ta* (atmos, monthly, longitude, latitude, level, time) +* *tas* (atmos, monthly, longitude, latitude, time) +* *tasa* (atmos, monthly, longitude, latitude, time) +* *tos* (atmos, monthly, longitude, latitude, time) +* *ts* (atmos, monthly, longitude, latitude, time) +* *va* (atmos, monthly, longitude, latitude, level, time) +* *wap* (atmos, monthly, longitude, latitude, level, time) + + +Observations and reformat scripts +--------------------------------- + +* AIRS_ (*hur*, *hus*) +* CERES-EBAF_ (*rsut*, *rsutcs*, *rsdt*) +* ERA-Interim_ (*hur*, *ta*, *va*, *wap*) +* GPCP-SG_ (*pr*) +* HadCRUT4_ (*tasa*) +* HadISST_ (*ts*) +* MLS-AURA_ (*hur*) + +.. _AIRS: https://opendata.dwd.de/climate_environment/GPCC/html/fulldata-monthly_v2018_doi_download.html +.. _CERES-EBAF: https://opendata.dwd.de/climate_environment/GPCC/html/fulldata-monthly_v2018_doi_download.html +.. _ERA-Interim: http://apps.ecmwf.int/datasets/data/interim-full-moda/ +.. _GPCP-SG: https://opendata.dwd.de/climate_environment/GPCC/html/fulldata-monthly_v2018_doi_download.html +.. _HadCRUT4: https://crudata.uea.ac.uk/cru/data/temperature/ +.. _HadISST: http://www.metoffice.gov.uk/hadobs/hadisst/data/download.html +.. _MLS-AURA: https://disc.gsfc.nasa.gov/datasets/ML2RHI_004/summary + + +References +---------- + +* Schlund, M., Lauer, A., Gentine, P., Sherwood, S.
C., and Eyring, V.: + Emergent constraints on equilibrium climate sensitivity in CMIP5: do they + hold for CMIP6?, Earth Syst. Dynam., 11, 1233–1258, + `<https://doi.org/10.5194/esd-11-1233-2020>`_, 2020. + + +Example plots +------------- + +.. _fig_schlund20esd_1: +.. figure:: /recipes/figures/schlund20esd/SHL_scatter.png + :align: center + :width: 50% + + Emergent relationship (solid blue and orange lines) of the `Sherwood et al. + (2014) `_ emergent constraint, which is + based on the lower tropospheric mixing index (LTMI). The numbers correspond + to individual CMIP models. The shaded area around the regression line + corresponds to the standard prediction error, which defines the error in the + regression model itself. The vertical dashed black line corresponds to the + observational reference with its uncertainty range given as standard error + (gray shaded area). The horizontal dashed lines show the best estimates of + the constrained ECS for CMIP5 (blue) and CMIP6 (orange). The colored dots + mark the CMIP5 (blue) and CMIP6 (orange) multi-model means. + +.. _fig_schlund20esd_2: +.. figure:: /recipes/figures/schlund20esd/SHL_pdf.png + :align: center + :width: 50% + + Probability densities for the constrained ECS (solid lines) and the + unconstrained model ensembles (histograms) of the emergent relationship + shown in the figure above. + +.. _fig_schlund20esd_3: +.. figure:: /recipes/figures/schlund20esd/ZHA_scatter.png + :align: center + :width: 50% + + Emergent relationship of the `Zhai et al. (2015) + `_ emergent constraint for different + subsets of CMIP5 models. Blue circles show the 15 CMIP5 models used in the + original publication (except for CESM1-CAM5); the solid blue line and blue + shaded area show the emergent relationships evaluated on these models + including the uncertainty range. In this study, 11 more CMIP5 models have + been added (red circles). The corresponding emergent relationship that + considers all available CMIP5 models is shown in red colors. This + relationship shows a considerably lower coefficient of determination + (:math:`R^2`) and higher *p*-value than the relationship using the original + subset of CMIP5 models. The vertical dashed line and shaded area correspond + to the observational reference, and the horizontal dashed lines show the + corresponding ECS constraints using this observation. diff --git a/doc/sphinx/source/recipes/recipe_schlund20jgr.rst b/doc/sphinx/source/recipes/recipe_schlund20jgr.rst new file mode 100644 index 0000000000..908af3c497 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_schlund20jgr.rst @@ -0,0 +1,168 @@ +.. _recipes_schlund20jgr: + +Constraining uncertainty in projected gross primary production (GPP) with machine learning +========================================================================================== + +.. warning:: + + Not all datasets necessary to run these recipes are available on ESGF. + The following datasets are missing: + + * Dataset: co2, Amon, CMIP5, HadGEM2-ES, esmHistorical, r1i1p1 + * Dataset: gpp, Lmon, CMIP5, MIROC-ESM, esmFixClim1, r1i1p1 + * Supplementary: sftlf, fx, CMIP5, MIROC-ESM, esmFixClim1, r0i0p0 + +Overview +-------- + +These recipes reproduce the analysis of `Schlund et al., JGR: Biogeosciences +(2020)`_. In this paper, a machine learning regression (MLR) approach (using +the MLR algorithm `Gradient Boosted Regression Trees, GBRT`_) is proposed to +constrain uncertainties in projected gross primary production (GPP) in the RCP +8.5 scenario using observations of process-based diagnostics. + +..
_`Gradient Boosted Regression Trees, GBRT`: https://scikit-learn.org/stable/modules/ensemble.html#gradient-tree-boosting +.. _`Schlund et al., JGR: Biogeosciences (2020)`: https://doi.org/10.1029/2019JG005619 + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + + * schlund20jgr/recipe_schlund20jgr_gpp_abs_rcp85.yml + * schlund20jgr/recipe_schlund20jgr_gpp_change_1pct.yml + * schlund20jgr/recipe_schlund20jgr_gpp_change_rcp85.yml + +Diagnostics are stored in diag_scripts/ + + * :ref:`mlr/evaluate_residuals.py` + * :ref:`mlr/main.py` + * :ref:`mlr/mmm.py` + * :ref:`mlr/plot.py` + * :ref:`mlr/postprocess.py` + * :ref:`mlr/preprocess.py` + * :ref:`mlr/rescale_with_emergent_constraint.py` + +General information (including an example and more details) on machine learning +regression (MLR) diagnostics is given +:ref:`here`. The API documentation is +available :ref:`here`. + + +Variables +--------- + +* *co2s* (atmos, monthly, longitude, latitude, time) +* *gpp* (land, monthly, longitude, latitude, time) +* *gppStderr* (land, monthly, longitude, latitude, time) +* *lai* (land, monthly, longitude, latitude, time) +* *pr* (atmos, monthly, longitude, latitude, time) +* *rsds* (atmos, monthly, longitude, latitude, time) +* *tas* (atmos, monthly, longitude, latitude, time) + + +Observations and reformat scripts +--------------------------------- + +* CRU_ (*pr*, *tas*) +* ERA-Interim_ (*rsds*) +* LAI3g_ (*lai*) +* MTE_ (*gpp*, *gppStderr*) +* Scripps-CO2-KUM_ (*co2s*) + +.. _CRU: https://crudata.uea.ac.uk/cru/data/hrg/cru_ts_4.02/cruts.1811131722.v4.02/ +.. _ERA-Interim: http://apps.ecmwf.int/datasets/data/interim-full-moda/ +.. _LAI3g: http://cliveg.bu.edu/modismisr/lai3g-fpar3g.html +.. _MTE: http://www.bgc-jena.mpg.de/geodb/BGI/Home +.. _Scripps-CO2-KUM: https://scrippsco2.ucsd.edu/data/atmospheric_co2/kum.html + + +References +---------- + +* Schlund, M., Eyring, V., Camps‐Valls, G., Friedlingstein, P., Gentine, P., & + Reichstein, M. (2020). Constraining uncertainty in projected gross primary + production with machine learning. Journal of Geophysical Research: + Biogeosciences, 125, e2019JG005619, + `<https://doi.org/10.1029/2019JG005619>`_. + + +Example plots +------------- + +.. _fig_schlund20jgr_1: +.. figure:: /recipes/figures/schlund20jgr/map_prediction_output___GBRT_change.png + :align: center + :width: 50% + + GBRT-based prediction of the fractional GPP change over the 21st century (= + GPP(2091-2100) / GPP(1991-2000)). + +.. _fig_schlund20jgr_2: +.. figure:: /recipes/figures/schlund20jgr/map_prediction_output_error___GBRT_change.png + :align: center + :width: 50% + + Corresponding error of the GBRT-based prediction of the fractional GPP + change over the 21st century (considering errors in the MLR model and errors + in the predictors). + +.. _fig_schlund20jgr_3: +.. figure:: /recipes/figures/schlund20jgr/map_prediction_output___GBRT_abs.png + :align: center + :width: 50% + + GBRT-based prediction of the absolute GPP at the end of the 21st century + (2091-2100). + +.. _fig_schlund20jgr_4: +.. figure:: /recipes/figures/schlund20jgr/map_prediction_output_error___GBRT_abs.png + :align: center + :width: 50% + + Corresponding error of the GBRT-based prediction of the absolute GPP at the + end of the 21st century (considering errors in the MLR model and errors in + the predictors). + +.. _fig_schlund20jgr_5: +..
figure:: /recipes/figures/schlund20jgr/rmse_plot.png + :align: center + :width: 50% + + Boxplot of the root mean square error of prediction (RMSEP) distributions + for six different statistical models used to predict future absolute GPP + (2091-2100) using a leave-one-model-out cross-validation approach. The + distribution for each statistical model contains seven points (black dots, + one for each climate model used as truth) and is represented in the + following way: the lower and upper limit of the blue boxes correspond to the + 25% and 75% quantiles, respectively. The central line in the box shows the + median, the black "x" the mean of the distribution. The whiskers outside the + box represent the range of the distribution. + +.. _fig_schlund20jgr_6: +.. figure:: /recipes/figures/schlund20jgr/feature_importance.png + :align: center + :width: 50% + + Global feature importance of the GBRT model for prediction of the absolute + GPP at the end of the 21st century (2091-2100). + +.. _fig_schlund20jgr_7: +.. figure:: /recipes/figures/schlund20jgr/residuals_distribution.png + :align: center + :width: 50% + + Distribution of the residuals of the GBRT model for the prediction of + absolute GPP at the end of the 21st century (2091-2100) for the training + data (blue) and test data excluded from training (green). + +.. _fig_schlund20jgr_8: +.. figure:: /recipes/figures/schlund20jgr/training_progress.png + :align: center + :width: 50% + + Training progress of the GBRT model for the prediction of absolute GPP at + the end of the 21st century (2091-2100) evaluated as normalized root mean + square error on the training data (blue) and test data excluded from + training (green). diff --git a/doc/sphinx/source/recipes/recipe_sea_surface_salinity.rst b/doc/sphinx/source/recipes/recipe_sea_surface_salinity.rst new file mode 100644 index 0000000000..688502bf03 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_sea_surface_salinity.rst @@ -0,0 +1,220 @@ +.. _recipes_sea_surface_salinity: + +Sea Surface Salinity Evaluation +=============================== + +Overview +-------- + +This recipe compares the regional means of sea surface salinity with a +reference dataset (ESACCI-SEA-SURFACE-SALINITY v1 or v2 by default). +To do this, the recipe generates plots for the time series of each region and +a radar plot showing (i) the mean state bias, and (ii) the ratio between the +simulated and observed standard deviations of different regional averages of +sea surface salinity, calculated in the temporal window for which observations +and simulations overlap. + + +Preprocessor requirements: +-------------------------- + +The recipe is written in a way that should make it possible (although this is +not tested) to use it for other variables and datasets, even for more than one +at a time. The diagnostic only expects variables with dimensions `time` and `depth_id`, +and it does not assume any other constraints. + +It is therefore mandatory to keep the `extract_shape` preprocessor for more than +one region, together with some form of region operation (`mean`, `max`, `min`, ...) +to collapse the `latitude` and `longitude` coordinates (see the sketch below). +If you want to try variables that have extra dimensions (e.g. `depth`), you must +add an extra preprocessor call to collapse them (e.g. `depth_integration`). + +The recipe can be used with any shapefile. As it is, it uses the IHO Sea Areas +(version 3) downloaded from https://marineregions.org/downloads.php, but any +shapefile containing marine regions can be used.
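+
+A minimal preprocessor sketch along these lines (the shapefile name, region
+ids and preprocessor name are illustrative, not the exact contents of
+``recipe_sea_surface_salinity.yml``):
+
+.. code-block:: yaml
+
+   preprocessors:
+     regional_means:
+       extract_shape:
+         # Any shapefile with marine regions works; IHO Sea Areas v3 is the default choice.
+         shapefile: World_Seas_IHO_v3.shp
+         method: contains
+         decomposed: true
+         ids:
+           - Arctic Ocean
+           - North Atlantic Ocean
+       area_statistics:
+         # Collapses latitude and longitude; this is the "region operation".
+         operator: mean
+
+Here ``area_statistics`` plays the role of the region operation mentioned
+above; ``min`` or ``max`` would work equally well.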
+ +Any number of regions can be chosen, although plots may look odd if too few or too many are selected. + +Regions available in the IHO Sea Areas file: +-------------------------------------------- + +- Adriatic Sea +- Aegean Sea +- Alboran Sea +- Andaman or Burma Sea +- Arabian Sea +- Arafura Sea +- Arctic Ocean +- Baffin Bay +- Balearic (Iberian Sea) +- Bali Sea +- Baltic Sea +- Banda Sea +- Barentsz Sea +- Bass Strait +- Bay of Bengal +- Bay of Biscay +- Bay of Fundy +- Beaufort Sea +- Bering Sea +- Bismarck Sea +- Black Sea +- Bristol Channel +- Caribbean Sea +- Celebes Sea +- Celtic Sea +- Ceram Sea +- Chukchi Sea +- Coral Sea +- Davis Strait +- East Siberian Sea +- Eastern China Sea +- English Channel +- Flores Sea +- Great Australian Bight +- Greenland Sea +- Gulf of Aden +- Gulf of Alaska +- Gulf of Aqaba +- Gulf of Boni +- Gulf of Bothnia +- Gulf of California +- Gulf of Finland +- Gulf of Guinea +- Gulf of Mexico +- Gulf of Oman +- Gulf of Riga +- Gulf of St. Lawrence +- Gulf of Suez +- Gulf of Thailand +- Gulf of Tomini +- Halmahera Sea +- Hudson Bay +- Hudson Strait +- Indian Ocean +- Inner Seas off the West Coast of Scotland +- Ionian Sea +- Irish Sea and St. George's Channel +- Japan Sea +- Java Sea +- Kara Sea +- Kattegat +- Labrador Sea +- Laccadive Sea +- Laptev Sea +- Ligurian Sea +- Lincoln Sea +- Makassar Strait +- Malacca Strait +- Mediterranean Sea - Eastern Basin +- Mediterranean Sea - Western Basin +- Molukka Sea +- Mozambique Channel +- North Atlantic Ocean +- North Pacific Ocean +- North Sea +- Norwegian Sea +- Persian Gulf +- Philippine Sea +- Red Sea +- Rio de La Plata +- Savu Sea +- Sea of Azov +- Sea of Marmara +- Sea of Okhotsk +- Seto Naikai or Inland Sea +- Singapore Strait +- Skagerrak +- Solomon Sea +- South Atlantic Ocean +- South China Sea +- South Pacific Ocean +- Southern Ocean +- Strait of Gibraltar +- Sulu Sea +- Tasman Sea +- The Coastal Waters of Southeast Alaska and British Columbia +- The Northwestern Passages +- Timor Sea +- Tyrrhenian Sea +- White Sea +- Yellow Sea + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + + * recipe_sea_surface_salinity.yml + +Diagnostics are stored in diag_scripts/sea_surface_salinity/ + + * compare_salinity.py: plots time series for each region and generates the + radar plot. + + +User settings in recipe +----------------------- + +#. compare_salinity.py + + *Required settings for script* + + none + + *Optional settings for script* + + none + + *Required settings for variables* + + * ref_model: name of reference data set + + *Optional settings for variables* + + none + + +Variables +--------- + +* sos (ocean, monthly, time latitude longitude) + + +Observations and reformat scripts +--------------------------------- + +* ESACCI-SEA-SURFACE-SALINITY (sos) + + +References +---------- + +* Diagnostic: please contact authors + +* ESACCI-SEA-SURFACE-SALINITY dataset: Boutin, J., J.-L. Vergely, J. Koehler, + F. Rouffi, N. Reul: ESA Sea Surface Salinity Climate Change Initiative + (Sea_Surface_Salinity_cci): Version 1.8 data collection. Centre for + Environmental Data Analysis, 25 November 2019. doi: + 10.5285/9ef0ebf847564c2eabe62cac4899ec41. + http://dx.doi.org/10.5285/9ef0ebf847564c2eabe62cac4899ec41 + + +Example plots +------------- + +..
figure:: /recipes/figures/sea_surface_salinity/radar_bias.png + :align: center + + Radar plot showing the mean state biases (simulation minus observations) + for the regional averages of sea surface salinity in the selected + ocean basins and seas. + +.. figure:: /recipes/figures/sea_surface_salinity/radar_std.png + :align: center + + Radar plot showing the ratio between the simulated and observed standard deviations + of the regional averages of sea surface salinity in the selected + ocean basins and seas. diff --git a/doc/sphinx/source/recipes/recipe_seaborn.rst b/doc/sphinx/source/recipes/recipe_seaborn.rst new file mode 100644 index 0000000000..4eb3c6571c --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_seaborn.rst @@ -0,0 +1,66 @@ +.. _recipes_seaborn_diag: + +Seaborn Diagnostics +=================== + +Overview +-------- + +These recipes showcase the use of the Seaborn diagnostic, which provides a +high-level interface to `Seaborn <https://seaborn.pydata.org>`__ for ESMValTool +recipes. + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + +* recipe_seaborn.yml + +Diagnostics are stored in diag_scripts/ + +* :ref:`seaborn_diag.py ` + + +Variables +--------- + +Arbitrary variables are supported. + + +Observations and reformat scripts +--------------------------------- + +Arbitrary datasets are supported. + + +References +---------- + +* Waskom, M. L. (2021), seaborn: statistical data visualization, Journal of + Open Source Software, 6(60), 3021, doi:10.21105/joss.03021. + + +Example plots +------------- + +.. _fig_seaborn_1: +.. figure:: /recipes/figures/seaborn/ta_vs_lat.jpg + :align: center + :width: 50% + + Monthly and zonal mean temperatures vs. latitude in the period 1991-2014 for + two Earth system models (CESM2-WACCM and GFDL-ESM4). + Colors visualize the corresponding pressure levels. + +.. _fig_seaborn_2: +.. figure:: /recipes/figures/seaborn/regional_pr_hists.jpg + :align: center + :width: 50% + + Spatiotemporal distribution of daily precipitation in the period 2005-2014 + for six IPCC AR6 regions simulated by two Earth system models (CESM2-WACCM + and GFDL-ESM4). + Each day in each grid cell in the corresponding regions is considered with + equal weight. diff --git a/doc/sphinx/source/recipes/recipe_seaice.rst b/doc/sphinx/source/recipes/recipe_seaice.rst new file mode 100644 index 0000000000..4d2348d498 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_seaice.rst @@ -0,0 +1,155 @@ +.. _nml_seaice: + +Sea Ice +======= + +Overview +-------- +The sea ice diagnostics include: + +(1) time series of Arctic and Antarctic sea ice area and extent + (calculated as the total area (km\ :sup:`2`\) of grid cells with sea ice concentrations + (sic) of at least 15%; see the sketch after this list). +(2) ice extent trend distributions for the Arctic in September and the Antarctic in February. +(3) calculation of the year of near disappearance of Arctic sea ice. +(4) scatter plots of (a) historical trend in September Arctic sea ice extent (SSIE) vs + historical long-term mean SSIE; (b) historical SSIE mean vs 1st year of disappearance + (YOD) in RCP8.5; (c) historical SSIE trend vs YOD in RCP8.5.
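+
+Written out, with :math:`A_i` the area and :math:`c_i` the sea ice
+concentration of grid cell :math:`i`, the two quantities in (1) follow the
+conventional definitions (a sketch of the standard formulas, not code taken
+from the diagnostics):
+
+.. math::
+
+   \text{extent} = \sum_{i:\, c_i \geq 15\%} A_i, \qquad
+   \text{area} = \sum_{i} c_i \, A_i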
+ +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + +* recipe_seaice.yml + +Diagnostics are stored in diag_scripts/seaice/ + +* seaice_aux.ncl: contains functions for calculating sea ice area or extent from sea ice + concentration, and the first year of disappearance +* seaice_ecs.ncl: scatter plots of mean/trend of historical September Arctic sea ice extent + vs 1st year of disappearance (RCP8.5) (similar to IPCC AR5 Chapter 12, Fig. 12.31a) +* seaice_trends.ncl: calculates ice extent trend distributions + (similar to IPCC AR5 Chapter 9, Fig. 9.24c/d) +* seaice_tsline.ncl: creates time series line plots of total sea ice area and extent (accumulated) + for northern and southern hemispheres with optional multi-model mean and standard deviation. One + value is used per model per year, either the annual mean or the mean value of a selected month + (similar to IPCC AR5 Chapter 9, Fig. 9.24a/b) +* seaice_yod.ncl: calculation of the year of near disappearance of Arctic sea ice + +User settings in recipe +----------------------- +#. Script seaice_ecs.ncl + + *Required settings (scripts)* + + * hist_exp: name of historical experiment (string) + * month: selected month (1, 2, ..., 12) or annual mean ("A") + * rcp_exp: name of RCP experiment (string) + * region: region to be analyzed ("Arctic" or "Antarctic") + + *Optional settings (scripts)* + + * fill_pole_hole: fill observational hole at the North Pole (default: False) + * styleset: color style (e.g. "CMIP5") + + *Optional settings (variables)* + + * reference_dataset: reference dataset + +#. Script seaice_trends.ncl + + *Required settings (scripts)* + + * month: selected month (1, 2, ..., 12) or annual mean ("A") + * region: region to be analyzed ("Arctic" or "Antarctic") + + *Optional settings (scripts)* + + * fill_pole_hole: fill observational hole at the North Pole (default: False) + + *Optional settings (variables)* + + * ref_model: array of references plotted as vertical lines + +#. Script seaice_tsline.ncl + + *Required settings (scripts)* + + * region: Arctic, Antarctic + * month: annual mean (A), or month number (3 = March for Antarctic; 9 = September for Arctic) + + *Optional settings (scripts)* + + * styleset: for plot_type cycle only (cmip5, cmip6, default) + * multi_model_mean: plot multi-model mean and standard deviation (default: False) + * EMs_in_lg: create a legend label for individual ensemble members (default: False) + * fill_pole_hole: fill polar hole (typically in satellite data) with sic = 1 (default: False) + +#.
Script seaice_yod.ncl + + *Required settings (scripts)* + + * month: selected month (1, 2, ..., 12) or annual mean ("A") + * region: region to be analyzed ("Arctic" or "Antarctic") + + *Optional settings (scripts)* + + * fill_pole_hole: fill observational hole at the North Pole (default: False) + * wgt_file: netCDF containing pre-determined model weights + + *Optional settings (variables)* + + * ref_model: array of references plotted as vertical lines + +Variables +--------- + +* sic (ocean-ice, monthly mean, longitude latitude time) +* areacello (fx, longitude latitude) + +Observations and reformat scripts +--------------------------------- + +*Note: (1) obs4MIPs data can be used directly without any preprocessing; (2) use `esmvaltool data info DATASET` or see the headers of the cmorization scripts (in esmvaltool/cmorizers/data/formatters/datasets/) for download instructions for non-obs4MIPs data.* + +* HadISST (sic - esmvaltool/cmorizers/data/formatters/datasets/hadisst.ncl) + +References +---------- + +* Massonnet, F. et al., The Cryosphere, 6, 1383-1394, doi: 10.5194/tc-6-1383-2012, 2012. +* Stroeve, J. et al., Geophys. Res. Lett., 34, L09501, doi:10.1029/2007GL029703, 2007. + +Example plots +------------- + +.. figure:: /recipes/figures/seaice/trend_sic_extend_Arctic_September_histogram.png + :align: center + :width: 9cm + + Sea ice extent trend distribution for the Arctic in September + (similar to IPCC AR5 Chapter 9, Fig. 9.24c). [seaice_trends.ncl] + +.. figure:: /recipes/figures/seaice/extent_sic_Arctic_September_1960-2005.png + :align: center + :width: 12cm + + Time series of total sea ice area and extent (accumulated) for the Arctic in September + including multi-model mean and standard deviation (similar to IPCC AR5 Chapter 9, Fig. 9.24a). + [seaice_tsline.ncl] + +.. figure:: /recipes/figures/seaice/timeseries_rcp85.png + :align: center + :width: 12cm + + Time series of September Arctic sea ice extent for individual CMIP5 models, + multi-model mean and multi-model standard deviation, year of disappearance + (similar to IPCC AR5 Chapter 12, Fig. 12.31e). [seaice_yod.ncl] + +.. figure:: /recipes/figures/seaice/SSIE-MEAN_vs_YOD_sic_extend_Arctic_September_1960-2100.png + :align: center + :width: 9cm + + Scatter plot of mean historical September Arctic sea ice extent vs 1st year of disappearance + (RCP8.5) (similar to IPCC AR5 Chapter 12, Fig. 12.31a). [seaice_ecs.ncl] diff --git a/doc/sphinx/source/recipes/recipe_seaice_drift.rst b/doc/sphinx/source/recipes/recipe_seaice_drift.rst new file mode 100644 index 0000000000..a3207bce0b --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_seaice_drift.rst @@ -0,0 +1,81 @@ +.. _recipes_seaice_drift: + +Seaice drift +============ + +Overview +-------- +This recipe allows users to quantify the relationships between Arctic sea-ice drift +speed, concentration and thickness (Docquier et al., 2017). A decrease in +concentration or thickness, as observed in recent decades in the Arctic Ocean +(Kwok, 2018; Stroeve and Notz, 2018), leads to reduced sea-ice strength and +internal stress, and thus larger sea-ice drift speed (Rampal et al., 2011). +This in turn could lead to higher export of sea ice out of the Arctic Basin, +resulting in lower sea-ice concentration and further thinning. Olason and +Notz (2014) investigate the relationships between Arctic sea-ice drift speed, +concentration and thickness using satellite and buoy observations.
+They show that both seasonal and recent long-term changes in sea ice drift are +primarily correlated to changes in sea ice concentration and thickness. +This recipe allows these relationships to be quantified in climate models. + +In this recipe, four process-based metrics are computed based on the multi-year +monthly mean sea-ice drift speed, concentration and thickness, averaged over +the Central Arctic. + +The first metric is the ratio between the modelled drift-concentration slope +and the observed drift-concentration slope. The second metric is similar to the +first one, except that sea-ice thickness is involved instead of sea-ice +concentration. The third metric is the normalised distance between the model +and observations in the drift-concentration space. The fourth metric is similar +to the third one, except that sea-ice thickness is involved instead of sea-ice +concentration. + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + + * recipe_seaice_drift.yml + + +Diagnostics are stored in diag_scripts/seaice_drift/ + + * seaice_drift.py: Compute metrics and plot results + + +User settings in recipe +----------------------- + +#. Script seaice_drift.py + + *Required settings (scripts)* + + One of the following two combinations is required: + + 1. Latitude threshold: + + * latitude_threshold: metric will be computed north of this latitude value + + 2. Polygon: + + * polygon: metric will be computed inside the given polygon. The polygon is defined as a list of (lon, lat) tuples + + * polygon_name: name of the region defined by the polygon + + +Variables +--------- + +* sispeed, sithick, siconc (daily) + +Example plots +------------- + +.. _fig_seaice_drift: +.. figure:: /recipes/figures/seaice_drift/drift-strength.png + :align: center + +   Scatter plots of modelled (red) and observed (blue) monthly mean +   sea-ice drift speed against sea-ice concentration (left panel) and sea-ice +   thickness (right panel) temporally averaged over the period 1979–2005 and +   spatially averaged over the SCICEX box. diff --git a/doc/sphinx/source/recipes/recipe_seaice_feedback.rst b/doc/sphinx/source/recipes/recipe_seaice_feedback.rst new file mode 100644 index 0000000000..8f5da55be8 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_seaice_feedback.rst @@ -0,0 +1,76 @@ +.. _recipes_seaice_feedback: + +Seaice feedback +=============== + + +Overview +-------- + +In this recipe, one process-based diagnostic named the +Ice Formation Efficiency (IFE) is computed based on monthly mean +sea-ice volume estimated north of 80°N. The choice of this domain +is motivated by the desire to minimize the influence of dynamic +processes but also by the availability of sea-ice thickness measurements. +The diagnostic intends to evaluate the strength of the negative sea-ice +thickness/growth feedback, which causes late-summer negative anomalies +in sea-ice area and volume to be partially recovered during the next +growing season. A chief cause behind the existence of this feedback is +the non-linear inverse dependence between heat conduction fluxes and +sea-ice thickness, which implies that thin sea ice grows faster than thick +sea ice. To estimate the strength of that feedback, anomalies of the annual +minimum of sea-ice volume north of 80°N are first estimated. Then, +the increase in sea-ice volume until the next annual maximum is computed +for each year. The IFE is defined as the regression of this ice volume +production onto the baseline summer volume anomaly.
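+
+Schematically, writing :math:`V'_{\min}(y)` for the anomaly of the annual
+minimum sea-ice volume north of 80°N in year :math:`y` and
+:math:`\Delta V(y)` for the subsequent ice volume production up to the next
+annual maximum (the notation is chosen here purely for illustration), the IFE
+is the regression slope :math:`b` in
+
+.. math::
+
+   \Delta V(y) = a + b \, V'_{\min}(y) + \varepsilon(y), \qquad \text{IFE} = b
+
+A negative IFE expresses the feedback described above: years with anomalously
+low summer volume tend to be followed by anomalously large ice production.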
+ + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + + * recipe_seaice_feedback.yml + +Diagnostics are stored in diag_scripts/seaice_feedback/ + + * negative_seaice_feedback.py: scatterplot showing the feedback between + sea-ice volume and sea-ice growth + + +User settings +------------- + +#. Script negative_seaice_feedback.py + + *Optional settings for script* + + * plot: dictionary containing plot options: + + - point_color: color of the plot points. (Default: black) + - point_size: size of the plot points. (Default: 10) + - show_values: show numerical values of feedback in plot. (Default: True) + +Variables +--------- + +* sit (seaice, monthly mean, time latitude longitude) + + + +References +---------- + +* Massonnet, F., Vancoppenolle, M., Goosse, H., Docquier, D., Fichefet, T. and Blanchard-Wrigglesworth, E., 2018. + Arctic sea-ice change tied to its mean state through thermodynamic processes. Nature Climate Change, 8: 599-603. + +Example plots +------------- + +.. _fig_negative_feedback_1: +.. figure:: /recipes/figures/seaice_feedback/negative_feedback.png + :align: center + :width: 14cm + + Sea-ice negative feedback values (CMIP5 historical experiment 1979-2004). + diff --git a/doc/sphinx/source/recipes/recipe_shapeselect.rst b/doc/sphinx/source/recipes/recipe_shapeselect.rst index 61b869eb47..12da974c28 100644 --- a/doc/sphinx/source/recipes/recipe_shapeselect.rst +++ b/doc/sphinx/source/recipes/recipe_shapeselect.rst @@ -1,3 +1,5 @@ +.. _recipes_shapeselect: + Shapeselect =========== @@ -27,7 +29,7 @@ User settings in recipe *Required settings (scripts)* - * shapefile: path to the user provided shapefile. A relative path is relative to the auxiliary_data_dir as configured in config-user.yml. + * shapefile: path to the user provided shapefile. A relative path is relative to the :ref:`configuration option ` ``auxiliary_data_dir``. + * weighting_method: the preferred weighting method 'mean_inside' - mean of all grid points inside polygon; 'representative' - one point inside or close to the polygon is used to represent the complete area. @@ -35,3 +37,17 @@ User settings in recipe * write_netcdf: true or false to write output as NetCDF or not. +Variables +--------- + +* pr, tas (daily) + +Example plots +------------- + +.. _fig_shapeselect: +.. figure:: /recipes/figures/shapeselect/shapeselect.png + :align: center + :width: 14cm + + Example of the selection of model grid points falling within (blue pluses) and outside (red dots) a provided shapefile (blue contour). diff --git a/doc/sphinx/source/recipes/recipe_smpi.rst b/doc/sphinx/source/recipes/recipe_smpi.rst new file mode 100644 index 0000000000..a5d004c1a1 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_smpi.rst @@ -0,0 +1,92 @@ +.. _recipes_smpi: + +Single Model Performance Index (SMPI) +===================================== + +Overview +-------- + +This diagnostic calculates the Single Model Performance Index (SMPI) following Reichler and Kim (2008). The SMPI (called "I\ :sup:`2`") is based on the comparison of several different climate variables (atmospheric, surface and oceanic) between climate model simulations and observations or reanalyses, and it focuses on the validation of the time-mean state of climate. For I\ :sup:`2` to be determined, the differences between the climatological mean of each model variable and observations at each of the available data grid points are calculated, and scaled to the interannual variance from the validating observations.
This interannual variability is determined with a bootstrapping method (random selection with replacement) that creates a large synthetic ensemble of observational climatologies. The results are then scaled to the average error from a reference ensemble of models, and in a final step the mean over all climate variables and one model is calculated. The plot shows the I\ :sup:`2` values for each model (orange circles) and the multi-model mean (black circle), with the diameter of each circle representing the range of I\ :sup:`2` values encompassed by the 5th and 95th percentiles of the bootstrap ensemble. The I\ :sup:`2` values vary around one, with values greater than one for underperforming models, and values less than one for more accurate models. + +Note: The SMPI diagnostic needs all indicated variables from all added models for exactly the same time period to be calculated correctly. If one model does not provide a specific variable, either that model cannot be added to the SMPI calculations, or the missing variable has to be removed from the diagnostics altogether. + +Available recipes and diagnostics +----------------------------------- + +Recipes are stored in recipes/ + +* recipe_smpi.yml +* recipe_smpi_4cds.yml + +Diagnostics are stored in diag_scripts/perfmetrics/ + +* main.ncl: calculates and (optionally) plots annual/seasonal cycles, zonal means, lat-lon fields and time-lat-lon fields. The calculated fields can also be plotted as differences w.r.t. a given reference dataset. main.ncl also calculates RMSD, bias and Taylor metrics. Input data have to be regridded to a common grid in the preprocessor. Each plot type is created by a separate routine, as detailed below. +* cycle_zonal.ncl: calculates the single model performance index (Reichler and Kim, 2008). It requires fields precalculated by main.ncl. +* collect.ncl: collects the metrics previously calculated by cycle_latlon.ncl and passes them to the plotting functions. + +User settings +------------- + +#. perfmetrics/main.ncl + + *Required settings for script* + + * plot_type: only "cycle_latlon (time, lat, lon)" and "cycle_zonal (time, plev, lat)" available for SMPI; usage is defined in the recipe and depends on the variable used (2D variable: cycle_latlon, 3D variable: cycle_zonal) + * time_avg: type of time average (only "yearly" allowed for SMPI, any other settings are not supported for this diagnostic) + * region: selected region (only "global" allowed for SMPI, any other settings are not supported for this diagnostic) + * normalization: metric normalization ("CMIP5" for analysis of CMIP5 simulations; to be adjusted accordingly for a different CMIP phase) + * calc_grading: calculates grading metrics (has to be set to "true" in the recipe) + * metric: chosen grading metric(s) (if calc_grading is True; has to be set to "SMPI") + * smpi_n_bootstrap: number of bootstrapping members used to determine uncertainties on model-reference differences (typical number of bootstrapping members: 100) + + *Required settings for variables* + + * reference_dataset: reference dataset to compare with (usually the observations). + +These settings are passed to the other scripts by main.ncl, depending on the selected plot_type. + +#.
collect.ncl + + *Required settings for script* + + * metric: selected metric (has to be "SMPI") + + +Variables +--------- + +* hfds (ocean, monthly mean, longitude latitude time) +* hus (atmos, monthly mean, longitude latitude lev time) +* pr (atmos, monthly mean, longitude latitude time) +* psl (atmos, monthly mean, longitude latitude time) +* sic (ocean-ice, monthly mean, longitude latitude time) +* ta (atmos, monthly mean, longitude latitude lev time) +* tas (atmos, monthly mean, longitude latitude time) +* tauu (atmos, monthly mean, longitude latitude time) +* tauv (atmos, monthly mean, longitude latitude time) +* tos (ocean, monthly mean, longitude latitude time) +* ua (atmos, monthly mean, longitude latitude lev time) +* va (atmos, monthly mean, longitude latitude lev time) + + +Observations and reformat scripts +--------------------------------- + +The following list shows the currently used observational data sets for this recipe with their variable names and the reference to their respective reformat scripts in parentheses. Please note that obs4MIPs data can be used directly without any reformatting. For non-obs4MIPs data use `esmvaltool data info DATASET` or see headers of cmorization scripts for downloading and processing instructions. + +* ERA-Interim (hfds, hus, psl, ta, tas, tauu, tauv, ua, va - esmvaltool/cmorizers/data/formatters/datasets/era-interim.py) +* HadISST (sic, tos - esmvaltool/cmorizers/data/formatters/datasets/hadisst.ncl) +* GPCP-V2.2 (pr - obs4MIPs) + +References +---------- + +* Reichler, T. and J. Kim, How well do coupled models simulate today's climate? Bull. Amer. Meteor. Soc., 89, 303-311, doi: 10.1175/BAMS-89-3-303, 2008. + +Example plots +------------- + +.. figure:: /recipes/figures/smpi/reichlerkim08bams_smpi.png + :width: 70 % + + Performance index I\ :sup:`2` for individual models (circles). Circle sizes indicate the length of the 95% confidence intervals. The black circle indicates the I\ :sup:`2` of the multi-model mean (similar to Reichler and Kim (2008), Figure 1). diff --git a/doc/sphinx/source/recipes/recipe_snowalbedo.rst b/doc/sphinx/source/recipes/recipe_snowalbedo.rst new file mode 100644 index 0000000000..5f3e5ae158 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_snowalbedo.rst @@ -0,0 +1,132 @@ +.. _recipes_snowalbedo: + +Emergent constraint on snow-albedo effect +========================================= + +Overview +-------- + +The recipe recipe_snowalbedo.yml computes the springtime snow-albedo +feedback values in climate change versus springtime values in the seasonal +cycle in transient climate change experiments following Hall and Qu (2006). +The strength of the snow-albedo effect is quantified by the variation in net +incoming shortwave radiation (Q) with surface air temperature (T\ :sub:`s`\) due +to changes in surface albedo :math:`\alpha_s`: + +.. math:: + + \left( \frac{\partial Q}{\partial T_s} \right) = -I_t \cdot \frac{\partial \alpha_p}{\partial \alpha_s} \cdot \frac{\Delta \alpha_s}{\Delta T_s} + +where :math:`I_t` is the incoming solar radiation and :math:`\alpha_p` the planetary albedo (Hall and Qu, 2006). + +The diagnostic produces scatterplots of simulated springtime +:math:`\Delta \alpha_s`/:math:`\Delta T_s` values in climate change (ordinate) +vs. simulated springtime :math:`\Delta \alpha_s`/:math:`\Delta T_s` values in the +seasonal cycle (abscissa). + +Ordinate values: the change in April :math:`\alpha_s` (future projection - historical) +averaged over NH land masses poleward of 30°N is divided by the change in +April T\ :sub:`s` (future projection - historical) averaged over the same region.
+The change in :math:`\alpha_s` (or T\ :sub:`s`) is defined as the difference between +22nd-century-mean :math:`\alpha_s` (T\ :sub:`s`) and 20th-century-mean :math:`\alpha_s`. Values of +:math:`\alpha_s` are weighted by April incoming insolation (I\ :sub:`t`) prior to averaging. + +Abscissa values: the seasonal cycle :math:`\Delta \alpha_s`/:math:`\Delta T_s` +values, based on 20th century climatological means, are calculated by +dividing the difference between April and May :math:`\alpha_s` averaged over NH continents +poleward of 30°N by the difference between April and May T\ :sub:`s` averaged over the +same area. Values of :math:`\alpha_s` are weighted by April incoming insolation prior to +averaging. + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + + * recipe_snowalbedo.yml + +Diagnostics are stored in diag_scripts/emergent_constraints/ + + * snowalbedo.ncl: springtime snow-albedo feedback values vs. seasonal cycle + + +User settings in recipe +----------------------- + +#. Script snowalbedo.ncl + + *Required settings for script* + + * exp_presentday: name of present-day experiment (e.g. "historical") + * exp_future: name of climate change experiment (e.g. "rcp45") + + *Optional settings for script* + + * diagminmax: observational uncertainty (min and max) + * legend_outside: create extra file with legend (true, false) + * styleset: e.g. "CMIP5" (if not set, this diagnostic will create its own + color table and symbols for plotting) + * suffix: string to be added to output filenames + * xmax: upper limit of x-axis (default = automatic) + * xmin: lower limit of x-axis (default = automatic) + * ymax: upper limit of y-axis (default = automatic) + * ymin: lower limit of y-axis (default = automatic) + + *Required settings for variables* + + * ref_model: name of reference data set + + *Optional settings for variables* + + none + + +Variables +--------- + +* tas (atmos, monthly mean, longitude latitude time) +* rsdt (atmos, monthly mean, longitude latitude time) +* rsuscs, rsdscs (atmos, monthly mean, longitude latitude time) + + +Observations and reformat scripts +--------------------------------- + +* ERA-Interim (tas - esmvaltool/cmorizers/data/formatters/datasets/era_interim.py) +* ISCCP-FH (rsuscs, rsdscs, rsdt - esmvaltool/cmorizers/data/formatters/datasets/isccp_fh.ncl) + + +References +---------- + +* Flato, G., J. Marotzke, B. Abiodun, P. Braconnot, S.C. Chou, W. Collins, P. + Cox, F. Driouech, S. Emori, V. Eyring, C. Forest, P. Gleckler, E. Guilyardi, + C. Jakob, V. Kattsov, C. Reason and M. Rummukainen, 2013: Evaluation of + Climate Models. In: Climate Change 2013: The Physical Science Basis. + Contribution of Working Group I to the Fifth Assessment Report of the + Intergovernmental Panel on Climate Change [Stocker, T.F., D. Qin, G.-K. + Plattner, M. Tignor, S.K. Allen, J. Boschung, A. Nauels, Y. Xia, V. Bex and + P.M. Midgley (eds.)]. Cambridge University Press, Cambridge, United Kingdom + and New York, NY, USA. + +* Hall, A., and X. Qu, 2006: Using the current seasonal cycle to constrain + snow albedo feedback in future climate change, Geophys. Res. Lett., 33, + L03502, doi:10.1029/2005GL025127. + + +Example plots +------------- + +.. figure:: /recipes/figures/ipccwg1ar5ch9/fig-9-45a.png + :align: center + + Scatterplot of springtime snow-albedo effect values in climate + change vs.
springtime :math:`\Delta \alpha_s`/:math:`\Delta T_s` values in + the seasonal cycle in transient climate change experiments (CMIP5 historical + experiments: 1901-2000, RCP4.5 experiments: 2101-2200). Similar to IPCC AR5 + Chapter 9 (Flato et al., 2013), Figure 9.45a. diff --git a/doc/sphinx/source/recipes/recipe_spei.rst b/doc/sphinx/source/recipes/recipe_spei.rst index ff71c4a594..ff8a2b8e59 100644 --- a/doc/sphinx/source/recipes/recipe_spei.rst +++ b/doc/sphinx/source/recipes/recipe_spei.rst @@ -1,9 +1,11 @@ -SPEI -==== +.. _recipes_spei: + +Standardized Precipitation-Evapotranspiration Index (SPEI) +========================================================== Overview -------- -Droughts can be separated into three main types: meteorological, hydrological, and agricultural drought. +Droughts can be separated into three main types: meteorological, hydrological, and agricultural drought. Common for all types is that a drought needs to be put in context of local and seasonal characteristics, i.e. a drought should not be defined with an absolute threshold, but as an anomalous condition. @@ -22,9 +24,9 @@ Recipes are stored in recipes/ Diagnostics are stored in diag_scripts/droughtindex/ - * diag_spi.r: calculate the SPI index + * diag_spi.R: calculate the SPI index - * diag_spei.r: calculate the SPEI index + * diag_spei.R: calculate the SPEI index User settings diff --git a/doc/sphinx/source/recipes/recipe_tcr.rst b/doc/sphinx/source/recipes/recipe_tcr.rst new file mode 100644 index 0000000000..58ea9e639b --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_tcr.rst @@ -0,0 +1,99 @@ +.. _recipes_tcr: + +Transient Climate Response +========================== + +Overview +-------- + + +The transient climate response (TCR) is defined as the global and annual mean +surface air temperature anomaly in the *1pctCO2* scenario (1% CO\ :sub:`2` +increase per year) for a 20 year period centered at the time of CO\ :sub:`2` +doubling, i.e. using the years 61 to 80 after the start of the simulation. We +calculate the temperature anomaly by subtracting a linear fit of the +*piControl* run for all 140 years of the *1pctCO2* experiment prior to the TCR +calculation (see `Gregory and Forster, 2008`_). + +.. _`Gregory and Forster, 2008`: https://agupubs.onlinelibrary.wiley.com/doi/10.1029/2008JD010405 + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + + * recipe_tcr.yml + + +Diagnostics are stored in diag_scripts/ + + * climate_metrics/tcr.py + * climate_metrics/create_barplot.py + * climate_metrics/create_scatterplot.py + + +User settings in recipe +----------------------- + +* Preprocessor + + * ``area_statistics`` (*operation: mean*): Calculate global mean. + +.. _tcr.py: + +* Script climate_metrics/tcr.py + + * ``calculate_mmm``, *bool*, optional (default: ``True``): Calculate + multi-model mean TCR. + * ``plot``, *bool*, optional (default: ``True``): Plot temperature vs. time. + * ``read_external_file``, *str*, optional: Read TCR from external file. The + path can be given relative to this diagnostic script or as absolute path. + * ``savefig_kwargs``, *dict*, optional: Keyword arguments for + :func:`matplotlib.pyplot.savefig`. + * ``seaborn_settings``, *dict*, optional: Options for + :func:`seaborn.set_theme` (affects all plots). + +* Script climate_metrics/create_barplot.py + + See :ref:`here`. + +* Script climate_metrics/create_scatterplot.py + + See :ref:`here`. 
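+
+Schematically, these options are set in the recipe roughly as follows (a
+hypothetical excerpt; the preprocessor and diagnostic names are illustrative
+and not copied from ``recipe_tcr.yml``):
+
+.. code-block:: yaml
+
+   preprocessors:
+     global_mean:
+       area_statistics:
+         # Computes the global mean required by the TCR calculation.
+         operator: mean
+
+   diagnostics:
+     tcr:
+       variables:
+         tas:
+           preprocessor: global_mean
+       scripts:
+         tcr:
+           script: climate_metrics/tcr.py
+           calculate_mmm: true
+           plot: true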
+
+
+Variables
+---------
+
+* *tas* (atmos, monthly, longitude, latitude, time)
+
+
+Observations and reformat scripts
+---------------------------------
+
+*None*
+
+
+References
+----------
+
+* Gregory, J. M., and P. M. Forster. "Transient climate response estimated from
+  radiative forcing and observed temperature change." Journal of Geophysical
+  Research: Atmospheres 113.D23 (2008).
+
+
+Example plots
+-------------
+
+.. _fig_tcr_1:
+.. figure:: /recipes/figures/tcr/CanESM2.png
+   :align: center
+   :width: 50%
+
+   Time series of the global mean surface air temperature anomaly (relative to
+   the linear fit of the pre-industrial control run) of CanESM2 (CMIP5) for the
+   1% CO\ :sub:`2` increase per year experiment. The horizontal dashed line
+   indicates the transient climate response (TCR) defined as the 20 year
+   average temperature anomaly centered at the time of CO\ :sub:`2` doubling
+   (vertical dashed lines).
diff --git a/doc/sphinx/source/recipes/recipe_tebaldi21esd.rst b/doc/sphinx/source/recipes/recipe_tebaldi21esd.rst
new file mode 100644
index 0000000000..8c85b03a89
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_tebaldi21esd.rst
@@ -0,0 +1,570 @@
+.. _recipe_tebaldi21esd:
+
+Climate model projections from the ScenarioMIP of CMIP6
+=======================================================
+
+
+Overview
+--------
+
+This recipe is implemented in ESMValTool to evaluate the temperature and precipitation changes from the ScenarioMIP of CMIP6. It reproduces the original plots and tables of Tebaldi et al. (2021), https://doi.org/10.5194/esd-12-253-2021
+
+Available recipe and diagnostics
+--------------------------------
+
+The recipe is stored in esmvaltool/recipes/
+
+* recipe_tebaldi21esd.yml
+
+Diagnostics are stored in esmvaltool/diag_scripts/tebaldi21esd/
+
+* calc_timeseries_across_realization_stddev_runave.ncl: computes time series of ensemble spreads (i.e., inter-member standard deviations). One dataset is used for resampling subsets of 10 members.
+* calc_cmip6_and_cmip5_pattern_diff_scaleT.ncl: computes the pattern difference between the CMIP6 multi-model mean change and the CMIP5 multi-model mean change.
+* calc_IAV_hatching.ncl: computes the interannual variability (IAV) over
+  piControl runs, either over the whole time period or in chunks over
+  some years.
+* calc_pattern_diff_scaleT.ncl: computes the map of multi-model mean change
+  scaled by global T change.
+* calc_pattern_stippling_hatching.ncl: computes the map of multi-model mean change
+  with stippling for significant regions and hatching for non-significant
+  regions. Significant is where the multi-model mean change is greater
+  than two standard deviations of the internal variability and where at
+  least 90% of the models agree on the sign of change. Not significant is
+  where the multi-model mean change is less than one standard deviation of
+  internal variability.
+* calc_pattern_intermodel_stddev_scaleT.ncl: computes the intermodel
+  standard deviation of the change scaled by global T change
+* calc_pattern_interscenario_stddev_scaleT.ncl: computes the interscenario
+  standard deviation of the change scaled by global T change
+* calc_pattern_stddev_scaleT.ncl: computes the standard deviation of the
+  change scaled by global T change
+* calc_pattern_comparison.ncl: computes the difference between the patterns of
+  multi-model mean change of two different scenarios (e.g. SSP4-6.0 and SSP4-3.4)
+* calc_table_changes.ncl: computes the changes (mean and spreads) for the
+  specified scenarios and time periods relative to the historical
+  baseline.
+* calc_table_warming_level.ncl: computes the warming level crossing year
+  (mean, five percent and ninety-five percent quantiles of crossing
+  years) for specified scenarios and warming levels.
+* calc_timeseries_mean_spread_runave.ncl: computes
+  multi-model time series of change against the historical baseline for
+  specified scenarios with spread. A running average with specified window
+  is performed.
+* calc_timeseries_mean_spread_ssp4.ncl: computes
+  multi-model time series of change against the historical baseline for
+  ssp434 and ssp460 with spread. A running average with specified
+  window is performed.
+* calc_timeseries_mean_spread_ssp5.ncl: computes multi-model time series
+  of change against the historical baseline for ssp534-over and ssp585 with spread.
+  A running average with specified window is performed.
+* plot_pattern.ncl: plots a pattern.
+* plot_table_changes.ncl: plots a table of the multi-model mean and spread for
+  specified scenarios and periods.
+* plot_table_warming_level.ncl: plots a table of warming level crossing
+  years for specified scenarios (columns) and warming levels (rows).
+* plot_timeseries_mean_spread_3scenarios.ncl: plots time series (multi-model
+  mean and spread) for 3 scenarios.
+* plot_timeseries_mean_spread_constrained_projections.ncl: plots
+  time series with brackets for constrained projections.
+* plot_timeseries_mean_spread.ncl: plots time series (multi-model mean and
+  spread) for 5 scenarios.
+* plot_timeseries_mean_spread_rightaxis_5scen.ncl: plots time series
+  (multi-model mean and spread) for 5 scenarios and with an additional right axis.
+* plot_timeseries_mean_spread_ssp4.ncl: plots time series for two ssp4
+  scenarios.
+* plot_timeseries_mean_spread_ssp5.ncl: plots time series for two ssp5
+  scenarios.
+* plot_timeseries_across_realization_stddev_runave.ncl: plots time series of
+  inter-member standard deviation.
+
+User settings in recipe
+-----------------------
+
+#. Script calc_timeseries_across_realization_stddev_runave.ncl
+
+   *Required settings for script*
+
+   * scenarios: list with scenarios included in figure
+   * syears: list with start years in time periods (e.g. start of historical
+     period and SSPs)
+   * eyears: list with end years in time periods (end year of historical runs
+     and SSPs)
+   * begin_ref_year: start year of reference period (e.g. 1995)
+   * end_ref_year: end year of reference period (e.g. 2014)
+   * n_samples: number of samples of size 10 to draw among all the ensembles
+     of sampled_model
+   * sampled_model: name of dataset on which to sample
+   * runave_window: size of the window used for the centered running average
+
+
+#. Script calc_cmip6_and_cmip5_pattern_diff_scaleT.ncl
+
+   *Required settings for script*
+
+   * scenarios_cmip5: list of CMIP5 scenarios included in figure
+   * scenarios_cmip6: list of CMIP6 scenarios included in figure
+   * periods: list with start years of periods to be included
+   * time_avg: time averaging ("annualclim", "seasonalclim")
+
+   *Optional settings for script*
+
+   * percent: determines if difference is expressed in percent (0, 1, default = 0)
+
+#. Script calc_IAV_hatching.ncl
+
+   *Required settings for script*
+
+   * time_avg: time averaging ("annualclim", "seasonalclim"); needs to
+     be consistent with calc_pattern_stippling_hatching.ncl
+
+   *Optional settings for script*
+
+   * periodlength: length of period in years to calculate variability over,
+     default is the total time period
+   * iavmode: calculate IAV from multi-model mean or save individual models
+     ("each": save individual models, "mmm": multi-model mean, default);
+     needs to be consistent with calc_pattern_stippling_hatching.ncl
+
+#. Script calc_pattern_diff_scaleT.ncl
+
+   *Required settings for script*
+
+   * scenarios: list with scenarios included in figure
+   * periods: list with start years of periods to be included
+   * time_avg: time averaging ("annualclim", "seasonalclim")
+
+#. Script calc_pattern_stippling_hatching.ncl
+
+   *Required settings for script*
+
+   * ancestors: variable and diagnostics that calculated the interannual
+     variability for stippling and hatching
+   * time_avg: time averaging ("annualclim", "seasonalclim"); needs to
+     be consistent with calc_IAV_hatching.ncl
+   * scenarios: list with scenarios to be included
+   * periods: list with start years of periods to be included
+   * labels: list with labels to use in legend depending on scenarios
+   * sig: plot stippling for significance? (True, False)
+   * not_sig: plot hatching for uncertainty? (True, False)
+
+   *Optional settings for script*
+
+   * seasons: list with season indices if time_avg is "seasonalclim" (then
+     seasons is required): DJF: 0, MAM: 1, JJA: 2, SON: 3
+   * iavmode: calculate IAV from multi-model mean or save individual models
+     ("each": save individual models, "mmm": multi-model mean, default);
+     needs to be consistent with calc_IAV_hatching.ncl
+   * percent: determines if difference is expressed in percent (0, 1, default = 0)
+
+#. Script calc_pattern_intermodel_stddev_scaleT.ncl
+
+   *Required settings for script*
+
+   * scenarios: list with scenarios included in figure
+   * periods: list with start years of periods to be included
+   * time_avg: time averaging ("annualclim", "seasonalclim")
+
+#. Script calc_pattern_interscenario_stddev_scaleT.ncl
+
+   *Required settings for script*
+
+   * scenarios: list with scenarios included in figure
+   * periods: list with start years of periods to be included
+   * time_avg: time averaging ("annualclim", "seasonalclim")
+
+#. Script calc_pattern_stddev_scaleT.ncl
+
+   *Required settings for script*
+
+   * scenarios: list with scenarios included in figure
+   * periods: list with start years of periods to be included
+   * time_avg: time averaging ("annualclim", "seasonalclim")
+
+#. Script calc_pattern_comparison.ncl
+
+   *Required settings for script*
+
+   * scenarios: list with two scenarios included in figure. The last scenario
+     is taken as reference. For example, to compute the difference of pattern
+     between SSP4-6.0 and SSP4-3.4, the scenario ssp460 should be the last
+     element of the list.
+   * periods: list with start years of periods to be included
+   * time_avg: time averaging ("annualclim", "seasonalclim")
+   * label: label of periods
+
+#. Script calc_table_changes.ncl
+
+   *Required settings for script*
+
+   * scenarios: list with scenarios included in the table
+   * syears: list with start years of time periods to include in the table
+   * eyears: list with end years of the time periods to include in the table
+   * begin_ref_year: start year of historical baseline period (e.g. 1995)
+   * end_ref_year: end year of historical baseline period (e.g. 2014)
+   * spread: multiplier of standard deviation to calculate spread with
+     (e.g. 1.64)
+   * label: list of scenario names included in the table
+
+#. Script calc_table_warming_level.ncl
+
+   *Required settings for script*
+
+   * scenarios: list with scenarios included in the table
+   * warming_levels: list of warming levels to include in the table
+   * syears: list with start years of time periods (historical then SSPs)
+   * eyears: list with end years of the time periods (historical then SSPs)
+   * begin_ref_year: start year of historical baseline period (e.g. 1995)
+   * end_ref_year: end year of historical baseline period (e.g. 2014)
+   * offset: offset between the current historical baseline and the 1850-1900 period
+   * label: list of scenario names included in the table
+
+#. Script calc_timeseries_mean_spread_runave.ncl
+
+   *Required settings for script*
+
+   * scenarios: list of scenarios to include
+   * syears: list with start years of time periods (historical then SSPs)
+   * eyears: list with end years of the time periods (historical then SSPs)
+   * begin_ref_year: start year of historical baseline period (e.g. 1986)
+   * end_ref_year: end year of historical baseline period (e.g. 2005)
+
+   *Optional settings for script*
+
+   * runave_window: size of the window used to perform the running average
+     (default 11)
+   * spread: how many standard deviations to calculate the spread with
+     (default 1)
+   * label: list of scenario names included in the legend
+   * percent: determines if difference is expressed in percent (0, 1, default = 0)
+   * model_nr: whether to save the number of models used for each scenario
+
+#. Script calc_timeseries_mean_spread_ssp4.ncl
+
+   *Required settings for script*
+
+   * scenarios: list of scenarios to include: ssp434 and ssp460
+   * syears: list with start years of time periods (historical then SSPs)
+   * eyears: list with end years of the time periods (historical then SSPs)
+   * begin_ref_year: start year of historical baseline period (e.g. 1986)
+   * end_ref_year: end year of historical baseline period (e.g. 2005)
+
+   *Optional settings for script*
+
+   * runave_window: size of the window used to perform the running average
+     (default 11)
+   * spread: how many standard deviations to calculate the spread with
+     (default 1)
+   * label: list of scenario names included in the legend
+   * percent: determines if difference is expressed in percent (0, 1, default = 0)
+   * model_nr: whether to save the number of models used for each scenario
+
+#. Script calc_timeseries_mean_spread_ssp5.ncl
+
+   *Required settings for script*
+
+   * scenarios: list of scenarios to include: ssp534-over, ssp585
+   * syears: list with start years of time periods (historical then SSPs)
+   * eyears: list with end years of the time periods (historical then SSPs)
+   * begin_ref_year: start year of historical baseline period (e.g. 1986)
+   * end_ref_year: end year of historical baseline period (e.g. 2005)
+
+   *Optional settings for script*
+
+   * runave_window: size of the window used to perform the running average
+     (default 11)
+   * spread: how many standard deviations to calculate the spread with
+     (default 1)
+   * label: list of scenario names included in the legend
+   * percent: determines if difference is expressed in percent (0, 1, default = 0)
+   * model_nr: whether to save the number of models used for each scenario
+
+#. Script plot_pattern.ncl
+
+   *Required settings for script*
+
+   * scenarios: list of scenarios
+   * periods: list with start years of periods
+   * ancestors: variable and diagnostics that calculated the field to be plotted
+
+   *Optional settings for script*
+
+   * projection: map projection, any valid ncl projection, default = Robinson
+   * diff_levs: list with explicit levels for all contour plots
+   * max_vert: maximum number of plots in the vertical
+   * max_hori: maximum number of plots in the horizontal
+   * model_nr: save number of model runs per period and scenario in netcdf to
+     print in plot? (True, False, default = False)
+   * colormap: alternative colormap, path to rgb file or ncl name
+   * span: span whole colormap? (True, False, default = True)
+   * pltname: alternative name for output plot, default is diagnostic +
+     varname + time_avg
+   * units: units written next to colorbar, e.g. (~F35~J~F~C)
+   * sig: plot stippling for significance? (True, False)
+   * not_sig: plot hatching for uncertainty? (True, False)
+   * label: label to add in the legend
+
+#. Script plot_table_changes.ncl
+
+   *Required settings for script*
+
+   * ancestors: variable and diagnostics that calculated the field to be plotted
+   * scenarios: list of scenarios included in the figure
+   * syears: list of start years of periods of interest
+   * eyears: list of end years of periods of interest
+   * label: list of labels of the scenarios
+
+   *Optional settings for script*
+
+   * title: title of the plot
+
+#. Script plot_table_warming_level.ncl
+
+   *Required settings for script*
+
+   * scenarios: list of scenarios included in the figure
+   * warming_levels: list of warming levels
+   * syears: list of start years of historical and SSP scenarios
+   * eyears: list of end years of historical and SSP scenarios
+   * begin_ref_year: start year of reference period
+   * end_ref_year: end year of reference period
+   * label: list of labels of the scenarios
+   * offset: offset between the reference baseline and 1850-1900
+
+#. Script plot_timeseries_mean_spread_3scenarios.ncl
+
+   *Required settings for script*
+
+   * ancestors: variable and diagnostics that calculated the field to be plotted
+   * scenarios: list of scenarios included in the figure
+   * syears: list of start years of historical and SSP scenarios
+   * eyears: list of end years of historical and SSP scenarios
+   * begin_ref_year: start year of reference period
+   * end_ref_year: end year of reference period
+   * label: list of labels of the scenarios
+
+   *Optional settings for script*
+
+   * title: specify plot title
+   * yaxis: specify y-axis title
+   * ymin: minimum value on y-axis, default calculated from data
+   * ymax: maximum value on y-axis
+   * colormap: alternative colormap, path to rgb file or ncl name
+   * model_nr: save number of model runs per period and scenario
+   * styleset: color style
+   * spread: how many standard deviations to calculate the spread with,
+     default is 1, IPCC tas uses 1.64
+
+#. Script plot_timeseries_mean_spread_constrained_projections.ncl
+
+   *Required settings for script*
+
+   * ancestors: variable and diagnostics that calculated the field to be plotted
+   * scenarios: list of scenarios included in the figure
+   * syears: list of start years of historical and SSP scenarios
+   * eyears: list of end years of historical and SSP scenarios
+   * begin_ref_year: start year of reference period
+   * end_ref_year: end year of reference period
+   * label: list of labels of the scenarios
+   * baseline_offset: offset between the reference period (baseline) and 1850-1900
+   * lower_constrained_projections: list of lower bounds of the constrained
+     projections for the scenarios included, in the same order as the scenarios
+   * upper_constrained_projections: list of upper bounds of the constrained
+     projections for the scenarios included, in the same order as the scenarios
+   * mean_constrained_projections: list of means of the constrained
+     projections for the scenarios included, in the same order as the scenarios
+
+   *Optional settings for script*
+
+   * title: specify plot title
+   * yaxis: specify y-axis title
+   * ymin: minimum value on y-axis, default calculated from data
+   * ymax: maximum value on y-axis
+   * colormap: alternative colormap, path to rgb file or ncl name
+   * model_nr: save number of model runs per period and scenario
+   * styleset: color style
+   * spread: how many standard deviations to calculate the spread with,
+     default is 1, IPCC tas uses 1.64
+
+#. Script plot_timeseries_mean_spread.ncl
+
+   *Required settings for script*
+
+   * ancestors: variable and diagnostics that calculated the field to be plotted
+   * scenarios: list of scenarios included in the figure
+   * syears: list of start years of historical and SSP scenarios
+   * eyears: list of end years of historical and SSP scenarios
+   * begin_ref_year: start year of reference period
+   * end_ref_year: end year of reference period
+   * label: list of labels of the scenarios
+
+   *Optional settings for script*
+
+   * title: specify plot title
+   * yaxis: specify y-axis title
+   * ymin: minimum value on y-axis, default calculated from data
+   * ymax: maximum value on y-axis
+   * colormap: alternative colormap, path to rgb file or ncl name
+   * model_nr: save number of model runs per period and scenario
+   * styleset: color style
+   * spread: how many standard deviations to calculate the spread with,
+     default is 1, IPCC tas uses 1.64
+
+#. Script plot_timeseries_mean_spread_rightaxis_5scen.ncl
+
+   *Required settings for script*
+
+   * ancestors: variable and diagnostics that calculated the field to be plotted
+   * scenarios: list of scenarios included in the figure
+   * syears: list of start years of historical and SSP scenarios
+   * eyears: list of end years of historical and SSP scenarios
+   * begin_ref_year: start year of reference period
+   * end_ref_year: end year of reference period
+   * rightaxis_offset: offset of the right axis relative to the left axis
+   * label: list of labels of the scenarios
+
+   *Optional settings for script*
+
+   * title: specify plot title
+   * yaxis: specify y-axis title
+   * ymin: minimum value on y-axis, default calculated from data
+   * ymax: maximum value on y-axis
+   * colormap: alternative colormap, path to rgb file or ncl name
+   * model_nr: save number of model runs per period and scenario
+   * styleset: color style
+   * spread: how many standard deviations to calculate the spread with,
+     default is 1, IPCC tas uses 1.64
+
+#. Script plot_timeseries_mean_spread_ssp4.ncl
+
+   *Required settings for script*
+
+   * ancestors: variable and diagnostics that calculated the field to be plotted
+   * scenarios: list of scenarios included in the figure
+   * syears: list of start years of historical and SSP scenarios
+   * eyears: list of end years of historical and SSP scenarios
+   * begin_ref_year: start year of reference period
+   * end_ref_year: end year of reference period
+   * label: list of labels of the scenarios
+
+   *Optional settings for script*
+
+   * title: specify plot title
+   * yaxis: specify y-axis title
+   * ymin: minimum value on y-axis, default calculated from data
+   * ymax: maximum value on y-axis
+   * colormap: alternative colormap, path to rgb file or ncl name
+   * model_nr: save number of model runs per period and scenario
+   * styleset: color style
+   * spread: how many standard deviations to calculate the spread with,
+     default is 1, IPCC tas uses 1.64
+
+#. Script plot_timeseries_mean_spread_ssp5.ncl
+
+   *Required settings for script*
+
+   * ancestors: variable and diagnostics that calculated the field to be plotted
+   * scenarios: list of scenarios included in the figure
+   * syears: list of start years of historical and SSP scenarios
+   * eyears: list of end years of historical and SSP scenarios
+   * begin_ref_year: start year of reference period
+   * end_ref_year: end year of reference period
+   * label: list of labels of the scenarios
+
+   *Optional settings for script*
+
+   * title: specify plot title
+   * yaxis: specify y-axis title
+   * ymin: minimum value on y-axis, default calculated from data
+   * ymax: maximum value on y-axis
+   * colormap: alternative colormap, path to rgb file or ncl name
+   * model_nr: save number of model runs per period and scenario
+   * styleset: color style
+   * spread: how many standard deviations to calculate the spread with,
+     default is 1, IPCC tas uses 1.64
+
+#. Script plot_timeseries_across_realization_stddev_runave.ncl
+
+   *Required settings for script*
+
+   * ancestors: variable and diagnostics that calculated the field to be plotted
+   * scenarios: list of scenarios included in the figure
+   * syears: list of start years of historical and SSP scenarios
+   * eyears: list of end years of historical and SSP scenarios
+   * begin_ref_year: start year of reference period
+   * end_ref_year: end year of reference period
+   * label: list of labels of the scenarios
+   * n_samples: number of samples of size 10 to draw among all the ensembles
+     of sampled_model only
+   * sampled_model: name of dataset on which to sample
+
+   *Optional settings for script*
+
+   * trend: whether the trend is calculated and displayed
+   * runave_window: only used if trend is true, size of the window used for the
+     centered running average
+   * title: specify plot title
+   * yaxis: specify y-axis title
+   * ymin: minimum value on y-axis, default calculated from data
+   * ymax: maximum value on y-axis
+   * colormap: alternative colormap, path to rgb file or ncl name
+
+
+Variables
+---------
+
+*Note: These are the variables tested and used in the original paper.*
+
+* tas (atmos, monthly mean, longitude latitude time)
+* pr (atmos, monthly mean, longitude latitude time)
+
+*However, the code is flexible and, in theory, other variables of the same kind can be used.*
+
+
+References
+----------
+
+* Tebaldi, C., Debeire, K., Eyring, V., Fischer, E., Fyfe, J., Friedlingstein, P., Knutti, R., Lowe, J., O'Neill, B., Sanderson, B., van Vuuren, D., Riahi, K., Meinshausen, M., Nicholls, Z., Hurtt, G., Kriegler, E., Lamarque, J.-F., Meehl, G., Moss, R., Bauer, S. E., Boucher, O., Brovkin, V., Golaz, J.-C., Gualdi, S., Guo, H., John, J. G., Kharin, S., Koshiro, T., Ma, L., Olivié, D., Panickal, S., Qiao, F., Rosenbloom, N., Schupfner, M., Seferian, R., Song, Z., Steger, C., Sellar, A., Swart, N., Tachiiri, K., Tatebe, H., Voldoire, A., Volodin, E., Wyser, K., Xin, X., Xinyao, R., Yang, S., Yu, Y., and Ziehn, T.: Climate model projections from the Scenario Model Intercomparison Project (ScenarioMIP) of CMIP6, Earth Syst. Dynam., 12, 253-293, https://doi.org/10.5194/esd-12-253-2021
+
+Example plots
+-------------
+
+.. figure:: /recipes/figures/tebaldi21esd/tas_timeseries.png
+   :align: center
+   :width: 10cm
+
+   Global average temperature time series (11-year running averages) of changes
+   from the current baseline (1995–2014, left axis) and the pre-industrial baseline
+   (1850–1900, right axis, obtained by adding a 0.84 °C offset) for SSP1-1.9,
+   SSP1-2.6, SSP2-4.5, SSP3-7.0 and SSP5-8.5.
+
+.. figure:: /recipes/figures/tebaldi21esd/pr_pattern.png
+   :align: center
+   :width: 10cm
+
+   Patterns of temperature (a) and percent precipitation change (b) normalized
+   by global average temperature change (averaged across CMIP6 models and all
+   Tier 1 plus SSP1-1.9 scenarios).
+
+.. figure:: /recipes/figures/tebaldi21esd/warming_level_table.png
+   :align: center
+   :width: 10cm
+
+   Times (best estimate and range – in square brackets – based on the 5 %–95 %
+   range of the ensemble after smoothing the trajectories by 11-year running
+   means) at which various warming levels (defined as relative to 1850–1900)
+   are reached according to simulations following, from left to right, SSP1-1.9,
+   SSP1-2.6, SSP2-4.5, SSP3-7.0 and SSP5-8.5. Crossing of these levels is
+   defined by using anomalies with respect to 1995–2014 for the model ensembles
+   and adding the offset of 0.84 °C to derive warming from pre-industrial values.
+   We use a common subset of 31 models for the Tier 1 scenarios and all
+   available models (13) for SSP1-1.9, while Table A7 shows the result of using
+   all available models under each scenario. The number of models available
+   under each scenario and the number of models reaching a given warming
+   level are shown in parentheses. However, the estimates are based on the
+   ensemble means and ranges computed from all the models considered (13 or 31
+   in this case), not just from the models that reach a given level. An
+   estimate marked as “NA” is to be interpreted as “not reaching that warming
+   level by 2100”. In cases where the ensemble average remains below the warming
+   level for the whole century, it is possible for the central estimate to be NA,
+   while the earlier time of the confidence interval is not, since it is
+   determined by the warmer end of the ensemble range.
diff --git a/doc/sphinx/source/recipes/recipe_template.rst.template b/doc/sphinx/source/recipes/recipe_template.rst.template
new file mode 100644
index 0000000000..6c248ed5d7
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_template.rst.template
@@ -0,0 +1,77 @@
+.. _recipes_:
+
+Title
+=====
+
+Overview
+--------
+
+Brief description of the diagnostic.
+
+
+Available recipes and diagnostics
+---------------------------------
+
+Recipes are stored in esmvaltool/recipes/
+
+* recipe_.yml
+
+Diagnostics are stored in esmvaltool/diag_scripts//
+
+* : one line description
+
+
+User settings in recipe
+-----------------------
+
+#. Script 
+
+   *Required settings for script*
+
+   * xxx: zzz
+
+   *Optional settings for script*
+
+   *Required settings for variables*
+
+   *Optional settings for variables*
+
+   *Required settings for preprocessor*
+
+   *Optional settings for preprocessor*
+
+   *Color tables*
+
+   * list required color tables (if any) here
+
+
+Variables
+---------
+
+* var1 (realm, frequency, dimensions), e.g. pr (atmos, monthly mean, longitude latitude time)
+
+
+Observations and reformat scripts
+---------------------------------
+
+*Note: (1) obs4MIPs data can be used directly without any preprocessing;
+(2) see headers of reformat scripts for non-obs4MIPs data for download
+instructions.*
+
+* xxx
+
+  *Reformat script:*
+
+References
+----------
+
+* xxx
+
+Example plots
+-------------
+
+.. _fig_mynewdiag_1:
+.. figure:: /recipes/figures//awesome1.png
+   :align: center
+
+   Add figure caption here.
diff --git a/doc/sphinx/source/recipes/recipe_thermodyn_diagtool.rst b/doc/sphinx/source/recipes/recipe_thermodyn_diagtool.rst
new file mode 100644
index 0000000000..6d64a7b589
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_thermodyn_diagtool.rst
@@ -0,0 +1,150 @@
+.. _recipes_thermodyn_diagtool:
+
+Thermodynamics of the Climate System - The Diagnostic Tool TheDiaTo v1.0
+========================================================================
+
+Overview
+--------
+
+The tool allows the computation of TOA, atmospheric and surface energy budgets, latent energy and water mass budgets,
+meridional heat transports, the Lorenz Energy Cycle (LEC), and the material entropy production with the direct
+and indirect methods.
+
+The energy budgets are computed from monthly mean radiative and heat fluxes at the TOA and at the surface
+(cfr. Wild et al., 2013). The meridional heat transports are obtained from the latitudinal integration
+of the zonal mean energy budgets. When a land-sea mask is provided, results are also available for
+land and oceans, separately.
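+
+Schematically, writing :math:`\overline{B}(\phi)` for the zonal-mean energy
+budget at latitude :math:`\phi` and :math:`R` for the Earth's radius, this
+integration takes the form of the standard polar-cap integral (a sketch of the
+idea, not a literal transcription of the code):
+
+.. math::
+
+   T(\phi) = - 2 \pi R^2 \int_{\phi}^{\pi/2} \overline{B}(\phi')
+   \cos\phi' \, \mathrm{d}\phi'
+
+so that, in a steady state, the northward transport across latitude
+:math:`\phi` compensates the net energy imbalance integrated over the polar
+cap to its north.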
+
+The water mass budget is obtained from monthly mean latent heat fluxes (for evaporation) and total and snowfall
+precipitation (cfr. Liepert et al., 2012). The latent energy budget is obtained by multiplying each component of
+the water mass budget by the respective latent heat constant. When a land-sea mask is provided, results are
+also available for land and oceans, separately.
+
+The LEC is computed from 3D daily mean velocity and temperature fields in the troposphere on
+pressure levels. The analysis is carried out in spectral fields, converting lonlat grids into Fourier coefficients.
+The components of the LEC are computed as in Ulbrich and Speth, 1991. In order to account for possible gaps
+in pressure levels, the daily fields of 2D near-surface temperature and horizontal velocities are also needed. These are
+required to perform a vertical interpolation, substituting data at pressure levels where the surface pressure is
+lower than the respective level and the fields are not stored as an output of the analysed model.
+
+The material entropy production is computed by using the indirect or the direct method (or both). The former
+method relies on the convergence of radiative heat in the atmosphere (cfr. Lucarini et al., 2011; Pascale et al., 2011),
+the latter on all viscous and non-viscous dissipative processes occurring in the atmosphere
+(namely the sensible heat fluxes, the hydrological cycle with its components and the kinetic energy dissipation).
+
+For a comprehensive report on the methods used and some descriptive results, please refer to Lembo et al., 2019.
+
+Available recipes and diagnostics
+---------------------------------
+
+Recipes are stored in recipes/
+
+   * recipe_thermodyn_diagtool.yml
+
+Diagnostics are stored in diag_scripts/thermodyn_diagtool/
+
+   * thermodyn_diagnostics.py: the main script, handling input files and calling the computation and plotting scripts;
+
+   * computations.py: a module containing all the main computations that are carried out by the program;
+
+   * fluxogram.py: a module for the retrieval of the block diagrams displaying the reservoirs and conversion terms of the LEC;
+
+   * fourier_coefficients.py: a module for the computation of the Fourier coefficients from the lonlat input grid;
+
+   * lorenz_cycle.py: a module for the computation of the LEC components in Fourier coefficients;
+
+   * mkthe.py: a module for the computation of indirect variables obtained from the input fields, such as LCL height, boundary layer top height and temperature, and potential temperature;
+
+   * plot_script.py: a module for the computation of maps, scatter plots, time series and meridional sections of some derived quantities for each model in the ensemble. The meridional heat and water mass transports are also computed here, as well as the peak magnitudes and locations;
+
+   * provenance_meta.py: a module for collecting metadata and writing them to the produced outputs;
+
+User settings
+-------------
+
+Besides the datasets, to be set according to the usual ESMValTool convention, the user can set the following optional variables in recipe_thermodyn_diagtool.yml:
+
+   * wat: if set to 'true', computations of the water mass and latent energy budgets and transports are performed
+   * lsm: if set to 'true', the computations of the energy budgets, meridional energy transports, water mass and latent energy budgets and transports are performed separately over land and oceans
+   * lec: if set to 'true', computations of the LEC are performed
+   * entr: if set to 'true', computations of the material entropy production are performed
+   * met (1, 2 or 3): the computation of the material entropy production must be performed with the indirect method (1), the direct method (2), or both methods (3). If option 2 or 3 is chosen, the intensity of the LEC is needed for the entropy production related to the kinetic energy dissipation. If lec is set to 'false', a default value is provided.
+
+   These options apply to all models provided for the multi-model ensemble computations. A schematic recipe excerpt illustrating these switches is given below, after the variable list.
+
+
+Variables
+---------
+
+Default variables needed for the computation of energy budgets and transports:
+
+* hfls (atmos, monthly mean, time latitude longitude)
+* hfss (atmos, monthly mean, time latitude longitude)
+* rlds (atmos, monthly mean, time latitude longitude)
+* rlus (atmos, monthly mean, time latitude longitude)
+* rlut (atmos, monthly mean, time latitude longitude)
+* rsds (atmos, monthly mean, time latitude longitude)
+* rsdt (atmos, monthly mean, time latitude longitude)
+* rsus (atmos, monthly mean, time latitude longitude)
+* rsut (atmos, monthly mean, time latitude longitude)
+
+Additional variables needed for the water mass and latent energy computations (optional, with 'wat' set to 'true'):
+
+* pr (atmos, monthly mean, time latitude longitude)
+* prsn (atmos, monthly mean, time latitude longitude)
+
+Additional variables needed for the LEC computations (optional, with 'lec' set to 'true'):
+
+* ta (atmos, daily mean, time plev latitude longitude)
+* tas (atmos, daily mean, time latitude longitude)
+* ua (atmos, daily mean, time plev latitude longitude)
+* uas (atmos, daily mean, time latitude longitude)
+* va (atmos, daily mean, time plev latitude longitude)
+* vas (atmos, daily mean, time latitude longitude)
+* wap (atmos, daily mean, time plev latitude longitude)
+
+Additional variables needed for the material entropy production computations with the direct method (optional, with 'entr' set to 'true' and 'met' to '2' or '3'):
+
+* hus (atmos, monthly mean, time plev latitude longitude)
+* pr (atmos, monthly mean, time latitude longitude)
+* prsn (atmos, monthly mean, time latitude longitude)
+* ps (atmos, monthly mean, time latitude longitude)
+* ts (atmos, monthly mean, time latitude longitude)
+
+Additional variables needed for the material entropy production computations with the indirect method (optional, with 'entr' set to 'true' and 'met' to '1' or '3'):
+
+* tas (atmos, daily mean, time latitude longitude)
+* uas (atmos, daily mean, time latitude longitude)
+* vas (atmos, daily mean, time latitude longitude)
+
+Depending on the user's options, the variables listed above must be provided. All other variables shall be commented out in the recipe file.
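+
+As a reference, a schematic (purely illustrative) excerpt of how these
+switches might be set in the recipe:
+
+.. code-block:: yaml
+
+   scripts:
+     thermodyn_diagnostics:
+       script: thermodyn_diagtool/thermodyn_diagnostics.py
+       wat: true   # water mass and latent energy budgets and transports
+       lsm: true   # separate results over land and ocean
+       lec: true   # Lorenz Energy Cycle
+       entr: true  # material entropy production
+       met: 3      # 1: indirect, 2: direct, 3: both methods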
+
+
+References
+----------
+* Lembo V, Lunkeit F, Lucarini V (2019) A new diagnostic tool for diagnosing water, energy and entropy budgets in climate models. Geophys Mod Dev Disc. doi: 10.5194/gmd-12-3805-2019
+* Liepert BG, Previdi M (2012) Inter-model variability and biases of the global water cycle in CMIP3 coupled climate models. Environ Res Lett 7:014006. doi: 10.1088/1748-9326/7/1/014006
+* Lorenz EN (1955) Available Potential Energy and the Maintenance of the General Circulation. Tellus 7:157–167. doi: 10.1111/j.2153-3490.1955.tb01148.x
+* Lucarini V, Fraedrich K, Ragone F (2011) New Results on the Thermodynamical Properties of the Climate System. J Atmos Sci 68. doi: 10.1175/2011JAS3713.1
+* Lucarini V, Blender R, Herbert C, et al (2014) Mathematical and physical ideas for climate science. Rev Geophys. doi: 10.1002/2013RG000446
+* Pascale S, Gregory JM, Ambaum M, Tailleux R (2011) Climate entropy budget of the HadCM3 atmosphere–ocean general circulation model and of FAMOUS, its low-resolution version. Clim Dyn 36:1189–1206. doi: 10.1007/s00382-009-0718-1
+* Ulbrich U, Speth P (1991) The global energy cycle of stationary and transient atmospheric waves: Results from ECMWF analyses. Meteorol Atmos Phys 45:125–138. doi: 10.1007/BF01029650
+* Wild M, Folini D, Schär C, et al (2013) The global energy balance from a surface perspective. Clim Dyn 40:3107–3134. doi: 10.1007/s00382-012-1569-8
+
+
+Example plots
+-------------
+
+.. _fig_1:
+.. figure:: /recipes/figures/thermodyn_diagtool/meridional_transp.png
+   :align: left
+   :width: 14cm
+
+.. _fig_2:
+.. figure:: /recipes/figures/thermodyn_diagtool/CanESM2_wmb_transp.png
+   :align: right
+   :width: 14cm
diff --git a/doc/sphinx/source/recipes/recipe_toymodel.rst b/doc/sphinx/source/recipes/recipe_toymodel.rst
index 648a723b00..c39ac951db 100644
--- a/doc/sphinx/source/recipes/recipe_toymodel.rst
+++ b/doc/sphinx/source/recipes/recipe_toymodel.rst
@@ -1,12 +1,12 @@
 .. _recipes_toymodel:
 
 Toymodel
-====================================================
+========
 
 Overview
 --------
 
-The goal of this diagnostic is to simulate single-model ensembles from an observational dataset to investigate the effect of observational uncertain. For further discussion of this synthetic value generator, its general application to forecasts and its limitations, see Weigel et al. (2008). The output is a netcdf file containing the synthetic observations. Due to the sampling of the perturbations from a Gaussian distribution, running the recipe multiple times, with the same observation dataset and input parameters, will result in different outputs.
+The goal of this diagnostic is to simulate single-model ensembles from an observational dataset to investigate the effect of observational uncertainty. For further discussion of this synthetic value generator, its general application to forecasts and its limitations, see Weigel et al. (2008). The output is a netcdf file containing the synthetic observations. Due to the sampling of the perturbations from a Gaussian distribution, running the recipe multiple times, with the same observation dataset and input parameters, will result in different outputs.
 
 
 Available recipes and diagnostics
@@ -14,7 +14,7 @@ Available recipes and diagnostics
 
 Recipes are stored in recipes/
 
-* recipe_toymodel_wp4.yml
+* recipe_toymodel.yml
 
 Diagnostics are stored in diag_scripts/magic_bsc/
 
@@ -29,18 +29,32 @@ User settings
 
 User setting files are stored in recipes/
 
-#. recipe_toymodel_wp4.yml
+#. recipe_toymodel.yml
+
+   *Required settings for preprocessor*
+
+   extract_region:
+
+   * start_longitude: minimum longitude
+   * end_longitude: maximum longitude
+   * start_latitude: minimum latitude
+   * end_latitude: maximum latitude
+
+   extract_levels: (for 3D variables)
+
+   * levels: [50000] # e.g. for the 500 hPa level
 
    *Required settings for script*
 
    * number_of_members: integer specifying the number of members to be generated
-   * beta: the user defined underdispersion
+   * beta: the user-defined underdispersion (beta >= 0)
 
 
 Variables
 ---------
 
-* (atmos, daily, longitude, latitude, time)
+* any variable (atmos/ocean, daily-monthly, longitude, latitude, time)
 
 
 Observations and reformat scripts
@@ -62,8 +76,6 @@ Example plots
 -------------
 
 .. _fig_toymodel:
-.. figure:: /recipes/figures/toymodel/synthetic_CMIP5_IPSL-CM5A-LR_day_historical_r1i1p1_T2M_tasmax_1999-2000.jpg
-
-
-
+.. figure:: /recipes/figures/toymodel/synthetic_CMIP5_bcc-csm1-1_Amon_rcp45_r1i1p1_psl_2051-2060.jpg
+
+   Twenty synthetic single-model ensemble members generated by recipe_toymodel.yml (see Section 3.7.2) for the 2051-2060 monthly data of the r1i1p1 RCP 4.5 scenario of the BCC-CSM1-1 simulation.
diff --git a/doc/sphinx/source/recipes/recipe_validation.rst b/doc/sphinx/source/recipes/recipe_validation.rst
new file mode 100644
index 0000000000..0475c6486b
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_validation.rst
@@ -0,0 +1,78 @@
+.. _recipes_validation:
+
+Zonal and Meridional Means
+==========================
+
+Overview
+--------
+
+This functional diagnostic takes two models, designated CONTROL and EXPERIMENT, and compares them via a number of
+analyses. Optionally, a number of observational datasets can be added for processing. There are three types of standard analysis:
+lat_lon, meridional_mean and zonal_mean. Each of these diagnostics can be run on a separate basis (each an entry to diagnostics/scripts).
+The lat_lon analysis produces the following plots: a simple global plot for each variable for each dataset, a global plot for the
+difference between CONTROL and EXPERIMENT, and a global plot for the difference between CONTROL and each of the observational datasets.
+The meridional_mean and zonal_mean analyses produce plots of the variable vs. the coordinate (``latitude`` or ``longitude``) with both ``CONTROL`` and ``EXPERIMENT`` curves
+in each plot, for the entire duration of time specified and also, if the user wishes, for each season (seasonal means): winter DJF, spring MAM, summer JJA, autumn SON (by setting ``seasonal_analysis: true`` in the recipe).
+
+At least regridding onto a common grid for all model and observational datasets should be performed in preprocessing (if datasets
+are on different grids). Also note that it is allowed to use the same dataset (with varying parameters like experiment
+or ensemble or mip) for both CONTROL and EXPERIMENT (as long as at least one data parameter is different).
+
+Available recipes and diagnostics
+---------------------------------
+
+Recipes are stored in recipes/
+
+* recipe_validation.yml (CMIP5)
+* recipe_validation_CMIP6.yml (CMIP6)
+
+Diagnostics are stored in diag_scripts/
+
+* validation.py
+* shared/_validation.py
+
+User settings
+-------------
+
+#. validation.py
+
+   *Required settings for script*
+
+   * title: title of the analysis, user defined;
+   * control_model: control dataset name, e.g. UKESM1-0-LL;
+   * exper_model: experiment dataset name, e.g. IPSL-CM6A-LR;
+   * observational_datasets: list of at least one element; if no OBS wanted, comment out; e.g. ['ERA-Interim'];
+   * analysis_type: use any of: lat_lon, meridional_mean, zonal_mean;
+   * seasonal_analysis: boolean, if seasonal means are needed, e.g. ``true``;
+   * save_cubes: boolean, save each of the plotted cubes in ``/work``;
+
+Variables
+---------
+
+* any variable
+
+Observations and reformat scripts
+---------------------------------
+
+*Note: (1) obs4MIPs or OBS or ana4mips can be used.*
+
+* any observations
+* it is important to note that all observational data should go through the same preprocessing as the model data
+
+References
+----------
+
+* none, basic technical analysis
+
+Example plots
+-------------
+
+.. figure:: /recipes/figures/validation/Merid_Mean_DJF_longitude_tas_UKESM1-0-LL_vs_IPSL-CM6A-LR.png
+   :width: 70 %
+
+   Meridional seasonal mean for winter (DJF): comparison between the CMIP6 UKESM1 and IPSL models.
+
+.. figure:: /recipes/figures/validation/Zonal_Mean_DJF_latitude_tas_UKESM1-0-LL_vs_IPSL-CM6A-LR.png
+   :width: 70 %
+
+   Zonal seasonal mean for winter (DJF): comparison between the CMIP6 UKESM1 and IPSL models.
diff --git a/doc/sphinx/source/recipes/recipe_wenzel14jgr.rst b/doc/sphinx/source/recipes/recipe_wenzel14jgr.rst
new file mode 100644
index 0000000000..4faa05c2a9
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_wenzel14jgr.rst
@@ -0,0 +1,148 @@
+.. _recipe_wenzel14jgr:
+
+Emergent constraints on carbon cycle feedbacks
+==============================================
+
+Overview
+--------
+
+Figures from Wenzel et al. (2014) are reproduced with recipe_wenzel14jgr.yml. Variables relevant for the carbon cycle - climate feedback, such as near surface air temperature (tas), net biosphere productivity (nbp) and carbon flux into the ocean (fgco2), are analyzed for coupled (1pctCO2, here the carbon cycle is fully coupled to the climate response) and uncoupled (esmFixClim1, here the carbon cycle is uncoupled from the climate response) simulations. The recipe includes a comparison of cumulated nbp from coupled and uncoupled simulations and a set of routines to diagnose the long-term carbon cycle - climate feedback parameter (GammaLT) from an ensemble of CMIP5 models. Also included in the recipe is a comparison of the interannual variability of nbp and fgco2 for historical simulations used to diagnose the observable sensitivity of CO2 to tropical temperature changes (GammaIAV). As a key figure of this recipe, the values of GammaLT vs. GammaIAV diagnosed from the models are compared in a scatter plot constituting an emergent constraint.
+
+
+Available recipes and diagnostics
+---------------------------------
+
+Recipes are stored in recipes/
+
+* recipe_wenzel14jgr.yml
+
+Diagnostics are stored in diag_scripts/
+
+* carbon_tsline.ncl: time line plots of annual means for spatial averages
+* carbon_gammaHist.ncl: scatter plot of annual mean anomalies of two different variables; diagnosing and saving GammaIAV
+* carbon_constraint.ncl: scatter plot of GammaLT vs. GammaIAV + line plot of probability density functions; diagnosing GammaLT
+
+
+User settings
+-------------
+
+.. note::
+
+   Make sure to run this recipe with the :ref:`configuration option
+   ` ``max_parallel_tasks: 1``.
+
+User setting files (cfg files) are stored in nml/cfg_carbon/
+
+#. carbon_tsline
+
+   *Required Settings (scripts)*
+
+   * ts_minlat: minimum latitude for area averaging
+   * ts_maxlat: maximum latitude for area averaging
+   * ts_minlon: minimum longitude for area averaging
+   * ts_maxlon: maximum longitude for area averaging
+   * ts_maxyear: last year (time range)
+   * ts_minyear: first year (time range)
+   * plot_units: units to appear on Figure
+   * time_avg: currently, only yearly is available
+   * area_opper: type of area operation (sum)
+   * styleset: Plot style
+
+   *Optional settings (scripts)*
+
+   * multi_model_mean: True for multi-model mean calculation
+   * volcanoes: True for marking years with large volcanic eruptions
+   * align: True for aligning models to have the same start year (needed for idealized 2x CO2 simulations)
+   * ts_anomaly: calculates anomalies with respect to a defined time range average (anom)
+   * ridx_start: if ts_anomaly is True, define start time index for reference period
+   * ridx_end: if ts_anomaly is True, define end time index for reference period
+   * ref_start: if ts_anomaly is True, define start year for reference period
+   * ref_end: if ts_anomaly is True, define end year for reference period
+
+   *Required settings (variables)*
+
+   * reference_dataset: name of reference data set
+
+#. carbon_gammaHist.ncl
+
+   *Required Settings (scripts)*
+
+   * start_year: first year (time range)
+   * end_year: last year (time range)
+   * plot_units: units to appear on Figure
+   * ec_anom: calculates anomalies with respect to the first 10-year average (anom)
+   * scatter_log: set logarithmic axes in scatterplot.ncl
+   * styleset: Plot style
+
+   *Optional settings (scripts)*
+
+   * ec_volc: exclude 2 years after volcanic eruptions (True/False)
+
+#. carbon_constraint.ncl
+
+   *Required Settings (scripts)*
+
+   * gIAV_diagscript: "gammaHist_Fig3and4"
+   * gIAV_start: start year of the GammaIAV calculation period
+   * gIAV_end: end year of the GammaIAV calculation period
+   * ec_anom: True
+   * con_units: label string for units, e.g. (GtC/K)
+   * nc_infile: specify path to historical gamma values derived by carbon_gammaHist.ncl
+   * styleset: Plot style
+
+   *Optional settings (scripts)*
+
+   * reg_models: Explicit naming of individual models to be excluded from the regression
+
+
+Variables
+---------
+
+* tas (atmos, monthly mean, longitude latitude time)
+* nbp (land, monthly mean, longitude latitude time)
+* fgco2 (ocean, monthly mean, longitude latitude time)
+
+
+Observations and reformat scripts
+---------------------------------
+
+* GCP2018: Global Carbon Budget including land (nbp) and ocean (fgco2) carbon fluxes
+* NCEP-NCAR-R1: National Centers for Environmental Prediction reanalysis data for near surface temperature
+
+
+References
+----------
+
+* Cox, P. M., D. B. Pearson, B. B. Booth, P. Friedlingstein, C. C. Huntingford, C. D. B. Jones, and C. M. Luke, 2013, Sensitivity of tropical carbon to climate change constrained by carbon dioxide variability, Nature, 494(7437), 341-344. doi: 10.1038/nature11882
+* Wenzel, S., P. M. Cox, V. Eyring, and P. Friedlingstein, 2014, Emergent Constraints on Climate Carbon Cycle Feedbacks in the CMIP5 Earth System Models, JGR Biogeoscience, 119(5), doi: 10.1002/2013JG002591.
+
+
+Example plots
+-------------
+
+.. figure:: /recipes/figures/wenzel14jgr/tas_Global_CMIP5_1pctCO2_anom__1-1999.png
+   :width: 10 cm
+   :align: center
+
+   Time series of tropical (30S to 30N) mean near surface temperature (tas) change between year 30 and year 110 for the CMIP5 models simulated with prescribed CO2 (1%/yr CO2 increase) coupled simulation (1pctCO2).
+
+
+.. figure:: /recipes/figures/wenzel14jgr/corr_tas-nbp_anom_1960-2005.png
+   :width: 10 cm
+   :align: center
+
+   Correlations between the interannual variability of the global co2flux (nbp+fgco2) and tropical temperature for the individual CMIP5 models using esmHistorical simulations, and for observations.
+
+
+.. figure:: /recipes/figures/wenzel14jgr/constr_tas-nbp_30-1960.000001.png
+   :scale: 50 %
+   :align: center
+
+   Carbon cycle-climate feedback of tropical land carbon vs. the sensitivity of co2flux to interannual temperature variability in the tropics (30S to 30N). The red line shows the linear best fit of the regression together with the prediction error (orange shading) and the gray shading shows the observed range.
+
+
+.. figure:: /recipes/figures/wenzel14jgr/constr_tas-nbp_30-1960.000002.png
+   :scale: 30 %
+   :align: center
+
+   Probability density functions for the pure CMIP5 ensemble (black dashed) and after applying the observed constraint to the models (red solid).
diff --git a/doc/sphinx/source/recipes/recipe_wenzel16jclim.rst b/doc/sphinx/source/recipes/recipe_wenzel16jclim.rst
new file mode 100644
index 0000000000..3071190f4b
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_wenzel16jclim.rst
@@ -0,0 +1,217 @@
+.. _recipes_wenzel16jclim:
+
+Multiple ensemble diagnostic regression (MDER) for constraining future austral jet position
+===========================================================================================
+
+Overview
+--------
+
+`Wenzel et al. (2016)`_ use multiple ensemble diagnostic regression (MDER) to
+constrain the CMIP5 future projection of the summer austral jet position with
+several historical process-oriented diagnostics and respective observations.
+
+The following plots are reproduced:
+
+* Absolute correlation between the target variable and the diagnostics.
+* Scatterplot between the target variable and the MDER-calculated linear
+  combination of diagnostics.
+* Boxplot of RMSE for the unweighted multi-model mean and the (MDER) weighted
+  multi-model mean of the target variable in a pseudo-reality setup.
+* Time series of the target variable for all models, observations and MDER
+  predictions.
+* Errorbar plots for all diagnostics.
+* Scatterplots between the target variable and all diagnostics.
+
+.. _`Wenzel et al. (2016)`: https://journals.ametsoc.org/doi/full/10.1175/JCLI-D-15-0412.1
+
+
+Available recipes and diagnostics
+---------------------------------
+
+Recipes are stored in recipes/
+
+   * recipe_wenzel16jclim.yml
+
+
+Diagnostics are stored in diag_scripts/
+
+   * austral_jet/asr.ncl
+   * austral_jet/main.ncl
+   * mder/absolute_correlation.ncl
+   * mder/regression_stepwise.ncl
+   * mder/select_for_mder.ncl
+
+
+User settings in recipe
+-----------------------
+
+#. Preprocessor
+
+   * ``extract_region``: Region extraction.
+   * ``extract_levels``: Pressure level extraction.
+   * ``area_statistics``: Spatial average calculations.
+
+#. Script austral_jet/asr.ncl
+
+   * ``season``, *str*: Season.
+   * ``average_ens``, *bool*, optional (default: ``False``): Average over all
+     given ensemble members of a climate model.
+   * ``wdiag``, *array of str*, optional: Names of the diagnostic for MDER
+     output. Necessary when MDER output is desired.
+   * ``wdiag_title``, *array of str*, optional: Names of the diagnostic in
+     plots.
+
+#. Script austral_jet/main.ncl
+
+   * ``styleset``, *str*: Style set used for plotting the multi-model plots.
+   * ``season``, *str*: Season.
+   * ``average_ens``, *bool*, optional (default: ``False``): Average over all
+     given ensemble members of a climate model.
+   * ``rsondes``, *array of str*, optional: Additional observations used in the
+     plot but not for MDER output.
+   * ``rsondes_file``, *array of str*, optional: Paths to the additional
+     observations. Necessary when ``rsondes`` is given.
+   * ``rsondes_yr_min``, *int*, optional: Minimum year for additional
+     observations. Necessary when ``rsondes`` is given.
+   * ``rsondes_yr_max``, *int*, optional: Maximum year for additional
+     observations. Necessary when ``rsondes`` is given.
+   * ``wdiag``, *array of str*, optional: Names of the diagnostic for MDER
+     output. Necessary when MDER output is desired.
+   * ``wdiag_title``, *array of str*, optional: Names of the diagnostic in
+     plots.
+   * ``derive_var``, *str*, optional: Derive variables using NCL functions.
+     Must be one of ``"tpp"``, ``"mmstf"``.
+   * ``derive_latrange``, *array of float*, optional: Latitude range for
+     variable derivation. Necessary if ``derive_var`` is given.
+   * ``derive_lev``, *float*, optional: Pressure level (given in *Pa*) for
+     variable derivation. Necessary if ``derive_var`` is given.
+
+#. Script mder/absolute_correlation.ncl
+
+   * ``p_time``, *array of int*: Start years for future projections.
+   * ``p_step``, *int*: Time range for future projections (in years).
+   * ``scal_time``, *array of int*: Time range for the base period (in years) for
+     anomaly calculations used when ``calc_type = "trend"``.
+   * ``time_oper``, *str*: Operation used in the NCL ``time_operation`` function.
+   * ``time_opt``, *str*: Option used in the NCL ``time_operation`` function.
+   * ``calc_type``, *str*: Calculation type for the target variable. Must be
+     one of ``"trend"``, ``"pos"``, ``"int"``.
+   * ``domain``, *str*: Domain tag for provenance tracking.
+   * ``average_ens``, *bool*, optional (default: ``False``): Average over all
+     given ensemble members of a climate model.
+   * ``region``, *str*, optional: Region used for area aggregation. Necessary
+     if the input of the target variable is multidimensional.
+   * ``area_oper``, *str*, optional: Operation used in the NCL ``area_operation``
+     function. Necessary if the input is multidimensional.
+   * ``plot_units``, *str*, optional (attribute for ``variable_info``): Units
+     for the target variable used in the plots.
+
+#. Script mder/regression_stepwise.ncl
+
+   * ``p_time``, *array of int*: Start years for future projections.
+   * ``p_step``, *int*: Time range for future projections (in years).
+   * ``scal_time``, *array of int*: Time range for the base period (in years) for
+     anomaly calculations used when ``calc_type = "trend"``.
+   * ``time_oper``, *str*: Operation used in the NCL ``time_operation`` function.
+   * ``time_opt``, *str*: Option used in the NCL ``time_operation`` function.
+   * ``calc_type``, *str*: Calculation type for the target variable. Must be
+     one of ``"trend"``, ``"pos"``, ``"int"``.
+   * ``domain``, *str*: Domain tag for provenance tracking.
+   * ``average_ens``, *bool*, optional (default: ``False``): Average over all
+     given ensemble members of a climate model.
+   * ``smooth``, *bool*, optional (default: ``False``): Smooth time period with
+     a 1-2-1 filter.
+   * ``iter``, *int*, optional: Number of iterations for smoothing. Necessary
+     when ``smooth`` is given.
+   * ``cross_validation_mode``, *bool*, optional (default: ``False``): Perform
+     cross-validation.
+   * ``region``, *str*, optional: Region used for area aggregation. Necessary
+     if the input of the target variable is multidimensional.
+   * ``area_oper``, *str*, optional: Operation used in the NCL ``area_operation``
+     function. Necessary if the input is multidimensional.
+   * ``plot_units``, *str*, optional (attribute for ``variable_info``): Units
+     for the target variable used in the plots.
+
+#. Script mder/select_for_mder.ncl
+
+   * ``wdiag``, *array of str*: Names of the diagnostic for MDER output.
+     Necessary when MDER output is desired.
+   * ``domain``, *str*: Domain tag for provenance tracking.
+   * ``ref_dataset``, *str*: Style set used for plotting the multi-model plots.
+   * ``average_ens``, *bool*, optional (default: ``False``): Average over all
+     given ensemble members of a climate model.
+   * ``derive_var``, *str*, optional: Derive variables using NCL functions.
+     Must be one of ``"tpp"``, ``"mmstf"``.
+
+
+Variables
+---------
+
+* *ta* (atmos, monthly, longitude, latitude, pressure level, time)
+* *uajet* (atmos, monthly, time)
+* *va* (atmos, monthly, longitude, latitude, pressure level, time)
+* *ps* (atmos, monthly, longitude, latitude, time)
+* *asr* (atmos, monthly, longitude, latitude, time)
+
+
+Observations and reformat scripts
+---------------------------------
+
+* ERA-Interim (*ta*, *uajet*, *va*, *ps*)
+* CERES-EBAF (*asr*)
+
+
+References
+----------
+
+* Wenzel, S., V. Eyring, E.P. Gerber, and A.Y. Karpechko: Constraining Future
+  Summer Austral Jet Stream Positions in the CMIP5 Ensemble by Process-Oriented
+  Multiple Diagnostic Regression. J. Climate, 29, 673–687,
+  doi:10.1175/JCLI-D-15-0412.1, 2016.
+
+
+Example plots
+-------------
+
+.. _fig_wenzel16jclim_1:
+.. figure:: /recipes/figures/wenzel16jclim/CMPI5_uajet-pos_rcp45_20ystep_FIG1.png
+   :align: center
+   :width: 80%
+
+   Time series of the target variable (future austral jet position in the RCP
+   4.5 scenario) for the CMIP5 ensemble, observations, unweighted multi-model mean
+   projections and (MDER) weighted multi-model mean projections.
+
+.. _fig_wenzel16jclim_2:
+.. figure:: /recipes/figures/wenzel16jclim/CMPI5_uajet-pos_rcp45_20ystep_FIG2b.png
+   :align: center
+   :width: 80%
+
+   Scatterplot of the target variable (future austral jet position in the RCP
+   4.5 scenario) vs. the MDER-determined linear combination of diagnostics for the
+   CMIP5 ensemble.
+
+.. _fig_wenzel16jclim_3:
+.. figure:: /recipes/figures/wenzel16jclim/CMPI5_uajet-pos_rcp45_20ystep_FIG3.png
+   :align: center
+   :width: 80%
+
+   Boxplot for the RMSE of the target variable for the unweighted and (MDER)
+   weighted multi-model mean projections in a pseudo-reality setup.
+
+.. _fig_wenzel16jclim_4:
+.. figure:: /recipes/figures/wenzel16jclim/ta_trop250_ta_DJF_trend.png
+   :align: center
+   :width: 80%
+
+   Trends in tropical DJF temperature at 250 hPa for different CMIP5 models and
+   observations.
+
+.. _fig_wenzel16jclim_5:
+.. figure:: /recipes/figures/wenzel16jclim/uajet_H-SH_c.png
+   :align: center
+   :width: 80%
+
+   Scatterplot of the target variable (future austral jet position in the RCP
+   4.5 scenario) vs. a single diagnostic, the historical location of the
+   Southern Hemisphere Hadley cell boundary, for the CMIP5 ensemble.
diff --git a/doc/sphinx/source/recipes/recipe_wenzel16nat.rst b/doc/sphinx/source/recipes/recipe_wenzel16nat.rst
new file mode 100644
index 0000000000..a661844e70
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_wenzel16nat.rst
@@ -0,0 +1,119 @@
+.. _recipes_wenzel16nat:
+
+Projected land photosynthesis constrained by changes in the seasonal cycle of atmospheric CO\ :sub:`2`
+======================================================================================================
+
+Overview
+--------
+
+Selected figures from `Wenzel et al. (2016)`_ are reproduced with recipe_wenzel16nat.yml. Gross primary productivity (gpp) and atmospheric CO\ :sub:`2` concentrations at the surface (co2s) are analyzed for the carbon cycle-concentration feedback in the historical (esmHistorical) and uncoupled (esmFixClim1, in which the carbon cycle is uncoupled from the climate response) simulations. The recipe includes a set of routines to diagnose the long-term carbon cycle-concentration feedback parameter (beta) from an ensemble of CMIP5 models, as well as the observable change in the CO\ :sub:`2` seasonal cycle amplitude due to rising atmospheric CO\ :sub:`2` levels. As the key figure of this recipe, the values of beta diagnosed from the models are plotted against the change in CO\ :sub:`2` amplitude in a scatter plot, constituting an emergent constraint.
+
+.. _`Wenzel et al. (2016)`: https://www.nature.com/articles/nature19772
+
+Available recipes and diagnostics
+---------------------------------
+
+Recipes are stored in recipes/
+
+   * recipe_wenzel16nat.yml
+
+Diagnostics are stored in diag_scripts/carbon_ec/
+
+   * carbon_beta.ncl: (1) scatter plot of annual gpp vs. annual CO\ :sub:`2` and
+     (2) barchart of gpp(2xCO\ :sub:`2`)/gpp(1xCO\ :sub:`2`); calculates beta
+     for the emergent constraint (used by carbon_co2_cycle.ncl)
+   * carbon_co2_cycle.ncl: (1) scatter plot of CO\ :sub:`2` amplitude vs.
+     annual CO\ :sub:`2`, (2) barchart of the sensitivity of the CO\ :sub:`2` amplitude
+     to CO\ :sub:`2`, (3) emergent constraint:
+     gpp(2xCO\ :sub:`2`)/gpp(1xCO\ :sub:`2`) vs. sensitivity of the CO\ :sub:`2`
+     amplitude to CO\ :sub:`2`, (4) probability density function of the constrained
+     and unconstrained sensitivity of the CO\ :sub:`2` amplitude to CO\ :sub:`2`
+
+
+User settings
+-------------
+
+.. note::
+
+   Make sure to run this recipe with the :ref:`configuration option
+   ` ``max_parallel_tasks: 1``.
+
+#. Script carbon_beta.ncl
+
+   *Required settings (scripts)*
+
+   * styleset: project style for lines, colors and symbols
+
+   *Optional settings (scripts)*
+
+   * bc_xmax_year: end year to calculate beta (default: use last available year of all models)
+   * bc_xmin_year: start year to calculate beta (default: use first available year of all models)
+
+   *Required settings (variables)*
+
+   none
+
+   *Optional settings (variables)*
+
+   none
+
+#. Script carbon_co2_cycle.ncl
+
+   *Required settings (scripts)*
+
+   * nc_infile: path of the netCDF file containing beta (output from carbon_beta.ncl)
+   * styleset: project style for lines, colors and symbols
+
+   *Optional settings (scripts)*
+
+   * bc_xmax_year: end year (default: last year available for all model datasets)
+   * bc_xmin_year: start year (default: first year available for all model datasets)
+
+   *Required settings (variables)*
+
+   * reference_dataset: name of reference dataset (observations)
+
+   *Optional settings (variables)*
+
+   none
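+The settings above map directly onto the ``scripts`` section of the recipe.
+A minimal, purely illustrative sketch follows (the year values and the
+``nc_infile`` path are hypothetical placeholders):
+
+.. code-block:: yaml
+
+   scripts:
+     carbon_beta:
+       script: carbon_ec/carbon_beta.ncl
+       styleset: CMIP5
+       bc_xmin_year: 1920  # optional: start year for the beta calculation
+       bc_xmax_year: 2005  # optional: end year for the beta calculation
+     carbon_co2_cycle:
+       script: carbon_ec/carbon_co2_cycle.ncl
+       styleset: CMIP5
+       # Path to the netCDF file with beta written by carbon_beta.ncl
+       nc_infile: path/to/carbon_beta_output.nc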
+
+Variables
+---------
+
+* co2s (atmos, monthly mean, plev longitude latitude time)
+* gpp (land, monthly mean, longitude latitude time)
+
+
+Observations and reformat scripts
+---------------------------------
+
+* ESRL: Earth System Research Laboratory, ground-based CO\ :sub:`2` measurements
+
+
+References
+----------
+
+* Wenzel, S., Cox, P., Eyring, V. et al., 2016, Projected land photosynthesis
+  constrained by changes in the seasonal cycle of atmospheric CO\ :sub:`2`.
+  Nature 538, 499–501, doi:10.1038/nature19772.
+
+
+Example plots
+-------------
+
+.. figure:: /recipes/figures/wenzel16nat/fig_1.png
+   :width: 12 cm
+   :align: center
+
+   Comparison of CO\ :sub:`2` seasonal amplitudes for CMIP5 historical simulations and observations, showing annual mean atmospheric CO\ :sub:`2` versus the amplitudes of the CO\ :sub:`2` seasonal cycle at Pt. Barrow, Alaska (produced with carbon_co2_cycle.ncl, similar to Fig. 1a from Wenzel et al. (2016)).
+
+.. figure:: /recipes/figures/wenzel16nat/fig_2.png
+   :width: 12 cm
+   :align: center
+
+   Barchart showing the gradient of the linear correlations for the comparison of CO\ :sub:`2` seasonal amplitudes for the CMIP5 historical simulations at Pt. Barrow, Alaska (produced with carbon_co2_cycle.ncl, similar to Fig. 1b from Wenzel et al. (2016)).
+
+.. figure:: /recipes/figures/wenzel16nat/fig_3.png
+   :width: 12 cm
+   :align: center
+
+   Emergent constraint on the relative increase of large-scale GPP for a doubling of CO\ :sub:`2`, showing the correlation between the sensitivity of the CO\ :sub:`2` amplitude to annual mean CO\ :sub:`2` increases at Pt. Barrow (x-axis) and the high-latitude (60N - 90N) CO\ :sub:`2` fertilization of GPP at 2xCO\ :sub:`2`. The red line shows the linear best fit of the regression together with the prediction error (orange shading); the gray shading shows the observed range (produced with carbon_co2_cycle.ncl, similar to Fig. 3a from Wenzel et al. (2016)).
diff --git a/doc/sphinx/source/recipes/recipe_zmnam.rst b/doc/sphinx/source/recipes/recipe_zmnam.rst
index c78dcd5b30..6f8ee26a1b 100644
--- a/doc/sphinx/source/recipes/recipe_zmnam.rst
+++ b/doc/sphinx/source/recipes/recipe_zmnam.rst
@@ -1,76 +1,78 @@
+.. _recipes_zmnam:
+
 Stratosphere-troposphere coupling and annular modes indices (ZMNAM)
 ===================================================================
-
+
 Overview
 --------
-
-The current generation of climate models include the representation of stratospheric processes, as the vertical coupling with the troposphere is important for the weather and climate at the surface (e.g., `Baldwin and Dunkerton, 2001 `_).
-The recipe recipe_zmnam.yml can be used to evaluate the representation of the Northern Annular Mode (NAM, e.g., `Wallace, 2000 `_) in climate simulations, using reanalysis datasets as reference.
+The current generation of climate models includes the representation of stratospheric processes, as the vertical coupling with the troposphere is important for the weather and climate at the surface (e.g., `Baldwin and Dunkerton, 2001 <https://doi.org/10.1126/science.1063315>`_).
+
+The recipe recipe_zmnam.yml can be used to evaluate the representation of Annular Modes (AM, e.g., `Wallace, 2000 <https://doi.org/10.1002/qj.49712656402>`_) in climate simulations, using reanalysis datasets as reference.
 
-The calculation is based on the “zonal mean algorithm” of `Baldwin and Thompson (2009) `_, and is alternative to pressure based or height-dependent methods.
+The calculation is based on the “zonal mean algorithm” of `Baldwin and Thompson (2009) <https://doi.org/10.1002/qj.479>`_, and is an alternative to pressure-based or height-dependent methods.
 This approach provides a robust description of the stratosphere-troposphere coupling on daily timescales, requiring less subjective choices and a reduced amount of input data.
-Starting from daily mean geopotential height on pressure levels, the leading empirical orthogonal function/principal component are computed from zonal mean daily anomalies, with the leading principal component representing the zonal mean NAM index. The regression of the monthly mean geopotential height onto this monthly averaged index represents the NAM pattern for each selected pressure level.
+Starting from daily mean geopotential height on pressure levels, the leading empirical orthogonal function/principal component are computed from zonal mean daily anomalies, with the leading principal component representing the zonal mean AM index. The regression of the monthly mean geopotential height onto this monthly averaged index represents the AM pattern for each selected pressure level.
+
+The outputs of the procedure are the monthly time series and the histogram of the daily zonal mean AM index, and the monthly regression maps for selected pressure levels. The users can select the specific datasets (climate model simulation and/or reanalysis) to be evaluated, the Northern or Southern Hemisphere (NH or SH) and a subset of pressure levels of interest.
 
-The outputs of the procedure are the monthly time series and the histogram of the daily zonal-mean NAM index, and the monthly regression maps for selected pressure levels. The users can select the specific datasets (climate model simulation and/or reanalysis) to be evaluated, and a subset of pressure levels of interest.
-
 Available recipes and diagnostics
 ---------------------------------
-
+
 Recipes are stored in recipes/
-
+
 * recipe_zmnam.yml
-
+
 Diagnostics are stored in diag_scripts/zmnam/
-
+
 * zmnam.py and subroutines
-
+
   * zmnam_calc.py
   * zmnam_plot.py
   * zmnam_preproc.py
-
+
 User settings
 -------------
-
-None.
-
-
+
+Hemisphere of interest (NH or SH)
+
+
 Variables
 ---------
-
+
 * zg (atmos, daily mean, longitude latitude time)
-
-
+
+
 Observations and reformat scripts
 ---------------------------------
-
+
 None.
-
+
 References
 ----------
-
-* Baldwin, M. P. and Thompson, D. W. (2009), A critical comparison of stratosphere–troposphere coupling indices. Q.J.R. Meteorol. Soc., 135: 1661-1672. doi:10.1002/qj.479. `link `_
-* Baldwin, M. P and Dunkerton, T. J. (2001), Stratospheric Harbingers of Anomalous Weather Regimes. Science 294 (5542): 581-584. doi:10.1126/science.1063315. `link `_
-* Wallace, J. M. (2000), North Atlantic Oscillation/annular mode: Two paradigms-one phenomenon. Q.J.R. Meteorol. Soc., 126 (564): 791-805. doi:10.1002/qj.49712656402. `link `_
-
+* Baldwin, M. P. and Thompson, D. W. (2009), A critical comparison of stratosphere–troposphere coupling indices. Q.J.R. Meteorol. Soc., 135: 1661-1672. `doi:10.1002/qj.479 <https://doi.org/10.1002/qj.479>`_.
+* Baldwin, M. P. and Dunkerton, T. J. (2001), Stratospheric Harbingers of Anomalous Weather Regimes. Science 294 (5542): 581-584. `doi:10.1126/science.1063315 <https://doi.org/10.1126/science.1063315>`_.
+* Wallace, J. M. (2000), North Atlantic Oscillation/annular mode: Two paradigms-one phenomenon. Q.J.R. Meteorol. Soc., 126 (564): 791-805. `doi:10.1002/qj.49712656402 <https://doi.org/10.1002/qj.49712656402>`_.
+
+
 Example plots
 -------------
-
+
 .. figure:: /recipes/figures/zmnam/zmnam_reg.png
    :width: 10cm
-
-   Example output: time series of the zonal-mean NAM index.
+
+   Regression map of the Northern Hemisphere zonal mean AM index onto geopotential height, for a selected pressure level (250 hPa) for the MPI-ESM-MR model (CMIP5 AMIP experiment, period 1979-2008). Negative values are shaded in grey.
 .. figure:: /recipes/figures/zmnam/zmnam_ts.png
    :width: 10cm
-
-   Example output: regression map for a selected pressure level.
+
+   Time series of the Northern Hemisphere zonal mean AM index for a selected pressure level (250 hPa) for the MPI-ESM-MR model (CMIP5 AMIP experiment, period 1979-2008).
diff --git a/doc/sphinx/source/recipes/recipes_testing.rst b/doc/sphinx/source/recipes/recipes_testing.rst
new file mode 100644
index 0000000000..ef91086884
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipes_testing.rst
@@ -0,0 +1,44 @@
+.. _recipes_testing:
+
+Short test versions of scientific recipes to check for backward compatibility
+==============================================================================
+
+Overview
+--------
+
+These recipes are created to cover typical functionalities in the ESMValTool and allow testing them quickly.
+Each recipe should run in less than 5 minutes to facilitate fast tests.
+
+
+Available recipes and diagnostics
+---------------------------------
+
+Recipes are stored in recipes/testing/
+
+   * recipe_deangelis15nat_fig1_fast.yml
+
+Diagnostics are stored in diag_scripts/
+
+   * deangelis15nat/deangelisf1b.py
+
+Other recipes are used for the continuous integration (CI) tests; these
+are run only by the automated tests:
+
+   * recipe_python_for_CI.yml
+
+This is identical to the example Python recipe, with the only difference being
+the location extraction preprocessor, which is replaced here with ``extract_point``.
+The reason for this is that this recipe is used solely for CI tests,
+and the geolocator/Nominatim engine for location extraction should not be used
+in CI runs as per their usage policy.
+
+User settings in recipes
+------------------------
+
+The recipe recipe_deangelis15nat_fig1_fast.yml calls the first diagnostic (deangelisf1b.py) from the original recipe recipe_deangelis15nat.yml.
+It can be run with CMIP5 and CMIP6 models for any duration.
+Several flux variables (W m\ :sup:`-2`\) and up to 6 different model experiments can be handled.
+Each variable needs to be given for each model experiment. The same experiments must
+be given for all models. For testing purposes, this was reduced to two models, three experiments and one year.
+For more detailed documentation, see :ref:`recipes_deangelis15nat`.
diff --git a/doc/sphinx/source/recipes/template.rst b/doc/sphinx/source/recipes/template.rst
deleted file mode 100644
index 0fe7cfc451..0000000000
--- a/doc/sphinx/source/recipes/template.rst
+++ /dev/null
@@ -1,71 +0,0 @@
-Title
-=====
-
-Overview
---------
-
-Brief description of the diagnostic.
-
-
-Available recipes and diagnostics
----------------------------------
-
-Recipes are stored in recipes/
-
-   * recipe_.yml
-
-Diagnostics are stored in diag_scripts//
-
-   * : one line scription
-
-
-User settings in recipe
------------------------
-
-#. Script
-
-   *Required settings (scripts)*
-
-   * xxx: zzz
-
-   *Optional settings (scripts)*
-
-   *Required settings (variables)*
-
-   *Optional settings (variables)*
-
-   *Color tables*
-
-   * list required color tables (if any) here
-
-
-Variables
----------
-
-* var1 (realm, frequency, dimensions), e.g.
pr (atmos, monthly mean, longitude latitude time) - - -Observations and reformat scripts ---------------------------------- - -*Note: (1) obs4mips data can be used directly without any preprocessing; -(2) see headers of reformat scripts for non-obs4mips data for download -instructions.* - -* xxx - - *Reformat script:* - -References ----------- - -* xxx - -Example plots -------------- - -.. _fig_mynewdiag_1: -.. figure:: /recipes/figures//awesome1.png - :align: center - - Add figure caption here. diff --git a/doc/sphinx/source/requirements.txt b/doc/sphinx/source/requirements.txt deleted file mode 100644 index 23ebc0ead2..0000000000 --- a/doc/sphinx/source/requirements.txt +++ /dev/null @@ -1,18 +0,0 @@ -cdo -cython -matplotlib<3 -nc-time-axis -netCDF4 -numba -numpy -pillow -prov[dot] -psutil -pyyaml -shapely -six -xarray -yamale -sklearn -pandas -eofs diff --git a/doc/sphinx/source/user_guide2/config.inc b/doc/sphinx/source/user_guide2/config.inc deleted file mode 100644 index e798a64a19..0000000000 --- a/doc/sphinx/source/user_guide2/config.inc +++ /dev/null @@ -1,102 +0,0 @@ -.. _config: - -******************* -Configuration files -******************* - -There are several configuration files in ESMValTool: - - - config-user.yml - - config-developer.yml - - config-references.yml - - environment.yml - -The ``config-user.yml`` file is described here, however, the -other three files are beyond the scope of the user guide. They are described -in the developer guide. - -User config file -================ - -The ``config-user.yml`` configuration file contains all the global level -information needed by ESMValTool. The following shows the default settings from -the ``config-user.yml`` file. - -.. code-block:: yml - - # Diagnostics create plots? [true]/false - write_plots: true - # Diagnositcs write NetCDF files? [true]/false - write_netcdf: true - # Set the console log level debug, [info], warning, error - log_level: info - # verbosity is deprecated and will be removed in the future - # verbosity: 1 - # Exit on warning? true/[false] - exit_on_warning: false - # Plot file format? [ps]/pdf/png/eps/epsi - output_file_type: pdf - # Destination directory - output_dir: ./esmvaltool_output - # Auxiliary data directory (used for some additional datasets) - auxiliary_data_dir: ./auxiliary_data - # Use netCDF compression true/[false] - compress_netcdf: false - # Save intermediary cubes in the preprocessor true/[false] - save_intermediary_cubes: false - # Remove the preproc dir if all fine - remove_preproc_dir: true - # Run at most this many tasks in parallel null/[1]/2/3/4/.. - # Set to null to use the number of available CPUs. - # Make sure your system has enough memory for the specified number of tasks. - max_parallel_tasks: 1 - # Path to custom config-developer file, to customise project configurations. - # See config-developer.yml for an example. Set to None to use the default - config_developer_file: null - # Get profiling information for diagnostics - # Only available for Python diagnostics - profile_diagnostic: false - - # Rootpaths to the data from different projects (lists are also possible) - rootpath: - CMIP5: [~/cmip5_inputpath1, ~/cmip5_inputpath2] - OBS: ~/obs_inputpath - default: ~/default_inputpath - - # Directory structure for input data: [default]/BADC/DKRZ/ETHZ/etc - # See config-developer.yml for definitions. - drs: - CMIP5: default - -Most of these settings are fairly self-explanatory, ie: - -.. code-block:: yml - - # Diagnostics create plots? 
[true]/false - write_plots: true - # Diagnositcs write NetCDF files? [true]/false - write_netcdf: true - -The ``write_plots`` setting is used to inform ESMValTool about your preference -for saving figures. Similarly, the ``write_netcdf`` setting is a boolean which -turns on or off the writing of netCDF files. - -.. code-block:: yml - - # Auxiliary data directory (used for some additional datasets) - auxiliary_data_dir: ./auxiliary_data - -The ``auxiliary_data_dir`` setting is the path to place any required -additional auxiliary data files. This method was necessary because certain -Python toolkits such as cartopy will attempt to download data files at run -time, typically geographic data files such as coastlines or land surface maps. -This can fail if the machine does not have access to the wider internet. This -location allows us to tell cartopy (and other similar tools) where to find the -files if they can not be downloaded at runtime. To reiterate, this setting is -not for model or observational datasets, rather it is for data files used in -plotting such as coastline descriptions and so on. - - -Tip: You choose your config.yml file at run time, so you could have several -available with different purposes. One for formalised run, one for debugging, -etc... diff --git a/doc/sphinx/source/user_guide2/dirstruct.inc b/doc/sphinx/source/user_guide2/dirstruct.inc deleted file mode 100644 index 767788c6d6..0000000000 --- a/doc/sphinx/source/user_guide2/dirstruct.inc +++ /dev/null @@ -1,5 +0,0 @@ -:: _dirstruct: - -************************************* -Directory structure of the ESMValTool -************************************* diff --git a/doc/sphinx/source/user_guide2/first_diagnostic.inc b/doc/sphinx/source/user_guide2/first_diagnostic.inc deleted file mode 100644 index aeccebdaa0..0000000000 --- a/doc/sphinx/source/user_guide2/first_diagnostic.inc +++ /dev/null @@ -1,96 +0,0 @@ -******************************************** -How to prepare and run your first diagnostic -********************************************* - -Instructiona for personal diagnostic -==================================== - -Anyone can run a personal diagnostic, no matter where the location of it; -there is no need to install esmvaltool in developer mode nor is it to -git push or for that matter, do any git operations; the example recipe - -.. code-block:: bash - esmvaltool/recipes/recipe_my_personal_diagnostic.yml - -shows the use of running a personal diagnostic; the example - -.. code-block:: bash - esmvaltool/diag_scripts/examples/my_little_diagnostic.py - -and any of its alterations may be used as training wheels for the future ESMValTool -diagnostic developer. The purpose of this example is to familiarize the user with -the framework of ESMValTool without the constraints of installing and running the -tool as developer. - -Functionality -============= - -`my_little_diagnostic` (or whatever the user will call their diagnostic) makes full use -of ESMValTool's preprocessor output (both phyisical files and run variables); this output -comes in form of a nested dictionary, or config dictionary, see an example below; -it also makes full use of the ability to call any of the preprocessor's functions, -note that relative imports of modules from the esmvaltool package are allowed and -work without altering the $PYTHONPATH. 
- -The user may parse this dictionary so that they execute a number of operations on the -preprocessed data; for example the `my_little_diagnostic.plot_time_series` grabs the -preprocessed data output, computes global area averages for each model, then plots -a time-series for each model. Different manipulation functionalities for grouping, -sorting etc of the data in the config dictionary are available, -please consult ESMValTool User Manual. - - -Writing a basic recipe -====================== -The user will need to write a basic recipe to be able to run their own personal diagnostic. -An example of such a recipe is found in `esmvaltool/recipes/recipe_my_personal_diagnostic.yml`. -For general guidelines with regards to ESMValTool recipes please consult the User Guide; -the specific parameters needed by a recipe that runs a personal diagnostic are: - -.. code-block:: bash - scripts: - my_diagnostic: - script: /path/to/your/my_little_diagnostic.py - -i.e. the full path to the personal diagnostic that the user needs to run. - -Example of config dictionary -============================ -``` -{'input_files': -['/group_workspaces/jasmin2/cmip6_prep/esmvaltool_users/valeriu/MyDIAG/recipe_my_personal_diagnostic_20181001_112918/preproc/simple_pp_ta/metadata.yml'], -'log_level': 'info', -'max_data_filesize': 100, -'myDiag': 'my_personal_diagnostic_example', -'myDiagPlace': '/group_workspaces/jasmin2/cmip6_prep/esmvaltool_users/valeriu', -'output_file_type': 'pdf', -'plot_dir': '/group_workspaces/jasmin2/cmip6_prep/esmvaltool_users/valeriu/MyDIAG/recipe_my_personal_diagnostic_20181001_112918/plots/simple/my_diagnostic', 'recipe': 'recipe_my_personal_diagnostic.yml', -'run_dir': '/group_workspaces/jasmin2/cmip6_prep/esmvaltool_users/valeriu/MyDIAG/recipe_my_personal_diagnostic_20181001_112918/run/simple/my_diagnostic', -'script': 'my_diagnostic', -'title': 'My First Diagnostic', -'version': '2.0a1', -'work_dir': '/group_workspaces/jasmin2/cmip6_prep/esmvaltool_users/valeriu/MyDIAG/recipe_my_personal_diagnostic_20181001_112918/work/simple/my_diagnostic', -'write_netcdf': True, -'write_plots': True, -'input_data': {'/group_workspaces/jasmin2/cmip6_prep/esmvaltool_users/valeriu/MyDIAG/recipe_my_personal_diagnostic_20181001_112918/preproc/simple_pp_ta/CMIP5_MPI-ESM-LR_Amon_historical_r1i1p1_T3M_ta_2000-2002.nc': - {'cmor_table': 'CMIP5', - 'dataset': 'MPI-ESM-LR', - 'diagnostic': 'simple', - 'end_year': 2002, - 'ensemble': 'r1i1p1', - 'exp': 'historical', - 'field': 'T3M', - 'filename': '/group_workspaces/jasmin2/cmip6_prep/esmvaltool_users/valeriu/MyDIAG/recipe_my_personal_diagnostic_20181001_112918/preproc/simple_pp_ta/CMIP5_MPI-ESM-LR_Amon_historical_r1i1p1_T3M_ta_2000-2002.nc', - 'fx_files': {'areacello': '/badc/cmip5/data/cmip5/output1/MPI-M/MPI-ESM-LR/historical/fx/ocean/fx/r0i0p0/latest/areacello/areacello_fx_MPI-ESM-LR_historical_r0i0p0.nc', 'sftlf': '/badc/cmip5/data/cmip5/output1/MPI-M/MPI-ESM-LR/historical/fx/atmos/fx/r0i0p0/latest/sftlf/sftlf_fx_MPI-ESM-LR_historical_r0i0p0.nc', 'sftof': '/badc/cmip5/data/cmip5/output1/MPI-M/MPI-ESM-LR/historical/fx/ocean/fx/r0i0p0/latest/sftof/sftof_fx_MPI-ESM-LR_historical_r0i0p0.nc'}, - 'long_name': 'Air Temperature', - 'mip': 'Amon', - 'preprocessor': 'pp', - 'project': 'CMIP5', - 'short_name': 'ta', - 'standard_name': 'air_temperature', - 'start_year': 2000, - 'units': 'K' - } -- end of input_data member value (key: preprocessed file) - } -- end of input_data dictionary -} -- end of config dictionary -``` diff --git 
a/doc/sphinx/source/user_guide2/index.rst b/doc/sphinx/source/user_guide2/index.rst deleted file mode 100644 index 5728044b44..0000000000 --- a/doc/sphinx/source/user_guide2/index.rst +++ /dev/null @@ -1,14 +0,0 @@ -####################### -User's guide -####################### - -.. include:: intro.inc -.. include:: install.inc -.. include:: config.inc -.. include:: inputdata.inc -.. include:: outputdata.inc -.. include:: observations.inc -.. include:: recipe.inc -.. include:: preprocessor.inc -.. include:: dirstruct.inc -.. include:: running.inc diff --git a/doc/sphinx/source/user_guide2/inputdata.inc b/doc/sphinx/source/user_guide2/inputdata.inc deleted file mode 100644 index b12d1e54ea..0000000000 --- a/doc/sphinx/source/user_guide2/inputdata.inc +++ /dev/null @@ -1,7 +0,0 @@ -:: _inputdata: - -********************** -ESMValTool data finder -********************** - -Documentation of the _data_finder.py module (incl. _download.py?) \ No newline at end of file diff --git a/doc/sphinx/source/user_guide2/install.inc b/doc/sphinx/source/user_guide2/install.inc deleted file mode 100644 index b25d08cd39..0000000000 --- a/doc/sphinx/source/user_guide2/install.inc +++ /dev/null @@ -1,181 +0,0 @@ -.. _installation_guide: - -********************* -Installing ESMValTool -********************* - -ESMValTool 2.0 requires a Unix(-like) operating system and Python 3.6+. - -The ESMValTool supports three different installation methods: - -* Installation through Conda package manager (see https://www.continuum.io); - -* Deployment through a Docker container (see https://www.docker.com); - -* From the source code available at https://github.com/ESMValGroup/ESMValTool. - -The next sections will detail the procedure to install ESMValTool for each of -this methods. - - -Conda installation -================== - -A conda package will be available after the release of ESMValTool 2. - - -Docker installation -=================== - -.. warning:: - Docker section to be added - - -Install from source -=================== - - - -Obtaining the source code -------------------------- - -The ESMValTool source code is available on a public GitHub repository: -https://github.com/ESMValGroup/ESMValTool - -The easiest way to obtain it is to clone the repository using git -(see https://git-scm.com/). To clone the public repository: - -.. code-block:: bash - - git clone https://github.com/ESMValGroup/ESMValTool.git - -It is also possible to work in one of the ESMValTool private repository, e.g.: - -.. code-block:: bash - - git clone https://github.com/ESMValGroup/ESMValTool-private.git - -By default, this command will create a folder called ESMValTool containing the -source code of the tool. - -.. attention:: - The newly created clone of the git repository will point to the default - branch. To change to another branch or release execute: - git checkout origin/$BRANCH_OR_RELEASE_NAME, i.e git checkout origin/2.0.0. - -GitHub also allows to download the source code in as a tar.gz or zip file. If -you choose to use this option, download the compressed file and extract its -contents at the desired location. - - -Prerequisites -------------- - -It is strongly recommended to use conda to manage ESMValTool dependencies. -For a minimal conda installation go to https://conda.io/miniconda.html. To -simplify the process, an environment definition file is provided within the -repository (``environment.yml`` in the root folder). - -.. attention:: - Some systems provides a preinstalled version of conda (e.g., via the module environment). 
- Several users however reported problems when installing NCL with such versions. It is - therefore preferable to use a local, fully user-controlled conda installation. - Using an older version of conda can also be a source of problems, so if you have conda - installed already, make sure it is up to date by running ``conda update -n base conda``. - -To enable the ``conda`` command, please source the appropriate configuration file -from your ``~/.bashrc`` file: - -.. code-block:: bash - - source /etc/profile.d/conda.sh - -or ``~/.cshrc``/``~/.tcshrc`` file: - -.. code-block:: bash - - source /etc/profile.d/conda.csh - -The ESMValTool conda environment file can also be used as a requirements list -for those cases in which a conda installation is not possible or advisable. -From now on, we will assume that the installation is going to be done through -conda. - -Ideally, you should create a conda environment for ESMValTool, so it is -independent from any other Python tools present in the system. - -Note that it is advisable to update conda to the latest version before -installing ESMValTool. Using the following command: - -.. code-block::bash - - conda update -n base conda - -To create an environment - -.. code-block:: bash - - conda create --name esmvaltool python=3 - conda env update --name esmvaltool --file ESMValTool/environment.yml - -The environment is called ``esmvaltool`` by default, but it is possible to use -the option -n $(ENVIRONMENT_NAME) to define a custom name. You can activate -the environment using the command: - -.. code-block:: bash - - conda activate esmvaltool - -It is also possible to update an existing environment from the environment -file. This can be very useful when updating an older installation of ESMValTool: - -.. code-block:: bash - - conda env update --file environment.yml --name $(ENVIRONMENT_TO_UPDATE) - -.. attention:: - From now on, we assume that the conda environment for ESMValTool is - activated. - -Software installation ---------------------- - -Once all prerequisites are fulfilled, ESMValTool 2.0 can be installed using -the following command: - -.. code-block:: bash - - cd ESMValTool - python setup.py install - -If you would like to run Julia diagnostic scripts, you will also need to -`install Julia `_ and the Julia dependencies: - -.. code-block:: bash - - julia ESMValTool/esmvaltool/install/Julia/setup.jl - -If you would like to run R diagnostic scripts, you will also need to install the R -dependencies. To do that you will need to install gcc, if gcc is not available on your system or it is too old. On Debian based systems, this can be done by running `apt install build-essential gcc`, on managed systems you can often use the `module avail` command to see what compilers are available (note on gcc version: gcc 7.3.0 works well; gcc 8.2.0 is reported to have issues installing the R packages; example of loading gcc7 on the CEDA Jasmin cluster: `module load contrib/gnu/gcc/7.3.0`, similar module loading functionality should be present at your local cluster as well). Install the R dependency packages: - -.. code-block:: bash - - Rscript ESMValTool/esmvaltool/install/R/setup.R - -The next step is to check that the installation works properly. -To do this, run the tool with --help: - -.. code-block:: bash - - esmvaltool --help - -If everything was installed properly, ESMValTool should have printed the -version number at the console and exited. 
- -For a more complete installation verification, run the automated tests and -confirm that no errors are reported: - -.. code-block:: bash - - python setup.py test --installation - diff --git a/doc/sphinx/source/user_guide2/intro.inc b/doc/sphinx/source/user_guide2/intro.inc deleted file mode 100644 index f099dd3214..0000000000 --- a/doc/sphinx/source/user_guide2/intro.inc +++ /dev/null @@ -1,45 +0,0 @@ -************ -Introduction -************ - -The Earth System Model Evaluation Tool (ESMValTool) is a community-development that aims at improving diagnosing and understanding of the causes and effects of model biases and inter-model spread. The ESMValTool is open to both users and developers encouraging open exchange of diagnostic source code and evaluation results from the Coupled Model Intercomparison Project (CMIP) ensemble. This will facilitate and improve ESM evaluation beyond the state-of-the-art and aims at supporting the activities within CMIP and at individual modelling centers. We envisage running the ESMValTool routinely on the CMIP model output utilizing observations available through the Earth System Grid Federation (ESGF) in standard formats (obs4MIPs) or made available at ESGF nodes. - -The goal is to develop a benchmarking and evaluation tool that produces well-established analyses as soon as model output from CMIP simulations becomes available, e.g., at one of the central repositories of the ESGF. This is realized through standard recipes that reproduce a certain set of diagnostics and performance metrics that have demonstrated its importance in benchmarking Earth System Models (ESMs) in a paper or assessment report, such as Chapter 9 of the Intergovernmental Panel on Climate Change (IPCC) Fifth Assessment Report (AR5) (Flato et al., 2013). The expectation is that in this way a routine and systematic evaluation of model results can be made more efficient, thereby enabling scientists to focus on developing more innovative methods of analysis rather than constantly having to "reinvent the wheel". - -In parallel to standardization of model output, the ESGF also hosts observations for Model Intercomparison Projects (obs4MIPs) and reanalyses data (ana4MIPs). obs4MIPs provides open access data sets of satellite data that are comparable in terms of variables, temporal and spatial frequency, and periods to CMIP model output (Taylor et al., 2012). The ESMValTool utilizes these observations and reanalyses from ana4MIPs plus additionally available observations in order to evaluate the models performance. In many diagnostics and metrics, more than one observational data set or meteorological reanalysis is used to assess uncertainties in observations. - -Objectives and approach -======================= - -The main idea of the ESMValTool is to provide a broad suite of diagnostics which can be performed easily when new model simulations are run. The suite of diagnostics needs to be broad enough to reflect the diversity and complexity of Earth System Models, but must also be robust enough to be run routinely or semi-operationally. -In order the address these challenging objectives the ESMValTool is conceived as a framework which allows community contributions to be bound into a coherent framework. - -License -======= - -The ESMValTool is released under the Apache License, version 2.0 and citation -of the ESMValTool paper ("Software Documentation Paper") is kindly requested -upon use alongside with the software doi (to be added for v2) -and version number: - -* Righi et al. 
to be added - -Besides the above citation, users are kindly asked to register any journal -articles (or other scientific documents) that use the software at the -ESMValTool webpage (http://www.esmvaltool.org/). Citing the Software -Documentation Paper and registering your paper(s) will serve to document the -scientific impact of the Software, which is of vital importance for securing -future funding. You should consider this an obligation if you have taken -advantage of the ESMValTool, which represents the end product of considerable -effort by the development team. - -Architecture -============ - -:ref:`fig_schematic` shows a schematic of the ESMValTool architecture: to be written. - -.. _fig_schematic: -.. figure:: ../figures/schematic.png - :align: center - - Schematic of the system architecture. diff --git a/doc/sphinx/source/user_guide2/observations.inc b/doc/sphinx/source/user_guide2/observations.inc deleted file mode 100644 index 227f9e1316..0000000000 --- a/doc/sphinx/source/user_guide2/observations.inc +++ /dev/null @@ -1,76 +0,0 @@ -:: _observations: - -********************************************** -Obtaining and cmorizing observational datasets -********************************************** - -Observational and reanalysis products in the standard CF/CMOR format used in CMIP and required by the ESMValTool are available via the obs4mips (https://esgf-node.llnl.gov/projects/obs4mips/) and ana4mips (https://esgf.nccs.nasa.gov/projects/ana4mips/) proejcts, respectively. Their use is strongly recommended, when possible. - -Other datasets not available in these archives can be obtained by the user from the respective sources and reformatted to the CF/CMOR standard using the cmorizers included in the ESMValTool. The cmorizers are dataset-specific scripts that can be run once to generate a local pool of observational datasets for usage with the ESMValTool. The necessary information to download and process the data is provided in the header of each cmorizing script. These scripts also serve as template to create new cmorizers for datasets not yet included. Note that dataset cmorized for ESMValTool v1 may not be working with v2, due to the much stronger constraints on metadata set by the Iris library. - -To cmorize one or more datasets, run: - -.. code-block:: bash - - cmorize_obs -c [CONFIG_FILE] -o [DATASET_LIST] - -The path to the raw data to be cmorized must be specified in the CONFIG_FILE as RAWOBS. Within this path, the data are expected to be organized in subdirectories corresponding to the data tier: Tier2 for freely-available datasets (other than obs4mips and ana4mips) and Tier3 for restricted datasets (i.e., dataset which requires a registration to be retrieved or provided upon request to the respective contact or PI). The cmorization follows the CMIP5 CMOR tables. The resulting output is saved in the output_dir, again following the Tier structure. The output file names follow the definition given in ``config-developer.yml`` for the ``OBS`` project: ``OBS_[dataset]_[type]_[version]_[mip]_[short_name]_YYYYMM_YYYYMM.nc``, where ``type`` may be ``sat`` (satellite data), ``reanaly`` (reanalysis data), ``ground`` (ground observations), ``clim`` (derived climatologies), ``campaign`` (aircraft campaign). - - -At the moment, cmorize_obs supports Python and NCL scripts. - -A list of the datasets for which a cmorizers is available is provided in the following table. - -.. 
tabularcolumns:: |p{3cm}|p{6cm}|p{3cm}|p{3cm}| - -+-----------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| Dataset | Variables (MIP) | Tier | Script language | -+=======================+======================================================================================================+======+=================+ -| AURA-TES | tro3 (Amon) | 3 | NCL | -+-----------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| CDS-XCH4 | xch4 (Amon) | 3 | NCL | -+-----------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| CDS-XCO2 | xco2 (Amon) | 3 | NCL | -+-----------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| CERES-SYN1deg | rlds, rldscs, rlus, rluscs, rlut, rlutcs, rsds, rsdscs, rsus, rsuscs, rsut, rsutcs (3hr) | 3 | NCL | -| | rlds, rldscs, rlus, rlut, rlutcs, rsds, rsdt, rsus, rsut, rsutcs (Amon) | | | -+-----------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| ERA-Interim | clivi, clt, clwvi, hfds, hur, hus, pr, prw, ps, psl, ta, tas, tauu, tauv, ts, ua, va, wap, zg (Amon) | 3 | NCL | -| | pr, psl, tas, tasmin, tasmax, zg (day), sftlf (fx), tos (Omon) | | | -+-----------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| ESACCI-AEROSOL | abs550aer, od550aer, od550aerStderr, od550lt1aer, od870aer, od870aerStderr (aero) | 2 | NCL | -+-----------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| ESACCI-CLOUD | clivi, clt, cltStderr, clwvi (Amon) | 2 | NCL | -+-----------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| ESACCI-FIRE | burntArea (Lmon) | 2 | NCL | -+-----------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| ESACCI-LANDCOVER | baresoilFrac, cropFrac, grassFrac, shrubFrac, treeFrac (Lmon) | 2 | NCL | -+-----------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| ESACCI-OZONE | toz, tozStderr, tro3prof, tro3profStderr (Amon) | 2 | NCL | -+-----------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| ESACCI-SOILMOISTURE | dos, dosStderr, sm, smStderr (Lmon) | 2 | NCL | -+-----------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| ESACCI-SST | ts, tsStderr (Amon) | 2 | NCL | -+-----------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| GHCN | pr (Amon) | 2 | NCL | -+-----------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| HadCRUT3 | tas, tasa (Amon) | 2 | NCL | 
-+-----------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| HadCRUT4 | tas, tasa (Amon) | 2 | NCL | -+-----------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| HadISST | sic (OImon), tos (Omon), ts (Amon) | 2 | NCL | -+-----------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| MODIS | cliwi, clt, clwvi, iwpStderr, lwpStderr (Amon), od550aer (aero) | 3 | NCL | -+-----------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| NCEP | hur, hus, pr, ta, tas, ua, va, wap, zg (Amon) | 2 | NCL | -| | pr, rlut, ua, va (day) | | | -+-----------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| NIWA-BS | toz, tozStderr (Amon) | 3 | NCL | -+-----------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| PATMOS-x | clt (Amon) | 2 | NCL | -+-----------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| UWisc | clwvi, lwpStderr (Amon) | 3 | NCL | -+-----------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| WOA | no3, o2, po4, si (Oyr), so, thetao (Omon) | 2 | Python | -+-----------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ - diff --git a/doc/sphinx/source/user_guide2/outputdata.inc b/doc/sphinx/source/user_guide2/outputdata.inc deleted file mode 100644 index 4acd1eb531..0000000000 --- a/doc/sphinx/source/user_guide2/outputdata.inc +++ /dev/null @@ -1,22 +0,0 @@ -:: _outputdata: - -***************** -ESMValTool output -***************** - -Preprocessed datasets -===================== -preproc/ - -Diagnostic output -================= -work/ - -Plots -===== -plots/ - -Log files -========= -run/ - diff --git a/doc/sphinx/source/user_guide2/preprocessor.inc b/doc/sphinx/source/user_guide2/preprocessor.inc deleted file mode 100644 index 12965d47df..0000000000 --- a/doc/sphinx/source/user_guide2/preprocessor.inc +++ /dev/null @@ -1,400 +0,0 @@ -:: _preprocessor: - -*********************** -ESMValTool Preprocessor -*********************** -The ESMValTool preprocessor can be used to perform all types of climate data pre-processing needed before indices or diagnostics can be calculated. It is a base component for many other diagnostics and metrics shown on this portal. It can be applied to tailor the climate model data to the need of the user for its own calculations. - -Features of the ESMValTool Climate data pre-processor are: - -* Regridding -* Geographical area selection -* Aggregation of data -* Provenance tracking of the calculations -* Model statistics -* Multi-model mean -* and many more - -Variable derivation -=================== -Documentation of _derive.py - - -Time manipulation -================= -The _time.py module contains the following preprocessor functions: - -* extract_time: Extract a time range from a cube. 
-* extract_season: Extract only the times that occur within a specific season. -* extract_month: Extract only the times that occur within a specific month. -* time_average: Take the weighted average over the time dimension. -* seasonal_mean: Produces a mean for each season (DJF, MAM, JJA, SON) -* annual_mean: Produces an annual or decadal mean. -* regrid_time: Aligns the time axis of each dataset to have common time points and calendars. - -1. extract_time ---------------- - -This function subsets a dataset between two points in times. It removes all -times in the dataset before the first time and after the last time point. -The required arguments are relatively self explanatory: - -* start_year -* start_month -* start_day -* end_year -* end_month -* end_day - -These start and end points are set using the datasets native calendar. -All six arguments should be given as integers - the named month string -will not be accepted. - -See also :func:`esmvaltool.preprocessor.extract_time`. - - -2. extract_season ------------------ - -Extract only the times that occur within a specific season. - -This function only has one argument: `season`. This is the named season to -extract. ie: DJF, MAM, JJA, SON. - -Note that this function does not change the time resolution. If your original -data is in monthly time resolution, then this function will return three -monthly datapoints per year. - -If you want the seasonal average, then this function needs to be combined with -the seasonal_mean function, below. - -See also :func:`esmvaltool.preprocessor.extract_season`. - - -3. extract_month ----------------- - -The function extracts the times that occur within a specific month. -This function only has one argument: `month`. This value should be an integer -between 1 and 12 as the named month string will not be accepted. - -See also :func:`esmvaltool.preprocessor.extract_month`. - - -4. time_average ---------------- - -This functions takes the weighted average over the time dimension. This -function requires no arguments and removes the time dimension of the cube. - -See also :func:`esmvaltool.preprocessor.time_average`. - - -5. seasonal_mean ----------------- - -This function produces a seasonal mean for each season (DJF, MAM, JJA, SON). -Note that this function will not check for missing time points. For instance, -if you are looking at the DJF field, but your datasets starts on January 1st, -the first DJF field will only contain data from January and February. - -We recommend using the extract_time to start the dataset from the following -December and remove such biased initial datapoints. - -See also :func:`esmvaltool.preprocessor.seasonal_mean`. - - -6. annual_mean --------------- - -This function produces an annual or a decadal mean. The only argument is the -decadal boolean switch. When this switch is set to true, this function -will output the decadal averages. - -See also :func:`esmvaltool.preprocessor.annual_mean`. - - -7. regrid_time --------------- - -This function aligns the time points of each component dataset so that the dataset -iris cubes can be subtracted. The operation makes the datasets time points common and -sets common calendars; it also resets the time bounds and auxiliary coordinates to -reflect the artifically shifted time points. Current implementation for monthly -and daily data; the frequency is set automatically from the variable CMOR table -unless a custom frequency is set manually by the user in recipe. 
- - -Area manipulation -================= -The _area.py module contains the following preprocessor functions: - -* extract_region: Extract a region from a cube based on lat/lon corners. -* zonal_means: Calculates the zonal or meridional means. -* average_region: Calculates the average value over a region. -* extract_named_regions: Extract a specific region from in the region cooordinate. - - -1. extract_region ------------------ - -This function masks data outside a rectagular region requested. The boundairies -of the region are provided as latitude and longitude coordinates in the -arguments: - -* start_longitude -* end_longitude -* start_latitude -* end_latitude - -Note that this function can only be used to extract a rectangular region. - -See also :func:`esmvaltool.preprocessor.extract_region`. - - -2. zonal_means --------------- - -The function calculates the zonal or meridional means. While this function is -named `zonal_mean`, it can be used to apply several different operations in -an zonal or meridional direction. -This function takes two arguments: - -* coordinate: Which direction to apply the operation: latitude or longitude -* mean_type: Which operation to apply: mean, std_dev, variance, median, min or max - -See also :func:`esmvaltool.preprocessor.zonal_means`. - - -3. average_region ------------------ - -This function calculates the average value over a region - weighted by the -cell areas of the region. - -This function takes three arguments: -coord1: the name of the coordinate in the first direction. -coord2: the name of the coordinate in the second dimension. -operator: the name of the operation to apply (default: mean). - -While this function is named `average_region`, it can be used to apply several -different operations in the horizonal plane: mean, standard deviation, median -variance, minimum and maximum. - -Note that this function is applied over the entire dataset. If only a specific -region, depth layer or time period is required, then those regions need to be -removed using other preprocessor operations in advance. - -See also :func:`esmvaltool.preprocessor.average_region`. - - -4. extract_named_regions ------------------------- - -This function extract a specific named region from the data. This function -takes the following argument: `regions` which is either a string or a list -of strings of named regions. Note that the dataset must have a `region` -cooordinate which includes a list of strings as values. This function then -matches the named regions against the requested string. - -See also :func:`esmvaltool.preprocessor.extract_named_regions`. - - -Volume manipulation -=================== -The _volume.py module contains the following preprocessor functions: - -* extract_volume: Extract a specific depth range from a cube. -* average_volume: Calculate the volume-weighted average. -* depth_integration: Integrate over the depth dimension. -* extract_transect: Extract data along a line of constant latitude or longitude. -* extract_trajectory: Extract data along a specified trajectory. - - -1. extract_volume ------------------ - -Extract a specific range in the z-direction from a cube. This function -takes two arguments, a minimum and a maximum (`z_min` and `z_max`, -respectively) in the z direction. - -Note that this requires the requested z-coordinate range to be the -same sign as the iris cube. ie, if the cube has z-coordinate as -negative, then z_min and z_max need to be negative numbers. - -See also :func:`esmvaltool.preprocessor.extract_volume`. - - -2. 
average_volume ------------------ - -This function calculates the volume-weighted average across three dimensions, -but maintains the time dimension. The following arguments are required: - -coord1: the name of the coordinate in the first direction. -coord2: the name of the coordinate in the second dimension. - -No depth coordinate is required as this is determined by iris. This -function works best when the fx_files provide the cell volume. - -See also :func:`esmvaltool.preprocessor.average_volume`. - - -3. depth_integration --------------------- - -This function integrate over the depth dimension. This function does a -weighted sum along the z-coordinate, and removes the z direction of the output -cube. This preprocessor takes no arguments. - -See also :func:`esmvaltool.preprocessor.depth_integration`. - - -4. extract_transect -------------------- - -This function extract data along a line of constant latitude or longitude. -This function takes two arguments, although only one is strictly required. -The two arguments are `latitude` and `longitude`. One of these arguments -needs to be set to a float, and the other can then be either ignored or set to -a minimum or maximum value. -Ie: If we set latitude to 0 N and leave longitude blank, it would produce a -cube along the equator. On the other hand, if we set latitude to 0 and then -set longitude to `[40., 100.]` this will produce a transect of the equator -in the indian ocean. - -See also :func:`esmvaltool.preprocessor.extract_transect`. - - -5. extract_trajectory ---------------------- - -This function extract data along a specified trajectory. -The three areguments are: latitudes and longitudes are the coordinates of the -trajectory. - -If two points are provided, the `number_points` argument is used to set a -the number of places to extract between the two end points. - -If more than two points are provided, then -extract_trajectory will produce a cube which has extrapolated the data -of the cube to those points, and `number_points` is not needed. - -Note that this function uses the expensive interpolate method, but it may be -necceasiry for irregular grids. - -See also :func:`esmvaltool.preprocessor.extract_trajectory`. - - -CMORization and dataset-specific fixes -====================================== -Documentation of _reformat.py, check.py and fix.py - -Vertical interpolation -====================== -Documentation of _regrid.py (part 1) - -Land/Sea/Ice Masking -==================== -Documentation of _mask.py (part 1) - -Certain metrics and diagnostics need to be computed and performed on restricted regions of the Globe; ESMValTool supports subsetting the input data on land mass, oceans and seas, ice. This is achived by masking the model data and keeping only the values associated with grid points that correspond to e.g. land mass -or oceans and seas; masking is done either by using standard mask files that have the same grid resolution as the model data (these files are usually produced -at the same time with the model data and are called fx files) or, in the absence of these files, by using Natural Earth masks. Natural Earth masks, even if they are not model-specific, represent a good approximation since their grid resolution is almost always much higher than the model data, and they are constantly updated with changing -geographical features. 
- -In ESMValTool v2 land-seas-ice masking can be done in two places: in the preprocessor, to apply a mask on the data before any subsequent preprocessing step, and before -running the diagnostic, or in the disgnostic phase. We present both these implementations below. - -To mask out seas in the preprocessor step, simply add `mask_landsea:` as a preprocessor step in the `preprocessor` of your choice section of the recipe, example: - -.. code-block:: bash - - preprocessors: - my_masking_preprocessor: - mask_landsea: - mask_out: sea - -The tool will retrieve the corresponding `fx: stfof` type of mask for each of the used variables and apply the mask so that only the land mass points are -kept in the data after applying the mask; conversely, it will retrieve the `fx: sftlf` files when land needs to be masked out. -`mask_out` accepts: land or sea as values. If the corresponding fx file is not found (some models are missing these -type of files; observational data is missing them altogether), then the tool attempts to mask using Natural Earth mask files (that are vectorized rasters). -Note that the resolutions for the Natural Earth masks are much higher than any usual CMIP model: 10m for land and 50m for ocean masks. - -Note that for masking out ice the preprocessor is using a different function, this so that both land and sea or ice can be masked out without -losing generality. To mask ice out one needs to add the preprocessing step much as above: - -.. code-block:: bash - - preprocessors: - my_masking_preprocessor: - mask_landseaice: - mask_out: ice - -To keep only the ice, one needs to mask out landsea, so use that as value for mask_out. As in the case of mask_landsea, the tool will automatically -retrieve the `fx: sftgif` file corresponding the the used variable and extract the ice mask from it. - -At the core of the land/sea/ice masking in the preprocessor are the mask files (whether it be fx type or Natural Earth type of files); these files (bar Natural Earth) -can be retrived and used in the diagnostic phase as well or solely. By specifying the `fx_files:` key in the variable in diagnostic in the recipe, and populating it -with a list of desired files e.g.: - -.. code-block:: bash - - variables: - ta: - preprocessor: my_masking_preprocessor - fx_files: [sftlf, sftof, sftgif, areacello, areacella] - -Such a recipe will automatically retrieve all the `[sftlf, sftof, sftgif, areacello, areacella]`-type fx files for each of the variables that are needed for -and then, in the diagnostic phase, these mask files will be available for the developer to use them as they need to. They `fx_files` attribute of the big `variable` -nested dictionary that gets passed to the diagnostic is, in turn, a dictionary on its own, and members of it can be accessed in the diagnostic through a simple loop over -the 'config' diagnostic variable items e.g.: - -.. 
code-block:: bash - - for filename, attributes in config['input_data'].items(): - sftlf_file = attributes['fx_files']['sftlf'] - areacello_file = attributes['fx_files']['areacello'] - - -Horizontal regridding -===================== -Documentation of _regrid.py (part 2) - -Masking of missing values -========================= -Documentation of _mask.py (part 2) - -Multi-model statistics -====================== -Documentation of_multimodel.py - -Time-area statistics -==================== -Documentation of _area_pp.py and _volume_pp.py - -Information on maximum memory required -====================================== -In the most general case, we can set upper limits on the maximum memory the anlysis will require: - - -Ms = (R + N) x F_eff - F_eff - when no multimodel analysis is performed; -Mm = (2R + N) x F_eff - 2F_eff - when multimodel analysis is performed; - -where - -Ms: maximum memory for non-multimodel module -Mm: maximum memory for multimodel module -R: computational efficiency of module; R is typically 2-3 -N: number of datasets -F_eff: average size of data per dataset where F_eff = e x f x F -where e is the factor that describes how lazy the data is (e = 1 for fully realized data) -and f describes how much the data was shrunk by the immediately previous module eg -time extraction, area selection or level extraction; note that for fix_data f relates only to the time extraction, if data is exact in time (no time selection) f = 1 for fix_data - -so for cases when we deal with a lot of datasets (R + N = N), data is fully realized, assuming an average size of 1.5GB for 10 years of 3D netCDF data, N datasets will require - - -Ms = 1.5 x (N - 1) GB -Mm = 1.5 x (N - 2) GB -======= diff --git a/doc/sphinx/source/user_guide2/recipe.inc b/doc/sphinx/source/user_guide2/recipe.inc deleted file mode 100644 index 23311999e2..0000000000 --- a/doc/sphinx/source/user_guide2/recipe.inc +++ /dev/null @@ -1,5 +0,0 @@ -.. _recipe: - -***************** -ESMValTool recipe -***************** diff --git a/doc/sphinx/source/user_guide2/running.inc b/doc/sphinx/source/user_guide2/running.inc deleted file mode 100644 index 3f63f0dd8f..0000000000 --- a/doc/sphinx/source/user_guide2/running.inc +++ /dev/null @@ -1,30 +0,0 @@ -:: _running: - -********************** -Running the ESMValTool -********************** - -To run ESMValTool, use the command - -.. code:: bash - - esmvaltool -c /path/to/config-user.yml examples/recipe_python.yml - -This will run the example recipe_python.yml. The path to the recipe -can either be the path to a recipe file, or a path relative to the -esmvaltool/recipes directory of your installed ESMValTool. See the chapter -`User config file`_ for an explanation of how to create your own -config-user.yml file. - -To get help on additional commands, please use - -.. code:: bash - - esmvaltool --help - - - -Available diagnostics and metrics -================================= - -See :ref:`recipes` for a description of all available recipes. diff --git a/doc/sphinx/source/utils.rst b/doc/sphinx/source/utils.rst new file mode 100644 index 0000000000..536b78ebee --- /dev/null +++ b/doc/sphinx/source/utils.rst @@ -0,0 +1,459 @@ +.. _utils: + +Utilities +********* + +This section provides information on tools that are useful when developing +ESMValTool. +Tools that are specific to ESMValTool live in the +`esmvaltool/utils `_ +directory, while others can be installed using the usual package managers. + +.. 
_pre-commit: + +Pre-commit +========== + +`pre-commit `__ is a handy tool that can run many +tools for checking code quality with a single command. +Usually it is used just before committing, to avoid accidentally committing +mistakes. +It knows which tool to run for each filetype, and therefore provides +a convenient way to check your code! + + +To run ``pre-commit`` on your code, go to the ESMValTool directory +(``cd ESMValTool``) and run + +:: + + pre-commit run + +By default, pre-commit will only run on the files that have been changed, +meaning those that have been staged in git (i.e. after +``git add your_script.py``). + +To make it only check some specific files, use + +:: + + pre-commit run --files your_script.py + +or + +:: + + pre-commit run --files your_script.R + +Alternatively, you can configure ``pre-commit`` to run on the staged files before +every commit (i.e. ``git commit``), by installing it as a `git hook `__ using + +:: + + pre-commit install + +Pre-commit hooks inspect the code that is about to be committed. The +commit will be aborted if files are changed or if any issues are found that +cannot be fixed automatically. Some issues cannot be fixed (easily), so to +bypass the check, run + +:: + + git commit --no-verify + +or + +:: + + git commit -n + +or uninstall the pre-commit hook + +:: + + pre-commit uninstall + + +Note that the configuration of pre-commit lives in +`.pre-commit-config.yaml `_.
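+That file follows the standard ``pre-commit`` configuration format; a minimal
+sketch is shown below (the repository and hook are illustrative, not the
+actual ESMValTool configuration):
+
+.. code-block:: yaml
+
+   repos:
+     - repo: https://github.com/PyCQA/flake8
+       rev: "7.1.0"
+       hooks:
+         - id: flake8
+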
.. _nclcodestyle: + +nclcodestyle +============ + +A tool for checking the style of NCL code, based on pycodestyle. +Install ESMValTool in development mode (``pip install -e '.[develop]'``) to make it available. +To use it, run + +.. code-block:: bash + + nclcodestyle /path/to/file.ncl + +.. _recipe_test_tool: + +Colormap samples +================ +Tool to generate colormap samples for ESMValTool's default Python and NCL colormaps. + +Run + +.. code-block:: bash + + esmvaltool colortables python + +or + +.. code-block:: bash + + esmvaltool colortables ncl + +to generate the samples. + +.. _running_multiple_recipes: + +Running multiple recipes +======================== + +It is possible to run more than one recipe in one go. + +This can for example be achieved by using ``rose`` and/or ``cylc``, tools +that may be available at your local HPC cluster. + +In case neither ``rose`` nor ``cylc`` is available at your HPC cluster, +it is possible to automatically generate job submission scripts, as well as a summary of the +job outputs, using the scripts available in +`esmvaltool/utils/batch-jobs `__. + +Using cylc +---------- + +A cylc suite for running all recipes is available in +`esmvaltool/utils/testing/regression `__. +This suite is configured to work with versions of cylc older than 8.0.0. + +To prepare for using this tool: + +#. Log in to a system that uses `slurm `_ +#. Make sure the required CMIP and observational datasets are available and + their ``rootpath`` and ``drs`` are properly set up in the :ref:`configuration + ` +#. Make sure the required auxiliary data is available (see :ref:`recipe documentation `) +#. Install ESMValTool + +Next, get started with `cylc `_: + +#. Run ``module load cylc`` +#. Register the suite with cylc: ``cylc register run-esmvaltool-recipes ~/ESMValTool/esmvaltool/utils/testing/regression`` +#. Edit the suite if needed; this allows e.g. choosing which recipes will be run +#. Validate the suite with ``cylc validate run-esmvaltool-recipes --verbose``; this will e.g. list the recipes in the suite +#. Run all recipes with ``cylc run run-esmvaltool-recipes`` +#. View progress with ``cylc log run-esmvaltool-recipes``; use e.g. ``cylc log run-esmvaltool-recipes examples-recipe_python_yml.1 --stdout`` to see the log of an individual ESMValTool run. Once the suite has finished running, you will see the message "WARNING - suite stalled" in the log. +#. Stop the cylc run once everything is done with ``cylc stop run-esmvaltool-recipes``. + +To generate an overview page of the recipe runs, use the ``summarize.py`` :ref:`utility script `. + +.. _utils_batch_jobs: + +Using the scripts in `utils/batch-jobs` +--------------------------------------- + +In `utils/batch-jobs `_, +you can find a script to generate slurm submission scripts for all available recipes in ESMValTool, +as well as a script to parse the job outputs. + +.. _utils_generate: + +Using `generate.py` +................... + +The script `generate.py `_ +is a simple Python script that creates slurm submission scripts and, +if configured, submits them to the HPC cluster. It has been tested on `DKRZ's Levante cluster `_. + +The following parameters have to be set in the script in order to make it run: + +* ``env``, *str*: Name of the conda environment in which `esmvaltool` is installed. +* ``mail``, *bool*: Whether or not to receive mail notifications when a submitted job fails or finishes successfully. Default is ``False``. +* ``submit``, *bool*: Whether or not to automatically submit the job after creating the launch script. Default value is ``False``. +* ``account``, *str*: Name of the DKRZ account to which the job will be billed. +* ``outputs``, *str*: Name of the directory in which the job outputs (.out and .err files) will be saved. The outputs will be saved in `/home/user/`. +* ``conda_path``, *str*: Full path to the `miniforge3/etc/profile.d/conda.sh` script. + +Optionally, the following parameters can be edited: + +* ``config_dir``, *str*: Path to the :ref:`configuration directory `, by default ``~/.config/esmvaltool/``. +* ``partition``, *str*: Name of the DKRZ partition used to run jobs. Default is ``interactive`` to minimize computing cost compared to ``compute``, for which nodes cannot be shared. +* ``memory``, *str*: Amount of memory requested for each run. Default is ``64G``, to allow running four recipes in parallel on the same node. +* ``time``, *str*: Time limit. Default is ``04:00:00`` to increase the job priority. Jobs can run for up to 8 hours and 12 hours on the compute and interactive partitions, respectively. +* ``default_max_parallel_tasks``, *int*: Default is ``8``, which works for most recipes. For other cases, an entry needs to be added to the ``MAX_PARALLEL_TASKS`` dictionary (see below).
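+As an illustration, the edited settings at the top of the script could look
+as follows (every value below is a placeholder, not a recommendation):
+
+.. code-block:: python
+
+   env = 'esmvaltool'    # conda environment with ESMValTool installed
+   mail = False          # no mail notifications
+   submit = False        # only generate the scripts, do not submit them
+   account = 'ab1234'    # hypothetical DKRZ project account
+   outputs = 'job_logs'  # .out and .err files end up in /home/user/job_logs
+   conda_path = '/home/user/miniforge3/etc/profile.d/conda.sh'
+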
The script will generate a submission script for all recipes, using by default the ``interactive`` queue and a time limit of 4h. In case a recipe +requires additional resources, these can be defined in the ``SPECIAL_RECIPES`` dictionary. The recipe name has to be given as a ``key`` whose +value is another dictionary. +The latter is used to specify the ``partition`` in which to submit the recipe, the new ``time`` limit, and other ``memory`` requirements +given by the slurm flags ``--mem``, ``--constraint`` or ``--ntasks``. In general, an entry in ``SPECIAL_RECIPES`` should be set as: + +.. code-block:: python + + SPECIAL_RECIPES = { + 'recipe_name': { + 'partition': '#SBATCH --partition=', + 'time': '#SBATCH --time=', + 'memory': '#SBATCH --mem=' # --constraint or --ntasks can be used instead. + }, + } + +Some recipes can only be run with a number of tasks less than ``default_max_parallel_tasks``, for various reasons (memory issues, diagnostic issues, use of CMIP3 data). +These recipes need to be added to the ``MAX_PARALLEL_TASKS`` dictionary with a specific ``max_parallel_tasks`` value.
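+Analogous to the ``SPECIAL_RECIPES`` example above, a minimal sketch of such
+an entry is shown below (the recipe name and value are illustrative):
+
+.. code-block:: python
+
+   MAX_PARALLEL_TASKS = {
+       'recipe_that_needs_fewer_tasks': 1,  # hypothetical recipe name
+   }
+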
Note that the script uses standard SLURM settings that run most recipes while minimizing the computational cost of the jobs, and +tailored runtime settings for resource-intensive recipes. +It is only necessary to edit this script for recipes that have been added since the last release and cannot be run with the default settings. + +If ``submit`` is set to ``True`` but you want to exclude certain recipes from being submitted, their names can be added to the ``exclude`` list: + +.. code-block:: python + + exclude = ['recipe_to_be_excluded_1', 'recipe_to_be_excluded_2'] + +.. _utils_parse: + +Using `parse_recipes_outputs` +............................. + +You can run this script (simply as a standalone Python script) after all recipes have been run, to gather a bird's-eye view +of the run status of each recipe; running the script provides you with a Markdown-formatted list of recipes that succeeded, +recipes that failed due to a diagnostic error, and recipes that failed due to missing data (the two most common causes of +recipe run failure). You should provide the location of the output log files from SLURM (``*.out`` and ``*.err``) to the +script, as well as a list of all available recipes. To generate the list, run the command: + +.. code-block:: bash + + for recipe in $(esmvaltool recipes list | grep '\.yml$'); do echo $(basename "$recipe"); done > all_recipes.txt + +To keep the script execution fast, it is recommended to use ``log_level: info`` in the configuration so that SLURM +output files remain small. + +.. _overview_page: + +Overview of recipe runs +======================= + +To create overview webpages of a set of recipe runs, run: + +.. code-block:: bash + + python esmvaltool/utils/testing/regression/summarize.py ~/esmvaltool_output/ + +This will generate two HTML files: + +- ``index.html`` that displays a summary of each recipe run, with a title and + a representative plot, a short description of the aim of the recipe, and + links to each individual run. +- ``debug.html`` that provides an overview table of successful and failed runs + with links to each individual run, and the computing resources used for each run. + +.. _compare_recipe_runs: + +Comparing recipe runs +===================== + +A command-line tool is available for comparing one or more recipe runs to +known good previous run(s). +This tool uses `xarray `_ to compare NetCDF +files and difference hashing provided by +`imagehash `_ to compare PNG images. +All other file types are compared byte for byte.
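+To illustrate the idea behind the image comparison, a minimal sketch of
+difference hashing with ``imagehash`` is shown below (this is not the tool's
+actual code; the file names are placeholders):
+
+.. code-block:: python
+
+   import imagehash
+   from PIL import Image
+
+   # Difference hashes of visually similar images are close in Hamming distance.
+   hash_reference = imagehash.dhash(Image.open('reference.png'))
+   hash_result = imagehash.dhash(Image.open('result.png'))
+   print(hash_reference - hash_result)  # 0 means (nearly) identical images
+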
To use it, first install the package imagehash_: + +.. code-block:: bash + + pip install imagehash + +Next, go to the location where ESMValTool is installed and run + +.. code-block:: bash + + python esmvaltool/utils/testing/regression/compare.py ~/reference_output/ ~/output/recipe_python_20220310_180417/ + +where the first argument is a reference run or a directory containing such +runs, and the second and following arguments are directories with runs to compare +to the reference run(s). + +To compare all results from the current version to the previous version, use e.g.: + +.. code-block:: bash + + python esmvaltool/utils/testing/regression/compare.py /shared/esmvaltool/v2.4.0 /shared/esmvaltool/v2.5.0 + +To get more information on how a result is different, run the tool with the +``--verbose`` flag. + +Testing recipe settings +======================= + +A tool for generating recipes with various diagnostic settings, to test whether those work. +Install ESMValTool in development mode (``pip install -e '.[develop]'``) to make it available. +To use it, run + +.. code-block:: bash + + test_recipe --help + + +.. _draft_release_notes.py: + +draft_release_notes.py +====================== + +`draft_release_notes.py `__ +is a script for drafting release notes based on the titles and labels of +the GitHub pull requests that have been merged since the previous release. + +To use it, install the package pygithub_: + +.. code-block:: bash + + pip install pygithub + +Create a `GitHub access token`_ (leave all boxes for additional +permissions unchecked) and store it in the file ``~/.github_api_key``. + +Edit the script to update the date and time of the previous release, and run +the script: + +.. code-block:: bash + + python esmvaltool/utils/draft_release_notes.py ${REPOSITORY} + +``REPOSITORY`` can be either ``esmvalcore`` or ``esmvaltool``, depending on the +release notes you want to create. + +Review the resulting output (in ``.rst`` format) and if anything needs changing, +change it on GitHub and re-run the script until the changelog looks acceptable. +In particular, make sure that pull requests have the correct label, so they are +listed in the correct category. +Finally, copy and paste the generated content at the top of the changelog. + +Converting Version 1 Namelists to Version 2 Recipes +=================================================== + +The +`xml2yml `_ +converter can turn the old XML namelists into new-style yml +recipes. It is implemented as an XSLT stylesheet that needs a processor +that is XSLT 2.0 capable. With this, you simply process your old +namelist with the stylesheet xml2yml.xsl to produce a new yml recipe. + +After the conversion you need to manually check the mip information in +the variables! Also, check the caveats below! + +Howto +----- + +One freely available processor is the Java-based +`saxon `__. You can download the free HE +edition +`here `__. +Unpack the zip file into a new directory. Then, provided you have Java +installed, you can convert your namelist simply with: + +:: + + java -jar $SAXONDIR/saxon9he.jar -xsl:xml2yml.xsl -s:namelist.xml -o:recipe.yml + +Caveats/Known Limitations +------------------------- + +- At the moment, not all model schemes (OBS, CMIP5, CMIP5_ETHZ…) are + supported. They are, however, relatively easy to add, so if you need + help adding a new one, please let me know! +- The documentation section (namelist_summary in the old file) is not + automatically converted. +- In version 1, one could specify an exclude, similar to the reference + model. This is no longer possible; instead, include + the models with another ``additional_models`` tag in the variable + section. That conversion is not performed by this tool. + +Authored by **Klaus Zimmermann**; direct questions and comments to +klaus.zimmermann@smhi.se + +.. _GitHub access token: https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line +.. _pygithub: https://pygithub.readthedocs.io/en/latest/introduction.html + + +Recipe filler +============= + +If you need to fill in a blank recipe with additional datasets, you can do that with +the command `recipe_filler`. This tool obtains a set of additional datasets for a +given blank recipe, and you can supply an arbitrary number of data parameters. The blank recipe +should contain, at the very least, a list of diagnostics, each with their variable(s). +Example of running the tool: + +.. code-block:: bash + + recipe_filler recipe.yml + +where `recipe.yml` is the recipe that needs to be filled with additional datasets; a minimal +example of this recipe could be: + +.. code-block:: yaml + + diagnostics: + diagnostic: + variables: + ta: + mip: Amon # required + start_year: 1850 # required + end_year: 1900 # required + + +Key features +------------ + +- you can add as many variable parameters as are needed; if not added, the + tool will use the ``"*"`` wildcard and find all available combinations; +- you can restrict the number of datasets to be looked for with the ``dataset:`` + key for each variable; pass a list of datasets as its value, e.g. + ``dataset: [MPI-ESM1-2-LR, MPI-ESM-LR]``; +- you can specify a pair of experiments, e.g. ``exp: [historical, rcp85]`` + for each variable; this will look for each available dataset per experiment + and assemble an aggregated data stretch from each experiment to cover + the total data length specified by ``start_year`` and ``end_year``; this is + equivalent to ESMValTool's syntax for multiple experiments; this option needs + an ensemble to be declared explicitly and will return no entry if there are gaps in the data; +- ``start_year`` and ``end_year`` are required and are used to filter out + datasets that don't have data in the interval; as noted above, the tool will not + return datasets with partial coverage from ``start_year`` to ``end_year``; + if you want all possible years, and hence no filtering on years, just use ``"*"`` + for the start and end years; +- ``config-user: rootpath: CMIPX`` may be a list; rootpath lists are supported; +- all major DRS paths (including ``default``, ``BADC``, ``ETHZ`` etc.) are supported; +- speedup is achieved through CMIP mip table lookups, so ``mip`` is required in the recipe. + +Caveats +------- + +- the tool doesn't yet work with derived variables; it will not return any available datasets; +- operation is restricted to CMIP data; OBS lookup is not available yet. + + +Extracting a list of input files from the provenance +==================================================== + +There is a small tool available to extract just the list of input files used to generate +a figure from the ``*_provenance.xml`` files (see :ref:`recording-provenance` for more +information). + +To use it, install ESMValTool from source and run + +.. code-block:: bash + + python esmvaltool/utils/prov2files.py /path/to/result_provenance.xml + +The tool is based on the `prov `_ +library, a useful library for working with provenance files. +With minor adaptations, this script could also print out global attributes +of the input NetCDF files, e.g. the ``tracking_id``. diff --git a/docker/Dockerfile b/docker/Dockerfile index 144134ac63..9670028c7b 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -1,19 +1,15 @@ -FROM continuumio/miniconda3 +# To build this container, go to ESMValTool root folder and execute: +# docker build -t esmvaltool:latest . 
-f docker/Dockerfile +FROM condaforge/miniforge3 -# update the conda packages -RUN conda update -y conda pip +WORKDIR /src/ESMValTool +COPY environment.yml . +RUN mamba update -y conda mamba pip && mamba env create --name esmvaltool --file environment.yml && conda clean --all -y -# install development tools -RUN apt-get update -y && apt-get install -y \ - build-essential \ - curl \ - unzip +# Make RUN commands use the new environment: +SHELL ["conda", "run", "--name", "esmvaltool", "/bin/bash", "-c"] -# install environment packages -RUN conda install -c conda-forge -c esmvalgroup -c birdhouse esmvaltool +COPY . . +RUN pip install --no-cache . && esmvaltool install Julia -# run tests -RUN esmvaltool -h - -ENTRYPOINT ["esmvaltool"] -CMD ["-h"] +ENTRYPOINT ["conda", "run", "--name", "esmvaltool", "esmvaltool"] diff --git a/docker/Dockerfile.dev b/docker/Dockerfile.dev new file mode 100644 index 0000000000..b7204abaa7 --- /dev/null +++ b/docker/Dockerfile.dev @@ -0,0 +1,14 @@ +# To build this container, go to ESMValTool root folder and execute: +# docker build -t esmvaltool:development . -f docker/Dockerfile.dev +FROM condaforge/miniforge3 + +WORKDIR /src/ESMValTool +RUN apt update && DEBIAN_FRONTEND=noninteractive apt install -y curl git ssh && apt clean +COPY environment.yml . +RUN mamba update -y conda mamba pip && mamba env create --name esmvaltool --file environment.yml && conda clean --all -y + +# Make RUN commands use the new environment: +SHELL ["conda", "run", "--name", "esmvaltool", "/bin/bash", "-c"] + +COPY . . +RUN pip install --no-cache .[test] && esmvaltool install Julia && pip uninstall esmvaltool -y diff --git a/docker/Dockerfile.exp b/docker/Dockerfile.exp new file mode 100644 index 0000000000..062a64b8ab --- /dev/null +++ b/docker/Dockerfile.exp @@ -0,0 +1,18 @@ +# To build this container, go to ESMValTool root folder and execute: +# docker build -t esmvaltool:experimental . -f docker/Dockerfile.exp +FROM condaforge/miniforge3 +RUN apt update && apt install -y git && apt clean + +WORKDIR /src/ESMValTool +COPY environment.yml . +RUN mamba update -y conda mamba pip \ + && mamba env create --name esmvaltool --file environment.yml \ + && conda clean --all -y + +SHELL ["conda", "run", "--name", "esmvaltool", "/bin/bash", "-c"] + +COPY . . +RUN pip install --no-cache git+https://github.com/ESMValGroup/ESMValCore.git#egg=ESMValCore . \ + && esmvaltool install Julia + +ENTRYPOINT ["conda", "run", "--name", "esmvaltool", "esmvaltool"] diff --git a/docker/docker_guide.md b/docker/docker_guide.md deleted file mode 100644 index 33ae75051e..0000000000 --- a/docker/docker_guide.md +++ /dev/null @@ -1,61 +0,0 @@ -# Running ESMValTool packaged in the docker container - -## Install docker -Use official [Docker documentation](https://docs.docker.com/engine/installation/) - -## Get docker image -All available docker images are listed on -[Docker Hub page](https://hub.docker.com/r/esmvalgroup/esmvaltool/tags/) -Before running container you need to pull the image to your machine. - -```sh -docker pull esmvalgroup/esmvaltool:2.0 -``` - -## Running ESMValTool in the container -To run ESMValTool in the docker container you need to mount -necessary directories and provide the name of the namelist -to execute. ESMValTool expects specific directory structure -and directories for input and output files need to be created -in advance. Below are few user scenerios. - -1. Input and output directories are located in the same directory. 
-```sh -docker run -v :/data/ esmvalgroup/esmvaltool:2.0 nml/.xml -``` -Replace: -* `` with name of the namelist you want to execute -* `` with a location on your local machine -(docker host). It has to be absolute path. - -Content of the `` -directory has to have following structure: -```sh -tree - -├── in # input direcotry -│ ├── datamodel # model data -│ ├── obsdata # observations -│ └── rawobsdata # raw observations (unprocessed) -└── out # output direcotry - ├── climo # path for intermediate files (netCDF) - ├── plots # generated plots - ├── regridding # intermediate files generated by regridding process - └── work # output data path (netCDF) -``` - -2. Input and output directories are in different locations. -If input and output directories are in different locations, -they need to be mounted separately. -```sh -docker run \ - -v :/data/in/datamodel \ - -v :/data/in/obsdata \ - -v :/data/in/rawobsdata \ - -v :/data/out/climo \ - -v :/data/out/plots \ - -v :/data/out/regridding \ - -v :/data/out/work \ - esmvalgroup/esmvaltool: nml/.xml -``` -Replace paths in angle brackets `< >` with appropriate values. diff --git a/environment.yml b/environment.yml index fd27f6ea1f..270f0f6ecd 100644 --- a/environment.yml +++ b/environment.yml @@ -1,37 +1,140 @@ --- -# conda-forge after conda switch to gcc7 (gxx conda compiler, Jan 2019) -# Notes on ncl-cdo-R packages relation for future releases: -# cdo=1.9.6 works only with ncl=6.6.2 but that -# implies the use of R packages from the r channel -# and those packages are R=3.2.2 << 3.5.1 and are -# obsolete (installing ncl=6.6.2 automatically removes -# the conda-forge-installed R (3.5.1) packages -# These issues are a direct consequence of changes in conda-forge -# related to the switch to gcc7(gxx-conda); hopefully they will be -# fixed in future releases of conda; also note that gxx_linux-64 may have -# to be added in the dependency list when switching from cf201901. -# Why use conda-forge/label/cf201901: -# https://github.com/fermi-lat/Fermitools-conda/wiki/User-Notes -# https://github.com/NCAR/ncl/issues/87 - name: esmvaltool channels: - - conda-forge/label/cf201901 + # The release candidate channel should only be activated + # during the rc phase right before the next release of the + # ESMValCore. + # - conda-forge/label/esmvalcore_rc + - conda-forge + - nodefaults dependencies: - # Python packages that cannot be installed from PyPI: - - esmpy - - iris>=2.2 - - matplotlib<3 # Can be installed from PyPI, but is a dependency of iris and should be pinned. - - python-stratify - - xarray # Can be installed from PyPI, but here to get a consistent set of depencies with iris. 
- # Non-Python dependencies - - graphviz + - aiohttp + - cartopy <0.24 # https://github.com/ESMValGroup/ESMValTool/issues/3767 + - cdo >=2.3.0 + - cdsapi + - cf-units + - cfgrib + - cftime + - cmocean + - curl <8.10 + - cython + - dask !=2024.8.0 # https://github.com/dask/dask/issues/11296 + - distributed + - ecmwf-api-client + - eofs + - esmpy # <8.6 safe https://github.com/SciTools/iris-esmf-regrid/issues/415 + - esmvalcore 2.11.* + - fiona + - fire + - fsspec + - gdal >=3.9.0 + - importlib_metadata <8 # https://github.com/ESMValGroup/ESMValTool/issues/3699 only for Python 3.10/11 and esmpy<8.6 + - iris >=3.6.1 + - iris-esmf-regrid >=0.10.0 # github.com/SciTools-incubator/iris-esmf-regrid/pull/342 + - jinja2 + - joblib + - lime + - mapgenerator >=1.0.5 + - matplotlib-base + - natsort + - nc-time-axis + - netCDF4 + - numba + - numpy !=1.24.3,<2.0 # severe masking bug + - openpyxl + - packaging + - pandas==2.1.4 # unpin when ESMValCore released with https://github.com/ESMValGroup/ESMValCore/pull/2529 + - pip !=21.3 + - progressbar2 + - prov + - psyplot >=1.5.0 + - psy-maps >=1.5.0 + - psy-reg >=1.5.0 + - psy-simple >=1.5.0 + - pyproj >=2.1 + - pys2index # only from conda-forge + - python >=3.10,<3.13 + - python-cdo + - python-dateutil + - pyyaml + - rasterio >=1.3.10 + - requests + - ruamel.yaml + - scikit-image + - scikit-learn >= 1.4.0 # github.com/ESMValGroup/ESMValTool/issues/3504 + - scipy + - seaborn + - seawater + - shapely >=2.0.2 + - xarray >=0.12.0 + - xesmf >=0.7.1 + - xgboost >1.6.1 # github.com/ESMValGroup/ESMValTool/issues/2779 + - xlsxwriter + - zarr + # Python packages needed for unit testing + - flake8 >=6 + - pytest >=3.9,!=6.0.0rc1,!=6.0.0 + - pytest-cov + - pytest-env + - pytest-html !=2.1.0 + - pytest-metadata >=1.5.1 + - pytest-mock + - pytest-xdist + # Python packages needed for building docs + - autodocsumm >=0.2.2 + - nbsphinx + - sphinx >=6.1.3 + - pydata-sphinx-theme + # Python packages needed for development + - codespell ==2.3.0 + - docformatter ==1.7.5 + - imagehash + - isort ==5.13.2 + - pre-commit + - prospector >=1.12 # earliest support for Python 3.12 + - pyroma + # - vprof not on conda-forge + - yamllint ==1.35.1 + - yapf ==0.32.0 + + # NCL and dependencies + - ncl >=6.6.2 + - cdo + - imagemagick + - nco + + # R and dependencies - cdo + - r-base >=3.5,<4.3.0 + - r-abind + - r-akima + - r-climdex.pcic + - r-climprojdiags + - r-docopt + - r-dotcall64 + - r-functional + - r-ggplot2 + - r-gridextra + - r-logging + - r-mapproj + - r-maps + - r-multiapply + - r-ncdf4 + - r-ncdf4.helpers + - r-pcict + - r-plyr + - r-rcolorbrewer + - r-rcpp + - r-s2dverification + - r-snow + - r-spei + - r-udunits2 + - r-yaml + # R packages needed for development + - r-git2r # dependency of lintr + - r-lintr ==3.1.2 + - r-styler ==1.10.3 - # Multi language support: - - ncl>=6.5.0 - - jasper!=1.900.31 # pinned NCL dependency - - r-base - - libunwind # Needed for Python3.7+ - # - julia>=1.0.0 # The Julia package on conda is apparently broken + # Julia (dependencies installed by separate script) + - julia diff --git a/environment_osx.yml b/environment_osx.yml new file mode 100644 index 0000000000..8285b43ecd --- /dev/null +++ b/environment_osx.yml @@ -0,0 +1,98 @@ +--- +name: esmvaltool +channels: + # The release candidate channel should only be activated + # during the rc phase right before the next release of the + # ESMValCore. 
+ # - conda-forge/label/esmvalcore_rc + - conda-forge + - nodefaults + +dependencies: + - aiohttp + - cartopy <0.24 # https://github.com/ESMValGroup/ESMValTool/issues/3767 + - cdo >=2.3.0 + - cdsapi + - cf-units + - cfgrib + - cftime + - cmocean + - cython + - dask !=2024.8.0 # https://github.com/dask/dask/issues/11296 + - distributed + - ecmwf-api-client + - eofs + - esmpy # <8.6 safe https://github.com/SciTools/iris-esmf-regrid/issues/415 + - esmvalcore 2.11.* + - fiona + - fire + - fsspec + - gdal >=3.9.0 + - importlib_metadata <8 # https://github.com/ESMValGroup/ESMValTool/issues/3699 only for Python 3.10/11 and esmpy<8.6 + - iris >=3.6.1 + - iris-esmf-regrid >=0.10.0 # github.com/SciTools-incubator/iris-esmf-regrid/pull/342 + - jinja2 + - joblib + - lime + - mapgenerator >=1.0.5 + - matplotlib-base + - natsort + - nc-time-axis + - netCDF4 + - numba + - numpy !=1.24.3,<2.0 # severe masking bug + - openpyxl + - packaging + - pandas==2.1.4 # unpin when ESMValCore released with https://github.com/ESMValGroup/ESMValCore/pull/2529 + - pip !=21.3 + - progressbar2 + - prov + - psyplot >=1.5.0 + - psy-maps >=1.5.0 + - psy-reg >=1.5.0 + - psy-simple >=1.5.0 + - pyproj >=2.1 + - pys2index >=0.1.5 # only from conda-forge; https://github.com/ESMValGroup/ESMValTool/pull/3792 + - python >=3.10,<3.13 + - python-cdo + - python-dateutil + - pyyaml + - rasterio >=1.3.10 + - requests + - ruamel.yaml + - scikit-image + - scikit-learn >= 1.4.0 # github.com/ESMValGroup/ESMValTool/issues/3504 + - scipy + - seaborn + - seawater + - shapely >=2.0.2 + - xarray >=0.12.0 + - xesmf >=0.7.1 + - xgboost >1.6.1 # github.com/ESMValGroup/ESMValTool/issues/2779 + - xlsxwriter + - zarr + # Python packages needed for unit testing + - flake8 >=6 + - pytest >=3.9,!=6.0.0rc1,!=6.0.0 + - pytest-cov + - pytest-env + - pytest-html !=2.1.0 + - pytest-metadata >=1.5.1 + - pytest-mock + - pytest-xdist + # Python packages needed for building docs + - autodocsumm >=0.2.2 + - nbsphinx + - sphinx >=6.1.3 + - pydata-sphinx-theme + # Python packages needed for development + - codespell ==2.3.0 + - docformatter ==1.7.5 + - imagehash + - isort ==5.13.2 + - pre-commit + - prospector >=1.12 # earliest support for Python 3.12 + - pyroma + # - vprof not on conda-forge + - yamllint ==1.35.1 + - yapf ==0.32.0 diff --git a/esmvaltool/__init__.py b/esmvaltool/__init__.py index db052f292e..6f3375ec51 100644 --- a/esmvaltool/__init__.py +++ b/esmvaltool/__init__.py @@ -1,12 +1,13 @@ -import logging -import os +"""ESMValTool diagnostics package.""" +from importlib.metadata import PackageNotFoundError, version -from ._version import __version__ +try: + __version__ = version("ESMValTool") +except PackageNotFoundError as exc: + raise PackageNotFoundError( + "ESMValTool package not found, please run `pip install -e .` before " + "importing the package.") from exc -logger = logging.getLogger(__name__) -logger.addHandler(logging.NullHandler()) - -def get_script_root(): - """Return the location of the ESMValTool installation.""" - return os.path.abspath(os.path.dirname(__file__)) +class ESMValToolDeprecationWarning(UserWarning): + """Custom deprecation warning.""" diff --git a/esmvaltool/_config.py b/esmvaltool/_config.py deleted file mode 100644 index 221c06d026..0000000000 --- a/esmvaltool/_config.py +++ /dev/null @@ -1,218 +0,0 @@ -"""ESMValTool configuration.""" -import datetime -import logging -import logging.config -import os -import time -from distutils.version import LooseVersion - -import iris -import six -import yaml - -from .cmor.table import 
read_cmor_tables - -logger = logging.getLogger(__name__) - -CFG = {} -CFG_USER = {} - - -def use_legacy_iris(): - """Return True if legacy iris is used.""" - return LooseVersion(iris.__version__) < LooseVersion("2.0.0") - - -def read_config_user_file(config_file, recipe_name): - """Read config user file and store settings in a dictionary.""" - with open(config_file, 'r') as file: - cfg = yaml.safe_load(file) - - # set defaults - defaults = { - 'write_plots': True, - 'write_netcdf': True, - 'compress_netcdf': False, - 'exit_on_warning': False, - 'max_data_filesize': 100, - 'output_file_type': 'ps', - 'output_dir': './output_dir', - 'auxiliary_data_dir': './auxiliary_data', - 'save_intermediary_cubes': False, - 'remove_preproc_dir': False, - 'max_parallel_tasks': 1, - 'run_diagnostic': True, - 'profile_diagnostic': False, - 'config_developer_file': None, - 'drs': {}, - } - - for key in defaults: - if key not in cfg: - logger.info( - "No %s specification in config file, " - "defaulting to %s", key, defaults[key]) - cfg[key] = defaults[key] - - cfg['output_dir'] = _normalize_path(cfg['output_dir']) - cfg['auxiliary_data_dir'] = _normalize_path(cfg['auxiliary_data_dir']) - - cfg['config_developer_file'] = _normalize_path( - cfg['config_developer_file']) - - for key in cfg['rootpath']: - root = cfg['rootpath'][key] - if isinstance(root, six.string_types): - cfg['rootpath'][key] = [_normalize_path(root)] - else: - cfg['rootpath'][key] = [_normalize_path(path) for path in root] - - # insert a directory date_time_recipe_usertag in the output paths - now = datetime.datetime.utcnow().strftime("%Y%m%d_%H%M%S") - new_subdir = '_'.join((recipe_name, now)) - cfg['output_dir'] = os.path.join(cfg['output_dir'], new_subdir) - - # create subdirectories - cfg['preproc_dir'] = os.path.join(cfg['output_dir'], 'preproc') - cfg['work_dir'] = os.path.join(cfg['output_dir'], 'work') - cfg['plot_dir'] = os.path.join(cfg['output_dir'], 'plots') - cfg['run_dir'] = os.path.join(cfg['output_dir'], 'run') - - # Save user configuration in global variable - for key, value in six.iteritems(cfg): - CFG_USER[key] = value - - # Read developer configuration file - cfg_developer = read_config_developer_file(cfg['config_developer_file']) - for key, value in six.iteritems(cfg_developer): - CFG[key] = value - read_cmor_tables(CFG) - - return cfg - - -def get_config_user_file(): - """Return user configuration dictionary.""" - return CFG_USER - - -def _normalize_path(path): - """Normalize paths. - - Expand ~ character and environment variables and convert path to absolute. 
- - Parameters - ---------- - path: str - Original path - - Returns - ------- - str: - Normalized path - - """ - if path is None: - return None - return os.path.abspath(os.path.expanduser(os.path.expandvars(path))) - - -def read_config_developer_file(cfg_file=None): - """Read the developer's configuration file.""" - if cfg_file is None: - cfg_file = os.path.join( - os.path.dirname(__file__), - 'config-developer.yml', - ) - - with open(cfg_file, 'r') as file: - cfg = yaml.safe_load(file) - - return cfg - - -def configure_logging(cfg_file=None, output=None, console_log_level=None): - """Set up logging.""" - if cfg_file is None: - cfg_file = os.path.join( - os.path.dirname(__file__), 'config-logging.yml') - - if output is None: - output = os.getcwd() - - cfg_file = os.path.abspath(cfg_file) - with open(cfg_file) as file_handler: - cfg = yaml.safe_load(file_handler) - - log_files = [] - for handler in cfg['handlers'].values(): - if 'filename' in handler: - if not os.path.isabs(handler['filename']): - handler['filename'] = os.path.join(output, handler['filename']) - log_files.append(handler['filename']) - if console_log_level is not None and 'stream' in handler: - if handler['stream'] in ('ext://sys.stdout', 'ext://sys.stderr'): - handler['level'] = console_log_level.upper() - - logging.config.dictConfig(cfg) - logging.Formatter.converter = time.gmtime - logging.captureWarnings(True) - - return log_files - - -def get_project_config(project): - """Get developer-configuration for project.""" - logger.debug("Retrieving %s configuration", project) - return CFG[project] - - -def get_institutes(variable): - """Return the institutes given the dataset name in CMIP5.""" - dataset = variable['dataset'] - project = variable['project'] - logger.debug("Retrieving institutes for dataset %s", dataset) - return CFG.get(project, {}).get('institutes', {}).get(dataset, []) - - -def replace_mip_fx(fx_file): - """Replace MIP so to retrieve correct fx files.""" - default_mip = 'Amon' - if fx_file not in CFG['CMIP5']['fx_mip_change']: - logger.warning( - 'mip for fx variable %s is not specified in ' - 'config_developer.yml, using default (%s)', fx_file, default_mip) - new_mip = CFG['CMIP5']['fx_mip_change'].get(fx_file, default_mip) - logger.debug("Switching mip for fx file finding to %s", new_mip) - return new_mip - - -TAGS_CONFIG_FILE = os.path.join( - os.path.dirname(__file__), 'config-references.yml') - - -def _load_tags(filename=TAGS_CONFIG_FILE): - """Load the refence tags used for provenance recording.""" - logger.debug("Loading tags from %s", filename) - with open(filename) as file: - return yaml.safe_load(file) - - -TAGS = _load_tags() - - -def get_tag_value(section, tag): - """Retrieve the value of a tag.""" - if section not in TAGS: - raise ValueError("Section '{}' does not exist in {}".format( - section, TAGS_CONFIG_FILE)) - if tag not in TAGS[section]: - raise ValueError( - "Tag '{}' does not exist in section '{}' of {}".format( - tag, section, TAGS_CONFIG_FILE)) - return TAGS[section][tag] - - -def replace_tags(section, tags): - """Replace a list of tags with their values.""" - return tuple(get_tag_value(section, tag) for tag in tags) diff --git a/esmvaltool/_data_finder.py b/esmvaltool/_data_finder.py deleted file mode 100644 index ba6e21d532..0000000000 --- a/esmvaltool/_data_finder.py +++ /dev/null @@ -1,297 +0,0 @@ -"""Data finder module for the ESMValTool.""" -# Authors: -# Bouwe Andela (eScience, NL - b.andela@esciencecenter.nl) -# Valeriu Predoi (URead, UK - valeriu.predoi@ncas.ac.uk) -# Mattia 
Righi (DLR, Germany - mattia.righi@dlr.de) - -import fnmatch -import logging -import os -import re - -import six - -from ._config import get_project_config, replace_mip_fx -from .cmor.table import CMOR_TABLES - -logger = logging.getLogger(__name__) - - -def find_files(dirnames, filenames): - """Find files matching filenames in dirnames.""" - logger.debug("Looking for files matching %s in %s", filenames, dirnames) - - result = [] - for dirname in dirnames: - for path, _, files in os.walk(dirname, followlinks=True): - for filename in filenames: - matches = fnmatch.filter(files, filename) - result.extend(os.path.join(path, f) for f in matches) - - return result - - -def get_start_end_year(filename): - """Get the start and end year from a file name. - - This works for filenames matching - - *[-,_]YYYY*[-,_]YYYY*.* - or - *[-,_]YYYY*.* - or - YYYY*[-,_]*.* - or - YYYY*[-,_]YYYY*[-,_]*.* - or - YYYY*[-,_]*[-,_]YYYY*.* (Does this make sense? Is this worth catching?) - """ - name = os.path.splitext(filename)[0] - - filename = name.split(os.sep)[-1] - filename_list = [elem.split('-') for elem in filename.split('_')] - filename_list = [elem for sublist in filename_list for elem in sublist] - - pos_ydates = [elem.isdigit() and len(elem) >= 4 for elem in filename_list] - pos_ydates_l = list(pos_ydates) - pos_ydates_r = list(pos_ydates) - - for ind, _ in enumerate(pos_ydates_l): - if ind != 0: - pos_ydates_l[ind] = (pos_ydates_l[ind - 1] and pos_ydates_l[ind]) - - for ind, _ in enumerate(pos_ydates_r): - if ind != 0: - pos_ydates_r[-ind - 1] = (pos_ydates_r[-ind] - and pos_ydates_r[-ind - 1]) - - dates = [ - filename_list[ind] for ind, _ in enumerate(pos_ydates) - if pos_ydates_r[ind] or pos_ydates_l[ind] - ] - - if len(dates) == 1: - start_year = int(dates[0][:4]) - end_year = start_year - elif len(dates) == 2: - start_year, end_year = int(dates[0][:4]), int(dates[1][:4]) - else: - raise ValueError('Name {0} dates do not match a recognized ' - 'pattern'.format(name)) - - return start_year, end_year - - -def select_files(filenames, start_year, end_year): - """Select files containing data between start_year and end_year. 
- - This works for filenames matching *_YYYY*-YYYY*.* or *_YYYY*.* - """ - selection = [] - for filename in filenames: - start, end = get_start_end_year(filename) - if start <= end_year and end >= start_year: - selection.append(filename) - return selection - - -def _replace_tags(path, variable, fx_var=None): - """Replace tags in the config-developer's file with actual values.""" - path = path.strip('/') - - tlist = re.findall(r'\[([^]]*)\]', path) - - paths = [path] - for tag in tlist: - original_tag = tag - tag, _, _ = _get_caps_options(tag) - - if tag == 'fx_var': - replacewith = fx_var - elif tag == 'latestversion': # handled separately later - continue - elif tag in variable: - replacewith = variable[tag] - else: - raise KeyError("Dataset key {} must be specified for {}, check " - "your recipe entry".format(tag, variable)) - - paths = _replace_tag(paths, original_tag, replacewith) - - return paths - - -def _replace_tag(paths, tag, replacewith): - """Replace tag by replacewith in paths.""" - _, lower, upper = _get_caps_options(tag) - result = [] - if isinstance(replacewith, (list, tuple)): - for item in replacewith: - result.extend(_replace_tag(paths, tag, item)) - else: - text = _apply_caps(str(replacewith), lower, upper) - result.extend(p.replace('[' + tag + ']', text) for p in paths) - return result - - -def _get_caps_options(tag): - lower = False - upper = False - if tag.endswith('.lower'): - lower = True - tag = tag[0:-6] - elif tag.endswith('.upper'): - upper = True - tag = tag[0:-6] - return tag, lower, upper - - -def _apply_caps(original, lower, upper): - if lower: - return original.lower() - if upper: - return original.upper() - return original - - -def _resolve_latestversion(dirname_template): - """Resolve the 'latestversion' tag.""" - if '[latestversion]' not in dirname_template: - return dirname_template - - # Find latest version - part1, part2 = dirname_template.split('[latestversion]') - part2 = part2.lstrip(os.sep) - if os.path.exists(part1): - versions = os.listdir(part1) - versions.sort(reverse=True) - for version in ['latest'] + versions: - dirname = os.path.join(part1, version, part2) - if os.path.isdir(dirname): - return dirname - - return dirname_template - - -def _select_drs(input_type, drs, project): - """Select the directory structure of input path.""" - cfg = get_project_config(project) - input_path = cfg[input_type] - if isinstance(input_path, six.string_types): - return input_path - - structure = drs.get(project, 'default') - if structure in input_path: - return input_path[structure] - - raise KeyError( - 'drs {} for {} project not specified in config-developer file'.format( - structure, project)) - - -def get_rootpath(rootpath, project): - """Select the rootpath.""" - if project in rootpath: - return rootpath[project] - if 'default' in rootpath: - return rootpath['default'] - raise KeyError('default rootpath must be specified in config-user file') - - -def _find_input_dirs(variable, rootpath, drs, fx_var=None): - """Return a the full paths to input directories.""" - project = variable['project'] - - root = get_rootpath(rootpath, project) - input_type = 'input_{}dir'.format('fx_' if fx_var else '') - path_template = _select_drs(input_type, drs, project) - - dirnames = [] - for dirname_template in _replace_tags(path_template, variable, fx_var): - for base_path in root: - dirname = os.path.join(base_path, dirname_template) - dirname = _resolve_latestversion(dirname) - if os.path.exists(dirname): - logger.debug("Found %s", dirname) - dirnames.append(dirname) - 
else: - logger.debug("Skipping non-existent %s", dirname) - - return dirnames - - -def _get_filenames_glob(variable, drs, fx_var=None): - """Return patterns that can be used to look for input files.""" - input_type = 'input_{}file'.format('fx_' if fx_var else '') - path_template = _select_drs(input_type, drs, variable['project']) - filenames_glob = _replace_tags(path_template, variable, fx_var) - return filenames_glob - - -def _find_input_files(variable, rootpath, drs, fx_var=None): - logger.debug("Looking for input %sfiles for variable %s of dataset %s", - fx_var + ' fx ' if fx_var else '', variable['short_name'], - variable['dataset']) - - input_dirs = _find_input_dirs(variable, rootpath, drs, fx_var) - filenames_glob = _get_filenames_glob(variable, drs, fx_var) - files = find_files(input_dirs, filenames_glob) - - return files - - -def get_input_filelist(variable, rootpath, drs): - """Return the full path to input files.""" - files = _find_input_files(variable, rootpath, drs) - files = select_files(files, variable['start_year'], variable['end_year']) - return files - - -def get_input_fx_filelist(variable, rootpath, drs): - """Return a dict with the full path to fx input files.""" - fx_files = {} - for fx_var in variable['fx_files']: - var = dict(variable) - var['mip'] = replace_mip_fx(fx_var) - table = CMOR_TABLES[var['cmor_table']].get_table(var['mip']) - var['frequency'] = table.frequency - realm = getattr(table.get(var['short_name']), 'modeling_realm', None) - var['modeling_realm'] = realm if realm else table.realm - - files = _find_input_files(var, rootpath, drs, fx_var) - fx_files[fx_var] = files[0] if files else None - - return fx_files - - -def get_output_file(variable, preproc_dir): - """Return the full path to the output (preprocessed) file.""" - cfg = get_project_config(variable['project']) - - # Join different experiment names - if isinstance(variable.get('exp'), (list, tuple)): - variable = dict(variable) - variable['exp'] = '-'.join(variable['exp']) - - outfile = os.path.join( - preproc_dir, - variable['diagnostic'], - variable['variable_group'], - _replace_tags(cfg['output_file'], variable)[0] + '.nc', - ) - - return outfile - - -def get_statistic_output_file(variable, preproc_dir): - """Get multi model statistic filename depending on settings.""" - template = os.path.join( - preproc_dir, - '{diagnostic}', - '{variable_group}', - '{dataset}_{mip}_{short_name}_{start_year}-{end_year}.nc', - ) - - outfile = template.format(**variable) - - return outfile diff --git a/esmvaltool/_main.py b/esmvaltool/_main.py deleted file mode 100755 index c8a2dab022..0000000000 --- a/esmvaltool/_main.py +++ /dev/null @@ -1,249 +0,0 @@ -"""ESMValTool - Earth System Model Evaluation Tool. 
- -http://www.esmvaltool.org - -CORE DEVELOPMENT TEAM AND CONTACTS: - Veronika Eyring (PI; DLR, Germany - veronika.eyring@dlr.de) - Bouwe Andela (NLESC, Netherlands - b.andela@esciencecenter.nl) - Bjoern Broetz (DLR, Germany - bjoern.broetz@dlr.de) - Lee de Mora (PML, UK - ledm@pml.ac.uk) - Niels Drost (NLESC, Netherlands - n.drost@esciencecenter.nl) - Nikolay Koldunov (AWI, Germany - nikolay.koldunov@awi.de) - Axel Lauer (DLR, Germany - axel.lauer@dlr.de) - Benjamin Mueller (LMU, Germany - b.mueller@iggf.geo.uni-muenchen.de) - Valeriu Predoi (URead, UK - valeriu.predoi@ncas.ac.uk) - Mattia Righi (DLR, Germany - mattia.righi@dlr.de) - Manuel Schlund (DLR, Germany - manuel.schlund@dlr.de) - Javier Vegas-Regidor (BSC, Spain - javier.vegas@bsc.es) - -For further help, please read the documentation at -http://esmvaltool.readthedocs.io. Have fun! -""" - -# ESMValTool main script -# -# Authors: -# Bouwe Andela (NLESC, Netherlands - b.andela@esciencecenter.nl) -# Valeriu Predoi (URead, UK - valeriu.predoi@ncas.ac.uk) -# Mattia Righi (DLR, Germany - mattia.righi@dlr.de) - -import argparse -import datetime -import errno -import glob -import logging -import os -import shutil -import sys -from multiprocessing import cpu_count - -from . import __version__ -from ._config import configure_logging, read_config_user_file -from ._recipe import TASKSEP, read_recipe_file -from ._task import resource_usage_logger - -# set up logging -logger = logging.getLogger(__name__) - -HEADER = r""" -______________________________________________________________________ - _____ ____ __ ____ __ _ _____ _ - | ____/ ___|| \/ \ \ / /_ _| |_ _|__ ___ | | - | _| \___ \| |\/| |\ \ / / _` | | | |/ _ \ / _ \| | - | |___ ___) | | | | \ V / (_| | | | | (_) | (_) | | - |_____|____/|_| |_| \_/ \__,_|_| |_|\___/ \___/|_| -______________________________________________________________________ - -""" + __doc__ - - -def get_args(): - """Define the `esmvaltool` command line.""" - # parse command line args - parser = argparse.ArgumentParser( - description=HEADER, - formatter_class=argparse.RawDescriptionHelpFormatter) - parser.add_argument('recipe', help='Path or name of the yaml recipe file') - parser.add_argument( - '-v', - '--version', - action='version', - version=__version__, - help="return ESMValTool's version number and exit") - parser.add_argument( - '-c', - '--config-file', - default=os.path.join(os.path.dirname(__file__), 'config-user.yml'), - help='Config file') - parser.add_argument( - '-s', - '--synda-download', - action='store_true', - help='Download input data using synda. 
This requires a working ' - 'synda installation.') - parser.add_argument( - '--max-datasets', - type=int, - help='Try to limit the number of datasets used to MAX_DATASETS.') - parser.add_argument( - '--max-years', - type=int, - help='Limit the number of years to MAX_YEARS.') - parser.add_argument( - '--skip-nonexistent', - action='store_true', - help="Skip datasets that cannot be found.") - parser.add_argument( - '--diagnostics', - nargs='*', - help="Only run the named diagnostics from the recipe.") - args = parser.parse_args() - return args - - -def main(args): - """Define the `esmvaltool` program.""" - recipe = args.recipe - if not os.path.exists(recipe): - installed_recipe = os.path.join( - os.path.dirname(__file__), 'recipes', recipe) - if os.path.exists(installed_recipe): - recipe = installed_recipe - recipe = os.path.abspath(os.path.expandvars(os.path.expanduser(recipe))) - - config_file = os.path.abspath( - os.path.expandvars(os.path.expanduser(args.config_file))) - - # Read user config file - if not os.path.exists(config_file): - print("ERROR: config file {} does not exist".format(config_file)) - - recipe_name = os.path.splitext(os.path.basename(recipe))[0] - cfg = read_config_user_file(config_file, recipe_name) - - # Create run dir - if os.path.exists(cfg['run_dir']): - print("ERROR: run_dir {} already exists, aborting to " - "prevent data loss".format(cfg['output_dir'])) - os.makedirs(cfg['run_dir']) - - # configure logging - log_files = configure_logging( - output=cfg['run_dir'], console_log_level=cfg['log_level']) - - # log header - logger.info(HEADER) - - logger.info("Using config file %s", config_file) - logger.info("Writing program log files to:\n%s", "\n".join(log_files)) - - cfg['skip-nonexistent'] = args.skip_nonexistent - cfg['diagnostics'] = { - pattern if TASKSEP in pattern else pattern + TASKSEP + '*' - for pattern in args.diagnostics or () - } - cfg['synda_download'] = args.synda_download - for limit in ('max_datasets', 'max_years'): - value = getattr(args, limit) - if value is not None: - if value < 1: - raise ValueError("--{} should be larger than 0.".format( - limit.replace('_', '-'))) - cfg[limit] = value - - resource_log = os.path.join(cfg['run_dir'], 'resource_usage.txt') - with resource_usage_logger(pid=os.getpid(), filename=resource_log): - process_recipe(recipe_file=recipe, config_user=cfg) - return cfg - - -def process_recipe(recipe_file, config_user): - """Process recipe.""" - if not os.path.isfile(recipe_file): - raise OSError(errno.ENOENT, "Specified recipe file does not exist", - recipe_file) - - timestamp1 = datetime.datetime.utcnow() - timestamp_format = "%Y-%m-%d %H:%M:%S" - - logger.info( - "Starting the Earth System Model Evaluation Tool v%s at time: %s UTC", - __version__, timestamp1.strftime(timestamp_format)) - - logger.info(70 * "-") - logger.info("RECIPE = %s", recipe_file) - logger.info("RUNDIR = %s", config_user['run_dir']) - logger.info("WORKDIR = %s", config_user["work_dir"]) - logger.info("PREPROCDIR = %s", config_user["preproc_dir"]) - logger.info("PLOTDIR = %s", config_user["plot_dir"]) - logger.info(70 * "-") - - logger.info("Running tasks using at most %s processes", - config_user['max_parallel_tasks'] or cpu_count()) - - logger.info( - "If your system hangs during execution, it may not have enough " - "memory for keeping this number of tasks in memory. In that case, " - "try reducing 'max_parallel_tasks' in your user configuration file.") - - if config_user['compress_netcdf']: - logger.warning( - "You have enabled NetCDF compression. 
Accesing .nc files can be " - "much slower than expected if your access pattern does not match " - "their internal pattern. Make sure to specify the expected " - "access pattern in the recipe as a parameter to the 'save' " - "preprocessor function. If the problem persists, try disabling " - "NetCDF compression.") - - # copy recipe to run_dir for future reference - shutil.copy2(recipe_file, config_user['run_dir']) - - # parse recipe - recipe = read_recipe_file(recipe_file, config_user) - logger.debug("Recipe summary:\n%s", recipe) - - # run - recipe.run() - - # End time timing - timestamp2 = datetime.datetime.utcnow() - logger.info( - "Ending the Earth System Model Evaluation Tool v%s at time: %s UTC", - __version__, timestamp2.strftime(timestamp_format)) - logger.info("Time for running the recipe was: %s", timestamp2 - timestamp1) - - # Remind the user about reference/acknowledgement file - out_refs = glob.glob( - os.path.join(config_user['output_dir'], '*', '*', - 'references-acknowledgements.txt')) - logger.info( - "For the required references/acknowledgements of these " - "diagnostics see:\n%s", '\n'.join(out_refs)) - - -def run(): - """Run the `esmvaltool` program, logging any exceptions.""" - args = get_args() - try: - conf = main(args) - except: # noqa - if not logger.handlers: - # Add a logging handler if main failed to do so. - logging.basicConfig() - logger.exception( - "Program terminated abnormally, see stack trace " - "below for more information", - exc_info=True) - logger.info( - "If you suspect this is a bug or need help, please open an issue " - "on https://github.com/ESMValGroup/ESMValTool/issues and attach " - "the run/recipe_*.yml and run/main_log_debug.txt files from the " - "output directory.") - sys.exit(1) - else: - if conf["remove_preproc_dir"]: - logger.info("Removing preproc containing preprocessed data") - logger.info("If this data is further needed, then") - logger.info("set remove_preproc_dir to false in config") - shutil.rmtree(conf["preproc_dir"]) - logger.info("Run was successful") diff --git a/esmvaltool/_provenance.py b/esmvaltool/_provenance.py deleted file mode 100644 index b1ff719a53..0000000000 --- a/esmvaltool/_provenance.py +++ /dev/null @@ -1,253 +0,0 @@ -"""Provenance module.""" -import copy -import logging -import os - -from netCDF4 import Dataset -from PIL import Image -from PIL.PngImagePlugin import PngInfo -from prov.dot import prov_to_dot -from prov.model import ProvDocument - -from ._version import __version__ - -logger = logging.getLogger(__name__) - -ESMVALTOOL_URI_PREFIX = 'https://www.esmvaltool.org/' - - -def update_without_duplicating(bundle, other): - """Add new records from other provenance bundle.""" - for record in other.records: - if record not in bundle.records: - bundle.add_record(record) - - -def create_namespace(provenance, namespace): - """Create an esmvaltool namespace.""" - provenance.add_namespace(namespace, uri=ESMVALTOOL_URI_PREFIX + namespace) - - -def get_esmvaltool_provenance(): - """Create an esmvaltool run activity.""" - provenance = ProvDocument() - namespace = 'software' - create_namespace(provenance, namespace) - attributes = {} # TODO: add dependencies with versions here - activity = provenance.activity( - namespace + ':esmvaltool==' + __version__, other_attributes=attributes) - - return activity - - -ESMVALTOOL_PROVENANCE = get_esmvaltool_provenance() - - -def attribute_to_authors(entity, authors): - """Attribute entity to authors.""" - namespace = 'author' - create_namespace(entity.bundle, namespace) - - for 
author in authors: - agent = entity.bundle.agent( - namespace + ':' + author['name'], - {'attribute:' + k: author[k] - for k in author if k != 'name'}) - entity.wasAttributedTo(agent) - - -def attribute_to_projects(entity, projects): - """Attribute entity to projects.""" - namespace = 'project' - create_namespace(entity.bundle, namespace) - - for project in projects: - agent = entity.bundle.agent(namespace + ':' + project) - entity.wasAttributedTo(agent) - - -def get_recipe_provenance(documentation, filename): - """Create a provenance entity describing a recipe.""" - provenance = ProvDocument() - - for namespace in ('recipe', 'attribute'): - create_namespace(provenance, namespace) - - entity = provenance.entity( - 'recipe:{}'.format(filename), { - 'attribute:description': documentation.get('description', ''), - 'attribute:references': ', '.join( - documentation.get('references', [])), - }) - - attribute_to_authors(entity, documentation.get('authors', [])) - attribute_to_projects(entity, documentation.get('projects', [])) - - return entity - - -def get_task_provenance(task, recipe_entity): - """Create a provenance activity describing a task.""" - provenance = ProvDocument() - create_namespace(provenance, 'task') - - activity = provenance.activity('task:' + task.name) - - trigger = recipe_entity - update_without_duplicating(provenance, recipe_entity.bundle) - - starter = ESMVALTOOL_PROVENANCE - update_without_duplicating(provenance, starter.bundle) - - activity.wasStartedBy(trigger, starter) - - return activity - - -class TrackedFile(object): - """File with provenance tracking.""" - - def __init__(self, filename, attributes, ancestors=None): - """Create an instance of a file with provenance tracking.""" - self._filename = filename - self.attributes = copy.deepcopy(attributes) - - self.provenance = None - self.entity = None - self.activity = None - self._ancestors = [] if ancestors is None else ancestors - - def __str__(self): - """Return summary string.""" - return "{}: {}".format(self.__class__.__name__, self.filename) - - def copy_provenance(self, target=None): - """Create a copy with identical provenance information.""" - if self.provenance is None: - raise ValueError("Provenance of {} not initialized".format(self)) - if target is None: - new = TrackedFile(self.filename, self.attributes) - else: - if target.filename != self.filename: - raise ValueError( - "Attempt to copy provenance to incompatible file.") - new = target - new.attributes = copy.deepcopy(self.attributes) - new.provenance = copy.deepcopy(self.provenance) - new.entity = new.provenance.get_record(self.entity.identifier)[0] - new.activity = new.provenance.get_record(self.activity.identifier)[0] - return new - - @property - def filename(self): - """Filename.""" - return self._filename - - def initialize_provenance(self, activity): - """Initialize the provenance document. - - Note: this also copies the ancestor provenance. Therefore, changes - made to ancestor provenance after calling this function will not - propagate into the provenance of this file. 
- """ - if self.provenance is not None: - raise ValueError( - "Provenance of {} already initialized".format(self)) - self.provenance = ProvDocument() - self._initialize_namespaces() - self._initialize_activity(activity) - self._initialize_entity() - self._initialize_ancestors(activity) - - def _initialize_namespaces(self): - """Inialize the namespaces.""" - for namespace in ('file', 'attribute', 'preprocessor', 'task'): - create_namespace(self.provenance, namespace) - - def _initialize_activity(self, activity): - """Copy the preprocessor task activity.""" - self.activity = activity - update_without_duplicating(self.provenance, activity.bundle) - - def _initialize_entity(self): - """Initialize the entity representing the file.""" - attributes = { - 'attribute:' + k: str(v) - for k, v in self.attributes.items() - if k not in ('authors', 'projects') - } - self.entity = self.provenance.entity('file:' + self.filename, - attributes) - attribute_to_authors(self.entity, self.attributes.get('authors', [])) - attribute_to_projects(self.entity, self.attributes.get('projects', [])) - - def _initialize_ancestors(self, activity): - """Register ancestor files for provenance tracking.""" - for ancestor in self._ancestors: - if ancestor.provenance is None: - ancestor.initialize_provenance(activity) - update_without_duplicating(self.provenance, ancestor.provenance) - self.wasderivedfrom(ancestor) - - def wasderivedfrom(self, other): - """Let the file know that it was derived from other.""" - if isinstance(other, TrackedFile): - other_entity = other.entity - else: - other_entity = other - update_without_duplicating(self.provenance, other_entity.bundle) - if not self.activity: - raise ValueError("Activity not initialized.") - self.entity.wasDerivedFrom(other_entity, self.activity) - - def _select_for_include(self): - attributes = { - 'provenance': self.provenance.serialize(format='xml'), - 'software': "Created with ESMValTool v{}".format(__version__), - } - if 'caption' in self.attributes: - attributes['caption'] = self.attributes['caption'] - return attributes - - @staticmethod - def _include_provenance_nc(filename, attributes): - with Dataset(filename, 'a') as dataset: - for key, value in attributes.items(): - setattr(dataset, key, value) - - @staticmethod - def _include_provenance_png(filename, attributes): - pnginfo = PngInfo() - exif_tags = { - 'provenance': 'ImageHistory', - 'caption': 'ImageDescription', - 'software': 'Software', - } - for key, value in attributes.items(): - pnginfo.add_text(exif_tags.get(key, key), value, zip=True) - with Image.open(filename) as image: - image.save(filename, pnginfo=pnginfo) - - def _include_provenance(self): - """Include provenance information as metadata.""" - attributes = self._select_for_include() - - # List of files to attach provenance to - files = [self.filename] - if 'plot_file' in self.attributes: - files.append(self.attributes['plot_file']) - - # Attach provenance to supported file types - for filename in files: - ext = os.path.splitext(filename)[1].lstrip('.').lower() - write = getattr(self, '_include_provenance_' + ext, None) - if write: - write(filename, attributes) - - def save_provenance(self): - """Export provenance information.""" - self._include_provenance() - filename = os.path.splitext(self.filename)[0] + '_provenance' - self.provenance.serialize(filename + '.xml', format='xml') - figure = prov_to_dot(self.provenance) - figure.write_svg(filename + '.svg') diff --git a/esmvaltool/_recipe.py b/esmvaltool/_recipe.py deleted file mode 100644 index 
2388ec93ec..0000000000 --- a/esmvaltool/_recipe.py +++ /dev/null @@ -1,1077 +0,0 @@ -"""Recipe parser.""" -import fnmatch -import logging -import os -from collections import OrderedDict -from copy import deepcopy - -import yaml -from netCDF4 import Dataset - -from . import __version__ -from . import _recipe_checks as check -from ._config import TAGS, get_institutes, replace_tags -from ._data_finder import (get_input_filelist, get_input_fx_filelist, - get_output_file, get_statistic_output_file) -from ._provenance import TrackedFile, get_recipe_provenance -from ._recipe_checks import RecipeError -from ._task import (DiagnosticTask, get_flattened_tasks, get_independent_tasks, - run_tasks) -from .cmor.table import CMOR_TABLES -from .preprocessor import (DEFAULT_ORDER, FINAL_STEPS, INITIAL_STEPS, - MULTI_MODEL_FUNCTIONS, PreprocessingTask, - PreprocessorFile) -from .preprocessor._derive import get_required -from .preprocessor._download import synda_search -from .preprocessor._io import DATASET_KEYS, concatenate_callback -from .preprocessor._regrid import (get_cmor_levels, get_reference_levels, - parse_cell_spec) - -logger = logging.getLogger(__name__) - -TASKSEP = os.sep - - -def ordered_safe_load(stream): - """Load a YAML file using OrderedDict instead of dict.""" - class OrderedSafeLoader(yaml.SafeLoader): - """Loader class that uses OrderedDict to load a map.""" - - def construct_mapping(loader, node): - """Load a map as an OrderedDict.""" - loader.flatten_mapping(node) - return OrderedDict(loader.construct_pairs(node)) - - OrderedSafeLoader.add_constructor( - yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, construct_mapping) - - return yaml.load(stream, OrderedSafeLoader) - - -def load_raw_recipe(filename): - """Check a recipe file and return it in raw form.""" - # Note that many checks can only be performed after the automatically - # computed entries have been filled in by creating a Recipe object. 
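# A minimal usage sketch of ordered_safe_load defined above, with a
# hypothetical recipe snippet (not from a shipped recipe). Plain
# yaml.safe_load may lose the author's step order on Python < 3.7,
# while ordered_safe_load always preserves it:
#
#     >>> text = '''
#     ... preprocessors:
#     ...   prep_map:
#     ...     regrid:
#     ...       target_grid: 1x1
#     ...       scheme: linear
#     ...     mask_landsea:
#     ...       mask_out: sea
#     ... '''
#     >>> steps = ordered_safe_load(text)['preprocessors']['prep_map']
#     >>> list(steps)
#     ['regrid', 'mask_landsea']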
- check.recipe_with_schema(filename) - with open(filename, 'r') as file: - contents = file.read() - raw_recipe = yaml.safe_load(contents) - raw_recipe['preprocessors'] = ordered_safe_load(contents).get( - 'preprocessors', {}) - - check.diagnostics(raw_recipe['diagnostics']) - return raw_recipe - - -def read_recipe_file(filename, config_user, initialize_tasks=True): - """Read a recipe from file.""" - raw_recipe = load_raw_recipe(filename) - return Recipe( - raw_recipe, config_user, initialize_tasks, recipe_file=filename) - - -def _get_value(key, datasets): - """Get a value for key by looking at the other datasets.""" - values = {dataset[key] for dataset in datasets if key in dataset} - - if len(values) > 1: - raise RecipeError("Ambiguous values {} for property {}".format( - values, key)) - - value = None - if len(values) == 1: - value = values.pop() - - return value - - -def _update_from_others(variable, keys, datasets): - """Get values for keys by copying from the other datasets.""" - for key in keys: - if key not in variable: - value = _get_value(key, datasets) - if value is not None: - variable[key] = value - - -def _add_cmor_info(variable, override=False): - """Add information from CMOR tables to variable.""" - logger.debug("If not present: adding keys from CMOR table to %s", variable) - - if 'cmor_table' not in variable or 'mip' not in variable: - logger.debug("Skipping because cmor_table or mip not specified") - return - - if variable['cmor_table'] not in CMOR_TABLES: - logger.warning("Unknown CMOR table %s", variable['cmor_table']) - - derive = variable.get('derive', False) - # Copy the following keys from CMOR table - cmor_keys = [ - 'standard_name', 'long_name', 'units', 'modeling_realm', 'frequency' - ] - cmor_table = variable['cmor_table'] - mip = variable['mip'] - short_name = variable['short_name'] - table_entry = CMOR_TABLES[cmor_table].get_variable(mip, short_name) - - if derive and table_entry is None: - custom_table = CMOR_TABLES['custom'] - table_entry = custom_table.get_variable(mip, short_name) - - if table_entry is None: - raise RecipeError( - "Unable to load CMOR table '{}' for variable '{}' with mip '{}'". 
format(cmor_table, short_name, mip)) - - mip_info = CMOR_TABLES[cmor_table].get_table(mip) - if mip_info: - table_entry.frequency = mip_info.frequency - - for key in cmor_keys: - if key not in variable or override: - value = getattr(table_entry, key, None) - if value is not None: - variable[key] = value - else: - logger.debug( - "Failed to add key %s to variable %s from CMOR table", key, - variable) - - # Check that keys are available - check.variable(variable, required_keys=cmor_keys) - - -def _special_name_to_dataset(variable, special_name): - """Convert special names to dataset names.""" - if special_name in ('reference_dataset', 'alternative_dataset'): - if special_name not in variable: - raise RecipeError( - "Preprocessor {} uses {}, but {} is not defined for " - "variable {} of diagnostic {}".format( - variable['preprocessor'], special_name, special_name, - variable['short_name'], variable['diagnostic'])) - special_name = variable[special_name] - - return special_name - - -def _update_target_levels(variable, variables, settings, config_user): - """Replace the target levels dataset name with a filename if needed.""" - levels = settings.get('extract_levels', {}).get('levels') - if not levels: - return - - levels = _special_name_to_dataset(variable, levels) - - # If levels is a dataset name, replace it by a dict with a 'dataset' entry - if any(levels == v['dataset'] for v in variables): - settings['extract_levels']['levels'] = {'dataset': levels} - levels = settings['extract_levels']['levels'] - - if not isinstance(levels, dict): - return - - if 'cmor_table' in levels and 'coordinate' in levels: - settings['extract_levels']['levels'] = get_cmor_levels( - levels['cmor_table'], levels['coordinate']) - elif 'dataset' in levels: - dataset = levels['dataset'] - if variable['dataset'] == dataset: - del settings['extract_levels'] - else: - variable_data = _get_dataset_info(dataset, variables) - filename = \ - _dataset_to_file(variable_data, config_user) - settings['extract_levels']['levels'] = get_reference_levels( - filename, variable_data['project'], dataset, - variable_data['short_name'], - os.path.splitext(variable_data['filename'])[0] + '_fixed') - - -def _update_target_grid(variable, variables, settings, config_user): - """Replace the target grid dataset name with a filename if needed.""" - grid = settings.get('regrid', {}).get('target_grid') - if not grid: - return - - grid = _special_name_to_dataset(variable, grid) - - if variable['dataset'] == grid: - del settings['regrid'] - elif any(grid == v['dataset'] for v in variables): - settings['regrid']['target_grid'] = _dataset_to_file( - _get_dataset_info(grid, variables), config_user) - else: - # Check that MxN grid spec is correct - parse_cell_spec(settings['regrid']['target_grid']) - - -def _update_regrid_time(variable, settings): - """Set input data frequency automatically for regrid_time preprocessor.""" - regrid_time = settings.get('regrid_time') - if regrid_time is None: - return - frequency = settings.get('regrid_time', {}).get('frequency') - if not frequency: - settings['regrid_time']['frequency'] = variable['frequency'] - - -def _get_dataset_info(dataset, variables): - """Get the variable info belonging to dataset.""" - for var in variables: - if var['dataset'] == dataset: - return var - raise RecipeError("Unable to find matching file for dataset " - "{}".format(dataset)) - - -def _augment(base, update): - """Update dict base with values from dict update.""" - for key in update: - if key not in base: - base[key] = update[key] - - -def _dataset_to_file(variable, config_user): - 
"""Find the first file belonging to dataset from variable info.""" - files = get_input_filelist( - variable=variable, - rootpath=config_user['rootpath'], - drs=config_user['drs']) - if not files and variable.get('derive'): - first_required = get_required(variable['short_name'])[0] - _augment(first_required, variable) - files = get_input_filelist( - variable=first_required, - rootpath=config_user['rootpath'], - drs=config_user['drs']) - check.data_availability(files, variable) - return files[0] - - -def _limit_datasets(variables, profile, max_datasets=0): - """Try to limit the number of datasets to max_datasets.""" - if not max_datasets: - return variables - - logger.info("Limiting the number of datasets to %s", max_datasets) - - required_datasets = [ - (profile.get('extract_levels') or {}).get('levels'), - (profile.get('regrid') or {}).get('target_grid'), - variables[0].get('reference_dataset'), - variables[0].get('alternative_dataset'), - ] - - limited = [v for v in variables if v['dataset'] in required_datasets] - for variable in variables: - if len(limited) >= max_datasets: - break - if variable not in limited: - limited.append(variable) - - logger.info("Only considering %s", - ', '.join(v['dataset'] for v in limited)) - - return limited - - -def _get_default_settings(variable, config_user, derive=False): - """Get default preprocessor settings.""" - settings = {} - - # Set up downloading using synda if requested. - if config_user['synda_download']: - # TODO: make this respect drs or download to preproc dir? - download_folder = os.path.join(config_user['preproc_dir'], 'downloads') - settings['download'] = { - 'dest_folder': download_folder, - } - - # Configure loading - settings['load'] = { - 'callback': concatenate_callback, - } - # Configure merge - settings['concatenate'] = {} - - # Configure fixes - fix = { - 'project': variable['project'], - 'dataset': variable['dataset'], - 'short_name': variable['short_name'], - } - # File fixes - fix_dir = os.path.splitext(variable['filename'])[0] + '_fixed' - if not derive: - settings['fix_file'] = dict(fix) - settings['fix_file']['output_dir'] = fix_dir - # Cube fixes - # Only supply mip if the CMOR check fixes are implemented. 
- if variable.get('cmor_table'): - fix['cmor_table'] = variable['cmor_table'] - fix['mip'] = variable['mip'] - fix['frequency'] = variable['frequency'] - settings['fix_data'] = dict(fix) - settings['fix_metadata'] = dict(fix) - - # Configure time extraction - settings['extract_time'] = { - 'start_year': variable['start_year'], - 'end_year': variable['end_year'] + 1, - 'start_month': 1, - 'end_month': 1, - 'start_day': 1, - 'end_day': 1, - } - - if derive: - settings['derive'] = { - 'short_name': variable['short_name'], - 'standard_name': variable['standard_name'], - 'long_name': variable['long_name'], - 'units': variable['units'], - } - - # Configure CMOR metadata check - if variable.get('cmor_table'): - settings['cmor_check_metadata'] = { - 'cmor_table': variable['cmor_table'], - 'mip': variable['mip'], - 'short_name': variable['short_name'], - 'frequency': variable['frequency'], - } - # Configure final CMOR data check - if variable.get('cmor_table'): - settings['cmor_check_data'] = { - 'cmor_table': variable['cmor_table'], - 'mip': variable['mip'], - 'short_name': variable['short_name'], - 'frequency': variable['frequency'], - } - - # Clean up fixed files - if not config_user['save_intermediary_cubes']: - settings['cleanup'] = { - 'remove': [fix_dir], - } - - # Configure saving cubes to file - settings['save'] = {'compress': config_user['compress_netcdf']} - - return settings - - -def _update_fx_settings(settings, variable, config_user): - """Find and set the FX derive/mask settings.""" - # update for derive - if 'derive' in settings: - fx_files = {} - for var in get_required(variable['short_name']): - if 'fx_files' in var: - _augment(var, variable) - fx_files.update( - get_input_fx_filelist( - variable=var, - rootpath=config_user['rootpath'], - drs=config_user['drs'])) - settings['derive']['fx_files'] = fx_files - - # update for landsea - if 'mask_landsea' in settings: - # Configure ingestion of land/sea masks - logger.debug('Getting fx mask settings now...') - - settings['mask_landsea']['fx_files'] = [] - - var = dict(variable) - var['fx_files'] = ['sftlf', 'sftof'] - fx_files_dict = get_input_fx_filelist( - variable=var, - rootpath=config_user['rootpath'], - drs=config_user['drs']) - - # allow both sftlf and sftof - if fx_files_dict['sftlf']: - settings['mask_landsea']['fx_files'].append(fx_files_dict['sftlf']) - if fx_files_dict['sftof']: - settings['mask_landsea']['fx_files'].append(fx_files_dict['sftof']) - - if 'mask_landseaice' in settings: - logger.debug('Getting fx mask settings now...') - - settings['mask_landseaice']['fx_files'] = [] - - var = dict(variable) - var['fx_files'] = ['sftgif'] - fx_files_dict = get_input_fx_filelist( - variable=var, - rootpath=config_user['rootpath'], - drs=config_user['drs']) - - # allow sftgif (only, for now) - if fx_files_dict['sftgif']: - settings['mask_landseaice']['fx_files'].append( - fx_files_dict['sftgif']) - - for step in ('average_region', 'average_volume'): - if settings.get(step, {}).get('fx_files'): - settings[step]['fx_files'] = get_input_fx_filelist( - variable=variable, - rootpath=config_user['rootpath'], - drs=config_user['drs'], - ) - - -def _read_attributes(filename): - """Read the attributes from a netcdf file.""" - attributes = {} - if not (os.path.exists(filename) - and os.path.splitext(filename)[1].lower() == '.nc'): - return attributes - - with Dataset(filename, 'r') as dataset: - for attr in dataset.ncattrs(): - attributes[attr] = getattr(dataset, attr) - return attributes - - -def _get_input_files(variable, config_user): 
- """Get the input files for a single dataset.""" - # Find input files locally. - input_files = get_input_filelist( - variable=variable, - rootpath=config_user['rootpath'], - drs=config_user['drs']) - - # Set up downloading using synda if requested. - # Do not download if files are already available locally. - if config_user['synda_download'] and not input_files: - input_files = synda_search(variable) - - logger.info("Using input files for variable %s of dataset %s:\n%s", - variable['short_name'], variable['dataset'], - '\n'.join(input_files)) - if (not config_user.get('skip-nonexistent') - or variable['dataset'] == variable.get('reference_dataset')): - check.data_availability(input_files, variable) - - # Set up provenance tracking - for i, filename in enumerate(input_files): - attributes = _read_attributes(filename) - input_files[i] = TrackedFile(filename, attributes) - - return input_files - - -def _apply_preprocessor_profile(settings, profile_settings): - """Apply settings from preprocessor profile.""" - profile_settings = deepcopy(profile_settings) - for step, args in profile_settings.items(): - # Remove disabled preprocessor functions - if args is False: - if step in settings: - del settings[step] - continue - # Enable/update functions without keywords - if step not in settings: - settings[step] = {} - if isinstance(args, dict): - settings[step].update(args) - - -def _get_statistic_attributes(products): - """Get attributes for the statistic output products.""" - attributes = {} - some_product = next(iter(products)) - for key, value in some_product.attributes.items(): - if all(p.attributes.get(key, object()) == value for p in products): - attributes[key] = value - - # Ensure start_year and end_year attributes are available - for product in products: - start = product.attributes['start_year'] - if 'start_year' not in attributes or start < attributes['start_year']: - attributes['start_year'] = start - end = product.attributes['end_year'] - if 'end_year' not in attributes or end > attributes['end_year']: - attributes['end_year'] = end - - return attributes - - -def _get_remaining_common_settings(step, order, products): - """Get preprocessor settings that are shared between products.""" - settings = {} - remaining_steps = order[order.index(step) + 1:] - some_product = next(iter(products)) - for key, value in some_product.settings.items(): - if key in remaining_steps: - if all(p.settings.get(key, object()) == value for p in products): - settings[key] = value - return settings - - -def _update_multi_dataset_settings(variable, settings): - """Configure multi dataset statistics.""" - for step in MULTI_MODEL_FUNCTIONS: - if not settings.get(step): - continue - # Exclude dataset if requested - exclude = { - _special_name_to_dataset(variable, dataset) - for dataset in settings[step].pop('exclude', []) - } - if variable['dataset'] in exclude: - settings.pop(step) - - -def _update_statistic_settings(products, order, preproc_dir): - """Define statistic output products.""" - # TODO: move this to multi model statistics function? - # But how to check, with a dry-run option? 
- step = 'multi_model_statistics' - - products = {p for p in products if step in p.settings} - if not products: - return - - some_product = next(iter(products)) - for statistic in some_product.settings[step]['statistics']: - attributes = _get_statistic_attributes(products) - attributes['dataset'] = 'MultiModel{}'.format(statistic.title()) - attributes['filename'] = get_statistic_output_file( - attributes, preproc_dir) - common_settings = _get_remaining_common_settings(step, order, products) - statistic_product = PreprocessorFile(attributes, common_settings) - for product in products: - settings = product.settings[step] - if 'output_products' not in settings: - settings['output_products'] = {} - settings['output_products'][statistic] = statistic_product - - -def _match_products(products, variables): - """Match a list of input products to output product attributes.""" - grouped_products = {} - - def get_matching(attributes): - """Find the output filename which matches input attributes best.""" - score = 0 - filenames = [] - for variable in variables: - filename = variable['filename'] - tmp = sum(v == variable.get(k) for k, v in attributes.items()) - if tmp > score: - score = tmp - filenames = [filename] - elif tmp == score: - filenames.append(filename) - if not filenames: - logger.warning( - "Unable to find matching output file for input file %s", - filename) - return filenames - - # Group input files by output file - for product in products: - for filename in get_matching(product.attributes): - if filename not in grouped_products: - grouped_products[filename] = [] - grouped_products[filename].append(product) - - return grouped_products - - -def _get_preprocessor_products(variables, profile, order, ancestor_products, - config_user): - """Get preprocessor product definitions for a set of datasets.""" - products = set() - - for variable in variables: - variable['filename'] = get_output_file(variable, - config_user['preproc_dir']) - - if ancestor_products: - grouped_ancestors = _match_products(ancestor_products, variables) - else: - grouped_ancestors = {} - - for variable in variables: - settings = _get_default_settings( - variable, config_user, derive='derive' in profile) - _apply_preprocessor_profile(settings, profile) - _update_multi_dataset_settings(variable, settings) - _update_target_levels( - variable=variable, - variables=variables, - settings=settings, - config_user=config_user) - _update_fx_settings( - settings=settings, variable=variable, config_user=config_user) - _update_target_grid( - variable=variable, - variables=variables, - settings=settings, - config_user=config_user) - _update_regrid_time(variable, settings) - ancestors = grouped_ancestors.get(variable['filename']) - if not ancestors: - ancestors = _get_input_files(variable, config_user) - if config_user.get('skip-nonexistent') and not ancestors: - logger.info("Skipping: no data found for %s", variable) - continue - product = PreprocessorFile( - attributes=variable, settings=settings, ancestors=ancestors) - products.add(product) - - _update_statistic_settings(products, order, config_user['preproc_dir']) - - for product in products: - product.check() - - return products - - -def _get_single_preprocessor_task(variables, - profile, - config_user, - name, - ancestor_tasks=None): - """Create preprocessor tasks for a set of datasets.""" - if ancestor_tasks is None: - ancestor_tasks = [] - order = _extract_preprocessor_order(profile) - ancestor_products = [p for task in ancestor_tasks for p in task.products] - products = 
_get_preprocessor_products( - variables=variables, - profile=profile, - order=order, - ancestor_products=ancestor_products, - config_user=config_user, - ) - - if not products: - raise RecipeError( - "Did not find any input data for task {}".format(name)) - - task = PreprocessingTask( - products=products, - ancestors=ancestor_tasks, - name=name, - order=order, - debug=config_user['save_intermediary_cubes'], - write_ncl_interface=config_user['write_ncl_interface'], - ) - - logger.info("PreprocessingTask %s created. It will create the files:\n%s", - task.name, '\n'.join(p.filename for p in task.products)) - - return task - - -def _extract_preprocessor_order(profile): - """Extract the order of the preprocessing steps from the profile.""" - custom_order = profile.pop('custom_order', False) - if not custom_order: - return DEFAULT_ORDER - order = tuple(p for p in profile if p not in INITIAL_STEPS + FINAL_STEPS) - return INITIAL_STEPS + order + FINAL_STEPS - - -def _split_settings(settings, step, order=DEFAULT_ORDER): - """Split settings, using step as a separator.""" - before = {} - for _step in order: - if _step == step: - break - if _step in settings: - before[_step] = settings[_step] - after = { - k: v - for k, v in settings.items() if not (k == step or k in before) - } - return before, after - - -def _split_derive_profile(profile): - """Split the derive preprocessor profile.""" - order = _extract_preprocessor_order(profile) - before, after = _split_settings(profile, 'derive', order) - after['derive'] = {} - if order != DEFAULT_ORDER: - before['custom_order'] = True - after['custom_order'] = True - return before, after - - -def _get_derive_input_variables(variables, config_user): - """Determine the input sets of `variables` needed for deriving.""" - derive_input = {} - - def append(group_prefix, var): - """Append variable `var` to a derive input group.""" - group = group_prefix + var['short_name'] - var['variable_group'] = group - if group not in derive_input: - derive_input[group] = [] - derive_input[group].append(var) - - for variable in variables: - - group_prefix = variable['variable_group'] + '_derive_input_' - if not variable.get('force_derivation') and get_input_filelist( - variable=variable, - rootpath=config_user['rootpath'], - drs=config_user['drs']): - # No need to derive, just process normally up to derive step - var = deepcopy(variable) - append(group_prefix, var) - else: - # Process input data needed to derive variable - for var in get_required(variable['short_name']): - _augment(var, variable) - append(group_prefix, var) - - return derive_input - - -def _get_preprocessor_task(variables, profiles, config_user, task_name): - """Create preprocessor task(s) for a set of datasets.""" - # First set up the preprocessor profile - variable = variables[0] - preproc_name = variable.get('preprocessor') - if preproc_name not in profiles: - raise RecipeError( - "Unknown preprocessor {} in variable {} of diagnostic {}".format( - preproc_name, variable['short_name'], variable['diagnostic'])) - profile = deepcopy(profiles[variable['preprocessor']]) - logger.info("Creating preprocessor '%s' task for variable '%s'", - variable['preprocessor'], variable['short_name']) - variables = _limit_datasets(variables, profile, - config_user.get('max_datasets')) - for variable in variables: - _add_cmor_info(variable) - # Create preprocessor task(s) - derive_tasks = [] - if variable.get('derive'): - # Create tasks to prepare the input data for the derive step - derive_profile, profile = 
_split_derive_profile(profile) - derive_input = _get_derive_input_variables(variables, config_user) - - for derive_variables in derive_input.values(): - for derive_variable in derive_variables: - _add_cmor_info(derive_variable, override=True) - derive_name = task_name.split( - TASKSEP)[0] + TASKSEP + derive_variables[0]['variable_group'] - task = _get_single_preprocessor_task( - derive_variables, - derive_profile, - config_user, - name=derive_name) - derive_tasks.append(task) - - # Create (final) preprocessor task - task = _get_single_preprocessor_task( - variables, - profile, - config_user, - ancestor_tasks=derive_tasks, - name=task_name) - - return task - - -class Recipe: - """Recipe object.""" - - def __init__(self, - raw_recipe, - config_user, - initialize_tasks=True, - recipe_file=None): - """Parse a recipe file into an object.""" - self._cfg = deepcopy(config_user) - self._cfg['write_ncl_interface'] = self._need_ncl( - raw_recipe['diagnostics']) - self._filename = os.path.basename(recipe_file) - self._preprocessors = raw_recipe.get('preprocessors', {}) - if 'default' not in self._preprocessors: - self._preprocessors['default'] = {} - self.diagnostics = self._initialize_diagnostics( - raw_recipe['diagnostics'], raw_recipe.get('datasets', [])) - self.entity = self._initalize_provenance( - raw_recipe.get('documentation', {})) - self.tasks = self.initialize_tasks() if initialize_tasks else None - - @staticmethod - def _need_ncl(raw_diagnostics): - if not raw_diagnostics: - return False - for diagnostic in raw_diagnostics.values(): - if not diagnostic.get('scripts'): - continue - for script in diagnostic['scripts'].values(): - if script.get('script', '').lower().endswith('.ncl'): - logger.info("NCL script detected, checking NCL version") - check.ncl_version() - return True - return False - - def _initalize_provenance(self, raw_documentation): - """Initialize the recipe provenance.""" - doc = deepcopy(raw_documentation) - for key in doc: - if key in TAGS: - doc[key] = replace_tags(key, doc[key]) - - return get_recipe_provenance(doc, self._filename) - - def _initialize_diagnostics(self, raw_diagnostics, raw_datasets): - """Define diagnostics in recipe.""" - logger.debug("Retrieving diagnostics from recipe") - - diagnostics = {} - - for name, raw_diagnostic in raw_diagnostics.items(): - diagnostic = {} - diagnostic['name'] = name - diagnostic['preprocessor_output'] = \ - self._initialize_preprocessor_output( - name, - raw_diagnostic.get('variables', {}), - raw_datasets + - raw_diagnostic.get('additional_datasets', [])) - variable_names = tuple(raw_diagnostic.get('variables', {})) - diagnostic['scripts'] = self._initialize_scripts( - name, raw_diagnostic.get('scripts'), variable_names) - for key in ('themes', 'realms'): - if key in raw_diagnostic: - for script in diagnostic['scripts'].values(): - script['settings'][key] = raw_diagnostic[key] - diagnostics[name] = diagnostic - - return diagnostics - - @staticmethod - def _initialize_datasets(raw_datasets): - """Define datasets used by variable.""" - datasets = deepcopy(raw_datasets) - - for dataset in datasets: - for key in dataset: - DATASET_KEYS.add(key) - - check.duplicate_datasets(datasets) - return datasets - - def _initialize_variables(self, raw_variable, raw_datasets): - """Define variables for all datasets.""" - variables = [] - - raw_variable = deepcopy(raw_variable) - datasets = self._initialize_datasets( - raw_datasets + raw_variable.pop('additional_datasets', [])) - - for index, dataset in enumerate(datasets): - variable = 
deepcopy(raw_variable) - variable.update(dataset) - variable['recipe_dataset_index'] = index - if ('cmor_table' not in variable - and variable.get('project') in CMOR_TABLES): - variable['cmor_table'] = variable['project'] - if 'end_year' in variable and 'max_years' in self._cfg: - variable['end_year'] = min( - variable['end_year'], - variable['start_year'] + self._cfg['max_years'] - 1) - variables.append(variable) - - required_keys = { - 'short_name', - 'mip', - 'dataset', - 'project', - 'start_year', - 'end_year', - 'preprocessor', - 'diagnostic', - } - - for variable in variables: - _update_from_others(variable, ['cmor_table', 'mip'], datasets) - institute = get_institutes(variable) - if institute: - variable['institute'] = institute - check.variable(variable, required_keys) - if 'fx_files' in variable: - for fx_file in variable['fx_files']: - DATASET_KEYS.add(fx_file) - # Get the fx files - variable['fx_files'] = get_input_fx_filelist( - variable=variable, - rootpath=self._cfg['rootpath'], - drs=self._cfg['drs']) - logger.info("Using fx files for var %s of dataset %s:\n%s", - variable['short_name'], variable['dataset'], - variable['fx_files']) - - return variables - - def _initialize_preprocessor_output(self, diagnostic_name, raw_variables, - raw_datasets): - """Define variables in diagnostic.""" - logger.debug("Populating list of variables for diagnostic %s", - diagnostic_name) - - preprocessor_output = {} - - for variable_group, raw_variable in raw_variables.items(): - raw_variable = deepcopy(raw_variable) - raw_variable['variable_group'] = variable_group - if 'short_name' not in raw_variable: - raw_variable['short_name'] = variable_group - raw_variable['diagnostic'] = diagnostic_name - raw_variable['preprocessor'] = str( - raw_variable.get('preprocessor', 'default')) - preprocessor_output[variable_group] = \ - self._initialize_variables(raw_variable, raw_datasets) - - return preprocessor_output - - def _initialize_scripts(self, diagnostic_name, raw_scripts, - variable_names): - """Define script in diagnostic.""" - if not raw_scripts: - return {} - - logger.debug("Setting script for diagnostic %s", diagnostic_name) - - scripts = {} - - for script_name, raw_settings in raw_scripts.items(): - settings = deepcopy(raw_settings) - script = settings.pop('script') - ancestors = [] - for id_glob in settings.pop('ancestors', variable_names): - if TASKSEP not in id_glob: - id_glob = diagnostic_name + TASKSEP + id_glob - ancestors.append(id_glob) - settings['recipe'] = self._filename - settings['version'] = __version__ - settings['script'] = script_name - # Add output dirs to settings - for dir_name in ('run_dir', 'plot_dir', 'work_dir'): - settings[dir_name] = os.path.join(self._cfg[dir_name], - diagnostic_name, script_name) - # Copy other settings - if self._cfg['write_ncl_interface']: - settings['exit_on_ncl_warning'] = self._cfg['exit_on_warning'] - for key in ( - 'max_data_filesize', - 'output_file_type', - 'log_level', - 'write_plots', - 'write_netcdf', - 'profile_diagnostic', - 'auxiliary_data_dir', - ): - settings[key] = self._cfg[key] - - scripts[script_name] = { - 'script': script, - 'output_dir': settings['work_dir'], - 'settings': settings, - 'ancestors': ancestors, - } - - return scripts - - def _resolve_diagnostic_ancestors(self, tasks): - """Resolve diagnostic ancestors.""" - tasks = {t.name: t for t in tasks} - for diagnostic_name, diagnostic in self.diagnostics.items(): - for script_name, script_cfg in diagnostic['scripts'].items(): - task_id = diagnostic_name + TASKSEP + 
script_name - if isinstance(tasks[task_id], DiagnosticTask): - logger.debug("Linking tasks for diagnostic %s script %s", - diagnostic_name, script_name) - ancestors = [] - for id_glob in script_cfg['ancestors']: - ancestor_ids = fnmatch.filter(tasks, id_glob) - if not ancestor_ids: - raise RecipeError( - "Could not find any ancestors matching {}". - format(id_glob)) - logger.debug("Pattern %s matches %s", id_glob, - ancestor_ids) - ancestors.extend(tasks[a] for a in ancestor_ids) - tasks[task_id].ancestors = ancestors - - def initialize_tasks(self): - """Define tasks in recipe.""" - logger.info("Creating tasks from recipe") - tasks = set() - - for diagnostic_name, diagnostic in self.diagnostics.items(): - logger.info("Creating tasks for diagnostic %s", diagnostic_name) - - # Create preprocessor tasks - for variable_group in diagnostic['preprocessor_output']: - task_name = diagnostic_name + TASKSEP + variable_group - logger.info("Creating preprocessor task %s", task_name) - task = _get_preprocessor_task( - variables=diagnostic['preprocessor_output'] - [variable_group], - profiles=self._preprocessors, - config_user=self._cfg, - task_name=task_name) - tasks.add(task) - - # Create diagnostic tasks - for script_name, script_cfg in diagnostic['scripts'].items(): - task_name = diagnostic_name + TASKSEP + script_name - logger.info("Creating diagnostic task %s", task_name) - task = DiagnosticTask( - script=script_cfg['script'], - output_dir=script_cfg['output_dir'], - settings=script_cfg['settings'], - name=task_name) - tasks.add(task) - - check.tasks_valid(tasks) - - # Resolve diagnostic ancestors - self._resolve_diagnostic_ancestors(tasks) - - # Select only requested tasks - tasks = get_flattened_tasks(tasks) - if not self._cfg.get('run_diagnostic'): - tasks = {t for t in tasks if isinstance(t, PreprocessingTask)} - if self._cfg.get('diagnostics'): - names = {t.name for t in tasks} - selection = set() - for pattern in self._cfg.get('diagnostics'): - selection |= set(fnmatch.filter(names, pattern)) - tasks = {t for t in tasks if t.name in selection} - - tasks = get_flattened_tasks(tasks) - logger.info("These tasks will be executed: %s", - ', '.join(t.name for t in tasks)) - - # Initialize task provenance - for task in tasks: - task.initialize_provenance(self.entity) - - # TODO: check that no loops are created (will throw RecursionError) - - # Return smallest possible set of tasks - return get_independent_tasks(tasks) - - def __str__(self): - """Get human readable summary.""" - return '\n\n'.join(str(task) for task in self.tasks) - - def run(self): - """Run all tasks in the recipe.""" - run_tasks( - self.tasks, max_parallel_tasks=self._cfg['max_parallel_tasks']) diff --git a/esmvaltool/_recipe_checks.py b/esmvaltool/_recipe_checks.py deleted file mode 100644 index a6ab607df0..0000000000 --- a/esmvaltool/_recipe_checks.py +++ /dev/null @@ -1,120 +0,0 @@ -"""Module with functions to check a recipe.""" -import logging -import os -import subprocess - -import yamale - -from ._data_finder import get_start_end_year -from ._task import get_flattened_tasks, which -from .preprocessor import PreprocessingTask - -logger = logging.getLogger(__name__) - - -class RecipeError(Exception): - """Recipe contains an error.""" - - -def ncl_version(): - """Check the NCL version.""" - ncl = which('ncl') - if not ncl: - raise RecipeError("Recipe contains NCL scripts, but cannot find " - "an NCL installation.") - try: - cmd = [ncl, '-V'] - version = subprocess.check_output(cmd, universal_newlines=True) - except 
subprocess.CalledProcessError: - logger.error("Failed to execute '%s'", ' '.join(cmd)) - raise RecipeError("Recipe contains NCL scripts, but your NCL " - "installation appears to be broken.") - - version = version.strip() - logger.info("Found NCL version %s", version) - - major, minor = (int(i) for i in version.split('.')[:2]) - if major < 6 or (major == 6 and minor < 4): - raise RecipeError("NCL version 6.4 or higher is required to run " - "a recipe containing NCL scripts.") - - -def recipe_with_schema(filename): - """Check if the recipe content matches schema.""" - schema_file = os.path.join(os.path.dirname(__file__), 'recipe_schema.yml') - logger.debug("Checking recipe against schema %s", schema_file) - recipe = yamale.make_data(filename) - schema = yamale.make_schema(schema_file) - yamale.validate(schema, recipe) - - -def diagnostics(diags): - """Check diagnostics in recipe.""" - for name, diagnostic in diags.items(): - if 'scripts' not in diagnostic: - raise RecipeError( - "Missing scripts section in diagnostic {}".format(name)) - variable_names = tuple(diagnostic.get('variables', {})) - scripts = diagnostic.get('scripts') - if scripts is None: - scripts = {} - for script_name, script in scripts.items(): - if script_name in variable_names: - raise RecipeError( - "Invalid script name {} encountered in diagnostic {}: " - "scripts cannot have the same name as variables.".format( - script_name, name)) - if not script.get('script'): - raise RecipeError( - "No script defined for script {} in diagnostic {}".format( - script_name, name)) - - -def duplicate_datasets(datasets): - """Check for duplicate datasets.""" - checked_datasets_ = [] - for dataset in datasets: - if dataset in checked_datasets_: - raise RecipeError( - "Duplicate dataset {} in datasets section".format(dataset)) - checked_datasets_.append(dataset) - - -def variable(var, required_keys): - """Check variables as derived from recipe.""" - required = set(required_keys) - missing = required - set(var) - if missing: - raise RecipeError( - "Missing keys {} from variable {} in diagnostic {}".format( - missing, var.get('short_name'), var.get('diagnostic'))) - - -def data_availability(input_files, var): - """Check if the required input data is available.""" - if not input_files: - raise RecipeError("No input files found for variable {}".format(var)) - - required_years = set(range(var['start_year'], var['end_year'] + 1)) - available_years = set() - for filename in input_files: - start, end = get_start_end_year(filename) - available_years.update(range(start, end + 1)) - - missing_years = required_years - available_years - if missing_years: - raise RecipeError( - "No input data available for years {} in files {}".format( - ", ".join(str(year) for year in missing_years), input_files)) - - -def tasks_valid(tasks): - """Check that tasks are consistent.""" - filenames = set() - msg = "Duplicate preprocessor filename {}, please file a bug report." 
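# The loop below walks every product of every PreprocessingTask and
# raises on the first output filename claimed by two products, since
# duplicate output files would silently overwrite each other during
# preprocessing.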
- for task in get_flattened_tasks(tasks): - if isinstance(task, PreprocessingTask): - for product in task.products: - if product.filename in filenames: - raise ValueError(msg.format(product.filename)) - filenames.add(product.filename) diff --git a/esmvaltool/_task.py b/esmvaltool/_task.py deleted file mode 100644 index 091b0028a0..0000000000 --- a/esmvaltool/_task.py +++ /dev/null @@ -1,662 +0,0 @@ -"""ESMValtool task definition.""" -import contextlib -import datetime -import errno -import logging -import numbers -import os -import pprint -import subprocess -import threading -import time -from multiprocessing import Pool, cpu_count - -import psutil -import yaml - -from ._config import TAGS, replace_tags -from ._provenance import TrackedFile, get_task_provenance - -logger = logging.getLogger(__name__) - -DATASET_KEYS = { - 'mip', -} - - -def which(executable): - """Find executable in PATH.""" - for path in os.environ["PATH"].split(os.pathsep): - if os.access(os.path.join(path, executable), os.X_OK): - return os.path.join(path, executable) - - return None - - -def _get_resource_usage(process, start_time, children=True): - """Get resource usage.""" - # yield header first - entries = [ - 'Date and time (UTC)', - 'Real time (s)', - 'CPU time (s)', - 'CPU (%)', - 'Memory (GB)', - 'Memory (%)', - 'Disk read (GB)', - 'Disk write (GB)', - ] - fmt = '{}\t' * len(entries[:-1]) + '{}\n' - yield fmt.format(*entries) - - # Compute resource usage - gigabyte = float(2**30) - precision = [1, 1, None, 1, None, 3, 3] - cache = {} - while process.is_running(): - try: - if children: - # Include child processes - processes = process.children(recursive=True) - processes.append(process) - else: - processes = [process] - - # Update resource usage - for proc in cache: - # Set cpu percent and memory usage to 0 for old processes - if proc not in processes: - cache[proc][1] = 0 - cache[proc][2] = 0 - cache[proc][3] = 0 - for proc in processes: - # Update current processes - cache[proc] = [ - proc.cpu_times().user + proc.cpu_times().system, - proc.cpu_percent(), - proc.memory_info().rss / gigabyte, - proc.memory_percent(), - proc.io_counters().read_bytes / gigabyte, - proc.io_counters().write_bytes / gigabyte, - ] - except (OSError, psutil.AccessDenied, psutil.NoSuchProcess): - # Try again if an error occurs because some process died - continue - - # Create and yield log entry - entries = [sum(entry) for entry in zip(*cache.values())] - entries.insert(0, time.time() - start_time) - entries = [round(entry, p) for entry, p in zip(entries, precision)] - entries.insert(0, datetime.datetime.utcnow()) - yield fmt.format(*entries) - - -@contextlib.contextmanager -def resource_usage_logger(pid, filename, interval=1, children=True): - """Log resource usage.""" - halt = threading.Event() - - def _log_resource_usage(): - """Write resource usage to file.""" - process = psutil.Process(pid) - start_time = time.time() - with open(filename, 'w') as file: - for msg in _get_resource_usage(process, start_time, children): - file.write(msg) - time.sleep(interval) - if halt.is_set(): - return - - thread = threading.Thread(target=_log_resource_usage) - thread.start() - try: - yield - finally: - halt.set() - thread.join() - - -def _py2ncl(value, var_name=''): - """Format a structure of Python list/dict/etc items as NCL.""" - txt = var_name + ' = ' if var_name else '' - if value is None: - txt += '_Missing' - elif isinstance(value, str): - txt += '"{}"'.format(value) - elif isinstance(value, (list, tuple)): - if not value: - txt += '_Missing' 
- else: - if isinstance(value[0], numbers.Real): - type_ = numbers.Real - else: - type_ = type(value[0]) - if any(not isinstance(v, type_) for v in value): - raise ValueError( - "NCL array cannot be mixed type: {}".format(value)) - txt += '(/{}/)'.format(', '.join(_py2ncl(v) for v in value)) - elif isinstance(value, dict): - if not var_name: - raise ValueError( - "NCL does not support nested dicts: {}".format(value)) - txt += 'True\n' - for key in value: - txt += '{}@{} = {}\n'.format(var_name, key, _py2ncl(value[key])) - else: - txt += str(value) - return txt - - -def write_ncl_settings(settings, filename, mode='wt'): - """Write a dictionary with generic settings to NCL file.""" - logger.debug("Writing NCL configuration file %s", filename) - - def _ncl_type(value): - """Convert some Python types to NCL types.""" - typemap = { - bool: 'logical', - str: 'string', - float: 'double', - int: 'int64', - dict: 'logical', - } - for type_ in typemap: - if isinstance(value, type_): - return typemap[type_] - raise ValueError("Unable to map {} to an NCL type".format(type(value))) - - lines = [] - for var_name, value in sorted(settings.items()): - if isinstance(value, (list, tuple)): - # Create an NCL list that can span multiple files - lines.append('if (.not. isdefined("{var_name}")) then\n' - ' {var_name} = NewList("fifo")\n' - 'end if\n'.format(var_name=var_name)) - for item in value: - lines.append('ListAppend({var_name}, new(1, {type}))\n' - 'i = ListCount({var_name}) - 1'.format( - var_name=var_name, type=_ncl_type(item))) - lines.append(_py2ncl(item, var_name + '[i]')) - else: - # Create an NCL variable that overwrites previous variables - lines.append('if (isvar("{var_name}")) then\n' - ' delete({var_name})\n' - 'end if\n'.format(var_name=var_name)) - lines.append(_py2ncl(value, var_name)) - - with open(filename, mode) as file: - file.write('\n'.join(lines)) - file.write('\n') - - -class BaseTask(object): - """Base class for defining task classes.""" - - def __init__(self, ancestors=None, name=''): - """Initialize task.""" - self.ancestors = [] if ancestors is None else ancestors - self.output_files = None - self.name = name - self.activity = None - - def initialize_provenance(self, recipe_entity): - """Initialize task provenance activity.""" - if self.activity is not None: - raise ValueError( - "Provenance of {} already initialized".format(self)) - self.activity = get_task_provenance(self, recipe_entity) - - def flatten(self): - """Return a flattened set of all ancestor tasks and task itself.""" - tasks = set() - for task in self.ancestors: - tasks.update(task.flatten()) - tasks.add(self) - return tasks - - def run(self, input_files=None): - """Run task.""" - if not self.output_files: - if input_files is None: - input_files = [] - for task in self.ancestors: - input_files.extend(task.run()) - logger.info("Starting task %s in process [%s]", self.name, - os.getpid()) - self.output_files = self._run(input_files) - logger.info("Successfully completed task %s", self.name) - - return self.output_files - - def _run(self, input_files): - raise NotImplementedError( - "Method should be implemented by child class") - - def str(self): - """Return a nicely formatted description.""" - - def _indent(txt): - return '\n'.join('\t' + line for line in txt.split('\n')) - - txt = 'ancestors:\n{}'.format( - '\n\n'.join(_indent(str(task)) for task in self.ancestors) - if self.ancestors else 'None') - return txt - - -class DiagnosticError(Exception): - """Error in diagnostic.""" - - -class DiagnosticTask(BaseTask): 
- """Task for running a diagnostic.""" - - def __init__(self, script, settings, output_dir, ancestors=None, name=''): - """Create a diagnostic task.""" - super(DiagnosticTask, self).__init__(ancestors=ancestors, name=name) - self.script = script - self.settings = settings - self.products = set() - self.output_dir = output_dir - self.cmd = self._initialize_cmd(script) - self.log = os.path.join(settings['run_dir'], 'log.txt') - self.resource_log = os.path.join(settings['run_dir'], - 'resource_usage.txt') - - def _initialize_cmd(self, script): - """Create a an executable command from script.""" - diagnostics_root = os.path.join( - os.path.dirname(__file__), 'diag_scripts') - script_file = os.path.abspath(os.path.join(diagnostics_root, script)) - - if not os.path.isfile(script_file): - raise DiagnosticError( - "Cannot execute script {} ({}): file does not exist.".format( - script, script_file)) - - cmd = [] - if not os.access(script_file, os.X_OK): # if not executable - extension = os.path.splitext(script)[1].lower()[1:] - if not self.settings['profile_diagnostic']: - executables = { - 'py': [which('python')], - 'ncl': [which('ncl'), '-n', '-p'], - 'r': [which('Rscript')], - 'jl': [which('julia')], - } - else: - profile_file = os.path.join(self.settings['run_dir'], - 'profile.bin') - executables = { - 'py': [ - which('python'), '-m', 'vmprof', '--lines', '-o', - profile_file - ], - 'ncl': [which('ncl'), '-n', '-p'], - 'r': [which('Rscript')], - 'jl': [which('julia')], - } - - if extension not in executables: - raise DiagnosticError( - "Cannot execute script {} ({}): non-executable file " - "with unknown extension.".format(script, script_file)) - - cmd = executables[extension] - - cmd.append(script_file) - - return cmd - - def write_settings(self): - """Write settings to file.""" - run_dir = self.settings['run_dir'] - if not os.path.exists(run_dir): - os.makedirs(run_dir) - - filename = os.path.join(run_dir, 'settings.yml') - - with open(filename, 'w') as file: - yaml.safe_dump(self.settings, file) - - # If running an NCL script: - if self.script.lower().endswith('.ncl'): - # Also write an NCL file and return the name of that instead. - return self._write_ncl_settings() - - return filename - - def _write_ncl_settings(self): - """Write settings to NCL file.""" - filename = os.path.join(self.settings['run_dir'], 'settings.ncl') - - config_user_keys = { - 'run_dir', - 'plot_dir', - 'work_dir', - 'max_data_filesize', - 'output_file_type', - 'log_level', - 'write_plots', - 'write_netcdf', - } - settings = {'diag_script_info': {}, 'config_user_info': {}} - for key, value in self.settings.items(): - if key in config_user_keys: - settings['config_user_info'][key] = value - elif not isinstance(value, dict): - settings['diag_script_info'][key] = value - else: - settings[key] = value - - write_ncl_settings(settings, filename) - - return filename - - def _control_ncl_execution(self, process, lines): - """Check if an error has occurred in an NCL script. - - Apparently NCL does not automatically exit with a non-zero exit code - if an error occurs, so we take care of that here. 
- """ - ignore_warnings = [ - warning.strip() - for warning in self.settings.get('ignore_ncl_warnings', []) - ] - - errors = ['error:', 'fatal:'] - if self.settings['exit_on_ncl_warning']: - errors.append('warning:') - - msg = ("An error occurred during execution of NCL script {}, " - "see the log in {}".format(self.script, self.log)) - - warned = False - for line in lines: - if line.strip() in ignore_warnings: - continue - if 'warning:' in line: - logger.warning("NCL: %s", line) - warned = True - for error in errors: - if error in line: - logger.error(msg) - logger.error("NCL: %s", line) - try: - process.kill() - except OSError: # ignore error if process already exited - pass - else: - logger.error("Killed process.") - raise DiagnosticError(msg) - - if warned: - logger.warning( - "There were warnings during the execution of NCL script %s, " - "for details, see the log %s", self.script, self.log) - - def _start_diagnostic_script(self, cmd, env): - """Start the diagnostic script.""" - logger.info("Running command %s", cmd) - logger.debug("in environment\n%s", pprint.pformat(env)) - cwd = self.settings['run_dir'] - logger.debug("in current working directory: %s", cwd) - logger.info("Writing output to %s", self.output_dir) - logger.info("Writing plots to %s", self.settings['plot_dir']) - logger.info("Writing log to %s", self.log) - - rerun_msg = 'cd {}; '.format(cwd) - if env: - rerun_msg += ' '.join('{}="{}"'.format(k, env[k]) for k in env - if k not in os.environ) - rerun_msg += ' ' + ' '.join(cmd) - logger.info("To re-run this diagnostic script, run:\n%s", rerun_msg) - - try: - process = subprocess.Popen( - cmd, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - cwd=cwd, - env=env) - except OSError as exc: - if exc.errno == errno.ENOEXEC: - logger.error( - "Diagnostic script has its executable bit set, but is " - "not executable. To fix this run:\nchmod -x %s", cmd[0]) - logger.error( - "You may also need to fix this in the git repository.") - raise - - return process - - def _run(self, input_files): - """Run the diagnostic script.""" - if self.script is None: # Run only preprocessor - output_files = [] - return output_files - - is_ncl_script = self.script.lower().endswith('.ncl') - if is_ncl_script: - self.settings['input_files'] = [ - f for f in input_files - if f.endswith('.ncl') or os.path.isdir(f) - ] - else: - self.settings['input_files'] = [ - f for f in input_files - if f.endswith('.yml') or os.path.isdir(f) - ] - - env = dict(os.environ) - if self.script.lower().endswith('.py'): - # Set non-interactive matplotlib backend - env['MPLBACKEND'] = 'Agg' - else: - # Make diag_scripts path available to diagostics scripts - env['diag_scripts'] = os.path.join( - os.path.dirname(__file__), 'diag_scripts') - - cmd = list(self.cmd) - settings_file = self.write_settings() - if is_ncl_script: - env['settings'] = settings_file - else: - cmd.append(settings_file) - - process = self._start_diagnostic_script(cmd, env) - - returncode = None - last_line = [''] - - with resource_usage_logger(process.pid, self.resource_log),\ - open(self.log, 'at') as log: - while returncode is None: - returncode = process.poll() - txt = process.stdout.read() - txt = txt.decode(encoding='utf-8', errors='ignore') - log.write(txt) - - # Check if an error occurred in an NCL script - # Last line is treated separately to avoid missing - # error messages spread out over multiple lines. 
-                lines = txt.split('\n')
-                if is_ncl_script:
-                    self._control_ncl_execution(process, last_line + lines)
-                last_line = lines[-1:]
-
-                # wait, but not long because the stdout buffer may fill up:
-                # https://docs.python.org/3.6/library/subprocess.html#subprocess.Popen.stdout
-                time.sleep(0.001)
-
-        if returncode == 0:
-            self._collect_provenance()
-            return [self.output_dir]
-
-        raise DiagnosticError(
-            "Diagnostic script {} failed with return code {}. See the log "
-            "in {}".format(self.script, returncode, self.log))
-
-    def _collect_provenance(self):
-        """Process provenance information provided by the diagnostic script."""
-        provenance_file = os.path.join(self.settings['run_dir'],
-                                       'diagnostic_provenance.yml')
-        if not os.path.exists(provenance_file):
-            logger.warning("No provenance information was written to %s",
-                           provenance_file)
-            return
-
-        with open(provenance_file, 'r') as file:
-            table = yaml.safe_load(file)
-
-        ignore = (
-            'auxiliary_data_dir',
-            'exit_on_ncl_warning',
-            'input_files',
-            'log_level',
-            'max_data_filesize',
-            'output_file_type',
-            'plot_dir',
-            'profile_diagnostic',
-            'recipe',
-            'run_dir',
-            'version',
-            'write_netcdf',
-            'write_ncl_interface',
-            'write_plots',
-            'work_dir',
-        )
-        attrs = {
-            'script_file': self.script,
-        }
-        for key in self.settings:
-            if key not in ignore:
-                attrs[key] = self.settings[key]
-
-        ancestor_products = {p for a in self.ancestors for p in a.products}
-
-        for filename, attributes in table.items():
-            ancestor_files = attributes.pop('ancestors', [])
-            ancestors = {
-                p
-                for p in ancestor_products if p.filename in ancestor_files
-            }
-
-            attributes.update(attrs)
-            for key in attributes:
-                if key in TAGS:
-                    attributes[key] = replace_tags(key, attributes[key])
-
-            product = TrackedFile(filename, attributes, ancestors)
-            product.initialize_provenance(self.activity)
-            product.save_provenance()
-            self.products.add(product)
-
-    def __str__(self):
-        """Get a human-readable description."""
-        txt = "{}:\nscript: {}\n{}\nsettings:\n{}\n".format(
-            self.__class__.__name__,
-            self.script,
-            pprint.pformat(self.settings, indent=2),
-            super(DiagnosticTask, self).str(),
-        )
-        return txt
-
-
-def get_flattened_tasks(tasks):
-    """Return a set of all tasks and their ancestors in `tasks`."""
-    return set(t for task in tasks for t in task.flatten())
-
-
-def get_independent_tasks(tasks):
-    """Return a set of independent tasks."""
-    independent_tasks = set()
-    all_tasks = get_flattened_tasks(tasks)
-    for task in all_tasks:
-        if not any(task in t.ancestors for t in all_tasks):
-            independent_tasks.add(task)
-    return independent_tasks
-
-
-def run_tasks(tasks, max_parallel_tasks=None):
-    """Run tasks."""
-    if max_parallel_tasks == 1:
-        _run_tasks_sequential(tasks)
-    else:
-        _run_tasks_parallel(tasks, max_parallel_tasks)
-
-
-def _run_tasks_sequential(tasks):
-    """Run tasks sequentially."""
-    n_tasks = len(get_flattened_tasks(tasks))
-    logger.info("Running %s tasks sequentially", n_tasks)
-
-    for task in get_independent_tasks(tasks):
-        task.run()
-
-
-def _run_tasks_parallel(tasks, max_parallel_tasks=None):
-    """Run tasks in parallel."""
-    scheduled = get_flattened_tasks(tasks)
-    running = []
-    results = []
-
-    n_scheduled, n_running = len(scheduled), len(running)
-    n_tasks = n_scheduled
-
-    pool = Pool(processes=max_parallel_tasks)
-
-    logger.info("Running %s tasks using at most %s processes", n_tasks,
-                max_parallel_tasks or cpu_count())
-
-    def done(task):
-        """Assume a task is done if it is not scheduled or running."""
-        return not (task in scheduled or task in running)
-
-    while scheduled or running:
-        # Submit new tasks to pool
-        just_scheduled = []
-        for task in scheduled:
-            if not task.ancestors or all(done(t) for t in task.ancestors):
-                result = pool.apply_async(_run_task, [task])
-                results.append(result)
-                running.append(task)
-                just_scheduled.append(task)
-        for task in just_scheduled:
-            scheduled.remove(task)
-
-        # Handle completed tasks
-        for task, result in zip(running, results):
-            if result.ready():
-                task.output_files, updated_products = result.get()
-                for updated in updated_products:
-                    for original in task.products:
-                        if original.filename == updated.filename:
-                            updated.copy_provenance(target=original)
-                            break
-                    else:
-                        task.products.add(updated)
-                running.remove(task)
-                results.remove(result)
-
-        # Wait if there are still tasks running
-        if running:
-            time.sleep(0.1)
-
-        # Log progress message
-        if len(scheduled) != n_scheduled or len(running) != n_running:
-            n_scheduled, n_running = len(scheduled), len(running)
-            n_done = n_tasks - n_scheduled - n_running
-            logger.info(
-                "Progress: %s tasks running or queued, %s tasks waiting for "
-                "ancestors, %s/%s done", n_running, n_scheduled, n_done,
-                n_tasks)
-
-    pool.close()
-    pool.join()
-
-
-def _run_task(task):
-    """Run task and return the result."""
-    output_files = task.run()
-    return output_files, task.products
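For reference, the loop above is a small dependency-driven scheduler: a task is submitted to the pool only once all of its ancestors are done. A self-contained sketch of the same idea, assuming hypothetical Task objects with `ancestors` and `run` attributes (not the module's API):

import time
from multiprocessing import Pool


def run_dag(tasks):
    """Run hypothetical Task objects once all their ancestors are done."""
    waiting, done, running = set(tasks), set(), {}
    with Pool() as pool:
        while waiting or running:
            for task in [t for t in waiting if set(t.ancestors) <= done]:
                running[task] = pool.apply_async(task.run)
                waiting.discard(task)
            for task in [t for t, r in running.items() if r.ready()]:
                running.pop(task).get()  # re-raises errors from the worker
                done.add(task)
            time.sleep(0.1)  # avoid busy-waiting on pending results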
diff --git a/esmvaltool/_version.py b/esmvaltool/_version.py
deleted file mode 100644
index 0c0137f85a..0000000000
--- a/esmvaltool/_version.py
+++ /dev/null
@@ -1,2 +0,0 @@
-"""ESMValTool version."""
-__version__ = '2.0a1'
diff --git a/esmvaltool/cmor/__init__.py b/esmvaltool/cmor/__init__.py
deleted file mode 100644
index d6cbeb0c5d..0000000000
--- a/esmvaltool/cmor/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-"""CMOR module."""
diff --git a/esmvaltool/cmor/_fixes/CMIP5/BNU_ESM.py b/esmvaltool/cmor/_fixes/CMIP5/BNU_ESM.py
deleted file mode 100644
index 22c86aaee2..0000000000
--- a/esmvaltool/cmor/_fixes/CMIP5/BNU_ESM.py
+++ /dev/null
@@ -1,197 +0,0 @@
-# pylint: disable=invalid-name, no-self-use, too-few-public-methods
-"""Fixes for BNU ESM model."""
-from cf_units import Unit
-from dask import array as da
-
-from ..fix import Fix
-
-
-class fgco2(Fix):
-    """Fixes for fgco2."""
-
-    def fix_metadata(self, cubes):
-        """
-        Fix metadata.
-
-        Fixes cube units.
-
-        Parameters
-        ----------
-        cubes: iris.cube.CubeList
-
-        Returns
-        -------
-        iris.cube.CubeList
-
-        """
-        self.get_cube_from_list(cubes).units = Unit('kg m-2 s-1')
-        return cubes
-
-    def fix_data(self, cube):
-        """
-        Fix data.
-
-        Fixes cube units.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-
-        Returns
-        -------
-        iris.cube.Cube
-
-        """
-        metadata = cube.metadata
-        cube *= 12.0 / 44.0
-        cube.metadata = metadata
-        return cube
-
-
-class ch4(Fix):
-    """Fixes for ch4."""
-
-    def fix_metadata(self, cubes):
-        """
-        Fix metadata.
-
-        Fixes cube units.
-
-        Parameters
-        ----------
-        cubes: iris.cube.CubeList
-
-        Returns
-        -------
-        iris.cube.CubeList
-
-        """
-        self.get_cube_from_list(cubes).units = Unit('1e-9')
-        return cubes
-
-    def fix_data(self, cube):
-        """
-        Fix data.
-
-        Fixes cube units.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-
-        Returns
-        -------
-        iris.cube.Cube
-
-        """
-        metadata = cube.metadata
-        cube *= 29.0 / 16.0 * 1.e9
-        cube.metadata = metadata
-        return cube
-
-
-class co2(Fix):
-    """Fixes for co2."""
-
-    def fix_metadata(self, cubes):
-        """
-        Fix metadata.
-
-        Fixes cube units.
-
-        Parameters
-        ----------
-        cubes: iris.cube.CubeList
-
-        Returns
-        -------
-        iris.cube.CubeList
-
-        """
-        self.get_cube_from_list(cubes).units = Unit('1e-6')
-        return cubes
-
-    def fix_data(self, cube):
-        """
-        Fix data.
-
-        Fixes cube units.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-
-        Returns
-        -------
-        iris.cube.Cube
-
-        """
-        metadata = cube.metadata
-        cube *= 29.0 / 44.0 * 1.e6
-        cube.metadata = metadata
-        return cube
-
-
-class spco2(Fix):
-    """Fixes for spco2."""
-
-    def fix_data(self, cube):
-        """
-        Fix data.
-
-        Fixes cube units.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-
-        Returns
-        -------
-        iris.cube.Cube
-
-        """
-        metadata = cube.metadata
-        cube *= 1.e6
-        cube.metadata = metadata
-        return cube
-
-
-class od550aer(Fix):
-    """Fixes for od550aer."""
-
-    def fix_data(self, cube):
-        """
-        Fix data.
-
-        Masks invalid values.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-
-        Returns
-        -------
-        iris.cube.Cube
-
-        """
-        data = da.ma.masked_equal(cube.core_data(), 1.e36)
-        return cube.copy(data)
-
-
-# No clear way to apply this fix now that we are working with cubes, not files
-
-# class sftlf(Fix):
-#
-#     def fix_metadata(self):
-#         self.cube = self.cube * 1.e6
-
-#     if (name.eq."sftlf") then
-#         files = systemfunc("ls " + INFILE)
-#         f=addfile(files(0), "r")
-#         tmp=f->lat
-#         var&lat = tmp
-#         delete(tmp)
-#         delete(f)
-#         ret = 0
-#     end if
-#
diff --git a/esmvaltool/cmor/_fixes/CMIP5/CCSM4.py b/esmvaltool/cmor/_fixes/CMIP5/CCSM4.py
deleted file mode 100644
index 995db84a0b..0000000000
--- a/esmvaltool/cmor/_fixes/CMIP5/CCSM4.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# pylint: disable=invalid-name, no-self-use, too-few-public-methods
-"""Fixes for CCSM4 model."""
-import numpy as np
-
-from ..fix import Fix
-
-
-# noinspection PyPep8Naming
-class rlut(Fix):
-    """Fixes for rlut."""
-
-    def fix_metadata(self, cubes):
-        """
-        Fix metadata.
-
-        Fixes rounding errors in the latitude coordinate points and bounds.
-
-        Parameters
-        ----------
-        cubes: iris.cube.CubeList
-
-        Returns
-        -------
-        iris.cube.CubeList
-
-        """
-        cube = self.get_cube_from_list(cubes)
-        lat = cube.coord('latitude')
-        lat.points = np.round(lat.points, 3)
-        lat.bounds = np.round(lat.bounds, 3)
-        return cubes
-
-
-class rlutcs(rlut):
-    """Fixes for rlutcs."""
-
-
-class rsut(rlut):
-    """Fixes for rsut."""
-
-
-class rsutcs(rlut):
-    """Fixes for rsutcs."""
-
-
-class so(Fix):
-    """Fixes for so."""
-
-    def fix_metadata(self, cubes):
-        """
-        Fix metadata.
-
-        Fixes discrepancy between declared units and real units.
-
-        Parameters
-        ----------
-        cubes: iris.cube.CubeList
-
-        Returns
-        -------
-        iris.cube.CubeList
-
-        """
-        self.get_cube_from_list(cubes).units = '1e3'
-        return cubes
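All of the per-variable classes deleted in this diff follow the same convention: a class named after the variable, derived from the Fix base class (itself removed later in this diff), that rescales the data while preserving metadata. A hedged minimal sketch of that pattern, assuming a hypothetical mis-scaled variable:

from esmvaltool.cmor._fixes.fix import Fix


class tas(Fix):
    """Hypothetical fix for a variable stored with the wrong scale."""

    def fix_data(self, cube):
        metadata = cube.metadata  # in-place arithmetic discards metadata,
        cube *= 1e-3              # so save it first and restore it after
        cube.metadata = metadata
        return cube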
diff --git a/esmvaltool/cmor/_fixes/CMIP5/CESM1_BGC.py b/esmvaltool/cmor/_fixes/CMIP5/CESM1_BGC.py
deleted file mode 100644
index 7c5b0dfe74..0000000000
--- a/esmvaltool/cmor/_fixes/CMIP5/CESM1_BGC.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# pylint: disable=invalid-name, no-self-use, too-few-public-methods
-"""Fixes for CESM1-BGC model."""
-
-from cf_units import Unit
-
-from ..fix import Fix
-
-
-class co2(Fix):
-    """Fixes for co2 variable."""
-
-    def fix_data(self, cube):
-        """
-        Fix data.
-
-        Fixes discrepancy between declared units and real units.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-
-        Returns
-        -------
-        iris.cube.Cube
-
-        """
-        metadata = cube.metadata
-        cube *= 28.966 / 44.0
-        cube.metadata = metadata
-        return cube
diff --git a/esmvaltool/cmor/_fixes/CMIP5/CNRM_CM5.py b/esmvaltool/cmor/_fixes/CMIP5/CNRM_CM5.py
deleted file mode 100644
index 5c47ec88d2..0000000000
--- a/esmvaltool/cmor/_fixes/CMIP5/CNRM_CM5.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# pylint: disable=invalid-name, no-self-use, too-few-public-methods
-"""Fixes for CNRM-CM5 model."""
-from ..fix import Fix
-
-
-class msftmyz(Fix):
-    """Fixes for msftmyz."""
-
-    def fix_data(self, cube):
-        """
-        Fix data.
-
-        Fixes discrepancy between declared units and real units.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-
-        Returns
-        -------
-        iris.cube.Cube
-
-        """
-        metadata = cube.metadata
-        cube *= 1e6
-        cube.metadata = metadata
-        return cube
-
-
-class msftmyzba(msftmyz):
-    """Fixes for msftmyzba."""
diff --git a/esmvaltool/cmor/_fixes/CMIP5/CanESM2.py b/esmvaltool/cmor/_fixes/CMIP5/CanESM2.py
deleted file mode 100644
index 048b957376..0000000000
--- a/esmvaltool/cmor/_fixes/CMIP5/CanESM2.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# pylint: disable=invalid-name, no-self-use, too-few-public-methods
-"""Fixes for CanESM2 model."""
-from ..fix import Fix
-
-
-# noinspection PyPep8Naming
-class fgco2(Fix):
-    """Fixes for fgco2."""
-
-    def fix_data(self, cube):
-        """
-        Fix data.
-
-        Fixes discrepancy between declared units and real units.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-
-        Returns
-        -------
-        iris.cube.Cube
-
-        """
-        metadata = cube.metadata
-        cube *= 12.0 / 44.0
-        cube.metadata = metadata
-        return cube
diff --git a/esmvaltool/cmor/_fixes/CMIP5/EC_EARTH.py b/esmvaltool/cmor/_fixes/CMIP5/EC_EARTH.py
deleted file mode 100644
index adc8c02c41..0000000000
--- a/esmvaltool/cmor/_fixes/CMIP5/EC_EARTH.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# pylint: disable=invalid-name, no-self-use, too-few-public-methods
-"""Fixes for EC-Earth model."""
-from ..fix import Fix
-
-
-class sic(Fix):
-    """Fixes for sic."""
-
-    def fix_data(self, cube):
-        """
-        Fix data.
-
-        Fixes discrepancy between declared units and real units.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-
-        Returns
-        -------
-        iris.cube.Cube
-
-        """
-        metadata = cube.metadata
-        cube *= 100
-        cube.metadata = metadata
-        return cube
-
-
-class sftlf(Fix):
-    """Fixes for sftlf."""
-
-    def fix_data(self, cube):
-        """
-        Fix data.
-
-        Fixes discrepancy between declared units and real units.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-
-        Returns
-        -------
-        iris.cube.Cube
-
-        """
-        metadata = cube.metadata
-        cube *= 100
-        cube.metadata = metadata
-        return cube
diff --git a/esmvaltool/cmor/_fixes/CMIP5/FGOALS_g2.py b/esmvaltool/cmor/_fixes/CMIP5/FGOALS_g2.py
deleted file mode 100644
index 7395c5e6b8..0000000000
--- a/esmvaltool/cmor/_fixes/CMIP5/FGOALS_g2.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# pylint: disable=invalid-name, no-self-use, too-few-public-methods
-"""Fixes for FGOALS-g2 model."""
-from cf_units import Unit
-
-from ..fix import Fix
-
-
-class allvars(Fix):
-    """Fixes common to all vars."""
-
-    def fix_metadata(self, cubes):
-        """
-        Fix metadata.
-
-        Fixes time units.
-
-        Parameters
-        ----------
-        cubes: iris.cube.CubeList
-
-        Returns
-        -------
-        iris.cube.CubeList
-
-        """
-        for cube in cubes:
-            time = cube.coord('time')
-            time.units = Unit(time.units.name, time.units.calendar)
-        return cubes
diff --git a/esmvaltool/cmor/_fixes/CMIP5/FIO_ESM.py b/esmvaltool/cmor/_fixes/CMIP5/FIO_ESM.py
deleted file mode 100644
index 491af3c861..0000000000
--- a/esmvaltool/cmor/_fixes/CMIP5/FIO_ESM.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# pylint: disable=invalid-name, no-self-use, too-few-public-methods
-"""Fixes for FIO ESM model."""
-from ..fix import Fix
-
-
-class co2(Fix):
-    """Fixes for co2."""
-
-    def fix_data(self, cube):
-        """
-        Fix data.
-
-        Fixes discrepancy between declared units and real units.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-
-        Returns
-        -------
-        iris.cube.Cube
-
-        """
-        metadata = cube.metadata
-        cube *= 29. / 44. * 1.e6
-        cube.metadata = metadata
-        return cube
-
-
-class ch4(Fix):
-    """Fixes for ch4."""
-
-    def fix_data(self, cube):
-        """
-        Fix data.
-
-        Fixes discrepancy between declared units and real units.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-
-        Returns
-        -------
-        iris.cube.Cube
-
-        """
-        metadata = cube.metadata
-        cube *= 29. / 16. * 1.e9
-        cube.metadata = metadata
-        return cube
diff --git a/esmvaltool/cmor/_fixes/CMIP5/GFDL_CM2p1.py b/esmvaltool/cmor/_fixes/CMIP5/GFDL_CM2p1.py
deleted file mode 100644
index af97ded041..0000000000
--- a/esmvaltool/cmor/_fixes/CMIP5/GFDL_CM2p1.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# pylint: disable=invalid-name, no-self-use, too-few-public-methods
-"""Fixes for GFDL CM2p1 model."""
-from ..fix import Fix
-from ..CMIP5.GFDL_ESM2G import allvars as base_allvars
-
-
-class allvars(base_allvars):
-    """Fixes for all variables."""
-
-
-class sftof(Fix):
-    """Fixes for sftof."""
-
-    def fix_data(self, cube):
-        """
-        Fix data.
-
-        Fixes discrepancy between declared units and real units.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-
-        Returns
-        -------
-        iris.cube.Cube
-
-        """
-        metadata = cube.metadata
-        cube *= 100
-        cube.metadata = metadata
-        return cube
diff --git a/esmvaltool/cmor/_fixes/CMIP5/GFDL_CM3.py b/esmvaltool/cmor/_fixes/CMIP5/GFDL_CM3.py
deleted file mode 100644
index b796f8b0f7..0000000000
--- a/esmvaltool/cmor/_fixes/CMIP5/GFDL_CM3.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# pylint: disable=invalid-name, no-self-use, too-few-public-methods
-"""Fixes for GFDL CM3 model."""
-from ..fix import Fix
-
-from ..CMIP5.GFDL_ESM2G import allvars as base_allvars
-
-
-class allvars(base_allvars):
-    """Fixes for all variables."""
-
-
-class sftof(Fix):
-    """Fixes for sftof."""
-
-    def fix_data(self, cube):
-        """
-        Fix data.
-
-        Fixes discrepancy between declared units and real units.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-
-        Returns
-        -------
-        iris.cube.Cube
-
-        """
-        metadata = cube.metadata
-        cube *= 100
-        cube.metadata = metadata
-        return cube
diff --git a/esmvaltool/cmor/_fixes/CMIP5/GFDL_ESM2G.py b/esmvaltool/cmor/_fixes/CMIP5/GFDL_ESM2G.py
deleted file mode 100644
index 008c4d70c7..0000000000
--- a/esmvaltool/cmor/_fixes/CMIP5/GFDL_ESM2G.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# pylint: disable=invalid-name, no-self-use, too-few-public-methods
-"""Fixes for GFDL ESM2G."""
-import iris
-from iris.coords import AuxCoord
-from ..fix import Fix
-
-
-class allvars(Fix):
-    """Common fixes."""
-
-    def fix_metadata(self, cubes):
-        """
-        Fix metadata.
-
-        Removes the extra cubes that describe the averaging period.
-
-        Parameters
-        ----------
-        cubes: iris.cube.CubeList
-
-        Returns
-        -------
-        iris.cube.CubeList
-
-        """
-        self._get_and_remove(cubes, 'Start time for average period')
-        self._get_and_remove(cubes, 'End time for average period')
-        self._get_and_remove(cubes, 'Length of average period')
-        return cubes
-
-    def _get_and_remove(self, cubes, long_name):
-        try:
-            cube = cubes.extract_strict(long_name)
-            cubes.remove(cube)
-        except iris.exceptions.ConstraintMismatchError:
-            pass
-
-
-class co2(Fix):
-    """Fixes for co2."""
-
-    def fix_data(self, cube):
-        """
-        Fix data.
-
-        Fixes discrepancy between declared units and real units.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-
-        Returns
-        -------
-        iris.cube.Cube
-
-        """
-        metadata = cube.metadata
-        cube *= 1e6
-        cube.metadata = metadata
-        return cube
diff --git a/esmvaltool/cmor/_fixes/CMIP5/GFDL_ESM2M.py b/esmvaltool/cmor/_fixes/CMIP5/GFDL_ESM2M.py
deleted file mode 100644
index ec161a6e70..0000000000
--- a/esmvaltool/cmor/_fixes/CMIP5/GFDL_ESM2M.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# pylint: disable=invalid-name, no-self-use, too-few-public-methods
-"""Fixes for GFDL ESM2M."""
-from cf_units import Unit
-
-from ..fix import Fix
-from ..CMIP5.GFDL_ESM2G import allvars as base_allvars
-
-
-class allvars(base_allvars):
-    """Fixes for all variables."""
-
-
-class sftof(Fix):
-    """Fixes for sftof."""
-
-    def fix_data(self, cube):
-        """
-        Fix data.
-
-        Fixes discrepancy between declared units and real units.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-
-        Returns
-        -------
-        iris.cube.Cube
-
-        """
-        metadata = cube.metadata
-        cube *= 100
-        cube.metadata = metadata
-        return cube
-
-
-class co2(Fix):
-    """Fixes for co2."""
-
-    def fix_data(self, cube):
-        """
-        Fix data.
-
-        Fixes discrepancy between declared units and real units.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-
-        Returns
-        -------
-        iris.cube.Cube
-
-        """
-        metadata = cube.metadata
-        cube *= 1e6
-        cube.metadata = metadata
-        return cube
- """ - new_path = Fix.get_fixed_filepath(output_dir, filepath) - cube = iris.load_cube(filepath) - - std = 'mole_concentration_of_dissolved_molecular_oxygen_in_sea_water' - long_name = 'Dissolved Oxygen Concentration' - - cube.long_name = long_name - cube.standard_name = std - - iris.save(cube, new_path) - return new_path diff --git a/esmvaltool/cmor/_fixes/CMIP5/HadGEM2_ES.py b/esmvaltool/cmor/_fixes/CMIP5/HadGEM2_ES.py deleted file mode 100644 index b9a4e3c0e6..0000000000 --- a/esmvaltool/cmor/_fixes/CMIP5/HadGEM2_ES.py +++ /dev/null @@ -1,68 +0,0 @@ -# pylint: disable=invalid-name, no-self-use, too-few-public-methods -"""Fixes for HadGEM2_ES.""" -import numpy as np -import iris - -from ..fix import Fix - - -class allvars(Fix): - """Fixes common to all vars.""" - - def fix_metadata(self, cubes): - """ - Fixes latitude. - - Parameters - ---------- - cube: iris.cube.CubeList - - Returns - ------- - iris.cube.Cube - - """ - for cube in cubes: - lats = cube.coords('latitude') - if lats: - lat = cube.coord('latitude') - lat.points = np.clip(lat.points, -90., 90.) - lat.bounds = np.clip(lat.bounds, -90., 90.) - - return cubes - - -class o2(Fix): - """Fixes for o2.""" - - def fix_file(self, filepath, output_dir): - """ - Apply fixes to the files prior to creating the cube. - - Should be used only to fix errors that prevent loading or can - not be fixed in the cube (i.e. those related with missing_value - and _FillValue or missing standard_name). - Parameters - ---------- - filepath: basestring - file to fix. - output_dir: basestring - path to the folder to store the fix files, if required. - Returns - ------- - basestring - Path to the corrected file. It can be different from the original - filepath if a fix has been applied, but if not it should be the - original filepath. - """ - new_path = Fix.get_fixed_filepath(output_dir, filepath) - cube = iris.load_cube(filepath) - - std = 'mole_concentration_of_dissolved_molecular_oxygen_in_sea_water' - long_name = 'Dissolved Oxygen Concentration' - - cube.long_name = long_name - cube.standard_name = std - - iris.save(cube, new_path) - return new_path diff --git a/esmvaltool/cmor/_fixes/CMIP5/MIROC5.py b/esmvaltool/cmor/_fixes/CMIP5/MIROC5.py deleted file mode 100644 index 352b3bd2b2..0000000000 --- a/esmvaltool/cmor/_fixes/CMIP5/MIROC5.py +++ /dev/null @@ -1,110 +0,0 @@ -# pylint: disable=invalid-name, no-self-use, too-few-public-methods -"""Fixes for MIROC5 model.""" -import numpy as np -from ..fix import Fix - - -class sftof(Fix): - """Fixes for sftof.""" - - def fix_data(self, cube): - """ - Fix data. - - Fixes discrepancy between declared units and real units - - Parameters - ---------- - cube: iris.cube.Cube - - Returns - ------- - iris.cube.Cube - - """ - metadata = cube.metadata - cube *= 100 - cube.metadata = metadata - return cube - - -class snw(Fix): - """Fixes for snw.""" - - def fix_data(self, cube): - """ - Fix data. - - Fixes discrepancy between declared units and real units - - Parameters - ---------- - cube: iris.cube.Cube - - Returns - ------- - iris.cube.Cube - - """ - metadata = cube.metadata - cube *= 100 - cube.metadata = metadata - return cube - - -class snc(snw): - """Fixes for snc.""" - - # dayspermonth = (/31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31/) - - # if ((name.eq."snc".or.name.eq."snw").and.FIELD.eq."T2Ds".and. 
diff --git a/esmvaltool/cmor/_fixes/CMIP5/MIROC5.py b/esmvaltool/cmor/_fixes/CMIP5/MIROC5.py
deleted file mode 100644
index 352b3bd2b2..0000000000
--- a/esmvaltool/cmor/_fixes/CMIP5/MIROC5.py
+++ /dev/null
@@ -1,110 +0,0 @@
-# pylint: disable=invalid-name, no-self-use, too-few-public-methods
-"""Fixes for MIROC5 model."""
-import numpy as np
-from ..fix import Fix
-
-
-class sftof(Fix):
-    """Fixes for sftof."""
-
-    def fix_data(self, cube):
-        """
-        Fix data.
-
-        Fixes discrepancy between declared units and real units.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-
-        Returns
-        -------
-        iris.cube.Cube
-
-        """
-        metadata = cube.metadata
-        cube *= 100
-        cube.metadata = metadata
-        return cube
-
-
-class snw(Fix):
-    """Fixes for snw."""
-
-    def fix_data(self, cube):
-        """
-        Fix data.
-
-        Fixes discrepancy between declared units and real units.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-
-        Returns
-        -------
-        iris.cube.Cube
-
-        """
-        metadata = cube.metadata
-        cube *= 100
-        cube.metadata = metadata
-        return cube
-
-
-class snc(snw):
-    """Fixes for snc."""
-
-    # dayspermonth = (/31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31/)
-
-    # if ((name.eq."snc".or.name.eq."snw").and.FIELD.eq."T2Ds".and. \
-    #     ENSEMBLE.eq."r1i1p1") then
-    #     opt = 0
-    #     opt@calendar = var&time@calendar
-    #     t = 0.0
-    #     t@calendar = var&time@calendar
-    #     t@units = var&time@units
-    #     res = cd_calendar(t, -5)
-    #     yy = res(0, 0)
-    #     mm = res(0, 1)
-    #     dd = res(0, 2)
-    #     do ii = 0, dimsizes(var&time) - 1
-    #         var&time(ii) = tofloat(cd_inv_calendar(yy, mm, dd, 12, 0, 0, \
-    #                                var&time@units, opt))
-    #         dd = dd + 1
-    #         if (dd.gt.dayspermonth(mm-1)) then
-    #             mm = mm + 1
-    #             dd = 1
-    #         end if
-    #         if (mm.gt.12) then
-    #             mm = 1
-    #             yy = yy + 1
-    #         end if
-    #     end do
-    #     ret = 0
-    # end if
-
-
-class msftmyz(Fix):
-    """Fixes for msftmyz."""
-
-    def fix_data(self, cube):
-        """
-        Fix data.
-
-        Fixes mask.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-
-        Returns
-        -------
-        iris.cube.Cube
-
-        """
-        cube.data = np.ma.array(cube.data)
-        cube.data = np.ma.masked_where(cube.data.mask + (cube.data == 0.),
-                                       cube.data)
-
-        return cube
diff --git a/esmvaltool/cmor/_fixes/CMIP5/MIROC_ESM.py b/esmvaltool/cmor/_fixes/CMIP5/MIROC_ESM.py
deleted file mode 100644
index a976fb1aba..0000000000
--- a/esmvaltool/cmor/_fixes/CMIP5/MIROC_ESM.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# pylint: disable=invalid-name, no-self-use, too-few-public-methods
-"""Fixes for MIROC ESM model."""
-import cf_units
-from iris.coords import DimCoord
-from iris.exceptions import CoordinateNotFoundError
-
-from ..fix import Fix
-
-
-class tro3(Fix):
-    """Fixes for tro3."""
-
-    def fix_data(self, cube):
-        """
-        Fix data.
-
-        Fixes discrepancy between declared units and real units.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-
-        Returns
-        -------
-        iris.cube.Cube
-
-        """
-        metadata = cube.metadata
-        cube *= 1000
-        cube.metadata = metadata
-        return cube
-
-
-class co2(Fix):
-    """Fixes for co2."""
-
-    def fix_metadata(self, cubes):
-        """
-        Fix metadata.
-
-        Fixes error in cube units.
-
-        Parameters
-        ----------
-        cubes: iris.cube.CubeList
-
-        Returns
-        -------
-        iris.cube.CubeList
-
-        """
-        self.get_cube_from_list(cubes).units = '1.0e-6'
-        return cubes
-
-
-class gpp(Fix):
-    """Fixes for gpp."""
-
-    def fix_metadata(self, cubes):
-        """
-        Fix metadata.
-
-        Fixes error in cube units.
-
-        Parameters
-        ----------
-        cubes: iris.cube.CubeList
-
-        Returns
-        -------
-        iris.cube.CubeList
-
-        """
-        # Fixing the metadata, automatic unit conversion should do the trick
-        self.get_cube_from_list(cubes).units = cf_units.Unit('g m-2 day-1')
-        return cubes
-
-
-class allvars(Fix):
-    """Common fixes to all vars."""
-
-    def fix_metadata(self, cubes):
-        """
-        Fix metadata.
-
-        Fixes error in air_pressure coordinate, sometimes called AR5PL35.
-
-        Parameters
-        ----------
-        cubes: iris.cube.CubeList
-
-        Returns
-        -------
-        iris.cube.CubeList
-
-        """
-        for cube in cubes:
-            try:
-                old = cube.coord('AR5PL35')
-                dims = cube.coord_dims(old)
-                cube.remove_coord(old)
-
-                plev = DimCoord.from_coord(old)
-                plev.var_name = 'plev'
-                plev.standard_name = 'air_pressure'
-                plev.long_name = 'Pressure'
-                cube.add_dim_coord(plev, dims)
-            except CoordinateNotFoundError:
-                pass
-
-        return cubes
diff --git a/esmvaltool/cmor/_fixes/CMIP5/MIROC_ESM_CHEM.py b/esmvaltool/cmor/_fixes/CMIP5/MIROC_ESM_CHEM.py
deleted file mode 100644
index 072f510ee5..0000000000
--- a/esmvaltool/cmor/_fixes/CMIP5/MIROC_ESM_CHEM.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# pylint: disable=invalid-name, no-self-use, too-few-public-methods
-"""Fixes for MIROC ESM CHEM."""
-from ..fix import Fix
-
-
-class tro3(Fix):
-    """Fixes for tro3."""
-
-    def fix_data(self, cube):
-        """
-        Fix data.
-
-        Fixes discrepancy between declared units and real units.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-
-        Returns
-        -------
-        iris.cube.Cube
-
-        """
-        metadata = cube.metadata
-        cube *= 1000
-        cube.metadata = metadata
-        return cube
-
-
-# if (name .eq. "tro3") then
-#     if (iscoord(var, "time")) then
-#         do it = 1, dimsizes(var&time) - 1
-#             if (var&time(it).eq.0) then
-#                 tt = tointeger(cd_calendar(var&time(it-1), 0))
-#                 tt(0, 1) = tt(0, 1) + 1  ; month
-#                 if (tt(0, 1).gt.12) then
-#                     tt(0, 1) = 1
-#                     tt(0, 0) = tt(0, 0) + 1  ; year
-#                 end if
-#                 var&time(it) = cd_inv_calendar(\
-#                     tt(0, 0), tt(0, 1), tt(0, 2), tt(0, 3), \
-#                     tt(0, 4), tt(0, 5), var&time@units, 0)
-#             end if
-#         end do
-#         ret = 0
-#     end if
-# end if
diff --git a/esmvaltool/cmor/_fixes/CMIP5/MPI_ESM_LR.py b/esmvaltool/cmor/_fixes/CMIP5/MPI_ESM_LR.py
deleted file mode 100644
index c0a5b483e9..0000000000
--- a/esmvaltool/cmor/_fixes/CMIP5/MPI_ESM_LR.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# pylint: disable=invalid-name, no-self-use, too-few-public-methods
-"""Fixes for MPI ESM LR model."""
-from ..fix import Fix
-
-
-class pctisccp(Fix):
-    """Fixes for pctisccp."""
-
-    def fix_data(self, cube):
-        """
-        Fix data.
-
-        Fixes discrepancy between declared units and real units.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-
-        Returns
-        -------
-        iris.cube.Cube
-
-        """
-        metadata = cube.metadata
-        cube *= 100
-        cube.metadata = metadata
-        return cube
diff --git a/esmvaltool/cmor/_fixes/CMIP5/MRI_CGCM3.py b/esmvaltool/cmor/_fixes/CMIP5/MRI_CGCM3.py
deleted file mode 100644
index ab03ae2d99..0000000000
--- a/esmvaltool/cmor/_fixes/CMIP5/MRI_CGCM3.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# pylint: disable=invalid-name, no-self-use, too-few-public-methods
-"""Fixes for MRI-CGCM3 model."""
-import numpy as np
-from ..fix import Fix
-
-
-class msftmyz(Fix):
-    """Fixes for msftmyz."""
-
-    def fix_data(self, cube):
-        """
-        Fix msftmyz data.
-
-        Fixes mask.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-
-        Returns
-        -------
-        iris.cube.Cube
-
-        """
-        cube.data = np.ma.array(cube.data)
-        cube.data = np.ma.masked_where(cube.data.mask + (cube.data == 0.),
-                                       cube.data)
-
-        return cube
-
-
-class thetao(Fix):
-    """Fixes for thetao."""
-
-    def fix_data(self, cube):
-        """
-        Fix thetao data.
-
-        Fixes mask.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-
-        Returns
-        -------
-        iris.cube.Cube
-
-        """
-        cube.data = np.ma.array(cube.data)
-        cube.data = np.ma.masked_where(np.logical_or(cube.data.mask,
-                                                     cube.data == 0.),
-                                       cube.data)
-
-        return cube
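The mask fixes above rely on the NumPy fact that adding two boolean arrays behaves as an element-wise logical OR, so `cube.data.mask + (cube.data == 0.)` masks cells that were already masked as well as spurious zeros; the thetao fix spells out the same thing with np.logical_or. A quick illustration:

import numpy as np

a = np.array([True, False, False])
b = np.array([False, False, True])
print(a + b)                # [ True False  True]
print(np.logical_or(a, b))  # identical result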
diff --git a/esmvaltool/cmor/_fixes/CMIP5/MRI_ESM1.py b/esmvaltool/cmor/_fixes/CMIP5/MRI_ESM1.py
deleted file mode 100644
index 4484dd7c0e..0000000000
--- a/esmvaltool/cmor/_fixes/CMIP5/MRI_ESM1.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# pylint: disable=invalid-name, no-self-use, too-few-public-methods
-"""Fixes for MRI-ESM1 model."""
-import numpy as np
-from ..fix import Fix
-
-
-class msftmyz(Fix):
-    """Fixes for msftmyz."""
-
-    def fix_data(self, cube):
-        """
-        Fix msftmyz data.
-
-        Fixes mask.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-
-        Returns
-        -------
-        iris.cube.Cube
-
-        """
-        cube.data = np.ma.array(cube.data)
-        cube.data = np.ma.masked_where(cube.data.mask + (cube.data == 0.),
-                                       cube.data)
-
-        return cube
diff --git a/esmvaltool/cmor/_fixes/CMIP5/__init__.py b/esmvaltool/cmor/_fixes/CMIP5/__init__.py
deleted file mode 100644
index 7f4ba3d544..0000000000
--- a/esmvaltool/cmor/_fixes/CMIP5/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-"""Fixes for CMIP5 data."""
diff --git a/esmvaltool/cmor/_fixes/CMIP5/bcc_csm1_1.py b/esmvaltool/cmor/_fixes/CMIP5/bcc_csm1_1.py
deleted file mode 100644
index c5bb894908..0000000000
--- a/esmvaltool/cmor/_fixes/CMIP5/bcc_csm1_1.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# pylint: disable=invalid-name, no-self-use, too-few-public-methods
-"""Fixes for bcc-csm1-1."""
-import numpy as np
-from scipy.interpolate import InterpolatedUnivariateSpline
-from scipy.ndimage import map_coordinates
-
-from ..fix import Fix
-
-
-class tos(Fix):
-    """Fixes for tos."""
-
-    def fix_data(self, cube):
-        """Fix data.
-
-        Calculate missing latitude/longitude boundaries using interpolation.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-
-        Returns
-        -------
-        iris.cube.Cube
-
-        """
-        rlat = cube.coord('grid_latitude').points
-        rlon = cube.coord('grid_longitude').points
-
-        # Transform grid latitude/longitude to array indices [0, 1, 2, ...]
-        rlat_to_idx = InterpolatedUnivariateSpline(
-            rlat, np.arange(len(rlat)), k=1)
-        rlon_to_idx = InterpolatedUnivariateSpline(
-            rlon, np.arange(len(rlon)), k=1)
-        rlat_idx_bnds = rlat_to_idx(cube.coord('grid_latitude').bounds)
-        rlon_idx_bnds = rlon_to_idx(cube.coord('grid_longitude').bounds)
-
-        # Calculate latitude/longitude vertices by interpolation
-        lat_vertices = []
-        lon_vertices = []
-        for (i, j) in [(0, 0), (0, 1), (1, 1), (1, 0)]:
-            (rlat_v, rlon_v) = np.meshgrid(
-                rlat_idx_bnds[:, i], rlon_idx_bnds[:, j], indexing='ij')
-            lat_vertices.append(
-                map_coordinates(
-                    cube.coord('latitude').points, [rlat_v, rlon_v],
-                    mode='nearest'))
-            lon_vertices.append(
-                map_coordinates(
-                    cube.coord('longitude').points, [rlat_v, rlon_v],
-                    mode='wrap'))
-        lat_vertices = np.array(lat_vertices)
-        lon_vertices = np.array(lon_vertices)
-        lat_vertices = np.moveaxis(lat_vertices, 0, -1)
-        lon_vertices = np.moveaxis(lon_vertices, 0, -1)
-
-        # Copy vertices to cube
-        cube.coord('latitude').bounds = lat_vertices
-        cube.coord('longitude').bounds = lon_vertices
-
-        return cube
diff --git a/esmvaltool/cmor/_fixes/CMIP5/bcc_csm1_1_m.py b/esmvaltool/cmor/_fixes/CMIP5/bcc_csm1_1_m.py
deleted file mode 100644
index e1ec8b43e2..0000000000
--- a/esmvaltool/cmor/_fixes/CMIP5/bcc_csm1_1_m.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# pylint: disable=invalid-name, no-self-use, too-few-public-methods
-"""Fixes for bcc-csm1-1-m."""
-import numpy as np
-from scipy.interpolate import InterpolatedUnivariateSpline
-from scipy.ndimage import map_coordinates
-
-from ..fix import Fix
-
-
-class tos(Fix):
-    """Fixes for tos."""
-
-    def fix_data(self, cube):
-        """Fix data.
-
-        Calculate missing latitude/longitude boundaries using interpolation.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-
-        Returns
-        -------
-        iris.cube.Cube
-
-        """
-        rlat = cube.coord('grid_latitude').points
-        rlon = cube.coord('grid_longitude').points
-
-        # Transform grid latitude/longitude to array indices [0, 1, 2, ...]
-        rlat_to_idx = InterpolatedUnivariateSpline(
-            rlat, np.arange(len(rlat)), k=1)
-        rlon_to_idx = InterpolatedUnivariateSpline(
-            rlon, np.arange(len(rlon)), k=1)
-        rlat_idx_bnds = rlat_to_idx(cube.coord('grid_latitude').bounds)
-        rlon_idx_bnds = rlon_to_idx(cube.coord('grid_longitude').bounds)
-
-        # Calculate latitude/longitude vertices by interpolation
-        lat_vertices = []
-        lon_vertices = []
-        for (i, j) in [(0, 0), (0, 1), (1, 1), (1, 0)]:
-            (rlat_v, rlon_v) = np.meshgrid(
-                rlat_idx_bnds[:, i], rlon_idx_bnds[:, j], indexing='ij')
-            lat_vertices.append(
-                map_coordinates(
-                    cube.coord('latitude').points, [rlat_v, rlon_v],
-                    mode='nearest'))
-            lon_vertices.append(
-                map_coordinates(
-                    cube.coord('longitude').points, [rlat_v, rlon_v],
-                    mode='wrap'))
-        lat_vertices = np.array(lat_vertices)
-        lon_vertices = np.array(lon_vertices)
-        lat_vertices = np.moveaxis(lat_vertices, 0, -1)
-        lon_vertices = np.moveaxis(lon_vertices, 0, -1)
-
-        # Copy vertices to cube
-        cube.coord('latitude').bounds = lat_vertices
-        cube.coord('longitude').bounds = lon_vertices
-
-        return cube
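The two tos fixes above reconstruct cell vertices by expressing the rotated-grid bounds as fractional array indices and then sampling the 2-D latitude/longitude fields at those indices with map_coordinates. A 1-D sketch of the first step, with hypothetical values:

import numpy as np
from scipy.interpolate import InterpolatedUnivariateSpline

rlat = np.array([10., 20., 30.])                        # cell centres
bounds = np.array([[5., 15.], [15., 25.], [25., 35.]])  # cell bounds
to_idx = InterpolatedUnivariateSpline(rlat, np.arange(len(rlat)), k=1)
print(to_idx(bounds))  # fractional indices, e.g. [[-0.5  0.5] ...]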
diff --git a/esmvaltool/cmor/_fixes/CMIP5/inmcm4.py b/esmvaltool/cmor/_fixes/CMIP5/inmcm4.py
deleted file mode 100644
index 91d340846a..0000000000
--- a/esmvaltool/cmor/_fixes/CMIP5/inmcm4.py
+++ /dev/null
@@ -1,117 +0,0 @@
-# pylint: disable=invalid-name, no-self-use, too-few-public-methods
-"""Fixes for inmcm4 model."""
-import iris
-
-from ..fix import Fix
-
-
-class gpp(Fix):
-    """Fixes for gpp."""
-
-    def fix_data(self, cube):
-        """
-        Fix data.
-
-        Fixes discrepancy between declared units and real units.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-
-        Returns
-        -------
-        iris.cube.Cube
-
-        """
-        metadata = cube.metadata
-        cube *= -1
-        cube.metadata = metadata
-        return cube
-
-
-class lai(Fix):
-    """Fixes for lai."""
-
-    def fix_data(self, cube):
-        """
-        Fix data.
-
-        Fixes discrepancy between declared units and real units.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-
-        Returns
-        -------
-        iris.cube.Cube
-
-        """
-        metadata = cube.metadata
-        cube *= 0.01
-        cube.metadata = metadata
-        return cube
-
-
-class nbp(Fix):
-    """Fixes for nbp."""
-
-    def fix_file(self, filepath, output_dir):
-        """
-        Apply fixes to the files prior to creating the cube.
-
-        Should be used only to fix errors that prevent loading or cannot
-        be fixed in the cube (i.e. those related with missing_value
-        and _FillValue or missing standard_name).
-
-        Parameters
-        ----------
-        filepath: basestring
-            file to fix.
-        output_dir: basestring
-            path to the folder to store the fixed files, if required.
-
-        Returns
-        -------
-        basestring
-            Path to the corrected file. It can be different from the original
-            filepath if a fix has been applied, but if not it should be the
-            original filepath.
-
-        """
-        new_path = Fix.get_fixed_filepath(output_dir, filepath)
-        cube = iris.load_cube(filepath)
-        cube.standard_name = ('surface_net_downward_mass_flux_of_carbon_'
-                              'dioxide_expressed_as_carbon_due_to_all_land_'
-                              'processes')
-        iris.save(cube, new_path)
-        return new_path
-
-
-class baresoilFrac(Fix):
-    """Fixes for baresoilFrac."""
-
-    def fix_metadata(self, cubelist):
-        """
-        Fix missing scalar dimension.
-
-        Parameters
-        ----------
-        cubelist: iris.cube.CubeList
-            List of cubes to fix
-
-        Returns
-        -------
-        iris.cube.CubeList
-
-        """
-        typebare = iris.coords.AuxCoord(
-            'bare_ground',
-            standard_name='area_type',
-            long_name='surface type',
-            var_name='type',
-            units='1',
-            bounds=None)
-        for cube in cubelist:
-            cube.add_aux_coord(typebare)
-        return cubelist
diff --git a/esmvaltool/cmor/_fixes/PRIMAVERA/EC_Earth3_HR.py b/esmvaltool/cmor/_fixes/PRIMAVERA/EC_Earth3_HR.py
deleted file mode 100644
index 6f31db6ada..0000000000
--- a/esmvaltool/cmor/_fixes/PRIMAVERA/EC_Earth3_HR.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# pylint: disable=invalid-name, no-self-use, too-few-public-methods
-"""Fixes for EC-Earth3-HR PRIMAVERA project data."""
-from ..fix import Fix
-
-
-class allvars(Fix):
-    """Fixes common to all variables."""
-
-    def fix_metadata(self, cubes):
-        """
-        Fix cube metadata.
-
-        Parameters
-        ----------
-        cubes: iris.cube.CubeList
-            Cubes to fix
-
-        Returns
-        -------
-        iris.cube.CubeList
-            Fixed cubes. They are the same instances that were received.
-        """
-        for cube in cubes:
-            latitude = cube.coord('latitude')
-            latitude.var_name = 'lat'
-
-            longitude = cube.coord('longitude')
-            longitude.var_name = 'lon'
-        return cubes
diff --git a/esmvaltool/cmor/_fixes/PRIMAVERA/__init__.py b/esmvaltool/cmor/_fixes/PRIMAVERA/__init__.py
deleted file mode 100644
index 74148590d1..0000000000
--- a/esmvaltool/cmor/_fixes/PRIMAVERA/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-"""Fixes for PRIMAVERA project data."""
diff --git a/esmvaltool/cmor/_fixes/__init__.py b/esmvaltool/cmor/_fixes/__init__.py
deleted file mode 100644
index e50e749659..0000000000
--- a/esmvaltool/cmor/_fixes/__init__.py
+++ /dev/null
@@ -1,6 +0,0 @@
-"""
-Automatic fixes for input data.
-
-Module to apply automatic fixes at different levels to input data for known
-errors.
-"""
diff --git a/esmvaltool/cmor/_fixes/fix.py b/esmvaltool/cmor/_fixes/fix.py
deleted file mode 100644
index 50e2477960..0000000000
--- a/esmvaltool/cmor/_fixes/fix.py
+++ /dev/null
@@ -1,171 +0,0 @@
-"""Contains the base class for dataset fixes."""
-import importlib
-import os
-
-
-class Fix(object):
-    """
-    Base class for dataset fixes.
-    """
-
-    def fix_file(self, filepath, output_dir):
-        """
-        Apply fixes to the files prior to creating the cube.
-
-        Should be used only to fix errors that prevent loading or cannot
-        be fixed in the cube (i.e. those related with missing_value
-        and _FillValue)
-
-        Parameters
-        ----------
-        filepath: basestring
-            file to fix
-        output_dir: basestring
-            path to the folder to store the fixed files, if required
-
-        Returns
-        -------
-        basestring
-            Path to the corrected file. It can be different from the original
-            filepath if a fix has been applied, but if not it should be the
-            original filepath
-
-        """
-        return filepath
-
-    def fix_metadata(self, cubes):
-        """
-        Apply fixes to the metadata of the cube.
-
-        Changes applied here must not require data loading.
-
-        These fixes should be applied before checking the metadata.
-
-        Parameters
-        ----------
-        cubes: iris.cube.CubeList
-            Cubes to fix
-
-        Returns
-        -------
-        iris.cube.CubeList
-            Fixed cubes. They can be different instances.
-
-        """
-        return cubes
-
-    def get_cube_from_list(self, cubes, short_name=None):
-        """
-        Get a cube from the list with a given short name.
-
-        Parameters
-        ----------
-        cubes : iris.cube.CubeList
-            List of cubes to search
-        short_name : str
-            Cube's variable short name. If None, the short name is the
-            class name
-
-        Raises
-        ------
-        Exception
-            If no cube is found
-
-        Returns
-        -------
-        iris.Cube
-            Variable's cube
-        """
-        if short_name is None:
-            short_name = self.__class__.__name__
-        for cube in cubes:
-            if cube.var_name == short_name:
-                return cube
-        raise Exception('Cube for variable "{}" not found'.format(short_name))
-
-    def fix_data(self, cube):
-        """
-        Apply fixes to the data of the cube.
-
-        These fixes should be applied before checking the data.
-
-        Parameters
-        ----------
-        cube: iris.cube.Cube
-            Cube to fix
-
-        Returns
-        -------
-        iris.cube.Cube
-            Fixed cube. It can be a different instance.
-
-        """
-        return cube
-
-    def __eq__(self, other):
-        return type(self) == type(other)
-
-    def __ne__(self, other):
-        return not (self == other)
-
-    @staticmethod
-    def get_fixes(project, dataset, variable):
-        """
-        Get the fixes that must be applied for a given dataset.
-
-        It will look for them at the module
-        esmvaltool.cmor._fixes.PROJECT in the file DATASET, and get
-        the classes named allvars (which should be used for fixes that are
-        present in all the variables of a dataset, e.g. a bad name for the
-        time coordinate) and VARIABLE (which should be used for fixes for the
-        specific variable).
-
-        Project, dataset and variable names will have '-' replaced by '_'
-        before checking because it is not possible to use the character '-' in
-        python names.
-
-        Parameters
-        ----------
-        project: str
-        dataset: str
-        variable: str
-
-        Returns
-        -------
-        list(Fix)
-            Fixes to apply for the given data
-        """
-        project = project.replace('-', '_')
-        dataset = dataset.replace('-', '_')
-        variable = variable.replace('-', '_')
-
-        fixes = []
-        try:
-            fixes_module = importlib.import_module(
-                'esmvaltool.cmor._fixes.{0}.{1}'.format(project, dataset))
-            for fix_name in ('allvars', variable):
-                try:
-                    fixes.append(getattr(fixes_module, fix_name)())
-                except AttributeError:
-                    pass
-        except ImportError:
-            pass
-        return fixes
-
-    @staticmethod
-    def get_fixed_filepath(output_dir, filepath):
-        """
-        Get the filepath for the fixed file.
-
-        Parameters
-        ----------
-        filepath: str
-            Original path
-
-        Returns
-        -------
-        str
-            Path to the fixed file
-        """
-        if not os.path.isdir(output_dir):
-            os.makedirs(output_dir)
-        return os.path.join(output_dir, os.path.basename(filepath))
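A hedged usage sketch of the lookup described in the get_fixes docstring above ('-' is mapped to '_' before the module and class names are resolved; values shown are illustrative):

from esmvaltool.cmor._fixes.fix import Fix

# Resolves esmvaltool.cmor._fixes.CMIP5.BNU_ESM and instantiates the
# classes named 'allvars' (if present) and 'fgco2'.
fixes = Fix.get_fixes(project='CMIP5', dataset='BNU-ESM', variable='fgco2')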
diff --git a/esmvaltool/cmor/_fixes/obs4mips/SSMI.py b/esmvaltool/cmor/_fixes/obs4mips/SSMI.py
deleted file mode 100644
index 5343af0826..0000000000
--- a/esmvaltool/cmor/_fixes/obs4mips/SSMI.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# pylint: disable=invalid-name, no-self-use, too-few-public-methods
-"""Fixes for SSMI model."""
-from ..fix import Fix
-
-
-class prw(Fix):
-    """Fixes for prw."""
-
-    def fix_metadata(self, cubes):
-        for cube in cubes:
-            latitude = cube.coord('latitude')
-            latitude.var_name = 'lat'
-
-            longitude = cube.coord('longitude')
-            longitude.var_name = 'lon'
-        return cubes
diff --git a/esmvaltool/cmor/_fixes/obs4mips/SSMI_MERIS.py b/esmvaltool/cmor/_fixes/obs4mips/SSMI_MERIS.py
deleted file mode 100644
index 56c88daa8e..0000000000
--- a/esmvaltool/cmor/_fixes/obs4mips/SSMI_MERIS.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# pylint: disable=invalid-name, no-self-use, too-few-public-methods
-"""Fixes for SSMI-MERIS data."""
-from iris.cube import CubeList
-
-from ..fix import Fix
-
-
-# noinspection PyPep8Naming
-class prw(Fix):
-    """Fixes for prw."""
-
-    def fix_metadata(self, cubes):
-        """
-        Fix metadata.
-
-        Remove error and number of observations cubes.
-
-        Parameters
-        ----------
-        cubes: iris.cube.CubeList
-
-        Returns
-        -------
-        iris.cube.CubeList
-
-        """
-        cube = self.get_cube_from_list(cubes)
-        return CubeList([cube])
diff --git a/esmvaltool/cmor/_fixes/obs4mips/__init__.py b/esmvaltool/cmor/_fixes/obs4mips/__init__.py
deleted file mode 100644
index 255025f5a8..0000000000
--- a/esmvaltool/cmor/_fixes/obs4mips/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-"""Fixes for obs4mips data."""
diff --git a/esmvaltool/cmor/check.py b/esmvaltool/cmor/check.py
deleted file mode 100644
index be72ad7575..0000000000
--- a/esmvaltool/cmor/check.py
+++ /dev/null
@@ -1,649 +0,0 @@
-"""Module for checking iris cubes against their CMOR definitions."""
-import logging
-
-import cf_units
-import iris.coord_categorisation
-import iris.coords
-import iris.exceptions
-import iris.util
-import numpy as np
-
-from .table import CMOR_TABLES
-
-
-class CMORCheckError(Exception):
-    """Exception raised when a cube does not pass the CMORCheck."""
-
-
-class CMORCheck(object):
-    """Class used to check the CMOR-compliance of the data.
-
-    It can also fix some minor errors and performs some minor data
-    homogenization.
-
-    Parameters
-    ----------
-    cube: iris.cube.Cube:
-        Iris cube to check.
-    var_info: variables_info.VariableInfo
-        Variable info to check.
-    frequency: str
-        Expected frequency for the data.
-    fail_on_error: bool
-        If True, CMORCheck stops on the first error. If False, it collects
-        all possible errors before stopping.
-    automatic_fixes: bool
-        If True, CMORCheck will try to apply automatic fixes for any
-        detected error, if possible.
-
-    Attributes
-    ----------
-    frequency: str
-        Expected frequency for the data.
-    automatic_fixes: bool
-        If True, CMORCheck will try to apply automatic fixes for any
-        detected error, if possible.
-
-    """
-
-    _attr_msg = '{}: {} should be {}, not {}'
-    _does_msg = '{}: does not {}'
-    _is_msg = '{}: is not {}'
-    _vals_msg = '{}: has values {} {}'
-    _contain_msg = '{}: does not contain {} {}'
-
-    def __init__(self,
-                 cube,
-                 var_info,
-                 frequency=None,
-                 fail_on_error=False,
-                 automatic_fixes=False):
-
-        self._cube = cube
-        self._failerr = fail_on_error
-        self._errors = list()
-        self._warnings = list()
-        self._cmor_var = var_info
-        if frequency is None:
-            frequency = self._cmor_var.frequency
-        self.frequency = frequency
-        self.automatic_fixes = automatic_fixes
-
-    def check_metadata(self, logger=None):
-        """Check the cube metadata.
-
-        Perform all the tests that do not require the data to be in memory.
-
-        It will also report some warnings in case of minor errors and
-        homogenize some data:
-
-        - Equivalent calendars will all default to the same name.
-        - Auxiliary coordinates year, month_number, day_of_month and
-          day_of_year will be added for the time axis.
-
-        Raises
-        ------
-        CMORCheckError:
-            If errors are found. If the fail_on_error attribute is set to
-            True, raises as soon as an error is detected. If set to False,
-            it performs all checks and then raises.
-
-        """
-        if logger is None:
-            logger = logging.getLogger(__name__)
-
-        self._check_var_metadata()
-        self._check_fill_value()
-        self._check_dim_names()
-        self._check_coords()
-        self._check_time_coord()
-        self._check_rank()
-
-        self.report_warnings(logger)
-        self.report_errors()
-
-        self._add_auxiliary_time_coordinates()
-        return self._cube
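A hedged usage sketch of the two-phase check (var_info would come from the CMOR tables; the values shown are hypothetical):

checker = CMORCheck(cube, var_info, frequency='mon', automatic_fixes=True)
cube = checker.check_metadata()  # lazy checks; may apply automatic fixes
cube = checker.check_data()      # checks that need the data in memory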
- - """ - if self.has_errors(): - msg = 'There were errors in variable {}:\n{}\nin cube:\n{}' - msg = msg.format(self._cube.var_name, '\n '.join(self._errors), - self._cube) - raise CMORCheckError(msg) - - def report_warnings(self, logger): - """Report detected warnings to the given logger. - - Parameters - ---------- - logger - - """ - if self.has_warnings(): - msg = 'There were warnings in variable {}:\n{}\n'.format( - self._cube.var_name, '\n '.join(self._warnings)) - logger.warning(msg) - - def check_data(self, logger=None): - """Check the cube data. - - Performs all the tests that require to have the data in memory. - Assumes that metadata is correct, so you must call check_metadata prior - to this. - - It will also report some warnings in case of minor errors. - - Raises - ------ - CMORCheckException: - If errors are found. If fail_on_error attribute is set to True, - raises as soon as an error is detected. If set to False, it perform - all checks and then raises. - - """ - if logger is None: - logger = logging.getLogger(__name__) - - if self._cmor_var.units: - units = self._get_efective_units() - if str(self._cube.units) != units: - self._cube.convert_units(units) - - self._check_coords_data() - - self.report_warnings(logger) - self.report_errors() - return self._cube - - def _check_fill_value(self): - """Check fill value.""" - # Iris removes _FillValue/missing_value information if data has none - # of these values. If there are values == _FillValue then it will - # be encoded in the numpy.ma object created. - # - # => Very difficult to check! - pass - - def _check_var_metadata(self): - """Check metadata of variable.""" - # Check standard_name - if self._cmor_var.standard_name: - if self._cube.standard_name != self._cmor_var.standard_name: - self.report_error( - self._attr_msg, self._cube.var_name, 'standard_name', - self._cmor_var.standard_name, self._cube.standard_name) - - # Check units - if self._cube.attributes.get('invalid_units', '').lower() == 'psu': - self._cube.units = '1.0' - del self._cube.attributes['invalid_units'] - - if self._cmor_var.units: - units = self._get_efective_units() - - if not self._cube.units.is_convertible(units): - self.report_error( - 'Variable {0} units () can not be ' - 'converted to {2}', self._cube.var_name, - self._cmor_var.units, self._cube.units) - - # Check other variable attributes that match entries in cube.attributes - attrs = ('positive', ) - for attr in attrs: - attr_value = getattr(self._cmor_var, attr) - if attr_value: - if attr not in self._cube.attributes: - self.report_warning('{}: attribute {} not present', - self._cube.var_name, attr) - elif self._cube.attributes[attr] != attr_value: - self.report_error(self._attr_msg, self._cube.var_name, - attr, attr_value, - self._cube.attributes[attr]) - - def _get_efective_units(self): - """Get effective units.""" - if self._cmor_var.units.lower() == 'psu': - units = '1.0' - else: - units = self._cmor_var.units - return units - - def _check_rank(self): - """Check rank, excluding scalar dimensions.""" - rank = 0 - dimensions = [] - for coordinate in self._cmor_var.coordinates.values(): - if coordinate.generic_level: - rank += 1 - elif not coordinate.value: - try: - for dim in self._cube.coord_dims(coordinate.standard_name): - dimensions.append(dim) - except iris.exceptions.CoordinateNotFoundError: - # Error reported at other stages - pass - rank += len(set(dimensions)) - - # Check number of dimension coords matches rank - if self._cube.ndim != rank: - self.report_error(self._does_msg, 
-    def _check_rank(self):
-        """Check rank, excluding scalar dimensions."""
-        rank = 0
-        dimensions = []
-        for coordinate in self._cmor_var.coordinates.values():
-            if coordinate.generic_level:
-                rank += 1
-            elif not coordinate.value:
-                try:
-                    for dim in self._cube.coord_dims(coordinate.standard_name):
-                        dimensions.append(dim)
-                except iris.exceptions.CoordinateNotFoundError:
-                    # Error reported at other stages
-                    pass
-        rank += len(set(dimensions))
-
-        # Check number of dimension coords matches rank
-        if self._cube.ndim != rank:
-            self.report_error(self._does_msg, self._cube.var_name,
-                              'match coordinate rank')
-
-    def _check_dim_names(self):
-        """Check dimension names."""
-        for (_, coordinate) in self._cmor_var.coordinates.items():
-            if coordinate.generic_level:
-                continue
-            else:
-                try:
-                    cube_coord = self._cube.coord(var_name=coordinate.out_name)
-                    if cube_coord.standard_name != coordinate.standard_name:
-                        self.report_error(
-                            self._attr_msg,
-                            coordinate.out_name,
-                            'standard_name',
-                            coordinate.standard_name,
-                            cube_coord.standard_name,
-                        )
-                except iris.exceptions.CoordinateNotFoundError:
-                    try:
-                        coord = self._cube.coord(coordinate.standard_name)
-                        self.report_error(
-                            'Coordinate {0} has var name {1} instead of {2}',
-                            coordinate.name,
-                            coord.var_name,
-                            coordinate.out_name,
-                        )
-                    except iris.exceptions.CoordinateNotFoundError:
-                        self.report_error(self._does_msg, coordinate.name,
-                                          'exist')
-
-    def _check_coords(self):
-        """Check coordinates."""
-        for coordinate in self._cmor_var.coordinates.values():
-            # Cannot check generic_level coords as no CMOR information
-            if coordinate.generic_level:
-                continue
-            var_name = coordinate.out_name
-
-            # Get coordinate var_name as it exists!
-            try:
-                coord = self._cube.coord(var_name=var_name, dim_coords=True)
-            except iris.exceptions.CoordinateNotFoundError:
-                continue
-
-            self._check_coord(coordinate, coord, var_name)
-
-    def _check_coords_data(self):
-        """Check coordinate data."""
-        for coordinate in self._cmor_var.coordinates.values():
-            # Cannot check generic_level coords as no CMOR information
-            if coordinate.generic_level:
-                continue
-            var_name = coordinate.out_name
-
-            # Get coordinate var_name as it exists!
-            try:
-                coord = self._cube.coord(var_name=var_name, dim_coords=True)
-            except iris.exceptions.CoordinateNotFoundError:
-                continue
-
-            self._check_coord_monotonicity_and_direction(
-                coordinate, coord, var_name)
-
-    def _check_coord(self, cmor, coord, var_name):
-        """Check single coordinate."""
-        if coord.var_name == 'time':
-            return
-        if cmor.units:
-            if str(coord.units) != cmor.units:
-                fixed = False
-                if self.automatic_fixes:
-                    try:
-                        new_unit = cf_units.Unit(cmor.units,
-                                                 coord.units.calendar)
-                        coord.convert_units(new_unit)
-                        fixed = True
-                    except ValueError:
-                        pass
-                if not fixed:
-                    self.report_error(self._attr_msg, var_name, 'units',
-                                      cmor.units, coord.units)
-        self._check_coord_values(cmor, coord, var_name)
-        if not self.automatic_fixes:
-            self._check_coord_monotonicity_and_direction(cmor, coord, var_name)
-
-    def _check_coord_monotonicity_and_direction(self, cmor, coord, var_name):
-        """Check monotonicity and direction of coordinate."""
-        if not coord.is_monotonic():
-            self.report_error(self._is_msg, var_name, 'monotonic')
-        if len(coord.points) == 1:
-            return
-        if cmor.stored_direction:
-            if cmor.stored_direction == 'increasing':
-                if coord.points[0] > coord.points[1]:
-                    if not self.automatic_fixes or coord.ndim > 1:
-                        self.report_error(self._is_msg, var_name, 'increasing')
-                    else:
-                        self._reverse_coord(coord)
-            elif cmor.stored_direction == 'decreasing':
-                if coord.points[0] < coord.points[1]:
-                    if not self.automatic_fixes or coord.ndim > 1:
-                        self.report_error(self._is_msg, var_name, 'decreasing')
-                    else:
-                        self._reverse_coord(coord)
-
-    def _reverse_coord(self, coord):
-        """Reverse coordinate."""
-        if coord.ndim == 1:
-            self._cube = iris.util.reverse(self._cube,
-                                           self._cube.coord_dims(coord))
-
-    def _check_coord_values(self, coord_info, coord, var_name):
-        """Check coordinate values."""
-        # Check requested coordinate values exist in coord.points
-        self._check_requested_values(coord, coord_info, var_name)
-
-        l_fix_coord_value = False
-
-        # Check coordinate value ranges
-        if coord_info.valid_min:
-            valid_min = float(coord_info.valid_min)
-            if np.any(coord.points < valid_min):
-                if coord_info.standard_name == 'longitude' and \
-                        self.automatic_fixes:
-                    l_fix_coord_value = True
-                else:
-                    self.report_error(self._vals_msg, var_name,
-                                      '< {} ='.format('valid_min'), valid_min)
-
-        if coord_info.valid_max:
-            valid_max = float(coord_info.valid_max)
-            if np.any(coord.points > valid_max):
-                if coord_info.standard_name == 'longitude' and \
-                        self.automatic_fixes:
-                    l_fix_coord_value = True
-                else:
-                    self.report_error(self._vals_msg, var_name,
-                                      '> {} ='.format('valid_max'), valid_max)
-
-        if l_fix_coord_value:
-            lon_extent = iris.coords.CoordExtent(coord, 0.0, 360., True, False)
-            self._cube = self._cube.intersection(lon_extent)
-
-    def _check_requested_values(self, coord, coord_info, var_name):
-        """Check requested values."""
-        if coord_info.requested:
-            cmor_points = [float(val) for val in coord_info.requested]
-            coord_points = list(coord.points)
-            for point in cmor_points:
-                if point not in coord_points:
-                    self.report_warning(self._contain_msg, var_name,
-                                        str(point), str(coord.units))
-
-    def _check_time_coord(self):
-        """Check time coordinate."""
-        try:
-            coord = self._cube.coord('time', dim_coords=True)  # , axis='T')
-            var_name = coord.var_name
-        except iris.exceptions.CoordinateNotFoundError:
-            return
-
-        if not coord.units.is_time_reference():
-            self.report_error(self._does_msg, var_name,
-                              'have time reference units')
-        else:
-            coord.convert_units(
-                cf_units.Unit(
-                    'days since 1950-1-1 00:00:00',
-                    calendar=coord.units.calendar))
-            simplified_cal = self._simplify_calendars(coord.units.calendar)
-            coord.units = cf_units.Unit(coord.units.origin, simplified_cal)
-
-        tol = 0.001
-        intervals = {'dec': (3600, 3660), 'day': (1, 1)}
-        if self.frequency == 'mon':
-            for i in range(len(coord.points) - 1):
-                first = coord.cell(i).point
-                second = coord.cell(i + 1).point
-                second_month = first.month + 1
-                second_year = first.year
-                if second_month == 13:
-                    second_month = 1
-                    second_year += 1
-                if second_month != second.month or \
-                        second_year != second.year:
-                    msg = '{}: Frequency {} does not match input data'
-                    self.report_error(msg, var_name, self.frequency)
-                    break
-        elif self.frequency == 'yr':
-            for i in range(len(coord.points) - 1):
-                first = coord.cell(i).point
-                second = coord.cell(i + 1).point
-                second_month = first.month + 1
-                if first.year + 1 != second.year:
-                    msg = '{}: Frequency {} does not match input data'
-                    self.report_error(msg, var_name, self.frequency)
-                    break
-        else:
-            if self.frequency in intervals:
-                interval = intervals[self.frequency]
-                target_interval = (interval[0] - tol, interval[1] + tol)
-            elif self.frequency.endswith('hr'):
-                frequency = self.frequency[:-2]
-                if frequency == 'sub':
-                    frequency = 1.0 / 24
-                    target_interval = (-tol, frequency + tol)
-                else:
-                    frequency = float(frequency) / 24
-                    target_interval = (frequency - tol, frequency + tol)
-            else:
-                msg = '{}: Frequency {} not supported by checker'
-                self.report_error(msg, var_name, self.frequency)
-                return
-            for i in range(len(coord.points) - 1):
-                interval = coord.points[i + 1] - coord.points[i]
-                if (interval < target_interval[0]
-                        or interval > target_interval[1]):
-                    msg = '{}: Frequency {} does not match input data'
-                    self.report_error(msg, var_name, self.frequency)
-                    break
-
-    CALENDARS = [
-        ['gregorian', 'standard'],
-        ['proleptic_gregorian'],
['365_day', 'noleap'],
-        ['366_day', 'all_leap'],
-        ['360_day'],
-        ['julian'],
-        ['none'],
-    ]
-
-    @staticmethod
-    def _simplify_calendars(calendar):
-        for calendar_type in CMORCheck.CALENDARS:
-            if calendar in calendar_type:
-                return calendar_type[0]
-
-    def has_errors(self):
-        """Check if there are reported errors.
-
-        Returns
-        -------
-        bool:
-            True if there are pending errors, False otherwise.
-
-        """
-        return len(self._errors) > 0
-
-    def has_warnings(self):
-        """Check if there are reported warnings.
-
-        Returns
-        -------
-        bool:
-            True if there are pending warnings, False otherwise.
-
-        """
-        return len(self._warnings) > 0
-
-    def report_error(self, message, *args):
-        """Report an error.
-
-        If fail_on_error is set to True, raises automatically.
-        If fail_on_error is set to False, stores it for later reports.
-
-        Parameters
-        ----------
-        message: str
-            Message for the error.
-        *args:
-            Arguments to format the message string.
-
-        """
-        msg = message.format(*args)
-        if self._failerr:
-            raise CMORCheckError(msg + '\nin cube:\n{}'.format(self._cube))
-        else:
-            self._errors.append(msg)
-
-    def report_warning(self, message, *args):
-        """Report a warning.
-
-        If fail_on_error is set to True, logs it automatically.
-        If fail_on_error is set to False, stores it for later reports.
-
-        Parameters
-        ----------
-        message: str
-            Message for the warning.
-        *args:
-            Arguments to format the message string.
-
-        """
-        msg = message.format(*args)
-        if self._failerr:
-            print('WARNING: {0}'.format(msg))
-        else:
-            self._warnings.append(msg)
-
-    def _add_auxiliar_time_coordinates(self):
-        coords = [coord.name() for coord in self._cube.aux_coords]
-        if 'day_of_month' not in coords:
-            iris.coord_categorisation.add_day_of_month(self._cube, 'time')
-        if 'day_of_year' not in coords:
-            iris.coord_categorisation.add_day_of_year(self._cube, 'time')
-        if 'month_number' not in coords:
-            iris.coord_categorisation.add_month_number(self._cube, 'time')
-        if 'year' not in coords:
-            iris.coord_categorisation.add_year(self._cube, 'time')
-
-
-def _get_cmor_checker(table,
-                      mip,
-                      short_name,
-                      frequency,
-                      fail_on_error=True,
-                      automatic_fixes=False):
-    """Get a CMOR checker/fixer."""
-    if table not in CMOR_TABLES:
-        raise NotImplementedError(
-            "No CMOR checker implemented for table {}."
-            "\nThe following options are available: {}".format(
-                table, ', '.join(CMOR_TABLES)))
-
-    cmor_table = CMOR_TABLES[table]
-    var_info = cmor_table.get_variable(mip, short_name)
-    if var_info is None:
-        var_info = CMOR_TABLES['custom'].get_variable(mip, short_name)
-
-    def _checker(cube):
-        return CMORCheck(
-            cube,
-            var_info,
-            frequency=frequency,
-            fail_on_error=fail_on_error,
-            automatic_fixes=automatic_fixes)
-
-    return _checker
-
-
-def cmor_check_metadata(cube, cmor_table, mip, short_name, frequency):
-    """Check if metadata conforms to the variable's CMOR definition.
-
-    None of the checks at this step will force the cube to load the data.
-
-    Parameters
-    ----------
-    cube: iris.cube.Cube
-        Data cube to check.
-    cmor_table: basestring
-        CMOR definitions to use.
-    mip: basestring
-        Variable's MIP.
-    short_name: basestring
-        Variable's short name.
-    frequency: basestring
-        Data frequency.
-
-    """
-    checker = _get_cmor_checker(cmor_table, mip, short_name, frequency)
-    checker(cube).check_metadata()
-    return cube
-
-
-def cmor_check_data(cube, cmor_table, mip, short_name, frequency):
-    """Check if data conforms to the variable's CMOR definition.
-
-    The checks performed at this step require the data in memory.
-
-    Parameters
-    ----------
-    cube: iris.cube.Cube
-        Data cube to check.
-    cmor_table: basestring
-        CMOR definitions to use.
-    mip: basestring
-        Variable's MIP.
-    short_name: basestring
-        Variable's short name.
-    frequency: basestring
-        Data frequency.
-
-    """
-    checker = _get_cmor_checker(cmor_table, mip, short_name, frequency)
-    checker(cube).check_data()
-    return cube
-
-
-def cmor_check(cube, cmor_table, mip, short_name, frequency):
-    """Check if cube conforms to the variable's CMOR definition.
-
-    Equivalent to calling cmor_check_metadata and cmor_check_data
-    consecutively.
-
-    Parameters
-    ----------
-    cube: iris.cube.Cube
-        Data cube to check.
-    cmor_table: basestring
-        CMOR definitions to use.
-    mip: basestring
-        Variable's MIP.
-    short_name: basestring
-        Variable's short name.
-    frequency: basestring
-        Data frequency.
-
-    """
-    cmor_check_metadata(cube, cmor_table, mip, short_name, frequency)
-    cmor_check_data(cube, cmor_table, mip, short_name, frequency)
-    return cube
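For reference, a minimal sketch of how the checker API removed above was invoked; the input file name is hypothetical, and the CMOR tables are assumed to have been loaded beforehand with read_cmor_tables:

    import iris

    from esmvaltool.cmor.check import CMORCheckError, cmor_check

    cube = iris.load_cube('tas_example.nc')  # hypothetical CMORized file
    try:
        # Lazy metadata checks run first, then the in-memory data checks;
        # _get_cmor_checker defaults to fail_on_error=True, so the first
        # problem raises a CMORCheckError instead of being collected.
        cmor_check(cube, cmor_table='CMIP5', mip='Amon',
                   short_name='tas', frequency='mon')
    except CMORCheckError as exc:
        print('Cube is not CMOR-compliant: {0}'.format(exc))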
- - """ - fixes = Fix.get_fixes( - project=project, dataset=dataset, variable=short_name) - fixed_cubes = [] - by_file = defaultdict(list) - for cube in cubes: - by_file[cube.attributes.get('source_file', '')].append(cube) - - for cube_list in by_file.values(): - cube_list = CubeList(cube_list) - for fix in fixes: - cube_list = fix.fix_metadata(cube_list) - - if len(cube_list) != 1: - raise ValueError('Cubes were not reduced to one after' - 'fixing: %s' % cube_list) - cube = cube_list[0] - - if cmor_table and mip: - checker = _get_cmor_checker( - frequency=frequency, - table=cmor_table, - mip=mip, - short_name=short_name, - fail_on_error=False, - automatic_fixes=True) - cube = checker(cube).check_metadata() - cube.attributes.pop('source_file', None) - fixed_cubes.append(cube) - return fixed_cubes - - -def fix_data(cube, - short_name, - project, - dataset, - cmor_table=None, - mip=None, - frequency=None): - """ - Fix cube data if fixes add present and check it anyway. - - This method assumes that metadata is already fixed and checked. - - This method collects all the relevant fixes for a given variable, applies - them and checks resulting cube (or the original if no fixes were - needed) metadata to ensure that it complies with the standards of its - project CMOR tables. - - Parameters - ---------- - cube: iris.cube.Cube - Cube to fix - short_name; str - Variable's short name - project: str - - dataset: str - - cmor_table: str, optional - CMOR tables to use for the check, if available - - mip: str, optional - Variable's MIP, if available - - frequency: str, optional - Variable's data frequency, if available - - Returns - ------- - iris.cube.Cube: - Fixed and checked cube - - Raises - ------ - CMORCheckError: - If the checker detects errors in the data that it can not fix. - - """ - for fix in Fix.get_fixes( - project=project, dataset=dataset, variable=short_name): - cube = fix.fix_data(cube) - if cmor_table and mip: - checker = _get_cmor_checker( - frequency=frequency, - table=cmor_table, - mip=mip, - short_name=short_name, - fail_on_error=False, - automatic_fixes=True) - cube = checker(cube).check_data() - return cube diff --git a/esmvaltool/cmor/table.py b/esmvaltool/cmor/table.py deleted file mode 100644 index 805ede3a6a..0000000000 --- a/esmvaltool/cmor/table.py +++ /dev/null @@ -1,649 +0,0 @@ -""" -CMOR information reader for ESMValTool. - -Read variable information from CMOR 2 and CMOR 3 tables and make it easily -available for the other components of ESMValTool -""" -import errno -import glob -import json -import logging -import os - -logger = logging.getLogger(__name__) - -CMOR_TABLES = {} -"""dict of str, obj: CMOR info objects.""" - - -def read_cmor_tables(cfg_developer): - """Read cmor tables required in the configuration. 
diff --git a/esmvaltool/cmor/table.py b/esmvaltool/cmor/table.py
deleted file mode 100644
index 805ede3a6a..0000000000
--- a/esmvaltool/cmor/table.py
+++ /dev/null
@@ -1,649 +0,0 @@
-"""
-CMOR information reader for ESMValTool.
-
-Read variable information from CMOR 2 and CMOR 3 tables and make it easily
-available for the other components of ESMValTool.
-"""
-import errno
-import glob
-import json
-import logging
-import os
-
-logger = logging.getLogger(__name__)
-
-CMOR_TABLES = {}
-"""dict of str, obj: CMOR info objects."""
-
-
-def read_cmor_tables(cfg_developer):
-    """Read the CMOR tables required in the configuration.
-
-    Parameters
-    ----------
-    cfg_developer : dict of str
-        Parsed config-developer file
-
-    """
-    custom = CustomInfo()
-    CMOR_TABLES['custom'] = custom
-
-    for table in cfg_developer:
-        project = cfg_developer[table]
-
-        cmor_type = project.get('cmor_type', 'CMIP5')
-        table_path = project.get('cmor_tables', cmor_type.lower())
-        table_path = os.path.expandvars(os.path.expanduser(table_path))
-
-        cmor_strict = project.get('cmor_strict', True)
-        if cmor_strict:
-            default = None
-        else:
-            default = custom
-        if cmor_type == 'CMIP5':
-            CMOR_TABLES[table] = CMIP5Info(
-                table_path, default=default,
-            )
-        elif cmor_type == 'CMIP6':
-            CMOR_TABLES[table] = CMIP6Info(
-                table_path, default=default,
-            )
-
-
-class CMIP6Info(object):
-    """
-    Class to read CMIP6-like data request.
-
-    This uses the CMOR 3 JSON format.
-
-    Parameters
-    ----------
-    cmor_tables_path: basestring
-        Path to the folder containing the Tables folder with the json files
-
-    """
-
-    _CMIP_5to6_varname = {
-        'sic': 'siconc',
-        'sit': 'sithick',
-        'tro3': 'o3',
-    }
-
-    def __init__(self, cmor_tables_path, default=None):
-        cmor_tables_path = self._get_cmor_path(cmor_tables_path)
-
-        self._cmor_folder = os.path.join(cmor_tables_path, 'Tables')
-        self.default = default
-
-        self.tables = {}
-
-        self._load_coordinates()
-        for json_file in glob.glob(os.path.join(self._cmor_folder, '*.json')):
-            if 'CV_test' in json_file or 'grids' in json_file:
-                continue
-            self._load_table(json_file)
-
-    @staticmethod
-    def _get_cmor_path(cmor_tables_path):
-        if os.path.isdir(cmor_tables_path):
-            return cmor_tables_path
-        cwd = os.path.dirname(os.path.realpath(__file__))
-        cmor_tables_path = os.path.join(cwd, 'tables', cmor_tables_path)
-        return cmor_tables_path
-
-    def _load_table(self, json_file):
-        with open(json_file) as inf:
-            raw_data = json.loads(inf.read())
-            if not self._is_table(raw_data):
-                return
-            table = TableInfo()
-            header = raw_data['Header']
-            table.name = header['table_id'][6:].split('_')[-1]
-            self.tables[table.name] = table
-
-            generic_levels = header['generic_levels'].split()
-            table.frequency = header.get('frequency', '')
-            table.realm = header.get('realm', '')
-
-            for var_name, var_data in raw_data['variable_entry'].items():
-                var = VariableInfo('CMIP6', var_name)
-                if 'frequency' in var_data:
-                    var.frequency = var_data['frequency']
-                else:
-                    var.frequency = table.frequency
-                var.read_json(var_data)
-                self._assign_dimensions(var, generic_levels)
-                table[var_name] = var
-
-    def _assign_dimensions(self, var, generic_levels):
-        for dimension in var.dimensions:
-            if dimension in generic_levels:
-                coord = CoordinateInfo(dimension)
-                coord.generic_level = True
-                coord.axis = 'Z'
-            else:
-                coord = self.coords[dimension]
-
-            axis = coord.axis
-            if not axis:
-                axis = 'none'
-
-            var.coordinates[axis] = coord
-
-    def _load_coordinates(self):
-        self.coords = {}
-        for json_file in glob.glob(
-                os.path.join(self._cmor_folder, '*coordinate*.json')):
-            with open(json_file) as inf:
-                table_data = json.loads(inf.read())
-                for coord_name in table_data['axis_entry'].keys():
-                    coord = CoordinateInfo(coord_name)
-                    coord.read_json(table_data['axis_entry'][coord_name])
-                    self.coords[coord_name] = coord
-
-    def get_table(self, table):
-        """
-        Search and return the table info.
- - Parameters - ---------- - table: basestring - Table name - - Returns - ------- - TableInfo - Return the TableInfo object for the requested table if - found, returns None if not - - """ - return self.tables.get(table) - - def get_variable(self, table, short_name): - """ - Search and return the variable info. - - Parameters - ---------- - table: basestring - Table name - short_name: basestring - Variable's short name - - Returns - ------- - VariableInfo - Return the VariableInfo object for the requested variable if - found, returns None if not - - """ - try: - return self.tables[table][short_name] - except KeyError: - if short_name in CMIP6Info._CMIP_5to6_varname: - new_short_name = CMIP6Info._CMIP_5to6_varname[short_name] - return self.get_variable(table, new_short_name) - if self.default: - return self.default.get_variable(table, short_name) - return None - - @staticmethod - def _is_table(table_data): - if 'variable_entry' not in table_data: - return False - if 'Header' not in table_data: - return False - return True - - -class TableInfo(dict): - """Container class for storing a CMOR table.""" - - def __init__(self, *args, **kwargs): - """Create a new TableInfo object for storing VariableInfo objects.""" - super(TableInfo, self).__init__(*args, **kwargs) - self.name = '' - self.frequency = '' - self.realm = '' - - -class JsonInfo(object): - """ - Base class for the info classes. - - Provides common utility methods to read json variables - """ - - def __init__(self): - self._json_data = {} - - def _read_json_variable(self, parameter): - """ - Read a json parameter in json_data. - - Parameters - ---------- - parameter: str - parameter to read - - Returns - ------- - str - Option's value or empty string if parameter is not present - - """ - if parameter not in self._json_data: - return '' - return str(self._json_data[parameter]) - - def _read_json_list_variable(self, parameter): - """ - Read a json list parameter in json_data. - - Parameters - ---------- - parameter: str - parameter to read - - Returns - ------- - str - Option's value or empty list if parameter is not present - - """ - if parameter not in self._json_data: - return [] - return self._json_data[parameter] - - -class VariableInfo(JsonInfo): - """Class to read and store variable information.""" - - def __init__(self, table_type, short_name): - """ - Class to read and store variable information. - - Parameters - ---------- - short_name: str - variable's short name - - """ - super(VariableInfo, self).__init__() - self.table_type = table_type - self.modeling_realm = [] - """Modeling realm""" - self.short_name = short_name - """Short name""" - self.standard_name = '' - """Standard name""" - self.long_name = '' - """Long name""" - self.units = '' - """Data units""" - self.valid_min = '' - """Minimum admitted value""" - self.valid_max = '' - """Maximum admitted value""" - self.frequency = '' - """Data frequency""" - self.positive = '' - """Increasing direction""" - - self.dimensions = [] - """List of dimensions""" - self.coordinates = {} - """Coordinates""" - - self._json_data = None - - def read_json(self, json_data): - """ - Read variable information from json. 
- - Non-present options will be set to empty - - Parameters - ---------- - json_data: dict - dictionary created by the json reader containing - variable information - - """ - self._json_data = json_data - - self.standard_name = self._read_json_variable('standard_name') - self.long_name = self._read_json_variable('long_name') - self.units = self._read_json_variable('units') - self.valid_min = self._read_json_variable('valid_min') - self.valid_max = self._read_json_variable('valid_max') - self.positive = self._read_json_variable('positive') - self.modeling_realm = \ - self._read_json_variable('modeling_realm').split() - - self.dimensions = self._read_json_variable('dimensions').split() - - -class CoordinateInfo(JsonInfo): - """Class to read and store coordinate information.""" - - def __init__(self, name): - """ - Class to read and store coordinate information. - - Parameters - ---------- - name: str - coordinate's name - - """ - super(CoordinateInfo, self).__init__() - self.name = name - self.generic_level = False - - self.axis = "" - """Axis""" - self.value = "" - """Coordinate value""" - self.standard_name = "" - """Standard name""" - self.long_name = "" - """Long name""" - self.out_name = "" - """ - Out name - - This is the name of the variable in the file - """ - self.var_name = "" - """Short name""" - self.units = "" - """Units""" - self.stored_direction = "" - """Direction in which the coordinate increases""" - self.requested = [] - """Values requested""" - self.valid_min = "" - """Minimum allowed value""" - self.valid_max = "" - """Maximum allowed value""" - - def read_json(self, json_data): - """ - Read coordinate information from json. - - Non-present options will be set to empty - - Parameters - ---------- - json_data: dict - dictionary created by the json reader containing - coordinate information - - """ - self._json_data = json_data - - self.axis = self._read_json_variable('axis') - self.value = self._read_json_variable('value') - self.out_name = self._read_json_variable('out_name') - self.var_name = self._read_json_variable('var_name') - self.standard_name = self._read_json_variable('standard_name') - self.long_name = self._read_json_variable('long_name') - self.units = self._read_json_variable('units') - self.stored_direction = self._read_json_variable('stored_direction') - self.valid_min = self._read_json_variable('valid_min') - self.valid_max = self._read_json_variable('valid_max') - self.requested = self._read_json_list_variable('requested') - - -class CMIP5Info(object): - """ - Class to read CMIP5-like data request. 
- - Parameters - ---------- - cmor_tables_path: basestring - Path to the folder containing the Tables folder with the json files - - """ - - def __init__(self, cmor_tables_path, default=None): - cmor_tables_path = self._get_cmor_path(cmor_tables_path) - - self._cmor_folder = os.path.join(cmor_tables_path, 'Tables') - if not os.path.isdir(self._cmor_folder): - raise OSError(errno.ENOTDIR, "CMOR tables path is not a directory", - self._cmor_folder) - - self.tables = {} - self.coords = {} - self.default = default - self._current_table = None - self._last_line_read = None - - for table_file in glob.glob(os.path.join(self._cmor_folder, '*')): - if '_grids' in table_file: - continue - self._load_table(table_file) - - @staticmethod - def _get_cmor_path(cmor_tables_path): - if os.path.isdir(cmor_tables_path): - return cmor_tables_path - cwd = os.path.dirname(os.path.realpath(__file__)) - cmor_tables_path = os.path.join(cwd, 'tables', cmor_tables_path) - return cmor_tables_path - - def _load_table(self, table_file, table_name=''): - if table_name and table_name in self.tables: - # special case used for updating a table with custom variable file - table = self.tables[table_name] - else: - # default case: table name is first line of table file - table = None - - self._read_table_file(table_file, table) - - def _read_table_file(self, table_file, table=None): - with open(table_file) as self._current_table: - self._read_line() - while True: - key, value = self._last_line_read - if key == 'table_id': - table = TableInfo() - table.name = value[len('Table '):] - self.tables[table.name] = table - elif key == 'frequency': - table.frequency = value - elif key == 'modeling_realm': - table.realm = value - elif key == 'generic_levels': - for dim in value.split(' '): - coord = CoordinateInfo(dim) - coord.generic_level = True - coord.axis = 'Z' - self.coords[dim] = coord - elif key == 'axis_entry': - self.coords[value] = self._read_coordinate(value) - continue - elif key == 'variable_entry': - table[value] = self._read_variable(value, table.frequency) - continue - if not self._read_line(): - return - - def _read_line(self): - line = self._current_table.readline() - if line == '': - return False - if line.startswith('!'): - return self._read_line() - line = line.replace('\n', '') - if '!' in line: - line = line[:line.index('!')] - line = line.strip() - if not line: - self._last_line_read = ('', '') - else: - index = line.index(':') - self._last_line_read = (line[:index].strip(), - line[index + 1:].strip()) - return True - - def _read_coordinate(self, value): - coord = CoordinateInfo(value) - while self._read_line(): - key, value = self._last_line_read - if key in ('variable_entry', 'axis_entry'): - return coord - if key == 'requested': - coord.requested = value.split(' ') - continue - if hasattr(coord, key): - setattr(coord, key, value) - return coord - - def _read_variable(self, short_name, frequency): - var = VariableInfo('CMIP5', short_name) - var.frequency = frequency - while self._read_line(): - key, value = self._last_line_read - if key in ('variable_entry', 'axis_entry'): - break - if key in ('dimensions', 'modeling_realm'): - setattr(var, key, value.split()) - elif hasattr(var, key): - setattr(var, key, value) - for dim in var.dimensions: - var.coordinates[dim] = self.coords[dim] - return var - - def get_table(self, table): - """ - Search and return the table info. 
-
-        Parameters
-        ----------
-        table: basestring
-            Table name
-
-        Returns
-        -------
-        TableInfo
-            Return the TableInfo object for the requested table if
-            found, returns None if not
-
-        """
-        return self.tables.get(table)
-
-    def get_variable(self, table, short_name):
-        """
-        Search and return the variable info.
-
-        Parameters
-        ----------
-        table: basestring
-            Table name
-        short_name: basestring
-            Variable's short name
-
-        Returns
-        -------
-        VariableInfo
-            Return the VariableInfo object for the requested variable if
-            found, returns None if not
-
-        """
-        var_info = self.tables.get(table, {}).get(short_name, None)
-        if not var_info and self.default:
-            return self.default.get_variable(table, short_name)
-        return var_info
-
-
-class CustomInfo(CMIP5Info):
-    """
-    Class to read custom variable info for ESMValTool.
-
-    Parameters
-    ----------
-    cmor_tables_path: basestring or None
-        Full path to the table or name for the table if it is present in
-        the ESMValTool repository
-
-    """
-
-    def __init__(self, cmor_tables_path=None):
-        cwd = os.path.dirname(os.path.realpath(__file__))
-        self._cmor_folder = os.path.join(cwd, 'tables', 'custom')
-        self.tables = {}
-        table = TableInfo()
-        table.name = 'custom'
-        self.tables[table.name] = table
-        self._coordinates_file = os.path.join(
-            self._cmor_folder,
-            'CMOR_coordinates.dat',
-        )
-        self.coords = {}
-        self._read_table_file(self._coordinates_file, self.tables['custom'])
-        for dat_file in glob.glob(os.path.join(self._cmor_folder, '*.dat')):
-            if dat_file == self._coordinates_file:
-                continue
-            self._read_table_file(dat_file, self.tables['custom'])
-
-    def get_table(self, table):
-        """
-        Search and return the table info.
-
-        Parameters
-        ----------
-        table: basestring
-            Table name
-
-        Returns
-        -------
-        TableInfo
-            Return the TableInfo object for the requested table if
-            found, returns None if not
-
-        """
-        return self.tables.get(table)
-
-    def get_variable(self, table, short_name):
-        """
-        Search and return the variable info.
-
-        Parameters
-        ----------
-        table: basestring
-            Table name
-        short_name: basestring
-            Variable's short name
-
-        Returns
-        -------
-        VariableInfo
-            Return the VariableInfo object for the requested variable if
-            found, returns None if not
-
-        """
-        return self.tables['custom'].get(short_name, None)
-
-    def _read_table_file(self, table_file, table=None):
-        with open(table_file) as self._current_table:
-            self._read_line()
-            while True:
-                key, value = self._last_line_read
-                if key == 'generic_levels':
-                    for dim in value.split(' '):
-                        coord = CoordinateInfo(dim)
-                        coord.generic_level = True
-                        coord.axis = 'Z'
-                        self.coords[dim] = coord
-                elif key == 'axis_entry':
-                    self.coords[value] = self._read_coordinate(value)
-                    continue
-                elif key == 'variable_entry':
-                    table[value] = self._read_variable(value, None)
-                    continue
-                if not self._read_line():
-                    return
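And a last sketch showing how the removed table reader was queried; the one-entry cfg dict below is a hand-written stand-in for a parsed config-developer file:

    from esmvaltool.cmor.table import CMOR_TABLES, read_cmor_tables

    # Register the bundled CMIP5 tables under the project name 'CMIP5'.
    read_cmor_tables({'CMIP5': {'cmor_type': 'CMIP5', 'cmor_strict': True}})

    # Look up variable metadata: table (MIP) name first, then short name.
    var_info = CMOR_TABLES['CMIP5'].get_variable('Amon', 'tas')
    print(var_info.standard_name, var_info.units)  # air_temperature K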
diff --git a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_3hr b/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_3hr
deleted file mode 100644
index 6effae611c..0000000000
--- a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_3hr
+++ /dev/null
@@ -1,740 +0,0 @@
-table_id: Table 3hr
-modeling_realm: atmos
-
-frequency: 3hr
-
-cmor_version: 2.6 ! minimum version of CMOR that can read this table
-cf_version: 1.4 ! version of CF that output conforms to
-project_id: CMIP5 ! project id
-table_date: 17 July 2013 ! date this table was constructed
-
-missing_value: 1.e20 ! value used to indicate a missing value
- ! in arrays output by netCDF as 32-bit IEEE
- ! floating-point numbers (float or real)
-
-baseURL: http://cmip-pcmdi.llnl.gov/CMIP5/dataLocation
-product: output
-
-required_global_attributes: creation_date tracking_id forcing model_id parent_experiment_id parent_experiment_rip branch_time contact institute_id ! space separated required global attribute
-
-forcings: N/A Nat Ant GHG SD SI SA TO SO Oz LU Sl Vl SS Ds BC MD OC AA
-
-expt_id_ok: '10- or 30-year run initialized in year XXXX' 'decadalXXXX'
-expt_id_ok: 'volcano-free hindcast initialized in year XXXX' 'noVolcXXXX'
-expt_id_ok: 'prediction with 2010 volcano' 'volcIn2010'
-expt_id_ok: 'pre-industrial control' 'piControl'
-expt_id_ok: 'historical' 'historical'
-expt_id_ok: 'historical extension' 'historicalExt'
-expt_id_ok: 'other historical forcing' 'historicalMisc'
-expt_id_ok: 'mid-Holocene' 'midHolocene'
-expt_id_ok: 'last glacial maximum' 'lgm'
-expt_id_ok: 'last millennium' 'past1000'
-expt_id_ok: 'RCP4.5' 'rcp45'
-expt_id_ok: 'RCP8.5' 'rcp85'
-expt_id_ok: 'RCP2.6' 'rcp26'
-expt_id_ok: 'RCP6' 'rcp60'
-expt_id_ok: 'ESM pre-industrial control' 'esmControl'
-expt_id_ok: 'ESM historical' 'esmHistorical'
-expt_id_ok: 'ESM RCP8.5' 'esmrcp85'
-expt_id_ok: 'ESM fixed climate 1' 'esmFixClim1'
-expt_id_ok: 'ESM fixed climate 2' 'esmFixClim2'
-expt_id_ok: 'ESM feedback 1' 'esmFdbk1'
-expt_id_ok: 'ESM feedback 2' 'esmFdbk2'
-expt_id_ok: '1 percent per year CO2' '1pctCO2'
-expt_id_ok: 'abrupt 4XCO2' 'abrupt4xCO2'
-expt_id_ok: 'natural-only' 'historicalNat'
-expt_id_ok: 'GHG-only' 'historicalGHG'
-expt_id_ok: 'AMIP' 'amip'
-expt_id_ok: '2030 time-slice' 'sst2030'
-expt_id_ok: 'control SST climatology' 'sstClim'
-expt_id_ok: 'CO2 forcing' 'sstClim4xCO2'
-expt_id_ok: 'all aerosol forcing' 'sstClimAerosol'
-expt_id_ok: 'sulfate aerosol forcing' 'sstClimSulfate'
-expt_id_ok: '4xCO2 AMIP' 'amip4xCO2'
-expt_id_ok: 'AMIP plus patterned anomaly' 'amipFuture'
-expt_id_ok: 'aqua planet control' 'aquaControl'
-expt_id_ok: '4xCO2 aqua planet' 'aqua4xCO2'
-expt_id_ok: 'aqua planet plus 4K anomaly' 'aqua4K'
-expt_id_ok: 'AMIP plus 4K anomaly' 'amip4K'
-
-
-approx_interval: 0.125000 ! approximate spacing between successive time
- ! samples (in units of the output time
- ! coordinate.
-
-!============
-axis_entry: longitude
-!============
-!----------------------------------
-! Axis attributes:
-!----------------------------------
-standard_name: longitude
-units: degrees_east
-axis: X ! X, Y, Z, T (default: undeclared)
-long_name: longitude
-!----------------------------------
-! Additional axis information:
-!----------------------------------
-out_name: lon
-valid_min: 0.0
-valid_max: 360.0
-stored_direction: increasing
-type: double
-must_have_bounds: yes
-!----------------------------------
-!
-
-
-!============
-axis_entry: latitude
-!============
-!----------------------------------
-! Axis attributes:
-!----------------------------------
-standard_name: latitude
-units: degrees_north
-axis: Y ! X, Y, Z, T (default: undeclared)
-long_name: latitude
-!----------------------------------
-! Additional axis information:
-!----------------------------------
-out_name: lat
-valid_min: -90.0
-valid_max: 90.0
-stored_direction: increasing
-type: double
-must_have_bounds: yes
-!----------------------------------
-!
-
-
-!============
-axis_entry: time
-!============
-!----------------------------------
-! Axis attributes:
-!----------------------------------
-standard_name: time
-units: days since ?
-axis: T ! X, Y, Z, T (default: undeclared)
-long_name: time
-!----------------------------------
-!
Additional axis information: -!---------------------------------- -out_name: time -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: time1 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: time -units: days since ? -axis: T ! X, Y, Z, T (default: undeclared) -long_name: time -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: time -stored_direction: increasing -type: double -must_have_bounds: no -!---------------------------------- -! - - -!============ -axis_entry: height2m -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: height -units: m -axis: Z ! X, Y, Z, T (default: undeclared) -positive: up ! up or down (default: undeclared) -long_name: height -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: height -valid_min: 1.0 -valid_max: 10.0 -stored_direction: increasing -type: double -value: 2. ! of scalar (singleton) dimension -must_have_bounds: no -!---------------------------------- -! - - -!============ -axis_entry: height10m -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: height -units: m -axis: Z ! X, Y, Z, T (default: undeclared) -positive: up ! up or down (default: undeclared) -long_name: height -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: height -valid_min: 1.0 -valid_max: 30.0 -stored_direction: increasing -type: double -value: 10. ! of scalar (singleton) dimension -must_have_bounds: no -!---------------------------------- -! - - -!============ -axis_entry: sdepth1 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: depth -units: m -axis: Z ! X, Y, Z, T (default: undeclared) -positive: down ! up or down (default: undeclared) -long_name: depth -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: depth -valid_min: 0.0 -valid_max: 0.2 -stored_direction: increasing -type: double -value: 0.05 ! of scalar (singleton) dimension -bounds_values: 0.0 0.1 ! of scalar (singleton) dimension bounds -must_have_bounds: yes -!---------------------------------- -! - -!============ -variable_entry: pr -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: precipitation_flux -units: kg m-2 s-1 -cell_methods: time:mean -cell_measures: area: areacella -long_name: Precipitation -comment: at surface; includes both liquid and solid phases. This is the 3-hour mean precipitation flux. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: pr -type: real -!---------------------------------- -! - -!============ -variable_entry: tas -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_temperature -units: K -cell_methods: time: point -cell_measures: area: areacella -long_name: Near-Surface Air Temperature -comment: This is sampled synoptically. 
-!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 height2m -out_name: tas -type: real -!---------------------------------- -! - -!============ -variable_entry: hfls -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upward_latent_heat_flux -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Upward Latent Heat Flux -comment: This is the 3-hour mean flux. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfls -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: hfss -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upward_sensible_heat_flux -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Upward Sensible Heat Flux -comment: This is the 3-hour mean flux. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfss -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rlds -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downwelling_longwave_flux_in_air -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Downwelling Longwave Radiation -comment: This is the 3-hour mean flux. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rlds -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rlus -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upwelling_longwave_flux_in_air -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Upwelling Longwave Radiation -comment: This is the 3-hour mean flux. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rlus -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rsds -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downwelling_shortwave_flux_in_air -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Downwelling Shortwave Radiation -comment: This is the 3-hour mean flux. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rsds -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rsus -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: surface_upwelling_shortwave_flux_in_air -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Upwelling Shortwave Radiation -comment: This is the 3-hour mean flux. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rsus -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: uas -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: eastward_wind -units: m s-1 -cell_methods: time: point -long_name: Eastward Near-Surface Wind Speed -comment: This is sampled synoptically. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 height10m -out_name: uas -type: real -!---------------------------------- -! - -!============ -variable_entry: vas -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: northward_wind -units: m s-1 -cell_methods: time: point -long_name: Northward Near-Surface Wind Speed -comment: This is sampled synoptically. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 height10m -out_name: vas -type: real -!---------------------------------- -! - -!============ -variable_entry: huss -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: specific_humidity -units: 1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Near-Surface Specific Humidity -comment: This is sampled synoptically. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 height2m -out_name: huss -type: real -!---------------------------------- -! - -!============ -variable_entry: mrsos -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: moisture_content_of_soil_layer -units: kg m-2 -cell_methods: time: point area: mean where land -cell_measures: area: areacella -long_name: Moisture in Upper Portion of Soil Column -comment: the mass of water in all phases in a thin surface soil layer. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 sdepth1 -out_name: mrsos -type: real -!---------------------------------- -! - -!============ -variable_entry: tslsi -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_temperature -units: K -cell_methods: time: point -cell_measures: area: areacella -long_name: Surface Temperature Where Land or Sea Ice -comment: ""skin"" temperature of all surfaces except open ocean, sampled synoptically. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: tslsi -type: real -!---------------------------------- -! 
- -!============ -variable_entry: tso -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_surface_temperature -units: K -cell_methods: time: point area: mean where sea -cell_measures: area: areacella -long_name: Sea Surface Temperature -comment: temperature of surface of open ocean, sampled synoptically. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: tso -type: real -!---------------------------------- -! - -!============ -variable_entry: prc -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: convective_precipitation_flux -units: kg m-2 s-1 -cell_methods: time:mean -cell_measures: area: areacella -long_name: Convective Precipitation -comment: at surface. This is a 3-hour mean convective precipitation flux. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: prc -type: real -!---------------------------------- -! - -!============ -variable_entry: prsn -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: snowfall_flux -units: kg m-2 s-1 -cell_methods: time:mean -cell_measures: area: areacella -long_name: Snowfall Flux -comment: at surface. Includes precipitation of all forms water in the solid phase. This is the 3-hour mean snowfall flux. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: prsn -type: real -!---------------------------------- -! - -!============ -variable_entry: mrro -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: runoff_flux -units: kg m-2 s-1 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Total Runoff -comment: the total runoff (including ""drainage"" through the base of the soil model) leaving the land portion of the grid cell divided by the land area in the grid cell, averaged over the 3-hour interval. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: mrro -type: real -!---------------------------------- -! - -!============ -variable_entry: rldscs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downwelling_longwave_flux_in_air_assuming_clear_sky -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Downwelling Clear-Sky Longwave Radiation -comment: This is a 3-hour mean flux. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rldscs -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rsdscs -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: surface_downwelling_shortwave_flux_in_air_assuming_clear_sky -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Downwelling Clear-Sky Shortwave Radiation -comment: This is a 3-hour mean flux. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rsdscs -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rsuscs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upwelling_shortwave_flux_in_air_assuming_clear_sky -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Upwelling Clear-Sky Shortwave Radiation -comment: This is a 3-hour mean flux. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rsuscs -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: ps -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_air_pressure -units: Pa -cell_methods: time: point -cell_measures: area: areacella -long_name: Surface Air Pressure -comment: sampled synoptically to diagnose atmospheric tides, this is better than mean sea level pressure. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: ps -type: real -!---------------------------------- -! - -!============ -variable_entry: clt -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: Total Cloud Fraction -comment: for the whole atmospheric column, as seen from the surface or the top of the atmosphere. Include both large-scale and convective cloud. This is a 3-hour mean. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: clt -type: real -!---------------------------------- -! - -!============ -variable_entry: rsdsdiff -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_diffuse_downwelling_shortwave_flux_in_air -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Diffuse Downwelling Shortwave Radiation -comment: This is a 3-hour mean flux. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rsdsdiff -type: real -!---------------------------------- -! - diff --git a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_6hrLev b/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_6hrLev deleted file mode 100644 index 6d6f91b536..0000000000 --- a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_6hrLev +++ /dev/null @@ -1,590 +0,0 @@ -table_id: Table 6hrLev -modeling_realm: atmos - -frequency: 6hr - -cmor_version: 2.6 ! 
minimum version of CMOR that can read this table -cf_version: 1.4 ! version of CF that output conforms to -project_id: CMIP5 ! project id -table_date: 17 July 2013 ! date this table was constructed - -missing_value: 1.e20 ! value used to indicate a missing value - ! in arrays output by netCDF as 32-bit IEEE - ! floating-point numbers (float or real) - -baseURL: http://cmip-pcmdi.llnl.gov/CMIP5/dataLocation -product: output - -required_global_attributes: creation_date tracking_id forcing model_id parent_experiment_id parent_experiment_rip branch_time contact institute_id ! space separated required global attribute - -forcings: N/A Nat Ant GHG SD SI SA TO SO Oz LU Sl Vl SS Ds BC MD OC AA - -expt_id_ok: '10- or 30-year run initialized in year XXXX' 'decadalXXXX' -expt_id_ok: 'volcano-free hindcast initialized in year XXXX' 'noVolcXXXX' -expt_id_ok: 'prediction with 2010 volcano' 'volcIn2010' -expt_id_ok: 'pre-industrial control' 'piControl' -expt_id_ok: 'historical' 'historical' -expt_id_ok: 'historical extension' 'historicalExt' -expt_id_ok: 'other historical forcing' 'historicalMisc' -expt_id_ok: 'mid-Holocene' 'midHolocene' -expt_id_ok: 'last glacial maximum' 'lgm' -expt_id_ok: 'last millennium' 'past1000' -expt_id_ok: 'RCP4.5' 'rcp45' -expt_id_ok: 'RCP8.5' 'rcp85' -expt_id_ok: 'RCP2.6' 'rcp26' -expt_id_ok: 'RCP6' 'rcp60' -expt_id_ok: 'ESM pre-industrial control' 'esmControl' -expt_id_ok: 'ESM historical' 'esmHistorical' -expt_id_ok: 'ESM RCP8.5' 'esmrcp85' -expt_id_ok: 'ESM fixed climate 1' 'esmFixClim1' -expt_id_ok: 'ESM fixed climate 2' 'esmFixClim2' -expt_id_ok: 'ESM feedback 1' 'esmFdbk1' -expt_id_ok: 'ESM feedback 2' 'esmFdbk2' -expt_id_ok: '1 percent per year CO2' '1pctCO2' -expt_id_ok: 'abrupt 4XCO2' 'abrupt4xCO2' -expt_id_ok: 'natural-only' 'historicalNat' -expt_id_ok: 'GHG-only' 'historicalGHG' -expt_id_ok: 'AMIP' 'amip' -expt_id_ok: '2030 time-slice' 'sst2030' -expt_id_ok: 'control SST climatology' 'sstClim' -expt_id_ok: 'CO2 forcing' 'sstClim4xCO2' -expt_id_ok: 'all aerosol forcing' 'sstClimAerosol' -expt_id_ok: 'sulfate aerosol forcing' 'sstClimSulfate' -expt_id_ok: '4xCO2 AMIP' 'amip4xCO2' -expt_id_ok: 'AMIP plus patterned anomaly' 'amipFuture' -expt_id_ok: 'aqua planet control' 'aquaControl' -expt_id_ok: '4xCO2 aqua planet' 'aqua4xCO2' -expt_id_ok: 'aqua planet plus 4K anomaly' 'aqua4K' -expt_id_ok: 'AMIP plus 4K anomaly' 'amip4K' - - -approx_interval: 0.250000 ! approximate spacing between successive time - ! samples (in units of the output time - ! coordinate. - -generic_levels: alevel - -!============ -axis_entry: longitude -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: longitude -units: degrees_east -axis: X ! X, Y, Z, T (default: undeclared) -long_name: longitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lon -valid_min: 0.0 -valid_max: 360.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: latitude -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: latitude -units: degrees_north -axis: Y ! X, Y, Z, T (default: undeclared) -long_name: latitude -!---------------------------------- -! 
Additional axis information: -!---------------------------------- -out_name: lat -valid_min: -90.0 -valid_max: 90.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: time1 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: time -units: days since ? -axis: T ! X, Y, Z, T (default: undeclared) -long_name: time -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: time -stored_direction: increasing -type: double -must_have_bounds: no -!---------------------------------- -! - -!============ -axis_entry: smooth_level -!============ -! -! This coordinate is a hybrid height coordinate with units of meters (m). -! It increases upward. -! The values of a(k)*ztop, which appear in the formula below, should be stored as smooth_level. -! Note that in the netCDF file the variable will be named "lev", not smooth_level. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_sleve_coordinate -units: m -axis: Z -positive: up -long_name: atmosphere smooth level vertical (SLEVE) coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: increasing -valid_min: -200. -valid_max: 800000. -formula: z = a*ztop + b1*zsurf1 + b2*zsurf2 -z_factors: a: a b1: b1 b2: b2 ztop: ztop zsurf1: zsurf1 zsurf2: zsurf2 -z_bounds_factors: a: a_bnds b1: b1_bnds b2: b2_bnds ztop: ztop zsurf1: zsurf1 zsurf2: zsurf2 -!---------------------------------- -! -!============ -axis_entry: natural_log_pressure -!============ -! -!This coordinate is dimensionless and varies from near 0 at the surface and increases upward. -! The values of lev(k), which appears in the formula below, should be stored as natural_log_pressure. -! Note that in the netCDF file the variable will be named "lev", not natural_log_pressure. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_ln_pressure_coordinate -axis: Z -long_name: atmosphere natural log pressure coordinate -positive: down -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: -1. -valid_max: 20. -formula: p = p0 * exp(-lev) -z_factors: p0: p0 lev: lev -z_bounds_factors: p0: p0 lev: lev_bnds -!---------------------------------- -! -!============ -axis_entry: standard_sigma -!============ -! -! This coordinate is dimensionless and varies from 0 at the model top to 1.0 at the surface. -! The values of sigma(k), which appears in the formula below, should be stored as standard_sigma. -! Note that in the netCDF file the variable will be named "lev", not standard_sigma. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_sigma_coordinate -axis: Z -positive: down -long_name: sigma coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: 0.0 -valid_max: 1.0 -formula: p = ptop + sigma*(ps - ptop) -z_factors: ptop: ptop sigma: lev ps: ps -z_bounds_factors: ptop: ptop sigma: lev_bnds ps: ps -!---------------------------------- -! -! 
-!============ -axis_entry: standard_hybrid_sigma -!============ -! -! This coordinate is dimensionless and varies from a small value at the model top to 1.0 at the surface. -! The values of a+ b, which appear in the formula below, should be stored as standard_hybrid_sigma. -! Note that in the netCDF file the variable will be named "lev", not standard_hybrid_sigma. -! -!--------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_hybrid_sigma_pressure_coordinate -units: 1 -axis: Z -positive: down -long_name: hybrid sigma pressure coordinate -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: 0.0 -valid_max: 1.0 -formula: p = a*p0 + b*ps -z_factors: p0: p0 a: a b: b ps: ps -z_bounds_factors: p0: p0 a: a_bnds b: b_bnds ps: ps -!---------------------------------- -! -! -!============ -axis_entry: alternate_hybrid_sigma -!============ -! -! This coordinate is dimensionless and varies from a small value at the model top to 1.0 at the surface. -! The values of ap/p0 + b, which appear in the formula below, should be stored as alternate_hybrid_sigma. -! Note that in the netCDF file the variable will be named "lev", not alternate_hybrid_sigma. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_hybrid_sigma_pressure_coordinate -units: 1 -axis: Z -positive: down -long_name: hybrid sigma pressure coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: 0.0 -valid_max: 1.0 -formula: p = ap + b*ps -z_factors: ap: ap b: b ps: ps -z_bounds_factors: ap: ap_bnds b: b_bnds ps: ps -!---------------------------------- -! -! -!============ -axis_entry: hybrid_height -!============ -! -! This coordinate has dimension of meters (m) and increases upward. -! The values of a which appear in the formula below, should be stored as hybrid_height. -! Note that in the netCDF file the variable will be named "lev", not hybrid_height. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_hybrid_height_coordinate -units: m -axis: Z -positive: up -long_name: hybrid height coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: increasing -valid_min: 0.0 -formula: z = a + b*orog -z_factors: a: lev b: b orog: orog -z_bounds_factors: a: lev_bnds b: b_bnds orog: orog -!---------------------------------- -! -! *************************************************************** -! -! Vertical coordinate formula terms: -! -! *************************************************************** -! -! -!============ -variable_entry: orog -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_altitude -units: m -long_name: Surface Altitude -comment: height above the geoid; as defined here, ""the geoid"" is a surface of constant geopotential that, if the ocean were at rest, would coincide with mean sea level. Under this definition, the geoid changes as the mean volume of the ocean changes (e.g., due to glacial melt, or global warming of the ocean). 
Report here the height above the present-day geoid. Over ocean, report as 0.0 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude -out_name: orog -type: real -valid_min: -700 -valid_max: 1.00E+04 -!---------------------------------- -! -! -!============ -variable_entry: p0 -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: reference pressure -units: Pa -!---------------------------------- -! -! -!============ -variable_entry: ptop -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: pressure at top of model -units: Pa -!---------------------------------- -! -! -! -!============ -variable_entry: a -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: a(k) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: alevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: b -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: b(k) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: alevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: a_bnds -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: a(k+1/2) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: alevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: b_bnds -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: b(k+1/2) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: alevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: ap -!============ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: ap(k) -units: Pa -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: alevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: ap_bnds -!============ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: ap(k+1/2) -units: Pa -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: alevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: ztop -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: height of top of model -units: m -!---------------------------------- -! -! -! - -!============ -variable_entry: ta -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: air_temperature -units: K -cell_measures: area: areacella -long_name: Air Temperature -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time1 -out_name: ta -type: real -!---------------------------------- -! - -!============ -variable_entry: ua -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: eastward_wind -units: m s-1 -long_name: Eastward Wind -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time1 -out_name: ua -type: real -!---------------------------------- -! - -!============ -variable_entry: va -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: northward_wind -units: m s-1 -long_name: Northward Wind -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time1 -out_name: va -type: real -!---------------------------------- -! - -!============ -variable_entry: hus -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: specific_humidity -units: 1 -cell_measures: area: areacella -long_name: Specific Humidity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time1 -out_name: hus -type: real -!---------------------------------- -! - -!============ -variable_entry: ps -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_air_pressure -units: Pa -cell_measures: area: areacella -long_name: Surface Air Pressure -comment: surface pressure, not mean sea level pressure -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: ps -type: real -!---------------------------------- -! - diff --git a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_6hrPlev b/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_6hrPlev deleted file mode 100644 index ab353ddf23..0000000000 --- a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_6hrPlev +++ /dev/null @@ -1,240 +0,0 @@ -table_id: Table 6hrPlev -modeling_realm: atmos - -frequency: 6hr - -cmor_version: 2.6 ! minimum version of CMOR that can read this table -cf_version: 1.4 ! version of CF that output conforms to -project_id: CMIP5 ! project id -table_date: 17 July 2013 ! date this table was constructed - -missing_value: 1.e20 ! value used to indicate a missing value - ! in arrays output by netCDF as 32-bit IEEE - ! floating-point numbers (float or real) - -baseURL: http://cmip-pcmdi.llnl.gov/CMIP5/dataLocation -product: output - -required_global_attributes: creation_date tracking_id forcing model_id parent_experiment_id parent_experiment_rip branch_time contact institute_id ! 
space separated required global attribute - -forcings: N/A Nat Ant GHG SD SI SA TO SO Oz LU Sl Vl SS Ds BC MD OC AA - -expt_id_ok: '10- or 30-year run initialized in year XXXX' 'decadalXXXX' -expt_id_ok: 'volcano-free hindcast initialized in year XXXX' 'noVolcXXXX' -expt_id_ok: 'prediction with 2010 volcano' 'volcIn2010' -expt_id_ok: 'pre-industrial control' 'piControl' -expt_id_ok: 'historical' 'historical' -expt_id_ok: 'historical extension' 'historicalExt' -expt_id_ok: 'other historical forcing' 'historicalMisc' -expt_id_ok: 'mid-Holocene' 'midHolocene' -expt_id_ok: 'last glacial maximum' 'lgm' -expt_id_ok: 'last millennium' 'past1000' -expt_id_ok: 'RCP4.5' 'rcp45' -expt_id_ok: 'RCP8.5' 'rcp85' -expt_id_ok: 'RCP2.6' 'rcp26' -expt_id_ok: 'RCP6' 'rcp60' -expt_id_ok: 'ESM pre-industrial control' 'esmControl' -expt_id_ok: 'ESM historical' 'esmHistorical' -expt_id_ok: 'ESM RCP8.5' 'esmrcp85' -expt_id_ok: 'ESM fixed climate 1' 'esmFixClim1' -expt_id_ok: 'ESM fixed climate 2' 'esmFixClim2' -expt_id_ok: 'ESM feedback 1' 'esmFdbk1' -expt_id_ok: 'ESM feedback 2' 'esmFdbk2' -expt_id_ok: '1 percent per year CO2' '1pctCO2' -expt_id_ok: 'abrupt 4XCO2' 'abrupt4xCO2' -expt_id_ok: 'natural-only' 'historicalNat' -expt_id_ok: 'GHG-only' 'historicalGHG' -expt_id_ok: 'AMIP' 'amip' -expt_id_ok: '2030 time-slice' 'sst2030' -expt_id_ok: 'control SST climatology' 'sstClim' -expt_id_ok: 'CO2 forcing' 'sstClim4xCO2' -expt_id_ok: 'all aerosol forcing' 'sstClimAerosol' -expt_id_ok: 'sulfate aerosol forcing' 'sstClimSulfate' -expt_id_ok: '4xCO2 AMIP' 'amip4xCO2' -expt_id_ok: 'AMIP plus patterned anomaly' 'amipFuture' -expt_id_ok: 'aqua planet control' 'aquaControl' -expt_id_ok: '4xCO2 aqua planet' 'aqua4xCO2' -expt_id_ok: 'aqua planet plus 4K anomaly' 'aqua4K' -expt_id_ok: 'AMIP plus 4K anomaly' 'amip4K' - - -approx_interval: 0.250000 ! approximate spacing between successive time - ! samples (in units of the output time - ! coordinate. - -!============ -axis_entry: longitude -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: longitude -units: degrees_east -axis: X ! X, Y, Z, T (default: undeclared) -long_name: longitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lon -valid_min: 0.0 -valid_max: 360.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: latitude -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: latitude -units: degrees_north -axis: Y ! X, Y, Z, T (default: undeclared) -long_name: latitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lat -valid_min: -90.0 -valid_max: 90.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: plev3 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: air_pressure -units: Pa -axis: Z ! X, Y, Z, T (default: undeclared) -positive: down ! up or down (default: undeclared) -long_name: pressure -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: plev -stored_direction: decreasing -tolerance: 0.001 -type: double -requested: 85000. 50000. 25000. ! 
space-separated list of requested coordinates -must_have_bounds: no -!---------------------------------- -! - - -!============ -axis_entry: time1 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: time -units: days since ? -axis: T ! X, Y, Z, T (default: undeclared) -long_name: time -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: time -stored_direction: increasing -type: double -must_have_bounds: no -!---------------------------------- -! - -!============ -variable_entry: ua -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: eastward_wind -units: m s-1 -long_name: Eastward Wind -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude plev3 time1 -out_name: ua -type: real -valid_min: -90.0 -valid_max: 145.0 -!---------------------------------- -! - -!============ -variable_entry: va -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: northward_wind -units: m s-1 -long_name: Northward Wind -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude plev3 time1 -out_name: va -type: real -valid_min: -80.0 -valid_max: 80.0 -!---------------------------------- -! - -!============ -variable_entry: ta -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_temperature -units: K -cell_measures: area: areacella -long_name: Air Temperature -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude plev3 time1 -out_name: ta -type: real -valid_min: 144.0 -valid_max: 330.0 -!---------------------------------- -! - -!============ -variable_entry: psl -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_pressure_at_sea_level -units: Pa -cell_measures: area: areacella -long_name: Sea Level Pressure -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: psl -type: real -valid_min: 92500.0 -valid_max: 113000.0 -!---------------------------------- -! - diff --git a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_Amon b/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_Amon deleted file mode 100644 index 2a897a55bf..0000000000 --- a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_Amon +++ /dev/null @@ -1,2458 +0,0 @@ -table_id: Table Amon -modeling_realm: atmos - -frequency: mon - -cmor_version: 2.6 ! minimum version of CMOR that can read this table -cf_version: 1.4 ! version of CF that output conforms to -project_id: CMIP5 ! project id -table_date: 17 July 2013 ! date this table was constructed - -missing_value: 1.e20 ! value used to indicate a missing value - ! in arrays output by netCDF as 32-bit IEEE - ! 
floating-point numbers (float or real) - -baseURL: http://cmip-pcmdi.llnl.gov/CMIP5/dataLocation -product: output - -required_global_attributes: creation_date tracking_id forcing model_id parent_experiment_id parent_experiment_rip branch_time contact institute_id ! space separated required global attribute - -forcings: N/A Nat Ant GHG SD SI SA TO SO Oz LU Sl Vl SS Ds BC MD OC AA - -expt_id_ok: '10- or 30-year run initialized in year XXXX' 'decadalXXXX' -expt_id_ok: 'volcano-free hindcast initialized in year XXXX' 'noVolcXXXX' -expt_id_ok: 'prediction with 2010 volcano' 'volcIn2010' -expt_id_ok: 'pre-industrial control' 'piControl' -expt_id_ok: 'historical' 'historical' -expt_id_ok: 'historical extension' 'historicalExt' -expt_id_ok: 'other historical forcing' 'historicalMisc' -expt_id_ok: 'mid-Holocene' 'midHolocene' -expt_id_ok: 'last glacial maximum' 'lgm' -expt_id_ok: 'last millennium' 'past1000' -expt_id_ok: 'RCP4.5' 'rcp45' -expt_id_ok: 'RCP8.5' 'rcp85' -expt_id_ok: 'RCP2.6' 'rcp26' -expt_id_ok: 'RCP6' 'rcp60' -expt_id_ok: 'ESM pre-industrial control' 'esmControl' -expt_id_ok: 'ESM historical' 'esmHistorical' -expt_id_ok: 'ESM RCP8.5' 'esmrcp85' -expt_id_ok: 'ESM fixed climate 1' 'esmFixClim1' -expt_id_ok: 'ESM fixed climate 2' 'esmFixClim2' -expt_id_ok: 'ESM feedback 1' 'esmFdbk1' -expt_id_ok: 'ESM feedback 2' 'esmFdbk2' -expt_id_ok: '1 percent per year CO2' '1pctCO2' -expt_id_ok: 'abrupt 4XCO2' 'abrupt4xCO2' -expt_id_ok: 'natural-only' 'historicalNat' -expt_id_ok: 'GHG-only' 'historicalGHG' -expt_id_ok: 'AMIP' 'amip' -expt_id_ok: '2030 time-slice' 'sst2030' -expt_id_ok: 'control SST climatology' 'sstClim' -expt_id_ok: 'CO2 forcing' 'sstClim4xCO2' -expt_id_ok: 'all aerosol forcing' 'sstClimAerosol' -expt_id_ok: 'sulfate aerosol forcing' 'sstClimSulfate' -expt_id_ok: '4xCO2 AMIP' 'amip4xCO2' -expt_id_ok: 'AMIP plus patterned anomaly' 'amipFuture' -expt_id_ok: 'aqua planet control' 'aquaControl' -expt_id_ok: '4xCO2 aqua planet' 'aqua4xCO2' -expt_id_ok: 'aqua planet plus 4K anomaly' 'aqua4K' -expt_id_ok: 'AMIP plus 4K anomaly' 'amip4K' - - -approx_interval: 30.000000 ! approximate spacing between successive time - ! samples (in units of the output time - ! coordinate. - -generic_levels: alevel alevhalf - -!============ -axis_entry: longitude -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: longitude -units: degrees_east -axis: X ! X, Y, Z, T (default: undeclared) -long_name: longitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lon -valid_min: 0.0 -valid_max: 360.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: latitude -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: latitude -units: degrees_north -axis: Y ! X, Y, Z, T (default: undeclared) -long_name: latitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lat -valid_min: -90.0 -valid_max: 90.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: plevs -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: air_pressure -units: Pa -axis: Z ! X, Y, Z, T (default: undeclared) -positive: down ! 
up or down (default: undeclared) -long_name: pressure -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: plev -stored_direction: decreasing -tolerance: 0.001 -type: double -requested: 100000. 92500. 85000. 70000. 60000. 50000. 40000. 30000. 25000. 20000. 15000. 10000. 7000. 5000. 3000. 2000. 1000. ! space-separated list of requested coordinates -must_have_bounds: no -!---------------------------------- -! - - -!============ -axis_entry: time -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: time -units: days since ? -axis: T ! X, Y, Z, T (default: undeclared) -long_name: time -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: time -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: time2 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: time -units: days since ? -axis: T ! X, Y, Z, T (default: undeclared) -long_name: time -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: time -stored_direction: increasing -type: double -must_have_bounds: yes -climatology: yes -!---------------------------------- -! - - -!============ -axis_entry: height2m -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: height -units: m -axis: Z ! X, Y, Z, T (default: undeclared) -positive: up ! up or down (default: undeclared) -long_name: height -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: height -valid_min: 1.0 -valid_max: 10.0 -stored_direction: increasing -type: double -value: 2. ! of scalar (singleton) dimension -must_have_bounds: no -!---------------------------------- -! - - -!============ -axis_entry: height10m -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: height -units: m -axis: Z ! X, Y, Z, T (default: undeclared) -positive: up ! up or down (default: undeclared) -long_name: height -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: height -valid_min: 1.0 -valid_max: 30.0 -stored_direction: increasing -type: double -value: 10. ! of scalar (singleton) dimension -must_have_bounds: no -!---------------------------------- -! - -!============ -axis_entry: smooth_level -!============ -! -! This coordinate is a hybrid height coordinate with units of meters (m). -! It increases upward. -! The values of a(k)*ztop, which appear in the formula below, should be stored as smooth_level. -! Note that in the netCDF file the variable will be named "lev", not smooth_level. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_sleve_coordinate -units: m -axis: Z -positive: up -long_name: atmosphere smooth level vertical (SLEVE) coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: increasing -valid_min: -200. -valid_max: 800000. 
-formula: z = a*ztop + b1*zsurf1 + b2*zsurf2 -z_factors: a: a b1: b1 b2: b2 ztop: ztop zsurf1: zsurf1 zsurf2: zsurf2 -z_bounds_factors: a: a_bnds b1: b1_bnds b2: b2_bnds ztop: ztop zsurf1: zsurf1 zsurf2: zsurf2 -!---------------------------------- -! -!============ -axis_entry: natural_log_pressure -!============ -! -! This coordinate is dimensionless; it is near 0 at the surface and increases upward. -! The values of lev(k), which appear in the formula below, should be stored as natural_log_pressure. -! Note that in the netCDF file the variable will be named "lev", not natural_log_pressure. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_ln_pressure_coordinate -axis: Z -long_name: atmosphere natural log pressure coordinate -positive: down -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: -1. -valid_max: 20. -formula: p = p0 * exp(-lev) -z_factors: p0: p0 lev: lev -z_bounds_factors: p0: p0 lev: lev_bnds -!---------------------------------- -! -!============ -axis_entry: standard_sigma -!============ -! -! This coordinate is dimensionless and varies from 0 at the model top to 1.0 at the surface. -! The values of sigma(k), which appear in the formula below, should be stored as standard_sigma. -! Note that in the netCDF file the variable will be named "lev", not standard_sigma. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_sigma_coordinate -axis: Z -positive: down -long_name: sigma coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: 0.0 -valid_max: 1.0 -formula: p = ptop + sigma*(ps - ptop) -z_factors: ptop: ptop sigma: lev ps: ps -z_bounds_factors: ptop: ptop sigma: lev_bnds ps: ps -!---------------------------------- -! -! -!============ -axis_entry: standard_hybrid_sigma -!============ -! -! This coordinate is dimensionless and varies from a small value at the model top to 1.0 at the surface. -! The values of a + b, which appear in the formula below, should be stored as standard_hybrid_sigma. -! Note that in the netCDF file the variable will be named "lev", not standard_hybrid_sigma. -! -!--------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_hybrid_sigma_pressure_coordinate -units: 1 -axis: Z -positive: down -long_name: hybrid sigma pressure coordinate -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: 0.0 -valid_max: 1.0 -formula: p = a*p0 + b*ps -z_factors: p0: p0 a: a b: b ps: ps -z_bounds_factors: p0: p0 a: a_bnds b: b_bnds ps: ps -!---------------------------------- -! -! -!============ -axis_entry: alternate_hybrid_sigma -!============ -! -! This coordinate is dimensionless and varies from a small value at the model top to 1.0 at the surface. -! The values of ap/p0 + b, which appear in the formula below, should be stored as alternate_hybrid_sigma. -! Note that in the netCDF file the variable will be named "lev", not alternate_hybrid_sigma. -! -!------------ -! -!
Axis attributes: -!---------------------------------- -standard_name: atmosphere_hybrid_sigma_pressure_coordinate -units: 1 -axis: Z -positive: down -long_name: hybrid sigma pressure coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: 0.0 -valid_max: 1.0 -formula: p = ap + b*ps -z_factors: ap: ap b: b ps: ps -z_bounds_factors: ap: ap_bnds b: b_bnds ps: ps -!---------------------------------- -! -! -!============ -axis_entry: hybrid_height -!============ -! -! This coordinate has dimension of meters (m) and increases upward. -! The values of a, which appear in the formula below, should be stored as hybrid_height. -! Note that in the netCDF file the variable will be named "lev", not hybrid_height. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_hybrid_height_coordinate -units: m -axis: Z -positive: up -long_name: hybrid height coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: increasing -valid_min: 0.0 -formula: z = a + b*orog -z_factors: a: lev b: b orog: orog -z_bounds_factors: a: lev_bnds b: b_bnds orog: orog -!---------------------------------- -! -! *************************************************************** -! -! Vertical coordinate formula terms: -! -! *************************************************************** -! -! -!============ -variable_entry: orog -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_altitude -units: m -long_name: Surface Altitude -comment: height above the geoid; as defined here, ""the geoid"" is a surface of constant geopotential that, if the ocean were at rest, would coincide with mean sea level. Under this definition, the geoid changes as the mean volume of the ocean changes (e.g., due to glacial melt, or global warming of the ocean). Report here the height above the present-day geoid. Over ocean, report as 0.0 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude -out_name: orog -type: real -valid_min: -700 -valid_max: 1.00E+04 -!---------------------------------- -! -! -!============ -variable_entry: p0 -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: reference pressure -units: Pa -!---------------------------------- -! -! -!============ -variable_entry: ptop -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: pressure at top of model -units: Pa -!---------------------------------- -! -! -! -!============ -variable_entry: a -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: a(k) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: alevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: b -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: b(k) -!---------------------------------- -! -!
Additional variable information: -!---------------------------------- -dimensions: alevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: a_bnds -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: a(k+1/2) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: alevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: b_bnds -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: b(k+1/2) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: alevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: ap -!============ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: ap(k) -units: Pa -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: alevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: ap_bnds -!============ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: ap(k+1/2) -units: Pa -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: alevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: ztop -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: height of top of model -units: m -!---------------------------------- -! -! -! - -!============ -variable_entry: tas -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_temperature -units: K -cell_methods: time: mean -cell_measures: area: areacella -long_name: Near-Surface Air Temperature -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time height2m -out_name: tas -type: real -valid_min: 180.6 -valid_max: 335.1 -ok_min_mean_abs: 262.4 -ok_max_mean_abs: 293 -!---------------------------------- -! - -!============ -variable_entry: ts -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_temperature -units: K -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Temperature -comment: ""skin"" temperature (i.e., SST for open ocean) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: ts -type: real -valid_min: 176.8 -valid_max: 339.6 -ok_min_mean_abs: 262.8 -ok_max_mean_abs: 293.3 -!---------------------------------- -! - -!============ -variable_entry: tasmin -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: air_temperature -units: K -cell_methods: time: minimum within days time: mean over days -cell_measures: area: areacella -long_name: Daily Minimum Near-Surface Air Temperature -comment: monthly mean of the daily-minimum near-surface air temperature. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time height2m -out_name: tasmin -type: real -valid_min: 182.8 -valid_max: 323 -ok_min_mean_abs: 260.9 -ok_max_mean_abs: 290.6 -!---------------------------------- -! - -!============ -variable_entry: tasmax -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_temperature -units: K -cell_methods: time: maximum within days time: mean over days -cell_measures: area: areacella -long_name: Daily Maximum Near-Surface Air Temperature -comment: monthly mean of the daily-maximum near-surface air temperature. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time height2m -out_name: tasmax -type: real -valid_min: 181.9 -valid_max: 341.9 -ok_min_mean_abs: 264.9 -ok_max_mean_abs: 294 -!---------------------------------- -! - -!============ -variable_entry: psl -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_pressure_at_sea_level -units: Pa -cell_methods: time: mean -cell_measures: area: areacella -long_name: Sea Level Pressure -comment: not, in general, the same as surface pressure -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: psl -type: real -valid_min: 9.122e+04 -valid_max: 1.137e+05 -ok_min_mean_abs: 9.57e+04 -ok_max_mean_abs: 1.063e+05 -!---------------------------------- -! - -!============ -variable_entry: ps -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_air_pressure -units: Pa -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Air Pressure -comment: not, in general, the same as mean sea-level pressure -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: ps -type: real -valid_min: 4.791e+04 -valid_max: 1.119e+05 -ok_min_mean_abs: 9.165e+04 -ok_max_mean_abs: 1.019e+05 -!---------------------------------- -! - -!============ -variable_entry: uas -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: eastward_wind -units: m s-1 -cell_methods: time: mean -long_name: Eastward Near-Surface Wind -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time height10m -out_name: uas -type: real -valid_min: -19.01 -valid_max: 18.68 -ok_min_mean_abs: 1.668 -ok_max_mean_abs: 4.106 -!---------------------------------- -! - -!============ -variable_entry: vas -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: northward_wind -units: m s-1 -cell_methods: time: mean -long_name: Northward Near-Surface Wind -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time height10m -out_name: vas -type: real -valid_min: -18.04 -valid_max: 22.84 -ok_min_mean_abs: 1.065 -ok_max_mean_abs: 2.678 -!---------------------------------- -! - -!============ -variable_entry: sfcWind -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: wind_speed -units: m s-1 -cell_methods: time: mean -long_name: Near-Surface Wind Speed -comment: This is the mean of the speed, not the speed computed from the mean u and v components of wind -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time height10m -out_name: sfcWind -type: real -!---------------------------------- -! - -!============ -variable_entry: hurs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: relative_humidity -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: Near-Surface Relative Humidity -comment: This is the relative humidity with respect to liquid water for T> 0 C, and with respect to ice for T<0 C. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time height2m -out_name: hurs -type: real -!---------------------------------- -! - -!============ -variable_entry: huss -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: specific_humidity -units: 1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Near-Surface Specific Humidity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time height2m -out_name: huss -type: real -valid_min: -7.21e-06 -valid_max: 0.02992 -ok_min_mean_abs: 0.006391 -ok_max_mean_abs: 0.008749 -!---------------------------------- -! - -!============ -variable_entry: pr -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: precipitation_flux -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Precipitation -comment: at surface; includes both liquid and solid phases from all types of clouds (both large-scale and convective) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: pr -type: real -valid_min: 0 -valid_max: 0.001254 -ok_min_mean_abs: 2.156e-05 -ok_max_mean_abs: 3.215e-05 -!---------------------------------- -! - -!============ -variable_entry: prsn -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: snowfall_flux -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Snowfall Flux -comment: at surface; includes precipitation of all forms of water in the solid phase -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: prsn -type: real -valid_min: 0 -valid_max: 0.0002987 -ok_min_mean_abs: 1.449e-06 -ok_max_mean_abs: 6.11e-06 -!---------------------------------- -! - -!============ -variable_entry: prc -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: convective_precipitation_flux -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Convective Precipitation -comment: at surface; includes both liquid and solid phases. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: prc -type: real -valid_min: 0 -valid_max: 0.0006323 -ok_min_mean_abs: -2.528e-06 -ok_max_mean_abs: 3.209e-05 -!---------------------------------- -! - -!============ -variable_entry: evspsbl -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: water_evaporation_flux -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Evaporation -comment: at surface; flux of water into the atmosphere due to conversion of both liquid and solid phases to vapor (from underlying surface and vegetation) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: evspsbl -type: real -!---------------------------------- -! - -!============ -variable_entry: sbl -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_snow_and_ice_sublimation_flux -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Snow and Ice Sublimation Flux -comment: The snow and ice sublimation flux is the loss of snow and ice mass from the surface resulting from their conversion to water vapor that enters the atmosphere. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: sbl -type: real -valid_min: -0.004959 -valid_max: 0.0009936 -ok_min_mean_abs: -5.521e-05 -ok_max_mean_abs: 9.61e-05 -!---------------------------------- -! - -!============ -variable_entry: tauu -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downward_eastward_stress -units: Pa -cell_methods: time: mean -long_name: Surface Downward Eastward Wind Stress -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: tauu -type: real -positive: down -valid_min: -2.729 -valid_max: 2.934 -ok_min_mean_abs: 0.03979 -ok_max_mean_abs: 0.1065 -!---------------------------------- -! 
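The four bounds carried by entries such as tauu above (valid_min/valid_max for individual values, ok_min_mean_abs/ok_max_mean_abs for the mean absolute value) are what CMOR-style quality checks test output against. A minimal sketch of such a check follows, assuming numpy; the helper name check_bounds is hypothetical, and the unweighted mean stands in for the area-weighted mean a real checker would use:

```python
# Minimal sketch of a CMOR-style range check -- not the actual CMOR code.
import numpy as np

def check_bounds(field, valid_min, valid_max, ok_min_mean_abs, ok_max_mean_abs):
    """Return a list of problems found in `field` (hypothetical helper)."""
    problems = []
    # Every individual value must lie inside [valid_min, valid_max].
    if np.nanmin(field) < valid_min or np.nanmax(field) > valid_max:
        problems.append("values outside [valid_min, valid_max]")
    # The mean of |field| should lie inside [ok_min_mean_abs, ok_max_mean_abs];
    # an unweighted mean is used here for simplicity.
    mean_abs = np.nanmean(np.abs(field))
    if not ok_min_mean_abs <= mean_abs <= ok_max_mean_abs:
        problems.append("mean absolute value outside expected range")
    return problems

# Bounds taken from the tauu entry above (units: Pa); the field is synthetic.
field = np.random.default_rng(0).normal(0.0, 0.07, size=(96, 192))
print(check_bounds(field, -2.729, 2.934, 0.03979, 0.1065))  # expect []
```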
- -!============ -variable_entry: tauv -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downward_northward_stress -units: Pa -cell_methods: time: mean -long_name: Surface Downward Northward Wind Stress -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: tauv -type: real -positive: down -valid_min: -2.472 -valid_max: 2.527 -ok_min_mean_abs: 0.01447 -ok_max_mean_abs: 0.08134 -!---------------------------------- -! - -!============ -variable_entry: hfls -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upward_latent_heat_flux -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Upward Latent Heat Flux -comment: includes both evaporation and sublimation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfls -type: real -positive: up -valid_min: -76.77 -valid_max: 790.7 -ok_min_mean_abs: 50.39 -ok_max_mean_abs: 73.2 -!---------------------------------- -! - -!============ -variable_entry: hfss -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upward_sensible_heat_flux -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Upward Sensible Heat Flux -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfss -type: real -positive: up -valid_min: -264.5 -valid_max: 844.8 -ok_min_mean_abs: 10.7 -ok_max_mean_abs: 34.84 -!---------------------------------- -! - -!============ -variable_entry: rlds -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downwelling_longwave_flux_in_air -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Downwelling Longwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rlds -type: real -positive: down -valid_min: 30.71 -valid_max: 520.5 -ok_min_mean_abs: 271.2 -ok_max_mean_abs: 323.6 -!---------------------------------- -! - -!============ -variable_entry: rlus -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upwelling_longwave_flux_in_air -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Upwelling Longwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rlus -type: real -positive: up -valid_min: 43.75 -valid_max: 658 -ok_min_mean_abs: 325.6 -ok_max_mean_abs: 376.3 -!---------------------------------- -! - -!============ -variable_entry: rsds -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: surface_downwelling_shortwave_flux_in_air -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Downwelling Shortwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rsds -type: real -positive: down -valid_min: -0.002946 -valid_max: 524.4 -ok_min_mean_abs: 143.9 -ok_max_mean_abs: 181.6 -!---------------------------------- -! - -!============ -variable_entry: rsus -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upwelling_shortwave_flux_in_air -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Upwelling Shortwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rsus -type: real -positive: up -valid_min: -0.006589 -valid_max: 422.2 -ok_min_mean_abs: 26.22 -ok_max_mean_abs: 43.77 -!---------------------------------- -! - -!============ -variable_entry: rsdscs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downwelling_shortwave_flux_in_air_assuming_clear_sky -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Downwelling Clear-Sky Shortwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rsdscs -type: real -positive: down -valid_min: -1.748e+30 -valid_max: 522.4 -ok_min_mean_abs: 206.9 -ok_max_mean_abs: 239 -!---------------------------------- -! - -!============ -variable_entry: rsuscs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upwelling_shortwave_flux_in_air_assuming_clear_sky -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Upwelling Clear-Sky Shortwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rsuscs -type: real -positive: up -valid_min: -0.01446 -valid_max: 462.2 -ok_min_mean_abs: 31.65 -ok_max_mean_abs: 61.59 -!---------------------------------- -! - -!============ -variable_entry: rldscs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downwelling_longwave_flux_in_air_assuming_clear_sky -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Downwelling Clear-Sky Longwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rldscs -type: real -positive: down -valid_min: 33.55 -valid_max: 543.6 -ok_min_mean_abs: 238.6 -ok_max_mean_abs: 293.8 -!---------------------------------- -! - -!============ -variable_entry: rsdt -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: toa_incoming_shortwave_flux -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: TOA Incident Shortwave Radiation -comment: at the top of the atmosphere -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rsdt -type: real -positive: down -valid_min: 0 -valid_max: 580.4 -ok_min_mean_abs: 282.6 -ok_max_mean_abs: 315.8 -!---------------------------------- -! - -!============ -variable_entry: rsut -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: toa_outgoing_shortwave_flux -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: TOA Outgoing Shortwave Radiation -comment: at the top of the atmosphere -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rsut -type: real -positive: up -valid_min: -0.02689 -valid_max: 421.9 -ok_min_mean_abs: 96.72 -ok_max_mean_abs: 114.1 -!---------------------------------- -! - -!============ -variable_entry: rlut -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: toa_outgoing_longwave_flux -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: TOA Outgoing Longwave Radiation -comment: at the top of the atmosphere (to be compared with satellite measurements) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rlut -type: real -positive: up -valid_min: 67.48 -valid_max: 383.2 -ok_min_mean_abs: 207.4 -ok_max_mean_abs: 234.4 -!---------------------------------- -! - -!============ -variable_entry: rlutcs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: toa_outgoing_longwave_flux_assuming_clear_sky -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: TOA Outgoing Clear-Sky Longwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rlutcs -type: real -positive: up -valid_min: 70.59 -valid_max: 377.5 -ok_min_mean_abs: 228.9 -ok_max_mean_abs: 260.4 -!---------------------------------- -! - -!============ -variable_entry: rsutcs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: toa_outgoing_shortwave_flux_assuming_clear_sky -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: TOA Outgoing Clear-Sky Shortwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rsutcs -type: real -positive: up -valid_min: 0 -valid_max: 444 -ok_min_mean_abs: 54.7 -ok_max_mean_abs: 73.36 -!---------------------------------- -! - -!============ -variable_entry: prw -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: atmosphere_water_vapor_content -units: kg m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Water Vapor Path -comment: vertically integrated through the atmospheric column -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: prw -type: real -valid_min: -0.0006775 -valid_max: 78.04 -ok_min_mean_abs: 12.75 -ok_max_mean_abs: 23.09 -!---------------------------------- -! - -!============ -variable_entry: clt -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: Total Cloud Fraction -comment: for the whole atmospheric column, as seen from the surface or the top of the atmosphere. Include both large-scale and convective cloud. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: clt -type: real -valid_min: -0.0001822 -valid_max: 105 -ok_min_mean_abs: 39.37 -ok_max_mean_abs: 84.98 -!---------------------------------- -! - -!============ -variable_entry: clwvi -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: atmosphere_cloud_condensed_water_content -units: kg m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Condensed Water Path -comment: mass of condensed (liquid + ice) water in the column divided by the area of the column (not just the area of the cloudy portion of the column). Includes precipitating hydrometeors ONLY if the precipitating hydrometeor affects the calculation of radiative transfer in model. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: clwvi -type: real -valid_min: -3.827e-06 -valid_max: 3.364 -ok_min_mean_abs: -0.03549 -ok_max_mean_abs: 0.2846 -!---------------------------------- -! - -!============ -variable_entry: clivi -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: atmosphere_cloud_ice_content -units: kg m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Ice Water Path -comment: mass of ice water in the column divided by the area of the column (not just the area of the cloudy portion of the column). Includes precipitating frozen hydrometeors ONLY if the precipitating hydrometeor affects the calculation of radiative transfer in model. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: clivi -type: real -valid_min: -1.872e-06 -valid_max: 1.535 -ok_min_mean_abs: -0.0348 -ok_max_mean_abs: 0.1187 -!---------------------------------- -! - -!============ -variable_entry: rtmt -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: net_downward_radiative_flux_at_top_of_atmosphere_model -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Net Downward Flux at Top of Model -comment: i.e., at the top of that portion of the atmosphere where dynamics are explicitly treated by the model. This is reported only if it differs from the net downward radiative flux at the top of the atmosphere. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rtmt -type: real -positive: down -valid_min: -232.8 -valid_max: 218.7 -ok_min_mean_abs: 76.91 -ok_max_mean_abs: 89.25 -!---------------------------------- -! - -!============ -variable_entry: ccb -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_pressure_at_convective_cloud_base -units: Pa -cell_methods: time: mean -cell_measures: area: areacella -long_name: Air Pressure at Convective Cloud Base -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: ccb -type: real -!---------------------------------- -! - -!============ -variable_entry: cct -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_pressure_at_convective_cloud_top -units: Pa -cell_methods: time: mean -cell_measures: area: areacella -long_name: Air Pressure at Convective Cloud Top -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: cct -type: real -!---------------------------------- -! - -!============ -variable_entry: ci -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: convection_time_fraction -units: 1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Fraction of Time Convection Occurs -comment: Fraction of time that convection occurs in the grid cell. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: ci -type: real -!---------------------------------- -! - -!============ -variable_entry: sci -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: shallow_convection_time_fraction -units: 1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Fraction of Time Shallow Convection Occurs -comment: Fraction of time that shallow convection occurs in the grid cell. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: sci -type: real -!---------------------------------- -! - -!============ -variable_entry: fco2antt -!============ -modeling_realm: atmos -!---------------------------------- -!
Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_carbon_dioxide_expressed_as_carbon_due_to_anthropogenic_emission -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Carbon Mass Flux into Atmosphere Due to All Anthropogenic Emissions of CO2 -comment: This is requested only for the emission-driven coupled carbon climate model runs. Does not include natural fire sources but includes all anthropogenic sources, including fossil fuel use, cement production, agricultural burning, and sources associated with anthropogenic land use change excluding forest regrowth. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: fco2antt -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: fco2fos -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_fossil_fuel_combustion -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Carbon Mass Flux into Atmosphere Due to Fossil Fuel Emissions of CO2 -comment: This is the prescribed anthropogenic CO2 flux from fossil fuel use, including cement production, and flaring (but not from land-use changes, agricultural burning, forest regrowth, etc.) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: fco2fos -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: fco2nat -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_natural_sources -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Carbon Mass Flux into the Atmosphere Due to Natural Sources -comment: This is what the atmosphere sees (on its own grid). This field should be equivalent to the combined natural fluxes of carbon (requested in the L_mon and O_mon tables) that account for natural exchanges between the atmosphere and land or ocean reservoirs (i.e., ""net ecosystem biospheric productivity"", for land, and ""air to sea CO2 flux"", for ocean.) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: fco2nat -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: cl -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction_in_atmosphere_layer -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: Cloud Area Fraction -comment: Includes both large-scale and convective cloud. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: cl -type: real -valid_min: 0 -valid_max: 105 -ok_min_mean_abs: -0.04479 -ok_max_mean_abs: 26.07 -!---------------------------------- -!
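The cl entry above is reported on the generic alevel dimension; its vertical placement only becomes physical once the formula terms defined earlier in this table are applied (for standard_hybrid_sigma, p = a*p0 + b*ps), after which a column can be interpolated to the fixed plevs values requested for ta, ua, and va below. A minimal sketch of that two-step reconstruction, assuming numpy; the coefficient and temperature values are made up for illustration:

```python
# Minimal sketch: pressure from standard_hybrid_sigma terms (p = a*p0 + b*ps),
# then interpolation of one column to requested plevs. Values are made up.
import numpy as np

a = np.array([0.002, 0.05, 0.15, 0.10, 0.0])  # hypothetical a(k), top to surface
b = np.array([0.0, 0.05, 0.40, 0.80, 1.0])    # hypothetical b(k)
p0 = 101325.0                                  # reference pressure (Pa)
ps = 98000.0                                   # surface air pressure (Pa)

p = a * p0 + b * ps  # pressure at each model level (Pa)

# Interpolate a temperature column to three of the plevs values requested
# earlier in this table (Pa); interpolating in ln(p) is a common convention.
ta_model = np.array([220.0, 215.0, 235.0, 265.0, 288.0])  # K, made up
plevs = np.array([85000.0, 50000.0, 25000.0])
order = np.argsort(p)  # np.interp needs increasing abscissae
ta_on_plevs = np.interp(np.log(plevs), np.log(p[order]), ta_model[order])
print(dict(zip(plevs.tolist(), ta_on_plevs.round(1).tolist())))
```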
- -!============ -variable_entry: clw -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_fraction_of_cloud_liquid_water_in_air -units: 1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Mass Fraction of Cloud Liquid Water -comment: Includes both large-scale and convective cloud. Calculated as the mass of cloud liquid water in the grid cell divided by the mass of air (including the water in all phases) in the grid cell. Precipitating hydrometeors are included ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: clw -type: real -!---------------------------------- -! - -!============ -variable_entry: cli -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_fraction_of_cloud_ice_in_air -units: 1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Mass Fraction of Cloud Ice -comment: Includes both large-scale and convective cloud. This is calculated as the mass of cloud ice in the grid cell divided by the mass of air (including the water in all phases) in the grid cell. It includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: cli -type: real -!---------------------------------- -! - -!============ -variable_entry: mc -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: atmosphere_net_upward_convective_mass_flux -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Convective Mass Flux -comment: The net mass flux should represent the difference between the updraft and downdraft components. The flux is computed as the mass divided by the area of the grid cell. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevhalf time -out_name: mc -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: ta -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_temperature -units: K -cell_methods: time: mean -cell_measures: area: areacella -long_name: Air Temperature -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude plevs time -out_name: ta -type: real -valid_min: 157.1 -valid_max: 336.3 -ok_min_mean_abs: 194.3 -ok_max_mean_abs: 299.8 -!---------------------------------- -! - -!============ -variable_entry: ua -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: eastward_wind -units: m s-1 -cell_methods: time: mean -long_name: Eastward Wind -!---------------------------------- -!
Additional variable information: -!---------------------------------- -dimensions: longitude latitude plevs time -out_name: ua -type: real -valid_min: -68.65 -valid_max: 136.6 -ok_min_mean_abs: 1.101 -ok_max_mean_abs: 22.42 -!---------------------------------- -! - -!============ -variable_entry: va -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: northward_wind -units: m s-1 -cell_methods: time: mean -long_name: Northward Wind -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude plevs time -out_name: va -type: real -valid_min: -71.1 -valid_max: 69.93 -ok_min_mean_abs: 0.9886 -ok_max_mean_abs: 4.679 -!---------------------------------- -! - -!============ -variable_entry: hus -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: specific_humidity -units: 1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Specific Humidity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude plevs time -out_name: hus -type: real -valid_min: -0.000299 -valid_max: 0.02841 -ok_min_mean_abs: -0.0003539 -ok_max_mean_abs: 0.01041 -!---------------------------------- -! - -!============ -variable_entry: hur -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: relative_humidity -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: Relative Humidity -comment: This is the relative humidity with respect to liquid water for T> 0 C, and with respect to ice for T<0 C. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude plevs time -out_name: hur -type: real -valid_min: -2.642 -valid_max: 135.7 -ok_min_mean_abs: -10.31 -ok_max_mean_abs: 97 -!---------------------------------- -! - -!============ -variable_entry: wap -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: lagrangian_tendency_of_air_pressure -units: Pa s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: omega (=dp/dt) -comment: commonly referred to as ""omega"", this represents the vertical component of velocity in pressure coordinates (positive down) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude plevs time -out_name: wap -type: real -valid_min: -1.126 -valid_max: 2.319 -ok_min_mean_abs: -0.005083 -ok_max_mean_abs: 0.04256 -!---------------------------------- -! - -!============ -variable_entry: zg -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: geopotential_height -units: m -cell_methods: time: mean -cell_measures: area: areacella -long_name: Geopotential Height -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: longitude latitude plevs time -out_name: zg -type: real -valid_min: -719.7 -valid_max: 3.437e+04 -ok_min_mean_abs: -113.4 -ok_max_mean_abs: 3.299e+04 -!---------------------------------- -! - -!============ -variable_entry: tro3 -!============ -modeling_realm: atmos atmosChem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_fraction_of_ozone_in_air -units: 1e-9 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Mole Fraction of O3 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude plevs time -out_name: tro3 -type: real -valid_min: -419.2 -valid_max: 1.162e+04 -ok_min_mean_abs: -149.1 -ok_max_mean_abs: 7558 -!---------------------------------- -! - -!============ -variable_entry: tro3Clim -!============ -modeling_realm: atmos atmosChem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_fraction_of_ozone_in_air -units: 1e-9 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacella -long_name: Mole Fraction of O3 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude plevs time2 -out_name: tro3 -type: real -!---------------------------------- -! - -!============ -variable_entry: co2 -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_fraction_of_carbon_dioxide_in_air -units: 1e-6 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Mole Fraction of CO2 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude plevs time -out_name: co2 -type: real -!---------------------------------- -! - -!============ -variable_entry: co2Clim -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_fraction_of_carbon_dioxide_in_air -units: 1e-6 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacella -long_name: Mole Fraction of CO2 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude plevs time2 -out_name: co2 -type: real -!---------------------------------- -! - -!============ -variable_entry: co2mass -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: atmosphere_mass_of_carbon_dioxide -units: kg -cell_methods: time: mean -long_name: Total Atmospheric Mass of CO2 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: time -out_name: co2mass -type: real -!---------------------------------- -! - -!============ -variable_entry: co2massClim -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: atmosphere_mass_of_carbon_dioxide -units: kg -cell_methods: time: mean within years time: mean over years -long_name: Total Atmospheric Mass of CO2 -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: time2 -out_name: co2mass -type: real -!---------------------------------- -! - -!============ -variable_entry: ch4 -!============ -modeling_realm: atmos atmosChem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_fraction_of_methane_in_air -units: 1e-9 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Mole Fraction of CH4 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude plevs time -out_name: ch4 -type: real -!---------------------------------- -! - -!============ -variable_entry: ch4Clim -!============ -modeling_realm: atmos atmosChem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_fraction_of_methane_in_air -units: 1e-9 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacella -long_name: Mole Fraction of CH4 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude plevs time2 -out_name: ch4 -type: real -!---------------------------------- -! - -!============ -variable_entry: ch4global -!============ -modeling_realm: atmos atmosChem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_fraction_of_methane_in_air -units: 1e-9 -cell_methods: time: mean -long_name: Global Mean Mole Fraction of CH4 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: time -out_name: ch4global -type: real -!---------------------------------- -! - -!============ -variable_entry: ch4globalClim -!============ -modeling_realm: atmos atmosChem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_fraction_of_methane_in_air -units: 1e-9 -cell_methods: time: mean within years time: mean over years -long_name: Global Mean Mole Fraction of CH4 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: time2 -out_name: ch4global -type: real -!---------------------------------- -! - -!============ -variable_entry: n2o -!============ -modeling_realm: atmos atmosChem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_fraction_of_nitrous_oxide_in_air -units: 1e-9 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Mole Fraction of N2O -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude plevs time -out_name: n2o -type: real -!---------------------------------- -! - -!============ -variable_entry: n2oClim -!============ -modeling_realm: atmos atmosChem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_fraction_of_nitrous_oxide_in_air -units: 1e-9 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacella -long_name: Mole Fraction of N2O -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: longitude latitude plevs time2 -out_name: n2o -type: real -!---------------------------------- -! - -!============ -variable_entry: n2oglobal -!============ -modeling_realm: atmos atmosChem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_fraction_of_nitrous_oxide_in_air -units: 1e-9 -cell_methods: time: mean -long_name: Global Mean Mole Fraction of N2O -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: time -out_name: n2oglobal -type: real -!---------------------------------- -! - -!============ -variable_entry: n2oglobalClim -!============ -modeling_realm: atmos atmosChem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_fraction_of_nitrous_oxide_in_air -units: 1e-9 -cell_methods: time: mean within years time: mean over years -long_name: Global Mean Mole Fraction of N2O -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: time2 -out_name: n2oglobal -type: real -!---------------------------------- -! - -!============ -variable_entry: cfc11global -!============ -modeling_realm: atmos atmosChem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_fraction_of_cfc11_in_air -units: 1e-12 -cell_methods: time: mean -long_name: Global Mean Mole Fraction of CFC11 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: time -out_name: cfc11global -type: real -!---------------------------------- -! - -!============ -variable_entry: cfc12global -!============ -modeling_realm: atmos atmosChem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_fraction_of_cfc12_in_air -units: 1e-12 -cell_methods: time: mean -long_name: Global Mean Mole Fraction of CFC12 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: time -out_name: cfc12global -type: real -!---------------------------------- -! - -!============ -variable_entry: hcfc22global -!============ -modeling_realm: atmos atmosChem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_fraction_of_hcfc22_in_air -units: 1e-12 -cell_methods: time: mean -long_name: Global Mean Mole Fraction of HCFC22 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: time -out_name: hcfc22global -type: real -!---------------------------------- -! - -!============ -variable_entry: cfc113global -!============ -modeling_realm: atmos atmosChem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_fraction_of_cfc113_in_air -units: 1e-12 -cell_methods: time: mean -long_name: Global Mean Mole Fraction of CFC113 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: time -out_name: cfc113global -type: real -!---------------------------------- -! - - -!============ -variable_entry: pfull -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: air_pressure -units: Pa -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacella -long_name: Pressure on Model Levels -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time2 -out_name: pfull -type: real -!---------------------------------- -! - -!============ -variable_entry: phalf -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_pressure -units: Pa -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacella -long_name: Pressure on Model Half-Levels -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevhalf time2 -out_name: phalf -type: real -!---------------------------------- -! - -!============ -variable_entry: tasAdjust -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_temperature -units: K -cell_methods: time: mean -cell_measures: area: areacella -long_name: Bias-Corrected Near-Surface Air Temperature -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time height2m -out_name: tasAdjust -type: real -!---------------------------------- -! - -!============ -variable_entry: tsAdjust -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_temperature -units: K -cell_methods: time: mean -cell_measures: area: areacella -long_name: Bias-Corrected Surface Temperature -comment: ""skin"" temperature (i.e., SST for open ocean) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: tsAdjust -type: real -!---------------------------------- -! - -!============ -variable_entry: pslAdjust -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_pressure_at_sea_level -units: Pa -cell_methods: time: mean -cell_measures: area: areacella -long_name: Bias-Corrected Sea Level Pressure -comment: not, in general, the same as surface pressure -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: pslAdjust -type: real -!---------------------------------- -! - -!============ -variable_entry: prAdjust -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: precipitation_flux -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Bias-Corrected Precipitation -comment: at surface; includes both liquid and solid phases from all types of clouds (both large-scale and convective) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: prAdjust -type: real -!---------------------------------- -! 
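All of the deleted tables use the same plain-text format: key: value pairs, comments introduced by !, and variable_entry/axis_entry lines that open a new block. A minimal sketch of reading that format into dictionaries, assuming the file contents without the diff's leading minus signs; parse_cmor_table is a hypothetical helper, since these tables were normally consumed through the CMOR library rather than parsed by hand:

```python
# Sketch of a parser for the plain-text CMOR table format shown in this
# diff; illustrative only, not how CMOR or ESMValTool read these files.
def parse_cmor_table(text):
    """Map each variable_entry/axis_entry name to its key: value pairs."""
    entries, current = {}, None
    for line in text.splitlines():
        line = line.split('!', 1)[0].strip()  # drop trailing '!' comments
        if ':' not in line:
            continue
        key, _, value = line.partition(':')
        key, value = key.strip(), value.strip()
        if key in ('variable_entry', 'axis_entry'):
            current = entries.setdefault(value, {})
        elif current is not None:
            current[key] = value
    return entries

# e.g. parse_cmor_table(open('CMIP5_Amon').read())['rtmt']['units'] -> 'W m-2'
```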
- diff --git a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_LImon b/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_LImon deleted file mode 100644 index 391f65cfd8..0000000000 --- a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_LImon +++ /dev/null @@ -1,439 +0,0 @@ -table_id: Table LImon -modeling_realm: land - -frequency: mon - -cmor_version: 2.6 ! minimum version of CMOR that can read this table -cf_version: 1.4 ! version of CF that output conforms to -project_id: CMIP5 ! project id -table_date: 17 July 2013 ! date this table was constructed - -missing_value: 1.e20 ! value used to indicate a missing value - ! in arrays output by netCDF as 32-bit IEEE - ! floating-point numbers (float or real) - -baseURL: http://cmip-pcmdi.llnl.gov/CMIP5/dataLocation -product: output - -required_global_attributes: creation_date tracking_id forcing model_id parent_experiment_id parent_experiment_rip branch_time contact institute_id ! space separated required global attribute - -forcings: N/A Nat Ant GHG SD SI SA TO SO Oz LU Sl Vl SS Ds BC MD OC AA - -expt_id_ok: '10- or 30-year run initialized in year XXXX' 'decadalXXXX' -expt_id_ok: 'volcano-free hindcast initialized in year XXXX' 'noVolcXXXX' -expt_id_ok: 'prediction with 2010 volcano' 'volcIn2010' -expt_id_ok: 'pre-industrial control' 'piControl' -expt_id_ok: 'historical' 'historical' -expt_id_ok: 'historical extension' 'historicalExt' -expt_id_ok: 'other historical forcing' 'historicalMisc' -expt_id_ok: 'mid-Holocene' 'midHolocene' -expt_id_ok: 'last glacial maximum' 'lgm' -expt_id_ok: 'last millennium' 'past1000' -expt_id_ok: 'RCP4.5' 'rcp45' -expt_id_ok: 'RCP8.5' 'rcp85' -expt_id_ok: 'RCP2.6' 'rcp26' -expt_id_ok: 'RCP6' 'rcp60' -expt_id_ok: 'ESM pre-industrial control' 'esmControl' -expt_id_ok: 'ESM historical' 'esmHistorical' -expt_id_ok: 'ESM RCP8.5' 'esmrcp85' -expt_id_ok: 'ESM fixed climate 1' 'esmFixClim1' -expt_id_ok: 'ESM fixed climate 2' 'esmFixClim2' -expt_id_ok: 'ESM feedback 1' 'esmFdbk1' -expt_id_ok: 'ESM feedback 2' 'esmFdbk2' -expt_id_ok: '1 percent per year CO2' '1pctCO2' -expt_id_ok: 'abrupt 4XCO2' 'abrupt4xCO2' -expt_id_ok: 'natural-only' 'historicalNat' -expt_id_ok: 'GHG-only' 'historicalGHG' -expt_id_ok: 'AMIP' 'amip' -expt_id_ok: '2030 time-slice' 'sst2030' -expt_id_ok: 'control SST climatology' 'sstClim' -expt_id_ok: 'CO2 forcing' 'sstClim4xCO2' -expt_id_ok: 'all aerosol forcing' 'sstClimAerosol' -expt_id_ok: 'sulfate aerosol forcing' 'sstClimSulfate' -expt_id_ok: '4xCO2 AMIP' 'amip4xCO2' -expt_id_ok: 'AMIP plus patterned anomaly' 'amipFuture' -expt_id_ok: 'aqua planet control' 'aquaControl' -expt_id_ok: '4xCO2 aqua planet' 'aqua4xCO2' -expt_id_ok: 'aqua planet plus 4K anomaly' 'aqua4K' -expt_id_ok: 'AMIP plus 4K anomaly' 'amip4K' - - -approx_interval: 30.000000 ! approximate spacing between successive time - ! samples (in units of the output time - ! coordinate. - -!============ -axis_entry: longitude -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: longitude -units: degrees_east -axis: X ! X, Y, Z, T (default: undeclared) -long_name: longitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lon -valid_min: 0.0 -valid_max: 360.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: latitude -!============ -!---------------------------------- -! 
Axis attributes: -!---------------------------------- -standard_name: latitude -units: degrees_north -axis: Y ! X, Y, Z, T (default: undeclared) -long_name: latitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lat -valid_min: -90.0 -valid_max: 90.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: time -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: time -units: days since ? -axis: T ! X, Y, Z, T (default: undeclared) -long_name: time -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: time -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: sdepth -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: depth -units: m -axis: Z ! X, Y, Z, T (default: undeclared) -positive: down ! up or down (default: undeclared) -long_name: depth -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: depth -valid_min: 0.0 -valid_max: 200.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - -!============ -variable_entry: snc -!============ -modeling_realm: landIce land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_snow_area_fraction -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: Snow Area Fraction -comment: Fraction of each grid cell that is occupied by snow that rests on land portion of cell. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: snc -type: real -valid_min: 0 -valid_max: 105 -ok_min_mean_abs: -29.01 -ok_max_mean_abs: 78.46 -!---------------------------------- -! - -!============ -variable_entry: snw -!============ -modeling_realm: landIce land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_snow_amount -units: kg m-2 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Surface Snow Amount -comment: Computed as the mass of surface snow on the land portion of the grid cell divided by the land area in the grid cell; reported as 0.0 where the land fraction is 0; excluded is snow on vegetation canopy or on sea ice. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: snw -type: real -valid_min: -0.007542 -valid_max: 9.555e+05 -ok_min_mean_abs: -6025 -ok_max_mean_abs: 1.213e+04 -!---------------------------------- -! - -!============ -variable_entry: snd -!============ -modeling_realm: landIce land -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: surface_snow_thickness -units: m -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Snow Depth -comment: where land over land, this is computed as the mean thickness of snow in the land portion of the grid cell (averaging over the entire land portion, including the snow-free fraction). Reported as 0.0 where the land fraction is 0. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: snd -type: real -valid_min: 0 -valid_max: 962.9 -ok_min_mean_abs: -1.961 -ok_max_mean_abs: 4.503 -!---------------------------------- -! - -!============ -variable_entry: lwsnl -!============ -modeling_realm: landIce land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: liquid_water_content_of_snow_layer -units: kg m-2 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Liquid Water Content of Snow Layer -comment: where land over land: this is computed as the total mass of liquid water contained interstitially within the snow layer of the land portion of a grid cell divided by the area of the land portion of the cell. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: lwsnl -type: real -!---------------------------------- -! - -!============ -variable_entry: sootsn -!============ -modeling_realm: landIce land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: soot_content_of_surface_snow -units: kg m-2 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Snow Soot Content -comment: the entire land portion of the grid cell is considered, with snow soot content set to 0.0 in regions free of snow. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: sootsn -type: real -!---------------------------------- -! - -!============ -variable_entry: agesno -!============ -modeling_realm: landIce land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: age_of_surface_snow -units: day -cell_methods: time: mean (with samples weighted by snow mass) area: mean where land -cell_measures: area: areacella -long_name: Snow Age -comment: When computing the time-mean here, the time samples, weighted by the mass of snow on the land portion of the grid cell, are accumulated and then divided by the sum of the weights. Reported as ""missing"" in regions free of snow on land. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: agesno -type: real -!---------------------------------- -! - -!============ -variable_entry: tsn -!============ -modeling_realm: landIce land -!---------------------------------- -!
Variable attributes: -!---------------------------------- -standard_name: temperature_in_surface_snow -units: K -cell_methods: time: mean (with samples weighted by snow mass) area: mean where land -cell_measures: area: areacella -long_name: Snow Internal Temperature -comment: This temperature is averaged over all the snow in the grid cell that rests on land or land ice. When computing the time-mean here, the time samples, weighted by the mass of snow on the land portion of the grid cell, are accumulated and then divided by the sum of the weights. Reported as ""missing"" in regions free of snow on land. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: tsn -type: real -!---------------------------------- -! - -!============ -variable_entry: snm -!============ -modeling_realm: landIce land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_snow_melt_flux -units: kg m-2 s-1 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Surface Snow Melt -comment: Computed as the total surface melt water on the land portion of the grid cell divided by the land area in the grid cell; reported as 0.0 for snow-free land regions; reported as 0.0 where the land fraction is 0. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: snm -type: real -valid_min: 0 -valid_max: 0.0003926 -ok_min_mean_abs: -2.092e-06 -ok_max_mean_abs: 6.123e-06 -!---------------------------------- -! - -!============ -variable_entry: sbl -!============ -modeling_realm: landIce land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_snow_and_ice_sublimation_flux -units: kg m-2 s-1 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Surface Snow and Ice Sublimation Flux -comment: The snow and ice sublimation flux is the loss of snow and ice mass resulting from their conversion to water vapor. Computed as the total sublimation on the land portion of the grid cell divided by the land area in the grid cell; reported as 0.0 for snow-free land regions; reported as 0.0 where the land fraction is 0. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: sbl -type: real -valid_min: -0.004959 -valid_max: 0.0009936 -ok_min_mean_abs: -5.521e-05 -ok_max_mean_abs: 9.61e-05 -!---------------------------------- -! - -!============ -variable_entry: hfdsn -!============ -modeling_realm: landIce land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downward_heat_flux_in_snow -units: W m-2 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Downward Heat Flux into Snow Where Land over Land -comment: the net downward heat flux from the atmosphere into the snow that lies on land divided by the land area in the grid cell; reported as 0.0 for snow-free land regions or where the land fraction is 0. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfdsn -type: real -positive: down -!---------------------------------- -!
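The agesno and tsn entries above use an unusual cell_methods: the time mean is weighted by snow mass, so snow-free samples carry no weight. A minimal numpy sketch of that accumulation, with made-up sample values:

```python
# Snow-mass-weighted time mean as described in the agesno/tsn comments:
# accumulate samples weighted by snow mass, then divide by the summed
# weights; report missing where there is no snow at all.
import numpy as np

snow_temperature = np.array([260.0, 265.0, 270.0])  # K, one value per time sample
snow_mass = np.array([120.0, 80.0, 0.0])            # kg m-2, the weights

if snow_mass.sum() > 0:
    tsn_mean = (snow_temperature * snow_mass).sum() / snow_mass.sum()
else:
    tsn_mean = np.nan  # "missing" in regions free of snow on land
print(tsn_mean)  # 262.0
```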
- -!============ -variable_entry: tpf -!============ -modeling_realm: landIce land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: permafrost_layer_thickness -units: m -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Permafrost Layer Thickness -comment: where land over land: This is the mean thickness of the permafrost layer in the land portion of the grid cell. Reported as 0.0 in permafrost-free regions. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: tpf -type: real -!---------------------------------- -! - -!============ -variable_entry: pflw -!============ -modeling_realm: landIce land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: liquid_water_content_of_permafrost_layer -units: kg m-2 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Liquid Water Content of Permafrost Layer -comment: ""where land over land"", i.e., this is the total mass of liquid water contained within the permafrost layer within the land portion of a grid cell divided by the area of the land portion of the cell. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: pflw -type: real -!---------------------------------- -! - diff --git a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_Lmon b/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_Lmon deleted file mode 100644 index 7f97b2d2a1..0000000000 --- a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_Lmon +++ /dev/null @@ -1,1656 +0,0 @@ -table_id: Table Lmon -modeling_realm: land - -frequency: mon - -cmor_version: 2.6 ! minimum version of CMOR that can read this table -cf_version: 1.4 ! version of CF that output conforms to -project_id: CMIP5 ! project id -table_date: 17 July 2013 ! date this table was constructed - -missing_value: 1.e20 ! value used to indicate a missing value - ! in arrays output by netCDF as 32-bit IEEE - ! floating-point numbers (float or real) - -baseURL: http://cmip-pcmdi.llnl.gov/CMIP5/dataLocation -product: output - -required_global_attributes: creation_date tracking_id forcing model_id parent_experiment_id parent_experiment_rip branch_time contact institute_id ! 
space separated required global attribute - -forcings: N/A Nat Ant GHG SD SI SA TO SO Oz LU Sl Vl SS Ds BC MD OC AA - -expt_id_ok: '10- or 30-year run initialized in year XXXX' 'decadalXXXX' -expt_id_ok: 'volcano-free hindcast initialized in year XXXX' 'noVolcXXXX' -expt_id_ok: 'prediction with 2010 volcano' 'volcIn2010' -expt_id_ok: 'pre-industrial control' 'piControl' -expt_id_ok: 'historical' 'historical' -expt_id_ok: 'historical extension' 'historicalExt' -expt_id_ok: 'other historical forcing' 'historicalMisc' -expt_id_ok: 'mid-Holocene' 'midHolocene' -expt_id_ok: 'last glacial maximum' 'lgm' -expt_id_ok: 'last millennium' 'past1000' -expt_id_ok: 'RCP4.5' 'rcp45' -expt_id_ok: 'RCP8.5' 'rcp85' -expt_id_ok: 'RCP2.6' 'rcp26' -expt_id_ok: 'RCP6' 'rcp60' -expt_id_ok: 'ESM pre-industrial control' 'esmControl' -expt_id_ok: 'ESM historical' 'esmHistorical' -expt_id_ok: 'ESM RCP8.5' 'esmrcp85' -expt_id_ok: 'ESM fixed climate 1' 'esmFixClim1' -expt_id_ok: 'ESM fixed climate 2' 'esmFixClim2' -expt_id_ok: 'ESM feedback 1' 'esmFdbk1' -expt_id_ok: 'ESM feedback 2' 'esmFdbk2' -expt_id_ok: '1 percent per year CO2' '1pctCO2' -expt_id_ok: 'abrupt 4XCO2' 'abrupt4xCO2' -expt_id_ok: 'natural-only' 'historicalNat' -expt_id_ok: 'GHG-only' 'historicalGHG' -expt_id_ok: 'AMIP' 'amip' -expt_id_ok: '2030 time-slice' 'sst2030' -expt_id_ok: 'control SST climatology' 'sstClim' -expt_id_ok: 'CO2 forcing' 'sstClim4xCO2' -expt_id_ok: 'all aerosol forcing' 'sstClimAerosol' -expt_id_ok: 'sulfate aerosol forcing' 'sstClimSulfate' -expt_id_ok: '4xCO2 AMIP' 'amip4xCO2' -expt_id_ok: 'AMIP plus patterned anomaly' 'amipFuture' -expt_id_ok: 'aqua planet control' 'aquaControl' -expt_id_ok: '4xCO2 aqua planet' 'aqua4xCO2' -expt_id_ok: 'aqua planet plus 4K anomaly' 'aqua4K' -expt_id_ok: 'AMIP plus 4K anomaly' 'amip4K' - - -approx_interval: 30.000000 ! approximate spacing between successive time - ! samples (in units of the output time - ! coordinate. - -!============ -axis_entry: longitude -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: longitude -units: degrees_east -axis: X ! X, Y, Z, T (default: undeclared) -long_name: longitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lon -valid_min: 0.0 -valid_max: 360.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: latitude -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: latitude -units: degrees_north -axis: Y ! X, Y, Z, T (default: undeclared) -long_name: latitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lat -valid_min: -90.0 -valid_max: 90.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: time -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: time -units: days since ? -axis: T ! X, Y, Z, T (default: undeclared) -long_name: time -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: time -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! 
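The longitude, latitude and time axis entries above all demand cell bounds (must_have_bounds: yes). A minimal sketch with Iris (which ESMValTool uses for data handling) of building a latitude coordinate that satisfies the entry; the 2.5-degree grid here is invented for illustration:

```python
# Building a CF latitude coordinate with the bounds the axis entry
# requires; the grid values are a made-up example.
import numpy as np
from iris.coords import DimCoord

latitude = DimCoord(
    np.linspace(-88.75, 88.75, 72),  # hypothetical grid cell centres
    standard_name='latitude',        # matches the entry's standard_name
    units='degrees_north',           # matches the entry's units
    var_name='lat',                  # matches the entry's out_name
)
latitude.guess_bounds()              # adds the contiguous cell bounds
assert latitude.has_bounds()
assert latitude.points.min() >= -90.0 and latitude.points.max() <= 90.0
```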
- - -!============ -axis_entry: sdepth -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: depth -units: m -axis: Z ! X, Y, Z, T (default: undeclared) -positive: down ! up or down (default: undeclared) -long_name: depth -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: depth -valid_min: 0.0 -valid_max: 200.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: sdepth1 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: depth -units: m -axis: Z ! X, Y, Z, T (default: undeclared) -positive: down ! up or down (default: undeclared) -long_name: depth -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: depth -valid_min: 0.0 -valid_max: 0.2 -stored_direction: increasing -type: double -value: 0.05 ! of scalar (singleton) dimension -bounds_values: 0.0 0.1 ! of scalar (singleton) dimension bounds -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: vegtype -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: area_type -long_name: plant functional type -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: type -type: character -must_have_bounds: no -coords_attrib: type_description -!---------------------------------- -! - - -!============ -axis_entry: typebare -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: area_type -long_name: surface type -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: type -type: character -value: bare_ground ! of scalar (singleton) dimension -must_have_bounds: no -coords_attrib: type_description -!---------------------------------- -! - - -!============ -axis_entry: typepdec -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: area_type -long_name: surface type -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: type -type: character -value: primary_deciduous_trees ! of scalar (singleton) dimension -must_have_bounds: no -coords_attrib: type_description -!---------------------------------- -! - - -!============ -axis_entry: typepever -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: area_type -long_name: surface type -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: type -type: character -value: primary_evergreen_trees ! of scalar (singleton) dimension -must_have_bounds: no -coords_attrib: type_description -!---------------------------------- -! - - -!============ -axis_entry: typesdec -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: area_type -long_name: surface type -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: type -type: character -value: secondary_decidous_trees ! 
of scalar (singleton) dimension -must_have_bounds: no -coords_attrib: type_description -!---------------------------------- -! - - -!============ -axis_entry: typesever -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: area_type -long_name: surface type -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: type -type: character -value: secondary_evergreen_trees ! of scalar (singleton) dimension -must_have_bounds: no -coords_attrib: type_description -!---------------------------------- -! - - -!============ -axis_entry: typec3pft -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: area_type -long_name: surface type -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: type -type: character -value: c3_plant_functional_types ! of scalar (singleton) dimension -must_have_bounds: no -coords_attrib: type_description -!---------------------------------- -! - - -!============ -axis_entry: typec4pft -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: area_type -long_name: surface type -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: type -type: character -value: c4_plant_functional_types ! of scalar (singleton) dimension -must_have_bounds: no -coords_attrib: type_description -!---------------------------------- -! - -!============ -variable_entry: mrsos -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: moisture_content_of_soil_layer -units: kg m-2 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Moisture in Upper Portion of Soil Column -comment: the mass of water in all phases in a thin surface soil layer. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time sdepth1 -out_name: mrsos -type: real -valid_min: -2.008 -valid_max: 146.5 -ok_min_mean_abs: -35.89 -ok_max_mean_abs: 123.3 -!---------------------------------- -! - -!============ -variable_entry: mrso -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: soil_moisture_content -units: kg m-2 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Total Soil Moisture Content -comment: the mass per unit area (summed over all soil layers) of water in all phases. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: mrso -type: real -valid_min: -64.17 -valid_max: 5717 -ok_min_mean_abs: -1424 -ok_max_mean_abs: 3038 -!---------------------------------- -! - -!============ -variable_entry: mrfso -!============ -modeling_realm: land landIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: soil_frozen_water_content -units: kg m-2 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Soil Frozen Water Content -comment: the mass (summed over all layers) of frozen water.
-!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: mrfso -type: real -valid_min: 0 -valid_max: 5763 -ok_min_mean_abs: -419 -ok_max_mean_abs: 940.3 -!---------------------------------- -! - -!============ -variable_entry: mrros -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_runoff_flux -units: kg m-2 s-1 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Surface Runoff -comment: the total surface runoff leaving the land portion of the grid cell. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: mrros -type: real -valid_min: -6.802e-06 -valid_max: 0.0009825 -ok_min_mean_abs: -5.867e-06 -ok_max_mean_abs: 1.302e-05 -!---------------------------------- -! - -!============ -variable_entry: mrro -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: runoff_flux -units: kg m-2 s-1 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Total Runoff -comment: "the total runoff (including ""drainage"" through the base of the soil model) leaving the land portion of the grid cell." -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: mrro -type: real -valid_min: -0.0002019 -valid_max: 0.001065 -ok_min_mean_abs: -6.668e-06 -ok_max_mean_abs: 1.874e-05 -!---------------------------------- -! - -!============ -variable_entry: prveg -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: precipitation_flux_onto_canopy -units: kg m-2 s-1 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Precipitation onto Canopy -comment: the precipitation flux that is intercepted by the vegetation canopy (if present in model) before reaching the ground. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: prveg -type: real -!---------------------------------- -! - -!============ -variable_entry: evspsblveg -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: water_evaporation_flux_from_canopy -units: kg m-2 s-1 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Evaporation from Canopy -comment: the canopy evaporation+sublimation (if present in model). -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: evspsblveg -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: evspsblsoi -!============ -modeling_realm: land -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: water_evaporation_flux_from_soil -units: kg m-2 s-1 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Water Evaporation from Soil -comment: includes sublimation. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: evspsblsoi -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: tran -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: transpiration_flux -units: kg m-2 s-1 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Transpiration -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: tran -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: mrlsl -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: moisture_content_of_soil_layer -units: kg m-2 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Water Content of Soil Layer -comment: in each soil layer, the mass of water in all phases, including ice. Reported as ""missing"" for grid cells occupied entirely by ""sea"" -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude sdepth time -out_name: mrlsl -type: real -!---------------------------------- -! - -!============ -variable_entry: tsl -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: soil_temperature -units: K -cell_methods: time: mean -cell_measures: area: areacella -long_name: Temperature of Soil -comment: "Temperature of each soil layer. Reported as ""missing"" for grid cells occupied entirely by ""sea""." -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude sdepth time -out_name: tsl -type: real -!---------------------------------- -! - -!============ -variable_entry: treeFrac -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: area_fraction -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: Tree Cover Fraction -comment: fraction of entire grid cell that is covered by trees. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: treeFrac -type: real -!---------------------------------- -! - -!============ -variable_entry: grassFrac -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: area_fraction -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: Natural Grass Fraction -comment: fraction of entire grid cell that is covered by natural grass. -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: grassFrac -type: real -!---------------------------------- -! - -!============ -variable_entry: shrubFrac -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: area_fraction -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: Shrub Fraction -comment: fraction of entire grid cell that is covered by shrub. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: shrubFrac -type: real -!---------------------------------- -! - -!============ -variable_entry: cropFrac -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: area_fraction -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: Crop Fraction -comment: fraction of entire grid cell that is covered by crop. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: cropFrac -type: real -!---------------------------------- -! - -!============ -variable_entry: pastureFrac -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: area_fraction -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: Anthropogenic Pasture Fraction -comment: fraction of entire grid cell that is covered by anthropogenic pasture. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: pastureFrac -type: real -!---------------------------------- -! - -!============ -variable_entry: baresoilFrac -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: area_fraction -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: Bare Soil Fraction -comment: fraction of entire grid cell that is covered by bare soil. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time typebare -out_name: baresoilFrac -type: real -!---------------------------------- -! - -!============ -variable_entry: residualFrac -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: area_fraction -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: Fraction of Grid Cell that is Land but Neither Vegetation-Covered nor Bare Soil -comment: fraction of entire grid cell that is land and is covered by ""non-vegetation"" and ""non-bare-soil"" (e.g., urban, ice, lakes, etc.) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: residualFrac -type: real -!---------------------------------- -! - -!============ -variable_entry: burntArea -!============ -modeling_realm: land -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: area_fraction -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: Burnt Area Fraction -comment: fraction of entire grid cell that is covered by burnt vegetation. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: burntArea -type: real -!---------------------------------- -! - -!============ -variable_entry: cVeg -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: vegetation_carbon_content -units: kg m-2 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Carbon Mass in Vegetation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: cVeg -type: real -!---------------------------------- -! - -!============ -variable_entry: cLitter -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: litter_carbon_content -units: kg m-2 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Carbon Mass in Litter Pool -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: cLitter -type: real -!---------------------------------- -! - -!============ -variable_entry: cSoil -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: soil_carbon_content -units: kg m-2 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Carbon Mass in Soil Pool -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: cSoil -type: real -!---------------------------------- -! - -!============ -variable_entry: cProduct -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: carbon_content_of_products_of_anthropogenic_land_use_change -units: kg m-2 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Carbon Mass in Products of Land Use Change -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: cProduct -type: real -!---------------------------------- -! - -!============ -variable_entry: lai -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: leaf_area_index -units: 1 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Leaf Area Index -comment: a ratio obtained by dividing the total upper leaf surface area of vegetation by the (horizontal) surface area of the land on which it grows. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: lai -type: real -!---------------------------------- -! 
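Each variable_entry block in these deleted tables is a flat list of colon-separated attributes, with '!' marking comments and separator lines. As a reading aid, here is a minimal sketch of how one such block could be parsed into a dict; parse_variable_entry is a hypothetical helper, not part of CMOR or ESMValTool:

def parse_variable_entry(lines):
    """Parse the 'key: value' lines of a single variable_entry block."""
    entry = {}
    for line in lines:
        text = line.split('!', 1)[0].strip()  # drop '!' comments/separators
        key, sep, value = text.partition(':')
        if sep and value.strip():
            entry[key.strip()] = value.strip()
    return entry

# e.g. parse_variable_entry(['standard_name: leaf_area_index', 'units: 1'])
# returns {'standard_name': 'leaf_area_index', 'units': '1'}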
- -!============ -variable_entry: gpp -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: gross_primary_productivity_of_carbon -units: kg m-2 s-1 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Carbon Mass Flux out of Atmosphere due to Gross Primary Production on Land -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: gpp -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: ra -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: plant_respiration_carbon_flux -units: kg m-2 s-1 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Carbon Mass Flux into Atmosphere due to Autotrophic (Plant) Respiration on Land -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: ra -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: npp -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: net_primary_productivity_of_carbon -units: kg m-2 s-1 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Carbon Mass Flux out of Atmosphere due to Net Primary Production on Land -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: npp -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rh -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: heterotrophic_respiration_carbon_flux -units: kg m-2 s-1 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Carbon Mass Flux into Atmosphere due to Heterotrophic Respiration on Land -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rh -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: fFire -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_fires_excluding_anthropogenic_land_use_change -units: kg m-2 s-1 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Carbon Mass Flux into Atmosphere due to CO2 Emission from Fire -comment: CO2 emissions (expressed as a carbon mass flux) from natural fires + human ignition fires as calculated by the fire module of the DGVM, but excluding any CO2 flux from fire included in fLuc, defined below (CO2 Flux to Atmosphere from Land Use Change). -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: fFire -type: real -positive: up -!---------------------------------- -! 
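The gpp, ra, npp, rh and fFire entries above each declare a 'positive' attribute that fixes the sign convention of the flux. A hedged sketch of how mixed conventions can be mapped onto a common positive-down (into the land) basis before combining fluxes; positive_down is an illustrative helper, not an ESMValTool function:

def positive_down(value, positive):
    """Return the flux with a 'down' (into the surface) sign convention."""
    if positive == 'down':
        return value
    if positive == 'up':
        return -value
    raise ValueError(f"unknown 'positive' attribute: {positive!r}")

# e.g. an nbp-like net land uptake (kg m-2 s-1) built from the entries above:
# net = (positive_down(gpp, 'down') + positive_down(ra, 'up')
#        + positive_down(rh, 'up') + positive_down(fFire, 'up'))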
- -!============ -variable_entry: fGrazing -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_grazing -units: kg m-2 s-1 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Carbon Mass Flux into Atmosphere due to Grazing on Land -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: fGrazing -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: fHarvest -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_crop_harvesting -units: kg m-2 s-1 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Carbon Mass Flux into Atmosphere due to Crop Harvesting -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: fHarvest -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: fLuc -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_net_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_anthropogenic_land_use_change -units: kg m-2 s-1 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Net Carbon Mass Flux into Atmosphere due to Land Use Change -comment: human changes to land (excluding forest regrowth), possibly accounting for different time scales related to, for example, the fate of the wood. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: fLuc -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: nbp -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_net_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_all_land_processes -units: kg m-2 s-1 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Carbon Mass Flux out of Atmosphere due to Net Biospheric Production on Land -comment: This is the net mass flux of carbon between land and atmosphere calculated as photosynthesis MINUS the sum of plant and soil respiration, carbon fluxes from fire, harvest, grazing and land use change. Positive flux is into the land. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: nbp -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: fVegLitter -!============ -modeling_realm: land -!---------------------------------- -!
Variable attributes: -!---------------------------------- -standard_name: litter_carbon_flux -units: kg m-2 s-1 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Total Carbon Mass Flux from Vegetation to Litter -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: fVegLitter -type: real -!---------------------------------- -! - -!============ -variable_entry: fLitterSoil -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: carbon_mass_flux_into_soil_from_litter -units: kg m-2 s-1 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Total Carbon Mass Flux from Litter to Soil -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: fLitterSoil -type: real -!---------------------------------- -! - -!============ -variable_entry: fVegSoil -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: carbon_mass_flux_into_soil_from_vegetation_excluding_litter -units: kg m-2 s-1 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Total Carbon Mass Flux from Vegetation Directly to Soil -comment: In some models part of carbon (e.g., root exudate) can go directly into the soil pool without entering litter. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: fVegSoil -type: real -!---------------------------------- -! - -!============ -variable_entry: cLeaf -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: leaf_carbon_content -units: kg m-2 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Carbon Mass in Leaves -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: cLeaf -type: real -!---------------------------------- -! - -!============ -variable_entry: cWood -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: wood_carbon_content -units: kg m-2 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Carbon Mass in Wood -comment: including sapwood and hardwood. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: cWood -type: real -!---------------------------------- -! - -!============ -variable_entry: cRoot -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: root_carbon_content -units: kg m-2 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Carbon Mass in Roots -comment: including fine and coarse roots. -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: cRoot -type: real -!---------------------------------- -! - -!============ -variable_entry: cMisc -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: miscellaneous_living_matter_carbon_content -units: kg m-2 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Carbon Mass in Other Living Compartments on Land -comment: e.g., labile, fruits, reserves, etc. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: cMisc -type: real -!---------------------------------- -! - -!============ -variable_entry: cCwd -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: wood_debris_carbon_content -units: kg m-2 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Carbon Mass in Coarse Woody Debris -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: cCwd -type: real -!---------------------------------- -! - -!============ -variable_entry: cLitterAbove -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_litter_carbon_content -units: kg m-2 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Carbon Mass in Above-Ground Litter -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: cLitterAbove -type: real -!---------------------------------- -! - -!============ -variable_entry: cLitterBelow -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: subsurface_litter_carbon_content -units: kg m-2 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Carbon Mass in Below-Ground Litter -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: cLitterBelow -type: real -!---------------------------------- -! - -!============ -variable_entry: cSoilFast -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: fast_soil_pool_carbon_content -units: kg m-2 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Carbon Mass in Fast Soil Pool -comment: fast is meant as lifetime of less than 10 years for reference climate conditions (20 C, no water limitations). -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: cSoilFast -type: real -!---------------------------------- -! - -!============ -variable_entry: cSoilMedium -!============ -modeling_realm: land -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: medium_soil_pool_carbon_content -units: kg m-2 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Carbon Mass in Medium Soil Pool -comment: medium is meant as lifetime of more than 10 years and less than 100 years for reference climate conditions (20 C, no water limitations) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: cSoilMedium -type: real -!---------------------------------- -! - -!============ -variable_entry: cSoilSlow -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: slow_soil_pool_carbon_content -units: kg m-2 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Carbon Mass in Slow Soil Pool -comment: slow is meant as lifetime of more than 100 years for reference climate conditions (20 C, no water limitations) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: cSoilSlow -type: real -!---------------------------------- -! - -!============ -variable_entry: landCoverFrac -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: area_fraction -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: Plant Functional Type Grid Fraction -comment: The categories may differ from model to model, depending on their PFT definitions. This may include natural PFTs, anthropogenic PFTs, bare soil, lakes, urban areas, etc. Sum of all should equal the fraction of the grid-cell that is land. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude vegtype time -out_name: landCoverFrac -type: real -!---------------------------------- -! - -!============ -variable_entry: treeFracPrimDec -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: area_fraction -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: Total Primary Deciduous Tree Fraction -comment: "This is the fraction of the entire grid cell that is covered by ""total primary deciduous trees.""" -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time typepdec -out_name: treeFracPrimDec -type: real -!---------------------------------- -! - -!============ -variable_entry: treeFracPrimEver -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: area_fraction -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: Total Primary Evergreen Tree Cover Fraction -comment: fraction of entire grid cell that is covered by primary evergreen trees. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time typepever -out_name: treeFracPrimEver -type: real -!---------------------------------- -!
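The landCoverFrac comment above says the per-type fractions should sum to the land fraction of each grid cell. A small consistency check along those lines, assuming numpy arrays with vegtype as the leading axis; the function name and tolerance are illustrative assumptions, not part of any table or tool:

import numpy as np

def cover_fractions_consistent(land_cover_frac, land_frac, atol=0.5):
    """Check that cover fractions (%) sum to the land fraction (%) per cell."""
    total = land_cover_frac.sum(axis=0)  # sum over the vegtype axis
    return bool(np.allclose(total, land_frac, atol=atol))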
- -!============ -variable_entry: treeFracSecDec -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: area_fraction -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: Total Secondary Deciduous Tree Cover Fraction -comment: fraction of entire grid cell that is covered by secondary deciduous trees. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time typesdec -out_name: treeFracSecDec -type: real -!---------------------------------- -! - -!============ -variable_entry: treeFracSecEver -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: area_fraction -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: Total Secondary Evergreen Tree Cover Fraction -comment: fraction of entire grid cell that is covered by secondary evergreen trees. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time typesever -out_name: treeFracSecEver -type: real -!---------------------------------- -! - -!============ -variable_entry: c3PftFrac -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: area_fraction -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: Total C3 PFT Cover Fraction -comment: fraction of entire grid cell that is covered by C3 PFTs (including grass, crops, and trees). -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time typec3pft -out_name: c3PftFrac -type: real -!---------------------------------- -! - -!============ -variable_entry: c4PftFrac -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: area_fraction -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: Total C4 PFT Cover Fraction -comment: fraction of entire grid cell that is covered by C4 PFTs (including grass and crops). -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time typec4pft -out_name: c4PftFrac -type: real -!---------------------------------- -! - -!============ -variable_entry: rGrowth -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upward_carbon_mass_flux_due_to_plant_respiration_for_biomass_growth -units: kg m-2 s-1 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Carbon Mass Flux into Atmosphere due to Growth Autotrophic Respiration on Land -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rGrowth -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rMaint -!============ -modeling_realm: land -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: surface_upward_carbon_mass_flux_due_to_plant_respiration_for_biomass_maintenance -units: kg m-2 s-1 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Carbon Mass Flux into Atmosphere due to Maintenance Autotrophic Respiration on Land -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rMaint -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: nppLeaf -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: net_primary_productivity_of_carbon_accumulated_in_leaves -units: kg m-2 s-1 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Carbon Mass Flux due to NPP Allocation to Leaf -comment: This is the rate of carbon uptake by leaves due to NPP -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: nppLeaf -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: nppWood -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: net_primary_productivity_of_carbon_accumulated_in_wood -units: kg m-2 s-1 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Carbon Mass Flux due to NPP Allocation to Wood -comment: This is the rate of carbon uptake by wood due to NPP -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: nppWood -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: nppRoot -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: net_primary_productivity_of_carbon_accumulated_in_roots -units: kg m-2 s-1 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Carbon Mass Flux due to NPP Allocation to Roots -comment: This is the rate of carbon uptake by roots due to NPP -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: nppRoot -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: nep -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_net_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_all_land_processes_excluding_anthropogenic_land_use_change -units: kg m-2 s-1 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Net Carbon Mass Flux out of Atmosphere due to Net Ecosystem Productivity on Land. -comment: Natural flux of CO2 (expressed as a mass flux of carbon) from the atmosphere to the land calculated as the difference between uptake associated with photosynthesis and the release of CO2 from the sum of plant and soil respiration and fire. Positive flux is into the land.
-!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: nep -type: real -positive: down -!---------------------------------- -! - diff --git a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_OImon b/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_OImon deleted file mode 100644 index 7230314904..0000000000 --- a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_OImon +++ /dev/null @@ -1,1020 +0,0 @@ -table_id: Table OImon -modeling_realm: ocean - -frequency: mon - -cmor_version: 2.6 ! minimum version of CMOR that can read this table -cf_version: 1.4 ! version of CF that output conforms to -project_id: CMIP5 ! project id -table_date: 17 July 2013 ! date this table was constructed - -missing_value: 1.e20 ! value used to indicate a missing value - ! in arrays output by netCDF as 32-bit IEEE - ! floating-point numbers (float or real) - -baseURL: http://cmip-pcmdi.llnl.gov/CMIP5/dataLocation -product: output - -required_global_attributes: creation_date tracking_id forcing model_id parent_experiment_id parent_experiment_rip branch_time contact institute_id ! space separated required global attribute - -forcings: N/A Nat Ant GHG SD SI SA TO SO Oz LU Sl Vl SS Ds BC MD OC AA - -expt_id_ok: '10- or 30-year run initialized in year XXXX' 'decadalXXXX' -expt_id_ok: 'volcano-free hindcast initialized in year XXXX' 'noVolcXXXX' -expt_id_ok: 'prediction with 2010 volcano' 'volcIn2010' -expt_id_ok: 'pre-industrial control' 'piControl' -expt_id_ok: 'historical' 'historical' -expt_id_ok: 'historical extension' 'historicalExt' -expt_id_ok: 'other historical forcing' 'historicalMisc' -expt_id_ok: 'mid-Holocene' 'midHolocene' -expt_id_ok: 'last glacial maximum' 'lgm' -expt_id_ok: 'last millennium' 'past1000' -expt_id_ok: 'RCP4.5' 'rcp45' -expt_id_ok: 'RCP8.5' 'rcp85' -expt_id_ok: 'RCP2.6' 'rcp26' -expt_id_ok: 'RCP6' 'rcp60' -expt_id_ok: 'ESM pre-industrial control' 'esmControl' -expt_id_ok: 'ESM historical' 'esmHistorical' -expt_id_ok: 'ESM RCP8.5' 'esmrcp85' -expt_id_ok: 'ESM fixed climate 1' 'esmFixClim1' -expt_id_ok: 'ESM fixed climate 2' 'esmFixClim2' -expt_id_ok: 'ESM feedback 1' 'esmFdbk1' -expt_id_ok: 'ESM feedback 2' 'esmFdbk2' -expt_id_ok: '1 percent per year CO2' '1pctCO2' -expt_id_ok: 'abrupt 4XCO2' 'abrupt4xCO2' -expt_id_ok: 'natural-only' 'historicalNat' -expt_id_ok: 'GHG-only' 'historicalGHG' -expt_id_ok: 'AMIP' 'amip' -expt_id_ok: '2030 time-slice' 'sst2030' -expt_id_ok: 'control SST climatology' 'sstClim' -expt_id_ok: 'CO2 forcing' 'sstClim4xCO2' -expt_id_ok: 'all aerosol forcing' 'sstClimAerosol' -expt_id_ok: 'sulfate aerosol forcing' 'sstClimSulfate' -expt_id_ok: '4xCO2 AMIP' 'amip4xCO2' -expt_id_ok: 'AMIP plus patterned anomaly' 'amipFuture' -expt_id_ok: 'aqua planet control' 'aquaControl' -expt_id_ok: '4xCO2 aqua planet' 'aqua4xCO2' -expt_id_ok: 'aqua planet plus 4K anomaly' 'aqua4K' -expt_id_ok: 'AMIP plus 4K anomaly' 'amip4K' - - -approx_interval: 30.000000 ! approximate spacing between successive time - ! samples (in units of the output time - ! coordinate. - -!============ -axis_entry: longitude -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: longitude -units: degrees_east -axis: X !
X, Y, Z, T (default: undeclared) -long_name: longitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lon -valid_min: 0.0 -valid_max: 360.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: latitude -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: latitude -units: degrees_north -axis: Y ! X, Y, Z, T (default: undeclared) -long_name: latitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lat -valid_min: -90.0 -valid_max: 90.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: time -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: time -units: days since ? -axis: T ! X, Y, Z, T (default: undeclared) -long_name: time -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: time -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - -!============ -variable_entry: sic -!============ -modeling_realm: seaIce ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_ice_area_fraction -units: % -cell_methods: time: mean -cell_measures: area: areacello -long_name: Sea Ice Area Fraction -comment: fraction of grid cell covered by sea ice. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: sic -type: real -valid_min: -0.000225 -valid_max: 166.1 -ok_min_mean_abs: -13.33 -ok_max_mean_abs: 36.65 -!---------------------------------- -! - -!============ -variable_entry: sit -!============ -modeling_realm: seaIce ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_ice_thickness -units: m -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Sea Ice Thickness -comment: the mean thickness of sea ice in the ocean portion of the grid cell (averaging over the entire ocean portion, including the ice-free fraction). Reported as 0.0 in regions free of sea ice. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: sit -type: real -valid_min: -0.006698 -valid_max: 104.4 -ok_min_mean_abs: -0.9217 -ok_max_mean_abs: 2.582 -!---------------------------------- -! - -!============ -variable_entry: sim -!============ -modeling_realm: seaIce ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_ice_and_surface_snow_amount -units: kg m-2 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Sea Ice Plus Surface Snow Amount -comment: the mass per unit area of sea ice plus snow in the ocean portion of the grid cell (averaging over the entire ocean portion, including the ice-free fraction). Reported as 0.0 in regions free of sea ice. -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: sim -type: real -!---------------------------------- -! - -!============ -variable_entry: evap -!============ -modeling_realm: seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: water_evaporation_flux -units: kg m-2 s-1 -cell_methods: time: mean area: mean where sea_ice over sea -cell_measures: area: areacello -long_name: Water Evaporation Flux from Sea Ice -comment: the average rate that water mass evaporates (or sublimates) from the sea ice surface (i.e., kg/s) divided by the area of the ocean (i.e., open ocean + sea ice) portion of the grid cell. This quantity, multiplied both by the ocean area of the grid cell and by the length of the month, should yield the total mass of water evaporated (or sublimated) from the sea ice. Reported as 0.0 in regions free of sea ice. [This was computed differently in CMIP3.] -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: evap -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: snd -!============ -modeling_realm: seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_snow_thickness -units: m -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Snow Depth -comment: the mean thickness of snow in the ocean portion of the grid cell (averaging over the entire ocean portion, including the snow-free ocean fraction). Reported as 0.0 in regions free of snow-covered sea ice. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: snd -type: real -!---------------------------------- -! - -!============ -variable_entry: snc -!============ -modeling_realm: seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_snow_area_fraction -units: % -cell_methods: time: mean -cell_measures: area: areacello -long_name: Surface Snow Area Fraction -comment: Fraction of entire grid cell covered by snow that lies on sea ice; exclude snow that lies on land or land ice. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: snc -type: real -!---------------------------------- -! - -!============ -variable_entry: ialb -!============ -modeling_realm: seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_ice_albedo -units: 1 -cell_methods: time: mean area: mean where sea_ice -cell_measures: area: areacello -long_name: Sea Ice Albedo -comment: "Reported as ""missing"" if there is no sunlight or if a region is free of sea ice. " -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: ialb -type: real -!---------------------------------- -! - -!============ -variable_entry: ssi -!============ -modeling_realm: seaIce -!---------------------------------- -!
Variable attributes: -!---------------------------------- -standard_name: sea_ice_salinity -units: psu -cell_methods: time: mean (weighted by mass of sea ice) -cell_measures: area: areacello -long_name: Sea Ice Salinity -comment: When computing the time-mean here, the time-samples, weighted by the mass of sea ice in the grid cell, are accumulated and then divided by the sum of the weights. Reported as ""missing"" in regions free of sea ice. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: ssi -type: real -!---------------------------------- -! - -!============ -variable_entry: tsice -!============ -modeling_realm: seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_temperature -units: K -cell_methods: time: mean (weighted by area of sea ice) -cell_measures: area: areacello -long_name: Surface Temperature of Sea Ice -comment: When computing the time-mean here, the time-samples, weighted by the area of sea ice in the grid cell, are accumulated and then divided by the sum of the weights. Reported as ""missing"" in regions free of sea ice. Note this will be the surface snow temperature in regions where snow covers the sea ice. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: tsice -type: real -!---------------------------------- -! - -!============ -variable_entry: tsnint -!============ -modeling_realm: seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_ice_surface_temperature -units: K -cell_methods: time: mean (weighted by area of snow-covered sea ice) -cell_measures: area: areacello -long_name: Temperature at Interface Between Sea Ice and Snow -comment: When computing the time-mean here, the time-samples, weighted by the area of snow-covered sea ice in the grid cell, are accumulated and then divided by the sum of the weights. Reported as ""missing"" in regions free of snow-covered sea ice. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: tsnint -type: real -!---------------------------------- -! - -!============ -variable_entry: pr -!============ -modeling_realm: seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: rainfall_flux -units: kg m-2 s-1 -cell_methods: time: mean area: mean where sea_ice over sea -cell_measures: area: areacello -long_name: Surface Rainfall Rate into the Sea Ice Portion of the Grid Cell -comment: where sea ice over sea: this is the water mass per unit time falling as rain onto the sea ice portion of a grid cell divided by the area of the ocean portion of the grid cell (including both ice-free and sea-ice covered fractions). Reported as 0. in regions free of sea ice. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: pr -type: real -!---------------------------------- -! - -!============ -variable_entry: prsn -!============ -modeling_realm: seaIce -!---------------------------------- -!
Variable attributes: -!---------------------------------- -standard_name: snowfall_flux -units: kg m-2 s-1 -cell_methods: time: mean area: mean where sea_ice over sea -cell_measures: area: areacello -long_name: Surface Snowfall Rate into the Sea Ice Portion of the Grid Cell -comment: where sea ice over sea: this is computed as the total mass per unit time of solid-phase precipitation falling onto the sea ice portion of a grid cell divided by the area of the ocean portion of the grid cell (including both ice-free and sea-ice covered fractions). Reported as 0. in regions free of sea ice. (Snowfall flux includes all types of solid-phase precipitation.) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: prsn -type: real -!---------------------------------- -! - -!============ -variable_entry: ageice -!============ -modeling_realm: seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: age_of_sea_ice -units: years -cell_methods: time: mean (weighted by mass of sea ice) -cell_measures: area: areacello -long_name: Age of Sea Ice -comment: When computing the time-mean here, the time samples, weighted by the mass of sea ice in the grid cell, are accumulated and then divided by the sum of the weights. Reported as ""missing"" in regions free of sea ice. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: ageice -type: real -!---------------------------------- -! - -!============ -variable_entry: grFrazil -!============ -modeling_realm: seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_sea_ice_amount_due_to_frazil_ice_accumulation_in_leads -units: kg m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Frazil Sea Ice Growth (Leads) Rate -comment: the rate of change of sea ice mass due to frazil sea ice formation divided by the area of the ocean portion of the grid cell. Reported as 0.0 in regions free of sea ice. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: grFrazil -type: real -!---------------------------------- -! - -!============ -variable_entry: grCongel -!============ -modeling_realm: seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_sea_ice_amount_due_to_congelation_ice_accumulation -units: kg m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Congelation Sea Ice Growth Rate -comment: the rate of change of sea ice mass due to congelation sea ice divided by the area of the ocean portion of the grid cell. Reported as 0.0 in regions free of sea ice. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: grCongel -type: real -!---------------------------------- -! - -!============ -variable_entry: grLateral -!============ -modeling_realm: seaIce -!---------------------------------- -!
Variable attributes: -!---------------------------------- -standard_name: tendency_of_sea_ice_amount_due_to_lateral_growth_of_ice_floes -units: kg m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Lateral Sea Ice Growth Rate -comment: the rate of change of sea ice mass due to lateral growth alone of the sea ice divided by the area of the ocean portion of the grid cell. Reported as 0.0 in regions free of sea ice. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: grLateral -type: real -!---------------------------------- -! - -!============ -variable_entry: snoToIce -!============ -modeling_realm: seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_sea_ice_amount_due_to_snow_conversion -units: kg m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Snow-Ice Formation Rate -comment: the rate of change of sea ice mass due to transformation of snow to sea ice, divided by the area of the ocean portion of the grid cell. Reported as 0.0 in regions free of snow-covered sea ice. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: snoToIce -type: real -!---------------------------------- -! - -!============ -variable_entry: snomelt -!============ -modeling_realm: seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_snow_melt_flux -units: kg m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Snow Melt Rate -comment: the rate of change of snow mass due to melting, divided by the area of the ocean portion of the grid cell. Reported as 0.0 in regions free of sea ice. Includes falling snow that melts on impact with the surface. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: snomelt -type: real -!---------------------------------- -! - -!============ -variable_entry: tmelt -!============ -modeling_realm: seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_sea_ice_amount_due_to_surface_melting -units: kg m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Rate of Melt at Upper Surface of Sea Ice -comment: the rate of change of sea ice mass due to melting at its upper surface, divided by the area of the ocean portion of the grid cell. Reported as 0.0 in regions free of sea ice. Does not include rate of change of snow mass. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: tmelt -type: real -!---------------------------------- -! - -!============ -variable_entry: bmelt -!============ -modeling_realm: seaIce -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: tendency_of_sea_ice_amount_due_to_basal_melting -units: kg m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Rate of Melt at Sea Ice Base -comment: the rate of change of sea ice mass due to melting at its lower surface, divided by the area of the ocean portion of the grid cell. Reported as 0.0 in regions free of sea ice. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: bmelt -type: real -!---------------------------------- -! - -!============ -variable_entry: hcice -!============ -modeling_realm: seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: integral_of_sea_ice_temperature_wrt_depth_expressed_as_heat_content -units: J m-2 -cell_methods: time: mean (weighted by mass of sea ice) -cell_measures: area: areacello -long_name: Sea Ice Heat Content -comment: Ice at 0 Celsius is assumed to have a heat content of 0 J. When averaging over time, this quantity is weighted by the mass of sea ice. Reported as ""missing"" in regions free of sea ice. Does not include heat content of snow. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hcice -type: real -!---------------------------------- -! - -!============ -variable_entry: rsdssi -!============ -modeling_realm: seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downwelling_shortwave_flux_in_air -units: W m-2 -cell_methods: time: mean area: mean where sea_ice over sea -cell_measures: area: areacello -long_name: Downwelling Shortwave over Sea Ice -comment: the downwelling shortwave flux in regions of sea ice divided by the area of the ocean portion of the grid cell. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rsdssi -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rsussi -!============ -modeling_realm: seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upwelling_shortwave_flux_in_air -units: W m-2 -cell_methods: time: mean area: mean where sea_ice over sea -cell_measures: area: areacello -long_name: Upward Shortwave over Sea Ice -comment: the upward shortwave flux in regions of sea ice divided by the area of the ocean portion of the grid cell. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rsussi -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rldssi -!============ -modeling_realm: seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downwelling_longwave_flux_in_air -units: W m-2 -cell_methods: time: mean area: mean where sea_ice over sea -cell_measures: area: areacello -long_name: Downwelling Long Wave over Sea Ice -comment: the downwelling longwave flux in regions of sea ice divided by the area of the ocean portion of the grid cell. -!---------------------------------- -!
Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rldssi -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rlussi -!============ -modeling_realm: seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upwelling_longwave_flux_in_air -units: W m-2 -cell_methods: time: mean area: mean where sea_ice over sea -cell_measures: area: areacello -long_name: Upward Long Wave over Sea Ice -comment: the upward longwave flux in regions of sea ice divided by the area of the ocean portion of the grid cell. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rlussi -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: hfssi -!============ -modeling_realm: seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upward_sensible_heat_flux -units: W m-2 -cell_methods: time: mean area: mean where sea_ice over sea -cell_measures: area: areacello -long_name: Surface Upward Sensible Heat Flux over Sea Ice -comment: the upward sensible heat flux in regions of sea ice divided by the area of the ocean portion of the grid cell. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfssi -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: hflssi -!============ -modeling_realm: seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upward_latent_heat_flux -units: W m-2 -cell_methods: time: mean area: mean where sea_ice over sea -cell_measures: area: areacello -long_name: Surface Upward Latent Heat Flux over Sea Ice -comment: the upward latent heat flux in regions of sea ice divided by the area of the ocean portion of the grid cell. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hflssi -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: sblsi -!============ -modeling_realm: seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_snow_and_ice_sublimation_flux -units: kg m-2 s-1 -cell_methods: time: mean area: mean where sea_ice over sea -cell_measures: area: areacello -long_name: Sublimation over Sea Ice -comment: the upward flux of water vapor to the atmosphere due to sublimation of snow and sea ice in regions of sea ice divided by the area of the ocean portion of the grid cell. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: sblsi -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: transix -!============ -modeling_realm: seaIce -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: sea_ice_x_transport -units: kg s-1 -cell_methods: time: mean -long_name: X-Component of Sea Ice Mass Transport -comment: The sea ice mass transport is 0.0 in ice-free regions of the ocean. Snow is included in calculation of mass. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: transix -type: real -!---------------------------------- -! - -!============ -variable_entry: transiy -!============ -modeling_realm: seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_ice_y_transport -units: kg s-1 -cell_methods: time: mean -long_name: Y-Component of Sea Ice Mass Transport -comment: The sea ice mass transport is 0.0 in ice-free regions of the ocean. Snow is included in calculation of mass. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: transiy -type: real -!---------------------------------- -! - -!============ -variable_entry: transifs -!============ -modeling_realm: seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_ice_transport_across_line -units: kg s-1 -cell_methods: time: mean -long_name: Sea Ice Mass Transport Through Fram Strait -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: time -out_name: transifs -type: real -!---------------------------------- -! - -!============ -variable_entry: strairx -!============ -modeling_realm: seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downward_x_stress -units: N m-2 -cell_methods: time: mean (weighted by area of sea ice) -long_name: X-Component of Atmospheric Stress On Sea Ice -comment: When computing the time-mean here, the time samples, weighted by the area of sea ice, are accumulated and then divided by the sum of the weights. Reported as ""missing"" in regions free of sea ice. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: strairx -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: strairy -!============ -modeling_realm: seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downward_y_stress -units: N m-2 -cell_methods: time: mean (weighted by area of sea ice) -long_name: Y-Component of Atmospheric Stress On Sea Ice -comment: When computing the time-mean here, the time samples, weighted by the area of sea ice, are accumulated and then divided by the sum of the weights. Reported as ""missing"" in regions free of sea ice. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: strairy -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: strocnx -!============ -modeling_realm: seaIce ocean -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: upward_x_stress_at_sea_ice_base -units: N m-2 -cell_methods: time: mean (weighted by area of sea ice) -long_name: X-Component of Ocean Stress On Sea Ice -comment: When computing the time-mean here, the time samples, weighted by the area of sea ice, are accumulated and then divided by the sum of the weights. Report as ""missing"" in regions free of sea ice. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: strocnx -type: real -!---------------------------------- -! - -!============ -variable_entry: strocny -!============ -modeling_realm: seaIce ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: upward_y_stress_at_sea_ice_base -units: N m-2 -cell_methods: time: mean (weighted by area of sea ice) -long_name: Y-Component of Ocean Stress On Sea Ice -comment: When computing the time-mean here, the time samples, weighted by the area of sea ice, are accumulated and then divided by the sum of the weights. Reported as ""missing"" in regions free of sea ice. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: strocny -type: real -!---------------------------------- -! - -!============ -variable_entry: streng -!============ -modeling_realm: seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: compressive_strength_of_sea_ice -units: N m-1 -cell_methods: time: mean (weighted by area of sea ice) -cell_measures: area: areacello -long_name: Compressive Sea Ice Strength -comment: When computing the time-mean here, the time samples, weighted by the area of sea ice, are accumulated and then divided by the sum of the weights. Reported as ""missing"" in regions free of sea ice. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: streng -type: real -!---------------------------------- -! - -!============ -variable_entry: divice -!============ -modeling_realm: seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: divergence_of_sea_ice_velocity -units: s-1 -cell_methods: time: mean (weighted by area of sea ice) -cell_measures: area: areacello -long_name: Strain Rate Divergence of Sea Ice -comment: When computing the time-mean here, the time samples, weighted by the area of sea ice, are accumulated and then divided by the sum of the weights. Reported as ""missing"" in regions free of sea ice. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: divice -type: real -!---------------------------------- -! - -!============ -variable_entry: eshrice -!============ -modeling_realm: seaIce -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: eastward_derivative_of_northward_sea_ice_velocity -units: s-1 -cell_methods: time: mean (weighted by area of sea ice) -cell_measures: area: areacello -long_name: Eastward Derivative of Northward Sea Ice Velocity -comment: When computing the time-mean here, the time samples, weighted by the area of sea ice, are accumulated and then divided by the sum of the weights. Reported as ""missing"" in regions free of sea ice. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: eshrice -type: real -!---------------------------------- -! - -!============ -variable_entry: nshrice -!============ -modeling_realm: seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: northward_derivative_of_eastward_sea_ice_velocity -units: s-1 -cell_methods: time: mean (weighted by area of sea ice) -cell_measures: area: areacello -long_name: Northward Derivative of Eastward Sea Ice Velocity -comment: When computing the time-mean here, the time samples, weighted by the area of sea ice, are accumulated and then divided by the sum of the weights. Reported as ""missing"" in regions free of sea ice. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: nshrice -type: real -!---------------------------------- -! - -!============ -variable_entry: ridgice -!============ -modeling_realm: seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_sea_ice_area_fraction_due_to_ridging -units: s-1 -cell_methods: time: mean (weighted by area of sea ice) -cell_measures: area: areacello -long_name: Sea Ice Ridging Rate -comment: When computing the time-mean here, the time samples, weighted by the area of sea ice, are accumulated and then divided by the sum of the weights. Reported as ""missing"" in regions free of sea ice. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: ridgice -type: real -!---------------------------------- -! - diff --git a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_Oclim b/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_Oclim deleted file mode 100644 index 993e91e01f..0000000000 --- a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_Oclim +++ /dev/null @@ -1,1250 +0,0 @@ -table_id: Table Oclim -modeling_realm: ocean - -frequency: monClim - -cmor_version: 2.6 ! minimum version of CMOR that can read this table -cf_version: 1.4 ! version of CF that output conforms to -project_id: CMIP5 ! project id -table_date: 17 July 2013 ! date this table was constructed - -missing_value: 1.e20 ! value used to indicate a missing value - ! in arrays output by netCDF as 32-bit IEEE - ! floating-point numbers (float or real) - -baseURL: http://cmip-pcmdi.llnl.gov/CMIP5/dataLocation -product: output - -required_global_attributes: creation_date tracking_id forcing model_id parent_experiment_id parent_experiment_rip branch_time contact institute_id ! 
space separated required global attribute - -forcings: N/A Nat Ant GHG SD SI SA TO SO Oz LU Sl Vl SS Ds BC MD OC AA - -expt_id_ok: '10- or 30-year run initialized in year XXXX' 'decadalXXXX' -expt_id_ok: 'volcano-free hindcast initialized in year XXXX' 'noVolcXXXX' -expt_id_ok: 'prediction with 2010 volcano' 'volcIn2010' -expt_id_ok: 'pre-industrial control' 'piControl' -expt_id_ok: 'historical' 'historical' -expt_id_ok: 'historical extension' 'historicalExt' -expt_id_ok: 'other historical forcing' 'historicalMisc' -expt_id_ok: 'mid-Holocene' 'midHolocene' -expt_id_ok: 'last glacial maximum' 'lgm' -expt_id_ok: 'last millennium' 'past1000' -expt_id_ok: 'RCP4.5' 'rcp45' -expt_id_ok: 'RCP8.5' 'rcp85' -expt_id_ok: 'RCP2.6' 'rcp26' -expt_id_ok: 'RCP6' 'rcp60' -expt_id_ok: 'ESM pre-industrial control' 'esmControl' -expt_id_ok: 'ESM historical' 'esmHistorical' -expt_id_ok: 'ESM RCP8.5' 'esmrcp85' -expt_id_ok: 'ESM fixed climate 1' 'esmFixClim1' -expt_id_ok: 'ESM fixed climate 2' 'esmFixClim2' -expt_id_ok: 'ESM feedback 1' 'esmFdbk1' -expt_id_ok: 'ESM feedback 2' 'esmFdbk2' -expt_id_ok: '1 percent per year CO2' '1pctCO2' -expt_id_ok: 'abrupt 4XCO2' 'abrupt4xCO2' -expt_id_ok: 'natural-only' 'historicalNat' -expt_id_ok: 'GHG-only' 'historicalGHG' -expt_id_ok: 'AMIP' 'amip' -expt_id_ok: '2030 time-slice' 'sst2030' -expt_id_ok: 'control SST climatology' 'sstClim' -expt_id_ok: 'CO2 forcing' 'sstClim4xCO2' -expt_id_ok: 'all aerosol forcing' 'sstClimAerosol' -expt_id_ok: 'sulfate aerosol forcing' 'sstClimSulfate' -expt_id_ok: '4xCO2 AMIP' 'amip4xCO2' -expt_id_ok: 'AMIP plus patterned anomaly' 'amipFuture' -expt_id_ok: 'aqua planet control' 'aquaControl' -expt_id_ok: '4xCO2 aqua planet' 'aqua4xCO2' -expt_id_ok: 'aqua planet plus 4K anomaly' 'aqua4K' -expt_id_ok: 'AMIP plus 4K anomaly' 'amip4K' - - -approx_interval: 30.000000 ! approximate spacing between successive time - ! samples (in units of the output time - ! coordinate. - -generic_levels: olevel - -!============ -axis_entry: longitude -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: longitude -units: degrees_east -axis: X ! X, Y, Z, T (default: undeclared) -long_name: longitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lon -valid_min: 0.0 -valid_max: 360.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: latitude -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: latitude -units: degrees_north -axis: Y ! X, Y, Z, T (default: undeclared) -long_name: latitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lat -valid_min: -90.0 -valid_max: 90.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: time2 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: time -units: days since ? -axis: T ! X, Y, Z, T (default: undeclared) -long_name: time -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: time -stored_direction: increasing -type: double -must_have_bounds: yes -climatology: yes -!---------------------------------- -! 
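The `time2` axis deleted above is a CF climatological time axis (`climatology: yes`), so its bounds record the span of years and months each climatological cell averages over, rather than ordinary cell edges. As an illustration only (the function name and plain-`datetime` handling below are not part of the table, and encoding the dates into the `days since ?` units is left out), a monthly climatology built over years `year_start..year_end` would carry bounds like:

```python
import datetime

def climatology_bounds(year_start, year_end):
    """Illustrative CF climatology bounds for a 12-step monthly
    climatology averaged over year_start..year_end: the cell for
    month m spans from the start of month m in the first year to
    the start of month m+1 in the last year."""
    bounds = []
    for month in range(1, 13):
        lower = datetime.date(year_start, month, 1)
        if month == 12:
            upper = datetime.date(year_end + 1, 1, 1)
        else:
            upper = datetime.date(year_end, month + 1, 1)
        bounds.append((lower, upper))
    return bounds

# e.g. climatology_bounds(1986, 2005)[0]
# -> (datetime.date(1986, 1, 1), datetime.date(2005, 2, 1))
```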
- -!============ -axis_entry: depth_coord -!============ -! -! This vertical coordinate is used in z-coordinate models -! The units are meters (m), and it has a value of 0. at the surface -! and becomes more and more positive with depth. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: depth -units: m -axis: Z -positive: down -long_name: ocean depth coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: increasing -valid_min: 0. -valid_max: 12000. -!---------------------------------- -! -!============ -axis_entry: olev -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -axis: Z ! X, Y, Z, T (default: undeclared) -positive: down ! up or down (default: undeclared) -long_name: generic ocean level -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lev -stored_direction: increasing -type: double -must_have_bounds: no -!---------------------------------- -! -!============ -axis_entry: ocean_double_sigma -!============ -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: ocean_double_sigma -axis: Z -positive: up -long_name: ocean double sigma coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -formula: for k <= k_c:\n z= sigma*f \n for k > k_c:\n z= f + (sigma-1)*(depth-f) \n f= 0.5*(z1+ z2) + 0.5*(z1-z2)* tanh(2*a/(z1-z2)*(depth-href)) -z_factors: sigma: sigma depth: depth z1: z1 z2: z2 a: a_coeff href: href k_c: k_c -z_bounds_factors: sigma: sigma_bnds depth: depth z1: z1 z2: z2 a: a href: href k_c: k_c -!---------------------------------- -! -!============ -axis_entry: ocean_sigma_z -!============ -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: ocean_sigma_z -axis: Z -long_name: ocean sigma over z coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -formula: for k <= nsigma: z = eta + sigma*(min(depth_c,depth)+eta) ; for k > nsigma: z = zlev -z_factors: sigma: sigma eta: eta depth: depth depth_c: depth_c nsigma: nsigma zlev: zlev -z_bounds_factors: sigma: sigma_bnds eta: eta depth: depth depth_c: depth_c nsigma: nsigma zlev: zlev_bnds -!---------------------------------- -! -!============ -axis_entry: ocean_s -!============ -! -! This coordinate is dimensionless and varies from 0 at the surface to -1. at the ocean floor. -! The values of s, which appears in the formula below, should be stored as ocean_s. -! Note that in the netCDF file the variable will be named "lev", not ocean_s. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: ocean_s_coordinate -axis: Z -positive: up -long_name: ocean s-coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: -1. -valid_max: 0. 
-formula: z = eta*(1+s) + depth_c*s + (depth-depth_c)*C \n where \n C=(1-b)*sinh(a*s)/sinh(a) +\n b*(tanh(a*(s+0.5))/(2*tanh(0.5*a)) - 0.5) -z_factors: s: lev eta: eta depth: depth a: a_coeff b: b_coeff depth_c: depth_c -z_bounds_factors: s: lev_bnds eta: eta depth: depth a: a b: b depth_c: depth_c -!---------------------------------- -! -!============ -axis_entry: ocean_sigma -!============ -! -! This coordinate is dimensionless and varies from 0 at the surface to -1. at the ocean floor. -! The values of sigma, which appears in the formula below, should be stored as ocean_sigma. -! Note that in the netCDF file the variable will be named "lev", not ocean_sigma. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: ocean_sigma_coordinate -axis: Z -positive: up -long_name: ocean sigma coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: -1. -valid_max: 0. -formula: z = eta + sigma*(depth+eta) -z_factors: sigma: lev eta: eta depth: depth -z_bounds_factors: sigma: lev_bnds eta: eta depth: depth -!---------------------------------- -! -! -! *************************************************************** -! -! Vertical coordinate formula_terms: -! -! *************************************************************** -! -!============ -variable_entry: eta -!============ -!---------------------------------- -! Variable attributes: -!---------------------------------- -units: m -cell_methods: time: mean -long_name: Sea Surface Height -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time2 -type: real -!---------------------------------- -! -! -!============ -variable_entry: depth -!============ -!---------------------------------- -! Variable attributes: -!---------------------------------- -units: m -long_name: Sea Floor Depth -comment: Ocean bathymetry. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude -out_name: depth -type: real -valid_min: 0. -valid_max: 12000. -ok_min_mean_abs: 2000. -ok_max_mean_abs: 5000. -!---------------------------------- -! -! -!============ -variable_entry: sigma -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: sigma(k) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: olevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: sigma_bnds -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: sigma(k+1/2) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: olevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: zlev -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: zlev(k) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: olevel -type: double -!---------------------------------- -! -! 
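The `formula`/`z_factors` pairs in these axis entries define how the dimensionless `lev` values stored in a file are turned into real depths. A minimal sketch of the `ocean_sigma` and `ocean_s` conversions as written in the table (plain NumPy with scalar or broadcastable inputs; illustrative only, not ESMValTool code):

```python
import numpy as np

def ocean_sigma_to_depth(sigma, eta, depth):
    """ocean_sigma formula from the table: z = eta + sigma*(depth + eta),
    with sigma in [-1, 0] (0 at the surface, -1 at the sea floor)."""
    return eta + sigma * (depth + eta)

def ocean_s_to_depth(s, eta, depth, depth_c, a, b):
    """ocean_s formula from the table:
    z = eta*(1 + s) + depth_c*s + (depth - depth_c)*C, where
    C = (1 - b)*sinh(a*s)/sinh(a)
      + b*(tanh(a*(s + 0.5))/(2*tanh(0.5*a)) - 0.5)."""
    c = ((1 - b) * np.sinh(a * s) / np.sinh(a)
         + b * (np.tanh(a * (s + 0.5)) / (2 * np.tanh(0.5 * a)) - 0.5))
    return eta * (1 + s) + depth_c * s + (depth - depth_c) * c
```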
-!============ -variable_entry: zlev_bnds -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: zlev(k+1/2) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: olevel -type: double -!---------------------------------- -! -! -! -!============ -variable_entry: depth_c -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: depth_c -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: double -!---------------------------------- -! -! -!============ -variable_entry: a -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: coefficient a -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: double -!---------------------------------- -! -! -!============ -variable_entry: b -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: coefficient b -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: double -!---------------------------------- -! -! -!============ -variable_entry: nsigma -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: nsigma -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: integer -!---------------------------------- -! -! -!============ -variable_entry: z1 -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: z1 -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: double -!---------------------------------- -! -! -!============ -variable_entry: z2 -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: z2 -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: double -!---------------------------------- -! -! -!============ -variable_entry: href -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: href -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: double -!---------------------------------- -! -! -!============ -variable_entry: k_c -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: k_c -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: integer -!---------------------------------- -! -! - -!============ -variable_entry: difvho -!============ -modeling_realm: ocean -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: ocean_vertical_heat_diffusivity -units: m2 s-1 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello volume: volcello -long_name: Ocean Vertical Heat Diffusivity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time2 -out_name: difvho -type: real -!---------------------------------- -! - -!============ -variable_entry: difvso -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_vertical_salt_diffusivity -units: m2 s-1 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello volume: volcello -long_name: Ocean Vertical Salt Diffusivity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time2 -out_name: difvso -type: real -!---------------------------------- -! - -!============ -variable_entry: difvtrbo -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_vertical_tracer_diffusivity_due_to_background -units: m2 s-1 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello volume: volcello -long_name: Ocean Vertical Tracer Diffusivity due to Background -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time2 -out_name: difvtrbo -type: real -!---------------------------------- -! - -!============ -variable_entry: difvtrto -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_vertical_tracer_diffusivity_due_to_tides -units: m2 s-1 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello volume: volcello -long_name: Ocean Vertical Tracer Diffusivity due to Tides -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time2 -out_name: difvtrto -type: real -!---------------------------------- -! - -!============ -variable_entry: tnpeo -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_ocean_potential_energy_content -units: W m-2 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello volume: volcello -long_name: Tendency of Ocean Potential Energy Content -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time2 -out_name: tnpeo -type: real -!---------------------------------- -! - -!============ -variable_entry: tnpeot -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_ocean_potential_energy_content_due_to_tides -units: W m-2 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello volume: volcello -long_name: Tendency of Ocean Potential Energy Content due to Tides -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time2 -out_name: tnpeot -type: real -!---------------------------------- -! - -!============ -variable_entry: tnpeotb -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_ocean_potential_energy_content_due_to_background -units: W m-2 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello volume: volcello -long_name: Tendency of Ocean Potential Energy Content due to Background -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time2 -out_name: tnpeotb -type: real -!---------------------------------- -! - -!============ -variable_entry: difvmo -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_vertical_momentum_diffusivity -units: m2 s-1 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello volume: volcello -long_name: Ocean Vertical Momentum Diffusivity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time2 -out_name: difvmo -type: real -!---------------------------------- -! - -!============ -variable_entry: difvmbo -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_vertical_momentum_diffusivity_due_to_background -units: m2 s-1 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello volume: volcello -long_name: Ocean Vertical Momentum Diffusivity due to Background -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time2 -out_name: difvmbo -type: real -!---------------------------------- -! - -!============ -variable_entry: difvmto -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_vertical_momentum_diffusivity_due_to_tides -units: m2 s-1 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello volume: volcello -long_name: Ocean Vertical Momentum Diffusivity due to Tides -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time2 -out_name: difvmto -type: real -!---------------------------------- -! - -!============ -variable_entry: difvmfdo -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_vertical_momentum_diffusivity_due_to_form_drag -units: m2 s-1 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello volume: volcello -long_name: Ocean Vertical Momentum Diffusivity due to Form Drag -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time2 -out_name: difvmfdo -type: real -!---------------------------------- -! 
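All of these Oclim fields share `cell_methods: time: mean within years time: mean over years`, i.e. a monthly climatology: monthly means are formed within each year, then each calendar month is averaged across years. A sketch of that second collapse, starting from the already-formed monthly means (hypothetical array layout, not the CMOR implementation):

```python
import numpy as np

def monthly_climatology(monthly_means):
    """Collapse a stack of monthly means with shape (n_years*12, ...),
    ordered Jan..Dec within each year, into a 12-step climatology:
    'time: mean within years time: mean over years'."""
    if monthly_means.shape[0] % 12:
        raise ValueError("expected complete years of monthly means")
    per_year = monthly_means.reshape(-1, 12, *monthly_means.shape[1:])
    return per_year.mean(axis=0)  # average each calendar month over years
```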
- -!============ -variable_entry: dispkevfo -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_kinetic_energy_dissipation_per_unit_area_due_to_vertical_friction -units: W m-2 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello volume: volcello -long_name: Ocean Kinetic Energy Dissipation Per Unit Area due to Vertical Friction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time2 -out_name: dispkevfo -type: real -!---------------------------------- -! - -!============ -variable_entry: diftrblo -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_tracer_bolus_laplacian_diffusivity -units: m2 s-1 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello volume: volcello -long_name: Ocean Tracer Bolus Laplacian Diffusivity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time2 -out_name: diftrblo -type: real -!---------------------------------- -! - -!============ -variable_entry: diftrbbo -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_tracer_bolus_biharmonic_diffusivity -units: m4 s-1 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello volume: volcello -long_name: Ocean Tracer Bolus Biharmonic Diffusivity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time2 -out_name: diftrbbo -type: real -!---------------------------------- -! - -!============ -variable_entry: diftrelo -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_tracer_epineutral_laplacian_diffusivity -units: m2 s-1 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello volume: volcello -long_name: Ocean Tracer Epineutral Laplacian Diffusivity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time2 -out_name: diftrelo -type: real -!---------------------------------- -! - -!============ -variable_entry: diftrebo -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_tracer_epineutral_biharmonic_diffusivity -units: m4 s-1 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello volume: volcello -long_name: Ocean Tracer Epineutral Biharmonic Diffusivity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time2 -out_name: diftrebo -type: real -!---------------------------------- -! - -!============ -variable_entry: diftrxylo -!============ -modeling_realm: ocean -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: ocean_tracer_xy_laplacian_diffusivity -units: m2 s-1 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello volume: volcello -long_name: Ocean Tracer XY Laplacian Diffusivity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time2 -out_name: diftrxylo -type: real -!---------------------------------- -! - -!============ -variable_entry: diftrxybo -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_tracer_xy_biharmonic_diffusivity -units: m4 s-1 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello volume: volcello -long_name: Ocean Tracer XY Biharmonic Diffusivity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time2 -out_name: diftrxybo -type: real -!---------------------------------- -! - -!============ -variable_entry: tnkebto -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_ocean_eddy_kinetic_energy_content_due_to_bolus_transport -units: W m-2 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello volume: volcello -long_name: Tendency of Ocean Eddy Kinetic Energy Content due to Bolus Transport -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time2 -out_name: tnkebto -type: real -!---------------------------------- -! - -!============ -variable_entry: difmxylo -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_momentum_xy_laplacian_diffusivity -units: m2 s-1 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello volume: volcello -long_name: Ocean Momentum XY Laplacian Diffusivity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time2 -out_name: difmxylo -type: real -!---------------------------------- -! - -!============ -variable_entry: difmxybo -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_momentum_xy_biharmonic_diffusivity -units: m4 s-1 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello volume: volcello -long_name: Ocean Momentum XY Biharmonic Diffusivity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time2 -out_name: difmxybo -type: real -!---------------------------------- -! - -!============ -variable_entry: dispkexyfo -!============ -modeling_realm: ocean -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: ocean_kinetic_energy_dissipation_per_unit_area_due_to_xy_friction -units: W m-2 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello volume: volcello -long_name: Ocean Kinetic Energy Dissipation Per Unit Area due to XY Friction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time2 -out_name: dispkexyfo -type: real -!---------------------------------- -! - -!============ -variable_entry: diftrblo2d -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_tracer_bolus_laplacian_diffusivity -units: m2 s-1 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello -long_name: Ocean Tracer Bolus Laplacian Diffusivity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time2 -out_name: diftrblo -type: real -!---------------------------------- -! - -!============ -variable_entry: diftrbbo2d -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_tracer_bolus_biharmonic_diffusivity -units: m4 s-1 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello -long_name: Ocean Tracer Bolus Biharmonic Diffusivity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time2 -out_name: diftrbbo -type: real -!---------------------------------- -! - -!============ -variable_entry: diftrelo2d -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_tracer_epineutral_laplacian_diffusivity -units: m2 s-1 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello -long_name: Ocean Tracer Epineutral Laplacian Diffusivity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time2 -out_name: diftrelo -type: real -!---------------------------------- -! - -!============ -variable_entry: diftrebo2d -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_tracer_epineutral_biharmonic_diffusivity -units: m4 s-1 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello -long_name: Ocean Tracer Epineutral Biharmonic Diffusivity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time2 -out_name: diftrebo -type: real -!---------------------------------- -! - -!============ -variable_entry: diftrxylo2d -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_tracer_xy_laplacian_diffusivity -units: m2 s-1 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello -long_name: Ocean Tracer XY Laplacian Diffusivity -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: longitude latitude time2 -out_name: diftrxylo -type: real -!---------------------------------- -! - -!============ -variable_entry: diftrxybo2d -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_tracer_xy_biharmonic_diffusivity -units: m4 s-1 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello -long_name: Ocean Tracer XY Biharmonic Diffusivity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time2 -out_name: diftrxybo -type: real -!---------------------------------- -! - -!============ -variable_entry: tnkebto2d -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_ocean_eddy_kinetic_energy_content_due_to_bolus_transport -units: W m-2 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello -long_name: Tendency of Ocean Eddy Kinetic Energy Content due to Bolus Transport -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time2 -out_name: tnkebto -type: real -!---------------------------------- -! - -!============ -variable_entry: difmxylo2d -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_momentum_xy_laplacian_diffusivity -units: m2 s-1 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello -long_name: Ocean Momentum XY Laplacian Diffusivity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time2 -out_name: difmxylo -type: real -!---------------------------------- -! - -!============ -variable_entry: difmxybo2d -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_momentum_xy_biharmonic_diffusivity -units: m4 s-1 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello -long_name: Ocean Momentum XY Biharmonic Diffusivity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time2 -out_name: difmxybo -type: real -!---------------------------------- -! - -!============ -variable_entry: dispkexyfo2d -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_kinetic_energy_dissipation_per_unit_area_due_to_xy_friction -units: W m-2 -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello -long_name: Ocean Kinetic Energy Dissipation Per Unit Area due to XY Friction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time2 -out_name: dispkexyfo -type: real -!---------------------------------- -! - -!============ -variable_entry: zfull -!============ -modeling_realm: ocean -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: depth_below_geoid -units: m -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello -long_name: Depth Below Geoid of Ocean Layer -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time2 -out_name: zfull -type: real -!---------------------------------- -! - -!============ -variable_entry: zhalf -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: depth_below_geoid -units: m -cell_methods: time: mean within years time: mean over years -cell_measures: area: areacello -long_name: Depth Below Geoid of Interfaces Between Ocean Layers -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time2 -out_name: zhalf -type: real -!---------------------------------- -! - diff --git a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_Omon b/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_Omon deleted file mode 100644 index e64f3a68a2..0000000000 --- a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_Omon +++ /dev/null @@ -1,4789 +0,0 @@ -table_id: Table Omon -modeling_realm: ocean - -frequency: mon - -cmor_version: 2.6 ! minimum version of CMOR that can read this table -cf_version: 1.4 ! version of CF that output conforms to -project_id: CMIP5 ! project id -table_date: 17 July 2013 ! date this table was constructed - -missing_value: 1.e20 ! value used to indicate a missing value - ! in arrays output by netCDF as 32-bit IEEE - ! floating-point numbers (float or real) - -baseURL: http://cmip-pcmdi.llnl.gov/CMIP5/dataLocation -product: output - -required_global_attributes: creation_date tracking_id forcing model_id parent_experiment_id parent_experiment_rip branch_time contact institute_id ! 
space separated required global attribute - -forcings: N/A Nat Ant GHG SD SI SA TO SO Oz LU Sl Vl SS Ds BC MD OC AA - -expt_id_ok: '10- or 30-year run initialized in year XXXX' 'decadalXXXX' -expt_id_ok: 'volcano-free hindcast initialized in year XXXX' 'noVolcXXXX' -expt_id_ok: 'prediction with 2010 volcano' 'volcIn2010' -expt_id_ok: 'pre-industrial control' 'piControl' -expt_id_ok: 'historical' 'historical' -expt_id_ok: 'historical extension' 'historicalExt' -expt_id_ok: 'other historical forcing' 'historicalMisc' -expt_id_ok: 'mid-Holocene' 'midHolocene' -expt_id_ok: 'last glacial maximum' 'lgm' -expt_id_ok: 'last millennium' 'past1000' -expt_id_ok: 'RCP4.5' 'rcp45' -expt_id_ok: 'RCP8.5' 'rcp85' -expt_id_ok: 'RCP2.6' 'rcp26' -expt_id_ok: 'RCP6' 'rcp60' -expt_id_ok: 'ESM pre-industrial control' 'esmControl' -expt_id_ok: 'ESM historical' 'esmHistorical' -expt_id_ok: 'ESM RCP8.5' 'esmrcp85' -expt_id_ok: 'ESM fixed climate 1' 'esmFixClim1' -expt_id_ok: 'ESM fixed climate 2' 'esmFixClim2' -expt_id_ok: 'ESM feedback 1' 'esmFdbk1' -expt_id_ok: 'ESM feedback 2' 'esmFdbk2' -expt_id_ok: '1 percent per year CO2' '1pctCO2' -expt_id_ok: 'abrupt 4XCO2' 'abrupt4xCO2' -expt_id_ok: 'natural-only' 'historicalNat' -expt_id_ok: 'GHG-only' 'historicalGHG' -expt_id_ok: 'AMIP' 'amip' -expt_id_ok: '2030 time-slice' 'sst2030' -expt_id_ok: 'control SST climatology' 'sstClim' -expt_id_ok: 'CO2 forcing' 'sstClim4xCO2' -expt_id_ok: 'all aerosol forcing' 'sstClimAerosol' -expt_id_ok: 'sulfate aerosol forcing' 'sstClimSulfate' -expt_id_ok: '4xCO2 AMIP' 'amip4xCO2' -expt_id_ok: 'AMIP plus patterned anomaly' 'amipFuture' -expt_id_ok: 'aqua planet control' 'aquaControl' -expt_id_ok: '4xCO2 aqua planet' 'aqua4xCO2' -expt_id_ok: 'aqua planet plus 4K anomaly' 'aqua4K' -expt_id_ok: 'AMIP plus 4K anomaly' 'amip4K' - - -approx_interval: 30.000000 ! approximate spacing between successive time - ! samples (in units of the output time - ! coordinate. - -generic_levels: olevel - -!============ -axis_entry: longitude -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: longitude -units: degrees_east -axis: X ! X, Y, Z, T (default: undeclared) -long_name: longitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lon -valid_min: 0.0 -valid_max: 360.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: latitude -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: latitude -units: degrees_north -axis: Y ! X, Y, Z, T (default: undeclared) -long_name: latitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lat -valid_min: -90.0 -valid_max: 90.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: time -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: time -units: days since ? -axis: T ! X, Y, Z, T (default: undeclared) -long_name: time -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: time -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! 
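The axis entries in these tables double as validation rules: a fixed `out_name`, `valid_min`/`valid_max`, a stored direction, and mandatory bounds. A toy check for the longitude axis as specified here (illustrative only; the function name is invented and this is not the checker ESMValTool actually uses):

```python
import numpy as np

def check_longitude(lon, lon_bnds=None):
    """Check a longitude coordinate against the table entry above:
    out_name 'lon', valid range [0, 360], stored increasing,
    bounds required (must_have_bounds: yes)."""
    lon = np.asarray(lon, dtype=float)
    if lon_bnds is None:
        raise ValueError("longitude requires bounds")
    if lon.min() < 0.0 or lon.max() > 360.0:
        raise ValueError("longitude outside valid_min/valid_max [0, 360]")
    if np.any(np.diff(lon) <= 0):
        raise ValueError("longitude must be stored in increasing direction")
```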
- - -!============ -axis_entry: basin -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: region -long_name: ocean basin -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: region -type: character -requested: atlantic_arctic_ocean indian_pacific_ocean global_ocean ! space-separated list of requested coordinates -must_have_bounds: no -coords_attrib: region -!---------------------------------- -! - - -!============ -axis_entry: rho -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: sea_water_potential_density -units: kg m-3 -axis: Z ! X, Y, Z, T (default: undeclared) -positive: down ! up or down (default: undeclared) -long_name: potential density referenced to 2000 dbar -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: rho -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: oline -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: region -long_name: ocean passage -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: passage -type: character -requested: barents_opening bering_strait canadian_archipelago denmark_strait drake_passage english_channel pacific_equatorial_undercurrent faroe_scotland_channel florida_bahamas_strait fram_strait iceland_faroe_channel indonesian_throughflow mozambique_channel taiwan_luzon_straits windward_passage ! space-separated list of requested coordinates -must_have_bounds: no -coords_attrib: passage -!---------------------------------- -! - - -!============ -axis_entry: olayer100m -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: depth -units: m -axis: Z ! X, Y, Z, T (default: undeclared) -positive: down ! up or down (default: undeclared) -long_name: depth -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: depth -valid_min: 0.0 -valid_max: 100.0 -stored_direction: increasing -type: double -value: 50. ! of scalar (singleton) dimension -bounds_values: 0. 100. ! of scalar (singleton) dimension bounds -must_have_bounds: no -!---------------------------------- -! - - -!============ -axis_entry: depth100m -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: depth -units: m -axis: Z ! X, Y, Z, T (default: undeclared) -positive: down ! up or down (default: undeclared) -long_name: depth -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: depth -valid_min: 80.0 -valid_max: 120.0 -stored_direction: increasing -type: double -value: 100. ! of scalar (singleton) dimension -must_have_bounds: no -!---------------------------------- -! - - -!============ -axis_entry: depth0m -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: depth -units: m -axis: Z ! X, Y, Z, T (default: undeclared) -positive: down ! up or down (default: undeclared) -long_name: depth -!---------------------------------- -! 
Additional axis information: -!---------------------------------- -out_name: depth -valid_min: 0.0 -valid_max: 100.0 -stored_direction: increasing -type: double -value: 0. ! of scalar (singleton) dimension -must_have_bounds: no -!---------------------------------- -! - -!============ -axis_entry: depth_coord -!============ -! -! This vertical coordinate is used in z-coordinate models -! The units are meters (m), and it has a value of 0. at the surface -! and becomes more and more positive with depth. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: depth -units: m -axis: Z -positive: down -long_name: ocean depth coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: increasing -valid_min: 0. -valid_max: 12000. -!---------------------------------- -! -!============ -axis_entry: olev -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -axis: Z ! X, Y, Z, T (default: undeclared) -positive: down ! up or down (default: undeclared) -long_name: generic ocean level -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lev -stored_direction: increasing -type: double -must_have_bounds: no -!---------------------------------- -! -!============ -axis_entry: ocean_double_sigma -!============ -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: ocean_double_sigma -axis: Z -positive: up -long_name: ocean double sigma coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -formula: for k <= k_c:\n z= sigma*f \n for k > k_c:\n z= f + (sigma-1)*(depth-f) \n f= 0.5*(z1+ z2) + 0.5*(z1-z2)* tanh(2*a/(z1-z2)*(depth-href)) -z_factors: sigma: sigma depth: depth z1: z1 z2: z2 a: a_coeff href: href k_c: k_c -z_bounds_factors: sigma: sigma_bnds depth: depth z1: z1 z2: z2 a: a href: href k_c: k_c -!---------------------------------- -! -!============ -axis_entry: ocean_sigma_z -!============ -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: ocean_sigma_z -axis: Z -long_name: ocean sigma over z coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -formula: for k <= nsigma: z = eta + sigma*(min(depth_c,depth)+eta) ; for k > nsigma: z = zlev -z_factors: sigma: sigma eta: eta depth: depth depth_c: depth_c nsigma: nsigma zlev: zlev -z_bounds_factors: sigma: sigma_bnds eta: eta depth: depth depth_c: depth_c nsigma: nsigma zlev: zlev_bnds -!---------------------------------- -! -!============ -axis_entry: ocean_s -!============ -! -! This coordinate is dimensionless and varies from 0 at the surface to -1. at the ocean floor. -! The values of s, which appears in the formula below, should be stored as ocean_s. -! Note that in the netCDF file the variable will be named "lev", not ocean_s. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: ocean_s_coordinate -axis: Z -positive: up -long_name: ocean s-coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: -1. -valid_max: 0. 
-formula: z = eta*(1+s) + depth_c*s + (depth-depth_c)*C \n where \n C=(1-b)*sinh(a*s)/sinh(a) +\n b*(tanh(a*(s+0.5))/(2*tanh(0.5*a)) - 0.5) -z_factors: s: lev eta: eta depth: depth a: a_coeff b: b_coeff depth_c: depth_c -z_bounds_factors: s: lev_bnds eta: eta depth: depth a: a b: b depth_c: depth_c -!---------------------------------- -! -!============ -axis_entry: ocean_sigma -!============ -! -! This coordinate is dimensionless and varies from 0 at the surface to -1. at the ocean floor. -! The values of sigma, which appears in the formula below, should be stored as ocean_sigma. -! Note that in the netCDF file the variable will be named "lev", not ocean_sigma. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: ocean_sigma_coordinate -axis: Z -positive: up -long_name: ocean sigma coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: -1. -valid_max: 0. -formula: z = eta + sigma*(depth+eta) -z_factors: sigma: lev eta: eta depth: depth -z_bounds_factors: sigma: lev_bnds eta: eta depth: depth -!---------------------------------- -! -! -! *************************************************************** -! -! Vertical coordinate formula_terms: -! -! *************************************************************** -! -!============ -variable_entry: eta -!============ -!---------------------------------- -! Variable attributes: -!---------------------------------- -units: m -cell_methods: time: mean -long_name: Sea Surface Height -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -type: real -!---------------------------------- -! -! -!============ -variable_entry: depth -!============ -!---------------------------------- -! Variable attributes: -!---------------------------------- -units: m -long_name: Sea Floor Depth -comment: Ocean bathymetry. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude -out_name: depth -type: real -valid_min: 0. -valid_max: 12000. -ok_min_mean_abs: 2000. -ok_max_mean_abs: 5000. -!---------------------------------- -! -! -!============ -variable_entry: sigma -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: sigma(k) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: olevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: sigma_bnds -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: sigma(k+1/2) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: olevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: zlev -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: zlev(k) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: olevel -type: double -!---------------------------------- -! -! 
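The Omon table repeats the same vertical-coordinate definitions as Oclim; the remaining hybrid case, `ocean_sigma_z` (defined just above), uses terrain-following sigma levels down to level `nsigma` and fixed z levels below. A sketch of that piecewise formula, assuming the table's `k` is a 1-based level index and `sigma`/`zlev` are per-level sequences (an assumption, since the table does not fix the indexing convention):

```python
def ocean_sigma_z_to_depth(k, sigma, eta, depth, depth_c, nsigma, zlev):
    """ocean_sigma_z formula from the table (k assumed 1-based):
    for k <= nsigma: z = eta + sigma(k)*(min(depth_c, depth) + eta)
    for k >  nsigma: z = zlev(k)"""
    if k <= nsigma:
        return eta + sigma[k - 1] * (min(depth_c, depth) + eta)
    return zlev[k - 1]
```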
-!============ -variable_entry: zlev_bnds -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: zlev(k+1/2) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: olevel -type: double -!---------------------------------- -! -! -! -!============ -variable_entry: depth_c -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: depth_c -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: double -!---------------------------------- -! -! -!============ -variable_entry: a -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: coefficient a -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: double -!---------------------------------- -! -! -!============ -variable_entry: b -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: coefficient b -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: double -!---------------------------------- -! -! -!============ -variable_entry: nsigma -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: nsigma -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: integer -!---------------------------------- -! -! -!============ -variable_entry: z1 -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: z1 -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: double -!---------------------------------- -! -! -!============ -variable_entry: z2 -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: z2 -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: double -!---------------------------------- -! -! -!============ -variable_entry: href -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: href -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: double -!---------------------------------- -! -! -!============ -variable_entry: k_c -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: k_c -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: integer -!---------------------------------- -! -! - -!============ -variable_entry: dissic -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_dissolved_inorganic_carbon_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Dissolved Inorganic Carbon Concentration at Surface -comment: Dissolved inorganic carbon (CO3+HCO3+H2CO3) concentration -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: dissic -type: real -!---------------------------------- -! - -!============ -variable_entry: dissoc -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_dissolved_organic_carbon_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Dissolved Organic Carbon Concentration at Surface -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: dissoc -type: real -!---------------------------------- -! - -!============ -variable_entry: phyc -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_phytoplankton_expressed_as_carbon_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Phytoplankton Carbon Concentration at Surface -comment: sum of phytoplankton carbon component concentrations. In most (all?) cases this is the sum of phycdiat and phycmisc (i.e., ""Diatom Carbon Concentration"" and ""Non-Diatom Phytoplankton Carbon Concentration"" -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: phyc -type: real -!---------------------------------- -! - -!============ -variable_entry: zooc -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_zooplankton_expressed_as_carbon_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Zooplankton Carbon Concentration at Surface -comment: sum of zooplankton carbon component concentrations -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: zooc -type: real -!---------------------------------- -! - -!============ -variable_entry: bacc -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_bacteria_expressed_as_carbon_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Bacterial Carbon Concentration at Surface -comment: sum of bacterial carbon component concentrations -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: bacc -type: real -!---------------------------------- -! - -!============ -variable_entry: detoc -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_organic_detritus_expressed_as_carbon_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Detrital Organic Carbon Concentration at Surface -comment: sum of detrital organic carbon component concentrations -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: detoc -type: real -!---------------------------------- -! - -!============ -variable_entry: calc -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_calcite_expressed_as_carbon_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Calcite Concentration at Surface -comment: sum of particulate calcite component concentrations (e.g. Phytoplankton, Detrital, etc.) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: calc -type: real -!---------------------------------- -! - -!============ -variable_entry: arag -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_aragonite_expressed_as_carbon_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Aragonite Concentration at Surface -comment: sum of particulate aragonite components (e.g. Phytoplankton, Detrital, etc.) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: arag -type: real -!---------------------------------- -! - -!============ -variable_entry: phydiat -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_diatoms_expressed_as_carbon_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Mole Concentration of Diatoms expressed as Carbon in Sea Water at Surface -comment: carbon from the diatom phytoplankton component concentration alone -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: phydiat -type: real -!---------------------------------- -! - -!============ -variable_entry: phydiaz -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_diazotrophs_expressed_as_carbon_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Mole Concentration of Diazotrophs Expressed as Carbon in Sea Water at Surface -comment: carbon concentration from the diazotrophic phytoplankton component alone -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: phydiaz -type: real -!---------------------------------- -! - -!============ -variable_entry: phycalc -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_calcareous_phytoplankton_expressed_as_carbon_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Mole Concentration of Calcareous Phytoplankton expressed as Carbon in Sea Water at Surface -comment: carbon concentration from calcareous (calcite-producing) phytoplankton component alone -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: phycalc -type: real -!---------------------------------- -! - -!============ -variable_entry: phypico -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_picophytoplankton_expressed_as_carbon_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Mole Concentration of Picophytoplankton expressed as Carbon in Sea Water at Surface -comment: carbon concentration from the picophytoplankton (<2 um) component alone -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: phypico -type: real -!---------------------------------- -! - -!============ -variable_entry: phymisc -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_miscellaneous_phytoplankton_expressed_as_carbon_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Mole Concentration of Miscellaneous Phytoplankton expressed as Carbon in Sea Water at Surface -comment: carbon concentration from additional phytoplankton component alone -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: phymisc -type: real -!---------------------------------- -! - -!============ -variable_entry: zmicro -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_microzooplankton_expressed_as_carbon_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Mole Concentration of Microzooplankton expressed as Carbon in Sea Water at Surface -comment: carbon concentration from the microzooplankton (<20 um) component alone -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: zmicro -type: real -!---------------------------------- -! - -!============ -variable_entry: zmeso -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_mesozooplankton_expressed_as_carbon_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Mole Concentration of Mesozooplankton expressed as Carbon in Sea Water at Surface -comment: carbon concentration from mesozooplankton (20-200 um) component alone -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: zmeso -type: real -!---------------------------------- -! - -!============ -variable_entry: zoocmisc -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_miscellaneous_zooplankton_expressed_as_carbon_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Other Zooplankton Carbon Concentration at Surface -comment: carbon from additional zooplankton component concentrations alone (e.g. Micro, meso). Since the models all have different numbers of components, this variable has been included to provide a check for intercomparison between models since some phytoplankton groups are supersets. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: zoocmisc -type: real -!---------------------------------- -! - -!============ -variable_entry: talk -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_water_alkalinity_expressed_as_mole_equivalent -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Total Alkalinity at Surface -comment: total alkalinity equivalent concentration (including carbonate, nitrogen, silicate, and borate components) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: talk -type: real -!---------------------------------- -! - -!============ -variable_entry: ph -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: sea_water_ph_reported_on_total_scale -units: 1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: pH at Surface -comment: negative log of hydrogen ion concentration with the concentration expressed as mol H kg-1. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: ph -type: real -!---------------------------------- -! - -!============ -variable_entry: o2 -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_molecular_oxygen_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Dissolved Oxygen Concentration at Surface -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: o2 -type: real -!---------------------------------- -! - -!============ -variable_entry: no3 -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_nitrate_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Dissolved Nitrate Concentration at Surface -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: no3 -type: real -!---------------------------------- -! - -!============ -variable_entry: nh4 -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_ammonium_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Dissolved Ammonium Concentration at Surface -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: nh4 -type: real -!---------------------------------- -! - -!============ -variable_entry: po4 -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_phosphate_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Dissolved Phosphate Concentration at Surface -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: po4 -type: real -!---------------------------------- -! - -!============ -variable_entry: dfe -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_dissolved_iron_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Dissolved Iron Concentration at Surface -comment: dissolved iron in sea water is meant to include both Fe2+ and Fe3+ ions (but not, e.g., particulate detrital iron) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: dfe -type: real -!---------------------------------- -! - -!============ -variable_entry: si -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_silicate_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Dissolved Silicate Concentration at Surface -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: si -type: real -!---------------------------------- -! - -!============ -variable_entry: chl -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_phytoplankton_expressed_as_chlorophyll_in_sea_water -units: kg m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Total Chlorophyll Mass Concentration at Surface -comment: sum of chlorophyll from all phytoplankton group concentrations. In most models this is equal to chldiat+chlmisc, that is the sum of ""Diatom Chlorophyll Mass Concentration"" plus ""Other Phytoplankton Chlorophyll Mass Concentration"" -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: chl -type: real -!---------------------------------- -! - -!============ -variable_entry: chldiat -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_diatoms_expressed_as_chlorophyll_in_sea_water -units: kg m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Diatom Chlorophyll Mass Concentration at Surface -comment: chlorophyll from diatom phytoplankton component concentration alone -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: chldiat -type: real -!---------------------------------- -! - -!============ -variable_entry: chldiaz -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_diazotrophs_expressed_as_chlorophyll_in_sea_water -units: kg m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Mass Concentration of Diazotrophs expressed as Chlorophyll in Sea Water at Surface -comment: chlorophyll concentration from the diazotrophic phytoplankton component alone -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: chldiaz -type: real -!---------------------------------- -! - -!============ -variable_entry: chlcalc -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_calcareous_phytoplankton_expressed_as_chlorophyll_in_sea_water -units: kg m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Mass Concentration of Calcareous Phytoplankton expressed as Chlorophyll in Sea Water at Surface -comment: chlorophyll concentration from the calcite-producing phytoplankton component alone -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: chlcalc -type: real -!---------------------------------- -! - -!============ -variable_entry: chlpico -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_picophytoplankton_expressed_as_chlorophyll_in_sea_water -units: kg m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Mass Concentration of Picophytoplankton expressed as Chlorophyll in Sea Water at Surface -comment: chlorophyll concentration from the picophytoplankton (<2 um) component alone -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: chlpico -type: real -!---------------------------------- -! - -!============ -variable_entry: chlmisc -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_miscellaneous_phytoplankton_expressed_as_chlorophyll_in_sea_water -units: kg m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Other Phytoplankton Chlorophyll Mass Concentration at Surface -comment: chlorophyll from additional phytoplankton component concentrations alone -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: chlmisc -type: real -!---------------------------------- -! - -!============ -variable_entry: pon -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_particulate_organic_matter_expressed_as_nitrogen_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Particulate Organic Nitrogen Concentration at Surface -comment: sum of particulate organic nitrogen component concentrations -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: pon -type: real -!---------------------------------- -! - -!============ -variable_entry: pop -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_particulate_organic_matter_expressed_as_phosphorus_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Particulate Organic Phosphorus Concentration at Surface -comment: sum of particulate organic phosphorus component concentrations -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: pop -type: real -!---------------------------------- -! - -!============ -variable_entry: bfe -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_particulate_organic_matter_expressed_as_iron_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Particulate Biogenic Iron Concentration at Surface -comment: sum of particulate organic iron component concentrations -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: bfe -type: real -!---------------------------------- -! - -!============ -variable_entry: bsi -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_particulate_matter_expressed_as_silicon_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Particulate Biogenic Silica Concentration at Surface -comment: sum of particulate silica component concentrations -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: bsi -type: real -!---------------------------------- -! - -!============ -variable_entry: phyn -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_phytoplankton_expressed_as_nitrogen_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Phytoplankton Nitrogen Concentration at Surface -comment: sum of phytoplankton nitrogen component concentrations -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: phyn -type: real -!---------------------------------- -! - -!============ -variable_entry: phyp -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_phytoplankton_expressed_as_phosphorus_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Phytoplankton Phosphorus Concentration at Surface -comment: sum of phytoplankton phosphorus components -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: phyp -type: real -!---------------------------------- -! 
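All of the surface tracer entries above share dimensions: longitude latitude time depth0m. In these tables depth0m (like depth100m and olayer100m used further below) is a scalar coordinate: it records that the field is valid at 0 m depth but contributes no array axis, so the data are written as (time, latitude, longitude). A minimal sketch of that mapping, under the assumption that these three names are the only scalar coordinates in play here (hypothetical helper, not CMOR's own logic):

SCALAR_COORDS = {"depth0m", "depth100m", "olayer100m"}

def netcdf_shape(dimensions):
    # Drop scalar coordinates; CMOR writes time first, then latitude/longitude.
    axes = [d for d in dimensions.split() if d not in SCALAR_COORDS]
    order = {"time": 0, "latitude": 1, "longitude": 2}
    return tuple(sorted(axes, key=lambda d: order.get(d, 99)))

print(netcdf_shape("longitude latitude time depth0m"))
# -> ('time', 'latitude', 'longitude')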
- -!============ -variable_entry: phyfe -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_phytoplankton_expressed_as_iron_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Phytoplankton Iron Concentration at Surface -comment: sum of phytoplankton iron component concentrations -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: phyfe -type: real -!---------------------------------- -! - -!============ -variable_entry: physi -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_phytoplankton_expressed_as_silicon_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Phytoplankton Silica Concentration at Surface -comment: sum of phytoplankton silica component concentrations -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: physi -type: real -!---------------------------------- -! - -!============ -variable_entry: dms -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_dimethyl_sulfide_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Dimethyl Sulphide Concentration at Surface -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: dms -type: real -!---------------------------------- -! - -!============ -variable_entry: co3 -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_carbonate_expressed_as_carbon_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Mole Concentration of Carbonate expressed as Carbon in Sea Water at Surface -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: co3 -type: real -!---------------------------------- -! - -!============ -variable_entry: co3satcalc -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_calcite_expressed_as_carbon_in_sea_water_at_saturation -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Mole Concentration of Calcite expressed as Carbon in Sea Water at Saturation at Surface -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: co3satcalc -type: real -!---------------------------------- -! 
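Every entry in this table follows the same key: value layout, with ! lines serving as comments and separators. A minimal, illustrative parser for one such block (a sketch, not the reader CMOR itself uses) shows how little structure is needed:

def parse_entry(lines):
    entry = {}
    for line in lines:
        line = line.strip()
        if not line or line.startswith('!'):
            continue  # comment or separator line
        # Only the first colon splits key from value; values such as
        # cell_methods legitimately contain further colons.
        key, _, value = line.partition(':')
        entry[key.strip()] = value.strip()
    return entry

block = """\
variable_entry: co3
modeling_realm: ocnBgchem
units: mol m-3
cell_methods: time: mean area: mean where sea
dimensions: longitude latitude time depth0m
type: real
"""
print(parse_entry(block.splitlines())["cell_methods"])
# -> time: mean area: mean where sea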
- -!============ -variable_entry: co3satarag -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_aragonite_expressed_as_carbon_in_sea_water_at_saturation -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Mole Concentration of Aragonite expressed as Carbon in Sea Water at Saturation at Surface -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth0m -out_name: co3satarag -type: real -!---------------------------------- -! - -!============ -variable_entry: intpp -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: net_primary_mole_productivity_of_carbon_by_phytoplankton -units: mol m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Primary Organic Carbon Production by All Types of Phytoplankton -comment: Vertically integrated total primary (organic carbon) production by phytoplankton. This should equal the sum of intpdiat+intpphymisc, but those individual components may be unavailable in some models. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: intpp -type: real -!---------------------------------- -! - -!============ -variable_entry: intpnitrate -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: net_primary_mole_productivity_of_carbon_due_to_nitrate_utilization -units: mol m-2 s-1 -cell_methods: time: mean area: where sea -cell_measures: area: areacello -long_name: Primary Organic Carbon Production by Phytoplankton Based on Nitrate Uptake Alone -comment: Vertically integrated primary (organic carbon) production by phytoplankton based on nitrate uptake alone -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: intpnitrate -type: real -!---------------------------------- -! - -!============ -variable_entry: intpdiat -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: net_primary_mole_productivity_of_carbon_by_diatoms -units: mol m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Primary Organic Carbon Production by Diatoms -comment: Vertically integrated primary (organic carbon) production by the diatom phytoplankton component alone -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: intpdiat -type: real -!---------------------------------- -! - -!============ -variable_entry: intpdiaz -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: net_primary_mole_productivity_of_carbon_by_diazotrophs -units: mol m-2 s-1 -cell_methods: time: mean area: where sea -cell_measures: area: areacello -long_name: Net Primary Mole Productivity of Carbon by Diazotrophs -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: intpdiaz -type: real -!---------------------------------- -! - -!============ -variable_entry: intpcalc -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: net_primary_mole_productivity_of_carbon_by_calcareous_phytoplankton -units: mol m-2 s-1 -cell_methods: time: mean area: where sea -cell_measures: area: areacello -long_name: Net Primary Mole Productivity of Carbon by Calcareous Phytoplankton -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: intpcalc -type: real -!---------------------------------- -! - -!============ -variable_entry: intppico -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: net_primary_mole_productivity_of_carbon_by_picophytoplankton -units: mol m-2 s-1 -cell_methods: time: mean area: where sea -cell_measures: area: areacello -long_name: Net Primary Mole Productivity of Carbon by Picophytoplankton -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: intppico -type: real -!---------------------------------- -! - -!============ -variable_entry: intpmisc -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: net_primary_mole_productivity_of_carbon_by_miscellaneous_phytoplankton -units: mol m-2 s-1 -cell_methods: time: mean area: where sea -cell_measures: area: areacello -long_name: Primary Organic Carbon Production by Other Phytoplankton -comment: Vertically integrated total primary (organic carbon) production by other phytoplankton components alone -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: intpmisc -type: real -!---------------------------------- -! - -!============ -variable_entry: intpbfe -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_ocean_mole_content_of_iron_due_to_biological_production -units: mol m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Iron Production -comment: Vertically integrated biogenic iron production -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: intpbfe -type: real -!---------------------------------- -! - -!============ -variable_entry: intpbsi -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: tendency_of_ocean_mole_content_of_silicon_due_to_biological_production -units: mol m-2 s-1 -cell_methods: time: mean area: where sea -cell_measures: area: areacello -long_name: Silica Production -comment: Vertically integrated biogenic silica production -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: intpbsi -type: real -!---------------------------------- -! - -!============ -variable_entry: intpcalcite -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_ocean_mole_content_of_calcite_expressed_as_carbon_due_to_biological_production -units: mol m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Calcite Production -comment: Vertically integrated calcite production -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: intpcalcite -type: real -!---------------------------------- -! - -!============ -variable_entry: intparag -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_ocean_mole_content_of_aragonite_expressed_as_carbon_due_to_biological_production -units: mol m-2 s-1 -cell_methods: time: mean area: where sea -cell_measures: area: areacello -long_name: Aragonite Production -comment: Vertically integrated aragonite production -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: intparag -type: real -!---------------------------------- -! - -!============ -variable_entry: epc100 -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sinking_mole_flux_of_particulate_organic_matter_expressed_as_carbon_in_sea_water -units: mol m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Downward Flux of Particle Organic Carbon -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth100m -out_name: epc100 -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: epfe100 -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sinking_mole_flux_of_particulate_iron_in_sea_water -units: mol m-2 s-1 -cell_methods: time: mean area: where sea -cell_measures: area: areacello -long_name: Downward Flux of Particulate Iron -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth100m -out_name: epfe100 -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: epsi100 -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: sinking_mole_flux_of_particulate_silicon_in_sea_water -units: mol m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Downward Flux of Particulate Silica -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth100m -out_name: epsi100 -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: epcalc100 -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sinking_mole_flux_of_calcite_expressed_as_carbon_in_sea_water -units: mol m-2 s-1 -cell_methods: time: mean area: where sea -cell_measures: area: areacello -long_name: Downward Flux of Calcite -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth100m -out_name: epcalc100 -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: eparag100 -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sinking_mole_flux_of_aragonite_expressed_as_carbon_in_sea_water -units: mol m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Downward Flux of Aragonite -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time depth100m -out_name: eparag100 -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: intdic -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_mass_content_of_dissolved_inorganic_carbon -units: kg m-2 -cell_methods: time: mean area: where sea -cell_measures: area: areacello -long_name: Dissolved Inorganic Carbon Content -comment: Vertically integrated DIC -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: intdic -type: real -!---------------------------------- -! - -!============ -variable_entry: spco2 -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_partial_pressure_of_carbon_dioxide_in_sea_water -units: Pa -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Surface Aqueous Partial Pressure of CO2 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: spco2 -type: real -!---------------------------------- -! - -!============ -variable_entry: dpco2 -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_carbon_dioxide_partial_pressure_difference_between_sea_water_and_air -units: Pa -cell_methods: time: mean area: where sea -cell_measures: area: areacello -long_name: Delta PCO2 -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: dpco2 -type: real -!---------------------------------- -! - -!============ -variable_entry: dpo2 -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_molecular_oxygen_partial_pressure_difference_between_sea_water_and_air -units: Pa -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Delta PO2 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: dpo2 -type: real -!---------------------------------- -! - -!============ -variable_entry: fgco2 -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon -units: kg m-2 s-1 -cell_methods: time: mean area: where sea -cell_measures: area: areacello -long_name: Surface Downward CO2 Flux -comment: Gas exchange flux of CO2 (positive into ocean) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: fgco2 -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: fgo2 -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downward_mole_flux_of_molecular_oxygen -units: mol m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Surface Downward O2 Flux -comment: Gas exchange flux of O2 (positive into ocean) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: fgo2 -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: fgdms -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upward_mole_flux_of_dimethyl_sulfide -units: mol m-2 s-1 -cell_methods: time: mean area: where sea -cell_measures: area: areacello -long_name: Surface Upward DMS Flux -comment: Gas exchange flux of DMS (positive into atmosphere) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: fgdms -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: fsc -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_ocean_mole_content_of_carbon_due_to_runoff_and_sediment_dissolution -units: mol m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Flux of Carbon Into Ocean Surface by Runoff and Sediment Dissolution -comment: Carbon supply to ocean through runoff and sediment dissolution (neglects gas exchange) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: fsc -type: real -!---------------------------------- -! 
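The gas-exchange fluxes above mix sign conventions: fgco2 and fgo2 carry positive: down (positive into the ocean), while fgdms carries positive: up (positive into the atmosphere). Any code comparing them on a single convention has to honour that attribute; a hedged sketch (hypothetical helper, not an ESMValTool function):

def into_ocean(flux, positive):
    # Return the flux with positive values directed into the ocean.
    if positive == "down":
        return flux   # already positive into the ocean
    if positive == "up":
        return -flux  # positive into the atmosphere: flip the sign
    raise ValueError(f"unknown 'positive' attribute: {positive!r}")

print(into_ocean(2.5e-9, "up"))  # an fgdms-style flux -> -2.5e-09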
- -!============ -variable_entry: frc -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_ocean_mole_content_of_carbon_due_to_sedimentation -units: mol m-2 s-1 -cell_methods: time: mean area: where sea -cell_measures: area: areacello -long_name: Downward Carbon Flux at Ocean Bottom -comment: Carbon loss to sediments -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: frc -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: intpn2 -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_ocean_mole_content_of_elemental_nitrogen_due_to_fixation -units: mol m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Nitrogen Fixation Rate in Ocean -comment: Vertically integrated nitrogen fixation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: intpn2 -type: real -!---------------------------------- -! - -!============ -variable_entry: fsn -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_ocean_mole_content_of_elemental_nitrogen_due_to_deposition_and_fixation_and_runoff -units: mol m-2 s-1 -cell_methods: time: mean area: where sea -cell_measures: area: areacello -long_name: Surface Downward Net Flux of Nitrogen -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: fsn -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: frn -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_ocean_mole_content_of_elemental_nitrogen_due_to_denitrification_and_sedimentation -units: mol m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Nitrogen Loss to Sediments and through Denitrification -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: frn -type: real -!---------------------------------- -! - -!============ -variable_entry: fsfe -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_ocean_mole_content_of_iron_due_to_deposition_and_runoff_and_sediment_dissolution -units: mol m-2 s-1 -cell_methods: time: mean area: where sea -cell_measures: area: areacello -long_name: Surface Downward Net Flux of Iron -comment: Iron supply through deposition flux onto sea surface, runoff, coasts, sediments, etc -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: fsfe -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: frfe -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: tendency_of_ocean_mole_content_of_iron_due_to_sedimentation -units: mol m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Iron Loss to Sediments -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: frfe -type: real -!---------------------------------- -! - -!============ -variable_entry: o2min -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_dissolved_molecular_oxygen_in_sea_water_at_shallowest_local_minimum_in_vertical_profile -units: mol m-3 -cell_methods: time: mean area: where sea depth: minimum -cell_measures: area: areacello -long_name: Oxygen Minimum Concentration -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: o2min -type: real -!---------------------------------- -! - -!============ -variable_entry: zo2min -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: depth_at_shallowest_local_minimum_in_vertical_profile_of_mole_concentration_of_dissolved_molecular_oxygen_in_sea_water -units: m -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Depth of Oxygen Minimum Concentration -comment: Depth of vertical minimum concentration of dissolved oxygen gas (if two, then the shallower) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: zo2min -type: real -!---------------------------------- -! - -!============ -variable_entry: zsatcalc -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: minimum_depth_of_calcite_undersaturation_in_sea_water -units: m -cell_methods: time: mean area: where sea -cell_measures: area: areacello -long_name: Calcite Saturation Depth -comment: Depth of calcite saturation horizon (0 if < surface, ""missing"" if > bottom, if two, then the shallower) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: zsatcalc -type: real -!---------------------------------- -! - -!============ -variable_entry: zsatarag -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: minimum_depth_of_aragonite_undersaturation_in_sea_water -units: m -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Aragonite Saturation Depth -comment: Depth of aragonite saturation horizon (0 if < surface, ""missing"" if > bottom, if two, then the shallower) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: zsatarag -type: real -!---------------------------------- -! - -!============ -variable_entry: fddtdic -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: tendency_of_ocean_mole_content_of_dissolved_inorganic_carbon -units: mol m-2 s-1 -cell_methods: time: mean area: where sea -cell_measures: area: areacello -long_name: Rate of Change of Net Dissolved Inorganic Carbon -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time olayer100m -out_name: fddtdic -type: real -!---------------------------------- -! - -!============ -variable_entry: fddtdin -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_ocean_mole_content_of_dissolved_inorganic_nitrogen -units: mol m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Rate of Change of Net Dissolved Inorganic Nitrogen -comment: Net time rate of change of nitrogen nutrients (e.g. NO3+NH4) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time olayer100m -out_name: fddtdin -type: real -!---------------------------------- -! - -!============ -variable_entry: fddtdip -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_ocean_mole_content_of_dissolved_inorganic_phosphorus -units: mol m-2 s-1 -cell_methods: time: mean area: where sea -cell_measures: area: areacello -long_name: Rate of Change of Net Dissolved Inorganic Phosphate -comment: vertical integral of net time rate of change of phosphate -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time olayer100m -out_name: fddtdip -type: real -!---------------------------------- -! - -!============ -variable_entry: fddtdife -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_ocean_mole_content_of_dissolved_inorganic_iron -units: mol m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Rate of Change of Net Dissolved Inorganic Iron -comment: vertical integral of net time rate of change of dissolved inorganic iron -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time olayer100m -out_name: fddtdife -type: real -!---------------------------------- -! - -!============ -variable_entry: fddtdisi -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_ocean_mole_content_of_dissolved_inorganic_silicon -units: mol m-2 s-1 -cell_methods: time: mean area: where sea -cell_measures: area: areacello -long_name: Rate of Change of Net Dissolved Inorganic Silicate -comment: vertical integral of net time rate of change of dissolved inorganic silicate -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time olayer100m -out_name: fddtdisi -type: real -!---------------------------------- -! - -!============ -variable_entry: fddtalk -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: integral_wrt_depth_of_tendency_of_sea_water_alkalinity_expressed_as_mole_equivalent -units: mol m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Rate of Change of Alkalinity -comment: vertical integral of net time rate of change of alkalinity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time olayer100m -out_name: fddtalk -type: real -!---------------------------------- -! - -!============ -variable_entry: fbddtdic -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_ocean_mole_content_of_dissolved_inorganic_carbon_due_to_biological_processes -units: mol m-2 s-1 -cell_methods: time: mean area: where sea -cell_measures: area: areacello -long_name: Rate of Change of Dissolved Inorganic Carbon due to Biological Activity -comment: vertical integral of net biological terms in time rate of change of dissolved inorganic carbon -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time olayer100m -out_name: fbddtdic -type: real -!---------------------------------- -! - -!============ -variable_entry: fbddtdin -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_ocean_mole_content_of_dissolved_inorganic_nitrogen_due_to_biological_processes -units: mol m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Rate of Change of Dissolved Inorganic Nitrogen due to Biological Activity -comment: vertical integral of net biological terms in time rate of change of nitrogen nutrients (e.g. NO3+NH4) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time olayer100m -out_name: fbddtdin -type: real -!---------------------------------- -! - -!============ -variable_entry: fbddtdip -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_ocean_mole_content_of_dissolved_inorganic_phosphorus_due_to_biological_processes -units: mol m-2 s-1 -cell_methods: time: mean area: where sea -cell_measures: area: areacello -long_name: Rate of Change of Dissolved Inorganic Phosphate due to Biological Activity -comment: vertical integral of net biological terms in time rate of change of phosphate -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time olayer100m -out_name: fbddtdip -type: real -!---------------------------------- -! - -!============ -variable_entry: fbddtdife -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: tendency_of_ocean_mole_content_of_dissolved_inorganic_iron_due_to_biological_processes -units: mol m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Rate of Change of Dissolved Inorganic Iron due to Biological Activity -comment: vertical integral of net biological terms in time rate of change of dissolved inorganic iron -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time olayer100m -out_name: fbddtdife -type: real -!---------------------------------- -! - -!============ -variable_entry: fbddtdisi -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_ocean_mole_content_of_dissolved_inorganic_silicon_due_to_biological_processes -units: mol m-2 s-1 -cell_methods: time: mean area: where sea -cell_measures: area: areacello -long_name: Rate of Change of Dissolved Inorganic Silicate due to Biological Activity -comment: vertical integral of net biological terms in time rate of change of dissolved inorganic silicate -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time olayer100m -out_name: fbddtdisi -type: real -!---------------------------------- -! - -!============ -variable_entry: fbddtalk -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: integral_wrt_depth_of_tendency_of_sea_water_alkalinity_expressed_as_mole_equivalent_due_to_biological_processes -units: mol m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Rate of Change of Biological Alkalinity due to Biological Activity -comment: vertical integral of net biological terms in time rate of change of alkalinity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time olayer100m -out_name: fbddtalk -type: real -!---------------------------------- -! - -!============ -variable_entry: masso -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_water_mass -units: kg -cell_methods: time: mean area: sum where sea -long_name: Sea Water Mass -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: time -out_name: masso -type: real -!---------------------------------- -! - -!============ -variable_entry: pbo -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_water_pressure_at_sea_floor -units: dbar -cell_methods: time: mean -cell_measures: area: areacello -long_name: Sea Water Pressure at Sea floor -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: pbo -type: real -!---------------------------------- -! - -!============ -variable_entry: pso -!============ -modeling_realm: ocean -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: sea_water_pressure_at_sea_water_surface -units: dbar -cell_methods: time: mean -cell_measures: area: areacello -long_name: Sea Water Pressure at Sea Water Surface -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: pso -type: real -!---------------------------------- -! - -!============ -variable_entry: volo -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_water_volume -units: m3 -cell_methods: time: mean area: sum where sea -long_name: Sea Water Volume -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: time -out_name: volo -type: real -!---------------------------------- -! - -!============ -variable_entry: zos -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_surface_height_above_geoid -units: m -cell_methods: time: mean -cell_measures: area: areacello -long_name: Sea Surface Height Above Geoid -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: zos -type: real -valid_min: -13.97 -valid_max: 9.575 -ok_min_mean_abs: 0.008384 -ok_max_mean_abs: 1.572 -!---------------------------------- -! - -!============ -variable_entry: zossq -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: square_of_sea_surface_height_above_geoid -units: m2 -cell_methods: time: mean -cell_measures: area: areacello -long_name: Square of Sea Surface Height Above Geoid -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: zossq -type: real -!---------------------------------- -! - -!============ -variable_entry: zosga -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: global_average_sea_level_change -units: m -cell_methods: time: mean area: mean where sea -long_name: Global Average Sea Level Change -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: time -out_name: zosga -type: real -valid_min: -0.1426 -valid_max: 0.382 -ok_min_mean_abs: -0.09124 -ok_max_mean_abs: 0.3304 -!---------------------------------- -! - -!============ -variable_entry: zossga -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: global_average_steric_sea_level_change -units: m -cell_methods: time: mean area: mean where sea -long_name: Global Average Steric Sea Level Change -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: time -out_name: zossga -type: real -!---------------------------------- -! - -!============ -variable_entry: zostoga -!============ -modeling_realm: ocean -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: global_average_thermosteric_sea_level_change -units: m -cell_methods: time: mean area: mean where sea -long_name: Global Average Thermosteric Sea Level Change -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: time -out_name: zostoga -type: real -valid_min: -0.1375 -valid_max: 0.3734 -ok_min_mean_abs: -0.0511 -ok_max_mean_abs: 0.2998 -!---------------------------------- -! - -!============ -variable_entry: masscello -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_water_mass_per_unit_area -units: kg m-2 -cell_methods: time: mean -cell_measures: area: areacello volume: volcello -long_name: Sea Water Mass Per Unit Area -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: masscello -type: real -!---------------------------------- -! - -!============ -variable_entry: thkcello -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cell_thickness -units: m -cell_methods: time: mean -cell_measures: area: areacello volume: volcello -long_name: Ocean Model Cell Thickness -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: thkcello -type: real -!---------------------------------- -! - -!============ -variable_entry: thetao -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_water_potential_temperature -units: K -cell_methods: time: mean -cell_measures: area: areacello volume: volcello -long_name: Sea Water Potential Temperature -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: thetao -type: real -valid_min: -9.37e+17 -valid_max: 1.05e+20 -ok_min_mean_abs: -9.37e+17 -ok_max_mean_abs: 1.05e+20 -!---------------------------------- -! - -!============ -variable_entry: thetaoga -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_water_potential_temperature -units: K -cell_methods: time: mean area: mean where sea -long_name: Global Average Sea Water Potential Temperature -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: time -out_name: thetaoga -type: real -!---------------------------------- -! - -!============ -variable_entry: tos -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_surface_temperature -units: K -cell_methods: time: mean -cell_measures: area: areacello -long_name: Sea Surface Temperature -comment: "this may differ from ""surface temperature"" in regions of sea ice." -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: tos -type: real -valid_min: 257.4 -valid_max: 325.2 -ok_min_mean_abs: 272 -ok_max_mean_abs: 303.5 -!---------------------------------- -! - -!============ -variable_entry: tossq -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: square_of_sea_surface_temperature -units: K2 -cell_methods: time: mean -cell_measures: area: areacello -long_name: Square of Sea Surface Temperature -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: tossq -type: real -!---------------------------------- -! - -!============ -variable_entry: so -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_water_salinity -units: psu -cell_methods: time: mean -cell_measures: area: areacello volume: volcello -long_name: Sea Water Salinity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: so -type: real -valid_min: -9.37e+17 -valid_max: 1.05e+20 -ok_min_mean_abs: -9.37e+17 -ok_max_mean_abs: 1.05e+20 -!---------------------------------- -! - -!============ -variable_entry: soga -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_water_salinity -units: psu -cell_methods: time: mean area: mean where sea -long_name: Global Mean Sea Water Salinity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: time -out_name: soga -type: real -!---------------------------------- -! - -!============ -variable_entry: sos -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_surface_salinity -units: psu -cell_methods: time: mean -cell_measures: area: areacello -long_name: Sea Surface Salinity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: sos -type: real -!---------------------------------- -! - -!============ -variable_entry: rhopoto -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_water_potential_density -units: kg m-3 -cell_methods: time: mean -cell_measures: area: areacello volume: volcello -long_name: Sea Water Potential Density -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: rhopoto -type: real -valid_min: -3.257 -valid_max: 1094 -ok_min_mean_abs: 10.38 -ok_max_mean_abs: 1079 -!---------------------------------- -! - -!============ -variable_entry: agessc -!============ -modeling_realm: ocean -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: sea_water_age_since_surface_contact -units: yr -cell_methods: time: mean -cell_measures: area: areacello volume: volcello -long_name: Sea Water Age Since Surface Contact -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: agessc -type: real -!---------------------------------- -! - -!============ -variable_entry: cfc11 -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: moles_of_cfc11_per_unit_mass_in_sea_water -units: mol kg-1 -cell_methods: time: mean -cell_measures: area: areacello volume: volcello -long_name: Moles Per Unit Mass of CFC-11 in Sea Water -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: cfc11 -type: real -!---------------------------------- -! - -!============ -variable_entry: msftbarot -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_barotropic_mass_streamfunction -units: kg s-1 -cell_methods: time: mean -cell_measures: area: areacello -long_name: Ocean Barotropic Mass Streamfunction -comment: differs from CMIP3 because it includes mass. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: msftbarot -type: real -!---------------------------------- -! - -!============ -variable_entry: mlotst -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_mixed_layer_thickness_defined_by_sigma_t -units: m -cell_methods: time: mean -cell_measures: area: areacello -long_name: Ocean Mixed Layer Thickness Defined by Sigma T -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: mlotst -type: real -!---------------------------------- -! - -!============ -variable_entry: mlotstsq -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: square_of_ocean_mixed_layer_thickness_defined_by_sigma_t -units: m2 -cell_methods: time: mean -cell_measures: area: areacello -long_name: Square of Ocean Mixed Layer Thickness Defined by Sigma T -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: mlotstsq -type: real -!---------------------------------- -! - -!============ -variable_entry: omldamax -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_mixed_layer_thickness_defined_by_mixing_scheme -units: m -cell_methods: time: maximum within days time: mean over days -cell_measures: area: areacello -long_name: Mean Daily Maximum Ocean Mixed Layer Thickness Defined by Mixing Scheme -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: omldamax -type: real -!---------------------------------- -! 
- -!============ -variable_entry: omlmax -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_mixed_layer_thickness_defined_by_mixing_scheme -units: m -cell_methods: time: maximum -cell_measures: area: areacello -long_name: Monthly Maximum Ocean Mixed Layer Thickness Defined by Mixing Scheme -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: omlmax -type: real -!---------------------------------- -! - -!============ -variable_entry: uo -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_water_x_velocity -units: m s-1 -cell_methods: time: mean -long_name: Sea Water X Velocity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: uo -type: real -valid_min: -1.041e+18 -valid_max: 1.05e+20 -ok_min_mean_abs: -1.041e+18 -ok_max_mean_abs: 1.05e+20 -!---------------------------------- -! - -!============ -variable_entry: vo -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_water_y_velocity -units: m s-1 -cell_methods: time: mean -long_name: Sea Water Y Velocity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: vo -type: real -valid_min: -1.041e+18 -valid_max: 1.05e+20 -ok_min_mean_abs: -1.041e+18 -ok_max_mean_abs: 1.05e+20 -!---------------------------------- -! - -!============ -variable_entry: wmo -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: upward_ocean_mass_transport -units: kg s-1 -cell_methods: time: mean -cell_measures: area: areacello volume: volcello -long_name: Upward Ocean Mass Transport -comment: differs from CMIP3, which only had upward velocity. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: wmo -type: real -!---------------------------------- -! - -!============ -variable_entry: wmosq -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: square_of_upward_ocean_mass_transport -units: kg2 s-2 -cell_methods: time: mean -cell_measures: area: areacello volume: volcello -long_name: Square of Upward Ocean Mass Transport -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: wmosq -type: real -!---------------------------------- -! - -!============ -variable_entry: umo -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_mass_x_transport -units: kg s-1 -cell_methods: time: mean -long_name: Ocean Mass X Transport -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: umo -type: real -!---------------------------------- -! - -!============ -variable_entry: vmo -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_mass_y_transport -units: kg s-1 -cell_methods: time: mean -long_name: Ocean Mass Y Transport -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: vmo -type: real -!---------------------------------- -! - -!============ -variable_entry: msftmyz -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_meridional_overturning_mass_streamfunction -units: kg s-1 -cell_methods: time: mean longitude: mean -long_name: Ocean Meridional Overturning Mass Streamfunction -comment: differs from CMIP3 because it includes mass. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: latitude olevel basin time -out_name: msftmyz -type: real -!---------------------------------- -! - -!============ -variable_entry: msftmrhoz -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_meridional_overturning_mass_streamfunction -units: kg s-1 -cell_methods: time: mean longitude: mean -long_name: Ocean Meridional Overturning Mass Streamfunction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: latitude rho basin time -out_name: msftmrhoz -type: real -!---------------------------------- -! - -!============ -variable_entry: msftyyz -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_y_overturning_mass_streamfunction -units: kg s-1 -cell_methods: time: mean longitude: mean -long_name: Ocean Y Overturning Mass Streamfunction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: latitude olevel basin time -out_name: msftyyz -type: real -!---------------------------------- -! - -!============ -variable_entry: msftyrhoz -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_y_overturning_mass_streamfunction -units: kg s-1 -cell_methods: time: mean longitude: mean -long_name: Ocean Y Overturning Mass Streamfunction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: latitude rho basin time -out_name: msftyrhoz -type: real -!---------------------------------- -! - -!============ -variable_entry: msftmyzba -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_meridional_overturning_mass_streamfunction_due_to_bolus_advection -units: kg s-1 -cell_methods: time: mean longitude: mean -long_name: Ocean Meridional Overturning Mass Streamfunction due to Bolus Advection -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: latitude olevel basin time -out_name: msftmyzba -type: real -!---------------------------------- -! - -!============ -variable_entry: msftmrhozba -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_meridional_overturning_mass_streamfunction_due_to_bolus_advection -units: kg s-1 -cell_methods: time: mean longitude: mean -long_name: Ocean Meridional Overturning Mass Streamfunction due to Bolus Advection -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: latitude rho basin time -out_name: msftmrhozba -type: real -!---------------------------------- -! - -!============ -variable_entry: msftyyzba -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_y_overturning_mass_streamfunction_due_to_bolus_advection -units: kg s-1 -cell_methods: time: mean longitude: mean -long_name: Ocean Y Overturning Mass Streamfunction due to Bolus Advection -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: latitude olevel basin time -out_name: msftyyzba -type: real -!---------------------------------- -! - -!============ -variable_entry: msftyrhozba -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_y_overturning_mass_streamfunction_due_to_bolus_advection -units: kg s-1 -cell_methods: time: mean longitude: mean -long_name: Ocean Y Overturning Mass Streamfunction due to Bolus Advection -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: latitude rho basin time -out_name: msftyrhozba -type: real -!---------------------------------- -! - -!============ -variable_entry: hfnorth -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: northward_ocean_heat_transport -units: W -cell_methods: time: mean -long_name: Northward Ocean Heat Transport -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfnorth -type: real -!---------------------------------- -! - -!============ -variable_entry: hfnorthba -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: northward_ocean_heat_transport_due_to_bolus_advection -units: W -cell_methods: time: mean -long_name: Northward Ocean Heat Transport due to Bolus Advection -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfnorthba -type: real -!---------------------------------- -! - -!============ -variable_entry: hfnorthdiff -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: northward_ocean_heat_transport_due_to_diffusion -units: W -cell_methods: time: mean -long_name: Northward Ocean Heat Transport due to Diffusion -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfnorthdiff -type: real -!---------------------------------- -! - -!============ -variable_entry: hfx -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_heat_x_transport -units: W -cell_methods: time: mean -long_name: Ocean Heat X Transport -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfx -type: real -!---------------------------------- -! - -!============ -variable_entry: hfy -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_heat_y_transport -units: W -cell_methods: time: mean -long_name: Ocean Heat Y Transport -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfy -type: real -!---------------------------------- -! - -!============ -variable_entry: hfyba -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_heat_y_transport_due_to_bolus_advection -units: W -cell_methods: time: mean -long_name: Ocean Heat Y Transport due to Bolus Advection -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfyba -type: real -!---------------------------------- -! - -!============ -variable_entry: hfydiff -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_heat_y_transport_due_to_diffusion -units: W -cell_methods: time: mean -long_name: Ocean Heat Y Transport due to Diffussion -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfydiff -type: real -!---------------------------------- -! - -!============ -variable_entry: hfxba -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_heat_x_transport_due_to_bolus_advection -units: W -cell_methods: time: mean -long_name: Ocean Heat X Transport due to Bolus Advection -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfxba -type: real -!---------------------------------- -! - -!============ -variable_entry: hfxdiff -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_heat_x_transport_due_to_diffusion -units: W -cell_methods: time: mean -long_name: Ocean Heat X Transport due to Diffusion -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfxdiff -type: real -!---------------------------------- -! - -!============ -variable_entry: hfbasin -!============ -modeling_realm: ocean -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: northward_ocean_heat_transport -units: W -cell_methods: time: mean longitude: mean -long_name: Northward Ocean Heat Transport -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: latitude basin time -out_name: hfbasin -type: real -!---------------------------------- -! - -!============ -variable_entry: hfbasinba -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: northward_ocean_heat_transport_due_to_bolus_advection -units: W -cell_methods: time: mean longitude: mean -long_name: Northward Ocean Heat Transport due to Bolus Advection -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: latitude basin time -out_name: hfbasinba -type: real -!---------------------------------- -! - -!============ -variable_entry: hfbasindiff -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: northward_ocean_heat_transport_due_to_diffusion -units: W -cell_methods: time: mean longitude: mean -long_name: Northward Ocean Heat Transport due to Diffussion -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: latitude basin time -out_name: hfbasindiff -type: real -!---------------------------------- -! - -!============ -variable_entry: htovgyre -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: northward_ocean_heat_transport_due_to_gyre -units: W -cell_methods: time: mean longitude: mean -long_name: Northward Ocean Heat Transport due to Gyre -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: latitude basin time -out_name: htovgyre -type: real -valid_min: -1.772e+15 -valid_max: 2.691e+15 -ok_min_mean_abs: 5.91e+13 -ok_max_mean_abs: 2.364e+14 -!---------------------------------- -! - -!============ -variable_entry: htovovrt -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: northward_ocean_heat_transport_due_to_overturning -units: W -cell_methods: time: mean longitude: mean -long_name: Northward Ocean Heat Transport due to Overturning -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: latitude basin time -out_name: htovovrt -type: real -valid_min: -6.263e+15 -valid_max: 1.305e+16 -ok_min_mean_abs: 2.039e+14 -ok_max_mean_abs: 8.156e+14 -!---------------------------------- -! - -!============ -variable_entry: sltovgyre -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: northward_ocean_salt_transport_due_to_gyre -units: kg s-1 -cell_methods: time: mean longitude: mean -long_name: Northward Ocean Salt Transport due to Gyre -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: latitude basin time -out_name: sltovgyre -type: real -!---------------------------------- -! 
- -!============ -variable_entry: sltovovrt -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: northward_ocean_salt_transport_due_to_overturning -units: kg s-1 -cell_methods: time: mean longitude: mean -long_name: Northward Ocean Salt Transport due to Overturning -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: latitude basin time -out_name: sltovovrt -type: real -!---------------------------------- -! - -!============ -variable_entry: mfo -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_water_transport_across_line -units: kg s-1 -cell_methods: time: mean -long_name: Sea Water Transport -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: time oline -out_name: mfo -type: real -!---------------------------------- -! - -!============ -variable_entry: pr -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: rainfall_flux -units: kg m-2 s-1 -cell_methods: time: mean area: mean where ice_free_sea over sea -cell_measures: area: areacello -long_name: Rainfall Flux where Ice Free Ocean over Sea -comment: computed as the total mass of liquid water falling as liquid rain into the ice-free portion of the ocean divided by the area of the ocean portion of the grid cell. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: pr -type: real -valid_min: 0 -valid_max: 0.001254 -ok_min_mean_abs: 2.156e-05 -ok_max_mean_abs: 3.215e-05 -!---------------------------------- -! - -!============ -variable_entry: prsn -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: snowfall_flux -units: kg m-2 s-1 -cell_methods: time: mean area: mean where ice_free_sea over sea -cell_measures: area: areacello -long_name: Snowfall Flux where Ice Free Ocean over Sea -comment: computed as the total mass per unit time of solid-phase precipitation falling into the ice-free portion of the ocean divided by the area of the ocean portion of the grid cell. (Snowfall flux includes all types of solid-phase precipitation.) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: prsn -type: real -valid_min: 0 -valid_max: 0.0002987 -ok_min_mean_abs: 1.449e-06 -ok_max_mean_abs: 6.11e-06 -!---------------------------------- -! - -!============ -variable_entry: evs -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: water_evaporation_flux -units: kg m-2 s-1 -cell_methods: time: mean area: mean where ice_free_sea over sea -cell_measures: area: areacello -long_name: Water Evaporation Flux Where Ice Free Ocean over Sea -comment: computed as the total mass of water vapor evaporating from the ice-free portion of the ocean divided by the area of the ocean portion of the grid cell. -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: evs -type: real -!---------------------------------- -! - -!============ -variable_entry: friver -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: water_flux_into_sea_water_from_rivers -units: kg m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Water Flux into Sea Water From Rivers -comment: computed as the river flux of water into the ocean divided by the area of the ocean portion of the grid cell. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: friver -type: real -!---------------------------------- -! - -!============ -variable_entry: ficeberg -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: water_flux_into_sea_water_from_icebergs -units: kg m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Water Flux into Sea Water From Icebergs -comment: computed as the iceberg melt water flux into the ocean divided by the area of the ocean portion of the grid cell. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: ficeberg -type: real -!---------------------------------- -! - -!============ -variable_entry: ficeberg2d -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: water_flux_into_sea_water_from_icebergs -units: kg m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Water Flux into Sea Water From Icebergs -comment: computed as the iceberg melt water flux into the ocean divided by the area of the ocean portion of the grid cell. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: ficeberg -type: real -!---------------------------------- -! - -!============ -variable_entry: fsitherm -!============ -modeling_realm: ocean seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: water_flux_into_sea_water_due_to_sea_ice_thermodynamics -units: kg m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Water Flux into Sea Water due to Sea Ice Thermodynamics -comment: computed as the sea ice thermodynamic water flux into the ocean divided by the area of the ocean portion of the grid cell. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: fsitherm -type: real -!---------------------------------- -! - -!============ -variable_entry: wfo -!============ -modeling_realm: ocean -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: water_flux_into_sea_water -units: kg m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Water Flux into Sea Water -comment: computed as the water flux into the ocean divided by the area of the ocean portion of the grid cell. This is the sum of the next two variables in this table. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: wfo -type: real -valid_min: -0.005411 -valid_max: 0.03952 -ok_min_mean_abs: 1.831e-05 -ok_max_mean_abs: 6.051e-05 -!---------------------------------- -! - -!============ -variable_entry: wfonocorr -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: water_flux_into_sea_water_without_flux_correction -units: kg m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Water Flux into Sea Water Without Flux Correction -comment: computed as the water flux (without flux correction) into the ocean divided by the area of the ocean portion of the grid cell. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: wfonocorr -type: real -!---------------------------------- -! - -!============ -variable_entry: wfcorr -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: water_flux_correction -units: kg m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Water Flux Correction -comment: Positive flux implies correction adds water to ocean. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: wfcorr -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: vsfpr -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: virtual_salt_flux_into_sea_water_due_to_rainfall -units: kg m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Virtual Salt Flux into Sea Water due to Rainfall -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: vsfpr -type: real -!---------------------------------- -! - -!============ -variable_entry: vsfevap -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: virtual_salt_flux_into_sea_water_due_to_evaporation -units: kg m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Virtual Salt Flux into Sea Water due to Evaporation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: vsfevap -type: real -!---------------------------------- -! - -!============ -variable_entry: vsfriver -!============ -modeling_realm: ocean -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: virtual_salt_flux_into_sea_water_from_rivers -units: kg m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Virtual Salt Flux into Sea Water From Rivers -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: vsfriver -type: real -!---------------------------------- -! - -!============ -variable_entry: vsfsit -!============ -modeling_realm: ocean seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: virtual_salt_flux_into_sea_water_due_to_sea_ice_thermodynamics -units: kg m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Virtual Salt Flux into Sea Water due to Sea Ice Thermodynamics -comment: This variable measures the virtual salt flux into sea water due to the melting of sea ice. It is set to zero in models which receive a real water flux. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: vsfsit -type: real -!---------------------------------- -! - -!============ -variable_entry: vsf -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: virtual_salt_flux_into_sea_water -units: kg m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Virtual Salt Flux into Sea Water -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: vsf -type: real -!---------------------------------- -! - -!============ -variable_entry: vsfcorr -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: virtual_salt_flux_correction -units: kg m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Virtual Salt Flux Correction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: vsfcorr -type: real -!---------------------------------- -! - -!============ -variable_entry: sfdsi -!============ -modeling_realm: ocean seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: downward_sea_ice_basal_salt_flux -units: kg m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Downward Sea Ice Basal Salt Flux -comment: This field is physical, and it arises since sea ice has a nonzero salt content, so it exchanges salt with the liquid ocean upon melting and freezing. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: sfdsi -type: real -!---------------------------------- -! - -!============ -variable_entry: sfriver -!============ -modeling_realm: ocean -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: salt_flux_into_sea_water_from_rivers -units: kg m-2 s-1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Salt Flux into Sea Water from Rivers -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: sfriver -type: real -!---------------------------------- -! - -!============ -variable_entry: hfgeou -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: upward_geothermal_heat_flux_at_sea_floor -units: W m-2 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Upward Geothermal Heat Flux at Sea Floor -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfgeou -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: hfrainds -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: temperature_flux_due_to_rainfall_expressed_as_heat_flux_into_sea_water -units: W m-2 -cell_methods: time: mean area: mean where ice_free_sea over sea -cell_measures: area: areacello -long_name: Temperature Flux due to Rainfall Expressed as Heat Flux into Sea Water -comment: This is defined as ""where ice_free_sea over sea""; i.e., the total flux (considered here) entering the ice-free portion of the grid cell divided by the area of the ocean portion of the grid cell. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfrainds -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: hfevapds -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: temperature_flux_due_to_evaporation_expressed_as_heat_flux_out_of_sea_water -units: W m-2 -cell_methods: time: mean area: mean where ice_free_sea over sea -cell_measures: area: areacello -long_name: Temperature Flux due to Evaporation Expressed as Heat Flux Out of Sea Water -comment: "This is defined as ""where ice_free_sea over sea""" -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfevapds -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: hfrunoffds -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: temperature_flux_due_to_runoff_expressed_as_heat_flux_into_sea_water -units: W m-2 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Temperature Flux due to Runoff Expressed as Heat Flux into Sea Water -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: hfrunoffds -type: real -!---------------------------------- -! 
- -!============ -variable_entry: hfrunoffds2d -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: temperature_flux_due_to_runoff_expressed_as_heat_flux_into_sea_water -units: W m-2 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Temperature Flux due to Runoff Expressed as Heat Flux into Sea Water -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfrunoffds -type: real -!---------------------------------- -! - -!============ -variable_entry: hfsnthermds -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: heat_flux_into_sea_water_due_to_snow_thermodynamics -units: W m-2 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Heat Flux into Sea Water due to Snow Thermodynamics -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: hfsnthermds -type: real -!---------------------------------- -! - -!============ -variable_entry: hfsnthermds2d -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: heat_flux_into_sea_water_due_to_snow_thermodynamics -units: W m-2 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Heat Flux into Sea Water due to Snow Thermodynamics -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfsnthermds -type: real -!---------------------------------- -! - -!============ -variable_entry: hfsifrazil -!============ -modeling_realm: ocean seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: heat_flux_into_sea_water_due_to_freezing_of_frazil_ice -units: W m-2 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Heat Flux into Sea Water due to Frazil Ice Formation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: hfsifrazil -type: real -!---------------------------------- -! - -!============ -variable_entry: hfsifrazil2d -!============ -modeling_realm: ocean seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: heat_flux_into_sea_water_due_to_freezing_of_frazil_ice -units: W m-2 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Heat Flux into Sea Water due to Frazil Ice Formation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfsifrazil -type: real -!---------------------------------- -! - -!============ -variable_entry: hfsithermds -!============ -modeling_realm: ocean seaIce -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: heat_flux_into_sea_water_due_to_sea_ice_thermodynamics -units: W m-2 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Heat Flux into Sea Water due to Sea Ice Thermodynamics -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: hfsithermds -type: real -!---------------------------------- -! - -!============ -variable_entry: hfsithermds2d -!============ -modeling_realm: ocean seaIce -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: heat_flux_into_sea_water_due_to_sea_ice_thermodynamics -units: W m-2 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Heat Flux into Sea Water due to Sea Ice Thermodynamics -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfsithermds -type: real -!---------------------------------- -! - -!============ -variable_entry: hfibthermds -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: heat_flux_into_sea_water_due_to_iceberg_thermodynamics -units: W m-2 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Heat Flux into Sea Water due to Iceberg Thermodynamics -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: hfibthermds -type: real -!---------------------------------- -! - -!============ -variable_entry: hfibthermds2d -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: heat_flux_into_sea_water_due_to_iceberg_thermodynamics -units: W m-2 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Heat Flux into Sea Water due to Iceberg Thermodynamics -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfibthermds -type: real -!---------------------------------- -! - -!============ -variable_entry: rlds -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_net_downward_longwave_flux -units: W m-2 -cell_methods: time: mean area: mean where ice_free_sea over sea -cell_measures: area: areacello -long_name: Surface Net Downward Longwave Radiation -comment: "This is defined as ""where ice_free_sea over sea""" -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rlds -type: real -positive: down -valid_min: 30.71 -valid_max: 520.5 -ok_min_mean_abs: 271.2 -ok_max_mean_abs: 323.6 -!---------------------------------- -! - -!============ -variable_entry: hfls -!============ -modeling_realm: ocean -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: surface_downward_latent_heat_flux -units: W m-2 -cell_methods: time: mean area: mean where ice_free_sea over sea -cell_measures: area: areacello -long_name: Surface Downward Latent Heat Flux -comment: "This is defined as ""where ice_free_sea over sea""" -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfls -type: real -positive: down -valid_min: -76.77 -valid_max: 790.7 -ok_min_mean_abs: 50.39 -ok_max_mean_abs: 73.2 -!---------------------------------- -! - -!============ -variable_entry: hfss -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downward_sensible_heat_flux -units: W m-2 -cell_methods: time: mean area: mean where ice_free_sea over sea -cell_measures: area: areacello -long_name: Surface Downward Sensible Heat Flux -comment: "This is defined as ""where ice_free_sea over sea""" -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfss -type: real -positive: down -valid_min: -264.5 -valid_max: 844.8 -ok_min_mean_abs: 10.7 -ok_max_mean_abs: 34.84 -!---------------------------------- -! - -!============ -variable_entry: rsntds -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: net_downward_shortwave_flux_at_sea_water_surface -units: W m-2 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Net Downward Shortwave Radiation at Sea Water Surface -comment: This is the flux into the surface of liquid sea water only. This excludes shortwave flux absorbed by sea ice, but includes any light that passes through the ice and is absorbed by the ocean. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rsntds -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rsds -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: downwelling_shortwave_flux_in_sea_water -units: W m-2 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Downwelling Shortwave Radiation in Sea Water -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: rsds -type: real -positive: down -valid_min: -0.002946 -valid_max: 524.4 -ok_min_mean_abs: 143.9 -ok_max_mean_abs: 181.6 -!---------------------------------- -! - -!============ -variable_entry: hfcorr -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: heat_flux_correction -units: W m-2 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Heat Flux Correction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfcorr -type: real -positive: down -!---------------------------------- -! 
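The flux entries above each declare a positive direction ("up" or "down") alongside units and cell_methods; a consumer comparing, say, hfss (positive: down) with hfgeou (positive: up) has to reconcile the two conventions first. A minimal sketch of that reconciliation, assuming nothing beyond NumPy; the helper name normalize_flux_sign and the sample values are invented for illustration, not an ESMValTool or CMOR API:

import numpy as np

def normalize_flux_sign(data, declared_positive, wanted_positive):
    # Hypothetical helper: re-express a flux whose table entry declares
    # `positive: declared_positive` in the `wanted_positive` convention,
    # negating it when the two conventions differ.
    if {declared_positive, wanted_positive} - {"up", "down"}:
        raise ValueError("positive must be 'up' or 'down'")
    return data if declared_positive == wanted_positive else -data

# A downward surface heat flux of +10 W m-2, expressed as an upward flux:
print(normalize_flux_sign(np.array([10.0]), "down", "up"))  # -> [-10.]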
- -!============ -variable_entry: hfds -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downward_heat_flux_in_sea_water -units: W m-2 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Downward Heat Flux at Sea Water Surface -comment: "This is the net flux of heat entering the liquid water column through its upper surface (excluding any ""flux adjustment"") ." -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfds -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: tauuo -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downward_x_stress -units: N m-2 -cell_methods: time: mean area: mean where sea -long_name: Surface Downward X Stress -comment: This is the stress on the liquid ocean from overlying atmosphere, sea ice, ice shelf, etc. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: tauuo -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: tauvo -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downward_y_stress -units: N m-2 -cell_methods: time: mean area: mean where sea -long_name: Surface Downward Y Stress -comment: This is the stress on the liquid ocean from overlying atmosphere, sea ice, ice shelf, etc. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: tauvo -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: tauucorr -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downward_x_stress_correction -units: N m-2 -cell_methods: time: mean area: mean where sea -long_name: Surface Downward X Stress Correction -comment: This is the stress on the liquid ocean from overlying atmosphere, sea ice, ice shelf, etc. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: tauucorr -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: tauvcorr -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downward_y_stress_correction -units: N m-2 -cell_methods: time: mean area: mean where sea -long_name: Surface Downward Y Stress Correction -comment: This is the stress on the liquid ocean from overlying atmosphere, sea ice, ice shelf, etc. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: tauvcorr -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: zfull -!============ -modeling_realm: ocean -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: depth_below_geoid -units: m -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Depth Below Geoid of Ocean Layer -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: zfull -type: real -!---------------------------------- -! - -!============ -variable_entry: zhalf -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: depth_below_geoid -units: m -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Depth Below Geoid of Interfaces Between Ocean Layers -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: zhalf -type: real -!---------------------------------- -! - diff --git a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_Oyr b/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_Oyr deleted file mode 100644 index 98cba4f62b..0000000000 --- a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_Oyr +++ /dev/null @@ -1,2120 +0,0 @@ -table_id: Table Oyr -modeling_realm: ocean - -frequency: yr - -cmor_version: 2.6 ! minimum version of CMOR that can read this table -cf_version: 1.4 ! version of CF that output conforms to -project_id: CMIP5 ! project id -table_date: 17 July 2013 ! date this table was constructed - -missing_value: 1.e20 ! value used to indicate a missing value - ! in arrays output by netCDF as 32-bit IEEE - ! floating-point numbers (float or real) - -baseURL: http://cmip-pcmdi.llnl.gov/CMIP5/dataLocation -product: output - -required_global_attributes: creation_date tracking_id forcing model_id parent_experiment_id parent_experiment_rip branch_time contact institute_id ! 
space separated required global attributes
-
-forcings: N/A Nat Ant GHG SD SI SA TO SO Oz LU Sl Vl SS Ds BC MD OC AA
-
-expt_id_ok: '10- or 30-year run initialized in year XXXX' 'decadalXXXX'
-expt_id_ok: 'volcano-free hindcast initialized in year XXXX' 'noVolcXXXX'
-expt_id_ok: 'prediction with 2010 volcano' 'volcIn2010'
-expt_id_ok: 'pre-industrial control' 'piControl'
-expt_id_ok: 'historical' 'historical'
-expt_id_ok: 'historical extension' 'historicalExt'
-expt_id_ok: 'other historical forcing' 'historicalMisc'
-expt_id_ok: 'mid-Holocene' 'midHolocene'
-expt_id_ok: 'last glacial maximum' 'lgm'
-expt_id_ok: 'last millennium' 'past1000'
-expt_id_ok: 'RCP4.5' 'rcp45'
-expt_id_ok: 'RCP8.5' 'rcp85'
-expt_id_ok: 'RCP2.6' 'rcp26'
-expt_id_ok: 'RCP6' 'rcp60'
-expt_id_ok: 'ESM pre-industrial control' 'esmControl'
-expt_id_ok: 'ESM historical' 'esmHistorical'
-expt_id_ok: 'ESM RCP8.5' 'esmrcp85'
-expt_id_ok: 'ESM fixed climate 1' 'esmFixClim1'
-expt_id_ok: 'ESM fixed climate 2' 'esmFixClim2'
-expt_id_ok: 'ESM feedback 1' 'esmFdbk1'
-expt_id_ok: 'ESM feedback 2' 'esmFdbk2'
-expt_id_ok: '1 percent per year CO2' '1pctCO2'
-expt_id_ok: 'abrupt 4XCO2' 'abrupt4xCO2'
-expt_id_ok: 'natural-only' 'historicalNat'
-expt_id_ok: 'GHG-only' 'historicalGHG'
-expt_id_ok: 'AMIP' 'amip'
-expt_id_ok: '2030 time-slice' 'sst2030'
-expt_id_ok: 'control SST climatology' 'sstClim'
-expt_id_ok: 'CO2 forcing' 'sstClim4xCO2'
-expt_id_ok: 'all aerosol forcing' 'sstClimAerosol'
-expt_id_ok: 'sulfate aerosol forcing' 'sstClimSulfate'
-expt_id_ok: '4xCO2 AMIP' 'amip4xCO2'
-expt_id_ok: 'AMIP plus patterned anomaly' 'amipFuture'
-expt_id_ok: 'aqua planet control' 'aquaControl'
-expt_id_ok: '4xCO2 aqua planet' 'aqua4xCO2'
-expt_id_ok: 'aqua planet plus 4K anomaly' 'aqua4K'
-expt_id_ok: 'AMIP plus 4K anomaly' 'amip4K'
-
-
-approx_interval: 365.000000 ! approximate spacing between successive time
- ! samples (in units of the output time
- ! coordinate).
-
-generic_levels: olevel
-
-!============
-axis_entry: longitude
-!============
-!----------------------------------
-! Axis attributes:
-!----------------------------------
-standard_name: longitude
-units: degrees_east
-axis: X ! X, Y, Z, T (default: undeclared)
-long_name: longitude
-!----------------------------------
-! Additional axis information:
-!----------------------------------
-out_name: lon
-valid_min: 0.0
-valid_max: 360.0
-stored_direction: increasing
-type: double
-must_have_bounds: yes
-!----------------------------------
-!
-
-
-!============
-axis_entry: latitude
-!============
-!----------------------------------
-! Axis attributes:
-!----------------------------------
-standard_name: latitude
-units: degrees_north
-axis: Y ! X, Y, Z, T (default: undeclared)
-long_name: latitude
-!----------------------------------
-! Additional axis information:
-!----------------------------------
-out_name: lat
-valid_min: -90.0
-valid_max: 90.0
-stored_direction: increasing
-type: double
-must_have_bounds: yes
-!----------------------------------
-!
-
-
-!============
-axis_entry: time
-!============
-!----------------------------------
-! Axis attributes:
-!----------------------------------
-standard_name: time
-units: days since ?
-axis: T ! X, Y, Z, T (default: undeclared)
-long_name: time
-!----------------------------------
-! Additional axis information:
-!----------------------------------
-out_name: time
-stored_direction: increasing
-type: double
-must_have_bounds: yes
-!----------------------------------
-!
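The deleted table is plain text: `!` starts a comment, and `key: value` pairs accumulate into whichever axis_entry or variable_entry block was opened last. A minimal parser sketch under those assumptions (illustrative only; the function name parse_cmip5_table is invented, and a real reader such as CMOR's handles much more of the format, e.g. the header keys and expt_id_ok pairs that are skipped here):

def parse_cmip5_table(text):
    """Collect axis_entry/variable_entry blocks into dicts (sketch only)."""
    entries, current = {}, None
    for raw in text.splitlines():
        line = raw.split("!", 1)[0].strip()  # drop '!' comments and rules
        if ":" not in line:
            continue
        key, _, value = line.partition(":")
        key, value = key.strip(), value.strip()
        if key in ("axis_entry", "variable_entry"):
            current = entries.setdefault((key, value), {})
        elif current is not None:
            current[key] = value  # e.g. units, cell_methods, out_name
    return entries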
- -!============ -axis_entry: depth_coord -!============ -! -! This vertical coordinate is used in z-coordinate models -! The units are meters (m), and it has a value of 0. at the surface -! and becomes more and more positive with depth. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: depth -units: m -axis: Z -positive: down -long_name: ocean depth coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: increasing -valid_min: 0. -valid_max: 12000. -!---------------------------------- -! -!============ -axis_entry: olev -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -axis: Z ! X, Y, Z, T (default: undeclared) -positive: down ! up or down (default: undeclared) -long_name: generic ocean level -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lev -stored_direction: increasing -type: double -must_have_bounds: no -!---------------------------------- -! -!============ -axis_entry: ocean_double_sigma -!============ -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: ocean_double_sigma -axis: Z -positive: up -long_name: ocean double sigma coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -formula: for k <= k_c:\n z= sigma*f \n for k > k_c:\n z= f + (sigma-1)*(depth-f) \n f= 0.5*(z1+ z2) + 0.5*(z1-z2)* tanh(2*a/(z1-z2)*(depth-href)) -z_factors: sigma: sigma depth: depth z1: z1 z2: z2 a: a_coeff href: href k_c: k_c -z_bounds_factors: sigma: sigma_bnds depth: depth z1: z1 z2: z2 a: a href: href k_c: k_c -!---------------------------------- -! -!============ -axis_entry: ocean_sigma_z -!============ -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: ocean_sigma_z -axis: Z -long_name: ocean sigma over z coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -formula: for k <= nsigma: z = eta + sigma*(min(depth_c,depth)+eta) ; for k > nsigma: z = zlev -z_factors: sigma: sigma eta: eta depth: depth depth_c: depth_c nsigma: nsigma zlev: zlev -z_bounds_factors: sigma: sigma_bnds eta: eta depth: depth depth_c: depth_c nsigma: nsigma zlev: zlev_bnds -!---------------------------------- -! -!============ -axis_entry: ocean_s -!============ -! -! This coordinate is dimensionless and varies from 0 at the surface to -1. at the ocean floor. -! The values of s, which appears in the formula below, should be stored as ocean_s. -! Note that in the netCDF file the variable will be named "lev", not ocean_s. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: ocean_s_coordinate -axis: Z -positive: up -long_name: ocean s-coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: -1. -valid_max: 0. 
-formula: z = eta*(1+s) + depth_c*s + (depth-depth_c)*C \n where \n C=(1-b)*sinh(a*s)/sinh(a) +\n b*(tanh(a*(s+0.5))/(2*tanh(0.5*a)) - 0.5) -z_factors: s: lev eta: eta depth: depth a: a_coeff b: b_coeff depth_c: depth_c -z_bounds_factors: s: lev_bnds eta: eta depth: depth a: a b: b depth_c: depth_c -!---------------------------------- -! -!============ -axis_entry: ocean_sigma -!============ -! -! This coordinate is dimensionless and varies from 0 at the surface to -1. at the ocean floor. -! The values of sigma, which appears in the formula below, should be stored as ocean_sigma. -! Note that in the netCDF file the variable will be named "lev", not ocean_sigma. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: ocean_sigma_coordinate -axis: Z -positive: up -long_name: ocean sigma coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: -1. -valid_max: 0. -formula: z = eta + sigma*(depth+eta) -z_factors: sigma: lev eta: eta depth: depth -z_bounds_factors: sigma: lev_bnds eta: eta depth: depth -!---------------------------------- -! -! -! *************************************************************** -! -! Vertical coordinate formula_terms: -! -! *************************************************************** -! -!============ -variable_entry: eta -!============ -!---------------------------------- -! Variable attributes: -!---------------------------------- -units: m -cell_methods: time: mean -long_name: Sea Surface Height -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -type: real -!---------------------------------- -! -! -!============ -variable_entry: depth -!============ -!---------------------------------- -! Variable attributes: -!---------------------------------- -units: m -long_name: Sea Floor Depth -comment: Ocean bathymetry. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude -out_name: depth -type: real -valid_min: 0. -valid_max: 12000. -ok_min_mean_abs: 2000. -ok_max_mean_abs: 5000. -!---------------------------------- -! -! -!============ -variable_entry: sigma -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: sigma(k) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: olevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: sigma_bnds -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: sigma(k+1/2) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: olevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: zlev -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: zlev(k) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: olevel -type: double -!---------------------------------- -! -! 
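The dimensionless-coordinate entries above carry their geometry in the formula / z_factors pairs; for ocean_sigma the table gives z = eta + sigma*(depth+eta). A worked example of just that formula; the sample eta, depth, and sigma values are invented for illustration, while in practice they come from the eta, depth, and lev variables named in z_factors:

import numpy as np

eta = 0.5                                   # sea surface height (m)
depth = 4000.0                              # sea floor depth (m)
sigma = np.array([0.0, -0.25, -0.5, -1.0])  # 0 at surface, -1 at floor

# Height of each model level (m, positive up), per the ocean_sigma formula:
z = eta + sigma * (depth + eta)
print(z)  # -> 0.5, -999.625, -1999.75, -4000.0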
-!============ -variable_entry: zlev_bnds -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: zlev(k+1/2) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: olevel -type: double -!---------------------------------- -! -! -! -!============ -variable_entry: depth_c -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: depth_c -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: double -!---------------------------------- -! -! -!============ -variable_entry: a -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: coefficient a -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: double -!---------------------------------- -! -! -!============ -variable_entry: b -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: coefficient b -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: double -!---------------------------------- -! -! -!============ -variable_entry: nsigma -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: nsigma -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: integer -!---------------------------------- -! -! -!============ -variable_entry: z1 -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: z1 -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: double -!---------------------------------- -! -! -!============ -variable_entry: z2 -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: z2 -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: double -!---------------------------------- -! -! -!============ -variable_entry: href -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: href -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: double -!---------------------------------- -! -! -!============ -variable_entry: k_c -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: k_c -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: integer -!---------------------------------- -! -! - -!============ -variable_entry: dissic -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! 
Variable attributes:
-!----------------------------------
-standard_name: mole_concentration_of_dissolved_inorganic_carbon_in_sea_water
-units: mol m-3
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Dissolved Inorganic Carbon Concentration
-comment: Dissolved inorganic carbon (CO3+HCO3+H2CO3) concentration
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: dissic
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: dissoc
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: mole_concentration_of_dissolved_organic_carbon_in_sea_water
-units: mol m-3
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Dissolved Organic Carbon Concentration
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: dissoc
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: phyc
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: mole_concentration_of_phytoplankton_expressed_as_carbon_in_sea_water
-units: mol m-3
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Phytoplankton Carbon Concentration
-comment: sum of phytoplankton carbon component concentrations. In most (all?) cases this is the sum of phycdiat and phycmisc (i.e., ""Diatom Carbon Concentration"" and ""Non-Diatom Phytoplankton Carbon Concentration"")
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: phyc
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: zooc
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: mole_concentration_of_zooplankton_expressed_as_carbon_in_sea_water
-units: mol m-3
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Zooplankton Carbon Concentration
-comment: sum of zooplankton carbon component concentrations
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: zooc
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: bacc
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: mole_concentration_of_bacteria_expressed_as_carbon_in_sea_water
-units: mol m-3
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Bacterial Carbon Concentration
-comment: sum of bacterial carbon component concentrations
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: bacc
-type: real
-!----------------------------------
-!
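The biogeochemical tracers in this table (dissic, dissoc, phyc, zooc, bacc, ...) are all stored in mol m-3, whereas ocean observations are often reported per kilogram of seawater, so comparisons need a density conversion. A short sketch of that conversion; the density value and the sample DIC concentration are assumed round numbers, not data from the table:

rho_sw = 1025.0              # assumed mean seawater density, kg m-3
dissic_mol_m3 = 2.2          # illustrative surface DIC value, mol m-3
dissic_umol_kg = dissic_mol_m3 / rho_sw * 1e6
print(round(dissic_umol_kg, 1))  # -> 2146.3 umol kg-1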
- -!============ -variable_entry: detoc -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_organic_detritus_expressed_as_carbon_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Detrital Organic Carbon Concentration -comment: sum of detrital organic carbon component concentrations -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: detoc -type: real -!---------------------------------- -! - -!============ -variable_entry: calc -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_calcite_expressed_as_carbon_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Calcite Concentration -comment: sum of particulate calcite component concentrations (e.g. Phytoplankton, Detrital, etc.) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: calc -type: real -!---------------------------------- -! - -!============ -variable_entry: arag -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_aragonite_expressed_as_carbon_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Aragonite Concentration -comment: sum of particulate aragonite components (e.g. Phytoplankton, Detrital, etc.) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: arag -type: real -!---------------------------------- -! - -!============ -variable_entry: phydiat -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_diatoms_expressed_as_carbon_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Mole Concentration of Diatoms expressed as Carbon in Sea Water -comment: carbon from the diatom phytoplankton component concentration alone -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: phydiat -type: real -!---------------------------------- -! - -!============ -variable_entry: phydiaz -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_diazotrophs_expressed_as_carbon_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Mole Concentration of Diazotrophs Expressed as Carbon in Sea Water -comment: carbon concentration from the diazotrophic phytoplankton component alone -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: phydiaz -type: real -!---------------------------------- -! - -!============ -variable_entry: phycalc -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_calcareous_phytoplankton_expressed_as_carbon_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Mole Concentration of Calcareous Phytoplankton expressed as Carbon in Sea Water -comment: carbon concentration from calcareous (calcite-producing) phytoplankton component alone -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: phycalc -type: real -!---------------------------------- -! - -!============ -variable_entry: phypico -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_picophytoplankton_expressed_as_carbon_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Mole Concentration of Picophytoplankton expressed as Carbon in Sea Water -comment: carbon concentration from the picophytoplankton (<2 um) component alone -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: phypico -type: real -!---------------------------------- -! - -!============ -variable_entry: phymisc -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_miscellaneous_phytoplankton_expressed_as_carbon_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Mole Concentration of Miscellaneous Phytoplankton expressed as Carbon in Sea Water -comment: carbon concentration from additional phytoplankton component alone -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: phymisc -type: real -!---------------------------------- -! - -!============ -variable_entry: zmicro -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_microzooplankton_expressed_as_carbon_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Mole Concentration of Microzooplankton expressed as Carbon in Sea Water -comment: carbon concentration from the microzooplankton (<20 um) component alone -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: zmicro -type: real -!---------------------------------- -! - -!============ -variable_entry: zmeso -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_mesozooplankton_expressed_as_carbon_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Mole Concentration of Mesozooplankton expressed as Carbon in Sea Water -comment: carbon concentration from mesozooplankton (20-200 um) component alone -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: zmeso -type: real -!---------------------------------- -! - -!============ -variable_entry: zoocmisc -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_miscellaneous_zooplankton_expressed_as_carbon_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Other Zooplankton Carbon Concentration -comment: carbon from additional zooplankton component concentrations alone (e.g. Micro, meso). Since the models all have different numbers of components, this variable has been included to provide a check for intercomparison between models since some phytoplankton groups are supersets. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: zoocmisc -type: real -!---------------------------------- -! - -!============ -variable_entry: talk -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_water_alkalinity_expressed_as_mole_equivalent -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Total Alkalinity -comment: total alkalinity equivalent concentration (including carbonate, nitrogen, silicate, and borate components) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: talk -type: real -!---------------------------------- -! - -!============ -variable_entry: ph -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_water_ph_reported_on_total_scale -units: 1 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: pH -comment: negative log of hydrogen ion concentration with the concentration expressed as mol H kg-1. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: ph -type: real -!---------------------------------- -! - -!============ -variable_entry: o2 -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_dissolved_molecular_oxygen_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Dissolved Oxygen Concentration -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: o2 -type: real -!---------------------------------- -! - -!============ -variable_entry: no3 -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_nitrate_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Dissolved Nitrate Concentration -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: no3 -type: real -!---------------------------------- -! - -!============ -variable_entry: nh4 -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_ammonium_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Dissolved Ammonium Concentration -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: nh4 -type: real -!---------------------------------- -! - -!============ -variable_entry: po4 -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_phosphate_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Dissolved Phosphate Concentration -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: po4 -type: real -!---------------------------------- -! - -!============ -variable_entry: dfe -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_dissolved_iron_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Dissolved Iron Concentration -comment: dissolved iron in sea water is meant to include both Fe2+ and Fe3+ ions (but not, e.g., particulate detrital iron) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: dfe -type: real -!---------------------------------- -! - -!============ -variable_entry: si -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_silicate_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Dissolved Silicate Concentration -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: si -type: real -!---------------------------------- -! - -!============ -variable_entry: chl -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_phytoplankton_expressed_as_chlorophyll_in_sea_water -units: kg m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Total Chlorophyll Mass Concentration -comment: sum of chlorophyll from all phytoplankton group concentrations. In most models this is equal to chldiat+chlmisc, that is the sum of ""Diatom Chlorophyll Mass Concentration"" plus ""Other Phytoplankton Chlorophyll Mass Concentration"" -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: chl -type: real -!---------------------------------- -! - -!============ -variable_entry: chldiat -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_diatoms_expressed_as_chlorophyll_in_sea_water -units: kg m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Diatom Chlorophyll Mass Concentration -comment: chlorophyll from diatom phytoplankton component concentration alone -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: chldiat -type: real -!---------------------------------- -! - -!============ -variable_entry: chldiaz -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_diazotrophs_expressed_as_chlorophyll_in_sea_water -units: kg m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Mass Concentration of Diazotrophs expressed as Chlorophyll in Sea Water -comment: chlorophyll concentration from the diazotrophic phytoplankton component alone -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: chldiaz -type: real -!---------------------------------- -! - -!============ -variable_entry: chlcalc -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_calcareous_phytoplankton_expressed_as_chlorophyll_in_sea_water -units: kg m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Mass Concentration of Calcareous Phytoplankton expressed as Chlorophyll in Sea Water -comment: chlorophyll concentration from the calcite-producing phytoplankton component alone -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: chlcalc -type: real -!---------------------------------- -! - -!============ -variable_entry: chlpico -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_picophytoplankton_expressed_as_chlorophyll_in_sea_water -units: kg m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Mass Concentration of Picophytoplankton expressed as Chlorophyll in Sea Water -comment: chlorophyll concentration from the picophytoplankton (<2 um) component alone -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: chlpico -type: real -!---------------------------------- -! - -!============ -variable_entry: chlmisc -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_miscellaneous_phytoplankton_expressed_as_chlorophyll_in_sea_water -units: kg m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Other Phytoplankton Chlorophyll Mass Concentration -comment: chlorophyll from additional phytoplankton component concentrations alone -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: chlmisc -type: real -!---------------------------------- -! - -!============ -variable_entry: pon -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_particulate_organic_matter_expressed_as_nitrogen_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Particulate Organic Nitrogen Concentration -comment: sum of particulate organic nitrogen component concentrations -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: pon -type: real -!---------------------------------- -! - -!============ -variable_entry: pop -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_particulate_organic_matter_expressed_as_phosphorus_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Particulate Organic Phosphorus Concentration -comment: sum of particulate organic phosphorus component concentrations -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel time -out_name: pop -type: real -!---------------------------------- -! - -!============ -variable_entry: bfe -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_concentration_of_particulate_organic_matter_expressed_as_iron_in_sea_water -units: mol m-3 -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello volume: volcello -long_name: Particulate Biogenic Iron Concentration -comment: sum of particulate organic iron component concentrations -!---------------------------------- -! 
Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: bfe
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: bsi
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: mole_concentration_of_particulate_matter_expressed_as_silicon_in_sea_water
-units: mol m-3
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Particulate Biogenic Silica Concentration
-comment: sum of particulate silica component concentrations
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: bsi
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: phyn
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: mole_concentration_of_phytoplankton_expressed_as_nitrogen_in_sea_water
-units: mol m-3
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Phytoplankton Nitrogen Concentration
-comment: sum of phytoplankton nitrogen component concentrations
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: phyn
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: phyp
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: mole_concentration_of_phytoplankton_expressed_as_phosphorus_in_sea_water
-units: mol m-3
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Phytoplankton Phosphorus Concentration
-comment: sum of phytoplankton phosphorus components
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: phyp
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: phyfe
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: mole_concentration_of_phytoplankton_expressed_as_iron_in_sea_water
-units: mol m-3
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Phytoplankton Iron Concentration
-comment: sum of phytoplankton iron component concentrations
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: phyfe
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: physi
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: mole_concentration_of_phytoplankton_expressed_as_silicon_in_sea_water
-units: mol m-3
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Phytoplankton Silica Concentration
-comment: sum of phytoplankton silica component concentrations
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: physi
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: dms
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: mole_concentration_of_dimethyl_sulfide_in_sea_water
-units: mol m-3
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Dimethyl Sulphide Concentration
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: dms
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: co3
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: mole_concentration_of_carbonate_expressed_as_carbon_in_sea_water
-units: mol m-3
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Mole Concentration of Carbonate expressed as Carbon in Sea Water
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: co3
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: co3satcalc
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: mole_concentration_of_calcite_expressed_as_carbon_in_sea_water_at_saturation
-units: mol m-3
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Mole Concentration of Calcite expressed as Carbon in Sea Water at Saturation
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: co3satcalc
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: co3satarag
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: mole_concentration_of_aragonite_expressed_as_carbon_in_sea_water_at_saturation
-units: mol m-3
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Mole Concentration of Aragonite expressed as Carbon in Sea Water at Saturation
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: co3satarag
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: pp
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production
-units: mol m-3 s-1
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Primary Carbon Production by Phytoplankton
-comment: total primary (organic carbon) production by phytoplankton
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: pp
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: pnitrate
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_nitrate_utilization
-units: mol m-3 s-1
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Primary Carbon Production by Phytoplankton due to Nitrate Uptake Alone
-comment: Primary (organic carbon) production by phytoplankton due to nitrate uptake alone
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: pnitrate
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: pbfe
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mole_concentration_of_iron_in_sea_water_due_to_biological_production
-units: mol m-3 s-1
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Biogenic Iron Production
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: pbfe
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: pbsi
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mole_concentration_of_silicon_in_sea_water_due_to_biological_production
-units: mol m-3 s-1
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Biogenic Silica Production
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: pbsi
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: pcalc
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mole_concentration_of_calcite_expressed_as_carbon_in_sea_water_due_to_biological_production
-units: mol m-3 s-1
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Calcite Production
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: pcalc
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: parag
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mole_concentration_of_aragonite_expressed_as_carbon_in_sea_water_due_to_biological_production
-units: mol m-3 s-1
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Aragonite Production
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: parag
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: expc
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: sinking_mole_flux_of_particulate_organic_matter_expressed_as_carbon_in_sea_water
-units: mol m-2 s-1
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Sinking Particulate Organic Carbon Flux
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: expc
-type: real
-positive: down
-!----------------------------------
-!
-
-!============
-variable_entry: expn
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: sinking_mole_flux_of_particulate_organic_nitrogen_in_sea_water
-units: mol m-2 s-1
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Sinking Particulate Organic Nitrogen Flux
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: expn
-type: real
-positive: down
-!----------------------------------
-!
-
-!============
-variable_entry: expp
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: sinking_mole_flux_of_particulate_organic_phosphorus_in_sea_water
-units: mol m-2 s-1
-cell_methods: time: mean area: mean where sea
-long_name: Sinking Particulate Organic Phosphorus Flux
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: expp
-type: real
-positive: down
-!----------------------------------
-!
-
-!============
-variable_entry: expcfe
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: sinking_mole_flux_of_particulate_iron_in_sea_water
-units: mol m-2 s-1
-cell_methods: time: mean area: mean where sea
-long_name: Sinking Particulate Iron Flux
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: expcfe
-type: real
-positive: down
-!----------------------------------
-!
-
-!============
-variable_entry: expsi
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: sinking_mole_flux_of_particulate_silicon_in_sea_water
-units: mol m-2 s-1
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Sinking Particulate Silica Flux
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: expsi
-type: real
-positive: down
-!----------------------------------
-!
-
-!============
-variable_entry: expcalc
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: sinking_mole_flux_of_calcite_expressed_as_carbon_in_sea_water
-units: mol m-2 s-1
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Sinking Calcite Flux
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: expcalc
-type: real
-positive: down
-!----------------------------------
-!
-
-!============
-variable_entry: exparag
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: sinking_mole_flux_of_aragonite_expressed_as_carbon_in_sea_water
-units: mol m-2 s-1
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Sinking Aragonite Flux
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: exparag
-type: real
-positive: down
-!----------------------------------
-!
-
-!============
-variable_entry: dcalc
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mole_concentration_of_calcite_expressed_as_carbon_in_sea_water_due_to_dissolution
-units: mol m-3 s-1
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Calcite Dissolution
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: dcalc
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: darag
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mole_concentration_of_aragonite_expressed_as_carbon_in_sea_water_due_to_dissolution
-units: mol m-3 s-1
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Aragonite Dissolution
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: darag
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: pdi
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production_by_diatoms
-units: mol m-3 s-1
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Diatom Primary Carbon Production
-comment: Primary (organic carbon) production by the diatom component alone
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: pdi
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: dpocdtdiaz
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production_by_diazotrophs
-units: mol m-3 s-1
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Tendency of Mole Concentration of Organic Carbon in Sea Water due to Net Primary Production by Diazotrophs
-comment: Primary (organic carbon) production by the diazotrophic phytoplankton component alone
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: dpocdtdiaz
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: dpocdtcalc
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production_by_calcareous_phytoplankton
-units: mol m-3 s-1
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Tendency of Mole Concentration of Organic Carbon in Sea Water due to Net Primary Production by Calcareous Phytoplankton
-comment: Primary (organic carbon) production by the calcite-producing phytoplankton component alone
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: dpocdtcalc
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: dpocdtpico
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production_by_picophytoplankton
-units: mol m-3 s-1
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Tendency of Mole Concentration of Organic Carbon in Sea Water due to Net Primary Production by Picophytoplankton
-comment: Primary (organic carbon) production by the picophytoplankton (<2 um) component alone
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: dpocdtpico
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: phypmisc
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production_by_miscellaneous_phytoplankton
-units: mol m-3 s-1
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Other Phytoplankton Carbon Production
-comment: Primary (organic carbon) production by other phytoplankton components alone
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: phypmisc
-type: real
-!----------------------------------
-!
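The variable_entry blocks deleted above all follow the same line-oriented "key: value" layout, with "!" lines serving as separators and comments. A minimal Python sketch of a reader for this layout; the function name parse_variable_entries and the tolerance for the diff's leading "-" markers are illustrative, not part of ESMValTool:

def parse_variable_entries(table_text):
    """Collect the key/value attribute lines of each variable_entry block."""
    entries = {}
    current = None
    for raw in table_text.splitlines():
        line = raw.lstrip('-').strip()  # tolerate the diff's "-" prefixes
        if not line or line.startswith('!'):
            continue  # skip separators/comments such as !----------
        key, _, value = line.partition(':')
        key, value = key.strip(), value.strip()
        if key == 'variable_entry':
            current = entries.setdefault(value, {})
        elif key == 'axis_entry':
            current = None  # axis blocks are not collected in this sketch
        elif current is not None:
            current[key] = value
    return entries

For example, parse_variable_entries(text)['bsi']['units'] would yield 'mol m-3'.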
-
-!============
-variable_entry: bddtdic
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mole_concentration_of_dissolved_inorganic_carbon_in_sea_water_due_to_biological_processes
-units: mol m-3 s-1
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Rate of Change of Dissolved Inorganic Carbon due to Biological Activity
-comment: Net of biological terms in time rate of change of dissolved inorganic carbon
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: bddtdic
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: bddtdin
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mole_concentration_of_dissolved_inorganic_nitrogen_in_sea_water_due_to_biological_processes
-units: mol m-3 s-1
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Rate of Change of Nitrogen Nutrient due to Biological Activity
-comment: Net of biological terms in time rate of change of nitrogen nutrients (e.g. NO3+NH4)
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: bddtdin
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: bddtdip
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mole_concentration_of_dissolved_inorganic_phosphate_in_sea_water_due_to_biological_processes
-units: mol m-3 s-1
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Rate of Change of Dissolved Phosphate due to Biological Activity
-comment: Net of biological terms in time rate of change of dissolved phosphate
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: bddtdip
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: bddtdife
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mole_concentration_of_dissolved_inorganic_iron_in_sea_water_due_to_biological_processes
-units: mol m-3 s-1
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Rate of Change of Dissolved Inorganic Iron due to Biological Activity
-comment: Net of biological terms in time rate of change of dissolved inorganic iron
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: bddtdife
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: bddtdisi
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mole_concentration_of_dissolved_inorganic_silicate_in_sea_water_due_to_biological_processes
-units: mol m-3 s-1
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Rate of Change of Dissolved Inorganic Silicate due to Biological Activity
-comment: Net of biological terms in time rate of change of dissolved inorganic silicate
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: bddtdisi
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: bddtalk
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_sea_water_alkalinity_expressed_as_mole_equivalent_due_to_biological_processes
-units: mol m-3 s-1
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Rate of Change of Alkalinity due to Biological Activity
-comment: Net of biological terms in time rate of change of alkalinity
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: bddtalk
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: fescav
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mole_concentration_of_dissolved_iron_in_sea_water_due_to_scavenging_by_inorganic_particles
-units: mol m-3 s-1
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Nonbiogenic Iron Scavenging
-comment: Dissolved Fe removed through nonbiogenic scavenging onto particles
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: fescav
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: fediss
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mole_concentration_of_dissolved_iron_in_sea_water_due_to_dissolution_from_inorganic_particles
-units: mol m-3 s-1
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Particle Source of Dissolved Iron
-comment: Dissolution, remineralization and desorption of iron back to the dissolved phase
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: fediss
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: graz
-!============
-modeling_realm: ocnBgchem
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mole_concentration_of_dissolved_iron_in_sea_water_due_to_grazing_of_phytoplankton
-units: mol m-3 s-1
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello volume: volcello
-long_name: Total Grazing of Phytoplankton by Zooplankton
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: graz
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: zfull
-!============
-modeling_realm: ocean
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: depth_below_geoid
-units: m
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello
-long_name: Depth Below Geoid of Ocean Layer
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: zfull
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: zhalf
-!============
-modeling_realm: ocean
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: depth_below_geoid
-units: m
-cell_methods: time: mean area: mean where sea
-cell_measures: area: areacello
-long_name: Depth Below Geoid of Interfaces Between Ocean Layers
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude olevel time
-out_name: zhalf
-type: real
-!----------------------------------
-!
-
diff --git a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_aero b/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_aero
deleted file mode 100644
index 6871d12c89..0000000000
--- a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_aero
+++ /dev/null
@@ -1,2223 +0,0 @@
-table_id: Table aero
-modeling_realm: atmos
-
-frequency: mon
-
-cmor_version: 2.6 ! minimum version of CMOR that can read this table
-cf_version: 1.4 ! version of CF that output conforms to
-project_id: CMIP5 ! project id
-table_date: 17 July 2013 ! date this table was constructed
-
-missing_value: 1.e20 ! value used to indicate a missing value
- ! in arrays output by netCDF as 32-bit IEEE
- ! floating-point numbers (float or real)
-
-baseURL: http://cmip-pcmdi.llnl.gov/CMIP5/dataLocation
-product: output
-
-required_global_attributes: creation_date tracking_id forcing model_id parent_experiment_id parent_experiment_rip branch_time contact institute_id ! space separated required global attribute
-
-forcings: N/A Nat Ant GHG SD SI SA TO SO Oz LU Sl Vl SS Ds BC MD OC AA
-
-expt_id_ok: '10- or 30-year run initialized in year XXXX' 'decadalXXXX'
-expt_id_ok: 'volcano-free hindcast initialized in year XXXX' 'noVolcXXXX'
-expt_id_ok: 'prediction with 2010 volcano' 'volcIn2010'
-expt_id_ok: 'pre-industrial control' 'piControl'
-expt_id_ok: 'historical' 'historical'
-expt_id_ok: 'historical extension' 'historicalExt'
-expt_id_ok: 'other historical forcing' 'historicalMisc'
-expt_id_ok: 'mid-Holocene' 'midHolocene'
-expt_id_ok: 'last glacial maximum' 'lgm'
-expt_id_ok: 'last millennium' 'past1000'
-expt_id_ok: 'RCP4.5' 'rcp45'
-expt_id_ok: 'RCP8.5' 'rcp85'
-expt_id_ok: 'RCP2.6' 'rcp26'
-expt_id_ok: 'RCP6' 'rcp60'
-expt_id_ok: 'ESM pre-industrial control' 'esmControl'
-expt_id_ok: 'ESM historical' 'esmHistorical'
-expt_id_ok: 'ESM RCP8.5' 'esmrcp85'
-expt_id_ok: 'ESM fixed climate 1' 'esmFixClim1'
-expt_id_ok: 'ESM fixed climate 2' 'esmFixClim2'
-expt_id_ok: 'ESM feedback 1' 'esmFdbk1'
-expt_id_ok: 'ESM feedback 2' 'esmFdbk2'
-expt_id_ok: '1 percent per year CO2' '1pctCO2'
-expt_id_ok: 'abrupt 4XCO2' 'abrupt4xCO2'
-expt_id_ok: 'natural-only' 'historicalNat'
-expt_id_ok: 'GHG-only' 'historicalGHG'
-expt_id_ok: 'AMIP' 'amip'
-expt_id_ok: '2030 time-slice' 'sst2030'
-expt_id_ok: 'control SST climatology' 'sstClim'
-expt_id_ok: 'CO2 forcing' 'sstClim4xCO2'
-expt_id_ok: 'all aerosol forcing' 'sstClimAerosol'
-expt_id_ok: 'sulfate aerosol forcing' 'sstClimSulfate'
-expt_id_ok: '4xCO2 AMIP' 'amip4xCO2'
-expt_id_ok: 'AMIP plus patterned anomaly' 'amipFuture'
-expt_id_ok: 'aqua planet control' 'aquaControl'
-expt_id_ok: '4xCO2 aqua planet' 'aqua4xCO2'
-expt_id_ok: 'aqua planet plus 4K anomaly' 'aqua4K'
-expt_id_ok: 'AMIP plus 4K anomaly' 'amip4K'
-
-
-approx_interval: 30.000000 ! approximate spacing between successive time
- ! samples (in units of the output time
- ! coordinate.
-
-generic_levels: alevel alev1
-
-!============
-axis_entry: longitude
-!============
-!----------------------------------
-! Axis attributes:
-!----------------------------------
-standard_name: longitude
-units: degrees_east
-axis: X ! X, Y, Z, T (default: undeclared)
-long_name: longitude
-!----------------------------------
-! Additional axis information:
-!----------------------------------
-out_name: lon
-valid_min: 0.0
-valid_max: 360.0
-stored_direction: increasing
-type: double
-must_have_bounds: yes
-!----------------------------------
-!
-
-
-!============
-axis_entry: latitude
-!============
-!----------------------------------
-! Axis attributes:
-!----------------------------------
-standard_name: latitude
-units: degrees_north
-axis: Y ! X, Y, Z, T (default: undeclared)
-long_name: latitude
-!----------------------------------
-! Additional axis information:
-!----------------------------------
-out_name: lat
-valid_min: -90.0
-valid_max: 90.0
-stored_direction: increasing
-type: double
-must_have_bounds: yes
-!----------------------------------
-!
-
-
-!============
-axis_entry: time
-!============
-!----------------------------------
-! Axis attributes:
-!----------------------------------
-standard_name: time
-units: days since ?
-axis: T ! X, Y, Z, T (default: undeclared)
-long_name: time
-!----------------------------------
-! Additional axis information:
-!----------------------------------
-out_name: time
-stored_direction: increasing
-type: double
-must_have_bounds: yes
-!----------------------------------
-!
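The longitude, latitude and time axis entries above encode machine-checkable constraints: valid_min/valid_max, stored_direction and must_have_bounds. A minimal sketch of how a checker might apply the longitude constraints, assuming NumPy; check_longitude is illustrative and not an ESMValTool API:

import numpy as np

def check_longitude(lon, lon_bnds=None):
    # Mirror the longitude axis_entry: valid_min 0.0, valid_max 360.0,
    # stored_direction increasing, must_have_bounds yes.
    lon = np.asarray(lon, dtype=float)
    problems = []
    if lon.min() < 0.0 or lon.max() > 360.0:
        problems.append('outside valid_min/valid_max [0, 360]')
    if not np.all(np.diff(lon) > 0):
        problems.append('not stored in increasing direction')
    if lon_bnds is None:
        problems.append('bounds missing (must_have_bounds: yes)')
    return problems

An empty list means the coordinate satisfies the entry; the same pattern extends to latitude and time.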
-
-!============
-axis_entry: smooth_level
-!============
-!
-! This coordinate is a hybrid height coordinate with units of meters (m).
-! It increases upward.
-! The values of a(k)*ztop, which appear in the formula below, should be stored as smooth_level.
-! Note that in the netCDF file the variable will be named "lev", not smooth_level.
-!
-!------------
-!
-! Axis attributes:
-!----------------------------------
-standard_name: atmosphere_sleve_coordinate
-units: m
-axis: Z
-positive: up
-long_name: atmosphere smooth level vertical (SLEVE) coordinate
-!----------------------------------
-!
-! Additional axis information:
-!----------------------------------
-out_name: lev
-must_have_bounds: yes
-stored_direction: increasing
-valid_min: -200.
-valid_max: 800000.
-formula: z = a*ztop + b1*zsurf1 + b2*zsurf2
-z_factors: a: a b1: b1 b2: b2 ztop: ztop zsurf1: zsurf1 zsurf2: zsurf2
-z_bounds_factors: a: a_bnds b1: b1_bnds b2: b2_bnds ztop: ztop zsurf1: zsurf1 zsurf2: zsurf2
-!----------------------------------
-!
-!============
-axis_entry: natural_log_pressure
-!============
-!
-!This coordinate is dimensionless and varies from near 0 at the surface and increases upward.
-! The values of lev(k), which appears in the formula below, should be stored as natural_log_pressure.
-! Note that in the netCDF file the variable will be named "lev", not natural_log_pressure.
-!
-!------------
-!
-! Axis attributes:
-!----------------------------------
-standard_name: atmosphere_ln_pressure_coordinate
-axis: Z
-long_name: atmosphere natural log pressure coordinate
-positive: down
-!----------------------------------
-!
-! Additional axis information:
-!----------------------------------
-out_name: lev
-must_have_bounds: yes
-stored_direction: decreasing
-valid_min: -1.
-valid_max: 20.
-formula: p = p0 * exp(-lev)
-z_factors: p0: p0 lev: lev
-z_bounds_factors: p0: p0 lev: lev_bnds
-!----------------------------------
-!
-!============
-axis_entry: standard_sigma
-!============
-!
-! This coordinate is dimensionless and varies from 0 at the model top to 1.0 at the surface.
-! The values of sigma(k), which appears in the formula below, should be stored as standard_sigma.
-! Note that in the netCDF file the variable will be named "lev", not standard_sigma.
-!
-!------------
-!
-! Axis attributes:
-!----------------------------------
-standard_name: atmosphere_sigma_coordinate
-axis: Z
-positive: down
-long_name: sigma coordinate
-!----------------------------------
-!
-! Additional axis information:
-!----------------------------------
-out_name: lev
-must_have_bounds: yes
-stored_direction: decreasing
-valid_min: 0.0
-valid_max: 1.0
-formula: p = ptop + sigma*(ps - ptop)
-z_factors: ptop: ptop sigma: lev ps: ps
-z_bounds_factors: ptop: ptop sigma: lev_bnds ps: ps
-!----------------------------------
-!
-!
-!============
-axis_entry: standard_hybrid_sigma
-!============
-!
-! This coordinate is dimensionless and varies from a small value at the model top to 1.0 at the surface.
-! The values of a+ b, which appear in the formula below, should be stored as standard_hybrid_sigma.
-! Note that in the netCDF file the variable will be named "lev", not standard_hybrid_sigma.
-!
-!---------------------------------
-! Axis attributes:
-!----------------------------------
-standard_name: atmosphere_hybrid_sigma_pressure_coordinate
-units: 1
-axis: Z
-positive: down
-long_name: hybrid sigma pressure coordinate
-!----------------------------------
-! Additional axis information:
-!----------------------------------
-out_name: lev
-must_have_bounds: yes
-stored_direction: decreasing
-valid_min: 0.0
-valid_max: 1.0
-formula: p = a*p0 + b*ps
-z_factors: p0: p0 a: a b: b ps: ps
-z_bounds_factors: p0: p0 a: a_bnds b: b_bnds ps: ps
-!----------------------------------
-!
-!
-!============
-axis_entry: alternate_hybrid_sigma
-!============
-!
-! This coordinate is dimensionless and varies from a small value at the model top to 1.0 at the surface.
-! The values of ap/p0 + b, which appear in the formula below, should be stored as alternate_hybrid_sigma.
-! Note that in the netCDF file the variable will be named "lev", not alternate_hybrid_sigma.
-!
-!------------
-!
-! Axis attributes:
-!----------------------------------
-standard_name: atmosphere_hybrid_sigma_pressure_coordinate
-units: 1
-axis: Z
-positive: down
-long_name: hybrid sigma pressure coordinate
-!----------------------------------
-!
-! Additional axis information:
-!----------------------------------
-out_name: lev
-must_have_bounds: yes
-stored_direction: decreasing
-valid_min: 0.0
-valid_max: 1.0
-formula: p = ap + b*ps
-z_factors: ap: ap b: b ps: ps
-z_bounds_factors: ap: ap_bnds b: b_bnds ps: ps
-!----------------------------------
-!
-!
-!============
-axis_entry: hybrid_height
-!============
-!
-! This coordinate has dimension of meters (m) and increases upward.
-! The values of a which appear in the formula below, should be stored as hybrid_height.
-! Note that in the netCDF file the variable will be named "lev", not hybrid_height.
-!
-!------------
-!
-! Axis attributes:
-!----------------------------------
-standard_name: atmosphere_hybrid_height_coordinate
-units: m
-axis: Z
-positive: up
-long_name: hybrid height coordinate
-!----------------------------------
-!
-! Additional axis information:
-!----------------------------------
-out_name: lev
-must_have_bounds: yes
-stored_direction: increasing
-valid_min: 0.0
-formula: z = a + b*orog
-z_factors: a: lev b: b orog: orog
-z_bounds_factors: a: lev_bnds b: b_bnds orog: orog
-!----------------------------------
-!
-! ***************************************************************
-!
-! Vertical coordinate formula terms:
-!
-! ***************************************************************
-!
-!
-!============
-variable_entry: orog
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: surface_altitude
-units: m
-long_name: Surface Altitude
-comment: height above the geoid; as defined here, ""the geoid"" is a surface of constant geopotential that, if the ocean were at rest, would coincide with mean sea level. Under this definition, the geoid changes as the mean volume of the ocean changes (e.g., due to glacial melt, or global warming of the ocean). Report here the height above the present-day geoid. Over ocean, report as 0.0
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude
-out_name: orog
-type: real
-valid_min: -700
-valid_max: 1.00E+04
-!----------------------------------
-!
-!
-!============
-variable_entry: p0
-!============
-!------------
-!
-! Variable attributes:
-!----------------------------------
-long_name: vertical coordinate formula term: reference pressure
-units: Pa
-!----------------------------------
-!
-!
-!============
-variable_entry: ptop
-!============
-!
-!------------
-!
-! Variable attributes:
-!----------------------------------
-long_name: pressure at top of model
-units: Pa
-!----------------------------------
-!
-!
-!
-!============
-variable_entry: a
-!============
-!------------
-!
-! Variable attributes:
-!----------------------------------
-long_name: vertical coordinate formula term: a(k)
-!----------------------------------
-!
-! Additional variable information:
-!----------------------------------
-dimensions: alevel
-type: double
-!----------------------------------
-!
-!
-!============
-variable_entry: b
-!============
-!------------
-!
-! Variable attributes:
-!----------------------------------
-long_name: vertical coordinate formula term: b(k)
-!----------------------------------
-!
-! Additional variable information:
-!----------------------------------
-dimensions: alevel
-type: double
-!----------------------------------
-!
-!
-!============
-variable_entry: a_bnds
-!============
-!
-!------------
-!
-! Variable attributes:
-!----------------------------------
-long_name: vertical coordinate formula term: a(k+1/2)
-!----------------------------------
-!
-! Additional variable information:
-!----------------------------------
-dimensions: alevel
-type: double
-!----------------------------------
-!
-!
-!============
-variable_entry: b_bnds
-!============
-!
-!------------
-!
-! Variable attributes:
-!----------------------------------
-long_name: vertical coordinate formula term: b(k+1/2)
-!----------------------------------
-!
-! Additional variable information:
-!----------------------------------
-dimensions: alevel
-type: double
-!----------------------------------
-!
-!
-!============
-variable_entry: ap
-!============
-!
-! Variable attributes:
-!----------------------------------
-long_name: vertical coordinate formula term: ap(k)
-units: Pa
-!----------------------------------
-!
-! Additional variable information:
-!----------------------------------
-dimensions: alevel
-type: double
-!----------------------------------
-!
-!
-!============
-variable_entry: ap_bnds
-!============
-!
-! Variable attributes:
-!----------------------------------
-long_name: vertical coordinate formula term: ap(k+1/2)
-units: Pa
-!----------------------------------
-!
-! Additional variable information:
-!----------------------------------
-dimensions: alevel
-type: double
-!----------------------------------
-!
-!
-!============
-variable_entry: ztop
-!============
-!
-!------------
-!
-! Variable attributes:
-!----------------------------------
-long_name: height of top of model
-units: m
-!----------------------------------
-!
-!
-!
-!
-!============
-variable_entry: ps
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: surface_air_pressure
-units: Pa
-cell_methods: time: mean
-long_name: Surface Air Pressure
-comment: not, in general, the same as mean sea-level pressure
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude time
-out_name: ps
-type: real
-!
-
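The hybrid sigma entries above spell out how a 3-D pressure field is rebuilt from the stored formula terms: p = a*p0 + b*ps for standard_hybrid_sigma and p = ap + b*ps for alternate_hybrid_sigma, with ps supplied by the surface air pressure entry just above. A minimal NumPy sketch of that reconstruction; the function name is illustrative:

import numpy as np

def pressure_from_hybrid_sigma(ap, b, ps):
    # alternate_hybrid_sigma: p = ap + b*ps, with ap(lev) in Pa, b(lev)
    # dimensionless and ps(lat, lon) in Pa; broadcasting yields
    # p(lev, lat, lon). For standard_hybrid_sigma (p = a*p0 + b*ps),
    # pass ap = a * p0 instead.
    ap = np.asarray(ap)[:, np.newaxis, np.newaxis]
    b = np.asarray(b)[:, np.newaxis, np.newaxis]
    return ap + b * np.asarray(ps)

The z_bounds_factors lines work the same way, with a_bnds, b_bnds and ap_bnds in place of the mid-level terms.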
-!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: dryoa -type: real -!---------------------------------- -! - -!============ -variable_entry: drypoa -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_due_to_dry_deposition -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Dry Deposition Rate of Dry Aerosol Primary Organic Matter -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: drypoa -type: real -!---------------------------------- -! - -!============ -variable_entry: drysoa -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_due_to_dry_deposition -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Dry Deposition Rate of Dry Aerosol Secondary Organic Matter -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: drysoa -type: real -!---------------------------------- -! - -!============ -variable_entry: drybc -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_black_carbon_dry_aerosol_due_to_dry_deposition -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Dry Deposition Rate of Black Carbon Aerosol Mass -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: drybc -type: real -!---------------------------------- -! - -!============ -variable_entry: wetoa -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_due_to_wet_deposition -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Wet Deposition Rate of Dry Aerosol Organic Matter -comment: tendency of atmosphere mass content of organic matter dry aerosols due to wet deposition: This is the sum of wet deposition of POA and wet deposition of SOA (see next two entries). ""Mass"" refers to the mass of organic matter, not mass of organic carbon alone. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: wetoa -type: real -!---------------------------------- -! - -!============ -variable_entry: wetpoa -!============ -modeling_realm: aerosol -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_due_to_wet_deposition -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Wet Deposition Rate of Dry Aerosol Primary Organic Matter -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: wetpoa -type: real -!---------------------------------- -! - -!============ -variable_entry: wetsoa -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_due_to_wet_deposition -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Wet Deposition Rate of Dry Aerosol Secondary Organic Matter -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: wetsoa -type: real -!---------------------------------- -! - -!============ -variable_entry: wetbc -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_black_carbon_dry_aerosol_due_to_wet_deposition -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Wet Deposition Rate of Black Carbon Aerosol Mass -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: wetbc -type: real -!---------------------------------- -! - -!============ -variable_entry: emibb -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_due_to_emission -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Total Emission of Primary Aerosol from Biomass Burning -comment: tendency of atmosphere mass content of primary organic matter dry aerosol due to emission: This does not include sources of secondary aerosols from biomass burning aerosols, such as SO2 or SOA. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: emibb -type: real -!---------------------------------- -! - -!============ -variable_entry: emiso2 -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_sulfur_dioxide_due_to_emission -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Total Emission Rate of SO2 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: emiso2 -type: real -!---------------------------------- -! - -!============ -variable_entry: emiso4 -!============ -modeling_realm: aerosol -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_due_to_emission -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Total Direct Emission Rate of SO4 -comment: expressed as a tendency of atmosphere mass content of SO4. Direct emission does not include secondary sulfate production. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: emiso4 -type: real -!---------------------------------- -! - -!============ -variable_entry: emidms -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_dimethyl_sulfide_due_to_emission -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Total Emission Rate of DMS -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: emidms -type: real -!---------------------------------- -! - -!============ -variable_entry: dryso2 -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_sulfur_dioxide_due_to_dry_deposition -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Dry Deposition Rate of SO2 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: dryso2 -type: real -!---------------------------------- -! - -!============ -variable_entry: dryso4 -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_due_to_dry_deposition -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Dry Deposition Rate of SO4 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: dryso4 -type: real -!---------------------------------- -! - -!============ -variable_entry: drydms -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_dimethyl_sulfide_due_to_dry_deposition -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Dry Deposition Rate of DMS -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: drydms -type: real -!---------------------------------- -! - -!============ -variable_entry: wetso4 -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_sulfate_expressed_as_sulfur_dry_aerosol_due_to_wet_deposition -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Wet Deposition Rate of SO4 -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: wetso4 -type: real -!---------------------------------- -! - -!============ -variable_entry: wetso2 -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_sulfur_dioxide_due_to_wet_deposition -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Wet Deposition Rate of SO2 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: wetso2 -type: real -!---------------------------------- -! - -!============ -variable_entry: wetdms -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_dimethyl_sulfide_due_to_wet_deposition -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Wet Deposition Rate of DMS -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: wetdms -type: real -!---------------------------------- -! - -!============ -variable_entry: eminh3 -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_ammonia_due_to_emission -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Total Emission Rate of NH3 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: eminh3 -type: real -!---------------------------------- -! - -!============ -variable_entry: drynh3 -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_ammonia_due_to_dry_deposition -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Dry Deposition Rate of NH3 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: drynh3 -type: real -!---------------------------------- -! - -!============ -variable_entry: drynh4 -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_ammonium_dry_aerosol_due_to_dry_deposition -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Dry Deposition Rate of NH4 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: drynh4 -type: real -!---------------------------------- -! - -!============ -variable_entry: wetnh4 -!============ -modeling_realm: aerosol -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_ammonium_dry_aerosol_due_to_wet_deposition -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Wet Deposition Rate of NH4+NH3 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: wetnh4 -type: real -!---------------------------------- -! - -!============ -variable_entry: emiss -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_seasalt_dry_aerosol_due_to_emission -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Total Emission Rate of Seasalt -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: emiss -type: real -!---------------------------------- -! - -!============ -variable_entry: dryss -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_seasalt_dry_aerosol_due_to_dry_deposition -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Dry Deposition Rate of Seasalt -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: dryss -type: real -!---------------------------------- -! - -!============ -variable_entry: wetss -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_seasalt_dry_aerosol_due_to_wet_deposition -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Wet Deposition Rate of Seasalt -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: wetss -type: real -!---------------------------------- -! - -!============ -variable_entry: emidust -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_due_to_emission -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Total Emission Rate of Dust -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: emidust -type: real -!---------------------------------- -! - -!============ -variable_entry: drydust -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_due_to_dry_deposition -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Dry Deposition Rate of Dust -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: drydust -type: real -!---------------------------------- -! 
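The emi*, dry*, and wet* entries in this table are all tendencies of atmosphere mass content, with units kg m-2 s-1. Schematically (and neglecting transport and chemical production and loss, which these table entries do not resolve), they relate to the column burdens reported further down by the load* entries (kg m-2) through a budget of the form

    \frac{d}{dt}\,\mathrm{load}_X \;=\; \mathrm{emi}_X \;-\; \mathrm{dry}_X \;-\; \mathrm{wet}_X \;+\; (\text{chemistry and transport terms})

where X stands for the species (SO4, DMS, NH3, seasalt, dust, ...). This is a schematic relation for orientation only, not part of the table itself.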
- -!============ -variable_entry: wetdust -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_due_to_wet_deposition -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Wet Deposition Rate of Dust -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: wetdust -type: real -!---------------------------------- -! - -!============ -variable_entry: loadoa -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol -units: kg m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Load of Dry Aerosol Organic Matter -comment: atmosphere dry organic content: This is the vertically integrated sum of atmosphere_primary_organic_content and atmosphere_secondary_organic_content (see next two table entries). -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: loadoa -type: real -!---------------------------------- -! - -!============ -variable_entry: loadpoa -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol -units: kg m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Load of Dry Aerosol Primary Organic Matter -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: loadpoa -type: real -!---------------------------------- -! - -!============ -variable_entry: loadsoa -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol -units: kg m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Load of Dry Aerosol Secondary Organic Matter -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: loadsoa -type: real -!---------------------------------- -! - -!============ -variable_entry: loadbc -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: atmosphere_mass_content_of_black_carbon_dry_aerosol -units: kg m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Load of Black Carbon Aerosol -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: loadbc -type: real -!---------------------------------- -! - -!============ -variable_entry: loadso4 -!============ -modeling_realm: aerosol -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: atmosphere_mass_content_of_sulfate_dry_aerosol -units: kg m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Load of SO4 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: loadso4 -type: real -!---------------------------------- -! - -!============ -variable_entry: loaddust -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: atmosphere_mass_content_of_dust_dry_aerosol -units: kg m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Load of Dust -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: loaddust -type: real -!---------------------------------- -! - -!============ -variable_entry: loadss -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: atmosphere_mass_content_of_seasalt_dry_aerosol -units: kg m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Load of Seasalt -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: loadss -type: real -!---------------------------------- -! - -!============ -variable_entry: loadno3 -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: atmosphere_mass_content_of_nitrate_dry_aerosol -units: kg m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Load of NO3 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: loadno3 -type: real -!---------------------------------- -! - -!============ -variable_entry: loadnh4 -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: atmosphere_mass_content_of_ammonium_dry_aerosol -units: kg m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Load of NH4 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: loadnh4 -type: real -!---------------------------------- -! - -!============ -variable_entry: sconcoa -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_particulate_organic_matter_dry_aerosol_in_air -units: kg m-3 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Concentration of Dry Aerosol Organic Matter -comment: mass concentration of particulate organic matter dry aerosol in air in model lowest layer -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alev1 time -out_name: sconcoa -type: real -!---------------------------------- -! - -!============ -variable_entry: sconcpoa -!============ -modeling_realm: aerosol -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_primary_particulate_organic_matter_dry_aerosol_in_air -units: kg m-3 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Concentration of Dry Aerosol Primary Organic Matter -comment: mass concentration of primary particulate organic matter dry aerosol in air in model lowest layer -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alev1 time -out_name: sconcpoa -type: real -!---------------------------------- -! - -!============ -variable_entry: sconcsoa -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_secondary_particulate_organic_matter_dry_aerosol_in_air -units: kg m-3 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Concentration of Dry Aerosol Secondary Organic Matter -comment: mass concentration of secondary particulate organic matter dry aerosol in air in model lowest layer. If the model lumps SOA with POA, then their sum is reported as POA. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alev1 time -out_name: sconcsoa -type: real -!---------------------------------- -! - -!============ -variable_entry: sconcbc -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_black_carbon_dry_aerosol_in_air -units: kg m-3 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Concentration of Black Carbon Aerosol -comment: mass concentration of black carbon dry aerosol in air in model lowest layer -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alev1 time -out_name: sconcbc -type: real -!---------------------------------- -! - -!============ -variable_entry: sconcso4 -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_sulfate_dry_aerosol_in_air -units: kg m-3 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Concentration of SO4 -comment: mass concentration of sulfate dry aerosol in air in model lowest layer. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alev1 time -out_name: sconcso4 -type: real -!---------------------------------- -! - -!============ -variable_entry: sconcdust -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_dust_dry_aerosol_in_air -units: kg m-3 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Concentration of Dust -comment: mass concentration of dust dry aerosol in air in model lowest layer -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alev1 time -out_name: sconcdust -type: real -!---------------------------------- -! 
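The sconc* entries in this group give the mass concentration in the lowest model layer (kg m-3, on the singleton dimension alev1), whereas the load* entries above are column burdens (kg m-2). As a sketch, the burden is the vertical integral of the full-column concentration field (the conc* entries defined later in the table):

    \mathrm{load}_X \;=\; \int_0^{z_{\mathrm{top}}} \mathrm{conc}_X(z)\,\mathrm{d}z \;\approx\; \sum_k \mathrm{conc}_{X,k}\,\Delta z_k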
- -!============ -variable_entry: sconcss -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_seasalt_dry_aerosol_in_air -units: kg m-3 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Concentration of Seasalt -comment: mass concentration of seasalt dry aerosol in air in model lowest layer -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alev1 time -out_name: sconcss -type: real -!---------------------------------- -! - -!============ -variable_entry: sconcno3 -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_nitrate_dry_aerosol_in_air -units: kg m-3 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Concentration of NO3 -comment: Mass concentration in model lowest layer -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alev1 time -out_name: sconcno3 -type: real -!---------------------------------- -! - -!============ -variable_entry: sconcnh4 -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_ammonium_dry_aerosol_in_air -units: kg m-3 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Concentration of NH4 -comment: Mass concentration in model lowest layer -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alev1 time -out_name: sconcnh4 -type: real -!---------------------------------- -! - -!============ -variable_entry: rsdsdiff -!============ -modeling_realm: aerosol land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_diffuse_downwelling_shortwave_flux_in_air -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Diffuse Downwelling Shortwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rsdsdiff -type: real -!---------------------------------- -! - -!============ -variable_entry: rsdscsdiff -!============ -modeling_realm: aerosol land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_diffuse_downwelling_shortwave_flux_in_air_assuming_clear_sky -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Diffuse Downwelling Clear Sky Shortwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rsdscsdiff -type: real -!---------------------------------- -! - -!============ -variable_entry: reffclwtop -!============ -modeling_realm: aerosol -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: effective_radius_of_cloud_liquid_water_particle_at_liquid_water_cloud_top -units: m -cell_methods: time: mean -cell_measures: area: areacella -long_name: Cloud-Top Effective Droplet Radius -comment: Droplets are liquid only. This is the effective radius "as seen from space" over the liquid cloudy portion of the grid cell. This is the value from the uppermost model layer with liquid cloud or, if available, for some models it is the sum over all liquid cloud tops, no matter where they occur, as long as they are seen from the top of the atmosphere. Reported values are weighted by the total liquid cloud top fraction (as seen from TOA) of each time sample when computing the monthly mean. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: reffclwtop -type: real -!---------------------------------- -! - -!============ -variable_entry: cldncl -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: number_concentration_of_cloud_liquid_water_particles_in_air_at_liquid_water_cloud_top -units: m-3 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Cloud Droplet Number Concentration of Cloud Tops -comment: Droplets are liquid only. Report the concentration "as seen from space" over the liquid cloudy portion of the grid cell. This is the value from the uppermost model layer with liquid cloud or, if available, it is better to sum over all liquid cloud tops, no matter where they occur, as long as they are seen from the top of the atmosphere. Weight by the total liquid cloud top fraction (as seen from TOA) of each time sample when computing the monthly mean. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: cldncl -type: real -!---------------------------------- -! - -!============ -variable_entry: cldnci -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: number_concentration_of_ice_crystals_in_air_at_ice_cloud_top -units: m-3 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Ice Crystal Number Concentration of Cloud Tops -comment: Concentration "as seen from space" over the ice-cloud portion of the grid cell. This is the value from the uppermost model layer with ice cloud or, if available, it is the sum over all ice cloud tops, no matter where they occur, as long as they are seen from the top of the atmosphere. Weight by the total ice cloud top fraction (as seen from TOA) of each time sample when computing the monthly mean. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: cldnci -type: real -!---------------------------------- -! - -!============ -variable_entry: cldnvi -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: atmosphere_number_content_of_cloud_droplets -units: m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Column Integrated Cloud Droplet Number -comment: Droplets are liquid only.
Values are weighted by liquid cloud fraction in each layer when vertically integrating, and for monthly means the samples are weighted by total liquid cloud fraction (as seen from TOA). -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: cldnvi -type: real -!---------------------------------- -! - -!============ -variable_entry: ec550aer -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: volume_extinction_coefficient_in_air_due_to_ambient_aerosol -units: m-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Ambient Aerosol Extinction at 550 nm -comment: "ambient" means "wetted". -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: ec550aer -!---------------------------------- -! - -!============ -variable_entry: concoa -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_particulate_organic_matter_dry_aerosol_in_air -units: kg m-3 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Concentration of Dry Aerosol Organic Matter -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: concoa -!---------------------------------- -! - -!============ -variable_entry: concpoa -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_primary_particulate_organic_matter_dry_aerosol_in_air -units: kg m-3 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Concentration of Dry Aerosol Primary Organic Matter -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: concpoa -!---------------------------------- -! - -!============ -variable_entry: concsoa -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_secondary_particulate_organic_matter_dry_aerosol_in_air -units: kg m-3 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Concentration of Dry Aerosol Secondary Organic Matter -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: concsoa -!---------------------------------- -! - -!============ -variable_entry: concbb -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_biomass_burning_dry_aerosol_in_air -units: kg m-3 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Concentration of Biomass Burning Aerosol -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: concbb -!---------------------------------- -!
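The ec550aer entry above is a volume extinction coefficient (m-1) on model levels; integrating it over the column gives the dimensionless aerosol optical depth at 550 nm:

    \tau_{550} \;=\; \int_0^{z_{\mathrm{top}}} \mathrm{ec550aer}(z)\,\mathrm{d}z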
- -!============ -variable_entry: concbc -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_black_carbon_dry_aerosol_in_air -units: kg m-3 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Concentration of Black Carbon Aerosol -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: concbc -!---------------------------------- -! - -!============ -variable_entry: concaerh2o -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_water_in_ambient_aerosol_in_air -units: kg m-3 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Concentration of Aerosol Water -comment: "ambient" means "wetted" -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: concaerh2o -!---------------------------------- -! - -!============ -variable_entry: concso4 -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_sulfate_dry_aerosol_in_air -units: kg m-3 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Concentration of SO4 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: concso4 -!---------------------------------- -! - -!============ -variable_entry: concso2 -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_fraction_of_sulfur_dioxide_in_air -units: 1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Mole Fraction of SO2 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: concso2 -!---------------------------------- -! - -!============ -variable_entry: concdms -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mole_fraction_of_dimethyl_sulfide_in_air -units: 1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Mole Fraction of DMS -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: concdms -!---------------------------------- -! - -!============ -variable_entry: concno3 -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_nitrate_dry_aerosol_in_air -units: kg m-3 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Concentration of NO3 Aerosol -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: concno3 -!---------------------------------- -!
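All of the entries deleted here share the same plain-text layout: a variable_entry header, key: value attribute lines, and ! comment markers. A minimal reader for that format could look like the sketch below; the function name and example path are illustrative, not part of ESMValTool, and it relies only on the conventions visible in the entries above.

    def parse_variable_entries(path):
        """Collect variable_entry blocks from a CMOR 2.x plain-text table."""
        entries = {}  # dicts preserve insertion order in Python 3.7+
        current = None
        with open(path) as handle:
            for raw in handle:
                line = raw.split('!', 1)[0].strip()  # drop '!' comments
                if ':' not in line:
                    continue  # skip separator rows and blank lines
                key, _, value = line.partition(':')
                key, value = key.strip(), value.strip()
                if key == 'variable_entry':
                    current = entries.setdefault(value, {})
                elif key == 'axis_entry':
                    current = None  # axis blocks are ignored in this sketch
                elif current is not None:
                    current[key] = value
        return entries

    # e.g. parse_variable_entries('CMIP5_aero')['emiso4']['units'] -> 'kg m-2 s-1'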
- -!============ -variable_entry: concnh4 -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_ammonium_dry_aerosol_in_air -units: kg m-3 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Concentration of NH4 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: concnh4 -!---------------------------------- -! - -!============ -variable_entry: concss -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_seasalt_dry_aerosol_in_air -units: kg m-3 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Concentration of Seasalt -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: concss -!---------------------------------- -! - -!============ -variable_entry: concdust -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_concentration_of_dust_dry_aerosol_in_air -units: kg m-3 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Concentration of Dust -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: concdust -!---------------------------------- -! - -!============ -variable_entry: conccn -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: number_concentration_of_ambient_aerosol_in_air -units: m-3 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Aerosol Number Concentration -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: conccn -!---------------------------------- -! - -!============ -variable_entry: concnmcn -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: number_concentration_of_nucleation_mode_ambient_aerosol_in_air -units: m-3 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Number Concentration of Nucleation Mode Aerosol -comment: includes all particles with diameter smaller than 3 nm -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: concnmcn -!---------------------------------- -! - -!============ -variable_entry: conccmcn -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: number_concentration_of_coarse_mode_ambient_aerosol_in_air -units: m-3 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Number Concentration Coarse Mode Aerosol -comment: includes all particles with diameter larger than 1 micron -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: conccmcn -!---------------------------------- -! - -!============ -variable_entry: reffclws -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: effective_radius_of_stratiform_cloud_liquid_water_particle -units: m -cell_methods: time: mean -cell_measures: area: areacella -long_name: Stratiform Cloud Droplet Effective Radius -comment: Droplets are liquid. The effective radius is defined as the ratio of the third to the second moment of the particle size distribution; the time mean should be calculated by weighting the individual samples by the cloudy fraction of the grid cell. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: reffclws -!---------------------------------- -! - -!============ -variable_entry: reffclwc -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: effective_radius_of_convective_cloud_liquid_water_particle -units: m -cell_methods: time: mean -cell_measures: area: areacella -long_name: Convective Cloud Droplet Effective Radius -comment: Droplets are liquid. The effective radius is defined as the ratio of the third to the second moment of the particle size distribution; the time mean should be calculated by weighting the individual samples by the cloudy fraction of the grid cell. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: reffclwc -!---------------------------------- -! - -!============ -variable_entry: cdnc -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: number_concentration_of_cloud_liquid_water_particles_in_air -units: m-3 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Cloud Droplet Number Concentration -comment: Cloud droplet number concentration in liquid clouds -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: cdnc -!---------------------------------- -! - -!============ -variable_entry: inc -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: number_concentration_of_ice_crystals_in_air -units: m-3 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Ice Crystal Number Concentration -comment: Ice crystal number concentration in ice clouds -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: inc -!---------------------------------- -! - diff --git a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_cf3hr b/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_cf3hr deleted file mode 100644 index 25020d2c71..0000000000 --- a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_cf3hr +++ /dev/null @@ -1,2593 +0,0 @@ -table_id: Table cf3hr -modeling_realm: atmos - -frequency: 3hr - -cmor_version: 2.6 !
minimum version of CMOR that can read this table -cf_version: 1.4 ! version of CF that output conforms to -project_id: CMIP5 ! project id -table_date: 17 July 2013 ! date this table was constructed - -missing_value: 1.e20 ! value used to indicate a missing value - ! in arrays output by netCDF as 32-bit IEEE - ! floating-point numbers (float or real) - -baseURL: http://cmip-pcmdi.llnl.gov/CMIP5/dataLocation -product: output - -required_global_attributes: creation_date tracking_id forcing model_id parent_experiment_id parent_experiment_rip branch_time contact institute_id ! space-separated required global attributes - -forcings: N/A Nat Ant GHG SD SI SA TO SO Oz LU Sl Vl SS Ds BC MD OC AA - -expt_id_ok: '10- or 30-year run initialized in year XXXX' 'decadalXXXX' -expt_id_ok: 'volcano-free hindcast initialized in year XXXX' 'noVolcXXXX' -expt_id_ok: 'prediction with 2010 volcano' 'volcIn2010' -expt_id_ok: 'pre-industrial control' 'piControl' -expt_id_ok: 'historical' 'historical' -expt_id_ok: 'historical extension' 'historicalExt' -expt_id_ok: 'other historical forcing' 'historicalMisc' -expt_id_ok: 'mid-Holocene' 'midHolocene' -expt_id_ok: 'last glacial maximum' 'lgm' -expt_id_ok: 'last millennium' 'past1000' -expt_id_ok: 'RCP4.5' 'rcp45' -expt_id_ok: 'RCP8.5' 'rcp85' -expt_id_ok: 'RCP2.6' 'rcp26' -expt_id_ok: 'RCP6' 'rcp60' -expt_id_ok: 'ESM pre-industrial control' 'esmControl' -expt_id_ok: 'ESM historical' 'esmHistorical' -expt_id_ok: 'ESM RCP8.5' 'esmrcp85' -expt_id_ok: 'ESM fixed climate 1' 'esmFixClim1' -expt_id_ok: 'ESM fixed climate 2' 'esmFixClim2' -expt_id_ok: 'ESM feedback 1' 'esmFdbk1' -expt_id_ok: 'ESM feedback 2' 'esmFdbk2' -expt_id_ok: '1 percent per year CO2' '1pctCO2' -expt_id_ok: 'abrupt 4XCO2' 'abrupt4xCO2' -expt_id_ok: 'natural-only' 'historicalNat' -expt_id_ok: 'GHG-only' 'historicalGHG' -expt_id_ok: 'AMIP' 'amip' -expt_id_ok: '2030 time-slice' 'sst2030' -expt_id_ok: 'control SST climatology' 'sstClim' -expt_id_ok: 'CO2 forcing' 'sstClim4xCO2' -expt_id_ok: 'all aerosol forcing' 'sstClimAerosol' -expt_id_ok: 'sulfate aerosol forcing' 'sstClimSulfate' -expt_id_ok: '4xCO2 AMIP' 'amip4xCO2' -expt_id_ok: 'AMIP plus patterned anomaly' 'amipFuture' -expt_id_ok: 'aqua planet control' 'aquaControl' -expt_id_ok: '4xCO2 aqua planet' 'aqua4xCO2' -expt_id_ok: 'aqua planet plus 4K anomaly' 'aqua4K' -expt_id_ok: 'AMIP plus 4K anomaly' 'amip4K' - - -approx_interval: 0.125000 ! approximate spacing between successive time - ! samples (in units of the output time - ! coordinate). - -generic_levels: alevel alevhalf - -!============ -axis_entry: longitude -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: longitude -units: degrees_east -axis: X ! X, Y, Z, T (default: undeclared) -long_name: longitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lon -valid_min: 0.0 -valid_max: 360.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: latitude -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: latitude -units: degrees_north -axis: Y ! X, Y, Z, T (default: undeclared) -long_name: latitude -!---------------------------------- -!
Additional axis information: -!---------------------------------- -out_name: lat -valid_min: -90.0 -valid_max: 90.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: p220 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: air_pressure -units: Pa -axis: Z ! X, Y, Z, T (default: undeclared) -positive: down ! up or down (default: undeclared) -long_name: pressure -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: plev -stored_direction: decreasing -type: double -value: 22000. ! of scalar (singleton) dimension -bounds_values: 44000. 0.0 ! of scalar (singleton) dimension bounds -must_have_bounds: no -!---------------------------------- -! - - -!============ -axis_entry: p560 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: air_pressure -units: Pa -axis: Z ! X, Y, Z, T (default: undeclared) -positive: down ! up or down (default: undeclared) -long_name: pressure -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: plev -stored_direction: decreasing -type: double -value: 56000. ! of scalar (singleton) dimension -bounds_values: 68000. 44000. ! of scalar (singleton) dimension bounds -must_have_bounds: no -!---------------------------------- -! - - -!============ -axis_entry: p840 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: air_pressure -units: Pa -axis: Z ! X, Y, Z, T (default: undeclared) -positive: down ! up or down (default: undeclared) -long_name: pressure -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: plev -stored_direction: decreasing -type: double -value: 84000. ! of scalar (singleton) dimension -bounds_values: 100000. 68000. ! of scalar (singleton) dimension bounds -must_have_bounds: no -!---------------------------------- -! - - -!============ -axis_entry: alt40 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: altitude -units: m -axis: Z ! X, Y, Z, T (default: undeclared) -positive: up ! up or down (default: undeclared) -long_name: altitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: alt40 -stored_direction: increasing -tolerance: 0.001 -type: double -requested: 240. 720. 1200. 1680. 2160. 2640. 3120. 3600. 4080. 4560. 5040. 5520. 6000. 6480. 6960. 7440. 7920. 8400. 8880. 9360. 9840. 10320. 10800. 11280. 11760. 12240. 12720. 13200. 13680. 14160. 14640. 15120. 15600. 16080. 16560. 17040. 17520. 18000. 18480. 18960. ! space-separated list of requested coordinates -requested_bounds: 0. 480. 480. 960. 960. 1440. 1440. 1920. 1920. 2400. 2400. 2880. 2880. 3360. 3360. 3840. 3840. 4320. 4320. 4800. 4800. 5280. 5280. 5760. 5760. 6240. 6240. 6720. 6720. 7200. 7200. 7680. 7680. 8160. 8160. 8640. 8640. 9120. 9120. 9600. 9600. 10080. 10080. 10560. 10560. 11040. 11040. 11520. 11520. 12000. 12000. 12480. 12480. 12960. 12960. 13440. 13440. 13920. 13920. 14400. 14400. 14880. 14880. 15360. 15360. 15840. 15840. 16320. 16320. 16800. 16800. 17280. 17280. 17760. 17760. 18240. 18240. 18720. 18720. 19200. ! 
space-separated list of requested coordinate bounds -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: time -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: time -units: days since ? -axis: T ! X, Y, Z, T (default: undeclared) -long_name: time -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: time -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: time1 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: time -units: days since ? -axis: T ! X, Y, Z, T (default: undeclared) -long_name: time -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: time -stored_direction: increasing -type: double -must_have_bounds: no -!---------------------------------- -! - - -!============ -axis_entry: height2m -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: height -units: m -axis: Z ! X, Y, Z, T (default: undeclared) -positive: up ! up or down (default: undeclared) -long_name: height -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: height -valid_min: 1.0 -valid_max: 10.0 -stored_direction: increasing -type: double -value: 2. ! of scalar (singleton) dimension -must_have_bounds: no -!---------------------------------- -! - - -!============ -axis_entry: height10m -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: height -units: m -axis: Z ! X, Y, Z, T (default: undeclared) -positive: up ! up or down (default: undeclared) -long_name: height -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: height -valid_min: 1.0 -valid_max: 30.0 -stored_direction: increasing -type: double -value: 10. ! of scalar (singleton) dimension -must_have_bounds: no -!---------------------------------- -! - - -!============ -axis_entry: scatratio -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: backscattering_ratio -units: 1 -long_name: lidar backscattering ratio -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: scatratio -stored_direction: increasing -tolerance: 0.001 -type: double -requested: 0.005 0.605 2.1 4. 6. 8.5 12.5 17.5 22.5 27.5 35. 45. 55. 70. 50040. ! space-separated list of requested coordinates -requested_bounds: 0. 0.01 0.01 1.2 1.2 3. 3. 5. 5. 7. 7. 10. 10. 15. 15. 20. 20. 25. 25. 30. 30. 40. 40. 50. 50. 60. 60. 80. 80. 100000. ! space-separated list of requested coordinate bounds -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: dbze -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: equivalent_reflectivity_factor -units: dBZ -long_name: CloudSat simulator equivalent radar reflectivity factor -!---------------------------------- -! 
Additional axis information: -!---------------------------------- -out_name: dbze -stored_direction: increasing -tolerance: 0.001 -type: double -requested: -47.5 -42.5 -37.5 -32.5 -27.5 -22.5 -17.5 -12.5 -7.5 -2.5 2.5 7.5 12.5 17.5 22.5 ! space-separated list of requested coordinates -requested_bounds: -50. -45. -45. -40. -40. -35. -35. -30. -30. -25. -25. -20. -20. -15. -15. -10. -10. -5. -5. 0. 0. 5. 5. 10. 10. 15. 15. 20. 20. 25. ! space-separated list of requested coordinate bounds -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: sza5 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: solar_zenith_angle -units: degree -long_name: solar zenith angle -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: sza -stored_direction: increasing -tolerance: 0.001 -type: double -requested: 0. 20. 40. 60. 80. ! space-separated list of requested coordinates -must_have_bounds: no -!---------------------------------- -! - - -!============ -axis_entry: location -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -long_name: location index -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: loc -stored_direction: increasing -type: integer -must_have_bounds: no -index_only: ok -must_call_cmor_grid: yes - -!---------------------------------- -! - -!============ -axis_entry: smooth_level -!============ -! -! This coordinate is a hybrid height coordinate with units of meters (m). -! It increases upward. -! The values of a(k)*ztop, which appear in the formula below, should be stored as smooth_level. -! Note that in the netCDF file the variable will be named "lev", not smooth_level. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_sleve_coordinate -units: m -axis: Z -positive: up -long_name: atmosphere smooth level vertical (SLEVE) coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: increasing -valid_min: -200. -valid_max: 800000. -formula: z = a*ztop + b1*zsurf1 + b2*zsurf2 -z_factors: a: a b1: b1 b2: b2 ztop: ztop zsurf1: zsurf1 zsurf2: zsurf2 -z_bounds_factors: a: a_bnds b1: b1_bnds b2: b2_bnds ztop: ztop zsurf1: zsurf1 zsurf2: zsurf2 -!---------------------------------- -! -!============ -axis_entry: natural_log_pressure -!============ -! -! This coordinate is dimensionless; it is near 0 at the surface and increases upward. -! The values of lev(k), which appear in the formula below, should be stored as natural_log_pressure. -! Note that in the netCDF file the variable will be named "lev", not natural_log_pressure. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_ln_pressure_coordinate -axis: Z -long_name: atmosphere natural log pressure coordinate -positive: down -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: -1. -valid_max: 20. -formula: p = p0 * exp(-lev) -z_factors: p0: p0 lev: lev -z_bounds_factors: p0: p0 lev: lev_bnds -!---------------------------------- -! -!============ -axis_entry: standard_sigma -!============ -! -!
This coordinate is dimensionless and varies from 0 at the model top to 1.0 at the surface. -! The values of sigma(k), which appear in the formula below, should be stored as standard_sigma. -! Note that in the netCDF file the variable will be named "lev", not standard_sigma. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_sigma_coordinate -axis: Z -positive: down -long_name: sigma coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: 0.0 -valid_max: 1.0 -formula: p = ptop + sigma*(ps - ptop) -z_factors: ptop: ptop sigma: lev ps: ps -z_bounds_factors: ptop: ptop sigma: lev_bnds ps: ps -!---------------------------------- -! -! -!============ -axis_entry: standard_hybrid_sigma -!============ -! -! This coordinate is dimensionless and varies from a small value at the model top to 1.0 at the surface. -! The values of a + b, which appear in the formula below, should be stored as standard_hybrid_sigma. -! Note that in the netCDF file the variable will be named "lev", not standard_hybrid_sigma. -! -!--------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_hybrid_sigma_pressure_coordinate -units: 1 -axis: Z -positive: down -long_name: hybrid sigma pressure coordinate -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: 0.0 -valid_max: 1.0 -formula: p = a*p0 + b*ps -z_factors: p0: p0 a: a b: b ps: ps -z_bounds_factors: p0: p0 a: a_bnds b: b_bnds ps: ps -!---------------------------------- -! -! -!============ -axis_entry: alternate_hybrid_sigma -!============ -! -! This coordinate is dimensionless and varies from a small value at the model top to 1.0 at the surface. -! The values of ap/p0 + b, which appear in the formula below, should be stored as alternate_hybrid_sigma. -! Note that in the netCDF file the variable will be named "lev", not alternate_hybrid_sigma. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_hybrid_sigma_pressure_coordinate -units: 1 -axis: Z -positive: down -long_name: hybrid sigma pressure coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: 0.0 -valid_max: 1.0 -formula: p = ap + b*ps -z_factors: ap: ap b: b ps: ps -z_bounds_factors: ap: ap_bnds b: b_bnds ps: ps -!---------------------------------- -! -! -!============ -axis_entry: hybrid_height -!============ -! -! This coordinate has dimension of meters (m) and increases upward. -! The values of a, which appear in the formula below, should be stored as hybrid_height. -! Note that in the netCDF file the variable will be named "lev", not hybrid_height. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_hybrid_height_coordinate -units: m -axis: Z -positive: up -long_name: hybrid height coordinate -!---------------------------------- -! -!
Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: increasing -valid_min: 0.0 -formula: z = a + b*orog -z_factors: a: lev b: b orog: orog -z_bounds_factors: a: lev_bnds b: b_bnds orog: orog -!---------------------------------- -! -! *************************************************************** -! -! Vertical coordinate formula terms: -! -! *************************************************************** -! -! -!============ -variable_entry: orog -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_altitude -units: m -long_name: Surface Altitude -comment: height above the geoid; as defined here, "the geoid" is a surface of constant geopotential that, if the ocean were at rest, would coincide with mean sea level. Under this definition, the geoid changes as the mean volume of the ocean changes (e.g., due to glacial melt, or global warming of the ocean). Report here the height above the present-day geoid. Over ocean, report as 0.0 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude -out_name: orog -type: real -valid_min: -700 -valid_max: 1.00E+04 -!---------------------------------- -! -! -!============ -variable_entry: p0 -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: reference pressure -units: Pa -!---------------------------------- -! -! -!============ -variable_entry: ptop -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: pressure at top of model -units: Pa -!---------------------------------- -! -! -! -!============ -variable_entry: a -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: a(k) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: alevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: b -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: b(k) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: alevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: a_bnds -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: a(k+1/2) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: alevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: b_bnds -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: b(k+1/2) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: alevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: ap -!============ -! -!
Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: ap(k) -units: Pa -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: alevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: ap_bnds -!============ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: ap(k+1/2) -units: Pa -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: alevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: ztop -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: height of top of model -units: m -!---------------------------------- -! -! -! - -!============ -variable_entry: clcalipso -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction_in_atmosphere_layer -units: % -cell_methods: time: point -long_name: CALIPSO Cloud Area Fraction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: location alt40 time1 -out_name: clcalipso -type: real -!---------------------------------- -! - -!============ -variable_entry: clcalipso2 -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction_in_atmosphere_layer -units: % -cell_methods: time: point -long_name: CALIPSO Cloud Fraction Undetected by CloudSat -comment: Clouds detected by CALIPSO but below the detectability threshold of CloudSat -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: location alt40 time1 -out_name: clcalipso2 -type: real -!---------------------------------- -! - -!============ -variable_entry: cfadDbze94 -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: histogram_of_equivalent_reflectivity_factor_over_height_above_reference_ellipsoid -units: 1 -cell_methods: time: point -long_name: CloudSat Radar Reflectivity CFAD -comment: CFADs (Cloud Frequency Altitude Diagrams) are joint height - radar reflectivity (or lidar scattering ratio) distributions. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: location alt40 dbze time1 -out_name: cfadDbze94 -type: real -!---------------------------------- -! - -!============ -variable_entry: cfadLidarsr532 -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: histogram_of_backscattering_ratio_over_height_above_reference_ellipsoid -units: 1 -cell_methods: time: point -long_name: CALIPSO Scattering Ratio CFAD -comment: CFADs (Cloud Frequency Altitude Diagrams) are joint height - radar reflectivity (or lidar scattering ratio) distributions. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: location alt40 scatratio time1 -out_name: cfadLidarsr532 -type: real -!---------------------------------- -! 
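The vertical coordinate entries earlier in this table each pair a formula with named z_factors; for standard_hybrid_sigma the formula line reads p = a*p0 + b*ps. A minimal sketch of evaluating it follows; the coefficient values are invented for illustration only.

    import numpy as np

    def hybrid_sigma_pressure(a, b, p0, ps):
        """Pressure p[k, j, i] on model levels from a(k), b(k), scalar p0 and ps(j, i)."""
        a = np.asarray(a)[:, None, None]
        b = np.asarray(b)[:, None, None]
        return a * p0 + b * np.asarray(ps)[None, :, :]

    ps = np.full((2, 3), 101325.0)  # surface pressure field, Pa
    p = hybrid_sigma_pressure([0.0, 0.1], [1.0, 0.8], 100000.0, ps)
    # p[0] equals ps (a=0, b=1 at the surface); p[1] = 0.1*p0 + 0.8*ps

The alternate_hybrid_sigma case is identical with ap taking the place of a*p0.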
- -!============ -variable_entry: parasolRefl -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: toa_bidirectional_reflectance -units: 1 -cell_methods: time: point -long_name: PARASOL Reflectance -comment: Simulated reflectance from PARASOL as seen at the top of the atmosphere for 5 solar zenith angles. Valid only over ocean and for one viewing direction (viewing zenith angle of 30 degrees and relative azimuth angle 320 degrees). -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: location sza5 time1 -out_name: parasolRefl -type: real -!---------------------------------- -! - -!============ -variable_entry: cltcalipso -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction -units: % -cell_methods: time: point -long_name: CALIPSO Total Cloud Fraction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: location time1 -out_name: cltcalipso -type: real -!---------------------------------- -! - -!============ -variable_entry: cllcalipso -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction_in_atmosphere_layer -units: % -cell_methods: time: point -long_name: CALIPSO Low Level Cloud Fraction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: location time1 p840 -out_name: cllcalipso -type: real -!---------------------------------- -! - -!============ -variable_entry: clmcalipso -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction_in_atmosphere_layer -units: % -cell_methods: time: point -long_name: CALIPSO Mid Level Cloud Fraction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: location time1 p560 -out_name: clmcalipso -type: real -!---------------------------------- -! - -!============ -variable_entry: clhcalipso -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction_in_atmosphere_layer -units: % -cell_methods: time: point -long_name: CALIPSO High Level Cloud Fraction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: location time1 p220 -out_name: clhcalipso -type: real -!---------------------------------- -! - -!============ -variable_entry: longitude -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: longitude -units: degrees_east -cell_methods: time: point -long_name: Longitude -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: location time1 -out_name: lon -type: real -valid_min: 0.0 -valid_max: 360.0 -!---------------------------------- -! - -!============ -variable_entry: latitude -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: latitude -units: degrees_north -cell_methods: time: point -long_name: Latitude -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: location time1 -out_name: lat -type: real -valid_min: -90.0 -valid_max: 90.0 -!---------------------------------- -! - -!============ -variable_entry: toffset -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: time -units: day -long_name: Offset Time -comment: "this ""offset time"" should be added to the value stored in the ""time dimension"" to get the actual time. This actual time is the time (UTC) of the corresponding point in the satellite orbit used to extract the model data." -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: location time1 -out_name: toffset -type: real -!---------------------------------- -! - -!============ -variable_entry: tas -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_temperature -units: K -cell_methods: time: point -cell_measures: area: areacella -long_name: Near-Surface Air Temperature -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 height2m -out_name: tas -type: real -!---------------------------------- -! - -!============ -variable_entry: ts -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_temperature -units: K -cell_methods: time: point -cell_measures: area: areacella -long_name: Surface Temperature -comment: ""skin"" temperature (i.e., SST for open ocean) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: ts -type: real -!---------------------------------- -! - -!============ -variable_entry: psl -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_pressure_at_sea_level -units: Pa -cell_methods: time: point -cell_measures: area: areacella -long_name: Sea Level Pressure -comment: not, in general, the same as surface pressure -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: psl -type: real -!---------------------------------- -! - -!============ -variable_entry: ps -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_air_pressure -units: Pa -cell_methods: time: point -cell_measures: area: areacella -long_name: Surface Air Pressure -comment: not, in general, the same as mean sea-level pressure -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: ps -type: real -!---------------------------------- -! - -!============ -variable_entry: uas -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: eastward_wind -units: m s-1 -cell_methods: time: point -long_name: Eastward Near-Surface Wind -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 height10m -out_name: uas -type: real -!---------------------------------- -! - -!============ -variable_entry: vas -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: northward_wind -units: m s-1 -cell_methods: time: point -long_name: Northward Near-Surface Wind -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 height10m -out_name: vas -type: real -!---------------------------------- -! - -!============ -variable_entry: sfcWind -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: wind_speed -units: m s-1 -cell_methods: time: point -long_name: Near-Surface Wind Speed -comment: This is the mean of the speed, not the speed computed from the mean u and v components of wind -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 height10m -out_name: sfcWind -type: real -!---------------------------------- -! - -!============ -variable_entry: hurs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: relative_humidity -units: % -cell_methods: time: point -cell_measures: area: areacella -long_name: Near-Surface Relative Humidity -comment: This is the relative humidity with respect to liquid water for T> 0 C, and with respect to ice for T<0 C. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 height2m -out_name: hurs -type: real -!---------------------------------- -! - -!============ -variable_entry: huss -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: specific_humidity -units: 1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Near-Surface Specific Humidity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 height2m -out_name: huss -type: real -!---------------------------------- -! - -!============ -variable_entry: pr -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: precipitation_flux -units: kg m-2 s-1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Precipitation -comment: at surface; includes both liquid and solid phases from all types of clouds (both large-scale and convective) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: pr -type: real -!---------------------------------- -! - -!============ -variable_entry: prsn -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: snowfall_flux -units: kg m-2 s-1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Snowfall Flux -comment: at surface; includes precipitation of all forms of water in the solid phase -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: prsn -type: real -!---------------------------------- -! - -!============ -variable_entry: prc -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: convective_precipitation_flux -units: kg m-2 s-1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Convective Precipitation -comment: at surface; includes both liquid and solid phases. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: prc -type: real -!---------------------------------- -! - -!============ -variable_entry: evspsbl -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: water_evaporation_flux -units: kg m-2 s-1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Evaporation -comment: at surface; flux of water into the atmosphere due to conversion of both liquid and solid phases to vapor (from underlying surface and vegetation) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: evspsbl -type: real -!---------------------------------- -! - -!============ -variable_entry: sbl -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_snow_and_ice_sublimation_flux -units: kg m-2 s-1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Surface Snow and Ice Sublimation Flux -comment: The snow and ice sublimation flux is the loss of snow and ice mass from the surface resulting from their conversion to water vapor that enters the atmosphere. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: sbl -type: real -!---------------------------------- -! - -!============ -variable_entry: tauu -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downward_eastward_stress -units: Pa -cell_methods: time: point -long_name: Surface Downward Eastward Wind Stress -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: tauu -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: tauv -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downward_northward_stress -units: Pa -cell_methods: time: point -long_name: Surface Downward Northward Wind Stress -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: tauv -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: hfls -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upward_latent_heat_flux -units: W m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: Surface Upward Latent Heat Flux -comment: includes both evaporation and sublimation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: hfls -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: hfss -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upward_sensible_heat_flux -units: W m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: Surface Upward Sensible Heat Flux -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: hfss -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rlds -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downwelling_longwave_flux_in_air -units: W m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: Surface Downwelling Longwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: rlds -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rlus -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upwelling_longwave_flux_in_air -units: W m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: Surface Upwelling Longwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: rlus -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rsds -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downwelling_shortwave_flux_in_air -units: W m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: Surface Downwelling Shortwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: rsds -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rsus -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upwelling_shortwave_flux_in_air -units: W m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: Surface Upwelling Shortwave Radiation -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: rsus -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rsdscs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downwelling_shortwave_flux_in_air_assuming_clear_sky -units: W m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: Surface Downwelling Clear-Sky Shortwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: rsdscs -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rsuscs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upwelling_shortwave_flux_in_air_assuming_clear_sky -units: W m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: Surface Upwelling Clear-Sky Shortwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: rsuscs -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rldscs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downwelling_longwave_flux_in_air_assuming_clear_sky -units: W m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: Surface Downwelling Clear-Sky Longwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: rldscs -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rsdt -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: toa_incoming_shortwave_flux -units: W m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: TOA Incident Shortwave Radiation -comment: at the top of the atmosphere -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: rsdt -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rsut -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: toa_outgoing_shortwave_flux -units: W m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: TOA Outgoing Shortwave Radiation -comment: at the top of the atmosphere -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: rsut -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rlut -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: toa_outgoing_longwave_flux -units: W m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: TOA Outgoing Longwave Radiation -comment: at the top of the atmosphere (to be compared with satellite measurements) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: rlut -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rlutcs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: toa_outgoing_longwave_flux_assuming_clear_sky -units: W m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: TOA Outgoing Clear-Sky Longwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: rlutcs -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rsutcs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: toa_outgoing_shortwave_flux_assuming_clear_sky -units: W m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: TOA Outgoing Clear-Sky Shortwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: rsutcs -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: prw -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: atmosphere_water_vapor_content -units: kg m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: Water Vapor Path -comment: vertically integrated through the atmospheric column -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: prw -type: real -!---------------------------------- -! - -!============ -variable_entry: clt -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction -units: % -cell_methods: time: point -cell_measures: area: areacella -long_name: Total Cloud Fraction -comment: for the whole atmospheric column, as seen from the surface or the top of the atmosphere. Include both large-scale and convective cloud. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: clt -type: real -!---------------------------------- -! - -!============ -variable_entry: clwvi -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: atmosphere_cloud_condensed_water_content -units: kg m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: Condensed Water Path -comment: mass of condensed (liquid + ice) water in the column divided by the area of the column (not just the area of the cloudy portion of the column). 
Includes precipitating hydrometeors ONLY if the precipitating hydrometeor affects the calculation of radiative transfer in model. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: clwvi -type: real -!---------------------------------- -! - -!============ -variable_entry: clivi -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: atmosphere_cloud_ice_content -units: kg m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: Ice Water Path -comment: mass of ice water in the column divided by the area of the column (not just the area of the cloudy portion of the column). Includes precipitating frozen hydrometeors ONLY if the precipitating hydrometeor affects the calculation of radiative transfer in model. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: clivi -type: real -!---------------------------------- -! - -!============ -variable_entry: rtmt -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: net_downward_radiative_flux_at_top_of_atmosphere_model -units: W m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: Net Downward Flux at Top of Model -comment: i.e., at the top of that portion of the atmosphere where dynamics are explicitly treated by the model. This is reported only if it differs from the net downward radiative flux at the top of the atmosphere. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: rtmt -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: ccb -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_pressure_at_convective_cloud_base -units: Pa -cell_methods: time: point -cell_measures: area: areacella -long_name: Air Pressure at Convective Cloud Base -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: ccb -type: real -!---------------------------------- -! - -!============ -variable_entry: cct -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_pressure_at_convective_cloud_top -units: Pa -cell_methods: time: point -cell_measures: area: areacella -long_name: Air Pressure at Convective Cloud Top -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: cct -type: real -!---------------------------------- -! - -!============ -variable_entry: ci -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: convection_time_fraction -units: 1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Fraction of Time Convection Occurs -comment: Fraction of time that convection occurs in the grid cell . -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: ci -type: real -!---------------------------------- -! - -!============ -variable_entry: sci -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: shallow_convection_time_fraction -units: 1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Fraction of Time Shallow Convection Occurs -comment: Fraction of time that shallow convection occurs in the grid cell. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: sci -type: real -!---------------------------------- -! - -!============ -variable_entry: fco2antt -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_carbon_dioxide_expressed_as_carbon_due_to_anthropogenic_emission -units: kg m-2 s-1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Carbon Mass Flux into Atmosphere Due to All Anthropogenic Emissions of CO2 -comment: This is requested only for the emission-driven coupled carbon climate model runs. Does not include natural fire sources but, includes all anthropogenic sources, including fossil fuel use, cement production, agricultural burning, and sources associated with anthropogenic land use change excluding forest regrowth. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: fco2antt -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: fco2fos -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_fossil_fuel_combustion -units: kg m-2 s-1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Carbon Mass Flux into Atmosphere Due to Fossil Fuel Emissions of CO2 -comment: This is the prescribed anthropogenic CO2 flux from fossil fuel use, including cement production, and flaring (but not from land-use changes, agricultural burning, forest regrowth, etc.) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: fco2fos -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: fco2nat -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_natural_sources -units: kg m-2 s-1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Surface Carbon Mass Flux into the Atmosphere Due to Natural Sources -comment: This is what the atmosphere sees (on its own grid). This field should be equivalent to the combined natural fluxes of carbon (requested in the L_mon and O_mon tables) that account for natural exchanges between the atmosphere and land or ocean reservoirs (i.e., ""net ecosystem biospheric productivity"", for land, and ""air to sea CO2 flux"", for ocean.) 
-!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: fco2nat -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: cltc -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: convective_cloud_area_fraction -units: % -cell_methods: time: point -cell_measures: area: areacella -long_name: Convective Cloud Fraction -comment: for the whole atmospheric column, as seen from the surface or the top of the atmosphere. Includes only convective cloud. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time1 -out_name: cltc -type: real -valid_min: 0.0 -valid_max: 100.0 -!---------------------------------- -! - -!============ -variable_entry: zfull -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: height_above_reference_ellipsoid -units: m -cell_methods: time: point -cell_measures: area: areacella -long_name: Altitude of Model Full-Levels -comment: This is actual height above mean sea level, not geopotential height -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time1 -out_name: zfull -type: real -!---------------------------------- -! - -!============ -variable_entry: zhalf -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: height_above_reference_ellipsoid -units: m -cell_methods: time: point -cell_measures: area: areacella -long_name: Altitude of Model Half-Levels -comment: This is actual height above mean sea level, not geopotential height. Includes both the top of the model atmosphere and surface levels. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevhalf time1 -out_name: zhalf -type: real -!---------------------------------- -! - -!============ -variable_entry: pfull -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_pressure -units: Pa -cell_methods: time: point -cell_measures: area: areacella -long_name: Pressure at Model Full-Levels -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time1 -out_name: pfull -type: real -!---------------------------------- -! - -!============ -variable_entry: phalf -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_pressure -units: Pa -cell_methods: time: point -cell_measures: area: areacella -long_name: Pressure at Model Half-Levels -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevhalf time1 -out_name: phalf -type: real -!---------------------------------- -!
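Every variable_entry in these tables follows the same plain-text layout: one `key: value` pair per line, framed by `!` comment rules. A toy parser illustrating that layout, assuming well-formed blocks (this is not the real CMOR table reader, which handles far more cases):

```python
# Toy parser for the "key: value" layout of these CMOR ASCII tables.
# Not the real CMOR reader: it ignores repeated keys (e.g. expt_id_ok),
# continuation lines, and trailing "!" comments after values.
def parse_entry(lines):
    entry = {}
    for line in lines:
        line = line.strip()
        if not line or line.startswith('!'):
            continue  # skip blank lines, comments and separator rules
        key, _, value = line.partition(':')
        entry[key.strip()] = value.strip()
    return entry

block = """\
variable_entry: phalf
modeling_realm: atmos
standard_name: air_pressure
units: Pa
cell_methods: time: point
dimensions: longitude latitude alevhalf time1
type: real
""".splitlines()

print(parse_entry(block)['units'])          # -> Pa
print(parse_entry(block)['cell_methods'])   # -> time: point
```

Splitting on the first colon only is what keeps compound values such as `cell_methods: time: point` intact.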
- -!============ -variable_entry: ta -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_temperature -units: K -cell_methods: time: point -cell_measures: area: areacella -long_name: Air Temperature -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time1 -out_name: ta -type: real -!---------------------------------- -! - -!============ -variable_entry: h2o -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_fraction_of_water_in_air -units: 1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Mass Fraction of Water -comment: includes all phases of water -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time1 -out_name: h2o -type: real -!---------------------------------- -! - -!============ -variable_entry: clws -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_fraction_of_stratiform_cloud_liquid_water_in_air -units: 1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Mass Fraction of Stratiform Cloud Liquid Water -comment: Calculated as the mass of stratiform cloud liquid water in the grid cell divided by the mass of air (including the water in all phases) in the grid cell. This includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time1 -out_name: clws -type: real -!---------------------------------- -! - -!============ -variable_entry: clis -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_fraction_of_stratiform_cloud_ice_in_air -units: 1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Mass Fraction of Stratiform Cloud Ice -comment: Calculate as the mass of stratiform cloud ice in the grid cell divided by the mass of air (including the water in all phases) in the grid cell. Include precipitating hydrometeors ONLY if the precipitating hydrometeor affects the calculation of radiative transfer in model. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time1 -out_name: clis -type: real -!---------------------------------- -! - -!============ -variable_entry: clwc -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_fraction_of_convective_cloud_liquid_water_in_air -units: 1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Mass Fraction of Convective Cloud Liquid Water -comment: Calculated as the mass of convective cloud liquid water in the grid cell divided by the mass of air (including the water in all phases) in the grid cell. This includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model. 
-!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time1 -out_name: clwc -type: real -!---------------------------------- -! - -!============ -variable_entry: clic -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_fraction_of_convective_cloud_ice_in_air -units: 1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Mass Fraction of Convective Cloud Ice -comment: Calculated as the mass of convective cloud ice in the grid cell divided by the mass of air (including the water in all phases) in the grid cell. This includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time1 -out_name: clic -type: real -!---------------------------------- -! - -!============ -variable_entry: reffclws -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: effective_radius_of_stratiform_cloud_liquid_water_particle -units: m -cell_methods: time: point -cell_measures: area: areacella -long_name: Hydrometeor Effective Radius of Stratiform Cloud Liquid Water -comment: This is defined as the in-cloud ratio of the third moment over the second moment of the particle size distribution (obtained by considering only the cloudy portion of the grid cell). -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time1 -out_name: reffclws -type: real -!---------------------------------- -! - -!============ -variable_entry: reffclis -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: effective_radius_of_stratiform_cloud_ice_particle -units: m -cell_methods: time: point -cell_measures: area: areacella -long_name: Hydrometeor Effective Radius of Stratiform Cloud Ice -comment: This is defined as the in-cloud ratio of the third moment over the second moment of the particle size distribution (obtained by considering only the cloudy portion of the grid cell). -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time1 -out_name: reffclis -type: real -!---------------------------------- -! - -!============ -variable_entry: reffclwc -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: effective_radius_of_convective_cloud_liquid_water_particle -units: m -cell_methods: time: point -cell_measures: area: areacella -long_name: Hydrometeor Effective Radius of Convective Cloud Liquid Water -comment: This is defined as the in-cloud ratio of the third moment over the second moment of the particle size distribution (obtained by considering only the cloudy portion of the grid cell). -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time1 -out_name: reffclwc -type: real -!---------------------------------- -! 
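The reff* entries here and below all define effective radius the same way: the in-cloud ratio of the third moment to the second moment of the particle size distribution. A short numerical illustration of that definition, with an invented exponential distribution:

```python
# Numerical illustration of the effective-radius definition quoted in the
# comments: r_eff = <r^3> / <r^2> over the particle size distribution.
# The distribution n(r) is invented for the example.
import numpy as np

r = np.linspace(1e-6, 30e-6, 200)  # particle radius [m]
n = np.exp(-r / 8e-6)              # hypothetical number density n(r)

# Discrete moments on a uniform grid; the grid spacing cancels in the ratio.
r_eff = (n * r**3).sum() / (n * r**2).sum()
print(f"r_eff = {r_eff:.2e} m")    # about 1.7e-05 m for this toy n(r)
```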
- -!============ -variable_entry: reffclic -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: effective_radius_of_convective_cloud_ice_particle -units: m -cell_methods: time: point -cell_measures: area: areacella -long_name: Hydrometeor Effective Radius of Convective Cloud Ice -comment: This is defined as the in-cloud ratio of the third moment over the second moment of the particle size distribution (obtained by considering only the cloudy portion of the grid cell). -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time1 -out_name: reffclic -type: real -!---------------------------------- -! - -!============ -variable_entry: grpllsprof -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: large_scale_graupel_flux -units: kg m-2 s-1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Stratiform Graupel Flux -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevhalf time1 -out_name: grpllsprof -type: real -!---------------------------------- -! - -!============ -variable_entry: prcprof -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: convective_rainfall_flux -units: kg m-2 s-1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Convective Rainfall Flux -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevhalf time1 -out_name: prcprof -type: real -!---------------------------------- -! - -!============ -variable_entry: prlsprof -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: large_scale_rainfall_flux -units: kg m-2 s-1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Stratiform Rainfall Flux -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevhalf time1 -out_name: prlsprof -type: real -!---------------------------------- -! - -!============ -variable_entry: prsnc -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: convective_snowfall_flux -units: kg m-2 s-1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Convective Snowfall Flux -comment: convective precipitation of all forms of water in the solid phase. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevhalf time1 -out_name: prsnc -type: real -!---------------------------------- -! - -!============ -variable_entry: prlsns -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: large_scale_snowfall_flux -units: kg m-2 s-1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Stratiform Snowfall Flux -comment: large-scale precipitation of all forms of water in the solid phase. 
-!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevhalf time1 -out_name: prlsns -type: real -!---------------------------------- -! - -!============ -variable_entry: reffgrpls -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: effective_radius_of_stratiform_cloud_graupel_particle -units: m -cell_methods: time: point -cell_measures: area: areacella -long_name: Hydrometeor Effective Radius of Stratiform Graupel -comment: This is defined as the in-cloud ratio of the third moment over the second moment of the particle size distribution (obtained by considering only the cloudy portion of the grid cell). -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time1 -out_name: reffgrpls -type: real -!---------------------------------- -! - -!============ -variable_entry: reffrainc -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: effective_radius_of_convective_cloud_rain_particle -units: m -cell_methods: time: point -cell_measures: area: areacella -long_name: Hydrometeor Effective Radius of Convective Rainfall -comment: This is defined as the in-cloud ratio of the third moment over the second moment of the particle size distribution (obtained by considering only the cloudy portion of the grid cell). -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time1 -out_name: reffrainc -type: real -!---------------------------------- -! - -!============ -variable_entry: reffrains -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: effective_radius_of_stratiform_cloud_rain_particle -units: m -cell_methods: time: point -cell_measures: area: areacella -long_name: Hydrometeor Effective Radius of Stratiform Rainfall -comment: This is defined as the in-cloud ratio of the third moment over the second moment of the particle size distribution (obtained by considering only the cloudy portion of the grid cell). -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time1 -out_name: reffrains -type: real -!---------------------------------- -! - -!============ -variable_entry: reffsnowc -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: effective_radius_of_convective_cloud_snow_particle -units: m -cell_methods: time: point -cell_measures: area: areacella -long_name: Hydrometeor Effective Radius of Convective Snowfall -comment: This is defined as the in-cloud ratio of the third moment over the second moment of the particle size distribution (obtained by considering only the cloudy portion of the grid cell). -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time1 -out_name: reffsnowc -type: real -!---------------------------------- -! - -!============ -variable_entry: reffsnows -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: effective_radius_of_stratiform_cloud_snow_particle -units: m -cell_methods: time: point -cell_measures: area: areacella -long_name: Hydrometeor Effective Radius of Stratiform Snowfall -comment: This is defined as the in-cloud ratio of the third moment over the second moment of the particle size distribution (obtained by considering only the cloudy portion of the grid cell). -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time1 -out_name: reffsnows -type: real -!---------------------------------- -! - -!============ -variable_entry: dtaus -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: atmosphere_optical_thickness_due_to_stratiform_cloud -units: 1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Stratiform Cloud Optical Depth -comment: This is the in-cloud optical depth obtained by considering only the cloudy portion of the grid cell. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time1 -out_name: dtaus -type: real -!---------------------------------- -! - -!============ -variable_entry: dtauc -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: atmosphere_optical_thickness_due_to_convective_cloud -units: 1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Convective Cloud Optical Depth -comment: This is the in-cloud optical depth obtained by considering only the cloudy portion of the grid cell -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time1 -out_name: dtauc -type: real -!---------------------------------- -! - -!============ -variable_entry: dems -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: stratiform_cloud_longwave_emissivity -units: 1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Stratiform Cloud Emissivity -comment: This is the in-cloud emissivity obtained by considering only the cloudy portion of the grid cell. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time1 -out_name: dems -type: real -!---------------------------------- -! - -!============ -variable_entry: demc -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: convective_cloud_longwave_emissivity -units: 1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Convective Cloud Emissivity -comment: This is the in-cloud emissivity obtained by considering only the cloudy portion of the grid cell. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time1 -out_name: demc -type: real -!---------------------------------- -! - -!============ -variable_entry: clc -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: convective_cloud_area_fraction_in_atmosphere_layer -units: % -cell_methods: time: point -cell_measures: area: areacella -long_name: Convective Cloud Area Fraction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time1 -out_name: clc -type: real -!---------------------------------- -! - -!============ -variable_entry: cls -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: stratiform_cloud_area_fraction_in_atmosphere_layer -units: % -cell_methods: time: point -cell_measures: area: areacella -long_name: Stratiform Cloud Area Fraction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time1 -out_name: cls -type: real -!---------------------------------- -! - diff --git a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_cfDay b/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_cfDay deleted file mode 100644 index aba44f9c0a..0000000000 --- a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_cfDay +++ /dev/null @@ -1,1623 +0,0 @@ -table_id: Table cfDay -modeling_realm: atmos - -frequency: day - -cmor_version: 2.6 ! minimum version of CMOR that can read this table -cf_version: 1.4 ! version of CF that output conforms to -project_id: CMIP5 ! project id -table_date: 17 July 2013 ! date this table was constructed - -missing_value: 1.e20 ! value used to indicate a missing value - ! in arrays output by netCDF as 32-bit IEEE - ! floating-point numbers (float or real) - -baseURL: http://cmip-pcmdi.llnl.gov/CMIP5/dataLocation -product: output - -required_global_attributes: creation_date tracking_id forcing model_id parent_experiment_id parent_experiment_rip branch_time contact institute_id ! 
space separated required global attribute - -forcings: N/A Nat Ant GHG SD SI SA TO SO Oz LU Sl Vl SS Ds BC MD OC AA - -expt_id_ok: '10- or 30-year run initialized in year XXXX' 'decadalXXXX' -expt_id_ok: 'volcano-free hindcast initialized in year XXXX' 'noVolcXXXX' -expt_id_ok: 'prediction with 2010 volcano' 'volcIn2010' -expt_id_ok: 'pre-industrial control' 'piControl' -expt_id_ok: 'historical' 'historical' -expt_id_ok: 'historical extension' 'historicalExt' -expt_id_ok: 'other historical forcing' 'historicalMisc' -expt_id_ok: 'mid-Holocene' 'midHolocene' -expt_id_ok: 'last glacial maximum' 'lgm' -expt_id_ok: 'last millennium' 'past1000' -expt_id_ok: 'RCP4.5' 'rcp45' -expt_id_ok: 'RCP8.5' 'rcp85' -expt_id_ok: 'RCP2.6' 'rcp26' -expt_id_ok: 'RCP6' 'rcp60' -expt_id_ok: 'ESM pre-industrial control' 'esmControl' -expt_id_ok: 'ESM historical' 'esmHistorical' -expt_id_ok: 'ESM RCP8.5' 'esmrcp85' -expt_id_ok: 'ESM fixed climate 1' 'esmFixClim1' -expt_id_ok: 'ESM fixed climate 2' 'esmFixClim2' -expt_id_ok: 'ESM feedback 1' 'esmFdbk1' -expt_id_ok: 'ESM feedback 2' 'esmFdbk2' -expt_id_ok: '1 percent per year CO2' '1pctCO2' -expt_id_ok: 'abrupt 4XCO2' 'abrupt4xCO2' -expt_id_ok: 'natural-only' 'historicalNat' -expt_id_ok: 'GHG-only' 'historicalGHG' -expt_id_ok: 'AMIP' 'amip' -expt_id_ok: '2030 time-slice' 'sst2030' -expt_id_ok: 'control SST climatology' 'sstClim' -expt_id_ok: 'CO2 forcing' 'sstClim4xCO2' -expt_id_ok: 'all aerosol forcing' 'sstClimAerosol' -expt_id_ok: 'sulfate aerosol forcing' 'sstClimSulfate' -expt_id_ok: '4xCO2 AMIP' 'amip4xCO2' -expt_id_ok: 'AMIP plus patterned anomaly' 'amipFuture' -expt_id_ok: 'aqua planet control' 'aquaControl' -expt_id_ok: '4xCO2 aqua planet' 'aqua4xCO2' -expt_id_ok: 'aqua planet plus 4K anomaly' 'aqua4K' -expt_id_ok: 'AMIP plus 4K anomaly' 'amip4K' - - -approx_interval: 1.000000 ! approximate spacing between successive time - ! samples (in units of the output time - ! coordinate. - -generic_levels: alevel alevhalf - -!============ -axis_entry: longitude -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: longitude -units: degrees_east -axis: X ! X, Y, Z, T (default: undeclared) -long_name: longitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lon -valid_min: 0.0 -valid_max: 360.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: latitude -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: latitude -units: degrees_north -axis: Y ! X, Y, Z, T (default: undeclared) -long_name: latitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lat -valid_min: -90.0 -valid_max: 90.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: plev7 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: air_pressure -units: Pa -axis: Z ! X, Y, Z, T (default: undeclared) -positive: down ! up or down (default: undeclared) -long_name: pressure -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: plev -stored_direction: decreasing -tolerance: 0.001 -type: double -requested: 90000. 74000. 62000. 50000. 
37500. 24500. 9000. ! space-separated list of requested coordinates -requested_bounds: 100000. 80000. 80000. 68000. 68000. 56000. 56000. 44000. 44000. 31000. 31000. 18000. 18000. 0. ! space-separated list of requested coordinate bounds -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: p500 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: air_pressure -units: Pa -axis: Z ! X, Y, Z, T (default: undeclared) -positive: down ! up or down (default: undeclared) -long_name: pressure -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: plev -stored_direction: decreasing -type: double -value: 50000. ! of scalar (singleton) dimension -must_have_bounds: no -!---------------------------------- -! - - -!============ -axis_entry: p700 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: air_pressure -units: Pa -axis: Z ! X, Y, Z, T (default: undeclared) -positive: down ! up or down (default: undeclared) -long_name: pressure -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: plev -stored_direction: decreasing -type: double -value: 70000. ! of scalar (singleton) dimension -must_have_bounds: no -!---------------------------------- -! - - -!============ -axis_entry: alt40 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: altitude -units: m -axis: Z ! X, Y, Z, T (default: undeclared) -positive: up ! up or down (default: undeclared) -long_name: altitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: alt40 -stored_direction: increasing -tolerance: 0.001 -type: double -requested: 240. 720. 1200. 1680. 2160. 2640. 3120. 3600. 4080. 4560. 5040. 5520. 6000. 6480. 6960. 7440. 7920. 8400. 8880. 9360. 9840. 10320. 10800. 11280. 11760. 12240. 12720. 13200. 13680. 14160. 14640. 15120. 15600. 16080. 16560. 17040. 17520. 18000. 18480. 18960. ! space-separated list of requested coordinates -requested_bounds: 0. 480. 480. 960. 960. 1440. 1440. 1920. 1920. 2400. 2400. 2880. 2880. 3360. 3360. 3840. 3840. 4320. 4320. 4800. 4800. 5280. 5280. 5760. 5760. 6240. 6240. 6720. 6720. 7200. 7200. 7680. 7680. 8160. 8160. 8640. 8640. 9120. 9120. 9600. 9600. 10080. 10080. 10560. 10560. 11040. 11040. 11520. 11520. 12000. 12000. 12480. 12480. 12960. 12960. 13440. 13440. 13920. 13920. 14400. 14400. 14880. 14880. 15360. 15360. 15840. 15840. 16320. 16320. 16800. 16800. 17280. 17280. 17760. 17760. 18240. 18240. 18720. 18720. 19200. ! space-separated list of requested coordinate bounds -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: time -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: time -units: days since ? -axis: T ! X, Y, Z, T (default: undeclared) -long_name: time -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: time -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: tau -!============ -!---------------------------------- -! 
Axis attributes: -!---------------------------------- -standard_name: atmosphere_optical_thickness_due_to_cloud -units: 1 -long_name: cloud optical thickness -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: tau -stored_direction: increasing -tolerance: 0.001 -type: double -requested: 0.15 0.8 2.45 6.5 16.2 41.5 100. ! space-separated list of requested coordinates -requested_bounds: 0.0 0.3 0.3 1.3 1.3 3.6 3.6 9.4 9.4 23.0 23.0 60.0 60.0 100000. ! space-separated list of requested coordinate bounds -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: sza5 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: solar_zenith_angle -units: degree -long_name: solar zenith angle -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: sza -stored_direction: increasing -tolerance: 0.001 -type: double -requested: 0. 20. 40. 60. 80. ! space-separated list of requested coordinates -must_have_bounds: no -!---------------------------------- -! - -!============ -axis_entry: smooth_level -!============ -! -! This coordinate is a hybrid height coordinate with units of meters (m). -! It increases upward. -! The values of a(k)*ztop, which appear in the formula below, should be stored as smooth_level. -! Note that in the netCDF file the variable will be named "lev", not smooth_level. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_sleve_coordinate -units: m -axis: Z -positive: up -long_name: atmosphere smooth level vertical (SLEVE) coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: increasing -valid_min: -200. -valid_max: 800000. -formula: z = a*ztop + b1*zsurf1 + b2*zsurf2 -z_factors: a: a b1: b1 b2: b2 ztop: ztop zsurf1: zsurf1 zsurf2: zsurf2 -z_bounds_factors: a: a_bnds b1: b1_bnds b2: b2_bnds ztop: ztop zsurf1: zsurf1 zsurf2: zsurf2 -!---------------------------------- -! -!============ -axis_entry: natural_log_pressure -!============ -! -!This coordinate is dimensionless and varies from near 0 at the surface and increases upward. -! The values of lev(k), which appears in the formula below, should be stored as natural_log_pressure. -! Note that in the netCDF file the variable will be named "lev", not natural_log_pressure. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_ln_pressure_coordinate -axis: Z -long_name: atmosphere natural log pressure coordinate -positive: down -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: -1. -valid_max: 20. -formula: p = p0 * exp(-lev) -z_factors: p0: p0 lev: lev -z_bounds_factors: p0: p0 lev: lev_bnds -!---------------------------------- -! -!============ -axis_entry: standard_sigma -!============ -! -! This coordinate is dimensionless and varies from 0 at the model top to 1.0 at the surface. -! The values of sigma(k), which appears in the formula below, should be stored as standard_sigma. -! Note that in the netCDF file the variable will be named "lev", not standard_sigma. -! -!------------ -! -! 
Axis attributes: -!---------------------------------- -standard_name: atmosphere_sigma_coordinate -axis: Z -positive: down -long_name: sigma coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: 0.0 -valid_max: 1.0 -formula: p = ptop + sigma*(ps - ptop) -z_factors: ptop: ptop sigma: lev ps: ps -z_bounds_factors: ptop: ptop sigma: lev_bnds ps: ps -!---------------------------------- -! -! -!============ -axis_entry: standard_hybrid_sigma -!============ -! -! This coordinate is dimensionless and varies from a small value at the model top to 1.0 at the surface. -! The values of a+ b, which appear in the formula below, should be stored as standard_hybrid_sigma. -! Note that in the netCDF file the variable will be named "lev", not standard_hybrid_sigma. -! -!--------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_hybrid_sigma_pressure_coordinate -units: 1 -axis: Z -positive: down -long_name: hybrid sigma pressure coordinate -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: 0.0 -valid_max: 1.0 -formula: p = a*p0 + b*ps -z_factors: p0: p0 a: a b: b ps: ps -z_bounds_factors: p0: p0 a: a_bnds b: b_bnds ps: ps -!---------------------------------- -! -! -!============ -axis_entry: alternate_hybrid_sigma -!============ -! -! This coordinate is dimensionless and varies from a small value at the model top to 1.0 at the surface. -! The values of ap/p0 + b, which appear in the formula below, should be stored as alternate_hybrid_sigma. -! Note that in the netCDF file the variable will be named "lev", not alternate_hybrid_sigma. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_hybrid_sigma_pressure_coordinate -units: 1 -axis: Z -positive: down -long_name: hybrid sigma pressure coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: 0.0 -valid_max: 1.0 -formula: p = ap + b*ps -z_factors: ap: ap b: b ps: ps -z_bounds_factors: ap: ap_bnds b: b_bnds ps: ps -!---------------------------------- -! -! -!============ -axis_entry: hybrid_height -!============ -! -! This coordinate has dimension of meters (m) and increases upward. -! The values of a which appear in the formula below, should be stored as hybrid_height. -! Note that in the netCDF file the variable will be named "lev", not hybrid_height. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_hybrid_height_coordinate -units: m -axis: Z -positive: up -long_name: hybrid height coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: increasing -valid_min: 0.0 -formula: z = a + b*orog -z_factors: a: lev b: b orog: orog -z_bounds_factors: a: lev_bnds b: b_bnds orog: orog -!---------------------------------- -! -! *************************************************************** -! -! Vertical coordinate formula terms: -! -! *************************************************************** -! -! 
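The `formula`, `z_factors`, and `z_bounds_factors` entries above define how a reader of the output files reconstructs a full 3-D pressure or height field from the stored 1-D coefficients. As a minimal illustration (not part of the table format itself; the array names, shapes, and values below are invented for the example), the standard hybrid sigma formula p = a*p0 + b*ps expands with NumPy broadcasting like this:

    import numpy as np

    # Invented coefficients for a 3-level hybrid sigma coordinate.
    a = np.array([0.002, 0.10, 0.0])      # a(k), dimensionless
    b = np.array([0.0, 0.40, 1.0])        # b(k), dimensionless
    p0 = 100000.0                         # reference pressure [Pa]
    ps = np.array([[100500.0, 98000.0]])  # surface pressure [Pa], shape (lat, lon)

    # p = a*p0 + b*ps, broadcast from (lev,) and (lat, lon) to (lev, lat, lon).
    p = a[:, None, None] * p0 + b[:, None, None] * ps[None, :, :]
    assert p.shape == (3, 1, 2)

The bounds coefficients (a_bnds, b_bnds) plug into the same formula at the layer interfaces, which is why each coordinate also carries a `z_bounds_factors` mapping; the variable entries that follow (orog, p0, ptop, a, b, ...) declare those formula terms.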
-!============ -variable_entry: orog -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_altitude -units: m -long_name: Surface Altitude -comment: height above the geoid; as defined here, ""the geoid"" is a surface of constant geopotential that, if the ocean were at rest, would coincide with mean sea level. Under this definition, the geoid changes as the mean volume of the ocean changes (e.g., due to glacial melt, or global warming of the ocean). Report here the height above the present-day geoid. Over ocean, report as 0.0 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude -out_name: orog -type: real -valid_min: -700 -valid_max: 1.00E+04 -!---------------------------------- -! -! -!============ -variable_entry: p0 -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: reference pressure -units: Pa -!---------------------------------- -! -! -!============ -variable_entry: ptop -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: pressure at top of model -units: Pa -!---------------------------------- -! -! -! -!============ -variable_entry: a -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: a(k) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: alevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: b -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: b(k) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: alevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: a_bnds -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: a(k+1/2) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: alevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: b_bnds -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: b(k+1/2) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: alevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: ap -!============ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: ap(k) -units: Pa -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: alevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: ap_bnds -!============ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: ap(k+1/2) -units: Pa -!---------------------------------- -! -! 
Additional variable information: -!---------------------------------- -dimensions: alevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: ztop -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: height of top of model -units: m -!---------------------------------- -! -! -! - -!============ -variable_entry: ps -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_air_pressure -units: Pa -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Air Pressure -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: ps -type: real -!---------------------------------- -! - -!============ -variable_entry: rsdt -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: toa_incoming_shortwave_flux -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: TOA Incident Shortwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rsdt -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rsut -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: toa_outgoing_shortwave_flux -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: TOA Outgoing Shortwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rsut -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rsdscs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downwelling_shortwave_flux_in_air_assuming_clear_sky -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Downwelling Clear-Sky Shortwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rsdscs -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rsuscs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upwelling_shortwave_flux_in_air_assuming_clear_sky -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Upwelling Clear-Sky Shortwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rsuscs -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rldscs -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: surface_downwelling_longwave_flux_in_air_assuming_clear_sky -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Downwelling Clear-Sky Longwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rldscs -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rlutcs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: toa_outgoing_longwave_flux_assuming_clear_sky -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: TOA Outgoing Clear-Sky Longwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rlutcs -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rsutcs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: toa_outgoing_shortwave_flux_assuming_clear_sky -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: TOA Outgoing Clear-Sky Shortwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rsutcs -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: clt -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: Total Cloud Fraction -comment: for the whole atmospheric column, as seen from the surface or the top of the atmosphere. Includes both large-scale and convective cloud. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: clt -type: real -!---------------------------------- -! - -!============ -variable_entry: clwvi -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: atmosphere_cloud_condensed_water_content -units: kg m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Condensed Water Path -comment: calculate mass of condensed (liquid + ice) water in the column divided by the area of the column (not just the area of the cloudy portion of the column). This includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: clwvi -type: real -!---------------------------------- -! - -!============ -variable_entry: clivi -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: atmosphere_cloud_ice_content -units: kg m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Ice Water Path -comment: calculate mass of ice water in the column divided by the area of the column (not just the area of the cloudy portion of the column). This includes precipitating frozen hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: clivi -type: real -!---------------------------------- -! - -!============ -variable_entry: wap500 -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: lagrangian_tendency_of_air_pressure -units: Pa s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: omega (=dp/dt) -comment: at 500 hPa level; commonly referred to as ""omega"", this represents the vertical component of velocity in pressure coordinates (positive down) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time p500 -out_name: wap500 -type: real -!---------------------------------- -! - -!============ -variable_entry: ta700 -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_temperature -units: K -cell_methods: time: mean -cell_measures: area: areacella -long_name: Air Temperature -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time p700 -out_name: ta700 -type: real -!---------------------------------- -! - -!============ -variable_entry: ccb -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_pressure_at_convective_cloud_base -units: Pa -cell_methods: time: mean -cell_measures: area: areacella -long_name: Air Pressure at Convective Cloud Base -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: ccb -type: real -!---------------------------------- -! - -!============ -variable_entry: cct -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_pressure_at_convective_cloud_top -units: Pa -cell_methods: time: mean -cell_measures: area: areacella -long_name: Air Pressure at Convective Cloud Top -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: cct -type: real -!---------------------------------- -! - -!============ -variable_entry: prc -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: convective_precipitation_flux -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Convective Precipitation -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: prc -type: real -!---------------------------------- -! - -!============ -variable_entry: hfls -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upward_latent_heat_flux -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Upward Latent Heat Flux -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfls -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: hfss -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upward_sensible_heat_flux -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Upward Sensible Heat Flux -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfss -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rlds -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downwelling_longwave_flux_in_air -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Downwelling Longwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rlds -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rlus -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upwelling_longwave_flux_in_air -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Upwelling Longwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rlus -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rsds -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downwelling_shortwave_flux_in_air -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Downwelling Shortwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rsds -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rsus -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upwelling_shortwave_flux_in_air -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Upwelling Shortwave Radiation -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rsus -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rlut -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: toa_outgoing_longwave_flux -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: TOA Outgoing Longwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rlut -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: cltisccp -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: ISCCP Total Total Cloud Fraction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: cltisccp -type: real -!---------------------------------- -! - -!============ -variable_entry: albisccp -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_albedo -units: 1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: ISCCP Mean Cloud Albedo -comment: time-means are weighted by the ISCCP Total Cloud Fraction - see http://cfmip.metoffice.com/COSP.html -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: albisccp -type: real -!---------------------------------- -! - -!============ -variable_entry: pctisccp -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_pressure_at_cloud_top -units: Pa -cell_methods: time: mean -cell_measures: area: areacella -long_name: ISCCP Mean Cloud Top Pressure -comment: time-means are weighted by the ISCCP Total Cloud Fraction - see http://cfmip.metoffice.com/COSP.html -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: pctisccp -type: real -!---------------------------------- -! - -!============ -variable_entry: parasolRefl -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: toa_bidirectional_reflectance -units: 1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: PARASOL Reflectance -comment: Simulated reflectance from PARASOL as seen at the top of the atmosphere for 5 solar zenith angles. Valid only over ocean and for one viewing direction (viewing zenith angle of 30 degrees and relative azimuth angle 320 degrees). -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude sza5 time -out_name: parasolRefl -type: real -!---------------------------------- -! - -!============ -variable_entry: cltcalipso -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: CALIPSO Total Cloud Fraction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: cltcalipso -type: real -!---------------------------------- -! - -!============ -variable_entry: cllcalipso -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction_in_atmosphere_layer -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: CALIPSO Low Level Cloud Fraction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: cllcalipso -type: real -!---------------------------------- -! - -!============ -variable_entry: clmcalipso -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction_in_atmosphere_layer -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: CALIPSO Mid Level Cloud Fraction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: clmcalipso -type: real -!---------------------------------- -! - -!============ -variable_entry: clhcalipso -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction_in_atmosphere_layer -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: CALIPSO High Level Cloud Fraction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: clhcalipso -type: real -!---------------------------------- -! - -!============ -variable_entry: ua -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: eastward_wind -units: m s-1 -cell_methods: time: mean -long_name: Eastward Wind -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: ua -type: real -!---------------------------------- -! - -!============ -variable_entry: va -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: northward_wind -units: m s-1 -cell_methods: time: mean -long_name: Northward Wind -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: va -type: real -!---------------------------------- -! - -!============ -variable_entry: ta -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_temperature -units: K -cell_methods: time: mean -cell_measures: area: areacella -long_name: Air Temperature -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: ta -type: real -!---------------------------------- -! - -!============ -variable_entry: hus -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: specific_humidity -units: 1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Specific Humidity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: hus -type: real -!---------------------------------- -! - -!============ -variable_entry: wap -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: lagrangian_tendency_of_air_pressure -units: Pa s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: omega (=dp/dt) -comment: commonly referred to as ""omega"", this represents the vertical component of velocity in pressure coordinates (positive down) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: wap -type: real -!---------------------------------- -! - -!============ -variable_entry: zg -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: geopotential_height -units: m -cell_methods: time: mean -cell_measures: area: areacella -long_name: Geopotential Height -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: zg -type: real -!---------------------------------- -! - -!============ -variable_entry: hur -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: relative_humidity -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: Relative Humidity -comment: This is the relative humidity with respect to liquid water for T> 0 C, and with respect to ice for T<0 C. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: hur -type: real -!---------------------------------- -! - -!============ -variable_entry: cl -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction_in_atmosphere_layer -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: Cloud Area Fraction in Atmosphere Layer -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: cl -type: real -!---------------------------------- -! - -!============ -variable_entry: clw -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: mass_fraction_of_cloud_liquid_water_in_air -units: 1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Mass Fraction of Cloud Liquid Water -comment: Calculated as the mass of cloud liquid water in the grid cell divided by the mass of air (including the water in all phases) in the grid cell. This includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: clw -type: real -!---------------------------------- -! - -!============ -variable_entry: cli -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_fraction_of_cloud_ice_in_air -units: 1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Mass Fraction of Cloud Ice -comment: Calculated as the mass of cloud ice in the grid cell divided by the mass of air (including the water in all phases) in the grid cell. This includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: cli -type: real -!---------------------------------- -! - -!============ -variable_entry: mc -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: atmosphere_net_upward_convective_mass_flux -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Convective Mass Flux -comment: The net mass flux should represent the difference between the updraft and downdraft components. This is calculated as the convective mass flux divided by the area of the whole grid cell (not just the area of the cloud). -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevhalf time -out_name: mc -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: clcalipso -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction_in_atmosphere_layer -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: CALIPSO Cloud Fraction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alt40 time -out_name: clcalipso -type: real -!---------------------------------- -! - -!============ -variable_entry: clisccp -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction_in_atmosphere_layer -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: ISCCP Cloud Area Fraction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude plev7 tau time -out_name: clisccp -type: real -!---------------------------------- -! 
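The clisccp entry above is the one variable in this table with both a pressure axis (plev7) and an optical-thickness axis (tau): it stores the ISCCP simulator's joint histogram, i.e. a cloud fraction per (pressure bin, tau bin) at every grid point and time step. A hedged sketch of how such a field collapses back to a total cloud fraction, assuming an invented (time, plev7, tau, lat, lon) array layout:

    import numpy as np

    # Invented clisccp sample: cloud fraction in % per (plev7, tau) bin.
    rng = np.random.default_rng(0)
    clisccp = rng.uniform(0.0, 2.0, size=(2, 7, 7, 4, 8))

    # Summing over the 7 pressure bins and 7 tau bins yields a field
    # comparable to cltisccp (the ISCCP total cloud fraction).
    total = clisccp.sum(axis=(1, 2))
    assert total.shape == (2, 4, 8)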
- -!============ -variable_entry: pfull -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_pressure -units: Pa -cell_methods: time: mean -cell_measures: area: areacella -long_name: Pressure on Model Levels -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevel time -out_name: pfull -type: real -!---------------------------------- -! - -!============ -variable_entry: phalf -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_pressure -units: Pa -cell_methods: time: mean -cell_measures: area: areacella -long_name: Pressure on Model Half-Levels -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevhalf time -out_name: phalf -type: real -!---------------------------------- -! - diff --git a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_cfMon b/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_cfMon deleted file mode 100644 index 5a16f24d42..0000000000 --- a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_cfMon +++ /dev/null @@ -1,2776 +0,0 @@ -table_id: Table cfMon -modeling_realm: atmos - -frequency: mon - -cmor_version: 2.6 ! minimum version of CMOR that can read this table -cf_version: 1.4 ! version of CF that output conforms to -project_id: CMIP5 ! project id -table_date: 17 July 2013 ! date this table was constructed - -missing_value: 1.e20 ! value used to indicate a missing value - ! in arrays output by netCDF as 32-bit IEEE - ! floating-point numbers (float or real) - -baseURL: http://cmip-pcmdi.llnl.gov/CMIP5/dataLocation -product: output - -required_global_attributes: creation_date tracking_id forcing model_id parent_experiment_id parent_experiment_rip branch_time contact institute_id ! 
space separated required global attribute - -forcings: N/A Nat Ant GHG SD SI SA TO SO Oz LU Sl Vl SS Ds BC MD OC AA - -expt_id_ok: '10- or 30-year run initialized in year XXXX' 'decadalXXXX' -expt_id_ok: 'volcano-free hindcast initialized in year XXXX' 'noVolcXXXX' -expt_id_ok: 'prediction with 2010 volcano' 'volcIn2010' -expt_id_ok: 'pre-industrial control' 'piControl' -expt_id_ok: 'historical' 'historical' -expt_id_ok: 'historical extension' 'historicalExt' -expt_id_ok: 'other historical forcing' 'historicalMisc' -expt_id_ok: 'mid-Holocene' 'midHolocene' -expt_id_ok: 'last glacial maximum' 'lgm' -expt_id_ok: 'last millennium' 'past1000' -expt_id_ok: 'RCP4.5' 'rcp45' -expt_id_ok: 'RCP8.5' 'rcp85' -expt_id_ok: 'RCP2.6' 'rcp26' -expt_id_ok: 'RCP6' 'rcp60' -expt_id_ok: 'ESM pre-industrial control' 'esmControl' -expt_id_ok: 'ESM historical' 'esmHistorical' -expt_id_ok: 'ESM RCP8.5' 'esmrcp85' -expt_id_ok: 'ESM fixed climate 1' 'esmFixClim1' -expt_id_ok: 'ESM fixed climate 2' 'esmFixClim2' -expt_id_ok: 'ESM feedback 1' 'esmFdbk1' -expt_id_ok: 'ESM feedback 2' 'esmFdbk2' -expt_id_ok: '1 percent per year CO2' '1pctCO2' -expt_id_ok: 'abrupt 4XCO2' 'abrupt4xCO2' -expt_id_ok: 'natural-only' 'historicalNat' -expt_id_ok: 'GHG-only' 'historicalGHG' -expt_id_ok: 'AMIP' 'amip' -expt_id_ok: '2030 time-slice' 'sst2030' -expt_id_ok: 'control SST climatology' 'sstClim' -expt_id_ok: 'CO2 forcing' 'sstClim4xCO2' -expt_id_ok: 'all aerosol forcing' 'sstClimAerosol' -expt_id_ok: 'sulfate aerosol forcing' 'sstClimSulfate' -expt_id_ok: '4xCO2 AMIP' 'amip4xCO2' -expt_id_ok: 'AMIP plus patterned anomaly' 'amipFuture' -expt_id_ok: 'aqua planet control' 'aquaControl' -expt_id_ok: '4xCO2 aqua planet' 'aqua4xCO2' -expt_id_ok: 'aqua planet plus 4K anomaly' 'aqua4K' -expt_id_ok: 'AMIP plus 4K anomaly' 'amip4K' - - -approx_interval: 30.000000 ! approximate spacing between successive time - ! samples (in units of the output time - ! coordinate. - -generic_levels: alevel alevhalf - -!============ -axis_entry: longitude -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: longitude -units: degrees_east -axis: X ! X, Y, Z, T (default: undeclared) -long_name: longitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lon -valid_min: 0.0 -valid_max: 360.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: latitude -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: latitude -units: degrees_north -axis: Y ! X, Y, Z, T (default: undeclared) -long_name: latitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lat -valid_min: -90.0 -valid_max: 90.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: plev7 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: air_pressure -units: Pa -axis: Z ! X, Y, Z, T (default: undeclared) -positive: down ! up or down (default: undeclared) -long_name: pressure -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: plev -stored_direction: decreasing -tolerance: 0.001 -type: double -requested: 90000. 74000. 62000. 50000. 
37500. 24500. 9000. ! space-separated list of requested coordinates -requested_bounds: 100000. 80000. 80000. 68000. 68000. 56000. 56000. 44000. 44000. 31000. 31000. 18000. 18000. 0. ! space-separated list of requested coordinate bounds -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: p220 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: air_pressure -units: Pa -axis: Z ! X, Y, Z, T (default: undeclared) -positive: down ! up or down (default: undeclared) -long_name: pressure -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: plev -stored_direction: decreasing -type: double -value: 22000. ! of scalar (singleton) dimension -bounds_values: 44000. 0.0 ! of scalar (singleton) dimension bounds -must_have_bounds: no -!---------------------------------- -! - - -!============ -axis_entry: p560 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: air_pressure -units: Pa -axis: Z ! X, Y, Z, T (default: undeclared) -positive: down ! up or down (default: undeclared) -long_name: pressure -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: plev -stored_direction: decreasing -type: double -value: 56000. ! of scalar (singleton) dimension -bounds_values: 68000. 44000. ! of scalar (singleton) dimension bounds -must_have_bounds: no -!---------------------------------- -! - - -!============ -axis_entry: p840 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: air_pressure -units: Pa -axis: Z ! X, Y, Z, T (default: undeclared) -positive: down ! up or down (default: undeclared) -long_name: pressure -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: plev -stored_direction: decreasing -type: double -value: 84000. ! of scalar (singleton) dimension -bounds_values: 100000. 68000. ! of scalar (singleton) dimension bounds -must_have_bounds: no -!---------------------------------- -! - - -!============ -axis_entry: alt40 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: altitude -units: m -axis: Z ! X, Y, Z, T (default: undeclared) -positive: up ! up or down (default: undeclared) -long_name: altitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: alt40 -stored_direction: increasing -tolerance: 0.001 -type: double -requested: 240. 720. 1200. 1680. 2160. 2640. 3120. 3600. 4080. 4560. 5040. 5520. 6000. 6480. 6960. 7440. 7920. 8400. 8880. 9360. 9840. 10320. 10800. 11280. 11760. 12240. 12720. 13200. 13680. 14160. 14640. 15120. 15600. 16080. 16560. 17040. 17520. 18000. 18480. 18960. ! space-separated list of requested coordinates -requested_bounds: 0. 480. 480. 960. 960. 1440. 1440. 1920. 1920. 2400. 2400. 2880. 2880. 3360. 3360. 3840. 3840. 4320. 4320. 4800. 4800. 5280. 5280. 5760. 5760. 6240. 6240. 6720. 6720. 7200. 7200. 7680. 7680. 8160. 8160. 8640. 8640. 9120. 9120. 9600. 9600. 10080. 10080. 10560. 10560. 11040. 11040. 11520. 11520. 12000. 12000. 12480. 12480. 12960. 12960. 13440. 13440. 13920. 13920. 14400. 14400. 14880. 14880. 15360. 15360. 15840. 15840. 16320. 16320. 16800. 16800. 17280. 17280. 17760. 17760. 18240. 18240. 
18720. 18720. 19200. ! space-separated list of requested coordinate bounds -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: time -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: time -units: days since ? -axis: T ! X, Y, Z, T (default: undeclared) -long_name: time -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: time -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: tau -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_optical_thickness_due_to_cloud -units: 1 -long_name: cloud optical thickness -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: tau -stored_direction: increasing -tolerance: 0.001 -type: double -requested: 0.15 0.8 2.45 6.5 16.2 41.5 100. ! space-separated list of requested coordinates -requested_bounds: 0.0 0.3 0.3 1.3 1.3 3.6 3.6 9.4 9.4 23.0 23.0 60.0 60.0 100000. ! space-separated list of requested coordinate bounds -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: sza5 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: solar_zenith_angle -units: degree -long_name: solar zenith angle -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: sza -stored_direction: increasing -tolerance: 0.001 -type: double -requested: 0. 20. 40. 60. 80. ! space-separated list of requested coordinates -must_have_bounds: no -!---------------------------------- -! - -!============ -axis_entry: smooth_level -!============ -! -! This coordinate is a hybrid height coordinate with units of meters (m). -! It increases upward. -! The values of a(k)*ztop, which appear in the formula below, should be stored as smooth_level. -! Note that in the netCDF file the variable will be named "lev", not smooth_level. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_sleve_coordinate -units: m -axis: Z -positive: up -long_name: atmosphere smooth level vertical (SLEVE) coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: increasing -valid_min: -200. -valid_max: 800000. -formula: z = a*ztop + b1*zsurf1 + b2*zsurf2 -z_factors: a: a b1: b1 b2: b2 ztop: ztop zsurf1: zsurf1 zsurf2: zsurf2 -z_bounds_factors: a: a_bnds b1: b1_bnds b2: b2_bnds ztop: ztop zsurf1: zsurf1 zsurf2: zsurf2 -!---------------------------------- -! -!============ -axis_entry: natural_log_pressure -!============ -! -!This coordinate is dimensionless and varies from near 0 at the surface and increases upward. -! The values of lev(k), which appears in the formula below, should be stored as natural_log_pressure. -! Note that in the netCDF file the variable will be named "lev", not natural_log_pressure. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_ln_pressure_coordinate -axis: Z -long_name: atmosphere natural log pressure coordinate -positive: down -!---------------------------------- -! -! 
Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: -1. -valid_max: 20. -formula: p = p0 * exp(-lev) -z_factors: p0: p0 lev: lev -z_bounds_factors: p0: p0 lev: lev_bnds -!---------------------------------- -! -!============ -axis_entry: standard_sigma -!============ -! -! This coordinate is dimensionless and varies from 0 at the model top to 1.0 at the surface. -! The values of sigma(k), which appears in the formula below, should be stored as standard_sigma. -! Note that in the netCDF file the variable will be named "lev", not standard_sigma. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_sigma_coordinate -axis: Z -positive: down -long_name: sigma coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: 0.0 -valid_max: 1.0 -formula: p = ptop + sigma*(ps - ptop) -z_factors: ptop: ptop sigma: lev ps: ps -z_bounds_factors: ptop: ptop sigma: lev_bnds ps: ps -!---------------------------------- -! -! -!============ -axis_entry: standard_hybrid_sigma -!============ -! -! This coordinate is dimensionless and varies from a small value at the model top to 1.0 at the surface. -! The values of a+ b, which appear in the formula below, should be stored as standard_hybrid_sigma. -! Note that in the netCDF file the variable will be named "lev", not standard_hybrid_sigma. -! -!--------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_hybrid_sigma_pressure_coordinate -units: 1 -axis: Z -positive: down -long_name: hybrid sigma pressure coordinate -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: 0.0 -valid_max: 1.0 -formula: p = a*p0 + b*ps -z_factors: p0: p0 a: a b: b ps: ps -z_bounds_factors: p0: p0 a: a_bnds b: b_bnds ps: ps -!---------------------------------- -! -! -!============ -axis_entry: alternate_hybrid_sigma -!============ -! -! This coordinate is dimensionless and varies from a small value at the model top to 1.0 at the surface. -! The values of ap/p0 + b, which appear in the formula below, should be stored as alternate_hybrid_sigma. -! Note that in the netCDF file the variable will be named "lev", not alternate_hybrid_sigma. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_hybrid_sigma_pressure_coordinate -units: 1 -axis: Z -positive: down -long_name: hybrid sigma pressure coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: 0.0 -valid_max: 1.0 -formula: p = ap + b*ps -z_factors: ap: ap b: b ps: ps -z_bounds_factors: ap: ap_bnds b: b_bnds ps: ps -!---------------------------------- -! -! -!============ -axis_entry: hybrid_height -!============ -! -! This coordinate has dimension of meters (m) and increases upward. -! The values of a which appear in the formula below, should be stored as hybrid_height. -! Note that in the netCDF file the variable will be named "lev", not hybrid_height. -! -!------------ -! -! 
-! Axis attributes:
-!----------------------------------
-standard_name: atmosphere_hybrid_height_coordinate
-units: m
-axis: Z
-positive: up
-long_name: hybrid height coordinate
-!----------------------------------
-!
-! Additional axis information:
-!----------------------------------
-out_name: lev
-must_have_bounds: yes
-stored_direction: increasing
-valid_min: 0.0
-formula: z = a + b*orog
-z_factors: a: lev b: b orog: orog
-z_bounds_factors: a: lev_bnds b: b_bnds orog: orog
-!----------------------------------
-!
-! ***************************************************************
-!
-! Vertical coordinate formula terms:
-!
-! ***************************************************************
-!
-!
-!============
-variable_entry: orog
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: surface_altitude
-units: m
-long_name: Surface Altitude
-comment: height above the geoid; as defined here, ""the geoid"" is a surface of constant geopotential that, if the ocean were at rest, would coincide with mean sea level. Under this definition, the geoid changes as the mean volume of the ocean changes (e.g., due to glacial melt, or global warming of the ocean). Report here the height above the present-day geoid. Over ocean, report as 0.0
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude
-out_name: orog
-type: real
-valid_min: -700
-valid_max: 1.00E+04
-!----------------------------------
-!
-!
-!============
-variable_entry: p0
-!============
-!------------
-!
-! Variable attributes:
-!----------------------------------
-long_name: vertical coordinate formula term: reference pressure
-units: Pa
-!----------------------------------
-!
-!
-!============
-variable_entry: ptop
-!============
-!
-!------------
-!
-! Variable attributes:
-!----------------------------------
-long_name: pressure at top of model
-units: Pa
-!----------------------------------
-!
-!
-!
-!============
-variable_entry: a
-!============
-!------------
-!
-! Variable attributes:
-!----------------------------------
-long_name: vertical coordinate formula term: a(k)
-!----------------------------------
-!
-! Additional variable information:
-!----------------------------------
-dimensions: alevel
-type: double
-!----------------------------------
-!
-!
-!============
-variable_entry: b
-!============
-!------------
-!
-! Variable attributes:
-!----------------------------------
-long_name: vertical coordinate formula term: b(k)
-!----------------------------------
-!
-! Additional variable information:
-!----------------------------------
-dimensions: alevel
-type: double
-!----------------------------------
-!
-!
-!============
-variable_entry: a_bnds
-!============
-!
-!------------
-!
-! Variable attributes:
-!----------------------------------
-long_name: vertical coordinate formula term: a(k+1/2)
-!----------------------------------
-!
-! Additional variable information:
-!----------------------------------
-dimensions: alevel
-type: double
-!----------------------------------
-!
-!
-!============
-variable_entry: b_bnds
-!============
-!
-!------------
-!
-! Variable attributes:
-!----------------------------------
-long_name: vertical coordinate formula term: b(k+1/2)
-!----------------------------------
-!
-! Additional variable information:
-!----------------------------------
-dimensions: alevel
-type: double
-!----------------------------------
-!
-!
-!============
-variable_entry: ap
-!============
-!
-! Variable attributes:
-!----------------------------------
-long_name: vertical coordinate formula term: ap(k)
-units: Pa
-!----------------------------------
-!
-! Additional variable information:
-!----------------------------------
-dimensions: alevel
-type: double
-!----------------------------------
-!
-!
-!============
-variable_entry: ap_bnds
-!============
-!
-! Variable attributes:
-!----------------------------------
-long_name: vertical coordinate formula term: ap(k+1/2)
-units: Pa
-!----------------------------------
-!
-! Additional variable information:
-!----------------------------------
-dimensions: alevel
-type: double
-!----------------------------------
-!
-!
-!============
-variable_entry: ztop
-!============
-!
-!------------
-!
-! Variable attributes:
-!----------------------------------
-long_name: height of top of model
-units: m
-!----------------------------------
-!
-!
-!
-!
-!============
-variable_entry: ps
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: surface_air_pressure
-units: Pa
-cell_methods: time: mean
-long_name: Surface Air Pressure
-comment: not, in general, the same as mean sea-level pressure
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude time
-out_name: ps
-type: real
-!
-
-!============
-variable_entry: rlu
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: upwelling_longwave_flux_in_air
-units: W m-2
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Upwelling Longwave Radiation
-comment: Includes also the fluxes at the surface and TOA.
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevhalf time
-out_name: rlu
-type: real
-positive: up
-!----------------------------------
-!
-
-!============
-variable_entry: rsu
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: upwelling_shortwave_flux_in_air
-units: W m-2
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Upwelling Shortwave Radiation
-comment: Includes also the fluxes at the surface and TOA.
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevhalf time
-out_name: rsu
-type: real
-positive: up
-!----------------------------------
-!
-
-!============
-variable_entry: rld
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: downwelling_longwave_flux_in_air
-units: W m-2
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Downwelling Longwave Radiation
-comment: Includes also the fluxes at the surface and TOA.
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevhalf time
-out_name: rld
-type: real
-positive: down
-!----------------------------------
-!
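The removed hunk above defines the hybrid vertical coordinates and their formula terms (p0, a, b, ps, and friends). As a reading aid only, here is a minimal numpy sketch of how the standard_hybrid_sigma relation p = a*p0 + b*ps turns those terms into a 3-D pressure field; all array shapes and coefficient values below are hypothetical, not taken from the table.

```python
import numpy as np

p0 = 100000.0                    # reference pressure [Pa] (the p0 formula term)
a = np.array([0.002, 0.1, 0.4])  # a(k) coefficients (hypothetical values)
b = np.array([0.0, 0.3, 0.9])    # b(k) coefficients (hypothetical values)
ps = np.full((2, 2), 101325.0)   # surface air pressure field ps [Pa]

# Broadcast to (lev, lat, lon): one pressure per model level and grid cell,
# exactly the p = a*p0 + b*ps formula from the axis entry above.
p = a[:, None, None] * p0 + b[:, None, None] * ps
print(p[:, 0, 0])  # [200.0, 40397.5, 131192.5] Pa, model top to surface
```

Level bounds follow the same pattern with a_bnds and b_bnds substituted via z_bounds_factors.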
-
-!============
-variable_entry: rsd
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: downwelling_shortwave_flux_in_air
-units: W m-2
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Downwelling Shortwave Radiation
-comment: Includes also the fluxes at the surface and TOA.
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevhalf time
-out_name: rsd
-type: real
-positive: down
-!----------------------------------
-!
-
-!============
-variable_entry: rlucs
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: upwelling_longwave_flux_in_air_assuming_clear_sky
-units: W m-2
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Upwelling Clear-Sky Longwave Radiation
-comment: Includes also the fluxes at the surface and TOA.
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevhalf time
-out_name: rlucs
-type: real
-positive: up
-!----------------------------------
-!
-
-!============
-variable_entry: rsucs
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: upwelling_shortwave_flux_in_air_assuming_clear_sky
-units: W m-2
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Upwelling Clear-Sky Shortwave Radiation
-comment: Includes also the fluxes at the surface and TOA.
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevhalf time
-out_name: rsucs
-type: real
-positive: up
-!----------------------------------
-!
-
-!============
-variable_entry: rldcs
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: downwelling_longwave_flux_in_air_assuming_clear_sky
-units: W m-2
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Downwelling Clear-Sky Longwave Radiation
-comment: Includes also the fluxes at the surface and TOA.
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevhalf time
-out_name: rldcs
-type: real
-positive: down
-!----------------------------------
-!
-
-!============
-variable_entry: rsdcs
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: downwelling_shortwave_flux_in_air_assuming_clear_sky
-units: W m-2
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Downwelling Clear-Sky Shortwave Radiation
-comment: Includes also the fluxes at the surface and TOA.
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevhalf time
-out_name: rsdcs
-type: real
-positive: down
-!----------------------------------
-!
-
-!============
-variable_entry: ta
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: air_temperature
-units: K
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Air Temperature
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: ta
-type: real
-valid_min: 157.1
-valid_max: 336.3
-ok_min_mean_abs: 194.3
-ok_max_mean_abs: 299.8
-!----------------------------------
-!
-
-!============
-variable_entry: tnt
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_air_temperature
-units: K s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Air Temperature
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnt
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnta
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_air_temperature_due_to_advection
-units: K s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Air Temperature due to Advection
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnta
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tntmp
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_air_temperature_due_to_model_physics
-units: K s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Air Temperature due to Diabatic Processes
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tntmp
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tntscpbl
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_air_temperature_due_to_stratiform_cloud_and_precipitation_and_boundary_layer_mixing
-units: K s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Air Temperature Due to Stratiform Cloud and Precipitation and Boundary Layer Mixing
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tntscpbl
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tntr
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_air_temperature_due_to_radiative_heating
-units: K s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Air Temperature due to Radiative Heating
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tntr
-type: real
-!----------------------------------
-!
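The ta entry above carries hard per-value bounds (valid_min/valid_max) and a plausibility band for the mean absolute value (ok_min_mean_abs/ok_max_mean_abs). A sketch of how a CMOR-style checker might consume them; the thresholds are copied from the table, while the checker itself and the data are hypothetical.

```python
import numpy as np

VALID_MIN, VALID_MAX = 157.1, 336.3              # hard bounds per value [K]
OK_MIN_MEAN_ABS, OK_MAX_MEAN_ABS = 194.3, 299.8  # plausible mean(|ta|) band [K]

# Fake air-temperature slice standing in for a real ta field.
field = np.random.default_rng(0).uniform(200.0, 300.0, size=(4, 8))

# Any single value outside the hard bounds is an error.
out_of_range = (field < VALID_MIN) | (field > VALID_MAX)
if out_of_range.any():
    print(f"{int(out_of_range.sum())} values outside [{VALID_MIN}, {VALID_MAX}] K")

# A mean absolute value outside the ok_* band is merely suspicious.
mean_abs = float(np.abs(field).mean())
if not OK_MIN_MEAN_ABS <= mean_abs <= OK_MAX_MEAN_ABS:
    print(f"suspicious mean(|ta|) = {mean_abs:.1f} K")
```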
-
-!============
-variable_entry: tntc
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_air_temperature_due_to_convection
-units: K s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Air Temperature due to Moist Convection
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tntc
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: hur
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: relative_humidity
-units: %
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Relative Humidity
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: hur
-type: real
-valid_min: -2.642
-valid_max: 135.7
-ok_min_mean_abs: -10.31
-ok_max_mean_abs: 97
-!----------------------------------
-!
-
-!============
-variable_entry: hus
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: specific_humidity
-units: 1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Specific Humidity
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: hus
-type: real
-valid_min: -0.000299
-valid_max: 0.02841
-ok_min_mean_abs: -0.0003539
-ok_max_mean_abs: 0.01041
-!----------------------------------
-!
-
-!============
-variable_entry: tnhus
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_specific_humidity
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Specific Humidity
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnhus
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnhusa
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_specific_humidity_due_to_advection
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Specific Humidity due to Advection
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnhusa
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnhusc
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_specific_humidity_due_to_convection
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Specific Humidity due to Convection
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnhusc
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnhusd
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_specific_humidity_due_to_diffusion
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Specific Humidity due to Diffusion
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnhusd
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnhusscpbl
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_specific_humidity_due_to_stratiform_cloud_and_precipitation_and_boundary_layer_mixing
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Specific Humidity due to Stratiform Cloud Condensation and Evaporation
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnhusscpbl
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnhusmp
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_specific_humidity_due_to_model_physics
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Specific Humidity due to Model Physics
-comment: This includes sources and sinks from parametrized physics (e.g. convection, stratiform condensation/evaporation, etc.) and excludes sources and sinks from resolved dynamics and diffusion.
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnhusmp
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: eviscu
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: atmosphere_momentum_diffusivity
-units: m2 s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Eddy Viscosity Coefficients for Momentum
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: eviscu
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: evisct
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: atmosphere_heat_diffusivity
-units: m2 s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Eddy Diffusivity Coefficients for Temperature
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: evisct
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: clc
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: convective_cloud_area_fraction_in_atmosphere_layer
-units: %
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Convective Cloud Area Fraction
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: clc
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: clwc
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: mass_fraction_of_convective_cloud_liquid_water_in_air
-units: 1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Mass Fraction of Convective Cloud Liquid Water
-comment: Calculated as the mass of convective cloud liquid water in the grid cell divided by the mass of air (including the water in all phases) in the grid cell. This includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: clwc
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: clic
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: mass_fraction_of_convective_cloud_ice_in_air
-units: 1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Mass Fraction of Convective Cloud Ice
-comment: Calculated as the mass of convective cloud ice in the grid cell divided by the mass of air (including the water in all phases) in the grid cell. This includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: clic
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: cls
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: stratiform_cloud_area_fraction_in_atmosphere_layer
-units: %
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Stratiform Cloud Area Fraction
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: cls
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: clws
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: mass_fraction_of_stratiform_cloud_liquid_water_in_air
-units: 1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Mass Fraction of Stratiform Cloud Liquid Water
-comment: Calculated as the mass of stratiform cloud liquid water in the grid cell divided by the mass of air (including the water in all phases) in the grid cell. This includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: clws
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: clis
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: mass_fraction_of_stratiform_cloud_ice_in_air
-units: 1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Mass Fraction of Stratiform Cloud Ice
-comment: Calculated as the mass of stratiform cloud ice in the grid cell divided by the mass of air (including the water in all phases) in the grid cell. This includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: clis
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: mcu
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: atmosphere_updraft_convective_mass_flux
-units: kg m-2 s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Updraft Convective Mass Flux
-comment: Calculated as the convective mass flux divided by the area of the whole grid cell (not just the area of the cloud).
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevhalf time
-out_name: mcu
-type: real
-positive: up
-!----------------------------------
-!
-
-!============
-variable_entry: mcd
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: atmosphere_downdraft_convective_mass_flux
-units: kg m-2 s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Downdraft Convective Mass Flux
-comment: Calculated as the convective mass flux divided by the area of the whole grid cell (not just the area of the cloud).
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevhalf time
-out_name: mcd
-type: real
-positive: down
-!----------------------------------
-!
-
-!============
-variable_entry: smc
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: atmosphere_net_upward_shallow_convective_mass_flux
-units: kg m-2 s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Shallow Convective Mass Flux
-comment: The net mass flux represents the difference between the updraft and downdraft components. For models with a distinct shallow convection scheme, this is calculated as convective mass flux divided by the area of the whole grid cell (not just the area of the cloud).
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevhalf time
-out_name: smc
-type: real
-positive: up
-!----------------------------------
-!
-
-!============
-variable_entry: dmc
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: atmosphere_net_upward_deep_convective_mass_flux
-units: kg m-2 s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Deep Convective Mass Flux
-comment: The net mass flux represents the difference between the updraft and downdraft components. This is calculated as the convective mass flux divided by the area of the whole grid cell (not just the area of the cloud).
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevhalf time
-out_name: dmc
-type: real
-positive: up
-!----------------------------------
-!
-
-!============
-variable_entry: tnsclw
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_liquid_water_in_air
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Liquid Water In Air
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsclw
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsclwcm
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_liquid_water_in_air_due_to_cloud_microphysics
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Liquid Water In Air Due To Cloud Microphysics
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsclwcm
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsclwbl
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_liquid_water_in_air_due_to_boundary_layer_mixing
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Liquid Water In Air Due To Boundary Layer Mixing
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsclwbl
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsclwbfpcli
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_liquid_water_in_air_due_to_bergeron_findeisen_process_to_cloud_ice
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Liquid Water In Air Due To Bergeron Findeisen Process To Cloud Ice
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsclwbfpcli
-type: real
-!----------------------------------
-!
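The mass-flux comments above (mcu, mcd, smc, dmc) make two conventions explicit: "net" means the updraft component minus the downdraft component, and fluxes are normalized by the whole grid-cell area rather than by the cloudy fraction. A toy numeric sketch, with all values invented for illustration:

```python
# All numbers below are hypothetical.
updraft = 0.012    # mcu-style flux [kg m-2 s-1], positive: up
downdraft = 0.004  # mcd-style flux [kg m-2 s-1], positive: down

# "Net upward" as in smc/dmc: updraft component minus downdraft component.
net_upward = updraft - downdraft  # 0.008 kg m-2 s-1

# Whole-cell normalization: an in-cloud flux of 0.06 kg m-2 s-1 through
# 20% of the cell is reported as 0.012 kg m-2 s-1 for the full cell.
in_cloud_flux, cloud_fraction = 0.06, 0.2
reported = in_cloud_flux * cloud_fraction  # 0.012 kg m-2 s-1
print(net_upward, reported)
```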
-
-!============
-variable_entry: tnsclwce
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_liquid_water_in_air_due_to_condensation_and_evaporation
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Liquid Water due to Condensation and Evaporation
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsclwce
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsclwcd
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_liquid_water_in_air_due_to_convective_detrainment
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Liquid Water Due to Convective Detrainment
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsclwcd
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsclwhon
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_liquid_water_in_air_due_to_homogeneous_nucleation
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Liquid Water due to Homogeneous Nucleation
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsclwhon
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsclwhen
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_liquid_water_in_air_due_to_heterogeneous_nucleation
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Liquid Water due to Heterogeneous Nucleation
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsclwhen
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsclwri
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_liquid_water_in_air_due_to_riming
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Liquid Water due to Riming
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsclwri
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsclwar
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_liquid_water_in_air_due_to_accretion_to_rain
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Liquid Water due to Accretion to Rain
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsclwar
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsclwas
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_liquid_water_in_air_due_to_accretion_to_snow
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Liquid Water due to Accretion to Snow
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsclwas
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsclwmi
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_liquid_water_in_air_due_to_melting_from_cloud_ice
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Liquid Water due to Melting From Cloud Ice
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsclwmi
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsclwac
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_liquid_water_in_air_due_to_autoconversion
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Liquid Water due to Autoconversion
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsclwac
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsclwa
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_liquid_water_in_air_due_to_advection
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Liquid Water due to Advection
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsclwa
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnscli
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Ice In Air
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnscli
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsclicm
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_cloud_microphysics
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Ice In Air Due To Cloud Microphysics
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsclicm
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsclibl
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_boundary_layer_mixing
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Ice In Air Due To Boundary Layer Mixing
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsclibl
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsclibfpcl
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_bergeron_findeisen_process_from_cloud_liquid
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Ice In Air Due To Bergeron Findeisen Process from Cloud Liquid
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsclibfpcl
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsclicd
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_convective_detrainment
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Ice Due Convective Detrainment
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsclicd
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsclihon
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_homogeneous_nucleation
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Ice due to Homogeneous Nucleation
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsclihon
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsclihencl
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_heterogeneous_nucleation_from_cloud_liquid_water
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Ice due to Heterogeneous Nucleation From Cloud Liquid
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsclihencl
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsclihenv
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_heterogeneous_nucleation_from_water_vapor
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Ice due to Heterogeneous Nucleation From Water Vapor
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsclihenv
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnscliricl
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_riming_from_cloud_liquid_water
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Ice due to Riming From Cloud Liquid
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnscliricl
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsclirir
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_riming_from_rain
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Ice due to Riming From Rain
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsclirir
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsclids
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_deposition_and_sublimation
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Ice due to Deposition and Sublimation
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsclids
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnscliag
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_aggregation
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Ice due to Aggregation
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnscliag
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsclias
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_accretion_to_snow
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Ice due to Accretion to Snow
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsclias
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnscliemi
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_evaporation_of_melting_ice
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Ice due to Evaporation of Melting Ice
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnscliemi
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsclimr
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_melting_to_rain
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Ice due to Melting to Rain
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsclimr
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsclimcl
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_melting_to_cloud_liquid_water
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Ice due to Melting to Cloud Liquid
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsclimcl
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnscliif
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_icefall
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Ice due to Icefall
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnscliif
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsclia
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_advection
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Ice due to Advection
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsclia
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsccw
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_condensed_water_in_air
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Condensed Water In Air
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsccw
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsccwcm
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_condensed_water_in_air_due_to_cloud_microphysics
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Condensed Water In Air Due To Cloud Microphysics
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsccwcm
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsccwbl
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_condensed_water_in_air_due_to_boundary_layer_mixing
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Condensed Water In Air Due To Boundary Layer Mixing
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsccwbl
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsccwce
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_condensed_water_in_air_due_to_condensation_and_evaporation
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Condensed Water due to Condensation and Evaporation
-comment: condensed water includes both liquid and ice.
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsccwce
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsccwacr
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_condensed_water_in_air_due_to_autoconversion_to_rain
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Condensed Water due to Autoconversion to Rain
-comment: condensed water includes both liquid and ice.
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsccwacr
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsccwacs
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_condensed_water_in_air_due_to_autoconversion_to_snow
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Condensed Water due to Autoconversion to Snow
-comment: condensed water includes both liquid and ice.
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsccwacs
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsccwif
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_condensed_water_in_air_due_to_icefall
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Condensed Water due to Icefall
-comment: condensed water includes both liquid and ice.
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsccwif
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: tnsccwa
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: tendency_of_mass_fraction_of_stratiform_cloud_condensed_water_in_air_due_to_advection
-units: s-1
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: Tendency of Mass Fraction of Stratiform Cloud Condensed Water due to Advection
-comment: condensed water includes both liquid and ice.
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude alevel time
-out_name: tnsccwa
-type: real
-!----------------------------------
-!
-
-!============
-variable_entry: rsut4co2
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: toa_outgoing_shortwave_flux
-units: W m-2
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: TOA Outgoing Shortwave Radiation in 4XCO2 Atmosphere
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude time
-out_name: rsut4co2
-type: real
-positive: up
-!----------------------------------
-!
-
-!============
-variable_entry: rlut4co2
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: toa_outgoing_longwave_flux
-units: W m-2
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: TOA Outgoing Longwave Radiation 4XCO2 Atmosphere
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude time
-out_name: rlut4co2
-type: real
-positive: up
-!----------------------------------
-!
-
-!============
-variable_entry: rsutcs4co2
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: toa_outgoing_shortwave_flux_assuming_clear_sky
-units: W m-2
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: TOA Outgoing Clear-Sky Shortwave Radiation 4XCO2 Atmosphere
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude time
-out_name: rsutcs4co2
-type: real
-positive: up
-!----------------------------------
-!
-
-!============
-variable_entry: rlutcs4co2
-!============
-modeling_realm: atmos
-!----------------------------------
-! Variable attributes:
-!----------------------------------
-standard_name: toa_outgoing_longwave_flux_assuming_clear_sky
-units: W m-2
-cell_methods: time: mean
-cell_measures: area: areacella
-long_name: TOA Outgoing Clear-Sky Longwave Radiation 4XCO2 Atmosphere
-!----------------------------------
-! Additional variable information:
-!----------------------------------
-dimensions: longitude latitude time
-out_name: rlutcs4co2
-type: real
-positive: up
-!----------------------------------
-!
-
-!============
-variable_entry: rlu4co2
-!============
-modeling_realm: atmos
-!----------------------------------
Variable attributes: -!---------------------------------- -standard_name: upwelling_longwave_flux_in_air -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Upwelling Longwave Radiation 4XCO2 Atmosphere -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevhalf time -out_name: rlu4co2 -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rsu4co2 -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: upwelling_shortwave_flux_in_air -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Upwelling Shortwave Radiation 4XCO2 Atmosphere -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevhalf time -out_name: rsu4co2 -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rld4co2 -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: downwelling_longwave_flux_in_air -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Downwelling Longwave Radiation 4XCO2 Atmosphere -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevhalf time -out_name: rld4co2 -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rsd4co2 -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: downwelling_shortwave_flux_in_air -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Downwelling Shortwave Radiation 4XCO2 Atmosphere -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevhalf time -out_name: rsd4co2 -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rlucs4co2 -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: upwelling_longwave_flux_in_air_assuming_clear_sky -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Upwelling Clear-Sky Longwave Radiation 4XCO2 Atmosphere -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevhalf time -out_name: rlucs4co2 -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rsucs4co2 -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: upwelling_shortwave_flux_in_air_assuming_clear_sky -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Upwelling Clear-Sky Shortwave Radiation 4XCO2 Atmosphere -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevhalf time -out_name: rsucs4co2 -type: real -positive: up -!---------------------------------- -! 
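Every radiation entry in this 4XCO2 family declares a positive attribute (up for outgoing and upwelling fluxes, down for incoming and downwelling ones), which fixes the sign convention the stored values must follow. Below is a minimal numpy sketch of how a checker might reconcile data written with the opposite convention; the helper name, arguments, and numbers are illustrative, not part of CMOR or ESMValTool:

    import numpy as np

    def match_positive(flux, data_positive, table_positive):
        """Flip a flux so its sign convention matches the table's
        'positive' attribute ('up' or 'down'). Illustrative only."""
        if {data_positive, table_positive} - {"up", "down"}:
            raise ValueError("positive must be 'up' or 'down'")
        return flux if data_positive == table_positive else -flux

    rsut4co2 = np.array([240.5, 251.2])  # W m-2, assumed stored positive-down
    fixed = match_positive(rsut4co2, "down", "up")  # table requires positive: up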
- -!============ -variable_entry: rldcs4co2 -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: downwelling_longwave_flux_in_air_assuming_clear_sky -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Downwelling Clear-Sky Longwave Radiation 4XCO2 Atmosphere -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevhalf time -out_name: rldcs4co2 -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rsdcs4co2 -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: downwelling_shortwave_flux_in_air_assuming_clear_sky -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Downwelling Clear-Sky Shortwave Radiation 4XCO2 Atmosphere -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alevhalf time -out_name: rsdcs4co2 -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: cltisccp -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: ISCCP Total Cloud Fraction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: cltisccp -type: real -!---------------------------------- -! - -!============ -variable_entry: albisccp -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_albedo -units: 1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: ISCCP Mean Cloud Albedo -comment: time-means weighted by the ISCCP Total Cloud Fraction - see http://cfmip.metoffice.com/COSP.html -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: albisccp -type: real -!---------------------------------- -! - -!============ -variable_entry: pctisccp -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_pressure_at_cloud_top -units: Pa -cell_methods: time: mean -cell_measures: area: areacella -long_name: ISCCP Mean Cloud Top Pressure -comment: time-means weighted by the ISCCP Total Cloud Fraction - see http://cfmip.metoffice.com/COSP.html -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: pctisccp -type: real -!---------------------------------- -! - -!============ -variable_entry: clisccp -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: isccp_cloud_area_fraction -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: ISCCP Cloud Area Fraction -!---------------------------------- -! 
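The albisccp and pctisccp entries above are time means weighted by the ISCCP total cloud fraction: each monthly value is sum(cltisccp * x) / sum(cltisccp) over the sampled instants. A short numpy sketch of that weighting, with made-up numbers; the function name is this sketch's own, not from COSP:

    import numpy as np

    def cloud_weighted_time_mean(x, cltisccp):
        """Cloud-fraction-weighted mean over the leading time axis."""
        w = np.asarray(cltisccp, dtype=float)
        return (w * np.asarray(x)).sum(axis=0) / w.sum(axis=0)

    pct = np.array([45000.0, 60000.0])  # instantaneous cloud top pressure, Pa
    clt = np.array([80.0, 20.0])        # cltisccp at the same instants, %
    pctisccp = cloud_weighted_time_mean(pct, clt)  # 48000.0 Pa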
Additional variable information: -!---------------------------------- -dimensions: longitude latitude plev7 tau time -out_name: clisccp -type: real -!---------------------------------- -! - -!============ -variable_entry: cltcalipso -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: CALIPSO Total Cloud Fraction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: cltcalipso -type: real -!---------------------------------- -! - -!============ -variable_entry: cllcalipso -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction_in_atmosphere_layer -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: CALIPSO Low Level Cloud Fraction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time p840 -out_name: cllcalipso -type: real -!---------------------------------- -! - -!============ -variable_entry: clmcalipso -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction_in_atmosphere_layer -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: CALIPSO Mid Level Cloud Fraction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time p560 -out_name: clmcalipso -type: real -!---------------------------------- -! - -!============ -variable_entry: clhcalipso -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction_in_atmosphere_layer -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: CALIPSO High Level Cloud Fraction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time p220 -out_name: clhcalipso -type: real -!---------------------------------- -! - -!============ -variable_entry: clcalipso -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction_in_atmosphere_layer -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: CALIPSO Cloud Fraction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alt40 time -out_name: clcalipso -type: real -!---------------------------------- -! - -!============ -variable_entry: parasolRefl -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: toa_bidirectional_reflectance -units: 1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: PARASOL Reflectance -comment: This is reflectance as seen at the top of the atmosphere. -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: longitude latitude sza5 time -out_name: parasolRefl -type: real -!---------------------------------- -! - diff --git a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_cfOff b/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_cfOff deleted file mode 100644 index 106cca4b61..0000000000 --- a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_cfOff +++ /dev/null @@ -1,488 +0,0 @@ -table_id: Table cfOff -modeling_realm: atmos - -frequency: mon - -cmor_version: 2.6 ! minimum version of CMOR that can read this table -cf_version: 1.4 ! version of CF that output conforms to -project_id: CMIP5 ! project id -table_date: 17 July 2013 ! date this table was constructed - -missing_value: 1.e20 ! value used to indicate a missing value - ! in arrays output by netCDF as 32-bit IEEE - ! floating-point numbers (float or real) - -baseURL: http://cmip-pcmdi.llnl.gov/CMIP5/dataLocation -product: output - -required_global_attributes: creation_date tracking_id forcing model_id parent_experiment_id parent_experiment_rip branch_time contact institute_id ! space separated required global attributes - -forcings: N/A Nat Ant GHG SD SI SA TO SO Oz LU Sl Vl SS Ds BC MD OC AA - -expt_id_ok: '10- or 30-year run initialized in year XXXX' 'decadalXXXX' -expt_id_ok: 'volcano-free hindcast initialized in year XXXX' 'noVolcXXXX' -expt_id_ok: 'prediction with 2010 volcano' 'volcIn2010' -expt_id_ok: 'pre-industrial control' 'piControl' -expt_id_ok: 'historical' 'historical' -expt_id_ok: 'historical extension' 'historicalExt' -expt_id_ok: 'other historical forcing' 'historicalMisc' -expt_id_ok: 'mid-Holocene' 'midHolocene' -expt_id_ok: 'last glacial maximum' 'lgm' -expt_id_ok: 'last millennium' 'past1000' -expt_id_ok: 'RCP4.5' 'rcp45' -expt_id_ok: 'RCP8.5' 'rcp85' -expt_id_ok: 'RCP2.6' 'rcp26' -expt_id_ok: 'RCP6' 'rcp60' -expt_id_ok: 'ESM pre-industrial control' 'esmControl' -expt_id_ok: 'ESM historical' 'esmHistorical' -expt_id_ok: 'ESM RCP8.5' 'esmrcp85' -expt_id_ok: 'ESM fixed climate 1' 'esmFixClim1' -expt_id_ok: 'ESM fixed climate 2' 'esmFixClim2' -expt_id_ok: 'ESM feedback 1' 'esmFdbk1' -expt_id_ok: 'ESM feedback 2' 'esmFdbk2' -expt_id_ok: '1 percent per year CO2' '1pctCO2' -expt_id_ok: 'abrupt 4XCO2' 'abrupt4xCO2' -expt_id_ok: 'natural-only' 'historicalNat' -expt_id_ok: 'GHG-only' 'historicalGHG' -expt_id_ok: 'AMIP' 'amip' -expt_id_ok: '2030 time-slice' 'sst2030' -expt_id_ok: 'control SST climatology' 'sstClim' -expt_id_ok: 'CO2 forcing' 'sstClim4xCO2' -expt_id_ok: 'all aerosol forcing' 'sstClimAerosol' -expt_id_ok: 'sulfate aerosol forcing' 'sstClimSulfate' -expt_id_ok: '4xCO2 AMIP' 'amip4xCO2' -expt_id_ok: 'AMIP plus patterned anomaly' 'amipFuture' -expt_id_ok: 'aqua planet control' 'aquaControl' -expt_id_ok: '4xCO2 aqua planet' 'aqua4xCO2' -expt_id_ok: 'aqua planet plus 4K anomaly' 'aqua4K' -expt_id_ok: 'AMIP plus 4K anomaly' 'amip4K' - - -approx_interval: 30.000000 ! approximate spacing between successive time - ! samples (in units of the output time - ! coordinate). - -!============ -axis_entry: longitude -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: longitude -units: degrees_east -axis: X ! X, Y, Z, T (default: undeclared) -long_name: longitude -!---------------------------------- -!
Additional axis information: -!---------------------------------- -out_name: lon -valid_min: 0.0 -valid_max: 360.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: latitude -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: latitude -units: degrees_north -axis: Y ! X, Y, Z, T (default: undeclared) -long_name: latitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lat -valid_min: -90.0 -valid_max: 90.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: p220 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: air_pressure -units: Pa -axis: Z ! X, Y, Z, T (default: undeclared) -positive: down ! up or down (default: undeclared) -long_name: pressure -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: plev -stored_direction: decreasing -type: double -value: 22000. ! of scalar (singleton) dimension -bounds_values: 44000. 0.0 ! of scalar (singleton) dimension bounds -must_have_bounds: no -!---------------------------------- -! - - -!============ -axis_entry: p560 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: air_pressure -units: Pa -axis: Z ! X, Y, Z, T (default: undeclared) -positive: down ! up or down (default: undeclared) -long_name: pressure -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: plev -stored_direction: decreasing -type: double -value: 56000. ! of scalar (singleton) dimension -bounds_values: 68000. 44000. ! of scalar (singleton) dimension bounds -must_have_bounds: no -!---------------------------------- -! - - -!============ -axis_entry: p840 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: air_pressure -units: Pa -axis: Z ! X, Y, Z, T (default: undeclared) -positive: down ! up or down (default: undeclared) -long_name: pressure -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: plev -stored_direction: decreasing -type: double -value: 84000. ! of scalar (singleton) dimension -bounds_values: 100000. 68000. ! of scalar (singleton) dimension bounds -must_have_bounds: no -!---------------------------------- -! - - -!============ -axis_entry: alt40 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: altitude -units: m -axis: Z ! X, Y, Z, T (default: undeclared) -positive: up ! up or down (default: undeclared) -long_name: altitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: alt40 -stored_direction: increasing -tolerance: 0.001 -type: double -requested: 240. 720. 1200. 1680. 2160. 2640. 3120. 3600. 4080. 4560. 5040. 5520. 6000. 6480. 6960. 7440. 7920. 8400. 8880. 9360. 9840. 10320. 10800. 11280. 11760. 12240. 12720. 13200. 13680. 14160. 14640. 15120. 15600. 16080. 16560. 17040. 17520. 18000. 18480. 18960. ! space-separated list of requested coordinates -requested_bounds: 0. 480. 480. 960. 960. 1440. 1440. 1920. 
1920. 2400. 2400. 2880. 2880. 3360. 3360. 3840. 3840. 4320. 4320. 4800. 4800. 5280. 5280. 5760. 5760. 6240. 6240. 6720. 6720. 7200. 7200. 7680. 7680. 8160. 8160. 8640. 8640. 9120. 9120. 9600. 9600. 10080. 10080. 10560. 10560. 11040. 11040. 11520. 11520. 12000. 12000. 12480. 12480. 12960. 12960. 13440. 13440. 13920. 13920. 14400. 14400. 14880. 14880. 15360. 15360. 15840. 15840. 16320. 16320. 16800. 16800. 17280. 17280. 17760. 17760. 18240. 18240. 18720. 18720. 19200. ! space-separated list of requested coordinate bounds -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: time -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: time -units: days since ? -axis: T ! X, Y, Z, T (default: undeclared) -long_name: time -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: time -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: scatratio -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: backscattering_ratio -units: 1 -long_name: lidar backscattering ratio -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: scatratio -stored_direction: increasing -tolerance: 0.001 -type: double -requested: 0.005 0.605 2.1 4. 6. 8.5 12.5 17.5 22.5 27.5 35. 45. 55. 70. 50040. ! space-separated list of requested coordinates -requested_bounds: 0. 0.01 0.01 1.2 1.2 3. 3. 5. 5. 7. 7. 10. 10. 15. 15. 20. 20. 25. 25. 30. 30. 40. 40. 50. 50. 60. 60. 80. 80. 100000. ! space-separated list of requested coordinate bounds -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: dbze -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: equivalent_reflectivity_factor -units: dBZ -long_name: CloudSat simulator equivalent radar reflectivity factor -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: dbze -stored_direction: increasing -tolerance: 0.001 -type: double -requested: -47.5 -42.5 -37.5 -32.5 -27.5 -22.5 -17.5 -12.5 -7.5 -2.5 2.5 7.5 12.5 17.5 22.5 ! space-separated list of requested coordinates -requested_bounds: -50. -45. -45. -40. -40. -35. -35. -30. -30. -25. -25. -20. -20. -15. -15. -10. -10. -5. -5. 0. 0. 5. 5. 10. 10. 15. 15. 20. 20. 25. ! space-separated list of requested coordinate bounds -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: sza5 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: solar_zenith_angle -units: degree -long_name: solar zenith angle -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: sza -stored_direction: increasing -tolerance: 0.001 -type: double -requested: 0. 20. 40. 60. 80. ! space-separated list of requested coordinates -must_have_bounds: no -!---------------------------------- -! - -!============ -variable_entry: clcalipso -!============ -modeling_realm: atmos -!---------------------------------- -! 
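The alt40 axis requested above is simply 40 uniform 480 m layers from the surface to 19.2 km, with each requested value at the midpoint of its bounds. A quick numpy check of that relationship (a sketch for the reader, not part of the table machinery):

    import numpy as np

    edges = np.arange(0.0, 19201.0, 480.0)             # 0, 480, ..., 19200 m
    bounds = np.column_stack((edges[:-1], edges[1:]))  # the requested_bounds pairs
    centers = bounds.mean(axis=1)                      # 240, 720, ..., 18960 m
    assert centers.size == 40
    assert centers[0] == 240.0 and centers[-1] == 18960.0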
Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction_in_atmosphere_layer -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: CALIPSO Cloud Fraction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alt40 time -out_name: clcalipso -type: real -!---------------------------------- -! - -!============ -variable_entry: clcalipso2 -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction_in_atmosphere_layer -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: CALIPSO Cloud Fraction Undetected by CloudSat -comment: Clouds detected by CALIPSO but below the detectability threshold of CloudSat -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alt40 time -out_name: clcalipso2 -type: real -!---------------------------------- -! - -!============ -variable_entry: cfadDbze94 -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: histogram_of_equivalent_reflectivity_factor_over_height_above_reference_ellipsoid -units: 1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: CloudSat Radar Reflectivity -comment: CFADs (Cloud Frequency Altitude Diagrams) are joint height - radar reflectivity (or lidar scattering ratio) distributions. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alt40 dbze time -out_name: cfadDbze94 -type: real -!---------------------------------- -! - -!============ -variable_entry: cfadLidarsr532 -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: histogram_of_backscattering_ratio_over_height_above_reference_ellipsoid -units: 1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: CALIPSO Scattering Ratio -comment: CFADs (Cloud Frequency Altitude Diagrams) are joint height - radar reflectivity (or lidar scattering ratio) distributions. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude alt40 scatratio time -out_name: cfadLidarsr532 -type: real -!---------------------------------- -! - -!============ -variable_entry: parasolRefl -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: toa_bidirectional_reflectance -units: 1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: PARASOL Reflectance -comment: Simulated reflectance from PARASOL as seen at the top of the atmosphere for 5 solar zenith angles. Valid only over ocean and for one viewing direction (viewing zenith angle of 30 degrees and relative azimuth angle 320 degrees). -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude sza5 time -out_name: parasolRefl -type: real -!---------------------------------- -! - -!============ -variable_entry: cltcalipso -!============ -modeling_realm: atmos -!---------------------------------- -!
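As the comments on cfadDbze94 and cfadLidarsr532 above say, a CFAD is a joint height/reflectivity distribution: for each altitude bin, a histogram of the simulated signal over the dbze (or scatratio) bins. A hedged numpy sketch of the idea, not the COSP implementation; all names and data here are invented:

    import numpy as np

    def cfad(profiles, bin_edges):
        """One histogram per altitude level. 'profiles' has shape
        (nsamples, nlevels); returns counts of shape (nlevels, nbins)."""
        nlevels = profiles.shape[1]
        counts = np.empty((nlevels, len(bin_edges) - 1))
        for k in range(nlevels):
            counts[k], _ = np.histogram(profiles[:, k], bins=bin_edges)
        return counts

    dbze_edges = np.arange(-50.0, 26.0, 5.0)  # the 15 dBZ bins requested above
    sim = np.random.default_rng(0).uniform(-45.0, 20.0, size=(1000, 40))
    hist = cfad(sim, dbze_edges)              # shape (40 alt40 levels, 15 bins)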
Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: CALIPSO Total Cloud Fraction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: cltcalipso -type: real -!---------------------------------- -! - -!============ -variable_entry: cllcalipso -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction_in_atmosphere_layer -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: CALIPSO Low Level Cloud Fraction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time p840 -out_name: cllcalipso -type: real -!---------------------------------- -! - -!============ -variable_entry: clmcalipso -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction_in_atmosphere_layer -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: CALIPSO Mid Level Cloud Fraction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time p560 -out_name: clmcalipso -type: real -!---------------------------------- -! - -!============ -variable_entry: clhcalipso -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction_in_atmosphere_layer -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: CALIPSO High Level Cloud Fraction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time p220 -out_name: clhcalipso -type: real -!---------------------------------- -! - diff --git a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_cfSites b/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_cfSites deleted file mode 100644 index afe49d3977..0000000000 --- a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_cfSites +++ /dev/null @@ -1,2200 +0,0 @@ -table_id: Table cfSites -! -! WARNING: Although this table defines variables as functions of longitude and latitude -! the user MUST use a 'grid' (using the 'site' index in CMIP5_grids) -! in order to define correctly the location of each site. -! -modeling_realm: atmos - -frequency: subhr - -cmor_version: 2.6 ! minimum version of CMOR that can read this table -cf_version: 1.4 ! version of CF that output conforms to -project_id: CMIP5 ! project id -table_date: 17 July 2013 ! date this table was constructed - -missing_value: 1.e20 ! value used to indicate a missing value - ! in arrays output by netCDF as 32-bit IEEE - ! floating-point numbers (float or real) - -baseURL: http://cmip-pcmdi.llnl.gov/CMIP5/dataLocation -product: output - -required_global_attributes: creation_date tracking_id forcing model_id parent_experiment_id parent_experiment_rip branch_time contact institute_id !
space separated required global attributes - -forcings: N/A Nat Ant GHG SD SI SA TO SO Oz LU Sl Vl SS Ds BC MD OC AA - -expt_id_ok: '10- or 30-year run initialized in year XXXX' 'decadalXXXX' -expt_id_ok: 'volcano-free hindcast initialized in year XXXX' 'noVolcXXXX' -expt_id_ok: 'prediction with 2010 volcano' 'volcIn2010' -expt_id_ok: 'pre-industrial control' 'piControl' -expt_id_ok: 'historical' 'historical' -expt_id_ok: 'historical extension' 'historicalExt' -expt_id_ok: 'other historical forcing' 'historicalMisc' -expt_id_ok: 'mid-Holocene' 'midHolocene' -expt_id_ok: 'last glacial maximum' 'lgm' -expt_id_ok: 'last millennium' 'past1000' -expt_id_ok: 'RCP4.5' 'rcp45' -expt_id_ok: 'RCP8.5' 'rcp85' -expt_id_ok: 'RCP2.6' 'rcp26' -expt_id_ok: 'RCP6' 'rcp60' -expt_id_ok: 'ESM pre-industrial control' 'esmControl' -expt_id_ok: 'ESM historical' 'esmHistorical' -expt_id_ok: 'ESM RCP8.5' 'esmrcp85' -expt_id_ok: 'ESM fixed climate 1' 'esmFixClim1' -expt_id_ok: 'ESM fixed climate 2' 'esmFixClim2' -expt_id_ok: 'ESM feedback 1' 'esmFdbk1' -expt_id_ok: 'ESM feedback 2' 'esmFdbk2' -expt_id_ok: '1 percent per year CO2' '1pctCO2' -expt_id_ok: 'abrupt 4XCO2' 'abrupt4xCO2' -expt_id_ok: 'natural-only' 'historicalNat' -expt_id_ok: 'GHG-only' 'historicalGHG' -expt_id_ok: 'AMIP' 'amip' -expt_id_ok: '2030 time-slice' 'sst2030' -expt_id_ok: 'control SST climatology' 'sstClim' -expt_id_ok: 'CO2 forcing' 'sstClim4xCO2' -expt_id_ok: 'all aerosol forcing' 'sstClimAerosol' -expt_id_ok: 'sulfate aerosol forcing' 'sstClimSulfate' -expt_id_ok: '4xCO2 AMIP' 'amip4xCO2' -expt_id_ok: 'AMIP plus patterned anomaly' 'amipFuture' -expt_id_ok: 'aqua planet control' 'aquaControl' -expt_id_ok: '4xCO2 aqua planet' 'aqua4xCO2' -expt_id_ok: 'aqua planet plus 4K anomaly' 'aqua4K' -expt_id_ok: 'AMIP plus 4K anomaly' 'amip4K' - - -approx_interval: 0.017361 ! approximate spacing between successive time - ! samples (in units of the output time - ! coordinate). -approx_interval_warning: 0.25 ! Level at which a warning is issued because the time axis values are too far apart from the official interval -approx_interval_error: 0.75 ! Level at which an error is issued because the time axis values are too far apart from the official interval - -generic_levels: alevel alevhalf - -!============ -axis_entry: time1 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: time -units: days since ? -axis: T ! X, Y, Z, T (default: undeclared) -long_name: time -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: time -stored_direction: increasing -type: double -must_have_bounds: no -!---------------------------------- -! - - -!============ -axis_entry: height2m -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: height -units: m -axis: Z ! X, Y, Z, T (default: undeclared) -positive: up ! up or down (default: undeclared) -long_name: height -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: height -valid_min: 1.0 -valid_max: 10.0 -stored_direction: increasing -type: double -value: 2. ! of scalar (singleton) dimension -must_have_bounds: no -!---------------------------------- -! - - -!============ -axis_entry: height10m -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: height -units: m -axis: Z !
X, Y, Z, T (default: undeclared) -positive: up ! up or down (default: undeclared) -long_name: height -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: height -valid_min: 1.0 -valid_max: 30.0 -stored_direction: increasing -type: double -value: 10. ! of scalar (singleton) dimension -must_have_bounds: no -!---------------------------------- -! - - -!============ -axis_entry: site -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -long_name: site index -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: site -type: integer -must_have_bounds: no -index_only: ok -must_call_cmor_grid: yes - -!---------------------------------- -! - -!============ -axis_entry: smooth_level -!============ -! -! This coordinate is a hybrid height coordinate with units of meters (m). -! It increases upward. -! The values of a(k)*ztop, which appear in the formula below, should be stored as smooth_level. -! Note that in the netCDF file the variable will be named "lev", not smooth_level. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_sleve_coordinate -units: m -axis: Z -positive: up -long_name: atmosphere smooth level vertical (SLEVE) coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: increasing -valid_min: -200. -valid_max: 800000. -formula: z = a*ztop + b1*zsurf1 + b2*zsurf2 -z_factors: a: a b1: b1 b2: b2 ztop: ztop zsurf1: zsurf1 zsurf2: zsurf2 -z_bounds_factors: a: a_bnds b1: b1_bnds b2: b2_bnds ztop: ztop zsurf1: zsurf1 zsurf2: zsurf2 -!---------------------------------- -! -!============ -axis_entry: natural_log_pressure -!============ -! -! This coordinate is dimensionless; it is near 0 at the surface and increases upward. -! The values of lev(k), which appear in the formula below, should be stored as natural_log_pressure. -! Note that in the netCDF file the variable will be named "lev", not natural_log_pressure. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_ln_pressure_coordinate -axis: Z -long_name: atmosphere natural log pressure coordinate -positive: down -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: -1. -valid_max: 20. -formula: p = p0 * exp(-lev) -z_factors: p0: p0 lev: lev -z_bounds_factors: p0: p0 lev: lev_bnds -!---------------------------------- -! -!============ -axis_entry: standard_sigma -!============ -! -! This coordinate is dimensionless and varies from 0 at the model top to 1.0 at the surface. -! The values of sigma(k), which appear in the formula below, should be stored as standard_sigma. -! Note that in the netCDF file the variable will be named "lev", not standard_sigma. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_sigma_coordinate -axis: Z -positive: down -long_name: sigma coordinate -!---------------------------------- -! -!
Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: 0.0 -valid_max: 1.0 -formula: p = ptop + sigma*(ps - ptop) -z_factors: ptop: ptop sigma: lev ps: ps -z_bounds_factors: ptop: ptop sigma: lev_bnds ps: ps -!---------------------------------- -! -! -!============ -axis_entry: standard_hybrid_sigma -!============ -! -! This coordinate is dimensionless and varies from a small value at the model top to 1.0 at the surface. -! The values of a + b, which appear in the formula below, should be stored as standard_hybrid_sigma. -! Note that in the netCDF file the variable will be named "lev", not standard_hybrid_sigma. -! -!--------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_hybrid_sigma_pressure_coordinate -units: 1 -axis: Z -positive: down -long_name: hybrid sigma pressure coordinate -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: 0.0 -valid_max: 1.0 -formula: p = a*p0 + b*ps -z_factors: p0: p0 a: a b: b ps: ps -z_bounds_factors: p0: p0 a: a_bnds b: b_bnds ps: ps -!---------------------------------- -! -! -!============ -axis_entry: alternate_hybrid_sigma -!============ -! -! This coordinate is dimensionless and varies from a small value at the model top to 1.0 at the surface. -! The values of ap/p0 + b, which appear in the formula below, should be stored as alternate_hybrid_sigma. -! Note that in the netCDF file the variable will be named "lev", not alternate_hybrid_sigma. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_hybrid_sigma_pressure_coordinate -units: 1 -axis: Z -positive: down -long_name: hybrid sigma pressure coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: 0.0 -valid_max: 1.0 -formula: p = ap + b*ps -z_factors: ap: ap b: b ps: ps -z_bounds_factors: ap: ap_bnds b: b_bnds ps: ps -!---------------------------------- -! -! -!============ -axis_entry: hybrid_height -!============ -! -! This coordinate has units of meters (m) and increases upward. -! The values of a, which appear in the formula below, should be stored as hybrid_height. -! Note that in the netCDF file the variable will be named "lev", not hybrid_height. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: atmosphere_hybrid_height_coordinate -units: m -axis: Z -positive: up -long_name: hybrid height coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: increasing -valid_min: 0.0 -formula: z = a + b*orog -z_factors: a: lev b: b orog: orog -z_bounds_factors: a: lev_bnds b: b_bnds orog: orog -!---------------------------------- -! -! *************************************************************** -! -! Vertical coordinate formula terms: -! -! *************************************************************** -! -! -!============ -variable_entry: orog -!============ -modeling_realm: atmos -!---------------------------------- -!
Variable attributes: -!---------------------------------- -standard_name: surface_altitude -units: m -long_name: Surface Altitude -comment: height above the geoid; as defined here, ""the geoid"" is a surface of constant geopotential that, if the ocean were at rest, would coincide with mean sea level. Under this definition, the geoid changes as the mean volume of the ocean changes (e.g., due to glacial melt, or global warming of the ocean). Report here the height above the present-day geoid. Over ocean, report as 0.0 -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site -out_name: orog -type: real -valid_min: -700 -valid_max: 1.00E+04 -!---------------------------------- -! -! -!============ -variable_entry: p0 -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: reference pressure -units: Pa -!---------------------------------- -! -! -!============ -variable_entry: ptop -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: pressure at top of model -units: Pa -!---------------------------------- -! -! -! -!============ -variable_entry: a -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: a(k) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: alevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: b -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: b(k) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: alevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: a_bnds -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: a(k+1/2) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: alevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: b_bnds -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: b(k+1/2) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: alevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: ap -!============ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: ap(k) -units: Pa -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: alevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: ap_bnds -!============ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: ap(k+1/2) -units: Pa -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: alevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: ztop -!============ -! -!------------ -! -! 
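The formula terms defined in this block (p0, ptop, a, b, ap, and their bounds) plug into the axis formulas above; for standard_hybrid_sigma, full-level pressure is p(k) = a(k)*p0 + b(k)*ps. A minimal numpy sketch of that evaluation, with made-up coefficients (the helper is this sketch's own, not CMOR's):

    import numpy as np

    def hybrid_sigma_pressure(a, b, p0, ps):
        """p[k, ...] = a[k]*p0 + b[k]*ps, the standard_hybrid_sigma formula
        above, broadcast over the horizontal dimension of ps."""
        a = np.asarray(a).reshape(-1, 1)
        b = np.asarray(b).reshape(-1, 1)
        return a * p0 + b * np.asarray(ps).reshape(1, -1)

    a = [0.002, 0.2, 0.0]               # invented coefficients, top to surface
    b = [0.0, 0.6, 1.0]
    p0 = 100000.0                       # the reference-pressure term p0, Pa
    ps = np.array([98000.0, 101300.0])  # surface pressure at two columns, Pa
    p = hybrid_sigma_pressure(a, b, p0, ps)  # shape (3 levels, 2 columns), Pa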
Variable attributes: -!---------------------------------- -long_name: height of top of model -units: m -!---------------------------------- -! -! -! - -!============ -variable_entry: tas -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_temperature -units: K -cell_methods: time: point -cell_measures: area: areacella -long_name: Near-Surface Air Temperature -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 height2m -out_name: tas -type: real -!---------------------------------- -! - -!============ -variable_entry: ts -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_temperature -units: K -cell_methods: time: point -cell_measures: area: areacella -long_name: Surface Temperature -comment: ""skin"" temperature (i.e., SST for open ocean) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: ts -type: real -!---------------------------------- -! - -!============ -variable_entry: psl -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_pressure_at_sea_level -units: Pa -cell_methods: time: point -cell_measures: area: areacella -long_name: Sea Level Pressure -comment: not, in general, the same as surface pressure -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: psl -type: real -!---------------------------------- -! - -!============ -variable_entry: ps -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_air_pressure -units: Pa -cell_methods: time: point -cell_measures: area: areacella -long_name: Surface Air Pressure -comment: not, in general, the same as mean sea-level pressure -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: ps -type: real -!---------------------------------- -! - -!============ -variable_entry: uas -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: eastward_wind -units: m s-1 -cell_methods: time: point -long_name: Eastward Near-Surface Wind -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 height10m -out_name: uas -type: real -!---------------------------------- -! - -!============ -variable_entry: vas -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: northward_wind -units: m s-1 -cell_methods: time: point -long_name: Northward Near-Surface Wind -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 height10m -out_name: vas -type: real -!---------------------------------- -! - -!============ -variable_entry: sfcWind -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: wind_speed -units: m s-1 -cell_methods: time: point -long_name: Near-Surface Wind Speed -comment: This is the mean of the speed, not the speed computed from the mean u and v components of wind -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 height10m -out_name: sfcWind -type: real -!---------------------------------- -! - -!============ -variable_entry: hurs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: relative_humidity -units: % -cell_methods: time: point -cell_measures: area: areacella -long_name: Near-Surface Relative Humidity -comment: This is the relative humidity with respect to liquid water for T > 0 C, and with respect to ice for T < 0 C. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 height2m -out_name: hurs -type: real -!---------------------------------- -! - -!============ -variable_entry: huss -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: specific_humidity -units: 1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Near-Surface Specific Humidity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 height2m -out_name: huss -type: real -!---------------------------------- -! - -!============ -variable_entry: pr -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: precipitation_flux -units: kg m-2 s-1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Precipitation -comment: at surface; includes both liquid and solid phases from all types of clouds (both large-scale and convective) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: pr -type: real -!---------------------------------- -! - -!============ -variable_entry: prsn -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: snowfall_flux -units: kg m-2 s-1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Snowfall Flux -comment: at surface; includes precipitation of all forms of water in the solid phase -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: prsn -type: real -!---------------------------------- -! - -!============ -variable_entry: prc -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: convective_precipitation_flux -units: kg m-2 s-1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Convective Precipitation -comment: at surface; includes both liquid and solid phases. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: prc -type: real -!---------------------------------- -!
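Everything in this table is instantaneous ("time: point") on a site index rather than a lon/lat grid, so a cfSites variable is essentially a (time, site) array plus scalar coordinates such as height2m. A small netCDF4 sketch of that layout; the file name, site count, and values are invented for illustration, and real output would be written by CMOR against this table:

    import numpy as np
    from netCDF4 import Dataset

    with Dataset("cfsites_sketch.nc", "w") as nc:
        nc.createDimension("site", 3)        # site count is illustrative
        nc.createDimension("time", None)     # time1: unlimited, no bounds
        site = nc.createVariable("site", "i4", ("site",))
        site[:] = np.arange(3)
        height = nc.createVariable("height", "f8", ())  # height2m scalar axis
        height.assignValue(2.0)
        tas = nc.createVariable("tas", "f4", ("time", "site"),
                                fill_value=1.0e20)      # table missing_value
        tas.units = "K"
        tas.cell_methods = "time: point"
        tas.coordinates = "height"
        tas[0, :] = [287.3, 290.1, 284.9]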
- -!============ -variable_entry: evspsbl -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: water_evaporation_flux -units: kg m-2 s-1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Evaporation -comment: at surface; flux of water into the atmosphere due to conversion of both liquid and solid phases to vapor (from underlying surface and vegetation) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: evspsbl -type: real -!---------------------------------- -! - -!============ -variable_entry: sbl -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_snow_and_ice_sublimation_flux -units: kg m-2 s-1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Surface Snow and Ice Sublimation Flux -comment: The snow and ice sublimation flux is the loss of snow and ice mass from the surface resulting from their conversion to water vapor that enters the atmosphere. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: sbl -type: real -!---------------------------------- -! - -!============ -variable_entry: tauu -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downward_eastward_stress -units: Pa -cell_methods: time: point -long_name: Surface Downward Eastward Wind Stress -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: tauu -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: tauv -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downward_northward_stress -units: Pa -cell_methods: time: point -long_name: Surface Downward Northward Wind Stress -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: tauv -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: hfls -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upward_latent_heat_flux -units: W m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: Surface Upward Latent Heat Flux -comment: includes both evaporation and sublimation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: hfls -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: hfss -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upward_sensible_heat_flux -units: W m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: Surface Upward Sensible Heat Flux -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: hfss -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rlds -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downwelling_longwave_flux_in_air -units: W m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: Surface Downwelling Longwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: rlds -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rlus -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upwelling_longwave_flux_in_air -units: W m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: Surface Upwelling Longwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: rlus -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rsds -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downwelling_shortwave_flux_in_air -units: W m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: Surface Downwelling Shortwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: rsds -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rsus -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upwelling_shortwave_flux_in_air -units: W m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: Surface Upwelling Shortwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: rsus -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rsdscs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downwelling_shortwave_flux_in_air_assuming_clear_sky -units: W m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: Surface Downwelling Clear-Sky Shortwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: rsdscs -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rsuscs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upwelling_shortwave_flux_in_air_assuming_clear_sky -units: W m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: Surface Upwelling Clear-Sky Shortwave Radiation -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: rsuscs -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rldscs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downwelling_longwave_flux_in_air_assuming_clear_sky -units: W m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: Surface Downwelling Clear-Sky Longwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: rldscs -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rsdt -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: toa_incoming_shortwave_flux -units: W m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: TOA Incident Shortwave Radiation -comment: at the top of the atmosphere -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: rsdt -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rsut -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: toa_outgoing_shortwave_flux -units: W m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: TOA Outgoing Shortwave Radiation -comment: at the top of the atmosphere -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: rsut -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rlut -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: toa_outgoing_longwave_flux -units: W m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: TOA Outgoing Longwave Radiation -comment: at the top of the atmosphere (to be compared with satellite measurements) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: rlut -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rlutcs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: toa_outgoing_longwave_flux_assuming_clear_sky -units: W m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: TOA Outgoing Clear-Sky Longwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: rlutcs -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rsutcs -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: toa_outgoing_shortwave_flux_assuming_clear_sky -units: W m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: TOA Outgoing Clear-Sky Shortwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: rsutcs -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: prw -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: atmosphere_water_vapor_content -units: kg m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: Water Vapor Path -comment: vertically integrated through the atmospheric column -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: prw -type: real -!---------------------------------- -! - -!============ -variable_entry: clt -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction -units: % -cell_methods: time: point -cell_measures: area: areacella -long_name: Total Cloud Fraction -comment: for the whole atmospheric column, as seen from the surface or the top of the atmosphere. Includes both large-scale and convective cloud. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: clt -type: real -!---------------------------------- -! - -!============ -variable_entry: clwvi -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: atmosphere_cloud_condensed_water_content -units: kg m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: Condensed Water Path -comment: mass of condensed (liquid + ice) water in the column divided by the area of the column (not just the area of the cloudy portion of the column). Includes precipitating hydrometeors ONLY if the precipitating hydrometeor affects the calculation of radiative transfer in model. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: clwvi -type: real -!---------------------------------- -! - -!============ -variable_entry: clivi -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: atmosphere_cloud_ice_content -units: kg m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: Ice Water Path -comment: mass of ice water in the column divided by the area of the column (not just the area of the cloudy portion of the column). Includes precipitating frozen hydrometeors ONLY if the precipitating hydrometeor affects the calculation of radiative transfer in model. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: clivi -type: real -!---------------------------------- -! - -!============ -variable_entry: rtmt -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: net_downward_radiative_flux_at_top_of_atmosphere_model -units: W m-2 -cell_methods: time: point -cell_measures: area: areacella -long_name: Net Downward Flux at Top of Model -comment: i.e., at the top of that portion of the atmosphere where dynamics are explicitly treated by the model. This is reported only if it differs from the net downward radiative flux at the top of the atmosphere. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: rtmt -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: ccb -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_pressure_at_convective_cloud_base -units: Pa -cell_methods: time: point -cell_measures: area: areacella -long_name: Air Pressure at Convective Cloud Base -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: ccb -type: real -!---------------------------------- -! - -!============ -variable_entry: cct -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_pressure_at_convective_cloud_top -units: Pa -cell_methods: time: point -cell_measures: area: areacella -long_name: Air Pressure at Convective Cloud Top -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: cct -type: real -!---------------------------------- -! - -!============ -variable_entry: ci -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: convection_time_fraction -units: 1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Fraction of Time Convection Occurs -comment: Fraction of time that convection occurs in the grid cell. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: ci -type: real -!---------------------------------- -! - -!============ -variable_entry: sci -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: shallow_convection_time_fraction -units: 1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Fraction of Time Shallow Convection Occurs -comment: Fraction of time that shallow convection occurs in the grid cell. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: sci -type: real -!---------------------------------- -! - -!============ -variable_entry: fco2antt -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_carbon_dioxide_expressed_as_carbon_due_to_anthropogenic_emission -units: kg m-2 s-1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Carbon Mass Flux into Atmosphere Due to All Anthropogenic Emissions of CO2 -comment: This is requested only for the emission-driven coupled carbon climate model runs. Does not include natural fire sources but, includes all anthropogenic sources, including fossil fuel use, cement production, agricultural burning, and sources associated with anthropogenic land use change excluding forest regrowth. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: fco2antt -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: fco2fos -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_atmosphere_mass_content_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_fossil_fuel_combustion -units: kg m-2 s-1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Carbon Mass Flux into Atmosphere Due to Fossil Fuel Emissions of CO2 -comment: This is the prescribed anthropogenic CO2 flux from fossil fuel use, including cement production, and flaring (but not from land-use changes, agricultural burning, forest regrowth, etc.) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: fco2fos -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: fco2nat -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_natural_sources -units: kg m-2 s-1 -cell_methods: time: point -cell_measures: area: areacella -long_name: Surface Carbon Mass Flux into the Atmosphere Due to Natural Sources -comment: This is what the atmosphere sees (on its own grid). This field should be equivalent to the combined natural fluxes of carbon (requested in the L_mon and O_mon tables) that account for natural exchanges between the atmosphere and land or ocean reservoirs (i.e., ""net ecosystem biospheric productivity"", for land, and ""air to sea CO2 flux"", for ocean.) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site time1 -out_name: fco2nat -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: cl -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction_in_atmosphere_layer -units: % -cell_methods: time: point -long_name: Cloud Area Fraction -comment: Includes both large-scale and convective cloud. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevel site time1 -out_name: cl -type: real -!---------------------------------- -! - -!============ -variable_entry: clw -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: mass_fraction_of_cloud_liquid_water_in_air -units: 1 -cell_methods: time: point -long_name: Mass Fraction of Cloud Liquid Water -comment: Includes both large-scale and convective cloud. This is the mass of cloud liquid water in the grid cell divided by the mass of air (including the water in all phases) in the grid cell. This includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevel site time1 -out_name: clw -type: real -!---------------------------------- -! - -!============ -variable_entry: cli -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: mass_fraction_of_cloud_ice_in_air -units: 1 -cell_methods: time: point -long_name: Mass Fraction of Cloud Ice -comment: Includes both large-scale and convective cloud. This is the mass of cloud ice in the grid cell divided by the mass of air (including the water in all phases) in the grid cell. This includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevel site time1 -out_name: cli -type: real -!---------------------------------- -! - -!============ -variable_entry: mc -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: atmosphere_net_upward_convective_mass_flux -units: kg m-2 s-1 -cell_methods: time: point -long_name: Convective Mass Flux -comment: The net mass flux should represent the difference between the updraft and downdraft components. This is calculated as the convective mass flux divided by the area of the whole grid cell (not just the area of the updrafts). -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevhalf site time1 -out_name: mc -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: ta -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_temperature -units: K -cell_methods: time: point -long_name: Air Temperature -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevel site time1 -out_name: ta -type: real -!---------------------------------- -! - -!============ -variable_entry: ua -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: eastward_wind -units: m s-1 -cell_methods: time: point -long_name: Eastward Wind -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevel site time1 -out_name: ua -type: real -!---------------------------------- -! - -!============ -variable_entry: va -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: northward_wind -units: m s-1 -cell_methods: time: point -long_name: Northward Wind -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevel site time1 -out_name: va -type: real -!---------------------------------- -! - -!============ -variable_entry: hus -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: specific_humidity -units: 1 -cell_methods: time: point -long_name: Specific Humidity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevel site time1 -out_name: hus -type: real -!---------------------------------- -! - -!============ -variable_entry: hur -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: relative_humidity -units: % -cell_methods: time: point -long_name: Relative Humidity -comment: This is the relative humidity with respect to liquid water for T> 0 C, and with respect to ice for T<0 C. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevel site time1 -out_name: hur -type: real -!---------------------------------- -! - -!============ -variable_entry: wap -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: lagrangian_tendency_of_air_pressure -units: Pa s-1 -cell_methods: time: point -long_name: omega (=dp/dt) -comment: commonly referred to as ""omega"", this represents the vertical component of velocity in pressure coordinates (positive down) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevel site time1 -out_name: wap -type: real -!---------------------------------- -! - -!============ -variable_entry: zg -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: geopotential_height -units: m -cell_methods: time: point -long_name: Geopotential Height -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevel site time1 -out_name: zg -type: real -!---------------------------------- -! - -!============ -variable_entry: rlu -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: upwelling_longwave_flux_in_air -units: W m-2 -cell_methods: time: point -long_name: Upwelling Longwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevhalf site time1 -out_name: rlu -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rsu -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: upwelling_shortwave_flux_in_air -units: W m-2 -cell_methods: time: point -long_name: Upwelling Shortwave Radiation -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: alevhalf site time1 -out_name: rsu -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rld -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: downwelling_longwave_flux_in_air -units: W m-2 -cell_methods: time: point -long_name: Downwelling Longwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevhalf site time1 -out_name: rld -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rsd -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: downwelling_shortwave_flux_in_air -units: W m-2 -cell_methods: time: point -long_name: Downwelling Shortwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevhalf site time1 -out_name: rsd -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rlucs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: upwelling_longwave_flux_in_air_assuming_clear_sky -units: W m-2 -cell_methods: time: point -long_name: Upwelling Clear-Sky Longwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevhalf site time1 -out_name: rlucs -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rsucs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: upwelling_shortwave_flux_in_air_assuming_clear_sky -units: W m-2 -cell_methods: time: point -long_name: Upwelling Clear-Sky Shortwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevhalf site time1 -out_name: rsucs -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rldcs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: downwelling_longwave_flux_in_air_assuming_clear_sky -units: W m-2 -cell_methods: time: point -long_name: Downwelling Clear-Sky Longwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevhalf site time1 -out_name: rldcs -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rsdcs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: downwelling_shortwave_flux_in_air_assuming_clear_sky -units: W m-2 -cell_methods: time: point -long_name: Downwelling Clear-Sky Shortwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevhalf site time1 -out_name: rsdcs -type: real -positive: down -!---------------------------------- -! 
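
Note on the format being removed here: these CMOR tables are plain text in which !============ fences introduce a variable_entry (or axis_entry), ! starts a comment, and everything else is a key: value pair; a second colon, as in cell_methods: time: point, belongs to the value. A minimal Python sketch of a reader for this layout follows, assuming the table text is available as a string; it is illustrative only, not CMOR's or ESMValTool's actual parser.

def parse_variable_entries(text):
    """Collect ``key: value`` pairs per ``variable_entry`` block."""
    entries = {}
    current = None
    for raw in text.splitlines():
        # Drop "!" comments and the "!----" / "!====" rule lines.
        line = raw.split("!", 1)[0].strip()
        if ":" not in line:
            continue
        key, value = (part.strip() for part in line.split(":", 1))
        if key == "variable_entry":
            current = entries.setdefault(value, {})
        elif current is not None:
            current[key] = value
    return entries

Applied to the entries above, parse_variable_entries(text)["rsdcs"]["positive"] would return "down".
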
- -!============ -variable_entry: tnt -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_air_temperature -units: K s-1 -cell_methods: time: point -long_name: Tendency of Air Temperature -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevel site time1 -out_name: tnt -type: real -!---------------------------------- -! - -!============ -variable_entry: tnta -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_air_temperature_due_to_advection -units: K s-1 -cell_methods: time: point -long_name: Tendency of Air Temperature due to Advection -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevel site time1 -out_name: tnta -type: real -!---------------------------------- -! - -!============ -variable_entry: tntmp -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_air_temperature_due_to_model_physics -units: K s-1 -cell_methods: time: point -long_name: Tendency of Air Temperature due to Diabatic Processes -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevel site time1 -out_name: tntmp -type: real -!---------------------------------- -! - -!============ -variable_entry: tntscpbl -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_air_temperature_due_to_stratiform_cloud_and_precipitation_and_boundary_layer_mixing -units: K s-1 -cell_methods: time: point -long_name: Tendency of Air Temperature due to Stratiform Cloud Condensation and Evaporation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevel site time1 -out_name: tntscpbl -type: real -!---------------------------------- -! - -!============ -variable_entry: tntr -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_air_temperature_due_to_radiative_heating -units: K s-1 -cell_methods: time: point -long_name: Tendency of Air Temperature due to Radiative Heating -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevel site time1 -out_name: tntr -type: real -!---------------------------------- -! - -!============ -variable_entry: tntc -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_air_temperature_due_to_convection -units: K s-1 -cell_methods: time: point -long_name: Tendency of Air Temperature due to Moist Convection -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevel site time1 -out_name: tntc -type: real -!---------------------------------- -! - -!============ -variable_entry: tnhus -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: tendency_of_specific_humidity -units: s-1 -cell_methods: time: point -long_name: Tendency of Specific Humidity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevel site time1 -out_name: tnhus -type: real -!---------------------------------- -! - -!============ -variable_entry: tnhusa -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_specific_humidity_due_to_advection -units: s-1 -cell_methods: time: point -long_name: Tendency of Specific Humidity due to Advection -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevel site time1 -out_name: tnhusa -type: real -!---------------------------------- -! - -!============ -variable_entry: tnhusc -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_specific_humidity_due_to_convection -units: s-1 -cell_methods: time: point -long_name: Tendency of Specific Humidity due to Convection -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevel site time1 -out_name: tnhusc -type: real -!---------------------------------- -! - -!============ -variable_entry: tnhusd -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_specific_humidity_due_to_diffusion -units: s-1 -cell_methods: time: point -long_name: Tendency of Specific Humidity due to Diffusion -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevel site time1 -out_name: tnhusd -type: real -!---------------------------------- -! - -!============ -variable_entry: tnhusscpbl -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_specific_humidity_due_to_stratiform_cloud_and_precipitation_and_boundary_layer_mixing -units: s-1 -cell_methods: time: point -long_name: Tendency of Specific Humidity due to Stratiform Cloud Condensation and Evaporation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevel site time1 -out_name: tnhusscpbl -type: real -!---------------------------------- -! - -!============ -variable_entry: tnhusmp -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: tendency_of_specific_humidity_due_to_model_physics -units: s-1 -cell_methods: time: point -long_name: Tendency of Specific Humidity due to Model Physics -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevel site time1 -out_name: tnhusmp -type: real -!---------------------------------- -! - -!============ -variable_entry: evu -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: atmosphere_momentum_diffusivity -units: m2 s-1 -cell_methods: time: point -long_name: Eddy Viscosity Coefficient for Momentum Variables -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevel site time1 -out_name: evu -type: real -!---------------------------------- -! - -!============ -variable_entry: edt -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: atmosphere_heat_diffusivity -units: m2 s-1 -cell_methods: time: point -long_name: Eddy Diffusivity Coefficient for Temperature Variable -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevel site time1 -out_name: edt -type: real -!---------------------------------- -! - -!============ -variable_entry: pfull -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_pressure -units: Pa -cell_methods: time: point -long_name: Pressure on Model Levels -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevel site time1 -out_name: pfull -type: real -!---------------------------------- -! - -!============ -variable_entry: phalf -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_pressure -units: Pa -cell_methods: time: point -long_name: Pressure on Model Half-Levels -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: alevhalf site time1 -out_name: phalf -type: real -!---------------------------------- -! - -!============ -variable_entry: longitude -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: longitude -units: degrees_east -long_name: Longitude -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site -out_name: lon -type: real -valid_min: 0.0 -valid_max: 360.0 -!---------------------------------- -! - -!============ -variable_entry: latitude -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: latitude -units: degrees_north -long_name: Latitude -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: site -out_name: lat -type: real -valid_min: -90.0 -valid_max: 90.0 -!---------------------------------- -! - diff --git a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_day b/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_day deleted file mode 100644 index 88a568109f..0000000000 --- a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_day +++ /dev/null @@ -1,1131 +0,0 @@ -table_id: Table day -modeling_realm: atmos - -frequency: day - -cmor_version: 2.6 ! minimum version of CMOR that can read this table -cf_version: 1.4 ! version of CF that output conforms to -project_id: CMIP5 ! project id -table_date: 17 July 2013 ! date this table was constructed - -missing_value: 1.e20 ! value used to indicate a missing value - ! in arrays output by netCDF as 32-bit IEEE - ! 
floating-point numbers (float or real) - -baseURL: http://cmip-pcmdi.llnl.gov/CMIP5/dataLocation -product: output - -required_global_attributes: creation_date tracking_id forcing model_id parent_experiment_id parent_experiment_rip branch_time contact institute_id ! space separated required global attribute - -forcings: N/A Nat Ant GHG SD SI SA TO SO Oz LU Sl Vl SS Ds BC MD OC AA - -expt_id_ok: '10- or 30-year run initialized in year XXXX' 'decadalXXXX' -expt_id_ok: 'volcano-free hindcast initialized in year XXXX' 'noVolcXXXX' -expt_id_ok: 'prediction with 2010 volcano' 'volcIn2010' -expt_id_ok: 'pre-industrial control' 'piControl' -expt_id_ok: 'historical' 'historical' -expt_id_ok: 'historical extension' 'historicalExt' -expt_id_ok: 'other historical forcing' 'historicalMisc' -expt_id_ok: 'mid-Holocene' 'midHolocene' -expt_id_ok: 'last glacial maximum' 'lgm' -expt_id_ok: 'last millennium' 'past1000' -expt_id_ok: 'RCP4.5' 'rcp45' -expt_id_ok: 'RCP8.5' 'rcp85' -expt_id_ok: 'RCP2.6' 'rcp26' -expt_id_ok: 'RCP6' 'rcp60' -expt_id_ok: 'ESM pre-industrial control' 'esmControl' -expt_id_ok: 'ESM historical' 'esmHistorical' -expt_id_ok: 'ESM RCP8.5' 'esmrcp85' -expt_id_ok: 'ESM fixed climate 1' 'esmFixClim1' -expt_id_ok: 'ESM fixed climate 2' 'esmFixClim2' -expt_id_ok: 'ESM feedback 1' 'esmFdbk1' -expt_id_ok: 'ESM feedback 2' 'esmFdbk2' -expt_id_ok: '1 percent per year CO2' '1pctCO2' -expt_id_ok: 'abrupt 4XCO2' 'abrupt4xCO2' -expt_id_ok: 'natural-only' 'historicalNat' -expt_id_ok: 'GHG-only' 'historicalGHG' -expt_id_ok: 'AMIP' 'amip' -expt_id_ok: '2030 time-slice' 'sst2030' -expt_id_ok: 'control SST climatology' 'sstClim' -expt_id_ok: 'CO2 forcing' 'sstClim4xCO2' -expt_id_ok: 'all aerosol forcing' 'sstClimAerosol' -expt_id_ok: 'sulfate aerosol forcing' 'sstClimSulfate' -expt_id_ok: '4xCO2 AMIP' 'amip4xCO2' -expt_id_ok: 'AMIP plus patterned anomaly' 'amipFuture' -expt_id_ok: 'aqua planet control' 'aquaControl' -expt_id_ok: '4xCO2 aqua planet' 'aqua4xCO2' -expt_id_ok: 'aqua planet plus 4K anomaly' 'aqua4K' -expt_id_ok: 'AMIP plus 4K anomaly' 'amip4K' - - -approx_interval: 1.000000 ! approximate spacing between successive time - ! samples (in units of the output time - ! coordinate. - -!============ -axis_entry: longitude -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: longitude -units: degrees_east -axis: X ! X, Y, Z, T (default: undeclared) -long_name: longitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lon -valid_min: 0.0 -valid_max: 360.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: latitude -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: latitude -units: degrees_north -axis: Y ! X, Y, Z, T (default: undeclared) -long_name: latitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lat -valid_min: -90.0 -valid_max: 90.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: plev8 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: air_pressure -units: Pa -axis: Z ! X, Y, Z, T (default: undeclared) -positive: down ! 
up or down (default: undeclared) -long_name: pressure -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: plev -stored_direction: decreasing -tolerance: 0.001 -type: double -requested: 100000. 85000. 70000. 50000. 25000. 10000. 5000. 1000. ! space-separated list of requested coordinates -must_have_bounds: no -!---------------------------------- -! - - -!============ -axis_entry: time -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: time -units: days since ? -axis: T ! X, Y, Z, T (default: undeclared) -long_name: time -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: time -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: height2m -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: height -units: m -axis: Z ! X, Y, Z, T (default: undeclared) -positive: up ! up or down (default: undeclared) -long_name: height -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: height -valid_min: 1.0 -valid_max: 10.0 -stored_direction: increasing -type: double -value: 2. ! of scalar (singleton) dimension -must_have_bounds: no -!---------------------------------- -! - - -!============ -axis_entry: height10m -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: height -units: m -axis: Z ! X, Y, Z, T (default: undeclared) -positive: up ! up or down (default: undeclared) -long_name: height -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: height -valid_min: 1.0 -valid_max: 30.0 -stored_direction: increasing -type: double -value: 10. ! of scalar (singleton) dimension -must_have_bounds: no -!---------------------------------- -! - - -!============ -axis_entry: sdepth1 -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: depth -units: m -axis: Z ! X, Y, Z, T (default: undeclared) -positive: down ! up or down (default: undeclared) -long_name: depth -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: depth -valid_min: 0.0 -valid_max: 0.2 -stored_direction: increasing -type: double -value: 0.05 ! of scalar (singleton) dimension -bounds_values: 0.0 0.1 ! of scalar (singleton) dimension bounds -must_have_bounds: yes -!---------------------------------- -! - -!============ -variable_entry: huss -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: specific_humidity -units: 1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Near-Surface Specific Humidity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time height2m -out_name: huss -type: real -!---------------------------------- -! - -!============ -variable_entry: tasmin -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: air_temperature -units: K -cell_methods: time: minimum -cell_measures: area: areacella -long_name: Daily Minimum Near-Surface Air Temperature -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time height2m -out_name: tasmin -type: real -!---------------------------------- -! - -!============ -variable_entry: tasmax -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_temperature -units: K -cell_methods: time: maximum -cell_measures: area: areacella -long_name: Daily Maximum Near-Surface Air Temperature -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time height2m -out_name: tasmax -type: real -!---------------------------------- -! - -!============ -variable_entry: tas -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_temperature -units: K -cell_methods: time: mean -cell_measures: area: areacella -long_name: Near-Surface Air Temperature -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time height2m -out_name: tas -type: real -!---------------------------------- -! - -!============ -variable_entry: pr -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: precipitation_flux -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Precipitation -comment: at surface; includes both liquid and solid phases from all types of clouds (both large-scale and convective) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: pr -type: real -!---------------------------------- -! - -!============ -variable_entry: psl -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_pressure_at_sea_level -units: Pa -cell_methods: time: mean -cell_measures: area: areacella -long_name: Sea Level Pressure -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: psl -type: real -!---------------------------------- -! - -!============ -variable_entry: sfcWind -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: wind_speed -units: m s-1 -cell_methods: time: mean -long_name: Daily-Mean Near-Surface Wind Speed -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time height10m -out_name: sfcWind -type: real -!---------------------------------- -! - -!============ -variable_entry: tossq -!============ -modeling_realm: ocean -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: square_of_sea_surface_temperature -units: K2 -cell_methods: time: mean -cell_measures: area: areacello -long_name: Square of Sea Surface Temperature -comment: square of temperature of liquid ocean, averaged over the day. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: tossq -type: real -!---------------------------------- -! - -!============ -variable_entry: tos -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_temperature -units: K -cell_methods: time: mean -cell_measures: area: areacello -long_name: Sea Surface Temperature -comment: temperature of liquid ocean. Note that the correct standard_name for this variable is ""sea_surface_temperature"", not ""surface_temperature"", but this was discovered too late to correct. To maintain consistency across CMIP5 models, the wrong standard_name will continue to be used. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: tos -type: real -!---------------------------------- -! - -!============ -variable_entry: omldamax -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_mixed_layer_thickness_defined_by_mixing_scheme -units: m -cell_methods: time: maximum -cell_measures: area: areacello -long_name: Daily Maximum Ocean Mixed Layer Thickness Defined by Mixing Scheme -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: omldamax -type: real -!---------------------------------- -! - -!============ -variable_entry: mrsos -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: moisture_content_of_soil_layer -units: kg m-2 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Moisture in Upper Portion of Soil Column -comment: the mass of water in all phases in a thin surface soil layer. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time sdepth1 -out_name: mrsos -type: real -!---------------------------------- -! - -!============ -variable_entry: rhs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: relative_humidity -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: Near-Surface Relative Humidity -comment: This is the relative humidity with respect to liquid water for T> 0 C, and with respect to ice for T<0 C. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time height2m -out_name: rhs -type: real -!---------------------------------- -! - -!============ -variable_entry: rhsmin -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: relative_humidity -units: % -cell_methods: time: minimum -cell_measures: area: areacella -long_name: Surface Daily Minimum Relative Humidity -comment: This is the relative humidity with respect to liquid water for T> 0 C, and with respect to ice for T<0 C. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time height2m -out_name: rhsmin -type: real -!---------------------------------- -! - -!============ -variable_entry: rhsmax -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: relative_humidity -units: % -cell_methods: time: maximum -cell_measures: area: areacella -long_name: Surface Daily Maximum Relative Humidity -comment: This is the relative humidity with respect to liquid water for T> 0 C, and with respect to ice for T<0 C. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time height2m -out_name: rhsmax -type: real -!---------------------------------- -! - -!============ -variable_entry: snc -!============ -modeling_realm: landIce land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_snow_area_fraction -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: Snow Area Fraction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: snc -type: real -!---------------------------------- -! - -!============ -variable_entry: clt -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cloud_area_fraction -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: Total Cloud Fraction -comment: for the whole atmospheric column, as seen from the surface or the top of the atmosphere. Includes both large-scale and convective cloud. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: clt -type: real -!---------------------------------- -! - -!============ -variable_entry: tslsi -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_temperature -units: K -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Temperature Where Land or Sea Ice -comment: """skin"" temperature of all surfaces except open ocean. " -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: tslsi -type: real -!---------------------------------- -! - -!============ -variable_entry: snw -!============ -modeling_realm: landIce land -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: surface_snow_amount -units: kg m-2 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Surface Snow Amount -comment: the mass of surface snow on the land portion of the grid cell divided by the land area in the grid cell; reported as 0.0 where the land fraction is 0; excludes snow on vegetation canopy or on sea ice. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: snw -type: real -!---------------------------------- -! - -!============ -variable_entry: prc -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: convective_precipitation_flux -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Convective Precipitation -comment: at surface; includes both liquid and solid phases. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: prc -type: real -!---------------------------------- -! - -!============ -variable_entry: prsn -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: snowfall_flux -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Snowfall Flux -comment: at surface; includes precipitation of all forms of water in the solid phase -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: prsn -type: real -!---------------------------------- -! - -!============ -variable_entry: mrro -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: runoff_flux -units: kg m-2 s-1 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Total Runoff -comment: "computed as the total runoff (including ""drainage"" through the base of the soil model) leaving the land portion of the grid cell divided by the land area in the grid cell." -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: mrro -type: real -!---------------------------------- -! - -!============ -variable_entry: uas -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: eastward_wind -units: m s-1 -cell_methods: time: mean -long_name: Eastward Near-Surface Wind -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time height10m -out_name: uas -type: real -!---------------------------------- -! - -!============ -variable_entry: vas -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: northward_wind -units: m s-1 -cell_methods: time: mean -long_name: Northward Near-Surface Wind -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: longitude latitude time height10m -out_name: vas -type: real -!---------------------------------- -! - -!============ -variable_entry: sfcWindmax -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: wind_speed -units: m s-1 -cell_methods: time: maximum -long_name: Daily Maximum Near-Surface Wind Speed -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time height10m -out_name: sfcWindmax -type: real -!---------------------------------- -! - -!============ -variable_entry: hfls -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upward_latent_heat_flux -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Upward Latent Heat Flux -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfls -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: hfss -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upward_sensible_heat_flux -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Upward Sensible Heat Flux -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: hfss -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rlds -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downwelling_longwave_flux_in_air -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Downwelling Longwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rlds -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rlus -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upwelling_longwave_flux_in_air -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Upwelling Longwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rlus -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rsds -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_downwelling_shortwave_flux_in_air -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Downwelling Shortwave Radiation -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rsds -type: real -positive: down -!---------------------------------- -! - -!============ -variable_entry: rsus -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_upwelling_shortwave_flux_in_air -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Upwelling Shortwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rsus -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: rlut -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: toa_outgoing_longwave_flux -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: TOA Outgoing Longwave Radiation -comment: at the top of the atmosphere. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rlut -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: usi -!============ -modeling_realm: seaIce ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_ice_x_velocity -units: m s-1 -cell_methods: time: mean -long_name: X-Component of Sea Ice Velocity -comment: "Reported as ""missing"" in regions free of sea ice." -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: usi -type: real -!---------------------------------- -! - -!============ -variable_entry: vsi -!============ -modeling_realm: seaIce ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_ice_y_velocity -units: m s-1 -cell_methods: time: mean -long_name: Y-Component of Sea Ice Velocity -comment: "Reported as ""missing"" in regions free of sea ice." -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: vsi -type: real -!---------------------------------- -! - -!============ -variable_entry: sic -!============ -modeling_realm: seaIce ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_ice_area_fraction -units: % -cell_methods: time: mean -cell_measures: area: areacello -long_name: Sea Ice Area Fraction -comment: fraction of grid cell covered by sea ice. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: sic -type: real -!---------------------------------- -! - -!============ -variable_entry: sit -!============ -modeling_realm: seaIce ocean -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: sea_ice_thickness -units: m -cell_methods: time: mean area: mean where sea -cell_measures: area: areacello -long_name: Sea Ice Thickness -comment: the mean thickness of sea ice in the ocean portion of the grid cell (averaging over the entire ocean portion, including the ice-free fraction). Reported as 0.0 in regions free of sea ice. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: sit -type: real -!---------------------------------- -! - -!============ -variable_entry: ta -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: air_temperature -units: K -cell_methods: time: mean -cell_measures: area: areacella -long_name: Air Temperature -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude plev8 time -out_name: ta -type: real -!---------------------------------- -! - -!============ -variable_entry: hur -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: relative_humidity -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: Relative Humidity -comment: This is the relative humidity with respect to liquid water for T> 0 C, and with respect to ice for T<0 C. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude plev8 time -out_name: hur -type: real -!---------------------------------- -! - -!============ -variable_entry: hus -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: specific_humidity -units: 1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Specific Humidity -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude plev8 time -out_name: hus -type: real -!---------------------------------- -! - -!============ -variable_entry: wap -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: lagrangian_tendency_of_air_pressure -units: Pa s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: omega (=dp/dt) -comment: commonly referred to as ""omega"", this represents the vertical component of velocity in pressure coordinates (positive down) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude plev8 time -out_name: wap -type: real -!---------------------------------- -! - -!============ -variable_entry: va -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: northward_wind -units: m s-1 -cell_methods: time: mean -long_name: Northward Wind -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude plev8 time -out_name: va -type: real -!---------------------------------- -! 
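
The deleted entries above all follow the same legacy plain-text CMOR layout: a "variable_entry: <name>" header followed by "key: value" pairs, with "!" lines acting as comments and separators. A minimal reader sketch for that layout (an illustrative helper, not the table parser ESMValTool actually ships; the file name in the usage note is hypothetical):

def parse_variable_entries(path):
    # Minimal sketch of a reader for the legacy plain-text CMOR table
    # layout shown above. Illustrative only; not ESMValTool's parser.
    entries = {}
    current = None
    with open(path) as table:
        for raw in table:
            line = raw.strip()
            if not line or line.startswith('!'):
                continue  # separator / comment lines carry no data
            key, _, value = line.partition(':')
            key, value = key.strip(), value.strip()
            if key == 'variable_entry':
                current = entries.setdefault(value, {})
            elif key.endswith('_entry'):
                current = None  # skip axis_entry / mapping_entry blocks
            elif current is not None:
                current[key] = value

    return entries

# Hypothetical usage against the daily table deleted in this diff:
# parse_variable_entries('CMIP5_day')['ta']['units'] == 'K'
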
- -!============ -variable_entry: ua -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: eastward_wind -units: m s-1 -cell_methods: time: mean -long_name: Eastward Wind -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude plev8 time -out_name: ua -type: real -!---------------------------------- -! - -!============ -variable_entry: zg -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: geopotential_height -units: m -cell_methods: time: mean -cell_measures: area: areacella -long_name: Geopotential Height -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude plev8 time -out_name: zg -type: real -!---------------------------------- -! - diff --git a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_fx b/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_fx deleted file mode 100644 index 58c8296b3d..0000000000 --- a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_fx +++ /dev/null @@ -1,811 +0,0 @@ -table_id: Table fx -modeling_realm: atmos - -frequency: fx - -cmor_version: 2.6 ! minimum version of CMOR that can read this table -cf_version: 1.4 ! version of CF that output conforms to -project_id: CMIP5 ! project id -table_date: 17 July 2013 ! date this table was constructed - -missing_value: 1.e20 ! value used to indicate a missing value - ! in arrays output by netCDF as 32-bit IEEE - ! floating-point numbers (float or real) - -baseURL: http://cmip-pcmdi.llnl.gov/CMIP5/dataLocation -product: output - -required_global_attributes: creation_date tracking_id forcing model_id parent_experiment_id parent_experiment_rip branch_time contact institute_id ! 
space separated required global attribute - -forcings: N/A Nat Ant GHG SD SI SA TO SO Oz LU Sl Vl SS Ds BC MD OC AA - -expt_id_ok: '10- or 30-year run initialized in year XXXX' 'decadalXXXX' -expt_id_ok: 'volcano-free hindcast initialized in year XXXX' 'noVolcXXXX' -expt_id_ok: 'prediction with 2010 volcano' 'volcIn2010' -expt_id_ok: 'pre-industrial control' 'piControl' -expt_id_ok: 'historical' 'historical' -expt_id_ok: 'historical extension' 'historicalExt' -expt_id_ok: 'other historical forcing' 'historicalMisc' -expt_id_ok: 'mid-Holocene' 'midHolocene' -expt_id_ok: 'last glacial maximum' 'lgm' -expt_id_ok: 'last millennium' 'past1000' -expt_id_ok: 'RCP4.5' 'rcp45' -expt_id_ok: 'RCP8.5' 'rcp85' -expt_id_ok: 'RCP2.6' 'rcp26' -expt_id_ok: 'RCP6' 'rcp60' -expt_id_ok: 'ESM pre-industrial control' 'esmControl' -expt_id_ok: 'ESM historical' 'esmHistorical' -expt_id_ok: 'ESM RCP8.5' 'esmrcp85' -expt_id_ok: 'ESM fixed climate 1' 'esmFixClim1' -expt_id_ok: 'ESM fixed climate 2' 'esmFixClim2' -expt_id_ok: 'ESM feedback 1' 'esmFdbk1' -expt_id_ok: 'ESM feedback 2' 'esmFdbk2' -expt_id_ok: '1 percent per year CO2' '1pctCO2' -expt_id_ok: 'abrupt 4XCO2' 'abrupt4xCO2' -expt_id_ok: 'natural-only' 'historicalNat' -expt_id_ok: 'GHG-only' 'historicalGHG' -expt_id_ok: 'AMIP' 'amip' -expt_id_ok: '2030 time-slice' 'sst2030' -expt_id_ok: 'control SST climatology' 'sstClim' -expt_id_ok: 'CO2 forcing' 'sstClim4xCO2' -expt_id_ok: 'all aerosol forcing' 'sstClimAerosol' -expt_id_ok: 'sulfate aerosol forcing' 'sstClimSulfate' -expt_id_ok: '4xCO2 AMIP' 'amip4xCO2' -expt_id_ok: 'AMIP plus patterned anomaly' 'amipFuture' -expt_id_ok: 'aqua planet control' 'aquaControl' -expt_id_ok: '4xCO2 aqua planet' 'aqua4xCO2' -expt_id_ok: 'aqua planet plus 4K anomaly' 'aqua4K' -expt_id_ok: 'AMIP plus 4K anomaly' 'amip4K' - - -approx_interval: 0.000000 ! approximate spacing between successive time - ! samples (in units of the output time - ! coordinate. - -generic_levels: olevel - -!============ -axis_entry: longitude -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: longitude -units: degrees_east -axis: X ! X, Y, Z, T (default: undeclared) -long_name: longitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lon -valid_min: 0.0 -valid_max: 360.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - - -!============ -axis_entry: latitude -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: latitude -units: degrees_north -axis: Y ! X, Y, Z, T (default: undeclared) -long_name: latitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lat -valid_min: -90.0 -valid_max: 90.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - -!============ -axis_entry: depth_coord -!============ -! -! This vertical coordinate is used in z-coordinate models -! The units are meters (m), and it has a value of 0. at the surface -! and becomes more and more positive with depth. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: depth -units: m -axis: Z -positive: down -long_name: ocean depth coordinate -!---------------------------------- -! -! 
Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: increasing -valid_min: 0. -valid_max: 12000. -!---------------------------------- -! -!============ -axis_entry: olev -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -axis: Z ! X, Y, Z, T (default: undeclared) -positive: down ! up or down (default: undeclared) -long_name: generic ocean level -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lev -stored_direction: increasing -type: double -must_have_bounds: no -!---------------------------------- -! -!============ -axis_entry: ocean_double_sigma -!============ -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: ocean_double_sigma -axis: Z -positive: up -long_name: ocean double sigma coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -formula: for k <= k_c:\n z= sigma*f \n for k > k_c:\n z= f + (sigma-1)*(depth-f) \n f= 0.5*(z1+ z2) + 0.5*(z1-z2)* tanh(2*a/(z1-z2)*(depth-href)) -z_factors: sigma: sigma depth: depth z1: z1 z2: z2 a: a_coeff href: href k_c: k_c -z_bounds_factors: sigma: sigma_bnds depth: depth z1: z1 z2: z2 a: a href: href k_c: k_c -!---------------------------------- -! -!============ -axis_entry: ocean_sigma_z -!============ -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: ocean_sigma_z -axis: Z -long_name: ocean sigma over z coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -formula: for k <= nsigma: z = eta + sigma*(min(depth_c,depth)+eta) ; for k > nsigma: z = zlev -z_factors: sigma: sigma eta: eta depth: depth depth_c: depth_c nsigma: nsigma zlev: zlev -z_bounds_factors: sigma: sigma_bnds eta: eta depth: depth depth_c: depth_c nsigma: nsigma zlev: zlev_bnds -!---------------------------------- -! -!============ -axis_entry: ocean_s -!============ -! -! This coordinate is dimensionless and varies from 0 at the surface to -1. at the ocean floor. -! The values of s, which appears in the formula below, should be stored as ocean_s. -! Note that in the netCDF file the variable will be named "lev", not ocean_s. -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: ocean_s_coordinate -axis: Z -positive: up -long_name: ocean s-coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: -1. -valid_max: 0. -formula: z = eta*(1+s) + depth_c*s + (depth-depth_c)*C \n where \n C=(1-b)*sinh(a*s)/sinh(a) +\n b*(tanh(a*(s+0.5))/(2*tanh(0.5*a)) - 0.5) -z_factors: s: lev eta: eta depth: depth a: a_coeff b: b_coeff depth_c: depth_c -z_bounds_factors: s: lev_bnds eta: eta depth: depth a: a b: b depth_c: depth_c -!---------------------------------- -! -!============ -axis_entry: ocean_sigma -!============ -! -! This coordinate is dimensionless and varies from 0 at the surface to -1. at the ocean floor. -! The values of sigma, which appears in the formula below, should be stored as ocean_sigma. -! Note that in the netCDF file the variable will be named "lev", not ocean_sigma. -! -!------------ -! -! 
Axis attributes: -!---------------------------------- -standard_name: ocean_sigma_coordinate -axis: Z -positive: up -long_name: ocean sigma coordinate -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lev -must_have_bounds: yes -stored_direction: decreasing -valid_min: -1. -valid_max: 0. -formula: z = eta + sigma*(depth+eta) -z_factors: sigma: lev eta: eta depth: depth -z_bounds_factors: sigma: lev_bnds eta: eta depth: depth -!---------------------------------- -! -! -! *************************************************************** -! -! Vertical coordinate formula_terms: -! -! *************************************************************** -! -!============ -variable_entry: eta -!============ -!---------------------------------- -! Variable attributes: -!---------------------------------- -units: m -long_name: Sea Surface Height -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude -type: real -!---------------------------------- -! -! -!============ -variable_entry: depth -!============ -!---------------------------------- -! Variable attributes: -!---------------------------------- -units: m -long_name: Sea Floor Depth -comment: Ocean bathymetry. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude -out_name: depth -type: real -valid_min: 0. -valid_max: 12000. -ok_min_mean_abs: 2000. -ok_max_mean_abs: 5000. -!---------------------------------- -! -! -!============ -variable_entry: sigma -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: sigma(k) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: olevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: sigma_bnds -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: sigma(k+1/2) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: olevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: zlev -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: zlev(k) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: olevel -type: double -!---------------------------------- -! -! -!============ -variable_entry: zlev_bnds -!============ -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: zlev(k+1/2) -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -dimensions: olevel -type: double -!---------------------------------- -! -! -! -!============ -variable_entry: depth_c -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: depth_c -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: double -!---------------------------------- -! -! 
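
The ocean_sigma entry above gives the full recipe for recovering depth from the dimensionless level: z = eta + sigma*(depth + eta), with sigma running from 0 at the surface to -1 at the sea floor and the axis declared positive: up. A small worked example (all values made up):

import numpy as np

# Worked example of the ocean_sigma formula defined above:
#   z = eta + sigma * (depth + eta)
# sigma is dimensionless: 0 at the surface, -1 at the sea floor.
sigma = np.array([0.0, -0.25, -0.5, -0.75, -1.0])
eta = 0.5       # sea surface height (m), made-up value
depth = 4000.0  # sea floor depth (m), made-up value

z = eta + sigma * (depth + eta)
print(z)  # [0.5, -999.625, -1999.75, -2999.875, -4000.0]
# z equals eta at the surface and -depth at the sea floor, as expected
# for a positive-up vertical axis.
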
-!============ -variable_entry: a -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: coefficient a -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: double -!---------------------------------- -! -! -!============ -variable_entry: b -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: coefficient b -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: double -!---------------------------------- -! -! -!============ -variable_entry: nsigma -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: nsigma -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: integer -!---------------------------------- -! -! -!============ -variable_entry: z1 -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: z1 -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: double -!---------------------------------- -! -! -!============ -variable_entry: z2 -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: z2 -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: double -!---------------------------------- -! -! -!============ -variable_entry: href -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: href -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: double -!---------------------------------- -! -! -!============ -variable_entry: k_c -!============ -! -!------------ -! -! Variable attributes: -!---------------------------------- -long_name: vertical coordinate formula term: k_c -!---------------------------------- -! -! Additional variable information: -!---------------------------------- -type: integer -!---------------------------------- -! -! - -!============ -variable_entry: areacella -!============ -modeling_realm: atmos land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cell_area -units: m2 -long_name: Atmosphere Grid-Cell Area -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude -out_name: areacella -type: real -valid_min: 100000.0 -valid_max: 1e+12 -ok_min_mean_abs: 100000.0 -ok_max_mean_abs: 1e+12 -!---------------------------------- -! - -!============ -variable_entry: orog -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: surface_altitude -units: m -cell_measures: area: areacella -long_name: Surface Altitude -comment: height above the geoid; as defined here, ""the geoid"" is a surface of constant geopotential that, if the ocean were at rest, would coincide with mean sea level. 
Under this definition, the geoid changes as the mean volume of the ocean changes (e.g., due to glacial melt, or global warming of the ocean). Reported here is the height above the present-day geoid (0.0 over ocean). -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude -out_name: orog -type: real -valid_min: -700.0 -valid_max: 10000.0 -!---------------------------------- -! - -!============ -variable_entry: sftlf -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: land_area_fraction -units: % -cell_measures: area: areacella -long_name: Land Area Fraction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude -out_name: sftlf -type: real -valid_min: 0.0 -valid_max: 100.0 -!---------------------------------- -! - -!============ -variable_entry: sftgif -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: land_ice_area_fraction -units: % -cell_measures: area: areacella -long_name: Fraction of Grid Cell Covered with Glacier -comment: fraction of grid cell occupied by ""permanent"" ice (i.e., glaciers). -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude -out_name: sftgif -type: real -valid_min: 0.0 -valid_max: 100.0 -!---------------------------------- -! - -!============ -variable_entry: mrsofc -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: soil_moisture_content_at_field_capacity -units: kg m-2 -cell_measures: area: areacella -long_name: Capacity of Soil to Store Water -comment: "reported ""where land"": divide the total water holding capacity of all the soil in the grid cell by the land area in the grid cell; reported as ""missing"" where the land fraction is 0." -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude -out_name: mrsofc -type: real -!---------------------------------- -! - -!============ -variable_entry: rootd -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: root_depth -units: m -cell_measures: area: areacella -long_name: Maximum Root Depth -comment: report the maximum soil depth reachable by plant roots (if defined in model), i.e., the maximum soil depth from which they can extract moisture; report as ""missing"" where the land fraction is 0. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude -out_name: rootd -type: real -valid_min: 0.0 -valid_max: 30.0 -!---------------------------------- -! - -!============ -variable_entry: deptho -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_floor_depth_below_geoid -units: m -cell_measures: area: areacello -long_name: Sea Floor Depth -comment: Ocean bathymetry. Reported here is the sea floor depth for present day. Reported as missing for land grid cells. -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: longitude latitude -out_name: deptho -type: real -valid_min: 0.0 -valid_max: 10000.0 -ok_min_mean_abs: 2000.0 -ok_max_mean_abs: 5000.0 -!---------------------------------- -! - -!============ -variable_entry: volcello -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: ocean_volume -units: m3 -long_name: Ocean Grid-Cell Volume -comment: grid-cell volume ca. 2000. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel -out_name: volcello -type: real -valid_min: 0.0 -valid_max: 1e+15 -ok_min_mean_abs: 0.0 -ok_max_mean_abs: 1e+15 -!---------------------------------- -! - -!============ -variable_entry: areacello -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: cell_area -units: m2 -long_name: Ocean Grid-Cell Area -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude -out_name: areacello -type: real -valid_min: 0.0 -valid_max: 1e+12 -ok_min_mean_abs: 0.0 -ok_max_mean_abs: 1e+12 -!---------------------------------- -! - -!============ -variable_entry: sftof -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: sea_area_fraction -units: % -cell_measures: area: areacello -long_name: Sea Area Fraction -comment: This is the area fraction at the ocean surface. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude -out_name: sftof -type: real -valid_min: 0.0 -valid_max: 100.0 -!---------------------------------- -! - -!============ -variable_entry: basin -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: region -units: 1 -cell_measures: area: areacello -long_name: Region Selection Index -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude -out_name: basin -type: integer -valid_min: 0.0 -valid_max: 10.0 -flag_values: 0 1 2 3 4 5 6 7 8 9 10 -flag_meanings: global_land southern_ocean atlantic_ocean pacific_ocean arctic_ocean indian_ocean mediterranean_sea black_sea hudson_bay baltic_sea red_sea - -!---------------------------------- -! - -!============ -variable_entry: hfgeou -!============ -modeling_realm: ocean -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: upward_geothermal_heat_flux_at_sea_floor -units: W m-2 -cell_methods: area: mean where sea -cell_measures: area: areacello -long_name: Upward Geothermal Heat Flux at Sea Floor -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude -out_name: hfgeou -type: real -positive: up -!---------------------------------- -! - -!============ -variable_entry: thkcello -!============ -modeling_realm: ocean -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: cell_thickness -units: m -cell_methods: time: mean -cell_measures: area: areacello volume: volcello -long_name: Ocean Model Cell Thickness -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude olevel -out_name: thkcello -type: real -!---------------------------------- -! - diff --git a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_grids b/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_grids deleted file mode 100644 index 649bd1bfa1..0000000000 --- a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_grids +++ /dev/null @@ -1,287 +0,0 @@ -table_id: Table grids - - -cmor_version: 2.0 ! version of CMOR that can read this table -cf_version: 1.4 ! version of CF that output conforms to -project_id: CMIP5 ! project id -table_date: 11 April 2011 ! date this table was constructed - -missing_value: 1.e20 ! value used to indicate a missing value - ! in arrays output by netCDF as 32-bit IEEE - ! floating-point numbers (float or real) - -baseURL: http://cmip-pcmdi.llnl.gov/CMIP5/dataLocation -product: output - -required_global_attributes: creation_date tracking_id forcing model_id parent_experiment_id parent_experiment_rip branch_time contact institute_id ! space separated required global attribute - -forcings: N/A Nat Ant GHG SD SI SA TO SO Oz LU Sl Vl SS Ds BC MD OC AA - -expt_id_ok: '10- or 30-year run initialized in year XXXX' 'decadalXXXX' -expt_id_ok: 'volcano-free hindcast initialized in year XXXX' 'noVolcXXXX' -expt_id_ok: 'prediction with 2010 volcano' 'volcIn2010' -expt_id_ok: 'pre-industrial control' 'piControl' -expt_id_ok: 'historical' 'historical' -expt_id_ok: 'historical extension' 'historicalExt' -expt_id_ok: 'other historical forcing' 'historicalMisc' -expt_id_ok: 'mid-Holocene' 'midHolocene' -expt_id_ok: 'last glacial maximum' 'lgm' -expt_id_ok: 'last millennium' 'past1000' -expt_id_ok: 'RCP4.5' 'rcp45' -expt_id_ok: 'RCP8.5' 'rcp85' -expt_id_ok: 'RCP2.6' 'rcp26' -expt_id_ok: 'RCP6' 'rcp60' -expt_id_ok: 'ESM pre-industrial control' 'esmControl' -expt_id_ok: 'ESM historical' 'esmHistorical' -expt_id_ok: 'ESM RCP8.5' 'esmrcp85' -expt_id_ok: 'ESM fixed climate 1' 'esmFixClim1' -expt_id_ok: 'ESM fixed climate 2' 'esmFixClim2' -expt_id_ok: 'ESM feedback 1' 'esmFdbk1' -expt_id_ok: 'ESM feedback 2' 'esmFdbk2' -expt_id_ok: '1 percent per year CO2' '1pctCO2' -expt_id_ok: 'abrupt 4XCO2' 'abrupt4xCO2' -expt_id_ok: 'natural-only' 'historicalNat' -expt_id_ok: 'GHG-only' 'historicalGHG' -expt_id_ok: 'AMIP' 'amip' -expt_id_ok: '2030 time-slice' 'sst2030' -expt_id_ok: 'control SST climatology' 'sstClim' -expt_id_ok: 'CO2 forcing' 'sstClim4xCO2' -expt_id_ok: 'all aerosol forcing' 'sstClimAerosol' -expt_id_ok: 'sulfate aerosol forcing' 'sstClimSulfate' -expt_id_ok: '4xCO2 AMIP' 'amip4xCO2' -expt_id_ok: 'AMIP plus patterned anomaly' 'amipFuture' -expt_id_ok: 'aqua planet control' 'aquaControl' -expt_id_ok: '4xCO2 aqua planet' 'aqua4xCO2' -expt_id_ok: 'aqua planet plus 4K anomaly' 'aqua4K' -expt_id_ok: 'AMIP plus 4K anomaly' 'amip4K' - - -! -! -!============ -mapping_entry: sample_user_mapping -!============ -parameter: false_easting -parameter: false_northing -coordinates: rlon rlat -! -!============ -axis_entry: i_index -!============ -! -!------------ -! -! Axis attributes: -!---------------------------------- -out_name: i -units: 1 -long_name: cell index along first dimension -type: integer -!---------------------------------- -! -! 
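
Both deleted tables declare missing_value: 1.e20, flagged above as the fill value written to netCDF as 32-bit floats. A short sketch of the usual handling on the read side (illustrative only, not code from this repository):

import numpy as np

# Mask the declared fill value (1.e20, stored as float32) before
# computing any statistics. Data values here are made up.
MISSING = np.float32(1.e20)
data = np.array([271.3, 1.e20, 274.9, 1.e20], dtype=np.float32)

masked = np.ma.masked_where(np.isclose(data, MISSING), data)
print(masked.mean())  # 273.1; fill values are excluded
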
-!============ -axis_entry: j_index -!============ -! -!------------ -! -! Axis attributes: -!---------------------------------- -out_name: j -units: 1 -long_name: cell index along second dimension -type: integer -!---------------------------------- -! -! -!============ -axis_entry: k_index -!============ -! -!------------ -! -! Axis attributes: -!---------------------------------- -out_name: k -units: 1 -long_name: cell index along third dimension -type: integer -!---------------------------------- -! -!============ -axis_entry: l_index -!============ -! -!------------ -! -! Axis attributes: -!---------------------------------- -out_name: l -units: 1 -long_name: cell index along fourth dimension -type: integer -!---------------------------------- -! -!============ -axis_entry: m_index -!============ -! -!------------ -! -! Axis attributes: -!---------------------------------- -out_name: m -units: 1 -long_name: cell index along fifth dimension -type: integer -!---------------------------------- -! -!============ -axis_entry: x -!============ -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: projection_x_coordinate -units: m -long_name: x coordinate of projection -axis: X -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -! -!============ -axis_entry: y -!============ -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: projection_y_coordinate -units: m -long_name: y coordinate of projection -axis: Y -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -! -!============ -axis_entry: grid_latitude -!============ -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: grid_latitude -units: degrees -long_name: latitude in rotated pole grid -axis: Y -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: rlat -! -!============ -axis_entry: grid_longitude -!============ -! -!------------ -! -! Axis attributes: -!---------------------------------- -standard_name: grid_longitude -units: degrees -long_name: longitude in rotated pole grid -axis: X -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: rlon -! -!============ -axis_entry: vertices -!============ -! -!------------ -! -!============ -variable_entry: longitude -!============ -!---------------------------------- -standard_name: longitude -units: degrees_east -long_name: longitude coordinate -dimensions: longitude latitude -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lon -valid_min: 0. ! CMOR will add n*360 to input values - ! (where n is an integer) to ensure - ! longitudes are in proper range. -valid_max: 360. ! see above comment. -!---------------------------------- -! -! -!============ -variable_entry: latitude -!============ -!---------------------------------- -standard_name: latitude -units: degrees_north -long_name: latitude coordinate -dimensions: longitude latitude -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lat -valid_min: -90. -valid_max: 90. -!---------------------------------- -! -! 
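
The inline comment on valid_min above spells out the rule: CMOR adds n*360 (n an integer) to input values so longitudes land in the proper range. That is a plain modulo operation; a one-line equivalent (illustrative, not CMOR's actual implementation):

import numpy as np

# Equivalent of the longitude normalisation described in the
# valid_min/valid_max comments above: add n*360 so values fall
# in [0, 360). Illustrative only.
def wrap_longitude(lon):
    return np.mod(lon, 360.0)

print(wrap_longitude(np.array([-180.0, -1.0, 359.5, 720.0])))
# -> [180. 359. 359.5 0.]
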
-!============ -variable_entry: vertices_longitude -!============ -!---------------------------------- -units: degrees_east -dimensions: vertices longitude latitude -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lon_vertices -valid_min: 0. ! CMOR will add n*360 to input values - ! (where n is an integer) to ensure - ! longitudes are in proper range. -valid_max: 360. ! see above comment. -!---------------------------------- -! -! -!============ -variable_entry: vertices_latitude -!============ -!---------------------------------- -units: degrees_north -dimensions: vertices longitude latitude -!---------------------------------- -! -! Additional axis information: -!---------------------------------- -out_name: lat_vertices -valid_min: -90. -valid_max: 90. -!---------------------------------- -! -! diff --git a/esmvaltool/cmor/tables/cmip5/Tables/md5s b/esmvaltool/cmor/tables/cmip5/Tables/md5s deleted file mode 100644 index 9016fe4984..0000000000 --- a/esmvaltool/cmor/tables/cmip5/Tables/md5s +++ /dev/null @@ -1 +0,0 @@ -{'CMIP5': {'cfSites': {'11 May 2010': '80506c07ed63befd33f9f727b78bde55', '17 September 2010': '7876f3c8b9f817348f45af63226af05e', '19 October 2010': '80264b0dd9e203ce916542fa15c6d272', '24 May 2010': 'bf9e8c5576fd70aa92d11fe3d4317e9b', '28 March 2011': 'f7c44d2b2fe147eef790f3a294a137b9', '12 May 2010': '0ebecf85ba30b2223360a29ece880be4', '26 July 2011': '76cc5a96989ebb70010fcab14f04de43', '09 June 2010': '569fd7b653d4aa55cd229227caa67822', '22 March 2010': 'fe60fd7cfbaccc08c75a91dc67a68b99', '15 November 2010': '635f0e6676162c743b15d824033afb04', '23 August 2010': '4a2774cbbde4d5f69c9541e6b6248d00', '01 June 2010': '2eb2e9a6a1db0e18dac73e04e0dfbe47', '02 April 2010': '650c6032a9cbeedb142a41b923e85f22', '11 April 2011': 'd4bd92f600f09b1599927f929db7fe5d', '26 April 2010': '6fe91d86689e2fa1faa2fe7164876ace', '17 July 2013': '0c3bd83f7fb976f5861fee1e40460ab2', '01 July 2010': '96628b3875e74a8a515d0a1e500710d8', '11 November 2010': '6be9b509f02f6118bfd2b0062f27df9a', '14 September 2010': 'e7d36b34ba0a0142e8c24a0c43c6b15a', '12 March 2010': '192f07ba869af3794494ea0dc3a72a54', '29 April 2010': '27c4a93f80a4282ef69f23391c699f01', '22 March 2012': 'b8ab2ebe43833b25972010a3c1fe6ba6', '31 January 2011': 'c9db4a770f497b3f6f236135b0921377', '07 July 2010': 'e0d03c9f9ab6151b27e5af641a6ac00d', '25 October 2010': 'ff381193c4e025bda3c315dc7018ea08', '12 January 2012': 'f58ba903857b8191662180a8128ca975', '30 November 2010': '52a050381cb8b1aae0169e8c09b402cf', '06 January 2011': '24b9cf8db6e0e01f0a2f25d40bdfa706', '02 August 2010': '2c97faef852f74429882098d038c7b96', '27 April 2011': 'a150c803ba8e2ec673209e9f613ca78d', '08 July 2010': '3d8a12a9970f8c0c8a0ff108451df3e2', '12 November 2010': '9c695f150db16c9cade9ef273014b4bc', '26 January 2011': 'a8ccf7099e95a4352a6fdd5364e390bc', '10 June 2010': '7aeea92e38f9ca3fccf5e92f9cbbbeee', '28 May 2010': '7eea57df7c12c4d8524da28aa78fbe94', '22 October 2010': '74a468c3cce5927e1c2ce35238df2146'}, 'cf3hr': {'11 May 2010': 'a59ca1270ff3036d417a7f2217598266', '25 October 2010': '0411f9fce5f7d46c64135ce1ac2b3f6a', '19 October 2010': '8b5c2f197b70a103393838ebd4075c6c', '24 May 2010': '61693a46b06627dcd6ffe8ceff288ef0', '28 March 2011': '0a6cd0a3490ef913de1603e050028d9a', '09 June 2010': '8aad2bf8bea26504a0e247c1737ea4a0', '26 July 2011': '9bddc0ef2f3853878612f91e4b88a6e0', '12 May 2010': '905f7ccdd5a31cb71e2a6e3b339cd0e1', '31 January 2011': 'e4778a751fab1c990cdde28a1692e6f6', '23 August 
2010': '5ccea11a7c3d165c76df3d9eede395d0', '01 June 2010': '619aa3c816bc25a53335589698e64968', '02 April 2010': '3394448440a54597570cc877dcbaca7d', '11 April 2011': 'c39107ec4994d0b31572fc740afe1879', '26 April 2010': 'b0d0de6ac8d5ef997347610ea6a51d6d', '01 July 2010': 'c0fe9e92115d75292f5009e13761cb4b', '06 January 2011': '3f683b01939f2813f355252e984e6ae8', '11 November 2010': '35a8e11c7f57f3128517d21d177a8204', '14 September 2010': '1669750f2919391d83f949de05a1bfcd', '12 March 2010': 'ae9a82f01824d9012aa0657cd7896a9a', '17 July 2013': '4fe020dcfff962e5f57f4b269a155b59', '22 March 2012': '2b1f0a7bd8dbcc045731096548540bc8', '22 March 2010': '712cd14ab6a28116da828ff555e00c4d', '07 July 2010': '27949d438b4c04d481ff1d9790cff60f', '17 September 2010': 'b71b239f5fac1e0588d0b15bc4d34ed8', '12 January 2012': '61ea8d1c6dccd52fb03cb4a8c8e4164b', '30 November 2010': 'c415aa249713c89ca88d3ca8dd143566', '29 April 2010': 'fdc4fb38dd4cfb7f066329ed43a9e805', '02 August 2010': '9a94c257c8aebe1b834631c067728357', '27 April 2011': 'b445fcb96291160f776669301c94153d', '08 July 2010': 'a52efa3fc3fa9464cf3378e936320f55', '12 November 2010': '7aa138929397042fe2db914c2e9c0d22', '26 January 2011': 'f356796dc38b7aa7533426a4d1335067', '10 June 2010': 'f70ed019bca338b2d73296e1924f3df4', '28 May 2010': 'f6e966842fed498f0921365983918119', '22 October 2010': '6b3fd20804f1306fc7413997cd478a23'}, '3hr': {'11 May 2010': '9f293a2c84001fbaa5aef7357ea2828c', '25 October 2010': '021eb679fe02596b994f03a8b201a11c', '19 October 2010': '8295c191b254ca0795e974d7f140b72b', '24 May 2010': 'af8b1d3d63376942a55d779d0fb9f504', '28 March 2011': '56599fcaeafb7e8496723e994b913888', '09 June 2010': '27eb969c4cd719b0622bc828d6b9a445', '26 July 2011': '13ce66e026f3c89a1df978a80552114e', '12 May 2010': 'c8ebc04b6487634d9719f4974265fd64', '31 January 2011': 'f92c4fdcc9ed1cb63c48f839379e34a5', '23 August 2010': 'f211766ca746969943a9fe6b4bd45694', '01 June 2010': 'acf6e6c17995a9b0f4d7ecd93f1ffb35', '02 April 2010': '1e9df4873834764950e7cdd1f545541e', '11 April 2011': '07c5faef1d2f64f2bfa5d41b0fc678cc', '26 April 2010': 'dfae3d259acdbe58fd8b4e3f535f02f1', '01 July 2010': '9ff56e4d1df9199fafe41bbe78c5502c', '06 January 2011': '2ba744b01bebca11b2e0417581a433e8', '11 November 2010': 'c4fecc3b0c095cf96b1f7d5a3f6979c4', '14 September 2010': '0dbd5efb8e7cb9b30f06c0ba15c1ee2b', '12 March 2010': '8249007b0a473d19554b47efaff87a39', '17 July 2013': '09fc6805844997a47b81e14e64eed4d3', '22 March 2012': '592c2053b1820bab4b6995650094789a', '22 March 2010': '7e3989c1ac5dd2455892cd25b8fd2a1a', '07 July 2010': 'f07a287d8f310a7eb22b5753ec9c192b', '17 September 2010': '0f0060d0f895564e19189bfea3fb9a8a', '12 January 2012': '7eb57b250935fac136ab277443a9144f', '30 November 2010': 'f3c8c88995990f639ba5898e97d106fd', '29 April 2010': '7ca1dc5009cca6779717dd557e5edc27', '02 August 2010': '110a779349596be375d3b7fecabd338b', '27 April 2011': '39e893709ceee185aecd8e45fdce32f2', '08 July 2010': '82f5befb7bd5f11945e13f5c774ca0b8', '12 November 2010': '3d0f50f5c03069b9f45573283964f1d4', '26 January 2011': '0428e8c76592261adfc0ba1761637b5c', '10 June 2010': 'e53cdb81ec9f7088198133ee086fc966', '28 May 2010': 'f4f7421bc9b4563952bad7edd9e787b0', '22 October 2010': '505fd7eeeedf85127677d8407b23980d'}, 'cfOff': {'11 May 2010': 'aee3517609f9c24b48d210742be22bfc', '25 October 2010': 'b9c19aae50935a3999ac38838ca64546', '19 October 2010': '471f86bd5adea0a5547086e61f7f9228', '24 May 2010': '5d3f4b6daef653409aac76439c534778', '28 March 2011': 'b6bf5cbc84b415fcb17d676aefb31fd4', '09 June 
2010': 'c77f28ff0fe2252d8a20169ca802a8f4', '26 July 2011': '7c82288b193cfac9b94f5503f27a3609', '12 May 2010': '3079982b07452f20df2a26f924065f32', '31 January 2011': '2b1166728ba414bed031b0f73c0e093a', '23 August 2010': '00dcdaad4b4040b70ce901f28a6f43b8', '01 June 2010': '0db2ab6935aad13a6227a20e63c6c486', '02 April 2010': '23a2c16a166a9b9bc6d51769375becd2', '11 April 2011': 'f1e7cfbcd61fd57cde19b7fec2bbedce', '26 April 2010': '8ef2dee6b9ac93c654d67562d01316f9', '01 July 2010': '77b45b1f470d649d696953a53c9afb2e', '06 January 2011': '961c6d5204a46be6c497618dce9e1e30', '11 November 2010': '3aa27d69912d316e9423102ab2952992', '14 September 2010': '18b0ee56491ce281ace46ec2cd4c0f8c', '12 March 2010': 'a573ebe565340a61697e7deced82b9c7', '17 July 2013': 'f0cde494dcf95c6ed427a6b6375f8608', '22 March 2012': 'af07a0cf2e935fcbeeef2683e458c20d', '22 March 2010': '4f62827e7661dd79a7bf4df1068b45b8', '07 July 2010': '1725cf6068006e9d7a8e77ffe60510af', '17 September 2010': 'b933cdc8d9cf48c16e4f3216562a9f8c', '12 January 2012': '330de8d8d53517a651d730f978dd645b', '30 November 2010': '6f096714493a821b5d015cc5fd569584', '29 April 2010': 'b022d0d8359dfd8549dcecb4a440459b', '02 August 2010': 'e8819e0496e8210bb00f72f4f97bd58a', '27 April 2011': '6fd2e599aaa4fa5170857b37ce96cc6b', '08 July 2010': '1d97433ef9df7b3a452a2fbbeb6b3641', '12 November 2010': '3f738ba57136d65b96fdc78db9e20c0e', '26 January 2011': '6b5f202e8287d3d173f2e69a76ee4e9d', '10 June 2010': '4311ce8f83ceec96ed2f3ec7a6fad034', '28 May 2010': 'dfff9394b235db3fa52960418a7286f7', '22 October 2010': '9046a88d2097af032a322fa63cdb0d86'}, 'Amon': {'11 May 2010': 'e4e4554008c52abcd8fcea09f7f93f8d', '25 October 2010': 'e40cd150c9c2a3a06ded4ee92e84b461', '19 October 2010': '81f2d61a443bfcc780c9ec2dd27fb4a1', '24 May 2010': 'abf08fbc54eeceb061e588f017462dc4', '28 March 2011': '0afcb2d7f190557bfb8b993eeabc39c9', '09 June 2010': '382b999448888cee3660598699a724c3', '26 July 2011': '976b7fd1d9e1be31dddd28f5dc79b7a1', '12 May 2010': '12e195f2fdc87c907b38b38e15851337', '31 January 2011': '53b766a395ac41696af40aab76a49ae5', '23 August 2010': '045d668827988440ad3ba59116110304', '01 June 2010': '34d587239da7376dd1f3e366a50cf500', '02 April 2010': 'b6f8bea7bf003b2bcd493ad037bf7f24', '11 April 2011': '1cfdc7322cf2f4a32614826fab42c1ab', '26 April 2010': '9f677ce2bf23f4e1d622feef2ca3f434', '01 July 2010': '9b2907adc97564335ea86d81ae46df81', '06 January 2011': '085e3f5c098d203b2fd5d296226f41b3', '11 November 2010': 'dca41799988c6420df70af862b30f492', '14 September 2010': '6f10603fbd99e766f4c6c3ac3152f294', '12 March 2010': 'b0b2c52562fa80a556684fb6195ab135', '17 July 2013': '97d3bd8711883aca8d9e383a7e664afa', '22 March 2012': 'c7ff6fab815d2450728885f7e379f6b2', '22 March 2010': '95e9123f238c2b42fe8cf8d69e35d139', '07 July 2010': '2447c62949c32aade1316c738861f76e', '17 September 2010': '2bcb9818a4f88c67db40ebf7d5df7ad1', '12 January 2012': '95a8ac1233784682d425311ca986756b', '30 November 2010': '525cb891c7d82657304f61cb0d785a19', '29 April 2010': 'a57cdc538f256caa6815604210dc22b3', '02 August 2010': '0b3faa1f662bbee372fe931cb6d1b943', '27 April 2011': 'a5a1c518f52ae340313ba0aada03f862', '08 July 2010': '0f29d14a72ad86a2466d830072657eac', '12 November 2010': '6e535ddfacb41fb7a252f4862fdc5766', '26 January 2011': 'f518a4407dd3ee205d7cabfb6891c7c1', '10 June 2010': '5d6844585631dd2c048b26e8c27a36ec', '28 May 2010': '0dfec5986fd66a2f70900ae5266c4a87', '22 October 2010': '3754c1ca774f699c4d7f7f24fc4c5d0d'}, 'cfMon': {'11 May 2010': 'dbefad89e3fa487ade7986825d6fe7d4', '25 October 
2010': '1e157f1f973aaccb1b977d21d70ae051', '19 October 2010': 'c08d10d30c76c5e6df184d820430db4e', '24 May 2010': '1224cca03c126a0b1207f86b1ccbbd5e', '28 March 2011': '90645c050f8bbac5e5413e119b89daad', '09 June 2010': 'e1db34fcdd19c785857fc7626225862e', '26 July 2011': '22fb4760ba7f5845955919d42bf45731', '12 May 2010': '03de0e6784d9468d5c3b21a708a410bc', '31 January 2011': '17a583b65f38f7208006836881ecd5c6', '23 August 2010': '4163799d7e6ca9ed6702f92bceb4a83f', '01 June 2010': 'be7cc61031cc4576cb79127b3341a599', '02 April 2010': '9fcc3f2ebe69dbc949642ecb88e1194d', '11 April 2011': '521b84c0e9e74ab41a36bc59fe6d64e7', '26 April 2010': '93d04dd9aa241cc63a84ff5a9d8c61b0', '01 July 2010': '80c97d7c98abc2f485bb402dd3b7e915', '06 January 2011': '568dd0f54df98a4177c4a5939531c8e9', '11 November 2010': 'eecec4e0c8934eecefa5618e92a4912a', '14 September 2010': '5e2cffb7cc4dbac6079b751a6517b870', '12 March 2010': '4e734968f407cb120679ae76cbabd19f', '17 July 2013': 'bcd907c6aed0db845ee1cd0f79d3324b', '22 March 2012': 'f5df5c23c7fa35287e070beaa07b08fe', '22 March 2010': '27a6c2c3f3eefd64643850e42b29ae95', '07 July 2010': 'ddb3a708296e483988d1fbc98becfc4c', '17 September 2010': '01afab053639ea0c2460f0b62fb84692', '12 January 2012': 'c4968da7d23d1e404cc4605109e953b9', '30 November 2010': '65b7963f47c0b3f0120f2a9778c8eac8', '29 April 2010': '1e18a3180c49a801bb0a3763473d61d0', '02 August 2010': '56a4d049c4461e85febd902424950b81', '27 April 2011': '9dbcef024b332923d92f3d4ed1bb88f7', '08 July 2010': 'f348eaec1ca7e1bdce8f3fa41c4e4304', '12 November 2010': '1d41e779e2782143a255b88ebcecafe8', '26 January 2011': 'd69493ba9f03dee6dca7087b23b00cd6', '10 June 2010': '0e6868e4fe53670680b3d896e197f58f', '28 May 2010': 'c37adda1fb6d9fd8a88b7854f3318ab0', '22 October 2010': 'a4f6136f20c75e957b49a39dbd390e4a'}, '6hrLev': {'11 May 2010': '563f7b4b9db1587890c35a6feb4d9a83', '25 October 2010': '9222efc9a5a8679d8859aab6cd440b05', '19 October 2010': 'e5ae7a9855e4413deea8289e31aaaa21', '24 May 2010': '7a13cf3cf200cff92cbb106c3b0a95db', '28 March 2011': '581a244c03c6226a7b266fdcf0d501b7', '09 June 2010': '2656efcc5cf61affffb263effde97ede', '26 July 2011': 'dbe016edfd11d267710827810cf47b1d', '12 May 2010': '1e643249424bd620052d216950785b83', '31 January 2011': '028ca192fe560aa8f3f610dda5fdd7bc', '23 August 2010': '235242a4410e9b772307262dc755a17b', '01 June 2010': '0567c616eb71663592476f1f10b6a51d', '02 April 2010': 'd1b5ce14b3978b8665d576c2a6abc849', '11 April 2011': '583cb2bb254a9bc7ec5a7bb9cf07ce44', '26 April 2010': 'f4ed7d449df633b1c4924aa17a0f033f', '01 July 2010': 'edd7380c5c773cb9e72f37e76ea1c410', '06 January 2011': '55769dd1af97f1b1f79b595ba7cea34f', '11 November 2010': 'e77e0be5bccb0fd442b55a28cfad40dd', '14 September 2010': '964e5834724585c32ccf0290f64d7e18', '12 March 2010': '814a04ab21f7928f22728ca338e6a9bf', '17 July 2013': 'ac564db88efb927ad44351aaad6c2571', '22 March 2012': 'c1ae9c4865bd9ddac19664f4a3d0410d', '22 March 2010': '1b4de02ae5ce7f94e33a344101ebc85c', '07 July 2010': '19b8d901370b7beff6ec1ba2ba5f00f6', '17 September 2010': '93f88b74ffd53270c9cab6f6b0790fd9', '12 January 2012': '54e40e46bec20b1806d1f9da1742e5d8', '30 November 2010': '6c116423753a0c320c0cf3e31acd51b9', '29 April 2010': 'c71c8d10849d8a3759a2883858a0e723', '02 August 2010': '3a2153ad4b60e08ec681d434df327d9f', '27 April 2011': 'cf08abb8c40d2c5d1c95297b569db0e6', '08 July 2010': '6cc537fbd876969a1ceb125007ab7201', '12 November 2010': 'a6a60e3da21ccf69878be490a922015a', '26 January 2011': 'f028b6b367a03ddc18069ebbfff2231f', '10 June 2010': 
'5d2d178a4887ca16583c4500fcc2cbbe', '28 May 2010': '5649b5e565aaf90725e52923473237c4', '22 October 2010': '911f4c35b73178e7efb493cd6b96a415'}, 'fx': {'11 May 2010': 'ceee73b67736e0209632cbf0565f3016', '25 October 2010': 'd45f6af57ce4d1698fca7b5843cb9cbf', '19 October 2010': '4c78280a37dcd13557f43dd9cc70de91', '24 May 2010': 'bd6e3e0a5c82a46d84e40acee850a1fb', '28 March 2011': 'c046d0887aeb2bb7ed34d63897d7de8f', '09 June 2010': '14bda3afa90252a7e380250bb89ed909', '26 July 2011': '491518982c8d8b607a58ba740689ea09', '12 May 2010': '7a982ebca5c07d348561e23b056b678c', '31 January 2011': 'dd5903eb1b6f5aa24d6d6dd1679cef9d', '23 August 2010': '8fe1e359662d7edb809672e8ecd20026', '01 June 2010': 'eeedbe523773ea8145bd98134ce63cd7', '02 April 2010': 'ddb926db39ca9dff4b15cd19c7bffe68', '11 April 2011': '36427dbe3a79c7b7000fa59642973ea6', '26 April 2010': '36573de4231a9ab2e1e6dd06fdc332b3', '01 July 2010': 'a7ac4b51593af19085d34f3ddf41b323', '06 January 2011': '514d549dc247532e9dbc96e31906b6e7', '11 November 2010': '1899417a056ed59f2a5dbb05b7b782f7', '14 September 2010': '0ab4232d370f07b82b35568c03a0b4a4', '12 March 2010': '71beb3105440a206cccee47d4ca11e02', '17 July 2013': '3b0d4cb42c006e5b8c33b07e6b313255', '22 March 2012': 'ac5e2c418e5380cabbd90eb83fa6f8c8', '22 March 2010': '23d6a606a08f77d5e0efa2f553792184', '07 July 2010': '6be37421f818e25809dc8a217808ddf9', '17 September 2010': '6e811c03861e5ac05887000cb22824bb', '12 January 2012': '31f8e82ffa2e23ae0de67d9bcd483d93', '30 November 2010': '06cbf82706b95557ab73b5e815db061b', '29 April 2010': 'aed062cea93bf59cea4c089e45f88ea6', '02 August 2010': 'c7b7caf02dbad550bb6a29dffb520758', '27 April 2011': '10d4516edb0202bf85e1ae7cf0b86691', '08 July 2010': '2004b4a855b365fc68fac3f089c5707e', '12 November 2010': 'a3fb4d54ac41c92268e3d1d5bb59e4cf', '26 January 2011': '85fb18111a7f2909da2f8932b7628164', '10 June 2010': '6ace20d389ab3d1fbf7d68c8ddce4c8a', '28 May 2010': '064b1fa83c0626981f62638cad0cd6b6', '22 October 2010': '52e52e2710c778c0ab547c98192dce74'}, 'Omon': {'11 May 2010': '2d0ccdf5de652f1580245657b2cb6e2f', '17 September 2010': '7f1e239986a653271514846dec436895', '19 October 2010': '1a1bab09ac4772abec59bb3f364b2046', '24 May 2010': '9fd8559b31592d9a516bb7cafdf55105', '28 March 2011': 'd71862c6fdafa47720d8a402f39c8864', '12 May 2010': '5bbbf780fbcc32ecae95ec75e5925c2e', '26 July 2011': '25bb94a0408beca44c0f5b601258a94e', '09 June 2010': '9c3676570b53a7eacf0da4ccb132e1e2', '31 January 2011': 'd2d6beec2b8fea5bbed33920a6e08bbe', '28 May 2010': '0dcb11bfa3ed2e44be7dbc4a65b04b34', '23 August 2010': '32568fef98f13d8e600460194429ea64', '01 June 2010': 'c45d87b668792d8b44753f9bd52980ab', '02 April 2010': '28927d3487d4bb87c77b50d495980d21', '11 April 2011': 'eaf2e961aec11e7c91bc5e7a112703ba', '01 July 2010': 'ce70d7a18026d46eaac72ab9ed627cba', '11 November 2010': '8c7016c14989d5917aab04b07185f72f', '14 September 2010': '103bf8b76c901eb92e396008ae4e130b', '12 March 2010': '32e7e5c693ba7fa59697ce93603cafee', '17 July 2013': 'bb12d1e7a6569b8f76441393defe6986', '22 March 2012': '06bfa836151fd3ddc4a5bb32763568f9', '22 March 2010': '8d88ed317135c24d6a01f056ac09ca2f', '07 July 2010': '30b0147d4c08f78266a680e4dba5f05b', '25 October 2010': '857563b97888e34bf4efb95a31201037', '12 January 2012': 'c17931a1bad4e38015cbd162d50c56e5', '30 November 2010': 'fca219c0b072504597d99baa42649e74', '06 January 2011': '14b99a9dc80e3563f771f91e03ca1f4f', '02 August 2010': 'e05ca2ab73fa7930953fa1b5ffb08239', '27 April 2011': '340eddd4fd838d90fa9ffe1345ecbd73', '08 July 2010': 
'141295f2779d3084924ae119f2fe4bbc', '12 November 2010': '21c7ac8643c73502f98cf73d716ab773', '26 January 2011': '72be8ec9fdc47984987a4d051070143e', '10 June 2010': '5d379ef8f72f147aa6a4d76d8e25d299', '17 November 2010': '8c07c4b31d2d11d879a9723a9e781e4a', '22 October 2010': '459b64863ea0cddb1a3ff6bd38575ffb'}, 'Oyr': {'11 May 2010': '527d920054556b540145ca03f4929ffd', '25 October 2010': '7dac94270ddeebaffde40607e94a50a9', '19 October 2010': '6ab6fc52c1822e96b436ce1c9384255c', '24 May 2010': 'f814cbfe0580345a85c091ef1b54bdaf', '28 March 2011': 'f612fdbbb78488dfa94e4308c5ddfde2', '09 June 2010': 'ceaef2c58732dd8093737b23e2f9f99d', '26 July 2011': '4a4fa20f3a3627af555d7528b7e23347', '12 May 2010': 'b8fbd4fc69d1742f5eff475df79f192e', '31 January 2011': 'e5b82c4a1077c44b150ff43f0c5fdb34', '23 August 2010': '88a495d04230e0093e563463a31ca931', '01 June 2010': 'bb68fb7b1f66ca226cd108ccf20047fa', '02 April 2010': 'e828a0a6190fbdea02bde73cd4db994c', '11 April 2011': '824c95535c8e3f74f440cb97120988f0', '26 April 2010': 'e26c6428fb49ad822e20ee1a7f4c3ac4', '01 July 2010': 'b774f001ee806c9eb932698faf4e15bf', '06 January 2011': 'de99a58102bd40e65b71f6ba739334b0', '11 November 2010': 'cc9ca9c8af0ed8d03d30214564816136', '14 September 2010': '06da4397beb9d242952a57b778fa3bf1', '12 March 2010': '9718b877ea7f6374e7bb7561d01af98b', '17 July 2013': 'e210696796104a790f072bb3718780a1', '22 March 2012': '603a712b9349d896b5459e44322908b3', '22 March 2010': '1ad84e2b17655de2c4ceb95e05cc1e35', '07 July 2010': '5225d811469f0ed452c307eacb71e0c8', '17 September 2010': '62867ce71757c9ddd57389aee731a17b', '12 January 2012': 'a072b2c38f3787d5e6cdf938e23b9658', '30 November 2010': '03db314a71c33cc16ed91f4817170afc', '29 April 2010': '1651b65e299b6aaabef92845dea6692a', '02 August 2010': '4e67cd5378788aee1de2384725a41a89', '27 April 2011': 'a816306750f284585dc77210f193f7bb', '08 July 2010': '8e0c140b20425419299e555a90ab15bd', '12 November 2010': '6f645331f36ab31e48e34d413c782f57', '26 January 2011': 'eeb4ea17162d4c127238678c72573374', '10 June 2010': '73653956328160bb946f229189fbce5f', '28 May 2010': 'e2a6acf84cdec51f0c542111d66e7d9e', '22 October 2010': '18a745403ae5095d532db2c2b1905d32'}, 'grids': {'12 March 2010': 'd2a03433140cb366e0009d5e8b064e34', '19 October 2010': 'ca01dcb5f14d9a7286135fb740550ee2', '12 November 2010': 'bf68c67ba77793dadceef7d115c7ffe5', '11 April 2011': '42e3515b68890b8cb3121c78239fa9b5', '11 August 2010': '3dddd7959e11a27723ec5339306d1120', '20 September 2010': '35ff89deab99c5469dcc89ebc4b4f442', '28 March 2012': '560c2e182080b0940a45127a6570eda0', '19 September 2010': '426154be8fcbdc0c1d6c188858131577'}, 'da': {'11 March 2010': '6e0a7e19df94506d06091b43082677dc'}, 'cfDay': {'11 May 2010': '378455467d0ecbaabd8d93db9855bd1f', '25 October 2010': 'e230b25242e9f7a6aa2acfa9eb195ab8', '19 October 2010': 'd87662b52af1386bd95c7651dc97c237', '24 May 2010': 'eed1d3cda3e535039bb6e0c5f9b9d6c3', '28 March 2011': '5f1d5ffff5512cc4ba5ace386ba17b0e', '09 June 2010': '78bcd978d11b5b2435b4bc85969ba9b3', '26 July 2011': '15b8dceaf565629fdf801247cdfe3200', '12 May 2010': '2bbfcf8d253f38b550e68b0d7b242a13', '31 January 2011': 'f16985ac83c91f13f72a654fb8ba186f', '23 August 2010': '20a096eaaa3c5c140c8dde15bd8f8e76', '01 June 2010': '04af05cd53dabdc5052a962bf28c1607', '02 April 2010': 'f0eab40e698a4b26949da066639897b1', '11 April 2011': '5b428ba8235636d1c2424025b9d1beca', '26 April 2010': '3771e19a505af7b1e007eac78670f4ab', '01 July 2010': '37843ebcfa1862c12ee904e355e86cbc', '06 January 2011': 
'4ba6967e3ae7fb4fa48f098cd852562d', '11 November 2010': '007645be3e4788db8bb05397c174b1ba', '14 September 2010': '20e6c21202c6cc4fcbc6b85ef5688998', '12 March 2010': '6ab7019daf928d9f1ceda58323b1749e', '17 July 2013': '9473d972c59e82c2d153125fa8485af9', '22 March 2012': '1a384a56b92ab4fe4499c489a1044653', '22 March 2010': 'cfe9075ba940032f9d18707d10bd4d1f', '07 July 2010': '595ca1266fa7ac469bd54d810d7d2d5b', '17 September 2010': 'f86ba023f81f75f89faaf1c8c0a45a81', '12 January 2012': '1a526bb689806f1754130e313dcd1bd9', '30 November 2010': '8ca80f3746d7324f8eece2bea63355d4', '29 April 2010': 'f92a346a102778aa4caaf3804bec2c7e', '02 August 2010': 'e8cc7c2d334a49efbe5c044b74dd7d6c', '27 April 2011': 'cfad94c06919b669836005cf56cbb78f', '08 July 2010': 'c2dc1c9036c53971505b892fa3a3764e', '12 November 2010': '35a022f3d4c503ad7748557a3814d64c', '26 January 2011': '86d15743738df4da68ed2605521d041a', '10 June 2010': '872bcd0c18771af5672bf35454019e86', '28 May 2010': '49b6cd8183be7acaf580984961d0bffc', '22 October 2010': '0f54a4f4776262946481393eb211cac0'}, '6hrPlev': {'11 May 2010': 'b8d13f7aaa2296af505a68dc6608c665', '25 October 2010': '8e14c5beaeea6185824908626ca399cc', '19 October 2010': '8359621ab81242b8b891194e7b1a8006', '24 May 2010': '867a083d0dd5807e2bbd057c1f84e572', '28 March 2011': 'a6f5845a8465f9a3193614237a247f3e', '09 June 2010': 'c8a7205bc95fda5d97160a4727cb5433', '26 July 2011': 'cce5a250b13747b7af5d1cddcc61873d', '12 May 2010': '3d79c767038805af92b29f0d35ed3181', '31 January 2011': '2486865d573b5e4add809e0c04e17345', '23 August 2010': 'a0cf4faa32d6d70f32d5691d1df835e2', '01 June 2010': '9593abe01052c91403fcda79896635e1', '02 April 2010': '2df2ba0c09cc65a054ca529812e96ebc', '11 April 2011': '0fa8c0083b4da4cef7570b2f8384d0df', '26 April 2010': 'e73240fb5cc142534cfb1dfd68a2c581', '01 July 2010': '7d2d2d81c44c69bd1b752b1e5853da51', '06 January 2011': '396e106ee90e4a2df14465e90a6f8cb3', '11 November 2010': '0b0cde80deef0a0149ee87334bc9b915', '14 September 2010': 'c8249d3d4a79b79444c4b31f9d1f880f', '12 March 2010': 'd9cd927f52f494b19cbae19ab80812d9', '17 July 2013': '0fabcdcfc2b9485d07289a2e7c4d5e0e', '22 March 2012': 'b00113e7ed9a58eda8fbe3f5a7ce789f', '22 March 2010': 'e01082cc0a5a732cc62f612abb9b9985', '07 July 2010': 'fd2e3a860e08582a7d091b8d76780b51', '17 September 2010': 'fb420cd690a8dce494fe881d55dd12d3', '12 January 2012': '13256f969b96b7fefa7af694084eaa4f', '30 November 2010': '8b07705b7830b417c025e58d2d147699', '29 April 2010': '340e7ecdf04ee54bc4d41a8ea57b2793', '02 August 2010': '97a7eb1583bf9a816b2bd7d84b762f9a', '27 April 2011': '00b465eee32d6d11a7e57f5cc8e067f8', '08 July 2010': 'a49f3b6ecde9e2dcb76f9b99d770a4a8', '12 November 2010': 'b8a45259fa23a628ff56429905b29431', '26 January 2011': 'aedc18aa6a011480b128b9370335955a', '10 June 2010': 'b0a75f15ace1c8a7dea30ecf4644b202', '28 May 2010': 'c1644081c2dbc4d8dfd85f1e46ea9608', '22 October 2010': 'f7aefdc7bcab370905d2ba572576c147'}, 'Lmon': {'11 May 2010': '3bdcaebc6b08a7cf84415feeebed8b9c', '25 October 2010': '4bcce5e25736cea8e87a0cf8df09d34b', '19 October 2010': '8708d6b811ce32c5ac3e719701b14ce4', '24 May 2010': '2d93d785985c4d6c29da2f33368f61e5', '28 March 2011': '7e3724f7ab2d03605f40c6a3b60612fc', '09 June 2010': '24235e017983479779ba51d8187fe0d9', '26 July 2011': '07f3ab0937f49f401723ad1417906100', '12 May 2010': 'cde39468cd5a720342c94322249adf63', '31 January 2011': 'a84ae296f75bb85ff61668fac8fcf090', '23 August 2010': '3adb2b84ff9e8210239a898e53f5389e', '01 June 2010': 'c48b42eebfd300346db30ce97f70737a', '02 April 2010': 
'ac27db67ef68191b5d710de35ca2d6b1', '11 April 2011': '738e0f61cb381d2b73d949ee76b893de', '26 April 2010': '6ca0347f544e3cbdb6f036f7f78b1669', '01 July 2010': 'cb7918fa6a401588ea5b194aaa1f481f', '06 January 2011': 'da7a3c5988b6174b05e36686be3cafb6', '11 November 2010': 'aff8b007563b8c83aec4d731b21835d1', '14 September 2010': 'c7a5c6fae283b7a4ae764a8b0a6cf6b0', '12 March 2010': '5f1e824244e15c318d61f9c0e539f8c2', '17 July 2013': '3f17a25f6a4bcd4bb2ac6986db9359fc', '22 March 2012': 'cdd8311a8ac03848f31b8b5ec9736d78', '22 March 2010': 'a268b38f8737db32cf38be9b5f997dbc', '07 July 2010': '8d8da76e0dcfadd9cad44cb4a187c4fb', '17 September 2010': 'ceb4ec55b9ad65c6f8d4ef3aff1b8ca1', '12 January 2012': '02d5e7d546567ffd5638e5a7d7215fd5', '30 November 2010': '09ab215e1245c7bd2d3fe9c53b1fa861', '29 April 2010': '486b93eeab6a8793a42b2ba233079b6a', '02 August 2010': 'ba5be640353d2ba4bac20188c9438474', '27 April 2011': 'c4244dce0826a43bb0b259f293e2f742', '08 July 2010': '5aaeda289feb0bee3496e285042f6b72', '12 November 2010': '4e65f23a1cd47b3cbe702ff175acc8f6', '26 January 2011': '7c33b644f9e48e2fd9eda21d7240c1c4', '10 June 2010': '53a27e7c52ace60c3bfa0bfd66c0fe4a', '28 May 2010': 'abfd32f1054e75241246b8a4449b0eba', '22 October 2010': '1e6fa7d06912c4884f3f72cea9eb2492'}, 'LImon': {'11 May 2010': '2c39ed63325a36eb52e9425cb4a16a1f', '25 October 2010': '713e2174b2678f385bb233cb63467945', '19 October 2010': 'df91f6e721af3fc4d1d8358b1c709255', '24 May 2010': '81451608bae8fbaa0ef31578b83718cb', '28 March 2011': 'a1fb39be5c00cd06608993110cee9005', '09 June 2010': 'b11403de802f1788fc788ed12f22b548', '26 July 2011': '8dd5995fc9b4340cea492b7027b31c57', '12 May 2010': '2e68c69013fe51ec2c3850361ff39732', '31 January 2011': '6f05b7e5f64aaa34bcac127ca47ac655', '23 August 2010': 'e8b5f402770c03fbec87cb26f4c1c3cc', '01 June 2010': 'd0621fc868ff7c9b891e9780b9ab510e', '02 April 2010': 'ab39191bfac44f43ab9f50e330e20bf1', '11 April 2011': 'b51a5c08cdf6593edd03c7b1c311d305', '26 April 2010': '2ea85da118570c7aa0de9b33671eb302', '01 July 2010': '4a435e63edd293e2638156647a8d7d7e', '06 January 2011': '2dbb6147ef54c3e854307e67cdd5ae5e', '11 November 2010': '89a033f5ceb8d6f13b7ef728dbf738fa', '14 September 2010': '85b01c51d739c91b9796720d36bf1abd', '12 March 2010': 'bc6b4c7356aac811cc5712a1f10c17ee', '17 July 2013': '06603759dd6e0093c7e59c0984656013', '22 March 2012': '8d4296ff4ac0853292ed3be722dc81b8', '22 March 2010': '031807f20998b0c51a8047d3ceb12262', '07 July 2010': 'b2c497d9da0a240d17b56ed376c0ae88', '17 September 2010': 'a58d1ba22107a26336ab73b6d712339d', '12 January 2012': '429410275cb5466e41180ad9466db1bf', '30 November 2010': '95db1553ea05869663d3b9cc833406f6', '29 April 2010': 'ced6aa36efacd3afc3f2ce7255b30ae5', '02 August 2010': '87e81cbc9bc41adce07f59e9e416e975', '27 April 2011': '5a70adac85c24b52fe62c758c1dca0e5', '08 July 2010': '964bf6217e86c6fa7f0d047fa688bca6', '12 November 2010': 'b770dc951b9dfd10cb4402dcaa33c989', '26 January 2011': '9b9f13ba37c3bdbd0ad1cc784ad600a6', '10 June 2010': 'b1a9cedd3e8f0e05132021c71e9d7b1e', '28 May 2010': 'df3676a657c1b1822899320d91cdd45e', '22 October 2010': '43760bb3c90e35316e52f690cf7c9c8b'}, 'Oclim': {'11 May 2010': 'ea596ac32110c9af6ad049df23cf79d7', '25 October 2010': '278f8f457d03aaf9668f931e1d7f3ab1', '19 October 2010': '4351865a4833758c697babe4d68c255b', '24 May 2010': 'a2a559c2dc85db5701276e96d5325eaf', '28 March 2011': 'af155c646b8f0d87c769270d6e0a119c', '09 June 2010': 'e758cabca10ef3eba914e702a82cbc8d', '26 July 2011': '80851943347ebbdf69f69e0ea94c1061', '12 May 2010': 
'1e672f0814f4ff3b51a56b02efa6c223', '31 January 2011': '760664bb5b8e6901a5c0f3e14486c01a', '23 August 2010': '41857f0b51401b5b692a61b8fe73db7c', '01 June 2010': '821d9f0558ee9f605382e04e48313832', '02 April 2010': '626aa1883608cf471a97824c56d0295a', '11 April 2011': '02c858e13f41cc2d92dde421ff54f504', '26 April 2010': 'c4d9d68f7579b2d83fad74076410d9e3', '01 July 2010': 'cf37d4050f491b3054ce3aff355f325a', '06 January 2011': 'd60ad4b93a87f0b999e24e93901360ff', '11 November 2010': 'a0c992659e5ed596fdc3f814123e5ef7', '14 September 2010': 'a85f9e014db585172e9ae6d20947fa18', '12 March 2010': '66bbb3618539b3bd9720ee049ac3f8d4', '17 July 2013': '347d8e8b26687a88462b5ed71c4ef21f', '22 March 2012': 'cf6def352e089b3109600d2305498370', '22 March 2010': 'af94edbc6cbef78448bfbf205ff53b04', '07 July 2010': 'dbdbcab9408bccebb9f03f59f9b253bc', '17 September 2010': '0bb5e199da1eea041c2411395e6c29d4', '12 January 2012': '6af43a186515f2dc603ea7cfced6f0c7', '30 November 2010': '3faf830a70da60f6064baa497b0ed3d4', '29 April 2010': 'da5cdaae6606be02175b87f4cc73cd1c', '02 August 2010': 'cc1bd091f0b33b35672f656805035da2', '27 April 2011': '5145db518d4d2958d20716b09139a8e3', '08 July 2010': '5a1650d13a86a83443fff681c7303707', '12 November 2010': '840d91afdad624b8241f7ff38de44adb', '26 January 2011': '984ef37481065dfe0c3214c5ce6999d9', '10 June 2010': '48d6fa0d97b3a6159cf039f2a5e41803', '28 May 2010': 'cce203c9dbc5d1cdd4f8639feb11757e', '22 October 2010': 'f0995bb3540918248642d83f676b8c83'}, 'aero': {'11 May 2010': 'f3f04dd2803fd9f09378d1e070386238', '25 October 2010': '202ffc3995132adb3bd2385b89cc3f66', '19 October 2010': 'a83891905bd34dee25925aa6d27471ce', '24 May 2010': '9c14b6368899a609bddcdf4f2c6a87e0', '28 March 2011': '3c35c0cf88f2412e83111f76f85a34de', '09 June 2010': 'fc8c961ac56217ed234bcbd462febb39', '26 July 2011': 'aa8461434060b130391069fa7489bcec', '12 May 2010': 'c0414e2be70762db35f57263b0db6bfc', '31 January 2011': '5ccd9f8dcb6eecb7eb87cd2da96d71b1', '23 August 2010': '2b6afd9f307ba2e81eb0484a12cc6781', '01 June 2010': '889c4d971b1cdceaa0192ba2f19e68a5', '02 April 2010': '7dfe8b88ec6d4469f73fda3656878961', '11 April 2011': '7af7735a90e67d75062719e436036922', '26 April 2010': '6cf1c533eac9c1498e31bf2f7b428257', '01 July 2010': '4e6c9a559b9cabfc3c6edc2c75fe05de', '06 January 2011': '45095d017de4a205937f47f05d4301ea', '11 November 2010': '1c4fdbbe285eb1db3f496cd94c016c06', '14 September 2010': 'dbf204fe4bbe08dd65650b4b35fcc0e2', '12 March 2010': '311409dd6ec2f6ca3570883eaf4031d9', '17 July 2013': '67d8080f36b1421b5a728ca4384c497f', '22 March 2012': 'fcf23342589381a14766bade9347b431', '22 March 2010': '6334a63f2189bb96a76f901ca28adee8', '07 July 2010': '65234a982c7456d02ce35e21066079e0', '17 September 2010': 'd60806d6e45693c33aa75b72553b4cdb', '12 January 2012': '7b038f25485fcf0c3b600236a3ca00b8', '30 November 2010': 'c17c102d70e3579b0b30f8bcef04fa5b', '29 April 2010': '5820b48505bc340af3edb4c0c42157b6', '02 August 2010': 'cd16909cd1f10eefe3237b99ff2ad51a', '27 April 2011': '10c8207daaf67434c6a62c0879b6672f', '08 July 2010': 'f0c9067404749b76ac2b4088578fc843', '12 November 2010': '93f59c16ecbdc49c1ee4dee99777a956', '26 January 2011': 'db143dcb89dfd4f3158cd6adfe0808f4', '10 June 2010': '8fc5f354376074baa9ce77627428c311', '28 May 2010': '8feb21a197cb526881cc9657bdab9330', '22 October 2010': 'bd0d2155a9d5243191ba6b17d5fbbefe'}, 'OImon': {'11 May 2010': '103618d145733750a056304043dea567', '25 October 2010': '1c15b894e8ddf35db73d9419d0d26f7f', '19 October 2010': 'd46e90e0e0a75aa139767038c5f740ad', '24 May 
2010': '1beefee61a3bfec498239b121bb53683', '28 March 2011': '4119f48afb28286f0bd4482bd27bbeda', '09 June 2010': '6a5c5c35499a211826b799b52f8e7950', '26 July 2011': 'b40ae6f6b2a738f91076c512dcfee1a5', '12 May 2010': '9acf9f47257b510743ffa661427ae56a', '31 January 2011': '7649fde668723a3e5dd328cc2399b364', '23 August 2010': '08b0e447c93c55277eb2a126b30787e4', '01 June 2010': '176270b2b413b0635e764722ec6d32ff', '02 April 2010': 'e29411ff7991e012460112cb6e56e995', '11 April 2011': 'c685058342b6615c12d34dc34a53e6fa', '26 April 2010': '09d3db3faf545ecab63cdf0570e167f7', '01 July 2010': 'eff1001750d564668899035e9c809daa', '06 January 2011': 'c9836f72f2207843e6503390f984a68a', '11 November 2010': '98ed8162e89fb01d26ac13193deff8de', '14 September 2010': '2005da0bafad21c45e5e407f81997e1e', '12 March 2010': '24030fd1d63ab32ff3cd30783a121b24', '17 July 2013': '284780dd070e05cf3b6d564becf53447', '22 March 2012': 'e0a98ec1a5ae3f27923f029258e5a8b6', '22 March 2010': 'fcc3c126b4bf370a4eeeb921a037a0ac', '07 July 2010': '23d2e048cf1a71560bf0f856e176cc85', '17 September 2010': '4d997579a4d1a4220ae40a424235f76d', '12 January 2012': 'e70c4c96670ab4edf2a973588de6b0a8', '30 November 2010': 'b52736bf3da66f7e8afb025a4e063254', '29 April 2010': '0dd76c65223b6e12650f856470bd0ad0', '02 August 2010': '7b23f2e63cbecae1f21891f8464ce96e', '27 April 2011': 'f04ba250f3acd5b9e847c21d5752eb85', '08 July 2010': 'c3b0fc073f2c782acec50aca801f8ad6', '12 November 2010': '03f02ea4095411dc669446a2cfbc3e79', '26 January 2011': '21cb2b8c4084754fe931ea77eedfd5e1', '10 June 2010': '22c644a4b828f6ef1662bc579cd89180', '28 May 2010': '0b99be09f2f5e979dbbe1044cf769788', '22 October 2010': '641db0e7174a9de9d31c7077b7e55f6a'}, 'day': {'11 May 2010': '097898d568f041c78a5762c3204741d1', '17 September 2010': '63c2ab71b595df5d8d61abc68f564361', '19 October 2010': 'ad573249f4be2751a5f15acdbd756053', '24 May 2010': '939277b33ceefa59d757386f19df3daf', '28 March 2011': 'f9d6cfec5981bb8be1801b35a81002f0', '12 May 2010': 'f130b8ed0705d49c749410c0519c50cb', '26 July 2011': 'f21c16b785432e6bd3f72e80f2cade49', '09 June 2010': 'd734c20e0247d5225e251a20e81ea334', '31 January 2011': 'c8f74e5cf93cc3c793fd594b88985924', '23 August 2010': '1b99baf1d84aa896f697fa1ae705df49', '01 June 2010': 'f6d5b1f45bf471d6bdbe263511650daa', '02 April 2010': 'a66b43262f9760ebcfed1cb559e9605b', '11 April 2011': 'ec52f6ea2595168e5458ad1b950fd49c', '26 April 2010': '12db04043a6cc43b26a6d867eb1e5d4c', '17 July 2013': 'd915e7f305fb050357c850dfe58d74f0', '01 July 2010': '45a083491108c94c715b099123340e9f', '11 November 2010': '02fff6a0b228e556b1acb03a9e553fa7', '14 September 2010': 'a4406d244d9b328488dc24529e0a4e22', '12 March 2010': '6bae9f270feeb5f2d2b5c84de2169d64', '29 April 2010': 'bb5cba05f7393acf99fb834fc54386aa', '22 March 2012': '68011aa0526272fcf0c7f6e7c169ce7d', '22 March 2010': '4d3286d18536c46089ed5dd49cfc0ea9', '07 July 2010': 'de282381d47617aa063a359b5f5724c3', '10 February 2011': '80e409bd73611e9d25d049ad2059c310', '25 October 2010': '783d839767ea9baa68394aefe0d77366', '12 January 2012': '7757d80c56ae0b9009f150afa4850c4e', '30 November 2010': '124740b2ad2bc003e05675678080dc82', '06 January 2011': '2e7604bce6801aa60f87ad347864339e', '02 August 2010': 'e4b6951c15b2bdba4ae7cb895c1dc807', '27 April 2011': '86d1558d99b6ed1e7a886ab3fd717b58', '08 July 2010': 'fbcb83f3874789bb7fbb8f47c97d9582', '12 November 2010': '53fa6f63b86081d1c644183416239052', '26 January 2011': '9a27ee7f81e496bab3c115387badb11e', '10 June 2010': 'f57b2364514d33637da41688a903add2', '28 May 2010': 
'71ced75543fdae82dd6bc69b91052690', '22 October 2010': 'fbf1870c17e4797ed126e747f182b660'}, 'cfDa': {'11 March 2010': 'dfc578ef208dab9572d445e4ca3834ed'}}} \ No newline at end of file
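The nested mapping that closes above pairs each MIP table (e.g. 'day', 'Lmon') with a dictionary from table_date to the MD5 checksum of the corresponding CMOR table file, so a local table file can be matched against the known dated revisions. A minimal sketch of that lookup, assuming a {table: {table_date: md5}} layout; the function and file names here are illustrative, not ESMValTool API:

```python
import hashlib


def identify_table_date(path, known_checksums):
    """Return the table_date whose recorded MD5 matches the file, if any."""
    with open(path, 'rb') as table_file:
        digest = hashlib.md5(table_file.read()).hexdigest()
    for table_date, checksum in known_checksums.items():
        if checksum == digest:
            return table_date
    return None


# Hypothetical usage with the 'day' entry of the mapping above:
# identify_table_date('CMIP5_day', checksums['day'])
```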
diff --git a/esmvaltool/cmor/tables/cmip6/README.md b/esmvaltool/cmor/tables/cmip6/README.md deleted file mode 100644 index 19d535de2c..0000000000 --- a/esmvaltool/cmor/tables/cmip6/README.md +++ /dev/null @@ -1,9 +0,0 @@ -# cmip6-cmor-tables - -## Data Request 01.00.30 (March 11, 2019) - -Source is https://github.com/PCMDI/cmip6-cmor-tables - -- branch: 01.00.30 - -- commit: c9089bf17e81d5dcc41a839e8299cee69da890d3 - diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_3hr.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_3hr.json deleted file mode 100644 index d9eb7a6f6f..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_3hr.json +++ /dev/null @@ -1,409 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table 3hr", - "realm": "atmos", - "frequency": "3hr", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "0.125000", - "generic_levels": "", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "clt": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Cloud Fraction", - "comment": "Total cloud area fraction for the whole atmospheric column, as seen from the surface or the top of the atmosphere. Includes both large-scale and convective cloud.", - "dimensions": "longitude latitude time", - "out_name": "clt", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfls": { - "modeling_realm": "atmos", - "standard_name": "surface_upward_latent_heat_flux", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Upward Latent Heat Flux", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "hfls", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfss": { - "modeling_realm": "atmos", - "standard_name": "surface_upward_sensible_heat_flux", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Upward Sensible Heat Flux", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "hfss", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "huss": { - "modeling_realm": "atmos", - "standard_name": "specific_humidity", - "units": "1.0", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Near-Surface Specific Humidity", - "comment": "Near-surface (usually, 2 meter) specific humidity.", - "dimensions": "longitude latitude time1 height2m", - "out_name": "huss", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrro": { - "modeling_realm": "land", - "standard_name": "runoff_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Runoff", - "comment": "The total run-off (including drainage through the base of the soil model) per unit area leaving the land portion of the grid cell.", - "dimensions": "longitude latitude time", - "out_name": "mrro", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrsos": { - "modeling_realm": "land", - "standard_name": "moisture_content_of_soil_layer", - "units": "kg m-2", - "cell_methods": "area: mean where land time: point", - "cell_measures": "area: areacella", - "long_name": "Moisture in Upper Portion of Soil Column", - "comment": "The mass of water in all phases in the upper 10cm of the soil layer.", - "dimensions": "longitude latitude time1 sdepth1", - "out_name": "mrsos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pr": { - "modeling_realm": "atmos", - "standard_name": "precipitation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Precipitation", - "comment": "includes both liquid and solid phases", - "dimensions": "longitude latitude time", - "out_name": "pr", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prc": { - "modeling_realm": "atmos", - "standard_name": "convective_precipitation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Convective Precipitation", - "comment": "Convective precipitation at surface; includes both liquid and solid phases.", - "dimensions": "longitude latitude time", - "out_name": "prc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prsn": { - "modeling_realm": "atmos", - "standard_name": "snowfall_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Snowfall Flux", - "comment": "at surface; includes precipitation of all forms of water in the solid phase", - "dimensions": "longitude latitude time", - "out_name": "prsn", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ps": { - "modeling_realm": "atmos", - "standard_name": "surface_air_pressure", - "units": "Pa", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Surface Air Pressure", - "comment": "surface pressure (not mean sea-level pressure), 2-D field to calculate the 3-D pressure field from hybrid coordinates", - "dimensions": "longitude latitude time1", - "out_name": "ps", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlds": { - "modeling_realm": "atmos", - "standard_name": "surface_downwelling_longwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Downwelling Longwave Radiation", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "rlds", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rldscs": { - "modeling_realm": "atmos", - "standard_name": "surface_downwelling_longwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Downwelling Clear-Sky
Longwave Radiation", - "comment": "Surface downwelling clear-sky longwave radiation", - "dimensions": "longitude latitude time", - "out_name": "rldscs", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlus": { - "modeling_realm": "atmos", - "standard_name": "surface_upwelling_longwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Upwelling Longwave Radiation", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "rlus", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsds": { - "modeling_realm": "atmos", - "standard_name": "surface_downwelling_shortwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Downwelling Shortwave Radiation", - "comment": "surface solar irradiance for UV calculations", - "dimensions": "longitude latitude time", - "out_name": "rsds", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsdscs": { - "modeling_realm": "atmos", - "standard_name": "surface_downwelling_shortwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Downwelling Clear-Sky Shortwave Radiation", - "comment": "surface solar irradiance clear sky for UV calculations", - "dimensions": "longitude latitude time", - "out_name": "rsdscs", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsdsdiff": { - "modeling_realm": "atmos", - "standard_name": "surface_diffuse_downwelling_shortwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Diffuse Downwelling Shortwave Radiation", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "rsdsdiff", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsus": { - "modeling_realm": "atmos", - "standard_name": "surface_upwelling_shortwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Upwelling Shortwave Radiation", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "rsus", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsuscs": { - "modeling_realm": "atmos", - "standard_name": "surface_upwelling_shortwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Upwelling Clear-Sky Shortwave Radiation", - "comment": "Surface Upwelling Clear-sky Shortwave Radiation", - "dimensions": "longitude latitude time", - "out_name": "rsuscs", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tas": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Near-Surface Air 
Temperature", - "comment": "near-surface (usually, 2 meter) air temperature", - "dimensions": "longitude latitude time1 height2m", - "out_name": "tas", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tslsi": { - "modeling_realm": "land", - "standard_name": "surface_temperature", - "units": "K", - "cell_methods": "area: mean (comment: over land and sea ice) time: point", - "cell_measures": "area: areacella", - "long_name": "Surface Temperature Where Land or Sea Ice", - "comment": "'skin' temperature of all surfaces except open ocean.", - "dimensions": "longitude latitude time1", - "out_name": "tslsi", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tso": { - "modeling_realm": "ocean", - "standard_name": "sea_surface_temperature", - "units": "K", - "cell_methods": "area: mean where sea time: point", - "cell_measures": "area: areacella", - "long_name": "Sea Surface Temperature", - "comment": "temperature of surface of open ocean, sampled synoptically.", - "dimensions": "longitude latitude time1", - "out_name": "tso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "uas": { - "modeling_realm": "atmos", - "standard_name": "eastward_wind", - "units": "m s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Eastward Near-Surface Wind Speed", - "comment": "Eastward component of the near-surface (usually, 10 meters) wind", - "dimensions": "longitude latitude time1 height10m", - "out_name": "uas", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vas": { - "modeling_realm": "atmos", - "standard_name": "northward_wind", - "units": "m s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Northward Near-Surface Wind Speed", - "comment": "Northward component of the near surface wind", - "dimensions": "longitude latitude time1 height10m", - "out_name": "vas", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_6hrLev.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_6hrLev.json deleted file mode 100644 index e2ca5a56d6..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_6hrLev.json +++ /dev/null @@ -1,154 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table 6hrLev", - "realm": "atmos", - "frequency": "6hr", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "0.250000", - "generic_levels": "alevel", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "bs550aer": { - "modeling_realm": "aerosol", - "standard_name": "volume_lidar_backwards_scattering_coefficient_in_air_due_to_ambient_aerosol_particles", - "units": "m-1 sr-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Aerosol backscatter coefficient", - "comment": "Aerosol Backscatter @550nm @ 180 degrees, computed from extinction and lidar ratio", - "dimensions": "longitude latitude alevel time1", - "out_name": "bs550aer", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - 
"ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ec550aer": { - "modeling_realm": "aerosol", - "standard_name": "volume_extinction_coefficient_in_air_due_to_ambient_aerosol_particles", - "units": "m-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Aerosol extinction coefficient", - "comment": "Aerosol Extinction @550nm", - "dimensions": "longitude latitude alevel time1", - "out_name": "ec550aer", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hus": { - "modeling_realm": "atmos", - "standard_name": "specific_humidity", - "units": "1.0", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Specific Humidity", - "comment": "", - "dimensions": "longitude latitude alevel time1", - "out_name": "hus", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pmlev": { - "modeling_realm": "atmos", - "standard_name": "air_pressure", - "units": "Pa", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Air Pressure", - "comment": "The atmospheric pressure at the model layer midpoints for all times and levels in the associated output variables", - "dimensions": "longitude latitude alevel time", - "out_name": "pmlev", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ps": { - "modeling_realm": "atmos", - "standard_name": "surface_air_pressure", - "units": "Pa", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Surface Air Pressure", - "comment": "surface pressure (not mean sea-level pressure), 2-D field to calculate the 3-D pressure field from hybrid coordinates", - "dimensions": "longitude latitude time1", - "out_name": "ps", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ta": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Air Temperature", - "comment": "Air Temperature", - "dimensions": "longitude latitude alevel time1", - "out_name": "ta", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ua": { - "modeling_realm": "atmos", - "standard_name": "eastward_wind", - "units": "m s-1", - "cell_methods": "time: point", - "cell_measures": "--OPT", - "long_name": "Eastward Wind", - "comment": "", - "dimensions": "longitude latitude alevel time1", - "out_name": "ua", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "va": { - "modeling_realm": "atmos", - "standard_name": "northward_wind", - "units": "m s-1", - "cell_methods": "time: point", - "cell_measures": "--OPT", - "long_name": "Northward Wind", - "comment": "", - "dimensions": "longitude latitude alevel time1", - "out_name": "va", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_6hrPlev.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_6hrPlev.json deleted file mode 100644 index ec8a1fc662..0000000000 --- 
diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_6hrPlev.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_6hrPlev.json deleted file mode 100644 index ec8a1fc662..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_6hrPlev.json +++ /dev/null @@ -1,358 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table 6hrPlev", - "realm": "atmos", - "frequency": "6hr", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "0.250000", - "generic_levels": "", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "bldep": { - "modeling_realm": "aerosol", - "standard_name": "atmosphere_boundary_layer_thickness", - "units": "m", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Boundary Layer Depth", - "comment": "Boundary layer depth", - "dimensions": "longitude latitude time", - "out_name": "bldep", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hurs": { - "modeling_realm": "atmos", - "standard_name": "relative_humidity", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Near-Surface Relative Humidity", - "comment": "The relative humidity with respect to liquid water for T> 0 C, and with respect to ice for T<0 C.", - "dimensions": "longitude latitude time height2m", - "out_name": "hurs", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hus4": { - "modeling_realm": "atmos", - "standard_name": "specific_humidity", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Specific Humidity", - "comment": "", - "dimensions": "longitude latitude plev4 time", - "out_name": "hus", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pr": { - "modeling_realm": "atmos", - "standard_name": "precipitation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Precipitation", - "comment": "includes both liquid and solid phases", - "dimensions": "longitude latitude time", - "out_name": "pr", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prhmax": { - "modeling_realm": "atmos", - "standard_name": "precipitation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean time: mean within hours time: maximum over hours", - "cell_measures": "area: areacella", - "long_name": "Maximum Hourly Precipitation Rate", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "prhmax", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "psl": { - "modeling_realm": "atmos", - "standard_name": "air_pressure_at_sea_level", - "units": "Pa", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Sea Level Pressure", - "comment": "Sea Level Pressure", - "dimensions": "longitude latitude time", - "out_name": "psl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rv850": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_relative_vorticity", - "units": "s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Relative Vorticity at 850 hPa", - "comment": "", - "dimensions":
"longitude latitude time p850", - "out_name": "rv850", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sfcWind": { - "modeling_realm": "atmos", - "standard_name": "wind_speed", - "units": "m s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Near-Surface Wind Speed", - "comment": "near-surface (usually, 10 meters) wind speed.", - "dimensions": "longitude latitude time height10m", - "out_name": "sfcWind", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tas": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Air Temperature", - "comment": "near-surface (usually, 2 meter) air temperature", - "dimensions": "longitude latitude time height2m", - "out_name": "tas", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ua100m": { - "modeling_realm": "", - "standard_name": "eastward_wind", - "units": "m s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Eastward Wind at 100m", - "comment": "Zonal wind at 100m height", - "dimensions": "longitude latitude time height100m", - "out_name": "ua100m", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ua200": { - "modeling_realm": "atmos", - "standard_name": "eastward_wind", - "units": "m s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Eastward Wind at 200hPa", - "comment": "Zonal wind (positive eastwards) at 200hPa", - "dimensions": "longitude latitude time p200", - "out_name": "ua200", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ua850": { - "modeling_realm": "atmos", - "standard_name": "eastward_wind", - "units": "m s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Eastward Wind at 850 hPa", - "comment": "Zonal wind on the 850 hPa surface", - "dimensions": "longitude latitude time p850", - "out_name": "ua850", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "uas": { - "modeling_realm": "atmos", - "standard_name": "eastward_wind", - "units": "m s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Eastward Near-Surface Wind", - "comment": "Eastward component of the near-surface (usually, 10 meters) wind", - "dimensions": "longitude latitude time height10m", - "out_name": "uas", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "va100m": { - "modeling_realm": "", - "standard_name": "northward_wind", - "units": "m s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Northward Wind at 100m", - "comment": "Meridional wind at 100m above the surface.", - "dimensions": "longitude latitude time height100m", - "out_name": "va100m", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "va200": { - "modeling_realm": "atmos", - 
"standard_name": "northward_wind", - "units": "m s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Northward Wind", - "comment": "Northward component of the wind", - "dimensions": "longitude latitude time p200", - "out_name": "va200", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "va850": { - "modeling_realm": "atmos", - "standard_name": "northward_wind", - "units": "m s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Northward Wind", - "comment": "Northward component of the wind at 850hPa", - "dimensions": "longitude latitude time p850", - "out_name": "va850", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wap4": { - "modeling_realm": "atmos", - "standard_name": "lagrangian_tendency_of_air_pressure", - "units": "Pa s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "omega (=dp/dt)", - "comment": "Omega (vertical velocity in pressure coordinates, positive downwards)", - "dimensions": "longitude latitude plev4 time", - "out_name": "wap", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wsgmax100m": { - "modeling_realm": "", - "standard_name": "wind_speed_of_gust", - "units": "m s-1", - "cell_methods": "area: mean time: maximum", - "cell_measures": "area: areacella", - "long_name": "Maximum Wind Speed of Gust at 100m", - "comment": "Wind speed gust maximum at 100m above surface", - "dimensions": "longitude latitude time height100m", - "out_name": "wsgmax100m", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wsgmax10m": { - "modeling_realm": "", - "standard_name": "wind_speed_of_gust", - "units": "m s-1", - "cell_methods": "area: mean time: maximum", - "cell_measures": "area: areacella", - "long_name": "Maximum Wind Speed of Gust at 10m", - "comment": "Wind speed gust maximum at 10m above surface", - "dimensions": "longitude latitude time height10m", - "out_name": "wsgmax10m", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zg1000": { - "modeling_realm": "atmos", - "standard_name": "geopotential_height", - "units": "m", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Geopotential Height at 1000 hPa", - "comment": "Geopotential height on the 1000 hPa surface", - "dimensions": "longitude latitude time p1000", - "out_name": "zg1000", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_6hrPlevPt.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_6hrPlevPt.json deleted file mode 100644 index fae0c042d5..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_6hrPlevPt.json +++ /dev/null @@ -1,681 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table 6hrPlevPt", - "realm": "atmos", - "frequency": "6hr", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "", - "generic_levels": "", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "albsrfc": { - 
"modeling_realm": "aerosol", - "standard_name": "surface_albedo", - "units": "1.0", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "surface albedo", - "comment": "", - "dimensions": "longitude latitude time1", - "out_name": "albsrfc", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cldicemxrat27": { - "modeling_realm": "atmos", - "standard_name": "cloud_ice_mixing_ratio", - "units": "1.0", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Cloud Ice Mixing Ratio", - "comment": "Cloud ice mixing ratio", - "dimensions": "longitude latitude plev27 time1", - "out_name": "cldicemxrat", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cldwatmxrat27": { - "modeling_realm": "atmos", - "standard_name": "cloud_liquid_water_mixing_ratio", - "units": "1.0", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Cloud Water Mixing Ratio", - "comment": "Cloud water mixing ratio", - "dimensions": "longitude latitude plev27 time1", - "out_name": "cldwatmxrat", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dtauc": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_optical_thickness_due_to_convective_cloud", - "units": "1.0", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Convective Cloud Optical Depth", - "comment": "This is the in-cloud optical depth obtained by considering only the cloudy portion of the grid cell", - "dimensions": "longitude latitude time1", - "out_name": "dtauc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dtaus": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_optical_thickness_due_to_stratiform_cloud", - "units": "1.0", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Stratiform Cloud Optical Depth", - "comment": "This is the in-cloud optical depth obtained by considering only the cloudy portion of the grid cell.", - "dimensions": "longitude latitude time1", - "out_name": "dtaus", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "grplmxrat27": { - "modeling_realm": "atmos", - "standard_name": "mass_fraction_of_graupel_in_air", - "units": "1.0", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Graupel Mixing Ratio", - "comment": "Graupel mixing ratio", - "dimensions": "longitude latitude plev27 time1", - "out_name": "grplmxrat", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hus27": { - "modeling_realm": "atmos", - "standard_name": "specific_humidity", - "units": "1.0", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Specific Humidity", - "comment": "", - "dimensions": "longitude latitude plev27 time1", - "out_name": "hus", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hus7h": { - "modeling_realm": "atmos", - "standard_name": "specific_humidity", - "units": "1.0", - 
"cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Specific Humidity", - "comment": "", - "dimensions": "longitude latitude plev7h time1", - "out_name": "hus", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "huss": { - "modeling_realm": "atmos", - "standard_name": "specific_humidity", - "units": "1.0", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Near-Surface Specific Humidity", - "comment": "Near-surface (usually, 2 meter) specific humidity.", - "dimensions": "longitude latitude time1 height2m", - "out_name": "huss", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "lwsffluxaero": { - "modeling_realm": "atmos", - "standard_name": "longwave__flux__due_to_volcanic_aerosols_at_the_surface", - "units": "W m-2", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Longwave flux due to volcanic aerosols at the surface", - "comment": "downwelling longwave flux due to volcanic aerosols at the surface to be diagnosed through double radiation call", - "dimensions": "longitude latitude time1", - "out_name": "lwsffluxaero", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "lwtoafluxaerocs": { - "modeling_realm": "atmos", - "standard_name": "longwave_flux_due_to_volcanic_aerosols_at_TOA_under_clear_sky", - "units": "W m-2", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Longwave flux due to volcanic aerosols at TOA under clear sky", - "comment": "downwelling longwave flux due to volcanic aerosols at TOA under clear sky to be diagnosed through double radiation call", - "dimensions": "longitude latitude time1", - "out_name": "lwtoafluxaerocs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrlsl": { - "modeling_realm": "land", - "standard_name": "moisture_content_of_soil_layer", - "units": "kg m-2", - "cell_methods": "area: mean where land time: point", - "cell_measures": "area: areacella", - "long_name": "Water Content of Soil Layer", - "comment": "The mass of water in all phases, including ice, in soil layers. 
Report as missing for grid cells with no land.", - "dimensions": "longitude latitude time1 sdepth1", - "out_name": "mrlsl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrsos": { - "modeling_realm": "land", - "standard_name": "moisture_content_of_soil_layer", - "units": "kg m-2", - "cell_methods": "area: mean where land time: point", - "cell_measures": "area: areacella", - "long_name": "Moisture in Upper Portion of Soil Column", - "comment": "The mass of water in all phases in the upper 10cm of the soil layer.", - "dimensions": "longitude latitude time1 sdepth1", - "out_name": "mrsos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pfull27": { - "modeling_realm": "atmos", - "standard_name": "air_pressure", - "units": "Pa", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Pressure on Model Levels", - "comment": "Air pressure on model levels", - "dimensions": "longitude latitude plev27 time1", - "out_name": "pfull", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "psl": { - "modeling_realm": "atmos", - "standard_name": "air_pressure_at_sea_level", - "units": "Pa", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Sea Level Pressure", - "comment": "Sea Level Pressure", - "dimensions": "longitude latitude time1", - "out_name": "psl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rainmxrat27": { - "modeling_realm": "atmos", - "standard_name": "mass_fraction_of_rain_in_air", - "units": "1.0", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "rain_mixing_ratio", - "comment": "Rain mixing ratio", - "dimensions": "longitude latitude plev27 time1", - "out_name": "rainmxrat", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rv850": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_relative_vorticity", - "units": "s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Relative Vorticity at 850 hPa", - "comment": "", - "dimensions": "longitude latitude time1 p850", - "out_name": "rv850", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sfcWind": { - "modeling_realm": "atmos", - "standard_name": "wind_speed", - "units": "m s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Near-Surface Wind Speed", - "comment": "near-surface (usually, 10 meters) wind speed.", - "dimensions": "longitude latitude time1 height10m", - "out_name": "sfcWind", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "snowmxrat27": { - "modeling_realm": "atmos", - "standard_name": "mass_fraction_of_snow_in_air", - "units": "1.0", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "snow_mixing_ratio", - "comment": "Snow mixing ratio", - "dimensions": "longitude latitude plev27 time1", - "out_name": "snowmxrat", - "type": "", - "positive": "", - "valid_min": "", - 
"valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "snw": { - "modeling_realm": "landIce land", - "standard_name": "surface_snow_amount", - "units": "kg m-2", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Surface Snow Amount", - "comment": "The mass of surface snow on the land portion of the grid cell divided by the land area in the grid cell; reported as missing where the land fraction is 0; excludes snow on vegetation canopy or on sea ice.", - "dimensions": "longitude latitude time1", - "out_name": "snw", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "swsffluxaero": { - "modeling_realm": "atmos", - "standard_name": "shortwave__flux_due_to_volcanic_aerosols_at__the_surface", - "units": "W m-2", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Shortwave heating rate due to volcanic aerosols", - "comment": "shortwave heating rate due to volcanic aerosols to be diagnosed through double radiation call, zonal average values required", - "dimensions": "longitude latitude time1", - "out_name": "swsffluxaero", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "swtoafluxaerocs": { - "modeling_realm": "atmos", - "standard_name": "shortwave_flux_due_to_volcanic_aerosols_at_TOA_under_clear_sky", - "units": "W m-2", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Shortwave flux due to volcanic aerosols at TOA under clear sky", - "comment": "downwelling shortwave flux due to volcanic aerosols at TOA under clear sky to be diagnosed through double radiation call", - "dimensions": "longitude latitude time1", - "out_name": "swtoafluxaerocs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ta": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Air Temperature", - "comment": "Air Temperature", - "dimensions": "longitude latitude plev3 time1", - "out_name": "ta", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ta27": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Air Temperature", - "comment": "Air Temperature", - "dimensions": "longitude latitude plev27 time1", - "out_name": "ta", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ta7h": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Air Temperature", - "comment": "Air Temperature", - "dimensions": "longitude latitude plev7h time1", - "out_name": "ta", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tas": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Surface Air Temperature", - "comment": 
"near-surface (usually, 2 meter) air temperature", - "dimensions": "longitude latitude time1 height2m", - "out_name": "tas", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ts": { - "modeling_realm": "atmos", - "standard_name": "surface_temperature", - "units": "K", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Surface Temperature", - "comment": "Temperature of the lower boundary of the atmosphere", - "dimensions": "longitude latitude time1", - "out_name": "ts", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tsl": { - "modeling_realm": "land", - "standard_name": "soil_temperature", - "units": "K", - "cell_methods": "area: mean where land time: point", - "cell_measures": "area: areacella", - "long_name": "Temperature of Soil", - "comment": "Temperature of each soil layer. Reported as missing for grid cells with no land.", - "dimensions": "longitude latitude time1 sdepth1", - "out_name": "tsl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ua": { - "modeling_realm": "atmos", - "standard_name": "eastward_wind", - "units": "m s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Eastward Wind", - "comment": "", - "dimensions": "longitude latitude plev3 time1", - "out_name": "ua", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ua7h": { - "modeling_realm": "atmos", - "standard_name": "eastward_wind", - "units": "m s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Eastward Wind", - "comment": "", - "dimensions": "longitude latitude plev7h time1", - "out_name": "ua", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "uas": { - "modeling_realm": "atmos", - "standard_name": "eastward_wind", - "units": "m s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Eastward Near-Surface Wind", - "comment": "Eastward component of the near-surface (usually, 10 meters) wind", - "dimensions": "longitude latitude time1 height10m", - "out_name": "uas", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "va": { - "modeling_realm": "atmos", - "standard_name": "northward_wind", - "units": "m s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Northward Wind", - "comment": "", - "dimensions": "longitude latitude plev3 time1", - "out_name": "va", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "va7h": { - "modeling_realm": "atmos", - "standard_name": "northward_wind", - "units": "m s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Northward Wind", - "comment": "", - "dimensions": "longitude latitude plev7h time1", - "out_name": "va", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vas": { - "modeling_realm": "atmos", - "standard_name": "northward_wind", - "units": "m 
s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Northward Near-Surface Wind", - "comment": "Northward component of the near surface wind", - "dimensions": "longitude latitude time1 height10m", - "out_name": "vas", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vortmean": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_relative_vorticity", - "units": "s-1", - "cell_methods": "time: point", - "cell_measures": "area: areacella", - "long_name": "atmosphere_relative_vorticity", - "comment": "Mean vorticity over 850,700,600 hPa", - "dimensions": "longitude latitude time1 pl700", - "out_name": "vortmean", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wbptemp7h": { - "modeling_realm": "atmos", - "standard_name": "wet_bulb_potential_temperature", - "units": "K", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "wet_bulb_potential_temperature", - "comment": "Wet bulb potential temperature", - "dimensions": "longitude latitude plev7h time1", - "out_name": "wbptemp", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zg27": { - "modeling_realm": "atmos", - "standard_name": "geopotential_height", - "units": "m", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Geopotential Height", - "comment": "", - "dimensions": "longitude latitude plev27 time1", - "out_name": "zg", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zg500": { - "modeling_realm": "aerosol", - "standard_name": "geopotential_height", - "units": "m", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Geopotential Height at 500 hPa", - "comment": "geopotential height on the 500 hPa surface", - "dimensions": "longitude latitude time1 p500", - "out_name": "zg500", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zg7h": { - "modeling_realm": "atmos", - "standard_name": "geopotential_height", - "units": "m", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Geopotential Height", - "comment": "", - "dimensions": "longitude latitude plev7h time1", - "out_name": "zg", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_AERday.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_AERday.json deleted file mode 100644 index 99c62be838..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_AERday.json +++ /dev/null @@ -1,205 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table AERday", - "realm": "aerosol", - "frequency": "day", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "", - "generic_levels": "", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "cod": { - "modeling_realm": "aerosol", - "standard_name": "atmosphere_optical_thickness_due_to_cloud", - "units": "1.0", - "cell_methods": "area: time: mean", - 
"cell_measures": "area: areacella", - "long_name": "cloud optical depth", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "cod", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "maxpblz": { - "modeling_realm": "aerosol", - "standard_name": "atmosphere_boundary_layer_thickness", - "units": "m", - "cell_methods": "area: mean time: maximum", - "cell_measures": "area: areacella", - "long_name": "maximum PBL height", - "comment": "maximum boundary layer height during the day (add cell_methods attribute: 'time: maximum')", - "dimensions": "longitude latitude time", - "out_name": "maxpblz", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "minpblz": { - "modeling_realm": "aerosol", - "standard_name": "atmosphere_boundary_layer_thickness", - "units": "m", - "cell_methods": "area: mean time: minimum", - "cell_measures": "area: areacella", - "long_name": "minimum PBL height", - "comment": "minimum boundary layer height during the day (add cell_methods attribute: 'time: minimum')", - "dimensions": "longitude latitude time", - "out_name": "minpblz", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "od550aer": { - "modeling_realm": "aerosol", - "standard_name": "atmosphere_optical_thickness_due_to_ambient_aerosol_particles", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "ambient aerosol optical thickness at 550 nm", - "comment": "AOD from the ambient aerosls (i.e., includes aerosol water). Does not include AOD from stratospheric aerosols if these are prescribed but includes other possible background aerosol types. 
Needs a comment attribute 'wavelength: 550 nm'", - "dimensions": "longitude latitude time", - "out_name": "od550aer", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sfo3max": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_ozone_in_air", - "units": "mol mol-1", - "cell_methods": "area: mean time: maximum", - "cell_measures": "area: areacella", - "long_name": "daily maximum O3 volume mixing ratio in lowest model layer", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "sfo3max", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "toz": { - "modeling_realm": "aerosol", - "standard_name": "equivalent_thickness_at_stp_of_atmosphere_ozone_content", - "units": "m", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Column Ozone", - "comment": "total ozone column in DU", - "dimensions": "longitude latitude time", - "out_name": "toz", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ua10": { - "modeling_realm": "aerosol", - "standard_name": "eastward_wind", - "units": "m s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Eastward Wind at 10 hPa", - "comment": "Zonal wind on the 10 hPa surface", - "dimensions": "longitude latitude time p10", - "out_name": "ua10", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zg10": { - "modeling_realm": "aerosol", - "standard_name": "geopotential_height", - "units": "m", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Geopotential Height at 10 hPa", - "comment": "Geopotential height on the 10 hPa surface", - "dimensions": "longitude latitude time p10", - "out_name": "zg10", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zg100": { - "modeling_realm": "aerosol", - "standard_name": "geopotential_height", - "units": "m", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Geopotential Height at 100 hPa", - "comment": "Geopotential height on the 100 hPa surface", - "dimensions": "longitude latitude time p100", - "out_name": "zg100", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zg1000": { - "modeling_realm": "atmos", - "standard_name": "geopotential_height", - "units": "m", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Geopotential Height at 1000 hPa", - "comment": "Geopotential height on the 1000 hPa surface", - "dimensions": "longitude latitude time p1000", - "out_name": "zg1000", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zg500": { - "modeling_realm": "aerosol", - "standard_name": "geopotential_height", - "units": "m", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Geopotential Height at 500 hPa", - "comment": "geopotential height on the 500 hPa surface", - "dimensions": "longitude latitude time p500", - "out_name": "zg500", - "type": "float", - "positive": "", - 
"valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_AERhr.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_AERhr.json deleted file mode 100644 index e8f32a625e..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_AERhr.json +++ /dev/null @@ -1,103 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table AERhr", - "realm": "aerosol", - "frequency": "hr", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "", - "generic_levels": "", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "ps": { - "modeling_realm": "atmos", - "standard_name": "surface_air_pressure", - "units": "Pa", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface pressure", - "comment": "surface pressure (not mean sea-level pressure), 2-D field to calculate the 3-D pressure field from hybrid coordinates", - "dimensions": "longitude latitude time", - "out_name": "ps", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sfno2": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_nitrogen_dioxide_in_air", - "units": "mol mol-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "NO2 volume mixing ratio in lowest model layer", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "sfno2", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sfo3": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_ozone_in_air", - "units": "mol mol-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "O3 volume mixing ratio in lowest model layer", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "sfo3", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sfpm25": { - "modeling_realm": "aerosol", - "standard_name": "mass_fraction_of_pm2p5_ambient_aerosol_particles_in_air", - "units": "kg kg-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "PM2.5 mass mixing ratio in lowest model layer", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "sfpm25", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tas": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Temperature", - "comment": "near-surface (usually, 2 meter) air temperature", - "dimensions": "longitude latitude time height2m", - "out_name": "tas", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_AERmon.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_AERmon.json deleted file mode 100644 index 5cd1dfb501..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_AERmon.json +++ /dev/null @@ -1,2194 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table AERmon", - 
"realm": "aerosol", - "frequency": "mon", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "30.00000", - "generic_levels": "alevel", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "abs550aer": { - "modeling_realm": "aerosol", - "standard_name": "atmosphere_absorption_optical_thickness_due_to_ambient_aerosol", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "ambient aerosol absorption optical thickness at 550 nm", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "abs550aer", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "airmass": { - "modeling_realm": "aerosol", - "standard_name": "atmosphere_mass_of_air_per_unit_area", - "units": "kg m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Vertically integrated mass content of air in layer", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "airmass", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "albs": { - "modeling_realm": "aerosol", - "standard_name": "surface_albedo", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "planetary albedo", - "comment": "Grid cell average albedo for all wavelengths.", - "dimensions": "longitude latitude time", - "out_name": "albs", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "albsrfc": { - "modeling_realm": "aerosol", - "standard_name": "surface_albedo", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "surface albedo", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "albsrfc", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "aoanh": { - "modeling_realm": "aerosol", - "standard_name": "tracer_lifetime", - "units": "yr", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Tracer age of air Northern Hemisphere", - "comment": "Fixed surface layer mixing ratio over 30o-50oN (0 ppbv), uniform fixed source (at all levels) everywhere else (source is unspecified but must be constant in space and time and documented). Note that the source could be 1yr/yr, so the tracer concentration provides mean age in years. 
For method using linearly increasing tracer include a method attribute: 'linearly increasing tracer'. For method using uniform source (1yr/yr) include a method attribute: 'uniform source'", - "dimensions": "longitude latitude alevel time", - "out_name": "aoanh", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "bldep": { - "modeling_realm": "aerosol", - "standard_name": "atmosphere_boundary_layer_thickness", - "units": "m", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Boundary Layer Depth", - "comment": "Boundary layer depth", - "dimensions": "longitude latitude time", - "out_name": "bldep", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "c2h2": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_ethyne_in_air", - "units": "mol mol-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "C2H2 volume mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "c2h2", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "c2h6": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_ethane_in_air", - "units": "mol mol-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "C2H6 volume mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "c2h6", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "c3h6": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_propene_in_air", - "units": "mol mol-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "C3H6 volume mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "c3h6", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "c3h8": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_propane_in_air", - "units": "mol mol-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "C3H8 volume mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "c3h8", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ccn": { - "modeling_realm": "aerosol", - "standard_name": "number_concentration_of_cloud_condensation_nuclei_at_stp_in_air", - "units": "m-3", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "cloud condensation nuclei concentration at liquid cloud top", - "comment": "proposed name: number_concentration_of_ambient_aerosol_in_air_at_liquid_water_cloud_top", - "dimensions": "longitude latitude time", - "out_name": "ccn", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cdnc": { - "modeling_realm": "aerosol", - "standard_name": "number_concentration_of_cloud_liquid_water_particles_in_air", - "units": "m-3", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Cloud Liquid Droplet Number 
Concentration", - "comment": "Cloud Droplet Number Concentration in liquid water clouds.", - "dimensions": "longitude latitude alevel time", - "out_name": "cdnc", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ch3coch3": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_acetone_in_air", - "units": "mol mol-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "CH3COCH3 volume mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "ch3coch3", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ch4": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_methane_in_air", - "units": "mol mol-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "CH4 volume mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "ch4", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cheaqpso4": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_aqueous_phase_net_chemical_production", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Aqueous-phase production rate of SO4", - "comment": "proposed name: tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_due_to_aqueous_phase_net_chemical_production", - "dimensions": "longitude latitude alevel time", - "out_name": "cheaqpso4", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "chegpso4": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_gaseous_phase_net_chemical_production", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Gas-phase production rate of SO4", - "comment": "proposed name: tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_due_to_gas_phase_net_chemical_production", - "dimensions": "longitude latitude alevel time", - "out_name": "chegpso4", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "chepasoa": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "total net production of anthropogenic secondary organic aerosol", - "comment": "anthropogenic part of chepsoa", - "dimensions": "longitude latitude time", - "out_name": "chepasoa", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "chepsoa": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "chemical production of dry aerosol secondary 
organic matter", - "comment": "If model lumps SOA emissions with POA, then the sum of POA and SOA emissions is reported as OA emissions. ''mass'' refers to the mass of primary organic matter, not mass of organic carbon alone.", - "dimensions": "longitude latitude time", - "out_name": "chepsoa", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cltc": { - "modeling_realm": "aerosol", - "standard_name": "convective_cloud_area_fraction", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Convective Cloud Area Fraction", - "comment": "Convective cloud area fraction for the whole atmospheric column, as seen from the surface or the top of the atmosphere. Includes only convective cloud.", - "dimensions": "longitude latitude time", - "out_name": "cltc", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "co": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_carbon_monoxide_in_air", - "units": "mol mol-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "CO volume mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "co", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "co2": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_carbon_dioxide_in_air", - "units": "mol mol-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "CO2 volume mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "co2", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cod": { - "modeling_realm": "aerosol", - "standard_name": "atmosphere_optical_thickness_due_to_cloud", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "cloud optical depth", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "cod", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dms": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_dimethyl_sulfide_in_air", - "units": "mol mol-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "DMS volume mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "dms", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "drybc": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_dry_deposition", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "dry deposition rate of black carbon aerosol mass", - "comment": "Dry deposition includes gravitational settling, impact scavenging, and turbulent deposition.", - "dimensions": "longitude latitude time", - "out_name": "drybc", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "drydust": { - "modeling_realm": "aerosol", - 
"standard_name": "tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_dry_deposition", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "dry deposition rate of dust", - "comment": "Dry deposition includes gravitational settling, impact scavenging, and turbulent deposition.", - "dimensions": "longitude latitude time", - "out_name": "drydust", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "drynh3": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_ammonia_due_to_dry_deposition", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "dry deposition rate of nh3", - "comment": "dry deposition includes gravitational settling, impact scavenging, and turbulent deposition", - "dimensions": "longitude latitude time", - "out_name": "drynh3", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "drynh4": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_ammonium_dry_aerosol_particles_due_to_dry_deposition", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "dry deposition rate of nh4", - "comment": "dry deposition includes gravitational settling, impact scavenging, and turbulent deposition", - "dimensions": "longitude latitude time", - "out_name": "drynh4", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "drynoy": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_noy_expressed_as_nitrogen_due_to_dry_deposition", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "dry deposition rate of noy", - "comment": "NOy is the sum of all simulated oxidized nitrogen species out of NO, NO2, HNO3, HNO4, NO3aerosol, NO3(radical), N2O5, PAN, other organic nitrates. 
Dry deposition includes gravitational settling, impact scavenging, and turbulent deposition.", - "dimensions": "longitude latitude time", - "out_name": "drynoy", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dryo3": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_ozone_due_to_dry_deposition", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "dry deposition rate of o3", - "comment": "dry deposition includes gravitational settling, impact scavenging, and turbulent deposition.", - "dimensions": "longitude latitude time", - "out_name": "dryo3", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dryoa": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_dry_deposition", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "dry deposition rate of dry aerosol total organic matter", - "comment": "Tendency of atmosphere mass content of organic dry aerosol due to dry deposition: This is the sum of dry deposition of POA and dry deposition of SOA (see next two entries). 'Mass' refers to the mass of organic matter, not mass of organic carbon alone. We recommend a scale factor of POM=1.4*OC, unless your model has more detailed info available. Was called dry_pom in old ACCMIP Excel table. Dry deposition includes gravitational settling, impact scavenging, and turbulent deposition.", - "dimensions": "longitude latitude time", - "out_name": "dryoa", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dryso2": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_sulfur_dioxide_due_to_dry_deposition", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "dry deposition rate of so2", - "comment": "dry deposition includes gravitational settling, impact scavenging, and turbulent deposition", - "dimensions": "longitude latitude time", - "out_name": "dryso2", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dryso4": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_dry_deposition", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "dry deposition rate of so4", - "comment": "dry deposition includes gravitational settling, impact scavenging, and turbulent deposition", - "dimensions": "longitude latitude time", - "out_name": "dryso4", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dryss": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_seasalt_dry_aerosol_particles_due_to_dry_deposition", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "dry deposition rate of seasalt", - "comment": "Dry deposition includes gravitational settling, impact scavenging, and turbulent deposition.", - "dimensions": 
"longitude latitude time", - "out_name": "dryss", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "emiaco": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_carbon_monoxide_due_to_emission", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "total emission rate of anthropogenic co", - "comment": "anthrophogenic emission of CO", - "dimensions": "longitude latitude time", - "out_name": "emiaco", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "emianox": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_nox_expressed_as_nitrogen_due_to_emission", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "anthropogenic emission rate of nox", - "comment": "Store flux as Nitrogen. Anthropogenic fraction. NOx=NO+NO2, Includes agricultural waste burning but no other biomass burning. Integrate 3D emission field vertically to 2d field.", - "dimensions": "longitude latitude time", - "out_name": "emianox", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "emiaoa": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production_and_emission", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "total emission of anthropogenic organic aerosol", - "comment": "anthropogenic part of emioa", - "dimensions": "longitude latitude time", - "out_name": "emiaoa", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "emibc": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "emission rate of black carbon aerosol mass", - "comment": "Integrate 3D emission field vertically to 2d field.", - "dimensions": "longitude latitude time", - "out_name": "emibc", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "emibvoc": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_biogenic_nmvoc_expressed_as_carbon_due_to_emission", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "total emission rate of biogenic nmvoc", - "comment": "Integrate 3D emission field vertically to 2d field._If_ fixed molecular weight of NMVOC is not available in model, please provide in units of kilomole m-2 s-1 (i.e. 
kg m-2 s-1 as if model NMVOC had molecular weight of 1) and add a comment to your file.", - "dimensions": "longitude latitude time", - "out_name": "emibvoc", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "emico": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_carbon_monoxide_due_to_emission", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "total emission rate of co", - "comment": "Integrate 3D emission field vertically to 2d field.", - "dimensions": "longitude latitude time", - "out_name": "emico", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "emidms": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_dimethyl_sulfide_due_to_emission", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "total emission rate of dms", - "comment": "Integrate 3D emission field vertically to 2d field.", - "dimensions": "longitude latitude time", - "out_name": "emidms", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "emidust": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_emission", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "total emission rate of dust", - "comment": "Integrate 3D emission field vertically to 2d field.", - "dimensions": "longitude latitude time", - "out_name": "emidust", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "emiisop": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_isoprene_due_to_emission", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "total emission rate of isoprene", - "comment": "Integrate 3D emission field vertically to 2d field", - "dimensions": "longitude latitude time", - "out_name": "emiisop", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "emilnox": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_moles_of_nox_expressed_as_nitrogen", - "units": "mol s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "layer-integrated lightning production of NOx", - "comment": "Integrate the NOx production for lightning over model layer. 
proposed name: tendency_of_atmosphere_mass_content_of_nox_from_lightning", - "dimensions": "longitude latitude alevel time", - "out_name": "emilnox", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "eminh3": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_ammonia_due_to_emission", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "total emission rate of nh3", - "comment": "Integrate 3D emission field vertically to 2d field.", - "dimensions": "longitude latitude time", - "out_name": "eminh3", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "eminox": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_nox_expressed_as_nitrogen_due_to_emission", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "total emission rate of nox", - "comment": "NOx=NO+NO2. Integrate 3D emission field vertically to 2d field.", - "dimensions": "longitude latitude time", - "out_name": "eminox", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "emioa": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production_and_emission", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "primary emission and chemical production of dry aerosol organic matter", - "comment": "This is the sum of total emission of POA and total production of SOA (emipoa+chepsoa). ''Mass'' refers to the mass of organic matter, not mass of organic carbon alone. We recommend a scale factor of POM=1.4*OC, unless your model has more detailed info available. Integrate 3D chemical production and emission field vertically to 2d field.", - "dimensions": "longitude latitude time", - "out_name": "emioa", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "emiso2": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_sulfur_dioxide_due_to_emission", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "total emission rate of so2", - "comment": "Integrate 3D emission field vertically to 2d field.", - "dimensions": "longitude latitude time", - "out_name": "emiso2", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "emiso4": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_emission", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "total direct emission rate of so4", - "comment": "Direct primary emission does not include secondary sulfate production. 
Integrate 3D emission field vertically to 2d field.", - "dimensions": "longitude latitude time", - "out_name": "emiso4", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "emiss": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_seasalt_dry_aerosol_particles_due_to_emission", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "total emission rate of seasalt", - "comment": "Integrate 3D emission field vertically to 2d field.", - "dimensions": "longitude latitude time", - "out_name": "emiss", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "emivoc": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_nmvoc_due_to_emission", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "total emission rate of nmvoc", - "comment": "Integrate 3D emission field vertically to 2d field. _If_ fixed molecular weight of NMVOC is not available in model, please provide in units of kilomole m-2 s-1 (i.e. kg m-2 s-1 as if model NMVOC had molecular weight of 1) and add a comment to your file.", - "dimensions": "longitude latitude time", - "out_name": "emivoc", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "h2o": { - "modeling_realm": "aerosol", - "standard_name": "mass_fraction_of_water_in_air", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Water vapour volume mixing ratio", - "comment": "includes all phases of water", - "dimensions": "longitude latitude alevel time", - "out_name": "h2o", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hcho": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_formaldehyde_in_air", - "units": "mol mol-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Formaldehyde volume mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "hcho", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hcl": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_hydrogen_chloride_in_air", - "units": "mol mol-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "HCl volume mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "hcl", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hno3": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_nitric_acid_in_air", - "units": "mol mol-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "HNO3 volume mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "hno3", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "isop": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_isoprene_in_air", - "units": "mol 
mol-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Isoprene volume mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "isop", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "jno2": { - "modeling_realm": "aerosol", - "standard_name": "photolysis_rate_of_nitrogen_dioxide", - "units": "s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "photolysis rate of NO2", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "jno2", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "lossch4": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mole_concentration_of_methane_due_to_chemical_destruction", - "units": "mol m-3 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Monthly Loss of atmospheric Methane", - "comment": "monthly averaged atmospheric loss", - "dimensions": "longitude latitude alevel time", - "out_name": "lossch4", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "lossco": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_of_mole_concentration_of_carbon_monoxide_due_to_chemical_destruction", - "units": "mol m-3 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Monthly Loss of atmospheric Carbon Monoxide", - "comment": "monthly averaged atmospheric loss", - "dimensions": "longitude latitude alevel time", - "out_name": "lossco", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "lossn2o": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_of_mole_concentration_of_nitrous_oxide_due_to_chemical_destruction", - "units": "mol m-3 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Monthly Loss of atmospheric Nitrous Oxide", - "comment": "monthly averaged atmospheric loss", - "dimensions": "longitude latitude alevel time", - "out_name": "lossn2o", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "lwp": { - "modeling_realm": "aerosol", - "standard_name": "atmosphere_mass_content_of_cloud_liquid_water", - "units": "kg m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "liquid water path", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "lwp", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mmraerh2o": { - "modeling_realm": "aerosol", - "standard_name": "mass_fraction_of_water_in_ambient_aerosol_particles_in_air", - "units": "kg kg-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Aerosol water mass mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "mmraerh2o", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mmrbc": { - "modeling_realm": "aerosol", - "standard_name": 
"mass_fraction_of_elemental_carbon_dry_aerosol_particles_in_air", - "units": "kg kg-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Elemental carbon mass mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "mmrbc", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mmrdust": { - "modeling_realm": "aerosol", - "standard_name": "mass_fraction_of_dust_dry_aerosol_particles_in_air", - "units": "kg kg-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Dust aerosol mass mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "mmrdust", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mmrnh4": { - "modeling_realm": "aerosol", - "standard_name": "mass_fraction_of_ammonium_dry_aerosol_particles_in_air", - "units": "kg kg-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "NH4 mass mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "mmrnh4", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mmrno3": { - "modeling_realm": "aerosol", - "standard_name": "mass_fraction_of_nitrate_dry_aerosol_particles_in_air", - "units": "kg kg-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "NO3 aerosol mass mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "mmrno3", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mmroa": { - "modeling_realm": "aerosol", - "standard_name": "mass_fraction_of_particulate_organic_matter_dry_aerosol_particles_in_air", - "units": "kg kg-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Total organic aerosol mass mixing ratio", - "comment": "We recommend a scale factor of POM=1.4*OC, unless your model has more detailed info available.", - "dimensions": "longitude latitude alevel time", - "out_name": "mmroa", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mmrpm1": { - "modeling_realm": "aerosol", - "standard_name": "mass_fraction_of_pm1_dry_aerosol_particles_in_air", - "units": "kg kg-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "PM1.0 mass mixing ratio", - "comment": "E.g. mass_fraction_of_pm1_aerosol_at_50_percent_relative_humidity_in_air. proposed name: mass_fraction_of_pm1_dry_aerosol_in_air", - "dimensions": "longitude latitude alevel time", - "out_name": "mmrpm1", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mmrpm10": { - "modeling_realm": "aerosol", - "standard_name": "mass_fraction_of_pm10_ambient_aerosol_particles_in_air", - "units": "kg kg-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "PM10 mass mixing ratio", - "comment": "E.g. 
mass_fraction_of_pm10_aerosol_at_50_percent_relative_humidity_in_air, proposed name: mass_fraction_of_pm10_dry_aerosol_in_air", - "dimensions": "longitude latitude alevel time", - "out_name": "mmrpm10", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mmrpm2p5": { - "modeling_realm": "aerosol", - "standard_name": "mass_fraction_of_pm2p5_dry_aerosol_particles_in_air", - "units": "kg kg-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "PM2.5 mass mixing ratio", - "comment": "E.g. mass_fraction_of_pm2p5_aerosol_at_50_percent_relative_humidity_in_air, proposed_name: mass_fraction_of_pm2p5_dry_aerosol_in_air", - "dimensions": "longitude latitude alevel time", - "out_name": "mmrpm2p5", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mmrso4": { - "modeling_realm": "aerosol", - "standard_name": "mass_fraction_of_sulfate_dry_aerosol_particles_in_air", - "units": "kg kg-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Aerosol sulfate mass mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "mmrso4", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mmrsoa": { - "modeling_realm": "aerosol", - "standard_name": "mass_fraction_of_secondary_particulate_organic_matter_dry_aerosol_particles_in_air", - "units": "kg kg-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Secondary organic aerosol mass mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "mmrsoa", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mmrss": { - "modeling_realm": "aerosol", - "standard_name": "mass_fraction_of_seasalt_dry_aerosol_particles_in_air", - "units": "kg kg-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Sea Salt mass mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "mmrss", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "n2o": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_nitrous_oxide_in_air", - "units": "mol mol-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "N2O volume mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "n2o", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nh50": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_artificial_tracer_with_fixed_lifetime_in_air", - "units": "mol mol-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Artificial tracer with 50 day lifetime", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "nh50", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "no": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_nitrogen_monoxide_in_air", - "units": "mol mol-1", - 
"cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "NO volume mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "no", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "no2": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_nitrogen_dioxide_in_air", - "units": "mol mol-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "NO2 volume mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "no2", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "o3": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_ozone_in_air", - "units": "mol mol-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Ozone volume mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "o3", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "o3loss": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mole_concentration_of_ozone_due_to_chemical_destruction", - "units": "mol m-3 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "O3 destruction rate", - "comment": "ONLY provide the sum of the following reactions: (i) O(1D)+H2O; (ii) O3+HO2; (iii) O3+OH; (iv) O3+alkenes (isoprene, ethene,...)", - "dimensions": "longitude latitude alevel time", - "out_name": "o3loss", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "o3prod": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mole_concentration_of_ozone_due_to_chemical_production", - "units": "mol m-3 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "O3 production rate", - "comment": "ONLY provide the sum of all the HO2/RO2 + NO reactions (as k*[HO2]*[NO])", - "dimensions": "longitude latitude alevel time", - "out_name": "o3prod", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "o3ste": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_ozone_in_air", - "units": "mol mol-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Ozone volume mixing ratio", - "comment": "Ozone tracer intended to map out strat-trop exchange (STE) of ozone.", - "dimensions": "longitude latitude alevel time", - "out_name": "o3ste", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "od440aer": { - "modeling_realm": "aerosol", - "standard_name": "atmosphere_optical_thickness_due_to_ambient_aerosol_particles", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "ambient aerosol optical thickness at 440 nm", - "comment": "AOD from the ambient aerosls (i.e., includes aerosol water). Does not include AOD from stratospheric aerosols if these are prescribed but includes other possible background aerosol types. 
Needs a comment attribute 'wavelength: 440 nm'", - "dimensions": "longitude latitude time", - "out_name": "od440aer", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "od550aer": { - "modeling_realm": "aerosol", - "standard_name": "atmosphere_optical_thickness_due_to_ambient_aerosol_particles", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "ambient aerosol optical thickness at 550 nm", - "comment": "AOD from the ambient aerosols (i.e., includes aerosol water). Does not include AOD from stratospheric aerosols if these are prescribed but includes other possible background aerosol types. Needs a comment attribute 'wavelength: 550 nm'", - "dimensions": "longitude latitude time", - "out_name": "od550aer", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "od550aerh2o": { - "modeling_realm": "aerosol", - "standard_name": "atmosphere_optical_thickness_due_to_water_in_ambient_aerosol", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "aerosol water aod@550nm", - "comment": "proposed name: atmosphere_optical_thickness_due_to_water_ambient_aerosol", - "dimensions": "longitude latitude time", - "out_name": "od550aerh2o", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "od550bb": { - "modeling_realm": "aerosol", - "standard_name": "atmosphere_optical_thickness_due_to_particulate_organic_matter_ambient_aerosol", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "bb aod@550nm", - "comment": "total organic aerosol AOD due to biomass burning (excluding so4, nitrate BB components)", - "dimensions": "longitude latitude time", - "out_name": "od550bb", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "od550bc": { - "modeling_realm": "aerosol", - "standard_name": "atmosphere_optical_thickness_due_to_black_carbon_ambient_aerosol", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "black carbon aod@550nm", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "od550bc", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "od550csaer": { - "modeling_realm": "aerosol", - "standard_name": "atmosphere_optical_thickness_due_to_ambient_aerosol_particles", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "ambient aerosol optical thickness at 550 nm", - "comment": "AOD from the ambient aerosols in clear skies if od550aer is for all-sky (i.e., includes aerosol water). Does not include AOD from stratospheric aerosols if these are prescribed but includes other possible background aerosol types. 
Needs a comment attribute 'wavelength: 550 nm'", - "dimensions": "longitude latitude time", - "out_name": "od550csaer", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "od550dust": { - "modeling_realm": "aerosol", - "standard_name": "atmosphere_optical_thickness_due_to_dust_ambient_aerosol", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "dust aod@550nm", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "od550dust", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "od550lt1aer": { - "modeling_realm": "aerosol", - "standard_name": "atmosphere_optical_thickness_due_to_pm1_ambient_aerosol", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "ambient fine mode aerosol optical thickness at 550 nm", - "comment": "od550 due to particles with wet diameter less than 1 um (ambient here means wetted). When models do not include explicit size information, it can be assumed that all anthropogenic aerosols and natural secondary aerosols have diameter less than 1 um.", - "dimensions": "longitude latitude time", - "out_name": "od550lt1aer", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "od550no3": { - "modeling_realm": "aerosol", - "standard_name": "atmosphere_optical_thickness_due_to_nitrate_ambient_aerosol", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "nitrate aod@550nm", - "comment": "proposed name: atmosphere_optical_thickness_due_to_nitrate_ambient_aerosol", - "dimensions": "longitude latitude time", - "out_name": "od550no3", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "od550oa": { - "modeling_realm": "aerosol", - "standard_name": "atmosphere_optical_thickness_due_to_particulate_organic_matter_ambient_aerosol", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "total organic aerosol aod@550nm", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "od550oa", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "od550so4": { - "modeling_realm": "aerosol", - "standard_name": "atmosphere_optical_thickness_due_to_sulfate_ambient_aerosol", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "sulfate aod@550nm", - "comment": "proposed name: atmosphere_optical_thickness_due_to_sulfate_ambient_aerosol", - "dimensions": "longitude latitude time", - "out_name": "od550so4", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "od550soa": { - "modeling_realm": "aerosol", - "standard_name": "atmosphere_optical_thickness_due_to_particulate_organic_matter_ambient_aerosol", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "soa aod@550nm", - "comment": "total organic aerosol AOD due to secondary aerosol formation", - "dimensions": "longitude latitude time", - "out_name": "od550soa", - "type": "float", - "positive": "", - "valid_min": 
"", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "od550ss": { - "modeling_realm": "aerosol", - "standard_name": "atmosphere_optical_thickness_due_to_seasalt_ambient_aerosol", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "sea salt aod@550nm", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "od550ss", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "od870aer": { - "modeling_realm": "aerosol", - "standard_name": "atmosphere_optical_thickness_due_to_ambient_aerosol_particles", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "ambient aerosol optical thickness at 870 nm", - "comment": "AOD from the ambient aerosls (i.e., includes aerosol water). Does not include AOD from stratospheric aerosols if these are prescribed but includes other possible background aerosol types. Needs a comment attribute 'wavelength: 870 nm'", - "dimensions": "longitude latitude time", - "out_name": "od870aer", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "oh": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_hydroxyl_radical_in_air", - "units": "mol mol-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "OH volume mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "oh", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pan": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_peroxyacetyl_nitrate_in_air", - "units": "mol mol-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "PAN volume mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "pan", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pfull": { - "modeling_realm": "aerosol", - "standard_name": "air_pressure", - "units": "Pa", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Air Pressure", - "comment": "Air pressure on model levels", - "dimensions": "longitude latitude alevel time", - "out_name": "pfull", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phalf": { - "modeling_realm": "aerosol", - "standard_name": "air_pressure", - "units": "Pa", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "air pressure at interfaces", - "comment": "Air pressure on model half-levels", - "dimensions": "longitude latitude alevel time", - "out_name": "phalf", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "photo1d": { - "modeling_realm": "aerosol", - "standard_name": "photolysis_rate_of_ozone_to_1D_oxygen_atom", - "units": "s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "photolysis rate of O3 to O1d", - "comment": "proposed name: photolysis_rate_of_ozone_to_O1D", - "dimensions": "longitude latitude alevel time", - "out_name": "photo1d", - "type": "float", - 
"positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pod0": { - "modeling_realm": "aerosol", - "standard_name": "pending_CF_name", - "units": "mol m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Phytotoxic ozone dose", - "comment": "Accumulated stomatal ozone flux over the threshold of 0 mol m-2 s-1; Computation: Time Integral of (hourly above canopy ozone concentration * stomatal conductance * Rc/(Rb+Rc) )", - "dimensions": "longitude latitude time", - "out_name": "pod0", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ps": { - "modeling_realm": "atmos", - "standard_name": "surface_air_pressure", - "units": "Pa", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Pressure", - "comment": "surface pressure (not mean sea-level pressure), 2-D field to calculate the 3-D pressure field from hybrid coordinates", - "dimensions": "longitude latitude time", - "out_name": "ps", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ptp": { - "modeling_realm": "aerosol", - "standard_name": "tropopause_air_pressure", - "units": "Pa", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Tropopause Air Pressure", - "comment": "2D monthly mean thermal tropopause calculated using WMO tropopause definition on 3d temperature", - "dimensions": "longitude latitude time", - "out_name": "ptp", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "reffclwtop": { - "modeling_realm": "aerosol", - "standard_name": "effective_radius_of_cloud_liquid_water_particle_at_liquid_water_cloud_top", - "units": "m", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "cloud-top effective droplet radius", - "comment": "Droplets are liquid only. This is the effective radius as seen from space over liquid cloudy portion of grid cell. This is the value from uppermost model layer with liquid cloud or, if available, or for some models it is the sum over all liquid cloud tops, no matter where they occur, as long as they are seen from the top of the atmosphere.TOA) each time sample when computing monthly mean. 
Reported values are weighted by total liquid cloud top fraction of (as seen from", - "dimensions": "longitude latitude time", - "out_name": "reffclwtop", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlutaf": { - "modeling_realm": "aerosol", - "standard_name": "toa_outgoing_longwave_flux", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "toa outgoing longwave radiation", - "comment": "Flux corresponding to rlut resulting fom aerosol-free call to radiation, follwing Ghan (ACP, 2013)", - "dimensions": "longitude latitude time", - "out_name": "rlutaf", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlutcsaf": { - "modeling_realm": "aerosol", - "standard_name": "toa_outgoing_longwave_flux_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "toa outgoing clear-sky longwave radiation", - "comment": "Flux corresponding to rlutcs resulting fom aerosol-free call to radiation, follwing Ghan (ACP, 2013)", - "dimensions": "longitude latitude time", - "out_name": "rlutcsaf", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsutaf": { - "modeling_realm": "aerosol", - "standard_name": "toa_outgoing_shortwave_flux", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "toa outgoing shortwave radiation", - "comment": "Flux corresponding to rsut resulting fom aerosol-free call to radiation, following Ghan (ACP, 2013)", - "dimensions": "longitude latitude time", - "out_name": "rsutaf", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsutcsaf": { - "modeling_realm": "aerosol", - "standard_name": "toa_outgoing_shortwave_flux_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "toa outgoing clear-sky shortwave radiation", - "comment": "Flux corresponding to rsutcs resulting fom aerosol-free call to radiation, following Ghan (ACP, 2013)", - "dimensions": "longitude latitude time", - "out_name": "rsutcsaf", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "so2": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_sulfur_dioxide_in_air", - "units": "mol mol-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "SO2 volume mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "so2", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tatp": { - "modeling_realm": "aerosol", - "standard_name": "tropopause_air_temperature", - "units": "K", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Tropopause Air Temperature", - "comment": "2D monthly mean thermal tropopause calculated using WMO tropopause definition on 3d temperature", - "dimensions": "longitude latitude time", - "out_name": "tatp", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" 
- }, - "tntrl": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_air_temperature_due_to_longwave_heating", - "units": "K s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Longwave heating rate", - "comment": "Tendency of air temperature due to longwave radiative heating", - "dimensions": "longitude latitude alevel time", - "out_name": "tntrl", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntrs": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_air_temperature_due_to_shortwave_heating", - "units": "K s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Shortwave heating rate", - "comment": "Tendency of air temperature due to shortwave radiative heating", - "dimensions": "longitude latitude alevel time", - "out_name": "tntrs", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "toz": { - "modeling_realm": "aerosol", - "standard_name": "equivalent_thickness_at_stp_of_atmosphere_ozone_content", - "units": "m", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Ozone Column", - "comment": "total ozone column in DU", - "dimensions": "longitude latitude time", - "out_name": "toz", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tropoz": { - "modeling_realm": "aerosol", - "standard_name": "equivalent_thickness_at_stp_of_atmosphere_ozone_content", - "units": "m", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "tropospheric ozone column", - "comment": "tropospheric ozone column in DU, should be consistent with ptp definition of tropopause", - "dimensions": "longitude latitude time", - "out_name": "tropoz", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ttop": { - "modeling_realm": "aerosol", - "standard_name": "air_temperature_at_cloud_top", - "units": "K", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "air temperature at cloud top", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "ttop", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ua": { - "modeling_realm": "aerosol", - "standard_name": "eastward_wind", - "units": "m s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Eastward Wind", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "ua", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "va": { - "modeling_realm": "aerosol", - "standard_name": "northward_wind", - "units": "m s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Northward Wind", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "va", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wa": { - "modeling_realm": "aerosol", - "standard_name": "upward_air_velocity", - "units": "m s-1", - "cell_methods": "area: time: mean", - 
"cell_measures": "area: areacella", - "long_name": "Upward Air Velocity", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "wa", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wetbc": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_wet_deposition", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "wet deposition rate of black carbon aerosol mass", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "wetbc", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wetdust": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_wet_deposition", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "wet deposition rate of dust", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "wetdust", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wetnh3": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_ammonia_due_to_wet_deposition", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "wet deposition rate of nh3", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "wetnh3", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wetnh4": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_ammonium_dry_aerosol_particles_due_to_wet_deposition", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "wet deposition rate of nh4", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "wetnh4", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wetnoy": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_noy_expressed_as_nitrogen_due_to_wet_deposition", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "wet deposition of noy incl aerosol nitrate", - "comment": "NOy is the sum of all simulated oxidized nitrogen species, out of NO, NO2, HNO3, HNO4, NO3aerosol, NO3(radical), N2O5, PAN, other organic nitrates.", - "dimensions": "longitude latitude time", - "out_name": "wetnoy", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wetoa": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_wet_deposition", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "wet deposition rate of dry aerosol total organic matter", - "comment": "tendency of atmosphere mass content of organic matter dry aerosols due to wet deposition: This is the sum of wet deposition of POA and wet 
deposition of SOA (see next two entries). Mass here refers to the mass of organic matter, not mass of organic carbon alone. We recommend a scale factor of POM=1.4*OC, unless your model has more detailed info available. Was called wet_pom in old ACCMIP Excel spreadsheet.", - "dimensions": "longitude latitude time", - "out_name": "wetoa", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wetso2": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_sulfur_dioxide_due_to_wet_deposition", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "wet deposition rate of so2", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "wetso2", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wetso4": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_wet_deposition", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "wet deposition rate of so4", - "comment": "proposed name: tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_due_to_wet_deposition", - "dimensions": "longitude latitude time", - "out_name": "wetso4", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wetss": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_atmosphere_mass_content_of_seasalt_dry_aerosol_particles_due_to_wet_deposition", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "wet deposition rate of seasalt", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "wetss", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zg": { - "modeling_realm": "atmos", - "standard_name": "geopotential_height", - "units": "m", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Geopotential Height", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "zg", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ztp": { - "modeling_realm": "aerosol", - "standard_name": "tropopause_altitude", - "units": "m", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Tropopause Altitude", - "comment": "2D monthly mean thermal tropopause calculated using WMO tropopause definition on 3d temperature", - "dimensions": "longitude latitude time", - "out_name": "ztp", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_AERmonZ.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_AERmonZ.json deleted file mode 100644 index 72d3136c20..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_AERmonZ.json +++ /dev/null @@ -1,307 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table AERmonZ", - "realm": "aerosol", - "frequency": "mon", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": 
"model-output", - "approx_interval": "", - "generic_levels": "", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "bry": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_inorganic_bromine_in_air", - "units": "mol mol-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Total inorganic bromine volume mixing ratio", - "comment": "Total family (the sum of all appropriate species in the model) ; list the species in the netCDF header, e.g. Bry = Br + BrO + HOBr + HBr + BrONO2 + BrCl Definition: Total inorganic bromine (e.g., HBr and inorganic bromine oxides and radicals (e.g., BrO, atomic bromine (Br), bromine nitrate (BrONO2)) resulting from degradation of bromine-containing organicsource gases (halons, methyl bromide, VSLS), and natural inorganic bromine sources (e.g., volcanoes, sea salt, and other aerosols) add comment attribute with detailed description about how the model calculates these fields", - "dimensions": "latitude plev39 time", - "out_name": "bry", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ch4": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_methane_in_air", - "units": "mol mol-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "CH4 volume mixing ratio", - "comment": "", - "dimensions": "latitude plev39 time", - "out_name": "ch4", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cly": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_inorganic_chlorine_in_air", - "units": "mol mol-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Total inorganic chlorine volume mixing ratio", - "comment": "Total family (the sum of all appropriate species in the model) ; list the species in the netCDF header, e.g. 
Cly = HCl + ClONO2 + HOCl + ClO + Cl + 2*Cl2O2 +2Cl2 + OClO + BrCl Definition: Total inorganic stratospheric chlorine (e.g., HCl, ClO) resulting from degradation of chlorine-containing source gases (CFCs, HCFCs, VSLS), and natural inorganic chlorine sources (e.g., sea salt and other aerosols) add comment attribute with detailed description about how the model calculates these fields", - "dimensions": "latitude plev39 time", - "out_name": "cly", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "h2o": { - "modeling_realm": "aerosol", - "standard_name": "mass_fraction_of_water_in_air", - "units": "1.0", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "H2O volume mixing ratio", - "comment": "includes all phases of water", - "dimensions": "latitude plev39 time", - "out_name": "h2o", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hcl": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_hydrogen_chloride_in_air", - "units": "mol mol-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "HCl volume mixing ratio", - "comment": "", - "dimensions": "latitude plev39 time", - "out_name": "hcl", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hno3": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_nitric_acid_in_air", - "units": "mol mol-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "HNO3 volume mixing ratio", - "comment": "", - "dimensions": "latitude plev39 time", - "out_name": "hno3", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ho2": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_hydroperoxyl_radical_in_air", - "units": "mol mol-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "HO2 volume mixing ratio", - "comment": "", - "dimensions": "latitude plev39 time", - "out_name": "ho2", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "meanage": { - "modeling_realm": "aerosol", - "standard_name": "age_of_stratospheric_air", - "units": "yr", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Mean Age of Stratospheric Air", - "comment": "The mean age of air is defined as the mean time that a stratospheric air mass has been out of contact with the well-mixed troposphere.", - "dimensions": "latitude plev39 time", - "out_name": "meanage", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "n2o": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_nitrous_oxide_in_air", - "units": "mol mol-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "N2O volume mixing ratio", - "comment": "", - "dimensions": "latitude plev39 time", - "out_name": "n2o", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "noy": { - "modeling_realm": "aerosol", - 
"standard_name": "mole_fraction_of_noy_expressed_as_nitrogen_in_air", - "units": "mol mol-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Total reactive nitrogen volume mixing ratio", - "comment": "Total family (the sum of all appropriate species in the model); list the species in the netCDF header, e.g. NOy = N + NO + NO2 + NO3 + HNO3 + 2N2O5 + HNO4 + ClONO2 + BrONO2 Definition: Total reactive nitrogen; usually includes atomic nitrogen (N), nitric oxide (NO), NO2, nitrogen trioxide (NO3), dinitrogen radical (N2O5), nitric acid (HNO3), peroxynitric acid (HNO4), BrONO2, ClONO2 add comment attribute with detailed description about how the model calculates these fields", - "dimensions": "latitude plev39 time", - "out_name": "noy", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "o3": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_ozone_in_air", - "units": "mol mol-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Ozone volume mixing ratio", - "comment": "", - "dimensions": "latitude plev39 time", - "out_name": "o3", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "oh": { - "modeling_realm": "aerosol", - "standard_name": "mole_fraction_of_hydroxyl_radical_in_air", - "units": "mol mol-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "OH volume mixing ratio", - "comment": "", - "dimensions": "latitude plev39 time", - "out_name": "oh", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ta": { - "modeling_realm": "aerosol", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Air Temperature", - "comment": "Air Temperature", - "dimensions": "latitude plev39 time", - "out_name": "ta", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ua": { - "modeling_realm": "aerosol", - "standard_name": "eastward_wind", - "units": "m s-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Eastward Wind", - "comment": "", - "dimensions": "latitude plev39 time", - "out_name": "ua", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "va": { - "modeling_realm": "aerosol", - "standard_name": "northward_wind", - "units": "m s-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Northward Wind", - "comment": "", - "dimensions": "latitude plev39 time", - "out_name": "va", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vt100": { - "modeling_realm": "aerosol", - "standard_name": "northward_heat_flux_in_air_due_to_eddy_advection", - "units": "W m-2", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Northward heat flux due to eddies", - "comment": "Zonally averaged meridional heat flux at 100 hPa as monthly means derived from daily (or higher frequency) fields.", - "dimensions": "latitude time 
p100", - "out_name": "vt100", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zg": { - "modeling_realm": "atmos", - "standard_name": "geopotential_height", - "units": "m", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Geopotential Height", - "comment": "", - "dimensions": "latitude plev39 time", - "out_name": "zg", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Amon.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Amon.json deleted file mode 100644 index 8d1eb4181b..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Amon.json +++ /dev/null @@ -1,1293 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table Amon", - "realm": "atmos atmosChem", - "frequency": "mon", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "30.00000", - "generic_levels": "alevel alevhalf", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "ccb": { - "modeling_realm": "atmos", - "standard_name": "air_pressure_at_convective_cloud_base", - "units": "Pa", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Air Pressure at Convective Cloud Base", - "comment": "Where convective cloud is present in the grid cell, the instantaneous cloud base altitude should be that of the bottom of the lowest level containing convective cloud. Missing data should be reported in the absence of convective cloud. The time mean should be calculated from these quantities averaging over occasions when convective cloud is present only, and should contain missing data for occasions when no convective cloud is present during the meaning period.", - "dimensions": "longitude latitude time", - "out_name": "ccb", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cct": { - "modeling_realm": "atmos", - "standard_name": "air_pressure_at_convective_cloud_top", - "units": "Pa", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Air Pressure at Convective Cloud Top", - "comment": "Where convective cloud is present in the grid cell, the instantaneous cloud top altitude should be that of the top of the highest level containing convective cloud. Missing data should be reported in the absence of convective cloud. 
The time mean should be calculated from these quantities averaging over occasions when convective cloud is present only, and should contain missing data for occasions when no convective cloud is present during the meaning period.", - "dimensions": "longitude latitude time", - "out_name": "cct", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cfc113global": { - "modeling_realm": "atmos atmosChem", - "standard_name": "mole_fraction_of_cfc113_in_air", - "units": "1e-12", - "cell_methods": "area: time: mean", - "cell_measures": "", - "long_name": "Global Mean Mole Fraction of CFC113", - "comment": "", - "dimensions": "time", - "out_name": "cfc113global", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cfc11global": { - "modeling_realm": "atmos atmosChem", - "standard_name": "mole_fraction_of_cfc11_in_air", - "units": "1e-12", - "cell_methods": "area: time: mean", - "cell_measures": "", - "long_name": "Global Mean Mole Fraction of CFC11", - "comment": "", - "dimensions": "time", - "out_name": "cfc11global", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cfc12global": { - "modeling_realm": "atmos atmosChem", - "standard_name": "mole_fraction_of_cfc12_in_air", - "units": "1e-12", - "cell_methods": "area: time: mean", - "cell_measures": "", - "long_name": "Global Mean Mole Fraction of CFC12", - "comment": "", - "dimensions": "time", - "out_name": "cfc12global", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ch4": { - "modeling_realm": "atmos atmosChem", - "standard_name": "mole_fraction_of_methane_in_air", - "units": "mol mol-1", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Mole Fraction of CH4", - "comment": "", - "dimensions": "longitude latitude plev19 time", - "out_name": "ch4", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ch4Clim": { - "modeling_realm": "atmos atmosChem", - "standard_name": "mole_fraction_of_methane_in_air", - "units": "mol mol-1", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacella", - "long_name": "Mole Fraction of CH4", - "comment": "", - "dimensions": "longitude latitude plev19 time2", - "out_name": "ch4", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ch4global": { - "modeling_realm": "atmos atmosChem", - "standard_name": "mole_fraction_of_methane_in_air", - "units": "1e-09", - "cell_methods": "area: time: mean", - "cell_measures": "", - "long_name": "Global Mean Mole Fraction of CH4", - "comment": "Global Mean Mole Fraction of CH4", - "dimensions": "time", - "out_name": "ch4global", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ch4globalClim": { - "modeling_realm": "atmos atmosChem", - "standard_name": "mole_fraction_of_methane_in_air", - "units": "1e-09", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "", - "long_name": "Global Mean Mole Fraction of CH4", - "comment": "Global Mean Mole Fraction of CH4", - "dimensions": "time2", - "out_name": 
"ch4global", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ci": { - "modeling_realm": "atmos", - "standard_name": "convection_time_fraction", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Fraction of Time Convection Occurs", - "comment": "Fraction of time that convection occurs in the grid cell.", - "dimensions": "longitude latitude time", - "out_name": "ci", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cl": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Cloud Area Fraction", - "comment": "Percentage cloud cover, including both large-scale and convective cloud.", - "dimensions": "longitude latitude alevel time", - "out_name": "cl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cli": { - "modeling_realm": "atmos", - "standard_name": "mass_fraction_of_cloud_ice_in_air", - "units": "kg kg-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Mass Fraction of Cloud Ice", - "comment": "Includes both large-scale and convective cloud. This is calculated as the mass of cloud ice in the grid cell divided by the mass of air (including the water in all phases) in the grid cell. It includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.", - "dimensions": "longitude latitude alevel time", - "out_name": "cli", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clivi": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_cloud_ice_content", - "units": "kg m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Ice Water Path", - "comment": "mass of ice water in the column divided by the area of the column (not just the area of the cloudy portion of the column). Includes precipitating frozen hydrometeors ONLY if the precipitating hydrometeor affects the calculation of radiative transfer in model.", - "dimensions": "longitude latitude time", - "out_name": "clivi", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clt": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Cloud Fraction", - "comment": "Total cloud area fraction for the whole atmospheric column, as seen from the surface or the top of the atmosphere. Includes both large-scale and convective cloud.", - "dimensions": "longitude latitude time", - "out_name": "clt", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clw": { - "modeling_realm": "atmos", - "standard_name": "mass_fraction_of_cloud_liquid_water_in_air", - "units": "kg kg-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Mass Fraction of Cloud Liquid Water", - "comment": "Includes both large-scale and convective cloud. 
Calculate as the mass of cloud liquid water in the grid cell divided by the mass of air (including the water in all phases) in the grid cells. Precipitating hydrometeors are included ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.", - "dimensions": "longitude latitude alevel time", - "out_name": "clw", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clwvi": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_cloud_condensed_water_content", - "units": "kg m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Condensed Water Path", - "comment": "Mass of condensed (liquid + ice) water in the column divided by the area of the column (not just the area of the cloudy portion of the column). Includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.", - "dimensions": "longitude latitude time", - "out_name": "clwvi", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "co2": { - "modeling_realm": "atmos", - "standard_name": "mole_fraction_of_carbon_dioxide_in_air", - "units": "mol mol-1", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Mole Fraction of CO2", - "comment": "", - "dimensions": "longitude latitude plev19 time", - "out_name": "co2", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "co2Clim": { - "modeling_realm": "atmos", - "standard_name": "mole_fraction_of_carbon_dioxide_in_air", - "units": "mol mol-1", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacella", - "long_name": "Mole Fraction of CO2", - "comment": "", - "dimensions": "longitude latitude plev19 time2", - "out_name": "co2", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "co2mass": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_mass_of_carbon_dioxide", - "units": "kg", - "cell_methods": "area: time: mean", - "cell_measures": "", - "long_name": "Total Atmospheric Mass of CO2", - "comment": "Total atmospheric mass of Carbon Dioxide", - "dimensions": "time", - "out_name": "co2mass", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "co2massClim": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_mass_of_carbon_dioxide", - "units": "kg", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "", - "long_name": "Total Atmospheric Mass of CO2", - "comment": "Total atmospheric mass of Carbon Dioxide", - "dimensions": "time2", - "out_name": "co2mass", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "evspsbl": { - "modeling_realm": "atmos", - "standard_name": "water_evaporation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Evaporation", - "comment": "Evaporation at surface: flux of water into the atmosphere due to conversion of both liquid and solid phases to vapor (from underlying surface and vegetation)", - "dimensions": "longitude latitude 
time", - "out_name": "evspsbl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fco2antt": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_atmosphere_mass_content_of_carbon_dioxide_expressed_as_carbon_due_to_anthropogenic_emission", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass Flux into Atmosphere Due to All Anthropogenic Emissions of CO2", - "comment": "This is requested only for the emission-driven coupled carbon climate model runs. Does not include natural fire sources but, includes all anthropogenic sources, including fossil fuel use, cement production, agricultural burning, and sources associated with anthropogenic land use change excluding forest regrowth.", - "dimensions": "longitude latitude time", - "out_name": "fco2antt", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fco2fos": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_atmosphere_mass_content_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_fossil_fuel_combustion", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass Flux into Atmosphere Due to Fossil Fuel Emissions of CO2", - "comment": "This is the prescribed anthropogenic CO2 flux from fossil fuel use, including cement production, and flaring (but not from land-use changes, agricultural burning, forest regrowth, etc.)", - "dimensions": "longitude latitude time", - "out_name": "fco2fos", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fco2nat": { - "modeling_realm": "atmos", - "standard_name": "surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_natural_sources", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Carbon Mass Flux into the Atmosphere Due to Natural Sources", - "comment": "This is what the atmosphere sees (on its own grid). 
This field should be equivalent to the combined natural fluxes of carbon that account for natural exchanges between the atmosphere and land (nep) or ocean (fgco2) reservoirs.", - "dimensions": "longitude latitude time", - "out_name": "fco2nat", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hcfc22global": { - "modeling_realm": "atmos atmosChem", - "standard_name": "mole_fraction_of_hcfc22_in_air", - "units": "1e-12", - "cell_methods": "area: time: mean", - "cell_measures": "", - "long_name": "Global Mean Mole Fraction of HCFC22", - "comment": "", - "dimensions": "time", - "out_name": "hcfc22global", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfls": { - "modeling_realm": "atmos", - "standard_name": "surface_upward_latent_heat_flux", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Upward Latent Heat Flux", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "hfls", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfss": { - "modeling_realm": "atmos", - "standard_name": "surface_upward_sensible_heat_flux", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Upward Sensible Heat Flux", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "hfss", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hur": { - "modeling_realm": "atmos", - "standard_name": "relative_humidity", - "units": "%", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Relative Humidity", - "comment": "The relative humidity with respect to liquid water for T> 0 C, and with respect to ice for T<0 C.", - "dimensions": "longitude latitude plev19 time", - "out_name": "hur", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hurs": { - "modeling_realm": "atmos", - "standard_name": "relative_humidity", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Near-Surface Relative Humidity", - "comment": "The relative humidity with respect to liquid water for T> 0 C, and with respect to ice for T<0 C.", - "dimensions": "longitude latitude time height2m", - "out_name": "hurs", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hus": { - "modeling_realm": "atmos", - "standard_name": "specific_humidity", - "units": "1.0", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Specific Humidity", - "comment": "", - "dimensions": "longitude latitude plev19 time", - "out_name": "hus", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "huss": { - "modeling_realm": "atmos", - "standard_name": "specific_humidity", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Near-Surface Specific Humidity", - "comment": "Near-surface (usually, 2 meter) specific humidity.", - "dimensions": "longitude latitude time height2m", - 
"out_name": "huss", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mc": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_net_upward_convective_mass_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Convective Mass Flux", - "comment": "The net mass flux should represent the difference between the updraft and downdraft components. The flux is computed as the mass divided by the area of the grid cell.", - "dimensions": "longitude latitude alevhalf time", - "out_name": "mc", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "n2o": { - "modeling_realm": "atmos atmosChem", - "standard_name": "mole_fraction_of_nitrous_oxide_in_air", - "units": "mol mol-1", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Mole Fraction of N2O", - "comment": "", - "dimensions": "longitude latitude plev19 time", - "out_name": "n2o", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "n2oClim": { - "modeling_realm": "atmos atmosChem", - "standard_name": "mole_fraction_of_nitrous_oxide_in_air", - "units": "mol mol-1", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacella", - "long_name": "Mole Fraction of N2O", - "comment": "", - "dimensions": "longitude latitude plev19 time2", - "out_name": "n2o", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "n2oglobal": { - "modeling_realm": "atmos atmosChem", - "standard_name": "mole_fraction_of_nitrous_oxide_in_air", - "units": "1e-09", - "cell_methods": "area: time: mean", - "cell_measures": "", - "long_name": "Global Mean Mole Fraction of N2O", - "comment": "Global mean Nitrous Oxide (N2O)", - "dimensions": "time", - "out_name": "n2oglobal", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "n2oglobalClim": { - "modeling_realm": "atmos atmosChem", - "standard_name": "mole_fraction_of_nitrous_oxide_in_air", - "units": "1e-09", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "", - "long_name": "Global Mean Mole Fraction of N2O", - "comment": "Global mean Nitrous Oxide (N2O)", - "dimensions": "time2", - "out_name": "n2oglobal", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "o3": { - "modeling_realm": "atmos atmosChem", - "standard_name": "mole_fraction_of_ozone_in_air", - "units": "mol mol-1", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Mole Fraction of O3", - "comment": "", - "dimensions": "longitude latitude plev19 time", - "out_name": "o3", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "o3Clim": { - "modeling_realm": "atmos atmosChem", - "standard_name": "mole_fraction_of_ozone_in_air", - "units": "mol mol-1", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacella", - "long_name": "Mole Fraction of O3", - "comment": "", - "dimensions": "longitude latitude plev19 time2", - "out_name": 
"o3", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pfull": { - "modeling_realm": "atmos", - "standard_name": "air_pressure", - "units": "Pa", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacella", - "long_name": "Pressure on Model Levels", - "comment": "Air pressure on model levels", - "dimensions": "longitude latitude alevel time2", - "out_name": "pfull", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phalf": { - "modeling_realm": "atmos", - "standard_name": "air_pressure", - "units": "Pa", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacella", - "long_name": "Pressure on Model Half-Levels", - "comment": "Air pressure on model half-levels", - "dimensions": "longitude latitude alevhalf time2", - "out_name": "phalf", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pr": { - "modeling_realm": "atmos", - "standard_name": "precipitation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Precipitation", - "comment": "includes both liquid and solid phases", - "dimensions": "longitude latitude time", - "out_name": "pr", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prc": { - "modeling_realm": "atmos", - "standard_name": "convective_precipitation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Convective Precipitation", - "comment": "Convective precipitation at surface; includes both liquid and solid phases.", - "dimensions": "longitude latitude time", - "out_name": "prc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prsn": { - "modeling_realm": "atmos", - "standard_name": "snowfall_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Snowfall Flux", - "comment": "at surface; includes precipitation of all forms of water in the solid phase", - "dimensions": "longitude latitude time", - "out_name": "prsn", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prw": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_water_vapor_content", - "units": "kg m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Water Vapor Path", - "comment": "vertically integrated through the atmospheric column", - "dimensions": "longitude latitude time", - "out_name": "prw", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ps": { - "modeling_realm": "atmos", - "standard_name": "surface_air_pressure", - "units": "Pa", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Air Pressure", - "comment": "surface pressure (not mean sea-level pressure), 2-D field to calculate the 3-D pressure field from hybrid coordinates", - "dimensions": "longitude latitude time", - "out_name": "ps", - "type": "real", - "positive": "", - 
"valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "psl": { - "modeling_realm": "atmos", - "standard_name": "air_pressure_at_sea_level", - "units": "Pa", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Sea Level Pressure", - "comment": "Sea Level Pressure", - "dimensions": "longitude latitude time", - "out_name": "psl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlds": { - "modeling_realm": "atmos", - "standard_name": "surface_downwelling_longwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Downwelling Longwave Radiation", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "rlds", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rldscs": { - "modeling_realm": "atmos", - "standard_name": "surface_downwelling_longwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Downwelling Clear-Sky Longwave Radiation", - "comment": "Surface downwelling clear-sky longwave radiation", - "dimensions": "longitude latitude time", - "out_name": "rldscs", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlus": { - "modeling_realm": "atmos", - "standard_name": "surface_upwelling_longwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Upwelling Longwave Radiation", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "rlus", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlut": { - "modeling_realm": "atmos", - "standard_name": "toa_outgoing_longwave_flux", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "TOA Outgoing Longwave Radiation", - "comment": "at the top of the atmosphere (to be compared with satellite measurements)", - "dimensions": "longitude latitude time", - "out_name": "rlut", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlutcs": { - "modeling_realm": "atmos", - "standard_name": "toa_outgoing_longwave_flux_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "TOA Outgoing Clear-Sky Longwave Radiation", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "rlutcs", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsds": { - "modeling_realm": "atmos", - "standard_name": "surface_downwelling_shortwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Downwelling Shortwave Radiation", - "comment": "surface solar irradiance for UV calculations", - "dimensions": "longitude latitude time", - "out_name": "rsds", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsdscs": 
{ - "modeling_realm": "atmos", - "standard_name": "surface_downwelling_shortwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Downwelling Clear-Sky Shortwave Radiation", - "comment": "surface solar irradiance clear sky for UV calculations", - "dimensions": "longitude latitude time", - "out_name": "rsdscs", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsdt": { - "modeling_realm": "atmos", - "standard_name": "toa_incoming_shortwave_flux", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "TOA Incident Shortwave Radiation", - "comment": "Shortwave radiation incident at the top of the atmosphere", - "dimensions": "longitude latitude time", - "out_name": "rsdt", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsus": { - "modeling_realm": "atmos", - "standard_name": "surface_upwelling_shortwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Upwelling Shortwave Radiation", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "rsus", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsuscs": { - "modeling_realm": "atmos", - "standard_name": "surface_upwelling_shortwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Upwelling Clear-Sky Shortwave Radiation", - "comment": "Surface Upwelling Clear-sky Shortwave Radiation", - "dimensions": "longitude latitude time", - "out_name": "rsuscs", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsut": { - "modeling_realm": "atmos", - "standard_name": "toa_outgoing_shortwave_flux", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "TOA Outgoing Shortwave Radiation", - "comment": "at the top of the atmosphere", - "dimensions": "longitude latitude time", - "out_name": "rsut", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsutcs": { - "modeling_realm": "atmos", - "standard_name": "toa_outgoing_shortwave_flux_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "TOA Outgoing Clear-Sky Shortwave Radiation", - "comment": "Calculated in the absence of clouds.", - "dimensions": "longitude latitude time", - "out_name": "rsutcs", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rtmt": { - "modeling_realm": "atmos", - "standard_name": "net_downward_radiative_flux_at_top_of_atmosphere_model", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Net Downward Flux at Top of Model", - "comment": "Net Downward Radiative Flux at Top of Model : I.e., at the top of that portion of the atmosphere where dynamics are explicitly treated by the model. 
This is reported only if it differs from the net downward radiative flux at the top of the atmosphere.", - "dimensions": "longitude latitude time", - "out_name": "rtmt", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sbl": { - "modeling_realm": "landIce", - "standard_name": "surface_snow_and_ice_sublimation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Snow and Ice Sublimation Flux", - "comment": "The snow and ice sublimation flux is the loss of snow and ice mass per unit area from the surface resulting from their direct conversion to water vapor that enters the atmosphere.", - "dimensions": "longitude latitude time", - "out_name": "sbl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sci": { - "modeling_realm": "atmos", - "standard_name": "shallow_convection_time_fraction", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Fraction of Time Shallow Convection Occurs", - "comment": "Fraction of time that shallow convection occurs in the grid cell.", - "dimensions": "longitude latitude time", - "out_name": "sci", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sfcWind": { - "modeling_realm": "atmos", - "standard_name": "wind_speed", - "units": "m s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Near-Surface Wind Speed", - "comment": "near-surface (usually, 10 meters) wind speed.", - "dimensions": "longitude latitude time height10m", - "out_name": "sfcWind", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ta": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Air Temperature", - "comment": "Air Temperature", - "dimensions": "longitude latitude plev19 time", - "out_name": "ta", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tas": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Near-Surface Air Temperature", - "comment": "near-surface (usually, 2 meter) air temperature", - "dimensions": "longitude latitude time height2m", - "out_name": "tas", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tasmax": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: mean time: maximum within days time: mean over days", - "cell_measures": "area: areacella", - "long_name": "Daily Maximum Near-Surface Air Temperature", - "comment": "maximum near-surface (usually, 2 meter) air temperature (add cell_method attribute 'time: max')", - "dimensions": "longitude latitude time height2m", - "out_name": "tasmax", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tasmin": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - 
"cell_methods": "area: mean time: minimum within days time: mean over days", - "cell_measures": "area: areacella", - "long_name": "Daily Minimum Near-Surface Air Temperature", - "comment": "minimum near-surface (usually, 2 meter) air temperature (add cell_method attribute 'time: min')", - "dimensions": "longitude latitude time height2m", - "out_name": "tasmin", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tauu": { - "modeling_realm": "atmos", - "standard_name": "surface_downward_eastward_stress", - "units": "Pa", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Downward Eastward Wind Stress", - "comment": "Downward eastward wind stress at the surface", - "dimensions": "longitude latitude time", - "out_name": "tauu", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tauv": { - "modeling_realm": "atmos", - "standard_name": "surface_downward_northward_stress", - "units": "Pa", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Downward Northward Wind Stress", - "comment": "Downward northward wind stress at the surface", - "dimensions": "longitude latitude time", - "out_name": "tauv", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ts": { - "modeling_realm": "atmos", - "standard_name": "surface_temperature", - "units": "K", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Temperature", - "comment": "Temperature of the lower boundary of the atmosphere", - "dimensions": "longitude latitude time", - "out_name": "ts", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ua": { - "modeling_realm": "atmos", - "standard_name": "eastward_wind", - "units": "m s-1", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Eastward Wind", - "comment": "", - "dimensions": "longitude latitude plev19 time", - "out_name": "ua", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "uas": { - "modeling_realm": "atmos", - "standard_name": "eastward_wind", - "units": "m s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Eastward Near-Surface Wind", - "comment": "Eastward component of the near-surface (usually, 10 meters) wind", - "dimensions": "longitude latitude time height10m", - "out_name": "uas", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "va": { - "modeling_realm": "atmos", - "standard_name": "northward_wind", - "units": "m s-1", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Northward Wind", - "comment": "", - "dimensions": "longitude latitude plev19 time", - "out_name": "va", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vas": { - "modeling_realm": "atmos", - "standard_name": "northward_wind", - "units": "m s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Northward Near-Surface Wind", - "comment": "Northward component of 
the near surface wind", - "dimensions": "longitude latitude time height10m", - "out_name": "vas", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wap": { - "modeling_realm": "atmos", - "standard_name": "lagrangian_tendency_of_air_pressure", - "units": "Pa s-1", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "omega (=dp/dt)", - "comment": "Omega (vertical velocity in pressure coordinates, positive downwards)", - "dimensions": "longitude latitude plev19 time", - "out_name": "wap", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zg": { - "modeling_realm": "atmos", - "standard_name": "geopotential_height", - "units": "m", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Geopotential Height", - "comment": "", - "dimensions": "longitude latitude plev19 time", - "out_name": "zg", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_CF3hr.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_CF3hr.json deleted file mode 100644 index 21cc30a7d5..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_CF3hr.json +++ /dev/null @@ -1,562 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table CF3hr", - "realm": "atmos", - "frequency": "3hr", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "0.125000", - "generic_levels": "alevel alevhalf", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "clc": { - "modeling_realm": "atmos", - "standard_name": "convective_cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Convective Cloud Area Fraction", - "comment": "Include only convective cloud.", - "dimensions": "longitude latitude alevel time1", - "out_name": "clc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clic": { - "modeling_realm": "atmos", - "standard_name": "mass_fraction_of_convective_cloud_ice_in_air", - "units": "1.0", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Mass Fraction of Convective Cloud Ice", - "comment": "Calculated as the mass of convective cloud ice in the grid cell divided by the mass of air (including the water in all phases) in the grid cell. This includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.", - "dimensions": "longitude latitude alevel time1", - "out_name": "clic", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clis": { - "modeling_realm": "atmos", - "standard_name": "mass_fraction_of_stratiform_cloud_ice_in_air", - "units": "1.0", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Mass Fraction of Stratiform Cloud Ice", - "comment": "Calculated as the mass of stratiform cloud ice in the grid cell divided by the mass of air (including the water in all phases) in the grid cell. 
This includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.", - "dimensions": "longitude latitude alevel time1", - "out_name": "clis", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cls": { - "modeling_realm": "atmos", - "standard_name": "stratiform_cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Stratiform Cloud Area Fraction", - "comment": "", - "dimensions": "longitude latitude alevel time1", - "out_name": "cls", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cltc": { - "modeling_realm": "atmos", - "standard_name": "convective_cloud_area_fraction", - "units": "%", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Convective Cloud Fraction", - "comment": "Convective cloud area fraction for the whole atmospheric column, as seen from the surface or the top of the atmosphere. Includes only convective cloud.", - "dimensions": "longitude latitude time1", - "out_name": "cltc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clwc": { - "modeling_realm": "atmos", - "standard_name": "mass_fraction_of_convective_cloud_liquid_water_in_air", - "units": "1.0", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Mass Fraction of Convective Cloud Liquid Water", - "comment": "Calculated as the mass of convective cloud liquid water in the grid cell divided by the mass of air (including the water in all phases) in the grid cell. This includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.", - "dimensions": "longitude latitude alevel time1", - "out_name": "clwc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clws": { - "modeling_realm": "atmos", - "standard_name": "mass_fraction_of_stratiform_cloud_liquid_water_in_air", - "units": "1.0", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Mass Fraction of Stratiform Cloud Liquid Water", - "comment": "Calculated as the mass of stratiform cloud liquid water in the grid cell divided by the mass of air (including the water in all phases) in the grid cell. 
This includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.", - "dimensions": "longitude latitude alevel time1", - "out_name": "clws", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "demc": { - "modeling_realm": "atmos", - "standard_name": "convective_cloud_longwave_emissivity", - "units": "1.0", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Convective Cloud Emissivity", - "comment": "This is the in-cloud emissivity obtained by considering only the cloudy portion of the grid cell.", - "dimensions": "longitude latitude alevel time1", - "out_name": "demc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dems": { - "modeling_realm": "atmos", - "standard_name": "stratiform_cloud_longwave_emissivity", - "units": "1.0", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Stratiform Cloud Emissivity", - "comment": "This is the in-cloud emissivity obtained by considering only the cloudy portion of the grid cell.", - "dimensions": "longitude latitude alevel time1", - "out_name": "dems", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dtauc": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_optical_thickness_due_to_convective_cloud", - "units": "1.0", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Convective Cloud Optical Depth", - "comment": "This is the in-cloud optical depth obtained by considering only the cloudy portion of the grid cell", - "dimensions": "longitude latitude alevel time1", - "out_name": "dtauc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dtaus": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_optical_thickness_due_to_stratiform_cloud", - "units": "1.0", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Stratiform Cloud Optical Depth", - "comment": "This is the in-cloud optical depth obtained by considering only the cloudy portion of the grid cell.", - "dimensions": "longitude latitude alevel time1", - "out_name": "dtaus", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "grpllsprof": { - "modeling_realm": "atmos", - "standard_name": "large_scale_graupel_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Stratiform Graupel Flux", - "comment": "", - "dimensions": "longitude latitude alevhalf time1", - "out_name": "grpllsprof", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "h2o": { - "modeling_realm": "atmos", - "standard_name": "mass_fraction_of_water_in_air", - "units": "1.0", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Mass Fraction of Water", - "comment": "includes all phases of water", - "dimensions": "longitude latitude alevel time1", - "out_name": "h2o", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - 
"ok_max_mean_abs": "" - }, - "pfull": { - "modeling_realm": "atmos", - "standard_name": "air_pressure", - "units": "Pa", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Pressure at Model Full-Levels", - "comment": "Air pressure on model levels", - "dimensions": "longitude latitude alevel time1", - "out_name": "pfull", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phalf": { - "modeling_realm": "atmos", - "standard_name": "air_pressure", - "units": "Pa", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Pressure at Model Half-Levels", - "comment": "Air pressure on model half-levels", - "dimensions": "longitude latitude alevhalf time1", - "out_name": "phalf", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prcprof": { - "modeling_realm": "atmos", - "standard_name": "convective_rainfall_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Convective Rainfall Flux", - "comment": "", - "dimensions": "longitude latitude alevhalf time1", - "out_name": "prcprof", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prlsns": { - "modeling_realm": "atmos", - "standard_name": "large_scale_snowfall_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Stratiform Snowfall Flux", - "comment": "large-scale precipitation of all forms of water in the solid phase.", - "dimensions": "longitude latitude alevhalf time1", - "out_name": "prlsns", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prlsprof": { - "modeling_realm": "atmos", - "standard_name": "large_scale_rainfall_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Stratiform Rainfall Flux", - "comment": "", - "dimensions": "longitude latitude alevhalf time1", - "out_name": "prlsprof", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prsnc": { - "modeling_realm": "atmos", - "standard_name": "convective_snowfall_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Convective Snowfall Flux", - "comment": "convective precipitation of all forms of water in the solid phase.", - "dimensions": "longitude latitude alevhalf time1", - "out_name": "prsnc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ps": { - "modeling_realm": "atmos", - "standard_name": "surface_air_pressure", - "units": "Pa", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Surface Pressure", - "comment": "surface pressure (not mean sea-level pressure), 2-D field to calculate the 3-D pressure field from hybrid coordinates", - "dimensions": "longitude latitude time1", - "out_name": "ps", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "reffclic": { - "modeling_realm": "atmos", - 
"standard_name": "effective_radius_of_convective_cloud_ice_particle", - "units": "m", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Hydrometeor Effective Radius of Convective Cloud Ice", - "comment": "This is defined as the in-cloud ratio of the third moment over the second moment of the particle size distribution (obtained by considering only the cloudy portion of the grid cell).", - "dimensions": "longitude latitude alevel time1", - "out_name": "reffclic", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "reffclis": { - "modeling_realm": "atmos", - "standard_name": "effective_radius_of_stratiform_cloud_ice_particle", - "units": "m", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Hydrometeor Effective Radius of Stratiform Cloud Ice", - "comment": "This is defined as the in-cloud ratio of the third moment over the second moment of the particle size distribution (obtained by considering only the cloudy portion of the grid cell).", - "dimensions": "longitude latitude alevel time1", - "out_name": "reffclis", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "reffclwc": { - "modeling_realm": "atmos", - "standard_name": "effective_radius_of_convective_cloud_liquid_water_particle", - "units": "m", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Hydrometeor Effective Radius of Convective Cloud Liquid Water", - "comment": "Droplets are liquid. The effective radius is defined as the ratio of the third moment over the second moment of the particle size distribution and the time-mean should be calculated, weighting the individual samples by the cloudy fraction of the grid cell.", - "dimensions": "longitude latitude alevel time1", - "out_name": "reffclwc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "reffclws": { - "modeling_realm": "atmos", - "standard_name": "effective_radius_of_stratiform_cloud_liquid_water_particle", - "units": "m", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Hydrometeor Effective Radius of Stratiform Cloud Liquid Water", - "comment": "Droplets are liquid. 
The effective radius is defined as the ratio of the third moment over the second moment of the particle size distribution and the time-mean should be calculated, weighting the individual samples by the cloudy fraction of the grid cell.", - "dimensions": "longitude latitude alevel time1", - "out_name": "reffclws", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "reffgrpls": { - "modeling_realm": "atmos", - "standard_name": "effective_radius_of_stratiform_cloud_graupel_particle", - "units": "m", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Hydrometeor Effective Radius of Stratiform Graupel", - "comment": "This is defined as the in-cloud ratio of the third moment over the second moment of the particle size distribution (obtained by considering only the cloudy portion of the grid cell).", - "dimensions": "longitude latitude alevel time1", - "out_name": "reffgrpls", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "reffrainc": { - "modeling_realm": "atmos", - "standard_name": "effective_radius_of_convective_cloud_rain_particle", - "units": "m", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Hydrometeor Effective Radius of Convective Rainfall", - "comment": "This is defined as the in-cloud ratio of the third moment over the second moment of the particle size distribution (obtained by considering only the cloudy portion of the grid cell).", - "dimensions": "longitude latitude alevel time1", - "out_name": "reffrainc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "reffrains": { - "modeling_realm": "atmos", - "standard_name": "effective_radius_of_stratiform_cloud_rain_particle", - "units": "m", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Hydrometeor Effective Radius of Stratiform Rainfall", - "comment": "This is defined as the in-cloud ratio of the third moment over the second moment of the particle size distribution (obtained by considering only the cloudy portion of the grid cell).", - "dimensions": "longitude latitude alevel time1", - "out_name": "reffrains", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "reffsnowc": { - "modeling_realm": "atmos", - "standard_name": "effective_radius_of_convective_cloud_snow_particle", - "units": "m", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Hydrometeor Effective Radius of Convective Snowfall", - "comment": "This is defined as the in-cloud ratio of the third moment over the second moment of the particle size distribution (obtained by considering only the cloudy portion of the grid cell).", - "dimensions": "longitude latitude alevel time1", - "out_name": "reffsnowc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "reffsnows": { - "modeling_realm": "atmos", - "standard_name": "effective_radius_of_stratiform_cloud_snow_particle", - "units": "m", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Hydrometeor Effective Radius of Stratiform Snowfall", - "comment": "This is defined as the in-cloud ratio of 
the third moment over the second moment of the particle size distribution (obtained by considering only the cloudy portion of the grid cell).", - "dimensions": "longitude latitude alevel time1", - "out_name": "reffsnows", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ta": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Air Temperature", - "comment": "Air Temperature", - "dimensions": "longitude latitude alevel time1", - "out_name": "ta", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zfull": { - "modeling_realm": "atmos", - "standard_name": "height_above_reference_ellipsoid", - "units": "m", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Altitude of Model Full-Levels", - "comment": "", - "dimensions": "longitude latitude alevel time1", - "out_name": "zfull", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zhalf": { - "modeling_realm": "atmos", - "standard_name": "height_above_reference_ellipsoid", - "units": "m", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Altitude of Model Half-Levels", - "comment": "", - "dimensions": "longitude latitude alevhalf time1", - "out_name": "zhalf", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_CFday.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_CFday.json deleted file mode 100644 index 7c47abcca1..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_CFday.json +++ /dev/null @@ -1,630 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table CFday", - "realm": "atmos", - "frequency": "day", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "1.00000", - "generic_levels": "alevel alevhalf", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "albisccp": { - "modeling_realm": "atmos", - "standard_name": "cloud_albedo", - "units": "1.0", - "cell_methods": "area: time: mean where cloud", - "cell_measures": "area: areacella", - "long_name": "ISCCP Mean Cloud Albedo", - "comment": "ISCCP Mean Cloud Albedo. Time-means are weighted by the ISCCP Total Cloud Fraction {:cltisccp} - see http://cfmip.metoffice.com/COSP.html", - "dimensions": "longitude latitude time", - "out_name": "albisccp", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ccb": { - "modeling_realm": "atmos", - "standard_name": "air_pressure_at_convective_cloud_base", - "units": "Pa", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Air Pressure at Convective Cloud Base", - "comment": "Where convective cloud is present in the grid cell, the instantaneous cloud base altitude should be that of the bottom of the lowest level containing convective cloud. Missing data should be reported in the absence of convective cloud. 
The time mean should be calculated from these quantities averaging over occasions when convective cloud is present only, and should contain missing data for occasions when no convective cloud is present during the meaning period.", - "dimensions": "longitude latitude time", - "out_name": "ccb", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cct": { - "modeling_realm": "atmos", - "standard_name": "air_pressure_at_convective_cloud_top", - "units": "Pa", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Air Pressure at Convective Cloud Top", - "comment": "Where convective cloud is present in the grid cell, the instantaneous cloud top altitude should be that of the top of the highest level containing convective cloud. Missing data should be reported in the absence of convective cloud. The time mean should be calculated from these quantities averaging over occasions when convective cloud is present only, and should contain missing data for occasions when no convective cloud is present during the meaning period.", - "dimensions": "longitude latitude time", - "out_name": "cct", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cl": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Cloud Area Fraction in Atmosphere Layer", - "comment": "Percentage cloud cover, including both large-scale and convective cloud.", - "dimensions": "longitude latitude alevel time", - "out_name": "cl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clcalipso": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "CALIPSO Cloud Fraction", - "comment": "Percentage cloud cover at CALIPSO standard heights.", - "dimensions": "longitude latitude alt40 time", - "out_name": "clcalipso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clhcalipso": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "CALIPSO High Level Cloud Fraction", - "comment": "Percentage cloud cover in layer centred on 220hPa", - "dimensions": "longitude latitude time p220", - "out_name": "clhcalipso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cli": { - "modeling_realm": "atmos", - "standard_name": "mass_fraction_of_cloud_ice_in_air", - "units": "kg kg-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Mass Fraction of Cloud Ice", - "comment": "Includes both large-scale and convective cloud. This is calculated as the mass of cloud ice in the grid cell divided by the mass of air (including the water in all phases) in the grid cell. 
It includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.", - "dimensions": "longitude latitude alevel time", - "out_name": "cli", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clisccp": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "ISCCP Cloud Area Fraction", - "comment": "Percentage cloud cover in optical depth categories.", - "dimensions": "longitude latitude plev7c tau time", - "out_name": "clisccp", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clivi": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_cloud_ice_content", - "units": "kg m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Ice Water Path", - "comment": "mass of ice water in the column divided by the area of the column (not just the area of the cloudy portion of the column). Includes precipitating frozen hydrometeors ONLY if the precipitating hydrometeor affects the calculation of radiative transfer in model.", - "dimensions": "longitude latitude time", - "out_name": "clivi", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cllcalipso": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "CALIPSO Low Level Cloud Fraction", - "comment": "Percentage cloud cover in layer centred on 840hPa", - "dimensions": "longitude latitude time p840", - "out_name": "cllcalipso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clmcalipso": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "CALIPSO Mid Level Cloud Fraction", - "comment": "Percentage cloud cover in layer centred on 560hPa", - "dimensions": "longitude latitude time p560", - "out_name": "clmcalipso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cltcalipso": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "CALIPSO Total Cloud Fraction", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "cltcalipso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cltisccp": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "ISCCP Total Total Cloud Fraction", - "comment": "Percentage total cloud cover, simulating ISCCP observations.", - "dimensions": "longitude latitude time", - "out_name": "cltisccp", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clw": { - 
"modeling_realm": "atmos", - "standard_name": "mass_fraction_of_cloud_liquid_water_in_air", - "units": "kg kg-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Mass Fraction of Cloud Liquid Water", - "comment": "Includes both large-scale and convective cloud. Calculate as the mass of cloud liquid water in the grid cell divided by the mass of air (including the water in all phases) in the grid cells. Precipitating hydrometeors are included ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.", - "dimensions": "longitude latitude alevel time", - "out_name": "clw", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clwvi": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_cloud_condensed_water_content", - "units": "kg m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Condensed Water Path", - "comment": "Mass of condensed (liquid + ice) water in the column divided by the area of the column (not just the area of the cloudy portion of the column). Includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.", - "dimensions": "longitude latitude time", - "out_name": "clwvi", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hur": { - "modeling_realm": "atmos", - "standard_name": "relative_humidity", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Relative Humidity", - "comment": "The relative humidity with respect to liquid water for T> 0 C, and with respect to ice for T<0 C.", - "dimensions": "longitude latitude alevel time", - "out_name": "hur", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hus": { - "modeling_realm": "atmos", - "standard_name": "specific_humidity", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Specific Humidity", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "hus", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mc": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_net_upward_convective_mass_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Convective Mass Flux", - "comment": "The net mass flux should represent the difference between the updraft and downdraft components. The flux is computed as the mass divided by the area of the grid cell.", - "dimensions": "longitude latitude alevhalf time", - "out_name": "mc", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pctisccp": { - "modeling_realm": "atmos", - "standard_name": "air_pressure_at_cloud_top", - "units": "Pa", - "cell_methods": "area: time: mean where cloud", - "cell_measures": "area: areacella", - "long_name": "ISCCP Mean Cloud Top Pressure", - "comment": "ISCCP Mean Cloud Top Pressure. 
Time-means are weighted by the ISCCP Total Cloud Fraction {:cltisccp} - see http://cfmip.metoffice.com/COSP.html", - "dimensions": "longitude latitude time", - "out_name": "pctisccp", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pfull": { - "modeling_realm": "atmos", - "standard_name": "air_pressure", - "units": "Pa", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Pressure on Model Levels", - "comment": "Air pressure on model levels", - "dimensions": "longitude latitude alevel time", - "out_name": "pfull", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phalf": { - "modeling_realm": "atmos", - "standard_name": "air_pressure", - "units": "Pa", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Pressure on Model Half-Levels", - "comment": "Air pressure on model half-levels", - "dimensions": "longitude latitude alevhalf time", - "out_name": "phalf", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ps": { - "modeling_realm": "atmos", - "standard_name": "surface_air_pressure", - "units": "Pa", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Air Pressure", - "comment": "surface pressure (not mean sea-level pressure), 2-D field to calculate the 3-D pressure field from hybrid coordinates", - "dimensions": "longitude latitude time", - "out_name": "ps", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rldscs": { - "modeling_realm": "atmos", - "standard_name": "surface_downwelling_longwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Downwelling Clear-Sky Longwave Radiation", - "comment": "Surface downwelling clear-sky longwave radiation", - "dimensions": "longitude latitude time", - "out_name": "rldscs", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlutcs": { - "modeling_realm": "atmos", - "standard_name": "toa_outgoing_longwave_flux_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "TOA Outgoing Clear-Sky Longwave Radiation", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "rlutcs", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsdscs": { - "modeling_realm": "atmos", - "standard_name": "surface_downwelling_shortwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Downwelling Clear-Sky Shortwave Radiation", - "comment": "surface solar irradiance clear sky for UV calculations", - "dimensions": "longitude latitude time", - "out_name": "rsdscs", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsdt": { - "modeling_realm": "atmos", - "standard_name": "toa_incoming_shortwave_flux", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: 
areacella", - "long_name": "TOA Incident Shortwave Radiation", - "comment": "Shortwave radiation incident at the top of the atmosphere", - "dimensions": "longitude latitude time", - "out_name": "rsdt", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsuscs": { - "modeling_realm": "atmos", - "standard_name": "surface_upwelling_shortwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Upwelling Clear-Sky Shortwave Radiation", - "comment": "Surface Upwelling Clear-sky Shortwave Radiation", - "dimensions": "longitude latitude time", - "out_name": "rsuscs", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsut": { - "modeling_realm": "atmos", - "standard_name": "toa_outgoing_shortwave_flux", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "TOA Outgoing Shortwave Radiation", - "comment": "at the top of the atmosphere", - "dimensions": "longitude latitude time", - "out_name": "rsut", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsutcs": { - "modeling_realm": "atmos", - "standard_name": "toa_outgoing_shortwave_flux_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "TOA Outgoing Clear-Sky Shortwave Radiation", - "comment": "Calculated in the absence of clouds.", - "dimensions": "longitude latitude time", - "out_name": "rsutcs", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ta": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Air Temperature", - "comment": "Air Temperature", - "dimensions": "longitude latitude alevel time", - "out_name": "ta", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ta700": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Air Temperature", - "comment": "Air temperature at 700hPa", - "dimensions": "longitude latitude time p700", - "out_name": "ta700", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ua": { - "modeling_realm": "atmos", - "standard_name": "eastward_wind", - "units": "m s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Eastward Wind", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "ua", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "va": { - "modeling_realm": "atmos", - "standard_name": "northward_wind", - "units": "m s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Northward Wind", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "va", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - 
"ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wap": { - "modeling_realm": "atmos", - "standard_name": "lagrangian_tendency_of_air_pressure", - "units": "Pa s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "omega (=dp/dt)", - "comment": "Omega (vertical velocity in pressure coordinates, positive downwards)", - "dimensions": "longitude latitude alevel time", - "out_name": "wap", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wap500": { - "modeling_realm": "atmos", - "standard_name": "lagrangian_tendency_of_air_pressure", - "units": "Pa s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "omega (=dp/dt)", - "comment": "Omega (vertical velocity in pressure coordinates, positive downwards) at 500 hPa level;", - "dimensions": "longitude latitude time p500", - "out_name": "wap500", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zg": { - "modeling_realm": "atmos", - "standard_name": "geopotential_height", - "units": "m", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Geopotential Height", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "zg", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_CFmon.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_CFmon.json deleted file mode 100644 index 14a8f007ad..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_CFmon.json +++ /dev/null @@ -1,987 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table CFmon", - "realm": "atmos", - "frequency": "mon", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "30.00000", - "generic_levels": "alevel alevhalf", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "albisccp": { - "modeling_realm": "atmos", - "standard_name": "cloud_albedo", - "units": "1.0", - "cell_methods": "area: time: mean where cloud", - "cell_measures": "area: areacella", - "long_name": "ISCCP Mean Cloud Albedo", - "comment": "ISCCP Mean Cloud Albedo. 
Time-means are weighted by the ISCCP Total Cloud Fraction {:cltisccp} - see http://cfmip.metoffice.com/COSP.html", - "dimensions": "longitude latitude time", - "out_name": "albisccp", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clc": { - "modeling_realm": "atmos", - "standard_name": "convective_cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Convective Cloud Area Fraction", - "comment": "Include only convective cloud.", - "dimensions": "longitude latitude alevel time", - "out_name": "clc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clcalipso": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "CALIPSO Percentage Cloud Cover", - "comment": "Percentage cloud cover at CALIPSO standard heights.", - "dimensions": "longitude latitude alt40 time", - "out_name": "clcalipso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clhcalipso": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "CALIPSO Percentage High Level Cloud", - "comment": "Percentage cloud cover in layer centred on 220hPa", - "dimensions": "longitude latitude time p220", - "out_name": "clhcalipso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clic": { - "modeling_realm": "atmos", - "standard_name": "mass_fraction_of_convective_cloud_ice_in_air", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Mass Fraction of Convective Cloud Ice", - "comment": "Calculated as the mass of convective cloud ice in the grid cell divided by the mass of air (including the water in all phases) in the grid cell. This includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.", - "dimensions": "longitude latitude alevel time", - "out_name": "clic", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clis": { - "modeling_realm": "atmos", - "standard_name": "mass_fraction_of_stratiform_cloud_ice_in_air", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Mass Fraction of Stratiform Cloud Ice", - "comment": "Calculated as the mass of stratiform cloud ice in the grid cell divided by the mass of air (including the water in all phases) in the grid cell. 
This includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.", - "dimensions": "longitude latitude alevel time", - "out_name": "clis", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clisccp": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "ISCCP Percentage Cloud Area", - "comment": "Percentage cloud cover in optical depth categories.", - "dimensions": "longitude latitude plev7c tau time", - "out_name": "clisccp", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cllcalipso": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "CALIPSO Percentage Low Level Cloud", - "comment": "Percentage cloud cover in layer centred on 840hPa", - "dimensions": "longitude latitude time p840", - "out_name": "cllcalipso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clmcalipso": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "CALIPSO Percentage Mid Level Cloud", - "comment": "Percentage cloud cover in layer centred on 560hPa", - "dimensions": "longitude latitude time p560", - "out_name": "clmcalipso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cls": { - "modeling_realm": "atmos", - "standard_name": "stratiform_cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Stratiform Cloud Area Fraction", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "cls", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cltcalipso": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "CALIPSO Percentage Total Cloud", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "cltcalipso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cltisccp": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "ISCCP Total Cloud Fraction", - "comment": "Percentage total cloud cover, simulating ISCCP observations.", - "dimensions": "longitude latitude time", - "out_name": "cltisccp", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clwc": { - "modeling_realm": "atmos", - "standard_name": "mass_fraction_of_convective_cloud_liquid_water_in_air", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Mass 
Fraction of Convective Cloud Liquid Water", - "comment": "Calculated as the mass of convective cloud liquid water in the grid cell divided by the mass of air (including the water in all phases) in the grid cell. This includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.", - "dimensions": "longitude latitude alevel time", - "out_name": "clwc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clws": { - "modeling_realm": "atmos", - "standard_name": "mass_fraction_of_stratiform_cloud_liquid_water_in_air", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Mass Fraction of Stratiform Cloud Liquid Water", - "comment": "Calculated as the mass of stratiform cloud liquid water in the grid cell divided by the mass of air (including the water in all phases) in the grid cell. This includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.", - "dimensions": "longitude latitude alevel time", - "out_name": "clws", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dmc": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_net_upward_deep_convective_mass_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Deep Convective Mass Flux", - "comment": "The net mass flux represents the difference between the updraft and downdraft components. This is calculated as the convective mass flux divided by the area of the whole grid cell (not just the area of the cloud).", - "dimensions": "longitude latitude alevhalf time", - "out_name": "dmc", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "edt": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_heat_diffusivity", - "units": "m2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Eddy Diffusivity Coefficients for Temperature", - "comment": "Vertical diffusion coefficient for temperature due to parametrised eddies", - "dimensions": "longitude latitude alevel time", - "out_name": "edt", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "evu": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_momentum_diffusivity", - "units": "m2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Eddy Viscosity Coefficients for Momentum", - "comment": "Vertical diffusion coefficient for momentum due to parametrised eddies", - "dimensions": "longitude latitude alevel time", - "out_name": "evu", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hur": { - "modeling_realm": "atmos", - "standard_name": "relative_humidity", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Relative Humidity", - "comment": "The relative humidity with respect to liquid water for T> 0 C, and with respect to ice for T<0 C.", - "dimensions": "longitude latitude alevel time", - "out_name": "hur", - "type": "real", - "positive": "", - "valid_min": "", - 
"valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hus": { - "modeling_realm": "atmos", - "standard_name": "specific_humidity", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Specific Humidity", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "hus", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mcd": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_downdraft_convective_mass_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Downdraft Convective Mass Flux", - "comment": "Calculated as the convective mass flux divided by the area of the whole grid cell (not just the area of the cloud).", - "dimensions": "longitude latitude alevhalf time", - "out_name": "mcd", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mcu": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_updraft_convective_mass_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Updraft Convective Mass Flux", - "comment": "In accordance with common usage in geophysical disciplines, 'flux' implies per unit area, called 'flux density' in physics. The atmosphere convective mass flux is the vertical transport of mass for a field of cumulus clouds or thermals, given by the product of air density and vertical velocity. For an area-average, cell_methods should specify whether the average is over all the area or the area of updrafts only.", - "dimensions": "longitude latitude alevhalf time", - "out_name": "mcu", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pctisccp": { - "modeling_realm": "atmos", - "standard_name": "air_pressure_at_cloud_top", - "units": "Pa", - "cell_methods": "area: time: mean where cloud", - "cell_measures": "area: areacella", - "long_name": "ISCCP Mean Cloud Top Pressure", - "comment": "ISCCP Mean Cloud Top Pressure. 
Time-means are weighted by the ISCCP Total Cloud Fraction {:cltisccp} - see http://cfmip.metoffice.com/COSP.html", - "dimensions": "longitude latitude time", - "out_name": "pctisccp", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ps": { - "modeling_realm": "atmos", - "standard_name": "surface_air_pressure", - "units": "Pa", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Pressure", - "comment": "surface pressure (not mean sea-level pressure), 2-D field to calculate the 3-D pressure field from hybrid coordinates", - "dimensions": "longitude latitude time", - "out_name": "ps", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rld": { - "modeling_realm": "atmos", - "standard_name": "downwelling_longwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Downwelling Longwave Radiation", - "comment": "Downwelling Longwave Radiation (includes the fluxes at the surface and TOA)", - "dimensions": "longitude latitude alevhalf time", - "out_name": "rld", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rld4co2": { - "modeling_realm": "atmos", - "standard_name": "downwelling_longwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Downwelling Longwave Radiation 4XCO2 Atmosphere", - "comment": "Downwelling longwave radiation calculated using carbon dioxide concentrations increased fourfold (includes the fluxes at the surface and TOA)", - "dimensions": "longitude latitude alevhalf time", - "out_name": "rld4co2", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rldcs": { - "modeling_realm": "atmos", - "standard_name": "downwelling_longwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Downwelling Clear-Sky Longwave Radiation", - "comment": "Downwelling clear-sky longwave radiation (includes the fluxes at the surface and TOA)", - "dimensions": "longitude latitude alevhalf time", - "out_name": "rldcs", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rldcs4co2": { - "modeling_realm": "atmos", - "standard_name": "downwelling_longwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Downwelling Clear-Sky Longwave Radiation 4XCO2 Atmosphere", - "comment": "Downwelling clear-sky longwave radiation calculated using carbon dioxide concentrations increased fourfold (includes the fluxes at the surface and TOA)", - "dimensions": "longitude latitude alevhalf time", - "out_name": "rldcs4co2", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlu": { - "modeling_realm": "atmos", - "standard_name": "upwelling_longwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Upwelling Longwave Radiation", - "comment": "Upwelling longwave radiation (includes 
the fluxes at the surface and TOA)", - "dimensions": "longitude latitude alevhalf time", - "out_name": "rlu", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlu4co2": { - "modeling_realm": "atmos", - "standard_name": "upwelling_longwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Upwelling Longwave Radiation 4XCO2 Atmosphere", - "comment": "Upwelling longwave radiation calculated using carbon dioxide concentrations increased fourfold (includes the fluxes at the surface and TOA)", - "dimensions": "longitude latitude alevhalf time", - "out_name": "rlu4co2", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlucs": { - "modeling_realm": "atmos", - "standard_name": "upwelling_longwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Upwelling Clear-Sky Longwave Radiation", - "comment": "Upwelling clear-sky longwave radiation (includes the fluxes at the surface and TOA)", - "dimensions": "longitude latitude alevhalf time", - "out_name": "rlucs", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlucs4co2": { - "modeling_realm": "atmos", - "standard_name": "upwelling_longwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Upwelling Clear-Sky Longwave Radiation 4XCO2 Atmosphere", - "comment": "Upwelling clear-sky longwave radiation calculated using carbon dioxide concentrations increased fourfold (includes the fluxes at the surface and TOA)", - "dimensions": "longitude latitude alevhalf time", - "out_name": "rlucs4co2", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlut4co2": { - "modeling_realm": "atmos", - "standard_name": "toa_outgoing_longwave_flux", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "TOA Outgoing Longwave Radiation 4XCO2 Atmosphere", - "comment": "Top-of-atmosphere outgoing longwave radiation calculated using carbon dioxide concentrations increased fourfold", - "dimensions": "longitude latitude time", - "out_name": "rlut4co2", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlutcs4co2": { - "modeling_realm": "atmos", - "standard_name": "toa_outgoing_longwave_flux_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "TOA Outgoing Clear-Sky Longwave Radiation 4XCO2 Atmosphere", - "comment": "Top-of-atmosphere outgoing clear-sky longwave radiation calculated using carbon dioxide concentrations increased fourfold", - "dimensions": "longitude latitude time", - "out_name": "rlutcs4co2", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsd": { - "modeling_realm": "atmos", - "standard_name": "downwelling_shortwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Downwelling Shortwave Radiation", - 
"comment": "Downwelling shortwave radiation (includes the fluxes at the surface and top-of-atmosphere)", - "dimensions": "longitude latitude alevhalf time", - "out_name": "rsd", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsd4co2": { - "modeling_realm": "atmos", - "standard_name": "downwelling_shortwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Downwelling Shortwave Radiation 4XCO2 Atmosphere", - "comment": "Downwelling shortwave radiation calculated using carbon dioxide concentrations increased fourfold", - "dimensions": "longitude latitude alevhalf time", - "out_name": "rsd4co2", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsdcs": { - "modeling_realm": "atmos", - "standard_name": "downwelling_shortwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Downwelling Clear-Sky Shortwave Radiation", - "comment": "Downwelling clear-sky shortwave radiation (includes the fluxes at the surface and top-of-atmosphere)", - "dimensions": "longitude latitude alevhalf time", - "out_name": "rsdcs", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsdcs4co2": { - "modeling_realm": "atmos", - "standard_name": "downwelling_shortwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Downwelling Clear-Sky Shortwave Radiation 4XCO2 Atmosphere", - "comment": "Downwelling clear-sky shortwave radiation calculated using carbon dioxide concentrations increased fourfold", - "dimensions": "longitude latitude alevhalf time", - "out_name": "rsdcs4co2", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsu": { - "modeling_realm": "atmos", - "standard_name": "upwelling_shortwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Upwelling Shortwave Radiation", - "comment": "Upwelling shortwave radiation (includes also the fluxes at the surface and top of atmosphere)", - "dimensions": "longitude latitude alevhalf time", - "out_name": "rsu", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsu4co2": { - "modeling_realm": "atmos", - "standard_name": "upwelling_shortwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Upwelling Shortwave Radiation 4XCO2 Atmosphere", - "comment": "Upwelling Shortwave Radiation calculated using carbon dioxide concentrations increased fourfold", - "dimensions": "longitude latitude alevhalf time", - "out_name": "rsu4co2", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsucs": { - "modeling_realm": "atmos", - "standard_name": "upwelling_shortwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Upwelling Clear-Sky Shortwave Radiation", - "comment": "Upwelling clear-sky 
shortwave radiation (includes the fluxes at the surface and TOA)", - "dimensions": "longitude latitude alevhalf time", - "out_name": "rsucs", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsucs4co2": { - "modeling_realm": "atmos", - "standard_name": "upwelling_shortwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Upwelling Clear-Sky Shortwave Radiation 4XCO2 Atmosphere", - "comment": "Upwelling clear-sky shortwave radiation calculated using carbon dioxide concentrations increased fourfold", - "dimensions": "longitude latitude alevhalf time", - "out_name": "rsucs4co2", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsut4co2": { - "modeling_realm": "atmos", - "standard_name": "toa_outgoing_shortwave_flux", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "TOA Outgoing Shortwave Radiation in 4XCO2 Atmosphere", - "comment": "TOA Outgoing Shortwave Radiation calculated using carbon dioxide concentrations increased fourfold", - "dimensions": "longitude latitude time", - "out_name": "rsut4co2", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsutcs4co2": { - "modeling_realm": "atmos", - "standard_name": "toa_outgoing_shortwave_flux_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "TOA Outgoing Clear-Sky Shortwave Radiation 4XCO2 Atmosphere", - "comment": "TOA Outgoing Clear-Sky Shortwave Radiation calculated using carbon dioxide concentrations increased fourfold", - "dimensions": "longitude latitude time", - "out_name": "rsutcs4co2", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "smc": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_net_upward_shallow_convective_mass_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Shallow Convective Mass Flux", - "comment": "The net mass flux represents the difference between the updraft and downdraft components. 
For models with a distinct shallow convection scheme, this is calculated as convective mass flux divided by the area of the whole grid cell (not just the area of the cloud).", - "dimensions": "longitude latitude alevhalf time", - "out_name": "smc", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ta": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Air Temperature", - "comment": "Air Temperature", - "dimensions": "longitude latitude alevel time", - "out_name": "ta", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tnhus": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_specific_humidity", - "units": "s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Tendency of Specific Humidity", - "comment": "Tendency of Specific Humidity", - "dimensions": "longitude latitude alevel time", - "out_name": "tnhus", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tnhusa": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_specific_humidity_due_to_advection", - "units": "s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Tendency of Specific Humidity due to Advection", - "comment": "Tendency of Specific Humidity due to Advection", - "dimensions": "longitude latitude alevel time", - "out_name": "tnhusa", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tnhusc": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_specific_humidity_due_to_convection", - "units": "s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Tendency of Specific Humidity due to Convection", - "comment": "Tendencies from cumulus convection scheme.", - "dimensions": "longitude latitude alevel time", - "out_name": "tnhusc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tnhusd": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_specific_humidity_due_to_diffusion", - "units": "s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Tendency of Specific Humidity due to Numerical Diffusion", - "comment": "Tendency of specific humidity due to numerical diffusion. This includes any horizontal or vertical numerical moisture diffusion not associated with the parametrized moist physics or the resolved dynamics. For example, any vertical diffusion which is part of the boundary layer mixing scheme should be excluded, as should any diffusion which is included in the terms from the resolved dynamics.
This term is required to check the closure of the moisture budget.", - "dimensions": "longitude latitude alevel time", - "out_name": "tnhusd", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tnhusmp": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_specific_humidity_due_to_model_physics", - "units": "s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Tendency of Specific Humidity due to Model Physics", - "comment": "Tendency of specific humidity due to model physics. This includes sources and sinks from parametrized moist physics (e.g. convection, boundary layer, stratiform condensation/evaporation, etc.) and excludes sources and sinks from resolved dynamics or from horizontal or vertical numerical diffusion not associated with model physics. For example, any diffusive mixing by the boundary layer scheme would be included.", - "dimensions": "longitude latitude alevel time", - "out_name": "tnhusmp", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tnhusscpbl": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_specific_humidity_due_to_stratiform_cloud_and_precipitation_and_boundary_layer_mixing", - "units": "s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Tendency of Specific Humidity Due to Stratiform Cloud and Precipitation and Boundary Layer Mixing", - "comment": "Tendency of Specific Humidity Due to Stratiform Cloud and Precipitation and Boundary Layer Mixing (to be specified only in models which do not separate budget terms for stratiform cloud, precipitation and boundary layer schemes.
Includes all boundary layer terms including diffusive terms.)", - "dimensions": "longitude latitude alevel time", - "out_name": "tnhusscpbl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tnt": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature", - "units": "K s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Tendency of Air Temperature", - "comment": "Tendency of Air Temperature", - "dimensions": "longitude latitude alevel time", - "out_name": "tnt", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tnta": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature_due_to_advection", - "units": "K s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Tendency of Air Temperature due to Advection", - "comment": "Tendency of Air Temperature due to Advection", - "dimensions": "longitude latitude alevel time", - "out_name": "tnta", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntc": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature_due_to_convection", - "units": "K s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Tendency of Air Temperature due to Convection", - "comment": "Tendencies from cumulus convection scheme.", - "dimensions": "longitude latitude alevel time", - "out_name": "tntc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntmp": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature_due_to_model_physics", - "units": "K s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Tendency of Air Temperature due to Model Physics", - "comment": "Tendency of air temperature due to model physics. This includes sources and sinks from parametrized physics (e.g. radiation, convection, boundary layer, stratiform condensation/evaporation, etc.). It excludes sources and sinks from resolved dynamics and numerical diffusion not associated with parametrized physics. For example, any vertical diffusion which is part of the boundary layer mixing scheme should be included, while numerical diffusion applied in addition to physics or resolved dynamics should be excluded.
This term is required to check the closure of the heat budget.", - "dimensions": "longitude latitude alevel time", - "out_name": "tntmp", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntr": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature_due_to_radiative_heating", - "units": "K s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Tendency of Air Temperature due to Radiative Heating", - "comment": "Tendency of Air Temperature due to Radiative Heating", - "dimensions": "longitude latitude alevel time", - "out_name": "tntr", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntscpbl": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature_due_to_stratiform_cloud_and_precipitation_and_boundary_layer_mixing", - "units": "K s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Tendency of Air Temperature Due to Stratiform Cloud and Precipitation and Boundary Layer Mixing", - "comment": "Tendency of Air Temperature Due to Stratiform Cloud and Precipitation and Boundary Layer Mixing (to be specified only in models which do not separate cloud, precipitation and boundary layer terms. Includes all boundary layer terms including diffusive ones.)", - "dimensions": "longitude latitude alevel time", - "out_name": "tntscpbl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_CFsubhr.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_CFsubhr.json deleted file mode 100644 index fa2e1bed39..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_CFsubhr.json +++ /dev/null @@ -1,1361 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table CFsubhr", - "realm": "atmos", - "frequency": "subhr", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "0.017361", - "generic_levels": "alevel alevhalf", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "ccb": { - "modeling_realm": "atmos", - "standard_name": "air_pressure_at_convective_cloud_base", - "units": "Pa", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Air Pressure at Convective Cloud Base", - "comment": "Where convective cloud is present in the grid cell, the instantaneous cloud base altitude should be that of the bottom of the lowest level containing convective cloud. Missing data should be reported in the absence of convective cloud. 
The time mean should be calculated from these quantities averaging over occasions when convective cloud is present only, and should contain missing data for occasions when no convective cloud is present during the meaning period.", - "dimensions": "site time1", - "out_name": "ccb", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cct": { - "modeling_realm": "atmos", - "standard_name": "air_pressure_at_convective_cloud_top", - "units": "Pa", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Air Pressure at Convective Cloud Top", - "comment": "Where convective cloud is present in the grid cell, the instantaneous cloud top altitude should be that of the top of the highest level containing convective cloud. Missing data should be reported in the absence of convective cloud. The time mean should be calculated from these quantities averaging over occasions when convective cloud is present only, and should contain missing data for occasions when no convective cloud is present during the meaning period.", - "dimensions": "site time1", - "out_name": "cct", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ci": { - "modeling_realm": "atmos", - "standard_name": "convection_time_fraction", - "units": "1.0", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Fraction of Time Convection Occurs", - "comment": "Fraction of time that convection occurs in the grid cell.", - "dimensions": "site time1", - "out_name": "ci", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cl": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Cloud Area Fraction", - "comment": "Percentage cloud cover, including both large-scale and convective cloud.", - "dimensions": "alevel site time1", - "out_name": "cl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cli": { - "modeling_realm": "atmos", - "standard_name": "mass_fraction_of_cloud_ice_in_air", - "units": "kg kg-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Mass Fraction of Cloud Ice", - "comment": "Includes both large-scale and convective cloud. This is calculated as the mass of cloud ice in the grid cell divided by the mass of air (including the water in all phases) in the grid cell. It includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.", - "dimensions": "alevel site time1", - "out_name": "cli", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clivi": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_cloud_ice_content", - "units": "kg m-2", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Ice Water Path", - "comment": "mass of ice water in the column divided by the area of the column (not just the area of the cloudy portion of the column). 
Includes precipitating frozen hydrometeors ONLY if the precipitating hydrometeor affects the calculation of radiative transfer in model.", - "dimensions": "site time1", - "out_name": "clivi", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clt": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction", - "units": "%", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Total Cloud Fraction", - "comment": "Total cloud area fraction for the whole atmospheric column, as seen from the surface or the top of the atmosphere. Includes both large-scale and convective cloud.", - "dimensions": "site time1", - "out_name": "clt", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clw": { - "modeling_realm": "atmos", - "standard_name": "mass_fraction_of_cloud_liquid_water_in_air", - "units": "kg kg-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Mass Fraction of Cloud Liquid Water", - "comment": "Includes both large-scale and convective cloud. Calculated as the mass of cloud liquid water in the grid cell divided by the mass of air (including the water in all phases) in the grid cell. Precipitating hydrometeors are included ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.", - "dimensions": "alevel site time1", - "out_name": "clw", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clwvi": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_cloud_condensed_water_content", - "units": "kg m-2", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Condensed Water Path", - "comment": "Mass of condensed (liquid + ice) water in the column divided by the area of the column (not just the area of the cloudy portion of the column).
Includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.", - "dimensions": "site time1", - "out_name": "clwvi", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "edt": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_heat_diffusivity", - "units": "m2 s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Eddy Diffusivity Coefficient for Temperature Variable", - "comment": "Vertical diffusion coefficient for temperature due to parametrised eddies", - "dimensions": "alevel site time1", - "out_name": "edt", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "evspsbl": { - "modeling_realm": "atmos", - "standard_name": "water_evaporation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Evaporation", - "comment": "Evaporation at surface: flux of water into the atmosphere due to conversion of both liquid and solid phases to vapor (from underlying surface and vegetation)", - "dimensions": "site time1", - "out_name": "evspsbl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "evu": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_momentum_diffusivity", - "units": "m2 s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Eddy Viscosity Coefficient for Momentum Variables", - "comment": "Vertical diffusion coefficient for momentum due to parametrised eddies", - "dimensions": "alevel site time1", - "out_name": "evu", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fco2antt": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_atmosphere_mass_content_of_carbon_dioxide_expressed_as_carbon_due_to_anthropogenic_emission", - "units": "kg m-2 s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Carbon Mass Flux into Atmosphere Due to All Anthropogenic Emissions of CO2", - "comment": "This is requested only for the emission-driven coupled carbon climate model runs. 
Does not include natural fire sources but includes all anthropogenic sources, including fossil fuel use, cement production, agricultural burning, and sources associated with anthropogenic land use change excluding forest regrowth.", - "dimensions": "site time1", - "out_name": "fco2antt", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fco2fos": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_atmosphere_mass_content_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_fossil_fuel_combustion", - "units": "kg m-2 s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Carbon Mass Flux into Atmosphere Due to Fossil Fuel Emissions of CO2", - "comment": "This is the prescribed anthropogenic CO2 flux from fossil fuel use, including cement production, and flaring (but not from land-use changes, agricultural burning, forest regrowth, etc.)", - "dimensions": "site time1", - "out_name": "fco2fos", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fco2nat": { - "modeling_realm": "atmos", - "standard_name": "surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_natural_sources", - "units": "kg m-2 s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Surface Carbon Mass Flux into the Atmosphere Due to Natural Sources", - "comment": "This is what the atmosphere sees (on its own grid). This field should be equivalent to the combined natural fluxes of carbon that account for natural exchanges between the atmosphere and land (nep) or ocean (fgco2) reservoirs.", - "dimensions": "site time1", - "out_name": "fco2nat", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfls": { - "modeling_realm": "atmos", - "standard_name": "surface_upward_latent_heat_flux", - "units": "W m-2", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Surface Upward Latent Heat Flux", - "comment": "", - "dimensions": "site time1", - "out_name": "hfls", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfss": { - "modeling_realm": "atmos", - "standard_name": "surface_upward_sensible_heat_flux", - "units": "W m-2", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Surface Upward Sensible Heat Flux", - "comment": "", - "dimensions": "site time1", - "out_name": "hfss", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hur": { - "modeling_realm": "atmos", - "standard_name": "relative_humidity", - "units": "%", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Relative Humidity", - "comment": "The relative humidity with respect to liquid water for T> 0 C, and with respect to ice for T<0 C.", - "dimensions": "alevel site time1", - "out_name": "hur", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hurs": { - "modeling_realm": "atmos", - "standard_name": "relative_humidity", - "units": "%", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Near-Surface Relative Humidity", - "comment": "The relative
humidity with respect to liquid water for T> 0 C, and with respect to ice for T<0 C.", - "dimensions": "site time1 height2m", - "out_name": "hurs", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hus": { - "modeling_realm": "atmos", - "standard_name": "specific_humidity", - "units": "1.0", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Specific Humidity", - "comment": "", - "dimensions": "alevel site time1", - "out_name": "hus", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "huss": { - "modeling_realm": "atmos", - "standard_name": "specific_humidity", - "units": "1.0", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Near-Surface Specific Humidity", - "comment": "Near-surface (usually, 2 meter) specific humidity.", - "dimensions": "site time1 height2m", - "out_name": "huss", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "latitude": { - "modeling_realm": "atmos", - "standard_name": "latitude", - "units": "degrees_north", - "cell_methods": "area: point", - "cell_measures": "", - "long_name": "Latitude", - "comment": "", - "dimensions": "site", - "out_name": "lat", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "longitude": { - "modeling_realm": "atmos", - "standard_name": "longitude", - "units": "degrees_east", - "cell_methods": "area: point", - "cell_measures": "", - "long_name": "Longitude", - "comment": "", - "dimensions": "site", - "out_name": "lon", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mc": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_net_upward_convective_mass_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Convective Mass Flux", - "comment": "The net mass flux should represent the difference between the updraft and downdraft components. 
The flux is computed as the mass divided by the area of the grid cell.", - "dimensions": "alevhalf site time1", - "out_name": "mc", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pfull": { - "modeling_realm": "atmos", - "standard_name": "air_pressure", - "units": "Pa", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Pressure on Model Levels", - "comment": "Air pressure on model levels", - "dimensions": "alevel site time1", - "out_name": "pfull", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phalf": { - "modeling_realm": "atmos", - "standard_name": "air_pressure", - "units": "Pa", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Pressure on Model Half-Levels", - "comment": "Air pressure on model half-levels", - "dimensions": "alevhalf site time1", - "out_name": "phalf", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pr": { - "modeling_realm": "atmos", - "standard_name": "precipitation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Precipitation", - "comment": "includes both liquid and solid phases", - "dimensions": "site time1", - "out_name": "pr", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prc": { - "modeling_realm": "atmos", - "standard_name": "convective_precipitation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Convective Precipitation", - "comment": "Convective precipitation at surface; includes both liquid and solid phases.", - "dimensions": "site time1", - "out_name": "prc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prsn": { - "modeling_realm": "atmos", - "standard_name": "snowfall_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Snowfall Flux", - "comment": "at surface; includes precipitation of all forms of water in the solid phase", - "dimensions": "site time1", - "out_name": "prsn", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prw": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_water_vapor_content", - "units": "kg m-2", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Water Vapor Path", - "comment": "vertically integrated through the atmospheric column", - "dimensions": "site time1", - "out_name": "prw", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ps": { - "modeling_realm": "atmos", - "standard_name": "surface_air_pressure", - "units": "Pa", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Surface Air Pressure", - "comment": "surface pressure (not mean sea-level pressure), 2-D field to calculate the 3-D pressure field from hybrid coordinates", - "dimensions": "site time1", - "out_name": "ps", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "psl": { 
- "modeling_realm": "atmos", - "standard_name": "air_pressure_at_sea_level", - "units": "Pa", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Sea Level Pressure", - "comment": "Sea Level Pressure", - "dimensions": "site time1", - "out_name": "psl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rld": { - "modeling_realm": "atmos", - "standard_name": "downwelling_longwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Downwelling Longwave Radiation", - "comment": "Downwelling Longwave Radiation (includes the fluxes at the surface and TOA)", - "dimensions": "alevhalf site time1", - "out_name": "rld", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rldcs": { - "modeling_realm": "atmos", - "standard_name": "downwelling_longwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Downwelling Clear-Sky Longwave Radiation", - "comment": "Downwelling clear-sky longwave radiation (includes the fluxes at the surface and TOA)", - "dimensions": "alevhalf site time1", - "out_name": "rldcs", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlds": { - "modeling_realm": "atmos", - "standard_name": "surface_downwelling_longwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Surface Downwelling Longwave Radiation", - "comment": "", - "dimensions": "site time1", - "out_name": "rlds", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rldscs": { - "modeling_realm": "atmos", - "standard_name": "surface_downwelling_longwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Surface Downwelling Clear-Sky Longwave Radiation", - "comment": "Surface downwelling clear-sky longwave radiation", - "dimensions": "site time1", - "out_name": "rldscs", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlu": { - "modeling_realm": "atmos", - "standard_name": "upwelling_longwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Upwelling Longwave Radiation", - "comment": "Upwelling longwave radiation (includes the fluxes at the surface and TOA)", - "dimensions": "alevhalf site time1", - "out_name": "rlu", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlucs": { - "modeling_realm": "atmos", - "standard_name": "upwelling_longwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Upwelling Clear-Sky Longwave Radiation", - "comment": "Upwelling clear-sky longwave radiation (includes the fluxes at the surface and TOA)", - "dimensions": "alevhalf site time1", - "out_name": "rlucs", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlus": { - 
"modeling_realm": "atmos", - "standard_name": "surface_upwelling_longwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Surface Upwelling Longwave Radiation", - "comment": "", - "dimensions": "site time1", - "out_name": "rlus", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlut": { - "modeling_realm": "atmos", - "standard_name": "toa_outgoing_longwave_flux", - "units": "W m-2", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "TOA Outgoing Longwave Radiation", - "comment": "at the top of the atmosphere (to be compared with satellite measurements)", - "dimensions": "site time1", - "out_name": "rlut", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlutcs": { - "modeling_realm": "atmos", - "standard_name": "toa_outgoing_longwave_flux_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "TOA Outgoing Clear-Sky Longwave Radiation", - "comment": "", - "dimensions": "site time1", - "out_name": "rlutcs", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsd": { - "modeling_realm": "atmos", - "standard_name": "downwelling_shortwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Downwelling Shortwave Radiation", - "comment": "Downwelling shortwave radiation (includes the fluxes at the surface and top-of-atmosphere)", - "dimensions": "alevhalf site time1", - "out_name": "rsd", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsdcs": { - "modeling_realm": "atmos", - "standard_name": "downwelling_shortwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Downwelling Clear-Sky Shortwave Radiation", - "comment": "Downwelling clear-sky shortwave radiation (includes the fluxes at the surface and top-of-atmosphere)", - "dimensions": "alevhalf site time1", - "out_name": "rsdcs", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsds": { - "modeling_realm": "atmos", - "standard_name": "surface_downwelling_shortwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Surface Downwelling Shortwave Radiation", - "comment": "surface solar irradiance for UV calculations", - "dimensions": "site time1", - "out_name": "rsds", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsdscs": { - "modeling_realm": "atmos", - "standard_name": "surface_downwelling_shortwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Surface Downwelling Clear-Sky Shortwave Radiation", - "comment": "surface solar irradiance clear sky for UV calculations", - "dimensions": "site time1", - "out_name": "rsdscs", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsdt": { - 
"modeling_realm": "atmos", - "standard_name": "toa_incoming_shortwave_flux", - "units": "W m-2", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "TOA Incident Shortwave Radiation", - "comment": "Shortwave radiation incident at the top of the atmosphere", - "dimensions": "site time1", - "out_name": "rsdt", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsu": { - "modeling_realm": "atmos", - "standard_name": "upwelling_shortwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Upwelling Shortwave Radiation", - "comment": "Upwelling shortwave radiation (includes also the fluxes at the surface and top of atmosphere)", - "dimensions": "alevhalf site time1", - "out_name": "rsu", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsucs": { - "modeling_realm": "atmos", - "standard_name": "upwelling_shortwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Upwelling Clear-Sky Shortwave Radiation", - "comment": "Upwelling clear-sky shortwave radiation (includes the fluxes at the surface and TOA)", - "dimensions": "alevhalf site time1", - "out_name": "rsucs", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsus": { - "modeling_realm": "atmos", - "standard_name": "surface_upwelling_shortwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Surface Upwelling Shortwave Radiation", - "comment": "", - "dimensions": "site time1", - "out_name": "rsus", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsuscs": { - "modeling_realm": "atmos", - "standard_name": "surface_upwelling_shortwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Surface Upwelling Clear-Sky Shortwave Radiation", - "comment": "Surface Upwelling Clear-sky Shortwave Radiation", - "dimensions": "site time1", - "out_name": "rsuscs", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsut": { - "modeling_realm": "atmos", - "standard_name": "toa_outgoing_shortwave_flux", - "units": "W m-2", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "TOA Outgoing Shortwave Radiation", - "comment": "at the top of the atmosphere", - "dimensions": "site time1", - "out_name": "rsut", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsutcs": { - "modeling_realm": "atmos", - "standard_name": "toa_outgoing_shortwave_flux_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "TOA Outgoing Clear-Sky Shortwave Radiation", - "comment": "Calculated in the absence of clouds.", - "dimensions": "site time1", - "out_name": "rsutcs", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rtmt": { - "modeling_realm": "atmos", - "standard_name": 
"net_downward_radiative_flux_at_top_of_atmosphere_model", - "units": "W m-2", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Net Downward Flux at Top of Model", - "comment": "Net Downward Radiative Flux at Top of Model : I.e., at the top of that portion of the atmosphere where dynamics are explicitly treated by the model. This is reported only if it differs from the net downward radiative flux at the top of the atmosphere.", - "dimensions": "site time1", - "out_name": "rtmt", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sbl": { - "modeling_realm": "landIce", - "standard_name": "surface_snow_and_ice_sublimation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Surface Snow and Ice Sublimation Flux", - "comment": "The snow and ice sublimation flux is the loss of snow and ice mass per unit area from the surface resulting from their direct conversion to water vapor that enters the atmosphere.", - "dimensions": "site time1", - "out_name": "sbl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sci": { - "modeling_realm": "atmos", - "standard_name": "shallow_convection_time_fraction", - "units": "1.0", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Fraction of Time Shallow Convection Occurs", - "comment": "Fraction of time that shallow convection occurs in the grid cell.", - "dimensions": "site time1", - "out_name": "sci", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sfcWind": { - "modeling_realm": "atmos", - "standard_name": "wind_speed", - "units": "m s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Near-Surface Wind Speed", - "comment": "near-surface (usually, 10 meters) wind speed.", - "dimensions": "site time1 height10m", - "out_name": "sfcWind", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ta": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Air Temperature", - "comment": "Air Temperature", - "dimensions": "alevel site time1", - "out_name": "ta", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tas": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Near-Surface Air Temperature", - "comment": "near-surface (usually, 2 meter) air temperature", - "dimensions": "site time1 height2m", - "out_name": "tas", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tauu": { - "modeling_realm": "atmos", - "standard_name": "surface_downward_eastward_stress", - "units": "Pa", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Surface Downward Eastward Wind Stress", - "comment": "Downward eastward wind stress at the surface", - "dimensions": "site time1", - "out_name": "tauu", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - 
"ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tauv": { - "modeling_realm": "atmos", - "standard_name": "surface_downward_northward_stress", - "units": "Pa", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Surface Downward Northward Wind Stress", - "comment": "Downward northward wind stress at the surface", - "dimensions": "site time1", - "out_name": "tauv", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tnhus": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_specific_humidity", - "units": "s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Tendency of Specific Humidity", - "comment": "Tendency of Specific Humidity", - "dimensions": "alevel site time1", - "out_name": "tnhus", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tnhusa": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_specific_humidity_due_to_advection", - "units": "s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Tendency of Specific Humidity due to Advection", - "comment": "Tendency of Specific Humidity due to Advection", - "dimensions": "alevel site time1", - "out_name": "tnhusa", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tnhusc": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_specific_humidity_due_to_convection", - "units": "s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Tendency of Specific Humidity due to Convection", - "comment": "Tendencies from cumulus convection scheme.", - "dimensions": "alevel site time1", - "out_name": "tnhusc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tnhusd": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_specific_humidity_due_to_diffusion", - "units": "s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Tendency of Specific Humidity due to Numerical Diffusion", - "comment": "Tendency of specific humidity due to numerical diffusion.This includes any horizontal or vertical numerical moisture diffusion not associated with the parametrized moist physics or the resolved dynamics. For example, any vertical diffusion which is part of the boundary layer mixing scheme should be excluded, as should any diffusion which is included in the terms from the resolved dynamics. This term is required to check the closure of the moisture budget.", - "dimensions": "alevel site time1", - "out_name": "tnhusd", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tnhusmp": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_specific_humidity_due_to_model_physics", - "units": "s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Tendency of Specific Humidity due to Model Physics", - "comment": "Tendency of specific humidity due to model physics. This includes sources and sinks from parametrized moist physics (e.g. convection, boundary layer, stratiform condensation/evaporation, etc.) 
and excludes sources and sinks from resolved dynamics or from horizontal or vertical numerical diffusion not associated with model physicsl. For example any diffusive mixing by the boundary layer scheme would be included.", - "dimensions": "alevel site time1", - "out_name": "tnhusmp", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tnhusscpbl": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_specific_humidity_due_to_stratiform_cloud_and_precipitation_and_boundary_layer_mixing", - "units": "s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Tendency of Specific Humidity Due to Stratiform Cloud and Precipitation and Boundary Layer Mixing", - "comment": "Tendency of Specific Humidity Due to Stratiform Cloud and Precipitation and Boundary Layer Mixing (to be specified only in models which do not separate budget terms for stratiform cloud, precipitation and boundary layer schemes. Includes all bounday layer terms including and diffusive terms.)", - "dimensions": "alevel site time1", - "out_name": "tnhusscpbl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tnt": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature", - "units": "K s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Tendency of Air Temperature", - "comment": "Tendency of Air Temperature", - "dimensions": "alevel site time1", - "out_name": "tnt", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tnta": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature_due_to_advection", - "units": "K s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Tendency of Air Temperature due to Advection", - "comment": "Tendency of Air Temperature due to Advection", - "dimensions": "alevel site time1", - "out_name": "tnta", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntc": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature_due_to_convection", - "units": "K s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Tendency of Air Temperature due to Convection", - "comment": "Tendencies from cumulus convection scheme.", - "dimensions": "alevel site time1", - "out_name": "tntc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntmp": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature_due_to_model_physics", - "units": "K s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Tendency of Air Temperature due to Model Physics", - "comment": "Tendency of air temperature due to model physics. This includes sources and sinks from parametrized physics (e.g. radiation, convection, boundary layer, stratiform condensation/evaporation, etc.). It excludes sources and sinks from resolved dynamics and numerical diffusion not associated with parametrized physics. 
For example, any vertical diffusion which is part of the boundary layer mixing scheme should be included, while numerical diffusion applied in addition to physics or resolved dynamics should be excluded. This term is required to check the closure of the heat budget.", - "dimensions": "alevel site time1", - "out_name": "tntmp", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntr": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature_due_to_radiative_heating", - "units": "K s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Tendency of Air Temperature due to Radiative Heating", - "comment": "Tendency of Air Temperature due to Radiative Heating", - "dimensions": "alevel site time1", - "out_name": "tntr", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntscpbl": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature_due_to_stratiform_cloud_and_precipitation_and_boundary_layer_mixing", - "units": "K s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Tendency of Air Temperature Due to Stratiform Cloud and Precipitation and Boundary Layer Mixing", - "comment": "Tendency of Air Temperature Due to Stratiform Cloud and Precipitation and Boundary Layer Mixing (to be specified only in models which do not separate cloud, precipitation and boundary layer terms. Includes all boundary layer terms including diffusive ones.)", - "dimensions": "alevel site time1", - "out_name": "tntscpbl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ts": { - "modeling_realm": "atmos", - "standard_name": "surface_temperature", - "units": "K", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Surface Temperature", - "comment": "Temperature of the lower boundary of the atmosphere", - "dimensions": "site time1", - "out_name": "ts", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ua": { - "modeling_realm": "atmos", - "standard_name": "eastward_wind", - "units": "m s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Eastward Wind", - "comment": "", - "dimensions": "alevel site time1", - "out_name": "ua", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "uas": { - "modeling_realm": "atmos", - "standard_name": "eastward_wind", - "units": "m s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Eastward Near-Surface Wind", - "comment": "Eastward component of the near-surface (usually, 10 meters) wind", - "dimensions": "site time1 height10m", - "out_name": "uas", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "va": { - "modeling_realm": "atmos", - "standard_name": "northward_wind", - "units": "m s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Northward Wind", - "comment": "", - "dimensions": "alevel site time1", - "out_name": "va", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vas": { - 
"modeling_realm": "atmos", - "standard_name": "northward_wind", - "units": "m s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Northward Near-Surface Wind", - "comment": "Northward component of the near surface wind", - "dimensions": "site time1 height10m", - "out_name": "vas", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wap": { - "modeling_realm": "atmos", - "standard_name": "lagrangian_tendency_of_air_pressure", - "units": "Pa s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "omega (=dp/dt)", - "comment": "Omega (vertical velocity in pressure coordinates, positive downwards)", - "dimensions": "alevel site time1", - "out_name": "wap", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zg": { - "modeling_realm": "atmos", - "standard_name": "geopotential_height", - "units": "m", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Geopotential Height", - "comment": "", - "dimensions": "alevel site time1", - "out_name": "zg", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_CFsubhrOff.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_CFsubhrOff.json deleted file mode 100644 index 92ad59e513..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_CFsubhrOff.json +++ /dev/null @@ -1,171 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table CFsubhrOff", - "realm": "atmos", - "frequency": "subhr", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "", - "generic_levels": "", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "cfadDbze94": { - "modeling_realm": "atmos", - "standard_name": "histogram_of_equivalent_reflectivity_factor_over_height_above_reference_ellipsoid", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "CloudSat Radar Reflectivity", - "comment": "CFAD (Cloud Frequency Altitude Diagrams) are frequency distributions of radar reflectivity (or lidar scattering ratio) as a function of altitude. The variable cfadDbze94 is defined as the simulated relative frequency of occurrence of radar reflectivity in sampling volumes defined by altitude bins. The radar is observing at a frequency of 94GHz.", - "dimensions": "longitude latitude alt40 dbze time", - "out_name": "cfadDbze94", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cfadLidarsr532": { - "modeling_realm": "atmos", - "standard_name": "histogram_of_backscattering_ratio_over_height_above_reference_ellipsoid", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "CALIPSO Scattering Ratio", - "comment": "CFAD (Cloud Frequency Altitude Diagrams) are frequency distributions of radar reflectivity (or lidar scattering ratio) as a function of altitude. The variable cfadLidarsr532 is defined as the simulated relative frequency of lidar scattering ratio in sampling volumes defined by altitude bins. 
The lidar is observing at a wavelength of 532nm.", - "dimensions": "longitude latitude alt40 scatratio time", - "out_name": "cfadLidarsr532", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clcalipso": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "CALIPSO Cloud Fraction", - "comment": "Percentage cloud cover at CALIPSO standard heights.", - "dimensions": "longitude latitude alt40 time", - "out_name": "clcalipso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clcalipso2": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "CALIPSO Cloud Fraction Undetected by CloudSat", - "comment": "Clouds detected by CALIPSO but below the detectability threshold of CloudSat", - "dimensions": "longitude latitude alt40 time", - "out_name": "clcalipso2", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clhcalipso": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "CALIPSO High Level Cloud Fraction", - "comment": "Percentage cloud cover in layer centred on 220hPa", - "dimensions": "longitude latitude time p220", - "out_name": "clhcalipso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cllcalipso": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "CALIPSO Low Level Cloud Fraction", - "comment": "Percentage cloud cover in layer centred on 840hPa", - "dimensions": "longitude latitude time p840", - "out_name": "cllcalipso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clmcalipso": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "CALIPSO Mid Level Cloud Fraction", - "comment": "Percentage cloud cover in layer centred on 560hPa", - "dimensions": "longitude latitude time p560", - "out_name": "clmcalipso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cltcalipso": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "CALIPSO Total Cloud Fraction", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "cltcalipso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "parasolRefl": { - "modeling_realm": "atmos", - "standard_name": "toa_bidirectional_reflectance", - "units": "1.0", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": 
"PARASOL Reflectance", - "comment": "Simulated reflectance from PARASOL as seen at the top of the atmosphere for 5 solar zenith angles. Valid only over ocean and for one viewing direction (viewing zenith angle of 30 degrees and relative azimuth angle 320 degrees).", - "dimensions": "longitude latitude sza5 time", - "out_name": "parasolRefl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_CV.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_CV.json deleted file mode 100644 index a8050fc2e2..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_CV.json +++ /dev/null @@ -1,8309 +0,0 @@ -{ - "CV":{ - "required_global_attributes":[ - "Conventions", - "activity_id", - "creation_date", - "data_specs_version", - "experiment", - "experiment_id", - "forcing_index", - "frequency", - "further_info_url", - "grid", - "grid_label", - "initialization_index", - "institution", - "institution_id", - "license", - "mip_era", - "nominal_resolution", - "physics_index", - "product", - "realization_index", - "realm", - "source", - "source_id", - "source_type", - "sub_experiment", - "sub_experiment_id", - "table_id", - "tracking_id", - "variable_id", - "variant_label" - ], - "version_metadata":{ - "author":"Paul J. Durack ", - "creation_date":"Wed Apr 26 09:19:24 2017 -0700", - "institution_id":"PCMDI", - "latest_tag_point":"3.2.3 (29; g9be7426)", - "note":"Revise source_id BESM-2-7", - "previous_commit":"66ea1f100a70977e3400c65e14f0c91fc6590ac6" - }, - "activity_id":[ - "AerChemMIP", - "C4MIP", - "CFMIP", - "CMIP", - "CORDEX", - "DAMIP", - "DCPP", - "DynVarMIP", - "FAFMIP", - "GMMIP", - "GeoMIP", - "HighResMIP", - "ISMIP6", - "LS3MIP", - "LUMIP", - "OMIP", - "PMIP", - "RFMIP", - "SIMIP", - "ScenarioMIP", - "VIACSAB", - "VolMIP" - ], - "institution_id":{ - "AWI":"Alfred Wegener Institute, Helmholtz Centre for Polar and Marine Research, Am Handelshafen 12, 27570 Bremerhaven, Germany", - "BNU":"Beijing Normal University, Beijing 100875, China", - "CAMS":"Chinese Academy of Meteorological Sciences, Beijing 100081, China", - "CCCR-IITM":"Centre for Climate Change Research, Indian Institute of Tropical Meteorology Pune, Maharashtra 411 008, India", - "CCCma":"Canadian Centre for Climate Modelling and Analysis, Victoria, BC V8P 5C2, Canada", - "CMCC":"Fondazione Centro Euro-Mediterraneo sui Cambiamenti Climatici, Lecce 73100, Italy", - "CNRM-CERFACS":"CNRM (Centre National de Recherches Meteorologiques, Toulouse 31057, France), CERFACS (Centre Europeen de Recherche et de Formation Avancee en Calcul Scientifique, Toulouse 31100, France)", - "COLA-CFS":"Center for Ocean-Land-Atmosphere Studies, Fairfax, VA 22030, USA", - "CSIR-CSIRO":"CSIR (Council for Scientific and Industrial Research - Natural Resources and the Environment, Pretoria, 0001, South Africa), CSIRO (Commonwealth Scientific and Industrial Research Organisation and Bureau of Meteorology, Melbourne, Victoria 3208, Australia)", - "CSIRO-BOM":"Commonwealth Scientific and Industrial Research Organisation and Bureau of Meteorology, Melbourne, Victoria 3208, Australia", - "EC-Earth-Consortium":"KNMI, The Netherlands; SMHI, Sweden; DMI, Denmark; AEMET, Spain; Met Eireann, Ireland; CNR-ISAC, Italy; Instituto de Meteorologia, Portugal; FMI, Finland; BSC, Spain; Centro de Geofisica, University of Lisbon, Portugal; ENEA, Italy; Geomar, Germany; Geophysical Institute, University of Bergen, Norway; ICHEC, Ireland; ICTP, Italy; 
IMAU, The Netherlands; IRV, Sweden; Lund University, Sweden; Meteorologiska Institutionen, Stockholms University, Sweden; Niels Bohr Institute, University of Copenhagen, Denmark; NTNU, Norway; SARA, The Netherlands; Unite ASTR, Belgium; Universiteit Utrecht, The Netherlands; Universiteit Wageningen, The Netherlands; University College Dublin, Ireland; Vrije Universiteit Amsterdam, the Netherlands; University of Helsinki, Finland; KIT, Karlsruhe, Germany; USC, University of Santiago de Compostela, Spain; Uppsala Universitet, Sweden; NLeSC, Netherlands eScience Center, The Netherlands", - "FIO-RONM":"FIO (First Institute of Oceanography, State Oceanic Administration, Qingdao 266061, China), RONM (Laboratory for Regional Oceanography and Numerical Modeling, Qingdao National Laboratory for Marine Science and Technology, Qingdao 266237, China)", - "INM":"Institute for Numerical Mathematics, Moscow 119991, Russia", - "INPE":"National Institute for Space Research, Cachoeira Paulista, SP 12630-000, Brazil", - "IPSL":"Institut Pierre Simon Laplace, Paris 75252, France", - "LASG-IAP":"Institute of Atmospheric Physics, Chinese Academy of Sciences, Beijing 100029, China", - "MESSy-Consortium":"The Modular Earth Submodel System (MESSy) Consortium, represented by the Institute for Physics of the Atmosphere, Deutsches Zentrum fur Luft- und Raumfahrt (DLR), Wessling, Bavaria 82234, Germany", - "MIROC":"JAMSTEC (Japan Agency for Marine-Earth Science and Technology, Kanagawa 236-0001, Japan), AORI (Atmosphere and Ocean Research Institute, The University of Tokyo, Chiba 277-8564, Japan), NIES (National Institute for Environmental Studies, Ibaraki 305-8506, Japan), and AICS (RIKEN Advanced Institute for Computational Science, Hyogo 650-0047, Japan)", - "MOHC":"Met Office Hadley Centre, Fitzroy Road, Exeter, Devon, EX1 3PB, UK", - "MPI-M":"Max Planck Institute for Meteorology, Hamburg 20146, Germany", - "MRI":"Meteorological Research Institute, Tsukuba, Ibaraki 305-0052, Japan", - "NASA-GISS":"Goddard Institute for Space Studies, New York, NY 10025, USA", - "NCAR":"National Center for Atmospheric Research, Boulder, CO 80307, USA", - "NCC":"NorESM Climate modeling Consortium consisting of CICERO (Center for International Climate and Environmental Research, Oslo 0349), MET-Norway (Norwegian Meteorological Institute, Oslo 0313), NERSC (Nansen Environmental and Remote Sensing Center, Bergen 5006), NILU (Norwegian Institute for Air Research, Kjeller 2027), UiB (University of Bergen, Bergen 5007), UiO (University of Oslo, Oslo 0313) and UNI (Uni Research, Bergen 5008), Norway", - "NERC":"Natural Environment Research Council, STFC-RAL, Harwell, Oxford, OX11 0QX, UK", - "NIMS-KMA":"National Institute of Meteorological Sciences/Korea Meteorological Administration, Climate Research Division, Seoho-bukro 33, Seogwipo-si, Jejudo 63568, Republic of Korea", - "NOAA-GFDL":"National Oceanic and Atmospheric Administration, Geophysical Fluid Dynamics Laboratory, Princeton, NJ 08540, USA", - "NOAA-NCEP":"National Oceanic and Atmospheric Administration, National Centers for Environmental Prediction, Camp Springs, MD 20746, USA", - "NUIST":"Nanjing University of Information Science and Technology, Nanjing, 210044, China", - "PCMDI":"Program for Climate Model Diagnosis and Intercomparison, Lawrence Livermore National Laboratory, Livermore, CA 94550, USA", - "THU":"Department of Earth System Science, Tsinghua University, Beijing 100084, China" - }, - "source_id":{ - "ACCESS-1-0":{ - "activity_participation":[ - "CMIP" - ], - 
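This CV file is what makes a CMIP6 output file checkable: required_global_attributes lists the attributes every file must carry, activity_id is a plain list of valid MIP names, and institution_id and source_id are mappings keyed by registered identifiers, with each source_id entry constraining which institutions may publish it. A minimal sketch of the membership checks this enables, assuming a local copy of the removed CMIP6_CV.json and a hand-written attrs dict standing in for a file's global attributes; this is not ESMValTool's actual CMOR checker.

import json

with open("CMIP6_CV.json", encoding="utf-8") as handle:
    cv = json.load(handle)["CV"]

# Hypothetical global attributes read from a model output file.
attrs = {"activity_id": "CMIP", "institution_id": "MOHC",
         "source_id": "UKESM1-0-LL"}

missing = [key for key in cv["required_global_attributes"] if key not in attrs]
print("missing required attributes:", missing)

# Identifiers must come from the controlled lists and mappings, and a
# source_id entry restricts the institution_id values allowed with it.
assert attrs["activity_id"] in cv["activity_id"]
assert attrs["institution_id"] in cv["institution_id"]
assert attrs["institution_id"] in cv["source_id"][attrs["source_id"]]["institution_id"]
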
"cohort":[ - "CMIP5" - ], - "institution_id":[ - "CSIRO-BOM" - ], - "source_id":"ACCESS-1-0", - "source":"ACCESS 1.0 (2011): \naerosol: CLASSIC (v1.0)\natmos: HadGAM2 (r1.1; N96, 192 x 145 longitude/latitude; 38 levels; top level 39255 m)\natmosChem: none\nland: MOSES2.2\nlandIce: none\nocean: ACCESS-OM (MOM4p1; tripolar primarily 1deg, 360 x 300 longitude/latitude; 50 levels; top grid cell 0-10 m)\nocnBgchem: none\nseaIce: CICE4.1" - }, - "AWI-CM-1-0":{ - "activity_participation":[ - "CMIP", - "CORDEX", - "HighResMIP", - "OMIP", - "PMIP", - "SIMIP", - "ScenarioMIP", - "VIACSAB" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "AWI" - ], - "source_id":"AWI-CM-1-0", - "source":"AWI-CM 1.0 (2017): \naerosol: none\natmos: ECHAM6.3.02p4 (T127L95 native atmosphere T127 gaussian grid; 384 x 192 longitude/latitude; 95 levels; top level 80 km)\natmosChem: none\nland: JSBACH 3.10\nlandIce: none\nocean: FESOM 1.4 (unstructured grid in the horizontal with 830305 wet nodes; 46 levels; top grid cell 0-5 m)\nocnBgchem: none\nseaIce: FESOM 1.4" - }, - "BESM-2-7":{ - "activity_participation":[ - "CMIP", - "DCPP", - "HighResMIP", - "ScenarioMIP" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "INPE" - ], - "source_id":"BESM-2-7", - "source":"BESM 2.7 (2017): \naerosol: none\natmos: BAM (v1.0, T062L28; 192 x 96 longitude/latitude; 28 levels; top level 3 hPa)\natmosChem: none\nland: SSiB 2.0\nlandIce: none\nocean: MOM-5 (MOM5, tripolar primarily 1 deg, 1/4 deg between 10S-10N; 360 x 300 longitude/latitude; 50 levels; top grid cell 0-10 m)\nocnBgchem: TOPAZ 2.0\nseaIce: SIS 1.0" - }, - "BNU-ESM-1-1":{ - "activity_participation":[ - "C4MIP", - "CMIP", - "GMMIP", - "GeoMIP", - "OMIP", - "ScenarioMIP" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "BNU" - ], - "source_id":"BNU-ESM-1-1", - "source":"BNU-ESM 1.1 (2016): \naerosol: CAM-chem; semi-interactive\natmos: CAM4 (2deg; 144 x 96 longitude/latitude; 26 levels; top level 2.194 mb)\natmosChem: none\nland: CoLM version 2014 with carbon-nitrogen interactions\nlandIce: none\nocean: MOM4p1 (tripolar, primarily 1deg latitude/longitude, down to 1/3deg within 30deg of the equatorial tropics; 360 x 200 longitude/latitude; 50 levels; top grid cell 0-10 m)\nocnBgchem: Dynamic ecosystem-carbon model version 1\nseaIce: CICE4.1" - }, - "CAMS_CSM1-0":{ - "activity_participation":[ - "CMIP" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "CAMS" - ], - "source_id":"CAMS_CSM1-0", - "source":"CAMS_CSM 1.0 (2016): \naerosol: none\natmos: ECHAM5_CAMS (T106; 320 x 160 longitude/latitude; 31 levels; top level 10 mb)\natmosChem: none\nland: CoLM 1.0\nlandIce: none\nocean: MOM4 (tripolar; 360 x 200 longitude/latitude, primarily 1deg latitude/longitude, down to 1/3deg within 30deg of the equatorial tropics; 50 levels; top grid cell 0-10 m)\nocnBgchem: none\nseaIce: SIS 1.0" - }, - "CIESM":{ - "activity_participation":[ - "CFMIP", - "CMIP", - "CORDEX", - "GMMIP", - "OMIP", - "SIMIP", - "ScenarioMIP" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "THU" - ], - "source_id":"CIESM", - "source":"CIESM (2017): \naerosol: MAM4\natmos: CIESM-AM (FV/FD; 288 x 192 longitude/latitude; 30 levels; top level 2.255 hPa)\natmosChem: trop_mam4\nland: CIESM-LM (modified CLM4.5)\nlandIce: none\nocean: CIESM-OM (FD, SCCGrid Displaced Pole; 720 x 560 longitude/latitude; 46 levels; top grid cell 0-6 m)\nocnBgchem: none\nseaIce: CICE4" - }, - "CMCC-CM2-HR4":{ - "activity_participation":[ - "CMIP", - "HighResMIP" - ], - "cohort":[ - 
"Registered" - ], - "institution_id":[ - "CMCC" - ], - "source_id":"CMCC-CM2-HR4", - "source":"CMCC-CM2-HR4 (2016): \naerosol: prescribed MACv2-SP\natmos: CAM4 (1deg; 288 x 192 longitude/latitude; 26 levels; top at ~2 hPa)\natmosChem: none\nland: CLM4.5 (SP mode)\nlandIce: none\nocean: NEMO3.6 (ORCA0.25 1/4 deg from the Equator degrading at the poles; 1442 x 1051 longitude/latitude; 50 vertical levels; top grid cell 0-1 m)\nocnBgchem: none\nseaIce: CICE4.0" - }, - "CMCC-CM2-HR5":{ - "activity_participation":[ - "CMIP", - "OMIP" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "CMCC" - ], - "source_id":"CMCC-CM2-HR5", - "source":"CMCC-CM2-HR5 (2017): \naerosol: MAM3\natmos: CAM5.3 (1deg; 288 x 192 longitude/latitude; 30 levels; top at ~2 hPa)\natmosChem: none\nland: CLM4.5 (BGC mode)\nlandIce: none\nocean: NEMO3.6 (ORCA0.25 1/4 deg from the Equator degrading at the poles; 1442 x 1051 longitude/latitude; 50 vertical levels; top grid cell 0-1 m)\nocnBgchem: none\nseaIce: CICE4.0" - }, - "CMCC-CM2-SR5":{ - "activity_participation":[ - "CMIP", - "DCPP", - "GMMIP", - "ScenarioMIP" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "CMCC" - ], - "source_id":"CMCC-CM2-SR5", - "source":"CMCC-CM2-SR5 (2016): \naerosol: MAM3\natmos: CAM5.3 (1deg; 288 x 192 longitude/latitude; 30 levels; top at ~2 hPa)\natmosChem: none\nland: CLM4.5 (BGC mode)\nlandIce: none\nocean: NEMO3.6 (ORCA1 tripolar primarly 1 deg lat/lon with meridional refinement down to 1/3 degree in the tropics; 362 x 292 longitude/latitude; 50 vertical levels; top grid cell 0-1 m)\nocnBgchem: none\nseaIce: CICE4.0" - }, - "CMCC-CM2-VHR4":{ - "activity_participation":[ - "CMIP", - "HighResMIP" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "CMCC" - ], - "source_id":"CMCC-CM2-VHR4", - "source":"CMCC-CM2-VHR4 (2017): \naerosol: prescribed MACv2-SP\natmos: CAM4 (1/4deg; 1152 x 768 longitude/latitude; 26 levels; top at ~2 hPa)\natmosChem: none\nland: CLM4.5 (SP mode)\nlandIce: none\nocean: NEMO3.6 (ORCA0.25 1/4 deg from the Equator degrading at the poles; 1442 x 1051 longitude/latitude; 50 vertical levels; top grid cell 0-1 m)\nocnBgchem: none\nseaIce: CICE4.0" - }, - "CMCC-ESM2-HR5":{ - "activity_participation":[ - "CMIP", - "OMIP" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "CMCC" - ], - "source_id":"CMCC-ESM2-HR5", - "source":"CMCC-ESM2-HR5 (2017): \naerosol: MAM3\natmos: CAM5.3 (1deg; 288 x 192 longitude/latitude; 30 levels; top at ~2 hPa)\natmosChem: none\nland: CLM4.5 (BGC mode)\nlandIce: none\nocean: NEMO3.6 (ORCA0.25 1/4 deg from the Equator degrading at the poles; 1442 x 1051 longitude/latitude; 50 vertical levels; top grid cell 0-1 m)\nocnBgchem: BFM5.1\nseaIce: CICE4.0" - }, - "CMCC-ESM2-SR5":{ - "activity_participation":[ - "C4MIP", - "CMIP", - "LS3MIP", - "LUMIP" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "CMCC" - ], - "source_id":"CMCC-ESM2-SR5", - "source":"CMCC-ESM2-SR5 (2017): \naerosol: MAM3\natmos: CAM5.3 (1deg; 288 x 192 longitude/latitude; 30 levels; top at ~2 hPa)\natmosChem: none\nland: CLM4.5 (BGC mode)\nlandIce: none\nocean: NEMO3.6 (ORCA1 tripolar primarly 1 deg lat/lon with meridional refinement down to 1/3 degree in the tropics; 362 x 292 longitude/latitude; 50 vertical levels; top grid cell 0-1 m)\nocnBgchem: BFM5.1\nseaIce: CICE4.0" - }, - "CNRM-CM6-1":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "CNRM-CERFACS" - ], - "source_id":"CNRM-CM6-1", - "source":"CNRM-CM6-1 (2016): \naerosol: 
climatological fields computed by TACTIC (v2.0)\natmos: Arpege 6.2 (Tl127; 91 levels; top level 78.4 km)\natmosChem: OZL_v2\nland: Surfex 8.0c\nlandIce: none (except for some FAFMIP experiments)\nocean: Nemo 3.6 (eORCA1; tripolar primarily 1deg latitude/longitude; 75 levels; top grid cell 0-1 m)\nocnBgchem: none\nseaIce: Gelato 6.1" - }, - "CNRM-CM6-1-HR":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "CNRM-CERFACS" - ], - "source_id":"CNRM-CM6-1-HR", - "source":"CNRM-CM6-1-HR (2016): \naerosol: climatological fields computed by TACTIC (v2.0)\natmos: Arpege 6.2 (Tl359; 91 levels; top level 78.4 km)\natmosChem: OZL_v2\nland: Surfex 8.0c\nlandIce: none\nocean: Nemo 3.6 (eORCA025; tripolar primarily 1/4deg latitude/longitude; 75 levels; top grid cell 0-1 m)\nocnBgchem: none\nseaIce: Gelato 6.1" - }, - "CNRM-ESM2-1":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "CNRM-CERFACS" - ], - "source_id":"CNRM-ESM2-1", - "source":"CNRM-ESM2-1 (2016): \naerosol: TACTIC (v2.0)\natmos: Arpege 6.2 (Tl127; 91 levels; top level 78.4 km)\natmosChem: REPROBUS-C (v2.0)\nland: Surfex 8.0c\nlandIce: none\nocean: Nemo 3.6 (eORCA1; tripolar primarily 1deg latitude/longitude; 75 levels; top grid cell 0-1 m)\nocnBgchem: Pisces 2.s\nseaIce: Gelato 6.1" - }, - "CNRM-ESM2-1-HR":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "CNRM-CERFACS" - ], - "source_id":"CNRM-ESM2-1-HR", - "source":"CNRM-ESM2-1-HR (2016): \naerosol: TACTIC (v2.0)\natmos: Arpege 6.2 (Tl359; 91 levels; top level 78.4 km)\natmosChem: OZL_v2\nland: Surfex 8.0c\nlandIce: none\nocean: Nemo 3.6 (eORCA025; tripolar primarily 1/4deg latitude/longitude; 75 levels; top grid cell 0-1 m)\nocnBgchem: Pisces 2.s\nseaIce: Gelato 6.1" - }, - "EC-Earth3":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "EC-Earth-Consortium" - ], - "source_id":"EC-Earth3", - "source":"EC-Earth3 (2017): \naerosol: none\natmos: IFS cy36r4 (TL255, linearly reduced Gaussian grid equivalent to 512 x 256, 91 levels, top level: 0,01 hPa)\natmosChem: none\nland: HTESSEL (land surface scheme built in IFS)\nlandIce: none\nocean: NEMO3.6 (ORCA1 tripolar primarily 1 deg latitude/longitude with meridional refinement down to 1/3 degree in the tropics; 75 levels; top grid cell 5 m)\nocnBgchem: none\nseaIce: LIM3" - }, - "EC-Earth3-AerChem":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "EC-Earth-Consortium" - ], - "source_id":"EC-Earth3-AerChem", - "source":"EC-Earth3-AerChem (2017): \naerosol: TM5\natmos: IFS cy36r4 (TL255, linearly reduced Gaussian grid equivalent to 512 x 256, 91 levels, top level: 0.01 hPa)\natmosChem: TM5 (3 deg. (long.) x 2 deg. (lat.), 34 levels, top level: 0.1 hPa\nland: HTESSEL (land surface scheme built in IFS)\nlandIce: none\nocean: NEMO3.6 (ORCA1 tripolar primarily 1 deg latitude/longitude with meridional refinement down to 1/3 degree in the tropics; 75 levels; top grid cell 5 m)\nocnBgchem: none\nseaIce: LIM3" - }, - "EC-Earth3-CC":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "EC-Earth-Consortium" - ], - "source_id":"EC-Earth3-CC", - "source":"EC-Earth3-CC (2017): \naerosol: none\natmos: IFS cy36r4 (TL255, linearly reduced Gaussian grid equivalent to 512 x 256, 91 levels, top level: 0.01 hPa)\natmosChem: TM5 (3 deg. (long.) x 2 deg. 
(lat.), 34 levels, top level: 0.1 hPa\nland: LPJ-GUESS v4\nlandIce: none\nocean: NEMO3.6 (ORCA1 tripolar primarily 1 deg latitude/longitude with meridional refinement down to 1/3 degree in the tropics; 75 levels; top grid cell 5 m)\nocnBgchem: PISCES v2\nseaIce: LIM3" - }, - "EC-Earth3-GrIS":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "EC-Earth-Consortium" - ], - "source_id":"EC-Earth3-GrIS", - "source":"EC-Earth3-GrIS (2017): \naerosol: none\natmos: IFS cy36r4 (TL255, linearly reduced Gaussian grid equivalent to 512 x 256, 91 levels, top level: 0.01 hPa)\natmosChem: none\nland: HTESSEL (land surface scheme built in IFS)\nlandIce: PISM 0.7 (5 km x 5 km, L442)\nocean: NEMO3.6 (ORCA1 tripolar primarily 1 deg latitude/longitude with meridional refinement down to 1/3 degree in the tropics; 75 levels; top grid cell 5 m)\nocnBgchem: none\nseaIce: LIM3" - }, - "EC-Earth3-HR":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "EC-Earth-Consortium" - ], - "source_id":"EC-Earth3-HR", - "source":"EC-Earth3-HR (2017): \naerosol: none\natmos: IFS cy36r4 (TL511, linearly reduced Gaussian grid equivalent to 1024 x 512; 91 levels; top level 0.01 hPa)\natmosChem: none\nland: HTESSEL (land surface scheme built in IFS)\nlandIce: none\nocean: NEMO3.6 (tripolar, 1442x1921; 75 levels; top grid cell 5m)\nocnBgchem: none\nseaIce: LIM3" - }, - "EC-Earth3-LR":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "EC-Earth-Consortium" - ], - "source_id":"EC-Earth3-LR", - "source":"EC-Earth3-LR (2017): \naerosol: none\natmos: IFS cy36r4 (TL159, linearly reduced Gaussian grid equivalent to 320 x 160; 62 levels; top level 5 hPa)\natmosChem: none\nland: HTESSEL (land surface scheme built in IFS)\nlandIce: none\nocean: NEMO3.6 (ORCA1 tripolar primarily 1 deg latitude/longitude with meridional refinement down to 1/3 degree in the tropics; 75 levels; top grid cell 5 m)\nocnBgchem: none\nseaIce: LIM3" - }, - "EC-Earth3-Veg":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "EC-Earth-Consortium" - ], - "source_id":"EC-Earth3-Veg", - "source":"EC-Earth3-Veg (2017): \naerosol: none\natmos: IFS cy36r4 (TL255, linearly reduced Gaussian grid equivalent to 512 x 256, 91 levels, top level: 0.01 hPa)\natmosChem: none\nland: LPJ-GUESS v4\nlandIce: none\nocean: NEMO3.6 (ORCA1 tripolar primarily 1 deg latitude/longitude with meridional refinement down to 1/3 degree in the tropics; 75 levels; top grid cell 5 m)\nocnBgchem: none\nseaIce: LIM3" - }, - "EC-Earth3-Veg-LR":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "EC-Earth-Consortium" - ], - "source_id":"EC-Earth3-Veg-LR", - "source":"EC-Earth3-Veg-LR (2017): \naerosol: none\natmos: IFS cy36r4 (TL159, linearly reduced Gaussian grid equivalent to 320 x 160; 62 levels; top level 5 hPa)\natmosChem: none\nland: LPJ-GUESS v4\nlandIce: none\nocean: NEMO3.6 (ORCA1 tripolar primarily 1 deg latitude/longitude with meridional refinement down to 1/3 degree in the tropics; 75 levels; top grid cell 5 m)\nocnBgchem: none\nseaIce: LIM3" - }, - "EMAC-2-53-AerChem":{ - "activity_participation":[ - "AerChemMIP", - "CMIP" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "MESSy-Consortium" - ], - "source_id":"EMAC-2-53-AerChem", - "source":"EMAC-2-53-AerChem (2017): \naerosol: gmxe 2.2.x\natmos: ECHAM5.3.02 (modified, spectral T42; 128 x 64 
longitude/latitude; 47 levels; top level 0.01 hPa)\natmosChem: MECCA 3.8.x\nland: same as Atmosphere\nlandIce: none\nocean: MPIOM 1.3.0-beta (bipolar GR1.5, approximately 1.5deg; 256 x 220 longitude/latitude; 40 levels; top grid cell 0-12 m)\nocnBgchem: none\nseaIce: thermodynamic (Semtner zero-layer) dynamic (Hibler 79) sea ice model" - }, - "EMAC-2-53-Vol":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "MESSy-Consortium" - ], - "source_id":"EMAC-2-53-Vol", - "source":"EMAC-2-53-Vol (2017): \naerosol: gmxe 2.2.x\natmos: ECHAM5.3.2 (modified; spectral T42; 128 x 64 longitude/latitude; 90 levels; top level 0.001 hPa)\natmosChem: MECCA 3.8.x\nland: same as Atmosphere\nlandIce: none\nocean: MPIOM 1.3.0-beta (bipolar GR1.5; approximately 1.5deg reducing toward the poles, 256 x 220 longitude/latitude; 40 levels; top grid cell 0-12 m)\nocnBgchem: none\nseaIce: thermodynamic (Semtner zero-layer) dynamic (Hibler 79) sea ice model" - }, - "GFDL-AM4":{ - "activity_participation":[ - "CMIP" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "NOAA-GFDL" - ], - "source_id":"GFDL-AM4", - "source":"GFDL-AM4 (2017): \naerosol: interactive\natmos: GFDL-AM4.0 (Cubed-sphere (c96) - 1 degree nominal horizontal resolution; 360x180 longitude/latitude; 32 levels; model top: 1 hPa)\natmosChem: fast chemistry, aerosol only\nland: GFDL-LM4.0\nlandIce: GFDL-LM4.0\nocean: none\nocnBgchem: none\nseaIce: none" - }, - "GFDL-CM4":{ - "activity_participation":[ - "CMIP", - "OMIP" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "NOAA-GFDL" - ], - "source_id":"GFDL-CM4", - "source":"GFDL-CM4 (2017): \naerosol: interactive\natmos: GFDL-AM4.1 (Cubed-sphere (c96) - 1 degree nominal horizontal resolution; 360x180 longitude/latitude; 32 levels; model top: 1 hPa)\natmosChem: unnamed (fast chemistry, aerosol only)\nland: GFDL-LM4.0\nlandIce: GFDL-LM4.0\nocean: GFDL-MOM6 (tripolar - nominal 0.25 deg latitude/longitude; 1440x720 longitude/latitude; 75 levels; top grid cell 0-2 m)\nocnBgchem: none\nseaIce: SIS2" - }, - "GFDL-ESM4":{ - "activity_participation":[ - "AerChemMIP", - "C4MIP", - "CFMIP", - "CMIP", - "DAMIP", - "DynVarMIP", - "GMMIP", - "LUMIP", - "OMIP", - "RFMIP", - "ScenarioMIP" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "NOAA-GFDL" - ], - "source_id":"GFDL-ESM4", - "source":"GFDL-ESM4 (2017): \naerosol: interactive\natmos: GFDL-AM4.1 (Cubed-sphere (c96) - 1 degree nominal horizontal resolution; 360x180 longitude/latitude; 48 levels, model top: 1 Pa)\natmosChem: GFDL-ATMCHEM4.1 (full atmospheric chemistry)\nland: GFDL-LM4.1\nlandIce: GFDL-LM4.1\nocean: GFDL-MOM6 (tripolar - nominal 0.5 deg; 720x360 longitude/latitude; 75 levels; top grid cell 0-2 m)\nocnBgchem: COBALT 2.0\nseaIce: SIS2" - }, - "GISS-E2-1G":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "NASA-GISS" - ], - "source_id":"GISS-E2-1G", - "source":"GISS-E2.1G (2016): \naerosol: varies with physics-version (p==1 none, p==3 OMA, p==4 TOMAS, p==5 MATRIX)\natmos: GISS-E2.1 (90 x 144 2x2.5; 40 levels; top level 0.1hPa)\natmosChem: varies with physics-version (p==1 Non-interactive, p>1 GPUCCINI)\nland: GISS LSM\nlandIce: Fixed\nocean: GISS Ocean (1 deg latitude/longitude; 32 levels; top grid cell 0-10m)\nocnBgchem: none\nseaIce: GISS SI" - }, - "GISS-E2-1H":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "NASA-GISS" - ], - "source_id":"GISS-E2-1H", - "source":"GISS-E2.1H (2016): 
\naerosol: varies with physics-version (p==1 none, p==3 OMA, p==4 TOMAS, p==5 MATRIX)\natmos: GISS-E2.1 (90 x 144 2x2.5; 40 levels; top level 0.1hPa)\natmosChem: varies with physics-version (p==1 Non-interactive, p>1 GPUCCINI)\nland: GISS LSM\nlandIce: Fixed\nocean: HYCOM Ocean (tripolar grid; ~1 deg latitude/longitude; 26 levels; top grid cell 0-10m)\nocnBgchem: none\nseaIce: GISS SI" - }, - "HadGEM3-GC31-HH":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "MOHC" - ], - "source_id":"HadGEM3-GC31-HH", - "source":"HadGEM3-GC31-HH (2016): \naerosol: UKCA-GLOMAP-mode\natmos: MetUM-HadGEM3-GA7.1 (1024 x 768 N512; 85 levels; top level 85km)\natmosChem: none\nland: JULES-HadGEM3-GL7.1\nlandIce: none\nocean: NEMO-HadGEM3-GO6.0 (ORCA12 tripolar primarily 1/12 deg latitude/longitude; 75 levels; top grid cell 0-1m)\nocnBgchem: none\nseaIce: CICE-HadGEM3-GSI8 (ORCA12 tripolar primarily 1/12 deg latitude/longitude)" - }, - "HadGEM3-GC31-HM":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "MOHC" - ], - "source_id":"HadGEM3-GC31-HM", - "source":"HadGEM3-GC31-HM (2016): \naerosol: UKCA-GLOMAP-mode\natmos: MetUM-HadGEM3-GA7.1 (1024 x 768 N512; 85 levels; top level 85km)\natmosChem: none\nland: JULES-HadGEM3-GL7.1\nlandIce: none\nocean: NEMO-HadGEM3-GO6.0 (ORCA025 tripolar primarily 0.25 deg latitude/longitude; 75 levels; top grid cell 0-1m)\nocnBgchem: none\nseaIce: CICE-HadGEM3-GSI8 (ORCA025 tripolar primarily 0.25 deg latitude/longitude)" - }, - "HadGEM3-GC31-LL":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "MOHC" - ], - "source_id":"HadGEM3-GC31-LL", - "source":"HadGEM3-GC31-LL (2016): \naerosol: UKCA-GLOMAP-mode\natmos: MetUM-HadGEM3-GA7.1 (192 x 144 N96; 85 levels; top level 85km)\natmosChem: none\nland: JULES-HadGEM3-GL7.1\nlandIce: none\nocean: NEMO-HadGEM3-GO6.0 (ORCA1 tripolar primarily 1 deg latitude/longitude with meridional refinement down to 1/3 degree in the tropics; 75 levels; top grid cell 0-1m)\nocnBgchem: none\nseaIce: CICE-HadGEM3-GSI8 (ORCA1 tripolar primarily 1 deg latitude/longitude)" - }, - "HadGEM3-GC31-MM":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "MOHC" - ], - "source_id":"HadGEM3-GC31-MM", - "source":"HadGEM3-GC31-MM (2016): \naerosol: UKCA-GLOMAP-mode\natmos: MetUM-HadGEM3-GA7.1 (432 x 324 N216; 85 levels; top level 85km)\natmosChem: none\nland: JULES-HadGEM3-GL7.1\nlandIce: none\nocean: NEMO-HadGEM3-GO6.0 (ORCA025 tripolar primarily 0.25 deg latitude/longitude; 75 levels; top grid cell 0-1m)\nocnBgchem: none\nseaIce: CICE-HadGEM3-GSI8 (ORCA025 tripolar primarily 0.25 deg latitude/longitude)" - }, - "IITM-ESM":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "CCCR-IITM" - ], - "source_id":"IITM-ESM", - "source":"IITM-ESM (2015): \naerosol: unnamed (prescribed MAC-v2)\natmos: GFS (192 x 94 T62; 64 levels; top level 0.2 mb)\natmosChem: none\nland: NOAH LSM\nlandIce: none\nocean: MOM4p1 (tripolar, 360x200; 50 levels; top grid cell 5m)\nocnBgchem: TOPAZ\nseaIce: SIS" - }, - "MIROC-ES2H":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "MIROC" - ], - "source_id":"MIROC-ES2H", - "source":"MIROC-ES2H (2017): \naerosol: SPRINTARS6.0\natmos: CCSR AGCM (T85; 256 x 128 longitude/latitude; 81 levels; top level 0.004 hPa)\natmosChem: CHASER4.0\nland: MATSIRO6.0+VISIT-e ver.1.0\nlandIce: 
none\nocean: COCO4.9 (tripolar primarily 1deg; 360 x 256 longitude/latitude; 63 levels; top grid cell 0-2 m)\nocnBgchem: OECO ver.2.0; NPZD-type with C/N/P/Fe/O cycles\nseaIce: COCO4.9" - }, - "MIROC-ES2L":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "MIROC" - ], - "source_id":"MIROC-ES2L", - "source":"MIROC-ES2L (2017): \naerosol: SPRINTARS6.0\natmos: CCSR AGCM (T42; 128 x 64 longitude/latitude; 40 levels; top level 3 hPa)\natmosChem: none\nland: MATSIRO6.0+VISIT-e ver.1.0\nlandIce: none\nocean: COCO4.9 (tripolar primarily 1deg; 360 x 256 longitude/latitude; 63 levels; top grid cell 0-2 m)\nocnBgchem: OECO ver.2.0; NPZD-type with C/N/P/Fe/O cycles\nseaIce: COCO4.9" - }, - "MIROC6":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "MIROC" - ], - "source_id":"MIROC6", - "source":"MIROC6 (2017): \naerosol: SPRINTARS6.0\natmos: CCSR AGCM (T85; 256x128; 81 levels; top level 0.004 hPa)\natmosChem: none\nland: MATSIRO6.0\nlandIce: none\nocean: COCO4.9 (360x256x63; tripolar primarily 1deg; 63 levels; top grid cell 0-2 m)\nocnBgchem: none\nseaIce: COCO4.9" - }, - "MPIESM-1-2-HR":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "MPI-M" - ], - "source_id":"MPIESM-1-2-HR", - "source":"MPIESM1.2-HR (2017): \naerosol: none, prescribed MACv2-SP\natmos: ECHAM6.3 (spectral T127; 384 x 192 longitude/latitude; 95 levels; top level 0.01 hPa)\natmosChem: none\nland: JSBACH3.2\nlandIce: none/prescribed\nocean: MPIOM1.63 (tripolar TP04; approximately 0.4deg latitude/longitude; 40 levels; top grid cell 0-12 m)\nocnBgchem: HAMOCC\nseaIce: thermodynamic (Semtner zero-layer) dynamic (Hibler 79) sea ice model" - }, - "MPIESM-1-2-LR":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "MPI-M" - ], - "source_id":"MPIESM-1-2-LR", - "source":"MPIESM1.2-LR (2017): \naerosol: none, prescribed MACv2-SP\natmos: ECHAM6.3 (spectral T63; 192 x 96 longitude/latitude; 47 levels; top level 0.01 hPa)\natmosChem: none\nland: JSBACH3.2\nlandIce: none/prescribed\nocean: MPIOM1.63 (bipolar GR1.5; approximately 1.5deg latitude/longitude; 40 levels; top grid cell 0-12 m)\nocnBgchem: HAMOCC\nseaIce: thermodynamic (Semtner zero-layer) dynamic (Hibler 79) sea ice model" - }, - "MPIESM-2-LR":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "MPI-M" - ], - "source_id":"MPIESM-2-LR", - "source":"MPIESM2 (2017): \naerosol: none, prescribed MACv2-SP\natmos: ICON-AES (icosahedral/triangles; 160 km; 47 levels; top level 80 km)\natmosChem: none\nland: JSBACH4.2\nlandIce: none/prescribed\nocean: ICON-OES (icosahedral/triangles; 40 km; 40 levels; top grid cell 0-12 m)\nocnBgchem: HAMOCC\nseaIce: thermodynamic (Semtner zero-layer) dynamic (Hibler 79) sea ice model" - }, - "MRI-ESM2-0":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "MRI" - ], - "source_id":"MRI-ESM2-0", - "source":"MRI-ESM2.0 (2017): \naerosol: MASINGAR mk2r4\natmos: MRI-AGCM3.5 (320 x 160 TL159; 80 levels; top level 0.01 hPa)\natmosChem: MRI-CCM2.1\nland: HAL 1.0\nlandIce: none\nocean: MRI.COM4.4 (tripolar primarily 0.5 deg latitude/1 deg longitude with meridional refinement down to 0.3 deg within 10 degrees north and south of the equator; 360 x 364 longitude/latitude; 61 levels; top grid cell 0-2 m)\nocnBgchem: MRI.COM4.4\nseaIce: MRI.COM4.4" - }, - "NICAM16-7S":{ - "activity_participation":[ - "" - 
], - "cohort":[ - "Registered" - ], - "institution_id":[ - "MIROC" - ], - "source_id":"NICAM16-7S", - "source":"NICAM16-7S (2017): \naerosol: Prescribed MACv2-SP\natmos: NICAM.16 (56km icosahedral grid; 163,842 grid cells (=10*4^7+2); 38 levels; top level 40 km)\natmosChem: none\nland: MATSIRO6 (w/o MOSAIC)\nlandIce: none\nocean: none\nocnBgchem: none\nseaIce: Fixed" - }, - "NICAM16-8S":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "MIROC" - ], - "source_id":"NICAM16-8S", - "source":"NICAM16-8S (2017): \naerosol: Prescribed MACv2-SP\natmos: NICAM.16 (28km icosahedral grid; 655,362 grid cells (=10*4^8+2); 38 levels; top level 40 km)\natmosChem: none\nland: MATSIRO6 (w/o MOSAIC)\nlandIce: none\nocean: none\nocnBgchem: none\nseaIce: Fixed" - }, - "NICAM16-9D-L78":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "MIROC" - ], - "source_id":"NICAM16-9D-L78", - "source":"NICAM16-9D-L78 (2017): \naerosol: Prescribed MACv2-SP\natmos: NICAM.16 (14km icosahedral grid; 2,621,442 grid cells (=10*4^9+2); 78 levels; top level 40 km)\natmosChem: none\nland: MATSIRO6 (w/o MOSAIC)\nlandIce: none\nocean: none\nocnBgchem: none\nseaIce: Fixed" - }, - "NICAM16-9S":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "MIROC" - ], - "source_id":"NICAM16-9S", - "source":"NICAM16-9S (2017): \naerosol: Prescribed MACv2-SP\natmos: NICAM.16 (14km icosahedral grid; 2,621,442 grid cells (=10*4^9+2); 38 levels; top level 40 km)\natmosChem: none\nland: MATSIRO6 (w/o MOSAIC)\nlandIce: none\nocean: none\nocnBgchem: none\nseaIce: Fixed" - }, - "NorESM2-HH":{ - "activity_participation":[ - "HighResMIP" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "NCC" - ], - "source_id":"NorESM2-HH", - "source":"NorESM2-HH (2018): \naerosol: OsloAero\natmos: CAM-OSLO (0.25 degree resolution; 1152 x 768; 32 levels; top level 3 mb)\natmosChem: OsloChemSimp\nland: CLM\nlandIce: CISM\nocean: MICOM (0.25 degree resolution; 1440 x 1152; 70 levels; top grid cell minimum 0-2.5 m [native model uses hybrid density and generic upper-layer coordinate interpolated to z-level for contributed data])\nocnBgchem: HAMOCC\nseaIce: CICE" - }, - "NorESM2-LM":{ - "activity_participation":[ - "AerChemMIP", - "CFMIP", - "CMIP", - "DAMIP", - "DCPP", - "LUMIP", - "OMIP", - "PMIP", - "RFMIP", - "ScenarioMIP", - "VolMIP" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "NCC" - ], - "source_id":"NorESM2-LM", - "source":"NorESM2-LM (2017): \naerosol: OsloAero\natmos: CAM-OSLO (2 degree resolution; 144 x 96; 32 levels; top level 3 mb)\natmosChem: OsloChemSimp\nland: CLM\nlandIce: CISM\nocean: MICOM (1 degree resolution; 360 x 384; 70 levels; top grid cell minimum 0-2.5 m [native model uses hybrid density and generic upper-layer coordinate interpolated to z-level for contributed data])\nocnBgchem: HAMOCC\nseaIce: CICE" - }, - "NorESM2-LME":{ - "activity_participation":[ - "C4MIP", - "CMIP", - "GeoMIP", - "LUMIP", - "OMIP" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "NCC" - ], - "source_id":"NorESM2-LME", - "source":"NorESM2-LME (2017): \naerosol: OsloAero\natmos: CAM-OSLO (2 degree resolution; 144 x 96; 32 levels; top level 3 mb)\natmosChem: OsloChemSimp\nland: CLM\nlandIce: CISM\nocean: MICOM (1 degree resolution; 360 x 384; 70 levels; top grid cell minimum 0-2.5 m [native model uses hybrid density and generic upper-layer coordinate interpolated to z-level for contributed 
data])\nocnBgchem: HAMOCC\nseaIce: CICE" - }, - "NorESM2-LMEC":{ - "activity_participation":[ - "AerChemMIP" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "NCC" - ], - "source_id":"NorESM2-LMEC", - "source":"NorESM2-LMEC (2017): \naerosol: OsloAero\natmos: CAM-OSLO (2 degree resolution; 144 x 96; 32 levels; top level 3 mb)\natmosChem: OsloChemComp\nland: CLM\nlandIce: CISM\nocean: MICOM (1 degree resolution; 360 x 384; 70 levels; top grid cell minimum 0-2.5 m [native model uses hybrid density and generic upper-layer coordinate interpolated to z-level for contributed data])\nocnBgchem: HAMOCC\nseaIce: CICE" - }, - "NorESM2-MH":{ - "activity_participation":[ - "AerChemMIP", - "CFMIP", - "CMIP", - "DAMIP", - "OMIP", - "RFMIP", - "ScenarioMIP" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "NCC" - ], - "source_id":"NorESM2-MH", - "source":"NorESM2-MH (2017): \naerosol: OsloAero\natmos: CAM-OSLO (1 degree resolution; 288 x 192; 32 levels; top level 3 mb)\natmosChem: OsloChemSimp\nland: CLM\nlandIce: CISM\nocean: MICOM (0.25 degree resolution; 1440 x 1152; 70 levels; top grid cell minimum 0-2.5 m [native model uses hybrid density and generic upper-layer coordinate interpolated to z-level for contributed data])\nocnBgchem: HAMOCC\nseaIce: CICE" - }, - "NorESM2-MM":{ - "activity_participation":[ - "AerChemMIP", - "CFMIP", - "CMIP", - "DAMIP", - "OMIP", - "RFMIP", - "ScenarioMIP" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "NCC" - ], - "source_id":"NorESM2-MM", - "source":"NorESM2-MM (2017): \naerosol: OsloAero\natmos: CAM-OSLO (1 degree resolution; 288 x 192; 32 levels; top level 3 mb)\natmosChem: OsloChemSimp\nland: CLM\nlandIce: CISM\nocean: MICOM (1 degree resolution; 360 x 384; 70 levels; top grid cell minimum 0-2.5 m [native model uses hybrid density and generic upper-layer coordinate interpolated to z-level for contributed data])\nocnBgchem: HAMOCC\nseaIce: CICE" - }, - "PCMDI-test-1-0":{ - "activity_participation":[ - "CMIP" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "PCMDI" - ], - "source_id":"PCMDI-test-1-0", - "source":"PCMDI-test 1.0 (1989): \naerosol: none\natmos: Earth1.0-gettingHotter (360x180; 50 levels; top level 0.1 mb)\natmosChem: none\nland: Earth1.0\nlandIce: none\nocean: BlueMarble1.0-warming (360x180; 50 levels; top grid cell 0-10m)\nocnBgchem: none\nseaIce: Declining1.0-warming" - }, - "UKESM1-0-LL":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "MOHC", - "NERC" - ], - "source_id":"UKESM1-0-LL", - "source":"UKESM1.0-LL (2017): \naerosol: UKCA-GLOMAP-mode\natmos: MetUM-HadGEM3-GA7.1 (192 x 144 N96; 85 levels; top level 85km)\natmosChem: UKCA-StratTrop\nland: JULES-HadGEM3-GL7.1\nlandIce: BISICLES (only active in ISMIP experiments)\nocean: NEMO-HadGEM3-GO6.0 (ORCA1 tripolar primarily 1 deg latitude/longitude with meridional refinement down to 1/3 degree in the tropics; 75 levels; top grid cell 0-1m)\nocnBgchem: MEDUSA2\nseaIce: CICE-HadGEM3-GSI8 (ORCA1 tripolar primarily 1 deg latitude/longitude)" - }, - "UKESM1-0-MMh":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "MOHC", - "NERC" - ], - "source_id":"UKESM1-0-MMh", - "source":"UKESM1.0-MMh (2018): \naerosol: UKCA-GLOMAP-mode (horizontal resolution degraded relative to that used for atmosphere physics)\natmos: MetUM-HadGEM3-GA7.1 (432 x 324 N216; 85 levels; top level 85km)\natmosChem: UKCA-StratTrop (horizontal resolution degraded relative to that used for 
atmosphere physics)\nland: JULES-HadGEM3-GL7.1\nlandIce: BISICLES (only active in ISMIP experiments)\nocean: NEMO-HadGEM3-GO6.0 (ORCA025 tripolar primarily 0.25 deg latitude/longitude; 75 levels; top grid cell 0-1m)\nocnBgchem: MEDUSA2 (horizontal resolution degraded relative to that used for ocean physics)\nseaIce: CICE-HadGEM3-GSI8 (ORCA025 tripolar primarily 0.25 deg latitude/longitude)" - }, - "VRESM-1-0":{ - "activity_participation":[ - "" - ], - "cohort":[ - "Registered" - ], - "institution_id":[ - "CSIR-CSIRO" - ], - "source_id":"VRESM-1-0", - "source":"VRESM 1.0 (2016): \naerosol: Rotstayn-1.0\natmos: VCAM-1.0 (192 x 192 x 6 C192; 35 levels; top level 35km)\natmosChem: none\nland: CABLE v2.2.3\nlandIce: none\nocean: VCOM-1.0 (192 x 192 x 6 C192; 35 levels; top grid cell 0-10m)\nocnBgchem: PISCES v3.4socco\nseaIce: CSIR-ICE (visco-plastic)" - } - }, - "source_type":[ - "AER", - "AGCM", - "AOGCM", - "BGCM", - "CHEM", - "ESM", - "ISM", - "LAND", - "OGCM", - "RAD", - "SLAB" - ], - "frequency":[ - "1hr", - "1hrClimMon", - "3hr", - "3hrClim", - "6hr", - "day", - "decadal", - "fx", - "mon", - "monClim", - "subhr", - "yr", - "yrClim" - ], - "grid_label":[ - "gm", - "gn", - "gna", - "gng", - "gnz", - "gr", - "gr1", - "gr1a", - "gr1g", - "gr1z", - "gr2", - "gr2a", - "gr2g", - "gr2z", - "gr3", - "gr3a", - "gr3g", - "gr3z", - "gr4", - "gr4a", - "gr4g", - "gr4z", - "gr5", - "gr5a", - "gr5g", - "gr5z", - "gr6", - "gr6a", - "gr6g", - "gr6z", - "gr7", - "gr7a", - "gr7g", - "gr7z", - "gr8", - "gr8a", - "gr8g", - "gr8z", - "gr9", - "gr9a", - "gr9g", - "gr9z", - "gra", - "grg", - "grz" - ], - "nominal_resolution":[ - "0.5 km", - "1 km", - "10 km", - "100 km", - "1000 km", - "10000 km", - "1x1 degree", - "2.5 km", - "25 km", - "250 km", - "2500 km", - "5 km", - "50 km", - "500 km", - "5000 km" - ], - "realm":{ - "aerosol":"Aerosol", - "atmos":"Atmosphere", - "atmosChem":"Atmospheric Chemistry", - "land":"Land Surface", - "landIce":"Land Ice", - "ocean":"Ocean", - "ocnBgchem":"Ocean Biogeochemistry", - "seaIce":"Sea Ice" - }, - "table_id":[ - "3hr", - "6hrLev", - "6hrPlev", - "6hrPlevPt", - "AERday", - "AERhr", - "AERmon", - "AERmonZ", - "Amon", - "CF3hr", - "CFday", - "CFmon", - "CFsubhr", - "E1hr", - "E1hrClimMon", - "E3hr", - "E3hrPt", - "E6hrZ", - "Eday", - "EdayZ", - "Efx", - "Emon", - "EmonZ", - "Esubhr", - "Eyr", - "IfxAnt", - "IfxGre", - "ImonAnt", - "ImonGre", - "IyrAnt", - "IyrGre", - "LImon", - "Lmon", - "Oclim", - "Oday", - "Odec", - "Ofx", - "Omon", - "Oyr", - "SIday", - "SImon", - "day", - "fx" - ], - "license":[ - "^CMIP6 model data produced by .* is licensed under a Creative Commons Attribution.*ShareAlike 4.0 International License (https://creativecommons.org/licenses)\\. Consult https://pcmdi.llnl.gov/CMIP6/TermsOfUse for terms of use governing CMIP6 output, including citation requirements and proper acknowledgment\\. Further information about this data, including some limitations, can be found via the further_info_url (recorded as a global attribute in this file) .*\\. The data producers and data providers make no warranty, either express or implied, including, but not limited to, warranties of merchantability and fitness for a particular purpose\\. 
All liabilities arising from the supply of the information (including any liability arising in negligence) are excluded to the fullest extent permitted by law\\.$" - ], - "mip_era":[ - "CMIP6" - ], - "sub_experiment_id":{ - "none":"none", - "s1910":"initialized near end of year 1910", - "s1950":"initialized near end of year 1950", - "s1960":"initialized near end of year 1960", - "s1961":"initialized near end of year 1961", - "s1962":"initialized near end of year 1962", - "s1963":"initialized near end of year 1963", - "s1964":"initialized near end of year 1964", - "s1965":"initialized near end of year 1965", - "s1966":"initialized near end of year 1966", - "s1967":"initialized near end of year 1967", - "s1968":"initialized near end of year 1968", - "s1969":"initialized near end of year 1969", - "s1970":"initialized near end of year 1970", - "s1971":"initialized near end of year 1971", - "s1972":"initialized near end of year 1972", - "s1973":"initialized near end of year 1973", - "s1974":"initialized near end of year 1974", - "s1975":"initialized near end of year 1975", - "s1976":"initialized near end of year 1976", - "s1977":"initialized near end of year 1977", - "s1978":"initialized near end of year 1978", - "s1979":"initialized near end of year 1979", - "s1980":"initialized near end of year 1980", - "s1981":"initialized near end of year 1981", - "s1982":"initialized near end of year 1982", - "s1983":"initialized near end of year 1983", - "s1984":"initialized near end of year 1984", - "s1985":"initialized near end of year 1985", - "s1986":"initialized near end of year 1986", - "s1987":"initialized near end of year 1987", - "s1988":"initialized near end of year 1988", - "s1989":"initialized near end of year 1989", - "s1990":"initialized near end of year 1990", - "s1991":"initialized near end of year 1991", - "s1992":"initialized near end of year 1992", - "s1993":"initialized near end of year 1993", - "s1994":"initialized near end of year 1994", - "s1995":"initialized near end of year 1995", - "s1996":"initialized near end of year 1996", - "s1997":"initialized near end of year 1997", - "s1998":"initialized near end of year 1998", - "s1999":"initialized near end of year 1999", - "s2000":"initialized near end of year 2000", - "s2001":"initialized near end of year 2001", - "s2002":"initialized near end of year 2002", - "s2003":"initialized near end of year 2003", - "s2004":"initialized near end of year 2004", - "s2005":"initialized near end of year 2005", - "s2006":"initialized near end of year 2006", - "s2007":"initialized near end of year 2007", - "s2008":"initialized near end of year 2008", - "s2009":"initialized near end of year 2009", - "s2010":"initialized near end of year 2010", - "s2011":"initialized near end of year 2011", - "s2012":"initialized near end of year 2012", - "s2013":"initialized near end of year 2013", - "s2014":"initialized near end of year 2014", - "s2015":"initialized near end of year 2015", - "s2016":"initialized near end of year 2016", - "s2017":"initialized near end of year 2017", - "s2018":"initialized near end of year 2018", - "s2019":"initialized near end of year 2019", - "s2020":"initialized near end of year 2020", - "s2021":"initialized near end of year 2021", - "s2022":"initialized near end of year 2022", - "s2023":"initialized near end of year 2023", - "s2024":"initialized near end of year 2024", - "s2025":"initialized near end of year 2025", - "s2026":"initialized near end of year 2026", - "s2027":"initialized near end of year 2027", - "s2028":"initialized 
near end of year 2028", - "s2029":"initialized near end of year 2029" - }, - "experiment_id":{ - "1pctCO2":{ - "activity_id":[ - "CMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"DECK: 1pctCO2", - "end_year":"", - "experiment":"1 percent per year increase in CO2", - "experiment_id":"1pctCO2", - "min_number_yrs_per_sim":"150", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "1pctCO2-4xext":{ - "activity_id":[ - "ISMIP6" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"branched from 1pctCO2 run at year 140 and run with CO2 fixed at 4x pre-industrial concentration", - "end_year":"", - "experiment":"extension from year 140 of 1pctCO2 with 4xCO2", - "experiment_id":"1pctCO2-4xext", - "min_number_yrs_per_sim":"210", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "1pctCO2" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "1pctCO2-bgc":{ - "activity_id":[ - "C4MIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"Biogeochemically-coupled specified concentration simulation in which CO2 increases at a rate of 1% per year until quadrupling", - "end_year":"", - "experiment":"biogeochemically-coupled version of 1 percent per year increasing CO2 experiment", - "experiment_id":"1pctCO2-bgc", - "min_number_yrs_per_sim":"150", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM", - "BGC" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "1pctCO2-rad":{ - "activity_id":[ - "C4MIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"Radiatively-coupled specified concentration simulation in which CO2 increases at a rate of 1% per year until quadrupling", - "end_year":"", - "experiment":"radiatively-coupled version of 1 percent per year increasing CO2 experiment", - "experiment_id":"1pctCO2-rad", - "min_number_yrs_per_sim":"150", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM", - "BGC" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "1pctCO2Ndep":{ - "activity_id":[ - "C4MIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"Fully-coupled specified concentration simulation in which CO2 increases at a rate of 1% per year until quadrupling, plus an additional scenario of anthropogenic nitrogen deposition", - "end_year":"", - "experiment":"1 percent per year increasing CO2 experiment with increasing N-deposition", - "experiment_id":"1pctCO2Ndep", - "min_number_yrs_per_sim":"150", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM", - "BGC" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "1pctCO2Ndep-bgc":{ - "activity_id":[ - "C4MIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"Biogeochemically-coupled specified concentration simulation in which CO2 increases at a rate of 1% per year until quadrupling, plus an additional scenario of anthropogenic nitrogen deposition", - "end_year":"", - 
"experiment":"biogeochemically-coupled version of 1 percent per year increasing CO2 experiment with increasing N-deposition", - "experiment_id":"1pctCO2Ndep-bgc", - "min_number_yrs_per_sim":"150", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM", - "BGC" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "1pctCO2to4x-withism":{ - "activity_id":[ - "ISMIP6" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Idealized 1%/yr CO2 increase to 4xC02 over 140yrs and kept constant at 4xCO2 for an additional 200 to 400 yrs simulation that includes interactive ice sheets", - "end_year":"", - "experiment":"simulation with interactive ice sheet forced by 1 percent per year increase in CO2 to 4xCO2 (subsequently held fixed)", - "experiment_id":"1pctCO2to4x-withism", - "min_number_yrs_per_sim":"350", - "parent_activity_id":[ - "ISMIP6" - ], - "parent_experiment_id":[ - "piControl-withism" - ], - "required_model_components":[ - "AOGCM", - "ISM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "G1":{ - "activity_id":[ - "GeoMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Beginning from a preindustrial control run, simultaneously quadruple the CO2 concentration and reduce the solar constant such that the TOA radiative flux remains within +/m0.1 W/m2", - "end_year":"", - "experiment":"abrupt quadrupling of CO2 plus reduction in total solar irradiance", - "experiment_id":"G1", - "min_number_yrs_per_sim":"100", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "G6SST1":{ - "activity_id":[ - "GeoMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"Time slice at 2020 (ScenarioMIP Tier 1 high forcing scenario)", - "end_year":"", - "experiment":"SSTs, forcings, and other prescribed conditions from year 2020 of SSP5-8.5", - "experiment_id":"G6SST1", - "min_number_yrs_per_sim":"10", - "parent_activity_id":[ - "ScenarioMIP" - ], - "parent_experiment_id":[ - "ssp585" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "G6SST2-solar":{ - "activity_id":[ - "GeoMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"Time slice at 2100 (G6solar)", - "end_year":"", - "experiment":"SSTs from year 2020 of SSP5-8.5; forcings and other prescribed conditions from year 2100 of G6solar", - "experiment_id":"G6SST2-solar", - "min_number_yrs_per_sim":"10", - "parent_activity_id":[ - "ScenarioMIP" - ], - "parent_experiment_id":[ - "ssp585" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "G6SST2-sulfur":{ - "activity_id":[ - "GeoMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"Time slice at 2100 (G6sulfur)", - "end_year":"", - "experiment":"SSTs from year 2020 of SSP5-8.5; forcings and other prescribed conditions from year 2100 of G6sulfur", - "experiment_id":"G6SST2-sulfur", - "min_number_yrs_per_sim":"10", - "parent_activity_id":[ - "ScenarioMIP" - ], - "parent_experiment_id":[ - "ssp585" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"", - 
"sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "G6solar":{ - "activity_id":[ - "GeoMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Using solar irradiance reduction, return the radiative forcing from a background of the ScenarioMIP high forcing to the ScenarioMIP middle forcing", - "end_year":"2100", - "experiment":"total solar irradiance reduction to reduce net forcing from SSP585 to SSP245", - "experiment_id":"G6solar", - "min_number_yrs_per_sim":"86", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "historical" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "G6sulfur":{ - "activity_id":[ - "GeoMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Using equatorial SO2 injection, return the radiative forcing from a background of the ScenarioMIP high forcing to the ScenarioMIP middle forcing", - "end_year":"2100", - "experiment":"stratospheric sulfate aerosol injection to reduce net forcing from SSP585 to SSP245", - "experiment_id":"G6sulfur", - "min_number_yrs_per_sim":"86", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "historical" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "G7SST1-cirrus":{ - "activity_id":[ - "GeoMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"Time slice at 2020 (ScenarioMIP Tier 1 high forcing scenario and cirrus thinning according to G7cirrus)", - "end_year":"", - "experiment":"SSTs from year 2020 of SSP5-8.5; forcings and other prescribed conditions from year 2020 of SSP5-8.5 and cirrus thinning", - "experiment_id":"G7SST1-cirrus", - "min_number_yrs_per_sim":"10", - "parent_activity_id":[ - "ScenarioMIP" - ], - "parent_experiment_id":[ - "ssp585" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "G7SST2-cirrus":{ - "activity_id":[ - "GeoMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"Time slice at 2100 (ScenarioMIP Tier 1 high forcing scenario and cirrus thinning according to G7cirrus)", - "end_year":"", - "experiment":"SSTs from year 2100 of SSP5-8.5; forcings and other prescribed conditions from year 2100 of G7cirrus", - "experiment_id":"G7SST2-cirrus", - "min_number_yrs_per_sim":"10", - "parent_activity_id":[ - "ScenarioMIP" - ], - "parent_experiment_id":[ - "ssp585" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "G7cirrus":{ - "activity_id":[ - "GeoMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Against a background of the ScenarioMIP high forcing, reduce cirrus cloud optical depth by a constant amount", - "end_year":"2100", - "experiment":"increase cirrus ice crystal fall speed to reduce net forcing in SSP585 by 1 W m-2", - "experiment_id":"G7cirrus", - "min_number_yrs_per_sim":"86", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "historical" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "a4SST":{ - "activity_id":[ - "CFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"As piSST, but with 
monthly-varying SSTs taken from years 111-140 of each model's own abrupt4xCO2 experiment instead of from piControl. Sea-ice is unchanged from piSST", - "end_year":"", - "experiment":"as piSST but with SSTs from abrupt4xCO2", - "experiment_id":"a4SST", - "min_number_yrs_per_sim":"30", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "abrupt4xCO2" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "a4SSTice":{ - "activity_id":[ - "CFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"As piSST, but with monthly-varying SSTs and sea-ice taken from years 111-140 of each model's own abrupt4xCO2 experiment instead of from piControl", - "end_year":"", - "experiment":"as piSST but with SSTs and sea ice from abrupt4xCO2", - "experiment_id":"a4SSTice", - "min_number_yrs_per_sim":"30", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "abrupt4xCO2" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "a4SSTice-4xCO2":{ - "activity_id":[ - "CFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"As a4SSTice, but CO2 is quadrupled, and the increase in CO2 is seen by both the radiation scheme and vegetation", - "end_year":"", - "experiment":"as piSST but with SSTs and sea ice from abrupt4xCO2, and 4xCO2 seen by radiation and vegetation", - "experiment_id":"a4SSTice-4xCO2", - "min_number_yrs_per_sim":"30", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "abrupt4xCO2" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "abrupt-0p5xCO2":{ - "activity_id":[ - "CFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Identical to the DECK abrupt-4xCO2, but at 0.5xCO2", - "end_year":"", - "experiment":"abrupt halving of CO2", - "experiment_id":"abrupt-0p5xCO2", - "min_number_yrs_per_sim":"150", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "abrupt-2xCO2":{ - "activity_id":[ - "CFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Identical to the DECK abrupt-4xCO2, but at 2xCO2", - "end_year":"", - "experiment":"abrupt doubling of CO2", - "experiment_id":"abrupt-2xCO2", - "min_number_yrs_per_sim":"150", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "abrupt-4xCO2":{ - "activity_id":[ - "CMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"DECK: abrupt4xCO2", - "end_year":"", - "experiment":"abrupt quadrupling of CO2", - "experiment_id":"abrupt-4xCO2", - "min_number_yrs_per_sim":"150", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "abrupt-solm4p":{ - "activity_id":[ - "CFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Conceptually similar to abrupt 4xCO2 DECK experiment, except that 
the solar constant rather than CO2 is abruptly reduced by 4%", - "end_year":"", - "experiment":"abrupt 4% decrease in solar constant", - "experiment_id":"abrupt-solm4p", - "min_number_yrs_per_sim":"150", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "abrupt-solp4p":{ - "activity_id":[ - "CFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Conceptually similar to abrupt 4xCO2 DECK experiment, except that the solar constant rather than CO2 is abruptly increased by 4%", - "end_year":"", - "experiment":"abrupt 4% increase in solar constant", - "experiment_id":"abrupt-solp4p", - "min_number_yrs_per_sim":"150", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "amip":{ - "activity_id":[ - "CMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"DECK: AMIP", - "end_year":"2014", - "experiment":"AMIP", - "experiment_id":"amip", - "min_number_yrs_per_sim":"36", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"1979", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "amip-4xCO2":{ - "activity_id":[ - "CFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"As CMIP5/CFMIP-2 amip4xCO2 experiment. AMIP experiment where SSTs are held at control values and the CO2 seen by the radiation scheme is quadrupled", - "end_year":"2014", - "experiment":"AMIP SSTs with 4xCO2", - "experiment_id":"amip-4xCO2", - "min_number_yrs_per_sim":"36", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"1979", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "amip-TIP":{ - "activity_id":[ - "GMMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"The topography of the TIP is modified by setting surface elevations to 500m; to understand the combined thermal and mechanical forcing of the TIP. Same model as DECK", - "end_year":"2014", - "experiment":"same as \"amip\" run, but surface elevations of the Tibetan-Iranian Plateau and Himalayas reduced to 500m", - "experiment_id":"amip-TIP", - "min_number_yrs_per_sim":"36", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"1979", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "amip-TIP-nosh":{ - "activity_id":[ - "GMMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"Surface sensible heat released at the elevation above 500m over the TIP is not allowed to heat the atmosphere. 
Same model as DECK", - "end_year":"2014", - "experiment":"same as \"amip\" run, but sensible heat not allowed for elevations of the Tibetan-Iranian Plateau and Himalayas above 500m", - "experiment_id":"amip-TIP-nosh", - "min_number_yrs_per_sim":"36", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"1979", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "amip-a4SST-4xCO2":{ - "activity_id":[ - "CFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"Same as amip, but a patterned SST anomaly is applied on top of the monthly-varying amip SSTs. This anomaly is a monthly climatology, taken from each model's own abrupt4xCO2 run minus piControl (using the mean of years 111-140 of abrupt4xCO2, and the parallel 30-year section of piControl). CO2 is quadrupled, and the increase in CO2 is seen by both the radiation scheme and vegetation", - "end_year":"2014", - "experiment":"as AMIP but with warming pattern from abrupt4xCO2 added to SSTs and 4xCO2 seen by radiation and vegetation", - "experiment_id":"amip-a4SST-4xCO2", - "min_number_yrs_per_sim":"36", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"1979", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "amip-future4K":{ - "activity_id":[ - "CFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"As CMIP5/CFMIP-2 amipFuture experiment. AMIP experiment where SSTs are subject to a composite SST warming pattern derived from coupled models, scaled to an ice-free ocean mean of 4K", - "end_year":"2014", - "experiment":"AMIP with patterned 4K SST increase", - "experiment_id":"amip-future4K", - "min_number_yrs_per_sim":"36", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"1979", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "amip-hist":{ - "activity_id":[ - "GMMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"Extended AMIP run that covers 1870-2014. All natural and anthropogenic historical forcings as used in CMIP6 Historical Simulation will be included. AGCM resolution as CMIP6 Historical Simulation. The HadISST data will be used", - "end_year":"2014", - "experiment":"AMIP-style simulation covering the period 1870-2014", - "experiment_id":"amip-hist", - "min_number_yrs_per_sim":"144", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"1870", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "amip-hld":{ - "activity_id":[ - "GMMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"The topography of the highlands in Africa, N. America and S. America TP is modified by setting surface elevations to a certain height (500m). Same model as DECK", - "end_year":"2014", - "experiment":"same as \"amip\" run, but surface elevations of the East African Highlands in Africa, Sierra Madre in N. America and Andes in S. 
America reduced to 500m", - "experiment_id":"amip-hld", - "min_number_yrs_per_sim":"36", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"1979", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "amip-lfmip-pObs":{ - "activity_id":[ - "LS3MIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Land-hist land conditions; AMIP SSTs", - "end_year":"2014", - "experiment":"prescribed land (from pseudo-observations) and AMIP SSTs", - "experiment_id":"amip-lfmip-pObs", - "min_number_yrs_per_sim":"36", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"1979", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "amip-lfmip-pdLC":{ - "activity_id":[ - "LS3MIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Prescribed land conditions 1980-2014 climate; AMIP SSTs", - "end_year":"2014", - "experiment":"prescribed land (from current climatology) and AMIP SSTs", - "experiment_id":"amip-lfmip-pdLC", - "min_number_yrs_per_sim":"36", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"1979", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "amip-lfmip-rmLC":{ - "activity_id":[ - "LS3MIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Prescribed land conditions 30yr running mean; AMIP SSTs", - "end_year":"2014", - "experiment":"prescribed land conditions (from running mean climatology) and AMIP SSTs", - "experiment_id":"amip-lfmip-rmLC", - "min_number_yrs_per_sim":"36", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"1979", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "amip-lwoff":{ - "activity_id":[ - "CFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"As amip experiment, but with cloud-radiative effects switched off in the LW radiation code", - "end_year":"2014", - "experiment":"AMIP experiment with longwave cloud-radiative effects off", - "experiment_id":"amip-lwoff", - "min_number_yrs_per_sim":"36", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"1979", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "amip-m4K":{ - "activity_id":[ - "CFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"As amip experiment but SSTs are subject to a uniform cooling of 4K", - "end_year":"2014", - "experiment":"AMIP with uniform 4K SST decrease", - "experiment_id":"amip-m4K", - "min_number_yrs_per_sim":"36", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"1979", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "amip-p4K":{ - "activity_id":[ - "CFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"As CMIP5/CFMIP-2 amip4K experiment. 
AMIP experiment where SSTs are subject to a uniform warming of 4K", - "end_year":"2014", - "experiment":"AMIP with uniform 4K SST increase", - "experiment_id":"amip-p4K", - "min_number_yrs_per_sim":"36", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"1979", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "amip-p4K-lwoff":{ - "activity_id":[ - "CFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"As amip-p4K experiment, but with cloud-radiative effects switched off in the LW radiation code", - "end_year":"2014", - "experiment":"AMIP experiment with uniform 4K SST increase and with longwave cloud radiative effects off", - "experiment_id":"amip-p4K-lwoff", - "min_number_yrs_per_sim":"36", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"1979", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "amip-piForcing":{ - "activity_id":[ - "CFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"Identical to standard AMIP experiment but from 1870-present with constant pre-industrial forcing levels (anthropogenic and natural)", - "end_year":"2014", - "experiment":"AMIP SSTs with pre-industrial anthropogenic and natural forcing", - "experiment_id":"amip-piForcing", - "min_number_yrs_per_sim":"145", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"1870", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "aqua-4xCO2":{ - "activity_id":[ - "CFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"Extended version of CMIP5/CFMIP-2 aqua4xCO2 experiment. Aquaplanet experiment where SSTs are held at control values and the CO2 seen by the radiation scheme is quadrupled", - "end_year":"", - "experiment":"aquaplanet with control SST and 4xCO2", - "experiment_id":"aqua-4xCO2", - "min_number_yrs_per_sim":"10", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "aqua-control":{ - "activity_id":[ - "CFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"Extended version of CMIP5/CFMIP-2 aquaControl experiment. 
Aquaplanet (no land) experiment with no seasonal cycle forced with specified zonally symmetric SSTs", - "end_year":"", - "experiment":"aquaplanet control", - "experiment_id":"aqua-control", - "min_number_yrs_per_sim":"10", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "aqua-control-lwoff":{ - "activity_id":[ - "CFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"As aqua-control experiment, but with cloud-radiative effects switched off in the LW radiation code", - "end_year":"", - "experiment":"aquaplanet control with longwave cloud radiative effects off", - "experiment_id":"aqua-control-lwoff", - "min_number_yrs_per_sim":"10", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "aqua-p4K":{ - "activity_id":[ - "CFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"Extended version of CMIP5/CFMIP-2 aqua4K experiment. Aquaplanet experiment where SSTs are subject to a uniform warming of 4K", - "end_year":"", - "experiment":"aquaplanet with uniform 4K SST increase", - "experiment_id":"aqua-p4K", - "min_number_yrs_per_sim":"10", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "aqua-p4K-lwoff":{ - "activity_id":[ - "CFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"As aqua-p4K experiment, but with cloud-radiative effects switched off in the LW radiation code", - "end_year":"", - "experiment":"aquaplanet with uniform 4K SST increase and with longwave cloud radiative effects off", - "experiment_id":"aqua-p4K-lwoff", - "min_number_yrs_per_sim":"10", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "control-1950":{ - "activity_id":[ - "HighResMIP" - ], - "additional_allowed_model_components":[ - "AER" - ], - "description":"Coupled integrations with constant 1950's forcing", - "end_year":"", - "experiment":"coupled control with fixed 1950's forcing (HighResMIP equivalent of pre-industrial control)", - "experiment_id":"control-1950", - "min_number_yrs_per_sim":"100", - "parent_activity_id":[ - "HighResMIP" - ], - "parent_experiment_id":[ - "spinup-1950" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "control-slab":{ - "activity_id":[ - "VolMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"slab control run for volc-pinatubo-slab", - "end_year":"", - "experiment":"control with slab ocean", - "experiment_id":"control-slab", - "min_number_yrs_per_sim":"30", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AGCM", - "SLAB" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "dcppA-assim":{ - "activity_id":[ - "DCPP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - 
"description":"A2.3 Assimilation runs used to generate initial conditions for hindcasts", - "end_year":"2016", - "experiment":"Assimilation run paralleling the historical simulation, which may be used to generate hindcast initial conditions", - "experiment_id":"dcppA-assim", - "min_number_yrs_per_sim":"56", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"before 1961", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "dcppA-hindcast":{ - "activity_id":[ - "DCPP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"A1 (and A2.1, A3.1, and A3.2) Decadal hindcasts begun near the end of each year from 1960 to 2016, or every other year at minimum. First full hindcast year follows start year (e.g., for s1960, first full hindcast year is 1961)", - "end_year":"5 - 10 years after start year", - "experiment":"hindcast initialized based on observations and using historical forcing", - "experiment_id":"dcppA-hindcast", - "min_number_yrs_per_sim":"5", - "parent_activity_id":[ - "no parent", - "DCPP" - ], - "parent_experiment_id":[ - "no parent", - "dcppA-assim" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"a year in the range 1960-2016", - "sub_experiment_id":[ - "s1960", - "s1961", - "s1962", - "s1963", - "s1964", - "s1965", - "s1966", - "s1967", - "s1968", - "s1969", - "s1970", - "s1971", - "s1972", - "s1973", - "s1974", - "s1975", - "s1976", - "s1977", - "s1978", - "s1979", - "s1980", - "s1981", - "s1982", - "s1983", - "s1984", - "s1985", - "s1986", - "s1987", - "s1988", - "s1989", - "s1990", - "s1991", - "s1992", - "s1993", - "s1994", - "s1995", - "s1996", - "s1997", - "s1998", - "s1999", - "s2000", - "s2001", - "s2002", - "s2003", - "s2004", - "s2005", - "s2006", - "s2007", - "s2008", - "s2009", - "s2010", - "s2011", - "s2012", - "s2013", - "s2014", - "s2015", - "s2016" - ], - "tier":"1" - }, - "dcppA-hindcast-niff":{ - "activity_id":[ - "DCPP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"A4.1 Decadal hindcasts begun near the end of each year from 1960 to 2016, or every other year at minimum, but with no information from the future. 
First full hindcast year follows start year (e.g., for s1960, first full hindcast year is 1961)", - "end_year":"5 - 10 years after start year", - "experiment":"hindcast initialized based on observations but without using knowledge of subsequent historical forcing", - "experiment_id":"dcppA-hindcast-niff", - "min_number_yrs_per_sim":"5", - "parent_activity_id":[ - "no parent", - "DCPP" - ], - "parent_experiment_id":[ - "no parent", - "dcppA-assim" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"a year in the range 1960-2016", - "sub_experiment_id":[ - "s1960", - "s1961", - "s1962", - "s1963", - "s1964", - "s1965", - "s1966", - "s1967", - "s1968", - "s1969", - "s1970", - "s1971", - "s1972", - "s1973", - "s1974", - "s1975", - "s1976", - "s1977", - "s1978", - "s1979", - "s1980", - "s1981", - "s1982", - "s1983", - "s1984", - "s1985", - "s1986", - "s1987", - "s1988", - "s1989", - "s1990", - "s1991", - "s1992", - "s1993", - "s1994", - "s1995", - "s1996", - "s1997", - "s1998", - "s1999", - "s2000", - "s2001", - "s2002", - "s2003", - "s2004", - "s2005", - "s2006", - "s2007", - "s2008", - "s2009", - "s2010", - "s2011", - "s2012", - "s2013", - "s2014", - "s2015", - "s2016" - ], - "tier":"4" - }, - "dcppA-historical-niff":{ - "activity_id":[ - "DCPP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"A4.2 Hindcasts initialized from historical climate simulations as in DCPP-A2.2, but with no information from the future. First full hindcast year follows start year (e.g., for s1960, first full hindcast year is 1961)", - "end_year":"5 - 10 years after start year", - "experiment":"hindcast initialized from historical climate simulation but without using knowledge of subsequent historical forcing", - "experiment_id":"dcppA-historical-niff", - "min_number_yrs_per_sim":"5", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "historical" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"A year in the range 1960-2016", - "sub_experiment_id":[ - "s1960", - "s1961", - "s1962", - "s1963", - "s1964", - "s1965", - "s1966", - "s1967", - "s1968", - "s1969", - "s1970", - "s1971", - "s1972", - "s1973", - "s1974", - "s1975", - "s1976", - "s1977", - "s1978", - "s1979", - "s1980", - "s1981", - "s1982", - "s1983", - "s1984", - "s1985", - "s1986", - "s1987", - "s1988", - "s1989", - "s1990", - "s1991", - "s1992", - "s1993", - "s1994", - "s1995", - "s1996", - "s1997", - "s1998", - "s1999", - "s2000", - "s2001", - "s2002", - "s2003", - "s2004", - "s2005", - "s2006", - "s2007", - "s2008", - "s2009", - "s2010", - "s2011", - "s2012", - "s2013", - "s2014", - "s2015", - "s2016" - ], - "tier":"4" - }, - "dcppB-forecast":{ - "activity_id":[ - "DCPP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"B1 (and B2.1, B2.2) Ongoing decadal forecasts. 
First full forecast year follows start year (e.g., for s2017, first full forecast year is 2018)", - "end_year":"5 years after start year", - "experiment":"forecast initialized from observations with forcing from ssp245", - "experiment_id":"dcppB-forecast", - "min_number_yrs_per_sim":"5", - "parent_activity_id":[ - "no parent", - "DCPP" - ], - "parent_experiment_id":[ - "no parent", - "dcppA-assim" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"a year in the range 2017-2029", - "sub_experiment_id":[ - "s2017", - "s2018", - "s2019", - "s2020", - "s2021", - "s2022", - "s2023", - "s2024", - "s2025", - "s2026", - "s2027", - "s2028", - "s2029" - ], - "tier":"1" - }, - "dcppC-amv-ExTrop-neg":{ - "activity_id":[ - "DCPP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"C1.7 Mechanisms and predictability of the hiatus and of similar long timescale variations of both signs", - "end_year":"", - "experiment":"Idealized climate impact of negative extratropical AMV anomaly pattern", - "experiment_id":"dcppC-amv-ExTrop-neg", - "min_number_yrs_per_sim":"10", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "dcppC-amv-ExTrop-pos":{ - "activity_id":[ - "DCPP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"C1.7 Mechanisms and predictability of the hiatus and of similar long timescale variations of both signs", - "end_year":"", - "experiment":"Idealized climate impact of positive extratropical AMV anomaly pattern", - "experiment_id":"dcppC-amv-ExTrop-pos", - "min_number_yrs_per_sim":"10", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "dcppC-amv-Trop-neg":{ - "activity_id":[ - "DCPP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"C1.8 Mechanisms and predictability of the hiatus and of similar long timescale variations of both signs", - "end_year":"", - "experiment":"Idealized climate impact of negative tropical AMV anomaly pattern", - "experiment_id":"dcppC-amv-Trop-neg", - "min_number_yrs_per_sim":"10", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "dcppC-amv-Trop-pos":{ - "activity_id":[ - "DCPP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"C1.8 Mechanisms and predictability of the hiatus and of similar long timescale variations of both signs", - "end_year":"", - "experiment":"idealized positive tropical AMV anomaly pattern", - "experiment_id":"dcppC-amv-Trop-pos", - "min_number_yrs_per_sim":"10", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "dcppC-amv-neg":{ - "activity_id":[ - "DCPP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"C1.3 Mechanisms and predictability of the hiatus and of similar long timescale variations of both signs", - "end_year":"", - "experiment":"Idealized climate impact of negative AMV 
anomaly pattern", - "experiment_id":"dcppC-amv-neg", - "min_number_yrs_per_sim":"10", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "dcppC-amv-pos":{ - "activity_id":[ - "DCPP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"C1.2 Mechanisms and predictability of the hiatus and of similar long timescale variations of both signs", - "end_year":"", - "experiment":"Idealized climate impact of positive AMV anomaly pattern", - "experiment_id":"dcppC-amv-pos", - "min_number_yrs_per_sim":"10", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "dcppC-atl-control":{ - "activity_id":[ - "DCPP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"C1.1 Mechanisms and predictability of the hiatus and of similar long timescale variations of both signs", - "end_year":"", - "experiment":"Idealized Atlantic control", - "experiment_id":"dcppC-atl-control", - "min_number_yrs_per_sim":"10", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "dcppC-atl-pacemaker":{ - "activity_id":[ - "DCPP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"C1.11 Mechanisms and predictability of the hiatus and of similar long timescale variations of both signs", - "end_year":"2014", - "experiment":"pacemaker Atlantic experiment", - "experiment_id":"dcppC-atl-pacemaker", - "min_number_yrs_per_sim":"65", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "historical" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1910 or 1950", - "sub_experiment_id":[ - "s1910", - "s1950" - ], - "tier":"3" - }, - "dcppC-atl-spg":{ - "activity_id":[ - "DCPP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"C2.1 (and C2.2) Mechanisms and predictability of the hiatus and of similar long timescale variations of both signs. First full hindcast year follows start year (e.g., for s1992, first full hindcast year is 1993)", - "end_year":"5 - 10 years after start year", - "experiment":"predictability of 1990s warming of Atlantic sub-polar gyre", - "experiment_id":"dcppC-atl-spg", - "min_number_yrs_per_sim":"5", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"A year in the range 1992-1999", - "sub_experiment_id":[ - "s1992", - "s1993", - "s1994", - "s1995", - "s1996", - "s1997", - "s1998", - "s1999" - ], - "tier":"3" - }, - "dcppC-forecast-addAgung":{ - "activity_id":[ - "DCPP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"C3.4 Effects of volcanoes on decadal prediction and predictability of forced and internal variability components. 
First full hindcast year is 2015", - "end_year":"5 - 10 years after start year", - "experiment":"2015 forecast with added Agung forcing", - "experiment_id":"dcppC-forecast-addAgung", - "min_number_yrs_per_sim":"5", - "parent_activity_id":[ - "no parent", - "DCPP" - ], - "parent_experiment_id":[ - "no parent", - "dcppA-assim" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"2014", - "sub_experiment_id":[ - "s2014" - ], - "tier":"3" - }, - "dcppC-forecast-addElChichon":{ - "activity_id":[ - "DCPP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"C3.5 Effects of volcanoes on decadal prediction and predictability of forced and internal variability components. First full hindcast year is 2015", - "end_year":"5 - 10 years after start year", - "experiment":"2015 forecast with added El Chichon forcing", - "experiment_id":"dcppC-forecast-addElChichon", - "min_number_yrs_per_sim":"5", - "parent_activity_id":[ - "no parent", - "DCPP" - ], - "parent_experiment_id":[ - "no parent", - "dcppA-assim" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"2014", - "sub_experiment_id":[ - "s2014" - ], - "tier":"3" - }, - "dcppC-forecast-addPinatubo":{ - "activity_id":[ - "DCPP", - "VolMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"C3.6 Effects of volcanoes on decadal prediction and predictability of forced and internal variability components. First full hindcast year is 2015", - "end_year":"5 - 10 years after start year", - "experiment":"2015 forecast with added Pinatubo forcing", - "experiment_id":"dcppC-forecast-addPinatubo", - "min_number_yrs_per_sim":"5", - "parent_activity_id":[ - "no parent", - "DCPP" - ], - "parent_experiment_id":[ - "no parent", - "dcppA-assim" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"2014", - "sub_experiment_id":[ - "s2014" - ], - "tier":"1" - }, - "dcppC-hindcast-noAgung":{ - "activity_id":[ - "DCPP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"C3.3 Effects of volcanoes on decadal prediction and predictability of forced and internal variability components. First full hindcast year is 1962", - "end_year":"5 - 10 years after start year", - "experiment":"hindcast but with only background volcanic forcing to be the same as that used in the 2015 forecast", - "experiment_id":"dcppC-hindcast-noAgung", - "min_number_yrs_per_sim":"5", - "parent_activity_id":[ - "no parent", - "DCPP" - ], - "parent_experiment_id":[ - "no parent", - "dcppA-assim" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1962", - "sub_experiment_id":[ - "s1962" - ], - "tier":"2" - }, - "dcppC-hindcast-noElChichon":{ - "activity_id":[ - "DCPP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"C3.2 Effects of volcanoes on decadal prediction and predictability of forced and internal variability components. 
First full hindcast year is 1982", - "end_year":"5 - 10 years after start year", - "experiment":"hindcast but with only background volcanic forcing to be the same as that used in the 2015 forecast", - "experiment_id":"dcppC-hindcast-noElChichon", - "min_number_yrs_per_sim":"5", - "parent_activity_id":[ - "no parent", - "DCPP" - ], - "parent_experiment_id":[ - "no parent", - "dcppA-assim" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1981", - "sub_experiment_id":[ - "s1981" - ], - "tier":"2" - }, - "dcppC-hindcast-noPinatubo":{ - "activity_id":[ - "DCPP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"C3.1 Effects of volcanoes on decadal prediction and predictability of forced and internal variability components. First full hindcast year is 1991", - "end_year":"5 - 10 years after start year", - "experiment":"hindcast but with only background volcanic forcing to be the same as that used in the 2015 forecast", - "experiment_id":"dcppC-hindcast-noPinatubo", - "min_number_yrs_per_sim":"5", - "parent_activity_id":[ - "no parent", - "DCPP" - ], - "parent_experiment_id":[ - "no parent", - "dcppA-assim" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1990", - "sub_experiment_id":[ - "s1990" - ], - "tier":"1" - }, - "dcppC-ipv-NexTrop-neg":{ - "activity_id":[ - "DCPP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"C1.9 Mechanisms and predictability of the hiatus and of similar long timescale variations of both signs", - "end_year":"", - "experiment":"idealized negative northern extratropical IPV anomaly pattern", - "experiment_id":"dcppC-ipv-NexTrop-neg", - "min_number_yrs_per_sim":"10", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "dcppC-ipv-NexTrop-pos":{ - "activity_id":[ - "DCPP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"C1.9 Mechanisms and predictability of the hiatus and of similar long timescale variations of both signs", - "end_year":"", - "experiment":"idealized positive northern extratropical IPV anomaly pattern", - "experiment_id":"dcppC-ipv-NexTrop-pos", - "min_number_yrs_per_sim":"10", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "dcppC-ipv-neg":{ - "activity_id":[ - "DCPP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"C1.6 Mechanisms and predictability of the hiatus and of similar long timescale variations of both signs", - "end_year":"", - "experiment":"idealized negative IPV anomaly pattern", - "experiment_id":"dcppC-ipv-neg", - "min_number_yrs_per_sim":"10", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "dcppC-ipv-pos":{ - "activity_id":[ - "DCPP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"C1.5 Mechanisms and predictability of the hiatus and of similar long timescale variations of both signs", - "end_year":"", - "experiment":"idealized positive IPV anomaly pattern", - "experiment_id":"dcppC-ipv-pos", - 
"min_number_yrs_per_sim":"10", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "dcppC-pac-control":{ - "activity_id":[ - "DCPP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"C1.4 Mechanisms and predictability of the hiatus and of similar long timescale variations of both signs", - "end_year":"", - "experiment":"idealized Pacific control", - "experiment_id":"dcppC-pac-control", - "min_number_yrs_per_sim":"10", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "dcppC-pac-pacemaker":{ - "activity_id":[ - "DCPP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"C1.10 Mechanisms and predictability of the hiatus and of similar long timescale variations of both signs. First full hindcast year is 2015", - "end_year":"2014", - "experiment":"pacemaker Pacific experiment", - "experiment_id":"dcppC-pac-pacemaker", - "min_number_yrs_per_sim":"65", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "historical" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"either 1910 or 1950", - "sub_experiment_id":[ - "s1910", - "s1950" - ], - "tier":"3" - }, - "deforest-globe":{ - "activity_id":[ - "LUMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Idealized deforestation experiment, 20 million km2 forest removed linearly over a period of 50 years, with an additional 30 years with no specified change in forest cover; all other forcings held constant", - "end_year":"", - "experiment":"idealized transient global deforestation", - "experiment_id":"deforest-globe", - "min_number_yrs_per_sim":"81", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "esm-hist":{ - "activity_id":[ - "CMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"CMIP6 historical (CO2 emission-driven)", - "end_year":"2014", - "experiment":"all-forcing simulation of the recent past with atmospheric CO2 concentration calculated", - "experiment_id":"esm-hist", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "esm-piControl" - ], - "required_model_components":[ - "AOGCM", - "BGC" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "esm-hist-ext":{ - "activity_id":[ - "CMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"Extension beyond 2014 of the CMIP6 historical (CO2 emission-driven)", - "end_year":"", - "experiment":"post-2014 all-forcing simulation with atmospheric CO2 concentration calculated", - "experiment_id":"esm-hist-ext", - "min_number_yrs_per_sim":"1", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "esm-hist" - ], - "required_model_components":[ - "AOGCM", - "BGC" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "esm-piControl":{ - "activity_id":[ - "CMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"DECK: control 
(emission-driven)", - "end_year":"", - "experiment":"pre-industrial control simulation with CO2 concentration calculated", - "experiment_id":"esm-piControl", - "min_number_yrs_per_sim":"500", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "esm-piControl-spinup" - ], - "required_model_components":[ - "AOGCM", - "BGC" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "esm-piControl-spinup":{ - "activity_id":[ - "CMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"DECK: spin-up portion of the control (emission-driven)", - "end_year":"", - "experiment":"pre-industrial control simulation with CO2 concentration calculated (spin-up)", - "experiment_id":"esm-piControl-spinup", - "min_number_yrs_per_sim":"100", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AOGCM", - "BGC" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "esm-ssp585":{ - "activity_id":[ - "C4MIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"Emissions-driven future scenario simulation", - "end_year":"2100", - "experiment":"emission-driven RCP8.5 based on SSP5", - "experiment_id":"esm-ssp585", - "min_number_yrs_per_sim":"85", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "esm-hist" - ], - "required_model_components":[ - "AOGCM", - "BGC" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "esm-ssp585-ssp126Lu":{ - "activity_id":[ - "LUMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"Additional land use policy sensitivity simulation for high radiative forcing scenario, keep all forcings the same as in C4MIP esmssp5-8.5 scenario except use SSP1-2.6 land use; emission driven", - "end_year":"2100", - "experiment":"emissions-driven SSP5-8.5 with SSP1-2.6 land use", - "experiment_id":"esm-ssp585-ssp126Lu", - "min_number_yrs_per_sim":"86", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "esm-hist" - ], - "required_model_components":[ - "AOGCM", - "BGC" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "faf-all":{ - "activity_id":[ - "FAFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"1xCO2 experiment, parallel to piControl, forced over the ocean simultaneously by surface windstress (as in the wind experiment), net heat flux (as in the heat experiment) and net freshwater flux (as in the water experiment) anomalies obtained from the CMIP5 ensemble mean of 1pctCO2 experiments at the time of 2xCO2, using a passive tracer to prevent negative climate feedback on the heat flux applied", - "end_year":"", - "experiment":"control plus perturbative surface fluxes of momentum, heat and water into ocean", - "experiment_id":"faf-all", - "min_number_yrs_per_sim":"70", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "faf-heat":{ - "activity_id":[ - "FAFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"1xCO2 experiment, parallel to piControl, forced over the ocean by surface net heat flux anomalies obtained from the CMIP5 ensemble mean of 1pctCO2 experiments at the time of 2xCO2, using a passive tracer 
to prevent negative climate feedback on the heat flux applied", - "end_year":"", - "experiment":"control plus perturbative surface flux of heat into ocean", - "experiment_id":"faf-heat", - "min_number_yrs_per_sim":"70", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "faf-passiveheat":{ - "activity_id":[ - "FAFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"1xCO2 experiment, parallel to piControl, with a flux of passive tracer added at the ocean surface at the same rate as the surface net heat flux anomaly applied in the FAFMIP heat experiment", - "end_year":"", - "experiment":"control plus surface flux of passive heat tracer into ocean", - "experiment_id":"faf-passiveheat", - "min_number_yrs_per_sim":"70", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "faf-stress":{ - "activity_id":[ - "FAFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"1xCO2 experiment, parallel to piControl, forced over the ocean by surface windstress anomalies obtained from the CMIP5 ensemble mean of 1pctCO2 experiments at the time of 2xCO2", - "end_year":"", - "experiment":"control plus perturbative surface flux of momentum into ocean", - "experiment_id":"faf-stress", - "min_number_yrs_per_sim":"70", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "faf-water":{ - "activity_id":[ - "FAFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"1xCO2 experiment, parallel to piControl, forced over the ocean by surface net freshwater flux anomalies obtained from the CMIP5 ensemble mean of 1pctCO2 experiments at the time of 2xCO2", - "end_year":"", - "experiment":"control plus perturbative surface flux of water into ocean", - "experiment_id":"faf-water", - "min_number_yrs_per_sim":"70", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "futureSST-4xCO2-solar":{ - "activity_id":[ - "GeoMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"Time slice at year 100 of G1ext to examine radiative forcing of abrupt4xCO2 and G1", - "end_year":"", - "experiment":"year 100 SSTs from abrupt4xCO2 with quadrupled CO2 and solar reduction", - "experiment_id":"futureSST-4xCO2-solar", - "min_number_yrs_per_sim":"10", - "parent_activity_id":[ - "GeoMIP" - ], - "parent_experiment_id":[ - "G1" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "highres-future":{ - "activity_id":[ - "HighResMIP" - ], - "additional_allowed_model_components":[ - "AER" - ], - "description":"Coupled integrations with SSP5 forcing (nearest to CMIP5 RCP8.5, as in highresSST-future)", - "end_year":"2050", - "experiment":"coupled future 2015-2050 using a scenario as close to CMIP5 RCP8.5 as possible within CMIP6", - "experiment_id":"highres-future", -
"min_number_yrs_per_sim":"36", - "parent_activity_id":[ - "HighResMIP" - ], - "parent_experiment_id":[ - "hist-1950" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "highresSST-4co2":{ - "activity_id":[ - "HighResMIP" - ], - "additional_allowed_model_components":[ - "AER" - ], - "description":"Similar to CFMIP amip-4xCO2, SSTs are held at highresSST-present values and the CO2 seen by the radiation scheme is quadrupled", - "end_year":"2014", - "experiment":"highresSST-present SST with 4xCO2 concentrations", - "experiment_id":"highresSST-4co2", - "min_number_yrs_per_sim":"36", - "parent_activity_id":[ - "HighResMIP" - ], - "parent_experiment_id":[ - "highresSST-present" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"1979", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "highresSST-LAI":{ - "activity_id":[ - "HighResMIP" - ], - "additional_allowed_model_components":[ - "AER" - ], - "description":"Forced global atmosphere-land simulations as highresSST-present, but using an common LAI dataset across models", - "end_year":"2014", - "experiment":"common LAI dataset within the highresSST-present experiment", - "experiment_id":"highresSST-LAI", - "min_number_yrs_per_sim":"36", - "parent_activity_id":[ - "HighResMIP" - ], - "parent_experiment_id":[ - "highresSST-present" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"1979", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "highresSST-future":{ - "activity_id":[ - "HighResMIP" - ], - "additional_allowed_model_components":[ - "AER" - ], - "description":"Extend highresSST-present to 2050 with agreed SSP5/RCP8.5 forcings (with option to extend further to 2100)", - "end_year":"2050", - "experiment":"forced atmosphere experiment for 2015-2050 using SST/sea-ice derived from CMIP5 RCP8.5 simulations and a scenario as close to RCP8.5 as possible within CMIP6", - "experiment_id":"highresSST-future", - "min_number_yrs_per_sim":"36", - "parent_activity_id":[ - "HighResMIP" - ], - "parent_experiment_id":[ - "highresSST-present" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "highresSST-p4K":{ - "activity_id":[ - "HighResMIP" - ], - "additional_allowed_model_components":[ - "AER" - ], - "description":"Similar to CFMIP amip-p4K, add a uniform warming of 4K to highresSST-present SSTs and run the experiment parallel to highresSST-present", - "end_year":"2014", - "experiment":"uniform 4K warming of highresSST-present SST", - "experiment_id":"highresSST-p4K", - "min_number_yrs_per_sim":"36", - "parent_activity_id":[ - "HighResMIP" - ], - "parent_experiment_id":[ - "highresSST-present" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"1979", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "highresSST-present":{ - "activity_id":[ - "HighResMIP" - ], - "additional_allowed_model_components":[ - "AER" - ], - "description":"Forced global atmosphere-land simulations using daily 1/4 degree SST and sea-ice forcings, and aerosol optical properties (not emissions) to constrain model spread", - "end_year":"2014", - "experiment":"forced atmosphere experiment for 1950-2014", - "experiment_id":"highresSST-present", - "min_number_yrs_per_sim":"65", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"1950", - "sub_experiment_id":[ - 
"none" - ], - "tier":"1" - }, - "highresSST-smoothed":{ - "activity_id":[ - "HighResMIP" - ], - "additional_allowed_model_components":[ - "AER" - ], - "description":"Forced global atmosphere-land simulations as highresSST-present, but using smoothed SST to investigate impact of SST variability", - "end_year":"2014", - "experiment":"smoothed SST version of highresSST-present", - "experiment_id":"highresSST-smoothed", - "min_number_yrs_per_sim":"36", - "parent_activity_id":[ - "HighResMIP" - ], - "parent_experiment_id":[ - "highresSST-present" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"1979", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "hist-1950":{ - "activity_id":[ - "HighResMIP" - ], - "additional_allowed_model_components":[ - "AER" - ], - "description":"Coupled integrationswith historic external forcings (as in highresSST-present)", - "end_year":"2014", - "experiment":"coupled historical 1950-2014", - "experiment_id":"hist-1950", - "min_number_yrs_per_sim":"65", - "parent_activity_id":[ - "HighResMIP" - ], - "parent_experiment_id":[ - "spinup-1950" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1950", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "hist-1950HC":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":[ - "BGC" - ], - "description":"Historical WMGHG concentrations and NTCF emissions, 1950 halocarbon concentrations, start 1950", - "end_year":"2014", - "experiment":"historical forcing, but with1950s halocarbon concentrations; initialized in 1950", - "experiment_id":"hist-1950HC", - "min_number_yrs_per_sim":"65", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "historical" - ], - "required_model_components":[ - "AOGCM", - "AER", - "CHEM" - ], - "start_year":"1950", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "hist-CO2":{ - "activity_id":[ - "DAMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Historical CO2-only run", - "end_year":"2020", - "experiment":"historical CO2-only run", - "experiment_id":"hist-CO2", - "min_number_yrs_per_sim":"171", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "hist-GHG":{ - "activity_id":[ - "DAMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Historical well-mixed GHG-only run. Models with interactive chemistry schemes should either turn off the chemistry or use a preindustrial climatology of stratospheric and tropospheric ozone in their radiation schemes. 
This will ensure that ozone is fixed in all these simulations, and simulated responses in models with and without coupled chemistry are comparable", - "end_year":"2020", - "experiment":"historical well-mixed GHG-only run", - "experiment_id":"hist-GHG", - "min_number_yrs_per_sim":"171", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "hist-aer":{ - "activity_id":[ - "DAMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Historical anthropogenic-aerosols-only run", - "end_year":"2020", - "experiment":"historical anthropogenic aerosols-only run", - "experiment_id":"hist-aer", - "min_number_yrs_per_sim":"171", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "hist-all-aer2":{ - "activity_id":[ - "DAMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Historical ALL forcing run with alternate estimates of aerosol concentrations/emissions", - "end_year":"2020", - "experiment":"historical ALL-forcing run with alternate estimates of aerosol forcing", - "experiment_id":"hist-all-aer2", - "min_number_yrs_per_sim":"171", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "hist-all-nat2":{ - "activity_id":[ - "DAMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Historical ALL forcing run with alternate estimates of solar and volcanic forcing", - "end_year":"2020", - "experiment":"historical ALL-forcing run with alternate estimates of natural forcing", - "experiment_id":"hist-all-nat2", - "min_number_yrs_per_sim":"171", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "hist-bgc":{ - "activity_id":[ - "C4MIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"Concentration-driven historical simulation, biogeochemically-coupled", - "end_year":"2014", - "experiment":"biogeochemically-coupled version of the simulation of the recent past with CO2 concentration prescribed", - "experiment_id":"hist-bgc", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM", - "BGC" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "hist-nat":{ - "activity_id":[ - "DAMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Historical natural-only run", - "end_year":"2020", - "experiment":"historical natural-only run", - "experiment_id":"hist-nat", - "min_number_yrs_per_sim":"171", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "hist-noLu":{ - "activity_id":[ - "LUMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - 
"description":"Same as CMIP6 historical but with land cover held at 1850, no human activity; concentration driven", - "end_year":"2014", - "experiment":"historical with no land-use change", - "experiment_id":"hist-noLu", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "hist-piAer":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":[ - "CHEM", - "BGC" - ], - "description":"Historical WMGHG, halocarbon concentrations and O3 precursor emissions, 1850 aerosol precursor emissions", - "end_year":"2014", - "experiment":"historical forcing, but with pre-industrial aerosol emissions", - "experiment_id":"hist-piAer", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM", - "AER" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "hist-piNTCF":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":[ - "CHEM", - "BGC" - ], - "description":"Historical WMGHG and halocarbons concentrations, 1850 NTCF emissions", - "end_year":"2014", - "experiment":"historical forcing, but with pre-industrial NTCF emissions", - "experiment_id":"hist-piNTCF", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM", - "AER" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "hist-resAMO":{ - "activity_id":[ - "GMMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Pacemaker 20th century historical run that includes all forcing as used in CMIP6 Historical Simulation, and the observational historical SST is restored in the AMO domain (0deg-70degN, 70degW-0deg)", - "end_year":"2014", - "experiment":"initialized from \"historical\" run year 1870 and SSTs in the AMO domain (0deg-70degN, 70degW-0deg) restored to AMIP SSTs with historical forcings", - "experiment_id":"hist-resAMO", - "min_number_yrs_per_sim":"145", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "historical" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1870", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "hist-resIPO":{ - "activity_id":[ - "GMMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Pacemaker 20th century historical run that includes all forcing as used in CMIP6 Historical Simulation, and the observational historical SST is restored in the tropical lobe of the IPO domain (20degS-20degN, 175degE-75degW)", - "end_year":"2014", - "experiment":"initialized from \"historical\" run year 1870 and SSTs in tropical lobe of the IPO domain (20degS-20degN, 175degE-75degW) restored to AMIP SSTs with historical forcings", - "experiment_id":"hist-resIPO", - "min_number_yrs_per_sim":"145", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "historical" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1870", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "hist-sol":{ - "activity_id":[ - "DAMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Historical solar-only transient simulation using settings from 
CMIP6 historical simulation but fixed GHG and ODS (1850 level)", - "end_year":"2020", - "experiment":"historical solar-only run", - "experiment_id":"hist-sol", - "min_number_yrs_per_sim":"171", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "hist-spAer-aer":{ - "activity_id":[ - "RFMIP" - ], - "additional_allowed_model_components":"", - "description":"Prescribed anthropogenic aerosol optical properties. Changes in aerosols only", - "end_year":"2014", - "experiment":"historical simulation with specified anthropogenic aerosols, no other forcings", - "experiment_id":"hist-spAer-aer", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "hist-spAer-all":{ - "activity_id":[ - "RFMIP" - ], - "additional_allowed_model_components":"", - "description":"Prescribed anthropogenic aerosol optical properties. All forcings", - "end_year":"2014", - "experiment":"historical simulation with specified anthropogenic aerosols", - "experiment_id":"hist-spAer-all", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "hist-stratO3":{ - "activity_id":[ - "DAMIP" - ], - "additional_allowed_model_components":[ - "AER", - "BGC" - ], - "description":"Historical stratospheric-ozone-only. In models with coupled chemistry, the chemistry scheme should be turned off, and the simulated ensemble mean monthly mean 3D stratospheric ozone concentrations from the CMIP6 historical simulations should be prescribed. Tropospheric ozone should be fixed at 3D long-term monthly mean piControl values, with a value of 100 ppbv ozone concentration in this piControl climatology used to separate the troposphere from the stratosphere. In models without coupled chemistry the same stratospheric ozone prescribed in the CMIP6 historical simulations should be prescribed. 
Stratospheric ozone concentrations will be provided by CCMI", - "end_year":"2020", - "experiment":"historical stratospheric-ozone-only run", - "experiment_id":"hist-stratO3", - "min_number_yrs_per_sim":"171", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "hist-volc":{ - "activity_id":[ - "DAMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Historical volcanic-only run", - "end_year":"2020", - "experiment":"historical volcanic-only run", - "experiment_id":"hist-volc", - "min_number_yrs_per_sim":"171", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "histSST":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":[ - "CHEM" - ], - "description":"Historical transient with SSTs prescribed from historical", - "end_year":"2014", - "experiment":"historical prescribed SSTs and historical forcing", - "experiment_id":"histSST", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AGCM", - "AER" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "histSST-1950HC":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":"", - "description":"Historical WMGHG concentrations and NTCF emissions, 1950 halocarbon concentrations", - "end_year":"2014", - "experiment":"historical SSTs and historical forcing, but with 1950 halocarbon concentrations", - "experiment_id":"histSST-1950HC", - "min_number_yrs_per_sim":"65", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AGCM", - "AER", - "CHEM" - ], - "start_year":"1950", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "histSST-piAer":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":[ - "CHEM" - ], - "description":"Historical WMGHG, halocarbon concentrations and tropospheric ozone precursor emissions, 1850 aerosol precursor emissions, prescribed SSTs", - "end_year":"2014", - "experiment":"historical SSTs and historical forcing, but with pre-industrial aerosol emissions", - "experiment_id":"histSST-piAer", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AGCM", - "AER" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "histSST-piCH4":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":"", - "description":"Historical (non-CH4) WMGHG concentrations and NTCF emissions, 1850 CH4 concentrations", - "end_year":"2014", - "experiment":"historical SSTs and historical forcing, but with pre-industrial methane concentrations", - "experiment_id":"histSST-piCH4", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AGCM", - "AER", - "CHEM" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "histSST-piN2O":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":[ - "BGC" - 
], - "description":"Historical (non-N2O) WMGHG concentrations and NTCF emissions, 1850 N2O concentrations", - "end_year":"2014", - "experiment":"historical SSTs and historical forcings, but with pre-industrial N2O concentrations", - "experiment_id":"histSST-piN2O", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AGCM", - "AER", - "CHEM" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "histSST-piNTCF":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":[ - "CHEM", - "BGC" - ], - "description":"Historical WMGHG concentrations and halocarbon emissions, 1850 NTCF emissions, prescribed SSTs", - "end_year":"2014", - "experiment":"historical SSTs and historical forcing, but with pre-industrial NTCF emissions", - "experiment_id":"histSST-piNTCF", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AGCM", - "AER" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "histSST-piO3":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":"", - "description":"Historical WMGHG, halocarbon concentrations and aerosol precursor emissions, 1850 tropospheric ozone precursor emissions, prescribed SSTs", - "end_year":"2014", - "experiment":"historical SSTs and historical forcing, but with pre-industrial ozone precursor emissions", - "experiment_id":"histSST-piO3", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AGCM", - "AER", - "CHEM" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "historical":{ - "activity_id":[ - "CMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"CMIP6 historical", - "end_year":"2014", - "experiment":"all-forcing simulation of the recent past", - "experiment_id":"historical", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "historical-ext":{ - "activity_id":[ - "CMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Extension beyond 2014 of the CMIP6 historical", - "end_year":"present", - "experiment":"post-2014 all-forcing simulation", - "experiment_id":"historical-ext", - "min_number_yrs_per_sim":"1", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "historical" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "historical-withism":{ - "activity_id":[ - "ISMIP6" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Historical simulation that includes interactive ice sheets. 
Set-up follows the historical experiment", - "end_year":"2014", - "experiment":"historical with interactive ice sheet", - "experiment_id":"historical-withism", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "ISMIP6" - ], - "parent_experiment_id":[ - "piControl-withism" - ], - "required_model_components":[ - "AOGCM", - "ISM" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "ism-1pctCO2to4x-self":{ - "activity_id":[ - "ISMIP6" - ], - "additional_allowed_model_components":"", - "description":"Idealized 1%/yr CO2 increase to 4xCO2 over 140 yrs and kept constant at 4xCO2 for an additional 200 to 400 yrs simulation with ice sheets forced \"offline\" with DECK 1pctCO2 using forcing from its own AOGCM", - "end_year":"", - "experiment":"offline ice sheet model forced by ISM's own AOGCM 1pctCO2to4x output", - "experiment_id":"ism-1pctCO2to4x-self", - "min_number_yrs_per_sim":"350", - "parent_activity_id":[ - "ISMIP6" - ], - "parent_experiment_id":[ - "ism-piControl-self" - ], - "required_model_components":[ - "ISM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "ism-1pctCO2to4x-std":{ - "activity_id":[ - "ISMIP6" - ], - "additional_allowed_model_components":"", - "description":"Idealized 1%/yr CO2 increase to 4xCO2 over 140 yrs and kept constant at 4xCO2 for an additional 200 to 400 yrs simulation with ice sheets forced \"offline\" with DECK 1pctCO2 using a standard forcing", - "end_year":"", - "experiment":"offline ice sheet model forced by ISMIP6-specified AOGCM 1pctCO2to4x output", - "experiment_id":"ism-1pctCO2to4x-std", - "min_number_yrs_per_sim":"350", - "parent_activity_id":[ - "ISMIP6" - ], - "parent_experiment_id":[ - "ism-pdControl-std" - ], - "required_model_components":[ - "ISM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "ism-amip-std":{ - "activity_id":[ - "ISMIP6" - ], - "additional_allowed_model_components":"", - "description":"Offline ice sheet evolution for the last few decades forced by amip", - "end_year":"2014", - "experiment":"offline ice sheet forced by ISMIP6-specified AGCM AMIP output", - "experiment_id":"ism-amip-std", - "min_number_yrs_per_sim":"36", - "parent_activity_id":[ - "ISMIP6" - ], - "parent_experiment_id":[ - "ism-ctrl-std" - ], - "required_model_components":[ - "ISM" - ], - "start_year":"1979", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "ism-asmb-std":{ - "activity_id":[ - "ISMIP6" - ], - "additional_allowed_model_components":"", - "description":"Offline ice sheet simulation with synthetic atmospheric dataset to explore the uncertainty in sea level due to ice sheet initialization", - "end_year":"", - "experiment":"offline ice sheet forced by initMIP synthetic atmospheric experiment", - "experiment_id":"ism-asmb-std", - "min_number_yrs_per_sim":"100", - "parent_activity_id":[ - "ISMIP6" - ], - "parent_experiment_id":[ - "ism-ctrl-std" - ], - "required_model_components":[ - "ISM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "ism-bsmb-std":{ - "activity_id":[ - "ISMIP6" - ], - "additional_allowed_model_components":"", - "description":"Offline ice sheet simulation with synthetic oceanic dataset to explore the uncertainty in sea level due to ice sheet initialization", - "end_year":"", - "experiment":"offline ice sheet forced by initMIP synthetic oceanic experiment", - "experiment_id":"ism-bsmb-std", - "min_number_yrs_per_sim":"100", - "parent_activity_id":[ - "ISMIP6" - ], - 
"parent_experiment_id":[ - "ism-ctrl-std" - ], - "required_model_components":[ - "ISM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "ism-ctrl-std":{ - "activity_id":[ - "ISMIP6" - ], - "additional_allowed_model_components":"", - "description":"Offline ice sheet control run for the initMIP experiment that explores the uncertainty in sea level due to ice sheet initialization", - "end_year":"", - "experiment":"offline ice sheet model initMIP control", - "experiment_id":"ism-ctrl-std", - "min_number_yrs_per_sim":"100", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "ISM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "ism-historical-self":{ - "activity_id":[ - "ISMIP6" - ], - "additional_allowed_model_components":"", - "description":"Historical simulation using \"offline\" ice sheet models. Forcing for ice sheet model is from its own AOGCM", - "end_year":"2014", - "experiment":"offline ice sheet forced by ISM's own AOGCM historical output", - "experiment_id":"ism-historical-self", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "ISMIP6" - ], - "parent_experiment_id":[ - "ism-piControl-self" - ], - "required_model_components":[ - "ISM" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "ism-historical-std":{ - "activity_id":[ - "ISMIP6" - ], - "additional_allowed_model_components":"", - "description":"Historical simulation using \"offline\" ice sheet models. Forcing for ice sheet model is the standard dataset based on CMIP6 AOGCM historical", - "end_year":"2014", - "experiment":"offline ice sheet forced by ISMIP6-specified AOGCM historical output", - "experiment_id":"ism-historical-std", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "ISMIP6" - ], - "parent_experiment_id":[ - "ism-pdControl-std" - ], - "required_model_components":[ - "ISM" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "ism-lig127k-std":{ - "activity_id":[ - "ISMIP6" - ], - "additional_allowed_model_components":"", - "description":"Last interglacial simulation of ice sheet evolution driven by PMIP lig127k", - "end_year":"", - "experiment":"offline ice sheet forced by ISMIP6-specified AGCM last interglacial output", - "experiment_id":"ism-lig127k-std", - "min_number_yrs_per_sim":"20000", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "ISM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "ism-pdControl-std":{ - "activity_id":[ - "ISMIP6" - ], - "additional_allowed_model_components":"", - "description":"Present-day control simulation for \"offline\" ice sheets", - "end_year":"", - "experiment":"offline ice sheet forced by ISMIP6-specified AOGCM pdControl output", - "experiment_id":"ism-pdControl-std", - "min_number_yrs_per_sim":"100", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "ISM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "ism-piControl-self":{ - "activity_id":[ - "ISMIP6" - ], - "additional_allowed_model_components":"", - "description":"Pre-industrial control simulation for \"offline\" ice sheets", - "end_year":"", - "experiment":"offline ice sheet forced by ISM's own AOGCM piControl output", - "experiment_id":"ism-piControl-self", - 
"min_number_yrs_per_sim":"500", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "ISM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "ism-ssp585-self":{ - "activity_id":[ - "ISMIP6" - ], - "additional_allowed_model_components":"", - "description":"Future climate ScenarioMIP SSP5-8.5 simulation using \"offline\" ice sheet models. Forcing for ice sheet model is from its own AOGCM", - "end_year":"2300", - "experiment":"offline ice sheet forced by ISM's own AOGCM ssp585 output", - "experiment_id":"ism-ssp585-self", - "min_number_yrs_per_sim":"85", - "parent_activity_id":[ - "ISMIP6" - ], - "parent_experiment_id":[ - "ism-historical-self" - ], - "required_model_components":[ - "ISM" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "ism-ssp585-std":{ - "activity_id":[ - "ISMIP6" - ], - "additional_allowed_model_components":"", - "description":"Future climate ScenarioMIP SSP5-8.5 simulation using \"offline\" ice sheet models. Forcing for ice sheet model is the standard dataset based on ScenarioMIP ssp585", - "end_year":"2300", - "experiment":"offline ice sheet forced by ISMIP6-specified AOGCM ssp585 output", - "experiment_id":"ism-ssp585-std", - "min_number_yrs_per_sim":"85", - "parent_activity_id":[ - "ISMIP6" - ], - "parent_experiment_id":[ - "ism-historical-std" - ], - "required_model_components":[ - "ISM" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "land-cCO2":{ - "activity_id":[ - "LUMIP" - ], - "additional_allowed_model_components":"", - "description":"Same as land-hist except with CO2 held constant", - "end_year":"2014", - "experiment":"historical land-only constant CO2", - "experiment_id":"land-cCO2", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "LAND" - ], - "start_year":"1850 or 1700", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "land-cClim":{ - "activity_id":[ - "LUMIP" - ], - "additional_allowed_model_components":"", - "description":"Same as land-hist except with climate held constant", - "end_year":"2014", - "experiment":"historical land-only constant climate", - "experiment_id":"land-cClim", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "LAND" - ], - "start_year":"1850 or 1700", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "land-crop-grass":{ - "activity_id":[ - "LUMIP" - ], - "additional_allowed_model_components":"", - "description":"Same as land-hist but with all new crop and pastureland treated as unmanaged grassland", - "end_year":"2014", - "experiment":"historical land-only with cropland as natural grassland", - "experiment_id":"land-crop-grass", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "LAND" - ], - "start_year":"1850 or 1700", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "land-crop-noFert":{ - "activity_id":[ - "LUMIP" - ], - "additional_allowed_model_components":"", - "description":"Same as land-hist but with fertilization rates and area held at 1850 levels/distribution", - "end_year":"2014", - "experiment":"historical land-only with no fertilizer", - "experiment_id":"land-crop-noFert", - 
"min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "LAND" - ], - "start_year":"1850 or 1700", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "land-crop-noIrrig":{ - "activity_id":[ - "LUMIP" - ], - "additional_allowed_model_components":"", - "description":"Same as land-hist but with irrigated area held at 1850 levels", - "end_year":"2014", - "experiment":"historical land-only with no irrigation", - "experiment_id":"land-crop-noIrrig", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "LAND" - ], - "start_year":"1850 or 1700", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "land-crop-noIrrigFert":{ - "activity_id":[ - "LUMIP" - ], - "additional_allowed_model_components":"", - "description":"Same as land-hist except with plants in cropland area utilizing at least some form of crop management (e.g., planting and harvesting) rather than simulating cropland vegetation as a natural grassland. Irrigated area and fertilizer area/use should be held constant", - "end_year":"2014", - "experiment":"historical land-only with managed crops but with irrigation and fertilization held constant", - "experiment_id":"land-crop-noIrrigFert", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "LAND" - ], - "start_year":"1850 or 1700", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "land-future":{ - "activity_id":[ - "LS3MIP" - ], - "additional_allowed_model_components":"", - "description":"Land only simulations", - "end_year":"2100", - "experiment":"future land-only", - "experiment_id":"land-future", - "min_number_yrs_per_sim":"86", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "LAND" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "land-hist":{ - "activity_id":[ - "LS3MIP", - "LUMIP" - ], - "additional_allowed_model_components":"", - "description":"Land only simulations", - "end_year":"2014", - "experiment":"historical land-only", - "experiment_id":"land-hist", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "LAND" - ], - "start_year":"1850 or 1700", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "land-hist-altLu1":{ - "activity_id":[ - "LUMIP" - ], - "additional_allowed_model_components":"", - "description":"Land only simulations", - "end_year":"2014", - "experiment":"historical land-only alternate land-use history", - "experiment_id":"land-hist-altLu1", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "LAND" - ], - "start_year":"1850 or 1700", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "land-hist-altLu2":{ - "activity_id":[ - "LUMIP" - ], - "additional_allowed_model_components":"", - "description":"Land only simulations", - "end_year":"2014", - "experiment":"historical land-only alternate land use history", - "experiment_id":"land-hist-altLu2", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "LAND" - 
], - "start_year":"1850 or 1700", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "land-hist-altStartYear":{ - "activity_id":[ - "LUMIP" - ], - "additional_allowed_model_components":"", - "description":"Same as land-hist except starting from either 1700 (for models that typically start in 1850) or 1850 (for models that typically start in 1700)", - "end_year":"2014", - "experiment":"historical land-only alternate start year", - "experiment_id":"land-hist-altStartYear", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "LAND" - ], - "start_year":"1850 or 1700", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "land-hist-cruNcep":{ - "activity_id":[ - "LS3MIP" - ], - "additional_allowed_model_components":"", - "description":"Land only simulations", - "end_year":"2014", - "experiment":"as land-hist with CRU-NCEP forcings", - "experiment_id":"land-hist-cruNcep", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "LAND" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "land-hist-princeton":{ - "activity_id":[ - "LS3MIP" - ], - "additional_allowed_model_components":"", - "description":"Land only simulations", - "end_year":"2014", - "experiment":"as land-hist with Princeton forcings", - "experiment_id":"land-hist-princeton", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "LAND" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "land-hist-wfdei":{ - "activity_id":[ - "LS3MIP" - ], - "additional_allowed_model_components":"", - "description":"Land only simulations", - "end_year":"2014", - "experiment":"as land-hist with WFDEI forcings", - "experiment_id":"land-hist-wfdei", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "LAND" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "land-noFire":{ - "activity_id":[ - "LUMIP" - ], - "additional_allowed_model_components":"", - "description":"Same as land-hist but with anthropogenic ignition and suppression held to 1850 levels", - "end_year":"2014", - "experiment":"historical land-only with no human fire management", - "experiment_id":"land-noFire", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "LAND" - ], - "start_year":"1850 or 1700", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "land-noLu":{ - "activity_id":[ - "LUMIP" - ], - "additional_allowed_model_components":"", - "description":"Same as land-hist except no land-use change", - "end_year":"2014", - "experiment":"historical land-only with no land-use change", - "experiment_id":"land-noLu", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "LAND" - ], - "start_year":"1850 or 1700", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "land-noPasture":{ - "activity_id":[ - "LUMIP" - ], - "additional_allowed_model_components":"", - "description":"Same as land-hist but with grazing and other management on pastureland held 
at 1850 levels/distribution, i.e. all new pastureland is treated as unmanaged grassland (as in land-crop-grass)", - "end_year":"2014", - "experiment":"historical land-only with constant pastureland", - "experiment_id":"land-noPasture", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "LAND" - ], - "start_year":"1850 or 1700", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "land-noShiftCultivate":{ - "activity_id":[ - "LUMIP" - ], - "additional_allowed_model_components":"", - "description":"Same as land-hist except shifting cultivation turned off. An additional LUC transitions dataset will be provided as a data layer within LUMIP LUH2 dataset with shifting cultivation deactivated", - "end_year":"2014", - "experiment":"historical land-only with shifting cultivation turned off", - "experiment_id":"land-noShiftCultivate", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "LAND" - ], - "start_year":"1850 or 1700", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "land-noWoodHarv":{ - "activity_id":[ - "LUMIP" - ], - "additional_allowed_model_components":"", - "description":"Same as land-hist but with wood harvest maintained at 1850 amounts/areas", - "end_year":"2014", - "experiment":"historical land-only with no wood harvest", - "experiment_id":"land-noWoodHarv", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "LAND" - ], - "start_year":"1850 or 1700", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "lfmip-initLC":{ - "activity_id":[ - "LS3MIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Initialized pseudo-observations land", - "end_year":"2014", - "experiment":"initialized from \"historical\" run year 1980, but with land conditions initialized from pseudo-observations", - "experiment_id":"lfmip-initLC", - "min_number_yrs_per_sim":"35", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "historical" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1980", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "lfmip-pdL-princeton":{ - "activity_id":[ - "LS3MIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Prescribed land conditions 1980-2014 climate with Land-Hist-princeton", - "end_year":"2100", - "experiment":"as LFMIP-pdLC with Land-Hist-princeton", - "experiment_id":"lfmip-pdL-princeton", - "min_number_yrs_per_sim":"121", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "historical" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1980", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "lfmip-pdLC":{ - "activity_id":[ - "LS3MIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Prescribed land conditions 1980-2014 climate", - "end_year":"2100", - "experiment":"prescribed land conditions (from current climate climatology) and initialized from \"historical\" run year 1980", - "experiment_id":"lfmip-pdLC", - "min_number_yrs_per_sim":"121", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "historical" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1980", - 
"sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "lfmip-pdLC-cruNcep":{ - "activity_id":[ - "LS3MIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Prescribed land conditions 1980-2014 climate with Land-Hist-cruNcep", - "end_year":"2100", - "experiment":"as LFMIP-pdLC with Land-Hist-cruNcep", - "experiment_id":"lfmip-pdLC-cruNcep", - "min_number_yrs_per_sim":"121", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "historical" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1980", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "lfmip-pdLC-wfdei":{ - "activity_id":[ - "LS3MIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Prescribed land conditions 1980-2014 climate with Land-Hist-wfdei", - "end_year":"2100", - "experiment":"as LFMIP-pdLC with Land-Hist-wfdei", - "experiment_id":"lfmip-pdLC-wfdei", - "min_number_yrs_per_sim":"121", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "historical" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1980", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "lfmip-rmLC":{ - "activity_id":[ - "LS3MIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Prescribed land conditions 30yr running mean", - "end_year":"2100", - "experiment":"prescribed land conditions (from running mean climatology) and initialized from \"historical\" run year 1980", - "experiment_id":"lfmip-rmLC", - "min_number_yrs_per_sim":"121", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "historical" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1980", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "lfmip-rmLC-cruNcep":{ - "activity_id":[ - "LS3MIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Prescribed land conditions 30yr running mean with Land-Hist-cruNcep", - "end_year":"2100", - "experiment":"as LFMIP-rmLC with Land-Hist-cruNcep", - "experiment_id":"lfmip-rmLC-cruNcep", - "min_number_yrs_per_sim":"121", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "historical" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1980", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "lfmip-rmLC-princeton":{ - "activity_id":[ - "LS3MIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Prescribed land conditions 30yr running mean with Land-Hist-princeton", - "end_year":"2100", - "experiment":"as LFMIP-rmLC with Land-Hist-princeton", - "experiment_id":"lfmip-rmLC-princeton", - "min_number_yrs_per_sim":"121", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "historical" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1980", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "lfmip-rmLC-wfdei":{ - "activity_id":[ - "LS3MIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Prescribed land conditions 30yr running mean with Land-Hist-wfdei", - "end_year":"2100", - "experiment":"as LFMIP-rmLC with Land-Hist-wfdei", - "experiment_id":"lfmip-rmLC-wfdei", - "min_number_yrs_per_sim":"121", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "historical" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1980", - "sub_experiment_id":[ - "none" - ], - 
"tier":"2" - }, - "lgm":{ - "activity_id":[ - "PMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"main forcings: ice-sheet; trace gases, astronomical parameters, dust (forcing, or feedback if dust cycle represented in model)", - "end_year":"", - "experiment":"last glacial maximum", - "experiment_id":"lgm", - "min_number_yrs_per_sim":"100", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "lig127k":{ - "activity_id":[ - "PMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"main forcings: astronomical parameters, trace gases, dust (forcing, or feedback if dust cycle represented in model)", - "end_year":"", - "experiment":"last interglacial (127k)", - "experiment_id":"lig127k", - "min_number_yrs_per_sim":"100", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "midHolocene":{ - "activity_id":[ - "PMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"main forcings: trace gases, orbital parameters, dust (forcing, or feedback if dust cycle represented in model)", - "end_year":"", - "experiment":"mid-Holocene", - "experiment_id":"midHolocene", - "min_number_yrs_per_sim":"200", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "midPliocene-eoi400":{ - "activity_id":[ - "PMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"main forcings: trace gases, orography, ice-sheet", - "end_year":"", - "experiment":"mid-Pliocene warm period", - "experiment_id":"midPliocene-eoi400", - "min_number_yrs_per_sim":"100", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "omip1":{ - "activity_id":[ - "OMIP" - ], - "additional_allowed_model_components":"", - "description":"Global ocean - sea-ice coupled experiment forced with the Coordinated Ocean - ice Reference Experiments inter-annually varying atmospheric and river data sets for years 1948-2009. Initial ocean tracer fields are based on observations. All Priority=1 OMIP diagnostics are requested for all five cycles of the 62-year forcing to quantify drift. 
All OMIP diagnostics (Priority=1,2,3) are requested for the 5th cycle", - "end_year":"310", - "experiment":"OMIP experiment forced by Large and Yeager (CORE-2, NCEP) atmospheric data set and initialized with observed physical and biogeochemical ocean data", - "experiment_id":"omip1", - "min_number_yrs_per_sim":"310", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "OGCM" - ], - "start_year":"1", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "omip1-spunup":{ - "activity_id":[ - "OMIP" - ], - "additional_allowed_model_components":"", - "description":"Same as the omip1 experiment except that it is not initialized with observed climatologies; rather it is initialized with results from at least a 2000-year spin up of the coupled physical-biogeochemical models. The spin up simulations may be made with the classic online or offline approach, or with tracer-acceleration techniques or fast solvers. If an online approach is used, at the end of the 5th cycle of CORE-II forcing, the model's physical fields should be reinitialized to the values at the start of the 3rd cycle in order to avoid long-term drift in those fields and to assure that they will not diverge greatly from physical fields in the omip1 simulation. The spin up also includes radiocarbon to evaluate deep-ocean circulation", - "end_year":"", - "experiment":"OMIP experiment forced by Large and Yeager (CORE-2, NCEP) atmospheric data set and initialized from at least a 2000-year spin up of the coupled physical-biogeochemical model", - "experiment_id":"omip1-spunup", - "min_number_yrs_per_sim":"310", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "OGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "omip2":{ - "activity_id":[ - "OMIP" - ], - "additional_allowed_model_components":"", - "description":"Global ocean - sea-ice coupled experiment forced with the JRA-55do inter-annually varying atmospheric and river data sets for years 1958-2016. Initial ocean tracer fields are based on observations. All Priority=1 OMIP diagnostics are requested for all five cycles of the 59-year forcing to quantify drift. All OMIP diagnostics (Priority=1,2,3) are requested for the 5th cycle", - "end_year":"", - "experiment":"OMIP experiment forced by JRA-55do atmospheric data set and initialized with observed physical and biogeochemical ocean data", - "experiment_id":"omip2", - "min_number_yrs_per_sim":"295", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "OGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "omip2-spunup":{ - "activity_id":[ - "OMIP" - ], - "additional_allowed_model_components":"", - "description":"Same as the omip2 experiment except that it is not initialized with observed climatologies; rather it is initialized with results from at least a 2000-year spin up of the coupled physical-biogeochemical models. The spin up simulations may be made with the classic online or offline approach, or with tracer-acceleration techniques or fast solvers. 
If an online approach is used, at the end of the 5th cycle of the JRA-55do forcing, the model's physical fields should be reinitialized to the values at the start of the 3rd cycle in order to avoid long-term drift in those fields and to assure that they will not diverge greatly from physical fields in the omip2 simulation. The spin up also includes radiocarbon to evaluate deep-ocean circulation", - "end_year":"", - "experiment":"OMIP experiment forced by JRA-55do atmospheric data set and initialized from at least a 2000-year spin up of the coupled physical-biogeochemical model", - "experiment_id":"omip2-spunup", - "min_number_yrs_per_sim":"295", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "OGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "past1000":{ - "activity_id":[ - "PMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"main forcings: trace gases, volcanoes, solar variability, land use", - "end_year":"1849", - "experiment":"last millennium", - "experiment_id":"past1000", - "min_number_yrs_per_sim":"1000", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"850", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "piClim-2xDMS":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":[ - "CHEM" - ], - "description":"1850 control with doubled emissions of DMS", - "end_year":"", - "experiment":"pre-industrial climatological SSTs and forcing, but with doubled emissions of DMS", - "experiment_id":"piClim-2xDMS", - "min_number_yrs_per_sim":"30", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM", - "AER" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "piClim-2xNOx":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":"", - "description":"1850 control with doubled emissions of lightning NOx", - "end_year":"", - "experiment":"pre-industrial climatological SSTs and forcing, but with doubled production of NOx due to lightning", - "experiment_id":"piClim-2xNOx", - "min_number_yrs_per_sim":"30", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM", - "AER", - "CHEM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "piClim-2xVOC":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":"", - "description":"1850 control with doubled emissions of biogenic VOCs", - "end_year":"", - "experiment":"pre-industrial climatological SSTs and forcing, but with doubled emissions of biogenic VOCs", - "experiment_id":"piClim-2xVOC", - "min_number_yrs_per_sim":"30", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM", - "AER", - "CHEM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "piClim-2xdust":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":[ - "CHEM" - ], - "description":"1850 control with doubled dust emissions", - "end_year":"", - "experiment":"pre-industrial climatological SSTs and forcing, but with doubled emissions of dust", - "experiment_id":"piClim-2xdust", - "min_number_yrs_per_sim":"30", -
"parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM", - "AER" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "piClim-2xfire":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":[ - "CHEM" - ], - "description":"1850 control with doubled emissions of fires", - "end_year":"", - "experiment":"pre-industrial climatological SSTs and forcing, but with doubled emissions from fires", - "experiment_id":"piClim-2xfire", - "min_number_yrs_per_sim":"30", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM", - "AER" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "piClim-2xss":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":[ - "CHEM" - ], - "description":"1850 control with doubled sea salt emissions", - "end_year":"", - "experiment":"pre-industrial climatological SSTs and forcing, but with doubled emissions of sea salt", - "experiment_id":"piClim-2xss", - "min_number_yrs_per_sim":"30", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM", - "AER" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "piClim-4xCO2":{ - "activity_id":[ - "RFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"As in RFMIP-ERF-PI-Cntrl but with 4xCO2", - "end_year":"", - "experiment":"effective radiative forcing by 4xCO2", - "experiment_id":"piClim-4xCO2", - "min_number_yrs_per_sim":"30", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "piClim-BC":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":[ - "CHEM" - ], - "description":"Perturbation from 1850 control using 2014 BC emissions", - "end_year":"", - "experiment":"pre-industrial climatological SSTs and forcing, but with 2014 black carbon emissions", - "experiment_id":"piClim-BC", - "min_number_yrs_per_sim":"30", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM", - "AER" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "piClim-CH4":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":"", - "description":"Perturbation from 1850 control using 2014 CH4 concentrations", - "end_year":"", - "experiment":"pre-industrial climatological SSTs and forcing, but with 2014 methane concentrations (including chemistry)", - "experiment_id":"piClim-CH4", - "min_number_yrs_per_sim":"30", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM", - "AER", - "CHEM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "piClim-HC":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":"", - "description":"Perturbation from 1850 control using 2014 halocarbon concentrations", - "end_year":"", - "experiment":"pre-industrial climatological SSTs and forcing, but with 2014 halocarbon concentrations (including chemistry)", - "experiment_id":"piClim-HC", - "min_number_yrs_per_sim":"30", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - 
"piControl" - ], - "required_model_components":[ - "AGCM", - "AER", - "CHEM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "piClim-N2O":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":"", - "description":"Perturbation from 1850 control using 2014 N2O concentrations", - "end_year":"", - "experiment":"pre-industrial climatological SSTs and forcing, but with 2014 N2O concentrations (including chemistry)", - "experiment_id":"piClim-N2O", - "min_number_yrs_per_sim":"30", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM", - "AER", - "CHEM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "piClim-NH3":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":[ - "CHEM" - ], - "description":"Perturbation from 1850 control using 2014 NH3 emissions", - "end_year":"", - "experiment":"pre-industrial climatological SSTs and forcing, but with 2014 ammonia emissions", - "experiment_id":"piClim-NH3", - "min_number_yrs_per_sim":"30", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM", - "AER" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "piClim-NOx":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":"", - "description":"Perturbation from 1850 control using 2014 NOx emissions", - "end_year":"", - "experiment":"pre-industrial climatological SSTs and forcing, but with 2014 NOx emissions", - "experiment_id":"piClim-NOx", - "min_number_yrs_per_sim":"30", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM", - "AER", - "CHEM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "piClim-NTCF":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":[ - "CHEM", - "BGC" - ], - "description":"Perturbation from 1850 control using 2014 aerosol and ozone precursor emissions", - "end_year":"", - "experiment":"pre-industrial climatological SSTs and forcing, but with 2014 NTCF emissions", - "experiment_id":"piClim-NTCF", - "min_number_yrs_per_sim":"30", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM", - "AER" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "piClim-O3":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":"", - "description":"Perturbation from 1850 control using 2014 ozone precursor emissions", - "end_year":"", - "experiment":"pre-industrial climatological SSTs and forcing, but with 2014 ozone precursor emissions", - "experiment_id":"piClim-O3", - "min_number_yrs_per_sim":"30", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM", - "AER", - "CHEM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "piClim-OC":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":[ - "CHEM" - ], - "description":"Perturbation from 1850 control using 2014 OC emissions", - "end_year":"", - "experiment":"pre-industrial climatological SSTs and forcing, but with 2014 organic carbon emissions", - "experiment_id":"piClim-OC", - "min_number_yrs_per_sim":"30", - "parent_activity_id":[ - "CMIP" - ], - 
"parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM", - "AER" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "piClim-SO2":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":[ - "CHEM" - ], - "description":"Perturbation from 1850 control using 2014 SO2 emissions", - "end_year":"", - "experiment":"pre-industrial climatological SSTs and forcing, but with 2014 SO2 emissions", - "experiment_id":"piClim-SO2", - "min_number_yrs_per_sim":"30", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM", - "AER" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "piClim-VOC":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":"", - "description":"Perturbation from 1850 control using 2014 CO/VOC emissions", - "end_year":"", - "experiment":"pre-industrial climatological SSTs and forcing, but with 2014 VOC emissions", - "experiment_id":"piClim-VOC", - "min_number_yrs_per_sim":"30", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM", - "AER", - "CHEM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "piClim-aer":{ - "activity_id":[ - "RFMIP", - "AerChemMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"As in RFMIP-ERF-PI-Cntrl but with with present-day aerosols. Note that this experiment is considered to be tier 1 by RFMIP but tier 2 by AerChemMIP", - "end_year":"", - "experiment":"effective radiative forcing by present-day aerosols", - "experiment_id":"piClim-aer", - "min_number_yrs_per_sim":"30", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "piClim-anthro":{ - "activity_id":[ - "RFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"As in RFMIP-ERF-PI-Cntrl but with present-day anthropogenic forcing (greenhouse gases, aerosols and land-use)", - "end_year":"", - "experiment":"effective radiative forcing by present day anthropogenic agents", - "experiment_id":"piClim-anthro", - "min_number_yrs_per_sim":"30", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "piClim-control":{ - "activity_id":[ - "RFMIP", - "AerChemMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"30-year atmosphere only integration using preindustrial sea-surface temperature and sea-ice climatology. 
Interactive vegetation", - "end_year":"", - "experiment":"effective radiative forcing in present-day", - "experiment_id":"piClim-control", - "min_number_yrs_per_sim":"30", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "piClim-ghg":{ - "activity_id":[ - "RFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"As in RFMIP-ERF-PI-Cntrl but with present-day greenhouse gases", - "end_year":"", - "experiment":"effective radiative forcing by present-day greenhouse gases", - "experiment_id":"piClim-ghg", - "min_number_yrs_per_sim":"30", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "piClim-histaer":{ - "activity_id":[ - "RFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"Time-varying forcing by aerosols. SST and sea ice fixed at preindustrial control. Interactive vegetation", - "end_year":"2100", - "experiment":"transient effective radiative forcing by aerosols", - "experiment_id":"piClim-histaer", - "min_number_yrs_per_sim":"251", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "piClim-histall":{ - "activity_id":[ - "RFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"Time-varying forcing. SST and sea ice fixed at preindustrial control. Interactive vegetation", - "end_year":"2100", - "experiment":"transient effective radiative forcing", - "experiment_id":"piClim-histall", - "min_number_yrs_per_sim":"251", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "piClim-histghg":{ - "activity_id":[ - "RFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"Time-varying forcing by GHGs. SST and sea ice fixed at preindustrial control. Interactive vegetation", - "end_year":"2100", - "experiment":"transient effective radiative forcing by greenhouse gases", - "experiment_id":"piClim-histghg", - "min_number_yrs_per_sim":"251", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "piClim-histnat":{ - "activity_id":[ - "RFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"Time-varying forcing from volcanos, solar variability, etc. SST and sea ice fixed at preindustrial control. 
Interactive vegetation", - "end_year":"2100", - "experiment":"transient effective radiative forcing by natural perturbations", - "experiment_id":"piClim-histnat", - "min_number_yrs_per_sim":"251", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "piClim-lu":{ - "activity_id":[ - "RFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"As in RFMIP-ERF-PI-Cntrl but with present-day land use", - "end_year":"", - "experiment":"effective radiative forcing by present-day land use", - "experiment_id":"piClim-lu", - "min_number_yrs_per_sim":"30", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "piClim-spAer-aer":{ - "activity_id":[ - "RFMIP" - ], - "additional_allowed_model_components":"", - "description":"Prescribed anthropogenic aerosol optical properties. Aerosol and ozone forcings", - "end_year":"", - "experiment":"effective radiative forcing at present day with specified anthropogenic aerosol optical properties, all forcings", - "experiment_id":"piClim-spAer-aer", - "min_number_yrs_per_sim":"", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "piClim-spAer-anthro":{ - "activity_id":[ - "RFMIP" - ], - "additional_allowed_model_components":"", - "description":"Prescribed anthropogenic aerosol optical properties. Anthropogenic forcings", - "end_year":"", - "experiment":"effective radiative forcing at present day with specified anthropogenic aerosol optical properties, anthropogenic forcings", - "experiment_id":"piClim-spAer-anthro", - "min_number_yrs_per_sim":"", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "piClim-spAer-histaer":{ - "activity_id":[ - "RFMIP" - ], - "additional_allowed_model_components":"", - "description":"Prescribed anthropogenic aerosol optical properties. Aerosol and ozone forcings", - "end_year":"2014", - "experiment":"transient effective radiative forcing with specified anthropogenic aerosol optical properties, aerosol forcing", - "experiment_id":"piClim-spAer-histaer", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "piClim-spAer-histall":{ - "activity_id":[ - "RFMIP" - ], - "additional_allowed_model_components":"", - "description":"Prescribed anthropogenic aerosol optical properties. 
Aerosol and ozone forcings", - "end_year":"2014", - "experiment":"transient effective radiative forcing with specified anthropogenic aerosol optical properties, all forcings", - "experiment_id":"piClim-spAer-histall", - "min_number_yrs_per_sim":"165", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"1850", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "piControl":{ - "activity_id":[ - "CMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"DECK: control", - "end_year":"", - "experiment":"pre-industrial control", - "experiment_id":"piControl", - "min_number_yrs_per_sim":"500", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl-spinup" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "piControl-spinup":{ - "activity_id":[ - "CMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"DECK: spin-up portion of the control", - "end_year":"", - "experiment":"pre-industrial control (spin-up)", - "experiment_id":"piControl-spinup", - "min_number_yrs_per_sim":"100", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "piControl-withism":{ - "activity_id":[ - "ISMIP6" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Pre-industrial control simulation that includes interactive ice sheets", - "end_year":"", - "experiment":"preindustrial control with interactive ice sheet", - "experiment_id":"piControl-withism", - "min_number_yrs_per_sim":"500", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AOGCM", - "ISM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "piSST":{ - "activity_id":[ - "CFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"An AGCM experiment with monthly-varying SSTs, sea-ice, atmospheric constituents and any other necessary boundary conditions (e.g. vegetation if required) taken from each model's own piControl run (using the 30 years of piControl that are parallel to years 111-140 of its abrupt4xCO2 run). Dynamic vegetation should be turned off in all the piSST set of experiments", - "end_year":"", - "experiment":"experiment forced with pre-industrial SSTs, sea ice and atmospheric constituents", - "experiment_id":"piSST", - "min_number_yrs_per_sim":"30", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "piSST-4xCO2":{ - "activity_id":[ - "CFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"Same as piSST but CO2 is quadrupled. 
The increase in CO2 is seen by both the radiation scheme and vegetation", - "end_year":"", - "experiment":"as piSST with radiation and vegetation seeing 4xCO2", - "experiment_id":"piSST-4xCO2", - "min_number_yrs_per_sim":"30", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "piSST-4xCO2-rad":{ - "activity_id":[ - "CFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"Same as piSST but CO2 as seen by the radiation scheme is quadrupled", - "end_year":"", - "experiment":"as piSST with radiation-only seeing 4xCO2", - "experiment_id":"piSST-4xCO2-rad", - "min_number_yrs_per_sim":"30", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "piSST-4xCO2-solar":{ - "activity_id":[ - "GeoMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"Time slice at 1850 (piControl) for G1ext to examine radiative forcing of abrupt4xCO2", - "end_year":"", - "experiment":"preindustrial control SSTs with quadrupled CO2 and solar reduction", - "experiment_id":"piSST-4xCO2-solar", - "min_number_yrs_per_sim":"10", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "piSST-pxK":{ - "activity_id":[ - "CFMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"Same as piSST, but with a spatially and temporally uniform SST anomaly applied on top of the monthly-varying piSST SSTs. The magnitude of the uniform increase is taken from each model's global, climatological annual mean SST change between abrupt4xCO2 minus piControl (using the mean of years 111-140 of abrupt4xCO2, and the parallel 30-year section of piControl)", - "end_year":"", - "experiment":"as piSST with uniform SST increase with magnitude based on abrupt4xCO2 response", - "experiment_id":"piSST-pxK", - "min_number_yrs_per_sim":"20", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "rad-irf":{ - "activity_id":[ - "RFMIP" - ], - "additional_allowed_model_components":"", - "description":"Offline radiation calculations", - "end_year":"", - "experiment":"offline assessment of radiative transfer parameterizations in clear skies", - "experiment_id":"rad-irf", - "min_number_yrs_per_sim":"", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "RAD" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "spinup-1950":{ - "activity_id":[ - "HighResMIP" - ], - "additional_allowed_model_components":[ - "AER" - ], - "description":"Coupled integration from ocean rest state using recommended HighResMIP protocol spinup, starting from 1950 ocean temperature and salinity analysis EN4, using constant 1950s forcing. 
At least 30 years to satisfy near surface quasi-equilibrium", - "end_year":"", - "experiment":"coupled spinup with fixed 1950s forcings from 1950 initial conditions (with ocean at rest) to provide initial condition for control-1950 and hist-1950", - "experiment_id":"spinup-1950", - "min_number_yrs_per_sim":"30", - "parent_activity_id":[ - "no parent" - ], - "parent_experiment_id":[ - "no parent" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "ssp126":{ - "activity_id":[ - "ScenarioMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Future scenario with low radiative forcing by the end of century. Following approximately RCP2.6 global forcing pathway but with new forcing based on SSP1. Concentration-driven. As a tier 2 option, this simulation should be extended to year 2300", - "end_year":"2100 or 2300", - "experiment":"update of RCP2.6 based on SSP1", - "experiment_id":"ssp126", - "min_number_yrs_per_sim":"86", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "historical" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "ssp126-ssp370Lu":{ - "activity_id":[ - "LUMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Additional land use policy sensitivity simulation for low radiative forcing scenario, keep all forcings the same as ScenarioMIP SSP1-2.6 (afforestation scenario), but replace land use from SSP3-7 (deforestation) scenario; concentration-driven", - "end_year":"2100", - "experiment":"SSP1-2.6 with SSP3-7.0 land use", - "experiment_id":"ssp126-ssp370Lu", - "min_number_yrs_per_sim":"86", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "historical" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "ssp245":{ - "activity_id":[ - "ScenarioMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Future scenario with medium radiative forcing by the end of century. Following approximately RCP4.5 global forcing pathway but with new forcing based on SSP2. Concentration-driven", - "end_year":"2100", - "experiment":"update of RCP4.5 based on SSP2", - "experiment_id":"ssp245", - "min_number_yrs_per_sim":"86", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "historical" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "ssp245-GHG":{ - "activity_id":[ - "DAMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Extension of well-mixed GHG-only run under SSP2-4.5. 
Models with interactive chemistry schemes should either turn off the chemistry or use a preindustrial climatology of stratospheric and tropospheric ozone in their radiation schemes", - "end_year":"2100", - "experiment":"well-mixed GHG-only SSP2-4.5 run", - "experiment_id":"ssp245-GHG", - "min_number_yrs_per_sim":"80", - "parent_activity_id":[ - "DAMIP" - ], - "parent_experiment_id":[ - "hist-GHG" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"2021", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "ssp245-aer":{ - "activity_id":[ - "DAMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Extension of aerosol-only run under SSP2-4.5", - "end_year":"2100", - "experiment":"aerosol-only SSP2-4.5 run", - "experiment_id":"ssp245-aer", - "min_number_yrs_per_sim":"80", - "parent_activity_id":[ - "DAMIP" - ], - "parent_experiment_id":[ - "hist-aer" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"2021", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "ssp245-nat":{ - "activity_id":[ - "DAMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Extension of natural-only run under SSP2-4.5", - "end_year":"2100", - "experiment":"natural-only SSP2-4.5 run", - "experiment_id":"ssp245-nat", - "min_number_yrs_per_sim":"80", - "parent_activity_id":[ - "DAMIP" - ], - "parent_experiment_id":[ - "hist-nat" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"2021", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "ssp245-stratO3":{ - "activity_id":[ - "DAMIP" - ], - "additional_allowed_model_components":[ - "AER", - "BGC" - ], - "description":"Extension of stratospheric-ozone-only run under SSP2-4.5. In models with coupled chemistry, the chemistry scheme should be turned off, and the simulated ensemble mean monthly mean 3D stratospheric ozone concentrations from the SSP2-4.5 simulations should be prescribed. Tropospheric ozone should be fixed at 3D long-term monthly mean piControl values, with a value of 100 ppbv ozone concentration in this piControl climatology used to separate the troposphere from the stratosphere. In models without coupled chemistry the same stratospheric ozone prescribed in SSP2-4.5 should be prescribed. Stratospheric ozone concentrations will be provided by CCMI", - "end_year":"2100", - "experiment":"stratospheric-ozone-only SSP2-4.5 run", - "experiment_id":"ssp245-stratO3", - "min_number_yrs_per_sim":"80", - "parent_activity_id":[ - "DAMIP" - ], - "parent_experiment_id":[ - "hist-stratO3" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"2021", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "ssp370":{ - "activity_id":[ - "ScenarioMIP", - "AerChemMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Future scenario with high radiative forcing by the end of century. Reaches about 7.0 W/m2 by 2100; fills gap in RCP forcing pathways between 6.0 and 8.5 W/m2. 
Concentration-driven", - "end_year":"2100", - "experiment":"gap-filling scenario reaching 7.0 based on SSP3", - "experiment_id":"ssp370", - "min_number_yrs_per_sim":"86", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "historical" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "ssp370-lowNTCF":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":[ - "CHEM", - "BGC" - ], - "description":"Future SSP3-7.0 with reduced NTCF emissions", - "end_year":"2055", - "experiment":"SSP3-7.0, with low NTCF emissions", - "experiment_id":"ssp370-lowNTCF", - "min_number_yrs_per_sim":"41", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "historical" - ], - "required_model_components":[ - "AOGCM", - "AER" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "ssp370-ssp126Lu":{ - "activity_id":[ - "LUMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Additional land use policy sensitivity simulation for high radiative forcing scenario, keep all forcings the same as ScenarioMIP SSP3-7 (deforestation scenario), but replace land use from SSP1-2.6 (afforestation) scenario; concentration-driven", - "end_year":"2100", - "experiment":"SSP3-7.0 with SSP1-2.6 land use", - "experiment_id":"ssp370-ssp126Lu", - "min_number_yrs_per_sim":"86", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "historical" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "ssp370SST":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":[ - "CHEM" - ], - "description":"Future SSP3-7.0, with SSTs prescribed from ssp370", - "end_year":"2055", - "experiment":"SSP3-7.0, with SSTs prescribed from ssp370", - "experiment_id":"ssp370SST", - "min_number_yrs_per_sim":"41", - "parent_activity_id":[ - "ScenarioMIP" - ], - "parent_experiment_id":[ - "ssp370" - ], - "required_model_components":[ - "AGCM", - "AER" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "ssp370SST-lowAer":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":[ - "CHEM" - ], - "description":"Future SSP3-7.0 with reduced aerosol emissions (from ssp370-lowNTCF), prescribed SSTs", - "end_year":"2055", - "experiment":"SSP3-7.0, prescribed SSTs, with low aerosol emissions", - "experiment_id":"ssp370SST-lowAer", - "min_number_yrs_per_sim":"41", - "parent_activity_id":[ - "ScenarioMIP" - ], - "parent_experiment_id":[ - "ssp370" - ], - "required_model_components":[ - "AGCM", - "AER" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "ssp370SST-lowBC":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":[ - "CHEM" - ], - "description":"Future SSP3-7.0 with reduced black carbon emissions, prescribed SSTs", - "end_year":"2055", - "experiment":"SSP3-7.0, prescribed SSTs, with low black carbon emissions", - "experiment_id":"ssp370SST-lowBC", - "min_number_yrs_per_sim":"41", - "parent_activity_id":[ - "ScenarioMIP" - ], - "parent_experiment_id":[ - "ssp370" - ], - "required_model_components":[ - "AGCM", - "AER" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "ssp370SST-lowCH4":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":"", - 
"description":"Future SSP3-7.0 with reduced CH4 concentrations, prescribed SSTs", - "end_year":"2055", - "experiment":"SSP3-7.0, prescribed SSTs, with low methane concentrations", - "experiment_id":"ssp370SST-lowCH4", - "min_number_yrs_per_sim":"41", - "parent_activity_id":[ - "ScenarioMIP" - ], - "parent_experiment_id":[ - "ssp370" - ], - "required_model_components":[ - "AGCM", - "AER", - "CHEM" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "ssp370SST-lowNTCF":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":[ - "CHEM", - "BGC" - ], - "description":"Future SSP3-7.0 with reduced NTCF emissions, prescribed SSTs", - "end_year":"2055", - "experiment":"SSP3-7.0, prescribed SSTs, with low NTCF emissions", - "experiment_id":"ssp370SST-lowNTCF", - "min_number_yrs_per_sim":"41", - "parent_activity_id":[ - "ScenarioMIP" - ], - "parent_experiment_id":[ - "ssp370" - ], - "required_model_components":[ - "AGCM", - "AER" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "ssp370SST-lowO3":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":"", - "description":"Future SSP3-7.0 with reduced ozone precursor emissions (from ssp370-lowNTCF), prescribed SSTs", - "end_year":"2055", - "experiment":"SSP3-7.0, prescribed SSTs, with low ozone precursor emissions", - "experiment_id":"ssp370SST-lowO3", - "min_number_yrs_per_sim":"41", - "parent_activity_id":[ - "ScenarioMIP" - ], - "parent_experiment_id":[ - "ssp370" - ], - "required_model_components":[ - "AGCM", - "AER", - "CHEM" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "ssp370SST-ssp126Lu":{ - "activity_id":[ - "AerChemMIP" - ], - "additional_allowed_model_components":[ - "CHEM" - ], - "description":"Future SSP3-7.0 with low land use change (from ssp126), prescribed SSTs", - "end_year":"2055", - "experiment":"SSP3-7.0, prescribed SSTs, with SSP1-2.6 land use", - "experiment_id":"ssp370SST-ssp126Lu", - "min_number_yrs_per_sim":"41", - "parent_activity_id":[ - "ScenarioMIP" - ], - "parent_experiment_id":[ - "ssp370" - ], - "required_model_components":[ - "AGCM", - "AER" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "ssp434":{ - "activity_id":[ - "ScenarioMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Future scenario with low radiative forcing by the end of century. Reaches about 3.4 W/m2 by 2100; fills gap in RCP forcing pathways between 4.5 and 2.6 W/m2. Concentration-driven", - "end_year":"2100", - "experiment":"gap-filling scenario reaching 3.4 based on SSP4", - "experiment_id":"ssp434", - "min_number_yrs_per_sim":"86", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "historical" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "ssp460":{ - "activity_id":[ - "ScenarioMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Future scenario with medium radiative forcing by the end of century. Following approximately RCP6.0 global forcing pathway but with new forcing based on SSP4. 
Concentration-driven", - "end_year":"2100", - "experiment":"update of RCP6.0 based on SSP4", - "experiment_id":"ssp460", - "min_number_yrs_per_sim":"86", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "historical" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "ssp534-over":{ - "activity_id":[ - "ScenarioMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"21st century overshoot scenario relative to SSP5_34. Branches from SS5_85 at 2040 with emissions reduced to zero by 2070 and negative thereafter. This simulation should optionally be extended to year 2300", - "end_year":"2100 or 2300", - "experiment":"overshoot of 3.4 W/m**2 branching from ssp585 in 2040", - "experiment_id":"ssp534-over", - "min_number_yrs_per_sim":"61", - "parent_activity_id":[ - "ScenarioMIP" - ], - "parent_experiment_id":[ - "ssp585" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"2040", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "ssp534-over-bgc":{ - "activity_id":[ - "C4MIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"This simulation should optionally be extended to year 2300", - "end_year":"2100 or 2300", - "experiment":"biogeochemically-coupled version of the RCP3.4-overshoot based on SSP5", - "experiment_id":"ssp534-over-bgc", - "min_number_yrs_per_sim":"85", - "parent_activity_id":[ - "C4MIP" - ], - "parent_experiment_id":[ - "hist-bgc" - ], - "required_model_components":[ - "AOGCM", - "BGC" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "ssp585":{ - "activity_id":[ - "ScenarioMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Future scenario with high radiative forcing by the end of century. Following approximately RCP8.5 global forcing pathway but with new forcing based on SSP5. Concentration-driven. As a tier 2 option, this simulation should be extended to year 2300", - "end_year":"2100 or 2300", - "experiment":"update of RCP8.5 based on SSP5", - "experiment_id":"ssp585", - "min_number_yrs_per_sim":"86", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "historical" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "ssp585-bgc":{ - "activity_id":[ - "C4MIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM" - ], - "description":"Concentration-driven future scenario simulation, biogeochemically-coupled. This simulation should optionally be extended to year 2300", - "end_year":"2100 or 2300", - "experiment":"biogeochemically-coupled version of the RCP8.5 based on SSP5", - "experiment_id":"ssp585-bgc", - "min_number_yrs_per_sim":"85", - "parent_activity_id":[ - "C4MIP" - ], - "parent_experiment_id":[ - "hist-bgc" - ], - "required_model_components":[ - "AOGCM", - "BGC" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "ssp585-withism":{ - "activity_id":[ - "ISMIP6" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Future climate from ScenarioMIP SSP5-8.5 simulation that includes interactive ice sheets. 
Set up follows the standard SSP5-8.5 experiment", - "end_year":"2300", - "experiment":"ssp585 with interactive ice sheet", - "experiment_id":"ssp585-withism", - "min_number_yrs_per_sim":"85", - "parent_activity_id":[ - "ISMIP6" - ], - "parent_experiment_id":[ - "historical-withism" - ], - "required_model_components":[ - "AOGCM", - "ISM" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "sspxy":{ - "activity_id":[ - "ScenarioMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Future scenario with low radiative forcing by the end of century. Following a forcing pathway below RCP2.6. Specific SSP and 2100 forcing level to be finalized with IAM groups within next few months. Concentration-driven", - "end_year":"2100", - "experiment":"low-end scenario informing 1.5C goal", - "experiment_id":"sspxy", - "min_number_yrs_per_sim":"86", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "historical" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "volc-cluster-21C":{ - "activity_id":[ - "VolMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Parallel experiment to volc-cluster-ctrl, using restart files from the end of the historical simulation instead of from piControl, and boundary conditions from the 21st century SSP2-4.5 scenario experiment of ScenarioMIP", - "end_year":"2100", - "experiment":"volcanic cluster experiment under 21st century SSP2-4.5 scenario", - "experiment_id":"volc-cluster-21C", - "min_number_yrs_per_sim":"85", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "historical" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"2015", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "volc-cluster-ctrl":{ - "activity_id":[ - "VolMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Early 19th century cluster of strong tropical volcanic eruptions, including the 1809 event of unknown location, the 1815 Tambora and 1835 Cosigüina eruptions. Experiment initialized from PiControl", - "end_year":"", - "experiment":"19th century volcanic cluster initialized from PiControl", - "experiment_id":"volc-cluster-ctrl", - "min_number_yrs_per_sim":"50", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "volc-cluster-mill":{ - "activity_id":[ - "VolMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Parallel experiment to volc-cluster-ctrl but with initial conditions taken from last millennium simulation to account for the effects of a more realistic history of past natural forcing. 
All forcings except volcanic kept constant from year AD 1790 on", - "end_year":"1858", - "experiment":"19th century volcanic cluster initialized from past1000", - "experiment_id":"volc-cluster-mill", - "min_number_yrs_per_sim":"69", - "parent_activity_id":[ - "PMIP" - ], - "parent_experiment_id":[ - "past1000" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"1790", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "volc-long-eq":{ - "activity_id":[ - "VolMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Idealized equatorial eruption corresponding to an initial emission of 56.2 Tg of SO2. The eruption magnitude corresponds to recent estimates for the 1815 Tambora eruption (Sigl et al., 2015), the largest historical tropical eruption, which was linked to the so-called \"year without a summer\" in 1816. Experiment initialized from PiControl", - "end_year":"", - "experiment":"idealized equatorial volcanic eruption emitting 56.2 Tg SO2", - "experiment_id":"volc-long-eq", - "min_number_yrs_per_sim":"20", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "volc-long-hlN":{ - "activity_id":[ - "VolMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Idealized Northern Hemisphere high-latitude eruption emitting 28.1 Tg of SO2. Experiment initialized from PiControl", - "end_year":"", - "experiment":"idealized Northern Hemisphere high-latitude eruption emitting 28.1 Tg of SO2", - "experiment_id":"volc-long-hlN", - "min_number_yrs_per_sim":"20", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"2" - }, - "volc-long-hlS":{ - "activity_id":[ - "VolMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"Idealized Southern Hemisphere high-latitude eruption emitting 28.1 Tg of SO2. Experiment initialized from PiControl", - "end_year":"", - "experiment":"Idealized Southern Hemisphere high-latitude eruption emitting 28.1 Tg of SO2", - "experiment_id":"volc-long-hlS", - "min_number_yrs_per_sim":"20", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "volc-pinatubo-full":{ - "activity_id":[ - "VolMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"1991 Pinatubo forcing as used in the CMIP6 historical simulations. Requires special diagnostics of radiative and latent heating rates. 
A large number of ensemble members is required to address internal atmospheric variability", - "end_year":"", - "experiment":"Pinatubo experiment", - "experiment_id":"volc-pinatubo-full", - "min_number_yrs_per_sim":"3", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "volc-pinatubo-slab":{ - "activity_id":[ - "VolMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"As volc-pinatubo-full, but with a slab ocean", - "end_year":"", - "experiment":"Pinatubo experiment with slab ocean", - "experiment_id":"volc-pinatubo-slab", - "min_number_yrs_per_sim":"3", - "parent_activity_id":[ - "VolMIP" - ], - "parent_experiment_id":[ - "control-slab" - ], - "required_model_components":[ - "AGCM", - "SLAB" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"3" - }, - "volc-pinatubo-strat":{ - "activity_id":[ - "VolMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"As volc-pinatubo-full, but with prescribed perturbation to the total (LW+SW) radiative heating rates", - "end_year":"", - "experiment":"Pinatubo experiment with partial radiative forcing, includes only stratospheric warming", - "experiment_id":"volc-pinatubo-strat", - "min_number_yrs_per_sim":"3", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - }, - "volc-pinatubo-surf":{ - "activity_id":[ - "VolMIP" - ], - "additional_allowed_model_components":[ - "AER", - "CHEM", - "BGC" - ], - "description":"As volc-pinatubo-full, but with prescribed perturbation to the shortwave flux to mimic the attenuation of solar radiation by volcanic aerosols", - "end_year":"", - "experiment":"Pinatubo experiment with partial radiative forcing, solar radiation scattering only", - "experiment_id":"volc-pinatubo-surf", - "min_number_yrs_per_sim":"3", - "parent_activity_id":[ - "CMIP" - ], - "parent_experiment_id":[ - "piControl" - ], - "required_model_components":[ - "AOGCM" - ], - "start_year":"", - "sub_experiment_id":[ - "none" - ], - "tier":"1" - } - }, - "product":[ - "model-output" - ], - "tracking_id":[ - "hdl:21.14100/.*" - ], - "further_info_url":[ - "http://furtherinfo.es-doc.org/[[:alpha:]]\\{1,\\}" - ], - "realization_index":[ - "^\\[\\{0,\\}[[:digit:]]\\{1,\\}\\]\\{0,\\}$" - ], - "variant_label":[ - "r[[:digit:]]\\{1,\\}i[[:digit:]]\\{1,\\}p[[:digit:]]\\{1,\\}f[[:digit:]]\\{1,\\}$" - ], - "data_specs_version":[ - "^[[:digit:]]\\{2,2\\}\\.[[:digit:]]\\{2,2\\}\\.[[:digit:]]\\{2,2\\}$" - ], - "Conventions":[ - "^CF-1.7 CMIP-6.0\\( UGRID-1.0\\)\\{0,\\}$" - ], - "forcing_index":[ - "^\\[\\{0,\\}[[:digit:]]\\{1,\\}\\]\\{0,\\}$" - ], - "initialization_index":[ - "^\\[\\{0,\\}[[:digit:]]\\{1,\\}\\]\\{0,\\}$" - ], - "physics_index":[ - "^\\[\\{0,\\}[[:digit:]]\\{1,\\}\\]\\{0,\\}$" - ] - } -} \ No newline at end of file
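The removed CMIP6_CV.json ends with the controlled vocabulary's validation patterns: attributes such as realization_index, variant_label and data_specs_version are constrained by POSIX basic regular expressions (hence the escaped quantifiers like \{1,\}) rather than by enumerated value lists. As a minimal sketch of how a checker might apply one of these patterns (the helpers bre_to_python and valid_variant_label are hypothetical, not ESMValTool's actual CMOR-check code), the BRE escapes first have to be translated into Python's re syntax:

```python
import re

# Pattern from the "variant_label" entry of the deleted CV above
# (backslashes as decoded from the JSON string).
VARIANT_LABEL_BRE = r"r[[:digit:]]\{1,\}i[[:digit:]]\{1,\}p[[:digit:]]\{1,\}f[[:digit:]]\{1,\}$"


def bre_to_python(pattern: str) -> str:
    """Translate POSIX BRE escapes and classes to Python `re` syntax (assumed helper)."""
    # In BRE, quantifiers \{m,n\} and groups \(...\) are escaped; Python uses the bare form.
    pattern = pattern.replace(r"\{", "{").replace(r"\}", "}")
    pattern = pattern.replace(r"\(", "(").replace(r"\)", ")")
    # Python `re` has no POSIX character classes; map the only one used here.
    return pattern.replace("[:digit:]", "0-9")


def valid_variant_label(label: str) -> bool:
    """Return True if `label` matches the CV's variant_label pattern."""
    return re.fullmatch(bre_to_python(VARIANT_LABEL_BRE), label) is not None


assert valid_variant_label("r1i1p1f1")    # typical CMIP6 variant label
assert not valid_variant_label("r1i1p1")  # missing forcing index, rejected
```

A fuller implementation would load these patterns from the JSON file itself rather than hard-coding one of them.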
-1,290 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table E1hr", - "realm": "atmos", - "frequency": "1hr", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "0.017361", - "generic_levels": "", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "pr": { - "modeling_realm": "atmos", - "standard_name": "precipitation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Precipitation", - "comment": "includes both liquid and solid phases", - "dimensions": "longitude latitude time", - "out_name": "pr", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prc": { - "modeling_realm": "atmos", - "standard_name": "convective_precipitation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Convective Precipitation", - "comment": "Convective precipitation at surface; includes both liquid and solid phases.", - "dimensions": "longitude latitude time", - "out_name": "prc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "psl": { - "modeling_realm": "atmos", - "standard_name": "air_pressure_at_sea_level", - "units": "Pa", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Sea Level Pressure", - "comment": "Sea Level Pressure", - "dimensions": "longitude latitude time1", - "out_name": "psl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlut": { - "modeling_realm": "atmos", - "standard_name": "toa_outgoing_longwave_flux", - "units": "W m-2", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "TOA Outgoing Longwave Radiation", - "comment": "at the top of the atmosphere (to be compared with satellite measurements)", - "dimensions": "longitude latitude time1", - "out_name": "rlut", - "type": "", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ta": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Air Temperature", - "comment": "Air Temperature", - "dimensions": "longitude latitude plev3 time1", - "out_name": "ta", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ta27": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Air Temperature", - "comment": "Air Temperature", - "dimensions": "longitude latitude plev27 time1", - "out_name": "ta", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntr27": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature_due_to_radiative_heating", - "units": "K s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Tendency of Air Temperature due to Radiative Heating", - "comment": "Tendency of Air Temperature due to
Radiative Heating", - "dimensions": "longitude latitude plev27 time1", - "out_name": "tntr", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ua": { - "modeling_realm": "atmos", - "standard_name": "eastward_wind", - "units": "m s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Eastward Wind", - "comment": "", - "dimensions": "longitude latitude plev3 time1", - "out_name": "ua", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ua27": { - "modeling_realm": "atmos", - "standard_name": "eastward_wind", - "units": "m s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Eastward Wind", - "comment": "", - "dimensions": "longitude latitude plev27 time1", - "out_name": "ua", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "utendnogw27": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_eastward_wind_due_to_nonorographic_gravity_wave_drag", - "units": "m s-2", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "u-tendency nonorographic gravity wave drag", - "comment": "Tendency of the eastward wind by parameterized nonorographic gravity waves.", - "dimensions": "longitude latitude plev27 time1", - "out_name": "utendnogw", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "va": { - "modeling_realm": "atmos", - "standard_name": "northward_wind", - "units": "m s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Northward Wind", - "comment": "", - "dimensions": "longitude latitude plev3 time1", - "out_name": "va", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "va27": { - "modeling_realm": "atmos", - "standard_name": "northward_wind", - "units": "m s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Northward Wind", - "comment": "", - "dimensions": "longitude latitude plev27 time1", - "out_name": "va", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vtendnogw27": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_northward_wind_due_to_nonorographic_gravity_wave_drag", - "units": "m s-2", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "v-tendency nonorographic gravity wave drag", - "comment": "Tendency of the northward wind by parameterized nonorographic gravity waves. 
(Note that CF name tables only have a general northward tendency for all gravity waves, and we need it separated by type.)", - "dimensions": "longitude latitude plev27 time1", - "out_name": "vtendnogw", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wap": { - "modeling_realm": "atmos", - "standard_name": "lagrangian_tendency_of_air_pressure", - "units": "Pa s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "omega (=dp/dt)", - "comment": "Omega (vertical velocity in pressure coordinates, positive downwards)", - "dimensions": "longitude latitude plev3 time1", - "out_name": "wap", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wap27": { - "modeling_realm": "atmos", - "standard_name": "lagrangian_tendency_of_air_pressure", - "units": "Pa s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "omega (=dp/dt)", - "comment": "Omega (vertical velocity in pressure coordinates, positive downwards)", - "dimensions": "longitude latitude plev27 time1", - "out_name": "wap", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zg27": { - "modeling_realm": "atmos", - "standard_name": "geopotential_height", - "units": "m", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Geopotential Height", - "comment": "", - "dimensions": "longitude latitude plev27 time1", - "out_name": "zg", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_E1hrClimMon.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_E1hrClimMon.json deleted file mode 100644 index bcc0180038..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_E1hrClimMon.json +++ /dev/null @@ -1,103 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table E1hrClimMon", - "realm": "atmos", - "frequency": "1hrClimMon", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "0.017361", - "generic_levels": "", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "rlut": { - "modeling_realm": "atmos", - "standard_name": "toa_outgoing_longwave_flux", - "units": "W m-2", - "cell_methods": "area: mean time: mean within days time: mean over days", - "cell_measures": "area: areacella", - "long_name": "TOA Outgoing Longwave Radiation", - "comment": "at the top of the atmosphere (to be compared with satellite measurements)", - "dimensions": "longitude latitude time3", - "out_name": "rlut", - "type": "", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlutcs": { - "modeling_realm": "atmos", - "standard_name": "toa_outgoing_longwave_flux_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: mean time: mean within days time: mean over days", - "cell_measures": "area: areacella", - "long_name": "TOA Outgoing Clear-sky Longwave Radiation", - "comment": "", - "dimensions": "longitude latitude time3", - "out_name": "rlutcs", - "type": "", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - },
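Every CMOR table removed in this part of the diff follows the same two-part schema: a "Header" object with table-level metadata (table_id, frequency, approx_interval, ...) and a "variable_entry" object mapping a short variable name to its CF metadata, where "out_name" is the name actually written to the output file (so "ta27" writes out as "ta"). Below is a minimal sketch of reading such a table with the standard library; the path is illustrative and this is not ESMValTool's actual table loader:

```python
import json

# Illustrative path to one of the tables deleted in this diff.
TABLE = "esmvaltool/cmor/tables/cmip6/Tables/CMIP6_E1hrClimMon.json"

with open(TABLE, encoding="utf-8") as fh:
    table = json.load(fh)

header = table["Header"]
print(header["table_id"], header["frequency"], header["approx_interval"])

# Entries are keyed by short name; "out_name" is the on-disk variable name.
for name, entry in table["variable_entry"].items():
    print(f"{name}: {entry['standard_name']} [{entry['units']}],"
          f" positive={entry['positive'] or 'n/a'}")
```

The CMIP6_CV.json deletion at the start of this section ends the same way for global attributes: fields such as variant_label and realization_index are constrained by POSIX basic regular expressions. A hedged Python translation of the variant_label rule follows (the BRE-to-Python conversion is ours, not part of the CV; the original pattern is anchored only at the end, which re.search reproduces):

```python
import re

# CV pattern: r[[:digit:]]\{1,\}i[[:digit:]]\{1,\}p[[:digit:]]\{1,\}f[[:digit:]]\{1,\}$
VARIANT_LABEL = re.compile(r"r\d+i\d+p\d+f\d+$")

for label in ("r1i1p1f1", "r10i2p1f2", "x1i1p1f1"):
    print(label, "valid" if VARIANT_LABEL.search(label) else "invalid")
```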
- "rsdt": { - "modeling_realm": "atmos", - "standard_name": "toa_incoming_shortwave_flux", - "units": "W m-2", - "cell_methods": "area: mean time: mean within days time: mean over days", - "cell_measures": "area: areacella", - "long_name": "TOA Incident Shortwave Radiation", - "comment": "Shortwave radiation incident at the top of the atmosphere", - "dimensions": "longitude latitude time3", - "out_name": "rsdt", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsut": { - "modeling_realm": "atmos", - "standard_name": "toa_outgoing_shortwave_flux", - "units": "W m-2", - "cell_methods": "area: mean time: mean within days time: mean over days", - "cell_measures": "area: areacella", - "long_name": "Top-of-Atmosphere Outgoing Shortwave Radiation", - "comment": "at the top of the atmosphere", - "dimensions": "longitude latitude time3", - "out_name": "rsut", - "type": "", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsutcs": { - "modeling_realm": "atmos", - "standard_name": "toa_outgoing_shortwave_flux_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: mean time: mean within days time: mean over days", - "cell_measures": "area: areacella", - "long_name": "TOA Outgoing Clear-Sky Shortwave Radiation", - "comment": "Calculated in the absence of clouds.", - "dimensions": "longitude latitude time3", - "out_name": "rsutcs", - "type": "", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_E3hr.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_E3hr.json deleted file mode 100644 index 9c418f0829..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_E3hr.json +++ /dev/null @@ -1,358 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table E3hr", - "realm": "atmos", - "frequency": "3hr", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "0.125000", - "generic_levels": "alevel", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "clisccp": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "ISCCP Percentage Cloud Area", - "comment": "Percentage cloud cover in optical depth categories.", - "dimensions": "longitude latitude plev7c tau time", - "out_name": "clisccp", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clivi": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_cloud_ice_content", - "units": "kg m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Ice Water Path", - "comment": "mass of ice water in the column divided by the area of the column (not just the area of the cloudy portion of the column). 
Includes precipitating frozen hydrometeors ONLY if the precipitating hydrometeor affects the calculation of radiative transfer in model.", - "dimensions": "longitude latitude time", - "out_name": "clivi", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clwvi": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_cloud_condensed_water_content", - "units": "kg m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Condensed Water Path", - "comment": "Mass of condensed (liquid + ice) water in the column divided by the area of the column (not just the area of the cloudy portion of the column). Includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.", - "dimensions": "longitude latitude time", - "out_name": "clwvi", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "gpp": { - "modeling_realm": "land", - "standard_name": "gross_primary_productivity_of_biomass_expressed_as_carbon", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass Flux out of Atmosphere due to Gross Primary Production on Land", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "gpp", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prcsh": { - "modeling_realm": "atmos", - "standard_name": "shallow_convective_precipitation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Precipitation Flux from Shallow Convection", - "comment": "Convective precipitation from shallow convection", - "dimensions": "longitude latitude time", - "out_name": "prcsh", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prra": { - "modeling_realm": "atmos", - "standard_name": "rainfall_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Rainfall Flux where Ice Free Ocean over Sea", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "prra", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prrc": { - "modeling_realm": "atmos", - "standard_name": "convective_rainfall_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Convective Rainfall rate", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "prrc", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prsnc": { - "modeling_realm": "atmos", - "standard_name": "convective_snowfall_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Convective Snowfall Flux", - "comment": "convective precipitation of all forms of water in the solid phase.", - "dimensions": "longitude latitude time", - "out_name": "prsnc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prw": { - "modeling_realm": "atmos", -
"standard_name": "atmosphere_water_vapor_content", - "units": "kg m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Water Vapor Path", - "comment": "vertically integrated through the atmospheric column", - "dimensions": "longitude latitude time", - "out_name": "prw", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "psl": { - "modeling_realm": "atmos", - "standard_name": "air_pressure_at_sea_level", - "units": "Pa", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Sea Level Pressure", - "comment": "Sea Level Pressure", - "dimensions": "longitude latitude time", - "out_name": "psl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ra": { - "modeling_realm": "land", - "standard_name": "plant_respiration_carbon_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass Flux into Atmosphere due to Autotrophic (Plant) Respiration on Land", - "comment": "Carbon mass flux per unit area into atmosphere due to autotrophic respiration on land (respiration by producers) [see rh for heterotrophic production]", - "dimensions": "longitude latitude time", - "out_name": "ra", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rh": { - "modeling_realm": "land", - "standard_name": "heterotrophic_respiration_carbon_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass Flux into Atmosphere due to Heterotrophic Respiration on Land", - "comment": "Carbon mass flux per unit area into atmosphere due to heterotrophic respiration on land (respiration by consumers)", - "dimensions": "longitude latitude time", - "out_name": "rh", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlut": { - "modeling_realm": "atmos", - "standard_name": "toa_outgoing_longwave_flux", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "TOA Outgoing Longwave Radiatio", - "comment": "at the top of the atmosphere (to be compared with satellite measurements)", - "dimensions": "longitude latitude time", - "out_name": "rlut", - "type": "", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlutcs": { - "modeling_realm": "atmos", - "standard_name": "toa_outgoing_longwave_flux_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "TOA Outgoing Clear-sky Longwave Radiation", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "rlutcs", - "type": "", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsdt": { - "modeling_realm": "atmos", - "standard_name": "toa_incoming_shortwave_flux", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "TOA Incident Shortwave Radiation", - "comment": "Shortwave radiation incident at the top of the atmosphere", - "dimensions": "longitude latitude time", - "out_name": "rsdt", 
- "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsut": { - "modeling_realm": "atmos", - "standard_name": "toa_outgoing_shortwave_flux", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Top-of-Atmosphere Outgoing Shortwave Radiation", - "comment": "at the top of the atmosphere", - "dimensions": "longitude latitude time", - "out_name": "rsut", - "type": "", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsutcs": { - "modeling_realm": "atmos", - "standard_name": "toa_outgoing_shortwave_flux_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "TOA Outgoing Clear-Sky Shortwave Radiation", - "comment": "Calculated in the absence of clouds.", - "dimensions": "longitude latitude time", - "out_name": "rsutcs", - "type": "", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sfcWind": { - "modeling_realm": "atmos", - "standard_name": "wind_speed", - "units": "m s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Near-Surface Wind Speed", - "comment": "near-surface (usually, 10 meters) wind speed.", - "dimensions": "longitude latitude time height10m", - "out_name": "sfcWind", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "uas": { - "modeling_realm": "atmos", - "standard_name": "eastward_wind", - "units": "m s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Eastward Near-Surface Wind", - "comment": "Eastward component of the near-surface (usually, 10 meters) wind", - "dimensions": "longitude latitude time height10m", - "out_name": "uas", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vas": { - "modeling_realm": "atmos", - "standard_name": "northward_wind", - "units": "m s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Northward Near-Surface Wind", - "comment": "Northward component of the near surface wind", - "dimensions": "longitude latitude time height10m", - "out_name": "vas", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_E3hrPt.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_E3hrPt.json deleted file mode 100644 index 692c55224d..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_E3hrPt.json +++ /dev/null @@ -1,902 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table E3hrPt", - "realm": "atmos", - "frequency": "3hr", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "0.125000", - "generic_levels": "alevel", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "aerasymbnd": { - "modeling_realm": "atmos", - "standard_name": "band_aerosol_asymmetry_parameter", - "units": "1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Aerosol level asymmetry parameter for each band", - "comment": "", - 
"dimensions": "longitude latitude alevel spectband time1", - "out_name": "aerasymbnd", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "aeroptbnd": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_absorption_optical_thickness_due_to_ambient_aerosol_particles", - "units": "1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Aerosol level extinction optical depth for each band", - "comment": "", - "dimensions": "longitude latitude alevel spectband time1", - "out_name": "aeroptbnd", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "aerssabnd": { - "modeling_realm": "atmos", - "standard_name": "single_scattering_albedo_in_air_due_to_ambient_aerosol_particles", - "units": "1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Aerosol level single-scattering albedo for each band", - "comment": "", - "dimensions": "longitude latitude alevel spectband time1", - "out_name": "aerssabnd", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "albdiffbnd": { - "modeling_realm": "atmos", - "standard_name": "band_diffuse_albedo", - "units": "1.0", - "cell_methods": "time: point", - "cell_measures": "area: areacella", - "long_name": "Diffuse surface albedo for each band", - "comment": "", - "dimensions": "longitude latitude spectband time1", - "out_name": "albdiffbnd", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "albdirbnd": { - "modeling_realm": "atmos", - "standard_name": "band_direct_albedo", - "units": "1.0", - "cell_methods": "time: point", - "cell_measures": "area: areacella", - "long_name": "Direct surface albedo for each band", - "comment": "", - "dimensions": "longitude latitude spectband time1", - "out_name": "albdirbnd", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cfadDbze94": { - "modeling_realm": "atmos", - "standard_name": "histogram_of_equivalent_reflectivity_factor_over_height_above_reference_ellipsoid", - "units": "1.0", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "CloudSat Radar Reflectivity", - "comment": "CFAD (Cloud Frequency Altitude Diagrams) are frequency distributions of radar reflectivity (or lidar scattering ratio) as a function of altitude. The variable cfadDbze94 is defined as the simulated relative frequency of occurrence of radar reflectivity in sampling volumes defined by altitude bins. The radar is observing at a frequency of 94GHz.", - "dimensions": "longitude latitude alt40 dbze time1", - "out_name": "cfadDbze94", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cfadLidarsr532": { - "modeling_realm": "atmos", - "standard_name": "histogram_of_backscattering_ratio_over_height_above_reference_ellipsoid", - "units": "1.0", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "CALIPSO Scattering Ratio", - "comment": "CFAD (Cloud Frequency Altitude Diagrams) are frequency distributions of radar reflectivity (or lidar scattering ratio) as a function of altitude. 
The variable cfadLidarsr532 is defined as the simulated relative frequency of lidar scattering ratio in sampling volumes defined by altitude bins. The lidar is observing at a wavelength of 532nm.", - "dimensions": "longitude latitude alt40 scatratio time1", - "out_name": "cfadLidarsr532", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ch4": { - "modeling_realm": "atmos", - "standard_name": "mole_fraction_of_methane_in_air", - "units": "mol mol-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "CH4 volume mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time1", - "out_name": "ch4", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clcalipso": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "CALIPSO Cloud Fraction", - "comment": "Percentage cloud cover at CALIPSO standard heights.", - "dimensions": "longitude latitude alt40 time1", - "out_name": "clcalipso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clcalipso2": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "CALIPSO Cloud Fraction Undetected by CloudSat", - "comment": "Clouds detected by CALIPSO but below the detectability threshold of CloudSat", - "dimensions": "longitude latitude alt40 time1", - "out_name": "clcalipso2", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clhcalipso": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "CALIPSO High Level Cloud Fraction", - "comment": "Percentage cloud cover in layer centred on 220hPa", - "dimensions": "longitude latitude time1 p220", - "out_name": "clhcalipso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cllcalipso": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "CALIPSO Percentage Low Level Cloud", - "comment": "Percentage cloud cover in layer centred on 840hPa", - "dimensions": "longitude latitude time1 p840", - "out_name": "cllcalipso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clmcalipso": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "CALIPSO Mid Level Cloud Fraction", - "comment": "Percentage cloud cover in layer centred on 560hPa", - "dimensions": "longitude latitude time1 p560", - "out_name": "clmcalipso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - 
"clmisr": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Percentage Cloud Cover as Calculated by the MISR Simulator", - "comment": "Cloud percentage in spectral bands and layers as observed by the Multi-angle Imaging SpectroRadiometer (MISR) instrument.", - "dimensions": "longitude latitude alt16 tau time1", - "out_name": "clmisr", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cltcalipso": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction", - "units": "%", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "CALIPSO Total Cloud Fraction", - "comment": "", - "dimensions": "longitude latitude time1", - "out_name": "cltcalipso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "co2": { - "modeling_realm": "atmos", - "standard_name": "mole_fraction_of_carbon_dioxide_in_air", - "units": "mol mol-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "CO2 volume mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time1", - "out_name": "co2", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hus": { - "modeling_realm": "atmos", - "standard_name": "specific_humidity", - "units": "1.0", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Specific Humidity", - "comment": "", - "dimensions": "longitude latitude alevel time1", - "out_name": "hus", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hus7h": { - "modeling_realm": "atmos", - "standard_name": "specific_humidity", - "units": "1.0", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Specific Humidity", - "comment": "", - "dimensions": "longitude latitude plev7h time1", - "out_name": "hus", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "jpdftaureicemodis": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "MODIS Optical Thickness-Particle Size joint distribution, ice", - "comment": "", - "dimensions": "longitude latitude plev7c effectRadIc tau time1", - "out_name": "jpdftaureicemodis", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "jpdftaureliqmodis": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "MODIS Optical Thickness-Particle Size joint distribution, liquid", - "comment": "", - "dimensions": "longitude latitude plev7c effectRadLi tau time1", - "out_name": "jpdftaureliqmodis", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "n2o": { - "modeling_realm": "atmos", - "standard_name": 
"mole_fraction_of_nitrous_oxide_in_air", - "units": "mol mol-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "N2O volume mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time1", - "out_name": "n2o", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "o3": { - "modeling_realm": "atmos", - "standard_name": "mole_fraction_of_ozone_in_air", - "units": "mol mol-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Ozone volume mixing ratio", - "comment": "", - "dimensions": "longitude latitude alevel time1", - "out_name": "o3", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "parasolRefl": { - "modeling_realm": "atmos", - "standard_name": "toa_bidirectional_reflectance", - "units": "1.0", - "cell_methods": "area: mean (global) time: point", - "cell_measures": "area: areacella", - "long_name": "PARASOL Reflectance", - "comment": "Simulated reflectance from PARASOL as seen at the top of the atmosphere for 5 solar zenith angles. Valid only over ocean and for one viewing direction (viewing zenith angle of 30 degrees and relative azimuth angle 320 degrees).", - "dimensions": "longitude latitude sza5 time1", - "out_name": "parasolRefl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ps": { - "modeling_realm": "atmos", - "standard_name": "surface_air_pressure", - "units": "Pa", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Surface Pressure", - "comment": "surface pressure (not mean sea-level pressure), 2-D field to calculate the 3-D pressure field from hybrid coordinates", - "dimensions": "longitude latitude time1", - "out_name": "ps", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "psl": { - "modeling_realm": "atmos", - "standard_name": "air_pressure_at_sea_level", - "units": "Pa", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Sea Level Pressure", - "comment": "Sea Level Pressure", - "dimensions": "longitude latitude time1", - "out_name": "psl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsdcs": { - "modeling_realm": "atmos", - "standard_name": "downwelling_shortwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Downwelling Clear-Sky Shortwave Radiation", - "comment": "Downwelling clear-sky shortwave radiation (includes the fluxes at the surface and top-of-atmosphere)", - "dimensions": "longitude latitude alevel time1", - "out_name": "rsdcs", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsdcsaf": { - "modeling_realm": "atmos", - "standard_name": "downwelling_shortwave_flux_assuming_clean_clear_sky", - "units": "W m-2", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Downwelling Clean-Clear-Sky Shortwave Radiation at each level", - "comment": "Calculated in the absence of aerosols and clouds (following Ghan). 
This requires a double-call in the radiation code with precisely the same meteorology.", - "dimensions": "longitude latitude alevel time1", - "out_name": "rsdcsaf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsdcsafbnd": { - "modeling_realm": "atmos", - "standard_name": "band_downwelling_shortwave_flux_assuming_clean_clear_sky", - "units": "W m-2", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Downwelling Clear-Sky, Aerosol-Free, Shortwave Radiation in Bands", - "comment": "Calculated in the absence of aerosols and clouds (following Ghan). This requires a double-call in the radiation code with precisely the same meteorology.", - "dimensions": "longitude latitude alevel spectband time1", - "out_name": "rsdcsafbnd", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsdcsbnd": { - "modeling_realm": "atmos", - "standard_name": "band_downwelling_shortwave_flux_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Downwelling Clear-Sky Shortwave Radiation at each level for each band", - "comment": "Calculated with aerosols but without clouds. This is a standard clear-sky calculation", - "dimensions": "longitude latitude alevel spectband time1", - "out_name": "rsdcsbnd", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsdscsaf": { - "modeling_realm": "atmos", - "standard_name": "surface_downwelling_shortwave_flux_in_air_assuming_clean_clear_sky", - "units": "W m-2", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Surface Downwelling Clear-Sky, Aerosol-Free Shortwave Radiation", - "comment": "Calculated in the absence of aerosols and clouds.", - "dimensions": "longitude latitude time1", - "out_name": "rsdscsaf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsdscsafbnd": { - "modeling_realm": "atmos", - "standard_name": "surface_downwelling_shortwave_flux_in_air_assuming_clean_clear_sky", - "units": "W m-2", - "cell_methods": "time: point", - "cell_measures": "area: areacella", - "long_name": "Surface Downwelling Clear-Sky, Aerosol-Free Shortwave Radiation in Bands", - "comment": "Calculated in the absence of aerosols and clouds, following Ghan (2013, ACP). This requires a double-call in the radiation code with precisely the same meteorology.", - "dimensions": "longitude latitude spectband time1", - "out_name": "rsdscsafbnd", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsdscsbnd": { - "modeling_realm": "atmos", - "standard_name": "surface_downwelling_shortwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "time: point", - "cell_measures": "area: areacella", - "long_name": "Surface Downwelling Clear-Sky Shortwave Radiation for each band", - "comment": "Calculated with aerosols but without clouds. 
This is a standard clear-sky calculation", - "dimensions": "longitude latitude spectband time1", - "out_name": "rsdscsbnd", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsucs": { - "modeling_realm": "atmos", - "standard_name": "upwelling_shortwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Upwelling Clear-Sky Shortwave Radiation", - "comment": "Upwelling clear-sky shortwave radiation (includes the fluxes at the surface and TOA)", - "dimensions": "longitude latitude alevel time1", - "out_name": "rsucs", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsucsaf": { - "modeling_realm": "atmos", - "standard_name": "upwelling_shortwave_flux_assuming_clean_clear_sky", - "units": "W m-2", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Upwelling Clean-Clear-Sky Shortwave Radiation at each level", - "comment": "Calculated in the absence of aerosols and clouds (following Ghan). This requires a double-call in the radiation code with precisely the same meteorology.", - "dimensions": "longitude latitude alevel time1", - "out_name": "rsucsaf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsucsafbnd": { - "modeling_realm": "atmos", - "standard_name": "band_upwelling_shortwave_flux_assuming_clean_clear_sky", - "units": "W m-2", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Upwelling Clear-Sky, Aerosol-Free Shortwave Radiation in Bands", - "comment": "Calculated in the absence of aerosols and clouds (following Ghan). This requires a double-call in the radiation code with precisely the same meteorology.", - "dimensions": "longitude latitude alevel spectband time1", - "out_name": "rsucsafbnd", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsucsbnd": { - "modeling_realm": "atmos", - "standard_name": "band_upwelling_shortwave_flux_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Upwelling Clear-Sky Shortwave Radiation at each level for each band", - "comment": "Calculated with aerosols but without clouds. 
This is a standard clear-sky calculation", - "dimensions": "longitude latitude alevel spectband time1", - "out_name": "rsucsbnd", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsuscs": { - "modeling_realm": "atmos", - "standard_name": "surface_upwelling_shortwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Surface Upwelling Clear-Sky Shortwave Radiation", - "comment": "Surface Upwelling Clear-sky Shortwave Radiation", - "dimensions": "longitude latitude time1", - "out_name": "rsuscs", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsuscsaf": { - "modeling_realm": "atmos", - "standard_name": "surface_upwelling_shortwave_flux_in_air_assuming_clean_clear_sky", - "units": "W m-2", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Surface Upwelling Clean Clear-Sky Shortwave Radiation", - "comment": "Surface Upwelling Clear-sky, Aerosol Free Shortwave Radiation", - "dimensions": "longitude latitude time1", - "out_name": "rsuscsaf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsuscsafbnd": { - "modeling_realm": "atmos", - "standard_name": "surface_upwelling_shortwave_flux_in_air_assuming_clean_clear_sky", - "units": "W m-2", - "cell_methods": "time: point", - "cell_measures": "area: areacella", - "long_name": "Surface Upwelling Clear-Sky, Aerosol-Free Shortwave Radiation in Bands", - "comment": "Calculated in the absence of aerosols and clouds, following Ghan (ACP, 2013). This requires a double-call in the radiation code with precisely the same meteorology.", - "dimensions": "longitude latitude spectband time1", - "out_name": "rsuscsafbnd", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsuscsbnd": { - "modeling_realm": "atmos", - "standard_name": "surface_upwelling_shortwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "time: point", - "cell_measures": "area: areacella", - "long_name": "Surface Upwelling Clear-Sky Shortwave Radiation for each band", - "comment": "Calculated with aerosols but without clouds. 
This is a standard clear-sky calculation", - "dimensions": "longitude latitude spectband time1", - "out_name": "rsuscsbnd", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsutcsaf": { - "modeling_realm": "aerosol", - "standard_name": "toa_outgoing_shortwave_flux_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "toa outgoing clear-sky shortwave radiation", - "comment": "Flux corresponding to rsutcs resulting from aerosol-free call to radiation, following Ghan (ACP, 2013)", - "dimensions": "longitude latitude time1", - "out_name": "rsutcsaf", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsutcsafbnd": { - "modeling_realm": "atmos", - "standard_name": "band_toa_outgoing_shortwave_flux_assuming_clean_clear_sky", - "units": "W m-2", - "cell_methods": "time: point", - "cell_measures": "area: areacella", - "long_name": "TOA Outgoing Clear-Sky, Aerosol-Free Shortwave Radiation in Bands", - "comment": "Calculated in the absence of aerosols and clouds, following Ghan (2013, ACP). This requires a double-call in the radiation code with precisely the same meteorology.", - "dimensions": "longitude latitude spectband time1", - "out_name": "rsutcsafbnd", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsutcsbnd": { - "modeling_realm": "atmos", - "standard_name": "toa_outgoing_shortwave_flux_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "time: point", - "cell_measures": "area: areacella", - "long_name": "TOA Outgoing Clear-Sky Shortwave Radiation for each band", - "comment": "Calculated with aerosols but without clouds.
This is a standard clear-sky calculation", - "dimensions": "longitude latitude spectband time1", - "out_name": "rsutcsbnd", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "solbnd": { - "modeling_realm": "atmos", - "standard_name": "band_solar_insolation", - "units": "W m-2", - "cell_methods": "time: point", - "cell_measures": "area: areacella", - "long_name": "Top-of-Atmosphere Solar Insolation for each band", - "comment": "", - "dimensions": "longitude latitude spectband time1", - "out_name": "solbnd", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sza": { - "modeling_realm": "atmos", - "standard_name": "solar_zenith_angle", - "units": "degree", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Solar Zenith Angle", - "comment": "The angle between the line of sight to the sun and the local vertical", - "dimensions": "longitude latitude time1", - "out_name": "sza", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ta7h": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Air Temperature", - "comment": "Air Temperature", - "dimensions": "longitude latitude plev7h time1", - "out_name": "ta", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ts": { - "modeling_realm": "atmos", - "standard_name": "surface_temperature", - "units": "K", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Surface Temperature", - "comment": "Temperature of the lower boundary of the atmosphere", - "dimensions": "longitude latitude time1", - "out_name": "ts", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ua7h": { - "modeling_realm": "atmos", - "standard_name": "eastward_wind", - "units": "m s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Eastward Wind", - "comment": "", - "dimensions": "longitude latitude plev7h time1", - "out_name": "ua", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ua850": { - "modeling_realm": "atmos", - "standard_name": "eastward_wind", - "units": "m s-1", - "cell_methods": "time: point", - "cell_measures": "area: areacella", - "long_name": "Eastward Wind", - "comment": "", - "dimensions": "longitude latitude time1 p850", - "out_name": "ua", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "va7h": { - "modeling_realm": "atmos", - "standard_name": "northward_wind", - "units": "m s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Northward Wind", - "comment": "", - "dimensions": "longitude latitude plev7h time1", - "out_name": "va", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "va850": { - "modeling_realm": "atmos", - "standard_name": "northward_wind", - "units": "m s-1", - "cell_methods": "time: point", - "cell_measures": "area: areacella", - 
"long_name": "Northward Wind", - "comment": "", - "dimensions": "longitude latitude time1 p850", - "out_name": "va", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wap7h": { - "modeling_realm": "atmos", - "standard_name": "lagrangian_tendency_of_air_pressure", - "units": "Pa s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "omega (=dp/dt)", - "comment": "Omega (vertical velocity in pressure coordinates, positive downwards)", - "dimensions": "longitude latitude plev7h time1", - "out_name": "wap", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_E6hrZ.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_E6hrZ.json deleted file mode 100644 index e7eaf5890e..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_E6hrZ.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table E6hrZ", - "realm": "atmos", - "frequency": "6hr", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "0.250000", - "generic_levels": "alevel", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "ps": { - "modeling_realm": "atmos", - "standard_name": "surface_air_pressure", - "units": "Pa", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Pressure", - "comment": "surface pressure (not mean sea-level pressure), 2-D field to calculate the 3-D pressure field from hybrid coordinates", - "dimensions": "latitude time", - "out_name": "ps", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zmlwaero": { - "modeling_realm": "atmos", - "standard_name": "longwave_heating_rate_due_to_volcanic_aerosols", - "units": "K s-1", - "cell_methods": "longitude: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Zonal mean longwave heating rate due to volcanic aerosols", - "comment": "longwave heating rate due to volcanic aerosols to be diagnosed through double radiation call, zonal average values required", - "dimensions": "latitude alevel time1", - "out_name": "zmlwaero", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zmswaero": { - "modeling_realm": "atmos", - "standard_name": "shortwave_heating_rate_due_to_volcanic_aerosols", - "units": "K s-1", - "cell_methods": "longitude: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Zonal mean shortwave heating rate due to volcanic aerosols", - "comment": "shortwave heating rate due to volcanic aerosols to be diagnosed through double radiation call, zonal average values required", - "dimensions": "latitude alevel time1", - "out_name": "zmswaero", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Eday.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Eday.json deleted file mode 100644 index fa7cc11c81..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Eday.json +++ /dev/null @@ -1,2364 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": 
"Table Eday", - "realm": "aerosol", - "frequency": "day", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "1.00000", - "generic_levels": "", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "agesno": { - "modeling_realm": "landIce land", - "standard_name": "age_of_surface_snow", - "units": "day", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Snow Age", - "comment": "Age of Snow (when computing the time-mean here, the time samples, weighted by the mass of snow on the land portion of the grid cell, are accumulated and then divided by the sum of the weights. Reported as missing data in regions free of snow on land.", - "dimensions": "longitude latitude time", - "out_name": "agesno", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "albc": { - "modeling_realm": "land", - "standard_name": "canopy_albedo", - "units": "1.0", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Canopy Albedo", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "albc", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "albs": { - "modeling_realm": "aerosol", - "standard_name": "surface_albedo", - "units": "1.0", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "planetary albedo", - "comment": "Grid cell average albedo for all wavelengths.", - "dimensions": "longitude latitude time", - "out_name": "albs", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "albsn": { - "modeling_realm": "land", - "standard_name": "snow_and_ice_albedo", - "units": "1.0", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Snow Albedo", - "comment": "Albedo of the snow-covered surface, averaged over the grid cell.", - "dimensions": "longitude latitude time", - "out_name": "albsn", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "aod550volso4": { - "modeling_realm": "atmos", - "standard_name": "aerosol_optical_depth_due_to_stratospheric_volcanic_aerosols", - "units": "1e-09", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Aerosol optical depth at 550 nm due to stratospheric volcanic aerosols", - "comment": "aerosol optical depth at 550 nm due to stratospheric volcanic aerosols", - "dimensions": "longitude latitude time", - "out_name": "aod550volso4", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ares": { - "modeling_realm": "land", - "standard_name": "aerodynamic_resistance", - "units": "s m-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Aerodynamic resistance", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "ares", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ccldncl": { - "modeling_realm": "atmos", - "standard_name": 
"number_concentration_of_convective_cloud_liquid_water_particles_in_air_at_liquid_water_cloud_top", - "units": "m-3", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Cloud Droplet Number Concentration of Convective Cloud Tops", - "comment": "Droplets are liquid only. Report concentration 'as seen from space' over convective liquid cloudy portion of grid cell. This is the value from uppermost model layer with liquid cloud or, if available, it is better to sum over all liquid cloud tops, no matter where they occur, as long as they are seen from the top of the atmosphere. Weight by total liquid cloud top fraction of (as seen from TOA) each time sample when computing monthly mean.", - "dimensions": "longitude latitude time", - "out_name": "ccldncl", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cldnci": { - "modeling_realm": "atmos", - "standard_name": "number_concentration_of_ice_crystals_in_air_at_ice_cloud_top", - "units": "m-3", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Ice Crystal Number Concentration of Cloud Tops", - "comment": "Concentration 'as seen from space' over ice-cloud portion of grid cell. This is the value from uppermost model layer with ice cloud or, if available, it is the sum over all ice cloud tops, no matter where they occur, as long as they are seen from the top of the atmosphere. Weight by total ice cloud top fraction (as seen from TOA) of each time sample when computing monthly mean.", - "dimensions": "longitude latitude time", - "out_name": "cldnci", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cldnvi": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_number_content_of_cloud_droplets", - "units": "m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Column Integrated Cloud Droplet Number", - "comment": "Droplets are liquid only. Values are weighted by liquid cloud fraction in each layer when vertically integrating, and for monthly means the samples are weighted by total liquid cloud fraction (as seen from TOA).", - "dimensions": "longitude latitude time", - "out_name": "cldnvi", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clivic": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_convective_cloud_ice_content", - "units": "kg m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Convective Ice Water Path", - "comment": "calculate mass of convective ice water in the column divided by the area of the column (not just the area of the cloudy portion of the column). 
This includes precipitating frozen hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.", - "dimensions": "longitude latitude time", - "out_name": "clivic", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clt": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction", - "units": "%", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Cloud Fraction", - "comment": "Total cloud area fraction for the whole atmospheric column, as seen from the surface or the top of the atmosphere. Includes both large-scale and convective cloud.", - "dimensions": "longitude latitude time", - "out_name": "clt", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clwvic": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_convective_cloud_condensed_water_content", - "units": "kg m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Convective Condensed Water Path", - "comment": "calculate mass of convective condensed (liquid + ice) water in the column divided by the area of the column (not just the area of the cloudy portion of the column). This includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.", - "dimensions": "longitude latitude time", - "out_name": "clwvic", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cnc": { - "modeling_realm": "land", - "standard_name": "vegetation_area_fraction", - "units": "%", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Canopy covered area percentage", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "cnc", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "concbb": { - "modeling_realm": "atmos", - "standard_name": "mass_concentration_of_biomass_burning_dry_aerosol_in_air", - "units": "kg m-3", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Concentration of Biomass Burning Aerosol", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "concbb", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "conccmcn": { - "modeling_realm": "atmos", - "standard_name": "number_concentration_of_coarse_mode_ambient_aerosol_in_air", - "units": "m-3", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Number Concentration Coarse Mode Aerosol", - "comment": "includes all particles with diameter larger than 1 micron", - "dimensions": "longitude latitude time", - "out_name": "conccmcn", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "conccn": { - "modeling_realm": "atmos", - "standard_name": "number_concentration_of_ambient_aerosol_in_air", - "units": "m-3", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Aerosol Number Concentration", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "conccn", - "type": "", - "positive": "", 
- "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "concnmcn": { - "modeling_realm": "atmos", - "standard_name": "number_concentration_of_nucleation_mode_ambient_aerosol_in_air", - "units": "m-3", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Number Concentration of Nucleation Mode Aerosol", - "comment": "includes all particles with diameter smaller than 3 nm", - "dimensions": "longitude latitude time", - "out_name": "concnmcn", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "concpoa": { - "modeling_realm": "atmos", - "standard_name": "mass_concentration_of_primary_particulate_organic_matter_dry_aerosol_in_air", - "units": "kg m-3", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Concentration of Dry Aerosol Primary Organic Matter", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "concpoa", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cw": { - "modeling_realm": "land", - "standard_name": "canopy_water_amount", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Canopy Water Storage", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "cw", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dcw": { - "modeling_realm": "land", - "standard_name": "change_over_time_in_canopy_water_amount", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Change in Interception Storage", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "dcw", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dfr": { - "modeling_realm": "land", - "standard_name": "depth_of_frozen_soil", - "units": "m", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Frozen Soil Depth", - "comment": "Depth from surface to the first zero degree isotherm. Above this isotherm T < 0o, and below this line T > 0o.", - "dimensions": "longitude latitude time", - "out_name": "dfr", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dgw": { - "modeling_realm": "land", - "standard_name": "change_over_time_in_groundwater", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Change in Groundwater", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "dgw", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dmlt": { - "modeling_realm": "land", - "standard_name": "depth_of_subsurface_melting", - "units": "m", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Depth to soil thaw", - "comment": "Depth from surface to the zero degree isotherm. 
Above this isotherm T > 0o, and below this line T < 0o.", - "dimensions": "longitude latitude time", - "out_name": "dmlt", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "drivw": { - "modeling_realm": "land", - "standard_name": "change_over_time_in_river_water_amount", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Change in River Storage", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "drivw", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dslw": { - "modeling_realm": "land", - "standard_name": "change_over_time_in_water_content_of_soil_layer", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Change in soil moisture", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "dslw", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dsn": { - "modeling_realm": "land", - "standard_name": "change_over_time_in_surface_snow_and_ice_amount", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Change in snow water equivalent", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "dsn", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dsw": { - "modeling_realm": "land", - "standard_name": "change_over_time_in_surface_water_amount", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Change in Surface Water Storage", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "dsw", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dtes": { - "modeling_realm": "land", - "standard_name": "change_over_time_in_thermal_energy_content_of_surface", - "units": "J m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Change in surface heat storage", - "comment": "Change in heat storage over the soil layer and the vegetation for which the energy balance is calculated, accumulated over the sampling time interval.", - "dimensions": "longitude latitude time", - "out_name": "dtes", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dtesn": { - "modeling_realm": "land", - "standard_name": "change_over_time_in_thermal_energy_content_of_surface_snow_and_ice", - "units": "J m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Change in snow/ice cold content", - "comment": "Change in cold content over the snow layer for which the energy balance is calculated, accumulated over the sampling time interval. 
This should also include the energy contained in the liquid water in the snow pack.", - "dimensions": "longitude latitude time", - "out_name": "dtesn", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ec": { - "modeling_realm": "land", - "standard_name": "liquid_water_evaporation_flux_from_canopy", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Interception evaporation", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "ec", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ec550aer": { - "modeling_realm": "aerosol", - "standard_name": "volume_extinction_coefficient_in_air_due_to_ambient_aerosol_particles", - "units": "m-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Aerosol extinction coefficient", - "comment": "Aerosol Extinction @550nm", - "dimensions": "longitude latitude time", - "out_name": "ec550aer", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "eow": { - "modeling_realm": "land", - "standard_name": "liquid_water_evaporation_flux_from_open_water", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Open Water Evaporation", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "eow", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "es": { - "modeling_realm": "land", - "standard_name": "liquid_water_evaporation_flux_from_soil", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Bare soil evaporation", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "es", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "esn": { - "modeling_realm": "land", - "standard_name": "liquid_water_evaporation_flux_from_surface_snow", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Snow Evaporation", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "esn", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "et": { - "modeling_realm": "land", - "standard_name": "surface_evapotranspiration", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Evapotranspiration", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "et", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfdsl": { - "modeling_realm": "atmos", - "standard_name": "surface_downward_heat_flux_in_air", - "units": "W m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Downward Heat Flux at Land Surface", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "hfdsl", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - 
"ok_max_mean_abs": "" - }, - "hfdsn": { - "modeling_realm": "landIce land", - "standard_name": "surface_downward_heat_flux_in_snow", - "units": "W m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Downward Heat Flux into Snow Where Land over Land", - "comment": "the net downward heat flux from the atmosphere into the snow that lies on land divided by the land area in the grid cell; reported as 0.0 for snow-free land regions or where the land fraction is 0.", - "dimensions": "longitude latitude time", - "out_name": "hfdsn", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfdsnb": { - "modeling_realm": "atmos", - "standard_name": "missing", - "units": "W m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Downward heat flux at snow base", - "comment": "Heat flux from snow into the ice or land under the snow.", - "dimensions": "longitude latitude time", - "out_name": "hfdsnb", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfls": { - "modeling_realm": "atmos", - "standard_name": "surface_upward_latent_heat_flux", - "units": "W m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Upward Latent Heat Flux", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "hfls", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfmlt": { - "modeling_realm": "atmos", - "standard_name": "surface_snow_and_ice_melt_heat_flux", - "units": "W m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Energy of fusion", - "comment": "Energy consumed or released during liquid/solid phase changes.", - "dimensions": "longitude latitude time", - "out_name": "hfmlt", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfrs": { - "modeling_realm": "atmos", - "standard_name": "temperature_flux_due_to_rainfall_expressed_as_heat_flux_onto_snow_and_ice", - "units": "W m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Heat transferred to snowpack by rainfall", - "comment": "Heat transferred to a snow cover by rain..", - "dimensions": "longitude latitude time", - "out_name": "hfrs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfsbl": { - "modeling_realm": "atmos", - "standard_name": "surface_snow_and_ice_sublimation_heat_flux", - "units": "W m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Energy of sublimation", - "comment": "Energy consumed or released during vapor/solid phase changes.", - "dimensions": "longitude latitude time", - "out_name": "hfsbl", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfss": { - "modeling_realm": "atmos", - "standard_name": "surface_upward_sensible_heat_flux", - "units": "W m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Upward Sensible Heat Flux", - 
"comment": "", - "dimensions": "longitude latitude time", - "out_name": "hfss", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hursminCrop": { - "modeling_realm": "atmos", - "standard_name": "relative_humidity", - "units": "%", - "cell_methods": "area: mean time: minimum", - "cell_measures": "area: areacella", - "long_name": "Daily Minimum Near-Surface Relative Humidity over Crop Tile", - "comment": "minimum near-surface (usually, 2 meter) relative humidity (add cell_method attribute 'time: min')", - "dimensions": "longitude latitude time height2m", - "out_name": "hursminCrop", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hus": { - "modeling_realm": "atmos", - "standard_name": "specific_humidity", - "units": "1.0", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Specific Humidity", - "comment": "", - "dimensions": "longitude latitude plev19 time", - "out_name": "hus", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hus850": { - "modeling_realm": "atmos", - "standard_name": "specific_humidity", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Specific Humidity", - "comment": "", - "dimensions": "longitude latitude time p850", - "out_name": "hus850", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "inc": { - "modeling_realm": "atmos", - "standard_name": "number_concentration_of_ice_crystals_in_air", - "units": "m-3", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Ice Crystal Number Concentration", - "comment": "Ice Crystal number concentration in ice clouds", - "dimensions": "longitude latitude time", - "out_name": "inc", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "jpdftaureicemodis": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "MODIS Optical Thickness-Particle Size joint distribution, ice", - "comment": "", - "dimensions": "longitude latitude plev7c effectRadLi tau time", - "out_name": "jpdftaureicemodis", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "jpdftaureliqmodis": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "MODIS Optical Thickness-Particle Size joint distribution, liquid", - "comment": "", - "dimensions": "longitude latitude plev7c effectRadLi tau time", - "out_name": "jpdftaureliqmodis", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "lai": { - "modeling_realm": "land", - "standard_name": "leaf_area_index", - "units": "1.0", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Leaf Area Index", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "lai", - "type": "", - "positive": "", - "valid_min": "", 
- "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "loadbc": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_mass_content_of_black_carbon_dry_aerosol", - "units": "kg m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Load of Black Carbon Aerosol", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "loadbc", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "loaddust": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_mass_content_of_dust_dry_aerosol", - "units": "kg m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Load of Dust", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "loaddust", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "loadnh4": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_mass_content_of_ammonium_dry_aerosol", - "units": "kg m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Load of NH4", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "loadnh4", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "loadno3": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_mass_content_of_nitrate_dry_aerosol", - "units": "kg m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Load of NO3", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "loadno3", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "loadoa": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol", - "units": "kg m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Load of Dry Aerosol Organic Matter", - "comment": "atmosphere dry organic content: This is the vertically integrated sum of atmosphere_primary_organic_content and atmosphere_secondary_organic_content (see next two table entries).", - "dimensions": "longitude latitude time", - "out_name": "loadoa", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "loadpoa": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol", - "units": "kg m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Load of Dry Aerosol Primary Organic Matter", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "loadpoa", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "loadso4": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_mass_content_of_sulfate_dry_aerosol", - "units": "kg m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Load of SO4", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "loadso4", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "loadsoa": { - "modeling_realm": 
"atmos", - "standard_name": "atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol", - "units": "kg m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Load of Dry Aerosol Secondary Organic Matter", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "loadsoa", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "loadss": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_mass_content_of_seasalt_dry_aerosol", - "units": "kg m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Load of Seasalt", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "loadss", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "lwsnl": { - "modeling_realm": "landIce land", - "standard_name": "liquid_water_content_of_snow_layer", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Liquid Water Content of Snow Layer", - "comment": "The total mass of liquid water contained interstitially within the whole depth of the snow layer of the land portion of a grid cell divided by the area of the land portion of the cell.", - "dimensions": "longitude latitude time", - "out_name": "lwsnl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mlotst": { - "modeling_realm": "ocean", - "standard_name": "ocean_mixed_layer_thickness_defined_by_sigma_t", - "units": "m", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Ocean Mixed Layer Thickness Defined by Sigma T", - "comment": "Sigma T is potential density referenced to ocean surface.", - "dimensions": "longitude latitude time", - "out_name": "mlotst", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrfsofr": { - "modeling_realm": "land", - "standard_name": "mass_fraction_of_frozen_water_in_soil_moisture", - "units": "1.0", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Average layer fraction of frozen moisture", - "comment": "Fraction of soil moisture mass in the solid phase in each user-defined soil layer (3D variable)", - "dimensions": "longitude latitude sdepth time", - "out_name": "mrfsofr", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrlqso": { - "modeling_realm": "land", - "standard_name": "mass_fraction_of_unfrozen_water_in_soil_moisture", - "units": "1.0", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Average layer fraction of liquid moisture", - "comment": "Fraction of soil moisture mass in the liquid phase in each user-defined soil layer (3D variable)", - "dimensions": "longitude latitude sdepth time", - "out_name": "mrlqso", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrlsl": { - "modeling_realm": "land", - "standard_name": "moisture_content_of_soil_layer", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Water 
Content of Soil Layer", - "comment": "The mass of water in all phases, including ice, in soil layers. Report as missing for grid cells with no land.", - "dimensions": "longitude latitude sdepth time", - "out_name": "mrlsl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrrob": { - "modeling_realm": "land", - "standard_name": "subsurface_runoff_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Subsurface runoff", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "mrrob", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrros": { - "modeling_realm": "land", - "standard_name": "surface_runoff_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Runoff", - "comment": "The total surface run off leaving the land portion of the grid cell (excluding drainage through the base of the soil model).", - "dimensions": "longitude latitude time", - "out_name": "mrros", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrsfl": { - "modeling_realm": "land", - "standard_name": "frozen_moisture_content_of_soil_layer", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Frozen water content of soil layer", - "comment": "in each soil layer, the mass of water in ice phase. Reported as 'missing' for grid cells occupied entirely by 'sea'", - "dimensions": "longitude latitude sdepth time", - "out_name": "mrsfl", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrsll": { - "modeling_realm": "land", - "standard_name": "liquid_moisture_content_of_soil_layer", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Liquid water content of soil layer", - "comment": "in each soil layer, the mass of water in liquid phase. Reported as 'missing' for grid cells occupied entirely by 'sea'", - "dimensions": "longitude latitude sdepth time", - "out_name": "mrsll", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrsol": { - "modeling_realm": "land", - "standard_name": "moisture_content_of_soil_layer", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total water content of soil layer", - "comment": "in each soil layer, the mass of water in all phases, including ice. 
Reported as 'missing' for grid cells occupied entirely by 'sea'", - "dimensions": "longitude latitude sdepth time", - "out_name": "mrsol", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrsow": { - "modeling_realm": "land", - "standard_name": "volume_fraction_of_condensed_water_in_soil_at_field_capacity", - "units": "1.0", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Soil Wetness", - "comment": "Vertically integrated soil moisture divided by maximum allowable soil moisture above wilting point.", - "dimensions": "longitude latitude time", - "out_name": "mrsow", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nudgincsm": { - "modeling_realm": "land", - "standard_name": "nudging_increment_in_water_content_of_soil_layer", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Nudging Increment of Water in Soil Moisture", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "nudgincsm", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nudgincswe": { - "modeling_realm": "land", - "standard_name": "nudging_increment_in_surface_snow_and_ice_amount", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Nudging Increment of Water in Snow", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "nudgincswe", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "parasolRefl": { - "modeling_realm": "atmos", - "standard_name": "toa_bidirectional_reflectance", - "units": "1.0", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacella", - "long_name": "PARASOL Reflectance", - "comment": "Simulated reflectance from PARASOL as seen at the top of the atmosphere for 5 solar zenith angles.
Valid only over ocean and for one viewing direction (viewing zenith angle of 30 degrees and relative azimuth angle 320 degrees).", - "dimensions": "longitude latitude sza5 time", - "out_name": "parasolRefl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pflw": { - "modeling_realm": "landIce land", - "standard_name": "liquid_water_content_of_permafrost_layer", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Liquid Water Content of Permafrost Layer", - "comment": "*where land over land*, i.e., this is the total mass of liquid water contained within the permafrost layer within the land portion of a grid cell divided by the area of the land portion of the cell.", - "dimensions": "longitude latitude time", - "out_name": "pflw", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "potet": { - "modeling_realm": "land", - "standard_name": "water_potential_evapotranspiration_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Potential Evapotranspiration", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "potet", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prCrop": { - "modeling_realm": "atmos", - "standard_name": "precipitation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Precipitation over Crop Tile", - "comment": "includes both liquid and solid phases", - "dimensions": "longitude latitude time", - "out_name": "prCrop", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prhmax": { - "modeling_realm": "atmos", - "standard_name": "precipitation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Maximum Hourly Precipitation Rate", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "prhmax", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prra": { - "modeling_realm": "atmos", - "standard_name": "rainfall_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Rainfall Flux where Ice Free Ocean over Sea", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "prra", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prrc": { - "modeling_realm": "atmos", - "standard_name": "convective_rainfall_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Convective Rainfall rate", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "prrc", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prrsn": { - "modeling_realm": "atmos", - "standard_name": "mass_fraction_of_rainfall_onto_snow", - "units": "1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - 
"long_name": "Fraction of rainfall on snow.", - "comment": "The fraction of the grid averaged rainfall which falls on the snow pack", - "dimensions": "longitude latitude time", - "out_name": "prrsn", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prsnc": { - "modeling_realm": "atmos", - "standard_name": "convective_snowfall_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Convective Snowfall Flux", - "comment": "convective precipitation of all forms of water in the solid phase.", - "dimensions": "longitude latitude time", - "out_name": "prsnc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prsnsn": { - "modeling_realm": "atmos", - "standard_name": "mass_fraction_of_snowfall_onto_snow", - "units": "1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Fraction of snowfall on snow.", - "comment": "The fraction of the snowfall which falls on the snow pack", - "dimensions": "longitude latitude time", - "out_name": "prsnsn", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prveg": { - "modeling_realm": "land", - "standard_name": "precipitation_flux_onto_canopy", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Precipitation onto Canopy", - "comment": "The precipitation flux that is intercepted by the vegetation canopy (if present in model) before reaching the ground.", - "dimensions": "longitude latitude time", - "out_name": "prveg", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prw": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_water_vapor_content", - "units": "kg m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Water Vapor Path", - "comment": "vertically integrated through the atmospheric column", - "dimensions": "longitude latitude time", - "out_name": "prw", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "qgwr": { - "modeling_realm": "land", - "standard_name": "water_flux_from_soil_layer_to_groundwater", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Groundwater recharge from soil layer", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "qgwr", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "reffcclwtop": { - "modeling_realm": "atmos", - "standard_name": "effective_radius_of_convective_cloud_liquid_water_particle_at_liquid_water_cloud_top", - "units": "m", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Cloud-Top Effective Droplet Radius In Convective Cloud", - "comment": "Droplets are liquid only. This is the effective radius 'as seen from space' over convective liquid cloudy portion of grid cell. 
This is the value from uppermost model layer with liquid cloud or, if available, or for some models it is the sum over all liquid cloud tops, no matter where they occur, as long as they are seen from the top of the atmosphere. Reported values are weighted by total liquid cloud top fraction of (as seen from TOA) each time sample when computing monthly mean.daily data, separated to large-scale clouds, convective clouds. If any of the cloud is from more than one process (i.e. shallow convection), please provide them separately.", - "dimensions": "longitude latitude time", - "out_name": "reffcclwtop", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "reffclwc": { - "modeling_realm": "atmos", - "standard_name": "effective_radius_of_convective_cloud_liquid_water_particle", - "units": "m", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Hydrometeor Effective Radius of Convective Cloud Liquid Water", - "comment": "Droplets are liquid. The effective radius is defined as the ratio of the third moment over the second moment of the particle size distribution and the time-mean should be calculated, weighting the individual samples by the cloudy fraction of the grid cell.", - "dimensions": "longitude latitude time", - "out_name": "reffclwc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "reffclws": { - "modeling_realm": "atmos", - "standard_name": "effective_radius_of_stratiform_cloud_liquid_water_particle", - "units": "m", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Hydrometeor Effective Radius of Stratiform Cloud Liquid Water", - "comment": "Droplets are liquid. The effective radius is defined as the ratio of the third moment over the second moment of the particle size distribution and the time-mean should be calculated, weighting the individual samples by the cloudy fraction of the grid cell.", - "dimensions": "longitude latitude time", - "out_name": "reffclws", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "reffsclwtop": { - "modeling_realm": "atmos", - "standard_name": "effective_radius_of_stratiform_cloud_liquid_water_particle_at_liquid_water_cloud_top", - "units": "m", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Cloud-Top Effective Droplet Radius In Stratiform Cloud", - "comment": "Droplets are liquid only. This is the effective radius 'as seen from space' over liquid stratiform cloudy portion of grid cell. This is the value from uppermost model layer with liquid cloud or, if available, or for some models it is the sum over all liquid cloud tops, no matter where they occur, as long as they are seen from the top of the atmosphere. Reported values are weighted by total liquid cloud top fraction of (as seen from TOA) each time sample when computing monthly mean.daily data, separated to large-scale clouds, convective clouds. If any of the cloud is from more than one process (i.e. 
shallow convection), please provide them separately.", - "dimensions": "longitude latitude time", - "out_name": "reffsclwtop", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rivi": { - "modeling_realm": "land", - "standard_name": "water_flux_from_upstream", - "units": "m3 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "River Inflow", - "comment": "Inflow of River Water into Cell", - "dimensions": "longitude latitude time", - "out_name": "rivi", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rivo": { - "modeling_realm": "land", - "standard_name": "water_flux_to_downstream", - "units": "m3 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "River Discharge", - "comment": "Outflow of River Water from Cell", - "dimensions": "longitude latitude time", - "out_name": "rivo", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rls": { - "modeling_realm": "atmos", - "standard_name": "surface_net_downward_longwave_flux", - "units": "W m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Net Longwave Surface Radiation", - "comment": "Net longwave surface radiation", - "dimensions": "longitude latitude time", - "out_name": "rls", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsdscsdiff": { - "modeling_realm": "atmos", - "standard_name": "surface_diffuse_downwelling_shortwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Diffuse Downwelling Clear Sky Shortwave Radiation", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "rsdscsdiff", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsdsdiff": { - "modeling_realm": "atmos", - "standard_name": "surface_diffuse_downwelling_shortwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Diffuse Downwelling Shortwave Radiation", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "rsdsdiff", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rss": { - "modeling_realm": "atmos", - "standard_name": "surface_net_downward_shortwave_flux", - "units": "W m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Net Shortwave Surface Radiation", - "comment": "Net downward shortwave radiation at the surface", - "dimensions": "longitude latitude time", - "out_name": "rss", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rzwc": { - "modeling_realm": "land", - "standard_name": "water_content_of_root_zone", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Root zone soil moisture", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "rzwc", - "type": "", - 
"positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sbl": { - "modeling_realm": "landIce", - "standard_name": "surface_snow_and_ice_sublimation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Snow and Ice Sublimation Flux", - "comment": "The snow and ice sublimation flux is the loss of snow and ice mass per unit area from the surface resulting from their direct conversion to water vapor that enters the atmosphere.", - "dimensions": "longitude latitude time", - "out_name": "sbl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "scldncl": { - "modeling_realm": "atmos", - "standard_name": "number_concentration_of_stratiform_cloud_liquid_water_particles_in_air_at_liquid_water_cloud_top", - "units": "m-3", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Cloud Droplet Number Concentration of Stratiform Cloud Tops", - "comment": "Droplets are liquid only. Report concentration 'as seen from space' over stratiform liquid cloudy portion of grid cell. This is the value from uppermost model layer with liquid cloud or, if available, it is better to sum over all liquid cloud tops, no matter where they occur, as long as they are seen from the top of the atmosphere. Weight by total liquid cloud top fraction of (as seen from TOA) each time sample when computing monthly mean.", - "dimensions": "longitude latitude time", - "out_name": "scldncl", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "slbnosn": { - "modeling_realm": "atmos", - "standard_name": "sublimation_amount_assuming_no_snow", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Sublimation of the snow free area", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "slbnosn", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "snd": { - "modeling_realm": "landIce land", - "standard_name": "surface_snow_thickness", - "units": "m", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Snow Depth", - "comment": "where land over land, this is computed as the mean thickness of snow in the land portion of the grid cell (averaging over the entire land portion, including the snow-free fraction). 
Reported as 0.0 where the land fraction is 0.", - "dimensions": "longitude latitude time", - "out_name": "snd", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "snm": { - "modeling_realm": "landIce land", - "standard_name": "surface_snow_melt_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Snow Melt", - "comment": "The total surface snow melt rate on the land portion of the grid cell divided by the land area in the grid cell; report as zero for snow-free land regions and missing where there is no land.", - "dimensions": "longitude latitude time", - "out_name": "snm", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "snmsl": { - "modeling_realm": "atmos", - "standard_name": "surface_snow_melt_flux_into_soil_layer", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Water flowing out of snowpack", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "snmsl", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "snrefr": { - "modeling_realm": "atmos", - "standard_name": "surface_snow_and_ice_refreezing_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Re-freezing of water in the snow", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "snrefr", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "snwc": { - "modeling_realm": "atmos", - "standard_name": "canopy_snow_amount", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "SWE intercepted by the vegetation", - "comment": "Total water mass of the snowpack (liquid or frozen), averaged over a grid cell and intercepted by the canopy.", - "dimensions": "longitude latitude time", - "out_name": "snwc", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sootsn": { - "modeling_realm": "landIce land", - "standard_name": "soot_content_of_surface_snow", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Snow Soot Content", - "comment": "the entire land portion of the grid cell is considered, with snow soot content set to 0.0 in regions free of snow.", - "dimensions": "longitude latitude time", - "out_name": "sootsn", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sw": { - "modeling_realm": "land", - "standard_name": "surface_water_amount_assuming_no_snow", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Water Storage", - "comment": "Total liquid water storage, other than soil, snow or interception storage (i.e.
lakes, river channel or depression storage).", - "dimensions": "longitude latitude time", - "out_name": "sw", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "t20d": { - "modeling_realm": "ocean", - "standard_name": "depth_of_isosurface_of_sea_water_potential_temperature", - "units": "m", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "20C isotherm depth", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "t20d", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ta": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Air Temperature", - "comment": "Air Temperature", - "dimensions": "longitude latitude plev19 time", - "out_name": "ta", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ta500": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Air Temperature", - "comment": "Temperature on the 500 hPa surface", - "dimensions": "longitude latitude time p500", - "out_name": "ta500", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ta850": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Air Temperature", - "comment": "Air temperature at 850hPa", - "dimensions": "longitude latitude time p850", - "out_name": "ta850", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tasmaxCrop": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: mean time: maximum", - "cell_measures": "area: areacella", - "long_name": "Daily Maximum Near-Surface Air Temperature over Crop Tile", - "comment": "maximum near-surface (usually, 2 meter) air temperature (add cell_method attribute 'time: max')", - "dimensions": "longitude latitude time height2m", - "out_name": "tasmaxCrop", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tasminCrop": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: mean time: minimum", - "cell_measures": "area: areacella", - "long_name": "Daily Minimum Near-Surface Air Temperature over Crop Tile", - "comment": "minimum near-surface (usually, 2 meter) air temperature (add cell_method attribute 'time: min')", - "dimensions": "longitude latitude time height2m", - "out_name": "tasminCrop", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tau": { - "modeling_realm": "atmos", - "standard_name": "surface_downward_stress", - "units": "N m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Momentum flux", - "comment": "module of the momentum lost by the atmosphere to the surface.", - "dimensions": "longitude latitude time", - "out_name": "tau", - "type": "", - 
"positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tauu": { - "modeling_realm": "atmos", - "standard_name": "surface_downward_eastward_stress", - "units": "Pa", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Downward Eastward Wind Stress", - "comment": "Downward eastward wind stress at the surface", - "dimensions": "longitude latitude time", - "out_name": "tauu", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tauupbl": { - "modeling_realm": "atmos", - "standard_name": "surface_downward_eastward_stress_due_to_planetary_boundary_layer", - "units": "Pa", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "eastward surface stress from planetary boundary layer scheme", - "comment": "The downward eastward stress associated with the models parameterization of the plantary boundary layer. (This request is related to a WGNE effort to understand how models parameterize the surface stresses.)", - "dimensions": "longitude latitude time", - "out_name": "tauupbl", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tauv": { - "modeling_realm": "atmos", - "standard_name": "surface_downward_northward_stress", - "units": "Pa", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Downward Northward Wind Stress", - "comment": "Downward northward wind stress at the surface", - "dimensions": "longitude latitude time", - "out_name": "tauv", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tauvpbl": { - "modeling_realm": "atmos", - "standard_name": "surface_downward_northward_stress_due_to_planetary_boundary_layer", - "units": "Pa", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "northward surface stress from planetary boundary layer scheme", - "comment": "The downward northward stress associated with the models parameterization of the plantary boundary layer. 
(This request is related to a WGNE effort to understand how models parameterize the surface stresses.)", - "dimensions": "longitude latitude time", - "out_name": "tauvpbl", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tcs": { - "modeling_realm": "land", - "standard_name": "canopy_temperature", - "units": "K", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Vegetation Canopy Temperature", - "comment": "Vegetation temperature, averaged over all vegetation types", - "dimensions": "longitude latitude time", - "out_name": "tcs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tdps": { - "modeling_realm": "atmos", - "standard_name": "dew_point_temperature", - "units": "K", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "2m dewpoint temperature", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "tdps", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tgs": { - "modeling_realm": "land", - "standard_name": "surface_temperature", - "units": "K", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Temperature of bare soil", - "comment": "Surface bare soil temperature", - "dimensions": "longitude latitude time", - "out_name": "tgs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tpf": { - "modeling_realm": "landIce land", - "standard_name": "permafrost_layer_thickness", - "units": "m", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Permafrost Layer Thickness", - "comment": "The mean thickness of the permafrost layer in the land portion of the grid cell. 
Reported as zero in permafrost-free regions.", - "dimensions": "longitude latitude time", - "out_name": "tpf", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tr": { - "modeling_realm": "atmos", - "standard_name": "surface_temperature", - "units": "K", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Radiative Temperature", - "comment": "Effective radiative surface temperature, averaged over the grid cell", - "dimensions": "longitude latitude time", - "out_name": "tr", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tran": { - "modeling_realm": "land", - "standard_name": "transpiration_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Transpiration", - "comment": "Transpiration (may include dew formation as a negative flux).", - "dimensions": "longitude latitude time", - "out_name": "tran", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ts": { - "modeling_realm": "atmos", - "standard_name": "surface_temperature", - "units": "K", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Temperature", - "comment": "Temperature of the lower boundary of the atmosphere", - "dimensions": "longitude latitude time", - "out_name": "ts", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tsl": { - "modeling_realm": "land", - "standard_name": "soil_temperature", - "units": "K", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Temperature of Soil", - "comment": "Temperature of each soil layer. Reported as missing for grid cells with no land.", - "dimensions": "longitude latitude sdepth time", - "out_name": "tsl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tsnl": { - "modeling_realm": "land", - "standard_name": "snow_temperature", - "units": "K", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Temperature profile in the snow", - "comment": "Temperature in the snow pack present in the grid-cell. 
3D variable for multi-layer snow schemes.", - "dimensions": "longitude latitude time", - "out_name": "tsnl", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tsns": { - "modeling_realm": "atmos", - "standard_name": "surface_temperature", - "units": "K", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Snow Surface Temperature", - "comment": "Temperature of the snow surface as it interacts with the atmosphere, averaged over a grid cell.", - "dimensions": "longitude latitude time", - "out_name": "tsns", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tws": { - "modeling_realm": "land", - "standard_name": "canopy_and_surface_and_subsurface_water_amount", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Terrestrial Water Storage", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "tws", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ua": { - "modeling_realm": "atmos", - "standard_name": "eastward_wind", - "units": "m s-1", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Eastward Wind", - "comment": "", - "dimensions": "longitude latitude plev19 time", - "out_name": "ua", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ua200": { - "modeling_realm": "atmos", - "standard_name": "eastward_wind", - "units": "m s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Eastward Wind at 200hPa", - "comment": "Zonal wind (positive eastwards) at 200hPa", - "dimensions": "longitude latitude time p200", - "out_name": "ua200", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ua850": { - "modeling_realm": "atmos", - "standard_name": "eastward_wind", - "units": "m s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Eastward Wind at 850 hPa", - "comment": "Zonal wind on the 850 hPa surface", - "dimensions": "longitude latitude time p850", - "out_name": "ua850", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "va": { - "modeling_realm": "atmos", - "standard_name": "northward_wind", - "units": "m s-1", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Northward Wind", - "comment": "", - "dimensions": "longitude latitude plev19 time", - "out_name": "va", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "va200": { - "modeling_realm": "atmos", - "standard_name": "northward_wind", - "units": "m s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Northward Wind", - "comment": "Northward component of the wind", - "dimensions": "longitude latitude time p200", - "out_name": "va200", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "va850": { - "modeling_realm": "atmos", - "standard_name": "northward_wind", - "units": "m s-1", - "cell_methods": 
"area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Northward Wind", - "comment": "Northward component of the wind at 850hPa", - "dimensions": "longitude latitude time p850", - "out_name": "va850", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wap": { - "modeling_realm": "atmos", - "standard_name": "lagrangian_tendency_of_air_pressure", - "units": "Pa s-1", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "omega (=dp/dt)", - "comment": "Omega (vertical velocity in pressure coordinates, positive downwards)", - "dimensions": "longitude latitude plev19 time", - "out_name": "wap", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wtd": { - "modeling_realm": "land", - "standard_name": "depth_of_soil_moisture_saturation", - "units": "m", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Water table depth", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "wtd", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zg": { - "modeling_realm": "atmos", - "standard_name": "geopotential_height", - "units": "m", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Geopotential Height", - "comment": "", - "dimensions": "longitude latitude plev19 time", - "out_name": "zg", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zmla": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_boundary_layer_thickness", - "units": "m", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Height of Boundary Layer", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "zmla", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_EdayZ.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_EdayZ.json deleted file mode 100644 index 6b4a253394..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_EdayZ.json +++ /dev/null @@ -1,273 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table EdayZ", - "realm": "atmos", - "frequency": "day", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "1.00000", - "generic_levels": "", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "epfy": { - "modeling_realm": "atmos", - "standard_name": "northward_eliassen_palm_flux_in_air", - "units": "m3 s-2", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Northward Component of the Eliassen-Palm Flux", - "comment": "Transformed Eulerian Mean Diagnostics Meridional component Fy of Eliassen-Palm (EP) flux (Fy, Fz) derived from 6hr or higher frequency fields (use daily fields or 12 hr fields if the 6 hr are not available). 
Please use the definitions given by equation 3.5.3a of Andrews, Holton and Leovy textbook, but scaled by density to have units m3 s-2.", - "dimensions": "latitude plev39 time", - "out_name": "epfy", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "epfz": { - "modeling_realm": "atmos", - "standard_name": "upward_eliassen_palm_flux_in_air", - "units": "m3 s-2", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Upward Component of the Eliassen-Palm Flux", - "comment": "Transformed Eulerian Mean Diagnostics Vertical component Fz of the Eliassen-Palm (EP) flux (Fy, Fz) derived from 6hr or higher frequency fields (use daily fields or 12 hr fields if the 6 hr are not available). Please use the definitions given by equation 3.5.3b of Andrews, Holton and Leovy textbook, but scaled by density to have units m3 s-2.", - "dimensions": "latitude plev39 time", - "out_name": "epfz", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hus": { - "modeling_realm": "atmos", - "standard_name": "specific_humidity", - "units": "1.0", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Specific Humidity", - "comment": "", - "dimensions": "latitude plev19 time", - "out_name": "hus", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "psitem": { - "modeling_realm": "atmos", - "standard_name": "meridional_streamfunction_transformed_eulerian_mean", - "units": "kg s-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Transformed Eulerian Mean mass streamfunction", - "comment": "Residual mass streamfunction, computed from vstar and integrated from the top of the atmosphere (on the native model grid). Reference: Andrews et al (1987): Middle Atmospheric Dynamics.
Accademic Press.", - "dimensions": "latitude plev39 time", - "out_name": "psitem", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ta": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Air Temperature", - "comment": "Air Temperature", - "dimensions": "latitude plev19 time", - "out_name": "ta", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ua": { - "modeling_realm": "atmos", - "standard_name": "eastward_wind", - "units": "m s-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Eastward Wind", - "comment": "", - "dimensions": "latitude plev39 time", - "out_name": "ua", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "utendepfd": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_eastward_wind_due_to_eliassen_palm_flux_divergence", - "units": "m s-2", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Tendency of eastward wind due to Eliassen-Palm Flux divergence", - "comment": "Tendency of the zonal mean zonal wind due to the divergence of the Eliassen-Palm flux.", - "dimensions": "latitude plev39 time", - "out_name": "utendepfd", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "utendnogw": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_eastward_wind_due_to_nonorographic_gravity_wave_drag", - "units": "m s-2", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "u-tendency nonorographic gravity wave drag", - "comment": "Tendency of the eastward wind by parameterized nonorographic gravity waves.", - "dimensions": "latitude plev39 time", - "out_name": "utendnogw", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "utendogw": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_eastward_wind_due_to_orographic_gravity_wave_drag", - "units": "m s-2", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "u-tendency orographic gravity wave drag", - "comment": "Tendency of the eastward wind by parameterized orographic gravity waves.", - "dimensions": "latitude plev39 time", - "out_name": "utendogw", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "utendvtem": { - "modeling_realm": "atmos", - "standard_name": "u-tendency_by_wstar_advection", - "units": "m s-1 d-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Rendency of eastward wind due to TEM northward advection and Coriolis term", - "comment": "Tendency of zonally averaged eastward wind, by the residual upward wind advection (on the native model grid). Reference: Andrews et al (1987): Middle Atmospheric Dynamics. 
Accademic Press.", - "dimensions": "latitude plev39 time", - "out_name": "utendvtem", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "utendwtem": { - "modeling_realm": "atmos", - "standard_name": "u-tendency_by_vstar_advection", - "units": "m s-1 d-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Rendency of eastward wind due to TEM upward advection", - "comment": "Tendency of zonally averaged eastward wind, by the residual northward wind advection (on the native model grid). Reference: Andrews et al (1987): Middle Atmospheric Dynamics. Accademic Press.", - "dimensions": "latitude plev39 time", - "out_name": "utendwtem", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "va": { - "modeling_realm": "atmos", - "standard_name": "northward_wind", - "units": "m s-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Northward Wind", - "comment": "", - "dimensions": "latitude plev19 time", - "out_name": "va", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vtem": { - "modeling_realm": "atmos", - "standard_name": "northward_transformed_eulerian_mean_air_velocity", - "units": "m s-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Transformed Eulerian Mean northward wind", - "comment": "Transformed Eulerian Mean Diagnostics v*, meridional component of the residual meridional circulation (v*, w*) derived from 6 hr or higher frequency data fields (use instantaneous daily fields or 12 hr fields if the 6 hr data are not available).", - "dimensions": "latitude plev39 time", - "out_name": "vtem", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wtem": { - "modeling_realm": "atmos", - "standard_name": "unset", - "units": "m s-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Transformed Eulerian Mean upward wind", - "comment": "Transformed Eulerian Mean Diagnostics w*, meridional component of the residual meridional circulation (v*, w*) derived from 6 hr or higher frequency data fields (use instantaneous daily fields or 12 hr fields if the 6 hr data are not available). 
Scale height: 6950 m", - "dimensions": "latitude plev39 time", - "out_name": "wtem", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zg": { - "modeling_realm": "atmos", - "standard_name": "geopotential_height", - "units": "m", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Geopotential Height", - "comment": "", - "dimensions": "latitude plev19 time", - "out_name": "zg", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Efx.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Efx.json deleted file mode 100644 index 7b41669e8b..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Efx.json +++ /dev/null @@ -1,290 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table Efx", - "realm": "land", - "frequency": "fx", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "0.00000", - "generic_levels": "alevel olevel", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "clayfrac": { - "modeling_realm": "atmos", - "standard_name": "missing", - "units": "1.0", - "cell_methods": "area: mean where land", - "cell_measures": "area: areacella", - "long_name": "Clay Fraction", - "comment": "", - "dimensions": "longitude latitude sdepth", - "out_name": "clayfrac", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fldcapacity": { - "modeling_realm": "land", - "standard_name": "missing", - "units": "%", - "cell_methods": "area: mean where land", - "cell_measures": "area: areacella", - "long_name": "Field Capacity", - "comment": "", - "dimensions": "longitude latitude sdepth", - "out_name": "fldcapacity", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ksat": { - "modeling_realm": "land", - "standard_name": "missing", - "units": "1e-6 m s-1", - "cell_methods": "area: mean where land", - "cell_measures": "area: areacella", - "long_name": "Saturated Hydraulic Conductivity", - "comment": "", - "dimensions": "longitude latitude sdepth", - "out_name": "ksat", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ps": { - "modeling_realm": "atmos", - "standard_name": "surface_air_pressure", - "units": "Pa", - "cell_methods": "area: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Pressure", - "comment": "surface pressure (not mean sea-level pressure), 2-D field to calculate the 3-D pressure field from hybrid coordinates", - "dimensions": "longitude latitude", - "out_name": "ps", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rld": { - "modeling_realm": "atmos", - "standard_name": "downwelling_longwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: point", - "cell_measures": "", - "long_name": "Downwelling Longwave Radiation", - "comment": "Downwelling Longwave Radiation (includes the fluxes at the surface and TOA)", - "dimensions": "alevel spectband", - "out_name": "rld", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": 
"", - "ok_max_mean_abs": "" - }, - "rlu": { - "modeling_realm": "atmos", - "standard_name": "upwelling_longwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: point", - "cell_measures": "", - "long_name": "Upwelling Longwave Radiation", - "comment": "Upwelling longwave radiation (includes the fluxes at the surface and TOA)", - "dimensions": "alevel spectband", - "out_name": "rlu", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rootdsl": { - "modeling_realm": "land", - "standard_name": "missing", - "units": "kg m-3", - "cell_methods": "area: mean where land", - "cell_measures": "area: areacella", - "long_name": "Root Distribution", - "comment": "", - "dimensions": "longitude latitude sdepth", - "out_name": "rootdsl", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsd": { - "modeling_realm": "atmos", - "standard_name": "downwelling_shortwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: point", - "cell_measures": "", - "long_name": "Downwelling Shortwave Radiation", - "comment": "Downwelling shortwave radiation (includes the fluxes at the surface and top-of-atmosphere)", - "dimensions": "alevel spectband", - "out_name": "rsd", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsu": { - "modeling_realm": "atmos", - "standard_name": "upwelling_shortwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: point", - "cell_measures": "", - "long_name": "Upwelling Shortwave Radiation", - "comment": "Upwelling shortwave radiation (includes also the fluxes at the surface and top of atmosphere)", - "dimensions": "alevel spectband", - "out_name": "rsu", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sandfrac": { - "modeling_realm": "atmos", - "standard_name": "missing", - "units": "1.0", - "cell_methods": "area: mean where land", - "cell_measures": "area: areacella", - "long_name": "Sand Fraction", - "comment": "", - "dimensions": "longitude latitude sdepth", - "out_name": "sandfrac", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sftflf": { - "modeling_realm": "landIce", - "standard_name": "floating_ice_shelf_area_fraction", - "units": "%", - "cell_methods": "area: mean", - "cell_measures": "area: areacella", - "long_name": "Floating Ice Shelf Area Fraction", - "comment": "Fraction of grid cell covered by floating ice shelf, the component of the ice sheet that is flowing over seawater", - "dimensions": "longitude latitude typefis", - "out_name": "sftflf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sftgrf": { - "modeling_realm": "landIce", - "standard_name": "grounded_ice_sheet_area_fraction", - "units": "%", - "cell_methods": "area: mean", - "cell_measures": "area: areacella", - "long_name": "Grounded Ice Sheet Area Fraction", - "comment": "Fraction of grid cell covered by grounded ice sheet", - "dimensions": "longitude latitude typegis", - "out_name": "sftgrf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siltfrac": { - "modeling_realm": "atmos", - "standard_name": "missing", - "units": "1.0", - 
"cell_methods": "area: mean where land", - "cell_measures": "area: areacella", - "long_name": "Silt Fraction", - "comment": "", - "dimensions": "longitude latitude sdepth", - "out_name": "siltfrac", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "slthick": { - "modeling_realm": "land", - "standard_name": "missing", - "units": "m", - "cell_methods": "area: mean where land", - "cell_measures": "area: areacella", - "long_name": "Thickness of Soil Layers", - "comment": "", - "dimensions": "longitude latitude sdepth", - "out_name": "slthick", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vegHeight": { - "modeling_realm": "land", - "standard_name": "canopy_height", - "units": "m", - "cell_methods": "area: mean where land", - "cell_measures": "area: areacella", - "long_name": "canopy height", - "comment": "", - "dimensions": "longitude latitude", - "out_name": "vegHeight", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wilt": { - "modeling_realm": "land", - "standard_name": "missing", - "units": "%", - "cell_methods": "area: mean where land", - "cell_measures": "area: areacella", - "long_name": "Wilting Point", - "comment": "", - "dimensions": "longitude latitude sdepth", - "out_name": "wilt", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Emon.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Emon.json deleted file mode 100644 index 63b8168e25..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Emon.json +++ /dev/null @@ -1,6087 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table Emon", - "realm": "atmos", - "frequency": "mon", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "30.00000", - "generic_levels": "alevel olevel", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "H2p": { - "modeling_realm": "atmos", - "standard_name": "missing", - "units": "", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "2H in total precipitation", - "comment": "Roche - LSCE", - "dimensions": "longitude latitude time", - "out_name": "H2p", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "H2s": { - "modeling_realm": "atmos", - "standard_name": "missing", - "units": "", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "2H in solid precipitation", - "comment": "Roche - LSCE", - "dimensions": "longitude latitude time", - "out_name": "H2s", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "H2wv": { - "modeling_realm": "atmos", - "standard_name": "missing", - "units": "", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "H2 in water vapor", - "comment": "Roche - LSCE", - "dimensions": "longitude latitude alevel time", - "out_name": "H2wv", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "O17p": { - "modeling_realm": "atmos", - 
"standard_name": "missing", - "units": "", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "17O in total precipitation", - "comment": "Roche - LSCE", - "dimensions": "longitude latitude time", - "out_name": "O17p", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "O17s": { - "modeling_realm": "atmos", - "standard_name": "missing", - "units": "", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "17O in solid precipitation", - "comment": "Roche - LSCE", - "dimensions": "longitude latitude time", - "out_name": "O17s", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "O17wv": { - "modeling_realm": "atmos", - "standard_name": "missing", - "units": "", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "O17 in water vapor", - "comment": "Roche - LSCE", - "dimensions": "longitude latitude alevel time", - "out_name": "O17wv", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "O18p": { - "modeling_realm": "atmos", - "standard_name": "missing", - "units": "", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "18O in total precipitation", - "comment": "Roche - LSCE", - "dimensions": "longitude latitude time", - "out_name": "O18p", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "O18s": { - "modeling_realm": "atmos", - "standard_name": "missing", - "units": "", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "18O in solid precipitation", - "comment": "Roche - LSCE", - "dimensions": "longitude latitude time", - "out_name": "O18s", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "O18sw": { - "modeling_realm": "ocean", - "standard_name": "missing", - "units": "", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "O18 in sea water", - "comment": "Roche - LSCE", - "dimensions": "longitude latitude olevel time", - "out_name": "O18sw", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "O18wv": { - "modeling_realm": "atmos", - "standard_name": "missing", - "units": "", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "O18 in water vapor", - "comment": "Roche - LSCE", - "dimensions": "longitude latitude alevel time", - "out_name": "O18wv", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "c13Land": { - "modeling_realm": "land", - "standard_name": "total_land_c13", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Mass of 13C in all terrestrial carbon pools", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "c13Land", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "c13Litter": { - "modeling_realm": "land", - "standard_name": "litter_c13_content", - "units": "kg m-2", - "cell_methods": 
"area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Mass of 13C in Litter Pool", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "c13Litter", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "c13Soil": { - "modeling_realm": "land", - "standard_name": "soil_c13_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Mass of 13C in Soil Pool", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "c13Soil", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "c13Veg": { - "modeling_realm": "land", - "standard_name": "vegetation_c13_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Mass of 13C in Vegetation", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "c13Veg", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "c14Land": { - "modeling_realm": "land", - "standard_name": "total_land_c14", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Mass of 14C in all terrestrial carbon pools", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "c14Land", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "c14Litter": { - "modeling_realm": "land", - "standard_name": "litter_c14_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Mass of 14C in Litter Pool", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "c14Litter", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "c14Soil": { - "modeling_realm": "land", - "standard_name": "soil_c14_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Mass of 14C in Soil Pool", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "c14Soil", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "c14Veg": { - "modeling_realm": "land", - "standard_name": "vegetation_c14_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Mass of 14C in Vegetation", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "c14Veg", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cLand": { - "modeling_realm": "land", - "standard_name": "total_land_carbon", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Carbon in All Terrestrial Carbon Pools", - "comment": "Report missing data over ocean grid cells. 
For fractional land report value averaged over the land fraction.", - "dimensions": "longitude latitude time", - "out_name": "cLand", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cLitterCwd": { - "modeling_realm": "land", - "standard_name": "litter_wood_debris_carbon_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass in Coarse Woody Debris", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "cLitterCwd", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cLitterGrass": { - "modeling_realm": "land", - "standard_name": "litter_carbon_content", - "units": "kg m-2", - "cell_methods": "area: time: mean where natural_grasses (comment: mask=grassFrac)", - "cell_measures": "area: areacella", - "long_name": "Carbon mass in litter on grass tiles", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "cLitterGrass", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cLitterShrub": { - "modeling_realm": "land", - "standard_name": "litter_carbon_content", - "units": "kg m-2", - "cell_methods": "area: time: mean where shrubs (comment: mask=shrubFrac)", - "cell_measures": "area: areacella", - "long_name": "Carbon mass in litter on shrub tiles", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "cLitterShrub", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cLitterSubSurf": { - "modeling_realm": "land", - "standard_name": "subsurface_litter_carbon_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass in Below-Ground Litter", - "comment": "sub-surface litter pool fed by root inputs.", - "dimensions": "longitude latitude time", - "out_name": "cLitterSubSurf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cLitterSurf": { - "modeling_realm": "land", - "standard_name": "surface_litter_carbon_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass in Above-Ground Litter", - "comment": "Surface or near-surface litter pool fed by leaf and above-ground litterfall", - "dimensions": "longitude latitude time", - "out_name": "cLitterSurf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cLitterTree": { - "modeling_realm": "land", - "standard_name": "litter_carbon_content", - "units": "kg m-2", - "cell_methods": "area: time: mean where trees (comment: mask=treeFrac)", - "cell_measures": "area: areacella", - "long_name": "Carbon mass in litter on tree tiles", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "cLitterTree", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cMisc": { - "modeling_realm": "land", - "standard_name": "miscellaneous_living_matter_carbon_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": 
"Carbon Mass in Other Living Compartments on Land", - "comment": "e.g., labile, fruits, reserves, etc.", - "dimensions": "longitude latitude time", - "out_name": "cMisc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cOther": { - "modeling_realm": "land", - "standard_name": "other_vegegtation_components_carbon_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass in Vegetation Components other than Leaves, Stems and Roots", - "comment": "E.g. fruits, seeds, etc.", - "dimensions": "longitude latitude time", - "out_name": "cOther", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cSoil": { - "modeling_realm": "land", - "standard_name": "soil_carbon_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass in Soil Pool", - "comment": "Carbon mass in the full depth of the soil model.", - "dimensions": "longitude latitude time", - "out_name": "cSoil", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cSoilAbove1m": { - "modeling_realm": "land", - "standard_name": "soil_carbon_content_above_1m_depth", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon mass in soil pool above 1m depth", - "comment": "Report missing data over ocean grid cells. For fractional land report value averaged over the land fraction.", - "dimensions": "longitude latitude time", - "out_name": "cSoilAbove1m", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cSoilBelow1m": { - "modeling_realm": "land", - "standard_name": "soil_carbon_content_below_1m_depth", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass in Soil Pool below 1m Depth", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "cSoilBelow1m", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cSoilGrass": { - "modeling_realm": "land", - "standard_name": "soil_carbon_content", - "units": "kg m-2", - "cell_methods": "area: time: mean where natural_grasses (comment: mask=grassFrac)", - "cell_measures": "area: areacella", - "long_name": "Carbon mass in soil on grass tiles", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "cSoilGrass", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cSoilLevels": { - "modeling_realm": "land", - "standard_name": "soil_carbon_content_on_model_levels", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon mass in each model soil level (summed over all soil carbon pools in that level)", - "comment": "for models with vertically discretised soil carbon, report total soil carbon for each level", - "dimensions": "longitude latitude time", - "out_name": "cSoilLevels", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - 
"cSoilPools": { - "modeling_realm": "land", - "standard_name": "soil_carbon_content_by_pool", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon mass in each model soil pool (summed over vertical levels)", - "comment": "for models with multiple soil carbon pools, report each pool here. If models also have vertical discretaisation these should be aggregated", - "dimensions": "longitude latitude time", - "out_name": "cSoilPools", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cSoilShrub": { - "modeling_realm": "land", - "standard_name": "soil_carbon_content", - "units": "kg m-2", - "cell_methods": "area: time: mean where shrubs (comment: mask=shrubFrac)", - "cell_measures": "area: areacella", - "long_name": "Carbon mass in soil on shrub tiles", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "cSoilShrub", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cSoilTree": { - "modeling_realm": "land", - "standard_name": "soil_carbon_content", - "units": "kg m-2", - "cell_methods": "area: time: mean where trees (comment: mask=treeFrac)", - "cell_measures": "area: areacella", - "long_name": "Carbon mass in soil on tree tiles", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "cSoilTree", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cStem": { - "modeling_realm": "land", - "standard_name": "stem_carbon_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass in Stem", - "comment": "including sapwood and hardwood.", - "dimensions": "longitude latitude time", - "out_name": "cStem", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cTotFireLut": { - "modeling_realm": "land", - "standard_name": "missing", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where landuse", - "cell_measures": "area: areacella", - "long_name": "Total carbon loss from natural and managed fire on land use tile, including deforestation fires", - "comment": "Different from LMON this flux should include all fires occurring on the land use tile, including natural, man-made and deforestation fires", - "dimensions": "longitude latitude landUse time", - "out_name": "cTotFireLut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cVegGrass": { - "modeling_realm": "land", - "standard_name": "vegetation_carbon_content", - "units": "kg m-2", - "cell_methods": "area: time: mean where natural_grasses (comment: mask=grassFrac)", - "cell_measures": "area: areacella", - "long_name": "Carbon mass in vegetation on grass tiles", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "cVegGrass", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cVegShrub": { - "modeling_realm": "land", - "standard_name": "vegetation_carbon_content", - "units": "kg m-2", - "cell_methods": "area: time: mean where shrubs (comment: mask=shrubFrac)", - "cell_measures": "area: areacella", - "long_name": "Carbon mass in vegetation on shrub tiles", - 
"comment": "", - "dimensions": "longitude latitude time", - "out_name": "cVegShrub", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cVegTree": { - "modeling_realm": "land", - "standard_name": "vegetation_carbon_content", - "units": "kg m-2", - "cell_methods": "area: time: mean where trees (comment: mask=treeFrac)", - "cell_measures": "area: areacella", - "long_name": "Carbon mass in vegetation on tree tiles", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "cVegTree", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cWood": { - "modeling_realm": "land", - "standard_name": "wood_carbon_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass in Wood", - "comment": "Carbon mass per unit area in wood, including sapwood and hardwood.", - "dimensions": "longitude latitude time", - "out_name": "cWood", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cfadDbze94": { - "modeling_realm": "atmos", - "standard_name": "histogram_of_equivalent_reflectivity_factor_over_height_above_reference_ellipsoid", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "CloudSat Radar Reflectivity", - "comment": "CFAD (Cloud Frequency Altitude Diagrams) are frequency distributions of radar reflectivity (or lidar scattering ratio) as a function of altitude. The variable cfadDbze94 is defined as the simulated relative frequency of occurrence of radar reflectivity in sampling volumes defined by altitude bins. The radar is observing at a frequency of 94GHz.", - "dimensions": "longitude latitude alt40 dbze time", - "out_name": "cfadDbze94", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cfadLidarsr532": { - "modeling_realm": "atmos", - "standard_name": "histogram_of_backscattering_ratio_over_height_above_reference_ellipsoid", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "CALIPSO Scattering Ratio", - "comment": "CFAD (Cloud Frequency Altitude Diagrams) are frequency distributions of radar reflectivity (or lidar scattering ratio) as a function of altitude. The variable cfadLidarsr532 is defined as the simulated relative frequency of lidar scattering ratio in sampling volumes defined by altitude bins. 
The lidar is observing at a wavelength of 532nm.", - "dimensions": "longitude latitude alt40 scatratio time", - "out_name": "cfadLidarsr532", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clcalipsoice": { - "modeling_realm": "atmos", - "standard_name": "ice_cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "CALIPSO ice cloud Fraction", - "comment": "", - "dimensions": "longitude latitude alt40 time", - "out_name": "clcalipsoice", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clcalipsoliq": { - "modeling_realm": "atmos", - "standard_name": "liquid_cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "CALIPSO liquid cloud Fraction", - "comment": "", - "dimensions": "longitude latitude alt40 time", - "out_name": "clcalipsoliq", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cldicemxrat27": { - "modeling_realm": "atmos", - "standard_name": "cloud_ice_mixing_ratio", - "units": "1.0", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Cloud Ice Mixing Ratio", - "comment": "Cloud ice mixing ratio", - "dimensions": "longitude latitude plev27 time", - "out_name": "cldicemxrat", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cldnci": { - "modeling_realm": "atmos", - "standard_name": "number_concentration_of_ice_crystals_in_air_at_ice_cloud_top", - "units": "m-3", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Ice Crystal Number Concentration of Cloud Tops", - "comment": "Concentration 'as seen from space' over ice-cloud portion of grid cell. This is the value from uppermost model layer with ice cloud or, if available, it is the sum over all ice cloud tops, no matter where they occur, as long as they are seen from the top of the atmosphere. Weight by total ice cloud top fraction (as seen from TOA) of each time sample when computing monthly mean.", - "dimensions": "longitude latitude time", - "out_name": "cldnci", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cldncl": { - "modeling_realm": "atmos", - "standard_name": "number_concentration_of_cloud_liquid_water_particles_in_air_at_liquid_water_cloud_top", - "units": "m-3", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Cloud Droplet Number Concentration of Cloud Tops", - "comment": "Droplets are liquid only. Report concentration 'as seen from space' over liquid cloudy portion of grid cell. This is the value from uppermost model layer with liquid cloud or, if available, it is better to sum over all liquid cloud tops, no matter where they occur, as long as they are seen from the top of the atmosphere. 
Weight by total liquid cloud top fraction (as seen from TOA) of each time sample when computing monthly mean.", - "dimensions": "longitude latitude time", - "out_name": "cldncl", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cldnvi": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_number_content_of_cloud_droplets", - "units": "m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Column Integrated Cloud Droplet Number", - "comment": "Droplets are liquid only. Values are weighted by liquid cloud fraction in each layer when vertically integrating, and for monthly means the samples are weighted by total liquid cloud fraction (as seen from TOA).", - "dimensions": "longitude latitude time", - "out_name": "cldnvi", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cldwatmxrat27": { - "modeling_realm": "atmos", - "standard_name": "cloud_liquid_water_mixing_ratio", - "units": "1.0", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Cloud Water Mixing Ratio", - "comment": "Cloud water mixing ratio", - "dimensions": "longitude latitude plev27 time", - "out_name": "cldwatmxrat", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "climodis": { - "modeling_realm": "atmos", - "standard_name": "ice_cloud_area_fraction", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "MODIS Ice Cloud Area Percentage", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "climodis", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clmisr": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Percentage Cloud Cover as Calculated by the MISR Simulator", - "comment": "Cloud percentage in spectral bands and layers as observed by the Multi-angle Imaging SpectroRadiometer (MISR) instrument.", - "dimensions": "longitude latitude alt16 tau time", - "out_name": "clmisr", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cltmodis": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "MODIS Total Cloud Cover Percentage", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "cltmodis", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clwmodis": { - "modeling_realm": "atmos", - "standard_name": "liquid_cloud_area_fraction", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "MODIS Liquid Cloud Fraction", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "clwmodis", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "clwvic": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_convective_cloud_condensed_water_content", - "units": "kg m-2", -
"cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Convective Condensed Water Path", - "comment": "calculate mass of convective condensed (liquid + ice) water in the column divided by the area of the column (not just the area of the cloudy portion of the column). This includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.", - "dimensions": "longitude latitude time", - "out_name": "clwvic", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "co23D": { - "modeling_realm": "atmos", - "standard_name": "CO2_3D_tracer", - "units": "kg kg-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "3D field of transported CO2", - "comment": "report 3D field of model simulated atmospheric CO2 mass mixing ration on model levels", - "dimensions": "longitude latitude alevel time", - "out_name": "co23D", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "co2s": { - "modeling_realm": "atmos", - "standard_name": "mole_fraction_of_carbon_dioxide_in_air", - "units": "1e-06", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Atmosphere CO2", - "comment": "As co2, but only at the surface", - "dimensions": "longitude latitude time", - "out_name": "co2s", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "columnmassflux": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_net_upward_convective_mass_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Column Integrated Mass Flux", - "comment": "Column integral of (mcu-mcd)", - "dimensions": "longitude latitude time", - "out_name": "columnmassflux", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "conccmcn": { - "modeling_realm": "atmos", - "standard_name": "number_concentration_of_coarse_mode_ambient_aerosol_in_air", - "units": "m-3", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Number Concentration Coarse Mode Aerosol", - "comment": "includes all particles with diameter larger than 1 micron", - "dimensions": "longitude latitude alevel time", - "out_name": "conccmcn", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "conccn": { - "modeling_realm": "atmos", - "standard_name": "number_concentration_of_ambient_aerosol_in_air", - "units": "m-3", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Aerosol Number Concentration", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "conccn", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "concdust": { - "modeling_realm": "atmos", - "standard_name": "mass_concentration_of_dust_dry_aerosol_in_air", - "units": "kg m-3", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Concentration of Dust", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "concdust", - "type": "", - "positive": "", - "valid_min": "", - 
"valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "concnmcn": { - "modeling_realm": "atmos", - "standard_name": "number_concentration_of_nucleation_mode_ambient_aerosol_in_air", - "units": "m-3", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Number Concentration of Nucleation Mode Aerosol", - "comment": "includes all particles with diameter smaller than 3 nm", - "dimensions": "longitude latitude alevel time", - "out_name": "concnmcn", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cropFracC3": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "Percentage Cover by C3 Crops", - "comment": "Percentage of entire grid cell covered by C3 crops", - "dimensions": "longitude latitude time typec3pft typecrop", - "out_name": "cropFracC3", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cropFracC4": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "Percentage Cover by C4 Crops", - "comment": "Percentage of entire grid cell covered by C4 crops", - "dimensions": "longitude latitude time typec4pft typecrop", - "out_name": "cropFracC4", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "depdust": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_total_deposition", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Deposition Rate of Dust", - "comment": "Balkanski - LSCE", - "dimensions": "longitude latitude time", - "out_name": "depdust", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "diabdrag": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_eastward_wind_due_to_numerical_artefacts", - "units": "m s-2", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Tendency of Eastward Wind from Numerical Artefacts", - "comment": "Other sub-grid scale/numerical zonal drag excluding that already provided for the parameterized orographic and non-ororgraphic gravity waves. This would be used to calculate the total 'diabatic drag'. Contributions to this additional drag such Rayleigh friction and diffusion that can be calculated from the monthly mean wind fields should not be included, but details (e.g. 
coefficients) of the friction and/or diffusion used in the model should be provided separately.", - "dimensions": "longitude latitude plev19 time", - "out_name": "diabdrag", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dissi13c": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_dissolved_inorganic_carbon13_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Dissolved Inorganic 13Carbon Concentration", - "comment": "Dissolved inorganic 13carbon (CO3+HCO3+H2CO3) concentration", - "dimensions": "longitude latitude olevel time", - "out_name": "dissi13c", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dissi14c": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_dissolved_inorganic_c14_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Concentration of DI14C", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "dissi14c", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dissicnat": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_dissolved_inorganic_carbon_in_sea_water_natural_component", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Natural Dissolved Inorganic Carbon Concentration", - "comment": "Dissolved inorganic carbon (CO3+HCO3+H2CO3) concentration at preindustrial atmospheric xCO2", - "dimensions": "longitude latitude olevel time", - "out_name": "dissicnat", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ec550aer": { - "modeling_realm": "aerosol", - "standard_name": "volume_extinction_coefficient_in_air_due_to_ambient_aerosol_particles", - "units": "m-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Aerosol extinction coefficient", - "comment": "Aerosol Extinction @550nm", - "dimensions": "longitude latitude alevel time", - "out_name": "ec550aer", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "evspsblpot": { - "modeling_realm": "land", - "standard_name": "water_potential_evaporation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Potential Evapotranspiration", - "comment": "at surface; potential flux of water into the atmosphere due to conversion of both liquid and solid phases to vapor (from underlying surface and vegetation)", - "dimensions": "longitude latitude time", - "out_name": "evspsblpot", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "exparag": { - "modeling_realm": "ocnBgChem", - "standard_name": "sinking_mole_flux_of_aragonite_expressed_as_carbon_in_sea_water", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Sinking Aragonite Flux", - "comment":
"Downward flux of Aragonite", - "dimensions": "longitude latitude olevel time", - "out_name": "exparag", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "expcalc": { - "modeling_realm": "ocnBgChem", - "standard_name": "sinking_mole_flux_of_calcite_expressed_as_carbon_in_sea_water", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Sinking Calcite Flux", - "comment": "Downward flux of Calcite", - "dimensions": "longitude latitude olevel time", - "out_name": "expcalc", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "expcfe": { - "modeling_realm": "ocean", - "standard_name": "sinking_mole_flux_of_particulate_iron_in_sea_water", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Sinking Particulate Iron Flux", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "expcfe", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "expn": { - "modeling_realm": "ocnBgChem", - "standard_name": "sinking_mole_flux_of_particulate_organic_nitrogen_in_sea_water", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Sinking Particulate Organic Nitrogen Flux", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "expn", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "expp": { - "modeling_realm": "ocnBgChem", - "standard_name": "sinking_mole_flux_of_particulate_organic_phosphorus_in_sea_water", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Sinking Particulate Organic Phosphorus Flux", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "expp", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "expsi": { - "modeling_realm": "ocnBgChem", - "standard_name": "sinking_mole_flux_of_particulate_silicon_in_sea_water", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Sinking Particulate Silica Flux", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "expsi", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fAnthDisturb": { - "modeling_realm": "land", - "standard_name": "surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_anthrogpogenic_emission", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "carbon mass flux into atmosphere due to any human activity", - "comment": "will require some careful definition to make sure we capture everything - any human activity that releases carbon to the atmosphere instead of into product pool goes here. E.g. 
Deforestation fire, harvest assumed to decompose straight away, grazing...", - "dimensions": "longitude latitude time", - "out_name": "fAnthDisturb", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fBNF": { - "modeling_realm": "land", - "standard_name": "biological_nitrogen_fixation", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "biological nitrogen fixation", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "fBNF", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fCLandToOcean": { - "modeling_realm": "land", - "standard_name": "carbon_mass_flux_into_ocean_from_rivers", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Lateral transfer of carbon out of gridcell that eventually goes into ocean", - "comment": "leached carbon etc that goes into run off or river routing and finds its way into ocean should be reported here.", - "dimensions": "longitude latitude time", - "out_name": "fCLandToOcean", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fDeforestToAtmos": { - "modeling_realm": "land", - "standard_name": "surface_net_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_anthropogenic_land_use_change", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Deforested biomass that goes into atmosphere as a result of anthropogenic land use change", - "comment": "When land use change results in deforestation of natural vegetation (trees or grasslands) then natural biomass is removed. The treatment of deforested biomass differs significantly across models, but it should be straight-forward to compare deforested biomass across models.", - "dimensions": "longitude latitude time", - "out_name": "fDeforestToAtmos", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fDeforestToProduct": { - "modeling_realm": "land", - "standard_name": "deforested_biomass_into_product_pool_due_to_anthorpogenic_land_use_change", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Deforested biomass that goes into product pool as a result of anthropogenic land use change", - "comment": "When land use change results in deforestation of natural vegetation (trees or grasslands) then natural biomass is removed. 
The treatment of deforested biomass differs significantly across models, but it should be straight-forward to compare deforested biomass across models.", - "dimensions": "longitude latitude time", - "out_name": "fDeforestToProduct", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fFireAll": { - "modeling_realm": "land", - "standard_name": "fire_CO2_emissions_from_all_sources", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass Flux into Atmosphere due to CO2 emissions from Fire resulting from all sources including natural, anthropogenic and land use change.", - "comment": "Only total fire emissions can be compared to observations.", - "dimensions": "longitude latitude time", - "out_name": "fFireAll", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fFireNat": { - "modeling_realm": "land", - "standard_name": "fire_CO2_emissions_from_wildfire", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass Flux into Atmosphere due to CO2 Emission from natural Fire", - "comment": "CO2 emissions from natural fires", - "dimensions": "longitude latitude time", - "out_name": "fFireNat", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fHarvestToAtmos": { - "modeling_realm": "land", - "standard_name": "surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_crop_harvesting", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Harvested biomass that goes straight into atmosphere", - "comment": "any harvested carbon that is assumed to decompose immediately into the atmosphere is reported here", - "dimensions": "longitude latitude time", - "out_name": "fHarvestToAtmos", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fHarvestToProduct": { - "modeling_realm": "land", - "standard_name": "harvested_biomass_into_product_pool", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Harvested biomass that goes into product pool", - "comment": "be it food or wood harvest, any carbon that is subsequently stored is reported here", - "dimensions": "longitude latitude time", - "out_name": "fHarvestToProduct", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fLitterFire": { - "modeling_realm": "land", - "standard_name": "fire_CO2_emissions_from_litter_carbon", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass Flux from Litter, CWD or any non-living pool into Atmosphere due to CO2 Emission from all Fire", - "comment": "Required for unambiguous separation of vegetation and soil + litter turnover times, since total fire flux draws from both sources", - "dimensions": "longitude latitude time", - "out_name": "fLitterFire", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fLuc": { - "modeling_realm": "land", 
- "standard_name": "surface_net_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_anthropogenic_land_use_change", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Net Carbon Mass Flux into Atmosphere due to Land Use Change", - "comment": "Carbon mass flux per unit area into atmosphere due to human changes to land (excluding forest regrowth) accounting possibly for different time-scales related to fate of the wood, for example.", - "dimensions": "longitude latitude time", - "out_name": "fLuc", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fLulccAtmLut": { - "modeling_realm": "land", - "standard_name": "surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_direct_to_atmosphere_due_to_anthropogenic_land_use_land_cover_change", - "units": "kg s-1", - "cell_methods": "area: time: mean where landuse", - "cell_measures": "area: areacella", - "long_name": "carbon transferred directly to atmosphere due to any land-use or land-cover change activities including deforestation or agricultural fire", - "comment": "This annual mean flux refers to the transfer of carbon directly to the atmosphere due to any land-use or land-cover change activities. Include carbon transferred due to deforestation or agricultural directly into atsmophere, and emissions form anthropogenic pools into atmosphere", - "dimensions": "longitude latitude landUse time", - "out_name": "fLulccAtmLut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fLulccProductLut": { - "modeling_realm": "land", - "standard_name": "carbon_mass_flux_into_wood_and_agricultural_product_pools_due_to_anthropogenic_land_use_or_land_cover_change", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where landuse", - "cell_measures": "area: areacella", - "long_name": "carbon harvested due to land-use or land-cover change process that enters anthropogenic product pools on tile", - "comment": "This annual mean flux refers to the transfer of carbon primarily through harvesting land use into anthropogenic product pools, e.g.,deforestation or wood harvestingfrom primary or secondary lands, food harvesting on croplands, harvesting (grazing) by animals on pastures.", - "dimensions": "longitude latitude landUse time", - "out_name": "fLulccProductLut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fLulccResidueLut": { - "modeling_realm": "land", - "standard_name": "carbon_mass_flux_into_soil_and_litter_due_to_anthropogenic_land_use_or_land_cover_change", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where landuse", - "cell_measures": "area: areacella", - "long_name": "carbon transferred to soil or litter pools due to land-use or land-cover change processes on tile", - "comment": "This annual mean flux refers to the transfer of carbon into soil or litter pools due to any land use or land-cover change activities", - "dimensions": "longitude latitude landUse time", - "out_name": "fLulccResidueLut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fN2O": { - "modeling_realm": "land", - "standard_name": "land_net_n2o_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - 
"cell_measures": "area: areacella", - "long_name": "Total land N2O flux", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "fN2O", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fNAnthDisturb": { - "modeling_realm": "land", - "standard_name": "nitrogen_flux_into_atmos_due_to_direct_human_disturbance", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "nitrogen mass flux out of land due to any human activity", - "comment": "will require some careful definition to make sure we capture everything - any human activity that releases nitrogen from land instead of into product pool goes here. E.g. Deforestation fire, harvest assumed to decompose straight away, grazing...", - "dimensions": "longitude latitude time", - "out_name": "fNAnthDisturb", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fNLandToOcean": { - "modeling_realm": "land", - "standard_name": "nitrogen_mass_flux_into_ocean_from_rivers", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Lateral transfer of nitrogen out of gridcell that eventually goes into ocean", - "comment": "leached nitrogen etc that goes into run off or river routing and finds its way into ocean should be reported here.", - "dimensions": "longitude latitude time", - "out_name": "fNLandToOcean", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fNLitterSoil": { - "modeling_realm": "land", - "standard_name": "nitrogen_mass_flux_into_soil_from_litter", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Nitrogen Mass Flux from Litter to Soil", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "fNLitterSoil", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fNOx": { - "modeling_realm": "land", - "standard_name": "land_net_nox_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total land NOx flux", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "fNOx", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fNProduct": { - "modeling_realm": "land", - "standard_name": "nitrogen_mass_flux_to_product_pool_due_to_anthorpogenic_activity", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Deforested or harvested biomass as a result of anthropogenic land use or change", - "comment": "When land use change results in deforestation of natural vegetation (trees or grasslands) then natural biomass is removed. 
The treatment of deforested biomass differs significantly across models, but it should be straight-forward to compare deforested biomass across models.", - "dimensions": "longitude latitude time", - "out_name": "fNProduct", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fNVegLitter": { - "modeling_realm": "land", - "standard_name": "litter_nitrogen_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Nitrogen Mass Flux from Vegetation to Litter", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "fNVegLitter", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fNVegSoil": { - "modeling_realm": "land", - "standard_name": "nitrogen_mass_flux_into_soil_from_vegetation_excluding_litter", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Nitrogen Mass Flux from Vegetation Directly to Soil", - "comment": "In some models part of nitrogen (e.g., root exudate) can go directly into the soil pool without entering litter.", - "dimensions": "longitude latitude time", - "out_name": "fNVegSoil", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fNdep": { - "modeling_realm": "land", - "standard_name": "deposition_of_nitrogen_onto_land", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Dry and Wet Deposition of Reactive Nitrogen onto Land", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "fNdep", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fNfert": { - "modeling_realm": "land", - "standard_name": "fertiliser_of_nitrogen_onto_land", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "total N added for cropland fertilisation (artificial and manure)", - "comment": "relative to total land area of a grid cell, not relative to agricultural area", - "dimensions": "longitude latitude time", - "out_name": "fNfert", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fNgas": { - "modeling_realm": "land", - "standard_name": "land_nitrogen_lost_to_atmosphere", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Nitrogen lost to the atmosphere (sum of NHx, NOx, N2O, N2)", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "fNgas", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fNgasFire": { - "modeling_realm": "land", - "standard_name": "land_nitrogen_lost_to_atmosphere_due_to_fire", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total N lost to the atmosphere (including NHx, NOx, N2O, N2) from fire.", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "fNgasFire", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", -
"ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fNgasNonFire": { - "modeling_realm": "land", - "standard_name": "land_nitrogen_lost_to_atmosphere_not_due_to_fire", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total N lost to the atmosphere (including NHx, NOx, N2O, N2) from all processes except fire.", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "fNgasNonFire", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fNleach": { - "modeling_realm": "land", - "standard_name": "land_nitrogen_lost_to_leaching", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total N loss to leaching or runoff (sum of ammonium, nitrite and nitrate)", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "fNleach", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fNloss": { - "modeling_realm": "land", - "standard_name": "land_nitrogen_lost", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total N lost (including NHx, NOx, N2O, N2 and leaching)", - "comment": "Not all models split losses into gasesous and leaching", - "dimensions": "longitude latitude time", - "out_name": "fNloss", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fNnetmin": { - "modeling_realm": "land", - "standard_name": "net_nitrogen_mineralisation", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Net nitrogen release from soil and litter as the outcome of nitrogen immobilisation and gross mineralisation", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "fNnetmin", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fNup": { - "modeling_realm": "land", - "standard_name": "plant_nitrogen_uptake", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "total plant nitrogen uptake (sum of ammonium and nitrate), irrespective of the source of nitrogen", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "fNup", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fProductDecomp": { - "modeling_realm": "land", - "standard_name": "Carbon_flux_out_of_storage_product_pools_into_atmos", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "decomposition out of product pools to CO2 in atmos", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "fProductDecomp", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fProductDecompLut": { - "modeling_realm": "land", - "standard_name": "tendency_of_atmospheric_mass_content_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_wood_and_agricultural_product_pool", - "units": "kg s-1", - "cell_methods": "area: time: mean where landuse", - 
"cell_measures": "area: areacella", - "long_name": "flux from wood and agricultural product pools on land use tile into atmosphere", - "comment": "If a model has explicit anthropogenic product pools by land use tile", - "dimensions": "longitude latitude landUse time", - "out_name": "fProductDecompLut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fVegFire": { - "modeling_realm": "land", - "standard_name": "fire_CO2_emissions_from_vegetation_carbon", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass Flux from Vegetation into Atmosphere due to CO2 Emission from all Fire", - "comment": "Required for unambiguous separation of vegetation and soil + litter turnover times, since total fire flux draws from both sources", - "dimensions": "longitude latitude time", - "out_name": "fVegFire", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fVegLitterMortality": { - "modeling_realm": "land", - "standard_name": "mortality_litter_carbon_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Carbon Mass Flux from Vegetation to Litter as a Result of Mortality", - "comment": "needed to separate changing vegetation C turnover times resulting from changing allocation versus changing mortality", - "dimensions": "longitude latitude time", - "out_name": "fVegLitterMortality", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fVegLitterSenescence": { - "modeling_realm": "land", - "standard_name": "senescence_litter_carbon_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Carbon Mass Flux from Vegetation to Litter as a Result of Leaf, Branch, and Root Senescence", - "comment": "needed to separate changing vegetation C turnover times resulting from changing allocation versus changing mortality", - "dimensions": "longitude latitude time", - "out_name": "fVegLitterSenescence", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fVegSoilMortality": { - "modeling_realm": "land", - "standard_name": "mortality_vegtosoil_carbon_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Carbon Mass Flux from Vegetation to Soil as a result of mortality", - "comment": "needed to separate changing vegetation C turnover times resulting from changing allocation versus changing mortality", - "dimensions": "longitude latitude time", - "out_name": "fVegSoilMortality", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fVegSoilSenescence": { - "modeling_realm": "land", - "standard_name": "senescence_vegtosoil_carbon_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Carbon Mass Flux from Vegetation to Soil as a result of leaf, branch, and root senescence", - "comment": "needed to separate changing vegetation C turnover times resulting from changing allocation versus changing mortality", - 
"dimensions": "longitude latitude time", - "out_name": "fVegSoilSenescence", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fahLut": { - "modeling_realm": "land", - "standard_name": "surface_upward_heat_flux_due_to_anthropogenic_energy_consumption", - "units": "W m-2", - "cell_methods": "area: time: mean where landuse", - "cell_measures": "area: areacella", - "long_name": "Anthropogenic heat flux generated from non-renewable human primary energy consumption, including energy use by vehicles, commercial and residential buildings, industry, and power plants. Primary energy refers to energy in natural resources, fossil and nonfossil, before conversion into other forms, such as electricity.", - "comment": "", - "dimensions": "longitude latitude landUse time", - "out_name": "fahLut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fbddtalk": { - "modeling_realm": "ocnBgChem", - "standard_name": "integral_wrt_depth_of_tendency_of_sea_water_alkalinity_expressed_as_mole_equivalent_due_to_biological_processes", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Rate of Change of Biological Alkalinity due to Biological Activity", - "comment": "vertical integral of net biological terms in time rate of change of alkalinity", - "dimensions": "longitude latitude olevel time", - "out_name": "fbddtalk", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fbddtdic": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_dissolved_inorganic_carbon_due_to_biological_processes", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Rate of Change of Dissolved Inorganic Carbon due to Biological Activity", - "comment": "vertical integral of net biological terms in time rate of change of dissolved inorganic carbon", - "dimensions": "longitude latitude olevel time", - "out_name": "fbddtdic", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fbddtdife": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_dissolved_inorganic_iron_due_to_biological_processes", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Rate of Change of Dissolved Inorganic Iron due to Biological Activity", - "comment": "vertical integral of net biological terms in time rate of change of dissolved inorganic iron", - "dimensions": "longitude latitude olevel time", - "out_name": "fbddtdife", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fbddtdin": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_dissolved_inorganic_nitrogen_due_to_biological_processes", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Rate of Change of Dissolved Inorganic Nitrogen due to Biological Activity", - "comment": "vertical integral of net biological terms in time rate of 
change of nitrogen nutrients (e.g. NO3+NH4)", - "dimensions": "longitude latitude olevel time", - "out_name": "fbddtdin", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fbddtdip": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_dissolved_inorganic_phosphorus_due_to_biological_processes", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Rate of Change of Dissolved Inorganic Phosphorus due to Biological Activity", - "comment": "vertical integral of net biological terms in time rate of change of phosphate", - "dimensions": "longitude latitude olevel time", - "out_name": "fbddtdip", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fbddtdisi": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_dissolved_inorganic_silicon_due_to_biological_processes", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Rate of Change of Dissolved Inorganic Silicon due to Biological Activity", - "comment": "vertical integral of net biological terms in time rate of change of dissolved inorganic silicate", - "dimensions": "longitude latitude olevel time", - "out_name": "fbddtdisi", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fddtalk": { - "modeling_realm": "ocnBgChem", - "standard_name": "integral_wrt_depth_of_tendency_of_sea_water_alkalinity_expressed_as_mole_equivalent", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Rate of Change of Total Alkalinity", - "comment": "vertical integral of net time rate of change of alkalinity", - "dimensions": "longitude latitude olevel time", - "out_name": "fddtalk", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fddtdic": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_dissolved_inorganic_carbon", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Rate of Change of Net Dissolved Inorganic Carbon", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "fddtdic", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fddtdife": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_dissolved_inorganic_iron", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Rate of Change of Net Dissolved Inorganic Iron", - "comment": "vertical integral of net time rate of change of dissolved inorganic iron", - "dimensions": "longitude latitude olevel time", - "out_name": "fddtdife", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fddtdin": { - "modeling_realm": "ocnBgChem", - "standard_name": 
"tendency_of_ocean_mole_content_of_dissolved_inorganic_nitrogen", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Rate of Change of Net Dissolved Inorganic Nitrogen", - "comment": "Net time rate of change of nitrogen nutrients (e.g. NO3+NH4)", - "dimensions": "longitude latitude olevel time", - "out_name": "fddtdin", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fddtdip": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_dissolved_inorganic_phosphorus", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Rate of Change of Net Dissolved Inorganic Phosphate", - "comment": "vertical integral of net time rate of change of phosphate", - "dimensions": "longitude latitude olevel time", - "out_name": "fddtdip", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fddtdisi": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_dissolved_inorganic_silicon", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Rate of Change of Net Dissolved Inorganic Silicon", - "comment": "vertical integral of net time rate of change of dissolved inorganic silicate", - "dimensions": "longitude latitude olevel time", - "out_name": "fddtdisi", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fg14co2": { - "modeling_realm": "ocnBgChem", - "standard_name": "air_sea_flux_of_14CO2", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Total air-sea flux of 14CO2", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "fg14co2", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fg14co2abio": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_downward_mass_flux_of_abiotic_14_carbon_dioxide_expressed_as_carbon", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Surface Downward Flux of Abiotic 14CO2", - "comment": "Gas exchange flux of abiotic 14CO2 (positive into ocean)", - "dimensions": "longitude latitude olevel time", - "out_name": "fg14co2abio", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fgco2abio": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_downward_mass_flux_of_abiotic_carbon_dioxide_expressed_as_carbon", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Surface Downward Flux of Abiotic CO2", - "comment": "Gas exchange flux of abiotic CO2 (positive into ocean)", - "dimensions": "longitude latitude olevel time", - "out_name": "fgco2abio", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fgco2nat": { - "modeling_realm": "ocnBgChem", - 
"standard_name": "surface_downward_mass_flux_of_natural_carbon_dioxide_expressed_as_carbon", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Surface Downward Flux of Natural CO2", - "comment": "Gas exchange flux of natural CO2 (positive into ocean)", - "dimensions": "longitude latitude olevel time", - "out_name": "fgco2nat", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fgdms": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_upward_mole_flux_of_dimethyl_sulfide", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Surface Upward Flux of DMS", - "comment": "Gas exchange flux of DMS (positive into atmosphere)", - "dimensions": "longitude latitude olevel time", - "out_name": "fgdms", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "flandice": { - "modeling_realm": "ocean", - "standard_name": "water_flux_into_sea_water_from_land_ice", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Water flux into Sea Water from Land Ice", - "comment": "Computed as the water flux into the ocean due to land ice (runoff water from surface and base of land ice or melt from base of ice shelf or vertical ice front) into the ocean divided by the area ocean portion of the grid cell", - "dimensions": "longitude latitude time", - "out_name": "flandice", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "flashrate": { - "modeling_realm": "atmosChem", - "standard_name": "lightning_flash_rate", - "units": "km-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Lightning Flash Rate", - "comment": "proposed name: lightning_flash_rate (units to be interpreted as 'counts km-2 s-1)", - "dimensions": "longitude latitude time", - "out_name": "flashrate", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fracLut": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "fraction of grid cell for each land use tile", - "comment": "end of year values (not annual mean); note that fraction should be reported as fraction of land grid cell (example: frac_lnd = 0.5, frac_ocn = 0.5, frac_crop_lnd = 0.2 (of land portion of grid cell), then frac_lut(crp) = 0.5*0.2 = 0.1)", - "dimensions": "longitude latitude landUse time", - "out_name": "fracLut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "gppGrass": { - "modeling_realm": "land", - "standard_name": "gross_primary_productivity_of_biomass_expressed_as_carbon", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where natural_grasses (comment: mask=grassFrac)", - "cell_measures": "area: areacella", - "long_name": "gross primary production on grass tiles", - "comment": "Total GPP of grass in the gridcell", - "dimensions": "longitude latitude time", - "out_name": "gppGrass", - "type": "", - 
"positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "gppLut": { - "modeling_realm": "land", - "standard_name": "gross_primary_productivity_of_biomass_expressed_as_carbon", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where landuse", - "cell_measures": "area: areacella", - "long_name": "gross primary productivity on land use tile", - "comment": "", - "dimensions": "longitude latitude landUse time", - "out_name": "gppLut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "gppShrub": { - "modeling_realm": "land", - "standard_name": "gross_primary_productivity_of_biomass_expressed_as_carbon", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where shrubs (comment: mask=shrubFrac)", - "cell_measures": "area: areacella", - "long_name": "gross primary production on Shrub tiles", - "comment": "Total GPP of shrubs in the gridcell", - "dimensions": "longitude latitude time", - "out_name": "gppShrub", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "gppTree": { - "modeling_realm": "land", - "standard_name": "gross_primary_productivity_of_biomass_expressed_as_carbon", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where trees (comment: mask=treeFrac)", - "cell_measures": "area: areacella", - "long_name": "gross primary production on tree tiles", - "comment": "Total GPP of trees in the gridcell", - "dimensions": "longitude latitude time", - "out_name": "gppTree", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "gppc13": { - "modeling_realm": "land", - "standard_name": "gross_primary_productivity_of_c13", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Mass Flux of 13C out of Atmosphere due to Gross Primary Production on Land", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "gppc13", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "gppc14": { - "modeling_realm": "land", - "standard_name": "gross_primary_productivity_of_c14", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Mass Flux of 14C out of Atmosphere due to Gross Primary Production on Land", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "gppc14", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "grassFracC3": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "C3 grass Area Percentage", - "comment": "Fraction of entire grid cell covered by C3 grass.", - "dimensions": "longitude latitude time typec3pft typenatgr", - "out_name": "grassFracC3", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "grassFracC4": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "C4 grass 
Area Percentage", - "comment": "Fraction of entire grid cell covered by C4 grass.", - "dimensions": "longitude latitude time typec4pft typenatgr", - "out_name": "grassFracC4", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "grplmxrat27": { - "modeling_realm": "atmos", - "standard_name": "mass_fraction_of_graupel_in_air", - "units": "1.0", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Graupel Mixing Ratio", - "comment": "Graupel mixing ratio", - "dimensions": "longitude latitude plev27 time", - "out_name": "grplmxrat", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hcont300": { - "modeling_realm": "ocean", - "standard_name": "heat_content_of_ocean_layer", - "units": "m K", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Heat content of upper 300 meters", - "comment": "Used in PMIP2", - "dimensions": "longitude latitude time depth300m", - "out_name": "hcont300", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hflsLut": { - "modeling_realm": "land", - "standard_name": "surface_upward_latent_heat_flux", - "units": "W m-2", - "cell_methods": "area: time: mean where landuse", - "cell_measures": "area: areacella", - "long_name": "latent heat flux on land use tile", - "comment": "", - "dimensions": "longitude latitude landUse time", - "out_name": "hflsLut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfssLut": { - "modeling_realm": "land", - "standard_name": "surface_upward_sensible_heat_flux", - "units": "W m-2", - "cell_methods": "area: time: mean where landuse", - "cell_measures": "area: areacella", - "long_name": "sensible heat flux on land use tile", - "comment": "", - "dimensions": "longitude latitude landUse time", - "out_name": "hfssLut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hursminCrop": { - "modeling_realm": "atmos", - "standard_name": "relative_humidity", - "units": "%", - "cell_methods": "area: mean time: minimum within days time: mean over days", - "cell_measures": "area: areacella", - "long_name": "Daily Minimum Near-Surface Relative Humidity over Crop Tile", - "comment": "minimum near-surface (usually, 2 meter) relative humidity (add cell_method attribute 'time: min')", - "dimensions": "longitude latitude time height2m", - "out_name": "hursminCrop", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hus": { - "modeling_realm": "atmos", - "standard_name": "specific_humidity", - "units": "1.0", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Specific Humidity", - "comment": "", - "dimensions": "longitude latitude plev7c time", - "out_name": "hus", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hus27": { - "modeling_realm": "atmos", - "standard_name": "specific_humidity", - "units": "1.0", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Specific Humidity", - "comment": "", - "dimensions": "longitude latitude plev27 time", - "out_name": "hus", - "type": "", - 
"positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hussLut": { - "modeling_realm": "land", - "standard_name": "specific_humidity", - "units": "1.0", - "cell_methods": "area: time: mean where landuse", - "cell_measures": "area: areacella", - "long_name": "near-surface specific humidity on land use tile", - "comment": "Normally, the specific humidity should be reported at the 2 meter height", - "dimensions": "longitude latitude landUse time height2m", - "out_name": "hussLut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "intuadse": { - "modeling_realm": "atmos", - "standard_name": "vertical_integral_eastward_wind_by_dry_static_energy", - "units": "1.e6 J m-1 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Vertically integrated Eastward dry transport (cp.T +zg).u (Mass_weighted_vertical integral of the product of northward wind by dry static_energy per mass unit)", - "comment": "Used in PMIP2", - "dimensions": "longitude latitude time", - "out_name": "intuadse", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "intuaw": { - "modeling_realm": "atmos", - "standard_name": "vertical_integral_eastward_wind_by_total_water", - "units": "kg m-1 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Vertically integrated Eastward moisture transport (Mass_weighted_vertical integral of the product of eastward wind by total water mass per unit mass)", - "comment": "Used in PMIP2", - "dimensions": "longitude latitude time", - "out_name": "intuaw", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "intvadse": { - "modeling_realm": "atmos", - "standard_name": "vertical_integral_northward_wind_by_dry_static_energy", - "units": "1.e6 J m-1 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Vertically integrated Northward dry transport (cp.T +zg).v (Mass_weighted_vertical integral of the product of northward wind by dry static_energy per mass unit)", - "comment": "Used in PMIP2", - "dimensions": "longitude latitude time", - "out_name": "intvadse", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "intvaw": { - "modeling_realm": "atmos", - "standard_name": "vertical_integral_northward_wind_by_total_water", - "units": "kg m-1 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Vertically integrated Northward moisture transport (Mass_weighted_vertical integral of the product of northward wind by total water mass per unit mass)", - "comment": "Used in PMIP2", - "dimensions": "longitude latitude time", - "out_name": "intvaw", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "irrLut": { - "modeling_realm": "land", - "standard_name": "surface_downward_water_flux_due_to_irrigation", - "units": "kg s-1", - "cell_methods": "area: time: mean where landuse", - "cell_measures": "area: areacella", - "long_name": "Irrigation flux including any irrigation for crops, trees, pasture, or urban lawns", - "comment": "", - "dimensions": "longitude latitude landUse time", - "out_name": "irrLut", - "type": "", - 
"positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "jpdftaureicemodis": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "MODIS Optical Thickness-Particle Size joint distribution, ice", - "comment": "", - "dimensions": "longitude latitude plev7c effectRadIc tau time", - "out_name": "jpdftaureicemodis", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "jpdftaureliqmodis": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction_in_atmosphere_layer", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "MODIS Optical Thickness-Particle Size joint distribution, liquid", - "comment": "", - "dimensions": "longitude latitude plev7c effectRadLi tau time", - "out_name": "jpdftaureliqmodis", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "laiLut": { - "modeling_realm": "land", - "standard_name": "missing", - "units": "1.0", - "cell_methods": "area: time: mean where landuse", - "cell_measures": "area: areacella", - "long_name": "Leaf Area Index on Land Use Tile", - "comment": "Note that if tile does not model lai, for example, on the urban tile, then should be reported as missing value", - "dimensions": "longitude latitude landUse time", - "out_name": "laiLut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "loaddust": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_mass_content_of_dust_dry_aerosol", - "units": "kg m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Load of Dust", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "loaddust", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "loadso4": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_mass_content_of_sulfate_dry_aerosol", - "units": "kg m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Load of SO4", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "loadso4", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "loadss": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_mass_content_of_seasalt_dry_aerosol", - "units": "kg m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Load of Seasalt", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "loadss", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "lwsrfasdust": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_all_sky_surface_longwave_flux_to_dust_ambient_aerosol_particles", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "All-sky Surface Longwave radiative flux due to Dust", - "comment": "Balkanski - LSCE", - "dimensions": "longitude latitude time", - "out_name": "lwsrfasdust", - "type": "", - "positive": "", - "valid_min": "", - 
"valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "lwsrfcsdust": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_clear_sky_surface_longwave_flux_to_dust_ambient_aerosol_particles", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Clear-sky Surface Longwave radiative flux due to Dust", - "comment": "Balkanski - LSCE", - "dimensions": "longitude latitude time", - "out_name": "lwsrfcsdust", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "lwtoaasdust": { - "modeling_realm": "atmos", - "standard_name": "toa_instantaneous_longwave_forcing", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "all sky lw-rf dust at toa", - "comment": "proposed name: toa_instantaneous_longwave_forcing_due_to_dust_ambient_aerosol", - "dimensions": "longitude latitude time", - "out_name": "lwtoaasdust", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "lwtoacs": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_clear_sky_top_of_atmosphere_longwave_flux_to_dust_ambient_aerosol_particles___2D_field_radiative_properties", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Clear-sky TOA Longwave radiative flux due to Dust", - "comment": "Balkanski - LSCE", - "dimensions": "longitude latitude time", - "out_name": "lwtoacs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "lwtoacsaer": { - "modeling_realm": "atmos", - "standard_name": "toa_instantaneous_longwave_forcing", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "clear sky lw-rf aerosols at toa", - "comment": "proposed name: toa_instantaneous_longwave_forcing_due_to_ambient_aerosol_assuming_clear_sky", - "dimensions": "longitude latitude time", - "out_name": "lwtoacsaer", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "md": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_emission", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Wet diameter mode coarse insoluble", - "comment": "Balkanski - LSCE", - "dimensions": "longitude latitude alevel time", - "out_name": "md", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mmraerso4": { - "modeling_realm": "atmos", - "standard_name": "mass_fraction_of_sulfate_dry_aerosol_in_air", - "units": "kg kg-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Aerosol Sulfate Mass Mixing Ratio", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "mmraerso4", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mmrbc": { - "modeling_realm": "aerosol", - "standard_name": "mass_fraction_of_elemental_carbon_dry_aerosol_particles_in_air", - "units": "kg kg-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Elemental carbon mass mixing 
ratio", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "mmrbc", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mmrdust": { - "modeling_realm": "aerosol", - "standard_name": "mass_fraction_of_dust_dry_aerosol_particles_in_air", - "units": "kg kg-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Dust aerosol mass mixing ratio", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "mmrdust", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mmrno3": { - "modeling_realm": "aerosol", - "standard_name": "mass_fraction_of_nitrate_dry_aerosol_particles_in_air", - "units": "kg kg-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "NO3 aerosol mass mixing ratio", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "mmrno3", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mmrss": { - "modeling_realm": "aerosol", - "standard_name": "mass_fraction_of_seasalt_dry_aerosol_particles_in_air", - "units": "kg kg-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Sea Salt mass mixing ratio", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "mmrss", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrlso": { - "modeling_realm": "land", - "standard_name": "soil_liquid_water_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Soil Liquid Water Content", - "comment": "the mass (summed over all all layers) of liquid water.", - "dimensions": "longitude latitude time", - "out_name": "mrlso", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrroLut": { - "modeling_realm": "land", - "standard_name": "runoff_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where landuse", - "cell_measures": "area: areacella", - "long_name": "Total runoff from land use tile", - "comment": "the total runoff (including 'drainage' through the base of the soil model) leaving the land use tile portion of the grid cell", - "dimensions": "longitude latitude landUse time", - "out_name": "mrroLut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrsfl": { - "modeling_realm": "land", - "standard_name": "frozen_moisture_content_of_soil_layer", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Frozen water content of soil layer", - "comment": "in each soil layer, the mass of water in ice phase. 
Reported as 'missing' for grid cells occupied entirely by 'sea'", - "dimensions": "longitude latitude sdepth time", - "out_name": "mrsfl", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrsll": { - "modeling_realm": "land", - "standard_name": "liquid_moisture_content_of_soil_layer", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Liquid water content of soil layer", - "comment": "in each soil layer, the mass of water in liquid phase. Reported as 'missing' for grid cells occupied entirely by 'sea'", - "dimensions": "longitude latitude sdepth time", - "out_name": "mrsll", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrsoLut": { - "modeling_realm": "land", - "standard_name": "soil_moisture_content", - "units": "kg m-2", - "cell_methods": "area: time: mean where landuse", - "cell_measures": "area: areacella", - "long_name": "Total soil moisture", - "comment": "", - "dimensions": "longitude latitude landUse time", - "out_name": "mrsoLut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrsol": { - "modeling_realm": "land", - "standard_name": "moisture_content_of_soil_layer", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total water content of soil layer", - "comment": "in each soil layer, the mass of water in all phases, including ice. Reported as 'missing' for grid cells occupied entirely by 'sea'", - "dimensions": "longitude latitude sdepth time", - "out_name": "mrsol", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrsosLut": { - "modeling_realm": "land", - "standard_name": "moisture_content_of_soil_layer", - "units": "kg m-2", - "cell_methods": "area: time: mean where landuse", - "cell_measures": "area: areacella", - "long_name": "Moisture in Upper Portion of Soil Column of land use tile", - "comment": "the mass of water in all phases in a thin surface layer, integrated over the uppermost 10 cm", - "dimensions": "longitude latitude landUse time sdepth1", - "out_name": "mrsosLut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrtws": { - "modeling_realm": "land", - "standard_name": "total_water_storage", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total water storage in a grid cell", - "comment": "Mass of water in all phases and in all components including soil, canopy, vegetation, ice sheets, rivers and ground water.", - "dimensions": "longitude latitude time", - "out_name": "mrtws", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nLand": { - "modeling_realm": "land", - "standard_name": "total_land_nitrogen", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total nitrogen in all terrestrial nitrogen pools", - "comment": "Report missing data over ocean grid cells.
For fractional land report value averaged over the land fraction.", - "dimensions": "longitude latitude time", - "out_name": "nLand", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nLeaf": { - "modeling_realm": "land", - "standard_name": "leaf_nitrogen_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Nitrogen Mass in Leaves", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "nLeaf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nLitter": { - "modeling_realm": "land", - "standard_name": "litter_nitrogen_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Nitrogen Mass in Litter Pool", - "comment": "Report missing data over ocean grid cells. For fractional land report value averaged over the land fraction.", - "dimensions": "longitude latitude time", - "out_name": "nLitter", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nLitterCwd": { - "modeling_realm": "land", - "standard_name": "litter_wood_debris_nitrogen_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Nitrogen Mass in Coarse Woody Debris", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "nLitterCwd", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nLitterSubSurf": { - "modeling_realm": "land", - "standard_name": "subsurface_litter_nitrogen_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Nitrogen Mass in below ground litter (non CWD)", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "nLitterSubSurf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nLitterSurf": { - "modeling_realm": "land", - "standard_name": "surface_litter_nitrogen_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Nitrogen Mass in above ground litter (non CWD)", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "nLitterSurf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nMineral": { - "modeling_realm": "land", - "standard_name": "mineral_soil_nitrogen_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Mineral nitrogen in the soil", - "comment": "SUM of ammonium, nitrite, nitrate, etc over all soil layers", - "dimensions": "longitude latitude time", - "out_name": "nMineral", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nMineralNH4": { - "modeling_realm": "land", - "standard_name": "mineral_nh4_soil_nitrogen_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Mineral ammonium in the soil", - "comment": "SUM 
of ammonium over all soil layers", - "dimensions": "longitude latitude time", - "out_name": "nMineralNH4", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nMineralNO3": { - "modeling_realm": "land", - "standard_name": "mineral_no3_soil_nitrogen_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Mineral nitrate in the soil", - "comment": "SUM of nitrate over all soil layers", - "dimensions": "longitude latitude time", - "out_name": "nMineralNO3", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nOther": { - "modeling_realm": "land", - "standard_name": "other_vegegtation_components_nitrogen_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Nitrogen mass in vegetation components other than leaves, stem and root", - "comment": "E.g. fruits, seeds, etc.", - "dimensions": "longitude latitude time", - "out_name": "nOther", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nProduct": { - "modeling_realm": "land", - "standard_name": "nitrogen_content_of_products_of_anthropogenic_land_use_change", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Nitrogen Mass in Products of Land Use Change", - "comment": "Report missing data over ocean grid cells. For fractional land report value averaged over the land fraction.", - "dimensions": "longitude latitude time", - "out_name": "nProduct", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nRoot": { - "modeling_realm": "land", - "standard_name": "root_nitrogen_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Nitrogen Mass in Roots", - "comment": "including fine and coarse roots.", - "dimensions": "longitude latitude time", - "out_name": "nRoot", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nSoil": { - "modeling_realm": "land", - "standard_name": "soil_nitrogen_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Nitrogen Mass in Soil Pool", - "comment": "Report missing data over ocean grid cells. 
For fractional land report value averaged over the land fraction.", - "dimensions": "longitude latitude time", - "out_name": "nSoil", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nStem": { - "modeling_realm": "land", - "standard_name": "stem_nitrogen_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Nitrogen Mass in Stem", - "comment": "including sapwood and hardwood.", - "dimensions": "longitude latitude time", - "out_name": "nStem", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nVeg": { - "modeling_realm": "land", - "standard_name": "vegetation_nitrogen_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Nitrogen Mass in Vegetation", - "comment": "Report missing data over ocean grid cells. For fractional land report value averaged over the land fraction.", - "dimensions": "longitude latitude time", - "out_name": "nVeg", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "necbLut": { - "modeling_realm": "land", - "standard_name": "surface_net_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_all_land_processes", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where landuse", - "cell_measures": "area: areacella", - "long_name": "net rate of C accumulation (or loss) on land use tile", - "comment": "Computed as npp minus heterotrophic respiration minus fire minus C leaching minus harvesting/clearing. Positive rate is into the land, negative rate is from the land. Do not include fluxes from anthropogenic product pools to atmosphere", - "dimensions": "longitude latitude landUse time", - "out_name": "necbLut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nep": { - "modeling_realm": "land", - "standard_name": "surface_net_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_all_land_processes_excluding_anthropogenic_land_use_change", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Net Carbon Mass Flux out of Atmosphere due to Net Ecosystem Productivity on Land.", - "comment": "Natural flux of CO2 (expressed as a mass flux of carbon) from the atmosphere to the land calculated as the difference between uptake associated with photosynthesis and the release of CO2 from the sum of plant and soil respiration and fire. Positive flux is into the land.
Includes emissions from natural fires and human ignition fires as calculated by the fire module of the dynamic vegetation model, but excluding any CO2 flux from fire included in fLuc (CO2 Flux to Atmosphere from Land Use Change).", - "dimensions": "longitude latitude time", - "out_name": "nep", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "netAtmosLandC13Flux": { - "modeling_realm": "land", - "standard_name": "net_atmos_to_land_C13_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Net Mass Flux of 13C between atmosphere and land (positive into land) as a result of all processes.", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "netAtmosLandC13Flux", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "netAtmosLandC14Flux": { - "modeling_realm": "land", - "standard_name": "net_atmos_to_land_C14_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Net Mass Flux of 14C between atmosphere and land (positive into land) as a result of all processes.", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "netAtmosLandC14Flux", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "netAtmosLandCO2Flux": { - "modeling_realm": "land", - "standard_name": "surface_net_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_all_land_processes", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Net flux of CO2 between atmosphere and land (positive into land) as a result of all processes.", - "comment": "This flux should be reproducible by differencing the sum of all carbon pools (cVeg, cLitter, cSoil, and cProducts or equivalently cLand) from one time step to the next, except in the case of lateral transfer of carbon due to harvest, riverine transport of dissolved organic and/or inorganic carbon, or any other process (in which case the lateral_carbon_transfer_over_land term, see below, will be zero data).", - "dimensions": "longitude latitude time", - "out_name": "netAtmosLandCO2Flux", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nppGrass": { - "modeling_realm": "land", - "standard_name": "net_primary_productivity_of_biomass_expressed_as_carbon", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where natural_grasses (comment: mask=grassFrac)", - "cell_measures": "area: areacella", - "long_name": "net primary production on grass tiles", - "comment": "Total NPP of grass in the gridcell", - "dimensions": "longitude latitude time", - "out_name": "nppGrass", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nppLut": { - "modeling_realm": "land", - "standard_name": "net_primary_productivity_of_biomass_expressed_as_carbon", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where landuse", - "cell_measures": "area: areacella", - "long_name": "net primary productivity on land use tile", - "comment": "", - "dimensions": "longitude latitude landUse time", - "out_name": "nppLut", - "type": "", -
"positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nppOther": { - "modeling_realm": "land", - "standard_name": "net_primary_production_allocated_to_other", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "net primary production allcoated to other pools (not leaves stem or roots)", - "comment": "added for completeness with npp_root", - "dimensions": "longitude latitude time", - "out_name": "nppOther", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nppShrub": { - "modeling_realm": "land", - "standard_name": "net_primary_productivity_of_biomass_expressed_as_carbon", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where shrubs (comment: mask=shrubFrac)", - "cell_measures": "area: areacella", - "long_name": "net primary production on Shrub tiles", - "comment": "Total NPP of shrubs in the gridcell", - "dimensions": "longitude latitude time", - "out_name": "nppShrub", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nppStem": { - "modeling_realm": "land", - "standard_name": "net_primary_production_allocated_to_stem", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "net primary production allcoated to stem", - "comment": "added for completeness with npp_root", - "dimensions": "longitude latitude time", - "out_name": "nppStem", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nppTree": { - "modeling_realm": "land", - "standard_name": "net_primary_productivity_of_biomass_expressed_as_carbon", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where trees (comment: mask=treeFrac)", - "cell_measures": "area: areacella", - "long_name": "net primary production on tree tiles", - "comment": "Total NPP of trees in the gridcell", - "dimensions": "longitude latitude time", - "out_name": "nppTree", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nwdFracLut": { - "modeling_realm": "land", - "standard_name": "missing", - "units": "1", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "fraction of land use tile tile that is non-woody vegetation ( e.g. 
herbaceous crops)", - "comment": "", - "dimensions": "longitude latitude landUse time typenwd", - "out_name": "nwdFracLut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "o2sat": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_dissolved_molecular_oxygen_in_sea_water_at_saturation", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Dissolved Oxygen Concentration at Saturation", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "o2sat", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ocontempdiff": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_conservative_temperature_expressed_as_heat_content_due_to_parameterized_dianeutral_mixing", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water conservative temperature expressed as heat content due to parameterized dianeutral mixing", - "comment": "Tendency of heat content for a grid cell from parameterized dianeutral mixing. Reported only for models that use conservative temperature as prognostic field.", - "dimensions": "longitude latitude olevel time", - "out_name": "ocontempdiff", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ocontempmint": { - "modeling_realm": "ocean", - "standard_name": "integral_wrt_depth_of_product_of_sea_water_density_and_conservative_temperature", - "units": "degC kg m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "integral wrt depth of product of sea water density and conservative temperature", - "comment": "Full column sum of density*cell thickness*conservative temperature. If the model is Boussinesq, then use Boussinesq reference density for the density factor.", - "dimensions": "longitude latitude time", - "out_name": "ocontempmint", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ocontemppadvect": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_conservative_temperature_expressed_as_heat_content_due_to_parameterized_eddy_advection", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water conservative temperature expressed as heat content due to parameterized eddy advection", - "comment": "Tendency of heat content for a grid cell from parameterized eddy advection (any form of eddy advection). 
Reported only for models that use conservative temperature as prognostic field.", - "dimensions": "longitude latitude olevel time", - "out_name": "ocontemppadvect", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ocontemppmdiff": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_conservative_temperature_expressed_as_heat_content_due_to_parameterized_mesoscale_diffusion", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water conservative temperature expressed as heat content due to parameterized mesoscale diffusion", - "comment": "Tendency of heat content for a grid cell from parameterized mesoscale eddy diffusion. Reported only for models that use conservative temperature as prognostic field.", - "dimensions": "longitude latitude olevel time", - "out_name": "ocontemppmdiff", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ocontemppsmadvect": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_conservative_temperature_expressed_as_heat_content_due_to_parameterized_submesoscale_advection", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water conservative temperature expressed as heat content due to parameterized submesoscale advection", - "comment": "Tendency of heat content for a grid cell from parameterized submesoscale eddy advection. Reported only for models that use conservative temperature as prognostic field.", - "dimensions": "longitude latitude olevel time", - "out_name": "ocontemppsmadvect", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ocontemprmadvect": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_conservative_temperature_expressed_as_heat_content_due_to_residual_mean_advection", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water conservative temperature expressed as heat content due to residual mean (sum of Eulerian + parameterized) advection", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "ocontemprmadvect", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ocontemptend": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_conservative_temperature_expressed_as_heat_content", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water conservative temperature expressed as heat content", - "comment": "Tendency of heat content for a grid cell from all processes. 
Reported only for models that use conservative temperature as prognostic field.", - "dimensions": "longitude latitude olevel time", - "out_name": "ocontemptend", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "od443dust": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_optical_thickness_due_to_dust_ambient_aerosol_particles", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Optical thickness at 443 nm Dust", - "comment": "Balkanski - LSCE", - "dimensions": "longitude latitude time", - "out_name": "od443dust", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "od550aerso": { - "modeling_realm": "atmos", - "standard_name": "missing", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Stratospheric Optical depth at 550 nm (all aerosols) 2D-field (here we limit the computation of OD to the stratosphere only)", - "comment": "Balkanski - LSCE", - "dimensions": "longitude latitude time", - "out_name": "od550aerso", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "od550aerstrat": { - "modeling_realm": "atmos", - "standard_name": "strat_aerosol_optical_depth", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Stratospheric Aerosol Optical Depth at 550nm", - "comment": "From tropopause to stratopause as defined by the model", - "dimensions": "longitude latitude time", - "out_name": "od550aerstrat", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "od550so4so": { - "modeling_realm": "atmos", - "standard_name": "missing", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Stratospheric Optical depth at 550 nm (sulphate only) 2D-field (here we limit the computation of OD to the stratosphere only)", - "comment": "Balkanski - LSCE", - "dimensions": "longitude latitude time", - "out_name": "od550so4so", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "od865dust": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_optical_thickness_due_to_dust_ambient_aerosol_particles", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Optical thickness at 865 nm Dust", - "comment": "Balkanski - LSCE", - "dimensions": "longitude latitude time", - "out_name": "od865dust", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "opottempdiff": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_potential_temperature_expressed_as_heat_content_due_to_parameterized_dianeutral_mixing", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water potential temperature expressed as heat content due to parameterized dianeutral mixing", - "comment": "Tendency of heat content for a grid cell from parameterized dianeutral mixing. 
Reported only for models that use potential temperature as prognostic field.", - "dimensions": "longitude latitude olevel time", - "out_name": "opottempdiff", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "opottempmint": { - "modeling_realm": "ocean", - "standard_name": "integral_wrt_depth_of_product_of_sea_water_density_and_potential_temperature", - "units": "degC kg m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "integral wrt depth of product of sea water density and potential temperature", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "opottempmint", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "opottemppadvect": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_potential_temperature_expressed_as_heat_content_due_to_parameterized_eddy_advection", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water potential temperature expressed as heat content due to parameterized eddy advection", - "comment": "Tendency of heat content for a grid cell from parameterized eddy advection (any form of eddy advection). Reported only for models that use potential temperature as prognostic field.", - "dimensions": "longitude latitude olevel time", - "out_name": "opottemppadvect", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "opottemppmdiff": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_potential_temperature_expressed_as_heat_content_due_to_parameterized_mesoscale_diffusion", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water potential temperature expressed as heat content due to parameterized mesoscale diffusion", - "comment": "Tendency of heat content for a grid cell from parameterized mesoscale eddy diffusion. Reported only for models that use potential temperature as prognostic field.", - "dimensions": "longitude latitude olevel time", - "out_name": "opottemppmdiff", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "opottemppsmadvect": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_potential_temperature_expressed_as_heat_content_due_to_parameterized_submesoscale_advection", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water potential temperature expressed as heat content due to parameterized submesoscale advection", - "comment": "Tendency of heat content for a grid cell from parameterized submesoscale eddy advection. 
Reported only for models that use potential temperature as prognostic field.", - "dimensions": "longitude latitude olevel time", - "out_name": "opottemppsmadvect", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "opottemprmadvect": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_potential_temperature_expressed_as_heat_content_due_to_residual_mean_advection", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water potential temperature expressed as heat content due to residual mean advection", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "opottemprmadvect", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "opottemptend": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_potential_temperature_expressed_as_heat_content", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water potential temperature expressed as heat content", - "comment": "Tendency of heat content for a grid cell from all processes. Reported only for models that use potential temperature as prognostic field.", - "dimensions": "longitude latitude olevel time", - "out_name": "opottemptend", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "orog": { - "modeling_realm": "land", - "standard_name": "surface_altitude", - "units": "m", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Altitude", - "comment": "The surface called 'surface' means the lower boundary of the atmosphere. Altitude is the (geometric) height above the geoid, which is the reference geopotential surface. 
The geoid is similar to mean sea level.", - "dimensions": "longitude latitude time", - "out_name": "orog", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "osaltdiff": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_salinity_expressed_as_salt_content_due_to_parameterized_dianeutral_mixing", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water salinity expressed as salt content due to parameterized dianeutral mixing", - "comment": "Tendency of salt content for a grid cell from parameterized dianeutral mixing.", - "dimensions": "longitude latitude olevel time", - "out_name": "osaltdiff", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "osaltpadvect": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_salinity_expressed_as_salt_content_due_to_parameterized_eddy_advection", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water salinity expressed as salt content due to parameterized eddy advection", - "comment": "Tendency of salt content for a grid cell from parameterized eddy advection (any form of eddy advection).", - "dimensions": "longitude latitude olevel time", - "out_name": "osaltpadvect", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "osaltpmdiff": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_salinity_expressed_as_salt_content_due_to_parameterized_mesoscale_diffusion", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water salinity expressed as salt content due to parameterized mesoscale diffusion", - "comment": "Tendency of salt content for a grid cell from parameterized mesoscale eddy diffusion.", - "dimensions": "longitude latitude olevel time", - "out_name": "osaltpmdiff", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "osaltpsmadvect": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_salinity_expressed_as_salt_content_due_to_parameterized_submesoscale_advection", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water salinity expressed as salt content due to parameterized submesoscale advection", - "comment": "Tendency of salt content for a grid cell from parameterized submesoscale eddy advection.", - "dimensions": "longitude latitude olevel time", - "out_name": "osaltpsmadvect", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "osaltrmadvect": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_salinity_expressed_as_salt_content_due_to_residual_mean_advection", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water salinity expressed as salt content due to residual mean advection", - 
"comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "osaltrmadvect", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "osalttend": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_salinity_expressed_as_salt_content", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water salinity expressed as salt content", - "comment": "Tendency of salt content for a grid cell from all processes.", - "dimensions": "longitude latitude olevel time", - "out_name": "osalttend", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pabigthetao": { - "modeling_realm": "ocean", - "standard_name": "sea_water_added_conservative_temperature", - "units": "degC", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Sea Water Added Conservative Temperature", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "pabigthetao", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "parasolRefl": { - "modeling_realm": "atmos", - "standard_name": "toa_bidirectional_reflectance", - "units": "1.0", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacella", - "long_name": "PARASOL Reflectance", - "comment": "Simulated reflectance from PARASOL as seen at the top of the atmosphere for 5 solar zenith angles. Valid only over ocean and for one viewing direction (viewing zenith angle of 30 degrees and relative azimuth angle 320 degrees).", - "dimensions": "longitude latitude sza5 time", - "out_name": "parasolRefl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pastureFracC3": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "C3 Pasture Area Percentage", - "comment": "Percentage of entire grid cell covered by C3 pasture", - "dimensions": "longitude latitude time typec3pft typepasture", - "out_name": "pastureFracC3", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pastureFracC4": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "C4 Pasture Area Percentage", - "comment": "Percentage of entire grid cell covered by C4 pasture", - "dimensions": "longitude latitude time typec4pft typepasture", - "out_name": "pastureFracC4", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pathetao": { - "modeling_realm": "ocean", - "standard_name": "sea_water_additional_potential_temperature", - "units": "degC", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "pathetao", - "type": "", - "positive": "", - "valid_min": "", - 
"valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ppcalc": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production_by_calcareous_phytoplankton", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Net Primary Mole Productivity of Carbon by Calcareous Phytoplankton", - "comment": "Primary (organic carbon) production by the calcite-producing phytoplankton component alone", - "dimensions": "longitude latitude olevel time", - "out_name": "ppcalc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ppdiat": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production_by_diatoms", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Net Primary Organic Carbon Production by Diatoms", - "comment": "Primary (organic carbon) production by the diatom component alone", - "dimensions": "longitude latitude olevel time", - "out_name": "ppdiat", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ppdiaz": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production_by_diazotrophs", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Net Primary Mole Productivity of Carbon by Diazotrophs", - "comment": "Primary (organic carbon) production by the diazotrophic phytoplankton component alone", - "dimensions": "longitude latitude olevel time", - "out_name": "ppdiaz", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ppmisc": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production_by_miscellaneous_phytoplankton", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Net Primary Organic Carbon Production by Other Phytoplankton", - "comment": "Primary (organic carbon) production by other phytoplankton components alone", - "dimensions": "longitude latitude olevel time", - "out_name": "ppmisc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pppico": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production_by_picophytoplankton", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Net Primary Mole Productivity of Carbon by Picophytoplankton", - "comment": "Primary (organic carbon) production by the picophytoplankton (<2 um) component alone", - "dimensions": "longitude latitude olevel time", - "out_name": 
"pppico", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prCrop": { - "modeling_realm": "atmos", - "standard_name": "precipitation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Precipitation over Crop Tile", - "comment": "includes both liquid and solid phases", - "dimensions": "longitude latitude time", - "out_name": "prCrop", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prbigthetao": { - "modeling_realm": "ocean", - "standard_name": "sea_water_redistributed_conservative_temperature", - "units": "degC", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Sea Water Redistributed Conservative Temperature", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "prbigthetao", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prhmax": { - "modeling_realm": "atmos", - "standard_name": "precipitation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean time: mean within hours time: maximum over hours", - "cell_measures": "area: areacella", - "long_name": "Maximum Hourly Precipitation Rate", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "prhmax", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prthetao": { - "modeling_realm": "ocean", - "standard_name": "sea_water_redistributed_potential_temperature", - "units": "degC", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "prthetao", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ps": { - "modeling_realm": "atmos", - "standard_name": "surface_air_pressure", - "units": "Pa", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Air Pressure", - "comment": "surface pressure (not mean sea-level pressure), 2-D field to calculate the 3-D pressure field from hybrid coordinates", - "dimensions": "longitude latitude time", - "out_name": "ps", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "raGrass": { - "modeling_realm": "land", - "standard_name": "plant_respiration_carbon_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where natural_grasses (comment: mask=grassFrac)", - "cell_measures": "area: areacella", - "long_name": "autotrophic respiration on grass tiles", - "comment": "Total RA of grass in the gridcell", - "dimensions": "longitude latitude time", - "out_name": "raGrass", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "raLeaf": { - "modeling_realm": "land", - "standard_name": "autotrophic_respiration_from_leaves", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total respiration from leaves", - "comment": "added for completeness with Ra_root", - "dimensions": "longitude latitude time", 
- "out_name": "raLeaf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "raLut": { - "modeling_realm": "land", - "standard_name": "plant_respiration_carbon_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where landuse", - "cell_measures": "area: areacella", - "long_name": "plant respiration on land use tile", - "comment": "", - "dimensions": "longitude latitude landUse time", - "out_name": "raLut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "raOther": { - "modeling_realm": "land", - "standard_name": "autotrophic_respiration", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total respiration from other pools (not leaves stem or roots)", - "comment": "added for completeness with Ra_root", - "dimensions": "longitude latitude time", - "out_name": "raOther", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "raRoot": { - "modeling_realm": "land", - "standard_name": "autotrophic_respiration_from_roots", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Respiration from Roots", - "comment": "Total autotrophic respiration from all belowground plant parts. This has benchmarking value because the sum of Rh and root respiration can be compared to observations of total soil respiration.", - "dimensions": "longitude latitude time", - "out_name": "raRoot", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "raShrub": { - "modeling_realm": "land", - "standard_name": "plant_respiration_carbon_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where shrubs (comment: mask=shrubFrac)", - "cell_measures": "area: areacella", - "long_name": "autotrophic respiration on Shrub tiles", - "comment": "Total RA of shrubs in the gridcell", - "dimensions": "longitude latitude time", - "out_name": "raShrub", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "raStem": { - "modeling_realm": "land", - "standard_name": "autotrophic_respiration_from_stem", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Respiration from Stem", - "comment": "added for completeness with Ra_root", - "dimensions": "longitude latitude time", - "out_name": "raStem", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "raTree": { - "modeling_realm": "land", - "standard_name": "plant_respiration_carbon_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where trees (comment: mask=treeFrac)", - "cell_measures": "area: areacella", - "long_name": "autotrophic respiration on tree tiles", - "comment": "Total RA of trees in the gridcell", - "dimensions": "longitude latitude time", - "out_name": "raTree", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rac13": { - "modeling_realm": "land", - "standard_name": "plant_respiration_c13_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", 
- "cell_measures": "area: areacella", - "long_name": "Mass Flux of 13C into Atmosphere due to Autotrophic (Plant) Respiration on Land", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "rac13", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rac14": { - "modeling_realm": "land", - "standard_name": "plant_respiration_c14_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Mass Flux of 14C into Atmosphere due to Autotrophic (Plant) Respiration on Land", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "rac14", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rainmxrat27": { - "modeling_realm": "atmos", - "standard_name": "mass_fraction_of_rain_in_air", - "units": "1.0", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "rain_mixing_ratio", - "comment": "Rain mixing ratio", - "dimensions": "longitude latitude plev27 time", - "out_name": "rainmxrat", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "reffclic": { - "modeling_realm": "atmos", - "standard_name": "effective_radius_of_convective_cloud_ice_particle", - "units": "m", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Hydrometeor Effective Radius of Convective Cloud Ice", - "comment": "This is defined as the in-cloud ratio of the third moment over the second moment of the particle size distribution (obtained by considering only the cloudy portion of the grid cell).", - "dimensions": "longitude latitude alevel time", - "out_name": "reffclic", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "reffclis": { - "modeling_realm": "atmos", - "standard_name": "effective_radius_of_stratiform_cloud_ice_particle", - "units": "m", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Hydrometeor Effective Radius of Stratiform Cloud Ice", - "comment": "This is defined as the in-cloud ratio of the third moment over the second moment of the particle size distribution (obtained by considering only the cloudy portion of the grid cell).", - "dimensions": "longitude latitude alevel time", - "out_name": "reffclis", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "reffclwc": { - "modeling_realm": "atmos", - "standard_name": "effective_radius_of_convective_cloud_liquid_water_particle", - "units": "m", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Hydrometeor Effective Radius of Convective Cloud Liquid Water", - "comment": "Droplets are liquid. 
The effective radius is defined as the ratio of the third moment over the second moment of the particle size distribution and the time-mean should be calculated, weighting the individual samples by the cloudy fraction of the grid cell.", - "dimensions": "longitude latitude alevel time", - "out_name": "reffclwc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "reffclws": { - "modeling_realm": "atmos", - "standard_name": "effective_radius_of_stratiform_cloud_liquid_water_particle", - "units": "m", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Hydrometeor Effective Radius of Stratiform Cloud Liquid Water", - "comment": "Droplets are liquid. The effective radius is defined as the ratio of the third moment over the second moment of the particle size distribution and the time-mean should be calculated, weighting the individual samples by the cloudy fraction of the grid cell.", - "dimensions": "longitude latitude alevel time", - "out_name": "reffclws", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rhGrass": { - "modeling_realm": "land", - "standard_name": "heterotrophic_respiration_carbon_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where natural_grasses (comment: mask=grassFrac)", - "cell_measures": "area: areacella", - "long_name": "heterotrophic respiration on grass tiles", - "comment": "Total RH of grass in the gridcell", - "dimensions": "longitude latitude time", - "out_name": "rhGrass", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rhLitter": { - "modeling_realm": "land", - "standard_name": "heterotrophic_respiration_carbon_flux_from_litter", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass Flux into Atmosphere due to Heterotrophic Respiration from Litter on Land", - "comment": "Needed to calculate litter bulk turnover time. 
Includes respiration from CWD as well.", - "dimensions": "longitude latitude time", - "out_name": "rhLitter", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rhLut": { - "modeling_realm": "land", - "standard_name": "heterotrophic_respiration_carbon_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where landuse", - "cell_measures": "area: areacella", - "long_name": "soil heterotrophic respiration on land use tile", - "comment": "", - "dimensions": "longitude latitude landUse time", - "out_name": "rhLut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rhShrub": { - "modeling_realm": "land", - "standard_name": "heterotrophic_respiration_carbon_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where shrubs (comment: mask=shrubFrac)", - "cell_measures": "area: areacella", - "long_name": "heterotrophic respiration on Shrub tiles", - "comment": "Total RH of shrubs in the gridcell", - "dimensions": "longitude latitude time", - "out_name": "rhShrub", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rhSoil": { - "modeling_realm": "land", - "standard_name": "heterotrophic_respiration_carbon_flux_from_soil", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass Flux into Atmosphere due to Heterotrophic Respiration from Soil on Land", - "comment": "Needed to calculate soil bulk turnover time", - "dimensions": "longitude latitude time", - "out_name": "rhSoil", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rhTree": { - "modeling_realm": "land", - "standard_name": "heterotrophic_respiration_carbon_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where trees (comment: mask=treeFrac)", - "cell_measures": "area: areacella", - "long_name": "heterotrophic respiration on tree tiles", - "comment": "Total RH of trees in the gridcell", - "dimensions": "longitude latitude time", - "out_name": "rhTree", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rhc13": { - "modeling_realm": "land", - "standard_name": "heterotrophic_respiration_c13_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Mass Flux of 13C into Atmosphere due to Heterotrophic Respiration on Land", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "rhc13", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rhc14": { - "modeling_realm": "land", - "standard_name": "heterotrophic_respiration_c14_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Mass Flux of 14C into Atmosphere due to Heterotrophic Respiration on Land", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "rhc14", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rls": { - "modeling_realm": "atmos", - "standard_name": "surface_net_downward_longwave_flux", - "units": "W m-2", - 
"cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Net Longwave Surface Radiation", - "comment": "Net longwave surface radiation", - "dimensions": "longitude latitude time", - "out_name": "rls", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlusLut": { - "modeling_realm": "land", - "standard_name": "surface_upwelling_longwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean where landuse", - "cell_measures": "area: areacella", - "long_name": "Surface Upwelling Longwave on Land Use Tile", - "comment": "", - "dimensions": "longitude latitude landUse time", - "out_name": "rlusLut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsdoabsorb": { - "modeling_realm": "ocean", - "standard_name": "net_rate_of_absorption_of_shortwave_energy_in_ocean_layer", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "net rate of absorption of shortwave energy in ocean layer", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "rsdoabsorb", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsdscsdiff": { - "modeling_realm": "atmos", - "standard_name": "surface_diffuse_downwelling_shortwave_flux_in_air_assuming_clear_sky", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Diffuse Downwelling Clear Sky Shortwave Radiation", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "rsdscsdiff", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsdsdiff": { - "modeling_realm": "atmos", - "standard_name": "surface_diffuse_downwelling_shortwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Diffuse Downwelling Shortwave Radiation", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "rsdsdiff", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rss": { - "modeling_realm": "atmos", - "standard_name": "surface_net_downward_shortwave_flux", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Net Shortwave Surface Radiation", - "comment": "Net downward shortwave radiation at the surface", - "dimensions": "longitude latitude time", - "out_name": "rss", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsusLut": { - "modeling_realm": "land", - "standard_name": "surface_upwelling_shortwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean where landuse", - "cell_measures": "area: areacella", - "long_name": "Surface Upwelling Shortwave on Land Use Tile", - "comment": "", - "dimensions": "longitude latitude landUse time", - "out_name": "rsusLut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sconcdust": { - "modeling_realm": "atmos", - "standard_name": "mass_concentration_of_dust_dry_aerosol_in_air", - "units": "kg m-3", - 
"cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Concentration of Dust", - "comment": "mass concentration of dust dry aerosol in air in model lowest layer", - "dimensions": "longitude latitude time", - "out_name": "sconcdust", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sconcso4": { - "modeling_realm": "atmos", - "standard_name": "mass_concentration_of_sulfate_dry_aerosol_in_air", - "units": "kg m-3", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Concentration of SO4", - "comment": "mass concentration of sulfate dry aerosol in air in model lowest layer.", - "dimensions": "longitude latitude time", - "out_name": "sconcso4", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sconcss": { - "modeling_realm": "atmos", - "standard_name": "mass_concentration_of_seasalt_dry_aerosol_in_air", - "units": "kg m-3", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Concentration of Seasalt", - "comment": "mass concentration of seasalt dry aerosol in air in model lowest layer", - "dimensions": "longitude latitude time", - "out_name": "sconcss", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sedustCI": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_sedimentation", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Sedimentation Flux of dust mode coarse insoluble", - "comment": "Balkanski - LSCE", - "dimensions": "longitude latitude time", - "out_name": "sedustCI", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sfcWindmax": { - "modeling_realm": "atmos", - "standard_name": "wind_speed", - "units": "m s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Daily Maximum Near-Surface Wind Speed", - "comment": "Daily maximum near-surface (usually, 10 meters) wind speed.", - "dimensions": "longitude latitude time height10m", - "out_name": "sfcWindmax", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "snowmxrat27": { - "modeling_realm": "atmos", - "standard_name": "mass_fraction_of_snow_in_air", - "units": "1.0", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "snow_mixing_ratio", - "comment": "Snow mixing ratio", - "dimensions": "longitude latitude plev27 time", - "out_name": "snowmxrat", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "somint": { - "modeling_realm": "ocean", - "standard_name": "integral_wrt_depth_of_product_of_sea_water_density_and_prognostic_salinity", - "units": "1e-3 kg m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "integral wrt depth of product of sea water density and salinity", - "comment": "Full column sum of density*cell thickness*prognostic salinity. 
If the model is Boussinesq, then use Boussinesq reference density for the density factor.", - "dimensions": "longitude latitude time", - "out_name": "somint", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sweLut": { - "modeling_realm": "land", - "standard_name": "missing", - "units": "m", - "cell_methods": "area: time: mean where landuse", - "cell_measures": "area: areacella", - "long_name": "snow water equivalent on land use tile", - "comment": "", - "dimensions": "longitude latitude landUse time", - "out_name": "sweLut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "swsrfasdust": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_all_sky_surface_shortwave_flux_due_to_dust_ambient_aerosol_particles", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "All-sky Surface Shortwave radiative flux due to Dust", - "comment": "Balkanski - LSCE", - "dimensions": "longitude latitude time", - "out_name": "swsrfasdust", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "swsrfcsdust": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_clear_sky_surface_shortwave_flux_due_to_dust_ambient_aerosol_particles", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Clear-sky Surface Shortwave radiative flux due to Dust", - "comment": "Balkanski - LSCE", - "dimensions": "longitude latitude time", - "out_name": "swsrfcsdust", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "swtoaasdust": { - "modeling_realm": "atmos", - "standard_name": "toa_instantaneous_shortwave_forcing", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "all sky sw-rf dust at toa", - "comment": "proposed name: toa_instantaneous_shortwave_forcing_due_to_dust_ambient_aerosol", - "dimensions": "longitude latitude time", - "out_name": "swtoaasdust", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "swtoacsdust": { - "modeling_realm": "atmos", - "standard_name": "toa_instantaneous_shortwave_forcing", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "clear sky sw-rf dust at toa", - "comment": "proposed name: toa_instantaneous_shortwave_forcing_due_to_dust_ambient_aerosol_assuming_clear_sky", - "dimensions": "longitude latitude time", - "out_name": "swtoacsdust", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "t2": { - "modeling_realm": "atmos", - "standard_name": "square_of_air_temperature", - "units": "K2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "square_of_air_temperature", - "comment": "Air temperature squared", - "dimensions": "longitude latitude alevel time", - "out_name": "t2", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "t20d": { - "modeling_realm": "ocean", - "standard_name": "depth_of_isosurface_of_sea_water_potential_temperature", - "units": "m", - "cell_methods": "area: mean 
where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "20C isotherm depth", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "t20d", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tSoilPools": { - "modeling_realm": "land", - "standard_name": "soil_carbon_turnover_rate_by_pool", - "units": "s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "turnover rate of each model soil carbon pool", - "comment": "defined as 1/(turnover time) for each soil pool. Use the same pools reported under cSoilPools", - "dimensions": "longitude latitude time", - "out_name": "tSoilPools", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ta27": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Air Temperature", - "comment": "Air Temperature", - "dimensions": "longitude latitude plev27 time", - "out_name": "ta", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tasLut": { - "modeling_realm": "land", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: time: mean where landuse", - "cell_measures": "area: areacella", - "long_name": "near-surface air temperature (2m above displacement height, i.e. t_ref) on land use tile", - "comment": "", - "dimensions": "longitude latitude landUse time height2m", - "out_name": "tasLut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tasmaxCrop": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: mean time: maximum within days time: mean over days", - "cell_measures": "area: areacella", - "long_name": "Daily Maximum Near-Surface Air Temperature over Crop Tile", - "comment": "maximum near-surface (usually, 2 meter) air temperature (add cell_method attribute 'time: max')", - "dimensions": "longitude latitude time height2m", - "out_name": "tasmaxCrop", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tasminCrop": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: mean time: minimum within days time: mean over days", - "cell_measures": "area: areacella", - "long_name": "Daily Minimum Near-Surface Air Temperature over Crop Tile", - "comment": "minimum near-surface (usually, 2 meter) air temperature (add cell_method attribute 'time: min')", - "dimensions": "longitude latitude time height2m", - "out_name": "tasminCrop", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tdps": { - "modeling_realm": "atmos", - "standard_name": "dew_point_temperature", - "units": "K", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "2m dewpoint temperature", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "tdps", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "thetaot": { - "modeling_realm": "ocean", - "standard_name": 
"sea_water_potential_temperature", - "units": "degC", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Vertically Averaged Sea Water Potential Temperature", - "comment": "Vertical average of the sea water potential temperature through the whole ocean depth", - "dimensions": "longitude latitude time", - "out_name": "thetaot", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "thetaot2000": { - "modeling_realm": "ocean", - "standard_name": "sea_water_potential_temperature", - "units": "degC", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Depth average potential temperature of upper 2000m", - "comment": "Upper 2000m, 2D field", - "dimensions": "longitude latitude time depth2000m", - "out_name": "thetaot2000", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "thetaot300": { - "modeling_realm": "ocean", - "standard_name": "sea_water_potential_temperature", - "units": "degC", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Depth average potential temperature of upper 300m", - "comment": "Upper 300m, 2D field", - "dimensions": "longitude latitude time depth300m", - "out_name": "thetaot300", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "thetaot700": { - "modeling_realm": "ocean", - "standard_name": "sea_water_potential_temperature", - "units": "degC", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Depth average potential temperature of upper 700m", - "comment": "Upper 700m, 2D field", - "dimensions": "longitude latitude time depth700m", - "out_name": "thetaot700", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tnhuspbl": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_specific_humidity_due_to_boundary_layer_mixing", - "units": "s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Tendency of Specific Humidity Due to Boundary Layer Mixing", - "comment": "Includes all boundary layer terms including diffusive terms.", - "dimensions": "longitude latitude alevel time", - "out_name": "tnhuspbl", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tnhusscp": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_specific_humidity_due_to_stratiform_clouds_and_precipitation", - "units": "s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Tendency of Specific Humidity Due to Stratiform Clouds and Precipitation", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "tnhusscp", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntd": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature_due_to_numerical_diffusion", - "units": "K s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Tendency of Air Temperature due to Numerical Diffusion", - "comment": "This includes any horizontal or vertical numerical temperature diffusion not associated with the 
parametrized moist physics or the resolved dynamics. For example, any vertical diffusion which is part of the boundary layer mixing scheme should be excluded, as should any diffusion which is included in the terms from the resolved dynamics. This term is required to check the closure of the temperature budget.", - "dimensions": "longitude latitude alevel time", - "out_name": "tntd", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntmp27": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature_due_to_model_physics", - "units": "K s-1", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Tendency of Air Temperature due to Model Physics", - "comment": "Tendency of air temperature due to model physics. This includes sources and sinks from parametrized physics (e.g. radiation, convection, boundary layer, stratiform condensation/evaporation, etc.). It excludes sources and sinks from resolved dynamics and numerical diffusion not associated with parametrized physics. For example, any vertical diffusion which is part of the boundary layer mixing scheme should be included, while numerical diffusion applied in addition to physics or resolved dynamics should be excluded. This term is required to check the closure of the heat budget.", - "dimensions": "longitude latitude plev27 time", - "out_name": "tntmp", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntpbl": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature_due_to_boundary_layer_mixing", - "units": "K s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Tendency of Air Temperature Due to Boundary Layer Mixing", - "comment": "Includes all boundary layer terms including diffusive terms.", - "dimensions": "longitude latitude alevel time", - "out_name": "tntpbl", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntrl27": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_air_temperature_due_to_longwave_heating", - "units": "K s-1", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Longwave heating rate", - "comment": "Tendency of air temperature due to longwave radiative heating", - "dimensions": "longitude latitude plev27 time", - "out_name": "tntrl", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntrlcs": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature_due_to_longwave_heating_assuming_clear_sky", - "units": "K s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Tendency of Air Temperature due to Clear Sky Longwave Radiative Heating", - "comment": "Tendency of Air Temperature due to Clear Sky Longwave Radiative Heating", - "dimensions": "longitude latitude alevel time", - "out_name": "tntrlcs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntrs27": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_air_temperature_due_to_shortwave_heating", - "units": "K s-1", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Shortwave heating rate", - "comment": 
"Tendency of air temperature due to shortwave radiative heating", - "dimensions": "longitude latitude plev27 time", - "out_name": "tntrs", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntrscs": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature_due_to_shortwave_heating_assuming_clear_sky", - "units": "K s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Tendency of Air Temperature due to Clear Sky Shortwave Radiative Heating", - "comment": "Tendency of Air Temperature due to Clear Sky Shortwave Radiative Heating", - "dimensions": "longitude latitude alevel time", - "out_name": "tntrscs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntscp": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature_due_to_stratiform_clouds_and_precipitation", - "units": "K s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Tendency of Air Temperature Due to Stratiform Clouds and Precipitation", - "comment": "", - "dimensions": "longitude latitude alevel time", - "out_name": "tntscp", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tomint": { - "modeling_realm": "ocean", - "standard_name": "integral_wrt_depth_of_product_of_sea_water_density_and_prognostic_temperature", - "units": "1e-3 kg m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "integral wrt depth of product of sea water density and prognostic temperature", - "comment": "Full column sum of density*cell thickness*prognostic temperature. 
If the model is Boussinesq, then use Boussinesq reference density for the density factor.", - "dimensions": "longitude latitude time", - "out_name": "tomint", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "treeFracBdlDcd": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "Broadleaf deciduous tree fraction", - "comment": "This is the fraction of the entire grid cell that is covered by broadleaf deciduous trees.", - "dimensions": "longitude latitude time typetreebd", - "out_name": "treeFracBdlDcd", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "treeFracBdlEvg": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "Broadleaf evergreen tree fraction", - "comment": "This is the fraction of the entire grid cell that is covered by broadleaf evergreen trees.", - "dimensions": "longitude latitude time typetreebe", - "out_name": "treeFracBdlEvg", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "treeFracNdlDcd": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "Needleleaf deciduous tree fraction", - "comment": "This is the fraction of the entire grid cell that is covered by needleleaf deciduous trees.", - "dimensions": "longitude latitude time typetreend", - "out_name": "treeFracNdlDcd", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "treeFracNdlEvg": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "Needleleaf evergreen tree fraction", - "comment": "This is the fraction of the entire grid cell that is covered by needleleaf evergreen trees.", - "dimensions": "longitude latitude time typetreene", - "out_name": "treeFracNdlEvg", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tslsiLut": { - "modeling_realm": "land", - "standard_name": "surface_temperature", - "units": "K", - "cell_methods": "area: time: mean where landuse", - "cell_measures": "area: areacella", - "long_name": "surface skin temperature on land use tile", - "comment": "temperature at which long-wave radiation emitted", - "dimensions": "longitude latitude landUse time", - "out_name": "tslsiLut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "twap": { - "modeling_realm": "atmos", - "standard_name": "product_of_omega_and_air_temperature", - "units": "K Pa s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "air_temperature_times_omega", - "comment": "Product of air temperature and pressure tendency", - "dimensions": "longitude latitude alevel time", - "out_name": "twap", - "type": "", - "positive": "", - 
"valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "u2": { - "modeling_realm": "atmos", - "standard_name": "square_of_eastward_wind", - "units": "m2 s-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "square_of_eastward_wind", - "comment": "u*u", - "dimensions": "longitude latitude alevel time", - "out_name": "u2", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ua27": { - "modeling_realm": "atmos", - "standard_name": "eastward_wind", - "units": "m s-1", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Eastward Wind", - "comment": "", - "dimensions": "longitude latitude plev27 time", - "out_name": "ua", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "uqint": { - "modeling_realm": "atmos", - "standard_name": "integral_of_product_of_eastward_wind_and_specific_humidity_wrt_height", - "units": "m2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "integrated_eastward_wind_times_humidity", - "comment": "Column integrated eastward wind times specific humidity", - "dimensions": "longitude latitude time", - "out_name": "uqint", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ut": { - "modeling_realm": "atmos", - "standard_name": "product_of_eastward_wind_and_air_temperature", - "units": "K m s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "air_temperature_times_eastward_wind", - "comment": "Product of air temperature and eastward wind", - "dimensions": "longitude latitude alevel time", - "out_name": "ut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "utendnogw": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_eastward_wind_due_to_nonorographic_gravity_wave_drag", - "units": "m s-2", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "u-tendency nonorographic gravity wave drag", - "comment": "Tendency of the eastward wind by parameterized nonorographic gravity waves.", - "dimensions": "longitude latitude plev19 time", - "out_name": "utendnogw", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "utendogw": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_eastward_wind_due_to_orographic_gravity_wave_drag", - "units": "m s-2", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "u-tendency orographic gravity wave drag", - "comment": "Tendency of the eastward wind by parameterized orographic gravity waves.", - "dimensions": "longitude latitude plev19 time", - "out_name": "utendogw", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "uv": { - "modeling_realm": "atmos", - "standard_name": "product_of_eastward_wind_and_northward_wind", - "units": "m2 s-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "eastward_wind_times_northward_wind", - "comment": "u*v", - "dimensions": "longitude latitude alevel time", - "out_name": "uv", - "type": "", - "positive": "", - "valid_min": "", - 
"valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "uwap": { - "modeling_realm": "atmos", - "standard_name": "product_of_eastward_wind_and_omega", - "units": "Pa m s-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "eastward_wind_times_omega", - "comment": "u*omega", - "dimensions": "longitude latitude alevel time", - "out_name": "uwap", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "v2": { - "modeling_realm": "atmos", - "standard_name": "square_of_northward_wind", - "units": "m2 s-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "square_of_northwardwind", - "comment": "v*v", - "dimensions": "longitude latitude alevel time", - "out_name": "v2", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "va27": { - "modeling_realm": "atmos", - "standard_name": "northward_wind", - "units": "m s-1", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Northward Wind", - "comment": "", - "dimensions": "longitude latitude plev27 time", - "out_name": "va", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vegFrac": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "Total vegetated fraction", - "comment": "fraction of grid cell that is covered by vegetation.This SHOULD be the sum of tree, grass, crop and shrub fractions.", - "dimensions": "longitude latitude time typeveg", - "out_name": "vegFrac", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vegHeight": { - "modeling_realm": "land", - "standard_name": "canopy_height", - "units": "m", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "canopy height", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "vegHeight", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vegHeightCrop": { - "modeling_realm": "land", - "standard_name": "canopy_height", - "units": "m", - "cell_methods": "area: time: mean where crops (comment: mask=cropFrac)", - "cell_measures": "area: areacella", - "long_name": "Vegetation height averaged over the crop fraction of a grid cell.", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "vegHeightCrop", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vegHeightGrass": { - "modeling_realm": "land", - "standard_name": "canopy_height", - "units": "m", - "cell_methods": "area: time: mean where natural_grasses (comment: mask=grassFrac)", - "cell_measures": "area: areacella", - "long_name": "Vegetation height averaged over the grass fraction of a grid cell.", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "vegHeightGrass", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vegHeightPasture": { - "modeling_realm": "land", - "standard_name": "canopy_height", - "units": "m", - 
"cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Vegetation height averaged over the pasture fraction of a grid cell.", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "vegHeightPasture", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vegHeightShrub": { - "modeling_realm": "land", - "standard_name": "canopy_height", - "units": "m", - "cell_methods": "area: time: mean where shrubs (comment: mask=shrubFrac)", - "cell_measures": "area: areacella", - "long_name": "Vegetation height averaged over the shrub fraction of a grid cell.", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "vegHeightShrub", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vegHeightTree": { - "modeling_realm": "land", - "standard_name": "canopy_height", - "units": "m", - "cell_methods": "area: time: mean where trees (comment: mask=treeFrac)", - "cell_measures": "area: areacella", - "long_name": "Vegetation height averaged over the tree fraction of a grid cell.", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "vegHeightTree", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vqint": { - "modeling_realm": "atmos", - "standard_name": "integral_of_product_of_northward_wind_and_specific_humidity_wrt_height", - "units": "m2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "integrated_northward_wind_times_humidity", - "comment": "Column integrated northward wind times specific humidity", - "dimensions": "longitude latitude time", - "out_name": "vqint", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vt": { - "modeling_realm": "atmos", - "standard_name": "product_of_northward_wind_and_air_temperature", - "units": "K m s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "air_temperature_times_northward_wind", - "comment": "Product of air temperature and northward wind", - "dimensions": "longitude latitude alevel time", - "out_name": "vt", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vtendnogw": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_northward_wind_due_to_nonorographic_gravity_wave_drag", - "units": "m s-2", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "v-tendency nonorographic gravity wave drag", - "comment": "Tendency of the northward wind by parameterized nonorographic gravity waves. 
(Note that CF name tables only have a general northward tendency for all gravity waves, and we need it separated by type.)", - "dimensions": "longitude latitude plev19 time", - "out_name": "vtendnogw", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vtendogw": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_northward_wind_due_to_orographic_gravity_wave_drag", - "units": "m s-2", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "v-tendency orographic gravity wave drag", - "comment": "Tendency of the northward wind by parameterized orographic gravity waves. (Note that CF name tables only have a general northward tendency for all gravity waves, and we need it separated by type.)", - "dimensions": "longitude latitude plev19 time", - "out_name": "vtendogw", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vwap": { - "modeling_realm": "atmos", - "standard_name": "product_of_northward_wind_and_omega", - "units": "Pa m s-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "northward_wind_times_omega", - "comment": "v*omega", - "dimensions": "longitude latitude alevel time", - "out_name": "vwap", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wap": { - "modeling_realm": "atmos", - "standard_name": "lagrangian_tendency_of_air_pressure", - "units": "Pa s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "omega (=dp/dt)", - "comment": "Omega (vertical velocity in pressure coordinates, positive downwards)", - "dimensions": "longitude latitude alevel time", - "out_name": "wap", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wap2": { - "modeling_realm": "atmos", - "standard_name": "square_of_lagrangian_tendency_of_air_pressure", - "units": "Pa2 s-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "square_of_omega", - "comment": "omega*omega", - "dimensions": "longitude latitude alevel time", - "out_name": "wap2", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "waterDpth": { - "modeling_realm": "land", - "standard_name": "water_table_depth", - "units": "m", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Water table depth from surface.", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "waterDpth", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wetlandCH4": { - "modeling_realm": "land", - "standard_name": "wetland_methane_emissions", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Grid averaged methane emissions from wetlands", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "wetlandCH4", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wetlandCH4cons": { - "modeling_realm": "land", - "standard_name": "wetland_methane_consumption", - "units": "kg m-2 s-1", - "cell_methods": "area: mean 
where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Grid averaged methane consumption (methanotrophy) from wetlands", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "wetlandCH4cons", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wetlandCH4prod": { - "modeling_realm": "land", - "standard_name": "wetland_methane_production", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Grid averaged methane production (methanogenesis) from wetlands", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "wetlandCH4prod", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wetlandFrac": { - "modeling_realm": "land", - "standard_name": "wetland_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "Fraction of a grid cell covered by wetland.", - "comment": "Report only one year if specified fraction is used, or time series if values are determined dynamically.", - "dimensions": "longitude latitude time typewetla", - "out_name": "wetlandFrac", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "xgwdparam": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_eastward_stress_due_to_gravity_wave_drag", - "units": "Pa", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "x_gravity_wave_drag_param", - "comment": "Parameterised x-component of gravity wave drag", - "dimensions": "longitude latitude alevel time", - "out_name": "xgwdparam", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ygwdparam": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_northward_stress_due_to_gravity_wave_drag", - "units": "Pa", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "y_gravity_wave_drag_param", - "comment": "Parameterised y-component of gravity wave drag", - "dimensions": "longitude latitude alevel time", - "out_name": "ygwdparam", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zg27": { - "modeling_realm": "atmos", - "standard_name": "geopotential_height", - "units": "m", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Geopotential Height", - "comment": "", - "dimensions": "longitude latitude plev27 time", - "out_name": "zg", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zoomeso": { - "modeling_realm": "ocean", - "standard_name": "mole_concentration_of_mesozooplankton_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Mole Concentration of Mesozooplankton expressed as Carbon in Sea Water", - "comment": "carbon concentration from mesozooplankton (20-200 um) component alone", - "dimensions": "longitude latitude olevel time", - "out_name": "zoomeso", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "",
- "ok_max_mean_abs": "" - }, - "zoomicro": { - "modeling_realm": "ocean", - "standard_name": "mole_concentration_of_microzooplankton_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Mole Concentration of Microzooplankton expressed as Carbon in Sea Water", - "comment": "carbon concentration from the microzooplankton (<20 um) component alone", - "dimensions": "longitude latitude olevel time", - "out_name": "zoomicro", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zoomisc": { - "modeling_realm": "ocean", - "standard_name": "mole_concentration_of_miscellaneous_zooplankton_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Other Zooplankton Carbon Concentration", - "comment": "carbon from additional zooplankton component concentrations alone (e.g. Micro, meso). Since the models all have different numbers of components, this variable has been included to provide a check for intercomparison between models since some phytoplankton groups are supersets.", - "dimensions": "longitude latitude olevel time", - "out_name": "zoomisc", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_EmonZ.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_EmonZ.json deleted file mode 100644 index f81692549b..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_EmonZ.json +++ /dev/null @@ -1,494 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table EmonZ", - "realm": "atmos", - "frequency": "mon", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "30.00000", - "generic_levels": "alevel olevel", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "co2totalmass": { - "modeling_realm": "atmos", - "standard_name": "CO2_total_mass_in_atmos", - "units": "kg", - "cell_methods": "area: time: mean", - "cell_measures": "", - "long_name": "Globally integrated Carbon Mass in Atmosphere", - "comment": "globally integrated mass of carbon as CO2 in atmsophere. Report as a single number for all emissions-driven runs", - "dimensions": "time", - "out_name": "co2totalmass", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "epfy": { - "modeling_realm": "atmos", - "standard_name": "northward_eliassen_palm_flux_in_air", - "units": "m3 s-2", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Northward Component of the Eliassen-Palm Flux", - "comment": "Transformed Eulerian Mean Diagnostics Meridional component Fy of Eliassen-Palm (EP) flux (Fy, Fz) derived from 6hr or higher frequency fields (use daily fields or 12 hr fields if the 6 hr are not available). 
Please use the definitions given by equation 3.5.3a of Andrews, Holton and Leovy text book, but scaled by density to have units m3 s-2.", - "dimensions": "latitude plev39 time", - "out_name": "epfy", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "epfz": { - "modeling_realm": "atmos", - "standard_name": "upward_eliassen_palm_flux_in_air", - "units": "m3 s-2", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Upward Component of the Eliassen-Palm Flux", - "comment": "Transformed Eulerian Mean Diagnostics Vertical component Fz of the Eliassen-Palm (EP) flux (Fy, Fz) derived from 6hr or higher frequency fields (use daily fields or 12 hr fields if the 6 hr are not available). Please use the definitions given by equation 3.5.3b of Andrews, Holton and Leovy text book, but scaled by density to have units m3 s-2.", - "dimensions": "latitude plev39 time", - "out_name": "epfz", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "jo2": { - "modeling_realm": "atmos", - "standard_name": "photolysis_rate_of_molecular_oxygen", - "units": "s-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "photolysis rate of O2", - "comment": "rate of o2 -> o1d+o", - "dimensions": "latitude plev39 time", - "out_name": "jo2", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "jo3": { - "modeling_realm": "atmos", - "standard_name": "photolysis_rate_of_ozone", - "units": "s-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "photolysis rate of O3", - "comment": "sum of rates o3 -> o1d+o2 and o3 -> o+o2", - "dimensions": "latitude plev39 time", - "out_name": "jo3", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "oxloss": { - "modeling_realm": "atmosChem", - "standard_name": "tendency_of_mole_concentration_of_ozone_and_atomic_oxygen_and_1D_oxygen_atom_due_to_chemical_destruction", - "units": "mol m-3 s-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "total Ox loss rate", - "comment": "total chemical loss rate for o+o1d+o3", - "dimensions": "latitude plev39 time", - "out_name": "oxloss", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "oxprod": { - "modeling_realm": "atmosChem", - "standard_name": "tendency_of_mole_concentration_of_ozone_and_atomic_oxygen_and_1D_oxygen_atom_due_to_chemical_production_and_photolysis", - "units": "mol m-3 s-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "total Ox production rate", - "comment": "total production rate of o+o1d+o3 including o2 photolysis and all o3 producing reactions", - "dimensions": "latitude plev39 time", - "out_name": "oxprod", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sltbasin": { - "modeling_realm": "ocean", - "standard_name": "northward_ocean_salt_transport", - "units": "kg s-1", - "cell_methods": "longitude: mean (basin) time: mean", - "cell_measures": "area: areacella", - "long_name": "Northward Ocean Salt Transport", - "comment":
"function of latitude, basin", - "dimensions": "latitude basin time", - "out_name": "sltbasin", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sltnorth": { - "modeling_realm": "ocean", - "standard_name": "northward_ocean_salt_transport", - "units": "kg s-1", - "cell_methods": "longitude: mean (basin) time: mean", - "cell_measures": "area: areacella", - "long_name": "Northward Ocean Salt Transport", - "comment": "", - "dimensions": "latitude basin time", - "out_name": "sltnorth", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sltnortha": { - "modeling_realm": "ocean", - "standard_name": "northward_ocean_salt_transport", - "units": "kg s-1", - "cell_methods": "longitude: mean (basin) time: mean", - "cell_measures": "area: areacella", - "long_name": "Atlantic Northward Ocean Salt Transport", - "comment": "", - "dimensions": "latitude basin time", - "out_name": "sltnortha", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntc": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature_due_to_convection", - "units": "K s-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Tendency of Air Temperature due to Convection", - "comment": "Tendencies from cumulus convection scheme.", - "dimensions": "latitude plev39 time", - "out_name": "tntc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntmp": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature_due_to_model_physics", - "units": "K s-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Tendency of Air Temperature due to Model Physics", - "comment": "Tendency of air temperature due to model physics. This includes sources and sinks from parametrized physics (e.g. radiation, convection, boundary layer, stratiform condensation/evaporation, etc.). It excludes sources and sinks from resolved dynamics and numerical diffusion not associated with parametrized physics. For example, any vertical diffusion which is part of the boundary layer mixing scheme should be included, while numerical diffusion applied in addition to physics or resolved dynamics should be excluded. 
This term is required to check the closure of the heat budget.", - "dimensions": "latitude plev39 time", - "out_name": "tntmp", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntnogw": { - "modeling_realm": "atmos", - "standard_name": "temperature_tendency_due_to_dissipation_nonorographic_gravity_wave_drag", - "units": "K s-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "temperature tendency nonorographic gravity wave dissipation", - "comment": "Temperature tendency due to dissipation of parameterized nonorographic gravity waves.", - "dimensions": "latitude plev39 time", - "out_name": "tntnogw", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntogw": { - "modeling_realm": "atmos", - "standard_name": "temperature_tendency_due_to_dissipation_orographic_gravity_wave_drag", - "units": "K s-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "temperature tendency orographic gravity wave dissipation", - "comment": "Temperature tendency due to dissipation of parameterized orographic gravity waves.", - "dimensions": "latitude plev39 time", - "out_name": "tntogw", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntrl": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_air_temperature_due_to_longwave_heating", - "units": "K s-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Longwave heating rate", - "comment": "Tendency of air temperature due to longwave radiative heating", - "dimensions": "latitude plev39 time", - "out_name": "tntrl", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntrlcs": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature_due_to_longwave_heating_assuming_clear_sky", - "units": "K s-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Tendency of Air Temperature due to Clear Sky Longwave Radiative Heating", - "comment": "Tendency of Air Temperature due to Clear Sky Longwave Radiative Heating", - "dimensions": "latitude plev39 time", - "out_name": "tntrlcs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntrs": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_air_temperature_due_to_shortwave_heating", - "units": "K s-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Shortwave heating rate", - "comment": "Tendency of air temperature due to shortwave radiative heating", - "dimensions": "latitude plev39 time", - "out_name": "tntrs", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntrscs": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature_due_to_shortwave_heating_assuming_clear_sky", - "units": "K s-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Tendency of Air Temperature due to Clear Sky Shortwave Radiative Heating", - "comment": "Tendency of Air Temperature due to Clear Sky 
Shortwave Radiative Heating", - "dimensions": "latitude plev39 time", - "out_name": "tntrscs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntscp": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature_due_to_stratiform_clouds_and_precipitation", - "units": "K s-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Tendency of Air Temperature Due to Stratiform Clouds and Precipitation", - "comment": "", - "dimensions": "latitude plev39 time", - "out_name": "tntscp", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "utendepfd": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_eastward_wind_due_to_eliassen_palm_flux_divergence", - "units": "m s-2", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Tendency of eastward wind due to Eliassen-Palm Flux divergence", - "comment": "Tendency of the zonal mean zonal wind due to the divergence of the Eliassen-Palm flux.", - "dimensions": "latitude plev39 time", - "out_name": "utendepfd", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "utendnogw": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_eastward_wind_due_to_nonorographic_gravity_wave_drag", - "units": "m s-2", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "u-tendency nonorographic gravity wave drag", - "comment": "Tendency of the eastward wind by parameterized nonorographic gravity waves.", - "dimensions": "latitude plev39 time", - "out_name": "utendnogw", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vmrox": { - "modeling_realm": "atmosChem", - "standard_name": "mole_fraction_of_ozone_and_atomic_oxygen_and_1D_oxygen_atom", - "units": "mol mol-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "mole fraction of o and o3 and o1d", - "comment": "Mole Fraction of Ox", - "dimensions": "latitude plev39 time", - "out_name": "vmrox", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vtem": { - "modeling_realm": "atmos", - "standard_name": "northward_transformed_eulerian_mean_air_velocity", - "units": "m s-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Transformed Eulerian Mean northward wind", - "comment": "Transformed Eulerian Mean Diagnostics v*, meridional component of the residual meridional circulation (v*, w*) derived from 6 hr or higher frequency data fields (use instantaneous daily fields or 12 hr fields if the 6 hr data are not available).", - "dimensions": "latitude plev39 time", - "out_name": "vtem", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vtendnogw": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_northward_wind_due_to_nonorographic_gravity_wave_drag", - "units": "m s-2", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "v-tendency nonorographic gravity wave drag", - "comment": "Tendency of the northward wind by parameterized 
nonorographic gravity waves. (Note that CF name tables only have a general northward tendency for all gravity waves, and we need it separated by type.)", - "dimensions": "latitude plev39 time", - "out_name": "vtendnogw", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wtem": { - "modeling_realm": "atmos", - "standard_name": "unset", - "units": "m s-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Transformed Eulerian Mean upward wind", - "comment": "Transformed Eulerian Mean Diagnostics w*, vertical component of the residual meridional circulation (v*, w*) derived from 6 hr or higher frequency data fields (use instantaneous daily fields or 12 hr fields if the 6 hr data are not available). Scale height: 6950 m", - "dimensions": "latitude plev39 time", - "out_name": "wtem", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "xgwdparam": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_eastward_stress_due_to_gravity_wave_drag", - "units": "Pa", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "x_gravity_wave_drag_param", - "comment": "Parameterised x-component of gravity wave drag", - "dimensions": "latitude plev39 time", - "out_name": "xgwdparam", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ygwdparam": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_northward_stress_due_to_gravity_wave_drag", - "units": "Pa", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "y_gravity_wave_drag_param", - "comment": "Parameterised y-component of gravity wave drag", - "dimensions": "latitude plev39 time", - "out_name": "ygwdparam", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zmtnt": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature_due_to_diabatic_processes", - "units": "K s-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Zonal Mean Diabatic Heating Rates", - "comment": "The diabatic heating rates due to all the processes that may change potential temperature", - "dimensions": "latitude plev39 time", - "out_name": "zmtnt", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Esubhr.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Esubhr.json deleted file mode 100644 index 4f6338ebda..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Esubhr.json +++ /dev/null @@ -1,579 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table Esubhr", - "realm": "atmos", - "frequency": "subhr", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "0.017361", - "generic_levels": "alevel", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "bldep": { - "modeling_realm": "aerosol", - "standard_name": "atmosphere_boundary_layer_thickness", - "units": "m", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Boundary Layer
Depth", - "comment": "Boundary layer depth", - "dimensions": "longitude latitude time1", - "out_name": "bldep", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfls": { - "modeling_realm": "atmos", - "standard_name": "surface_upward_latent_heat_flux", - "units": "W m-2", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Surface Upward Latent Heat Flux", - "comment": "", - "dimensions": "longitude latitude time1", - "out_name": "hfls", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfss": { - "modeling_realm": "atmos", - "standard_name": "surface_upward_sensible_heat_flux", - "units": "W m-2", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Surface Upward Sensible Heat Flux", - "comment": "", - "dimensions": "longitude latitude time1", - "out_name": "hfss", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hus": { - "modeling_realm": "atmos", - "standard_name": "specific_humidity", - "units": "1.0", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Specific Humidity", - "comment": "", - "dimensions": "longitude latitude alevel time1", - "out_name": "hus", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "huss": { - "modeling_realm": "atmos", - "standard_name": "specific_humidity", - "units": "1.0", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Near-Surface Specific Humidity", - "comment": "Near-surface (usually, 2 meter) specific humidity.", - "dimensions": "longitude latitude time1 height2m", - "out_name": "huss", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mc": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_net_upward_convective_mass_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Convective Mass Flux", - "comment": "The net mass flux should represent the difference between the updraft and downdraft components. 
The flux is computed as the mass divided by the area of the grid cell.", - "dimensions": "longitude latitude alevel time1", - "out_name": "mc", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pr": { - "modeling_realm": "atmos", - "standard_name": "precipitation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Precipitation", - "comment": "includes both liquid and solid phases", - "dimensions": "longitude latitude time1", - "out_name": "pr", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prc": { - "modeling_realm": "atmos", - "standard_name": "convective_precipitation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Convective Precipitation", - "comment": "Convective precipitation at surface; includes both liquid and solid phases.", - "dimensions": "longitude latitude time1", - "out_name": "prc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prw": { - "modeling_realm": "atmos", - "standard_name": "atmosphere_water_vapor_content", - "units": "kg m-2", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Water Vapor Path", - "comment": "vertically integrated through the atmospheric column", - "dimensions": "longitude latitude time1", - "out_name": "prw", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ps": { - "modeling_realm": "atmos", - "standard_name": "surface_air_pressure", - "units": "Pa", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Surface Pressure", - "comment": "surface pressure (not mean sea-level pressure), 2-D field to calculate the 3-D pressure field from hybrid coordinates", - "dimensions": "longitude latitude time1", - "out_name": "ps", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "reffclic": { - "modeling_realm": "atmos", - "standard_name": "effective_radius_of_convective_cloud_ice_particle", - "units": "m", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Hydrometeor Effective Radius of Convective Cloud Ice", - "comment": "This is defined as the in-cloud ratio of the third moment over the second moment of the particle size distribution (obtained by considering only the cloudy portion of the grid cell).", - "dimensions": "alevel site time1", - "out_name": "reffclic", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "reffclis": { - "modeling_realm": "atmos", - "standard_name": "effective_radius_of_stratiform_cloud_ice_particle", - "units": "m", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Hydrometeor Effective Radius of Stratiform Cloud Ice", - "comment": "This is defined as the in-cloud ratio of the third moment over the second moment of the particle size distribution (obtained by considering only the cloudy portion of the grid cell).", - "dimensions": "alevel site time1", - "out_name": "reffclis", - "type": "real", - "positive": "", - "valid_min": "", - 
"valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "reffclwc": { - "modeling_realm": "atmos", - "standard_name": "effective_radius_of_convective_cloud_liquid_water_particle", - "units": "m", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Hydrometeor Effective Radius of Convective Cloud Liquid Water", - "comment": "Droplets are liquid. The effective radius is defined as the ratio of the third moment over the second moment of the particle size distribution and the time-mean should be calculated, weighting the individual samples by the cloudy fraction of the grid cell.", - "dimensions": "alevel site time1", - "out_name": "reffclwc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "reffclws": { - "modeling_realm": "atmos", - "standard_name": "effective_radius_of_stratiform_cloud_liquid_water_particle", - "units": "m", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Hydrometeor Effective Radius of Stratiform Cloud Liquid Water", - "comment": "Droplets are liquid. The effective radius is defined as the ratio of the third moment over the second moment of the particle size distribution and the time-mean should be calculated, weighting the individual samples by the cloudy fraction of the grid cell.", - "dimensions": "alevel site time1", - "out_name": "reffclws", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlut": { - "modeling_realm": "atmos", - "standard_name": "toa_outgoing_longwave_flux", - "units": "W m-2", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "TOA Outgoing Longwave Radiatio", - "comment": "at the top of the atmosphere (to be compared with satellite measurements)", - "dimensions": "longitude latitude time1", - "out_name": "rlut", - "type": "", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsdt": { - "modeling_realm": "atmos", - "standard_name": "toa_incoming_shortwave_flux", - "units": "W m-2", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "TOA Incident Shortwave Radiation", - "comment": "Shortwave radiation incident at the top of the atmosphere", - "dimensions": "longitude latitude time1", - "out_name": "rsdt", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsut": { - "modeling_realm": "atmos", - "standard_name": "toa_outgoing_shortwave_flux", - "units": "W m-2", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Top-of-Atmosphere Outgoing Shortwave Radiation", - "comment": "at the top of the atmosphere", - "dimensions": "longitude latitude time1", - "out_name": "rsut", - "type": "", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ta": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Air Temperature", - "comment": "Air Temperature", - "dimensions": "longitude latitude alevel time1", - "out_name": "ta", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - 
"tas": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Surface Air Temperature", - "comment": "near-surface (usually, 2 meter) air temperature", - "dimensions": "longitude latitude time1 height2m", - "out_name": "tas", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tnhus": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_specific_humidity", - "units": "s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Tendency of Specific Humidity", - "comment": "Tendency of Specific Humidity", - "dimensions": "longitude latitude alevel time1", - "out_name": "tnhus", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tnhuspbl": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_specific_humidity_due_to_boundary_layer_mixing", - "units": "s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Tendency of Specific Humidity Due to Boundary Layer Mixing", - "comment": "Includes all boundary layer terms including diffusive terms.", - "dimensions": "alevel site time1", - "out_name": "tnhuspbl", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tnhusscp": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_specific_humidity_due_to_stratiform_clouds_and_precipitation", - "units": "s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Tendency of Specific Humidity Due to Stratiform Clouds and Precipitation", - "comment": "", - "dimensions": "alevel site time1", - "out_name": "tnhusscp", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tnt": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature", - "units": "K s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Tendency of Air Temperature", - "comment": "Tendency of Air Temperature", - "dimensions": "longitude latitude alevel time1", - "out_name": "tnt", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntd": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature_due_to_numerical_diffusion", - "units": "K s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Tendency of Air Temperature due to Numerical Diffusion", - "comment": "This includes any horizontal or vertical numerical temperature diffusion not associated with the parametrized moist physics or the resolved dynamics. For example, any vertical diffusion which is part of the boundary layer mixing scheme should be excluded, as should any diffusion which is included in the terms from the resolved dynamics. 
This term is required to check the closure of the temperature budget.", - "dimensions": "alevel site time1", - "out_name": "tntd", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntpbl": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature_due_to_boundary_layer_mixing", - "units": "K s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Tendency of Air Temperature Due to Boundary Layer Mixing", - "comment": "Includes all boundary layer terms including diffusive terms.", - "dimensions": "alevel site time1", - "out_name": "tntpbl", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntrl": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_air_temperature_due_to_longwave_heating", - "units": "K s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Longwave heating rate", - "comment": "Tendency of air temperature due to longwave radiative heating", - "dimensions": "alevel site time1", - "out_name": "tntrl", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntrlcs": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature_due_to_longwave_heating_assuming_clear_sky", - "units": "K s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Tendency of Air Temperature due to Clear Sky Longwave Radiative Heating", - "comment": "Tendency of Air Temperature due to Clear Sky Longwave Radiative Heating", - "dimensions": "alevel site time1", - "out_name": "tntrlcs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntrs": { - "modeling_realm": "aerosol", - "standard_name": "tendency_of_air_temperature_due_to_shortwave_heating", - "units": "K s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Shortwave heating rate", - "comment": "Tendency of air temperature due to shortwave radiative heating", - "dimensions": "alevel site time1", - "out_name": "tntrs", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntrscs": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature_due_to_shortwave_heating_assuming_clear_sky", - "units": "K s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Tendency of Air Temperature due to Clear Sky Shortwave Radiative Heating", - "comment": "Tendency of Air Temperature due to Clear Sky Shortwave Radiative Heating", - "dimensions": "alevel site time1", - "out_name": "tntrscs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tntscp": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_air_temperature_due_to_stratiform_clouds_and_precipitation", - "units": "K s-1", - "cell_methods": "area: point time: point", - "cell_measures": "", - "long_name": "Tendency of Air Temperature Due to Stratiform Clouds and Precipitation", - "comment": "", - "dimensions": "alevel site time1", - "out_name": "tntscp", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ua": { - 
"modeling_realm": "atmos", - "standard_name": "eastward_wind", - "units": "m s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Eastward Wind", - "comment": "", - "dimensions": "longitude latitude alevel time1", - "out_name": "ua", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "va": { - "modeling_realm": "atmos", - "standard_name": "northward_wind", - "units": "m s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "Northward Wind", - "comment": "", - "dimensions": "longitude latitude alevel time1", - "out_name": "va", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wap": { - "modeling_realm": "atmos", - "standard_name": "lagrangian_tendency_of_air_pressure", - "units": "Pa s-1", - "cell_methods": "area: mean time: point", - "cell_measures": "area: areacella", - "long_name": "omega (=dp/dt)", - "comment": "Omega (vertical velocity in pressure coordinates, positive downwards)", - "dimensions": "longitude latitude alevel time1", - "out_name": "wap", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Eyr.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Eyr.json deleted file mode 100644 index 26ca84d71a..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Eyr.json +++ /dev/null @@ -1,341 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table Eyr", - "realm": "land", - "frequency": "yr", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "365", - "generic_levels": "alevel olevel", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "baresoilFrac": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "Bare Soil Fraction", - "comment": "Percentage of entire grid cell that is covered by bare soil.", - "dimensions": "longitude latitude time typebare", - "out_name": "baresoilFrac", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cLitter": { - "modeling_realm": "land", - "standard_name": "litter_carbon_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: point", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass in Litter Pool", - "comment": "", - "dimensions": "longitude latitude time1", - "out_name": "cLitter", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cLitterLut": { - "modeling_realm": "land", - "standard_name": "litter_carbon_content", - "units": "kg m-2", - "cell_methods": "area: mean where landuse time: point", - "cell_measures": "area: areacella", - "long_name": "carbon in above and belowground litter pools on land use tiles", - "comment": "end of year values (not annual mean)", - "dimensions": "longitude latitude landUse time1", - "out_name": "cLitterLut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cProduct": 
{ - "modeling_realm": "land", - "standard_name": "carbon_content_of_products_of_anthropogenic_land_use_change", - "units": "kg m-2", - "cell_methods": "area: mean where land time: point", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass in Products of Land Use Change", - "comment": "Carbon mass per unit area in that has been removed from the environment through landuse change.", - "dimensions": "longitude latitude time1", - "out_name": "cProduct", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cProductLut": { - "modeling_realm": "land", - "standard_name": "carbon_content_in_wood_and_agricultural_products", - "units": "kg m-2", - "cell_methods": "area: mean where landuse time: point", - "cell_measures": "area: areacella", - "long_name": "wood and agricultural product pool carbon associated with land use tiles; examples of products include paper, cardboard, timber for construction, and crop harvest for food or fuel.", - "comment": "anthropogenic pools associated with land use tiles into which harvests and cleared carbon are deposited before release into atmosphere PLUS any remaining anthropogenic pools that may be associated with lands which were converted into land use tiles during reported period . Does NOT include residue which is deposited into soil or litter; end of year values (not annual mean)", - "dimensions": "longitude latitude landUse time1", - "out_name": "cProductLut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cSoil": { - "modeling_realm": "land", - "standard_name": "soil_carbon_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: point", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass in Soil Pool", - "comment": "Carbon mass in the full depth of the soil model.", - "dimensions": "longitude latitude time1", - "out_name": "cSoil", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cSoilLut": { - "modeling_realm": "land", - "standard_name": "soil_carbon_content", - "units": "kg m-2", - "cell_methods": "area: mean where landuse time: point", - "cell_measures": "area: areacella", - "long_name": "carbon in soil pool on land use tiles", - "comment": "end of year values (not annual mean)", - "dimensions": "longitude latitude landUse time1", - "out_name": "cSoilLut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cVeg": { - "modeling_realm": "land", - "standard_name": "vegetation_carbon_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: point", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass in Vegetation", - "comment": "Carbon mass per unit area in vegetation.", - "dimensions": "longitude latitude time1", - "out_name": "cVeg", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cVegLut": { - "modeling_realm": "land", - "standard_name": "vegetation_carbon_content", - "units": "kg m-2", - "cell_methods": "area: mean where landuse time: point", - "cell_measures": "area: areacella", - "long_name": "carbon in vegetation on land use tiles", - "comment": "end of year values (not annual mean)", - "dimensions": "longitude latitude landUse time1", - "out_name": "cVegLut", - "type": "", - 
"positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cropFrac": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "Crop Fraction", - "comment": "Percentage of entire grid cell that is covered by crop.", - "dimensions": "longitude latitude time typecrop", - "out_name": "cropFrac", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fracInLut": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: sum", - "cell_measures": "area: areacella", - "long_name": "annual gross fraction that was transferred into this tile from other land use tiles", - "comment": "cumulative fractional transitions over the year; note that fraction should be reported as fraction of atmospheric grid cell", - "dimensions": "longitude latitude landUse time", - "out_name": "fracInLut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fracLut": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: point", - "cell_measures": "area: areacella", - "long_name": "fraction of grid cell for each land use tile", - "comment": "end of year values (not annual mean); note that fraction should be reported as fraction of land grid cell (example: frac_lnd = 0.5, frac_ocn = 0.5, frac_crop_lnd = 0.2 (of land portion of grid cell), then frac_lut(crp) = 0.5*0.2 = 0.1)", - "dimensions": "longitude latitude landUse time1", - "out_name": "fracLut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fracOutLut": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: sum", - "cell_measures": "area: areacella", - "long_name": "annual gross fraction of land use tile that was transferred into other land use tiles", - "comment": "cumulative fractional transitions over the year; note that fraction should be reported as fraction of atmospheric grid cell", - "dimensions": "longitude latitude landUse time", - "out_name": "fracOutLut", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "grassFrac": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "Natural Grass Fraction", - "comment": "Percentage of entire grid cell that is covered by natural grass.", - "dimensions": "longitude latitude time typenatgr", - "out_name": "grassFrac", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "residualFrac": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "Fraction of Grid Cell that is Land but Neither Vegetation-Covered nor Bare Soil", - "comment": "Percentage of entire grid cell 
that is land and is covered by neither vegetation nor bare-soil (e.g., urban, ice, lakes, etc.)", - "dimensions": "longitude latitude time typeresidual", - "out_name": "residualFrac", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "shrubFrac": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "Shrub Fraction", - "comment": "Percentage of entire grid cell that is covered by shrub.", - "dimensions": "longitude latitude time typeshrub", - "out_name": "shrubFrac", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "treeFrac": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "Tree Cover Fraction", - "comment": "Percentage of entire grid cell that is covered by trees.", - "dimensions": "longitude latitude time typetree", - "out_name": "treeFrac", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vegFrac": { - "modeling_realm": "", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "Total vegetated fraction", - "comment": "fraction of grid cell that is covered by vegetation. This SHOULD be the sum of tree, grass, crop and shrub fractions.", - "dimensions": "longitude latitude time typeveg", - "out_name": "vegFrac", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zfullo": { - "modeling_realm": "ocean", - "standard_name": "depth_below_geoid", - "units": "m", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Depth Below Geoid of Ocean Layer", - "comment": "Depth below geoid", - "dimensions": "longitude latitude olevel time", - "out_name": "zfullo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_IfxAnt.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_IfxAnt.json deleted file mode 100644 index 2b4c727274..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_IfxAnt.json +++ /dev/null @@ -1,86 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table IfxAnt", - "realm": "landIce", - "frequency": "fx", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "", - "generic_levels": "", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "areacellg": { - "modeling_realm": "land", - "standard_name": "cell_area", - "units": "m2", - "cell_methods": "area: mean", - "cell_measures": "area: areacellg", - "long_name": "Grid Cell Area for Interpolated Grids", - "comment": "Area of the target grid (not the interpolated area of the source grid).", - "dimensions": "xant yant", - "out_name": "areacellg", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", -
"ok_max_mean_abs": "" - }, - "hfgeoubed": { - "modeling_realm": "landIce", - "standard_name": "upward_geothermal_heat_flux_at_ground_level_in_land_ice", - "units": "W m-2", - "cell_methods": "area: mean", - "cell_measures": "area: areacellg", - "long_name": "Geothermal Heat flux beneath land ice", - "comment": "Upward geothermal heat flux per unit area beneath land ice", - "dimensions": "xant yant", - "out_name": "hfgeoubed", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "lithk": { - "modeling_realm": "landIce", - "standard_name": "land_ice_thickness", - "units": "m", - "cell_methods": "area: mean", - "cell_measures": "area: areacellg", - "long_name": "Ice Sheet Thickness", - "comment": "The thickness of the ice sheet", - "dimensions": "xant yant", - "out_name": "lithk", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "topg": { - "modeling_realm": "landIce", - "standard_name": "bedrock_altitude", - "units": "m", - "cell_methods": "area: mean", - "cell_measures": "area: areacellg", - "long_name": "Bedrock Altitude", - "comment": "The bedrock topography beneath the land ice", - "dimensions": "xant yant", - "out_name": "topg", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_IfxGre.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_IfxGre.json deleted file mode 100644 index ba35dba324..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_IfxGre.json +++ /dev/null @@ -1,86 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table IfxGre", - "realm": "landIce", - "frequency": "fx", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "", - "generic_levels": "", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "areacellg": { - "modeling_realm": "land", - "standard_name": "cell_area", - "units": "m2", - "cell_methods": "area: mean", - "cell_measures": "area: areacellg", - "long_name": "Grid Cell Area for Interpolated Grids", - "comment": "Area of the target grid (not the interpolated area of the source grid).", - "dimensions": "xgre ygre", - "out_name": "areacellg", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfgeoubed": { - "modeling_realm": "landIce", - "standard_name": "upward_geothermal_heat_flux_at_ground_level_in_land_ice", - "units": "W m-2", - "cell_methods": "area: mean", - "cell_measures": "area: areacellg", - "long_name": "Geothermal Heat flux beneath land ice", - "comment": "Upward geothermal heat flux per unit area beneath land ice", - "dimensions": "xgre ygre", - "out_name": "hfgeoubed", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "lithk": { - "modeling_realm": "landIce", - "standard_name": "land_ice_thickness", - "units": "m", - "cell_methods": "area: mean", - "cell_measures": "area: areacellg", - "long_name": "Ice Sheet Thickness", - "comment": "The thickness of the ice sheet", - "dimensions": "xgre ygre", - "out_name": "lithk", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "topg": { - "modeling_realm": 
"landIce", - "standard_name": "bedrock_altitude", - "units": "m", - "cell_methods": "area: mean", - "cell_measures": "area: areacellg", - "long_name": "Bedrock Altitude", - "comment": "The bedrock topography beneath the land ice", - "dimensions": "xgre ygre", - "out_name": "topg", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_ImonAnt.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_ImonAnt.json deleted file mode 100644 index e680bc6b96..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_ImonAnt.json +++ /dev/null @@ -1,494 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table ImonAnt", - "realm": "atmos", - "frequency": "mon", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "30.00", - "generic_levels": "", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "acabf": { - "modeling_realm": "landIce", - "standard_name": "land_ice_surface_specific_mass_balance_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Surface Mass Balance flux", - "comment": "Specific mass balance means the net rate at which ice is added per unit area at the land ice surface. Computed as the total surface mass balance on the land ice portion of the grid cell divided by land ice area in the grid cell. A negative value means loss of ice", - "dimensions": "xant yant time", - "out_name": "acabf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfls": { - "modeling_realm": "atmos", - "standard_name": "surface_upward_latent_heat_flux", - "units": "W m-2", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Surface Upward Latent Heat Flux", - "comment": "", - "dimensions": "xant yant time", - "out_name": "hfls", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfss": { - "modeling_realm": "atmos", - "standard_name": "surface_upward_sensible_heat_flux", - "units": "W m-2", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Surface Upward Sensible Heat Flux", - "comment": "", - "dimensions": "xant yant time", - "out_name": "hfss", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "icem": { - "modeling_realm": "landIce", - "standard_name": "land_ice_surface_melt_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Surface ice melt flux", - "comment": "Loss of ice mass resulting from surface melting. 
Computed as the total surface melt water on the land ice portion of the grid cell divided by land ice area in the grid cell.", - "dimensions": "xant yant time", - "out_name": "icem", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "lialb": { - "modeling_realm": "landIce", - "standard_name": "surface_albedo", - "units": "1.0", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Land ice or snow albedo", - "comment": "Mean surface albedo of entire land ice covered part of the grid cell", - "dimensions": "xant yant time", - "out_name": "lialb", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "libmassbffl": { - "modeling_realm": "landIce", - "standard_name": "land_ice_basal_specific_mass_balance_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where floating_ice_shelf (comment: mask=sftflf)", - "cell_measures": "area: areacellg", - "long_name": "Basal specific mass balance flux of floating ice shelf", - "comment": "Specific mass balance means the net rate at which ice is added per unit area at the land ice base. A negative value means loss of ice. Computed as the total basal mass balance on the floating land ice (floating ice shelf) portion of the grid cell divided by floating land ice (floating ice shelf) area in the grid cell. Cell_methods: area: mean where floating_ice_shelf", - "dimensions": "xant yant time", - "out_name": "libmassbffl", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "libmassbfgr": { - "modeling_realm": "landIce", - "standard_name": "land_ice_basal_specific_mass_balance_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where grounded_ice_sheet (comment: mask=sfgrlf)", - "cell_measures": "area: areacellg", - "long_name": "Basal specific mass balance flux of grounded ice sheet", - "comment": "Specific mass balance means the net rate at which ice is added per unit area at the land ice base. A negative value means loss of ice. Computed as the total basal mass balance on the grounded land ice portion of the grid cell divided by grounded land ice area in the grid cell. Cell_methods: area: mean where grounded_ice_sheet", - "dimensions": "xant yant time", - "out_name": "libmassbfgr", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "licalvf": { - "modeling_realm": "landIce", - "standard_name": "land_ice_specific_mass_flux_due_to_calving", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Land ice calving flux", - "comment": "Loss of ice mass resulting from iceberg calving. 
Computed as the rate of mass loss by the ice shelf (in kg s-1) divided by the horizontal area of the ice sheet (m2) in the grid box.", - "dimensions": "xant yant time", - "out_name": "licalvf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "lifmassbf": { - "modeling_realm": "landIce", - "standard_name": "land_ice_specific_mass_flux_due_to_calving_and_ice_front_melting", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Land ice vertical front mass balance flux", - "comment": "Total mass balance at the ice front (or vertical margin). It includes both iceberg calving and melt on vertical ice front", - "dimensions": "xant yant time", - "out_name": "lifmassbf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "litempbotfl": { - "modeling_realm": "landIce", - "standard_name": "land_ice_basal_temperature", - "units": "K", - "cell_methods": "area: time: mean where floating_ice_shelf (comment: mask=sftflf)", - "cell_measures": "area: areacellg", - "long_name": "Basal temperature of floating ice shelf", - "comment": "Basal temperature that is used to force the ice sheet models, it is the temperature AT ice shelf-ocean interface. Cell_methods: area: mean where floating_ice_shelf", - "dimensions": "xant yant time", - "out_name": "litempbotfl", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "litempbotgr": { - "modeling_realm": "landIce", - "standard_name": "land_ice_basal_temperature", - "units": "K", - "cell_methods": "area: time: mean where grounded_ice_sheet (comment: mask=sfgrlf)", - "cell_measures": "area: areacellg", - "long_name": "Basal temperature of grounded ice sheet", - "comment": "Basal temperature that is used to force the ice sheet models, it is the temperature AT ice sheet - bedrock interface. Cell_methods: area: mean where grounded_ice_sheet", - "dimensions": "xant yant time", - "out_name": "litempbotgr", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "litemptop": { - "modeling_realm": "landIce", - "standard_name": "temperature_at_top_of_ice_sheet_model", - "units": "K", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Temperature at top of ice sheet model", - "comment": "Upper boundary temperature that is used to force ice sheet models. It is the temperature at the base of the snowpack models, and does not vary with seasons. Report surface temperature of ice sheet where snow thickness is zero", - "dimensions": "xant yant time", - "out_name": "litemptop", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrroLi": { - "modeling_realm": "atmos", - "standard_name": "land_ice_runoff_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Land Ice Runoff Flux", - "comment": "Runoff flux over land ice is the difference between any available liquid water in the snowpack less any refreezing. 
Computed as the sum of rainfall and melt of snow or ice less any refreezing or water retained in the snowpack", - "dimensions": "xant yant time", - "out_name": "mrroLi", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "orog": { - "modeling_realm": "land", - "standard_name": "surface_altitude", - "units": "m", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Surface Altitude", - "comment": "The surface called 'surface' means the lower boundary of the atmosphere. Altitude is the (geometric) height above the geoid, which is the reference geopotential surface. The geoid is similar to mean sea level.", - "dimensions": "xant yant time", - "out_name": "orog", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prra": { - "modeling_realm": "atmos", - "standard_name": "rainfall_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Rainfall Flux where Ice Free Ocean over Sea over Land Ice", - "comment": "", - "dimensions": "xant yant time", - "out_name": "prra", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prsn": { - "modeling_realm": "atmos", - "standard_name": "snowfall_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Snowfall Flux", - "comment": "at surface; includes precipitation of all forms of water in the solid phase", - "dimensions": "xant yant time", - "out_name": "prsn", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlds": { - "modeling_realm": "atmos", - "standard_name": "surface_downwelling_longwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Surface Downwelling Longwave Radiation", - "comment": "", - "dimensions": "xant yant time", - "out_name": "rlds", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlus": { - "modeling_realm": "atmos", - "standard_name": "surface_upwelling_longwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Surface Upwelling Longwave Radiation", - "comment": "", - "dimensions": "xant yant time", - "out_name": "rlus", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsds": { - "modeling_realm": "atmos", - "standard_name": "surface_downwelling_shortwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Surface Downwelling Shortwave Radiation", - "comment": "surface solar irradiance for UV calculations", - "dimensions": "xant yant time", - "out_name": "rsds", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsus": { - "modeling_realm": "atmos", - "standard_name": "surface_upwelling_shortwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean where ice_sheet", - 
"cell_measures": "area: areacellg", - "long_name": "Surface Upwelling Shortwave Radiation", - "comment": "", - "dimensions": "xant yant time", - "out_name": "rsus", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sbl": { - "modeling_realm": "landIce", - "standard_name": "surface_snow_and_ice_sublimation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Surface Snow and Ice Sublimation Flux", - "comment": "The snow and ice sublimation flux is the loss of snow and ice mass per unit area from the surface resulting from their direct conversion to water vapor that enters the atmosphere.", - "dimensions": "xant yant time", - "out_name": "sbl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "snc": { - "modeling_realm": "landIce land", - "standard_name": "surface_snow_area_fraction", - "units": "%", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "snow cover fraction", - "comment": "Fraction of each grid cell that is occupied by snow that rests on land portion of cell.", - "dimensions": "xant yant time", - "out_name": "snc", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "snicefreez": { - "modeling_realm": "landIce", - "standard_name": "surface_snow_and_ice_refreezing_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Surface snow and ice refreeze flux", - "comment": "Mass flux of surface meltwater which refreezes within the snowpack. Computed as the total refreezing on the land ice portion of the grid cell divided by land ice area in the grid cell.", - "dimensions": "xant yant time", - "out_name": "snicefreez", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "snicem": { - "modeling_realm": "landIce", - "standard_name": "surface_snow_and_ice_melt_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Surface snow and ice melt flux", - "comment": "Loss of snow and ice mass resulting from surface melting. 
Computed as the total surface melt on the land ice portion of the grid cell divided by land ice area in the grid cell.", - "dimensions": "xant yant time", - "out_name": "snicem", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "snm": { - "modeling_realm": "landIce land", - "standard_name": "surface_snow_melt_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Surface Snow Melt", - "comment": "The total surface snow melt rate on the land portion of the grid cell divided by the land area in the grid cell; report as zero for snow-free land regions and missing where there is no land.", - "dimensions": "xant yant time", - "out_name": "snm", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tas": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "", - "long_name": "Surface Air Temperature", - "comment": "near-surface (usually, 2 meter) air temperature", - "dimensions": "time height2m", - "out_name": "tas", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ts": { - "modeling_realm": "atmos", - "standard_name": "surface_temperature", - "units": "K", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Surface Temperature", - "comment": "Temperature of the lower boundary of the atmosphere", - "dimensions": "xant yant time", - "out_name": "ts", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tsn": { - "modeling_realm": "landIce land", - "standard_name": "temperature_in_surface_snow", - "units": "K", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Snow Internal Temperature", - "comment": "This temperature is averaged over all the snow in the grid cell that rests on land or land ice. When computing the time-mean here, the time samples, weighted by the mass of snow on the land portion of the grid cell, are accumulated and then divided by the sum of the weights. 
Reported as missing in regions free of snow on land.", - "dimensions": "xant yant time", - "out_name": "tsn", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_ImonGre.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_ImonGre.json deleted file mode 100644 index b95e791829..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_ImonGre.json +++ /dev/null @@ -1,494 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table ImonGre", - "realm": "atmos", - "frequency": "mon", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "30.00", - "generic_levels": "", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "acabf": { - "modeling_realm": "landIce", - "standard_name": "land_ice_surface_specific_mass_balance_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Surface Mass Balance flux", - "comment": "Specific mass balance means the net rate at which ice is added per unit area at the land ice surface. Computed as the total surface mass balance on the land ice portion of the grid cell divided by land ice area in the grid cell. A negative value means loss of ice", - "dimensions": "xgre ygre time", - "out_name": "acabf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfls": { - "modeling_realm": "atmos", - "standard_name": "surface_upward_latent_heat_flux", - "units": "W m-2", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Surface Upward Latent Heat Flux", - "comment": "", - "dimensions": "xgre ygre time", - "out_name": "hfls", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfss": { - "modeling_realm": "atmos", - "standard_name": "surface_upward_sensible_heat_flux", - "units": "W m-2", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Surface Upward Sensible Heat Flux", - "comment": "", - "dimensions": "xgre ygre time", - "out_name": "hfss", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "icem": { - "modeling_realm": "landIce", - "standard_name": "land_ice_surface_melt_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Surface ice melt flux", - "comment": "Loss of ice mass resulting from surface melting. 
Computed as the total surface melt water on the land ice portion of the grid cell divided by land ice area in the grid cell.", - "dimensions": "xgre ygre time", - "out_name": "icem", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "lialb": { - "modeling_realm": "landIce", - "standard_name": "surface_albedo", - "units": "1.0", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Land ice or snow albedo", - "comment": "Mean surface albedo of entire land ice covered part of the grid cell", - "dimensions": "xgre ygre time", - "out_name": "lialb", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "libmassbffl": { - "modeling_realm": "landIce", - "standard_name": "land_ice_basal_specific_mass_balance_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where floating_ice_shelf (comment: mask=sftflf)", - "cell_measures": "area: areacellg", - "long_name": "Basal specific mass balance flux of floating ice shelf", - "comment": "Specific mass balance means the net rate at which ice is added per unit area at the land ice base. A negative value means loss of ice. Computed as the total basal mass balance on the floating land ice (floating ice shelf) portion of the grid cell divided by floating land ice (floating ice shelf) area in the grid cell. Cell_methods: area: mean where floating_ice_shelf", - "dimensions": "xgre ygre time", - "out_name": "libmassbffl", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "libmassbfgr": { - "modeling_realm": "landIce", - "standard_name": "land_ice_basal_specific_mass_balance_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where grounded_ice_sheet (comment: mask=sfgrlf)", - "cell_measures": "area: areacellg", - "long_name": "Basal specific mass balance flux of grounded ice sheet", - "comment": "Specific mass balance means the net rate at which ice is added per unit area at the land ice base. A negative value means loss of ice. Computed as the total basal mass balance on the grounded land ice portion of the grid cell divided by grounded land ice area in the grid cell. Cell_methods: area: mean where grounded_ice_sheet", - "dimensions": "xgre ygre time", - "out_name": "libmassbfgr", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "licalvf": { - "modeling_realm": "landIce", - "standard_name": "land_ice_specific_mass_flux_due_to_calving", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Land ice calving flux", - "comment": "Loss of ice mass resulting from iceberg calving. 
Computed as the rate of mass loss by the ice shelf (in kg s-1) divided by the horizontal area of the ice sheet (m2) in the grid box.", - "dimensions": "xgre ygre time", - "out_name": "licalvf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "lifmassbf": { - "modeling_realm": "landIce", - "standard_name": "land_ice_specific_mass_flux_due_to_calving_and_ice_front_melting", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Land ice vertical front mass balance flux", - "comment": "Total mass balance at the ice front (or vertical margin). It includes both iceberg calving and melt on vertical ice front", - "dimensions": "xgre ygre time", - "out_name": "lifmassbf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "litempbotfl": { - "modeling_realm": "landIce", - "standard_name": "land_ice_basal_temperature", - "units": "K", - "cell_methods": "area: time: mean where floating_ice_shelf (comment: mask=sftflf)", - "cell_measures": "area: areacellg", - "long_name": "Basal temperature of floating ice shelf", - "comment": "Basal temperature that is used to force the ice sheet models, it is the temperature AT ice shelf-ocean interface. Cell_methods: area: mean where floating_ice_shelf", - "dimensions": "xgre ygre time", - "out_name": "litempbotfl", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "litempbotgr": { - "modeling_realm": "landIce", - "standard_name": "land_ice_basal_temperature", - "units": "K", - "cell_methods": "area: time: mean where grounded_ice_sheet (comment: mask=sfgrlf)", - "cell_measures": "area: areacellg", - "long_name": "Basal temperature of grounded ice sheet", - "comment": "Basal temperature that is used to force the ice sheet models, it is the temperature AT ice sheet - bedrock interface. Cell_methods: area: mean where grounded_ice_sheet", - "dimensions": "xgre ygre time", - "out_name": "litempbotgr", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "litemptop": { - "modeling_realm": "landIce", - "standard_name": "temperature_at_top_of_ice_sheet_model", - "units": "K", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Temperature at top of ice sheet model", - "comment": "Upper boundary temperature that is used to force ice sheet models. It is the temperature at the base of the snowpack models, and does not vary with seasons. Report surface temperature of ice sheet where snow thickness is zero", - "dimensions": "xgre ygre time", - "out_name": "litemptop", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrroLi": { - "modeling_realm": "atmos", - "standard_name": "land_ice_runoff_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Land Ice Runoff Flux", - "comment": "Runoff flux over land ice is the difference between any available liquid water in the snowpack less any refreezing. 
Computed as the sum of rainfall and melt of snow or ice less any refreezing or water retained in the snowpack", - "dimensions": "xgre ygre time", - "out_name": "mrroLi", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "orog": { - "modeling_realm": "land", - "standard_name": "surface_altitude", - "units": "m", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Surface Altitude", - "comment": "The surface called 'surface' means the lower boundary of the atmosphere. Altitude is the (geometric) height above the geoid, which is the reference geopotential surface. The geoid is similar to mean sea level.", - "dimensions": "xgre ygre time", - "out_name": "orog", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prra": { - "modeling_realm": "atmos", - "standard_name": "rainfall_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Rainfall Flux where Ice Free Ocean over Sea over Land Ice", - "comment": "", - "dimensions": "xgre ygre time", - "out_name": "prra", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prsn": { - "modeling_realm": "atmos", - "standard_name": "snowfall_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Snowfall Flux", - "comment": "at surface; includes precipitation of all forms of water in the solid phase", - "dimensions": "xgre ygre time", - "out_name": "prsn", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlds": { - "modeling_realm": "atmos", - "standard_name": "surface_downwelling_longwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Surface Downwelling Longwave Radiation", - "comment": "", - "dimensions": "xgre ygre time", - "out_name": "rlds", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlus": { - "modeling_realm": "atmos", - "standard_name": "surface_upwelling_longwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Surface Upwelling Longwave Radiation", - "comment": "", - "dimensions": "xgre ygre time", - "out_name": "rlus", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsds": { - "modeling_realm": "atmos", - "standard_name": "surface_downwelling_shortwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Surface Downwelling Shortwave Radiation", - "comment": "surface solar irradiance for UV calculations", - "dimensions": "xgre ygre time", - "out_name": "rsds", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsus": { - "modeling_realm": "atmos", - "standard_name": "surface_upwelling_shortwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean where ice_sheet", - 
"cell_measures": "area: areacellg", - "long_name": "Surface Upwelling Shortwave Radiation", - "comment": "", - "dimensions": "xgre ygre time", - "out_name": "rsus", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sbl": { - "modeling_realm": "landIce", - "standard_name": "surface_snow_and_ice_sublimation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Surface Snow and Ice Sublimation Flux", - "comment": "The snow and ice sublimation flux is the loss of snow and ice mass per unit area from the surface resulting from their direct conversion to water vapor that enters the atmosphere.", - "dimensions": "xgre ygre time", - "out_name": "sbl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "snc": { - "modeling_realm": "landIce land", - "standard_name": "surface_snow_area_fraction", - "units": "%", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "snow cover fraction", - "comment": "Fraction of each grid cell that is occupied by snow that rests on land portion of cell.", - "dimensions": "xgre ygre time", - "out_name": "snc", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "snicefreez": { - "modeling_realm": "landIce", - "standard_name": "surface_snow_and_ice_refreezing_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Surface snow and ice refreeze flux", - "comment": "Mass flux of surface meltwater which refreezes within the snowpack. Computed as the total refreezing on the land ice portion of the grid cell divided by land ice area in the grid cell.", - "dimensions": "xgre ygre time", - "out_name": "snicefreez", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "snicem": { - "modeling_realm": "landIce", - "standard_name": "surface_snow_and_ice_melt_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Surface snow and ice melt flux", - "comment": "Loss of snow and ice mass resulting from surface melting. 
Computed as the total surface melt on the land ice portion of the grid cell divided by land ice area in the grid cell.", - "dimensions": "xgre ygre time", - "out_name": "snicem", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "snm": { - "modeling_realm": "landIce land", - "standard_name": "surface_snow_melt_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Surface Snow Melt", - "comment": "The total surface snow melt rate on the land portion of the grid cell divided by the land area in the grid cell; report as zero for snow-free land regions and missing where there is no land.", - "dimensions": "xgre ygre time", - "out_name": "snm", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tas": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "", - "long_name": "Surface Air Temperature", - "comment": "near-surface (usually, 2 meter) air temperature", - "dimensions": "time height2m", - "out_name": "tas", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ts": { - "modeling_realm": "atmos", - "standard_name": "surface_temperature", - "units": "K", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Surface Temperature", - "comment": "Temperature of the lower boundary of the atmosphere", - "dimensions": "xgre ygre time", - "out_name": "ts", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tsn": { - "modeling_realm": "landIce land", - "standard_name": "temperature_in_surface_snow", - "units": "K", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Snow Internal Temperature", - "comment": "This temperature is averaged over all the snow in the grid cell that rests on land or land ice. When computing the time-mean here, the time samples, weighted by the mass of snow on the land portion of the grid cell, are accumulated and then divided by the sum of the weights. 
Reported as missing in regions free of snow on land.", - "dimensions": "xgre ygre time", - "out_name": "tsn", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_IyrAnt.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_IyrAnt.json deleted file mode 100644 index 043ebe2eb9..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_IyrAnt.json +++ /dev/null @@ -1,579 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table IyrAnt", - "realm": "landIce", - "frequency": "yr", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "365.00", - "generic_levels": "", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "acabf": { - "modeling_realm": "landIce", - "standard_name": "land_ice_surface_specific_mass_balance_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Surface Mass Balance flux", - "comment": "Specific mass balance means the net rate at which ice is added per unit area at the land ice surface. Computed as the total surface mass balance on the land ice portion of the grid cell divided by land ice area in the grid cell. A negative value means loss of ice", - "dimensions": "xant yant time", - "out_name": "acabf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "areacelli": { - "modeling_realm": "landIce", - "standard_name": "cell_area", - "units": "m2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacellg", - "long_name": "Ice-Sheet Grid Cell Area", - "comment": "Horizontal area of ice-sheet grid cells", - "dimensions": "xant yant time", - "out_name": "areacelli", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfgeoubed": { - "modeling_realm": "landIce", - "standard_name": "upward_geothermal_heat_flux_at_ground_level_in_land_ice", - "units": "W m-2", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Geothermal Heat flux beneath land ice", - "comment": "Upward geothermal heat flux per unit area beneath land ice", - "dimensions": "xant yant time", - "out_name": "hfgeoubed", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "iareafl": { - "modeling_realm": "atmos", - "standard_name": "floating_ice_shelf_area", - "units": "m2", - "cell_methods": "area: time: mean where floating_ice_shelf (comment: mask=sftflf)", - "cell_measures": "", - "long_name": "Area covered by floating ice shelves", - "comment": "Total area of the floating ice shelves (the component of ice sheet that flows over ocean)", - "dimensions": "time", - "out_name": "iareafl", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "iareagr": { - "modeling_realm": "atmos", - "standard_name": "grounded_ice_sheet_area", - "units": "m2", - "cell_methods": "area: time: mean where grounded_ice_sheet (comment: mask=sfgrlf)", - "cell_measures": "", - "long_name": "Area covered by grounded ice sheet", - "comment": "Total area of the grounded ice sheets (the component of ice sheet resting over 
bedrock)", - "dimensions": "time", - "out_name": "iareagr", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "libmassbffl": { - "modeling_realm": "landIce", - "standard_name": "land_ice_basal_specific_mass_balance_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where floating_ice_shelf (comment: mask=sftflf)", - "cell_measures": "area: areacellg", - "long_name": "Basal specific mass balance flux of floating ice shelf", - "comment": "Specific mass balance means the net rate at which ice is added per unit area at the land ice base. A negative value means loss of ice. Computed as the total basal mass balance on the floating land ice (floating ice shelf) portion of the grid cell divided by floating land ice (floating ice shelf) area in the grid cell. Cell_methods: area: mean where floating_ice_shelf", - "dimensions": "xant yant time", - "out_name": "libmassbffl", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "libmassbfgr": { - "modeling_realm": "landIce", - "standard_name": "land_ice_basal_specific_mass_balance_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where grounded_ice_sheet (comment: mask=sfgrlf)", - "cell_measures": "area: areacellg", - "long_name": "Basal specific mass balance flux of grounded ice sheet", - "comment": "Specific mass balance means the net rate at which ice is added per unit area at the land ice base. A negative value means loss of ice. Computed as the total basal mass balance on the grounded land ice portion of the grid cell divided by grounded land ice area in the grid cell. Cell_methods: area: mean where grounded_ice_sheet", - "dimensions": "xant yant time", - "out_name": "libmassbfgr", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "licalvf": { - "modeling_realm": "landIce", - "standard_name": "land_ice_specific_mass_flux_due_to_calving", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Land ice calving flux", - "comment": "Loss of ice mass resulting from iceberg calving. Computed as the rate of mass loss by the ice shelf (in kg s-1) divided by the horizontal area of the ice sheet (m2) in the grid box.", - "dimensions": "xant yant time", - "out_name": "licalvf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "lifmassbf": { - "modeling_realm": "landIce", - "standard_name": "land_ice_specific_mass_flux_due_to_calving_and_ice_front_melting", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Land ice vertical front mass balance flux", - "comment": "Total mass balance at the ice front (or vertical margin). 
It includes both iceberg calving and melt on vertical ice front", - "dimensions": "xant yant time", - "out_name": "lifmassbf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "lim": { - "modeling_realm": "atmos", - "standard_name": "land_ice_mass", - "units": "kg", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "", - "long_name": "Ice sheet mass", - "comment": "The ice sheet mass is computed as the volume times density", - "dimensions": "time", - "out_name": "lim", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "limnsw": { - "modeling_realm": "atmos", - "standard_name": "land_ice_mass_not_displacing_sea_water", - "units": "kg", - "cell_methods": "area: time: mean where grounded_ice_sheet (comment: mask=sfgrlf)", - "cell_measures": "", - "long_name": "Ice sheet mass that does not displace sea water", - "comment": "The ice sheet mass is computed as the volume above flotation times density. Changes in land_ice_mass_not_displacing_sea_water will always result in a change in sea level, unlike changes in land_ice_mass which may not result in sea level change (such as melting of the floating ice shelves, or portion of ice that sits on bedrock below sea level)", - "dimensions": "time", - "out_name": "limnsw", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "litempbotfl": { - "modeling_realm": "landIce", - "standard_name": "land_ice_basal_temperature", - "units": "K", - "cell_methods": "area: time: mean where floating_ice_shelf (comment: mask=sftflf)", - "cell_measures": "area: areacellg", - "long_name": "Basal temperature of floating ice shelf", - "comment": "Basal temperature that is used to force the ice sheet models, it is the temperature AT ice shelf-ocean interface. Cell_methods: area: mean where floating_ice_shelf", - "dimensions": "xant yant time", - "out_name": "litempbotfl", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "litempbotgr": { - "modeling_realm": "landIce", - "standard_name": "land_ice_basal_temperature", - "units": "K", - "cell_methods": "area: time: mean where grounded_ice_sheet (comment: mask=sfgrlf)", - "cell_measures": "area: areacellg", - "long_name": "Basal temperature of grounded ice sheet", - "comment": "Basal temperature that is used to force the ice sheet models, it is the temperature AT ice sheet - bedrock interface. Cell_methods: area: mean where grounded_ice_sheet", - "dimensions": "xant yant time", - "out_name": "litempbotgr", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "litemptop": { - "modeling_realm": "landIce", - "standard_name": "temperature_at_top_of_ice_sheet_model", - "units": "K", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Temperature at top of ice sheet model", - "comment": "Upper boundary temperature that is used to force ice sheet models. It is the temperature at the base of the snowpack models, and does not vary with seasons. 
Report surface temperature of ice sheet where snow thickness is zero",
-            "dimensions": "xant yant time",
-            "out_name": "litemptop",
-            "type": "",
-            "positive": "",
-            "valid_min": "",
-            "valid_max": "",
-            "ok_min_mean_abs": "",
-            "ok_max_mean_abs": ""
-        },
-        "lithk": {
-            "modeling_realm": "landIce",
-            "standard_name": "land_ice_thickness",
-            "units": "m",
-            "cell_methods": "area: time: mean where ice_sheet",
-            "cell_measures": "area: areacellg",
-            "long_name": "Ice Sheet Thickness",
-            "comment": "The thickness of the ice sheet",
-            "dimensions": "xant yant time",
-            "out_name": "lithk",
-            "type": "",
-            "positive": "",
-            "valid_min": "",
-            "valid_max": "",
-            "ok_min_mean_abs": "",
-            "ok_max_mean_abs": ""
-        },
-        "orog": {
-            "modeling_realm": "land",
-            "standard_name": "surface_altitude",
-            "units": "m",
-            "cell_methods": "area: time: mean where ice_sheet",
-            "cell_measures": "area: areacellg",
-            "long_name": "Surface Altitude",
-            "comment": "The surface called 'surface' means the lower boundary of the atmosphere. Altitude is the (geometric) height above the geoid, which is the reference geopotential surface. The geoid is similar to mean sea level.",
-            "dimensions": "xant yant time",
-            "out_name": "orog",
-            "type": "real",
-            "positive": "",
-            "valid_min": "",
-            "valid_max": "",
-            "ok_min_mean_abs": "",
-            "ok_max_mean_abs": ""
-        },
-        "sftflf": {
-            "modeling_realm": "landIce",
-            "standard_name": "floating_ice_shelf_area_fraction",
-            "units": "%",
-            "cell_methods": "area: time: mean",
-            "cell_measures": "area: areacellg",
-            "long_name": "Floating Ice Shelf Area Fraction",
-            "comment": "Fraction of grid cell covered by floating ice shelf, the component of the ice sheet that is flowing over seawater",
-            "dimensions": "xant yant time typefis",
-            "out_name": "sftflf",
-            "type": "",
-            "positive": "",
-            "valid_min": "",
-            "valid_max": "",
-            "ok_min_mean_abs": "",
-            "ok_max_mean_abs": ""
-        },
-        "sftgif": {
-            "modeling_realm": "land",
-            "standard_name": "land_ice_area_fraction",
-            "units": "%",
-            "cell_methods": "area: time: mean",
-            "cell_measures": "area: areacellg",
-            "long_name": "Fraction of Grid Cell Covered with Glacier",
-            "comment": "Fraction of grid cell covered by land ice (ice sheet, ice shelf, ice cap, glacier)",
-            "dimensions": "xant yant time typeli",
-            "out_name": "sftgif",
-            "type": "real",
-            "positive": "",
-            "valid_min": "",
-            "valid_max": "",
-            "ok_min_mean_abs": "",
-            "ok_max_mean_abs": ""
-        },
-        "sftgrf": {
-            "modeling_realm": "landIce",
-            "standard_name": "grounded_ice_sheet_area_fraction",
-            "units": "%",
-            "cell_methods": "area: time: mean",
-            "cell_measures": "area: areacellg",
-            "long_name": "Grounded Ice Sheet Area Fraction",
-            "comment": "Fraction of grid cell covered by grounded ice sheet",
-            "dimensions": "xant yant time typegis",
-            "out_name": "sftgrf",
-            "type": "",
-            "positive": "",
-            "valid_min": "",
-            "valid_max": "",
-            "ok_min_mean_abs": "",
-            "ok_max_mean_abs": ""
-        },
-        "snc": {
-            "modeling_realm": "landIce land",
-            "standard_name": "surface_snow_area_fraction",
-            "units": "%",
-            "cell_methods": "area: time: mean where ice_sheet",
-            "cell_measures": "area: areacellg",
-            "long_name": "snow cover fraction",
-            "comment": "Fraction of each grid cell that is occupied by snow that rests on land portion of cell.",
-            "dimensions": "xant yant time",
-            "out_name": "snc",
-            "type": "float",
-            "positive": "",
-            "valid_min": "",
-            "valid_max": "",
-            "ok_min_mean_abs": "",
-            "ok_max_mean_abs": ""
-        },
-        "strbasemag": {
-            "modeling_realm": "landIce",
-            "standard_name": "magnitude_of_basal_drag_at_land_ice_base",
-            "units": "Pa",
-            "cell_methods": "area: time: mean where ice_sheet",
-            "cell_measures": "area: areacellg",
-            "long_name": "Land Ice Basal Drag",
-            "comment": "Magnitude of basal drag at land ice base",
-            "dimensions": "xant yant time",
-            "out_name": "strbasemag",
-            "type": "",
-            "positive": "",
-            "valid_min": "",
-            "valid_max": "",
-            "ok_min_mean_abs": "",
-            "ok_max_mean_abs": ""
-        },
-        "tendacabf": {
-            "modeling_realm": "atmos",
-            "standard_name": "tendency_of_land_ice_mass_due_to_surface_mass_balance",
-            "units": "kg s-1",
-            "cell_methods": "area: time: mean where ice_sheet",
-            "cell_measures": "",
-            "long_name": "Total surface mass balance flux",
-            "comment": "The total surface mass balance flux over land ice is a spatial integration of the surface mass balance flux",
-            "dimensions": "time",
-            "out_name": "tendacabf",
-            "type": "",
-            "positive": "",
-            "valid_min": "",
-            "valid_max": "",
-            "ok_min_mean_abs": "",
-            "ok_max_mean_abs": ""
-        },
-        "tendlibmassbf": {
-            "modeling_realm": "atmos",
-            "standard_name": "tendency_of_land_ice_mass_due_to_basal_mass_balance",
-            "units": "kg s-1",
-            "cell_methods": "area: time: mean where ice_sheet",
-            "cell_measures": "",
-            "long_name": "Total basal mass balance flux",
-            "comment": "The total basal mass balance flux over land ice is a spatial integration of the basal mass balance flux",
-            "dimensions": "time",
-            "out_name": "tendlibmassbf",
-            "type": "",
-            "positive": "",
-            "valid_min": "",
-            "valid_max": "",
-            "ok_min_mean_abs": "",
-            "ok_max_mean_abs": ""
-        },
-        "tendlicalvf": {
-            "modeling_realm": "atmos",
-            "standard_name": "tendency_of_land_ice_mass_due_to_calving",
-            "units": "kg s-1",
-            "cell_methods": "area: time: mean where ice_sheet",
-            "cell_measures": "",
-            "long_name": "Total calving flux",
-            "comment": "The total calving flux over land ice is a spatial integration of the calving flux",
-            "dimensions": "time",
-            "out_name": "tendlicalvf",
-            "type": "",
-            "positive": "",
-            "valid_min": "",
-            "valid_max": "",
-            "ok_min_mean_abs": "",
-            "ok_max_mean_abs": ""
-        },
-        "topg": {
-            "modeling_realm": "landIce",
-            "standard_name": "bedrock_altitude",
-            "units": "m",
-            "cell_methods": "area: time: mean where ice_sheet",
-            "cell_measures": "area: areacellg",
-            "long_name": "Bedrock Altitude",
-            "comment": "The bedrock topography beneath the land ice",
-            "dimensions": "xant yant time",
-            "out_name": "topg",
-            "type": "",
-            "positive": "",
-            "valid_min": "",
-            "valid_max": "",
-            "ok_min_mean_abs": "",
-            "ok_max_mean_abs": ""
-        },
-        "xvelbase": {
-            "modeling_realm": "landIce",
-            "standard_name": "land_ice_basal_x_velocity",
-            "units": "m s-1",
-            "cell_methods": "area: time: mean where ice_sheet",
-            "cell_measures": "area: areacellg",
-            "long_name": "X-component of land ice basal velocity",
-            "comment": "A velocity is a vector quantity. 'x' indicates a vector component along the grid x-axis, positive with increasing x. 'Land ice' means glaciers, ice-caps and ice-sheets resting on bedrock and also includes ice-shelves.
'basal' means the lower boundary of the land ice.", - "dimensions": "xant yant time", - "out_name": "xvelbase", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "xvelmean": { - "modeling_realm": "landIce", - "standard_name": "land_ice_vertical_mean_x_velocity", - "units": "m s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "X-component of land ice vertical mean velocity", - "comment": "The vertical mean land ice velocity is the average from the bedrock to the surface of the ice", - "dimensions": "xant yant time", - "out_name": "xvelmean", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "xvelsurf": { - "modeling_realm": "landIce", - "standard_name": "land_ice_surface_x_velocity", - "units": "m s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "X-component of land ice surface velocity", - "comment": "A velocity is a vector quantity. 'x' indicates a vector component along the grid x-axis, positive with increasing x. 'Land ice' means glaciers, ice-caps and ice-sheets resting on bedrock and also includes ice-shelves. The surface called 'surface' means the lower boundary of the atmosphere.", - "dimensions": "xant yant time", - "out_name": "xvelsurf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "yvelbase": { - "modeling_realm": "landIce", - "standard_name": "land_ice_basal_y_velocity", - "units": "m s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Y-component of land ice basal velocity", - "comment": "A velocity is a vector quantity. 'y' indicates a vector component along the grid y-axis, positive with increasing y. 'Land ice' means glaciers, ice-caps and ice-sheets resting on bedrock and also includes ice-shelves. 'basal' means the lower boundary of the land ice.", - "dimensions": "xant yant time", - "out_name": "yvelbase", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "yvelmean": { - "modeling_realm": "landIce", - "standard_name": "land_ice_vertical_mean_y_velocity", - "units": "m s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Y-component of land ice vertical mean velocity", - "comment": "The vertical mean land ice velocity is the average from the bedrock to the surface of the ice", - "dimensions": "xant yant time", - "out_name": "yvelmean", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "yvelsurf": { - "modeling_realm": "landIce", - "standard_name": "land_ice_surface_y_velocity", - "units": "m s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Y-component of land ice surface velocity", - "comment": "A velocity is a vector quantity. 'y' indicates a vector component along the grid y-axis, positive with increasing y. 'Land ice' means glaciers, ice-caps and ice-sheets resting on bedrock and also includes ice-shelves. 
The surface called 'surface' means the lower boundary of the atmosphere.'", - "dimensions": "xant yant time", - "out_name": "yvelsurf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zvelbase": { - "modeling_realm": "landIce", - "standard_name": "land_ice_basal_upward_velocity", - "units": "m s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Upward-component of land ice basal velocity", - "comment": "A velocity is a vector quantity. 'Upward' indicates a vector component which is positive when directed upward (negative downward). 'basal' means the lower boundary of the atmosphere", - "dimensions": "xant yant time", - "out_name": "zvelbase", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zvelsurf": { - "modeling_realm": "landIce", - "standard_name": "land_ice_surface_upward_velocity", - "units": "m s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Upward component of land ice surface velocity", - "comment": "A velocity is a vector quantity. 'Upward' indicates a vector component which is positive when directed upward (negative downward). The surface called 'surface' means the lower boundary of the atmosphere", - "dimensions": "xant yant time", - "out_name": "zvelsurf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_IyrGre.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_IyrGre.json deleted file mode 100644 index c134ae3bc7..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_IyrGre.json +++ /dev/null @@ -1,579 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table IyrGre", - "realm": "landIce", - "frequency": "yr", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "365.00", - "generic_levels": "", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "acabf": { - "modeling_realm": "landIce", - "standard_name": "land_ice_surface_specific_mass_balance_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Surface Mass Balance flux", - "comment": "Specific mass balance means the net rate at which ice is added per unit area at the land ice surface. Computed as the total surface mass balance on the land ice portion of the grid cell divided by land ice area in the grid cell. 
A negative value means loss of ice", - "dimensions": "xgre ygre time", - "out_name": "acabf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "areacelli": { - "modeling_realm": "landIce", - "standard_name": "cell_area", - "units": "m2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacellg", - "long_name": "Ice-Sheet Grid Cell Area", - "comment": "Horizontal area of ice-sheet grid cells", - "dimensions": "xgre ygre time", - "out_name": "areacelli", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfgeoubed": { - "modeling_realm": "landIce", - "standard_name": "upward_geothermal_heat_flux_at_ground_level_in_land_ice", - "units": "W m-2", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Geothermal Heat flux beneath land ice", - "comment": "Upward geothermal heat flux per unit area beneath land ice", - "dimensions": "xgre ygre time", - "out_name": "hfgeoubed", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "iareafl": { - "modeling_realm": "atmos", - "standard_name": "floating_ice_shelf_area", - "units": "m2", - "cell_methods": "area: time: mean where floating_ice_shelf (comment: mask=sftflf)", - "cell_measures": "", - "long_name": "Area covered by floating ice shelves", - "comment": "Total area of the floating ice shelves (the component of ice sheet that flows over ocean)", - "dimensions": "time", - "out_name": "iareafl", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "iareagr": { - "modeling_realm": "atmos", - "standard_name": "grounded_ice_sheet_area", - "units": "m2", - "cell_methods": "area: time: mean where grounded_ice_sheet (comment: mask=sfgrlf)", - "cell_measures": "", - "long_name": "Area covered by grounded ice sheet", - "comment": "Total area of the grounded ice sheets (the component of ice sheet resting over bedrock)", - "dimensions": "time", - "out_name": "iareagr", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "libmassbffl": { - "modeling_realm": "landIce", - "standard_name": "land_ice_basal_specific_mass_balance_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where floating_ice_shelf (comment: mask=sftflf)", - "cell_measures": "area: areacellg", - "long_name": "Basal specific mass balance flux of floating ice shelf", - "comment": "Specific mass balance means the net rate at which ice is added per unit area at the land ice base. A negative value means loss of ice. Computed as the total basal mass balance on the floating land ice (floating ice shelf) portion of the grid cell divided by floating land ice (floating ice shelf) area in the grid cell. 
Cell_methods: area: mean where floating_ice_shelf", - "dimensions": "xgre ygre time", - "out_name": "libmassbffl", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "libmassbfgr": { - "modeling_realm": "landIce", - "standard_name": "land_ice_basal_specific_mass_balance_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where grounded_ice_sheet (comment: mask=sfgrlf)", - "cell_measures": "area: areacellg", - "long_name": "Basal specific mass balance flux of grounded ice sheet", - "comment": "Specific mass balance means the net rate at which ice is added per unit area at the land ice base. A negative value means loss of ice. Computed as the total basal mass balance on the grounded land ice portion of the grid cell divided by grounded land ice area in the grid cell. Cell_methods: area: mean where grounded_ice_sheet", - "dimensions": "xgre ygre time", - "out_name": "libmassbfgr", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "licalvf": { - "modeling_realm": "landIce", - "standard_name": "land_ice_specific_mass_flux_due_to_calving", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Land ice calving flux", - "comment": "Loss of ice mass resulting from iceberg calving. Computed as the rate of mass loss by the ice shelf (in kg s-1) divided by the horizontal area of the ice sheet (m2) in the grid box.", - "dimensions": "xgre ygre time", - "out_name": "licalvf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "lifmassbf": { - "modeling_realm": "landIce", - "standard_name": "land_ice_specific_mass_flux_due_to_calving_and_ice_front_melting", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Land ice vertical front mass balance flux", - "comment": "Total mass balance at the ice front (or vertical margin). It includes both iceberg calving and melt on vertical ice front", - "dimensions": "xgre ygre time", - "out_name": "lifmassbf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "lim": { - "modeling_realm": "atmos", - "standard_name": "land_ice_mass", - "units": "kg", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "", - "long_name": "Ice sheet mass", - "comment": "The ice sheet mass is computed as the volume times density", - "dimensions": "time", - "out_name": "lim", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "limnsw": { - "modeling_realm": "atmos", - "standard_name": "land_ice_mass_not_displacing_sea_water", - "units": "kg", - "cell_methods": "area: time: mean where grounded_ice_sheet (comment: mask=sfgrlf)", - "cell_measures": "", - "long_name": "Ice sheet mass that does not displace sea water", - "comment": "The ice sheet mass is computed as the volume above flotation times density. 
Changes in land_ice_mass_not_displacing_sea_water will always result in a change in sea level, unlike changes in land_ice_mass which may not result in sea level change (such as melting of the floating ice shelves, or the portion of ice that sits on bedrock below sea level)", - "dimensions": "time", - "out_name": "limnsw", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "litempbotfl": { - "modeling_realm": "landIce", - "standard_name": "land_ice_basal_temperature", - "units": "K", - "cell_methods": "area: time: mean where floating_ice_shelf (comment: mask=sftflf)", - "cell_measures": "area: areacellg", - "long_name": "Basal temperature of floating ice shelf", - "comment": "Basal temperature that is used to force the ice sheet models; it is the temperature at the ice shelf-ocean interface. Cell_methods: area: mean where floating_ice_shelf", - "dimensions": "xgre ygre time", - "out_name": "litempbotfl", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "litempbotgr": { - "modeling_realm": "landIce", - "standard_name": "land_ice_basal_temperature", - "units": "K", - "cell_methods": "area: time: mean where grounded_ice_sheet (comment: mask=sfgrlf)", - "cell_measures": "area: areacellg", - "long_name": "Basal temperature of grounded ice sheet", - "comment": "Basal temperature that is used to force the ice sheet models; it is the temperature at the ice sheet-bedrock interface. Cell_methods: area: mean where grounded_ice_sheet", - "dimensions": "xgre ygre time", - "out_name": "litempbotgr", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "litemptop": { - "modeling_realm": "landIce", - "standard_name": "temperature_at_top_of_ice_sheet_model", - "units": "K", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Temperature at top of ice sheet model", - "comment": "Upper boundary temperature that is used to force ice sheet models. It is the temperature at the base of the snowpack models, and does not vary with seasons. Report surface temperature of ice sheet where snow thickness is zero", - "dimensions": "xgre ygre time", - "out_name": "litemptop", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "lithk": { - "modeling_realm": "landIce", - "standard_name": "land_ice_thickness", - "units": "m", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Ice Sheet Thickness", - "comment": "The thickness of the ice sheet", - "dimensions": "xgre ygre time", - "out_name": "lithk", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "orog": { - "modeling_realm": "land", - "standard_name": "surface_altitude", - "units": "m", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Surface Altitude", - "comment": "The surface called 'surface' means the lower boundary of the atmosphere. Altitude is the (geometric) height above the geoid, which is the reference geopotential surface. 
The geoid is similar to mean sea level.", - "dimensions": "xgre ygre time", - "out_name": "orog", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sftflf": { - "modeling_realm": "landIce", - "standard_name": "floating_ice_shelf_area_fraction", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacellg", - "long_name": "Floating Ice Shelf Area Fraction", - "comment": "Fraction of grid cell covered by floating ice shelf, the component of the ice sheet that is flowing over seawater", - "dimensions": "xgre ygre time typefis", - "out_name": "sftflf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sftgif": { - "modeling_realm": "land", - "standard_name": "land_ice_area_fraction", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacellg", - "long_name": "Fraction of Grid Cell Covered with Glacier", - "comment": "Fraction of grid cell covered by land ice (ice sheet, ice shelf, ice cap, glacier)", - "dimensions": "xgre ygre time typeli", - "out_name": "sftgif", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sftgrf": { - "modeling_realm": "landIce", - "standard_name": "grounded_ice_sheet_area_fraction", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacellg", - "long_name": "Grounded Ice Sheet Area Fraction", - "comment": "Fraction of grid cell covered by grounded ice sheet", - "dimensions": "xgre ygre time typegis", - "out_name": "sftgrf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "snc": { - "modeling_realm": "landIce land", - "standard_name": "surface_snow_area_fraction", - "units": "%", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "snow cover fraction", - "comment": "Fraction of each grid cell that is occupied by snow that rests on land portion of cell.", - "dimensions": "xgre ygre time", - "out_name": "snc", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "strbasemag": { - "modeling_realm": "landIce", - "standard_name": "magnitude_of_basal_drag_at_land_ice_base", - "units": "Pa", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Land Ice Basal Drag", - "comment": "Magnitude of basal drag at land ice base", - "dimensions": "xgre ygre time", - "out_name": "strbasemag", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tendacabf": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_land_ice_mass_due_to_surface_mass_balance", - "units": "kg s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "", - "long_name": "Total surface mass balance flux", - "comment": "The total surface mass balance flux over land ice is a spatial integration of the surface mass balance flux", - "dimensions": "time", - "out_name": "tendacabf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tendlibmassbf": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_land_ice_mass_due_to_basal_mass_balance", - 
"units": "kg s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "", - "long_name": "Total basal mass balance flux", - "comment": "The total basal mass balance flux over land ice is a spatial integration of the basal mass balance flux", - "dimensions": "time", - "out_name": "tendlibmassbf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tendlicalvf": { - "modeling_realm": "atmos", - "standard_name": "tendency_of_land_ice_mass_due_to_calving", - "units": "kg s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "", - "long_name": "Total calving flux", - "comment": "The total calving flux over land ice is a spatial integration of the calving flux", - "dimensions": "time", - "out_name": "tendlicalvf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "topg": { - "modeling_realm": "landIce", - "standard_name": "bedrock_altitude", - "units": "m", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Bedrock Altitude", - "comment": "The bedrock topography beneath the land ice", - "dimensions": "xgre ygre time", - "out_name": "topg", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "xvelbase": { - "modeling_realm": "landIce", - "standard_name": "land_ice_basal_x_velocity", - "units": "m s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "X-component of land ice basal velocity", - "comment": "A velocity is a vector quantity. 'x' indicates a vector component along the grid x-axis, positive with increasing x. 'Land ice' means glaciers, ice-caps and ice-sheets resting on bedrock and also includes ice-shelves. 'basal' means the lower boundary of the land ice.", - "dimensions": "xgre ygre time", - "out_name": "xvelbase", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "xvelmean": { - "modeling_realm": "landIce", - "standard_name": "land_ice_vertical_mean_x_velocity", - "units": "m s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "X-component of land ice vertical mean velocity", - "comment": "The vertical mean land ice velocity is the average from the bedrock to the surface of the ice", - "dimensions": "xgre ygre time", - "out_name": "xvelmean", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "xvelsurf": { - "modeling_realm": "landIce", - "standard_name": "land_ice_surface_x_velocity", - "units": "m s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "X-component of land ice surface velocity", - "comment": "A velocity is a vector quantity. 'x' indicates a vector component along the grid x-axis, positive with increasing x. 'Land ice' means glaciers, ice-caps and ice-sheets resting on bedrock and also includes ice-shelves. 
The surface called 'surface' means the lower boundary of the atmosphere.", - "dimensions": "xgre ygre time", - "out_name": "xvelsurf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "yvelbase": { - "modeling_realm": "landIce", - "standard_name": "land_ice_basal_y_velocity", - "units": "m s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Y-component of land ice basal velocity", - "comment": "A velocity is a vector quantity. 'y' indicates a vector component along the grid y-axis, positive with increasing y. 'Land ice' means glaciers, ice-caps and ice-sheets resting on bedrock and also includes ice-shelves. 'basal' means the lower boundary of the land ice.", - "dimensions": "xgre ygre time", - "out_name": "yvelbase", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "yvelmean": { - "modeling_realm": "landIce", - "standard_name": "land_ice_vertical_mean_y_velocity", - "units": "m s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Y-component of land ice vertical mean velocity", - "comment": "The vertical mean land ice velocity is the average from the bedrock to the surface of the ice", - "dimensions": "xgre ygre time", - "out_name": "yvelmean", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "yvelsurf": { - "modeling_realm": "landIce", - "standard_name": "land_ice_surface_y_velocity", - "units": "m s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Y-component of land ice surface velocity", - "comment": "A velocity is a vector quantity. 'y' indicates a vector component along the grid y-axis, positive with increasing y. 'Land ice' means glaciers, ice-caps and ice-sheets resting on bedrock and also includes ice-shelves. The surface called 'surface' means the lower boundary of the atmosphere.", - "dimensions": "xgre ygre time", - "out_name": "yvelsurf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zvelbase": { - "modeling_realm": "landIce", - "standard_name": "land_ice_basal_upward_velocity", - "units": "m s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Upward-component of land ice basal velocity", - "comment": "A velocity is a vector quantity. 'Upward' indicates a vector component which is positive when directed upward (negative downward). 'basal' means the lower boundary of the land ice", - "dimensions": "xgre ygre time", - "out_name": "zvelbase", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zvelsurf": { - "modeling_realm": "landIce", - "standard_name": "land_ice_surface_upward_velocity", - "units": "m s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacellg", - "long_name": "Upward component of land ice surface velocity", - "comment": "A velocity is a vector quantity. 'Upward' indicates a vector component which is positive when directed upward (negative downward). 
The surface called 'surface' means the lower boundary of the atmosphere", - "dimensions": "xgre ygre time", - "out_name": "zvelsurf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_LImon.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_LImon.json deleted file mode 100644 index d2897e68e1..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_LImon.json +++ /dev/null @@ -1,647 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table LImon", - "realm": "landIce land", - "frequency": "mon", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "30.00000", - "generic_levels": "", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "acabfIs": { - "modeling_realm": "landIce", - "standard_name": "land_ice_surface_specific_mass_balance_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacella", - "long_name": "Ice Sheet Surface Mass Balance flux", - "comment": "Specific mass balance means the net rate at which ice is added per unit area at the land ice surface. Computed as the total surface mass balance on the land ice portion of the grid cell divided by land ice area in the grid cell. A negative value means loss of ice", - "dimensions": "longitude latitude time", - "out_name": "acabfIs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "agesno": { - "modeling_realm": "landIce land", - "standard_name": "age_of_surface_snow", - "units": "day", - "cell_methods": "area: mean where land time: mean (with samples weighted by snow mass)", - "cell_measures": "area: areacella", - "long_name": "Snow Age", - "comment": "Age of Snow (when computing the time-mean here, the time samples, weighted by the mass of snow on the land portion of the grid cell, are accumulated and then divided by the sum of the weights). 
Reported as missing data in regions free of snow on land.", - "dimensions": "longitude latitude time", - "out_name": "agesno", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfdsn": { - "modeling_realm": "landIce land", - "standard_name": "surface_downward_heat_flux_in_snow", - "units": "W m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Downward Heat Flux into Snow Where Land over Land", - "comment": "the net downward heat flux from the atmosphere into the snow that lies on land divided by the land area in the grid cell; reported as 0.0 for snow-free land regions or where the land fraction is 0.", - "dimensions": "longitude latitude time", - "out_name": "hfdsn", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hflsIs": { - "modeling_realm": "landIce", - "standard_name": "surface_upward_latent_heat_flux", - "units": "W m-2", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacella", - "long_name": "Ice Sheet Surface Upward Latent Heat Flux", - "comment": "Upward latent heat flux from the ice sheet surface", - "dimensions": "longitude latitude time", - "out_name": "hflsIs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfssIs": { - "modeling_realm": "landIce", - "standard_name": "surface_upward_sensible_heat_flux", - "units": "W m-2", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacella", - "long_name": "Ice Sheet Surface Upward Sensible Heat Flux", - "comment": "Upward sensible heat flux from the ice sheet surface", - "dimensions": "longitude latitude time", - "out_name": "hfssIs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "icemIs": { - "modeling_realm": "landIce", - "standard_name": "land_ice_surface_melt_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacella", - "long_name": "Ice Sheet Surface Ice Melt Flux", - "comment": "Loss of ice mass resulting from surface melting. Computed as the total surface melt water on the land ice portion of the grid cell divided by land ice area in the grid cell.", - "dimensions": "longitude latitude time", - "out_name": "icemIs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "lialbIs": { - "modeling_realm": "landIce", - "standard_name": "surface_albedo", - "units": "1.0", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacella", - "long_name": "Ice Sheet Ice or Snow Albedo", - "comment": "Mean surface albedo of entire land ice covered part of the grid cell", - "dimensions": "longitude latitude time", - "out_name": "lialbIs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "litemptopIs": { - "modeling_realm": "landIce", - "standard_name": "temperature_at_top_of_ice_sheet_model", - "units": "K", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacella", - "long_name": "Ice Sheet Temperature at top of ice sheet model", - "comment": "Upper boundary temperature that is used to force ice sheet models. 
It is the temperature at the base of the snowpack models, and does not vary with seasons. Report surface temperature of ice sheet where snow thickness is zero", - "dimensions": "longitude latitude time", - "out_name": "litemptopIs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "lwsnl": { - "modeling_realm": "landIce land", - "standard_name": "liquid_water_content_of_snow_layer", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Liquid Water Content of Snow Layer", - "comment": "The total mass of liquid water contained interstitially within the whole depth of the snow layer of the land portion of a grid cell divided by the area of the land portion of the cell.", - "dimensions": "longitude latitude time", - "out_name": "lwsnl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrroIs": { - "modeling_realm": "landIce", - "standard_name": "runoff_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacella", - "long_name": "Ice Sheet Total Run-off", - "comment": "The total run-off (including drainage through the base of the soil model) per unit area leaving the land portion of the grid cell.", - "dimensions": "longitude latitude time", - "out_name": "mrroIs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "orogIs": { - "modeling_realm": "landIce", - "standard_name": "surface_altitude", - "units": "m", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacella", - "long_name": "Ice Sheet Surface Altitude", - "comment": "The surface called 'surface' means the lower boundary of the atmosphere. Altitude is the (geometric) height above the geoid, which is the reference geopotential surface. 
The geoid is similar to mean sea level.", - "dimensions": "longitude latitude time", - "out_name": "orogIs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pflw": { - "modeling_realm": "landIce land", - "standard_name": "liquid_water_content_of_permafrost_layer", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Liquid Water Content of Permafrost Layer", - "comment": "*where land over land*, i.e., this is the total mass of liquid water contained within the permafrost layer within the land portion of a grid cell divided by the area of the land portion of the cell.", - "dimensions": "longitude latitude time", - "out_name": "pflw", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prraIs": { - "modeling_realm": "landIce", - "standard_name": "rainfall_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacella", - "long_name": "Ice Sheet Rainfall rate", - "comment": "Rainfall rate over the ice sheet", - "dimensions": "longitude latitude time", - "out_name": "prraIs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prsnIs": { - "modeling_realm": "landIce", - "standard_name": "snowfall_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacella", - "long_name": "Ice Sheet Snowfall Flux", - "comment": "at surface; includes precipitation of all forms of water in the solid phase", - "dimensions": "longitude latitude time", - "out_name": "prsnIs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rldsIs": { - "modeling_realm": "landIce", - "standard_name": "surface_downwelling_longwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacella", - "long_name": "Ice Sheet Surface Downwelling Longwave Radiation", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "rldsIs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlusIs": { - "modeling_realm": "landIce", - "standard_name": "surface_upwelling_longwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacella", - "long_name": "Ice Sheet Surface Upwelling Longwave Radiation", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "rlusIs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsdsIs": { - "modeling_realm": "landIce", - "standard_name": "surface_downwelling_shortwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacella", - "long_name": "Ice Sheet Surface Downwelling Shortwave Radiation", - "comment": "Surface solar irradiance for UV calculations", - "dimensions": "longitude latitude time", - "out_name": "rsdsIs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsusIs": { - "modeling_realm": "landIce", - "standard_name": 
"surface_upwelling_shortwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacella", - "long_name": "Ice Sheet Surface Upwelling Shortwave Radiation", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "rsusIs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sbl": { - "modeling_realm": "landIce", - "standard_name": "surface_snow_and_ice_sublimation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Snow and Ice Sublimation Flux", - "comment": "The snow and ice sublimation flux is the loss of snow and ice mass per unit area from the surface resulting from their direct conversion to water vapor that enters the atmosphere.", - "dimensions": "longitude latitude time", - "out_name": "sbl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sblIs": { - "modeling_realm": "landIce", - "standard_name": "surface_snow_and_ice_sublimation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacella", - "long_name": "Ice Sheet Surface Snow and Ice Sublimation Flux", - "comment": "The snow and ice sublimation flux is the loss of snow and ice mass per unit area from the surface resulting from their direct conversion to water vapor that enters the atmosphere.", - "dimensions": "longitude latitude time", - "out_name": "sblIs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sftflf": { - "modeling_realm": "landIce", - "standard_name": "floating_ice_shelf_area_fraction", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Floating Ice Shelf Area Fraction", - "comment": "Fraction of grid cell covered by floating ice shelf, the component of the ice sheet that is flowing over seawater", - "dimensions": "longitude latitude time typefis", - "out_name": "sftflf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sftgif": { - "modeling_realm": "land", - "standard_name": "land_ice_area_fraction", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Fraction of Grid Cell Covered with Glacier", - "comment": "Fraction of grid cell covered by land ice (ice sheet, ice shelf, ice cap, glacier)", - "dimensions": "longitude latitude time typeli", - "out_name": "sftgif", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sftgrf": { - "modeling_realm": "landIce", - "standard_name": "grounded_ice_sheet_area_fraction", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Grounded Ice Sheet Area Fraction", - "comment": "Fraction of grid cell covered by grounded ice sheet", - "dimensions": "longitude latitude time typegis", - "out_name": "sftgrf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "snc": { - "modeling_realm": "landIce land", - "standard_name": "surface_snow_area_fraction", - "units": "%", - "cell_methods": "area: time: mean", - 
"cell_measures": "area: areacella", - "long_name": "Snow Area Fraction", - "comment": "Fraction of each grid cell that is occupied by snow that rests on land portion of cell.", - "dimensions": "longitude latitude time", - "out_name": "snc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sncIs": { - "modeling_realm": "landIce", - "standard_name": "surface_snow_area_fraction", - "units": "%", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacella", - "long_name": "Ice Sheet Snow Cover Percentage", - "comment": "Percentage of each grid cell that is occupied by snow that rests on land portion of cell.", - "dimensions": "longitude latitude time", - "out_name": "sncIs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "snd": { - "modeling_realm": "landIce land", - "standard_name": "surface_snow_thickness", - "units": "m", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Snow Depth", - "comment": "where land over land, this is computed as the mean thickness of snow in the land portion of the grid cell (averaging over the entire land portion, including the snow-free fraction). Reported as 0.0 where the land fraction is 0.", - "dimensions": "longitude latitude time", - "out_name": "snd", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "snicefreezIs": { - "modeling_realm": "landIce", - "standard_name": "surface_snow_and_ice_refreezing_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacella", - "long_name": "Ice Sheet Surface Snow and Ice Refreeze Flux", - "comment": "Mass flux of surface meltwater which refreezes within the snowpack. Computed as the total refreezing on the land ice portion of the grid cell divided by land ice area in the grid cell.", - "dimensions": "longitude latitude time", - "out_name": "snicefreezIs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "snicemIs": { - "modeling_realm": "landIce", - "standard_name": "surface_snow_and_ice_melt_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacella", - "long_name": "Ice Sheet Surface Snow and Ice Melt Flux", - "comment": "Loss of snow and ice mass resulting from surface melting. 
Computed as the total surface melt on the land ice portion of the grid cell divided by land ice area in the grid cell.", - "dimensions": "longitude latitude time", - "out_name": "snicemIs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "snm": { - "modeling_realm": "landIce land", - "standard_name": "surface_snow_melt_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Snow Melt", - "comment": "The total surface snow melt rate on the land portion of the grid cell divided by the land area in the grid cell; report as zero for snow-free land regions and missing where there is no land.", - "dimensions": "longitude latitude time", - "out_name": "snm", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "snmIs": { - "modeling_realm": "landIce", - "standard_name": "surface_snow_melt_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacella", - "long_name": "Ice Sheet Surface Snow Melt", - "comment": "The total surface snow melt rate on the land portion of the grid cell divided by the land area in the grid cell; report as zero for snow-free land regions and missing where there is no land.", - "dimensions": "longitude latitude time", - "out_name": "snmIs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "snw": { - "modeling_realm": "landIce land", - "standard_name": "surface_snow_amount", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Snow Amount", - "comment": "The mass of surface snow on the land portion of the grid cell divided by the land area in the grid cell; reported as missing where the land fraction is 0; excludes snow on vegetation canopy or on sea ice.", - "dimensions": "longitude latitude time", - "out_name": "snw", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sootsn": { - "modeling_realm": "landIce land", - "standard_name": "soot_content_of_surface_snow", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Snow Soot Content", - "comment": "the entire land portion of the grid cell is considered, with snow soot content set to 0.0 in regions free of snow.", - "dimensions": "longitude latitude time", - "out_name": "sootsn", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tasIs": { - "modeling_realm": "landIce", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacella", - "long_name": "Ice Sheet Near-Surface Air Temperature", - "comment": "near-surface (usually, 2 meter) air temperature", - "dimensions": "longitude latitude time", - "out_name": "tasIs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tpf": { - "modeling_realm": "landIce land", - "standard_name": "permafrost_layer_thickness", - "units": "m", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - 
"long_name": "Permafrost Layer Thickness", - "comment": "The mean thickness of the permafrost layer in the land portion of the grid cell. Reported as zero in permafrost-free regions.", - "dimensions": "longitude latitude time", - "out_name": "tpf", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tsIs": { - "modeling_realm": "landIce", - "standard_name": "surface_temperature", - "units": "K", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacella", - "long_name": "Ice Sheet Surface Temperature", - "comment": "Temperature of the lower boundary of the atmosphere", - "dimensions": "longitude latitude time", - "out_name": "tsIs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tsn": { - "modeling_realm": "landIce land", - "standard_name": "temperature_in_surface_snow", - "units": "K", - "cell_methods": "area: mean where land time: mean (with samples weighted by snow mass)", - "cell_measures": "area: areacella", - "long_name": "Snow Internal Temperature", - "comment": "This temperature is averaged over all the snow in the grid cell that rests on land or land ice. When computing the time-mean here, the time samples, weighted by the mass of snow on the land portion of the grid cell, are accumulated and then divided by the sum of the weights. Reported as missing in regions free of snow on land.", - "dimensions": "longitude latitude time", - "out_name": "tsn", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tsnIs": { - "modeling_realm": "landIce", - "standard_name": "temperature_in_surface_snow", - "units": "K", - "cell_methods": "area: time: mean where ice_sheet", - "cell_measures": "area: areacella", - "long_name": "Ice Sheet Snow Internal Temperature", - "comment": "This temperature is averaged over all the snow in the grid cell that rests on land or land ice. When computing the time-mean here, the time samples, weighted by the mass of snow on the land portion of the grid cell, are accumulated and then divided by the sum of the weights. 
Reported as missing in regions free of snow on land.", - "dimensions": "longitude latitude time", - "out_name": "tsnIs", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Lmon.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Lmon.json deleted file mode 100644 index db286de97a..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Lmon.json +++ /dev/null @@ -1,936 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table Lmon", - "realm": "land", - "frequency": "mon", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "30.00000", - "generic_levels": "", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "baresoilFrac": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "Bare Soil Fraction", - "comment": "Percentage of entire grid cell that is covered by bare soil.", - "dimensions": "longitude latitude time typebare", - "out_name": "baresoilFrac", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "burntFractionAll": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "Percentage of Entire Grid cell that is Covered by Burnt Vegetation (All Classes)", - "comment": "Percentage of grid cell burned due to all fires including natural and anthropogenic fires and those associated with anthropogenic land use change", - "dimensions": "longitude latitude time typeburnt", - "out_name": "burntFractionAll", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "c3PftFrac": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "Total C3 PFT Cover Fraction", - "comment": "Percentage of entire grid cell that is covered by C3 PFTs (including grass, crops, and trees).", - "dimensions": "longitude latitude time typec3pft", - "out_name": "c3PftFrac", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "c4PftFrac": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "Total C4 PFT Cover Fraction", - "comment": "Percentage of entire grid cell that is covered by C4 PFTs (including grass and crops).", - "dimensions": "longitude latitude time typec4pft", - "out_name": "c4PftFrac", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cCwd": { - "modeling_realm": "land", - "standard_name": "wood_debris_carbon_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass in Coarse Woody Debris", - "comment": "Carbon mass per unit area in 
woody debris (dead organic matter composed of coarse wood; it is distinct from litter)", - "dimensions": "longitude latitude time", - "out_name": "cCwd", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cLeaf": { - "modeling_realm": "land", - "standard_name": "leaf_carbon_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass in Leaves", - "comment": "Carbon mass per unit area in leaves.", - "dimensions": "longitude latitude time", - "out_name": "cLeaf", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cLitter": { - "modeling_realm": "land", - "standard_name": "litter_carbon_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass in Litter Pool", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "cLitter", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cLitterAbove": { - "modeling_realm": "land", - "standard_name": "surface_litter_carbon_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass in Above-Ground Litter", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "cLitterAbove", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cLitterBelow": { - "modeling_realm": "land", - "standard_name": "subsurface_litter_carbon_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass in Below-Ground Litter", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "cLitterBelow", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cProduct": { - "modeling_realm": "land", - "standard_name": "carbon_content_of_products_of_anthropogenic_land_use_change", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass in Products of Land Use Change", - "comment": "Carbon mass per unit area in products that have been removed from the environment through land use change.", - "dimensions": "longitude latitude time", - "out_name": "cProduct", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cRoot": { - "modeling_realm": "land", - "standard_name": "root_carbon_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass in Roots", - "comment": "Carbon mass per unit area in roots, including fine and coarse roots.", - "dimensions": "longitude latitude time", - "out_name": "cRoot", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cSoilFast": { - "modeling_realm": "land", - "standard_name": "fast_soil_pool_carbon_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass 
in Fast Soil Pool", - "comment": "Carbon mass per unit area in fast soil pool. Fast means a lifetime of less than 10 years for reference climate conditions (20th century) in the absence of water limitations.", - "dimensions": "longitude latitude time", - "out_name": "cSoilFast", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cSoilMedium": { - "modeling_realm": "land", - "standard_name": "medium_soil_pool_carbon_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass in Medium Soil Pool", - "comment": "Carbon mass per unit area in medium (rate) soil pool. Medium means a lifetime of more than than 10 years and less than 100 years for reference climate conditions (20th century) in the absence of water limitations.", - "dimensions": "longitude latitude time", - "out_name": "cSoilMedium", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cSoilSlow": { - "modeling_realm": "land", - "standard_name": "slow_soil_pool_carbon_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass in Slow Soil Pool", - "comment": "Carbon mass per unit area in slow soil pool. Slow means a lifetime of more than 100 years for reference climate (20th century) in the absence of water limitations.", - "dimensions": "longitude latitude time", - "out_name": "cSoilSlow", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cVeg": { - "modeling_realm": "land", - "standard_name": "vegetation_carbon_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass in Vegetation", - "comment": "Carbon mass per unit area in vegetation.", - "dimensions": "longitude latitude time", - "out_name": "cVeg", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cropFrac": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "Crop Fraction", - "comment": "Percentage of entire grid cell that is covered by crop.", - "dimensions": "longitude latitude time typecrop", - "out_name": "cropFrac", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "evspsblsoi": { - "modeling_realm": "land", - "standard_name": "water_evaporation_flux_from_soil", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Water Evaporation from Soil", - "comment": "Water evaporation from soil (including sublimation).", - "dimensions": "longitude latitude time", - "out_name": "evspsblsoi", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "evspsblveg": { - "modeling_realm": "land", - "standard_name": "water_evaporation_flux_from_canopy", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Evaporation from Canopy", - 
"comment": "The canopy evaporation and sublimation (if present in model); may include dew formation as a negative flux.", - "dimensions": "longitude latitude time", - "out_name": "evspsblveg", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fFire": { - "modeling_realm": "land", - "standard_name": "surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_fires_excluding_anthropogenic_land_use_change", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass Flux into Atmosphere due to CO2 Emission from Fire", - "comment": "CO2 emissions (expressed as a carbon mass flux per unit area) from natural fires and human ignition fires as calculated by the fire module of the dynamic vegetation model, but excluding any CO2 flux from fire included in fLuc (CO2 Flux to Atmosphere from Land Use Change).", - "dimensions": "longitude latitude time", - "out_name": "fFire", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fGrazing": { - "modeling_realm": "land", - "standard_name": "surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_grazing", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass Flux into Atmosphere due to Grazing on Land", - "comment": "Carbon mass flux per unit area due to grazing on land", - "dimensions": "longitude latitude time", - "out_name": "fGrazing", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fHarvest": { - "modeling_realm": "land", - "standard_name": "surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_crop_harvesting", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass Flux into Atmosphere due to Crop Harvesting", - "comment": "Carbon mass flux per unit area due to crop harvesting", - "dimensions": "longitude latitude time", - "out_name": "fHarvest", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fLitterSoil": { - "modeling_realm": "land", - "standard_name": "carbon_mass_flux_into_soil_from_litter", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Carbon Mass Flux from Litter to Soil", - "comment": "Carbon mass flux per unit area into soil from litter (dead plant material in or above the soil).", - "dimensions": "longitude latitude time", - "out_name": "fLitterSoil", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fVegLitter": { - "modeling_realm": "land", - "standard_name": "litter_carbon_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Carbon Mass Flux from Vegetation to Litter", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "fVegLitter", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fVegSoil": { - 
"modeling_realm": "land", - "standard_name": "carbon_mass_flux_into_soil_from_vegetation_excluding_litter", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Carbon Mass Flux from Vegetation Directly to Soil", - "comment": "Carbon mass flux per unit area from vegetation directly into soil, without intermediate conversion to litter.", - "dimensions": "longitude latitude time", - "out_name": "fVegSoil", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "gpp": { - "modeling_realm": "land", - "standard_name": "gross_primary_productivity_of_biomass_expressed_as_carbon", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass Flux out of Atmosphere due to Gross Primary Production on Land", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "gpp", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "grassFrac": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "Natural Grass Fraction", - "comment": "Percentage of entire grid cell that is covered by natural grass.", - "dimensions": "longitude latitude time typenatgr", - "out_name": "grassFrac", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "lai": { - "modeling_realm": "land", - "standard_name": "leaf_area_index", - "units": "1.0", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Leaf Area Index", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "lai", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "landCoverFrac": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "Plant Functional Type Grid Fraction", - "comment": "Percentage of grid cell area occupied by different model vegetation/land cover categories. The categories may differ from model to model, depending on each model's subgrid land cover category definitions. Categories may include natural vegetation, anthropogenic vegetation, bare soil, lakes, urban areas, glaciers, etc. 
Sum of all should equal the fraction of the grid-cell that is land.", - "dimensions": "longitude latitude vegtype time", - "out_name": "landCoverFrac", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrfso": { - "modeling_realm": "land landIce", - "standard_name": "soil_frozen_water_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Soil Frozen Water Content", - "comment": "The mass per unit area (summed over all model layers) of frozen water.", - "dimensions": "longitude latitude time", - "out_name": "mrfso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrlsl": { - "modeling_realm": "land", - "standard_name": "moisture_content_of_soil_layer", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Water Content of Soil Layer", - "comment": "The mass of water in all phases, including ice, in soil layers. Report as missing for grid cells with no land.", - "dimensions": "longitude latitude sdepth time", - "out_name": "mrlsl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrro": { - "modeling_realm": "land", - "standard_name": "runoff_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Runoff", - "comment": "The total run-off (including drainage through the base of the soil model) per unit area leaving the land portion of the grid cell.", - "dimensions": "longitude latitude time", - "out_name": "mrro", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrros": { - "modeling_realm": "land", - "standard_name": "surface_runoff_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Runoff", - "comment": "The total surface run off leaving the land portion of the grid cell (excluding drainage through the base of the soil model).", - "dimensions": "longitude latitude time", - "out_name": "mrros", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrso": { - "modeling_realm": "land", - "standard_name": "soil_moisture_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Soil Moisture Content", - "comment": "the mass per unit area (summed over all soil layers) of water in all phases.", - "dimensions": "longitude latitude time", - "out_name": "mrso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrsos": { - "modeling_realm": "land", - "standard_name": "moisture_content_of_soil_layer", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Moisture in Upper Portion of Soil Column", - "comment": "The mass of water in all phases in the upper 10cm of the soil layer.", - "dimensions": "longitude latitude time sdepth1", - "out_name": "mrsos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", 
- "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nbp": { - "modeling_realm": "land", - "standard_name": "surface_net_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_all_land_processes", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass Flux out of Atmosphere due to Net Biospheric Production on Land", - "comment": "This is the net mass flux of carbon from atmosphere into land, calculated as photosynthesis MINUS the sum of plant and soil respiration, carbon fluxes from fire, harvest, grazing and land use change. Positive flux is into the land.", - "dimensions": "longitude latitude time", - "out_name": "nbp", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "npp": { - "modeling_realm": "land", - "standard_name": "net_primary_productivity_of_biomass_expressed_as_carbon", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass Flux out of Atmosphere due to Net Primary Production on Land", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "npp", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nppLeaf": { - "modeling_realm": "land", - "standard_name": "net_primary_productivity_of_carbon_accumulated_in_leaves", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass Flux due to NPP Allocation to Leaf", - "comment": "This is the rate of carbon uptake by leaves due to NPP", - "dimensions": "longitude latitude time", - "out_name": "nppLeaf", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nppRoot": { - "modeling_realm": "land", - "standard_name": "net_primary_productivity_of_carbon_accumulated_in_roots", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass Flux due to NPP Allocation to Roots", - "comment": "This is the rate of carbon uptake by roots due to NPP", - "dimensions": "longitude latitude time", - "out_name": "nppRoot", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nppWood": { - "modeling_realm": "land", - "standard_name": "net_primary_productivity_of_carbon_accumulated_in_wood", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass Flux due to NPP Allocation to Wood", - "comment": "This is the rate of carbon uptake by wood due to NPP", - "dimensions": "longitude latitude time", - "out_name": "nppWood", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pastureFrac": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "Anthropogenic Pasture Fraction", - "comment": "Percentage of entire grid cell that is covered by anthropogenic pasture.", - "dimensions": "longitude latitude time typepasture", - "out_name": 
"pastureFrac", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prveg": { - "modeling_realm": "land", - "standard_name": "precipitation_flux_onto_canopy", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Precipitation onto Canopy", - "comment": "The precipitation flux that is intercepted by the vegetation canopy (if present in model) before reaching the ground.", - "dimensions": "longitude latitude time", - "out_name": "prveg", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rGrowth": { - "modeling_realm": "land", - "standard_name": "surface_upward_carbon_mass_flux_due_to_plant_respiration_for_biomass_growth", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass Flux into Atmosphere due to Growth Autotrophic Respiration on Land", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "rGrowth", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rMaint": { - "modeling_realm": "land", - "standard_name": "surface_upward_carbon_mass_flux_due_to_plant_respiration_for_biomass_maintenance", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass Flux into Atmosphere due to Maintenance Autotrophic Respiration on Land", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "rMaint", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ra": { - "modeling_realm": "land", - "standard_name": "plant_respiration_carbon_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass Flux into Atmosphere due to Autotrophic (Plant) Respiration on Land", - "comment": "Carbon mass flux per unit area into atmosphere due to autotrophic respiration on land (respiration by producers) [see rh for heterotrophic production]", - "dimensions": "longitude latitude time", - "out_name": "ra", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "residualFrac": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "Fraction of Grid Cell that is Land but Neither Vegetation-Covered nor Bare Soil", - "comment": "Percentage of entire grid cell that is land and is covered by neither vegetation nor bare-soil (e.g., urban, ice, lakes, etc.)", - "dimensions": "longitude latitude time typeresidual", - "out_name": "residualFrac", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rh": { - "modeling_realm": "land", - "standard_name": "heterotrophic_respiration_carbon_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Carbon Mass Flux into Atmosphere due to Heterotrophic Respiration on Land", - "comment": "Carbon mass flux per unit 
area into atmosphere due to heterotrophic respiration on land (respiration by consumers)", - "dimensions": "longitude latitude time", - "out_name": "rh", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "shrubFrac": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "Shrub Fraction", - "comment": "Percentage of entire grid cell that is covered by shrub.", - "dimensions": "longitude latitude time typeshrub", - "out_name": "shrubFrac", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tran": { - "modeling_realm": "land", - "standard_name": "transpiration_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Transpiration", - "comment": "Transpiration (may include dew formation as a negative flux).", - "dimensions": "longitude latitude time", - "out_name": "tran", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "treeFrac": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "Tree Cover Fraction", - "comment": "Percentage of entire grid cell that is covered by trees.", - "dimensions": "longitude latitude time typetree", - "out_name": "treeFrac", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "treeFracPrimDec": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Primary Deciduous Tree Fraction", - "comment": "Percentage of the entire grid cell that is covered by total primary deciduous trees.", - "dimensions": "longitude latitude time typepdec", - "out_name": "treeFracPrimDec", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "treeFracPrimEver": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Primary Evergreen Tree Cover Fraction", - "comment": "Percentage of entire grid cell that is covered by primary evergreen trees.", - "dimensions": "longitude latitude time typepever", - "out_name": "treeFracPrimEver", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "treeFracSecDec": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Secondary Deciduous Tree Cover Fraction", - "comment": "Percentage of entire grid cell that is covered by secondary deciduous trees.", - "dimensions": "longitude latitude time typesdec", - "out_name": "treeFracSecDec", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": 
"", - "ok_max_mean_abs": "" - }, - "treeFracSecEver": { - "modeling_realm": "land", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: mean where land over all_area_types time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Secondary Evergreen Tree Cover Fraction", - "comment": "Percentage of entire grid cell that is covered by secondary evergreen trees.", - "dimensions": "longitude latitude time typesever", - "out_name": "treeFracSecEver", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tsl": { - "modeling_realm": "land", - "standard_name": "soil_temperature", - "units": "K", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Temperature of Soil", - "comment": "Temperature of each soil layer. Reported as missing for grid cells with no land.", - "dimensions": "longitude latitude sdepth time", - "out_name": "tsl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Oclim.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Oclim.json deleted file mode 100644 index d5d5f2cd69..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Oclim.json +++ /dev/null @@ -1,596 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table Oclim", - "realm": "ocean", - "frequency": "monClim", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "30.00000", - "generic_levels": "olevel", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "difmxybo": { - "modeling_realm": "ocean", - "standard_name": "ocean_momentum_xy_biharmonic_diffusivity", - "units": "m4 s-1", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Ocean Momentum XY Biharmonic Diffusivity", - "comment": "Lateral biharmonic viscosity applied to the momentum equitions.", - "dimensions": "longitude latitude olevel time2", - "out_name": "difmxybo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "difmxybo2d": { - "modeling_realm": "ocean", - "standard_name": "ocean_momentum_xy_biharmonic_diffusivity", - "units": "m4 s-1", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello", - "long_name": "Ocean Momentum XY Biharmonic Diffusivity", - "comment": "Lateral biharmonic viscosity applied to the momentum equitions.", - "dimensions": "longitude latitude time2", - "out_name": "difmxybo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "difmxylo": { - "modeling_realm": "ocean", - "standard_name": "ocean_momentum_xy_laplacian_diffusivity", - "units": "m2 s-1", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Ocean Momentum XY Laplacian Diffusivity", - "comment": "Lateral Laplacian viscosity applied to the momentum equitions.", - "dimensions": "longitude latitude olevel time2", - "out_name": "difmxylo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - 
"ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "difmxylo2d": { - "modeling_realm": "ocean", - "standard_name": "ocean_momentum_xy_laplacian_diffusivity", - "units": "m2 s-1", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello", - "long_name": "Ocean Momentum XY Laplacian Diffusivity", - "comment": "Lateral Laplacian viscosity applied to the momentum equitions.", - "dimensions": "longitude latitude time2", - "out_name": "difmxylo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "diftrbbo": { - "modeling_realm": "ocean", - "standard_name": "ocean_tracer_bolus_biharmonic_diffusivity", - "units": "m4 s-1", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Ocean Tracer Bolus Biharmonic Diffusivity", - "comment": "", - "dimensions": "longitude latitude olevel time2", - "out_name": "diftrbbo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "diftrbbo2d": { - "modeling_realm": "ocean", - "standard_name": "ocean_tracer_bolus_biharmonic_diffusivity", - "units": "m4 s-1", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello", - "long_name": "Ocean Tracer Bolus Biharmonic Diffusivity", - "comment": "", - "dimensions": "longitude latitude time2", - "out_name": "diftrbbo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "diftrblo": { - "modeling_realm": "ocean", - "standard_name": "ocean_tracer_bolus_laplacian_diffusivity", - "units": "m2 s-1", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Ocean Tracer Bolus Laplacian Diffusivity", - "comment": "Ocean tracer diffusivity associated with parameterized eddy-induced advective transport. Sometimes this diffusivity is called the 'thickness' diffusivity. For CMIP5, this diagnostic was called 'ocean tracer bolus laplacian diffusivity'. The CMIP6 name is physically more relevant.", - "dimensions": "longitude latitude olevel time2", - "out_name": "diftrblo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "diftrblo2d": { - "modeling_realm": "ocean", - "standard_name": "ocean_tracer_bolus_laplacian_diffusivity", - "units": "m2 s-1", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello", - "long_name": "Ocean Tracer Bolus Laplacian Diffusivity", - "comment": "Ocean tracer diffusivity associated with parameterized eddy-induced advective transport. Sometimes this diffusivity is called the 'thickness' diffusivity. For CMIP5, this diagnostic was called 'ocean tracer bolus laplacian diffusivity'. 
The CMIP6 name is physically more relevant.", - "dimensions": "longitude latitude time2", - "out_name": "diftrblo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "diftrebo": { - "modeling_realm": "ocean", - "standard_name": "ocean_tracer_epineutral_biharmonic_diffusivity", - "units": "m4 s-1", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Ocean Tracer Epineutral Biharmonic Diffusivity", - "comment": "", - "dimensions": "longitude latitude olevel time2", - "out_name": "diftrebo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "diftrebo2d": { - "modeling_realm": "ocean", - "standard_name": "ocean_tracer_epineutral_biharmonic_diffusivity", - "units": "m4 s-1", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello", - "long_name": "Ocean Tracer Epineutral Biharmonic Diffusivity", - "comment": "", - "dimensions": "longitude latitude time2", - "out_name": "diftrebo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "diftrelo": { - "modeling_realm": "ocean", - "standard_name": "ocean_tracer_epineutral_laplacian_diffusivity", - "units": "m2 s-1", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Ocean Tracer Epineutral Laplacian Diffusivity", - "comment": "Ocean tracer diffusivity associated with parameterized eddy-induced diffusive transport oriented along neutral or isopycnal directions. Sometimes this diffusivity is called the neutral diffusivity or isopycnal diffusivity or Redi diffusivity.", - "dimensions": "longitude latitude olevel time2", - "out_name": "diftrelo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "diftrelo2d": { - "modeling_realm": "ocean", - "standard_name": "ocean_tracer_epineutral_laplacian_diffusivity", - "units": "m2 s-1", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello", - "long_name": "Ocean Tracer Epineutral Laplacian Diffusivity", - "comment": "Ocean tracer diffusivity associated with parameterized eddy-induced diffusive transport oriented along neutral or isopycnal directions. 
Sometimes this diffusivity is called the neutral diffusivity or isopycnal diffusivity or Redi diffusivity.", - "dimensions": "longitude latitude time2", - "out_name": "diftrelo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "diftrxybo": { - "modeling_realm": "ocean", - "standard_name": "ocean_tracer_xy_biharmonic_diffusivity", - "units": "m4 s-1", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Ocean Tracer XY Biharmonic Diffusivity", - "comment": "", - "dimensions": "longitude latitude olevel time2", - "out_name": "diftrxybo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "diftrxybo2d": { - "modeling_realm": "ocean", - "standard_name": "ocean_tracer_xy_biharmonic_diffusivity", - "units": "m4 s-1", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello", - "long_name": "Ocean Tracer XY Biharmonic Diffusivity", - "comment": "", - "dimensions": "longitude latitude time2", - "out_name": "diftrxybo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "diftrxylo": { - "modeling_realm": "ocean", - "standard_name": "ocean_tracer_xy_laplacian_diffusivity", - "units": "m2 s-1", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Ocean Tracer XY Laplacian Diffusivity", - "comment": "", - "dimensions": "longitude latitude olevel time2", - "out_name": "diftrxylo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "diftrxylo2d": { - "modeling_realm": "ocean", - "standard_name": "ocean_tracer_xy_laplacian_diffusivity", - "units": "m2 s-1", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello", - "long_name": "Ocean Tracer XY Laplacian Diffusivity", - "comment": "", - "dimensions": "longitude latitude time2", - "out_name": "diftrxylo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "difvho": { - "modeling_realm": "ocean", - "standard_name": "ocean_vertical_heat_diffusivity", - "units": "m2 s-1", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Ocean Vertical Heat Diffusivity", - "comment": "Vertical/dianeutral diffusivity applied to prognostic temperature field.", - "dimensions": "longitude latitude olevel time2", - "out_name": "difvho", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "difvmbo": { - "modeling_realm": "ocean", - "standard_name": "ocean_vertical_momentum_diffusivity_due_to_background", - "units": "m2 s-1", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Ocean Vertical Momentum Diffusivity due to Background", - "comment": "", - "dimensions": "longitude latitude olevel time2", - "out_name": "difvmbo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", 
- "ok_max_mean_abs": "" - }, - "difvmfdo": { - "modeling_realm": "ocean", - "standard_name": "ocean_vertical_momentum_diffusivity_due_to_form_drag", - "units": "m2 s-1", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Ocean Vertical Momentum Diffusivity due to Form Drag", - "comment": "", - "dimensions": "longitude latitude olevel time2", - "out_name": "difvmfdo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "difvmo": { - "modeling_realm": "ocean", - "standard_name": "ocean_vertical_momentum_diffusivity", - "units": "m2 s-1", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Ocean Vertical Momentum Diffusivity", - "comment": "", - "dimensions": "longitude latitude olevel time2", - "out_name": "difvmo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "difvmto": { - "modeling_realm": "ocean", - "standard_name": "ocean_vertical_momentum_diffusivity_due_to_tides", - "units": "m2 s-1", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Ocean Vertical Momentum Diffusivity due to Tides", - "comment": "", - "dimensions": "longitude latitude olevel time2", - "out_name": "difvmto", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "difvso": { - "modeling_realm": "ocean", - "standard_name": "ocean_vertical_salt_diffusivity", - "units": "m2 s-1", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Ocean Vertical Salt Diffusivity", - "comment": "Vertical/dianeutral diffusivity applied to prognostic salinity field.", - "dimensions": "longitude latitude olevel time2", - "out_name": "difvso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "difvtrbo": { - "modeling_realm": "ocean", - "standard_name": "ocean_vertical_tracer_diffusivity_due_to_background", - "units": "m2 s-1", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Ocean Vertical Tracer Diffusivity due to Background", - "comment": "", - "dimensions": "longitude latitude olevel time2", - "out_name": "difvtrbo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "difvtrto": { - "modeling_realm": "ocean", - "standard_name": "ocean_vertical_tracer_diffusivity_due_to_tides", - "units": "m2 s-1", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Ocean Vertical Tracer Diffusivity due to Tides", - "comment": "", - "dimensions": "longitude latitude olevel time2", - "out_name": "difvtrto", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dispkevfo": { - "modeling_realm": "ocean", - "standard_name": "ocean_kinetic_energy_dissipation_per_unit_area_due_to_vertical_friction", - "units": "W m-2", - 
"cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Ocean Kinetic Energy Dissipation Per Unit Area due to Vertical Friction", - "comment": "", - "dimensions": "longitude latitude olevel time2", - "out_name": "dispkevfo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dispkexyfo": { - "modeling_realm": "ocean", - "standard_name": "ocean_kinetic_energy_dissipation_per_unit_area_due_to_xy_friction", - "units": "W m-2", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Ocean Kinetic Energy Dissipation Per Unit Area due to XY Friction", - "comment": "Depth integrated impacts on kinetic energy arising from lateral frictional dissipation associated with Laplacian and/or biharmonic viscosity. For CMIP5, this diagnostic was 3d, whereas the CMIP6 depth integrated diagnostic is sufficient for many purposes and reduces archive requirements.", - "dimensions": "longitude latitude olevel time2", - "out_name": "dispkexyfo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dispkexyfo2d": { - "modeling_realm": "ocean", - "standard_name": "ocean_kinetic_energy_dissipation_per_unit_area_due_to_xy_friction", - "units": "W m-2", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello", - "long_name": "Ocean Kinetic Energy Dissipation Per Unit Area due to XY Friction", - "comment": "Depth integrated impacts on kinetic energy arising from lateral frictional dissipation associated with Laplacian and/or biharmonic viscosity. For CMIP5, this diagnostic was 3d, whereas the CMIP6 depth integrated diagnostic is sufficient for many purposes and reduces archive requirements.", - "dimensions": "longitude latitude time2", - "out_name": "dispkexyfo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tnkebto": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_ocean_eddy_kinetic_energy_content_due_to_bolus_transport", - "units": "W m-2", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Tendency of Ocean Eddy Kinetic Energy Content due to Bolus Transport", - "comment": "Depth integrated impacts on kinetic energy arising from parameterized eddy-induced advection. For CMIP5, this diagnostic was 3d, whereas the CMIP6 depth integrated diagnostic is sufficient for many purposes and reduces archive requirements.", - "dimensions": "longitude latitude olevel time2", - "out_name": "tnkebto", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tnkebto2d": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_ocean_eddy_kinetic_energy_content_due_to_bolus_transport", - "units": "W m-2", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello", - "long_name": "Tendency of Ocean Eddy Kinetic Energy Content due to Bolus Transport", - "comment": "Depth integrated impacts on kinetic energy arising from parameterized eddy-induced advection. 
For CMIP5, this diagnostic was 3d, whereas the CMIP6 depth integrated diagnostic is sufficient for many purposes and reduces archive requirements.", - "dimensions": "longitude latitude time2", - "out_name": "tnkebto", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tnpeo": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_ocean_potential_energy_content", - "units": "W m-2", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Tendency of Ocean Potential Energy Content", - "comment": "Rate that work is done against vertical stratification, as measured by the vertical heat and salt diffusivity. Report here as depth integrated two-dimensional field.", - "dimensions": "longitude latitude olevel time2", - "out_name": "tnpeo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tnpeot": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_ocean_potential_energy_content_due_to_tides", - "units": "W m-2", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Tendency of Ocean Potential Energy Content due to Tides", - "comment": "", - "dimensions": "longitude latitude olevel time2", - "out_name": "tnpeot", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tnpeotb": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_ocean_potential_energy_content_due_to_background", - "units": "W m-2", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Tendency of Ocean Potential Energy Content due to Background", - "comment": "", - "dimensions": "longitude latitude olevel time2", - "out_name": "tnpeotb", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zfullo": { - "modeling_realm": "ocean", - "standard_name": "depth_below_geoid", - "units": "m", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Depth Below Geoid of Ocean Layer", - "comment": "Depth below geoid", - "dimensions": "longitude latitude olevel time2", - "out_name": "zfullo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zhalfo": { - "modeling_realm": "ocean", - "standard_name": "depth_below_geoid", - "units": "m", - "cell_methods": "area: mean time: mean within years time: mean over years", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Depth Below Geoid of Interfaces Between Ocean Layers", - "comment": "Depth below geoid", - "dimensions": "longitude latitude olevel time2", - "out_name": "zhalfo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Oday.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Oday.json deleted file mode 100644 index 761e264812..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Oday.json +++ /dev/null @@ -1,137 +0,0 @@ -{ - "Header": { - 
"data_specs_version": "01.00.10", - "table_id": "Table Oday", - "realm": "ocnBgChem", - "frequency": "day", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "1.00000", - "generic_levels": "", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "chlos": { - "modeling_realm": "ocnBgChem", - "standard_name": "mass_concentration_of_phytoplankton_expressed_as_chlorophyll_in_sea_water", - "units": "kg m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Sea Surface Total Chlorophyll Mass Concentration", - "comment": "Sum of chlorophyll from all phytoplankton group concentrations at the sea surface. In most models this is equal to chldiat+chlmisc, that is the sum of 'Diatom Chlorophyll Mass Concentration' plus 'Other Phytoplankton Chlorophyll Mass Concentration'", - "dimensions": "longitude latitude time", - "out_name": "chlos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "omldamax": { - "modeling_realm": "ocean", - "standard_name": "ocean_mixed_layer_thickness_defined_by_mixing_scheme", - "units": "m", - "cell_methods": "area: mean time: maximum", - "cell_measures": "area: areacello", - "long_name": "Daily Maximum Ocean Mixed Layer Thickness Defined by Mixing Scheme", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "omldamax", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phycos": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_phytoplankton_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Sea Surface Phytoplankton Carbon Concentration", - "comment": "sum of phytoplankton organic carbon component concentrations at the sea surface", - "dimensions": "longitude latitude time", - "out_name": "phycos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sos": { - "modeling_realm": "ocean", - "standard_name": "sea_surface_salinity", - "units": "0.001", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Sea Surface Salinity", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "sos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sossq": { - "modeling_realm": "ocean", - "standard_name": "square_of_sea_surface_salinity", - "units": "1e-06", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Square of Sea Surface Salinity", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "sossq", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tos": { - "modeling_realm": "ocean", - "standard_name": "sea_surface_temperature", - "units": "K", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Sea Surface Temperature", - "comment": "temperature of liquid ocean. 
Note that the correct standard_name for this variable is 'sea_surface_temperature', not 'surface_temperature', but this was discovered too late to correct. To maintain consistency across CMIP5 models, the wrong standard_name will continue to be used.", - "dimensions": "longitude latitude time", - "out_name": "tos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tossq": { - "modeling_realm": "ocean", - "standard_name": "square_of_sea_surface_temperature", - "units": "degC2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Square of Sea Surface Temperature", - "comment": "square of temperature of liquid ocean, averaged over the day.", - "dimensions": "longitude latitude time", - "out_name": "tossq", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Odec.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Odec.json deleted file mode 100644 index 40438fe53b..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Odec.json +++ /dev/null @@ -1,426 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table Odec", - "realm": "ocean", - "frequency": "dec", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "", - "generic_levels": "olevel", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "agessc": { - "modeling_realm": "ocean", - "standard_name": "sea_water_age_since_surface_contact", - "units": "yr", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Sea Water Age Since Surface Contact", - "comment": "Time elapsed since water was last in surface layer of the ocean.", - "dimensions": "longitude latitude olevel time", - "out_name": "agessc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "bigthetaoga": { - "modeling_realm": "ocean", - "standard_name": "sea_water_conservative_temperature", - "units": "degC", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "", - "long_name": "Global Average Sea Water Conservative Temperature", - "comment": "Diagnostic should be contributed only for models using conservative temperature as prognostic field.", - "dimensions": "time", - "out_name": "bigthetaoga", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfbasin": { - "modeling_realm": "ocean", - "standard_name": "northward_ocean_heat_transport", - "units": "W", - "cell_methods": "longitude: mean (basin) time: mean", - "cell_measures": "area: areacella", - "long_name": "Northward Ocean Heat Transport", - "comment": "Contains contributions from all physical processes affecting the northward heat transport, including resolved advection, parameterized advection, lateral diffusion, etc. Diagnosed here as a function of latitude and basin. 
Use Celsius for temperature scale.", - "dimensions": "latitude basin time", - "out_name": "hfbasin", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfds": { - "modeling_realm": "ocean", - "standard_name": "surface_downward_heat_flux_in_sea_water", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Downward Heat Flux at Sea Water Surface", - "comment": "This is the net flux of heat entering the liquid water column through its upper surface (excluding any 'flux adjustment') .", - "dimensions": "longitude latitude time", - "out_name": "hfds", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "masso": { - "modeling_realm": "ocean", - "standard_name": "sea_water_mass", - "units": "kg", - "cell_methods": "area: sum where sea time: mean", - "cell_measures": "", - "long_name": "Sea Water Mass", - "comment": "Total mass of liquid seawater. For Boussinesq models, report this diagnostic as Boussinesq reference density times total volume.", - "dimensions": "time", - "out_name": "masso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "msftmyz": { - "modeling_realm": "ocean", - "standard_name": "ocean_meridional_overturning_mass_streamfunction", - "units": "kg s-1", - "cell_methods": "longitude: mean (comment: basin mean[ along zig-zag grid path]) time: mean", - "cell_measures": "area: areacella", - "long_name": "Ocean Meridional Overturning Mass Streamfunction", - "comment": "Overturning mass streamfunction arising from all advective mass transport processes, resolved and parameterized.", - "dimensions": "latitude olevel basin time", - "out_name": "msftmyz", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sfdsi": { - "modeling_realm": "ocean seaIce", - "standard_name": "downward_sea_ice_basal_salt_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Downward Sea Ice Basal Salt Flux", - "comment": "This field is physical, and it arises since sea ice has a nonzero salt content, so it exchanges salt with the liquid ocean upon melting and freezing.", - "dimensions": "longitude latitude time", - "out_name": "sfdsi", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sfriver": { - "modeling_realm": "ocean", - "standard_name": "salt_flux_into_sea_water_from_rivers", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Salt Flux into Sea Water from Rivers", - "comment": "This field is physical, and it arises when rivers carry a nonzero salt content. 
Often this is zero, with rivers assumed to be fresh.", - "dimensions": "longitude latitude time", - "out_name": "sfriver", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "so": { - "modeling_realm": "ocean", - "standard_name": "sea_water_salinity", - "units": "0.001", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Sea Water Salinity", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "so", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "soga": { - "modeling_realm": "ocean", - "standard_name": "sea_water_salinity", - "units": "0.001", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "", - "long_name": "Global Mean Sea Water Salinity", - "comment": "", - "dimensions": "time", - "out_name": "soga", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sos": { - "modeling_realm": "ocean", - "standard_name": "sea_surface_salinity", - "units": "0.001", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Sea Surface Salinity", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "sos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sosga": { - "modeling_realm": "ocean", - "standard_name": "sea_surface_salinity", - "units": "0.001", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "", - "long_name": "Global Average Sea Surface Salinity", - "comment": "", - "dimensions": "time", - "out_name": "sosga", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tauuo": { - "modeling_realm": "ocean", - "standard_name": "surface_downward_x_stress", - "units": "N m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Downward X Stress", - "comment": "This is the stress on the liquid ocean from overlying atmosphere, sea ice, ice shelf, etc.", - "dimensions": "longitude latitude time", - "out_name": "tauuo", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tauvo": { - "modeling_realm": "ocean", - "standard_name": "surface_downward_y_stress", - "units": "N m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Downward Y Stress", - "comment": "This is the stress on the liquid ocean from overlying atmosphere, sea ice, ice shelf, etc.", - "dimensions": "longitude latitude time", - "out_name": "tauvo", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "thetao": { - "modeling_realm": "ocean", - "standard_name": "sea_water_potential_temperature", - "units": "degC", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Sea Water Potential Temperature", - "comment": "Diagnostic should be contributed even for models using conservative temperature as prognostic field.", - "dimensions": "longitude latitude olevel time", - "out_name": 
"thetao", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "thetaoga": { - "modeling_realm": "ocean", - "standard_name": "sea_water_potential_temperature", - "units": "degC", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "", - "long_name": "Global Average Sea Water Potential Temperature", - "comment": "Diagnostic should be contributed even for models using conservative temperature as prognostic field", - "dimensions": "time", - "out_name": "thetaoga", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "thkcello": { - "modeling_realm": "ocean", - "standard_name": "cell_thickness", - "units": "m", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Ocean Model Cell Thickness", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "thkcello", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tos": { - "modeling_realm": "ocean", - "standard_name": "sea_surface_temperature", - "units": "K", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Sea Surface Temperature", - "comment": "temperature of liquid ocean. Note that the correct standard_name for this variable is 'sea_surface_temperature', not 'surface_temperature', but this was discovered too late to correct. To maintain consistency across CMIP5 models, the wrong standard_name will continue to be used.", - "dimensions": "longitude latitude time", - "out_name": "tos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tosga": { - "modeling_realm": "ocean", - "standard_name": "sea_surface_temperature", - "units": "degC", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "", - "long_name": "Global Average Sea Surface Temperature", - "comment": "This may differ from 'surface temperature' in regions of sea ice.This may differ from 'surface temperature' in regions of sea ice.For models using conservative temperature as prognostic field, they should report the SST as surface potent", - "dimensions": "time", - "out_name": "tosga", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "uo": { - "modeling_realm": "ocean", - "standard_name": "sea_water_x_velocity", - "units": "m s-1", - "cell_methods": "time: mean", - "cell_measures": "--OPT", - "long_name": "Sea Water X Velocity", - "comment": "Prognostic x-ward velocity component resolved by the model.", - "dimensions": "longitude latitude olevel time", - "out_name": "uo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vo": { - "modeling_realm": "ocean", - "standard_name": "sea_water_y_velocity", - "units": "m s-1", - "cell_methods": "time: mean", - "cell_measures": "--OPT", - "long_name": "Sea Water Y Velocity", - "comment": "Prognostic x-ward velocity component resolved by the model.", - "dimensions": "longitude latitude olevel time", - "out_name": "vo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "volo": { - "modeling_realm": "ocean", - 
"standard_name": "sea_water_volume", - "units": "m3", - "cell_methods": "area: sum where sea time: mean", - "cell_measures": "", - "long_name": "Sea Water Volume", - "comment": "Total volume of liquid seawater.", - "dimensions": "time", - "out_name": "volo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wfo": { - "modeling_realm": "ocean", - "standard_name": "water_flux_into_sea_water", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Water Flux into Sea Water", - "comment": "computed as the water flux into the ocean divided by the area of the ocean portion of the grid cell. This is the sum of the next two variables in this table.", - "dimensions": "longitude latitude time", - "out_name": "wfo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wo": { - "modeling_realm": "ocean", - "standard_name": "upward_sea_water_velocity", - "units": "m s-1", - "cell_methods": "time: mean", - "cell_measures": "--OPT", - "long_name": "Sea Water Z Velocity", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "wo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Ofx.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Ofx.json deleted file mode 100644 index f3ba8bd78e..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Ofx.json +++ /dev/null @@ -1,173 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table Ofx", - "realm": "ocean", - "frequency": "fx", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "", - "generic_levels": "olevel", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "areacello": { - "modeling_realm": "ocean", - "standard_name": "cell_area", - "units": "m2", - "cell_methods": "area: mean", - "cell_measures": "area: areacello", - "long_name": "Ocean Grid-Cell Area", - "comment": "Horizontal area of ocean grid cells", - "dimensions": "longitude latitude", - "out_name": "areacello", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "basin": { - "modeling_realm": "ocean", - "standard_name": "region", - "units": "1.0", - "cell_methods": "area: mean", - "cell_measures": "area: areacello", - "long_name": "Region Selection Index", - "comment": "", - "dimensions": "longitude latitude", - "out_name": "basin", - "type": "integer", - "positive": "", - "valid_min": "", - "valid_max": "", - "flag_values": "0 1 2 3 4 5 6 7 8 9 10", - "flag_meanings": "global_land southern_ocean atlantic_ocean pacific_ocean arctic_ocean indian_ocean mediterranean_sea black_sea hudson_bay baltic_sea red_sea", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "deptho": { - "modeling_realm": "ocean", - "standard_name": "sea_floor_depth_below_geoid", - "units": "m", - "cell_methods": "area: mean", - "cell_measures": "area: areacello", - "long_name": "Sea Floor Depth Below Geoid", - "comment": "Ocean bathymetry. Reported here is the sea floor depth for present day relative to z=0 geoid. 
Reported as missing for land grid cells.", - "dimensions": "longitude latitude", - "out_name": "deptho", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfgeou": { - "modeling_realm": "ocean", - "standard_name": "upward_geothermal_heat_flux_at_sea_floor", - "units": "W m-2", - "cell_methods": "area: mean", - "cell_measures": "area: areacello", - "long_name": "Upward Geothermal Heat Flux at Sea Floor", - "comment": "", - "dimensions": "longitude latitude", - "out_name": "hfgeou", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "masscello": { - "modeling_realm": "ocean", - "standard_name": "sea_water_mass_per_unit_area", - "units": "kg m-2", - "cell_methods": "area: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Ocean Grid-Cell Mass per area", - "comment": "Tracer grid-cell mass per unit area used for computing tracer budgets. For Boussinesq models with static ocean grid cell thickness, masscello = rhozero*thickcello, where thickcello is static cell thickness and rhozero is constant Boussinesq reference density. More generally, masscello is time dependent and reported as part of Omon.", - "dimensions": "longitude latitude olevel", - "out_name": "masscello", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sftof": { - "modeling_realm": "ocean", - "standard_name": "sea_area_fraction", - "units": "%", - "cell_methods": "area: mean", - "cell_measures": "area: areacello", - "long_name": "Sea Area Fraction", - "comment": "This is the area fraction at the ocean surface.", - "dimensions": "longitude latitude typesea", - "out_name": "sftof", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "thkcello": { - "modeling_realm": "ocean", - "standard_name": "cell_thickness", - "units": "m", - "cell_methods": "area: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Ocean Model Cell Thickness", - "comment": "", - "dimensions": "longitude latitude olevel", - "out_name": "thkcello", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ugrid": { - "modeling_realm": "ocean", - "standard_name": "missing", - "units": "", - "cell_methods": "", - "cell_measures": "--UGRID", - "long_name": "UGRID Grid Information", - "comment": "", - "dimensions": "longitude latitude", - "out_name": "ugrido", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "volcello": { - "modeling_realm": "ocean", - "standard_name": "ocean_volume", - "units": "m3", - "cell_methods": "area: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Ocean Grid-Cell Volume", - "comment": "grid-cell volume ca. 
2000.", - "dimensions": "longitude latitude olevel", - "out_name": "volcello", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Omon.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Omon.json deleted file mode 100644 index 0935e8ec9e..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Omon.json +++ /dev/null @@ -1,5016 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table Omon", - "realm": "ocnBgChem", - "frequency": "mon", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "30.00000", - "generic_levels": "olevel", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "agessc": { - "modeling_realm": "ocean", - "standard_name": "sea_water_age_since_surface_contact", - "units": "yr", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Sea Water Age Since Surface Contact", - "comment": "Time elapsed since water was last in surface layer of the ocean.", - "dimensions": "longitude latitude olevel time", - "out_name": "agessc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "arag": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_aragonite_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Aragonite Concentration", - "comment": "Sum of particulate aragonite components (e.g. Phytoplankton, Detrital, etc.)", - "dimensions": "longitude latitude time depth0m", - "out_name": "arag", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "aragos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_aragonite_epressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Aragonite Concentration", - "comment": "sum of particulate aragonite components (e.g. 
Phytoplankton, Detrital, etc.)", - "dimensions": "longitude latitude time", - "out_name": "aragos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "bacc": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_bacteria_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Bacterial Carbon Concentration", - "comment": "Sum of bacterial carbon component concentrations", - "dimensions": "longitude latitude time depth0m", - "out_name": "bacc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "baccos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_bacteria_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Bacterial Carbon Concentration", - "comment": "sum of bacterial carbon component concentrations", - "dimensions": "longitude latitude time", - "out_name": "baccos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "bfe": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_particulate_organic_matter_expressed_as_iron_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Mole Concentration of Particulate Organic Matter expressed as Iron in sea water", - "comment": "Sum of particulate organic iron component concentrations", - "dimensions": "longitude latitude time depth0m", - "out_name": "bfe", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "bfeos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_particulate_organic_matter_expressed_as_iron_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Mole Concentration of Particulate Organic Matter expressed as Iron in sea water", - "comment": "sum of particulate organic iron component concentrations", - "dimensions": "longitude latitude time", - "out_name": "bfeos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "bigthetao": { - "modeling_realm": "ocean", - "standard_name": "sea_water_conservative_temperature", - "units": "degC", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Sea Water Conservative Temperature", - "comment": "Sea water conservative temperature (this should be contributed only for models using conservative temperature as prognostic field)", - "dimensions": "longitude latitude olevel time", - "out_name": "bigthetao", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "bigthetaoga": { - "modeling_realm": "ocean", - "standard_name": "sea_water_conservative_temperature", - "units": "degC", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "", - "long_name": "Global Average Sea Water Conservative Temperature", - "comment":
"Diagnostic should be contributed only for models using conservative temperature as prognostic field.", - "dimensions": "time", - "out_name": "bigthetaoga", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "bsi": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_particulate_matter_expressed_as_silicon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Mole Concentration of Particulate Organic Matter expressed as silicon in sea water", - "comment": "Sum of particulate silica component concentrations", - "dimensions": "longitude latitude time depth0m", - "out_name": "bsi", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "bsios": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_particulate_matter_expressed_as_silicon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Mole Concentration of Particulate Organic Matter expressed as Silicon in sea water", - "comment": "sum of particulate silica component concentrations", - "dimensions": "longitude latitude time", - "out_name": "bsios", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "calc": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_calcite_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Calcite Concentration", - "comment": "Sum of particulate calcite component concentrations (e.g. Phytoplankton, Detrital, etc.)", - "dimensions": "longitude latitude time depth0m", - "out_name": "calc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "calcos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_calcite_epressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Calcite Concentration", - "comment": "sum of particulate calcite component concentrations (e.g. 
Phytoplankton, Detrital, etc.)", - "dimensions": "longitude latitude time", - "out_name": "calcos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cfc11": { - "modeling_realm": "ocean", - "standard_name": "mole_concentration_of_cfc11_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Moles Per Unit Mass of CFC-11 in sea water", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "cfc11", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cfc12": { - "modeling_realm": "ocean", - "standard_name": "mole_concentration_of_cfc12_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Moles Per Unit Mass of CFC-12 in sea water", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "cfc12", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "chl": { - "modeling_realm": "ocnBgChem", - "standard_name": "mass_concentration_of_phytoplankton_expressed_as_chlorophyll_in_sea_water", - "units": "kg m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Mass Concentration of Total Chlorophyll in sea water", - "comment": "Sum of chlorophyll from all phytoplankton group concentrations. In most models this is equal to chldiat+chlmisc, that is the sum of Diatom Chlorophyll Mass Concentration and Other Phytoplankton Chlorophyll Mass Concentration", - "dimensions": "longitude latitude time depth0m", - "out_name": "chl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "chlcalc": { - "modeling_realm": "ocnBgChem", - "standard_name": "mass_concentration_of_calcareous_phytoplankton_expressed_as_chlorophyll_in_sea_water", - "units": "kg m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Mass Concentration of Calcareous Phytoplankton expressed as Chlorophyll in sea water", - "comment": "chlorophyll concentration from the calcite-producing phytoplankton component alone", - "dimensions": "longitude latitude time depth0m", - "out_name": "chlcalc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "chlcalcos": { - "modeling_realm": "ocnBgChem", - "standard_name": "mass_concentration_of_calcareous_phytoplankton_expressed_as_chlorophyll_in_sea_water", - "units": "kg m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Mass Concentration of Calcareous Phytoplankton expressed as Chlorophyll in sea water", - "comment": "chlorophyll concentration from the calcite-producing phytoplankton component alone", - "dimensions": "longitude latitude time", - "out_name": "chlcalcos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "chldiat": { - "modeling_realm": "ocnBgChem", - "standard_name": "mass_concentration_of_diatoms_expressed_as_chlorophyll_in_sea_water", - "units": "kg m-3", - "cell_methods": "area: 
mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Mass Concentration of Diatom expressed as Chlorophyll in sea water", - "comment": "Chlorophyll from diatom phytoplankton component concentration alone", - "dimensions": "longitude latitude time depth0m", - "out_name": "chldiat", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "chldiatos": { - "modeling_realm": "ocnBgChem", - "standard_name": "mass_concentration_of_diatoms_expressed_as_chlorophyll_in_sea_water", - "units": "kg m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Mass Concentration of Diatoms expressed as Chlorophyll in sea water", - "comment": "chlorophyll from diatom phytoplankton component concentration alone", - "dimensions": "longitude latitude time", - "out_name": "chldiatos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "chldiaz": { - "modeling_realm": "ocnBgChem", - "standard_name": "mass_concentration_of_diazotrophs_expressed_as_chlorophyll_in_sea_water", - "units": "kg m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Mass Concentration of Diazotrophs expressed as Chlorophyll in sea water", - "comment": "Chlorophyll concentration from the diazotrophic phytoplankton component alone", - "dimensions": "longitude latitude time depth0m", - "out_name": "chldiaz", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "chldiazos": { - "modeling_realm": "ocnBgChem", - "standard_name": "mass_concentration_of_diazotrophs_expressed_as_chlorophyll_in_sea_water", - "units": "kg m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Mass Concentration of Diazotrophs expressed as Chlorophyll in sea water", - "comment": "chlorophyll concentration from the diazotrophic phytoplankton component alone", - "dimensions": "longitude latitude time", - "out_name": "chldiazos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "chlmisc": { - "modeling_realm": "ocnBgChem", - "standard_name": "mass_concentration_of_miscellaneous_phytoplankton_expressed_as_chlorophyll_in_sea_water", - "units": "kg m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Mass Concentration of Other Phytoplankton expressed as Chlorophyll in sea water", - "comment": "Chlorophyll from additional phytoplankton component concentrations alone", - "dimensions": "longitude latitude time depth0m", - "out_name": "chlmisc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "chlmiscos": { - "modeling_realm": "ocnBgChem", - "standard_name": "mass_concentration_of_miscellaneous_phytoplankton_expressed_as_chlorophyll_in_sea_water", - "units": "kg m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Mass Concentration of Other Phytoplankton expressed as Chlorophyll in sea water", - "comment": "chlorophyll from additional phytoplankton component concentrations alone", - "dimensions": "longitude latitude time", - "out_name": "chlmiscos", - 
"type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "chlos": { - "modeling_realm": "ocnBgChem", - "standard_name": "mass_concentration_of_phytoplankton_expressed_as_chlorophyll_in_sea_water", - "units": "kg m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Mass Concentration of Total Phytoplankton expressed as Chlorophyll in sea water", - "comment": "Sum of chlorophyll from all phytoplankton group concentrations at the sea surface. In most models this is equal to chldiat+chlmisc, that is the sum of 'Diatom Chlorophyll Mass Concentration' plus 'Other Phytoplankton Chlorophyll Mass Concentration'", - "dimensions": "longitude latitude time", - "out_name": "chlos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "chlpico": { - "modeling_realm": "ocnBgChem", - "standard_name": "mass_concentration_of_picophytoplankton_expressed_as_chlorophyll_in_sea_water", - "units": "kg m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Mass Concentration of Picophytoplankton expressed as Chlorophyll in sea water", - "comment": "chlorophyll concentration from the picophytoplankton (<2 um) component alone", - "dimensions": "longitude latitude time depth0m", - "out_name": "chlpico", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "chlpicoos": { - "modeling_realm": "ocnBgChem", - "standard_name": "mass_concentration_of_picophytoplankton_expressed_as_chlorophyll_in_sea_water", - "units": "kg m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Mass Concentration of Picophytoplankton expressed as Chlorophyll in sea water", - "comment": "chlorophyll concentration from the picophytoplankton (<2 um) component alone", - "dimensions": "longitude latitude time", - "out_name": "chlpicoos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "co3": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_carbonate_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Carbonate ion Concentration", - "comment": "", - "dimensions": "longitude latitude time depth0m", - "out_name": "co3", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "co3abio": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_carbonate_expressed_as_carbon_in_sea_water_due_to_abiotic_component", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Abiotic Carbonate ion Concentration", - "comment": "", - "dimensions": "longitude latitude time depth0m", - "out_name": "co3abio", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "co3abioos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_carbonate_expressed_as_carbon_in_sea_water_due_to_abiotic_component", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", 
- "cell_measures": "area: areacello", - "long_name": "Surface Abiotic Carbonate ion Concentration", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "co3abioos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "co3nat": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_carbonate_expressed_as_carbon_in_sea_water_due_to_natural_component", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Natural Carbonate ion Concentration", - "comment": "", - "dimensions": "longitude latitude time depth0m", - "out_name": "co3nat", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "co3natos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_carbonate_expressed_as_carbon_in_sea_water_due_to_natural_component", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Natural Carbonate ion Concentration", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "co3natos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "co3os": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_carbonate_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Carbonate ion Concentration", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "co3os", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "co3satarag": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_aragonite_expressed_as_carbon_in_sea_water_at_saturation", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Carbonate ion Concentration for sea water in equilibrium with pure Aragonite", - "comment": "", - "dimensions": "longitude latitude time depth0m", - "out_name": "co3satarag", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "co3sataragos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_carbonate_expressed_as_carbon_for_sea_water_in_equilibrium_with_pure_aragonite", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Mole Concentration of Carbonate Ion in Equilibrium with Pure Aragonite in sea water", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "co3sataragos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "co3satcalc": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_calcite_expressed_as_carbon_in_sea_water_at_saturation", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Carbonate ion Concentration for sea water in equilibrium with pure Calcite", - "comment": "", - "dimensions": "longitude 
latitude time depth0m", - "out_name": "co3satcalc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "co3satcalcos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_carbonate_expressed_as_carbon_for_sea_water_in_equilibrium_with_pure_calcite", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Mole Concentration of Carbonate Ion in Equilibrium with Pure Calcite in sea water", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "co3satcalcos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "detoc": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_organic_detritus_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Detrital Organic Carbon Concentration", - "comment": "Sum of detrital organic carbon component concentrations", - "dimensions": "longitude latitude time depth0m", - "out_name": "detoc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "detocos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_organic_detritus_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Detrital Organic Carbon Concentration", - "comment": "sum of detrital organic carbon component concentrations", - "dimensions": "longitude latitude time", - "out_name": "detocos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dfe": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_dissolved_iron_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Mole Concentration of Dissolved Iron in sea water", - "comment": "Dissolved iron in sea water, including both Fe2+ and Fe3+ ions (but not particulate detrital iron)", - "dimensions": "longitude latitude time depth0m", - "out_name": "dfe", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dfeos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_dissolved_iron_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Dissolved Iron Concentration", - "comment": "dissolved iron in sea water is meant to include both Fe2+ and Fe3+ ions (but not, e.g., particulate detrital iron)", - "dimensions": "longitude latitude time", - "out_name": "dfeos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dissi13c": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_dissolved_inorganic_carbon13_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Dissolved Inorganic 13Carbon Concentration", - "comment": "Dissolved inorganic 
13carbon (CO3+HCO3+H2CO3) concentration", - "dimensions": "longitude latitude time depth0m", - "out_name": "dissi13c", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dissi13cos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_dissolved_inorganic_carbon13_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Dissolved Inorganic 13Carbon Concentration", - "comment": "Dissolved inorganic 13carbon (CO3+HCO3+H2CO3) concentration", - "dimensions": "longitude latitude time", - "out_name": "dissi13cos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dissi14cabio": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_dissolved_inorganic_carbon14_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Abiotic Dissolved Inorganic 14Carbon Concentration", - "comment": "Abiotic Dissolved inorganic 14carbon (CO3+HCO3+H2CO3) concentration", - "dimensions": "longitude latitude time depth0m", - "out_name": "dissi14cabio", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dissi14cabioos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_dissolved_inorganic_carbon14_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Abiotic Dissolved Inorganic 14Carbon Concentration", - "comment": "Abiotic Dissolved inorganic 14carbon (CO3+HCO3+H2CO3) concentration", - "dimensions": "longitude latitude time", - "out_name": "dissi14cabioos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dissic": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_dissolved_inorganic_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Dissolved Inorganic Carbon Concentration", - "comment": "Dissolved inorganic carbon (CO3+HCO3+H2CO3) concentration", - "dimensions": "longitude latitude time depth0m", - "out_name": "dissic", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dissicabio": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_dissolved_inorganic_carbon_in_sea_water_abiotic_component", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Abiotic Dissolved Inorganic Carbon Concentration", - "comment": "Abiotic Dissolved inorganic carbon (CO3+HCO3+H2CO3) concentration", - "dimensions": "longitude latitude time depth0m", - "out_name": "dissicabio", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dissicabioos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_dissolved_inorganic_carbon_in_sea_water_due_to_abiotic_component", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures":
"area: areacello", - "long_name": "Surface Abiotic Dissolved Inorganic Carbon Concentration", - "comment": "Abiotic Dissolved inorganic carbon (CO3+HCO3+H2CO3) concentration", - "dimensions": "longitude latitude time", - "out_name": "dissicabioos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dissicnat": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_dissolved_inorganic_carbon_in_sea_water_natural_component", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Natural Dissolved Inorganic Carbon Concentration", - "comment": "Dissolved inorganic carbon (CO3+HCO3+H2CO3) concentration at preindustrial atmospheric xCO2", - "dimensions": "longitude latitude time depth0m", - "out_name": "dissicnat", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dissicnatos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_dissolved_inorganic_carbon_in_sea_water_due_to_natural_component", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Natural Dissolved Inorganic Carbon Concentration", - "comment": "Dissolved inorganic carbon (CO3+HCO3+H2CO3) concentration at preindustrial atmospheric xCO2", - "dimensions": "longitude latitude time", - "out_name": "dissicnatos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dissicos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_dissolved_inorganic_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Dissolved Inorganic Carbon Concentration", - "comment": "Dissolved inorganic carbon (CO3+HCO3+H2CO3) concentration", - "dimensions": "longitude latitude time", - "out_name": "dissicos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dissoc": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_dissolved_organic_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Dissolved Organic Carbon Concentration", - "comment": "Sum of dissolved carbon component concentrations explicitly represented (i.e. not ~40 uM refractory unless explicit)", - "dimensions": "longitude latitude time depth0m", - "out_name": "dissoc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dissocos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_dissolved_organic_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Dissolved Organic Carbon Concentration", - "comment": "Sum of dissolved carbon component concentrations explicitly represented (i.e. 
not ~40 uM refractory unless explicit)", - "dimensions": "longitude latitude time", - "out_name": "dissocos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dms": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_fraction_of_dimethyl_sulfide_in_air", - "units": "mol mol-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Mole Concentration of Dimethyl Sulphide in sea water", - "comment": "", - "dimensions": "longitude latitude time depth0m", - "out_name": "dms", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dmso": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_dimethyl_sulfide_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Mole Concentration of Dimethyl Sulphide in sea water", - "comment": "Mole concentration of dimethyl sulphide in water", - "dimensions": "longitude latitude olevel time", - "out_name": "dmso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dmsos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_dimethyl_sulfide_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Mole Concentration of Dimethyl Sulphide in sea water", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "dmsos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dpco2": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_carbon_dioxide_partial_pressure_difference_between_sea_water_and_air", - "units": "Pa", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Delta PCO2", - "comment": "", - "dimensions": "longitude latitude time depth0m", - "out_name": "dpco2", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dpco2abio": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_carbon_dioxide_partial_pressure_difference_between_sea_water_and_air_due_to_abiotic_component", - "units": "Pa", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Abiotic Delta PCO2", - "comment": "", - "dimensions": "longitude latitude time depth0m", - "out_name": "dpco2abio", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dpco2nat": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_carbon_dioxide_partial_pressure_difference_between_sea_water_and_air_due_to_natural_component", - "units": "Pa", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Natural Delta PCO2", - "comment": "", - "dimensions": "longitude latitude time depth0m", - "out_name": "dpco2nat", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dpo2": { - "modeling_realm": "ocnBgChem", - "standard_name": 
"surface_molecular_oxygen_partial_pressure_difference_between_sea_water_and_air", - "units": "Pa", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Delta PO2", - "comment": "", - "dimensions": "longitude latitude time depth0m", - "out_name": "dpo2", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "eparag100": { - "modeling_realm": "ocnBgChem", - "standard_name": "sinking_mole_flux_of_aragonite_expressed_as_carbon_in_sea_water", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Downward Flux of Aragonite", - "comment": "", - "dimensions": "longitude latitude time depth100m", - "out_name": "eparag100", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "epc100": { - "modeling_realm": "ocnBgChem", - "standard_name": "sinking_mole_flux_of_particulate_organic_matter_expressed_as_carbon_in_sea_water", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Downward Flux of Particle Organic Carbon", - "comment": "", - "dimensions": "longitude latitude time depth100m", - "out_name": "epc100", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "epcalc100": { - "modeling_realm": "ocnBgChem", - "standard_name": "sinking_mole_flux_of_calcite_expressed_as_carbon_in_sea_water", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Downward Flux of Calcite", - "comment": "", - "dimensions": "longitude latitude time depth100m", - "out_name": "epcalc100", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "epfe100": { - "modeling_realm": "ocnBgChem", - "standard_name": "sinking_mole_flux_of_particulate_iron_in_sea_water", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Downward Flux of Particulate Iron", - "comment": "", - "dimensions": "longitude latitude time depth100m", - "out_name": "epfe100", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "epn100": { - "modeling_realm": "ocnBgChem", - "standard_name": "sinking_mole_flux_of_particulate_nitrogen_in_sea_water", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Downward Flux of Particulate Nitrogen", - "comment": "", - "dimensions": "longitude latitude time depth100m", - "out_name": "epn100", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "epp100": { - "modeling_realm": "ocnBgChem", - "standard_name": "sinking_mole_flux_of_particulate_phosphorus_in_sea_water", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Downward Flux of Particulate Phosphorus", - "comment": "", - "dimensions": "longitude latitude time depth100m", - "out_name": "epp100", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - 
"ok_max_mean_abs": "" - }, - "epsi100": { - "modeling_realm": "ocnBgChem", - "standard_name": "sinking_mole_flux_of_particulate_silicon_in_sea_water", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Downward Flux of Particulate Silica", - "comment": "", - "dimensions": "longitude latitude time depth100m", - "out_name": "epsi100", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "evs": { - "modeling_realm": "ocean", - "standard_name": "water_evaporation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where ice_free_sea over sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Water Evaporation Flux Where Ice Free Ocean over Sea", - "comment": "computed as the total mass of water vapor evaporating from the ice-free portion of the ocean divided by the area of the ocean portion of the grid cell.", - "dimensions": "longitude latitude time", - "out_name": "evs", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "expc": { - "modeling_realm": "ocnBgChem", - "standard_name": "sinking_mole_flux_of_particulate_organic_matter_expressed_as_carbon_in_sea_water", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Sinking Particulate Organic Carbon Flux", - "comment": "Downward flux of particulate organic carbon", - "dimensions": "longitude latitude olevel time", - "out_name": "expc", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fbddtalk": { - "modeling_realm": "ocnBgChem", - "standard_name": "integral_wrt_depth_of_tendency_of_sea_water_alkalinity_expressed_as_mole_equivalent_due_to_biological_processes", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea depth: sum where sea (top 100m only) time: mean", - "cell_measures": "area: areacello", - "long_name": "Rate of Change of Biological Alkalinity due to Biological Activity", - "comment": "vertical integral of net biological terms in time rate of change of alkalinity", - "dimensions": "longitude latitude time olayer100m", - "out_name": "fbddtalk", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fbddtdic": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_dissolved_inorganic_carbon_due_to_biological_processes", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea depth: sum where sea (top 100m only) time: mean", - "cell_measures": "area: areacello", - "long_name": "Rate of Change of Dissolved Inorganic Carbon due to Biological Activity", - "comment": "vertical integral of net biological terms in time rate of change of dissolved inorganic carbon", - "dimensions": "longitude latitude time olayer100m", - "out_name": "fbddtdic", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fbddtdife": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_dissolved_inorganic_iron_due_to_biological_processes", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea depth: sum where sea (top 100m only) time: mean", - "cell_measures": "area: 
areacello", - "long_name": "Rate of Change of Dissolved Inorganic Iron due to Biological Activity", - "comment": "vertical integral of net biological terms in time rate of change of dissolved inorganic iron", - "dimensions": "longitude latitude time olayer100m", - "out_name": "fbddtdife", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fbddtdin": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_dissolved_inorganic_nitrogen_due_to_biological_processes", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea depth: sum where sea (top 100m only) time: mean", - "cell_measures": "area: areacello", - "long_name": "Rate of Change of Dissolved Inorganic Nitrogen due to Biological Activity", - "comment": "vertical integral of net biological terms in time rate of change of nitrogen nutrients (e.g. NO3+NH4)", - "dimensions": "longitude latitude time olayer100m", - "out_name": "fbddtdin", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fbddtdip": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_dissolved_inorganic_phosphorus_due_to_biological_processes", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea depth: sum where sea (top 100m only) time: mean", - "cell_measures": "area: areacello", - "long_name": "Rate of Change of Dissolved Inorganic Phosphorus due to Biological Activity", - "comment": "vertical integral of net biological terms in time rate of change of phosphate", - "dimensions": "longitude latitude time olayer100m", - "out_name": "fbddtdip", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fbddtdisi": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_dissolved_inorganic_silicon_due_to_biological_processes", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea depth: sum where sea (top 100m only) time: mean", - "cell_measures": "area: areacello", - "long_name": "Rate of Change of Dissolved Inorganic Silicon due to Biological Activity", - "comment": "vertical integral of net biological terms in time rate of change of dissolved inorganic silicate", - "dimensions": "longitude latitude time olayer100m", - "out_name": "fbddtdisi", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fddtalk": { - "modeling_realm": "ocnBgChem", - "standard_name": "integral_wrt_depth_of_tendency_of_sea_water_alkalinity_expressed_as_mole_equivalent", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea depth: sum where sea (top 100m only) time: mean", - "cell_measures": "area: areacello", - "long_name": "Rate of Change of Total Alkalinity", - "comment": "vertical integral of net time rate of change of alkalinity", - "dimensions": "longitude latitude time olayer100m", - "out_name": "fddtalk", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fddtdic": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_dissolved_inorganic_carbon", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea depth: sum where sea (top 100m only) time: mean", - "cell_measures": "area: areacello", - "long_name": "Rate of Change 
of Net Dissolved Inorganic Carbon", - "comment": "", - "dimensions": "longitude latitude time olayer100m", - "out_name": "fddtdic", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fddtdife": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_dissolved_inorganic_iron", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea depth: sum where sea (top 100m only) time: mean", - "cell_measures": "area: areacello", - "long_name": "Rate of Change of Net Dissolved Inorganic Iron", - "comment": "vertical integral of net time rate of change of dissolved inorganic iron", - "dimensions": "longitude latitude time olayer100m", - "out_name": "fddtdife", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fddtdin": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_dissolved_inorganic_nitrogen", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea depth: sum where sea (top 100m only) time: mean", - "cell_measures": "area: areacello", - "long_name": "Rate of Change of Net Dissolved Inorganic Nitrogen", - "comment": "Net time rate of change of nitrogen nutrients (e.g. NO3+NH4)", - "dimensions": "longitude latitude time olayer100m", - "out_name": "fddtdin", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fddtdip": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_dissolved_inorganic_phosphorus", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea depth: sum where sea (top 100m only) time: mean", - "cell_measures": "area: areacello", - "long_name": "Rate of Change of Net Dissolved Inorganic Phosphate", - "comment": "vertical integral of net time rate of change of phosphate", - "dimensions": "longitude latitude time olayer100m", - "out_name": "fddtdip", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fddtdisi": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_dissolved_inorganic_silicon", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea depth: sum where sea (top 100m only) time: mean", - "cell_measures": "area: areacello", - "long_name": "Rate of Change of Net Dissolved Inorganic Silicon", - "comment": "vertical integral of net time rate of change of dissolved inorganic silicate", - "dimensions": "longitude latitude time olayer100m", - "out_name": "fddtdisi", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fg13co2": { - "modeling_realm": "ocnBgChem", - "standard_name": "air_sea_flux_of_13CO2", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Downward Flux of Abiotic 13CO2", - "comment": "", - "dimensions": "longitude latitude time depth0m", - "out_name": "fg13co2", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fg14co2abio": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_downward_mass_flux_of_abiotic_14_carbon_dioxide_expressed_as_carbon", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: 
mean", - "cell_measures": "area: areacello", - "long_name": "Surface Downward Flux of Abiotic 14CO2", - "comment": "Gas exchange flux of abiotic 14CO2 (positive into ocean)", - "dimensions": "longitude latitude time depth0m", - "out_name": "fg14co2abio", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fgcfc11": { - "modeling_realm": "ocean", - "standard_name": "surface_downward_mole_flux_of_cfc11", - "units": "mol sec-1 m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Downward CFC11 flux", - "comment": "gas exchange flux of CFC11", - "dimensions": "longitude latitude time", - "out_name": "fgcfc11", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fgcfc12": { - "modeling_realm": "ocean", - "standard_name": "surface_downward_mole_flux_of_cfc12", - "units": "mol sec-1 m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Downward CFC12 flux", - "comment": "gas exchange flux of CFC12", - "dimensions": "longitude latitude time", - "out_name": "fgcfc12", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fgco2": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Downward Flux of Total CO2", - "comment": "Gas exchange flux of CO2 (positive into ocean)", - "dimensions": "longitude latitude time depth0m", - "out_name": "fgco2", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fgco2abio": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_downward_mass_flux_of_abiotic_carbon_dioxide_expressed_as_carbon", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Downward Flux of Abiotic CO2", - "comment": "Gas exchange flux of abiotic CO2 (positive into ocean)", - "dimensions": "longitude latitude time depth0m", - "out_name": "fgco2abio", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fgco2nat": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_downward_mass_flux_of_natural_carbon_dioxide_expressed_as_carbon", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Downward Flux of Natural CO2", - "comment": "Gas exchange flux of natural CO2 (positive into ocean)", - "dimensions": "longitude latitude time depth0m", - "out_name": "fgco2nat", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fgdms": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_upward_mole_flux_of_dimethyl_sulfide", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Upward Flux of DMS", - "comment": "Gas exchange flux of DMS (positive into atmosphere)", - "dimensions": "longitude latitude time 
depth0m", - "out_name": "fgdms", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fgo2": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_downward_mole_flux_of_molecular_oxygen", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Downward Flux of O2", - "comment": "Gas exchange flux of O2 (positive into ocean)", - "dimensions": "longitude latitude time depth0m", - "out_name": "fgo2", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fgsf6": { - "modeling_realm": "ocean", - "standard_name": "surface_downward_mole_flux_of_sf6", - "units": "mol sec-1 m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Downward SF6 flux", - "comment": "gas exchange flux of SF6", - "dimensions": "longitude latitude time", - "out_name": "fgsf6", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ficeberg": { - "modeling_realm": "ocean", - "standard_name": "water_flux_into_sea_water_from_icebergs", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Water Flux into Sea Water From Icebergs", - "comment": "computed as the iceberg melt water flux into the ocean divided by the area of the ocean portion of the grid cell.", - "dimensions": "longitude latitude olevel time", - "out_name": "ficeberg", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ficeberg2d": { - "modeling_realm": "ocean", - "standard_name": "water_flux_into_sea_water_from_icebergs", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Water Flux into Sea Water From Icebergs", - "comment": "computed as the iceberg melt water flux into the ocean divided by the area of the ocean portion of the grid cell.", - "dimensions": "longitude latitude time", - "out_name": "ficeberg", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "frfe": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_iron_due_to_sedimentation", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Iron Loss to Sediments", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "frfe", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fric": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_inorganic_carbon_due_to_sedimentation", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Downward Inorganic Carbon Flux at Ocean Bottom", - "comment": "Inorganic Carbon loss to sediments", - "dimensions": "longitude latitude time", - "out_name": "fric", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "friver": { - "modeling_realm": "ocean", - 
"standard_name": "water_flux_into_sea_water_from_rivers", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Water Flux into Sea Water From Rivers", - "comment": "computed as the river flux of water into the ocean divided by the area of the ocean portion of the grid cell.", - "dimensions": "longitude latitude time", - "out_name": "friver", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "frn": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_elemental_nitrogen_due_to_denitrification_and_sedimentation", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Nitrogen Loss to Sediments and through Denitrification", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "frn", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "froc": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_organic_carbon_due_to_sedimentation", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Downward Organic Carbon Flux at Ocean Bottom", - "comment": "Organic Carbon loss to sediments", - "dimensions": "longitude latitude time", - "out_name": "froc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fsfe": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_iron_due_to_deposition_and_runoff_and_sediment_dissolution", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Downward Net Flux of Iron", - "comment": "Iron supply through deposition flux onto sea surface, runoff, coasts, sediments, etc", - "dimensions": "longitude latitude time depth0m", - "out_name": "fsfe", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fsitherm": { - "modeling_realm": "ocean seaIce", - "standard_name": "water_flux_into_sea_water_due_to_sea_ice_thermodynamics", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Water Flux into Sea Water due to Sea Ice Thermodynamics", - "comment": "computed as the sea ice thermodynamic water flux into the ocean divided by the area of the ocean portion of the grid cell.", - "dimensions": "longitude latitude time", - "out_name": "fsitherm", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fsn": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_elemental_nitrogen_due_to_deposition_and_fixation_and_runoff", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Downward Net Flux of Nitrogen", - "comment": "", - "dimensions": "longitude latitude time depth0m", - "out_name": "fsn", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "graz": { - "modeling_realm": "ocnBgChem", - 
"standard_name": "tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_grazing_of_phytoplankton", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Total Grazing of Phytoplankton by Zooplankton", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "graz", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfbasin": { - "modeling_realm": "ocean", - "standard_name": "northward_ocean_heat_transport", - "units": "W", - "cell_methods": "longitude: mean (basin) time: mean", - "cell_measures": "area: areacella", - "long_name": "Northward Ocean Heat Transport", - "comment": "Contains contributions from all physical processes affecting the northward heat transport, including resolved advection, parameterized advection, lateral diffusion, etc. Diagnosed here as a function of latitude and basin. Use Celsius for temperature scale.", - "dimensions": "latitude basin time", - "out_name": "hfbasin", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfbasinpadv": { - "modeling_realm": "ocean", - "standard_name": "northward_ocean_heat_transport_due_to_parameterized_eddy_advection", - "units": "W", - "cell_methods": "longitude: mean (basin) time: mean", - "cell_measures": "area: areacella", - "long_name": "northward ocean heat transport due to parameterized eddy advection", - "comment": "Contributions to heat transport from parameterized eddy-induced advective transport due to any subgrid advective process. Diagnosed here as a function of latitude and basin. Use Celsius for temperature scale.", - "dimensions": "latitude basin time", - "out_name": "hfbasinpadv", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfbasinpmadv": { - "modeling_realm": "ocean", - "standard_name": "northward_ocean_heat_transport_due_to_parameterized_mesoscale_advection", - "units": "W", - "cell_methods": "longitude: mean (basin) time: mean", - "cell_measures": "area: areacella", - "long_name": "northward ocean heat transport due to parameterized mesoscale advection", - "comment": "Contributions to heat transport from parameterized mesoscale eddy-induced advective transport. Diagnosed here as a function of latitude and basin. Use Celsius for temperature scale.", - "dimensions": "latitude basin time", - "out_name": "hfbasinpmadv", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfbasinpmdiff": { - "modeling_realm": "ocean", - "standard_name": "northward_ocean_heat_transport_due_to_parameterized_mesoscale_diffusion", - "units": "W", - "cell_methods": "longitude: mean (basin) time: mean", - "cell_measures": "area: areacella", - "long_name": "northward ocean heat transport due to parameterized mesoscale diffusion", - "comment": "Contributions to heat transport from parameterized mesoscale eddy-induced diffusive transport (i.e., neutral diffusion). 
Diagnosed here as a function of latitude and basin.", - "dimensions": "latitude basin time", - "out_name": "hfbasinpmdiff", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfbasinpsmadv": { - "modeling_realm": "ocean", - "standard_name": "northward_ocean_heat_transport_due_to_parameterized_submesoscale_advection", - "units": "W", - "cell_methods": "longitude: mean (basin) time: mean", - "cell_measures": "area: areacella", - "long_name": "northward ocean heat transport due to parameterized submesoscale advection", - "comment": "Contributions to heat transport from parameterized submesoscale eddy-induced advective transport. Diagnosed here as a function of latitude and basin. Use Celsius for temperature scale.", - "dimensions": "latitude basin time", - "out_name": "hfbasinpsmadv", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfcorr": { - "modeling_realm": "ocean", - "standard_name": "heat_flux_correction", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Heat Flux Correction", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "hfcorr", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfds": { - "modeling_realm": "ocean", - "standard_name": "surface_downward_heat_flux_in_sea_water", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Downward Heat Flux at Sea Water Surface", - "comment": "This is the net flux of heat entering the liquid water column through its upper surface (excluding any 'flux adjustment').", - "dimensions": "longitude latitude time", - "out_name": "hfds", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfevapds": { - "modeling_realm": "ocean", - "standard_name": "temperature_flux_due_to_evaporation_expressed_as_heat_flux_out_of_sea_water", - "units": "W m-2", - "cell_methods": "area: mean where ice_free_sea over sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Temperature Flux due to Evaporation Expressed as Heat Flux Out of Sea Water", - "comment": "This is defined as 'where ice_free_sea over sea'", - "dimensions": "longitude latitude time", - "out_name": "hfevapds", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfgeou": { - "modeling_realm": "ocean", - "standard_name": "upward_geothermal_heat_flux_at_sea_floor", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Upward Geothermal Heat Flux at Sea Floor", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "hfgeou", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfibthermds": { - "modeling_realm": "ocean", - "standard_name": "heat_flux_into_sea_water_due_to_iceberg_thermodynamics", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Heat Flux into Sea Water due to Iceberg Thermodynamics", - "comment": "", - "dimensions": "longitude
latitude olevel time", - "out_name": "hfibthermds", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfibthermds2d": { - "modeling_realm": "ocean", - "standard_name": "heat_flux_into_sea_water_due_to_iceberg_thermodynamics", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Heat Flux into Sea Water due to Iceberg Thermodynamics", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "hfibthermds", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hflso": { - "modeling_realm": "ocean", - "standard_name": "surface_downward_latent_heat_flux", - "units": "W m-2", - "cell_methods": "area: mean where ice_free_sea over sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Downward Latent Heat Flux", - "comment": "This is defined with the cell methods string 'where ice_free_sea over sea'", - "dimensions": "longitude latitude time", - "out_name": "hflso", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfrainds": { - "modeling_realm": "ocean", - "standard_name": "temperature_flux_due_to_rainfall_expressed_as_heat_flux_into_sea_water", - "units": "W m-2", - "cell_methods": "area: mean where ice_free_sea over sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Temperature Flux due to Rainfall Expressed as Heat Flux into Sea Water", - "comment": "This is defined as 'where ice_free_sea over sea'; i.e., the total flux (considered here) entering the ice-free portion of the grid cell divided by the area of the ocean portion of the grid cell.
All such heat fluxes are computed based on Celsius scale.", - "dimensions": "longitude latitude time", - "out_name": "hfrainds", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfrunoffds": { - "modeling_realm": "ocean", - "standard_name": "temperature_flux_due_to_runoff_expressed_as_heat_flux_into_sea_water", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Temperature Flux due to Runoff Expressed as Heat Flux into Sea Water", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "hfrunoffds", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfrunoffds2d": { - "modeling_realm": "ocean", - "standard_name": "temperature_flux_due_to_runoff_expressed_as_heat_flux_into_sea_water", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Temperature Flux due to Runoff Expressed as Heat Flux into Sea Water", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "hfrunoffds", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfsifrazil": { - "modeling_realm": "ocean seaIce", - "standard_name": "heat_flux_into_sea_water_due_to_freezing_of_frazil_ice", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Heat Flux into Sea Water due to Frazil Ice Formation", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "hfsifrazil", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfsifrazil2d": { - "modeling_realm": "ocean seaIce", - "standard_name": "heat_flux_into_sea_water_due_to_freezing_of_frazil_ice", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Heat Flux into Sea Water due to Frazil Ice Formation", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "hfsifrazil", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfsnthermds": { - "modeling_realm": "ocean", - "standard_name": "heat_flux_into_sea_water_due_to_snow_thermodynamics", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Heat Flux into Sea Water due to Snow Thermodynamics", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "hfsnthermds", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfsnthermds2d": { - "modeling_realm": "ocean", - "standard_name": "heat_flux_into_sea_water_due_to_snow_thermodynamics", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Heat Flux into Sea Water due to Snow Thermodynamics", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "hfsnthermds", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfsso": { 
- "modeling_realm": "ocean", - "standard_name": "surface_downward_sensible_heat_flux", - "units": "W m-2", - "cell_methods": "area: mean where ice_free_sea over sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Downward Sensible Heat Flux", - "comment": "This is defined as 'where ice_free_sea over sea'", - "dimensions": "longitude latitude time", - "out_name": "hfsso", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfx": { - "modeling_realm": "ocean", - "standard_name": "ocean_heat_x_transport", - "units": "W", - "cell_methods": "time: mean", - "cell_measures": "--OPT", - "long_name": "Ocean Heat X Transport", - "comment": "Contains all contributions to 'x-ward' heat transport from resolved and parameterized processes. Use Celsius for temperature scale.", - "dimensions": "longitude latitude olevel time", - "out_name": "hfx", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfy": { - "modeling_realm": "ocean", - "standard_name": "ocean_heat_y_transport", - "units": "W", - "cell_methods": "time: mean", - "cell_measures": "--OPT", - "long_name": "Ocean Heat Y Transport", - "comment": "Contains all contributions to 'y-ward' heat transport from resolved and parameterized processes. Use Celsius for temperature scale.", - "dimensions": "longitude latitude olevel time", - "out_name": "hfy", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "htovgyre": { - "modeling_realm": "ocean", - "standard_name": "northward_ocean_heat_transport_due_to_gyre", - "units": "W", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Northward Ocean Heat Transport due to Gyre", - "comment": "From all advective mass transport processes, resolved and parameterized.", - "dimensions": "latitude basin time", - "out_name": "htovgyre", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "htovovrt": { - "modeling_realm": "ocean", - "standard_name": "northward_ocean_heat_transport_due_to_overturning", - "units": "W", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Northward Ocean Heat Transport due to Overturning", - "comment": "From all advective mass transport processes, resolved and parameterized.", - "dimensions": "latitude basin time", - "out_name": "htovovrt", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "icfriver": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_inorganic_carbon_due_to_runoff_and_sediment_dissolution", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Flux of Inorganic Carbon Into Ocean Surface by Runoff", - "comment": "Inorganic Carbon supply to ocean through runoff (separate from gas exchange)", - "dimensions": "longitude latitude time depth0m", - "out_name": "icfriver", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "intdic": { - "modeling_realm": "ocnBgChem", - "standard_name": "ocean_mass_content_of_dissolved_inorganic_carbon", - "units": "kg m-2", - 
"cell_methods": "area: mean where sea depth: sum where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Dissolved Inorganic Carbon Content", - "comment": "Vertically integrated DIC", - "dimensions": "longitude latitude time", - "out_name": "intdic", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "intdoc": { - "modeling_realm": "ocnBgChem", - "standard_name": "ocean_mass_content_of_dissolved_organic_carbon", - "units": "kg m-2", - "cell_methods": "area: mean where sea depth: sum where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Dissolved Organic Carbon Content", - "comment": "Vertically integrated DOC (explicit pools only)", - "dimensions": "longitude latitude time", - "out_name": "intdoc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "intparag": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_aragonite_expressed_as_carbon_due_to_biological_production", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea depth: sum where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Aragonite Production", - "comment": "Vertically integrated aragonite production", - "dimensions": "longitude latitude time", - "out_name": "intparag", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "intpbfe": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_iron_due_to_biological_production", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea depth: sum where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Iron Production", - "comment": "Vertically integrated biogenic iron production", - "dimensions": "longitude latitude time", - "out_name": "intpbfe", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "intpbn": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_nitrogen_due_to_biological_production", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea depth: sum where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Nitrogen Production", - "comment": "Vertically integrated biogenic nitrogen production", - "dimensions": "longitude latitude time", - "out_name": "intpbn", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "intpbp": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_phosphorus_due_to_biological_production", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea depth: sum where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Phosphorus Production", - "comment": "Vertically integrated biogenic phosphorus production", - "dimensions": "longitude latitude time", - "out_name": "intpbp", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "intpbsi": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_silicon_due_to_biological_production", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea depth: sum where sea time: mean", - "cell_measures": 
"area: areacello", - "long_name": "Silica Production", - "comment": "Vertically integrated biogenic silica production", - "dimensions": "longitude latitude time", - "out_name": "intpbsi", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "intpcalcite": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_calcite_expressed_as_carbon_due_to_biological_production", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea depth: sum where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Calcite Production", - "comment": "Vertically integrated calcite production", - "dimensions": "longitude latitude time", - "out_name": "intpcalcite", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "intpn2": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_elemental_nitrogen_due_to_fixation", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea depth: sum where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Nitrogen Fixation Rate in Ocean", - "comment": "Vertically integrated nitrogen fixation", - "dimensions": "longitude latitude time", - "out_name": "intpn2", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "intpoc": { - "modeling_realm": "ocnBgChem", - "standard_name": "ocean_mass_content_of_particulate_organic_matter_expressed_as_carbon", - "units": "kg m-2", - "cell_methods": "area: mean where sea depth: sum where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Particulate Organic Carbon Content", - "comment": "Vertically integrated POC", - "dimensions": "longitude latitude time", - "out_name": "intpoc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "intpp": { - "modeling_realm": "ocnBgChem", - "standard_name": "net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_phytoplankton", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea depth: sum where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Primary Organic Carbon Production by All Types of Phytoplankton", - "comment": "Vertically integrated total primary (organic carbon) production by phytoplankton. 
This should equal the sum of intppdiat+intppmisc, but those individual components may be unavailable in some models.", - "dimensions": "longitude latitude time", - "out_name": "intpp", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "intppcalc": { - "modeling_realm": "ocnBgChem", - "standard_name": "net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_calcareous_phytoplankton", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea depth: sum where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Net Primary Mole Productivity of Carbon by Calcareous Phytoplankton", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "intppcalc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "intppdiat": { - "modeling_realm": "ocnBgChem", - "standard_name": "net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diatoms", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea depth: sum where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Net Primary Organic Carbon Production by Diatoms", - "comment": "Vertically integrated primary (organic carbon) production by the diatom phytoplankton component alone", - "dimensions": "longitude latitude time", - "out_name": "intppdiat", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "intppdiaz": { - "modeling_realm": "ocnBgChem", - "standard_name": "net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diazotrophs", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea depth: sum where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Net Primary Mole Productivity of Carbon by Diazotrophs", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "intppdiaz", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "intppmisc": { - "modeling_realm": "ocnBgChem", - "standard_name": "net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_miscellaneous_phytoplankton", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea depth: sum where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Net Primary Organic Carbon Production by Other Phytoplankton", - "comment": "Vertically integrated total primary (organic carbon) production by other phytoplankton components alone", - "dimensions": "longitude latitude time", - "out_name": "intppmisc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "intppnitrate": { - "modeling_realm": "ocnBgChem", - "standard_name": "net_primary_mole_productivity_of_biomass_expressed_as_carbon_due_to_nitrate_utilization", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea depth: sum where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Primary Organic Carbon Production by Phytoplankton Based on Nitrate Uptake Alone", - "comment": "Vertically integrated primary (organic carbon) production by phytoplankton based on nitrate uptake alone", - "dimensions": "longitude latitude time", - "out_name": "intppnitrate", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", -
"ok_max_mean_abs": "" - }, - "intpppico": { - "modeling_realm": "ocnBgChem", - "standard_name": "net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_picophytoplankton", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea depth: sum where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Net Primary Mole Productivity of Carbon by Picophytoplankton", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "intpppico", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "limfecalc": { - "modeling_realm": "ocnBgChem", - "standard_name": "iron_limitation_of_calcareous_phytoplankton", - "units": "1.0", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Iron limitation of Calcareous Phytoplankton", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "limfecalc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "limfediat": { - "modeling_realm": "ocnBgChem", - "standard_name": "iron_limitation_of_diatoms", - "units": "1.0", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Iron limitation of Diatoms", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "limfediat", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "limfediaz": { - "modeling_realm": "ocnBgChem", - "standard_name": "iron_limitation_of_diazotrophs", - "units": "1.0", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Iron limitation of Diazotrophs", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "limfediaz", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "limfemisc": { - "modeling_realm": "ocnBgChem", - "standard_name": "iron_limitation_of_miscellaneous_phytoplankton", - "units": "1.0", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Iron Limitation of Other Phytoplankton", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "limfemisc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "limfepico": { - "modeling_realm": "ocnBgChem", - "standard_name": "iron_limitation_of_picophytoplankton", - "units": "1.0", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Iron limitation of Picophytoplankton", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "limfepico", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "limirrcalc": { - "modeling_realm": "ocnBgChem", - "standard_name": "irradiance_limitation_of_calcareous_phytoplankton", - "units": "1.0", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Irradiance limitation of Calcareous Phytoplankton", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "limirrcalc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": 
"" - }, - "limirrdiat": { - "modeling_realm": "ocnBgChem", - "standard_name": "irradiance_limitation_of_diatoms", - "units": "1.0", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Irradiance limitation of Diatoms", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "limirrdiat", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "limirrdiaz": { - "modeling_realm": "ocnBgChem", - "standard_name": "irradiance_limitation_of_diazotrophs", - "units": "1.0", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Irradiance limitation of Diazotrophs", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "limirrdiaz", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "limirrmisc": { - "modeling_realm": "ocnBgChem", - "standard_name": "irradiance_limitation_of_miscellaneous_phytoplankton", - "units": "1.0", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Irradiance Limitation of Other Phytoplankton", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "limirrmisc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "limirrpico": { - "modeling_realm": "ocnBgChem", - "standard_name": "irradiance_limitation_of_picophytoplankton", - "units": "1.0", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Irradiance limitation of Picophytoplankton", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "limirrpico", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "limncalc": { - "modeling_realm": "ocnBgChem", - "standard_name": "nitrogen_limitation_of_calcareous_phytoplankton", - "units": "1.0", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Nitrogen limitation of Calcareous Phytoplankton", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "limncalc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "limndiat": { - "modeling_realm": "ocnBgChem", - "standard_name": "nitrogen_limitation_of_diatoms", - "units": "1.0", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Nitrogen limitation of Diatoms", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "limndiat", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "limndiaz": { - "modeling_realm": "ocnBgChem", - "standard_name": "nitrogen_limitation_of_diazotrophs", - "units": "1.0", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Nitrogen limitation of Diazotrophs", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "limndiaz", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "limnmisc": { - "modeling_realm": "ocnBgChem", - "standard_name": 
"nitrogen_limitation_of_miscellaneous_phytoplankton", - "units": "1.0", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Nitrogen Limitation of Other Phytoplankton", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "limnmisc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "limnpico": { - "modeling_realm": "ocnBgChem", - "standard_name": "nitrogen_limitation_of_picophytoplankton", - "units": "1.0", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Nitrogen limitation of Picophytoplankton", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "limnpico", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "masscello": { - "modeling_realm": "ocean", - "standard_name": "sea_water_mass_per_unit_area", - "units": "kg m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Sea Water Mass Per Unit Area", - "comment": "Tracer grid-cell mass per unit area used for computing tracer budgets. For Boussinesq models with static ocean grid cell thickness, masscello = rhozero*thickcello, where thickcello is static cell thickness and rhozero is constant Boussinesq reference density. More generally, masscello is time dependent and reported as part of Omon.", - "dimensions": "longitude latitude olevel time", - "out_name": "masscello", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "masso": { - "modeling_realm": "ocean", - "standard_name": "sea_water_mass", - "units": "kg", - "cell_methods": "area: sum where sea time: mean", - "cell_measures": "", - "long_name": "Sea Water Mass", - "comment": "Total mass of liquid seawater. 
For Boussinesq models, report this diagnostic as Boussinesq reference density times total volume.", - "dimensions": "time", - "out_name": "masso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mfo": { - "modeling_realm": "ocean", - "standard_name": "sea_water_transport_across_line", - "units": "kg s-1", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Sea Water Transport", - "comment": "", - "dimensions": "oline time", - "out_name": "mfo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mlotst": { - "modeling_realm": "ocean", - "standard_name": "ocean_mixed_layer_thickness_defined_by_sigma_t", - "units": "m", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Ocean Mixed Layer Thickness Defined by Sigma T", - "comment": "Sigma T is potential density referenced to ocean surface.", - "dimensions": "longitude latitude time", - "out_name": "mlotst", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mlotstmax": { - "modeling_realm": "ocean", - "standard_name": "ocean_mixed_layer_thickness_defined_by_sigma_t", - "units": "m", - "cell_methods": "area: mean time: maximum", - "cell_measures": "area: areacello", - "long_name": "Maximum Ocean Mixed Layer Thickness Defined by Sigma T", - "comment": "Sigma T is potential density referenced to ocean surface.", - "dimensions": "longitude latitude time", - "out_name": "mlotstmax", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mlotstmin": { - "modeling_realm": "ocean", - "standard_name": "ocean_mixed_layer_thickness_defined_by_sigma_t", - "units": "m", - "cell_methods": "area: mean time: minimum", - "cell_measures": "area: areacello", - "long_name": "Minimum Ocean Mixed Layer Thickness Defined by Sigma T", - "comment": "Sigma T is potential density referenced to ocean surface.", - "dimensions": "longitude latitude time", - "out_name": "mlotstmin", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mlotstsq": { - "modeling_realm": "ocean", - "standard_name": "square_of_ocean_mixed_layer_thickness_defined_by_sigma_t", - "units": "m2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Square of Ocean Mixed Layer Thickness Defined by Sigma T", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "mlotstsq", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "msftbarot": { - "modeling_realm": "ocean", - "standard_name": "ocean_barotropic_mass_streamfunction", - "units": "kg s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Ocean Barotropic Mass Streamfunction", - "comment": "Streamfunction or its approximation for free surface models. 
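The masscello and masso comments above reduce, for Boussinesq models, to multiplication by a constant reference density. A short sketch of that arithmetic follows; rhozero (here 1035 kg m-3), the cell thickness and the ocean volume figure are illustrative assumptions, not values from this table.

```python
# Sketch of the Boussinesq relations stated in the masscello/masso comments:
#   masscello = rhozero * thickcello    [kg m-2]  (per-cell mass per unit area)
#   masso     = rhozero * total_volume  [kg]      (total liquid seawater mass)
rhozero = 1035.0          # assumed Boussinesq reference density [kg m-3]
thickcello = 10.0         # assumed static cell thickness [m]
masscello = rhozero * thickcello
print(masscello)          # 10350.0 kg m-2

total_volume = 1.335e18   # illustrative total ocean volume [m3]
masso = rhozero * total_volume
print(f"{masso:.3e} kg")  # ~1.382e+21 kg
```

For non-Boussinesq (mass-conserving) models both quantities are genuinely time dependent, which is why the table reports them in Omon rather than as fixed fields.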
See OMDP document for details.", - "dimensions": "longitude latitude time", - "out_name": "msftbarot", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "msftmrho": { - "modeling_realm": "ocean", - "standard_name": "ocean_meridional_overturning_mass_streamfunction", - "units": "kg s-1", - "cell_methods": "longitude: mean (comment: basin mean[ along zig-zag grid path]) time: mean", - "cell_measures": "area: areacella", - "long_name": "Ocean Meridional Overturning Mass Streamfunction", - "comment": "Overturning mass streamfunction arising from all advective mass transport processes, resolved and parameterized.", - "dimensions": "latitude rho basin time", - "out_name": "msftmrho", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "msftmrhompa": { - "modeling_realm": "ocean", - "standard_name": "ocean_meridional_overturning_mass_streamfunction_due_to_parameterized_mesoscale_advection", - "units": "kg s-1", - "cell_methods": "longitude: mean (comment: basin mean[ along zig-zag grid path]) time: mean", - "cell_measures": "area: areacella", - "long_name": "ocean meridional overturning mass streamfunction due to parameterized mesoscale advection", - "comment": "CMIP5 called this 'due to Bolus Advection'. Name change respects the more general physics of the mesoscale parameterizations.", - "dimensions": "latitude rho basin time", - "out_name": "msftmrhompa", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "msftmyz": { - "modeling_realm": "ocean", - "standard_name": "ocean_meridional_overturning_mass_streamfunction", - "units": "kg s-1", - "cell_methods": "longitude: mean (comment: basin mean[ along zig-zag grid path]) time: mean", - "cell_measures": "area: areacella", - "long_name": "Ocean Meridional Overturning Mass Streamfunction", - "comment": "Overturning mass streamfunction arising from all advective mass transport processes, resolved and parameterized.", - "dimensions": "latitude olevel basin time", - "out_name": "msftmyz", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "msftmzmpa": { - "modeling_realm": "ocean", - "standard_name": "ocean_meridional_overturning_mass_streamfunction_due_to_parameterized_mesoscale_advection", - "units": "kg s-1", - "cell_methods": "longitude: mean (comment: basin mean[ along zig-zag grid path]) time: mean", - "cell_measures": "area: areacella", - "long_name": "ocean meridional overturning mass streamfunction due to parameterized mesoscale advection", - "comment": "CMIP5 called this 'due to Bolus Advection'. 
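The msftmrho/msftmyz entries above are overturning mass streamfunctions in kg s-1, i.e. the depth-cumulative, basin-zonal integral of the meridional mass transport. A hedged numpy sketch at a single latitude follows; all names and values are invented for illustration.

```python
# Sketch: meridional overturning mass streamfunction Psi(y, z) [kg s-1],
# built by integrating rho * v zonally over a basin, then cumulatively in depth.
import numpy as np

nlon, nlev = 360, 50
rng = np.random.default_rng(1)
rho = 1025.0                                   # sea water density [kg m-3]
v = rng.normal(0.0, 0.05, size=(nlon, nlev))   # northward velocity [m s-1]
dx = 1.1e5                                     # zonal cell width [m]
dz = 100.0                                     # cell thickness [m]

# Zonal (basin) integral of the meridional mass flux per level, then a
# cumulative sum from the surface downward gives the streamfunction profile.
level_transport = (rho * v * dx * dz).sum(axis=0)   # [kg s-1] per level
psi = np.cumsum(level_transport)                    # msftmyz-like profile
print(psi.shape, f"{psi[-1]:.3e} kg s-1")
```

The rho-coordinate variants (msftmrho and relatives) accumulate the same transport in density classes rather than depth levels.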
Name change respects the more general physics of the mesoscale parameterizations.", - "dimensions": "latitude olevel basin time", - "out_name": "msftmzmpa", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "msftmzsmpa": { - "modeling_realm": "ocean", - "standard_name": "ocean_meridional_overturning_mass_streamfunction_due_to_parameterized_submesoscale_advection", - "units": "kg s-1", - "cell_methods": "longitude: mean (comment: basin mean[ along zig-zag grid path]) time: mean", - "cell_measures": "area: areacella", - "long_name": "ocean meridional overturning mass streamfunction due to parameterized submesoscale advection", - "comment": "Report only if there is a submesoscale eddy parameterization.", - "dimensions": "latitude olevel basin time", - "out_name": "msftmzsmpa", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "msftyrho": { - "modeling_realm": "ocean", - "standard_name": "ocean_y_overturning_mass_streamfunction", - "units": "kg s-1", - "cell_methods": "time: mean grid_longitude: mean", - "cell_measures": "area: areacella", - "long_name": "Ocean Y Overturning Mass Streamfunction", - "comment": "Overturning mass streamfunction arising from all advective mass transport processes, resolved and parameterized.", - "dimensions": "latitude rho basin time", - "out_name": "msftyrho", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "msftyrhompa": { - "modeling_realm": "ocean", - "standard_name": "ocean_y_overturning_mass_streamfunction_due_to_parameterized_mesoscale_advection", - "units": "kg s-1", - "cell_methods": "time: mean grid_longitude: mean", - "cell_measures": "area: areacella", - "long_name": "ocean Y overturning mass streamfunction due to parameterized mesoscale advection", - "comment": "CMIP5 called this 'due to Bolus Advection'. Name change respects the more general physics of the mesoscale parameterizations.", - "dimensions": "latitude rho basin time", - "out_name": "msftyrhompa", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "msftyyz": { - "modeling_realm": "ocean", - "standard_name": "ocean_y_overturning_mass_streamfunction", - "units": "kg s-1", - "cell_methods": "time: mean grid_longitude: mean", - "cell_measures": "area: areacella", - "long_name": "Ocean Y Overturning Mass Streamfunction", - "comment": "Overturning mass streamfunction arising from all advective mass transport processes, resolved and parameterized.", - "dimensions": "latitude olevel basin time", - "out_name": "msftyyz", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "msftyzmpa": { - "modeling_realm": "ocean", - "standard_name": "ocean_y_overturning_mass_streamfunction_due_to_parameterized_mesoscale_advection", - "units": "kg s-1", - "cell_methods": "time: mean grid_longitude: mean", - "cell_measures": "area: areacella", - "long_name": "ocean Y overturning mass streamfunction due to parameterized mesoscale advection", - "comment": "CMIP5 called this 'due to Bolus Advection'. 
Name change respects the more general physics of the mesoscale parameterizations.", - "dimensions": "latitude olevel basin time", - "out_name": "msftyzmpa", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "msftyzsmpa": { - "modeling_realm": "ocean", - "standard_name": "ocean_meridional_overturning_mass_streamfunction_due_to_parameterized_submesoscale_advection", - "units": "kg s-1", - "cell_methods": "longitude: mean (comment: basin mean[ along zig-zag grid path]) time: mean", - "cell_measures": "area: areacella", - "long_name": "ocean Y overturning mass streamfunction due to parameterized submesoscale advection", - "comment": "Report only if there is a submesoscale eddy parameterization.", - "dimensions": "latitude olevel basin time", - "out_name": "msftyzsmpa", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nh4": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_ammonium_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Dissolved Ammonium Concentration", - "comment": "", - "dimensions": "longitude latitude time depth0m", - "out_name": "nh4", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nh4os": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_ammonium_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Dissolved Ammonium Concentration", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "nh4os", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "no3": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_nitrate_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Dissolved Nitrate Concentration", - "comment": "", - "dimensions": "longitude latitude time depth0m", - "out_name": "no3", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "no3os": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_nitrate_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Dissolved Nitrate Concentration", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "no3os", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "o2": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_dissolved_molecular_oxygen_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Dissolved Oxygen Concentration", - "comment": "", - "dimensions": "longitude latitude time depth0m", - "out_name": "o2", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "o2min": { - "modeling_realm": "ocnBgChem", - "standard_name": 
"mole_concentration_of_dissolved_molecular_oxygen_in_sea_water_at_shallowest_local_minimum_in_vertical_profile", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Oxygen Minimum Concentration", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "o2min", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "o2os": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_dissolved_molecular_oxygen_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Dissolved Oxygen Concentration", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "o2os", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "o2sat": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_dissolved_molecular_oxygen_in_sea_water_at_saturation", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Dissolved Oxygen Concentration at Saturation", - "comment": "", - "dimensions": "longitude latitude time depth0m", - "out_name": "o2sat", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "o2satos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_dissolved_molecular_oxygen_in_sea_water_at_saturation", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Dissolved Oxygen Concentration at Saturation", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "o2satos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "obvfsq": { - "modeling_realm": "ocean", - "standard_name": "square_of_brunt_vaisala_frequency_in_sea_water", - "units": "s-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Square of Brunt Vaisala Frequency in Sea Water", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "obvfsq", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ocfriver": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_ocean_mole_content_of_organic_carbon_due_to_runoff_and_sediment_dissolution", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Flux of Organic Carbon Into Ocean Surface by Runoff", - "comment": "Organic Carbon supply to ocean through runoff (separate from gas exchange)", - "dimensions": "longitude latitude time depth0m", - "out_name": "ocfriver", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pbfe": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_iron_in_sea_water_due_to_biological_production", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Biogenic Iron 
Production", - "comment": "", - "dimensions": "longitude latitude time depth0m", - "out_name": "pbfe", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pbo": { - "modeling_realm": "ocean", - "standard_name": "sea_water_pressure_at_sea_floor", - "units": "Pa", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Sea Water Pressure at Sea floor", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "pbo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pbsi": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_silicon_in_sea_water_due_to_biological_production", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Biogenic Silica Production", - "comment": "", - "dimensions": "longitude latitude time depth0m", - "out_name": "pbsi", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ph": { - "modeling_realm": "ocnBgChem", - "standard_name": "sea_water_ph_reported_on_total_scale", - "units": "1.0", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "pH", - "comment": "negative log of hydrogen ion concentration with the concentration expressed as mol H kg-1.", - "dimensions": "longitude latitude time depth0m", - "out_name": "ph", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phabio": { - "modeling_realm": "ocnBgChem", - "standard_name": "sea_water_ph_reported_on_total_scale_due_to_abiotic_component", - "units": "1.0", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Abiotic pH", - "comment": "negative log10 of hydrogen ion concentration with the concentration expressed as mol H kg-1 (abiotic component)..", - "dimensions": "longitude latitude time depth0m", - "out_name": "phabio", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phabioos": { - "modeling_realm": "ocnBgChem", - "standard_name": "sea_water_ph_reported_on_total_scale_due_to_abiotic_component", - "units": "1.0", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Abiotic pH", - "comment": "negative log10 of hydrogen ion concentration with the concentration expressed as mol H kg-1.", - "dimensions": "longitude latitude time", - "out_name": "phabioos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phnat": { - "modeling_realm": "ocnBgChem", - "standard_name": "sea_water_ph_reported_on_total_scale_due_to_natural_component", - "units": "1.0", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Natural pH", - "comment": "negative log10 of hydrogen ion concentration with the concentration expressed as mol H kg-1.", - "dimensions": "longitude latitude time depth0m", - "out_name": "phnat", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phnatos": { - 
"modeling_realm": "ocnBgChem", - "standard_name": "sea_water_ph_reported_on_total_scale_due_to_natural_component", - "units": "1.0", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Natural pH", - "comment": "negative log10 of hydrogen ion concentration with the concentration expressed as mol H kg-1.", - "dimensions": "longitude latitude time", - "out_name": "phnatos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phos": { - "modeling_realm": "ocnBgChem", - "standard_name": "sea_water_ph_reported_on_total_scale", - "units": "1.0", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface pH", - "comment": "negative log10 of hydrogen ion concentration with the concentration expressed as mol H kg-1.", - "dimensions": "longitude latitude time", - "out_name": "phos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phyc": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_phytoplankton_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Phytoplankton Carbon Concentration", - "comment": "sum of phytoplankton carbon component concentrations. In most (all?) cases this is the sum of phycdiat and phycmisc (i.e., 'Diatom Carbon Concentration' and 'Non-Diatom Phytoplankton Carbon Concentration'", - "dimensions": "longitude latitude time depth0m", - "out_name": "phyc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phycalc": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_calcareous_phytoplankton_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Mole Concentration of Calcareous Phytoplankton expressed as Carbon in sea water", - "comment": "carbon concentration from calcareous (calcite-producing) phytoplankton component alone", - "dimensions": "longitude latitude time depth0m", - "out_name": "phycalc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phycalcos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_calcareous_phytoplankton_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Mole Concentration of Calcareous Phytoplankton expressed as Carbon in Sea Water", - "comment": "carbon concentration from calcareous (calcite-producing) phytoplankton component alone", - "dimensions": "longitude latitude time", - "out_name": "phycalcos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phycos": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_phytoplankton_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Phytoplankton Carbon Concentration", - "comment": "sum of phytoplankton organic carbon component 
concentrations at the sea surface", - "dimensions": "longitude latitude time", - "out_name": "phycos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phydiat": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_diatoms_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Mole Concentration of Diatoms expressed as Carbon in sea water", - "comment": "carbon from the diatom phytoplankton component concentration alone", - "dimensions": "longitude latitude time depth0m", - "out_name": "phydiat", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phydiatos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_diatoms_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Mole Concentration of Diatoms expressed as Carbon in Sea Water", - "comment": "carbon from the diatom phytoplankton component concentration alone", - "dimensions": "longitude latitude time", - "out_name": "phydiatos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phydiaz": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_diazotrophs_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Mole Concentration of Diazotrophs Expressed as Carbon in sea water", - "comment": "carbon concentration from the diazotrophic phytoplankton component alone", - "dimensions": "longitude latitude time depth0m", - "out_name": "phydiaz", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phydiazos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_diazotrophs_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Mole Concentration of Diazotrophs Expressed as Carbon in Sea Water", - "comment": "carbon concentration from the diazotrophic phytoplankton component alone", - "dimensions": "longitude latitude time", - "out_name": "phydiazos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phyfe": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_phytoplankton_expressed_as_iron_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Mole Concentration of Total Phytoplankton expressed as Iron in sea water", - "comment": "sum of phytoplankton iron component concentrations", - "dimensions": "longitude latitude time depth0m", - "out_name": "phyfe", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phyfeos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_phytoplankton_expressed_as_iron_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea 
time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Mass Concentration of Diazotrophs expressed as Chlorophyll in sea water", - "comment": "sum of phytoplankton iron component concentrations", - "dimensions": "longitude latitude time", - "out_name": "phyfeos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phymisc": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_miscellaneous_phytoplankton_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Mole Concentration of Miscellaneous Phytoplankton expressed as Carbon in sea water", - "comment": "carbon concentration from additional phytoplankton component alone", - "dimensions": "longitude latitude time depth0m", - "out_name": "phymisc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phymiscos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_miscellaneous_phytoplankton_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Mole Concentration of Miscellaneous Phytoplankton expressed as Carbon in Sea Water", - "comment": "carbon concentration from additional phytoplankton component alone", - "dimensions": "longitude latitude time", - "out_name": "phymiscos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phyn": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_phytoplankton_expressed_as_nitrogen_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Mole Concentration of Total Phytoplankton expressed as Nitrogen in sea water", - "comment": "sum of phytoplankton nitrogen component concentrations", - "dimensions": "longitude latitude time depth0m", - "out_name": "phyn", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phynos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_phytoplankton_expressed_as_nitrogen_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Mole Concentration of Phytoplankton Nitrogen in sea water", - "comment": "sum of phytoplankton nitrogen component concentrations", - "dimensions": "longitude latitude time", - "out_name": "phynos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phyp": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_phytoplankton_expressed_as_phosphorus_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Mole Concentration of Total Phytoplankton expressed as Phosphorus in sea water", - "comment": "sum of phytoplankton phosphorus components", - "dimensions": "longitude latitude time depth0m", - "out_name": "phyp", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - 
"ok_max_mean_abs": "" - }, - "phypico": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_picophytoplankton_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Mole Concentration of Picophytoplankton expressed as Carbon in sea water", - "comment": "carbon concentration from the picophytoplankton (<2 um) component alone", - "dimensions": "longitude latitude time depth0m", - "out_name": "phypico", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phypicoos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_picophytoplankton_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Mole Concentration of Picophytoplankton expressed as Carbon in Sea Water", - "comment": "carbon concentration from the picophytoplankton (<2 um) component alone", - "dimensions": "longitude latitude time", - "out_name": "phypicoos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phypos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_phytoplankton_expressed_as_phosphorus_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Mole Concentration of Total Phytoplankton expressed as Phosphorus in sea water", - "comment": "sum of phytoplankton phosphorus components", - "dimensions": "longitude latitude time", - "out_name": "phypos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "physi": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_phytoplankton_expressed_as_silicon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Mole Concentration of Total Phytoplankton expressed as Silicon in sea water", - "comment": "sum of phytoplankton silica component concentrations", - "dimensions": "longitude latitude time depth0m", - "out_name": "physi", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "physios": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_phytoplankton_expressed_as_silicon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Mole Concentration of Total Phytoplankton expressed as Silicon in sea water", - "comment": "sum of phytoplankton silica component concentrations", - "dimensions": "longitude latitude time", - "out_name": "physios", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pnitrate": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_nitrate_utilization", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Primary Carbon Production by Phytoplankton due 
to Nitrate Uptake Alone", - "comment": "Primary (organic carbon) production by phytoplankton due to nitrate uptake alone", - "dimensions": "longitude latitude time depth0m", - "out_name": "pnitrate", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "po4": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_dissolved_inorganic_phosphorus_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Total Dissolved Inorganic Phosphorus Concentration", - "comment": "", - "dimensions": "longitude latitude time depth0m", - "out_name": "po4", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "po4os": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_dissolved_inorganic_phosphorus_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Total Dissolved Inorganic Phosphorus Concentration", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "po4os", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pon": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_particulate_organic_matter_expressed_as_nitrogen_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Mole Concentration of Particulate Organic Matter expressed as Nitrogen in sea water", - "comment": "sum of particulate organic nitrogen component concentrations", - "dimensions": "longitude latitude time depth0m", - "out_name": "pon", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ponos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_particulate_organic_matter_expressed_as_nitrogen_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Mole Concentration of Particulate Organic Matter expressed as Nitrogen in sea water", - "comment": "sum of particulate organic nitrogen component concentrations", - "dimensions": "longitude latitude time", - "out_name": "ponos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pop": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_particulate_organic_matter_expressed_as_phosphorus_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Mole Concentration of Particulate Organic Matter expressed as Phosphorus in sea water", - "comment": "sum of particulate organic phosphorus component concentrations", - "dimensions": "longitude latitude time depth0m", - "out_name": "pop", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "popos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_particulate_organic_matter_expressed_as_phosphorus_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: 
mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Mole Concentration of Particulate Organic Matter expressed as Phosphorus in sea water", - "comment": "sum of particulate organic phosphorus component concentrations", - "dimensions": "longitude latitude time", - "out_name": "popos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pp": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Primary Carbon Production by Total Phytoplankton", - "comment": "total primary (organic carbon) production by phytoplankton", - "dimensions": "longitude latitude time depth0m", - "out_name": "pp", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prra": { - "modeling_realm": "atmos", - "standard_name": "rainfall_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where ice_free_sea over sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Rainfall Flux where Ice Free Ocean over Sea", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "prra", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prsn": { - "modeling_realm": "atmos", - "standard_name": "snowfall_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where ice_free_sea over sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Snowfall Flux where Ice Free Ocean over Sea", - "comment": "at surface; includes precipitation of all forms of water in the solid phase", - "dimensions": "longitude latitude time", - "out_name": "prsn", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pso": { - "modeling_realm": "ocean", - "standard_name": "sea_water_pressure_at_sea_water_surface", - "units": "Pa", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Sea Water Pressure at Sea Water Surface", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "pso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlntds": { - "modeling_realm": "ocean", - "standard_name": "surface_net_downward_longwave_flux", - "units": "W m-2", - "cell_methods": "area: mean where ice_free_sea over sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Net Downward Longwave Radiation", - "comment": "This is defined as 'where ice_free_sea over sea'", - "dimensions": "longitude latitude time", - "out_name": "rlntds", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsdo": { - "modeling_realm": "ocean", - "standard_name": "downwelling_shortwave_flux_in_sea_water", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Downwelling Shortwave Radiation in Sea Water", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "rsdo", - 
"type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsntds": { - "modeling_realm": "ocean", - "standard_name": "net_downward_shortwave_flux_at_sea_water_surface", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Net Downward Shortwave Radiation at Sea Water Surface", - "comment": "This is the flux into the surface of liquid sea water only. This excludes shortwave flux absorbed by sea ice, but includes any light that passes through the ice and is absorbed by the ocean.", - "dimensions": "longitude latitude time", - "out_name": "rsntds", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sf6": { - "modeling_realm": "ocean", - "standard_name": "mole_concentration_of_sulfur_hexafluoride_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Moles Per Unit Mass of SF6 in sea water", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "sf6", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sfdsi": { - "modeling_realm": "ocean seaIce", - "standard_name": "downward_sea_ice_basal_salt_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Downward Sea Ice Basal Salt Flux", - "comment": "This field is physical, and it arises since sea ice has a nonzero salt content, so it exchanges salt with the liquid ocean upon melting and freezing.", - "dimensions": "longitude latitude time", - "out_name": "sfdsi", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sfriver": { - "modeling_realm": "ocean", - "standard_name": "salt_flux_into_sea_water_from_rivers", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Salt Flux into Sea Water from Rivers", - "comment": "This field is physical, and it arises when rivers carry a nonzero salt content. 
Often this is zero, with rivers assumed to be fresh.", - "dimensions": "longitude latitude time", - "out_name": "sfriver", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "si": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_dissolved_inorganic_silicon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Total Dissolved Inorganic Silicon Concentration", - "comment": "", - "dimensions": "longitude latitude time depth0m", - "out_name": "si", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sios": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_dissolved_inorganic_silicon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Total Dissolved Inorganic Silicon Concentration", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "sios", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sltovgyre": { - "modeling_realm": "ocean", - "standard_name": "northward_ocean_salt_transport_due_to_gyre", - "units": "kg s-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Northward Ocean Salt Transport due to Gyre", - "comment": "From all advective mass transport processes, resolved and parameterized.", - "dimensions": "latitude basin time", - "out_name": "sltovgyre", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sltovovrt": { - "modeling_realm": "ocean", - "standard_name": "northward_ocean_salt_transport_due_to_overturning", - "units": "kg s-1", - "cell_methods": "longitude: mean time: mean", - "cell_measures": "area: areacella", - "long_name": "Northward Ocean Salt Transport due to Overturning", - "comment": "From all advective mass transport processes, resolved and parameterized.", - "dimensions": "latitude basin time", - "out_name": "sltovovrt", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "so": { - "modeling_realm": "ocean", - "standard_name": "sea_water_salinity", - "units": "0.001", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Sea Water Salinity", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "so", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sob": { - "modeling_realm": "ocean", - "standard_name": "sea_water_salinity_at_sea_floor", - "units": "0.001", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "sea water salinity at sea floor", - "comment": "Model prognostic salinity at bottom-most model grid cell", - "dimensions": "longitude latitude time", - "out_name": "sob", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "soga": { - "modeling_realm": "ocean", - "standard_name": "sea_water_salinity", - "units": "0.001", - "cell_methods": "area: 
mean where sea time: mean", - "cell_measures": "", - "long_name": "Global Mean Sea Water Salinity", - "comment": "", - "dimensions": "time", - "out_name": "soga", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sos": { - "modeling_realm": "ocean", - "standard_name": "sea_surface_salinity", - "units": "0.001", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Sea Surface Salinity", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "sos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sosga": { - "modeling_realm": "ocean", - "standard_name": "sea_surface_salinity", - "units": "0.001", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "", - "long_name": "Global Average Sea Surface Salinity", - "comment": "", - "dimensions": "time", - "out_name": "sosga", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sossq": { - "modeling_realm": "ocean", - "standard_name": "square_of_sea_surface_salinity", - "units": "1e-06", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Square of Sea Surface Salinity", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "sossq", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "spco2": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_partial_pressure_of_carbon_dioxide_in_sea_water", - "units": "Pa", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Aqueous Partial Pressure of CO2", - "comment": "", - "dimensions": "longitude latitude time depth0m", - "out_name": "spco2", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "spco2abio": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_partial_pressure_of_carbon_dioxide_in_sea_water_due_to_abiotic_component", - "units": "Pa", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Abiotic Surface Aqueous Partial Pressure of CO2", - "comment": "", - "dimensions": "longitude latitude time depth0m", - "out_name": "spco2abio", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "spco2nat": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_partial_pressure_of_carbon_dioxide_in_sea_water_due_to_natural_component", - "units": "Pa", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Natural Surface Aqueous Partial Pressure of CO2", - "comment": "", - "dimensions": "longitude latitude time depth0m", - "out_name": "spco2nat", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "talk": { - "modeling_realm": "ocnBgChem", - "standard_name": "sea_water_alkalinity_expressed_as_mole_equivalent", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Total Alkalinity", - "comment": "total alkalinity equivalent concentration 
(including carbonate, nitrogen, silicate, and borate components)", - "dimensions": "longitude latitude time depth0m", - "out_name": "talk", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "talknat": { - "modeling_realm": "ocnBgChem", - "standard_name": "seawater_alkalinity_expressed_as_mole_equivalent_natural_component", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Natural Total Alkalinity", - "comment": "total alkalinity equivalent concentration (including carbonate, borate, phosphorus, silicon, and nitrogen components) at preindustrial atmospheric xCO2", - "dimensions": "longitude latitude time depth0m", - "out_name": "talknat", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "talknatos": { - "modeling_realm": "ocnBgChem", - "standard_name": "sea_water_alkalinity_expressed_as_mole_equivalent_due_to_natural_component", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Natural Total Alkalinity", - "comment": "total alkalinity equivalent concentration (including carbonate, borate, phosphorus, silicon, and nitrogen components) at preindustrial atmospheric xCO2", - "dimensions": "longitude latitude time", - "out_name": "talknatos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "talkos": { - "modeling_realm": "ocnBgChem", - "standard_name": "sea_water_alkalinity_expressed_as_mole_equivalent", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Total Alkalinity", - "comment": "total alkalinity equivalent concentration (including carbonate, borate, phosphorus, silicon, and nitrogen components)", - "dimensions": "longitude latitude time", - "out_name": "talkos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tauucorr": { - "modeling_realm": "ocean", - "standard_name": "surface_downward_x_stress_correction", - "units": "N m-2", - "cell_methods": "time: mean", - "cell_measures": "--OPT", - "long_name": "Surface Downward X Stress Correction", - "comment": "This is the stress on the liquid ocean from overlying atmosphere, sea ice, ice shelf, etc.", - "dimensions": "longitude latitude time", - "out_name": "tauucorr", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tauuo": { - "modeling_realm": "ocean", - "standard_name": "surface_downward_x_stress", - "units": "N m-2", - "cell_methods": "time: mean", - "cell_measures": "--OPT", - "long_name": "Surface Downward X Stress", - "comment": "This is the stress on the liquid ocean from overlying atmosphere, sea ice, ice shelf, etc.", - "dimensions": "longitude latitude time", - "out_name": "tauuo", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tauvcorr": { - "modeling_realm": "ocean", - "standard_name": "surface_downward_y_stress_correction", - "units": "N m-2", - "cell_methods": "time: mean", - "cell_measures": "--OPT", - "long_name": "Surface Downward Y Stress Correction", - "comment": "This is the stress on 
the liquid ocean from overlying atmosphere, sea ice, ice shelf, etc.", - "dimensions": "longitude latitude time", - "out_name": "tauvcorr", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tauvo": { - "modeling_realm": "ocean", - "standard_name": "surface_downward_y_stress", - "units": "N m-2", - "cell_methods": "time: mean", - "cell_measures": "--OPT", - "long_name": "Surface Downward Y Stress", - "comment": "This is the stress on the liquid ocean from overlying atmosphere, sea ice, ice shelf, etc.", - "dimensions": "longitude latitude time", - "out_name": "tauvo", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "thetao": { - "modeling_realm": "ocean", - "standard_name": "sea_water_potential_temperature", - "units": "degC", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Sea Water Potential Temperature", - "comment": "Diagnostic should be contributed even for models using conservative temperature as prognostic field.", - "dimensions": "longitude latitude olevel time", - "out_name": "thetao", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "thetaoga": { - "modeling_realm": "ocean", - "standard_name": "sea_water_potential_temperature", - "units": "degC", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "", - "long_name": "Global Average Sea Water Potential Temperature", - "comment": "Diagnostic should be contributed even for models using conservative temperature as prognostic field", - "dimensions": "time", - "out_name": "thetaoga", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "thkcello": { - "modeling_realm": "ocean", - "standard_name": "cell_thickness", - "units": "m", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Ocean Model Cell Thickness", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "thkcello", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tob": { - "modeling_realm": "ocean", - "standard_name": "sea_water_potential_temperature_at_sea_floor", - "units": "degC", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Sea Water Potential Temperature at Sea Floor", - "comment": "Potential temperature at the ocean bottom-most grid cell.", - "dimensions": "longitude latitude time", - "out_name": "tob", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tos": { - "modeling_realm": "ocean", - "standard_name": "sea_surface_temperature", - "units": "K", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Sea Surface Temperature", - "comment": "temperature of liquid ocean. Note that the correct standard_name for this variable is 'sea_surface_temperature', not 'surface_temperature', but this was discovered too late to correct. 
To maintain consistency across CMIP5 models, the wrong standard_name will continue to be used.", - "dimensions": "longitude latitude time", - "out_name": "tos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tosga": { - "modeling_realm": "ocean", - "standard_name": "sea_surface_temperature", - "units": "degC", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "", - "long_name": "Global Average Sea Surface Temperature", - "comment": "This may differ from 'surface temperature' in regions of sea ice. For models using conservative temperature as prognostic field, they should report the SST as surface potential temperature.", - "dimensions": "time", - "out_name": "tosga", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tossq": { - "modeling_realm": "ocean", - "standard_name": "square_of_sea_surface_temperature", - "units": "degC2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Square of Sea Surface Temperature", - "comment": "square of temperature of liquid ocean, averaged over the day.", - "dimensions": "longitude latitude time", - "out_name": "tossq", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "umo": { - "modeling_realm": "ocean", - "standard_name": "ocean_mass_x_transport", - "units": "kg s-1", - "cell_methods": "time: mean", - "cell_measures": "--OPT", - "long_name": "Ocean Mass X Transport", - "comment": "X-ward mass transport from resolved and parameterized advective transport.", - "dimensions": "longitude latitude olevel time", - "out_name": "umo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "uo": { - "modeling_realm": "ocean", - "standard_name": "sea_water_x_velocity", - "units": "m s-1", - "cell_methods": "time: mean", - "cell_measures": "--OPT", - "long_name": "Sea Water X Velocity", - "comment": "Prognostic x-ward velocity component resolved by the model.", - "dimensions": "longitude latitude olevel time", - "out_name": "uo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vmo": { - "modeling_realm": "ocean", - "standard_name": "ocean_mass_y_transport", - "units": "kg s-1", - "cell_methods": "time: mean", - "cell_measures": "--OPT", - "long_name": "Ocean Mass Y Transport", - "comment": "Y-ward mass transport from resolved and parameterized advective transport.", - "dimensions": "longitude latitude olevel time", - "out_name": "vmo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vo": { - "modeling_realm": "ocean", - "standard_name": "sea_water_y_velocity", - "units": "m s-1", - "cell_methods": "time: mean", - "cell_measures": "--OPT", - "long_name": "Sea Water Y Velocity", - "comment": "Prognostic y-ward velocity component resolved by the model.", - "dimensions": "longitude latitude olevel time", - "out_name": "vo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "volo": { - "modeling_realm": "ocean", - "standard_name": "sea_water_volume", - "units": "m3", - "cell_methods": 
"area: sum where sea time: mean", - "cell_measures": "", - "long_name": "Sea Water Volume", - "comment": "Total volume of liquid seawater.", - "dimensions": "time", - "out_name": "volo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vsf": { - "modeling_realm": "ocean", - "standard_name": "virtual_salt_flux_into_sea_water", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Virtual Salt Flux into Sea Water", - "comment": "It is set to zero in models which receive a real water flux.", - "dimensions": "longitude latitude time", - "out_name": "vsf", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vsfcorr": { - "modeling_realm": "ocean", - "standard_name": "virtual_salt_flux_correction", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Virtual Salt Flux Correction", - "comment": "It is set to zero in models which receive a real water flux.", - "dimensions": "longitude latitude time", - "out_name": "vsfcorr", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vsfevap": { - "modeling_realm": "ocean", - "standard_name": "virtual_salt_flux_into_sea_water_due_to_evaporation", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Virtual Salt Flux into Sea Water due to Evaporation", - "comment": "zero for models using real water fluxes.", - "dimensions": "longitude latitude time", - "out_name": "vsfevap", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vsfpr": { - "modeling_realm": "ocean", - "standard_name": "virtual_salt_flux_into_sea_water_due_to_rainfall", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Virtual Salt Flux into Sea Water due to Rainfall", - "comment": "zero for models using real water fluxes.", - "dimensions": "longitude latitude time", - "out_name": "vsfpr", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vsfriver": { - "modeling_realm": "ocean", - "standard_name": "virtual_salt_flux_into_sea_water_from_rivers", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Virtual Salt Flux into Sea Water From Rivers", - "comment": "zero for models using real water fluxes.", - "dimensions": "longitude latitude time", - "out_name": "vsfriver", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vsfsit": { - "modeling_realm": "ocean seaIce", - "standard_name": "virtual_salt_flux_into_sea_water_due_to_sea_ice_thermodynamics", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Virtual Salt Flux into Sea Water due to Sea Ice Thermodynamics", - "comment": "This variable measures the virtual salt flux into sea water due to the melting of sea ice. 
It is set to zero in models which receive a real water flux.", - "dimensions": "longitude latitude time", - "out_name": "vsfsit", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wfcorr": { - "modeling_realm": "ocean", - "standard_name": "water_flux_correction", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Water Flux Correction", - "comment": "Positive flux implies correction adds water to ocean.", - "dimensions": "longitude latitude time", - "out_name": "wfcorr", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wfo": { - "modeling_realm": "ocean", - "standard_name": "water_flux_into_sea_water", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Water Flux into Sea Water", - "comment": "computed as the water flux into the ocean divided by the area of the ocean portion of the grid cell. This is the sum of the next two variables in this table.", - "dimensions": "longitude latitude time", - "out_name": "wfo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wfonocorr": { - "modeling_realm": "ocean", - "standard_name": "water_flux_into_sea_water_without_flux_correction", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Water Flux into Sea Water Without Flux Correction", - "comment": "computed as the water flux (without flux correction) into the ocean divided by the area of the ocean portion of the grid cell.", - "dimensions": "longitude latitude time", - "out_name": "wfonocorr", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wmo": { - "modeling_realm": "ocean", - "standard_name": "upward_ocean_mass_transport", - "units": "kg s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Upward Ocean Mass Transport", - "comment": "Upward mass transport from resolved and parameterized advective transport.", - "dimensions": "longitude latitude olevel time", - "out_name": "wmo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wo": { - "modeling_realm": "ocean", - "standard_name": "upward_sea_water_velocity", - "units": "m s-1", - "cell_methods": "time: mean", - "cell_measures": "--OPT", - "long_name": "Sea Water Z Velocity", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "wo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zfullo": { - "modeling_realm": "ocean", - "standard_name": "depth_below_geoid", - "units": "m", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Depth Below Geoid of Ocean Layer", - "comment": "Depth below geoid", - "dimensions": "longitude latitude olevel time", - "out_name": "zfullo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zhalfo": { - "modeling_realm": "ocean", - 
"standard_name": "depth_below_geoid", - "units": "m", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Depth Below Geoid of Interfaces Between Ocean Layers", - "comment": "Depth below geoid", - "dimensions": "longitude latitude olevel time", - "out_name": "zhalfo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zmeso": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_mesozooplankton_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Mole Concentration of Mesozooplankton expressed as Carbon in sea water", - "comment": "carbon concentration from mesozooplankton (20-200 um) component alone", - "dimensions": "longitude latitude time depth0m", - "out_name": "zmeso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zmesoos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_mesozooplankton_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Mole Concentration of Mesozooplankton expressed as Carbon in Sea Water", - "comment": "carbon concentration from mesozooplankton (20-200 um) component alone", - "dimensions": "longitude latitude time", - "out_name": "zmesoos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zmicro": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_microzooplankton_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Mole Concentration of Microzooplankton expressed as Carbon in sea water", - "comment": "carbon concentration from the microzooplankton (<20 um) component alone", - "dimensions": "longitude latitude time depth0m", - "out_name": "zmicro", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zmicroos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_microzooplankton_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Mole Concentration of Microzooplankton expressed as Carbon in Sea Water", - "comment": "carbon concentration from the microzooplankton (<20 um) component alone", - "dimensions": "longitude latitude time", - "out_name": "zmicroos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zmisc": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_miscellaneous_zooplankton_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Mole Concetration of Other Zooplankton expressed as Carbon in sea water", - "comment": "carbon from additional zooplankton component concentrations alone (e.g. Micro, meso). 
Since the models all have different numbers of components, this variable has been included to provide a check for intercomparison between models since some phytoplankton groups are supersets.", - "dimensions": "longitude latitude time depth0m", - "out_name": "zmisc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zmiscos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_miscellaneous_zooplankton_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Mole Concentration of Other Zooplankton expressed as Carbon in sea water", - "comment": "carbon from additional zooplankton component concentrations alone (e.g. Micro, meso). Since the models all have different numbers of components, this variable has been included to provide a check for intercomparison between models since some phytoplankton groups are supersets.", - "dimensions": "longitude latitude time", - "out_name": "zmiscos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zo2min": { - "modeling_realm": "ocnBgChem", - "standard_name": "depth_at_shallowest_local_minimum_in_vertical_profile_of_mole_concentration_of_dissolved_molecular_oxygen_in_sea_water", - "units": "m", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Depth of Oxygen Minimum Concentration", - "comment": "Depth of vertical minimum concentration of dissolved oxygen gas (if two, then the shallower)", - "dimensions": "longitude latitude time", - "out_name": "zo2min", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zooc": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_zooplankton_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Zooplankton Carbon Concentration", - "comment": "sum of zooplankton carbon component concentrations", - "dimensions": "longitude latitude time depth0m", - "out_name": "zooc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zoocos": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_mole_concentration_of_zooplankton_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Zooplankton Carbon Concentration", - "comment": "sum of zooplankton carbon component concentrations", - "dimensions": "longitude latitude time", - "out_name": "zoocos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zos": { - "modeling_realm": "ocean", - "standard_name": "sea_surface_height_above_geoid", - "units": "m", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Sea Surface Height Above Geoid", - "comment": "This is the dynamic sea level, so should have zero global area mean. 
It should not include inverse barometer depressions from sea ice.", - "dimensions": "longitude latitude time", - "out_name": "zos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zossq": { - "modeling_realm": "ocean", - "standard_name": "square_of_sea_surface_height_above_geoid", - "units": "m2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Square of Sea Surface Height Above Geoid", - "comment": "Surface ocean geoid defines z=0.", - "dimensions": "longitude latitude time", - "out_name": "zossq", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zostoga": { - "modeling_realm": "ocean", - "standard_name": "global_average_thermosteric_sea_level_change", - "units": "m", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "", - "long_name": "Global Average Thermosteric Sea Level Change", - "comment": "There is no CMIP6 request for zosga nor zossga.", - "dimensions": "time", - "out_name": "zostoga", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zsatarag": { - "modeling_realm": "ocnBgChem", - "standard_name": "minimum_depth_of_aragonite_undersaturation_in_sea_water", - "units": "m", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Aragonite Saturation Depth", - "comment": "Depth of aragonite saturation horizon (0 if undersaturated at all depths, 'missing' if supersaturated at all depths; if multiple horizons exist, the shallowest should be taken).", - "dimensions": "longitude latitude time", - "out_name": "zsatarag", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zsatcalc": { - "modeling_realm": "ocnBgChem", - "standard_name": "minimum_depth_of_calcite_undersaturation_in_sea_water", - "units": "m", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Calcite Saturation Depth", - "comment": "Depth of calcite saturation horizon (0 if undersaturated at all depths, and missing if saturated through whole depth; if two or more horizons exist, then the shallowest is reported)", - "dimensions": "longitude latitude time", - "out_name": "zsatcalc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Oyr.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Oyr.json deleted file mode 100644 index 90e6de9fd0..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_Oyr.json +++ /dev/null @@ -1,2143 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table Oyr", - "realm": "ocnBgChem", - "frequency": "yr", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "365.00000", - "generic_levels": "olevel", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "arag": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_aragonite_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": 
"Aragonite Concentration", - "comment": "Sum of particulate aragonite components (e.g. Phytoplankton, Detrital, etc.)", - "dimensions": "longitude latitude olevel time", - "out_name": "arag", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "bacc": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_bacteria_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Bacterial Carbon Concentration", - "comment": "Sum of bacterial carbon component concentrations", - "dimensions": "longitude latitude olevel time", - "out_name": "bacc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "bddtalk": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_sea_water_alkalinity_expressed_as_mole_equivalent_due_to_biological_processes", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Rate of Change of Alkalinity due to Biological Activity", - "comment": "Net total of biological terms in time rate of change of alkalinity", - "dimensions": "longitude latitude olevel time", - "out_name": "bddtalk", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "bddtdic": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_dissolved_inorganic_carbon_in_sea_water_due_to_biological_processes", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Rate of Change of Dissolved Inorganic Carbon due to Biological Activity", - "comment": "Net total of biological terms in time rate of change of dissolved inorganic carbon", - "dimensions": "longitude latitude olevel time", - "out_name": "bddtdic", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "bddtdife": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_dissolved_inorganic_iron_in_sea_water_due_to_biological_processes", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Rate of Change of Dissolved Inorganic Iron due to Biological Activity", - "comment": "Net total of biological terms in time rate of change of dissolved inorganic iron", - "dimensions": "longitude latitude olevel time", - "out_name": "bddtdife", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "bddtdin": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_dissolved_inorganic_nitrogen_in_sea_water_due_to_biological_processes", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Rate of Change of Nitrogen Nutrient due to Biological Activity", - "comment": "Net total of biological terms in time rate of change of nitrogen nutrients (e.g. 
NO3+NH4)", - "dimensions": "longitude latitude olevel time", - "out_name": "bddtdin", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "bddtdip": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_dissolved_inorganic_phosphorus_in_sea_water_due_to_biological_processes", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Rate of Change of Dissolved phosphorus due to Biological Activity", - "comment": "Net of biological terms in time rate of change of dissolved phosphate", - "dimensions": "longitude latitude olevel time", - "out_name": "bddtdip", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "bddtdisi": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_dissolved_inorganic_silicon_in_sea_water_due_to_biological_processes", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Rate of Change of Dissolved Inorganic silicon due to Biological Activity", - "comment": "Net of biological terms in time rate of change of dissolved inorganic silicon", - "dimensions": "longitude latitude olevel time", - "out_name": "bddtdisi", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "bfe": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_particulate_organic_matter_expressed_as_iron_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Mole Concentration of Particulate Organic Matter expressed as Iron in sea water", - "comment": "Sum of particulate organic iron component concentrations", - "dimensions": "longitude latitude olevel time", - "out_name": "bfe", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "bsi": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_particulate_matter_expressed_as_silicon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Mole Concentration of Particulate Organic Matter expressed as silicon in sea water", - "comment": "Sum of particulate silica component concentrations", - "dimensions": "longitude latitude olevel time", - "out_name": "bsi", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "calc": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_calcite_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Calcite Concentration", - "comment": "Sum of particulate calcite component concentrations (e.g. 
Phytoplankton, Detrital, etc.)", - "dimensions": "longitude latitude olevel time", - "out_name": "calc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cfc11": { - "modeling_realm": "ocean", - "standard_name": "mole_concentration_of_cfc11_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Mole Concentration of CFC-11 in sea water", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "cfc11", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "cfc12": { - "modeling_realm": "ocean", - "standard_name": "mole_concentration_of_cfc12_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Mole Concentration of CFC-12 in sea water", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "cfc12", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "chl": { - "modeling_realm": "ocnBgChem", - "standard_name": "mass_concentration_of_phytoplankton_expressed_as_chlorophyll_in_sea_water", - "units": "kg m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Mass Concentration of Total Chlorophyll in sea water", - "comment": "Sum of chlorophyll from all phytoplankton group concentrations. In most models this is equal to chldiat+chlmisc, that is the sum of Diatom Chlorophyll Mass Concentration and Other Phytoplankton Chlorophyll Mass Concentration", - "dimensions": "longitude latitude olevel time", - "out_name": "chl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "chlcalc": { - "modeling_realm": "ocnBgChem", - "standard_name": "mass_concentration_of_calcareous_phytoplankton_expressed_as_chlorophyll_in_sea_water", - "units": "kg m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Mass Concentration of Calcareous Phytoplankton expressed as Chlorophyll in sea water", - "comment": "chlorophyll concentration from the calcite-producing phytoplankton component alone", - "dimensions": "longitude latitude olevel time", - "out_name": "chlcalc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "chldiat": { - "modeling_realm": "ocnBgChem", - "standard_name": "mass_concentration_of_diatoms_expressed_as_chlorophyll_in_sea_water", - "units": "kg m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Mass Concentration of Diatom expressed as Chlorophyll in sea water", - "comment": "Chlorophyll from diatom phytoplankton component concentration alone", - "dimensions": "longitude latitude olevel time", - "out_name": "chldiat", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "chldiaz": { - "modeling_realm": "ocnBgChem", - "standard_name": "mass_concentration_of_diazotrophs_expressed_as_chlorophyll_in_sea_water", - "units": "kg m-3", - "cell_methods": "area: 
mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Mass Concentration of Diazotrophs expressed as Chlorophyll in sea water", - "comment": "Chlorophyll concentration from the diazotrophic phytoplankton component alone", - "dimensions": "longitude latitude olevel time", - "out_name": "chldiaz", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "chlmisc": { - "modeling_realm": "ocnBgChem", - "standard_name": "mass_concentration_of_miscellaneous_phytoplankton_expressed_as_chlorophyll_in_sea_water", - "units": "kg m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Mass Concentration of Other Phytoplankton expressed as Chlorophyll in sea water", - "comment": "Chlorophyll from additional phytoplankton component concentrations alone", - "dimensions": "longitude latitude olevel time", - "out_name": "chlmisc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "chlpico": { - "modeling_realm": "ocnBgChem", - "standard_name": "mass_concentration_of_picophytoplankton_expressed_as_chlorophyll_in_sea_water", - "units": "kg m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Mass Concentration of Picophytoplankton expressed as Chlorophyll in sea water", - "comment": "chlorophyll concentration from the picophytoplankton (<2 um) component alone", - "dimensions": "longitude latitude olevel time", - "out_name": "chlpico", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "co3": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_carbonate_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Carbonate ion Concentration", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "co3", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "co3abio": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_carbonate_expressed_as_carbon_in_sea_water_due_to_abiotic_component", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Abiotic Carbonate ion Concentration", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "co3abio", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "co3nat": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_carbonate_expressed_as_carbon_in_sea_water_due_to_natural_component", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Natural Carbonate ion Concentration", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "co3nat", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "co3satarag": { - "modeling_realm": "ocnBgChem", - "standard_name": 
"mole_concentration_of_aragonite_expressed_as_carbon_in_sea_water_at_saturation", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Carbonate ion Concentration for sea water in equilibrium with pure Aragonite", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "co3satarag", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "co3satcalc": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_calcite_expressed_as_carbon_in_sea_water_at_saturation", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Carbonate ion Concentration for sea water in equilibrium with pure Calcite", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "co3satcalc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "darag": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_aragonite_expressed_as_carbon_in_sea_water_due_to_dissolution", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Aragonite Dissolution", - "comment": "Rate of change of Aragonite carbon mole concentration due to dissolution", - "dimensions": "longitude latitude olevel time", - "out_name": "darag", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dcalc": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_calcite_expressed_as_carbon_in_sea_water_due_to_dissolution", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Calcite Dissolution", - "comment": "Rate of change of Calcite carbon mole concentration due to dissolution", - "dimensions": "longitude latitude olevel time", - "out_name": "dcalc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "detoc": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_organic_detritus_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Detrital Organic Carbon Concentration", - "comment": "Sum of detrital organic carbon component concentrations", - "dimensions": "longitude latitude olevel time", - "out_name": "detoc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dfe": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_dissolved_iron_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Mole Concentration of Dissolved Iron in sea water", - "comment": "Dissolved iron in sea water, including both Fe2+ and Fe3+ ions (but not particulate detrital iron)", - "dimensions": "longitude latitude olevel time", - "out_name": "dfe", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - 
"ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "difmxybo": { - "modeling_realm": "ocean", - "standard_name": "ocean_momentum_xy_biharmonic_diffusivity", - "units": "m4 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "ocean momentum xy biharmonic diffusivity", - "comment": "Lateral biharmonic viscosity applied to the momentum equitions.", - "dimensions": "longitude latitude olevel time", - "out_name": "difmxybo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "difmxylo": { - "modeling_realm": "ocean", - "standard_name": "ocean_momentum_xy_laplacian_diffusivity", - "units": "m2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "ocean momentum xy laplacian diffusivity", - "comment": "Lateral Laplacian viscosity applied to the momentum equitions.", - "dimensions": "longitude latitude olevel time", - "out_name": "difmxylo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "diftrblo": { - "modeling_realm": "ocean", - "standard_name": "ocean_tracer_bolus_laplacian_diffusivity", - "units": "m2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "ocean tracer diffusivity due to parameterized mesoscale advection", - "comment": "Ocean tracer diffusivity associated with parameterized eddy-induced advective transport. Sometimes this diffusivity is called the 'thickness' diffusivity. For CMIP5, this diagnostic was called 'ocean tracer bolus laplacian diffusivity'. The CMIP6 name is physically more relevant.", - "dimensions": "longitude latitude olevel time", - "out_name": "diftrblo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "diftrelo": { - "modeling_realm": "ocean", - "standard_name": "ocean_tracer_epineutral_laplacian_diffusivity", - "units": "m2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "ocean tracer epineutral laplacian diffusivity", - "comment": "Ocean tracer diffusivity associated with parameterized eddy-induced diffusive transport oriented along neutral or isopycnal directions. 
Sometimes this diffusivity is called the neutral diffusivity or isopycnal diffusivity or Redi diffusivity.", - "dimensions": "longitude latitude olevel time", - "out_name": "diftrelo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "difvho": { - "modeling_realm": "ocean", - "standard_name": "ocean_vertical_heat_diffusivity", - "units": "m2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "ocean vertical heat diffusivity", - "comment": "Vertical/dianeutral diffusivity applied to prognostic temperature field.", - "dimensions": "longitude latitude olevel time", - "out_name": "difvho", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "difvso": { - "modeling_realm": "ocean", - "standard_name": "ocean_vertical_salt_diffusivity", - "units": "m2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "ocean vertical salt diffusivity", - "comment": "Vertical/dianeutral diffusivity applied to prognostic salinity field.", - "dimensions": "longitude latitude olevel time", - "out_name": "difvso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dispkexyfo": { - "modeling_realm": "ocean", - "standard_name": "ocean_kinetic_energy_dissipation_per_unit_area_due_to_xy_friction", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "ocean kinetic energy dissipation per unit area due to xy friction", - "comment": "Depth integrated impacts on kinetic energy arising from lateral frictional dissipation associated with Laplacian and/or biharmonic viscosity. 
For CMIP5, this diagnostic was 3d, whereas the CMIP6 depth integrated diagnostic is sufficient for many purposes and reduces archive requirements.", - "dimensions": "longitude latitude time", - "out_name": "dispkexyfo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dissi13c": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_dissolved_inorganic_carbon13_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Dissolved Inorganic 13Carbon Concentration", - "comment": "Dissolved inorganic 13carbon (CO3+HCO3+H2CO3) concentration", - "dimensions": "longitude latitude olevel time", - "out_name": "dissi13c", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dissi14cabio": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_dissolved_inorganic_carbon14_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Abiotic Dissolved Inorganic 14Carbon Concentration", - "comment": "Abiotic Dissolved inorganic 14carbon (CO3+HCO3+H2CO3) concentration", - "dimensions": "longitude latitude olevel time", - "out_name": "dissi14cabio", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dissic": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_dissolved_inorganic_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Dissolved Inorganic Carbon Concentration", - "comment": "Dissolved inorganic carbon (CO3+HCO3+H2CO3) concentration", - "dimensions": "longitude latitude olevel time", - "out_name": "dissic", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dissicabio": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_dissolved_inorganic_carbon_in_sea_water_abiotic_component", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Abiotic Dissolved Inorganic Carbon Concentration", - "comment": "Abiotic Dissolved inorganic carbon (CO3+HCO3+H2CO3) concentration", - "dimensions": "longitude latitude olevel time", - "out_name": "dissicabio", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dissicnat": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_dissolved_inorganic_carbon_in_sea_water_natural_component", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Natural Dissolved Inorganic Carbon Concentration", - "comment": "Dissolved inorganic carbon (CO3+HCO3+H2CO3) concentration at preindustrial atmospheric xCO2", - "dimensions": "longitude latitude olevel time", - "out_name": "dissicnat", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dissoc": { - "modeling_realm": "ocnBgChem", - "standard_name":
"mole_concentration_of_dissolved_organic_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Dissolved Organic Carbon Concentration", - "comment": "Sum of dissolved carbon component concentrations explicitly represented (i.e. not ~40 uM refractory unless explicit)", - "dimensions": "longitude latitude olevel time", - "out_name": "dissoc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dms": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_fraction_of_dimethyl_sulfide_in_air", - "units": "mol mol-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Mole Concentration of Dimethyl Sulphide in sea water", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "dms", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dpocdtcalc": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production_by_calcareous_phytoplankton", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Tendency of Mole Concentration of Organic Carbon in sea water due to Net Primary Production by Calcareous Phytoplankton", - "comment": "Primary (organic carbon) production by the calcite-producing phytoplankton component alone", - "dimensions": "longitude latitude olevel time", - "out_name": "dpocdtcalc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dpocdtdiaz": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production_by_diazotrophs", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Tendency of Mole Concentration of Organic Carbon in sea water due to Net Primary Production by Diazotrophs", - "comment": "Primary (organic carbon) production by the diazotrophic phytoplankton component alone", - "dimensions": "longitude latitude olevel time", - "out_name": "dpocdtdiaz", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "dpocdtpico": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production_by_picophytoplankton", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Tendency of Mole Concentration of Organic Carbon in sea water due to Net Primary Production by Picophytoplankton", - "comment": "Primary (organic carbon) production by the picophytoplankton (<2 um) component alone", - "dimensions": "longitude latitude olevel time", - "out_name": "dpocdtpico", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "exparag": { - "modeling_realm": "ocnBgChem", - 
"standard_name": "sinking_mole_flux_of_aragonite_expressed_as_carbon_in_sea_water", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Sinking Aragonite Flux", - "comment": "Downward flux of Aragonite", - "dimensions": "longitude latitude olevel time", - "out_name": "exparag", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "expc": { - "modeling_realm": "ocnBgChem", - "standard_name": "sinking_mole_flux_of_particulate_organic_matter_expressed_as_carbon_in_sea_water", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Sinking Particulate Organic Carbon Flux", - "comment": "Downward flux of particulate organic carbon", - "dimensions": "longitude latitude olevel time", - "out_name": "expc", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "expcalc": { - "modeling_realm": "ocnBgChem", - "standard_name": "sinking_mole_flux_of_calcite_expressed_as_carbon_in_sea_water", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Sinking Calcite Flux", - "comment": "Downward flux of Calcite", - "dimensions": "longitude latitude olevel time", - "out_name": "expcalc", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "expfe": { - "modeling_realm": "ocnBgChem", - "standard_name": "sinking_mole_flux_of_particulate_iron_in_sea_water", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Sinking Particulate Iron Flux", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "expfe", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "expn": { - "modeling_realm": "ocnBgChem", - "standard_name": "sinking_mole_flux_of_particulate_organic_nitrogen_in_sea_water", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Sinking Particulate Organic Nitrogen Flux", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "expn", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "expp": { - "modeling_realm": "ocnBgChem", - "standard_name": "sinking_mole_flux_of_particulate_organic_phosphorus_in_sea_water", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Sinking Particulate Organic Phosphorus Flux", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "expp", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "expsi": { - "modeling_realm": "ocnBgChem", - "standard_name": "sinking_mole_flux_of_particulate_silicon_in_sea_water", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": 
"Sinking Particulate Silica Flux", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "expsi", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fediss": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_dissolved_iron_in_sea_water_due_to_dissolution_from_inorganic_particles", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Particle Source of Dissolved Iron", - "comment": "Dissolution, remineralization and desorption of iron back to the dissolved phase", - "dimensions": "longitude latitude olevel time", - "out_name": "fediss", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fescav": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_dissolved_iron_in_sea_water_due_to_scavenging_by_inorganic_particles", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Nonbiogenic Iron Scavenging", - "comment": "Dissolved Fe removed through nonbiogenic scavenging onto particles", - "dimensions": "longitude latitude olevel time", - "out_name": "fescav", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fg13co2": { - "modeling_realm": "ocnBgChem", - "standard_name": "air_sea_flux_of_13CO2", - "units": "mol m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Downward Flux of Abiotic 13CO2", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "fg13co2", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fg14co2abio": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_downward_mass_flux_of_abiotic_14_carbon_dioxide_expressed_as_carbon", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Downward Flux of Abiotic 14CO2", - "comment": "Gas exchange flux of abiotic 14CO2 (positive into ocean)", - "dimensions": "longitude latitude time", - "out_name": "fg14co2abio", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fgco2": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Downward Flux of Total CO2", - "comment": "Gas exchange flux of CO2 (positive into ocean)", - "dimensions": "longitude latitude time", - "out_name": "fgco2", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fgco2abio": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_downward_mass_flux_of_abiotic_carbon_dioxide_expressed_as_carbon", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Downward Flux of Abiotic CO2", - "comment": "Gas exchange flux of 
abiotic CO2 (positive into ocean)", - "dimensions": "longitude latitude time", - "out_name": "fgco2abio", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "fgco2nat": { - "modeling_realm": "ocnBgChem", - "standard_name": "surface_downward_mass_flux_of_natural_carbon_dioxide_expressed_as_carbon", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Surface Downward Flux of Natural CO2", - "comment": "Gas exchange flux of natural CO2 (positive into ocean)", - "dimensions": "longitude latitude time", - "out_name": "fgco2nat", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "graz": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_grazing_of_phytoplankton", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Total Grazing of Phytoplankton by Zooplankton", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "graz", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "nh4": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_ammonium_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Dissolved Ammonium Concentration", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "nh4", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "no3": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_nitrate_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Dissolved Nitrate Concentration", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "no3", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "o2": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_dissolved_molecular_oxygen_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Dissolved Oxygen Concentration", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "o2", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "o2sat": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_dissolved_molecular_oxygen_in_sea_water_at_saturation", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Dissolved Oxygen Concentration at Saturation", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "o2sat", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ocontempdiff": { - 
"modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_conservative_temperature_expressed_as_heat_content_due_to_parameterized_dianeutral_mixing", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water conservative temperature expressed as heat content due to parameterized dianeutral mixing", - "comment": "Tendency of heat content for a grid cell from parameterized dianeutral mixing. Reported only for models that use conservative temperature as prognostic field.", - "dimensions": "longitude latitude olevel time", - "out_name": "ocontempdiff", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ocontempmint": { - "modeling_realm": "ocean", - "standard_name": "integral_wrt_depth_of_product_of_sea_water_density_and_conservative_temperature", - "units": "degC kg m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "integral wrt depth of product of sea water density and conservative temperature", - "comment": "Full column sum of density*cell thickness*conservative temperature. If the model is Boussinesq, then use Boussinesq reference density for the density factor.", - "dimensions": "longitude latitude time", - "out_name": "ocontempmint", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ocontemppadvect": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_conservative_temperature_expressed_as_heat_content_due_to_parameterized_eddy_advection", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water conservative temperature expressed as heat content due to parameterized eddy advection", - "comment": "Tendency of heat content for a grid cell from parameterized eddy advection (any form of eddy advection). Reported only for models that use conservative temperature as prognostic field.", - "dimensions": "longitude latitude olevel time", - "out_name": "ocontemppadvect", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ocontemppmdiff": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_conservative_temperature_expressed_as_heat_content_due_to_parameterized_mesoscale_diffusion", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water conservative temperature expressed as heat content due to parameterized mesoscale diffusion", - "comment": "Tendency of heat content for a grid cell from parameterized mesoscale eddy diffusion. 
Reported only for models that use conservative temperature as prognostic field.", - "dimensions": "longitude latitude olevel time", - "out_name": "ocontemppmdiff", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ocontemppsmadvect": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_conservative_temperature_expressed_as_heat_content_due_to_parameterized_submesoscale_advection", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water conservative temperature expressed as heat content due to parameterized submesoscale advection", - "comment": "Tendency of heat content for a grid cell from parameterized submesoscale eddy advection. Reported only for models that use conservative temperature as prognostic field.", - "dimensions": "longitude latitude olevel time", - "out_name": "ocontemppsmadvect", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ocontemprmadvect": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_conservative_temperature_expressed_as_heat_content_due_to_residual_mean_advection", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water conservative temperature expressed as heat content due to residual mean (sum of Eulerian + parameterized) advection", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "ocontemprmadvect", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ocontemptend": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_conservative_temperature_expressed_as_heat_content", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water conservative temperature expressed as heat content", - "comment": "Tendency of heat content for a grid cell from all processes. Reported only for models that use conservative temperature as prognostic field.", - "dimensions": "longitude latitude olevel time", - "out_name": "ocontemptend", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "opottempdiff": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_potential_temperature_expressed_as_heat_content_due_to_parameterized_dianeutral_mixing", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water potential temperature expressed as heat content due to parameterized dianeutral mixing", - "comment": "Tendency of heat content for a grid cell from parameterized dianeutral mixing. 
Reported only for models that use potential temperature as prognostic field.", - "dimensions": "longitude latitude olevel time", - "out_name": "opottempdiff", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "opottempmint": { - "modeling_realm": "ocean", - "standard_name": "integral_wrt_depth_of_product_of_sea_water_density_and_potential_temperature", - "units": "degC kg m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "integral wrt depth of product of sea water density and potential temperature", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "opottempmint", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "opottemppadvect": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_potential_temperature_expressed_as_heat_content_due_to_parameterized_eddy_advection", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water potential temperature expressed as heat content due to parameterized eddy advection", - "comment": "Tendency of heat content for a grid cell from parameterized eddy advection (any form of eddy advection). Reported only for models that use potential temperature as prognostic field.", - "dimensions": "longitude latitude olevel time", - "out_name": "opottemppadvect", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "opottemppmdiff": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_potential_temperature_expressed_as_heat_content_due_to_parameterized_mesoscale_diffusion", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water potential temperature expressed as heat content due to parameterized mesoscale diffusion", - "comment": "Tendency of heat content for a grid cell from parameterized mesoscale eddy diffusion. Reported only for models that use potential temperature as prognostic field.", - "dimensions": "longitude latitude olevel time", - "out_name": "opottemppmdiff", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "opottemppsmadvect": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_potential_temperature_expressed_as_heat_content_due_to_parameterized_submesoscale_advection", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water potential temperature expressed as heat content due to parameterized submesoscale advection", - "comment": "Tendency of heat content for a grid cell from parameterized submesoscale eddy advection. 
Reported only for models that use potential temperature as prognostic field.", - "dimensions": "longitude latitude olevel time", - "out_name": "opottemppsmadvect", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "opottemprmadvect": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_potential_temperature_expressed_as_heat_content_due_to_residual_mean_advection", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water potential temperature expressed as heat content due to residual mean advection", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "opottemprmadvect", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "opottemptend": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_potential_temperature_expressed_as_heat_content", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water potential temperature expressed as heat content", - "comment": "Tendency of heat content for a grid cell from all processes. Reported only for models that use potential temperature as prognostic field.", - "dimensions": "longitude latitude olevel time", - "out_name": "opottemptend", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "osaltdiff": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_salinity_expressed_as_salt_content_due_to_parameterized_dianeutral_mixing", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water salinity expressed as salt content due to parameterized dianeutral mixing", - "comment": "Tendency of salt content for a grid cell from parameterized dianeutral mixing.", - "dimensions": "longitude latitude olevel time", - "out_name": "osaltdiff", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "osaltpadvect": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_salinity_expressed_as_salt_content_due_to_parameterized_eddy_advection", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water salinity expressed as salt content due to parameterized eddy advection", - "comment": "Tendency of salt content for a grid cell from parameterized eddy advection (any form of eddy advection).", - "dimensions": "longitude latitude olevel time", - "out_name": "osaltpadvect", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "osaltpmdiff": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_salinity_expressed_as_salt_content_due_to_parameterized_mesoscale_diffusion", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water salinity expressed as salt content due to parameterized mesoscale diffusion", - "comment": "Tendency of 
salt content for a grid cell from parameterized mesoscale eddy diffusion.", - "dimensions": "longitude latitude olevel time", - "out_name": "osaltpmdiff", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "osaltpsmadvect": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_salinity_expressed_as_salt_content_due_to_parameterized_submesoscale_advection", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water salinity expressed as salt content due to parameterized submesoscale advection", - "comment": "Tendency of salt content for a grid cell from parameterized submesoscale eddy advection.", - "dimensions": "longitude latitude olevel time", - "out_name": "osaltpsmadvect", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "osaltrmadvect": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_salinity_expressed_as_salt_content_due_to_residual_mean_advection", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water salinity expressed as salt content due to residual mean advection", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "osaltrmadvect", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "osalttend": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_sea_water_salinity_expressed_as_salt_content", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "tendency of sea water salinity expressed as salt content", - "comment": "Tendency of salt content for a grid cell from all processes.", - "dimensions": "longitude latitude olevel time", - "out_name": "osalttend", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "parag": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_aragonite_expressed_as_carbon_in_sea_water_due_to_biological_production", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Aragonite Production", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "parag", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pbfe": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_iron_in_sea_water_due_to_biological_production", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Biogenic Iron Production", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "pbfe", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pbsi": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_silicon_in_sea_water_due_to_biological_production", - "units": "mol m-3 s-1", - 
"cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Biogenic Silica Production", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "pbsi", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pcalc": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_calcite_expressed_as_carbon_in_sea_water_due_to_biological_production", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Calcite Production", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "pcalc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pdi": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production_by_diatoms", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Diatom Primary Carbon Production", - "comment": "Primary (organic carbon) production by the diatom component alone", - "dimensions": "longitude latitude olevel time", - "out_name": "pdi", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ph": { - "modeling_realm": "ocnBgChem", - "standard_name": "sea_water_ph_reported_on_total_scale", - "units": "1.0", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "pH", - "comment": "negative log of hydrogen ion concentration with the concentration expressed as mol H kg-1.", - "dimensions": "longitude latitude olevel time", - "out_name": "ph", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phabio": { - "modeling_realm": "ocnBgChem", - "standard_name": "sea_water_ph_reported_on_total_scale_due_to_abiotic_component", - "units": "1.0", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Abiotic pH", - "comment": "negative log10 of hydrogen ion concentration with the concentration expressed as mol H kg-1 (abiotic component)..", - "dimensions": "longitude latitude olevel time", - "out_name": "phabio", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phnat": { - "modeling_realm": "ocnBgChem", - "standard_name": "sea_water_ph_reported_on_total_scale_due_to_natural_component", - "units": "1.0", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Natural pH", - "comment": "negative log10 of hydrogen ion concentration with the concentration expressed as mol H kg-1.", - "dimensions": "longitude latitude olevel time", - "out_name": "phnat", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phyc": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_phytoplankton_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean 
where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Phytoplankton Carbon Concentration", - "comment": "sum of phytoplankton carbon component concentrations. In most (all?) cases this is the sum of phycdiat and phycmisc (i.e., 'Diatom Carbon Concentration' and 'Non-Diatom Phytoplankton Carbon Concentration'", - "dimensions": "longitude latitude olevel time", - "out_name": "phyc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phycalc": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_calcareous_phytoplankton_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Mole Concentration of Calcareous Phytoplankton expressed as Carbon in sea water", - "comment": "carbon concentration from calcareous (calcite-producing) phytoplankton component alone", - "dimensions": "longitude latitude olevel time", - "out_name": "phycalc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phydiat": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_diatoms_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Mole Concentration of Diatoms expressed as Carbon in sea water", - "comment": "carbon from the diatom phytoplankton component concentration alone", - "dimensions": "longitude latitude olevel time", - "out_name": "phydiat", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phydiaz": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_diazotrophs_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Mole Concentration of Diazotrophs Expressed as Carbon in sea water", - "comment": "carbon concentration from the diazotrophic phytoplankton component alone", - "dimensions": "longitude latitude olevel time", - "out_name": "phydiaz", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phyfe": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_phytoplankton_expressed_as_iron_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Mole Concentration of Total Phytoplankton expressed as Iron in sea water", - "comment": "sum of phytoplankton iron component concentrations", - "dimensions": "longitude latitude olevel time", - "out_name": "phyfe", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phymisc": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_miscellaneous_phytoplankton_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Mole Concentration of Miscellaneous Phytoplankton expressed as Carbon in sea water", - "comment": "carbon concentration 
from additional phytoplankton component alone", - "dimensions": "longitude latitude olevel time", - "out_name": "phymisc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phyn": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_phytoplankton_expressed_as_nitrogen_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Mole Concentration of Total Phytoplankton expressed as Nitrogen in sea water", - "comment": "sum of phytoplankton nitrogen component concentrations", - "dimensions": "longitude latitude olevel time", - "out_name": "phyn", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phyp": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_phytoplankton_expressed_as_phosphorus_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Mole Concentration of Total Phytoplankton expressed as Phosphorus in sea water", - "comment": "sum of phytoplankton phosphorus components", - "dimensions": "longitude latitude olevel time", - "out_name": "phyp", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "phypico": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_picophytoplankton_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Mole Concentration of Picophytoplankton expressed as Carbon in sea water", - "comment": "carbon concentration from the picophytoplankton (<2 um) component alone", - "dimensions": "longitude latitude olevel time", - "out_name": "phypico", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "physi": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_phytoplankton_expressed_as_silicon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Mole Concentration of Total Phytoplankton expressed as Silicon in sea water", - "comment": "sum of phytoplankton silica component concentrations", - "dimensions": "longitude latitude olevel time", - "out_name": "physi", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pnitrate": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_nitrate_utilization", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Primary Carbon Production by Phytoplankton due to Nitrate Uptake Alone", - "comment": "Primary (organic carbon) production by phytoplankton due to nitrate uptake alone", - "dimensions": "longitude latitude olevel time", - "out_name": "pnitrate", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "po4": { - "modeling_realm": 
"ocnBgChem", - "standard_name": "mole_concentration_of_dissolved_inorganic_phosphorus_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Total Dissolved Inorganic Phosphorus Concentration", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "po4", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pon": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_particulate_organic_matter_expressed_as_nitrogen_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Mole Concentration of Particulate Organic Matter expressed as Nitrogen in sea water", - "comment": "sum of particulate organic nitrogen component concentrations", - "dimensions": "longitude latitude olevel time", - "out_name": "pon", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pop": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_particulate_organic_matter_expressed_as_phosphorus_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Mole Concentration of Particulate Organic Matter expressed as Phosphorus in sea water", - "comment": "sum of particulate organic phosphorus component concentrations", - "dimensions": "longitude latitude olevel time", - "out_name": "pop", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pp": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Primary Carbon Production by Total Phytoplankton", - "comment": "total primary (organic carbon) production by phytoplankton", - "dimensions": "longitude latitude olevel time", - "out_name": "pp", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ppcalc": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production_by_calcareous_phytoplankton", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Net Primary Mole Productivity of Carbon by Calcareous Phytoplankton", - "comment": "Primary (organic carbon) production by the calcite-producing phytoplankton component alone", - "dimensions": "longitude latitude olevel time", - "out_name": "ppcalc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ppdiat": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production_by_diatoms", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": 
"area: areacello volume: volcello", - "long_name": "Net Primary Organic Carbon Production by Diatoms", - "comment": "Primary (organic carbon) production by the diatom component alone", - "dimensions": "longitude latitude olevel time", - "out_name": "ppdiat", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ppdiaz": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production_by_diazotrophs", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Net Primary Mole Productivity of Carbon by Diazotrophs", - "comment": "Primary (organic carbon) production by the diazotrophic phytoplankton component alone", - "dimensions": "longitude latitude olevel time", - "out_name": "ppdiaz", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ppmisc": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production_by_miscellaneous_phytoplankton", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Net Primary Organic Carbon Production by Other Phytoplankton", - "comment": "Primary (organic carbon) production by other phytoplankton components alone", - "dimensions": "longitude latitude olevel time", - "out_name": "ppmisc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pppico": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production_by_picophytoplankton", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Net Primary Mole Productivity of Carbon by Picophytoplankton", - "comment": "Primary (organic carbon) production by the picophytoplankton (<2 um) component alone", - "dimensions": "longitude latitude olevel time", - "out_name": "pppico", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "remoc": { - "modeling_realm": "ocnBgChem", - "standard_name": "tendency_of_mole_concentration_of_organic_matter_expressed_as_carbon_in_sea_water_due_to_remineralization", - "units": "mol m-3 s-1", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Remineralization of Organic Carbon", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "remoc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsdoabsorb": { - "modeling_realm": "ocean", - "standard_name": "net_rate_of_absorption_of_shortwave_energy_in_ocean_layer", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "net rate of absorption of shortwave energy in ocean layer", - "comment": "", - "dimensions": "longitude latitude olevel time", - 
"out_name": "rsdoabsorb", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sf6": { - "modeling_realm": "ocean", - "standard_name": "mole_concentration_of_sulfur_hexafluoride_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Mole Concentration of SF6 in sea water", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "sf6", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "si": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_dissolved_inorganic_silicon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Total Dissolved Inorganic Silicon Concentration", - "comment": "", - "dimensions": "longitude latitude olevel time", - "out_name": "si", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "somint": { - "modeling_realm": "ocean", - "standard_name": "integral_wrt_depth_of_product_of_sea_water_density_and_prognostic_salinity", - "units": "1e-3 kg m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "integral wrt depth of product of sea water density and salinity", - "comment": "Full column sum of density*cell thickness*prognostic salinity. If the model is Boussinesq, then use Boussinesq reference density for the density factor.", - "dimensions": "longitude latitude time", - "out_name": "somint", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "talk": { - "modeling_realm": "ocnBgChem", - "standard_name": "sea_water_alkalinity_expressed_as_mole_equivalent", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Total Alkalinity", - "comment": "total alkalinity equivalent concentration (including carbonate, nitrogen, silicate, and borate components)", - "dimensions": "longitude latitude olevel time", - "out_name": "talk", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "talknat": { - "modeling_realm": "ocnBgChem", - "standard_name": "seawater_alkalinity_expressed_as_mole_equivalent_natural_component", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Natural Total Alkalinity", - "comment": "total alkalinity equivalent concentration (including carbonate, borate, phosphorus, silicon, and nitrogen components) at preindustrial atmospheric xCO2", - "dimensions": "longitude latitude olevel time", - "out_name": "talknat", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tnkebto": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_ocean_eddy_kinetic_energy_content_due_to_bolus_transport", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "tendency of ocean eddy kinetic energy content due to parameterized eddy advection", - 
"comment": "Depth integrated impacts on kinetic energy arising from parameterized eddy-induced advection. For CMIP5, this diagnostic was 3d, whereas the CMIP6 depth integrated diagnostic is sufficient for many purposes and reduces archive requirements.", - "dimensions": "longitude latitude time", - "out_name": "tnkebto", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tnpeo": { - "modeling_realm": "ocean", - "standard_name": "tendency_of_ocean_potential_energy_content", - "units": "W m-2", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "tendency of ocean potential energy content", - "comment": "Rate that work is done against vertical stratification, as measured by the vertical heat and salt diffusivity. Report here as depth integrated two-dimensional field.", - "dimensions": "longitude latitude time", - "out_name": "tnpeo", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zmeso": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_mesozooplankton_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Mole Concentration of Mesozooplankton expressed as Carbon in sea water", - "comment": "carbon concentration from mesozooplankton (20-200 um) component alone", - "dimensions": "longitude latitude olevel time", - "out_name": "zmeso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zmicro": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_microzooplankton_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Mole Concentration of Microzooplankton expressed as Carbon in sea water", - "comment": "carbon concentration from the microzooplankton (<20 um) component alone", - "dimensions": "longitude latitude olevel time", - "out_name": "zmicro", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zmisc": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_miscellaneous_zooplankton_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Mole Concetration of Other Zooplankton expressed as Carbon in sea water", - "comment": "carbon from additional zooplankton component concentrations alone (e.g. Micro, meso). 
Since the models all have different numbers of components, this variable has been included to provide a check for intercomparison between models since some phytoplankton groups are supersets.", - "dimensions": "longitude latitude olevel time", - "out_name": "zmisc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zooc": { - "modeling_realm": "ocnBgChem", - "standard_name": "mole_concentration_of_zooplankton_expressed_as_carbon_in_sea_water", - "units": "mol m-3", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello volume: volcello", - "long_name": "Zooplankton Carbon Concentration", - "comment": "sum of zooplankton carbon component concentrations", - "dimensions": "longitude latitude olevel time", - "out_name": "zooc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_SIday.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_SIday.json deleted file mode 100644 index 289befec99..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_SIday.json +++ /dev/null @@ -1,171 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table SIday", - "realm": "seaIce", - "frequency": "day", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "1.00000", - "generic_levels": "", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "siconc": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_area_fraction", - "units": "%", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Sea Ice Area Fraction", - "comment": "Area fraction of grid cell covered by sea ice", - "dimensions": "longitude latitude time typesi", - "out_name": "siconc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siconco": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_area_fraction", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Sea Ice Area Fraction", - "comment": "Area fraction of grid cell covered by sea ice", - "dimensions": "longitude latitude time typesi", - "out_name": "siconco", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sisnthick": { - "modeling_realm": "seaIce", - "standard_name": "surface_snow_thickness", - "units": "m", - "cell_methods": "area: mean where snow over sea_ice area: time: mean where sea_ice", - "cell_measures": "area: areacella", - "long_name": "Snow thickness", - "comment": "Actual thickness of snow (snow volume divided by snow-covered area)", - "dimensions": "longitude latitude time", - "out_name": "sisnthick", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sispeed": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_speed", - "units": "m s-1", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Sea-ice speed", - "comment": "Speed of ice (i.e. 
mean absolute velocity) to account for back-and-forth movement of the ice", - "dimensions": "longitude latitude time", - "out_name": "sispeed", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sitemptop": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_surface_temperature", - "units": "K", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Surface temperature of sea ice", - "comment": "Report surface temperature of snow where snow covers the sea ice.", - "dimensions": "longitude latitude time", - "out_name": "sitemptop", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sithick": { - "modeling_realm": "seaIce ocean", - "standard_name": "sea_ice_thickness", - "units": "m", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Sea Ice Thickness", - "comment": "Actual (floe) thickness of sea ice (NOT volume divided by grid area as was done in CMIP5)", - "dimensions": "longitude latitude time", - "out_name": "sithick", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sitimefrac": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_time_fraction", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Fraction of time steps with sea ice", - "comment": "Fraction of time steps of the averaging period during which sea ice is present (siconc >0 ) in a grid cell", - "dimensions": "longitude latitude time", - "out_name": "sitimefrac", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siu": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_x_velocity", - "units": "m s-1", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "--MODEL", - "long_name": "X-component of sea ice velocity", - "comment": "The x-velocity of ice on native model grid", - "dimensions": "longitude latitude time", - "out_name": "siu", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siv": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_y_velocity", - "units": "m s-1", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "--MODEL", - "long_name": "Y-component of sea ice velocity", - "comment": "The y-velocity of ice on native model grid", - "dimensions": "longitude latitude time", - "out_name": "siv", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_SImon.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_SImon.json deleted file mode 100644 index ecfcd3c635..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_SImon.json +++ /dev/null @@ -1,1548 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table SImon", - "realm": "seaIce", - "frequency": "mon", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "30.00000", - "generic_levels": "", - "mip_era": "CMIP6", - "Conventions": 
"CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "siage": { - "modeling_realm": "seaIce", - "standard_name": "age_of_sea_ice", - "units": "s", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Age of sea ice", - "comment": "Age of sea ice", - "dimensions": "longitude latitude time", - "out_name": "siage", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sialb": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_albedo", - "units": "1.0", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Sea-ice or snow albedo", - "comment": "Mean surface albedo of entire ice-covered part of grid cell", - "dimensions": "longitude latitude time", - "out_name": "sialb", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siareaacrossline": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_area_transport_across_line", - "units": "m2 s-1", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Sea ice area flux through straits", - "comment": "net (sum of transport in all directions) sea ice area transport through the following four passages, positive into the Arctic Ocean 1. Fram Strait = (11.5W,81.3N to (10.5E,79.6N) 2. Canadian Archipelego = (128.2W,70.6N) to (59.3W,82.1N) 3. Barents opening = (16.8E,76.5N) to (19.2E,70.2N) 4. Bering Strait = (171W,66.2N) to (166W,65N)", - "dimensions": "siline time", - "out_name": "siareaacrossline", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siarean": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_area", - "units": "1e6 km2", - "cell_methods": "area: time: mean", - "cell_measures": "", - "long_name": "Sea ice area North", - "comment": "total area of sea ice in the Northern hemisphere", - "dimensions": "time", - "out_name": "siarean", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siareas": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_area", - "units": "1e6 km2", - "cell_methods": "area: time: mean", - "cell_measures": "", - "long_name": "Sea ice area South", - "comment": "total area of sea ice in the Southern hemisphere", - "dimensions": "time", - "out_name": "siareas", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sicompstren": { - "modeling_realm": "seaIce", - "standard_name": "compressive_strength_of_sea_ice", - "units": "N m-1", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Compressive sea ice strength", - "comment": "Computed strength of the ice pack, defined as the energy (J m-2) dissipated per unit area removed from the ice pack under compression, and assumed proportional to the change in potential energy caused by ridging. 
For Hibler-type models, this is P (= P*hexp(-C(1-A)))", - "dimensions": "longitude latitude time", - "out_name": "sicompstren", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siconc": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_area_fraction", - "units": "%", - "cell_methods": "area: mean where sea time: mean", - "cell_measures": "area: areacello", - "long_name": "Sea Ice Area Fraction", - "comment": "Area fraction of grid cell covered by sea ice", - "dimensions": "longitude latitude time typesi", - "out_name": "siconc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siconco": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_area_fraction", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Sea Ice Area Fraction", - "comment": "Area fraction of grid cell covered by sea ice", - "dimensions": "longitude latitude time typesi", - "out_name": "siconco", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sidconcdyn": { - "modeling_realm": "seaIce", - "standard_name": "tendency_of_sea_ice_area_fraction_due_to_dynamics", - "units": "s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "sea-ice area fraction change from dynamics", - "comment": "Total change in sea-ice area fraction through dynamics-related processes (advection, divergence...)", - "dimensions": "longitude latitude time", - "out_name": "sidconcdyn", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sidconcth": { - "modeling_realm": "seaIce", - "standard_name": "tendency_of_sea_ice_area_fraction_due_to_thermodynamics", - "units": "s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "sea-ice area fraction change from thermodynamics", - "comment": "Total change in sea-ice area fraction through thermodynamic processes", - "dimensions": "longitude latitude time", - "out_name": "sidconcth", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sidivvel": { - "modeling_realm": "seaIce", - "standard_name": "divergence_of_sea_ice_velocity", - "units": "s-1", - "cell_methods": "area: mean where sea_ice (comment: mask=siconc) time: point", - "cell_measures": "area: areacella", - "long_name": "Divergence of the sea-ice velocity field", - "comment": "Divergence of sea-ice velocity field (first shear strain invariant)", - "dimensions": "longitude latitude time1", - "out_name": "sidivvel", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sidmassdyn": { - "modeling_realm": "seaIce", - "standard_name": "tendency_of_sea_ice_amount_due_to_dynamics", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "sea-ice mass change from dynamics", - "comment": "Total change in sea-ice mass through dynamics-related processes (advection,...) 
divided by grid-cell area", - "dimensions": "longitude latitude time", - "out_name": "sidmassdyn", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sidmassevapsubl": { - "modeling_realm": "seaIce", - "standard_name": "water_evaporation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "sea-ice mass change through evaporation and sublimation", - "comment": "The rate of change of sea-ice mass through evaporation and sublimation divided by grid-cell area", - "dimensions": "longitude latitude time", - "out_name": "sidmassevapsubl", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sidmassgrowthbot": { - "modeling_realm": "seaIce", - "standard_name": "tendency_of_sea_ice_amount_due_to_congelation_ice_accumulation", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "sea-ice mass change through basal growth", - "comment": "The rate of change of sea ice mass due to vertical growth of existing sea ice at its base divided by grid-cell area.", - "dimensions": "longitude latitude time", - "out_name": "sidmassgrowthbot", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sidmassgrowthwat": { - "modeling_realm": "seaIce", - "standard_name": "tendency_of_sea_ice_amount_due_to_freezing_in_open_water", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "sea-ice mass change through growth in supercooled open water (aka frazil)", - "comment": "The rate of change of sea ice mass due to sea ice formation in supercooled water (often through frazil formation) divided by grid-cell area.
Together, sidmassgrowthwat and sidmassgrowthbot should give total ice growth", - "dimensions": "longitude latitude time", - "out_name": "sidmassgrowthwat", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sidmasslat": { - "modeling_realm": "seaIce", - "standard_name": "tendency_of_sea_ice_amount_due_to_lateral_melting", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Lateral sea ice melt rate", - "comment": "The rate of change of sea ice mass through lateral melting divided by grid-cell area (report 0 if not explicitly calculated thermodynamically)", - "dimensions": "longitude latitude time", - "out_name": "sidmasslat", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sidmassmeltbot": { - "modeling_realm": "seaIce", - "standard_name": "tendency_of_sea_ice_amount_due_to_basal_melting", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "sea-ice mass change through bottom melting", - "comment": "The rate of change of sea ice mass through melting at the ice bottom divided by grid-cell area", - "dimensions": "longitude latitude time", - "out_name": "sidmassmeltbot", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sidmassmelttop": { - "modeling_realm": "seaIce", - "standard_name": "tendency_of_sea_ice_amount_due_to_surface_melting", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "sea-ice mass change through surface melting", - "comment": "The rate of change of sea ice mass through melting at the ice surface divided by grid-cell area", - "dimensions": "longitude latitude time", - "out_name": "sidmassmelttop", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sidmasssi": { - "modeling_realm": "seaIce", - "standard_name": "tendency_of_sea_ice_amount_due_to_snow_conversion", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "sea-ice mass change through snow-to-ice conversion", - "comment": "The rate of change of sea ice mass due to transformation of snow to sea ice divided by grid-cell area", - "dimensions": "longitude latitude time", - "out_name": "sidmasssi", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sidmassth": { - "modeling_realm": "seaIce", - "standard_name": "tendency_of_sea_ice_amount_due_to_thermodynamics", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "sea-ice mass change from thermodynamics", - "comment": "Total change in sea-ice mass from thermodynamic processes divided by grid-cell area", - "dimensions": "longitude latitude time", - "out_name": "sidmassth", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sidmasstranx": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_x_transport", - "units": "kg s-1", - "cell_methods": "time: mean", - "cell_measures": "--MODEL", - "long_name": "X-component of sea-ice mass transport", - "comment": "Includes transport of both sea ice and snow by 
advection", - "dimensions": "longitude latitude time", - "out_name": "sidmasstranx", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sidmasstrany": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_y_transport", - "units": "kg s-1", - "cell_methods": "time: mean", - "cell_measures": "--MODEL", - "long_name": "Y-component of sea-ice mass transport", - "comment": "Includes transport of both sea ice and snow by advection", - "dimensions": "longitude latitude time", - "out_name": "sidmasstrany", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sidragbot": { - "modeling_realm": "seaIce", - "standard_name": "surface_drag_coefficient_for_momentum_in_water", - "units": "1.0", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Ocean drag coefficient", - "comment": "Oceanic drag coefficient that is used to calculate the oceanic momentum drag on sea ice", - "dimensions": "longitude latitude time", - "out_name": "sidragbot", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sidragtop": { - "modeling_realm": "seaIce", - "standard_name": "surface_drag_coefficient_for_momentum_in_air", - "units": "1.0", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Atmospheric drag coefficient", - "comment": "Atmospheric drag coefficient that is used to calculate the atmospheric momentum drag on sea ice", - "dimensions": "longitude latitude time", - "out_name": "sidragtop", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siextentn": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_extent", - "units": "1e6 km2", - "cell_methods": "area: time: mean", - "cell_measures": "", - "long_name": "Sea ice extent North", - "comment": "Total area of all Northern-Hemisphere grid cells that are covered by at least 15 % areal fraction of sea ice", - "dimensions": "time", - "out_name": "siextentn", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siextents": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_extent", - "units": "1e6 km2", - "cell_methods": "area: time: mean", - "cell_measures": "", - "long_name": "Sea ice extent South", - "comment": "Total area of all Southern-Hemisphere grid cells that are covered by at least 15 % areal fraction of sea ice", - "dimensions": "time", - "out_name": "siextents", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sifb": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_freeboard", - "units": "m", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Sea-ice freeboard", - "comment": "Mean height of sea-ice surface (=snow-ice interface when snow covered) above sea level", - "dimensions": "longitude latitude time", - "out_name": "sifb", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siflcondbot": { - "modeling_realm": "seaIce", - "standard_name": "conductive_heat_flux_at_sea_ice_bottom", 
- "units": "W m-2", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Net conductive heat fluxes in ice at the bottom", - "comment": "the net heat conduction flux at the ice base", - "dimensions": "longitude latitude time", - "out_name": "siflcondbot", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siflcondtop": { - "modeling_realm": "seaIce", - "standard_name": "conductive_heat_flux_at_sea_ice_surface", - "units": "W m-2", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Net conductive heat flux in ice at the surface", - "comment": "the net heat conduction flux at the ice surface", - "dimensions": "longitude latitude time", - "out_name": "siflcondtop", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siflfwbot": { - "modeling_realm": "seaIce", - "standard_name": "freshwater_flux_from_ice", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Freshwater flux from sea ice", - "comment": "Total flux of fresh water from water into sea ice divided by grid-cell area; This flux is negative during ice growth (liquid water mass decreases, hence upward flux of freshwater), positive during ice melt (liquid water mass increases, hence downward flux of freshwater)", - "dimensions": "longitude latitude time", - "out_name": "siflfwbot", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siflfwdrain": { - "modeling_realm": "seaIce", - "standard_name": "freshwater_flux_from_ice_surface", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Freshwater flux from sea-ice surface", - "comment": "Total flux of fresh water from sea-ice surface into underlying ocean. This combines both surface melt water that drains directly into the ocean and the drainage of surface melt pond. 
By definition, this flux is always positive.", - "dimensions": "longitude latitude time", - "out_name": "siflfwdrain", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sifllatstop": { - "modeling_realm": "seaIce", - "standard_name": "surface_upward_latent_heat_flux", - "units": "W m-2", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Net latent heat flux over sea ice", - "comment": "the net latent heat flux over sea ice", - "dimensions": "longitude latitude time", - "out_name": "sifllatstop", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sifllwdtop": { - "modeling_realm": "seaIce", - "standard_name": "surface_downwelling_longwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Downwelling longwave flux over sea ice", - "comment": "the downwelling longwave flux over sea ice (always positive)", - "dimensions": "longitude latitude time", - "out_name": "sifllwdtop", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sifllwutop": { - "modeling_realm": "seaIce", - "standard_name": "surface_upwelling_longwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Upward longwave flux over sea ice", - "comment": "the upward longwave flux over sea ice (always negative)", - "dimensions": "longitude latitude time", - "out_name": "sifllwutop", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siflsaltbot": { - "modeling_realm": "seaIce", - "standard_name": "salt_flux_from_ice", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Salt flux from sea ice", - "comment": "Total flux of salt from water into sea ice divided by grid-cell area; salt flux is upward (negative) during ice growth when salt is embedded into the ice and downward (positive) during melt when salt from sea ice is again released to the ocean", - "dimensions": "longitude latitude time", - "out_name": "siflsaltbot", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siflsenstop": { - "modeling_realm": "seaIce", - "standard_name": "surface_upward_sensible_heat_flux", - "units": "W m-2", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Net sensible heat flux over sea ice", - "comment": "the net sensible heat flux over sea ice", - "dimensions": "longitude latitude time", - "out_name": "siflsenstop", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siflsensupbot": { - "modeling_realm": "seaIce", - "standard_name": "ice_ocean_heat_flux", - "units": "W m-2", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Net sensible heat flux under sea ice", - "comment": "the net sensible heat flux under sea ice from the ocean", - "dimensions": 
"longitude latitude time", - "out_name": "siflsensupbot", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siflswdbot": { - "modeling_realm": "seaIce", - "standard_name": "bottom_downwelling_shortwave_flux_into_ocean", - "units": "W m-2", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Downwelling shortwave flux under sea ice", - "comment": "the downwelling shortwave flux underneath sea ice (always positive)", - "dimensions": "longitude latitude time", - "out_name": "siflswdbot", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siflswdtop": { - "modeling_realm": "seaIce", - "standard_name": "surface_downwelling_shortwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Downwelling shortwave flux over sea ice", - "comment": "the downwelling shortwave flux over sea ice (always positive by sign convention)", - "dimensions": "longitude latitude time", - "out_name": "siflswdtop", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siflswutop": { - "modeling_realm": "seaIce", - "standard_name": "surface_upwelling_shortwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Upward shortwave flux over sea ice", - "comment": "the upward shortwave flux over sea ice (always negative)", - "dimensions": "longitude latitude time", - "out_name": "siflswutop", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siforcecoriolx": { - "modeling_realm": "seaIce", - "standard_name": "coriolis_force_on_sea_ice_x", - "units": "N m-2", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "--MODEL", - "long_name": "Coriolis force term in force balance (x-component)", - "comment": "X-component of force on sea ice caused by coriolis force", - "dimensions": "longitude latitude time", - "out_name": "siforcecoriolx", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siforcecorioly": { - "modeling_realm": "seaIce", - "standard_name": "coriolis_force_on_sea_ice_y", - "units": "N m-2", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "--MODEL", - "long_name": "Coriolis force term in force balance (y-component)", - "comment": "Y-component of force on sea ice caused by coriolis force", - "dimensions": "longitude latitude time", - "out_name": "siforcecorioly", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siforceintstrx": { - "modeling_realm": "seaIce", - "standard_name": "internal_stress_in_sea_ice_x", - "units": "N m-2", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "--MODEL", - "long_name": "Internal stress term in force balance (x-component)", - "comment": "X-component of force on sea ice caused by internal stress (divergence of sigma)", - "dimensions": "longitude latitude time", - "out_name": "siforceintstrx", - "type": "", - 
"positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siforceintstry": { - "modeling_realm": "seaIce", - "standard_name": "internal_stress_in_sea_ice_y", - "units": "N m-2", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "--MODEL", - "long_name": "Internal stress term in force balance (y-component)", - "comment": "Y-component of force on sea ice caused by internal stress (divergence of sigma)", - "dimensions": "longitude latitude time", - "out_name": "siforceintstry", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siforcetiltx": { - "modeling_realm": "seaIce", - "standard_name": "sea_surface_tilt_force_on_sea_ice_x", - "units": "N m-2", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "--MODEL", - "long_name": "Sea-surface tilt term in force balance (x-component)", - "comment": "X-component of force on sea ice caused by sea-surface tilt", - "dimensions": "longitude latitude time", - "out_name": "siforcetiltx", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siforcetilty": { - "modeling_realm": "seaIce", - "standard_name": "sea_surface_tilt_force_on_sea_ice_y", - "units": "N m-2", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "--MODEL", - "long_name": "Sea-surface tilt term in force balance (y-component)", - "comment": "Y-component of force on sea ice caused by sea-surface tilt", - "dimensions": "longitude latitude time", - "out_name": "siforcetilty", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sihc": { - "modeling_realm": "seaIce", - "standard_name": "integral_of_sea_ice_temperature_wrt_depth_expressed_as_heat_content", - "units": "J m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Sea-ice heat content per unit area", - "comment": "Heat content of all ice in grid cell divided by total grid-cell area. Water at 0 Celsius is assumed to have a heat content of 0 J. Does not include heat content of snow, but does include heat content of brine. 
Heat content is always negative, since both the sensible and the latent heat content of ice are less than that of water", - "dimensions": "longitude latitude time", - "out_name": "sihc", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siitdconc": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_area_fraction_over_categories", - "units": "%", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Sea-ice area fractions in thickness categories", - "comment": "Area fraction of grid cell covered by each ice-thickness category (vector with one entry for each thickness category starting from the thinnest category, netcdf file should use thickness bounds of the categories as third coordinate axis)", - "dimensions": "longitude latitude iceband time", - "out_name": "siitdconc", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siitdsnconc": { - "modeling_realm": "seaIce", - "standard_name": "snow_area_fraction_over_categories", - "units": "%", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siitdconc)", - "cell_measures": "area: areacella", - "long_name": "Snow area fractions in thickness categories", - "comment": "Area fraction of grid cell covered by snow in each ice-thickness category (vector with one entry for each thickness category starting from the thinnest category, netcdf file should use thickness bounds of the categories as third coordinate axis)", - "dimensions": "longitude latitude iceband time", - "out_name": "siitdsnconc", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siitdsnthick": { - "modeling_realm": "seaIce", - "standard_name": "snow_thickness_over_categories", - "units": "m", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siitdconc)", - "cell_measures": "area: areacella", - "long_name": "Snow thickness in thickness categories", - "comment": "Actual thickness of snow in each category (NOT volume divided by grid area), (vector with one entry for each thickness category starting from the thinnest category, netcdf file should use thickness bounds of categories as third coordinate axis)", - "dimensions": "longitude latitude iceband time", - "out_name": "siitdsnthick", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siitdthick": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_thickness_over_categories", - "units": "m", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siitdconc)", - "cell_measures": "area: areacella", - "long_name": "Sea-ice thickness in thickness categories", - "comment": "Actual (floe) thickness of sea ice in each category (NOT volume divided by grid area), (vector with one entry for each thickness category starting from the thinnest category, netcdf file should use thickness bounds of categories as third coordinate axis)", - "dimensions": "longitude latitude iceband time", - "out_name": "siitdthick", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "simass": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_amount", - "units": "kg m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": 
"Sea-ice mass per area", - "comment": "Total mass of sea ice divided by grid-cell area", - "dimensions": "longitude latitude time", - "out_name": "simass", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "simassacrossline": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_transport_across_line", - "units": "kg s-1", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Sea mass area flux through straits", - "comment": "net (sum of transport in all directions) sea ice area transport through the following four passages, positive into the Arctic Ocean 1. Fram Strait = (11.5W,81.3N to (10.5E,79.6N) 2. Canadian Archipelego = (128.2W,70.6N) to (59.3W,82.1N) 3. Barents opening = (16.8E,76.5N) to (19.2E,70.2N) 4. Bering Strait = (171W,66.2N) to (166W,65N)", - "dimensions": "siline time", - "out_name": "simassacrossline", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "simpconc": { - "modeling_realm": "seaIce", - "standard_name": "area_fraction", - "units": "%", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Percentage Cover of Sea-Ice by Meltpond", - "comment": "Fraction of sea ice, by area, which is covered by melt ponds, giving equal weight to every square metre of sea ice .", - "dimensions": "longitude latitude time typemp", - "out_name": "simpconc", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "simpmass": { - "modeling_realm": "seaIce", - "standard_name": "surface_liquid_water_amount", - "units": "kg m-2", - "cell_methods": "area: time: mean where sea_ice_melt_pond (comment: mask=simpconc)", - "cell_measures": "area: areacella", - "long_name": "Meltpond Mass per Unit Area", - "comment": "Meltpond mass per area of sea ice.", - "dimensions": "longitude latitude time", - "out_name": "simpmass", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "simprefrozen": { - "modeling_realm": "seaIce", - "standard_name": "melt_pond_refrozen_ice", - "units": "m", - "cell_methods": "area: time: mean where sea_ice_melt_pond (comment: mask=simpconc)", - "cell_measures": "area: areacella", - "long_name": "Thickness of Refrozen Ice on Melt Pond", - "comment": "Volume of refrozen ice on melt ponds divided by meltpond covered area", - "dimensions": "longitude latitude time", - "out_name": "simprefrozen", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sipr": { - "modeling_realm": "seaIce", - "standard_name": "rainfall_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Rainfall rate over sea ice", - "comment": "mass of liquid precipitation falling onto sea ice divided by grid-cell area", - "dimensions": "longitude latitude time", - "out_name": "sipr", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sirdgconc": { - "modeling_realm": "seaIce", - "standard_name": "fraction_of_ridged_sea_ice", - "units": "1.0", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - 
"long_name": "Percentage Cover of Sea-Ice by Ridging", - "comment": "Fraction of sea ice, by area, which is covered by sea ice ridges, giving equal weight to every square metre of sea ice .", - "dimensions": "longitude latitude time", - "out_name": "sirdgconc", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sirdgthick": { - "modeling_realm": "seaIce", - "standard_name": "thickness_of_ridged_sea_ice", - "units": "m", - "cell_methods": "area: time: mean where sea_ice (comment: mask=sirdgconc - ridges only)", - "cell_measures": "area: areacella", - "long_name": "Ridged ice thickness", - "comment": "Sea Ice Ridge Height (representing mean height over the ridged area)", - "dimensions": "longitude latitude time", - "out_name": "sirdgthick", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sisali": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_salinity", - "units": "0.001", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Sea ice salinity", - "comment": "Mean sea-ice salinity of all sea ice in grid cell", - "dimensions": "longitude latitude time", - "out_name": "sisali", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sisaltmass": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_salt_mass", - "units": "kg m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Mass of salt in sea ice per area", - "comment": "Total mass of all salt in sea ice divided by grid-cell area", - "dimensions": "longitude latitude time", - "out_name": "sisaltmass", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sishevel": { - "modeling_realm": "seaIce", - "standard_name": "maximum_shear_of_sea_ice_velocity", - "units": "s-1", - "cell_methods": "area: mean where sea_ice (comment: mask=siconc) time: point", - "cell_measures": "area: areacella", - "long_name": "Maximum shear of sea-ice velocity field", - "comment": "Maximum shear of sea-ice velocity field (second shear strain invariant)", - "dimensions": "longitude latitude time1", - "out_name": "sishevel", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sisnconc": { - "modeling_realm": "seaIce", - "standard_name": "surface_snow_area_fraction", - "units": "%", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Snow area fraction", - "comment": "Fraction of sea ice, by area, which is covered by snow, giving equal weight to every square metre of sea ice . Exclude snow that lies on land or land ice.", - "dimensions": "longitude latitude time", - "out_name": "sisnconc", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sisnhc": { - "modeling_realm": "seaIce", - "standard_name": "thermal_energy_content_of_surface_snow", - "units": "J m-2", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Snow-heat content per unit area", - "comment": "Heat-content of all snow in grid cell divided by total grid-cell area. 
Snow-water equivalent at 0 Celsius is assumed to have a heat content of 0 J. Does not include heat content of sea ice.", - "dimensions": "longitude latitude time", - "out_name": "sisnhc", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sisnmass": { - "modeling_realm": "seaIce", - "standard_name": "liquid_water_content_of_surface_snow", - "units": "kg m-2", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Snow mass per area", - "comment": "Total mass of snow on sea ice divided by grid-cell area", - "dimensions": "longitude latitude time", - "out_name": "sisnmass", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sisnthick": { - "modeling_realm": "seaIce", - "standard_name": "surface_snow_thickness", - "units": "m", - "cell_methods": "area: mean where snow over sea_ice area: time: mean where sea_ice", - "cell_measures": "area: areacella", - "long_name": "Snow thickness", - "comment": "Actual thickness of snow (snow volume divided by snow-covered area)", - "dimensions": "longitude latitude time", - "out_name": "sisnthick", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sispeed": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_speed", - "units": "m s-1", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Sea-ice speed", - "comment": "Speed of ice (i.e. mean absolute velocity) to account for back-and-forth movement of the ice", - "dimensions": "longitude latitude time", - "out_name": "sispeed", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sistremax": { - "modeling_realm": "seaIce", - "standard_name": "maximum_shear_stress", - "units": "N m-1", - "cell_methods": "area: mean where sea_ice (comment: mask=siconc) time: point", - "cell_measures": "area: areacella", - "long_name": "Maximum shear stress in sea ice", - "comment": "Maximum shear stress in sea ice (second stress invariant)", - "dimensions": "longitude latitude time1", - "out_name": "sistremax", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sistresave": { - "modeling_realm": "seaIce", - "standard_name": "average_normal_stress", - "units": "N m-1", - "cell_methods": "area: mean where sea_ice (comment: mask=siconc) time: point", - "cell_measures": "area: areacella", - "long_name": "Average normal stress in sea ice", - "comment": "Average normal stress in sea ice (first stress invariant)", - "dimensions": "longitude latitude time1", - "out_name": "sistresave", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sistrxdtop": { - "modeling_realm": "seaIce", - "standard_name": "surface_downward_x_stress", - "units": "N m-2", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "--MODEL", - "long_name": "X-component of atmospheric stress on sea ice", - "comment": "X-component of atmospheric stress on sea ice", - "dimensions": "longitude latitude time", - "out_name": "sistrxdtop", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", 
- "ok_max_mean_abs": "" - }, - "sistrxubot": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_base_upward_x_stress", - "units": "N m-2", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "X-component of ocean stress on sea ice", - "comment": "X-component of ocean stress on sea ice", - "dimensions": "longitude latitude time", - "out_name": "sistrxubot", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sistrydtop": { - "modeling_realm": "seaIce", - "standard_name": "surface_downward_y_stress", - "units": "N m-2", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "--MODEL", - "long_name": "Y-component of atmospheric stress on sea ice", - "comment": "Y-component of atmospheric stress on sea ice", - "dimensions": "longitude latitude time", - "out_name": "sistrydtop", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sistryubot": { - "modeling_realm": "seaIce", - "standard_name": "downward_y_stress_at_sea_ice_base", - "units": "N m-2", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Y-component of ocean stress on sea ice", - "comment": "Y-component of ocean stress on sea ice", - "dimensions": "longitude latitude time", - "out_name": "sistryubot", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sitempbot": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_bottom_temperature", - "units": "K", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Temperature at ice-ocean interface", - "comment": "Report temperature at interface, NOT temperature within lowermost model layer", - "dimensions": "longitude latitude time", - "out_name": "sitempbot", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sitempsnic": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_surface_temperature", - "units": "K", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Temperature at snow-ice interface", - "comment": "Report surface temperature of ice where snow thickness is zero", - "dimensions": "longitude latitude time", - "out_name": "sitempsnic", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sitemptop": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_surface_temperature", - "units": "K", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - "long_name": "Surface temperature of sea ice", - "comment": "Report surface temperature of snow where snow covers the sea ice.", - "dimensions": "longitude latitude time", - "out_name": "sitemptop", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sithick": { - "modeling_realm": "seaIce ocean", - "standard_name": "sea_ice_thickness", - "units": "m", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "area: areacella", - 
"long_name": "Sea Ice Thickness", - "comment": "Actual (floe) thickness of sea ice (NOT volume divided by grid area as was done in CMIP5)", - "dimensions": "longitude latitude time", - "out_name": "sithick", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sitimefrac": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_time_fraction", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Fraction of time steps with sea ice", - "comment": "Fraction of time steps of the averaging period during which sea ice is present (siconc >0 ) in a grid cell", - "dimensions": "longitude latitude time", - "out_name": "sitimefrac", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siu": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_x_velocity", - "units": "m s-1", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "--MODEL", - "long_name": "X-component of sea ice velocity", - "comment": "The x-velocity of ice on native model grid", - "dimensions": "longitude latitude time", - "out_name": "siu", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "siv": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_y_velocity", - "units": "m s-1", - "cell_methods": "area: time: mean where sea_ice (comment: mask=siconc)", - "cell_measures": "--MODEL", - "long_name": "Y-component of sea ice velocity", - "comment": "The y-velocity of ice on native model grid", - "dimensions": "longitude latitude time", - "out_name": "siv", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sivol": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_thickness", - "units": "m", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Sea-ice volume per area", - "comment": "Total volume of sea ice divided by grid-cell area (this used to be called ice thickness in CMIP5)", - "dimensions": "longitude latitude time", - "out_name": "sivol", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sivoln": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_volume", - "units": "1e3 km3", - "cell_methods": "area: time: mean", - "cell_measures": "", - "long_name": "Sea ice volume North", - "comment": "total volume of sea ice in the Northern hemisphere", - "dimensions": "time", - "out_name": "sivoln", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sivols": { - "modeling_realm": "seaIce", - "standard_name": "sea_ice_volume", - "units": "1e3 km3", - "cell_methods": "area: time: mean", - "cell_measures": "", - "long_name": "Sea ice volume South", - "comment": "total volume of sea ice in the Southern hemisphere", - "dimensions": "time", - "out_name": "sivols", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sndmassdyn": { - "modeling_realm": "seaIce", - "standard_name": "tendency_of_snow_mass_due_to_sea_ice_dynamics", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Snow Mass Rate 
of Change through Advection by Sea-ice Dynamics", - "comment": "the rate of change of snow mass through advection with sea ice divided by grid-cell area", - "dimensions": "longitude latitude time", - "out_name": "sndmassdyn", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sndmassmelt": { - "modeling_realm": "seaIce", - "standard_name": "surface_snow_melt_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Snow Mass Rate of Change through Melt", - "comment": "the rate of change of snow mass through melt divided by grid-cell area", - "dimensions": "longitude latitude time", - "out_name": "sndmassmelt", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sndmasssi": { - "modeling_realm": "seaIce", - "standard_name": "tendency_of_surface_snow_amount_due_to_conversion_of_snow_to_sea_ice", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Snow Mass Rate of Change through Snow-to-Ice Conversion", - "comment": "the rate of change of snow mass due to transformation of snow to sea ice divided by grid-cell area", - "dimensions": "longitude latitude time", - "out_name": "sndmasssi", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sndmasssnf": { - "modeling_realm": "seaIce", - "standard_name": "snowfall_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "snow mass change through snow fall", - "comment": "mass of solid precipitation falling onto sea ice divided by grid-cell area", - "dimensions": "longitude latitude time", - "out_name": "sndmasssnf", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sndmasssubl": { - "modeling_realm": "seaIce", - "standard_name": "surface_snow_sublimation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Snow Mass Rate of Change through Evaporation or Sublimation", - "comment": "the rate of change of snow mass through sublimation and evaporation divided by grid-cell area", - "dimensions": "longitude latitude time", - "out_name": "sndmasssubl", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sndmasswindrif": { - "modeling_realm": "seaIce", - "standard_name": "tendency_of_snow_mass_due_to_drifting_snow", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Snow Mass Rate of Change through Wind Drift of Snow", - "comment": "the rate of change of snow mass through wind drift of snow (from sea-ice into the sea) divided by grid-cell area", - "dimensions": "longitude latitude time", - "out_name": "sndmasswindrif", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "snmassacrossline": { - "modeling_realm": "seaIce", - "standard_name": "snow_mass_transport_across_line", - "units": "kg s-1", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Snow mass flux through straits", - "comment": "net (sum of transport in all directions) snow mass transport
through the following four passages, positive into the Arctic Ocean 1. Fram Strait = (11.5W,81.3N) to (10.5E,79.6N) 2. Canadian Archipelago = (128.2W,70.6N) to (59.3W,82.1N) 3. Barents opening = (16.8E,76.5N) to (19.2E,70.2N) 4. Bering Strait = (171W,66.2N) to (166W,65N)", - "dimensions": "siline time", - "out_name": "snmassacrossline", - "type": "", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_coordinate.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_coordinate.json deleted file mode 100644 index fb408219f9..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_coordinate.json +++ /dev/null @@ -1,2929 +0,0 @@ -{ - "axis_entry": { - "alev1": { - "standard_name": "", - "units": "", - "axis": "Z", - "long_name": "lowest atmospheric model level", - "climatology": "", - "formula": "", - "must_have_bounds": "yes", - "out_name": "lev", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "alt16": { - "standard_name": "altitude", - "units": "m", - "axis": "Z", - "long_name": "altitude", - "climatology": "", - "formula": "", - "must_have_bounds": "yes", - "out_name": "alt16", - "positive": "up", - "requested": [ - "0", - "250", - "750", - "1250", - "1750", - "2250", - "2750", - "3500", - "4500", - "6000", - "8000", - "10000", - "12000", - "14500", - "16000", - "18000" - ], - "requested_bounds": [ - "-99000.0", - "0.0", - "0.0", - "500.0", - "500.0", - "1000.0", - "1000.0", - "1500.0", - "1500.0", - "2000.0", - "2000.0", - "2500.0", - "2500.0", - "3000.0", - "3000.0", - "4000.0", - "4000.0", - "5000.0", - "5000.0", - "7000.0", - "7000.0", - "9000.0", - "9000.0", - "11000.0", - "11000.0", - "13000.0", - "13000.0", - "15000.0", - "15000.0", - "17000.0", - "17000.0", - "99000.0" - ], - "stored_direction": "increasing", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "alt40": { - "standard_name": "altitude", - "units": "m", - "axis": "Z", - "long_name": "altitude", - "climatology": "", - "formula": "", - "must_have_bounds": "yes", - "out_name": "alt40", - "positive": "up", - "requested": [ - "240.", - "720.", - "1200.", - "1680.", - "2160.", - "2640.", - "3120.", - "3600.", - "4080.", - "4560.", - "5040.", - "5520.", - "6000.", - "6480.", - "6960.", - "7440.", - "7920.", - "8400.", - "8880.", - "9360.", - "9840.", - "10320.", - "10800.", - "11280.", - "11760.", - "12240.", - "12720.", - "13200.", - "13680.", - "14160.", - "14640.", - "15120.", - "15600.", - "16080.", - "16560.", - "17040.", - "17520.", - "18000.", - "18480.", - "18960."
- ], - "requested_bounds": [ - "0.0", - "480.0", - "480.0", - "960.0", - "960.0", - "1440.0", - "1440.0", - "1920.0", - "1920.0", - "2400.0", - "2400.0", - "2880.0", - "2880.0", - "3360.0", - "3360.0", - "3840.0", - "3840.0", - "4320.0", - "4320.0", - "4800.0", - "4800.0", - "5280.0", - "5280.0", - "5760.0", - "5760.0", - "6240.0", - "6240.0", - "6720.0", - "6720.0", - "7200.0", - "7200.0", - "7680.0", - "7680.0", - "8160.0", - "8160.0", - "8640.0", - "8640.0", - "9120.0", - "9120.0", - "9600.0", - "9600.0", - "10080.0", - "10080.0", - "10560.0", - "10560.0", - "11040.0", - "11040.0", - "11520.0", - "11520.0", - "12000.0", - "12000.0", - "12480.0", - "12480.0", - "12960.0", - "12960.0", - "13440.0", - "13440.0", - "13920.0", - "13920.0", - "14400.0", - "14400.0", - "14880.0", - "14880.0", - "15360.0", - "15360.0", - "15840.0", - "15840.0", - "16320.0", - "16320.0", - "16800.0", - "16800.0", - "17280.0", - "17280.0", - "17760.0", - "17760.0", - "18240.0", - "18240.0", - "18720.0", - "18720.0", - "19200.0" - ], - "stored_direction": "increasing", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "alternate_hybrid_sigma": { - "standard_name": "atmosphere_hybrid_sigma_pressure_coordinate", - "units": "1", - "axis": "Z", - "long_name": "hybrid sigma pressure coordinate", - "climatology": "", - "formula": "p = ap + b*ps", - "must_have_bounds": "yes", - "out_name": "lev", - "positive": "down", - "requested": "", - "requested_bounds": "", - "stored_direction": "decreasing", - "tolerance": "", - "type": "", - "valid_max": "1.0", - "valid_min": "0.0", - "value": "", - "z_bounds_factors": "ap: ap_bnds b: b_bnds ps: ps", - "z_factors": "ap: ap b: b ps: ps", - "bounds_values": "" - }, - "basin": { - "standard_name": "region", - "units": "", - "axis": "", - "long_name": "ocean basin", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "basin", - "positive": "", - "requested": [ - "atlantic_arctic_ocean", - "indian_pacific_ocean", - "global_ocean" - ], - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "dbze": { - "standard_name": "equivalent_reflectivity_factor", - "units": "dBZ", - "axis": "", - "long_name": "CloudSat simulator equivalent radar reflectivity factor", - "climatology": "", - "formula": "", - "must_have_bounds": "yes", - "out_name": "dbze", - "positive": "", - "requested": [ - "-47.5", - "-42.5", - "-37.5", - "-32.5", - "-27.5", - "-22.5", - "-17.5", - "-12.5", - "-7.5", - "-2.5", - "2.5", - "7.5", - "12.5", - "17.5", - "22.5" - ], - "requested_bounds": [ - "-50.0", - "-45.0", - "-45.0", - "-40.0", - "-40.0", - "-35.0", - "-35.0", - "-30.0", - "-30.0", - "-25.0", - "-25.0", - "-20.0", - "-20.0", - "-15.0", - "-15.0", - "-10.0", - "-10.0", - "-5.0", - "-5.0", - "0.0", - "0.0", - "5.0", - "5.0", - "10.0", - "10.0", - "15.0", - "15.0", - "20.0", - "20.0", - "25.0" - ], - "stored_direction": "increasing", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "depth0m": { - "standard_name": "depth", - "units": "m", - "axis": "Z", - "long_name": "depth", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "depth", - "positive": "down", - 
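The alternate_hybrid_sigma axis above is defined entirely by its formula field, p = ap + b*ps, with the term names spelled out in z_factors (ap and b per model level, ps the surface pressure field). A minimal numpy sketch of evaluating it; the shapes and coefficient values below are illustrative, not taken from the table:

import numpy as np

# p = ap + b*ps: pressure at each hybrid sigma level and grid point.
ap = np.array([5000.0, 2000.0, 500.0])   # Pa per level (illustrative)
b = np.array([0.2, 0.6, 0.9])            # dimensionless per level (illustrative)
ps = np.full((2, 3), 101325.0)           # surface pressure on a (lat, lon) grid

p = ap[:, None, None] + b[:, None, None] * ps   # broadcast to (lev, lat, lon)
print(p.shape)  # (3, 2, 3)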
"requested": "", - "requested_bounds": "", - "stored_direction": "increasing", - "tolerance": "", - "type": "double", - "valid_max": "100.0", - "valid_min": "0.0", - "value": "0.", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "depth100m": { - "standard_name": "depth", - "units": "m", - "axis": "Z", - "long_name": "depth", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "depth", - "positive": "down", - "requested": "", - "requested_bounds": "", - "stored_direction": "increasing", - "tolerance": "", - "type": "double", - "valid_max": "120.0", - "valid_min": "80.0", - "value": "100.", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "depth2000m": { - "standard_name": "depth", - "units": "m", - "axis": "Z", - "long_name": "depth", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "depth", - "positive": "down", - "requested": "", - "requested_bounds": "", - "stored_direction": "increasing", - "tolerance": "", - "type": "double", - "valid_max": "2200.0", - "valid_min": "1980.0", - "value": "2000", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "depth300m": { - "standard_name": "depth", - "units": "m", - "axis": "Z", - "long_name": "depth", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "depth", - "positive": "down", - "requested": "", - "requested_bounds": "", - "stored_direction": "increasing", - "tolerance": "", - "type": "double", - "valid_max": "320.0", - "valid_min": "280.0", - "value": "300", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "depth700m": { - "standard_name": "depth", - "units": "m", - "axis": "Z", - "long_name": "depth", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "depth", - "positive": "down", - "requested": "", - "requested_bounds": "", - "stored_direction": "increasing", - "tolerance": "", - "type": "double", - "valid_max": "720.0", - "valid_min": "680.0", - "value": "700", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "depth_coord": { - "standard_name": "depth", - "units": "m", - "axis": "Z", - "long_name": "ocean depth coordinate", - "climatology": "", - "formula": "", - "must_have_bounds": "yes", - "out_name": "lev", - "positive": "down", - "requested": "", - "requested_bounds": "", - "stored_direction": "increasing", - "tolerance": "", - "type": "", - "valid_max": "12000.0", - "valid_min": "0.0", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "effectRadIc": { - "standard_name": "", - "units": "micron", - "axis": "", - "long_name": "Effective Radius [Values to be specified]", - "climatology": "", - "formula": "", - "must_have_bounds": "", - "out_name": "effectRadIc", - "positive": "", - "requested": [ - "5.", - "15.", - "25.", - "35.", - "50.", - "75." 
- ], - "requested_bounds": [ - "0.0", - "10.0", - "10.0", - "20.0", - "20.0", - "30.0", - "30.0", - "40.0", - "40.0", - "60.0", - "60.0", - "90.0" - ], - "stored_direction": "", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "effectRadLi": { - "standard_name": "", - "units": "micron", - "axis": "", - "long_name": "Effective Radius [Values to be specified]", - "climatology": "", - "formula": "", - "must_have_bounds": "", - "out_name": "effectRadLi", - "positive": "", - "requested": [ - "4.", - "9.", - "11.5", - "14.", - "17.5", - "25." - ], - "requested_bounds": [ - "0.0", - "8.0", - "8.0", - "10.0", - "10.0", - "13.0", - "13.0", - "15.0", - "15.0", - "20.0", - "20.0", - "30.0" - ], - "stored_direction": "", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "height100m": { - "standard_name": "height", - "units": "m", - "axis": "Z", - "long_name": "height", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "height", - "positive": "up", - "requested": "", - "requested_bounds": "", - "stored_direction": "increasing", - "tolerance": "", - "type": "double", - "valid_max": "120.0", - "valid_min": "80.0", - "value": "100.", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "height10m": { - "standard_name": "height", - "units": "m", - "axis": "Z", - "long_name": "height", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "height", - "positive": "up", - "requested": "", - "requested_bounds": "", - "stored_direction": "increasing", - "tolerance": "", - "type": "double", - "valid_max": "30.0", - "valid_min": "1.0", - "value": "10.", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "height2m": { - "standard_name": "height", - "units": "m", - "axis": "Z", - "long_name": "height", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "height", - "positive": "up", - "requested": "", - "requested_bounds": "", - "stored_direction": "increasing", - "tolerance": "", - "type": "double", - "valid_max": "10.0", - "valid_min": "1.0", - "value": "2.", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "hybrid_height": { - "standard_name": "atmosphere_hybrid_height_coordinate", - "units": "m", - "axis": "Z", - "long_name": "hybrid height coordinate", - "climatology": "", - "formula": "z = a + b*orog", - "must_have_bounds": "yes", - "out_name": "lev", - "positive": "up", - "requested": "", - "requested_bounds": "", - "stored_direction": "increasing", - "tolerance": "", - "type": "", - "valid_max": "", - "valid_min": "0.0", - "value": "", - "z_bounds_factors": "a: lev_bnds b: b_bnds orog: orog", - "z_factors": "a: lev b: b orog: orog", - "bounds_values": "" - }, - "iceband": { - "standard_name": "sea_ice_thickness", - "units": "m", - "axis": "", - "long_name": "Ice Depth Band", - "climatology": "", - "formula": "", - "must_have_bounds": "yes", - "out_name": "iceband", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "icesheet": { - "standard_name": "region", - "units": "", - "axis": "", - "long_name": "Ice Sheet", - 
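Singleton axes such as height2m, height10m and height100m above fix one coordinate value plus an allowed window (valid_min/valid_max). A sketch of the kind of range check a table-aware reader could apply; the helper name and logic are assumptions here, only the field names come from the entries:

def check_scalar_axis(entry, value):
    # Compare a file's scalar coordinate against the axis entry's window.
    lo, hi = float(entry["valid_min"]), float(entry["valid_max"])
    if not lo <= value <= hi:
        raise ValueError(f"{entry['out_name']}={value} outside [{lo}, {hi}]")

height2m = {"out_name": "height", "valid_min": "1.0",
            "valid_max": "10.0", "value": "2."}
check_scalar_axis(height2m, 2.0)     # within the window: passes silently
# check_scalar_axis(height2m, 15.0)  # would raise ValueError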
"climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "region", - "positive": "", - "requested": [ - "antarctic", - "greenland" - ], - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "landUse": { - "standard_name": "area_type", - "units": "", - "axis": "", - "long_name": "Land use type", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "landuse", - "positive": "", - "requested": [ - "primary_and_secondary_land", - "pastures", - "crops", - "urban" - ], - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "latitude": { - "standard_name": "latitude", - "units": "degrees_north", - "axis": "Y", - "long_name": "latitude", - "climatology": "", - "formula": "", - "must_have_bounds": "yes", - "out_name": "lat", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "increasing", - "tolerance": "", - "type": "double", - "valid_max": "90.0", - "valid_min": "-90.0", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "location": { - "standard_name": "", - "units": "", - "axis": "", - "long_name": "location index", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "loc", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "increasing", - "tolerance": "", - "type": "integer", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "longitude": { - "standard_name": "longitude", - "units": "degrees_east", - "axis": "X", - "long_name": "longitude", - "climatology": "", - "formula": "", - "must_have_bounds": "yes", - "out_name": "lon", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "increasing", - "tolerance": "", - "type": "double", - "valid_max": "360.0", - "valid_min": "0.0", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "misrBands": { - "standard_name": "wave_frequency", - "units": "s-1", - "axis": "", - "long_name": "MISR Spectral Frequency Band", - "climatology": "", - "formula": "", - "must_have_bounds": "yes", - "out_name": "spectband", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "natural_log_pressure": { - "standard_name": "atmosphere_ln_pressure_coordinate", - "units": "", - "axis": "Z", - "long_name": "atmosphere natural log pressure coordinate", - "climatology": "", - "formula": "p = p0 * exp(-lev)", - "must_have_bounds": "yes", - "out_name": "lev", - "positive": "down", - "requested": "", - "requested_bounds": "", - "stored_direction": "decreasing", - "tolerance": "", - "type": "", - "valid_max": "20.0", - "valid_min": "-1.0", - "value": "", - "z_bounds_factors": "p0: p0 lev: lev_bnds", - "z_factors": "p0: p0 lev: lev", - "bounds_values": "" - }, - "ocean_double_sigma": { - "standard_name": "ocean_double_sigma", - "units": "", - "axis": "Z", - "long_name": "ocean double sigma coordinate", - "climatology": "", - "formula": 
"for k <= k_c:\n z(k,j,i)= sigma(k)*f(j,i) \n for k > k_c:\n z(k,j,i)= f(j,i) + (sigma(k)-1)*(depth(j,i)-f(j,i)) \n f(j,i)= 0.5*(z1+ z2) + 0.5*(z1-z2)* tanh(2*a/(z1-z2)*(depth(j,i)-href))", - "must_have_bounds": "yes", - "out_name": "lev", - "positive": "up", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "sigma: sigma_bnds depth: depth z1: z1 z2: z2 a: a href: href k_c: k_c", - "z_factors": "sigma: sigma depth: depth z1: z1 z2: z2 a: a_coeff href: href k_c: k_c", - "bounds_values": "" - }, - "ocean_s": { - "standard_name": "ocean_s_coordinate", - "units": "", - "axis": "Z", - "long_name": "ocean s-coordinate", - "climatology": "", - "formula": "z(n,k,j,i) = eta(n,j,i)*(1+s(k)) + depth_c*s(k) + (depth(j,i)-depth_c)*C(k) \n where \n C(k)=(1-b)*sinh(a*s(k))/sinh(a) +\n b*(tanh(a*(s(k)+0.5))/(2*tanh(0.5*a)) - 0.5)", - "must_have_bounds": "yes", - "out_name": "lev", - "positive": "up", - "requested": "", - "requested_bounds": "", - "stored_direction": "decreasing", - "tolerance": "", - "type": "", - "valid_max": "0.0", - "valid_min": "-1.0", - "value": "", - "z_bounds_factors": "s: lev_bnds eta: eta depth: depth a: a b: b depth_c: depth_c", - "z_factors": "s: lev eta: eta depth: depth a: a_coeff b: b_coeff depth_c: depth_c", - "bounds_values": "" - }, - "ocean_sigma": { - "standard_name": "ocean_sigma_coordinate", - "units": "", - "axis": "Z", - "long_name": "ocean sigma coordinate", - "climatology": "", - "formula": "z(n,k,j,i) = eta(n,j,i) + sigma(k)*(depth(j,i)+eta(n,j,i))", - "must_have_bounds": "yes", - "out_name": "lev", - "positive": "up", - "requested": "", - "requested_bounds": "", - "stored_direction": "decreasing", - "tolerance": "", - "type": "", - "valid_max": "0.0", - "valid_min": "-1.0", - "value": "", - "z_bounds_factors": "sigma: lev_bnds eta: eta depth: depth", - "z_factors": "sigma: lev eta: eta depth: depth", - "bounds_values": "" - }, - "ocean_sigma_z": { - "standard_name": "ocean_sigma_z", - "units": "", - "axis": "Z", - "long_name": "ocean sigma over z coordinate", - "climatology": "", - "formula": "for k <= nsigma: z(n,k,j,i) = eta(n,j,i) + sigma(k)*(min(depth_c,depth(j,i))+eta(n,j,i)) ; for k > nsigma: z(n,k,j,i) = zlev(k)", - "must_have_bounds": "yes", - "out_name": "lev", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "sigma: sigma_bnds eta: eta depth: depth depth_c: depth_c nsigma: nsigma zlev: zlev_bnds", - "z_factors": "sigma: sigma eta: eta depth: depth depth_c: depth_c nsigma: nsigma zlev: zlev", - "bounds_values": "" - }, - "olayer100m": { - "standard_name": "depth", - "units": "m", - "axis": "Z", - "long_name": "depth", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "depth", - "positive": "down", - "requested": "", - "requested_bounds": "", - "stored_direction": "increasing", - "tolerance": "", - "type": "double", - "valid_max": "100.0", - "valid_min": "0.0", - "value": "50.", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "0.0 100.0" - }, - "oline": { - "standard_name": "region", - "units": "", - "axis": "", - "long_name": "ocean passage", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "line", - "positive": "", - "requested": [ - "barents_opening", - "bering_strait", - "canadian_archipelago", - 
"denmark_strait", - "drake_passage", - "english_channel", - "pacific_equatorial_undercurrent", - "faroe_scotland_channel", - "florida_bahamas_strait", - "fram_strait", - "iceland_faroe_channel", - "indonesian_throughflow", - "mozambique_channel", - "taiwan_luzon_straits", - "windward_passage" - ], - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "p10": { - "standard_name": "air_pressure", - "units": "Pa", - "axis": "Z", - "long_name": "pressure", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "plev", - "positive": "down", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "1000.", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "p100": { - "standard_name": "air_pressure", - "units": "Pa", - "axis": "Z", - "long_name": "pressure", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "plev", - "positive": "down", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "10000.", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "p1000": { - "standard_name": "air_pressure", - "units": "Pa", - "axis": "Z", - "long_name": "pressure", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "plev", - "positive": "down", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "100000.", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "p200": { - "standard_name": "air_pressure", - "units": "Pa", - "axis": "Z", - "long_name": "pressure", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "plev", - "positive": "down", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "20000.", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "p220": { - "standard_name": "air_pressure", - "units": "Pa", - "axis": "Z", - "long_name": "pressure", - "climatology": "", - "formula": "", - "must_have_bounds": "yes", - "out_name": "plev", - "positive": "down", - "requested": "", - "requested_bounds": "", - "stored_direction": "decreasing", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "22000.", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "44000.0 0.0" - }, - "p500": { - "standard_name": "air_pressure", - "units": "Pa", - "axis": "Z", - "long_name": "pressure", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "plev", - "positive": "down", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "50000.", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "p560": { - "standard_name": "air_pressure", - "units": "Pa", - "axis": "Z", - "long_name": "pressure", - "climatology": "", - "formula": "", - "must_have_bounds": "yes", - "out_name": "plev", - "positive": "down", - "requested": "", - "requested_bounds": "", - 
"stored_direction": "decreasing", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "56000.", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "68000.0 44000.0" - }, - "p700": { - "standard_name": "air_pressure", - "units": "Pa", - "axis": "Z", - "long_name": "pressure", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "plev", - "positive": "down", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "70000.", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "p840": { - "standard_name": "air_pressure", - "units": "Pa", - "axis": "Z", - "long_name": "pressure", - "climatology": "", - "formula": "", - "must_have_bounds": "yes", - "out_name": "plev", - "positive": "down", - "requested": "", - "requested_bounds": "", - "stored_direction": "decreasing", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "84000.", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "100000.0 68000.0" - }, - "p850": { - "standard_name": "air_pressure", - "units": "Pa", - "axis": "Z", - "long_name": "pressure", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "plev", - "positive": "down", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "85000.", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "pl700": { - "standard_name": "air_pressure", - "units": "Pa", - "axis": "Z", - "long_name": "pressure", - "climatology": "", - "formula": "", - "must_have_bounds": "yes", - "out_name": "plev", - "positive": "down", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "70000.", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "85000.0 60000.0" - }, - "plev10": { - "standard_name": "air_pressure", - "units": "Pa", - "axis": "Z", - "long_name": "pressure", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "plev", - "positive": "down", - "requested": [ - "100000.", - "85000.", - "70000.", - "50000.", - "25000.", - "15000.", - "10000.", - "7000.", - "5000.", - "1000." - ], - "requested_bounds": "", - "stored_direction": "decreasing", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "plev19": { - "standard_name": "air_pressure", - "units": "Pa", - "axis": "Z", - "long_name": "pressure", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "plev", - "positive": "down", - "requested": [ - "100000.", - "92500.", - "85000.", - "70000.", - "60000.", - "50000.", - "40000.", - "30000.", - "25000.", - "20000.", - "15000.", - "10000.", - "7000.", - "5000.", - "3000.", - "2000.", - "1000.", - "500.", - "100." 
- ], - "requested_bounds": "", - "stored_direction": "decreasing", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "plev23": { - "standard_name": "air_pressure", - "units": "Pa", - "axis": "Z", - "long_name": "pressure", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "plev", - "positive": "down", - "requested": [ - "100000.", - "92500.", - "85000.", - "70000.", - "60000.", - "50000.", - "40000.", - "30000.", - "25000.", - "20000.", - "15000.", - "10000.", - "7000.", - "5000.", - "3000.", - "2000.", - "1000.", - "700.", - "500.", - "300.", - "200.", - "100.", - "40." - ], - "requested_bounds": "", - "stored_direction": "decreasing", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "plev27": { - "standard_name": "air_pressure", - "units": "Pa", - "axis": "Z", - "long_name": "pressure", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "plev", - "positive": "down", - "requested": [ - "100000.", - "97500.", - "95000.", - "92500.", - "90000.", - "87500.", - "85000.", - "82500.", - "80000.", - "77500.", - "75000.", - "70000.", - "65000.", - "60000.", - "55000.", - "50000.", - "45000.", - "40000.", - "35000.", - "30000.", - "25000.", - "22500.", - "20000.", - "17500.", - "15000.", - "12500.", - "10000." - ], - "requested_bounds": "", - "stored_direction": "decreasing", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "plev3": { - "standard_name": "air_pressure", - "units": "Pa", - "axis": "Z", - "long_name": "pressure", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "plev", - "positive": "down", - "requested": [ - "85000.", - "50000.", - "25000." - ], - "requested_bounds": "", - "stored_direction": "decreasing", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "plev39": { - "standard_name": "air_pressure", - "units": "Pa", - "axis": "Z", - "long_name": "pressure", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "plev", - "positive": "down", - "requested": [ - "100000.", - "92500.", - "85000.", - "70000.", - "60000.", - "50000.", - "40000.", - "30000.", - "25000.", - "20000.", - "17000.", - "15000.", - "13000.", - "11500.", - "10000.", - "9000.", - "8000.", - "7000.", - "5000.", - "3000.", - "2000.", - "1500.", - "1000.", - "700.", - "500.", - "300.", - "200.", - "150.", - "100.", - "70.", - "50.", - "40.", - "30.", - "20.", - "15.", - "10.", - "7.", - "5.", - "3." - ], - "requested_bounds": "", - "stored_direction": "decreasing", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "plev3h": { - "standard_name": "air_pressure", - "units": "Pa", - "axis": "Z", - "long_name": "pressure", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "plev", - "positive": "down", - "requested": [ - "10000.", - "1000.", - "100." 
- ], - "requested_bounds": "", - "stored_direction": "decreasing", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "plev4": { - "standard_name": "air_pressure", - "units": "Pa", - "axis": "Z", - "long_name": "pressure", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "plev", - "positive": "down", - "requested": [ - "92500.", - "85000.", - "50000.", - "25000." - ], - "requested_bounds": "", - "stored_direction": "decreasing", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "plev7": { - "standard_name": "air_pressure", - "units": "Pa", - "axis": "Z", - "long_name": "pressure", - "climatology": "", - "formula": "", - "must_have_bounds": "yes", - "out_name": "plev", - "positive": "down", - "requested": [ - "90000.", - "74000.", - "62000.", - "50000.", - "37500.", - "24500.", - "9000." - ], - "requested_bounds": [ - "100000.", - "80000.", - "80000.", - "68000.", - "68000.", - "56000.", - "56000.", - "44000.", - "44000.", - "31000.", - "31000.", - "18000.", - "18000.", - " 0." - ], - "stored_direction": "decreasing", - "tolerance": "0.001", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "plev7c": { - "standard_name": "air_pressure", - "units": "Pa", - "axis": "Z", - "long_name": "pressure", - "climatology": "", - "formula": "", - "must_have_bounds": "yes", - "out_name": "plev", - "positive": "down", - "requested": [ - "90000.", - "74000.", - "62000.", - "50000.", - "37500.", - "24500.", - "9000." - ], - "requested_bounds": [ - "100000.0", - "80000.0", - "80000.0", - "68000.0", - "68000.0", - "56000.0", - "56000.0", - "44000.0", - "44000.0", - "31000.0", - "31000.0", - "18000.0", - "18000.0", - "0.0" - ], - "stored_direction": "decreasing", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "plev7h": { - "standard_name": "air_pressure", - "units": "Pa", - "axis": "Z", - "long_name": "pressure", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "plev", - "positive": "down", - "requested": [ - "92500.", - "85000.", - "70000.", - "60000.", - "50000.", - "25000.", - "5000." - ], - "requested_bounds": "", - "stored_direction": "decreasing", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "plev8": { - "standard_name": "air_pressure", - "units": "Pa", - "axis": "Z", - "long_name": "pressure", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "plev", - "positive": "down", - "requested": [ - "100000.", - "85000.", - "70000.", - "50000.", - "25000.", - "10000.", - "5000.", - "1000." 
- ], - "requested_bounds": "", - "stored_direction": "decreasing", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "rho": { - "standard_name": "sea_water_potential_density", - "units": "kg m-3", - "axis": "Z", - "long_name": "potential density referenced to 2000 dbar", - "climatology": "", - "formula": "", - "must_have_bounds": "yes", - "out_name": "rho", - "positive": "down", - "requested": "", - "requested_bounds": "", - "stored_direction": "increasing", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "scatratio": { - "standard_name": "backscattering_ratio", - "units": "1.0", - "axis": "", - "long_name": "lidar backscattering ratio", - "climatology": "", - "formula": "", - "must_have_bounds": "yes", - "out_name": "scatratio", - "positive": "", - "requested": [ - "0.005", - "0.605", - "2.1", - "4.", - "6.", - "8.5", - "12.5", - "17.5", - "22.5", - "27.5", - "35.", - "45.", - "55.", - "70.", - "50040." - ], - "requested_bounds": [ - "0.0", - "0.01", - "0.01", - "1.2", - "1.2", - "3.0", - "3.0", - "5.0", - "5.0", - "7.0", - "7.0", - "10.0", - "10.0", - "15.0", - "15.0", - "20.0", - "20.0", - "25.0", - "25.0", - "30.0", - "30.0", - "40.0", - "40.0", - "50.0", - "50.0", - "60.0", - "60.0", - "80.0", - "80.0", - "100000.0" - ], - "stored_direction": "increasing", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "0.005, 0.605, 2.1, 4, 6, 8.5, 12.5, 17.5, 22.5, 27.5, 35, 45, 55, 70, 50040", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "0.0 0.01 1.2 3.0 5.0 7.0 10.0 15.0 20.0 25.0 30.0 40.0 50.0 60.0 80.0 100000.0" - }, - "sdepth": { - "standard_name": "depth", - "units": "m", - "axis": "Z", - "long_name": "depth", - "climatology": "", - "formula": "", - "must_have_bounds": "yes", - "out_name": "depth", - "positive": "down", - "requested": "", - "requested_bounds": "", - "stored_direction": "increasing", - "tolerance": "", - "type": "double", - "valid_max": "200.0", - "valid_min": "0.0", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "sdepth1": { - "standard_name": "depth", - "units": "m", - "axis": "Z", - "long_name": "depth", - "climatology": "", - "formula": "", - "must_have_bounds": "yes", - "out_name": "depth", - "positive": "down", - "requested": "", - "requested_bounds": "", - "stored_direction": "increasing", - "tolerance": "", - "type": "double", - "valid_max": "0.2", - "valid_min": "0.0", - "value": "0.05", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "0.0 0.1" - }, - "siline": { - "standard_name": "region", - "units": "", - "axis": "", - "long_name": "ocean passage", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "line", - "positive": "", - "requested": [ - "Fram Strait", - "Canadian Archipelago", - "Barents opening", - "Bering Strait" - ], - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "site": { - "standard_name": "", - "units": "", - "axis": "", - "long_name": "site index", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "site", - "positive": "", - "requested": "", -
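The fixed-level axes above (plev3 through plev39) enumerate the exact pressures requested, and plev7 additionally carries an explicit tolerance of 0.001. A sketch of matching a file's levels against one of these lists; the matching policy is an assumption, only the numbers (plev3's levels, plev7's tolerance) come from the table:

import numpy as np

requested = np.array([85000., 50000., 25000.])   # plev3 values from the table
tolerance = 0.001                                # as carried by e.g. plev7

file_levels = np.array([85000.0000004, 50000.0, 25000.0])

# Each requested level must appear in the file within the tolerance.
matched = [np.any(np.isclose(file_levels, p, rtol=0.0, atol=tolerance))
           for p in requested]
print(all(matched))  # True: every requested level is present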
"requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "integer", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "smooth_level": { - "standard_name": "atmosphere_sleve_coordinate", - "units": "m", - "axis": "Z", - "long_name": "atmosphere smooth level vertical (SLEVE) coordinate", - "climatology": "", - "formula": "z = a*ztop + b1*zsurf1 + b2*zsurf2", - "must_have_bounds": "yes", - "out_name": "lev", - "positive": "up", - "requested": "", - "requested_bounds": "", - "stored_direction": "increasing", - "tolerance": "", - "type": "", - "valid_max": "800000.0", - "valid_min": "-200.0", - "value": "", - "z_bounds_factors": "a: a_bnds b1: b1_bnds b2: b2_bnds ztop: ztop zsurf1: zsurf1 zsurf2: zsurf2", - "z_factors": "a: a b1: b1 b2: b2 ztop: ztop zsurf1: zsurf1 zsurf2: zsurf2", - "bounds_values": "" - }, - "snowband": { - "standard_name": "surface_snow_thickness", - "units": "m", - "axis": "", - "long_name": "Snow Depth Band", - "climatology": "", - "formula": "", - "must_have_bounds": "yes", - "out_name": "snowband", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "snowdepth": { - "standard_name": "depth", - "units": "m", - "axis": "Z", - "long_name": "depth", - "climatology": "", - "formula": "", - "must_have_bounds": "yes", - "out_name": "depth", - "positive": "down", - "requested": "", - "requested_bounds": "", - "stored_direction": "increasing", - "tolerance": "", - "type": "double", - "valid_max": "200.0", - "valid_min": "0.0", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "spectband": { - "standard_name": "sensor_band_central_radiation_wavenumber", - "units": "m-1", - "axis": "", - "long_name": "Spectral Frequency Band", - "climatology": "", - "formula": "", - "must_have_bounds": "yes", - "out_name": "spectband", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "standard_hybrid_sigma": { - "standard_name": "atmosphere_hybrid_sigma_pressure_coordinate", - "units": "1", - "axis": "Z", - "long_name": "hybrid sigma pressure coordinate", - "climatology": "", - "formula": "p = a*p0 + b*ps", - "must_have_bounds": "yes", - "out_name": "lev", - "positive": "down", - "requested": "", - "requested_bounds": "", - "stored_direction": "decreasing", - "tolerance": "", - "type": "", - "valid_max": "1.0", - "valid_min": "0.0", - "value": "", - "z_bounds_factors": "p0: p0 a: a_bnds b: b_bnds ps: ps", - "z_factors": "p0: p0 a: a b: b ps: ps", - "bounds_values": "" - }, - "standard_sigma": { - "standard_name": "atmosphere_sigma_coordinate", - "units": "", - "axis": "Z", - "long_name": "sigma coordinate", - "climatology": "", - "formula": "p = ptop + sigma*(ps - ptop)", - "must_have_bounds": "yes", - "out_name": "lev", - "positive": "down", - "requested": "", - "requested_bounds": "", - "stored_direction": "decreasing", - "tolerance": "", - "type": "", - "valid_max": "1.0", - "valid_min": "0.0", - "value": "", - "z_bounds_factors": "ptop: ptop sigma: lev_bnds ps: ps", - "z_factors": "ptop: ptop sigma: lev ps: ps", - "bounds_values": "" - }, - "sza5": { - 
"standard_name": "solar_zenith_angle", - "units": "degree", - "axis": "", - "long_name": "solar zenith angle", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "sza", - "positive": "", - "requested": [ - "0.", - "20.", - "40.", - "60.", - "80." - ], - "requested_bounds": "", - "stored_direction": "increasing", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "tau": { - "standard_name": "atmosphere_optical_thickness_due_to_cloud", - "units": "1.0", - "axis": "", - "long_name": "cloud optical thickness", - "climatology": "", - "formula": "", - "must_have_bounds": "yes", - "out_name": "tau", - "positive": "", - "requested": [ - "0.15", - "0.8", - "2.45", - "6.5", - "16.2", - "41.5", - "100." - ], - "requested_bounds": [ - "0.0", - "0.3", - "0.3", - "1.3", - "1.3", - "3.6", - "3.6", - "9.4", - "9.4", - "23.0", - "23.0", - "60.0", - "60.0", - "100000.0" - ], - "stored_direction": "increasing", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "time": { - "standard_name": "time", - "units": "days since ?", - "axis": "T", - "long_name": "time", - "climatology": "", - "formula": "", - "must_have_bounds": "yes", - "out_name": "time", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "increasing", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "time1": { - "standard_name": "time", - "units": "days since ?", - "axis": "T", - "long_name": "time", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "time", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "increasing", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "time2": { - "standard_name": "time", - "units": "days since ?", - "axis": "T", - "long_name": "time", - "climatology": "", - "formula": "", - "must_have_bounds": "yes", - "out_name": "time", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "increasing", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "time3": { - "standard_name": "time", - "units": "days since ?", - "axis": "T", - "long_name": "time", - "climatology": "", - "formula": "", - "must_have_bounds": "yes", - "out_name": "time", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "increasing", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "typebare": { - "standard_name": "area_type", - "units": "", - "axis": "", - "long_name": "surface type", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "type", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "bare_ground", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "typeburnt": { - "standard_name": "area_type", - 
"units": "", - "axis": "", - "long_name": "Burnt vegetation area type", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "type", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "burnt_vegetation", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "typec3pft": { - "standard_name": "area_type", - "units": "", - "axis": "", - "long_name": "surface type", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "type", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "c3_plant_functional_types", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "typec4pft": { - "standard_name": "area_type", - "units": "", - "axis": "", - "long_name": "surface type", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "type", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "c4_plant_functional_types", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "typecloud": { - "standard_name": "area_type", - "units": "", - "axis": "", - "long_name": "Cloud area type", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "type", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "cloud", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "typecrop": { - "standard_name": "area_type", - "units": "", - "axis": "", - "long_name": "Crop area type", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "type", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "crops", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "typefis": { - "standard_name": "area_type", - "units": "", - "axis": "", - "long_name": "Floating Ice Shelf area type", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "type", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "floating_ice_shelf", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "typegis": { - "standard_name": "area_type", - "units": "", - "axis": "", - "long_name": "Grounded Ice Sheet area type", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "type", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "grounded_ice_sheet", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "typeland": { - "standard_name": "area_type", - "units": "", - "axis": "", - "long_name": "Land area type", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "type", - "positive": "", - "requested": "", - "requested_bounds": "", - 
"stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "land", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "typeli": { - "standard_name": "area_type", - "units": "", - "axis": "", - "long_name": "Land Ice area type", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "type", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "land_ice", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "typemp": { - "standard_name": "area_type", - "units": "", - "axis": "", - "long_name": "Melt pond area type", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "type", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "sea_ice_melt_pond", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "typenatgr": { - "standard_name": "area_type", - "units": "", - "axis": "", - "long_name": "Natural grass area type", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "type", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "natural_grasses", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "typenwd": { - "standard_name": "area_type", - "units": "", - "axis": "", - "long_name": "Non-Woody Vegetation area type", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "type", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "non_woody_vegetation", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "typepasture": { - "standard_name": "area_type", - "units": "", - "axis": "", - "long_name": "Pasture area type", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "type", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "pastures", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "typepdec": { - "standard_name": "area_type", - "units": "", - "axis": "", - "long_name": "surface type", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "type", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "primary_deciduous_trees", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "typepever": { - "standard_name": "area_type", - "units": "", - "axis": "", - "long_name": "surface type", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "type", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "primary_evergreen_trees", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "typeresidual": { - 
"standard_name": "", - "units": "", - "axis": "", - "long_name": "Residual area", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "type", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "residual", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "typesdec": { - "standard_name": "area_type", - "units": "", - "axis": "", - "long_name": "surface type", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "type", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "secondary_decidous_trees", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "typesea": { - "standard_name": "area_type", - "units": "", - "axis": "", - "long_name": "Ocean area type", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "type", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "sea", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "typesever": { - "standard_name": "area_type", - "units": "", - "axis": "", - "long_name": "surface type", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "type", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "secondary_evergreen_trees", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "typeshrub": { - "standard_name": "area_type", - "units": "", - "axis": "", - "long_name": "Shrub area type", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "type", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "shrubs", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "typesi": { - "standard_name": "area_type", - "units": "", - "axis": "", - "long_name": "Sea Ice area type", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "type", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "sea_ice", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "typetree": { - "standard_name": "area_type", - "units": "", - "axis": "", - "long_name": "Tree area type", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "type", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "trees", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "typetreebd": { - "standard_name": "area_type", - "units": "", - "axis": "", - "long_name": "Tree area type (Broadleaf Deciduous)", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "type", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": 
"", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "trees", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "typetreebe": { - "standard_name": "area_type", - "units": "", - "axis": "", - "long_name": "Tree area type (Broadleaf Evergreen)", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "type", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "trees", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "typetreend": { - "standard_name": "area_type", - "units": "", - "axis": "", - "long_name": "Tree area type (Narrowleaf Deciduous)", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "type", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "trees", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "typetreene": { - "standard_name": "area_type", - "units": "", - "axis": "", - "long_name": "Tree area type (Narrowleaf Evergreen)", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "type", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "trees", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "typeveg": { - "standard_name": "area_type", - "units": "", - "axis": "", - "long_name": "Vegetation area type", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "type", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "vegetation", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "typewetla": { - "standard_name": "area_type", - "units": "", - "axis": "", - "long_name": "Wetland", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "type", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "vegtype": { - "standard_name": "", - "units": "", - "axis": "", - "long_name": "plant functional type", - "climatology": "", - "formula": "", - "must_have_bounds": "no", - "out_name": "type", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "character", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "xant": { - "standard_name": "projection_x_coordinate", - "units": "km", - "axis": "", - "long_name": "", - "climatology": "", - "formula": "", - "must_have_bounds": "", - "out_name": "xant", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "xgre": { - "standard_name": "projection_x_coordinate", - "units": "km", - "axis": "", - "long_name": "", - "climatology": "", - 
"formula": "", - "must_have_bounds": "", - "out_name": "xgre", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "yant": { - "standard_name": "projection_y_coordinate", - "units": "km", - "axis": "", - "long_name": "", - "climatology": "", - "formula": "", - "must_have_bounds": "", - "out_name": "yant", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - }, - "ygre": { - "standard_name": "projection_y_coordinate", - "units": "km", - "axis": "", - "long_name": "", - "climatology": "", - "formula": "", - "must_have_bounds": "", - "out_name": "ygre", - "positive": "", - "requested": "", - "requested_bounds": "", - "stored_direction": "", - "tolerance": "", - "type": "double", - "valid_max": "", - "valid_min": "", - "value": "", - "z_bounds_factors": "", - "z_factors": "", - "bounds_values": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_day.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_day.json deleted file mode 100644 index 8f19bbd382..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_day.json +++ /dev/null @@ -1,630 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table day", - "realm": "atmos", - "frequency": "day", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "1.00000", - "generic_levels": "", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "clt": { - "modeling_realm": "atmos", - "standard_name": "cloud_area_fraction", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Cloud Fraction", - "comment": "Total cloud area fraction for the whole atmospheric column, as seen from the surface or the top of the atmosphere. 
Includes both large-scale and convective cloud.", - "dimensions": "longitude latitude time", - "out_name": "clt", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfls": { - "modeling_realm": "atmos", - "standard_name": "surface_upward_latent_heat_flux", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Upward Latent Heat Flux", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "hfls", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hfss": { - "modeling_realm": "atmos", - "standard_name": "surface_upward_sensible_heat_flux", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Upward Sensible Heat Flux", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "hfss", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hur": { - "modeling_realm": "atmos", - "standard_name": "relative_humidity", - "units": "%", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Relative Humidity", - "comment": "The relative humidity with respect to liquid water for T> 0 C, and with respect to ice for T<0 C.", - "dimensions": "longitude latitude plev8 time", - "out_name": "hur", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hurs": { - "modeling_realm": "atmos", - "standard_name": "relative_humidity", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Near-Surface Relative Humidity", - "comment": "The relative humidity with respect to liquid water for T> 0 C, and with respect to ice for T<0 C.", - "dimensions": "longitude latitude time height2m", - "out_name": "hurs", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hursmax": { - "modeling_realm": "atmos", - "standard_name": "relative_humidity", - "units": "%", - "cell_methods": "area: mean time: maximum", - "cell_measures": "area: areacella", - "long_name": "Surface Daily Maximum Relative Humidity", - "comment": "", - "dimensions": "longitude latitude time height2m", - "out_name": "hursmax", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hursmin": { - "modeling_realm": "atmos", - "standard_name": "relative_humidity", - "units": "%", - "cell_methods": "area: mean time: minimum", - "cell_measures": "area: areacella", - "long_name": "Surface Daily Minimum Relative Humidity", - "comment": "", - "dimensions": "longitude latitude time height2m", - "out_name": "hursmin", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "hus": { - "modeling_realm": "atmos", - "standard_name": "specific_humidity", - "units": "1.0", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Specific Humidity", - "comment": "", - "dimensions": "longitude latitude plev8 time", - "out_name": "hus", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - 
"huss": { - "modeling_realm": "atmos", - "standard_name": "specific_humidity", - "units": "1.0", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Near-Surface Specific Humidity", - "comment": "Near-surface (usually, 2 meter) specific humidity.", - "dimensions": "longitude latitude time height2m", - "out_name": "huss", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrro": { - "modeling_realm": "land", - "standard_name": "runoff_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Runoff", - "comment": "The total run-off (including drainage through the base of the soil model) per unit area leaving the land portion of the grid cell.", - "dimensions": "longitude latitude time", - "out_name": "mrro", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrso": { - "modeling_realm": "land", - "standard_name": "soil_moisture_content", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Total Soil Moisture Content", - "comment": "the mass per unit area (summed over all soil layers) of water in all phases.", - "dimensions": "longitude latitude time", - "out_name": "mrso", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrsos": { - "modeling_realm": "land", - "standard_name": "moisture_content_of_soil_layer", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Moisture in Upper Portion of Soil Column", - "comment": "The mass of water in all phases in the upper 10cm of the soil layer.", - "dimensions": "longitude latitude time sdepth1", - "out_name": "mrsos", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "pr": { - "modeling_realm": "atmos", - "standard_name": "precipitation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Precipitation", - "comment": "includes both liquid and solid phases", - "dimensions": "longitude latitude time", - "out_name": "pr", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prc": { - "modeling_realm": "atmos", - "standard_name": "convective_precipitation_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Convective Precipitation", - "comment": "Convective precipitation at surface; includes both liquid and solid phases.", - "dimensions": "longitude latitude time", - "out_name": "prc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "prsn": { - "modeling_realm": "atmos", - "standard_name": "snowfall_flux", - "units": "kg m-2 s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Snowfall Flux", - "comment": "at surface; includes precipitation of all forms of water in the solid phase", - "dimensions": "longitude latitude time", - "out_name": "prsn", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", 
- "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "psl": { - "modeling_realm": "atmos", - "standard_name": "air_pressure_at_sea_level", - "units": "Pa", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Sea Level Pressure", - "comment": "Sea Level Pressure", - "dimensions": "longitude latitude time", - "out_name": "psl", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlds": { - "modeling_realm": "atmos", - "standard_name": "surface_downwelling_longwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Downwelling Longwave Radiation", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "rlds", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlus": { - "modeling_realm": "atmos", - "standard_name": "surface_upwelling_longwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Upwelling Longwave Radiation", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "rlus", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rlut": { - "modeling_realm": "atmos", - "standard_name": "toa_outgoing_longwave_flux", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "TOA Outgoing Longwave Radiation", - "comment": "at the top of the atmosphere (to be compared with satellite measurements)", - "dimensions": "longitude latitude time", - "out_name": "rlut", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsds": { - "modeling_realm": "atmos", - "standard_name": "surface_downwelling_shortwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Downwelling Shortwave Radiation", - "comment": "surface solar irradiance for UV calculations", - "dimensions": "longitude latitude time", - "out_name": "rsds", - "type": "real", - "positive": "down", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rsus": { - "modeling_realm": "atmos", - "standard_name": "surface_upwelling_shortwave_flux_in_air", - "units": "W m-2", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Upwelling Shortwave Radiation", - "comment": "", - "dimensions": "longitude latitude time", - "out_name": "rsus", - "type": "real", - "positive": "up", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sfcWind": { - "modeling_realm": "atmos", - "standard_name": "wind_speed", - "units": "m s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Daily-Mean Near-Surface Wind Speed", - "comment": "near-surface (usually, 10 meters) wind speed.", - "dimensions": "longitude latitude time height10m", - "out_name": "sfcWind", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sfcWindmax": { - "modeling_realm": "atmos", - "standard_name": "wind_speed", - "units": "m s-1", - "cell_methods": 
"area: mean time: maximum", - "cell_measures": "area: areacella", - "long_name": "Daily Maximum Near-Surface Wind Speed", - "comment": "Daily maximum near-surface (usually, 10 meters) wind speed.", - "dimensions": "longitude latitude time height10m", - "out_name": "sfcWindmax", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "snc": { - "modeling_realm": "landIce land", - "standard_name": "surface_snow_area_fraction", - "units": "%", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Snow Area Fraction", - "comment": "Fraction of each grid cell that is occupied by snow that rests on land portion of cell.", - "dimensions": "longitude latitude time", - "out_name": "snc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "snw": { - "modeling_realm": "landIce land", - "standard_name": "surface_snow_amount", - "units": "kg m-2", - "cell_methods": "area: mean where land time: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Snow Amount", - "comment": "The mass of surface snow on the land portion of the grid cell divided by the land area in the grid cell; reported as missing where the land fraction is 0; excludes snow on vegetation canopy or on sea ice.", - "dimensions": "longitude latitude time", - "out_name": "snw", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ta": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Air Temperature", - "comment": "Air Temperature", - "dimensions": "longitude latitude plev8 time", - "out_name": "ta", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tas": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Near-Surface Air Temperature", - "comment": "near-surface (usually, 2 meter) air temperature", - "dimensions": "longitude latitude time height2m", - "out_name": "tas", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tasmax": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: mean time: maximum", - "cell_measures": "area: areacella", - "long_name": "Daily Maximum Near-Surface Air Temperature", - "comment": "maximum near-surface (usually, 2 meter) air temperature (add cell_method attribute 'time: max')", - "dimensions": "longitude latitude time height2m", - "out_name": "tasmax", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tasmin": { - "modeling_realm": "atmos", - "standard_name": "air_temperature", - "units": "K", - "cell_methods": "area: mean time: minimum", - "cell_measures": "area: areacella", - "long_name": "Daily Minimum Near-Surface Air Temperature", - "comment": "minimum near-surface (usually, 2 meter) air temperature (add cell_method attribute 'time: min')", - "dimensions": "longitude latitude time height2m", - "out_name": "tasmin", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - 
"ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "tslsi": { - "modeling_realm": "land", - "standard_name": "surface_temperature", - "units": "K", - "cell_methods": "area: time: mean (comment: over land and sea ice)", - "cell_measures": "area: areacella", - "long_name": "Surface Temperature Where Land or Sea Ice", - "comment": "'skin' temperature of all surfaces except open ocean.", - "dimensions": "longitude latitude time", - "out_name": "tslsi", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "ua": { - "modeling_realm": "atmos", - "standard_name": "eastward_wind", - "units": "m s-1", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Eastward Wind", - "comment": "", - "dimensions": "longitude latitude plev8 time", - "out_name": "ua", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "uas": { - "modeling_realm": "atmos", - "standard_name": "eastward_wind", - "units": "m s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Eastward Near-Surface Wind", - "comment": "Eastward component of the near-surface (usually, 10 meters) wind", - "dimensions": "longitude latitude time height10m", - "out_name": "uas", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "va": { - "modeling_realm": "atmos", - "standard_name": "northward_wind", - "units": "m s-1", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Northward Wind", - "comment": "", - "dimensions": "longitude latitude plev8 time", - "out_name": "va", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "vas": { - "modeling_realm": "atmos", - "standard_name": "northward_wind", - "units": "m s-1", - "cell_methods": "area: time: mean", - "cell_measures": "area: areacella", - "long_name": "Northward Near-Surface Wind", - "comment": "Northward component of the near surface wind", - "dimensions": "longitude latitude time height10m", - "out_name": "vas", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "wap": { - "modeling_realm": "atmos", - "standard_name": "lagrangian_tendency_of_air_pressure", - "units": "Pa s-1", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "omega (=dp/dt)", - "comment": "Omega (vertical velocity in pressure coordinates, positive downwards)", - "dimensions": "longitude latitude plev8 time", - "out_name": "wap", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zg": { - "modeling_realm": "atmos", - "standard_name": "geopotential_height", - "units": "m", - "cell_methods": "time: mean", - "cell_measures": "area: areacella", - "long_name": "Geopotential Height", - "comment": "", - "dimensions": "longitude latitude plev8 time", - "out_name": "zg", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_formula_terms.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_formula_terms.json deleted file mode 100644 index 82a73c3c4e..0000000000 --- 
a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_formula_terms.json +++ /dev/null @@ -1,142 +0,0 @@ -{ - "variable_entry": { - "a": { - "long_name": "vertical coordinate formula term: a(k)", - "units": "", - "dimensions": "alevel", - "type": "double" - }, - "ps": { - "long_name": "Surface Air Pressure", - "units": "Pa", - "dimensions": "longitude latitude time", - "type": "real" - }, - "p0": { - "long_name": "vertical coordinate formula term: reference pressure", - "units": "Pa", - "dimensions": "", - "type": "" - }, - "b": { - "long_name": "vertical coordinate formula term: b(k)", - "units": "", - "dimensions": "alevel", - "type": "double" - }, - "b_bnds": { - "long_name": "vertical coordinate formula term: b(k+1/2)", - "units": "", - "dimensions": "alevel", - "type": "double" - }, - "ap_bnds": { - "long_name": "vertical coordinate formula term: ap(k+1/2)", - "units": "Pa", - "dimensions": "alevel", - "type": "double" - }, - "ap": { - "long_name": "vertical coordinate formula term: ap(k)", - "units": "Pa", - "dimensions": "alevel", - "type": "double" - }, - "orog": { - "long_name": "Surface Altitude", - "units": "m", - "dimensions": "longitude latitude", - "type": "real" - }, - "ztop": { - "long_name": "height of top of model", - "units": "m", - "dimensions": "", - "type": "" - }, - "ptop": { - "long_name": "pressure at top of model", - "units": "Pa", - "dimensions": "", - "type": "" - }, - "a_bnds": { - "long_name": "vertical coordinate formula term: a(k+1/2)", - "units": "", - "dimensions": "alevel", - "type": "double" - }, - "depth_c": { - "long_name": "vertical coordinate formula term: depth_c", - "units": "", - "dimensions": "", - "type": "double" - }, - "nsigma": { - "long_name": "vertical coordinate formula term: nsigma", - "units": "", - "dimensions": "", - "type": "integer" - }, - "href": { - "long_name": "vertical coordinate formula term: href", - "units": "", - "dimensions": "", - "type": "double" - }, - "zlev": { - "long_name": "vertical coordinate formula term: zlev(k)", - "units": "", - "dimensions": "olevel", - "type": "double" - }, - "zlev_bnds": { - "long_name": "vertical coordinate formula term: zlev(k+1/2)", - "units": "", - "dimensions": "olevel", - "type": "double" - }, - "z1": { - "long_name": "vertical coordinate formula term: z1", - "units": "", - "dimensions": "", - "type": "double" - }, - "z2": { - "long_name": "vertical coordinate formula term: z2", - "units": "", - "dimensions": "", - "type": "double" - }, - "sigma_bnds": { - "long_name": "vertical coordinate formula term: sigma(k+1/2)", - "units": "", - "dimensions": "olevel", - "type": "double" - }, - "depth": { - "long_name": "Sea Floor Depth: formula term: thetao", - "units": "m", - "dimensions": "longitude latitude", - "type": "real" - }, - "eta": { - "long_name": "Sea Surface Height formula term: thetao", - "units": "m", - "dimensions": "longitude latitude time", - "type": "real" - }, - "k_c": { - "long_name": "vertical coordinate formula term: k_c", - "units": "", - "dimensions": "", - "type": "integer" - }, - "sigma": { - "long_name": "vertical coordinate formula term: sigma(k)", - "units": "", - "dimensions": "olevel", - "type": "double" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_fx.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_fx.json deleted file mode 100644 index 04c8ee60a4..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_fx.json +++ /dev/null @@ -1,137 +0,0 @@ -{ - "Header": { - "data_specs_version": "01.00.10", - "table_id": "Table fx", - "realm": "land", - 
"frequency": "fx", - "cmor_version": "3.2", - "table_date": "10 May 2017", - "missing_value": "1e20", - "product": "model-output", - "approx_interval": "0.00000", - "generic_levels": "alevel", - "mip_era": "CMIP6", - "Conventions": "CF-1.7 CMIP-6.0" - }, - "variable_entry": { - "areacella": { - "modeling_realm": "atmos land", - "standard_name": "cell_area", - "units": "m2", - "cell_methods": "area: mean", - "cell_measures": "area: areacella", - "long_name": "Atmosphere Grid-Cell Area", - "comment": "For atmospheres with more than 1 mesh (e.g., staggered grids), report areas that apply to surface vertical fluxes of energy.", - "dimensions": "longitude latitude", - "out_name": "areacella", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "mrsofc": { - "modeling_realm": "land", - "standard_name": "soil_moisture_content_at_field_capacity", - "units": "kg m-2", - "cell_methods": "area: mean", - "cell_measures": "area: areacella", - "long_name": "Capacity of Soil to Store Water", - "comment": "'reported *where land*: divide the total water holding capacity of all the soil in the grid cell by the land area in the grid cell; reported as *missing* where the land fraction is 0.'", - "dimensions": "longitude latitude", - "out_name": "mrsofc", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "orog": { - "modeling_realm": "land", - "standard_name": "surface_altitude", - "units": "m", - "cell_methods": "area: mean", - "cell_measures": "area: areacella", - "long_name": "Surface Altitude", - "comment": "The surface called 'surface' means the lower boundary of the atmosphere. Altitude is the (geometric) height above the geoid, which is the reference geopotential surface. 
The geoid is similar to mean sea level.", - "dimensions": "longitude latitude", - "out_name": "orog", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "rootd": { - "modeling_realm": "land", - "standard_name": "root_depth", - "units": "m", - "cell_methods": "area: mean", - "cell_measures": "area: areacella", - "long_name": "Maximum Root Depth", - "comment": "report the maximum soil depth reachable by plant roots (if defined in model), i.e., the maximum soil depth from which they can extract moisture; report as *missing* where the land fraction is 0.", - "dimensions": "longitude latitude", - "out_name": "rootd", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sftgif": { - "modeling_realm": "land", - "standard_name": "land_ice_area_fraction", - "units": "%", - "cell_methods": "area: mean", - "cell_measures": "area: areacella", - "long_name": "Fraction of Grid Cell Covered with Glacier", - "comment": "Fraction of grid cell covered by land ice (ice sheet, ice shelf, ice cap, glacier)", - "dimensions": "longitude latitude typeli", - "out_name": "sftgif", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "sftlf": { - "modeling_realm": "atmos", - "standard_name": "land_area_fraction", - "units": "%", - "cell_methods": "area: mean", - "cell_measures": "area: areacella", - "long_name": "Land Area Fraction", - "comment": "Please express 'X_area_fraction' as the percentage of horizontal area occupied by X.", - "dimensions": "longitude latitude typeland", - "out_name": "sftlf", - "type": "real", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - }, - "zfull": { - "modeling_realm": "atmos", - "standard_name": "height_above_reference_ellipsoid", - "units": "m", - "cell_methods": "area: mean", - "cell_measures": "area: areacella", - "long_name": "Altitude of Model Full-Levels", - "comment": "", - "dimensions": "longitude latitude alevel", - "out_name": "zfull", - "type": "float", - "positive": "", - "valid_min": "", - "valid_max": "", - "ok_min_mean_abs": "", - "ok_max_mean_abs": "" - } - } -} diff --git a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_grids.json b/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_grids.json deleted file mode 100644 index c6e2d179d9..0000000000 --- a/esmvaltool/cmor/tables/cmip6/Tables/CMIP6_grids.json +++ /dev/null @@ -1,154 +0,0 @@ -{ - "Header": { - "product": "output", - "cmor_version": "3.2", - "Conventions": "CF-1.7 CMIP-6.0", - "table_id": "Table grids", - "data_specs_version": "01.00.10", - "missing_value": "1e20", - "table_date": "10 May 2017" - }, - "mapping_entry": { - "sample_user_mapping": { - "parameter1": "false_easting", - "coordinates": "rlon rlat", - "parameter2": "false_northing" - } - }, - "axis_entry": { - "grid_latitude": { - "long_name": "latitude in rotated pole grid", - "standard_name": "grid_latitude", - "out_name": "rlat", - "units": "degrees", - "type": "", - "axis": "Y" - }, - "y_deg": { - "long_name": "y coordinate of projection", - "standard_name": "projection_y_coordinate", - "out_name": "y", - "units": "degrees", - "type": "", - "axis": "Y" - }, - "l_index": { - "long_name": "cell index along fourth dimension", - "standard_name": "", - "out_name": "l", - "units": "1", - "type": "integer", - "axis": "" - }, - "grid_longitude": { - "long_name": "longitude in 
rotated pole grid", - "standard_name": "grid_longitude", - "out_name": "rlon", - "units": "degrees", - "type": "", - "axis": "X" - }, - "k_index": { - "long_name": "cell index along third dimension", - "standard_name": "", - "out_name": "k", - "units": "1", - "type": "integer", - "axis": "" - }, - "vertices": { - "long_name": "", - "standard_name": "", - "out_name": "", - "units": "", - "type": "", - "axis": "" - }, - "x_deg": { - "long_name": "x coordinate of projection", - "standard_name": "projection_x_coordinate", - "out_name": "x", - "units": "degrees", - "type": "", - "axis": "X" - }, - "i_index": { - "long_name": "cell index along first dimension", - "standard_name": "", - "out_name": "i", - "units": "1", - "type": "integer", - "axis": "" - }, - "j_index": { - "long_name": "cell index along second dimension", - "standard_name": "", - "out_name": "j", - "units": "1", - "type": "integer", - "axis": "" - }, - "y": { - "long_name": "y coordinate of projection", - "standard_name": "projection_y_coordinate", - "out_name": "", - "units": "m", - "type": "", - "axis": "Y" - }, - "x": { - "long_name": "x coordinate of projection", - "standard_name": "projection_x_coordinate", - "out_name": "", - "units": "m", - "type": "", - "axis": "X" - }, - "m_index": { - "long_name": "cell index along fifth dimension", - "standard_name": "", - "out_name": "m", - "units": "1", - "type": "integer", - "axis": "" - } - }, - "variable_entry": { - "latitude": { - "dimensions": "longitude latitude", - "valid_min": "-90.0", - "long_name": "", - "standard_name": "", - "out_name": "latitude", - "units": "degrees_north", - "valid_max": "90.0" - }, - "vertices_latitude": { - "dimensions": "vertices longitude latitude", - "valid_min": "-90.0", - "long_name": "", - "standard_name": "", - "out_name": "vertices_latitude", - "units": "degrees_north", - "valid_max": "90.0" - }, - "vertices_longitude": { - "dimensions": "vertices longitude latitude", - "valid_min": "0.0", - "long_name": "", - "standard_name": "", - "out_name": "vertices_longitude", - "units": "degrees_east", - "valid_max": "360.0" - }, - "longitude": { - "dimensions": "longitude latitude", - "valid_min": "0.0", - "long_name": "", - "standard_name": "", - "out_name": "longitude", - "units": "degrees_east", - "valid_max": "360.0" - } - } -} diff --git a/esmvaltool/cmor/tables/custom/CMOR_alb.dat b/esmvaltool/cmor/tables/custom/CMOR_alb.dat deleted file mode 100644 index caa3513fd5..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_alb.dat +++ /dev/null @@ -1,20 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: alb -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: 1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: albedo at the surface -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -type: real -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_amoc.dat b/esmvaltool/cmor/tables/custom/CMOR_amoc.dat deleted file mode 100644 index 0f951078df..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_amoc.dat +++ /dev/null @@ -1,21 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: amoc -!============ -modeling_realm: ocean -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: -units: kg s-1 -cell_methods: time: mean area: where sea -cell_measures: area: areacello -long_name: Atlantic Meridional Overturning Circulation -comment: AMOC at the Rapid array (26.5 N) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: time -type: real -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_clhmtisccp.dat b/esmvaltool/cmor/tables/custom/CMOR_clhmtisccp.dat deleted file mode 100644 index 588264e55a..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_clhmtisccp.dat +++ /dev/null @@ -1,22 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: clhmtisccp -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: ISCCP High Level Medium-Thickness Cloud Area Fraction -comment: at the top of the atmosphere (to be compared with satellite measurements) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -type: real -positive: up -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_clhtkisccp.dat b/esmvaltool/cmor/tables/custom/CMOR_clhtkisccp.dat deleted file mode 100644 index 080fe2e76a..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_clhtkisccp.dat +++ /dev/null @@ -1,22 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: clhtkisccp -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: ISCCP high level thick cloud area fraction -comment: at the top of the atmosphere (to be compared with satellite measurements) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -type: real -positive: up -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_clisccp.dat b/esmvaltool/cmor/tables/custom/CMOR_clisccp.dat deleted file mode 100644 index b86353b3ed..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_clisccp.dat +++ /dev/null @@ -1,21 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: clisccp -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: ISCCP Cloud Area Fraction -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude plevs tau time -out_name: clisccp -type: real -!---------------------------------- -! \ No newline at end of file diff --git a/esmvaltool/cmor/tables/custom/CMOR_cllmtisccp.dat b/esmvaltool/cmor/tables/custom/CMOR_cllmtisccp.dat deleted file mode 100644 index de48310ff6..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_cllmtisccp.dat +++ /dev/null @@ -1,22 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: cllmtisccp -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: ISCCP Low Level Medium-Thickness Cloud Area Fraction -comment: at the top of the atmosphere (to be compared with satellite measurements) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -type: real -positive: up -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_clltkisccp.dat b/esmvaltool/cmor/tables/custom/CMOR_clltkisccp.dat deleted file mode 100644 index 01affe0170..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_clltkisccp.dat +++ /dev/null @@ -1,22 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: clltkisccp -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: ISCCP low level thick cloud area fraction -comment: at the top of the atmosphere (to be compared with satellite measurements) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -type: real -positive: up -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_clmmtisccp.dat b/esmvaltool/cmor/tables/custom/CMOR_clmmtisccp.dat deleted file mode 100644 index e2fa6bde2c..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_clmmtisccp.dat +++ /dev/null @@ -1,22 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: clmmtisccp -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: ISCCP Middle Level Medium-Thickness Cloud Area Fraction -comment: at the top of the atmosphere (to be compared with satellite measurements) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -type: real -positive: up -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_clmtkisccp.dat b/esmvaltool/cmor/tables/custom/CMOR_clmtkisccp.dat deleted file mode 100644 index 4e3e41ceea..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_clmtkisccp.dat +++ /dev/null @@ -1,22 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: clmtkisccp -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: ISCCP Middle Level Thick Cloud Area Fraction -comment: at the top of the atmosphere (to be compared with satellite measurements) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -type: real -positive: up -!---------------------------------- -! 
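Note on the removed custom tables: they all share one simple key/value layout (a `SOURCE:` header, `variable_entry:` or `axis_entry:` opening each block, `!` starting comments and dashed rules, and one `attribute: value` pair per line). For reference, a minimal parsing sketch; the helper name is hypothetical and this is not ESMValTool's own table reader:

def parse_custom_table(path):
    """Parse a custom CMOR .dat table into a header dict and per-entry dicts."""
    header, entries, current = {}, {}, None
    with open(path) as infile:
        for raw in infile:
            line = raw.split('!', 1)[0].strip()  # drop '!' comments and dashed rules
            if ':' not in line:
                continue
            key, value = (part.strip() for part in line.split(':', 1))
            if key in ('variable_entry', 'axis_entry'):
                current = entries.setdefault(value, {})
            elif current is None:
                header[key] = value   # e.g. SOURCE: CMIP5
            else:
                current[key] = value  # e.g. units, long_name, valid_min

    return header, entries

Applied to one of the files above, for example CMOR_clisccp.dat, this would yield a 'clisccp' entry whose units and dimensions match the lines shown in the diff.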
diff --git a/esmvaltool/cmor/tables/custom/CMOR_cltStderr.dat b/esmvaltool/cmor/tables/custom/CMOR_cltStderr.dat deleted file mode 100644 index 507e62e425..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_cltStderr.dat +++ /dev/null @@ -1,26 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: cltStderr -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: % -cell_methods: time: mean -cell_measures: area: areacella -long_name: Total Cloud Fraction Error -comment: for the whole atmospheric column, as seen from the surface or the top of the atmosphere. Include both large-scale and convective cloud. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: cltStderr -type: real -valid_min: 0 -valid_max: 0.01 -ok_min_mean_abs: 0 -ok_max_mean_abs: 0.01 -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_coordinates.dat b/esmvaltool/cmor/tables/custom/CMOR_coordinates.dat deleted file mode 100644 index 8ad131fb4b..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_coordinates.dat +++ /dev/null @@ -1,106 +0,0 @@ -!============ -axis_entry: longitude -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: longitude -units: degrees_east -axis: X ! X, Y, Z, T (default: undeclared) -long_name: longitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lon -valid_min: 0.0 -valid_max: 360.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - -!============ -axis_entry: latitude -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: latitude -units: degrees_north -axis: Y ! X, Y, Z, T (default: undeclared) -long_name: latitude -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: lat -valid_min: -90.0 -valid_max: 90.0 -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - -!============ -axis_entry: plevs -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: air_pressure -units: Pa -axis: Z ! X, Y, Z, T (default: undeclared) -positive: down -long_name: pressure -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: plev -valid_min: 0.0 -valid_max: 110000.0 -stored_direction: decreasing -type: double -must_have_bounds: no -!---------------------------------- -! - - -!============ -axis_entry: time -!============ -!---------------------------------- -! Axis attributes: -!---------------------------------- -standard_name: time -units: days since ? -axis: T ! X, Y, Z, T (default: undeclared) -long_name: time -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: time -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! - -!============ -axis_entry: tau -!============ -!---------------------------------- -! 
Axis attributes: -!---------------------------------- -standard_name: atmosphere_optical_thickness_due_to_cloud -units: 1 -long_name: cloud optical thickness -!---------------------------------- -! Additional axis information: -!---------------------------------- -out_name: tau -stored_direction: increasing -type: double -must_have_bounds: yes -!---------------------------------- -! \ No newline at end of file diff --git a/esmvaltool/cmor/tables/custom/CMOR_dos.dat b/esmvaltool/cmor/tables/custom/CMOR_dos.dat deleted file mode 100644 index ef24f2e740..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_dos.dat +++ /dev/null @@ -1,27 +0,0 @@ -SOURCE: CMIP5 (adapted from mrso) -!============ -variable_entry: dos -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: m3 m-3 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Degree of Soil Saturation -comment: (unitless) degree of soil saturation for comparing mass based models with volumetric observations. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: dos -type: real -valid_min: 0 -valid_max: 2 -ok_min_mean_abs: 0 -ok_max_mean_abs: 1 -!---------------------------------- -! - diff --git a/esmvaltool/cmor/tables/custom/CMOR_dosStderr.dat b/esmvaltool/cmor/tables/custom/CMOR_dosStderr.dat deleted file mode 100644 index 41cbe68923..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_dosStderr.dat +++ /dev/null @@ -1,24 +0,0 @@ -SOURCE: CMIP5 (adapted from mrso) -!============ -variable_entry: dosStderr -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: m3 m-3 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Degree of Soil Saturation Error -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: dosStderr -type: real -valid_min: 0.0 -valid_max: 1.0 -!---------------------------------- -! - diff --git a/esmvaltool/cmor/tables/custom/CMOR_gtfgco2.dat b/esmvaltool/cmor/tables/custom/CMOR_gtfgco2.dat deleted file mode 100644 index dba2aa00c6..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_gtfgco2.dat +++ /dev/null @@ -1,21 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: gtfgco2 -!============ -modeling_realm: ocnBgchem -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: kg s-1 -cell_methods: time: mean area: where sea -cell_measures: area: areacello -long_name: Global Total Surface Downward CO2 Flux -comment: Gas exchange flux of CO2 (positive into ocean) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: time -type: real -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_husStderr.dat b/esmvaltool/cmor/tables/custom/CMOR_husStderr.dat deleted file mode 100644 index b053d1d4fd..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_husStderr.dat +++ /dev/null @@ -1,25 +0,0 @@ -SOURCE: obs4mips -!============ -variable_entry: husStderr -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: -units: 1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Specific Humidity Error -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude plevs time -out_name: husStderr -type: real -valid_min: -0.000299 -valid_max: 0.02841 -ok_min_mean_abs: -0.0003539 -ok_max_mean_abs: 0.01041 -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_iwpStderr.dat b/esmvaltool/cmor/tables/custom/CMOR_iwpStderr.dat deleted file mode 100644 index 088b372487..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_iwpStderr.dat +++ /dev/null @@ -1,25 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: iwpStderr -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: kg m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Condensed Ice Path Error -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: iwpStderr -type: real -valid_min: 0.0 -valid_max: 5.0 -ok_min_mean_abs: 0.0 -ok_max_mean_abs: 1.0 -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_lwcre.dat b/esmvaltool/cmor/tables/custom/CMOR_lwcre.dat deleted file mode 100644 index 1088e87bb0..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_lwcre.dat +++ /dev/null @@ -1,22 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: lwcre -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: TOA Longwave Cloud Radiative Effect -comment: at the top of the atmosphere (to be compared with satellite measurements) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -type: real -positive: up -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_lwp.dat b/esmvaltool/cmor/tables/custom/CMOR_lwp.dat deleted file mode 100644 index 2e4dc38d24..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_lwp.dat +++ /dev/null @@ -1,24 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: lwp -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: kg m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Liquid Water Path -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -type: real -valid_min: 0.0 -valid_max: 5.0 -ok_min_mean_abs: 0.0 -ok_max_mean_abs: 1.0 -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_lwpStderr.dat b/esmvaltool/cmor/tables/custom/CMOR_lwpStderr.dat deleted file mode 100644 index fd2bf63555..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_lwpStderr.dat +++ /dev/null @@ -1,25 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: lwpStderr -!============ -modeling_realm: atmos -!---------------------------------- -! 
Variable attributes: -!---------------------------------- -standard_name: -units: kg m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Liquid Water Path Error -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: lwpStderr -type: real -valid_min: 0.0 -valid_max: 5.0 -ok_min_mean_abs: 0.0 -ok_max_mean_abs: 1.0 -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_nbp_grid.dat b/esmvaltool/cmor/tables/custom/CMOR_nbp_grid.dat deleted file mode 100644 index 1d5cd5ddce..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_nbp_grid.dat +++ /dev/null @@ -1,23 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: nbp_grid -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: kg m-2 s-1 -cell_methods: time: mean area: mean -cell_measures: area: areacella -long_name: Carbon Mass Flux out of Atmosphere due to Net Biospheric Production on Land (relative to grid cell area) -comment: This is the net mass flux of carbon between land and atmosphere calculated as photosynthesis MINUS the sum of plant and soil respiration, carbon fluxes from fire, harvest, grazing and land use change. Positive flux is into the land. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: nbp_grid -type: real -positive: down -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_netcre.dat b/esmvaltool/cmor/tables/custom/CMOR_netcre.dat deleted file mode 100644 index 6525abd2b6..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_netcre.dat +++ /dev/null @@ -1,22 +0,0 @@ -SOURCE: CCMI1 -!============ -variable_entry: netcre -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: TOA Net Cloud Radiative Effect -comment: at the top of the atmosphere (to be compared with satellite measurements) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -type: real -positive: up -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_od550aerStderr.dat b/esmvaltool/cmor/tables/custom/CMOR_od550aerStderr.dat deleted file mode 100644 index 5a43465e9a..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_od550aerStderr.dat +++ /dev/null @@ -1,22 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: od550aerStderr -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: 1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Ambient Aerosol Optical Thickness at 550 nm Error -comment: AOD error from the ambient aerosols (i.e., includes aerosol water). Does not include AOD from stratospheric aerosols if these are prescribed but includes other possible background aerosol types. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: od550aerStderr -type: real -!---------------------------------- -!
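The lwcre and netcre entries above (together with swcre further below) are derived cloud-radiative-effect variables rather than raw model output. The usual definition is clear-sky minus all-sky outgoing flux at the top of the atmosphere; a minimal numpy sketch of that derivation, assuming the inputs are TOA flux arrays in W m-2:

import numpy as np

def cloud_radiative_effects(rlut, rlutcs, rsut, rsutcs):
    """Clear-sky minus all-sky outgoing TOA fluxes (all inputs in W m-2)."""
    lwcre = np.asarray(rlutcs) - np.asarray(rlut)   # longwave CRE (usually > 0)
    swcre = np.asarray(rsutcs) - np.asarray(rsut)   # shortwave CRE (usually < 0)
    return lwcre, swcre, lwcre + swcre              # netcre = lwcre + swcre

With this sign convention a positive value means clouds reduce the outgoing radiation, consistent with "positive: up" in the tables above.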
diff --git a/esmvaltool/cmor/tables/custom/CMOR_od870aerStderr.dat b/esmvaltool/cmor/tables/custom/CMOR_od870aerStderr.dat deleted file mode 100644 index fb8c316ca4..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_od870aerStderr.dat +++ /dev/null @@ -1,22 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: od870aerStderr -!============ -modeling_realm: aerosol -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: 1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Ambient Aerosol Optical Thickness at 870 nm Error -comment: AOD error from the ambient aerosols (i.e., includes aerosol water). Does not include AOD from stratospheric aerosols if these are prescribed but includes other possible background aerosol types. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: od870aerStderr -type: real -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_prStderr.dat b/esmvaltool/cmor/tables/custom/CMOR_prStderr.dat deleted file mode 100644 index 4e8ecd30e7..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_prStderr.dat +++ /dev/null @@ -1,26 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: prStderr -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: kg m-2 s-1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Precipitation Standard Error -comment: at surface; includes both liquid and solid phases from all types of clouds (both large-scale and convective) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: prStderr -type: real -valid_min: 0 -valid_max: 0.001 -ok_min_mean_abs: 0 -ok_max_mean_abs: 0.001 -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_rlns.dat b/esmvaltool/cmor/tables/custom/CMOR_rlns.dat deleted file mode 100644 index 05841ab7e2..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_rlns.dat +++ /dev/null @@ -1,22 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: rlns -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Net downward Longwave Radiation -comment: at the surface (to be compared with satellite measurements) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -type: real -positive: up -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_rluscs.dat b/esmvaltool/cmor/tables/custom/CMOR_rluscs.dat deleted file mode 100644 index df4d2601ed..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_rluscs.dat +++ /dev/null @@ -1,26 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: rluscs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Upwelling Clear-Sky Longwave Radiation -!---------------------------------- -!
Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rluscs -type: real -positive: up -valid_min: 43.75 -valid_max: 658 -ok_min_mean_abs: 325.6 -ok_max_mean_abs: 376.3 -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_rlut.dat b/esmvaltool/cmor/tables/custom/CMOR_rlut.dat deleted file mode 100644 index 66cae228e6..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_rlut.dat +++ /dev/null @@ -1,27 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: rlut -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: toa_outgoing_longwave_flux -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: TOA Outgoing Longwave Radiation -comment: at the top of the atmosphere (to be compared with satellite measurements) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rlut -type: real -positive: up -valid_min: 67.48 -valid_max: 383.2 -ok_min_mean_abs: 207.4 -ok_max_mean_abs: 234.4 -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_rlutcs.dat b/esmvaltool/cmor/tables/custom/CMOR_rlutcs.dat deleted file mode 100644 index 204e4bb4b0..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_rlutcs.dat +++ /dev/null @@ -1,26 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: rlutcs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: toa_outgoing_longwave_flux_assuming_clear_sky -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: TOA Outgoing Clear-Sky Longwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rlutcs -type: real -positive: up -valid_min: 70.59 -valid_max: 377.5 -ok_min_mean_abs: 228.9 -ok_max_mean_abs: 260.4 -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_rsns.dat b/esmvaltool/cmor/tables/custom/CMOR_rsns.dat deleted file mode 100644 index 966c3bb6cf..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_rsns.dat +++ /dev/null @@ -1,22 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: rsns -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Net downward Shortwave Radiation -comment: at the surface (to be compared with satellite measurements) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -type: real -positive: up -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_rsnt.dat b/esmvaltool/cmor/tables/custom/CMOR_rsnt.dat deleted file mode 100644 index 32a7da45de..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_rsnt.dat +++ /dev/null @@ -1,22 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: rsnt -!============ -modeling_realm: atmos -!---------------------------------- -!
Variable attributes: -!---------------------------------- -standard_name: -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: TOA Net downward Shortwave Radiation -comment: at the top of the atmosphere (to be compared with satellite measurements) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -type: real -positive: up -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_rsut.dat b/esmvaltool/cmor/tables/custom/CMOR_rsut.dat deleted file mode 100644 index a2fd561b34..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_rsut.dat +++ /dev/null @@ -1,27 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: rsut -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: toa_outgoing_shortwave_flux -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: TOA Outgoing Shortwave Radiation -comment: at the top of the atmosphere -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rsut -type: real -positive: up -valid_min: -0.02689 -valid_max: 421.9 -ok_min_mean_abs: 96.72 -ok_max_mean_abs: 114.1 -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_rsutcs.dat b/esmvaltool/cmor/tables/custom/CMOR_rsutcs.dat deleted file mode 100644 index 3039856aec..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_rsutcs.dat +++ /dev/null @@ -1,26 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: rsutcs -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: toa_outgoing_shortwave_flux_assuming_clear_sky -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: TOA Outgoing Clear-Sky Shortwave Radiation -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: rsutcs -type: real -positive: up -valid_min: 0 -valid_max: 444 -ok_min_mean_abs: 54.7 -ok_max_mean_abs: 73.36 -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_rtnt.dat b/esmvaltool/cmor/tables/custom/CMOR_rtnt.dat deleted file mode 100644 index b3697aa344..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_rtnt.dat +++ /dev/null @@ -1,22 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: rtnt -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: TOA Net downward Total Radiation -comment: at the top of the atmosphere (to be compared with satellite measurements) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -type: real -positive: up -!---------------------------------- -! 
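Several of the removed tables (rlut, rlutcs, rsut, rsutcs) carry valid_min/valid_max and ok_min_mean_abs/ok_max_mean_abs fields. A sketch of how such bounds are typically applied, with range limits checked pointwise and the ok_* limits checked against the mean of absolute values; the helper name is hypothetical and this is not the actual CMOR checker:

import numpy as np

def check_against_entry(data, entry):
    """Raise if data violates the entry's range and mean-absolute-value bounds."""
    data = np.asarray(data, dtype=float)
    if entry.get('valid_min') and data.min() < float(entry['valid_min']):
        raise ValueError('value below valid_min')
    if entry.get('valid_max') and data.max() > float(entry['valid_max']):
        raise ValueError('value above valid_max')
    mean_abs = float(np.abs(data).mean())
    if entry.get('ok_min_mean_abs') and mean_abs < float(entry['ok_min_mean_abs']):
        raise ValueError('mean absolute value suspiciously low')
    if entry.get('ok_max_mean_abs') and mean_abs > float(entry['ok_max_mean_abs']):
        raise ValueError('mean absolute value suspiciously high')

The string-valued entries produced by the parser sketched earlier can be passed straight in, since empty attributes are skipped by the get() checks.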
diff --git a/esmvaltool/cmor/tables/custom/CMOR_sm.dat b/esmvaltool/cmor/tables/custom/CMOR_sm.dat deleted file mode 100644 index 31858bb292..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_sm.dat +++ /dev/null @@ -1,26 +0,0 @@ -SOURCE: CMIP5 (adapted from mrsos) -!============ -variable_entry: sm -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: m3 m-3 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Volumetric Moisture in Upper Portion of Soil Column -comment: the volume of water in all phases in a thin surface soil layer. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: sm -type: real -valid_min: 0 -valid_max: 1 -ok_min_mean_abs: 0 -ok_max_mean_abs: 1 -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_smStderr.dat b/esmvaltool/cmor/tables/custom/CMOR_smStderr.dat deleted file mode 100644 index 7da194878c..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_smStderr.dat +++ /dev/null @@ -1,25 +0,0 @@ -SOURCE: CMIP5 (adapted from mrsos) -!============ -variable_entry: smStderr -!============ -modeling_realm: land -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: m3 m-3 -cell_methods: time: mean area: mean where land -cell_measures: area: areacella -long_name: Volumetric Moisture in Upper Portion of Soil Column Error -comment: Error of the volume of water in all phases in a thin surface soil layer. -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: smStderr -type: real -valid_min: 0.0 -valid_max: 1.0 -!---------------------------------- -! - diff --git a/esmvaltool/cmor/tables/custom/CMOR_swcre.dat b/esmvaltool/cmor/tables/custom/CMOR_swcre.dat deleted file mode 100644 index 096d6ca77e..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_swcre.dat +++ /dev/null @@ -1,22 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: swcre -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: W m-2 -cell_methods: time: mean -cell_measures: area: areacella -long_name: TOA Shortwave Cloud Radiative Effect -comment: at the top of the atmosphere (to be compared with satellite measurements) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -type: real -positive: up -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_tasa.dat b/esmvaltool/cmor/tables/custom/CMOR_tasa.dat deleted file mode 100644 index 1ad03eb64e..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_tasa.dat +++ /dev/null @@ -1,25 +0,0 @@ -SOURCE: CMIP5 (adapted from tas) -!============ -variable_entry: tasa -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: K -cell_methods: time: mean -cell_measures: area: areacella -long_name: Near-Surface Air Temperature Anomaly -!---------------------------------- -! 
Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: tasa -type: real -valid_min: -20.0 -valid_max: 20.0 -ok_min_mean_abs: -20 -ok_max_mean_abs: 20. -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_toz.dat b/esmvaltool/cmor/tables/custom/CMOR_toz.dat deleted file mode 100644 index 6d319a171b..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_toz.dat +++ /dev/null @@ -1,23 +0,0 @@ -SOURCE: CCMI1 -!============ -variable_entry: toz -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: DU -cell_methods: time: mean -cell_measures: area: areacella -long_name: Total Ozone Column -comment: total ozone column in DU -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -type: real -valid_min: 0.0 -valid_max: 5000.0 -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_tozStderr.dat b/esmvaltool/cmor/tables/custom/CMOR_tozStderr.dat deleted file mode 100644 index 240247c84c..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_tozStderr.dat +++ /dev/null @@ -1,23 +0,0 @@ -SOURCE: CCMI1 -!============ -variable_entry: tozStderr -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: DU -cell_methods: time: mean -cell_measures: area: areacella -long_name: Total Ozone Column Error -comment: total ozone column in DU -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -type: real -valid_min: 0.0 -valid_max: 5000.0 -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_tro3prof.dat b/esmvaltool/cmor/tables/custom/CMOR_tro3prof.dat deleted file mode 100644 index efe6e651c0..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_tro3prof.dat +++ /dev/null @@ -1,23 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: tro3prof -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: 1e-9 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Ozone Volume Mixing Ratio -comment: -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: latitude plevs time -type: real -valid_min: 0.0 -valid_max: 1.0 -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_tro3profStderr.dat b/esmvaltool/cmor/tables/custom/CMOR_tro3profStderr.dat deleted file mode 100644 index d5fc63027d..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_tro3profStderr.dat +++ /dev/null @@ -1,23 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: tro3profStderr -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: 1e-9 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Ozone Volume Mixing Ratio Error -comment: -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: latitude plevs time -type: real -valid_min: 0.0 -valid_max: 1.0 -!---------------------------------- -! 
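Many of the deleted entries are *Stderr companions (tozStderr, tro3profStderr, and so on) holding the standard error of an observational field. One plausible use, sketched here with a hypothetical helper, is to weight model-minus-observation differences by that error when computing a misfit:

import numpy as np

def error_weighted_rmse(model, obs, obs_stderr, floor=1e-12):
    """RMSE of model minus obs, weighted by 1/stderr**2 (stderr floored to avoid /0)."""
    diff = np.asarray(model, dtype=float) - np.asarray(obs, dtype=float)
    weights = 1.0 / np.maximum(np.asarray(obs_stderr, dtype=float), floor) ** 2
    return float(np.sqrt((weights * diff ** 2).sum() / weights.sum()))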
diff --git a/esmvaltool/cmor/tables/custom/CMOR_tsStderr.dat b/esmvaltool/cmor/tables/custom/CMOR_tsStderr.dat deleted file mode 100644 index 4bd6530a17..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_tsStderr.dat +++ /dev/null @@ -1,26 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: tsStderr -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: K -cell_methods: time: mean -cell_measures: area: areacella -long_name: Surface Temperature Error -comment: "skin" temperature error (i.e., SST for open ocean) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: tsStderr -type: real -valid_min: 0 -valid_max: 10 -ok_min_mean_abs: 0 -ok_max_mean_abs: 10 -!---------------------------------- -! diff --git a/esmvaltool/cmor/tables/custom/CMOR_xch4.dat b/esmvaltool/cmor/tables/custom/CMOR_xch4.dat deleted file mode 100644 index 779a6da6df..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_xch4.dat +++ /dev/null @@ -1,22 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: xch4 -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: 1 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Column-average Dry-air Mole Fraction of Atmospheric Methane -comment: Satellite retrieved column-average dry-air mole fraction of atmospheric methane (XCH4) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: xch4 -type: real -!---------------------------------- -! \ No newline at end of file diff --git a/esmvaltool/cmor/tables/custom/CMOR_xco2.dat b/esmvaltool/cmor/tables/custom/CMOR_xco2.dat deleted file mode 100644 index dcdde3bbf7..0000000000 --- a/esmvaltool/cmor/tables/custom/CMOR_xco2.dat +++ /dev/null @@ -1,22 +0,0 @@ -SOURCE: CMIP5 -!============ -variable_entry: xco2 -!============ -modeling_realm: atmos -!---------------------------------- -! Variable attributes: -!---------------------------------- -standard_name: -units: 1e-6 -cell_methods: time: mean -cell_measures: area: areacella -long_name: Column-average Dry-air Mole Fraction of Atmospheric Carbon Dioxide -comment: Satellite retrieved column-average dry-air mole fraction of atmospheric carbon dioxide (XCO2) -!---------------------------------- -! Additional variable information: -!---------------------------------- -dimensions: longitude latitude time -out_name: xco2 -type: real -!---------------------------------- -!
\ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_Aday.json b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_Aday.json deleted file mode 100644 index 0f945e4327..0000000000 --- a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_Aday.json +++ /dev/null @@ -1,631 +0,0 @@ -{ - "Header":{ - "#dataRequest_specs_version":"01.00.21", - "#mip_era":"CMIP6", - "Conventions":"CF-1.7 ODS-2.1", - "approx_interval":"1.00000", - "cmor_version":"3.2", - "data_specs_version":"2.1.0", - "generic_levels":"", - "int_missing_value":"-2147483648", - "missing_value":"1e20", - "product":"observations", - "realm":"atmos", - "table_date":"07 March 2018", - "table_id":"Table obs4MIPs_Aday" - }, - "variable_entry":{ - "clt":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Total cloud area fraction for the whole atmospheric column, as seen from the surface or the top of the atmosphere. Includes both large-scale and convective cloud.", - "dimensions":"longitude latitude time", - "frequency":"day", - "long_name":"Total Cloud Fraction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"clt", - "positive":"", - "standard_name":"cloud_area_fraction", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "hfls":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"day", - "long_name":"Surface Upward Latent Heat Flux", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hfls", - "positive":"up", - "standard_name":"surface_upward_latent_heat_flux", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "hfss":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"day", - "long_name":"Surface Upward Sensible Heat Flux", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hfss", - "positive":"up", - "standard_name":"surface_upward_sensible_heat_flux", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "hur":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"The relative humidity with respect to liquid water for T> 0 C, and with respect to ice for T<0 C.", - "dimensions":"longitude latitude plev8 time", - "frequency":"day", - "long_name":"Relative Humidity", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hur", - "positive":"", - "standard_name":"relative_humidity", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "hurs":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"The relative humidity with respect to liquid water for T> 0 C, and with respect to ice for T<0 C.", - "dimensions":"longitude latitude time height2m", - "frequency":"day", - "long_name":"Near-Surface Relative Humidity", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hurs", - "positive":"", - "standard_name":"relative_humidity", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "hursmax":{ - "cell_measures":"", - "cell_methods":"area: mean time: maximum", - "comment":"", - "dimensions":"longitude latitude time height2m", - "frequency":"day", - "long_name":"Surface Daily Maximum Relative Humidity", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hursmax", - "positive":"", - "standard_name":"relative_humidity", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "hursmin":{ - "cell_measures":"", 
- "cell_methods":"area: mean time: minimum", - "comment":"", - "dimensions":"longitude latitude time height2m", - "frequency":"day", - "long_name":"Surface Daily Minimum Relative Humidity", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hursmin", - "positive":"", - "standard_name":"relative_humidity", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "hus":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"", - "dimensions":"longitude latitude plev8 time", - "frequency":"day", - "long_name":"Specific Humidity", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hus", - "positive":"", - "standard_name":"specific_humidity", - "type":"real", - "units":"1", - "valid_max":"", - "valid_min":"" - }, - "huss":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Near-surface (usually, 2 meter) specific humidity.", - "dimensions":"longitude latitude time height2m", - "frequency":"day", - "long_name":"Near-Surface Specific Humidity", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"huss", - "positive":"", - "standard_name":"specific_humidity", - "type":"real", - "units":"1", - "valid_max":"", - "valid_min":"" - }, - "mrro":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"The total run-off (including drainage through the base of the soil model) per unit area leaving the land portion of the grid cell.", - "dimensions":"longitude latitude time", - "frequency":"day", - "long_name":"Total Runoff", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"mrro", - "positive":"", - "standard_name":"runoff_flux", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "mrso":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"the mass per unit area (summed over all soil layers) of water in all phases.", - "dimensions":"longitude latitude time", - "frequency":"day", - "long_name":"Total Soil Moisture Content", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"mrso", - "positive":"", - "standard_name":"soil_moisture_content", - "type":"real", - "units":"kg m-2", - "valid_max":"", - "valid_min":"" - }, - "mrsos":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"The mass of water in all phases in the upper 10cm of the soil layer.", - "dimensions":"longitude latitude time sdepth1", - "frequency":"day", - "long_name":"Moisture in Upper Portion of Soil Column", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"mrsos", - "positive":"", - "standard_name":"moisture_content_of_soil_layer", - "type":"real", - "units":"kg m-2", - "valid_max":"", - "valid_min":"" - }, - "pr":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"includes both liquid and solid phases", - "dimensions":"longitude latitude time", - "frequency":"day", - "long_name":"Precipitation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"pr", - "positive":"", - "standard_name":"precipitation_flux", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "prc":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Convective precipitation at surface; includes both liquid and solid phases.", - "dimensions":"longitude latitude time", - "frequency":"day", - "long_name":"Convective Precipitation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"prc", - "positive":"", - 
"standard_name":"convective_precipitation_flux", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "prsn":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"at surface; includes precipitation of all forms of water in the solid phase", - "dimensions":"longitude latitude time", - "frequency":"day", - "long_name":"Snowfall Flux", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"prsn", - "positive":"", - "standard_name":"snowfall_flux", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "psl":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Sea Level Pressure", - "dimensions":"longitude latitude time", - "frequency":"day", - "long_name":"Sea Level Pressure", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"psl", - "positive":"", - "standard_name":"air_pressure_at_sea_level", - "type":"real", - "units":"Pa", - "valid_max":"", - "valid_min":"" - }, - "rlds":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"day", - "long_name":"Surface Downwelling Longwave Radiation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"rlds", - "positive":"down", - "standard_name":"surface_downwelling_longwave_flux_in_air", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "rlus":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"day", - "long_name":"Surface Upwelling Longwave Radiation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"rlus", - "positive":"up", - "standard_name":"surface_upwelling_longwave_flux_in_air", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "rlut":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"at the top of the atmosphere (to be compared with satellite measurements)", - "dimensions":"longitude latitude time", - "frequency":"day", - "long_name":"TOA Outgoing Longwave Radiation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"rlut", - "positive":"up", - "standard_name":"toa_outgoing_longwave_flux", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "rsds":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"surface solar irradiance for UV calculations", - "dimensions":"longitude latitude time", - "frequency":"day", - "long_name":"Surface Downwelling Shortwave Radiation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"rsds", - "positive":"down", - "standard_name":"surface_downwelling_shortwave_flux_in_air", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "rsus":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"day", - "long_name":"Surface Upwelling Shortwave Radiation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"rsus", - "positive":"up", - "standard_name":"surface_upwelling_shortwave_flux_in_air", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "sfcWind":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"near-surface (usually, 10 meters) wind speed.", - "dimensions":"longitude latitude time height10m", - "frequency":"day", - "long_name":"Daily-Mean Near-Surface Wind Speed", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - 
"out_name":"sfcWind", - "positive":"", - "standard_name":"wind_speed", - "type":"real", - "units":"m s-1", - "valid_max":"", - "valid_min":"" - }, - "sfcWindmax":{ - "cell_measures":"", - "cell_methods":"area: mean time: maximum", - "comment":"Daily maximum near-surface (usually, 10 meters) wind speed.", - "dimensions":"longitude latitude time height10m", - "frequency":"day", - "long_name":"Daily Maximum Near-Surface Wind Speed", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sfcWindmax", - "positive":"", - "standard_name":"wind_speed", - "type":"real", - "units":"m s-1", - "valid_max":"", - "valid_min":"" - }, - "snc":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Fraction of each grid cell that is occupied by snow that rests on land portion of cell.", - "dimensions":"longitude latitude time", - "frequency":"day", - "long_name":"Snow Area Fraction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"snc", - "positive":"", - "standard_name":"surface_snow_area_fraction", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "snw":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"The mass of surface snow on the land portion of the grid cell divided by the land area in the grid cell; reported as missing where the land fraction is 0; excludes snow on vegetation canopy or on sea ice.", - "dimensions":"longitude latitude time", - "frequency":"day", - "long_name":"Surface Snow Amount", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"snw", - "positive":"", - "standard_name":"surface_snow_amount", - "type":"real", - "units":"kg m-2", - "valid_max":"", - "valid_min":"" - }, - "ta":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"Air Temperature", - "dimensions":"longitude latitude plev8 time", - "frequency":"day", - "long_name":"Air Temperature", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"ta", - "positive":"", - "standard_name":"air_temperature", - "type":"real", - "units":"K", - "valid_max":"", - "valid_min":"" - }, - "tas":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"near-surface (usually, 2 meter) air temperature", - "dimensions":"longitude latitude time height2m", - "frequency":"day", - "long_name":"Near-Surface Air Temperature", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"tas", - "positive":"", - "standard_name":"air_temperature", - "type":"real", - "units":"K", - "valid_max":"", - "valid_min":"" - }, - "tasmax":{ - "cell_measures":"", - "cell_methods":"area: mean time: maximum", - "comment":"maximum near-surface (usually, 2 meter) air temperature (add cell_method attribute 'time: max')", - "dimensions":"longitude latitude time height2m", - "frequency":"day", - "long_name":"Daily Maximum Near-Surface Air Temperature", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"tasmax", - "positive":"", - "standard_name":"air_temperature", - "type":"real", - "units":"K", - "valid_max":"", - "valid_min":"" - }, - "tasmin":{ - "cell_measures":"", - "cell_methods":"area: mean time: minimum", - "comment":"minimum near-surface (usually, 2 meter) air temperature (add cell_method attribute 'time: min')", - "dimensions":"longitude latitude time height2m", - "frequency":"day", - "long_name":"Daily Minimum Near-Surface Air Temperature", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"tasmin", - "positive":"", - "standard_name":"air_temperature", - "type":"real", - "units":"K", - 
"valid_max":"", - "valid_min":"" - }, - "tslsi":{ - "cell_measures":"", - "cell_methods":"area: time: mean (comment: over land and sea ice)", - "comment":"Surface temperature of all surfaces except open ocean.", - "dimensions":"longitude latitude time", - "frequency":"day", - "long_name":"Surface Temperature Where Land or Sea Ice", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"tslsi", - "positive":"", - "standard_name":"surface_temperature", - "type":"real", - "units":"K", - "valid_max":"", - "valid_min":"" - }, - "ua":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"", - "dimensions":"longitude latitude plev8 time", - "frequency":"day", - "long_name":"Eastward Wind", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"ua", - "positive":"", - "standard_name":"eastward_wind", - "type":"real", - "units":"m s-1", - "valid_max":"", - "valid_min":"" - }, - "uas":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Eastward component of the near-surface (usually, 10 meters) wind", - "dimensions":"longitude latitude time height10m", - "frequency":"day", - "long_name":"Eastward Near-Surface Wind", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"uas", - "positive":"", - "standard_name":"eastward_wind", - "type":"real", - "units":"m s-1", - "valid_max":"", - "valid_min":"" - }, - "va":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"", - "dimensions":"longitude latitude plev8 time", - "frequency":"day", - "long_name":"Northward Wind", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"va", - "positive":"", - "standard_name":"northward_wind", - "type":"real", - "units":"m s-1", - "valid_max":"", - "valid_min":"" - }, - "vas":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Northward component of the near surface wind", - "dimensions":"longitude latitude time height10m", - "frequency":"day", - "long_name":"Northward Near-Surface Wind", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"vas", - "positive":"", - "standard_name":"northward_wind", - "type":"real", - "units":"m s-1", - "valid_max":"", - "valid_min":"" - }, - "wap":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"Omega (vertical velocity in pressure coordinates, positive downwards)", - "dimensions":"longitude latitude plev8 time", - "frequency":"day", - "long_name":"omega (=dp/dt)", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"wap", - "positive":"", - "standard_name":"lagrangian_tendency_of_air_pressure", - "type":"real", - "units":"Pa s-1", - "valid_max":"", - "valid_min":"" - }, - "zg":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"", - "dimensions":"longitude latitude plev8 time", - "frequency":"day", - "long_name":"Geopotential Height", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"zg", - "positive":"", - "standard_name":"geopotential_height", - "type":"real", - "units":"m", - "valid_max":"", - "valid_min":"" - } - } -} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_Amon.json b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_Amon.json deleted file mode 100644 index 8862bb46d7..0000000000 --- a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_Amon.json +++ /dev/null @@ -1,1464 +0,0 @@ -{ - "Header":{ - "#dataRequest_specs_version":"01.00.21", - "#mip_era":"CMIP6", - "Conventions":"CF-1.7 ODS-2.1", - "approx_interval":"30.00000", - "cmor_version":"3.2", - 
"data_specs_version":"2.1.0", - "generic_levels":"alevel alevhalf", - "int_missing_value":"-2147483648", - "missing_value":"1e20", - "product":"observations", - "realm":"atmos", - "table_date":"07 March 2018", - "table_id":"Table obs4MIPs_Amon" - }, - "variable_entry":{ - "ccb":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Where convective cloud is present in the grid cell, the instantaneous cloud base altitude should be that of the bottom of the lowest level containing convective cloud. Missing data should be reported in the absence of convective cloud. The time mean should be calculated from these quantities averaging over occasions when convective cloud is present only, and should contain missing data for occasions when no convective cloud is present during the meaning period.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Air Pressure at Convective Cloud Base", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"ccb", - "positive":"", - "standard_name":"air_pressure_at_convective_cloud_base", - "type":"real", - "units":"Pa", - "valid_max":"", - "valid_min":"" - }, - "cct":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Where convective cloud is present in the grid cell, the instantaneous cloud top altitude should be that of the top of the highest level containing convective cloud. Missing data should be reported in the absence of convective cloud. The time mean should be calculated from these quantities averaging over occasions when convective cloud is present only, and should contain missing data for occasions when no convective cloud is present during the meaning period.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Air Pressure at Convective Cloud Top", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"cct", - "positive":"", - "standard_name":"air_pressure_at_convective_cloud_top", - "type":"real", - "units":"Pa", - "valid_max":"", - "valid_min":"" - }, - "cfc113global":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"", - "dimensions":"time", - "frequency":"mon", - "long_name":"Global Mean Mole Fraction of CFC113", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"cfc113global", - "positive":"", - "standard_name":"mole_fraction_of_cfc113_in_air", - "type":"real", - "units":"1e-12", - "valid_max":"", - "valid_min":"" - }, - "cfc11global":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"", - "dimensions":"time", - "frequency":"mon", - "long_name":"Global Mean Mole Fraction of CFC11", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"cfc11global", - "positive":"", - "standard_name":"mole_fraction_of_cfc11_in_air", - "type":"real", - "units":"1e-12", - "valid_max":"", - "valid_min":"" - }, - "cfc12global":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"", - "dimensions":"time", - "frequency":"mon", - "long_name":"Global Mean Mole Fraction of CFC12", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"cfc12global", - "positive":"", - "standard_name":"mole_fraction_of_cfc12_in_air", - "type":"real", - "units":"1e-12", - "valid_max":"", - "valid_min":"" - }, - "ch4":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"", - "dimensions":"longitude latitude plev19 time", - "frequency":"mon", - "long_name":"Mole Fraction of CH4", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"ch4", - "positive":"", - 
"standard_name":"mole_fraction_of_methane_in_air", - "type":"real", - "units":"mol mol-1", - "valid_max":"", - "valid_min":"" - }, - "ch4Clim":{ - "cell_measures":"", - "cell_methods":"area: mean time: mean within years time: mean over years", - "comment":"", - "dimensions":"longitude latitude plev19 time2", - "frequency":"monC", - "long_name":"Mole Fraction of CH4", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"ch4", - "positive":"", - "standard_name":"mole_fraction_of_methane_in_air", - "type":"real", - "units":"mol mol-1", - "valid_max":"", - "valid_min":"" - }, - "ch4global":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Global Mean Mole Fraction of CH4", - "dimensions":"time", - "frequency":"mon", - "long_name":"Global Mean Mole Fraction of CH4", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"ch4global", - "positive":"", - "standard_name":"mole_fraction_of_methane_in_air", - "type":"real", - "units":"1e-09", - "valid_max":"", - "valid_min":"" - }, - "ch4globalClim":{ - "cell_measures":"", - "cell_methods":"area: mean time: mean within years time: mean over years", - "comment":"Global Mean Mole Fraction of CH4", - "dimensions":"time2", - "frequency":"monC", - "long_name":"Global Mean Mole Fraction of CH4", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"ch4global", - "positive":"", - "standard_name":"mole_fraction_of_methane_in_air", - "type":"real", - "units":"1e-09", - "valid_max":"", - "valid_min":"" - }, - "ci":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Fraction of time that convection occurs in the grid cell.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Fraction of Time Convection Occurs", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"ci", - "positive":"", - "standard_name":"convection_time_fraction", - "type":"real", - "units":"1", - "valid_max":"", - "valid_min":"" - }, - "cl":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Percentage cloud cover, including both large-scale and convective cloud.", - "dimensions":"longitude latitude alevel time", - "frequency":"mon", - "long_name":"Cloud Area Fraction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"cl", - "positive":"", - "standard_name":"cloud_area_fraction_in_atmosphere_layer", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "clCCI":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Percentage cloud cover in optical depth categories.", - "dimensions":"longitude latitude plev7c tau time", - "frequency":"mon", - "long_name":"CCI Cloud Area Fraction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"clCCI", - "positive":"", - "standard_name":"cloud_area_fraction_in_atmosphere_layer", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "clCLARA":{ - "cell_measures":"", - "cell_methods":"area: mean time: mean", - "comment":"Percentage cloud cover in optical depth categories.", - "dimensions":"longitude latitude plev7c tau time", - "frequency":"mon", - "long_name":"CLARA Cloud Area Fraction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"clCLARA", - "positive":"", - "standard_name":"cloud_area_fraction_in_atmosphere_layer", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "cli":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Includes both large-scale and convective cloud. 
This is calculated as the mass of cloud ice in the grid cell divided by the mass of air (including the water in all phases) in the grid cell. It includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.", - "dimensions":"longitude latitude alevel time", - "frequency":"mon", - "long_name":"Mass Fraction of Cloud Ice", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"cli", - "positive":"", - "standard_name":"mass_fraction_of_cloud_ice_in_air", - "type":"real", - "units":"kg kg-1", - "valid_max":"", - "valid_min":"" - }, - "clivi":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"mass of ice water in the column divided by the area of the column (not just the area of the cloudy portion of the column). Includes precipitating frozen hydrometeors ONLY if the precipitating hydrometeor affects the calculation of radiative transfer in model.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Ice Water Path", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"clivi", - "positive":"", - "standard_name":"atmosphere_cloud_ice_content", - "type":"real", - "units":"kg m-2", - "valid_max":"", - "valid_min":"" - }, - "clt":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Total cloud area fraction for the whole atmospheric column, as seen from the surface or the top of the atmosphere. Includes both large-scale and convective cloud.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Total Cloud Fraction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"clt", - "positive":"", - "standard_name":"cloud_area_fraction", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "cltCCI":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Total cloud area fraction for the whole atmospheric column, as seen from the surface or the top of the atmosphere. Includes both large-scale and convective cloud.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"CCI Total Cloud Fraction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"cltCCI", - "positive":"", - "standard_name":"cloud_area_fraction", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "cltCLARA":{ - "cell_measures":"", - "cell_methods":"area: mean time: mean", - "comment":"Total cloud area fraction for the whole atmospheric column, as seen from the surface or the top of the atmosphere. Includes both large-scale and convective cloud.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"CLARA Total Cloud Fraction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"cltCLARA", - "positive":"", - "standard_name":"cloud_area_fraction", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "clw":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Includes both large-scale and convective cloud. Calculate as the mass of cloud liquid water in the grid cell divided by the mass of air (including the water in all phases) in the grid cells. 
Precipitating hydrometeors are included ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.", - "dimensions":"longitude latitude alevel time", - "frequency":"mon", - "long_name":"Mass Fraction of Cloud Liquid Water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"clw", - "positive":"", - "standard_name":"mass_fraction_of_cloud_liquid_water_in_air", - "type":"real", - "units":"kg kg-1", - "valid_max":"", - "valid_min":"" - }, - "clwCCI":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Percentage liquid cloud cover in optical depth categories.", - "dimensions":"longitude latitude plev7c tau time", - "frequency":"mon", - "long_name":"CCI Liquid Cloud Area Fraction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"clwCCI", - "positive":"", - "standard_name":"liquid_water_cloud_area_fraction_in_atmosphere_layer", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "clwCLARA":{ - "cell_measures":"", - "cell_methods":"area: mean time: mean", - "comment":"Percentage liquid cloud cover in optical depth categories.", - "dimensions":"longitude latitude plev7c tau time", - "frequency":"mon", - "long_name":"CLARA Liquid Cloud Area Fraction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"clwCLARA", - "positive":"", - "standard_name":"liquid_water_cloud_area_fraction_in_atmosphere_layer", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "clwtCCI":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"CCI Total Liquid Cloud Area Fraction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"clwtCCI", - "positive":"", - "standard_name":"liquid_water_cloud_area_fraction", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "clwtCLARA":{ - "cell_measures":"", - "cell_methods":"area: mean time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"CLARA Total Liquid Cloud Area Fraction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"clwtCLARA", - "positive":"", - "standard_name":"liquid_water_cloud_area_fraction", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "clwvi":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Mass of condensed (liquid + ice) water in the column divided by the area of the column (not just the area of the cloudy portion of the column). 
Includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Condensed Water Path", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"clwvi", - "positive":"", - "standard_name":"atmosphere_cloud_condensed_water_content", - "type":"real", - "units":"kg m-2", - "valid_max":"", - "valid_min":"" - }, - "co2":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"", - "dimensions":"longitude latitude plev19 time", - "frequency":"mon", - "long_name":"Mole Fraction of CO2", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"co2", - "positive":"", - "standard_name":"mole_fraction_of_carbon_dioxide_in_air", - "type":"real", - "units":"mol mol-1", - "valid_max":"", - "valid_min":"" - }, - "co2Clim":{ - "cell_measures":"", - "cell_methods":"area: mean time: mean within years time: mean over years", - "comment":"", - "dimensions":"longitude latitude plev19 time2", - "frequency":"monC", - "long_name":"Mole Fraction of CO2", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"co2", - "positive":"", - "standard_name":"mole_fraction_of_carbon_dioxide_in_air", - "type":"real", - "units":"mol mol-1", - "valid_max":"", - "valid_min":"" - }, - "co2mass":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Total atmospheric mass of Carbon Dioxide", - "dimensions":"time", - "frequency":"mon", - "long_name":"Total Atmospheric Mass of CO2", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"co2mass", - "positive":"", - "standard_name":"atmosphere_mass_of_carbon_dioxide", - "type":"real", - "units":"kg", - "valid_max":"", - "valid_min":"" - }, - "co2massClim":{ - "cell_measures":"", - "cell_methods":"area: mean time: mean within years time: mean over years", - "comment":"Total atmospheric mass of Carbon Dioxide", - "dimensions":"time2", - "frequency":"monC", - "long_name":"Total Atmospheric Mass of CO2", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"co2mass", - "positive":"", - "standard_name":"atmosphere_mass_of_carbon_dioxide", - "type":"real", - "units":"kg", - "valid_max":"", - "valid_min":"" - }, - "evspsbl":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Evaporation at surface: flux of water into the atmosphere due to conversion of both liquid and solid phases to vapor (from underlying surface and vegetation)", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Evaporation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"evspsbl", - "positive":"", - "standard_name":"water_evaporation_flux", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fco2antt":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"This is requested only for the emission-driven coupled carbon climate model runs. 
Does not include natural fire sources but includes all anthropogenic sources, including fossil fuel use, cement production, agricultural burning, and sources associated with anthropogenic land use change excluding forest regrowth.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Carbon Mass Flux into Atmosphere Due to All Anthropogenic Emissions of CO2", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fco2antt", - "positive":"", - "standard_name":"tendency_of_atmosphere_mass_content_of_carbon_dioxide_expressed_as_carbon_due_to_anthropogenic_emission", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fco2fos":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"This is the prescribed anthropogenic CO2 flux from fossil fuel use, including cement production, and flaring (but not from land-use changes, agricultural burning, forest regrowth, etc.)", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Carbon Mass Flux into Atmosphere Due to Fossil Fuel Emissions of CO2", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fco2fos", - "positive":"", - "standard_name":"tendency_of_atmosphere_mass_content_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_fossil_fuel_combustion", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fco2nat":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"This is what the atmosphere sees (on its own grid). This field should be equivalent to the combined natural fluxes of carbon that account for natural exchanges between the atmosphere and land (nep) or ocean (fgco2) reservoirs.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Surface Carbon Mass Flux into the Atmosphere Due to Natural Sources", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fco2nat", - "positive":"", - "standard_name":"surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_natural_sources", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "hcfc22global":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"", - "dimensions":"time", - "frequency":"mon", - "long_name":"Global Mean Mole Fraction of HCFC22", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hcfc22global", - "positive":"", - "standard_name":"mole_fraction_of_hcfc22_in_air", - "type":"real", - "units":"1e-12", - "valid_max":"", - "valid_min":"" - }, - "hfls":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Surface Upward Latent Heat Flux", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hfls", - "positive":"up", - "standard_name":"surface_upward_latent_heat_flux", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "hfss":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Surface Upward Sensible Heat Flux", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hfss", - "positive":"up", - "standard_name":"surface_upward_sensible_heat_flux", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "hur":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"The relative humidity with respect to liquid water for T> 0
C, and with respect to ice for T<0 C.", - "dimensions":"longitude latitude plev19 time", - "frequency":"mon", - "long_name":"Relative Humidity", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hur", - "positive":"", - "standard_name":"relative_humidity", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "hurs":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"The relative humidity with respect to liquid water for T> 0 C, and with respect to ice for T<0 C.", - "dimensions":"longitude latitude time height2m", - "frequency":"mon", - "long_name":"Near-Surface Relative Humidity", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hurs", - "positive":"", - "standard_name":"relative_humidity", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "hus":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"", - "dimensions":"longitude latitude plev19 time", - "frequency":"mon", - "long_name":"Specific Humidity", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hus", - "positive":"", - "standard_name":"specific_humidity", - "type":"real", - "units":"1", - "valid_max":"", - "valid_min":"" - }, - "huss":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Near-surface (usually, 2 meter) specific humidity.", - "dimensions":"longitude latitude time height2m", - "frequency":"mon", - "long_name":"Near-Surface Specific Humidity", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"huss", - "positive":"", - "standard_name":"specific_humidity", - "type":"real", - "units":"1", - "valid_max":"", - "valid_min":"" - }, - "mc":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"The net mass flux should represent the difference between the updraft and downdraft components. 
The flux is computed as the mass divided by the area of the grid cell.", - "dimensions":"longitude latitude alevhalf time", - "frequency":"mon", - "long_name":"Convective Mass Flux", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"mc", - "positive":"up", - "standard_name":"atmosphere_net_upward_convective_mass_flux", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "n2o":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"", - "dimensions":"longitude latitude plev19 time", - "frequency":"mon", - "long_name":"Mole Fraction of N2O", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"n2o", - "positive":"", - "standard_name":"mole_fraction_of_nitrous_oxide_in_air", - "type":"real", - "units":"mol mol-1", - "valid_max":"", - "valid_min":"" - }, - "n2oClim":{ - "cell_measures":"", - "cell_methods":"area: mean time: mean within years time: mean over years", - "comment":"", - "dimensions":"longitude latitude plev19 time2", - "frequency":"monC", - "long_name":"Mole Fraction of N2O", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"n2o", - "positive":"", - "standard_name":"mole_fraction_of_nitrous_oxide_in_air", - "type":"real", - "units":"mol mol-1", - "valid_max":"", - "valid_min":"" - }, - "n2oglobal":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Global mean Nitrous Oxide (N2O)", - "dimensions":"time", - "frequency":"mon", - "long_name":"Global Mean Mole Fraction of N2O", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"n2oglobal", - "positive":"", - "standard_name":"mole_fraction_of_nitrous_oxide_in_air", - "type":"real", - "units":"1e-09", - "valid_max":"", - "valid_min":"" - }, - "n2oglobalClim":{ - "cell_measures":"", - "cell_methods":"area: mean time: mean within years time: mean over years", - "comment":"Global mean Nitrous Oxide (N2O)", - "dimensions":"time2", - "frequency":"monC", - "long_name":"Global Mean Mole Fraction of N2O", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"n2oglobal", - "positive":"", - "standard_name":"mole_fraction_of_nitrous_oxide_in_air", - "type":"real", - "units":"1e-09", - "valid_max":"", - "valid_min":"" - }, - "o3":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"", - "dimensions":"longitude latitude plev19 time", - "frequency":"mon", - "long_name":"Mole Fraction of O3", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"o3", - "positive":"", - "standard_name":"mole_fraction_of_ozone_in_air", - "type":"real", - "units":"mol mol-1", - "valid_max":"", - "valid_min":"" - }, - "o3Clim":{ - "cell_measures":"", - "cell_methods":"area: mean time: mean within years time: mean over years", - "comment":"", - "dimensions":"longitude latitude plev19 time2", - "frequency":"monC", - "long_name":"Mole Fraction of O3", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"o3", - "positive":"", - "standard_name":"mole_fraction_of_ozone_in_air", - "type":"real", - "units":"mol mol-1", - "valid_max":"", - "valid_min":"" - }, - "pctCCI":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"CCI Mean Cloud Top Pressure", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"pctCCI", - "positive":"", - "standard_name":"air_pressure_at_cloud_top", - "type":"real", - "units":"Pa", - "valid_max":"", - "valid_min":"" - }, - "pctCLARA":{ - "cell_measures":"", - "cell_methods":"area: mean time: mean", - 
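All of the variable_entry records in these tables share one fixed schema (standard_name, units, dimensions, cell_methods, and so on), which is what makes them machine-checkable. A small sketch of enumerating a table generically, assuming a saved copy of the JSON (illustrative only, not ESMValTool's own table reader):

```python
# Illustrative only: list every variable a CMOR table defines, with its
# units and dimensions. Assumes obs4MIPs_Amon.json was saved locally.
import json

with open("obs4MIPs_Amon.json") as handle:
    table = json.load(handle)

for name, entry in sorted(table["variable_entry"].items()):
    print(f"{name:12s} [{entry['units']}] on ({entry['dimensions']})")
```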
"comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"CLARA Mean Cloud Top Pressure", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"pctCLARA", - "positive":"", - "standard_name":"air_pressure_at_cloud_top", - "type":"real", - "units":"Pa", - "valid_max":"", - "valid_min":"" - }, - "pme":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Net flux of water (in all phases) between the atmosphere and underlying surface including vegetation), mainly resulting from the difference of precipitation and evaporation", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Surface Downward Freshwater Flux", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"pme", - "positive":"", - "standard_name":"surface_downward_water_flux", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "pr":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"includes both liquid and solid phases", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Precipitation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"pr", - "positive":"", - "standard_name":"precipitation_flux", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "prc":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Convective precipitation at surface; includes both liquid and solid phases.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Convective Precipitation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"prc", - "positive":"", - "standard_name":"convective_precipitation_flux", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "prsn":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"at surface; includes precipitation of all forms of water in the solid phase", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Snowfall Flux", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"prsn", - "positive":"", - "standard_name":"snowfall_flux", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "prw":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"vertically integrated through the atmospheric column", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Water Vapor Path", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"prw", - "positive":"", - "standard_name":"atmosphere_water_vapor_content", - "type":"real", - "units":"kg m-2", - "valid_max":"", - "valid_min":"" - }, - "ps":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"surface pressure (not mean sea-level pressure), 2-D field to calculate the 3-D pressure field from hybrid coordinates", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Surface Air Pressure", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"ps", - "positive":"", - "standard_name":"surface_air_pressure", - "type":"real", - "units":"Pa", - "valid_max":"", - "valid_min":"" - }, - "psl":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Sea Level Pressure", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Sea Level Pressure", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"psl", - "positive":"", - 
"standard_name":"air_pressure_at_sea_level", - "type":"real", - "units":"Pa", - "valid_max":"", - "valid_min":"" - }, - "rlds":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Surface Downwelling Longwave Radiation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"rlds", - "positive":"down", - "standard_name":"surface_downwelling_longwave_flux_in_air", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "rldscs":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Surface downwelling clear-sky longwave radiation", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Surface Downwelling Clear-Sky Longwave Radiation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"rldscs", - "positive":"down", - "standard_name":"surface_downwelling_longwave_flux_in_air_assuming_clear_sky", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "rlus":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Surface Upwelling Longwave Radiation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"rlus", - "positive":"up", - "standard_name":"surface_upwelling_longwave_flux_in_air", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "rlut":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"at the top of the atmosphere (to be compared with satellite measurements)", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"TOA Outgoing Longwave Radiation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"rlut", - "positive":"up", - "standard_name":"toa_outgoing_longwave_flux", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "rlutcs":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Upwelling clear-sky longwave radiation at top of atmosphere", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"TOA Outgoing Clear-Sky Longwave Radiation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"rlutcs", - "positive":"up", - "standard_name":"toa_outgoing_longwave_flux_assuming_clear_sky", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "rsds":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"surface solar irradiance for UV calculations", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Surface Downwelling Shortwave Radiation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"rsds", - "positive":"down", - "standard_name":"surface_downwelling_shortwave_flux_in_air", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "rsdscs":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"surface solar irradiance clear sky for UV calculations", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Surface Downwelling Clear-Sky Shortwave Radiation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"rsdscs", - "positive":"down", - "standard_name":"surface_downwelling_shortwave_flux_in_air_assuming_clear_sky", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "rsdt":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - 
"comment":"Shortwave radiation incident at the top of the atmosphere", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"TOA Incident Shortwave Radiation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"rsdt", - "positive":"down", - "standard_name":"toa_incoming_shortwave_flux", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "rsus":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Surface Upwelling Shortwave Radiation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"rsus", - "positive":"up", - "standard_name":"surface_upwelling_shortwave_flux_in_air", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "rsuscs":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Surface Upwelling Clear-sky Shortwave Radiation", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Surface Upwelling Clear-Sky Shortwave Radiation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"rsuscs", - "positive":"up", - "standard_name":"surface_upwelling_shortwave_flux_in_air_assuming_clear_sky", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "rsut":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"at the top of the atmosphere", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"TOA Outgoing Shortwave Radiation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"rsut", - "positive":"up", - "standard_name":"toa_outgoing_shortwave_flux", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "rsutcs":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Calculated in the absence of clouds.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"TOA Outgoing Clear-Sky Shortwave Radiation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"rsutcs", - "positive":"up", - "standard_name":"toa_outgoing_shortwave_flux_assuming_clear_sky", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "rtmt":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Net Downward Radiative Flux at Top of Model : I.e., at the top of that portion of the atmosphere where dynamics are explicitly treated by the model. 
This is reported only if it differs from the net downward radiative flux at the top of the atmosphere.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Net Downward Flux at Top of Model", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"rtmt", - "positive":"down", - "standard_name":"net_downward_radiative_flux_at_top_of_atmosphere_model", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "sbl":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"The snow and ice sublimation flux is the loss of snow and ice mass per unit area from the surface resulting from their direct conversion to water vapor that enters the atmosphere.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Surface Snow and Ice Sublimation Flux", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sbl", - "positive":"", - "standard_name":"surface_snow_and_ice_sublimation_flux", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "sci":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Fraction of time that shallow convection occurs in the grid cell.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Fraction of Time Shallow Convection Occurs", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sci", - "positive":"", - "standard_name":"shallow_convection_time_fraction", - "type":"real", - "units":"1", - "valid_max":"", - "valid_min":"" - }, - "sfcWind":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"near-surface (usually, 10 meters) wind speed.", - "dimensions":"longitude latitude time height10m", - "frequency":"mon", - "long_name":"Near-Surface Wind Speed", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sfcWind", - "positive":"", - "standard_name":"wind_speed", - "type":"real", - "units":"m s-1", - "valid_max":"", - "valid_min":"" - }, - "ta":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"Air Temperature", - "dimensions":"longitude latitude plev19 time", - "frequency":"mon", - "long_name":"Air Temperature", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"ta", - "positive":"", - "standard_name":"air_temperature", - "type":"real", - "units":"K", - "valid_max":"", - "valid_min":"" - }, - "tas":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"near-surface (usually, 2 meter) air temperature", - "dimensions":"longitude latitude time height2m", - "frequency":"mon", - "long_name":"Near-Surface Air Temperature", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"tas", - "positive":"", - "standard_name":"air_temperature", - "type":"real", - "units":"K", - "valid_max":"", - "valid_min":"" - }, - "tasmax":{ - "cell_measures":"", - "cell_methods":"area: mean time: maximum within days time: mean over days", - "comment":"maximum near-surface (usually, 2 meter) air temperature (add cell_method attribute 'time: max')", - "dimensions":"longitude latitude time height2m", - "frequency":"mon", - "long_name":"Daily Maximum Near-Surface Air Temperature", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"tasmax", - "positive":"", - "standard_name":"air_temperature", - "type":"real", - "units":"K", - "valid_max":"", - "valid_min":"" - }, - "tasmin":{ - "cell_measures":"", - "cell_methods":"area: mean time: minimum within days time: mean over days", - "comment":"minimum near-surface (usually, 2 meter) air 
temperature (add cell_method attribute 'time: min')", - "dimensions":"longitude latitude time height2m", - "frequency":"mon", - "long_name":"Daily Minimum Near-Surface Air Temperature", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"tasmin", - "positive":"", - "standard_name":"air_temperature", - "type":"real", - "units":"K", - "valid_max":"", - "valid_min":"" - }, - "tauu":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Downward eastward wind stress at the surface", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Surface Downward Eastward Wind Stress", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"tauu", - "positive":"down", - "standard_name":"surface_downward_eastward_stress", - "type":"real", - "units":"Pa", - "valid_max":"", - "valid_min":"" - }, - "tauv":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Downward northward wind stress at the surface", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Surface Downward Northward Wind Stress", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"tauv", - "positive":"down", - "standard_name":"surface_downward_northward_stress", - "type":"real", - "units":"Pa", - "valid_max":"", - "valid_min":"" - }, - "ts":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Temperature of the lower boundary of the atmosphere", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Surface Temperature", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"ts", - "positive":"", - "standard_name":"surface_temperature", - "type":"real", - "units":"K", - "valid_max":"", - "valid_min":"" - }, - "ttbr":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Top of Atmosphere Brightness Temperature", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"ttbr", - "positive":"", - "standard_name":"toa_brightness_temperature", - "type":"real", - "units":"K", - "valid_max":"", - "valid_min":"" - }, - "ua":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"", - "dimensions":"longitude latitude plev19 time", - "frequency":"mon", - "long_name":"Eastward Wind", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"ua", - "positive":"", - "standard_name":"eastward_wind", - "type":"real", - "units":"m s-1", - "valid_max":"", - "valid_min":"" - }, - "uas":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Eastward component of the near-surface (usually, 10 meters) wind", - "dimensions":"longitude latitude time height10m", - "frequency":"mon", - "long_name":"Eastward Near-Surface Wind", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"uas", - "positive":"", - "standard_name":"eastward_wind", - "type":"real", - "units":"m s-1", - "valid_max":"", - "valid_min":"" - }, - "va":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"", - "dimensions":"longitude latitude plev19 time", - "frequency":"mon", - "long_name":"Northward Wind", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"va", - "positive":"", - "standard_name":"northward_wind", - "type":"real", - "units":"m s-1", - "valid_max":"", - "valid_min":"" - }, - "vas":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Northward component of the near surface wind", - "dimensions":"longitude latitude time height10m", - 
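The tasmax and tasmin entries above are the only ones in this table whose cell_methods chain two time operations ("maximum within days", then "mean over days"); the order matters, because a monthly mean of daily maxima is not the same as a maximum of monthly means. A synthetic pandas sketch of that order of operations (invented data, not part of the tables):

```python
# Synthetic sketch of tasmax's cell_methods
# ("time: maximum within days time: mean over days").
import numpy as np
import pandas as pd

rng = np.random.default_rng(0)
index = pd.date_range("2000-01-01", periods=24 * 62, freq="h")
hourly = pd.Series(270 + 10 * rng.random(index.size), index=index)

daily_max = hourly.resample("D").max()        # maximum within days
tasmax_mon = daily_max.resample("MS").mean()  # mean over days, per month
print(tasmax_mon)
```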
"frequency":"mon", - "long_name":"Northward Near-Surface Wind", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"vas", - "positive":"", - "standard_name":"northward_wind", - "type":"real", - "units":"m s-1", - "valid_max":"", - "valid_min":"" - }, - "wap":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"Omega (vertical velocity in pressure coordinates, positive downwards)", - "dimensions":"longitude latitude plev19 time", - "frequency":"mon", - "long_name":"omega (=dp/dt)", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"wap", - "positive":"", - "standard_name":"lagrangian_tendency_of_air_pressure", - "type":"real", - "units":"Pa s-1", - "valid_max":"", - "valid_min":"" - }, - "zg":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"", - "dimensions":"longitude latitude plev19 time", - "frequency":"mon", - "long_name":"Geopotential Height", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"zg", - "positive":"", - "standard_name":"geopotential_height", - "type":"real", - "units":"m", - "valid_max":"", - "valid_min":"" - } - } -} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_CV.json b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_CV.json deleted file mode 100644 index 5f30ba78aa..0000000000 --- a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_CV.json +++ /dev/null @@ -1,351 +0,0 @@ -{ - "CV":{ - "activity_id":"obs4MIPs", - "frequency":{ - "1hr":"sampled hourly", - "1hrCM":"monthly-mean diurnal cycle resolving each day into 1-hour means", - "1hrPt":"sampled hourly, at specified time point within an hour", - "3hr":"sampled every 3 hours", - "3hrPt":"sampled 3 hourly, at specified time point within the time period", - "6hr":"sampled every 6 hours", - "6hrPt":"sampled 6 hourly, at specified time point within the time period", - "day":"daily mean samples", - "dec":"decadal mean samples", - "fx":"fixed (time invariant) field", - "mon":"monthly mean samples", - "monC":"monthly climatology computed from monthly mean samples", - "monPt":"sampled monthly, at specified time point within the time period", - "subhrPt":"sampled sub-hourly, at specified time point within an hour", - "yr":"annual mean samples", - "yrPt":"sampled yearly, at specified time point within the time period" - }, - "grid_label":{ - "gm":"global mean data", - "gn":"data reported on a model's native grid", - "gna":"data reported on a native grid in the region of Antarctica", - "gng":"data reported on a native grid in the region of Greenland", - "gnz":"zonal mean data reported on a model's native latitude grid", - "gr":"regridded data reported on the data provider's preferred target grid", - "gr1":"regridded data reported on a grid other than the native grid and other than the preferred target grid", - "gr1a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", - "gr1g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", - "gr1z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", - "gr2":"regridded data reported on a grid other than the native grid and other than the preferred target grid", - "gr2a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", - "gr2g":"regridded data reported in the region 
of Greenland on a grid other than the native grid and other than the preferred target grid", - "gr2z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", - "gr3":"regridded data reported on a grid other than the native grid and other than the preferred target grid", - "gr3a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", - "gr3g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", - "gr3z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", - "gr4":"regridded data reported on a grid other than the native grid and other than the preferred target grid", - "gr4a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", - "gr4g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", - "gr4z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", - "gr5":"regridded data reported on a grid other than the native grid and other than the preferred target grid", - "gr5a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", - "gr5g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", - "gr5z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", - "gr6":"regridded data reported on a grid other than the native grid and other than the preferred target grid", - "gr6a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", - "gr6g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", - "gr6z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", - "gr7":"regridded data reported on a grid other than the native grid and other than the preferred target grid", - "gr7a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", - "gr7g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", - "gr7z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", - "gr8":"regridded data reported on a grid other than the native grid and other than the preferred target grid", - "gr8a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", - "gr8g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", - "gr8z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", - "gr9":"regridded data reported on a grid other than the native grid and other than 
the preferred target grid", - "gr9a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", - "gr9g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", - "gr9z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", - "gra":"regridded data in the region of Antarctica reported on the data provider's preferred target grid", - "grg":"regridded data in the region of Greenland reported on the data provider's preferred target grid", - "grz":"regridded zonal mean data reported on the data provider's preferred latitude target grid" - }, - "institution_id":{ - "DWD":"Deutscher Wetterdienst, Offenbach 63067, Germany", - "NASA-JPL":"NASA's Jet Propulsion Laboratory, Pasadena, CA 91109, USA", - "NOAA-NCEI":"NOAA's National Centers for Environmental Information, Asheville, NC 28801, USA", - "PCMDI":"Program for Climate Model Diagnosis and Intercomparison, Lawrence Livermore National Laboratory, Livermore, CA 94550, USA", - "RSS":"Remote Sensing Systems, Santa Rosa, CA 95401, USA" - }, - "license":"Data in this file produced by is licensed under a Creative Commons Attribution-ShareAlike 4.0 International License (https://creativecommons.org/licenses/). Use of the data must be acknowledged following guidelines found at . Further information about this data, including some limitations, can be found via .)", - "nominal_resolution":[ - "0.5 km", - "1 km", - "10 km", - "100 km", - "1000 km", - "10000 km", - "1x1 degree", - "2.5 km", - "25 km", - "250 km", - "2500 km", - "5 km", - "50 km", - "500 km", - "5000 km" - ], - "product":[ - "observations", - "reanalysis" - ], - "realm":[ - "aerosol", - "atmos", - "atmosChem", - "land", - "landIce", - "ocean", - "ocnBgchem", - "seaIce" - ], - "region":[ - "africa", - "antarctica", - "arabian_sea", - "aral_sea", - "arctic_ocean", - "asia", - "atlantic_ocean", - "australia", - "baltic_sea", - "barents_opening", - "barents_sea", - "beaufort_sea", - "bellingshausen_sea", - "bering_sea", - "bering_strait", - "black_sea", - "canadian_archipelago", - "caribbean_sea", - "caspian_sea", - "central_america", - "chukchi_sea", - "contiguous_united_states", - "denmark_strait", - "drake_passage", - "east_china_sea", - "english_channel", - "eurasia", - "europe", - "faroe_scotland_channel", - "florida_bahamas_strait", - "fram_strait", - "global", - "global_land", - "global_ocean", - "great_lakes", - "greenland", - "gulf_of_alaska", - "gulf_of_mexico", - "hudson_bay", - "iceland_faroe_channel", - "indian_ocean", - "indo_pacific_ocean", - "indonesian_throughflow", - "irish_sea", - "lake_baykal", - "lake_chad", - "lake_malawi", - "lake_tanganyika", - "lake_victoria", - "mediterranean_sea", - "mozambique_channel", - "north_america", - "north_sea", - "norwegian_sea", - "pacific_equatorial_undercurrent", - "pacific_ocean", - "persian_gulf", - "red_sea", - "ross_sea", - "sea_of_japan", - "sea_of_okhotsk", - "south_america", - "south_china_sea", - "southern_ocean", - "taiwan_luzon_straits", - "weddell_sea", - "windward_passage", - "yellow_sea" - ], - "required_global_attributes":[ - "Conventions", - "activity_id", - "contact", - "creation_date", - "data_specs_version", - "frequency", - "grid", - "grid_label", - "institution", - "institution_id", - "license", - "nominal_resolution", - "product", - "realm", - "source_id", - "table_id", - "tracking_id", - 
"variable_id", - "variant_label" - ], - "source_id":{ - "AIRS-1-0":{ - "region":"global", - "source":"AIRS 1.0 (2011): Atmospheric Infrared Sounder", - "source_label":"AIRS", - "source_type":"satellite_retrieval", - "source_version_number":"1.0" - }, - "Aura-MLS-v04-2":{ - "region":"global", - "source":"Aura-MLS v04.2 (2018): EOS Aura Microwave Limb Sounder", - "source_label":"Aura-MLS", - "source_type":"satellite_retrieval", - "source_version_number":"v04.2" - }, - "CMSAF-CLARA-A-2-0":{ - "region":"global", - "source":"CMSAF-CLARA-A 2.0 (2017): CM SAF cLoud, Albedo and surface RAdiation dataset from AVHRR data", - "source_label":"CMSAF-CLARA-A", - "source_type":"satellite_retrieval", - "source_version_number":"2.0" - }, - "CMSAF-HOAPS-4-0":{ - "region":"global_ocean", - "source":"CMSAF-HOAPS 4.0 (2017): Hamburg Ocean Atmosphere Parameters and fluxes from Satellite data, based on SSM/I and SSMIS aboard DMSP", - "source_label":"CMSAF-HOAPS", - "source_type":"satellite_retrieval", - "source_version_number":"4.0" - }, - "CMSAF-SARAH-2-0":{ - "region":"africa, atlantic_ocean, europe", - "source":"CMSAF-SARAH 2.0 (2017): Surface solAr RAdiation data set - Heliosat, based on MVIRI/SEVIRI aboard METEOSAT", - "source_label":"CMSAF-SARAH", - "source_type":"satellite_retrieval", - "source_version_number":"2.0" - }, - "ESACCI-CLOUD-ATSR2-AATSR-2-0":{ - "region":"global", - "source":"ESACCI-CLOUD-ATSR2-AATSR 2.0 (2017): Cloud properties derived from ATSR2 and AATSR (aboard ERS2 and ENVISAT) measurements. This dataset belongs to the ESA Cloud_cci suite of long-term coherent cloud property datasets.", - "source_label":"ESACCI-CLOUD-ATSR2-AATSR", - "source_type":"satellite_retrieval", - "source_version_number":"2.0" - }, - "ESACCI-CLOUD-AVHRR-AM-2-0":{ - "region":"global", - "source":"ESACCI-CLOUD-AVHRR-AM 2.0 (2017): Cloud properties derived from AVHRR (aboard NOAA and MetOp AM) measurements. This dataset belongs to the ESA Cloud_cci suite of long-term coherent cloud property datasets", - "source_label":"ESACCI-CLOUD-AVHRR-AM", - "source_type":"satellite_retrieval", - "source_version_number":"2.0" - }, - "ESACCI-CLOUD-AVHRR-PM-2-0":{ - "region":"global", - "source":"ESACCI-CLOUD-AVHRR-PM 2.0 (2017): Cloud properties derived from AVHRR (aboard NOAA and MetOp PM) measurements. This dataset belongs to the ESA Cloud_cci suite of long-term coherent cloud property datasets", - "source_label":"ESACCI-CLOUD-AVHRR-PM", - "source_type":"satellite_retrieval", - "source_version_number":"2.0" - }, - "ESACCI-CLOUD-MERIS-AATSR-2-0":{ - "region":"global", - "source":"ESACCI-CLOUD-MERIS-AATSR 2.0 (2017): Cloud properties derived from MERIS and AATSR (aboard ENVISAT) measurements. 
This dataset belongs to the ESA Cloud_cci suite of long-term coherent cloud property datasets.", - "source_label":"ESACCI-CLOUD-MERIS-AATSR", - "source_type":"satellite_retrieval", - "source_version_number":"2.0" - }, - "GNSS-RO-1-3":{ - "region":"global", - "source":"GNSS-RO 1.3 (2016): Global Navigation Satellite Systems Radio Occultation", - "source_label":"GNSS-RO", - "source_type":"satellite_retrieval", - "source_version_number":"1.3" - }, - "NOAA-NCEI-AVHRR-NDVI-4-0":{ - "region":"global_land", - "source":"NOAA-NCEI-AVHRR-NDVI 4.0 (2013): Normalized Difference Vegetation Index", - "source_label":"NOAA-NCEI-AVHRR-NDVI", - "source_type":"satellite_retrieval", - "source_version_number":"4.0" - }, - "NOAA-NCEI-ERSST-4-0":{ - "region":"global_ocean", - "source":"NOAA-NCEI-ERSST 4.0 (2015): Extended Reconstructed Sea Surface Temperatures", - "source_label":"NOAA-NCEI-ERSST", - "source_type":"gridded_insitu", - "source_version_number":"4.0" - }, - "NOAA-NCEI-FAPAR-4-0":{ - "region":"global_land", - "source":"NOAA-NCEI-FAPAR 4.0 (2014): AVHRR Fraction of Absorbed Photosynthetically Active Radiation", - "source_label":"NOAA-NCEI-FAPAR", - "source_type":"satellite_retrieval", - "source_version_number":"4.0" - }, - "NOAA-NCEI-GridSat-4-0":{ - "region":"global", - "source":"NOAA-NCEI-GridSat 4.0 (2015): Gridded Satellite ISCCP B1 11 Micron Brightness Temperature", - "source_label":"NOAA-NCEI-GridSat", - "source_type":"satellite_retrieval", - "source_version_number":"4.0" - }, - "NOAA-NCEI-LAI-4-0":{ - "region":"global_land", - "source":"NOAA-NCEI-LAI 4.0 (2014): AVHRR Leaf Area Index", - "source_label":"NOAA-NCEI-LAI", - "source_type":"satellite_retrieval", - "source_version_number":"4.0" - }, - "NOAA-NCEI-PERSIANN-1-1":{ - "region":"global", - "source":"NOAA-NCEI-PERSIANN 1.1 (2014): Precipitation Estimation from Remotely Sensed Information using Artificial Neural Network", - "source_label":"NOAA-NCEI-PERSIANN", - "source_type":"satellite_retrieval", - "source_version_number":"1.1" - }, - "NOAA-NCEI-SeaWinds-1-2":{ - "region":"global_ocean", - "source":"NOAA-NCEI-SeaWinds 1.2 (2008): Blended Sea Surface Winds", - "source_label":"NOAA-NCEI-SeaWinds", - "source_type":"satellite_blended", - "source_version_number":"1.2" - }, - "QuikSCAT-v20110531":{ - "region":"global", - "source":"QuikSCAT v20110531 (2011): QuikSCAT winds monthly climatology derived from QuikSCAT L2B using the 2006 model function and processing algorithms.", - "source_label":"QuikSCAT", - "source_type":"satellite_retrieval", - "source_version_number":"v20110531" - }, - "REMSS-PRW-6-6-0":{ - "region":"global", - "source":"REMSS-PRW 6.6.0 (2017): Water Vapor Path", - "source_label":"REMSS-PRW", - "source_type":"satellite_blended", - "source_version_number":"6.6.0" - } - }, - "source_type":{ - "gridded_insitu":"gridded product based on measurements collected from in-situ instruments", - "reanalysis":"gridded product generated from a model reanalysis based on in-situ instruments and possibly satellite measurements", - "satellite_blended":"gridded product based on both in-situ instruments and satellite measurements", - "satellite_retrieval":"gridded product based on satellite measurements" - }, - "table_id":[ - "obs4MIPs_Aday", - "obs4MIPs_Amon", - "obs4MIPs_Lmon", - "obs4MIPs_Omon", - "obs4MIPs_SImon", - "obs4MIPs_fx", - "obs4MIPs_monNobs", - "obs4MIPs_monStderr" - ] - } -} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_Lmon.json b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_Lmon.json 
deleted file mode 100644 index 55666ea58e..0000000000 --- a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_Lmon.json +++ /dev/null @@ -1,954 +0,0 @@ -{ - "Header":{ - "#dataRequest_specs_version":"01.00.21", - "#mip_era":"CMIP6", - "Conventions":"CF-1.7 ODS-2.1", - "approx_interval":"30.00000", - "cmor_version":"3.2", - "data_specs_version":"2.1.0", - "generic_levels":"", - "int_missing_value":"-2147483648", - "missing_value":"1e20", - "product":"observations", - "realm":"land", - "table_date":"07 March 2018", - "table_id":"Table obs4MIPs_Lmon" - }, - "variable_entry":{ - "baresoilFrac":{ - "cell_measures":"", - "cell_methods":"area: mean where land over all_area_types time: mean", - "comment":"Percentage of entire grid cell that is covered by bare soil.", - "dimensions":"longitude latitude time typebare", - "frequency":"mon", - "long_name":"Bare Soil Fraction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"baresoilFrac", - "positive":"", - "standard_name":"area_fraction", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "burntFractionAll":{ - "cell_measures":"", - "cell_methods":"area: mean where land over all_area_types time: mean", - "comment":"Percentage of grid cell burned due to all fires including natural and anthropogenic fires and those associated with anthropogenic land use change", - "dimensions":"longitude latitude time typeburnt", - "frequency":"mon", - "long_name":"Percentage of Entire Grid cell that is Covered by Burnt Vegetation (All Classes)", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"burntFractionAll", - "positive":"", - "standard_name":"area_fraction", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "c3PftFrac":{ - "cell_measures":"", - "cell_methods":"area: mean where land over all_area_types time: mean", - "comment":"Percentage of entire grid cell that is covered by C3 PFTs (including grass, crops, and trees).", - "dimensions":"longitude latitude time typec3pft", - "frequency":"mon", - "long_name":"Total C3 PFT Cover Fraction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"c3PftFrac", - "positive":"", - "standard_name":"area_fraction", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "c4PftFrac":{ - "cell_measures":"", - "cell_methods":"area: mean where land over all_area_types time: mean", - "comment":"Percentage of entire grid cell that is covered by C4 PFTs (including grass and crops).", - "dimensions":"longitude latitude time typec4pft", - "frequency":"mon", - "long_name":"Total C4 PFT Cover Fraction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"c4PftFrac", - "positive":"", - "standard_name":"area_fraction", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "cCwd":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"Carbon mass per unit area in woody debris (dead organic matter composed of coarse wood. 
It is distinct from litter)", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Carbon Mass in Coarse Woody Debris", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"cCwd", - "positive":"", - "standard_name":"wood_debris_carbon_content", - "type":"real", - "units":"kg m-2", - "valid_max":"", - "valid_min":"" - }, - "cLeaf":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"Carbon mass per unit area in leaves.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Carbon Mass in Leaves", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"cLeaf", - "positive":"", - "standard_name":"leaf_carbon_content", - "type":"real", - "units":"kg m-2", - "valid_max":"", - "valid_min":"" - }, - "cLitter":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Carbon Mass in Litter Pool", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"cLitter", - "positive":"", - "standard_name":"litter_carbon_content", - "type":"real", - "units":"kg m-2", - "valid_max":"", - "valid_min":"" - }, - "cLitterAbove":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Carbon Mass in Above-Ground Litter", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"cLitterAbove", - "positive":"", - "standard_name":"surface_litter_carbon_content", - "type":"real", - "units":"kg m-2", - "valid_max":"", - "valid_min":"" - }, - "cLitterBelow":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Carbon Mass in Below-Ground Litter", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"cLitterBelow", - "positive":"", - "standard_name":"subsurface_litter_carbon_content", - "type":"real", - "units":"kg m-2", - "valid_max":"", - "valid_min":"" - }, - "cProduct":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"Carbon mass per unit area in products that have been removed from the environment through land use change.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Carbon Mass in Products of Land Use Change", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"cProduct", - "positive":"", - "standard_name":"carbon_content_of_products_of_anthropogenic_land_use_change", - "type":"real", - "units":"kg m-2", - "valid_max":"", - "valid_min":"" - }, - "cRoot":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"Carbon mass per unit area in roots, including fine and coarse roots.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Carbon Mass in Roots", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"cRoot", - "positive":"", - "standard_name":"root_carbon_content", - "type":"real", - "units":"kg m-2", - "valid_max":"", - "valid_min":"" - }, - "cSoilFast":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"Carbon mass per unit area in fast soil pool.
Fast means a lifetime of less than 10 years for reference climate conditions (20th century) in the absence of water limitations.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Carbon Mass in Fast Soil Pool", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"cSoilFast", - "positive":"", - "standard_name":"fast_soil_pool_carbon_content", - "type":"real", - "units":"kg m-2", - "valid_max":"", - "valid_min":"" - }, - "cSoilMedium":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"Carbon mass per unit area in medium (rate) soil pool. Medium means a lifetime of more than 10 years and less than 100 years for reference climate conditions (20th century) in the absence of water limitations.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Carbon Mass in Medium Soil Pool", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"cSoilMedium", - "positive":"", - "standard_name":"medium_soil_pool_carbon_content", - "type":"real", - "units":"kg m-2", - "valid_max":"", - "valid_min":"" - }, - "cSoilSlow":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"Carbon mass per unit area in slow soil pool. Slow means a lifetime of more than 100 years for reference climate (20th century) in the absence of water limitations.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Carbon Mass in Slow Soil Pool", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"cSoilSlow", - "positive":"", - "standard_name":"slow_soil_pool_carbon_content", - "type":"real", - "units":"kg m-2", - "valid_max":"", - "valid_min":"" - }, - "cVeg":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"Carbon mass per unit area in vegetation.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Carbon Mass in Vegetation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"cVeg", - "positive":"", - "standard_name":"vegetation_carbon_content", - "type":"real", - "units":"kg m-2", - "valid_max":"", - "valid_min":"" - }, - "cropFrac":{ - "cell_measures":"", - "cell_methods":"area: mean where land over all_area_types time: mean", - "comment":"Percentage of entire grid cell that is covered by crop.", - "dimensions":"longitude latitude time typecrop", - "frequency":"mon", - "long_name":"Crop Fraction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"cropFrac", - "positive":"", - "standard_name":"area_fraction", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "evspsblsoi":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"Water evaporation from soil (including sublimation).", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Water Evaporation from Soil", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"evspsblsoi", - "positive":"", - "standard_name":"water_evaporation_flux_from_soil", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "evspsblveg":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"The canopy evaporation and sublimation (if present in model); may include dew formation as a negative flux.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Evaporation from Canopy", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"evspsblveg", -
"positive":"", - "standard_name":"water_evaporation_flux_from_canopy", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fFire":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"CO2 emissions (expressed as a carbon mass flux per unit area) from natural fires and human ignition fires as calculated by the fire module of the dynamic vegetation model, but excluding any CO2 flux from fire included in fLuc (CO2 Flux to Atmosphere from Land Use Change).", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Carbon Mass Flux into Atmosphere due to CO2 Emission from Fire", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fFire", - "positive":"up", - "standard_name":"surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_fires_excluding_anthropogenic_land_use_change", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fGrazing":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"Carbon mass flux per unit area due to grazing on land", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Carbon Mass Flux into Atmosphere due to Grazing on Land", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fGrazing", - "positive":"up", - "standard_name":"surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_grazing", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fHarvest":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"Carbon mass flux per unit area due to crop harvesting", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Carbon Mass Flux into Atmosphere due to Crop Harvesting", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fHarvest", - "positive":"up", - "standard_name":"surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_crop_harvesting", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fLitterSoil":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"Carbon mass flux per unit area into soil from litter (dead plant material in or above the soil).", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Total Carbon Mass Flux from Litter to Soil", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fLitterSoil", - "positive":"", - "standard_name":"carbon_mass_flux_into_soil_from_litter", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fVegLitter":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Total Carbon Mass Flux from Vegetation to Litter", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fVegLitter", - "positive":"", - "standard_name":"litter_carbon_flux", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fVegSoil":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"Carbon mass flux per unit area from vegetation directly into soil, without intermediate conversion to litter.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Total Carbon Mass Flux from Vegetation Directly to Soil", - "ok_max_mean_abs":"", - 
"ok_min_mean_abs":"", - "out_name":"fVegSoil", - "positive":"", - "standard_name":"carbon_mass_flux_into_soil_from_vegetation_excluding_litter", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fapar":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"The fraction of incoming solar radiation in the photosynthetically active radiation spectral region that is absorbed by a vegetation canopy.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Fraction of Absorbed Photosynthetically Active Radiation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fapar", - "positive":"", - "standard_name":"fraction_of_surface_downwelling_photosynthetic_radiative_flux_absorbed_by_vegetation", - "type":"real", - "units":"1", - "valid_max":"1.0", - "valid_min":"0.0" - }, - "gpp":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Carbon Mass Flux out of Atmosphere due to Gross Primary Production on Land", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"gpp", - "positive":"", - "standard_name":"gross_primary_productivity_of_biomass_expressed_as_carbon", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "grassFrac":{ - "cell_measures":"", - "cell_methods":"area: mean where land over all_area_types time: mean", - "comment":"Percentage of entire grid cell that is covered by natural grass.", - "dimensions":"longitude latitude time typenatgr", - "frequency":"mon", - "long_name":"Natural Grass Fraction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"grassFrac", - "positive":"", - "standard_name":"area_fraction", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "lai":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Leaf Area Index", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"lai", - "positive":"", - "standard_name":"leaf_area_index", - "type":"real", - "units":"1", - "valid_max":"", - "valid_min":"" - }, - "landCoverFrac":{ - "cell_measures":"", - "cell_methods":"area: mean where land over all_area_types time: mean", - "comment":"Percentage of grid cell area occupied by different model vegetation/land cover categories. The categories may differ from model to model, depending on each model's subgrid land cover category definitions. Categories may include natural vegetation, anthropogenic vegetation, bare soil, lakes, urban areas, glaciers, etc. 
Sum of all should equal the fraction of the grid-cell that is land.", - "dimensions":"longitude latitude vegtype time", - "frequency":"mon", - "long_name":"Plant Functional Type Grid Fraction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"landCoverFrac", - "positive":"", - "standard_name":"area_fraction", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "mrfso":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"The mass per unit area (summed over all model layers) of frozen water.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Soil Frozen Water Content", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"mrfso", - "positive":"", - "standard_name":"soil_frozen_water_content", - "type":"real", - "units":"kg m-2", - "valid_max":"", - "valid_min":"" - }, - "mrro":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"The total run-off (including drainage through the base of the soil model) per unit area leaving the land portion of the grid cell.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Total Runoff", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"mrro", - "positive":"", - "standard_name":"runoff_flux", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "mrros":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"The total surface run off leaving the land portion of the grid cell (excluding drainage through the base of the soil model).", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Surface Runoff", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"mrros", - "positive":"", - "standard_name":"surface_runoff_flux", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "mrso":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"the mass per unit area (summed over all soil layers) of water in all phases.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Total Soil Moisture Content", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"mrso", - "positive":"", - "standard_name":"soil_moisture_content", - "type":"real", - "units":"kg m-2", - "valid_max":"", - "valid_min":"" - }, - "mrsos":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"The mass of water in all phases in the upper 10cm of the soil layer.", - "dimensions":"longitude latitude time sdepth1", - "frequency":"mon", - "long_name":"Moisture in Upper Portion of Soil Column", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"mrsos", - "positive":"", - "standard_name":"moisture_content_of_soil_layer", - "type":"real", - "units":"kg m-2", - "valid_max":"", - "valid_min":"" - }, - "nbp":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"This is the net mass flux of carbon from atmosphere into land, calculated as photosynthesis MINUS the sum of plant and soil respiration, carbon fluxes from fire, harvest, grazing and land use change. 
Positive flux is into the land.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Carbon Mass Flux out of Atmosphere due to Net Biospheric Production on Land", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"nbp", - "positive":"down", - "standard_name":"surface_net_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_all_land_processes", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "ndvi":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Normalized Difference Vegetation Index", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"ndvi", - "positive":"", - "standard_name":"normalized_difference_vegetation_index", - "type":"real", - "units":"1", - "valid_max":"1.0", - "valid_min":"-0.1" - }, - "npp":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Carbon Mass Flux out of Atmosphere due to Net Primary Production on Land", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"npp", - "positive":"down", - "standard_name":"net_primary_productivity_of_biomass_expressed_as_carbon", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "nppLeaf":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"This is the rate of carbon uptake by leaves due to NPP", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Carbon Mass Flux due to NPP Allocation to Leaf", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"nppLeaf", - "positive":"down", - "standard_name":"net_primary_productivity_of_carbon_accumulated_in_leaves", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "nppRoot":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"This is the rate of carbon uptake by roots due to NPP", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Carbon Mass Flux due to NPP Allocation to Roots", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"nppRoot", - "positive":"down", - "standard_name":"net_primary_productivity_of_carbon_accumulated_in_roots", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "nppWood":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"This is the rate of carbon uptake by wood due to NPP", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Carbon Mass Flux due to NPP Allocation to Wood", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"nppWood", - "positive":"down", - "standard_name":"net_primary_productivity_of_carbon_accumulated_in_wood", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "pastureFrac":{ - "cell_measures":"", - "cell_methods":"area: mean where land over all_area_types time: mean", - "comment":"Percentage of entire grid cell that is covered by anthropogenic pasture.", - "dimensions":"longitude latitude time typepasture", - "frequency":"mon", - "long_name":"Anthropogenic Pasture Fraction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"pastureFrac", - "positive":"", - "standard_name":"area_fraction", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - 
}, - "prveg":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"The precipitation flux that is intercepted by the vegetation canopy (if present in model) before reaching the ground.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Precipitation onto Canopy", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"prveg", - "positive":"", - "standard_name":"precipitation_flux_onto_canopy", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "rGrowth":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Carbon Mass Flux into Atmosphere due to Growth Autotrophic Respiration on Land", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"rGrowth", - "positive":"up", - "standard_name":"surface_upward_carbon_mass_flux_due_to_plant_respiration_for_biomass_growth", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "rMaint":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Carbon Mass Flux into Atmosphere due to Maintenance Autotrophic Respiration on Land", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"rMaint", - "positive":"up", - "standard_name":"surface_upward_carbon_mass_flux_due_to_plant_respiration_for_biomass_maintenance", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "ra":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"Carbon mass flux per unit area into atmosphere due to autotrophic respiration on land (respiration by producers) [see rh for heterotrophic production]", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Carbon Mass Flux into Atmosphere due to Autotrophic (Plant) Respiration on Land", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"ra", - "positive":"up", - "standard_name":"plant_respiration_carbon_flux", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "residualFrac":{ - "cell_measures":"", - "cell_methods":"area: mean where land over all_area_types time: mean", - "comment":"Percentage of entire grid cell that is land and is covered by neither vegetation nor bare-soil (e.g., urban, ice, lakes, etc.)", - "dimensions":"longitude latitude time typeresidual", - "frequency":"mon", - "long_name":"Fraction of Grid Cell that is Land but Neither Vegetation-Covered nor Bare Soil", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"residualFrac", - "positive":"", - "standard_name":"area_fraction", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "rh":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"Carbon mass flux per unit area into atmosphere due to heterotrophic respiration on land (respiration by consumers)", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Carbon Mass Flux into Atmosphere due to Heterotrophic Respiration on Land", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"rh", - "positive":"up", - "standard_name":"heterotrophic_respiration_carbon_flux", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "shrubFrac":{ - "cell_measures":"", - "cell_methods":"area: mean where land 
over all_area_types time: mean", - "comment":"Percentage of entire grid cell that is covered by shrub.", - "dimensions":"longitude latitude time typeshrub", - "frequency":"mon", - "long_name":"Shrub Fraction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"shrubFrac", - "positive":"", - "standard_name":"area_fraction", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "tran":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"Transpiration (may include dew formation as a negative flux).", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Transpiration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"tran", - "positive":"up", - "standard_name":"transpiration_flux", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "treeFrac":{ - "cell_measures":"", - "cell_methods":"area: mean where land over all_area_types time: mean", - "comment":"Percentage of entire grid cell that is covered by trees.", - "dimensions":"longitude latitude time typetree", - "frequency":"mon", - "long_name":"Tree Cover Fraction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"treeFrac", - "positive":"", - "standard_name":"area_fraction", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "treeFracPrimDec":{ - "cell_measures":"", - "cell_methods":"area: mean where land over all_area_types time: mean", - "comment":"Percentage of the entire grid cell that is covered by total primary deciduous trees.", - "dimensions":"longitude latitude time typepdec", - "frequency":"mon", - "long_name":"Total Primary Deciduous Tree Fraction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"treeFracPrimDec", - "positive":"", - "standard_name":"area_fraction", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "treeFracPrimEver":{ - "cell_measures":"", - "cell_methods":"area: mean where land over all_area_types time: mean", - "comment":"Percentage of entire grid cell that is covered by primary evergreen trees.", - "dimensions":"longitude latitude time typepever", - "frequency":"mon", - "long_name":"Total Primary Evergreen Tree Cover Fraction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"treeFracPrimEver", - "positive":"", - "standard_name":"area_fraction", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "treeFracSecDec":{ - "cell_measures":"", - "cell_methods":"area: mean where land over all_area_types time: mean", - "comment":"Percentage of entire grid cell that is covered by secondary deciduous trees.", - "dimensions":"longitude latitude time typesdec", - "frequency":"mon", - "long_name":"Total Secondary Deciduous Tree Cover Fraction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"treeFracSecDec", - "positive":"", - "standard_name":"area_fraction", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "treeFracSecEver":{ - "cell_measures":"", - "cell_methods":"area: mean where land over all_area_types time: mean", - "comment":"Percentage of entire grid cell that is covered by secondary evergreen trees.", - "dimensions":"longitude latitude time typesever", - "frequency":"mon", - "long_name":"Total Secondary Evergreen Tree Cover Fraction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"treeFracSecEver", - "positive":"", - "standard_name":"area_fraction", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - 
"tsl":{ - "cell_measures":"", - "cell_methods":"area: mean where land time: mean", - "comment":"Temperature of soil. Reported as missing for grid cells with no land.", - "dimensions":"longitude latitude sdepth time", - "frequency":"mon", - "long_name":"Temperature of Soil", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"tsl", - "positive":"", - "standard_name":"soil_temperature", - "type":"real", - "units":"K", - "valid_max":"", - "valid_min":"" - } - } -} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_Omon.json b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_Omon.json deleted file mode 100644 index 2c5306fcb9..0000000000 --- a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_Omon.json +++ /dev/null @@ -1,4609 +0,0 @@ -{ - "Header":{ - "#dataRequest_specs_version":"01.00.21", - "#mip_era":"CMIP6", - "Conventions":"CF-1.7 ODS-2.1", - "approx_interval":"30.00000", - "cmor_version":"3.2", - "data_specs_version":"2.1.0", - "generic_levels":"olevel", - "int_missing_value":"-2147483648", - "missing_value":"1e20", - "product":"observations", - "realm":"ocean", - "table_date":"07 March 2018", - "table_id":"Table obs4MIPs_Omon" - }, - "variable_entry":{ - "agessc":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Time elapsed since water was last in surface layer of the ocean.", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Sea Water Age Since Surface Contact", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"agessc", - "positive":"", - "standard_name":"sea_water_age_since_surface_contact", - "type":"real", - "units":"yr", - "valid_max":"", - "valid_min":"" - }, - "arag":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Sum of particulate aragonite components (e.g. 
Phytoplankton, Detrital, etc.)", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Aragonite Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"arag", - "positive":"", - "standard_name":"mole_concentration_of_aragonite_expressed_as_carbon_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "bacc":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Sum of bacterial carbon component concentrations", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Bacterial Carbon Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"bacc", - "positive":"", - "standard_name":"mole_concentration_of_bacteria_expressed_as_carbon_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "bfe":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Sum of particulate organic iron component concentrations", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Mole Concentration of Particulate Organic Matter expressed as Iron in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"bfe", - "positive":"", - "standard_name":"mole_concentration_of_particulate_organic_matter_expressed_as_iron_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "bfeos":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"sum of particulate organic iron component concentrations", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Mole Concentration of Particulate Organic Matter expressed as Iron in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"bfeos", - "positive":"", - "standard_name":"mole_concentration_of_particulate_organic_matter_expressed_as_iron_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "bigthetao":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Sea water conservative temperature (this should be contributed only for models using conservative temperature as prognostic field)", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Sea Water Conservative Temperature", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"bigthetao", - "positive":"", - "standard_name":"sea_water_conservative_temperature", - "type":"real", - "units":"degC", - "valid_max":"", - "valid_min":"" - }, - "bigthetaoga":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Diagnostic should be contributed only for models using conservative temperature as prognostic field.", - "dimensions":"time", - "frequency":"mon", - "long_name":"Global Average Sea Water Conservative Temperature", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"bigthetaoga", - "positive":"", - "standard_name":"sea_water_conservative_temperature", - "type":"real", - "units":"degC", - "valid_max":"", - "valid_min":"" - }, - "bsi":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Sum of particulate silica component concentrations", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Mole Concentration of Particulate Organic Matter expressed as silicon in sea water", -
"ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"bsi", - "positive":"", - "standard_name":"mole_concentration_of_particulate_matter_expressed_as_silicon_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "bsios":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"sum of particulate silica component concentrations", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Mole Concentration of Particulate Organic Matter expressed as Silicon in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"bsios", - "positive":"", - "standard_name":"mole_concentration_of_particulate_organic_matter_expressed_as_silicon_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "calc":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Sum of particulate calcite component concentrations (e.g. Phytoplankton, Detrital, etc.)", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Calcite Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"calc", - "positive":"", - "standard_name":"mole_concentration_of_calcite_expressed_as_carbon_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "cfc11":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Moles Per Unit Mass of CFC-11 in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"cfc11", - "positive":"", - "standard_name":"mole_concentration_of_cfc11_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "cfc12":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Moles Per Unit Mass of CFC-12 in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"cfc12", - "positive":"", - "standard_name":"mole_concentration_of_cfc12_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "chl":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Sum of chlorophyll from all phytoplankton group concentrations. 
In most models this is equal to chldiat+chlmisc, that is the sum of Diatom Chlorophyll Mass Concentration and Other Phytoplankton Chlorophyll Mass Concentration", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Mass Concentration of Total Chlorophyll in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"chl", - "positive":"", - "standard_name":"mass_concentration_of_phytoplankton_expressed_as_chlorophyll_in_sea_water", - "type":"real", - "units":"kg m-3", - "valid_max":"", - "valid_min":"" - }, - "chlcalc":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"chlorophyll concentration from the calcite-producing phytoplankton component alone", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Mass Concentration of Calcareous Phytoplankton expressed as Chlorophyll in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"chlcalc", - "positive":"", - "standard_name":"mass_concentration_of_calcareous_phytoplankton_expressed_as_chlorophyll_in_sea_water", - "type":"real", - "units":"kg m-3", - "valid_max":"", - "valid_min":"" - }, - "chlcalcos":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"chlorophyll concentration from the calcite-producing phytoplankton component alone", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Mass Concentration of Calcareous Phytoplankton expressed as Chlorophyll in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"chlcalcos", - "positive":"", - "standard_name":"mass_concentration_of_calcareous_phytoplankton_expressed_as_chlorophyll_in_sea_water", - "type":"real", - "units":"kg m-3", - "valid_max":"", - "valid_min":"" - }, - "chldiat":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Chlorophyll from diatom phytoplankton component concentration alone", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Mass Concentration of Diatom expressed as Chlorophyll in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"chldiat", - "positive":"", - "standard_name":"mass_concentration_of_diatoms_expressed_as_chlorophyll_in_sea_water", - "type":"real", - "units":"kg m-3", - "valid_max":"", - "valid_min":"" - }, - "chldiatos":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"chlorophyll from diatom phytoplankton component concentration alone", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Mass Concentration of Diatoms expressed as Chlorophyll in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"chldiatos", - "positive":"", - "standard_name":"mass_concentration_of_diatoms_expressed_as_chlorophyll_in_sea_water", - "type":"real", - "units":"kg m-3", - "valid_max":"", - "valid_min":"" - }, - "chldiaz":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Chlorophyll concentration from the diazotrophic phytoplankton component alone", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Mass Concentration of Diazotrophs expressed as Chlorophyll in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"chldiaz", - "positive":"", - "standard_name":"mass_concentration_of_diazotrophs_expressed_as_chlorophyll_in_sea_water", - "type":"real", - 
"units":"kg m-3", - "valid_max":"", - "valid_min":"" - }, - "chldiazos":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"chlorophyll concentration from the diazotrophic phytoplankton component alone", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Mass Concentration of Diazotrophs expressed as Chlorophyll in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"chldiazos", - "positive":"", - "standard_name":"mass_concentration_of_diazotrophs_expressed_as_chlorophyll_in_sea_water", - "type":"real", - "units":"kg m-3", - "valid_max":"", - "valid_min":"" - }, - "chlmisc":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Chlorophyll from additional phytoplankton component concentrations alone", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Mass Concentration of Other Phytoplankton expressed as Chlorophyll in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"chlmisc", - "positive":"", - "standard_name":"mass_concentration_of_miscellaneous_phytoplankton_expressed_as_chlorophyll_in_sea_water", - "type":"real", - "units":"kg m-3", - "valid_max":"", - "valid_min":"" - }, - "chlmiscos":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"chlorophyll from additional phytoplankton component concentrations alone", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Mass Concentration of Other Phytoplankton expressed as Chlorophyll in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"chlmiscos", - "positive":"", - "standard_name":"mass_concentration_of_miscellaneous_phytoplankton_expressed_as_chlorophyll_in_sea_water", - "type":"real", - "units":"kg m-3", - "valid_max":"", - "valid_min":"" - }, - "chlos":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Sum of chlorophyll from all phytoplankton group concentrations at the sea surface. 
In most models this is equal to chldiat+chlmisc, that is the sum of 'Diatom Chlorophyll Mass Concentration' plus 'Other Phytoplankton Chlorophyll Mass Concentration'", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Mass Concentration of Total Phytoplankton expressed as Chlorophyll in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"chlos", - "positive":"", - "standard_name":"mass_concentration_of_phytoplankton_expressed_as_chlorophyll_in_sea_water", - "type":"real", - "units":"kg m-3", - "valid_max":"", - "valid_min":"" - }, - "chlpico":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"chlorophyll concentration from the picophytoplankton (<2 um) component alone", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Mass Concentration of Picophytoplankton expressed as Chlorophyll in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"chlpico", - "positive":"", - "standard_name":"mass_concentration_of_picophytoplankton_expressed_as_chlorophyll_in_sea_water", - "type":"real", - "units":"kg m-3", - "valid_max":"", - "valid_min":"" - }, - "chlpicoos":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"chlorophyll concentration from the picophytoplankton (<2 um) component alone", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Mass Concentration of Picophytoplankton expressed as Chlorophyll in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"chlpicoos", - "positive":"", - "standard_name":"mass_concentration_of_picophytoplankton_expressed_as_chlorophyll_in_sea_water", - "type":"real", - "units":"kg m-3", - "valid_max":"", - "valid_min":"" - }, - "co3":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Carbonate ion Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"co3", - "positive":"", - "standard_name":"mole_concentration_of_carbonate_expressed_as_carbon_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "co3abio":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Abiotic Carbonate ion Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"co3abio", - "positive":"", - "standard_name":"mole_concentration_of_carbonate_abiotic_analogue_expressed_as_carbon_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "co3nat":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Natural Carbonate ion Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"co3nat", - "positive":"", - "standard_name":"mole_concentration_of_carbonate_natural_analogue_expressed_as_carbon_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "co3satarag":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Carbonate ion Concentration for sea water in equilibrium with pure 
Aragonite", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"co3satarag", - "positive":"", - "standard_name":"mole_concentration_of_carbonate_expressed_as_carbon_at_equilibrium_with_pure_aragonite_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "co3satcalc":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Carbonate ion Concentration for sea water in equilibrium with pure Calcite", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"co3satcalc", - "positive":"", - "standard_name":"mole_concentration_of_carbonate_expressed_as_carbon_at_equilibrium_with_pure_calcite_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "detoc":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Sum of detrital organic carbon component concentrations", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Detrital Organic Carbon Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"detoc", - "positive":"", - "standard_name":"mole_concentration_of_organic_detritus_expressed_as_carbon_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "dfe":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Dissolved iron in sea water, including both Fe2+ and Fe3+ ions (but not particulate detrital iron)", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Mole Concentration of Dissolved Iron in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"dfe", - "positive":"", - "standard_name":"mole_concentration_of_dissolved_iron_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "dfeos":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"dissolved iron in sea water is meant to include both Fe2+ and Fe3+ ions (but not, e.g., particulate detrital iron)", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Dissolved Iron Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"dfeos", - "positive":"", - "standard_name":"mole_concentration_of_dissolved_iron_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "dissi13c":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Dissolved inorganic 14carbon (CO3+HCO3+H2CO3) concentration", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Dissolved Inorganic 13Carbon Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"dissi13c", - "positive":"", - "standard_name":"mole_concentration_of_dissolved_inorganic_carbon13_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "dissi13cos":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Dissolved inorganic 14carbon (CO3+HCO3+H2CO3) concentration", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Dissolved Inorganic 13Carbon Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"dissi13cos", - "positive":"", - 
"standard_name":"mole_concentration_of_dissolved_inorganic_carbon13_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "dissi14cabio":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Abiotic Dissolved inorganic 14carbon (CO3+HCO3+H2CO3) concentration", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Abiotic Dissolved Inorganic 14Carbon Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"dissi14cabio", - "positive":"", - "standard_name":"mole_concentration_of_dissolved_inorganic_carbon14_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "dissi14cabioos":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Abiotic Dissolved inorganic 14carbon (CO3+HCO3+H2CO3) concentration", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Abiotic Dissolved Inorganic 14Carbon Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"dissi14cabioos", - "positive":"", - "standard_name":"mole_concentration_of_dissolved_inorganic_carbon14_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "dissic":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Dissolved inorganic carbon (CO3+HCO3+H2CO3) concentration", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Dissolved Inorganic Carbon Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"dissic", - "positive":"", - "standard_name":"mole_concentration_of_dissolved_inorganic_carbon_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "dissicabio":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Abiotic Dissolved inorganic carbon (CO3+HCO3+H2CO3) concentration", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Abiotic Dissolved Inorganic Carbon Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"dissicabio", - "positive":"", - "standard_name":"mole_concentration_of_dissolved_inorganic_carbon_abiotic_analogue_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "dissicabioos":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Abiotic Dissolved inorganic carbon (CO3+HCO3+H2CO3) concentration", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Abiotic Dissolved Inorganic Carbon Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"dissicabioos", - "positive":"", - "standard_name":"mole_concentration_of_dissolved_inorganic_carbon_abiotic_analogue_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "dissicnat":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Dissolved inorganic carbon (CO3+HCO3+H2CO3) concentration at preindustrial atmospheric xCO2", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Natural Dissolved Inorganic Carbon Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"dissicnat", - "positive":"", - "standard_name":"mole_concentration_of_dissolved_inorganic_carbon_natural_analogue_in_sea_water", - 
"type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "dissicnatos":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Dissolved inorganic carbon (CO3+HCO3+H2CO3) concentration at preindustrial atmospheric xCO2", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Natural Dissolved Inorganic Carbon Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"dissicnatos", - "positive":"", - "standard_name":"mole_concentration_of_dissolved_inorganic_carbon_natural_analogue_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "dissoc":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Sum of dissolved carbon component concentrations explicitly represented (i.e. not ~40 uM refractory unless explicit)", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Dissolved Organic Carbon Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"dissoc", - "positive":"", - "standard_name":"mole_concentration_of_dissolved_organic_carbon_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "dmso":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Mole concentration of dimethyl sulphide in water", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Mole Concentration of Dimethyl Sulphide in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"dmso", - "positive":"", - "standard_name":"mole_concentration_of_dimethyl_sulfide_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "dmsos":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Mole Concentration of Dimethyl Sulphide in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"dmsos", - "positive":"", - "standard_name":"mole_concentration_of_dimethyl_sulfide_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "dpco2":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Delta PCO2", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"dpco2", - "positive":"", - "standard_name":"surface_carbon_dioxide_partial_pressure_difference_between_sea_water_and_air", - "type":"real", - "units":"Pa", - "valid_max":"", - "valid_min":"" - }, - "dpco2abio":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Abiotic Delta PCO2", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"dpco2abio", - "positive":"", - "standard_name":"surface_carbon_dioxide_abiotic_analogue_partial_pressure_difference_between_sea_water_and_air", - "type":"real", - "units":"Pa", - "valid_max":"", - "valid_min":"" - }, - "dpco2nat":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Natural Delta PCO2", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"dpco2nat", - "positive":"", - 
"standard_name":"surface_carbon_dioxide_natural_analogue_partial_pressure_difference_between_sea_water_and_air", - "type":"real", - "units":"Pa", - "valid_max":"", - "valid_min":"" - }, - "dpo2":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Delta PO2", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"dpo2", - "positive":"", - "standard_name":"surface_molecular_oxygen_partial_pressure_difference_between_sea_water_and_air", - "type":"real", - "units":"Pa", - "valid_max":"", - "valid_min":"" - }, - "eparag100":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time depth100m", - "frequency":"mon", - "long_name":"Downward Flux of Aragonite", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"eparag100", - "positive":"", - "standard_name":"sinking_mole_flux_of_aragonite_expressed_as_carbon_in_sea_water", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "epc100":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time depth100m", - "frequency":"mon", - "long_name":"Downward Flux of Particle Organic Carbon", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"epc100", - "positive":"", - "standard_name":"sinking_mole_flux_of_particulate_organic_matter_expressed_as_carbon_in_sea_water", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "epcalc100":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time depth100m", - "frequency":"mon", - "long_name":"Downward Flux of Calcite", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"epcalc100", - "positive":"", - "standard_name":"sinking_mole_flux_of_calcite_expressed_as_carbon_in_sea_water", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "epfe100":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time depth100m", - "frequency":"mon", - "long_name":"Downward Flux of Particulate Iron", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"epfe100", - "positive":"", - "standard_name":"sinking_mole_flux_of_particulate_iron_in_sea_water", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "epn100":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time depth100m", - "frequency":"mon", - "long_name":"Downward Flux of Particulate Nitrogen", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"epn100", - "positive":"", - "standard_name":"sinking_mole_flux_of_particulate_organic_nitrogen_in_sea_water", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "epp100":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time depth100m", - "frequency":"mon", - "long_name":"Downward Flux of Particulate Phosphorus", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"epp100", - "positive":"", - "standard_name":"sinking_mole_flux_of_particulate_organic_phosphorus_in_sea_water", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - 
"epsi100":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time depth100m", - "frequency":"mon", - "long_name":"Downward Flux of Particulate Silica", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"epsi100", - "positive":"", - "standard_name":"sinking_mole_flux_of_particulate_silicon_in_sea_water", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "evs":{ - "cell_measures":"", - "cell_methods":"area: mean where ice_free_sea over sea time: mean", - "comment":"computed as the total mass of water vapor evaporating from the ice-free portion of the ocean divided by the area of the ocean portion of the grid cell.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Water Evaporation Flux Where Ice Free Ocean over Sea", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"evs", - "positive":"", - "standard_name":"water_evaporation_flux", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "expc":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Downward flux of particulate organic carbon", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Sinking Particulate Organic Carbon Flux", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"expc", - "positive":"down", - "standard_name":"sinking_mole_flux_of_particulate_organic_matter_expressed_as_carbon_in_sea_water", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fbddtalk":{ - "cell_measures":"", - "cell_methods":"area: mean where sea depth: sum where sea (top 100m only) time: mean", - "comment":"vertical integral of net biological terms in time rate of change of alkalinity", - "dimensions":"longitude latitude time olayer100m", - "frequency":"mon", - "long_name":"Rate of Change of Biological Alkalinity due to Biological Activity", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fbddtalk", - "positive":"", - "standard_name":"integral_wrt_depth_of_tendency_of_sea_water_alkalinity_expressed_as_mole_equivalent_due_to_biological_processes", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fbddtdic":{ - "cell_measures":"", - "cell_methods":"area: mean where sea depth: sum where sea (top 100m only) time: mean", - "comment":"vertical integral of net biological terms in time rate of change of dissolved inorganic carbon", - "dimensions":"longitude latitude time olayer100m", - "frequency":"mon", - "long_name":"Rate of Change of Dissolved Inorganic Carbon due to Biological Activity", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fbddtdic", - "positive":"", - "standard_name":"tendency_of_ocean_mole_content_of_dissolved_inorganic_carbon_due_to_biological_processes", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fbddtdife":{ - "cell_measures":"", - "cell_methods":"area: mean where sea depth: sum where sea (top 100m only) time: mean", - "comment":"vertical integral of net biological terms in time rate of change of dissolved inorganic iron", - "dimensions":"longitude latitude time olayer100m", - "frequency":"mon", - "long_name":"Rate of Change of Dissolved Inorganic Iron due to Biological Activity", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fbddtdife", - "positive":"", - 
"standard_name":"tendency_of_ocean_mole_content_of_dissolved_inorganic_iron_due_to_biological_processes", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fbddtdin":{ - "cell_measures":"", - "cell_methods":"area: mean where sea depth: sum where sea (top 100m only) time: mean", - "comment":"vertical integral of net biological terms in time rate of change of nitrogen nutrients (e.g. NO3+NH4)", - "dimensions":"longitude latitude time olayer100m", - "frequency":"mon", - "long_name":"Rate of Change of Dissolved Inorganic Nitrogen due to Biological Activity", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fbddtdin", - "positive":"", - "standard_name":"tendency_of_ocean_mole_content_of_dissolved_inorganic_nitrogen_due_to_biological_processes", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fbddtdip":{ - "cell_measures":"", - "cell_methods":"area: mean where sea depth: sum where sea (top 100m only) time: mean", - "comment":"vertical integral of net biological terms in time rate of change of phosphate", - "dimensions":"longitude latitude time olayer100m", - "frequency":"mon", - "long_name":"Rate of Change of Dissolved Inorganic Phosphorus due to Biological Activity", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fbddtdip", - "positive":"", - "standard_name":"tendency_of_ocean_mole_content_of_dissolved_inorganic_phosphorus_due_to_biological_processes", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fbddtdisi":{ - "cell_measures":"", - "cell_methods":"area: mean where sea depth: sum where sea (top 100m only) time: mean", - "comment":"vertical integral of net biological terms in time rate of change of dissolved inorganic silicate", - "dimensions":"longitude latitude time olayer100m", - "frequency":"mon", - "long_name":"Rate of Change of Dissolved Inorganic Silicon due to Biological Activity", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fbddtdisi", - "positive":"", - "standard_name":"tendency_of_ocean_mole_content_of_dissolved_inorganic_silicon_due_to_biological_processes", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fddtalk":{ - "cell_measures":"", - "cell_methods":"area: mean where sea depth: sum where sea (top 100m only) time: mean", - "comment":"vertical integral of net time rate of change of alkalinity", - "dimensions":"longitude latitude time olayer100m", - "frequency":"mon", - "long_name":"Rate of Change of Total Alkalinity", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fddtalk", - "positive":"", - "standard_name":"integral_wrt_depth_of_tendency_of_sea_water_alkalinity_expressed_as_mole_equivalent", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fddtdic":{ - "cell_measures":"", - "cell_methods":"area: mean where sea depth: sum where sea (top 100m only) time: mean", - "comment":"", - "dimensions":"longitude latitude time olayer100m", - "frequency":"mon", - "long_name":"Rate of Change of Net Dissolved Inorganic Carbon", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fddtdic", - "positive":"", - "standard_name":"tendency_of_ocean_mole_content_of_dissolved_inorganic_carbon", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fddtdife":{ - "cell_measures":"", - "cell_methods":"area: mean where sea depth: sum where sea (top 100m only) time: mean", - "comment":"vertical integral of net time rate of 
change of dissolved inorganic iron", - "dimensions":"longitude latitude time olayer100m", - "frequency":"mon", - "long_name":"Rate of Change of Net Dissolved Inorganic Iron", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fddtdife", - "positive":"", - "standard_name":"tendency_of_ocean_mole_content_of_dissolved_inorganic_iron", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fddtdin":{ - "cell_measures":"", - "cell_methods":"area: mean where sea depth: sum where sea (top 100m only) time: mean", - "comment":"Net time rate of change of nitrogen nutrients (e.g. NO3+NH4)", - "dimensions":"longitude latitude time olayer100m", - "frequency":"mon", - "long_name":"Rate of Change of Net Dissolved Inorganic Nitrogen", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fddtdin", - "positive":"", - "standard_name":"tendency_of_ocean_mole_content_of_dissolved_inorganic_nitrogen", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fddtdip":{ - "cell_measures":"", - "cell_methods":"area: mean where sea depth: sum where sea (top 100m only) time: mean", - "comment":"vertical integral of net time rate of change of phosphate", - "dimensions":"longitude latitude time olayer100m", - "frequency":"mon", - "long_name":"Rate of Change of Net Dissolved Inorganic Phosphate", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fddtdip", - "positive":"", - "standard_name":"tendency_of_ocean_mole_content_of_dissolved_inorganic_phosphorus", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fddtdisi":{ - "cell_measures":"", - "cell_methods":"area: mean where sea depth: sum where sea (top 100m only) time: mean", - "comment":"vertical integral of net time rate of change of dissolved inorganic silicate", - "dimensions":"longitude latitude time olayer100m", - "frequency":"mon", - "long_name":"Rate of Change of Net Dissolved Inorganic Silicon", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fddtdisi", - "positive":"", - "standard_name":"tendency_of_ocean_mole_content_of_dissolved_inorganic_silicon", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fg13co2":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Downward Flux of Abiotic 13CO2", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fg13co2", - "positive":"down", - "standard_name":"surface_downward_mass_flux_of_carbon13_dioxide_abiotic_analogue_expressed_as_carbon13", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fg14co2abio":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Gas exchange flux of abiotic 14CO2 (positive into ocean)", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Downward Flux of Abiotic 14CO2", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fg14co2abio", - "positive":"down", - "standard_name":"surface_downward_mass_flux_of_carbon14_dioxide_abiotic_analogue_expressed_as_carbon", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fgcfc11":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"gas exchange flux of CFC11", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Surface Downward CFC11 
flux", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fgcfc11", - "positive":"down", - "standard_name":"surface_downward_mole_flux_of_cfc11", - "type":"real", - "units":"mol sec-1 m-2", - "valid_max":"", - "valid_min":"" - }, - "fgcfc12":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"gas exchange flux of CFC12", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Surface Downward CFC12 flux", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fgcfc12", - "positive":"down", - "standard_name":"surface_downward_mole_flux_of_cfc12", - "type":"real", - "units":"mol sec-1 m-2", - "valid_max":"", - "valid_min":"" - }, - "fgco2":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Gas exchange flux of CO2 (positive into ocean)", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Downward Flux of Total CO2", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fgco2", - "positive":"down", - "standard_name":"surface_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fgco2abio":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Gas exchange flux of abiotic CO2 (positive into ocean)", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Downward Flux of Abiotic CO2", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fgco2abio", - "positive":"down", - "standard_name":"surface_downward_mass_flux_of_carbon_dioxide_abiotic_analogue_expressed_as_carbon", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fgco2nat":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Gas exchange flux of natural CO2 (positive into ocean)", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Downward Flux of Natural CO2", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fgco2nat", - "positive":"down", - "standard_name":"surface_downward_mass_flux_of_carbon_dioxide_natural_analogue_expressed_as_carbon", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fgdms":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Gas exchange flux of DMS (positive into atmosphere)", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Upward Flux of DMS", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fgdms", - "positive":"up", - "standard_name":"surface_upward_mole_flux_of_dimethyl_sulfide", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fgo2":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Gas exchange flux of O2 (positive into ocean)", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Downward Flux of O2", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fgo2", - "positive":"down", - "standard_name":"surface_downward_mole_flux_of_molecular_oxygen", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fgsf6":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"gas exchange flux of SF6", - "dimensions":"longitude latitude time", - 
"frequency":"mon", - "long_name":"Surface Downward SF6 flux", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fgsf6", - "positive":"down", - "standard_name":"surface_downward_mole_flux_of_sulfur_hexafluoride", - "type":"real", - "units":"mol sec-1 m-2", - "valid_max":"", - "valid_min":"" - }, - "ficeberg":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"computed as the iceberg melt water flux into the ocean divided by the area of the ocean portion of the grid cell.", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Water Flux into Sea Water From Icebergs", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"ficeberg", - "positive":"", - "standard_name":"water_flux_into_sea_water_from_icebergs", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "ficeberg2d":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"computed as the iceberg melt water flux into the ocean divided by the area of the ocean portion of the grid cell.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Water Flux into Sea Water From Icebergs", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"ficeberg", - "positive":"", - "standard_name":"water_flux_into_sea_water_from_icebergs", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "frfe":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Iron Loss to Sediments", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"frfe", - "positive":"", - "standard_name":"tendency_of_ocean_mole_content_of_iron_due_to_sedimentation", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fric":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Inorganic Carbon loss to sediments", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Downward Inorganic Carbon Flux at Ocean Bottom", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fric", - "positive":"", - "standard_name":"tendency_of_ocean_mole_content_of_inorganic_carbon_due_to_sedimentation", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "friver":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"computed as the river flux of water into the ocean divided by the area of the ocean portion of the grid cell.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Water Flux into Sea Water From Rivers", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"friver", - "positive":"", - "standard_name":"water_flux_into_sea_water_from_rivers", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "frn":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Nitrogen Loss to Sediments and through Denitrification", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"frn", - "positive":"", - "standard_name":"tendency_of_ocean_mole_content_of_elemental_nitrogen_due_to_denitrification_and_sedimentation", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "froc":{ - "cell_measures":"", - 
"cell_methods":"area: mean where sea time: mean", - "comment":"Organic Carbon loss to sediments", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Downward Organic Carbon Flux at Ocean Bottom", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"froc", - "positive":"", - "standard_name":"tendency_of_ocean_mole_content_of_organic_carbon_due_to_sedimentation", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fsfe":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Iron supply through deposition flux onto sea surface, runoff, coasts, sediments, etc", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Downward Net Flux of Iron", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fsfe", - "positive":"", - "standard_name":"tendency_of_ocean_mole_content_of_iron_due_to_deposition_and_runoff_and_sediment_dissolution", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fsitherm":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"computed as the sea ice thermodynamic water flux into the ocean divided by the area of the ocean portion of the grid cell.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Water Flux into Sea Water due to Sea Ice Thermodynamics", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fsitherm", - "positive":"", - "standard_name":"water_flux_into_sea_water_due_to_sea_ice_thermodynamics", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "fsn":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Downward Net Flux of Nitrogen", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"fsn", - "positive":"", - "standard_name":"tendency_of_ocean_mole_content_of_elemental_nitrogen_due_to_deposition_and_fixation_and_runoff", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "graz":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Total Grazing of Phytoplankton by Zooplankton", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"graz", - "positive":"", - "standard_name":"tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_grazing_of_phytoplankton", - "type":"real", - "units":"mol m-3 s-1", - "valid_max":"", - "valid_min":"" - }, - "hfbasin":{ - "cell_measures":"", - "cell_methods":"longitude: mean (basin) time: mean", - "comment":"Contains contributions from all physical processes affecting the northward heat transport, including resolved advection, parameterized advection, lateral diffusion, etc. Diagnosed here as a function of latitude and basin. 
Use Celsius for temperature scale.", - "dimensions":"latitude basin time", - "frequency":"mon", - "long_name":"Northward Ocean Heat Transport", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hfbasin", - "positive":"", - "standard_name":"northward_ocean_heat_transport", - "type":"real", - "units":"W", - "valid_max":"", - "valid_min":"" - }, - "hfbasinpadv":{ - "cell_measures":"", - "cell_methods":"longitude: mean (basin) time: mean", - "comment":"Contributions to heat transport from parameterized eddy-induced advective transport due to any subgrid advective process. Diagnosed here as a function of latitude and basin. Use Celsius for temperature scale.", - "dimensions":"latitude basin time", - "frequency":"mon", - "long_name":"northward ocean heat transport due to parameterized eddy advection", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hfbasinpadv", - "positive":"", - "standard_name":"northward_ocean_heat_transport_due_to_parameterized_eddy_advection", - "type":"real", - "units":"W", - "valid_max":"", - "valid_min":"" - }, - "hfbasinpmadv":{ - "cell_measures":"", - "cell_methods":"longitude: mean (basin) time: mean", - "comment":"Contributions to heat transport from parameterized mesoscale eddy-induced advective transport. Diagnosed here as a function of latitude and basin. Use Celsius for temperature scale.", - "dimensions":"latitude basin time", - "frequency":"mon", - "long_name":"northward ocean heat transport due to parameterized mesoscale advection", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hfbasinpmadv", - "positive":"", - "standard_name":"northward_ocean_heat_transport_due_to_parameterized_mesoscale_eddy_advection", - "type":"real", - "units":"W", - "valid_max":"", - "valid_min":"" - }, - "hfbasinpmdiff":{ - "cell_measures":"", - "cell_methods":"longitude: mean (basin) time: mean", - "comment":"Contributions to heat transport from parameterized mesoscale eddy-induced diffusive transport (i.e., neutral diffusion). Diagnosed here as a function of latitude and basin.", - "dimensions":"latitude basin time", - "frequency":"mon", - "long_name":"northward ocean heat transport due to parameterized mesoscale diffusion", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hfbasinpmdiff", - "positive":"", - "standard_name":"northward_ocean_heat_transport_due_to_parameterized_mesoscale_eddy_diffusion", - "type":"real", - "units":"W", - "valid_max":"", - "valid_min":"" - }, - "hfbasinpsmadv":{ - "cell_measures":"", - "cell_methods":"longitude: mean (basin) time: mean", - "comment":"Contributions to heat transport from parameterized mesoscale eddy-induced advective transport. Diagnosed here as a function of latitude and basin. 
Use Celsius for temperature scale.", - "dimensions":"latitude basin time", - "frequency":"mon", - "long_name":"northward ocean heat transport due to parameterized submesoscale advection", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hfbasinpsmadv", - "positive":"", - "standard_name":"northward_ocean_heat_transport_due_to_parameterized_submesoscale_eddy_advection", - "type":"real", - "units":"W", - "valid_max":"", - "valid_min":"" - }, - "hfcorr":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Heat Flux Correction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hfcorr", - "positive":"down", - "standard_name":"heat_flux_correction", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "hfds":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"This is the net flux of heat entering the liquid water column through its upper surface (excluding any 'flux adjustment') .", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Downward Heat Flux at Sea Water Surface", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hfds", - "positive":"down", - "standard_name":"surface_downward_heat_flux_in_sea_water", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "hfevapds":{ - "cell_measures":"", - "cell_methods":"area: mean where ice_free_sea over sea time: mean", - "comment":"This is defined as 'where ice_free_sea over sea'", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Temperature Flux due to Evaporation Expressed as Heat Flux Out of Sea Water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hfevapds", - "positive":"up", - "standard_name":"temperature_flux_due_to_evaporation_expressed_as_heat_flux_out_of_sea_water", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "hfgeou":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Upward Geothermal Heat Flux at Sea Floor", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hfgeou", - "positive":"up", - "standard_name":"upward_geothermal_heat_flux_at_sea_floor", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "hfibthermds":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Heat Flux into Sea Water due to Iceberg Thermodynamics", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hfibthermds", - "positive":"", - "standard_name":"heat_flux_into_sea_water_due_to_iceberg_thermodynamics", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "hfibthermds2d":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Heat Flux into Sea Water due to Iceberg Thermodynamics", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hfibthermds", - "positive":"", - "standard_name":"heat_flux_into_sea_water_due_to_iceberg_thermodynamics", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "hflso":{ - "cell_measures":"", - "cell_methods":"area: mean where ice_free_sea 
over sea time: mean", - "comment":"This is defined as with the cell methods string: where ice_free_sea over sea", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Surface Downward Latent Heat Flux", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hflso", - "positive":"up", - "standard_name":"surface_downward_latent_heat_flux", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "hfrainds":{ - "cell_measures":"", - "cell_methods":"area: mean where ice_free_sea over sea time: mean", - "comment":"This is defined as 'where ice_free_sea over sea'; i.e., the total flux (considered here) entering the ice-free portion of the grid cell divided by the area of the ocean portion of the grid cell. All such heat fluxes are computed based on Celsius scale.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Temperature Flux due to Rainfall Expressed as Heat Flux into Sea Water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hfrainds", - "positive":"down", - "standard_name":"temperature_flux_due_to_rainfall_expressed_as_heat_flux_into_sea_water", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "hfrunoffds":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Temperature Flux due to Runoff Expressed as Heat Flux into Sea Water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hfrunoffds", - "positive":"", - "standard_name":"temperature_flux_due_to_runoff_expressed_as_heat_flux_into_sea_water", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "hfrunoffds2d":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Temperature Flux due to Runoff Expressed as Heat Flux into Sea Water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hfrunoffds", - "positive":"", - "standard_name":"temperature_flux_due_to_runoff_expressed_as_heat_flux_into_sea_water", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "hfsifrazil":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Heat Flux into Sea Water due to Frazil Ice Formation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hfsifrazil", - "positive":"", - "standard_name":"heat_flux_into_sea_water_due_to_freezing_of_frazil_ice", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "hfsifrazil2d":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Heat Flux into Sea Water due to Frazil Ice Formation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hfsifrazil", - "positive":"", - "standard_name":"heat_flux_into_sea_water_due_to_freezing_of_frazil_ice", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "hfsnthermds":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Heat Flux into Sea Water due to Snow Thermodynamics", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - 
"out_name":"hfsnthermds", - "positive":"", - "standard_name":"heat_flux_into_sea_water_due_to_snow_thermodynamics", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "hfsnthermds2d":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Heat Flux into Sea Water due to Snow Thermodynamics", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hfsnthermds", - "positive":"", - "standard_name":"heat_flux_into_sea_water_due_to_snow_thermodynamics", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "hfsso":{ - "cell_measures":"", - "cell_methods":"area: mean where ice_free_sea over sea time: mean", - "comment":"This is defined as 'where ice_free_sea over sea'", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Surface Downward Sensible Heat Flux", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hfsso", - "positive":"up", - "standard_name":"surface_downward_sensible_heat_flux", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "hfx":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Contains all contributions to 'x-ward' heat transport from resolved and parameterized processes. Use Celsius for temperature scale.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Ocean Heat X Transport", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hfx", - "positive":"", - "standard_name":"ocean_heat_x_transport", - "type":"real", - "units":"W", - "valid_max":"", - "valid_min":"" - }, - "hfy":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Contains all contributions to 'y-ward' heat transport from resolved and parameterized processes. 
Use Celsius for temperature scale.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Ocean Heat Y Transport", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"hfy", - "positive":"", - "standard_name":"ocean_heat_y_transport", - "type":"real", - "units":"W", - "valid_max":"", - "valid_min":"" - }, - "htovgyre":{ - "cell_measures":"", - "cell_methods":"longitude: mean time: mean", - "comment":"From all advective mass transport processes, resolved and parameterized.", - "dimensions":"latitude basin time", - "frequency":"mon", - "long_name":"Northward Ocean Heat Transport due to Gyre", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"htovgyre", - "positive":"", - "standard_name":"northward_ocean_heat_transport_due_to_gyre", - "type":"real", - "units":"W", - "valid_max":"", - "valid_min":"" - }, - "htovovrt":{ - "cell_measures":"", - "cell_methods":"longitude: mean time: mean", - "comment":"From all advective mass transport processes, resolved and parameterized.", - "dimensions":"latitude basin time", - "frequency":"mon", - "long_name":"Northward Ocean Heat Transport due to Overturning", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"htovovrt", - "positive":"", - "standard_name":"northward_ocean_heat_transport_due_to_overturning", - "type":"real", - "units":"W", - "valid_max":"", - "valid_min":"" - }, - "icfriver":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Inorganic Carbon supply to ocean through runoff (separate from gas exchange)", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Flux of Inorganic Carbon Into Ocean Surface by Runoff", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"icfriver", - "positive":"", - "standard_name":"tendency_of_ocean_mole_content_of_inorganic_carbon_due_to_runoff_and_sediment_dissolution", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "intdic":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Vertically integrated DIC", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Dissolved Inorganic Carbon Content", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"intdic", - "positive":"", - "standard_name":"ocean_mass_content_of_dissolved_inorganic_carbon", - "type":"real", - "units":"kg m-2", - "valid_max":"", - "valid_min":"" - }, - "intdoc":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Vertically integrated DOC (explicit pools only)", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Dissolved Organic Carbon Content", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"intdoc", - "positive":"", - "standard_name":"ocean_mass_content_of_dissolved_organic_carbon", - "type":"real", - "units":"kg m-2", - "valid_max":"", - "valid_min":"" - }, - "intparag":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Vertically integrated aragonite production", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Aragonite Production", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"intparag", - "positive":"", - "standard_name":"tendency_of_ocean_mole_content_of_aragonite_expressed_as_carbon_due_to_biological_production", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "intpbfe":{ - "cell_measures":"", - 
"cell_methods":"area: mean where sea time: mean", - "comment":"Vertically integrated biogenic iron production", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Iron Production", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"intpbfe", - "positive":"", - "standard_name":"tendency_of_ocean_mole_content_of_iron_due_to_biological_production", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "intpbn":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Vertically integrated biogenic nitrogen production", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Nitrogen Production", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"intpbn", - "positive":"", - "standard_name":"tendency_of_ocean_mole_content_of_nitrogen_due_to_biological_production", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "intpbp":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Vertically integrated biogenic phosphorus production", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Phosphorus Production", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"intpbp", - "positive":"", - "standard_name":"tendency_of_ocean_mole_content_of_phosphorus_due_to_biological_production", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "intpbsi":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Vertically integrated biogenic silica production", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Silica Production", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"intpbsi", - "positive":"", - "standard_name":"tendency_of_ocean_mole_content_of_silicon_due_to_biological_production", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "intpcalcite":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Vertically integrated calcite production", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Calcite Production", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"intpcalcite", - "positive":"", - "standard_name":"tendency_of_ocean_mole_content_of_calcite_expressed_as_carbon_due_to_biological_production", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "intpn2":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Vertically integrated nitrogen fixation", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Nitrogen Fixation Rate in Ocean", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"intpn2", - "positive":"", - "standard_name":"tendency_of_ocean_mole_content_of_elemental_nitrogen_due_to_fixation", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "intpoc":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Vertically integrated POC", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Particulate Organic Carbon Content", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"intpoc", - "positive":"", - "standard_name":"ocean_mass_content_of_particulate_organic_matter_expressed_as_carbon", - "type":"real", - "units":"kg m-2", - "valid_max":"", - 
"valid_min":"" - }, - "intpp":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Vertically integrated total primary (organic carbon) production by phytoplankton. This should equal the sum of intpdiat+intpphymisc, but those individual components may be unavailable in some models.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Primary Organic Carbon Production by All Types of Phytoplankton", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"intpp", - "positive":"", - "standard_name":"net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_phytoplankton", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "intppcalc":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Net Primary Mole Productivity of Carbon by Calcareous Phytoplankton", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"intppcalc", - "positive":"", - "standard_name":"net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_calcareous_phytoplankton", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "intppdiat":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Vertically integrated primary (organic carbon) production by the diatom phytoplankton component alone", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Net Primary Organic Carbon Production by Diatoms", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"intppdiat", - "positive":"", - "standard_name":"net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diatoms", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "intppdiaz":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Net Primary Mole Productivity of Carbon by Diazotrophs", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"intppdiaz", - "positive":"", - "standard_name":"net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diazotrophs", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "intppmisc":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Vertically integrated total primary (organic carbon) production by other phytoplankton components alone", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Net Primary Organic Carbon Production by Other Phytoplankton", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"intppmisc", - "positive":"", - "standard_name":"net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_miscellaneous_phytoplankton", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "intppnitrate":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Vertically integrated primary (organic carbon) production by phytoplankton based on nitrate uptake alone", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Primary Organic Carbon Production by Phytoplankton Based on Nitrate Uptake Alone", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"intppnitrate", - "positive":"", - 
"standard_name":"net_primary_mole_productivity_of_biomass_expressed_as_carbon_due_to_nitrate_utilization", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "intpppico":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Net Primary Mole Productivity of Carbon by Picophytoplankton", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"intpppico", - "positive":"", - "standard_name":"net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_picophytoplankton", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "limfecalc":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Iron limitation of Calcareous Phytoplankton", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"limfecalc", - "positive":"", - "standard_name":"iron_growth_limitation_of_calcareous_phytoplankton", - "type":"real", - "units":"1", - "valid_max":"", - "valid_min":"" - }, - "limfediat":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Iron limitation of Diatoms", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"limfediat", - "positive":"", - "standard_name":"iron_growth_limitation_of_diatoms", - "type":"real", - "units":"1", - "valid_max":"", - "valid_min":"" - }, - "limfediaz":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Iron limitation of Diazotrophs", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"limfediaz", - "positive":"", - "standard_name":"iron_growth_limitation_of_diazotrophs", - "type":"real", - "units":"1", - "valid_max":"", - "valid_min":"" - }, - "limfemisc":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Iron Limitation of Other Phytoplankton", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"limfemisc", - "positive":"", - "standard_name":"iron_growth_limitation_of_miscellaneous_phytoplankton", - "type":"real", - "units":"1", - "valid_max":"", - "valid_min":"" - }, - "limfepico":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Iron limitation of Picophytoplankton", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"limfepico", - "positive":"", - "standard_name":"iron_growth_limitation_of_picophytoplankton", - "type":"real", - "units":"1", - "valid_max":"", - "valid_min":"" - }, - "limirrcalc":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Irradiance limitation of Calcareous Phytoplankton", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"limirrcalc", - "positive":"", - "standard_name":"growth_limitation_of_calcareous_phytoplankton_due_to_solar_irradiance", - "type":"real", - "units":"1", - "valid_max":"", - "valid_min":"" - }, - "limirrdiat":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - 
"comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Irradiance limitation of Diatoms", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"limirrdiat", - "positive":"", - "standard_name":"growth_limitation_of_diatoms_due_to_solar_irradiance", - "type":"real", - "units":"1", - "valid_max":"", - "valid_min":"" - }, - "limirrdiaz":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Irradiance limitation of Diazotrophs", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"limirrdiaz", - "positive":"", - "standard_name":"growth_limitation_of_diazotrophs_due_to_solar_irradiance", - "type":"real", - "units":"1", - "valid_max":"", - "valid_min":"" - }, - "limirrmisc":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Irradiance Limitation of Other Phytoplankton", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"limirrmisc", - "positive":"", - "standard_name":"growth_limitation_of_miscellaneous_phytoplankton_due_to_solar_irradiance", - "type":"real", - "units":"1", - "valid_max":"", - "valid_min":"" - }, - "limirrpico":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Irradiance limitation of Picophytoplankton", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"limirrpico", - "positive":"", - "standard_name":"growth_limitation_of_picophytoplankton_due_to_solar_irradiance", - "type":"real", - "units":"1", - "valid_max":"", - "valid_min":"" - }, - "limncalc":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Nitrogen limitation of Calcareous Phytoplankton", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"limncalc", - "positive":"", - "standard_name":"nitrogen_growth_limitation_of_calcareous_phytoplankton", - "type":"real", - "units":"1", - "valid_max":"", - "valid_min":"" - }, - "limndiat":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Nitrogen limitation of Diatoms", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"limndiat", - "positive":"", - "standard_name":"nitrogen_growth_limitation_of_diatoms", - "type":"real", - "units":"1", - "valid_max":"", - "valid_min":"" - }, - "limndiaz":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Nitrogen limitation of Diazotrophs", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"limndiaz", - "positive":"", - "standard_name":"nitrogen_growth_limitation_of_diazotrophs", - "type":"real", - "units":"1", - "valid_max":"", - "valid_min":"" - }, - "limnmisc":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Nitrogen Limitation of Other Phytoplankton", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"limnmisc", - "positive":"", - "standard_name":"nitrogen_growth_limitation_of_miscellaneous_phytoplankton", - 
"type":"real", - "units":"1", - "valid_max":"", - "valid_min":"" - }, - "limnpico":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Nitrogen limitation of Picophytoplankton", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"limnpico", - "positive":"", - "standard_name":"nitrogen_growth_limitation_of_picophytoplankton", - "type":"real", - "units":"1", - "valid_max":"", - "valid_min":"" - }, - "masscello":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Tracer grid-cell mass per unit area used for computing tracer budgets. For Boussinesq models with static ocean grid cell thickness, masscello = rhozero*thickcello, where thickcello is static cell thickness and rhozero is constant Boussinesq reference density. More generally, masscello is time dependent and reported as part of Omon.", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Sea Water Mass Per Unit Area", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"masscello", - "positive":"", - "standard_name":"sea_water_mass_per_unit_area", - "type":"real", - "units":"kg m-2", - "valid_max":"", - "valid_min":"" - }, - "masso":{ - "cell_measures":"", - "cell_methods":"area: sum where sea time: mean", - "comment":"Total mass of liquid sea water. For Boussinesq models, report this diagnostic as Boussinesq reference density times total volume.", - "dimensions":"time", - "frequency":"mon", - "long_name":"Sea Water Mass", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"masso", - "positive":"", - "standard_name":"sea_water_mass", - "type":"real", - "units":"kg", - "valid_max":"", - "valid_min":"" - }, - "mfo":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"", - "dimensions":"oline time", - "frequency":"mon", - "long_name":"Sea Water Transport", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"mfo", - "positive":"", - "standard_name":"sea_water_transport_across_line", - "type":"real", - "units":"kg s-1", - "valid_max":"", - "valid_min":"" - }, - "mlotst":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Sigma T is potential density referenced to ocean surface.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Ocean Mixed Layer Thickness Defined by Sigma T", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"mlotst", - "positive":"", - "standard_name":"ocean_mixed_layer_thickness_defined_by_sigma_t", - "type":"real", - "units":"m", - "valid_max":"", - "valid_min":"" - }, - "mlotstmax":{ - "cell_measures":"", - "cell_methods":"area: mean time: maximum", - "comment":"Sigma T is potential density referenced to ocean surface.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Maximum Ocean Mixed Layer Thickness Defined by Sigma T", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"mlotstmax", - "positive":"", - "standard_name":"ocean_mixed_layer_thickness_defined_by_sigma_t", - "type":"real", - "units":"m", - "valid_max":"", - "valid_min":"" - }, - "mlotstmin":{ - "cell_measures":"", - "cell_methods":"area: mean time: minimum", - "comment":"Sigma T is potential density referenced to ocean surface.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Minimum Ocean Mixed Layer Thickness Defined by Sigma T", - "ok_max_mean_abs":"", - 
"ok_min_mean_abs":"", - "out_name":"mlotstmin", - "positive":"", - "standard_name":"ocean_mixed_layer_thickness_defined_by_sigma_t", - "type":"real", - "units":"m", - "valid_max":"", - "valid_min":"" - }, - "mlotstsq":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Square of Ocean Mixed Layer Thickness Defined by Sigma T", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"mlotstsq", - "positive":"", - "standard_name":"square_of_ocean_mixed_layer_thickness_defined_by_sigma_t", - "type":"real", - "units":"m2", - "valid_max":"", - "valid_min":"" - }, - "msftbarot":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Streamfunction or its approximation for free surface models. See OMDP document for details.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Ocean Barotropic Mass Streamfunction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"msftbarot", - "positive":"", - "standard_name":"ocean_barotropic_mass_streamfunction", - "type":"real", - "units":"kg s-1", - "valid_max":"", - "valid_min":"" - }, - "msftmrho":{ - "cell_measures":"", - "cell_methods":"longitude: mean (comment: basin mean[ along zig-zag grid path]) time: mean", - "comment":"Overturning mass streamfunction arising from all advective mass transport processes, resolved and parameterized.", - "dimensions":"latitude rho basin time", - "frequency":"mon", - "long_name":"Ocean Meridional Overturning Mass Streamfunction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"msftmrho", - "positive":"", - "standard_name":"ocean_meridional_overturning_mass_streamfunction", - "type":"real", - "units":"kg s-1", - "valid_max":"", - "valid_min":"" - }, - "msftmrhompa":{ - "cell_measures":"", - "cell_methods":"longitude: mean (comment: basin mean[ along zig-zag grid path]) time: mean", - "comment":"CMIP5 called this 'due to Bolus Advection'. Name change respects the more general physics of the mesoscale parameterizations.", - "dimensions":"latitude rho basin time", - "frequency":"mon", - "long_name":"ocean meridional overturning mass streamfunction due to parameterized mesoscale advection", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"msftmrhompa", - "positive":"", - "standard_name":"ocean_meridional_overturning_mass_streamfunction_due_to_parameterized_mesoscale_eddy_advection", - "type":"real", - "units":"kg s-1", - "valid_max":"", - "valid_min":"" - }, - "msftmz":{ - "cell_measures":"", - "cell_methods":"longitude: mean (comment: basin mean[ along zig-zag grid path]) time: mean", - "comment":"Overturning mass streamfunction arising from all advective mass transport processes, resolved and parameterized.", - "dimensions":"latitude olevel basin time", - "frequency":"mon", - "long_name":"Ocean Meridional Overturning Mass Streamfunction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"msftmz", - "positive":"", - "standard_name":"ocean_meridional_overturning_mass_streamfunction", - "type":"real", - "units":"kg s-1", - "valid_max":"", - "valid_min":"" - }, - "msftmzmpa":{ - "cell_measures":"", - "cell_methods":"longitude: mean (comment: basin mean[ along zig-zag grid path]) time: mean", - "comment":"CMIP5 called this 'due to Bolus Advection'. 
Name change respects the more general physics of the mesoscale parameterizations.", - "dimensions":"latitude olevel basin time", - "frequency":"mon", - "long_name":"ocean meridional overturning mass streamfunction due to parameterized mesoscale advection", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"msftmzmpa", - "positive":"", - "standard_name":"ocean_meridional_overturning_mass_streamfunction_due_to_parameterized_mesoscale_eddy_advection", - "type":"real", - "units":"kg s-1", - "valid_max":"", - "valid_min":"" - }, - "msftmzsmpa":{ - "cell_measures":"", - "cell_methods":"longitude: mean (comment: basin mean[ along zig-zag grid path]) time: mean", - "comment":"Report only if there is a submesoscale eddy parameterization.", - "dimensions":"latitude olevel basin time", - "frequency":"mon", - "long_name":"ocean meridional overturning mass streamfunction due to parameterized submesoscale advection", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"msftmzsmpa", - "positive":"", - "standard_name":"ocean_meridional_overturning_mass_streamfunction_due_to_parameterized_submesoscale_eddy_advection", - "type":"real", - "units":"kg s-1", - "valid_max":"", - "valid_min":"" - }, - "msftyrho":{ - "cell_measures":"", - "cell_methods":"time: mean grid_longitude: mean", - "comment":"Overturning mass streamfunction arising from all advective mass transport processes, resolved and parameterized.", - "dimensions":"latitude rho basin time", - "frequency":"mon", - "long_name":"Ocean Y Overturning Mass Streamfunction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"msftyrho", - "positive":"", - "standard_name":"ocean_y_overturning_mass_streamfunction", - "type":"real", - "units":"kg s-1", - "valid_max":"", - "valid_min":"" - }, - "msftyrhompa":{ - "cell_measures":"", - "cell_methods":"time: mean grid_longitude: mean", - "comment":"CMIP5 called this 'due to Bolus Advection'. Name change respects the more general physics of the mesoscale parameterizations.", - "dimensions":"latitude rho basin time", - "frequency":"mon", - "long_name":"ocean Y overturning mass streamfunction due to parameterized mesoscale advection", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"msftyrhompa", - "positive":"", - "standard_name":"ocean_y_overturning_mass_streamfunction_due_to_parameterized_mesoscale_eddy_advection", - "type":"real", - "units":"kg s-1", - "valid_max":"", - "valid_min":"" - }, - "msftyz":{ - "cell_measures":"", - "cell_methods":"time: mean grid_longitude: mean", - "comment":"Overturning mass streamfunction arising from all advective mass transport processes, resolved and parameterized.", - "dimensions":"latitude olevel basin time", - "frequency":"mon", - "long_name":"Ocean Y Overturning Mass Streamfunction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"msftyz", - "positive":"", - "standard_name":"ocean_y_overturning_mass_streamfunction", - "type":"real", - "units":"kg s-1", - "valid_max":"", - "valid_min":"" - }, - "msftyzmpa":{ - "cell_measures":"", - "cell_methods":"time: mean grid_longitude: mean", - "comment":"CMIP5 called this 'due to Bolus Advection'. 
Name change respects the more general physics of the mesoscale parameterizations.", - "dimensions":"latitude olevel basin time", - "frequency":"mon", - "long_name":"ocean Y overturning mass streamfunction due to parameterized mesoscale advection", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"msftyzmpa", - "positive":"", - "standard_name":"ocean_y_overturning_mass_streamfunction_due_to_parameterized_mesoscale_eddy_advection", - "type":"real", - "units":"kg s-1", - "valid_max":"", - "valid_min":"" - }, - "msftyzsmpa":{ - "cell_measures":"", - "cell_methods":"longitude: mean (comment: basin mean[ along zig-zag grid path]) time: mean", - "comment":"Report only if there is a submesoscale eddy parameterization.", - "dimensions":"latitude olevel basin time", - "frequency":"mon", - "long_name":"ocean Y overturning mass streamfunction due to parameterized submesoscale advection", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"msftyzsmpa", - "positive":"", - "standard_name":"ocean_meridional_overturning_mass_streamfunction_due_to_parameterized_submesoscale_eddy_advection", - "type":"real", - "units":"kg s-1", - "valid_max":"", - "valid_min":"" - }, - "nh4":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Dissolved Ammonium Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"nh4", - "positive":"", - "standard_name":"mole_concentration_of_ammonium_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "no3":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Dissolved Nitrate Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"no3", - "positive":"", - "standard_name":"mole_concentration_of_nitrate_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "no3os":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Dissolved Nitrate Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"no3os", - "positive":"", - "standard_name":"mole_concentration_of_nitrate_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "o2":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Dissolved Oxygen Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"o2", - "positive":"", - "standard_name":"mole_concentration_of_dissolved_molecular_oxygen_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "o2min":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Oxygen Minimum Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"o2min", - "positive":"", - "standard_name":"mole_concentration_of_dissolved_molecular_oxygen_in_sea_water_at_shallowest_local_minimum_in_vertical_profile", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "o2os":{ - "cell_measures":"", - 
"cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Dissolved Oxygen Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"o2os", - "positive":"", - "standard_name":"mole_concentration_of_dissolved_molecular_oxygen_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "o2sat":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Dissolved Oxygen Concentration at Saturation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"o2sat", - "positive":"", - "standard_name":"mole_concentration_of_dissolved_molecular_oxygen_in_sea_water_at_saturation", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "o2satos":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Dissolved Oxygen Concentration at Saturation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"o2satos", - "positive":"", - "standard_name":"mole_concentration_of_dissolved_molecular_oxygen_in_sea_water_at_saturation", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "obvfsq":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Square of Brunt Vaisala Frequency in Sea Water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"obvfsq", - "positive":"", - "standard_name":"square_of_brunt_vaisala_frequency_in_sea_water", - "type":"real", - "units":"s-2", - "valid_max":"", - "valid_min":"" - }, - "ocfriver":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Organic Carbon supply to ocean through runoff (separate from gas exchange)", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Flux of Organic Carbon Into Ocean Surface by Runoff", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"ocfriver", - "positive":"", - "standard_name":"tendency_of_ocean_mole_content_of_organic_carbon_due_to_runoff_and_sediment_dissolution", - "type":"real", - "units":"mol m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "pbfe":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Biogenic Iron Production", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"pbfe", - "positive":"", - "standard_name":"tendency_of_mole_concentration_of_iron_in_sea_water_due_to_biological_production", - "type":"real", - "units":"mol m-3 s-1", - "valid_max":"", - "valid_min":"" - }, - "pbo":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Sea Water Pressure at Sea floor", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"pbo", - "positive":"", - "standard_name":"sea_water_pressure_at_sea_floor", - "type":"real", - "units":"Pa", - "valid_max":"", - "valid_min":"" - }, - "pbsi":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude 
time depth0m", - "frequency":"mon", - "long_name":"Biogenic Silica Production", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"pbsi", - "positive":"", - "standard_name":"tendency_of_mole_concentration_of_silicon_in_sea_water_due_to_biological_production", - "type":"real", - "units":"mol m-3 s-1", - "valid_max":"", - "valid_min":"" - }, - "ph":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"negative log of hydrogen ion concentration with the concentration expressed as mol H kg-1.", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"pH", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"ph", - "positive":"", - "standard_name":"sea_water_ph_reported_on_total_scale", - "type":"real", - "units":"1", - "valid_max":"", - "valid_min":"" - }, - "phabio":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"negative log10 of hydrogen ion concentration with the concentration expressed as mol H kg-1 (abiotic component)..", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Abiotic pH", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"phabio", - "positive":"", - "standard_name":"sea_water_ph_abiotic_analogue_reported_on_total_scale", - "type":"real", - "units":"1", - "valid_max":"", - "valid_min":"" - }, - "phabioos":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"negative log10 of hydrogen ion concentration with the concentration expressed as mol H kg-1.", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Abiotic pH", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"phabioos", - "positive":"", - "standard_name":"sea_water_ph_abiotic_analogue_reported_on_total_scale", - "type":"real", - "units":"1", - "valid_max":"", - "valid_min":"" - }, - "phnat":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"negative log10 of hydrogen ion concentration with the concentration expressed as mol H kg-1.", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Natural pH", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"phnat", - "positive":"", - "standard_name":"sea_water_ph_natural_analogue_reported_on_total_scale", - "type":"real", - "units":"1", - "valid_max":"", - "valid_min":"" - }, - "phnatos":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"negative log10 of hydrogen ion concentration with the concentration expressed as mol H kg-1.", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Natural pH", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"phnatos", - "positive":"", - "standard_name":"sea_water_ph_natural_analogue_reported_on_total_scale", - "type":"real", - "units":"1", - "valid_max":"", - "valid_min":"" - }, - "phyc":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"sum of phytoplankton carbon component concentrations. In most (all?) 
cases this is the sum of phycdiat and phycmisc (i.e., 'Diatom Carbon Concentration' and 'Non-Diatom Phytoplankton Carbon Concentration')", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Phytoplankton Carbon Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"phyc", - "positive":"", - "standard_name":"mole_concentration_of_phytoplankton_expressed_as_carbon_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "phycalc":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"carbon concentration from calcareous (calcite-producing) phytoplankton component alone", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Mole Concentration of Calcareous Phytoplankton expressed as Carbon in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"phycalc", - "positive":"", - "standard_name":"mole_concentration_of_calcareous_phytoplankton_expressed_as_carbon_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "phycos":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"sum of phytoplankton organic carbon component concentrations at the sea surface", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Phytoplankton Carbon Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"phycos", - "positive":"", - "standard_name":"mole_concentration_of_phytoplankton_expressed_as_carbon_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "phydiat":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"carbon from the diatom phytoplankton component concentration alone", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Mole Concentration of Diatoms expressed as Carbon in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"phydiat", - "positive":"", - "standard_name":"mole_concentration_of_diatoms_expressed_as_carbon_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "phydiaz":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"carbon concentration from the diazotrophic phytoplankton component alone", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Mole Concentration of Diazotrophs Expressed as Carbon in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"phydiaz", - "positive":"", - "standard_name":"mole_concentration_of_diazotrophs_expressed_as_carbon_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "phyfe":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"sum of phytoplankton iron component concentrations", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Mole Concentration of Total Phytoplankton expressed as Iron in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"phyfe", - "positive":"", - "standard_name":"mole_concentration_of_phytoplankton_expressed_as_iron_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "phyfeos":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"sum of phytoplankton 
iron component concentrations", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Mole Concentration of Total Phytoplankton expressed as Iron in Sea Water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"phyfeos", - "positive":"", - "standard_name":"mole_concentration_of_phytoplankton_expressed_as_iron_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "phymisc":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"carbon concentration from additional phytoplankton component alone", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Mole Concentration of Miscellaneous Phytoplankton expressed as Carbon in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"phymisc", - "positive":"", - "standard_name":"mole_concentration_of_miscellaneous_phytoplankton_expressed_as_carbon_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "phyn":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"sum of phytoplankton nitrogen component concentrations", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Mole Concentration of Total Phytoplankton expressed as Nitrogen in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"phyn", - "positive":"", - "standard_name":"mole_concentration_of_phytoplankton_expressed_as_nitrogen_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "phynos":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"sum of phytoplankton nitrogen component concentrations", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Mole Concentration of Phytoplankton Nitrogen in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"phynos", - "positive":"", - "standard_name":"mole_concentration_of_phytoplankton_expressed_as_nitrogen_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "phyp":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"sum of phytoplankton phosphorus components", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Mole Concentration of Total Phytoplankton expressed as Phosphorus in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"phyp", - "positive":"", - "standard_name":"mole_concentration_of_phytoplankton_expressed_as_phosphorus_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "phypico":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"carbon concentration from the picophytoplankton (<2 um) component alone", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Mole Concentration of Picophytoplankton expressed as Carbon in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"phypico", - "positive":"", - "standard_name":"mole_concentration_of_picophytoplankton_expressed_as_carbon_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "phypos":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"sum of phytoplankton phosphorus components", - "dimensions":"longitude latitude 
time depth0m", - "frequency":"mon", - "long_name":"Surface Mole Concentration of Total Phytoplankton expressed as Phosphorus in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"phypos", - "positive":"", - "standard_name":"mole_concentration_of_phytoplankton_expressed_as_phosphorus_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "physi":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"sum of phytoplankton silica component concentrations", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Mole Concentration of Total Phytoplankton expressed as Silicon in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"physi", - "positive":"", - "standard_name":"mole_concentration_of_phytoplankton_expressed_as_silicon_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "physios":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"sum of phytoplankton silica component concentrations", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Mole Concentration of Total Phytoplankton expressed as Silicon in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"physios", - "positive":"", - "standard_name":"mole_concentration_of_phytoplankton_expressed_as_silicon_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "pnitrate":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Primary (organic carbon) production by phytoplankton due to nitrate uptake alone", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Primary Carbon Production by Phytoplankton due to Nitrate Uptake Alone", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"pnitrate", - "positive":"", - "standard_name":"tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_nitrate_utilization", - "type":"real", - "units":"mol m-3 s-1", - "valid_max":"", - "valid_min":"" - }, - "po4":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Total Dissolved Inorganic Phosphorus Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"po4", - "positive":"", - "standard_name":"mole_concentration_of_dissolved_inorganic_phosphorus_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "pon":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"sum of particulate organic nitrogen component concentrations", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Mole Concentration of Particulate Organic Matter expressed as Nitrogen in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"pon", - "positive":"", - "standard_name":"mole_concentration_of_particulate_organic_matter_expressed_as_nitrogen_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "ponos":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"sum of particulate organic nitrogen component concentrations", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - 
"long_name":"Surface Mole Concentration of Particulate Organic Matter expressed as Nitrogen in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"ponos", - "positive":"", - "standard_name":"mole_concentration_of_particulate_organic_matter_expressed_as_nitrogen_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "pop":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"sum of particulate organic phosphorus component concentrations", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Mole Concentration of Particulate Organic Matter expressed as Phosphorus in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"pop", - "positive":"", - "standard_name":"mole_concentration_of_particulate_organic_matter_expressed_as_phosphorus_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "popos":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"sum of particulate organic phosphorus component concentrations", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Mole Concentration of Particulate Organic Matter expressed as Phosphorus in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"popos", - "positive":"", - "standard_name":"mole_concentration_of_particulate_organic_matter_expressed_as_phosphorus_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "pp":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"total primary (organic carbon) production by phytoplankton", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Primary Carbon Production by Total Phytoplankton", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"pp", - "positive":"", - "standard_name":"tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production", - "type":"real", - "units":"mol m-3 s-1", - "valid_max":"", - "valid_min":"" - }, - "prra":{ - "cell_measures":"", - "cell_methods":"area: mean where ice_free_sea over sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Rainfall Flux where Ice Free Ocean over Sea", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"prra", - "positive":"", - "standard_name":"rainfall_flux", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "prsn":{ - "cell_measures":"", - "cell_methods":"area: mean where ice_free_sea over sea time: mean", - "comment":"at surface; includes precipitation of all forms of water in the solid phase", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Snowfall Flux where Ice Free Ocean over Sea", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"prsn", - "positive":"", - "standard_name":"snowfall_flux", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "pso":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Sea Water Pressure at Sea Water Surface", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"pso", - "positive":"", - "standard_name":"sea_water_pressure_at_sea_water_surface", - "type":"real", - 
"units":"Pa", - "valid_max":"", - "valid_min":"" - }, - "rlntds":{ - "cell_measures":"", - "cell_methods":"area: mean where ice_free_sea over sea time: mean", - "comment":"This is defined as 'where ice_free_sea over sea'", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Surface Net Downward Longwave Radiation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"rlntds", - "positive":"down", - "standard_name":"surface_net_downward_longwave_flux", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "rsdo":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Downwelling Shortwave Radiation in Sea Water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"rsdo", - "positive":"down", - "standard_name":"downwelling_shortwave_flux_in_sea_water", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "rsntds":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"This is the flux into the surface of liquid sea water only. This excludes shortwave flux absorbed by sea ice, but includes any light that passes through the ice and is absorbed by the ocean.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Net Downward Shortwave Radiation at Sea Water Surface", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"rsntds", - "positive":"down", - "standard_name":"net_downward_shortwave_flux_at_sea_water_surface", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "sf6":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Moles Per Unit Mass of SF6 in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sf6", - "positive":"", - "standard_name":"mole_concentration_of_sulfur_hexafluoride_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "sfdsi":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"This field is physical, and it arises since sea ice has a nonzero salt content, so it exchanges salt with the liquid ocean upon melting and freezing.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Downward Sea Ice Basal Salt Flux", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sfdsi", - "positive":"down", - "standard_name":"downward_sea_ice_basal_salt_flux", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "sfriver":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"This field is physical, and it arises when rivers carry a nonzero salt content. 
Often this is zero, with rivers assumed to be fresh.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Salt Flux into Sea Water from Rivers", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sfriver", - "positive":"", - "standard_name":"salt_flux_into_sea_water_from_rivers", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "si":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Total Dissolved Inorganic Silicon Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"si", - "positive":"", - "standard_name":"mole_concentration_of_dissolved_inorganic_silicon_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "sios":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Total Dissolved Inorganic Silicon Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sios", - "positive":"", - "standard_name":"mole_concentration_of_dissolved_inorganic_silicon_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "sltovgyre":{ - "cell_measures":"", - "cell_methods":"longitude: mean time: mean", - "comment":"From all advective mass transport processes, resolved and parameterized.", - "dimensions":"latitude basin time", - "frequency":"mon", - "long_name":"Northward Ocean Salt Transport due to Gyre", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sltovgyre", - "positive":"", - "standard_name":"northward_ocean_salt_transport_due_to_gyre", - "type":"real", - "units":"kg s-1", - "valid_max":"", - "valid_min":"" - }, - "sltovovrt":{ - "cell_measures":"", - "cell_methods":"longitude: mean time: mean", - "comment":"From all advective mass transport processes, resolved and parameterized.", - "dimensions":"latitude basin time", - "frequency":"mon", - "long_name":"Northward Ocean Salt Transport due to Overturning", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sltovovrt", - "positive":"", - "standard_name":"northward_ocean_salt_transport_due_to_overturning", - "type":"real", - "units":"kg s-1", - "valid_max":"", - "valid_min":"" - }, - "so":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Sea Water Salinity", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"so", - "positive":"", - "standard_name":"sea_water_salinity", - "type":"real", - "units":"0.001", - "valid_max":"", - "valid_min":"" - }, - "sob":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Model prognostic salinity at bottom-most model grid cell", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"sea water salinity at sea floor", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sob", - "positive":"", - "standard_name":"sea_water_salinity_at_sea_floor", - "type":"real", - "units":"0.001", - "valid_max":"", - "valid_min":"" - }, - "soga":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"time", - "frequency":"mon", - "long_name":"Global Mean Sea Water Salinity", - "ok_max_mean_abs":"", - 
"ok_min_mean_abs":"", - "out_name":"soga", - "positive":"", - "standard_name":"sea_water_salinity", - "type":"real", - "units":"0.001", - "valid_max":"", - "valid_min":"" - }, - "sos":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Sea Surface Salinity", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sos", - "positive":"", - "standard_name":"sea_surface_salinity", - "type":"real", - "units":"0.001", - "valid_max":"", - "valid_min":"" - }, - "sosga":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"time", - "frequency":"mon", - "long_name":"Global Average Sea Surface Salinity", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sosga", - "positive":"", - "standard_name":"sea_surface_salinity", - "type":"real", - "units":"0.001", - "valid_max":"", - "valid_min":"" - }, - "sossq":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Square of Sea Surface Salinity", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sossq", - "positive":"", - "standard_name":"square_of_sea_surface_salinity", - "type":"real", - "units":"1e-06", - "valid_max":"", - "valid_min":"" - }, - "spco2":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Aqueous Partial Pressure of CO2", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"spco2", - "positive":"", - "standard_name":"surface_partial_pressure_of_carbon_dioxide_in_sea_water", - "type":"real", - "units":"Pa", - "valid_max":"", - "valid_min":"" - }, - "spco2abio":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Abiotic Surface Aqueous Partial Pressure of CO2", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"spco2abio", - "positive":"", - "standard_name":"surface_partial_pressure_of_carbon_dioxide_abiotic_analogue_in_sea_water", - "type":"real", - "units":"Pa", - "valid_max":"", - "valid_min":"" - }, - "spco2nat":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Natural Surface Aqueous Partial Pressure of CO2", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"spco2nat", - "positive":"", - "standard_name":"surface_partial_pressure_of_carbon_dioxide_natural_analogue_in_sea_water", - "type":"real", - "units":"Pa", - "valid_max":"", - "valid_min":"" - }, - "talk":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"total alkalinity equivalent concentration (including carbonate, nitrogen, silicate, and borate components)", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Total Alkalinity", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"talk", - "positive":"", - "standard_name":"sea_water_alkalinity_expressed_as_mole_equivalent", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "talknat":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"total alkalinity equivalent 
concentration (including carbonate, borate, phosphorus, silicon, and nitrogen components) at preindustrial atmospheric xCO2", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Natural Total Alkalinity", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"talknat", - "positive":"", - "standard_name":"sea_water_alkalinity_natural_analogue_expressed_as_mole_equivalent", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "talknatos":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"total alkalinity equivalent concentration (including carbonate, borate, phosphorus, silicon, and nitrogen components) at preindustrial atmospheric xCO2", - "dimensions":"longitude latitude time depth0m", - "frequency":"mon", - "long_name":"Surface Natural Total Alkalinity", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"talknatos", - "positive":"", - "standard_name":"sea_water_alkalinity_natural_analogue_expressed_as_mole_equivalent", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "tauucorr":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"This is the stress on the liquid ocean from overlying atmosphere, sea ice, ice shelf, etc.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Surface Downward X Stress Correction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"tauucorr", - "positive":"down", - "standard_name":"surface_downward_x_stress_correction", - "type":"real", - "units":"N m-2", - "valid_max":"", - "valid_min":"" - }, - "tauuo":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"This is the stress on the liquid ocean from overlying atmosphere, sea ice, ice shelf, etc.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Surface Downward X Stress", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"tauuo", - "positive":"down", - "standard_name":"surface_downward_x_stress", - "type":"real", - "units":"N m-2", - "valid_max":"", - "valid_min":"" - }, - "tauvcorr":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"This is the stress on the liquid ocean from overlying atmosphere, sea ice, ice shelf, etc.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Surface Downward Y Stress Correction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"tauvcorr", - "positive":"down", - "standard_name":"surface_downward_y_stress_correction", - "type":"real", - "units":"N m-2", - "valid_max":"", - "valid_min":"" - }, - "tauvo":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"This is the stress on the liquid ocean from overlying atmosphere, sea ice, ice shelf, etc.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Surface Downward Y Stress", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"tauvo", - "positive":"down", - "standard_name":"surface_downward_y_stress", - "type":"real", - "units":"N m-2", - "valid_max":"", - "valid_min":"" - }, - "thetao":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Diagnostic should be contributed even for models using conservative temperature as prognostic field.", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Sea Water Potential Temperature", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"thetao", - 
"positive":"", - "standard_name":"sea_water_potential_temperature", - "type":"real", - "units":"degC", - "valid_max":"", - "valid_min":"" - }, - "thetaoga":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Diagnostic should be contributed even for models using conservative temperature as prognostic field", - "dimensions":"time", - "frequency":"mon", - "long_name":"Global Average Sea Water Potential Temperature", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"thetaoga", - "positive":"", - "standard_name":"sea_water_potential_temperature", - "type":"real", - "units":"degC", - "valid_max":"", - "valid_min":"" - }, - "thkcello":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Ocean Model Cell Thickness", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"thkcello", - "positive":"", - "standard_name":"cell_thickness", - "type":"real", - "units":"m", - "valid_max":"", - "valid_min":"" - }, - "tob":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Potential temperature at the ocean bottom-most grid cell.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Sea Water Potential Temperature at Sea Floor", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"tob", - "positive":"", - "standard_name":"sea_water_potential_temperature_at_sea_floor", - "type":"real", - "units":"degC", - "valid_max":"", - "valid_min":"" - }, - "tos":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Temperature of upper boundary of the liquid ocean, including temperatures below sea-ice and floating ice shelves.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Sea Surface Temperature", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"tos", - "positive":"", - "standard_name":"sea_surface_temperature", - "type":"real", - "units":"degC", - "valid_max":"", - "valid_min":"" - }, - "tosga":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Temperature of upper boundary of the liquid ocean, including temperatures below sea-ice and floating ice shelves.", - "dimensions":"time", - "frequency":"mon", - "long_name":"Global Average Sea Surface Temperature", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"tosga", - "positive":"", - "standard_name":"sea_surface_temperature", - "type":"real", - "units":"degC", - "valid_max":"", - "valid_min":"" - }, - "tossq":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Square of temperature of liquid ocean.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Square of Sea Surface Temperature", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"tossq", - "positive":"", - "standard_name":"square_of_sea_surface_temperature", - "type":"real", - "units":"degC2", - "valid_max":"", - "valid_min":"" - }, - "umo":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"X-ward mass transport from resolved and parameterized advective transport.", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Ocean Mass X Transport", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"umo", - "positive":"", - "standard_name":"ocean_mass_x_transport", - "type":"real", - "units":"kg s-1", - 
"valid_max":"", - "valid_min":"" - }, - "uo":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"Prognostic x-ward velocity component resolved by the model.", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Sea Water X Velocity", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"uo", - "positive":"", - "standard_name":"sea_water_x_velocity", - "type":"real", - "units":"m s-1", - "valid_max":"", - "valid_min":"" - }, - "vmo":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"Y-ward mass transport from resolved and parameterized advective transport.", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Ocean Mass Y Transport", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"vmo", - "positive":"", - "standard_name":"ocean_mass_y_transport", - "type":"real", - "units":"kg s-1", - "valid_max":"", - "valid_min":"" - }, - "vo":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"Prognostic x-ward velocity component resolved by the model.", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Sea Water Y Velocity", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"vo", - "positive":"", - "standard_name":"sea_water_y_velocity", - "type":"real", - "units":"m s-1", - "valid_max":"", - "valid_min":"" - }, - "volcello":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"grid-cell volume ca. 2000.", - "dimensions":"longitude latitude olevel time", - "frequency":"fx", - "long_name":"Ocean Grid-Cell Volume", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"volcello", - "positive":"", - "standard_name":"ocean_volume", - "type":"real", - "units":"m3", - "valid_max":"", - "valid_min":"" - }, - "volo":{ - "cell_measures":"", - "cell_methods":"area: sum where sea time: mean", - "comment":"Total volume of liquid sea water.", - "dimensions":"time", - "frequency":"mon", - "long_name":"Sea Water Volume", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"volo", - "positive":"", - "standard_name":"sea_water_volume", - "type":"real", - "units":"m3", - "valid_max":"", - "valid_min":"" - }, - "vsf":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"It is set to zero in models which receive a real water flux.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Virtual Salt Flux into Sea Water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"vsf", - "positive":"", - "standard_name":"virtual_salt_flux_into_sea_water", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "vsfcorr":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"It is set to zero in models which receive a real water flux.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Virtual Salt Flux Correction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"vsfcorr", - "positive":"", - "standard_name":"virtual_salt_flux_correction", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "vsfevap":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"zero for models using real water fluxes.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Virtual Salt Flux into Sea Water due to Evaporation", - "ok_max_mean_abs":"", - 
"ok_min_mean_abs":"", - "out_name":"vsfevap", - "positive":"", - "standard_name":"virtual_salt_flux_into_sea_water_due_to_evaporation", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "vsfpr":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"zero for models using real water fluxes.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Virtual Salt Flux into Sea Water due to Rainfall", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"vsfpr", - "positive":"", - "standard_name":"virtual_salt_flux_into_sea_water_due_to_rainfall", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "vsfriver":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"zero for models using real water fluxes.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Virtual Salt Flux into Sea Water From Rivers", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"vsfriver", - "positive":"", - "standard_name":"virtual_salt_flux_into_sea_water_from_rivers", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "vsfsit":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"This variable measures the virtual salt flux into sea water due to the melting of sea ice. It is set to zero in models which receive a real water flux.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Virtual Salt Flux into Sea Water due to Sea Ice Thermodynamics", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"vsfsit", - "positive":"", - "standard_name":"virtual_salt_flux_into_sea_water_due_to_sea_ice_thermodynamics", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "wfcorr":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Positive flux implies correction adds water to ocean.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Water Flux Correction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"wfcorr", - "positive":"down", - "standard_name":"water_flux_correction", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "wfo":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"computed as the water flux into the ocean divided by the area of the ocean portion of the grid cell. 
This is the sum of the next two variables in this table.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Water Flux into Sea Water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"wfo", - "positive":"", - "standard_name":"water_flux_into_sea_water", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "wfonocorr":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"computed as the water flux (without flux correction) into the ocean divided by the area of the ocean portion of the grid cell.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Water Flux into Sea Water Without Flux Correction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"wfonocorr", - "positive":"", - "standard_name":"water_flux_into_sea_water_without_flux_correction", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "wmo":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Upward mass transport from resolved and parameterized advective transport.", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Upward Ocean Mass Transport", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"wmo", - "positive":"", - "standard_name":"upward_ocean_mass_transport", - "type":"real", - "units":"kg s-1", - "valid_max":"", - "valid_min":"" - }, - "wo":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Sea Water Z Velocity", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"wo", - "positive":"", - "standard_name":"upward_sea_water_velocity", - "type":"real", - "units":"m s-1", - "valid_max":"", - "valid_min":"" - }, - "zfullo":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Depth below geoid", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Depth Below Geoid of Ocean Layer", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"zfullo", - "positive":"", - "standard_name":"depth_below_geoid", - "type":"real", - "units":"m", - "valid_max":"", - "valid_min":"" - }, - "zhalfo":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Depth below geoid", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Depth Below Geoid of Interfaces Between Ocean Layers", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"zhalfo", - "positive":"", - "standard_name":"depth_below_geoid", - "type":"real", - "units":"m", - "valid_max":"", - "valid_min":"" - }, - "zmeso":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"carbon concentration from mesozooplankton (20-200 um) component alone", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Mole Concentration of Mesozooplankton expressed as Carbon in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"zmeso", - "positive":"", - "standard_name":"mole_concentration_of_mesozooplankton_expressed_as_carbon_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "zmicro":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"carbon concentration from the microzooplankton (<20 um) component alone", - 
"dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Mole Concentration of Microzooplankton expressed as Carbon in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"zmicro", - "positive":"", - "standard_name":"mole_concentration_of_microzooplankton_expressed_as_carbon_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "zmisc":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"carbon from additional zooplankton component concentrations alone (e.g. Micro, meso). Since the models all have different numbers of components, this variable has been included to provide a check for intercomparison between models since some phytoplankton groups are supersets.", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Mole Concetration of Other Zooplankton expressed as Carbon in sea water", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"zmisc", - "positive":"", - "standard_name":"mole_concentration_of_miscellaneous_zooplankton_expressed_as_carbon_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "zo2min":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Depth of vertical minimum concentration of dissolved oxygen gas (if two, then the shallower)", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Depth of Oxygen Minimum Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"zo2min", - "positive":"", - "standard_name":"depth_at_shallowest_local_minimum_in_vertical_profile_of_mole_concentration_of_dissolved_molecular_oxygen_in_sea_water", - "type":"real", - "units":"m", - "valid_max":"", - "valid_min":"" - }, - "zooc":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"sum of zooplankton carbon component concentrations", - "dimensions":"longitude latitude olevel time", - "frequency":"mon", - "long_name":"Zooplankton Carbon Concentration", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"zooc", - "positive":"", - "standard_name":"mole_concentration_of_zooplankton_expressed_as_carbon_in_sea_water", - "type":"real", - "units":"mol m-3", - "valid_max":"", - "valid_min":"" - }, - "zos":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"This is the dynamic sea level, so should have zero global area mean. 
It should not include inverse barometer depressions from sea ice.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Sea Surface Height Above Geoid", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"zos", - "positive":"", - "standard_name":"sea_surface_height_above_geoid", - "type":"real", - "units":"m", - "valid_max":"", - "valid_min":"" - }, - "zossq":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Surface ocean geoid defines z=0.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Square of Sea Surface Height Above Geoid", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"zossq", - "positive":"", - "standard_name":"square_of_sea_surface_height_above_geoid", - "type":"real", - "units":"m2", - "valid_max":"", - "valid_min":"" - }, - "zostoga":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"There is no CMIP6 request for zosga nor zossga.", - "dimensions":"time", - "frequency":"mon", - "long_name":"Global Average Thermosteric Sea Level Change", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"zostoga", - "positive":"", - "standard_name":"global_average_thermosteric_sea_level_change", - "type":"real", - "units":"m", - "valid_max":"", - "valid_min":"" - }, - "zsatarag":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Depth of aragonite saturation horizon (0 if undersaturated at all depths, 'missing' if supersaturated at all depths; if multiple horizons exist, the shallowest should be taken).", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Aragonite Saturation Depth", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"zsatarag", - "positive":"", - "standard_name":"minimum_depth_of_aragonite_undersaturation_in_sea_water", - "type":"real", - "units":"m", - "valid_max":"", - "valid_min":"" - }, - "zsatcalc":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Depth of calcite saturation horizon (0 if undersaturated at all depths, and missing saturated through whole depth; if two or more horizons exist, then the shallowest is reported)", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Calcite Saturation Depth", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"zsatcalc", - "positive":"", - "standard_name":"minimum_depth_of_calcite_undersaturation_in_sea_water", - "type":"real", - "units":"m", - "valid_max":"", - "valid_min":"" - } - } -} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_SImon.json b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_SImon.json deleted file mode 100644 index bd5e3733a2..0000000000 --- a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_SImon.json +++ /dev/null @@ -1,1532 +0,0 @@ -{ - "Header":{ - "#dataRequest_specs_version":"01.00.21", - "#mip_era":"CMIP6", - "Conventions":"CF-1.7 ODS-2.1", - "approx_interval":"30.00000", - "cmor_version":"3.2", - "data_specs_version":"2.1.0", - "generic_levels":"", - "int_missing_value":"-2147483648", - "missing_value":"1e20", - "product":"observations", - "realm":"seaIce", - "table_date":"07 March 2018", - "table_id":"Table obs4MIPs_SImon" - }, - "variable_entry":{ - "sfdsi":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", - "comment":"This field is physical, and it arises since sea ice has a nonzero salt 
content, so it exchanges salt with the liquid ocean upon melting and freezing.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Salt flux from sea ice", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sfdsi", - "positive":"down", - "standard_name":"downward_sea_ice_basal_salt_flux", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "siage":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", - "comment":"Age of sea ice", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Age of sea ice", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"siage", - "positive":"", - "standard_name":"age_of_sea_ice", - "type":"real", - "units":"s", - "valid_max":"", - "valid_min":"" - }, - "siareaacrossline":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"net (sum of transport in all directions) sea ice area transport through the following four passages, positive into the Arctic Ocean 1. Fram Strait = (11.5W,81.3N) to (10.5E,79.6N) 2. Canadian Archipelago = (128.2W,70.6N) to (59.3W,82.1N) 3. Barents opening = (16.8E,76.5N) to (19.2E,70.2N) 4. Bering Strait = (171W,66.2N) to (166W,65N)", - "dimensions":"siline time", - "frequency":"mon", - "long_name":"Sea ice area flux through straits", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"siareaacrossline", - "positive":"", - "standard_name":"sea_ice_area_transport_across_line", - "type":"real", - "units":"m2 s-1", - "valid_max":"", - "valid_min":"" - }, - "siarean":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"total area of sea ice in the Northern hemisphere", - "dimensions":"time", - "frequency":"mon", - "long_name":"Sea ice area North", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"siarean", - "positive":"", - "standard_name":"sea_ice_area", - "type":"real", - "units":"1e6 km2", - "valid_max":"", - "valid_min":"" - }, - "siareas":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"total area of sea ice in the Southern hemisphere", - "dimensions":"time", - "frequency":"mon", - "long_name":"Sea ice area South", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"siareas", - "positive":"", - "standard_name":"sea_ice_area", - "type":"real", - "units":"1e6 km2", - "valid_max":"", - "valid_min":"" - }, - "sicompstren":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", - "comment":"Computed strength of the ice pack, defined as the energy (J m-2) dissipated per unit area removed from the ice pack under compression, and assumed proportional to the change in potential energy caused by ridging.
For Hibler-type models, this is P (= P* h exp(-C(1-A)))", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Compressive sea ice strength", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sicompstren", - "positive":"", - "standard_name":"compressive_strength_of_sea_ice", - "type":"real", - "units":"N m-1", - "valid_max":"", - "valid_min":"" - }, - "siconc":{ - "cell_measures":"", - "cell_methods":"area: mean where sea time: mean", - "comment":"Area fraction of grid cell covered by sea ice", - "dimensions":"longitude latitude time typesi", - "frequency":"mon", - "long_name":"Sea Ice Area Fraction (Ocean Grid)", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"siconc", - "positive":"", - "standard_name":"sea_ice_area_fraction", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "siconca":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Area fraction of grid cell covered by sea ice", - "dimensions":"longitude latitude time typesi", - "frequency":"mon", - "long_name":"Sea Ice Area Fraction (Atmospheric Grid)", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"siconca", - "positive":"", - "standard_name":"sea_ice_area_fraction", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "sidconcdyn":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Total change in sea-ice area fraction through dynamics-related processes (advection, divergence...)", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"sea-ice area fraction change from dynamics", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sidconcdyn", - "positive":"", - "standard_name":"tendency_of_sea_ice_area_fraction_due_to_dynamics", - "type":"real", - "units":"s-1", - "valid_max":"", - "valid_min":"" - }, - "sidconcth":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Total change in sea-ice area fraction through thermodynamic processes", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"sea-ice area fraction change from thermodynamics", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sidconcth", - "positive":"", - "standard_name":"tendency_of_sea_ice_area_fraction_due_to_thermodynamics", - "type":"real", - "units":"s-1", - "valid_max":"", - "valid_min":"" - }, - "sidivvel":{ - "cell_measures":"", - "cell_methods":"area: mean where sea_ice (comment: mask=siconc) time: point", - "comment":"Divergence of sea-ice velocity field (first shear strain invariant)", - "dimensions":"longitude latitude time1", - "frequency":"monPt", - "long_name":"Divergence of the sea-ice velocity field", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sidivvel", - "positive":"", - "standard_name":"divergence_of_sea_ice_velocity", - "type":"real", - "units":"s-1", - "valid_max":"", - "valid_min":"" - }, - "sidmassdyn":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Total change in sea-ice mass through dynamics-related processes (advection,...)
divided by grid-cell area", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"sea-ice mass change from dynamics", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sidmassdyn", - "positive":"", - "standard_name":"tendency_of_sea_ice_amount_due_to_dynamics", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "sidmassevapsubl":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"The rate of change of sea-ice mass change through evaporation and sublimation divided by grid-cell area", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"sea-ice mass change through evaporation and sublimation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sidmassevapsubl", - "positive":"up", - "standard_name":"water_evaporation_flux", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "sidmassgrowthbot":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"The rate of change of sea ice mass due to vertical growth of existing sea ice at its base divided by grid-cell area.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"sea-ice mass change through basal growth", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sidmassgrowthbot", - "positive":"", - "standard_name":"tendency_of_sea_ice_amount_due_to_congelation_ice_accumulation", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "sidmassgrowthwat":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"The rate of change of sea ice mass due to sea ice formation in supercooled water (often through frazil formation) divided by grid-cell area. Together, sidmassgrowthwat and sidmassgrowthbot should give total ice growth", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"sea-ice mass change through growth in supercooled open water (aka frazil)", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sidmassgrowthwat", - "positive":"", - "standard_name":"tendency_of_sea_ice_amount_due_to_freezing_in_open_water", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "sidmasslat":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"The rate of change of sea ice mass through lateral melting divided by grid-cell area (report 0 if not explicitly calculated thermodynamically)", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Lateral sea ice melt rate", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sidmasslat", - "positive":"", - "standard_name":"tendency_of_sea_ice_amount_due_to_lateral_melting", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "sidmassmeltbot":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"The rate of change of sea ice mass through melting at the ice bottom divided by grid-cell area", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"sea-ice mass change through bottom melting", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sidmassmeltbot", - "positive":"", - "standard_name":"tendency_of_sea_ice_amount_due_to_basal_melting", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "sidmassmelttop":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"The rate of change of sea ice mass through melting at 
the ice surface divided by grid-cell area", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"sea-ice mass change through surface melting", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sidmassmelttop", - "positive":"", - "standard_name":"tendency_of_sea_ice_amount_due_to_surface_melting", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "sidmasssi":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"The rate of change of sea ice mass due to transformation of snow to sea ice divided by grid-cell area", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"sea-ice mass change through snow-to-ice conversion", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sidmasssi", - "positive":"", - "standard_name":"tendency_of_sea_ice_amount_due_to_snow_conversion", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "sidmassth":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Total change in sea-ice mass from thermodynamic processes divided by grid-cell area", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"sea-ice mass change from thermodynamics", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sidmassth", - "positive":"", - "standard_name":"tendency_of_sea_ice_amount_due_to_thermodynamics", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "sidmasstranx":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"Includes transport of both sea ice and snow by advection", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"X-component of sea-ice mass transport", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sidmasstranx", - "positive":"", - "standard_name":"sea_ice_x_transport", - "type":"real", - "units":"kg s-1", - "valid_max":"", - "valid_min":"" - }, - "sidmasstrany":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"Includes transport of both sea ice and snow by advection", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Y-component of sea-ice mass transport", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sidmasstrany", - "positive":"", - "standard_name":"sea_ice_y_transport", - "type":"real", - "units":"kg s-1", - "valid_max":"", - "valid_min":"" - }, - "sidragbot":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", - "comment":"Oceanic drag coefficient that is used to calculate the oceanic momentum drag on sea ice", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Ocean drag coefficient", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sidragbot", - "positive":"", - "standard_name":"surface_drag_coefficient_for_momentum_in_water", - "type":"real", - "units":"1", - "valid_max":"", - "valid_min":"" - }, - "sidragtop":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", - "comment":"Atmospheric drag coefficient that is used to calculate the atmospheric momentum drag on sea ice", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Atmospheric drag coefficient", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sidragtop", - "positive":"", - "standard_name":"surface_drag_coefficient_for_momentum_in_air", - "type":"real", - 
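The sicompstren comment a few entries above quotes the Hibler-type ice strength inline. Written out (a standard sea-ice rheology formula, given here as background rather than as part of the table), with h the mean ice thickness, A the ice concentration, and P* and C empirical constants:

P = P^{*} \, h \, e^{-C(1-A)}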
"units":"1", - "valid_max":"", - "valid_min":"" - }, - "siextentn":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Total area of all Northern-Hemisphere grid cells that are covered by at least 15 % areal fraction of sea ice", - "dimensions":"time", - "frequency":"mon", - "long_name":"Sea ice extent North", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"siextentn", - "positive":"", - "standard_name":"sea_ice_extent", - "type":"real", - "units":"1e6 km2", - "valid_max":"", - "valid_min":"" - }, - "siextents":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Total area of all Southern-Hemisphere grid cells that are covered by at least 15 % areal fraction of sea ice", - "dimensions":"time", - "frequency":"mon", - "long_name":"Sea ice extent South", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"siextents", - "positive":"", - "standard_name":"sea_ice_extent", - "type":"real", - "units":"1e6 km2", - "valid_max":"", - "valid_min":"" - }, - "sifb":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", - "comment":"Mean height of sea-ice surface (=snow-ice interface when snow covered) above sea level", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Sea-ice freeboard", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sifb", - "positive":"", - "standard_name":"sea_ice_freeboard", - "type":"real", - "units":"m", - "valid_max":"", - "valid_min":"" - }, - "siflcondbot":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", - "comment":"the net heat conduction flux at the ice base", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Net conductive heat fluxes in ice at the bottom", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"siflcondbot", - "positive":"down", - "standard_name":"conductive_heat_flux_at_sea_ice_bottom", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "siflcondtop":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", - "comment":"the net heat conduction flux at the ice surface", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Net conductive heat flux in ice at the surface", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"siflcondtop", - "positive":"down", - "standard_name":"conductive_heat_flux_at_sea_ice_surface", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "siflfwbot":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", - "comment":"Total flux of fresh water from water into sea ice divided by grid-cell area; This flux is negative during ice growth (liquid water mass decreases, hence upward flux of freshwater), positive during ice melt (liquid water mass increases, hence downward flux of freshwater)", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Freshwater flux from sea ice", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"siflfwbot", - "positive":"", - "standard_name":"freshwater_flux_from_ice", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "siflfwdrain":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", - "comment":"Total flux of fresh water from sea-ice 
surface into underlying ocean. This combines both surface melt water that drains directly into the ocean and the drainage of surface melt pond. By definition, this flux is always positive.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Freshwater flux from sea-ice surface", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"siflfwdrain", - "positive":"", - "standard_name":"freshwater_flux_from_ice_surface", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "sifllatstop":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", - "comment":"the net latent heat flux over sea ice", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Net latent heat flux over sea ice", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sifllatstop", - "positive":"up", - "standard_name":"surface_upward_latent_heat_flux", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "sifllwdtop":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", - "comment":"the downwelling longwave flux over sea ice (always positive)", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Downwelling longwave flux over sea ice", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sifllwdtop", - "positive":"down", - "standard_name":"surface_downwelling_longwave_flux_in_air", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "sifllwutop":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", - "comment":"the upwelling longwave flux over sea ice (always negative)", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Upwelling Longwave Flux over Sea Ice", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sifllwutop", - "positive":"up", - "standard_name":"surface_upwelling_longwave_flux_in_air", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "siflsenstop":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", - "comment":"the net sensible heat flux over sea ice", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Net upward sensible heat flux over sea ice", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"siflsenstop", - "positive":"up", - "standard_name":"surface_upward_sensible_heat_flux", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "siflsensupbot":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", - "comment":"the net sensible heat flux under sea ice from the ocean", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"siflsensupbot", - "positive":"up", - "standard_name":"upward_sea_ice_basal_heat_flux", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "siflswdbot":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", - "comment":"The downwelling shortwave flux underneath sea ice (always positive)", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Downwelling shortwave flux under sea ice", - "ok_max_mean_abs":"", - 
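The siarean/siareas entries above integrate the ice concentration itself, whereas the siextentn/siextents comments define extent by counting whole cells with at least 15 % cover. A sketch of the distinction, assuming a siconc field in percent and a matching cell-area array (the function and array names are illustrative):

import numpy as np

def ice_area_and_extent(siconc_percent, cell_area_m2):
    """Sea-ice area integrates concentration; extent counts cells >= 15 % cover."""
    frac = siconc_percent / 100.0
    area = np.nansum(frac * cell_area_m2)  # cf. siarean/siareas
    extent = np.nansum(np.where(frac >= 0.15, cell_area_m2, 0.0))  # cf. siextentn/siextents
    return area, extent  # both in m2 here; the tables report 1e6 km2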
"ok_min_mean_abs":"", - "out_name":"siflswdbot", - "positive":"down", - "standard_name":"bottom_downwelling_shortwave_flux_into_ocean", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "siflswdtop":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", - "comment":"The downwelling shortwave flux over sea ice (always positive by sign convention)", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Downwelling shortwave flux over sea ice", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"siflswdtop", - "positive":"down", - "standard_name":"surface_downwelling_shortwave_flux_in_air", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "siflswutop":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", - "comment":"The upwelling shortwave flux over sea ice (always negative)", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Upwelling Shortwave Flux over Sea Ice", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"siflswutop", - "positive":"up", - "standard_name":"surface_upwelling_shortwave_flux_in_air", - "type":"real", - "units":"W m-2", - "valid_max":"", - "valid_min":"" - }, - "siforcecoriolx":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc)", - "comment":"X-component of force on sea ice caused by coriolis force", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Coriolis force term in force balance (x-component)", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"siforcecoriolx", - "positive":"", - "standard_name":"coriolis_force_on_sea_ice_x", - "type":"real", - "units":"N m-2", - "valid_max":"", - "valid_min":"" - }, - "siforcecorioly":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc)", - "comment":"Y-component of force on sea ice caused by coriolis force", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Coriolis force term in force balance (y-component)", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"siforcecorioly", - "positive":"", - "standard_name":"coriolis_force_on_sea_ice_y", - "type":"real", - "units":"N m-2", - "valid_max":"", - "valid_min":"" - }, - "siforceintstrx":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc)", - "comment":"X-component of force on sea ice caused by internal stress (divergence of sigma)", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Internal stress term in force balance (x-component)", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"siforceintstrx", - "positive":"", - "standard_name":"internal_stress_in_sea_ice_x", - "type":"real", - "units":"N m-2", - "valid_max":"", - "valid_min":"" - }, - "siforceintstry":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc)", - "comment":"Y-component of force on sea ice caused by internal stress (divergence of sigma)", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Internal stress term in force balance (y-component)", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"siforceintstry", - "positive":"", - "standard_name":"internal_stress_in_sea_ice_y", - "type":"real", - "units":"N m-2", - "valid_max":"", - "valid_min":"" - }, - 
"siforcetiltx":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc)", - "comment":"X-component of force on sea ice caused by sea-surface tilt", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Sea-surface tilt term in force balance (x-component)", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"siforcetiltx", - "positive":"", - "standard_name":"sea_surface_tilt_force_on_sea_ice_x", - "type":"real", - "units":"N m-2", - "valid_max":"", - "valid_min":"" - }, - "siforcetilty":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc)", - "comment":"Y-component of force on sea ice caused by sea-surface tilt", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Sea-surface tilt term in force balance (y-component)", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"siforcetilty", - "positive":"", - "standard_name":"sea_surface_tilt_force_on_sea_ice_y", - "type":"real", - "units":"N m-2", - "valid_max":"", - "valid_min":"" - }, - "sihc":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Heat content of all ice in grid cell divided by total grid-cell area. Water at 0 Celsius is assumed to have a heat content of 0 J. Does not include heat content of snow, but does include heat content of brine. Heat content is always negative, since both the sensible and the latent heat content of ice are less than that of water", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Sea-ice heat content per unit area", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sihc", - "positive":"", - "standard_name":"integral_of_sea_ice_temperature_wrt_depth_expressed_as_heat_content", - "type":"real", - "units":"J m-2", - "valid_max":"", - "valid_min":"" - }, - "siitdconc":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc)", - "comment":"Area fraction of grid cell covered by each ice-thickness category (vector with one entry for each thickness category starting from the thinnest category, netcdf file should use thickness bounds of the categories as third coordinate axis)", - "dimensions":"longitude latitude iceband time", - "frequency":"mon", - "long_name":"Sea-ice area fractions in thickness categories", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"siitdconc", - "positive":"", - "standard_name":"sea_ice_area_fraction_over_categories", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "siitdsnconc":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siitdconc)", - "comment":"Area fraction of grid cell covered by snow in each ice-thickness category (vector with one entry for each thickness category starting from the thinnest category, netcdf file should use thickness bounds of the categories as third coordinate axis)", - "dimensions":"longitude latitude iceband time", - "frequency":"mon", - "long_name":"Snow area fractions in thickness categories", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"siitdsnconc", - "positive":"", - "standard_name":"snow_area_fraction_over_categories", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "siitdsnthick":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siitdconc)", - "comment":"Actual thickness of snow in each category (NOT volume divided by grid area), (vector with one 
entry for each thickness category starting from the thinnest category, netcdf file should use thickness bounds of categories as third coordinate axis)", - "dimensions":"longitude latitude iceband time", - "frequency":"mon", - "long_name":"Snow thickness in thickness categories", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"siitdsnthick", - "positive":"", - "standard_name":"snow_thickness_over_categories", - "type":"real", - "units":"m", - "valid_max":"", - "valid_min":"" - }, - "siitdthick":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siitdconc)", - "comment":"Actual (floe) thickness of sea ice in each category (NOT volume divided by grid area), (vector with one entry for each thickness category starting from the thinnest category, netcdf file should use thickness bounds of categories as third coordinate axis)", - "dimensions":"longitude latitude iceband time", - "frequency":"mon", - "long_name":"Sea-ice thickness in thickness categories", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"siitdthick", - "positive":"", - "standard_name":"sea_ice_thickness_over_categories", - "type":"real", - "units":"m", - "valid_max":"", - "valid_min":"" - }, - "simass":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Total mass of sea ice divided by grid-cell area", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Sea-ice mass per area", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"simass", - "positive":"", - "standard_name":"sea_ice_amount", - "type":"real", - "units":"kg m-2", - "valid_max":"", - "valid_min":"" - }, - "simassacrossline":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"net (sum of transport in all directions) sea ice mass transport through the following four passages, positive into the Arctic Ocean 1. Fram Strait = (11.5W,81.3N) to (10.5E,79.6N) 2. Canadian Archipelago = (128.2W,70.6N) to (59.3W,82.1N) 3. Barents opening = (16.8E,76.5N) to (19.2E,70.2N) 4.
Bering Strait = (171W,66.2N) to (166W,65N)", - "dimensions":"siline time", - "frequency":"mon", - "long_name":"Sea-ice mass flux through straits", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"simassacrossline", - "positive":"", - "standard_name":"sea_ice_transport_across_line", - "type":"real", - "units":"kg s-1", - "valid_max":"", - "valid_min":"" - }, - "simpconc":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc)", - "comment":"Fraction of sea ice, by area, which is covered by melt ponds, giving equal weight to every square metre of sea ice.", - "dimensions":"longitude latitude time typemp", - "frequency":"mon", - "long_name":"Percentage Cover of Sea-Ice by Meltpond", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"simpconc", - "positive":"", - "standard_name":"area_fraction", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "simpmass":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice_melt_pond (comment: mask=simpconc)", - "comment":"Meltpond mass per area of sea ice.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Meltpond Mass per Unit Area", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"simpmass", - "positive":"", - "standard_name":"surface_liquid_water_amount", - "type":"real", - "units":"kg m-2", - "valid_max":"", - "valid_min":"" - }, - "simprefrozen":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice_melt_pond (comment: mask=simpconc)", - "comment":"Volume of refrozen ice on melt ponds divided by meltpond covered area", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Thickness of Refrozen Ice on Melt Pond", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"simprefrozen", - "positive":"", - "standard_name":"melt_pond_refrozen_ice", - "type":"real", - "units":"m", - "valid_max":"", - "valid_min":"" - }, - "sipr":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", - "comment":"mass of liquid precipitation falling onto sea ice divided by grid-cell area", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Rainfall rate over sea ice", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sipr", - "positive":"", - "standard_name":"rainfall_flux", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "sirdgconc":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", - "comment":"Fraction of sea ice, by area, which is covered by sea ice ridges, giving equal weight to every square metre of sea ice.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Percentage Cover of Sea-Ice by Ridging", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sirdgconc", - "positive":"", - "standard_name":"fraction_of_ridged_sea_ice", - "type":"real", - "units":"1", - "valid_max":"", - "valid_min":"" - }, - "sirdgthick":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=sirdgconc - ridges only)", - "comment":"Sea Ice Ridge Height (representing mean height over the ridged area)", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Ridged ice thickness", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sirdgthick", - "positive":"", - "standard_name":"thickness_of_ridged_sea_ice",
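The siitd* entries above hold one value per thickness category along the iceband axis, with the category bounds as a coordinate. Under that convention the category fractions should sum to the total concentration, and weighting the category thickness by the category fraction yields ice volume per unit grid-cell area; a sketch (array names are illustrative, category axis first):

import numpy as np

def collapse_itd(siitdconc_percent, siitdthick_m):
    """Collapse per-category (iceband) fields to grid-cell totals."""
    # Category fractions should add up to the total concentration (cf. siconc).
    siconc = np.nansum(siitdconc_percent, axis=0)  # percent
    # Fraction-weighted category thickness gives volume per grid-cell area (cf. sivol).
    vol_per_area = np.nansum(siitdconc_percent / 100.0 * siitdthick_m, axis=0)  # m
    return siconc, vol_per_area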
"type":"real", - "units":"m", - "valid_max":"", - "valid_min":"" - }, - "sisali":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", - "comment":"Mean sea-ice salinity of all sea ice in grid cell", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Sea ice salinity", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sisali", - "positive":"", - "standard_name":"sea_ice_salinity", - "type":"real", - "units":"0.001", - "valid_max":"", - "valid_min":"" - }, - "sisaltmass":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Total mass of all salt in sea ice divided by grid-cell area", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Mass of salt in sea ice per area", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sisaltmass", - "positive":"", - "standard_name":"sea_ice_salt_mass", - "type":"real", - "units":"kg m-2", - "valid_max":"", - "valid_min":"" - }, - "sishevel":{ - "cell_measures":"", - "cell_methods":"area: mean where sea_ice (comment: mask=siconc) time: point", - "comment":"Maximum shear of sea-ice velocity field (second shear strain invariant)", - "dimensions":"longitude latitude time1", - "frequency":"monPt", - "long_name":"Maximum shear of sea-ice velocity field", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sishevel", - "positive":"", - "standard_name":"maximum_shear_of_sea_ice_velocity", - "type":"real", - "units":"s-1", - "valid_max":"", - "valid_min":"" - }, - "sisnconc":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", - "comment":"Fraction of sea ice, by area, which is covered by snow, giving equal weight to every square metre of sea ice . Exclude snow that lies on land or land ice.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Snow area fraction", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sisnconc", - "positive":"", - "standard_name":"surface_snow_area_fraction", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "sisnhc":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", - "comment":"Heat-content of all snow in grid cell divided by total grid-cell area. Snow-water equivalent at 0 Celsius is assumed to have a heat content of 0 J. 
Does not include heat content of sea ice.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Snow-heat content per unit area", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sisnhc", - "positive":"", - "standard_name":"thermal_energy_content_of_surface_snow", - "type":"real", - "units":"J m-2", - "valid_max":"", - "valid_min":"" - }, - "sisnmass":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", - "comment":"Total mass of snow on sea ice divided by grid-cell area", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Snow mass per area", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sisnmass", - "positive":"", - "standard_name":"liquid_water_content_of_surface_snow", - "type":"real", - "units":"kg m-2", - "valid_max":"", - "valid_min":"" - }, - "sisnthick":{ - "cell_measures":"", - "cell_methods":"area: mean where snow over sea_ice area: time: mean where sea_ice", - "comment":"Actual thickness of snow (snow volume divided by snow-covered area)", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Snow thickness", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sisnthick", - "positive":"", - "standard_name":"surface_snow_thickness", - "type":"real", - "units":"m", - "valid_max":"", - "valid_min":"" - }, - "sispeed":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", - "comment":"Speed of ice (i.e. mean absolute velocity) to account for back-and-forth movement of the ice", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Sea-ice speed", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sispeed", - "positive":"", - "standard_name":"sea_ice_speed", - "type":"real", - "units":"m s-1", - "valid_max":"", - "valid_min":"" - }, - "sistremax":{ - "cell_measures":"", - "cell_methods":"area: mean where sea_ice (comment: mask=siconc) time: point", - "comment":"Maximum shear stress in sea ice (second stress invariant)", - "dimensions":"longitude latitude time1", - "frequency":"monPt", - "long_name":"Maximum shear stress in sea ice", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sistremax", - "positive":"", - "standard_name":"maximum_shear_stress", - "type":"real", - "units":"N m-1", - "valid_max":"", - "valid_min":"" - }, - "sistresave":{ - "cell_measures":"", - "cell_methods":"area: mean where sea_ice (comment: mask=siconc) time: point", - "comment":"Average normal stress in sea ice (first stress invariant)", - "dimensions":"longitude latitude time1", - "frequency":"monPt", - "long_name":"Average normal stress in sea ice", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sistresave", - "positive":"", - "standard_name":"average_normal_stress", - "type":"real", - "units":"N m-1", - "valid_max":"", - "valid_min":"" - }, - "sistrxdtop":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc)", - "comment":"X-component of atmospheric stress on sea ice", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"X-component of atmospheric stress on sea ice", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sistrxdtop", - "positive":"down", - "standard_name":"surface_downward_x_stress", - "type":"real", - "units":"N m-2", - "valid_max":"", - "valid_min":"" - }, - "sistrxubot":{ - "cell_measures":"", - "cell_methods":"area: time: mean 
where sea_ice (comment: mask=siconc or siconca)", - "comment":"X-component of ocean stress on sea ice", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"X-component of ocean stress on sea ice", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sistrxubot", - "positive":"up", - "standard_name":"upward_x_stress_at_sea_ice_base", - "type":"real", - "units":"N m-2", - "valid_max":"", - "valid_min":"" - }, - "sistrydtop":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc)", - "comment":"Y-component of atmospheric stress on sea ice", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Y-component of atmospheric stress on sea ice", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sistrydtop", - "positive":"down", - "standard_name":"surface_downward_y_stress", - "type":"real", - "units":"N m-2", - "valid_max":"", - "valid_min":"" - }, - "sistryubot":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", - "comment":"Y-component of ocean stress on sea ice", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Y-component of ocean stress on sea ice", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sistryubot", - "positive":"up", - "standard_name":"upward_y_stress_at_sea_ice_base", - "type":"real", - "units":"N m-2", - "valid_max":"", - "valid_min":"" - }, - "sitempbot":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", - "comment":"Report temperature at interface, NOT temperature within lowermost model layer", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Temperature at ice-ocean interface", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sitempbot", - "positive":"", - "standard_name":"sea_ice_bottom_temperature", - "type":"real", - "units":"K", - "valid_max":"", - "valid_min":"" - }, - "sitempsnic":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", - "comment":"Report surface temperature of ice where snow thickness is zero", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Temperature at snow-ice interface", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sitempsnic", - "positive":"", - "standard_name":"sea_ice_surface_temperature", - "type":"real", - "units":"K", - "valid_max":"", - "valid_min":"" - }, - "sitemptop":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", - "comment":"Report surface temperature of snow where snow covers the sea ice.", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Surface temperature of sea ice", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sitemptop", - "positive":"", - "standard_name":"sea_ice_surface_temperature", - "type":"real", - "units":"K", - "valid_max":"", - "valid_min":"" - }, - "sithick":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", - "comment":"Actual (floe) thickness of sea ice (NOT volume divided by grid area as was done in CMIP5)", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Sea Ice Thickness", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sithick", - "positive":"", - "standard_name":"sea_ice_thickness", - "type":"real", - 
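sithick above is the actual floe thickness, while sivol just below is ice volume divided by grid-cell area (what CMIP5 called thickness); where siconc is available the two differ only by a concentration factor. A sketch of that conversion, assuming siconc in percent (names are illustrative):

import numpy as np

def sivol_from_sithick(sithick_m, siconc_percent):
    """Volume per grid-cell area from actual floe thickness and concentration."""
    return sithick_m * siconc_percent / 100.0

def sithick_from_sivol(sivol_m, siconc_percent):
    """Actual floe thickness from volume per area; undefined in ice-free cells."""
    frac = siconc_percent / 100.0
    return np.where(frac > 0, sivol_m / np.maximum(frac, 1e-12), np.nan)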
"units":"m", - "valid_max":"", - "valid_min":"" - }, - "sitimefrac":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Fraction of time steps of the averaging period during which sea ice is present (siconc >0 ) in a grid cell", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Fraction of time steps with sea ice", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sitimefrac", - "positive":"", - "standard_name":"sea_ice_time_fraction", - "type":"real", - "units":"1", - "valid_max":"", - "valid_min":"" - }, - "siu":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc)", - "comment":"The x-velocity of ice on native model grid", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"X-component of sea ice velocity", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"siu", - "positive":"", - "standard_name":"sea_ice_x_velocity", - "type":"real", - "units":"m s-1", - "valid_max":"", - "valid_min":"" - }, - "siv":{ - "cell_measures":"", - "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc)", - "comment":"The y-velocity of ice on native model grid", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Y-component of sea ice velocity", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"siv", - "positive":"", - "standard_name":"sea_ice_y_velocity", - "type":"real", - "units":"m s-1", - "valid_max":"", - "valid_min":"" - }, - "sivol":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"Total volume of sea ice divided by grid-cell area (this used to be called ice thickness in CMIP5)", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Sea-ice volume per area", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sivol", - "positive":"", - "standard_name":"sea_ice_thickness", - "type":"real", - "units":"m", - "valid_max":"", - "valid_min":"" - }, - "sivoln":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"total volume of sea ice in the Northern hemisphere", - "dimensions":"time", - "frequency":"mon", - "long_name":"Sea ice volume North", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sivoln", - "positive":"", - "standard_name":"sea_ice_volume", - "type":"real", - "units":"1e3 km3", - "valid_max":"", - "valid_min":"" - }, - "sivols":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"total volume of sea ice in the Southern hemisphere", - "dimensions":"time", - "frequency":"mon", - "long_name":"Sea ice volume South", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sivols", - "positive":"", - "standard_name":"sea_ice_volume", - "type":"real", - "units":"1e3 km3", - "valid_max":"", - "valid_min":"" - }, - "sndmassdyn":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"the rate of change of snow mass through advection with sea ice divided by grid-cell area", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Snow Mass Rate of Change through Avection by Sea-ice Dynamics", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sndmassdyn", - "positive":"", - "standard_name":"tendency_of_snow_mass_due_to_sea_ice_dynamics", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "sndmassmelt":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"the rate of change of snow mass through melt 
divided by grid-cell area", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Snow Mass Rate of Change through Melt", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sndmassmelt", - "positive":"", - "standard_name":"surface_snow_melt_flux", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "sndmasssi":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"the rate of change of snow mass due to transformation of snow to sea ice divided by grid-cell area", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Snow Mass Rate of Change through Snow-to-Ice Conversion", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sndmasssi", - "positive":"", - "standard_name":"tendency_of_surface_snow_amount_due_to_conversion_of_snow_to_sea_ice", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "sndmasssnf":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"mass of solid precipitation falling onto sea ice divided by grid-cell area", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"snow mass change through snow fall", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sndmasssnf", - "positive":"", - "standard_name":"snowfall_flux", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "sndmasssubl":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"the rate of change of snow mass through sublimation and evaporation divided by grid-cell area", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Snow Mass Rate of Change through Evaporation or Sublimation", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sndmasssubl", - "positive":"", - "standard_name":"surface_snow_sublimation_flux", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "sndmasswindrif":{ - "cell_measures":"", - "cell_methods":"area: time: mean", - "comment":"the rate of change of snow mass through wind drift of snow (from sea-ice into the sea) divided by grid-cell area", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Snow Mass Rate of Change through Wind Drift of Snow", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sndmasswindrif", - "positive":"", - "standard_name":"tendency_of_snow_mass_due_to_drifting_snow", - "type":"real", - "units":"kg m-2 s-1", - "valid_max":"", - "valid_min":"" - }, - "snmassacrossline":{ - "cell_measures":"", - "cell_methods":"time: mean", - "comment":"net (sum of transport in all directions) snow mass transport through the following four passages, positive into the Arctic Ocean 1. Fram Strait = (11.5W,81.3N to (10.5E,79.6N) 2. 
Canadian Archipela", - "dimensions":"siline time", - "frequency":"mon", - "long_name":"Snow mass flux through straits", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"snmassacrossline", - "positive":"", - "standard_name":"snow_mass_transport_across_line", - "type":"real", - "units":"kg s-1", - "valid_max":"", - "valid_min":"" - } - } -} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_coordinate.json b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_coordinate.json deleted file mode 100644 index cafd418bd0..0000000000 --- a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_coordinate.json +++ /dev/null @@ -1,2900 +0,0 @@ -{ - "axis_entry":{ - "alev1":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"lowest atmospheric model level", - "must_have_bounds":"yes", - "out_name":"lev", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"", - "stored_direction":"", - "tolerance":"", - "type":"double", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "alt16":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"altitude", - "must_have_bounds":"yes", - "out_name":"alt16", - "positive":"up", - "requested":[ - "0", - "250", - "750", - "1250", - "1750", - "2250", - "2750", - "3500", - "4500", - "6000", - "8000", - "10000", - "12000", - "14500", - "16000", - "18000" - ], - "requested_bounds":[ - "-99000.0", - "0.0", - "0.0", - "500.0", - "500.0", - "1000.0", - "1000.0", - "1500.0", - "1500.0", - "2000.0", - "2000.0", - "2500.0", - "2500.0", - "3000.0", - "3000.0", - "4000.0", - "4000.0", - "5000.0", - "5000.0", - "7000.0", - "7000.0", - "9000.0", - "9000.0", - "11000.0", - "11000.0", - "13000.0", - "13000.0", - "15000.0", - "15000.0", - "17000.0", - "17000.0", - "99000.0" - ], - "standard_name":"altitude", - "stored_direction":"increasing", - "tolerance":"", - "type":"double", - "units":"m", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "alt40":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"altitude", - "must_have_bounds":"yes", - "out_name":"alt40", - "positive":"up", - "requested":[ - "240.", - "720.", - "1200.", - "1680.", - "2160.", - "2640.", - "3120.", - "3600.", - "4080.", - "4560.", - "5040.", - "5520.", - "6000.", - "6480.", - "6960.", - "7440.", - "7920.", - "8400.", - "8880.", - "9360.", - "9840.", - "10320.", - "10800.", - "11280.", - "11760.", - "12240.", - "12720.", - "13200.", - "13680.", - "14160.", - "14640.", - "15120.", - "15600.", - "16080.", - "16560.", - "17040.", - "17520.", - "18000.", - "18480.", - "18960." 
- ], - "requested_bounds":[ - "0.0", - "480.0", - "480.0", - "960.0", - "960.0", - "1440.0", - "1440.0", - "1920.0", - "1920.0", - "2400.0", - "2400.0", - "2880.0", - "2880.0", - "3360.0", - "3360.0", - "3840.0", - "3840.0", - "4320.0", - "4320.0", - "4800.0", - "4800.0", - "5280.0", - "5280.0", - "5760.0", - "5760.0", - "6240.0", - "6240.0", - "6720.0", - "6720.0", - "7200.0", - "7200.0", - "7680.0", - "7680.0", - "8160.0", - "8160.0", - "8640.0", - "8640.0", - "9120.0", - "9120.0", - "9600.0", - "9600.0", - "10080.0", - "10080.0", - "10560.0", - "10560.0", - "11040.0", - "11040.0", - "11520.0", - "11520.0", - "12000.0", - "12000.0", - "12480.0", - "12480.0", - "12960.0", - "12960.0", - "13440.0", - "13440.0", - "13920.0", - "13920.0", - "14400.0", - "14400.0", - "14880.0", - "14880.0", - "15360.0", - "15360.0", - "15840.0", - "15840.0", - "16320.0", - "16320.0", - "16800.0", - "16800.0", - "17280.0", - "17280.0", - "17760.0", - "17760.0", - "18240.0", - "18240.0", - "18720.0", - "18720.0", - "19200.0" - ], - "standard_name":"altitude", - "stored_direction":"increasing", - "tolerance":"", - "type":"double", - "units":"m", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "alternate_hybrid_sigma":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"p = ap + b*ps", - "long_name":"hybrid sigma pressure coordinate", - "must_have_bounds":"yes", - "out_name":"lev", - "positive":"down", - "requested":"", - "requested_bounds":"", - "standard_name":"atmosphere_hybrid_sigma_pressure_coordinate", - "stored_direction":"decreasing", - "tolerance":"", - "type":"", - "units":"1", - "valid_max":"1.0", - "valid_min":"0.0", - "value":"", - "z_bounds_factors":"ap: ap_bnds b: b_bnds ps: ps", - "z_factors":"ap: ap b: b ps: ps" - }, - "basin":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"ocean basin", - "must_have_bounds":"no", - "out_name":"basin", - "positive":"", - "requested":[ - "atlantic_arctic_ocean", - "indian_pacific_ocean", - "global_ocean" - ], - "requested_bounds":"", - "standard_name":"region", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "dbze":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"CloudSat simulator equivalent radar reflectivity factor", - "must_have_bounds":"yes", - "out_name":"dbze", - "positive":"", - "requested":[ - "-47.5", - "-42.5", - "-37.5", - "-32.5", - "-27.5", - "-22.5", - "-17.5", - "-12.5", - "-7.5", - "-2.5", - "2.5", - "7.5", - "12.5", - "17.5", - "22.5" - ], - "requested_bounds":[ - "-50.0", - "-45.0", - "-45.0", - "-40.0", - "-40.0", - "-35.0", - "-35.0", - "-30.0", - "-30.0", - "-25.0", - "-25.0", - "-20.0", - "-20.0", - "-15.0", - "-15.0", - "-10.0", - "-10.0", - "-5.0", - "-5.0", - "0.0", - "0.0", - "5.0", - "5.0", - "10.0", - "10.0", - "15.0", - "15.0", - "20.0", - "20.0", - "25.0" - ], - "standard_name":"equivalent_reflectivity_factor", - "stored_direction":"increasing", - "tolerance":"", - "type":"double", - "units":"dBZ", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "depth0m":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"depth", - "must_have_bounds":"no", - "out_name":"depth", - "positive":"down", - "requested":"", - "requested_bounds":"", - "standard_name":"depth", - 
"stored_direction":"increasing", - "tolerance":"", - "type":"double", - "units":"m", - "valid_max":"100.0", - "valid_min":"0.0", - "value":"0.", - "z_bounds_factors":"", - "z_factors":"" - }, - "depth100m":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"depth", - "must_have_bounds":"no", - "out_name":"depth", - "positive":"down", - "requested":"", - "requested_bounds":"", - "standard_name":"depth", - "stored_direction":"increasing", - "tolerance":"", - "type":"double", - "units":"m", - "valid_max":"120.0", - "valid_min":"80.0", - "value":"100.", - "z_bounds_factors":"", - "z_factors":"" - }, - "depth2000m":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"depth", - "must_have_bounds":"no", - "out_name":"depth", - "positive":"down", - "requested":"", - "requested_bounds":"", - "standard_name":"depth", - "stored_direction":"increasing", - "tolerance":"", - "type":"double", - "units":"m", - "valid_max":"2200.0", - "valid_min":"1980.0", - "value":"2000", - "z_bounds_factors":"", - "z_factors":"" - }, - "depth300m":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"depth", - "must_have_bounds":"no", - "out_name":"depth", - "positive":"down", - "requested":"", - "requested_bounds":"", - "standard_name":"depth", - "stored_direction":"increasing", - "tolerance":"", - "type":"double", - "units":"m", - "valid_max":"320.0", - "valid_min":"280.0", - "value":"300", - "z_bounds_factors":"", - "z_factors":"" - }, - "depth700m":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"depth", - "must_have_bounds":"no", - "out_name":"depth", - "positive":"down", - "requested":"", - "requested_bounds":"", - "standard_name":"depth", - "stored_direction":"increasing", - "tolerance":"", - "type":"double", - "units":"m", - "valid_max":"720.0", - "valid_min":"680.0", - "value":"700", - "z_bounds_factors":"", - "z_factors":"" - }, - "depth_coord":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"ocean depth coordinate", - "must_have_bounds":"yes", - "out_name":"lev", - "positive":"down", - "requested":"", - "requested_bounds":"", - "standard_name":"depth", - "stored_direction":"increasing", - "tolerance":"", - "type":"", - "units":"m", - "valid_max":"12000.0", - "valid_min":"0.0", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "effectRadIc":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"Effective Radius [Values to be specified]", - "must_have_bounds":"", - "out_name":"effectRadIc", - "positive":"", - "requested":[ - "5.", - "15.", - "25.", - "35.", - "50.", - "75." - ], - "requested_bounds":[ - "0.0", - "10.0", - "10.0", - "20.0", - "20.0", - "30.0", - "30.0", - "40.0", - "40.0", - "60.0", - "60.0", - "90.0" - ], - "standard_name":"", - "stored_direction":"", - "tolerance":"", - "type":"double", - "units":"micron", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "effectRadLi":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"Effective Radius [Values to be specified]", - "must_have_bounds":"", - "out_name":"effectRadLi", - "positive":"", - "requested":[ - "4.", - "9.", - "11.5", - "14.", - "17.5", - "25." 
- ], - "requested_bounds":[ - "0.0", - "8.0", - "8.0", - "10.0", - "10.0", - "13.0", - "13.0", - "15.0", - "15.0", - "20.0", - "20.0", - "30.0" - ], - "standard_name":"", - "stored_direction":"", - "tolerance":"", - "type":"double", - "units":"micron", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "height100m":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"height", - "must_have_bounds":"no", - "out_name":"height", - "positive":"up", - "requested":"", - "requested_bounds":"", - "standard_name":"height", - "stored_direction":"increasing", - "tolerance":"", - "type":"double", - "units":"m", - "valid_max":"120.0", - "valid_min":"80.0", - "value":"100.", - "z_bounds_factors":"", - "z_factors":"" - }, - "height10m":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"height", - "must_have_bounds":"no", - "out_name":"height", - "positive":"up", - "requested":"", - "requested_bounds":"", - "standard_name":"height", - "stored_direction":"increasing", - "tolerance":"", - "type":"double", - "units":"m", - "valid_max":"30.0", - "valid_min":"1.0", - "value":"10.", - "z_bounds_factors":"", - "z_factors":"" - }, - "height2m":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"height", - "must_have_bounds":"no", - "out_name":"height", - "positive":"up", - "requested":"", - "requested_bounds":"", - "standard_name":"height", - "stored_direction":"increasing", - "tolerance":"", - "type":"double", - "units":"m", - "valid_max":"10.0", - "valid_min":"1.0", - "value":"2.", - "z_bounds_factors":"", - "z_factors":"" - }, - "hybrid_height":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"z = a + b*orog", - "long_name":"hybrid height coordinate", - "must_have_bounds":"yes", - "out_name":"lev", - "positive":"up", - "requested":"", - "requested_bounds":"", - "standard_name":"atmosphere_hybrid_height_coordinate", - "stored_direction":"increasing", - "tolerance":"", - "type":"", - "units":"m", - "valid_max":"", - "valid_min":"0.0", - "value":"", - "z_bounds_factors":"a: lev_bnds b: b_bnds orog: orog", - "z_factors":"a: lev b: b orog: orog" - }, - "iceband":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"Ice Depth Band", - "must_have_bounds":"yes", - "out_name":"iceband", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"sea_ice_thickness", - "stored_direction":"", - "tolerance":"", - "type":"double", - "units":"m", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "landUse":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"Land use type", - "must_have_bounds":"no", - "out_name":"landuse", - "positive":"", - "requested":[ - "primary_and_secondary_land", - "pastures", - "crops", - "urban" - ], - "requested_bounds":"", - "standard_name":"area_type", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "latitude":{ - "axis":"Y", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"latitude", - "must_have_bounds":"yes", - "out_name":"lat", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"latitude", - "stored_direction":"increasing", - "tolerance":"", - "type":"double", - "units":"degrees_north", - 
"valid_max":"90.0", - "valid_min":"-90.0", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "location":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"location index", - "must_have_bounds":"no", - "out_name":"loc", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"", - "stored_direction":"increasing", - "tolerance":"", - "type":"integer", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "longitude":{ - "axis":"X", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"longitude", - "must_have_bounds":"yes", - "out_name":"lon", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"longitude", - "stored_direction":"increasing", - "tolerance":"", - "type":"double", - "units":"degrees_east", - "valid_max":"360.0", - "valid_min":"0.0", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "natural_log_pressure":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"p = p0 * exp(-lev)", - "long_name":"atmosphere natural log pressure coordinate", - "must_have_bounds":"yes", - "out_name":"lev", - "positive":"down", - "requested":"", - "requested_bounds":"", - "standard_name":"atmosphere_ln_pressure_coordinate", - "stored_direction":"decreasing", - "tolerance":"", - "type":"", - "units":"", - "valid_max":"20.0", - "valid_min":"-1.0", - "value":"", - "z_bounds_factors":"p0: p0 lev: lev_bnds", - "z_factors":"p0: p0 lev: lev" - }, - "ocean_double_sigma":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"for k <= k_c:\n z(k,j,i)= sigma(k)*f(j,i) \n for k > k_c:\n z(k,j,i)= f(j,i) + (sigma(k)-1)*(depth(j,i)-f(j,i)) \n f(j,i)= 0.5*(z1+ z2) + 0.5*(z1-z2)* tanh(2*a/(z1-z2)*(depth(j,i)-href))", - "long_name":"ocean double sigma coordinate", - "must_have_bounds":"yes", - "out_name":"lev", - "positive":"up", - "requested":"", - "requested_bounds":"", - "standard_name":"ocean_double_sigma", - "stored_direction":"", - "tolerance":"", - "type":"", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"sigma: sigma_bnds depth: depth z1: z1 z2: z2 a: a href: href k_c: k_c", - "z_factors":"sigma: sigma depth: depth z1: z1 z2: z2 a: a_coeff href: href k_c: k_c" - }, - "ocean_s":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"z(n,k,j,i) = eta(n,j,i)*(1+s(k)) + depth_c*s(k) + (depth(j,i)-depth_c)*C(k) \n where \n C(k)=(1-b)*sinh(a*s(k))/sinh(a) +\n b*(tanh(a*(s(k)+0.5))/(2*tanh(0.5*a)) - 0.5)", - "long_name":"ocean s-coordinate", - "must_have_bounds":"yes", - "out_name":"lev", - "positive":"up", - "requested":"", - "requested_bounds":"", - "standard_name":"ocean_s_coordinate", - "stored_direction":"decreasing", - "tolerance":"", - "type":"", - "units":"", - "valid_max":"0.0", - "valid_min":"-1.0", - "value":"", - "z_bounds_factors":"s: lev_bnds eta: eta depth: depth a: a b: b depth_c: depth_c", - "z_factors":"s: lev eta: eta depth: depth a: a_coeff b: b_coeff depth_c: depth_c" - }, - "ocean_sigma":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"z(n,k,j,i) = eta(n,j,i) + sigma(k)*(depth(j,i)+eta(n,j,i))", - "long_name":"ocean sigma coordinate", - "must_have_bounds":"yes", - "out_name":"lev", - "positive":"up", - "requested":"", - "requested_bounds":"", - "standard_name":"ocean_sigma_coordinate", - "stored_direction":"decreasing", - "tolerance":"", - "type":"", - "units":"", - "valid_max":"0.0", - 
"valid_min":"-1.0", - "value":"", - "z_bounds_factors":"sigma: lev_bnds eta: eta depth: depth", - "z_factors":"sigma: lev eta: eta depth: depth" - }, - "ocean_sigma_z":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"for k <= nsigma: z(n,k,j,i) = eta(n,j,i) + sigma(k)*(min(depth_c,depth(j,i))+eta(n,j,i)) ; for k > nsigma: z(n,k,j,i) = zlev(k)", - "long_name":"ocean sigma over z coordinate", - "must_have_bounds":"yes", - "out_name":"lev", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"ocean_sigma_z", - "stored_direction":"", - "tolerance":"", - "type":"", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"sigma: sigma_bnds eta: eta depth: depth depth_c: depth_c nsigma: nsigma zlev: zlev_bnds", - "z_factors":"sigma: sigma eta: eta depth: depth depth_c: depth_c nsigma: nsigma zlev: zlev" - }, - "olayer100m":{ - "axis":"Z", - "bounds_values":"0.0 100.0", - "climatology":"", - "formula":"", - "long_name":"depth", - "must_have_bounds":"no", - "out_name":"depth", - "positive":"down", - "requested":"", - "requested_bounds":"", - "standard_name":"depth", - "stored_direction":"increasing", - "tolerance":"", - "type":"double", - "units":"m", - "valid_max":"100.0", - "valid_min":"0.0", - "value":"50.", - "z_bounds_factors":"", - "z_factors":"" - }, - "oline":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"ocean passage", - "must_have_bounds":"no", - "out_name":"line", - "positive":"", - "requested":[ - "barents_opening", - "bering_strait", - "canadian_archipelago", - "denmark_strait", - "drake_passage", - "english_channel", - "pacific_equatorial_undercurrent", - "faroe_scotland_channel", - "florida_bahamas_strait", - "fram_strait", - "iceland_faroe_channel", - "indonesian_throughflow", - "mozambique_channel", - "taiwan_luzon_straits", - "windward_passage" - ], - "requested_bounds":"", - "standard_name":"region", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "p10":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"pressure", - "must_have_bounds":"no", - "out_name":"plev", - "positive":"down", - "requested":"", - "requested_bounds":"", - "standard_name":"air_pressure", - "stored_direction":"", - "tolerance":"", - "type":"double", - "units":"Pa", - "valid_max":"", - "valid_min":"", - "value":"1000.", - "z_bounds_factors":"", - "z_factors":"" - }, - "p100":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"pressure", - "must_have_bounds":"no", - "out_name":"plev", - "positive":"down", - "requested":"", - "requested_bounds":"", - "standard_name":"air_pressure", - "stored_direction":"", - "tolerance":"", - "type":"double", - "units":"Pa", - "valid_max":"", - "valid_min":"", - "value":"10000.", - "z_bounds_factors":"", - "z_factors":"" - }, - "p1000":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"pressure", - "must_have_bounds":"no", - "out_name":"plev", - "positive":"down", - "requested":"", - "requested_bounds":"", - "standard_name":"air_pressure", - "stored_direction":"", - "tolerance":"", - "type":"double", - "units":"Pa", - "valid_max":"", - "valid_min":"", - "value":"100000.", - "z_bounds_factors":"", - "z_factors":"" - }, - "p200":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"pressure", 
- "must_have_bounds":"no", - "out_name":"plev", - "positive":"down", - "requested":"", - "requested_bounds":"", - "standard_name":"air_pressure", - "stored_direction":"", - "tolerance":"", - "type":"double", - "units":"Pa", - "valid_max":"", - "valid_min":"", - "value":"20000.", - "z_bounds_factors":"", - "z_factors":"" - }, - "p220":{ - "axis":"Z", - "bounds_values":"44000.0 0.0", - "climatology":"", - "formula":"", - "long_name":"pressure", - "must_have_bounds":"yes", - "out_name":"plev", - "positive":"down", - "requested":"", - "requested_bounds":"", - "standard_name":"air_pressure", - "stored_direction":"decreasing", - "tolerance":"", - "type":"double", - "units":"Pa", - "valid_max":"", - "valid_min":"", - "value":"22000.", - "z_bounds_factors":"", - "z_factors":"" - }, - "p500":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"pressure", - "must_have_bounds":"no", - "out_name":"plev", - "positive":"down", - "requested":"", - "requested_bounds":"", - "standard_name":"air_pressure", - "stored_direction":"", - "tolerance":"", - "type":"double", - "units":"Pa", - "valid_max":"", - "valid_min":"", - "value":"50000.", - "z_bounds_factors":"", - "z_factors":"" - }, - "p560":{ - "axis":"Z", - "bounds_values":"68000.0 44000.0", - "climatology":"", - "formula":"", - "long_name":"pressure", - "must_have_bounds":"yes", - "out_name":"plev", - "positive":"down", - "requested":"", - "requested_bounds":"", - "standard_name":"air_pressure", - "stored_direction":"decreasing", - "tolerance":"", - "type":"double", - "units":"Pa", - "valid_max":"", - "valid_min":"", - "value":"56000.", - "z_bounds_factors":"", - "z_factors":"" - }, - "p700":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"pressure", - "must_have_bounds":"no", - "out_name":"plev", - "positive":"down", - "requested":"", - "requested_bounds":"", - "standard_name":"air_pressure", - "stored_direction":"", - "tolerance":"", - "type":"double", - "units":"Pa", - "valid_max":"", - "valid_min":"", - "value":"70000.", - "z_bounds_factors":"", - "z_factors":"" - }, - "p840":{ - "axis":"Z", - "bounds_values":"100000.0 68000.0", - "climatology":"", - "formula":"", - "long_name":"pressure", - "must_have_bounds":"yes", - "out_name":"plev", - "positive":"down", - "requested":"", - "requested_bounds":"", - "standard_name":"air_pressure", - "stored_direction":"decreasing", - "tolerance":"", - "type":"double", - "units":"Pa", - "valid_max":"", - "valid_min":"", - "value":"84000.", - "z_bounds_factors":"", - "z_factors":"" - }, - "p850":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"pressure", - "must_have_bounds":"no", - "out_name":"plev", - "positive":"down", - "requested":"", - "requested_bounds":"", - "standard_name":"air_pressure", - "stored_direction":"", - "tolerance":"", - "type":"double", - "units":"Pa", - "valid_max":"", - "valid_min":"", - "value":"85000.", - "z_bounds_factors":"", - "z_factors":"" - }, - "pl700":{ - "axis":"Z", - "bounds_values":"85000.0 60000.0", - "climatology":"", - "formula":"", - "long_name":"pressure", - "must_have_bounds":"yes", - "out_name":"plev", - "positive":"down", - "requested":"", - "requested_bounds":"", - "standard_name":"air_pressure", - "stored_direction":"", - "tolerance":"", - "type":"double", - "units":"Pa", - "valid_max":"", - "valid_min":"", - "value":"70000.", - "z_bounds_factors":"", - "z_factors":"" - }, - "plev10":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - 
"formula":"", - "long_name":"pressure", - "must_have_bounds":"no", - "out_name":"plev", - "positive":"down", - "requested":[ - "100000.", - "85000.", - "70000.", - "50000.", - "25000.", - "15000.", - "10000.", - "7000.", - "5000.", - "1000." - ], - "requested_bounds":"", - "standard_name":"air_pressure", - "stored_direction":"decreasing", - "tolerance":"", - "type":"double", - "units":"Pa", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "plev19":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"pressure", - "must_have_bounds":"no", - "out_name":"plev", - "positive":"down", - "requested":[ - "100000.", - "92500.", - "85000.", - "70000.", - "60000.", - "50000.", - "40000.", - "30000.", - "25000.", - "20000.", - "15000.", - "10000.", - "7000.", - "5000.", - "3000.", - "2000.", - "1000.", - "500.", - "100." - ], - "requested_bounds":"", - "standard_name":"air_pressure", - "stored_direction":"decreasing", - "tolerance":"", - "type":"double", - "units":"Pa", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "plev23":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"pressure", - "must_have_bounds":"no", - "out_name":"plev", - "positive":"down", - "requested":[ - "100000.", - "92500.", - "85000.", - "70000.", - "60000.", - "50000.", - "40000.", - "30000.", - "25000.", - "20000.", - "15000.", - "10000.", - "7000.", - "5000.", - "3000.", - "2000.", - "1000.", - "700.", - "500.", - "300.", - "200.", - "100.", - "40." - ], - "requested_bounds":"", - "standard_name":"air_pressure", - "stored_direction":"decreasing", - "tolerance":"", - "type":"double", - "units":"Pa", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "plev27":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"pressure", - "must_have_bounds":"no", - "out_name":"plev", - "positive":"down", - "requested":[ - "100000.", - "97500.", - "95000.", - "92500.", - "90000.", - "87500.", - "85000.", - "82500.", - "80000.", - "77500.", - "75000.", - "70000.", - "65000.", - "60000.", - "55000.", - "50000.", - "45000.", - "40000.", - "35000.", - "30000.", - "25000.", - "22500.", - "20000.", - "17500.", - "15000.", - "12500.", - "10000." - ], - "requested_bounds":"", - "standard_name":"air_pressure", - "stored_direction":"decreasing", - "tolerance":"", - "type":"double", - "units":"Pa", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "plev3":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"pressure", - "must_have_bounds":"no", - "out_name":"plev", - "positive":"down", - "requested":[ - "85000.", - "50000.", - "25000." 
- ], - "requested_bounds":"", - "standard_name":"air_pressure", - "stored_direction":"decreasing", - "tolerance":"", - "type":"double", - "units":"Pa", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "plev39":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"pressure", - "must_have_bounds":"no", - "out_name":"plev", - "positive":"down", - "requested":[ - "100000.", - "92500.", - "85000.", - "70000.", - "60000.", - "50000.", - "40000.", - "30000.", - "25000.", - "20000.", - "17000.", - "15000.", - "13000.", - "11500.", - "10000.", - "9000.", - "8000.", - "7000.", - "5000.", - "3000.", - "2000.", - "1500.", - "1000.", - "700.", - "500.", - "300.", - "200.", - "150.", - "100.", - "70.", - "50.", - "40.", - "30.", - "20.", - "15.", - "10.", - "7.", - "5.", - "3." - ], - "requested_bounds":"", - "standard_name":"air_pressure", - "stored_direction":"decreasing", - "tolerance":"", - "type":"double", - "units":"Pa", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "plev3h":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"pressure", - "must_have_bounds":"no", - "out_name":"plev", - "positive":"down", - "requested":[ - "10000.", - "1000.", - "100." - ], - "requested_bounds":"", - "standard_name":"air_pressure", - "stored_direction":"decreasing", - "tolerance":"", - "type":"double", - "units":"Pa", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "plev4":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"pressure", - "must_have_bounds":"no", - "out_name":"plev", - "positive":"down", - "requested":[ - "92500.", - "85000.", - "50000.", - "25000." - ], - "requested_bounds":"", - "standard_name":"air_pressure", - "stored_direction":"decreasing", - "tolerance":"", - "type":"double", - "units":"Pa", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "plev7":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"pressure", - "must_have_bounds":"yes", - "out_name":"plev", - "positive":"down", - "requested":[ - "90000.", - "74000.", - "62000.", - "50000.", - "37500.", - "24500.", - "9000." - ], - "requested_bounds":[ - "100000.", - "80000.", - "80000.", - "68000.", - "68000.", - "56000.", - "56000.", - "44000.", - "44000.", - "31000.", - "31000.", - "18000.", - "18000.", - " 0." - ], - "standard_name":"air_pressure", - "stored_direction":"decreasing", - "tolerance":"0.001", - "type":"double", - "units":"Pa", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "plev7c":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"pressure", - "must_have_bounds":"yes", - "out_name":"plev", - "positive":"down", - "requested":[ - "90000.", - "74000.", - "62000.", - "50000.", - "37500.", - "24500.", - "9000." 
- ], - "requested_bounds":[ - "100000.0", - "80000.0", - "80000.0", - "68000.0", - "68000.0", - "56000.0", - "56000.0", - "44000.0", - "44000.0", - "31000.0", - "31000.0", - "18000.0", - "18000.0", - "0.0" - ], - "standard_name":"air_pressure", - "stored_direction":"decreasing", - "tolerance":"", - "type":"double", - "units":"Pa", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "plev7h":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"pressure", - "must_have_bounds":"no", - "out_name":"plev", - "positive":"down", - "requested":[ - "92500.", - "85000.", - "70000.", - "60000.", - "50000.", - "25000.", - "5000." - ], - "requested_bounds":"", - "standard_name":"air_pressure", - "stored_direction":"decreasing", - "tolerance":"", - "type":"double", - "units":"Pa", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "plev8":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"pressure", - "must_have_bounds":"no", - "out_name":"plev", - "positive":"down", - "requested":[ - "100000.", - "85000.", - "70000.", - "50000.", - "25000.", - "10000.", - "5000.", - "1000." - ], - "requested_bounds":"", - "standard_name":"air_pressure", - "stored_direction":"decreasing", - "tolerance":"", - "type":"double", - "units":"Pa", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "rho":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"potential density referenced to 2000 dbar", - "must_have_bounds":"yes", - "out_name":"rho", - "positive":"down", - "requested":"", - "requested_bounds":"", - "standard_name":"sea_water_potential_density", - "stored_direction":"increasing", - "tolerance":"", - "type":"double", - "units":"kg m-3", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "scatratio":{ - "axis":"", - "bounds_values":"0.0 0.01 1.2 3.0 5.0 7.0 10.0 15.0 20.0 25.0 30.0 40.0 50.0 60.0 80.0 100000.0", - "climatology":"", - "formula":"", - "long_name":"lidar backscattering ratio", - "must_have_bounds":"yes", - "out_name":"scatratio", - "positive":"", - "requested":[ - "0.005", - "0.605", - "2.1", - "4.", - "6.", - "8.5", - "12.5", - "17.5", - "22.5", - "27.5", - "35.", - "45.", - "55.", - "70.", - "50040." 
- ], - "requested_bounds":[ - "0.0", - "0.01", - "0.01", - "1.2", - "1.2", - "3.0", - "3.0", - "5.0", - "5.0", - "7.0", - "7.0", - "10.0", - "10.0", - "15.0", - "15.0", - "20.0", - "20.0", - "25.0", - "25.0", - "30.0", - "30.0", - "40.0", - "40.0", - "50.0", - "50.0", - "60.0", - "60.0", - "80.0", - "80.0", - "100000.0" - ], - "standard_name":"backscattering_ratio", - "stored_direction":"increasing", - "tolerance":"", - "type":"double", - "units":"1", - "valid_max":"", - "valid_min":"", - "value":"0.005, 0.605, 2.1, 4, 6, 8.5, 12.5, 17.5, 22.5, 27.5, 35, 45, 55, 70, 50040", - "z_bounds_factors":"", - "z_factors":"" - }, - "sdepth":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"depth", - "must_have_bounds":"yes", - "out_name":"depth", - "positive":"down", - "requested":"", - "requested_bounds":"", - "standard_name":"depth", - "stored_direction":"increasing", - "tolerance":"", - "type":"double", - "units":"m", - "valid_max":"200.0", - "valid_min":"0.0", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "sdepth1":{ - "axis":"Z", - "bounds_values":"0.0 0.1", - "climatology":"", - "formula":"", - "long_name":"depth", - "must_have_bounds":"yes", - "out_name":"depth", - "positive":"down", - "requested":"", - "requested_bounds":"", - "standard_name":"depth", - "stored_direction":"increasing", - "tolerance":"", - "type":"double", - "units":"m", - "valid_max":"0.2", - "valid_min":"0.0", - "value":"0.05", - "z_bounds_factors":"", - "z_factors":"" - }, - "siline":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"ocean passage", - "must_have_bounds":"no", - "out_name":"line", - "positive":"", - "requested":[ - "fram_strait,", - "canadian_archipelego,", - "barents_opening,", - "bering_strait" - ], - "requested_bounds":"", - "standard_name":"region", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "site":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"site index", - "must_have_bounds":"no", - "out_name":"site", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"", - "stored_direction":"", - "tolerance":"", - "type":"integer", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "smooth_level":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"z = a*ztop + b1*zsurf1 + b2*zsurf2", - "long_name":"atmosphere smooth level vertical (SLEVE) coordinate", - "must_have_bounds":"yes", - "out_name":"lev", - "positive":"up", - "requested":"", - "requested_bounds":"", - "standard_name":"atmosphere_sleve_coordinate", - "stored_direction":"increasing", - "tolerance":"", - "type":"", - "units":"m", - "valid_max":"800000.0", - "valid_min":"-200.0", - "value":"", - "z_bounds_factors":"a: a_bnds b1: b1_bnds b2: b2_bnds ztop: ztop zsurf1: zsurf1 zsurf2: zsurf2", - "z_factors":"a: a b1: b1 b2: b2 ztop: ztop zsurf1: zsurf1 zsurf2: zsurf2" - }, - "snowband":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"Snow Depth Band", - "must_have_bounds":"yes", - "out_name":"snowband", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"surface_snow_thickness", - "stored_direction":"", - "tolerance":"", - "type":"double", - "units":"m", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", 
- "z_factors":"" - }, - "snowdepth":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"depth", - "must_have_bounds":"yes", - "out_name":"depth", - "positive":"down", - "requested":"", - "requested_bounds":"", - "standard_name":"depth", - "stored_direction":"increasing", - "tolerance":"", - "type":"double", - "units":"m", - "valid_max":"200.0", - "valid_min":"0.0", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "soilpools":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"Soil Pools", - "must_have_bounds":"no", - "out_name":"soilpools", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "spectband":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"Spectral Frequency Band", - "must_have_bounds":"yes", - "out_name":"spectband", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"sensor_band_central_radiation_wavenumber", - "stored_direction":"", - "tolerance":"", - "type":"double", - "units":"m-1", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "standard_hybrid_sigma":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"p = a*p0 + b*ps", - "long_name":"hybrid sigma pressure coordinate", - "must_have_bounds":"yes", - "out_name":"lev", - "positive":"down", - "requested":"", - "requested_bounds":"", - "standard_name":"atmosphere_hybrid_sigma_pressure_coordinate", - "stored_direction":"decreasing", - "tolerance":"", - "type":"", - "units":"1", - "valid_max":"1.0", - "valid_min":"0.0", - "value":"", - "z_bounds_factors":"p0: p0 a: a_bnds b: b_bnds ps: ps", - "z_factors":"p0: p0 a: a b: b ps: ps" - }, - "standard_sigma":{ - "axis":"Z", - "bounds_values":"", - "climatology":"", - "formula":"p = ptop + sigma*(ps - ptop)", - "long_name":"sigma coordinate", - "must_have_bounds":"yes", - "out_name":"lev", - "positive":"down", - "requested":"", - "requested_bounds":"", - "standard_name":"atmosphere_sigma_coordinate", - "stored_direction":"decreasing", - "tolerance":"", - "type":"", - "units":"", - "valid_max":"1.0", - "valid_min":"0.0", - "value":"", - "z_bounds_factors":"ptop: ptop sigma: lev_bnds ps: ps", - "z_factors":"ptop: ptop sigma: lev ps: ps" - }, - "sza5":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"solar zenith angle", - "must_have_bounds":"no", - "out_name":"sza", - "positive":"", - "requested":[ - "0.", - "20.", - "40.", - "60.", - "80." - ], - "requested_bounds":"", - "standard_name":"solar_zenith_angle", - "stored_direction":"increasing", - "tolerance":"", - "type":"double", - "units":"degree", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "tau":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"cloud optical thickness", - "must_have_bounds":"yes", - "out_name":"tau", - "positive":"", - "requested":[ - "0.15", - "0.8", - "2.45", - "6.5", - "16.2", - "41.5", - "100." 
- ], - "requested_bounds":[ - "0.0", - "0.3", - "0.3", - "1.3", - "1.3", - "3.6", - "3.6", - "9.4", - "9.4", - "23.0", - "23.0", - "60.0", - "60.0", - "100000.0" - ], - "standard_name":"atmosphere_optical_thickness_due_to_cloud", - "stored_direction":"increasing", - "tolerance":"", - "type":"double", - "units":"1", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "time":{ - "axis":"T", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"time", - "must_have_bounds":"yes", - "out_name":"time", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"time", - "stored_direction":"increasing", - "tolerance":"", - "type":"double", - "units":"days since ?", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "time1":{ - "axis":"T", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"time", - "must_have_bounds":"no", - "out_name":"time", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"time", - "stored_direction":"increasing", - "tolerance":"", - "type":"double", - "units":"days since ?", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "time2":{ - "axis":"T", - "bounds_values":"", - "climatology":"yes", - "formula":"", - "long_name":"time", - "must_have_bounds":"yes", - "out_name":"time", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"time", - "stored_direction":"increasing", - "tolerance":"", - "type":"double", - "units":"days since ?", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "time3":{ - "axis":"T", - "bounds_values":"", - "climatology":"yes", - "formula":"", - "long_name":"time", - "must_have_bounds":"yes", - "out_name":"time", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"time", - "stored_direction":"increasing", - "tolerance":"", - "type":"double", - "units":"days since ?", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "typebare":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"surface type", - "must_have_bounds":"no", - "out_name":"type", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"area_type", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"bare_ground", - "z_bounds_factors":"", - "z_factors":"" - }, - "typeburnt":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"Burnt vegetation area type", - "must_have_bounds":"no", - "out_name":"type", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"area_type", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"burnt_vegetation", - "z_bounds_factors":"", - "z_factors":"" - }, - "typec3pft":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"surface type", - "must_have_bounds":"no", - "out_name":"type", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"area_type", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"c3_plant_functional_types", - "z_bounds_factors":"", - "z_factors":"" - }, - "typec4pft":{ - "axis":"", - 
"bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"surface type", - "must_have_bounds":"no", - "out_name":"type", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"area_type", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"c4_plant_functional_types", - "z_bounds_factors":"", - "z_factors":"" - }, - "typecloud":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"Cloud area type", - "must_have_bounds":"no", - "out_name":"type", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"area_type", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"cloud", - "z_bounds_factors":"", - "z_factors":"" - }, - "typecrop":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"Crop area type", - "must_have_bounds":"no", - "out_name":"type", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"area_type", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"crops", - "z_bounds_factors":"", - "z_factors":"" - }, - "typefis":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"Floating Ice Shelf area type", - "must_have_bounds":"no", - "out_name":"type", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"area_type", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"floating_ice_shelf", - "z_bounds_factors":"", - "z_factors":"" - }, - "typegis":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"Grounded Ice Sheet area type", - "must_have_bounds":"no", - "out_name":"type", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"area_type", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"grounded_ice_sheet", - "z_bounds_factors":"", - "z_factors":"" - }, - "typeland":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"Land area type", - "must_have_bounds":"no", - "out_name":"type", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"area_type", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"land", - "z_bounds_factors":"", - "z_factors":"" - }, - "typeli":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"Land Ice area type", - "must_have_bounds":"no", - "out_name":"type", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"area_type", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"land_ice", - "z_bounds_factors":"", - "z_factors":"" - }, - "typemp":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"Melt pond area type", - "must_have_bounds":"no", - "out_name":"type", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"area_type", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"sea_ice_melt_pond", - "z_bounds_factors":"", - 
"z_factors":"" - }, - "typenatgr":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"Natural grass area type", - "must_have_bounds":"no", - "out_name":"type", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"area_type", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"natural_grasses", - "z_bounds_factors":"", - "z_factors":"" - }, - "typenwd":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"Non-Woody Vegetation area type", - "must_have_bounds":"no", - "out_name":"type", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"area_type", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"non_woody_vegetation", - "z_bounds_factors":"", - "z_factors":"" - }, - "typepasture":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"Pasture area type", - "must_have_bounds":"no", - "out_name":"type", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"area_type", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"pastures", - "z_bounds_factors":"", - "z_factors":"" - }, - "typepdec":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"surface type", - "must_have_bounds":"no", - "out_name":"type", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"area_type", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"primary_deciduous_trees", - "z_bounds_factors":"", - "z_factors":"" - }, - "typepever":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"surface type", - "must_have_bounds":"no", - "out_name":"type", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"area_type", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"primary_evergreen_trees", - "z_bounds_factors":"", - "z_factors":"" - }, - "typeresidual":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"Residual area", - "must_have_bounds":"no", - "out_name":"type", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"residual", - "z_bounds_factors":"", - "z_factors":"" - }, - "typesdec":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"surface type", - "must_have_bounds":"no", - "out_name":"type", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"area_type", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"secondary_decidous_trees", - "z_bounds_factors":"", - "z_factors":"" - }, - "typesea":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"Ocean area type", - "must_have_bounds":"no", - "out_name":"type", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"area_type", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - 
"valid_min":"", - "value":"sea", - "z_bounds_factors":"", - "z_factors":"" - }, - "typesever":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"surface type", - "must_have_bounds":"no", - "out_name":"type", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"area_type", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"secondary_evergreen_trees", - "z_bounds_factors":"", - "z_factors":"" - }, - "typeshrub":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"Shrub area type", - "must_have_bounds":"no", - "out_name":"type", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"area_type", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"shrubs", - "z_bounds_factors":"", - "z_factors":"" - }, - "typesi":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"Sea Ice area type", - "must_have_bounds":"no", - "out_name":"type", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"area_type", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"sea_ice", - "z_bounds_factors":"", - "z_factors":"" - }, - "typetree":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"Tree area type", - "must_have_bounds":"no", - "out_name":"type", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"area_type", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"trees", - "z_bounds_factors":"", - "z_factors":"" - }, - "typetreebd":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"Tree area type (Broadleaf Deciduous)", - "must_have_bounds":"no", - "out_name":"type", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"area_type", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"trees", - "z_bounds_factors":"", - "z_factors":"" - }, - "typetreebe":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"Tree area type (Broadleaf Evergreen)", - "must_have_bounds":"no", - "out_name":"type", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"area_type", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"trees", - "z_bounds_factors":"", - "z_factors":"" - }, - "typetreend":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"Tree area type (Narrowleaf Deciduous)", - "must_have_bounds":"no", - "out_name":"type", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"area_type", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"trees", - "z_bounds_factors":"", - "z_factors":"" - }, - "typetreene":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"Tree area type (Narrowleaf Evergreen)", - "must_have_bounds":"no", - "out_name":"type", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"area_type", - "stored_direction":"", - 
"tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"trees", - "z_bounds_factors":"", - "z_factors":"" - }, - "typeveg":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"Vegetation area type", - "must_have_bounds":"no", - "out_name":"type", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"area_type", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"vegetation", - "z_bounds_factors":"", - "z_factors":"" - }, - "typewetla":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"Wetland", - "must_have_bounds":"no", - "out_name":"type", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"area_type", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "vegtype":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"plant functional type", - "must_have_bounds":"no", - "out_name":"type", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"", - "stored_direction":"", - "tolerance":"", - "type":"character", - "units":"", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "xant":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"__unset__", - "must_have_bounds":"", - "out_name":"xant", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"projection_x_coordinate", - "stored_direction":"", - "tolerance":"", - "type":"double", - "units":"km", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "xgre":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"__unset__", - "must_have_bounds":"", - "out_name":"xgre", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"projection_x_coordinate", - "stored_direction":"", - "tolerance":"", - "type":"double", - "units":"km", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "yant":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"__unset__", - "must_have_bounds":"", - "out_name":"yant", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"projection_y_coordinate", - "stored_direction":"", - "tolerance":"", - "type":"double", - "units":"km", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - }, - "ygre":{ - "axis":"", - "bounds_values":"", - "climatology":"", - "formula":"", - "long_name":"__unset__", - "must_have_bounds":"", - "out_name":"ygre", - "positive":"", - "requested":"", - "requested_bounds":"", - "standard_name":"projection_y_coordinate", - "stored_direction":"", - "tolerance":"", - "type":"double", - "units":"km", - "valid_max":"", - "valid_min":"", - "value":"", - "z_bounds_factors":"", - "z_factors":"" - } - } -} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_formula_terms.json b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_formula_terms.json deleted file mode 100644 index 08da1580cb..0000000000 --- a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_formula_terms.json +++ /dev/null @@ -1,186 +0,0 @@ -{ - 
"formula_entry":{ - "a":{ - "dimensions":"alevel", - "long_name":"vertical coordinate formula term: a(k)", - "out_name":"a", - "type":"double", - "units":"" - }, - "a_bnds":{ - "dimensions":"alevel", - "long_name":"vertical coordinate formula term: a(k+1/2)", - "out_name":"a_bnds", - "type":"double", - "units":"" - }, - "ap":{ - "dimensions":"alevel", - "long_name":"vertical coordinate formula term: ap(k)", - "out_name":"ap", - "type":"double", - "units":"Pa" - }, - "ap_bnds":{ - "dimensions":"alevel", - "long_name":"vertical coordinate formula term: ap(k+1/2)", - "out_name":"ap_bnds", - "type":"double", - "units":"Pa" - }, - "b":{ - "dimensions":"alevel", - "long_name":"vertical coordinate formula term: b(k)", - "out_name":"b", - "type":"double", - "units":"" - }, - "b_bnds":{ - "dimensions":"alevel", - "long_name":"vertical coordinate formula term: b(k+1/2)", - "out_name":"b_bnds", - "type":"double", - "units":"" - }, - "depth":{ - "dimensions":"longitude latitude", - "long_name":"Sea Floor Depth: formula term: thetao", - "out_name":"depth", - "type":"real", - "units":"m" - }, - "depth_c":{ - "dimensions":"", - "long_name":"vertical coordinate formula term: depth_c", - "out_name":"depth_c", - "type":"double", - "units":"" - }, - "eta":{ - "dimensions":"longitude latitude time", - "long_name":"Sea Surface Height formula term: thetao", - "out_name":"eta", - "type":"real", - "units":"m" - }, - "eta2":{ - "dimensions":"longitude latitude time2", - "long_name":"Sea Surface Height formula term: thetao", - "out_name":"eta", - "type":"real", - "units":"m" - }, - "href":{ - "dimensions":"", - "long_name":"vertical coordinate formula term: href", - "out_name":"href", - "type":"double", - "units":"" - }, - "k_c":{ - "dimensions":"", - "long_name":"vertical coordinate formula term: k_c", - "out_name":"k_c", - "type":"integer", - "units":"" - }, - "nsigma":{ - "dimensions":"", - "long_name":"vertical coordinate formula term: nsigma", - "out_name":"nsigma", - "type":"integer", - "units":"" - }, - "orog":{ - "dimensions":"longitude latitude", - "long_name":"Surface Altitude", - "out_name":"orog", - "type":"real", - "units":"m" - }, - "p0":{ - "dimensions":"", - "long_name":"vertical coordinate formula term: reference pressure", - "out_name":"p0", - "type":"", - "units":"Pa" - }, - "ps":{ - "dimensions":"longitude latitude time", - "long_name":"Surface Air Pressure", - "out_name":"ps", - "type":"real", - "units":"Pa" - }, - "ps1":{ - "dimensions":"longitude latitude time1", - "long_name":"vertical coordinate formula term: ps", - "out_name":"ps", - "type":"real", - "units":"Pa" - }, - "ps2":{ - "dimensions":"longitude latitude time2", - "long_name":"vertical coordinate formula term: ps", - "out_name":"ps", - "type":"real", - "units":"Pa" - }, - "ptop":{ - "dimensions":"", - "long_name":"pressure at top of model", - "out_name":"ptop", - "type":"", - "units":"Pa" - }, - "sigma":{ - "dimensions":"olevel", - "long_name":"vertical coordinate formula term: sigma(k)", - "out_name":"sigma", - "type":"double", - "units":"" - }, - "sigma_bnds":{ - "dimensions":"olevel", - "long_name":"vertical coordinate formula term: sigma(k+1/2)", - "out_name":"sigma_bnds", - "type":"double", - "units":"" - }, - "z1":{ - "dimensions":"", - "long_name":"vertical coordinate formula term: z1", - "out_name":"z1", - "type":"double", - "units":"" - }, - "z2":{ - "dimensions":"", - "long_name":"vertical coordinate formula term: z2", - "out_name":"z2", - "type":"double", - "units":"" - }, - "zlev":{ - "dimensions":"olevel", - 
"long_name":"vertical coordinate formula term: zlev(k)", - "out_name":"zlev", - "type":"double", - "units":"" - }, - "zlev_bnds":{ - "dimensions":"olevel", - "long_name":"vertical coordinate formula term: zlev(k+1/2)", - "out_name":"zlev_bnds", - "type":"double", - "units":"" - }, - "ztop":{ - "dimensions":"", - "long_name":"height of top of model", - "out_name":"ztop", - "type":"", - "units":"m" - } - } -} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_fx.json b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_fx.json deleted file mode 100644 index d36c2cae21..0000000000 --- a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_fx.json +++ /dev/null @@ -1,163 +0,0 @@ -{ - "Header":{ - "#dataRequest_specs_version":"01.00.21", - "#mip_era":"CMIP6", - "Conventions":"CF-1.7 ODS-2.1", - "approx_interval":"0.00000", - "cmor_version":"3.2", - "data_specs_version":"2.1.0", - "generic_levels":"alevel", - "int_missing_value":"-2147483648", - "missing_value":"1e20", - "product":"observations", - "realm":"fx", - "table_date":"07 March 2018", - "table_id":"Table obs4MIPs_fx" - }, - "variable_entry":{ - "areacella":{ - "cell_measures":"", - "cell_methods":"area: sum", - "comment":"For atmospheres with more than 1 mesh (e.g., staggered grids), report areas that apply to surface vertical fluxes of energy.", - "dimensions":"longitude latitude", - "frequency":"fx", - "long_name":"Grid-Cell Area for Atmospheric Variables", - "modeling_realm":"atmos land", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"areacella", - "positive":"", - "standard_name":"cell_area", - "type":"real", - "units":"m2", - "valid_max":"", - "valid_min":"" - }, - "areacellr":{ - "cell_measures":"", - "cell_methods":"area: sum", - "comment":"For river routing model, if grid differs from the atmospheric grid.", - "dimensions":"longitude latitude", - "frequency":"fx", - "long_name":"Grid-Cell Area for River Model Variables", - "modeling_realm":"land", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"areacellr", - "positive":"", - "standard_name":"cell_area", - "type":"real", - "units":"m2", - "valid_max":"", - "valid_min":"" - }, - "mrsofc":{ - "cell_measures":"area: areacella", - "cell_methods":"area: mean where land", - "comment":"The bulk water content retained by the soil at -33 J/kg of suction pressure, expressed as mass per unit land area; report as missing where there is no land", - "dimensions":"longitude latitude", - "frequency":"fx", - "long_name":"Capacity of Soil to Store Water", - "modeling_realm":"land", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"mrsofc", - "positive":"", - "standard_name":"soil_moisture_content_at_field_capacity", - "type":"real", - "units":"kg m-2", - "valid_max":"", - "valid_min":"" - }, - "orog":{ - "cell_measures":"area: areacella", - "cell_methods":"area: mean", - "comment":"The surface called 'surface' means the lower boundary of the atmosphere. Altitude is the (geometric) height above the geoid, which is the reference geopotential surface. 
The geoid is similar to mean sea level.", - "dimensions":"longitude latitude", - "frequency":"fx", - "long_name":"Surface Altitude", - "modeling_realm":"land", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"orog", - "positive":"", - "standard_name":"surface_altitude", - "type":"real", - "units":"m", - "valid_max":"", - "valid_min":"" - }, - "rootd":{ - "cell_measures":"area: areacella", - "cell_methods":"area: mean", - "comment":"report the maximum soil depth reachable by plant roots (if defined in model), i.e., the maximum soil depth from which they can extract moisture; report as *missing* where the land fraction is 0.", - "dimensions":"longitude latitude", - "frequency":"fx", - "long_name":"Maximum Root Depth", - "modeling_realm":"land", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"rootd", - "positive":"", - "standard_name":"root_depth", - "type":"real", - "units":"m", - "valid_max":"", - "valid_min":"" - }, - "sftgif":{ - "cell_measures":"area: areacella", - "cell_methods":"area: mean", - "comment":"Fraction of grid cell covered by land ice (ice sheet, ice shelf, ice cap, glacier)", - "dimensions":"longitude latitude typeli", - "frequency":"fx", - "long_name":"Fraction of Grid Cell Covered with Glacier", - "modeling_realm":"land", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sftgif", - "positive":"", - "standard_name":"land_ice_area_fraction", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "sftlf":{ - "cell_measures":"area: areacella", - "cell_methods":"area: mean", - "comment":"Please express 'X_area_fraction' as the percentage of horizontal area occupied by X.", - "dimensions":"longitude latitude typeland", - "frequency":"fx", - "long_name":"Land Area Fraction", - "modeling_realm":"atmos", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"sftlf", - "positive":"", - "standard_name":"land_area_fraction", - "type":"real", - "units":"%", - "valid_max":"", - "valid_min":"" - }, - "zfull":{ - "cell_measures":"area: areacella", - "cell_methods":"area: mean", - "comment":"", - "dimensions":"longitude latitude alevel", - "frequency":"fx", - "long_name":"Altitude of Model Full-Levels", - "modeling_realm":"atmos", - "ok_max_mean_abs":"", - "ok_min_mean_abs":"", - "out_name":"zfull", - "positive":"", - "standard_name":"height_above_reference_ellipsoid", - "type":"real", - "units":"m", - "valid_max":"", - "valid_min":"" - } - } -} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_grids.json b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_grids.json deleted file mode 100644 index 276beaf467..0000000000 --- a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_grids.json +++ /dev/null @@ -1,155 +0,0 @@ -{ - "Header":{ - "#dataRequest_specs_version":"01.00.21", - "Conventions":"CF-1.7 ODS-2.1", - "cmor_version":"3.2", - "data_specs_version":"2.1.0", - "missing_value":"1e20", - "product":"observations", - "table_date":"07 March 2018", - "table_id":"Table obs4MIPs_grids" - }, - "axis_entry":{ - "grid_latitude":{ - "axis":"Y", - "long_name":"latitude in rotated pole grid", - "out_name":"rlat", - "standard_name":"grid_latitude", - "type":"", - "units":"degrees" - }, - "grid_longitude":{ - "axis":"X", - "long_name":"longitude in rotated pole grid", - "out_name":"rlon", - "standard_name":"grid_longitude", - "type":"", - "units":"degrees" - }, - "i_index":{ - "axis":"", - "long_name":"cell index along first dimension", - "out_name":"i", - "standard_name":"", - "type":"integer", - 
"units":"1" - }, - "j_index":{ - "axis":"", - "long_name":"cell index along second dimension", - "out_name":"j", - "standard_name":"", - "type":"integer", - "units":"1" - }, - "k_index":{ - "axis":"", - "long_name":"cell index along third dimension", - "out_name":"k", - "standard_name":"", - "type":"integer", - "units":"1" - }, - "l_index":{ - "axis":"", - "long_name":"cell index along fourth dimension", - "out_name":"l", - "standard_name":"", - "type":"integer", - "units":"1" - }, - "m_index":{ - "axis":"", - "long_name":"cell index along fifth dimension", - "out_name":"m", - "standard_name":"", - "type":"integer", - "units":"1" - }, - "vertices":{ - "axis":"", - "long_name":"", - "out_name":"", - "standard_name":"", - "type":"", - "units":"" - }, - "x":{ - "axis":"X", - "long_name":"x coordinate of projection", - "out_name":"", - "standard_name":"projection_x_coordinate", - "type":"", - "units":"m" - }, - "x_deg":{ - "axis":"X", - "long_name":"x coordinate of projection", - "out_name":"x", - "standard_name":"projection_x_coordinate", - "type":"", - "units":"degrees" - }, - "y":{ - "axis":"Y", - "long_name":"y coordinate of projection", - "out_name":"", - "standard_name":"projection_y_coordinate", - "type":"", - "units":"m" - }, - "y_deg":{ - "axis":"Y", - "long_name":"y coordinate of projection", - "out_name":"y", - "standard_name":"projection_y_coordinate", - "type":"", - "units":"degrees" - } - }, - "mapping_entry":{ - "sample_user_mapping":{ - "coordinates":"rlon rlat", - "parameter1":"false_easting", - "parameter2":"false_northing" - } - }, - "variable_entry":{ - "latitude":{ - "dimensions":"longitude latitude", - "long_name":"", - "out_name":"latitude", - "standard_name":"", - "units":"degrees_north", - "valid_max":"90.0", - "valid_min":"-90.0" - }, - "longitude":{ - "dimensions":"longitude latitude", - "long_name":"", - "out_name":"longitude", - "standard_name":"", - "units":"degrees_east", - "valid_max":"360.0", - "valid_min":"0.0" - }, - "vertices_latitude":{ - "dimensions":"vertices longitude latitude", - "long_name":"", - "out_name":"vertices_latitude", - "standard_name":"", - "units":"degrees_north", - "valid_max":"90.0", - "valid_min":"-90.0" - }, - "vertices_longitude":{ - "dimensions":"vertices longitude latitude", - "long_name":"", - "out_name":"vertices_longitude", - "standard_name":"", - "units":"degrees_east", - "valid_max":"360.0", - "valid_min":"0.0" - } - } -} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_monNobs.json b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_monNobs.json deleted file mode 100644 index da00c57753..0000000000 --- a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_monNobs.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "Header":{ - "#mip_era":"CMIP6", - "Conventions":"CF-1.7 ODS-2.1", - "approx_interval":"30.00000", - "cmor_version":"3.2", - "data_specs_version":"2.1.0", - "generic_levels":"alevel alevhalf", - "int_missing_value":"-2147483648", - "missing_value":"1e20", - "product":"observations", - "realm":"aerosol atmos atmosChem land landIce ocean ocnBgchem seaIce", - "table_date":"07 March 2018", - "table_id":"Table obs4MIPs_monNobs" - }, - "variable_entry":{ - "ndviNobs":{ - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Normalized Difference Vegetation Index Number of Observations", - "modeling_realm":"atmos", - "out_name":"ndviNobs", - "standard_name":"number_of_observations", - "type":"integer", - "units":"1" - } - } -} \ No newline at end of file diff --git 
a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_monStderr.json b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_monStderr.json deleted file mode 100644 index 3c4b3814b8..0000000000 --- a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_monStderr.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "Header":{ - "#mip_era":"CMIP6", - "Conventions":"CF-1.7 ODS-2.1", - "approx_interval":"30.00000", - "cmor_version":"3.2", - "data_specs_version":"2.1.0", - "generic_levels":"alevel alevhalf", - "int_missing_value":"-2147483648", - "missing_value":"1e20", - "product":"observations", - "realm":"aerosol atmos atmosChem land landIce ocean ocnBgchem seaIce", - "table_date":"07 March 2018", - "table_id":"Table obs4MIPs_monStderr" - }, - "variable_entry":{ - "ndviStderr":{ - "comment":"", - "dimensions":"longitude latitude time", - "frequency":"mon", - "long_name":"Normalized Difference Vegetation Index Standard Error", - "modeling_realm":"atmos", - "out_name":"ndviStderr", - "standard_name":"normalized_difference_vegetation_index standard_error", - "type":"real", - "units":"" - } - } -} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_frequency.json b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_frequency.json deleted file mode 100644 index d749596a35..0000000000 --- a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_frequency.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "frequency":{ - "1hr":"sampled hourly", - "1hrCM":"monthly-mean diurnal cycle resolving each day into 1-hour means", - "1hrPt":"sampled hourly, at specified time point within an hour", - "3hr":"sampled every 3 hours", - "3hrPt":"sampled 3 hourly, at specified time point within the time period", - "6hr":"sampled every 6 hours", - "6hrPt":"sampled 6 hourly, at specified time point within the time period", - "day":"daily mean samples", - "dec":"decadal mean samples", - "fx":"fixed (time invariant) field", - "mon":"monthly mean samples", - "monC":"monthly climatology computed from monthly mean samples", - "monPt":"sampled monthly, at specified time point within the time period", - "subhrPt":"sampled sub-hourly, at specified time point within an hour", - "yr":"annual mean samples", - "yrPt":"sampled yearly, at specified time point within the time period" - } -} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_grid_label.json b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_grid_label.json deleted file mode 100644 index fa079918e2..0000000000 --- a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_grid_label.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "grid_label":{ - "gm":"global mean data", - "gn":"data reported on a model's native grid", - "gna":"data reported on a native grid in the region of Antarctica", - "gng":"data reported on a native grid in the region of Greenland", - "gnz":"zonal mean data reported on a model's native latitude grid", - "gr":"regridded data reported on the data provider's preferred target grid", - "gr1":"regridded data reported on a grid other than the native grid and other than the preferred target grid", - "gr1a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", - "gr1g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", - "gr1z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", - "gr2":"regridded data reported on a grid other than the native 
grid and other than the preferred target grid", - "gr2a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", - "gr2g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", - "gr2z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", - "gr3":"regridded data reported on a grid other than the native grid and other than the preferred target grid", - "gr3a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", - "gr3g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", - "gr3z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", - "gr4":"regridded data reported on a grid other than the native grid and other than the preferred target grid", - "gr4a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", - "gr4g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", - "gr4z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", - "gr5":"regridded data reported on a grid other than the native grid and other than the preferred target grid", - "gr5a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", - "gr5g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", - "gr5z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", - "gr6":"regridded data reported on a grid other than the native grid and other than the preferred target grid", - "gr6a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", - "gr6g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", - "gr6z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", - "gr7":"regridded data reported on a grid other than the native grid and other than the preferred target grid", - "gr7a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", - "gr7g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", - "gr7z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", - "gr8":"regridded data reported on a grid other than the native grid and other than the preferred target grid", - "gr8a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", - "gr8g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred 
target grid", - "gr8z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", - "gr9":"regridded data reported on a grid other than the native grid and other than the preferred target grid", - "gr9a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", - "gr9g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", - "gr9z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", - "gra":"regridded data in the region of Antarctica reported on the data provider's preferred target grid", - "grg":"regridded data in the region of Greenland reported on the data provider's preferred target grid", - "grz":"regridded zonal mean data reported on the data provider's preferred latitude target grid" - } -} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_institution_id.json b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_institution_id.json deleted file mode 100644 index 8ac6bca123..0000000000 --- a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_institution_id.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "institution_id":{ - "DWD":"Deutscher Wetterdienst, Offenbach 63067, Germany", - "NASA-JPL":"NASA's Jet Propulsion Laboratory, Pasadena, CA 91109, USA", - "NOAA-NCEI":"NOAA's National Centers for Environmental Information, Asheville, NC 28801, USA", - "PCMDI":"Program for Climate Model Diagnosis and Intercomparison, Lawrence Livermore National Laboratory, Livermore, CA 94550, USA", - "RSS":"Remote Sensing Systems, Santa Rosa, CA 95401, USA" - } -} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_license.json b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_license.json deleted file mode 100644 index b4060ed779..0000000000 --- a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_license.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "license":"Data in this file produced by is licensed under a Creative Commons Attribution-ShareAlike 4.0 International License (https://creativecommons.org/licenses/). Use of the data must be acknowledged following guidelines found at . 
Further information about this data, including some limitations, can be found via .)" -} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_nominal_resolution.json b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_nominal_resolution.json deleted file mode 100644 index 4de5620573..0000000000 --- a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_nominal_resolution.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "nominal_resolution":[ - "0.5 km", - "1 km", - "10 km", - "100 km", - "1000 km", - "10000 km", - "1x1 degree", - "2.5 km", - "25 km", - "250 km", - "2500 km", - "5 km", - "50 km", - "500 km", - "5000 km" - ] -} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_product.json b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_product.json deleted file mode 100644 index 5b74000b98..0000000000 --- a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_product.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "product":[ - "observations", - "reanalysis" - ] -} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_realm.json b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_realm.json deleted file mode 100644 index ffe16ec257..0000000000 --- a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_realm.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "realm":[ - "aerosol", - "atmos", - "atmosChem", - "land", - "landIce", - "ocean", - "ocnBgchem", - "seaIce" - ] -} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_region.json b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_region.json deleted file mode 100644 index 69c23d6444..0000000000 --- a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_region.json +++ /dev/null @@ -1,72 +0,0 @@ -{ - "region":[ - "africa", - "antarctica", - "arabian_sea", - "aral_sea", - "arctic_ocean", - "asia", - "atlantic_ocean", - "australia", - "baltic_sea", - "barents_opening", - "barents_sea", - "beaufort_sea", - "bellingshausen_sea", - "bering_sea", - "bering_strait", - "black_sea", - "canadian_archipelago", - "caribbean_sea", - "caspian_sea", - "central_america", - "chukchi_sea", - "contiguous_united_states", - "denmark_strait", - "drake_passage", - "east_china_sea", - "english_channel", - "eurasia", - "europe", - "faroe_scotland_channel", - "florida_bahamas_strait", - "fram_strait", - "global", - "global_land", - "global_ocean", - "great_lakes", - "greenland", - "gulf_of_alaska", - "gulf_of_mexico", - "hudson_bay", - "iceland_faroe_channel", - "indian_ocean", - "indo_pacific_ocean", - "indonesian_throughflow", - "irish_sea", - "lake_baykal", - "lake_chad", - "lake_malawi", - "lake_tanganyika", - "lake_victoria", - "mediterranean_sea", - "mozambique_channel", - "north_america", - "north_sea", - "norwegian_sea", - "pacific_equatorial_undercurrent", - "pacific_ocean", - "persian_gulf", - "red_sea", - "ross_sea", - "sea_of_japan", - "sea_of_okhotsk", - "south_america", - "south_china_sea", - "southern_ocean", - "taiwan_luzon_straits", - "weddell_sea", - "windward_passage", - "yellow_sea" - ] -} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_required_global_attributes.json b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_required_global_attributes.json deleted file mode 100644 index 1c78c5af89..0000000000 --- a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_required_global_attributes.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "required_global_attributes":[ - "Conventions", - "activity_id", - "contact", - "creation_date", - "data_specs_version", - "frequency", - "grid", - "grid_label", - "institution", - 
"institution_id", - "license", - "nominal_resolution", - "product", - "realm", - "source_id", - "table_id", - "tracking_id", - "variable_id", - "variant_label" - ] -} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_source_id.json b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_source_id.json deleted file mode 100644 index 21ea8cef00..0000000000 --- a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_source_id.json +++ /dev/null @@ -1,353 +0,0 @@ -{ - "source_id":{ - "AIRS-1-0":{ - "institution_id":"NASA-JPL", - "region":[ - "global" - ], - "release_year":"2011", - "source_description":"Atmospheric Infrared Sounder", - "source_label":"AIRS", - "source_name":"AIRS", - "source_type":"satellite_retrieval", - "source_variables":[ - "hus", - "ta" - ], - "source_version_number":"1.0" - }, - "Aura-MLS-v04-2":{ - "institution_id":"NASA-JPL", - "region":[ - "global" - ], - "release_year":"2018", - "source_description":"EOS Aura Microwave Limb Sounder", - "source_label":"Aura-MLS", - "source_name":"Aura MLS", - "source_type":"satellite_retrieval", - "source_variables":[ - "cli", - "hus", - "ta" - ], - "source_version_number":"v04.2" - }, - "CMSAF-CLARA-A-2-0":{ - "institution_id":"DWD", - "region":[ - "global" - ], - "release_year":"2017", - "source_description":"CM SAF cLoud, Albedo and surface RAdiation dataset from AVHRR data", - "source_id":"CMSAF-CLARA-A-2-0", - "source_label":"CMSAF-CLARA-A", - "source_name":"CMSAF CLARA A", - "source_type":"satellite_retrieval", - "source_variables":[ - "clCLARA", - "clivi", - "cltCLARA", - "clwCLARA", - "clwtCLARA", - "clwvi", - "pctCLARA", - "rsds", - "rsdscs" - ], - "source_version_number":"2.0" - }, - "CMSAF-HOAPS-4-0":{ - "institution_id":"DWD", - "region":[ - "global_ocean" - ], - "release_year":"2017", - "source_description":"Hamburg Ocean Atmosphere Parameters and fluxes from Satellite data, based on SSM/I and SSMIS aboard DMSP", - "source_id":"CMSAF-HOAPS-4-0", - "source_label":"CMSAF-HOAPS", - "source_name":"CMSAF HOAPS", - "source_type":"satellite_retrieval", - "source_variables":[ - "evspsbl", - "hfls", - "hfss", - "huss", - "pme", - "pr", - "prw", - "sfcWind" - ], - "source_version_number":"4.0" - }, - "CMSAF-SARAH-2-0":{ - "institution_id":"DWD", - "region":[ - "africa", - "atlantic_ocean", - "europe" - ], - "release_year":"2017", - "source_description":"Surface solAr RAdiation data set - Heliosat, based on MVIRI/SEVIRI aboard METEOSAT", - "source_id":"CMSAF-SARAH-2.0", - "source_label":"CMSAF-SARAH", - "source_name":"CMSAF SARAH", - "source_type":"satellite_retrieval", - "source_variables":[ - "rsds" - ], - "source_version_number":"2.0" - }, - "ESACCI-CLOUD-ATSR2-AATSR-2-0":{ - "institution_id":"DWD", - "region":[ - "global" - ], - "release_year":"2017", - "source_description":"Cloud properties derived from ATSR2 and AATSR (aboard ERS2 and ENVISAT) measurements. This dataset belongs to the ESA Cloud_cci suite of long-term coherent cloud property datasets.", - "source_id":"ESACCI-CLOUD-ATSR2-AATSR-2-0", - "source_label":"ESACCI-CLOUD-ATSR2-AATSR", - "source_name":"ESACCI CLOUD ATSR2 AATSR", - "source_type":"satellite_retrieval", - "source_variables":[ - "clCCI", - "clivi", - "cltCCI", - "clwCCI", - "clwtCCI", - "clwvi", - "pctCCI" - ], - "source_version_number":"2.0" - }, - "ESACCI-CLOUD-AVHRR-AM-2-0":{ - "institution_id":"DWD", - "region":[ - "global" - ], - "release_year":"2017", - "source_description":"Cloud properties derived from AVHRR (aboard NOAA and MetOp AM) measurements. 
This dataset belongs to the ESA Cloud_cci suite of long-term coherent cloud property datasets", - "source_id":"ESACCI-CLOUD-AVHRR-AM-2-0", - "source_label":"ESACCI-CLOUD-AVHRR-AM", - "source_name":"ESACCI CLOUD AVHRR AM", - "source_type":"satellite_retrieval", - "source_variables":[ - "clCCI", - "clivi", - "cltCCI", - "clwCCI", - "clwtCCI", - "clwvi", - "pctCCI" - ], - "source_version_number":"2.0" - }, - "ESACCI-CLOUD-AVHRR-PM-2-0":{ - "institution_id":"DWD", - "region":[ - "global" - ], - "release_year":"2017", - "source_description":"Cloud properties derived from AVHRR (aboard NOAA and MetOp PM) measurements. This dataset belongs to the ESA Cloud_cci suite of long-term coherent cloud property datasets", - "source_id":"ESACCI-CLOUD-AVHRR-PM-2-0", - "source_label":"ESACCI-CLOUD-AVHRR-PM", - "source_name":"ESACCI CLOUD AVHRR PM", - "source_type":"satellite_retrieval", - "source_variables":[ - "clCCI", - "clivi", - "cltCCI", - "clwCCI", - "clwtCCI", - "clwvi", - "pctCCI" - ], - "source_version_number":"2.0" - }, - "ESACCI-CLOUD-MERIS-AATSR-2-0":{ - "institution_id":"DWD", - "region":[ - "global" - ], - "release_year":"2017", - "source_description":"Cloud properties derived from MERIS and AATSR (aboard ENVISAT) measurements. This dataset belongs to the ESA Cloud_cci suite of long-term coherent cloud property datasets.", - "source_id":"ESACCI-CLOUD-MERIS-AATSR-2-0", - "source_label":"ESACCI-CLOUD-MERIS-AATSR", - "source_name":"ESACCI CLOUD MERIS AATSR", - "source_type":"satellite_retrieval", - "source_variables":[ - "clCCI", - "clivi", - "cltCCI", - "clwCCI", - "clwtCCI", - "clwvi", - "pctCCI" - ], - "source_version_number":"2.0" - }, - "GNSS-RO-1-3":{ - "institution_id":"NASA-JPL", - "region":[ - "global" - ], - "release_year":"2016", - "source_description":"Global Navigation Satellite Systems Radio Occultation", - "source_label":"GNSS-RO", - "source_name":"GNSS RO", - "source_type":"satellite_retrieval", - "source_variables":[ - "ta", - "zg" - ], - "source_version_number":"1.3" - }, - "NOAA-NCEI-AVHRR-NDVI-4-0":{ - "institution_id":"NOAA-NCEI", - "region":[ - "global_land" - ], - "release_year":"2013", - "source_description":"Normalized Difference Vegetation Index", - "source_id":"NOAA-NCEI-AVHRR-NDVI-4-0", - "source_label":"NOAA-NCEI-AVHRR-NDVI", - "source_name":"NOAA NCEI AVHRR NDVI", - "source_type":"satellite_retrieval", - "source_variables":[ - "ndvi" - ], - "source_version_number":"4.0" - }, - "NOAA-NCEI-ERSST-4-0":{ - "institution_id":"NOAA-NCEI", - "region":[ - "global_ocean" - ], - "release_year":"2015", - "source_description":"Extended Reconstructed Sea Surface Temperatures", - "source_id":"NOAA-NCEI-ERSST-4-0", - "source_label":"NOAA-NCEI-ERSST", - "source_name":"NOAA NCEI ERSST", - "source_type":"gridded_insitu", - "source_variables":[ - "tos" - ], - "source_version_number":"4.0" - }, - "NOAA-NCEI-FAPAR-4-0":{ - "institution_id":"NOAA-NCEI", - "region":[ - "global_land" - ], - "release_year":"2014", - "source_description":"AVHRR Fraction of Absorbed Photosynthetically Active Radiation", - "source_id":"NOAA-NCEI-FAPAR-4-0", - "source_label":"NOAA-NCEI-FAPAR", - "source_name":"NOAA NCEI FAPAR", - "source_type":"satellite_retrieval", - "source_variables":[ - "fapar" - ], - "source_version_number":"4.0" - }, - "NOAA-NCEI-GridSat-4-0":{ - "institution_id":"NOAA-NCEI", - "region":[ - "global" - ], - "release_year":"2015", - "source_description":"Gridded Satellite ISCCP B1 11 Micron Brightness Temperature", - "source_id":"NOAA-NCEI-GridSat-4-0", - "source_label":"NOAA-NCEI-GridSat", - 
"source_name":"NOAA NCEI GridSat", - "source_type":"satellite_retrieval", - "source_variables":[ - "ttbr" - ], - "source_version_number":"4.0" - }, - "NOAA-NCEI-LAI-4-0":{ - "institution_id":"NOAA-NCEI", - "region":[ - "global_land" - ], - "release_year":"2014", - "source_description":"AVHRR Leaf Area Index", - "source_id":"NOAA-NCEI-LAI-4-0", - "source_label":"NOAA-NCEI-LAI", - "source_name":"NOAA NCEI LAI", - "source_type":"satellite_retrieval", - "source_variables":[ - "lai" - ], - "source_version_number":"4.0" - }, - "NOAA-NCEI-PERSIANN-1-1":{ - "institution_id":"NOAA-NCEI", - "region":[ - "global" - ], - "release_year":"2014", - "source_description":"Precipitation Estimation from Remotely Sensed Information using Artificial Neural Network", - "source_id":"NOAA-NCEI-PERSIANN-1-1", - "source_label":"NOAA-NCEI-PERSIANN", - "source_name":"NOAA NCEI PERSIANN", - "source_type":"satellite_retrieval", - "source_variables":[ - "pr" - ], - "source_version_number":"1.1" - }, - "NOAA-NCEI-SeaWinds-1-2":{ - "institution_id":"NOAA-NCEI", - "region":[ - "global_ocean" - ], - "release_year":"2008", - "source_description":"Blended Sea Surface Winds", - "source_id":"NOAA-NCEI-SeaWinds-1-2", - "source_label":"NOAA-NCEI-SeaWinds", - "source_name":"NOAA NCEI SeaWinds", - "source_type":"satellite_blended", - "source_variables":[ - "sfcWind", - "uas", - "vas" - ], - "source_version_number":"1.2" - }, - "QuikSCAT-v20110531":{ - "institution_id":"NASA-JPL", - "region":[ - "global" - ], - "release_year":"2011", - "source_description":"QuikSCAT winds monthly climatology derived from QuikSCAT L2B using the 2006 model function and processing algorithms.", - "source_label":"QuikSCAT", - "source_name":"QuikSCAT", - "source_type":"satellite_retrieval", - "source_variables":[ - "sfcWind", - "uas", - "vas" - ], - "source_version_number":"v20110531" - }, - "REMSS-PRW-6-6-0":{ - "institution_id":"RSS", - "region":[ - "global" - ], - "release_year":"2017", - "source_description":"Water Vapor Path", - "source_id":"REMSS-PRW-6-6-0", - "source_label":"REMSS-PRW", - "source_name":"REMSS PRW", - "source_type":"satellite_blended", - "source_variables":[ - "prw" - ], - "source_version_number":"6.6.0" - } - } -} diff --git a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_source_type.json b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_source_type.json deleted file mode 100644 index 28d98f5756..0000000000 --- a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_source_type.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "source_type":{ - "gridded_insitu":"gridded product based on measurements collected from in-situ instruments", - "reanalysis":"gridded product generated from a model reanalysis based on in-situ instruments and possibly satellite measurements", - "satellite_blended":"gridded product based on both in-situ instruments and satellite measurements", - "satellite_retrieval":"gridded product based on satellite measurements" - } -} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_table_id.json b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_table_id.json deleted file mode 100644 index bbb23e82c8..0000000000 --- a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_table_id.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "table_id":[ - "obs4MIPs_Aday", - "obs4MIPs_Amon", - "obs4MIPs_Lmon", - "obs4MIPs_Omon", - "obs4MIPs_SImon", - "obs4MIPs_fx", - "obs4MIPs_monNobs", - "obs4MIPs_monStderr" - ] -} \ No newline at end of file diff --git a/esmvaltool/utils/cmorizers/obs/__init__.py b/esmvaltool/cmorizers/data/__init__.py similarity index 100% 
rename from esmvaltool/utils/cmorizers/obs/__init__.py rename to esmvaltool/cmorizers/data/__init__.py diff --git a/esmvaltool/cmorizers/data/cmor_config/AERONET.yml b/esmvaltool/cmorizers/data/cmor_config/AERONET.yml new file mode 100644 index 0000000000..0e6ebf2934 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/AERONET.yml @@ -0,0 +1,28 @@ +--- +# Filename +filename: 'AOD_Level20_Monthly_V3.tar.gz' + +# Common global attributes for Cmorizer output +attributes: + dataset_id: AERONET + version: 20240406 + tier: 3 + modeling_realm: atmos + project_id: OBS6 + source: 'https://aeronet.gsfc.nasa.gov/new_web/download_all_v3_aod.html' + reference: 'aeronetv3' + comment: + 'Notice to users: this data has recommended guidelines for use and publication, + please refer to https://aeronet.gsfc.nasa.gov/new_web/data_usage.html.' + +# Variables to cmorize +variables: + od440aer: + mip: AERmon + wavelength: 440 + od550aer: + mip: AERmon + wavelength: 551 + od870aer: + mip: AERmon + wavelength: 870 diff --git a/esmvaltool/cmorizers/data/cmor_config/AGCD.yml b/esmvaltool/cmorizers/data/cmor_config/AGCD.yml new file mode 100644 index 0000000000..3b11c8819f --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/AGCD.yml @@ -0,0 +1,36 @@ +--- +# filename: 'agcd_v1-0-1_precip_total_r005_monthly_.*.nc' +filename: 'agcd_{version}_{variable}_{raw_calc}_r005_{freq}_.*.nc' + +attributes: + project_id: OBS6 + dataset_id: AGCD + version: 'v2-0-1' + tier: 2 + modeling_realm: ground + resolution: '005' # '001' available for v2 + source: 'https://dx.doi.org/10.25914/rses-zh67 Australian Bureau of Meteorology (2023), + \Australian Gridded Climate Data ( AGCD ) ; v2.0.1 Snapshot (1900-01-01 to 2022-12-31)' + reference: 'agcd-v201' + comment: 'hosted on NCI (National Computing Infrastructure Australia)' + +variables: + pr: + mip: Amon + raw_long: Lwe Thickness Of Precipitation Amount (mm) + raw_calc: total + freq: monthly # convert daily as well, v1s only + raw: precip + +## variables in AGCD v1 + # tasmax: + # mip: Amon + # raw_long: Daily maximum air temperature, degrees_Celsius, monthly, mean + # raw_calc: mean + # freq: monthly + # raw: tmax + # tasmin: + # mip: Amon + # raw_calc: mean + # freq: monthly + # raw: tmin diff --git a/esmvaltool/cmorizers/data/cmor_config/ANUClimate.yml b/esmvaltool/cmorizers/data/cmor_config/ANUClimate.yml new file mode 100644 index 0000000000..3b5ce35b31 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/ANUClimate.yml @@ -0,0 +1,32 @@ +--- +filename: 'ANUClimate_{version}_{raw}_{freq}_.*.nc' #yyyymm + +attributes: + project_id: OBS6 + dataset_id: ANUClimate + version: 'v2-0' + tier: 3 + modeling_realm: reanaly + # resolution: '0.01 degree' + source: 'https://dx.doi.org/10.25914/60a10aa56dd1b' + reference: 'anuclimate2' + comment: 'hosted on NCI' + +variables: + pr: + mip: Amon + freq: monthly # daily available + raw: rain + + tasmax: + mip: Amon + freq: monthly + raw: tmax + tasmin: + mip: Amon + freq: monthly + raw: tmin + tas: + mip: Amon + freq: monthly + raw: tavg diff --git a/esmvaltool/cmorizers/data/cmor_config/APHRO-MA.yml b/esmvaltool/cmorizers/data/cmor_config/APHRO-MA.yml new file mode 100644 index 0000000000..6d0b7552db --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/APHRO-MA.yml @@ -0,0 +1,42 @@ +--- +# Filename (will be extended by version) +filename: 'APHRO_MA{raw_file_var}_{version}.*.nc' + +# Common global attributes for Cmorizer output +attributes: + dataset_id: APHRO-MA + tier: 3 + modeling_realm: ground + project_id: OBS + source: + 
http://aphrodite.st.hirosaki-u.ac.jp/download/ + reference: '' + version: '' + +# Variables to cmorize +variables: + pr: + mip: day + raw: precip + raw_units: kg m-2 day-1 + calendar: standard + add_mon: true + raw_file_var: '' + version: + v1: '025deg_V1101' + v2: '050deg_V1101' + v3: '025deg_V1101_EXR1' + v4: '050deg_V1101_EXR1' + reference: 'aphro-ma-v1101' + tas: + mip: day + raw: tave + raw_fallback: ta + raw_units: degC + calendar: standard + add_mon: true + raw_file_var: '_TAVE' + version: + v1: '025deg_V1808' + v2: '050deg_V1808' + reference: 'aphro-ma-v1808' diff --git a/esmvaltool/cmorizers/data/cmor_config/BerkeleyEarth.yml b/esmvaltool/cmorizers/data/cmor_config/BerkeleyEarth.yml new file mode 100644 index 0000000000..f667700980 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/BerkeleyEarth.yml @@ -0,0 +1,31 @@ +--- +# Filename +filename: 'Land_and_Ocean_LatLong1.nc' + +# Common global attributes for Cmorizer output +attributes: + dataset_id: BerkeleyEarth + tier: 2 + modeling_realm: reanaly + project_id: OBS + institution: 'Berkeley Earth Surface Temperature Project' + source: 'http://berkeleyearth.org/data/' + source_url: 'http://berkeleyearth.lbl.gov/auto/Global/Gridded/Land_and_Ocean_LatLong1.nc' + version: '2020' + land_source_history: '13-Jan-2020 17:22:52' + ocean_source_history: '07-Jan-2020 10:46:06' + reference: 'berkeleyearth' + comment: '' + +# Variables to cmorize +variables: + tas: + mip: Amon + raw: temperature + raw_units: celsius + rawclim: climatology + short_anom: tasa + rawsftlf_varname: sftlf + rawsftlf: land_mask + rawsftlf_units: 1 + rawsftlf_mip: fx diff --git a/esmvaltool/cmorizers/data/cmor_config/CDS-SATELLITE-ALBEDO.yml b/esmvaltool/cmorizers/data/cmor_config/CDS-SATELLITE-ALBEDO.yml new file mode 100644 index 0000000000..fd999a7b90 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/CDS-SATELLITE-ALBEDO.yml @@ -0,0 +1,27 @@ +--- +# Global attributes of NetCDF file +attributes: + dataset_id: CDS-SATELLITE-ALBEDO + project_id: OBS6 + tier: 3 + version: 'V1' + modeling_realm: sat + source: 'https://cds.climate.copernicus.eu/cdsapp#!/dataset/satellite-albedo' + reference: 'cds-satellite-albedo' + comment: | + 'Surface albedo 10-daily gridded data from 1981 to present' + +# Variables to CMORize +variables: + bdalb: + mip: Lmon + raw: AL_DH_BB + file: 'c3s_ALBB-DH_????????000000_GLOBE_VGT_V1.0.1.nc' + bhalb: + mip: Lmon + raw: AL_BH_BB + file: 'c3s_ALBB-BH_????????000000_GLOBE_VGT_V1.0.1.nc' + +# Parameters +custom: + regrid_resolution: '0.25x0.25' diff --git a/esmvaltool/cmorizers/data/cmor_config/CDS-SATELLITE-LAI-FAPAR.yml b/esmvaltool/cmorizers/data/cmor_config/CDS-SATELLITE-LAI-FAPAR.yml new file mode 100644 index 0000000000..998dbe9944 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/CDS-SATELLITE-LAI-FAPAR.yml @@ -0,0 +1,27 @@ +--- +# Global attributes of NetCDF file +attributes: + dataset_id: CDS-SATELLITE-LAI-FAPAR + project_id: OBS + tier: 3 + version: 'V1' # Version as listed on source + modeling_realm: sat + source: 'https://cds.climate.copernicus.eu/cdsapp#!/dataset/satellite-lai-fapar' + reference: 'cds-satellite-lai-fapar' + comment: | + 'Leaf area index and fraction absorbed of photosynthetically active radiation 10-daily gridded data from 1998 to present' + +# Variables to CMORize +variables: + lai: + mip: Lmon + raw: LAI + file: 'c3s_LAI_*_GLOBE_VGT_V1.0.1.nc' + fapar: + mip: Lmon + raw: fAPAR + file: 'c3s_FAPAR_*_GLOBE_VGT_V1.0.1.nc' + +# Parameters +custom: + regrid_resolution: '0.25x0.25' diff --git 
a/esmvaltool/cmorizers/data/cmor_config/CDS-SATELLITE-SOIL-MOISTURE.ncl b/esmvaltool/cmorizers/data/cmor_config/CDS-SATELLITE-SOIL-MOISTURE.ncl new file mode 100644 index 0000000000..de60f8d011 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/CDS-SATELLITE-SOIL-MOISTURE.ncl @@ -0,0 +1,5 @@ +; Defines what to process (True/False) as +; [daily, monthly] x [combined, passive, active] +JOBS = (/(/True, True/), \ + (/True, True/), \ + (/True, True/)/) diff --git a/esmvaltool/cmorizers/data/cmor_config/CDS-UERRA.yml b/esmvaltool/cmorizers/data/cmor_config/CDS-UERRA.yml new file mode 100644 index 0000000000..dae4418bb2 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/CDS-UERRA.yml @@ -0,0 +1,23 @@ +--- + +# Common global attributes for Cmorizer output +attributes: + frequency: 6h + dataset_id: CDS-UERRA + version: 'UERRA-HARMONIE' + tier: 3 + modeling_realm: reanaly + project_id: OBS6 + source: 'https://cds.climate.copernicus.eu/cdsapp#!/dataset/reanalysis-uerra-europe-soil-levels?tab=form' + reference: 'cds-uerra' + comment: 'This dataset has been regridded for usage in ESMValTool' + +custom: + regrid: 0.25x0.25 + +# Variables to cmorize +variables: + sm: + mip: E6hr + raw: vsw + file: reanalysis-uerra-europe-soil-levels_{year}{month}.nc diff --git a/esmvaltool/cmorizers/data/cmor_config/CERES-EBAF.yml b/esmvaltool/cmorizers/data/cmor_config/CERES-EBAF.yml new file mode 100644 index 0000000000..60dde632d6 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/CERES-EBAF.yml @@ -0,0 +1,29 @@ +--- +# Filename +filename: 'CERES_EBAF-TOA_Ed4.1_Subset_200003-202203.nc' + +# Common global attributes for Cmorizer output +attributes: + dataset_id: CERES-EBAF + version: 'Ed4.1' + tier: 2 + modeling_realm: sat + project_id: OBS + source: 'https://ceres-tool.larc.nasa.gov/ord-tool/jsp/EBAF4Selection.jsp' + reference: 'ceres-ebaf' + comment: '' + +# Variables to cmorize +variables: + rsut: + mip: Amon + raw: toa_sw_all_mon + rsutcs: + mip: Amon + raw: toa_sw_clr_c_mon + rlut: + mip: Amon + raw: toa_lw_all_mon + rlutcs: + mip: Amon + raw: toa_lw_clr_c_mon diff --git a/esmvaltool/cmorizers/data/cmor_config/CMAP.yml b/esmvaltool/cmorizers/data/cmor_config/CMAP.yml new file mode 100644 index 0000000000..eef1861f08 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/CMAP.yml @@ -0,0 +1,21 @@ +--- +# Global attributes of NetCDF file +attributes: + dataset_id: CMAP + project_id: OBS6 + tier: 2 + version: "v1" + modeling_realm: reanaly + source: "https://psl.noaa.gov/data/gridded/data.cmap.html" + reference: "cmap" + comment: | + '' + +# Variables to CMORize +variables: + # monthly frequency + pr_month: + short_name: pr + mip: Amon + raw: precip + file: "precip.mon.mean.nc" diff --git a/esmvaltool/cmorizers/data/cmor_config/CRU.yml b/esmvaltool/cmorizers/data/cmor_config/CRU.yml new file mode 100644 index 0000000000..e57cc4e945 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/CRU.yml @@ -0,0 +1,46 @@ +--- +# Filename (will be extended by variable name) +# filename: 'cru_ts4.02.1901.2017.{raw_name}.dat.nc' +filename: 'cru_ts4.07.1901.2022.{raw_name}.dat.nc' + +# Common global attributes for Cmorizer output +attributes: + dataset_id: CRU + # version: TS4.02 + version: 'TS4.07' + tier: 2 + modeling_realm: reanaly + # project_id: OBS # v4.02 + project_id: OBS6 + # source: 'https://crudata.uea.ac.uk/cru/data/hrg/cru_ts_4.02/cruts.1811131722.v4.02/' + # source: 'https://crudata.uea.ac.uk/cru/data/hrg/cru_ts_4.06/cruts.2205201912.v4.06/' + source: 
'https://crudata.uea.ac.uk/cru/data/hrg/cru_ts_4.07/cruts.2304141047.v4.07/' + reference: 'cru' + comment: '' + +# Variables to cmorize +variables: + tas: + mip: Amon + raw: tmp + raw_units: celsius + tasmin: + mip: Amon + raw: tmn + raw_units: celsius + tasmax: + mip: Amon + raw: tmx + raw_units: celsius + pr: + mip: Amon + raw: pre + raw_units: kg m-2 month-1 + evspsblpot: + mip: Emon + raw: pet + raw_units: kg m-2 day-1 + clt: + mip: Amon + raw: cld + raw_units: percent diff --git a/esmvaltool/cmorizers/data/cmor_config/CT2019.yml b/esmvaltool/cmorizers/data/cmor_config/CT2019.yml new file mode 100644 index 0000000000..62d64fb1c7 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/CT2019.yml @@ -0,0 +1,26 @@ +--- +# FTP server used for downloading the data +ftp_host: 'aftp.cmdl.noaa.gov' +data_dir: 'products/carbontracker/co2/CT2019/molefractions/co2_total_monthly/' + +# Pattern of input files +input_file_pattern: 'CT2019.molefrac_glb3x2_*.nc' + +# Common global attributes for Cmorizer output +attributes: + dataset_id: CT2019 + version: '2019' + tier: 2 + modeling_realm: reanaly + project_id: OBS6 + source: 'https://www.esrl.noaa.gov/gmd/ccgg/carbontracker/index.php' + reference: 'ct2019' + comment: '' + +# Variables to cmorize +variables: + co2s: + mip: Amon + add_aux_coords: + air_pressure: [0, 1, 2, 3] + raw_long_name: mole_fraction_of_carbon_dioxide_in_air diff --git a/esmvaltool/cmorizers/data/cmor_config/CowtanWay.yml b/esmvaltool/cmorizers/data/cmor_config/CowtanWay.yml new file mode 100644 index 0000000000..39c0c8f7c2 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/CowtanWay.yml @@ -0,0 +1,38 @@ +--- +# Filename (will be extended by variable name) +filename: '{version}_0_0.nc' + +# Common global attributes for Cmorizer output +attributes: + dataset_id: CowtanWay + version: + v1: 'had4_krig_v1' + v2: 'had4_uah_v1' + v3: 'had4_short_krig_v2' + v4: 'had4_short_uah_v2' + v5: 'ghcn_short_krig_v2' + v6: 'ghcn_short_uah_v2' + v7: 'had4sst4_krig_v2' + v8: 'had4_krig_v2' + baseline: + v1: '1981-2010' + v2: '1981-2010' + v3: '1981-2010' + v4: '1981-2010' + v5: '1981-2010' + v6: '1981-2010' + v7: '1961-1990' + v8: '1961-1990' + tier: 2 + modeling_realm: reanaly + project_id: OBS + source: 'https://www-users.york.ac.uk/~kdc3/papers/coverage2013/series.html' + reference: 'cowtanway' + comment: 'Temperature anomaly with respect to the period {baseline}.' 
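The CowtanWay `version`/`baseline` attributes above are keyed maps, so a single config drives one CMORization pass per dataset variant, each with its own input filename and baseline comment. A minimal sketch of how such a block expands, in plain Python with illustrative names (an assumption for illustration, not the actual ESMValTool cmorizer code):

    # Subset of the keyed maps above; every name here is illustrative.
    versions = {"v1": "had4_krig_v1", "v8": "had4_krig_v2"}
    baselines = {"v1": "1981-2010", "v8": "1961-1990"}
    filename_pattern = "{version}_0_0.nc"
    comment_pattern = "Temperature anomaly with respect to the period {baseline}."

    for key, version in versions.items():
        # One pass per variant: resolve the input file and the per-version comment.
        filename = filename_pattern.format(version=version)
        comment = comment_pattern.format(baseline=baselines[key])
        print(f"{key}: {filename} -> {comment}")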
+ +# Variables to cmorize +variables: + tasa: + mip: Amon + raw: temperature_anomaly + short_name: tasa diff --git a/esmvaltool/cmorizers/data/cmor_config/Duveiller2018.yml b/esmvaltool/cmorizers/data/cmor_config/Duveiller2018.yml new file mode 100644 index 0000000000..b8ae5d6bbe --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/Duveiller2018.yml @@ -0,0 +1,23 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: Duveiller2018 + version: 'v2018' + tier: 2 + modeling_realm: clim + project_id: OBS + source: 'https://ndownloader.figshare.com/files/9969496' + reference: 'duveiller2018' + comment: '' + climatology_start: '2008-01-01T00:00:00Z' + climatology_end: '2012-12-31T23:59:59Z' + +# Variables to cmorize +variables: + albDiffiTr13: + mip: Amon + # Match CMOR variables with input file one + raw: Delta_albedo + # input file name + file: albedo_IGBPgen.nc + iTr: 13 diff --git a/esmvaltool/cmorizers/data/cmor_config/E-OBS.yml b/esmvaltool/cmorizers/data/cmor_config/E-OBS.yml new file mode 100644 index 0000000000..61eaca0a31 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/E-OBS.yml @@ -0,0 +1,45 @@ +--- +# Filename (will be extended by variable name, resolutions and version) +filename: '{raw_name}_ens_mean_{resolution}deg_reg_v{version}.nc' + +# Common global attributes for Cmorizer output +attributes: + dataset_id: E-OBS + version: '20.0e' + resolution: + r1: 0.1 + r2: 0.25 + tier: 2 + modeling_realm: ground + project_id: OBS + source: 'https://www.ecad.eu/download/ensembles/download.php' + reference: 'e-obs' + comment: '' + +# Variables to cmorize +variables: + tas: + mip: day + raw: tg + raw_units: celsius + add_mon: true + pr: + mip: day + raw: rr + raw_units: kg m-2 day-1 + add_mon: true + tasmax: + mip: day + raw: tx + raw_units: celsius + add_mon: true + tasmin: + mip: day + raw: tn + raw_units: celsius + add_mon: true + psl: + mip: day + raw: pp + raw_units: hPa + add_mon: true diff --git a/esmvaltool/cmorizers/data/cmor_config/ERA-Interim-Land.yml b/esmvaltool/cmorizers/data/cmor_config/ERA-Interim-Land.yml new file mode 100644 index 0000000000..cfd3afeebc --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/ERA-Interim-Land.yml @@ -0,0 +1,20 @@ +--- +# Global attributes of NetCDF file +attributes: + dataset_id: ERA-Interim-Land + project_id: OBS6 + tier: 3 + version: '1' + modeling_realm: reanaly + source: 'https://apps.ecmwf.int/datasets/data/interim-land/type=an/' + reference: 'era-interim-land' + comment: | + 'Contains modified Copernicus Climate Change Service Information {year}' + +# Variables to CMORize +variables: + sm_monthly: + short_name: sm + mip: Lmon + raw: swvl1 + file: 'ERA-Interim-Land_swvl1_6hourly_????.nc' diff --git a/esmvaltool/cmorizers/data/cmor_config/ERA-Interim.yml b/esmvaltool/cmorizers/data/cmor_config/ERA-Interim.yml new file mode 100644 index 0000000000..cfea7d8265 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/ERA-Interim.yml @@ -0,0 +1,342 @@ +--- +# Global attributes of NetCDF file +attributes: + dataset_id: ERA-Interim + project_id: OBS6 + tier: 3 + version: '1' + modeling_realm: reanaly + source: 'www.ecmwf.int/en/forecasts/datasets/reanalysis-datasets/era-interim' + reference: 'era-interim' + comment: | + 'Contains modified Copernicus Climate Change Service Information {year}' + +# Variables to CMORize +variables: + # time independent + sftlf: + mip: fx + raw: lsm + file: 'ERA-Interim_lsm.nc' + orog: + mip: fx + raw: z + file: 'ERA-Interim_z.nc' + # daily frequency + clt_day: + 
short_name: clt + mip: day + raw: tcc + file: 'ERA-Interim_tcc_daily_*.nc' + evspsbl_day: + short_name: evspsbl + mip: Eday + raw: e + file: 'ERA-Interim_e_daily_*.nc' + pr_day: + short_name: pr + mip: day + raw: tp + file: 'ERA-Interim_tp_daily_*.nc' + prsn_day: + short_name: prsn + mip: day + raw: sf + file: 'ERA-Interim_sf_daily_*.nc' + ps_day: + short_name: ps + mip: CFday + raw: sp + file: 'ERA-Interim_sp_daily_*.nc' + psl_day: + short_name: psl + mip: day + raw: msl + file: 'ERA-Interim_msl_daily_*.nc' + rlds_day: + short_name: rlds + mip: day + raw: strd + file: 'ERA-Interim_strd_daily_*.nc' + rsds_day: + short_name: rsds + mip: day + raw: ssrd + file: 'ERA-Interim_ssrd_daily_*.nc' + rsdt_day: + short_name: rsdt + mip: CFday + raw: tisr + file: 'ERA-Interim_tisr_daily_*.nc' + rss_day: + short_name: rss + mip: day + raw: ssr + file: 'ERA-Interim_ssr_daily_*.nc' + ta_day: + short_name: ta + mip: day + raw: t + file: 'ERA-Interim_t_daily_*.nc' + tas_day: + short_name: tas + mip: day + raw: t2m + file: 'ERA-Interim_t2m_daily_*.nc' + tasmax: + mip: day + raw: mx2t + file: 'ERA-Interim_mx2t_daily_*.nc' + tasmin: + mip: day + raw: mn2t + file: 'ERA-Interim_mn2t_daily_*.nc' + tdps_day: + short_name: tdps + mip: Eday + raw: d2m + file: 'ERA-Interim_d2m_daily_*.nc' + ts_day: + short_name: ts + mip: Eday + raw: skt + file: 'ERA-Interim_skt_daily_*.nc' + tsn_day: + short_name: tsn + mip: Eday + raw: tsn + file: 'ERA-Interim_tsn_daily_*.nc' + uas_day: + short_name: uas + mip: day + raw: u10 + file: 'ERA-Interim_u10_daily_*.nc' + va_day: + short_name: va + mip: day + raw: v + file: 'ERA-Interim_v_daily_*.nc' + vas_day: + short_name: vas + mip: day + raw: v10 + file: 'ERA-Interim_v10_daily_*.nc' + zg_day: + short_name: zg + mip: day + raw: z + file: 'ERA-Interim_z_daily_*.nc' + + # monthly frequency + clivi: + mip: Amon + raw: p57.162 + file: 'ERA-Interim_p57.162_monthly_*.nc' + clt_month: + short_name: clt + mip: Amon + raw: tcc + file: 'ERA-Interim_tcc_monthly_*.nc' + # clt-low, clt-med, clt-high: support to be added in the future +# clt-low: +# short_name: clt-low +# mip: Amon +# raw: lcc +# file: 'ERA-Interim_lcc_monthly_*.nc' +# clt-med: +# short_name: clt-med +# mip: Amon +# raw: mcc +# file: 'ERA-Interim_mcc_monthly_*.nc' +# clt-high: +# short_name: clt-high +# mip: Amon +# raw: hcc +# file: 'ERA-Interim_hcc_monthly_*.nc' + clwvi: + mip: Amon + raw: + - p56.162 + - p57.162 + operator: sum + files: + - 'ERA-Interim_p56.162_monthly_*.nc' + - 'ERA-Interim_p57.162_monthly_*.nc' + cl_month: + short_name: cl + mip: Amon + raw: cc + file: 'ERA-Interim_cc_monthly_*.nc' + cli_month: + short_name: cli + mip: Amon + raw: ciwc + file: 'ERA-Interim_ciwc_monthly_*.nc' + clw_month: + short_name: clw + mip: Amon + raw: clwc + file: 'ERA-Interim_clwc_monthly_*.nc' + evspsbl_month: + short_name: evspsbl + mip: Amon + raw: e + file: 'ERA-Interim_e_monthly_*.nc' + hfds: + mip: Omon + raw: + - ssr + - str + - slhf + - sshf + operator: sum + files: + - 'ERA-Interim_ssr_monthly_*.nc' + - 'ERA-Interim_str_monthly_*.nc' + - 'ERA-Interim_slhf_monthly_*.nc' + - 'ERA-Interim_sshf_monthly_*.nc' + hur: + mip: Amon + raw: r + file: 'ERA-Interim_r_monthly_*.nc' + hus: + mip: Amon + raw: q + file: 'ERA-Interim_q_monthly_*.nc' + lwp_month: + short_name: lwp + mip: Amon + raw: p56.162 + file: 'ERA-Interim_p56.162_monthly_*.nc' + pr_month: + short_name: pr + mip: Amon + raw: tp + file: 'ERA-Interim_tp_monthly_*.nc' + prw: + mip: Amon + raw: tcwv + file: 'ERA-Interim_tcwv_monthly_*.nc' + prsn_month: + short_name: prsn + mip: 
Amon + raw: sf + file: 'ERA-Interim_sf_monthly_*.nc' + ps_month: + short_name: ps + mip: Amon + raw: sp + file: 'ERA-Interim_sp_monthly_*.nc' + psl_month: + short_name: psl + mip: Amon + raw: msl + file: 'ERA-Interim_msl_monthly_*.nc' + rlut_month: + short_name: rlut + mip: Amon + raw: ttr + file: 'ERA-Interim_ttr_monthly_*.nc' + rlutcs_month: + short_name: rlutcs + mip: Amon + raw: ttrc + file: 'ERA-Interim_ttrc_monthly_*.nc' + rsds_month: + short_name: rsds + mip: Amon + raw: ssrd + file: 'ERA-Interim_ssrd_monthly_*.nc' + rsdt_month: + short_name: rsdt + mip: Amon + raw: tisr + file: 'ERA-Interim_tisr_monthly_*.nc' + rss_month: + short_name: rss + mip: Emon + raw: ssr + file: 'ERA-Interim_ssr_monthly_*.nc' + rsut_month: + short_name: rsut + mip: Amon + raw: + - tisr + - tsr + operator: diff + files: + - 'ERA-Interim_tisr_monthly_*.nc' + - 'ERA-Interim_tsr_monthly_*.nc' + rsutcs_month: + short_name: rsutcs + mip: Amon + raw: + - tisr + - tsrc + operator: diff + files: + - 'ERA-Interim_tisr_monthly_*.nc' + - 'ERA-Interim_tsrc_monthly_*.nc' + ta: + mip: Amon + raw: t + file: 'ERA-Interim_t_monthly_*.nc' + tas_month: + short_name: tas + mip: Amon + raw: t2m + file: 'ERA-Interim_t2m_monthly_*.nc' + tauu: + mip: Amon + raw: iews + file: 'ERA-Interim_iews_monthly_*.nc' + tauv: + mip: Amon + raw: inss + file: 'ERA-Interim_inss_monthly_*.nc' + tdps_month: + short_name: tdps + mip: Emon + raw: d2m + file: 'ERA-Interim_d2m_monthly_*.nc' + tos: + mip: Omon + raw: sst + file: 'ERA-Interim_sst_monthly_*.nc' + ts_month: + short_name: ts + mip: Amon + raw: skt + file: 'ERA-Interim_skt_monthly_*.nc' + tsn_month: + short_name: tsn + mip: LImon + raw: tsn + file: 'ERA-Interim_tsn_monthly_*.nc' + ua: + mip: Amon + raw: u + file: 'ERA-Interim_u_monthly_*.nc' + uas_month: + short_name: uas + mip: Amon + raw: u10 + file: 'ERA-Interim_u10_monthly_*.nc' + va: + mip: Amon + raw: v + file: 'ERA-Interim_v_monthly_*.nc' + vas_month: + short_name: vas + mip: Amon + raw: v10 + file: 'ERA-Interim_v10_monthly_*.nc' + wap: + mip: Amon + raw: w + file: 'ERA-Interim_w_monthly_*.nc' + zg_month: + short_name: zg + mip: Amon + raw: z + file: 'ERA-Interim_z_monthly_*.nc' diff --git a/esmvaltool/cmorizers/data/cmor_config/ESACCI-LANDCOVER.yml b/esmvaltool/cmorizers/data/cmor_config/ESACCI-LANDCOVER.yml new file mode 100644 index 0000000000..925057dc12 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/ESACCI-LANDCOVER.yml @@ -0,0 +1,32 @@ +attributes: + project_id: 'OBS' + dataset_id: 'ESACCI-LANDCOVER' + tier: 2 + modeling_realm: sat + institution: 'Universite catholique de Louvain' + reference: 'esacci-landcover' + source: 'ftp://anon-ftp.ceda.ac.uk/neodc/esacci/land_cover/data/pft' + title: 'ESA CCI Land Cover' + version: 'v2.0.8' + comment: '' +filename: ESACCI-LC-L4-PFT-Map-300m-P1Y-{year}-v2.0.8.nc +variables: + baresoilFrac: + mip: Lmon + long_name: 'BARE' + frequency: yr + cropFrac: + mip: Lmon + long_name: 'GRASS-MAN' + frequency: yr + grassFrac: + mip: Lmon + long_name: 'GRASS-NAT' + frequency: yr + shrubFrac: + mip: Lmon + frequency: yr + treeFrac: + mip: Lmon + frequency: yr + diff --git a/esmvaltool/cmorizers/data/cmor_config/ESACCI-LST.yml b/esmvaltool/cmorizers/data/cmor_config/ESACCI-LST.yml new file mode 100644 index 0000000000..b15ce08be0 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/ESACCI-LST.yml @@ -0,0 +1,31 @@ +# CMORIZE ESA CCI LST +# Following CRU and ESACCI-OC as examples +# Only looking at AQUA MONTHLY data, but the Python script has placeholders for different platforms +--- + + +# Common global 
attributes for Cmorizer output +attributes: + dataset_id: ESACCI-LST + version: '1.00' + tier: 2 + project_id: OBS + source: 'ESA CCI' + modeling_realm: sat + reference: 'esacci_lst' + comment: '' + start_year: 2003 + end_year: 2018 + +# Variables to cmorize +# These go into the vals dictionary in the Python script +variables: + ts: + mip: Amon + raw: land surface temperature + raw_units: kelvin + file_day: 'ESACCI-LST-L3C-LST-MODISA-0.05deg_1MONTHLY_DAY-' + file_night: 'ESACCI-LST-L3C-LST-MODISA-0.05deg_1MONTHLY_NIGHT-' + # The plan is to make MODISA configurable here, but for now there are only placeholders in the Python script + # Period covered: 2003-2018 + # 2003 only has data from July onwards for Aqua MODIS diff --git a/esmvaltool/cmorizers/data/cmor_config/ESACCI-OC.yml b/esmvaltool/cmorizers/data/cmor_config/ESACCI-OC.yml new file mode 100644 index 0000000000..01a9739cfb --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/ESACCI-OC.yml @@ -0,0 +1,23 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: ESACCI-OC + version: 'fv5.0' + tier: 2 + modeling_realm: sat + project_id: OBS6 + source: 'ftp://oc-cci-data:ELaiWai8ae@oceancolour.org/occci-v5.0/geographic/netcdf/monthly/chlor_a/' + reference: "esacci-oc" + comment: "" + +# Variables to cmorize (here use only filename prefix) +variables: + chl: + mip: Omon + raw: chlor_a + file: ESACCI-OC-L3S-CHLOR_A-MERGED-1M_MONTHLY_4km_GEO_PML_OCx + +# Custom dictionary for this cmorizer +custom: + # Rebin original data (4km) by averaging at lower resolution (multiple of 2 accepted) + bin_size: 6 diff --git a/esmvaltool/cmorizers/data/cmor_config/ESACCI-SEA-SURFACE-SALINITY.yml b/esmvaltool/cmorizers/data/cmor_config/ESACCI-SEA-SURFACE-SALINITY.yml new file mode 100644 index 0000000000..73c354cfb2 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/ESACCI-SEA-SURFACE-SALINITY.yml @@ -0,0 +1,17 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: ESACCI-SEA-SURFACE-SALINITY + versions: ['fv1.8', 'fv2.31'] + tier: 2 + modeling_realm: reanaly + project_id: OBS6 + source: 'ftp://anon-ftp.ceda.ac.uk/neodc/esacci/sea_surface_salinity' + reference: "esacci-sea-surface-salinity" + comment: "" + +# Variables to cmorize (here use only filename prefix) +variables: + sos: + mip: Omon + raw: sss diff --git a/esmvaltool/cmorizers/data/cmor_config/ESACCI-SOILMOISTURE.yml b/esmvaltool/cmorizers/data/cmor_config/ESACCI-SOILMOISTURE.yml new file mode 100644 index 0000000000..f2b7a1053d --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/ESACCI-SOILMOISTURE.yml @@ -0,0 +1,21 @@ +attributes: + project_id: 'OBS' + dataset_id: 'ESACCI-SOILMOISTURE' + tier: 2 + modeling_realm: sat + institution: 'TU Wien (AUT); VanderSat B.V. 
(NL); Planet Labs (NL); CESBIO (FR), EODC Gmbh (AUT)' + reference: 'esacci-soilmoisture' + source: 'ftp://anon-ftp.ceda.ac.uk/neodc/esacci/soil_moisture/data/' + title: 'ESA CCI Soil Moisture' + version: 'L3S-SSMV-COMBINED-v08.1' + comment: '' +variables: + sm: + mip: Eday + raw: sm + filename: ESACCI-SOILMOISTURE-L3S-SSMV-COMBINED-{year}????000000-fv08.1.nc + smStderr: + mip: Eday + raw: sm_uncertainty + filename: ESACCI-SOILMOISTURE-L3S-SSMV-COMBINED-{year}????000000-fv08.1.nc + \ No newline at end of file diff --git a/esmvaltool/cmorizers/data/cmor_config/ESACCI-SST.yml b/esmvaltool/cmorizers/data/cmor_config/ESACCI-SST.yml new file mode 100644 index 0000000000..49d4731681 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/ESACCI-SST.yml @@ -0,0 +1,29 @@ +--- +# Common global attributes for Cmorizer output +filename: '{year}{month}15_regridded_sst.nc' +attributes: + dataset_id: ESACCI-SST + version: '2.2' + tier: 2 + modeling_realm: sat + project_id: OBS + source: 'http://surftemp.net/regridding/index.html' + reference: ["esacci-sst", "esacci-sst-bias-correction"] + comment: "Note that the variable tsStderr is an uncertainty, not a standard error." + +# Variables to cmorize (here use only filename prefix) +variables: + ts: + mip: Amon + raw: sst + file: ESACCI-SST_sat_L4-GHRSST-SSTdepth-OSTIA-GLOB + tsStderr: + mip: Amon + raw: sst_uncertainty + file: ESACCI-SST_sat_L4-GHRSST-SSTdepth-OSTIA-GLOB + +# uncomment this part to produce sst cmorized data for ocean realm (Omon, tos) +# tos: +# mip: Omon +# raw: sst +# file: ESACCI-SST_sat_L4-GHRSST-SSTdepth-OSTIA-GLOB diff --git a/esmvaltool/cmorizers/data/cmor_config/ESACCI-WATERVAPOUR.yml b/esmvaltool/cmorizers/data/cmor_config/ESACCI-WATERVAPOUR.yml new file mode 100644 index 0000000000..eda705c000 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/ESACCI-WATERVAPOUR.yml @@ -0,0 +1,35 @@ +--- +# Common global attributes for Cmorizer output + +attributes: + dataset_id: ESACCI-WATERVAPOUR + version: CDR2-L3-COMBI-05deg-fv3.1 + tier: 3 + modeling_realm: sat + project_id: OBS6 + source: https://wui.cmsaf.eu/safira/action/viewDoiDetails?acronym=COMBI_V001 + reference: ["esacci-watervapour"] + comment: "This CMORizer is for the CDR2 version of the dataset (global coverage). + Download is possible after registration and ordering of the data."
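The per-variable `filename` entries in this and several nearby configs (see the ESACCI-SOILMOISTURE and ESACCI-SST files above and the `HTWmm{year}*` patterns below) combine a Python `str.format` placeholder such as `{year}` with shell-style wildcards. A minimal sketch of how such a template could be expanded into concrete input files; `raw_dir` and `find_input_files` are illustrative names, not part of the actual cmorizer code:

```python
# Hypothetical helper: expand a config template such as
# 'HTWmm{year}*000000313WVCCI01GL.nc' into the matching input files.
import glob
import os


def find_input_files(raw_dir, template, year):
    """Fill the {year} placeholder, then glob the remaining wildcards."""
    pattern = template.format(year=year)
    return sorted(glob.glob(os.path.join(raw_dir, pattern)))


# Example call (the directory is made up):
# find_input_files('/rawobs/Tier3/ESACCI-WATERVAPOUR',
#                  'HTWmm{year}*000000313WVCCI01GL.nc', 2002)
```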
+ +# Variables to cmorize (here use only filename prefix) +variables: +# monthly frequency + prw_mon: + short_name: prw + mip: Amon + raw: tcwv + frequency: mon + filename: HTWmm{year}*000000313WVCCI01GL.nc + start_year: 2002 + end_year: 2017 + +# daily frequency + prw_day: + short_name: prw + mip: Eday + raw: tcwv + frequency: day + filename: HTWdm{year}*000000313WVCCI01GL.nc + start_year: 2002 + end_year: 2017 diff --git a/esmvaltool/cmorizers/data/cmor_config/ESDC.yml b/esmvaltool/cmorizers/data/cmor_config/ESDC.yml new file mode 100644 index 0000000000..7fc4a77ff7 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/ESDC.yml @@ -0,0 +1,26 @@ +--- +filename: 'esdc-8d-{grid}-{chunking}-{version}.zarr' + +attributes: + project_id: OBS6 + dataset_id: ESDC + version: 3.0.1 + tier: 2 + grid: 0.25deg + chunking: 1x720x1440 + # chunking: 256x128x128 + modeling_realm: reanaly + source: http://data.rsc4earth.de/EarthSystemDataCube/ + reference: 'esdc' + comment: '' + +variables: + tas: + mip: Amon + raw: air_temperature_2m + tasmax: + mip: Amon + raw: max_air_temperature_2m + tasmin: + mip: Amon + raw: min_air_temperature_2m diff --git a/esmvaltool/cmorizers/data/cmor_config/ESRL.yml b/esmvaltool/cmorizers/data/cmor_config/ESRL.yml new file mode 100644 index 0000000000..6ffa6ca2c0 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/ESRL.yml @@ -0,0 +1,28 @@ +--- +# FTP server used for downloading the data +download: true +ftp_host: 'aftp.cmdl.noaa.gov' +data_dir: '/data/trace_gases/co2/' + +# Pattern of input file +input_filename_pattern: 'co2_*_surface-*_1_ccgg_*.txt' + +# Common global attributes for Cmorizer output +attributes: + dataset_id: ESRL + tier: 2 + modeling_realm: ground + project_id: OBS + source: 'https://www.esrl.noaa.gov/gmd/dv/data/' + reference: 'esrl' + +# Variables to cmorize +variables: + co2s: + mip: Amon + # Supports NOAA ESRL baseline observatories: 'mlo', 'brw', 'spo', 'smo' + # Supports stations, see list at + # https://www.esrl.noaa.gov/gmd/dv/site/?program=ccgg + # Also supports NOAA ESRL global data, use key 'global' + # Using key 'all', all of the available data from all 3 types are formatted + stations: ['all'] diff --git a/esmvaltool/cmorizers/data/cmor_config/Eppley-VGPM-MODIS.yml b/esmvaltool/cmorizers/data/cmor_config/Eppley-VGPM-MODIS.yml new file mode 100644 index 0000000000..bc0d452879 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/Eppley-VGPM-MODIS.yml @@ -0,0 +1,18 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: Eppley-VGPM-MODIS + version: 'R2018' + tier: 2 + modeling_realm: sat + project_id: OBS + source: 'http://orca.science.oregonstate.edu/data/1x2/monthly/eppley.r2018.m.chl.m.sst/hdf/' + reference: "eppley-vgpm-modis" + comment: "" + +# Variables to cmorize (here use only filename prefix) +variables: + intpp: + mip: Omon + raw: npp + file: eppley diff --git a/esmvaltool/cmorizers/data/cmor_config/FLUXCOM.yml b/esmvaltool/cmorizers/data/cmor_config/FLUXCOM.yml new file mode 100644 index 0000000000..69d51bd79e --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/FLUXCOM.yml @@ -0,0 +1,19 @@ +--- +# Filename +filename: 'GPP.ANN.CRUNCEPv6.monthly.*.nc' + +# Common global attributes for Cmorizer output +attributes: + dataset_id: FLUXCOM + version: 'ANN-v1' + tier: 3 + modeling_realm: reanaly + project_id: OBS + source: 'http://www.bgc-jena.mpg.de/geodb/BGI/Home' + reference: 'fluxcom' + comment: '' + +# Variables to cmorize +variables: + gpp: + mip: Lmon diff --git
a/esmvaltool/cmorizers/data/cmor_config/GCP2018.yml b/esmvaltool/cmorizers/data/cmor_config/GCP2018.yml new file mode 100644 index 0000000000..00849bcab4 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/GCP2018.yml @@ -0,0 +1,30 @@ +--- +# Filename +filename: 'Global_Carbon_Budget_2018v1.0.xlsx' + +# Header line number in the xlsx file +header_line: 18 + +# Common global attributes for Cmorizer output +attributes: + dataset_id: GCP2018 + version: '1.0' + tier: 2 + modeling_realm: reanaly + project_id: OBS + source: 'https://www.icos-cp.eu/GCP/2018' + reference: gcp2018 + comment: '' + +# Variables to cmorize +variables: + nbp: + mip: Lmon + area: 148300000000000.0 + positive: down + comment: 'The original unit of this dataset is GtC/yr. To be CMOR-compliant, the units are given relative to the land surface area of the Earth (given by the attribute area in m2 in this file).' + fgco2: + mip: Omon + area: 360000000000000.0 + positive: down + comment: 'The original unit of this dataset is GtC/yr. To be CMOR-compliant, the units are given relative to the ocean surface area of the Earth (given by the attribute area in m2 in this file).' diff --git a/esmvaltool/cmorizers/data/cmor_config/GCP2020.yml b/esmvaltool/cmorizers/data/cmor_config/GCP2020.yml new file mode 100644 index 0000000000..f955a7c36f --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/GCP2020.yml @@ -0,0 +1,37 @@ +--- +# Filename +filename: 'Global_Carbon_Budget_2020v1.0.xlsx' + +# Header line number in the xlsx file +header_line: 19 + +# Common global attributes for Cmorizer output +attributes: + dataset_id: GCP2020 + version: '1.0' + tier: 2 + modeling_realm: reanaly + project_id: OBS + source: 'https://www.icos-cp.eu/science-and-impact/global-carbon-budget/2020' + reference: gcp2020 + comment: '' + +# Variables to cmorize +variables: + nbp: + mip: Lmon + area: 148300000000000.0 + positive: down + comment: 'The original unit of this dataset is GtC/yr. To be CMOR-compliant, the units are given relative to the land surface area of the Earth (given by the attribute area in m2 in this file).' + nbp_residual: + short_name: nbp + mip: Lmon + area: 148300000000000.0 + positive: down + version_suffix: residual + comment: 'The original unit of this dataset is GtC/yr. To be CMOR-compliant, the units are given relative to the land surface area of the Earth (given by the attribute area in m2 in this file). This version (with suffix _residual) of the dataset does not use the output of the dynamic global vegetation models (DGVMs) to estimate the atmosphere-land carbon flux S_Land (which is then used to calculate nbp as nbp = S_Land - land-use change emissions), but calculates this as the residual from other carbon fluxes, i.e., S_Land = Emissions - atmospheric growth - S_Ocean.' + fgco2: + mip: Omon + area: 360000000000000.0 + positive: down + comment: 'The original unit of this dataset is GtC/yr. To be CMOR-compliant, the units are given relative to the ocean surface area of the Earth (given by the attribute area in m2 in this file).'
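The `area` attributes in the GCP configs above drive the unit conversion described in their comments: a global carbon flux in GtC/yr is spread over the land or ocean surface area (in m2) and divided by the length of a year to obtain the CMOR flux unit kg m-2 s-1. A back-of-the-envelope sketch of that arithmetic, assuming a 365.25-day year (the exact calendar handling of the CMORizer script is not shown in this diff):

```python
# Illustrative conversion from GtC/yr to kg m-2 s-1 using the 'area'
# attribute from the config; the real script may differ in details.
GT_TO_KG = 1e12                       # 1 Gt = 1e12 kg
SECONDS_PER_YEAR = 365.25 * 24 * 3600  # assumed year length


def gtc_per_year_to_flux(value_gtc_yr, area_m2):
    """Convert a global flux in GtC/yr to kg m-2 s-1 over area_m2."""
    return value_gtc_yr * GT_TO_KG / (area_m2 * SECONDS_PER_YEAR)


# Land nbp example: 1 GtC/yr over 1.483e14 m2 is ~2.1e-10 kg m-2 s-1
print(gtc_per_year_to_flux(1.0, 1.483e14))
```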
diff --git a/esmvaltool/cmorizers/data/cmor_config/GHCN-CAMS.yml b/esmvaltool/cmorizers/data/cmor_config/GHCN-CAMS.yml new file mode 100644 index 0000000000..db6d10372a --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/GHCN-CAMS.yml @@ -0,0 +1,21 @@ +--- +# Filename +filename: 'air.mon.mean.nc' + +# Common global attributes for Cmorizer output +attributes: + dataset_id: GHCN-CAMS + version: '1' + tier: 2 + modeling_realm: ground + project_id: OBS + source: 'https://www.esrl.noaa.gov/psd/data/gridded/data.ghcncams.html' + reference: 'ghcn-cams' + comment: '' + +# Variables to cmorize +variables: + tas: + mip: Amon + raw: air + raw_units: K diff --git a/esmvaltool/cmorizers/data/cmor_config/GISTEMP.yml b/esmvaltool/cmorizers/data/cmor_config/GISTEMP.yml new file mode 100644 index 0000000000..afe6149cf0 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/GISTEMP.yml @@ -0,0 +1,20 @@ +--- +# Filename (will be extended by variable name) +filename: 'gistemp250_GHCNv4.nc' + +# Common global attributes for Cmorizer output +attributes: + dataset_id: GISTEMP + version: 'v4' + tier: 2 + modeling_realm: ground + project_id: OBS + source: 'https://data.giss.nasa.gov/gistemp/' + reference: 'gistemp' + comment: '' + +# Variables to cmorize +variables: + tasa: + mip: Amon + raw: tempanomaly diff --git a/esmvaltool/cmorizers/data/cmor_config/GLODAP.yml b/esmvaltool/cmorizers/data/cmor_config/GLODAP.yml new file mode 100644 index 0000000000..e2a687e3c9 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/GLODAP.yml @@ -0,0 +1,31 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: GLODAP + version: 'v2.2016b' + tier: 2 + modeling_realm: clim + project_id: OBS6 + source: 'https://www.ncei.noaa.gov/data/oceans/ncei/ocads/data/0162565/mapped/GLODAPv2.2016b_MappedClimatologies.tar.gz' + reference: 'glodap' + comment: '' + +# Variables to cmorize (here use only filename prefix) +# pHtsinsitutp: seawater ph reported on total scale at in situ temperature and pressure +variables: + dissic: + mip: Oyr + raw_var: TCO2 + file: GLODAPv2.2016b + talk: + mip: Oyr + raw_var: TAlk + file: GLODAPv2.2016b + ph: + mip: Oyr + raw_var: pHtsinsitutp + file: GLODAPv2.2016b + +# Custom dictionary for this cmorizer +custom: + reference_year: 2000 diff --git a/esmvaltool/cmorizers/data/cmor_config/GPCC.yml b/esmvaltool/cmorizers/data/cmor_config/GPCC.yml new file mode 100644 index 0000000000..f080cd14bd --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/GPCC.yml @@ -0,0 +1,27 @@ +--- +# Filename (will be extended by version) +filename: 'full_data_monthly_{version}.nc' + +# Common global attributes for Cmorizer output +attributes: + dataset_id: GPCC + version: + v1: 'v2018_25' + v2: 'v2018_10' + v3: 'v2018_05' + v4: 'v2018_025' + tier: 2 + modeling_realm: reanaly + project_id: OBS + source: 'https://opendata.dwd.de/climate_environment/GPCC/html/fulldata-monthly_v2018_doi_download.html' + reference: 'gpcc' + comment: '' + +# Variables to cmorize +variables: + pr: + mip: Amon + raw: precip + raw_units: kg m-2 month-1 + calendar: gregorian + constraint: numgauge diff --git a/esmvaltool/cmorizers/data/cmor_config/GPCP-SG.yml b/esmvaltool/cmorizers/data/cmor_config/GPCP-SG.yml new file mode 100644 index 0000000000..1cd1189032 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/GPCP-SG.yml @@ -0,0 +1,18 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: GPCP-SG + version: '2.3' + tier: 2 + modeling_realm: atmos + project_id: OBS + source: 
'https://downloads.psl.noaa.gov/Datasets/gpcp/precip.mon.mean.nc' + reference: 'gpcp-sg' + +# Variables to cmorize +variables: + pr: + filename: precip.mon.mean.nc + mip: Amon + raw_name: precip + raw_units: 'mm/day' diff --git a/esmvaltool/cmorizers/data/cmor_config/GRACE.yml b/esmvaltool/cmorizers/data/cmor_config/GRACE.yml new file mode 100644 index 0000000000..cca06799d0 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/GRACE.yml @@ -0,0 +1,28 @@ +--- +# Global attributes of NetCDF file +attributes: + dataset_id: GRACE + project_id: OBS6 + tier: 3 + version: '1' + modeling_realm: sat + source: 'https://podaac.jpl.nasa.gov/dataset/TELLUS_GRAC-GRFO_MASCON_CRI_GRID_RL06_V2' + reference: 'grace' + comment: | + 'Grace' + +# Variables to CMORize +variables: + lweGrace: + mip: Lmon + raw: lwe_thickness + file: 'GRCTellus.JPL.200204_202108.GLO.RL06M.MSCNv02CRI.nc' + +auxfiles: + land_mask: + 'LAND_MASK.CRI.nc' + scale_factor: + 'CLM4.SCALE_FACTOR.JPL.MSCNv02CRI.nc' + +grace_table: + 'GRACE_GRACE-FO_Months_RL06.csv' diff --git a/esmvaltool/cmorizers/data/cmor_config/HWSD.yml b/esmvaltool/cmorizers/data/cmor_config/HWSD.yml new file mode 100644 index 0000000000..566438ee71 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/HWSD.yml @@ -0,0 +1,26 @@ +--- +# Filename (will be extended by variable name) +filename: 'HWSD_SOIL_CLM_RES.nc4' + +# Common global attributes for Cmorizer output +attributes: + dataset_id: HWSD + version: '1.2' + tier: 3 + modeling_realm: reanaly + project_id: OBS + source: 'https://daac.ornl.gov/cgi-bin/dsviewer.pl?ds_id=1247' + reference: 'hwsd' + comment: '' + +# Variables to cmorize +variables: + cSoil: + mip: Lmon + raw: AWT_SOC + areacella: + mip: fx + raw: areaupsc + sftlf: + mip: fx + raw: landfrac diff --git a/esmvaltool/cmorizers/data/cmor_config/HadCRUT5.yml b/esmvaltool/cmorizers/data/cmor_config/HadCRUT5.yml new file mode 100644 index 0000000000..8a5faf8c0d --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/HadCRUT5.yml @@ -0,0 +1,33 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: HadCRUT5 + version: '5.0.1.0' + tier: 2 + modeling_realm: ground + project_id: OBS + source: 'https://crudata.uea.ac.uk/cru/data/temperature/' + reference: 'hadcrut5' + comment: '2 versions, statistical infilling and no-infilling' + +climatology: + filename: 'absolute_v5.nc' + +filenames: + analysis: + 'HadCRUT.5.0.1.0.analysis.anomalies.ensemble_mean.nc' + noninfilled: + 'HadCRUT.5.0.1.0.anomalies.ensemble_mean.nc' + +# Variables to cmorize +variables: + tas: + mip: Amon + raw: tas_mean + raw_units: K + raw_clim: tem + raw_clim_units: celsius + tasa: + mip: Amon + raw: tas_mean + raw_units: K diff --git a/esmvaltool/cmorizers/data/cmor_config/JMA-TRANSCOM.yml b/esmvaltool/cmorizers/data/cmor_config/JMA-TRANSCOM.yml new file mode 100644 index 0000000000..b9796b315f --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/JMA-TRANSCOM.yml @@ -0,0 +1,24 @@ +--- +# Filename +filename: 'jma_2018.tar.gz' +binary_prefix: 'jma_2018' + +# Common global attributes for Cmorizer output +attributes: + dataset_id: JMA-TRANSCOM + version: '2018' + tier: 3 + modeling_realm: reanaly + project_id: OBS + source: 'http://www.globalcarbonatlas.org/en/content/atmospheric-inversions' + reference: 'jma-transcom' + comment: '' + +# Variables to cmorize +variables: + nbp: + mip: Lmon + positive: down + fgco2: + mip: Omon + positive: down diff --git a/esmvaltool/cmorizers/data/cmor_config/JRA-25.yml b/esmvaltool/cmorizers/data/cmor_config/JRA-25.yml new 
file mode 100644 index 0000000000..94b1a480da --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/JRA-25.yml @@ -0,0 +1,50 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: JRA-25 + version: '1' + tier: 2 + modeling_realm: reanaly + project_id: OBS6 + source: 'https://esgf.nccs.nasa.gov/thredds/fileServer/CREATE-IP/reanalysis/JMA/JRA-25/JRA-25/' + reference: 'jra_25' + comment: | + '' + +# Variables to cmorize +variables: + clt_month: + short_name: clt + mip: Amon + raw: clt + file: clt_Amon_reanalysis_JRA-25_197901-201312.nc + hus_month: + short_name: hus + mip: Amon + raw: hus + file: hus_Amon_reanalysis_JRA-25_197901-201312.nc + prw_month: + short_name: prw + mip: Amon + raw: prw + file: prw_Amon_reanalysis_JRA-25_197901-201312.nc + rlut_month: + short_name: rlut + mip: Amon + raw: rlut + file: rlut_Amon_reanalysis_JRA-25_197901-201312.nc + rlutcs_month: + short_name: rlutcs + mip: Amon + raw: rlutcs + file: rlutcs_Amon_reanalysis_JRA-25_197901-201312.nc + rsut_month: + short_name: rsut + mip: Amon + raw: rsut + file: rsut_Amon_reanalysis_JRA-25_197901-201312.nc + rsutcs_month: + short_name: rsutcs + mip: Amon + raw: rsutcs + file: rsutcs_Amon_reanalysis_JRA-25_197901-201312.nc diff --git a/esmvaltool/cmorizers/data/cmor_config/JRA-55.yml b/esmvaltool/cmorizers/data/cmor_config/JRA-55.yml new file mode 100644 index 0000000000..a4f4c8b379 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/JRA-55.yml @@ -0,0 +1,103 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: JRA-55 + version: '1' + tier: 2 + modeling_realm: reanaly + project_id: OBS6 + source: 'https://rda.ucar.edu/datasets/ds628.1/' + reference: 'jra_55' + comment: | + '' + +# Variables to cmorize +variables: + cli: + short_name: cli + mip: Amon + file: fcst_p125.229_ciwc.{year}01_{year}12.grb + + clivi: + short_name: clivi + mip: Amon + file: fcst_column125.058_cice.{year}01_{year}12.grb + + clw: + short_name: clw + mip: Amon + file: fcst_p125.228_clwc.{year}01_{year}12.grb + + clwvi: + short_name: clwvi + mip: Amon + operator: sum + files: + - 'fcst_column125.058_cice.{year}01_{year}12.grb' + - 'fcst_column125.227_cw.{year}01_{year}12.grb' + + clt: + short_name: clt + mip: Amon + file: fcst_surf125.071_tcdc.{year}01_{year}12.grb + + prw: + short_name: prw + mip: Amon + file: fcst_column125.054_pwat.{year}01_{year}12.grb + + rlus: + short_name: rlus + mip: Amon + typeOfLevel: surface + file: fcst_phy2m125.212_ulwrf.{year}01_{year}12.grb + + rlut: + short_name: rlut + mip: Amon + typeOfLevel: nominalTop + file: fcst_phy2m125.212_ulwrf.{year}01_{year}12.grb + + rlutcs: + short_name: rlutcs + mip: Amon + file: fcst_phy2m125.162_csulf.{year}01_{year}12.grb + + rsus: + short_name: rsus + mip: Amon + typeOfLevel: surface + file: fcst_phy2m125.211_uswrf.{year}01_{year}12.grb + + rsuscs: + short_name: rsuscs + mip: Amon + typeOfLevel: surface + file: fcst_phy2m125.160_csusf.{year}01_{year}12.grb + + rsut: + short_name: rsut + mip: Amon + typeOfLevel: nominalTop + file: fcst_phy2m125.211_uswrf.{year}01_{year}12.grb + + rsutcs: + short_name: rsutcs + mip: Amon + typeOfLevel: nominalTop + file: fcst_phy2m125.160_csusf.{year}01_{year}12.grb + + ta: + short_name: ta + mip: Amon + file: anl_p125.011_tmp.{year}01_{year}12.grb + + tas: + short_name: tas + mip: Amon + file: anl_surf125.011_tmp.{year}01_{year}12.grb + + wap: + short_name: wap + mip: Amon + file: anl_p125.039_vvel.{year}01_{year}12.grb diff --git a/esmvaltool/cmorizers/data/cmor_config/Kadow2020.yml 
b/esmvaltool/cmorizers/data/cmor_config/Kadow2020.yml new file mode 100644 index 0000000000..3d62141e9d --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/Kadow2020.yml @@ -0,0 +1,24 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: Kadow2020 + version: '5.0.1.0' + tier: 2 + modeling_realm: ground + project_id: OBS + source: 'http://users.met.fu-berlin.de/~ChristopherKadow/' + reference: 'kadow20natgeosc' + comment: | + 'Temperature anomaly with respect to the period 1961-1990, + ensemble mean, infilled by AI' + +filenames: + 5.0.1.0: + 'HadCRUT.5.0.1.0.anomalies.Kadow_et_al_2020_20crAI-infilled.ensemble_mean_185001-202012.nc' + +# Variables to cmorize +variables: + tasa: + mip: Amon + raw: tas + raw_units: K diff --git a/esmvaltool/cmorizers/data/cmor_config/LAI3g.yml b/esmvaltool/cmorizers/data/cmor_config/LAI3g.yml new file mode 100644 index 0000000000..bb099a911a --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/LAI3g.yml @@ -0,0 +1,28 @@ +--- +# Filename +filename: 'AVHRR-LAI3gV1-1981-2018.zip' +binary_prefix: 'AVHRR_BULAI3g_v1_' + +# Regrid to coarser resolution +# This might be necessary since the original dataset has a very fine grid and +# processing it with ESMValTool can take a very long time (> 30 min). To use +# the original resolution (1/12°), simply remove the following section. +regrid: + target_grid: 1x1 + scheme: linear + +# Common global attributes for Cmorizer output +attributes: + dataset_id: LAI3g + version: '1_regridded' + tier: 3 + modeling_realm: reanaly + project_id: OBS + source: 'http://cliveg.bu.edu/modismisr/lai3g-fpar3g.html' + reference: 'lai3g' + comment: '' + +# Variables to cmorize +variables: + lai: + mip: Lmon diff --git a/esmvaltool/cmorizers/data/cmor_config/LandFlux-EVAL.yml b/esmvaltool/cmorizers/data/cmor_config/LandFlux-EVAL.yml new file mode 100644 index 0000000000..35bcc2301d --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/LandFlux-EVAL.yml @@ -0,0 +1,23 @@ +--- +# Filename +filename: 'LandFluxEVAL.merged.89-05.monthly.all.nc' + +# Common global attributes for Cmorizer output +attributes: + dataset_id: LandFlux-EVAL + version: 'Oct13' + tier: 3 + modeling_realm: reanaly + project_id: OBS + source: 'https://data.iac.ethz.ch/landflux/' + reference: 'landflux-eval' + comment: '' + +# Variables to cmorize +variables: + et: + mip: Lmon + raw: ET_mean + etStderr: + mip: Lmon + raw: ET_sd diff --git a/esmvaltool/cmorizers/data/cmor_config/Landschuetzer2016.yml b/esmvaltool/cmorizers/data/cmor_config/Landschuetzer2016.yml new file mode 100644 index 0000000000..8515351d31 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/Landschuetzer2016.yml @@ -0,0 +1,28 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: Landschuetzer2016 + version: 'v2016' + tier: 2 + modeling_realm: clim + project_id: OBS + source: 'https://www.nodc.noaa.gov/archive/arc0105/0160558/3.3/data/0-data/' + reference: 'landschuetzer2016' + comment: '' + +# Variables to cmorize +variables: + fgco2: + mip: Omon + # Match CMOR variables with input file one + raw: fgco2_smoothed + # input file name + file: spco2_1982-2015_MPI_SOM-FFN_v2016.nc + spco2: + mip: Omon + raw: spco2_smoothed + file: spco2_1982-2015_MPI_SOM-FFN_v2016.nc + dpco2: + mip: Omon + raw: dco2_smoothed + file: spco2_1982-2015_MPI_SOM-FFN_v2016.nc diff --git a/esmvaltool/cmorizers/data/cmor_config/Landschuetzer2020.yml b/esmvaltool/cmorizers/data/cmor_config/Landschuetzer2020.yml new file mode 100644 index 0000000000..8f52ef0403 
--- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/Landschuetzer2020.yml @@ -0,0 +1,19 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: Landschuetzer2020 + version: '1.1' + tier: 2 + modeling_realm: clim + project_id: OBS6 + source: 'https://www.ncei.noaa.gov/data/oceans/ncei/ocads/data/0209633/' + reference: 'landschuetzer2020' + comment: '' + +# Variables to cmorize +variables: + spco2: + filename: MPI-ULB-SOM_FFN_clim.nc + mip: Omon + raw_name: pco2 + raw_units: '1e-6 atm' diff --git a/esmvaltool/cmorizers/data/cmor_config/MERRA2.yml b/esmvaltool/cmorizers/data/cmor_config/MERRA2.yml new file mode 100644 index 0000000000..564b460d04 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/MERRA2.yml @@ -0,0 +1,243 @@ +--- +# Global attributes of NetCDF file +attributes: + dataset_id: MERRA2 + project_id: OBS6 + tier: 3 + version: '5.12.4' + modeling_realm: reanaly + source: '' + reference: 'merra2' + comment: | + 'Contains modified MERRA-2 data' + +# Variables to CMORize +variables: + # monthly frequency + sm_monthly: + short_name: sm + mip: Lmon + raw: SFMC + file: 'MERRA2_???.tavgM_2d_lnd_Nx.{year}??.nc4' + # monthly frequency + # 2 dimensional variables + clt_monthly: + short_name: clt + mip: Amon + raw: CLDTOT + file: 'MERRA2_???.tavgM_2d_rad_Nx.{year}??.nc4' + pr_monthly: + short_name: pr + mip: Amon + raw: PRECTOT + file: 'MERRA2_???.tavgM_2d_flx_Nx.{year}??.nc4' + evspsbl_monthly: + short_name: evspsbl + mip: Amon + raw: EVAP + file: 'MERRA2_???.tavgM_2d_flx_Nx.{year}??.nc4' + hfss_monthly: + short_name: hfss + mip: Amon + raw: HFLUX + file: 'MERRA2_???.tavgM_2d_flx_Nx.{year}??.nc4' + hfls_monthly: + short_name: hfls + mip: Amon + raw: EFLUX + file: 'MERRA2_???.tavgM_2d_flx_Nx.{year}??.nc4' + huss_monthly: + short_name: huss + mip: Amon + raw: QV2M + file: 'MERRA2_???.tavgM_2d_slv_Nx.{year}??.nc4' + prc_monthly: + short_name: prc + mip: Amon + raw: PRECCON + file: 'MERRA2_???.tavgM_2d_flx_Nx.{year}??.nc4' + prsn_monthly: + short_name: prsn + mip: Amon + raw: PRECSNO + file: 'MERRA2_???.tavgM_2d_flx_Nx.{year}??.nc4' + prw_monthly: + short_name: prw + mip: Amon + raw: TQV + file: 'MERRA2_???.tavgM_2d_slv_Nx.{year}??.nc4' + ps_monthly: + short_name: ps + mip: Amon + raw: PS + file: 'MERRA2_???.tavgM_2d_slv_Nx.{year}??.nc4' + psl_monthly: + short_name: psl + mip: Amon + raw: SLP + file: 'MERRA2_???.tavgM_2d_slv_Nx.{year}??.nc4' + rlds_monthly: + short_name: rlds + mip: Amon + raw: LWGAB + file: 'MERRA2_???.tavgM_2d_rad_Nx.{year}??.nc4' + rldscs_monthly: + short_name: rldscs + mip: Amon + raw: LWGABCLR + file: 'MERRA2_???.tavgM_2d_rad_Nx.{year}??.nc4' + rlus_monthly: + short_name: rlus + mip: Amon + raw: LWGEM + file: 'MERRA2_???.tavgM_2d_rad_Nx.{year}??.nc4' + rlut_monthly: + short_name: rlut + mip: Amon + raw: LWTUP + file: 'MERRA2_???.tavgM_2d_rad_Nx.{year}??.nc4' + rlutcs_monthly: + short_name: rlutcs + mip: Amon + raw: LWTUPCLR + file: 'MERRA2_???.tavgM_2d_rad_Nx.{year}??.nc4' + rsds_monthly: + short_name: rsds + mip: Amon + raw: SWGDN + file: 'MERRA2_???.tavgM_2d_rad_Nx.{year}??.nc4' + rsdscs_monthly: + short_name: rsdscs + mip: Amon + raw: SWGDNCLR + file: 'MERRA2_???.tavgM_2d_rad_Nx.{year}??.nc4' + rsdt_monthly: + short_name: rsdt + mip: Amon + raw: SWTDN + file: 'MERRA2_???.tavgM_2d_rad_Nx.{year}??.nc4' + tas_monthly: + short_name: tas + mip: Amon + raw: T2M + file: 'MERRA2_???.tavgM_2d_slv_Nx.{year}??.nc4' + tasmax_monthly: + short_name: tasmax + mip: Amon + raw: T2MMAX + file: 'MERRA2_???.statM_2d_slv_Nx.{year}??.nc4' + 
tasmin_monthly: + short_name: tasmin + mip: Amon + raw: T2MMIN + file: 'MERRA2_???.statM_2d_slv_Nx.{year}??.nc4' + tauu_monthly: + short_name: tauu + mip: Amon + raw: TAUX + file: 'MERRA2_???.tavgM_2d_flx_Nx.{year}??.nc4' + tauv_monthly: + short_name: tauv + mip: Amon + raw: TAUY + file: 'MERRA2_???.tavgM_2d_flx_Nx.{year}??.nc4' + ts_monthly: + short_name: ts + mip: Amon + raw: TS + file: 'MERRA2_???.tavgM_2d_slv_Nx.{year}??.nc4' + uas_monthly: + short_name: uas + mip: Amon + raw: U10M + file: 'MERRA2_???.tavgM_2d_slv_Nx.{year}??.nc4' + vas_monthly: + short_name: vas + mip: Amon + raw: V10M + file: 'MERRA2_???.tavgM_2d_slv_Nx.{year}??.nc4' + rsus_monthly: + short_name: rsus + mip: Amon + raw: SWGDN-SWGNT + file: 'MERRA2_???.tavgM_2d_rad_Nx.{year}??.nc4' + rsuscs_monthly: + short_name: rsuscs + mip: Amon + raw: SWGDNCLR-SWGNTCLR + file: 'MERRA2_???.tavgM_2d_rad_Nx.{year}??.nc4' + rsut_monthly: + short_name: rsut + mip: Amon + raw: SWTDN-SWTNT + file: 'MERRA2_???.tavgM_2d_rad_Nx.{year}??.nc4' + rsutcs_monthly: + short_name: rsutcs + mip: Amon + raw: "SWTDN-SWTNTCLR" + file: 'MERRA2_???.tavgM_2d_rad_Nx.{year}??.nc4' + ta_monthly: + short_name: ta + mip: Amon + raw: "T" + file: 'MERRA2_???.instM_3d_ana_Np.{year}??.nc4' + ua_monthly: + short_name: ua + mip: Amon + raw: "U" + file: 'MERRA2_???.instM_3d_ana_Np.{year}??.nc4' + va_monthly: + short_name: va + mip: Amon + raw: "V" + file: 'MERRA2_???.instM_3d_ana_Np.{year}??.nc4' + tro3_monthly: + short_name: tro3 + mip: Amon + raw: "O3" + file: 'MERRA2_???.instM_3d_ana_Np.{year}??.nc4' + zg_monthly: + short_name: zg + mip: Amon + raw: "H" + file: 'MERRA2_???.instM_3d_ana_Np.{year}??.nc4' + hus_monthly: + short_name: hus + mip: Amon + raw: "QV" + file: 'MERRA2_???.instM_3d_ana_Np.{year}??.nc4' + wap_monthly: + short_name: wap + mip: Amon + raw: "OMEGA" + file: 'MERRA2_???.instM_3d_asm_Np.{year}??.nc4' + hur_monthly: + short_name: hur + mip: Amon + raw: "RH" + file: 'MERRA2_???.instM_3d_asm_Np.{year}??.nc4' + cli_monthly: + short_name: cli + mip: Amon + raw: "QI" + file: 'MERRA2_???.tavgM_3d_cld_Np.{year}??.nc4' + clw_monthly: + short_name: clw + mip: Amon + raw: "QL" + file: 'MERRA2_???.tavgM_3d_cld_Np.{year}??.nc4' + cl_monthly: + short_name: cl + mip: Amon + raw: "CLOUD" + file: 'MERRA2_???.tavgM_3d_cld_Np.{year}??.nc4' + clivi_monthly: + short_name: clivi + mip: Amon + raw: TQI + file: 'MERRA2_???.instM_2d_asm_Nx.{year}??.nc4' + clwvi_monthly: + short_name: clwvi + mip: Amon + raw: TQI+TQL + file: 'MERRA2_???.instM_2d_asm_Nx.{year}??.nc4' diff --git a/esmvaltool/cmorizers/data/cmor_config/MLS-AURA.yml b/esmvaltool/cmorizers/data/cmor_config/MLS-AURA.yml new file mode 100644 index 0000000000..5da7ec8047 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/MLS-AURA.yml @@ -0,0 +1,26 @@ +--- +extension: he5 +file_pattern: 'MLS-Aura_L2GP-{var}_v04-' +start_year: 2005 +end_year: 2019 +mip: Amon # Daily data cannot be read by ESMValTool since some days are missing + +# Common global attributes for Cmorizer output +attributes: + dataset_id: MLS-AURA + version: '004' + tier: 3 + modeling_realm: sat + project_id: OBS6 + source: 'https://disc.gsfc.nasa.gov/datasets/ML2RHI_004/summary;https://disc.gsfc.nasa.gov/datasets/ML2T_004/summary' + reference: 'mls-aura' + comment: '' + +# Variables to cmorize +variables: + hur: + raw_var: 'RHI' # in RHI file + cut_levels_outside: [0.002, 320] # hPa + hurStderr: + raw_var: 'RHIPrecision' # in RHI file + cut_levels_outside: [0.002, 320] # hPa diff --git a/esmvaltool/cmorizers/data/cmor_config/MOBO-DIC2004-2019.yml 
b/esmvaltool/cmorizers/data/cmor_config/MOBO-DIC2004-2019.yml new file mode 100644 index 0000000000..7e80dc6634 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/MOBO-DIC2004-2019.yml @@ -0,0 +1,19 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: MOBO-DIC2004-2019 + version: '2.3' + tier: 2 + modeling_realm: reanaly + project_id: OBS6 + source: 'https://www.nodc.noaa.gov/archive/arc0211/0277099/2.3/data/0-data/' + reference: 'mobo_dic2004_2019' + +# Variables to cmorize +variables: + dissic: + filename: MPI_MOBO-DIC_2004-2019_v2.nc + mip: Omon + raw_name: DIC + raw_units: '1e-6 mol kg-1' + comment: 'The original units of this variable are mumol/kg. To convert to the CMOR units mol/m3, we assume a constant sea water density of 1032 kg/m3, which is approximately the sea water density for T=4°C, salinity=35PSU, and p=100bar according to the UNESCO formula (UNESCO, 1981, Tenth report of the joint panel on oceanographic tables and standards, UNESCO Technical Papers in Marine Science, see https://www.wkcgroup.com/tools-room/seawater-density-calculator/ and https://link.springer.com/content/pdf/bbm:978-3-319-18908-6/1.pdf).' diff --git a/esmvaltool/cmorizers/data/cmor_config/MOBO-DIC_MPIM.yml b/esmvaltool/cmorizers/data/cmor_config/MOBO-DIC_MPIM.yml new file mode 100644 index 0000000000..c25389fb12 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/MOBO-DIC_MPIM.yml @@ -0,0 +1,19 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: MOBO-DIC_MPIM + version: '1.1' + tier: 2 + modeling_realm: clim + project_id: OBS6 + source: 'https://www.ncei.noaa.gov/data/oceans/ncei/ocads/data/0221526/' + reference: 'mobo_dic_mpim' + +# Variables to cmorize +variables: + dissic: + filename: MOBO-DIC_MPIM_monthly_clim.nc + mip: Omon + raw_name: DIC + raw_units: '1e-6 mol kg-1' + comment: 'The original units of this variable are mumol/kg. To convert to the CMOR units mol/m3, we assume a constant sea water density of 1032 kg/m3, which is approximately the sea water density for T=4°C, salinity=35PSU, and p=100bar according to the UNESCO formula (UNESCO, 1981, Tenth report of the joint panel on oceanographic tables and standards, UNESCO Technical Papers in Marine Science, see https://www.wkcgroup.com/tools-room/seawater-density-calculator/ and https://link.springer.com/content/pdf/bbm:978-3-319-18908-6/1.pdf).' 
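Both MOBO-DIC configs above document the same conversion: dissolved inorganic carbon reported in mumol/kg (1e-6 mol kg-1) is multiplied by an assumed constant sea water density of 1032 kg/m3 to obtain the CMOR unit mol/m3. A short sketch of that arithmetic; the CMORizer's own unit handling is not shown in this diff:

```python
# Sketch of the unit conversion described in the dataset comments:
# value [1e-6 mol kg-1] * density [kg m-3] -> concentration [mol m-3].
RHO_SW = 1032.0  # kg m-3, the constant density assumed in the comment


def micromol_per_kg_to_mol_per_m3(value, rho=RHO_SW):
    """Convert 1e-6 mol kg-1 to mol m-3 at constant density rho."""
    return value * 1e-6 * rho


# 2000 mumol/kg of DIC -> ~2.064 mol m-3
print(micromol_per_kg_to_mol_per_m3(2000.0))
```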
diff --git a/esmvaltool/cmorizers/data/cmor_config/MTE.yml b/esmvaltool/cmorizers/data/cmor_config/MTE.yml new file mode 100644 index 0000000000..144f3b1f19 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/MTE.yml @@ -0,0 +1,22 @@ +--- +# Filename (will be extended by timestamp to match downloaded file) +filename: 'EnsembleGPP_GL.nc' + +# Common global attributes for Cmorizer output +attributes: + dataset_id: MTE + version: 'May12' + tier: 3 + modeling_realm: reanaly + project_id: OBS + source: 'http://www.bgc-jena.mpg.de/geodb/BGI/Home' + reference: 'mte' + comment: '' + +# Variables to cmorize +variables: + gpp: + mip: Lmon + gppStderr: + mip: Lmon + raw: std diff --git a/esmvaltool/cmorizers/data/cmor_config/NCEP-DOE-R2.yml b/esmvaltool/cmorizers/data/cmor_config/NCEP-DOE-R2.yml new file mode 100644 index 0000000000..f18f76f5a9 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/NCEP-DOE-R2.yml @@ -0,0 +1,63 @@ +--- +# Global attributes of NetCDF file +attributes: + dataset_id: NCEP-DOE-R2 + project_id: OBS6 + tier: 2 + version: '2' + modeling_realm: reanaly + source: 'https://psl.noaa.gov/data/gridded/data.ncep.reanalysis2.html' + reference: 'ncep-doe-r2' + comment: | + '' + +# Variables to CMORize +variables: + # monthly frequency + clt_month: + short_name: clt + mip: Amon + raw: tcdc + file: 'tcdc\.eatm\.mon\.mean\.nc' + hur_month: + short_name: hur + mip: Amon + raw: rhum + file: 'rhum\.mon\.mean\.nc' + prw_month: + short_name: prw + mip: Amon + raw: pr_wtr + file: 'pr_wtr\.eatm\.mon\.mean\.nc' + ta_month: + short_name: ta + mip: Amon + raw: air + file: 'air\.mon\.mean\.nc' + wap_month: + short_name: wap + mip: Amon + raw: omega + file: 'omega\.mon\.mean\.nc' + pr_month: + short_name: pr + mip: Amon + raw: prate + file: 'prate.sfc.mon.mean.nc' + tauu_month: + short_name: tauu + mip: Amon + raw: uflx + file: 'uflx.sfc.mon.mean.nc' + make_negative: true + tauv_month: + short_name: tauv + mip: Amon + raw: vflx + file: 'vflx.sfc.mon.mean.nc' + make_negative: true + tos_month: + short_name: tos + mip: Amon + raw: skt + file: 'skt.sfc.mon.mean.nc' diff --git a/esmvaltool/cmorizers/data/cmor_config/NCEP-NCAR-R1.yml b/esmvaltool/cmorizers/data/cmor_config/NCEP-NCAR-R1.yml new file mode 100644 index 0000000000..b012ae9cb4 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/NCEP-NCAR-R1.yml @@ -0,0 +1,143 @@ +--- +# Global attributes of NetCDF file +attributes: + dataset_id: NCEP-NCAR-R1 + project_id: OBS6 + tier: 2 + version: '1' + modeling_realm: reanaly + source: 'https://psl.noaa.gov/data/gridded/data.ncep.reanalysis.html' + reference: 'ncep-ncar-r1' + comment: | + '' + +# Variables to CMORize +variables: +# daily frequency + pr_day: + short_name: pr + mip: day + raw: prate + file: 'surface\/prate\.sfc\.gauss\.[0-9]{4}\.nc' + rlut_day: + short_name: rlut + mip: day + raw: ulwrf + file: 'surface\/ulwrf\.ntat\.gauss\.[0-9]{4}\.nc' + ua_day: + short_name: ua + mip: day + raw: uwnd + file: 'pressure\/uwnd\.[0-9]{4}\.nc' + va_day: + short_name: va + mip: day + raw: vwnd + file: 'pressure\/vwnd\.[0-9]{4}\.nc' + +# monthly frequency + clt_month: + short_name: clt + mip: Amon + raw: tcdc + file: 'surface\/tcdc\.eatm\.mon\.mean\.nc' + hur_month: + short_name: hur + mip: Amon + raw: rhum + file: 'pressure\/rhum\.mon\.mean\.nc' + hurs_month: + short_name: hurs + mip: Amon + raw: rhum + file: 'surface\/rhum\.mon\.mean\.nc' + hus_month: + short_name: hus + mip: Amon + raw: shum + file: 'pressure\/shum\.mon\.mean\.nc' + pr_month: + short_name: pr + mip: Amon + raw: prate + file: 
'surface\/prate\.mon\.mean\.nc' + prw_month: + short_name: prw + mip: Amon + raw: pr_wtr + file: 'surface\/pr_wtr\.mon\.mean\.nc' + psl_month: + short_name: psl + mip: Amon + raw: slp + file: 'surface\/slp\.mon\.mean\.nc' + rlut_month: + short_name: rlut + mip: Amon + raw: ulwrf + file: 'surface\/ulwrf\.ntat\.mon\.mean\.nc' + rlutcs_month: + short_name: rlutcs + mip: Amon + raw: csulf + file: 'surface\/csulf\.ntat\.mon\.mean\.nc' + rsut_month: + short_name: rsut + mip: Amon + raw: uswrf + file: 'surface\/uswrf\.ntat\.mon\.mean\.nc' + rsutcs_month: + short_name: rsutcs + mip: Amon + raw: csusf + file: 'surface\/csusf\.ntat\.mon\.mean\.nc' + sfcWind_month: + short_name: sfcWind + mip: Amon + raw: wspd + file: 'surface\/wspd\.mon\.mean\.nc' + ta_month: + short_name: ta + mip: Amon + raw: air + file: 'pressure\/air\.mon\.mean\.nc' + tas_month: + short_name: tas + mip: Amon + raw: air + file: 'surface\/air\.2m\.mon\.mean\.nc' + tasmax_month: + short_name: tasmax + mip: Amon + raw: tmax + file: 'surface\/tmax\.2m\.mon\.mean\.nc' + tasmin_month: + short_name: tasmin + mip: Amon + raw: tmin + file: 'surface\/tmin\.2m\.mon\.mean\.nc' + ts_month: + short_name: ts + mip: Amon + raw: air + file: 'surface\/air\.mon\.mean\.nc' + ua_month: + short_name: ua + mip: Amon + raw: uwnd + file: 'pressure\/uwnd\.mon\.mean\.nc' + va_month: + short_name: va + mip: Amon + raw: vwnd + file: 'pressure\/vwnd\.mon\.mean\.nc' + wap_month: + short_name: wap + mip: Amon + raw: omega + file: 'pressure\/omega\.mon\.mean\.nc' + zg_month: + short_name: zg + mip: Amon + raw: hgt + file: 'pressure\/hgt\.mon\.mean\.nc' diff --git a/esmvaltool/cmorizers/data/cmor_config/NDP.yml b/esmvaltool/cmorizers/data/cmor_config/NDP.yml new file mode 100644 index 0000000000..36b0caa7ae --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/NDP.yml @@ -0,0 +1,22 @@ +--- +# General attributes +filename: 'ndp017b.tar.gz' +delta_degrees: 0.08333 +missing_values: [-32768, -9999, -9998] + +# Common global attributes for Cmorizer output +attributes: + dataset_id: NDP + version: 017b + tier: 3 + modeling_realm: reanaly + project_id: OBS + source: 'https://data.ess-dive.lbl.gov/view/doi:10.3334/CDIAC/LUE.NDP017.2006' + reference: 'ndp' + comment: '' + +# Variables to cmorize +variables: + cVeg: + mip: Lmon + filename: 'med_c.tif.gz' diff --git a/esmvaltool/cmorizers/data/cmor_config/NOAA-CIRES-20CR-V2.yml b/esmvaltool/cmorizers/data/cmor_config/NOAA-CIRES-20CR-V2.yml new file mode 100644 index 0000000000..faded8f9d6 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/NOAA-CIRES-20CR-V2.yml @@ -0,0 +1,64 @@ +--- +# Global attributes of NetCDF file +attributes: + dataset_id: NOAA-CIRES-20CR-V2 + project_id: OBS6 + tier: 2 + version: 'v2' + modeling_realm: reanaly + source: 'https://psl.noaa.gov/data/gridded/data.20thC_ReanV2.html' + reference: 'noaa-cires-20cr-v2' + comment: | + '' + +# Variables to CMORize +variables: + # monthly frequency + clt_month: + short_name: clt + mip: Amon + raw: tcdc + file: 'tcdc.eatm.mon.mean.nc' + clwvi_month: + short_name: clwvi + mip: Amon + raw: cldwtr + file: 'cldwtr.eatm.mon.mean.nc' + prw_month: + short_name: prw + mip: Amon + raw: pr_wtr + file: 'pr_wtr.eatm.mon.mean.nc' + hus_month: + short_name: hus + mip: Amon + raw: shum + file: 'shum.mon.mean.nc' + rlut_month: + short_name: rlut + mip: Amon + raw: ulwrf + file: 'ulwrf.ntat.mon.mean.nc' + rsut_month: + short_name: rsut + mip: Amon + raw: uswrf + file: 'uswrf.ntat.mon.mean.nc' + pr_month: + short_name: pr + mip: Amon + raw: prate + file: 
'prate.mon.mean.nc' + tauu_month: + short_name: tauu + mip: Amon + raw: uflx + file: 'uflx.mon.mean.nc' + make_negative: true + tauv_month: + short_name: tauv + mip: Amon + raw: vflx + file: 'vflx.mon.mean.nc' + make_negative: true + diff --git a/esmvaltool/cmorizers/data/cmor_config/NOAA-CIRES-20CR-V3.yml b/esmvaltool/cmorizers/data/cmor_config/NOAA-CIRES-20CR-V3.yml new file mode 100644 index 0000000000..d16d5265e0 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/NOAA-CIRES-20CR-V3.yml @@ -0,0 +1,56 @@ +--- +# Global attributes of NetCDF file +attributes: + dataset_id: NOAA-CIRES-20CR-V3 + project_id: OBS6 + tier: 2 + version: 'v3' + modeling_realm: reanaly + source: 'https://psl.noaa.gov/data/gridded/data.20thC_ReanV3.html' + reference: 'noaa-cires-20cr-v3' + comment: | + '' + +# Variables to CMORize +variables: + # monthly frequency + clt_month: + short_name: clt + mip: Amon + raw: tcdc + file: 'tcdc.eatm.mon.mean.nc' + clwvi_month: + short_name: clwvi + mip: Amon + raw: cldwtr + file: 'cldwtr.eatm.mon.mean.nc' + prw_month: + short_name: prw + mip: Amon + raw: pr_wtr + file: 'pr_wtr.eatm.mon.mean.nc' + hus_month: + short_name: hus + mip: Amon + raw: shum + file: 'shum.mon.mean.nc' + rlut_month: + short_name: rlut + mip: Amon + raw: ulwrf + file: 'ulwrf.ntat.mon.mean.nc' + rsut_month: + short_name: rsut + mip: Amon + raw: uswrf + file: 'uswrf.ntat.mon.mean.nc' + rlutcs_month: + short_name: rlutcs + mip: Amon + raw: csulf + file: 'csulf.ntat.mon.mean.nc' + rsutcs_month: + short_name: rsutcs + mip: Amon + raw: csusf + file: 'csusf.ntat.mon.mean.nc' diff --git a/esmvaltool/cmorizers/data/cmor_config/NOAA-ERSSTv3b.yml b/esmvaltool/cmorizers/data/cmor_config/NOAA-ERSSTv3b.yml new file mode 100644 index 0000000000..b21baa4bdb --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/NOAA-ERSSTv3b.yml @@ -0,0 +1,20 @@ +--- +# Filename +filename: 'ersst.*.nc' + +# Common global attributes for Cmorizer output +attributes: + project_id: OBS6 + dataset_id: NOAA-ERSSTv3b + version: 'v3b' + tier: 2 + modeling_realm: reanaly + source: 'https://www1.ncdc.noaa.gov/pub/data/cmb/ersst/v3b/netcdf/' + reference: 'ersstv3b' + comment: '' + +# Variables to cmorize +variables: + tos: + mip: Omon + raw: sst diff --git a/esmvaltool/cmorizers/data/cmor_config/NOAA-ERSSTv5.yml b/esmvaltool/cmorizers/data/cmor_config/NOAA-ERSSTv5.yml new file mode 100644 index 0000000000..dcc36a504a --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/NOAA-ERSSTv5.yml @@ -0,0 +1,20 @@ +--- +# Filename +filename: 'ersst.v5.*.nc' + +# Common global attributes for Cmorizer output +attributes: + project_id: OBS6 + dataset_id: NOAA-ERSSTv5 + version: 'v5' + tier: 2 + modeling_realm: reanaly + source: 'https://doi.org/10.7289/V5T72FNM' + reference: 'ersstv5' + comment: '' + +# Variables to cmorize +variables: + tos: + mip: Omon + raw: sst diff --git a/esmvaltool/cmorizers/data/cmor_config/NOAA-MBL-CH4.yml b/esmvaltool/cmorizers/data/cmor_config/NOAA-MBL-CH4.yml new file mode 100644 index 0000000000..23e84a657d --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/NOAA-MBL-CH4.yml @@ -0,0 +1,18 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: NOAA-MBL-CH4 + version: '1.0' + tier: 2 + modeling_realm: atmos + project_id: OBS6 + source: 'https://gml.noaa.gov/webdata/ccgg/trends/ch4/ch4_mm_gl.csv' + reference: 'noaa-mbl-ch4' + +# Variables to cmorize +variables: + ch4s: + filename: ch4_mm_gl.csv + mip: Amon + raw_name: ch4 + raw_units: 'nmol mol-1' diff --git
a/esmvaltool/cmorizers/data/cmor_config/NOAAGlobalTemp.yml b/esmvaltool/cmorizers/data/cmor_config/NOAAGlobalTemp.yml new file mode 100644 index 0000000000..4f4ad8c609 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/NOAAGlobalTemp.yml @@ -0,0 +1,24 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: NOAAGlobalTemp + version: 'v5.0.0' + tier: 2 + modeling_realm: ground + project_id: OBS + source: 'https://www.ncei.noaa.gov/data/noaa-global-surface-temperature/v5/ + access/gridded/' + reference: 'noaaglobaltemp' + comment: | + 'Temperature anomaly with respect to the period 1971-2000' + +filenames: + gridded: + 'NOAAGlobalTemp_v5.0.0_gridded_s188001_e202205_c20220608T133245.nc' + +# Variables to cmorize +variables: + tasa: + mip: Amon + raw: anom + raw_units: K diff --git a/esmvaltool/cmorizers/data/cmor_config/NSIDC-0116-nh.yml b/esmvaltool/cmorizers/data/cmor_config/NSIDC-0116-nh.yml new file mode 100644 index 0000000000..4368b03604 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/NSIDC-0116-nh.yml @@ -0,0 +1,26 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: NSIDC-0116-nh + version: '4.1' + tier: 3 + modeling_realm: reanaly + project_id: OBS + source: 'https://nsidc.org/data/NSIDC-0116' + reference: 'nsidc-0116' + comment: '' + +# Variables to cmorize +variables: + usi: + mip: day + raw: sea_ice_x_velocity + compress: true + vsi: + mip: day + raw: sea_ice_y_velocity + compress: true + +custom: + create_areacello: true + grid_cell_size: 25067.53 diff --git a/esmvaltool/cmorizers/data/cmor_config/NSIDC-0116-sh.yml b/esmvaltool/cmorizers/data/cmor_config/NSIDC-0116-sh.yml new file mode 100644 index 0000000000..106a5d5e97 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/NSIDC-0116-sh.yml @@ -0,0 +1,26 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: NSIDC-0116-sh + version: '4.1' + tier: 3 + modeling_realm: reanaly + project_id: OBS + source: 'https://nsidc.org/data/NSIDC-0116' + reference: 'nsidc-0116' + comment: '' + +# Variables to cmorize +variables: + usi: + mip: day + raw: sea_ice_x_velocity + compress: true + vsi: + mip: day + raw: sea_ice_y_velocity + compress: true + +custom: + create_areacello: true + grid_cell_size: 25067.53 diff --git a/esmvaltool/cmorizers/data/cmor_config/NSIDC-G02202-sh.yml b/esmvaltool/cmorizers/data/cmor_config/NSIDC-G02202-sh.yml new file mode 100644 index 0000000000..0bdeea488a --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/NSIDC-G02202-sh.yml @@ -0,0 +1,23 @@ +--- +filename: seaice_conc_monthly_sh_{year}.*.nc +# Common global attributes for Cmorizer output +attributes: + dataset_id: NSIDC-G02202-sh + version: '4' + tier: 3 + modeling_realm: reanaly + project_id: OBS6 + source: 'https://nsidc.org/data/g02202/versions/4' + reference: 'nsidc-g02202' + comment: '' + +variables: + siconc: + mip: SImon + raw: cdr_seaice_conc_monthly + compress: true + + +custom: + create_areacello: true + area_file: pss25area_v3.dat diff --git a/esmvaltool/cmorizers/data/cmor_config/OSI-450-nh.yml b/esmvaltool/cmorizers/data/cmor_config/OSI-450-nh.yml new file mode 100644 index 0000000000..6a115cf20e --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/OSI-450-nh.yml @@ -0,0 +1,24 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: OSI-450-nh + version: 'v2' + tier: 2 + modeling_realm: reanaly + project_id: OBS + source: 'http://osisaf.met.no/p/ice/' + reference: 'osi-450' + comment: '' + +# Variables to 
cmorize (here use only filename prefix) +variables: + sic: + mip: [day, OImon] + raw: ice_conc + grid: ease2-250 + +custom: + create_areacello: true + grid_cell_size: 25000. + # Minimum number of days with data to generate daily files + min_days: 50 diff --git a/esmvaltool/cmorizers/data/cmor_config/OSI-450-sh.yml b/esmvaltool/cmorizers/data/cmor_config/OSI-450-sh.yml new file mode 100644 index 0000000000..47c22c6d18 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/OSI-450-sh.yml @@ -0,0 +1,24 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: OSI-450-sh + version: 'v2' + tier: 2 + modeling_realm: reanaly + project_id: OBS + source: 'http://osisaf.met.no/p/ice/' + reference: 'osi-450' + comment: '' + +# Variables to cmorize (here use only filename prefix) +variables: + sic: + mip: [day, OImon] + raw: ice_conc + grid: ease2-250 + +custom: + create_areacello: true + grid_cell_size: 25000. + # Minimum number of days with data to generate daily files + min_days: 50 diff --git a/esmvaltool/cmorizers/data/cmor_config/OceanSODA-ETHZ.yml b/esmvaltool/cmorizers/data/cmor_config/OceanSODA-ETHZ.yml new file mode 100644 index 0000000000..b037a3428e --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/OceanSODA-ETHZ.yml @@ -0,0 +1,47 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: OceanSODA-ETHZ + version: v2023 + tier: 2 + modeling_realm: reanaly + project_id: OBS6 + source: https://www.ncei.noaa.gov/data/oceans/ncei/ocads/data/0220059/ + reference: oceansoda_ethz + +# Variables to cmorize +variables: + areacello: + filename: OceanSODA_ETHZ-v2023.OCADS.01_1982-2022.nc + mip: Ofx + raw_name: area + co3os: + filename: OceanSODA_ETHZ-v2023.OCADS.01_1982-2022.nc + mip: Omon + raw_name: co3 + raw_units: '1e-6 mol kg-1' + comment: 'The original units of this variable are mumol/kg. To convert to the CMOR units mol/m3, we assume a constant sea water density of 1028 kg/m3, which is approximately the sea water density for T=4°C, salinity=35PSU, and p=0bar according to the UNESCO formula (UNESCO, 1981, Tenth report of the joint panel on oceanographic tables and standards, UNESCO Technical Papers in Marine Science, see https://www.wkcgroup.com/tools-room/seawater-density-calculator/ and https://link.springer.com/content/pdf/bbm:978-3-319-18908-6/1.pdf).' + dissicos: + filename: OceanSODA_ETHZ-v2023.OCADS.01_1982-2022.nc + mip: Omon + raw_name: dic + raw_units: '1e-6 mol kg-1' + comment: 'The original units of this variable are mumol/kg. To convert to the CMOR units mol/m3, we assume a constant sea water density of 1028 kg/m3, which is approximately the sea water density for T=4°C, salinity=35PSU, and p=0bar according to the UNESCO formula (UNESCO, 1981, Tenth report of the joint panel on oceanographic tables and standards, UNESCO Technical Papers in Marine Science, see https://www.wkcgroup.com/tools-room/seawater-density-calculator/ and https://link.springer.com/content/pdf/bbm:978-3-319-18908-6/1.pdf).' + fgco2: + filename: OceanSODA_ETHZ-v2023.OCADS.01_1982-2022.nc + mip: Omon + phos: + filename: OceanSODA_ETHZ-v2023.OCADS.01_1982-2022.nc + mip: Omon + raw_name: ph_total + raw_units: '1' + spco2: + filename: OceanSODA_ETHZ-v2023.OCADS.01_1982-2022.nc + mip: Omon + raw_units: '1e-6 atm' + talkos: + filename: OceanSODA_ETHZ-v2023.OCADS.01_1982-2022.nc + mip: Omon + raw_name: talk + raw_units: '1e-6 mol kg-1' + comment: 'The original units of this variable are mumol/kg. 
To convert to the CMOR units mol/m3, we assume a constant sea water density of 1028 kg/m3, which is approximately the sea water density for T=4°C, salinity=35PSU, and p=0bar according to the UNESCO formula (UNESCO, 1981, Tenth report of the joint panel on oceanographic tables and standards, UNESCO Technical Papers in Marine Science, see https://www.wkcgroup.com/tools-room/seawater-density-calculator/ and https://link.springer.com/content/pdf/bbm:978-3-319-18908-6/1.pdf).' diff --git a/esmvaltool/cmorizers/data/cmor_config/PERSIANN-CDR.yml b/esmvaltool/cmorizers/data/cmor_config/PERSIANN-CDR.yml new file mode 100644 index 0000000000..e5367fa16b --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/PERSIANN-CDR.yml @@ -0,0 +1,24 @@ +--- +# Filename (will be extended by date and config information) +extension: nc +input_file_pattern: 'PERSIANN-CDR_v01r01_*.nc' +start_year: 1983 +end_year: 2019 + +# Common global attributes for Cmorizer output +attributes: + dataset_id: PERSIANN-CDR + version: 'v01r01' + tier: 2 + modeling_realm: reanaly + project_id: OBS + source: 'https://www.ncei.noaa.gov/data/precipitation-persiann/access/' + reference: 'persiann-cdr' + comment: '' + +# Variables to cmorize +variables: + pr: + mip: day + raw_standard_name: precipitation_amount + raw_units: mm diff --git a/esmvaltool/cmorizers/data/cmor_config/PHC.yml b/esmvaltool/cmorizers/data/cmor_config/PHC.yml new file mode 100644 index 0000000000..4d85ca3cee --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/PHC.yml @@ -0,0 +1,32 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: PHC + version: '3' + tier: 2 + modeling_realm: clim + project_id: OBS6 + source: 'http://psc.apl.washington.edu/nonwp_projects/PHC/Data3.html' + reference: 'phc' + comment: 'The data are multiyear annual mean, but given Omon mip.' 
+ frequency: yr + +# Variables to cmorize (here use only filename prefix) +variables: + thetao: + mip: Omon + raw: temp + file: phc3.0_annual.nc + so: + mip: Omon + raw: salt + file: phc3.0_annual.nc + areacello: + mip: fx + raw: areacello + file: phc3.0_annual.nc + +# Custom dictionary for this cmorizer +custom: + # years to be analyzed + years: [1800, ] diff --git a/esmvaltool/cmorizers/data/cmor_config/PIOMAS.yml b/esmvaltool/cmorizers/data/cmor_config/PIOMAS.yml new file mode 100644 index 0000000000..95b64c9291 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/PIOMAS.yml @@ -0,0 +1,27 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: PIOMAS + version: '2.1' + tier: 2 + modeling_realm: reanaly + project_id: OBS + source: 'http://psc.apl.uw.edu/research/projects/arctic-sea-ice-volume-anomaly/data/model_grid' + reference: 'piomas' + comment: '' + +# Variables to cmorize (here use only filename prefix) +variables: + sithick: + mip: day + raw: hiday + type: scalar + units: m + + areacello: + mip: fx + type: scalar + +custom: + scalar_file: grid.dat + vector_file: grid.dat.pop diff --git a/esmvaltool/cmorizers/data/cmor_config/REGEN.yml b/esmvaltool/cmorizers/data/cmor_config/REGEN.yml new file mode 100644 index 0000000000..9f4dce18b1 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/REGEN.yml @@ -0,0 +1,23 @@ +--- +# Filename (will be extended by version) +filename: 'REGEN_AllStns_{version}_*.nc' + +# Common global attributes for Cmorizer output +attributes: + dataset_id: REGEN + version: 'V1-2019' + tier: 2 + modeling_realm: reanaly + project_id: OBS + source: 'https://researchdata.ands.org.au/rainfall-estimates-gridded-v1-2019/1408744' + reference: 'regen' + comment: '' + +# Variables to cmorize +variables: + pr: + mip: day + raw: p + raw_units: kg m-2 day-1 + calendar: gregorian + add_mon: True diff --git a/esmvaltool/cmorizers/data/cmor_config/Scripps-CO2-KUM.yml b/esmvaltool/cmorizers/data/cmor_config/Scripps-CO2-KUM.yml new file mode 100644 index 0000000000..d7bc28f18c --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/Scripps-CO2-KUM.yml @@ -0,0 +1,20 @@ +--- +# Filename +filename: 'monthly_flask_co2_kum.csv' + +# Common global attributes for Cmorizer output +attributes: + dataset_id: Scripps-CO2-KUM + version: '14-Oct-2021' + tier: 2 + modeling_realm: ground + project_id: OBS6 + source: 'https://scrippsco2.ucsd.edu/data/atmospheric_co2/kum.html' + reference: 'scripps_co2' + comment: 'Monthly average CO2 concentrations (ppm) derived from flask air samples taken at Cape Kumukahi, Hawaii: Latitude 19.5°N Longitude 154.8°W Elevation 3m. These data are subject to revision based on recalibration of standard gases.' 
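The `column_name` entry in the Scripps-CO2-KUM variables section below identifies the CO2 column by the labels it carries across three header rows of the flask CSV. A hypothetical sketch of matching such a multi-row header with the standard library (synthetic data; the actual parser lives in the Scripps cmorizer script and may work differently):

```python
# Hypothetical sketch: locate a column whose three header-row labels
# match a tuple like ('fit', '', '[ppm]'). The CSV content here is
# synthetic; the real flask file layout may differ.
import csv
import io

csv_text = "date,fit\n,\n,[ppm]\n2021-01,415.1\n2021-02,416.0\n"
rows = list(csv.reader(io.StringIO(csv_text)))
header = list(zip(rows[0], rows[1], rows[2]))  # one tuple per column
col = header.index(('fit', '', '[ppm]'))       # mirrors column_name
values = [float(row[col]) for row in rows[3:]]
print(values)  # [415.1, 416.0]
```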
+ +# Variables to cmorize +variables: + co2s: + mip: Amon + column_name: ['fit', '', '[ppm]'] diff --git a/esmvaltool/cmorizers/data/cmor_config/TCOM-CH4.yml b/esmvaltool/cmorizers/data/cmor_config/TCOM-CH4.yml new file mode 100644 index 0000000000..819b4ad2c8 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/TCOM-CH4.yml @@ -0,0 +1,18 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: TCOM-CH4 + version: '1.0' + tier: 2 + modeling_realm: reanaly + project_id: OBS6 + source: 'https://zenodo.org/record/7293740' + reference: 'tcom_ch4' + +# Variables to cmorize +variables: + ch4: + filename: zmch4_TCOM_plev_T2Dz_1991_2021.nc + mip: Amon + raw_name: zmch4 + raw_units: 'mol mol-1' diff --git a/esmvaltool/cmorizers/data/cmor_config/TCOM-N2O.yml b/esmvaltool/cmorizers/data/cmor_config/TCOM-N2O.yml new file mode 100644 index 0000000000..87a41c9aad --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/TCOM-N2O.yml @@ -0,0 +1,18 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: TCOM-N2O + version: '1.0' + tier: 2 + modeling_realm: reanaly + project_id: OBS6 + source: 'https://zenodo.org/record/7386001' + reference: 'tcom_n2o' + +# Variables to cmorize +variables: + n2o: + filename: zmn2o_TCOM_plev_T2Dz_1991_2021.nc + mip: Amon + raw_name: zmn2o + raw_units: '1e-3 mol mol-1' # Note: the netcdf attribute wrongly says "mol/mol". This has been confirmed by the author of the dataset. diff --git a/esmvaltool/cmorizers/data/cmor_config/WFDE5.yml b/esmvaltool/cmorizers/data/cmor_config/WFDE5.yml new file mode 100644 index 0000000000..0babc9cc19 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/WFDE5.yml @@ -0,0 +1,46 @@ +--- +# Filename (will be extended by variable name and reference) +filename: '{raw_name}_WFDE5_{reference}_*_v1.1.nc' + +# Common global attributes for Cmorizer output +attributes: + dataset_id: WFDE5 + version: 'v1.1' + tier: 2 + modeling_realm: ground + project_id: OBS + source: 'https://doi.org/10.24381/cds.20d54e34' + reference: 'wfde5' + comment: '' + +# Variables to cmorize +variables: + tas_CRU: + short_name: tas + raw_name: Tair + reference: CRU + mip: day + raw_units: K + calendar: gregorian + save_hourly: False + add_mon: True + pr_CRU-GPCC: + short_name: pr + raw_name: Rainf + raw_name_snow: Snowf + reference: CRU+GPCC + mip: day + raw_units: kg m-2 s-1 + calendar: gregorian + save_hourly: False + add_mon: True + pr_CRU: + short_name: pr + raw_name: Rainf + raw_name_snow: Snowf + reference: CRU + mip: day + raw_units: kg m-2 s-1 + calendar: gregorian + save_hourly: False + add_mon: True diff --git a/esmvaltool/cmorizers/data/cmor_config/WOA.yml b/esmvaltool/cmorizers/data/cmor_config/WOA.yml new file mode 100644 index 0000000000..e9e2b96f9b --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/WOA.yml @@ -0,0 +1,64 @@ +--- +# Common global attributes for Cmorizer output +attributes: + + dataset_id: WOA + tier: 2 + modeling_realm: clim + + # WOA13 + # project_id: OBS + # short_name: woa13 + # version: '2013v2' + # source: 'https://www.ncei.noaa.gov/data/oceans/woa/WOA13/DATAv2/' + # reference: 'woa2013v2' + + # WOA18 + project_id: OBS6 + short_name: woa18 + version: '2018' + source: 'https://www.ncei.noaa.gov/data/oceans/woa/WOA18/DATA/' + reference: 'woa2018' + + comment: '' + +# Variables to cmorize (here use only filename prefix) +# Temperature and salinity from 1981-2010 climatology (81B0) +variables: + thetao: + mip: Omon + raw_var: t_an + name: temperature + file: decav81B0_t + srf_var: 
tos
+  so:
+    mip: Omon
+    raw_var: s_an
+    name: salinity
+    file: decav81B0_s
+    srf_var: sos
+  o2:
+    mip: Oyr
+    raw_var: o_an
+    name: oxygen
+    file: all_o
+  no3:
+    mip: Oyr
+    raw_var: n_an
+    name: nitrate
+    file: all_n
+  po4:
+    mip: Oyr
+    raw_var: p_an
+    name: phosphate
+    file: all_p
+  si:
+    mip: Oyr
+    raw_var: i_an
+    name: silicate
+    file: all_i
+
+# Custom dictionary for this cmorizer
+custom:
+  reference_year: 2000
+  resolution: '1.00'
diff --git a/esmvaltool/cmorizers/data/cmorizer.py b/esmvaltool/cmorizers/data/cmorizer.py
new file mode 100755
index 0000000000..5e66b7a70f
--- /dev/null
+++ b/esmvaltool/cmorizers/data/cmorizer.py
@@ -0,0 +1,627 @@
+"""Download and formatting of non-ESGF datasets.
+
+This module adds new commands to ESMValTool that allow the user to
+download a set of observational and reanalysis datasets and to reformat
+them to ESMValTool's data format.
+"""
+import datetime
+import importlib
+import logging
+import os
+import shutil
+import subprocess
+import warnings
+from pathlib import Path
+
+import esmvalcore
+import yaml
+from esmvalcore._task import write_ncl_settings
+from esmvalcore.config import CFG
+from esmvalcore.config._logging import configure_logging
+
+from esmvaltool import ESMValToolDeprecationWarning
+from esmvaltool.cmorizers.data.utilities import read_cmor_config
+
+logger = logging.getLogger(__name__)
+datasets_file = os.path.join(os.path.dirname(__file__), 'datasets.yml')
+
+
+class _Formatter():
+    """
+    Class to manage the download and formatting of datasets.
+
+    Parameters
+    ----------
+    info : dict
+        Dataset information
+    """
+
+    def __init__(self, info):
+        self.datasets = []
+        self.datasets_info = info
+        self.config = ''
+
+    def start(self, command, datasets, config_file, config_dir, options):
+        """Read configuration and set up formatter for data processing.
+
+        Parameters
+        ----------
+        command: str
+            Name of the command to execute.
+        datasets: str
+            List of datasets to process, comma separated.
+        config_file: str
+            Config file to use. Option will be removed in v2.14.0.
+        config_dir: str
+            Config directory to use.
+        options: dict()
+            Extra options to overwrite configuration.
+ + """ + if isinstance(datasets, str): + self.datasets = datasets.split(',') + else: + self.datasets = datasets + + if config_file is not None: # remove in v2.14.0 + CFG.load_from_file(config_file) + elif config_dir is not None: + config_dir = Path( + os.path.expandvars(config_dir) + ).expanduser().absolute() + if not config_dir.is_dir(): + raise NotADirectoryError( + f"Invalid --config_dir given: {config_dir} is not an " + f"existing directory" + ) + CFG.update_from_dirs([config_dir]) + CFG.update(options) + self.config = CFG.start_session(f'data_{command}') + + if not os.path.isdir(self.run_dir): + os.makedirs(self.run_dir) + + # configure logging + log_files = configure_logging(output_dir=self.run_dir, + console_log_level=self.log_level) + logger.info("Writing program log files to:\n%s", "\n".join(log_files)) + + # run + timestamp1 = datetime.datetime.utcnow() + timestamp_format = "%Y-%m-%d %H:%M:%S" + + logger.info("Starting the CMORization Tool at time: %s UTC", + timestamp1.strftime(timestamp_format)) + + logger.info(70 * "-") + logger.info("input_dir = %s", self.rawobs) + # check if the inputdir actually exists + if not os.path.isdir(self.rawobs): + logger.error("Directory %s does not exist", self.rawobs) + raise ValueError + logger.info("output_dir = %s", self.output_dir) + logger.info(70 * "-") + + @property + def rawobs(self): + """Raw obs folder path.""" + return self.config["rootpath"]["RAWOBS"][0] + + @property + def output_dir(self): + """Output folder path.""" + return self.config.session_dir + + @property + def run_dir(self): + """Run dir folder path.""" + return self.config.run_dir + + @property + def log_level(self): + """Console log level.""" + return self.config['log_level'] + + @staticmethod + def _dataset_to_module(dataset): + return dataset.lower().replace('-', '_') + + def download(self, start_date, end_date, overwrite): + """Download all datasets. + + Parameters + ---------- + start_date: datetime + First date to download + end_date: datetime + Last date to download + overwrite: boolean + If True, download again existing files + """ + if not self.datasets: + logger.error('Missing datasets to download') + logger.info("Downloading original data...") + # master directory + failed_datasets = [] + for dataset in self.datasets: + try: + self.download_dataset(dataset, start_date, end_date, overwrite) + except ValueError: + logger.exception('Failed to download %s', dataset) + failed_datasets.append(dataset) + if failed_datasets: + logger.error('Download failed for datasets %s', failed_datasets) + return False + return True + + def download_dataset(self, dataset, start_date, end_date, overwrite): + """Download a single dataset. 
+
+        Parameters
+        ----------
+        dataset: str
+            Dataset name
+        start_date: datetime
+            First date to download
+        end_date: datetime
+            Last date to download
+        overwrite: boolean
+            If True, download existing files again
+        """
+        if not self.has_downloader(dataset):
+            raise ValueError(
+                f'Dataset {dataset} does not have an automatic downloader')
+        dataset_module = self._dataset_to_module(dataset)
+        logger.info('Downloading %s', dataset)
+        logger.debug("Download module: %s", dataset_module)
+        try:
+            downloader = importlib.import_module(
+                f'.{dataset_module}',
+                package='esmvaltool.cmorizers.data.downloaders.datasets')
+        except ImportError:
+            logger.exception('Could not find cmorizer for %s', dataset)
+            raise
+
+        downloader.download_dataset(self.config, dataset,
+                                    self.datasets_info['datasets'][dataset],
+                                    start_date, end_date, overwrite)
+        logger.info('%s downloaded', dataset)
+
+    def format(self, start, end, install):
+        """Format all available datasets.
+
+        Parameters
+        ----------
+        start: datetime
+            Start of the period to format
+        end: datetime
+            End of the period to format
+        install: bool
+            If True, automatically moves the data to the final location if
+            there is no data there.
+        """
+        logger.info("Running the CMORization scripts.")
+        # build the list of datasets to process
+        datasets = self._assemble_datasets()
+        if not datasets:
+            logger.warning("Check input: could not find required %s in %s",
+                           self.datasets, self.rawobs)
+        logger.info("Processing datasets %s", datasets)
+
+        # loop through tier/datasets to be cmorized
+        failed_datasets = []
+        for dataset in datasets:
+            if not self.format_dataset(dataset, start, end, install):
+                failed_datasets.append(dataset)
+
+        if failed_datasets:
+            raise RuntimeError(
+                f'Format failed for datasets {" ".join(failed_datasets)}'
+            )
+
+    @staticmethod
+    def has_downloader(dataset):
+        """Check if a given dataset has an automatic downloader.
+
+        Parameters
+        ----------
+        dataset : str
+            Name of the dataset to check
+
+        Returns
+        -------
+        bool
+            True if a downloader exists for the dataset, False otherwise
+        """
+        try:
+            importlib.import_module(
+                f'.{dataset.lower().replace("-", "_")}',
+                package='esmvaltool.cmorizers.data.downloaders.datasets')
+            return True
+        except ImportError:
+            return False
+
+    def _assemble_datasets(self):
+        """Get the list of datasets to process."""
+        # check for desired datasets only (if any)
+        # if not, walk all over rawobs dir
+        # assume a RAWOBS/TierX/DATASET input structure
+
+        # get all available tiers in source dir
+        tiers = [f'Tier{i}' for i in [2, 3]]
+        tiers = [
+            tier for tier in tiers
+            if os.path.exists(os.path.join(self.rawobs, tier))
+        ]
+        datasets = []
+        if self.datasets:
+            return self.datasets
+        for tier in tiers:
+            for dataset in os.listdir(os.path.join(self.rawobs, tier)):
+                datasets.append(dataset)
+
+        return datasets
+
+    def format_dataset(self, dataset, start, end, install):
+        """Format a single dataset.
+
+        Parameters
+        ----------
+        dataset: str
+            Dataset name
+        start: datetime
+            Start of the period to format
+        end: datetime
+            End of the period to format
+        install: bool
+            If True, automatically moves the data to the final location if
+            there is no data there.
+        """
+        reformat_script_root = os.path.join(
+            os.path.dirname(os.path.abspath(__file__)), 'formatters',
+            'datasets', self._dataset_to_module(dataset))
+        tier = self._get_dataset_tier(dataset)
+        if tier is None:
+            logger.error("Data for %s not found. 
Perhaps you are not" + " storing it in a RAWOBS/TierX/%s" + " (X=2 or 3) directory structure?", dataset, dataset) + return False + + # in-data dir; build out-dir tree + in_data_dir = os.path.join(self.rawobs, tier, dataset) + logger.info("Input data from: %s", in_data_dir) + out_data_dir = os.path.join(self.output_dir, tier, dataset) + logger.info("Output will be written to: %s", out_data_dir) + if not os.path.isdir(out_data_dir): + os.makedirs(out_data_dir) + + # all operations are done in the working dir now + os.chdir(out_data_dir) + # figure out what language the script is in + logger.info("Reformat script: %s", reformat_script_root) + if os.path.isfile(reformat_script_root + '.ncl'): + reformat_script = reformat_script_root + '.ncl' + success = self._run_ncl_script(in_data_dir, out_data_dir, dataset, + reformat_script, start, end) + elif os.path.isfile(reformat_script_root + '.py'): + success = self._run_pyt_script(in_data_dir, out_data_dir, dataset, + start, end) + else: + logger.error('Could not find formatter for %s', dataset) + return False + if success: + logger.info('Formatting successful for dataset %s', dataset) + else: + logger.error('Formatting failed for dataset %s', dataset) + return False + if install: + rootpath = self.config['rootpath'] + target_dir = rootpath.get('OBS', rootpath['default'])[0] + target_dir = os.path.join(target_dir, tier, dataset) + if os.path.isdir(target_dir): + logger.info( + 'Automatic installation of dataset %s skipped: ' + 'target folder %s already exists', dataset, target_dir) + else: + logger.info('Installing dataset %s in folder %s', dataset, + target_dir) + shutil.move(out_data_dir, target_dir) + return True + + def _get_dataset_tier(self, dataset): + for tier in [2, 3]: + if os.path.isdir(os.path.join(self.rawobs, f"Tier{tier}", + dataset)): + return f"Tier{tier}" + return None + + def _write_ncl_settings(self, project_info, dataset, run_dir, + reformat_script, start_year, end_year): + """Write the information needed by the ncl reformat script.""" + if start_year is None: + start_year = 0 + else: + start_year = start_year.year + if end_year is None: + end_year = 0 + else: + end_year = end_year.year + settings = { + 'cmorization_script': reformat_script, + 'input_dir_path': project_info[dataset]['indir'], + 'output_dir_path': project_info[dataset]['outdir'], + 'config_user_info': { + 'log_level': self.config['log_level'], + }, + 'start_year': start_year, + 'end_year': end_year, + } + settings_filename = os.path.join(run_dir, dataset, 'settings.ncl') + if not os.path.isdir(os.path.join(run_dir, dataset)): + os.makedirs(os.path.join(run_dir, dataset)) + # write the settings file + write_ncl_settings(settings, settings_filename) + return settings_filename + + def _run_ncl_script(self, in_dir, out_dir, dataset, script, start, end): + """Run the NCL cmorization mechanism.""" + logger.info("CMORizing dataset %s using NCL script %s", dataset, + script) + project = {} + project[dataset] = {} + project[dataset]['indir'] = in_dir + project[dataset]['outdir'] = out_dir + settings_file = self._write_ncl_settings(project, dataset, + self.run_dir, script, start, + end) + + # put settings in environment + env = dict(os.environ) + env['settings'] = settings_file + env['esmvaltool_root'] = os.path.dirname( + os.path.dirname(os.path.dirname(os.path.dirname(script)))) + env['cmor_tables'] = str( + Path(esmvalcore.cmor.__file__).parent / 'tables') + logger.info("Using CMOR tables at %s", env['cmor_tables']) + # call NCL + ncl_call = ['ncl', script] + 
logger.info("Executing cmd: %s", ' '.join(ncl_call)) + with subprocess.Popen(ncl_call, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + env=env) as process: + output, err = process.communicate() + for oline in str(output.decode('utf-8')).split('\n'): + logger.info('[NCL] %s', oline) + if err: + logger.error('[NCL][subprocess.Popen ERROR] %s', err) + return False + return True + + def _run_pyt_script(self, in_dir, out_dir, dataset, start, end): + """Run the Python cmorization mechanism.""" + module_name = ('esmvaltool.cmorizers.data.formatters.datasets.' + + dataset.lower().replace("-", "_")) + module = importlib.import_module(module_name) + logger.info("CMORizing dataset %s using Python script %s", dataset, + module.__file__) + cmor_cfg = read_cmor_config(dataset) + module.cmorization(in_dir, out_dir, cmor_cfg, self.config, start, end) + logger.info('CMORization of dataset %s finished!', dataset) + return True + + +class DataCommand(): + """Download and format data to use with ESMValTool.""" + + def __init__(self): + with open(datasets_file, 'r', encoding='utf8') as data: + self._info = yaml.safe_load(data) + self.formatter = _Formatter(self._info) + + def _has_downloader(self, dataset): + return 'Yes' if self.formatter.has_downloader(dataset) else "No" + + def list(self): + """List all supported datasets.""" + print() + print(f'| {"Dataset name":30} | Tier | Auto-download | Last access |') + print('-' * 71) + for dataset, dataset_info in self._info['datasets'].items(): + date = datetime.datetime.strptime(str(dataset_info['last_access']), + "%Y-%m-%d") + print(f"| {dataset:30} | {dataset_info['tier']:4} " + f"| {self._has_downloader(dataset):13} " + f"| {date.strftime('%Y-%m-%d')} |") + print('-' * 71) + + def info(self, dataset): + """Show detailed info about a specific dataset. + + Parameters + ---------- + dataset : str + dataset to show + """ + dataset_info = self._info['datasets'][dataset] + print(dataset) + print() + print(f"Tier: {dataset_info['tier']}") + print(f"Source: {dataset_info['source']}") + print(f"Automatic download: {self._has_downloader(dataset)}") + print("") + print(dataset_info['info']) + + def download(self, + datasets, + config_file=None, + start=None, + end=None, + overwrite=False, + config_dir=None, + **kwargs): + """Download datasets. + + Parameters + ---------- + datasets: list(str) + List of datasets to format + config_file: str, optional + Path to ESMValTool's config user file, by default None. + + .. deprecated:: 2.12.0 + This option has been deprecated in ESMValTool version 2.12.0 + and is scheduled for removal in version 2.14.0. Please use the + option `config_dir` instead. + start: str, optional + Start of the interval to process, by default None. Valid formats + are YYYY, YYYYMM and YYYYMMDD. + end: str, optional + End of the interval to process, by default None. Valid formats + are YYYY, YYYYMM and YYYYMMDD. + overwrite: bool, optional + If true, download already present data again + config_dir: str, optional + Path to additional ESMValTool configuration directory. See + :ref:`esmvalcore:config_yaml_files` for details. + + """ + if config_file is not None: + msg = ( + "The option `config_file` has been deprecated in ESMValTool " + "version 2.12.0 and is scheduled for removal in version " + "2.14.0. Please use the option ``config_dir`` instead." 
+ ) + warnings.warn(msg, ESMValToolDeprecationWarning) + + start = self._parse_date(start) + end = self._parse_date(end) + + self.formatter.start( + 'download', datasets, config_file, config_dir, kwargs + ) + self.formatter.download(start, end, overwrite) + + def format(self, + datasets, + config_file=None, + start=None, + end=None, + install=False, + config_dir=None, + **kwargs): + """Format datasets. + + Parameters + ---------- + datasets : list(str) + List of datasets to format + config_file : str, optional + Path to ESMValTool's config user file, by default None + + .. deprecated:: 2.12.0 + This option has been deprecated in ESMValTool version 2.12.0 + and is scheduled for removal in version 2.14.0. Please use the + option `config_dir` instead. + start : str, optional + Start of the interval to process, by default None. Valid formats + are YYYY, YYYYMM and YYYYMMDD. + end : str, optional + End of the interval to process, by default None. Valid formats + are YYYY, YYYYMM and YYYYMMDD. + install : bool, optional + If true, move processed data to the folder, by default False + config_dir: str, optional + Path to additional ESMValTool configuration directory. See + :ref:`esmvalcore:config_yaml_files` for details. + + """ + if config_file is not None: + msg = ( + "The option `config_file` has been deprecated in ESMValTool " + "version 2.12.0 and is scheduled for removal in version " + "2.14.0. Please use the option ``config_dir`` instead." + ) + warnings.warn(msg, ESMValToolDeprecationWarning) + + start = self._parse_date(start) + end = self._parse_date(end) + + self.formatter.start( + 'formatting', datasets, config_file, config_dir, kwargs + ) + self.formatter.format(start, end, install) + + def prepare(self, + datasets, + config_file=None, + start=None, + end=None, + overwrite=False, + install=False, + config_dir=None, + **kwargs): + """Download and format a set of datasets. + + Parameters + ---------- + datasets : list(str) + List of datasets to format + config_file : str, optional + Path to ESMValTool's config user file, by default None + + .. deprecated:: 2.12.0 + This option has been deprecated in ESMValTool version 2.12.0 + and is scheduled for removal in version 2.14.0. Please use the + option `config_dir` instead. + start : str, optional + Start of the interval to process, by default None. Valid formats + are YYYY, YYYYMM and YYYYMMDD. + end : str, optional + End of the interval to process, by default None. Valid formats + are YYYY, YYYYMM and YYYYMMDD. + install : bool, optional + If true, move processed data to the folder, by default False + overwrite : bool, optional + If true, download already present data again + config_dir: str, optional + Path to additional ESMValTool configuration directory. See + :ref:`esmvalcore:config_yaml_files` for details. + + """ + if config_file is not None: + msg = ( + "The option `config_file` has been deprecated in ESMValTool " + "version 2.12.0 and is scheduled for removal in version " + "2.14.0. Please use the option ``config_dir`` instead." 
+ ) + warnings.warn(msg, ESMValToolDeprecationWarning) + + start = self._parse_date(start) + end = self._parse_date(end) + + self.formatter.start( + 'preparation', datasets, config_file, config_dir, kwargs + ) + if self.formatter.download(start, end, overwrite): + self.formatter.format(start, end, install) + else: + logger.warning("Download failed, skipping format step") + + @staticmethod + def _parse_date(date): + if date is None: + return None + date_string = str(date) + date_formats = { + 4: "%Y", + 6: "%Y%m", + 8: "%Y%m%d", + } + format_string = date_formats.get(len(date_string), None) + if format_string is None: + raise ValueError( + f'Unsupported date format for {date}. ' + 'Supported formats for "start" and "end" are: ' + '"None", "YYYY", "YYYYMM", "YYYYMMDD"') + return datetime.datetime.strptime(date_string, format_string) diff --git a/esmvaltool/cmorizers/data/datasets.yml b/esmvaltool/cmorizers/data/datasets.yml new file mode 100644 index 0000000000..4c7c168009 --- /dev/null +++ b/esmvaltool/cmorizers/data/datasets.yml @@ -0,0 +1,1292 @@ +# Dataset information +--- +datasets: + AERONET: + tier: 3 + source: "https://aeronet.gsfc.nasa.gov/" + last_access: 2024-04-06 + info: | + Aerosol Optical Depth information from a worldwide network of stations. + + AGCD: + tier: 2 + source: "http://dx.doi.org/10.25914/6009600786063" + last_access: 2023-11-21 + info: | + Australian Gridded Climate Data (AGCD) version 2 is the Bureau of Meteorology's official dataset for climate + analyses covering analysis of monthly rainfall. The dataset provides consistent temporal and spatial analyses + across Australia for each observed data variable. This accounts for spatial and temporal gaps in observations. + Where possible, the gridded analysis techniques provide useful estimates in data-sparse regions + such as central Australia. + + Time coverage: Site-based data are used to provide gridded climate data at the monthly timescale for rainfall (1900+). + Reference: Evans, A., Jones, D.A., Smalley, R., and Lellyett, S. 2020. An enhanced gridded rainfall analysis scheme + for Australia. Bureau of Meteorology Research Report. No. 41. + National Computational Infrastructure (NCI) - Catalogue Record: http://dx.doi.org/10.25914/6009600786063. + Data from NCI (National Computing Infrastructure Australia https://nci.org.au/), + requires an NCI account and access to Gadi(Supercomputer in Canberra) and the project found in catalogue record. + Access can be requested through NCI. NCI is an ESGF node (https://esgf.nci.org.au/projects/esgf-nci/) + + ANUClimate: + tier: 3 + source: "https://dx.doi.org/10.25914/60a10aa56dd1b" + last_access: 2023-11-21 + info: | + Data from NCI project requiring an NCI account and access to GADI + + ANUClimate 2.0 consists of gridded daily and monthly climate variables across the terrestrial landmass of Australia + from at least 1970 to the present. Rainfall grids are generated from 1900 to the present. The underpinning spatial + models have been developed at the Fenner School of Environment and Society of the Australian National University. + + APHRO-MA: + tier: 3 + source: "http://aphrodite.st.hirosaki-u.ac.jp/download/" + last_access: 2020-03-06 + info: | + Register at http://aphrodite.st.hirosaki-u.ac.jp/download/create/ + + To use the automatic download set your credentials in your .netrc file. + See https://www.gnu.org/software/inetutils/manual/html_node/The-_002enetrc-file.html. 
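+
+      As a minimal sketch (placeholders are hypothetical; use the
+      credentials you registered with), a .netrc entry for this server
+      looks like:
+        machine aphrodite.st.hirosaki-u.ac.jp login <username> password <password>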
+ + To download manually, download the following files from + http://aphrodite.st.hirosaki-u.ac.jp/product/: + APHRO_V1808_TEMP/APHRO_MA + 025deg_nc/APHRO_MA_TAVE_025deg_V1808.nc.tgz + 050deg_nc/APHRO_MA_TAVE_050deg_V1808.nc.tgz + APHRO_V1101/APHRO_MA + 025deg_nc/APHRO_MA_025deg_V1101.1951-2007.nc.gz.tar + 050deg_nc/APHRO_MA_050deg_V1101.1951-2007.nc.gz.tar + APHRO_V1101EX_R1/APHRO_MA + 025deg_nc/APHRO_MA_025deg_V1101_EXR1.nc.tgz + 050deg_nc/APHRO_MA_050deg_V1101_EXR1.nc.tgz + + Please untar / unzip all *.tar *.tgz *.gz files in the same directory + (no subdirectories!) prior to running the cmorizer! + + AURA-TES: + tier: 3 + source: https://search.earthdata.nasa.gov/search?q=TL3O3M + last_access: 2018-12-08 + info: | + Download and processing instructions + Select the V004 and V005 projects. + Download the script file for both projects. + To download the data use: wget -i . + From the downloaded data, pick only the *.he5 files and put them in + input_dir_path. + Data is freely available, but a registration is required. + + BerkeleyEarth: + tier: 2 + source: http://berkeleyearth.org/data/ + last_access: 2020-02-25 + info: | + Download the following file: + http://berkeleyearth.lbl.gov/auto/Global/Gridded/Land_and_Ocean_LatLong1.nc + + CALIPSO-GOCCP: + tier: 2 + source: ftp://ftp.climserv.ipsl.polytechnique.fr/ + last_access: 2020-01-27 + info: | + Download the data from:" + ftp://ftp.climserv.ipsl.polytechnique.fr/ + path: /cfmip/GOCCP_v3/3D_CloudFraction/grid_2x2xL40/yyyy/avg/ + Put all files under a single directory (no subdirectories with years). + 3D_CloudFraction330m_yyyymm_avg_CFMIP2_sat_3.1.2.nc + + CALIPSO-ICECLOUD: + tier: 3 + source: https://eosweb.larc.nasa.gov/project/CALIPSO/CAL_LID_L3_Ice_Cloud-Standard_V1-00 + last_access: 2022-08-04 + info: | + Download and processing instructions + (requires EarthData login; see https://urs.earthdata.nasa.gov/) + 1) Go to https://eosweb.larc.nasa.gov/project/CALIPSO/CAL_LID_L3_Ice_Cloud-Standard_V1-00 + 2) Click on "Get Dataset" + 3) Select Granules "A" containing both, day and night (Day/Night filter = "Both") + 4) Download selected granules + 5) Enter EarthData login and password (https://urs.earthdata.nasa.gov/) + 6) Follow download instructions in email from EarthData and put all + files in the same directory + + CDS-SATELLITE-ALBEDO: + tier: 3 + source: https://cds.climate.copernicus.eu/cdsapp#!/dataset/satellite-albedo?tab=form + last_access: 2019-04-01 + info: | + You will need to accept the license prior to use the automatic download. + If not, it will fail but will provide you the link to do so. + + Download and processing instructions + - Download the data from source to the right directory + using the download script. + - Decompress the files within the directory: + "find . -name '*.tar.gz' -execdir tar -xzvf '{}' \" + + CDS-SATELLITE-LAI-FAPAR: + tier: 3 + source: https://cds.climate.copernicus.eu/cdsapp#!/dataset/satellite-lai-fapar?tab=form + last_access: 2019-07-03 + info: | + You will need to accept the license prior to use the automatic download. + If not, it will fail but will provide you the link to do so. 
+ + Download and processing instructions + - Open in a browser the data source as specified above + - Put the right ticks: + - Tick variables LAI and FAPAR + - Tick satellite SPOT (System Pour l'Observation de la Terre) + - Tick sensor VGT (Vegetation) + - Tick horizontal resolution 1km + - Tick product version V1 + - Tick all available years + - Tick all available months + - Tick Nominal day 20 + - Click 'submit form' + - According to ESMValTool practice, put them in the right rawobsdir folder + - Request might need to be split into chunks to not exceed download limit + - Fails setting standard name for variable FAPAR + + CDS-SATELLITE-SOIL-MOISTURE: + tier: 3 + source: https://cds.climate.copernicus.eu/cdsapp#!/dataset/satellite-soil-moisture?tab=form + last_access: 2019-03-14 + info: | + - Use the download command to retrieve the data. + - Available years: 1992-2019 (ACTIVE) or 1979-2019 (others). + - Versions: v201812.0.0 and v201912.0.0 + + CDS-UERRA: + tier: 3 + source: https://cds.climate.copernicus.eu/cdsapp#!/dataset/reanalysis-uerra-europe-soil-levels + last_access: 2019-11-04 + info: | + - Open in a browser the data source as specified above + - Put the right ticks: + - Tick Origin UERRA-HARMONIE + - Tick Variable 'Volumetric soil moisture' + - Tick Soil level 1, 2, 3 + - Tick all available years + - Tick all available months + - Tick all available days + + CDS-XCH4: + tier: 3 + source: https://cds.climate.copernicus.eu/cdsapp#!/dataset/satellite-methane?tab=form + last_access: 2019-03-11 + info: | + Select Processing level "Level 3", variable "Column-average dry-air mole + fraction of atmospheric methane (XCH4) and related variables", Sensor and + algorithm "MERGED and OBS4MIPS". + A registration is required to download the data. + + CDS-XCO2: + tier: 3 + source: https://cds.climate.copernicus.eu/cdsapp#!/dataset/satellite-carbon-dioxide?tab=form + last_access: 2019-03-19 + info: | + Select Processing level "Level 3", variable "Column-average dry-air mole + fraction of atmospheric carbon dioxide (XCO2) and related variables", + Sensor and algorithm "MERGED and OBS4MIPS". + A registration is required to download the data. + + CERES-EBAF: + tier: 2 + source: https://ceres-tool.larc.nasa.gov/ord-tool/jsp/EBAFTOA41Selection.jsp + last_access: 2022-07-01 + info: | + Select: "TOA Fluxes" (all), "Monthly", "Regional" (0-360, -90-90) + Enter "Email Address" and click on "Get Data" + Wait for the processing to be finished and click on "Download" + + CERES-SYN1deg: + tier: 3 + source: https://ceres-tool.larc.nasa.gov/ord-tool/jsp/SYN1degSelection.jsp + last_access: 2019-02-07 + info: | + Monthly data: + Expand "Compute TOA Fluxes" and select: + Shortwave Flux, Allsky and Clearsky + Longwave Flux, Allsky and Clearsky + Shortwave Flux Down, Allsky + Expand "Computed Surface Fluxes" and select: + Shortwave Flux Up, Allsky and Clearsky + Shortwave Flux Down, Allsky and Clearsky + Longwave Flux Up, Allsky and Clearsky + Longwave Flux Down, Allsky and Clearsky + then click on "Monthly", "Regional" and "Get data". All fields are saved + in CERES_SYN1deg-Month_Terra-Aqua-MODIS_Ed3A_Subset_200003-201702.nc + 3hr data: + Select the same fields as above, then click on "Daily 3-Hourly" and + "Get data". All fields are saved in + CERES_SYN1deg-3H_Terra-Aqua-MODIS_Ed3A_Subset_YYYYMMDD-YYYYMMDD.nc + Put all files in input_dir_path (no subdirectories with years). + For orders larger than 2 GB a registration is required. 
+
+  CLARA-AVHRR:
+    tier: 3
+    source: https://wui.cmsaf.eu/
+    last_access: 2021-03-22
+    info: |
+      Download and processing instructions
+      1) Create ("register") a user account at
+         https://wui.cmsaf.eu/safira/action/viewLogin?menuName=NUTZER_HOME
+      2) login (same URL as above)
+      3) Search data using search form at
+         https://wui.cmsaf.eu/safira/action/viewProduktHome?menuName=PRODUKT_HOME
+         - Product group: Climate Data Records
+         - Product family: CLARA-A ed. 2.1
+         - Product name: CFC - Fractional cloud cover
+                         IWP - Ice water path
+                         LWP - Liquid water path
+         - Area: Global
+         - Temporal resolution: Monthly
+      4) Select "CLARA-A ed. 2.1 AVHRR on polar orbiting satellites" from
+         list of results.
+      5) Click on "Add to order cart"
+      6) Follow download instructions in automatic email received when data
+         are ready for download.
+      7) Untar all .tar files into a single directory.
+
+  CLOUDSAT-L2:
+    tier: 3
+    source: https://www.cloudsat.cira.colostate.edu/
+    last_access: 2022-08-10
+    info: |
+      Download and processing instructions
+      1) Create an account at the CloudSat Data Processing Center
+         (https://www.cloudsat.cira.colostate.edu/)
+      2) Download the datasets '2B-CWC-RO' and '2C-PRECIP-COLUMN' from
+         www.cloudsat.cira.colostate.edu (via sftp) following the instructions
+         given here: https://www.cloudsat.cira.colostate.edu/order/sftp-access
+      3) Put all files for a specific year under a single directory
+         named like the year (e.g. 2007), no subdirectories with
+         days etc.
+
+  CMAP:
+    tier: 2
+    source: https://psl.noaa.gov/data/gridded/data.cmap.html
+    last_access: 2024-09-09
+    info: |
+      To facilitate the download, the links to the https server are provided.
+      https://downloads.psl.noaa.gov/Datasets/cmap/enh/
+        precip.mon.mean.nc
+
+  CowtanWay:
+    tier: 2
+    source: https://www-users.york.ac.uk/~kdc3/papers/coverage2013/series.html
+    last_access: 2020-02-26
+    info: |
+      Download the following files:
+        'had4_krig_v1_0_0.nc.gz'
+        'had4_uah_v1_0_0.nc.gz'
+        'had4_short_krig_v2_0_0.nc.gz'
+        'had4_short_uah_v2_0_0.nc.gz'
+        'ghcn_short_krig_v2_0_0.nc.gz'
+        'ghcn_short_uah_v2_0_0.nc.gz'
+        'had4sst4_krig_v2_0_0.nc.gz'
+        'had4_krig_v2_0_0.nc.gz'
+
+  CRU:
+    tier: 2
+    source: https://crudata.uea.ac.uk/cru/data/hrg/cru_ts_4.07/cruts.2304141047.v4.07/
+    last_access: 2023-11-06
+    info: |
+      Files can be downloaded using the download script (latest version only)
+      or manually:
+        {raw_name}/cru_ts4.07.1901.2022.{raw_name}.dat.nc.gz
+      where {raw_name} is the name of the desired variable(s).
+      Previous versions can be downloaded from the corresponding folders in
+      https://crudata.uea.ac.uk/cru/data/hrg/. ESMValTool's formatter can be
+      used for older versions with minor adjustments of
+      ``esmvaltool/cmorizers/data/cmor_config/CRU.yml``
+      Exact time coordinates and number of stations are not available for
+      version TS4.02.
+
+  CT2019:
+    tier: 2
+    source: https://www.esrl.noaa.gov/gmd/ccgg/carbontracker/index.php
+    last_access: 2020-03-23
+    info: |
+      Create a new empty directory ``$RAWOBSPATH/Tier2/CT2019`` (where
+      ``$RAWOBSPATH`` is given by your configuration) where the raw
+      data will be stored. The download of the data is automatically handled by
+      this script. If data is already present in this directory, the download is
+      skipped (to force a new download delete your old files).
+
+  Duveiller2018:
+    tier: 2
+    source: https://ndownloader.figshare.com/files/9969496
+    last_access: 2019-04-30
+    info: |
+      - Download the dataset albedo_IGBPgen.nc and save in the right directory
+        according to ESMValTool practices.
+      - Complete the CMOR-config specifications (see instructions in the file
+        itself)
+
+  E-OBS:
+    tier: 2
+    source: http://surfobs.climate.copernicus.eu/dataaccess/access_eobs.php#datafiles
+    last_access: 2020-02-25
+    info: |
+      Download the ensemble mean files for:
+        TG TN TX RR PP
+
+  Eppley-VGPM-MODIS:
+    tier: 2
+    source: http://orca.science.oregonstate.edu/data/1x2/monthly/eppley.r2018.m.chl.m.sst/hdf
+    last_access: 2019-05-15
+    info: |
+      Download and unpack all the *.tar files under a single directory
+      (no subdirectories with years) in ${RAWOBS}/Tier2/Eppley-VGPM-MODIS
+
+  ERA-Interim-Land:
+    tier: 3
+    source: https://apps.ecmwf.int/datasets/data/interim-land/type=fc/
+    last_access: 2019-11-04
+    info: |
+      See script cmorize_obs_era_interim.py
+
+  ERA-Interim:
+    tier: 3
+    source: http://apps.ecmwf.int/datasets/data/interim-full-moda/
+    last_access: 2019-09-05
+    info: |
+      Select "ERA Interim Fields":
+        Daily: for daily values
+        Invariant: for time invariant variables (like land-sea mask)
+        Monthly Means of Daily Means: for monthly values
+        Monthly Means of Daily Forecast Accumulation: for accumulated variables
+        like precipitation or radiation fluxes
+      Select "Type of level" (Surface or Pressure levels)
+      Download the data on a single variable and single year basis, and save
+      them as ERA-Interim_<var>_<mean>_YYYY.nc, where <var> is the ERA-Interim
+      variable name and <mean> is either monthly or daily. Further download
+      "land-sea mask" from the "Invariant" data and save it in
+      ERA-Interim_lsm.nc.
+      It is also possible to download data in an automated way, see:
+        https://confluence.ecmwf.int/display/WEBAPI/Access+ECMWF+Public+Datasets
+        https://confluence.ecmwf.int/display/WEBAPI/Python+ERA-interim+examples
+      A registration is required for downloading the data.
+      It is also possible to use the script in:
+        esmvaltool/cmorizers/data/download_scripts/download_era_interim.py
+      This cmorization script currently supports daily and monthly data of
+      the following variables:
+        10m u component of wind
+        10m v component of wind
+        2m dewpoint temperature
+        2m temperature
+        evaporation
+        maximum 2m temperature since previous post processing
+        mean sea level pressure
+        minimum 2m temperature since previous post processing
+        skin temperature
+        snowfall
+        surface net solar radiation
+        surface solar radiation downwards
+        temperature of snow layer
+        toa incident solar radiation
+        total cloud cover
+        total precipitation
+      and daily, monthly (not invariant) data of:
+        Geopotential
+
+      and monthly data of:
+        Fraction of cloud cover (3-dim)
+        Inst. eastward turbulent surface stress
+        Inst. northward turbulent surface stress
+        Net top solar radiation
+        Net top solar radiation clear-sky
+        Sea surface temperature
+        Specific cloud ice water content
+        Specific cloud liquid water content
+        Specific humidity
+        Surface net thermal radiation
+        Surface latent heat flux
+        Surface sensible heat flux
+        Relative humidity
+        Temperature
+        Top net thermal radiation clear-sky
+        Total column water vapour
+        U component of wind
+        V component of wind
+        Vertical integral of cloud condensed water (ice and liquid)
+        Vertical integral of cloud liquid water
+        Vertical integral of cloud frozen water
+        Vertical velocity
+
+      Caveats
+      Make sure to select the right steps for accumulated fluxes, see:
+        https://confluence.ecmwf.int/pages/viewpage.action?pageId=56658233
+        https://confluence.ecmwf.int/display/CKB/ERA-Interim%3A+monthly+means
+      for a detailed explanation.
+ The data are updated regularly: recent years are added, but also the past + years are sometimes corrected. To have a consistent timeseries, it is + therefore recommended to download the full timeseries and not just add + new years to a previous version of the data. + + For further details on obtaining daily values from ERA-Interim, + see: + https://confluence.ecmwf.int/display/CKB/ERA-Interim + https://confluence.ecmwf.int/display/CKB/ERA-Interim+documentation#ERA-Interimdocumentation-Monthlymeans + https://confluence.ecmwf.int/display/CKB/ERA-Interim%3A+How+to+calculate+daily+total+precipitation + + ERA5: + tier: 3 + source: https://cds.climate.copernicus.eu + last_access: 2021-11-29 + info: | + Reformat preprocessor output from the native6 project to follow the OBS6 + conventions. + + ESACCI-AEROSOL: + tier: 2 + source: ftp://anon-ftp.ceda.ac.uk/neodc/esacci/aerosol/data/ + last_access: 2019-01-24 + info: | + Download the data from: + ATSR2_SU/L3/v4.21/MONTHLY/ (1997-2002) + AATSR_SU/L3/v4.21/MONTHLY/ (2003-2011) + Other years are not considered since they are not complete. + Put all files in input_dir_path (no subdirectories with years). + + ESACCI-CLOUD: + tier: 2 + source: https://public.satproj.klima.dwd.de/data/ESA_Cloud_CCI/CLD_PRODUCTS/v3.0/ + last_access: 2019-02-01 + info: | + Download the data from: + L3C/AVHRR-PM/ + To fill the gap 199409-199501, also download: + L3C/AVHRR-AM/AVHRR_NOAA-12/1994/ \ + 199409-ESACCI-L3C_CLOUD-CLD_PRODUCTS-AVHRR_NOAA-12-fv3.0.nc + L3C/AVHRR-AM/AVHRR_NOAA-12/1994/ \ + 199410-ESACCI-L3C_CLOUD-CLD_PRODUCTS-AVHRR_NOAA-12-fv3.0.nc + L3C/AVHRR-AM/AVHRR_NOAA-12/1994/ \ + 199411-ESACCI-L3C_CLOUD-CLD_PRODUCTS-AVHRR_NOAA-12-fv3.0.nc + L3C/AVHRR-AM/AVHRR_NOAA-12/1994/ \ + 199412-ESACCI-L3C_CLOUD-CLD_PRODUCTS-AVHRR_NOAA-12-fv3.0.nc + L3C/AVHRR-AM/AVHRR_NOAA-12/1995/ \ + 199501-ESACCI-L3C_CLOUD-CLD_PRODUCTS-AVHRR_NOAA-12-fv3.0.nc + Put all files under a single directory (no subdirectories with years). + + ESACCI-FIRE: + tier: 2 + source: ftp://anon-ftp.ceda.ac.uk/neodc/esacci/fire/data/ + last_access: 2019-01-24 + info: | + Download the data from: + burned_area/MERIS/grid/v4.1/ + Put all files in input_dir_path (no subdirectories with years). 
+
+  ESACCI-LANDCOVER:
+    tier: 2
+    source: ftp://anon-ftp.ceda.ac.uk/neodc/esacci/land_cover/data/pft/v2.0.8/
+    last_access: 2024-07-11
+    info: |
+      Download and processing instructions:
+      Use the following CLI to download all the files:
+        esmvaltool data download ESACCI-LANDCOVER
+      The underlying downloader is located here:
+        /ESMValTool/esmvaltool/cmorizers/data/downloaders/datasets/esacci_landcover.py
+      and it will download all the files currently available on CEDA (1992-2020)
+      under a single directory as follows: ${RAWOBS}/Tier2/ESACCI-LANDCOVER
+
+  ESACCI-LST:
+    tier: 2
+    source: On CEDA-JASMIN, /gws/nopw/j04/esacci_lst/public
+    last_access: 2022-01-26
+    info: |
+      For access to this JASMIN group workspace please register at:
+      https://accounts.jasmin.ac.uk/services/group_workspaces/esacci_lst/
+      Download and processing instructions:
+      Put all files under a single directory (no subdirectories with years)
+      in ${RAWOBS}/Tier2/ESACCI-LST
+      BOTH DAY and NIGHT files are needed for each month
+
+      Caveats
+      Currently set to work with only the MODIS AQUA L3 monthly data
+
+  ESACCI-OC:
+    tier: 2
+    source: ftp://oceancolour.org/occci-v5.0/geographic/netcdf/monthly/chlor_a/
+    last_access: 2021-02-02
+    info: |
+      In case of issues with data download, check also the information provided at
+      OceanColour webpage https://esa-oceancolour-cci.org/
+      Username and password are provided on this website
+      Put all files under a single directory (no subdirectories with years)
+      in ${RAWOBS}/Tier2/ESACCI-OC
+
+  ESACCI-OZONE:
+    tier: 2
+    source: ftp://anon-ftp.ceda.ac.uk/neodc/esacci/ozone/data/
+    last_access: 2019-02-01
+    info: |
+      Download the data from:
+        total_columns/l3/merged/v0100/
+        limb_profiles/l3/merged/merged_monthly_zonal_mean/v0002
+      Put all files under a single directory (no subdirectories with years).
+
+  ESACCI-SOILMOISTURE:
+    tier: 2
+    source: ftp://anon-ftp.ceda.ac.uk/neodc/esacci/soil_moisture/data/
+    last_access: 2024-06-19
+    info: |
+      Download the data from:
+        daily_files/COMBINED/v08.1/
+        ancillary/v08.1/
+      Put all files under a single directory (no subdirectories with years).
+
+  ESACCI-SEA-SURFACE-SALINITY:
+    tier: 2
+    source: ftp://anon-ftp.ceda.ac.uk/neodc/esacci/sea_surface_salinity/data
+    last_access: 2020-09-21
+    info: |
+      Download the data from:
+        v01.8/30days/
+        v02.31/30days/
+      Put all files under a single directory (no subdirectories with years).
+
+  ESACCI-SST:
+    tier: 2
+    source: ftp://anon-ftp.ceda.ac.uk/neodc/esacci/sst/data/
+    last_access: 2019-02-01
+    info: |
+      Download the data from:
+        lt/Analysis/L4/v01.1/
+      Put all files under a single directory (no subdirectories with years).
+
+  ESACCI-WATERVAPOUR:
+    tier: 3
+    source: https://wui.cmsaf.eu/safira/action/viewDoiDetails?acronym=COMBI_V001
+    last_access: 2024-02-21
+    info: |
+      CDR2 requires registration at EUMETSAT CM SAF, the information on how to
+      download the order will be emailed once the order is ready.
+      All files need to be in one directory, not in yearly subdirectories.
+
+  ESDC:
+    tier: 2
+    source: http://data.rsc4earth.de/EarthSystemDataCube/
+    last_access: 2023-01-26
+    info: |
+      It is not necessary to download the data, as the cmorizer script can access
+      it directly from the cloud if it is not available locally.
+ + To download a dataset, the dataset folder can be explored on the source + website, and downloaded using wget: + ```wget -m -nH -np -R "index.html*" http://data.rsc4earth.de/EarthSystemDataCube/v3.0.1/``` + + ESRL: + tier: 2 + source: http://www.esrl.noaa.gov/gmd/dv/data/index.php + last_access: 2020-06-30 + info: | + (1) Select the following filters: + Category: Greenhouse Gases. + Parameter name: Carbon Dioxide. + Type: Insitu. + Frequency: Monthly Averages. + Site: BRW, MLO, SMO, SPO. + (2) Get stations properties at http://www.esrl.noaa.gov/gmd/obop/ + and compile a station list as comma separated value table containing: + station code, station name, latitude, longitude (0-360), elevation (m) + + Example (ESRL_stations.csv): + ---------------------------- + BRW, Barrow Alaska, 71.3230, 203.3886, 11 + THD, Trinidad Head, 41.0541, 235.849, 107 + SUM, Summit Greenland, 72.5962, 321.578, 3210 + MLO, Mauna Loa, 19.5362, 204.4237, 3397 + SMO, American Samoa, -14.2474, 189.4356, 42 + SPO, South Pole, -90.00, 301.0, 2840 + + (3) This list must be manually created (format: comma separated values) + and saved to the path + filename given by "FILE_STAT" below. + + (4) Check the period covered by the data and give it below as YEAR1 and + YEAR2. + + FLUXCOM: + tier: 3 + source: http://www.bgc-jena.mpg.de/geodb/BGI/Home + last_access: 2019-07-27 + info: | + From the website, select FLUXCOM as the data choice and click download. + Two files will be displayed. One for Land Carbon Fluxes and one for + Land Energy fluxes. The Land Carbon Flux file (RS + METEO) using + CRUNCEP data file has several data files for different variables. + The data for GPP generated using the + Artificial Neural Network Method will be in files with name: + GPP.ANN.CRUNCEPv6.monthly.*.nc + A registration is required for downloading the data. + Users in the UK with a CEDA-JASMIN account may request access to the jules + workspace and access the data. + Note : This data may require rechunking of the netcdf files. + This constraint will not exist once iris is updated to + version 2.3.0 Aug 2019 + + GCP2018: + tier: 2 + source: https://www.icos-cp.eu/GCP/2018 + last_access: 2019-10-17 + info: | + Download the following file: '2018 Global Budget v1.0' + + GCP2020: + tier: 2 + source: https://www.icos-cp.eu/GCP/2020 + last_access: 2019-10-17 + info: | + Download the following file: '2020 Global Budget v1.0' + + GHCN-CAMS: + tier: 2 + source: | + https://www.esrl.noaa.gov/psd/data/gridded/data.ghcncams.html + ftp://ftp.cdc.noaa.gov/Datasets/ghcncams/air.mon.mean.nc + last_access: 2020-03-04 + info: Use automatic download feature to get the data + + GHCN: + tier: 2 + source: https://www.esrl.noaa.gov/psd/data/gridded/data.ghcngridded.html + last_access: 2019-03-08 + info: | + Download the dataset "precip.mon.total.nc" (precipitation, total, surface, + 1900-2015 on a 5x5 grid). 
+
+  GISTEMP:
+    tier: 2
+    source: https://data.giss.nasa.gov/gistemp/ https://data.giss.nasa.gov/pub/gistemp/gistemp250_GHCNv4.nc.gz
+    last_access: 2020-03-03
+    info: "Use automatic download feature to get the data"
+
+  GLODAP:
+    tier: 2
+    source: https://www.glodap.info/index.php/mapped-data-product/
+    last_access: 2020-03-03
+    info: "Use automatic download feature to get the data"
+
+  GPCC:
+    tier: 2
+    source: |
+      https://opendata.dwd.de/climate_environment/GPCC/html/fulldata-monthly_v2018_doi_download.html
+      https://opendata.dwd.de/climate_environment/GPCC/full_data_2018/full_data_monthly_v2018_[025 05 10 25].nc.gz
+    last_access: 2020-02-25
+    info: |
+      Download the following files:
+        full_data_monthly_{version}.nc.gz
+
+  GPCP-SG:
+    tier: 2
+    source: |
+      https://psl.noaa.gov/data/gridded/data.gpcp.html
+      https://downloads.psl.noaa.gov/Datasets/gpcp/precip.mon.mean.nc
+    last_access: 2023-02-15
+    info: |
+      Download the file precip.mon.mean.nc
+
+  GRACE:
+    tier: 3
+    source: https://podaac.jpl.nasa.gov/dataset/TELLUS_GRAC-GRFO_MASCON_CRI_GRID_RL06_V2
+    last_access: 2020-11-27
+    info: |
+      Download and processing instructions
+      - Go to the source link
+      - Click the tab "Data Access"
+      - Log in with Earthdata account
+      - Download the following files:
+        - CLM4.SCALE_FACTOR.JPL.MSCNv02CRI.nc
+        - GRCTellus.JPL.200204_202004.GLO.RL06M.MSCNv02CRI.nc
+        - LAND_MASK.CRI.nc
+      - Download the grace months table which holds important information
+        on data coverage. Save it in the RAWOBSDIR.
+        https://podaac-tools.jpl.nasa.gov/drive/files/allData/tellus/L3/docs/GRACE_GRACE-FO_Months_RL06.csv
+      - Manually inspect and check the months table
+
+  HadCRUT3:
+    tier: 2
+    source: http://www.metoffice.gov.uk/hadobs/hadcrut3/data/download.html
+    last_access: 2019-02-21
+    info: |
+      Download the HadCRUT3v.nc file (variance adjusted dataset).
+      Caveats
+      The HadCRUT3v variance-adjusted dataset for tas is actually the anomaly
+      with respect to the period 1958-2001.
+
+  HadCRUT4:
+    tier: 2
+    source: https://crudata.uea.ac.uk/cru/data/temperature/
+    last_access: 2019-02-08
+    info: |
+      Download the dataset "HadCRUT4" (median temperature anomalies) and
+      the dataset "Absolute" (absolute temperatures for the base period
+      1961-90 on a 5x5 grid).
+
+      Caveats
+      In contrast to the HadCRUT3 reformat script which produces temperature
+      anomalies (relative to the 1961-90 climatology), this script calculates
+      absolute tas by adding the climatology ("absolute.nc") to the anomalies
+      ("HadCRUT.4.6.0.0.median.nc"). It creates 2 outputs, one with the
+      temperature time-series and one with the anomaly time-series
+
+  HadCRUT5:
+    tier: 2
+    source: https://crudata.uea.ac.uk/cru/data/temperature
+    last_access: 2022-03-28
+    info: |
+      Download the following files:
+        infilling
+          [Source]/HadCRUT.5.0.1.0.analysis.anomalies.ensemble_mean.nc
+        no-infilling
+          [Source]/HadCRUT.5.0.1.0.anomalies.ensemble_mean.nc
+        climatology
+          [Source]/absolute_v5.nc
+
+  HadISST:
+    tier: 2
+    source: http://www.metoffice.gov.uk/hadobs/hadisst/data/download.html
+    last_access: 2019-02-08
+    info: |
+      Download and unzip "HadISST_ice.nc.gz" and "HadISST_sst.nc.gz".
+
+  HALOE:
+    tier: 2
+    source: Grooss, J.-U. and Russell III, J. M., Atmos. Chem. Phys., 5, 2797-2807, doi:10.5194/acp-5-2797-2005, 2005.
+    last_access: 2020-03-11
+    info: |
+      Download and processing instructions
+      Download and untar the supplementary material of the above reference.
+
+      Caveats
+      The time period includes incomplete years. 
According to the given
+      reference (Section 3): "Between October 1991 and August 2002, a total of
+      78600 HALOE profiles of O3, CH4, H2O, HCl, and HF mixing ratio were then
+      combined into bins of 5 degree equivalent latitude for 22 pressure levels
+      between 316 and 0.1 hPa and 12 months".
+      In order to create T3M fields a fake longitude coordinate is added.
+
+  HWSD:
+    tier: 3
+    source: https://daac.ornl.gov/cgi-bin/dsviewer.pl?ds_id=1247
+    last_access: 2019-10-15
+    info: |
+      Download the following file:
+        HWSD_SOIL_CLM_RES.nc4
+      A registration is required
+
+  ISCCP-FH:
+    tier: 2
+    source: https://isccp.giss.nasa.gov/pub/flux-fh/tar-nc4_MPF/
+    last_access: 2019-11-07
+    info: |
+      To use the automatic download set your credentials in your .netrc file.
+      See https://www.gnu.org/software/inetutils/manual/html_node/The-_002enetrc-file.html.
+
+      To download manually:
+      1) Go to https://isccp.giss.nasa.gov/projects/flux.html and click on
+         "FH On-line Data"
+      2) username and password are provided on this website
+      3) go to directory tar-nc4_MPF/ (= monthly files)
+      4) download and unpack files
+
+  JMA-TRANSCOM:
+    tier: 3
+    source: http://www.globalcarbonatlas.org/en/content/atmospheric-inversions
+    last_access: 2019-07-02
+    info: |
+      To obtain the data sets it is necessary to contact Takashi Maki
+      (Department of Atmosphere, Ocean and Earth System Modeling Research,
+      Meteorological Research Institute, Tsukuba City, Japan). See link above
+      for more information.
+
+  JRA-25:
+    tier: 2
+    source: https://esgf.nccs.nasa.gov/thredds/fileServer/CREATE-IP/reanalysis/JMA/JRA-25/JRA-25/
+    last_access: 2022-11-17
+    info: |
+      Download the following files:
+        mon/atmos/clt/clt_Amon_reanalysis_JRA-25_197901-201312.nc
+        mon/atmos/hus/hus_Amon_reanalysis_JRA-25_197901-201312.nc
+        mon/atmos/prw/prw_Amon_reanalysis_JRA-25_197901-201312.nc
+        mon/atmos/rlut/rlut_Amon_reanalysis_JRA-25_197901-201312.nc
+        mon/atmos/rlutcs/rlutcs_Amon_reanalysis_JRA-25_197901-201312.nc
+        mon/atmos/rsut/rsut_Amon_reanalysis_JRA-25_197901-201312.nc
+        mon/atmos/rsutcs/rsutcs_Amon_reanalysis_JRA-25_197901-201312.nc
+
+  JRA-55:
+    tier: 2
+    source: https://rda.ucar.edu/datasets/ds628.1/
+    last_access: 2023-03-22
+    info: |
+      Create an account on the research data archive (RDA) in order to be able
+      to download the data (1.25 degree, pressure levels). See
+      https://rda.ucar.edu/login/register/ for more details.
+
+  Kadow2020:
+    tier: 2
+    source: http://users.met.fu-berlin.de/~ChristopherKadow/
+    last_access: 2022-03-29
+    info: |
+      Download the following file:
+        [SOURCE]/HadCRUT.5.0.1.0.anomalies.Kadow_et_al_2020_20crAI-infilled.ensemble_mean_185001-202012.nc
+
+  LAI3g:
+    tier: 3
+    source: http://cliveg.bu.edu/modismisr/lai3g-fpar3g.html
+    last_access: 2019-05-03
+    info: |
+      To obtain the data sets it is necessary to contact Ranga B. Myneni
+      (Department of Earth and Environment, Boston University). See link above
+      for more information.
+
+      By default, this dataset is regridded to a 1°x1° grid (original resolution
+      is 1/12°). If you want to use the original resolution, remove the `regrid`
+      section in the configuration file (`LAI3g.yml`). Note that in this case,
+      preprocessing the dataset with ESMValTool (i.e. every time you run the
+      tool) can take a very long time (> 30 min).
+
+  LandFlux-EVAL:
+    tier: 3
+    source: https://data.iac.ethz.ch/landflux/
+    last_access: 2019-05-16
+    info: |
+      To use the automatic download set your credentials in your .netrc file.
+ See https://www.gnu.org/software/inetutils/manual/html_node/The-_002enetrc-file.html. + + Download the following files: + LandFluxEVAL.merged.89-05.monthly.all.nc + A registration is required for downloading the data (see + ). + + Landschuetzer2016: + tier: 2 + source: https://www.nodc.noaa.gov/archive/arc0105/0160558/3.3/data/0-data/ + last_access: 2019-03-08 + info: | + Download the file spco2_1982-2015_MPI_SOM-FFN_v2016.nc + + Landschuetzer2020: + tier: 2 + source: https://www.ncei.noaa.gov/data/oceans/ncei/ocads/data/0209633/ + last_access: 2022-11-02 + info: | + Download the file MPI-ULB-SOM_FFN_clim.nc + + MAC-LWP: + tier: 3 + source: https://search.earthdata.nasa.gov/search?q=MAC-LWP + last_access: 2020-01-30 + info: | + Select the MACLWP-mean dataset. + Download the script file for both projects. + Download the data using http either by selecting each granule + individually or by using the option "download all". + Data is freely available, but a registration is required. + + MERRA: + tier: 3 + source: https://goldsmr3.gesdisc.eosdis.nasa.gov/data/MERRA_MONTHLY/ + last_access: 2023-02-01 + info: | + Use automatic download. That will download monthly data. + + MERRA2: + tier: 3 + source: https://goldsmr4.gesdisc.eosdis.nasa.gov/data/MERRA2_MONTHLY/ https://goldsmr5.gesdisc.eosdis.nasa.gov/data/MERRA2_MONTHLY/ + last_access: 2022-09-13 + info: | + Use automatic download. That will download monthly data but with + yearly granularity. Note that some (most) variables are on the goldsmr4 server, + whereas others are on the goldsmr5 server. + + MLS-AURA: + tier: 3 + source: https://disc.gsfc.nasa.gov/datasets/ML2RHI_004/summary https://disc.gsfc.nasa.gov/datasets/ML2T_004/summary + last_access: 2020-02-03 + info: | + Select "Data Access" -> "Subset/Get Data" -> "Get Data" and follow the + "Instructions for downloading". All *.he5 files need to be saved in the + $RAWOBS/Tier3/MLS-AURA directory, where $RAWOBS refers to the RAWOBS + directory defined in the configuration. Apply this procedure to both + links provided above. The temperature fields are necessary for quality + control of the RHI data (see Data Quality Document for MLS-AURA for more + information). + A registration is required. + + MOBO-DIC_MPIM: + tier: 2 + source: https://www.ncei.noaa.gov/data/oceans/ncei/ocads/data/0221526/ + last_access: 2022-11-03 + info: | + Download the file MOBO-DIC_MPIM_monthly_clim.nc + + MOBO-DIC2004-2019: + tier: 2 + source: https://www.nodc.noaa.gov/archive/arc0211/0277099/2.3/data/0-data/ + last_access: 2023-10-09 + info: | + Download the file MPI_MOBO-DIC_2004-2019_v2.nc + + MODIS: + tier: 3 + source: https://ladsweb.modaps.eosdis.nasa.gov/search/order + last_access: 2019-02-09 + info: | + In Products: select "MODIS Aqua", "Collection 6.1" and + "L3 Atmosphere Product", click on MYD08_M3. + In Time: select from 2000-01-01 to today. + In Location: skip, the global domain will be applied. + In Files: select all. + Submit the order. + A registration is required to download the data. 
+
+      Caveats
+      clwvi and clivi data are in-cloud values whereas CMIP5 models provide
+      grid-box averages --> multiply MODIS clwvi and clivi values with cloud
+      fraction as a first guess
+
+  MTE:
+    tier: 3
+    source: http://www.bgc-jena.mpg.de/geodb/BGI/Home
+    last_access: 2019-05-07
+    info: |
+      Download the following files:
+        EnsembleGPP_GL.nc
+      A registration is required for downloading the data.
+
+  NCEP-DOE-R2:
+    tier: 2
+    source: https://psl.noaa.gov/data/gridded/data.ncep.reanalysis2.html
+    last_access: 2022-09-06
+    info: |
+      To facilitate the download, the links to the https server are provided.
+
+      https://downloads.psl.noaa.gov/Datasets/ncep.reanalysis2/Monthlies/
+        pressure/
+          rhum.mon.mean.nc
+          air.mon.mean.nc
+          omega.mon.mean.nc
+      https://downloads.psl.noaa.gov/Datasets/ncep.reanalysis2/Monthlies/
+        gaussian_grid
+          tcdc.eatm.mon.mean.nc
+          prate.sfc.mon.mean.nc
+          uflx.sfc.mon.mean.nc
+          vflx.sfc.mon.mean.nc
+          skt.sfc.mon.mean.nc
+      https://downloads.psl.noaa.gov/Datasets/ncep.reanalysis2/Monthlies/
+        surface
+          pr_wtr.eatm.mon.mean.nc
+
+
+  NCEP-NCAR-R1:
+    tier: 2
+    source: https://psl.noaa.gov/data/gridded/data.ncep.reanalysis.html
+    last_access: 2022-11-22
+    info: |
+      To facilitate the download, the links to the ftp server are provided.
+      Since the filenames are sometimes identical across different datasets,
+      save the data in two subdirectories in input_dir_path.
+      Subdirectory pressure/:
+        ftp://ftp.cdc.noaa.gov/Projects/Datasets/data.ncep.reanalysis/pressure/
+          air.mon.mean.nc
+          hgt.mon.mean.nc
+          rhum.mon.mean.nc
+          shum.mon.mean.nc
+          uwnd.mon.mean.nc
+          vwnd.mon.mean.nc
+          omega.mon.mean.nc
+        ftp://ftp.cdc.noaa.gov/Datasets/ncep.reanalysis.dailyavgs/pressure/
+          uwnd.*.nc
+          vwnd.*.nc
+      Subdirectory surface/:
+        ftp://ftp.cdc.noaa.gov/Datasets/ncep.reanalysis.derived/surface/
+          air.mon.mean.nc
+          pr_wtr.mon.mean.nc
+          slp.mon.mean.nc
+          wspd.mon.mean.nc
+          rhum.mon.mean.nc
+        ftp://ftp.cdc.noaa.gov/Datasets/ncep.reanalysis.derived/surface_gauss/
+          air.2m.mon.mean.nc
+          prate.sfc.mon.mean.nc
+          tmax.2m.mon.mean.nc
+          tmin.2m.mon.mean.nc
+        ftp://ftp.cdc.noaa.gov/Datasets/ncep.reanalysis.derived/other_gauss/
+          tcdc.eatm.mon.mean.nc
+        ftp://ftp.cdc.noaa.gov/Datasets/ncep.reanalysis.dailyavgs/surface_gauss/
+          prate.sft.gauss.*.nc
+        ftp://ftp.cdc.noaa.gov/Datasets/ncep.reanalysis.dailyavgs/other_gauss/
+          ulwrf.ntat.gauss.*.nc
+
+      Select the section "Pressure" and "Surface" and download the variables
+      listed below. Since raw data on pressure levels and for surface have the
+      same file and variable name, save the data in two different subdirectories
+      "press" and "surf" in input_dir_path.
+
+  NDP:
+    tier: 3
+    source: https://data.ess-dive.lbl.gov/view/doi:10.3334/CDIAC/LUE.NDP017.2006
+    last_access: 2019-10-14
+    info: |
+      Download the following file:
+        ndp017b.tar.gz
+      A registration is required for downloading the data.
+
+  NIWA-BS:
+    tier: 3
+    source: http://www.bodekerscientific.com/data/total-column-ozone
+    last_access: 2019-02-07
+    info: |
+      To get access to the data, send an email to datasets@bodekerscientific.com
+      Download all files from
+      ftp://ftp.bodekerscientific.com/CombinedTCOV3.3/Monthly/Patched/NetCDF/
+      Newer versions may become available over time, but make sure to download
+      the patched one. Only complete years should be downloaded.
+
+  NSIDC-0116-nh:
+    tier: 3
+    source: https://nsidc.org/data/NSIDC-0116
+    last_access: 2019-05-13
+    info: |
+      Download daily data from:
+      https://nsidc.org/data/NSIDC-0116
+      Login is required for download, and citation is required for use.
+
+  NOAA-CIRES-20CR-V2:
+    tier: 2
+    source: ftp.cdc.noaa.gov/Projects/20thC_ReanV2/Monthlies/
+    last_access: 2022-11-17
+    info: |
+      Download the following files:
+        monolevel/cldwtr.eatm.mon.mean.nc
+        monolevel/pr_wtr.eatm.mon.mean.nc
+        pressure/shum.mon.mean.nc
+        gaussian/monolevel/tcdc.eatm.mon.mean.nc
+        gaussian/monolevel/ulwrf.ntat.mon.mean.nc
+        gaussian/monolevel/uswrf.ntat.mon.mean.nc
+        gaussian/monolevel/prate.mon.mean.nc
+        gaussian/monolevel/uflx.mon.mean.nc
+        gaussian/monolevel/vflx.mon.mean.nc
+
+  NOAA-CIRES-20CR-V3:
+    tier: 2
+    source: ftp.cdc.noaa.gov/Projects/20thC_ReanV3/Monthlies/
+    last_access: 2023-03-27
+    info: |
+      Download the following files:
+        miscSI-MO/cldwtr.eatm.mon.mean.nc
+        miscSI-MO/pr_wtr.eatm.mon.mean.nc
+        prsSI-MO/shum.mon.mean.nc
+        miscMO/tcdc.eatm.mon.mean.nc
+        ntatFlxSI-MO/ulwrf.ntat.mon.mean.nc
+        ntatFlxSI-MO/uswrf.ntat.mon.mean.nc
+        ntatFlxSI-MO/csulf.ntat.mon.mean.nc
+        ntatFlxSI-MO/csusf.ntat.mon.mean.nc
+
+  NOAA-ERSSTv3b:
+    tier: 2
+    source: https://www1.ncdc.noaa.gov/pub/data/cmb/ersst/v3b/netcdf/
+    last_access: 2023-12-04
+    info: |
+      Download the following files:
+        ersst.yyyymm.nc
+      for years 1854 to 2020
+
+  NOAA-ERSSTv5:
+    tier: 2
+    source: https://www1.ncdc.noaa.gov/pub/data/cmb/ersst/v5/netcdf/
+    last_access: 2023-12-04
+    info: |
+      Download the following files:
+        ersst.v5.yyyymm.nc
+      for years 1854 onwards
+
+  NOAAGlobalTemp:
+    tier: 2
+    source: https://www.ncei.noaa.gov/data/noaa-global-surface-temperature/v5/access/
+    last_access: 2022-06-28
+    info: |
+      Download the following files:
+        [SOURCE]/gridded/NOAAGlobalTemp_v5.0.0_gridded_s188001_e202205_c20220608T133245.nc
+
+  NOAA-MBL-CH4:
+    tier: 2
+    source: https://gml.noaa.gov/webdata/ccgg/trends/ch4/ch4_mm_gl.csv
+    last_access: 2023-07-17
+    info: |
+      Download the following file:
+        https://gml.noaa.gov/webdata/ccgg/trends/ch4/ch4_mm_gl.csv
+
+  NSIDC-0116-sh:
+    tier: 3
+    source: https://nsidc.org/data/NSIDC-0116
+    last_access: 2019-05-13
+    info: |
+      Download daily data from:
+      https://nsidc.org/data/NSIDC-0116
+      Login is required for download, and citation is required for use.
+
+  NSIDC-G02202-sh:
+    tier: 3
+    source: https://polarwatch.noaa.gov/erddap/griddap/nsidcG02202v4shmday
+    last_access: 2023-05-13
+    info: |
+      Download monthly data.
+      Login is required for download, and citation is required for use.
+
+  OceanSODA-ETHZ:
+    tier: 2
+    source: https://www.ncei.noaa.gov/data/oceans/ncei/ocads/data/0220059/
+    last_access: 2024-02-15
+    info: |
+      Download the file OceanSODA_ETHZ-v2023.OCADS.01_1982-2022.nc
+
+  OSI-450-nh:
+    tier: 2
+    source: http://osisaf.met.no/p/ice/
+    last_access: 2019-05-02
+    info: |
+      Download the desired years from the following ftp:
+      ftp://osisaf.met.no/reprocessed/ice/conc/v2p0
+      Please keep the folder structure.
+
+      If you want to use only the nh data, download only the nh files,
+      using, e.g., wget -r -A '*_nh*.nc'.
+
+      If you also want to cmorize the sh, download everything and create a link
+      for OSI-450-sh pointing to the data folder. Both cmorizers will ignore
+      files belonging to the other hemisphere.
+
+  OSI-450-sh:
+    tier: 2
+    source: http://osisaf.met.no/p/ice/
+    last_access: 2019-05-02
+    info: |
+      Download the desired years from the following ftp:
+      ftp://osisaf.met.no/reprocessed/ice/conc/v2p0
+      Please keep the folder structure.
+
+      If you want to use only the sh data, download only the sh files,
+      using, e.g., wget -r -A '*_sh*.nc'.
+
+      If you also want to cmorize the nh data, download everything and create
+      a link for OSI-450-nh pointing to the data folder. Both cmorizers will
+      ignore files belonging to the other hemisphere.
+
+  PATMOS-x:
+    tier: 2
+    source: https://www.ncdc.noaa.gov/cdr/atmospheric/avhrr-cloud-properties-patmos-x
+    last_access: 2019-02-10
+    info: |
+      Click on Download and download all the NOAA data, excluding the
+      preliminary files, e.g. with:
+        wget -r --accept '*NOAA*.nc' --reject '*preliminary*'
+      Put all files in input_dir_path (no subdirectories with years).
+      Select only complete years for both the ascending and the descending
+      orbit.
+
+      Caveats
+        The data are processed by calculating the average of the ascending
+        and the descending orbit on each day. Multiple files are available
+        for some days; in this case the most recent version (NOAA-vv) is
+        chosen.
+
+  PERSIANN-CDR:
+    tier: 2
+    source: https://www.ncei.noaa.gov/data/precipitation-persiann/access/
+    last_access: 2020-04-22
+    info: |
+      Files are available free for download on the indicated site.
+      Files are stored as daily nc-files in individual year folders.
+      Please copy all files into a single directory.
+
+  PHC:
+    tier: 2
+    source: http://psc.apl.washington.edu/nonwp_projects/PHC/Data3.html
+    last_access: 2019-01-31
+    info: |
+      Go to `DOWNLOAD DATA (NetCDF)` and download the `ANNUAL` fields
+      for both `TEMPERATURE` and `SALINITY`.
+
+  PIOMAS:
+    tier: 2
+    source: http://psc.apl.uw.edu/research/projects/arctic-sea-ice-volume-anomaly/data/model_grid
+    last_access: 2019-05-10
+    info: |
+      Download and unpack the sithick files from:
+        https://pscfiles.apl.washington.edu/zhang/PIOMAS/data/v2.1/hiday/
+
+      And the grid info files from:
+        https://pscfiles.apl.washington.edu/zhang/PIOMAS/utilities/grid.dat
+        https://pscfiles.apl.washington.edu/zhang/PIOMAS/utilities/grid.dat.pop
+
+      Other variables provided by PIOMAS are not supported, but extending
+      support should be achievable for most of them by just modifying the
+      config file.
+
+  REGEN:
+    tier: 2
+    source: https://researchdata.ands.org.au/rainfall-estimates-gridded-v1-2019/1408744
+    last_access: 2020-02-26
+    info: |
+      Download the following files:
+        REGEN_AllStns_{version}_[1950..2016].nc
+
+  Scripps-CO2-KUM:
+    tier: 2
+    source: https://scrippsco2.ucsd.edu/data/atmospheric_co2/kum.html
+    last_access: 2020-11-25
+    info: |
+      Download the following file:
+        monthly_flask_co2_kum.csv
+
+  TCOM-CH4:
+    tier: 2
+    source: https://zenodo.org/record/7293740
+    last_access: 2023-01-17
+    info: |
+      Download the file zmch4_TCOM_plev_T2Dz_1991_2021.nc.
+
+  TCOM-N2O:
+    tier: 2
+    source: https://zenodo.org/record/7386001
+    last_access: 2023-01-17
+    info: |
+      Download the file zmn2o_TCOM_plev_T2Dz_1991_2021.nc.
+
+  UWisc:
+    tier: 3
+    source: Data provided by Ralf Bennartz.
+    last_access: 2015-04-15
+    info: |
+      Contact Ralf Bennartz (Earth and Environmental Sciences, Vanderbilt
+      University, USA).
+
+  WFDE5:
+    tier: 2
+    source: https://doi.org/10.24381/cds.20d54e34
+    last_access: 2021-04-16
+    info: |
+      Download the following variables from the CDS:
+        Near-surface air temperature ("CRU")
+        Rainfall flux ("CRU" as well as "CRU and GPCC")
+        Snowfall flux ("CRU" as well as "CRU and GPCC")
+      Unzip the downloaded files and rename them to follow the syntax
+      '{raw_name}_WFDE5_{reference}_*_v1.0.nc'.
+
+  WOA:
+    tier: 2
+    source: https://data.nodc.noaa.gov/woa/WOA18/DATA/
+    last_access: 2019-01-31
+    info: |
+      Download the following files:
+        temperature/netcdf/decav81B0/1.00/woa18_decav81B0_t00_01.nc
+        salinity/netcdf/decav81B0/1.00/woa18_decav81B0_s00_01.nc
+        oxygen/netcdf/all/1.00/woa18_all_o00_01.nc
+        nitrate/netcdf/all/1.00/woa18_all_n00_01.nc
+        phosphate/netcdf/all/1.00/woa18_all_p00_01.nc
+        silicate/netcdf/all/1.00/woa18_all_i00_01.nc
+      (To get WOA13, replace the filename prefix woa18 with woa13 and the
+      source with https://www.ncei.noaa.gov/data/oceans/woa/WOA13/DATAv2)
diff --git a/esmvaltool/cmorizers/data/datasets_schema.yml b/esmvaltool/cmorizers/data/datasets_schema.yml
new file mode 100644
index 0000000000..5d43f00724
--- /dev/null
+++ b/esmvaltool/cmorizers/data/datasets_schema.yml
@@ -0,0 +1,16 @@
+# Generic datasets file Yamale schema definition.
+# See https://github.com/23andMe/Yamale for help.
+
+---
+# Dataset file schema
+datasets: map(include('dataset'), required=True)
+
+---
+# Dataset item definition
+
+dataset:
+  tier: int(min=2, max=3, required=False)
+  source: str()
+  latest: int(required=False, min=20150000, max=20210000)
+  info: str()
+  last_access: day()
diff --git a/esmvaltool/cmorizers/data/download_scripts/download_era_interim.py b/esmvaltool/cmorizers/data/download_scripts/download_era_interim.py
new file mode 100644
index 0000000000..374c750ef6
--- /dev/null
+++ b/esmvaltool/cmorizers/data/download_scripts/download_era_interim.py
@@ -0,0 +1,292 @@
+"""Script to download ERA-Interim and ERA-Interim-Land data.
+
+Before running the script:
+
+1. Install the dependency, i.e. ECMWFDataServer. For this, run:
+pip install ecmwf-api-client
+
+2. Create an account at https://www.ecmwf.int/
+
+3. Follow the instructions at:
+https://confluence.ecmwf.int/display/WEBAPI/Accessing+ECMWF+data+servers+in+batch
+
+4. Copy/paste the text in https://api.ecmwf.int/v1/key/ into a blank text file
+and save it as $HOME/.ecmwfapirc
+
+5. Copy the default configuration file with
+
+```bash
+esmvaltool config get_config_user --path=config-user.yml
+```
+
+and set the ``rootpath`` for the RAWOBS project.
+
+6. Check the description of the variables at
+https://apps.ecmwf.int/codes/grib/param-db
+
+7. Check the invariant variables at
+https://apps.ecmwf.int/datasets/data/interim-full-invariant
+
+8. Run the script:
+
+```bash
+python download_era_interim.py --config_file config-user.yml --start_year 2000
+--end_year 2000
+```
+
+This will download and save the data in the RAWOBS directory,
+under Tier3/ERA-Interim.
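+
+For example, to download a single decade instead of the full default range
+(1979-2019), run:
+
+```bash
+python download_era_interim.py --config_file config-user.yml \
+--start_year 1990 --end_year 1999
+```
+
+ERA-Interim-Land data for the same years are saved under
+Tier3/ERA-Interim-Land.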
+ +""" +import argparse +import os + +import yaml +from ecmwfapi import ECMWFDataServer + +DAY_TIMESTEPS = { + 'fc': { + 'step': '3/6/9/12', + 'time': '00:00:00/12:00:00', + 'type': 'fc', + 'levtype': 'sfc', + }, + 'accu': { + 'step': '12', + 'time': '00:00:00/12:00:00', + 'type': 'fc', + 'levtype': 'sfc', + }, + 'an': { + 'type': 'an', + 'time': '00:00:00/06:00:00/12:00:00/18:00:00', + 'step': '0', + 'levtype': 'sfc', + }, + '3d': { + 'type': 'an', + 'time': '00:00:00/06:00:00/12:00:00/18:00:00', + 'step': '0', + 'levelist': '10/50/100/250/500/700/850/1000', # CMIP6 day table, plev8 + 'levtype': 'pl', + } +} + + +DAY_PARAMS = [ + ('167.128', 't2m', 'an'), # 2 metre temperature + ('228.128', 'tp', 'accu'), # Total precipitation + ('182.128', 'e', 'accu'), # Evaporation + ('201.128', 'mx2t', 'fc'), # Max. temp at 2m since previous post-proc + ('202.128', 'mn2t', 'fc'), # Min. temp at 2m since previous post-proc + ('235.128', 'skt', 'an'), # Skin temperature + ('165.128', 'u10', 'an'), # 10 metre U wind component + ('166.128', 'v10', 'an'), # 10 metre V wind component + ('168.128', 'd2m', 'an'), # 2 metre dewpoint temperature + ('151.128', 'msl', 'an'), # Mean sea level pressure + ('134.128', 'sp', 'an'), # Surface pressure + ('144.128', 'sf', 'accu'), # Snowfall + ('176.128', 'ssr', 'accu'), # Surface net solar radiation + ('169.128', 'ssrd', 'accu'), # Surface solar radiation downwards + ('175.128', 'strd', 'accu'), # Surface thermal radiation downwards + ('205.128', 'ro', 'accu'), # Runoff + ('238.128', 'tsn', 'an'), # Temperature of snow layer + ('212.128', 'tisr', 'accu'), # TOA incident solar radiation + ('164.128', 'tcc', 'an'), # Total cloud cover + ('129.128', 'z', '3d'), # Geopotential + ('132.128', 'v', '3d'), # V component of wind + ('130.128', 't', '3d'), # Temperature +] + + +MONTH_TIMESTEPS = { + 'accu': { + 'levtype': 'sfc', + 'stream': 'mdfa', + 'type': 'fc', + 'step': '0-12' + }, + 'an': { + 'levtype': 'sfc', + 'stream': 'moda', + 'type': 'an' + }, + 'fc': { + 'levtype': 'sfc', + 'stream': 'moda', + 'type': 'fc' + }, + '3d': { + 'levtype': 'pl', + 'stream': 'moda', + 'type': 'an', + 'levelist': '1/5/10/20/30/50/70/100/150/200/' + + '250/300/400/500/600/700/850' + + '/925/1000' # CMIP6 Amon table, plev19 + } +} + +LAND_PARAMS = [ + ('39.128', 'swvl1', 'an'), # Volumetric soil moisture layer 1 [0-7 cm] +] + +MONTH_PARAMS = [ + ('167.128', 't2m', 'an'), # 2 metre temperature + ('228.128', 'tp', 'accu'), # Total precipitation + ('182.128', 'e', 'accu'), # Evaporation + ('235.128', 'skt', 'an'), # Skin temperature + ('165.128', 'u10', 'an'), # 10 metre U wind component + ('166.128', 'v10', 'an'), # 10 metre V wind component + ('168.128', 'd2m', 'an'), # 2 metre dewpoint temperature + ('151.128', 'msl', 'an'), # Mean sea level pressure + ('144.128', 'sf', 'accu'), # Snowfall + ('176.128', 'ssr', 'accu'), # Surface net solar radiation + ('169.128', 'ssrd', 'accu'), # Surface solar radiation downwards + ('205.128', 'ro', 'accu'), # Runoff + ('238.128', 'tsn', 'an'), # Temperature of snow layer + ('212.128', 'tisr', 'accu'), # TOA incident solar radiation + ('164.128', 'tcc', 'an'), # Total cloud cover + ('56.162', 'p56.162', 'an'), # Vertical integral of cloud liquid water + ('57.162', 'p57.162', 'an'), # Vertical integral of cloud frozen water + ('137.128', 'tcwv', 'an'), # Total column water vapour + ('134.128', 'sp', 'an'), # Surface pressure + ('229.128', 'iews', 'fc'), # Inst. eastward turbulent surface stress + ('230.128', 'inss', 'fc'), # Inst. 
northward turbulent surface stress
+    ('34.128', 'sst', 'an'),  # Sea surface temperature
+    ('177.128', 'str', 'accu'),  # Surface net thermal radiation
+    ('147.128', 'slhf', 'accu'),  # Surface latent heat flux
+    ('146.128', 'sshf', 'accu'),  # Surface sensible heat flux
+    ('157.128', 'r', '3d'),  # Relative humidity
+    ('246.128', 'clwc', '3d'),  # Specific cloud liquid water content
+    ('247.128', 'ciwc', '3d'),  # Specific cloud ice water content
+    ('130.128', 't', '3d'),  # Temperature
+    ('131.128', 'u', '3d'),  # U component of wind
+    ('132.128', 'v', '3d'),  # V component of wind
+    ('135.128', 'w', '3d'),  # Vertical velocity
+    ('133.128', 'q', '3d'),  # Specific humidity
+    ('129.128', 'z', '3d'),  # Geopotential
+    ('178.128', 'tsr', 'accu'),  # Net top solar radiation
+    ('208.128', 'tsrc', 'accu'),  # Net top solar radiation clear-sky
+    ('179.128', 'ttr', 'accu'),  # Top net thermal radiation
+    ('209.128', 'ttrc', 'accu'),  # Top net thermal radiation clear-sky
+    ('248.128', 'cc', '3d'),  # Fraction of cloud cover
+]
+
+
+INVARIANT_PARAMS = [
+    ('172.128', 'lsm'),  # Land-sea mask
+    ('129.128', 'z'),  # Geopotential (invariant at surface)
+]
+
+
+def _get_land_data(params, timesteps, years, server, era_interim_land_dir):
+    for param_id, symbol, timestep in params:
+        frequency = '6hourly'
+        for year in years:
+            server.retrieve({
+                'class': 'ei',
+                'dataset': 'interim_land',
+                'date': f'{year}-01-01/to/{year}-12-31',
+                'expver': '2',
+                'grid': '0.25/0.25',
+                'param': param_id,
+                'format': 'netcdf',
+                'target': f'{era_interim_land_dir}/ERA-Interim-Land_{symbol}'
+                          f'_{frequency}_{year}.nc',
+                **timesteps[timestep]
+            })
+
+
+def _get_daily_data(params, timesteps, years, server, era_interim_dir):
+    for param_id, symbol, timestep in params:
+        frequency = 'daily'
+        for year in years:
+            server.retrieve({
+                'class': 'ei',
+                'dataset': 'interim',
+                'date': f'{year}-01-01/to/{year}-12-31',
+                'expver': '1',
+                'grid': '0.75/0.75',
+                'param': param_id,
+                'stream': 'oper',
+                'format': 'netcdf',
+                'target': f'{era_interim_dir}/ERA-Interim_{symbol}'
+                          f'_{frequency}_{year}.nc',
+                **timesteps[timestep]
+            })
+
+
+def _get_monthly_data(params, timesteps, years, server, era_interim_dir):
+    for param_id, symbol, timestep in params:
+        frequency = 'monthly'
+        for year in years:
+            server.retrieve({
+                'class': 'ei',
+                'dataset': 'interim',
+                # All months of a year, e.g. 19900101/.../19901101/19901201
+                'date': '/'.join([f'{year}{m:02}01' for m in range(1, 13)]),
+                'expver': '1',
+                'grid': '0.75/0.75',
+                'param': param_id,
+                'format': 'netcdf',
+                'target': f'{era_interim_dir}/ERA-Interim_{symbol}'
+                          f'_{frequency}_{year}.nc',
+                **timesteps[timestep]
+            })
+
+
+def _get_invariant_data(params, server, era_interim_dir):
+    for param_id, symbol in params:
+        server.retrieve({
+            'class': 'ei',
+            'dataset': 'interim',
+            'date': '1989-01-01',
+            'expver': '1',
+            'grid': '0.75/0.75',
+            'levtype': 'sfc',
+            'param': param_id,
+            'step': '0',
+            'stream': 'oper',
+            'time': '12:00:00',
+            'type': 'an',
+            'format': 'netcdf',
+            'target': f'{era_interim_dir}/ERA-Interim_{symbol}.nc',
+        })
+
+
+def cli():
+    """Download ERA-Interim variables from ECMWF data server."""
+    parser = argparse.ArgumentParser(description=__doc__)
+    parser.add_argument('--config_file', '-c',
+                        default=os.path.join(os.path.dirname(__file__),
+                                             'config-user.yml'),
+                        help='Config file')
+    parser.add_argument('--start_year', type=int,
+                        default=1979, help='Start year')
+    parser.add_argument('--end_year', type=int, default=2019, help='End year')
+    args = parser.parse_args()
+
+    # Get and read the config file
+    config_file_name = os.path.abspath(
+        os.path.expandvars(os.path.expanduser(args.config_file)))
+
+    with open(config_file_name, 'r') as config_file:
+        config = yaml.safe_load(config_file)
+
+    rawobs_dir = os.path.abspath(
+        os.path.expandvars(os.path.expanduser(config['rootpath']['RAWOBS'])))
+    era_interim_dir = f'{rawobs_dir}/Tier3/ERA-Interim'
+    os.makedirs(era_interim_dir, exist_ok=True)
+    era_interim_land_dir = f'{rawobs_dir}/Tier3/ERA-Interim-Land'
+    os.makedirs(era_interim_land_dir, exist_ok=True)
+
+    years = range(args.start_year, args.end_year + 1)
+    server = ECMWFDataServer()
+
+    _get_daily_data(DAY_PARAMS, DAY_TIMESTEPS, years, server, era_interim_dir)
+    _get_monthly_data(MONTH_PARAMS, MONTH_TIMESTEPS,
+                      years, server, era_interim_dir)
+    _get_invariant_data(INVARIANT_PARAMS, server, era_interim_dir)
+    _get_land_data(LAND_PARAMS, DAY_TIMESTEPS,
+                   years, server, era_interim_land_dir)
+
+
+if __name__ == "__main__":
+    cli()
diff --git a/esmvaltool/cmorizers/data/downloaders/__init__.py b/esmvaltool/cmorizers/data/downloaders/__init__.py
new file mode 100644
index 0000000000..9a6cd08429
--- /dev/null
+++ b/esmvaltool/cmorizers/data/downloaders/__init__.py
@@ -0,0 +1 @@
+"""Automatic downloaders and helper classes."""
diff --git a/esmvaltool/cmorizers/data/downloaders/cds.py b/esmvaltool/cmorizers/data/downloaders/cds.py
new file mode 100644
index 0000000000..af1ad62501
--- /dev/null
+++ b/esmvaltool/cmorizers/data/downloaders/cds.py
@@ -0,0 +1,128 @@
+"""Downloader for the Climate Data Store."""
+
+import logging
+import os
+from collections.abc import Iterable
+
+import cdsapi
+
+from .downloader import BaseDownloader
+
+logger = logging.getLogger(__name__)
+
+
+class CDSDownloader(BaseDownloader):
+    """Downloader class for the climate data store.
+
+    Parameters
+    ----------
+    product_name : str
+        Name of the product in the CDS
+    config : dict
+        ESMValTool's user configuration
+    request_dictionary : dict
+        Common CDS request parameters
+    dataset : str
+        Name of the dataset
+    dataset_info : dict
+        Dataset information from the datasets.yml file
+    overwrite : bool
+        Overwrite already downloaded files
+    extra_name : str, optional
+        Some products have a suffix appended to their name for certain
+        variables.
+        This parameter specifies that suffix; by default ''.
+    """
+    def __init__(self,
+                 product_name,
+                 config,
+                 request_dictionary,
+                 dataset,
+                 dataset_info,
+                 overwrite,
+                 extra_name=''):
+        super().__init__(config, dataset, dataset_info, overwrite)
+        try:
+            self._client = cdsapi.Client()
+        except Exception as ex:
+            if str(ex).endswith(".cdsapirc"):
+                logger.error(
+                    'Could not connect to the CDS due to issues with your '
+                    '".cdsapirc" file. More info in '
+                    'https://cds.climate.copernicus.eu/api-how-to.')
+            raise
+        self._product_name = product_name
+        self._request_dict = request_dictionary
+        self.extra_name = extra_name
+
+    def download(self,
+                 year,
+                 month,
+                 day=None,
+                 file_pattern=None,
+                 file_format='tar'):
+        """Download a specific month from the CDS.
+
+        Parameters
+        ----------
+        year : int
+            Year to download
+        month : int
+            Month to download
+        day : int, list(int), optional
+            Day or days to download, by default None
+        file_pattern : str, optional
+            Filename pattern, by default None
+        file_format : str, optional
+            File format, by default tar
+        """
+        request_dict = self._request_dict.copy()
+        request_dict['year'] = f'{year}'
+        request_dict['month'] = f"{month:02d}"
+        date_str = f"{year}{month:02d}"
+        if day:
+            if isinstance(day, Iterable):
+                request_dict['day'] = day
+            else:
+                request_dict['day'] = f"{day:02d}"
+                date_str += f"{day:02d}"
+
+        os.makedirs(self.local_folder, exist_ok=True)
+        if file_pattern is None:
+            file_pattern = f"{self._product_name}"
+        file_path = f"{file_pattern}_{date_str}.{file_format}"
+        self.download_request(file_path, request_dict)
+
+    def download_request(self, filename, request=None):
+        """Download a specific request.
+
+        Parameters
+        ----------
+        filename : str
+            Name of the file to download
+        request : dict, optional
+            Request dictionary for the CDS, by default None
+        """
+        if request is None:
+            request = self._request_dict.copy()
+
+        os.makedirs(self.local_folder, exist_ok=True)
+        filename = os.path.join(self.local_folder, filename)
+        if os.path.exists(filename):
+            if self.overwrite:
+                os.remove(filename)
+            else:
+                logger.info('File %s already downloaded. Skipping...',
+                            filename)
+                return
+        try:
+            self._client.retrieve(
+                self._product_name,
+                request,
+                filename,
+            )
+        except Exception:
+            logger.error('Failed request: %s', request)
+            raise
diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/__init__.py b/esmvaltool/cmorizers/data/downloaders/datasets/__init__.py
new file mode 100644
index 0000000000..7fa64e05f0
--- /dev/null
+++ b/esmvaltool/cmorizers/data/downloaders/datasets/__init__.py
@@ -0,0 +1 @@
+"""Downloaders for each supported dataset."""
diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/aeronet.py b/esmvaltool/cmorizers/data/downloaders/datasets/aeronet.py
new file mode 100644
index 0000000000..668a688bb6
--- /dev/null
+++ b/esmvaltool/cmorizers/data/downloaders/datasets/aeronet.py
@@ -0,0 +1,38 @@
+"""Script to download Aeronet from its webpage."""
+import logging
+
+from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader
+
+logger = logging.getLogger(__name__)
+
+
+def download_dataset(config, dataset, dataset_info, start_date, end_date,
+                     overwrite):
+    """Download dataset.
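+
+    The full monthly AOD Level 2.0 archive (AOD_Level20_Monthly_V3.tar.gz)
+    is fetched in one go; the start_date and end_date arguments are not
+    used by this downloader.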
+
+    Parameters
+    ----------
+    config : dict
+        ESMValTool's user configuration
+    dataset : str
+        Name of the dataset
+    dataset_info : dict
+        Dataset information from the datasets.yml file
+    start_date : datetime
+        Start of the interval to download
+    end_date : datetime
+        End of the interval to download
+    overwrite : bool
+        Overwrite already downloaded files
+    """
+    downloader = WGetDownloader(
+        config=config,
+        dataset=dataset,
+        dataset_info=dataset_info,
+        overwrite=overwrite,
+    )
+    filename = "AOD_Level20_Monthly_V3.tar.gz"
+    downloader.download_file(
+        f"https://aeronet.gsfc.nasa.gov/data_push/V3/AOD/{filename}",
+        wget_options=[],
+    )
diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/aphro_ma.py b/esmvaltool/cmorizers/data/downloaders/datasets/aphro_ma.py
new file mode 100644
index 0000000000..425bd93e88
--- /dev/null
+++ b/esmvaltool/cmorizers/data/downloaders/datasets/aphro_ma.py
@@ -0,0 +1,48 @@
+"""Script to download APHRO-MA from its webpage."""
+import logging
+
+from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader
+from esmvaltool.cmorizers.data.utilities import unpack_files_in_folder
+
+logger = logging.getLogger(__name__)
+
+
+def download_dataset(config, dataset, dataset_info, start_date, end_date,
+                     overwrite):
+    """Download dataset.
+
+    Parameters
+    ----------
+    config : dict
+        ESMValTool's user configuration
+    dataset : str
+        Name of the dataset
+    dataset_info : dict
+        Dataset information from the datasets.yml file
+    start_date : datetime
+        Start of the interval to download
+    end_date : datetime
+        End of the interval to download
+    overwrite : bool
+        Overwrite already downloaded files
+    """
+    downloader = WGetDownloader(
+        config=config,
+        dataset=dataset,
+        dataset_info=dataset_info,
+        overwrite=overwrite,
+    )
+
+    def download_file(path):
+        downloader.download_file(
+            "http://aphrodite.st.hirosaki-u.ac.jp/product/" + path, [])
+
+    for grid in ('025deg', '050deg'):
+        download_file(f"APHRO_V1808_TEMP/APHRO_MA/{grid}_nc/"
+                      f"APHRO_MA_TAVE_{grid}_V1808.nc.tgz")
+        download_file(f"APHRO_V1101/APHRO_MA/{grid}_nc/"
+                      f"APHRO_MA_{grid}_V1101.1951-2007.nc.gz.tar")
+        download_file(f"APHRO_V1101EX_R1/APHRO_MA/{grid}_nc/"
+                      f"APHRO_MA_{grid}_V1101_EXR1.nc.tgz")
+
+    unpack_files_in_folder(downloader.local_folder)
diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/aura_tes.py b/esmvaltool/cmorizers/data/downloaders/datasets/aura_tes.py
new file mode 100644
index 0000000000..9f7e195397
--- /dev/null
+++ b/esmvaltool/cmorizers/data/downloaders/datasets/aura_tes.py
@@ -0,0 +1,209 @@
+"""Script to download AURA-TES from its webpage."""
+
+from esmvaltool.cmorizers.data.downloaders.wget import NASADownloader
+
+
+def download_dataset(config, dataset, dataset_info, start_date, end_date,
+                     overwrite):
+    """Download dataset.
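+
+    All monthly TES L3 ozone files listed in FILES below are downloaded;
+    the start_date and end_date arguments are not used by this downloader.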
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = NASADownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + for path in FILES: + downloader.download_file(path) + + +SERVER = "https://l5ftl01.larc.nasa.gov/ops/tesl1l2l3/TES/TL3O3M." + +FILES = f"""{SERVER}005/2018.01.17/TES-Aura_L3-O3-M2018m01_C01_F01_11.he5 +{SERVER}005/2017.11.02/TES-Aura_L3-O3-M2017m11_C01_F01_11.he5 +{SERVER}005/2017.10.23/TES-Aura_L3-O3-M2017m10_C01_F01_11.he5 +{SERVER}005/2012.11.02/TES-Aura_L3-O3-M2012m11_C01_F01_11.he5 +{SERVER}005/2012.10.18/TES-Aura_L3-O3-M2012m10_C01_F01_11.he5 +{SERVER}005/2012.09.17/TES-Aura_L3-O3-M2012m09_C01_F01_11.he5 +{SERVER}005/2012.04.25/TES-Aura_L3-O3-M2012m04_C01_F01_11.he5 +{SERVER}005/2011.12.04/TES-Aura_L3-O3-M2011m12_C01_F01_11.he5 +{SERVER}005/2011.11.14/TES-Aura_L3-O3-M2011m11_C01_F01_11.he5 +{SERVER}005/2011.09.11/TES-Aura_L3-O3-M2011m09_C01_F01_11.he5 +{SERVER}005/2011.07.01/TES-Aura_L3-O3-M2011m07_C01_F01_11.he5 +{SERVER}005/2011.06.01/TES-Aura_L3-O3-M2011m06_C01_F01_11.he5 +{SERVER}005/2011.05.15/TES-Aura_L3-O3-M2011m05_C01_F01_11.he5 +{SERVER}005/2011.04.02/TES-Aura_L3-O3-M2011m04_C01_F01_11.he5 +{SERVER}005/2011.03.01/TES-Aura_L3-O3-M2011m03_C01_F01_11.he5 +{SERVER}005/2011.02.01/TES-Aura_L3-O3-M2011m02_C01_F01_11.he5 +{SERVER}005/2011.01.02/TES-Aura_L3-O3-M2011m01_C01_F01_11.he5 +{SERVER}005/2010.12.01/TES-Aura_L3-O3-M2010m12_C01_F01_11.he5 +{SERVER}005/2010.11.01/TES-Aura_L3-O3-M2010m11_C01_F01_11.he5 +{SERVER}005/2010.10.28/TES-Aura_L3-O3-M2010m10_C01_F01_11.he5 +{SERVER}005/2010.09.02/TES-Aura_L3-O3-M2010m09_C01_F01_11.he5 +{SERVER}005/2010.08.01/TES-Aura_L3-O3-M2010m08_C01_F01_11.he5 +{SERVER}005/2010.07.02/TES-Aura_L3-O3-M2010m07_C01_F01_11.he5 +{SERVER}005/2010.06.02/TES-Aura_L3-O3-M2010m06_C01_F01_11.he5 +{SERVER}005/2010.05.01/TES-Aura_L3-O3-M2010m05_C01_F01_11.he5 +{SERVER}005/2010.03.03/TES-Aura_L3-O3-M2010m03_F01_11.he5 +{SERVER}005/2009.12.02/TES-Aura_L3-O3-M2009m12_F01_11.he5 +{SERVER}005/2009.11.02/TES-Aura_L3-O3-M2009m11_F01_11.he5 +{SERVER}005/2009.10.01/TES-Aura_L3-O3-M2009m10_F01_11.he5 +{SERVER}005/2009.09.01/TES-Aura_L3-O3-M2009m09_F01_11.he5 +{SERVER}005/2009.08.02/TES-Aura_L3-O3-M2009m08_F01_11.he5 +{SERVER}005/2009.07.01/TES-Aura_L3-O3-M2009m07_F01_11.he5 +{SERVER}005/2009.06.01/TES-Aura_L3-O3-M2009m06_F01_11.he5 +{SERVER}005/2009.05.02/TES-Aura_L3-O3-M2009m05_F01_11.he5 +{SERVER}005/2009.04.02/TES-Aura_L3-O3-M2009m04_F01_11.he5 +{SERVER}005/2009.03.01/TES-Aura_L3-O3-M2009m03_F01_11.he5 +{SERVER}005/2009.02.01/TES-Aura_L3-O3-M2009m02_F01_11.he5 +{SERVER}005/2009.01.02/TES-Aura_L3-O3-M2009m01_F01_11.he5 +{SERVER}005/2008.12.03/TES-Aura_L3-O3-M2008m12_F01_11.he5 +{SERVER}005/2008.11.01/TES-Aura_L3-O3-M2008m11_F01_11.he5 +{SERVER}005/2008.10.02/TES-Aura_L3-O3-M2008m10_F01_11.he5 +{SERVER}005/2008.09.02/TES-Aura_L3-O3-M2008m09_F01_11.he5 +{SERVER}005/2008.08.01/TES-Aura_L3-O3-M2008m08_F01_11.he5 +{SERVER}005/2008.07.02/TES-Aura_L3-O3-M2008m07_F01_11.he5 +{SERVER}005/2008.06.02/TES-Aura_L3-O3-M2008m06_F01_11.he5 +{SERVER}005/2008.05.01/TES-Aura_L3-O3-M2008m05_F01_11.he5 +{SERVER}005/2008.04.01/TES-Aura_L3-O3-M2008m04_F01_11.he5 
+{SERVER}005/2008.03.02/TES-Aura_L3-O3-M2008m03_F01_11.he5 +{SERVER}005/2008.02.01/TES-Aura_L3-O3-M2008m02_F01_11.he5 +{SERVER}005/2008.01.02/TES-Aura_L3-O3-M2008m01_F01_11.he5 +{SERVER}005/2007.12.01/TES-Aura_L3-O3-M2007m12_F01_11.he5 +{SERVER}005/2007.11.01/TES-Aura_L3-O3-M2007m11_F01_11.he5 +{SERVER}005/2007.10.02/TES-Aura_L3-O3-M2007m10_F01_11.he5 +{SERVER}005/2007.09.02/TES-Aura_L3-O3-M2007m09_F01_11.he5 +{SERVER}005/2007.08.01/TES-Aura_L3-O3-M2007m08_F01_11.he5 +{SERVER}005/2007.07.02/TES-Aura_L3-O3-M2007m07_F01_11.he5 +{SERVER}005/2007.06.02/TES-Aura_L3-O3-M2007m06_F01_11.he5 +{SERVER}005/2007.05.01/TES-Aura_L3-O3-M2007m05_F01_11.he5 +{SERVER}005/2007.04.01/TES-Aura_L3-O3-M2007m04_F01_11.he5 +{SERVER}005/2007.03.02/TES-Aura_L3-O3-M2007m03_F01_11.he5 +{SERVER}005/2007.02.02/TES-Aura_L3-O3-M2007m02_F01_11.he5 +{SERVER}005/2007.01.01/TES-Aura_L3-O3-M2007m01_F01_11.he5 +{SERVER}005/2006.12.02/TES-Aura_L3-O3-M2006m12_F01_11.he5 +{SERVER}005/2006.11.02/TES-Aura_L3-O3-M2006m11_F01_11.he5 +{SERVER}005/2006.10.01/TES-Aura_L3-O3-M2006m10_F01_11.he5 +{SERVER}005/2006.09.09/TES-Aura_L3-O3-M2006m09_F01_11.he5 +{SERVER}005/2006.08.02/TES-Aura_L3-O3-M2006m08_F01_11.he5 +{SERVER}005/2006.07.01/TES-Aura_L3-O3-M2006m07_F01_11.he5 +{SERVER}005/2006.06.01/TES-Aura_L3-O3-M2006m06_F01_11.he5 +{SERVER}005/2006.05.02/TES-Aura_L3-O3-M2006m05_F01_11.he5 +{SERVER}005/2006.04.02/TES-Aura_L3-O3-M2006m04_F01_11.he5 +{SERVER}005/2006.03.01/TES-Aura_L3-O3-M2006m03_F01_11.he5 +{SERVER}005/2006.02.01/TES-Aura_L3-O3-M2006m02_F01_11.he5 +{SERVER}005/2006.01.02/TES-Aura_L3-O3-M2006m01_F01_11.he5 +{SERVER}005/2005.12.07/TES-Aura_L3-O3-M2005m12_F01_11.he5 +{SERVER}005/2005.11.05/TES-Aura_L3-O3-M2005m11_F01_11.he5 +{SERVER}005/2005.10.02/TES-Aura_L3-O3-M2005m10_F01_11.he5 +{SERVER}005/2005.09.02/TES-Aura_L3-O3-M2005m09_F01_11.he5 +{SERVER}005/2005.08.01/TES-Aura_L3-O3-M2005m08_F01_11.he5 +{SERVER}005/2005.07.04/TES-Aura_L3-O3-M2005m07_F01_11.he5 +{SERVER}005/2005.03.04/TES-Aura_L3-O3-M2005m03_F01_11.he5 +{SERVER}005/2005.02.02/TES-Aura_L3-O3-M2005m02_F01_11.he5 +{SERVER}005/2005.01.01/TES-Aura_L3-O3-M2005m01_F01_11.he5 +{SERVER}005/2004.12.02/TES-Aura_L3-O3-M2004m12_F01_11.he5 +{SERVER}005/2004.11.04/TES-Aura_L3-O3-M2004m11_F01_11.he5 +{SERVER}005/2004.10.09/TES-Aura_L3-O3-M2004m10_F01_11.he5 +{SERVER}005/2004.09.03/TES-Aura_L3-O3-M2004m09_F01_11.he5 +{SERVER}004/2012.11.02/TES-Aura_L3-O3-M2012m11_C01_F01_10.he5 +{SERVER}004/2012.10.18/TES-Aura_L3-O3-M2012m10_C01_F01_10.he5 +{SERVER}004/2012.09.17/TES-Aura_L3-O3-M2012m09_C01_F01_10.he5 +{SERVER}004/2012.04.25/TES-Aura_L3-O3-M2012m04_C01_F01_10.he5 +{SERVER}004/2011.12.04/TES-Aura_L3-O3-M2011m12_C01_F01_10.he5 +{SERVER}004/2011.11.14/TES-Aura_L3-O3-M2011m11_C01_F01_10.he5 +{SERVER}004/2011.09.11/TES-Aura_L3-O3-M2011m09_C01_F01_10.he5 +{SERVER}004/2011.07.01/TES-Aura_L3-O3-M2011m07_C01_F01_10.he5 +{SERVER}004/2011.06.01/TES-Aura_L3-O3-M2011m06_C01_F01_10.he5 +{SERVER}004/2011.05.15/TES-Aura_L3-O3-M2011m05_C01_F01_10.he5 +{SERVER}004/2011.04.02/TES-Aura_L3-O3-M2011m04_C01_F01_10.he5 +{SERVER}004/2011.03.01/TES-Aura_L3-O3-M2011m03_C01_F01_10.he5 +{SERVER}004/2011.02.01/TES-Aura_L3-O3-M2011m02_C01_F01_10.he5 +{SERVER}004/2011.01.02/TES-Aura_L3-O3-M2011m01_C01_F01_10.he5 +{SERVER}004/2010.12.01/TES-Aura_L3-O3-M2010m12_C01_F01_10.he5 +{SERVER}004/2010.11.01/TES-Aura_L3-O3-M2010m11_C01_F01_10.he5 +{SERVER}004/2010.10.28/TES-Aura_L3-O3-M2010m10_C01_F01_10.he5 +{SERVER}004/2010.09.02/TES-Aura_L3-O3-M2010m09_C01_F01_10.he5 
+{SERVER}004/2010.08.01/TES-Aura_L3-O3-M2010m08_C01_F01_10.he5 +{SERVER}004/2010.07.02/TES-Aura_L3-O3-M2010m07_C01_F01_10.he5 +{SERVER}004/2010.06.02/TES-Aura_L3-O3-M2010m06_C01_F01_10.he5 +{SERVER}004/2010.05.01/TES-Aura_L3-O3-M2010m05_C01_F01_10.he5 +{SERVER}004/2010.04.01/TES-Aura_L3-O3-M2010m04_F01_10.he5 +{SERVER}004/2010.03.03/TES-Aura_L3-O3-M2010m03_F01_10.he5 +{SERVER}004/2009.12.02/TES-Aura_L3-O3-M2009m12_F01_10.he5 +{SERVER}004/2009.11.02/TES-Aura_L3-O3-M2009m11_F01_10.he5 +{SERVER}004/2009.10.01/TES-Aura_L3-O3-M2009m10_F01_10.he5 +{SERVER}004/2009.09.01/TES-Aura_L3-O3-M2009m09_F01_10.he5 +{SERVER}004/2009.08.02/TES-Aura_L3-O3-M2009m08_F01_10.he5 +{SERVER}004/2009.07.01/TES-Aura_L3-O3-M2009m07_F01_10.he5 +{SERVER}004/2009.06.01/TES-Aura_L3-O3-M2009m06_F01_10.he5 +{SERVER}004/2009.05.02/TES-Aura_L3-O3-M2009m05_F01_10.he5 +{SERVER}004/2009.04.02/TES-Aura_L3-O3-M2009m04_F01_10.he5 +{SERVER}004/2009.03.01/TES-Aura_L3-O3-M2009m03_F01_10.he5 +{SERVER}004/2009.02.01/TES-Aura_L3-O3-M2009m02_F01_10.he5 +{SERVER}004/2009.01.02/TES-Aura_L3-O3-M2009m01_F01_10.he5 +{SERVER}004/2008.12.03/TES-Aura_L3-O3-M2008m12_F01_10.he5 +{SERVER}004/2008.11.01/TES-Aura_L3-O3-M2008m11_F01_10.he5 +{SERVER}004/2008.10.02/TES-Aura_L3-O3-M2008m10_F01_10.he5 +{SERVER}004/2008.09.02/TES-Aura_L3-O3-M2008m09_F01_10.he5 +{SERVER}004/2008.08.01/TES-Aura_L3-O3-M2008m08_F01_10.he5 +{SERVER}004/2008.07.02/TES-Aura_L3-O3-M2008m07_F01_10.he5 +{SERVER}004/2008.06.02/TES-Aura_L3-O3-M2008m06_F01_10.he5 +{SERVER}004/2008.05.01/TES-Aura_L3-O3-M2008m05_F01_10.he5 +{SERVER}004/2008.04.01/TES-Aura_L3-O3-M2008m04_F01_10.he5 +{SERVER}004/2008.03.02/TES-Aura_L3-O3-M2008m03_F01_10.he5 +{SERVER}004/2008.02.01/TES-Aura_L3-O3-M2008m02_F01_10.he5 +{SERVER}004/2008.01.02/TES-Aura_L3-O3-M2008m01_F01_10.he5 +{SERVER}004/2007.12.01/TES-Aura_L3-O3-M2007m12_F01_10.he5 +{SERVER}004/2007.11.01/TES-Aura_L3-O3-M2007m11_F01_10.he5 +{SERVER}004/2007.10.02/TES-Aura_L3-O3-M2007m10_F01_10.he5 +{SERVER}004/2007.09.02/TES-Aura_L3-O3-M2007m09_F01_10.he5 +{SERVER}004/2007.08.01/TES-Aura_L3-O3-M2007m08_F01_10.he5 +{SERVER}004/2007.07.02/TES-Aura_L3-O3-M2007m07_F01_10.he5 +{SERVER}004/2007.06.02/TES-Aura_L3-O3-M2007m06_F01_10.he5 +{SERVER}004/2007.05.01/TES-Aura_L3-O3-M2007m05_F01_10.he5 +{SERVER}004/2007.04.01/TES-Aura_L3-O3-M2007m04_F01_10.he5 +{SERVER}004/2007.03.02/TES-Aura_L3-O3-M2007m03_F01_10.he5 +{SERVER}004/2007.02.02/TES-Aura_L3-O3-M2007m02_F01_10.he5 +{SERVER}004/2007.01.01/TES-Aura_L3-O3-M2007m01_F01_10.he5 +{SERVER}004/2006.12.02/TES-Aura_L3-O3-M2006m12_F01_10.he5 +{SERVER}004/2006.11.02/TES-Aura_L3-O3-M2006m11_F01_10.he5 +{SERVER}004/2006.10.01/TES-Aura_L3-O3-M2006m10_F01_10.he5 +{SERVER}004/2006.09.09/TES-Aura_L3-O3-M2006m09_F01_10.he5 +{SERVER}004/2006.08.02/TES-Aura_L3-O3-M2006m08_F01_10.he5 +{SERVER}004/2006.07.01/TES-Aura_L3-O3-M2006m07_F01_10.he5 +{SERVER}004/2006.06.01/TES-Aura_L3-O3-M2006m06_F01_10.he5 +{SERVER}004/2006.05.02/TES-Aura_L3-O3-M2006m05_F01_10.he5 +{SERVER}004/2006.04.02/TES-Aura_L3-O3-M2006m04_F01_10.he5 +{SERVER}004/2006.03.01/TES-Aura_L3-O3-M2006m03_F01_10.he5 +{SERVER}004/2006.02.01/TES-Aura_L3-O3-M2006m02_F01_10.he5 +{SERVER}004/2006.01.02/TES-Aura_L3-O3-M2006m01_F01_10.he5 +{SERVER}004/2005.12.07/TES-Aura_L3-O3-M2005m12_F01_10.he5 +{SERVER}004/2005.11.05/TES-Aura_L3-O3-M2005m11_F01_10.he5 +{SERVER}004/2005.10.02/TES-Aura_L3-O3-M2005m10_F01_10.he5 +{SERVER}004/2005.09.02/TES-Aura_L3-O3-M2005m09_F01_10.he5 +{SERVER}004/2005.08.01/TES-Aura_L3-O3-M2005m08_F01_10.he5 
+{SERVER}004/2005.07.04/TES-Aura_L3-O3-M2005m07_F01_10.he5 +{SERVER}004/2005.03.04/TES-Aura_L3-O3-M2005m03_F01_10.he5 +{SERVER}004/2005.02.02/TES-Aura_L3-O3-M2005m02_F01_10.he5 +{SERVER}004/2005.01.01/TES-Aura_L3-O3-M2005m01_F01_10.he5 +{SERVER}004/2004.12.02/TES-Aura_L3-O3-M2004m12_F01_10.he5 +{SERVER}004/2004.11.04/TES-Aura_L3-O3-M2004m11_F01_10.he5 +{SERVER}004/2004.10.09/TES-Aura_L3-O3-M2004m10_F01_10.he5 +{SERVER}004/2004.09.03/TES-Aura_L3-O3-M2004m09_F01_10.he5 +{SERVER}004/2004.08.22/TES-Aura_L3-O3-M2004m08_F01_10.he5""".split('\n') diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/berkeleyearth.py b/esmvaltool/cmorizers/data/downloaders/datasets/berkeleyearth.py new file mode 100644 index 0000000000..01ce0e6fa3 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/berkeleyearth.py @@ -0,0 +1,35 @@ +"""Script to download BerkeleyEarth from its webpage.""" + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + downloader.download_file( + "http://berkeleyearth.lbl.gov/auto/Global/Gridded/" + "Land_and_Ocean_LatLong1.nc", + wget_options=[]) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/calipso_goccp.py b/esmvaltool/cmorizers/data/downloaders/datasets/calipso_goccp.py new file mode 100644 index 0000000000..1c93d85b61 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/calipso_goccp.py @@ -0,0 +1,52 @@ +"""Script to download CALIPSO-GOCCP from IPSL's ftp server.""" + +import datetime + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.ftp import FTPDownloader + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
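+
+    Monthly averaged 3D cloud fraction files (grid_2x2xL40) are fetched
+    year by year from IPSL's ftp server; if no dates are given, 2007-2015
+    is used.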
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = FTPDownloader( + config=config, + server='ftp.climserv.ipsl.polytechnique.fr', + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + downloader.connect() + if not start_date: + start_date = datetime.datetime(2007, 1, 1) + if not end_date: + end_date = datetime.datetime(2015, 1, 1) + + loop_date = start_date + two_digits = r"\d{2}" + while loop_date <= end_date: + year = loop_date.year + downloader.set_cwd( + f"/cfmip/GOCCP_v3/3D_CloudFraction/grid_2x2xL40/{year}/avg/") + downloader.download_folder( + ".", + filter_files=(f"3D_CloudFraction330m_{year}{two_digits}" + "_avg_CFMIP2_sat_3.1.2.nc")) + loop_date += relativedelta.relativedelta(years=1) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/calipso_icecloud.py b/esmvaltool/cmorizers/data/downloaders/datasets/calipso_icecloud.py new file mode 100644 index 0000000000..c8992e3d38 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/calipso_icecloud.py @@ -0,0 +1,50 @@ +"""Script to download CALIPSO-ICECLOUD from its webpage.""" + +from datetime import datetime + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.wget import NASADownloader + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + if not start_date: + start_date = datetime(2007, 1, 1) + if not end_date: + end_date = datetime(2015, 12, 31) + loop_date = start_date + + downloader = NASADownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + while loop_date <= end_date: + year = loop_date.year + for month in range(1, 13): + + downloader.download_file( + "https://asdc.larc.nasa.gov/data/CALIPSO/" + f"LID_L3_Ice_Cloud-Standard-V1-00/{year}/" + f"CAL_LID_L3_Ice_Cloud-Standard-V1-00.{year}-{month:02}A.hdf") + loop_date += relativedelta.relativedelta(years=1) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/cds_satellite_albedo.py b/esmvaltool/cmorizers/data/downloaders/datasets/cds_satellite_albedo.py new file mode 100644 index 0000000000..a789f9b227 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/cds_satellite_albedo.py @@ -0,0 +1,60 @@ +"""Script to download CDS-SATELLITE-ALBEDO from the Climate Data Store.""" + +import datetime + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.cds import CDSDownloader +from esmvaltool.cmorizers.data.utilities import unpack_files_in_folder + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
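+
+    Monthly SPOT/VGT broadband albedo (albb_bh, albb_dh) archives are
+    requested from the CDS and unpacked; if no dates are given, April 1998
+    to May 2013 is used.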
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + if start_date is None: + start_date = datetime.datetime(1998, 4, 1) + if end_date is None: + end_date = datetime.datetime(2013, 5, 1) + + downloader = CDSDownloader( + product_name='satellite-albedo', + request_dictionary={ + 'format': 'tgz', + 'satellite': 'spot', + 'sensor': 'vgt', + 'product_version': 'v1', + 'horizontal_resolution': '1km', + 'variable': [ + 'albb_bh', + 'albb_dh', + ], + 'nominal_day': '20', + }, + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + loop_date = start_date + while loop_date <= end_date: + downloader.download(loop_date.year, loop_date.month) + loop_date += relativedelta.relativedelta(months=1) + + unpack_files_in_folder(downloader.local_folder) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/cds_satellite_lai_fapar.py b/esmvaltool/cmorizers/data/downloaders/datasets/cds_satellite_lai_fapar.py new file mode 100644 index 0000000000..65c2bafa0f --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/cds_satellite_lai_fapar.py @@ -0,0 +1,60 @@ +"""Script to download CDS-SATELLITE-LAI-FAPAR from the Climate Data Store.""" + +import datetime + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.cds import CDSDownloader +from esmvaltool.cmorizers.data.utilities import unpack_files_in_folder + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
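+
+    Monthly SPOT/VGT fapar and lai archives are requested from the CDS and
+    unpacked; if no dates are given, April 1998 to May 2014 is used.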
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + if not start_date: + start_date = datetime.datetime(1998, 4, 1) + if not end_date: + end_date = datetime.datetime(2014, 5, 1) + + loop_date = start_date + downloader = CDSDownloader( + product_name='satellite-lai-fapar', + request_dictionary={ + 'variable': [ + 'fapar', + 'lai', + ], + 'satellite': 'spot', + 'sensor': 'vgt', + 'horizontal_resolution': '1km', + 'product_version': 'V1', + 'nominal_day': '20', + 'format': 'tgz', + }, + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + while loop_date <= end_date: + downloader.download(loop_date.year, loop_date.month) + loop_date += relativedelta.relativedelta(months=1) + + unpack_files_in_folder(downloader.local_folder) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/cds_satellite_soil_moisture.py b/esmvaltool/cmorizers/data/downloaders/datasets/cds_satellite_soil_moisture.py new file mode 100644 index 0000000000..8b017321e1 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/cds_satellite_soil_moisture.py @@ -0,0 +1,124 @@ +"""Script to download CDS-SATELLITE-SOIL-MOISTURE from the CDS.""" + +import calendar +import datetime + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.cds import CDSDownloader +from esmvaltool.cmorizers.data.utilities import unpack_files_in_folder + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
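+
+    Monthly and daily soil moisture CDRs are requested separately for the
+    combined, passive and active sensor products (see get_downloader
+    below); if no dates are given, September 1991 to June 2020 is used.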
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + if not start_date: + start_date = datetime.datetime(1991, 9, 1) + if not end_date: + end_date = datetime.datetime(2020, 6, 30) + + loop_date = start_date + downloader = CDSDownloader( + product_name='satellite-soil-moisture', + request_dictionary={ + 'format': 'tgz', + 'variable': 'volumetric_surface_soil_moisture', + 'type_of_sensor': 'combined_passive_and_active', + 'type_of_record': 'cdr', + 'version': 'v201912.0.0', + 'time_aggregation': 'month_average', + 'day': ['01'] + }, + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + monthly_downloaders = {} + daily_downloaders = {} + + for sensor in ['combined_passive_and_active', 'passive', 'active']: + monthly_downloaders[sensor] = get_downloader(config, dataset, + dataset_info, overwrite, + sensor, 'month') + daily_downloaders[sensor] = get_downloader(config, dataset, + dataset_info, overwrite, + sensor, 'day') + while loop_date <= end_date: + for sensor, downloader in monthly_downloaders.items(): + pattern = f'cds-satellite-soil-moisture_cdr_{sensor}_monthly' + downloader.download(loop_date.year, + loop_date.month, + file_pattern=pattern) + loop_date += relativedelta.relativedelta(months=1) + + loop_date = start_date + while loop_date <= end_date: + for sensor, downloader in daily_downloaders.items(): + downloader.download( + loop_date.year, loop_date.month, [ + f'{i+1:02d}' for i in range( + calendar.monthrange(loop_date.year, loop_date.month) + [1]) + ], f'cds-satellite-soil-moisture_cdr_{sensor}_daily') + loop_date += relativedelta.relativedelta(months=1) + unpack_files_in_folder(downloader.local_folder) + + +def get_downloader(config, dataset, dataset_info, overwrite, sensor, + frequency): + """Create download request. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + overwrite : bool + Overwrite already downloaded files + sensor : str + Type of sensor + frequency : str + Time aggregation + """ + if sensor == 'active': + variable = 'surface_soil_moisture' + else: + variable = 'volumetric_surface_soil_moisture' + downloader = CDSDownloader( + product_name='satellite-soil-moisture', + request_dictionary={ + 'format': 'tgz', + 'variable': variable, + 'type_of_sensor': sensor, + 'day': '01', + 'type_of_record': 'cdr', + 'version': 'v201912.0.0', + 'time_aggregation': f'{frequency}_average', + }, + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + return downloader diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/cds_uerra.py b/esmvaltool/cmorizers/data/downloaders/datasets/cds_uerra.py new file mode 100644 index 0000000000..7641aea6ad --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/cds_uerra.py @@ -0,0 +1,64 @@ +"""Script to download CDS-UERRA from the CDS.""" + +import calendar +import datetime + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.cds import CDSDownloader + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
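+
+    Volumetric soil moisture from the UERRA-HARMONIE system is requested
+    month by month on soil levels 1-3; if no dates are given, January 1961
+    to July 2019 is used.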
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + if not start_date: + start_date = datetime.datetime(1961, 1, 1) + if not end_date: + end_date = datetime.datetime(2019, 7, 1) + + loop_date = start_date + downloader = CDSDownloader( + product_name='reanalysis-uerra-europe-soil-levels', + request_dictionary={ + 'format': 'netcdf', + 'origin': 'uerra_harmonie', + 'variable': 'volumetric_soil_moisture', + 'soil_level': [ + '1', + '2', + '3', + ], + 'time': ['00:00', '06:00', '12:00', '18:00'], + }, + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + loop_date = start_date + while loop_date <= end_date: + downloader.download( + loop_date.year, + loop_date.month, [ + f'{i+1:02d}' for i in range( + calendar.monthrange(loop_date.year, loop_date.month)[1]) + ], + file_format='nc') + loop_date += relativedelta.relativedelta(months=1) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/cds_xch4.py b/esmvaltool/cmorizers/data/downloaders/datasets/cds_xch4.py new file mode 100644 index 0000000000..241db20f2d --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/cds_xch4.py @@ -0,0 +1,41 @@ +"""Script to download CDS-XCH4 from the Climate Data Store (CDS).""" + +from esmvaltool.cmorizers.data.downloaders.cds import CDSDownloader +from esmvaltool.cmorizers.data.utilities import unpack_files_in_folder + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = CDSDownloader( + product_name='satellite-methane', + request_dictionary={ + 'format': 'tgz', + 'processing_level': 'level_3', + 'variable': 'xch4', + 'sensor_and_algorithm': 'merged_obs4mips', + 'version': '4.1', + }, + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + downloader.download_request("CDS-XCH4.tar") + unpack_files_in_folder(downloader.local_folder) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/cmap.py b/esmvaltool/cmorizers/data/downloaders/datasets/cmap.py new file mode 100644 index 0000000000..5fd58b5ac1 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/cmap.py @@ -0,0 +1,38 @@ +"""Script to download CMAP (CPC Merged Analysis of Precipitation).""" + +import logging + +from esmvaltool.cmorizers.data.downloaders.ftp import FTPDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
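+
+    The enhanced monthly precipitation file precip.mon.mean.nc is fetched
+    from NOAA PSL's ftp server; the start_date and end_date arguments are
+    not used by this downloader.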
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = FTPDownloader( + config=config, + server="ftp2.psl.noaa.gov", + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + downloader.connect() + + downloader.download_file("/Datasets/cmap/enh/precip.mon.mean.nc") diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/cowtanway.py b/esmvaltool/cmorizers/data/downloaders/datasets/cowtanway.py new file mode 100644 index 0000000000..edffbeb18c --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/cowtanway.py @@ -0,0 +1,49 @@ +"""Script to download CowtanWay from its webpage.""" +import logging + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader +from esmvaltool.cmorizers.data.utilities import unpack_files_in_folder + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + def download_file(path): + downloader.download_file( + "https://www-users.york.ac.uk/~kdc3/papers/coverage2013/" + path, + []) + + download_file("had4_krig_v1_0_0.nc.gz") + download_file("had4_uah_v1_0_0.nc.gz") + download_file("had4_short_krig_v2_0_0.nc.gz") + download_file("had4_short_uah_v2_0_0.nc.gz") + download_file("ghcn_short_krig_v2_0_0.nc.gz") + download_file("ghcn_short_uah_v2_0_0.nc.gz") + download_file("had4sst4_krig_v2_0_0.nc.gz") + download_file("had4_krig_v2_0_0.nc.gz") + unpack_files_in_folder(downloader.local_folder) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/cru.py b/esmvaltool/cmorizers/data/downloaders/datasets/cru.py new file mode 100644 index 0000000000..8fbce3e9a3 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/cru.py @@ -0,0 +1,50 @@ +"""Script to download CRU from its webpage.""" +import logging + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader +from esmvaltool.cmorizers.data.utilities import unpack_files_in_folder + +logger = logging.getLogger(__name__) + + +def download_dataset( + config, dataset, dataset_info, start_date, end_date, overwrite +): + """Download dataset. 
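+
+    Gzipped CRU TS 4.07 files for tmp, pre, pet, tmn, tmx and cld are
+    downloaded and unpacked; the start_date and end_date arguments are not
+    used by this downloader.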
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + for var in ['tmp', 'pre', 'pet', 'tmn', 'tmx', 'cld']: + downloader.download_file( + "https://crudata.uea.ac.uk/cru/data/hrg/cru_ts_4.07/" + f"cruts.2304141047.v4.07/{var}/" + f"cru_ts4.07.1901.2022.{var}.dat.nc.gz", + wget_options=[], + ) + # for var in ['tmp', 'pre']: # v TS4.02 + # downloader.download_file( + # "https://crudata.uea.ac.uk/cru/data/hrg/cru_ts_4.02/" + # f"cruts.1811131722.v4.02/{var}/" + # f"cru_ts4.02.1901.2017.{var}.dat.nc.gz", + # wget_options=[], + # ) + unpack_files_in_folder(downloader.local_folder) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/ct2019.py b/esmvaltool/cmorizers/data/downloaders/datasets/ct2019.py new file mode 100644 index 0000000000..4eed312963 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/ct2019.py @@ -0,0 +1,50 @@ +"""Script to download CT2019 from NOAA's webpage.""" + +from datetime import datetime + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.ftp import FTPDownloader + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + if start_date is None: + start_date = datetime(2000, 1, 1) + if end_date is None: + end_date = datetime(2018, 12, 31) + loop_date = start_date + + downloader = FTPDownloader(config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + server='aftp.cmdl.noaa.gov') + downloader.connect() + downloader.set_cwd( + 'products/carbontracker/co2/CT2019/molefractions/co2_total_monthly/') + + while loop_date <= end_date: + year = loop_date.year + month = loop_date.month + + downloader.download_file( + f'CT2019.molefrac_glb3x2_{year}-{month:02}.nc') + loop_date += relativedelta.relativedelta(months=1) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/duveiller2018.py b/esmvaltool/cmorizers/data/downloaders/datasets/duveiller2018.py new file mode 100644 index 0000000000..8cf91a15ee --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/duveiller2018.py @@ -0,0 +1,37 @@ +"""Script to download Duveiller2018 from its webpage.""" +import logging + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
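+
+    Monthly mole-fraction files (CT2019.molefrac_glb3x2_YYYY-MM.nc) are
+    fetched from NOAA's ftp server; if no dates are given, 2000-2018 is
+    used.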
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + downloader.download_file( + 'https://s3-eu-west-1.amazonaws.com/pstorage-npg-968563215/' + '9969496/albedo_IGBPgen.nc', + wget_options=[]) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/e_obs.py b/esmvaltool/cmorizers/data/downloaders/datasets/e_obs.py new file mode 100644 index 0000000000..fd12127e43 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/e_obs.py @@ -0,0 +1,41 @@ +"""Script to download E-OBS from its webpage.""" +import logging + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + for var in ['TG', 'TN', 'TX', 'RR', 'PP']: + for grid in ('0.1deg', '0.25deg'): + for version in ('20.0e', ): + downloader.download_file( + "https://knmi-ecad-assets-prd.s3.amazonaws.com/ensembles/" + f"data/Grid_{grid}_reg_ensemble/" + f"{var.lower()}_ens_mean_{grid}_reg_v{version}.nc", + wget_options=[]) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/eppley_vgpm_modis.py b/esmvaltool/cmorizers/data/downloaders/datasets/eppley_vgpm_modis.py new file mode 100644 index 0000000000..f70ee8ab35 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/eppley_vgpm_modis.py @@ -0,0 +1,52 @@ +"""Script to download Eppley-VGPM-MODIS.""" +import datetime +import logging + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader +from esmvaltool.cmorizers.data.utilities import unpack_files_in_folder + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
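+
+    A single file, albedo_IGBPgen.nc, is fetched; the start_date and
+    end_date arguments are not used by this downloader.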
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + if not start_date: + start_date = datetime.datetime(2002, 1, 1) + if not end_date: + end_date = datetime.datetime(2020, 1, 1) + + loop_date = start_date + while loop_date <= end_date: + year = loop_date.year + downloader.download_folder( + "http://orca.science.oregonstate.edu/data/1x2/monthly/" + f"eppley.r2018.m.chl.m.sst/hdf/eppley.m.{year}.tar", + wget_options=["--accept=tar"]) + loop_date += relativedelta.relativedelta(years=1) + unpack_files_in_folder(downloader.local_folder) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/esacci_aerosol.py b/esmvaltool/cmorizers/data/downloaders/datasets/esacci_aerosol.py new file mode 100644 index 0000000000..09d2616c9c --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/esacci_aerosol.py @@ -0,0 +1,51 @@ +"""Script to download ESACCI-AEROSOL from CCI CEDA ftp.""" + +from datetime import datetime + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.ftp import CCIDownloader + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + if start_date is None: + start_date = datetime(1997, 1, 1) + if end_date is None: + end_date = datetime(2011, 1, 1) + loop_date = start_date + + downloader = CCIDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + downloader.connect() + + while loop_date <= end_date: + year = loop_date.year + if year < 2003: + downloader.set_cwd('ATSR2_SU/L3/v4.21/MONTHLY') + else: + downloader.set_cwd('AATSR_SU/L3/v4.21/MONTHLY') + + downloader.download_year(loop_date.year) + loop_date += relativedelta.relativedelta(years=1) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/esacci_cloud.py b/esmvaltool/cmorizers/data/downloaders/datasets/esacci_cloud.py new file mode 100644 index 0000000000..a14255e143 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/esacci_cloud.py @@ -0,0 +1,65 @@ +"""Script to download ESACCI-CLOUD.""" + +from datetime import datetime + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.ftp import CCIDownloader + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
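+ + Downloads the monthly AVHRR-PM files year by year and fills the September 1994 to January 1995 gap with the corresponding AVHRR-AM (NOAA-12) files.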
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + if start_date is None: + start_date = datetime(1982, 1, 1) + if end_date is None: + end_date = datetime(2016, 1, 1) + loop_date = start_date + + downloader = CCIDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + downloader.connect() + end_of_file = 'ESACCI-L3C_CLOUD-CLD_PRODUCTS-AVHRR_NOAA-12-fv3.0.nc' + filler_data = { + 1994: [ + f'AVHRR_NOAA_12/1994/199409-{end_of_file}', + f'AVHRR_NOAA_12/1994/199410-{end_of_file}', + f'AVHRR_NOAA_12/1994/199411-{end_of_file}', + f'AVHRR_NOAA_12/1994/199412-{end_of_file}', + ], + 1995: [ + f'AVHRR_NOAA_12/1995/199501-{end_of_file}', + ], + } + + while loop_date <= end_date: + year = loop_date.year + downloader.set_cwd('version3/L3C/AVHRR-PM/v3.0') + for folder in downloader.list_folders(): + for year_folder in downloader.list_folders(folder): + if int(year_folder) == year: + downloader.download_year(f'{folder}/{year_folder}') + downloader.set_cwd('version3/L3C/AVHRR-AM/v3.0') + for extra_file in filler_data.get(year, []): + downloader.download_file(extra_file) + loop_date += relativedelta.relativedelta(years=1) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/esacci_fire.py b/esmvaltool/cmorizers/data/downloaders/datasets/esacci_fire.py new file mode 100644 index 0000000000..82fbc743ad --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/esacci_fire.py @@ -0,0 +1,45 @@ +"""Script to download ESACCI-FIRE.""" + +from datetime import datetime + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.ftp import CCIDownloader + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + if not start_date: + start_date = datetime(2005, 1, 1) + if not end_date: + end_date = datetime(2011, 1, 1) + loop_date = start_date + + downloader = CCIDownloader(config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite) + downloader.connect() + + downloader.set_cwd('burned_area/MERIS/grid/v4.1/') + while loop_date <= end_date: + year = loop_date.year + downloader.download_year(f'{year}') + loop_date += relativedelta.relativedelta(years=1) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/esacci_landcover.py b/esmvaltool/cmorizers/data/downloaders/datasets/esacci_landcover.py new file mode 100644 index 0000000000..efffa2aaaa --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/esacci_landcover.py @@ -0,0 +1,52 @@ +"""Script to download ESACCI-LANDCOVER pft data from the CEDA.""" + +from datetime import datetime + +from esmvaltool.cmorizers.data.downloaders.ftp import CCIDownloader + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
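+ + Downloads the plant functional type (pft) files from the v2.0.8 folder, using a regular expression to select only the requested years.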
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + # Default start and end dates if not provided + if not start_date: + start_date = datetime(1992, 1, 1) + if not end_date: + end_date = datetime(2020, 12, 31) + + # Initialize the downloader + downloader = CCIDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + downloader.ftp_name = 'land_cover' + downloader.connect() + + # Set current working directory to the main directory with the files + downloader.set_cwd('/pft/v2.0.8/') + + # Create a regex pattern matching .nc files for the requested years + year_range = '|'.join(str(year) for year in range(start_date.year, + end_date.year + 1)) + pattern = rf".*-(?:{year_range}).*\.nc$" + + # Download all matching .nc files in the directory + downloader.download_folder('.', filter_files=pattern) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/esacci_oc.py b/esmvaltool/cmorizers/data/downloaders/datasets/esacci_oc.py new file mode 100644 index 0000000000..64d4209477 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/esacci_oc.py @@ -0,0 +1,49 @@ +"""Script to download ESACCI-OC from CCI CEDA ftp.""" + +from datetime import datetime + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.ftp import CCIDownloader + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + if not start_date: + start_date = datetime(1997, 9, 1) + if not end_date: + end_date = datetime(2020, 12, 1) + + loop_date = start_date + + downloader = CCIDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + downloader.ftp_name = 'ocean_colour' + downloader.connect() + + downloader.set_cwd('v5.0-release/geographic/netcdf/chlor_a/monthly/v5.0/') + while loop_date <= end_date: + year = loop_date.year + downloader.download_year(f'{year}') + loop_date += relativedelta.relativedelta(years=1) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/esacci_ozone.py b/esmvaltool/cmorizers/data/downloaders/datasets/esacci_ozone.py new file mode 100644 index 0000000000..50d80e06f3 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/esacci_ozone.py @@ -0,0 +1,53 @@ +"""Script to download ESACCI-OZONE from CCI CEDA ftp.""" + +from datetime import datetime + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.ftp import CCIDownloader + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset.
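+ + Downloads all merged monthly zonal mean limb profiles at once and then the total column files year by year.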
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + if start_date is None: + start_date = datetime(1997, 1, 1) + if end_date is None: + end_date = datetime(2010, 1, 1) + + loop_date = start_date + + downloader = CCIDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + downloader.ftp_name = 'ozone' + downloader.connect() + downloader.set_cwd( + 'limb_profiles/l3/merged/merged_monthly_zonal_mean/v0002') + downloader.download_folder('.') + + downloader.set_cwd('total_columns/l3/merged/v0100/') + while loop_date <= end_date: + year = loop_date.year + downloader.set_cwd('total_columns/l3/merged/v0100/') + downloader.download_year(f'{year}') + loop_date += relativedelta.relativedelta(years=1) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/esacci_sea_surface_salinity.py b/esmvaltool/cmorizers/data/downloaders/datasets/esacci_sea_surface_salinity.py new file mode 100644 index 0000000000..dc38903624 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/esacci_sea_surface_salinity.py @@ -0,0 +1,54 @@ +"""Script to download ESACCI-SOS.""" + +import logging +from datetime import datetime + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.ftp import CCIDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + if start_date is None: + start_date = datetime(2010, 1, 1) + if end_date is None: + end_date = datetime(2019, 1, 1) + loop_date = start_date + + downloader = CCIDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + downloader.connect() + for version in ['v01.8', 'v02.31']: + downloader.set_cwd(f'{version}/30days') + loop_date = start_date + while loop_date <= end_date: + if downloader.exists(str(loop_date.year)): + downloader.download_year(loop_date.year) + else: + logger.info('Year %s not available for version %s', + loop_date.year, version) + loop_date += relativedelta.relativedelta(years=1) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/esacci_soilmoisture.py b/esmvaltool/cmorizers/data/downloaders/datasets/esacci_soilmoisture.py new file mode 100644 index 0000000000..0d29e96ff9 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/esacci_soilmoisture.py @@ -0,0 +1,49 @@ +"""Script to download ESACCI-SOILMOISTURE.""" +from datetime import datetime + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.ftp import CCIDownloader + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
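+ + Downloads the v08.1 ancillary files once and then the daily COMBINED files year by year.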
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + if start_date is None: + start_date = datetime(1978, 11, 1) + if end_date is None: + end_date = datetime(2022, 12, 31) + + loop_date = start_date + + downloader = CCIDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + downloader.ftp_name = 'soil_moisture' + downloader.connect() + downloader.set_cwd('ancillary/v08.1/') + downloader.download_folder('.') + downloader.set_cwd('daily_files/COMBINED/v08.1/') + while loop_date <= end_date: + year = loop_date.year + downloader.download_year(f'{year}') + loop_date += relativedelta.relativedelta(years=1) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/ghcn.py b/esmvaltool/cmorizers/data/downloaders/datasets/ghcn.py new file mode 100644 index 0000000000..ceeb44de30 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/ghcn.py @@ -0,0 +1,39 @@ +"""Script to download GHCN from its webpage.""" +import logging +import os + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + os.makedirs(downloader.local_folder, exist_ok=True) + downloader.download_file( + "ftp://ftp.cdc.noaa.gov/Datasets/ghcngridded/precip.mon.total.nc", + wget_options=[]) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/ghcn_cams.py b/esmvaltool/cmorizers/data/downloaders/datasets/ghcn_cams.py new file mode 100644 index 0000000000..40b3b1a8a8 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/ghcn_cams.py @@ -0,0 +1,39 @@ +"""Script to download GHCN-CAMS from its webpage.""" +import logging +import os + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + os.makedirs(downloader.local_folder, exist_ok=True) + downloader.download_file( + "ftp://ftp.cdc.noaa.gov/Datasets/ghcncams/air.mon.mean.nc", + wget_options=[]) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/gistemp.py b/esmvaltool/cmorizers/data/downloaders/datasets/gistemp.py new file mode 100644 index 0000000000..2bf73fa4ed --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/gistemp.py @@ -0,0 +1,39 @@ +"""Script to download GISTEMP from its webpage.""" +import logging + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader +from esmvaltool.cmorizers.data.utilities import unpack_files_in_folder + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + downloader.download_file( + "https://data.giss.nasa.gov/pub/gistemp/gistemp250_GHCNv4.nc.gz", + wget_options=['--no-check-certificate']) + unpack_files_in_folder(downloader.local_folder) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/glodap.py b/esmvaltool/cmorizers/data/downloaders/datasets/glodap.py new file mode 100644 index 0000000000..f4de91f47f --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/glodap.py @@ -0,0 +1,39 @@ +"""Script to download GLODAP.""" +import logging + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader +from esmvaltool.cmorizers.data.utilities import unpack_files_in_folder + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
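+ + Downloads the GLODAPv2.2016b mapped climatologies as a single archive and unpacks it; the requested dates are ignored for this climatology.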
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + downloader.download_file( + "https://www.ncei.noaa.gov/data/oceans/ncei/ocads/data/0162565/mapped/" + "GLODAPv2.2016b_MappedClimatologies.tar.gz", + wget_options=[]) + unpack_files_in_folder(downloader.local_folder) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/gpcc.py b/esmvaltool/cmorizers/data/downloaders/datasets/gpcc.py new file mode 100644 index 0000000000..6279970be0 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/gpcc.py @@ -0,0 +1,45 @@ +"""Script to download GPCC from its webpage.""" +import logging + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader +from esmvaltool.cmorizers.data.utilities import ( + read_cmor_config, + unpack_files_in_folder, +) + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + cmor_config = read_cmor_config(dataset) + raw_path = ("https://opendata.dwd.de/climate_environment/GPCC/" + "full_data_2018/full_data_monthly_{version}.nc.gz") + for version in cmor_config['attributes']['version'].values(): + downloader.download_file(raw_path.format(version=version), + wget_options=[]) + unpack_files_in_folder(downloader.local_folder) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/gpcp_sg.py b/esmvaltool/cmorizers/data/downloaders/datasets/gpcp_sg.py new file mode 100644 index 0000000000..91e1b4e47b --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/gpcp_sg.py @@ -0,0 +1,39 @@ +"""Script to download GPCP-SG.""" +import logging + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + downloader.download_file( + "https://downloads.psl.noaa.gov/Datasets/" + "gpcp/precip.mon.mean.nc", + wget_options=[], + ) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/hadcrut3.py b/esmvaltool/cmorizers/data/downloaders/datasets/hadcrut3.py new file mode 100644 index 0000000000..6481537442 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/hadcrut3.py @@ -0,0 +1,39 @@ +"""Script to download HadCRUT3 from its webpage.""" +import logging +import os + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + os.makedirs(downloader.local_folder, exist_ok=True) + downloader.download_file( + "https://www.metoffice.gov.uk/hadobs/hadcrut3/data/HadCRUT3v.nc", + wget_options=[]) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/hadcrut4.py b/esmvaltool/cmorizers/data/downloaders/datasets/hadcrut4.py new file mode 100644 index 0000000000..986e0bd2e7 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/hadcrut4.py @@ -0,0 +1,43 @@ +"""Script to download HadCRUT4 from its webpage.""" +import logging +import os + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
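+ + Downloads the HadCRUT4 median temperature anomalies together with the absolute.nc climatology file.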
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + os.makedirs(downloader.local_folder, exist_ok=True) + downloader.download_file( + "https://crudata.uea.ac.uk/cru/data/temperature/" + "HadCRUT.4.6.0.0.median.nc", + wget_options=[]) + downloader.download_file( + "https://crudata.uea.ac.uk/cru/data/temperature/absolute.nc", + wget_options=[]) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/hadcrut5.py b/esmvaltool/cmorizers/data/downloaders/datasets/hadcrut5.py new file mode 100644 index 0000000000..6c8eda7c57 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/hadcrut5.py @@ -0,0 +1,51 @@ +"""Script to download HadCRUT5 version 5.0.1.0 from its webpage.""" +import logging +import os + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + os.makedirs(downloader.local_folder, exist_ok=True) + downloader.download_file( + "https://crudata.uea.ac.uk/cru/data/temperature/" + "HadCRUT.5.0.1.0.analysis.anomalies.ensemble_mean.nc", + wget_options=[]) + downloader.download_file( + "https://crudata.uea.ac.uk/cru/data/temperature/" + "absolute_v5.nc", + wget_options=[]) + downloader.download_file( + "https://crudata.uea.ac.uk/cru/data/temperature/" + "HadCRUT.5.0.1.0.anomalies.ensemble_mean.nc", + wget_options=[]) + downloader.download_file( + "https://crudata.uea.ac.uk/cru/data/temperature/absolute.nc", + wget_options=[]) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/hadisst.py b/esmvaltool/cmorizers/data/downloaders/datasets/hadisst.py new file mode 100644 index 0000000000..1ac279f284 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/hadisst.py @@ -0,0 +1,45 @@ +"""Script to download HadISST from its webpage.""" +import logging +import os + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader +from esmvaltool.cmorizers.data.utilities import unpack_files_in_folder + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
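+ + Downloads the gzipped SST and sea ice concentration files and unpacks them.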
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + os.makedirs(downloader.local_folder, exist_ok=True) + downloader.download_file( + "https://www.metoffice.gov.uk/hadobs/hadisst/data/HadISST_sst.nc.gz", + wget_options=[]) + downloader.download_file( + "https://www.metoffice.gov.uk/hadobs/hadisst/data/HadISST_ice.nc.gz", + wget_options=[]) + + unpack_files_in_folder(downloader.local_folder) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/haloe.py b/esmvaltool/cmorizers/data/downloaders/datasets/haloe.py new file mode 100644 index 0000000000..fd396119fb --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/haloe.py @@ -0,0 +1,40 @@ +"""Script to download HALOE from its webpage.""" +import logging + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader +from esmvaltool.cmorizers.data.utilities import unpack_files_in_folder + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + downloader.download_file( + "https://acp.copernicus.org/articles/5/2797/2005/" + "acp-5-2797-2005-supplement.tar", + wget_options=[]) + unpack_files_in_folder(downloader.local_folder) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/isccp_fh.py b/esmvaltool/cmorizers/data/downloaders/datasets/isccp_fh.py new file mode 100644 index 0000000000..9f2c15b794 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/isccp_fh.py @@ -0,0 +1,49 @@ +"""Script to download ISCCP-FH.""" + +from datetime import datetime + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader +from esmvaltool.cmorizers.data.utilities import unpack_files_in_folder + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
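+ + Downloads one tar.gz archive per year and unpacks all archives once the loop has finished.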
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + if start_date is None: + start_date = datetime(1984, 1, 1) + if end_date is None: + end_date = datetime(2016, 1, 1) + loop_date = start_date + + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + while loop_date <= end_date: + downloader.download_file( + "https://isccp.giss.nasa.gov/pub/flux-fh/tar-nc4_MPF/" + f"ISCCP-FH_nc4_MPF_v.0.0_{loop_date.year}.tar.gz", + wget_options=['--no-check-certificate']) + loop_date += relativedelta.relativedelta(years=1) + unpack_files_in_folder(downloader.local_folder) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/jra_25.py b/esmvaltool/cmorizers/data/downloaders/datasets/jra_25.py new file mode 100644 index 0000000000..27e3dbc97c --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/jra_25.py @@ -0,0 +1,65 @@ +"""Script to download JRA-25 from ESGF.""" +import logging +import os + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + os.makedirs(downloader.local_folder, exist_ok=True) + + url = ("https://esgf.nccs.nasa.gov/thredds/fileServer/CREATE-IP/" + "reanalysis/JMA/JRA-25/JRA-25/mon/atmos/") + + downloader.download_file(url + + "clt/clt_Amon_reanalysis_JRA-25_197901-201312.nc", + wget_options=[]) + downloader.download_file(url + + "hus/hus_Amon_reanalysis_JRA-25_197901-201312.nc", + wget_options=[]) + downloader.download_file(url + + "prw/prw_Amon_reanalysis_JRA-25_197901-201312.nc", + wget_options=[]) + downloader.download_file(url + + "rlut/" + "rlut_Amon_reanalysis_JRA-25_197901-201312.nc", + wget_options=[]) + downloader.download_file(url + + "rlutcs/" + "rlutcs_Amon_reanalysis_JRA-25_197901-201312.nc", + wget_options=[]) + downloader.download_file(url + + "rsut/" + "rsut_Amon_reanalysis_JRA-25_197901-201312.nc", + wget_options=[]) + downloader.download_file(url + + "rsutcs/" + "rsutcs_Amon_reanalysis_JRA-25_197901-201312.nc", + wget_options=[]) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/jra_55.py b/esmvaltool/cmorizers/data/downloaders/datasets/jra_55.py new file mode 100644 index 0000000000..7a9e374136 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/jra_55.py @@ -0,0 +1,113 @@ +"""Script to download JRA-55 from RDA.""" +import logging +import os +from datetime import datetime + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, 
end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + os.makedirs(downloader.local_folder, exist_ok=True) + + user = os.environ.get("rda-user") + if user is None: + user = str(input("RDA user name? ")) + if user == "": + errmsg = ("An RDA account is required to download JRA-55 data." + " Please visit https://rda.ucar.edu/login/register/" + " to create an account at the Research Data Archive" + " (RDA) if needed.") + logger.error(errmsg) + raise ValueError(errmsg) + + passwd = os.environ.get("rda-passwd") + if passwd is None: + passwd = str(input("RDA password? ")) + + if start_date is None: + start_date = datetime(1958, 1, 1) + if end_date is None: + end_date = datetime(2022, 12, 31) + loop_date = start_date + + options = ["-O", "Authentication.log", "--save-cookies=auth.rda_ucar_edu", + f"--post-data=\"email={user}&passwd={passwd}&action=login\""] + + # Log in to the Research Data Archive (RDA) + + downloader.login("https://rda.ucar.edu/cgi-bin/login", options) + + # Download files + + url = "https://data.rda.ucar.edu/ds628.1" + download_options = ["--load-cookies=auth.rda_ucar_edu"] + + # Define variables to download + + var = [["011_tmp", "anl_p125"], + ["011_tmp", "anl_surf125"], + ["039_vvel", "anl_p125"], + ["071_tcdc", "fcst_surf125"], + ["054_pwat", "fcst_column125"], + ["058_cice", "fcst_column125"], + ["160_csusf", "fcst_phy2m125"], + ["162_csulf", "fcst_phy2m125"], + ["211_uswrf", "fcst_phy2m125"], + ["212_ulwrf", "fcst_phy2m125"], + ["227_cw", "fcst_column125"], + ["228_clwc", "fcst_p125"], + ["229_ciwc", "fcst_p125"]] + + # Download data + + while loop_date <= end_date: + year = loop_date.year + + for item in var: + varname = item[0] + channel = item[1] + fname = f"{channel}.{varname}.{year}01_{year}12" + # Download file + downloader.download_file(url + f"/{channel}/{year}/" + + fname, download_options) + # Add file extension ".grb" + os.rename(downloader.local_folder + "/" + fname, + downloader.local_folder + "/" + fname + ".grb") + + loop_date += relativedelta.relativedelta(years=1) + + # Clean up temporary files + + if os.path.exists("Authentication.log"): + os.remove("Authentication.log") + if os.path.exists("auth.rda_ucar_edu"): + os.remove("auth.rda_ucar_edu") diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/kadow2020.py b/esmvaltool/cmorizers/data/downloaders/datasets/kadow2020.py new file mode 100644 index 0000000000..2c08dd3c04 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/kadow2020.py @@ -0,0 +1,41 @@ +"""Script to download Kadow2020 from its webpage.""" +import logging +import os + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset.
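+ + Downloads the single infilled HadCRUT5 ensemble mean file (1850-2020) from the FU Berlin server.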
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + os.makedirs(downloader.local_folder, exist_ok=True) + downloader.download_file( + "http://users.met.fu-berlin.de/~ChristopherKadow/" + "HadCRUT.5.0.1.0.anomalies.Kadow_et_al_2020_20crAI-" + "infilled.ensemble_mean_185001-202012.nc", + wget_options=[]) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/landflux_eval.py b/esmvaltool/cmorizers/data/downloaders/datasets/landflux_eval.py new file mode 100644 index 0000000000..1d8e84cb9d --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/landflux_eval.py @@ -0,0 +1,37 @@ +"""Script to download LandFlux-EVAL from its webpage.""" +import logging + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + downloader.download_file( + "https://data.iac.ethz.ch/landflux/" + "LandFluxEVAL.merged.89-05.monthly.all.nc", + wget_options=[]) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/landschuetzer2016.py b/esmvaltool/cmorizers/data/downloaders/datasets/landschuetzer2016.py new file mode 100644 index 0000000000..e08bae771e --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/landschuetzer2016.py @@ -0,0 +1,38 @@ +"""Script to download Landschuetzer2016.""" +import logging + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + downloader.download_file( + "https://www.nodc.noaa.gov/archive/arc0105/0160558/3.3/data/0-data/" + "spco2_1982-2015_MPI_SOM-FFN_v2016.nc", + wget_options=[]) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/landschuetzer2020.py b/esmvaltool/cmorizers/data/downloaders/datasets/landschuetzer2020.py new file mode 100644 index 0000000000..88929cfc7a --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/landschuetzer2020.py @@ -0,0 +1,39 @@ +"""Script to download Landschuetzer2020.""" +import logging + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + downloader.download_file( + "https://www.ncei.noaa.gov/data/oceans/ncei/ocads/data/0209633/" + "MPI-ULB-SOM_FFN_clim.nc", + wget_options=[], + ) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/merra.py b/esmvaltool/cmorizers/data/downloaders/datasets/merra.py new file mode 100644 index 0000000000..df1d0ff7e9 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/merra.py @@ -0,0 +1,57 @@ +"""Script to download MERRA.""" + +from datetime import datetime + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.wget import NASADownloader + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
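+ + Downloads four monthly MERRA collections per year from the NASA GES DISC server.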
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + if not start_date: + start_date = datetime(1979, 1, 1) + if not end_date: + end_date = datetime(2015, 12, 31) + loop_date = start_date + + downloader = NASADownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + while loop_date <= end_date: + year = loop_date.year + downloader.download_folder( + "https://goldsmr3.gesdisc.eosdis.nasa.gov/data/MERRA_MONTHLY/" + f"MAIMNXINT.5.2.0/{year}/") + downloader.download_folder( + "https://goldsmr3.gesdisc.eosdis.nasa.gov/data/MERRA_MONTHLY/" + f"MAIMCPASM.5.2.0/{year}/") + downloader.download_folder( + "https://goldsmr3.gesdisc.eosdis.nasa.gov/data/MERRA_MONTHLY/" + f"MATMNXRAD.5.2.0/{year}/") + downloader.download_folder( + "https://goldsmr3.gesdisc.eosdis.nasa.gov/data/MERRA_MONTHLY/" + f"MATMFXCHM.5.2.0/{year}/") + + loop_date += relativedelta.relativedelta(years=1) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/merra2.py b/esmvaltool/cmorizers/data/downloaders/datasets/merra2.py new file mode 100644 index 0000000000..28ddebf720 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/merra2.py @@ -0,0 +1,71 @@ +"""Script to download MERRA2.""" + +from datetime import datetime + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.wget import NASADownloader + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
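+ + Downloads nine monthly MERRA2 collections per year from the NASA GES DISC server.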
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + if not start_date: + start_date = datetime(1980, 1, 1) + if not end_date: + end_date = datetime(2022, 1, 1) + loop_date = start_date + + downloader = NASADownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + while loop_date <= end_date: + year = loop_date.year + downloader.download_folder( + "https://goldsmr4.gesdisc.eosdis.nasa.gov/data/MERRA2_MONTHLY/" + f"M2TMNXLND.5.12.4/{year}/") + downloader.download_folder( + "https://goldsmr5.gesdisc.eosdis.nasa.gov/data/MERRA2_MONTHLY/" + f"M2IMNPANA.5.12.4/{year}/") + downloader.download_folder( + "https://goldsmr5.gesdisc.eosdis.nasa.gov/data/MERRA2_MONTHLY/" + f"M2IMNPASM.5.12.4/{year}/") + downloader.download_folder( + "https://goldsmr4.gesdisc.eosdis.nasa.gov/data/MERRA2_MONTHLY/" + f"M2TMNXRAD.5.12.4/{year}/") + downloader.download_folder( + "https://goldsmr4.gesdisc.eosdis.nasa.gov/data/MERRA2_MONTHLY/" + f"M2TMNXSLV.5.12.4/{year}/") + downloader.download_folder( + "https://goldsmr4.gesdisc.eosdis.nasa.gov/data/MERRA2_MONTHLY/" + f"M2SMNXSLV.5.12.4/{year}/") + downloader.download_folder( + "https://goldsmr4.gesdisc.eosdis.nasa.gov/data/MERRA2_MONTHLY/" + f"M2TMNXFLX.5.12.4/{year}/") + downloader.download_folder( + "https://goldsmr5.gesdisc.eosdis.nasa.gov/data/MERRA2_MONTHLY/" + f"M2TMNPCLD.5.12.4/{year}/") + downloader.download_folder( + "https://goldsmr4.gesdisc.eosdis.nasa.gov/data/MERRA2_MONTHLY/" + f"M2IMNXASM.5.12.4/{year}/") + loop_date += relativedelta.relativedelta(years=1) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/mobo_dic2004_2019.py b/esmvaltool/cmorizers/data/downloaders/datasets/mobo_dic2004_2019.py new file mode 100644 index 0000000000..1299981811 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/mobo_dic2004_2019.py @@ -0,0 +1,39 @@ +"""Script to download MOBO-DIC2004-2019.""" +import logging + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + downloader.download_file( + "https://www.nodc.noaa.gov/archive/arc0211/0277099/2.3/data/0-data/" + "MPI_MOBO-DIC_2004-2019_v2.nc", + wget_options=[], + ) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/mobo_dic_mpim.py b/esmvaltool/cmorizers/data/downloaders/datasets/mobo_dic_mpim.py new file mode 100644 index 0000000000..58af5689d7 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/mobo_dic_mpim.py @@ -0,0 +1,39 @@ +"""Script to download MOBO-DIC_MPIM.""" +import logging + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + downloader.download_file( + "https://www.ncei.noaa.gov/data/oceans/ncei/ocads/data/0221526/" + "MOBO-DIC_MPIM_monthly_clim.nc", + wget_options=[], + ) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/ncep_doe_r2.py b/esmvaltool/cmorizers/data/downloaders/datasets/ncep_doe_r2.py new file mode 100644 index 0000000000..2d691e710d --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/ncep_doe_r2.py @@ -0,0 +1,58 @@ +"""Script to download NCEP-DOE-R2.""" +import logging +import os + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """ + Download dataset. 
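+ + Downloads a fixed set of monthly mean files from the pressure, gaussian grid and surface directories at NOAA PSL.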
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + os.makedirs(downloader.local_folder, exist_ok=True) + + url = "https://downloads.psl.noaa.gov/Datasets/ncep.reanalysis2/Monthlies/" + + downloader.download_file(url + "pressure/omega.mon.mean.nc", + wget_options=[]) + downloader.download_file(url + "pressure/rhum.mon.mean.nc", + wget_options=[]) + downloader.download_file(url + "pressure/air.mon.mean.nc", wget_options=[]) + downloader.download_file(url + "gaussian_grid/tcdc.eatm.mon.mean.nc", + wget_options=[]) + downloader.download_file(url + "surface/pr_wtr.eatm.mon.mean.nc", + wget_options=[]) + downloader.download_file(url + "gaussian_grid/prate.sfc.mon.mean.nc", + wget_options=[]) + downloader.download_file(url + "gaussian_grid/uflx.sfc.mon.mean.nc", + wget_options=[]) + downloader.download_file(url + "gaussian_grid/vflx.sfc.mon.mean.nc", + wget_options=[]) + downloader.download_file(url + "gaussian_grid/skt.sfc.mon.mean.nc", + wget_options=[]) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/ncep_ncar_r1.py b/esmvaltool/cmorizers/data/downloaders/datasets/ncep_ncar_r1.py new file mode 100644 index 0000000000..f9b0816efe --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/ncep_ncar_r1.py @@ -0,0 +1,85 @@ +"""Script to download NCEP-NCAR-R1.""" + +from datetime import datetime + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.ftp import FTPDownloader + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
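+ + Downloads the derived monthly mean files once and then the daily files (winds, precipitation rate and outgoing longwave radiation) year by year.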
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + if start_date is None: + start_date = datetime(1948, 1, 1) + if end_date is None: + end_date = datetime(2021, 1, 1) + downloader = FTPDownloader( + config=config, + server='ftp.cdc.noaa.gov', + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + downloader.connect() + + loop_date = start_date + + downloader.set_cwd("/Datasets/ncep.reanalysis.derived/pressure/") + downloader.download_file("air.mon.mean.nc", sub_folder='pressure') + downloader.download_file("hgt.mon.mean.nc", sub_folder='pressure') + downloader.download_file("rhum.mon.mean.nc", sub_folder='pressure') + downloader.download_file("shum.mon.mean.nc", sub_folder='pressure') + downloader.download_file("uwnd.mon.mean.nc", sub_folder='pressure') + downloader.download_file("vwnd.mon.mean.nc", sub_folder='pressure') + downloader.download_file("omega.mon.mean.nc", sub_folder='pressure') + + downloader.set_cwd("/Datasets/ncep.reanalysis.derived/surface/") + downloader.download_file("air.mon.mean.nc", sub_folder='surface') + downloader.download_file("pr_wtr.mon.mean.nc", sub_folder='surface') + downloader.download_file("slp.mon.mean.nc", sub_folder='surface') + downloader.download_file("wspd.mon.mean.nc", sub_folder='surface') + downloader.download_file("rhum.mon.mean.nc", sub_folder='surface') + + downloader.set_cwd("/Datasets/ncep.reanalysis.derived/surface_gauss/") + downloader.download_file("air.2m.mon.mean.nc", sub_folder='surface') + downloader.download_file("prate.mon.mean.nc", sub_folder='surface') + downloader.download_file("tmax.2m.mon.mean.nc", sub_folder='surface') + downloader.download_file("tmin.2m.mon.mean.nc", sub_folder='surface') + + downloader.set_cwd("/Datasets/ncep.reanalysis.derived/other_gauss/") + downloader.download_file("tcdc.eatm.mon.mean.nc", sub_folder='surface') + downloader.download_file("ulwrf.ntat.mon.mean.nc", sub_folder='surface') + downloader.download_file("csulf.ntat.mon.mean.nc", sub_folder='surface') + downloader.download_file("uswrf.ntat.mon.mean.nc", sub_folder='surface') + downloader.download_file("csusf.ntat.mon.mean.nc", sub_folder='surface') + + while loop_date <= end_date: + year = loop_date.year + downloader.set_cwd("/Datasets/ncep.reanalysis.dailyavgs/pressure/") + downloader.download_file(f"uwnd.{year}.nc", sub_folder='pressure') + downloader.download_file(f"vwnd.{year}.nc", sub_folder='pressure') + downloader.set_cwd("/Datasets/ncep.reanalysis.dailyavgs/surface_gauss") + downloader.download_file(f"prate.sfc.gauss.{year}.nc", + sub_folder='surface') + downloader.set_cwd("/Datasets/ncep.reanalysis.dailyavgs/other_gauss") + downloader.download_file(f"ulwrf.ntat.gauss.{year}.nc", + sub_folder='surface') + + loop_date += relativedelta.relativedelta(years=1) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/ndp.py b/esmvaltool/cmorizers/data/downloaders/datasets/ndp.py new file mode 100644 index 0000000000..391e973d28 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/ndp.py @@ -0,0 +1,44 @@ +"""Script to download NDP from its webpage.""" +import logging +import os + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader +from esmvaltool.cmorizers.data.utilities import 
unpack_files_in_folder + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + if not os.path.isdir(downloader.local_folder): + os.makedirs(downloader.local_folder) + + downloader.download_file( + "https://data.ess-dive.lbl.gov/catalog/d1/mn/v2/object/" + "ess-dive-ec4f4b7097524f6-20180621T213642471", + ["-O", os.path.join(downloader.local_folder, "ndp017b.tar.gz")]) + + unpack_files_in_folder(downloader.local_folder) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/noaa_cires_20cr_v2.py b/esmvaltool/cmorizers/data/downloaders/datasets/noaa_cires_20cr_v2.py new file mode 100644 index 0000000000..bbbd708293 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/noaa_cires_20cr_v2.py @@ -0,0 +1,55 @@ +"""Script to download NOAA-CIRES-20CR-V2.""" +import logging + +from esmvaltool.cmorizers.data.downloaders.ftp import FTPDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = FTPDownloader( + config=config, + server='ftp.cdc.noaa.gov', + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + downloader.connect() + + downloader.set_cwd("/Projects/20thC_ReanV2/Monthlies/") + downloader.download_file("monolevel/cldwtr.eatm.mon.mean.nc", + sub_folder='surface') + downloader.download_file("monolevel/pr_wtr.eatm.mon.mean.nc", + sub_folder='surface') + downloader.download_file("pressure/shum.mon.mean.nc", + sub_folder='pressure') + downloader.download_file("gaussian/monolevel/tcdc.eatm.mon.mean.nc", + sub_folder='surface_gauss') + downloader.download_file("gaussian/monolevel/ulwrf.ntat.mon.mean.nc", + sub_folder='surface_gauss') + downloader.download_file("gaussian/monolevel/uswrf.ntat.mon.mean.nc", + sub_folder='surface_gauss') + downloader.download_file("gaussian/monolevel/prate.mon.mean.nc", + sub_folder='surface_gauss') + downloader.download_file("gaussian/monolevel/uflx.mon.mean.nc", + sub_folder='surface_gauss') + downloader.download_file("gaussian/monolevel/vflx.mon.mean.nc", + sub_folder='surface_gauss') diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/noaa_cires_20cr_v3.py b/esmvaltool/cmorizers/data/downloaders/datasets/noaa_cires_20cr_v3.py new file mode 100644 index 0000000000..67f1a38f33 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/noaa_cires_20cr_v3.py @@ -0,0 +1,53 @@ +"""Script to download NOAA-CIRES-20CR-V3.""" +import logging + +from esmvaltool.cmorizers.data.downloaders.ftp import FTPDownloader + +logger = logging.getLogger(__name__) + + +def 
download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = FTPDownloader( + config=config, + server='ftp.cdc.noaa.gov', + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + downloader.connect() + + downloader.set_cwd("Datasets/20thC_ReanV3/Monthlies/") + downloader.download_file("miscSI-MO/cldwtr.eatm.mon.mean.nc", + sub_folder='surface') + downloader.download_file("miscSI-MO/pr_wtr.eatm.mon.mean.nc", + sub_folder='surface') + downloader.download_file("prsSI-MO/shum.mon.mean.nc", + sub_folder='pressure') + downloader.download_file("miscMO/tcdc.eatm.mon.mean.nc", + sub_folder='surface') + downloader.download_file("ntatFlxSI-MO/ulwrf.ntat.mon.mean.nc", + sub_folder='surface') + downloader.download_file("ntatFlxSI-MO/uswrf.ntat.mon.mean.nc", + sub_folder='surface') + downloader.download_file("ntatFlxSI-MO/csulf.ntat.mon.mean.nc", + sub_folder='surface') + downloader.download_file("ntatFlxSI-MO/csusf.ntat.mon.mean.nc", + sub_folder='surface') diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/noaa_ersstv3b.py b/esmvaltool/cmorizers/data/downloaders/datasets/noaa_ersstv3b.py new file mode 100644 index 0000000000..5a54080be4 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/noaa_ersstv3b.py @@ -0,0 +1,50 @@ +"""Script to download NOAA-ERSST-v3b.""" +import logging +from datetime import datetime + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + if start_date is None: + start_date = datetime(1854, 1, 1) + if end_date is None: + end_date = datetime(2020, 1, 1) + + loop_date = start_date + + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + base_path = ("https://www1.ncdc.noaa.gov/pub/data/cmb/ersst/v3b/netcdf" + "/ersst.{year}{month:02d}.nc") + + while loop_date <= end_date: + downloader.download_folder( + base_path.format(year=loop_date.year, month=loop_date.month), []) + loop_date += relativedelta.relativedelta(months=1) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/noaa_ersstv5.py b/esmvaltool/cmorizers/data/downloaders/datasets/noaa_ersstv5.py new file mode 100644 index 0000000000..7dbeccfe12 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/noaa_ersstv5.py @@ -0,0 +1,50 @@ +"""Script to download NOAA-ERSST-V5.""" +import logging +from datetime import datetime + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + if start_date is None: + start_date = datetime(1854, 1, 1) + if end_date is None: + end_date = datetime(2020, 1, 1) + loop_date = start_date + + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + base_path = ("https://www1.ncdc.noaa.gov/pub/data/cmb/ersst/v5/netcdf/" + "ersst.v5.{year}{month:02d}.nc") + + while loop_date <= end_date: + downloader.download_folder( + base_path.format(year=loop_date.year, month=loop_date.month), []) + loop_date += relativedelta.relativedelta(months=1) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/noaa_mbl_ch4.py b/esmvaltool/cmorizers/data/downloaders/datasets/noaa_mbl_ch4.py new file mode 100644 index 0000000000..3cbf701c97 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/noaa_mbl_ch4.py @@ -0,0 +1,38 @@ +"""Script to download NOAA-MBL-CH4.""" +import logging + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + downloader.download_file( + "https://gml.noaa.gov/webdata/ccgg/trends/ch4/ch4_mm_gl.csv", + wget_options=[], + ) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/noaaglobaltemp.py b/esmvaltool/cmorizers/data/downloaders/datasets/noaaglobaltemp.py new file mode 100644 index 0000000000..cf946df2cc --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/noaaglobaltemp.py @@ -0,0 +1,43 @@ +"""Script to download NOAAGlobalTemp from its webpage.""" +import logging +import os + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader +from esmvaltool.cmorizers.data.utilities import read_cmor_config + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + filename = read_cmor_config(dataset)['filenames']['gridded'] + os.makedirs(downloader.local_folder, exist_ok=True) + downloader.download_file( + f"https://www.ncei.noaa.gov/data/noaa-global-surface-temperature/" + f"v5/access/gridded/" + f"{filename}", + wget_options=[]) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/nsidc_0116_nh.py b/esmvaltool/cmorizers/data/downloaders/datasets/nsidc_0116_nh.py new file mode 100644 index 0000000000..c4bb4e3afe --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/nsidc_0116_nh.py @@ -0,0 +1,49 @@ +"""Script to download NSIDC-0116-nh.""" + +from datetime import datetime + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.wget import NASADownloader + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + if not start_date: + start_date = datetime(1979, 1, 1) + if not end_date: + end_date = datetime(2020, 1, 1) + loop_date = start_date + + downloader = NASADownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + while loop_date <= end_date: + year = loop_date.year + downloader.download_file( + "https://daacdata.apps.nsidc.org/pub/DATASETS/" + "nsidc0116_icemotion_vectors_v4/north/daily/" + f"icemotion_daily_nh_25km_{year}0101_{year}1231_v4.1.nc") + + loop_date += relativedelta.relativedelta(years=1) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/nsidc_0116_sh.py b/esmvaltool/cmorizers/data/downloaders/datasets/nsidc_0116_sh.py new file mode 100644 index 0000000000..adc8acd5ee --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/nsidc_0116_sh.py @@ -0,0 +1,49 @@ +"""Script to download NSIDC-0116-sh.""" + +from datetime import datetime + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.wget import NASADownloader + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + if not start_date: + start_date = datetime(1979, 1, 1) + if not end_date: + end_date = datetime(2020, 1, 1) + loop_date = start_date + + downloader = NASADownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + while loop_date <= end_date: + year = loop_date.year + downloader.download_file( + "https://daacdata.apps.nsidc.org/pub/DATASETS/" + "nsidc0116_icemotion_vectors_v4/south/daily/" + f"icemotion_daily_sh_25km_{year}0101_{year}1231_v4.1.nc") + + loop_date += relativedelta.relativedelta(years=1) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/nsidc_g02202_sh.py b/esmvaltool/cmorizers/data/downloaders/datasets/nsidc_g02202_sh.py new file mode 100644 index 0000000000..8c3c02c410 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/nsidc_g02202_sh.py @@ -0,0 +1,81 @@ +"""Script to download NSIDC-G02202-sh.""" +import logging +from datetime import datetime + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
+
+    Parameters
+    ----------
+    config : dict
+        ESMValTool's user configuration
+    dataset : str
+        Name of the dataset
+    dataset_info : dict
+        Dataset information from the datasets.yml file
+    start_date : datetime
+        Start of the interval to download
+    end_date : datetime
+        End of the interval to download
+    overwrite : bool
+        Overwrite already downloaded files
+    """
+    if start_date is None:
+        start_date = datetime(1979, 1, 1)
+    if end_date is None:
+        end_date = datetime(2023, 1, 1)
+
+    loop_date = start_date
+
+    downloader = WGetDownloader(
+        config=config,
+        dataset=dataset,
+        dataset_info=dataset_info,
+        overwrite=overwrite,
+    )
+
+    # The grid-cell area file is needed in addition to the data files
+    area_dat = ('ftp://sidads.colorado.edu/DATASETS/seaice'
+                '/polar-stereo/tools/pss25area_v3.dat')
+    downloader.download_folder(area_dat, [])
+
+    anc_path = ('https://noaadata.apps.nsidc.org/NOAA/G02202_V4/'
+                'ancillary/G02202-cdr-ancillary-sh.nc')
+    downloader.download_folder(anc_path, [])
+
+    base_path = ('https://noaadata.apps.nsidc.org/NOAA/G02202_V4/south/monthly'
+                 '/seaice_conc_monthly_sh_{year}{month:02d}_{other}_v04r00.nc')
+
+    # The file-name suffix encodes the satellite instrument, which changes
+    # over time: n07 from 1978-11, f08 from 1987-08, f11 from 1992-01,
+    # f13 from 1995-10 and f17 from 2008-01. Each entry in datels marks the
+    # start of the corresponding suffix (end_date acts as a sentinel).
+    datels = [datetime(1978, 11, 1), datetime(1987, 7, 30),
+              datetime(1991, 12, 30), datetime(1995, 9, 30),
+              datetime(2007, 12, 30), end_date]
+    suffls = ['n07', 'f08', 'f11', 'f13', 'f17']
+    isuf = 0
+    suffix = suffls[isuf]
+    # Initialize the suffix in case start_date lies beyond the first bins
+    while loop_date >= datels[isuf]:
+        suffix = suffls[isuf]
+        isuf += 1
+
+    while loop_date <= end_date:
+
+        # Switch to the next suffix once loop_date passes the current bin
+        if loop_date > datels[isuf]:
+            suffix = suffls[isuf]
+            isuf += 1
+
+        downloader.download_folder(
+            base_path.format(year=loop_date.year, month=loop_date.month,
+                             other=suffix), [])
+        loop_date += relativedelta.relativedelta(months=1)
diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/oceansoda_ethz.py b/esmvaltool/cmorizers/data/downloaders/datasets/oceansoda_ethz.py
new file mode 100644
index 0000000000..2e099814dd
--- /dev/null
+++ b/esmvaltool/cmorizers/data/downloaders/datasets/oceansoda_ethz.py
@@ -0,0 +1,39 @@
+"""Script to download OceanSODA-ETHZ."""
+import logging
+
+from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader
+
+logger = logging.getLogger(__name__)
+
+
+def download_dataset(config, dataset, dataset_info, start_date, end_date,
+                     overwrite):
+    """Download dataset.
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + downloader.download_file( + "https://www.ncei.noaa.gov/data/oceans/ncei/ocads/data/0220059/" + "OceanSODA_ETHZ-v2023.OCADS.01_1982-2022.nc", + wget_options=[], + ) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/osi_450_nh.py b/esmvaltool/cmorizers/data/downloaders/datasets/osi_450_nh.py new file mode 100644 index 0000000000..826548503f --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/osi_450_nh.py @@ -0,0 +1,54 @@ +"""Script to download OSI-450-nh.""" + +from datetime import datetime + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.ftp import FTPDownloader + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + if start_date is None: + start_date = datetime(1979, 1, 1) + if end_date is None: + end_date = datetime(2015, 12, 1) + downloader = FTPDownloader( + config=config, + server='osisaf.met.no', + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + downloader.connect() + + loop_date = start_date + + downloader.set_cwd("/reprocessed/ice/conc/v2p0") + + while loop_date <= end_date: + year = loop_date.year + folder = f"{year}/{loop_date.month:02}" + downloader.download_folder( + folder, + sub_folder=folder, + filter_files='.*_nh.*[.]nc', + ) + loop_date += relativedelta.relativedelta(months=1) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/osi_450_sh.py b/esmvaltool/cmorizers/data/downloaders/datasets/osi_450_sh.py new file mode 100644 index 0000000000..5e3fd383f4 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/osi_450_sh.py @@ -0,0 +1,54 @@ +"""Script to download OSI-450-sh.""" + +from datetime import datetime + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.ftp import FTPDownloader + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + if start_date is None: + start_date = datetime(1979, 1, 1) + if end_date is None: + end_date = datetime(2015, 12, 1) + downloader = FTPDownloader( + config=config, + server='osisaf.met.no', + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + downloader.connect() + + loop_date = start_date + + downloader.set_cwd("/reprocessed/ice/conc/v2p0") + + while loop_date <= end_date: + year = loop_date.year + folder = f"{year}/{loop_date.month:02}" + downloader.download_folder( + folder, + sub_folder=folder, + filter_files='.*_sh.*[.]nc', + ) + loop_date += relativedelta.relativedelta(months=1) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/patmos_x.py b/esmvaltool/cmorizers/data/downloaders/datasets/patmos_x.py new file mode 100644 index 0000000000..20b70e7fb9 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/patmos_x.py @@ -0,0 +1,53 @@ +"""Script to download PATMOS-x.""" + +import os +from datetime import datetime + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + if start_date is None: + start_date = datetime(1982, 1, 1) + if end_date is None: + end_date = datetime(2016, 1, 1) + loop_date = start_date + + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + base_path = ( + "https://www.ncei.noaa.gov/data/" + "avhrr-reflectance-cloud-properties-patmos-extended/access/{year}/") + while loop_date <= end_date: + + downloader.download_folder( + base_path.format(year=loop_date.year), + # ["--accept='*NOAA*.nc'", "--reject='*preliminary*'"] + []) + os.remove(os.path.join(downloader.local_folder, 'index.html')) + loop_date += relativedelta.relativedelta(years=1) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/persiann_cdr.py b/esmvaltool/cmorizers/data/downloaders/datasets/persiann_cdr.py new file mode 100644 index 0000000000..5aa19ef257 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/persiann_cdr.py @@ -0,0 +1,50 @@ +"""Script to download PERSIANN-CDR.""" + +import os +from datetime import datetime + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
+
+    Parameters
+    ----------
+    config : dict
+        ESMValTool's user configuration
+    dataset : str
+        Name of the dataset
+    dataset_info : dict
+        Dataset information from the datasets.yml file
+    start_date : datetime
+        Start of the interval to download
+    end_date : datetime
+        End of the interval to download
+    overwrite : bool
+        Overwrite already downloaded files
+    """
+    if start_date is None:
+        start_date = datetime(1983, 1, 1)
+    if end_date is None:
+        end_date = datetime(2020, 1, 1)
+    loop_date = start_date
+
+    downloader = WGetDownloader(
+        config=config,
+        dataset=dataset,
+        dataset_info=dataset_info,
+        overwrite=overwrite,
+    )
+
+    base_path = (
+        "https://www.ncei.noaa.gov/data/precipitation-persiann/access/"
+        "{year}/")
+    while loop_date <= end_date:
+        downloader.download_folder(base_path.format(year=loop_date.year), [])
+        os.remove(os.path.join(downloader.local_folder, 'index.html'))
+        loop_date += relativedelta.relativedelta(years=1)
diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/phc.py b/esmvaltool/cmorizers/data/downloaders/datasets/phc.py
new file mode 100644
index 0000000000..3938735e83
--- /dev/null
+++ b/esmvaltool/cmorizers/data/downloaders/datasets/phc.py
@@ -0,0 +1,40 @@
+"""Script to download PHC from its webpage."""
+import logging
+
+from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader
+from esmvaltool.cmorizers.data.utilities import unpack_files_in_folder
+
+logger = logging.getLogger(__name__)
+
+
+def download_dataset(config, dataset, dataset_info, start_date, end_date,
+                     overwrite):
+    """Download dataset.
+
+    Parameters
+    ----------
+    config : dict
+        ESMValTool's user configuration
+    dataset : str
+        Name of the dataset
+    dataset_info : dict
+        Dataset information from the datasets.yml file
+    start_date : datetime
+        Start of the interval to download
+    end_date : datetime
+        End of the interval to download
+    overwrite : bool
+        Overwrite already downloaded files
+    """
+    downloader = WGetDownloader(
+        config=config,
+        dataset=dataset,
+        dataset_info=dataset_info,
+        overwrite=overwrite,
+    )
+
+    downloader.download_file(
+        "http://psc.apl.washington.edu/nonwp_projects/PHC/Data3/"
+        "phc3.0_annual.nc",
+        wget_options=[])
+    unpack_files_in_folder(downloader.local_folder)
diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/regen.py b/esmvaltool/cmorizers/data/downloaders/datasets/regen.py
new file mode 100644
index 0000000000..4b4727c110
--- /dev/null
+++ b/esmvaltool/cmorizers/data/downloaders/datasets/regen.py
@@ -0,0 +1,48 @@
+"""Script to download REGEN."""
+
+from datetime import datetime
+
+from dateutil import relativedelta
+
+from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader
+from esmvaltool.cmorizers.data.utilities import read_cmor_config
+
+
+def download_dataset(config, dataset, dataset_info, start_date, end_date,
+                     overwrite):
+    """Download dataset.
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + if start_date is None: + start_date = datetime(1950, 1, 1) + if end_date is None: + end_date = datetime(2016, 1, 1) + loop_date = start_date + + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + base_path = ("http://dapds00.nci.org.au/thredds/fileServer/ks32/CLEX_Data/" + "REGEN_AllStns/v1-2019/REGEN_AllStns_{version}_{year}.nc") + version = read_cmor_config(dataset)['attributes']['version'] + while loop_date <= end_date: + downloader.download_folder( + base_path.format(year=loop_date.year, version=version), []) + loop_date += relativedelta.relativedelta(years=1) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/scripps_co2_kum.py b/esmvaltool/cmorizers/data/downloaders/datasets/scripps_co2_kum.py new file mode 100644 index 0000000000..e9b202ea0a --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/scripps_co2_kum.py @@ -0,0 +1,37 @@ +"""Script to download Scripps-CO2-KUM.""" +import logging + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + downloader.download_file( + "https://scrippsco2.ucsd.edu/assets/data/atmospheric/stations/" + "flask_co2/monthly/monthly_flask_co2_kum.csv", []) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/tcom_ch4.py b/esmvaltool/cmorizers/data/downloaders/datasets/tcom_ch4.py new file mode 100644 index 0000000000..c317ec7f06 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/tcom_ch4.py @@ -0,0 +1,39 @@ +"""Script to download TCOM-CH4.""" +import logging + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + downloader.download_file( + "https://zenodo.org/record/7293740/files/" + "zmch4_TCOM_plev_T2Dz_1991_2021.nc", + wget_options=[], + ) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/tcom_n2o.py b/esmvaltool/cmorizers/data/downloaders/datasets/tcom_n2o.py new file mode 100644 index 0000000000..38d4515f97 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/tcom_n2o.py @@ -0,0 +1,39 @@ +"""Script to download TCOM-N2O.""" +import logging + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + downloader.download_file( + "https://zenodo.org/record/7386001/files/" + "zmn2o_TCOM_plev_T2Dz_1991_2021.nc", + wget_options=[], + ) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/woa.py b/esmvaltool/cmorizers/data/downloaders/datasets/woa.py new file mode 100644 index 0000000000..da8529a680 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/woa.py @@ -0,0 +1,58 @@ +"""Script to download WOA from its webpage.""" +import logging +import os +import shutil + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
+
+    Parameters
+    ----------
+    config : dict
+        ESMValTool's user configuration
+    dataset : str
+        Name of the dataset
+    dataset_info : dict
+        Dataset information from the datasets.yml file
+    start_date : datetime
+        Start of the interval to download
+    end_date : datetime
+        End of the interval to download
+    overwrite : bool
+        Overwrite already downloaded files
+    """
+    downloader = WGetDownloader(
+        config=config,
+        dataset=dataset,
+        dataset_info=dataset_info,
+        overwrite=overwrite,
+    )
+
+    def download(file):
+        downloader.download_file(
+            "https://www.ncei.noaa.gov/data/oceans/woa/WOA18/DATA/" + file,
+            wget_options=[])
+
+    data_paths = [
+        "nitrate/netcdf/all/1.00/woa18_all_n00_01.nc",
+        "oxygen/netcdf/all/1.00/woa18_all_o00_01.nc",
+        "phosphate/netcdf/all/1.00/woa18_all_p00_01.nc",
+        "salinity/netcdf/decav81B0/1.00/woa18_decav81B0_s00_01.nc",
+        "silicate/netcdf/all/1.00/woa18_all_i00_01.nc",
+        "temperature/netcdf/decav81B0/1.00/woa18_decav81B0_t00_01.nc"
+    ]
+
+    for source_file in data_paths:
+        download(source_file)
+        filename = os.path.basename(source_file)
+        var = source_file.split("/", maxsplit=1)[0]
+        os.makedirs(os.path.join(downloader.local_folder, var), exist_ok=True)
+        filepath = os.path.join(downloader.local_folder, filename)
+        shutil.move(filepath,
+                    os.path.join(downloader.local_folder, var, filename))
diff --git a/esmvaltool/cmorizers/data/downloaders/downloader.py b/esmvaltool/cmorizers/data/downloaders/downloader.py
new file mode 100644
index 0000000000..f9f36c8ea5
--- /dev/null
+++ b/esmvaltool/cmorizers/data/downloaders/downloader.py
@@ -0,0 +1,48 @@
+"""Downloader base class."""
+
+import os
+
+
+class BaseDownloader():
+    """Base class for all downloaders.
+
+    Parameters
+    ----------
+    config : dict
+        ESMValTool's user configuration
+    dataset : str
+        Dataset to download
+    dataset_info : dict
+        Dataset information from the datasets.yml file
+    overwrite : bool
+        Overwrite already downloaded files
+    """
+    def __init__(self, config, dataset, dataset_info, overwrite):
+        self._config = config
+        self.tier = dataset_info['tier']
+        self.dataset = dataset
+        self.dataset_info = dataset_info
+        self.overwrite = overwrite
+
+    @property
+    def local_folder(self):
+        """Folder where the downloaded data is stored.
+
+        Returns
+        -------
+        str
+            Path to the download folder
+        """
+        return os.path.join(self.rawobs_folder, f'Tier{self.tier}',
+                            self.dataset)
+
+    @property
+    def rawobs_folder(self):
+        """RAWOBS base path.
+
+        Returns
+        -------
+        str
+            Path to the RAWOBS folder
+        """
+        return self._config['rootpath']['RAWOBS'][0]
diff --git a/esmvaltool/cmorizers/data/downloaders/ftp.py b/esmvaltool/cmorizers/data/downloaders/ftp.py
new file mode 100644
index 0000000000..9f0cd5e8f9
--- /dev/null
+++ b/esmvaltool/cmorizers/data/downloaders/ftp.py
@@ -0,0 +1,217 @@
+"""Downloader for FTP repositories."""
+
+import ftplib
+import logging
+import os
+import re
+
+from progressbar import (
+    ETA,
+    Bar,
+    DataSize,
+    FileTransferSpeed,
+    Percentage,
+    ProgressBar,
+)
+
+from .downloader import BaseDownloader
+
+logger = logging.getLogger(__name__)
+
+
+class FTPDownloader(BaseDownloader):
+    """Downloader for FTP repositories.
+
+    Parameters
+    ----------
+    config : dict
+        ESMValTool's user configuration
+    server : str
+        FTP server URL
+    dataset : str
+        Dataset to download
+    dataset_info : dict
+        Dataset information from the datasets.yml file
+    overwrite : bool
+        Overwrite already downloaded files
+    """
+    def __init__(self, config, server, dataset, dataset_info, overwrite):
+        super().__init__(config, dataset, dataset_info, overwrite)
+        self._client = None
+        self.server = server
+
+    def connect(self):
+        """Connect to the FTP server."""
+        self._client = ftplib.FTP(self.server)
+        logger.info('%s', self._client.getwelcome())
+        self._client.login()
+
+    def set_cwd(self, path):
+        """Set current working directory on the remote server.
+
+        Parameters
+        ----------
+        path : str
+            Remote path to set as current working directory.
+        """
+        logger.debug('Current working directory: %s', self._client.pwd())
+        logger.debug('Setting working directory to %s', path)
+        self._client.cwd(path)
+        logger.debug('New working directory: %s', self._client.pwd())
+
+    def list_folders(self, server_path='.'):
+        """List folders on the remote server.
+
+        Parameters
+        ----------
+        server_path : str, optional
+            Folder to list, by default '.'
+
+        Returns
+        -------
+        list(str)
+            List of folder names
+        """
+        filenames = self._client.mlsd(server_path, facts=['type'])
+        return [
+            filename for filename, facts in filenames if facts['type'] == 'dir'
+        ]
+
+    def exists(self, server_path):
+        """Check if a given path exists on the server.
+
+        Parameters
+        ----------
+        server_path : str
+            Path to check for existence.
+        """
+        return server_path in self._client.nlst()
+
+    def download_folder(self, server_path, sub_folder='', filter_files=None):
+        """Download files from a given folder.
+
+        Parameters
+        ----------
+        server_path : str
+            Folder to download
+        sub_folder : str, optional
+            Name of the local subfolder to store the results in, by default ''
+        filter_files : str, optional
+            If set, only download files that match this regular expression,
+            by default None
+        """
+        # get filenames within the directory
+        filenames = self._client.nlst(server_path)
+        logger.info('Downloading files in %s', server_path)
+        if filter_files:
+            expression = re.compile(filter_files)
+            filenames = [
+                filename for filename in filenames
+                if expression.match(os.path.basename(filename))
+            ]
+        for filename in filenames:
+            self.download_file(filename, sub_folder)
+
+    def download_file(self, server_path, sub_folder=''):
+        """Download a file from the server.
+
+        Parameters
+        ----------
+        server_path : str
+            Path to the file
+        sub_folder : str, optional
+            Name of the local subfolder to store the results in, by default ''
+        """
+        os.makedirs(os.path.join(self.local_folder, sub_folder), exist_ok=True)
+        local_path = os.path.join(self.local_folder, sub_folder,
+                                  os.path.basename(server_path))
+        if not self.overwrite and os.path.isfile(local_path):
+            logger.info('File %s already downloaded. Skipping...', server_path)
+            return
+        logger.info('Downloading %s', server_path)
+        logger.debug('Downloading to %s', local_path)
+
+        self._client.sendcmd("TYPE i")
+        size = self._client.size(server_path)
+
+        widgets = [
+            DataSize(),
+            Bar(),
+            Percentage(), ' ',
+            FileTransferSpeed(), ' (',
+            ETA(), ')'
+        ]
+
+        progress = ProgressBar(max_value=size, widgets=widgets)
+        progress.start()
+
+        with open(local_path, 'wb') as file_handler:
+
+            def _file_write(data):
+                file_handler.write(data)
+                nonlocal progress
+                progress += len(data)
+
+            try:
+                self._client.retrbinary(f'RETR {server_path}', _file_write)
+            except Exception:
+                file_handler.close()
+                if os.path.exists(local_path):
+                    os.remove(local_path)
+                raise
+
+        progress.finish()
+
+
+class CCIDownloader(FTPDownloader):
+    """Downloader for the CEDA ESA-CCI repository.
+
+    Parameters
+    ----------
+    config : dict
+        ESMValTool's user configuration
+    dataset : str
+        Dataset to download
+    dataset_info : dict
+        Dataset information from the datasets.yml file
+    overwrite : bool
+        Overwrite already downloaded files
+    """
+    def __init__(self, config, dataset, dataset_info, overwrite):
+        super().__init__(config, 'anon-ftp.ceda.ac.uk', dataset, dataset_info,
+                         overwrite)
+        self.ftp_name = self.dataset_name[7:]
+
+    def set_cwd(self, path):
+        """Set current working directory.
+
+        Relative to the dataset root folder.
+
+        Parameters
+        ----------
+        path : str
+            Remote path to set as current working directory.
+        """
+        cwd = f'/neodc/esacci/{self.ftp_name}/data/{path}'
+        super().set_cwd(cwd)
+
+    @property
+    def dataset_name(self):
+        """Name of the dataset in the repository.
+
+        Returns
+        -------
+        str
+            Name of the dataset in the repository.
+        """
+        return self.dataset.lower().replace('-', '_')
+
+    def download_year(self, year):
+        """Download a specific year.
+
+        Parameters
+        ----------
+        year : int
+            Year to download
+        """
+        self.download_folder(str(year))
diff --git a/esmvaltool/cmorizers/data/downloaders/wget.py b/esmvaltool/cmorizers/data/downloaders/wget.py
new file mode 100644
index 0000000000..2afcca1d5a
--- /dev/null
+++ b/esmvaltool/cmorizers/data/downloaders/wget.py
@@ -0,0 +1,123 @@
+"""wget based downloader."""
+
+import logging
+import os
+import subprocess
+
+from .downloader import BaseDownloader
+
+logger = logging.getLogger(__name__)
+
+
+class WGetDownloader(BaseDownloader):
+    """Data downloader based on wget."""
+    def download_folder(self, server_path, wget_options):
+        """Download folder.
+
+        Parameters
+        ----------
+        server_path: str
+            Path to remote folder
+        wget_options: list(str)
+            Extra options for wget
+        """
+        if self.overwrite:
+            raise ValueError(
+                'Overwrite does not work with downloading directories through '
+                'wget. Please remove the unwanted data manually')
+        command = ['wget'] + wget_options + self.overwrite_options + [
+            f'--directory-prefix={self.local_folder}',
+            '--recursive',
+            '--no-directories',
+            f'{server_path}',
+        ]
+        logger.debug(command)
+        subprocess.check_output(command)
+
+    def download_file(self, server_path, wget_options):
+        """Download file.
+
+        Parameters
+        ----------
+        server_path: str
+            Path to remote file
+        wget_options: list(str)
+            Extra options for wget
+        """
+        command = ['wget'] + wget_options + self.overwrite_options + [
+            f'--directory-prefix={self.local_folder}',
+            '--no-directories',
+            server_path,
+        ]
+        if self.overwrite:
+            # Pass -O and its value as separate arguments, and point it at
+            # the download folder (-O bypasses --directory-prefix)
+            command.extend(
+                ['-O',
+                 os.path.join(self.local_folder,
+                              os.path.basename(server_path))])
+        logger.debug(command)
+        subprocess.check_output(command)
+
+    def login(self, server_path, wget_options):
+        """Login.
+
+        Parameters
+        ----------
+        server_path: str
+            Path to remote file
+        wget_options: list(str)
+            Extra options for wget
+        """
+        command = ['wget'] + wget_options + [server_path]
+        logger.debug(command)
+        subprocess.check_output(command)
+
+    @property
+    def overwrite_options(self):
+        """Get overwrite options as configured in downloader."""
+        if not self.overwrite:
+            return [
+                '--no-clobber',
+            ]
+        return []
+
+
+class NASADownloader(WGetDownloader):
+    """Downloader for the NASA repository."""
+    def __init__(self, config, dataset, dataset_info, overwrite):
+        super().__init__(config, dataset, dataset_info, overwrite)
+
+        self._wget_common_options = [
+            "--load-cookies=~/.urs_cookies",
+            "--save-cookies=~/.urs_cookies",
+            "--auth-no-challenge=on",
+            "--keep-session-cookies",
+            "--no-check-certificate",
+        ]
+
+    def download_folder(self, server_path, wget_options=None):
+        """Download folder.
+
+        Parameters
+        ----------
+        server_path: str
+            Path to remote folder
+        wget_options: list(str)
+            Extra options for wget, by default None
+        """
+        if wget_options is None:
+            wget_options = []
+        wget_options = self._wget_common_options + [
+            "-np", "--accept=nc,nc4,hdf"] + wget_options
+        super().download_folder(server_path, wget_options)
+
+    def download_file(self, server_path, wget_options=None):
+        """Download file.
+
+        Parameters
+        ----------
+        server_path: str
+            Path to remote file
+        wget_options: list(str)
+            Extra options for wget, by default None
+        """
+        if wget_options is None:
+            wget_options = []
+        super().download_file(server_path,
+                              self._wget_common_options + wget_options)
diff --git a/esmvaltool/cmorizers/data/formatters/__init__.py b/esmvaltool/cmorizers/data/formatters/__init__.py
new file mode 100644
index 0000000000..26e793d256
--- /dev/null
+++ b/esmvaltool/cmorizers/data/formatters/__init__.py
@@ -0,0 +1 @@
+"""Formatters and helpers."""
diff --git a/esmvaltool/cmorizers/data/formatters/datasets/__init__.py b/esmvaltool/cmorizers/data/formatters/datasets/__init__.py
new file mode 100644
index 0000000000..4dcf5c0c9a
--- /dev/null
+++ b/esmvaltool/cmorizers/data/formatters/datasets/__init__.py
@@ -0,0 +1 @@
+"""Formatters for datasets."""
diff --git a/esmvaltool/cmorizers/data/formatters/datasets/aeronet.py b/esmvaltool/cmorizers/data/formatters/datasets/aeronet.py
new file mode 100755
index 0000000000..e3b5c968d8
--- /dev/null
+++ b/esmvaltool/cmorizers/data/formatters/datasets/aeronet.py
@@ -0,0 +1,410 @@
+"""ESMValTool CMORizer for Aeronet data.
+
+Tier
+    Tier 3: restricted dataset.
+
+Source
+    https://aeronet.gsfc.nasa.gov/
+
+Last access
+    20240406
+
+Download and processing instructions
+    Download the following file:
+    https://aeronet.gsfc.nasa.gov/data_push/V3/AOD/AOD_Level20_Monthly_V3.tar.gz
+"""
+
+import logging
+import os.path
+import re
+from datetime import datetime
+from typing import NamedTuple
+
+import cf_units
+import dask.array as da
+import iris
+import iris.coords
+import iris.cube
+import numpy as np
+import pandas as pd
+from fsspec.implementations.tar import TarFileSystem
+from pys2index import S2PointIndex
+
+from esmvaltool.cmorizers.data import utilities as utils
+
+logger = logging.getLogger(__name__)
+
+AERONET_HEADER = "AERONET Version 3;"
+LEVEL_HEADER = "Version 3: AOD Level 2.0"
+LEVEL_DESCRIPTION = (
+    "The following data are automatically cloud cleared and quality assured "
+    "with pre-field and post-field calibration applied.")
+UNITS_HEADER = (
+    "UNITS can be found at,,, https://aeronet.gsfc.nasa.gov/new_web/units.html"
+)
+DATA_QUALITY_LEVEL = "lev20"
+
+CONTACT_PATTERN = re.compile(
+    "Contact: PI=(?P<names>[^;]*); PI Email=(?P<emails>.*)")
+
+
+def compress_column(data_frame, name):
+    """Assert all values in DataFrame column are equal, and return value."""
+    compressed = data_frame.pop(name).unique()
+    if len(compressed) != 1:
+        raise ValueError(
+            f"Data frame column '{name}' must only contain"
+            f" one unique value, found {len(compressed)}"
+        )
+    return compressed[0]
+
+
+class AeronetStation(NamedTuple):
+    """AERONET station data."""
+
+    station_name: str
+    latitude: float
+    longitude: float
+    elevation: float
+    contacts: str
+    data_frame: pd.DataFrame
+
+
+class AeronetStations(NamedTuple):
+    """AERONET station data lists."""
+
+    station_name: list[str]
+    latitude: list[float]
+    longitude: list[float]
+    elevation: list[float]
+    contacts: list[str]
+    data_frame: list[pd.DataFrame]
+
+
+def parse_contact(contact):
+    """Parse and reformat contact information in AERONET file."""
+    match = CONTACT_PATTERN.fullmatch(contact)
+    if match is None:
+        raise RuntimeError(f"Could not parse contact line {contact}")
+    names = match.group("names").replace("_", " ").split(" and ")
+    emails = match.group("emails").split("_and_")
+    mailboxes = ", ".join([
+        f'"{name}" <{email}>' for name, email in zip(names, emails)
+    ])
+    return mailboxes
+
+
+def load_file(filesystem, path_like):
+    """Load AERONET data from fsspec filesystem instance."""
+    with filesystem.open(path_like, mode="rt", encoding="iso-8859-1") as file:
+        aeronet_header = file.readline().strip()
+        if aeronet_header != AERONET_HEADER:
+            raise ValueError(
+                f"File header identifier is '{aeronet_header}',"
+                f" expected '{AERONET_HEADER}'"
+            )
+        station_name = file.readline().strip()
+        level_header = file.readline().strip()
+        if level_header != LEVEL_HEADER:
+            raise ValueError(
+                f"File level string is '{level_header}',"
+                f" expected '{LEVEL_HEADER}'"
+            )
+        level_description = file.readline().strip()
+        if level_description != LEVEL_DESCRIPTION:
+            raise ValueError(
+                f"File data description string is"
+                f" '{level_description}', expected '{LEVEL_DESCRIPTION}'"
+            )
+        contact_string = file.readline().strip()
+        units_header = file.readline().strip()
+        if units_header != UNITS_HEADER:
+            raise ValueError(
+                f"File units info string is '{units_header}',"
+                f" expected '{UNITS_HEADER}'"
+            )
+        data_frame = pd.read_csv(
+            file,
+            index_col=0,
+            na_values=-999.0,
+            date_format="%Y-%b",
+            parse_dates=[0],
+            usecols=lambda x: "AOD_Empty" not in x,
+        )
+        contacts = parse_contact(contact_string)
+        elevation = compress_column(data_frame, "Elevation(meters)")
+        latitude = compress_column(data_frame, "Latitude(degrees)")
+        longitude = compress_column(data_frame, "Longitude(degrees)")
+        data_quality_level = compress_column(data_frame, "Data_Quality_Level")
+        if data_quality_level != DATA_QUALITY_LEVEL:
+            raise ValueError(
+                f"File data quality level is '{data_quality_level}',"
+                f" expected '{DATA_QUALITY_LEVEL}'"
+            )
+        station = AeronetStation(
+            station_name,
+            latitude,
+            longitude,
+            elevation,
+            contacts,
+            data_frame,
+        )
+        return station
+
+
+def sort_data_columns(columns):
+    """Sort AOD station data columns."""
+    data_columns = [c for c in columns if "NUM_" not in c]
+    if len(columns) != 3 * len(data_columns):
+        raise ValueError(
+            "Station data contains unexpected number of columns."
+        )
+    aod_columns = [c for c in data_columns if c.startswith("AOD_")]
+    precipitable_water_columns = [
+        c for c in data_columns if c == "Precipitable_Water(cm)"
+    ]
+    angstrom_exponent_columns = [
+        c for c in data_columns if "_Angstrom_Exponent" in c
+    ]
+    if len(data_columns) != (len(aod_columns) +
+                             len(precipitable_water_columns) +
+                             len(angstrom_exponent_columns)):
+        raise ValueError(
+            "Station data contains unexpected number of columns."
+        )
+    return (aod_columns, precipitable_water_columns, angstrom_exponent_columns)
+
+
+def merge_stations(stations):
+    """Collect and merge station data into AeronetStations instance."""
+    columns = {}
+    for name, dtype in (
+        ("station_name", str),
+        ("latitude", np.float64),
+        ("longitude", np.float64),
+        ("elevation", np.float64),
+        ("contacts", str),
+        ("data_frame", object),
+    ):
+        columns[name] = np.array(
+            [getattr(station, name) for station in stations],
+            dtype=dtype,
+        )
+    return AeronetStations(**columns)
+
+
+def assemble_cube(stations, idx, wavelengths=None):
+    """Assemble Iris cube with station data.
+
+    Parameters
+    ----------
+    stations : AeronetStations
+        Station data
+    idx : numpy.ndarray
+        Indices that sort the stations by S2 cell id
+    wavelengths : list, optional
+        Wavelengths to include in data.
+
+    Returns
+    -------
+    iris.cube.Cube
+        Iris cube with station data.
+
+    Raises
+    ------
+    ValueError
+        If station data has inconsistent variable names.
+    """
+    min_time = np.array([df.index.min() for df in stations.data_frame]).min()
+    max_time = np.array([df.index.max() for df in stations.data_frame]).max()
+    date_index = pd.date_range(min_time, max_time, freq="MS")
+    data_frames = [df.reindex(index=date_index) for df in stations.data_frame]
+    all_data_columns = np.unique(
+        np.array([df.columns for df in data_frames], dtype=str),
+        axis=0,
+    )
+    if len(all_data_columns) != 1:
+        raise ValueError(
+            "Station data frames have different sets of column names."
+        )
+    aod_columns, _, _ = sort_data_columns(all_data_columns[0])
+    if wavelengths is None:
+        wavelengths = sorted([int(c[4:-2]) for c in aod_columns])
+
+    aod = da.stack([
+        da.stack([df[f"AOD_{wl}nm"].values for wl in wavelengths], axis=-1)
+        for df in data_frames
+    ], axis=-1)[..., idx]
+    num_days = da.stack([
+        da.stack([
+            df[f"NUM_DAYS[AOD_{wl}nm]"].values.astype(np.float32)
+            for wl in wavelengths
+        ], axis=-1) for df in data_frames
+    ], axis=-1)[..., idx]
+    num_points = da.stack([
+        da.stack([
+            df[f"NUM_POINTS[AOD_{wl}nm]"].values.astype(np.float32)
+            for wl in wavelengths
+        ], axis=-1) for df in data_frames
+    ], axis=-1)[..., idx]
+
+    wavelength_points = da.array(wavelengths, dtype=np.float64)
+    wavelength_coord = iris.coords.DimCoord(
+        points=wavelength_points,
+        standard_name="radiation_wavelength",
+        long_name="Wavelength",
+        var_name="wl",
+        units="nm",
+    )
+    times = date_index.to_pydatetime()
+    time_points = np.array(
+        [datetime(year=t.year, month=t.month, day=15) for t in times])
+    time_bounds_lower = times
+    time_bounds_upper = np.array([
+        datetime(year=t.year + (t.month == 12),
+                 month=t.month + 1 - (t.month == 12) * 12,
+                 day=1) for t in times
+    ])
+    time_bounds = np.stack([time_bounds_lower, time_bounds_upper], axis=-1)
+    time_units = cf_units.Unit("days since 1850-01-01", calendar="standard")
+    time_coord = iris.coords.DimCoord(
+        points=time_units.date2num(time_points),
+        standard_name="time",
+        long_name="time",
+        var_name="time",
+        units=time_units,
+        bounds=time_units.date2num(time_bounds),
+    )
+    index_coord = iris.coords.DimCoord(
+        points=da.arange(aod.shape[-1]),
+        standard_name=None,
+        long_name="Station index (arbitrary)",
+        var_name="station_index",
+        units="1",
+    )
+    name_coord = iris.coords.AuxCoord(
+        points=stations.station_name[idx],
+        standard_name="platform_name",
+        long_name="Aeronet Station Name",
+        var_name="station_name",
+    )
+    elevation_coord = iris.coords.AuxCoord(
+        points=stations.elevation[idx],
+        standard_name="height_above_mean_sea_level",
+        long_name="Elevation",
+        var_name="elev",
+        units="m",
+    )
+    latitude_coord = iris.coords.AuxCoord(
+        points=stations.latitude[idx],
+        standard_name="latitude",
+        long_name="Latitude",
+        var_name="lat",
+        units="degrees_north",
+    )
+    longitude_coord = iris.coords.AuxCoord(
+        points=stations.longitude[idx],
+        standard_name="longitude",
+        long_name="Longitude",
+        var_name="lon",
+        units="degrees_east",
+    )
+    num_days_ancillary = iris.coords.AncillaryVariable(
+        data=da.ma.masked_array(num_days, da.isnan(num_days),
+                                fill_value=1.e20),
+        standard_name=None,
+        long_name="Number of days",
+        var_name="num_days",
+        units="1",
+    )
+    num_points_ancillary = iris.coords.AncillaryVariable(
+        data=da.ma.masked_array(num_points,
+                                da.isnan(num_points),
+                                fill_value=1.e20),
+        standard_name="number_of_observations",
+        long_name="Number of observations",
+        var_name="num_points",
+        units="1",
+    )
+    cube = iris.cube.Cube(
+        data=da.ma.masked_array(aod, da.isnan(aod), fill_value=1.e20),
+        standard_name=(
+            "atmosphere_optical_thickness_due_to_ambient_aerosol_particles"),
+        long_name="Aerosol Optical Thickness",
+        var_name="aod",
+        units="1",
+        dim_coords_and_dims=[
+            (time_coord, 0),
+            (wavelength_coord, 1),
+            (index_coord, 2),
+        ],
+        aux_coords_and_dims=[
+            (latitude_coord, 2),
+            (longitude_coord, 2),
+            (elevation_coord, 2),
+            (name_coord, 2),
+        ],
+        ancillary_variables_and_dims=[
+            (num_days_ancillary, (0, 1, 2)),
+            (num_points_ancillary, (0, 1, 2)),
+        ],
+    )
+    return cube
+
+
+def build_cube(filesystem, paths, wavelengths=None):
+    """Build station data cube."""
+    individual_stations = [
+        load_file(filesystem, file_path) for file_path in paths
+    ]
+    stations = merge_stations(individual_stations)
+    latlon_points = np.stack([stations.latitude, stations.longitude], axis=-1)
+    index = S2PointIndex(latlon_points)
+    cell_ids = index.get_cell_ids()
+    idx = np.argsort(cell_ids)
+    cube = assemble_cube(stations, idx, wavelengths)
+    return cube
+
+
+def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date):
+    """Cmorization func call."""
+    raw_filename = cfg['filename']
+
+    tar_file_system = TarFileSystem(f"{in_dir}/{raw_filename}")
+    paths = tar_file_system.glob("AOD/AOD20/MONTHLY/*.lev20")
+    versions = np.unique(
+        np.array([os.path.basename(p).split("_")[1] for p in paths],
+                 dtype=str))
+    if len(versions) != 1:
+        raise ValueError(
+            "All station datasets in tar file must have same version."
+        )
+    version = versions[0]
+    wavelengths = sorted(
+        [var["wavelength"] for var in cfg['variables'].values()])
+    cube = build_cube(tar_file_system, paths, wavelengths)
+
+    attrs = cfg['attributes'].copy()
+    attrs['version'] = version
+
+    # Run the cmorization
+    for (short_name, var) in cfg['variables'].items():
+        logger.info("CMORizing variable '%s'", short_name)
+
+        idx = wavelengths.index(var["wavelength"])
+        sub_cube = cube[:, idx]
+
+        attrs['mip'] = var['mip']
+
+        # Fix metadata
+        utils.set_global_atts(sub_cube, attrs)
+
+        # Save variable
+        utils.save_variable(
+            sub_cube,
+            short_name,
+            out_dir,
+            attrs,
+            unlimited_dimensions=['time'],
+        )
diff --git a/esmvaltool/cmorizers/data/formatters/datasets/agcd.py b/esmvaltool/cmorizers/data/formatters/datasets/agcd.py
new file mode 100644
index 0000000000..f0d6b290ef
--- /dev/null
+++ b/esmvaltool/cmorizers/data/formatters/datasets/agcd.py
@@ -0,0 +1,122 @@
+"""ESMValTool CMORizer for AGCD data.
+
+Tier
+    Tier 2: other freely available dataset.
+
+Source
+    https://dx.doi.org/10.25914/rses-zh67
+
+Last access
+    20231121
+
+Download and processing instructions
+    Data from NCI (National Computing Infrastructure Australia)
+    https://nci.org.au/,
+    requiring an NCI account and access to Gadi (supercomputer in Australia)
+    and the dataset project found in
+    catalogue record https://dx.doi.org/10.25914/rses-zh67.
+    Access can be requested through NCI.
+    NCI is an ESGF node: https://esgf.nci.org.au/projects/esgf-nci/
+    Processing is done on Gadi.
+
+"""
+import logging
+import os
+import re
+
+import iris
+
+from esmvalcore.cmor._fixes.shared import get_time_bounds
+from esmvaltool.cmorizers.data import utilities as utils
+
+logger = logging.getLogger(__name__)
+
+
+def _get_filepaths(in_dir, basename):
+    """Find correct name of file (extend basename with timestamp)."""
+    regex = re.compile(basename)
+    return_files = []
+    for root, _, files in os.walk(in_dir, followlinks=True):
+
+        for filename in files:
+            if regex.match(filename):
+                return_files.append(os.path.join(root, filename))
+
+    return return_files
+
+
+def fix_data_var(cube, var):
+    """Convert units in cube for the variable."""
+    # Days per month (leap years are ignored)
+    monthdays = {1: 31, 2: 28, 3: 31, 4: 30, 5: 31, 6: 30,
+                 7: 31, 8: 31, 9: 30, 10: 31, 11: 30, 12: 31}
+    if var == 'pr':
+        newcubels = []
+        for i, m_cube in enumerate(cube.slices(['latitude', 'longitude'])):
+            m_cube = m_cube / (monthdays[i + 1] * 86400)  # days in month
+            newcubels.append(m_cube)
+
+        cube = iris.cube.CubeList(newcubels).merge()[0]
+        cube.units = 'kg m-2 s-1'
+
+    elif var in ['tas', 'tasmin', 'tasmax']:  # other variables in v1
+        cube = cube + 273.15
+        cube.units = 'K'
+        utils.add_height2m(cube)
+
+    else:
+        logger.info("Variable %s not converted", var)
+
+    return cube
+
+
+def _extract_variable(cmor_info, attrs, filepath, out_dir):
+    """Extract variable."""
+    var = cmor_info.short_name
+    logger.info("Var is %s", var)
+    cubes = iris.load(filepath)
+    for cube in cubes:
+
+        cube = fix_data_var(cube, var)
+
+        utils.fix_var_metadata(cube, cmor_info)
+
+        cube = utils.fix_coords(cube)
+        bounds = get_time_bounds(cube.coords('time')[0], 'mon')
+        cube.coords('time')[0].bounds = bounds
+        utils.set_global_atts(cube, attrs)
+
+        logger.info("Saving file")
+        utils.save_variable(cube,
+                            var,
+                            out_dir,
+                            attrs,
+                            unlimited_dimensions=['time'])
+
+
+def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date):
+    """Cmorization func call."""
+    glob_attrs = cfg['attributes']
+    cmor_table = cfg['cmor_table']
+
+    ver = cfg['attributes']['version']
+
+    # Run the cmorization (multiple variables)
+    for (var, var_info) in cfg['variables'].items():
+
+        glob_attrs['mip'] = var_info['mip']
+        logger.info("CMORizing variable '%s', %s", var, var_info['mip'])
+
+        raw_filename = cfg['filename'].format(version=ver,
+                                              variable=var_info['raw'],
+                                              raw_calc=var_info['raw_calc'],
+                                              freq=var_info['freq'])
+        filepaths = _get_filepaths(in_dir, raw_filename)
+
+        if not filepaths:
+            logger.info("No files found for %s (pattern: %s)", var,
+                        raw_filename)
+            logger.info("Directory: %s", in_dir)
+        for inputfile in filepaths:
+            logger.info("Found input file '%s'", inputfile)
+
+            cmor_info = cmor_table.get_variable(var_info['mip'], var)
+            _extract_variable(cmor_info, glob_attrs, inputfile, out_dir)
diff --git a/esmvaltool/cmorizers/data/formatters/datasets/anuclimate.py b/esmvaltool/cmorizers/data/formatters/datasets/anuclimate.py
new file mode 100644
index 0000000000..f82ad295ca
--- /dev/null
+++ b/esmvaltool/cmorizers/data/formatters/datasets/anuclimate.py
@@ -0,0 +1,125 @@
+"""ESMValTool CMORizer for ANU Climate data.
+
+Tier
+    Tier 3: restricted dataset.
+
+Source
+    https://dx.doi.org/10.25914/60a10aa56dd1b
+
+Last access
+    20231121
+
+Download and processing instructions
+    Data from an NCI project, requiring an NCI account and access to Gadi.
+    Processing is done on Gadi.
+
+"""
+import logging
+import os
+import re
+import calendar
+
+import iris
+
+from esmvaltool.cmorizers.data import utilities as utils
+
+logger = logging.getLogger(__name__)
+
+
+def _get_filepaths(in_dir, basename):
+    """Find correct name of file (extend basename with timestamp)."""
+    regex = re.compile(basename)
+    return_files = []
+    # Search sub folders of raw data directory
+    for root, _dir, files in os.walk(in_dir, followlinks=True):
+
+        for filename in files:
+            if regex.match(filename):
+
+                return_files.append(os.path.join(root, filename))
+
+    return return_files
+
+
+def fix_data_var(cube, var):
+    """Convert units in cube for the variable."""
+    # Get month and year from the cube
+    tcoord = cube.coord('time')
+    tdate = tcoord.units.num2date(tcoord.points[0])
+    no_ofdays = calendar.monthrange(tdate.year, tdate.month)[1]
+
+    if var == 'pr':
+
+        cube = cube / (no_ofdays * 86400)  # days in month
+        cube.units = 'kg m-2 s-1'
+
+    elif var in ['tas', 'tasmin', 'tasmax']:  # other variables in v1
+        cube = cube + 273.15
+        cube.units = 'K'
+        utils.add_height2m(cube)
+
+    else:
+        logger.info("Variable %s not converted", var)
+
+    return cube, tdate.year
+
+
+def _extract_variable(cmor_info, attrs, filepaths, out_dir):
+    """Extract variable."""
+    var = cmor_info.short_name
+    logger.info("Var is %s", var)
+    cbls_2 = iris.cube.CubeList()
+    cbls_1 = iris.cube.CubeList()
+    for filepath in filepaths:
+        cubes = iris.load(filepath)
+
+        cube, year = fix_data_var(cubes[0], var)
+
+        utils.fix_var_metadata(cube, cmor_info)
+
+        utils.set_global_atts(cube, attrs)
+
+        if year < 2000:  # split for cube save
+            cbls_1.append(cube)
+        else:
+            cbls_2.append(cube)
+
+    for cbls in [cbls_1, cbls_2]:
+        iris.util.equalise_attributes(cbls)
+        cubesave = cbls.concatenate_cube()
+        cubesave = utils.fix_coords(cubesave)
+
+        logger.info("Saving file")
+        utils.save_variable(cubesave,
+                            var,
+                            out_dir,
+                            attrs,
+                            unlimited_dimensions=['time'])
+
+
+def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date):
+    """Cmorization func call."""
+    glob_attrs = cfg['attributes']
+    cmor_table = cfg['cmor_table']
+
+    ver = cfg['attributes']['version']
+    logger.info("Configuration: %s, user settings: %s", cfg, cfg_user)
+
+    # Run the cmorization (multiple variables)
+    for (var, var_info) in cfg['variables'].items():
+
+        glob_attrs['mip'] = var_info['mip']
+
+        raw_filename = cfg['filename'].format(version=ver,
+                                              raw=var_info['raw'],
+                                              freq=var_info['freq'])
+        filepaths = _get_filepaths(in_dir, raw_filename)
+
+        if len(filepaths) == 0:
+            logger.info("No files found for %s (pattern: %s)", var,
+                        raw_filename)
+            logger.info("Directory: %s", in_dir)
+        else:
+            logger.info("Found %s files", len(filepaths))
+
+        cmor_info = cmor_table.get_variable(var_info['mip'], var)
+        _extract_variable(cmor_info, glob_attrs, filepaths, out_dir)
diff --git a/esmvaltool/cmorizers/data/formatters/datasets/aphro_ma.py b/esmvaltool/cmorizers/data/formatters/datasets/aphro_ma.py
new file mode 100755
index 0000000000..1e1f9dbc4b
--- /dev/null
+++ b/esmvaltool/cmorizers/data/formatters/datasets/aphro_ma.py
@@ -0,0 +1,149 @@
+"""ESMValTool CMORizer for APHRODITE Monsoon Asia (APHRO-MA) data.
+
+Tier
+    Tier 3: restricted dataset.
+
+Source
+    http://aphrodite.st.hirosaki-u.ac.jp/download/
+
+Last access
+    20200306
+
+Download and processing instructions
+    Register at
+    http://aphrodite.st.hirosaki-u.ac.jp/download/create/
+
+    Download the following files from
+    http://aphrodite.st.hirosaki-u.ac.jp/product/:
+    APHRO_V1808_TEMP/APHRO_MA
+        025deg_nc/APHRO_MA_TAVE_025deg_V1808.nc.tgz
+        050deg_nc/APHRO_MA_TAVE_050deg_V1808.nc.tgz
+    APHRO_V1101/APHRO_MA
+        025deg_nc/APHRO_MA_025deg_V1101.1951-2007.nc.gz.tar
+        050deg_nc/APHRO_MA_050deg_V1101.1951-2007.nc.gz.tar
+    APHRO_V1101EX_R1/APHRO_MA
+        025deg_nc/APHRO_MA_025deg_V1101_EXR1.nc.tgz
+        050deg_nc/APHRO_MA_050deg_V1101_EXR1.nc.tgz
+
+    Please untar / unzip all *.tar *.tgz *.gz files in the same directory
+    (no subdirectories!) prior to running the cmorizer!
+
+Issues:
+    In the input file APHRO_MA_TAVE_050deg_V1808.2015.nc the input variable
+    is called ta instead of tave as in the other files.
+    This is currently resolved by falling back to raw_fallback: ta whenever
+    iris.exceptions.ConstraintMismatchError is raised.
+
+Refs:
+    APHRO_V1101 and APHRO_V1101EX_R1
+    Yatagai, A., K. Kamiguchi, O. Arakawa, A. Hamada, N. Yasutomi, and
+    A. Kitoh, 2012: APHRODITE: Constructing a Long-Term Daily Gridded
+    Precipitation Dataset for Asia Based on a Dense Network of Rain Gauges.
+    Bull. Amer. Meteor. Soc., 93, 1401–1415,
+    https://doi.org/10.1175/BAMS-D-11-00122.1
+
+    APHRO_V1808_TEMP
+    Yasutomi, N., Hamada, A., Yatagai, A. (2011) Development of a long-term
+    daily gridded temperature dataset and its application to rain/snow
+    discrimination of daily precipitation,
+    Global Environmental Research 15 (2), 165-172
+"""
+
+import logging
+from pathlib import Path
+from warnings import catch_warnings, filterwarnings
+
+import iris
+from esmvalcore.preprocessor import monthly_statistics
+from iris import NameConstraint
+
+from esmvaltool.cmorizers.data import utilities as utils
+
+logger = logging.getLogger(__name__)
+
+
+def _extract_variable(short_name, var, cfg, filepath, out_dir, version):
+    """Extract variable."""
+    logger.info("CMORizing variable '%s' from input file '%s'", short_name,
+                filepath)
+
+    with catch_warnings():
+        filterwarnings(
+            action='ignore',
+            message="Skipping global attribute 'calendar': 'calendar' is .*",
+            category=UserWarning,
+            module='iris',
+        )
+        try:
+            cube = iris.load_cube(
+                str(filepath),
+                constraint=NameConstraint(var_name=var['raw']),
+            )
+        except iris.exceptions.ConstraintMismatchError:
+            cube = iris.load_cube(
+                str(filepath),
+                constraint=NameConstraint(var_name=var['raw_fallback']),
+            )
+
+    # Fix var units
+    cmor_info = cfg['cmor_table'].get_variable(var['mip'], short_name)
+    cube.units = var.get('raw_units', short_name)
+    cube.convert_units(cmor_info.units)
+    utils.fix_var_metadata(cube, cmor_info)
+
+    # fix coordinates
+    if 'height2m' in cmor_info.dimensions:
+        utils.add_height2m(cube)
+    cube = utils.fix_coords(cube)
+
+    # Fix metadata
+    attrs = cfg['attributes'].copy()
+    attrs['mip'] = var['mip']
+    attrs['version'] = version.replace('_', '-')
+    attrs['reference'] = var['reference']
+    utils.set_global_atts(cube, attrs)
+
+    # Save variable
+    utils.save_variable(cube,
+                        short_name,
+                        out_dir,
+                        attrs,
+                        unlimited_dimensions=['time'])
+
+    if var.get('add_mon'):
+        logger.info("Building monthly means")
+
+        # Calc monthly
+        cube = monthly_statistics(cube)
+        cube.remove_coord('month_number')
+        cube.remove_coord('year')
+
+        # Fix metadata
+        attrs['mip'] = 'Amon'
+
+        # Fix coordinates
+        cube = utils.fix_coords(cube)
+
+        # Save variable
+        utils.save_variable(cube,
+                            short_name,
+                            out_dir,
+                            attrs,
+                            unlimited_dimensions=['time'])
+
+
+def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date):
+    """Cmorization func call."""
+    raw_filename = cfg['filename']
+
+    # Run the cmorization
+    for (short_name, var) in cfg['variables'].items():
+        for version in var['version'].values():
+            logger.info("CMORizing variable '%s'", short_name)
+            filenames = raw_filename.format(raw_file_var=var['raw_file_var'],
+                                            version=version)
+            for filepath in sorted(Path(in_dir).glob(filenames)):
+                _extract_variable(short_name, var, cfg, filepath, out_dir,
+                                  version)
diff --git a/esmvaltool/cmorizers/data/formatters/datasets/aura_tes.ncl b/esmvaltool/cmorizers/data/formatters/datasets/aura_tes.ncl
new file mode 100644
index 0000000000..b971911222
--- /dev/null
+++ b/esmvaltool/cmorizers/data/formatters/datasets/aura_tes.ncl
@@ -0,0 +1,173 @@
+; #############################################################################
+; ESMValTool CMORizer for AURA-TES data
+; #############################################################################
+;
+; Tier
+;    Tier 3: restricted dataset.
+;
+; Source
+;    https://search.earthdata.nasa.gov/search?q=TL3O3M
+;
+; Last access
+;    20181208
+;
+; Download and processing instructions
+;    Select the V004 and V005 projects.
+;    Download the script file for both projects.
+;    To download the data use: wget -i <script_file>.
+;    From the downloaded data, pick only the *.he5 files and put them in
+;    input_dir_path.
+;    Data is freely available, but a registration is required.
+;
+; Modification history
+;    20190108-righi_mattia: adapted to v2.
+;    20140129-righi_mattia: written.
+;
+; #############################################################################
+loadscript(getenv("esmvaltool_root") + \
+           "/data/formatters/interface.ncl")
+loadscript(getenv("esmvaltool_root") + \
+           "/../interface_scripts/auxiliary.ncl")
+
+begin
+
+  ; Script name (for logger)
+  DIAG_SCRIPT = "aura_tes.ncl"
+
+  ; Source name
+  OBSNAME = "AURA-TES"
+
+  ; Tier
+  TIER = 3
+
+  ; Gridtype
+  GRIDTYPE = "Nadir"
+
+  ; Period
+  YEAR1 = get_year(start_year, 2005)
+  YEAR2 = get_year(end_year, 2011)
+
+  ; Selected variable
+  VAR = "tro3"
+
+  ; MIP
+  MIP = "Amon"
+
+  ; Frequency
+  FREQ = "mon"
+
+  ; CMOR table
+  CMOR_TABLE = getenv("cmor_tables") + "/cmip5/Tables/CMIP5_" + MIP
+
+  ; Type
+  TYPE = "sat"
+
+  ; Version
+  VERSION = "1"
+
+  ; Global attributes
+  SOURCE = "https://search.earthdata.nasa.gov/search?q=TL3O3M"
+  REF = "Beer, R., IEEE Trans. Geosci. Rem. Sens., " + \
+    "doi:10.1109/TGRS.2005.863716, 2006"
+  COMMENT = ""
+
+end
+
+begin
+
+  ; Create time coordinate
+  timec = create_timec(YEAR1, YEAR2)
+  datec = ut_calendar(timec, 0)
+
+  ; Loop over time
+  do tt = 0, dimsizes(timec) - 1
+
+    yy = toint(datec(tt, 0))
+    mm = toint(datec(tt, 1))
+
+    log_info("Processing date " + yy + sprinti("%0.2i", mm))
+
+    ; Find files
+    fname = input_dir_path + "TES-Aura_L3-O3-M" + \
+      yy + "m" + sprinti("%0.2i", mm) + "_F01_10.he5"
+
+    if (.not.fileexists(fname)) then  ; alternative name
+      fname = input_dir_path + "TES-Aura_L3-O3-M" + \
+        yy + "m" + sprinti("%0.2i", mm) + "_C01_F01_10.he5"
+    end if
+
+    if (.not.fileexists(fname)) then
+      log_info("File missing, skipping this date")
+      continue
+    end if
+
+    ; Convert to NetCDF
+    destination = output_dir_path + bname(fname) + ".nc"
+    system(str_join((/"ncks", fname, destination/), " "))
+
+    ; Read variable
+    f = addfile(destination, "r")
+    var = f->O3
+
+    ; Read and assign coordinates
+    var!0 = "plev"
+    var&plev = todouble(f->Pressure)
+    var!1 = "lat"
+    var&lat = todouble(f->Latitude)
+    var!2 = "lon"
+    var&lon = todouble(f->Longitude)
+
+    ; Create array
+    if (.not.isdefined("output")) then
+      outdim = array_append_record(dimsizes(timec), dimsizes(var), 0)
+      output = new(outdim, float)
+      output@_FillValue = var@_FillValue
+    end if
+
+    output(tt, :, :, :) = var
+    delete(var)
+    delete(f)
+    system("rm -f " + destination)
+
+  end do
+
+  ; Assign standard FillValue
+  output@_FillValue = FILL
+
+  ; Assign coordinates
+  output!0 = "time"
+  output&time = timec
+  output&plev = output&plev * 100.  ; [hPa] --> [Pa]
+  output&plev@units = "Pa"
+
+  ; Convert units [mol/mol] --> [1e-9]
+  output = output * 1.e9
+  output@units = "1e-9"
+
+  ; Format coordinates
+  format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ)
+
+  ; Set variable attributes
+  tmp = format_variable(output, VAR, CMOR_TABLE)
+  delete(output)
+  output = tmp
+  delete(tmp)
+
+  ; Calculate coordinate bounds
+  bounds = guess_coord_bounds(output, FREQ)
+
+  ; Set global attributes
+  gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT)
+
+  ; Output file
+  DATESTR = YEAR1 + "01-" + YEAR2 + "12"
+  fout = output_dir_path + \
+    str_join((/"OBS", OBSNAME, TYPE, VERSION, \
+               MIP, VAR, DATESTR/), "_") + ".nc"
+
+  ; Write variable
+  write_nc(fout, VAR, output, bounds, gAtt)
+  delete(gAtt)
+  delete(output)
+
+end
diff --git a/esmvaltool/cmorizers/data/formatters/datasets/berkeleyearth.py b/esmvaltool/cmorizers/data/formatters/datasets/berkeleyearth.py
new file mode 100644
index 0000000000..c2be3dce7e
--- /dev/null
+++ b/esmvaltool/cmorizers/data/formatters/datasets/berkeleyearth.py
@@ -0,0 +1,239 @@
+"""ESMValTool CMORizer for BerkeleyEarth data.
+
+Tier
+    Tier 2: other freely-available dataset.
+
+Source
+    http://berkeleyearth.org/data/
+    Monthly Land + Ocean
+    Average Temperature with Air Temperatures at Sea Ice
+    (Recommended; 1850 – Recent)
+    1º x 1º Latitude-Longitude Grid (~400 MB)
+
+Last access
+    20200225
+
+Download and processing instructions
+    Download the following file:
+    http://berkeleyearth.lbl.gov/auto/Global/Gridded/Land_and_Ocean_LatLong1.nc
+"""
+
+import logging
+import os
+import re
+from warnings import catch_warnings, filterwarnings
+
+import cf_units
+import iris
+import numpy as np
+from iris import NameConstraint, coord_categorisation
+
+from esmvaltool.cmorizers.data import utilities as utils
+
+logger = logging.getLogger(__name__)
+
+
+def reinit_broken_time(cube_anom, cube_clim, climstart, climend):
+    """Fix broken time.
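+
+    (Illustrative note on the raw encoding, as assumed here: a value such
+    as 1850.04 in the raw time coordinate corresponds to mid-January 1850.)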
+ + The time coordinates are a big mess (given as floats in years A.D.) + best to reinitialize them from scratch + """ + logger.info("Reinitializing broken time coordinate") + time_raw = cube_anom.coord('time') + + n_years, n_add_mon = len(time_raw.points) // 12, len(time_raw.points) % 12 + start_year = int(time_raw.points[0]) + n_days = (n_years + n_add_mon / 12) * 365.25 + 50 # have some extra length + climcenter = (climend - climstart) // 2 + + times = iris.coords.DimCoord( + np.arange(int(n_days), dtype=np.float64), + var_name='time', + standard_name='time', + long_name='time', + units=cf_units.Unit(f'days since {start_year}-01-01 00:00:00', + calendar=cf_units.CALENDAR_STANDARD)) + + # init a dummy cube to enable coord_categorisation + dummycube = iris.cube.Cube(np.zeros(int(n_days), np.int64), + dim_coords_and_dims=[(times, 0)]) + coord_categorisation.add_year(dummycube, 'time', name='year') + coord_categorisation.add_month_number(dummycube, 'time', name='month') + + # build timecoord for the anomaly cube + dummycube = dummycube.aggregated_by(['year', 'month'], iris.analysis.MEAN) + dummycube = dummycube[:(n_years * 12 + n_add_mon)] + timecoord_anom = dummycube.coord('time') + + # build timecoord for the climatology cube + dummycube_clim = dummycube.extract( + iris.Constraint(year=lambda cell: cell == climstart + climcenter)) + timecoord_clim = dummycube_clim.coord('time') + + # change to the new time coordinates + cube_anom.remove_coord('time') + cube_anom.add_dim_coord(timecoord_anom, 0) + cube_clim.add_dim_coord(timecoord_clim, 0) + + # convert time units to standard + utils.convert_timeunits(cube_anom, 1950) + utils.convert_timeunits(cube_clim, 1950) + + return (cube_anom, cube_clim) + + +def calc_abs_temperature(cube_anom, cube_clim, short_name): + """Derive absolute tas values.""" + logger.info("Deriving absolute temperature fields") + + # prepare cubes + for cube in [cube_anom, cube_clim]: + cube.attributes.pop('valid_max') + cube.attributes.pop('valid_min') + + # declare attributes + fill_value = cube_clim.data.fill_value + dtype = cube_anom.dtype + var_name = short_name + units = cube_clim.units + + # init data array + crds_n_dims = [(cor.copy(), i) for i, cor in enumerate(cube_anom.coords())] + shape = [x[0].shape[0] for x in crds_n_dims] + array = np.ma.ones(shape, dtype=dtype) * fill_value + array.mask = True + array.fill_value = fill_value + + # calculate abs fields + for i in range(0, cube_anom.coord('time').shape[0]): + with catch_warnings(): + filterwarnings( + action='ignore', + message='.* not used since it\ncannot be safely cast to' + ' variable data type *', + category=UserWarning, + module='iris', + ) + array[i] = cube_clim[i % 12].data + cube_anom[i].data + + # build absolute tas cube + cube_abs = iris.cube.Cube(array, + var_name=var_name, + units=units, + dim_coords_and_dims=crds_n_dims) + + return cube_abs + + +def _extr_var_n_calc_abs_tas(short_name, var, cfg, filepath, out_dir): + """Extract variable.""" + # load tas anomaly, climatology and sftlf + with catch_warnings(): + filterwarnings( + action='ignore', + message='Ignoring netCDF variable .* invalid units .*', + category=UserWarning, + module='iris', + ) + filterwarnings( + action='ignore', + message='.* not used since it\ncannot be safely cast to variable' + ' data type *', + category=UserWarning, + module='iris', + ) + cubes = iris.load(filepath) + + # tas anomaly + raw_var = var.get('raw', short_name) + cube_anom = cubes.extract(NameConstraint(var_name=raw_var))[0] + + # tas climatology + 
raw_var_clim = var.get('rawclim', short_name)
+    cube_clim = cubes.extract(NameConstraint(var_name=raw_var_clim))[0]
+    # information on time for the climatology is only present in the long_name
+    climstart, climend = [
+        int(x) for x in re.findall(r"\d{4}", cube_clim.long_name)
+    ]
+
+    # redo the broken time coordinate
+    cube_anom, cube_clim = reinit_broken_time(cube_anom, cube_clim, climstart,
+                                              climend)
+
+    # derive absolute tas values
+    cube_abs = calc_abs_temperature(cube_anom, cube_clim, short_name)
+
+    # fix coordinates
+    logger.info("Fixing coordinates")
+    attrs = cfg['attributes']
+    attrs['mip'] = var['mip']
+    short_names = [short_name, var['short_anom']]
+    for s_name, cube in zip(short_names, [cube_abs, cube_anom]):
+        cmor_info = cfg['cmor_table'].get_variable(var['mip'], s_name)
+
+        cube = utils.fix_coords(cube)
+        if 'height2m' in cmor_info.dimensions:
+            utils.add_height2m(cube)
+
+        cube.units = var['raw_units']
+        if s_name != 'tasa':
+            cube.convert_units(cmor_info.units)
+
+        utils.fix_var_metadata(cube, cmor_info)
+
+    # save temperature data
+    logger.info("Saving temperature data")
+    comments = {
+        'tas':
+        "Temperature time-series calculated from the anomaly "
+        "time-series by adding the temperature climatology "
+        f"for {climstart}-{climend}",
+        'tasa':
+        "Temperature anomaly with respect to the period"
+        f" {climstart}-{climend}"
+    }
+
+    for s_name, cube in zip(short_names, [cube_abs, cube_anom]):
+        attrs['comment'] = comments[s_name]
+        utils.set_global_atts(cube, attrs)
+        utils.save_variable(cube,
+                            s_name,
+                            out_dir,
+                            attrs,
+                            unlimited_dimensions=['time'])
+
+    # sftlf
+    # extract sftlf
+    raw_var_sftlf = var.get('rawsftlf', short_name)
+    cube_sftlf = cubes.extract(NameConstraint(var_name=raw_var_sftlf))[0]
+
+    # fix coordinates
+    cube_sftlf = utils.fix_coords(cube_sftlf)
+
+    # cmorize sftlf units
+    cmor_info_sftlf = cfg['cmor_table'].get_variable(var['rawsftlf_mip'],
+                                                     var['rawsftlf_varname'])
+    attrs_sftlf = cfg['attributes']
+    attrs_sftlf['mip'] = var['rawsftlf_mip']
+    if 'rawsftlf_units' in var:
+        cube_sftlf.units = var['rawsftlf_units']
+        cube_sftlf.convert_units(cmor_info_sftlf.units)
+
+    # fix metadata and save
+    logger.info("Saving sftlf")
+    utils.fix_var_metadata(cube_sftlf, cmor_info_sftlf)
+    utils.set_global_atts(cube_sftlf, attrs_sftlf)
+    utils.save_variable(cube_sftlf, var['rawsftlf_varname'], out_dir,
+                        attrs_sftlf)
+
+
+def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date):
+    """Cmorization func call."""
+    raw_filepath = os.path.join(in_dir, cfg['filename'])
+
+    # Run the cmorization
+    for (short_name, var) in cfg['variables'].items():
+        logger.info("CMORizing variable '%s'", short_name)
+        _extr_var_n_calc_abs_tas(short_name, var, cfg, raw_filepath, out_dir)
diff --git a/esmvaltool/cmorizers/data/formatters/datasets/calipso_goccp.ncl b/esmvaltool/cmorizers/data/formatters/datasets/calipso_goccp.ncl
new file mode 100644
index 0000000000..6401214bf5
--- /dev/null
+++ b/esmvaltool/cmorizers/data/formatters/datasets/calipso_goccp.ncl
@@ -0,0 +1,166 @@
+; #############################################################################
+; ESMValTool CMORizer for CALIPSO-GOCCP data
+; #############################################################################
+;
+; Tier
+;    Tier 2: other freely-available dataset.
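+;
+; Note
+;    The conversion factor CONV = 100 applied below assumes that the raw
+;    GOCCP 3D cloud fraction is stored as a fraction in [0, 1]; it is
+;    rescaled to the percent units expected by the CMIP5 cfMon table.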
+; +; Source +; ftp://ftp.climserv.ipsl.polytechnique.fr/ +; +; Last access +; 20200127 +; +; Download and processing instructions +; Download the data from: +; ftp://ftp.climserv.ipsl.polytechnique.fr/ +; path: /cfmip/GOCCP_v3/3D_CloudFraction/grid_2x2xL40/yyyy/avg/ +; Put all files under a single directory (no subdirectories with years). +; 3D_CloudFraction330m_yyyymm_avg_CFMIP2_sat_3.1.2.nc +; +; Modification history +; 20200127-laue_axel: written +; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + \ + "/data/formatters/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "calipso-goccp.ncl" + + ; Source name + OBSNAME = "CALIPSO-GOCCP" + + ; Tier + TIER = 2 + + ; Period + YEAR1 = get_year(start_year, 2007) + YEAR2 = get_year(end_year, 2015) + + ; Selected variable (standard name) + VAR = (/"clcalipso"/) + + ; Names in the raw data + NAME = (/"clcalipso"/) + + ; Conversion factor + CONV = (/100./) + + ; MIP + MIP = (/"cfMon"/) + + ; Frequency + FREQ = (/"mon"/) + + ; CMOR table + CMOR_TABLE = getenv("cmor_tables") + \ + (/"/cmip5/Tables/CMIP5_cfMon"/) + + ; Type + TYPE = "sat" + + ; Version + VERSION = "3.1.2" + + ; Global attributes + SOURCE = "ftp://ftp.climserv.ipsl.polytechnique.fr/" + \ + "/cfmip/GOCCP_v3/3D_CloudFraction/grid_2x2xL40/yyyy/avg/" + REF = "Chepfer et al., J. Geophys. Res., doi:10.1029/2009JD012251, 2010" + COMMENT = "" + +end + +begin + + do vv = 0, dimsizes(VAR) - 1 + + log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") + + time = create_timec(YEAR1, YEAR2) + date = cd_calendar(time, 1) + + ; Create timeseries + do yy = YEAR1, YEAR2 + + syear = sprinti("%i", yy) + do mm = 1, 12 + + smonth = sprinti("%0.2i", mm) + + ; Read file + fname = input_dir_path + "3D_CloudFraction330m_" + syear + smonth + \ + "_avg_CFMIP2_" + TYPE + "_" + VERSION + ".nc" + + ; No files found + if (all(ismissing(fname))) then + continue + end if + + ; Extract data + f = addfile(fname, "r") + + xx = f->$NAME(vv)$ + ; Convert units + xx = xx * CONV(vv) + + ; Assign to global array + if (.not.isdefined("output")) then + dims = dimsizes(xx) + dims(0) = dimsizes(time) + output = new(dims, float) + output!0 = "time" + output&time = time + output!1 = "alt40" + ; km --> m and round to 1 m + output&alt40 = round(f->alt_mid * 1000.0, 0) + output!2 = "lat" + output&lat = f->latitude + output!3 = "lon" + output&lon = f->longitude + end if + output(ind(toint(yy * 100 + mm).eq.date), :, :, :) = (/xx/) + delete(fname) + delete(f) + end do + end do + + ; Set fill value + output = where(output.eq.-999, output@_FillValue, output) + + ; Format coordinates + output!0 = "time" + output!1 = "alt40" + output!2 = "lat" + output!3 = "lon" + format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ(vv)) + + ; Set variable attributes + tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ(vv)) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = YEAR1 + "01-" + YEAR2 + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" + + ; Write variable + write_nc(fout, VAR(vv), output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + end do + +end diff --git a/esmvaltool/cmorizers/data/formatters/datasets/calipso_icecloud.ncl 
b/esmvaltool/cmorizers/data/formatters/datasets/calipso_icecloud.ncl new file mode 100644 index 0000000000..3e398b0dfa --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/calipso_icecloud.ncl @@ -0,0 +1,303 @@ +; ############################################################################# +; ESMValTool CMORizer for CALIPSO-Lidar Level 3 Ice Cloud Data, Standard V1-00 +; ############################################################################# +; +; Tier +; Tier 3: restricted dataset (registration required). +; +; Source +; EarthData via https://eosweb.larc.nasa.gov/project/CALIPSO/ +; CAL_LID_L3_Ice_Cloud-Standard_V1-00 +; +; Last access +; 20220804 +; +; Download and processing instructions +; (requires EarthData login; see https://urs.earthdata.nasa.gov/) +; 1) Go to https://eosweb.larc.nasa.gov/project/CALIPSO/ +; CAL_LID_L3_Ice_Cloud-Standard_V1-00 +; 2) Click on "Get Dataset" +; 3) Select Granules "A" containing both, day and night (Day/Night filter +; = "Both") +; 4) Download selected granules +; 5) Enter EarthData login and password (https://urs.earthdata.nasa.gov/) +; 6) Follow download instructions in email from EarthData and put all +; files in the same directory +; +; Modification history +; 20210331-lauer_axel: updated download information +; 20200414-lauer_axel: written +; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + \ + "/data/formatters/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "calipso_icecloud.ncl" + + ; Source name + OBSNAME = "CALIPSO-ICECLOUD" + + ; Tier + TIER = 3 + + ; Period (complete years only) + YEAR1 = 2007 + YEAR2 = 2015 + + ; Selected variable (standard name) + VAR = (/"cli"/) + + ; MIP + MIP = (/"Amon"/) + + ; Frequency + FREQ = (/"mon"/) + + ; CMOR table + CMOR_TABLE = getenv("cmor_tables") + \ + (/"/cmip5/Tables/CMIP5_" + MIP/) + + ; Type + TYPE = "sat" + + ; Version + VERSION = "1-00" + + ; Global attributes + SOURCE = "https://eosweb.larc.nasa.gov/project/CALIPSO/" + \ + "CAL_LID_L3_Ice_Cloud-Standard_V1-00" + REF = "doi:10.5067/CALIOP/CALIPSO/L3_ICE_CLOUD-STANDARD-V1-00" + COMMENT = "Atmospheric Science Data Center (ASDC)" + +end + +begin + + R_gas = 8.3145 ; gas constant (J/mol/K) + mw_air = 28.9647e-3 ; molecular weight of dry air (kg/mol) + R_air = R_gas / mw_air + + ; CMIP6 "plev27" pressure levels (Pa) + + plev27 = (/100000., 97500., 95000., 92500., 90000., 87500., 85000., \ + 82500., 80000., 77500., 75000., 70000., 65000., 60000., \ + 55000., 50000., 45000., 40000., 35000., 30000., 25000., \ + 22500., 20000., 17500., 15000., 12500., 10000./) + + vv = 0 + log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") + + ; calculate level bounds + + plev27_bnds = new(dimsizes(plev27) + 1, float) + do i = 1, dimsizes(plev27) - 1 + plev27_bnds(i) = 0.5 * (plev27(i - 1) + plev27(i)) + end do + plev27_bnds(0) = plev27(0) + 0.5 * (plev27(0) - plev27(1)) + n = dimsizes(plev27) + plev27_bnds(n) = plev27(n - 1) - 0.5 * (plev27(n - 2) - plev27(n - 1)) + + ; initialize time + + time = create_timec(YEAR1, YEAR2) + date = cd_calendar(time, 1) + + ; Create timeseries + do yy = YEAR1, YEAR2 + + syear = sprinti("%i", yy) + do mm = 1, 12 + + smonth = sprinti("%0.2i", mm) + + print(syear + "-" + smonth) + + ; Read data + + ; There are 3 granules available: + ; day: CAL_LID_L3_Ice_Cloud-Standard-V1-00.yyyy-mmD.hdf + ; night: CAL_LID_L3_Ice_Cloud-Standard-V1-00.yyyy-mmN.hdf + ; both: CAL_LID_L3_Ice_Cloud-Standard-V1-00.yyyy-mmA.hdf + ; Here, we only 
use the granule "A" (= day + night).
+
+      fname = input_dir_path + "CAL_LID_L3_Ice_Cloud-Standard-V1-00." + \
+        syear + "-" + smonth + "A.hdf"
+
+      ; No input file found
+      if (ismissing(fname)) then
+        error_msg("f", DIAG_SCRIPT, "main", "input file for " + syear + \
+                  "-" + smonth + " not found")
+      end if
+
+      ; Extract data
+      f = addfile(fname, "r")
+
+      iwc_hist = f->Ice_Water_Content_Histogram
+      iwc_bnds = f->Ice_Water_Content_Bin_Boundaries
+      cldfree_samp = f->Cloud_Free_Samples
+      cld_samp = f->Cloud_Samples
+      pres = f->Pressure_Mean * 100.0      ; hPa --> Pa
+      temp = f->Temperature_Mean + 273.15  ; deg C --> K
+
+      ; Create output array
+      if (.not.isdefined("output")) then
+;        alt = round(f->Altitude_Midpoint * 1000.0, 0)  ; km, round to 1 m
+        lat = f->Latitude_Midpoint
+        lon = f->Longitude_Midpoint
+        ndims = new(4, integer)
+        ndims(0) = dimsizes(time)
+        ndims(1) = dimsizes(plev27)
+        ndims(2) = dimsizes(lat)
+        ndims(3) = dimsizes(lon)
+        output = new(ndims, float)
+        output!0 = "time"
+        output&time = time
+        output!1 = "plev"
+        output&plev = plev27
+        output!2 = "lat"
+        output&lat = lat
+        output!3 = "lon"
+        output&lon = lon
+
+        ; dimensions = lat, lon, plev
+        iwc_avg_plev27 = new((/ndims(2), ndims(3), ndims(1)/), float)
+        iwc_avg_plev27!0 = "lat"
+        iwc_avg_plev27!1 = "lon"
+        iwc_avg_plev27!2 = "plev"
+      end if
+
+      ; Calculate grid-box average ice water content from ice water content
+      ; histogram. Formula from https://www-calipso.larc.nasa.gov/resources/
+      ; calipso_users_guide/qs/cal_lid_l3_ice_cloud_v1-00.php
+      ; (section "In-cloud IWC and Grid-averaged IWC").
+      ;
+      ; Note: 1) ice water content is derived from the extinction
+      ;          coefficient. The extinction coefficient can be negative,
+      ;          thus negative ice water contents are produced. These are
+      ;          included in the level 3 product used here to "accurately
+      ;          represent the retrieved population in the level 2 data
+      ;          product" (that was used to derive the level 3 product
+      ;          used here).
+      ;          However, these unphysical ice water contents are excluded
+      ;          here when calculating the grid box average ice water
+      ;          content for comparison with earth system models.
+      ;       2) ice water content is only calculated for clouds with
+      ;          1.0e-5 <= iwc <= 1.0 g/m3.
+
+      ; find first bin with iwc >= 1e-5 g/m3 (first bin with valid data)
+
+      tmp = ind(iwc_bnds(:, 0) .ge. 1.0e-5)  ; min bound
+      if (all(ismissing(tmp))) then
+        error_msg("f", DIAG_SCRIPT, "main", "invalid bounds for histogram")
+      end if
+      i0 = tmp(0)
+      delete(tmp)
+
+      ; find last bin with iwc <= 1.0 g/m3 (last bin with valid data)
+      tmp = ind(iwc_bnds(:, 2) .le. 1.0)  ; max bound
+      if (all(ismissing(tmp))) then
+        error_msg("f", DIAG_SCRIPT, "main", "invalid bounds for histogram")
+      end if
+      i1 = tmp(dimsizes(tmp) - 1)
+      delete(tmp)
+
+      iwc = iwc_hist * conform_dims(dimsizes(iwc_hist), iwc_bnds(:, 1), 3)
+      samples = tofloat(cldfree_samp + cld_samp)
+      samples@_FillValue = -999.
+      samples = where(samples.gt.0., samples, samples@_FillValue)
+      iwc_avg = dim_sum(iwc(:, :, :, i0:i1)) / samples
+
+      ; *** convert units from g/m3 to kg/kg
+
+      ; 1. calculate density of air (kg/m3)
+      rho_air = pres / (R_air * temp)
+      ; 2. convert from g/m3 to kg/kg
+      iwc_avg = 1.0e-3 * iwc_avg / rho_air
+
+;      fname = "debug.nc"
+;      system("rm " + fname)
+;      debugfile = addfile(fname, "c")
+;      debugfile->rho = rho_air
+
+      ; *** aggregate CALIPSO levels to plev27 vertical levels
+
+      pres1d = ndtooned(pres)  ; convert to 1D array
+
+      do ilev = 0, dimsizes(plev27) - 1
+        ; all levels not within plev27_bnds(ilev) - plev27_bnds(ilev+1) are
+        ; masked as missing values, which are then ignored when calculating
+        ; averages with function dim_avg
+        mapind = ind((pres1d .gt. plev27_bnds(ilev)) .or. \
+                     (pres1d .lt. plev27_bnds(ilev + 1)))
+        iwc1d = ndtooned(iwc_avg)
+        iwc1d@_FillValue = -999.
+        if (.not.all(ismissing(mapind))) then
+          iwc1d(mapind) = iwc1d@_FillValue
+        end if
+        delete(mapind)
+        iwc_select = onedtond(iwc1d, dimsizes(iwc_avg))
+        delete(iwc1d)
+        ; the layers of the CALIPSO data are quite thin, so we do not bother
+        ; calculating weights for each vertical level but simply
+        ; calculate arithmetic averages when mapping the CALIPSO data to
+        ; the plev27 vertical levels
+        ; note: dim_avg ignores missing values
+        iwc_avg_plev27(:, :, ilev) = dim_avg(iwc_select)
+        delete(iwc_select)
+      end do
+
+      delete(pres1d)
+
+      ; *** map gridbox average ice water content to output array
+
+      output(ind(toint(yy * 100 + mm).eq.date), :, :, :) = \
+        (/iwc_avg_plev27(plev|:, lat|:, lon|:)/)
+
+      ; *** clean up
+
+      delete(fname)
+      delete(f)
+      delete(iwc_avg)
+      delete(iwc)
+      delete(samples)
+      delete(pres)
+      delete(temp)
+      delete(rho_air)
+      delete(cldfree_samp)
+      delete(cld_samp)
+    end do
+  end do
+
+  ; Set fill value
+  output = where(output.eq.-999, output@_FillValue, output)
+
+  ; Format coordinates
+  format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ(vv))
+
+  ; Set variable attributes
+  tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv))
+  delete(output)
+  output = tmp
+  delete(tmp)
+
+  ; Calculate coordinate bounds
+  bounds = guess_coord_bounds(output, FREQ(vv))
+
+  ; Set global attributes
+  gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT)
+
+  ; Output file
+  DATESTR = YEAR1 + "01-" + YEAR2 + "12"
+  fout = output_dir_path + \
+    str_join((/"OBS", OBSNAME, TYPE, VERSION, \
+               MIP(vv), VAR(vv), DATESTR/), "_") + ".nc"
+
+  ; Write variable
+  write_nc(fout, VAR(vv), output, bounds, gAtt)
+  delete(gAtt)
+  delete(output)
+  delete(bounds)
+
+end
diff --git a/esmvaltool/cmorizers/data/formatters/datasets/cds_satellite_albedo.py b/esmvaltool/cmorizers/data/formatters/datasets/cds_satellite_albedo.py
new file mode 100644
index 0000000000..92d857ae9a
--- /dev/null
+++ b/esmvaltool/cmorizers/data/formatters/datasets/cds_satellite_albedo.py
@@ -0,0 +1,217 @@
+r"""ESMValTool CMORizer for cds-satellite-albedo data.
+
+Tier
+    Tier 3
+
+Source
+    https://cds.climate.copernicus.eu/
+    cdsapp#!/dataset/satellite-albedo?tab=form
+
+Last access
+    20190401
+
+Download and processing instructions
+    - Download the data from source to the right directory
+      using the download script
+    - Decompress the files within the directory:
+      "find . -name '*.tar.gz' -execdir tar -xzvf '{}' \;"
+
+Notes
+-----
+    - This script regrids and cmorizes the above dataset.
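+    - Processing order, as implemented in cmorization() below: regrid each
+      input file to the configured resolution, attach time_bnds derived
+      from the time_coverage attributes, split the cubes by platform
+      (SPOT-4 / SPOT-5), concatenate over time, then CMORize.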
+ +Modification history + 20191208-crezee_bas: written based on cds-satellite-lai-fapar + +""" + +import glob +import logging +import os +from copy import deepcopy +from datetime import datetime +from warnings import catch_warnings, filterwarnings + +import cf_units +import iris +from esmvalcore.preprocessor import regrid +from iris import NameConstraint + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def _attrs_are_the_same(cubelist): + # assume they are the same + attrs_the_same = True + allattrs = cubelist[0].attributes + for key in allattrs: + try: + unique_attr_vals = {cube.attributes[key] for cube in cubelist} + # This exception is needed for valid_range, which is an + # array and therefore not hashable + except TypeError: + unique_attr_vals = { + tuple(cube.attributes[key]) + for cube in cubelist + } + if len(unique_attr_vals) > 1: + attrs_the_same = False + print(f"Different values found for {key}-attribute: " + f"{unique_attr_vals}") + return attrs_the_same + + +def _cmorize_dataset(in_file, var, cfg, out_dir): + logger.info("CMORizing variable '%s' from input file '%s'", + var['short_name'], in_file) + attributes = deepcopy(cfg['attributes']) + attributes['mip'] = var['mip'] + + cmor_table = cfg['cmor_table'] + definition = cmor_table.get_variable(var['mip'], var['short_name']) + + cube = iris.load_cube(str(in_file), + constraint=NameConstraint(var_name=var['raw'])) + + # Set correct names + cube.var_name = definition.short_name + if definition.standard_name: + cube.standard_name = definition.standard_name + + cube.long_name = definition.long_name + + # Convert units if required + cube.convert_units(definition.units) + + # Set global attributes + utils.set_global_atts(cube, attributes) + + logger.info("Saving CMORized cube for variable %s", cube.var_name) + utils.save_variable(cube, cube.var_name, out_dir, attributes) + + return in_file + + +def _regrid_dataset(in_dir, var, cfg): + """Regridding of original files. + + This function regrids each file and write to disk appending 'regrid' + in front of filename. + """ + filelist = glob.glob(os.path.join(in_dir, var['file'])) + + for infile in filelist: + _, infile_tail = os.path.split(infile) + outfile_tail = infile_tail.replace('c3s', 'c3s_regridded') + outfile = os.path.join(cfg['work_dir'], outfile_tail) + with catch_warnings(): + filterwarnings( + action='ignore', + # Full message: + # UserWarning: Skipping global attribute 'long_name': + # 'long_name' is not a permitted attribute + message="Skipping global attribute 'long_name'", + category=UserWarning, + module='iris', + ) + cube = iris.load_cube(infile, + constraint=NameConstraint( + var_name=var['raw'])) + cube = regrid(cube, cfg['custom']['regrid_resolution'], 'nearest') + logger.info("Saving: %s", outfile) + + iris.save(cube, outfile) + + +def _set_time_bnds(in_dir, var): + """Set time_bnds by using attribute and returns a cubelist.""" + # This is a complicated expression, but necessary to keep local + # variables below the limit, otherwise prospector complains. + cubelist = iris.load( + glob.glob( + os.path.join(in_dir, var['file'].replace('c3s', 'c3s_regridded')))) + + # The purpose of the following loop is to remove any attributes + # that differ between cubes (otherwise concatenation over time fails). + # In addition, care is taken of the time coordinate, by adding the + # time_coverage attributes as time_bnds to the time coordinate. 
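+    # A worked example of the conversion done below (illustrative values,
+    # not taken from the data): with time unit 'days since 2000-01-01' and
+    # a standard calendar, time_coverage_start = '2000-01-01T00:00:00Z' and
+    # time_coverage_end = '2000-01-31T00:00:00Z' are converted by
+    # cf_units.date2num to the bounds [0.0, 30.0].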
+ for n_cube, _ in enumerate(cubelist): + time_coverage_start = cubelist[n_cube].\ + attributes.pop('time_coverage_start') + time_coverage_end = cubelist[n_cube].\ + attributes.pop('time_coverage_end') + + # Now put time_coverage_start/end as time_bnds + # Convert time_coverage_xxxx to datetime + bnd_a = datetime.strptime(time_coverage_start, "%Y-%m-%dT%H:%M:%SZ") + bnd_b = datetime.strptime(time_coverage_end, "%Y-%m-%dT%H:%M:%SZ") + + # Put in shape for time_bnds + time_bnds_datetime = [bnd_a, bnd_b] + + # Read dataset time unit and calendar from file + dataset_time_unit = str(cubelist[n_cube].coord('time').units) + dataset_time_calender = cubelist[n_cube].coord('time').units.calendar + # Convert datetime + time_bnds = cf_units.date2num(time_bnds_datetime, dataset_time_unit, + dataset_time_calender) + # Put them on the file + cubelist[n_cube].coord('time').bounds = time_bnds + + return cubelist + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + # run the cmorization + # Pass on the workdir to the cfg dictionary + cfg['work_dir'] = cfg_user.work_dir + # If it doesn't exist, create it + if not os.path.isdir(cfg['work_dir']): + logger.info("Creating working directory for regridding: %s", + cfg['work_dir']) + os.mkdir(cfg['work_dir']) + + for short_name, var in cfg['variables'].items(): + var['short_name'] = short_name + logger.info("Processing var %s", short_name) + + # Regridding + logger.info("Start regridding to: %s", + cfg['custom']['regrid_resolution']) + _regrid_dataset(in_dir, var, cfg) + logger.info("Finished regridding") + + # File concatenation + logger.info("Start setting time_bnds") + cubelist = _set_time_bnds(cfg['work_dir'], var) + + attrs_to_remove = ['identifier', 'date_created'] + for cube in cubelist: + for attr in attrs_to_remove: + cube.attributes.pop(attr) + + # Loop over two different platform names + for platformname in ['SPOT-4', 'SPOT-5']: + # Now split the cubelist on the different platform + logger.info("Start processing part of dataset: %s", platformname) + cubelist_platform = cubelist.extract( + iris.AttributeConstraint(platform=platformname)) + if cubelist_platform: + assert _attrs_are_the_same(cubelist_platform) + cube = cubelist_platform.concatenate_cube() + else: + logger.warning( + "No files found for platform %s \ + (check input data)", platformname) + continue + savename = os.path.join(cfg['work_dir'], + var['short_name'] + platformname + '.nc') + logger.info("Saving as: %s", savename) + iris.save(cube, savename) + logger.info("Finished file concatenation over time") + logger.info("Start CMORization of file %s", savename) + _cmorize_dataset(savename, var, cfg, out_dir) + logger.info("Finished regridding and CMORizing %s", savename) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/cds_satellite_lai_fapar.py b/esmvaltool/cmorizers/data/formatters/datasets/cds_satellite_lai_fapar.py new file mode 100644 index 0000000000..73d354b7ed --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/cds_satellite_lai_fapar.py @@ -0,0 +1,223 @@ +"""ESMValTool CMORizer for cds-satellite-lai-fapar data. 
+ +Tier + Tier 3 +Source + https://cds.climate.copernicus.eu/cdsapp#!/dataset/satellite-lai-fapar?tab=form +Last access + 20190703 + +Download and processing instructions + - Open in a browser the data source as specified above + - Put the right ticks: + - Tick variables LAI and FAPAR + - Tick satellite SPOT (System Pour l'Observation de la Terre) + - Tick sensor VGT (Vegetation) + - Tick horizontal resolution 1km + - Tick product version V1 + - Tick all available years + - Tick all available months + - Tick Nominal day 20 + - Click 'submit form' + - According to ESMValTool practice, put them in the right rawobsdir folder + +Notes +----- + - This script regrids and cmorizes the above dataset. + - Request might need to be split into chunks to not exceed download limit + +Caveats + - Fails setting standard name for variable FAPAR + +Modification history + 20200512-crezee_bas: adapted to reflect changes in download form by CDS. + 20190703-crezee_bas: written. +""" + +import glob +import logging +import os +from copy import deepcopy +from datetime import datetime +from warnings import catch_warnings, filterwarnings + +import cf_units +import iris +from esmvalcore.preprocessor import regrid +from iris import NameConstraint + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def _attrs_are_the_same(cubelist): + # assume they are the same + attrs_the_same = True + allattrs = cubelist[0].attributes + for key in allattrs: + try: + unique_attr_vals = {cube.attributes[key] for cube in cubelist} + # This exception is needed for valid_range, which is an + # array and therefore not hashable + except TypeError: + unique_attr_vals = { + tuple(cube.attributes[key]) + for cube in cubelist + } + if len(unique_attr_vals) > 1: + attrs_the_same = False + print(f"Different values found for {key}-attribute: " + f"{unique_attr_vals}") + return attrs_the_same + + +def _cmorize_dataset(in_file, var, cfg, out_dir): + logger.info("CMORizing variable '%s' from input file '%s'", + var['short_name'], in_file) + attributes = deepcopy(cfg['attributes']) + attributes['mip'] = var['mip'] + + cmor_table = cfg['cmor_table'] + definition = cmor_table.get_variable(var['mip'], var['short_name']) + + cube = iris.load_cube(str(in_file), + constraint=NameConstraint(var_name=var['raw'])) + + # Set correct names + cube.var_name = definition.short_name + if definition.standard_name: + cube.standard_name = definition.standard_name + + cube.long_name = definition.long_name + + # Convert units if required + cube.convert_units(definition.units) + + # Set global attributes + utils.set_global_atts(cube, attributes) + + logger.info("Saving CMORized cube for variable %s", cube.var_name) + utils.save_variable(cube, cube.var_name, out_dir, attributes) + + return in_file + + +def _regrid_dataset(in_dir, var, cfg): + """Regridding of original files. + + This function regrids each file and write to disk appending 'regrid' + in front of filename. 
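+
+    For example (hypothetical filename, for illustration only): an input
+    file 'c3s_..._VGT.nc' from in_dir would be written to the work
+    directory as 'c3s_regridded_..._VGT.nc', i.e. 'c3s' is replaced by
+    'c3s_regridded' rather than 'regrid' being literally prepended.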
+ """ + filelist = glob.glob(os.path.join(in_dir, var['file'])) + for infile in filelist: + _, infile_tail = os.path.split(infile) + outfile_tail = infile_tail.replace('c3s', 'c3s_regridded') + outfile = os.path.join(cfg['work_dir'], outfile_tail) + with catch_warnings(): + filterwarnings( + action='ignore', + # Full message: + # UserWarning: Skipping global attribute 'long_name': + # 'long_name' is not a permitted attribute + message="Skipping global attribute 'long_name'", + category=UserWarning, + module='iris', + ) + lai_cube = iris.load_cube(infile, + constraint=NameConstraint( + var_name=var['raw'])) + lai_cube = regrid(lai_cube, cfg['custom']['regrid_resolution'], + 'nearest') + logger.info("Saving: %s", outfile) + + iris.save(lai_cube, outfile) + + +def _set_time_bnds(in_dir, var): + """Set time_bnds by using attribute and returns a cubelist.""" + # This is a complicated expression, but necessary to keep local + # variables below the limit, otherwise prospector complains. + cubelist = iris.load( + glob.glob( + os.path.join(in_dir, var['file'].replace('c3s', 'c3s_regridded')))) + + # The purpose of the following loop is to remove any attributes + # that differ between cubes (otherwise concatenation over time fails). + # In addition, care is taken of the time coordinate, by adding the + # time_coverage attributes as time_bnds to the time coordinate. + for n_cube, _ in enumerate(cubelist): + time_coverage_start = cubelist[n_cube].\ + attributes.pop('time_coverage_start') + time_coverage_end = cubelist[n_cube].\ + attributes.pop('time_coverage_end') + + # Now put time_coverage_start/end as time_bnds + # Convert time_coverage_xxxx to datetime + bnd_a = datetime.strptime(time_coverage_start, "%Y-%m-%dT%H:%M:%SZ") + bnd_b = datetime.strptime(time_coverage_end, "%Y-%m-%dT%H:%M:%SZ") + + # Put in shape for time_bnds + time_bnds_datetime = [bnd_a, bnd_b] + + # Read dataset time unit and calendar from file + dataset_time_unit = str(cubelist[n_cube].coord('time').units) + dataset_time_calender = cubelist[n_cube].coord('time').units.calendar + # Convert datetime + time_bnds = cf_units.date2num(time_bnds_datetime, dataset_time_unit, + dataset_time_calender) + # Put them on the file + cubelist[n_cube].coord('time').bounds = time_bnds + + return cubelist + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + # run the cmorization + # Pass on the workdir to the cfg dictionary + cfg['work_dir'] = cfg_user.work_dir + # If it doesn't exist, create it + if not os.path.isdir(cfg['work_dir']): + logger.info("Creating working directory for regridding: %s", + cfg['work_dir']) + os.mkdir(cfg['work_dir']) + + for short_name, var in cfg['variables'].items(): + var['short_name'] = short_name + logger.info("Processing var %s", short_name) + + # Regridding + logger.info("Start regridding to: %s", + cfg['custom']['regrid_resolution']) + _regrid_dataset(in_dir, var, cfg) + logger.info("Finished regridding") + + # File concatenation + logger.info("Start setting time_bnds") + cubelist = _set_time_bnds(cfg['work_dir'], var) + + # Loop over two different platform names + for platformname in ['SPOT-4', 'SPOT-5']: + # Now split the cubelist on the different platform + logger.info("Start processing part of dataset: %s", platformname) + cubelist_platform = cubelist.extract( + iris.AttributeConstraint(platform=platformname)) + for n_cube, _ in enumerate(cubelist_platform): + cubelist_platform[n_cube].attributes.pop('identifier') + if cubelist_platform: + assert 
_attrs_are_the_same(cubelist_platform) + cube = cubelist_platform.concatenate_cube() + else: + logger.warning( + "No files found for platform %s \ + (check input data)", platformname) + continue + savename = os.path.join(cfg['work_dir'], + var['short_name'] + platformname + '.nc') + logger.info("Saving as: %s", savename) + iris.save(cube, savename) + logger.info("Finished file concatenation over time") + logger.info("Start CMORization of file %s", savename) + _cmorize_dataset(savename, var, cfg, out_dir) + logger.info("Finished regridding and CMORizing %s", savename) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/cds_satellite_soil_moisture.ncl b/esmvaltool/cmorizers/data/formatters/datasets/cds_satellite_soil_moisture.ncl new file mode 100644 index 0000000000..338be44a96 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/cds_satellite_soil_moisture.ncl @@ -0,0 +1,173 @@ +; ############################################################################# +; ESMValTool CMORizer for CDS-SATELLITE-SOIL-MOISTURE data +; ############################################################################# +; +; Tier +; Tier 3: restricted dataset. +; +; Source +; https://cds.climate.copernicus.eu/cdsapp#!/dataset/ +; satellite-soil-moisture?tab=form +; +; Last access +; 20201207 +; +; Download and processing instructions +; - Use the download script to retrieve the data. +; - Use the configuration file to choose what data to process. +; - Available years: 1992-2019 (ACTIVE) or 1979-2019 (others). +; - Versions: v201812.0.0 and v201912.0.0 +; +; Modification history +; 20201207-kazeroni-remi: fix ACTIVE version +; 20200722-crezee_bas: added PASSIVE/ACTIVE versions +; 20200602-crezee_bas: include ICDR +; 20200525-righi_mattia: fix time checker. +; 20190522-crezee_bas: time period extended. +; 20190314-crezee_bas: written based on cmorize_obs_esacci_soilmoisture.ncl. +; +; Caveats +; - Daily data has to consist of complete months +; - Version ACTIVE can only deal with complete years for both +; monthly and daily data +; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + \ + "/data/formatters/interface.ncl") +; Read in variable JOBS which defines what combination of +; [time resolution] x [sensor] will be processed. +loadscript(getenv("esmvaltool_root") + \ + "/data/cmor_config/" + \ + "CDS-SATELLITE-SOIL-MOISTURE.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "cds_satellite_soil_moisture.ncl" + + ; Source name + OBSNAME = "CDS-SATELLITE-SOIL-MOISTURE" + + ; Tier + TIER = 3 + + ; Period + YEAR1 = get_year(start_year, 1978) + YEAR2 = get_year(end_year, 2020) + + ; Selected variables (standard name) + VAR = (/"sm", "sm"/) + + ; Name in the raw data + NAME = (/"sm", "sm"/) + + ; MIP + MIP = (/"day", "Lmon"/) + + ; Frequency + FREQ = (/"day", "mon"/) + + ; Name of frequency in filename + FREQNAME = (/"-DAILY-", "-MONTHLY-"/) + + ; CMOR table + CMOR_TABLE = getenv("cmor_tables") + "/custom/CMOR_" + VAR + ".dat" + + ; Type + TYPE = "sat" + + ; Global attributes + SOURCE = "https://cds.climate.copernicus.eu/cdsapp#!/" + \ + "dataset/satellite-soil-moisture" + REF = "Gruber et al., Earth Syst. Sci. Data, " + \ + "doi: 10.5194/essd-11-717-2019, 2019." 
+ COMMENT = "" + + SENSORS_FILEBASENAMES = (/"C3S-SOILMOISTURE-L3S-SSMV-COMBINED", \ + "C3S-SOILMOISTURE-L3S-SSMV-PASSIVE", \ + "C3S-SOILMOISTURE-L3S-SSMS-ACTIVE"/) + ; Version + VERSIONS = (/"COMBINED", "PASSIVE", "ACTIVE"/) + PATTERNS = (/"????000000-?CDR-v201?12.0.0.nc", \ + "????000000-?CDR-v201?12.0.0.nc", \ + "????000000-?CDR-v201?12.0.0.nc"/) + +end + +begin + + do xx = 0, dimsizes(VERSIONS) - 1 + + do vv = 0, dimsizes(FREQ) - 1 + + if JOBS(xx, vv) then + log_info("Processing " + VAR(vv) + " " + VERSIONS(xx) + \ + " (" + MIP(vv) + ") as requested.") + + do yy = YEAR1, YEAR2 + + ; Set list of files + files = systemfunc("ls " + input_dir_path + \ + SENSORS_FILEBASENAMES(xx) + \ + FREQNAME(vv) + yy + \ + PATTERNS(xx)) + + if all(ismissing(files)) then + log_info("Skipping year " + yy + " since no data.") + continue + end if + + ; Retrieve start and end date (YYYYMM(DD)) + start_date = systemfunc("basename " + files(0)) + start_date := str_get_cols(str_get_field(start_date, 7, "-"), 0, 7) + end_date = systemfunc("basename " + files(dimsizes(files) - 1)) + end_date := str_get_cols(str_get_field(end_date, 7, "-"), 0, 7) + + ; Read data + f = addfiles(files, "r") + delete(files) + output = f[:]->$NAME(vv)$ + delete(f) + + ; Format coordinates + output!0 = "time" + output!1 = "lat" + output!2 = "lon" + format_coords(output, start_date, end_date, FREQ(vv)) + + ; Set variable attributes + tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ(vv)) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = start_date + "-" + end_date + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSIONS(xx), \ + MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" + + ; Write variable + write_nc(fout, VAR(vv), output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + end do + + else + log_info("Skipping " + VAR(vv) + " " + VERSIONS(xx) + \ + " (" + MIP(vv) + ") as requested.") + end if + + end do + + end do + +end diff --git a/esmvaltool/cmorizers/data/formatters/datasets/cds_uerra.py b/esmvaltool/cmorizers/data/formatters/datasets/cds_uerra.py new file mode 100644 index 0000000000..65c7b33635 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/cds_uerra.py @@ -0,0 +1,288 @@ +"""ESMValTool CMORizer for CDS-UERRA version UERRA-HARMONIE. + +Tier + Tier 3 +Source + https://cds.climate.copernicus.eu/cdsapp#!/dataset/reanalysis-uerra-europe-soil-levels +Last access + 20191104 + +Download and processing instructions + - Open in a browser the data source as specified above + - Put the right ticks: + - Tick Origin UERRA-HARMONIE + - Tick Variable 'Volumetric soil moisture' + - Tick Soil level 1, 2, 3 + - Tick all available years + - Tick all available months + - Tick all available days + - Tick all available timesteps + - Click 'submit form' + - According to ESMValTool practice, put them in the right rawobsdir folder + +Notes +----- + - It might be needed to split up the request into smaller chunks + +Modification history + 20190821-A_crezee_bas: written. 
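+
+Example
+    A sketch of the cell-specification helper defined in this module
+    (parse_cell_spec), shown here for illustration:
+
+        parse_cell_spec('0.25x0.25')  # returns (0.25, 0.25)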
+""" + +import glob +import logging +import os +import re +from copy import deepcopy + +import cf_units +import iris +import numpy as np +import xarray as xr +import xesmf as xe +from esmvalcore.preprocessor._regrid import _global_stock_cube +from iris import NameConstraint + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + +# Regular expression to parse a "MxN" cell-specification. +_CELL_SPEC = re.compile( + r'''\A + \s*(?P\d+(\.\d+)?)\s* + x + \s*(?P\d+(\.\d+)?)\s* + \Z + ''', re.IGNORECASE | re.VERBOSE) + +# Default fill-value. +_MDI = 1e+20 + +# Stock cube - global grid extents (degrees). +_LAT_MIN = -90.0 +_LAT_MAX = 90.0 +_LAT_RANGE = _LAT_MAX - _LAT_MIN +_LON_MIN = 0.0 +_LON_MAX = 360.0 +_LON_RANGE = _LON_MAX - _LON_MIN + + +def _generate_cube_from_dimcoords(latdata, londata, circular: bool = False): + """Generate cube from lat/lon points. + + Parameters + ---------- + latdata : np.ndarray + List of latitudes. + londata : np.ndarray + List of longitudes. + circular : bool + Wrap longitudes around the full great circle. Bounds will not be + generated for circular coordinates. + + Returns + ------- + :class:`~iris.cube.Cube` + """ + lats = iris.coords.DimCoord(latdata, + standard_name='latitude', + units='degrees_north', + var_name='lat', + circular=circular) + + lons = iris.coords.DimCoord(londata, + standard_name='longitude', + units='degrees_east', + var_name='lon', + circular=circular) + + if not circular: + # cannot guess bounds for wrapped coordinates + lats.guess_bounds() + lons.guess_bounds() + + # Construct the resultant stock cube, with dummy data. + shape = (latdata.size, londata.size) + dummy = np.empty(shape, dtype=np.dtype('int8')) + coords_spec = [(lats, 0), (lons, 1)] + cube = iris.cube.Cube(dummy, dim_coords_and_dims=coords_spec) + + return cube + + +def parse_cell_spec(spec): + """Parse an MxN cell specification string. + + Parameters + ---------- + spec: str + ``MxN`` degree cell-specification for the global grid. + + Returns + ------- + tuple + tuple of (float, float) of parsed (lon, lat) + + Raises + ------ + ValueError + if the MxN cell specification is malformed. + ValueError + invalid longitude and latitude delta in cell specification. + """ + cell_match = _CELL_SPEC.match(spec) + if cell_match is None: + emsg = 'Invalid MxN cell specification for grid, got {!r}.' 
+        raise ValueError(emsg.format(spec))
+
+    cell_group = cell_match.groupdict()
+    dlon = float(cell_group['dlon'])
+    dlat = float(cell_group['dlat'])
+
+    if (np.trunc(_LON_RANGE / dlon) * dlon) != _LON_RANGE:
+        emsg = ('Invalid longitude delta in MxN cell specification '
+                'for grid, got {!r}.')
+        raise ValueError(emsg.format(dlon))
+
+    if (np.trunc(_LAT_RANGE / dlat) * dlat) != _LAT_RANGE:
+        emsg = ('Invalid latitude delta in MxN cell specification '
+                'for grid, got {!r}.')
+        raise ValueError(emsg.format(dlat))
+
+    return dlon, dlat
+
+
+def _cmorize_dataset(in_file, var, cfg, out_dir):
+    logger.info("CMORizing variable '%s' from input file '%s'",
+                var['short_name'], in_file)
+    attributes = deepcopy(cfg['attributes'])
+    attributes['mip'] = var['mip']
+
+    cmor_table = cfg['cmor_table']
+    definition = cmor_table.get_variable(var['mip'], var['short_name'])
+
+    cube = iris.load_cube(str(in_file),
+                          constraint=NameConstraint(var_name=var['raw']))
+
+    # Time has strange values, so use forecast_reference_time instead
+    cube.remove_coord('time')
+    cube.coord('forecast_reference_time').rename('time')
+
+    # The following lines are essential before applying
+    # the common function fix_coords
+    # Convert time calendar from proleptic_gregorian to gregorian
+    cube.coord('time').units = cf_units.Unit(
+        cube.coord('time').units.origin, 'gregorian')
+
+    # Set standard_names for lat and lon
+    cube.coord('lat').standard_name = 'latitude'
+    cube.coord('lon').standard_name = 'longitude'
+
+    cube = utils.fix_coords(cube)
+
+    # The above command does not return bounds for longitude
+    # so explicitly get them here.
+    if not cube.coord('longitude').has_bounds():
+        cube.coord('longitude').guess_bounds()
+
+    # Set correct names
+    cube.var_name = definition.short_name
+
+    cube.long_name = definition.long_name
+
+    # Convert units if required
+    cube.units = '1'
+    cube.convert_units(definition.units)
+
+    # Set global attributes
+    utils.set_global_atts(cube, attributes)
+
+    logger.info("Saving CMORized cube for variable %s", cube.var_name)
+    utils.save_variable(cube, cube.var_name, out_dir, attributes)
+
+    return in_file
+
+
+def _regrid_dataset(in_dir, var, cfg):
+    """Regridding of original files.
+
+    This function regrids each file and writes it to the working
+    directory, keeping the original filename.
+    """
+    # Match any year here
+    filepattern = var['file'].format(year='????', month='??')
+    filelist = glob.glob(os.path.join(in_dir, filepattern))
+    regridder = None
+    for infile in filelist:
+        _, infile_tail = os.path.split(infile)
+        outfile = os.path.join(cfg['work_dir'], infile_tail)
+        targetgrid_ds = xr.DataArray.from_iris(
+            _global_stock_cube(cfg['custom']['regrid']))
+        input_ds = xr.open_dataset(infile)
+        # Do renaming for consistency of coordinate names
+        input_ds = input_ds.rename({'latitude': 'lat', 'longitude': 'lon'})
+        # Select uppermost soil level (index 0)
+        input_da = input_ds[var['raw']].isel(soilLayer=0)
+        logger.info("Regridding...")
+        # A workaround to avoid spreading of nan values,
+        # related to a GitHub issue
+        constantval = 10
+        input_da = input_da + constantval
+        assert int((input_da == 0.).sum()) == 0  # Make sure that there
+        # are no zeros in the data,
+        # since they will be masked out
+        shapes = (input_ds["lon"].shape, input_ds["lat"].shape,
+                  targetgrid_ds["lon"].shape, targetgrid_ds["lat"].shape)
+        if regridder is None:
+            regridder = xe.Regridder(input_ds,
+                                     targetgrid_ds,
+                                     'bilinear')
+            ref_shapes = shapes
+        else:
+            assert shapes == ref_shapes
+        da_out = regridder(input_da)
+        da_out = da_out.where(da_out != 0.)
+        da_out = da_out - constantval
+
+        # Save it.
+        logger.info("Saving: %s", outfile)
+        da_out.to_netcdf(outfile)
+
+
+def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date):
+    """Cmorization func call."""
+    # run the cmorization
+    # Pass on the workdir to the cfg dictionary
+    cfg['work_dir'] = cfg_user.work_dir
+    # If it doesn't exist, create it
+    if not os.path.isdir(cfg['work_dir']):
+        logger.info("Creating working directory for "
+                    "regridding: %s", cfg['work_dir'])
+        os.mkdir(cfg['work_dir'])
+
+    for short_name, var in cfg['variables'].items():
+        var['short_name'] = short_name
+        logger.info("Processing var %s", short_name)
+
+        # Regridding
+        logger.info("Start regridding to: %s", cfg['custom']['regrid'])
+        _regrid_dataset(in_dir, var, cfg)
+        logger.info("Finished regridding")
+        logger.info("Start CMORizing")
+
+        for year in range(1961, 2019):
+            # File concatenation
+            for month in range(1, 13):
+                in_file = os.path.join(
+                    cfg['work_dir'], var['file'].format(year=year,
+                                                        month=f"{month:02}"))
+                if os.path.isfile(in_file):
+                    # Read in the full dataset here from 'workdir'
+                    logger.info("Start CMORization of file %s", in_file)
+                    _cmorize_dataset(in_file, var, cfg, out_dir)
+                    logger.info("Finished processing %s-%s", year, month)
+                else:
+                    logger.info("No files found for %s-%s", year, month)
+                    continue
+
+    logger.info("Finished CMORization")
diff --git a/esmvaltool/cmorizers/data/formatters/datasets/cds_xch4.ncl b/esmvaltool/cmorizers/data/formatters/datasets/cds_xch4.ncl
new file mode 100644
index 0000000000..a8ccdad3c2
--- /dev/null
+++ b/esmvaltool/cmorizers/data/formatters/datasets/cds_xch4.ncl
@@ -0,0 +1,117 @@
+; #############################################################################
+; ESMValTool CMORizer for CDS-XCH4 data
+; #############################################################################
+;
+; Tier
+;    Tier 3: restricted dataset.
+;
+; Source
+;    https://cds.climate.copernicus.eu/cdsapp#!/dataset/
+;    satellite-methane?tab=form
+;
+; Last access
+;    20190311
+;
+; Download and processing instructions
+;    Select Processing level "Level 3", variable "Column-average dry-air mole
+;    fraction of atmospheric methane (XCH4) and related variables", Sensor and
+;    algorithm "MERGED and OBS4MIPS".
+;    A registration is required to download the data.
+;
+; Modification history
+;    20190311-hassler_birgit: written.
+; +; ############################################################################ +loadscript(getenv("esmvaltool_root") + \ + "/data/formatters/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "cds_xch4.ncl" + + ; Source name + OBSNAME = "CDS-XCH4" + + ; Tier + TIER = 3 + + ; Period + YEAR1 = 2003 + YEAR2 = 2018 + + ; Selected variable (standard name) + VAR = (/"xch4"/) + + ; Name in the raw data + NAME = (/"xch4"/) + + ; MIP + MIP = (/"Amon"/) + + ; Frequency + FREQ = (/"mon"/) + + ; CMOR table + CMOR_TABLE = getenv("cmor_tables") + "/custom/CMOR_xch4.dat" + + ; Type + TYPE = "sat" + + ; Version + VERSION = "L3" + + ; Global attributes + SOURCE = "https://cds.climate.copernicus.eu/cdsapp#!/dataset/" + \ + "satellite-methane?tab=form" + REF = "Buchwitz et al., Adv. Astronaut. Sci. Technol., " + \ + "doi:10.1007/s42423-018-0004-6, 2018." + COMMENT = "" + +end + +begin + + ; Loop over variables + do vv = 0, dimsizes(VAR) - 1 + + log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") + + ; Read variables + + fname = input_dir_path + "200301_201812-C3S-L3_GHG-GHG_PRODUCTS-" + \ + "MERGED-MERGED-OBS4MIPS-MERGED-v4.1.nc" + setfileoption("nc", "MissingToFillValue", False) + f = addfile(fname, "r") + output = f->xch4 + + ; Format coordinates + format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ(vv)) + + ; Set variable attributes + tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ(vv)) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = YEAR1 + "01-" + YEAR2 + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" + + ; Write variable + write_nc(fout, VAR(vv), output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + end do + +end diff --git a/esmvaltool/cmorizers/data/formatters/datasets/cds_xco2.ncl b/esmvaltool/cmorizers/data/formatters/datasets/cds_xco2.ncl new file mode 100644 index 0000000000..a5f15fc622 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/cds_xco2.ncl @@ -0,0 +1,116 @@ +; ############################################################################# +; ESMValTool CMORizer for CDS-XCO2 data +; ############################################################################# +; +; Tier +; Tier 3: restricted dataset. +; +; Source +; https://cds.climate.copernicus.eu/cdsapp#!/dataset/ +; satellite-carbon-dioxide?tab=form +; +; Last access +; 20190319 +; +; Download and processing instructions +; Select Processing level "Level 3", variable "Column-average dry-air mole +; fraction of atmospheric carbon dioxide (XCO2) and related variables", +; Sensor and algorithm "MERGED and OBS4MIPS". +; A registration is required to download the data. +; +; Modification history +; 20190319-hassler_birgit: written. 
+; +; ############################################################################ +loadscript(getenv("esmvaltool_root") + \ + "/data/formatters/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "cds_xco2.ncl" + + ; Source name + OBSNAME = "CDS-XCO2" + + ; Tier + TIER = 3 + + ; Period + YEAR1 = 2003 + YEAR2 = 2017 + + ; Selected variable (standard name) + VAR = (/"xco2"/) + + ; Name in the raw data + NAME = (/"xco2"/) + + ; MIP + MIP = (/"Amon"/) + + ; Frequency + FREQ = (/"mon"/) + + ; CMOR table + CMOR_TABLE = getenv("cmor_tables") + "/custom/CMOR_xco2.dat" + + ; Type + TYPE = "sat" + + ; Version + VERSION = "L3" + + ; Global attributes + SOURCE = "https://cds.climate.copernicus.eu/cdsapp#!/dataset/" + \ + "satellite-carbon-dioxide?tab=form" + REF = "Buchwitz et al., Adv. Astronaut. Sci. Technol., " + \ + "doi:10.1007/s42423-018-0004-6, 2018." + COMMENT = "" + +end + +begin + + ; Loop over variables + do vv = 0, dimsizes(VAR) - 1 + + log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") + + ; Read variables + fname = input_dir_path + "OBS_" + OBSNAME + "_sat_L3_v31_" + NAME(vv) + \ + "_" + YEAR1 + "01-" + YEAR2 + "12.nc" + setfileoption("nc", "MissingToFillValue", False) + f = addfile(fname, "r") + output = f->xco2 + + ; Format coordinates + format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ(vv)) + + ; Set variable attributes + tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ(vv)) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = YEAR1 + "01-" + YEAR2 + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" + + ; Write variable + write_nc(fout, VAR(vv), output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + end do + +end diff --git a/esmvaltool/cmorizers/data/formatters/datasets/ceres_ebaf.py b/esmvaltool/cmorizers/data/formatters/datasets/ceres_ebaf.py new file mode 100644 index 0000000000..e02332130d --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/ceres_ebaf.py @@ -0,0 +1,78 @@ +"""ESMValTool CMORizer for CERES-EBAF data. + +Tier + Tier 2: other freely-available dataset. 
+ +Source + https://ceres-tool.larc.nasa.gov/ord-tool/jsp/EBAFTOA41Selection.jsp + +Last access + 20220701 + +Download and processing instructions + Select: "TOA Fluxes" (all), "Monthly", "Regional" (0-360, -90-90) + Enter "Email Address" and click on "Get Data" + Wait for the processing to be finished and click on "Download" +""" + +import logging +import os +import warnings + +import iris +from iris import NameConstraint + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def filter_warnings(): + """Filter certain :mod:`iris` warnings.""" + for msg in ('min', 'max'): + warnings.filterwarnings( + 'ignore', + message=f"WARNING: valid_{msg} not used", + category=UserWarning, + module='iris', + ) + + +def _extract_variable(short_name, var, cfg, filepath, out_dir): + """Extract variable.""" + raw_var = var.get('raw', short_name) + with warnings.catch_warnings(): + filter_warnings() + cube = iris.load_cube(filepath, NameConstraint(var_name=raw_var)) + + # Fix units + cmor_info = cfg['cmor_table'].get_variable(var['mip'], short_name) + utils.convert_timeunits(cube, 1950) + + # Fix coordinates + cube = utils.fix_coords(cube) + + # Fix metadata + attrs = cfg['attributes'] + attrs['mip'] = var['mip'] + utils.fix_var_metadata(cube, cmor_info) + utils.set_global_atts(cube, attrs) + + # Save variable + with warnings.catch_warnings(): + filter_warnings() + utils.save_variable(cube, + short_name, + out_dir, + attrs, + unlimited_dimensions=['time']) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + raw_filepath = os.path.join(in_dir, cfg['filename']) + + # Run the cmorization + for (short_name, var) in cfg['variables'].items(): + logger.info("CMORizing variable '%s'", short_name) + _extract_variable(short_name, var, cfg, raw_filepath, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/ceres_syn1deg.ncl b/esmvaltool/cmorizers/data/formatters/datasets/ceres_syn1deg.ncl new file mode 100644 index 0000000000..62fad10d3d --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/ceres_syn1deg.ncl @@ -0,0 +1,195 @@ +; ############################################################################# +; ESMValTool CMORizer for CERES-SYN1deg data +; ############################################################################# +; +; Tier +; Tier 3: restricted dataset. +; +; Source +; https://ceres-tool.larc.nasa.gov/ord-tool/jsp/SYN1degSelection.jsp +; +; Last access +; 20190207 +; +; Download and processing instructions +; Monthly data: +; Expand "Compute TOA Fluxes" and select: +; Shortwave Flux, Allsky and Clearsky +; Longwave Flux, Allsky and Clearsky +; Shortwave Flux Down, Allsky +; Expand "Computed Surface Fluxes" and select: +; Shortwave Flux Up, Allsky and Clearsky +; Shortwave Flux Down, Allsky and Clearsky +; Longwave Flux Up, Allsky and Clearsky +; Longwave Flux Down, Allsky and Clearsky +; then click on "Monthly", "Regional" and "Get data". All fields are saved +; in CERES_SYN1deg-Month_Terra-Aqua-MODIS_Ed3A_Subset_200003-201702.nc +; 3hr data: +; Select the same fields as above, then click on "Daily 3-Hourly" and +; "Get data". All fields are saved in +; CERES_SYN1deg-3H_Terra-Aqua-MODIS_Ed3A_Subset_YYYYMMDD-YYYYMMDD.nc +; Put all files in input_dir_path (no subdirectories with years). +; For orders larger than 2 GB a registration is required. 
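+;    The input directory is then expected to contain, for example
+;    (the 3hr date ranges YYYYMMDD-YYYYMMDD depend on the order):
+;      CERES_SYN1deg-Month_Terra-Aqua-MODIS_Ed3A_Subset_200003-201702.nc
+;      CERES_SYN1deg-3H_Terra-Aqua-MODIS_Ed3A_Subset_YYYYMMDD-YYYYMMDD.nc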
+; +; Modification history +; 20190124-righi_mattia: written based on v1 different bash scripts by +; Axel Lauer and Martin Evaldsson. +; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + \ + "/data/formatters/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "ceres_syn1deg.ncl" + + ; Source name + OBSNAME = "CERES-SYN1deg" + + ; Tier + TIER = 3 + + ; Period + YEAR1 = 2001 + YEAR2 = 2016 + + ; CMOR name + VAR = (/"rsus", "rsds", \ + "rlus", "rlds", \ + "rldscs", "rsut", \ + "rlut", "rsutcs", \ + "rlutcs", "rsdt", \ + "rsutcs", "rsut", \ + "rlutcs", "rlut", \ + "rsuscs", "rsus", \ + "rsdscs", "rsds", \ + "rluscs", "rlus", \ + "rldscs", "rlds"/) + + ; Name in the raw data + NAME = (/"sfc_comp_sw_up_all_mon", "sfc_comp_sw_down_all_mon", \ + "sfc_comp_lw_up_all_mon", "sfc_comp_lw_down_all_mon", \ + "sfc_comp_lw_down_clr_mon", "toa_comp_sw_up_all_mon", \ + "toa_comp_lw_up_all_mon", "toa_comp_sw_up_clr_mon", \ + "toa_comp_lw_up_clr_mon", "toa_comp_sw_down_all_mon", \ + "toa_comp_sw_up_clr_3h", "toa_comp_sw_up_all_3h", \ + "toa_comp_lw_up_clr_3h", "toa_comp_lw_up_all_3h", \ + "sfc_comp_sw_up_clr_3h", "sfc_comp_sw_up_all_3h", \ + "sfc_comp_sw_down_clr_3h", "sfc_comp_sw_down_all_3h", \ + "sfc_comp_lw_up_clr_3h", "sfc_comp_lw_up_all_3h", \ + "sfc_comp_lw_down_clr_3h", "sfc_comp_lw_down_all_3h"/) + + ; MIP + MIP = (/"Amon", "Amon", \ + "Amon", "Amon", \ + "Amon", "Amon", \ + "Amon", "Amon", \ + "Amon", "Amon", \ + "3hr", "3hr", \ + "3hr", "3hr", \ + "3hr", "3hr", \ + "3hr", "3hr", \ + "3hr", "3hr", \ + "3hr", "3hr"/) + + ; Frequency + FREQ = (/"mon", "mon", \ + "mon", "mon", \ + "mon", "mon", \ + "mon", "mon", \ + "mon", "mon", \ + "3hr", "3hr", \ + "3hr", "3hr", \ + "3hr", "3hr", \ + "3hr", "3hr", \ + "3hr", "3hr", \ + "3hr", "3hr"/) + + ; CMOR table: Amon version is used also for 3hr, since not all variables are + ; available in 3hr (the tables are identical anyway) + CMOR_TABLE = new(dimsizes(MIP), string) + CMOR_TABLE = getenv("cmor_tables") + "/cmip5/Tables/CMIP5_Amon" + CMOR_TABLE(ind(VAR.eq."rluscs")) = \ + getenv("cmor_tables") + "/custom/CMOR_rluscs.dat" + + ; Type + TYPE = "sat" + + ; Version + VERSION = "Ed3A" + + ; Global attributes + SOURCE = "https://ceres-tool.larc.nasa.gov/ord-tool/jsp/SYN1degSelection.jsp" + REF = "Wielicki et al., Bull. Amer. Meteor. 
Soc., " + \ + "doi: 10.1175/1520-0477(1996)077<0853:CATERE>2.0.CO;2, 1996" + COMMENT = "" + +end + +begin + + do vv = 0, dimsizes(VAR) - 1 + + log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") + + do yy = YEAR1, YEAR2 + + ; Read file + if (MIP(vv).eq."Amon") then + f = addfile(input_dir_path + "CERES_SYN1deg-Month_Terra-Aqua-" + \ + "MODIS_" + VERSION + "_Subset_200003-201702.nc", "r") + xx = f->$NAME(vv)$ + delete(f) + end if + if (MIP(vv).eq."3hr") then + files = systemfunc("ls " + input_dir_path + "CERES_SYN1deg-3H_" + \ + "Terra-Aqua-MODIS_" + VERSION + "_Subset_*" + \ + yy + "*.nc") + f = addfiles(files, "r") + xx = f[:]->$NAME(vv)$ + delete(f) + delete(files) + end if + + ; Time selection + date = cd_calendar(xx&time, 0) + output = xx(ind(date(:, 0).eq.yy), :, :) + delete(date) + delete(xx) + + ; Format coordinates + output!0 = "time" + output!1 = "lat" + output!2 = "lon" + format_coords(output, yy + "0101", yy + "1231", FREQ(vv)) + + ; Set variable attributes + tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ(vv)) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = yy + "01-" + yy + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" + + ; Write variable + write_nc(fout, VAR(vv), output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + end do + + end do + +end diff --git a/esmvaltool/cmorizers/data/formatters/datasets/clara_avhrr.ncl b/esmvaltool/cmorizers/data/formatters/datasets/clara_avhrr.ncl new file mode 100644 index 0000000000..b275332d48 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/clara_avhrr.ncl @@ -0,0 +1,247 @@ +; ############################################################################# +; ESMValTool CMORizer for CM SAF CLARA-AHRR v2 data +; ############################################################################# +; +; Tier +; Tier 3: restricted dataset. +; +; Source +; https://wui.cmsaf.eu/ +; +; Last access +; 2021-03-22 +; +; Download and processing instructions +; 1) Create ("register") an user account at +; https://wui.cmsaf.eu/safira/action/viewLogin?menuName=NUTZER_HOME +; 2) login (same URL as above) +; 3) Search data using search form at +; https://wui.cmsaf.eu/safira/action/viewProduktHome?menuName=PRODUKT_HOME +; +; - Product group: Climate Data Records +; - Product family: CLARA-A ed. 2.1 +; - Product name: CFC - Factional cloud cover +; IWP - Ice water path +; LWP - Liquid water path +; - Area: Global +; - Temporal resolution: Monthly +; +; 4) Select "CLARA-A ed. 2.1 AVHRR on polar orbiting satellites" from +; list of results. +; 5) Click on "Add to order cart" +; 6) Follow download instructions in automatic email received when data +; are ready for download. +; 7) Untar all .tar files into a single directory. +; +; Modification history +; 20230818-lauer_axel: added output of clwvi (in addition to iwp, lwp) +; 20210506-lauer_axel: output of lwp instead of clwvi +; 20210323-lauer_axel: written. 
+; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + \ + "/data/formatters/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "cmorize_obs_clara_avhrr.ncl" + + ; Source name + OBSNAME = "CLARA-AVHRR" + + ; Tier + TIER = 3 + + ; Period + YEAR1 = 1982 + YEAR2 = 2018 + + ; Selected variable (standard name) + VAR = (/"clt", "clivi", "lwp", "clwvi"/) + + ; Name in the raw data + NAME = (/"cfc", "iwp_allsky", "lwp_allsky", "iwp_allsky"/) + + ; Filename base + FNBASE = (/"CFCmm", "IWPmm", "LWPmm", "IWPmm"/) + + ; Conversion factor + ; Remark: total cloud cover (CFC) is reported as "1" but is actually "%" + ; IWP and LWP use scale_factor to convert to kg/m2 + ; CONV = (/1., 1., 1., 1./) + + ; MIP + MIP = (/"Amon", "Amon", "Amon", "Amon"/) + + ; Frequency + FREQ = (/"mon", "mon", "mon", "mon"/) + + ; CMOR table + CMOR_TABLE = getenv("cmor_tables") + \ + (/"/cmip5/Tables/CMIP5_Amon", \ + "/cmip5/Tables/CMIP5_Amon", \ + "/custom/CMOR_lwp.dat", \ + "/cmip5/Tables/CMIP5_Amon"/) + + ; Type + TYPE = "sat" + + ; Version + VERSION = "V002_01" + + ; Global attributes + SOURCE = "https://wui.cmsaf.eu/" + REF = "https://doi.org/10.5676/EUM_SAF_CM/CLARA_AVHRR/V002_01" + COMMENT = "The CM SAF data are owned by EUMETSAT and are available to " \ + + "all users free of charge and with no conditions to use. If you wish " \ + + "to use these products, EUMETSAT's copyright credit must be shown by " \ + + "displaying the words 'Copyright (c) (2020) EUMETSAT' under/in each " \ + + "of these SAF Products used in a project or shown in a publication " \ + + "or website. Please follow the citation guidelines given at " \ + + "https://doi.org/10.5676/EUM_SAF_CM/CLARA_AVHRR/V002_01 and also " \ + + "register as a user at http://cm-saf.eumetsat.int/ to receive latest " \ + + "information on CM SAF services and to get access to the CM SAF User " \ + + "Help Desk." + +end + +begin + + do vv = 0, dimsizes(VAR) - 1 + + log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") + + time = create_timec(YEAR1, YEAR2) + date = cd_calendar(time, 1) + + ; Create timeseries + do yy = YEAR1, YEAR2 + + syear = sprinti("%i", yy) + do mm = 1, 12 + + smonth = sprinti("%0.2i", mm) + + ; Read file + fname = systemfunc("ls " + input_dir_path + FNBASE(vv) + \ + syear + smonth + "01*.nc") + + ; No files found + if (ismissing(fname)) then + log_info("Warning: no input data found for variable " + VAR(vv) + \ + " (" + syear + smonth + ")") + continue + end if + + ; Extract data + f = addfile(fname, "r") + val = f->$NAME(vv)$ + if (isatt(val, "scale_factor")) then + scalefac = tofloat(val@scale_factor) + else + scalefac = 1.0 + end if + if (isatt(val, "add_offset")) then + offset = tofloat(val@add_offset) + else + offset = 0.0 + end if + xx = tofloat(val) * scalefac + offset + delete(val) + + ; Assign to global array + if (.not.isdefined("output")) then + dims = dimsizes(xx) + dims(0) = dimsizes(time) + output = new(dims, float) + output!0 = "time" + output&time = time + output!1 = "lat" + output&lat = f->lat + output!2 = "lon" + output&lon = f->lon + fillval = xx@_FillValue + end if + output(ind(toint(yy * 100 + mm).eq.date), :, :) = (/xx/) + + delete(fname) + delete(f) + delete(xx) + + ; *** calculate clwvi (lwp + iwp) *** + + if (VAR(vv) .eq. 
"clwvi") then + fname = systemfunc("ls " + input_dir_path + "LWPmm" + \ + syear + smonth + "01*.nc") + + ; No files found + if (ismissing(fname)) then + log_info("Warning: input data incomplete for variable " + \ + VAR(vv) + " (" + syear + smonth + ")") + continue + end if + + ; Extract data + f = addfile(fname, "r") + val = f->lwp_allsky + if (isatt(val, "scale_factor")) then + scalefac = tofloat(val@scale_factor) + else + scalefac = 1.0 + end if + if (isatt(val, "add_offset")) then + offset = tofloat(val@add_offset) + else + offset = 0.0 + end if + xx = tofloat(val) * scalefac + offset + delete(val) + + idx = ind(toint(yy * 100 + mm).eq.date) + output(idx, :, :) = output(idx, :, :) + (/xx(0, :, :)/) + + delete(idx) + delete(xx) + delete(fname) + delete(f) + end if ; if VAR(vv) .eq. "clwvi" + end do + end do + + ; Set fill value + output = where(output.eq.fillval, output@_FillValue, output) + + ; Format coordinates + output!0 = "time" + output!1 = "lat" + output!2 = "lon" + format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ(vv)) + + ; Set variable attributes + tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ(vv)) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = YEAR1 + "01-" + YEAR2 + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" + + ; Write variable + write_nc(fout, VAR(vv), output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + end do + +end diff --git a/esmvaltool/cmorizers/data/formatters/datasets/cloudsat_l2.ncl b/esmvaltool/cmorizers/data/formatters/datasets/cloudsat_l2.ncl new file mode 100644 index 0000000000..d26ed74cd6 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/cloudsat_l2.ncl @@ -0,0 +1,869 @@ +; ############################################################################# +; ESMValTool CMORizer for CloudSat Level 2 data +; ############################################################################# +; +; Tier +; Tier 3: restricted dataset (registration required). +; +; Source +; https://www.cloudsat.cira.colostate.edu/ +; +; Last access +; 20220810 +; +; Download and processing instructions: +; +; 1) Create an account at the CloudSat Data Processing Center +; (https://www.cloudsat.cira.colostate.edu/) +; +; 2) Download the datasets '2B-CWC-RO' and '2C-PRECIP-COLUMN' from +; www.cloudsat.cira.colostate.edu (via sftp) following the instructions +; given here: https://www.cloudsat.cira.colostate.edu/order/sftp-access +; +; 3) Put all files for a specific year under a single directory +; named like the year (e.g. 2007), no subdirectories with +; days etc. +; +; Description: +; +; This script bins 3-dim cloud liquid water content from individual CloudSat +; orbits (level 2 data: 2B_CWC_RO "radar only") onto a 2x2 degree grid with +; 40 vertical levels ("alt40"). Individual orbits and days are then averaged +; to monthly means. Precipitating gridboxes are identified using the CloudSat +; level 2 data for column precipitation (2C_PRECIP_COLUMN). The CloudSat data +; are converted from kg/m3 to kg/kg using the air density from the US +; standard atmosphere. In addition, liquid and ice water path are binned onto +; the same spatial grid. 
The script calculates four types of datasets for the +; 3-dim cloud liquid water content, liquid water path, ice water path: +; +; 1) in-cloud values averaged over all available data points +; 2) in-cloud values averaged over all data points for which no +; precipitation has been detected +; 3) grid-box averages assuming that missing values are cloud-free, i.e. +; cloud liquid water content = 0 +; 4) grid-box averages calculated over all data points for which no +; precipitation has been detected +; +; The results for each variable are written to four different output files: +; 1) in-cloud-all, 2) in-cloud-noprecip, 3) gridbox-average, +; 4) gridbox-average-noprecip. +; +; Note: one year of data consists of approximately 110 GB and 10,000 files. +; Processing one year takes about 5-6 hours. It is therefore recommended +; to process only one year at a time instead of multiple years. The +; year(s) to process are specified when running "esmvaltool" with +; the options "--start=year1" and "--end=year2", e.g. (w/o line break) +; +; esmvaltool data format --config_file [CONFIG_FILE] --start=2015 +; --end=2015 CLOUDSAT-L2 +; +; Modification history +; 20230904-lauer_axel: added output of clwvi (iwp + lwp) +; 20220809-lauer_axel: adapted CMORizer to new format introduced in +; ESMValTool v2.5.0 and added info message +; 20210924-lauer_axel: added processing of lwp and iwp +; 20201119-lauer_axel: allowing more flexible filenames for precipitation +; data +; 20201117-lauer_axel: code completely rewritten to reduce cpu time per +; orbit from ~ 120 s to ~ 4 s +; 20201109-lauer_axel: written. +; +; ############################################################################# + +loadscript(getenv("esmvaltool_root") + \ + "/data/formatters/interface.ncl") + +begin + ; Script name (for logger) + diag_script = "cloudsat-l2.ncl" + + ; Source name + obsname = "CLOUDSAT-L2" + + ; Tier + tier = 3 + + ; Period (range of complete years: 2007-2017) + year1 = get_year(start_year, 2007) + year2 = get_year(end_year, 2007) + + if ((year2 - year1) .gt. 0) then + log_info("Note: processing of " + tostring(year2 - year1 + 1) + \ + " years requested (start_year = " + sprinti("%04d", year1) + \ + ", end_year = " + sprinti("%04d", year2) + ").") + log_info("One year of data consists of approximately 110 GB and 10,000 " \ + + "files, processing may take up to several hours.") + log_info("It is therefore recommended to process only one year at a time.") + log_info("The year(s) to process are specified when running esmvaltool " \ + + "with the options --start=year1 and --end=year2, e.g.") + log_info("esmvaltool data format --config_file [CONFIG_FILE] " + \ + "--start=2015 --end=2015 CLOUDSAT-L2") + end if + + ; output variable (standard name) + outvar = (/"clw", "lwp", "clivi", "clwvi"/) + + ; input variables + var = (/"LO_RO_liquid_water_content_2B_CWC_RO", \ + "LO_RO_liquid_water_path_2B_CWC_RO", \ + "IO_RO_ice_water_path_2B_CWC_RO", \ + "dummy"/) + + var_flag = "Precip_flag_2C_PRECIP_COLUMN" + + ; MIP + mip = "Amon" + + ; Frequency + freq = "mon" + + ; CMOR table + + CMOR_TABLE = getenv("cmor_tables") + \ + (/"/cmip5/Tables/CMIP5_" + mip, \ + "/custom/CMOR_lwp.dat", \ + "/cmip5/Tables/CMIP5_" + mip, \ + "/cmip5/Tables/CMIP5_" + mip/) + + ; Type + type = "sat" + + ; Version + version = "P1-R05" + + ; Global attributes + source = "https://www.cloudsat.cira.colostate.edu/" + ref = "Stephens et al., Bull. Am. Meteorol. 
Soc., " \ + + "doi:10.1175/BAMS-D-16-0324.1, 2017" +end + +begin + ; define 2x2 output grid + + nx = 180 + ny = 90 + dx = 360. / nx + dy = 180. / ny + + ; alt40 from CMIP6 table "coordinates" + gridhgt = (/240., 720., 1200., 1680., 2160., 2640., 3120., 3600., 4080., \ + 4560., 5040., 5520., 6000., 6480., 6960., 7440., 7920., 8400., \ + 8880., 9360., 9840., 10320., 10800., 11280., 11760., 12240., \ + 12720., 13200., 13680., 14160., 14640., 15120., 15600., 16080.,\ + 16560., 17040., 17520., 18000., 18480., 18960./) + + nz = dimsizes(gridhgt) + nt = 12 + + ; define output longitudes and latitudes + + gridlat = latGlobeFo(ny, "lat", "latitude", "degrees_north") + gridlon = lonGlobeFo(nx, "lon", "longitude", "degrees_east") + + ; work arrays (NCL (v6.6.2) procedure bin_sum did not work properly with + ; subsetting the output arrays) + work = new((/ny, nx/), float) + workpts = new((/ny, nx/), integer) + work = 0. + workpts = 0 + + ; "in-cloud" (all points) + grid = new((/nt, nz, ny, nx/), float) + gridpts = new((/nt, nz, ny, nx/), integer) + + grid2d = new((/nt, 3, ny, nx/), float) + gridpts2d = new((/nt, 3, ny, nx/), integer) + + ; "grid-box average" (all points) + grid_avg = new((/nt, nz, ny, nx/), float) + gridpts_avg = new((/nt, nz, ny, nx/), integer) + grid2d_avg = new((/nt, 3, ny, nx/), float) + gridpts2d_avg = new((/nt, 3, ny, nx/), integer) + + ; "in-cloud" (no precipitation) + grid_noprecip = new((/nt, nz, ny, nx/), float) + gridpts_noprecip = new((/nt, nz, ny, nx/), integer) + grid2d_noprecip = new((/nt, 3, ny, nx/), float) + gridpts2d_noprecip = new((/nt, 3, ny, nx/), integer) + + ; "grid-box average" (no precipitation) + grid_avg_noprecip = new((/nt, nz, ny, nx/), float) + gridpts_avg_noprecip = new((/nt, nz, ny, nx/), integer) + grid2d_avg_noprecip = new((/nt, 3, ny, nx/), float) + gridpts2d_avg_noprecip = new((/nt, 3, ny, nx/), integer) + + ; boundaries of vertical (height) bins + + gridhgt_bnds = new(nz + 1, float) + gridhgt_bnds(0) = 0. + do i = 1, nz - 1 + gridhgt_bnds(i) = (gridhgt(i - 1) + gridhgt(i)) / 2. + end do + gridhgt_bnds(nz) = gridhgt(nz - 1) + gridhgt(nz - 1) - gridhgt_bnds(nz - 1) + + ; air density of vertical bins (from US standard atmosphere) in kg/m3 + tdp = stdatmus_z2tdp(gridhgt) + air_density = tdp(1, :) + delete(tdp) + + ; define coordinates + + grid!0 = "time" + grid!1 = "alt40" + grid&alt40 = gridhgt + grid!2 = "lat" + grid&lat = gridlat + grid!3 = "lon" + grid&lon = gridlon + + grid2d!0 = "time" + grid2d!2 = "lat" + grid2d&lat = gridlat + grid2d!3 = "lon" + grid2d&lon = gridlon + + copy_VarMeta(grid, grid_avg) + copy_VarMeta(grid, grid_noprecip) + copy_VarMeta(grid, grid_avg_noprecip) + + copy_VarMeta(grid2d, grid2d_avg) + copy_VarMeta(grid2d, grid2d_noprecip) + copy_VarMeta(grid2d, grid2d_avg_noprecip) + +end + +begin + do calyear = year1, year2 + ; reset work arrays + grid = 0. + grid_avg = 0. + grid_noprecip = 0. + grid_avg_noprecip = 0. + + grid2d = 0. + grid2d_avg = 0. + grid2d_noprecip = 0. + grid2d_avg_noprecip = 0. 
+ + gridpts = 0 + gridpts_avg = 0 + gridpts_noprecip = 0 + gridpts_avg_noprecip = 0 + + gridpts2d = 0 + gridpts2d_avg = 0 + gridpts2d_noprecip = 0 + gridpts2d_avg_noprecip = 0 + + ; create time variable + gridtime = create_timec(calyear, calyear) + + grid&time = gridtime + grid_avg&time = gridtime + grid_noprecip&time = gridtime + grid_avg_noprecip&time = gridtime + + grid2d&time = gridtime + grid2d_avg&time = gridtime + grid2d_noprecip&time = gridtime + grid2d_avg_noprecip&time = gridtime + + inpath = input_dir_path + tostring(calyear) + "/" + + ; process all days of the year + + do dayofyear = 1, day_of_year(calyear, 12, 31) + print("day = " + tostring(dayofyear)) + caldate = yyyyddd_to_yyyymmdd(calyear * 10000 + dayofyear) + calmonth = mod(caldate / 100, 100) + calday = mod(caldate, 100) + + itime = calmonth - 1 + + fn = tostring(calyear) + sprinti("%0.3i", dayofyear) + fs = systemfunc("ls " + inpath + fn + "*2B-CWC-RO_GRANULE*.hdf") + + if (ismissing(fs(0))) then + log_info("Warning: no input file(s) for " + tostring(calyear) + \ + "-" + sprinti("%03d", dayofyear) + ", skipping day.") + delete(fs) + continue + end if + + norbits = dimsizes(fs) + + ; start loop over all orbits for one day + + do orbit = 0, norbits - 1 + begTime = get_cpu_time() + print("orbit = " + tostring(orbit + 1)) + ; combine filename for precipitation data + ssplit = str_split(fs(orbit), "/") + strs = str_split(ssplit(dimsizes(ssplit) - 1), "_") + sorbit = strs(1) + fs_precip = systemfunc("ls " + inpath + strs(0) + "_" + sorbit + \ + "_CS_2C-PRECIP-COLUMN_GRANULE_P1_R05_*.hdf") + delete(ssplit) + delete(strs) + + ; File extension must be .hdfeos for NCL to correctly read input + ; files. File extension of cloudsat data downloaded from cloudsat + ; data processing center is .hdf. + ; --> workaround: set symbolic links with extension .hdfeos before + ; reading input files + + ssplit = str_split(fs(orbit), "/") + slink1 = ssplit(dimsizes(ssplit) - 1) + "eos" + delete(ssplit) + + ssplit = str_split(fs_precip, "/") + slink2 = ssplit(dimsizes(ssplit) - 1) + "eos" + delete(ssplit) + + ; remove old (possibly broken) links before creating new ones + + if (fileexists(slink1)) then + system("rm -f " + slink1) + end if + if (fileexists(slink2)) then + system("rm -f " + slink2) + end if + + ; create symbolic links + + system("ln -s " + fs(orbit) + " " + slink1) + system("ln -s " + fs_precip + " " + slink2) + + ; *************************************************************** + ; Read hdf-eos + ; *************************************************************** + print("processing " + fs(orbit) + " + " + fs_precip + "...") + + f = addfile(slink1, "r") + x2d = f->$var(0)$ + + if (isatt(x2d, "_FillValue")) then + delete(x2d@_FillValue) + end if + + l1d = f->$var(1)$ + + if (isatt(l1d, "_FillValue")) then + delete(l1d@_FillValue) + end if + + i1d = f->$var(2)$ + + if (isatt(i1d, "_FillValue")) then + delete(i1d@_FillValue) + end if + + f_precip = addfile(slink2, "r") + x_flag = f_precip->$var_flag$ + + ; remove attribute '_FillValue' to prevent NCL from handling + ; missing values + + if (isatt(x_flag, "_FillValue")) then + delete(x_flag@_FillValue) + end if + + ; check if array dimensions match + + dimx = dimsizes(x2d) + if (dimx(0) .ne. dimsizes(x_flag)) then + log_info("Warning: dimsizes do not match. 
" + fs(orbit) + ", " + \ + fs_precip + " Skipping orbit " + sorbit + ".") + continue + end if + + lat = f->Latitude_2B_CWC_RO + lon = f->Longitude_2B_CWC_RO + hgt1d = ndtooned(f->Height_2B_CWC_RO) +; time = f->Profile_time_2B_CWC_RO + + ; convert longitudes from -180...180 to 0...360 + lon = where(lon .lt. 0., lon + 360., lon) + + ; -------------------------------------------------------------------- + + x = ndtooned(x2d) + delete(x2d) + + x_flag_ext = ndtooned(conform_dims(dimx, x_flag, 0)) + + lon_ext = ndtooned(conform_dims(dimx, lon, 0)) + lat_ext = ndtooned(conform_dims(dimx, lat, 0)) + + ; ========================= + ; 3-dim cloud water content + ; ========================= + + do ilev = 0, nz - 1 + ; Find all elements that are within a given height level + ; (output grid) that contain valid (x > 0) or missing (x = 0) values; + ; invalid values (x < 0) are filtered out. + ii = ind((hgt1d .ge. gridhgt_bnds(ilev)) .and. \ + (hgt1d .lt. gridhgt_bnds(ilev + 1)) .and. (x .ge. 0.)) + ; check if there are input data on the given output level and + ; output gridbox + if (.not. ismissing(ii(0))) then + xselected = x(ii) + xlon = lon_ext(ii) + xlat = lat_ext(ii) + x_flag_selected = x_flag_ext(ii) + ; *** gridbox average, all *** + ; Includes all points with missing cloud liquid water + ; content (x = 0) when calculating the average. This assumes + ; that all grid points with "missing" but not invalid values + ; are cloud-free. Invalid values (x < 0) are filtered out. + bin_sum(work, workpts, gridlon, gridlat, xlon, xlat, xselected) + grid_avg(itime, ilev, :, :) = grid_avg(itime, ilev, :, :) + work + gridpts_avg(itime, ilev, :, :) = gridpts_avg(itime, ilev, :, :) \ + + workpts + work = 0. + workpts = 0 + ; *** in-cloud average, all *** + ; Filter invalid (x < 0) and missing values (x = 0). + idx = ind(xselected .gt. 0.) + if (.not. ismissing(idx(0))) then + bin_sum(work, workpts, gridlon, gridlat, \ + xlon(idx), xlat(idx), xselected(idx)) + grid(itime, ilev, :, :) = grid(itime, ilev, :, :) + work + gridpts(itime, ilev, :, :) = gridpts(itime, ilev, :, :) \ + + workpts + work = 0. + workpts = 0 + end if + delete(idx) + ; *** gridbox average, no precipitation *** + ; Filter cases that have been flagged with + ; precipitation or as unknown. The following CloudSat flags + ; are defined (see https://www.cloudsat.cira.colostate.edu/ + ; sites/default/files/ + ; products/files/2C-PRECIP-COLUMN_PDICD.P2_R04.20130124.pdf): + ; + ; flag 0 = no precipitation detected + ; 1 = rain possible + ; 2 = rain probable + ; 3 = rain certain + ; 4 = snow possible + ; 5 = snow certain + ; 6 = mixed precipitation possible + ; 7 = mixed precipitation certain + ; 9 = uncertain + idx = ind(x_flag_selected .eq. 0) + if (.not. ismissing(idx(0))) then + bin_sum(work, workpts, gridlon, gridlat, \ + xlon(idx), xlat(idx), xselected(idx)) + grid_avg_noprecip(itime, ilev, :, :) = \ + grid_avg_noprecip(itime, ilev, :, :) + work + gridpts_avg_noprecip(itime, ilev, :, :) = \ + gridpts_avg_noprecip(itime, ilev, :, :) + workpts + work = 0. + workpts = 0 + end if + delete(idx) + ; *** in-cloud average, no precipitation *** + ; Filter cases that have been flagged with + ; precipitation or as unknown. + idx = ind((xselected .gt. 0.) .and. (x_flag_selected .eq. 0)) + if (.not. 
ismissing(idx(0))) then + bin_sum(work, workpts, gridlon, gridlat, \ + xlon(idx), xlat(idx), xselected(idx)) + grid_noprecip(itime, ilev, :, :) = \ + grid_noprecip(itime, ilev, :, :) + work + gridpts_noprecip(itime, ilev, :, :) = \ + gridpts_noprecip(itime, ilev, :, :) + workpts + work = 0. + workpts = 0 + end if + delete(idx) + + delete(xselected) + delete(x_flag_selected) + delete(xlon) + delete(xlat) + end if + delete(ii) + end do ; loop over output levels + + delete(x) + delete(x_flag_ext) + delete(lat_ext) + delete(lon_ext) + delete(hgt1d) + + ; ============================================================ + ; 2-dim cloud liquid / ice water path / total cloud water path + ; ============================================================ + + do ivar = 1, 3 + if (outvar(ivar) .eq. "lwp") then + x = l1d + ilev = 0 + else if (outvar(ivar) .eq. "clivi") then + x = i1d + ilev = 1 + else if (outvar(ivar) .eq. "clwvi") then + ; clwvi is calculated from the *output* fields of lwp and iwp + ; to make sure it is the sum of the two (masking, etc.) + ; --> nothing to do here + continue + else + log_info("Warning: output variable unknown: " + outvar(ivar) + \ + ", skipping variable " + outvar(ivar) + ".") + continue + end if + end if + end if + + ; Find all elements that contain valid (x > 0) or missing (x = 0) + ; values; invalid values (x < 0) are filtered out. + ii = ind(x .ge. 0.) + ; check if there are input data for given output gridbox + if (.not. ismissing(ii(0))) then + xselected = x(ii) + xlon = lon(ii) + xlat = lat(ii) + x_flag_selected = x_flag(ii) + ; *** gridbox average, all *** + ; Includes all points with missing lwp + ; content (x = 0) when calculating the average. This assumes + ; that all grid points with "missing" but not invalid values + ; are cloud-free. Invalid values (x < 0) are filtered out. + bin_sum(work, workpts, gridlon, gridlat, xlon, xlat, xselected) + grid2d_avg(itime, ilev, :, :) = \ + grid2d_avg(itime, ilev, :, :) + work + gridpts2d_avg(itime, ilev, :, :) = \ + gridpts2d_avg(itime, ilev, :, :) + workpts + work = 0. + workpts = 0 + ; *** in-cloud average, all *** + ; Filter invalid (x < 0) and missing values (x = 0). + idx = ind(xselected .gt. 0.) + if (.not. ismissing(idx(0))) then + bin_sum(work, workpts, gridlon, gridlat, \ + xlon(idx), xlat(idx), xselected(idx)) + grid2d(itime, ilev, :, :) = grid2d(itime, ilev, :, :) + work + gridpts2d(itime, ilev, :, :) = gridpts2d(itime, ilev, :, :) \ + + workpts + work = 0. + workpts = 0 + end if + delete(idx) + ; *** gridbox average, no precipitation *** + ; Filter cases that have been flagged with + ; precipitation or as unknown. + idx = ind(x_flag_selected .eq. 0) + if (.not. ismissing(idx(0))) then + bin_sum(work, workpts, gridlon, gridlat, \ + xlon(idx), xlat(idx), xselected(idx)) + grid2d_avg_noprecip(itime, ilev, :, :) = \ + grid2d_avg_noprecip(itime, ilev, :, :) + work + gridpts2d_avg_noprecip(itime, ilev, :, :) = \ + gridpts2d_avg_noprecip(itime, ilev, :, :) + workpts + work = 0. + workpts = 0 + end if + delete(idx) + ; *** in-cloud average, no precipitation *** + ; Filter cases that have been flagged with + ; precipitation or as unknown. + idx = ind((xselected .gt. 0.) .and. (x_flag_selected .eq. 0)) + if (.not. ismissing(idx(0))) then + bin_sum(work, workpts, gridlon, gridlat, \ + xlon(idx), xlat(idx), xselected(idx)) + grid2d_noprecip(itime, ilev, :, :) = \ + grid2d_noprecip(itime, ilev, :, :) + work + gridpts2d_noprecip(itime, ilev, :, :) = \ + gridpts2d_noprecip(itime, ilev, :, :) + workpts + work = 0. 
+ workpts = 0 + end if + delete(idx) + + delete(xselected) + delete(x_flag_selected) + delete(xlon) + delete(xlat) + end if + delete(ii) + end do ; loop over implemented 2-dim variables (lwp, iwp) + + delete(x) + delete(x_flag) + delete(lat) + delete(lon) + delete(l1d) + delete(i1d) +; delete(time) + + ; remove temporary symbolic links + + system("rm -f " + slink1) + system("rm -f " + slink2) + + print("CPU time for orbit: " + tostring(get_cpu_time() - begTime) \ + + " s") + end do ; loop over all orbits per day + delete(fs) + end do ; loop over days + + ; ========================= + ; 3-dim cloud water content + ; ========================= + + ; calculate monthly averages + + gridpts = where(gridpts .eq. 0, gridpts@_FillValue, gridpts) + gridpts_avg = where(gridpts_avg .eq. 0, gridpts_avg@_FillValue, \ + gridpts_avg) + gridpts_noprecip = where(gridpts_noprecip .eq. 0, \ + gridpts_noprecip@_FillValue, gridpts_noprecip) + gridpts_avg_noprecip = where(gridpts_avg_noprecip .eq. 0, \ + gridpts_avg_noprecip@_FillValue, \ + gridpts_avg_noprecip) + + grid = grid / gridpts + grid_avg = grid_avg / gridpts_avg + grid_noprecip = grid_noprecip / gridpts_noprecip + grid_avg_noprecip = grid_avg_noprecip / gridpts_avg_noprecip + + ; convert from mg/m3 to kg/kg using air density from + ; US standard atmosphere + + grid = grid / conform(grid, air_density, 1) * 1.e-6 + grid_avg = grid_avg / conform(grid_avg, air_density, 1) * 1.e-6 + grid_noprecip = grid_noprecip / \ + conform(grid_noprecip, air_density, 1) * 1.e-6 + grid_avg_noprecip = grid_avg_noprecip / \ + conform(grid_avg_noprecip, air_density, 1) * 1.e-6 + + ; ------------------------------------------------------------------------ + + ; save results to files + + ; calculate coordinate bounds + bounds = guess_coord_bounds(grid, freq) + + ; *** in-cloud, all *** + + format_coords(grid, calyear + "0101", calyear + "1231", freq) + ; Set variable attributes + output = format_variable(grid, outvar(0), CMOR_TABLE(0)) +; delete(grid) + ; Set global attributes + comment = "Liquid-only Radar-only Liquid Water Content (in-cloud, all)" + gAtt = set_global_atts(obsname, tier, source, ref, comment) + ; output filename + datestr = calyear + "01-" + calyear + "12" + fout = output_dir_path + \ + str_join((/"OBS", obsname, type, version + "-in-cloud-all", \ + mip, outvar(0), datestr/), "_") + ".nc" + ; write variable + write_nc(fout, outvar(0), output, bounds, gAtt) + delete(output) + delete(gAtt) + + ; *** gridbox average, all *** + + format_coords(grid_avg, calyear + "0101", calyear + "1231", freq) + ; Set variable attributes + output = format_variable(grid_avg, outvar(0), CMOR_TABLE(0)) + ; Set global attributes + comment = "Liquid-only Radar-only Liquid Water Content " \ + + "(gridbox average, all)" + gAtt = set_global_atts(obsname, tier, source, ref, comment) + ; output filename + datestr = calyear + "01-" + calyear + "12" + fout = output_dir_path + \ + str_join((/"OBS", obsname, type, version + "-gridbox-average-all", \ + mip, outvar(0), datestr/), "_") + ".nc" + ; write variable + write_nc(fout, outvar(0), output, bounds, gAtt) + delete(output) + delete(gAtt) + + ; *** in-cloud, no precipitation only *** + + format_coords(grid_noprecip, calyear + "0101", calyear + "1231", freq) + ; Set variable attributes + output = format_variable(grid_noprecip, outvar(0), CMOR_TABLE(0)) + ; Set global attributes + comment = "Liquid-only Radar-only Liquid Water Content " \ + + "(in-cloud, no precipitation)" + gAtt = set_global_atts(obsname, tier, source, ref, comment) + ; 
output filename + datestr = calyear + "01-" + calyear + "12" + fout = output_dir_path + \ + str_join((/"OBS", obsname, type, version + "-in-cloud-noprecip", \ + mip, outvar(0), datestr/), "_") + ".nc" + ; write variable + write_nc(fout, outvar(0), output, bounds, gAtt) + delete(output) + delete(gAtt) + + ; *** gridbox average, no precipitation only *** + + format_coords(grid_avg_noprecip, calyear + "0101", calyear + "1231", freq) + ; Set variable attributes + output = format_variable(grid_avg_noprecip, outvar(0), CMOR_TABLE(0)) + ; Set global attributes + comment = "Liquid-only Radar-only Liquid Water Content " \ + + "(gridbox average, no precipitation)" + gAtt = set_global_atts(obsname, tier, source, ref, comment) + ; output filename + datestr = calyear + "01-" + calyear + "12" + fout = output_dir_path + \ + str_join((/"OBS", obsname, type, version + "-gridbox-average-noprecip", \ + mip, outvar(0), datestr/), "_") + ".nc" + ; write variable + write_nc(fout, outvar(0), output, bounds, gAtt) + delete(output) + delete(gAtt) + + ; =================================== + ; 2-dim cloud liquid / ice water path + ; =================================== + + ; calculate monthly averages + + gridpts2d = where(gridpts2d .eq. 0, gridpts2d@_FillValue, gridpts2d) + gridpts2d_avg = where(gridpts2d_avg .eq. 0, gridpts2d_avg@_FillValue, \ + gridpts2d_avg) + gridpts2d_noprecip = where(gridpts2d_noprecip .eq. 0, \ + gridpts2d_noprecip@_FillValue, \ + gridpts2d_noprecip) + gridpts2d_avg_noprecip = where(gridpts2d_avg_noprecip .eq. 0, \ + gridpts2d_avg_noprecip@_FillValue, \ + gridpts2d_avg_noprecip) + + grid2d = grid2d / gridpts2d + grid2d_avg = grid2d_avg / gridpts2d_avg + grid2d_noprecip = grid2d_noprecip / gridpts2d_noprecip + grid2d_avg_noprecip = grid2d_avg_noprecip / gridpts2d_avg_noprecip + + ; convert from g/m2 to kg/m2 + + grid2d = grid2d * 1.e-3 + grid2d_avg = grid2d_avg * 1.e-3 + grid2d_noprecip = grid2d_noprecip * 1.e-3 + grid2d_avg_noprecip = grid2d_avg_noprecip * 1.e-3 + + ; ------------------------------------------------------------------------ + + ; save results to files + + do ivar = 1, 3 + if (outvar(ivar) .eq. "lwp") then + ilev = 0 + else if (outvar(ivar) .eq. "clivi") then + ilev = 1 + else if (outvar(ivar) .eq. 
"clwvi") then + ; calculate clwvi (lwp+iwp) as sum of output fields for lwp and iwp + ilev = 2 + grid2d(:, ilev, :, :) = grid2d(:, 0, :, :) \ + + grid2d(:, 1, :, :) + grid2d_avg(:, ilev, :, :) = grid2d_avg(:, 0, :, :) \ + + grid2d_avg(:, 1, :, :) + grid2d_noprecip(:, ilev, :, :) = grid2d_noprecip(:, 0, :, :) \ + + grid2d_noprecip(:, 1, :, :) + grid2d_avg_noprecip(:, ilev, :, :) = grid2d_avg_noprecip(:, 0, :, :) \ + + grid2d_avg_noprecip(:, 1, :, :) + end if + end if + end if + + out_grid = grid2d(:, ilev, :, :) + out_grid_avg = grid2d_avg(:, ilev, :, :) + out_grid_noprecip = grid2d_noprecip(:, ilev, :, :) + out_grid_avg_noprecip = grid2d_avg_noprecip(:, ilev, :, :) + + ; calculate coordinate bounds + bounds = guess_coord_bounds(out_grid, freq) + + ; *** in-cloud, all *** + + format_coords(out_grid, calyear + "0101", calyear + "1231", freq) + ; Set variable attributes + output = format_variable(out_grid, outvar(ivar), CMOR_TABLE(ivar)) + delete(out_grid) + ; Set global attributes + comment = "Liquid-only Radar-only Liquid Water Content (in-cloud, all)" + gAtt = set_global_atts(obsname, tier, source, ref, comment) + ; output filename + datestr = calyear + "01-" + calyear + "12" + fout = output_dir_path + \ + str_join((/"OBS", obsname, type, version + "-in-cloud-all", \ + mip, outvar(ivar), datestr/), "_") + ".nc" + ; write variable + write_nc(fout, outvar(ivar), output, bounds, gAtt) + delete(output) + delete(gAtt) + + ; *** gridbox average, all *** + + format_coords(out_grid_avg, calyear + "0101", calyear + "1231", freq) + ; Set variable attributes + output = format_variable(out_grid_avg, outvar(ivar), CMOR_TABLE(ivar)) + delete(out_grid_avg) + ; Set global attributes + comment = "Liquid-only Radar-only Liquid Water Content " \ + + "(gridbox average, all)" + gAtt = set_global_atts(obsname, tier, source, ref, comment) + ; output filename + datestr = calyear + "01-" + calyear + "12" + fout = output_dir_path + \ + str_join((/"OBS", obsname, type, version + "-gridbox-average-all", \ + mip, outvar(ivar), datestr/), "_") + ".nc" + ; write variable + write_nc(fout, outvar(ivar), output, bounds, gAtt) + delete(output) + delete(gAtt) + + ; *** in-cloud, no precipitation only *** + + format_coords(out_grid_noprecip, calyear + "0101", calyear + "1231", freq) + ; Set variable attributes + output = format_variable(out_grid_noprecip, outvar(ivar), CMOR_TABLE(ivar)) + delete(out_grid_noprecip) + ; Set global attributes + comment = "Liquid-only Radar-only Liquid Water Content " \ + + "(in-cloud, no precipitation)" + gAtt = set_global_atts(obsname, tier, source, ref, comment) + ; output filename + datestr = calyear + "01-" + calyear + "12" + fout = output_dir_path + \ + str_join((/"OBS", obsname, type, version + "-in-cloud-noprecip", \ + mip, outvar(ivar), datestr/), "_") + ".nc" + ; write variable + write_nc(fout, outvar(ivar), output, bounds, gAtt) + delete(output) + delete(gAtt) + + ; *** gridbox average, no precipitation only *** + + format_coords(out_grid_avg_noprecip, calyear + "0101", calyear + "1231", \ + freq) + ; Set variable attributes + output = format_variable(out_grid_avg_noprecip, outvar(ivar), \ + CMOR_TABLE(ivar)) + delete(out_grid_avg_noprecip) + ; Set global attributes + comment = "Liquid-only Radar-only Liquid Water Content " \ + + "(gridbox average, no precipitation)" + gAtt = set_global_atts(obsname, tier, source, ref, comment) + ; output filename + datestr = calyear + "01-" + calyear + "12" + fout = output_dir_path + \ + str_join((/"OBS", obsname, type, version + 
"-gridbox-average-noprecip", \ + mip, outvar(ivar), datestr/), "_") + ".nc" + ; write variable + write_nc(fout, outvar(ivar), output, bounds, gAtt) + delete(output) + delete(gAtt) + end do ; loop over implemented 2-dim variables (lwp, iwp, lwp+iwp) + end do ; loop over years + +end diff --git a/esmvaltool/cmorizers/data/formatters/datasets/cmap.py b/esmvaltool/cmorizers/data/formatters/datasets/cmap.py new file mode 100644 index 0000000000..fecd2b128e --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/cmap.py @@ -0,0 +1,69 @@ +"""ESMValTool CMORizer for CMAP (CPC Merged Analysis of Precipitation) data. + +Tier + Tier 2: other freely-available dataset. + +Source + https://psl.noaa.gov/data/gridded/data.cmap.html + +Last access + 20240909 + +Download and processing instructions + To facilitate the download, the links to the ftp server are provided. + + https://downloads.psl.noaa.gov/Datasets/cmap/enh/ + precip.mon.mean.nc + +Caveats + +""" + +import logging +import re +from copy import deepcopy +from pathlib import Path + +import iris + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def _extract_variable(short_name, var, cfg, raw_filepath, out_dir): + cmor_info = cfg["cmor_table"].get_variable(var["mip"], short_name) + attributes = deepcopy(cfg["attributes"]) + attributes["mip"] = var["mip"] + + cubes = iris.load(raw_filepath) + for cube in cubes: + assert cube.units == "mm/day", f"unknown units:{cube.units}" + # convert data from mm/day to kg m-2 s-1 + # mm/day ~ density_water * mm/day + # = 1000 kg m-3 * 1/(1000*86400) m s-1 = 1/86400 kg m-2 s-1 + cube = cube / 86400 + cube.units = "kg m-2 s-1" + + utils.fix_var_metadata(cube, cmor_info) + cube = utils.fix_coords(cube) + utils.set_global_atts(cube, attributes) + + logger.info("Saving file") + utils.save_variable(cube, short_name, out_dir, attributes, + unlimited_dimensions=["time"]) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + for short_name, var in cfg["variables"].items(): + logger.info("CMORizing variable '%s'", short_name) + short_name = var["short_name"] + raw_filenames = Path(in_dir).rglob("*.nc") + filenames = [] + for raw_filename in raw_filenames: + if re.search(var["file"], str(raw_filename)) is not None: + filenames.append(raw_filename) + + for filename in sorted(filenames): + _extract_variable(short_name, var, cfg, filename, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/cowtanway.py b/esmvaltool/cmorizers/data/formatters/datasets/cowtanway.py new file mode 100644 index 0000000000..dc2073f825 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/cowtanway.py @@ -0,0 +1,79 @@ +"""ESMValTool CMORizer for CowtanWay. + +Tier + Tier 2: other freely-available dataset. 
+ +Source + https://www-users.york.ac.uk/~kdc3/papers/coverage2013/series.html + +Last access + 20200226 + +Download and processing instructions + Download the following files: + 'had4_krig_v1_0_0.nc.gz' + 'had4_uah_v1_0_0.nc.gz' + 'had4_short_krig_v2_0_0.nc.gz' + 'had4_short_uah_v2_0_0.nc.gz' + 'ghcn_short_krig_v2_0_0.nc.gz' + 'ghcn_short_uah_v2_0_0.nc.gz' + 'had4sst4_krig_v2_0_0.nc.gz' + 'had4_krig_v2_0_0.nc.gz' +""" + +import logging +import os + +import iris +from iris import NameConstraint + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def _extract_variable(short_name, var, vkey, version, cfg, filepath, out_dir): + """Extract variable.""" + raw_var = var.get('raw', short_name) + cube = iris.load_cube(filepath, NameConstraint(var_name=raw_var)) + + # Fix units + cmor_info = cfg['cmor_table'].get_variable(var['mip'], short_name).copy() + cube.convert_units(cmor_info.units) + utils.convert_timeunits(cube, 1950) + + # Fix coordinates + cube = utils.fix_coords(cube) + if 'height2m' in cmor_info.dimensions: + utils.add_height2m(cube) + + # Fix metadata + attrs = cfg['attributes'].copy() + attrs['mip'] = var['mip'] + attrs['version'] = version + baseline = cfg['attributes']['baseline'][vkey] + attrs['baseline'] = baseline + attrs['comment'] = attrs['comment'].format(baseline=baseline) + utils.fix_var_metadata(cube, cmor_info) + utils.set_global_atts(cube, attrs) + + # Save variable + utils.save_variable(cube, + short_name, + out_dir, + attrs, + unlimited_dimensions=['time']) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + raw_filepath = os.path.join(in_dir, cfg['filename']) + + # Run the cmorization + for (short_name, var) in cfg['variables'].items(): + for (vkey, version) in cfg['attributes']['version'].items(): + logger.info("CMORizing variable '%s' version '%s'", short_name, + version) + filepath = raw_filepath.format(version=version) + _extract_variable(short_name, var, vkey, version, cfg, filepath, + out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/cru.py b/esmvaltool/cmorizers/data/formatters/datasets/cru.py new file mode 100644 index 0000000000..28d1f9fb7e --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/cru.py @@ -0,0 +1,106 @@ +"""ESMValTool CMORizer for CRU data. + +Tier + Tier 2: other freely-available dataset. + +Source + TS4.02: https://crudata.uea.ac.uk/cru/data/hrg/cru_ts_4.02/cruts.1811131722.v4.02/ # noqa: E501 + TS4.06: https://crudata.uea.ac.uk/cru/data/hrg/cru_ts_4.06/cruts.2205201912.v4.06/ # noqa: E501 + TS4.07: https://crudata.uea.ac.uk/cru/data/hrg/cru_ts_4.07/cruts.2304141047.v4.07/ # noqa: E501 + +Last access + TS4.02: 20190516 + TS4.06: 20231012 + TS4.07: 20231012 + +Download and processing instructions + Download the following files: + ``{raw_name}/cru_ts4.{X}.1901.{end_year}.{raw_name}.dat.nc.gz`` + where ``{raw_name}`` is the name of the desired variable(s) or run + ``esmvaltool data download CRU`` for the latest version +""" + +import logging +import os + +import cftime +import iris +import numpy as np +from cf_units import Unit +from iris import NameConstraint + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def _center_timecoord(cube): + """Set time coordinates to exact center of each month. + + CRU timepoints are not in the center of the month and added bounds + by utils.fix_coords are incorrect. 
#1981 + """ + time = cube.coord("time") + times = time.units.num2date(time.points) + + # get bounds + starts = [cftime.DatetimeNoLeap(c.year, c.month, 1) for c in times] + ends = [ + cftime.DatetimeNoLeap(c.year, c.month + 1, 1) + if c.month < 12 else cftime.DatetimeNoLeap(c.year + 1, 1, 1) + for c in times + ] + time.bounds = time.units.date2num(np.stack([starts, ends], -1)) + time.points = [np.mean((t1, t2)) for t1, t2 in time.bounds] + + +def _extract_variable(short_name, var, cfg, filepath, out_dir): + """Extract variable.""" + raw_var = var.get("raw", short_name) + version = cfg["attributes"]["version"] + cube = iris.load_cube(filepath, NameConstraint(var_name=raw_var)) + + # Fix units + if "raw_units" in var: + cube.units = var["raw_units"] + cmor_info = cfg["cmor_table"].get_variable(var["mip"], short_name) + cube.convert_units(cmor_info.units) + if version in ["TS4.02"]: + utils.convert_timeunits(cube, 1950) + else: + cube.coord("time").convert_units( + Unit("days since 1950-1-1 00:00:00", calendar="gregorian")) + + # Fix coordinates + cube = utils.fix_coords(cube) + if "height2m" in cmor_info.dimensions: + utils.add_height2m(cube) + if version not in ["TS4.02"]: + _center_timecoord(cube) + + # Fix metadata + attrs = cfg["attributes"] + attrs["mip"] = var["mip"] + utils.fix_var_metadata(cube, cmor_info) + utils.set_global_atts(cube, attrs) + + # Save variable + utils.save_variable(cube, + short_name, + out_dir, + attrs, + unlimited_dimensions=["time"]) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + raw_filepath = os.path.join(in_dir, cfg["filename"]) + + # Run the cmorization + for short_name, var in cfg["variables"].items(): + logger.info("CMORizing variable '%s'", short_name) + raw_var = var.get("raw", short_name) + filepath = raw_filepath.format(raw_name=raw_var) + if filepath is None: + continue + _extract_variable(short_name, var, cfg, filepath, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/ct2019.py b/esmvaltool/cmorizers/data/formatters/datasets/ct2019.py new file mode 100644 index 0000000000..64f64f4e82 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/ct2019.py @@ -0,0 +1,191 @@ +"""ESMValTool CMORizer for CT2019 data. + +Tier + Tier 2: other freely-available dataset. + +Source + https://www.esrl.noaa.gov/gmd/ccgg/carbontracker/index.php + +Last access + 20200323 + +Download and processing instructions + Create a new empty directory ``$RAWOBSPATH/Tier2/CT2019`` (where + ``$RAWOBSPATH`` is given in the configuration) where the raw + data will be stored. The download of the data is automatically handled by + this script. If data is already present in this directory, the download is + skipped (to force a new download delete your old files). 
+""" + +import fnmatch +import glob +import logging +import os +import warnings +from ftplib import FTP +from pprint import pformat + +import dask.array as da +import iris + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def _add_aux_coords(cube, input_files, coords_to_add): + """Add additional auxiliary coordinates to cube.""" + for (coord_name, coord_dims) in coords_to_add.items(): + logger.info("Adding auxiliary coordinate '%s' to '%s'", coord_name, + cube.var_name) + coord_cube = _load_cube(input_files, coord_name) + coord_cube = utils.fix_coords(coord_cube) + dim_coords = [c.name() for c in coord_cube.coords(dim_coords=True)] + if 'boundary' in dim_coords: + (points, bounds) = _interpolate_center(coord_cube) + attributes = { + 'comment': + 'Coordinate points where estimated as arithmetic ' + 'mean from given coordinate bounds', + } + else: + points = coord_cube.core_data() + bounds = None + attributes = {} + if coord_cube.long_name == 'air_pressure': + coord_cube.long_name = 'pressure' + coord_cube.standard_name = 'air_pressure' + coord_cube.var_name = 'plev' + aux_coord = iris.coords.AuxCoord( + points, + bounds=bounds, + var_name=coord_cube.var_name, + standard_name=coord_cube.standard_name, + long_name=coord_cube.long_name, + units=coord_cube.units, + attributes=attributes, + ) + cube.add_aux_coord(aux_coord, coord_dims) + + +def _download_files(in_dir, cfg): + """Download input files using FTP.""" + logger.info("Downloading data from FTP server %s", cfg['ftp_host']) + logger.info("Looking for files matching %s", + os.path.join(cfg['data_dir'], cfg['input_file_pattern'])) + input_files = [] + with FTP(cfg['ftp_host']) as ftp_client: + logger.info(ftp_client.getwelcome()) + ftp_client.login() + ftp_client.cwd(cfg['data_dir']) + files_to_download = fnmatch.filter(ftp_client.nlst(), + cfg['input_file_pattern']) + for filename in files_to_download: + logger.info("Downloading %s", filename) + new_path = os.path.join(in_dir, filename) + with open(new_path, mode='wb') as outfile: + ftp_client.retrbinary(f'RETR {filename}', outfile.write) + input_files.append(new_path) + return input_files + + +def _get_input_files(in_dir, cfg): + """Get input files.""" + pattern = os.path.join(in_dir, cfg['input_file_pattern']) + input_files = glob.glob(pattern) + if not input_files: + input_files = _download_files(in_dir, cfg) + logger.debug("Found input files:\n%s", pformat(input_files)) + return input_files + + +def _interpolate_center(cube, axis=1): + """Interpolate center value for grid cells when only boundary is given.""" + indices = [slice(None)] * cube.ndim + idx_all_but_first = indices.copy() + idx_all_but_first[axis] = slice(1, None, None) + idx_all_but_last = indices.copy() + idx_all_but_last[axis] = slice(None, -1, None) + data_all_but_first = cube.core_data()[tuple(idx_all_but_first)] + data_all_but_last = cube.core_data()[tuple(idx_all_but_last)] + points = (data_all_but_first + data_all_but_last) / 2.0 + bounds = da.stack((data_all_but_last, data_all_but_first), axis=-1) + return (points, bounds) + + +def _remove_attributes(cubes): + """Remove attributes from cubes that prevent concatenation.""" + for cube in cubes: + cube.attributes.pop('history', None) + cube.attributes.pop('nco_input_file_list', None) + cube.attributes.pop('nco_input_file_number', None) + cube.attributes.pop('nco_openmp_thread_number', None) + cube.attributes.pop('NCO', None) + cube.attributes.pop('version', None) + + +def _load_cube(input_files, constraints): + 
"""Load single :class:`iris.cube.Cube`.""" + with warnings.catch_warnings(): + warnings.filterwarnings( + 'ignore', + message='Ignoring netCDF variable', + category=UserWarning, + module='iris', + ) + cubes = iris.load(input_files, constraints) + _remove_attributes(cubes) + try: + cube = cubes.concatenate_cube() + except iris.exceptions.ConcatenateError: + if cubes[0].coords('time'): + raise + cube = cubes[0] + return cube + + +def _extract_variable(short_name, var, cfg, input_files, out_dir): + """Extract variable.""" + cmor_info = cfg['cmor_table'].get_variable(var['mip'], short_name) + + # Extract data + constraint = var.get('raw_long_name', cmor_info.standard_name) + cube = _load_cube(input_files, constraint) + cube.var_name = short_name + + # Add auxiliary variables + _add_aux_coords(cube, input_files, var.get('add_aux_coords', {})) + + # Variable specific operations + if short_name == 'co2s': + cube = cube[:, 0, :, :] + cube.remove_coord('level') + + # Fix units + cube.convert_units(cmor_info.units) + utils.convert_timeunits(cube, 1950) + + # Fix coordinates + cube = utils.fix_coords(cube) + + # Fix metadata + attrs = cfg['attributes'] + attrs['mip'] = var['mip'] + utils.fix_var_metadata(cube, cmor_info) + utils.set_global_atts(cube, attrs) + + # Save variable + utils.save_variable(cube, + short_name, + out_dir, + attrs, + unlimited_dimensions=['time']) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + input_files = _get_input_files(in_dir, cfg) + # Run the cmorization + for (short_name, var) in cfg['variables'].items(): + logger.info("CMORizing variable '%s'", short_name) + _extract_variable(short_name, var, cfg, input_files, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/duveiller2018.py b/esmvaltool/cmorizers/data/formatters/datasets/duveiller2018.py new file mode 100644 index 0000000000..a793f8cbb1 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/duveiller2018.py @@ -0,0 +1,147 @@ +"""ESMValTool CMORizer for Duveiller2018 data. + +Tier + Tier 2: other freely-available dataset. + +Source + https://ndownloader.figshare.com/files/9969496 + +Last access + 20190430 + +Download and processing instructions + - Download the dataset albedo_IGBPgen.nc and save in the right directory + according to ESMValTool practices. + - Complete the CMOR-config specifications (see instructions in the file + itself) + - Run cmorize_obs + +Modification history + 20190430-crezee_bas: written based on cmorize_obs_Landschuetzer2016.py. + +Caveats + Please be aware that the selected vegetation transition code is not written + to the filename, since this would break the ESMValTool file naming + conventions. 
+""" + +import datetime +import logging +import os +from warnings import catch_warnings, filterwarnings + +import cf_units +import iris +import numpy as np + +from esmvaltool.cmorizers.data.utilities import ( + fix_coords, + fix_var_metadata, + save_variable, + set_global_atts, +) + +logger = logging.getLogger(__name__) + + +def fix_time_coord_duveiller2018(cube): + """Fix the time coordinate for dataset Duveiller2018.""" + # Rename 'Month' to 'time' + cube.coord('Month').rename('time') + + # Create arrays for storing datetime objects + custom_time = np.zeros((12), dtype=object) + custom_time_bounds = np.empty((12, 2), dtype=object) + custom_time_units = 'days since 1950-01-01 00:00:00.0' + + # Now fill the object arrays defined above with datetime objects + # corresponding to correct time and time_bnds + for i in range(custom_time_bounds.shape[0]): + n_month = i + 1 # we start with month number 1, at position 0 + # Start with time_bnds + time_bnd_a = datetime.datetime(2010, n_month, 1) + if n_month == 12: + time_bnd_b = datetime.datetime(2011, 1, 1) + else: + time_bnd_b = datetime.datetime(2010, n_month + 1, 1) + # Get time 'point' from midpoint between bnd_a and bnd_b + time_midpoint = time_bnd_a + 0.5 * (time_bnd_b - time_bnd_a) + custom_time_bounds[n_month - 1, 0] = time_bnd_a + custom_time_bounds[n_month - 1, 1] = time_bnd_b + custom_time[n_month - 1] = time_midpoint + + # Convert them + time_bnds = cf_units.date2num(custom_time_bounds, custom_time_units, + cf_units.CALENDAR_GREGORIAN) + time_midpoints = cf_units.date2num(custom_time, custom_time_units, + cf_units.CALENDAR_GREGORIAN) + + # Add them to the cube + cube.coord('time').bounds = time_bnds + cube.coord('time').points = time_midpoints + + # Set the correct time unit, as defined above + cube.coord('time').units = cf_units.Unit(custom_time_units) + + +def extract_variable(var_info, raw_info, out_dir, attrs): + """Extract to all vars.""" + var = var_info.short_name + with catch_warnings(): + filterwarnings( + action='ignore', + message='Ignoring netCDF variable .* invalid units .*', + category=UserWarning, + module='iris', + ) + cubes = iris.load(raw_info['file']) + rawvar = raw_info['name'] + for cube in cubes: + if cube.var_name == rawvar: + # Extracting a certain vegetation transition code + itr = raw_info['iTr'] + itr_index = np.where( + cube.coord('Vegetation transition code').points == itr)[0][0] + cube = cube[itr_index, :, :, :] + # Add the vegetation transition code as an attribute + cube.attributes['Vegetation transition code'] = itr + # Remove it as a coordinate, since otherwise it would + # violate CMOR standards + cube.remove_coord('Vegetation transition code') + # Fix metadata + fix_var_metadata(cube, var_info) + # Fix coords + cube = fix_coords(cube) + # Now set the time coordinate properly + fix_time_coord_duveiller2018(cube) + # Global attributes + set_global_atts(cube, attrs) + save_variable(cube, var, out_dir, attrs, local_keys=['positive']) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + cmor_table = cfg['cmor_table'] + glob_attrs = cfg['attributes'] + + logger.info("Starting cmorization for Tier%s OBS files: %s", + glob_attrs['tier'], glob_attrs['dataset_id']) + logger.info("Input data from: %s", in_dir) + logger.info("Output will be written to: %s", out_dir) + + # run the cmorization + for var, vals in cfg['variables'].items(): + inpfile = os.path.join(in_dir, vals['file']) + logger.info("CMORizing var %s from file %s", var, inpfile) + var_info = 
cmor_table.get_variable(vals['mip'], var) + raw_info = {'name': vals['raw'], 'file': inpfile, 'iTr': vals['iTr']} + glob_attrs['mip'] = vals['mip'] + with catch_warnings(): + filterwarnings( + action='ignore', + message=('WARNING: missing_value not used since it\n' + 'cannot be safely cast to variable data type'), + category=UserWarning, + module='iris', + ) + extract_variable(var_info, raw_info, out_dir, glob_attrs) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/e_obs.py b/esmvaltool/cmorizers/data/formatters/datasets/e_obs.py new file mode 100644 index 0000000000..02c0bba5aa --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/e_obs.py @@ -0,0 +1,154 @@ +"""ESMValTool CMORizer for E-OBS data. + +Tier + Tier 2: other freely-available dataset. + +Source + http://surfobs.climate.copernicus.eu/dataaccess/access_eobs.php#datafiles + +Last access + 20200225 + +Download and processing instructions + Download the ensemble mean files for: + TG TN TX RR PP +""" + +import logging +import os + +import iris +import numpy as np +from cf_units import Unit +from esmvalcore.preprocessor import monthly_statistics +from iris import NameConstraint + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def fix_coords_non_symetric_lon(cube): + """Fix the time units and values to CMOR standards.""" + # first fix any completely missing coord var names + utils.fix_dim_coordnames(cube) + # fix individual coords + for cube_coord in cube.coords(): + # fix time + if cube_coord.var_name == 'time': + logger.info("Fixing time...") + cube.coord('time').convert_units( + Unit('days since 1950-1-1 00:00:00', calendar='gregorian')) + utils.fix_bounds(cube, cube.coord('time')) + + # fix longitude + if cube_coord.var_name == 'lon': + logger.info("Fixing longitude...") + if cube_coord.ndim == 1: + if cube_coord.points[0] < 0. and \ + cube_coord.points[-1] < 181.: + lon_coord = cube.coord('longitude').copy() + lons_below_0 = lon_coord.points[lon_coord.points < 0.] + \ + 360. + lons_above_0 = lon_coord.points[lon_coord.points >= 0.] + lons = np.hstack((lons_above_0, lons_below_0)) + cube_coord.points = lons + + utils.fix_bounds(cube, cube_coord) + cube.attributes['geospatial_lon_min'] = 0. + cube.attributes['geospatial_lon_max'] = 360. 
+ utils.roll_cube_data(cube, len(lons_above_0), -1) + + # fix latitude + if cube_coord.var_name == 'lat': + logger.info("Fixing latitude...") + utils.fix_bounds(cube, cube.coord('latitude')) + + # fix depth + if cube_coord.var_name == 'lev': + logger.info("Fixing depth...") + utils.fix_bounds(cube, cube.coord('depth')) + + # fix air_pressure + if cube_coord.var_name == 'air_pressure': + logger.info("Fixing air pressure...") + utils.fix_bounds(cube, cube.coord('air_pressure')) + + # remove CS + cube.coord('latitude').coord_system = None + cube.coord('longitude').coord_system = None + + return cube + + +def _extract_variable(short_name, var, res, cfg, filepath, out_dir): + """Extract variable.""" + raw_var = var.get('raw', short_name) + cube = iris.load_cube(filepath, NameConstraint(var_name=raw_var)) + + # Fix units + cmor_info = cfg['cmor_table'].get_variable(var['mip'], short_name) + cube.units = var.get('raw_units', short_name) + cube.convert_units(cmor_info.units) + utils.convert_timeunits(cube, 1950) + + # Fix coordinates + fix_coords_non_symetric_lon(cube) + if 'height2m' in cmor_info.dimensions: + utils.add_height2m(cube) + + # Fix metadata + utils.fix_var_metadata(cube, cmor_info) + attrs = cfg['attributes'].copy() + attrs['version'] = 'v' + attrs['version'] + '-' + str(res) + attrs.pop('resolution') + attrs['mip'] = var['mip'] + utils.set_global_atts(cube, attrs) + + # Save variable + utils.save_variable(cube, + short_name, + out_dir, + attrs, + unlimited_dimensions=['time']) + + ##### + # also derive monthly data + if 'add_mon' in var: + if var['add_mon']: + logger.info("Building monthly means") + + # Calc monthly + cube = monthly_statistics(cube) + cube.remove_coord('month_number') + cube.remove_coord('year') + + # Fix metadata + attrs['mip'] = 'Amon' + + # Fix coordinates + fix_coords_non_symetric_lon(cube) + + # Save variable + utils.save_variable(cube, + short_name, + out_dir, + attrs, + unlimited_dimensions=['time']) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + raw_filepath = os.path.join(in_dir, cfg['filename']) + + # Run the cmorization + ver = cfg['attributes']['version'] + for res in cfg['attributes']['resolution'].values(): + for (short_name, var) in cfg['variables'].items(): + logger.info("CMORizing variable '%s' on %s°x%s°", short_name, res, + res) + raw_var = var.get('raw', short_name) + filepath = raw_filepath.format(raw_name=raw_var, + resolution=res, + version=ver) + _extract_variable(short_name, var, res, cfg, filepath, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/eppley_vgpm_modis.py b/esmvaltool/cmorizers/data/formatters/datasets/eppley_vgpm_modis.py new file mode 100644 index 0000000000..6fae2d2d1e --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/eppley_vgpm_modis.py @@ -0,0 +1,141 @@ +"""ESMValTool CMORizer for Eppley-VGPM-MODIS data from Oregon State University. + +Tier + +Source + http://orca.science.oregonstate.edu/data/1x2/monthly/eppley.r2018.m.chl.m.sst/hdf + +Last access + 20190515 + +Download and processing instructions + Download and unpack all the *.tar files under a single directory + (no subdirectories with years) in ${RAWOBS}/Tier2/Eppley-VGPM-MODIS + +Modification history + 20190515-lovato_tomas: written. 
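The `fix_coords_non_symetric_lon` function in e_obs.py above remaps longitudes from [-180, 180) to [0, 360) and rolls the data array to keep it aligned with the reordered coordinate. A small numpy sketch of that remap-and-roll step, 1-D and with synthetic values for brevity:

    import numpy as np

    lons = np.array([-170.0, -90.0, 0.0, 90.0, 170.0])
    data = np.arange(lons.size, dtype=float)  # one value per longitude

    above = lons[lons >= 0.0]                 # [0, 90, 170]
    below = lons[lons < 0.0] + 360.0          # [190, 270]
    new_lons = np.hstack((above, below))      # [0, 90, 170, 190, 270]
    # Roll by the number of non-negative longitudes, mirroring the
    # roll_cube_data(cube, len(lons_above_0), -1) call above
    new_data = np.roll(data, above.size)      # [2., 3., 4., 0., 1.]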
+""" + +import glob +import logging +import os + +import iris +import numpy as np +import pandas as pd +import xarray as xr + +from esmvaltool.cmorizers.data.utilities import ( + constant_metadata, + fix_coords, + fix_var_metadata, + save_variable, + set_global_atts, +) + +logger = logging.getLogger(__name__) + + +def _fix_data(cube, var): + """Specific data fixes for different variables.""" + logger.info("Fixing data ...") + with constant_metadata(cube): + if var == 'intpp': + cube /= 1000. * 12.01 * 86400. + return cube + + +def extract_variable(var_info, raw_info, out_dir, attrs): + """Extract to all vars.""" + var = var_info.short_name + cubes = iris.load(raw_info['file']) + rawvar = raw_info['name'] + + for cube in cubes: + if cube.var_name == rawvar: + fix_var_metadata(cube, var_info) + cube = fix_coords(cube) + _fix_data(cube, var) + set_global_atts(cube, attrs) + save_variable( + cube, + var, + out_dir, + attrs, + local_keys=['coordinates'], + unlimited_dimensions=['time'], + ) + + +def merge_data(in_dir, out_dir, raw_info): + """Merge all data into a single file.""" + data_array = [] + var = raw_info['name'] + filelist = sorted(glob.glob(in_dir + '/' + raw_info['file'] + '*.hdf')) + for filename in filelist: + dataset = xr.open_rasterio(filename).rename({ + 'y': 'lat', + 'x': 'lon' + }).squeeze().drop('band') + # create coordinates + dataset = dataset.assign_coords( + time=pd.to_datetime(filename[-11:-4], format='%Y%j')) + dataset = dataset.expand_dims(dim='time', axis=0) + spacing = 90. / dataset.lat.size + dataset = dataset.assign_coords( + lat=np.linspace(-90. + spacing, 90. - spacing, dataset.lat.size)) + dataset.lat.attrs = {'long_name': 'Latitude', 'units': 'degrees_north'} + dataset = dataset.assign_coords( + lon=np.linspace(-180. + spacing, 180. - spacing, dataset.lon.size)) + dataset.lon.attrs = {'long_name': 'Longitude', 'units': 'degrees_east'} + # get current file data + data_array.append(dataset) + damerge = xr.concat(data_array, dim='time') + + # need data flip to match coordinates + damerge.data = np.fliplr(damerge.data) + + # save to file + dataset = damerge.to_dataset(name=var) + thekeys = { + 'lat': { + '_FillValue': False + }, + 'lon': { + '_FillValue': False + }, + 'time': { + 'calendar': 'gregorian' + }, + var: { + '_FillValue': -9999.0 + } + } + filename = os.path.join(out_dir, raw_info['file'] + '_merged.nc') + dataset.to_netcdf(filename, encoding=thekeys, unlimited_dims='time') + + logger.info("Merged data written to: %s", filename) + + return filename + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + cmor_table = cfg['cmor_table'] + glob_attrs = cfg['attributes'] + + # run the cmorization + for var, vals in cfg['variables'].items(): + var_info = cmor_table.get_variable(vals['mip'], var) + glob_attrs['mip'] = vals['mip'] + raw_info = {'name': vals['raw'], 'file': vals['file']} + + # merge data + inpfile = merge_data(in_dir, out_dir, raw_info) + + logger.info("CMORizing var %s from file %s", var, inpfile) + raw_info['file'] = inpfile + extract_variable(var_info, raw_info, out_dir, glob_attrs) + + # Remove temporary input file + os.remove(inpfile) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/era_interim.py b/esmvaltool/cmorizers/data/formatters/datasets/era_interim.py new file mode 100644 index 0000000000..70c585c0b7 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/era_interim.py @@ -0,0 +1,513 @@ +"""ESMValTool CMORizer for ERA-Interim data. 
+
+Tier
+    Tier 3: restricted datasets (i.e., dataset which requires a registration
+    to be retrieved or provided upon request to the respective contact or PI).
+
+Source
+    http://apps.ecmwf.int/datasets/data/interim-full-moda/
+
+Last access
+    20190905
+
+Download and processing instructions
+    Select "ERA Interim Fields":
+        Daily: for daily values
+        Invariant: for time invariant variables (like land-sea mask)
+        Monthly Means of Daily Means: for monthly values
+        Monthly Means of Daily Forecast Accumulation: for accumulated
+            variables like precipitation or radiation fluxes
+    Select "Type of level" (Surface or Pressure levels)
+    Download the data on a single variable and single year basis, and save
+    them as ERA-Interim_<var>_<mean>_YYYY.nc, where <var> is the ERA-Interim
+    variable name and <mean> is either monthly or daily. Further, download
+    the "land-sea mask" from the "Invariant" data and save it as
+    ERA-Interim_lsm.nc.
+    It is also possible to download the data in an automated way; see:
+        https://confluence.ecmwf.int/display/WEBAPI/Access+ECMWF+Public+Datasets
+        https://confluence.ecmwf.int/display/WEBAPI/Python+ERA-interim+examples
+    A registration is required for downloading the data.
+    Alternatively, the script
+    esmvaltool/cmorizers/data/download_scripts/download_era-interim.py
+    can be used.
+    This cmorization script currently supports daily and monthly data of
+    the following variables:
+        10m u component of wind
+        10m v component of wind
+        2m dewpoint temperature
+        2m temperature
+        evaporation
+        maximum 2m temperature since previous post processing
+        mean sea level pressure
+        minimum 2m temperature since previous post processing
+        skin temperature
+        snowfall
+        surface net solar radiation
+        surface solar radiation downwards
+        temperature of snow layer
+        toa incident solar radiation
+        total cloud cover
+        total precipitation
+    and daily and monthly (not invariant) data of:
+        Geopotential
+    and monthly data of:
+        Inst. eastward turbulent surface stress
+        Inst. northward turbulent surface stress
+        Sea surface temperature
+        Surface net thermal radiation
+        Surface latent heat flux
+        Surface sensible heat flux
+        Relative humidity
+        Temperature
+        U component of wind
+        V component of wind
+        Vertical velocity
+        Specific humidity
+        net top solar radiation
+        net top solar radiation clear-sky
+        top net thermal radiation
+        top net thermal radiation clear-sky
+        fraction of cloud cover (3-dim)
+        vertical integral of condensed cloud water (ice and liquid)
+        vertical integral of cloud liquid water
+        vertical integral of cloud frozen water
+        total column water vapour
+        specific cloud liquid water content
+        specific cloud ice water content
+
+Caveats
+    Make sure to select the right steps for accumulated fluxes; see:
+        https://confluence.ecmwf.int/pages/viewpage.action?pageId=56658233
+        https://confluence.ecmwf.int/display/CKB/ERA-Interim%3A+monthly+means
+    for a detailed explanation.
+    The data are updated regularly: recent years are added, but the past
+    years are also sometimes corrected. To have a consistent timeseries, it
+    is therefore recommended to download the full timeseries and not just
+    add new years to a previous version of the data.
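As a concrete illustration of the naming scheme described above, here is a small glob over hypothetical paths (the directory and the t2m example are assumptions, not part of this script), including the year-suffix split that `_get_in_files_by_year` applies further below:

    from pathlib import Path

    in_dir = Path("/rawobs/Tier3/ERA-Interim")  # hypothetical location
    files = sorted(in_dir.glob("ERA-Interim_t2m_monthly_*.nc"))
    # e.g. ERA-Interim_t2m_monthly_1990.nc, ERA-Interim_t2m_monthly_1991.nc
    for path in files:
        year = path.stem.rsplit("_", maxsplit=1)[-1]  # "1990", "1991", ...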
+ +For further details on obtaining daily values from ERA-Interim, + see: + https://confluence.ecmwf.int/display/CKB/ERA-Interim + https://confluence.ecmwf.int/display/CKB/ERA-Interim+documentation#ERA-Interimdocumentation-Monthlymeans + https://confluence.ecmwf.int/display/CKB/ERA-Interim%3A+How+to+calculate+daily+total+precipitation +""" +import logging +import re +from collections import defaultdict +from concurrent.futures import ProcessPoolExecutor, as_completed +from copy import deepcopy +from datetime import datetime, timedelta +from os import cpu_count +from pathlib import Path +from warnings import catch_warnings, filterwarnings + +import iris +import numpy as np +from esmvalcore.cmor.table import CMOR_TABLES +from esmvalcore.preprocessor import daily_statistics, monthly_statistics +from iris import NameConstraint + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def _fix_units(cube, definition): + """Fix issues with the units.""" + if cube.var_name in {'evspsbl', 'pr', 'prsn'}: + # Change units from meters of water per day + # to kg of water per m2 per day + cube.units = 'm' # fix invalid units + cube.units = cube.units * 'kg m-3 day-1' + cube.data = cube.core_data() * 1000. + if cube.var_name in {'hfds', 'rss', 'rsds', 'rsdt', 'rlds'}: + # Add missing 'per day' + cube.units = cube.units * 'day-1' + # Radiation fluxes are positive in downward direction + cube.attributes['positive'] = 'down' + if cube.var_name in {'rlut', 'rlutcs'}: + # Add missing 'per day' + cube.units = cube.units * 'day-1' + # Radiation fluxes are positive in upward direction + cube.attributes['positive'] = 'up' + cube.data = cube.core_data() * -1. + if cube.var_name in {'rsut', 'rsutcs'}: + # Add missing 'per day' + cube.units = cube.units * 'day-1' + # Radiation fluxes are positive in upward direction + cube.attributes['positive'] = 'up' + if cube.var_name in {'tauu', 'tauv'}: + cube.attributes['positive'] = 'down' + if cube.var_name in {'sftlf', 'clt', 'cl', 'clt-low', 'clt-med', + 'clt-high'}: + # Change units from fraction to percentage + cube.units = definition.units + cube.data = cube.core_data() * 100. + if cube.var_name in {'zg', 'orog'}: + # Divide by acceleration of gravity [m s-2], + # required for geopotential height, see: + # https://apps.ecmwf.int/codes/grib/param-db?id=129 + cube.units = cube.units / 'm s-2' + cube.data = cube.core_data() / 9.80665 + if cube.var_name in {'cli', 'clw'}: + cube.units = 'kg kg-1' + + +def _fix_coordinates(cube, definition): + """Fix coordinates.""" + # Make latitude increasing + cube = cube[..., ::-1, :] + + # Make pressure_level decreasing + coord_long_name = [item.long_name for item in cube.coords()] + if 'pressure_level' in coord_long_name: + cube = cube[:, ::-1, ...] + + # Add scalar height coordinates + if 'height2m' in definition.dimensions: + utils.add_scalar_height_coord(cube, 2.) + if 'height10m' in definition.dimensions: + utils.add_scalar_height_coord(cube, 10.) 
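Two of the conversions in `_fix_units` above, shown as plain arithmetic. This is a worked example using only the constants implied by the code (a water density of 1000 kg m-3 behind the factor 1000, and standard gravity 9.80665 m s-2 from the cited GRIB parameter database); the input values are illustrative:

    rho_water = 1000.0                       # kg m-3
    g_0 = 9.80665                            # m s-2

    pr_m_per_day = 0.004                     # 4 mm of water in one day
    pr_kg_m2_day = pr_m_per_day * rho_water  # 4.0 kg m-2 day-1

    geopotential = 49033.25                  # m2 s-2
    height = geopotential / g_0              # exactly 5000.0 m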
+
+    for coord_def in definition.coordinates.values():
+        axis = coord_def.axis
+
+        # ERA-Interim cloud parameters are downloaded on pressure levels
+        # (CMOR standard = generic (hybrid) levels, alevel)
+        if axis == "" and coord_def.name == "alevel":
+            axis = "Z"
+            coord_def = CMOR_TABLES['CMIP6'].coords['plev19']
+
+        coord = cube.coord(axis=axis)
+        if axis == 'T':
+            coord.convert_units('days since 1850-1-1 00:00:00.0')
+        if axis == 'Z':
+            coord.convert_units(coord_def.units)
+        coord.standard_name = coord_def.standard_name
+        coord.var_name = coord_def.out_name
+        coord.long_name = coord_def.long_name
+        coord.points = coord.core_points().astype('float64')
+        if len(coord.points) > 1 and not coord.var_name == 'plev':
+            coord.guess_bounds()
+        if coord.var_name == 'plev':
+            coord.attributes['positive'] = 'down'
+    return cube
+
+
+def _fix_monthly_time_coord(cube):
+    """Set the monthly time coordinates to the middle of the month."""
+    coord = cube.coord(axis='T')
+    end = []
+    for cell in coord.cells():
+        month = cell.point.month + 1
+        year = cell.point.year
+        if month == 13:
+            month = 1
+            year = year + 1
+        end.append(cell.point.replace(month=month, year=year))
+    end = coord.units.date2num(end)
+    start = coord.points
+    coord.points = 0.5 * (start + end)
+    coord.bounds = np.column_stack([start, end])
+
+
+def _fix_monthly_time_coord_eiland(cube):
+    """Set the monthly time coordinates to the middle of the month."""
+    coord = cube.coord(axis='T')
+    start = []
+    end = []
+    for cell in coord.cells():
+        # set start to first day 00 UTC
+        start.append(cell.point.replace(day=1, hour=0))
+        # now deal with the end
+        month = cell.point.month + 1
+        year = cell.point.year
+        if month == 13:
+            month = 1
+            year = year + 1
+        end.append(cell.point.replace(month=month, year=year, day=1, hour=0))
+    end = coord.units.date2num(end)
+    start = coord.units.date2num(start)
+    coord.points = 0.5 * (start + end)
+    coord.bounds = np.column_stack([start, end])
+
+
+def _compute_monthly(cube):
+    """Convert various frequencies to monthly frequency.
+
+    ERA-Interim-Land is 6-hourly and needs to be converted to monthly.
+    """
+    cube = monthly_statistics(cube, operator='mean')
+    # Remove monthly statistics aux coordinates
+    cube.remove_coord(cube.coord('month_number'))
+    cube.remove_coord(cube.coord('year'))
+    return cube
+
+
+def _compute_daily(cube):
+    """Convert various frequencies to daily frequency.
+ + ERA-Interim is in 3hr or 6hr or 12hr freq need to convert to daily + Only variables with step 12 need accounting time 00 AM as time 24 PM + """ + # Account for time 00 AM as time 24 PM + if cube.var_name in { + 'tasmax', + 'tasmin', + 'pr', + 'rsds', + 'rlds', + 'hfds', + 'evspsbl', + 'rsdt', + 'rss', + 'prsn', + }: + cube.coord('time').points = cube.coord('time').units.date2num([ + cell.point - timedelta(seconds=1) + for cell in cube.coord('time').cells() + ]) + + if cube.var_name == 'tasmax': + cube = daily_statistics(cube, 'max') + elif cube.var_name == 'tasmin': + cube = daily_statistics(cube, 'min') + elif cube.var_name in { + 'pr', + 'rsds', + 'rlds', + 'hfds', + 'evspsbl', + 'rsdt', + 'rss', + 'prsn', + }: + cube = daily_statistics(cube, 'sum') + else: + cube = daily_statistics(cube, 'mean') + + # Correct the time coordinate + cube.coord('time').points = cube.coord('time').units.date2num([ + cell.point.replace(hour=12, minute=0, second=0, microsecond=0) + for cell in cube.coord('time').cells() + ]) + cube.coord('time').bounds = None + cube.coord('time').guess_bounds() + + return cube + + +def _load_cube(in_files, var): + """Load in_files into an iris cube.""" + ignore_warnings = ( + { + 'raw': 'cc', + 'units': '(0 - 1)', + }, + { + 'raw': 'tcc', + 'units': '(0 - 1)', + }, + { + 'raw': 'tciw', + 'units': 'kg m**-2', + }, + { + 'raw': 'tclw', + 'units': 'kg m**-2', + }, + { + 'raw': 'lsm', + 'units': '(0 - 1)', + }, + { + 'raw': 'e', + 'units': 'm of water equivalent', + }, + { + 'raw': 'sf', + 'units': 'm of water equivalent', + }, + { + 'raw': 'tp', + 'units': 'm of water equivalent', + }, + ) + + with catch_warnings(): + msg = "Ignoring netCDF variable '{raw}' invalid units '{units}'" + for warning in ignore_warnings: + filterwarnings(action='ignore', + message=re.escape(msg.format(**warning)), + category=UserWarning, + module='iris') + + if len(in_files) == 1: + cube = iris.load_cube( + in_files[0], + constraint=NameConstraint(var_name=var['raw']), + ) + elif var.get('operator', '') == 'sum': + # Multiple variables case using sum operation + cube = None + for raw_name, filename in zip(var['raw'], in_files): + in_cube = iris.load_cube( + filename, + constraint=NameConstraint(var_name=raw_name), + ) + if cube is None: + cube = in_cube + else: + cube += in_cube + elif var.get('operator', '') == 'diff': + # two variables case using diff operation + cube = None + elements_var = len(var['raw']) + elements_files = len(in_files) + if (elements_var != 2) or (elements_files != 2): + shortname = var.get('short_name') + errmsg = (f'operator diff selected for variable {shortname} ' + f'expects exactly two input variables and two input ' + f'files') + raise ValueError(errmsg) + cube = iris.load_cube( + in_files[0], + constraint=NameConstraint(var_name=var['raw'][0]), + ) + cube2 = iris.load_cube( + in_files[1], + constraint=NameConstraint(var_name=var['raw'][1]), + ) + cube -= cube2 + else: + raise ValueError( + "Multiple input files found, with operator '{}' configured: {}" + .format(var.get('operator'), ', '.join(in_files))) + + return cube + + +def _extract_variable(in_files, var, cfg, out_dir): + logger.info("CMORizing variable '%s' from input files '%s'", + var['short_name'], ', '.join(in_files)) + attributes = deepcopy(cfg['attributes']) + attributes['mip'] = var['mip'] + cmor_table = CMOR_TABLES[attributes['project_id']] + definition = cmor_table.get_variable(var['mip'], var['short_name']) + + cube = _load_cube(in_files, var) + + utils.set_global_atts(cube, attributes) + + # Set 
correct names + cube.var_name = definition.short_name + if definition.standard_name: + cube.standard_name = definition.standard_name + cube.long_name = definition.long_name + + _fix_units(cube, definition) + + # Fix data type + cube.data = cube.core_data().astype('float32') + + cube = _fix_coordinates(cube, definition) + + if attributes['dataset_id'] == 'ERA-Interim': + if 'mon' in var['mip']: + _fix_monthly_time_coord(cube) + if 'day' in var['mip']: + cube = _compute_daily(cube) + if 'fx' in var['mip']: + cube = iris.util.squeeze(cube) + cube.remove_coord('time') + + # Specific to ERA Interim Land + elif attributes['dataset_id'] == 'ERA-Interim-Land': + if 'mon' in var['mip']: + cube = _compute_monthly(cube) + _fix_monthly_time_coord_eiland(cube) + if 'day' in var['mip']: + cube = _compute_daily(cube) + else: + raise ValueError("Unknown dataset_id for this script:\ + {attributes['dataset_id']}") + + # Convert units if required + cube.convert_units(definition.units) + + logger.debug("Saving cube\n%s", cube) + logger.debug("Expected output size is %.1fGB", + np.prod(cube.shape) * 4 / 2**30) + utils.save_variable( + cube, + cube.var_name, + out_dir, + attributes, + local_keys=['positive'], + ) + logger.info("Finished CMORizing %s", ', '.join(in_files)) + + +def _get_in_files_by_year(in_dir, var): + """Find input files by year.""" + if 'file' in var: + var['files'] = [var.pop('file')] + + in_files = defaultdict(list) + for pattern in var['files']: + for filename in Path(in_dir).glob(pattern): + year = str(filename.stem).rsplit('_', maxsplit=1)[-1] + in_files[year].append(str(filename)) + + # Check if files are complete + for year in in_files.copy(): + if len(in_files[year]) != len(var['files']): + logger.warning( + "Skipping CMORizing %s for year '%s', %s input files needed, " + "but found only %s", var['short_name'], year, + len(var['files']), ', '.join(in_files[year])) + in_files.pop(year) + + return in_files.values() + + +def _run(jobs, n_workers): + """Run CMORization jobs using n_workers.""" + if n_workers == 1: + for job in jobs: + _extract_variable(*job) + else: + with ProcessPoolExecutor(max_workers=n_workers) as executor: + futures = {} + for job in jobs: + future = executor.submit(_extract_variable, *job) + futures[future] = job[0] + + for future in as_completed(futures): + try: + future.result() + except: # noqa + logger.error("Failed to CMORize %s", + ', '.join(futures[future])) + raise + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Run CMORizer for ERA-Interim.""" + cfg['attributes']['comment'] = cfg['attributes']['comment'].strip().format( + year=datetime.now().year) + cfg.pop('cmor_table') + + n_workers = cfg_user.get('max_parallel_tasks') + if n_workers is None: + n_workers = int(cpu_count() / 1.5) + logger.info("Using at most %s workers", n_workers) + + jobs = [] + for short_name, var in cfg['variables'].items(): + if 'short_name' not in var: + var['short_name'] = short_name + for in_files in _get_in_files_by_year(in_dir, var): + jobs.append([in_files, var, cfg, out_dir]) + + _run(jobs, n_workers) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/era_interim_land.py b/esmvaltool/cmorizers/data/formatters/datasets/era_interim_land.py new file mode 100644 index 0000000000..1e9da8496c --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/era_interim_land.py @@ -0,0 +1,21 @@ +"""ESMValTool CMORizer for ERA-Interim-Land data. 
+
+Tier
+    Tier 3: restricted datasets (i.e., dataset which requires a registration
+    to be retrieved or provided upon request to the respective contact or PI).
+
+Source
+    https://apps.ecmwf.int/datasets/data/interim-land/type=fc/
+
+Last access
+    20191104
+
+Download and processing instructions
+    See script cmorize_obs_era_interim.py
+"""
+
+from esmvaltool.cmorizers.data.formatters.datasets.era_interim import (
+    cmorization,
+)
+
+__all__ = ['cmorization']
diff --git a/esmvaltool/cmorizers/data/formatters/datasets/esacci_aerosol.ncl b/esmvaltool/cmorizers/data/formatters/datasets/esacci_aerosol.ncl
new file mode 100644
index 0000000000..e169d0eb4e
--- /dev/null
+++ b/esmvaltool/cmorizers/data/formatters/datasets/esacci_aerosol.ncl
@@ -0,0 +1,169 @@
+; #############################################################################
+; ESMValTool CMORizer for ESACCI-AEROSOL data
+; #############################################################################
+;
+; Tier
+;    Tier 2: other freely-available dataset.
+;
+; Source
+;    ftp://anon-ftp.ceda.ac.uk/neodc/esacci/aerosol/data/
+;
+; Last access
+;    20190124
+;
+; Download and processing instructions
+;    Download the data from:
+;      ATSR2_SU/L3/v4.21/MONTHLY/ (1997-2002)
+;      AATSR_SU/L3/v4.21/MONTHLY/ (2003-2011)
+;    Other years are not considered since they are not complete.
+;    Put all files in input_dir_path (no subdirectories with years).
+;
+; Modification history
+;    20190124-righi_mattia: adapted to v2.
+;    20160718-lauer_axel: added AOD550 + AOD870 uncertainties.
+;    20160525-righi_mattia: updated to v4.21 and adding more variables.
+;    20150126-righi_mattia: adding AOD at other wavelengths.
+;    20151124-righi_mattia: switched to monthly raw data (now available).
+;    20150424-righi_mattia: written.
+;
+; #############################################################################
+loadscript(getenv("esmvaltool_root") + \
+           "/data/formatters/interface.ncl")
+
+begin
+
+  ; Script name (for logger)
+  DIAG_SCRIPT = "esacci_aerosol.ncl"
+
+  ; Source name
+  OBSNAME = "ESACCI-AEROSOL"
+
+  ; Tier
+  TIER = 2
+
+  ; Period
+  YEAR1 = get_year(start_year, 1997)
+  YEAR2 = get_year(end_year, 2011)
+
+  ; Selected variable (standard name)
+  VAR = (/"od550aer", "od870aer", "od550lt1aer", "abs550aer", \
+          "od550aerStderr", "od870aerStderr"/)
+
+  ; Name in the raw data
+  NAME = (/"AOD550_mean", "AOD870_mean", "FM_AOD550_mean", "AAOD550_mean", \
+           "AOD550_uncertainty", "AOD870_uncertainty"/)
+
+  ; MIP
+  MIP = (/"aero", "aero", "aero", "aero", \
+          "aero", "aero"/)
+
+  ; Frequency
+  FREQ = (/"mon", "mon", "mon", "mon", \
+           "mon", "mon"/)
+
+  ; CMOR table
+  CMOR_TABLE = getenv("cmor_tables") + \
+    (/"/cmip5/Tables/CMIP5_aero", \
+      "/cmip5/Tables/CMIP5_aero", \
+      "/cmip5/Tables/CMIP5_aero", \
+      "/cmip5/Tables/CMIP5_aero", \
+      "/custom/CMOR_od550aerStderr.dat", \
+      "/custom/CMOR_od870aerStderr.dat"/)
+
+  ; Type
+  TYPE = "sat"
+
+  ; Version
+  VERSION = "SU-v4.21"
+
+  ; Global attributes
+  SOURCE = "ftp://anon-ftp.ceda.ac.uk/neodc/esacci/aerosol/data/"
+  REF = "Popp et al., Remote Sens., doi:10.3390/rs8050421, 2016."
+ COMMENT = "Combined dataset ERS2-ATSR2 (1997-2002) and ENVISAT-AATSR " + \ + "(2003-2011), based on the University of Swansea algorithm " + \ + "(monthly mean L3 data)" + +end + +begin + + do vv = 0, dimsizes(VAR) - 1 + + log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") + + time = create_timec(YEAR1, YEAR2) + date = cd_calendar(time, 1) + + ; Create timeseries + do yy = YEAR1, YEAR2 + do mm = 1, 12 + + ldate = yy + sprinti("%0.2i", mm) + + ; Read file + fname = systemfunc("ls " + input_dir_path + ldate + "*.nc") + + ; No files found + if (all(ismissing(fname))) then + continue + end if + + ; Extract data + f = addfile(fname, "r") + xx = f->$NAME(vv)$ + + ; Assign to global array + if (.not.isdefined("output")) then + dims = array_append_record(dimsizes(time), dimsizes(xx), 0) + output = new(dims, float) + output!0 = "time" + output&time = time + output!1 = "lat" + output&lat = f->latitude + output!2 = "lon" + output&lon = f->longitude + end if + output(ind(toint(ldate).eq.date), :, :) = (/xx/) + delete(fname) + + end do + end do + + ; Set fill value + output = where(output.eq.-999, output@_FillValue, output) + + ; Format coordinates + output!0 = "time" + output!1 = "lat" + output!2 = "lon" + format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ(vv)) + + ; Set variable attributes + tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ(vv)) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = YEAR1 + "01-" + YEAR2 + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" + + ; Write variable + write_nc(fout, VAR(vv), output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + end do + +end diff --git a/esmvaltool/cmorizers/data/formatters/datasets/esacci_cloud.ncl b/esmvaltool/cmorizers/data/formatters/datasets/esacci_cloud.ncl new file mode 100644 index 0000000000..b19ae0b865 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/esacci_cloud.ncl @@ -0,0 +1,230 @@ +; ############################################################################# +; ESMValTool CMORizer for ESACCI-CLOUD data +; ############################################################################# +; +; Tier +; Tier 2: other freely-available dataset. +; +; Source +; https://public.satproj.klima.dwd.de/data/ESA_Cloud_CCI/CLD_PRODUCTS/v3.0/ +; +; Last access +; 20210428 +; +; Download and processing instructions +; Download the data from: +; L3C/AVHRR-AM/ and L3C/AVHRR-PM, e.g.: +; wget -r -nH -e robots=off --cut-dirs=9 --no-parent +; --reject="index.html*" +; https://public.satproj.klima.dwd.de/data/ESA_Cloud_CCI/ +; CLD_PRODUCTS/v3.0/L3C/AVHRR-AM/ +; wget -r -nH -e robots=off --cut-dirs=9 --no-parent +; --reject="index.html*" +; https://public.satproj.klima.dwd.de/data/ESA_Cloud_CCI/ +; CLD_PRODUCTS/v3.0/L3C/AVHRR-PM/ +; +; All files are expected in a single directory (no subdirectories +; with years). +; +; Modification history +; 20230818-lauer_axel: added output of clwvi (in addition to iwp, lwp) +; 20210428-lauer_axel: AVHRR-AM and AVHRR-PM data are now averaged during +; the overlapping time; TOA radiative fluxes are now +; also processed +; 20190201-righi_mattia: adapted to v2. 
+; 20181116-lauer_axel: changed to use CLOUD-CCI v3.0 data (AVHRR-PM), gaps +; (1994/09 - 1995/01) are filled with AVHRR-AM data +; 20180522-righi_mattia: changed to use AVHRR-PM data. +; 20160519-laue_axel: written (reformat_obs_ESACCI-AEROSOL.ncl). +; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + \ + "/data/formatters/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "esacci_cloud.ncl" + + ; Source name + OBSNAME = "ESACCI-CLOUD" + + ; Tier + TIER = 2 + + ; Period + YEAR1 = get_year(start_year, 1982) + YEAR2 = get_year(end_year, 2016) + + ; Selected variable (standard name) + VAR = (/"clt", "cltStderr", "clivi", "lwp", "clwvi", "rlut", "rlutcs", \ + "rsut", "rsutcs", "rsdt", "rlus", "rsus", "rsuscs"/) + + ; Name in the raw data + NAME = (/"cfc", "cfc_unc", "iwp_allsky", "lwp_allsky", "iwp_allsky", \ + "toa_lwup", "toa_lwup_clr", "toa_swup", "toa_swup_clr", \ + "toa_swdn", "boa_lwup", "boa_swup", "boa_swup_clr"/) + + ; Conversion factor + CONV = (/100., 1., 0.001, 0.001, 0.001, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, \ + 1.0, 1.0/) + + ; MIP + MIP = (/"Amon", "Amon", "Amon", "Amon", "Amon", "Amon", "Amon", "Amon", \ + "Amon", "Amon", "Amon", "Amon", "Amon"/) + + ; Frequency + FREQ = (/"mon", "mon", "mon", "mon", "mon", "mon", "mon", "mon", "mon", \ + "mon", "mon", "mon", "mon"/) + + ; CMOR table + CMOR_TABLE = getenv("cmor_tables") + \ + (/"/cmip5/Tables/CMIP5_Amon", \ + "/custom/CMOR_cltStderr.dat", \ + "/cmip5/Tables/CMIP5_Amon", \ + "/custom/CMOR_lwp.dat", \ + "/cmip5/Tables/CMIP5_Amon", \ + "/cmip5/Tables/CMIP5_Amon", \ + "/cmip5/Tables/CMIP5_Amon", \ + "/cmip5/Tables/CMIP5_Amon", \ + "/cmip5/Tables/CMIP5_Amon", \ + "/cmip5/Tables/CMIP5_Amon", \ + "/cmip5/Tables/CMIP5_Amon", \ + "/cmip5/Tables/CMIP5_Amon", \ + "/cmip5/Tables/CMIP5_Amon"/) + + ; Type + TYPE = "sat" + + ; Version + VERSION = "AVHRR-AMPM-fv3.0" + + ; Global attributes + SOURCE = "https://public.satproj.klima.dwd.de/data/ESA_Cloud_CCI/" + \ + "CLD_PRODUCTS/v3.0/" + REF = \ + "Stengel et al., Earth Syst. Sci. Data, doi:10.5194/essd-9-881-2017, 2017" + COMMENT = "" + +end + +begin + + firstime = True + + do vv = 0, dimsizes(VAR) - 1 + + log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") + + time = create_timec(YEAR1, YEAR2) + date = cd_calendar(time, 1) + + ; Create timeseries + do yy = YEAR1, YEAR2 + + syear = sprinti("%i", yy) + do mm = 1, 12 + + smonth = sprinti("%0.2i", mm) + + ; Read file + fname = systemfunc("ls " + input_dir_path + syear + smonth + \ + "-ESACCI-L3C_CLOUD-CLD_PRODUCTS-AVHRR_*-" + \ + str_sub_str(VERSION, "AVHRR-AMPM-", "") + ".nc") + + ; No files found + if (all(ismissing(fname))) then + continue + end if + + ifiles = dimsizes(fname) + log_info(syear + smonth + ": " + tostring(ifiles) + " input file(s)") + + do i = 0, ifiles - 1 + ; Extract data + f = addfile(fname(i), "r") + xx = f->$NAME(vv)$ + + if (i .eq. 0) then + xdims = array_append_record((/ifiles/), dimsizes(xx), 0) + xx_all = new(xdims, float) + delete(xdims) + end if + + ; Convert units + xx_all(i, :, :, :) = xx * CONV(vv) + + ; *** calculate clwvi (lwp + iwp) *** + if (VAR(vv) .eq. 
"clwvi") then + xx2 = f->lwp_allsky * 0.001 + xx_all(i, :, :, :) = xx_all(i, :, :, :) + xx2 + delete(xx2) + end if + + if (firstime) then + lat = f->lat + lon = f->lon + firstime = False + end if + delete(f) + end do ; loop over all files (am/pm) per date + + delete(fname) + delete(xx) + + xx = dim_avg_n(xx_all, 0) ; ignore missing values + delete(xx_all) + + ; Assign to global array + if (.not.isdefined("output")) then + dims = dimsizes(xx) + dims(0) = dimsizes(time) + output = new(dims, float) + output!0 = "time" + output&time = time + output!1 = "lat" + output&lat = lat + output!2 = "lon" + output&lon = lon + end if + output(ind(toint(yy * 100 + mm).eq.date), :, :) = (/xx/) + + end do + end do + + ; Set fill value + output = where(output.eq.-999, output@_FillValue, output) + + ; Format coordinates + output!0 = "time" + output!1 = "lat" + output!2 = "lon" + format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ(vv)) + + ; Set variable attributes + tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ(vv)) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = YEAR1 + "01-" + YEAR2 + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" + + ; Write variable + write_nc(fout, VAR(vv), output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + end do + +end diff --git a/esmvaltool/cmorizers/data/formatters/datasets/esacci_fire.ncl b/esmvaltool/cmorizers/data/formatters/datasets/esacci_fire.ncl new file mode 100644 index 0000000000..0a8d36944e --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/esacci_fire.ncl @@ -0,0 +1,165 @@ +; ############################################################################# +; ESMValTool CMORizer for ESACCI-FIRE data +; ############################################################################# +; +; Tier +; Tier 2: other freely-available dataset. +; +; Source +; ftp://anon-ftp.ceda.ac.uk/neodc/esacci/fire/data/ +; +; Last access +; 20190124 +; +; Download and processing instructions +; Download the data from: +; burned_area/MERIS/grid/v4.1/ +; Put all files in input_dir_path (no subdirectories with years). +; +; Modification history +; 20190124-righi_mattia: written based on a python script by Ben Mueller. +; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + \ + "/data/formatters/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "esacci_fire.ncl" + + ; Source name + OBSNAME = "ESACCI-FIRE" + + ; Tier + TIER = 2 + + ; Period + YEAR1 = get_year(start_year, 2005) + YEAR2 = get_year(end_year, 2011) + + ; Selected variable (standard name) + VAR = "burntArea" + + ; Name in the raw data + NAME = "burned_area" + + ; MIP + MIP = "Lmon" + + ; Frequency + FREQ = "mon" + + ; CMOR table + CMOR_TABLE = getenv("cmor_tables") + "/cmip5/Tables/CMIP5_Lmon" + + ; Type + TYPE = "sat" + + ; Version + VERSION = "L4-BA-MERIS-fv4.1" + + ; Global attributes + SOURCE = "ftp://anon-ftp.ceda.ac.uk/neodc/esacci/fire/data/" + REF = "Chuvieco et al.ESA Fire Climate Change Initiative (Fire_cci): " + \ + "Burned Area Grid Product Version 4.1. " + \ + "Centre for Environmental Data Analysis, " + \ + "doi:10.5285/D80636D4-7DAF-407E-912D-F5BB61C142FA, 2016." 
+ COMMENT = "" + +end + +begin + + time = create_timec(YEAR1, YEAR2) + date = cd_calendar(time, 1) + + do yy = YEAR1, YEAR2 + do mm = 1, 12 + + ldate = yy + sprinti("%0.2i", mm) + + files = systemfunc("ls " + input_dir_path + ldate + \ + "??-ESACCI-L4_FIRE-BA-MERIS-fv4.1.nc") + f = addfiles(files, "r") + + xx = f[:]->$NAME$ + + ; Calculate area + if (.not.isdefined("area")) then + deg2rad = acos(-1.0) / 180. + lat = f[0]->lat + lon = f[0]->lon + nlat = dimsizes(lat) + deltax = abs(lon(1) - lon(0)) + lati = new(dimsizes(lat) + 1, float) + lati(0) = max((/(3 * lat(0) - lat(1)) / 2., -90./)) + do ii = 1, dimsizes(lati) - 2 + lati(ii) = 0.5 * (lat(ii - 1) + lat(ii)) + end do + lati(dimsizes(lati) - 1) = \ + min((/(3 * lat(nlat - 1) - lat(nlat - 2)) / 2., 90./)) + area = new((/dimsizes(lat), dimsizes(lon)/), float) + do ii = 0, dimsizes(lat) - 1 + deltay = sin(lati(ii + 1) * deg2rad) - sin(lati(ii) * deg2rad) + area(ii, :) = abs(6371000. ^ 2 * deltay * deltax * deg2rad) + end do + delete([/lat, lon, nlat, deltax, lati, deltay/]) + end if + + ; Calculate fraction + xx = xx / (/conform(xx, area, (/1, 2/))/) + + ; Assign to global array + if (.not.isdefined("output")) then + dims = array_append_record(dimsizes(time), dimsizes(xx(0, :, :)), 0) + output = new(dims, float) + output!0 = "time" + output&time = time + output!1 = "lat" + output&lat = f[0]->lat + output!2 = "lon" + output&lon = f[0]->lon + end if + output(ind(toint(ldate).eq.date), :, :) = dim_sum_n(xx, 0) + + delete(files) + delete(f) + + end do + end do + + ; Convert units [1] --> [%] + output = output * 100. + + ; Format coordinates + output!0 = "time" + output!1 = "lat" + output!2 = "lon" + format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ) + + ; Set variable attributes + tmp = format_variable(output, VAR, CMOR_TABLE) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = YEAR1 + "01-" + YEAR2 + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP, VAR, DATESTR/), "_") + ".nc" + + ; Write variable + write_nc(fout, VAR, output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + +end diff --git a/esmvaltool/cmorizers/data/formatters/datasets/esacci_landcover.py b/esmvaltool/cmorizers/data/formatters/datasets/esacci_landcover.py new file mode 100644 index 0000000000..d0e4d9d722 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/esacci_landcover.py @@ -0,0 +1,190 @@ +"""ESMValTool CMORizer for ESACCI-LANDCOVER pft data. + +Tier + Tier 2: other freely-available dataset. + +Source + ftp://anon-ftp.ceda.ac.uk/neodc/esacci/land_cover/data/pft/ + +Last access + 20240626 + +Download and processing instructions + Download the data from: + pft/v2.0.8/ + Put all files under a single directory (no subdirectories with years). 
+ in ${RAWOBS}/Tier2/ESACCI-LANDCOVER + +""" + +import os +import glob +import logging +from datetime import datetime +import iris +import numpy as np + +from esmvaltool.cmorizers.data.utilities import ( + fix_coords, + fix_var_metadata, + set_global_atts, + add_typebare, + save_variable, +) + +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +# Enable the new split-attributes handling mode +iris.FUTURE.save_split_attrs = True + + +def average_block(data, block_size): + """Average the data within each block of size block_size. + + Parameters + ---------- + data : numpy.ndarray + The input data array to be block averaged. + block_size : int + The size of the block used for averaging. The data is averaged + within non-overlapping blocks of this size along the spatial dimensions + (latitude and longitude). + + Returns + ------- + numpy.ndarray + The block-averaged data array. + """ + shape = data.shape + reshaped_data = data.reshape(shape[0], shape[1] // block_size, + block_size, shape[2] // block_size, + block_size) + averaged_data = reshaped_data.mean(axis=(2, 4)) + return averaged_data + + +def regrid_iris(cube): + """Regrid the cubes using block averaging. + + Parameters + ---------- + cube : iris.cube.Cube + The input data cube to be regridded. + + Returns + ------- + iris.cube.Cube + The regridded data cube. + + Notes + ----- + The block size is set to 100, which means the data will be averaged within + non-overlapping blocks of 100x100 grid cells along the spatial dimensions. + """ + logger.info("Regridding using block averaging") + + block_size = 100 # Number of grid cells to average in each block + + combined_data = average_block(cube.data, block_size) + + # Define target latitude and longitude ranges + target_lats = np.linspace(90 - 0.5 * (180 / combined_data.shape[1]), + -90 + 0.5 * (180 / combined_data.shape[1]), + combined_data.shape[1]) + target_lons = np.linspace(-180 + 0.5 * (360 / combined_data.shape[2]), + 180 - 0.5 * (360 / combined_data.shape[2]), + combined_data.shape[2]) + + combined_cube = iris.cube.Cube(combined_data, + dim_coords_and_dims=[ + (cube.coord('time'), 0), + (iris.coords.DimCoord( + target_lats, + standard_name='latitude', + units='degrees'), 1), + (iris.coords.DimCoord( + target_lons, + standard_name='longitude', + units='degrees'), 2)]) + + combined_cube.coord('latitude').guess_bounds() + combined_cube.coord('longitude').guess_bounds() + + return combined_cube + + +def regrid_fix(cube, glob_attrs, var_name, var_info): + """Regrid cube and fixes. + + Regrids the cube, fixes metadata, coordinates and glob_attrs. + + Parameters + ---------- + cube: iris.cube.Cube + Data cube to be regridded. + + vals: dict + Variable long_name. + + glob_attrs: dict + Dictionary holding cube metadata attributes. + + var_name: str + Variable name. + + var_info: dict + Dictionary holding cube metadata attributes. + + Returns + ------- + cube: iris.cube.Cube + data cube regridded and with fixed coordinates. 
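A quick numeric check of the reshape-based block averaging in `average_block` above: a (1, 4, 4) array with block_size=2 collapses each 2x2 tile to its mean, giving shape (1, 2, 2). The values below are synthetic:

    import numpy as np

    data = np.arange(16, dtype=float).reshape(1, 4, 4)
    block = 2
    reshaped = data.reshape(1, 4 // block, block, 4 // block, block)
    averaged = reshaped.mean(axis=(2, 4))
    print(averaged)  # [[[ 2.5  4.5]
                     #   [10.5 12.5]]]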
+ """ + logger.info("Regridding cube for %s", var_name) + regridded_cube = regrid_iris(cube) + fix_var_metadata(regridded_cube, var_info) + regridded_cube = fix_coords(regridded_cube) + set_global_atts(regridded_cube, glob_attrs) + + return regridded_cube + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorize data.""" + glob_attrs = cfg['attributes'] + if not start_date: + start_date = datetime(1992, 1, 1) + if not end_date: + end_date = datetime(2020, 12, 31) + + for year in range(start_date.year, end_date.year + 1): + inpfile_pattern = os.path.join(in_dir, cfg['filename']) + year_inpfile_pattern = inpfile_pattern.format(year=year) + inpfiles = sorted(glob.glob(year_inpfile_pattern)) + for inpfile in inpfiles: + cubes = iris.load(inpfile) + for var_name, vals in cfg['variables'].items(): + var_info = cfg['cmor_table'].get_variable(vals['mip'], + var_name) + glob_attrs['mip'] = vals['mip'] + glob_attrs['frequency'] = vals['frequency'] + if var_name == 'shrubFrac': + cube = cubes.extract_cube('SHRUBS-BD') + \ + cubes.extract_cube('SHRUBS-BE') + \ + cubes.extract_cube('SHRUBS-ND') + \ + cubes.extract_cube('SHRUBS-NE') + elif var_name == 'treeFrac': + cube = cubes.extract_cube('TREES-BD') + \ + cubes.extract_cube('TREES-BE') + \ + cubes.extract_cube('TREES-ND') + \ + cubes.extract_cube('TREES-NE') + else: + cube = cubes.extract_cube(vals['long_name']) + regridded_cube = regrid_fix(cube, glob_attrs, + var_name, var_info) + if var_name == 'baresoilFrac': + add_typebare(regridded_cube) + save_variable(regridded_cube, var_name, out_dir, glob_attrs, + unlimited_dimensions=['time']) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/esacci_lst.py b/esmvaltool/cmorizers/data/formatters/datasets/esacci_lst.py new file mode 100644 index 0000000000..70d8f1dc1e --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/esacci_lst.py @@ -0,0 +1,160 @@ +"""ESMValTool CMORizer for ESACCI-LST data. + +Tier + Tier 2: other freely-available dataset. 
+ +Source + On CEDA-JASMIN + /gws/nopw/j04/esacci_lst/public + For access to this JASMIN group workspace please register at + https://accounts.jasmin.ac.uk/services/group_workspaces/esacci_lst/ + +Download and processing instructions + Put all files under a single directory (no subdirectories with years) + in ${RAWOBS}/Tier2/ESACCI-LST + BOTH DAY and NIGHT files are needed for each month + +Currently set to work with only the MODIS AQUA L3 monthly data + +Modification history + 20201015 Started by Robert King + 20201029 Day/Night averaging added along with CMOR utils +""" + +import datetime +import logging +from calendar import monthrange + +import iris + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + cmor_table = cfg['cmor_table'] + glob_attrs = cfg['attributes'] + + # run the cmorization + + # vals has the info from the yml file + # var is set up in the yml file + for var, vals in cfg['variables'].items(): + # leave this loop in as might be useful in + # the future for getting other info + # like uncertainty information from the original files + + glob_attrs['mip'] = vals['mip'] + cmor_info = cmor_table.get_variable(vals['mip'], var) + var_name = cmor_info.short_name + + for key in vals.keys(): + logger.info("%s %s", key, vals[key]) + + variable = vals['raw'] + # not currently used, but referenced for future + # platform = 'MODISA' + + # loop over years and months + # get years from start_year and end_year + # note 2003 doesn't start until July so not included at this stage + for year in range(glob_attrs['start_year'], + glob_attrs['end_year'] + 1): + this_years_cubes = iris.cube.CubeList() + for month0 in range(12): # Change this in final version + month = month0 + 1 + logger.info(month) + day_cube, night_cube = load_cubes(in_dir, vals['file_day'], + vals['file_night'], year, + month, variable) + + monthly_cube = make_monthly_average(day_cube, night_cube, year, + month) + + # use CMORizer utils + monthly_cube = utils.fix_coords(monthly_cube) + + this_years_cubes.append(monthly_cube) + + # Use utils save + # This seems to save files all with the same name! + # Fixed by making yearly files + this_years_cubes = this_years_cubes.merge_cube() + this_years_cubes.long_name = 'Surface Temperature' + this_years_cubes.standard_name = 'surface_temperature' + + # Fix variable metadata + utils.fix_var_metadata(this_years_cubes, cmor_info) + + # Fix global metadata + utils.set_global_atts(this_years_cubes, glob_attrs) + + utils.save_variable( + this_years_cubes, + var_name, + out_dir, + glob_attrs, + unlimited_dimensions=['time'], + ) + + +def load_cubes(in_dir, file_day, file_night, year, month, variable): + """Variable description. 
+
+    variable = land surface temperature
+    platform = AQUA; not used for now,
+    but in place for future expansion to all ESA CCI LST platforms
+    """
+    logger.info('Loading %s/%s%s%s*.nc', in_dir, file_day, year, month)
+    day_cube = iris.load_cube(
+        '%s/%s%s%02d*.nc' % (in_dir, file_day, year, month), variable)
+    logger.info('Loading %s/%s%s%s*.nc', in_dir, file_night, year, month)
+    night_cube = iris.load_cube(
+        '%s/%s%s%02d*.nc' % (in_dir, file_night, year, month), variable)
+
+    return day_cube, night_cube
+
+
+def make_monthly_average(day_cube, night_cube, year, month):
+    """Make the average LST from the day time and night time files."""
+    day_cube.attributes.clear()
+    night_cube.attributes.clear()
+
+    co_time = night_cube.coord('time')
+    co_time.points = co_time.points + 100.0
+    # maybe the arbitrary difference should go on day cubes to
+    # take the timestamp to 12Z?
+    # not really an issue when using monthly files
+
+    result = iris.cube.CubeList([day_cube, night_cube]).concatenate_cube()
+
+    # This corrects the longitude coord name issue
+    # This should be fixed in the next version of the CCI data
+    logger.info("Longitude coordinate correction being applied")
+    result.coords()[2].var_name = 'longitude'
+    result.coords()[2].standard_name = 'longitude'
+    result.coords()[2].long_name = 'longitude'
+
+    monthly_cube = result.collapsed('time', iris.analysis.MEAN)
+
+    # fix time coordinate bounds
+    monthly_co_time = monthly_cube.coord('time')
+
+    time_point = (datetime.datetime(year, month, 1, 0, 0) -
+                  datetime.datetime(1981, 1, 1, 0, 0, 0)).total_seconds()
+    monthly_co_time.points = time_point
+
+    num_days = monthrange(year, month)[1]
+    monthly_co_time.bounds = [
+        time_point, time_point + ((num_days - 1) * 24 * 3600)
+    ]
+    # should this be num_days or num_days-1? Question for Valeriu or Axel.
+    # or 23:59:59?
+
+    monthly_cube.attributes = {
+        'information': 'Mean of Day and Night Aqua MODIS monthly LST'
+    }
+
+    return monthly_cube
diff --git a/esmvaltool/cmorizers/data/formatters/datasets/esacci_oc.py b/esmvaltool/cmorizers/data/formatters/datasets/esacci_oc.py
new file mode 100644
index 0000000000..c267222c5c
--- /dev/null
+++ b/esmvaltool/cmorizers/data/formatters/datasets/esacci_oc.py
@@ -0,0 +1,220 @@
+"""ESMValTool CMORizer for ESACCI-OC data.
+
+Tier
+    Tier 2: other freely-available dataset.
+
+Source
+    ftp://oceancolour.org/occci-v5.0/geographic/netcdf/monthly/chlor_a/
+    user: oc-cci-data
+    pass: ELaiWai8ae
+
+Last access
+    20190227
+
+Download and processing instructions
+    In case of issues with the data download, check also the information
+    provided at the OceanColour webpage https://esa-oceancolour-cci.org/
+    Put all files under a single directory (no subdirectories with years)
+    in ${RAWOBS}/Tier2/ESACCI-OC
+
+Modification history
+    20190227-lovato_tomas: written.
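The time handling in `make_monthly_average` in esacci_lst.py above can be checked in isolation: the point sits at the first of the month in seconds since 1981-01-01, and the upper bound follows from the month length. A small sketch (note that the source itself leaves the num_days versus num_days - 1 choice open):

    import datetime
    from calendar import monthrange

    epoch = datetime.datetime(1981, 1, 1)
    year, month = 2004, 2

    point = (datetime.datetime(year, month, 1) - epoch).total_seconds()
    num_days = monthrange(year, month)[1]  # 29 days in February 2004
    bounds = [point, point + (num_days - 1) * 24 * 3600]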
diff --git a/esmvaltool/cmorizers/data/formatters/datasets/esacci_oc.py b/esmvaltool/cmorizers/data/formatters/datasets/esacci_oc.py
new file mode 100644
index 0000000000..c267222c5c
--- /dev/null
+++ b/esmvaltool/cmorizers/data/formatters/datasets/esacci_oc.py
@@ -0,0 +1,220 @@
+"""ESMValTool CMORizer for ESACCI-OC data.
+
+Tier
+    Tier 2: other freely-available dataset.
+
+Source
+    ftp://oceancolour.org/occci-v5.0/geographic/netcdf/monthly/chlor_a/
+    user: oc-cci-data
+    pass: ELaiWai8ae
+
+Last access
+    20190227
+
+Download and processing instructions
+    In case of issues with the data download, check also the information
+    provided at the OceanColour webpage https://esa-oceancolour-cci.org/
+    Put all files under a single directory (no subdirectories with years)
+    in ${RAWOBS}/Tier2/ESACCI-OC
+
+Modification history
+    20190227-lovato_tomas: written.
+"""
+
+import glob
+import logging
+import os
+from datetime import datetime
+
+import iris
+import numpy as np
+import xarray as xr
+
+from esmvaltool.cmorizers.data.utilities import (
+    constant_metadata,
+    fix_coords,
+    fix_var_metadata,
+    save_variable,
+    set_global_atts,
+)
+
+logger = logging.getLogger(__name__)
+
+
+def _fix_data(cube, var):
+    """Specific data fixes for different variables."""
+    logger.info("Fixing data ...")
+    with constant_metadata(cube):
+        if var == 'chl':
+            cube *= 1.e-06
+    return cube
+
+
+def _add_depth_coord(cube):
+    """Add depth auxiliary coordinate for CMIP5 standard."""
+    def adjust_dim(dim):
+        if dim == 0:
+            return dim
+        return dim + 1
+
+    if not cube.coords('depth'):
+        assert len(cube.shape) == 3
+        depth_coord = iris.coords.DimCoord([0.],
+                                           standard_name='depth',
+                                           long_name='depth',
+                                           var_name='lev',
+                                           units='m',
+                                           bounds=[0., 2.5],
+                                           attributes={'positive': 'down'})
+        dim_coords = cube.coords(dim_coords=True)
+        aux_coords = cube.coords(dim_coords=False)
+        dim_coords_and_dims = [(coord, adjust_dim(cube.coord_dims(coord)[0]))
+                               for coord in dim_coords]
+        dim_coords_and_dims.append((depth_coord, 1))
+        aux_coords_and_dims = [(coord, tuple(adjust_dim(d)
+                                             for d in cube.coord_dims(coord)))
+                               for coord in aux_coords]
+        old_cube = cube
+        new_data = cube.core_data()[:, np.newaxis, :, :]
+        cube = iris.cube.Cube(
+            new_data,
+            old_cube.standard_name,
+            old_cube.long_name,
+            old_cube.var_name,
+            old_cube.units,
+            old_cube.attributes,
+            old_cube.cell_methods,
+            dim_coords_and_dims,
+            aux_coords_and_dims,
+        )
+    return cube
+
+
+def _fix_time(cube, frequency):
+    """Move monthly time points to mid-month and set full-month bounds."""
+    if frequency == "mon":
+        time = cube.coord("time")
+        units = time.units
+        new_dates = units.date2num(
+            np.array([[
+                datetime(d.year, d.month, 1),
+                datetime(d.year, d.month, 15),
+                datetime(d.year + (d.month // 12), (d.month % 12) + 1, 1)
+            ] for d in units.num2date(time.points)]))
+        time.points = new_dates[:, 1]
+        time.bounds = new_dates[:, (0, 2)]
+
+
+def extract_variable(var_info, raw_info, out_dir, attrs):
+    """Extract the variable, fix it and save it."""
+    var = var_info.short_name
+    cubes = iris.load(raw_info['file'])
+    rawvar = raw_info['name']
+
+    for cube in cubes:
+        if cube.var_name == rawvar:
+            fix_var_metadata(cube, var_info)
+            _fix_time(cube, var_info.frequency)
+            cube = fix_coords(cube, overwrite_time_bounds=False)
+            cube = _add_depth_coord(cube)
+            _fix_data(cube, var)
+            set_global_atts(cube, attrs)
+            save_variable(
+                cube,
+                var,
+                out_dir,
+                attrs,
+                local_keys=['coordinates'],
+                unlimited_dimensions=['time'],
+            )
+
+
+def merge_data(in_dir, out_dir, raw_info, bins):
+    """Merge all data into a single (regridded) file."""
+    var = raw_info['name']
+    do_bin = (bins != 0) and (bins % 2 == 0)
+    datafile = sorted(glob.glob(in_dir + '/' + raw_info['file'] + '*.nc'))
+    for dataset_id in datafile:
+        dataset = xr.open_dataset(dataset_id)
+        data_array = dataset[var].sel(lat=slice(None, None, -1))
+        # remove inconsistent attributes
+        for thekeys in [
+                'grid_mapping', 'ancillary_variables', 'parameter_vocab_uri'
+        ]:
+            data_array.attrs.pop(thekeys, None)
+
+        if do_bin:
+            data_array = data_array.coarsen(lat=bins, boundary='exact').mean()
+            data_array = data_array.coarsen(lon=bins, boundary='exact').mean()
+
+        if dataset_id == datafile[0]:
+            new_data_array = data_array
+            thekeys = [
+                'creator_name', 'creator_url', 'license', 'sensor',
+                'processing_level'
+            ]
+            dsmeta = dict((y, dataset.attrs[y]) for y in thekeys)
+            if do_bin:
+                dsmeta['BINNING'] = ' '.join([
+                    'Data binned using ', 
"{}".format(bins), 'by', + "{}".format(bins), 'cells average' + ]) + else: + dsmeta['BINNING'] = "" + continue + + new_data_array = xr.concat((new_data_array, data_array), dim='time') + + # create dataset + dataset = new_data_array.to_dataset(name=var) + for key, value in dsmeta.items(): + dataset.attrs[key] = value + dataset['lon'].attrs = {'standard_name': 'longitude'} + dataset['lat'].attrs = {'standard_name': 'latitude'} + + # encoding + thekeys = { + 'lat': { + '_FillValue': False + }, + 'lon': { + '_FillValue': False + }, + 'time': { + 'calendar': 'gregorian' + }, + var: { + '_FillValue': 1.e20 + } + } + + # save to file + datafile = os.path.join(out_dir, raw_info['file'] + '_merged.nc') + dataset.to_netcdf(datafile, encoding=thekeys, unlimited_dims='time') + + logger.info("Merged data written to: %s", datafile) + + return (datafile, dsmeta['BINNING']) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + cmor_table = cfg['cmor_table'] + glob_attrs = cfg['attributes'] + + # run the cmorization + for var, vals in cfg['variables'].items(): + var_info = cmor_table.get_variable(vals['mip'], var) + glob_attrs['mip'] = vals['mip'] + raw_info = {'name': vals['raw'], 'file': vals['file']} + + # merge yearly data and apply binning + inpfile, addinfo = merge_data(in_dir, out_dir, raw_info, + cfg['custom']['bin_size']) + + logger.info("CMORizing var %s from file %s", var, inpfile) + raw_info['file'] = inpfile + glob_attrs['comment'] = addinfo + glob_attrs['comment'] + extract_variable(var_info, raw_info, out_dir, glob_attrs) + + # Remove temporary input file + os.remove(inpfile) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/esacci_ozone.ncl b/esmvaltool/cmorizers/data/formatters/datasets/esacci_ozone.ncl new file mode 100644 index 0000000000..d7b516e347 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/esacci_ozone.ncl @@ -0,0 +1,192 @@ +; ############################################################################# +; ESMValTool CMORizer for ESACCI-OZONE data +; ############################################################################# +; +; Tier +; Tier 2: other freely-available dataset. +; +; Source +; ftp://anon-ftp.ceda.ac.uk/neodc/esacci/ozone/data/ +; +; Last access +; 20190201 +; +; Download and processing instructions +; Download the data from: +; total_columns/l3/merged/v0100/ +; Put all files under a single directory (no subdirectories with years). +; +; Modification history +; 20190201-righi_mattia: adapted to v2 and replace NaN/inf with FillValue. +; 20160224-wenzel_sabrina: written based on reformat_obs_ESACCI-AEROSOL.ncl. 
+; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + \ + "/data/formatters/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "esacci_ozone.ncl" + + ; Source name + OBSNAME = "ESACCI-OZONE" + + ; Tier + TIER = 2 + + ; Period + YEAR1 = get_year(start_year, 1997) + YEAR2 = get_year(end_year, 2010) + + ; Selected variable (standard name) + VAR = (/"toz", "tozStderr", "tro3prof", "tro3profStderr"/) + MIN_YEAR = (/1997, 1997, 2007, 2007/) + MAX_YEAR = (/2010, 2010, 2008, 2008/) + + ; Name in the raw data + NAME = (/"atmosphere_mole_content_of_ozone", \ + "atmosphere_mole_content_of_ozone_standard_error", \ + "merged_ozone_vmr", \ + "uncertainty_of_merged_ozone"/) + + ; MIP + MIP = (/"Amon", "Amon", "Amon", "Amon"/) + + ; Frequency + FREQ = (/"mon", "mon", "mon", "mon"/) + + ; CMOR table + CMOR_TABLE = getenv("cmor_tables") + "/custom/CMOR_" + VAR + ".dat" + + ; File name + FNAME = (/"ESACCI-OZONE-L3S-TC-MERGED-DLR_1M-_DATE_??-fv0100.nc", \ + "ESACCI-OZONE-L3S-TC-MERGED-DLR_1M-_DATE_??-fv0100.nc", \ + "ESACCI-OZONE-L3-LP-MERGED-MZM-_DATE_-fv0002.nc", \ + "ESACCI-OZONE-L3-LP-MERGED-MZM-_DATE_-fv0002.nc"/) + + ; Type + TYPE = "sat" + + ; Version + VERSION = "L3" + + ; Global attributes + SOURCE = "ftp://anon-ftp.ceda.ac.uk/neodc/esacci/ozone/data/" + REF = "Loyola et al., Int. J. Remote Sens. doi:10.1080/" + \ + "01431160902825016, 2009." + COMMENT = "" + +end + +begin + + do vv = 0, dimsizes(VAR) - 1 + + log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") + if (YEAR1 .gt. MAX_YEAR(vv)) then + continue + end if + if (YEAR2 .lt. MIN_YEAR(vv)) then + continue + end if + + if (YEAR1 .lt. MIN_YEAR(vv)) then + START_YEAR = MIN_YEAR(vv) + else + START_YEAR = YEAR1 + end if + + if (YEAR2 .gt. MAX_YEAR(vv)) then + END_YEAR = MAX_YEAR(vv) + else + END_YEAR = YEAR2 + end if + log_info("Processing years " + START_YEAR + "-" + END_YEAR) + ; Create timeseries + time = create_timec(START_YEAR, END_YEAR) + date = cd_calendar(time, 1) + do yy = START_YEAR, END_YEAR + log_info("Processing year " + yy) + do mm = 1, 12 + + ldate = yy + sprinti("%0.2i", mm) + fname_pattern = str_sub_str(FNAME(vv), "_DATE_", ldate) + ; File name + fname = systemfunc("ls " + input_dir_path + fname_pattern) + + ; Check + if (all(ismissing(fname))) then + error_msg("f", DIAG_SCRIPT, "", "no file found for date " + ldate + \ + ". 
Looking for " + fname_pattern)
+        end if
+
+        ; Extract data
+        f = addfile(fname(0), "r")
+        xx = f->$NAME(vv)$
+        xx@_FillValue = FILL
+        xx@missing_value = xx@_FillValue
+        xx = where(xx.lt.0., xx@_FillValue, xx)
+        xx = where(xx.gt.1e35, xx@_FillValue, xx)  ; get rid of infinity values
+        replace_ieeenan(xx, xx@_FillValue, 0)
+
+        ; Assign to global array
+        dimnames = getvardimnames(xx)
+        if (.not.isdefined("output")) then
+          dims = array_append_record(dimsizes(time), dimsizes(xx), 0)
+          output = new(dims, typeof(xx))
+          output!0 = "time"
+          output&time = time
+          do ii = 0, dimsizes(dimnames) - 1
+            if (dimnames(ii).eq."air_pressure") then
+              output!(ii+1) = "plev"
+              output&plev = f->$dimnames(ii)$
+            elseif (isStrSubset(dimnames(ii), "latitude")) then
+              output!(ii+1) = "lat"
+              output&lat = f->$dimnames(ii)$
+            elseif (dimnames(ii).eq."longitude") then
+              output!(ii+1) = "lon"
+              output&lon = f->$dimnames(ii)$
+            end if
+          end do
+        end if
+        output(ind(toint(ldate).eq.date), :, :) = (/xx/)
+        delete(fname)
+        delete(xx)
+
+      end do
+    end do
+
+    log_info("Generating file...")
+    ; Format coordinates
+    format_coords(output, START_YEAR + "0101", END_YEAR + "1231", FREQ(vv))
+
+    ; Set variable attributes
+    tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv))
+    delete(output)
+    output = tmp
+    delete(tmp)
+
+    ; Calculate coordinate bounds
+    bounds = guess_coord_bounds(output, FREQ(vv))
+
+    ; Set global attributes
+    gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT)
+
+    ; Output file
+    DATESTR = START_YEAR + "01-" + END_YEAR + "12"
+    fout = output_dir_path + \
+      str_join((/"OBS", OBSNAME, TYPE, VERSION, \
+                 MIP(vv), VAR(vv), DATESTR/), "_") + ".nc"
+
+    ; Write variable
+    write_nc(fout, VAR(vv), output, bounds, gAtt)
+    delete(gAtt)
+    delete(output)
+    delete(bounds)
+    delete(time)
+    delete(date)
+
+  end do
+
+end
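The sea-surface-salinity formatter below stitches many monthly files into a single cube before fixing metadata. A minimal standalone sketch of that iris pattern (the glob and the variable name are hypothetical; the formatter builds the real pattern from the dataset version and the raw name in the yml configuration):

```python
import iris
from iris.util import equalise_attributes, unify_time_units

# hypothetical input pattern and raw variable name
cubes = iris.load_raw('/rawobs/Tier2/ESACCI-SOS/*-v02.31.nc', 'sss')
equalise_attributes(cubes)  # drop attributes that differ between files
unify_time_units(cubes)     # put all time coordinates on one epoch
cube = cubes.concatenate_cube()  # raises if the cubes still disagree
```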
diff --git a/esmvaltool/cmorizers/data/formatters/datasets/esacci_sea_surface_salinity.py b/esmvaltool/cmorizers/data/formatters/datasets/esacci_sea_surface_salinity.py
new file mode 100644
index 0000000000..581b126c19
--- /dev/null
+++ b/esmvaltool/cmorizers/data/formatters/datasets/esacci_sea_surface_salinity.py
@@ -0,0 +1,62 @@
+"""ESMValTool CMORizer for ESACCI-SOS data.
+
+Tier
+    Tier 2: other freely-available dataset.
+
+Source
+    ftp://anon-ftp.ceda.ac.uk/neodc/esacci/sea_surface_salinity/data
+
+Last access
+    20200921
+
+Download and processing instructions
+    Download the data from:
+      v01.8/30days/
+      v02.31/30days/
+    Put all files under a single directory (no subdirectories with years).
+"""
+
+import logging
+import os
+
+import iris
+from iris.util import equalise_attributes, unify_time_units
+
+from esmvaltool.cmorizers.data.utilities import (
+    fix_var_metadata,
+    save_variable,
+    set_global_atts,
+)
+
+logger = logging.getLogger(__name__)
+
+
+def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date):
+    """Cmorize ESACCI-SOS dataset."""
+    glob_attrs = cfg['attributes']
+
+    logger.info("Starting cmorization for Tier%s OBS files: %s",
+                glob_attrs['tier'], glob_attrs['dataset_id'])
+    logger.info("Input data from: %s", in_dir)
+    logger.info("Output will be written to: %s", out_dir)
+    for version in glob_attrs['versions']:
+        logger.info('Cmorizing version %s', version)
+        file_expr = os.path.join(
+            in_dir,
+            "ESACCI-SEASURFACESALINITY-L4-SSS-MERGED_OI_Monthly_CENTRED_15Day_"
+            f"25km-20????15-{version}.nc")
+
+        for var, vals in cfg['variables'].items():
+            var_info = cfg['cmor_table'].get_variable(vals['mip'], var)
+            logger.info('Cmorizing var %s', var)
+            cubes = iris.load_raw(file_expr, vals['raw'])
+            equalise_attributes(cubes)
+            unify_time_units(cubes)
+            cube = cubes.concatenate_cube()
+            cube.units = '0.001'
+            logger.info(cube)
+            glob_attrs['mip'] = vals['mip']
+            glob_attrs['version'] = version
+            fix_var_metadata(cube, var_info)
+            set_global_atts(cube, glob_attrs)
+            save_variable(cube, var, out_dir, glob_attrs)
diff --git a/esmvaltool/cmorizers/data/formatters/datasets/esacci_soilmoisture.py b/esmvaltool/cmorizers/data/formatters/datasets/esacci_soilmoisture.py
new file mode 100644
index 0000000000..66859b420b
--- /dev/null
+++ b/esmvaltool/cmorizers/data/formatters/datasets/esacci_soilmoisture.py
@@ -0,0 +1,149 @@
+"""ESMValTool CMORizer for ESACCI-SOILMOISTURE data.
+
+Tier
+    Tier 2: other freely-available dataset.
+
+Source
+    ftp://anon-ftp.ceda.ac.uk/neodc/esacci/soil_moisture/data/
+
+Last access
+    20240626
+
+Download and processing instructions
+    Download the data from:
+      daily_files/COMBINED/v08.1/
+      ancillary/v08.1/
+    Put all files under a single directory (no subdirectories with years)
+    in ${RAWOBS}/Tier2/ESACCI-SOILMOISTURE.
+
+"""
+
+import glob
+import logging
+import os
+from datetime import datetime
+
+import iris
+from cf_units import Unit
+from esmvalcore.preprocessor import concatenate, monthly_statistics
+
+from ...utilities import (
+    fix_var_metadata,
+    fix_dim_coordnames,
+    fix_bounds,
+    save_variable,
+    set_global_atts
+)
+
+logger = logging.getLogger(__name__)
+
+
+def fix_coords(cube):
+    """Fix coordinates to CMOR standards.
+
+    Converts the time units and calendar, reverses the latitude
+    coordinate, shifts longitude to the CMOR-compliant 0-360 degree
+    range and fixes the bounds of all coordinates.
+
+    Parameters
+    ----------
+    cube: iris.cube.Cube
+        data cube with coordinates to be fixed.
+
+    Returns
+    -------
+    cube: iris.cube.Cube
+        data cube with fixed coordinates.
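+
+    Examples
+    --------
+    A sketch of the intended use on a freshly loaded cube (the file name
+    is illustrative):
+
+    >>> cube = iris.load_cube('sm.nc')  # doctest: +SKIP
+    >>> cube = fix_coords(cube)  # doctest: +SKIP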
+ """ + # First fix any completely missing coord var names + fix_dim_coordnames(cube) + + # Convert longitude from -180...180 to 0...360 + cube = cube.intersection(longitude=(0.0, 360.0)) + + # Fix individual coords + for cube_coord in cube.coords(): + # Fix time + if cube_coord.var_name == 'time': + logger.info("Fixing time...") + cube.coord('time').convert_units( + Unit('days since 1970-01-01T00:00:00+00:00', + calendar='proleptic_gregorian')) + + # Fix latitude + if cube_coord.var_name == 'lat': + logger.info("Fixing latitude...") + cube = iris.util.reverse(cube, cube_coord) + + # Fix bounds of all coordinates + fix_bounds(cube, cube_coord) + + return cube + + +def extract_variable(raw_info): + """Extract variables.""" + rawvar = raw_info['name'] + constraint = iris.Constraint(name=rawvar) + if rawvar == 'sm_uncertainty': + sm_cube = iris.load_cube(raw_info['file'], + iris.NameConstraint(var_name='sm')) + ancillary_var = sm_cube.ancillary_variable( + 'Volumetric Soil Moisture Uncertainty' + ) + cube = sm_cube.copy(ancillary_var.core_data()) + else: + cube = iris.load_cube(raw_info['file'], constraint) + + # Remove dysfunctional ancillary data without standard names + for ancillary_variable in cube.ancillary_variables(): + cube.remove_ancillary_variable(ancillary_variable) + + return cube + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorize data.""" + glob_attrs = cfg['attributes'] + if not start_date: + start_date = datetime(1978, 1, 1) + if not end_date: + end_date = datetime(2022, 12, 31) + + # run the cmorization + for var_name, vals in cfg['variables'].items(): + all_data_cubes = [] + if not isinstance(vals, dict): # Ensure vals is a dictionary + raise ValueError( + f"Invalid format for variable {var_name}: {type(vals)}" + ) + var_info = cfg['cmor_table'].get_variable(vals['mip'], var_name) + glob_attrs['mip'] = vals['mip'] + raw_info = {'name': vals['raw']} + inpfile_pattern = os.path.join(in_dir, vals['filename']) + logger.info("CMORizing var %s from file type %s", + var_name, inpfile_pattern) + + for year in range(start_date.year, end_date.year + 1): + year_inpfile_pattern = inpfile_pattern.format(year=year) + inpfiles = sorted(glob.glob(year_inpfile_pattern)) + for inpfile in inpfiles: + raw_info['file'] = inpfile + cube = extract_variable(raw_info) + all_data_cubes.append(cube) + final_cube = concatenate(all_data_cubes) + fix_var_metadata(final_cube, var_info) + final_cube = fix_coords(final_cube) + set_global_atts(final_cube, glob_attrs) + + save_variable(final_cube, var_name, out_dir, glob_attrs, + unlimited_dimensions=['time']) + + # For sm, also save monthly means + if var_name == 'sm': + monthly_mean_cube = monthly_statistics(final_cube, 'mean') + glob_attrs['mip'] = 'Lmon' + monthly_mean_cube.attributes.update(glob_attrs) + save_variable(monthly_mean_cube, var_name, out_dir, glob_attrs, + unlimited_dimensions=['time']) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/esacci_sst.py b/esmvaltool/cmorizers/data/formatters/datasets/esacci_sst.py new file mode 100644 index 0000000000..c009b96ffb --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/esacci_sst.py @@ -0,0 +1,97 @@ +"""ESMValTool CMORizer for ESACCI-SST data. + +Tier + Tier 2: other freely-available dataset. 
+
+Source
+    http://surftemp.net/regridding/index.html
+
+Last access
+    20201214
+
+Download and processing instructions
+    Download the following files:
+    Go to http://surftemp.net/regridding/index.html
+    and request regridded data with the following options:
+    Time Resolution: monthly
+    Longitude Resolution: 0.5
+    Latitude Resolution: 0.5
+    Start Date: 1982-01-01
+    End Date: 2019-12-31
+    Exclude data above sea ice threshold: True
+    (Threshold: 100 %)
+    Include post-hoc SST bias adjustments: True
+    Output Absolute or Anomaly SST: absolute
+    Generate Sea Ice Fraction: True
+    Error Correlation in Time (Days): 7
+    Error Correlation In Space (Degrees): 3.0
+
+Modification history
+    20201204-roberts_charles: written.
+    20201214-predoi_valeriu: approved.
+    20201214-lauer_axel: approved.
+"""
+
+import logging
+import os
+
+import iris
+from esmvalcore.preprocessor import concatenate
+
+from ...utilities import (
+    convert_timeunits,
+    fix_coords,
+    fix_var_metadata,
+    save_variable,
+    set_global_atts,
+)
+
+logger = logging.getLogger(__name__)
+
+
+def extract_variable(var_info, raw_info, attrs, year):
+    """Extract and fix the variable for a given year."""
+    rawvar = raw_info['name']
+    constraint = iris.NameConstraint(var_name=rawvar)
+    try:
+        cube = iris.load_cube(raw_info['file'], constraint)
+    except iris.exceptions.ConstraintMismatchError as constraint_error:
+        raise ValueError(f"No data available for variable {rawvar} "
+                         f"and year {year}") from constraint_error
+
+    # Fix cube
+    fix_var_metadata(cube, var_info)
+    convert_timeunits(cube, year)
+    cube = fix_coords(cube)
+    set_global_atts(cube, attrs)
+    return cube
+
+
+def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date):
+    """Cmorization func call."""
+    cmor_table = cfg['cmor_table']
+    glob_attrs = cfg['attributes']
+
+    # run the cmorization
+    for var, vals in cfg['variables'].items():
+        var_info = cmor_table.get_variable(vals['mip'], var)
+        glob_attrs['mip'] = vals['mip']
+        raw_info = {'name': vals['raw'], 'file': vals['file']}
+        inpfile = os.path.join(in_dir, cfg['filename'])
+        logger.info("CMORizing var %s from file type %s", var, inpfile)
+        years = range(1982, 2020)
+        months = [f"{mo:02d}" for mo in range(1, 13)]
+        for year in years:
+            monthly_cubes = []
+            for month in months:
+                raw_info['file'] = inpfile.format(year=year, month=month)
+                logger.info("CMORizing var %s from file type %s", var,
+                            raw_info['file'])
+                cube = extract_variable(var_info, raw_info, glob_attrs, year)
+                monthly_cubes.append(cube)
+            yearly_cube = concatenate(monthly_cubes)
+            save_variable(yearly_cube,
+                          var,
+                          out_dir,
+                          glob_attrs,
+                          unlimited_dimensions=['time'])
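The water-vapour formatter below follows the same recipe as this one: build one fixed cube per input file, then join a year at a time with the esmvalcore concatenate helper. A minimal sketch under assumed file names (paths and variable name are hypothetical):

```python
import iris
from esmvalcore.preprocessor import concatenate

# hypothetical monthly files for one year
monthly_cubes = [
    iris.load_cube(f'/rawobs/sst_2010{month:02d}.nc', 'tos')
    for month in range(1, 13)
]
yearly_cube = concatenate(monthly_cubes)  # one cube spanning the year
```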
diff --git a/esmvaltool/cmorizers/data/formatters/datasets/esacci_watervapour.py b/esmvaltool/cmorizers/data/formatters/datasets/esacci_watervapour.py
new file mode 100644
index 0000000000..d662f0c752
--- /dev/null
+++ b/esmvaltool/cmorizers/data/formatters/datasets/esacci_watervapour.py
@@ -0,0 +1,94 @@
+"""ESMValTool CMORizer for ESACCI-WATERVAPOUR data.
+
+Tier
+    Tier 3: CDR2 requires registration at EUMETSAT CM SAF.
+
+Source
+    https://wui.cmsaf.eu/safira/action/viewDoiDetails?acronym=COMBI_V001
+
+Last access
+    20240221
+
+Download and processing instructions
+    CDR2 requires registration at EUMETSAT CM SAF; the information on how
+    to download the order will be emailed once the order is ready.
+    All files need to be in one directory, not in yearly subdirectories.
+
+Modification history
+    20240221-malinina_elizaveta: Adjust for daily cmorization and updated
+        filenames, remove CDR1 due to irrelevance.
+    20210607-weigel_katja: Fix for monthly time bounds.
+    20210408-weigel_katja: written.
+"""
+
+import glob
+import logging
+import os
+
+import iris
+from esmvalcore.cmor.fixes import get_time_bounds
+from esmvalcore.preprocessor import concatenate
+
+from ...utilities import (
+    convert_timeunits,
+    fix_coords,
+    fix_var_metadata,
+    save_variable,
+    set_global_atts,
+)
+
+logger = logging.getLogger(__name__)
+
+
+def extract_variable(var_info, raw_info, attrs, year):
+    """Extract and fix the variable for a given year."""
+    rawvar = raw_info['name']
+    constraint = iris.NameConstraint(var_name=rawvar)
+    try:
+        cube = iris.load_cube(raw_info['file'], constraint)
+    except iris.exceptions.ConstraintMismatchError as constraint_error:
+        raise ValueError(f"No data available for variable {rawvar} "
+                         f"and year {year}") from constraint_error
+
+    # Fix cube
+    fix_var_metadata(cube, var_info)
+    convert_timeunits(cube, year)
+    cube = fix_coords(cube, overwrite_time_bounds=False)
+    set_global_atts(cube, attrs)
+    # Remove dysfunctional ancillary data without standard names
+    for ancillary_variable in cube.ancillary_variables():
+        cube.remove_ancillary_variable(ancillary_variable)
+    return cube
+
+
+def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date):
+    """Cmorize data."""
+    glob_attrs = cfg['attributes']
+
+    # run the cmorization
+    for var_name, vals in cfg['variables'].items():
+        var = vals['short_name']
+        var_info = cfg['cmor_table'].get_variable(vals['mip'], var)
+        glob_attrs['mip'] = vals['mip']
+        raw_info = {'name': vals['raw']}
+        inpfile_pattern = os.path.join(in_dir, vals['filename'])
+        logger.info("CMORizing var %s from file type %s", var,
+                    inpfile_pattern)
+        for year in range(vals['start_year'], vals['end_year'] + 1):
+            data_cubes = []
+            year_inpfile_pattern = inpfile_pattern.format(year=year)
+            inpfiles = sorted(glob.glob(year_inpfile_pattern))
+            for inpfile in inpfiles:
+                raw_info['file'] = inpfile
+                logger.info("CMORizing var %s from file type %s", var,
+                            raw_info['file'])
+                data_cubes.append(
+                    extract_variable(var_info, raw_info, glob_attrs, year))
+            yearly_cube = concatenate(data_cubes)
+            # Fix monthly time bounds
+            time = yearly_cube.coord('time')
+            time.bounds = get_time_bounds(time, vals['frequency'])
+            save_variable(yearly_cube,
+                          var,
+                          out_dir,
+                          glob_attrs,
+                          unlimited_dimensions=['time'])
diff --git a/esmvaltool/cmorizers/data/formatters/datasets/esdc.py b/esmvaltool/cmorizers/data/formatters/datasets/esdc.py
new file mode 100644
index 0000000000..529f497396
--- /dev/null
+++ b/esmvaltool/cmorizers/data/formatters/datasets/esdc.py
@@ -0,0 +1,149 @@
+"""ESMValTool CMORizer for Earth System Data Cube data.
+
+Tier
+    Tier 2: other freely-available dataset.
+
+Source
+    http://data.rsc4earth.de/EarthSystemDataCube/
+
+Last access
+    20230126
+
+Download and processing instructions
+    It is not necessary to download the data, as the cmorizer script can
+    access it directly from the cloud if it is not available locally.
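+
+    When reading locally, the script matches the ``filename`` pattern from
+    the yml configuration against the input directory; a matching zarr
+    store would be named something like (purely illustrative):
+    ```esdc-8d-0.25deg-256x128x128-3.0.1.zarr```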
+ + To download a dataset, the dataset folder can be explored on the source + website, and downloaded using wget: + ```wget -m -nH -np -R "index.html*" http://data.rsc4earth.de/EarthSystemDataCube/v3.0.1/``` +""" # noqa: E501 +import logging +from copy import deepcopy +from pathlib import Path + +import cf_units +import iris.std_names +import xarray as xr +from esmvalcore.preprocessor import monthly_statistics + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def _fix_cube(var, cube, cfg): + """General fixes for all cubes.""" + cmor_info = cfg['cmor_table'].get_variable(var['mip'], var['short_name']) + + # Set correct names + cube.var_name = cmor_info.short_name + if cmor_info.standard_name: + cube.standard_name = cmor_info.standard_name + cube.long_name = cmor_info.long_name + + # Set calendar to gregorian instead of proleptic gregorian + old_unit = cube.coord('time').units + if old_unit.calendar == 'proleptic_gregorian': + logger.info("Converting time units to gregorian") + cube.coord('time').units = cf_units.Unit(old_unit.origin, + calendar='gregorian') + cube = utils.fix_coords(cube) + cube.convert_units(cmor_info.units) + if 'height2m' in cmor_info.dimensions: + utils.add_height2m(cube) + # Conversion from 8-d to monthly frequency + cube = monthly_statistics(cube, operator="mean") + + # Fix metadata + attrs = cfg['attributes'] + attrs['mip'] = var['mip'] + utils.fix_var_metadata(cube, cmor_info) + utils.set_global_atts(cube, attrs) + + return cube + + +def _open_zarr(path): + """Open zarr dataset.""" + logger.info('Opening zarr in "%s"', path) + try: + zarr_dataset = xr.open_dataset(path, engine='zarr') + return zarr_dataset + except KeyError as exception: + # Happens when the zarr folder is missing metadata, e.g. when + # it is a zarr array instead of a zarr dataset. 
+ logger.error('Could not open zarr dataset "%s": "KeyError: %s"', path, + exception) + raise exception + + +def _extract_variable(zarr_path, var, cfg, out_dir): + """Open and cmorize cube.""" + attributes = deepcopy(cfg['attributes']) + all_attributes = { + **attributes, + **var + } # add the mip to the other attributes + raw_name = var['raw'] + zarr_dataset = _open_zarr(zarr_path) + cube_xr = zarr_dataset[raw_name] + + # Invalid standard names must be removed before converting to iris + standard_name = cube_xr.attrs.get('standard_name', None) + if (standard_name is not None + and standard_name not in iris.std_names.STD_NAMES): + del cube_xr.attrs['standard_name'] + logger.info('Removed invalid standard name "%s".', standard_name) + + cube_iris = cube_xr.to_iris() + cube = _fix_cube(var, cube_iris, cfg) + + utils.save_variable(cube=cube, + var=var['short_name'], + outdir=out_dir, + attrs=all_attributes, + unlimited_dimensions=['time']) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorize the dataset.""" + if start_date: + logger.warning('start_date set to "%s", but will be ignored', + start_date) + if end_date: + logger.warning('end_date set to "%s", but will be ignored', end_date) + + attributes = cfg['attributes'] + variables = cfg['variables'] + version = attributes['version'] + filename_pattern = cfg['filename'].format(grid=attributes['grid'], + chunking=attributes['chunking'], + version=version) + + local_path = Path(in_dir) + in_files = list(local_path.glob(filename_pattern)) + logger.debug('Pattern %s matched: %s', Path(local_path, filename_pattern), + in_files) + + if len(in_files) > 1: + logger.warning( + 'Pattern has matched "%i" files, ' + 'but only the first one will be used.', len(in_files)) + logger.warning('The following files will be ignored.: "%s"', + in_files[1:]) + zarr_path = in_files[0] + elif len(in_files) == 0: + logger.info( + 'No local matches for pattern "%s", ' + 'attempting connection to the cloud.', + Path(local_path, filename_pattern)) + if '*' in filename_pattern: + logger.warning( + 'Detected a wildcard character in path (*), ' + 'online connection to \"%s\" may not work', filename_pattern) + zarr_path = f'{attributes["source"]}/v{version}/{filename_pattern}' + + for short_name, var in variables.items(): + if 'short_name' not in var: + var['short_name'] = short_name + _extract_variable(zarr_path, var, cfg, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/esrl.py b/esmvaltool/cmorizers/data/formatters/datasets/esrl.py new file mode 100644 index 0000000000..ab9e0930e9 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/esrl.py @@ -0,0 +1,318 @@ +"""ESMValTool CMORizer for ESRL data. + +Tier + Tier 2: other freely-available dataset. + +Source + https://www.esrl.noaa.gov/gmd/dv/data/ + ftp://aftp.cmdl.noaa.gov/data/trace_gases/co2/ + + +Last access + 20201126 + +Download and processing instructions + Download the txt files from the ftp server/data interface +""" + +import glob +import logging +import os +from datetime import datetime +from ftplib import FTP +from pprint import pformat + +import iris +import numpy as np +import pandas as pd +import requests +from cf_units import Unit + +from ... 
import utilities as utils + +logger = logging.getLogger(__name__) + + +def _download_files(in_dir, cfg, stations): + """Download input files using FTP.""" + logger.info("Downloading data from FTP server %s", cfg['ftp_host']) + files = {} + for station in stations: + # First look for baseline observatories + if station.upper() in ['MLO', 'BRW', 'SMO', 'SPO']: + files[station] = { + 'name': "co2_" + station.lower() + '_surface-insitu_1_ccgg_' + 'MonthlyData.txt', + 'folder': + cfg['data_dir'] + 'in-situ/surface/' + station.lower() + } + elif station.lower() == 'global': + files[station] = { + 'name': 'co2_mm_gl.txt', + 'folder': 'products/trends/co2/' + } + else: + files[station] = { + 'name': + 'co2_' + station.lower() + '_surface-flask_1_ccgg_month.txt', + 'folder': cfg['data_dir'] + 'flask/surface/' + } + input_files = {} + rm_stat = [] + with FTP(cfg['ftp_host']) as ftp_client: + logger.info(ftp_client.getwelcome()) + ftp_client.login() + for station in files: + filename_full = os.path.join(files[station]["folder"], + files[station]["name"]) + if filename_full in ftp_client.nlst(files[station]["folder"]): + logger.info("Downloading %s", files[station]["name"]) + new_path = os.path.join(in_dir, files[station]["name"]) + with open(new_path, mode='wb') as outfile: + ftp_client.retrbinary(f'RETR {filename_full}', + outfile.write) + input_files[station] = [new_path] + else: + rm_stat.append(station) + return input_files, rm_stat + + +def _get_cube(row, column_ind, fill_value, station_dict): + """Create :class:`iris.cube.Cube` from :class:`pandas.Series`.""" + time_coord = _get_time_coord(int(row['year']), int(row['month'])) + lat_coord, lon_coord = _make_station_lat_lon_coord(station_dict) + data = np.ma.masked_equal(float(row[column_ind[2]]), fill_value) + cube = iris.cube.Cube( + data.reshape((1, 1, 1)), + dim_coords_and_dims=[(time_coord, 0), (lat_coord, 1), (lon_coord, 2)], + units='ppm', + ) + return cube + + +def _get_rows_and_fill_value(filepath): + """Check which dataset type is present and return columns to use.""" + if 'insitu' in filepath: + # Insitu tower monthly + data_rows = [1, 2, 8] + fill_v = -999.990 + elif 'month.' 
in filepath: + # Monthly surface flask data, 1; year, 2: month, 3: data + data_rows = [1, 2, 3] + fill_v = -999.99 # not sure + elif 'mm_gl' in filepath: + data_rows = [0, 1, 3] + fill_v = -999.99 + else: + raise NotImplementedError("Unexpected number of columns, " + "only monthly data from in situ or flask " + "measurements currently supported") + return data_rows, fill_v + + +def _get_station_dictionary(): + """Get station information from online table.""" + url = "https://www.esrl.noaa.gov/gmd/dv/site/?program=ccgg" + stat_list = pd.read_html(requests.get(url).content) + stats = stat_list[-1] + # Remove asterisk from station names (flags inactive stations) + stats['Code'] = stats['Code'].str.replace('*', '') + stats.set_index("Code", drop=False, inplace=True) + station_dict = stats.to_dict(orient="index") + + # Add entry for Global + station_dict['GLOBAL'] = { + 'Latitude': 0.0, + 'Longitude': 180.0, + 'Elevation (meters)': 0, + 'Code': 'GLOBAL' + } + return station_dict + + +def _get_time_coord(year, month): + """Get time coordinate.""" + point = datetime(year=year, month=month, day=15) + bound_low = datetime(year=year, month=month, day=1) + if month == 12: + month_bound_up = 1 + year_bound_up = year + 1 + else: + month_bound_up = month + 1 + year_bound_up = year + bound_up = datetime(year=year_bound_up, month=month_bound_up, day=1) + time_units = Unit('days since 1950-01-01 00:00:00', calendar='standard') + time_coord = iris.coords.DimCoord( + time_units.date2num(point), + bounds=time_units.date2num([bound_low, bound_up]), + var_name='time', + standard_name='time', + long_name='time', + units=time_units, + ) + return time_coord + + +def _extract_variable(short_name, var, cfg, out_dir, station_dic): + """Extract variable.""" + data = pd.read_csv(station_dic['filepath'], + sep=' {1,}', + comment='#', + engine='python', + header=None) + # Insitu tower monthly had uncommented header, remove + if data.shape[1] == 17: + data = data.drop(0) + + data_rows, fill_v = _get_rows_and_fill_value(station_dic['filepath']) + + # Resample data to monthly, pad with missing values as needed + data[data_rows[2]] = pd.to_numeric(data[data_rows[2]]) + data = data.replace(fill_v, np.nan) + data = data.rename(columns={data_rows[0]: "year", data_rows[1]: "month"}) + data['day'] = 15 + data['datetime'] = pd.to_datetime(data[['year', 'month', 'day']]) + data = data.resample('M', on="datetime").mean() + data = data.fillna(fill_v) + data['year'] = data.index.year + data['month'] = data.index.month + + # Extract cube + cubes = iris.cube.CubeList() + for (_, row) in data.iterrows(): + cube = _get_cube(row, data_rows, fill_v, station_dic) + cubes.append(cube) + cube = cubes.concatenate_cube() + cube.var_name = short_name + + # Fix metadata + utils.convert_timeunits(cube, 1950) + cube = utils.fix_coords(cube) + cmor_info = cfg['cmor_table'].get_variable(var['mip'], short_name) + cube.convert_units(cmor_info.units) + attrs = cfg['attributes'] + attrs['version'] = station_dic['Code'].upper() + attrs['mip'] = var['mip'] + attrs['altitude'] = station_dic['Elevation (meters)'] + attrs['altitude_units'] = 'm' + utils.fix_var_metadata(cube, cmor_info) + utils.set_global_atts(cube, attrs) + + # Save variable + utils.save_variable(cube, + short_name, + out_dir, + attrs, + unlimited_dimensions=['time']) + + +def _make_station_lat_lon_coord(station_dic): + """Make iris coordinates for given latitude and longitude.""" + lat = station_dic['Latitude'] + lon = station_dic['Longitude'] + if lon < 0: + lon = lon + 360 + + # Treat 
Global data differently + if lat == 0.0 and lon == 180.0: + lat_coord = iris.coords.DimCoord([0.0], + bounds=[[-90.0, 90.0]], + var_name='lat', + standard_name='latitude', + long_name='latitude', + units=Unit('degrees_north')) + lon_coord = iris.coords.DimCoord([180.0], + bounds=[[0.0, 360.0]], + var_name='lon', + standard_name='longitude', + long_name='longitude', + units=Unit('degrees_east')) + else: + lat_coord = iris.coords.DimCoord([lat], + var_name='lat', + standard_name='latitude', + long_name='latitude', + units='degrees') + lon_coord = iris.coords.DimCoord([lon], + var_name='lon', + standard_name='longitude', + long_name='longitude', + units='degrees') + return lat_coord, lon_coord + + +def _get_filenames(stations, cfg, in_dir, all_stat): + """Get filename given pattern and station name.""" + input_files = {} + download_files = [] + for station in stations: + if station.lower() == "global": + st_filepattern = "co2_mm_gl.txt" + else: + # Replace first * with station name + filename_pattern = cfg['input_filename_pattern'] + st_filepattern = filename_pattern.replace("*", station.lower(), 1) + pattern = os.path.join(in_dir, st_filepattern) + input_file = glob.glob(pattern) + if not input_file: + download_files.append(station) + else: + input_files[station] = input_file + if len(download_files) > 0: + if cfg['download']: + input_files_dl, rm_stat = _download_files(in_dir, cfg, + download_files) + input_files.update(input_files_dl) + if len(rm_stat) > 0: + if all_stat: + # When selecting "all", some stations may not have + # available data at the moment, + # so remove these from to process files + stations = [x for x in stations if x not in rm_stat] + else: + raise ValueError( + f"No data found for {rm_stat} on the ftp server. ") + else: + if not all_stat: + raise ValueError( + f"No local data found for stations {download_files}, " + "consider turning on the download option.") + logger.debug("Found input files:\n%s", pformat(input_files)) + return input_files, stations + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + # read station information + station_dict = _get_station_dictionary() + + # Run the cmorization + for (short_name, var) in cfg['variables'].items(): + # Read station names + if 'all' in var['stations']: + stations = station_dict.keys() + all_stat = True + else: + stations = var['stations'] + all_stat = False + # Check for wrong station names + stat_upper = [element.upper() for element in stations] + false_keys = np.setdiff1d(stat_upper, list(station_dict.keys())) + if len(false_keys) == 0: + filepath, stations = _get_filenames(stations, cfg, in_dir, + all_stat) + for station in stations: + logger.info("Reading file '%s'", filepath[station][0]) + logger.info("CMORizing variable '%s' for station '%s'", + short_name, station) + # Add filepath to station_dict + station_dict[station.upper()]['filepath'] = \ + filepath[station][0] + _extract_variable(short_name, var, cfg, out_dir, + station_dict[station.upper()]) + else: + raise ValueError(f"Could not find the following station(s): " + f"{false_keys}. " + "Please double-check your spelling in the " + "cmor config file. 
The following is a list of "
+                             f"valid stations: {list(station_dict.keys())}.")
diff --git a/esmvaltool/cmorizers/data/formatters/datasets/fluxcom.py b/esmvaltool/cmorizers/data/formatters/datasets/fluxcom.py
new file mode 100644
index 0000000000..3e25d8a894
--- /dev/null
+++ b/esmvaltool/cmorizers/data/formatters/datasets/fluxcom.py
@@ -0,0 +1,91 @@
+"""ESMValTool CMORizer for FLUXCOM GPP data.
+
+Tier
+    Tier 3: restricted dataset.
+
+Source
+    http://www.bgc-jena.mpg.de/geodb/BGI/Home
+
+Last access
+    20190727
+
+Download and processing instructions
+    From the website, select FLUXCOM as the data choice and click download.
+    Two files will be displayed. One for Land Carbon Fluxes and one for
+    Land Energy fluxes. The Land Carbon Flux file (RS + METEO) using
+    CRUNCEP data file has several data files for different variables.
+    The data for GPP generated using the
+    Artificial Neural Network Method will be in files with name:
+    GPP.ANN.CRUNCEPv6.monthly.*.nc
+    A registration is required for downloading the data.
+    Users in the UK with a CEDA-JASMIN account may request access to the
+    jules workspace and access the data.
+    Note: this data may require rechunking of the netCDF files.
+    This constraint will no longer exist once iris is updated to
+    version 2.3.0 (Aug 2019).
+"""
+import logging
+import os
+import re
+
+import iris
+
+from esmvaltool.cmorizers.data import utilities as utils
+
+logger = logging.getLogger(__name__)
+
+
+def _get_filepath(in_dir, basename):
+    """Find correct name of file (extend basename with timestamp)."""
+    regex = re.compile(basename)
+
+    all_files = [
+        f for f in os.listdir(in_dir)
+        if os.path.isfile(os.path.join(in_dir, f))
+    ]
+    for filename in all_files:
+        if regex.match(filename):
+            return os.path.join(in_dir, filename)
+    raise OSError(
+        f"Cannot find input file matching pattern '{basename}' in '{in_dir}'")
+
+
+def _extract_variable(cmor_info, attrs, filepath, out_dir):
+    """Extract variable."""
+    var = cmor_info.short_name
+    logger.info("Var is %s", var)
+    cubes = iris.load(filepath)
+    for cube in cubes:
+        # convert data from gC m-2 day-1 to kg m-2 s-1
+        cube = cube / (1000 * 86400)
+        cube.units = 'kg m-2 s-1'
+
+        # The following two lines are needed for iris.util.guess_coord_axis
+        cube.coord('lat').standard_name = 'latitude'
+        cube.coord('lon').standard_name = 'longitude'
+        utils.fix_var_metadata(cube, cmor_info)
+        utils.convert_timeunits(cube, 1950)
+        cube = utils.fix_coords(cube)
+        utils.set_global_atts(cube, attrs)
+        logger.info("Saving file")
+        utils.save_variable(cube,
+                            var,
+                            out_dir,
+                            attrs,
+                            unlimited_dimensions=['time'])
+
+
+def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date):
+    """Cmorization func call."""
+    glob_attrs = cfg['attributes']
+    cmor_table = cfg['cmor_table']
+    filepath = _get_filepath(in_dir, cfg['filename'])
+    logger.info("Found input file '%s'", filepath)
+
+    # Run the cmorization
+    for (var, var_info) in cfg['variables'].items():
+        logger.info("CMORizing variable '%s'", var)
+        glob_attrs['mip'] = var_info['mip']
+        logger.info(var_info['mip'])
+        cmor_info = cmor_table.get_variable(var_info['mip'], var)
+        _extract_variable(cmor_info, glob_attrs, filepath, out_dir)
diff --git a/esmvaltool/cmorizers/data/formatters/datasets/gcp2018.py b/esmvaltool/cmorizers/data/formatters/datasets/gcp2018.py
new file mode 100644
index 0000000000..9b5ba367db
--- /dev/null
+++ b/esmvaltool/cmorizers/data/formatters/datasets/gcp2018.py
@@ -0,0 +1,134 @@
+"""ESMValTool CMORizer for GCP2018 data.
+ +Tier + Tier 2: other freely-available dataset. + +Source + https://www.icos-cp.eu/GCP/2018 + +Last access + 20210908 + +Download and processing instructions + Download the following file: '2018 Global Budget v1.0' +""" + +import logging +import os +import warnings +from datetime import datetime + +import iris +import numpy as np +import pandas as pd +from cf_units import Unit + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def _get_coords(data_table): + """Extract coordinates.""" + time_units = Unit('days since 1850-01-01 00:00:00') + times = [datetime(year=year, month=6, day=15) for year in data_table.index] + time_dim = iris.coords.DimCoord(time_units.date2num(times), + var_name='time', + standard_name='time', + long_name='time', + units=time_units) + time_dim.guess_bounds() + lat_dim = iris.coords.DimCoord([0.0], + bounds=[[-90.0, 90.0]], + var_name='lat', + standard_name='latitude', + long_name='latitude', + units=Unit('degrees_north')) + lon_dim = iris.coords.DimCoord([180.0], + bounds=[[0.0, 360.0]], + var_name='lon', + standard_name='longitude', + long_name='longitude', + units=Unit('degrees_east')) + return [(time_dim, 0), (lat_dim, 1), (lon_dim, 2)] + + +def _extract_variable(variable_name, var, cfg, data_table, out_dir): + """Extract variable.""" + # Set correct header for data_frame and remove lines that do not include + # final data (indicated by NaNs) + header = data_table.iloc[cfg['header_line']] + data_table = data_table[cfg['header_line'] + 1:] + data_table.columns = header + data_table = data_table.dropna() + + # Coordinates + coords = _get_coords(data_table) + + # Data + if variable_name == 'fgco2': + new_data = data_table['ocean sink'].values + elif variable_name == 'nbp': + new_data = (data_table['land sink'].values - + data_table['land-use change emissions'].values) + elif variable_name == 'nbp_residual': + new_data = ( + data_table['fossil emissions excluding carbonation'].values - + data_table['atmospheric growth'].values - + data_table['ocean sink'].values - + data_table['land-use change emissions'].values) + else: + raise NotImplementedError( + f"Derivation of '{variable_name}' not possible yet") + for _ in range(2): + new_data = np.expand_dims(new_data, -1) + new_units = Unit('Gt yr-1') + if var.get('area'): + new_data /= var['area'] + new_units = Unit('Gt yr-1 m-2') + cube = iris.cube.Cube(new_data.astype(np.float32), + dim_coords_and_dims=coords, + units=new_units) + + # Fix units + short_name = var.pop('short_name', variable_name) + cmor_info = cfg['cmor_table'].get_variable(var['mip'], short_name) + cube.convert_units(cmor_info.units) + utils.convert_timeunits(cube, 1950) + + # Fix metadata + attrs = dict(cfg['attributes']) + version_suffix = var.pop('version_suffix', None) + if version_suffix is not None: + attrs['version'] += f'-{version_suffix}' + attrs.update(var) + utils.fix_var_metadata(cube, cmor_info) + utils.set_global_atts(cube, attrs) + + # Save variable + utils.save_variable(cube, + short_name, + out_dir, + attrs, + unlimited_dimensions=['time']) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + filepath = os.path.join(in_dir, cfg['filename']) + logger.info("Reading '%s'", filepath) + with warnings.catch_warnings(): + warnings.filterwarnings( + 'ignore', + message='Unknown extension is not supported', + category=UserWarning, + module='openpyxl', + ) + data_table = pd.read_excel(filepath, + sheet_name='Global Carbon Budget', + 
index_col=0) + + # Run the cmorization + for (variable_name, var) in cfg['variables'].items(): + logger.info("CMORizing variable '%s'", variable_name) + _extract_variable(variable_name, var, cfg, data_table, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/gcp2020.py b/esmvaltool/cmorizers/data/formatters/datasets/gcp2020.py new file mode 100644 index 0000000000..47f0941be8 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/gcp2020.py @@ -0,0 +1,19 @@ +"""ESMValTool CMORizer for GCP2020 data. + +Tier + Tier 2: other freely-available dataset. + +Source + https://www.icos-cp.eu/science-and-impact/global-carbon-budget/2020 + +Last access + 20210908 + +Download and processing instructions + Download the following file: '2020 Global Budget v1.0' +""" + +from .gcp2018 import cmorization + +# The following line makes it clear that the above import is not an error +cmorization diff --git a/esmvaltool/cmorizers/data/formatters/datasets/ghcn.ncl b/esmvaltool/cmorizers/data/formatters/datasets/ghcn.ncl new file mode 100644 index 0000000000..ecd4fcb753 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/ghcn.ncl @@ -0,0 +1,123 @@ +; ############################################################################# +; ESMValTool CMORizer for GHCN data +; ############################################################################# +; +; Tier +; Tier 2: other freely-available dataset. +; +; Source +; https://www.esrl.noaa.gov/psd/data/gridded/data.ghcngridded.html +; +; Last access +; 20190308 +; +; Download and processing instructions +; Download the dataset "precip.mon.total.nc" (precipitation, total, surface, +; 1900-2015 on a 5x5 grid). +; +; Modification history +; 20190308-righi_mattia: minor changes to include coordinate boundaries. +; 20190227-bock_lisa: written. +; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + \ + "/data/formatters/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "ghcn.ncl" + + ; Source name + OBSNAME = "GHCN" + + ; Tier + TIER = 2 + + ; Period + YEAR1 = get_year(start_year, 1900) + YEAR2 = get_year(end_year, 2014) + + ; Selected variable (standard name) + VAR = "pr" + + ; Name in the raw data + NAME = "precip" + + ; MIP + MIP = "Amon" + + ; Frequency + FREQ = "mon" + + ; CMOR table + CMOR_TABLE = getenv("cmor_tables") + "/cmip5/Tables/CMIP5_Amon" + + ; Type + TYPE = "ground" + + ; Version + VERSION = "1" + + ; Global attributes + SOURCE = "https://www.esrl.noaa.gov/psd/data/gridded/data.ghcngridded.html" + REF = "Jones and Moberg, J. Clim., " + \ + "doi:10.1175/1520-0442(2003)016<0206:HALSSA>2.0.CO;2, 2003." 
+ COMMENT = "" + +end + +begin + + ; Read file + fname = input_dir_path + "precip.mon.total.nc" + f = addfile(fname, "r") + setfileoption("nc", "MissingToFillValue", False) + + ; Read absolute precipitation without last incomplete year + output = f->$NAME$(time|0:1379, lat|:, lon|:) + + ; Calculate days per month + date = cd_calendar(output&time, 0) + dpm = days_in_month(toint(date(:, 0)), toint(date(:, 1))) + dpmc = conform(output, dpm, 0) + + ; Check time range + if (dimsizes(date(:, 0)).ne.12 * (YEAR2 - YEAR1 + 1)) then + error_msg("f", DIAG_SCRIPT, "", "incorrect number of timesteps") + end if + + ; Convert units [mm/month] --> [kg/m2/s] + output = output / (24 * 3600 * dpmc) + + log_info(" Climatology range: " + min(output) + \ + " kg/m2/s to " + max(output) + " kg/m2/s") + + ; Format coordinates + format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ) + + ; Set variable attributes + tmp = format_variable(output, VAR, CMOR_TABLE) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = YEAR1 + "01-" + YEAR2 + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP, VAR, DATESTR/), "_") + ".nc" + + ; Write temperature time-series + write_nc(fout, VAR, output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + +end diff --git a/esmvaltool/cmorizers/data/formatters/datasets/ghcn_cams.py b/esmvaltool/cmorizers/data/formatters/datasets/ghcn_cams.py new file mode 100644 index 0000000000..2f3eff6bdd --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/ghcn_cams.py @@ -0,0 +1,63 @@ +"""ESMValTool CMORizer for GHCN-CAMS data. + +Tier + Tier 2: other freely-available dataset. 
+ +Source + https://www.esrl.noaa.gov/psd/data/gridded/data.ghcncams.html + ftp://ftp.cdc.noaa.gov/Datasets/ghcncams/air.mon.mean.nc + +Last access + 20200304 +""" + +import logging +import os + +import iris +from iris import NameConstraint + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def _extract_variable(short_name, var, cfg, filepath, out_dir): + """Extract variable.""" + raw_var = var.get('raw', short_name) + cube = iris.load_cube(filepath, NameConstraint(var_name=raw_var)) + + # Fix units + if 'raw_units' in var: + cube.units = var['raw_units'] + cmor_info = cfg['cmor_table'].get_variable(var['mip'], short_name) + cube.convert_units(cmor_info.units) + utils.convert_timeunits(cube, 1950) + + # Fix coordinates + cube = utils.fix_coords(cube) + if 'height2m' in cmor_info.dimensions: + utils.add_height2m(cube) + + # Fix metadata + attrs = cfg['attributes'] + attrs['mip'] = var['mip'] + utils.fix_var_metadata(cube, cmor_info) + utils.set_global_atts(cube, attrs) + + # Save variable + utils.save_variable(cube, + short_name, + out_dir, + attrs, + unlimited_dimensions=['time']) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + filepath = os.path.join(in_dir, cfg['filename']) + + # Run the cmorization + for (short_name, var) in cfg['variables'].items(): + logger.info("CMORizing variable '%s'", short_name) + _extract_variable(short_name, var, cfg, filepath, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/gistemp.py b/esmvaltool/cmorizers/data/formatters/datasets/gistemp.py new file mode 100644 index 0000000000..01366a0c06 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/gistemp.py @@ -0,0 +1,61 @@ +"""ESMValTool CMORizer for GISTEMP data. + +Tier + Tier 2: other freely-available dataset. 
+ +Source + https://data.giss.nasa.gov/gistemp/ + https://data.giss.nasa.gov/pub/gistemp/gistemp250_GHCNv4.nc.gz + +Last access + 20200303 +""" + +import logging +import os + +import iris +from iris import NameConstraint + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def _extract_variable(short_name, var, cfg, filepath, out_dir): + """Extract variable.""" + raw_var = var.get('raw', short_name) + cube = iris.load_cube(filepath, NameConstraint(var_name=raw_var)) + + # Fix units + cmor_info = cfg['cmor_table'].get_variable(var['mip'], short_name) + cube.convert_units(cmor_info.units) + utils.convert_timeunits(cube, 1950) + + # Fix coordinates + cube = utils.fix_coords(cube) + if 'height2m' in cmor_info.dimensions: + utils.add_height2m(cube) + + # Fix metadata + attrs = cfg['attributes'] + attrs['mip'] = var['mip'] + utils.fix_var_metadata(cube, cmor_info) + utils.set_global_atts(cube, attrs) + + # Save variable + utils.save_variable(cube, + short_name, + out_dir, + attrs, + unlimited_dimensions=['time']) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + filepath = os.path.join(in_dir, cfg['filename']) + + # Run the cmorization + for (short_name, var) in cfg['variables'].items(): + logger.info("CMORizing variable '%s'", short_name) + _extract_variable(short_name, var, cfg, filepath, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/glodap.py b/esmvaltool/cmorizers/data/formatters/datasets/glodap.py new file mode 100644 index 0000000000..0323f8b800 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/glodap.py @@ -0,0 +1,142 @@ +"""ESMValTool CMORizer for GLODAP data. + +Tier + Tier 2: other freely-available dataset. + +Source + GLODAP: https://www.glodap.info/index.php/mapped-data-product/ + +Last access + GLODAP: 20210528 + +Download and processing instructions + All handled by the script (download only if local raw data are missing) + +Modification history + 20210528-lovato_tomas: written +""" + +import logging +import os +import tarfile +from warnings import catch_warnings, filterwarnings + +import iris +import requests +from cf_units import Unit + +from esmvaltool.cmorizers.data.utilities import ( + constant_metadata, + fix_coords, + fix_var_metadata, + save_variable, + set_global_atts, +) + +logger = logging.getLogger(__name__) + + +def _fix_data(cube, var): + """Specific data fixes for different variables.""" + logger.info("Fixing data ...") + with constant_metadata(cube): + if var in [ + 'dissic', + 'talk', + ]: + cube /= 1000. # Convert from umol/kg to mol/m^3 + return cube + + +def collect_files( + in_dir, + var, + cfg, +): + """Compose input file list and download if missing.""" + var_dict = cfg['variables'][var] + + fname = '.'.join([var_dict['file'], var_dict['raw_var'], 'nc']) + in_file = os.path.join(in_dir, fname) + + # check if input file is missing + if not os.path.isfile(in_file): + if not os.path.isdir(in_dir): + os.makedirs(in_dir) + + # check if raw tar file is in place + tar_file = os.path.basename(cfg['attributes']['source']) + tar_file = os.path.join(in_dir, tar_file) + if not os.path.isfile(tar_file): + logger.info('Input file %s is missing\n', tar_file) + logger.info('Start download (requested space ~250Mb)... 
') + url_file = requests.get(cfg['attributes']['source']) + open(tar_file, 'wb').write(url_file.content) + + # get input file from tar archive + tar_file = tarfile.open(name=tar_file, mode='r') + tar_base = os.path.basename(cfg['attributes']['source'])[:-7] + member = tar_file.getmember(os.path.join(tar_base, fname)) + member.name = fname + tar_file.extract(member, path=in_dir) + tar_file.close() + + return in_file + + +def extract_variable(in_files, out_dir, attrs, raw_info, cmor_table): + """Extract variables and create OBS dataset.""" + var = raw_info['var'] + var_info = cmor_table.get_variable(raw_info['mip'], var) + rawvar = raw_info['raw_var'] + + with catch_warnings(): + filterwarnings( + action='ignore', + message='Ignoring netCDF variable .* invalid units .*', + category=UserWarning, + module='iris', + ) + cube = iris.load_cube(in_files, rawvar) + depth = iris.load_cube(in_files, 'Depth') + + # add depth coord + cube.add_dim_coord( + iris.coords.DimCoord(depth.data, + var_name='depth', + units='m', + attributes={'positive': 'down'}), 0) + # add time coord + year = raw_info['reference_year'] + time = Unit('months since ' + str(year) + '-01-01 00:00:00', + calendar='gregorian') + cube = iris.util.new_axis(cube) + cube.add_dim_coord( + iris.coords.DimCoord(6., + standard_name='time', + units=time, + bounds=[0., 12.]), 0) + + fix_var_metadata(cube, var_info) + cube = fix_coords(cube) + _fix_data(cube, var) + set_global_atts(cube, attrs) + save_variable(cube, var, out_dir, attrs, unlimited_dimensions=['time']) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + cmor_table = cfg['cmor_table'] + glob_attrs = cfg['attributes'] + + # run the cmorization + for var, vals in cfg['variables'].items(): + in_files = collect_files(in_dir, var, cfg) + logger.info("CMORizing var %s from input set %s", var, vals['file']) + raw_info = cfg['variables'][var] + raw_info.update({ + 'var': var, + 'reference_year': cfg['custom']['reference_year'], + }) + glob_attrs['mip'] = vals['mip'] + extract_variable(in_files, out_dir, glob_attrs, raw_info, cmor_table) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/gpcc.py b/esmvaltool/cmorizers/data/formatters/datasets/gpcc.py new file mode 100644 index 0000000000..4ce9bbc71e --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/gpcc.py @@ -0,0 +1,177 @@ +"""ESMValTool CMORizer for GPCC data. + +Tier + Tier 2: other freely-available dataset. + +Source + https://opendata.dwd.de/climate_environment/GPCC/html/fulldata-monthly_v2018_doi_download.html + https://opendata.dwd.de/climate_environment/GPCC/ + full_data_2018/full_data_monthly_v2018_[025 05 10 25].nc.gz +Last access + 20200225 + +Download and processing instructions + Download the following files: + full_data_monthly_{version}.nc.gz + +Two files are generated per version, one with version_grid (i.e. v2018_25), +one with version_grid-numgauge1 (i.e. v2018_25-numgauge1), which is constrained +on holding gridpoint values relying on data from at least one station (i.e. +removing gridpoints solely relying on climatological infilling). +""" + +import copy +import logging +import os +from warnings import catch_warnings, filterwarnings + +import cftime +import iris +import numpy as np +from cf_units import Unit +from iris import NameConstraint + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def _get_centered_timecoord(cube): + """Fix time coordinate. 
+ + Time points start at the beginning of month at 00:00:00. + """ + time = cube.coord('time') + times = time.units.num2date(time.points) + + # get bounds + starts = [cftime.DatetimeNoLeap(c.year, c.month, 1) for c in times] + ends = [ + cftime.DatetimeNoLeap(c.year, c.month + 1, 1) + if c.month < 12 else cftime.DatetimeNoLeap(c.year + 1, 1, 1) + for c in times + ] + time.bounds = time.units.date2num(np.stack([starts, ends], -1)) + + # get points + time.points = [np.mean((t1, t2)) for t1, t2 in time.bounds] + + +def _extract_variable(short_name, var, version, cfg, filepath, out_dir): + """Extract variable.""" + raw_var = var.get('raw', short_name) + with catch_warnings(): + filterwarnings( + action='ignore', + message='Ignoring netCDF variable .* invalid units .*', + category=UserWarning, + module='iris', + ) + cube = iris.load_cube(filepath, NameConstraint(var_name=raw_var)) + + # Fix units (mm/month) -> 'kg m-2 month-1' -> 'kg m-2 s-1' + cmor_info = cfg['cmor_table'].get_variable(var['mip'], short_name) + cube.units = Unit(var.get('raw_units', short_name)) + cube.convert_units(cmor_info.units) + + # fix calendar type + cube.coord('time').units = Unit(cube.coord('time').units.origin, + calendar=var.get('calendar', short_name)) + cube.coord('time').convert_units( + Unit('days since 1950-1-1 00:00:00', calendar='gregorian')) + + # Fix coordinates + # fix time + _get_centered_timecoord(cube) + + # fix flipped latitude + utils.flip_dim_coord(cube, 'latitude') + utils.fix_dim_coordnames(cube) + cube_coord = cube.coord('latitude') + utils.fix_bounds(cube, cube_coord) + + # fix longitude + cube_coord = cube.coord('longitude') + if cube_coord.points[0] < 0. and \ + cube_coord.points[-1] < 181.: + cube_coord.points = \ + cube_coord.points + 180. + utils.fix_bounds(cube, cube_coord) + cube.attributes['geospatial_lon_min'] = 0. + cube.attributes['geospatial_lon_max'] = 360. + nlon = len(cube_coord.points) + utils.roll_cube_data(cube, nlon // 2, -1) + + # Fix metadata + attrs = cfg['attributes'] + attrs['mip'] = var['mip'] + attrs['version'] = version + utils.fix_var_metadata(cube, cmor_info) + utils.set_global_atts(cube, attrs) + + # Save variable + utils.save_variable(cube, + short_name, + out_dir, + attrs, + unlimited_dimensions=['time']) + + # build contrainted cube on numgauge < 1 + constraint_var = var.get('constraint', short_name) + with catch_warnings(): + filterwarnings( + action='ignore', + message='Ignoring netCDF variable .* invalid units .*', + category=UserWarning, + module='iris', + ) + constr_cube = iris.load_cube(filepath, + NameConstraint(var_name=constraint_var)) + + # fix flipped latitude + utils.flip_dim_coord(constr_cube, 'latitude') + utils.fix_dim_coordnames(constr_cube) + cube_coord = constr_cube.coord('latitude') + utils.fix_bounds(constr_cube, cube_coord) + + # fix longitude + cube_coord = constr_cube.coord('longitude') + if cube_coord.points[0] < 0. and \ + cube_coord.points[-1] < 181.: + cube_coord.points = \ + cube_coord.points + 180. + utils.fix_bounds(constr_cube, cube_coord) + constr_cube.attributes['geospatial_lon_min'] = 0. + constr_cube.attributes['geospatial_lon_max'] = 360. 
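+
+    # Rolling the data by half the number of longitudes (below) completes
+    # the +180 degree shift of the coordinate points (above), moving the
+    # constraint cube from the -180..180 to the 0..360 degree longitude
+    # convention, as was done for the data cube.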
+ nlon = len(cube_coord.points) + utils.roll_cube_data(constr_cube, nlon // 2, -1) + + cube.data = np.ma.masked_where(constr_cube.data < 1., cube.data) + + # Save variable + attrs = copy.deepcopy(cfg['attributes']) + attrs.update({ + 'comment': 'constrained on gridpoint values being based on' + 'at least 1 station', + 'version': attrs['version'] + '-numgauge1' + }) + attrs['mip'] = var['mip'] + + utils.save_variable(cube, + short_name, + out_dir, + attrs, + unlimited_dimensions=['time']) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + raw_filepath = os.path.join(in_dir, cfg['filename']) + + # Run the cmorization + for version in cfg['attributes']['version'].values(): + for (short_name, var) in cfg['variables'].items(): + raw_var = var.get('raw', short_name) + filepath = raw_filepath.format(version=version, raw_name=raw_var) + logger.info("CMORizing variable '%s'", short_name) + _extract_variable(short_name, var, version, cfg, filepath, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/gpcp_sg.py b/esmvaltool/cmorizers/data/formatters/datasets/gpcp_sg.py new file mode 100644 index 0000000000..70fbecf663 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/gpcp_sg.py @@ -0,0 +1,107 @@ +"""ESMValTool CMORizer for GPCP-SG data. + +Tier + Tier 2: other freely-available dataset. + +Source + https://psl.noaa.gov/data/gridded/data.gpcp.html + https://downloads.psl.noaa.gov/Datasets/gpcp/precip.mon.mean.nc + +Last access + 20230215 + +Download and processing instructions + Download the file precip.mon.mean.nc + wget https://downloads.psl.noaa.gov/Datasets/gpcp/precip.mon.mean.nc +""" + +import logging +import warnings +from pathlib import Path + +import iris +from iris import NameConstraint + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def _fix_var_metadata(var_info, cmor_info, cube): + """Fix variable metadata.""" + if 'raw_units' in var_info: + cube.units = var_info['raw_units'] + + if cube.units == 'mm/day': + cube.units = 'kg m-2 day-1' + + cube.convert_units(cmor_info.units) + + utils.fix_var_metadata(cube, cmor_info) + return cube + + +def _fix_coords(cube, filepath): + """Fix coordinates.""" + utils.fix_dim_coordnames(cube) + + # Bounds + + # Time + time_bnds = iris.load_cube(filepath, NameConstraint(var_name='time_bnds')) + cube.coord('time').bounds = time_bnds.core_data() + # Latitude + lat_bnds = iris.load_cube(filepath, NameConstraint(var_name='lat_bnds')) + cube.coord('latitude').bounds = lat_bnds.core_data() + # Longitude + lon_bnds = iris.load_cube(filepath, NameConstraint(var_name='lon_bnds')) + cube.coord('longitude').bounds = lon_bnds.core_data() + + +def _extract_variable(var_info, cmor_info, attrs, filepath, out_dir): + """Extract variable.""" + var = cmor_info.short_name + raw_var = var_info.get('raw_name', var) + + # Load data + with warnings.catch_warnings(): + warnings.filterwarnings( + action='ignore', + message="Skipping global attribute 'units': 'units' is not a " + "permitted attribute", + category=UserWarning, + module='iris', + ) + cube = iris.load_cube(filepath, NameConstraint(var_name=raw_var)) + + # Fix variable metadata + cube = _fix_var_metadata(var_info, cmor_info, cube) + + # Fix coordinates + _fix_coords(cube, filepath) + + # Fix global metadata + utils.set_global_atts(cube, attrs) + + # Save variable + utils.save_variable( + cube, + var, + out_dir, + attrs, + unlimited_dimensions=['time'], + ) + + +def 
cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + cmor_table = cfg['cmor_table'] + glob_attrs = cfg['attributes'] + + # Run the cmorization + for (var, var_info) in cfg['variables'].items(): + filepath = Path(in_dir) / var_info['filename'] + logger.info("CMORizing variable '%s' from file %s", var, filepath) + glob_attrs['mip'] = var_info['mip'] + cmor_info = cmor_table.get_variable(var_info['mip'], var) + _extract_variable(var_info, cmor_info, glob_attrs, filepath, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/grace.py b/esmvaltool/cmorizers/data/formatters/datasets/grace.py new file mode 100644 index 0000000000..c287cc7cf3 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/grace.py @@ -0,0 +1,189 @@ +"""ESMValTool CMORizer for GRACE. + +Tier + Tier 3 +Source + https://podaac.jpl.nasa.gov/dataset/TELLUS_GRAC-GRFO_MASCON_CRI_GRID_RL06_V2 +Last access + 20211019 + +Download and processing instructions + - Go to the above link + - Click the tab "Data Access" + - Log in with Earthdata account + - Download the following files: + - CLM4.SCALE_FACTOR.JPL.MSCNv02CRI.nc + - GRCTellus.JPL.200204_202108.GLO.RL06M.MSCNv02CRI.nc + - LAND_MASK.CRI.nc + - Download the grace months table which holds important information + on data coverage. Save it in the RAWOBSDIR. + https://podaac-tools.jpl.nasa.gov/drive/files/allData/gracefo/docs/GRACE_GRACE-FO_Months_RL06.csv + - Manually inspect and check the months table + + +Modification history + 20200630-crezee_bas: written. + 20201127-kazeroni_remi: updated for latest dataset + 20211019-kazeroni_remi: updated for extended dataset +""" + +import logging +import os +from copy import deepcopy +from datetime import datetime + +import iris +import numpy as np +import pandas as pd +import xarray as xr +from cf_units import Unit +from dateutil import relativedelta +from esmvalcore.preprocessor import regrid_time +from iris import NameConstraint + +from ... 
import utilities as utils + +logger = logging.getLogger(__name__) + + +def _make_monthly_data_contiguous(in_file, out_file, cfg): + + original = xr.open_dataset(in_file)[cfg['variables']['lweGrace']['raw']] + + months_table_file = os.path.join(cfg['in_dir'], cfg['grace_table']) + # Read CSV file if available + if os.path.isfile(months_table_file): + grace_months_table = pd.read_csv(months_table_file) + else: + logger.error("CSV file %s does not exist", months_table_file) + # Construct the time axis + time_axis = [] + # read the first and last years and months from the csv table + time_grace = [[], []] # [start time], [end time] + time_grace[0].append(grace_months_table['YEAR'].iloc[0]) + time_grace[1].append(grace_months_table['YEAR'].iloc[-1]) + time_grace[0].append( + datetime.strptime(grace_months_table['MONTH'].iloc[0], '%b').month) + time_grace[1].append( + datetime.strptime(grace_months_table['MONTH'].iloc[-1], '%b').month) + time_grace[0].append(15) + time_grace[1].append(15) + start_date = datetime(*time_grace[0]) + end_date = datetime(*time_grace[1]) + while start_date <= end_date: + time_axis.append(start_date) + start_date += relativedelta.relativedelta(months=1) + + # Initialize data array with nan + data = np.ones((len(time_axis), ) + original.shape[1:]) + data[:] = np.nan + + # Now fill the array with grace data + for nmonth, recindex in enumerate( + grace_months_table['GRACE/GRACE-FO record index']): + if not np.isnan(recindex): + data[nmonth, :, :] = original[int(recindex - 1), :, :].data + data_array = xr.DataArray(data, + coords={ + 'time': time_axis, + 'lat': original.lat, + 'lon': original.lon + }, + dims=['time', 'lat', 'lon']) + + dataset = data_array.to_dataset(name=cfg['variables']['lweGrace']['raw']) + dataset.to_netcdf(out_file) + + +def _apply_gain_and_land_sea_mask(in_file, out_file, cfg): + + gain_file = os.path.join(cfg['in_dir'], cfg['auxfiles']['scale_factor']) + lsm_file = os.path.join(cfg['in_dir'], cfg['auxfiles']['land_mask']) + + gain = xr.open_dataset(gain_file) + lsm = xr.open_dataset(lsm_file) + data = xr.open_dataset(in_file) + + data = data['lwe_thickness'] + gain = gain['scale_factor'] + lsm = lsm['land_mask'] + data = gain * data + data = data.transpose('time', 'lat', 'lon') + data = data.where(lsm) + + # Specify that unit is cm here (will be converted later) + data.attrs['units'] = 'cm' + data = data.to_dataset(name='lwe_thickness') + data.to_netcdf(out_file) + + +def _cmorize_dataset(in_file, var, cfg, out_dir): + logger.info("CMORizing variable '%s' from input file '%s'", + var['short_name'], in_file) + attributes = deepcopy(cfg['attributes']) + attributes['mip'] = var['mip'] + + cmor_table = cfg['cmor_table'] + definition = cmor_table.get_variable(var['mip'], var['short_name']) + + cube = iris.load_cube(str(in_file), + constraint=NameConstraint(var_name=var['raw'])) + + # Set correct names + cube.var_name = definition.short_name + if definition.standard_name: + cube.standard_name = definition.standard_name + + cube.long_name = definition.long_name + + # Convert units if required + cube.convert_units(definition.units) + + # Set global attributes + utils.set_global_atts(cube, attributes) + + # Setting time right + cube = regrid_time(cube, 'mon') + + # Set calendar to gregorian instead of proleptic gregorian + # matplotlib does not correctly format years in proleptic gregorian + old_unit = cube.coord('time').units + new_unit = Unit(old_unit.origin, calendar='gregorian') + cube.coord('time').units = new_unit + + logger.info("Saving CMORized cube 
for variable %s", cube.var_name) + utils.save_variable(cube, cube.var_name, out_dir, attributes) + + return in_file + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + cfg['work_dir'] = cfg_user.work_dir + # Pass on some parameters to cfg file + cfg['rawobsdir'] = cfg_user['rootpath']['RAWOBS'][0] + cfg['in_dir'] = in_dir + # If it doesn't exist, create it + if not os.path.isdir(cfg['work_dir']): + logger.info("Creating working directory for resampling: %s", + cfg['work_dir']) + os.mkdir(cfg['work_dir']) + + # run the cmorization + for short_name, var in cfg['variables'].items(): + var['short_name'] = short_name + logger.info("Processing var %s", short_name) + in_file = os.path.join(in_dir, var['file']) + logger.info("Structure monthly data") + out_file = os.path.join(cfg['work_dir'], + 'grace_monthly_data_contiguous.nc') + _make_monthly_data_contiguous(in_file, out_file, cfg) + in_file = out_file + out_file = os.path.join( + cfg['work_dir'], + 'grace_monthly_data_contiguous_gain_lsm_applied.nc') + _apply_gain_and_land_sea_mask(in_file, out_file, cfg) + in_file = out_file + logger.info("Start CMORization of file %s", in_file) + _cmorize_dataset(in_file, var, cfg, out_dir) + logger.info("Finished regridding and CMORizing.") diff --git a/esmvaltool/cmorizers/data/formatters/datasets/hadcrut3.ncl b/esmvaltool/cmorizers/data/formatters/datasets/hadcrut3.ncl new file mode 100644 index 0000000000..de5f2439f3 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/hadcrut3.ncl @@ -0,0 +1,126 @@ +; ############################################################################# +; ESMValTool CMORizer for HadCRUT3 data +; ############################################################################# +; +; Tier +; Tier 2: other freely-available dataset. +; +; Source +; http://www.metoffice.gov.uk/hadobs/hadcrut3/data/download.html +; +; Last access +; 20190221 +; +; Download and processing instructions +; Download the HadCRUT3v.nc file (variance adjusted dataset). +; +; Caveats +; The HadCRUT3v variance-adjusted dataset for tas is actually the anomaly +; with respect to the period 1958-2001. +; +; Modification history +; 20190221-righi_mattia: adapted to v2 and renamed to HadCRUT3. +; 20150330-righi_mattia: updated paths and global attributes. +; 20140311-senftleben_daniel: written. +; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + \ + "/data/formatters/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "hadcrut3.ncl" + + ; Source name + OBSNAME = "HadCRUT3" + + ; Tier + TIER = 2 + + ; Period + YEAR1 = get_year(start_year, 1850) + YEAR2 = get_year(end_year, 2013) + + ; Selected variable (standard name) + VAR = "tasa" + + ; Name in the raw data + NAME = "temp" + + ; MIP + MIP = "Amon" + + ; Frequency + FREQ = "mon" + + ; CMOR table + CMOR_TABLE = getenv("cmor_tables") + "/custom/CMOR_tasa.dat" + + ; Type + TYPE = "ground" + + ; Version + VERSION = "1" + + ; Global attributes + SOURCE = "http://www.metoffice.gov.uk/hadobs/hadcrut3/data/download.html" + REF = "Brohan et al., J. Geophys. 
Res., doi:10.1029/2005JD006548, 2006" + COMMENT = "Temperature anomaly with respect to the period 1958-2001" + +end + +begin + + ; Read file + fname = input_dir_path + "HadCRUT3v.nc" + f = addfile(fname, "r") + + ; Read variable + output = f->temp + + ; Delete level coordinate (dimension 1) + tmp = rm_single_dims(output) + delete(output) + output = tmp + delete(tmp) + + ; Extract time period + output!0 = "time" + date = cd_calendar(output&time, 0) + idx = ind(date(:, 0).ge.YEAR1 .and. date(:, 0).le.YEAR2) + output := output(idx, :, :) + + ; Format coordinates + output!0 = "time" + output!1 = "lat" + output!2 = "lon" + format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ) + + ; Set variable attributes + tmp = format_variable(output, VAR, CMOR_TABLE) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = YEAR1 + "01-" + YEAR2 + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP, VAR, DATESTR/), "_") + ".nc" + + ; Write variable + write_nc(fout, VAR, output, bounds, gAtt) + w = addfile(fout, "w") + delete(w) + delete(gAtt) + delete(output) + delete(bounds) + +end diff --git a/esmvaltool/cmorizers/data/formatters/datasets/hadcrut4.ncl b/esmvaltool/cmorizers/data/formatters/datasets/hadcrut4.ncl new file mode 100644 index 0000000000..bda11385e5 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/hadcrut4.ncl @@ -0,0 +1,267 @@ +; ############################################################################# +; ESMValTool CMORizer for HadCRUT4 data +; ############################################################################# +; +; Tier +; Tier 2: other freely-available dataset. +; +; Source +; https://crudata.uea.ac.uk/cru/data/temperature/ +; +; Last access +; 20201125 +; +; Download and processing instructions +; Download the dataset "HadCRUT4" (median temperature anomalies) and +; the dataset "Absolute" (absolute temperatures for the base period +; 1961-90 on a 5x5 grid). +; For the 5% to 95% confidence interval of the combined effects of all the +; uncertainties described in the HadCRUT4 error model (measurement and +; sampling, bias, and coverage uncertainties) download: +; HadCRUT.4.6.0.0.annual_ns_avg.txt from +; https://www.metoffice.gov.uk/hadobs/hadcrut4/data/current/download.html +; +; Caveats +; In contrast to the HadCRUT3 reformat script which produces temperature +; anomalies (relative to the 1961-90 climatology), this script calculates +; absolute tas by adding the climatology ("absolute.nc") to the anomalies +; ("HadCRUT.4.6.0.0.median.nc"). It creates 2 output, one with the +; temperature time-series and one with the anomaly time-series +; +; Modification history +; 20201125-bock_lisa: add tas uncertainty +; 20190916-righi_mattia: remove no-longer used climatology output. +; 20190229-righi_mattia: added output for anomaly (tasa). +; 20190208-righi_mattia: added output for climatology and adapted to v2. +; 20180222-lauer_axel: bug fix (added swapping of latitudes if needed). +; 20160203-lauer_axel: written. 
+; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + \ + "/data/formatters/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "hadcrut4.ncl" + + ; Source name + OBSNAME = "HadCRUT4" + + ; Tier + TIER = 2 + + ; Period + YEAR1 = get_year(start_year, 1850) + YEAR2 = get_year(end_year, 2018) + + ; Selected variable (standard name) + VAR = (/"tas", "tasa", "tasConf5", "tasConf95"/) + + ; MIP + MIP = "Amon" + + ; Frequency + FREQ = "mon" + + ; CMOR table + CMOR_TABLE1 = getenv("cmor_tables") + \ + "/cmip5/Tables/CMIP5_Amon" + CMOR_TABLE2 = getenv("cmor_tables") + \ + "/custom/CMOR_tasa.dat" + CMOR_TABLE3 = getenv("cmor_tables") + \ + "/custom/CMOR_tasConf5.dat" + CMOR_TABLE4 = getenv("cmor_tables") + \ + "/custom/CMOR_tasConf95.dat" + + ; Version + VERSION = "1" + + ; Type + TYPE1 = "ground" + TYPE2 = "ground" + + ; Global attributes + SOURCE = "https://crudata.uea.ac.uk/cru/data/temperature/" + REF1 = "Morice et al., J. Geophys. Res., doi:10.1029/2011JD017187, 2012" + REF2 = "Morice et al., J. Geophys. Res., doi:10.1029/2011JD017187, 2012" + COMMENT1 = "Temperature time-series calculated from the anomaly " + \ + "time-series by adding the temperature climatology for 1961-1990" + COMMENT2 = "Temperature anomaly with respect to the period 1961-1990" + COMMENT3 = "Yearly uncertainty of temperature time-series" + +end + +begin + + ; Read file + fname1 = input_dir_path + "HadCRUT.4.6.0.0.median.nc" + fname2 = input_dir_path + "absolute.nc" + f1 = addfile(fname1, "r") + setfileoption("nc", "MissingToFillValue", False) + f2 = addfile(fname2, "r") + + ; Read anomaly + anomaly = f1->temperature_anomaly + + ; Read absolute temperature + tmp = f2->tem + clim = tofloat(tmp * tmp@scale_factor) + 273.15 + copy_VarCoords(tmp, clim) + delete(tmp) + + ; Swap latitudes + if (isMonotonic(anomaly&latitude).eq.-1) then + anomaly = anomaly(:, ::-1, :) + end if + + if (isMonotonic(clim&lat).eq.-1) then + clim = clim(:, ::-1, :) + end if + + log_info(" Climatology range: " + min(clim) + \ + " K to " + max(clim) + " K") + log_info(" Anomaly range: " + min(anomaly) + \ + " K to " + max(anomaly) + " K") + + output1 = anomaly + output2 = anomaly + dims = dimsizes(output1) + + ; Add absolute temperature to anomaly + do yr = 0, dims(0) / 12 - 1 + m1 = yr * 12 + m2 = m1 + 11 + output1(m1:m2, :, :) = where(.not.ismissing(clim), \ + anomaly(m1:m2, :, :) + clim, \ + tofloat(anomaly@_FillValue)) + end do + + ; Format coordinates + output1!0 = "time" + output1!1 = "lat" + output1!2 = "lon" + format_coords(output1, YEAR1 + "0101", YEAR2 + "1231", FREQ) + output2!0 = "time" + output2!1 = "lat" + output2!2 = "lon" + format_coords(output2, YEAR1 + "0101", YEAR2 + "1231", FREQ) + + ; Calculate coordinate bounds + bounds1 = guess_coord_bounds(output1, FREQ) + bounds2 = guess_coord_bounds(output2, FREQ) + + ; Set variable attributes + tmp = format_variable(output1, VAR(0), CMOR_TABLE1) + delete(output1) + output1 = tmp + delete(tmp) + tmp = format_variable(output2, VAR(1), CMOR_TABLE2) + delete(output2) + output2 = tmp + delete(tmp) + + ; Add height coordinate + output1@coordinates = "height" + height = 2.d + height!0 = "ncl_scalar" + height@units = "m" + height@axis = "Z" + height@positive = "up" + height@long_name = "height" + height@standard_name = "height" + + ; Set global attributes + gAtt1 = set_global_atts(OBSNAME, TIER, SOURCE, REF1, COMMENT1) + gAtt2 = set_global_atts(OBSNAME, TIER, SOURCE, REF2, COMMENT2) + + ; Write temperature 
time-series + DATESTR = YEAR1 + "01-" + YEAR2 + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE1, VERSION, \ + MIP, VAR(0), DATESTR/), "_") + ".nc" + write_nc(fout, VAR(0), output1, bounds1, gAtt1) + w = addfile(fout, "w") + w->height = height + delete(w) + delete(gAtt1) + delete(bounds1) + delete(output1) + + ; Write temperature anomaly time-series + DATESTR = YEAR1 + "01-" + YEAR2 + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE2, VERSION, \ + MIP, VAR(1), DATESTR/), "_") + ".nc" + write_nc(fout, VAR(1), output2, bounds2, gAtt2) + w = addfile(fout, "w") + delete(w) + delete(gAtt2) + delete(bounds2) + delete(output2) + + ; ------------------------------------- + ; Uncertainties + ; ------------------------------------- + + ; Read uncertainty file + fname = input_dir_path + "HadCRUT.4.6.0.0.annual_ns_avg.txt" + + ntime = YEAR2 - YEAR1 + 1 + tmp = asciiread(fname, (/ntime, 12/), "float") + + ntime2 = 12 * (YEAR2 - YEAR1 + 1) + data1 = new(ntime2, float) + data2 = new(ntime2, float) + + i = 0 + do yy = 0, ntime - 1 + data1((yy * 12) : (yy * 12 + 11)) = tmp(yy, 1) - tmp(yy, 10) + data2((yy * 12) : (yy * 12 + 11)) = tmp(yy, 11) - tmp(yy, 1) + end do + + delete(tmp) + + data1!0 = "time" + data1&time = create_timec(YEAR1, YEAR2) + format_coords(data1, YEAR1 + "0101", YEAR2 + "1231", FREQ) + data2!0 = "time" + data2&time = data1&time + + ; Set variable attributes + tmp = format_variable(data1, VAR(2), CMOR_TABLE3) + delete(data1) + data1 = tmp + delete(tmp) + tmp = format_variable(data2, VAR(3), CMOR_TABLE4) + delete(data2) + data2 = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(data1, FREQ) + bounds = guess_coord_bounds(data2, FREQ) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF1, COMMENT3) + + ; Write temperature time-series uncertainties + DATESTR = YEAR1 + "01-" + YEAR2 + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE1, VERSION, \ + MIP, VAR(2), DATESTR/), "_") + ".nc" + write_nc(fout, VAR(2), data1, bounds, gAtt) + w = addfile(fout, "w") + delete(w) + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE1, VERSION, \ + MIP, VAR(3), DATESTR/), "_") + ".nc" + write_nc(fout, VAR(3), data2, bounds, gAtt) + w = addfile(fout, "w") + delete(w) + delete(gAtt) + delete(bounds) + delete(data1) + delete(data2) + +end diff --git a/esmvaltool/cmorizers/data/formatters/datasets/hadcrut5.py b/esmvaltool/cmorizers/data/formatters/datasets/hadcrut5.py new file mode 100644 index 0000000000..b0d01ee4fa --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/hadcrut5.py @@ -0,0 +1,113 @@ +"""ESMValTool CMORizer for HadCRUT5 data. + +Tier + Tier 2: other freely-available dataset. + +Source + https://crudata.uea.ac.uk/cru/data/temperature + +Last access + 20220328 + +Download and processing instructions + Download the following files: + infilling + [Source]/HadCRUT.5.0.1.0.analysis.anomalies.ensemble_mean.nc + no-infilling + [Source]/HadCRUT.5.0.1.0.anomalies.ensemble_mean.nc + climatology + [Source]/absolute_v5.nc +""" + +import copy +import logging +import os + +import iris +import numpy as np +from cf_units import Unit +from iris import NameConstraint + +from ... 
import utilities as utils + +logger = logging.getLogger(__name__) + + +def _extract_variable(short_name, var, version, filename, cfg, in_dir, + out_dir): + """Extract variable.""" + # load data + filepath = os.path.join(in_dir, filename) + raw_var = var.get('raw', short_name) + cube = iris.load_cube(filepath, NameConstraint(var_name=raw_var)) + + if short_name == 'tas': + # load climatology + filepath_clim = os.path.join(in_dir, cfg['climatology']['filename']) + raw_var = var.get('raw_clim', short_name) + clim_cube = iris.load_cube(filepath_clim, + NameConstraint(var_name=raw_var)) + + # fix units + cmor_info = cfg['cmor_table'].get_variable(var['mip'], short_name) + for cub in [cube, clim_cube]: + if cub.units != cmor_info.units: + cub.convert_units(cmor_info.units) + + # derive absolute temperatures + clim_data = clim_cube.data + clim_data = np.tile(clim_data, [cube.shape[0] // 12, 1, 1]) + if cube.shape[0] % 12 != 0: + for i in range(cube.shape[0] % 12): + clim_data = np.vstack([clim_data, clim_data[i:i + 1]]) + + cube.data = cube.data + clim_data + + if short_name == 'tasa': + # fix units + cmor_info = cfg['cmor_table'].get_variable(var['mip'], short_name) + if cube.units != cmor_info.units: + cube.convert_units(cmor_info.units) + + # fix time units + cube.coord('time').convert_units( + Unit('days since 1950-1-1 00:00:00', calendar='gregorian')) + + # Fix coordinates + utils.fix_dim_coordnames(cube) + cube_coord = cube.coord('longitude') + if cube_coord.points[0] < 0. and \ + cube_coord.points[-1] < 181.: + cube_coord.points = \ + cube_coord.points + 180. + utils.fix_bounds(cube, cube_coord) + cube.attributes['geospatial_lon_min'] = 0. + cube.attributes['geospatial_lon_max'] = 360. + nlon = len(cube_coord.points) + utils.roll_cube_data(cube, nlon // 2, -1) + if 'height2m' in cmor_info.dimensions: + utils.add_height2m(cube) + + # Fix metadata and update version information + attrs = copy.deepcopy(cfg['attributes']) + attrs['mip'] = var['mip'] + attrs['version'] += '-' + version + utils.fix_var_metadata(cube, cmor_info) + utils.set_global_atts(cube, attrs) + + # Save variable + utils.save_variable(cube, + short_name, + out_dir, + attrs, + unlimited_dimensions=['time']) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + # Run the cmorization + for (short_name, var) in cfg['variables'].items(): + for (version, filename) in cfg['filenames'].items(): + logger.info("CMORizing variable '%s' '%s'", short_name, version) + _extract_variable(short_name, var, version, filename, cfg, in_dir, + out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/hadisst.ncl b/esmvaltool/cmorizers/data/formatters/datasets/hadisst.ncl new file mode 100644 index 0000000000..579b74b2b7 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/hadisst.ncl @@ -0,0 +1,135 @@ +; ############################################################################# +; ESMValTool CMORizer for HadISST data +; ############################################################################# +; +; Tier +; Tier 2: other freely-available dataset. +; +; Source +; http://www.metoffice.gov.uk/hadobs/hadisst/data/download.html +; +; Last access +; 20190208 +; +; Download and processing instructions +; Download and unzip "HadISST_ice.nc.gz" and "HadISST_sst.nc.gz". +; +; Modification history +; 20190208-hassler_birgit: adapted to v2. +; 20180530-righi_mattia: fixed coordinates and metadata. +; 20170217-senftleben_daniel: modified to write SST fields as 'tos'. 
+; 20150422-lauer_axel: written. +; +; ############################################################################ +loadscript(getenv("esmvaltool_root") + \ + "/data/formatters/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "hadisst.ncl" + + ; Source name + OBSNAME = "HadISST" + + ; Tier + TIER = 2 + + ; Period + YEAR1 = get_year(start_year, 1870) + YEAR2 = get_year(end_year, 2021) + + ; Selected variable (standard name) + VAR = (/"ts", "tos", "sic"/) + + ; Name in the raw data + NAME = (/"sst", "sst", "ice"/) + + ; MIP + MIP = (/"Amon", "Omon", "OImon"/) + + ; Frequency + FREQ = (/"mon", "mon", "mon"/) + + ; CMOR table + CMOR_TABLE = getenv("cmor_tables") + "/cmip5/Tables/CMIP5_" + MIP + + ; Type + TYPE = "reanaly" + + ; Version + VERSION = "1" + + ; Global attributes + SOURCE = "http://www.metoffice.gov.uk/hadobs/hadisst/data/download.html" + REF = "Rayner et al., J. Geophys. Res., doi:10.1029/2002JD002670, 2013" + COMMENT = "" + +end + +begin + + ; Loop over variables + do vv = 0, dimsizes(VAR) - 1 + + log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") + + ; Read variables + fname = input_dir_path + "HadISST_" + NAME(vv) + ".nc" + f = addfile(fname, "r") + if (NAME(vv).eq."sst") then + output = f->sst + output@_FillValue = -1000. + end if + if (NAME(vv).eq."ice") then + output = f->sic + end if + + ; Convert units + if (isStrSubset(NAME(vv), "sst")) then + output = output + 273.15 ; [degC] --> [K] + output@units = "K" + end if + if (isStrSubset(NAME(vv), "ice")) then + output = output * 100. ; [1] --> [%] + output@units = "%" + end if + + ; Extract time period + date = cd_calendar(output&time, 0) + idx = ind(date(:, 0).ge.YEAR1 .and. date(:, 0).le.YEAR2) + output := output(idx, :, :) + + ; Format coordinates + output!0 = "time" + output!1 = "lat" + output!2 = "lon" + format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ(vv)) + + ; Set variable attributes + tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ(vv)) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = YEAR1 + "01-" + YEAR2 + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" + + ; Write variable + write_nc(fout, VAR(vv), output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + end do + +end diff --git a/esmvaltool/cmorizers/data/formatters/datasets/haloe.ncl b/esmvaltool/cmorizers/data/formatters/datasets/haloe.ncl new file mode 100644 index 0000000000..cc0a46aa6c --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/haloe.ncl @@ -0,0 +1,163 @@ +; ############################################################################# +; ESMValTool CMORizer for HALOE data +; ############################################################################# +; +; Tier +; Tier 2: other freely-available dataset. +; +; Source +; Grooss, J.-U. and Russell III, J. M., Atmos. Chem. Phys., 5, 2797-2807, +; doi:10.5194/acp-5-2797-2005, 2005. +; +; Last access +; 20200311 +; +; Download and processing instructions +; Download and untar the supplementary material of the above reference. +; +; Caveats +; The time period includes incomplete years. 
According to the given +; reference (Section 3): "Between October 1991 and August 2002, a total of +; 78600 HALOE profiles of O3, CH4, H2O, HCl, and HF mixing ratio were then +; combined into bins of 5 degree equivalent latitude for 22 pressure levels +; between 316 and 0.1 hPa and 12 months". +; In order to create T3M fields a fake longitude coordinate is added. +; +; Modification history +; 20200311-righi_mattia: ported from v1. +; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + \ + "/data/formatters/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "haloe.ncl" + + ; Source name + OBSNAME = "HALOE" + + ; Tier + TIER = 2 + + ; Period + YEAR1 = get_year(start_year, 1991) + YEAR2 = get_year(end_year, 2002) + + ; Selected variable (standard name) + VAR = (/"hus", "tro3"/) + + ; Name in the raw data + NAME = (/"H2O", "O3"/) + + ; MIP + MIP = (/"Amon", "Amon"/) + + ; Frequency + FREQ = (/"mon", "mon"/) + + ; CMOR table + CMOR_TABLE = getenv("cmor_tables") + \ + (/"/cmip5/Tables/CMIP5_Amon", "/cmip5/Tables/CMIP5_Amon"/) + + ; Type + TYPE = "sat" + + ; Version + VERSION = "1" + + ; Global attributes + SOURCE = "https://www.atmos-chem-phys.net/5/2797/2005/" + \ + "acp-5-2797-2005-supplement.tar" + REF = " Grooss and Russell, Atmos. Chem. Phys., " + \ + "doi:10.5194/acp-5-2797-2005, 2005." + COMMENT = "" + +end + +begin + + ; Open input file + f = addfile(input_dir_path + "haloe_eqlat_monthly.nc", "r") + + do vv = 0, dimsizes(VAR) - 1 + + log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") + + time = create_timec(YEAR1, YEAR2) + date = cd_calendar(time, 1) + + xx = f->$NAME(vv)$ + + ; hus: convert mixing ratio to specific humidity + if (VAR(vv).eq."hus") then + xx = xx * 18.015 / 28.97 ; [m3/m3] --> [kg/kg] + xx = xx / (1. 
+ xx) + end if + + ; tro3: convert mixing ratio to ppbv + if (VAR(vv).eq."tro3") then + xx = xx * 1.e9 ; [m3/m3] --> [1.e-9] (i.e., [nmol/mol]) + end if + + ; Define output array + tmp = new((/dimsizes(time), dimsizes(xx&press), dimsizes(xx&eqlat)/), \ + float) + tmp!0 = "time" + tmp&time = time + + ; Extract indexes for valid time range (see caveats) + idx1 = ind(cd_calendar(time, -1).eq.199110) + idx2 = ind(cd_calendar(time, -1).eq.200208) + tmp(idx1:idx2, :, :) = (/xx/) + + ; Create a fake longitude coordinate + nlon = 50 + dims = array_append_record(dimsizes(tmp), nlon, 0) + output = new(dims, float) + output!0 = "time" + output!1 = "plev" + output!2 = "lat" + output!3 = "lon" + output&time = time + output&plev = 100 * f->press ; [hPa] --> [Pa] + output&lat = f->eqlat + output&lon = lonGlobeF(nlon, "lon", "latitude", "degrees_East") + do ii = 0, dimsizes(output&lon) - 1 + output(:, :, :, ii) = (/tmp/) + end do + copy_VarAtts(xx, output) + delete(tmp) + delete(xx) + + ; Format coordinates + format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ(vv)) + + ; Set variable attributes + tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ(vv)) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = YEAR1 + "01-" + YEAR2 + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" + + ; Write variable + write_nc(fout, VAR(vv), output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + end do + +end diff --git a/esmvaltool/cmorizers/data/formatters/datasets/hwsd.py b/esmvaltool/cmorizers/data/formatters/datasets/hwsd.py new file mode 100644 index 0000000000..68c894f39b --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/hwsd.py @@ -0,0 +1,83 @@ +"""ESMValTool CMORizer for HWSD data. + +Tier + Tier 3: restricted dataset. + +Source + https://daac.ornl.gov/cgi-bin/dsviewer.pl?ds_id=1247 + +Last access + 20191015 + +Download and processing instructions + Download the following file: + HWSD_SOIL_CLM_RES.nc4 + A registration is required for downloading the data. 
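+    The soil carbon content comes on two depth layers, which are summed by
+    this script to obtain the total cSoil.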
+""" + +import logging +import os + +import iris +from cf_units import Unit +from iris import NameConstraint + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def _extract_variable(short_name, var, cfg, filepath, out_dir): + """Extract variable.""" + raw_var = var.get('raw', short_name) + cube = iris.load_cube(filepath, NameConstraint(var_name=raw_var)) + + # Sum over levels + if short_name in ('cSoil', ): + level_coord = iris.coords.DimCoord([0, 1], long_name='level') + cube.add_dim_coord(level_coord, 0) + cube = cube.collapsed('level', iris.analysis.SUM) + + # Fix coordinates + if var['mip'] != 'fx': + cube = iris.util.new_axis(cube) + time_dim = iris.coords.DimCoord( + [183.0], + bounds=[0.0, 366.0], + units=Unit('days since 2000-01-01 00:00:00'), + standard_name='time', + var_name='time', + long_name='time') + cube.add_dim_coord(time_dim, 0) + utils.convert_timeunits(cube, 1950) + cube = utils.fix_coords(cube) + + # Fix units + if 'kg C' in cube.units.origin: + cube.units = Unit(cube.units.origin.replace('C', '')) + cmor_info = cfg['cmor_table'].get_variable(var['mip'], short_name) + cube.convert_units(cmor_info.units) + + # Fix metadata + attrs = cfg['attributes'] + attrs['mip'] = var['mip'] + utils.fix_var_metadata(cube, cmor_info) + utils.set_global_atts(cube, attrs) + + # Save variable + utils.save_variable(cube, + short_name, + out_dir, + attrs, + unlimited_dimensions=['time']) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + filepath = os.path.join(in_dir, cfg['filename']) + logger.info("Reading file '%s'", filepath) + + # Run the cmorization + for (short_name, var) in cfg['variables'].items(): + logger.info("CMORizing variable '%s'", short_name) + _extract_variable(short_name, var, cfg, filepath, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/isccp_fh.ncl b/esmvaltool/cmorizers/data/formatters/datasets/isccp_fh.ncl new file mode 100644 index 0000000000..09b2fb02fe --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/isccp_fh.ncl @@ -0,0 +1,180 @@ +; ############################################################################# +; ESMValTool CMORizer for ISCCP-FH data +; ############################################################################# +; +; Tier +; Tier 2: other freely-available dataset. +; +; Source +; https://isccp.giss.nasa.gov/pub/flux-fh/tar-nc4_MPF/ +; +; Last access +; 20191107 +; +; Download and processing instructions +; 1) Go to https://isccp.giss.nasa.gov/projects/flux.html and click on +; "FH On-line Data" +; 2) username and password are provided on this website +; 3) go to directory tar-nc4_MPF/ (= monthly files) +; 3) download and unpack files +; +; Modification history +; 20191108-lauer_axel: written. 
+; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + \ + "/data/formatters/interface.ncl") +begin + + ; Script name (for logger) + DIAG_SCRIPT = "isccp_fh.ncl" + + ; Source name + OBSNAME = "ISCCP-FH" + + ; Tier + TIER = 2 + + ; Period (only complete years) + YEAR1 = get_year(start_year, 1984) + YEAR2 = get_year(end_year, 2016) + + ; Selected variables (standard names) + VAR = (/"rlut", "rlutcs", "rsut", "rsutcs", "rsus", \ + "rsds", "rlus", "rlds", \ + "rsdt", "ps", "ts", "tas", "prw", "alb"/) + + ; Name in the raw data + NAME = (/"txu5fl", "tru5cr", "sxu5fl", "sru5cr", "sxu1fl", \ + "sxd1fl", "txu1fl", "txd1fl", \ + "sxd5fl", "ps____", "ts____", "ta____", "mnpwfl", "al_srf"/) + + ; MIP + MIP = new(dimsizes(VAR), string) + MIP = "Amon" + + ; Frequency + FREQ = new(dimsizes(VAR), string) + FREQ = "mon" + + ; CMOR table + CMOR_TABLE = getenv("cmor_tables") + \ + (/"/cmip5/Tables/CMIP5_Amon", \ + "/cmip5/Tables/CMIP5_Amon", \ + "/cmip5/Tables/CMIP5_Amon", \ + "/cmip5/Tables/CMIP5_Amon", \ + "/cmip5/Tables/CMIP5_Amon", \ + "/cmip5/Tables/CMIP5_Amon", \ + "/cmip5/Tables/CMIP5_Amon", \ + "/cmip5/Tables/CMIP5_Amon", \ + "/cmip5/Tables/CMIP5_Amon", \ + "/cmip5/Tables/CMIP5_Amon", \ + "/cmip5/Tables/CMIP5_Amon", \ + "/cmip5/Tables/CMIP5_Amon", \ + "/cmip5/Tables/CMIP5_Amon", \ + "/custom/CMOR_alb.dat"/) + + ; Conversion factor + CONV = (/1., 1., 1., 1., 1., 1., 1., 1., 1., 100., 1., 1., 10., 1./) + + ; Type + TYPE = "sat" + + ; Version + VERSION = "v0" + + ; Global attributes + SOURCE = "https://isccp.giss.nasa.gov/projects/flux.html" + REF = "Zhang et al., Study of Cloud and Water Processes in Weather and " \ + + "Climate through Satellite Observations (submitted)" + COMMENT = "" + +end + +begin + + do vv = 0, dimsizes(VAR) - 1 + + log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") + time = create_timec(YEAR1, YEAR2) + setfileoption("nc", "MissingToFillValue", False) + + do yr = YEAR1, YEAR2 + do mo = 1, 12 + + fname = input_dir_path + "ISCCP-FH.MPF.v.0.0.GLOBAL." + yr + "." 
\ + + sprinti("%0.2i", mo) + ".nc" + setfileoption("nc", "MissingToFillValue", False) + f = addfile(fname, "r") + input = f->$NAME(vv)$ + + if (.not.isdefined("output")) then + lat = (/f->latitude/) + lon = (/f->longitude/) + tmp = dimsizes(input) + dims = (/dimsizes(time), tmp(0), tmp(1)/) + output = new(dims, float) + output!0 = "time" + output!1 = "lat" + output!2 = "lon" + output&time = time + output&lat = lat + output&lon = lon + delete(tmp) + delete(dims) + end if + + timeidx = (yr - YEAR1) * 12 + mo - 1 + + ; apply scaling + if (isatt(input, "scale_factor")) then + output(timeidx, :, :) = tofloat(input) / tofloat(input@scale_factor) + else + output(timeidx, :, :) = tofloat(input) + end if + + ; unit conversion + output(timeidx, :, :) = output(timeidx, :, :) * CONV(vv) + + ; set fill value + if (isatt(input, "missing_value")) then + output(timeidx, :, :) = where(input.eq.input@missing_value, \ + output@_FillValue, \ + output(timeidx, :, :)) + end if + + end do + end do + + delete(input) + + ; format time coordinate + format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ(vv)) + + ; Set variable attributes + tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ(vv)) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = YEAR1 + "01-" + YEAR2 + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" + + ; Write variable + write_nc(fout, VAR(vv), output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + end do + +end diff --git a/esmvaltool/cmorizers/data/formatters/datasets/jma_transcom.py b/esmvaltool/cmorizers/data/formatters/datasets/jma_transcom.py new file mode 100644 index 0000000000..bd41512294 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/jma_transcom.py @@ -0,0 +1,181 @@ +"""ESMValTool CMORizer for JMA-TRANSCOM data. + +Tier + Tier 3: restricted dataset. + +Source + http://www.globalcarbonatlas.org/en/content/atmospheric-inversions + +Last access + 20190702 + +Download and processing instructions + To obtain the data sets it is necessary to contact Takashi Maki + (Department of Atmosphere, Ocean and Earth System Modeling Research, + Meteorological Research Institute, Tsukuba City, Japan). See link above + for more information. 
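+
+    The raw data are flat binary files accompanied by a GrADS-style '.ctl'
+    control file, from which this script reads the grid and time axes; nbp
+    is masked over sea and fgco2 over land.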
+""" + +import logging +import os +import shutil +import tarfile +from datetime import datetime, timedelta + +import iris +import iris.coord_categorisation +import numpy as np +from cf_units import Unit +from esmvalcore.preprocessor import mask_landsea + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def _clean(file_dir): + """Remove unzipped input files.""" + if os.path.isdir(file_dir): + shutil.rmtree(file_dir) + logger.info("Removed cached directory %s", file_dir) + + +def _extract_variable(cmor_info, attrs, in_dir, out_dir, ctl): + """Extract variable.""" + filepath = os.path.join(in_dir, ctl['binary_prefix'] + '.dat') + raw_data = np.fromfile(filepath, ctl['dtype'], + ctl['t_size'] * ctl['y_size'] * + ctl['x_size']).reshape(ctl['t_size'], ctl['y_size'], + ctl['x_size']) + + # Get coordinates + coords = _get_coords(ctl) + + # Build cube + cube = iris.cube.Cube(raw_data, dim_coords_and_dims=coords) + + # Mask appropriate parts + if cmor_info.short_name == 'nbp': + cube = mask_landsea(cube, 'sea') + elif cmor_info.short_name == 'fgco2': + cube = mask_landsea(cube, 'land') + else: + raise NotImplementedError( + f"CMORizer for '{cmor_info.short_name}' not implemented yet") + + # Fix metadata + utils.fix_var_metadata(cube, cmor_info) + utils.convert_timeunits(cube, 1950) + cube = utils.fix_coords(cube) + utils.set_global_atts(cube, attrs) + utils.save_variable(cube, + cmor_info.short_name, + out_dir, + attrs, + unlimited_dimensions=['time']) + + +def _add_months(date, delta): + """Add months to a date.""" + add_years = delta // 12 + add_months = delta % 12 + return date.replace(year=date.year + add_years, + month=date.month + add_months) + + +def _get_coords(ctl): + """Get correct coordinates for cube.""" + # Time + time_units = Unit('days since 1950-1-1 00:00:00', calendar='standard') + time_start = datetime.strptime(ctl['t_start'], '%d%b%Y') + times = [ + _add_months(time_start, d) + timedelta(days=14) + for d in int(ctl['t_delta'][0]) * np.arange(ctl['t_size']) + ] + times = [time_units.date2num(time) for time in times] + time_coord = iris.coords.DimCoord(times, + standard_name='time', + long_name='time', + var_name='time', + units=time_units) + + # Latitude + lats = float( + ctl['y_start']) + (float(ctl['y_delta']) * np.arange(ctl['y_size'])) + lat_coord = iris.coords.DimCoord(lats, + standard_name='latitude', + long_name='latitude', + var_name='lat', + units='degrees_north') + + # Longitude + lons = float( + ctl['x_start']) + (float(ctl['x_delta']) * np.arange(ctl['x_size'])) + lon_coord = iris.coords.DimCoord(lons, + standard_name='longitude', + long_name='longitude', + var_name='lon', + units='degrees_east') + + return [(time_coord, 0), (lat_coord, 1), (lon_coord, 2)] + + +def _extract_tar(filepath, out_dir): + """Extract `*.tar.gz` file.""" + logger.info("Starting extraction of %s to %s", filepath, out_dir) + with tarfile.open(filepath) as tar: + tar.extractall() + new_path = os.path.join(out_dir, 'JMA_2018') + logger.info("Successfully extracted files to %s", new_path) + return new_path + + +def _read_control_file(file_dir, cfg): + """Read '*.ctl' file.""" + ctl_path = os.path.join(file_dir, cfg['binary_prefix'] + '.ctl') + with open(ctl_path, mode='r') as ctl_file: + contents = ctl_file.read() + contents = contents.split() + ctl = {} + ctl['binary_prefix'] = cfg['binary_prefix'] + endian = contents[contents.index('OPTIONS') + 1].lower() + if endian == 'big_endian': + ctl['dtype'] = '>f4' + elif endian == 'little_endian': + 
ctl['dtype'] = '<f4'
+    if len(cubes) > 1:
+        if var.get('operator', '') == 'sum':
+            # Multiple variables case using sum operation
+            cube = None
+            for in_cube in cubes:
+                if cube is None:
+                    cube = in_cube
+                else:
+                    cube += in_cube
+        elif var.get('operator', '') == 'diff':
+            # two variables case using diff operation
+            if len(cubes) != 2:
+                errmsg = (f'operator diff selected for variable {short_name} '
+                          f'expects exactly two input variables and two input '
+                          f'files')
+                raise ValueError(errmsg)
+            cube = cubes[0] - cubes[1]
+        else:
+            oper = var.get('operator')
+            raise ValueError(
+                f'multiple input files found for variable {short_name} '
+                f'with unknown operator {oper}')
+    else:
+        cube = cubes[0]
+
+    # Fix metadata
+    attrs = copy.deepcopy(cfg['attributes'])
+    attrs['mip'] = var['mip']
+    utils.fix_var_metadata(cube, cmor_info)
+
+    if cube.var_name in ['hfls', 'hfss', 'rlus', 'rlut', 'rlutcs', 'rsus',
+                         'rsuscs', 'rsut', 'rsutcs']:
+        attrs['positive'] = 'up'
+
+    if cube.var_name in ['rlds', 'rldscs', 'rsds', 'rsdscs', 'rsdt', 'rtmt',
+                         'tauu', 'tauv']:
+        attrs['positive'] = 'down'
+
+    # fix longitudes and z-coordinate (if present)
+    for coord in cube.dim_coords:
+        coord_type = iris.util.guess_coord_axis(coord)
+        if coord_type == 'X':
+            # -> shift longitude coordinate by one grid box
+            # to match obs4mips/CREATE-IP grid
+            coord.points = coord.points + 360 / len(coord.points)
+        if coord_type == 'Z':
+            coord.standard_name = 'air_pressure'
+            coord.long_name = 'pressure'
+            coord.var_name = 'plev'
+            coord.attributes['positive'] = 'down'
+            if coord.units == "hPa":
+                coord.convert_units('Pa')
+            utils.flip_dim_coord(cube, coord.standard_name)
+
+    utils.fix_dim_coordnames(cube)
+    utils.fix_coords(cube)
+    if 'height2m' in cmor_info.dimensions:
+        utils.add_height2m(cube)
+    utils.set_global_atts(cube, attrs)
+
+    # Save variable
+    utils.save_variable(cube,
+                        short_name,
+                        out_dir,
+                        attrs,
+                        unlimited_dimensions=['time'],
+                        local_keys=['positive'])
+
+
+def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date):
+    """Cmorization func call."""
+    # Run the cmorization
+    if start_date is None:
+        start_date = 1958
+    else:
+        start_date = start_date.year
+    if end_date is None:
+        end_date = 2022
+    else:
+        end_date = end_date.year
+    for (short_name, var) in cfg['variables'].items():
+        short_name = var['short_name']
+        filename = []
+        for year in range(start_date, end_date + 1):
+            if 'file' in var:
+                filename.append(os.path.join(in_dir,
+                                             var['file'].format(year=year)))
+            elif 'files' in var:
+                for file in var['files']:
+                    filename.append(os.path.join(in_dir,
+                                                 file.format(year=year)))
+            else:
+                raise ValueError(f"No input file(s) specified for variable "
+                                 f"{short_name}.")
+
+        logger.info("CMORizing variable '%s' from file '%s'", short_name,
+                    filename)
+        _extract_variable(short_name, var, filename, cfg, out_dir)
diff --git a/esmvaltool/cmorizers/data/formatters/datasets/kadow2020.py b/esmvaltool/cmorizers/data/formatters/datasets/kadow2020.py
new file mode 100644
index 0000000000..b133dc72a2
--- /dev/null
+++ b/esmvaltool/cmorizers/data/formatters/datasets/kadow2020.py
@@ -0,0 +1,81 @@
+"""ESMValTool CMORizer for Kadow2020 data.
+
+Tier
+    Tier 2: other freely-available dataset.
+ +Source + http://users.met.fu-berlin.de/~ChristopherKadow/ + +Last access + 20220329 + +Download and processing instructions + Download the following file: + [SOURCE]/HadCRUT.5.0.1.0.anomalies.Kadow_et_al_2020_20crAI-infilled + .ensemble_mean_185001-202012.nc +""" + +import copy +import logging +import os + +import iris +from cf_units import Unit +from iris import NameConstraint + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def _extract_variable(short_name, var, version, filename, cfg, in_dir, + out_dir): + """Extract variable.""" + # load data + filepath = os.path.join(in_dir, filename) + raw_var = var.get('raw', short_name) + cube = iris.load_cube(filepath, NameConstraint(var_name=raw_var)) + + # fix time units + cube.coord('time').convert_units( + Unit('days since 1950-1-1 00:00:00', calendar='gregorian')) + + cmor_info = cfg['cmor_table'].get_variable(var['mip'], short_name) + + # Fix coordinates + utils.fix_dim_coordnames(cube) + # fix flipped latitude + utils.flip_dim_coord(cube, 'latitude') + utils.fix_dim_coordnames(cube) + cube_coord = cube.coord('latitude') + utils.fix_bounds(cube, cube_coord) + cube_coord = cube.coord('longitude') + utils.fix_bounds(cube, cube_coord) + + # add heigt2m coordinate + if 'height2m' in cmor_info.dimensions: + utils.add_height2m(cube) + + # Fix metadata and update version information + attrs = copy.deepcopy(cfg['attributes']) + attrs['mip'] = var['mip'] + attrs['version'] = version + utils.fix_var_metadata(cube, cmor_info) + utils.set_global_atts(cube, attrs) + + # Save variable + utils.save_variable(cube, + short_name, + out_dir, + attrs, + unlimited_dimensions=['time']) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + # Run the cmorization + for (short_name, var) in cfg['variables'].items(): + for (version, filename) in cfg['filenames'].items(): + logger.info("CMORizing variable '%s' '%s'", short_name, version) + _extract_variable(short_name, var, version, filename, cfg, in_dir, + out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/lai3g.py b/esmvaltool/cmorizers/data/formatters/datasets/lai3g.py new file mode 100644 index 0000000000..1db260d13d --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/lai3g.py @@ -0,0 +1,218 @@ +"""ESMValTool CMORizer for LAI3g data. + +Tier + Tier 3: restricted dataset. + +Source + http://cliveg.bu.edu/modismisr/lai3g-fpar3g.html + +Last access + 20190503 + +Download and processing instructions + To obtain the data sets it is necessary to contact Ranga B. Myneni + (Department of Earth and Environment, Boston University). See link above + for more information. + + By default, this dataset is regridded to a 1°x1° grid (original resolution + is 1/12°). If you want to use the original resolution, remove the `regrid` + section in the configuration file (`LAI3g.yml`). Note that in this case, + preprocessing the dataset with ESMValTool (i.e. every time you run the + tool) can take a very long time (> 30 min). 
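+
+    A minimal `regrid` section (spelling out the defaults that this script
+    falls back on, see `cmorization` below) would look like:
+
+        regrid:
+          target_grid: 1x1
+          scheme: nearest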
+""" + +import glob +import logging +import os +import shutil +import zipfile +from datetime import datetime + +import iris +import iris.coord_categorisation +import numpy as np +from cf_units import Unit +from esmvalcore.preprocessor import regrid + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + +# Properties of the binary file (cannot be stored in .yml since computations +# are necessary) +DTYPE = '>i2' +N_LAT = 2160 +N_LON = 4320 +MISSING_VALUE = -32768 +SCALE_FACTOR = 1000.0 +UPPER_LEFT_LAT = 90.0 - 1.0 / 24.0 +UPPER_LEFT_LON = -180.0 + 1.0 / 24.0 +LOWER_RIGHT_LAT = -90.0 + 1.0 / 24.0 +LOWER_RIGHT_LON = 180.0 - 1.0 / 24.0 +MONTHS = { + 'jan': 1, + 'feb': 2, + 'mar': 3, + 'apr': 4, + 'may': 5, + 'jun': 6, + 'jul': 7, + 'aug': 8, + 'sep': 9, + 'oct': 10, + 'nov': 11, + 'dec': 12, +} +DAYS = { + 'a': 8, + 'b': 23, +} + + +def _clean(file_dir): + """Remove unzipped input files.""" + if os.path.isdir(file_dir): + shutil.rmtree(file_dir) + logger.info("Removed cached directory %s", file_dir) + + +def _extract_variable(cmor_info, attrs, in_dir, out_dir, cfg): + """Extract variable.""" + nc_files = [] + for year in _get_years(in_dir, cfg): + cube_path = _get_cube_for_year(year, in_dir, cfg) + nc_files.append(cube_path) + + # Build final cube + logger.info("Building final cube") + cubes = iris.cube.CubeList() + for nc_file in nc_files: + cube = iris.load_cube(nc_file) + cubes.append(cube) + final_cube = cubes.concatenate_cube() + utils.fix_var_metadata(final_cube, cmor_info) + utils.convert_timeunits(final_cube, 1950) + final_cube = utils.fix_coords(final_cube) + utils.set_global_atts(final_cube, attrs) + utils.save_variable(final_cube, + cmor_info.short_name, + out_dir, + attrs, + unlimited_dimensions=['time']) + + +def _get_coords(year, filename, cfg): + """Get correct coordinates for cube.""" + filename = os.path.basename(filename) + time_units = Unit('days since 1950-1-1 00:00:00', calendar='standard') + + # Extract date from filename + time_str = filename.replace(cfg['binary_prefix'], '') + month = MONTHS[time_str[4:7]] + day = DAYS[time_str[7:8]] + date = datetime(year, month, day) + + # Build time coordinate + time_data = [time_units.date2num(date)] + time_coord = iris.coords.DimCoord(time_data, + standard_name='time', + long_name='time', + var_name='time', + units=time_units) + + # Build latitude/Longitude coordinates + latitude_data = np.linspace(UPPER_LEFT_LAT, LOWER_RIGHT_LAT, N_LAT) + longitude_data = np.linspace(UPPER_LEFT_LON, LOWER_RIGHT_LON, N_LON) + lat_coord = iris.coords.DimCoord(latitude_data, + standard_name='latitude', + long_name='latitude', + var_name='lat', + units='degrees') + lon_coord = iris.coords.DimCoord(longitude_data, + standard_name='longitude', + long_name='longitude', + var_name='lon', + units='degrees') + + return [(time_coord, 0), (lat_coord, 1), (lon_coord, 2)] + + +def _get_cube_for_year(year, in_dir, cfg): + """Extract cube containing one year from raw file.""" + logger.info("Processing year %i", year) + bin_files = glob.glob( + os.path.join(in_dir, f"{cfg['binary_prefix']}{year}*.bin")) + + # Read files of one year + cubes = iris.cube.CubeList() + for bin_file in bin_files: + raw_data = np.fromfile(bin_file, DTYPE, + N_LAT * N_LON).reshape(1, N_LAT, N_LON) + raw_data = np.ma.masked_equal(raw_data, MISSING_VALUE) + raw_data = raw_data.astype(np.float32) + raw_data /= SCALE_FACTOR + + # Build coordinates and cube, regrid, and append it + coords = _get_coords(year, bin_file, cfg) + cube = 
iris.cube.Cube(raw_data, dim_coords_and_dims=coords)
+        if cfg.get('regrid'):
+            cube = regrid(cube, cfg['regrid']['target_grid'],
+                          cfg['regrid']['scheme'])
+        cubes.append(cube)
+
+    # Build cube for single year with monthly data
+    # (Raw data has two values per month)
+    cube = cubes.concatenate_cube()
+    iris.coord_categorisation.add_month_number(cube, 'time')
+    cube = cube.aggregated_by('month_number', iris.analysis.MEAN)
+
+    # Cache cube on disk to save memory
+    cached_path = os.path.join(in_dir, f'{year}.nc')
+    iris.save(cube, cached_path)
+    logger.info("Cached %s", cached_path)
+    return cached_path
+
+
+def _get_years(in_dir, cfg):
+    """Get all available years from input directory."""
+    bin_files = os.listdir(in_dir)
+    bin_files = [f.replace(cfg['binary_prefix'], '') for f in bin_files]
+    years = {int(f[:4]) for f in bin_files}
+    return years
+
+
+def _unzip(filepath, out_dir):
+    """Unzip `*.zip` file."""
+    logger.info("Starting extraction of %s to %s", filepath, out_dir)
+    with zipfile.ZipFile(filepath, 'r') as zip_ref:
+        zip_ref.extractall(out_dir)
+    new_path = os.path.join(out_dir, 'LAI')
+    logger.info("Successfully extracted files to %s", new_path)
+    return new_path
+
+
+def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date):
+    """Cmorization func call."""
+    glob_attrs = cfg['attributes']
+    cmor_table = cfg['cmor_table']
+    filepath = os.path.join(in_dir, cfg['filename'])
+
+    # Run the cmorization
+    for (var, var_info) in cfg['variables'].items():
+        logger.info("CMORizing variable '%s'", var)
+        if cfg.get('regrid'):
+            cfg['regrid'].setdefault('target_grid', '1x1')
+            cfg['regrid'].setdefault('scheme', 'nearest')
+            logger.info(
+                "Final dataset will be regridded to %s grid using scheme '%s'",
+                cfg['regrid']['target_grid'], cfg['regrid']['scheme'])
+        glob_attrs['mip'] = var_info['mip']
+        cmor_info = cmor_table.get_variable(var_info['mip'], var)
+        zip_file = os.path.join(in_dir, filepath)
+        if not os.path.isfile(zip_file):
+            logger.debug("Skipping '%s', file '%s' not found", var, zip_file)
+            continue
+        logger.info("Found input file '%s'", zip_file)
+        file_dir = _unzip(zip_file, out_dir)
+        _extract_variable(cmor_info, glob_attrs, file_dir, out_dir, cfg)
+        _clean(file_dir)
diff --git a/esmvaltool/cmorizers/data/formatters/datasets/landflux_eval.py b/esmvaltool/cmorizers/data/formatters/datasets/landflux_eval.py
new file mode 100644
index 0000000000..f8b0a3ad7c
--- /dev/null
+++ b/esmvaltool/cmorizers/data/formatters/datasets/landflux_eval.py
@@ -0,0 +1,84 @@
+"""ESMValTool CMORizer for LandFlux-EVAL data.
+
+Tier
+    Tier 3: restricted dataset.
+
+Source
+    https://data.iac.ethz.ch/landflux/
+
+Last access
+    20190516
+
+Download and processing instructions
+    Download the following files:
+        LandFluxEVAL.merged.89-05.monthly.all.nc
+    A registration is required for downloading the data.
+""" + +import logging +import os +from datetime import datetime + +import iris +import numpy as np +from cf_units import Unit +from iris import NameConstraint + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def _extract_variable(raw_var, cmor_info, attrs, filepath, out_dir): + """Extract variable.""" + var = cmor_info.short_name + cube = iris.load_cube(filepath, NameConstraint(var_name=raw_var)) + _fix_time_coord(cube) + utils.fix_var_metadata(cube, cmor_info) + utils.convert_timeunits(cube, 1950) + cube = utils.fix_coords(cube) + utils.set_global_atts(cube, attrs) + utils.save_variable(cube, + var, + out_dir, + attrs, + unlimited_dimensions=['time']) + + +def _fix_time_coord(cube): + """Fix time coordinate (given as month as %Y%m.%f).""" + time_coord = cube.coord('time') + new_units = Unit('days since 1950-1-1 00:00:00', calendar='standard') + + # Function to convert given date to correct number + def _date2num(date_str): + """Convert data given as %Y%m.%f to number.""" + date_str = str(date_str) + year = int(date_str[:4]) + month = int(date_str[4:6]) + day = 15 + date = datetime(year, month, day) + return new_units.date2num(date) + + # Convert time coordinate array and set correct units + time_coord.points = np.vectorize(_date2num)(time_coord.points) + time_coord.units = new_units + time_coord.attributes = {} + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + glob_attrs = cfg['attributes'] + cmor_table = cfg['cmor_table'] + filepath = os.path.join(in_dir, cfg['filename']) + logger.info("Found input file '%s'", filepath) + + # Run the cmorization + for (var, var_info) in cfg['variables'].items(): + logger.info("CMORizing variable '%s'", var) + glob_attrs['mip'] = var_info['mip'] + cmor_info = cmor_table.get_variable(var_info['mip'], var) + raw_var = var_info.get('raw', var) + _extract_variable(raw_var, cmor_info, glob_attrs, filepath, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/landschuetzer2016.py b/esmvaltool/cmorizers/data/formatters/datasets/landschuetzer2016.py new file mode 100644 index 0000000000..306c4f8f27 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/landschuetzer2016.py @@ -0,0 +1,110 @@ +"""ESMValTool CMORizer for Landschuetzer2016 data. + +Tier + Tier 2: other freely-available dataset. + +Source + https://www.nodc.noaa.gov/archive/arc0105/0160558/3.3/data/0-data/ + +Last access + 20190308 + +Download and processing instructions + Download the file spco2_1982-2015_MPI_SOM-FFN_v2016.nc + +Modification history + 20190227-lovato_tomas: written. +""" + +import logging +import os +from warnings import catch_warnings, filterwarnings + +import iris +from dask import array as da + +from esmvaltool.cmorizers.data.utilities import ( + constant_metadata, + fix_coords, + fix_var_metadata, + save_variable, + set_global_atts, +) + +logger = logging.getLogger(__name__) + + +def _fix_data(cube, var): + """Specific data fixes for different variables.""" + logger.info("Fixing data ...") + with constant_metadata(cube) as metadata: + if var == 'fgco2': + # Assume standard year 365_day + cube *= -12.01 / 1000. / (86400. * 365.) + metadata.attributes['positive'] = 'down' + elif var == 'dpco2': + cube *= -1.0 * 101325. / 1.e06 + elif var == 'spco2': + cube *= 101325. 
/ 1.e06 + return cube + + +# pylint: disable=unused-argument +def _fix_fillvalue(cube, field, filename): + """Create masked array from missing_value.""" + if hasattr(field.cf_data, 'missing_value'): + # fix for bad missing value definition + cube.data = da.ma.masked_equal(cube.core_data(), + field.cf_data.missing_value) + + +def extract_variable(var_info, raw_info, out_dir, attrs): + """Extract to all vars.""" + var = var_info.short_name + with catch_warnings(): + filterwarnings( + action='ignore', + message='Ignoring netCDF variable .* invalid units .*', + category=UserWarning, + module='iris', + ) + cubes = iris.load(raw_info['file'], callback=_fix_fillvalue) + rawvar = raw_info['name'] + + for cube in cubes: + if cube.var_name == rawvar: + fix_var_metadata(cube, var_info) + cube = fix_coords(cube) + _fix_data(cube, var) + set_global_atts(cube, attrs) + save_variable( + cube, + var, + out_dir, + attrs, + local_keys=['positive'], + unlimited_dimensions=['time'], + ) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + cmor_table = cfg['cmor_table'] + glob_attrs = cfg['attributes'] + + # run the cmorization + for var, vals in cfg['variables'].items(): + inpfile = os.path.join(in_dir, vals['file']) + logger.info("CMORizing var %s from file %s", var, inpfile) + var_info = cmor_table.get_variable(vals['mip'], var) + raw_info = {'name': vals['raw'], 'file': inpfile} + glob_attrs['mip'] = vals['mip'] + with catch_warnings(): + filterwarnings( + action='ignore', + message=('WARNING: missing_value not used since it\n' + 'cannot be safely cast to variable data type'), + category=UserWarning, + module='iris', + ) + extract_variable(var_info, raw_info, out_dir, glob_attrs) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/landschuetzer2020.py b/esmvaltool/cmorizers/data/formatters/datasets/landschuetzer2020.py new file mode 100644 index 0000000000..e8419b320b --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/landschuetzer2020.py @@ -0,0 +1,139 @@ +"""ESMValTool CMORizer for Landschuetzer2020 data. + +Tier + Tier 2: other freely-available dataset. + +Source + https://www.ncei.noaa.gov/data/oceans/ncei/ocads/data/0209633/ + +Last access + 20221102 + +Download and processing instructions + Download the file MPI-ULB-SOM_FFN_clim.nc + +""" + +import logging +import warnings +from datetime import datetime +from pathlib import Path + +import iris +from cf_units import Unit +from dask import array as da +from iris import NameConstraint +from iris.coords import CellMethod, DimCoord + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def _callback_fix_fillvalue(cube, field, _): + """Create masked array from FillValue.""" + if hasattr(field.cf_data, 'FillValue'): + fill_value = int(field.cf_data.FillValue) + logger.info("Fixing fill value (%i)", fill_value) + cube.data = da.ma.masked_equal(cube.core_data(), fill_value) + + +def _fix_climatological_time(cube): + """Fix climatology coordinate.""" + time_units = Unit('days since 1950-01-01 00:00:00', calendar='standard') + + # Following the doc the covered time period of the climatology is + # 1988-01-01 to 2020-01-01 (Use 2004 as the "mean" year). 
See + # https://www.ncei.noaa.gov/access/metadata/landing-page/bin/ + # iso?id=gov.noaa.nodc%3A0209633 + time_points = time_units.date2num( + [datetime(2004, m, 15) for m in range(1, 13)] + ) + time_bounds = [ + [datetime(1988, m, 1), datetime(2019, m + 1, 1)] for m in range(1, 12) + ] + time_bounds.append([datetime(1988, 12, 1), datetime(2020, 1, 1)]) + time_bounds = time_units.date2num(time_bounds) + + # Add new time coordinate to cube + time_coord = DimCoord( + time_points, + bounds=time_bounds, + standard_name='time', + long_name='time', + var_name='time', + units=time_units, + climatological=True, + ) + cube.remove_coord('time') + cube.add_dim_coord(time_coord, 0) + + # Fix cell methods + cube.add_cell_method(CellMethod('mean within years', coords=time_coord)) + cube.add_cell_method(CellMethod('mean over years', coords=time_coord)) + + +def _fix_scalar_coords(cube): + """Fix scalar coordinates.""" + if cube.var_name == 'spco2': + utils.add_scalar_depth_coord(cube) + + +def _extract_variable(var_info, cmor_info, attrs, filepath, out_dir): + """Extract variable.""" + var = cmor_info.short_name + raw_var = var_info.get('raw_name', var) + + # Load data + with warnings.catch_warnings(): + warnings.filterwarnings( + action='ignore', + message='Ignoring netCDF variable .* invalid units .*', + category=UserWarning, + module='iris', + ) + cube = iris.load_cube(filepath, NameConstraint(var_name=raw_var), + callback=_callback_fix_fillvalue) + + # Fix variable metadata + if 'raw_units' in var_info: + cube.units = var_info['raw_units'] + cube.convert_units(cmor_info.units) + utils.fix_var_metadata(cube, cmor_info) + + # Fix coordinates + _fix_climatological_time(cube) + cube = utils.fix_coords( + cube, + overwrite_lat_bounds=False, + overwrite_lon_bounds=False, + overwrite_time_bounds=False, + ) + _fix_scalar_coords(cube) + + # Fix global metadata + utils.set_global_atts(cube, attrs) + + # Save variable + utils.save_variable( + cube, + var, + out_dir, + attrs, + local_keys=['positive'], + unlimited_dimensions=['time'], + ) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + cmor_table = cfg['cmor_table'] + glob_attrs = cfg['attributes'] + + # Run the cmorization + for (var, var_info) in cfg['variables'].items(): + filepath = Path(in_dir) / var_info['filename'] + logger.info("CMORizing variable '%s' from file %s", var, filepath) + glob_attrs['mip'] = var_info['mip'] + cmor_info = cmor_table.get_variable(var_info['mip'], var) + _extract_variable(var_info, cmor_info, glob_attrs, filepath, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/mac_lwp.ncl b/esmvaltool/cmorizers/data/formatters/datasets/mac_lwp.ncl new file mode 100644 index 0000000000..3b73be8944 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/mac_lwp.ncl @@ -0,0 +1,129 @@ +; ############################################################################# +; ESMValTool CMORizer for MAC-LWP data +; ############################################################################# +; +; Tier +; Tier 3: restricted dataset. +; +; Source +; https://search.earthdata.nasa.gov/search?q=MAC-LWP +; +; Last access +; 20200130 +; +; Download and processing instructions +; Select the MACLWP-mean dataset. +; Download the script file for both projects. +; Download the data using http either by selecting each granule +; individually or by using the option "download all". +; Data is freely available, but a registration is required. 
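+;
+; Caveats
+;    The raw cloudlwp fields are provided in g/m2; the CONVERSION factor
+;    of 1.e-3 below converts them to the CMOR unit kg/m2.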
+; +; Modification history +; 20200130-hassler_birgit: written. +; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + \ + "/data/formatters/interface.ncl") +begin + + ; Script name (for logger) + DIAG_SCRIPT = "MAC-LWP.ncl" + + ; Source name + OBSNAME = "MAC-LWP" + + ; Tier + TIER = 3 + + ; Period + YEAR1 = get_year(start_year, 1988) + YEAR2 = get_year(end_year, 2016) + + ; Selected variable (standard name) + VAR = (/"lwp", "lwpStderr"/) + + ; Name in the raw data + NAME = (/"cloudlwp", "cloudlwp_error"/) + + ; Conversion factor + CONVERSION = (/1.e-3, 1.e-3/) + + ; MIP + MIP = (/"Amon", "Amon"/) + + ; Frequency + FREQ = (/"mon", "mon"/) + + ; CMOR table + CMOR_TABLE = getenv("cmor_tables") + "/custom/CMOR_" + VAR + ".dat" + + ; Type + TYPE = "sat" + + ; Version + VERSION = "v1" + + ; Global attributes + SOURCE = "https://search.earthdata.nasa.gov/search?q=MAC-LWP" + REF = "Elsaesser et al., J. Clim., doi:10.1175/JCLI-D-16-0902.1, 2017" + COMMENT = "" + +end + +begin + + do vv = 0, dimsizes(VAR) - 1 + + log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") + + do yr = YEAR1, YEAR2 + + fname = input_dir_path + "maclwp_cloudlwpave_" + yr + "_v1.nc4" + + f = addfile(fname, "r") + output = (/f->$NAME(vv)$/) + + lat = (/f->lat/) + lon = (/f->lon/) + + ; Convert units + output = output * CONVERSION(vv) ; for clivi this will be equal 0 + + ; Format coordinates + output!0 = "time" + output!1 = "lat" + output!2 = "lon" + output&time = create_timec(yr, yr) + output&lat = lat + output&lon = lon + format_coords(output, yr + "0101", yr + "1231", FREQ(vv)) + + ; Set variable attributes + tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ(vv)) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = yr + "01-" + yr + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" + + ; Write variable + write_nc(fout, VAR(vv), output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + end do + + end do + +end diff --git a/esmvaltool/cmorizers/data/formatters/datasets/merra.ncl b/esmvaltool/cmorizers/data/formatters/datasets/merra.ncl new file mode 100644 index 0000000000..d9fbf761df --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/merra.ncl @@ -0,0 +1,316 @@ +; ############################################################################# +; ESMValTool CMORizer for NASA MERRA reanalysis v5.2.0 +; ############################################################################# +; +; Tier +; Tier 3: restricted dataset (registration required). 
+; +; Source +; EarthData via https://goldsmr3.gesdisc.eosdis.nasa.gov/data/MERRA_MONTHLY/ +; +; Last access +; 20230201 +; +; Download and processing instructions +; (requires EarthData login; see https://urs.earthdata.nasa.gov/) +; Use ESMValTool automatic download: +; esmvaltool data download MERRA +; +; Modification history +; 20230818-lauer_axel: added output of clwvi (iwp + lwp) +; 20230201-lauer_axel: written +; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + \ + "/data/formatters/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "merra.ncl" + + ; Source name + OBSNAME = "MERRA" + + ; Tier + TIER = 3 + + ; Period (complete years only) + YEAR1 = get_year(start_year, 1979) + YEAR2 = get_year(end_year, 2015) + + ; Selected variable (standard name) + VAR = (/"zg", "wap", "cli", "clw", "hus", "hur", "ta", "ua", "va", \ + "ps", "psl", \ + "clivi", "clwvi", "lwp", "prw", \ + "clt", "rlut", "rlutcs", "rsdt", "rsut", "rsutcs", "ts", \ + "tas", "pr"/) + + ; Name in the raw data + NAME = (/"H", "OMEGA", "QI", "QL", "QV", "RH", "T", "U", "V", \ ; 3d asm + "PS", "SLP", \ + "TQI", "TQI", "TQL", "TQV", \ ; 2d int + "CLDTOT", "LWTUP", "LWTUPCLR", "SWTDN", "SWTNT", \ ; 2d rad + "SWTNTCLR", "TS", \ + "T2M", "PRECLSC"/) ; 2d chm + + ; unit conversion factor + CONVFAC = (/1.0, 1.0, 1.0, 1.0, 1.0, 100.0, 1.0, 1.0, 1.0, \ ; 3d asm + 1.0, 1.0, \ + 1.0, 1.0, 1.0, 1.0, \ ; 2d int + 100.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, \ ; 2d rad + 1.0, 1.0/) ; 2d chm + + ; additional attribute "positive" (for radiative fluxes) + POSITIVE = (/"", "", "", "", "", "", "", "", "", "", "", \ ; 3d asm + "", "", "", "", \ ; 2d int + "", "up", "up", "down", "up", "up", "", \ ; 2d rad + "", ""/) ; 2d chm + + ; names source files + SOURCEFILE = (/"instM_3d_asm_Cp.", "instM_3d_asm_Cp.", \ ; 3d asm + "instM_3d_asm_Cp.", "instM_3d_asm_Cp.", \ + "instM_3d_asm_Cp.", "instM_3d_asm_Cp.", \ + "instM_3d_asm_Cp.", "instM_3d_asm_Cp.", \ + "instM_3d_asm_Cp.", "instM_3d_asm_Cp.", \ + "instM_3d_asm_Cp.", \ + "instM_2d_int_Nx.", "instM_2d_int_Nx.", \ ; 2d int + "instM_2d_int_Nx.", "instM_2d_int_Nx.", \ + "tavgM_2d_rad_Nx.", "tavgM_2d_rad_Nx.", \ ; 2d rad + "tavgM_2d_rad_Nx.", "tavgM_2d_rad_Nx.", \ + "tavgM_2d_rad_Nx.", "tavgM_2d_rad_Nx.", \ + "tavgM_2d_rad_Nx.", \ + "tavgM_2d_chm_Fx.", "tavgM_2d_chm_Fx."/) ; 2d chm + + ; dataset doi numbers + DOI = (/"10.5067/YX0AVASQRTNW", "10.5067/YX0AVASQRTNW", \ ; 3d asm + "10.5067/YX0AVASQRTNW", "10.5067/YX0AVASQRTNW", \ + "10.5067/YX0AVASQRTNW", "10.5067/YX0AVASQRTNW", \ + "10.5067/YX0AVASQRTNW", "10.5067/YX0AVASQRTNW", \ + "10.5067/YX0AVASQRTNW", "10.5067/YX0AVASQRTNW", \ + "10.5067/YX0AVASQRTNW", \ + "10.5067/QL0PGBK2CYJS", "10.5067/QL0PGBK2CYJS", \ ; 2d int + "10.5067/QL0PGBK2CYJS", "10.5067/QL0PGBK2CYJS", \ + "10.5067/6UX3EDUNVUFK", "10.5067/6UX3EDUNVUFK", \ ; 2d rad + "10.5067/6UX3EDUNVUFK", "10.5067/6UX3EDUNVUFK", \ + "10.5067/6UX3EDUNVUFK", "10.5067/6UX3EDUNVUFK", \ + "10.5067/6UX3EDUNVUFK", \ + "10.5067/IYDN3LNZ63UE", "10.5067/IYDN3LNZ63UE"/) ; 2d chm + + ; MIP + MIP = "Amon" + + ; Frequency + FREQ = "mon" + + ; CMOR table + CMOR_TABLE = getenv("cmor_tables") + \ + (/"/cmip5/Tables/CMIP5_" + MIP, "/cmip5/Tables/CMIP5_" + MIP, \ ; 3d asm + "/cmip5/Tables/CMIP5_" + MIP, "/cmip5/Tables/CMIP5_" + MIP, \ + "/cmip5/Tables/CMIP5_" + MIP, "/cmip5/Tables/CMIP5_" + MIP, \ + "/cmip5/Tables/CMIP5_" + MIP, "/cmip5/Tables/CMIP5_" + MIP, \ + "/cmip5/Tables/CMIP5_" + MIP, "/cmip5/Tables/CMIP5_" + MIP, \ + 
"/cmip5/Tables/CMIP5_" + MIP, \ + "/cmip5/Tables/CMIP5_" + MIP, "/cmip5/Tables/CMIP5_" + MIP, \ ; 2d int + "/custom/CMOR_lwp.dat", "/cmip5/Tables/CMIP5_" + MIP, \ + "/cmip5/Tables/CMIP5_" + MIP, "/cmip5/Tables/CMIP5_" + MIP, \ ; 2d rad + "/cmip5/Tables/CMIP5_" + MIP, "/cmip5/Tables/CMIP5_" + MIP, \ + "/cmip5/Tables/CMIP5_" + MIP, "/cmip5/Tables/CMIP5_" + MIP, \ + "/cmip5/Tables/CMIP5_" + MIP, \ + "/cmip5/Tables/CMIP5_" + MIP, "/cmip5/Tables/CMIP5_" + MIP/) ; 2d chm + + ; Type + TYPE = "reanaly" + + ; Version + VERSION = "5.2.0" + + ; Global attributes + SOURCE = "https://goldsmr3.gesdisc.eosdis.nasa.gov/data/MERRA_MONTHLY/" + COMMENT = "Goddard Earth Sciences Data and Information Services Center " + \ + "(GES DISC)" + +end + +begin + + ; Loop over variables + do vv = 0, dimsizes(VAR) - 1 + + log_info("Processing " + VAR(vv) + " (" + MIP + ")") + if (isvar("output")) then + delete(output) + end if + + do yy = YEAR1, YEAR2 + do mm = 1, 12 + + datestr = tostring(yy) + if (mm .lt. 10) then + datestr = datestr + "0" + end if + datestr = datestr + tostring(mm) + + fname = systemfunc("ls " + input_dir_path + "MERRA???.prod.assim." + \ + SOURCEFILE(vv) + datestr + ".hdf") + + f = addfile(fname, "r") + tmp = f->$NAME(vv)$ + + ; Extract time range + tmp&TIME_EOSGRID@calendar = "standard" + date = cd_calendar(tmp&TIME_EOSGRID, 0) + if ((date(0, 0) .ne. yy) .or. (date(0, 1) .ne. mm)) then + error_msg("f", DIAG_SCRIPT, "", \ + "date in input file does not match date in filename: " + \ + fname) + end if + + delete(date) + + if (.not.isvar("output")) then + dims = dimsizes(tmp) + ; overwrite time dimension + dims(0) = 12 + output = new(dims, float) + delete(dims) + rank = dimsizes(dimsizes(output)) + + output!0 = "time" + if (rank.eq.4) then + output!1 = "plev" + output!2 = "lat" + output!3 = "lon" + output&plev = tmp&Height_EOSGRID * 100. ; [hPa] --> [Pa] + elseif (rank.eq.3) + output!1 = "lat" + output!2 = "lon" + end if + + output&time = fspan(1, 12, 12) + output&time@calendar = "standard" + output&time@units = "days since 1950-01-01 00:00:00" + + output&lat = tmp&YDim_EOSGRID + output&lon = tmp&XDim_EOSGRID + + end if + + ; Unpack variable according to metadata information + if (isatt(tmp, "scale_factor") .or. isatt(tmp, "add_offset")) then + tmp = tmp * tmp@scale_factor + tmp@add_offset + end if + + if (rank.eq.4) then + output(mm - 1, :, :, :) = (/ tmp(0, :, :, :) /) + else + output(mm - 1, :, :) = (/ tmp(0, :, :) /) + end if + + delete(tmp) + + ; calculation of outgoing fluxes: out = in - net + if ((VAR(vv) .eq. "rsut") .or. (VAR(vv) .eq. "rsutcs")) then + tmp = f->SWTDN + if (isatt(tmp, "scale_factor") .or. isatt(tmp, "add_offset")) then + tmp = tmp * tmp@scale_factor + tmp@add_offset + end if + output(mm - 1, :, :) = (/ tmp(0, :, :) /) - output(mm - 1, :, :) + + delete(tmp) + end if + + ; calculation of total precipitation flux = + ; large-scale+convective+anvil + if (VAR(vv) .eq. "pr") then + tmp = f->PRECCON ; surface precipitation flux from convection + if (isatt(tmp, "scale_factor") .or. isatt(tmp, "add_offset")) then + tmp = tmp * tmp@scale_factor + tmp@add_offset + end if + output(mm - 1, :, :) = output(mm - 1, :, :) + tmp(0, :, :) + delete(tmp) + tmp = f->PRECANV ; surface precipitation flux from anvils + if (isatt(tmp, "scale_factor") .or. isatt(tmp, "add_offset")) then + tmp = tmp * tmp@scale_factor + tmp@add_offset + end if + output(mm - 1, :, :) = output(mm - 1, :, :) + tmp(0, :, :) + delete(tmp) + end if + + ; calculation of clwvi + if (VAR(vv) .eq. 
"clwvi") then + tmp = f->TQL + if (isatt(tmp, "scale_factor") .or. isatt(tmp, "add_offset")) then + tmp = tmp * tmp@scale_factor + tmp@add_offset + end if + output(mm - 1, :, :) = output(mm - 1, :, :) + tmp(0, :, :) + delete(tmp) + end if + + delete(f) + + end do ; loop over months (mm) + + ; Convert units + if (CONVFAC(vv) .ne. 1.0) then + output = output * CONVFAC(vv) + end if + + ; Format coordinates + format_coords(output, yy + "0101", yy + "1231", FREQ) + + ; Set variable attributes + tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) + delete(output) + output = tmp + delete(tmp) + + ; set additional attribute "positive" for radiative fluxes + if (POSITIVE(vv) .ne. "") then + output@positive = POSITIVE(vv) + end if + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ) + + ; Set global attributes + REF = "doi: " + DOI(vv) + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = yy + "01-" + yy + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP, VAR(vv), DATESTR/), "_") + ".nc" + + ; Add height coordinate to tas variable (required by the new backend) + if (VAR(vv).eq."tas") then + output@coordinates = "height" + end if + + ; Write variable + write_nc(fout, VAR(vv), output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + ; Add height coordinate to tas variable (required by CMOR checker) + if (VAR(vv).eq."tas") then + height = 2.d + height!0 = "ncl_scalar" + height@units = "m" + height@axis = "Z" + height@positive = "up" + height@long_name = "height" + height@standard_name = "height" + w = addfile(fout, "w") + w->height = height + delete(w) + end if + + ; --------------------------------------------------------------------- + + end do ; loop over years (yy) + end do ; loop over variables (vv) + +end diff --git a/esmvaltool/cmorizers/data/formatters/datasets/merra2.py b/esmvaltool/cmorizers/data/formatters/datasets/merra2.py new file mode 100644 index 0000000000..a50eb12159 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/merra2.py @@ -0,0 +1,308 @@ +"""ESMValTool CMORizer for MERRA2 data. + +Tier + Tier 3: restricted datasets (i.e., dataset which requires a registration + to be retrieved or provided upon request to the respective contact or PI). + +Source + https://goldsmr4.gesdisc.eosdis.nasa.gov/data/MERRA2_MONTHLY/ + https://daac.gsfc.nasa.gov/datasets/M2IUNPASM_5.12.4/summary?keywords=MERRA2 + +Last access + 20220913 + +Download and processing instructions + - For download instructions see the download script `download_merra2.sh`. 
+""" +import glob +import logging +import os +from copy import deepcopy +from datetime import datetime + +import cf_units +import iris +from dask import array as da +from esmvalcore.cmor.table import CMOR_TABLES + +from esmvaltool.cmorizers.data import utilities as utils + +# iris spits out a large amount of warnings +# logging.disable('WARNING') # careful: this deactivates writing +# to log files in the pytest test environment for other cmorizer tests +logger = logging.getLogger(__name__) + + +def _fix_time_monthly(cube): + """Fix time by setting it to 15th of month.""" + # Read dataset time unit and calendar from file + dataset_time_unit = str(cube.coord('time').units) + dataset_time_calender = cube.coord('time').units.calendar + # Convert datetime + time_as_datetime = cf_units.num2date( + cube.coord('time').core_points(), dataset_time_unit, + dataset_time_calender) + newtime = [] + for timepoint in time_as_datetime: + midpoint = datetime(timepoint.year, timepoint.month, 15) + newtime.append(midpoint) + + newtime = cf_units.date2num(newtime, dataset_time_unit, + dataset_time_calender) + # Put them on the file + cube.coord('time').points = newtime + cube.coord('time').bounds = None + return cube + + +def _var_pairs(cube_list, var_parts, oper): + """Return a selection composed of two variables.""" + selected_1 = [c for c in cube_list if c.var_name == var_parts[0]] + selected_2 = [c for c in cube_list if c.var_name == var_parts[1]] + if not selected_1: + logger.error("Raw variable %s could not be found " + "in str(cube_list) - operation can not be performed.", + var_parts[0]) + raise ValueError + if not selected_2: + logger.error("Raw variable %s could not be found " + "in str(cube_list) - operation can not be performed.", + var_parts[1]) + raise ValueError + if oper == "-": + selected = [ + cube_1 - cube_2 for cube_1, cube_2 in zip(selected_1, selected_2) + ] + selected = iris.cube.CubeList(selected) + elif oper == "+": + selected = [ + cube_1 + cube_2 for cube_1, cube_2 in zip(selected_1, selected_2) + ] + selected = iris.cube.CubeList(selected) + else: + raise NotImplementedError(f"Pairwise variables operation {oper} " + "not implemented yet, you can do it " + "yourself in the MERRA2 cmorizer.") + + return selected + + +def _load_cube(in_files, var): + cube_list = iris.load_raw(in_files) + pairwise_ops = ["+", "-", ":"] + var_parts = [] + for oper in pairwise_ops: + split_var = var['raw'].split(oper) + if len(split_var) == 2: + var_parts = [split_var[0], split_var[1]] + break + if len(split_var) > 2: + logger.error("Splitting raw variable %s by " + "operation %s results in more than two" + " raw variables, this is not yet implemented.", + var['raw'], oper) + raise NotImplementedError + if not var_parts: + selected = [c for c in cube_list if c.var_name == var['raw']] + selected = iris.cube.CubeList(selected) + else: + selected = _var_pairs(cube_list, var_parts, oper) + + drop_attrs = [ + 'History', 'Filename', 'Comment', 'RangeBeginningDate', + 'RangeEndingDate', 'GranuleID', 'ProductionDateTime', 'Source' + ] + drop_time_attrs = [ + 'begin_date', 'begin_time', 'time_increment', 'valid_range', 'vmax', + 'vmin' + ] + for cube in selected: + for attr in drop_attrs: + cube.attributes.pop(attr) + for attr in drop_time_attrs: + cube.coord('time').attributes.pop(attr) + cube.coord('time').points = cube.coord('time').core_points().astype( + 'float64') + + iris.util.unify_time_units(selected) + cube = selected.concatenate_cube() + return cube + + +def _fix_coordinates(cube, definition): + """Fix 
coordinates.""" + if cube.ndim == 3: + axis2def = {'T': 'time', 'X': 'longitude', 'Y': 'latitude'} + axes = ['T', 'X', 'Y'] + elif cube.ndim == 4: + axis2def = {'T': 'time', 'X': 'longitude', + 'Y': 'latitude', 'Z': 'plev19'} + axes = ['T', 'X', 'Y', 'Z'] + for axis in axes: + coord_def = definition.coordinates.get(axis2def[axis]) + if coord_def: + coord = cube.coord(axis=axis) + if axis == 'T': + coord.convert_units('days since 1850-1-1 00:00:00.0') + elif axis == 'Z': + if coord.units == "hPa": + coord.convert_units('Pa') + else: + try: + coord.convert_units('Pa') + except ValueError as exc: + logger.error("Attempting to convert units for " + "coordinate %s to Pa", coord) + raise exc + coord.standard_name = coord_def.standard_name + coord.var_name = coord_def.out_name + coord.long_name = coord_def.long_name + coord.points = coord.core_points().astype('float64') + if len(coord.points) > 1: + coord.guess_bounds() + else: + # special case for UV and 3-dim cloud variables: + # variables come with "alevel" instead + # of "plev19" in the table; "alevel" has empty fields for + # standard_name, out_name etc. so we need to set them; it's safe + # to do so since the cmor checker/fixer will convert that during + # preprocessing at cmor fix stage + specialvars = ('uv', 'cl', 'cli', 'clw') + if cube.var_name in specialvars and axis == "Z": + coord = cube.coord(axis=axis) + coord_def = definition.coordinates.get('alevel') + coord.standard_name = "air_pressure" + coord.var_name = "plev" + coord.long_name = "pressure" + coord.points = coord.core_points().astype('float64') + if len(coord.points) > 1: + coord.guess_bounds() + + if coord.units == "hPa": + coord.convert_units('Pa') + else: + try: + coord.convert_units('Pa') + except ValueError as exc: + logger.error("Attempting to convert units for " + "coordinate %s to Pa", coord) + raise exc + + return cube + + +def _extract_variable(in_files, var, cfg, out_dir): + logger.info("CMORizing variable '%s' from input files '%s'", + var['short_name'], ', '.join(in_files)) + attributes = deepcopy(cfg['attributes']) + attributes['mip'] = var['mip'] + attributes['raw'] = var['raw'] + pairwise_ops = ["+", "-", ":"] + for oper in pairwise_ops: + if oper in var['raw']: + components = var['raw'].split(oper) + if len(components) == 2: + attributes['component_raw_1'] = components[0] + attributes['component_raw_2'] = components[1] + attributes['component_operation'] = oper + break + cmor_table = CMOR_TABLES[attributes['project_id']] + definition = cmor_table.get_variable(var['mip'], var['short_name']) + + cube = _load_cube(in_files, var) + + # keep the following raw cube attributes + attrs_to_keep = [ + "institution", "Institution", + "institute_id", "VersionID", + "experiment_id", + "source", "Source", # overrides empty string default + "model_id", "ModelID", + "contact", "Contact", + "references", + "tracking_id", + "mip_specs", # described by "mip" already + "source_id", "SourceID", + "product", "Product", + "frequency", "Frequency", + "creation_date", + "project_id", "ProjectID", + "table_id", "TableID", + "title", "Title", + "modeling_realm", + "doi", + "VersionID", # described by "version" already + ] + + attrs_to_keep_exist = [ + att for att in cube.attributes if att in attrs_to_keep + ] + for att in attrs_to_keep_exist: + attributes[att] = cube.attributes[att] + + utils.set_global_atts(cube, attributes) + + # Set correct names + cube.var_name = definition.short_name + # cube.standard_name = definition.standard_name + cube.long_name = definition.long_name + + # 
Fix units (if needed) + # input variable reports m-3 m-3 instead of m3 m-3 + if cube.var_name == "sm": + cube.units = definition.units + # Convert units to CMOR units + cube.convert_units(definition.units) + + # Add height2m or height10m if needed + if 'height2m' in definition.dimensions: + utils.add_height2m(cube) + elif 'height10m' in definition.dimensions: + utils.add_height10m(cube) + + # Fix data type + cube.data = cube.core_data().astype('float32') + + # Roll longitude + cube.coord('longitude').points = cube.coord('longitude').points + 180. + nlon = len(cube.coord('longitude').points) + cube.data = da.roll(cube.core_data(), int(nlon / 2), axis=-1) + + # Fix coordinates + cube = _fix_coordinates(cube, definition) + + cube.coord('latitude').attributes = None + cube.coord('longitude').attributes = None + + cube = _fix_time_monthly(cube) + + logger.debug("Saving cube\n%s", cube) + logger.debug("Setting time dimension to UNLIMITED while saving!") + utils.save_variable(cube, cube.var_name, + out_dir, attributes, + unlimited_dimensions=['time']) + logger.info("Finished CMORizing %s", ', '.join(in_files)) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Run CMORizer for MERRA2.""" + cfg.pop('cmor_table') + if start_date is None: + start_date = 1980 + else: + start_date = start_date.year + if end_date is None: + end_date = 2022 + else: + end_date = end_date.year + for year in range(start_date, end_date + 1): + for short_name, var in cfg['variables'].items(): + if 'short_name' not in var: + var['short_name'] = short_name + # Now get list of files + filepattern = os.path.join(in_dir, var['file'].format(year=year)) + in_files = glob.glob(filepattern) + if not in_files: + logger.warning('Year %s data not found', year) + continue + _extract_variable(in_files, var, cfg, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/mls_aura.py b/esmvaltool/cmorizers/data/formatters/datasets/mls_aura.py new file mode 100644 index 0000000000..0a5031b243 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/mls_aura.py @@ -0,0 +1,341 @@ +"""ESMValTool CMORizer for MLS-AURA data. + +Tier + Tier 3: restricted dataset. + +Source + https://disc.gsfc.nasa.gov/datasets/ML2RHI_004/summary + https://disc.gsfc.nasa.gov/datasets/ML2T_004/summary + +Last access + 20200203 + +Download and processing instructions + Select "Data Access" -> "Subset/Get Data" -> "Get Data" and follow the + "Instructions for downloading". All *.he5 files need to be saved in the + $RAWOBS/Tier3/MLS-AURA directory, where $RAWOBS refers to the RAWOBS + directory defined in the configuration. Apply this procedure to + both links provided above. The temperature fields are necessary for quality + control of the RHI data (see Data Quality Document for MLS-AURA for more + information). + A registration is required for downloading the data. 
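+
+    The formatter grids the individual profiles onto a regular 1°x1°
+    latitude-longitude grid (daily means computed via a pandas pivot
+    table) and, for monthly MIPs, aggregates these to monthly means.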
+""" + +import glob +import logging +import os +from datetime import datetime + +import iris +import iris.coord_categorisation +import netCDF4 +import numpy as np +import pandas as pd +from cf_units import Unit + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + +ALL_LATS = np.linspace(-90.0, 90.0, 91) +ALL_LONS = np.linspace(-180.0, 180.0, 181) +LAT_COORD = iris.coords.DimCoord(ALL_LATS, + var_name='lat', + standard_name='latitude', + long_name='latitude', + units='degrees') +LON_COORD = iris.coords.DimCoord(ALL_LONS, + var_name='lon', + standard_name='longitude', + long_name='Longitude', + units='degrees') +TIME_UNITS = Unit('days since 1850-01-01 00:00:00', calendar='standard') + + +def _cut_cube(cube, var_info): + """Cut cube if desired.""" + if 'cut_levels_outside' in var_info: + lims = var_info['cut_levels_outside'] + constraint = iris.Constraint( + air_pressure=lambda cell: lims[0] < cell < lims[1]) + cube = cube.extract(constraint) + return cube + + +def _extract_cubes(files_dict, cfg): + """Extract cubes from files.""" + cubes_dict = _get_cubes_dict(files_dict, cfg) + + # Create final cubes and return it + cube_dict = {} + for (var, cubes) in cubes_dict.items(): + var_info = cfg['variables'][var] + cube = cubes.concatenate_cube() + cube = _cut_cube(cube, var_info) + + # Calculate monthly mean if desired + if 'mon' in cfg['mip']: + logger.info("Calculating monthly mean") + iris.coord_categorisation.add_month_number(cube, 'time') + iris.coord_categorisation.add_year(cube, 'time') + cube = cube.aggregated_by(['month_number', 'year'], + iris.analysis.MEAN) + cube.remove_coord('month_number') + cube.remove_coord('year') + + # Save cube + cube_dict[var] = cube + + return cube_dict + + +def _get_cube(gridded_data, time, pressure): + """Get :class:`iris.cube.Cube` with correct data.""" + time_coord = iris.coords.DimCoord(TIME_UNITS.date2num(time), + var_name='time', + standard_name='time', + long_name='time', + units=TIME_UNITS) + pressure_coord = iris.coords.DimCoord(pressure, + var_name='plev', + standard_name='air_pressure', + long_name='pressure', + units='hPa') + coord_spec = [ + (time_coord, 0), + (pressure_coord, 1), + (LAT_COORD, 2), + (LON_COORD, 3), + ] + cube = iris.cube.Cube(gridded_data, + dim_coords_and_dims=coord_spec, + units='%') + return cube + + +def _get_cubes_dict(files_dict, cfg): + """Get :obj:`dict` of :class:`iris.cube.CubeList`.""" + cubes_dict = {var: iris.cube.CubeList() for var in cfg['variables']} + + # Process files + file_idx = 1 + for (filename_rhi, filename_t) in files_dict.values(): + logger.info("Processing file %5d/%5d [%s]", file_idx, len(files_dict), + filename_rhi) + + # Read files + (nc_rhi, nc_loc) = _open_nc_file(filename_rhi, 'RHI') + (nc_t, _) = _open_nc_file(filename_t, 'Temperature') + + # Get cubes for all desired variables + for (var, var_info) in cfg['variables'].items(): + (gridded_data, time, + pressure) = _get_gridded_data(var_info['raw_var'], nc_rhi, nc_loc, + nc_t, filename_rhi) + cubes_dict[var].append(_get_cube(gridded_data, time, pressure)) + file_idx += 1 + + return cubes_dict + + +def _get_date(filename, variable, cfg): + """Extract date from a filename.""" + file_pattern = cfg['file_pattern'].format(var=variable) + filename = os.path.basename(filename) + filename = os.path.splitext(filename)[0] + filename = filename.replace(file_pattern, '') + date = filename.split('_')[1] + return date + + +def _get_file_attributes(filename): + """Get global file attributes.""" + dataset = 
netCDF4.Dataset(filename, mode='r') + add_info = dataset.groups['HDFEOS'].groups['ADDITIONAL'] + attrs = add_info.groups['FILE_ATTRIBUTES'] + return {key: attrs.getncattr(key) for key in attrs.ncattrs()} + + +def _get_files_single_var(variable, in_dir, cfg): + """Get files for a single variable.""" + filename = cfg['file_pattern'].format(var=variable) + ext = cfg['extension'] + file_pattern = f'{filename}*.{ext}' + + # Get all files + files = glob.glob(os.path.join(in_dir, file_pattern)) + + # Only accept certain years if desired + if 'start_year' in cfg: + start_year = cfg['start_year'] + logger.info("Only considering year %d and above", start_year) + else: + start_year = -np.inf + if 'end_year' in cfg: + end_year = cfg['end_year'] + logger.info("Only considering year %d and below", end_year) + else: + end_year = np.inf + files_dict = {} + for file_ in files: + date = _get_date(file_, variable, cfg) + year = int(date[:4]) + if start_year <= year <= end_year: + files_dict[date] = file_ + + return files_dict + + +def _get_files(in_dir, cfg): + """Get all files for a given variable.""" + logger.info("Searching files") + + # Get file dictionaries + files_dict_rhi = _get_files_single_var('RHI', in_dir, cfg) + files_dict_t = _get_files_single_var('Temperature', in_dir, cfg) + + # Check if all files are available + all_files = {} + for (date, filename_rhi) in files_dict_rhi.items(): + if date not in files_dict_t: + raise ValueError(f"No corresponding temperature file for RHI file " + f"{filename_rhi} found") + all_files[date] = (filename_rhi, files_dict_t[date]) + logger.info("Found %d files", len(all_files)) + return all_files + + +def _get_gridded_data(variable, nc_rhi, nc_loc, nc_t, filename): + """Get gridded data.""" + file_attrs = _get_file_attributes(filename) + + # Extract coords + time = datetime(year=file_attrs['GranuleYear'], + month=file_attrs['GranuleMonth'], + day=file_attrs['GranuleDay'], + hour=12) + pressure = nc_loc.variables['Pressure'][:] + lat = nc_loc.variables['Latitude'][:] + lon = nc_loc.variables['Longitude'][:] + + # Extract data + data = np.ma.array(nc_rhi.variables[variable][:], + mask=_get_mask(nc_rhi, nc_t, nc_loc)) + + # For version 4.20, remove last four profiles (see Data Quality Document) + if file_attrs['PGEVersion'] == 'V04-20': + data = data[:-4] + lat = lat[:-4] + lon = lon[:-4] + + # Place on 1x1 degree grid + lat = np.around(lat) + lon = np.around(lon) + + # Iterate over pressure levels + gridded_data = [] + for (p_idx, _) in enumerate(pressure): + data_frame = pd.DataFrame({ + 'lat': lat, + 'lon': lon, + 'data': data[:, p_idx].filled(np.nan), + }) + + # Create daily-mean gridded data using pivot table + data_frame = pd.pivot_table(data_frame, + values='data', + index='lat', + columns='lon', + aggfunc=np.mean, + dropna=False) + data_frame = data_frame.reindex(index=ALL_LATS, columns=ALL_LONS) + gridded_data.append(data_frame.values) + gridded_data = np.expand_dims(np.array(gridded_data), 0) + gridded_data = np.ma.masked_invalid(gridded_data) + + return (gridded_data, time, pressure) + + +def _get_mask(nc_rhi, nc_t, nc_loc): + """Remove invalid data (see Data Quality Document of MLS-AURA).""" + mask = np.full(nc_rhi.variables['L2gpValue'][:].shape, False) + + # Status (accept only even status flags) + status = np.expand_dims(nc_rhi.variables['Status'][:], -1) + status = np.broadcast_to(status, mask.shape) + mask |= np.where(status % 2, True, False) + + # Precision of RHI (accept only positive numbers) + precision = nc_rhi.variables['L2gpPrecision'][:] + 
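# The remaining criteria below mark points as masked (True) where the
+    # acceptance test fails; thresholds follow the MLS-AURA Data Quality
+    # Document cited in the module docstring.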
mask |= np.where(precision > 0, False, True) + + # Quality of RHI (accept only values greater than 1.45) + quality_rhi = np.expand_dims(nc_rhi.variables['Quality'][:], -1) + quality_rhi = np.broadcast_to(quality_rhi, mask.shape) + mask |= np.where(quality_rhi > 1.45, False, True) + + # Quality of Temperature (accept only values greater than 0.2/0.9) + pressure_greater_90 = np.where(nc_loc.variables['Pressure'][:] > 90, True, + False) + quality_t = np.expand_dims(nc_t.variables['Quality'][:], -1) + quality_t = np.broadcast_to(quality_t, mask.shape) + new_mask = np.full(mask.shape, False) + new_mask[:, pressure_greater_90] = np.where( + quality_t[:, pressure_greater_90] > 0.9, False, True) + new_mask[:, ~pressure_greater_90] = np.where( + quality_t[:, ~pressure_greater_90] > 0.2, False, True) + mask |= new_mask + + # Convergence of RHI (accept only values smaller than 2.0) + convergence_rhi = np.expand_dims(nc_rhi.variables['Convergence'][:], -1) + convergence_rhi = np.broadcast_to(convergence_rhi, mask.shape) + mask |= np.where(convergence_rhi < 2.0, False, True) + + # Convergence of Temperature (accept only values smaller than 1.03) + convergence_t = np.expand_dims(nc_t.variables['Convergence'][:], -1) + convergence_t = np.broadcast_to(convergence_t, mask.shape) + mask |= np.where(convergence_t < 1.03, False, True) + + return mask + + +def _open_nc_file(filename, variable): + """Open :class:`netCDF4.Dataset`.""" + dataset = netCDF4.Dataset(filename, mode='r') + swaths = dataset.groups['HDFEOS'].groups['SWATHS'] + var = swaths.groups[variable] + return (var.groups['Data Fields'], var.groups['Geolocation Fields']) + + +def _save_cube(cube, cmor_info, attrs, out_dir): + """Save :class:`iris.cube.Cube`.""" + cube.coord('air_pressure').convert_units('Pa') + utils.fix_var_metadata(cube, cmor_info) + utils.convert_timeunits(cube, 1950) + cube = utils.fix_coords(cube) + utils.set_global_atts(cube, attrs) + utils.save_variable(cube, + cmor_info.short_name, + out_dir, + attrs, + unlimited_dimensions=['time']) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + glob_attrs = cfg['attributes'] + glob_attrs['mip'] = cfg['mip'] + cmor_table = cfg['cmor_table'] + files_dict = _get_files(in_dir, cfg) + + # Run the cmorization + cube_dict = _extract_cubes(files_dict, cfg) + + # Save data + for (var, cube) in cube_dict.items(): + logger.info("Saving variable '%s'", var) + var_info = cfg['variables'][var] + if 'mip' in var_info: + glob_attrs['mip'] = var_info['mip'] + cmor_info = cmor_table.get_variable(glob_attrs['mip'], var) + _save_cube(cube, cmor_info, glob_attrs, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/mobo_dic2004_2019.py b/esmvaltool/cmorizers/data/formatters/datasets/mobo_dic2004_2019.py new file mode 100644 index 0000000000..570e20f715 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/mobo_dic2004_2019.py @@ -0,0 +1,17 @@ +"""ESMValTool CMORizer for MOBO-DIC2004-2019 data. + +Tier + Tier 2: other freely-available dataset. 
+ +Source + https://www.nodc.noaa.gov/archive/arc0211/0277099/2.3/data/0-data/ + +Last access + 20231009 + +Download and processing instructions + Download the file MPI_MOBO-DIC_2004-2019_v2.nc + +""" + +from .mobo_dic_mpim import cmorization # noqa diff --git a/esmvaltool/cmorizers/data/formatters/datasets/mobo_dic_mpim.py b/esmvaltool/cmorizers/data/formatters/datasets/mobo_dic_mpim.py new file mode 100644 index 0000000000..7b10ef0b5e --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/mobo_dic_mpim.py @@ -0,0 +1,209 @@ +"""ESMValTool CMORizer for MOBO-DIC_MPIM data. + +Tier + Tier 2: other freely-available dataset. + +Source + https://www.ncei.noaa.gov/data/oceans/ncei/ocads/data/0221526/ + +Last access + 20221103 + +Download and processing instructions + Download the file MOBO-DIC_MPIM_monthly_clim.nc + +""" + +import logging +import warnings +from datetime import datetime +from pathlib import Path + +import iris +import numpy as np +from cf_units import Unit +from dask import array as da +from iris import NameConstraint +from iris.coords import CellMethod, DimCoord + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +TIME_UNITS = Unit('days since 1950-01-01 00:00:00', calendar='standard') + + +def _callback_fix_missing_value(cube, field, _): + """Create masked array from missing_value.""" + if hasattr(field.cf_data, 'missing_value'): + missing_value = float(field.cf_data.missing_value) + logger.info("Fixing missing value (%f)", missing_value) + cube.data = da.ma.masked_equal(cube.core_data(), missing_value) + + +def _fix_climatological_time(cube): + """Fix climatology coordinate.""" + # Following the doc the covered time period of the climatology is + # January 2004 to December 2017 (Use 2011 as the "mean" year). 
See + # https://www.ncei.noaa.gov/access/metadata/landing-page/bin/ + # iso?id=gov.noaa.nodc%3A0221526 + time_points = TIME_UNITS.date2num( + [datetime(2011, m, 15) for m in range(1, 13)] + ) + time_bounds = [ + [datetime(2004, m, 1), datetime(2017, m + 1, 1)] for m in range(1, 12) + ] + time_bounds.append([datetime(2004, 12, 1), datetime(2018, 1, 1)]) + time_bounds = TIME_UNITS.date2num(time_bounds) + + # Add new time coordinate to cube + time_coord = DimCoord( + time_points, + bounds=time_bounds, + standard_name='time', + long_name='time', + var_name='time', + units=TIME_UNITS, + climatological=True, + ) + cube.remove_coord('month of the year') + cube.add_dim_coord(time_coord, 0) + + # Fix cell methods + cube.add_cell_method(CellMethod('mean within years', coords=time_coord)) + cube.add_cell_method(CellMethod('mean over years', coords=time_coord)) + + +def _fix_time(cube): + """Fix time coordinate.""" + julian_day_coord = cube.coord('Julian Day') + + # Calculate bounds of new time coordinate + # print(str(julian_day_coord.units)) + datetime_base = datetime.strptime( + str(julian_day_coord.units).partition(' since ')[2], + '%Y-%m-%d %H:%M:%S', + ) + base_year = datetime_base.year + base_month = datetime_base.month + all_months = list(julian_day_coord.points.astype(int)) + [ + julian_day_coord.points.astype(int).max() + 1 # 1 more month for bnds + ] + bounds_datetimes = [ + datetime(base_year + (m - 1) // 12, base_month + (m - 1) % 12, 1) + for m in all_months + ] + time_bounds = np.stack( + ( + TIME_UNITS.date2num(bounds_datetimes[:-1]), + TIME_UNITS.date2num(bounds_datetimes[1:]), + ), + axis=-1, + ) + + # Calculate time points as mean of bounds + time_points = np.mean(time_bounds, axis=1) + + # Add new time coordinate to cube + time_coord = DimCoord( + time_points, + bounds=time_bounds, + standard_name='time', + long_name='time', + var_name='time', + units=TIME_UNITS, + ) + cube.remove_coord('Julian Day') + cube.add_dim_coord(time_coord, 0) + + +def _fix_var_metadata(var_info, cmor_info, cube): + """Fix variable metadata. + + Note + ---- + The original units of 'dissic' are mumol/kg. To convert to the CMOR units + mol/m3, we assume a constant sea water density of 1032 kg/m3, which is + approximately the sea water density for T=4°C, salinity=35PSU, and p=100bar + according to the UNESCO formula (UNESCO, 1981, Tenth report of the joint + panel on oceanographic tables and standards, UNESCO Technical Papers in + Marine Science, see + https://www.wkcgroup.com/tools-room/seawater-density-calculator/ and + https://link.springer.com/content/pdf/bbm:978-3-319-18908-6/1.pdf). 
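+
+    As a worked example of this assumption, a raw DIC value of
+    2000 mumol/kg converts to 2000e-6 mol/kg * 1032 kg/m3 = 2.064 mol/m3.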
+ + """ + if 'raw_units' in var_info: + cube.units = var_info['raw_units'] + + # Special conversion for dissic (see Note above) + if cmor_info.short_name == 'dissic': + cube.data = cube.core_data() * 1032.0 + cube.units *= 'kg m-3' + + cube.convert_units(cmor_info.units) + + utils.fix_var_metadata(cube, cmor_info) + + +def _extract_variable(var_info, cmor_info, attrs, filepath, out_dir): + """Extract variable.""" + var = cmor_info.short_name + raw_var = var_info.get('raw_name', var) + + # Load data + with warnings.catch_warnings(): + warnings.filterwarnings( + action='ignore', + message='Ignoring netCDF variable .* invalid units .*', + category=UserWarning, + module='iris', + ) + cube = iris.load_cube(filepath, NameConstraint(var_name=raw_var), + callback=_callback_fix_missing_value) + + # Fix variable metadata + _fix_var_metadata(var_info, cmor_info, cube) + + # Fix coordinates + if cube.coords('month of the year'): # MOBO-DIC_MPIM + _fix_climatological_time(cube) + elif cube.coords('Julian Day'): # MOBO-DIC2004-2019 + _fix_time(cube) + cube.coord('depth').units = 'm' + cube = utils.fix_coords(cube, overwrite_time_bounds=False) + + # Fix global metadata + utils.set_global_atts(cube, attrs) + + # Save variable + with warnings.catch_warnings(): + warnings.filterwarnings( + action='ignore', + message='WARNING: missing_value not used', + category=UserWarning, + module='iris', + ) + utils.save_variable( + cube, + var, + out_dir, + attrs, + local_keys=['comment', 'positive'], + unlimited_dimensions=['time'], + ) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + cmor_table = cfg['cmor_table'] + glob_attrs = cfg['attributes'] + + # Run the cmorization + for (var, var_info) in cfg['variables'].items(): + filepath = Path(in_dir) / var_info['filename'] + logger.info("CMORizing variable '%s' from file %s", var, filepath) + glob_attrs['comment'] = var_info['comment'] + glob_attrs['mip'] = var_info['mip'] + cmor_info = cmor_table.get_variable(var_info['mip'], var) + _extract_variable(var_info, cmor_info, glob_attrs, filepath, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/modis.ncl b/esmvaltool/cmorizers/data/formatters/datasets/modis.ncl new file mode 100644 index 0000000000..fec324a951 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/modis.ncl @@ -0,0 +1,250 @@ +; ############################################################################# +; ESMValTool CMORizer for MODIS data +; ############################################################################# +; +; Tier +; Tier 3: restricted dataset. +; +; Source +; https://ladsweb.modaps.eosdis.nasa.gov/search/order +; +; Last access +; 20190209 +; +; Download and processing instructions +; In Products: select "MODIS Aqua", "Collection 6.1" and +; "L3 Atmosphere Product", click on MYD08_M3. +; In Time: select from 2000-01-01 to today. +; In Location: skip, the global domain will be applied. +; In Files: select all. +; Submit the order. +; A registration is required to download the data. +; +; Caveats +; clwvi and clivi data are in-cloud values whereas CMIP5 models provide +; grid-box averages --> multiply MODIS clwvi and clivi values with cloud +; fraction as a first guess +; +; Modification history +; 20180209-righi_mattia: fixed bug in lwpStderr. +; 20180209-hassler_birgit: adapted to v2. +; 20180810-righi_mattia: fix minor calendar issue. +; 20180806-righi_mattia: code cleaning. +; 20170116-lauer_axel: using cirrus fraction to gridbox averages. 
+; 20160408-lauer_axel: added processing of uncertainties. +; 20151118-lauer_axel: bugfix: added unit conversion. +; 20150430-evaldsson_martin: written. +; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + \ + "/data/formatters/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "modis.ncl" + + ; Source name + OBSNAME = "MODIS" + + ; Tier + TIER = 3 + + ; Selected variable (standard name) + VAR = (/"clwvi", \ + "clivi", \ + "clt", \ + "lwpStderr", \ + "iwpStderr", \ + "od550aer"/) + + ; Name in the raw data + NAME = (/"Cloud_Water_Path_Liquid_Mean_Mean", \ + "Cloud_Water_Path_Ice_Mean_Mean", \ + "Cloud_Fraction_Mean_Mean", \ + "Cloud_Water_Path_Liquid_Mean_Uncertainty", \ + "Cloud_Water_Path_Ice_Mean_Uncertainty", \ + "AOD_550_Dark_Target_Deep_Blue_Combined_Mean_Mean"/) + + ; MIP + MIP = (/"Amon", "Amon", "Amon", "Amon", "Amon", "aero"/) + + ; Frequency + FREQ = (/"mon", "mon", "mon", "mon", "mon", "mon"/) + + ; Version + VERSION = "MYD08_M3" + + ; CMOR table + CMOR_TABLE = getenv("cmor_tables") + \ + (/"/cmip5/Tables/CMIP5_Amon", \ + "/cmip5/Tables/CMIP5_Amon", \ + "/cmip5/Tables/CMIP5_Amon", \ + "/custom/CMOR_lwpStderr.dat", \ + "/custom/CMOR_iwpStderr.dat", \ + "/cmip5/Tables/CMIP5_aero"/) + + ; Type + TYPE = "sat" + + ; Global attributes + SOURCE = "https://ladsweb.modaps.eosdis.nasa.gov/search/order" + REF1 = "Platnick et al., IEEE Trans. Geosci. Remote Sens., " + \ + "doi:10.1109/TGRS.2002.808301, 2003." + REF2 = "Levy et al., Atmos. Meas. Tech., " + \ + "doi:10.5194/amt-6-2989-2013, 2013." + COMMENT = "" + +end + +begin + + ; List of files + FILES = systemfunc("ls -1 " + input_dir_path + VERSION + ".A*.hdf") + + do ff = 0, dimsizes(FILES) - 1 + + fin = addfile(FILES(ff), "r") + + ; Get time + infile = systemfunc("basename " + FILES(ff)) + date = yyyyddd_to_yyyymmdd(toint(str_get_cols(infile, 10, 16))) + year = toint(str_get_cols(tostring(date), 0, 3)) + month = toint(str_get_cols(tostring(date), 4, 5)) + dm = days_in_month(year, month) + + ; Loop over variables to fetch from input file + do vv = 0, dimsizes(VAR) - 1 + + invar = fin->$NAME(vv)$ + invar_fv = invar@_FillValue + invar_coords = invar + invar := tofloat(invar) + invar := where(invar.eq.tofloat(invar_fv), \ + default_fillvalue("float"), invar) + + ; Special case clwvi as the sum lwp + iwp + if (VAR(vv).eq."clwvi") then + if (NAME(vv).ne."Cloud_Water_Path_Liquid_Mean_Mean") then + error_msg("f", DIAG_SCRIPT, "", "cannot calculate clwvi") + end if + + ; Read cirrus fraction + ; cfin = fin->Cirrus_Fraction_SWIR_FMean + cfin = fin->Cirrus_Fraction_Infrared_FMean + cif = tofloat(cfin * cfin@scale_factor + cfin@add_offset) + ; liquid fraction is estimated assuming random overlap, i.e. 
+ ; ctot = 1 - (1 - cif) * (1 - lif) + ; --> lif = 1 - (1 - ctot) / (1 - cif) + delete(cfin) + cfin = fin->Cloud_Fraction_Mean_Mean + ctot = tofloat(cfin * cfin@scale_factor + cfin@add_offset) + delete(cfin) + cif = where(cif.gt.0.999, cif@_FillValue, cif) + lif = 1.0 - (1.0 - ctot) / (1.0 - cif) + lif = where(lif.lt.0, 0, lif) + tmpvar = fin->Cloud_Water_Path_Ice_Mean_Mean ; read ice water path + tmpvar_fv = tmpvar@_FillValue + tmpvar := tofloat(tmpvar) + tmpvar := where(tmpvar.eq.tofloat(tmpvar_fv), \ + default_fillvalue("float"), \ + tmpvar) + tmpvar = tmpvar * cif ; convert iwp in-cloud value to gridbox avg + invar = invar * lif ; convert lwp in-cloud value to grid-box avg + invar = invar + tmpvar ; clwvi = lwp + iwp + delete(tmpvar) + delete(lif) + delete(cif) + invar = 0.001 * invar ; [g/m2] --> [kg/m2] + end if + + ; lwp and iwp are in-cloud values + ; convert lwp/iwp to grid-box averages by multiplying with + ; average cloud fraction (not optimum but best we can do at the moment) + if (any((/"clivi", "iwpStderr", "lwpStderr"/) .eq. VAR(vv))) then + + ; Read cirrus fraction (0-1) + ; cfin = fin->Cirrus_Fraction_SWIR_FMean + cfin = fin->Cirrus_Fraction_Infrared_FMean + cf = tofloat(cfin * cfin@scale_factor + cfin@add_offset) + delete(cfin) + if (VAR(vv).eq."lwpStderr") then + cfin = fin->Cloud_Fraction_Mean_Mean + ctot = tofloat(cfin * cfin@scale_factor + cfin@add_offset) + delete(cfin) + cif = where(cf.gt.0.999, cf@_FillValue, cf) + cf = 1.0 - (1.0 - ctot) / (1.0 - cif) + cf = where(cf.lt.0, 0, cf) + delete(cif) + delete(ctot) + end if + invar = invar * cf ; ; "grid-box average" lwp/iwp + delete(cf) + invar = 0.001 * invar ; [g/m2] --> [kg/m2] + end if + + invar@_FillValue = default_fillvalue("float") + copy_VarCoords(invar_coords, invar) + if (isatt(invar_coords, "scale_factor")) then + invar = invar * tofloat(invar_coords@scale_factor) + end if + if (isatt(invar_coords, "add_offset")) then + invar = invar + tofloat(invar_coords@add_offset) + end if + + if (VAR(vv).eq."clt") then + invar = 100.0 * invar ; [1] --> [%] + end if + + ; Create output variable + lat = fin->YDim + lon = fin->XDim + output = new((/1, dimsizes(lat), dimsizes(lon)/), float) + output!0 = "time" + output!1 = "lat" + output!2 = "lon" + output&time = cd_inv_calendar(year, month, 15, 0, 0, 0, TUNITS, 0) + output&lat = lat + output&lon = lon + output(0, :, :) = (/invar/) + delete(invar) + delete(invar_coords) + + ; Format coordinates + format_coords(output, year + sprinti("%0.2i", month) + "01", \ + year + sprinti("%0.2i", month) + dm, FREQ(vv)) + + ; Set variable attributes + tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ(vv)) + + ; Set global attributes + if (VAR(vv).ne."od550aer") then + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF1, COMMENT) + else + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF2, COMMENT) + end if + + ; Output file + DATESTR = \ + year + sprinti("%0.2i", month) + "-" + year + sprinti("%0.2i", month) + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, str_sub_str(VERSION, "_", "-"), \ + MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" + + ; Write variable + write_nc(fout, VAR(vv), output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + end do + + end do + +end diff --git a/esmvaltool/cmorizers/data/formatters/datasets/mte.py b/esmvaltool/cmorizers/data/formatters/datasets/mte.py new file mode 100644 index 0000000000..e82baab967 --- 
/dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/mte.py @@ -0,0 +1,82 @@ +"""ESMValTool CMORizer for MTE data. + +Tier + Tier 3: restricted dataset. + +Source + http://www.bgc-jena.mpg.de/geodb/BGI/Home + +Last access + 20190507 + +Download and processing instructions + Download the following files: + EnsembleGPP_GL.nc + A registration is required for downloading the data. +""" + +import logging +import os + +import iris +from cf_units import Unit +from iris import NameConstraint + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def _fix_time_coord(cube): + """Set time points to central day of month.""" + time_coord = cube.coord('time') + new_unit = Unit('days since 1850-01-01 00:00:00', calendar='standard') + time_coord.convert_units(new_unit) + old_time = new_unit.num2date(time_coord.points) + new_time = [d.replace(day=15) for d in old_time] + time_coord.points = new_unit.date2num(new_time) + + +def _get_filepath(in_dir, basename): + """Find correct name of file (extend basename with timestamp).""" + all_files = [ + f for f in os.listdir(in_dir) + if os.path.isfile(os.path.join(in_dir, f)) + ] + for filename in all_files: + if filename.endswith(basename): + return os.path.join(in_dir, filename) + raise OSError( + f"Cannot find input file ending with '{basename}' in '{in_dir}'") + + +def _extract_variable(raw_var, cmor_info, attrs, filepath, out_dir): + """Extract variable.""" + var = cmor_info.short_name + cube = iris.load_cube(filepath, NameConstraint(var_name=raw_var)) + _fix_time_coord(cube) + utils.fix_var_metadata(cube, cmor_info) + utils.convert_timeunits(cube, 1950) + cube = utils.fix_coords(cube) + utils.set_global_atts(cube, attrs) + utils.save_variable(cube, + var, + out_dir, + attrs, + unlimited_dimensions=['time']) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + glob_attrs = cfg['attributes'] + cmor_table = cfg['cmor_table'] + filepath = _get_filepath(in_dir, cfg['filename']) + logger.info("Found input file '%s'", filepath) + + # Run the cmorization + for (var, var_info) in cfg['variables'].items(): + logger.info("CMORizing variable '%s'", var) + glob_attrs['mip'] = var_info['mip'] + cmor_info = cmor_table.get_variable(var_info['mip'], var) + raw_var = var_info.get('raw', var) + _extract_variable(raw_var, cmor_info, glob_attrs, filepath, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/ncep_doe_r2.py b/esmvaltool/cmorizers/data/formatters/datasets/ncep_doe_r2.py new file mode 100644 index 0000000000..3d6d56512d --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/ncep_doe_r2.py @@ -0,0 +1,32 @@ +"""ESMValTool CMORizer for NCEP-DOE-R2 data. + +Tier + Tier 2: other freely-available dataset. + +Source + https://psl.noaa.gov/data/gridded/data.ncep.reanalysis2.html + +Last access + 20220906 + +Download and processing instructions + To facilitate the download, the links to the https server are provided. 
+ + https://downloads.psl.noaa.gov/Datasets/ncep.reanalysis2/Monthlies/ + pressure/ + rhum.mon.mean.nc + air.mon.mean.nc + https://downloads.psl.noaa.gov/Datasets/ncep.reanalysis2/Monthlies/ + gaussian_grid/ + tcdc.eatm.mon.mean.nc + https://downloads.psl.noaa.gov/Datasets/ncep.reanalysis2/Monthlies/ + surface/ + pr_wtr.eatm.mon.mean.nc + +Caveats + +""" +from .ncep_ncar_r1 import cmorization + +# The following line makes it clear that the above import is not an error +cmorization diff --git a/esmvaltool/cmorizers/data/formatters/datasets/ncep_ncar_r1.py b/esmvaltool/cmorizers/data/formatters/datasets/ncep_ncar_r1.py new file mode 100644 index 0000000000..c0f33286d5 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/ncep_ncar_r1.py @@ -0,0 +1,172 @@ +"""ESMValTool CMORizer for NCEP-NCAR-R1 data. + +Tier + Tier 2: other freely-available dataset. + +Source + https://psl.noaa.gov/data/gridded/data.ncep.reanalysis.html + +Last access + 20221116 + +Download and processing instructions + To facilitate the download, the links to the ftp server are provided. + Since the filenames are sometimes identical across the different + subsets, save the data in two subdirectories in input_dir_path. + Subdirectory pressure/: + ftp://ftp.cdc.noaa.gov/Projects/Datasets/data.ncep.reanalysis/pressure/ + air.mon.mean.nc + hgt.mon.mean.nc + rhum.mon.mean.nc + shum.mon.mean.nc + uwnd.mon.mean.nc + vwnd.mon.mean.nc + omega.mon.mean.nc + ftp://ftp.cdc.noaa.gov/Datasets/ncep.reanalysis.dailyavgs/pressure/ + uwnd.*.nc + vwnd.*.nc + + Subdirectory surface/: + ftp://ftp.cdc.noaa.gov/Datasets/ncep.reanalysis.derived/surface/ + air.mon.mean.nc + pr_wtr.mon.mean.nc + slp.mon.mean.nc + wspd.mon.mean.nc + rhum.mon.mean.nc + ftp://ftp.cdc.noaa.gov/Datasets/ncep.reanalysis.derived/surface_gauss/ + air.2m.mon.mean.nc + prate.sfc.mon.mean.nc + tmax.2m.mon.mean.nc + tmin.2m.mon.mean.nc + ftp://ftp.cdc.noaa.gov/Datasets/ncep.reanalysis.derived/other_gauss/ + tcdc.eatm.mon.mean.nc + ftp://ftp.cdc.noaa.gov/Datasets/ncep.reanalysis.dailyavgs/surface_gauss/ + prate.sfc.gauss.*.nc + ftp://ftp.cdc.noaa.gov/Datasets/ncep.reanalysis.dailyavgs/other_gauss/ + ulwrf.ntat.gauss.*.nc + +Caveats + +""" + +import logging +import re +from copy import deepcopy +from pathlib import Path +from warnings import catch_warnings, filterwarnings + +import iris +from cf_units import Unit +from esmvalcore.cmor.table import CMOR_TABLES +from iris import NameConstraint + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def _fix_units(cube, definition): + """Fix issues with the units. + + The exception is `pr`, since the units in the + raw file are not recognized correctly. + """ + if cube.var_name != 'pr': + cube.convert_units(definition.units) + + +def _fix_coordinates(cube, definition, cmor_info): + cube = utils.fix_coords(cube) + + if 'height2m' in cmor_info.dimensions: + utils.add_height2m(cube) + if 'height10m' in cmor_info.dimensions: + utils.add_scalar_height_coord(cube, height=10.)
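+ + # The loop below harmonises each dimension coordinate with its CMOR + # definition: Z-axis units are converted to the CMOR units (for NCEP + # pressure levels this typically means millibar -> Pa), names are taken + # from the CMOR table, points are cast to float64, and the pressure + # coordinate is marked with positive = 'down', since pressure decreases + # with height.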
+ + for coord_def in definition.coordinates.values(): + axis = coord_def.axis + coord = cube.coord(axis=axis) + if axis == 'Z': + coord.convert_units(coord_def.units) + coord.standard_name = coord_def.standard_name + coord.var_name = coord_def.out_name + coord.long_name = coord_def.long_name + coord.points = coord.core_points().astype('float64') + if coord.var_name == 'plev': + coord.attributes['positive'] = 'down' + + return cube + + +def _extract_variable(short_name, var, cfg, raw_filepath, out_dir): + attributes = deepcopy(cfg['attributes']) + attributes['mip'] = var['mip'] + cmor_table = CMOR_TABLES[attributes['project_id']] + definition = cmor_table.get_variable(var['mip'], short_name) + cmor_info = cfg['cmor_table'].get_variable(var['mip'], short_name) + if cmor_info.positive != '': + attributes['positive'] = cmor_info.positive + + # load data + raw_var = var.get('raw', short_name) + with catch_warnings(): + filterwarnings('ignore', + message='Ignoring netCDF variable .* invalid units .*', + category=UserWarning, + module='iris') + cube = iris.load_cube(str(raw_filepath), + NameConstraint(var_name=raw_var)) + + utils.set_global_atts(cube, attributes) + + # Set correct names + cube.var_name = definition.short_name + if definition.standard_name: + cube.standard_name = definition.standard_name + cube.long_name = definition.long_name + + _fix_units(cube, definition) + + utils.fix_var_metadata(cube, cmor_info) + + # fix time units + cube.coord('time').convert_units( + Unit('days since 1950-1-1 00:00:00', calendar='gregorian')) + + cube = _fix_coordinates(cube, definition, cmor_info) + + if var.get("make_negative"): + cube.data = -1 * cube.data + + utils.save_variable( + cube, + short_name, + out_dir, + attributes, + unlimited_dimensions=['time'], + ) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Run CMORizer for NCEP-NCAR-R1.""" + # Run the cmorization + for (short_name, var) in cfg['variables'].items(): + logger.info("CMORizing variable '%s'", short_name) + short_name = var['short_name'] + raw_filenames = Path(in_dir).rglob('*.nc') + filenames = [] + for raw_filename in raw_filenames: + if re.search(var['file'], str(raw_filename)) is not None: + filenames.append(raw_filename) + + for filename in sorted(filenames): + + _extract_variable(short_name, var, cfg, filename, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/ndp.py b/esmvaltool/cmorizers/data/formatters/datasets/ndp.py new file mode 100644 index 0000000000..0e393a452b --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/ndp.py @@ -0,0 +1,86 @@ +"""ESMValTool CMORizer for NDP data. + +Tier + Tier 3: restricted dataset. + +Source + https://data.ess-dive.lbl.gov/view/doi:10.3334/CDIAC/LUE.NDP017.2006 + +Last access + 20191014 + +Download and processing instructions + Download the following file: + ndp017b.tar.gz + A registration is required for downloading the data. 
+""" + +import logging +import os + +import iris +import iris.coord_categorisation +import numpy as np +from cf_units import Unit +from osgeo import gdal + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def _extract_variable(cmor_info, attrs, var_file, out_dir, cfg): + """Extract variable.""" + grid_file = gdal.Open(var_file) + array = grid_file.ReadAsArray() + for missing_value in cfg['missing_values']: + array = np.ma.masked_equal(array, missing_value) + array = array.astype(np.float32) + np.ma.set_fill_value(array, 1e20) + array = np.ma.expand_dims(array, 0) + time = iris.coords.DimCoord([183.0], + bounds=[0.0, 366.0], + units=Unit('days since 2000-01-01 00:00:00'), + standard_name='time', + var_name='time', + long_name='time') + lats = iris.coords.DimCoord( + 83.65972 - np.arange(array.shape[1]) * cfg['delta_degrees'], + standard_name='latitude', + var_name='lat', + long_name='latitude') + lons = iris.coords.DimCoord( + -180.0 + np.arange(array.shape[2]) * cfg['delta_degrees'], + standard_name='longitude', + var_name='lon', + long_name='longitude') + cube = iris.cube.Cube(array, + dim_coords_and_dims=[(time, 0), (lats, 1), + (lons, 2)], + units=Unit('t ha-1')) + cube.convert_units('kg m-2') + utils.fix_var_metadata(cube, cmor_info) + utils.convert_timeunits(cube, 1950) + cube = utils.fix_coords(cube) + utils.set_global_atts(cube, attrs) + utils.save_variable(cube, + cmor_info.short_name, + out_dir, + attrs, + unlimited_dimensions=['time']) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + glob_attrs = cfg['attributes'] + cmor_table = cfg['cmor_table'] + + # Run the cmorization + for (var, var_info) in cfg['variables'].items(): + logger.info("CMORizing variable '%s'", var) + glob_attrs['mip'] = var_info['mip'] + cmor_info = cmor_table.get_variable(var_info['mip'], var) + var_file = os.path.join(in_dir, + var_info['filename'].replace('.gz', '')) + logger.info("Found input file '%s' for variable '%s'", var_file, var) + _extract_variable(cmor_info, glob_attrs, var_file, out_dir, cfg) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/niwa_bs.ncl b/esmvaltool/cmorizers/data/formatters/datasets/niwa_bs.ncl new file mode 100644 index 0000000000..a27bc2461b --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/niwa_bs.ncl @@ -0,0 +1,117 @@ +; ############################################################################# +; ESMValTool CMORizer for NIWA-BS data +; ############################################################################# +; +; Tier +; Tier 3: restricted dataset. +; +; Source +; http://www.bodekerscientific.com/data/total-column-ozone +; +; Last access +; 20190207 +; +; Download and processing instructions +; To get the access data send an email to datasets@bodekerscientific.com +; Download all files from +; ftp://ftp.bodekerscientific.com/CombinedTCOV3.3/Monthly/Patched/NetCDF/ +; Newer versions may become available over time, but make sure to download +; the patched one. Only complete years should be downloaded. +; +; Modification history +; 20190207-righi_mattia: renamed to NIWA-BS and adapted to v2. +; 20140528-gottschaldt_klaus-dirk: written. 
+; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + \ + "/data/formatters/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "niwa_bs.ncl" + + ; Source name + OBSNAME = "NIWA-BS" + + ; Tier + TIER = 3 + + ; Period + YEAR1 = get_year(start_year, 1979) + YEAR2 = get_year(end_year, 2016) + + ; Selected variable (standard name) + VAR = (/"toz", "tozStderr"/) + + ; Name in the raw data + NAME = (/"tco", "tco_uncert"/) + + ; MIP + MIP = (/"Amon", "Amon"/) + + ; Frequency + FREQ = (/"mon", "mon"/) + + ; CMOR table + CMOR_TABLE = getenv("cmor_tables") + "/custom/CMOR_" + VAR + ".dat" + + ; Type + TYPE = "sat" + + ; Version + VERSION = "V3.3" + + ; Global attributes + SOURCE = "http://www.bodekerscientific.com/data/total-column-ozone" + REF = "Bodeker et al., Atmos. Chem. Phys., doi:10.5194/acp-5-2603-2005, 2005" + COMMENT = "" + +end + +begin + + files = systemfunc("ls " + input_dir_path + \ + "NIWA-BS_CombinedTCO_" + VERSION + \ + "_????_Monthly_Patched.nc") + + do vv = 0, dimsizes(VAR) - 1 + + log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") + + f = addfiles(files, "r") + output = f[:]->$NAME(vv)$ + + ; Format coordinates + output!0 = "time" + output!1 = "lat" + output!2 = "lon" + format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ(vv)) + + ; Set variable attributes + tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ(vv)) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = YEAR1 + "01-" + YEAR2 + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, str_sub_str(VERSION, "V", "v"), \ + MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" + + ; Write variable + write_nc(fout, VAR(vv), output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + end do + +end diff --git a/esmvaltool/cmorizers/data/formatters/datasets/noaa_cires_20cr_v2.py b/esmvaltool/cmorizers/data/formatters/datasets/noaa_cires_20cr_v2.py new file mode 100644 index 0000000000..3744cdce67 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/noaa_cires_20cr_v2.py @@ -0,0 +1,30 @@ +"""ESMValTool CMORizer for NOAA-CIRES-20CR-V2 data. + +Tier + Tier 2: other freely-available dataset. + +Source + https://psl.noaa.gov/data/gridded/data.20thC_ReanV2.html + +Last access + 20220906 + +Download and processing instructions + To facilitate the download, the links to the ftp server are provided. + + ftp://ftp.cdc.noaa.gov/Projects/20thC_ReanV2/Monthlies/ + + pr_wtr.eatm.mon.mean.nc + cldwtr.eatm.mon.mean.nc + tcdc.eatm.mon.mean.nc + ulwrf.ntat.mon.mean.nc + uswrf.ntat.mon.mean.nc + shum.mon.mean.nc + +Caveats + +""" +from .ncep_ncar_r1 import cmorization + +# The following line makes it clear that the above import is not an error +cmorization diff --git a/esmvaltool/cmorizers/data/formatters/datasets/noaa_cires_20cr_v3.py b/esmvaltool/cmorizers/data/formatters/datasets/noaa_cires_20cr_v3.py new file mode 100644 index 0000000000..9405473931 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/noaa_cires_20cr_v3.py @@ -0,0 +1,32 @@ +"""ESMValTool CMORizer for NOAA-CIRES-20CR-V3 data. + +Tier + Tier 2: other freely-available dataset.
+ +Source + https://psl.noaa.gov/data/gridded/data.20thC_ReanV3.html + +Last access + 20230327 + +Download and processing instructions + To facilitate the download, the links to the ftp server are provided. + + ftp://ftp.cdc.noaa.gov/Datasets/20thC_ReanV3/Monthlies/ + + pr_wtr.eatm.mon.mean.nc + cldwtr.eatm.mon.mean.nc + tcdc.eatm.mon.mean.nc + ulwrf.ntat.mon.mean.nc + uswrf.ntat.mon.mean.nc + csulf.ntat.mon.mean.nc + csusf.ntat.mon.mean.nc + shum.mon.mean.nc + +Caveats + +""" +from .ncep_ncar_r1 import cmorization + +# The following line makes it clear that the above import is not an error +cmorization diff --git a/esmvaltool/cmorizers/data/formatters/datasets/noaa_ersstv3b.py b/esmvaltool/cmorizers/data/formatters/datasets/noaa_ersstv3b.py new file mode 100644 index 0000000000..f7c5e908be --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/noaa_ersstv3b.py @@ -0,0 +1,89 @@ +"""ESMValTool CMORizer for NOAA ERSST data, version 3b. + + This is the CMORizer script for the NOAA Extended Reconstructed + Sea Surface Temperature (ERSST) in its version 3b. + +Tier + Tier 2: open dataset. + +Source + https://doi.org/10.1175/1520-0442-16.10.1495 + +Last access + 20200520 + +Download and processing instructions + The data is provided by NOAA at: + https://www1.ncdc.noaa.gov/pub/data/cmb/ersst/v3b/netcdf/ + +""" + +import logging +import os +import re + +import iris +from cf_units import Unit + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def _get_filepaths(in_dir, basename): + """Find all files that match the given basename pattern.""" + regex = re.compile(basename) + return_files = [] + for files in os.listdir(in_dir): + if regex.match(files): + return_files.append(os.path.join(in_dir, files)) + + return return_files + + +def _fix_time_coord(cube, _field, _filename): + """Set time points to central day of month.""" + time_coord = cube.coord('time') + new_unit = Unit('days since 1850-01-01 00:00:00', calendar='standard') + time_coord.convert_units(new_unit) + old_time = new_unit.num2date(time_coord.points) + new_time = [d.replace(day=15) for d in old_time] + time_coord.points = new_unit.date2num(new_time) + + +def _extract_variable(raw_var, cmor_info, attrs, filepath, out_dir): + """Extract variable from all files.""" + var = cmor_info.short_name + cubes = iris.load(filepath, raw_var, _fix_time_coord) + iris.util.equalise_attributes(cubes) + cube = cubes.concatenate_cube() + cube = iris.util.squeeze(cube) + + utils.fix_var_metadata(cube, cmor_info) + utils.set_global_atts(cube, attrs) + utils.save_variable(cube, + var, + out_dir, + attrs, + unlimited_dimensions=['time']) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + glob_attrs = cfg['attributes'] + cmor_table = cfg['cmor_table'] + + filepaths = _get_filepaths(in_dir, cfg['filename']) + + if len(filepaths) > 0: + logger.info("Found %d input files in '%s'", len(filepaths), in_dir) + else: + logger.info("No files found, basename: %s", cfg['filename']) + + for (var, var_info) in cfg['variables'].items(): + logger.info("CMORizing variable '%s'", var) + glob_attrs['mip'] = var_info['mip'] + cmor_info = cmor_table.get_variable(var_info['mip'], var) + raw_var = var_info.get('raw', var) + _extract_variable(raw_var, cmor_info, glob_attrs, filepaths, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/noaa_ersstv5.py b/esmvaltool/cmorizers/data/formatters/datasets/noaa_ersstv5.py new file mode 100644
index 0000000000..c01783724c --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/noaa_ersstv5.py @@ -0,0 +1,105 @@ +"""ESMValTool CMORizer for NOAA ERSST data, version 5. + + This is the CMORizer script for the NOAA Extended Reconstructed Sea Surface + Temperature (ERSST) data of version 5. + +Tier + Tier 2: open dataset. + +Source + https://doi.org/10.7289/V5T72FNM + +Last access + 20200520 + +Download and processing instructions + The data is provided by NOAA at: + https://www1.ncdc.noaa.gov/pub/data/cmb/ersst/v5/netcdf/ + +""" + +import logging +import os +import re + +import iris +import cf_units + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def _get_filepaths(in_dir, basename): + """Find correct name of file (extend basename with timestamp).""" + regex = re.compile(basename) + return_files = [] + return_files_gr08 = [] + for file in os.listdir(in_dir): + + if regex.match(file): + year = file.split('.')[2][:4] # ersst.v5.$yr$nm.nc + # return 2 lists as files differ from 2008 + if int(year) < 2008: + return_files.append(os.path.join(in_dir, file)) + else: + return_files_gr08.append(os.path.join(in_dir, file)) + + return return_files, return_files_gr08 + + +def _fix_time_coord(cube, _, _filename): + """Set time points to central day of month and standardise time units.""" + t_coord = cube.coord('time') + _unit = t_coord.units + new_time = [d.replace(day=15) for d in _unit.num2date(t_coord.points)] + t_coord.points = _unit.date2num(new_time).astype('float64') + t_coord.units = cf_units.Unit(t_coord.units.origin, calendar='standard') + t_coord.long_name = 'Time' + + +def _extract_variable(raw_var, cmor_info, attrs, filepaths, out_dir): + """Extract variable and concatenate months.""" + var = cmor_info.short_name + + cubels = iris.load(filepaths, raw_var, _fix_time_coord) + iris.util.equalise_attributes(cubels) + iris.util.unify_time_units(cubels) + cube = cubels.concatenate_cube() + cube = iris.util.squeeze(cube) + + utils.fix_var_metadata(cube, cmor_info) + cube = utils.fix_coords(cube) + + utils.set_global_atts(cube, attrs) + utils.save_variable(cube, + var, + out_dir, + attrs, + unlimited_dimensions=['time']) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + glob_attrs = cfg['attributes'] + cmor_table = cfg['cmor_table'] + + filepaths = _get_filepaths(in_dir, cfg['filename']) + + if len(filepaths[0]) > 0 or len(filepaths[1]) > 0: + totalfiles = len(filepaths[0]) + len(filepaths[1]) + logger.info("%d files before 2008", len(filepaths[0])) + logger.info("Found %d input files in '%s'", totalfiles, in_dir) + else: + logger.info("No files found, basename: %s", cfg['filename']) + + # Run the cmorization + for (var, var_info) in cfg['variables'].items(): + logger.info("CMORizing variable '%s'", var) + glob_attrs['mip'] = var_info['mip'] + cmor_info = cmor_table.get_variable(var_info['mip'], var) + raw_var = var_info.get('raw', var) + _extract_variable(raw_var, cmor_info, glob_attrs, + filepaths[0], out_dir) + _extract_variable(raw_var, cmor_info, glob_attrs, + filepaths[1], out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/noaa_mbl_ch4.py b/esmvaltool/cmorizers/data/formatters/datasets/noaa_mbl_ch4.py new file mode 100644 index 0000000000..30011229de --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/noaa_mbl_ch4.py @@ -0,0 +1,159 @@ +"""ESMValTool CMORizer for NOAA-MBL-CH4 data. + +Tier + Tier 2: other freely-available dataset. 
+ +Source + https://gml.noaa.gov/ccgg/trends_ch4/ + +Last access + 20230717 + +Download and processing instructions + Download the file: + wget https://gml.noaa.gov/webdata/ccgg/trends/ch4/ch4_mm_gl.csv +""" + +import logging +import warnings +from pathlib import Path + +from datetime import datetime +import iris +import pandas as pd +from cf_units import Unit +import numpy as np + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + +LAT_COORD = iris.coords.DimCoord([0.], + bounds=[[-90.0, 90.0]], + var_name='lat', + standard_name='latitude', + long_name='latitude', + units='degrees') +LON_COORD = iris.coords.DimCoord([180.0], + bounds=[[0., 360.]], + var_name='lon', + standard_name='longitude', + long_name='longitude', + units='degrees') + + +def _fix_var_metadata(var_info, cmor_info, cube): + """Fix variable metadata.""" + if 'raw_units' in var_info: + cube.units = var_info['raw_units'] + + cube.convert_units(cmor_info.units) + + utils.fix_var_metadata(cube, cmor_info) + return cube + + +def _get_time_coord(year, month): + """Get time coordinate.""" + point = datetime(year=year, month=month, day=15) + bound_low = datetime(year=year, month=month, day=1) + if month == 12: + month_bound_up = 1 + year_bound_up = year + 1 + else: + month_bound_up = month + 1 + year_bound_up = year + bound_up = datetime(year=year_bound_up, month=month_bound_up, day=1) + time_units = Unit('days since 1950-01-01 00:00:00', calendar='standard') + time_coord = iris.coords.DimCoord( + time_units.date2num(point), + bounds=time_units.date2num([bound_low, bound_up]), + var_name='time', + standard_name='time', + long_name='time', + units=time_units, + ) + return time_coord + + +def _get_cube(row, column_name): + """Create :class:`iris.cube.Cube` from :class:`pandas.Series`.""" + time_coord = _get_time_coord(int(row['year']), int(row['month'])) + lat_coord = LAT_COORD.copy() + lon_coord = LON_COORD.copy() + data = np.ma.masked_invalid(row[column_name]) + cube = iris.cube.Cube( + data.reshape((1, 1, 1)), + dim_coords_and_dims=[(time_coord, 0), (lat_coord, 1), (lon_coord, 2)], + units='ppb', + ) + return cube + + +def _fix_coords(cube): + """Fix coordinates.""" + utils.fix_dim_coordnames(cube) + + return cube + + +def _extract_variable(var_info, cmor_info, attrs, filepath, out_dir): + """Extract variable.""" + var = cmor_info.short_name + + # Load data + with warnings.catch_warnings(): + warnings.filterwarnings( + action='ignore', + message="Skipping global attribute 'units': 'units' is not a " + "permitted attribute", + category=UserWarning, + module='iris', + ) + skiprows = 0 + with open(filepath, 'r', encoding='utf-8') as csv: + for line in csv: + if line.startswith("#"): + skiprows = skiprows + 1 + + data_frame = pd.read_csv(filepath, header=skiprows) + + # Extract cube + cubes = iris.cube.CubeList() + for (_, row) in data_frame.iterrows(): + cube = _get_cube(row, 'average') + cubes.append(cube) + cube = cubes.concatenate_cube() + cube.var_name = var + + # Fix coordinates + cube = _fix_coords(cube) + + # Fix variable metadata + cube = _fix_var_metadata(var_info, cmor_info, cube) + + # Fix global metadata + utils.set_global_atts(cube, attrs) + + # Save variable + utils.save_variable( + cube, + var, + out_dir, + attrs, + unlimited_dimensions=['time'], + ) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + cmor_table = cfg['cmor_table'] + glob_attrs = cfg['attributes'] + + # Run the cmorization + for (var, var_info) 
in cfg['variables'].items(): + filepath = Path(in_dir) / var_info['filename'] + logger.info("CMORizing variable '%s' from file %s", var, filepath) + glob_attrs['mip'] = var_info['mip'] + cmor_info = cmor_table.get_variable(var_info['mip'], var) + _extract_variable(var_info, cmor_info, glob_attrs, filepath, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/noaaglobaltemp.py b/esmvaltool/cmorizers/data/formatters/datasets/noaaglobaltemp.py new file mode 100644 index 0000000000..eadbcee5f7 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/noaaglobaltemp.py @@ -0,0 +1,81 @@ +"""ESMValTool CMORizer for NOAAGlobalTemp data. + +Tier + Tier 2: other freely-available dataset. + +Source + https://www.ncei.noaa.gov/products/land-based-station/noaa-global-temp + +Last access + 20220628 + +Download and processing instructions + Download the following files: + [SOURCE]/v5/access/gridded/ + NOAAGlobalTemp_v5.0.0_gridded_s188001_e202205_c20220608T133245.nc +""" + +import copy +import logging +import os + +import iris +from cf_units import Unit +from iris import NameConstraint + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def _extract_variable(short_name, var, filename, cfg, in_dir, + out_dir): + """Extract variable.""" + # load data + filepath = os.path.join(in_dir, filename) + raw_var = var.get('raw', short_name) + cube = iris.load_cube(filepath, NameConstraint(var_name=raw_var)) + + # fix time units + cube.coord('time').convert_units( + Unit('days since 1950-1-1 00:00:00', calendar='gregorian')) + + cmor_info = cfg['cmor_table'].get_variable(var['mip'], short_name) + + # Fix coordinates + cube = iris.util.squeeze(cube) + utils.fix_dim_coordnames(cube) + # fix flipped latitude + utils.flip_dim_coord(cube, 'latitude') + utils.fix_dim_coordnames(cube) + cube_coord = cube.coord('latitude') + utils.fix_bounds(cube, cube_coord) + cube_coord = cube.coord('longitude') + utils.fix_bounds(cube, cube_coord) + + # add height2m coordinate + if 'height2m' in cmor_info.dimensions: + utils.add_height2m(cube) + + # Fix metadata and update version information + attrs = copy.deepcopy(cfg['attributes']) + attrs['mip'] = var['mip'] + utils.fix_var_metadata(cube, cmor_info) + utils.set_global_atts(cube, attrs) + + # Save variable + utils.save_variable(cube, + short_name, + out_dir, + attrs, + unlimited_dimensions=['time']) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + # Run the cmorization + for (short_name, var) in cfg['variables'].items(): + for (version, filename) in cfg['filenames'].items(): + logger.info("CMORizing variable '%s' '%s'", short_name, version) + _extract_variable(short_name, var, filename, cfg, in_dir, + out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/nsidc_0116_nh.py b/esmvaltool/cmorizers/data/formatters/datasets/nsidc_0116_nh.py new file mode 100644 index 0000000000..7300648e62 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/nsidc_0116_nh.py @@ -0,0 +1,23 @@ +"""ESMValTool CMORizer for NSIDC-0116 data. + +Tier + Tier 3: restricted dataset.
+ +Source + https://nsidc.org/data/NSIDC-0116 + +Last access + 20190513 + +Download and processing instructions + Download daily data from: + https://nsidc.org/data/NSIDC-0116 + + A login is required for download, but using the data requires only citation. +""" +from esmvaltool.cmorizers.data.formatters.nsidc_common import cmorize + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + cmorize(cfg, 'nh', in_dir, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/nsidc_0116_sh.py b/esmvaltool/cmorizers/data/formatters/datasets/nsidc_0116_sh.py new file mode 100644 index 0000000000..4887723592 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/nsidc_0116_sh.py @@ -0,0 +1,23 @@ +"""ESMValTool CMORizer for NSIDC-0116 data. + +Tier + Tier 3: restricted dataset. + +Source + https://nsidc.org/data/NSIDC-0116 + +Last access + 20190513 + +Download and processing instructions + Download daily data from: + https://nsidc.org/data/NSIDC-0116 + + A login is required for download, but using the data requires only citation. +""" +from esmvaltool.cmorizers.data.formatters.nsidc_common import cmorize + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + cmorize(cfg, 'sh', in_dir, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/nsidc_g02202_sh.py b/esmvaltool/cmorizers/data/formatters/datasets/nsidc_g02202_sh.py new file mode 100644 index 0000000000..202e370043 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/nsidc_g02202_sh.py @@ -0,0 +1,188 @@ +"""ESMValTool CMORizer for Sea Ice Concentration CDR. + +Tier + Tier 3: restricted dataset. + +Source + https://nsidc.org/data/g02202/versions/4 + +Last access + 20231213 + +Download and processing instructions + Download data from: + https://noaadata.apps.nsidc.org/NOAA/G02202_V4/south/monthly + lat and lon from: + https://noaadata.apps.nsidc.org/NOAA/G02202_V4/ancillary/ + area file: + ftp://sidads.colorado.edu/DATASETS/seaice/polar-stereo/tools/ + pss25area_v3.dat + + https://nsidc.org/sites/default/files/g02202-v004-userguide_1_1.pdf + +""" + +import logging +import os +import re + +import numpy as np +import iris +from cf_units import Unit +from iris.coords import AuxCoord + +from esmvalcore.cmor._fixes.common import OceanFixGrid +from esmvalcore.cmor.fixes import get_time_bounds +from esmvaltool.cmorizers.data import utilities as utils + + +logger = logging.getLogger(__name__) + + +def _get_filepaths(in_dir, basename, yyyy): + """Find correct name of file (extend basename with timestamp).""" + f_name = basename.format(year=yyyy) + regex = re.compile(f_name) + return_files = [] + for files in os.listdir(in_dir): + if regex.match(files): + return_files.append(os.path.join(in_dir, files)) + + return return_files + + +def _fix_time_coord(cube, _field, _filename): + """Set time points to central day of month.""" + time_coord = cube.coord('time') + new_unit = Unit('days since 1850-01-01 00:00:00', calendar='standard') + time_coord.convert_units(new_unit) + old_time = new_unit.num2date(time_coord.points) + new_time = [d.replace(day=15) for d in old_time] + time_coord.points = new_unit.date2num(new_time) + + +def _prom_dim_coord(cube, _field, _filename): + iris.util.promote_aux_coord_to_dim_coord(cube, 'time') + + +def _create_coord(cubes, var_name, standard_name): + cube = cubes.extract_cube(standard_name) + coord = AuxCoord( + cube.data, + standard_name=standard_name, + long_name=cube.long_name, + var_name=var_name,
units='degrees' + ) + return coord + + +def _extract_variable(raw_var, cmor_info, attrs, filepath, out_dir, latlon): + """Extract variable from all files.""" + var = cmor_info.short_name + cubes = iris.load(filepath, raw_var, _prom_dim_coord) + iris.util.equalise_attributes(cubes) + + cube = cubes.concatenate_cube() + iris.util.promote_aux_coord_to_dim_coord(cube, 'projection_y_coordinate') + iris.util.promote_aux_coord_to_dim_coord(cube, 'projection_x_coordinate') + + cube.add_aux_coord(latlon[0], (1, 2)) + cube.add_aux_coord(latlon[1], (1, 2)) + + # add area_type coordinate + area_type = AuxCoord([1.0], standard_name='area_type', var_name='type', + long_name='Sea Ice area type') + cube.add_aux_coord(area_type) + + cube.units = '%' + cube.data[cube.data > 100] = np.nan + cube = cube * 100 + + utils.fix_var_metadata(cube, cmor_info) + utils.set_global_atts(cube, attrs) + # latlon are multidimensional, create bounds + siconc = OceanFixGrid(cmor_info) + cube = siconc.fix_metadata(cubes=[cube])[0] + # time bounds + cube.coord('time').bounds = get_time_bounds(cube.coord('time'), + cmor_info.frequency) + + utils.save_variable(cube, + var, + out_dir, + attrs, + unlimited_dimensions=['time']) + + return cube + + +def _create_areacello(cfg, in_dir, sample_cube, glob_attrs, out_dir): + if not cfg['custom'].get('create_areacello', False): + return + var_info = cfg['cmor_table'].get_variable('Ofx', 'areacello') + glob_attrs['mip'] = 'Ofx' + lat_coord = sample_cube.coord('latitude') + + area_file = os.path.join(in_dir, cfg['custom']['area_file']) + with open(area_file, 'rb') as datfile: + areasdmnd = np.fromfile(datfile, + dtype=np.int32).reshape(lat_coord.shape) + + # Values are stored as km2 * 1000: divide by 1000 to get km2, then + # multiply by 1e6 to get m2, i.e. a net factor of 1000 + ardata = areasdmnd * 1000 + + cube = iris.cube.Cube(ardata, + standard_name=var_info.standard_name, + long_name=var_info.long_name, + var_name=var_info.short_name, + units='m2', + # coords()[0] is time; use the y/x cell-index coords + dim_coords_and_dims=[(sample_cube.coords()[1], 0), + (sample_cube.coords()[2], 1)]) + cube.add_aux_coord(lat_coord, (0, 1)) + cube.add_aux_coord(sample_cube.coord('longitude'), (0, 1)) + utils.fix_var_metadata(cube, var_info) + utils.set_global_atts(cube, glob_attrs) + utils.save_variable(cube, var_info.short_name, out_dir, glob_attrs, + zlib=True) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + glob_attrs = cfg['attributes'] + cmor_table = cfg['cmor_table'] + + # get aux nc file + cubesaux = iris.load(os.path.join(in_dir, 'G02202-cdr-ancillary-sh.nc')) + lat_coord = _create_coord(cubesaux, 'lat', 'latitude') + lon_coord = _create_coord(cubesaux, 'lon', 'longitude')
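+ + # Process the input files in yearly batches; the cube returned for the + # last batch that contained data is kept as a grid template, so that + # areacello can be derived from it afterwards.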
+ sample_cube = None + for year in range(1979, 2022, 1): + + filepaths = _get_filepaths(in_dir, cfg['filename'], year) + + if len(filepaths) > 0: + logger.info("Year %d: Found %d files in '%s'", + year, len(filepaths), in_dir) + + for (var, var_info) in cfg['variables'].items(): + logger.info("CMORizing variable '%s'", var) + glob_attrs['mip'] = var_info['mip'] + cmor_info = cmor_table.get_variable(var_info['mip'], var) + raw_var = var_info.get('raw', var) + sample_cube = _extract_variable(raw_var, cmor_info, + glob_attrs, filepaths, + out_dir, [lat_coord, + lon_coord]) + + else: + logger.info("No files found year: %d basename: %s", + year, cfg['filename']) + + if sample_cube is not None: + _create_areacello(cfg, in_dir, sample_cube, glob_attrs, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/oceansoda_ethz.py b/esmvaltool/cmorizers/data/formatters/datasets/oceansoda_ethz.py new file mode 100644 index 0000000000..2e8baf2c8f --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/oceansoda_ethz.py @@ -0,0 +1,142 @@ +"""ESMValTool CMORizer for OceanSODA-ETHZ data. + +Tier + Tier 2: other freely-available dataset. + +Source + https://www.ncei.noaa.gov/data/oceans/ncei/ocads/data/0220059/ + +Last access + 20240215 + +Download and processing instructions + Download the file OceanSODA_ETHZ-v2023.OCADS.01_1982-2022.nc + +""" + +import logging +import warnings +from datetime import datetime +from pathlib import Path + +import iris +from dask import array as da +from iris import NameConstraint + +from esmvaltool.cmorizers.data import utilities as utils +from esmvaltool.diag_scripts.shared.iris_helpers import unify_time_coord + +logger = logging.getLogger(__name__) + + +def _fix_coords(cube, cmor_info): + """Fix coordinates.""" + # Dimensional coordinates + if 'time' in cmor_info.dimensions: + unify_time_coord(cube, 'days since 1950-01-01 00:00:00') + + # Move time points to center of month + time_coord = cube.coord('time') + old_dates = time_coord.units.num2date(time_coord.points) + new_dates = [datetime(t.year, t.month, 15) for t in old_dates] + time_coord.points = time_coord.units.date2num(new_dates) + cube.coord('lat').standard_name = 'latitude' + cube.coord('lon').standard_name = 'longitude' + cube = utils.fix_coords(cube) + + # Scalar coordinates + if cmor_info.short_name in ('fgco2', 'spco2'): + utils.add_scalar_depth_coord(cube) + + return cube + + +def _fix_data(cube, var): + """Fix data.""" + if var == 'areacello': + cube.data = da.ma.masked_equal(cube.core_data(), 0.0) + + +def _fix_var_metadata(var_info, cmor_info, attrs, cube): + """Fix variable metadata.""" + if 'raw_units' in var_info: + cube.units = var_info['raw_units'] + + # fgco2: + # Convert from mol(CO2) to kgC (note that one CO2 molecule contains one C + # atom) and fix wrong sign (the dataset reports sea->air flux, while CMOR + # expects "positive into ocean") + if cmor_info.short_name == 'fgco2': + cube.data = -cube.core_data() * 12.01 # molar mass of C [g/mol] + cube.units *= 'g mol-1' + attrs['positive'] = 'down' + + # co3os, dissicos, talkos: + # The original units of these variables are mumol/kg. 
To convert to the + # CMOR units mol/m3, we assume a constant sea water density of 1028 kg/m3, + # which is approximately the sea water density for T=4°C, salinity=35PSU, + # and p=0bar according to the UNESCO formula (UNESCO, 1981, Tenth report of + # the joint panel on oceanographic tables and standards, UNESCO Technical + # Papers in Marine Science, see + # https://www.wkcgroup.com/tools-room/seawater-density-calculator/ and + # https://link.springer.com/content/pdf/bbm:978-3-319-18908-6/1.pdf). + if cmor_info.short_name in ('co3os', 'dissicos', 'talkos'): + cube.data = cube.core_data() * 1028.0 + cube.units *= 'kg m-3' + + cube.convert_units(cmor_info.units) + + utils.fix_var_metadata(cube, cmor_info) + + +def _extract_variable(var_info, cmor_info, attrs, filepath, out_dir): + """Extract variable.""" + var = cmor_info.short_name + raw_var = var_info.get('raw_name', var) + + # Load data + with warnings.catch_warnings(): + warnings.filterwarnings( + action='ignore', + message='Ignoring netCDF variable .* invalid units .*', + category=UserWarning, + module='iris', + ) + cube = iris.load_cube(filepath, NameConstraint(var_name=raw_var)) + + # Fix data + _fix_data(cube, var) + + # Fix variable metadata + _fix_var_metadata(var_info, cmor_info, attrs, cube) + + # Fix coordinates + cube = _fix_coords(cube, cmor_info) + + # Fix global metadata + utils.set_global_atts(cube, attrs) + + # Save variable + utils.save_variable( + cube, + var, + out_dir, + attrs, + local_keys=['comment', 'positive'], + unlimited_dimensions=['time'], + ) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + cmor_table = cfg['cmor_table'] + glob_attrs = cfg['attributes'] + + # Run the cmorization + for (var, var_info) in cfg['variables'].items(): + filepath = Path(in_dir) / var_info['filename'] + logger.info("CMORizing variable '%s' from file %s", var, filepath) + glob_attrs['comment'] = var_info.get('comment', '') + glob_attrs['mip'] = var_info['mip'] + cmor_info = cmor_table.get_variable(var_info['mip'], var) + _extract_variable(var_info, cmor_info, glob_attrs, filepath, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/osi_450_nh.py b/esmvaltool/cmorizers/data/formatters/datasets/osi_450_nh.py new file mode 100644 index 0000000000..2b15b32969 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/osi_450_nh.py @@ -0,0 +1,30 @@ +"""ESMValTool CMORizer for OSI-SAF data. + +Tier + Tier 2: other freely-available dataset. + +Source + http://osisaf.met.no/p/ice/ + +Last access + 20190502 + +Download and processing instructions + Download the desired years from the following ftp: + ftp://osisaf.met.no/reprocessed/ice/conc/v2p0 + Please, keep folder structure. + + If you want to use only the nh data download only the nh files, + using, e.g., wget -r -A '*_nh*.nc'. + + If you also want to cmorize the sh, download everything and create a link + for OSI-450-sh pointing to the data folder. 
Both cmorizers will ignore + files belonging to the other hemisphere. +""" +from esmvaltool.cmorizers.data.formatters.osi_common import OSICmorizer + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + cmorizer = OSICmorizer(in_dir, out_dir, cfg, 'nh') + cmorizer.cmorize() diff --git a/esmvaltool/cmorizers/data/formatters/datasets/osi_450_sh.py b/esmvaltool/cmorizers/data/formatters/datasets/osi_450_sh.py new file mode 100644 index 0000000000..b58d1ac6c5 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/osi_450_sh.py @@ -0,0 +1,30 @@ +"""ESMValTool CMORizer for OSI-SAF data. + +Tier + Tier 2: other freely-available dataset. + +Source + http://osisaf.met.no/p/ice/ + +Last access + 20190502 + +Download and processing instructions + Download the desired years from the following ftp: + ftp://osisaf.met.no/reprocessed/ice/conc/v2p0 + Please, keep folder structure. + + If you want to use only the sh data download only the sh files, + using, e.g., wget -r -A '*_sh*.nc'. + + If you also want to cmorize the nh, download everything and create a link + for OSI-450-nh pointing to the data folder. Both cmorizers will ignore + files belonging to the other hemisphere. +""" +from esmvaltool.cmorizers.data.formatters.osi_common import OSICmorizer + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + cmorizer = OSICmorizer(in_dir, out_dir, cfg, 'sh') + cmorizer.cmorize() diff --git a/esmvaltool/cmorizers/data/formatters/datasets/patmos_x.ncl b/esmvaltool/cmorizers/data/formatters/datasets/patmos_x.ncl new file mode 100644 index 0000000000..2f28593602 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/patmos_x.ncl @@ -0,0 +1,223 @@ +; ############################################################################# +; ESMValTool CMORizer for PATMOS-x data +; ############################################################################# +; +; Tier +; Tier 2: other freely-available dataset. +; +; Source +; https://www.ncdc.noaa.gov/cdr/atmospheric/avhrr-cloud-properties-patmos-x +; +; Last access +; 20190210 +; +; Download and processing instructions +; Click on Download and download all the NOAA data, excluding the +; preliminary ones, e.g. with: +; wget -r --accept '*NOAA*.nc' --reject '*preliminary*' +; Put all files in input_dir_path (no subdirectories with years). +; Select only complete years for both ascending and descending orbit. +; +; Caveats +; The data are processed by calculating the average of the ascending and +; the descending orbit on each day. Multiple files are available for some +; days; in these cases the most recent version (NOAA-vv) is chosen. +; +; Modification history +; 20190208-righi_mattia: written.
+; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + \ + "/data/formatters/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "patmos_x.ncl" + + ; Source name + OBSNAME = "PATMOS-x" + + ; Tier + TIER = 2 + + ; Period + YEAR1 = get_year(start_year, 1982) + YEAR2 = get_year(end_year, 2016) + + ; Selected variable (standard name) + VAR = (/"clt"/) + + ; Name in the raw data + NAME = (/"cloud_fraction"/) + + ; MIP + MIP = (/"Amon"/) + + ; Frequency + FREQ = (/"mon"/) + + ; CMOR table + CMOR_TABLE = getenv("cmor_tables") + "/cmip5/Tables/CMIP5_" + MIP + + ; Type + TYPE = "sat" + + ; Version + VERSION = "NOAA" + + ; Global attributes + SOURCE = "https://www.ncdc.noaa.gov/cdr/atmospheric/avhrr-cloud-" + \ + "properties-patmos-x" + REF = "Heidinger et al., Bull. Amer. Meteor. Soc., " + \ + "doi:10.1175/BAMS-D-12-00246.1, 2013." + COMMENT = "" + +end + +begin + + ; Read coordinates + files = systemfunc("ls " + input_dir_path + "patmosx_*" + YEAR1 + "*.nc") + f = addfile(files(0), "r") + tmp = f->latitude + lat = tmp * tmp@scale_factor + tmp@add_offset + nlat = dimsizes(lat) + delete(tmp) + tmp = f->longitude + lon = tmp * tmp@scale_factor + tmp@add_offset + nlon = dimsizes(lon) + delete(tmp) + delete(files) + delete(f) + + do vv = 0, dimsizes(VAR) - 1 + + log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") + + do yy = YEAR1, YEAR2 + + ; Define output monthly-mean array + output = new((/12, nlat, nlon/), float) + output!0 = "time" + output!1 = "lat" + output!2 = "lon" + output&time = create_timec(yy, yy) + output&lat = lat + output&lon = lon + + do mm = 1, 12 + + ; Number of days + nd = days_in_month(yy, mm) + + ; Define local array + output_temp = new((/nd, nlat, nlon/), float) + + ; Date string for this month + yyyymm = yy + sprinti("%0.2i", mm) + + do dd = 1, nd + + ; Date string for this day + yyyymmdd = yy + sprinti("%0.2i", mm) + sprinti("%0.2i", dd) + + ; Ascending orbit + files_asc = systemfunc("ls " + input_dir_path + \ + "patmosx_v??r??_NOAA-??_asc_d" + \ + yyyymm + "??_c*.nc | grep asc_d" + yyyymmdd) + if (.not.all(ismissing(files_asc))) then + ; Read most recent file + f = addfile(files_asc(dimsizes(files_asc) - 1), "r") + tmp = f->$NAME(vv)$ + xasc = tmp * tmp@scale_factor + tmp@add_offset + delete(tmp) + end if + delete(files_asc) + + ; Descending orbit + files_des = systemfunc("ls " + input_dir_path + \ + "patmosx_v??r??_NOAA-??_des_d" + \ + yyyymm + "??_c*.nc | grep des_d" + yyyymmdd) + if (.not.all(ismissing(files_des))) then + ; Read most recent file + f = addfile(files_des(dimsizes(files_des) - 1), "r") + tmp = f->$NAME(vv)$ + xdes = tmp * tmp@scale_factor + tmp@add_offset + delete(tmp) + end if + delete(files_des) + + ; Skip if no data defined (output_temp will stay missing) + if (.not.isdefined("xasc") .and. 
.not.isdefined("xdes")) then + continue + end if + + if (.not.isdefined("xasc")) then + output_temp(dd, :, :) = (/xdes/) + delete(xdes) + continue + end if + + if (.not.isdefined("xdes")) then + output_temp(dd, :, :) = (/xasc/) + delete(xasc) + continue + end if + + ; Replace missing values in one orbit with valid values from the + ; other orbit, to avoid propagating missing values while averaging + xasc = where(ismissing(xasc), xdes, xasc) + xdes = where(ismissing(xdes), xasc, xdes) + + output_temp(dd - 1, :, :) = 0.5 * (xasc + xdes) + delete(xasc) + delete(xdes) + + end do ; day + + ; Monthly mean + output(mm - 1, :, :) = dim_avg_n(output_temp, 0) + delete(output_temp) + + end do ; month + + if (VAR(vv).eq."clt") then + output = 100. * output ; [1] --> [%] + end if + + ; Format coordinates + output!0 = "time" + output!1 = "lat" + output!2 = "lon" + format_coords(output, yy + "0101", yy + "1231", FREQ(vv)) + + ; Set variable attributes + tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ(vv)) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = yy + "01-" + yy + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" + + ; Write variable + write_nc(fout, VAR(vv), output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + end do ; year + + end do ; variable + +end diff --git a/esmvaltool/cmorizers/data/formatters/datasets/persiann_cdr.py b/esmvaltool/cmorizers/data/formatters/datasets/persiann_cdr.py new file mode 100644 index 0000000000..1b72aaddb5 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/persiann_cdr.py @@ -0,0 +1,153 @@ +"""ESMValTool CMORizer for PERSIANN-CDR data. + +Tier + Tier 2: other freely-available dataset. + +Source + https://www.ncei.noaa.gov/data/precipitation-persiann/access/ + +Last access + 20200422 + +Download and processing instructions + Files are available free for download on the indicated site. + Files are stored as daily nc-files in individual year + folders. + Please copy all files in a single directory. 
+""" + +import glob +import logging +import os +import warnings +from pprint import pformat + +import dask.array as da +import iris +import iris.coord_categorisation +import numpy as np + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def fix_coord_dimensions(cube): + """Fix the order of lat and lon.""" + # Swap latitude and longitude coordinates + flipped_data = np.moveaxis(cube.core_data(), 2, 1) + coord_spec = [(cube.coord('time'), 0), (cube.coord('latitude'), 1), + (cube.coord('longitude'), 2)] + new_cube = iris.cube.Cube(flipped_data, dim_coords_and_dims=coord_spec) + new_cube.metadata = cube.metadata + + # Reverse cube along latitude coordinate and fix latitude coordinate + lat_coord = new_cube.coord('latitude') + new_cube = iris.util.reverse(new_cube, lat_coord) + if not lat_coord.is_contiguous(): + lat_coord.bounds = None + lat_coord.guess_bounds() + return new_cube + + +def _get_input_files(in_dir, cfg): + """Get input files.""" + pattern = os.path.join(in_dir, cfg['input_file_pattern']) + input_files = glob.glob(pattern) + logger.debug("Found input files:\n%s", pformat(input_files)) + return input_files + + +def _preprocess_cubes(cubes): + """Remove attributes from cubes that prevent concatenation.""" + new_cubes = iris.cube.CubeList() + for cube in cubes: + cube.attributes.pop('datetime', None) + cube.attributes.pop('date_created', None) + cube.attributes.pop('id', None) + cube.attributes.pop('time_coverage_start', None) + cube.attributes.pop('time_coverage_end', None) + cube.attributes.pop('metadata_link', None) + cube.attributes.pop('source', None) + new_cube = fix_coord_dimensions(cube) + new_cubes.append(new_cube) + return new_cubes + + +def _load_cube(input_files): + """Load single :class:`iris.cube.Cube`.""" + with warnings.catch_warnings(): + warnings.filterwarnings( + 'ignore', + message='Ignoring netCDF variable', + category=UserWarning, + module='iris', + ) + cubes = iris.load(input_files) + cubes = _preprocess_cubes(cubes) + try: + cube = cubes.concatenate_cube() + except iris.exceptions.ConcatenateError: + if cubes[0].coords('time'): + raise + cube = cubes[0] + return cube + + +def _extract_variable(short_name, var, cfg, input_files, out_dir): + """Extract variable.""" + cmor_info = cfg['cmor_table'].get_variable(var['mip'], short_name) + + # Extract data + cube = _load_cube(input_files) + cube.var_name = short_name + + # Fix fill values + cube.data = da.ma.masked_equal(cube.core_data(), -9999.0) + + # Convert data from precipitation_amount to precipitation_flux + # divide 'mm' values by the number of seconds in one day + cube.data = cube.core_data() / 86400.0 + + # Fix units + cube.units = 'kg m-2 s-1' + + # Fix coordinates + cube = utils.fix_coords(cube) + + # Fix metadata + attrs = cfg['attributes'] + attrs['mip'] = var['mip'] + utils.fix_var_metadata(cube, cmor_info) + utils.set_global_atts(cube, attrs) + + # Save variable in daily files + iris.coord_categorisation.add_year(cube, 'time', name='year') + years = list(set(cube.coord('year').points)) + for year in years: + cube_slice = cube.extract(iris.Constraint(year=year)) + utils.save_variable(cube_slice, + short_name, + out_dir, + attrs, + unlimited_dimensions=['time']) + + # Save variable in monthly files + iris.coord_categorisation.add_month_number(cube, 'time', name='month') + cube = cube.aggregated_by(['month', 'year'], iris.analysis.MEAN) + attrs['mip'] = "Amon" + utils.save_variable(cube, + short_name, + out_dir, + attrs, + 
unlimited_dimensions=['time']) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + input_files = _get_input_files(in_dir, cfg) + + # Run the cmorization + for (short_name, var) in cfg['variables'].items(): + logger.info("CMORizing variable '%s'", short_name) + _extract_variable(short_name, var, cfg, input_files, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/phc.py b/esmvaltool/cmorizers/data/formatters/datasets/phc.py new file mode 100644 index 0000000000..84a924d48d --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/phc.py @@ -0,0 +1,125 @@ +"""ESMValTool CMORizer for PHC data. + +Tier + Tier 2: other freely-available dataset. + +Source + http://psc.apl.washington.edu/nonwp_projects/PHC/Data3.html + +Last access + 20190131 + +Go to `DOWNLOAD DATA (NetCDF)` and download the `ANNUAL` fields +for both `TEMPERATURE` and `SALINITY`. +""" +import logging +import os +from collections import OrderedDict + +import iris +import numpy as np +import seawater as sw +import xarray as xr + +from esmvaltool.cmorizers.data.utilities import ( + fix_coords, + fix_var_metadata, + save_variable, + set_global_atts, +) + +logger = logging.getLogger(__name__) + + +def save_fx_variable(cube, var, out_dir, attrs): + """Saver function for fx variable.""" + file_name = '_'.join([ + attrs['project_id'], attrs['dataset_id'], attrs['modeling_realm'], + attrs['version'], attrs['mip'], var + ]) + '.nc' + file_path = os.path.join(out_dir, file_name) + iris.save(cube, file_path, fill_value=1e20) + + +def _fix_fx_areacello(xr_time, var): + """Specific data fix for areacello.""" + cube = xr_time.salt.to_iris() + cube.coord('latitude').guess_bounds() + cube.coord('longitude').guess_bounds() + grid_areas = iris.analysis.cartography.area_weights(cube) + grid_areas_xr = xr.DataArray( + grid_areas[0, 0, :, :], + coords={'lat': xr_time.temp.coords['lat'], + 'lon': xr_time.temp.coords['lon'], }, + dims=['lat', 'lon'], + name=var) + grid_areas_xr.attrs = OrderedDict([('cell_area', 'Ocean Grid-Cell Area'), + ('units', 'm2')]) + cube = grid_areas_xr.to_iris() + return cube.copy() + + +def _fix_data(xr_time, var): + """Specific data fixes for different variables.""" + logger.info("Fixing data ...") + if var == 'thetao': + depth3d = np.zeros(xr_time.temp.shape[1:]) + for i in range(xr_time.depth.shape[0]): + depth3d[i, :, :] = xr_time.depth[i] + ptemp = sw.ptmp(xr_time.salt[0, :], xr_time.temp[0, :], depth3d) + ptemp = np.expand_dims(ptemp, axis=0) + temp_new = xr.DataArray( + ptemp + 273.15, + coords={'time': xr_time.temp.coords['time'], + 'depth': xr_time.temp.coords['depth'], + 'lat': xr_time.temp.coords['lat'], + 'lon': xr_time.temp.coords['lon'], }, + dims=['time', 'depth', 'lat', 'lon']) + + temp_new.attrs = OrderedDict([('standard_name', + 'sea_water_potential_temperature'), + ('units', 'K')]) + cube = temp_new.to_iris() + elif var == 'so': + cube = xr_time.salt.to_iris() + elif var == 'areacello': + cube = _fix_fx_areacello(xr_time, var) + else: + return None + return cube.copy() + + +def extract_variable(var_info, raw_info, out_dir, attrs): + """Extract to all vars.""" + var = var_info.short_name + xr_file = xr.open_dataset(raw_info['file']) + xr_time = xr_file.expand_dims('time') + xr_time = xr_time.assign_coords(time=[1]) + xr_time.time.attrs = OrderedDict([('standard_name', 'time'), + ('units', 'days since 1950-1-1 00:00:00') + ]) + + cube = _fix_data(xr_time, var) + fix_var_metadata(cube, var_info) + cube = fix_coords(cube) + 
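+ # areacello is time-invariant, so below it is written with the dedicated + # fx saver instead of the regular time-series saver.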
set_global_atts(cube, attrs) + print(out_dir) + if var != "areacello": + save_variable(cube, var, out_dir, attrs, unlimited_dimensions=['time']) + else: + save_fx_variable(cube, var, out_dir, attrs) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + cmor_table = cfg['cmor_table'] + glob_attrs = cfg['attributes'] + + # run the cmorization + for var, vals in cfg['variables'].items(): + inpfile = os.path.join(in_dir, vals['file']) + logger.info("CMORizing var %s from file %s", var, inpfile) + var_info = cmor_table.get_variable(vals['mip'], var) + raw_info = {'name': vals['raw'], 'file': inpfile} + glob_attrs['mip'] = vals['mip'] + extract_variable(var_info, raw_info, out_dir, glob_attrs) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/piomas.py b/esmvaltool/cmorizers/data/formatters/datasets/piomas.py new file mode 100644 index 0000000000..75c3d38150 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/piomas.py @@ -0,0 +1,173 @@ +"""ESMValTool CMORizer for PIOMAS data. + +Tier + Tier 2: other freely-available dataset. + +Source + http://psc.apl.uw.edu/research/projects/arctic-sea-ice-volume-anomaly/data/model_grid + +Last access + 20190510 + +Download and processing instructions + Download and unpack the sithick files from: + https://pscfiles.apl.washington.edu/zhang/PIOMAS/data/v2.1/hiday/ + + And the grid info files from: + https://pscfiles.apl.washington.edu/zhang/PIOMAS/utilities/grid.dat + https://pscfiles.apl.washington.edu/zhang/PIOMAS/utilities/grid.dat.pop + + Other variables provided by PIOMAS are not supported, but extending support + should be achievable for most of them just modifying the config file +""" + +import collections +import glob +import logging +import os + +import iris +import numpy as np +from cf_units import Unit +from iris.coords import AuxCoord, DimCoord + +from esmvaltool.cmorizers.data.utilities import save_variable, set_global_atts + +logger = logging.getLogger(__name__) + +# read in CMOR configuration + +NX = 360 +NY = 120 + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + glob_attrs = cfg['attributes'] + + cmorizer = PIOMAS(cfg, in_dir, out_dir) + logger.info("Starting cmorization for Tier%s OBS files: %s", + glob_attrs['tier'], glob_attrs['dataset_id']) + logger.info("Input data from: %s", in_dir) + logger.info("Output will be written to: %s", out_dir) + cmorizer.prepare_grid_info() + cmorizer.cmorize() + + +Coords = collections.namedtuple('Coords', 'lat lon') + + +class PIOMAS(): + """Cmorizer class for PIOMAS.""" + def __init__(self, cfg, in_dir, out_dir): + self.cfg = cfg + self.in_dir = in_dir + self.out_dir = out_dir + self.scalar_coords = None + self.vector_coords = None + self.areacello = None + + def prepare_grid_info(self): + """Read grid information.""" + grids = np.loadtxt( + os.path.join(self.in_dir, self.cfg['custom']['scalar_file'])) + grids = grids.reshape(2, NY, NX) + self.scalar_coords = self._create_lat_lon_coords( + grids[1, ...], grids[0, ...]) + + grids = np.loadtxt( + os.path.join(self.in_dir, self.cfg['custom']['vector_file'])) + grids = grids.reshape(7, NY, NX) + self.vector_coords = self._create_lat_lon_coords( + grids[1, ...], grids[0, ...]) + + # Area in m2 + self.areacello = grids[2, ...] * grids[3, ...] 
* 1e6 + + def cmorize(self): + """Cmorize available data.""" + # run the cmorization + for var, vals in self.cfg['variables'].items(): + var_info = self.cfg['cmor_table'].get_variable(vals['mip'], var) + self.cfg['attributes']['mip'] = vals['mip'] + + if vals['type'] == 'scalar': + coords = self.scalar_coords + else: + coords = self.vector_coords + if var == "areacello": + cube = self._create_areacello(coords, var_info) + set_global_atts(cube, self.cfg['attributes']) + save_variable( + cube, + var_info.short_name, + self.out_dir, + self.cfg['attributes'], + ) + else: + self._cmorize_var(var_info, vals, coords) + + def _cmorize_var(self, var_info, vals, coords): + file_expression = os.path.join(self.in_dir, + '{0}.H????'.format(vals['raw'])) + for file_path in glob.glob(file_expression): + cube = PIOMAS._create_cube(PIOMAS._read_binary_file(file_path), + coords, int(file_path[-4:]), var_info, + vals['units']) + set_global_atts(cube, self.cfg['attributes']) + save_variable(cube, var_info.short_name, self.out_dir, + self.cfg['attributes']) + + @staticmethod + def _create_lat_lon_coords(lat, lon): + lon_coord = AuxCoord(lon, + standard_name='longitude', + var_name='lon', + units='degrees_east') + + lat_coord = AuxCoord(lat, + standard_name='latitude', + var_name='lat', + units='degrees_north') + return Coords(lat_coord, lon_coord) + + @staticmethod + def _create_cube(data, coords, year, var_info, raw_units): + time_coord = DimCoord( + np.arange(0, data.shape[0]), + standard_name='time', + var_name='time', + units=Unit(f'days since {year}-01-01', calendar='noleap'), + ) + + cube = iris.cube.Cube( + data, + standard_name=var_info.standard_name, + var_name=var_info.short_name, + units=raw_units, + ) + cube.add_dim_coord(time_coord, 0) + cube.add_aux_coord(coords.lon, (1, 2)) + cube.add_aux_coord(coords.lat, (1, 2)) + return cube + + def _create_areacello(self, coords, var_info): + cube = iris.cube.Cube( + self.areacello, + standard_name=var_info.standard_name, + var_name=var_info.short_name, + units='m2', + ) + cube.add_aux_coord(coords.lon, (0, 1)) + cube.add_aux_coord(coords.lat, (0, 1)) + return cube + + @staticmethod + def _read_binary_file(data_path, vector=False): + fd_data = open(data_path, 'rb') + data = np.fromfile(fd_data, dtype=np.dtype('f'), count=-1) + days = data.shape[0] // NX // NY + data = data.reshape(days, NY, NX) + if vector: + return data[0:days:2, ...], data[1:days:2, ...] + return data diff --git a/esmvaltool/cmorizers/data/formatters/datasets/regen.py b/esmvaltool/cmorizers/data/formatters/datasets/regen.py new file mode 100644 index 0000000000..f38424ae20 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/regen.py @@ -0,0 +1,95 @@ +"""ESMValTool CMORizer for REGEN data. + +Tier + Tier 2: other freely-available dataset. 
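The PIOMAS reader above (`_read_binary_file`) treats each daily file as a flat float32 stream, C-ordered as (day, y, x) on the fixed 360x120 grid, with vector fields interleaving their two components day by day. A standalone sketch against a synthetic file:

```python
# Sketch of the PIOMAS binary layout; the file here is synthetic.
import numpy as np

NX, NY = 360, 120
days = 4
np.arange(days * NY * NX, dtype='f4').tofile('sample.H1990')

data = np.fromfile('sample.H1990', dtype=np.dtype('f')).reshape(-1, NY, NX)
print(data.shape)        # (4, 120, 360)
# For vector fields, even records are one component, odd the other:
u, v = data[0::2], data[1::2]
print(u.shape, v.shape)  # (2, 120, 360) each
```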
+ +Source + https://researchdata.ands.org.au/rainfall-estimates-gridded-v1-2019/1408744 +Last access + 20200226 + +Download and processing instructions + Download the following files: + REGEN_AllStns_{version}_[1950..2016].nc +""" + +import logging +from pathlib import Path + +import cf_units +import iris +from esmvalcore.preprocessor import monthly_statistics +from iris import NameConstraint + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def _extract_variable(short_name, var, cfg, file_path, out_dir): + """Extract variable.""" + raw_var = var.get('raw', short_name) + cube = iris.load_cube(file_path, NameConstraint(var_name=raw_var)) + + # Fix units + cmor_info = cfg['cmor_table'].get_variable(var['mip'], short_name) + if 'raw_units' in var: + cube.units = var['raw_units'] + cube.convert_units(cmor_info.units) + + # Fix calendar type + cal_time = var.get('calendar', short_name) + origin_time = cube.coord('time').units.origin + cube.coord('time').units = cf_units.Unit(origin_time, calendar=cal_time) + utils.convert_timeunits(cube, 1950) + + # Fix coordinates + cube = utils.fix_coords(cube) + + # Fix metadata + attrs = cfg['attributes'] + attrs['mip'] = var['mip'] + utils.fix_var_metadata(cube, cmor_info) + utils.set_global_atts(cube, attrs) + + # Save variable + utils.save_variable(cube, + short_name, + out_dir, + attrs, + unlimited_dimensions=['time']) + + if 'add_mon' in var.keys(): + if var['add_mon']: + logger.info("Building monthly means") + + # Calc monthly + cube = monthly_statistics(cube) + cube.remove_coord('month_number') + cube.remove_coord('year') + + # Fix metadata + attrs['mip'] = 'Amon' + + # Fix coordinates + cube = utils.fix_coords(cube) + + # Save variable + utils.save_variable(cube, + short_name, + out_dir, + attrs, + unlimited_dimensions=['time']) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + raw_filename = cfg['filename'] + file_names = raw_filename.format(version=cfg['attributes']['version']) + + # Run the cmorization + for (short_name, var) in cfg['variables'].items(): + logger.info("CMORizing variable '%s'", short_name) + for file_path in sorted(Path(in_dir).glob(file_names)): + logger.info("Loading '%s'", file_path) + _extract_variable(short_name, var, cfg, str(file_path), out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/scripps_co2_kum.py b/esmvaltool/cmorizers/data/formatters/datasets/scripps_co2_kum.py new file mode 100644 index 0000000000..6a3ccf6ac0 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/scripps_co2_kum.py @@ -0,0 +1,120 @@ +"""ESMValTool CMORizer for Scripps-CO2-KUM data. + +Tier + Tier 2: other freely-available dataset. 
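The calendar fix in REGEN's `_extract_variable` above keeps the numeric time points and the origin but reinterprets them under the calendar named in the dataset config. A minimal sketch of the same assignment, with illustrative values:

```python
# Reinterpret an existing time origin under a different calendar
# without touching the points. Values are illustrative.
import cf_units
import numpy as np
from iris.coords import DimCoord
from iris.cube import Cube

time = DimCoord(np.arange(3, dtype=float), standard_name='time',
                units=cf_units.Unit('days since 1950-01-01',
                                    calendar='standard'))
cube = Cube(np.zeros(3), dim_coords_and_dims=[(time, 0)])

origin = cube.coord('time').units.origin
cube.coord('time').units = cf_units.Unit(origin,
                                         calendar='proleptic_gregorian')
print(cube.coord('time').units.calendar)  # proleptic_gregorian
```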
+ +Source + https://scrippsco2.ucsd.edu/data/atmospheric_co2/kum.html + +Last access + 20200422 + +Download and processing instructions + Download the following file: + monthly_flask_co2_kum.csv +""" + +import logging +import os +from datetime import datetime + +import iris +import numpy as np +import pandas as pd +from cf_units import Unit + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + +LAT_COORD = iris.coords.DimCoord([19.5], + var_name='lat', + standard_name='latitude', + long_name='latitude', + units='degrees') +LON_COORD = iris.coords.DimCoord([205.2], + var_name='lon', + standard_name='longitude', + long_name='longitude', + units='degrees') + + +def _get_time_coord(year, month): + """Get time coordinate.""" + point = datetime(year=year, month=month, day=15) + bound_low = datetime(year=year, month=month, day=1) + if month == 12: + month_bound_up = 1 + year_bound_up = year + 1 + else: + month_bound_up = month + 1 + year_bound_up = year + bound_up = datetime(year=year_bound_up, month=month_bound_up, day=1) + time_units = Unit('days since 1950-01-01 00:00:00', calendar='standard') + time_coord = iris.coords.DimCoord( + time_units.date2num(point), + bounds=time_units.date2num([bound_low, bound_up]), + var_name='time', + standard_name='time', + long_name='time', + units=time_units, + ) + return time_coord + + +def _get_cube(row, column_name): + """Create :class:`iris.cube.Cube` from :class:`pandas.Series`.""" + time_coord = _get_time_coord(int(row['Yr']), int(row['Mn'])) + lat_coord = LAT_COORD.copy() + lon_coord = LON_COORD.copy() + data = np.ma.masked_invalid(row[tuple(column_name)]) + cube = iris.cube.Cube( + data.reshape((1, 1, 1)), + dim_coords_and_dims=[(time_coord, 0), (lat_coord, 1), (lon_coord, 2)], + units='ppm', + ) + return cube + + +def _extract_variable(short_name, var, cfg, filepath, out_dir): + """Extract variable.""" + data_frame = pd.read_csv(filepath, comment='"', header=[0, 1, 2], + sep=r'\s*,\s*', engine='python') + data_frame = data_frame.rename( + columns=lambda x: '' if 'Unnamed' in x else x) + + # Extract cube + cubes = iris.cube.CubeList() + for (_, row) in data_frame.iterrows(): + cube = _get_cube(row, var['column_name']) + cubes.append(cube) + cube = cubes.concatenate_cube() + cube.var_name = short_name + + # Fix metadata + utils.convert_timeunits(cube, 1950) + cube = utils.fix_coords(cube) + cmor_info = cfg['cmor_table'].get_variable(var['mip'], short_name) + cube.convert_units(cmor_info.units) + attrs = cfg['attributes'] + attrs['mip'] = var['mip'] + utils.fix_var_metadata(cube, cmor_info) + utils.set_global_atts(cube, attrs) + + # Save variable + utils.save_variable(cube, + short_name, + out_dir, + attrs, + unlimited_dimensions=['time']) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + filepath = os.path.join(in_dir, cfg['filename']) + logger.info("Reading file '%s'", filepath) + + # Run the cmorization + for (short_name, var) in cfg['variables'].items(): + logger.info("CMORizing variable '%s'", short_name) + _extract_variable(short_name, var, cfg, filepath, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/tcom_ch4.py b/esmvaltool/cmorizers/data/formatters/datasets/tcom_ch4.py new file mode 100644 index 0000000000..f204c4bb44 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/tcom_ch4.py @@ -0,0 +1,106 @@ +"""ESMValTool CMORizer for TCOM-CH4 data. + +Tier + Tier 2: other freely-available dataset. 
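`_get_time_coord` above places each sample at mid-month and bounds it by the first of the month and the first of the next month, rolling December over into January of the following year. A standalone sketch of that logic (the helper name is illustrative):

```python
# Mid-month point with [month start, next month start] bounds.
from datetime import datetime

from cf_units import Unit


def month_point_and_bounds(year, month):
    units = Unit('days since 1950-01-01 00:00:00', calendar='standard')
    point = datetime(year, month, 15)
    lower = datetime(year, month, 1)
    upper = (datetime(year + 1, 1, 1) if month == 12
             else datetime(year, month + 1, 1))
    return units.date2num(point), units.date2num([lower, upper])


print(month_point_and_bounds(2000, 12))
```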
+ +Source + https://zenodo.org/record/7293740 + +Last access + 20230117 + +Download and processing instructions + Download the file zmch4_TCOM_plev_T2Dz_1991_2021.nc + +""" + +import logging +import warnings +from pathlib import Path + +import iris +from iris import NameConstraint + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def _fix_var_metadata(var_info, cmor_info, cube): + """Fix variable metadata.""" + if 'raw_units' in var_info: + cube.units = var_info['raw_units'] + + cube.convert_units(cmor_info.units) + + utils.fix_var_metadata(cube, cmor_info) + + +def _fix_coords(cube): + """Fix coordinates.""" + utils.fix_dim_coordnames(cube) + + # Time + cube.coord('time').guess_bounds() + + # Pressure levels + cube.coord(axis='Z').standard_name = 'air_pressure' + cube.coord(axis='Z').long_name = 'pressure' + cube.coord(axis='Z').convert_units('Pa') + + # Latitude + utils.flip_dim_coord(cube, 'latitude') + cube.coord('latitude').guess_bounds() + + # Longitude + cube.coord('longitude').points = [180.0] + cube.coord('longitude').bounds = [[0.0, 360]] + + +def _extract_variable(var_info, cmor_info, attrs, filepath, out_dir): + """Extract variable.""" + var = cmor_info.short_name + raw_var = var_info.get('raw_name', var) + + # Load data + with warnings.catch_warnings(): + warnings.filterwarnings( + action='ignore', + message="Skipping global attribute 'units': 'units' is not a " + "permitted attribute", + category=UserWarning, + module='iris', + ) + cube = iris.load_cube(filepath, NameConstraint(var_name=raw_var)) + + # Fix variable metadata + _fix_var_metadata(var_info, cmor_info, cube) + + # Fix coordinates + _fix_coords(cube) + + # Fix global metadata + utils.set_global_atts(cube, attrs) + + # Save variable + utils.save_variable( + cube, + var, + out_dir, + attrs, + unlimited_dimensions=['time'], + ) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + cmor_table = cfg['cmor_table'] + glob_attrs = cfg['attributes'] + + # Run the cmorization + for (var, var_info) in cfg['variables'].items(): + filepath = Path(in_dir) / var_info['filename'] + logger.info("CMORizing variable '%s' from file %s", var, filepath) + glob_attrs['mip'] = var_info['mip'] + cmor_info = cmor_table.get_variable(var_info['mip'], var) + _extract_variable(var_info, cmor_info, glob_attrs, filepath, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/tcom_n2o.py b/esmvaltool/cmorizers/data/formatters/datasets/tcom_n2o.py new file mode 100644 index 0000000000..80d7c2ffb3 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/tcom_n2o.py @@ -0,0 +1,17 @@ +"""ESMValTool CMORizer for TCOM-N2O data. + +Tier + Tier 2: other freely-available dataset. + +Source + https://zenodo.org/record/7386001 + +Last access + 20230117 + +Download and processing instructions + Download the file zmn2o_TCOM_plev_T2Dz_1991_2021.nc + +""" + +from .tcom_ch4 import cmorization # noqa diff --git a/esmvaltool/cmorizers/data/formatters/datasets/uwisc.ncl b/esmvaltool/cmorizers/data/formatters/datasets/uwisc.ncl new file mode 100644 index 0000000000..2145c5170e --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/uwisc.ncl @@ -0,0 +1,126 @@ +; ############################################################################# +; ESMValTool CMORizer for UWisc data +; ############################################################################# +; +; Tier +; Tier 3: restricted dataset. 
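The zonal-mean data handled by `_fix_coords` above carry a single longitude; the fix pins the point at 180 degrees and lets the bounds span the full circle, so area weighting treats the one cell as covering all longitudes. A minimal sketch:

```python
# One longitude point whose bounds span the full circle.
from iris.coords import DimCoord

lon = DimCoord([180.0], standard_name='longitude',
               var_name='lon', units='degrees')
lon.bounds = [[0.0, 360.0]]
print(lon.points, lon.bounds)  # [180.] [[  0. 360.]]
```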
+; +; Source +; Data provided by Ralf Bennartz. +; +; Last access +; 20150415 +; +; Download and processing instructions +; Contact Ralf Bennartz (Earth and Environmental Sciences, Vanderbilt +; University, USA). +; +; Modification history +; 20190208-righi_mattia: adapted to v2. +; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + \ + "/data/formatters/interface.ncl") +begin + + ; Script name (for logger) + DIAG_SCRIPT = "UWISC.ncl" + + ; Source name + OBSNAME = "UWisc" + + ; Tier + TIER = 3 + + ; Period + YEAR1 = get_year(start_year, 1988) + YEAR2 = get_year(end_year, 2007) + + ; Selected variable (standard name) + VAR = (/"lwp", "lwpStderr"/) + + ; Name in the raw data + NAME = (/"LWP", "LWP_ERROR"/) + + ; Conversion factor + CONVERSION = (/1.e-3, 1.e-3/) + + ; MIP + MIP = (/"Amon", "Amon"/) + + ; Frequency + FREQ = (/"mon", "mon"/) + + ; CMOR table + CMOR_TABLE = getenv("cmor_tables") + "/custom/CMOR_" + VAR + ".dat" + + ; Type + TYPE = "sat" + + ; Version + VERSION = "v2" + + ; Global attributes + SOURCE = "Data provided by Ralf Bennartz (Vanderbilt University, USA)" + REF = "O'Dell et al., J. Clim., doi:10.1175/2007JCLI1958.1, 2008" + COMMENT = "" + +end + +begin + + do vv = 0, dimsizes(VAR) - 1 + + log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") + + do yr = YEAR1, YEAR2 + + fname = input_dir_path + "UWisc_LWPMEAN_" + yr + "_v2.nc" + + f = addfile(fname, "r") + output = (/f->$NAME(vv)$/) + + lat = (/f->lat/) + lon = (/f->lon/) + + ; Convert units + output = output * CONVERSION(vv) ; for clivi this will be equal 0 + + ; Format coordinates + output!0 = "time" + output!1 = "lat" + output!2 = "lon" + output&time = create_timec(yr, yr) + output&lat = lat + output&lon = lon + format_coords(output, yr + "0101", yr + "1231", FREQ(vv)) + + ; Set variable attributes + tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ(vv)) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = yr + "01-" + yr + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" + + ; Write variable + write_nc(fout, VAR(vv), output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + end do + + end do + +end diff --git a/esmvaltool/cmorizers/data/formatters/datasets/wfde5.py b/esmvaltool/cmorizers/data/formatters/datasets/wfde5.py new file mode 100644 index 0000000000..0cc467e161 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/wfde5.py @@ -0,0 +1,135 @@ +"""ESMValTool CMORizer for WFDE5 data. + +Tier + Tier 2: other freely-available dataset. + +Source + https://doi.org/10.24381/cds.20d54e34 + +Last access + 20210416 + +Download and processing instructions + Download the following variables from the cds: + Near-surface air temperature ("CRU") + Rainfall flux ("CRU" as well as "CRU and GPCC") + Snowfall flux ("CRU" as well as "CRU and GPCC") + unzip the downloaded files + rename to follow syntax '{raw_name}_WFDE5_{reference}_*_v1.0.nc' +""" + +import copy +import logging +import re +from pathlib import Path + +import iris +from cf_units import Unit +from esmvalcore.preprocessor import daily_statistics, monthly_statistics + +from ... 
import utilities as utils + +logger = logging.getLogger(__name__) + + +def _fix_time_coord(cube, var): + """Correct wrong time points.""" + # Fix units + cube.coord('time').units = Unit(cube.coord('time').units.origin, + calendar=var['calendar']) + cube.coord('time').convert_units( + Unit('days since 1950-1-1 00:00:00', calendar='gregorian')) + + # time points are XX:00:00, should be XX:30:00 + time = cube.coord('time') + time.points = time.points + 1 / 48 + + +def _extract_variable(var, cfg, filenames, out_dir): + """Extract variable.""" + short_name = var['short_name'] + version = cfg['attributes']['version'] + '-' + var['reference'] + + cubes = iris.load(filenames) + cube = cubes.concatenate_cube() + if short_name == 'pr': + # Rainf add Snowf + snow_filenames = [ + fname.replace(var['raw_name'], var['raw_name_snow']) + for fname in filenames + ] + scubes = iris.load(snow_filenames) + scube = scubes.concatenate_cube() + cube.data = scube.core_data() + cube.core_data() + + # Fix units + cmor_info = cfg['cmor_table'].get_variable(var['mip'], short_name) + if 'raw_units' in var: + cube.units = var['raw_units'] + cube.convert_units(cmor_info.units) + + # Fix time coord + _fix_time_coord(cube, var) + + # Fix coordinates + cube = utils.fix_coords(cube) + if 'height2m' in cmor_info.dimensions: + utils.add_height2m(cube) + + # Fix metadata + attrs = copy.deepcopy(cfg['attributes']) + attrs['mip'] = var['mip'] + attrs['version'] = version + utils.fix_var_metadata(cube, cmor_info) + utils.set_global_atts(cube, attrs) + + logger.info("Building daily means") + # Calc daily + cube = daily_statistics(cube) + # Save variable + utils.save_variable(cube, + short_name, + out_dir, + attrs, + unlimited_dimensions=['time']) + + if var['add_mon']: + logger.info("Building monthly means") + + # Calc monthly + cube = monthly_statistics(cube) + cube.remove_coord('month_number') + cube.remove_coord('year') + + # Fix metadata + attrs['mip'] = 'Amon' + utils.set_global_atts(cube, attrs) + + # Save variable + utils.save_variable(cube, + short_name, + out_dir, + attrs, + unlimited_dimensions=['time']) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + # Run the cmorization + for var in cfg['variables'].values(): + logger.info("CMORizing variable '%s'", var['short_name']) + file_names = cfg['filename'].format(**var) + + sorted_filenames = {} + # Sort files according to year + pattern = re.compile(r'.*_(\d{4})\d{2}_.*\.nc') + for filename in sorted(Path(in_dir).glob(file_names)): + filename = str(filename) + year = int(pattern.match(filename)[1]) + sorted_filenames.setdefault(year, []) + sorted_filenames[year].append(filename) + + # Run CMORization for each year + for (year, filenames) in sorted_filenames.items(): + logger.info("Processing year %i", year) + _extract_variable(var, cfg, filenames, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/woa.py b/esmvaltool/cmorizers/data/formatters/datasets/woa.py new file mode 100644 index 0000000000..35db6d810d --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/woa.py @@ -0,0 +1,148 @@ +"""ESMValTool CMORizer for WOA data. + +Tier + Tier 2: other freely-available dataset. 
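The WFDE5 `cmorization` entry point above buckets the monthly input files per year with a regular expression before processing each year separately. The grouping logic in isolation, with illustrative file names:

```python
# Pull the year out of names like 'Tair_WFDE5_CRU_197901_v1.0.nc'
# and bucket the files per year. Names are illustrative.
import re

pattern = re.compile(r'.*_(\d{4})\d{2}_.*\.nc')
files = ['Tair_WFDE5_CRU_197901_v1.0.nc',
         'Tair_WFDE5_CRU_197902_v1.0.nc',
         'Tair_WFDE5_CRU_198001_v1.0.nc']
by_year = {}
for name in sorted(files):
    year = int(pattern.match(name)[1])
    by_year.setdefault(year, []).append(name)
print(by_year)  # {1979: [...2 files...], 1980: [...1 file...]}
```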
+ +Source + WOA18: https://www.ncei.noaa.gov/data/oceans/woa/WOA18/DATA + WOA13: https://www.ncei.noaa.gov/data/oceans/woa/WOA13/DATAv2 + +Last access + WOA18: 20210311 + +Download and processing instructions + All handled by the script (download only if local data are missing) + + Alternatively, download the following files: + temperature/netcdf/decav81B0/1.00/woa18_decav81B0_t00_01.nc + salinity/netcdf/decav81B0/1.00/woa18_decav81B0_s00_01.nc + oxygen/netcdf/all/1.00/woa18_all_o00_01.nc + nitrate/netcdf/all/1.00/woa18_all_n00_01.nc + phosphate/netcdf/all/1.00/woa18_all_p00_01.nc + silicate/netcdf/all/1.00/woa18_all_i00_01.nc + (To get WOA13, replace filenames prefix woa18 with woa13) + + +Modification history + 20210311-lovato_tomas: handle WOA18/WOA13, raw data download, use OBS6 + 20200911-bock_lisa: extend to WOA18 + 20190328-lovato_tomas: cmorizer revision + 20190131-predoi_valeriu: adapted to v2. + 20190131-demora_lee: written. +""" + +import logging +import os +from warnings import catch_warnings, filterwarnings + +import iris +from cf_units import Unit + +from esmvaltool.cmorizers.data.utilities import ( + constant_metadata, + fix_coords, + fix_var_metadata, + save_variable, + set_global_atts, +) + +logger = logging.getLogger(__name__) + + +def _fix_data(cube, var, version): + """Specific data fixes for different variables.""" + logger.info("Fixing data ...") + + if version == '2018': + with constant_metadata(cube): + if var in ['o2', 'po4', 'si', 'no3']: + cube /= 1000. # Convert from umol/kg to mol/m^3 + + if version == '2013v2': + with constant_metadata(cube): + mll_to_mol = ['po4', 'si', 'no3'] + if var in mll_to_mol: + cube /= 1000. # Convert from ml/l to mol/m^3 + elif var == 'thetao': + cube += 273.15 # Convert to Kelvin + elif var == 'o2': + cube *= 44.661 / 1000. 
# Convert from ml/l to mol/m^3 + + return cube + + +def collect_files(in_dir, var, cfg): + """Compose input file list and download if missing.""" + file_list = [] + var_dict = cfg['variables'][var] + in_dir = os.path.join(in_dir, var_dict['name']) + + fname = cfg['attributes']['short_name'].lower( + ) + '_' + var_dict['file'] + '00_01.nc' + in_file = os.path.join(in_dir, fname) + file_list.append(in_file) + + return file_list + + +def extract_variable(in_files, out_dir, attrs, raw_info, cmor_table): + """Extract variables and create OBS dataset.""" + var = raw_info['var'] + var_info = cmor_table.get_variable(raw_info['mip'], var) + rawvar = raw_info['raw_var'] + with catch_warnings(): + filterwarnings( + action='ignore', + message='Ignoring netCDF variable .* invalid units .*', + category=UserWarning, + module='iris', + ) + cubes = iris.load(in_files, rawvar) + iris.util.equalise_attributes(cubes) + cube = cubes.concatenate_cube() + + # set reference time + year = raw_info['reference_year'] + cube.coord('time').climatological = False + cube.coord('time').points = 6.5 + cube.coord('time').units = Unit('months since ' + str(year) + + '-01-01 00:00:00', + calendar='gregorian') + + fix_var_metadata(cube, var_info) + cube = fix_coords(cube) + _fix_data(cube, var, attrs['version']) + set_global_atts(cube, attrs) + save_variable(cube, var, out_dir, attrs, unlimited_dimensions=['time']) + + # derive ocean surface + if 'srf_var' in raw_info: + var_info = cmor_table.get_variable(raw_info['mip'], + raw_info['srf_var']) + logger.info("Extract surface OBS for %s", raw_info['srf_var']) + level_constraint = iris.Constraint(cube.var_name, depth=0) + cube_os = cube.extract(level_constraint) + fix_var_metadata(cube_os, var_info) + save_variable(cube_os, + raw_info['srf_var'], + out_dir, + attrs, + unlimited_dimensions=['time']) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + cmor_table = cfg['cmor_table'] + glob_attrs = cfg['attributes'] + + # run the cmorization + for var, vals in cfg['variables'].items(): + in_files = collect_files(in_dir, var, cfg) + logger.info("CMORizing var %s from input set %s", var, vals['name']) + raw_info = cfg['variables'][var] + raw_info.update({ + 'var': var, + 'reference_year': cfg['custom']['reference_year'], + }) + glob_attrs['mip'] = vals['mip'] + extract_variable(in_files, out_dir, glob_attrs, raw_info, cmor_table) diff --git a/esmvaltool/utils/cmorizers/obs/interface.ncl b/esmvaltool/cmorizers/data/formatters/interface.ncl similarity index 82% rename from esmvaltool/utils/cmorizers/obs/interface.ncl rename to esmvaltool/cmorizers/data/formatters/interface.ncl index 16bcabb790..148114f546 100644 --- a/esmvaltool/utils/cmorizers/obs/interface.ncl +++ b/esmvaltool/cmorizers/data/formatters/interface.ncl @@ -11,10 +11,12 @@ loadscript("$settings") ; Load logging functions -loadscript(getenv("esmvaltool_root") + "/interface_scripts/logging.ncl") +loadscript(getenv("esmvaltool_root") + \ + "/../interface_scripts/logging.ncl") ; Load utility functions -loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/utilities.ncl") +loadscript(getenv("esmvaltool_root") + \ + "/data/formatters/utilities.ncl") ; Check trailing slash if (str_get_cols(input_dir_path, -1, -1).ne."/") then diff --git a/esmvaltool/cmorizers/data/formatters/nsidc_common.py b/esmvaltool/cmorizers/data/formatters/nsidc_common.py new file mode 100644 index 0000000000..ac4228a569 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/nsidc_common.py 
@@ -0,0 +1,92 @@ +"""Common tools to CMORize NSIDC-0116 northern and sothern data.""" + +import glob +import logging +import os + +import iris +import numpy as np +from iris.coords import AuxCoord +from iris.cube import Cube + +from esmvaltool.cmorizers.data.utilities import ( + fix_var_metadata, + save_variable, + set_global_atts, +) + +logger = logging.getLogger(__name__) + + +def cmorize(cfg, region, in_dir, out_dir): + """Cmorize NSIDC-0116 dataset.""" + glob_attrs = cfg['attributes'] + + logger.info("Starting cmorization for Tier%s OBS files: %s", + glob_attrs['tier'], glob_attrs['dataset_id']) + logger.info("Input data from: %s", in_dir) + logger.info("Output will be written to: %s", out_dir) + + file_expr = os.path.join(in_dir, f'icemotion_daily_{region}_*.nc') + for filepath in glob.glob(file_expr): + logger.info('Cmorizing file %s', filepath) + cubes = iris.load(filepath) + logger.debug(cubes) + lat_coord = _create_coord(cubes, 'lat', 'latitude') + lon_coord = _create_coord(cubes, 'lon', 'longitude') + lon_coord.points[lon_coord.points < 0] += 360 + + for var, vals in cfg['variables'].items(): + var_info = cfg['cmor_table'].get_variable(vals['mip'], var) + logger.info('Cmorizing var %s', var) + cube = cubes.extract_cube(iris.Constraint(vals['raw'])) + cube.add_aux_coord(lat_coord, (1, 2)) + cube.add_aux_coord(lon_coord, (1, 2)) + cube.convert_units(var_info.units) + logger.debug(cube) + glob_attrs['mip'] = vals['mip'] + fix_var_metadata(cube, var_info) + set_global_atts(cube, glob_attrs) + zlib = vals.get('compress', False) + if zlib: + # Realize data to speed-up writing + # pylint: disable=pointless-statement + cube.data + save_variable(cube, var, out_dir, glob_attrs, zlib=zlib) + cubes.remove(cube) + + _create_areacello(cfg, cube, glob_attrs, out_dir) + + +def _create_areacello(cfg, sample_cube, glob_attrs, out_dir): + if not cfg['custom'].get('create_areacello', False): + return + var_info = cfg['cmor_table'].get_variable('fx', 'areacello') + glob_attrs['mip'] = 'fx' + lat_coord = sample_cube.coord('latitude') + cube = Cube( + np.full(lat_coord.shape, cfg['custom']['grid_cell_size'], np.float32), + standard_name=var_info.standard_name, + long_name=var_info.long_name, + var_name=var_info.short_name, + units='m2', + ) + cube.add_aux_coord(lat_coord, (0, 1)) + cube.add_aux_coord(sample_cube.coord('longitude'), (0, 1)) + cube.add_dim_coord(sample_cube.coord('projection_y_coordinate'), 0) + cube.add_dim_coord(sample_cube.coord('projection_x_coordinate'), 1) + fix_var_metadata(cube, var_info) + set_global_atts(cube, glob_attrs) + save_variable(cube, var_info.short_name, out_dir, glob_attrs, zlib=True) + + +def _create_coord(cubes, var_name, standard_name): + cube = cubes.extract_cube(standard_name) + coord = AuxCoord( + cube.data, + standard_name=standard_name, + long_name=cube.long_name, + var_name=var_name, + units=cube.units, + ) + return coord diff --git a/esmvaltool/cmorizers/data/formatters/osi_common.py b/esmvaltool/cmorizers/data/formatters/osi_common.py new file mode 100644 index 0000000000..3c604662aa --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/osi_common.py @@ -0,0 +1,259 @@ +"""Common functionalities for OSI-450 dataset cmorization.""" + +import glob +import logging +import os +from calendar import isleap, monthrange +from datetime import datetime, timedelta + +import iris +import iris.exceptions +import numpy as np +from esmvalcore.preprocessor import monthly_statistics +from iris.coord_categorisation import add_day_of_year +from iris.coords import AuxCoord 
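Both polar-grid formatters wrap negative longitudes into [0, 360) before attaching the two-dimensional coordinates, as in `lon_coord.points[lon_coord.points < 0] += 360` above. A standalone sketch of the same wrap, written with `np.where` so the coordinate points are never mutated in place:

```python
# Map longitudes in [-180, 0) onto [180, 360) before building the
# 2-D auxiliary coordinate. Values are illustrative.
import numpy as np
from iris.coords import AuxCoord

lon = np.array([[-135.0, -45.0], [45.0, 135.0]])
lon = np.where(lon < 0, lon + 360, lon)
lon_coord = AuxCoord(lon, standard_name='longitude',
                     var_name='lon', units='degrees_east')
print(lon_coord.points)  # [[225. 315.] [ 45. 135.]]
```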
+from iris.cube import Cube, CubeList + +from esmvaltool.cmorizers.data.utilities import ( + convert_timeunits, + fix_var_metadata, + save_variable, + set_global_atts, +) + +logger = logging.getLogger(__name__) + + +class OSICmorizer(): + """Cmorizer for OSI-450 datasets.""" + def __init__(self, in_dir, out_dir, cfg, hemisphere): + self.in_dir = in_dir + self.out_dir = out_dir + self.cfg = cfg + self.hemisphere = hemisphere + self.min_days = self.cfg['custom'].get('min_days', 50) + + def cmorize(self): + """Cmorize OSI-450 or OSI-409 dataset.""" + logger.info("Starting cmorization for Tier%s OBS files: %s", + self.cfg['attributes']['tier'], + self.cfg['attributes']['dataset_id']) + logger.info("Input data from: %s", self.in_dir) + logger.info("Output will be written to: %s", self.out_dir) + + # run the cmorization + first_run = True + for var, vals in self.cfg['variables'].items(): + var_info = {} + for mip in vals['mip']: + var_info[mip] = self.cfg['cmor_table'].get_variable(mip, var) + file_pattern = ( + f"{vals['raw']}_{self.hemisphere}_{vals['grid']}_*.nc") + for year in os.listdir(self.in_dir): + try: + year = int(year) + except ValueError: + continue + logger.info("CMORizing var %s for year %s", var, year) + raw_info = { + 'name': + vals['raw'], + 'file': + os.path.join(self.in_dir, str(year), '??', file_pattern) + } + self._extract_variable(var_info, raw_info, year, vals['mip']) + if first_run: + sample_file = glob.glob( + os.path.join(self.in_dir, str(year), '01', + file_pattern))[0] + cube = iris.load_cube( + sample_file, + iris.Constraint( + # pylint: disable=cell-var-from-loop + cube_func=lambda c: c.var_name == raw_info['name']) + ) + self._create_areacello(cube) + first_run = False + + def _extract_variable(self, var_infos, raw_info, year, mips): + """Extract to all vars.""" + cubes = iris.load( + raw_info['file'], + iris.Constraint( + cube_func=lambda c: c.var_name == raw_info['name'])) + tracking_ids = self._unify_attributes(cubes) + cube = cubes.concatenate_cube() + for ancilliary_var in list(cube.ancillary_variables()): + cube.remove_ancillary_variable(ancilliary_var) + del cubes + if tracking_ids: + cube.attributes['tracking_ids'] = tracking_ids + cube.coord('projection_x_coordinate').var_name = 'x' + cube.coord('projection_y_coordinate').var_name = 'y' + lon_coord = cube.coord('longitude') + lon_coord.points[lon_coord.points < 0] += 360 + source_cube = cube + attrs = self.cfg['attributes'] + for mip in mips: + var_info = var_infos[mip] + attrs['mip'] = mip + if var_info.frequency == 'mon': + cube = monthly_statistics(source_cube) + cube = self._fill_months(cube) + elif var_info.frequency == 'day': + cube = self._fill_days(source_cube, year) + if not cube: + continue + logger.debug(cube) + fix_var_metadata(cube, var_info) + convert_timeunits(cube, year) + set_global_atts(cube, attrs) + self._try_remove_coord(cube, 'year') + self._try_remove_coord(cube, 'day_of_year') + self._try_remove_coord(cube, 'month_number') + self._try_remove_coord(cube, 'day_of_month') + save_variable(cube, var_info.short_name, self.out_dir, attrs) + return cube + + @staticmethod + def _try_remove_coord(cube, coord): + try: + cube.remove_coord(coord) + except iris.exceptions.CoordinateNotFoundError: + pass + + @staticmethod + def _fill_months(cube): + if cube.coord('time').shape[0] == 12: + return cube + cubes = CubeList(cube.slices_over('time')) + model_cube = cubes[0].copy() + for month in range(1, 13): + month_constraint = iris.Constraint( + # pylint: disable=cell-var-from-loop + time=lambda 
cell: cell.point.month == month) + if cubes.extract(month_constraint): + continue + cubes.append( + OSICmorizer._create_nan_cube(model_cube, month, month=True)) + cube = cubes.merge_cube() + return cube + + def _fill_days(self, cube, year): + if cube.coord('time').shape[0] < self.min_days: + logger.warning( + 'Only %s days available. Skip generation of daily files', + cube.coord('time').shape[0]) + return None + total_days = 366 if isleap(year) else 365 + if cube.coord('time').shape[0] < total_days: + cubes = OSICmorizer._add_nan_timesteps(cube, total_days) + cube = cubes.merge_cube() + cube.remove_coord('day_of_year') + del cubes + return cube + + @staticmethod + def _add_nan_timesteps(cube, total_days): + add_day_of_year(cube, 'time') + cubes = CubeList(cube.slices_over('time')) + model_cube = cubes[0].copy() + model_cube.remove_coord('day_of_year') + for day_of_year in range(total_days): + day_constraint = iris.Constraint(day_of_year=day_of_year + 1) + if cubes.extract(day_constraint): + continue + nan_cube = OSICmorizer._create_nan_cube(model_cube, + day_of_year, + month=False) + add_day_of_year(nan_cube, 'time') + cubes.append(nan_cube) + del model_cube + return cubes + + @staticmethod + def _create_nan_cube(model_cube, num, month): + nan_cube = model_cube.copy( + np.ma.masked_all(model_cube.shape, dtype=model_cube.dtype)) + time_coord = nan_cube.coord('time') + nan_cube.remove_coord(time_coord) + date = time_coord.cell(0).point + if month: + date = datetime(date.year, num, date.day) + bounds = (datetime(date.year, num, 1), + datetime(date.year, num, + monthrange(date.year, num)[1])) + else: + date = datetime(date.year, 1, 1, 12) + timedelta(days=num) + bounds = (datetime(date.year, 1, 1) + timedelta(days=num), + datetime(date.year, 1, 1, 23, 59) + timedelta(days=num)) + + date = time_coord.units.date2num(date) + bounds = ( + time_coord.units.date2num(bounds[0]), + time_coord.units.date2num(bounds[1]), + ) + nan_cube.add_aux_coord( + AuxCoord( + [date], + standard_name=time_coord.standard_name, + var_name=time_coord.var_name, + long_name=time_coord.long_name, + units=time_coord.units, + attributes=time_coord.attributes, + bounds=[bounds], + )) + return nan_cube + + @staticmethod + def _unify_attributes(cubes): + tracking_ids = [] + for cube in cubes: + # OSI-409 and OSI-450 do not have the same attributes + try: + tracking_ids.append(cube.attributes['tracking_id']) + except KeyError: + pass + + to_remove = [ + 'time_coverage_start', + 'time_coverage_end', + 'history', + 'tracking_id', + 'start_date', + 'stop_date', + ] + for attr in to_remove: + try: + del cube.attributes[attr] + except KeyError: + pass + return tracking_ids + + def _create_areacello(self, sample_cube): + if not self.cfg['custom'].get('create_areacello', False): + return + var_info = self.cfg['cmor_table'].get_variable('fx', 'areacello') + lat_coord = sample_cube.coord('latitude') + self.cfg['attributes']['mip'] = 'fx' + cube = Cube( + np.full(lat_coord.shape, self.cfg['custom']['grid_cell_size'], + np.float32), + standard_name=var_info.standard_name, + long_name=var_info.long_name, + var_name=var_info.short_name, + units='m2', + ) + cube.add_aux_coord(lat_coord, (0, 1)) + cube.add_aux_coord(sample_cube.coord('longitude'), (0, 1)) + cube.add_dim_coord(sample_cube.coord('projection_y_coordinate'), 0) + cube.add_dim_coord(sample_cube.coord('projection_x_coordinate'), 1) + cube.coord('projection_x_coordinate').var_name = 'x' + cube.coord('projection_y_coordinate').var_name = 'y' + fix_var_metadata(cube, var_info) + 
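`_create_nan_cube` above builds fully masked copies of a template slice for the missing dates, so that merging yields complete monthly or daily series. The core of that trick in isolation:

```python
# A fully masked placeholder slice, copied from a template cube.
import numpy as np
from iris.cube import Cube

template = Cube(np.arange(6.0).reshape(2, 3), var_name='siconc')
placeholder = template.copy(
    np.ma.masked_all(template.shape, dtype=template.dtype))
print(placeholder.data.mask.all())  # True: nothing but missing values
```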
set_global_atts(cube, self.cfg['attributes']) + save_variable(cube, + var_info.short_name, + self.out_dir, + self.cfg['attributes'], + zlib=True) diff --git a/esmvaltool/utils/cmorizers/obs/utilities.ncl b/esmvaltool/cmorizers/data/formatters/utilities.ncl similarity index 84% rename from esmvaltool/utils/cmorizers/obs/utilities.ncl rename to esmvaltool/cmorizers/data/formatters/utilities.ncl index 09690b96ac..4c8cf6444e 100644 --- a/esmvaltool/utils/cmorizers/obs/utilities.ncl +++ b/esmvaltool/cmorizers/data/formatters/utilities.ncl @@ -8,6 +8,7 @@ ; procedure format_time ; procedure format_plev ; procedure format_lev +; procedure format_alt40 ; procedure format_lat ; procedure format_lon ; procedure format_coords @@ -15,6 +16,7 @@ ; function format_variable ; function guess_bounds_time ; function guess_bounds_lev +; function guess_bounds_alt40 ; function guess_bounds_lat ; function guess_bounds_lon ; function guess_coord_bounds @@ -32,6 +34,26 @@ TUNITS = "days since 1950-01-01 00:00:00" ; CMOR FillValue FILL = 1.e+20 +undef("get_year") +function get_year(year:integer, default:integer) +; +; Arguments +; year: year recived for cmorizer +; default: +; +; Return value +; An integer with year or default if year is 0 +; +; Description +; Get passed year an default date if not provided +begin + if year.eq.0 then + return(default) + else + return(year) + end if +end + ; ############################################################################# undef("create_timec") function create_timec(y1:integer, @@ -48,13 +70,13 @@ function create_timec(y1:integer, ; Create a monthly time coordinate for the given time range. ; ; Modification history -; 20140124-A_righ_ma: written. +; 20140124-righi_mattia: written. ; local funcname, scriptname, yy, mm, out begin funcname = "create_timec" - scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl" + scriptname = "esmvaltool/cmorizers/data/formatters/utilities.ncl" out = new(12 * (y2 - y1 + 1), double) do yy = y1, y2 @@ -90,14 +112,15 @@ procedure format_time(var:numeric, ; References ; ; Modification history -; 20190216-A_righ_ma: written. +; 20200518-righi_mattia: improve timesteps consistency check. +; 20190216-righi_mattia: written. 
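The new `get_year` helper above treats a year of 0 as "not requested" and substitutes the dataset default. The same contract rendered in Python:

```python
# Python rendering of get_year: 0 means "use the dataset default".
def get_year(year: int, default: int) -> int:
    """Return `year`, or `default` when `year` is 0."""
    return year if year != 0 else default


assert get_year(0, 1988) == 1988
assert get_year(1995, 1988) == 1995
```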
; local funcname, scriptname, ctime, ntime, year1, month1, day1, year2, month2, \ day2, calendar, date, exp_ntime, yy, mm, m1, m2, dd, opt, newtime begin funcname = "format_time" - scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl" + scriptname = "esmvaltool/cmorizers/data/formatters/utilities.ncl" ; Check supported frequency if (all(frequency.ne.(/"3hr", "6hr", "day", "mon", "yr"/))) then @@ -156,8 +179,8 @@ begin exp_ntime = year2 - year1 + 1 if (ntime.ne.exp_ntime) then error_msg("f", scriptname, funcname, \ - "incorrect number of timesteps in input data: " + exp_ntime + \ - " expected, " + ntime + " found") + "incorrect number of timesteps in input data: " + \ + exp_ntime + " expected, " + ntime + " found") end if ; Reset date (1st of July) @@ -174,26 +197,23 @@ begin if (frequency.eq."mon") then ; Check size - if (year1.eq.year2) then - exp_ntime = month2 - month1 + 1 - else - exp_ntime = (12 - month1 + 1) + month2 + \ - 12 * where((year2 - year1 - 1).gt.0, year2 - year1 - 1, 0) - end if + exp_ntime = 12 * (year2 - year1 + 1) + exp_ntime = exp_ntime - (month1 - 1) + exp_ntime = exp_ntime - (12 - month2) if (ntime.ne.exp_ntime) then error_msg("f", scriptname, funcname, \ - "incorrect number of timesteps in input data: " + exp_ntime + \ - "expected, " + ntime + " found") + "incorrect number of timesteps in input data: " + \ + exp_ntime + " expected, " + ntime + " found") end if ; Reset date (middle of the month) tt = 0 do yy = year1, year2 - date(tt, 0) = yy yy@calendar = calendar m1 = where(yy.eq.year1, month1, 1) m2 = where(yy.eq.year2, month2, 12) do mm = m1, m2 + date(tt, 0) = yy date(tt, 1) = mm dm = days_in_month(yy, mm) / 2. + 1 date(tt, 2) = toint(dm) @@ -217,20 +237,22 @@ begin cd_inv_calendar(year1, month1, day1, zero, zero, zero, TUNITS, opt) + 1 if (ntime.ne.exp_ntime) then error_msg("f", scriptname, funcname, \ - "incorrect number of timesteps in input data: " + exp_ntime + \ - "expected, " + ntime + " found") + "incorrect number of timesteps in input data: " + \ + exp_ntime + " expected, " + ntime + " found") end if delete(opt) ; Reset date (middle of the day) tt = 0 do yy = year1, year2 - date(tt, 0) = yy m1 = where(yy.eq.year1, month1, 1) m2 = where(yy.eq.year2, month2, 12) do mm = m1, m2 - date(tt, 1) = mm - do dd = day1, days_in_month(yy, mm) + d1 = where(yy.eq.year1 .and. mm.eq.m1, day1, 1) + d2 = where(yy.eq.year2 .and. mm.eq.m2, day2, days_in_month(yy, mm)) + do dd = d1, d2 + date(tt, 0) = yy + date(tt, 1) = mm date(tt, 2) = dd tt = tt + 1 end do @@ -253,8 +275,8 @@ begin exp_ntime = 4 * exp_ntime if (ntime.ne.exp_ntime) then error_msg("f", scriptname, funcname, \ - "incorrect number of timesteps in input data: " + exp_ntime + \ - "expected, " + ntime + " found") + "incorrect number of timesteps in input data: " + \ + exp_ntime + " expected, " + ntime + " found") end if delete(opt) @@ -272,8 +294,8 @@ begin exp_ntime = 8 * exp_ntime if (ntime.ne.exp_ntime) then error_msg("f", scriptname, funcname, \ - "incorrect number of timesteps in input data: " + exp_ntime + \ - "expected, " + ntime + " found") + "incorrect number of timesteps in input data: " + \ + exp_ntime + " expected, " + ntime + " found") end if delete(opt) @@ -324,13 +346,13 @@ procedure format_plev(var:numeric) ; References ; ; Modification history -; 20190216-A_righ_ma: written. +; 20190216-righi_mattia: written. 
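The corrected monthly check above computes the expected number of timesteps as twelve per year, minus the months missing before `month1` in the first year and after `month2` in the last year. The same arithmetic rendered in Python, with two quick checks:

```python
# Expected monthly timesteps between (year1, month1) and (year2, month2).
def expected_monthly_steps(year1, month1, year2, month2):
    return 12 * (year2 - year1 + 1) - (month1 - 1) - (12 - month2)


assert expected_monthly_steps(2000, 1, 2000, 12) == 12
assert expected_monthly_steps(2000, 11, 2001, 2) == 4  # Nov-Feb
```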
; local funcname, scriptname, rank, cplev, newplev begin funcname = "format_plev" - scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl" + scriptname = "esmvaltool/cmorizers/data/formatters/utilities.ncl" ; Set rank rank = dimsizes(dimsizes(var)) @@ -386,13 +408,13 @@ procedure format_lev(var:numeric) ; References ; ; Modification history -; 20190216-A_righ_ma: written. +; 20190216-righi_mattia: written. ; local funcname, scriptname, rank, clev, newlev begin funcname = "format_lev" - scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl" + scriptname = "esmvaltool/cmorizers/data/formatters/utilities.ncl" ; Set rank rank = dimsizes(dimsizes(var)) @@ -433,6 +455,69 @@ begin end +; ############################################################################# +undef("format_alt40") +procedure format_alt40(var:numeric) +; +; Arguments +; var: input variable +; +; Description +; Check the monotonicity of the alt40 (altitude) coordinate and set the +; standard CMOR attributes. +; +; Caveats +; +; References +; +; Modification history +; 20200204-lauer_axel: written. +; +local funcname, scriptname, rank, clev, newlev +begin + + funcname = "format_alt40" + scriptname = "esmvaltool/cmorizers/data/formatters/utilities.ncl" + + ; Set rank + rank = dimsizes(dimsizes(var)) + + ; Check monotonicity + if (isMonotonic(var&alt40) .eq. 0) then + error_msg("f", scriptname, funcname, "non-monotonic vertical coordinate") + end if + if (isMonotonic(var&alt40).eq.-1) then ; must be monotonically increasing + if (rank.eq.4) then + var = var(:, ::-1, :, :) + elseif (rank.eq.3) then + var = var(:, ::-1, :) + elseif (rank.eq.2) then + var = var(:, ::-1) + end if + end if + + ; Read coordinate + calt = var&alt40 + + ; Set standard attributes + newalt = todouble(calt) ; this also removes attributes + copy_VarCoords(calt, newalt) + newalt@bounds = "alt40_bnds" + newalt@positive = "up" + newalt@long_name = "altitude" + newalt@axis = "Z" + newalt@units = "m" + newalt@standard_name = "altitude" + if (isatt(newalt, "_FillValue")) then + delete(newalt@_FillValue) + end if + + ; Reset lev coordinate + delete(var&alt40) + var&alt40 = newalt + +end + ; ############################################################################# undef("format_lat") procedure format_lat(var:numeric) @@ -449,21 +534,22 @@ procedure format_lat(var:numeric) ; References ; ; Modification history -; 20190216-A_righ_ma: written. +; 20190216-righi_mattia: written. ; local funcname, scriptname, rank, dims, dpos, lcheck, clat, newlat begin funcname = "format_lat" - scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl" + scriptname = "esmvaltool/cmorizers/data/formatters/utilities.ncl" ; Set rank rank = dimsizes(dimsizes(var)) dims = getvardims(var) dpos = ind(dims.eq."lat") - ; Check monotonicity - if (isMonotonic(var&lat) .eq. 0) then + ; Check monotonicity but not for point variables + if (isMonotonic(var&lat) .eq. 0 .and. \ + dimsizes(var&lat) .gt. 1) then error_msg("f", scriptname, funcname, "non-monotonic latitude coordinate") end if if (isMonotonic(var&lat) .eq. -1) then ; must be S->N @@ -531,17 +617,18 @@ procedure format_lon(var:numeric) ; References ; ; Modification history -; 20190216-A_righ_ma: written. +; 20190216-righi_mattia: written. ; local funcname, scriptname, clon, newlon begin funcname = "format_lon" - scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl" + scriptname = "esmvaltool/cmorizers/data/formatters/utilities.ncl" - ; Check monotonicity - if (isMonotonic(var&lon) .eq. 
0) then - error_msg("f", scriptname, funcname, "non-monotonic lonfitude coordinate") + ; Check monotonicity but not for point variables + if (isMonotonic(var&lon) .eq. 0 .and. \ + dimsizes(var&lon) .gt. 1) then + error_msg("f", scriptname, funcname, "non-monotonic longitude coordinate") end if ; Check that lon is 0:360 @@ -582,7 +669,6 @@ procedure format_coords(var:numeric, ; date1: start date as YYYYMMDD ; date2: end date as YYYYMMDD ; frequency: time frequency ("3hr", "6hr", "day", "mon", "yr") - ; ; Description ; Format the coordinate according to the CF/CMOR standard. @@ -592,12 +678,12 @@ procedure format_coords(var:numeric, ; References ; ; Modification history -; 20190216-A_righ_ma: written. +; 20190216-righi_mattia: written. ; begin funcname = "format_coords" - scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl" + scriptname = "esmvaltool/cmorizers/data/formatters/utilities.ncl" ; Get variable dimensions dnames = getvardims(var) @@ -622,6 +708,11 @@ begin found = True end if + if (dnames(dd).eq."alt40") then + format_alt40(var) + found = True + end if + if (dnames(dd).eq."lat") then format_lat(var) found = True @@ -661,14 +752,14 @@ function read_cmor(name:string, ; References ; ; Modification history -; 20190107-A_righ_ma: modify to read standard CMIP5 tables -; 20130528-A_righ_ma: written. +; 20190107-righi_mattia: modify to read standard CMIP5 tables +; 20130528-righi_mattia: written. ; local funcname, scriptname, data, idxu, idxd, attn, attv, out begin funcname = "read_cmor" - scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl" + scriptname = "esmvaltool/cmorizers/data/formatters/utilities.ncl" ; Read attributes from cmor table if (.not.fileexists(table)) then @@ -712,14 +803,39 @@ begin idxu = ind(str_get_field(data, 1, ":").eq."! Variable attributes") + 2 idxd = ind(str_get_field(data, 1, ":").eq. \ "! Additional variable information") - 2 - attn = str_squeeze(str_get_field(data(idxu:idxd), 1, ":")) - attv = str_squeeze(str_get_field(data(idxu:idxd), 2, ":")) + n = idxd - idxu + 1 + attn = new(n, string) + attv = new(n, string) + do i = 0, n - 1 + substr = str_split(data(idxu + i), ":") + m = dimsizes(substr) + attn(i) = str_squeeze(substr(0)) + if (m .gt. 1) then + attv(i) = str_squeeze(str_join(substr(1:), ":")) + else + attv(i) = "" + end if + delete(substr) + end do end if out = True do ii = 0, dimsizes(attn) - 1 - out@$attn(ii)$ = attv(ii) + ; If present, do not copy the attribute 'cell_measures' to comply with the + ; CF conventions: + ; + ; "A variable referenced by cell_measures is not required to be present in + ; the file containing the data variable. If the cell_measures variable is + ; located in another file (an "external file"), rather than in the file + ; where it is referenced, it must be listed in the external_variables + ; attribute of the referencing file (Section 2.6.3)." + ; + ; ---> As we do not have such a variable (e.g. areacella), we remove + ; the attribute 'cell_measures' (if present). + if (attn(ii) .ne. "cell_measures") then + out@$attn(ii)$ = attv(ii) + end if end do return(out) @@ -748,15 +864,15 @@ function format_variable(var:numeric, ; References ; ; Modification history -; 20190107-A_righ_ma: add extra argument for CMOR table -; 20161202-A_laue_ax: preserve attribute "coordinates" if present -; 20130528-A_righ_ma: written. +; 20190107-righi_mattia: add extra argument for CMOR table +; 20161202-lauer_axel: preserve attribute "coordinates" if present +; 20130528-righi_mattia: written. 
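The `read_cmor` fix above splits each "attribute: value" table line on the first colon only and rejoins the remainder, so colons inside values (URLs, references) survive intact. The equivalent one-liner in Python:

```python
# Split on the first colon only; the value keeps its own colons.
line = 'comment: see https://example.org/doc: section 2'
name, _, value = line.partition(':')
print(name.strip())   # comment
print(value.strip())  # see https://example.org/doc: section 2
```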
; local funcname, scriptname, coordattr, out, tmp, att, ii begin funcname = "var_attrib" - scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl" + scriptname = "esmvaltool/cmorizers/data/formatters/utilities.ncl" ; Set fill value first if(isatt(var, "_FillValue")) then @@ -816,13 +932,13 @@ function guess_bounds_time(coord[*]:double, ; References ; ; Modification history -; 20190217-A_righ_ma: written. +; 20190217-righi_mattia: written. ; local funcname, scriptname, date, year, month, day, opt, units, tyear, tmonth begin funcname = "guess_bounds_time" - scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl" + scriptname = "esmvaltool/cmorizers/data/formatters/utilities.ncl" ; This function assumes that units of days are used if (.not.isStrSubset(coord@units, "days since")) then @@ -922,13 +1038,13 @@ function guess_bounds_lev(coord[*]:double) ; References ; ; Modification history -; 20190217-A_righ_ma: written. +; 20190217-righi_mattia: written. ; local funcname, scriptname, size, top begin funcname = "guess_bounds_lev" - scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl" + scriptname = "esmvaltool/cmorizers/data/formatters/utilities.ncl" bounds = new((/dimsizes(coord), 2/), double) bounds!0 = "lev" @@ -951,6 +1067,55 @@ begin end +; ############################################################################# +undef("guess_bounds_alt40") +function guess_bounds_alt40(coord[*]:double) +; +; Arguments +; coord: input level coordinate. +; +; Return value +; A two dimensional array, with the first dimension of the same size of the +; input coordinate and the second dimension of size 2. +; +; Description +; Calculate the boundaries of the altitude40 coordinate as midpoints between +; the input levels. The first (top) boundary is set to a maximum of zero. +; +; Caveats +; +; References +; +; Modification history +; 20200204-lauer_axel: written. +; +local funcname, scriptname, size, top +begin + + funcname = "guess_bounds_alt40" + scriptname = "esmvaltool/cmorizers/data/formatters/utilities.ncl" + + bounds = new((/dimsizes(coord), 2/), double) + bounds!0 = "alt40" + bounds&lev = coord + bounds!1 = "bnds" + delete(bounds@_FillValue) + + size = dimsizes(coord) + + bounds(1:size - 1, 0) = 0.5 * (coord(0:size - 2) + coord(1:size - 1)) + bounds(0:size - 2, 1) = bounds(1:size - 1, 0) + + ; Set top and bottom separately + top = coord(0) - 0.5 * (coord(1) - coord(0)) + bounds(0, 0) = where(top.ge.0., top, 0.) + bounds(size - 1, 1) = \ + coord(size - 1) + 0.5 * (coord(size - 1) - coord(size - 2)) + + return(bounds) + +end + ; ############################################################################# undef("guess_bounds_lat") function guess_bounds_lat(coord[*]:double) @@ -973,13 +1138,13 @@ function guess_bounds_lat(coord[*]:double) ; References ; ; Modification history -; 20190217-A_righ_ma: written. +; 20190217-righi_mattia: written. ; local funcname, scriptname begin funcname = "guess_bounds_lat" - scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl" + scriptname = "esmvaltool/cmorizers/data/formatters/utilities.ncl" bounds = new((/dimsizes(coord), 2/), double) bounds!0 = "lat" @@ -1019,13 +1184,13 @@ function guess_bounds_lon(coord[*]:double) ; References ; ; Modification history -; 20190217-A_righ_ma: written. +; 20190217-righi_mattia: written. 
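`guess_bounds_alt40` above places the interior bounds at the midpoints between adjacent levels and clips the outermost lower bound at zero. A numpy rendering of the same scheme, with illustrative levels:

```python
# Midpoint bounds for an increasing altitude axis; top bound clipped
# at zero. Level values are illustrative.
import numpy as np

alt = np.array([240.0, 720.0, 1200.0])  # m
bounds = np.empty((alt.size, 2))
bounds[1:, 0] = 0.5 * (alt[:-1] + alt[1:])
bounds[:-1, 1] = bounds[1:, 0]
bottom = alt[0] - 0.5 * (alt[1] - alt[0])
bounds[0, 0] = max(bottom, 0.0)
bounds[-1, 1] = alt[-1] + 0.5 * (alt[-1] - alt[-2])
print(bounds)  # [[0. 480.] [480. 960.] [960. 1440.]]
```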
; local funcname, scriptname begin funcname = "guess_bounds_lon" - scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl" + scriptname = "esmvaltool/cmorizers/data/formatters/utilities.ncl" bounds = new((/dimsizes(coord), 2/), double) bounds!0 = "lon" @@ -1053,7 +1218,7 @@ local funcname, scriptname, time_bnds, plev_bnds, lev_bnds, lat_bnds, lon_bnds begin funcname = "guess_coord_bounds" - scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl" + scriptname = "esmvaltool/cmorizers/data/formatters/utilities.ncl" ; Check supported frequency if (all(frequency.ne.(/"3hr", "6hr", "day", "mon", "yr", "fx"/))) then @@ -1085,6 +1250,12 @@ begin continue end if + if (dnames(dd).eq."alt40") then + alt40_bnds = guess_bounds_lev(var&alt40) + ListPush(bounds_list, alt40_bnds) + continue + end if + if (dnames(dd).eq."lat") then lat_bnds = guess_bounds_lat(var&lat) ListPush(bounds_list, lat_bnds) @@ -1131,14 +1302,14 @@ function set_global_atts(obsname:string, ; provided information with default ones (author, host, date, etc.). ; ; Modification history -; 20190202-A_righ_ma: written. +; 20190202-righi_mattia: written. ; local funcname, scriptname, dim_unlim, ii begin funcname = "set_global_atts" - scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl" + scriptname = "esmvaltool/cmorizers/data/formatters/utilities.ncl" global = True global@title = obsname + " data reformatted for the ESMValTool v2.0" @@ -1182,15 +1353,15 @@ procedure write_nc(outfile:string, ; coordinates and boundaries, and append the provided global attributes. ; ; Modification history -; 20190218_A_righ_ma: extend with coordinate bounds. -; 20140123-A_righ_ma: written. +; 20190218_righi_mattia: extend with coordinate bounds. +; 20140123-righi_mattia: written. ; local funcname, scriptname, w, gAtt, dim_names, ndims, dim_sizes, dim_types, \ dim_unlim, ii begin funcname = "write_nc" - scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl" + scriptname = "esmvaltool/cmorizers/data/formatters/utilities.ncl" ; Open file if (fileexists(outfile)) then @@ -1278,13 +1449,13 @@ procedure write_nc_profile(outfile:string, ; Designed to write multiple variables for the vertical profiles data. ; ; Modification history -; 20140422-A_righ_ma: written. +; 20140422-righi_mattia: written. ; local funcname, scriptname, w, coords, cc, jj, locname, locvar, cname begin funcname = "write_nc_profile" - scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl" + scriptname = "esmvaltool/cmorizers/data/formatters/utilities.ncl" ; Open file if (fileexists(outfile)) then @@ -1358,13 +1529,13 @@ function set_size_array() ; References ; ; Modification history -; 20130528-A_righ_ma: written. +; 20130528-righi_mattia: written. ; local funcname, scriptname, minsize, maxsize, nbins, bin, out begin funcname = "set_size_array" - scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl" + scriptname = "esmvaltool/cmorizers/data/formatters/utilities.ncl" ; Size range (0.5 nm - 10 um) minsize = 0.5e-9 @@ -1419,8 +1590,8 @@ function process_EBAS_data(in_vars[*]:string, ; currently accepted. ; ; Modification history -; 20150413-A_righ_ma: improved time selection. -; 20140124-A_righ_ma: written. +; 20150413-righi_mattia: improved time selection. +; 20140124-righi_mattia: written. 
 ;
 local timec, datec, vID, fID, bn, en, head, hh, cline, syear, smonth, sday, \
   scode, comp, matr, unit, scale, fills, lline, cols, data_col, flag_col, \
@@ -1429,7 +1600,7 @@ local timec, datec, vID, fID, bn, en, head, hh, cline, syear, smonth, sday, \
 begin

   funcname = "process_EBAS_data"
-  scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl"
+  scriptname = "esmvaltool/cmorizers/data/formatters/utilities.ncl"

   ; EBAS flags for valid measurements
   ; (see http://www.nilu.no/projects/ccc/flags/index.html)
diff --git a/esmvaltool/cmorizers/data/utilities.py b/esmvaltool/cmorizers/data/utilities.py
new file mode 100644
index 0000000000..ed8b9a9af9
--- /dev/null
+++ b/esmvaltool/cmorizers/data/utilities.py
@@ -0,0 +1,592 @@
+"""Utils module for Python cmorizers."""
+import datetime
+import gzip
+import logging
+import os
+import re
+import shutil
+from contextlib import contextmanager
+from pathlib import Path
+
+import iris
+import numpy as np
+import yaml
+from cf_units import Unit
+from dask import array as da
+from esmvalcore.cmor.table import CMOR_TABLES
+from iris.cube import Cube
+
+from esmvaltool import __file__ as esmvaltool_file
+from esmvaltool import __version__ as version
+
+logger = logging.getLogger(__name__)
+
+REFERENCES_PATH = Path(esmvaltool_file).absolute().parent / 'references'
+
+
+def add_height2m(cube: Cube) -> None:
+    """Add scalar coordinate 'height' with value of 2m to cube in-place.
+
+    Parameters
+    ----------
+    cube: iris.cube.Cube
+        Cube which will get the 2m-height coordinate in-place.
+
+    """
+    add_scalar_height_coord(cube, height=2.)
+
+
+def add_height10m(cube: Cube) -> None:
+    """Add scalar coordinate 'height' with value of 10m to cube in-place.
+
+    Parameters
+    ----------
+    cube: iris.cube.Cube
+        Cube which will get the 10m-height coordinate in-place.
+
+    """
+    add_scalar_height_coord(cube, height=10.)
+
+
+def add_scalar_depth_coord(cube: Cube, depth: float = 0.0) -> None:
+    """Add scalar coordinate 'depth' to cube in-place.
+
+    Parameters
+    ----------
+    cube: iris.cube.Cube
+        Cube which will get the depth coordinate in-place.
+    depth: float, optional (default: 0.0)
+        Value for the depth in meters.
+
+    """
+    logger.debug("Adding depth coordinate (%sm)", depth)
+    depth_coord = iris.coords.AuxCoord(depth,
+                                       var_name='depth',
+                                       standard_name='depth',
+                                       long_name='depth',
+                                       units=Unit('m'),
+                                       attributes={'positive': 'down'})
+    try:
+        cube.coord('depth')
+    except iris.exceptions.CoordinateNotFoundError:
+        cube.add_aux_coord(depth_coord, ())
+
+
+def add_scalar_height_coord(cube: Cube, height: float = 2.0) -> None:
+    """Add scalar coordinate 'height' to cube in-place.
+
+    Parameters
+    ----------
+    cube: iris.cube.Cube
+        Cube which will get the height coordinate in-place.
+    height: float, optional (default: 2.0)
+        Value for the height in meters.
+ + """ + logger.debug("Adding height coordinate (%sm)", height) + height_coord = iris.coords.AuxCoord(height, + var_name='height', + standard_name='height', + long_name='height', + units=Unit('m'), + attributes={'positive': 'up'}) + cube.add_aux_coord(height_coord, ()) + + +def add_typebare(cube, value='bare_ground'): + """Add scalar coordinate 'typebare' with value of `value`.""" + logger.debug("Adding typebare coordinate (%s)", value) + typebare_coord = iris.coords.AuxCoord(value, + var_name='typebare', + standard_name='area_type', + long_name='surface type', + units=Unit('no unit')) + try: + cube.coord('area_type') + except iris.exceptions.CoordinateNotFoundError: + cube.add_aux_coord(typebare_coord, ()) + return cube + + +@contextmanager +def constant_metadata(cube): + """Do cube math without modifying units, attributes etc. + + Context manager that should be used when operating on a data cube + that keeps its metadata constant (units, variable names, attributes etc.). + Use as with any other context managers: `with constant_metadata(cube):` + + + Parameters + ---------- + cube: iris.cube.Cube + data cube to be operated on, keeping its + metadata constant. + + Returns + ------- + iris.cube.Cube + Returns the iris cube that was operated on. + """ + metadata = cube.metadata + yield metadata + cube.metadata = metadata + + +def convert_timeunits(cube, start_year): + """Convert time axis from malformed Year 0. + + Changes time coordinate with CMOR-like units of + e.g. `months since START_YEAR-01-01`. + + Parameters + ---------- + cube: iris.cube.Cube + data cube to have its time coordinate changed. + + start_year: int + integer start year as origin of time coordinate + + Returns + ------- + iris.cube.Cube + Returns the original iris cube with time coordinate reformatted. + """ + if cube.coord('time').units == 'months since 0000-01-01 00:00:00': + real_unit = f'months since {str(start_year)}-01-01 00:00:00' + elif cube.coord('time').units == 'days since 0000-01-01 00:00:00': + real_unit = f'days since {str(start_year)}-01-01 00:00:00' + elif cube.coord('time').units == 'days since 1950-1-1': + real_unit = 'days since 1950-1-1 00:00:00' + else: + real_unit = cube.coord('time').units + cube.coord('time').units = real_unit + return cube + + +def fix_coords(cube, + overwrite_time_bounds=True, + overwrite_lon_bounds=True, + overwrite_lat_bounds=True, + overwrite_lev_bounds=True, + overwrite_airpres_bounds=True): + """Fix coordinates to CMOR standards. + + Fixes coordinates eg time to have correct units, bounds etc; + longitude to be CMOR-compliant 0-360deg; fixes some attributes + and bounds - the user can avert bounds fixing by using supplied + arguments; if bounds are None they will be fixed regardless. + + Parameters + ---------- + cube: iris.cube.Cube + data cube with coordinates to be fixed. + + overwrite_time_bounds: bool (optional) + set to False not to overwrite time bounds. + + overwrite_lon_bounds: bool (optional) + set to False not to overwrite longitude bounds. + + overwrite_lat_bounds: bool (optional) + set to False not to overwrite latitude bounds. + + overwrite_lev_bounds: bool (optional) + set to False not to overwrite depth bounds. + + overwrite_airpres_bounds: bool (optional) + set to False not to overwrite air pressure bounds. + + Returns + ------- + cube: iris.cube.Cube + data cube with fixed coordinates. 
+ """ + # first fix any completely missing coord var names + fix_dim_coordnames(cube) + # fix individual coords + for cube_coord in cube.coords(): + # fix time + if cube_coord.var_name == 'time': + logger.info("Fixing time...") + cube.coord('time').convert_units( + Unit('days since 1950-1-1 00:00:00', calendar='gregorian')) + if overwrite_time_bounds or not cube.coord('time').has_bounds(): + fix_bounds(cube, cube.coord('time')) + + # fix longitude + if cube_coord.var_name == 'lon': + logger.info("Fixing longitude...") + if cube_coord.ndim == 1: + cube = cube.intersection(longitude=(0.0, 360.0)) + if overwrite_lon_bounds or not cube_coord.has_bounds(): + fix_bounds(cube, cube_coord) + + # fix latitude + if cube_coord.var_name == 'lat': + logger.info("Fixing latitude...") + if overwrite_lat_bounds or not cube.coord('latitude').has_bounds(): + fix_bounds(cube, cube.coord('latitude')) + if cube_coord.core_points()[0] > cube_coord.core_points()[-1]: + cube = iris.util.reverse(cube, cube_coord) + + # fix depth + if cube_coord.var_name == 'lev': + logger.info("Fixing depth...") + if overwrite_lev_bounds or not cube.coord('depth').has_bounds(): + fix_bounds(cube, cube.coord('depth')) + + # fix air_pressure + if cube_coord.var_name == 'air_pressure': + logger.info("Fixing air pressure...") + if overwrite_airpres_bounds \ + or not cube.coord('air_pressure').has_bounds(): + fix_bounds(cube, cube.coord('air_pressure')) + + # remove CS + cube.coord('latitude').coord_system = None + cube.coord('longitude').coord_system = None + + return cube + + +def fix_var_metadata(cube, var_info): + """Fix var metadata from CMOR table. + + Sets var_name, long_name, standard_name and units + in accordance with CMOR standards from specific CMOR table. + + Parameters + ---------- + cube: iris.cube.Cube + data cube to have its metadata changed. + + var_info: class + CMOR table object holding the information to be changed in the cube. + Attributes like standard_name, var_name, long_name are used to + set the new metadata in the input cube. + + Returns + ------- + iris.cube.Cube + Returns the masked iris cube. + """ + if var_info.standard_name == '': + cube.standard_name = None + else: + cube.standard_name = var_info.standard_name + cube.var_name = var_info.short_name + cube.long_name = var_info.long_name + set_units(cube, var_info.units) + return cube + + +def flip_dim_coord(cube, coord_name): + """Flip (reverse) dimensional coordinate of cube.""" + logger.info("Flipping dimensional coordinate %s...", coord_name) + coord = cube.coord(coord_name, dim_coords=True) + coord_idx = cube.coord_dims(coord)[0] + coord.points = np.flip(coord.points) + if coord.bounds is not None: + coord.bounds = np.flip(coord.bounds, axis=0) + cube.data = da.flip(cube.core_data(), axis=coord_idx) + + +def read_cmor_config(dataset): + """Read the associated dataset-specific config file.""" + reg_path = os.path.join(os.path.dirname(__file__), 'cmor_config', + dataset + '.yml') + with open(reg_path, 'r', encoding='utf-8') as file: + cfg = yaml.safe_load(file) + cfg['cmor_table'] = \ + CMOR_TABLES[cfg['attributes']['project_id']] + if 'comment' not in cfg['attributes']: + cfg['attributes']['comment'] = '' + return cfg + + +def save_variable(cube, var, outdir, attrs, **kwargs): + """Saver function. + + Saves iris cubes (data variables) in CMOR-standard named files. + + Parameters + ---------- + cube: iris.cube.Cube + data cube to be saved. + + var: str + Variable short_name e.g. ts or tas. + + outdir: str + root directory where the file will be saved. 
+
+    attrs: dict
+        dictionary holding cube metadata attributes like
+        project_id, version etc.
+
+    **kwargs: kwargs
+        Keyword arguments to be passed to `iris.save`
+    """
+    fix_dtype(cube)
+    # CMOR standard
+    try:
+        time = cube.coord('time')
+    except iris.exceptions.CoordinateNotFoundError:
+        time_suffix = None
+    else:
+        if (
+            len(time.points) == 1 and
+            "mon" not in cube.attributes.get('mip', '')
+        ) or cube.attributes.get("frequency") == "yr":
+            year = str(time.cell(0).point.year)
+            time_suffix = '-'.join([year + '01', year + '12'])
+        else:
+            date1 = (
+                f"{time.cell(0).point.year:d}{time.cell(0).point.month:02d}"
+            )
+            date2 = (
+                f"{time.cell(-1).point.year:d}{time.cell(-1).point.month:02d}"
+            )
+            time_suffix = '-'.join([date1, date2])
+
+    name_elements = [
+        attrs['project_id'],
+        attrs['dataset_id'],
+        attrs['modeling_realm'],
+        attrs['version'],
+        attrs['mip'],
+        var,
+    ]
+    if time_suffix:
+        name_elements.append(time_suffix)
+    file_name = '_'.join(name_elements) + '.nc'
+    file_path = os.path.join(outdir, file_name)
+    logger.info('Saving: %s', file_path)
+    status = 'lazy' if cube.has_lazy_data() else 'realized'
+    logger.info('Cube has %s data [lazy is preferred]', status)
+    iris.save(cube, file_path, fill_value=1e20, **kwargs)
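For example (a sketch with hypothetical attribute values), a monthly-mean cube spanning 1990-1999 would be written to a file named after the pattern above:

    # Sketch: illustrative attributes only
    attrs = {'project_id': 'OBS', 'dataset_id': 'ERA-Interim',
             'modeling_realm': 'reanaly', 'version': '1', 'mip': 'Amon'}
    save_variable(cube, 'tas', '/work/cmorized', attrs)
    # -> /work/cmorized/OBS_ERA-Interim_reanaly_1_Amon_tas_199001-199912.nc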
+
+
+def extract_doi_value(tags):
+    """Extract doi(s) from a bibtex entry."""
+    reference_doi = []
+    pattern = r'doi\s*=\s*{([^}]+)}'
+
+    if not isinstance(tags, list):
+        tags = [tags]
+
+    for tag in tags:
+        bibtex_file = REFERENCES_PATH / f'{tag}.bibtex'
+        if bibtex_file.is_file():
+            reference_entry = bibtex_file.read_text()
+            dois = re.findall(pattern, reference_entry)
+            if dois:
+                for doi in dois:
+                    reference_doi.append(f'doi:{doi}')
+            else:
+                reference_doi.append('doi not found')
+                logger.warning(
+                    'The reference file %s does not have a doi.', bibtex_file)
+        else:
+            reference_doi.append('doi not found')
+            logger.warning(
+                'The reference file %s does not exist.', bibtex_file)
+    return ', '.join(reference_doi)
+
+
+def set_global_atts(cube, attrs):
+    """Complete the cmorized file with global metadata."""
+    logger.debug("Setting global metadata...")
+    attrs = dict(attrs)
+    cube.attributes.clear()
+    timestamp = datetime.datetime.utcnow()
+    timestamp_format = "%Y-%m-%d %H:%M:%S"
+    now_time = timestamp.strftime(timestamp_format)
+
+    # Necessary attributes
+    try:
+        glob_dict = {
+            'title': (f"{attrs.pop('dataset_id')} data reformatted for "
+                      f"ESMValTool v{version}"),
+            'version':
+            attrs.pop('version'),
+            'tier':
+            str(attrs.pop('tier')),
+            'source':
+            attrs.pop('source'),
+            'reference':
+            extract_doi_value(attrs.pop('reference')),
+            'comment':
+            attrs.pop('comment'),
+            'user':
+            os.environ.get("USER", "unknown user"),
+            'host':
+            os.environ.get("HOSTNAME", "unknown host"),
+            'history':
+            f'Created on {now_time}',
+            'project_id':
+            attrs.pop('project_id'),
+        }
+    except KeyError as original_error:
+        msg = ("All CMORized datasets need the global attributes "
+               "'dataset_id', 'version', 'tier', 'source', 'reference', "
+               "'comment' and 'project_id' "
+               "specified in the configuration file")
+        raise KeyError(msg) from original_error
+
+    # Additional attributes
+    glob_dict.update(attrs)
+    cube.attributes.globals = glob_dict
+
+
+def fix_bounds(cube, dim_coord):
+    """Reset and fix all bounds."""
+    if len(cube.coord(dim_coord).points) > 1:
+        if cube.coord(dim_coord).has_bounds():
+            cube.coord(dim_coord).bounds = None
+        cube.coord(dim_coord).guess_bounds()
+
+    if cube.coord(dim_coord).has_bounds():
+        cube.coord(dim_coord).bounds = da.array(
+            cube.coord(dim_coord).core_bounds(), dtype='float64')
+    return cube
+
+
+def fix_dim_coordnames(cube):
+    """Perform a check on dim coordinate names."""
+    # first check for CMOR standard coord
+    for coord in cube.coords():
+        # guess the CMOR-standard x, y, z and t axes if not there
+        coord_type = iris.util.guess_coord_axis(coord)
+        try:
+            coord = cube.coord(axis=coord_type)
+        except iris.exceptions.CoordinateNotFoundError:
+            logger.warning(
+                'Multiple coordinates for axis %s. '
+                'This may be an error, especially for regular grids',
+                coord_type)
+            continue
+
+        if coord_type == 'T':
+            coord.var_name = 'time'
+            coord.attributes = {}
+
+        if coord_type == 'X':
+            coord.var_name = 'lon'
+            coord.standard_name = 'longitude'
+            coord.long_name = 'longitude coordinate'
+            coord.units = Unit('degrees')
+            coord.attributes = {}
+
+        if coord_type == 'Y':
+            coord.var_name = 'lat'
+            coord.standard_name = 'latitude'
+            coord.long_name = 'latitude coordinate'
+            coord.units = Unit('degrees')
+            coord.attributes = {}
+
+        if coord_type == 'Z':
+            if coord.var_name == 'depth':
+                coord.standard_name = 'depth'
+                coord.long_name = 'ocean depth coordinate'
+                coord.var_name = 'lev'
+                coord.attributes['positive'] = 'down'
+            if coord.var_name == 'pressure':
+                coord.standard_name = 'air_pressure'
+                coord.long_name = 'pressure'
+                coord.var_name = 'air_pressure'
+                coord.attributes['positive'] = 'up'
+    return cube
+
+
+def fix_dtype(cube):
+    """Fix `dtype` of a cube and its coordinates."""
+    if cube.dtype != np.float32:
+        logger.info("Converting data type of data from '%s' to 'float32'",
+                    cube.dtype)
+        cube.data = cube.core_data().astype(np.float32, casting='same_kind')
+    for coord in cube.coords():
+        if coord.dtype.kind != "U" and coord.dtype != np.float64:
+            logger.info(
+                "Converting data type of coordinate points of '%s' from '%s' "
+                "to 'float64'", coord.name(), coord.dtype)
+            coord.points = coord.core_points().astype(np.float64,
+                                                      casting='same_kind')
+        if coord.has_bounds() and coord.bounds_dtype != np.float64:
+            logger.info(
+                "Converting data type of coordinate bounds of '%s' from '%s' "
+                "to 'float64'", coord.name(), coord.bounds_dtype)
+            coord.bounds = coord.core_bounds().astype(np.float64,
+                                                      casting='same_kind')
+
+
+def roll_cube_data(cube, shift, axis):
+    """Roll a cube data on specified axis."""
+    cube.data = da.roll(cube.core_data(), shift, axis=axis)
+    return cube
+
+
+def set_units(cube, units):
+    """Set units in compliance with cf_unit."""
+    special = {'psu': 1, 'Sv': '1e6 m3 s-1'}
+    if units in special:
+        cube.units = special[units]
+    else:
+        cube.units = Unit(units)
+    return cube
+
+
+def unpack_files_in_folder(folder):
+    """Unpack all compressed and tarred files in a given folder.
+
+    This function flattens the folder hierarchy, both outside and
+    inside the given folder. It also unpacks nested files.
+
+    Parameters
+    ----------
+    folder : str
+        Path to the folder to unpack
+    """
+    decompress = True
+    while decompress:
+        decompress = False
+        files = os.listdir(folder)
+        files.sort()
+        for filename in files:
+            full_path = os.path.join(folder, filename)
+            if os.path.isdir(full_path):
+                logger.info('Moving files from folder %s', filename)
+                folder_files = os.listdir(full_path)
+                for file_path in folder_files:
+                    shutil.move(os.path.join(full_path, file_path), folder)
+                os.rmdir(full_path)
+                decompress = True
+                continue
+            if filename.startswith('.'):
+                continue
+            if not filename.endswith(('.gz', '.tgz', '.tar')):
+                continue
+            logger.info('Unpacking %s', filename)
+            shutil.unpack_archive(full_path, folder)
+            os.remove(full_path)
+            decompress = True
+
+
+def _gunzip(file_name, work_dir):
+    filename = os.path.split(file_name)[-1]
+    filename = re.sub(r"\.gz$", "", filename, flags=re.IGNORECASE)
+
+    with gzip.open(file_name, 'rb') as f_in:
+        with open(os.path.join(work_dir, filename), 'wb') as f_out:
+            shutil.copyfileobj(f_in, f_out)
+
+
+try:
+    shutil.register_unpack_format('gz', [
+        '.gz',
+    ], _gunzip)
+except shutil.RegistryError:
+    logger.debug('Format gz already registered. Skipping...')
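Taken together, a dataset-specific Python cmorizer would typically chain these helpers. The following is a minimal sketch, assuming a configuration file cmor_config/MY-DATASET.yml exists; the dataset name, variable, and paths are placeholders:

    import iris

    cfg = read_cmor_config('MY-DATASET')  # loads cmor_config/MY-DATASET.yml
    var_info = cfg['cmor_table'].get_variable('Amon', 'tas')
    cube = iris.load_cube('/raw/my_dataset_tas.nc')  # placeholder input file
    fix_var_metadata(cube, var_info)
    cube = fix_coords(cube)
    add_height2m(cube)
    set_global_atts(cube, cfg['attributes'])
    save_variable(cube, 'tas', '/work/cmorized', cfg['attributes'])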
diff --git a/esmvaltool/config-developer.yml b/esmvaltool/config-developer.yml
deleted file mode 100644
index c5fc6db9b8..0000000000
--- a/esmvaltool/config-developer.yml
+++ /dev/null
@@ -1,273 +0,0 @@
-###############################################################################
-# Developer's configuration file for the ESMValTool
-###############################################################################
-# This file retains the project- and machine-dependent directory and file name
-# definitions of the input and output data
-# Each dictionary is structured as follows
-#
-# PROJECT:
-#   input_dir:
-#     default:
-#     drs1:
-#     drs2:
-#     etc:
-#   input_file:
-#   output_file:
-#
-# Only the default drs is mandatory, the others are optional
-###############################################################################
----
-
-CMIP6:
-  input_dir:
-    default: '/'
-    DKRZ: '[institute]/[dataset]/[exp]/[ensemble]/[mip]/[short_name]/[grid]/[latestversion]'
-  input_file: '[short_name]_[mip]_[dataset]_[exp]_[ensemble]_[grid]_*.nc'
-  output_file: '[project]_[dataset]_[mip]_[exp]_[ensemble]_[short_name]_[start_year]-[end_year]'
-  cmor_type: 'CMIP6'
-  institutes:
-    'ACCESS-CM2': ['CSIRO-ARCCSS-BoM']
-    'ACCESS-ESM1-5': ['CSIRO']
-    'ARTS-2-3': ['UHH']
-    'AWI-CM-1-1-HR': ['AWI']
-    'AWI-CM-1-1-LR': ['AWI']
-    'AWI-CM-1-1-MR': ['AWI']
-    'AWI-ESM-1-1-LR': ['AWI']
-    'BCC-CSM2-HR': ['BCC']
-    'BCC-CSM2-MR': ['BCC']
-    'BCC-ESM1': ['BCC']
-    'BESM-2-7': ['INPE']
-    'BNU-ESM-1-1': ['BNU']
-    'CAMS-CSM1-0': ['CAMS']
-    'CanESM5': ['CCCma']
-    'CAS-ESM1-0': ['CAS']
-    'CESM2': ['NCAR']
-    'CESM2-SE': ['NCAR']
-    'CESM2-WACCM': ['NCAR']
-    'CIESM': ['THU']
-    'CMCC-CM2-HR4': ['CMCC']
-    'CMCC-CM2-HR5': ['CMCC']
-    'CMCC-CM2-SR5': ['CMCC']
-    'CMCC-CM2-VHR4': ['CMCC']
-    'CMCC-ESM2-HR5': ['CMCC']
-    'CMCC-ESM2-SR5': ['CMCC']
-    'CNRM-CM6-1': ['CNRM-CERFACS']
-    'CNRM-CM6-1-HR': ['CNRM-CERFACS']
-    'CNRM-ESM2-1': ['CNRM-CERFACS']
-    'CNRM-ESM2-1-HR': ['CNRM-CERFACS']
-    'CSIRO-Mk3L-1-3': ['UTAS']
-    'E3SM-1-0': ['E3SM-Project']
-    'EC-Earth3-AerChem': ['EC-Earth-Consortium']
-    'EC-Earth3-CC': ['EC-Earth-Consortium']
-    'EC-Earth3': ['EC-Earth-Consortium']
-    'EC-Earth3-GrIS': ['EC-Earth-Consortium']
-    'EC-Earth3-HR': ['EC-Earth-Consortium']
-    'EC-Earth3-LR': ['EC-Earth-Consortium']
-    'EC-Earth3P': ['EC-Earth-Consortium']
-
'EC-Earth3P-HR': ['EC-Earth-Consortium'] - 'EC-Earth3P-VHR': ['EC-Earth-Consortium'] - 'EC-Earth3-Veg': ['EC-Earth-Consortium'] - 'EC-Earth3-Veg-LR': ['EC-Earth-Consortium'] - 'ECMWF-IFS-HR': ['ECMWF'] - 'ECMWF-IFS-LR': ['ECMWF'] - 'ECMWF-IFS-MR': ['ECMWF'] - 'EMAC-2-53-AerChem': ['MESSy-Consortium'] - 'EMAC-2-53-Vol': ['MESSy-Consortium'] - 'FGOALS-f3-H': ['CAS'] - 'FGOALS-f3-L': ['CAS'] - 'FGOALS-g3': ['CAS'] - 'FIO-ESM-2-0': ['FIO-QLNM'] - 'GFDL-AM4': ['NOAA-GFDL'] - 'GFDL-CM4C192': ['NOAA-GFDL'] - 'GFDL-CM4': ['NOAA-GFDL'] - 'GFDL-ESM2M': ['NOAA-GFDL'] - 'GFDL-ESM4': ['NOAA-GFDL'] - 'GFDL-OM4p5B': ['NOAA-GFDL'] - 'GISS-E2-1-G': ['NASA-GISS'] - 'GISS-E2-1-H': ['NASA-GISS'] - 'GISS-E2-1-MA-G': ['NASA-GISS'] - 'GISS-E3-G': ['NASA-GISS'] - 'HadGEM3-GC31-HH': ['MOHC', 'NERC'] - 'HadGEM3-GC31-HM': ['MOHC', 'NERC'] - 'HadGEM3-GC31-LL': ['MOHC'] - 'HadGEM3-GC31-LM': ['MOHC'] - 'HadGEM3-GC31-MH': ['MOHC'] - 'HadGEM3-GC31-MM': ['MOHC'] - 'ICON-ESM-LR': ['MPI-M'] - 'IITM-ESM': ['CCCR-IITM'] - 'INM-CM4-8': ['INM'] - 'INM-CM5-0': ['INM'] - 'INM-CM5-H': ['INM'] - 'IPSL-CM6A-ATM-HR': ['IPSL'] - 'IPSL-CM6A-LR': ['IPSL'] - 'KACE-1-0-G': ['NIMS-KMA'] - 'KIOST-ESM': ['KIOST'] - 'LBLRTM-12-8': ['AER'] - 'MCM-UA-1-0': ['UA'] - 'MIROC6': ['MIROC'] - 'MIROC-ES2H': ['MIROC'] - 'MIROC-ES2L': ['MIROC'] - 'MPI-ESM-1-2-HAM': ['HAMMOZ-Consortium'] - 'MPI-ESM1-2-HR': ['MPI-M', 'DWD', 'DKRZ'] - 'MPI-ESM1-2-LR': ['MPI-M', 'AWI'] - 'MRI-AGCM3-2': ['MRI'] - 'MRI-ESM2-0': ['MRI'] - 'NESM3': ['NUIST'] - 'NICAM16-7S': ['MIROC'] - 'NICAM16-8S': ['MIROC'] - 'NICAM16-9D-L78': ['MIROC'] - 'NICAM16-9S': ['MIROC'] - 'NorESM2-HH': ['NCC'] - 'NorESM2-LMEC': ['NCC'] - 'NorESM2-LME': ['NCC'] - 'NorESM2-LM': ['NCC'] - 'NorESM2-MH': ['NCC'] - 'NorESM2-MM': ['NCC'] - 'PCMDI-test-1-0': ['PCMDI'] - 'RRTMG-LW-4-91': ['AER'] - 'RRTMG-SW-4-02': ['AER'] - 'RTE-RRTMGP-181204': ['AER'] - 'SAM0-UNICON': ['SNU'] - 'TaiESM1': ['AS-RCEC'] - 'UKESM1-0-LL': ['MOHC', 'NERC', 'NIMS-KMA', 'NIWA'] - 'UKESM1-0-MMh': ['MOHC', 'NERC'] - 'UofT-CCSM4': ['UofT'] - 'VRESM-1-0': ['CSIR-CSIRO'] - - -CMIP5: - cmor_strict: true - input_dir: - default: '/' - BADC: '[institute]/[dataset]/[exp]/[frequency]/[modeling_realm]/[mip]/[ensemble]/latest/[short_name]' - CP4CDS: '[institute]/[dataset]/[exp]/[frequency]/[modeling_realm]/[mip]/[ensemble]/[short_name]/latest/' - DKRZ: '[institute]/[dataset]/[exp]/[frequency]/[modeling_realm]/[mip]/[ensemble]/[latestversion]/[short_name]' - ETHZ: '[exp]/[mip]/[short_name]/[dataset]/[ensemble]/' - SMHI: '[dataset]/[ensemble]/[exp]/[frequency]' - BSC: '[type]/[project]/[exp]/[dataset.lower]' - input_file: '[short_name]_[mip]_[dataset]_[exp]_[ensemble]_*.nc' - input_fx_dir: - default: '/' - BADC: '[institute]/[dataset]/[exp]/fx/[modeling_realm]/fx/r0i0p0/[latestversion]/[fx_var]' - CP4CDS: '[institute]/[dataset]/[exp]/fx/[modeling_realm]/fx/r0i0p0/[fx_var]/latest/' - DKRZ: '[institute]/[dataset]/[exp]/fx/[modeling_realm]/fx/r0i0p0/[latestversion]/[fx_var]' - ETHZ: '[exp]/fx/[fx_var]/[dataset]/r0i0p0' - input_fx_file: '[fx_var]_fx_[dataset]_[exp]_r0i0p0.nc' - fx_mip_change: - 'areacella': 'Amon' - 'areacello': 'Omon' - 'basin': 'Omon' - 'deptho': 'Omon' - 'mrsofc': 'Lmon' - 'orog': 'Amon' - 'rootd': 'Lmon' - 'sftgif': 'Lmon' - 'sftlf': 'Amon' - 'sftof': 'Omon' - 'volcello': 'Omon' - output_file: '[project]_[dataset]_[mip]_[exp]_[ensemble]_[short_name]_[start_year]-[end_year]' - institutes: - 'ACCESS1-0': ['CSIRO-BOM'] - 'ACCESS1-3': ['CSIRO-BOM'] - 'bcc-csm1-1': ['BCC'] - 'bcc-csm1-1-m': ['BCC'] - 'BNU-ESM': ['BNU'] - 'CanAM4': 
['CCCma'] - 'CanCM4': ['CCCma'] - 'CanESM2': ['CCCma'] - 'CCSM4': ['NCAR'] - 'CESM1-BGC': ['NSF-DOE-NCAR'] - 'CESM1-CAM5': ['NSF-DOE-NCAR'] - 'CESM1-CAM5-1-FV2': ['NSF-DOE-NCAR'] - 'CESM1-FASTCHEM': ['NSF-DOE-NCAR'] - 'CESM1-WACCM': ['NSF-DOE-NCAR'] - 'CFSv2-2011': ['COLA-CFS', 'NOAA-NCEP'] - 'CMCC-CESM': ['CMCC'] - 'CMCC-CM': ['CMCC'] - 'CMCC-CMS': ['CMCC'] - 'CNRM-CM5': ['CNRM-CERFACS'] - 'CNRM-CM5-2': ['CNRM-CERFACS'] - 'CSIRO-Mk3-6-0': ['CSIRO-QCCCE'] - 'EC-EARTH': ['ICHEC'] - 'FGOALS-g2': ['LASG-CESS'] - 'FGOALS-gl': ['LASG-IAP'] - 'FGOALS-s2': ['LASG-IAP'] - 'FIO-ESM': ['FIO'] - 'fio-esm': ['FIO'] - 'GEOS-5': ['NASA-GMAO'] - 'GFDL-CM2p1': ['NOAA-GFDL'] - 'GFDL-CM3': ['NOAA-GFDL'] - 'GFDL-ESM2G': ['NOAA-GFDL'] - 'GFDL-ESM2M': ['NOAA-GFDL'] - 'GFDL-HIRAM-C180': ['NOAA-GFDL'] - 'GFDL-HIRAM-C360': ['NOAA-GFDL'] - 'GISS-E2-H': ['NASA-GISS'] - 'GISS-E2-H-CC': ['NASA-GISS'] - 'GISS-E2-R': ['NASA-GISS'] - 'GISS-E2-R-CC': ['NASA-GISS'] - 'HadCM3': ['MOHC'] - 'HadGEM2-A': ['MOHC'] - 'HadGEM2-AO': ['NIMR-KMA'] - 'HadGEM2-CC': ['MOHC'] - 'HadGEM2-ES': ['INPE', 'MOHC'] - 'inmcm4': ['INM'] - 'IPSL-CM5A-LR': ['IPSL'] - 'IPSL-CM5A-MR': ['IPSL'] - 'IPSL-CM5B-LR': ['IPSL'] - 'MIROC-ESM': ['MIROC'] - 'MIROC-ESM-CHEM': ['MIROC'] - 'MIROC4h': ['MIROC'] - 'MIROC5': ['MIROC'] - 'MPI-ESM-LR': ['MPI-M'] - 'MPI-ESM-MR': ['MPI-M'] - 'MPI-ESM-P': ['MPI-M'] - 'MRI-AGCM3-2H': ['MRI'] - 'MRI-AGCM3-2S': ['MRI'] - 'MRI-CGCM3': ['MRI'] - 'MRI-ESM1': ['MRI'] - 'NICAM-09': ['NICAM'] - 'NorESM1-M': ['NCC'] - 'NorESM1-ME': ['NCC'] - -OBS: - cmor_strict: false - input_dir: - default: 'Tier[tier]/[dataset]' - BSC: '[type]/[institute.lower]/[dataset.lower]/[freq_folder]/[short_name][freq_base]' - input_file: - default: '[project]_[dataset]_[type]_[version]_[mip]_[short_name]_*.nc' - BSC: '[short_name]_*.nc' - input_fx_dir: - default: 'Tier[tier]/[dataset]' - input_fx_file: - default: '[project]_[dataset]_[type]_[version]_fx_[fx_var].nc' - output_file: '[project]_[dataset]_[type]_[version]_[mip]_[short_name]_[start_year]-[end_year]' - cmor_type: 'CMIP5' - -obs4mips: - cmor_strict: false - input_dir: - default: 'Tier[tier]/[dataset]' - input_file: '[short_name]_[dataset]_[level]_[version]_*.nc' - input_fx_dir: - default: 'Tier[tier]/[dataset]' - input_fx_file: - default: '[project]_[dataset]_fx_[fx_var].nc' - output_file: '[project]_[dataset]_[level]_[version]_[short_name]_[start_year]-[end_year]' - cmor_type: 'CMIP6' - cmor_path: 'obs4mips' - -ana4mips: - cmor_strict: false - input_dir: - default: 'Tier[tier]/[dataset]' - input_file: '[short_name]_[mip]_[type]_[dataset]_*.nc' - output_file: '[project]_[mip]_[type]_[dataset]_[short_name]_[start_year]-[end_year]' - cmor_type: 'CMIP5' - -EMAC: - input_dir: - default: '[dataset]' - input_file: '' - output_file: '[dataset]_[ensemble]_[short_name]_[start_year]-[end_year]' - cmor_type: 'CMIP5' diff --git a/esmvaltool/config-logging.yml b/esmvaltool/config-logging.yml deleted file mode 100644 index b78b308ed8..0000000000 --- a/esmvaltool/config-logging.yml +++ /dev/null @@ -1,33 +0,0 @@ -# Logger configuration ---- - -version: 1 -disable_existing_loggers: false -formatters: - console: - format: '%(asctime)s UTC [%(process)d] %(levelname)-7s %(message)s' - brief: - format: '%(levelname)-7s [%(process)d] %(message)s' - debug: - format: '%(asctime)s UTC [%(process)d] %(levelname)-7s %(name)s:%(lineno)s %(message)s' -handlers: - console: - class: logging.StreamHandler - level: INFO - formatter: console - stream: ext://sys.stdout - simple_log_file: - class: logging.FileHandler - level: 
INFO
-    formatter: brief
-    filename: main_log.txt
-    mode: w
-  debug_log_file:
-    class: logging.FileHandler
-    level: DEBUG
-    formatter: debug
-    filename: main_log_debug.txt
-    mode: w
-root:
-  level: DEBUG
-  handlers: [console, simple_log_file, debug_log_file]
diff --git a/esmvaltool/config-references.yml b/esmvaltool/config-references.yml
index 417ee63d50..79a85c9866 100644
--- a/esmvaltool/config-references.yml
+++ b/esmvaltool/config-references.yml
@@ -1,3 +1,4 @@
+---
 # Full list of authors and references for the currently implemented
 # diagnostics.
 #
@@ -7,605 +8,792 @@ authors:

   # Core Team (PI and core developers)
-  eyri_ve:
+  eyring_veronika:
     name: Eyring, Veronika
     institute: DLR, Germany
-    email: veronika.eyring 'at' dlr.de
-  ande_bo:
+    email: veronika.eyring@dlr.de
+    orcid: https://orcid.org/0000-0002-6887-4885
+  andela_bouwe:
     name: Andela, Bouwe
     institute: NLeSC, Netherlands
-    email: b.andela 'atcecenter.nl
-  broe_bj:
+    email: b.andela@esciencecenter.nl
+    orcid: https://orcid.org/0000-0001-9005-8940
+    github: bouweandela
+  broetz_bjoern:
     name: Broetz, Bjoern
     institute: DLR, Germany
-    email: bjoern.broetz 'at' dlr.de
-  demo_le:
+    email: bjoern.broetz@dlr.de
+    orcid:
+  debeire_kevin:
+    name: Debeire, Kevin
+    institute: DLR, Germany
+    email: kevin.debeire@dlr.de
+    orcid: https://orcid.org/0000-0001-6006-8750
+  demora_lee:
     name: de Mora, Lee
     institute: PML, UK
-    email: ledm 'at' pml.ac.uK
-  dros_ni:
+    email: ledm@pml.ac.uk
+    orcid: https://orcid.org/0000-0002-5080-3149
+  drost_niels:
     name: Drost, Niels
     institute: NLeSC, Netherlands
-    email: n.drost 'at' esciencecenter.nl
-  kold_ni:
+    email: n.drost@esciencecenter.nl
+    orcid: https://orcid.org/0000-0001-9795-7981
+  koldunov_nikolay:
     name: Koldunov, Nikolay
     institute: AWI, Germany
-    email: nikolay.koldunov 'at' awi.de
-  laue_ax:
+    email: nikolay.koldunov@awi.de
+    orcid: https://orcid.org/0000-0002-3365-8146
+  lauer_axel:
     name: Lauer, Axel
     institute: DLR, Germany
-    email: axel.lauer 'at' dlr.de
-  muel_bn:
+    email: axel.lauer@dlr.de
+    orcid: https://orcid.org/0000-0002-9270-1044
+  mueller_benjamin:
     name: Mueller, Benjamin
     institute: LMU, Germany
-    email: b.mueller 'at' iggf.geo.uni-muenchen.de
-  pred_va:
+    email: b.mueller@iggf.geo.uni-muenchen.de
+    orcid:
+  predoi_valeriu:
     name: Predoi, Valeriu
     institute: URead, UK
-    email: valeriu.predoi 'at' ncas.ac.uk
-  righ_ma:
+    email: valeriu.predoi@ncas.ac.uk
+    orcid: https://orcid.org/0000-0002-9729-6578
+    github: valeriupredoi
+  righi_mattia:
     name: Righi, Mattia
     institute: DLR, Germany
-    email: mattia.righi 'at' dlr.de
-  schl_ma:
+    email: mattia.righi@dlr.de
+    orcid: https://orcid.org/0000-0003-3827-5950
+  schlund_manuel:
     name: Schlund, Manuel
     institute: DLR, Germany
-    email: manuel.schlund 'at' dlr.de
-  vega_ja:
+    email: manuel.schlund@dlr.de
+    orcid: https://orcid.org/0000-0001-5251-0158
+    github: schlunma
+  vegas-regidor_javier:
     name: Vegas-Regidor, Javier
     institute: BSC, Spain
-    email: javier.vegas 'at' bsc.es
+    email: javier.vegas@bsc.es
+    orcid: https://orcid.org/0000-0003-0096-4291
+  zimmermann_klaus:
+    name: Zimmermann, Klaus
+    institute: SMHI, Sweden
+    email: klaus.zimmermann@smhi.se
+    orcid: https://orcid.org/0000-0003-3994-2057
+    github: zklaus

   # Development team
-  anav_al:
+  abdollahi_banafsheh:
+    name: Abdollahi, Banafsheh
+    institute: Delft University of Technology, The Netherlands
+    orcid: https://orcid.org/0000-0002-8969-3821
+  aerts_jerom:
+    name: Aerts, Jerom
+    institute: University of Delft, Netherlands
+    orcid: https://orcid.org/0000-0003-0157-4818
+  alidoost_sarah:
+    name:
Alidoost, Sarah + institute: NLeSC, Netherlands + orcid: https://orcid.org/0000-0001-8407-6472 + anav_alessandro: name: Anav, Alessandro institute: Univ. of Exeter, UK - email: a.anav 'at' exeter.ac.uk - andr_ol: + orcid: https://orcid.org/0000-0002-4217-7563 + andrews_oliver: name: Andrews, Oliver institute: Tyndall Centre, UK - email: oliverdandrews 'at' googlemail.com - arno_en: + orcid: https://orcid.org/0000-0002-1921-475X + arnone_enrico: name: Arnone, Enrico institute: ISAC-CNR, Torino, Italy - email: e.arnone 'at' isac.cnr.it - bell_om: + orcid: https://orcid.org/0000-0001-6740-5051 + bellprat_omar: name: Bellprat, Omar institute: BSC, Spain - email: omar.bellprat 'at' bsc.es - berg_pe: + orcid: https://orcid.org/0000-0001-6434-1793 + berg_peter: name: Berg, Peter institute: SMHI, Sweden - email: peter.berg 'at' smhi.se - bock_ls: + orcid: https://orcid.org/0000-0002-1469-2568 + bock_lisa: name: Bock, Lisa institute: DLR, Germany - email: lisa.bock 'at' dlr.de - bojo_dr: + orcid: https://orcid.org/0000-0001-7058-5938 + github: LisaBock + bodas-salcedo_alejandro: + name: Bodas-Salcedo, Alejandro + institute: MetOffice, UK + orcid: + bojovic_dragana: name: Bojovic, Dragana institute: BSC, Spain - email: dragana.bojovic 'at' bsc.es - cagn_ch: + orcid: https://orcid.org/0000-0001-7354-1885 + bonnet_pauline: + name: Bonnet, Pauline + institute: DLR, Germany + orcid: https://orcid.org/0000-0003-3780-0784 + github: Paulinebonnet111 + brunner_lukas: + name: Brunner, Lukas + institute: ETH Zurich, Switzerland + orcid: https://orcid.org/0000-0001-5760-4524 + cagnazzo_chiara: name: Cagnazzo, Chiara institute: CNR, Italy - email: chiara.cagnazzo 'at' cnr.it - caro_lo: + orcid: https://orcid.org/0000-0002-2054-0448 + camphuijsen_jaro: + name: Camphuijsen, Jaro + institute: NLeSC, Netherlands + orcid: https://orcid.org/0000-0002-8928-7831 + capistrano_vinicius: + name: Capistrano, Vinicius + institute: INPE, Brazil + orcid: https://orcid.org/0000-0001-8653-2312 + caron_louis-philippe: name: Caron, Louis-Philippe institute: BSC, Spain - email: louis-philippe.caron 'at' bsc.es - chen_ja: + orcid: https://orcid.org/0000-0001-5221-0147 + chen_jack: name: Chen, Jack institute: NCAR, USA - email: cchen 'at' ucar.edu - cion_ir: + orcid: + cionni_irene: name: Cionni, Irene institute: ENEA, Italy - email: irene.cionni 'at' enea.it - cort_ni: - name: Nicola Cortesi + orcid: https://orcid.org/0000-0002-0591-9193 + cortesi_nicola: + name: Cortesi, Nicola + institute: BSC, Spain + orcid: https://orcid.org/0000-0002-1442-9225 + cos_josep: + name: Cos, Josep institute: BSC, Spain - email: nicola.cortesi 'at' bsc.es - crez_ba: + orcid: https://orcid.org/0000-0002-3050-2306 + github: pepcos + crezee_bas: name: Crezee, Bas institute: ETH Zurich, Switzerland - email: bas.crezee 'at' env.ethz.ch - davi_ed: + orcid: https://orcid.org/0000-0002-1774-1126 + github: bascrezee + daniels_emma: + name: Daniels, Emma + institute: KNMI, Netherlands + orcid: + dalvi_mohit: + name: Dalvi, Mohit + institute: MetOffice, UK + orcid: https://orcid.org/0000-0003-1669-0811 + davin_edouardleopold: name: Davin, Edouard Leopold institute: ETH Zurich, Switzerland - email: edouard.davin 'at' env.ethz.ch - davi_pa: + orcid: https://orcid.org/0000-0003-3322-9330 + davini_paolo: name: Davini, Paolo institute: CNR-ISAC, Italy - email: p.davini 'at' isac.cnr.it - ehbr_ca: + orcid: https://orcid.org/0000-0003-3389-7849 + docquier_david: + name: Docquier, David + institute: UC Louvain, Belgium + orcid: 0000-0002-5720-4253 + ehbrecht_carsten: 
name: Ehbrecht, Carsten institute: DKRZ, Germany - email: ehbrecht 'at' dkrz.de - fran_fr: + orcid: + frank_franziska: name: Frank, Franziska institute: DLR, Germany - email: franziska.frank 'at' dlr.de - fuck_ne: - name: Neven Fuckar + orcid: https://orcid.org/0000-0002-2406-4936 + winterstein_franziska: + name: Winterstein, Franziska + institute: DLR, Germany + orcid: https://orcid.org/0000-0002-2406-4936 + fuckar_neven: + name: Fuckar, Neven institute: BSC, Spain - email: neven.fuckar 'at' bsc.es - gain_al: + orcid: + gainusa-bogdan_alina: name: Gainusa-Bogdan, Alina institute: France - gall_be: + orcid: https://orcid.org/0000-0001-6147-1883 + gallego-elvira_belen: name: Gallego-Elvira, Belen institute: NERC, UK - email: belgal 'at' nerc.ac.uk - gett_an: + orcid: https://orcid.org/0000-0002-2328-0354 + galytska_evgenia: + name: Galytska, Evgenia + institute: IUP, Bremen + orcid: https://orcid.org/0000-0001-6575-1559 + gettelman_andrew: name: Gettelman, Andrew institute: NCAR, USA - email: andrew 'at' ucar.edu - gott_kl: + orcid: https://orcid.org/0000-0002-8284-2599 + gier_bettina: + name: Gier, Bettina + institute: University of Bremen, Germany + orcid: https://orcid.org/0000-0002-2928-8664 + github: bettina-gier + gonzalez-reviriego_nube: + name: Gonzalez-Reviriego, Nube + institute: BSC, Spain + orcid: https://orcid.org/0000-0002-5919-6701 + gottschaldt_klaus-dirk: name: Gottschaldt, Klaus-Dirk institute: DLR, Germany - email: klaus-dirk.gottschaldt 'at' dlr.de - guem_vi: - name: Virginie Guemas + orcid: https://orcid.org/0000-0002-2046-6137 + guemas_virginie: + name: Guemas, Virginie institute: BSC, Spain - email: virginie.guemas 'at' bsc.es - hage_st: + orcid: https://orcid.org/0000-0002-6340-3558 + hagemann_stefan: name: Hagemann, Stefan institute: MPI-M, Germany - email: stefan.hagemann 'at' mpimet.mpg.de - hann_ul: + orcid: https://orcid.org/0000-0001-5444-2945 + hansson_ulf: name: Hansson, Ulf institute: SMHI, Sweden - hard_jo: - name: von Hardenberg, Jost - institute: ISAC-CNR, Torino, Italy - email: j.vonhardenberg 'at' isac.cnr.it - hass_bg: + orcid: + hardacre_catherine: + name: Hardacre, Catherine + institute: University of Canterbury, New Zealand + orcid: https://orcid.org/0000-0001-9093-4656 + hassler_birgit: name: Hassler, Birgit institute: DLR, Germany - email: birgit.hassler 'at' dlr.de - hemp_ni: + orcid: https://orcid.org/0000-0003-2724-709X + github: hb326 + hempelmann_nils: name: Hempelmann, Nils institute: IPSL, France - email: nils.hempelmann 'at' ipsl.jussieu.fr - hunt_al: + orcid: + heuer_helge: + name: Heuer, Helge + institute: DLR, Germany + email: helge.heuer@dlr.de + orcid: https://orcid.org/0000-0003-2411-7150 + hogan_emma: + name: Hogan, Emma + institute: MetOffice, UK + orcid: + hunter_alasdair: name: Hunter, Alasdair institute: BSC, Spain - email: alasdair.hunter 'at' bsc.es - hutj_ro: + orcid: https://orcid.org/0000-0001-8365-3709 + hutjes_ronald: name: Hutjes, Ronald institute: Univ. 
of Wageningen, NL - email: ronald.hutjes 'at' wur.nl - juck_ma: + orcid: + juckes_martin: name: Juckes, Martin institute: BADC, UK - email: martin.juckes 'at' stfc.ac.uk - kady_ni: + orcid: + kadygrov_nikolay: name: Kadygrov, Nikolay institute: IPSL, France - email: nikolay.kadygrov 'at' ipsl.jussieu.fr - kind_st: + orcid: + kalverla_peter: + name: Kalverla, Peter + institute: NLeSC, Netherlands + orcid: https://orcid.org/0000-0002-5025-7862 + kindermann_stephan: name: Kindermann, Stephan institute: DKRZ, Germany - email: kindermann 'at' dkrz.de - kras_jo: + orcid: https://orcid.org/0000-0001-9335-1093 + king_robert: + name: King, Robert + institute: MetOffice, UK + orcid: + koirala_sujan: + name: Koirala, Sujan + institute: MPI-BGC, Germany + orcid: https://orcid.org/0000-0001-5681-1986 + kraft_jeremy: + name: Kraft, Jeremy + institute: DLR, Germany + orcid: + github: jeremykraftdlr + krasting_john: name: Krasting, John institute: NOAA, USA - email: john.krasting 'at' noaa.gov - kune_dm: - name: Kunert, Dominik + orcid: https://orcid.org/0000-0002-4650-9844 + kuehbacher_birgit: + name: Kuehbacher, Birgit institute: DLR, Germany - email: dominik.kunert 'at' dlr.de - levi_ri: + email: birgit.kuehbacher@dlr.de + orcid: + lejeune_quentin: + name: Lejeune, Quentin + institute: Climate Analytics, Germany + orcid: https://orcid.org/0000-0001-9152-3197 + lembo_valerio: + name: Lembo, Valerio + institute: CEN, University of Hamburg, Germany + orcid: https://orcid.org/0000-0001-6085-5914 + levine_richard: name: Levine, Richard institute: MetOffice, UK - email: richard.levine 'at' metoffice.gov.uk - litt_bi: + orcid: https://orcid.org/0000-0003-1210-0415 + lillis_jon: + name: Lillis, Jon + institute: MetOffice, UK + orcid: + lindenlaub_lukas: + name: Lindenlaub, Lukas + institute: University of Bremen, Germany + orcid: https://orcid.org/0000-0001-6349-9118 + github: lukruh + little_bill: name: Little, Bill institute: MetOffice, UK - email: bill.little 'at' metoffice.gov.uk - lled_ll: - name: Lledó, Llorenç + orcid: + lledo_llorenc: + name: Lledo, Llorenc institute: BSC, Spain - email: llorenç.lledo 'at' bsc.es - lore_ru: + orcid: + loosveldt-tomas_saskia: + name: Loosveldt-Tomas, Saskia + institute: BSC, Spain + orcid: https://orcid.org/0000-0002-7530-7053 + github: sloosvel + lorenz_ruth: name: Lorenz, Ruth institute: ETH Zurich, Switzerland - email: ruth.lorenz 'at' env.ethz.ch - lova_to: + orcid: https://orcid.org/0000-0002-3986-1268 + github: ruthlorenz + lovato_tomas: name: Lovato, Tomas institute: CMCC, IT - email: tomas.lovato 'at' cmcc.it - maek_ja: + orcid: https://orcid.org/0000-0002-5188-6767 + makela_jarmo: name: Makela, Jarmo - malo_er: + institute: FMI, Finland + orcid: + malinina_elizaveta: + name: Malinina, Elizaveta + institute: CCCma, Canada + orcid: https://orcid.org/0000-0002-4102-2877 + github: malininae + maloney_eric: name: Maloney, Eric - institute: Colorado State University - email: emaloney 'at' atmos.colostate.edu - manu_ni: + institute: Colorado State University, USA + orcid: https://orcid.org/0000-0002-2660-2611 + manubens_nicolau: name: Manubens, Nicolau institute: BSC, Spain - email: nicolau.manubens 'at' bsc.es - maso_er: + orcid: https://orcid.org/0000-0002-0114-508X + mason_erik: name: Mason, Erik institute: NOAA, USA - email: erik.mason 'at' noaa.gov - mass_fr: + orcid: + massonnet_francois: name: Massonnet, Francois - institute: Spain - mart_gi: + institute: Universite Catholique de Louvain, Belgium + orcid: + martin_gill: name: Martin, Gill institute: MetOffice, 
UK - email: gill.martin 'at' metoffice.gov.uk - mavi_ir: + orcid: https://orcid.org/0000-0003-0851-6020 + mavilia_irene: name: Mavilia, Irene institute: ISAC-CNR, Bologna, Italy - email: i.mavilia 'at' isac.cnr.it - mell_fe: + orcid: https://orcid.org/0000-0002-4938-2906 + mello_felipe: name: Mello, Felipe institute: INPE, Brazil - mohi_da: - name: Dalvi, Mohit + orcid: https://orcid.org/0000-0002-8832-2869 + mohr_christianwilhelm: + name: Mohr, Christian Wilhelm + institute: Cicero, Norway + orcid: https://orcid.org/0000-0003-2656-1802 + moreno-chamarro_eduardo: + name: Moreno-Chamarro, Eduardo + institute: BSC, Spain + orcid: + github: emchamarro + munday_gregory: + name: Munday, Gregory institute: MetOffice, UK - email: mohit.dalvi 'at' metoffice.gov.uk - niku_gr: + orcid: https://orcid.org/0000-0003-4750-9923 + github: mo-gregmunday + nikulin_grigory: name: Nikulin, Grigory institute: SMHI, Sweden - email: grigory.nikulin 'at' smhi.se - nobr_pa: + orcid: https://orcid.org/0000-0002-4226-8713 + nobre_paulo: name: Nobre, Paulo institute: INPE, Brazil - gonz_nu: - name: González-Reviriego, Nube - institute: BSC, Spain - email: nube.gonzalez 'at' bsc.es - oliv_ar: + orcid: https://orcid.org/0000-0001-9061-4556 + oliveira_arildo: name: Oliveira, Arildo institute: INPE, Brazil - phil_ad: + pandde_amarjiit: + name: Pandde, Amarjiit + institute: Univ. of Arizona, USA + orcid: + pearce_francesca: + name: Pearce, Francesca + institute: MetOffice, UK + orcid: + perez-zanon_nuria: + name: Perez-Zanon, Nuria + institute: BSC, Spain + orcid: + pelupessy_inti: + name: Pelupessy, Inti + institute: Netherlands eScience Center + orcid: + phillips_adam: name: Phillips, Adam institute: NCAR, USA - email: asphilli 'at' ucar.edu - pugh_th: + orcid: https://orcid.org/0000-0003-4859-8585 + pugh_thomas: name: Pugh, Thomas institute: KIT, Germany - email: thomas.pugh 'at' kit.edu - read_si: + orcid: + read_simon: name: Read, Simon institute: Univ. of Reading, UK - email: s.read 'at' reading.ac.uk - ring_ma: + orcid: + ringer_mark: name: Ringer, Mark institute: MetOffice, UK - email: mark.ringer 'at' metoffice.gov.uk - rio_ca: + orcid: https://orcid.org/0000-0003-4014-2583 + rio_catherine: name: Rio, Catherine institute: IPSL, France - email: catherine.rio 'at' lmd.jussieu.fr - roeh_ro: + orcid: https://orcid.org/0000-0002-6590-7733 + roberts_charles: + name: Roberts, Charles + institute: University of Reading, UK + orcid: https://orcid.org/0000-0002-1147-8961 + roehrig_romain: name: Roehrig, Romain institute: MeteoFr, France - email: romain.roehrig 'at' meteo.fr - senf_da: - name: Senftleben, Daniel + rol_evert: + name: Rol, Evert + orcid: https://orcid.org/0000-0001-8357-4453 + russell_joellen: + name: Russell, Joellen + institute: Univ. of Arizona, USA + orcid: + sanchez-gomez_emilia: + name: Sanchez, Emilia + institute: CERFACS, France + orcid: + sandstad_marit: + name: Sandstad, Marit + institute: Cicero, Norway + orcid: + sarauer_ellen: + name: Sarauer, Ellen institute: DLR, Germany - email: daniel.senftleben 'at' dlr.de - serv_fe: + orcid: + github: ellensarauer + serva_federico: name: Serva, Federico institute: CNR, Italy - email: federico.serva 'at' artov.isac.cnr.it - somm_ph: + orcid: https://orcid.org/0000-0002-7118-0817 + github: fserva + smeets_stef: + name: Smeets, Stef + institute: NLeSC, Netherlands + orcid: https://orcid.org/0000-0002-5413-9038 + sommer_philipp: name: Sommer, Philipp institute: Univ. 
of Hamburg, Germany - email: 'at' - stac_to: + orcid: + stacke_tobias: name: Stacke, Tobias institute: MPI-M, Germany - email: tobias.stacke 'at' mpimet.mpg.de - ster_an: + orcid: https://orcid.org/0000-0003-4637-5337 + sterl_andreas: name: Sterl, Andreas institute: KNMI, Netherlands - email: sterl 'at' knmi.nl - swam_ra: + orcid: https://orcid.org/0000-0003-3457-0434 + swaminathan_ranjini: name: Swaminathan, Ranjini institute: University of Reading, UK - email: r.swaminathan 'at' reading.ac.uk - teic_ca: + orcid: https://orcid.org/0000-0001-5853-2673 + teichmann_claas: name: Teichmann, Claas - institute: CSC2, Germany - email: claas.teichmann 'at' hzg.de - torr_ve: - name: Veronica Torralba + institute: GERICS, Hamburg, Germany + orcid: + torralba_veronica: + name: Torralba, Veronica institute: BSC, Spain - email: veronica.torralba 'at' bsc.es - tsus_yo: + orcid: + tsushima_yoko: name: Tsushima, Yoko institute: MetOffice, UK - email: yoko.tsushima 'al' metoffice.gov.uk - vanu_be: + orcid: + vanulft_bert: name: van Ulft, Bert institute: KNMI, Netherlands - email: bert.van.ulft 'at' knmi.nl - vini_ca: - name: Vinicius, Capistrano - institute: INPE, Brazil - walt_je: + orcid: https://orcid.org/0000-0001-8653-2312 + verhoeven_stefan: + name: Verhoeven, Stefan + institute: NLeSC, Netherlands + orcid: https://orcid.org/0000-0002-5821-2060 + vonhardenberg_jost: + name: von Hardenberg, Jost + institute: ISAC-CNR, Torino, Italy + orcid: https://orcid.org/0000-0002-5312-8070 + walton_jeremy: name: Walton, Jeremy institute: MetOffice, UK - email: jeremy.walton 'at' metoffice.gov.uk - wang_sh: + orcid: https://orcid.org/0000-0001-7372-178X + wang_shiyu: name: Wang, Shiyu institute: SMHI, Sweden - email: shiyu.wang 'at' smhi.se - wenz_sa: + orcid: + weigel_katja: + name: Weigel, Katja + institute: University of Bremen and DLR, Germany + orcid: https://orcid.org/0000-0001-6133-7801 + github: katjaweigel + wenzel_sabrina: name: Wenzel, Sabrina institute: DLR, Germany - email: sabrina.wenzel 'at' dlr.de - will_ke: + orcid: https://orcid.org/0000-0001-5343-2446 + willen_ulrika: + name: Willén, Ulrika + institute: SMHI, Sweden + orcid: + zechlau_sabrina: + name: Zechlau, Sabrina + institute: DLR, Germany + orcid: https://orcid.org/0000-0001-5343-2446 + williams_keith: name: Williams, Keith institute: MetOffice, UK - email: keith.williams 'at' metoffice.gov.uk + orcid: # Viewers (not active developers) - bala_ve: + balaji_venkatramani: name: Balaji, Venkatramani institute: GFDL, USA - email: balaji 'at' princeton.edu - bunz_fe: + orcid: + bunzel_felix: name: Bunzel, Felix institute: MPI-M, Germany - email: felix.bunzel 'at' mpimet.mpg.de - char_an: + orcid: + charlton-perez_andrew: name: Charlton-Perez, Andrew institute: Univ. of Reading, UK - email: a.j.charlton-perez 'at' reading.ac.uk - cort_su: + orcid: + corti_susanna: name: Corti, Susanna institute: ISAC-CNR, Torino, Italy - email: s.corti 'at' isac.cnr.it - denn_jo: + orcid: + dennis_john: name: Dennis, John institute: NCAR, USA - email: dennis 'at' ucar.edu - graf_po: + orcid: + graf_phoebe: name: Graf, Phoebe institute: DLR, Germany - email: phoebe.graf 'at' dlr.de - hegg_mi: + orcid: + hegglin_michaela: name: Hegglin, Michaela institute: Univ. 
of Reading, UK
-    email: m.i.hegglin 'at' reading.ac.uk
-  hend_jo:
+    orcid:
+  hendricks_johannes:
     name: Hendricks, Johannes
     institute: DLR, Germany
-    email: johannes.hendricks 'at' dlr.de
-  john_ja:
+    orcid:
+  john_jasmin:
     name: John, Jasmin
     institute: NOAA, USA
-    email: jasmin.john 'at' noaa.gov
-  jone_co:
+    orcid:
+  jones_colin:
     name: Jones, Colin
     institute: MetOffice, UK
-    email: colin.jones 'at' metoffice.gov.uk
-  kais_ch:
+    orcid:
+  kaiser_christopher:
     name: Kaiser, Christopher
     institute: DLR, Germany
-    email: christopher.kaiser 'at' dlr.de
-  karp_al:
+    orcid:
+  karmouche_soufiane:
+    name: Karmouche, Soufiane
+    institute: University of Bremen, Germany
+    orcid:
+    github: soufianekar
+  karpechko_alexey:
     name: Karpechko, Alexey
     institute: FMI, Finland
-    email: Alexey.Karpechko 'at' fmi.fi
-  lawr_br:
+    orcid:
+  lawrence_bryan:
     name: Lawrence, Bryan
     institute: STFC, UK
-    email: Bryan.Lawrence 'at' stfc.ac.uk
-  mick_sh:
+    orcid:
+  mickelson_sheri:
     name: Mickelson, Sheri
     institute: NCAR, USA
-    email: mickelso 'at' ucar.edu
-  ocon_fi:
+    orcid:
+  oconnor_fiona:
     name: OConnor, Fiona
     institute: MetOffice, UK
-    email: fiona.oconnor 'at' metoffice.gov.uk
-  radh_ap:
+    orcid:
+  radhakrishnan_aparna:
     name: Radhakrishnan, Aparna
     institute: GFDL, USA
-    email: aparna.radhakrishnan 'at' noaa.gov
-  sell_al:
+    orcid:
+  sellar_alistair:
     name: Sellar, Alistair
     institute: MetOffice, UK
-    email: alistair.sellar 'at' metoffice.gov.uk
-  wyse_kl:
+    orcid: 0000-0002-2955-7254
+  wyser_klaus:
     name: Wyser, Klaus
     institute: SMHI, Sweden
-    email: klaus.wyser 'at' smhi.se
+    orcid:

   # Former developers
-  brae_me:
+  braeu_melanie:
     name: Braeu, Melanie
     institute: DLR, Germany
-    email: braeu.melanie 'at' physik.uni-muenchen.de
-  dusc_ch:
+    orcid:
+  duscha_christiane:
     name: Duscha, Christiane
     institute: DLR, Germany
-    email: christiane.duscha 'at' dlr.de
-  enri_cl:
+    orcid:
+  enright_clare:
     name: Enright, Clare
     institute: UEA, UK
-    email: c.enright 'at' uea.ac.uk
-  halu_al:
+    orcid:
+  evaldsson_martin:
+    name: Evaldsson, Martin
+    institute: SMHI, Sweden
+    orcid:
+  haluszczynski_alexander:
     name: Haluszczynski, Alexander
     institute: DLR, Germany
-    email: alexander.haluszczynski 'at' dlr.de
-  herm_mi:
+    orcid:
+  herman_michael:
     name: Herman, Michael
     institute: New Mexico Tech, USA
-    email: mherman 'at' nmt.edu
-  hueb_mi:
+    orcid:
+  huebner_michael:
     name: Huebner, Michael
     institute: DLR, Germany
-    email: michael.huebner 'at' dlr.de
-  john_ma:
+    orcid:
+  johnston_marston:
     name: Johnston, Marston
     institute: SMHI, Sweden
-    email: shejo284 'at' gmail.com
-  klin_ca:
+    orcid:
+  klinger_carolin:
     name: Klinger, Carolin
     institute: DLR, Germany
-    email: carolin.klinger 'at' physik.uni-muenchen.de
-  kola_mi:
+    orcid:
+  kolax_michael:
     name: Kolax, Michael
-  loew_al:
+    institute: SMHI, Sweden
+    orcid:
+  kunert_dominik:
+    name: Kunert, Dominik
+    institute: DLR, Germany
+    orcid:
+  loew_alexander:
     name: Loew, Alexander
     institute: LMU, Germany
-  neal_ri:
+    orcid:
+  neale_richard:
     name: Neale, Richard
     institute: NCAR, US
-    email: rneale 'at' ucar.edu
-  orlo_bo:
+    orcid:
+  orlowsky_boris:
     name: Orlowsky, Boris
     institute: ETH, Switzerland
-    email: boris.orlowsky 'at' env.ethz.ch
-  pasc_st:
+    orcid:
+  pascoe_stephen:
     name: Pascoe, Stephen
     institute: STFC, UK
-    email: stephen.pascoe 'at' stfc.ac.uk
-  pere_nu:
-    name: Perez_Zanon, Nuria
-    institute: BSC, Spain
-    email: nuria.perez 'at' bsc.es
-  stev_ma:
+    orcid:
+  reader_cathy:
+    name: Reader, Cathy
+    institute:
+    orcid:
+    github: mcreader97
+  rumbold_heather:
+    name: Rumbold, Heather
+    institute: Met
Office, UK + orcid: + senftleben_daniel: + name: Senftleben, Daniel + institute: DLR, Germany + orcid: https://orcid.org/0000-0002-3903-3841 + stevens_mark: name: Stevens, Mark institute: NCAR, US - email: stevens 'at' ucar.edu + orcid: # Former viewers (not active viewers) - butc_ne: + adeniyi_kemisola: + name: Adeniyi, Kemisola + institute: University of Bremen, Germany + orcid: + butchart_neal: name: Butchart, Neal institute: MetOffice, UK - email: neal.butchart 'at' metoffice.gov.uk - hass_da: + orcid: + hassell_david: name: Hassell, David institute: Univ. of Reading, UK - email: d.c.hassell 'at' reading.ac.uk - ivan_de: + orcid: + ivanova_detelina: name: Ivanova, Detelina institute: NERSC, Norway - email: detelina.ivanova 'at' nersc.no - mois_au: + orcid: + moise_aurel: name: Moise, Aurel institute: BOM, Australia - email: a.moise 'at' bom.gov.au - pend_di: + orcid: + pendlebury_diane: name: Pendlebury, Diane institute: Univ. of Toronto, Canada - email: diane 'at' atmosp.physics.utoronto.ca - step_da: + orcid: + stepanova_daria: name: Stepanova, Daria institute: FMI, Finland - email: daria.stepanova 'at' fmi.fi - tilm_si: + orcid: + tilmes_simone: name: Tilmes, Simone institute: NCAR, US - email: tilmes 'at' ucar.edu + orcid: + # If no maintainer is available for a recipe, the following entry is used + unmaintained: + name: No maintainer available + institute: Unseen University + orcid: +# Note: the reference section has been replaced by the folder esmvaltool/references that contains bibtex files. +# How to add a bibtex file to esmvaltool/references: +# - make a bibtex file for a reference entry. +# There are some online tools to convert a doi to bibtex format like https://doi2bib.org/ +# - rename the file to the tag used in the recipe or diagnostic. +# - add the file to the folder esmvaltool/references. -references: - acknow_author: "Please acknowledge the author(s)." - contact_authors: "Please contact the author(s) to discuss acknowledgment or co-authorship." - acknow_project: "Please acknowledge the project(s)." - alexander: "Alexander L.V., et al., J. Geophys. Res., 111, D05109, doi:10.1029/2005JD006290" - anav13jclim: "Anav et al., J. Clim., 26, 6801-6843, doi:10.1175/JCLI-D-12-00417.1, 2013." - andrews12grl: "Andrews et al., Geophys. Res. Lett., 39, L09712, doi:10.1029/2012GL051607, 2012." - antonov10usgov: "Antonov, J. I. et al., World Ocean Atlas 2009, Volume 2: Salinity. S. Levitus, Ed. NOAA Atlas NESDIS 69, U.S. Government Printing Office, Washington, D.C., 184 pp., 2010." - aquila11gmd: "Aquila et al., Geosci. Model Dev. 4, 325-355, doi:10.5194/gmd-4-325-2011, 2011." - bakker14essd: "Bakker, D. C. E. et al., Earth Syst. Sci. Data, 6, 69-90, doi:10.5194/essd-6-69-2014, 2014." - baldwin09qjrms: "Baldwin, D. P. et al., Q. J. R. Meteorol. Soc., 135, 1661-1672, doi:10.1002/qj.479, 2009" - bianchi12gbc: "Bianchi, D. et al., Global Biogeochem. Cy., 26, GB2009, doi:10.1029/2011GB004209, 2012." - cionni11acp: "Cionni et al., Atmos. Chem. Phys., 11, 11267-11292, doi:10.5194/acp-11-11267-2011, 2011." - clivar09jclim: "CLIVAR Madden-Julian Oscillation Working Group, J. Climate, 22, 3006-3030, doi:10.1175/2008JCLI2731.1, 2009." - collins13ipcc: "Collins, M. et al., Long-term climate change: Projections, Commitments, and Irreversibility, in: Climate Change 2013: the Physical Science Basis, contribution of Working Group I to the Fifth Assessment Report of the Intergovernmental Panel on Climate Change, edited by: Stocker, T. F., Qin, D., Plattner, G.-K., Tignor, M., Allen, S. 
K., Boschung, J., Nauels, A., Xia, Y., Bex, V., and Midgley, P. M., Cambridge University Press, Cambridge, UK and New York, NY, USA (2013)." - corti99nat: "Corti, S. et al. Nature 398, 799-801, doi:10.1038/19745" - cox18nature: "Cox, P. M. et al., Nature, 553, 319-322, doi:10.1038/nature25450, 2018." - davini12jclim: "Davini P., C. Cagnazzo, S. Gualdi, and A. Navarra. J. Climate, 25, 6496-6509, doi: 10.1175/JCLI-D-12-00032.1, 2012" - davini18: "Davini, P. MiLES - Mid Latitude Evaluation System. Zenodo. doi:10.5281/zenodo.1237837, 2018" - demora2018gmd: "de Mora et al., Geosci. Model Dev., 11, 4215-4240, doi:10.5194/gmd-11-4215-2018, 2018." - dong08grl: "Dong, S. et al., J. Geophys. Res., 113, C06013, doi:10.1029/2006JC004051, 2008." - donofrio14jh: "D'Onofrio et al., J of Hydrometeorology 15, 830-843, 2014." - duemenil00mpimr: "Duemenil Gates et al., Observed historical discharge data from major rivers for climate model validation. Max Planck Institute for Meteorology Report 307, Hamburg, Germany, 2000." - emmons00jgr: "Emmons et al., J. Geophys. Res., 105, D16, 20497-20538, 2000." - eyring06jgr: "Eyring et al., J. Geophys. Res., 111, D22308, doi:10.1029/2006JD007327, 2006." - eyring13jgr: "Eyring et al., J. Geophys. Res., 118, 5029-5060, doi:10.1002/jgrd.50316, 2013." - flato13ipcc: "Flato, G. et al., Evaluation of climate models, in: Climate Change 2013: the Physical Science Basis, 2013." - fuckar: "Fuckar et al., Clima Dynam, 47, 5-6, 1527-1543, doi:10.1007/s00382-015-2917-2, 2016." - gen14jclim: "Gen, L. et al., J. Climate, 27, 1765-1780, doi:10.1175/JCLI-D-13-00337.1, 2014." - georgievski18tac: "Georgievski, G. & Hagemann, S. Theor Appl Climatol (2018). https://doi.org/10.1007/s00704-018-2675-2" - gleckler08jgr: "Gleckler et al., J. Geophys. Res., 113, D06104, doi:10.1029/2007JD008972, 2008." - goswami99qjrms: "Goswami, B., V. Krishnamurthy, and H. Annamalai, Q. J. R. Meteorol. Soc., 125, 611-633, doi:10.1002/qj.49712555412, 1999." - hagemann13james: "Hagemann et al., J. Adv. Model. Earth Syst., 5, doi:10.1029/2012MS000173, 2013." - jones15james: "Jones et al., J. Adv. Model. Earth Syst., 7, 1554-1575, doi:10.1002/2014MS000416, 2015." - kerry06jclim: "Kerry H. et al, J. Climate, 19, 3681-3703, doi:10.1175/JCLI3814.1, 2006." - kim09jclim: "Kim, D. et al., J. Climate, 22, 6413-6436, doi:10.1175/2009JCLI3063.1, 2009." - kim12grl: "Kim and Yu, Geophys. Res. Lett., 39, L11704, doi:10.1029/2012GL052006, 2012." - key04gbc: "Key, R. M. et al., Global Biogeochem. Cy., 18, GB4031, doi:10.109/2004GB002247, 2004." - lauer05acp: "Lauer et al., Atmos. Chem. Phys., 5, 3251-3276, doi:10.5194/acp-5-3251-2005, 2005." - lauer13jclim: "Lauer and Hamilton, J. Climate, 26, 3823-3845, doi:10.1175/JCLI-D-12-00451.1, 2013." - lauer17rse: "Lauer et al., Remote Sens. Environ., 203, 9-39, doi:10.1016/j.rse.2017.01.007, 2017." - li14jclim: "Li and Xie, J. Climate, 27, 1765-1780, doi:10.1175/JCLI-D-13-00337.1, 2014." - lin08jclim: "Lin, J-L. et al., J. Climate, 21, 4541-4567, doi: 10.1175/2008JCLI1816.1, 2008." - lloyd-hughes02jclim: "Lloyd-Hughes, B. and Saunders, M. A., Int. J. Climatol., 22, 1571-1592, doi:10.1002/joc.846, 2002." - locarini10usgov: "Locarnini, R. A. et al., World Ocean Atlas 2009, Volume 1: Temperature. S. Levitus, Ed. NOAA Atlas NESDIS 68, U.S. Government Printing Office, Washington, D.C., 184 pp.,2010." - mehran14jgr: "Mehran, A. et al., J. Geophys. Res., 119, 4, 1695-1707, doi: 10.1002/2013JD021152, 2014." - manubens: "Manubens, N., et al., ENVIRON MODELL SOFTW 103, 29-42. 
doi:10.1016/j.envsoft.2018.01.018" - mckee93: "McKee, T. B. and Doesken, N. J. and Kleist, J. In Proceedings of the 8th Conference on Applied Climatology, 17(22), 179-183, Boston, MA: American Meteorological Society, 1993." - mueller14grl: "Mueller, B. and Seneviratne, S. I. Geophys. Res. Lett., 41, 128-134, doi:10.1002/2013GL058055, 2014." - mueller13hess: "Mueller, B. et al., Hydrol. Earth Syst. Sci., 17, 3707-3720, doi:10.5194/hess-17-3707-2013, 2013." - phillips14eos: "Phillips, A. S. et al., EOS T. Am. Geophys. Un., 95, 453-455, 2014." - rebora06jhm: "Rebora et. al., JHM 7, 724, 2006." - righi13acp: "Righi et al., Atmos. Chem. Phys., 13, 9939-9970, doi:10.5194/acp-13-9939-2013, 2013." - righi15gmd: "Righi et al., Geosci. Model Dev., 8, 733-768 doi:10.5194/gmd-8-733-2015, 2015." - roedenbeck13os: "Roedenbeck, C. et al., Ocean Sci., 9, 193-216, doi:10.5194/os-9-193-2013, 2013." - roehrig13jclim: "Roehrig, R. et al., J. Climate, 26, 6471-6505, doi:10.1175/JCLI-D-12-00505.1, 2013." - sperber12asl: "Sperber and Kim, Atmos. Sci. Lett., 13, 3, 187-193, doi:10.1002/asl.378, 2012." - straus07jcli: "Straus, D.M., S. Corti, and F. Molteni. J. Climate, 20, 2251-2272, doi:10.1175/JCLI4070.1, 2007" - stroeve07grl: "Stroeve, J. et al., Geophys. Res. Lett., 34, L09501, doi:10.1029/2007GL029703, 2007." - tibaldi90tel: "Tibaldi S. and Molteni F. Tellus A 42(3): 343-365, doi:10.1034/j.1600-0870.1990.t01-2-00003.x, 1990." - taylor12: "Taylor et al., Nature, 489, 423-426, doi:10.1038/nature11377, 2012." - takahashi14marchem: "Takahashi et al., Mar. Chem., 164, 95-125, doi:10.1016/j.marchem.2014.06.004, 2014." - vicente10jclim: "Vicente-Serrano, S. M. and Beguería, S. and López-Moreno, J. I., Journal of climate, 23(7), 1696-1718, 10.1175/2009JCLI2909.1, 2010" - wang99bams: "Wang, B. and Z. Fan, Bull. Amer. Meteor. Soc., 80, 629-638, doi:10.1175/1520-0477(1999)080<0629:COSASM>2.0.CO;2, 1999." - wang11climdyn: "Wang, B. et al., Clim. Dyn., 39, 1123-1135, doi:10.1007/s00382-011-1266-z, 2011." - webster92qjrms: "Webster, P. J. and Yang, S., Q.J.R. Meteorol. Soc., 118: 877-926. doi:10.1002/qj.49711850705, 1992." - weedon14wrr: "Weedon, G. P. et al., Water Resour. Res., 50, 7505-7514, doi:10.1002/2014WR015638, 2014." - weigel: "Weigel, A P., et al., Q. J. Royal Meteorol. Soc. 134, 630, 241-260. doi:10.1002/qj.210" - wenzel14jgr: "Wenzel et al., J. Geophys. Res. Biogeosci., 119(5), doi:2013JG002591, 2014." - williams09climdyn: "Williams and Webb, Clim. Dynam., 33, 141-157, doi:10.1007/s00382-008-0443-1, 2009." - # Observations - aura-tes: "Beer, R., IEEE Trans. Geosci. Rem. Sens., doi:10.1109/TGRS.2005.863716, 2006." - ceres-syn1deg: "Wielicki et al., Bull. Amer. Meteor. Soc., doi: 10.1175/1520-0477(1996)077<0853:CATERE>2.0.CO;2, 1996." - era-interim: "Dee, D. P. et al., Q. J. Roy. Meteor. Soc., doi:10.1002/qj.828, 2011." - esacci-aerosol: "Popp et al., ESA Aerosol Climate Change Initiative (ESA Aerosol_cci) data: AOD v4.21 via Centre for Environmental Data Analysis, 2016." - esacci-cloud: "Stengel et al., Earth Syst. Sci. Data, doi:10.5194/essd-9-881-2017, 2017." - esacci-fire: "Not available." - esacci-landcover: "Defourny et al.. ESA Land Cover Climate Change Initiative (ESA LC_cci) data, 2015." - esacci-ozone: "Loyola et al., Int. J. Remote Sens. doi:10.1080/01431160902825016, 2009." - esacci-soilmoisture: "Not available." - esacci-sst: "Merchant et al., Geosci. Data J., doi:10.1002/gdj3.20, 2014." - hadisst: "Rayner et al., J. Geophys. Res., doi:10.1029/2002JD002670, 2013." - modis: "Not available." 
- ncep: "Kalnay et al., B. Am. Meteorol. Soc., doi:10.1175/1520-0477(1996)077<0437:TNYRP>2.0.CO;2, 1996." - niwa-bs: "Bodeker et al., Atmos. Chem. Phys., doi:10.5194/acp-5-2603-2005, 2005." - patmos-x: "Heidinger et al., NOAA National Centers for Environmental Information, doi:10.7289/V5348HCK, last access: 10 February 2019." - woa: "Locarnini et al., World Ocean Atlas 2013, Vol. 1: Temperature, 2013." - projects: + 4c: EU H2020 project 4C + applicate: EU Horizon 2020 Advanced prediction in polar regions and beyond + c3s-magic: Copernicus Climate Change Service 34a Lot 2 (MAGIC) project climval: BMBF MiKlip Project ClimVal cmip6dicad: BMBF CMIP6 Project Germany cmug: ESA CMUG crescendo: EU H2020 project CRESCENDO - dlrveu: DLR project VEU dlrveu2: DLR project VEU2 + dlrveu: DLR project VEU embrace: EU FP7 project EMBRACE + esm2025: EU H2020 project ESM2025 - Earth system models for the future esmval: DLR project ESMVal + eucp: EU H2020 European Climate prediction + eval4cmip: DLR and University of Bremen project funded by the Initiative and Networking Fund of the Helmholtz Society + ewatercycle: eWaterCycle project + ipcc_ar6: IPCC AR6 WG1 contributions + isenes3: EU H2020 Infrastructure for the European Network for Earth System Modelling - Phase 3 + primavera: EU H2020 project PRIMAVERA qa4ecv: QA4ECV - c3s-magic: Copernicus Climate Change Service 34a Lot 2 (MAGIC) project + russell_project: US Clivar, Ocean Carbon Biogeochemistry, ESGF, NOAA, NSF, NASA, US Antarctic program + trr181: DFG Project TRR-181, Energy transfers in Atmosphere and Ocean ukesm: UKESM, UK Earth System Model project (NERC) + usmile: ERC Synergy Grant USMILE + realms: aerosol: aerosol @@ -631,32 +819,51 @@ themes: phys: physics seaIce: sea ice varmodes: modes of variability + ML: machine learning domains: + eq: equatorial et: extra tropics global: global - midlat: mid-latittudes + midlat: mid-latitudes nh: northern hemisphere + nhext: northern extra tropics + nhmidlat: northern mid-latitudes + nhpolar: northern polar + nhtrop: northern tropics polar: polar reg: regional sh: southern hemisphere + shext: southern extra tropics + shmidlat: southern mid-latitudes + shpolar: southern polar + shtrop: southern tropics trop: tropics + user: user-defined (see preprocessor settings) plot_types: errorbar: error bar plot bar: bar chart + map: world map + metrics: Autoassess metrics + circle: different overlapping circles diurn: diurnal cycle geo: geographical distribution + histogram: histogram portrait: portrait diagram polar: polar-stereographic plot scatter: scatter plot seas: seasonal cycle + sect: meridional section size: size-distribution vert: vertical profile taylor: taylor diagram times: time series zonal: zonal mean pro: profile # (any other kind of line chart) + box: boxplot + line: line plot + probability: probability distribution other: other plot types statistics: @@ -675,4 +882,8 @@ statistics: perc: percentiles median: median detrend: detrend + smpi: single metric performance index statistics + mder: multiple diagnostic ensemble regression + mvi: model variability index + pdf: probability density function (PDF) other: other statistics diff --git a/esmvaltool/config-user.yml b/esmvaltool/config-user.yml deleted file mode 100644 index 55968d5ce2..0000000000 --- a/esmvaltool/config-user.yml +++ /dev/null @@ -1,49 +0,0 @@ -############################################################################### -# User's configuration file for the ESMValTool 
-############################################################################### ---- - -# Diagnostics create plots? [true]/false -write_plots: true -# Diagnositcs write NetCDF files? [true]/false -write_netcdf: true -# Set the console log level debug, [info], warning, error -log_level: info -# verbosity is deprecated and will be removed in the future -# verbosity: 1 -# Exit on warning? true/[false] -exit_on_warning: false -# Plot file format? [ps]/pdf/png/eps/epsi -output_file_type: pdf -# Destination directory -output_dir: ./esmvaltool_output -# Auxiliary data directory (used for some additional datasets) -auxiliary_data_dir: ./auxiliary_data -# Use netCDF compression true/[false] -compress_netcdf: false -# Save intermediary cubes in the preprocessor true/[false] -save_intermediary_cubes: false -# Remove the preproc dir if all fine -remove_preproc_dir: true -# Run at most this many tasks in parallel null/[1]/2/3/4/.. -# Set to null to use the number of available CPUs. -# Make sure your system has enough memory for the specified number of tasks. -max_parallel_tasks: 1 -# Path to custom config-developer file, to customise project configurations. -# See config-developer.yml for an example. Set to None to use the default -config_developer_file: null -# Get profiling information for diagnostics -# Only available for Python diagnostics -profile_diagnostic: false - -# Rootpaths to the data from different projects (lists are also possible) -rootpath: - CMIP5: [~/cmip5_inputpath1, ~/cmip5_inputpath2] - OBS: ~/obs_inputpath - RAWOBS: ~/rawobs_inputpath - default: ~/default_inputpath - -# Directory structure for input data: [default]/BADC/DKRZ/ETHZ/etc -# See config-developer.yml for definitions. -drs: - CMIP5: default diff --git a/esmvaltool/diag_scripts/aerosols/aero_utils.py b/esmvaltool/diag_scripts/aerosols/aero_utils.py new file mode 100644 index 0000000000..623a85f2aa --- /dev/null +++ b/esmvaltool/diag_scripts/aerosols/aero_utils.py @@ -0,0 +1,193 @@ +"""Part of the ESMValTool Aerosol diagnostics. + +This module contains utility functions commonly used by aerosol +assessment routines. +""" + +import iris +import numpy as np + + +class AeroAnsError(Exception): + + """Exception class for errors raised when model data is checked in the + extract_pt module. + """ + + +def add_bounds(cube): + """Add bounds to a cube's latitude and longitude coordinates. + + Parameters + ---------- + cube : Iris cube + Iris cube with latitude and longitude coordinates. + + Returns + ------- + cube : Iris cube. + Iris cube with bounds added to the latitude and longitude coordinates. + """ + + if not cube.coord('latitude').has_bounds(): + cube.coord('latitude').guess_bounds() + if not cube.coord('longitude').has_bounds(): + cube.coord('longitude').guess_bounds() + + return cube + + +def extract_pt(icube, pt_lat, pt_lon, height=None, level=None, nearest=False): + """Extracts given location(s) (3-D) from a cube. + + Method + ------ + Uses Iris module Analysis.Interpolate to extract values, + initially based on horizontal coordinates, and then based on + height, if specified. + + If height ('altitude') is requested, checks if cube heights + include orography, i.e. HybridHeights have been derived. + + Parameters + ---------- + icube : Iris cube + pt_lat, pt_lon : Float or list/array of floats. Latitude and longitude + coordinates of desired points. + args: + height : Float or list/array of floats. Altitude (above geoid) of + point. Initialized to None. + level : Integer . Model level or pseudo level or tile number. 
+        Initialized to None, meaning that all available levels in
+        the cube are used.
+    nearest : Boolean. Specify whether to use the 'nearest neighbour'
+        method instead of 'linear' interpolation while extracting data.
+        Default is False.
+
+    Returns
+    -------
+    data_out : List
+        List of single point values, corresponding to each point specified.
+
+    Raises
+    ------
+    AeroAnsError : If the number of latitude and longitude points is
+        mismatched. OR if both level and height are passed as args.
+        OR if the cube contains a time coordinate. OR if a pseudo level
+        coordinate is requested, but not present in the cube. OR if the
+        numbers of latitude/longitude and height points are mismatched.
+        OR if height is requested but the cube does not contain an
+        altitude coordinate.
+    """
+
+    # Check that input data is a (single) cube
+    if not isinstance(icube, iris.cube.Cube):
+        raise AeroAnsError('Extract_pt:First argument must be a single cube')
+
+    # Check if the cube contains a time dimension, which is
+    # currently unsupported.
+    if icube.coords()[0].name() == 'time':
+        raise AeroAnsError(
+            'Extract_pt:Cannot handle time dimension at present')
+
+    # Check that an equal number of lat/lon pairs is passed in the point
+    # coordinates. Convert the arguments to lists for easier processing.
+    pt_lat1 = []
+    pt_lon1 = []
+
+    if not isinstance(pt_lat, list):
+        pt_lat1.append(pt_lat)
+        pt_lon1.append(pt_lon)
+
+    else:
+        for n_lat in np.arange(len(pt_lat)):
+            pt_lat1.append(pt_lat[n_lat])
+            pt_lon1.append(pt_lon[n_lat])
+
+    if len(pt_lat1) != len(pt_lon1):
+        raise AeroAnsError('Extract_pt:Mismatch in number of lat/long values')
+
+    # Check that both level and height haven't been requested.
+    if level is not None and height is not None:
+        raise AeroAnsError('Extract_pt: Both Level and Height requested')
+
+    # Check that the cube has a level coordinate if level has been
+    # requested. Note: Cube.coords() returns an empty list (i.e. falsy)
+    # if the coordinate is absent, whereas Cube.coord() would raise.
+    if level is not None and not icube.coords(
+            'model_level_number') and not icube.coords('pseudo_level'):
+        raise AeroAnsError('Extract_pt:Level requested, but not found in cube')
+
+    # Check that the number of height points is equal to the number of
+    # lat/lon pairs. Convert the argument to a list for easier
+    # processing if necessary.
+    if height is not None:
+        pt_hgt = []
+
+        if isinstance(height, list):
+            pt_hgt.extend(height)
+        else:
+            pt_hgt.append(height)
+
+        if len(pt_lat1) != len(pt_hgt):
+            raise AeroAnsError(
+                'Extract_pt:Mismatch in number of points for lat/long/height')
+
+        # Check that heights have been merged with orography.
+        if not icube.coords('altitude'):
+            raise AeroAnsError(
+                'Extract_pt:Height requested but input data does not '
+                'contain "Altitude" coordinate')
+
+        # Store the min and max altitudes from the cube data so that the
+        # user cannot request points located below/above them.
+        # Requested heights beyond these limits are clamped to min/max.
+        hgt_min = icube.coord('altitude').points.min()
+        hgt_max = icube.coord('altitude').points.max()
+
+    # ---------- Finished checks -- begin processing -------------------------
+
+    # If a level is specified, extract the slice first. Use the model
+    # level coordinate if the cube has one, otherwise fall back to the
+    # pseudo level (the check above guarantees that one of them exists).
+    if level is not None:
+        if icube.coords('model_level_number'):
+            icube = icube.extract(
+                iris.Constraint(model_level_number=level))
+        else:
+            icube = icube.extract(
+                iris.Constraint(pseudo_level=level))
+
+    # Extract values for specified points lat/lon
+    # NOTE: Does not seem to handle multiple points if 3-D
+    data_out = []
+
+    # Set lat/lon coordinates for model grid cell interpolation
+    for n_lat1 in np.arange(len(pt_lat1)):
+        latlon_coords = [('latitude', pt_lat1[n_lat1]),
+                         ('longitude', pt_lon1[n_lat1])]
+
+        if nearest:
+            tcube = icube.interpolate(latlon_coords, iris.analysis.Nearest())
+        else:
+            tcube = icube.interpolate(latlon_coords, iris.analysis.Linear())
+
+        # If height specified, interpolate to requested height
+        if height is not None:
+
+            # Clamp the requested height to the range available in the
+            # cube, then set the vertical coordinate for interpolation.
+            point = min(max(pt_hgt[n_lat1], hgt_min), hgt_max)
+            hgt_coords = [('altitude', point)]
+
+            if nearest:
+                tcube = tcube.interpolate(hgt_coords, iris.analysis.Nearest())
+            else:
+                tcube = tcube.interpolate(hgt_coords, iris.analysis.Linear())
+
+        # Append processed data point
+        data_out.append(tcube.data)
+
+    return data_out
diff --git a/esmvaltool/diag_scripts/aerosols/aod_aeronet_assess.py b/esmvaltool/diag_scripts/aerosols/aod_aeronet_assess.py
new file mode 100644
index 0000000000..3866e3c51a
--- /dev/null
+++ b/esmvaltool/diag_scripts/aerosols/aod_aeronet_assess.py
@@ -0,0 +1,448 @@
+"""Implement the AOD climatology metric from ground-based AeroNet
+observations.
+"""
+import logging
+import os
+
+import iris
+import iris.coord_categorisation
+import iris.plot as iplt
+import matplotlib.cm as mpl_cm
+import matplotlib.lines as mlines
+import matplotlib.pyplot as plt
+import numpy as np
+import scipy.stats
+from matplotlib import colors, gridspec
+from numpy import ma
+
+from esmvaltool.diag_scripts.aerosols.aero_utils import add_bounds, extract_pt
+from esmvaltool.diag_scripts.shared import group_metadata, run_diagnostic
+from esmvaltool.diag_scripts.shared._base import get_plot_filename
+
+logger = logging.getLogger(os.path.basename(__file__))
+fontsizedict = {"title": 25, "axis": 20, "legend": 18, "ticklabel": 18}
+
+
+def get_provenance_record(filenames):
+    """Return a provenance record describing the metric.
+
+    Parameters
+    ----------
+    filenames : List of strings
+        The filenames containing the data used to create the metric.
+
+    Returns
+    -------
+    dictionary
+        The provenance record describing the metric.
+    """
+    record = {
+        "ancestors": filenames,
+    }
+
+    return record
+
+
+def plot_aod_mod_obs(md_data, obs_data, aeronet_obs_cube, plot_dict):
+    """Plot AOD contour overlaid with the AeroNET climatology.
+
+    Parameters
+    ----------
+    md_data : Iris cube
+        Model AOD as a cube with latitude and longitude coordinates.
+    obs_data : List.
+        Observations of AOD from each AeroNET station.
+    aeronet_obs_cube : Iris cube.
+        Holds information about AeroNET measurement stations including
+        station names, station latitude and station longitude.
+    plot_dict : Dictionary.
+        Holds plotting settings.
+    """
+    # Plot model data
+    cf_plot = iplt.contourf(md_data,
+                            plot_dict["Levels"],
+                            colors=plot_dict["Colours"],
+                            extend="max")
+
+    # Latitude and longitude of stations.
+    anet_aod_lats = aeronet_obs_cube.coord("latitude").points
+    anet_aod_lons = ((aeronet_obs_cube.coord("longitude").points + 180) % 360 -
+                     180)
+
+    # Loop over stations
+    for istn, stn_data in enumerate(obs_data):
+        if ma.is_masked(stn_data):
+            continue
+
+        # Find position of the observed AOD on the colorscale.
+        # np.searchsorted returns index at which inserting new value will
+        # maintain a sorted array. We use the color to the left of index.
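+        # Illustrative example (not executed): with Levels = [0.0, 0.1,
+        # 0.2] and an observed value of 0.15, np.searchsorted returns 2;
+        # after the clamping below, the marker gets Colours[1], the band
+        # covering 0.1-0.2.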
+        cid = np.searchsorted(plot_dict["Levels"], stn_data)
+        cid = max(0, cid - 1)  # use the colour band to the left of the index
+        cid = min(len(plot_dict["Colours"]) - 1, cid)  # clamp to last band
+        pcol = plot_dict["Colours"][cid]
+
+        # Overlay contourf with observations
+        plt.plot(
+            anet_aod_lons[istn],
+            anet_aod_lats[istn],
+            color=pcol,
+            marker="o",
+            markeredgecolor="k",
+            markeredgewidth=2,
+            markersize=9,
+        )
+
+    # Decorate the plot
+    plt.title(plot_dict["Title"], size=24)
+    colbar = plt.colorbar(cf_plot, orientation="horizontal")
+    colbar.set_ticks(plot_dict["Levels"])
+    colbar.set_ticklabels(plot_dict["tick_labels"])
+    plt.gca().coastlines(color="#525252")
+
+    # Statistics on plot
+    plt.figtext(
+        0.12,
+        0.27,
+        (f'''Global mean AOD={plot_dict["Mean_aod"]:.3f}; RMSE='''
+         f'''{plot_dict["RMS_aod"]:.3f}; Stn mean: md='''
+         f'''{plot_dict["Stn_mn_md"]:.3f}; obs='''
+         f'''{plot_dict["Stn_mn_obs"]:.3f}'''),
+        size=16,
+    )
+
+
+def aod_analyse(model_data, aeronet_obs_cube, clim_seas, wavel):
+    """Evaluate AOD against AeroNET, generate plots and return the figures.
+
+    Parameters
+    ----------
+    model_data : Iris Cube.
+        Contains model output of AOD with coordinates: time, latitude and
+        longitude.
+    aeronet_obs_cube : Iris Cube.
+        Contains information about AeroNET measurement stations including
+        station names, station latitude and station longitude.
+    clim_seas : List.
+        Strings to denote climate seasons ["DJF", "MAM", "JJA", "SON"].
+    wavel : String.
+        AOD wavelength in nm; the default of 440 nm translates to the
+        pseudo-level.
+
+    Returns
+    -------
+    figures : List.
+        Contains figure instances for the seasonal contour plots overlaid
+        with observations of AOD from AeroNET.
+    fig_scatter : Figure object.
+        The scatter plot comparing modelled and observed AOD at 440 nm.
+    """
+    # Convert wavelength nm -> um
+    wv_mi = str(float(wavel) / 1000.0)
+
+    # Get model run id
+    if "parent_source_id" in model_data.attributes:
+        model_id = model_data.attributes["parent_source_id"]
+    else:
+        model_id = "Multi-Model-Mean"
+
+    # Add bounds for lat and lon if not present
+    model_data = add_bounds(model_data)
+
+    # Co-locate model grid points with measurement sites (extract_pt
+    # from aero_utils)
+    anet_aod_lats = aeronet_obs_cube.coord("latitude").points.tolist()
+    anet_aod_lons = aeronet_obs_cube.coord("longitude").points.tolist()
+    aod_at_anet = extract_pt(model_data, anet_aod_lats, anet_aod_lons)
+
+    # Set up seasonal contour plots
+    figures = []
+
+    clevs = [0.0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.45, 0.5, 2.0]
+    clabs = [
+        "0.0", "", "0.1", "", "0.2", "", "0.3", "", "0.4", "", "0.5", "2.0"
+    ]
+    cmapr = mpl_cm.get_cmap("brewer_Spectral_11")
+    cmap = colors.ListedColormap(cmapr(range(cmapr.N))[-1::-1])
+    colours = cmap.colors
+
+    # Set up the figure for scatter plotting
+    fig_scatter = plt.figure(figsize=(10, 10))
+    gs_scatter = gridspec.GridSpec(ncols=1, nrows=1)
+    ax_scatter = fig_scatter.add_subplot(gs_scatter[0, 0])
+    col_scatter = ["#081d58", "#41ab5d", "#fe9929", "#7f0000"]
+    leg_scatter = []
+
+    # Loop over seasons
+    for season in aeronet_obs_cube.slices_over("clim_season"):
+
+        # Match the AeroNET obs season with the model season number
+        model_sn = [c.lower() for c in clim_seas
+                    ].index(season.coord("clim_season").points[0])
+        model_season = model_data[model_sn]
+
+        logger.info('Analysing AOD for %s: %s', model_id,
+                    clim_seas[model_sn])
+
+        # Generate required statistics - area-weighted mean
+        grid_areas = iris.analysis.cartography.area_weights(model_season)
+        global_mean = model_season.collapsed(
+            ["latitude",
"longitude"], + iris.analysis.MEAN, + weights=grid_areas, + ) + + # Extract model and obs data for season number (model_sn) + seas_anet_obs = season.data + seas_anet_md = np.array([x[model_sn] for x in aod_at_anet]) + + # Match model data with valid obs data + valid_indices = ma.where(seas_anet_obs) + valid_obs = seas_anet_obs[valid_indices] + valid_md = seas_anet_md[valid_indices] + + # Model - obs statistics (diff, model mean and RMS, r2) + diff = valid_md - valid_obs + stn_mn_obs = np.mean(valid_obs) + stn_mn_md = np.mean(valid_md) + rms_aod = np.sqrt(np.mean(diff**2)) + linreg = scipy.stats.linregress(valid_obs, valid_md) + + # Plot scatter of co-located model and obs data + ax_scatter.scatter(valid_obs, valid_md, color=col_scatter[model_sn]) + + # Legend + label = f"{clim_seas[model_sn]} = {linreg.rvalue**2:.2f}" + leg_scatter.append( + mlines.Line2D( + [0], + [0], + marker="o", + color="w", + label=label, + markersize=15, + markerfacecolor=col_scatter[model_sn], + )) + + # Plot contours overlaid with obs for this run and season + fig_cf = plt.figure(figsize=(11, 8), dpi=300) + + n_stn = str(len(valid_obs)) + title = ("\nTotal Aerosol Optical Depth at " + wv_mi + " microns" + + "\n" + model_id + ", " + clim_seas[model_sn] + + ", N stations=" + n_stn) + + # Plot dictionary + plot_dict = { + "Mean_aod": global_mean.data, + "Stn_mn_obs": stn_mn_obs, + "Stn_mn_md": stn_mn_md, + "RMS_aod": rms_aod, + "Levels": clevs, + "Colours": colours, + "tick_labels": clabs, + "Title": title, + "Season": clim_seas[model_sn], + } + plot_aod_mod_obs(model_season, seas_anet_obs, aeronet_obs_cube, + plot_dict) + + figures.append(fig_cf) + + # Decorate the scatter plot + line = mlines.Line2D([0, 1], [0, 1], color="#696969") + transform = ax_scatter.transAxes + line.set_transform(transform) + ax_scatter.add_line(line) + + ax_scatter.set( + xlim=(0, 1), + xticks=np.linspace(0.0, 1.0, num=6), + ylim=(0, 1), + yticks=np.linspace(0.0, 1.0, num=6), + ) + ax_scatter.set_xlabel("AeroNET AOD", fontsize=fontsizedict["axis"]) + ax_scatter.set_ylabel(model_id + " AOD", fontsize=fontsizedict["axis"]) + + ax_scatter.tick_params(axis="both", + which="major", + labelsize=fontsizedict["ticklabel"]) + + ax_scatter.set_title( + "Model vs obs: Total Aerosol Optical Depth \n at " + wv_mi + + " microns", + fontsize=fontsizedict["title"], + ) + + ax_scatter.legend( + handles=leg_scatter, + loc="lower right", + title="Seasonal R2", + title_fontsize=fontsizedict["legend"], + fontsize=fontsizedict["legend"], + ) + + return figures, fig_scatter + + +def preprocess_aod_obs_dataset(obs_dataset): + """Calculate a multiannual seasonal mean AOD climatology. + + Observational AOD timeseries data from AeroNET are used to generate a + multiannual seasonal mean climatology for each AeroNET station. The + user sets thresholds (or uses the default settings) to specify the + amount of valid data required for the climatology. At this stage + ESMValTool preprocessors are unsuitable for pre-processing the AeroNET + AOD observations because of the bespoke nature and application of the + filtering thresholds. + + Parameters + ---------- + obs_dataset : ESMValTool dictionary. Holds meta data for the observational + AOD dataset. + + Returns + ------- + multiannual_seaonal_mean : Iris cube. Preprocessed observational + AOD climatology. 
+ """ + obs_cube = iris.load_cube(obs_dataset[0]["filename"]) + + # Set up thresholds for generating the multi annual seasonal mean + min_days_per_mon = 1 + min_mon_per_seas = 3 + min_seas_per_year = 4 + min_seas_per_clim = 5 + + # Add the clim_season and season_year coordinates. + iris.coord_categorisation.add_year(obs_cube, 'time', name='year') + + iris.coord_categorisation.add_season(obs_cube, 'time', name='clim_season') + + iris.coord_categorisation.add_season_year(obs_cube, + 'time', + name='season_year') + + # Copy obs cube and mask all months with fewer + # "Number of days" than given threshold. + num_days_var = obs_cube.ancillary_variable("Number of days") + masked_months_obs_cube = obs_cube.copy(data=ma.masked_where( + num_days_var.data < min_days_per_mon, obs_cube.data)) + + # Aggregate (mean) by season. + # The number of unmasked months per season is counted, + # and where there are fewer unmasked months than the + # given threshold, the computed mean is masked. + annual_seasonal_mean = masked_months_obs_cube.aggregated_by( + ['clim_season', 'season_year'], + iris.analysis.MEAN, + ) + annual_seasonal_count = masked_months_obs_cube.aggregated_by( + ['clim_season', 'season_year'], + iris.analysis.COUNT, + function=lambda values: ~ma.getmask(values), + ) + annual_seasonal_mean.data = ma.masked_where( + annual_seasonal_count.data < min_mon_per_seas, + annual_seasonal_mean.data, + ) + + # Aggregate (mean) by multi-annual season. + # The number of unmasked seasons per multi-annual season + # is counted, and where there are fewer unmasked seasons + # than the given threshold, the computed multi-annual + # season is masked. + multi_annual_seasonal_mean = annual_seasonal_mean.aggregated_by( + 'clim_season', + iris.analysis.MEAN, + ) + clim_season_agg_count = annual_seasonal_mean.aggregated_by( + 'clim_season', + iris.analysis.COUNT, + function=lambda values: ~ma.getmask(values), + ) + multi_annual_seasonal_mean.data = ma.masked_where( + clim_season_agg_count.data < min_seas_per_clim, + multi_annual_seasonal_mean.data, + ) + year_agg_count = multi_annual_seasonal_mean.aggregated_by( + 'year', + iris.analysis.COUNT, + function=lambda values: ~ma.getmask(values), + ) + + counter = range(len( + multi_annual_seasonal_mean.coord('clim_season').points)) + for iseas in counter: + multi_annual_seasonal_mean.data[iseas, :] = ma.masked_where( + year_agg_count.data[0, :] < min_seas_per_year, + multi_annual_seasonal_mean.data[iseas, :], + ) + + return multi_annual_seasonal_mean + + +def main(config): + """Produce the AOD climatology metric from ground-based AeroNet + observations. + + Parameters + ---------- + wavel : String. + User defined. Default is "440". + config : dict + The ESMValTool configuration. + """ + input_data = config["input_data"] + datasets = group_metadata(input_data.values(), "dataset") + + # Default wavelength + wavel = "440" + + # Produce climatology for observational dataset + obs_dataset = datasets.pop(config["observational_dataset"]) + obs_cube = preprocess_aod_obs_dataset(obs_dataset) + + for model_dataset, group in datasets.items(): + # 'model_dataset' is the name of the model dataset. + # 'group' is a list of dictionaries containing metadata. 
+ logger.info("Processing data for %s", model_dataset) + logger.info(group) + + for attributes in group: + logger.info(attributes["filename"]) + + input_file = attributes["filename"] + provenance_record = get_provenance_record(input_file) + logger.info(provenance_record) + cube = iris.load_cube(input_file) + + # Set up for analysis and plotting + seasons = ["DJF", "MAM", "JJA", "SON"] + + plot_file_prefix = (model_dataset + "_" + attributes["activity"] + + "_" + attributes["mip"] + "_" + + attributes["exp"] + "_" + + attributes["short_name"] + "_" + + str(attributes["start_year"]) + "_" + + str(attributes["end_year"]) + "_") + + # Analysis and plotting for model-obs comparison + figures, fig_scatter = aod_analyse(cube, + obs_cube, + seasons, + wavel=wavel) + + # Save the scatter plot + output_file = plot_file_prefix + "scatter" + output_path = get_plot_filename(output_file, config) + fig_scatter.savefig(output_path) + + # Save the contour plots + for ifig, seas_fig in enumerate(figures): + output_file = plot_file_prefix + seasons[ifig] + output_path = get_plot_filename(output_file, config) + seas_fig.savefig(output_path) + + +if __name__ == "__main__": + with run_diagnostic() as CONFIG: + main(CONFIG) diff --git a/esmvaltool/diag_scripts/arctic_ocean/arctic_ocean.py b/esmvaltool/diag_scripts/arctic_ocean/arctic_ocean.py new file mode 100644 index 0000000000..1cc71e2716 --- /dev/null +++ b/esmvaltool/diag_scripts/arctic_ocean/arctic_ocean.py @@ -0,0 +1,525 @@ +# -*- coding: utf-8 -*- +"""Script to calculate Arctic Ocean diagnostics. + +Description +----------- +The main focus of this diagnostics is evaluation of ocean components + of climate models in the Arctic Ocean, however most of the diagnostics + are implemented in a way that can be easily expanded to other parts + of the World Ocean. Most of the diagnostics aim at model comparison + to climatological data (PHC3), so we target historical CMIP simulations. + However scenario runs also can be analysed to have an impression + of how Arcti Ocean hydrography will chnage in the future. + +Author +------ +Nikolay Koldunov (MARUM/AWI, Germany) + +Project +------- +TRR181/APPLICATE + +Configuration options in recipe +------------------------------- +See documentation + +""" + +import itertools +import logging +import os +from collections import OrderedDict + +import cartopy.crs as ccrs +from matplotlib import cm +import numpy as np + +from esmvaltool.diag_scripts.arctic_ocean.getdata import (aw_core, hofm_data, + transect_data, + tsplot_data) +from esmvaltool.diag_scripts.arctic_ocean.plotting import ( + hofm_plot, plot2d_bias, plot2d_original_grid, plot_aw_core_stat, + plot_profile, transect_map, transect_plot, tsplot_plot) +from esmvaltool.diag_scripts.arctic_ocean.utils import ( + find_observations_name, get_clim_model_filenames, get_cmap, + get_fx_filenames, timmean) +from esmvaltool.diag_scripts.shared import run_diagnostic + + +logger = logging.getLogger(os.path.basename(__file__)) + + +def run_hofm_data(cfg): + """Extract data for Hovmoeller diagrams. + + Parameters + ---------- + cfg: dict + configuration dictionary ESMValTool format. + areacello_fx : dict + configuration dictionary with names of the areacello_fx + files associated to dataset names. + diagworkdir: str + path to the diagnostic work directory. 
+ """ + logger.info("The `hofm_data` is True, going \ + to extract monthly values for `hofm_regions`") + + logger.info("`hofm_vars` are: %s", cfg['hofm_vars']) + # doing the loop for every variable + for hofm_var in cfg['hofm_vars']: + logger.info("Processing %s", hofm_var) + # get dictionary with model names as key and path to the + # preprocessed file as a value + model_filenames = get_clim_model_filenames(cfg, hofm_var) + model_filenames = OrderedDict( + sorted(model_filenames.items(), key=lambda t: t[0])) + # loop over regions and models + for mmodel, region in itertools.product(model_filenames, + cfg['hofm_regions']): + # actual extraction of the data for specific model and region + hofm_data(cfg, model_filenames, mmodel, hofm_var, region) + + +def hofm_plot_params(cfg, hofm_var, var_number, observations): + """Prepeare configuration for Hovmoeller plot.""" + + model_filenames = get_clim_model_filenames(cfg, hofm_var) + model_filenames = OrderedDict( + sorted(model_filenames.items(), key=lambda t: t[0])) + # remove "model" that contain observations, + # since there will be no monthly data + model_filenames = model_filenames.copy() + if observations: + del model_filenames[observations] + # set the color map if not default + if cfg['hofm_cmap']: + cmap = get_cmap(cfg['hofm_cmap'][var_number]) + else: + cmap = get_cmap('Spectral_r') + # set the number of columns in the output figure + # if defined + if cfg['hofm_ncol']: + ncols = cfg['hofm_ncol'] + else: + ncols = 3 + # get the levels for plots of this variable + vmin, vmax, sstep, roundlimit = cfg['hofm_limits'][var_number] + plot_params = {} + plot_params['variable'] = hofm_var + plot_params['model_filenames'] = model_filenames + plot_params['cmap'] = cmap + plot_params['ncols'] = ncols + plot_params['levels'] = np.round(np.linspace(vmin, vmax, sstep), + roundlimit) + plot_params['observations'] = observations + + return plot_params + + +def run_hofm_plot(cfg, observations): + """Plot Hovmoeller diagrams for each variable. + + Parameters + ---------- + cfg: dict + configuration dictionary ESMValTool format. + observations: str + name of the observation data set + """ + # loop over variables + for var_number, hofm_var in enumerate(cfg['hofm_vars']): + plot_params = hofm_plot_params(cfg, hofm_var, var_number, observations) + + # loop over models and regions + for region in cfg['hofm_regions']: + logger.info("Plotting Hovmoeller: for Region: %s, Variable %s", + region, hofm_var) + plot_params['region'] = region + hofm_plot(cfg, plot_params) + + +def run_mean(cfg, observations): + """Create time mean. + + Parameters + ---------- + cfg: dict + configuration dictionary ESMValTool format. 
+    observations: str
+        name of the observation data set
+    """
+    # loop over variables
+    for hofm_var in cfg['hofm_vars']:
+        model_filenames = get_clim_model_filenames(
+            cfg,
+            hofm_var,
+        )
+        model_filenames = OrderedDict(
+            sorted(model_filenames.items(), key=lambda t: t[0]))
+        # loop over models
+        for model in model_filenames:
+            timmean(cfg,
+                    model_filenames,
+                    model,
+                    hofm_var,
+                    observations=observations)
+
+
+def plot_profile_params(cfg, hofm_var, observations):
+    """Prepare the configuration for the profile plot."""
+
+    model_filenames = get_clim_model_filenames(cfg, hofm_var)
+    model_filenames = OrderedDict(
+        sorted(model_filenames.items(), key=lambda t: t[0]))
+
+    plot_params = {}
+
+    plot_params['variable'] = hofm_var
+    plot_params['model_filenames'] = model_filenames
+    plot_params['cmap'] = cm.Set2
+    plot_params['dpi'] = 100
+    plot_params['observations'] = observations
+
+    return plot_params
+
+
+def run_profiles(cfg, observations):
+    """Plot average vertical profiles for regions.
+
+    Parameters
+    ----------
+    cfg: dict
+        configuration dictionary ESMValTool format.
+    observations: str
+        name of the observation data set
+    """
+    # loop over variables
+    for hofm_var in cfg['hofm_vars']:
+        plot_params = plot_profile_params(cfg, hofm_var, observations)
+        # loop over regions
+        for region in cfg['hofm_regions']:
+            plot_params['region'] = region
+            plot_profile(cfg, plot_params)
+
+
+def plot2d_params(cfg, plot2d_var, var_number):
+    """Prepare the configuration for plot2d."""
+
+    model_filenames = get_clim_model_filenames(cfg, plot2d_var)
+    model_filenames = OrderedDict(
+        sorted(model_filenames.items(), key=lambda t: t[0]))
+    # set the color map
+    if cfg['plot2d_cmap']:
+        cmap = get_cmap(cfg['plot2d_cmap'][var_number])
+    else:
+        cmap = get_cmap('Spectral_r')
+    # set the number of columns in the plot
+    if cfg['plot2d_ncol']:
+        ncols = cfg['plot2d_ncol']
+    else:
+        ncols = 4
+    # create color limits for the plot
+    vmin, vmax, sstep, roundlimit = cfg['plot2d_limits'][var_number]
+    # assemble the plot parameters
+    plot_params = {}
+    plot_params['variable'] = plot2d_var
+    plot_params['model_filenames'] = model_filenames
+    plot_params['cmap'] = cmap
+    plot_params['ncols'] = ncols
+    plot_params['levels'] = np.round(np.linspace(vmin, vmax, sstep),
+                                     roundlimit)
+    plot_params['dpi'] = 100
+    plot_params['explicit_depths'] = None
+    plot_params['projection'] = ccrs.NorthPolarStereo()
+    plot_params['bbox'] = (-180, 180, 60, 90)
+
+    return plot_params
+
+
+def run_plot2d(cfg):
+    """Plot 2d maps on the original grid.
+
+    Parameters
+    ----------
+    cfg: dict
+        configuration dictionary ESMValTool format.
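+
+    Uses the recipe options `plot2d_vars`, `plot2d_depths`, `plot2d_cmap`,
+    `plot2d_ncol` and `plot2d_limits` (see `plot2d_params`).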
+ """ + # loop over variables + for var_number, plot2d_var in enumerate(cfg['plot2d_vars']): + + plot_params = plot2d_params(cfg, plot2d_var, var_number) + + for depth in cfg['plot2d_depths']: + plot_params['depth'] = depth + plot2d_original_grid(cfg, plot_params) + + +def plot2d_bias_params(cfg, plot2d_bias_var, var_number, observations): + """Prepeare configuration for plot2d bias.""" + + model_filenames = get_clim_model_filenames(cfg, plot2d_bias_var) + model_filenames = OrderedDict( + sorted(model_filenames.items(), key=lambda t: t[0])) + # setup the color map + if cfg['plot2d_bias_cmap']: + cmap = get_cmap(cfg['plot2d_bias_cmap'][var_number]) + else: + cmap = get_cmap('Spectral_r') + # setup the number of columns + if cfg['plot2d_bias_ncol']: + ncols = cfg['plot2d_bias_ncol'] + else: + ncols = 3 + # setup color limits + vmin, vmax, sstep, roundlimit = cfg['plot2d_bias_limits'][var_number] + + plot_params = {} + plot_params['variable'] = plot2d_bias_var + plot_params['model_filenames'] = model_filenames + plot_params['cmap'] = cmap + plot_params['ncols'] = ncols + plot_params['levels'] = np.round(np.linspace(vmin, vmax, sstep), + roundlimit) + plot_params['dpi'] = 100 + plot_params['observations'] = observations + plot_params['projection'] = ccrs.NorthPolarStereo() + plot_params['bbox'] = (-180, 180, 60, 90) + return plot_params + + +def run_plot2d_bias(cfg, observations): + """Plot model biases over depth. + + Parameters + ---------- + cfg: dict + configuration dictionary ESMValTool format. + observations: str + name of the observation data set + """ + # loop over variables + for var_number, plot2d_bias_var in enumerate(cfg['plot2d_bias_vars']): + + plot_params = plot2d_bias_params(cfg, plot2d_bias_var, var_number, + observations) + + # loop over depths + for depth in cfg['plot2d_bias_depths']: + plot_params['depth'] = depth + plot2d_bias(cfg, plot_params) + + +def transect_plot_params(cfg, trans_var, var_number): + """Prepeare configuration for transect plot.""" + + model_filenames = get_clim_model_filenames(cfg, trans_var) + model_filenames = OrderedDict( + sorted(model_filenames.items(), key=lambda t: t[0])) + # loop over regions + for mmodel, region in itertools.product(model_filenames, + cfg['transects_regions']): + # ploting a transect + transect_data(cfg, mmodel, trans_var, region) + # setup a color map + if cfg['transects_cmap']: + cmap = get_cmap(cfg['transects_cmap'][var_number]) + else: + cmap = get_cmap('Spectral_r') + # setup number of columns + if cfg['transects_ncol']: + ncols = cfg['transects_ncol'] + else: + ncols = 3 + # setup color limits + vmin, vmax, sstep, roundlimit = cfg['transects_limits'][var_number] + + plot_params = {} + plot_params['variable'] = trans_var + plot_params['model_filenames'] = model_filenames + plot_params['cmap'] = cmap + plot_params['ncols'] = ncols + plot_params['levels'] = np.round(np.linspace(vmin, vmax, sstep), + roundlimit) + plot_params['dpi'] = 100 + plot_params['projection'] = ccrs.NorthPolarStereo() + plot_params['bbox'] = (-180, 180, 60, 90) + + return plot_params + + +def run_transects(cfg): + """Plot transects. + + Parameters + ---------- + cfg: dict + configuration dictionary ESMValTool format. 
+ """ + # First plot the map woth transect points for each "region" + for region in cfg['transects_regions']: + transect_map(cfg, + region, + projection=ccrs.NorthPolarStereo(), + bbox=[-180, 180, 60, 90], + mult=2) + # loop over variables + for var_number, trans_var in enumerate(cfg['transects_vars']): + + plot_params = transect_plot_params(cfg, trans_var, var_number) + + # loop over regions + for region in cfg['transects_regions']: + plot_params['region'] = region + transect_plot(cfg, plot_params) + + +def run_aw_core(cfg): + """Calculate depth and temperature of the Atlantic Water core. + + Parameters + ---------- + cfg: dict + configuration dictionary ESMValTool format. + """ + model_filenames = get_clim_model_filenames(cfg, 'thetao') + model_filenames = OrderedDict( + sorted(model_filenames.items(), key=lambda t: t[0])) + aw_core_parameters = aw_core(model_filenames, cfg['work_dir'], 'EB', + 'thetao') + plot_aw_core_stat(aw_core_parameters, cfg['plot_dir']) + return aw_core_parameters + + +def run_aw_core_2d(cfg, aw_core_parameters): + """Plot temperature spatial distribution at AW core depth. + + Parameters + ---------- + cfg: dict + configuration dictionary ESMValTool format. + aw_core_parameters: dict + dictionary that contain AW core parameters generated + by run_aw_core function. + """ + model_filenames = get_clim_model_filenames(cfg, 'thetao') + model_filenames = OrderedDict( + sorted(model_filenames.items(), key=lambda t: t[0])) + aw_core_parameters = aw_core(model_filenames, cfg['work_dir'], 'EB', + 'thetao') + # this is now just using plot2d_original_grid with + # additional `explicit_depths` parameter + plot_params = {} + plot_params['variable'] = 'thetao' + plot_params['model_filenames'] = model_filenames + plot_params['depth'] = 0 + plot_params['cmap'] = cm.Spectral_r + plot_params['ncols'] = 4 + plot_params['levels'] = np.round(np.linspace(-2, 2.3, 41), 1) + plot_params['dpi'] = 100 + plot_params['explicit_depths'] = aw_core_parameters + plot_params['projection'] = ccrs.NorthPolarStereo() + plot_params['bbox'] = (-180, 180, 60, 90) + + plot2d_original_grid(cfg, plot_params) + + +def tsdiag_plot_parameters(cfg): + """Prepeare configuration for TS plots.""" + + # get the dictionary with model file names + model_filenames = get_clim_model_filenames(cfg, 'thetao') + model_filenames = OrderedDict( + sorted(model_filenames.items(), key=lambda t: t[0])) + # setting the number of columns for the plot + if cfg['tsdiag_ncol']: + ncols = cfg['tsdiag_ncol'] + else: + ncols = 3 + + plot_params = {} + plot_params['model_filenames'] = model_filenames + plot_params['ncols'] = ncols + plot_params['cmap'] = cm.Set1 + return plot_params + + +def run_tsdiag(cfg, observations): + """Plot TS diagrams. + + Parameters + ---------- + cfg: dict + configuration dictionary ESMValTool format. + observations: str + name of the observation data set + """ + plot_params = tsdiag_plot_parameters(cfg) + # loop over models and regions + for mmodel, region in itertools.product(plot_params['model_filenames'], + cfg['tsdiag_regions']): + # this function will generate files with T and S points + # selected from the region untill `tsdiag_depth` for + # every model. 
+        tsplot_data(cfg, mmodel, region, observations=observations)
+
+    # actually plot the TS diagrams
+    for region in cfg['tsdiag_regions']:
+        plot_params['region'] = region
+        tsplot_plot(cfg, plot_params)
+
+
+def main(cfg):
+    """Run all Arctic Ocean diagnostics for the input models."""
+    # for debugging, save the configuration in a pickle file
+    # with open('cfg_NK.joblib', 'wb') as handle:
+    #     pickle.dump(cfg, handle, protocol=pickle.HIGHEST_PROTOCOL)
+
+    logger.info("Starting APPLICATE/TRR Arctic Ocean diagnostics")
+
+    # find the name of the observational dataset
+    observations = find_observations_name(cfg)
+    logger.info("Name of the observations: %s", observations)
+
+    # get the names of the fx files (for now they are the same for
+    # all variables, which is why "thetao" is hardcoded)
+    areacello_fx = get_fx_filenames(cfg, 'areacello')
+    logger.info("areacello_fx files: %s", areacello_fx)
+
+    # Extract data for Hovmoeller diagrams
+    run_hofm_data(cfg)
+
+    # Plot Hovmoeller diagrams for each variable
+    run_hofm_plot(cfg, observations)
+
+    # Create the time mean
+    run_mean(cfg, observations)
+
+    # Plot average vertical profiles for regions
+    run_profiles(cfg, observations)
+
+    # Plot 2d maps on the original grid
+    run_plot2d(cfg)
+
+    # Plot model biases over depth
+    run_plot2d_bias(cfg, observations)
+
+    # Plot transects
+    run_transects(cfg)
+
+    # Calculate depth and temperature of the Atlantic Water core
+    # and make plots.
+    aw_core_parameters = run_aw_core(cfg)
+
+    # Plot the temperature spatial distribution at the depth of the
+    # Atlantic Water core in different models
+    run_aw_core_2d(cfg, aw_core_parameters)
+
+    # Plot TS diagrams
+    run_tsdiag(cfg, observations)
+
+
+if __name__ == '__main__':
+
+    with run_diagnostic() as config:
+        main(config)
diff --git a/esmvaltool/diag_scripts/arctic_ocean/getdata.py b/esmvaltool/diag_scripts/arctic_ocean/getdata.py
new file mode 100644
index 0000000000..d62a9e7721
--- /dev/null
+++ b/esmvaltool/diag_scripts/arctic_ocean/getdata.py
@@ -0,0 +1,522 @@
+# -*- coding: utf-8 -*-
+"""Part of the ESMValTool Arctic Ocean diagnostics.
+
+This module contains functions for extracting the data
+from netCDF files and preparing them for plotting.
+"""
+try:
+    import esmpy
+except ImportError as exc:
+    # Prior to v8.4.0, ``esmpy`` could be imported as ``ESMF``.
+    try:
+        import ESMF as esmpy  # noqa: N811
+    except ImportError:
+        raise exc
+import logging
+import os
+import numpy as np
+from netCDF4 import Dataset, num2date
+
+from esmvaltool.diag_scripts.arctic_ocean.regions import (hofm_regions,
+                                                          transect_points)
+from esmvaltool.diag_scripts.arctic_ocean.utils import (genfilename,
+                                                        point_distance,
+                                                        get_fx_filenames,
+                                                        get_series_lenght,
+                                                        get_provenance_record)
+from esmvaltool.diag_scripts.shared import ProvenanceLogger
+
+logger = logging.getLogger(os.path.basename(__file__))
+
+
+def load_meta(datapath, fxpath=None):
+    """Load metadata of the netCDF file.
+
+    Parameters
+    ----------
+    datapath: str
+        path to the netCDF file with data
+    fxpath: str
+        path to the netCDF file with fx files
+
+    Returns
+    -------
+    metadata: dict
+        dictionary with the following keys:
+        datafile: instance of netCDF4 Dataset
+            points to the open file
+        lon2d: numpy array
+            two-dimensional longitude information
+        lat2d: numpy array
+            two-dimensional latitude information
+        lev: numpy array
+            depths of the model levels
+        time: numpy array
+            dates converted to datetime objects
+        areacello: numpy array
+            values of areacello (None if no `fxpath` is given)
+    """
+    datafile = Dataset(datapath)
+
+    if fxpath:
+        datafile_area = Dataset(fxpath)
+        areacello = datafile_area.variables['areacello'][:]
+    else:
+        areacello = None
+
+    lon = datafile.variables['lon'][:]
+    lat = datafile.variables['lat'][:]
+    lev = datafile.variables['lev'][:]
+    time = num2date(datafile.variables['time'][:],
+                    datafile.variables['time'].units)
+    # hack for HadGEM2-ES
+    lat[lat > 90] = 90
+
+    if lon.ndim == 2:
+        lon2d, lat2d = lon, lat
+    elif lon.ndim == 1:
+        lon2d, lat2d = np.meshgrid(lon, lat)
+
+    metadata = {}
+    metadata['datafile'] = datafile
+    metadata['lon2d'] = lon2d
+    metadata['lat2d'] = lat2d
+    metadata['lev'] = lev
+    metadata['time'] = time
+    metadata['areacello'] = areacello
+    return metadata
+
+
+def hofm_extract_region(metadata, cmor_var, indexes, level, time=0):
+    """Calculate the area-weighted mean over the region."""
+    # fix for climatology
+    if metadata['datafile'].variables[cmor_var].ndim < 4:
+        level_pp = metadata['datafile'].variables[cmor_var][level, :, :]
+    else:
+        level_pp = metadata['datafile'].variables[cmor_var][time, level, :, :]
+    if not isinstance(level_pp, np.ma.MaskedArray):
+        level_pp = np.ma.masked_equal(level_pp, 0)
+    data_mask = level_pp[indexes[0], indexes[1]].mask
+    area_masked = np.ma.masked_where(
+        data_mask, metadata['areacello'][indexes[0], indexes[1]])
+    result = (area_masked *
+              level_pp[indexes[0], indexes[1]]).sum() / area_masked.sum()
+    return result
+
+
+def hofm_save_data(cfg, data_info, oce_hofm):
+    """Save data for Hovmoeller diagrams."""
+
+    ofiles = {}
+    ofiles['ofilename'] = genfilename(**data_info, data_type='hofm')
+    ofiles['ofilename_levels'] = genfilename(**data_info, data_type='levels')
+    ofiles['ofilename_time'] = genfilename(**data_info, data_type='time')
+
+    np.save(ofiles['ofilename'], oce_hofm)
+    provenance_record = get_provenance_record(data_info, 'hofm', 'npy')
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(ofiles['ofilename'] + '.npy', provenance_record)
+
+    if isinstance(data_info['levels'], np.ma.core.MaskedArray):
+        np.save(ofiles['ofilename_levels'],
+                data_info['levels'][0:data_info['lev_limit']].filled())
+    else:
+        np.save(ofiles['ofilename_levels'],
+                data_info['levels'][0:data_info['lev_limit']])
+    provenance_record = get_provenance_record(data_info, 'lev', 'npy')
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(ofiles['ofilename_levels'] + '.npy',
+                              provenance_record)
+
+    np.save(ofiles['ofilename_time'], data_info['time'])
+    provenance_record = get_provenance_record(data_info, 'time', 'npy')
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(ofiles['ofilename_time'] + '.npy',
+                              provenance_record)
+
+
+def hofm_data(cfg, model_filenames, mmodel, cmor_var, region):
+    """Extract data for Hovmoeller diagrams from monthly values.
+
+    Saves the data to files in the diagnostic work directory.
+
+    Parameters
+    ----------
+    cfg: dict
+        configuration dictionary ESMValTool format;
+        `hofm_depth` sets the maximum depth the diagrams should go to.
+    model_filenames: OrderedDict
+        OrderedDict with model names as keys and input files as values.
+    mmodel: str
+        model name that will be processed.
+    cmor_var: str
+        name of the CMOR variable
+    region: str
+        name of the region predefined in the `hofm_regions` function.
+
+    Returns
+    -------
+    None
+    """
+    logger.info("Extract %s data for %s, region %s", cmor_var, mmodel, region)
+    areacello_fx = get_fx_filenames(cfg, 'areacello')
+    metadata = load_meta(datapath=model_filenames[mmodel],
+                         fxpath=areacello_fx[mmodel])
+
+    lev_limit = metadata['lev'][
+        metadata['lev'] <= cfg['hofm_depth']].shape[0] + 1
+
+    indexes = hofm_regions(region, metadata['lon2d'], metadata['lat2d'])
+
+    series_lenght = get_series_lenght(metadata['datafile'], cmor_var)
+
+    oce_hofm = np.zeros((metadata['lev'][0:lev_limit].shape[0],
+                         series_lenght))
+    for mon in range(series_lenght):
+        for ind, _ in enumerate(metadata['lev'][0:lev_limit]):
+            oce_hofm[ind, mon] = hofm_extract_region(metadata, cmor_var,
+                                                     indexes, ind, mon)
+    data_info = {}
+    data_info['basedir'] = cfg['work_dir']
+    data_info['variable'] = cmor_var
+    data_info['mmodel'] = mmodel
+    data_info['region'] = region
+    data_info['time'] = metadata['time']
+    data_info['levels'] = metadata['lev']
+    data_info['lev_limit'] = lev_limit
+    data_info['ori_file'] = model_filenames[mmodel]
+    data_info['areacello'] = areacello_fx[mmodel]
+
+    hofm_save_data(cfg, data_info, oce_hofm)
+
+    metadata['datafile'].close()
+
+
+def transect_level(datafile, cmor_var, level, grid, locstream):
+    """Interpolate one depth level of the transect onto the points."""
+
+    sourcefield = esmpy.Field(
+        grid,
+        staggerloc=esmpy.StaggerLoc.CENTER,
+        name='MPI',
+    )
+    # load model data
+    model_data = datafile.variables[cmor_var][0, level, :, :]
+
+    # ESMF does not understand masked arrays, so fill them
+    if isinstance(model_data, np.ma.core.MaskedArray):
+        sourcefield.data[...] = model_data.filled(0).T
+    else:
+        sourcefield.data[...]
= model_data.T
+
+    # create the destination field we are going to interpolate TO
+    dstfield = esmpy.Field(locstream, name='dstfield')
+    dstfield.data[:] = 0.0
+
+    # create an object to regrid data
+    # from the source to the destination field
+    dst_mask_values = np.array([0])
+
+    regrid = esmpy.Regrid(
+        sourcefield,
+        dstfield,
+        regrid_method=esmpy.RegridMethod.NEAREST_STOD,
+        unmapped_action=esmpy.UnmappedAction.IGNORE,
+        dst_mask_values=dst_mask_values)
+
+    # do the regridding from the source to the destination field
+    dstfield = regrid(sourcefield, dstfield)
+    return dstfield
+
+
+def transect_save_data(cfg, data_info, secfield, lon_s4new, lat_s4new):
+    """Save data for transects."""
+
+    ofiles = {}
+    ofiles['ofilename'] = genfilename(**data_info, data_type='transect')
+    ofiles['ofilename_depth'] = genfilename(
+        data_info['basedir'], 'depth', data_info['mmodel'],
+        data_info['region'], 'transect_' + data_info['variable'])
+    ofiles['ofilename_dist'] = genfilename(data_info['basedir'], 'distance',
+                                           data_info['mmodel'],
+                                           data_info['region'],
+                                           'transect_' + data_info['variable'])
+
+    np.save(ofiles['ofilename'], secfield)
+    logger.info(ofiles['ofilename'])
+    provenance_record = get_provenance_record(data_info, 'transect', 'npy')
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(ofiles['ofilename'] + '.npy', provenance_record)
+
+    # we have to fill masked arrays before saving
+    if isinstance(data_info['levels'], np.ma.core.MaskedArray):
+        np.save(ofiles['ofilename_depth'], data_info['levels'].filled())
+    else:
+        np.save(ofiles['ofilename_depth'], data_info['levels'])
+    logger.info(ofiles['ofilename_depth'])
+    provenance_record = get_provenance_record(data_info, 'levels', 'npy')
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(ofiles['ofilename_depth'] + '.npy',
+                              provenance_record)
+
+    np.save(ofiles['ofilename_dist'], point_distance(lon_s4new, lat_s4new))
+    provenance_record = get_provenance_record(data_info, 'distance', 'npy')
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(ofiles['ofilename_dist'] + '.npy',
+                              provenance_record)
+    logger.info(ofiles['ofilename_dist'])
+
+
+def transect_data(cfg, mmodel, cmor_var, region, mult=2):
+    """Extract data for transects (defined in regions.transect_points).
+
+    Parameters
+    ----------
+    cfg: dict
+        configuration dictionary ESMValTool format.
+    mmodel: str
+        model name that will be processed.
+    cmor_var: str
+        name of the CMOR variable
+    region: str
+        name of the region predefined in the `transect_points` function.
+    mult: integer
+        multiplier for the number of points in the transect.
+        Can be used to increase transect resolution.
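+
+    The transect values are interpolated onto the points returned by
+    `transect_points` using ESMF nearest-neighbour regridding (see
+    `transect_level`).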
+ """ + logger.info("Extract %s transect data for %s, region %s", cmor_var, + mmodel, region) + # get the path to preprocessed file + ifilename = genfilename(cfg['work_dir'], + cmor_var, + mmodel, + data_type='timmean', + extension='.nc') + # open with netCDF4 + datafile = Dataset(ifilename) + # open with ESMF/esmpy + grid = esmpy.Grid(filename=ifilename, filetype=esmpy.FileFormat.GRIDSPEC) + + # get depth of the levels + lev = datafile.variables['lev'][:] + + # indexesi, indexesj = hofm_regions(region, lon2d, lat2d) + lon_s4new, lat_s4new = transect_points(region, mult=mult) + + # masking true + # domask = True + + # create instans of the location stream (set of points) + locstream = esmpy.LocStream(lon_s4new.shape[0], + name="Atlantic Inflow Section", + coord_sys=esmpy.CoordSys.SPH_DEG) + + # appoint the section locations + locstream["ESMF:Lon"] = lon_s4new + locstream["ESMF:Lat"] = lat_s4new + # if domask: + locstream["ESMF:Mask"] = np.array(np.ones(lon_s4new.shape[0]), + dtype=np.int32) + # initialise array for the section + secfield = np.zeros( + (lon_s4new.shape[0], datafile.variables[cmor_var].shape[1])) + + # loop over depth levels + for level in range(0, datafile.variables[cmor_var].shape[1]): + secfield[:, level] = transect_level(datafile, cmor_var, level, grid, + locstream).data + data_info = {} + data_info['basedir'] = cfg['work_dir'] + data_info['variable'] = cmor_var + data_info['mmodel'] = mmodel + data_info['region'] = region + data_info['levels'] = lev + data_info['ori_file'] = ifilename + data_info['areacello'] = None + + transect_save_data(cfg, data_info, secfield, lon_s4new, lat_s4new) + + datafile.close() + + +def tsplot_extract_data(mmodel, observations, metadata_t, metadata_s, ind): + """Extracts level data from the files for TS plots.""" + + if mmodel != observations: + level_pp = metadata_t['datafile'].variables['thetao'][0, ind, :, :] + level_pp_s = metadata_s['datafile'].variables['so'][0, ind, :, :] + else: + level_pp = metadata_t['datafile'].variables['thetao'][0, ind, :, :] + level_pp_s = metadata_s['datafile'].variables['so'][0, ind, :, :] + # This is fix fo make models with 0 as missing values work, + # should be fixed in fixes that do not work for now in the new backend + if not isinstance(level_pp, np.ma.MaskedArray): + level_pp = np.ma.masked_equal(level_pp, 0) + level_pp_s = np.ma.masked_equal(level_pp_s, 0) + return level_pp, level_pp_s + + +def tsplot_save_data(cfg, data_info, temp, salt, depth_model): + """Save data for TS plots.""" + + ofiles = {} + data_info['variable'] = 'thetao' + ofiles['ofilename_t'] = genfilename(**data_info, data_type='tsplot') + np.save(ofiles['ofilename_t'], temp) + provenance_record = get_provenance_record(data_info, 'tsplot', 'npy') + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(ofiles['ofilename_t'] + '.npy', + provenance_record) + + data_info['variable'] = 'so' + ofiles['ofilename_s'] = genfilename(**data_info, data_type='tsplot') + np.save(ofiles['ofilename_s'], salt) + provenance_record = get_provenance_record(data_info, 'tsplot', 'npy') + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(ofiles['ofilename_s'] + '.npy', + provenance_record) + + data_info['variable'] = 'depth' + ofiles['ofilename_depth'] = genfilename(**data_info, data_type='tsplot') + np.save(ofiles['ofilename_depth'], depth_model) + provenance_record = get_provenance_record(data_info, 'tsplot', 'npy') + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(ofiles['ofilename_depth'] 
+
+
+def tsplot_data(cfg, mmodel, region, observations='PHC'):
+    """Extract data for TS plots from one specific model.
+
+    The maximum depth of the TS data is taken from `cfg['tsdiag_depth']`.
+
+    Parameters
+    ----------
+    cfg: dict
+        configuration dictionary ESMValTool format.
+    mmodel: str
+        model name
+    region: str
+        region as defined in `hofm_regions`
+    observations: str
+        name of the observations
+
+    Returns
+    -------
+    None
+    """
+    logger.info("Extract TS data for %s, region %s", mmodel, region)
+
+    # generate input names for T and S. The files are generated by the
+    # `timmean` function.
+    ifilename_t = genfilename(cfg['work_dir'],
+                              'thetao',
+                              mmodel,
+                              data_type='timmean',
+                              extension='.nc')
+    ifilename_s = genfilename(cfg['work_dir'],
+                              'so',
+                              mmodel,
+                              data_type='timmean',
+                              extension='.nc')
+    # get the metadata for T and S
+    metadata_t = load_meta(datapath=ifilename_t, fxpath=None)
+    metadata_s = load_meta(datapath=ifilename_s, fxpath=None)
+
+    # find the index of the maximum depth
+    lev_limit = metadata_t['lev'][
+        metadata_t['lev'] <= cfg['tsdiag_depth']].shape[0] + 1
+    # find indexes of the data that are in the region
+    indexes = hofm_regions(region, metadata_t['lon2d'], metadata_t['lat2d'])
+
+    temp = np.array([])
+    salt = np.array([])
+    depth_model = np.array([])
+    # loop over depths
+    for ind, depth in enumerate(metadata_t['lev'][0:lev_limit]):
+        level_pp, level_pp_s = tsplot_extract_data(mmodel, observations,
+                                                   metadata_t, metadata_s, ind)
+        # select individual points for T, S and depth
+        temp = np.hstack((temp, level_pp[indexes[0], indexes[1]].compressed()))
+        salt = np.hstack(
+            (salt, level_pp_s[indexes[0], indexes[1]].compressed()))
+        depth_temp = np.zeros_like(
+            level_pp[indexes[0], indexes[1]].compressed())
+        depth_temp[:] = depth
+        depth_model = np.hstack((depth_model, depth_temp))
+
+    # Save the data to individual files
+    data_info = {}
+    data_info['basedir'] = cfg['work_dir']
+    data_info['mmodel'] = mmodel
+    data_info['region'] = region
+    data_info['levels'] = metadata_t['lev']
+    data_info['ori_file'] = [ifilename_t, ifilename_s]
+    data_info['areacello'] = None
+    tsplot_save_data(cfg, data_info, temp, salt, depth_model)
+
+    metadata_t['datafile'].close()
+    metadata_s['datafile'].close()
+
+
+def aw_core(model_filenames, diagworkdir, region, cmor_var):
+    """Calculate the Atlantic Water (AW) core depth for the region.
+
+    The AW core is defined as the water temperature maximum
+    between 200 and 1000 meters. This can in the future be generalised
+    to find the depth of specific water masses.
+
+    The function relies on the data for the mean vertical profiles,
+    so this information should be available.
+
+    Parameters
+    ----------
+    model_filenames: OrderedDict
+        OrderedDict with model names as keys and input files as values.
+    diagworkdir: str
+        path to work directory.
+    region: str
+        one of the regions from `hofm_regions`;
+        the data from the mean vertical profiles should be available.
+    cmor_var: str
+        name of the variable.
+
+    Returns
+    -------
+    aw_core_parameters: dict
+        For each model there is the maximum temperature, the depth level
+        in the model, and the index of the depth level in the model.
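+
+        A sketch of the returned structure (model name and values are
+        illustrative only)::
+
+            {'MPI-ESM-LR': {'maxvalue': 0.97,
+                            'maxvalue_index': 14,
+                            'maxvalue_depth': 400.0}}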
+    """
+    logger.info("Calculate AW core statistics")
+    aw_core_parameters = {}
+
+    for mmodel in model_filenames:
+        aw_core_parameters[mmodel] = {}
+        logger.info("Plot profile %s data for %s, region %s", cmor_var,
+                    mmodel, region)
+        ifilename = genfilename(diagworkdir, cmor_var, mmodel, region, 'hofm',
+                                '.npy')
+        ifilename_levels = genfilename(diagworkdir, cmor_var, mmodel, region,
+                                       'levels', '.npy')
+
+        hofdata = np.load(ifilename, allow_pickle=True)
+        lev = np.load(ifilename_levels, allow_pickle=True)
+
+        # mean vertical profile and the location of its maximum
+        profile = hofdata.mean(axis=1)
+        maxvalue = np.max(profile[(lev >= 200) & (lev <= 1000)])
+        maxvalue_index = np.where(profile == maxvalue)[0][0]
+        maxvalue_depth = lev[maxvalue_index]
+
+        # convert from Kelvin to degC if needed
+        if maxvalue > 100:
+            maxvalue = maxvalue - 273.15
+
+        aw_core_parameters[mmodel]['maxvalue'] = maxvalue
+        aw_core_parameters[mmodel]['maxvalue_index'] = maxvalue_index
+        aw_core_parameters[mmodel]['maxvalue_depth'] = maxvalue_depth
+
+    return aw_core_parameters
diff --git a/esmvaltool/diag_scripts/arctic_ocean/interpolation.py b/esmvaltool/diag_scripts/arctic_ocean/interpolation.py
new file mode 100644
index 0000000000..5e294d99f1
--- /dev/null
+++ b/esmvaltool/diag_scripts/arctic_ocean/interpolation.py
@@ -0,0 +1,158 @@
+# -*- coding: utf-8 -*-
+"""Part of the ESMValTool Arctic Ocean diagnostics.
+
+This module contains functions for data interpolation.
+"""
+try:
+    import esmpy
+except ImportError as exc:
+    # Prior to v8.4.0, `esmpy` could be imported as `ESMF`.
+    try:
+        import ESMF as esmpy  # noqa: N811
+    except ImportError:
+        raise exc
+import logging
+import os
+import numpy as np
+# import pyresample
+from cartopy.util import add_cyclic_point
+# from netCDF4 import Dataset
+
+from esmvaltool.diag_scripts.arctic_ocean.getdata import load_meta
+
+logger = logging.getLogger(os.path.basename(__file__))
+
+
+def closest_depth(depths, depth):
+    """Find the closest depth.
+
+    From a vector of depths, finds the depth that is closest to the
+    desired one. Also returns the index of the corresponding level.
+    """
+    target_level = abs(abs(depths) - abs(depth)).argmin()
+    target_depth = depths[target_level]
+    logger.debug('target_depth: %s', target_depth)
+    return target_depth, target_level
+
+
+def interpolate_vert(depth_model, target_depth, data_model):
+    """Vertical linear interpolation.
+
+    Very simple linear interpolation of the model data to the
+    desired depth. It can't extrapolate, so the limitation is that the
+    model data should have at least one level shallower than (or equal
+    to) the target depth and one level deeper than it.
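+
+    For example, with model depths of [0, 100, 300] m and a target
+    depth of 150 m, the result is 0.75 * data(100 m) +
+    0.25 * data(300 m).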
+    """
+    # Simple vertical interpolation
+    dep_up = [z for z in abs(depth_model) if z <= target_depth][-1]
+    dep_lo = [z for z in abs(depth_model) if z > target_depth][0]
+    i_up = 1 - abs(target_depth - dep_up) / (dep_lo - dep_up)
+    i_lo = 1 - abs(target_depth - dep_lo) / (dep_lo - dep_up)
+
+    iz_up = abs(abs(depth_model) - abs(dep_up)).argmin()
+    iz_lo = abs(abs(depth_model) - abs(dep_lo)).argmin()
+
+    data_up = data_model[iz_up, :, :]
+    data_lo = data_model[iz_lo, :, :]
+    if not isinstance(data_up, np.ma.MaskedArray):
+        data_up = np.ma.masked_equal(data_up, 0)
+        data_lo = np.ma.masked_equal(data_lo, 0)
+    data = i_up * data_up
+    data = data + i_lo * data_lo
+    return data
+
+
+def weighting(distance):
+    """Weighting function for pyresample."""
+    weight = 1 / distance**2
+    return weight
+
+
+def define_esmf_field(ifile, data_onlevel, name):
+    """Define an ESMF field from a netCDF file."""
+    grid_obs = esmpy.Grid(filename=ifile, filetype=esmpy.FileFormat.GRIDSPEC)
+    mask_obs = grid_obs.add_item(esmpy.GridItem.MASK)
+    mask_obs[:] = data_onlevel.mask.astype('int').T
+    esmf_field = esmpy.Field(
+        grid_obs,
+        staggerloc=esmpy.StaggerLoc.CENTER,
+        name=name,
+    )
+    return esmf_field
+
+
+def add_esmf_cyclic(metadata_obs, data_onlevel, interpolated):
+    """Add cyclic points to the interpolated data."""
+    data_onlevel_cyc, lon_obs_cyc = add_cyclic_point(
+        data_onlevel, coord=metadata_obs['lon2d'][0, :])
+
+    lonc, latc = np.meshgrid(lon_obs_cyc, metadata_obs['lat2d'][:, 0])
+
+    interpolated_cyc, lon_obs_cyc = add_cyclic_point(
+        interpolated, coord=metadata_obs['lon2d'][0, :])
+    return lonc, latc, data_onlevel_cyc, interpolated_cyc
+
+
+def esmf_regriding(sourcefield, distfield, metadata_obs, data_onlev_obs):
+    """Use the ESMF fields to do the regridding."""
+    # define the regridder
+    regrid = esmpy.Regrid(
+        sourcefield,
+        distfield,
+        regrid_method=esmpy.RegridMethod.NEAREST_STOD,
+        # regrid_method=esmpy.RegridMethod.BILINEAR,
+        unmapped_action=esmpy.UnmappedAction.IGNORE,
+        dst_mask_values=np.array([1]),
+        src_mask_values=np.array([1]))
+    # the actual regridding
+    distfield = regrid(sourcefield, distfield)
+    # reshape the data and convert to a masked array
+    data_interpolated = distfield.data[:].T
+    data_interpolated = np.ma.masked_equal(data_interpolated, 0)
+    lonc, latc, data_onlevel_cyc, interpolated_cyc = add_esmf_cyclic(
+        metadata_obs, data_onlev_obs, data_interpolated)
+    return lonc, latc, data_onlevel_cyc, interpolated_cyc
+
+
+def interpolate_esmf(obs_file, mod_file, depth, cmor_var):
+    """The 2d interpolation with ESMF.
+
+    Parameters
+    ----------
+    obs_file: str
+        path to the file with observations/climatology;
+        it will be used to extract the grid to interpolate on to.
+    mod_file: str
+        path to the file with model data.
+    depth: int
+        depth to interpolate to. First the closest depth in the
+        observations is selected, and then the model data are
+        vertically interpolated to it.
+    cmor_var: str
+        name of the variable.
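+
+    Returns
+    -------
+    lonc, latc: 2d numpy arrays
+        coordinates of the observation grid with a cyclic point added.
+    target_depth: float
+        depth of the observation level that was actually used.
+    data_onlev_obs_cyc: 2d numpy array
+        observational data on the level, with a cyclic point added.
+    data_interpolated_cyc: 2d numpy array
+        model data interpolated to the observation grid, with a cyclic
+        point added.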
+    """
+    metadata_obs = load_meta(obs_file, fxpath=None)
+    metadata_mod = load_meta(mod_file, fxpath=None)
+
+    data_obs = metadata_obs['datafile'].variables[cmor_var][:]
+    data_model = metadata_mod['datafile'].variables[cmor_var][:]
+
+    # Select the depth in the climatology that is closest to the desired one
+    target_depth, level_depth = closest_depth(metadata_obs['lev'], depth)
+
+    # climatology and model data on the level
+    data_onlev_obs = data_obs[0, level_depth, :, :]
+    data_onlev_mod = interpolate_vert(metadata_mod['lev'], target_depth,
+                                      data_model[0, :, :, :])
+
+    # prepare the interpolation fields
+    distfield = define_esmf_field(obs_file, data_onlev_obs, 'OBS')
+    distfield.data[:] = 0.0
+
+    sourcefield = define_esmf_field(mod_file, data_onlev_mod, 'Model')
+    sourcefield.data[...] = data_onlev_mod.T
+
+    lonc, latc, data_onlev_obs_cyc, data_interpolated_cyc = esmf_regriding(
+        sourcefield, distfield, metadata_obs, data_onlev_obs)
+
+    return lonc, latc, target_depth, data_onlev_obs_cyc, data_interpolated_cyc
diff --git a/esmvaltool/diag_scripts/arctic_ocean/plotting.py b/esmvaltool/diag_scripts/arctic_ocean/plotting.py
new file mode 100644
index 0000000000..c65f62e08d
--- /dev/null
+++ b/esmvaltool/diag_scripts/arctic_ocean/plotting.py
@@ -0,0 +1,975 @@
+# -*- coding: utf-8 -*-
+"""Part of the ESMValTool Arctic Ocean diagnostics.
+
+This module contains functions for plotting of the results.
+"""
+import logging
+import math
+import os
+import cartopy.crs as ccrs
+import cartopy.feature as cfeature
+import cmocean.cm as cmo
+from matplotlib import cm
+import matplotlib.pylab as plt
+import numpy as np
+import pandas as pd
+from netCDF4 import Dataset
+
+from esmvaltool.diag_scripts.arctic_ocean.getdata import (load_meta,
+                                                          transect_points)
+from esmvaltool.diag_scripts.arctic_ocean.interpolation import (
+    closest_depth, interpolate_esmf)
+from esmvaltool.diag_scripts.arctic_ocean.utils import (dens_back, genfilename,
+                                                        point_distance,
+                                                        get_provenance_record)
+from esmvaltool.diag_scripts.shared._base import (ProvenanceLogger)
+
+logger = logging.getLogger(os.path.basename(__file__))
+
+
+def create_plot(model_filenames, ncols=3, projection=None, nplots_increment=0):
+    """Create the base figure for a multipanel plot.
+
+    Creates a matplotlib figure and the set of axes that correspond to
+    the number of models that should be plotted.
+
+    Parameters
+    ----------
+    model_filenames: OrderedDict
+        OrderedDict with model names as keys and input files as values.
+    ncols: int
+        Number of columns in the plot. The number of rows
+        will be calculated automatically.
+    projection: cartopy projection instance
+    nplots_increment: int
+        allows increasing or decreasing the number of plots.
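+
+        For example, three models with the default ``ncols=3`` and
+        ``nplots_increment=1`` yield a 2 x 3 grid of axes with four
+        panels used; the calling functions delete the unused axes.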
+
+    Returns
+    -------
+    fig: matplotlib figure
+    ax: list with matplotlib axes, flattened
+    """
+    # Calculate the number of plots on the figure
+    nplots = len(model_filenames) + nplots_increment
+    ncols = float(ncols)
+    nrows = math.ceil(nplots / float(ncols))
+    ncols, nrows = int(ncols), int(nrows)
+
+    # different projections will have to have different
+    # coefficients for creating a good-looking figsize;
+    # the current numbers work well with NorthPolarStereo
+    # and PlateCarree
+    if projection:
+        figsize = (5 * ncols, 1.55 * nrows * ncols)
+        figure, axis = plt.subplots(nrows,
+                                    ncols,
+                                    figsize=figsize,
+                                    subplot_kw=dict(projection=projection),
+                                    constrained_layout=True)
+    # this works well for regular plots
+    else:
+        figsize = (10 * ncols, 2.5 * nrows * ncols)
+        figure, axis = plt.subplots(nrows, ncols, figsize=figsize)
+    # if there is more than one axis, flatten the array;
+    # this way it is easier to handle.
+    if isinstance(axis, np.ndarray):
+        axis = axis.flatten()
+    # if only one axis is created, wrap it in a list.
+    else:
+        axis = [axis]
+    return figure, axis
+
+
+def label_and_conversion(cmor_var, data):
+    """Convert the data if needed.
+
+    Also returns a formatted version of the colorbar label.
+
+    Parameters
+    ----------
+    cmor_var: str
+        name of the cmor variable
+    data: numpy array
+        array with the data
+
+    Returns
+    -------
+    cb_label: str
+        formatted units for the cmor_var
+    data: numpy array
+        data, converted if needed.
+    """
+    if cmor_var == 'thetao':
+        # Data in K (CMIP5) are converted to degC (CMIP6 units);
+        # data already in degC are left unchanged.
+        if data.min() >= 100:
+            data = data - 273.15
+        cb_label = r'$^{\circ}$C'
+    elif cmor_var == 'so':
+        cb_label = 'psu'
+    return cb_label, data
+
+
+def year_ticks(series_lenght, time):
+    """Create tick marks with year values."""
+    ygap = int((np.round(series_lenght / 12.) / 5) * 12)
+    year_indexes = list(range(series_lenght)[::ygap])
+    year_value = []
+    for index_year in year_indexes:
+        year_value.append(time[index_year].year)
+    return year_indexes, year_value
+
+
+def hofm_plot(cfg, plot_params):
+    """Plot a Hovmoeller diagram from the data in the work directory.
+
+    Parameters
+    ----------
+    cfg: dict
+        configuration dictionary ESMValTool format.
+        The `hofm_depth` key sets the maximum depth level the
+        Hovmoeller diagrams should go to.
+    plot_params: dict
+        dictionary with plotting parameters. The keys used here are:
+
+        - model_filenames: OrderedDict with model names as keys and
+          input files as values.
+        - variable: name of the CMOR variable.
+        - region: name of the region predefined in the `hofm_regions`
+          function.
+        - levels: levels for the contour plot.
+        - ncols: number of columns in the resulting plot (rows are
+          calculated from the total number of plots).
+        - cmap: matplotlib colormap object.
+        - observations: name of the dataset with observations.
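+
+        A minimal sketch of such a dictionary (the values are
+        illustrative only)::
+
+            plot_params = {'model_filenames': model_filenames,
+                           'variable': 'thetao',
+                           'region': 'EB',
+                           'levels': np.round(np.linspace(-2, 2.3, 41), 1),
+                           'ncols': 3,
+                           'cmap': cmo.thermal,
+                           'observations': 'PHC'}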
+
+    Returns
+    -------
+    None
+    """
+    # create a basis for the multi-panel figure
+    figure, axis = create_plot(plot_params['model_filenames'],
+                               ncols=plot_params['ncols'])
+
+    # plot data on the figure, axis by axis
+    index = None
+    for index, mmodel in enumerate(plot_params['model_filenames']):
+        logger.info("Plot %s data for %s, region %s", plot_params['variable'],
+                    mmodel, plot_params['region'])
+        # generate input filenames for the data
+        # prepared by the `hofm_data` function
+        ifilename = genfilename(cfg['work_dir'], plot_params['variable'],
+                                mmodel, plot_params['region'], 'hofm', '.npy')
+        ifilename_levels = genfilename(cfg['work_dir'],
+                                       plot_params['variable'], mmodel,
+                                       plot_params['region'], 'levels', '.npy')
+        ifilename_time = genfilename(cfg['work_dir'], plot_params['variable'],
+                                     mmodel, plot_params['region'], 'time',
+                                     '.npy')
+        # load the data
+        hofdata = np.load(ifilename, allow_pickle=True)
+        lev = np.load(ifilename_levels, allow_pickle=True)
+        time = np.load(ifilename_time, allow_pickle=True)
+
+        # convert the data if needed and get labels for the colorbars
+        cb_label, hofdata = label_and_conversion(plot_params['variable'],
+                                                 hofdata)
+
+        # find the index of the model level closest to `cfg['hofm_depth']`
+        # and add 1, to make the plot look better
+        lev_limit = lev[lev <= cfg['hofm_depth']].shape[0] + 1
+
+        # get the length of the time series
+        series_lenght = time.shape[0]
+
+        # create 2d arrays with coordinates of time and depths
+        months, depth = np.meshgrid(range(series_lenght), lev[0:lev_limit])
+
+        # plot an image for the model on its axis (axis[index])
+        image = axis[index].contourf(months,
+                                     depth,
+                                     hofdata,
+                                     cmap=plot_params['cmap'],
+                                     levels=plot_params['levels'],
+                                     extend='both')
+        # Generate tick marks with years that look OK
+        year_indexes, year_value = year_ticks(series_lenght, time)
+
+        # set properties of the axis
+        axis[index].set_xticks(year_indexes)
+        axis[index].set_xticklabels(year_value, size=15)
+        axis[index].set_title(mmodel, size=20)
+        axis[index].set_ylabel('m', size=15, rotation='horizontal')
+        axis[index].invert_yaxis()
+        axis[index].tick_params(axis='y', labelsize=15)
+
+        # Add a colorbar
+        colorbar = figure.colorbar(image, ax=axis[index], pad=0.01)
+        colorbar.set_label(cb_label, rotation='vertical', size=15)
+        colorbar.ax.tick_params(labelsize=15)
+
+    # delete unused axes
+    for delind in range(index + 1, len(axis)):
+        figure.delaxes(axis[delind])
+    # tighten the layout
+    plt.tight_layout()
+    # generate the path to the output file
+    plot_params['basedir'] = cfg['plot_dir']
+    plot_params['ori_file'] = ifilename
+    plot_params['areacello'] = None
+    plot_params['mmodel'] = None
+
+    pltoutname = genfilename(**plot_params, data_type='hofm')
+
+    plt.savefig(pltoutname, dpi=100)
+    provenance_record = get_provenance_record(plot_params, 'hofm', 'png')
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(pltoutname + '.png', provenance_record)
+
+
+def tsplot_plot(cfg, plot_params):
+    """Plot a TS diagram.
+
+    Parameters
+    ----------
+    cfg: dict
+        configuration dictionary ESMValTool format.
+        The `tsdiag_depth` key sets the maximum depth the TS plot
+        should go to.
+    plot_params: dict
+        dictionary with plotting parameters. The keys used here are:
+
+        - model_filenames: OrderedDict with model names as keys and
+          input files as values.
+        - region: name of the region predefined in the `hofm_regions`
+          function.
+        - ncols: number of columns in the resulting plot (rows are
+          calculated from the total number of plots).
+        - cmap: matplotlib colormap object.
+        - observations: name of the dataset with observations.
+
+    Returns
+    -------
+    None
+    """
+    # Set up a figure
+    nplots = len(plot_params['model_filenames'])
+    ncols = float(plot_params['ncols'])
+    nrows = math.ceil(nplots / ncols)
+    ncols = int(ncols)
+    nrows = int(nrows)
+    nplot = 1
+    plt.figure(figsize=(8 * ncols, 2 * nrows * ncols))
+
+    # loop over models
+    for mmodel in plot_params['model_filenames']:
+        logger.info("Plot tsplot data for %s, region %s", mmodel,
+                    plot_params['region'])
+        # load the mean data created by `tsplot_data`
+        ifilename_t = genfilename(cfg['work_dir'], 'thetao', mmodel,
+                                  plot_params['region'], 'tsplot', '.npy')
+        ifilename_s = genfilename(cfg['work_dir'], 'so', mmodel,
+                                  plot_params['region'], 'tsplot', '.npy')
+        ifilename_depth = genfilename(cfg['work_dir'], 'depth', mmodel,
+                                      plot_params['region'], 'tsplot', '.npy')
+
+        temp = np.load(ifilename_t, allow_pickle=True)
+        salt = np.load(ifilename_s, allow_pickle=True)
+        depth = np.load(ifilename_depth, allow_pickle=True)
+        # Still an old-fashioned way to set up a plot; works best for now.
+        plt.subplot(nrows, ncols, nplot)
+        # calculate the background with density isolines
+        si2, ti2, dens = dens_back(33, 36., -2, 6)
+
+        # convert from Kelvin if needed
+        if temp.min() > 100:
+            temp = temp - 273.15
+
+        # plot the background
+        contour_plot = plt.contour(si2,
+                                   ti2,
+                                   dens,
+                                   colors='k',
+                                   levels=np.linspace(dens.min(), dens.max(),
+                                                      15),
+                                   alpha=0.3)
+        # plot the scatter plot
+        plt.scatter(salt[::],
+                    temp[::],
+                    c=depth,
+                    s=3.0,
+                    cmap=plot_params['cmap'],
+                    edgecolors='none',
+                    vmax=cfg['tsdiag_depth'])
+        # adjust the plot
+        plt.clabel(contour_plot, fontsize=12, inline=1, fmt='%1.1f')
+        plt.xlim(33, 36.)
+        plt.ylim(-2.1, 6)
+        plt.xlabel('Salinity', size=20)
+        plt.ylabel(r'Temperature, $^{\circ}$C', size=20)
+        plt.xticks(size=15)
+        plt.yticks(size=15)
+        # set up the colorbar
+        colorbar = plt.colorbar(pad=0.03)
+        colorbar.ax.get_yaxis().labelpad = 15
+        colorbar.set_label('depth, m', rotation=270, size=20)
+        colorbar.ax.tick_params(labelsize=15)
+
+        plt.title(mmodel, size=20)
+        nplot = nplot + 1
+
+    plt.tight_layout()
+    # save the plot
+    pltoutname = genfilename(cfg['plot_dir'],
+                             'tsplot',
+                             region=plot_params['region'],
+                             data_type='tsplot')
+    plt.savefig(pltoutname, dpi=100)
+    plot_params['basedir'] = cfg['plot_dir']
+    plot_params['ori_file'] = ifilename_t
+    plot_params['areacello'] = None
+    plot_params['mmodel'] = None
+
+    provenance_record = get_provenance_record(plot_params, 'tsplot', 'png')
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(pltoutname + '.png', provenance_record)
+
+
+def plot_profile(cfg, plot_params):
+    """Plot mean vertical profiles.
+
+    Uses the data previously calculated for the Hovmoeller diagrams.
+
+    Parameters
+    ----------
+    cfg: dict
+        configuration dictionary ESMValTool format.
+        The `hofm_depth` key sets the maximum depth of the profiles.
+    plot_params: dict
+        dictionary with plotting parameters. The keys used here are:
+
+        - model_filenames: OrderedDict with model names as keys and
+          input files as values.
+        - variable: name of the CMOR variable.
+        - region: name of the region predefined in the `hofm_regions`
+          function.
+        - cmap: matplotlib colormap object.
+        - dpi: dpi for the output figure.
+        - observations: name of the dataset with observations.
+
+    Returns
+    -------
+    None
+    """
+    level_clim = Dataset(plot_params['model_filenames'][
+        plot_params['observations']]).variables['lev'][:]
+    plt.figure(figsize=(5, 6))
+    axis = plt.subplot(111)
+
+    color = iter(plot_params['cmap'](np.linspace(
+        0, 1, len(plot_params['model_filenames']))))
+    lev_limit_clim = level_clim[level_clim <= cfg['hofm_depth']].shape[0] + 1
+
+    mean_profile = np.zeros((level_clim[:lev_limit_clim].shape[0],
+                             len(plot_params['model_filenames']) - 1))
+    mean_profile_counter = 0
+
+    for mmodel in plot_params['model_filenames']:
+        logger.info("Plot profile %s data for %s, region %s",
+                    plot_params['variable'], mmodel, plot_params['region'])
+        # construct input filenames
+        ifilename = genfilename(cfg['work_dir'], plot_params['variable'],
+                                mmodel, plot_params['region'], 'hofm', '.npy')
+        ifilename_levels = genfilename(cfg['work_dir'],
+                                       plot_params['variable'], mmodel,
+                                       plot_params['region'], 'levels', '.npy')
+        # load data
+        hofdata = np.load(ifilename, allow_pickle=True)
+        lev = np.load(ifilename_levels, allow_pickle=True)
+
+        # convert the data if needed and set the labels
+        cb_label, hofdata = label_and_conversion(plot_params['variable'],
+                                                 hofdata)
+
+        # set the index for the maximum level (one level deeper than
+        # `cfg['hofm_depth']`)
+        lev_limit = lev[lev <= cfg['hofm_depth']].shape[0] + 1
+
+        # calculate the mean profile
+        profile = hofdata.mean(axis=1)
+
+        if mmodel != plot_params['observations']:
+            next_color = next(color)
+        else:
+            next_color = 'k'
+
+        plt.plot(profile, lev[0:lev_limit], label=mmodel, c=next_color)
+
+        # interpolate to standard levels and add to the mean profile
+        profile_interpolated = np.interp(level_clim[:lev_limit_clim],
+                                         lev[0:lev_limit], profile)
+        if mmodel != plot_params['observations']:
+            logger.info('Including %s in the mean', mmodel)
+            mean_profile[:, mean_profile_counter] = profile_interpolated
+            mean_profile_counter += 1
+
+    # Here we are plotting the mean profile separately
+    mean_profile_mean = np.nanmean(mean_profile, axis=1)
+
+    plt.plot(mean_profile_mean,
+             level_clim[:lev_limit_clim],
+             label='MODEL-MEAN',
+             linestyle='--',
+             color='k',
+             lw=3)
+
+    plt.xticks(size=12)
+    plt.yticks(size=12)
+
+    plt.xlabel(cb_label, size=12, rotation='horizontal')
+    plt.ylabel('m', size=12, rotation='horizontal')
+
+    plt.ylim(0, cfg['hofm_depth'])
+
+    # we shift the legend and plot it
+    box = axis.get_position()
+    axis.set_position([box.x0, box.y0, box.width * 0.8, box.height])
+    axis.legend(loc='center left', bbox_to_anchor=(1, 0.5), fontsize=10)
+
+    plt.gca().invert_yaxis()
+
+    plot_params['basedir'] = cfg['plot_dir']
+    plot_params['ori_file'] = ifilename
+    plot_params['areacello'] = None
+    plot_params['mmodel'] = None
+
+    pltoutname = genfilename(cfg['plot_dir'], plot_params['variable'],
+                             'MULTIMODEL', plot_params['region'], 'profile')
+
+    plt.savefig(pltoutname, dpi=plot_params['dpi'], bbox_inches='tight')
+    provenance_record = get_provenance_record(plot_params, 'profile', 'png')
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(pltoutname + '.png', provenance_record)
+
+
+def plot2d_original_grid(cfg, plot_params):
+    """Plot 2d maps on the original grid using cartopy.
+
+    Parameters
+    ----------
+    cfg: dict
+        configuration dictionary ESMValTool format.
+    plot_params: dict
+        dictionary with plotting parameters. The keys used here are:
+
+        - model_filenames: OrderedDict with model names as keys and
+          input files as values.
+        - variable: name of the variable.
+        - depth: the data will be plotted on the model level that is
+          closest to this depth. Ignored if explicit_depths is
+          provided.
+        - levels: values to be used for vmin and vmax in the form
+          (vmin, vmax).
+        - cmap: matplotlib colormap.
+        - dpi: the dpi value used to save the figure.
+        - explicit_depths: output of the `aw_core` function, a
+          dictionary where for each model there is the maximum
+          temperature, the depth level in the model and the index of
+          that depth level. If provided, the `depth` parameter is
+          ignored.
+        - projection: instance of a cartopy projection (ccrs).
+        - bbox: bounding box; it will be the input for cartopy
+          `set_extent`.
+        - ncols: number of columns.
+
+    Returns
+    -------
+    None
+    """
+    figure, axis = create_plot(plot_params['model_filenames'],
+                               ncols=plot_params['ncols'],
+                               projection=plot_params['projection'])
+    index = None
+    for index, mmodel in enumerate(plot_params['model_filenames']):
+        logger.info("Plot plot2d_original_grid %s for %s",
+                    plot_params['variable'], mmodel)
+
+        ifilename = genfilename(cfg['work_dir'],
+                                plot_params['variable'],
+                                mmodel,
+                                data_type='timmean',
+                                extension='.nc')
+
+        metadata = load_meta(ifilename, fxpath=None)
+        datafile = metadata['datafile']
+        lon2d = metadata['lon2d']
+        lat2d = metadata['lat2d']
+        lev = metadata['lev']
+
+        if not plot_params['explicit_depths']:
+            depth_target, level_target = closest_depth(lev,
+                                                       plot_params['depth'])
+        else:
+            level_target = plot_params['explicit_depths'][mmodel][
+                'maxvalue_index']
+            depth_target = lev[level_target]
+
+        if datafile.variables[plot_params['variable']].ndim < 4:
+            data = datafile.variables[
+                plot_params['variable']][level_target, :, :]
+        else:
+            data = datafile.variables[
+                plot_params['variable']][0, level_target, :, :]
+
+        cb_label, data = label_and_conversion(plot_params['variable'], data)
+
+        left, right, down, upper = plot_params['bbox']
+
+        axis[index].set_extent([left, right, down, upper],
+                               crs=ccrs.PlateCarree())
+        # Only pcolormesh is working for now with cartopy;
+        # contourf fails to plot curvilinear meshes,
+        # let alone the unstructured ones.
+        image = axis[index].pcolormesh(
+            lon2d,
+            lat2d,
+            data,
+            vmin=plot_params['levels'][0],
+            vmax=plot_params['levels'][-1],
+            transform=ccrs.PlateCarree(),
+            cmap=plot_params['cmap'],
+        )
+
+        axis[index].add_feature(
+            cfeature.GSHHSFeature(levels=[1],
+                                  scale="low",
+                                  facecolor="lightgray"))
+        axis[index].set_title("{}, {} m".format(mmodel,
+                                                np.round(depth_target, 1)),
+                              size=18)
+        axis[index].set_rasterization_zorder(-1)
+
+    # delete unused axes
+    for delind in range(index + 1, len(axis)):
+        figure.delaxes(axis[delind])
+
+    # set the common colorbar
+    colorbar = figure.colorbar(image,
+                               orientation='horizontal',
+                               ax=axis,
+                               pad=0.01,
+                               shrink=0.9)
+    colorbar.set_label(cb_label, rotation='horizontal', size=18)
+    colorbar.ax.tick_params(labelsize=18)
+
+    if not plot_params['explicit_depths']:
+        plot_type = 'plot2d_{}_depth'.format(str(plot_params['depth']))
+    else:
+        plot_type = "plot2d_different_levels"
+
+    # save the figure
+    pltoutname = genfilename(cfg['plot_dir'],
+                             plot_params['variable'],
+                             "MULTIMODEL",
+                             data_type=plot_type)
+
+    plot_params['basedir'] = cfg['plot_dir']
+    plot_params['ori_file'] = ifilename
+    plot_params['areacello'] = None
+    plot_params['mmodel'] = None
+    plot_params['region'] = "Global"
+    plt.savefig(pltoutname, dpi=plot_params['dpi'])
+
+    provenance_record = get_provenance_record(plot_params, 'plot2d', 'png')
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(pltoutname + '.png', provenance_record)
+
+
+def plot2d_bias(cfg, plot_params):
+    """Plot 2d maps of the bias relative to the climatology.
+
+    Parameters
+    ----------
+    cfg: dict
+        configuration dictionary ESMValTool format.
+    plot_params: dict
+        dictionary with plotting parameters. The keys used here are:
+
+        - model_filenames: OrderedDict with model names as keys and
+          input files as values.
+        - variable: name of the variable.
+        - depth: the data will be plotted on the model level that is
+          closest to this depth.
+        - levels: levels for the contour plot.
+        - cmap: matplotlib colormap.
+        - dpi: the dpi value used to save the figure.
+        - observations: name of the observations.
+        - projection: instance of a cartopy projection (ccrs).
+        - bbox: bounding box; it will be the input for cartopy
+          `set_extent`.
+        - ncols: number of columns.
+
+    Returns
+    -------
+    None
+    """
+    # set up a base figure
+    figure, axis = create_plot(plot_params['model_filenames'],
+                               ncols=plot_params['ncols'],
+                               projection=plot_params['projection'])
+    # get the filename of the observations
+    ifilename_obs = genfilename(cfg['work_dir'],
+                                plot_params['variable'],
+                                plot_params['observations'],
+                                data_type='timmean',
+                                extension='.nc')
+    # get the metadata for the observations (we just need a size)
+    metadata = load_meta(
+        datapath=plot_params['model_filenames'][plot_params['observations']],
+        fxpath=None)
+    lon2d = metadata['lon2d']
+
+    # Create an empty array to store the mean.
+    # It is one point larger along longitude to account for the cyclic point.
+    model_mean = np.zeros((lon2d.shape[0], lon2d.shape[1] + 1))
+    logger.debug("Model mean shape: %s", model_mean.shape)
+
+    # delete the observations from the model list
+    model_filenames = plot_params['model_filenames'].copy()
+    del model_filenames[plot_params['observations']]
+    # loop over models
+    index = None
+    for index, mmodel in enumerate(model_filenames):
+        logger.info("Plot plot2d_bias %s for %s", plot_params['variable'],
+                    mmodel)
+        # get the filename with the mean generated by the `timmean` function
+        ifilename = genfilename(cfg['work_dir'],
+                                plot_params['variable'],
+                                mmodel,
+                                data_type='timmean',
+                                extension='.nc')
+        # do the interpolation to the observation grid;
+        # the output comes with the cyclic point added
+        lonc, latc, target_depth, data_obs, interpolated = interpolate_esmf(
+            ifilename_obs, ifilename, plot_params['depth'],
+            plot_params['variable'])
+        # get the label and convert the data if needed
+        cb_label, data_obs = label_and_conversion(plot_params['variable'],
+                                                  data_obs)
+        cb_label, interpolated = label_and_conversion(plot_params['variable'],
+                                                      interpolated)
+        # add to the model mean
+        model_mean = model_mean + interpolated
+        # set the map extent
+        left, right, down, upper = plot_params['bbox']
+        axis[index].set_extent([left, right, down, upper],
+                               crs=ccrs.PlateCarree())
+        # plot the bias of the model relative to the observations
+        image = axis[index].contourf(
+            lonc,
+            latc,
+            interpolated - data_obs,
+            levels=plot_params['levels'],
+            extend='both',
+            # vmin=contours[0],
+            # vmax=contours[-1],
+            transform=ccrs.PlateCarree(),
+            cmap=plot_params['cmap'],
+        )
+        # fill continents
+        axis[index].add_feature(
+            cfeature.GSHHSFeature(levels=[1],
+                                  scale="low",
+                                  facecolor="lightgray"))
+
+        axis[index].set_title("{}, {} m".format(mmodel, int(target_depth)),
+                              size=18)
+        axis[index].set_rasterization_zorder(-1)
+    # calculate the model mean and plot it
+    if index is None:
+        # guard against an empty model list
+        index = 0
+    model_mean = model_mean / len(model_filenames)
+    axis[index + 1].set_extent([left, right, down, upper],
+                               crs=ccrs.PlateCarree())
+    image = axis[index + 1].contourf(
+        lonc,
+        latc,
+        model_mean - data_obs,
+        levels=plot_params['levels'],
+        extend='both',
+        # vmin=contours[0],
+        # vmax=contours[-1],
+        transform=ccrs.PlateCarree(),
+        cmap=cmo.balance,
+    )
+
+    axis[index + 1].add_feature(
+        cfeature.GSHHSFeature(levels=[1], scale="low", facecolor="lightgray"))
+
+    axis[index + 1].set_title("Model mean bias, {} m".format(
+        int(target_depth)),
+                              size=18)
+    axis[index + 1].set_rasterization_zorder(-1)
+    # delete the axes that are not needed
+    for delind in range(index + 2, len(axis)):
+        figure.delaxes(axis[delind])
+    # set the common colorbar
+    colorbar = figure.colorbar(image,
+                               orientation='horizontal',
+                               ax=axis,
+                               pad=0.01,
+                               shrink=0.9)
+    colorbar.set_label(cb_label, rotation='horizontal', size=18)
+    colorbar.ax.tick_params(labelsize=18)
+    # save the picture
+    pltoutname = genfilename(cfg['plot_dir'],
+                             plot_params['variable'],
+                             "MULTIMODEL",
+                             data_type='plot2d_bias_{}_level'.format(
+                                 str(int(target_depth))))
+
+    plot_params['basedir'] = cfg['plot_dir']
+    plot_params['ori_file'] = ifilename
+    plot_params['areacello'] = None
+    plot_params['mmodel'] = None
+    plot_params['region'] = "Global"
+    plt.savefig(pltoutname, dpi=plot_params['dpi'])
+
+    provenance_record = get_provenance_record(plot_params, 'plot2d_bias',
+                                              'png')
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(pltoutname + '.png', provenance_record)
+
+
+def plot_aw_core_stat(aw_core_parameters, diagplotdir):
+    """Generate plots of the AW core depth and temperature.
+
+    Uses the pandas plot functionality.
+
+    Parameters
+    ----------
+    aw_core_parameters: dict
+        Output of the `aw_core` function.
+        It's a dictionary where for each model there is the maximum
+        temperature, the depth level in the model, and the index of the
+        depth level in the model.
+    diagplotdir: str
+        plot folder
+
+    Returns
+    -------
+    None
+    """
+    logger.info("Plot AW core statistics")
+    # Convert the dictionary to a pandas DataFrame
+    dataframe = pd.DataFrame(aw_core_parameters).transpose()
+
+    plt.figure()
+    dataframe.maxvalue.plot(kind='barh')
+    plt.xlabel(r'$^{\circ}$C')
+    pltoutname = genfilename(diagplotdir,
+                             variable='aw-core-temp',
+                             region='EB',
+                             data_type='awiCoreTemp')
+
+    plt.tight_layout()
+    plt.savefig(pltoutname, dpi=100)
+
+    plt.figure()
+    dataframe.maxvalue_depth.plot(kind='barh')
+    plt.xlabel('m')
+    pltoutname = genfilename(diagplotdir,
+                             variable='aw-core-depth',
+                             region='EB',
+                             data_type='awiCoreTemp')
+
+    plt.tight_layout()
+    plt.savefig(pltoutname, dpi=100)
+
+
+def transect_map(cfg,
+                 region,
+                 projection=ccrs.NorthPolarStereo(),
+                 bbox=(-180, 180, 60, 90),
+                 mult=2):
+    """Plot the map with the points of the transect overlaid.
+
+    Parameters
+    ----------
+    cfg: dict
+        configuration dictionary ESMValTool format.
+    region: str
+        name of the region predefined in the `transect_points` function.
+    projection: instance of cartopy ccrs
+        cartopy projection
+    bbox: list
+        four values - [left, right, bottom, top]
+    mult: int
+        multiplier for the number of points.
+        E.g. mult=2 doubles the number of points.
+
+    Returns
+    -------
+    None
+
+    """
+    logger.info("Create transect map for region %s", region)
+    lon_s4new, lat_s4new = transect_points(region, mult=mult)
+    dist = point_distance(lon_s4new, lat_s4new)
+    figure, axis = plt.subplots(1,
+                                1,
+                                subplot_kw=dict(projection=projection),
+                                constrained_layout=True)
+
+    axis.set_extent(bbox, crs=ccrs.PlateCarree())
+    image = axis.scatter(lon_s4new,
+                         lat_s4new,
+                         s=10,
+                         c=dist,
+                         transform=ccrs.PlateCarree(),
+                         cmap=cm.Spectral,
+                         edgecolors='none')
+    axis.coastlines(resolution="50m")
+
+    colorbar = figure.colorbar(image, ax=axis)
+    colorbar.set_label('Along-track distance, km',
+                       rotation='vertical',
+                       size=15)
+    pltoutname = genfilename(cfg['work_dir'],
+                             'allvars',
+                             region=region,
+                             data_type='transect_map')
+
+    plt.savefig(pltoutname, dpi=100)
+
+    plot_params = {}
+    plot_params['basedir'] = cfg['plot_dir']
+    plot_params['ori_file'] = None
+    plot_params['areacello'] = None
+    plot_params['mmodel'] = None
+    plot_params['region'] = region
+
+    provenance_record = get_provenance_record(plot_params, 'transect_map',
+                                              'png')
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(pltoutname + '.png', provenance_record)
+
+
+def transect_plot(cfg, plot_params):
+    """Plot transects.
+
+    Parameters
+    ----------
+    cfg: dict
+        configuration dictionary ESMValTool format.
+        The `transects_depth` key sets the maximum depth of the
+        transects.
+    plot_params: dict
+        dictionary with plotting parameters. The keys used here are:
+
+        - model_filenames: OrderedDict with model names as keys and
+          input files as values.
+        - variable: name of the variable.
+        - region: name of the region predefined in the
+          `transect_points` function.
+        - levels: contour levels (minimum, maximum, number of levels).
+        - ncols: number of columns in the plot.
+        - cmap: matplotlib colormap instance.
+
+    Returns
+    -------
+    None
+
+    """
+    figure, axis = create_plot(plot_params['model_filenames'],
+                               ncols=plot_params['ncols'])
+
+    # get transect positions and calculate distances between points
+    lon_s4new, lat_s4new = transect_points(plot_params['region'], mult=2)
+    dist = point_distance(lon_s4new, lat_s4new)
+
+    # loop over models
+    index = None
+    for index, mmodel in enumerate(plot_params['model_filenames']):
+        logger.info("Plot %s data for %s, region %s", plot_params['variable'],
+                    mmodel, plot_params['region'])
+        # construct the file names and get the data
+        ifilename = genfilename(cfg['work_dir'], plot_params['variable'],
+                                mmodel, plot_params['region'], 'transect',
+                                '.npy')
+        ifilename_depth = genfilename(cfg['work_dir'], 'depth', mmodel,
+                                      plot_params['region'],
+                                      'transect_' + plot_params['variable'],
+                                      '.npy')
+        ifilename_dist = genfilename(cfg['work_dir'], 'distance', mmodel,
+                                     plot_params['region'],
+                                     'transect_' + plot_params['variable'],
+                                     '.npy')
+
+        data = np.load(ifilename, allow_pickle=True)
+        data = np.ma.masked_equal(data.T, 0)
+        lev = np.load(ifilename_depth, allow_pickle=True)
+        dist = np.load(ifilename_dist, allow_pickle=True)
+        # get the labels and convert the data
+        cb_label, data = label_and_conversion(plot_params['variable'], data)
+        # index of the maximum depth
+        lev_limit = lev[lev <= cfg['transects_depth']].shape[0] + 1
+
+        image = axis[index].contourf(dist,
+                                     lev[:lev_limit],
+                                     data[:lev_limit, :],
+                                     levels=plot_params['levels'],
+                                     extend='both',
+                                     cmap=plot_params['cmap'])
+        # plot settings
+        axis[index].set_ylabel('Depth, m', size=15, rotation='vertical')
+        axis[index].set_xlabel('Along-track distance, km',
+                               size=15,
+                               rotation='horizontal')
+        axis[index].set_title(mmodel, size=20)
+        axis[index].set_ylim(cfg['transects_depth'], 0)
+        # ax[ind].invert_yaxis()
+        axis[index].tick_params(axis='both', labelsize=15)
+        # colorbar settings
+        colorbar = figure.colorbar(image, ax=axis[index], pad=0.01)
+        colorbar.set_label(cb_label, rotation='vertical', size=15)
+        colorbar.ax.tick_params(labelsize=15)
+    # fig.set_constrained_layout_pads(w_pad=2./30., h_pad=2./30.,
+    #                                 hspace=10, wspace=10)
+    for delind in range(index + 1, len(axis)):
+        figure.delaxes(axis[delind])
+
+    pltoutname = genfilename(cfg['plot_dir'],
+                             plot_params['variable'],
+                             region=plot_params['region'],
+                             data_type='transect')
+
+    plt.savefig(pltoutname, dpi=100)
+    plot_params['basedir'] = cfg['plot_dir']
+    plot_params['ori_file'] = ifilename
+    plot_params['areacello'] = None
+    plot_params['mmodel'] = None
+
+    provenance_record = get_provenance_record(plot_params, 'transect', 'png')
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(pltoutname + '.png', provenance_record)
diff --git a/esmvaltool/diag_scripts/arctic_ocean/regions.py b/esmvaltool/diag_scripts/arctic_ocean/regions.py
new file mode 100644
index 0000000000..f5c2548fd4
--- /dev/null
+++ b/esmvaltool/diag_scripts/arctic_ocean/regions.py
@@ -0,0 +1,128 @@
+# -*- coding: utf-8 -*-
+"""Part of the ESMValTool Arctic Ocean diagnostics.
+
+This module contains functions with definitions of regions.
+"""
+import logging
+import os
+import numpy as np
+from scipy.interpolate import interp1d
+
+logger = logging.getLogger(os.path.basename(__file__))
+
+
+def hofm_regions(region, lon2d, lat2d):
+    """Define regions for data selection.
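+
+    The selection is index based: for a predefined region the function
+    returns the i and j indices of all grid points whose longitude and
+    latitude fall into the region's bounding box(es).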
+
+    Parameters
+    ----------
+    region: str
+        the name of the region
+    lon2d: 2d numpy array
+    lat2d: 2d numpy array
+
+    Returns
+    -------
+    indexesi: 1d numpy array
+        i indexes of the selected points
+    indexesj: 1d numpy array
+        j indexes of the selected points
+    """
+    if region == 'EB':
+        # Eurasian Basin of the Arctic Ocean
+        indi, indj = np.where((lon2d > 300) & (lat2d > 80))
+        indi2, indj2 = np.where((lon2d < 100) & (lat2d > 80))
+        indi3, indj3 = np.where((lon2d > 100) & (lon2d < 140) & (lat2d > 66))
+
+        indexesi = np.hstack((indi, indi2, indi3))
+        indexesj = np.hstack((indj, indj2, indj3))
+    elif region == 'AB':
+        # Amerasian Basin of the Arctic Ocean
+        indi, indj = np.where((lon2d >= 260) & (lon2d <= 300) & (lat2d >= 80))
+        indi2, indj2 = np.where((lon2d >= 140) & (lon2d < 260) & (lat2d > 66))
+
+        indexesi = np.hstack((indi, indi2))
+        indexesj = np.hstack((indj, indj2))
+    elif region == 'Barents_sea':
+        # Barents Sea
+        indi, indj = np.where((lon2d >= 20) & (lon2d <= 55) & (lat2d >= 70)
+                              & (lat2d <= 80))
+
+        indexesi = indi
+        indexesj = indj
+    elif region == 'North_sea':
+        # North Sea
+        indi, indj = np.where((lon2d >= 355) & (lon2d <= 360) & (lat2d >= 50)
+                              & (lat2d <= 60))
+        indi2, indj2 = np.where((lon2d >= 0) & (lon2d <= 10) & (lat2d >= 50)
+                                & (lat2d <= 60))
+
+        indexesi = np.hstack((indi, indi2))
+        indexesj = np.hstack((indj, indj2))
+    else:
+        # without a valid region the indexes would be undefined
+        raise ValueError('Region {} is not recognized'.format(region))
+    return indexesi, indexesj
+
+
+def transect_points(transect, mult=2):
+    """Return a collection of points for a transect.
+
+    Parameters
+    ----------
+    transect: str
+        Name of the predefined transect
+    mult: int
+        multiplier that increases the number of points
+        by a factor of `mult`
+
+    Returns
+    -------
+    lon_s4new: 1d numpy array
+        longitude points of the transect
+    lat_s4new: 1d numpy array
+        latitude points of the transect
+    """
+    if transect == 'AWpath':
+        lon_s4 = np.array([
+            17.6, 16.5, 16.05, 15.6, 15.1, 14.1, 13.0, 12.0, 10.0, 8.0, 4.0,
+            4.0, 10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0, 90.0, 100.0,
+            110.0, 120.0, 130.0, 140.0
+        ])
+        lat_s4 = np.array([
+            69.0, 70.6, 71.3, 72.02, 72.8, 73.8, 75.0, 76.0, 77.0, 78.0, 79.0,
+            80.0, 81.0, 81.8, 81.8, 82.6, 83.0, 83.2, 83.1, 82.8, 82.5, 81.8,
+            79.7, 78.2, 78.7, 79.7
+        ])
+    elif transect == 'Fram':
+        lon_s4 = np.array([
+            -22.3732, -21.4796, -19.6479, -18.1074, -16.7828, -15.504,
+            -14.2042, -12.9771, -11.6642, -10.1892, -8.7414, -7.719, -6.3646,
+            -4.4803, -3.4232, -2.4435, -1.615, -0.6752, 0.343, 1.6947, 2.7157,
+            3.7374, 4.6099, 5.5097, 6.3754, 7.2394, 7.9238, 8.7029, 9.7338,
+            10.4462, 11.0559, 12.0102, 13.3313
+        ])
+        lat_s4 = np.array([
+            78.9373, 78.9276, 78.9183, 78.9356, 78.9346, 78.9334, 78.9425,
+            78.9434, 78.9274, 78.9392, 78.9287, 78.9262, 78.9392, 78.95,
+            78.9405, 78.9347, 78.9334, 78.922, 78.9287, 78.9131, 78.919,
+            78.9215, 78.9242, 78.909, 78.8995, 78.8874, 78.8865, 78.9026,
+            78.8992, 78.8841, 78.8793, 78.8715, 78.9012
+        ])
+    else:
+        # without a valid transect the points would be undefined
+        raise ValueError('Transect {} is not recognized'.format(transect))
+
+    point_number = np.linspace(1,
+                               lon_s4.shape[0],
+                               num=lon_s4.shape[0],
+                               endpoint=True)
+    f_lons = interp1d(point_number, lon_s4)
+    g_lats = interp1d(point_number, lat_s4)
+    xnew = np.linspace(1,
+                       lon_s4.shape[0],
+                       num=mult * lon_s4.shape[0],
+                       endpoint=True)
+
+    lon_s4new = f_lons(xnew)
+    lat_s4new = g_lats(xnew)
+    return lon_s4new, lat_s4new
diff --git a/esmvaltool/diag_scripts/arctic_ocean/utils.py b/esmvaltool/diag_scripts/arctic_ocean/utils.py
new file mode 100644
index 0000000000..372c561880
--- /dev/null
+++ b/esmvaltool/diag_scripts/arctic_ocean/utils.py
@@ -0,0 +1,337 @@
+# -*- coding: utf-8 -*-
+"""Part of the ESMValTool Arctic Ocean diagnostics.
+
+This module contains utility functions.
+"""
+import logging
+import os
+import shutil
+
+import matplotlib as mpl
+import numpy as np
+import pyproj
+import seawater as sw
+from cdo import Cdo
+from cmocean import cm as cmo
+from matplotlib import pylab as plt
+from matplotlib.colors import LinearSegmentedColormap
+
+from esmvaltool.diag_scripts.shared import ProvenanceLogger
+
+logger = logging.getLogger(os.path.basename(__file__))
+
+
+class DiagnosticError(Exception):
+    """Error in diagnostic."""
+
+
+def genfilename(basedir,
+                variable=None,
+                mmodel=None,
+                region=None,
+                data_type=None,
+                extension=None,
+                basis='arctic_ocean',
+                **kwargs):
+    """Generate a file name for the output data.
+
+    Parameters
+    ----------
+    basedir: str
+        base directory
+    variable: str
+        name of the variable
+    mmodel: str
+        name of the model
+    region: str
+        name of the region
+    data_type: str
+        type of the data, for example `timmean`
+    extension: str
+        file extension, for example `.nc`; it is appended to the
+        file name verbatim.
+    basis: str
+        base name that can be shared by a series of diagnostics
+
+    Returns
+    -------
+    ifilename: str
+        path to the file
+    """
+    nname = [basis, region, mmodel, variable, data_type]
+    nname_nonans = []
+    for i in nname:
+        if i:
+            nname_nonans.append(i)
+    basename = "_".join(nname_nonans)
+    if extension:
+        basename = basename + extension
+    ifilename = os.path.join(basedir, basename)
+    return ifilename
+
+
+def timmean(cfg, model_filenames, mmodel, cmor_var, observations='PHC'):
+    """Create the time mean of the input data with CDO.
+
+    Parameters
+    ----------
+    cfg: dict
+        configuration dictionary ESMValTool format.
+    model_filenames: OrderedDict
+        OrderedDict with model names as keys and input files as values.
+    mmodel: str
+        model name that will be processed
+    cmor_var: str
+        name of the CMOR variable
+    observations: str
+        name of the observational/climatology data set.
+
+    Returns
+    -------
+    None
+    """
+    logger.info("Calculate timmean %s for %s", cmor_var, mmodel)
+    cdo = Cdo()
+    ofilename = genfilename(cfg['work_dir'],
+                            cmor_var,
+                            mmodel,
+                            data_type='timmean',
+                            extension='.nc')
+    if mmodel != observations:
+        cdo.timmean(input=model_filenames[mmodel], output=ofilename)
+    else:
+        shutil.copy2(model_filenames[mmodel], ofilename)
+
+    attributes = {}
+    attributes['region'] = 'global'
+    attributes['mmodel'] = mmodel
+    attributes['ori_file'] = model_filenames[mmodel]
+    attributes['areacello'] = None
+
+    provenance_record = get_provenance_record(attributes, 'timmean', 'nc')
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(ofilename, provenance_record)
+
+
+def get_clim_model_filenames(config, variable):
+    """Extract model filenames from the configuration."""
+    model_filenames = {}
+    for key, value in config['input_data'].items():
+        if value['short_name'] == variable:
+            model_filenames[value['dataset']] = key
+    return model_filenames
+
+
+def get_fx_filenames(config, fx_var):
+    """Extract fx file names."""
+    areacello_fxdataset = {}
+    for _, value in config['input_data'].items():
+        if value['short_name'] == fx_var:
+            logger.debug(value['filename'])
+            areacello_fxdataset[value['dataset']] = value['filename']
+    return areacello_fxdataset
+
+
+def find_observations_name(config):
+    """Find the "model name" of the observational data set.
+
+    Assumes that there is only one observational data set.
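+
+    A sketch of the expected lookup (file and dataset names are
+    illustrative only)::
+
+        config = {'input_data': {'phc.nc': {'project': 'OBS6',
+                                            'dataset': 'PHC'}}}
+        find_observations_name(config)  # returns 'PHC'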
+    """
+    obsname = []
+    for value in config['input_data'].values():
+        if value['project'] == "OBS6":
+            obsname = value['dataset']
+            logger.debug(obsname)
+    if not obsname:
+        logger.info('Can\'t find observational (climatology) data')
+
+    return obsname
+
+
+def shiftedcolormap(cmap, start=0, midpoint=0.5, stop=1.0, name='shiftedcmap'):
+    """Offset the "center" of a colormap.
+
+    Function to offset the "center" of a colormap. Useful for
+    data with a negative min and positive max when you want the
+    middle of the colormap's dynamic range to be at zero.
+    Source: https://stackoverflow.com/questions/7404116/
+    defining-the-midpoint-of-a-colormap-in-matplotlib
+
+    Parameters
+    ----------
+    cmap : The matplotlib colormap to be altered
+    start : Offset from the lowest point in the colormap's range.
+        Defaults to 0.0 (no lower offset). Should be between
+        0.0 and `midpoint`.
+    midpoint : The new center of the colormap. Defaults to
+        0.5 (no shift). Should be between 0.0 and 1.0. In
+        general, this should be 1 - vmax/(vmax + abs(vmin)).
+        For example, if your data range from -15.0 to +5.0 and
+        you want the center of the colormap at 0.0, `midpoint`
+        should be set to 1 - 5/(5 + 15) = 0.75.
+    stop : Offset from the highest point in the colormap's range.
+        Defaults to 1.0 (no upper offset). Should be between
+        `midpoint` and 1.0.
+    """
+    cdict = {'red': [], 'green': [], 'blue': [], 'alpha': []}
+
+    # regular index to compute the colors
+    reg_index = np.linspace(start, stop, 257)
+
+    # shifted index to match the data
+    shift_index = np.hstack([
+        np.linspace(0.0, midpoint, 128, endpoint=False),
+        np.linspace(midpoint, 1.0, 129, endpoint=True)
+    ])
+
+    for regi, shii in zip(reg_index, shift_index):
+        red, green, blue, alpha = cmap(regi)
+
+        cdict['red'].append((shii, red, red))
+        cdict['green'].append((shii, green, green))
+        cdict['blue'].append((shii, blue, blue))
+        cdict['alpha'].append((shii, alpha, alpha))
+
+    try:
+        newcmap = mpl.colors.LinearSegmentedColormap(name, cdict)
+        mpl.colormaps.register(cmap=newcmap)
+    except ValueError:
+        logger.info("A colormap named %s is already registered.", name)
+
+    return newcmap
+
+
+def dens_back(smin, smax, tmin, tmax):
+    """Calculate the density background for the TS diagram."""
+    xdim = round((smax - smin) / 0.1 + 1, 0)
+    ydim = round((tmax - tmin) + 1, 0)
+
+    ti_size = np.linspace(tmin, tmax, int(ydim * 10))
+    si_size = np.linspace(smin, smax, int(xdim * 10))
+
+    si2d, ti2d = np.meshgrid(si_size, ti_size)
+
+    dens = sw.dens0(si2d, ti2d) - 1000
+    return si2d, ti2d, dens
+
+
+def get_cmap(cmap_name):
+    """Return a matplotlib colormap object.
+
+    From matplotlib.cm or cmocean.
+    An additional custom colormap for salinity is provided:
+    - "custom_salinity1"
+    """
+    try:
+        mpl.colormaps.register(cmap=LinearSegmentedColormap(
+            'cubehelix3', mpl._cm.cubehelix(gamma=1.0, s=2.0, r=1.0, h=3)),
+            name="new_cubehelix3", force=False)
+    except ValueError:
+        logger.info('Colormap new_cubehelix3 is already registered.')
+
+    if cmap_name in cmo.cmapnames:
+        colormap = cmo.cmap_d[cmap_name]
+    elif cmap_name in plt.colormaps():
+        colormap = plt.get_cmap(cmap_name)
+    elif cmap_name == "custom_salinity1":
+        colormap = shiftedcolormap(mpl.colormaps.get_cmap("new_cubehelix3"),
+                                   start=0,
+                                   midpoint=0.89,
+                                   stop=0.9,
+                                   name='shiftedcmap')
+    else:
+        raise ValueError(
+            'Got unrecognised name for the colormap `{}`. Colormaps should '
+            'be from the standard matplotlib set or from the cmocean '
+            'package.'.format(cmap_name))
+    return colormap
+
+
+def point_distance(lon_s4new, lat_s4new):
+    """Calculate the distance between points of the section.
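+
+    The distances are cumulative along-track values computed on the
+    WGS84 ellipsoid, so dist[0] is 0 and dist[-1] is the total length
+    of the section in km.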
+
+    Parameters
+    ----------
+    lon_s4new: numpy array
+        1d array of longitudes
+    lat_s4new: numpy array
+        1d array of latitudes
+
+    Returns
+    -------
+    dist: numpy array
+        1d array of distances between points in km.
+    """
+    g_proj = pyproj.Geod(ellps='WGS84')
+    (_, _, dist) = g_proj.inv(lon_s4new[0:-1], lat_s4new[0:-1], lon_s4new[1:],
+                              lat_s4new[1:])
+    dist = dist.cumsum() / 1000
+    dist = np.insert(dist, 0, 0)
+    return dist
+
+
+def get_series_lenght(datafile, cmor_var):
+    """Get the length of the time series.
+
+    This is a fix for climatologies: ESMValTool reduces the number of
+    dimensions if one of them is of size one.
+    """
+    if datafile.variables[cmor_var].ndim < 4:
+        series_lenght = 1
+    else:
+        series_lenght = datafile.variables[cmor_var].shape[0]
+    return series_lenght
+
+
+def get_provenance_record(attributes, data_type, file_type):
+    """Create a provenance record describing the diagnostic data and plot."""
+    if data_type == 'hofm' and file_type == 'npy':
+        caption = ("Data for Hovmoeller diagram. "
+                   "Region: {region}. Model: {mmodel} ".format(**attributes))
+    elif data_type == 'hofm' and file_type == 'png':
+        caption = ("Hovmoeller diagram. "
+                   "Region: {region}. Model: {mmodel} ".format(**attributes))
+    elif data_type == 'transect' and file_type == 'npy':
+        caption = ("Data for Transect. "
+                   "Region: {region}. Model: {mmodel} ".format(**attributes))
+    elif data_type == 'tsplot' and file_type == 'npy':
+        caption = ("Data for TS diagram. "
+                   "Region: {region}. Model: {mmodel} ".format(**attributes))
+    elif data_type == 'timmean' and file_type == 'nc':
+        caption = ("Global time mean. "
+                   "Region: {region}. Model: {mmodel} ".format(**attributes))
+    elif data_type == 'profile' and file_type == 'png':
+        caption = ("Mean vertical profile. "
+                   "Region: {region}. Model: {mmodel} ".format(**attributes))
+    elif data_type == 'plot2d' and file_type == 'png':
+        caption = ("Map of spatial distribution. "
+                   "Region: {region}. Model: {mmodel} ".format(**attributes))
+    elif data_type == 'plot2d_bias' and file_type == 'png':
+        caption = ("Map of spatial distribution of bias. "
+                   "Region: {region}. Model: {mmodel} ".format(**attributes))
+    elif data_type == 'transect_map' and file_type == 'png':
+        caption = ("Map of the transect points. "
+                   "Region: {region}. Model: {mmodel} ".format(**attributes))
+    elif data_type == 'transect' and file_type == 'png':
+        caption = ("Vertical transect. "
+                   "Region: {region}. Model: {mmodel} ".format(**attributes))
+    elif data_type == 'tsplot' and file_type == 'png':
+        caption = ("TS diagram. "
+                   "Region: {region}. Model: {mmodel} ".format(**attributes))
+    else:
+        caption = "None"
+
+    if isinstance(attributes['ori_file'], str):
+        ancestor_files = [attributes['ori_file'], attributes['areacello']]
+    else:
+        ancestor_files = ["No_ancestor_file"]
+
+    record = {
+        'caption': caption,
+        'region': attributes['region'],
+        'authors': ['koldunov_nikolay'],
+        'references': [
+            'contact_authors',
+        ],
+        'ancestors': ancestor_files
+    }
+    return record
diff --git a/esmvaltool/diag_scripts/austral_jet/asr.ncl b/esmvaltool/diag_scripts/austral_jet/asr.ncl
new file mode 100644
index 0000000000..5d855ad393
--- /dev/null
+++ b/esmvaltool/diag_scripts/austral_jet/asr.ncl
@@ -0,0 +1,325 @@
+; #############################################################################
+; austral_jet/asr.ncl
+; #############################################################################
+;
+; Description:
+;     Calculates the absorbed shortwave radiation (ASR) gradient and saves
+;     the output for MDER analysis.
+;
+; Required diag_script_info attributes:
+;     season: specify season of year.
+;
+; Optional diag_script_info attributes:
+;     average_ens: average over ensemble members of the same model.
+;     wdiag: if the diagnostics will be used for the MDER, a diagnostic
+;         name is needed.
+;     wdiag_title: this name will appear in the figure title.
+;
+; Caveats:
+;     "warning: in unique_labels_min (diag_scripts/shared/plot/style.ncl), Add
+;     more attributes to prio to make labels unique! Continuing with
+;     non-unique labels" is normal when using "average_ens = true".
+;
+; Modification history
+;     20191121-schlund_manuel: added new provenance tracking.
+;     20180725-schlund_manuel: ported to v2.0
+;     201303??-wenzel_sabrina: written.
+;
+; #############################################################################

load "$diag_scripts/../interface_scripts/interface.ncl"

load "$diag_scripts/shared/statistics.ncl"
load "$diag_scripts/shared/set_operators.ncl"

load "$diag_scripts/shared/plot/style.ncl"

load "$diag_scripts/shared/mder.ncl"


begin

  enter_msg(DIAG_SCRIPT, "")

  ; Variable
  VAR0 = variable_info[0]
  var0 = VAR0@short_name
  DIM_VAR = ListCount(variable_info)
  if (DIM_VAR .gt. 1) then
    error_msg("w", DIAG_SCRIPT, "", "this diagnostic supports only one " + \
              "variable, processing " + VAR0@short_name)
  end if

  ; Input data
  INFO0 = select_metadata_by_name(input_file_info, var0)
  DATASETS = metadata_att_as_array(INFO0, "dataset")

  ; Ensemble averaging (if desired)
  avgens = False
  if (isatt(diag_script_info, "average_ens")) then
    avgens = diag_script_info@average_ens
  end if
  if (avgens) then
    DATASETS := get_unique_values(DATASETS)
  else
    DATASETS := unique_labels_min(INFO0, (/"dataset", "ensemble"/))
  end if
  DIM_DAT = dimsizes(DATASETS)

  ; Directories
  work_dir = config_user_info@work_dir
  system("mkdir -p " + work_dir)
  system("mkdir -p " + config_user_info@plot_dir)

  ; Provenance
  AUTHORS = (/"wenzel_sabrina", "schlund_manuel"/)
  STATISTICS = (/"mean", "clim", "stddev"/)
  DOMAIN = "sh"
  PLOT_TYPE = "errorbar"
  REFERENCES = (/"wenzel16jclim"/)
  ANCESTORS = metadata_att_as_array(INFO0, "filename")

  log_info("++++++++++++++++++++++++++++++++++++++++++")
  log_info(DIAG_SCRIPT + " (var: " + var0 + ")")
  log_info("++++++++++++++++++++++++++++++++++++++++++")

end

begin

  ; ---------------------------------------------------------------------------
  ; Read recipe and config data
  ; ---------------------------------------------------------------------------

  ; Plot file type
  file_type = config_user_info@output_file_type
  if (ismissing(file_type)) then
    file_type = "ps"
  end if

  ; Required attributes
  req_atts = (/"season"/)
  exit_if_missing_atts(diag_script_info, req_atts)
  season = diag_script_info@season

  ; Output arrays
  val_grad = new((/2, DIM_DAT/), "float")
  val_grad!0 = "case"
  val_grad&case = (/"val", "stddev"/)
  val_grad!1 = "dataset"
  val_grad&dataset = DATASETS

  ; Get lat range from preprocessed data
  f = addfile(INFO0[0]@filename, "r")
  lat = f->lat
  if (dimsizes(lat) .gt. 1) then
    lat_range = (/min(lat), max(lat)/)
  else
    lat_range = f->lat_bnds
  end if
  log_info("Retrieved latitude range " + lat_range(0) + " to " + \
           lat_range(1) + " from preprocessed data")
  delete(f)

  ; ---------------------------------------------------------------------------
  ; Read and preprocess data
  ; ---------------------------------------------------------------------------

  ; Iterate over datasets
  do idat = 0, DIM_DAT-1
    log_info(DATASETS(idat))

    ; Average over ensemble members if desired
    if (avgens) then
      atts = True
      atts@dataset = DATASETS(idat)
      info := select_metadata_by_atts(INFO0, atts)
    else
      info := NewList("lifo")
      ListAppend(info, INFO0[idat])
    end if

    ; Get data
    A0 = get_average(info)
    info := info[0]

    ; -------------------------------------------------------------------------
    ; Process temporal data
    ; -------------------------------------------------------------------------

    dummy0 = time_operations(A0, info@start_year, info@end_year, \
                             "average", season, True)

    ; -------------------------------------------------------------------------
    ; Process spatial data
    ; -------------------------------------------------------------------------

    grad = new(dimsizes(dummy0&month), float)
    do tt = 0, dimsizes(dummy0&month) - 1
      dummy2 = area_operations(dummy0(tt, :, :), -20.0, -50.0, 0.0, 360.0, \
                               "average", True)
      dummy3 = area_operations(dummy0(tt, :, :), -50.0, -90.0, 0.0, 360.0, \
                               "average", True)
      grad(tt) = dummy2 - dummy3
      delete([/dummy2, dummy3/])
    end do

    val_grad(0, idat) = tofloat(avg(grad))
    val_grad(1, idat) = tofloat(stddev(grad))

    delete([/dummy0, grad, A0/])
  end do

  ; Consider ensemble averaging
  if (avgens) then
    INFO0 := get_unique_items(INFO0, "dataset")
  end if

  ; ---------------------------------------------------------------------------
  ; Plots
  ; ---------------------------------------------------------------------------

  ; Basic plotting settings
  colors = project_style(INFO0, diag_script_info, "colors")
  thicks = project_style(INFO0, diag_script_info, "thicks")
  lg_labels = unique_labels_min(INFO0, (/"dataset", "ensemble"/))
  plot_file = config_user_info@plot_dir + var0 + "_" + season

  ; Plotting instances
  wks = gsn_open_wks(file_type, plot_file)
  dum0 = new(DIM_DAT, graphic)
  error_bar0 = new(DIM_DAT, graphic)
  ti = ispan(1, dimsizes(val_grad(0, :)), 1)

  ; Get plot descriptions
  if (lat_range(0).le.-82.5 .and. lat_range(1).ge.82.5) then
    lat_string = "NG"
  else
    if (lat_range(0) .ge. 0.0) then
      lat_string = "(" + round(lat_range(0), 3) + "N"
    else if (lat_range(0) .lt. 0.0) then
      lat_string = "(" + -1 * round(lat_range(0), 3) + "S"
    end if
    end if
    lat_string = lat_string + "-"
    if (lat_range(1) .ge. 0.0) then
      lat_string = lat_string + round(lat_range(1), 3) + "N)"
    else if (lat_range(1) .lt. 0.0) then
      lat_string = lat_string + -1 * round(lat_range(1), 3) + "S)"
    end if
    end if
  end if
  if (season .eq. "yearly") then
"yearly") then + season = "ANN" + end if + + ; Plot appearance + res = True + res@tiYAxisString = "W m-2" ; add units title + res@tiMainString = season + " ASR " + lat_string + " gradient (" + \ + diag_script_info@wdiag_title(0) + ")" + res@gsnFrame = False ; don't draw yet + res@gsnDraw = False ; don't advance frame + res@gsnMaximize = True + res@xyMarker = thicks(0) ; choose type of marker + res@xyMonoMarkerColor = False + res@xyMarkerColors = colors(0) ; Marker color + res@xyMarkerSizeF = 0.01 ; Marker size (default 0.01) + res@tiMainFontHeightF = 0.022 + res@trYMinF = min(val_grad(0, :)) - 0.5 * abs(min(val_grad(0, :))) + res@trYMaxF = max(val_grad(0, :)) + 0.5 * abs(max(val_grad(0, :))) + res@trXMinF = 0 + res@trXMaxF = DIM_DAT + 1 + res@vpWidthF = 0.9 ; Change the aspect ratio, but + res@vpHeightF = 0.4 ; make plot as large as possible. + res@gsnYRefLine = 0.0 + res@tmXBLabelAngleF = 90.0 + res@tmXBMode = "Explicit" ; explicitly set Y-axis labels + res@tmXBLabelJust = "CenterRight" + res@tmXBValues = ti + res@tmXBLabels = lg_labels + res@tmXBLabelFontHeightF = 0.015 + + ; Plot data + plot = gsn_csm_xy(wks, \ + (/ti(0), ti(0)/), \ + (/val_grad(0, 0), val_grad(0, 0)/), \ + res) + mkres = True + mkres@gsMarkerSizeF = 0.01 + mkres@gsMarkerThicknessF = 1.0 + do t = 0, DIM_DAT - 1 + mkres@gsMarkerColor = colors(t) + mkres@gsMarkerIndex = 16 + dum0(t) = gsn_add_polymarker(wks, plot(0), ti(t), val_grad(0, t), mkres) + end do + + ; Plot error bars + polyres = True + do t = 0, DIM_DAT - 1 + polyres@gsLineColor = colors(t) + error_bar0(t) = gsn_add_polyline(wks, plot, (/ti(t), ti(t)/), \ + (/val_grad(0, t) - val_grad(1, t), \ + val_grad(0, t) + val_grad(1, t)/), \ + polyres) + end do + draw(plot) + frame(wks) + + ; Write ncdf file + new_path = work_dir + "asr_gradient_" + season + ".nc" + val_grad@var = var0 + val_grad@diag_script = DIAG_SCRIPT + val_grad@ncdf = new_path + outfile_grad = ncdf_write(val_grad, new_path) + + ; Provenance tracking + plot_path = plot_file + "." + file_type + caption = season + " ASR " + lat_string + " gradient (" + \ + diag_script_info@wdiag_title(0) + ")." + log_provenance(outfile_grad, plot_path, caption, STATISTICS, \ + DOMAIN, PLOT_TYPE, AUTHORS, REFERENCES, ANCESTORS) + + ; --------------------------------------------------------------------------- + ; Write NETCDF file for MDER + ; --------------------------------------------------------------------------- + + output = mder_output_desired() + if (output) then + work_dir = output@work_dir + opt = diag_script_info + log_info(opt@wdiag) + plot_path = "n/a" + plot_type = "" + + ; Iterate over all datasets + do idat = 0, DIM_DAT - 1 + log_debug("Processing " + DATASETS(idat)) + info = INFO0[idat] + new_path = work_dir + diag_file_name(info@filename, opt) + + ; Write files + val = val_grad(case|:, dataset|idat) + val@var = var0 + val@units = "W m-2" + val@diag_script = (/DIAG_SCRIPT/) + copy_VarAtts(diag_script_info, val) + val@dataset = DATASETS(idat) + val@start_year = info@start_year + val@end_year = info@end_year + val@ncdf = new_path + ncdf_outfile = ncdf_write(val, new_path) + delete([/val, new_path/]) + + ; Provenance tracking + caption = season + " ASR " + lat_string + " gradient (" + \ + diag_script_info@wdiag_title(0) + ") of " + DATASETS(idat) + "." 
+      log_provenance(ncdf_outfile, plot_path, caption, STATISTICS, \
+                     DOMAIN, plot_type, AUTHORS, REFERENCES, \
+                     info@filename)
+    end do
+  end if
+
+  leave_msg(DIAG_SCRIPT, "")
+
+end
diff --git a/esmvaltool/diag_scripts/austral_jet/main.ncl b/esmvaltool/diag_scripts/austral_jet/main.ncl
new file mode 100644
index 0000000000..1b7604d4f5
--- /dev/null
+++ b/esmvaltool/diag_scripts/austral_jet/main.ncl
@@ -0,0 +1,795 @@
+; #############################################################################
+; austral_jet/main.ncl
+; #############################################################################
+;
+; Description:
+;   This script is based on the diagnostic eyring13jclim.ncl but
+;   calculates trends and annual mean for various present-day diagnostics
+;   such as near-global (82.5°S-82.5°N) ozone at 50 hPa and temperature at
+;   100 hPa, September-October-November-December (SOND) ozone at 50 hPa
+;   over Antarctica (60°S-90°S), October-November-December-January (ONDJ)
+;   temperature at 100 hPa over Antarctica (60°S-90°S), DJF SH jet
+;   position at 850 hPa, and DJF upper tropospheric tropical (30°S-30°N)
+;   temperatures at 250 hPa. The trends are calculated over 1979-2005 for
+;   the past and over 2006-2050 for the future.
+;   The output will optionally be saved in NetCDF files for further
+;   processing of the MDER diagnostics.
+;
+; Required diag_script_info attributes:
+;   styleset: style set ("DEFAULT", "CMIP5").
+;   season: seasons (n-element array of strings, "ANN", "JAN", "FEB", "MAR",
+;     "DJF", "SON", etc.).
+;
+; Optional diag_script_info attributes:
+;   average_ens: average over ensemble members of the same model (default:
+;     False).
+;   rsondes: a set of additional observations can be added to the plot
+;     but will not be saved for the MDER diagnostics.
+;   rsondes_file: paths to additional observations, required when rsondes is
+;     set.
+;   rsondes_yr_min: minimum year for plot, required when rsondes is set.
+;   rsondes_yr_max: maximum year for plot, required when rsondes is set.
+;   wdiag: if the diagnostics will be used for the MDER analysis, a
+;     diagnostic name is needed.
+;   wdiag_title: this name will appear in the figure title.
+;   derive_var: derive variable (one of "tpp", "mmstf").
+;   derive_latrange: latitude range for derived variable, required when
+;     derive_var is set.
+;   derive_lev: level for derived variable, required when derive_var is set.
+;
+; Caveats:
+;   "warning: in unique_labels_min (diag_scripts/shared/plot/style.ncl), Add
+;   more attributes to prio to make labels unique! Continuing with non-unique
+;   labels" is normal when using "average_ens = true".
+;
+; Modification history:
+;   20191121-schlund_manuel: added new provenance tracking.
+;   20180716-schlund_manuel: ported to v2.0.
+;   20180329-wenzel_sabrina: modified original script (eyring13jclim.ncl) to
+;     austral-jet_diags.ncl.
+;   20170816-bock_lisa: added tags for reporting.
+;   20151209-lauer_axel: code clean-up, added call to write_references,
+;     several bugfixes for processing zonal wind (ua)
+;     and handling missing variable attribute "units".
+;   201510??-cionni_irene: written.
+; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/set_operators.ncl" + +load "$diag_scripts/shared/plot/style.ncl" + +load "$diag_scripts/shared/mder.ncl" + + +begin + + enter_msg(DIAG_SCRIPT, "") + + ; Variable + VAR0 = variable_info[0] + var0 = VAR0@short_name + DIM_VAR = ListCount(variable_info) + if (DIM_VAR .gt. 2) then + error_msg("w", DIAG_SCRIPT, "", "this diagnostic supports at most two " + \ + "variables, got " + DIM_VAR) + end if + + ; Input data + INFO0 = select_metadata_by_name(input_file_info, var0) + DATASETS = metadata_att_as_array(INFO0, "dataset") + + ; Ensemble averaging (if desired) + avgens = False + if (isatt(diag_script_info, "average_ens")) then + avgens = diag_script_info@average_ens + end if + if (avgens) then + DATASETS := get_unique_values(DATASETS) + else + DATASETS := unique_labels_min(INFO0, (/"dataset", "ensemble"/)) + end if + DIM_DAT = dimsizes(DATASETS) + + ; Directories + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) + system("mkdir -p " + config_user_info@plot_dir) + + ; Provenance + AUTHORS = (/"cionni_irene", "lauer_axel", "bock_lisa", "wenzel_sabrina", \ + "schlund_manuel"/) + DOMAIN = "sh" + PLOT_TYPE = "errorbar" + REFERENCES = (/"wenzel16jclim"/) + ANCESTORS = metadata_att_as_array(INFO0, "filename") + + log_info("++++++++++++++++++++++++++++++++++++++++++") + log_info(DIAG_SCRIPT + " (var: " + var0 + ")") + log_info("++++++++++++++++++++++++++++++++++++++++++") + +end + +begin + + ; --------------------------------------------------------------------------- + ; Read recipe and config data + ; --------------------------------------------------------------------------- + + ; Plot file type + file_type = config_user_info@output_file_type + if (ismissing(file_type)) then + file_type = "ps" + end if + + ; Required attributes + req_atts = (/"season"/) + exit_if_missing_atts(diag_script_info, req_atts) + season = diag_script_info@season + + ; Additional observations + if (isatt(diag_script_info, "rsondes")) then + if(isatt(diag_script_info, "rsondes_yr_min")) then + year_min = diag_script_info@rsondes_yr_min + else + error_msg("f", DIAG_SCRIPT, "", "plot requires attribute " + \ + "'rsondes_yr_min' in recipe if 'rsondes' is given") + end if + if(isatt(diag_script_info, "rsondes_yr_max")) then + year_max = diag_script_info@rsondes_yr_max + else + error_msg("f", DIAG_SCRIPT, "", "plot requires attribute " + \ + "'rsondes_yr_max' in recipe if 'rsondes' is given") + end if + end if + + ; Attributes for MDER analysis + if (.not. isatt(diag_script_info, "wdiag")) then + diag_script_info@wdiag = (/"", ""/) + end if + if (.not. 
isatt(diag_script_info, "wdiag_title")) then
+    diag_script_info@wdiag_title = (/"", ""/)
+  end if
+
+  ; Output arrays
+  val_trend = new((/2, DIM_DAT/), "float")
+  val_trend!0 = "case"
+  val_trend&case = (/"val", "stddev"/)
+  val_trend!1 = "dataset"
+  val_trend&dataset = DATASETS
+  val_mean = new((/2, DIM_DAT/), "float")
+  val_mean!0 = "case"
+  val_mean&case = (/"val", "stddev"/)
+  val_mean!1 = "dataset"
+  val_mean&dataset = DATASETS
+
+  ; ---------------------------------------------------------------------------
+  ; Derive variables (using NCL)
+  ; ---------------------------------------------------------------------------
+
+  if (isatt(diag_script_info, "derive_var")) then
+    new_var = diag_script_info@derive_var
+    var0 = new_var
+    log_info("Deriving variable " + new_var)
+
+    ; Check attributes
+    if (isatt(diag_script_info, "derive_latrange")) then
+      lat_range = diag_script_info@derive_latrange
+    else
+      error_msg("f", DIAG_SCRIPT, "", "when deriving a variable the " + \
+                "attribute derive_latrange is required")
+    end if
+    if (isatt(diag_script_info, "derive_lev")) then
+      lev = diag_script_info@derive_lev
+    else
+      error_msg("f", DIAG_SCRIPT, "", "when deriving a variable the " + \
+                "attribute derive_lev is required")
+    end if
+    log_info("Retrieved pressure level " + lev + " and latitude range " + \
+             lat_range(0) + " to " + lat_range(1) + " from recipe")
+
+    ; Tropopause pressure tpp
+    if (new_var .eq. "tpp") then
+      INFO0 := select_metadata_by_name(input_file_info, "ta")
+      VAR0@short_name = new_var
+      VAR0@standard_name = "tropopause_air_pressure"
+      VAR0@long_name = "Tropopause Air Pressure"
+      VAR0@units = "hPa"
+
+      ; Iterate over datasets
+      dim_info = ListCount(INFO0)
+      do idat = 0, dim_info - 1
+        info = INFO0[idat]
+        f = addfile(info@filename, "r")
+        if isfilevar(f, new_var) then
+          log_info("Variable " + new_var + " for " + info@dataset + \
+                   " already derived, skipping")
+          copy_VarAtts(VAR0, INFO0[idat])
+          delete(f)
+          continue
+        end if
+
+        ; Read data
+        A0 = read_data(info)
+        copy_VarAtts(VAR0, INFO0[idat])
+
+        ; Order plev: lowest pressure first (in hPa)
+        pr = A0&plev
+        pr = pr / 100.0
+        if (pr(0) .eq. max(pr)) then
+          pin = pr(::-1)
+          tin = A0({time|:}, {lat|:}, {lon|:}, {plev|::-1})
+        else
+          pin = pr
+          tin = A0({time|:}, {lat|:}, {lon|:}, {plev|:})
+        end if
+
+        ; Calculate tropopause pressure and copy metadata
+        outvar = trop_wmo(pin, tin, 0, False)
+        outvar!0 = A0!0
+        outvar!1 = A0!2
+        outvar!2 = A0!3
+        outvar&time = A0&time
+        outvar&lat = A0&lat
+        outvar&lon = A0&lon
+        outvar@var = new_var
+        outvar := area_operations(outvar, lat_range(0), lat_range(1), 0.0, \
+                                  360.0, "average", True)
+        outvar@diag_script = DIAG_SCRIPT
+        ncdf_outfile = ncdf_write(outvar, info@filename)
+        delete([/A0, pr, pin, tin, outvar, ncdf_outfile/])
+      end do
+
+    ; Zonal mean meridional stream function mmstf
+    else if (new_var .eq. "mmstf") then
+      INFO0 := select_metadata_by_name(input_file_info, "ps")
+      INFO1 := select_metadata_by_name(input_file_info, "va")
+      VAR0@short_name = new_var
+      VAR0@standard_name = "zonal_meridional_stream_function"
+      VAR0@long_name = "Zonal Meridional Stream Function"
+      VAR0@units = "kg s-1"
+
+      ; Iterate over datasets
+      dim_info = ListCount(INFO0)
+      do idat = 0, dim_info - 1
+        info0 = INFO0[idat]
+        info1 = INFO1[idat]
+
+        ; Check datasets
+        if (isatt(info0, "ensemble") .and. isatt(info1, "ensemble")) then
+          fail = ((info0@dataset .ne. info1@dataset) .or. \
+                  (info0@ensemble .ne. info1@ensemble))
+        else
+          fail = (info0@dataset .ne.
info1@dataset) + end if + if (fail) then + error_msg("f", DIAG_SCRIPT, "", "derivation of 'mmstf' not " + \ + "possible, 'va' and 'ps' do not cover the same datasets") + end if + + ; Check if variable was already derived + f = addfile(info0@filename, "r") + if isfilevar(f, new_var) then + log_info("Variable " + new_var + " for " + info0@dataset + \ + " already derived, skipping") + copy_VarAtts(VAR0, INFO0[idat]) + delete(f) + continue + end if + + ; Read data + ps = read_data(info0) + va = read_data(info1) + copy_VarAtts(VAR0, INFO0[idat]) + if (dimsizes(va&lat) .ne. dimsizes(ps&lat)) then + tmp = ps({time|:}, {lat|va&lat}, {lon|:}) + delete(ps) + ps = tmp + delete(tmp) ; ps := ps(...) causes memory leaks + end if + + ; Preprocess plev (see docu of zonal_mpsi_Wrap) + pr = va&plev + pidx = ind(pr .gt. 500.0 .and. pr .lt. 100500.0) + pr := pr(pidx) + vt2b = va(:, pidx, :, :) + if (pr(0) .eq. max(pr)) then + pr := pr(::-1) + tmp = vt2b({time|:}, {plev|::-1}, {lat|:}, {lon|:}) + delete(vt2b) + vt2b = tmp + delete(tmp) ; vt2b := vt2b(...) causes memory leaks + end if + + ; Calculate zonal meridional stream function + outvar = zonal_mpsi_Wrap(vt2b, va&lat, pr, ps) + outvar!0 = va!0 + outvar!1 = va!1 + outvar!2 = va!2 + outvar&time = va&time + outvar&plev = pr + outvar&lat = va&lat + outvar@var = new_var + outvar@diag_script = DIAG_SCRIPT + ncdf_outfile = ncdf_write(outvar, info0@filename) + delete([/va, ps, pr, pidx, vt2b, outvar, ncdf_outfile/]) + end do + else + error_msg("f", DIAG_SCRIPT, "", "don't know how to derive variable " + \ + new_var) + end if + end if + + ; Get plev and lat range from preprocessed data if variable is not derived + else + info = INFO0[0] + f = addfile(info@filename, "r") + if (isfilevar(f, "plev") .and. isfilevar(f, "lat")) then + plev = f->plev + lat = f->lat + if (dimsizes(plev) .eq. 1) then + lev = plev(0) + else + error_msg("f", DIAG_SCRIPT, "", "ambiguous pressure levels given, " + \ + "only one supported") + end if + if (dimsizes(lat) .gt. 1) then + lat_range = (/min(lat), max(lat)/) + else + lat_range = f->lat_bnds + end if + else + f_var = f->$var0$ + if (isatt(f, "plev") .and. \ ; ESMValCore < 2.11.0 + isatt(f, "lat_range_0") .and. \ + isatt(f, "lat_range_1")) then + lev = f@plev + lat_range = (/f@lat_range_0, f@lat_range_1/) + else if (isatt(f_var, "plev") .and. \ ; ESMValCore >= 2.11.0 + isatt(f_var, "lat_range_0") .and. 
\ + isatt(f_var, "lat_range_1")) then + lev = f_var@plev + lat_range = (/f_var@lat_range_0, f_var@lat_range_1/) + else + error_msg("f", DIAG_SCRIPT, "", "cannot read plev and latrange") + end if + end if + delete(f_var) + end if + log_info("Retrieved pressure level " + lev + " and latitude range " + \ + lat_range(0) + " to " + lat_range(1) + " from preprocessed data") + delete(f) + end if + + ; --------------------------------------------------------------------------- + ; Read and preprocess data + ; --------------------------------------------------------------------------- + + ; Iterate over datasets + do idat = 0, DIM_DAT - 1 + log_info(DATASETS(idat)) + + ; Average over ensemble members if desired + if (avgens) then + atts = True + atts@dataset = DATASETS(idat) + info := select_metadata_by_atts(INFO0, atts) + else + info := NewList("lifo") + ListAppend(info, INFO0[idat]) + end if + + ; Get data + A0 = get_average(info) + info := info[0] + + ; ------------------------------------------------------------------------- + ; Process spatial data + ; ------------------------------------------------------------------------- + + ; Process mmstf (SH Hadley cell boundary) + if (var0 .eq. "mmstf") then + tmp = A0({time|:}, {plev|lev}, {lat|lat_range(0):lat_range(1)}) + dummy1 = new(dimsizes(A0&time), typeof(A0&lat)) + + ; 1st degree polynomial fit for all times + do tt = 0, dimsizes(A0&time) - 1 + idxmin = minind(tmp(tt, :)) + idxmax = maxind(tmp(tt, :)) + tmpp = tmp(tt, idxmin:idxmax) + idx1 = min(ind(tmpp .ge. 0.0)) + idx2 = max(ind(tmpp .le. 0.0)) + if (ismissing(idx1)) then + idx1 = 1 + end if + if (ismissing(idx2)) then + idx2 = dimsizes(tmpp) - 1 + end if + if (idx2 .gt. (idx1 - 1)) then + idx2 = idx1 - 1 + end if + scal_x = tmpp(idx2:idx1) + scal_y = tmpp&lat(idx2:idx1) + pf = lspoly(scal_x, scal_y, 1, 2) + yfit = pf(0) + pf(1) * scal_x + dummy1(tt) = yfit(1) + delete([/pf, idx1, idx2, idxmin, idxmax, yfit, tmpp, scal_x, scal_y/]) + end do + dummy1!0 = "time" + dummy1&time = A0&time + VAR0@units = "degrees" + delete(tmp) + + ; Other variables (no operations needed, done in preprocessing) + else + dummy1 = A0 + end if + delete(A0) + + ; Convert units of tro3 + if (var0 .eq. "tro3") then + dummy1 = dummy1 * 10e-3 + VAR0@units = "ppmv" + end if + + ; ------------------------------------------------------------------------- + ; Process temporal data + ; ------------------------------------------------------------------------- + + if (season .ne. "yearly") then + tmp = time_operations(dummy1, info@start_year, info@end_year, \ + "extract", season, True) + dummy2 = time_operations(tmp, info@start_year, info@end_year, \ + "average", "yearly", True) + delete(tmp) + else + dummy2 = time_operations(dummy1, info@start_year, info@end_year, \ + "average", season, True) + end if + + ; Trend + ttmp = ispan(info@start_year, info@end_year, 1) + dummy3 = dummy2 - dim_avg_n(dummy2, 0) + rc = regline(ttmp, dummy3) + if (var0 .ne. 
"tpp") then + val_trend(0, idat) = tofloat(rc) * 10 + val_trend(1, idat) = tofloat(rc@rstd) * 10 + else + val_trend(0, idat) = tofloat(rc) + val_trend(1, idat) = tofloat(rc@rstd) + end if + + ; Mean + val_mean(0, idat) = tofloat(avg(dummy2)) + val_mean(1, idat) = tofloat(stddev(dummy2) / sqrt(dimsizes(dummy2))) + delete([/dummy1, dummy2, dummy3, ttmp, rc/]) + end do + + ; Consider ensemble averaging + if (avgens) then + INFO0 := get_unique_items(INFO0, "dataset") + end if + + ; --------------------------------------------------------------------------- + ; Process additional observations + ; --------------------------------------------------------------------------- + + if (isatt(diag_script_info, "rsondes")) then + rsondes = diag_script_info@rsondes + rsondes_f = diag_script_info@rsondes_file + dim_rsondes = dimsizes(rsondes) + tmp_vtrend = val_trend + tmp_vt = val_mean + delete([/val_trend, val_mean/]) + DIM_DAT = DIM_DAT + dim_rsondes + dataset_names = new(DIM_DAT, string) + dataset_names(:dim_rsondes-1) = rsondes + dataset_names(dim_rsondes:) = DATASETS + DATASETS := dataset_names + delete(dataset_names) + + ; Trend + val_trend = new((/2, DIM_DAT/), "float") + val_trend(:, dim_rsondes:) = (/tmp_vtrend/) + val_trend!0 = "case" + val_trend&case = (/"val", "stddev"/) + val_trend!1 = "dataset" + val_trend&dataset = DATASETS + + ; Mean + val_mean = new((/2, DIM_DAT/), "float") + val_mean(:, dim_rsondes:) = (/tmp_vt/) + val_mean!0 = "case" + val_mean&case = (/"val", "stddev"/) + val_mean!1 = "dataset" + val_mean&dataset = DATASETS + delete([/tmp_vtrend, tmp_vt/]) + + ; Read files + do irs = 0, dim_rsondes - 1 + rs_fils = systemfunc("ls " + rsondes_f(irs)) + tmp = addfile(rs_fils, "r") + A0 = tmp->$var0$ + delete([/rs_fils, tmp/]) + tt = cd_calendar(A0&time, 0) + rst_max = max(tt(:, 0)) + rst_min = min(tt(:, 0)) + if (year_min .ge. rst_min) then + t_min = year_min + else + t_min = rst_min + end if + if (year_max .ge. 
rst_max) then + t_max = year_max + else + t_max = rst_max + end if + + ; Read data and process data + ttmp = ispan(toint(t_min), toint(t_max), 1) + A0&time@calendar = "standard" + A0!1 = "plev" + A0!2 = "station" + dummy1 = tofloat(dim_avg_Wrap(A0(time|:, {plev|lev}, station|:))) + dummy1!0 = "time" + dummy1&time = A0&time + if (season.ne."yearly") then + tmp_t = time_operations(dummy1, toint(t_min), toint(t_max), \ + "extract", season, True) + dummy2 = time_operations(tmp_t, toint(t_min), toint(t_max), \ + "average", "yearly", True) + delete(tmp_t) + else + dummy2 = time_operations(dummy1, toint(t_min), toint(t_max), \ + "average", season, True) + end if + + ; Trend + dummy3 = dummy2 - dim_avg_n(dummy2, 0) + rc = regline(ttmp, dummy3) + val_trend(0, irs) = tofloat(rc) * 10 + val_trend(1, irs) = tofloat(rc@rstd) * 10 + + ; Mean + val_mean(0, irs) = tofloat(avg(dummy2)) + val_mean(1, irs) = tofloat(stddev(dummy2)) + delete([/dummy1, dummy2, dummy3/]) + delete([/ttmp, rc, A0/]) + end do + end if + + ; --------------------------------------------------------------------------- + ; Plots + ; --------------------------------------------------------------------------- + + ; Basic plotting settings + colors = project_style(INFO0, diag_script_info, "colors") + thicks = project_style(INFO0, diag_script_info, "thicks") + lg_labels = unique_labels_min(INFO0, (/"dataset", "ensemble"/)) + plot_file1 = config_user_info@plot_dir + DIAG_SCRIPT + "_" + var0 + \ + "_" + season + "_trend" + plot_file2 = config_user_info@plot_dir + DIAG_SCRIPT + "_" + var0 + \ + "_" + season + "_climmean" + + ; Plotting instances + wks1 = gsn_open_wks(file_type, plot_file1) + wks2 = gsn_open_wks(file_type, plot_file2) + dum0 = new(DIM_DAT, graphic) + dum1 = new(DIM_DAT, graphic) + error_bar0 = new(DIM_DAT, graphic) + error_bar1 = new(DIM_DAT, graphic) + ti = ispan(1, dimsizes(val_trend(0, :)), 1) + + ; Get plot descriptions + if (lat_range(0).le.-82.5 .and. lat_range(1).ge.82.5) then + lat_string = "NG" + else + if (lat_range(0) .ge. 0.0) then + lat_string = "(" + round(lat_range(0), 3) + "N" + else if (lat_range(0) .lt. 0.0) then + lat_string = "(" + -1 * round(lat_range(0), 3) + "S" + end if + end if + lat_string = lat_string + "-" + if (lat_range(1) .ge. 0.0) then + lat_string = lat_string + round(lat_range(1), 3) + "N)" + else if (lat_range(1) .lt. 0.0) then + lat_string = lat_string + -1 * round(lat_range(1), 3) + "S)" + end if + end if + end if + if (season .eq. 
"yearly") then + season = "ANN" + end if + + ; ------------------------------------------------------------------------- + ; Plot 1: Trend + ; ------------------------------------------------------------------------- + + res = True + if ((isatt(VAR0, "units"))) then + res@tiYAxisString = "(" + VAR0@units + "/dec)" + else + res@tiYAxisString = "(per dec)" + end if + res@tiMainString = lev/100 + " hPa " + season + " " + var0 + " " + \ + lat_string + " trend (" + diag_script_info@wdiag_title(0) + ")" + res@gsnFrame = False ; don't draw yet + res@gsnDraw = False ; don't advance frame + res@gsnMaximize = True + res@xyMarker = thicks(0) ; choose type of marker (16) + res@xyMonoMarkerColor = False + res@xyMarkerColors = colors(0) ; Marker color + res@xyMarkerSizeF = 0.01 ; Marker size (default 0.01) + res@tiMainFontHeightF = 0.022 + res@trYMinF = min(val_trend(0, :)) - 0.5 * abs(min(val_trend(0, :))) + res@trYMaxF = max(val_trend(0, :)) + 0.5 * abs(max(val_trend(0, :))) + res@trXMinF = 0 + res@trXMaxF = DIM_DAT + 1 + res@vpWidthF = 0.9 ; Change the aspect ratio, but + res@vpHeightF = 0.4 ; make plot as large as possible. + res@gsnYRefLine = 0.0 + res@tmXBLabelAngleF = 90.0 + res@tmXBMode = "Explicit" ; explicitly set Y-axis labels + res@tmXBLabelJust = "CenterRight" + res@tmXBValues = ti + res@tmXBLabels = lg_labels + res@tmXBLabelFontHeightF = 0.015 + + ; Plot trend + plot1 = gsn_csm_xy(wks1, \ + (/ti(0), ti(0)/), \ + (/val_trend(0, 0), val_trend(0, 0)/), \ + res) + mkres = True + mkres@gsMarkerSizeF = 0.01 + mkres@gsMarkerThicknessF = 1.0 + do t = 0, DIM_DAT - 1 + mkres@gsMarkerColor = colors(t) + mkres@gsMarkerIndex = 16 + dum0(t) = gsn_add_polymarker(wks1, plot1, ti(t), val_trend(0, t), mkres) + end do + + ; Plot error bars + polyres = True + do t = 0, DIM_DAT - 1 + polyres@gsLineColor = colors(t) + error_bar0(t) = gsn_add_polyline(wks1, plot1, (/ti(t), ti(t)/), \ + (/val_trend(0, t) - val_trend(1, t), \ + val_trend(0, t) + val_trend(1, t)/), \ + polyres) + end do + draw(plot1) + frame(wks1) + + ; Write ncdf file + new_path = work_dir + "trend_" + season + "_" + var0 + ".nc" + val_trend@var = var0 + val_trend@diag_script = DIAG_SCRIPT + val_trend@ncdf = new_path + outfile_trend = ncdf_write(val_trend, new_path) + + ; Provenance tracking + plot_path = plot_file1 + "." + file_type + caption = "Trends in " + season + " " + var0 + "." 
+ statistics := (/"trend", "stddev"/) + log_provenance(outfile_trend, plot_path, caption, statistics, \ + DOMAIN, PLOT_TYPE, AUTHORS, REFERENCES, ANCESTORS) + + ; ------------------------------------------------------------------------- + ; Plot 2: Climatological mean + ; ------------------------------------------------------------------------- + + res@tiMainString = lev/100 + " hPa " + season + " " + var0 + " " + \ + lat_string + " mean (" + diag_script_info@wdiag_title(1) + ")" + res@tiYAxisString = "(" + VAR0@units + ")" + res@trYMinF = min(val_mean(0, :)) - 0.1 * abs(min(val_mean(0, :) - \ + val_mean(1, :))) + res@trYMaxF = max(val_mean(0, :)) + 0.1 * abs(max(val_mean(0, :) + \ + val_mean(1, :))) + + ; Plot mean + plot2 = gsn_csm_xy(wks2, \ + (/ti(0), ti(0)/), \ + (/val_mean(0, 0), val_mean(0, 0)/), \ + res) + mkres = True + mkres@gsMarkerSizeF = 0.01 + mkres@gsMarkerThicknessF = 1.0 + do t = 0, DIM_DAT - 1 + mkres@gsMarkerColor = colors(t) + mkres@gsMarkerIndex = 16 ; thicks(t) + dum1(t) = gsn_add_polymarker(wks2, plot2, ti(t), val_mean(0, t), mkres) + end do + + ; Plot error bars + polyres = True + do t = 0, DIM_DAT - 1 + polyres@gsLineColor = colors(t) + error_bar1(t) = gsn_add_polyline(wks2, plot2, (/ti(t), ti(t)/), \ + (/val_mean(0, t) - val_mean(1, t), \ + val_mean(0, t) + val_mean(1, t)/), \ + polyres) + end do + draw(plot2) + frame(wks2) + + ; Write ncdf file + new_path = work_dir + "mean_" + season + "_" + var0 + ".nc" + val_mean@var = var0 + val_mean@diag_script = DIAG_SCRIPT + val_mean@ncdf = new_path + outfile_mean = ncdf_write(val_mean, new_path) + + ; Provenance tracking + plot_path = plot_file2 + "." + file_type + caption = "Climatological mean in " + season + " " + var0 + "." + statistics := (/"mean", "clim", "stddev"/) + log_provenance(outfile_mean, plot_path, caption, statistics, \ + DOMAIN, PLOT_TYPE, AUTHORS, REFERENCES, ANCESTORS) + + ; --------------------------------------------------------------------------- + ; Write NETCDF file for MDER + ; --------------------------------------------------------------------------- + + output = mder_output_desired() + if (output) then + work_dir := output@work_dir + opt = diag_script_info + plot_path = "n/a" + plot_type = "" + + ; Iterate over all desired diagnostics + do dd = 0, dimsizes(diag_script_info@wdiag) - 1 + opt@wdiag := diag_script_info@wdiag(dd) + log_info(opt@wdiag) + + ; Iterate over all datasets + do idat = 0, DIM_DAT - 1 + log_debug("Processing " + DATASETS(idat)) + info = INFO0[idat] + new_path = work_dir + diag_file_name(info@filename, opt) + + ; Write files + if (isStrSubset(opt@wdiag, "_t")) then + val = val_trend(case|:, dataset|idat) + val@units = "(" + VAR0@units + "/dec)" + statistics := (/"trend", "stddev"/) + caption = "Trend in " + season + " " + var0 + " of " + \ + DATASETS(idat) + "." + else if(isStrSubset(opt@wdiag, "_c")) then + val = val_mean(case|:, dataset|idat) + val@units = "(" + VAR0@units + ")" + statistics := (/"mean", "clim", "stddev"/) + caption = "Climatological mean in " + season + " " + var0 + \ + " of " + DATASETS(idat) + "." 
+        else
+          error_msg("f", DIAG_SCRIPT, "", "Diagnostic " + opt@wdiag + \
+                    " not supported for MDER output")
+        end if
+        end if
+        val@var = var0
+        val@diag_script = (/DIAG_SCRIPT/)
+        copy_VarAtts(diag_script_info, val)
+        val@wdiag := opt@wdiag
+        val@dataset = DATASETS(idat)
+        val@start_year = info@start_year
+        val@end_year = info@end_year
+        val@ncdf = new_path
+        ncdf_outfile = ncdf_write(val, new_path)
+        delete([/val, new_path/])
+
+        ; Provenance tracking
+        log_provenance(ncdf_outfile, plot_path, caption, statistics, \
+                       DOMAIN, plot_type, AUTHORS, REFERENCES, \
+                       info@filename)
+      end do
+    end do
+  end if
+
+  leave_msg(DIAG_SCRIPT, "")
+
+end
diff --git a/esmvaltool/diag_scripts/autoassess/_plot_mo_metrics.py b/esmvaltool/diag_scripts/autoassess/_plot_mo_metrics.py
index d26c03847b..52eca796ab 100644
--- a/esmvaltool/diag_scripts/autoassess/_plot_mo_metrics.py
+++ b/esmvaltool/diag_scripts/autoassess/_plot_mo_metrics.py
@@ -13,6 +13,8 @@
 import matplotlib.pyplot as plt
 import numpy as np
 
+from esmvaltool.diag_scripts.shared import save_figure
+
 # Define some colours
 BLACK = '#000000'
 RED = '#FF0000'
@@ -434,7 +436,13 @@ def normalise(test, ref, strict=False):
             ref[metric] = 1.e-20
             norm[metric] = test[metric] / ref[metric]
         else:
-            norm[metric] = tuple(x / ref[metric] for x in test[metric])
+            if ref[metric] != 0:
+                norm[metric] = tuple(x / ref[metric]
+                                     for x in test[metric])
+            else:
+                ref[metric] = 1.
+                norm[metric] = tuple(x * 0. / ref[metric]
+                                     for x in test[metric])
 
     return norm
 
@@ -590,7 +598,8 @@ def plot_nac(cref,
              acc=None,
              extend_y=False,
              title=None,
-             ofile=None):
+             ofile=None,
+             config=None):
     """
     Routine to produce NAC plot.
 
@@ -605,6 +614,7 @@ def plot_nac(cref,
     :param bool extend_y: Extend y-axis to include obs/acc ranges
     :param str title: Plot title
    :param str ofile: Plot file name
+    :param dict config: ESMValTool configuration object
     """
     # initialize
     if metrics is None:
@@ -676,15 +686,31 @@ def plot_nac(cref,
     legend.set_title('Vs %s' % cref, prop={'size': 'small'})
 
     # Display or produce file
-    if ofile:
-        # Create directory to write file to
-        odir = os.path.dirname(ofile)
-        if not os.path.isdir(odir):
-            os.makedirs(odir)
+    if ofile and config:
+        os.makedirs(config['plot_dir'], exist_ok=True)
+        provenance = get_provenance_record(config)
         # Note that bbox_inches only works for png plots
-        plt.savefig(ofile, bbox_extra_artists=(legend, ), bbox_inches='tight')
+        save_figure(ofile, provenance, config, fig,
+                    bbox_extra_artists=(legend, ), bbox_inches='tight')
     else:
         # Need the following to attempt to display legend in frame
         fig.subplots_adjust(right=0.85)
         plt.show()
     plt.close()
+
+
+def get_provenance_record(config):
+    """Create a provenance record describing the diagnostic data and plot."""
+    filenames = [item["filename"] for item in config["input_data"].values()]
+    record = {
+        'caption': 'Normalised assessment criteria plot',
+        'plot_type': 'metrics',
+        'authors': [
+            'williams_keith',
+            'predoi_valeriu',
+            'sellar_alistair'
+        ],
+        "ancestors": filenames,
+    }
+
+    return record
diff --git a/esmvaltool/diag_scripts/autoassess/_rms_radiation.py b/esmvaltool/diag_scripts/autoassess/_rms_radiation.py
deleted file mode 100644
index a96f06cb23..0000000000
--- a/esmvaltool/diag_scripts/autoassess/_rms_radiation.py
+++ /dev/null
@@ -1,314 +0,0 @@
-"""
-Port for ESMValTool v2 from v1.
- -Uses: ESMValTool v2, Python 3.x -Valeriu Predoi, UREAD, July 2018 - -Functionality: computes root mean squares for a bunch of geographical -regions; - -Original docstring: -This file contains an rms class which contains all the information needed -to make a rms table of a particular region. -""" - -import os -import math -import logging -import numpy.ma as ma -import iris -from esmvaltool.diag_scripts.autoassess._valmod_radiation import area_avg - -logger = logging.getLogger(os.path.basename(__file__)) - - -class RMSLISTCLASS(list): - """ - Construct the regions class. - - This is the class for a list of RMSCLASS (i.e. for lots of regions). - """ - - def __init__(self, *args): - """Init.""" - if not args: - super(RMSLISTCLASS, self).__init__() - else: - super(RMSLISTCLASS, self).__init__(args[0]) - - def __repr__(self): - """Repr.""" - rms_out = "[" - for rms_item in self: - rms_out += "rms.RMSCLASS for " + rms_item.region + ", \n" - rms_out = rms_out[0:-3] - rms_out += "]" - - return rms_out - - def __call__(self, region=False): - """Call.""" - rms_found = False - region_list = [] - for rms_item in self: - region_list.append(rms_item.region) - if region: - if rms_item.region == region: - rms_returned = rms_item - rms_found = True - if not region: - logger.warning( - "Please supply a region using the region='xxx' input. " + - "Available regions are:") - elif not rms_found: - logger.warning("ERROR: Requested region not found.") - if not rms_found: - logger.error(region_list) - raise Exception - return rms_returned - - -# This is the class for one set of rms values (i.e. for one region) -class RMSCLASS: - """Class per region.""" - - def __init__(self, region, exper='experiment', control='control'): - """ - Create instances of this class but also start making. - - html files that will contain all the rms data. (old) - - region = the region name - exper = experiment jobid - control = control jobid. 
- """ - # Store the region name, experiment and control - self.region = region - self.exper = exper - self.control = control - - # This could be a dictionary in the future; not now tho - - # Store the region boundaries - if region == 'north': - self.region_bounds = [-180, 30, 180, 90] - if region == 'south': - self.region_bounds = [-180, -90, 180, -30] - if region == 'tropical_land': - self.region_bounds = [-180, -30, 180, 30] - if region == 'tropical_ocean': - self.region_bounds = [-180, -30, 180, 30] - if region == 'east_asia': - self.region_bounds = [100, 20, 145, 50] - if region == 'natl_europe': - self.region_bounds = [-45, 25, 60, 75] - if region == 'australia_land': - self.region_bounds = [100, -45, 155, -10] - - # Store the end of the mask key - if region == 'tropical_land': - self.mask_end = 'land_gt_50pc' - if region == 'tropical_ocean': - self.mask_end = 'ocean_gt_50pc' - if region == 'australia_land': - self.mask_end = 'land_gt_50pc' - if region == 'sahara_n_africa': - self.mask_end = 'SaharaNC' - if region == 'tropical_n_africa': - self.mask_end = 'TNAfrica' - if region == 'east_africa': - self.mask_end = 'EAfrica' - if region == 'central_africa': - self.mask_end = 'CAfrica' - if region == 'southern_africa': - self.mask_end = 'SAfrica' - if region == 'africa_land': - self.mask_end = 'Africa' - - # Make a blank dictionary to store the values - self.data_dict = {} - - # Allow iterations over this - def __iter__(self): - """Iter.""" - return self - - # This defines how this class is shown on the screen if you print it - def __repr__(self): - """Repr.""" - rms_out = "rms.RMSCLASS for {0}".format(self.region) - return rms_out - - def calc(self, toplot_cube, mask_cube): - """Calculate the rms value of a cube for this region. - - toplot_cube = (cube) cube that is to be plotted - mask_cube = (cube) the mask to be applied (land/sea) - """ - # Make a copy of the input cube - working_cube = toplot_cube.copy() - - # What type of plot is this - plot_type = 'lat_lon' - if not toplot_cube.coords(axis='x'): - plot_type = 'zonal_mean' - else: - if len(toplot_cube.coords(axis='x')[0].points) == 1: - plot_type = 'zonal_mean' - if not toplot_cube.coords(axis='y'): - plot_type = 'meridional_mean' - else: - if len(toplot_cube.coords(axis='y')[0].points) == 1: - plot_type = 'meridional_mean' - - # Apply the mask but only for lat_lon plots - if hasattr(self, 'mask_end'): - if plot_type == 'lat_lon': - # Apply the mask - working_cube.data = \ - ma.masked_array(working_cube.data, - mask=(mask_cube.data > 0.5)) - - else: - # If there is a mask but we are using zonal - # mean or meridional mean, return missing - return 1e+20 - - # Extract a region - if hasattr(self, 'region_bounds'): - - # Extract just the latitudes you want - lonc = iris.Constraint() - latc = iris.Constraint() - if plot_type == 'lat_lon' or plot_type == 'meridional_mean': - lonc = iris.Constraint( - longitude=lambda lon: - self.region_bounds[0] <= lon <= self.region_bounds[2] - ) - if plot_type == 'lat_lon' or plot_type == 'zonal_mean': - latc = iris.Constraint( - latitude=lambda lat: - self.region_bounds[1] <= lat <= self.region_bounds[3] - ) - working_cube = working_cube.extract(lonc & latc) - - # Check to see if we have any data left. - # If not then apply a missing data number. 
- amount_of_data = len(working_cube.data) - if hasattr(working_cube.data, 'compressed'): - amount_of_data = len(working_cube.data.compressed()) - if amount_of_data == 0: - rms_float = 1e+20 - else: - logger.info('Calculating RMS for %s', self.region) - - # Square the values - squared_cube = working_cube**2 - - # Mean the values - area_average = area_avg( - squared_cube, coord1='latitude', coord2='longitude') - - # Square root the answer - rms_float = math.sqrt(area_average.data) - - return rms_float - - def calc_wrapper(self, toplot_cube, mask_cube, page_title): - """ - Get the RMS value and adds it to its own data array. - - toplot_cube = (cube) cube that is to be plotted - mask_cube = (cube) mask land/sea - page_title = (str) the page title for this plot - """ - rms_float = self.calc(toplot_cube, mask_cube) - self.data_dict[page_title] = [] - if rms_float: - self.data_dict[page_title].append(rms_float) - return rms_float - - def tofile(self, csv_dir): - """Output all the RMS statistics to csv files.""" - csv_file = 'summary_' + self.region + '_RMS_' + self.exper + '.csv' - csv_path = os.path.join(csv_dir, csv_file) - with open(csv_path, 'a') as out_file: - for page_title, rms_list in self.data_dict.items(): - out_file.write('{0}: '.format(page_title)) - for rms_val in rms_list: - out_file.write('{0}'.format(str(rms_val))) - out_file.write('\n') - - -def start(exper='experiment', control='control'): - """ - Make some instances of the rms class. - - exper = experiment jobid (optional) - control = control jobid (optional). - """ - # Loop over all regions. Regions are: - # 0 = globe - # 1 = north of 30N - # 2 = south of 30S - # 3 = tropical land - # 4 = tropical ocean - # 5 = east asia - # 6 = north atlantic and europe - # 7 = australian land - # 8 = sahara and north african coast - # 9 = tropical northern africa - # 10 = east africa - # 11 = central africa - # 12 = southern africa - # 13 = african land - - # Make a list of the regions - region_list = [ - 'global', 'north', 'south', 'tropical_land', 'tropical_ocean', - 'east_asia', 'natl_europe', 'australia_land', 'sahara_n_africa', - 'tropical_n_africa', 'east_africa', 'central_africa', - 'southern_africa', 'africa_land' - ] - - # Make a blank list that will hold the rms classes - rms_list = RMSLISTCLASS() - - # Make the rms classes. This will also start making the summary web pages. - for region in region_list: - rms_list.append(RMSCLASS(region, exper=exper, control=control)) - - return rms_list - - -def calc_all(rms_list, toplot_cube, mask_cube, page_title): - """ - Loop through all the regions. - - Calculate rms values and store them in the class. - rms_list = list of rms classes that stores all the information to do - with the rms regions and the resulting answers. - toplot_cube = (cube) cube that is to be plotted - page_title = (str) the page title for this plot. - """ - # Run through the loop, calculating rms values for each region - rms_float_list = [] - n_rms = len(rms_list) - for i in range(n_rms): - rms_float = rms_list[i].calc_wrapper(toplot_cube, mask_cube, - page_title) - rms_float_list.append(rms_float) - - # Return the global rms value - return rms_float_list[0] - - -def end(rms_list, csv_dir): - """ - Finish using the rms class. - - rms_list = list of rms classes that stores all the information to do with - the rms regions and the resulting answers. 
- """ - for rms_instance in rms_list: - rms_instance.tofile(csv_dir) diff --git a/esmvaltool/diag_scripts/autoassess/_valmod_radiation.py b/esmvaltool/diag_scripts/autoassess/_valmod_radiation.py deleted file mode 100644 index dd1f330623..0000000000 --- a/esmvaltool/diag_scripts/autoassess/_valmod_radiation.py +++ /dev/null @@ -1,81 +0,0 @@ -#! /usr/local/sci/bin/python -""" -Port for ESMValTool v2 from v1. - -Uses: ESMValTool v2, Python 3.x -Valeriu Predoi, UREAD, July 2018 - -The script is well different than the v1 vresion but executes the -same set of functionalities; script name kept the same as in v1 -for historical purposes. -""" - -import iris -import iris.analysis.cartography - - -def get_cube_ready(cube): - """Remve unwanted coords and check bounds.""" - to_remove_list = [ - 'forecast_reference_time', 'forecast_period', 'source', 'season', - 'time' - ] - for coord in cube.coords(): - if coord.name() in to_remove_list: - cube.remove_coord(coord) - if not cube.coord(axis='x').has_bounds(): - cube.coord(axis='x').guess_bounds() - if not cube.coord(axis='y').has_bounds(): - cube.coord(axis='y').guess_bounds() - - return cube - - -def area_avg(cube, coord1=None, coord2=None): - """ - Get area average. - - Perform an area average of a cube using weights to account for - changes in latitude. - """ - for coord in (coord1, coord2): - if not cube.coord(coord).has_bounds(): - cube.coord(coord).guess_bounds() - grid_areas = iris.analysis.cartography.area_weights(cube) - result = cube.collapsed( - [coord1, coord2], iris.analysis.MEAN, weights=grid_areas) - - return result - - -def perform_equation(dataset_1, dataset_2, analysis_type): - """ - Perform a simple cube operation. - - analysis_type = type of analysis (zonal_mean, vertical_mean,...) - This can be easily adapted for more than one type of operation - by passing an argument e.g. 'sum_of_squares' etc. 
-    # Make sure all the fields have correct units
-    dataset_1_ready = get_cube_ready(dataset_1)
-    dataset_2_ready = get_cube_ready(dataset_2)
-
-    if analysis_type == 'zonal_mean':
-        dataset_1_mean = dataset_1_ready.collapsed('longitude',
-                                                   iris.analysis.MEAN)
-        dataset_2_mean = dataset_2_ready.collapsed('longitude',
-                                                   iris.analysis.MEAN)
-
-    elif analysis_type == 'vertical_mean':
-        dataset_1_mean = dataset_1_ready.collapsed('pressure',
-                                                   iris.analysis.MEAN)
-        dataset_2_mean = dataset_2_ready.collapsed('pressure',
-                                                   iris.analysis.MEAN)
-    elif analysis_type == 'lat_lon':
-        dataset_1_mean = dataset_1_ready
-        dataset_2_mean = dataset_2_ready
-
-    # Perform simple difference
-    toplot_cube = dataset_1_mean - dataset_2_mean
-
-    return toplot_cube
diff --git a/esmvaltool/diag_scripts/autoassess/autoassess_area_base.py b/esmvaltool/diag_scripts/autoassess/autoassess_area_base.py
index d3e50b0c55..df45868ad9 100644
--- a/esmvaltool/diag_scripts/autoassess/autoassess_area_base.py
+++ b/esmvaltool/diag_scripts/autoassess/autoassess_area_base.py
@@ -51,7 +51,7 @@ def _import_package(area):
 
 def _fix_cube(cube_list):
     """Apply some ad hoc fixes to cubes."""
-    # force add a long_name; supermeans uses extract_strict
+    # force add a long_name; supermeans uses extract_cube
     # and for derived vars there is only
     # invalid_standard_name which is an attribute
     for cube in cube_list:
@@ -152,23 +152,11 @@ def _get_filelists(cfg):
             fullpath_file = filename
             if base_file.split('_')[1] == cfg['control_model']:
                 metrics_dict['control_model'].append(fullpath_file)
-                if 'fx_files' in attributes:
-                    for fx_file in cfg['fx']:
-                        metrics_dict['control_model'].append(
-                            attributes['fx_files'][fx_file])
             if base_file.split('_')[1] == cfg['exp_model']:
                 metrics_dict['exp_model'].append(fullpath_file)
-                if 'fx_files' in attributes:
-                    for fx_file in cfg['fx']:
-                        metrics_dict['exp_model'].append(
-                            attributes['fx_files'][fx_file])
             if additional_metrics and base_file.split(
                     '_')[1] in cfg['additional_metrics']:
                 metrics_dict[base_file.split('_')[1]].append(fullpath_file)
-                if 'fx_files' in attributes:
-                    for fx_file in cfg['fx']:
-                        metrics_dict[base_file.split('_')[1]].append(
-                            attributes['fx_files'][fx_file])
             if obs_types and base_file.split('_')[1] in obs_types:
                 obs_list.append(fullpath_file)
 
@@ -213,15 +201,21 @@ def create_output_tree(out_dir, ref_suite_id, exp_suite_id, area):
 
     `out_dir`/`exp_suite_id`_vs_`ref_suite_id`/`area`
 
-    If the leaf directory `area` exists raises OSError.
+    Parameters
+    ----------
+    out_dir: str
+        Base directory for output.
+    ref_suite_id: str
+        Suite Id of reference model run.
+    exp_suite_id: str
+        Suite Id of test model run.
+    area: str
+        Name of assessment area.
+
+    Returns
+    -------
+    Path to area output directory.
 
-    :param str out_dir: Base directory for output.
-    :param str suite_id1: Suite Id of reference model run.
-    :param str suite_id2: Suite Id of test model run.
-    :param str area: Name of asssessment area.
-    :returns: Path to area output directory.
-    :rtype: str
-    :raises: OSError.
     """
     assessment_name = exp_suite_id + '_vs_' + ref_suite_id
     # make sure out_dir exists in output folder
@@ -238,16 +232,27 @@ def create_tmp_dir(tmp_dir, ref_suite_id, exp_suite_id, area):
     """
-    Create directory tree for temporary data according to the following scheme.
+    Create directory tree for temporary data.
+
+    The structure is:
 
     `tmp_dir`/`exp_suite_id`_vs_`ref_suite_id`_random/`area`_random
 
-    :param str tmp_dir: Base temporary directory.
-    :param str suite_id1: Suite ID of reference model run.
-    :param str suite_id2: Suite ID of test model run.
-    :param str area: Name of asssessment area.
-    :returns: Path to area temporary directory.
-    :rtype: str.
+    Parameters
+    ----------
+    tmp_dir: str
+        Base temporary directory.
+    ref_suite_id: str
+        Suite ID of reference model run.
+    exp_suite_id: str
+        Suite ID of test model run.
+    area: str
+        Name of assessment area.
+
+    Returns
+    -------
+    Path to area temporary directory.
+
+    """
     assessment_name = exp_suite_id + '_vs_' + ref_suite_id
     # create unique temporary folder in tmp dir
@@ -287,8 +292,9 @@ def _setup_input(cfg):
     logger.info("Saved control data cubes: %s", str(all_cubelists))
 
     # separately process the obs's that dont need metrics
-    if cfg['obs_models']:
-        _process_obs(cfg, obs_list, obs_loc)
+    if 'obs_models' in cfg:
+        if cfg['obs_models']:
+            _process_obs(cfg, obs_list, obs_loc)
 
     return tmp_dir, obs_loc, ancil_dir
 
@@ -331,17 +337,36 @@ def _create_run_dict(cfg):
     run['from_annual'] = datetime.datetime(year, month, day)
 
     year, month, day = [int(s) for s in run['end'].split('/')]
-    run['to_instantaneous'] = datetime.datetime(year, 11, 30)
-    run['to_daily'] = datetime.datetime(year, 11, 30)
-    run['to_monthly'] = datetime.datetime(year, 11, 1)
-    run['to_seasonal'] = datetime.datetime(year, 9, 1)
+    run['to_instantaneous'] = datetime.datetime(year, 12, 1)
+    run['to_daily'] = datetime.datetime(year, 12, 1)
+    run['to_monthly'] = datetime.datetime(year, 12, 1)
+    run['to_seasonal'] = datetime.datetime(year, 12, 1)
     run['to_annual'] = datetime.datetime(year - 1, 12, 1)
     return run
 
 
 def run_area(cfg):
-    """Kick start the area diagnostic."""
+    """
+    Kick start the area diagnostic.
+
+    Takes the settings metadata file with all the diagnostic
+    and preprocessing settings and sets up the running workflow
+    for any of the autoassess assessment areas.
+
+    All the assessment area-specific parameters (mandatory and optional)
+    are set in _create_run_dict; that function is the main gateway for
+    this function.
+
+    Available assessment areas: stratosphere.
+
+    Parameters
+    ----------
+    cfg: dict
+        contents of the metadata file as produced by the preprocessor
+        in dictionary format.
+
+    """
     run_obj = _create_run_dict(cfg)
     area_out_dir = create_output_tree(run_obj['out_dir'], run_obj['suite_id1'],
                                       run_obj['suite_id2'], run_obj['_area'])
diff --git a/esmvaltool/diag_scripts/autoassess/autoassess_radiation_rms.py b/esmvaltool/diag_scripts/autoassess/autoassess_radiation_rms.py
deleted file mode 100644
index 5687518dd7..0000000000
--- a/esmvaltool/diag_scripts/autoassess/autoassess_radiation_rms.py
+++ /dev/null
@@ -1,118 +0,0 @@
-"""
-Port to Version 2 with implementation of v2-specific changes.
-
-Uses: ESMValTool v2, Python3.x
-Valeriu Predoi, UREAD, July 2018
-
-Porting replicates the functionality to minimum errors.
-
-Original Description from Version 1 Diagnostic:
-;;###########################################################################
-;; AutoAssess_radiation_rms.py
-;;###########################################################################
-;; Description
-;;    This script is the RMS error metric script of
-;;    AutoAssess radiation
-;; ###########################################################################
-
-This diagnostic uses CMIP5 data; to switch to CMIP6 change _CMIP_TYPE.
-""" - -import os -import logging -import iris -from esmvaltool.diag_scripts.autoassess._rms_radiation import (start, end, - calc_all) -from esmvaltool.diag_scripts.autoassess._valmod_radiation import ( - perform_equation) -from esmvaltool.diag_scripts.shared import ( - group_metadata, run_diagnostic, get_control_exper_obs, apply_supermeans) - -logger = logging.getLogger(os.path.basename(__file__)) - -_CMIP_TYPE = 'CMIP5' - - -def apply_rms(data_1, data_2, cfg, component_dict, var_name): - """Compute RMS for any data1-2 combination.""" - data_names = [model['dataset'] for model in component_dict.values()] - plot_title = var_name + ': ' + data_names[0] + ' vs ' + data_names[1] - rms_list = start(data_names[0], data_names[1]) - analysis_type = cfg['analysis_type'] - landsea_mask_file = os.path.join( - os.path.dirname(__file__), 'autoassess_source', cfg['landsea_mask']) - landsea_mask_cube = iris.load_cube(landsea_mask_file) - data1_vs_data2 = perform_equation(data_1, data_2, analysis_type) - - # call to rms.calc_all() to compute rms; rms.end() to write results - calc_all(rms_list, data1_vs_data2, landsea_mask_cube, plot_title) - end(rms_list, cfg['work_dir']) - - -def do_preamble(cfg): - """Execute some preamble functionality.""" - # get data - input_data = cfg['input_data'].values() - grouped_input_data = group_metadata( - input_data, 'short_name', sort='dataset') - - return input_data, grouped_input_data - - -def main(cfg): - """Execute the radiation rms diag.""" - logger.setLevel(cfg['log_level'].upper()) - input_data, grouped_input_data = do_preamble(cfg) - - # select variables and their corresponding - # obs files - for short_name in grouped_input_data: - logger.info("Processing variable %s", short_name) - - # control, experiment and obs's - ctrl, exper, obslist = get_control_exper_obs(short_name, input_data, - cfg, _CMIP_TYPE) - - # apply the supermeans - ctrl_sm, exper_sm, obs_sm_list = apply_supermeans(ctrl, exper, obslist) - - # assemble a dict that contains various params depending - # on the data combinations for RMS computations - # control-experiment - data_component_dict = {'ct-ex': {'ctrl': ctrl, 'exper': exper}} - logger.info("Computing CONTROL-EXPERIMENT RMS...") - apply_rms(ctrl_sm, exper_sm, cfg, data_component_dict['ct-ex'], - short_name) - if obs_sm_list: - for obs, obsfile in zip(obs_sm_list, obslist): - data_component_dict = { - 'ct-obs': { - 'ctrl': ctrl, - 'obs': obsfile - }, - 'ex-obs': { - 'exper': exper, - 'obs': obsfile - } - } - - # ctrl-obs - logger.info("Computing CONTROL-OBS RMS...") - apply_rms(ctrl_sm, obs, cfg, data_component_dict['ct-obs'], - short_name) - # exper-obs - logger.info("Computing EXPERIMENT-OBS RMS...") - apply_rms(exper_sm, obs, cfg, data_component_dict['ex-obs'], - short_name) - else: - # only ctrl-exper - data_component_dict = {'ct-ex': {'ctrl': ctrl, 'exper': exper}} - logger.info("Computing CONTROL-EXPERIMENT RMS...") - apply_rms(ctrl_sm, exper_sm, cfg, data_component_dict['ct-ex'], - short_name) - - -if __name__ == '__main__': - - with run_diagnostic() as config: - main(config) diff --git a/esmvaltool/diag_scripts/autoassess/land_surface_permafrost/permafrost.py b/esmvaltool/diag_scripts/autoassess/land_surface_permafrost/permafrost.py index 2aa56b3daf..0844132017 100644 --- a/esmvaltool/diag_scripts/autoassess/land_surface_permafrost/permafrost.py +++ b/esmvaltool/diag_scripts/autoassess/land_surface_permafrost/permafrost.py @@ -12,10 +12,32 @@ import iris.util as ut from esmvaltool.diag_scripts.autoassess.loaddata import load_run_ss 
-# from esmvaltool.diag_scripts.shared._supermeans import get_supermean
+from esmvaltool.diag_scripts.shared._base import ProvenanceLogger
 
 from . import permafrost_koven_sites
 
 
+def get_provenance_record(caption, run):
+    """Create a provenance record describing the diagnostic data and plot."""
+    record = {
+        'caption': caption,
+        'statistics': ['mean'],
+        'domains': ['global'],
+        'plot_types': ['map', 'metrics'],
+        'authors': [
+            'burke_eleanor',
+            'sellar_alistair',
+        ],
+        'references': [
+            'brown02nsidc',
+            'legates90tac',
+            'koven13jclim',
+        ],
+        'ancestors': run,
+    }
+
+    return record
+
+
 # main permafrost subroutine
 def land_permafrost_top(run):
     """
@@ -23,12 +45,16 @@
 
     Code development Eleanor Burke.
 
-    Arguments:
-        run - dictionary containing model run metadata
-              (see auto_assess/model_run.py for description)
+    Parameters
+    ----------
+    run: dict
+        dictionary containing model run metadata
+        (see auto_assess/model_run.py for description)
 
-    Returns:
-        metrics - dictionary of metrics names and values
+    Returns
+    -------
+    metrics: dict
+        dictionary of metrics names and values
 
     also produces image files in the current working directory
 
     """
@@ -108,8 +134,9 @@ def permafrost_area(soiltemp, airtemp, landfrac, run):
     # set all non-masked values to 1 for area calculation
     # (may be a better way of doing this but I'm not sure what it is)
     pf_periods = pf_periods / pf_periods
-    # mask for land area also
-    pf_periods = pf_periods * mask
+    # mask for land area also; `sftlf` holds percent data, so
+    # divide by 100 to convert percent to absolute fraction
+    pf_periods = pf_periods * mask / 100.
 
     # calculate the area of permafrost
     # Generate area-weights array. This method requires bounds on lat/lon
@@ -162,6 +189,16 @@ def permafrost_area(soiltemp, airtemp, landfrac, run):
                   run['runid']))
     plt.savefig('pf_extent_asia_' + run['runid'] + '.png')
 
+    # record provenance
+    plot_file = 'pf_extent_asia_' + run['runid']
+    caption = 'Permafrost extent & zero degree isotherm ({})'.format(
+        run['runid'])
+    provenance_record = get_provenance_record(caption, run)
+    cfg = {}
+    cfg['run_dir'] = run['out_dir']
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(plot_file, provenance_record)
+
     # defining metrics for return up to top level
     metrics = {
         'permafrost area': pf_area,
@@ -193,7 +230,7 @@ def num_frozen(data, threshold, axis, frozen_length):
     # Find the windows "full of True-s" (along the added 'window axis').
     full_windows = np.all(hit_windows, axis=axis + 1)
     # Count points fulfilling the condition (along the time axis).
-    frozen_point_counts = np.sum(full_windows, axis=axis, dtype=int)
+    frozen_point_counts = np.sum(full_windows, axis=axis, dtype=np.int64)
 
     return frozen_point_counts
 
@@ -210,7 +247,7 @@
     name_constraint = iris.Constraint(name='land_area_fraction')
     cubes_path = os.path.join(supermean_data_dir, 'cubeList.nc')
     cubes = iris.load(cubes_path)
-    cube = cubes.extract_strict(name_constraint)
+    cube = cubes.extract_cube(name_constraint)
 
     return cube
 
@@ -220,25 +257,22 @@ def get_nonice_mask(run):
     """
     Get the land points without ice.
 
-    Need to read the soil moisture data from the supermeans
+    Derive ice mask from points that have zero liquid water content.
+ """ - # TODO: currently set to mrsofc: soil_moisture_content_at_field_capacity supermean_data_dir = os.path.join(run['data_root'], run['runid'], run['_area'] + '_supermeans') # m01s08i223 - # TODO: original code - # cube = get_supermean('moisture_content_of_soil_layer', 'ann', - # supermean_data_dir) - # replaced with new time-invariant variable + # use mrsos (moisture in surface soil layer), Lmon name_constraint = iris.Constraint( - name='soil_moisture_content_at_field_capacity') + name='mass_content_of_water_in_soil_layer' + ) cubes_path = os.path.join(supermean_data_dir, 'cubeList.nc') cubes = iris.load(cubes_path) - cube = cubes.extract_strict(name_constraint) - - # TODO: mrsofc does not have depth - # cube = cube.extract(iris.Constraint(depth=2.0)) # layer from 1m to 3m + cube = cubes.extract_cube(name_constraint) + # mrsol data comes a time-lat-lon, collapse on time + cube = cube.collapsed('time', iris.analysis.MEAN) # make it into a mask of ones - extract first layer # use masked_values for floating point fuzzy equals @@ -265,9 +299,9 @@ def koven_temp_offsets(soiltemp, airtemp): # interpolate to depth required # the soil temperatures are for the middle of the layer not the bottom of # the layer - soiltemp_surf = iris.analysis.interpolate.linear(soiltemp, - [('depth', 0.0)]) - soiltemp_1m = iris.analysis.interpolate.linear(soiltemp, [('depth', 1.0)]) + linear = iris.analysis.Linear() + soiltemp_surf = soiltemp.interpolate([('depth', 0.0)], linear) + soiltemp_1m = soiltemp.interpolate([('depth', 1.0)], linear) # extract points for eachsite airtemp_1d = extract_sites(ex_points, airtemp) @@ -278,10 +312,11 @@ def koven_temp_offsets(soiltemp, airtemp): # assign metrics metrics = {} - metrics['offset 1m minus surface'] = np.median(soiltemp_1m_1d - - soiltemp_surf_1d) - metrics['offset surface minus air'] = np.median(soiltemp_surf_1d - - airtemp_1d) + off_surf = soiltemp_1m_1d - soiltemp_surf_1d + off_air = soiltemp_surf_1d - airtemp_1d + metrics['offset 1m minus surface'] = np.ma.median(off_surf) + metrics['offset surface minus air'] = np.ma.median(off_air) + return metrics @@ -305,11 +340,10 @@ def koven_temp_atten(soiltemp, airtemp): # interpolate the log to the correct depth soiltemp_log = iris.analysis.maths.log(soiltemp_ampl) - soiltemp_log_surf = iris.analysis.interpolate.linear( - soiltemp_log, [('depth', 0.0)]) + linear = iris.analysis.Linear() + soiltemp_log_surf = soiltemp_log.interpolate([('depth', 0.0)], linear) soiltemp_ampl_surf = iris.analysis.maths.exp(soiltemp_log_surf) - soiltemp_log_1m = iris.analysis.interpolate.linear(soiltemp_log, - [('depth', 1.0)]) + soiltemp_log_1m = soiltemp_log.interpolate([('depth', 1.0)], linear) soiltemp_ampl_1m = iris.analysis.maths.exp(soiltemp_log_1m) # extract points for eachsite diff --git a/esmvaltool/diag_scripts/autoassess/land_surface_snow/snow.py b/esmvaltool/diag_scripts/autoassess/land_surface_snow/snow.py index 6d2c4f52bf..66cacb8ba6 100644 --- a/esmvaltool/diag_scripts/autoassess/land_surface_snow/snow.py +++ b/esmvaltool/diag_scripts/autoassess/land_surface_snow/snow.py @@ -4,7 +4,7 @@ import numpy as np import iris -from esmvaltool.preprocessor._regrid import regrid +from esmvalcore.preprocessor import regrid from esmvaltool.diag_scripts.shared._supermeans import get_supermean diff --git a/esmvaltool/diag_scripts/autoassess/land_surface_soilmoisture/soilmoisture.py b/esmvaltool/diag_scripts/autoassess/land_surface_soilmoisture/soilmoisture.py index be08fa9f50..0533b94eed 100644 --- 
a/esmvaltool/diag_scripts/autoassess/land_surface_soilmoisture/soilmoisture.py +++ b/esmvaltool/diag_scripts/autoassess/land_surface_soilmoisture/soilmoisture.py @@ -2,103 +2,178 @@ import os import logging +import csv +from collections.abc import Iterable import numpy as np import iris -from esmvaltool.preprocessor._regrid import regrid -from esmvaltool.diag_scripts.shared._supermeans import get_supermean +from esmvalcore.preprocessor import regrid +from esmvaltool.diag_scripts.shared._base import ProvenanceLogger +from esmvaltool.diag_scripts.shared import ( + group_metadata, + run_diagnostic, + save_data, +) +# Order of seasons must agree with preprocessor definition in recipe +SEASONS = ("djf", "mam", "jja", "son") logger = logging.getLogger(__name__) -def land_sm_top(run): +def get_provenance_record(caption, ancestor_filenames): + """Create a provenance record describing the diagnostic data and plot.""" + record = { + 'caption': caption, + 'statistics': ['mean'], + 'domains': ['global'], + 'plot_type': 'metrics', + 'authors': [ + 'rumbold_heather', + 'sellar_alistair', + ], + 'references': [ + 'esacci-soilmoisture', + 'dorigo17rse', + 'gruber19essd', + ], + "ancestors": ancestor_filenames, + } + + return record + + +def write_metrics(output_dir, metrics, config, ancestors): + """Write metrics to CSV file. + + The CSV file will have the name ``metrics.csv`` and can be + used for the normalised metric assessment plot. + + Parameters + ---------- + output_dir : string + The full path to the directory in which the CSV file will be written. + metrics : dictionary of metric,value pairs + The seasonal data to write. + config : dictionary + ESMValTool configuration object + ancestors : list + Filenames of input files for provenance """ - Calculate median absolute errors for soil mosture against CCI data. + os.makedirs(output_dir, exist_ok=True) - Arguments: - run - dictionary containing model run metadata - (see auto_assess/model_run.py for description) + file_name = "metrics.csv" + file_path = os.path.join(output_dir, file_name) - Returns: - metrics - dictionary of metrics names and values + with open(file_path, "w", newline="", encoding="utf-8") as csvfile: + csv_writer = csv.writer(csvfile) + for line in metrics.items(): + csv_writer.writerow(line) + record_provenance(file_path, config, ancestors) + + +def volumetric_soil_moisture(model_file, constr_season): """ - supermean_data_dir = os.path.join(run['data_root'], run['runid'], - run['_area'] + '_supermeans') + Read moisture mass content and convert to volumetric soil moisture. - seasons = ['djf', 'mam', 'jja', 'son'] + Parameters + ---------- + model_file : string + Path to model file + constr_season : iris constraint + Constraint on season to load - # Constants - # density of water and ice + Returns + ------- + vol_sm1_run : cube + Volumetric soil moisture + """ + # Constant: density of water rhow = 1000. - rhoi = 917. 
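For orientation: the conversion performed by volumetric_soil_moisture() below is a pure unit change, from water mass per unit area to a volume fraction. A minimal numpy sketch of the same arithmetic, with an invented layer thickness and invented mrsos values (none of these numbers come from the recipe):

import numpy as np

rhow = 1000.0                    # density of water (kg m-3)
dz1 = 0.1                        # hypothetical top-layer thickness (m)
mrsos = np.array([7.0, 14.0])    # invented water content values (kg m-2)

# (kg m-2) / (kg m-3) gives metres of water; divide by layer depth -> m3 m-3
vol_sm = mrsos / (rhow * dz1)
print(vol_sm)                    # [0.07 0.14]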
+
+    # m01s08i223
+    # CMOR name: mrsos (soil moisture in top model layer kg/m2)
+    mrsos = iris.load_cube(
+        model_file,
+        "mass_content_of_water_in_soil_layer" & constr_season
+    )
+
+    # Set soil moisture to missing data where no soil (moisture=0)
+    np.ma.masked_where(mrsos.data == 0, mrsos.data, copy=False)
+
     # first soil layer depth
-    dz1 = 0.1
-
-    # Work through each season
-    metrics = dict()
-    for season in seasons:
-        fname = 'ecv_soil_moisture_{}.nc'.format(season)
-        clim_file = os.path.join(run['climfiles_root'], fname)
-        ecv_clim = iris.load_cube(clim_file)
-        # correct invalid units
-        if (ecv_clim.units == 'unknown' and
-                'invalid_units' in ecv_clim.attributes):
-            if ecv_clim.attributes['invalid_units'] == 'm^3m^-3':
-                ecv_clim.units = 'm3 m-3'
-
-        # m01s08i223
-        # standard_name: mrsos
-        smcl_run = get_supermean('moisture_content_of_soil_layer', season,
-                                 supermean_data_dir)
-
-        # m01s08i229i
-        # standard_name: ???
-        # TODO: uncomment when implemented
-        # sthu_run = get_supermean(
-        #     'mass_fraction_of_unfrozen_water_in_soil_moisture', season,
-        #     supermean_data_dir)
-
-        # m01s08i230
-        # standard_name: ??? soil_frozen_water_content - mrfso
-        # TODO: uncomment when implemented
-        # sthf_run = get_supermean(
-        #     'mass_fraction_of_frozen_water_in_soil_moisture', season,
-        #     supermean_data_dir)
-
-        # TODO: remove after correct implementation
-        sthu_run = smcl_run
-        sthf_run = smcl_run
-
-        # extract top soil layer
-        cubes = [smcl_run, sthu_run, sthf_run]
-        for i, cube in enumerate(cubes):
-            if cube.coord('depth').attributes['positive'] != 'down':
-                logger.warning('Cube %s depth attribute is not down', cube)
-            top_level = min(cube.coord('depth').points)
-            topsoil = iris.Constraint(depth=top_level)
-            cubes[i] = cube.extract(topsoil)
-        smcl_run, sthu_run, sthf_run = cubes
-
-        # Set all sea points to missing data np.nan
-        smcl_run.data[smcl_run.data < 0] = np.nan
-        sthu_run.data[sthu_run.data < 0] = np.nan
-        sthf_run.data[sthf_run.data < 0] = np.nan
-
-        # set soil moisture to missing data on ice points (i.e. no soil)
-        sthu_plus_sthf = (dz1 * rhow * sthu_run) + (dz1 * rhoi * sthf_run)
-        ice_pts = sthu_plus_sthf.data == 0
-        sthu_plus_sthf.data[ice_pts] = np.nan
-
-        # Calculate the volumetric soil moisture in m3/m3
-        theta_s_run = smcl_run / sthu_plus_sthf
-        vol_sm1_run = theta_s_run * sthu_run
-        vol_sm1_run.units = "m3 m-3"
-        vol_sm1_run.long_name = "Top layer Soil Moisture"
+    dz1 = mrsos.coord('depth').bounds[0, 1] - \
+        mrsos.coord('depth').bounds[0, 0]
+
+    # Calculate the volumetric soil moisture in m3/m3
+    # volumetric soil moisture = volume of water / volume of soil layer
+    #   = depth equivalent of water / thickness of soil layer
+    #   = (soil moisture content (kg m-2) / water density (kg m-3)) /
+    #     soil layer thickness (m)
+    #   = mrsos / (rhow * dz1)
+    vol_sm1_run = mrsos / (rhow * dz1)
+    vol_sm1_run.units = "m3 m-3"
+    vol_sm1_run.long_name = "Top layer Soil Moisture"
+
+    return vol_sm1_run
+
+
+def flatten(list_of_lists):
+    """
+    Convert list of lists into a flat list, allowing some items to be non-list.
+
+    Parameters
+    ----------
+    list_of_lists : list
+        List containing iterables to flatten, plus optionally non-list items
+
+    Returns
+    -------
+    flattened : list
+        Flattened list with one level of nesting removed
+    """
+    flattened = []
+    for item in list_of_lists:
+        if isinstance(item, Iterable) and not isinstance(item, (str, bytes)):
+            flattened.extend(item)
+        else:
+            flattened.append(item)
+
+    return flattened
+
+
+def land_sm_top(clim_file, model_file, model_dataset, config, ancestors):
+    """
+    Calculate median absolute errors for soil moisture against CCI data.
+
+    Parameters
+    ----------
+    clim_file : string
+        Path to observation climatology file
+    model_file : list
+        Paths to model files
+    model_dataset : string
+        Name of model dataset
+    config : dict
+        ESMValTool configuration object
+    ancestors : list
+        Filenames of input files for provenance
+
+    Returns
+    -------
+    metrics: dict
+        a dictionary of metrics names and values
+    """
+    # Work through each season
+    metrics = {}
+    for index, season in enumerate(SEASONS):
+
+        constr_season = iris.Constraint(season_number=index)
+        ecv_clim = iris.load_cube(clim_file, constr_season)
+
+        vol_sm1_run = volumetric_soil_moisture(model_file, constr_season)

         # update the coordinate system ECV data with a WGS84 coord system
-        # TODO: ask Heather why this is needed
-        # TODO: who is Heather?
         # unify coord systems for regridder
         vol_sm1_run.coord('longitude').coord_system = \
             iris.coord_systems.GeogCS(semi_major_axis=6378137.0,
@@ -115,16 +190,81 @@ def land_sm_top(run):

         # Interpolate to the grid of the climatology and form the difference
         vol_sm1_run = regrid(vol_sm1_run, ecv_clim, 'linear')
+
+        # mask invalids
+        vol_sm1_run.data = np.ma.masked_invalid(vol_sm1_run.data)
+        ecv_clim.data = np.ma.masked_invalid(ecv_clim.data)
+
         # diff the cubes
         dff = vol_sm1_run - ecv_clim

-        # Remove NaNs from data before aggregating statistics
-        dff.data = np.ma.masked_invalid(dff.data)
+        # save output and populate metric
+        caption = f"{model_dataset} minus CCI soil moisture clim for {season}"
+        provenance_record = get_provenance_record(caption, ancestors)
+        save_data(f"soilmoist_diff_{model_dataset}_{season}",
+                  provenance_record, config, dff)

-        # save output
-        iris.save(dff, os.path.join(run['dump_output'],
-                                    'soilmoist_diff_{}.nc'.format(season)))
-        name = 'soilmoisture MedAbsErr {}'.format(season)
+        name = f"soilmoisture MedAbsErr {season}"
         metrics[name] = float(np.ma.median(np.ma.abs(dff.data)))

     return metrics
+
+
+def record_provenance(diagnostic_file, config, ancestors):
+    """Record provenance."""
+    caption = f"Autoassess soilmoisture MedAbsErr for {SEASONS}"
+    provenance_record = get_provenance_record(caption, ancestors)
+    with ProvenanceLogger(config) as provenance_logger:
+        provenance_logger.log(diagnostic_file, provenance_record)
+
+
+def main(config):
+    """
+    Top-level function for soil moisture metrics.
+
+    Parameters
+    ----------
+    config : dict
+        The ESMValTool configuration.
+    """
+    input_data = config["input_data"]
+
+    # Separate OBS from model datasets
+    # (and check there is only one obs dataset)
+    obs = [v for v in input_data.values() if v["project"] == "OBS"]
+    if len(obs) != 1:
+        msg = f"Expected exactly 1 OBS dataset: found {len(obs)}"
+        raise RuntimeError(msg)
+    clim_file = obs[0]["filename"]
+
+    models = group_metadata(
+        [v for v in input_data.values() if v["project"] != "OBS"],
+        "dataset")
+
+    for model_dataset, group in models.items():
+        # 'model_dataset' is the name of the model dataset.
+ # 'group' is a list of dictionaries containing metadata. + logger.info("Processing data for %s", model_dataset) + model_file = [item["filename"] for item in group] + + # Input filenames for provenance + ancestors = flatten([model_file, clim_file]) + + # Calculate metrics + metrics = land_sm_top(clim_file, model_file, model_dataset, config, + ancestors) + + # Write metrics + metrics_dir = os.path.join( + config["plot_dir"], + f"{config['exp_model']}_vs_{config['control_model']}", + config["area"], + model_dataset, + ) + + write_metrics(metrics_dir, metrics, config, ancestors) + + +if __name__ == "__main__": + with run_diagnostic() as CONFIG: + main(CONFIG) diff --git a/esmvaltool/diag_scripts/autoassess/land_surface_surfrad/surfrad.py b/esmvaltool/diag_scripts/autoassess/land_surface_surfrad/surfrad.py index 717326450d..4165437e57 100644 --- a/esmvaltool/diag_scripts/autoassess/land_surface_surfrad/surfrad.py +++ b/esmvaltool/diag_scripts/autoassess/land_surface_surfrad/surfrad.py @@ -6,20 +6,45 @@ import iris -from esmvaltool.preprocessor._regrid import regrid +from esmvalcore.preprocessor import regrid +from esmvaltool.diag_scripts.shared._base import ProvenanceLogger from esmvaltool.diag_scripts.shared._supermeans import get_supermean -def land_surf_rad(run): - """ - Compute median absolute errors against CERES-EBAF data. +def get_provenance_record(caption, run): + """Create a provenance record describing the diagnostic data and plot.""" + record = { + 'caption': caption, + 'statistics': ['mean'], + 'domains': ['global'], + 'plot_type': 'metrics', + 'authors': [ + 'edwards_john', + 'sellar_alistair', + ], + 'references': [ + 'loeb19jclim', + 'kato18ebaf', + ], + 'ancestors': run, + } + + return record - Arguments: - run - dictionary containing model run metadata - (see auto_assess/model_run.py for description) - Returns: - metrics - dictionary of metrics names and values. +def land_surf_rad(run): + """Compute median absolute errors against CERES-EBAF data. + + Parameters + ---------- + run: dict + dictionary containing model run metadata + (see auto_assess/model_run.py for description) + + Returns + ------- + metrics: dict + dictionary of metrics names and values. 
""" supermean_data_dir = os.path.join(run['data_root'], run['runid'], run['_area'] + '_supermeans') @@ -32,7 +57,7 @@ def land_surf_rad(run): # replaced with a constant sftlf mask; original was # lnd = get_supermean('land_area_fraction', 'ann', supermean_data_dir) cubes = iris.load(os.path.join(supermean_data_dir, 'cubeList.nc')) - lnd = cubes.extract_strict(iris.Constraint(name='land_area_fraction')) + lnd = cubes.extract_cube(iris.Constraint(name='land_area_fraction')) metrics = dict() for season in rad_seasons: @@ -72,9 +97,9 @@ def land_surf_rad(run): # apply the mask reg_run_fld.data = np.ma.masked_array( - reg_run_fld.data, mask=(lnd.data < 0.98)) + reg_run_fld.data, mask=(lnd.data > 90.)) reg_ebaf_fld.data = np.ma.masked_array( - reg_ebaf_fld.data, mask=(lnd.data < 0.98)) + reg_ebaf_fld.data, mask=(lnd.data > 90.)) # do a simple diff dff = reg_run_fld - reg_ebaf_fld @@ -82,4 +107,16 @@ def land_surf_rad(run): name = "{} MedAbsErr {}".format(fld, season) metrics[name] = float(np.ma.median(np.abs(dff.data))) + # record provenance + plot_file = "Autoassess Surface Radiation metrics" + caption = '{} MedAbsErr for {}'.format(str(rad_fld), str(rad_seasons)) + provenance_record = get_provenance_record(caption, run) + cfg = {} + cfg['run_dir'] = run['out_dir'] + # avoid rewriting provenance when running the plot diag + if not os.path.isfile(os.path.join(cfg['run_dir'], + 'diagnostic_provenance.yml')): + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(plot_file, provenance_record) + return metrics diff --git a/esmvaltool/diag_scripts/autoassess/loaddata.py b/esmvaltool/diag_scripts/autoassess/loaddata.py index 7552a3c1f4..197f8711b2 100644 --- a/esmvaltool/diag_scripts/autoassess/loaddata.py +++ b/esmvaltool/diag_scripts/autoassess/loaddata.py @@ -139,6 +139,7 @@ def annual_mean(mycube): Chunks time in 365-day periods and computes means over them; Returns a cube. 
""" + coord_cat.add_year(mycube, 'time') yr_mean = mycube.aggregated_by('year', iris.analysis.MEAN) def spans_year(time): diff --git a/esmvaltool/diag_scripts/autoassess/plot_autoassess_metrics.py b/esmvaltool/diag_scripts/autoassess/plot_autoassess_metrics.py index 1bdd9f6428..201ae0c248 100644 --- a/esmvaltool/diag_scripts/autoassess/plot_autoassess_metrics.py +++ b/esmvaltool/diag_scripts/autoassess/plot_autoassess_metrics.py @@ -1,12 +1,15 @@ """Standard MO metrics plotter.""" -import os import logging +import os import sys -import iris import yaml + from esmvaltool.diag_scripts.autoassess._plot_mo_metrics import ( - read_model_metrics, read_obs_metrics, plot_nac) + plot_nac, + read_model_metrics, + read_obs_metrics, +) logger = logging.getLogger(__name__) @@ -40,10 +43,12 @@ def main(): cfg['diag_name'], vsloc, cfg['area'], control_model, 'metrics.csv') plot_title = ' '.join([cfg['area'], control_model, 'vs', exp_model]) - # Read metrics files + # Read (and record) metrics files # metrics = read_order_metrics(args.file_ord) ref = read_model_metrics(file_ref) tests = [read_model_metrics(file_exp)] + cfg['input_data'] = {'ref': {'filename': file_ref}, + 'exp': {'filename': file_exp}} # var = read_model_metrics(args.file_var) obs, acc = None, None if 'additional_metrics' in cfg: @@ -65,11 +70,11 @@ def main(): acc=acc, extend_y=False, title=plot_title, - ofile=os.path.join(cfg['plot_dir'], cfg['plot_name'] + '.png')) + ofile=cfg['plot_name'], + config=cfg) if __name__ == '__main__': - iris.FUTURE.netcdf_promote = True logging.basicConfig(format="%(asctime)s [%(process)d] %(levelname)-8s " "%(name)s,%(lineno)s\t%(message)s") main() diff --git a/esmvaltool/diag_scripts/autoassess/stratosphere/age_of_air.py b/esmvaltool/diag_scripts/autoassess/stratosphere/age_of_air.py index 861a99829f..40d6436523 100644 --- a/esmvaltool/diag_scripts/autoassess/stratosphere/age_of_air.py +++ b/esmvaltool/diag_scripts/autoassess/stratosphere/age_of_air.py @@ -135,8 +135,7 @@ def age_of_air(run): # Write age of air data to CWD outfile = '{0}_age_of_air_{1}.nc' cubelist = iris.cube.CubeList([diag1, diag2]) - with iris.FUTURE.context(netcdf_no_unlimited=True): - iris.save(cubelist, outfile.format(run['runid'], run.period)) + iris.save(cubelist, outfile.format(run['runid'], run.period)) # Calculate metrics diag1sf6 = iai.Linear(diag1, [('level_height', ZSF6_KM)]) diff --git a/esmvaltool/diag_scripts/autoassess/stratosphere/strat_metrics_1.py b/esmvaltool/diag_scripts/autoassess/stratosphere/strat_metrics_1.py index 500e0747a6..4690741d39 100644 --- a/esmvaltool/diag_scripts/autoassess/stratosphere/strat_metrics_1.py +++ b/esmvaltool/diag_scripts/autoassess/stratosphere/strat_metrics_1.py @@ -12,7 +12,6 @@ import matplotlib.pyplot as plt import numpy as np from cartopy.mpl.gridliner import LATITUDE_FORMATTER -from matplotlib.patches import Rectangle from esmvaltool.diag_scripts.autoassess.loaddata import load_run_ss @@ -29,6 +28,12 @@ def weight_lat_ave(cube): return cube.collapsed('latitude', iris.analysis.MEAN, weights=grid_areas) +def weight_cosine(cube): + """Routine to calculate weighted lat avg when there is no longitude.""" + grid_areas = iac.cosine_latitude_weights(cube) + return cube.collapsed('latitude', iris.analysis.MEAN, weights=grid_areas) + + def cmap_and_norm(cmap, levels, reverse=False): """ Generate interpolated colour map. 
@@ -64,7 +69,7 @@ def plot_zmean(cube, levels, title, log=False, ax1=None):
     ax1.set_xticks([-90, -60, -30, 0, 30, 60, 90])
     ax1.xaxis.set_major_formatter(LATITUDE_FORMATTER)
     ax1.set_ylabel('Pressure (Pa)', fontsize='small')
-    ax1.set_ylim(100000., 1000.)
+    ax1.set_ylim(100000., 10.)
     if log:
         ax1.set_yscale("log")
@@ -90,12 +95,11 @@ def plot_timehgt(cube, levels, title, log=False, ax1=None):
     new_epoch = time_coord.points[0]
     new_unit_str = 'hours since {}'
     new_unit = new_unit_str.format(time_coord.units.num2date(new_epoch))
-    ax1.xaxis.axis_date()
     ax1.xaxis.set_label(new_unit)
     ax1.xaxis.set_major_locator(mdates.YearLocator(4))
     ax1.xaxis.set_major_formatter(mdates.DateFormatter('%Y'))
     ax1.set_ylabel('Pressure (Pa)', fontsize='small')
-    ax1.set_ylim(100000., 1000.)
+    ax1.set_ylim(100000., 10.)
     if log:
         ax1.set_yscale("log")
@@ -154,10 +158,19 @@ def calc_qbo_index(qbo):
     counterdown = len(indiciesdown)

     # Did we start on an upwards or downwards cycle?
-    if indiciesdown[0] < indiciesup[0]:
-        (kup, kdown) = (0, 1)
+    if indiciesdown and indiciesup:
+        if indiciesdown[0] < indiciesup[0]:
+            (kup, kdown) = (0, 1)
+        else:
+            (kup, kdown) = (1, 0)
     else:
-        (kup, kdown) = (1, 0)
+        logger.warning('QBO metric cannot be computed; no zero crossings!')
+        logger.warning(
+            "This means the model U(30hPa, around tropics) doesn't oscillate "
+            "between positive and negative "
+            "with a period < 12 months; QBO can't be computed, set to 0."
+        )
+        (kup, kdown) = (0, 0)
     # Translate upwards and downwards indices into U wind values
     periodsmin = counterup - kup
     periodsmax = counterdown - kdown
@@ -206,6 +219,7 @@ def calc_qbo_index(qbo):
         period = period2
     else:
         period = period1
+
     return (period, ampl_west, ampl_east)
@@ -261,9 +275,9 @@ def pnj_strength(cube, winter=True):
     for nh/sh in winter and sh/nh in summer respectively.
     """
     # Extract regions of interest
-    notrop = iris.Constraint(air_pressure=lambda p: p < 8000.0)
-    nh_cons = iris.Constraint(latitude=lambda l: l > 0)
-    sh_cons = iris.Constraint(latitude=lambda l: l < 0)
+    notrop = iris.Constraint(air_pressure=lambda p: p < 8000.)
+    nh_cons = iris.Constraint(latitude=lambda lat: lat > 0)
+    sh_cons = iris.Constraint(latitude=lambda lat: lat < 0)

     nh_tmp = cube.extract(notrop & nh_cons)
     sh_tmp = cube.extract(notrop & sh_cons)
@@ -314,17 +328,18 @@ def qbo_metrics(run, ucube, metrics):
     """Routine to calculate QBO metrics from zonal mean U."""
     # TODO side effect: changes metrics without returning
     # Extract equatorial zonal mean U
-    # tropics = iris.Constraint(latitude=lambda lat: -5 <= lat <= 5)
+    tropics = iris.Constraint(latitude=lambda lat: -5 <= lat <= 5)
     p30 = iris.Constraint(air_pressure=3000.)
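The guard added in calc_qbo_index() above covers series with no zero crossings; the period estimate itself amounts to counting sign changes of the equatorial wind. A self-contained numpy sketch on a synthetic 28-month oscillation (an illustration, not the script's exact algorithm):

import numpy as np

months = np.arange(120)
u30 = 15.0 * np.sin(2.0 * np.pi * months / 28.0)  # stand-in for U(30 hPa)

crossings = np.where(np.diff(np.sign(u30)) != 0)[0]  # indices of sign changes

if crossings.size < 2:
    print("no oscillation; QBO period cannot be computed")
else:
    period = 2.0 * np.mean(np.diff(crossings))  # two crossings per cycle
    print(period)  # ~28 months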
- # qbo = weight_lat_ave(ucube.extract(tropics)) - # qbo30 = qbo.extract(p30) - qbo = weight_lat_ave(ucube) + ucube_cds = [cdt.standard_name for cdt in ucube.coords()] + if 'longitude' in ucube_cds: + qbo = weight_lat_ave(ucube.extract(tropics)) + else: + qbo = weight_cosine(ucube.extract(tropics)) qbo30 = qbo.extract(p30) # write results to current working directory outfile = '{0}_qbo30_{1}.nc' - with iris.FUTURE.context(netcdf_no_unlimited=True): - iris.save(qbo30, outfile.format(run['runid'], run['period'])) + iris.save(qbo30, outfile.format(run['runid'], run['period'])) # Calculate QBO metrics (period, amp_west, amp_east) = calc_qbo_index(qbo30) @@ -360,10 +375,17 @@ def tpole_metrics(run, tcube, metrics): shpole = iris.Constraint(latitude=lambda la: la <= -60, air_pressure=5000.0) - djf_polave = weight_lat_ave(t_djf.extract(nhpole)) - mam_polave = weight_lat_ave(t_mam.extract(nhpole)) - jja_polave = weight_lat_ave(t_jja.extract(shpole)) - son_polave = weight_lat_ave(t_son.extract(shpole)) + tcube_cds = [cdt.standard_name for cdt in tcube.coords()] + if 'longitude' in tcube_cds: + djf_polave = weight_lat_ave(t_djf.extract(nhpole)) + mam_polave = weight_lat_ave(t_mam.extract(nhpole)) + jja_polave = weight_lat_ave(t_jja.extract(shpole)) + son_polave = weight_lat_ave(t_son.extract(shpole)) + else: + djf_polave = weight_cosine(t_djf.extract(nhpole)) + mam_polave = weight_cosine(t_mam.extract(nhpole)) + jja_polave = weight_cosine(t_jja.extract(shpole)) + son_polave = weight_cosine(t_son.extract(shpole)) # Calculate metrics and add to metrics dictionary # TODO Why take off 180.0? @@ -410,12 +432,15 @@ def teq_metrics(run, tcube, metrics): # Calculate area-weighted global monthly means from multi-annual data t_months = teq100.aggregated_by('month', iris.analysis.MEAN) - t_months = weight_lat_ave(t_months) + tcube_cds = [cdt.standard_name for cdt in tcube.coords()] + if 'longitude' in tcube_cds: + t_months = weight_lat_ave(t_months) + else: + t_months = weight_cosine(t_months) # write results to current working directory outfile = '{0}_teq100_{1}.nc' - with iris.FUTURE.context(netcdf_no_unlimited=True): - iris.save(t_months, outfile.format(run['runid'], run['period'])) + iris.save(t_months, outfile.format(run['runid'], run['period'])) # Calculate metrics (tmean, tstrength) = mean_and_strength(t_months) @@ -435,12 +460,15 @@ def t_metrics(run, tcube, metrics): # Calculate area-weighted global monthly means from multi-annual data t_months = t100.aggregated_by('month', iris.analysis.MEAN) - t_months = weight_lat_ave(t_months) + tcube_cds = [cdt.standard_name for cdt in tcube.coords()] + if 'longitude' in tcube_cds: + t_months = weight_lat_ave(t_months) + else: + t_months = weight_cosine(t_months) # write results to current working directory outfile = '{0}_t100_{1}.nc' - with iris.FUTURE.context(netcdf_no_unlimited=True): - iris.save(t_months, outfile.format(run['runid'], run['period'])) + iris.save(t_months, outfile.format(run['runid'], run['period'])) # Calculate metrics (tmean, tstrength) = mean_and_strength(t_months) @@ -460,12 +488,15 @@ def q_metrics(run, qcube, metrics): # Calculate area-weighted global monthly means from multi-annual data q_months = q70.aggregated_by('month', iris.analysis.MEAN) - q_months = weight_lat_ave(q_months) + qcube_cds = [cdt.standard_name for cdt in qcube.coords()] + if 'longitude' in qcube_cds: + q_months = weight_lat_ave(q_months) + else: + q_months = weight_cosine(q_months) # write results to current working directory outfile = '{0}_q70_{1}.nc' - with 
iris.FUTURE.context(netcdf_no_unlimited=True): - iris.save(q_months, outfile.format(run['runid'], run['period'])) + iris.save(q_months, outfile.format(run['runid'], run['period'])) # Calculate metrics qmean = q_mean(q_months) @@ -521,7 +552,9 @@ def mainfunc(run): # removes longitude as a dimension coordinate and makes it a scalar # coordinate in line with how a zonal mean would be described. # Is there a better way of doing this? - ucube = ucube.collapsed('longitude', iris.analysis.MEAN) + ucube_cds = [cdt.standard_name for cdt in ucube.coords()] + if 'longitude' in ucube_cds: + ucube = ucube.collapsed('longitude', iris.analysis.MEAN) if not ucube.coord('latitude').has_bounds(): ucube.coord('latitude').guess_bounds() # check for month_number @@ -538,7 +571,9 @@ def mainfunc(run): # removes longitude as a dimension coordinate and makes it a scalar # coordinate in line with how a zonal mean would be described. # Is there a better way of doing this? - tcube = tcube.collapsed('longitude', iris.analysis.MEAN) + tcube_cds = [cdt.standard_name for cdt in tcube.coords()] + if 'longitude' in tcube_cds: + tcube = tcube.collapsed('longitude', iris.analysis.MEAN) if not tcube.coord('latitude').has_bounds(): tcube.coord('latitude').guess_bounds() aux_coord_names = [aux_coord.var_name for aux_coord in tcube.aux_coords] @@ -556,7 +591,9 @@ def mainfunc(run): # removes longitude as a dimension coordinate and makes it a scalar # coordinate in line with how a zonal mean would be described. # Is there a better way of doing this? - qcube = qcube.collapsed('longitude', iris.analysis.MEAN) + qcube_cds = [cdt.standard_name for cdt in qcube.coords()] + if 'longitude' in qcube_cds: + qcube = qcube.collapsed('longitude', iris.analysis.MEAN) if not qcube.coord('latitude').has_bounds(): qcube.coord('latitude').guess_bounds() aux_coord_names = [aux_coord.var_name for aux_coord in qcube.aux_coords] @@ -717,15 +754,51 @@ def calc_merra(run): time=lambda cell: run['from_monthly'] <= cell.point <= run['to_monthly'] ) - with iris.FUTURE.context(cell_datetime_objects=True): - t = t.extract(time) - q = q.extract(time) + t = t.extract(time) + q = q.extract(time) + + # zonal mean + t_cds = [cdt.standard_name for cdt in t.coords()] + if 'longitude' in t_cds: + t = t.collapsed('longitude', iris.analysis.MEAN) + q_cds = [cdt.standard_name for cdt in q.coords()] + if 'longitude' in q_cds: + q = q.collapsed('longitude', iris.analysis.MEAN) + + # mean over tropics + equator = iris.Constraint(latitude=lambda lat: -10 <= lat <= 10) + p100 = iris.Constraint(air_pressure=10000.) + t = t.extract(equator & p100) + + # Calculate area-weighted global monthly means from multi-annual data + iris.coord_categorisation.add_month(t, 'time', name='month') + t = t.aggregated_by('month', iris.analysis.MEAN) + if 'longitude' in t_cds: + t = weight_lat_ave(t) + else: + t = weight_cosine(t) + + # Extract 10S-10N humidity at 100hPa + tropics = iris.Constraint(latitude=lambda lat: -10 <= lat <= 10) + p70 = iris.Constraint(air_pressure=7000.) + q = q.extract(tropics & p70) + + # Calculate area-weighted global monthly means from multi-annual data + iris.coord_categorisation.add_month(q, 'time', name='month') + q = q.aggregated_by('month', iris.analysis.MEAN) + if 'longitude' in q_cds: + q = weight_lat_ave(q) + else: + q = weight_cosine(q) + # Calculate time mean t = t.collapsed('time', iris.analysis.MEAN) q = q.collapsed('time', iris.analysis.MEAN) # Create return values - tmerra = t.data # K - qmerra = ((1000000. * 29. / 18.) 
* q.data) # ppmv + tmerra = t.data # K + # TODO magic numbers + qmerra = ((1000000. * 29. / 18.) * q.data) # ppmv + return tmerra, qmerra @@ -740,9 +813,8 @@ def calc_erai(run): time=lambda cell: run['from_monthly'] <= cell.point <= run['to_monthly'] ) - with iris.FUTURE.context(cell_datetime_objects=True): - t = t.extract(time) - q = q.extract(time) + t = t.extract(time) + q = q.extract(time) # Calculate time mean t = t.collapsed('time', iris.analysis.MEAN) q = q.collapsed('time', iris.analysis.MEAN) @@ -818,26 +890,39 @@ def multi_t100_vs_q70_plot(run): ax1.set_ylim(merra_ymin, merra_ymax) ax1.xaxis.set_tick_params(labelsize='small') ax1.yaxis.set_tick_params(labelsize='small') - ax1.set_xlabel('T(10S-10N, 100hPa) bias wrt MERRA (K)', fontsize='large') - ax1.set_ylabel('q(10S-10N, 70hPa) bias wrt MERRA (ppmv)', fontsize='large') + ax1.set_xlabel('T(10S-10N, 100hPa) bias wrt ERA-I (K)', fontsize='large') + ax1.set_ylabel('q(10S-10N, 70hPa) bias wrt ERA-I (ppmv)', fontsize='large') # ERA-I axes - ax2 = ax1.twiny() # twiny gives second horizontal axis - ay2 = ax1.twinx() # twinx gives second vertical axis - ax2.xaxis.set_tick_params(labelsize='small') - ay2.yaxis.set_tick_params(labelsize='small') - ax2.set_xlabel('T(10S-10N, 100hPa) bias wrt ERA-I (K)', fontsize='large') - ay2.set_ylabel('q(10S-10N, 70hPa) bias wrt ERA-I (ppmv)', fontsize='large') + # ax2 = ax1.twiny() # twiny gives second horizontal axis + # ay2 = ax1.twinx() # twinx gives second vertical axis + # ax2.xaxis.set_tick_params(labelsize='small') + # ay2.yaxis.set_tick_params(labelsize='small') + # ax2.set_xlabel('T(10S-10N, 100hPa) bias wrt ERA-I (K)', + # fontsize='large') + # ay2.set_ylabel('q(10S-10N, 70hPa) bias wrt ERA-I (ppmv)', + # fontsize='large') # Plot ideal area - patch = Rectangle( - (0.0, 0.0), - 2.0, - 0.2 * q_merra[0, 0, 0], - fc='lime', - ec='None', - zorder=0) - ax1.add_patch(patch) + # Arbitrary box of acceptability for Met Office model + # development, designed to target warm + # tropopause temperature biases + # (e.g. Hardiman et al (2015) DOI: 10.1175/JCLI-D-15-0075.1. + # Defined as T bias < 2K and q bias < 20% relative to MERRA. + # MERRA is not used in this plot so ranges shifted by + # +0.8 K and +0.1 ppmv to account for + # differences between MERRA and ERA-Interim. + # TODO: Make box symmetric about zero to be relevant + # to models with a cold bias? + # TODO: add this to the final plot + # patch = Rectangle( + # (0.8, 0.1), + # 2.0, + # 0.2 * q_merra, + # fc='lime', + # ec='None', + # zorder=0) + # ax1.add_patch(patch) # Plot control tmon = iris.load_cube(t_cntl) diff --git a/esmvaltool/diag_scripts/bock20jgr/corr_pattern.ncl b/esmvaltool/diag_scripts/bock20jgr/corr_pattern.ncl new file mode 100644 index 0000000000..71e4851885 --- /dev/null +++ b/esmvaltool/diag_scripts/bock20jgr/corr_pattern.ncl @@ -0,0 +1,149 @@ +; ############################################################################# +; DIAGNOSTIC SCRIPT for correlation pattern figure (see IPCC ch. 9 fig. 9.6) +; Author: Lisa Bock (DLR, Germany) and Bettina Gier (Uni Bremen & DLR, Germany) +; CRESCENDO and IPCCAR6 project +; ############################################################################# +; +; Description +; Calculates centred pattern correlations for annual mean climatologies +; and plots them. Like IPCC ch. 
9 fig 9.6 +; +; Required diag_script_info attributes (diagnostics specific) +; +; Optional diag_script_info attributes (diagnostic specific) +; +; Required variable_info attributes (variable specific) +; none +; +; Optional variable_info attributes (variable specific) +; none +; +; Caveats +; +; +; Modification history +; 20210519-A_bock_lisa: Written together with Bettina Gier +; (based on ipcc_ar5/ch09_fig09_6.ncl) +; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" +load "$diag_scripts/shared/latlon.ncl" +load "$diag_scripts/shared/statistics.ncl" + +begin + enter_msg(DIAG_SCRIPT, "") + + var0 = variable_info[0]@short_name + info_items = select_metadata_by_name(input_file_info, var0) + infiles = metadata_att_as_array(info_items, "filename") + datasetnames = metadata_att_as_array(info_items, "dataset") + projectnames = metadata_att_as_array(info_items, "project") + dim_MOD = ListCount(info_items) + dim_VAR = ListCount(variable_info) + + ; Check and set reference dataset + ref_model = variable_info[0]@reference_dataset + if (variable_info[0]@reference_dataset.eq."None") then + error_msg("f", DIAG_SCRIPT, "", "no reference dataset is specified") + end if + log_info("reference model = " + ref_model) + ref_ind = ind(datasetnames.eq.ref_model) + if isatt(variable_info[0], "alternative_dataset") then + alt_ref = True + aref_ind = ind(datasetnames.eq.variable_info[0]@alternative_dataset) + else + alt_ref = False + end if + +end + +begin + ; Output directories + ncdf_dir = config_user_info@work_dir + "pattern_cor.nc" + modproj_dir = config_user_info@work_dir + "modprojnames.txt" + system("mkdir -p " + config_user_info@work_dir) +end + +begin +; ----------------------------------------------------------------------------- +; -------------------- Compute correlation for one var ------------------------ +; ----------------------------------------------------------------------------- + + ; Save list of preproc files for provenance in collect.ncl + preproc_files = metadata_att_as_array(info_items, "filename") + + ; Reference model + mod_idx = ispan(0, dim_MOD - 1, 1) + mod_ind_woref = mod_idx(ind(mod_idx.ne.ref_ind)) + delete(mod_idx) + + ; Make output array + all_cor = new((/dim_MOD-1/), float) + all_cor!0 = "models" + all_cor&models = datasetnames(mod_ind_woref) + ; Pass on alt models + if alt_ref then + all_cor@alt_obs = variable_info[0]@alternative_dataset + else + all_cor@alt_obs = "none" + end if + + ; Loop over models, with ref model processed first + model_ind = array_append_record(ref_ind, mod_ind_woref, 0) + do iloop = 0, dim_MOD - 1 + imod = model_ind(iloop) + log_info("Processing " + datasetnames(imod)) + + ; Extract model corresponding data + var = read_data(info_items[imod]) + + ; Calculate annual mean + data_yearly = time_operations(var, \ + toint(info_items[imod]@start_year), \ + toint(info_items[imod]@end_year), \ + "average", "yearly", True) + + ; Mean over the years + mean_years = dim_avg_n_Wrap(data_yearly, 0) + delete(data_yearly) + + ; Compute centred pattern correlation (ref_model will be first so no error) + if datasetnames(imod).eq.ref_model then + data_ref = mean_years + else + ; Since ref model processed first, move all models up one spot + all_cor(iloop-1) = pattern_cor(data_ref, mean_years, 1.0, 0) + end if + delete(mean_years) + delete(var) + end do + + ; Write data + all_cor@corvar = var0 + all_cor@corvar_long = variable_info[0]@long_name + all_cor@var = "cor" + 
all_cor@diag_script = DIAG_SCRIPT
+  all_cor@diagnostics = variable_info[0]@diagnostic
+  all_cor@ncdf = ncdf_dir
+  all_cor@input = str_join(infiles, ",")
+  ncdf_outfile = ncdf_write(all_cor, ncdf_dir)
+
+  ; Write provenance
+  statistics = (/"corr", "clim"/)
+  domains = (/"global"/)
+  plottype = "other"
+  authors = (/"gier_bettina", "bock_lisa"/)
+  references = (/"flato13ipcc"/)
+  log_provenance(ncdf_outfile, "n/a", "n/a", statistics, domains, "other", \
+                 authors, references, preproc_files)
+
+  ; Write list of models with project (arrays get squished as attributes)
+  modnames = datasetnames(mod_ind_woref)
+  projnames = projectnames(mod_ind_woref)
+  modproj = (/modnames, projnames/)
+  asciiwrite(modproj_dir, modproj)
+
+  leave_msg(DIAG_SCRIPT, "")
+
+end
diff --git a/esmvaltool/diag_scripts/bock20jgr/corr_pattern_collect.ncl b/esmvaltool/diag_scripts/bock20jgr/corr_pattern_collect.ncl
new file mode 100644
index 0000000000..0d6db9d321
--- /dev/null
+++ b/esmvaltool/diag_scripts/bock20jgr/corr_pattern_collect.ncl
@@ -0,0 +1,471 @@
+; #############################################################################
+; DIAGNOSTIC PLOT SCRIPT for extending IPCC ch. 9 fig. 9.6
+; Author: Bettina Gier (DLR, Germany), Lisa Bock (DLR, Germany)
+; IPCC-AR6 project
+; #############################################################################
+;
+; Description
+;   Calculates centred pattern correlations for annual mean climatologies
+;   and plots them, like IPCC ch. 9 fig. 9.6.
+;
+; Required diag_script_info attributes (diagnostics specific)
+;
+; Optional diag_script_info attributes (diagnostic specific)
+;   plot_median: if True, median is plotted
+;   diag_script_info@diag_order: give order of plotting variables on the
+;     x-axis
+;
+; Required variable_info attributes (variable specific)
+;   none
+;
+; Optional variable_info attributes (variable specific)
+;   none
+;
+; Required variable attributes (defined in namelist)
+;   reference_dataset: name of reference data set (observations)
+;
+; Caveats
+;   Effect of different regridding methods not yet determined
+;
+; Modification history
+;   20210519-A_bock_lisa: Written with Bettina Gier
+;                         (based on ipcc_ar5/ch09_fig09_6.ncl)
+;
+; #############################################################################

+load "$diag_scripts/../interface_scripts/interface.ncl"
+load "$diag_scripts/shared/plot/style.ncl"
+
+begin
+  enter_msg(DIAG_SCRIPT, "")
+
+  ; Define file type
+  file_type = config_user_info@output_file_type
+  if (ismissing(file_type)) then
+    file_type = "ps"
+  end if
+
+  ; Set default values for non-required diag_script_info attributes
+  set_default_att(diag_script_info, "plot_median", False)
+
+; -----------------------------------------------------------------------------
+; ------------------- Collecting Data -----------------------------------------
+; -----------------------------------------------------------------------------
+
+  ; List of correlation files and project names
+  file_list = tostring(diag_script_info@input_files) + "/pattern_cor.nc"
+  mp_file_list = tostring(diag_script_info@input_files) + "/modprojnames.txt"
+
+  ; Filter non-existing files (mp files only useful if cor file exists)
+  file_list := file_list(ind(isfilepresent(file_list)))
+  mp_file_list := mp_file_list(ind(isfilepresent(file_list)))
+
+  ; Set up auxiliary variables
+  var_name = new(dimsizes(file_list), string)
+  var_collect = new(dimsizes(file_list), string)
+  var_diag = new(dimsizes(file_list), string)
+  alt_obs = new(dimsizes(file_list), string)
+
+  ; Loop over files in list, read and append data
+  do ii = 0, dimsizes(file_list) - 1
+
+    data_temp = ncdf_read(file_list(ii), "cor")
+    var_name(ii) = data_temp@corvar_long
+    if (var_name(ii) .eq. "Near-Surface Air Temperature") then
+      var_name(ii) = " Near-Surface ~C~ Air Temperature"
+    elseif (var_name(ii) .eq. "Precipitation") then
+      var_name(ii) = "Precipitation"
+    elseif (var_name(ii) .eq. "Sea Level Pressure") then
+      var_name(ii) = "Sea Level ~C~ Pressure"
+    elseif (var_name(ii) .eq. "TOA Shortwave Cloud Radiative Effect") then
+      var_name(ii) = "TOA Shortwave Cloud ~C~ Radiative Effect"
+    elseif (var_name(ii) .eq. "TOA Outgoing Longwave Radiation") then
+      var_name(ii) = " TOA Outgoing ~C~ Longwave Radiation"
+    end if
+    var_collect(ii) = data_temp@corvar
+    var_diag(ii) = data_temp@diagnostics
+    alt_obs(ii) = data_temp@alt_obs
+
+    ; Make 2D array to store all data
+    if (.not.isdefined("data_all")) then
+      data_all = new((/dimsizes(data_temp), dimsizes(file_list)/), float)
+      data_all(:, ii) = data_temp
+      data_all!0 = "models"
+      data_all&models = data_temp&models
+
+      ; Input file list for provenance
+      prov_files = str_split(data_temp@input, ",")
+    else
+      ; If model coordinates are identical
+      if (dimsizes(data_temp&models).eq.dimsizes(data_all&models)) \
+          .and. all(data_temp&models.eq.data_all&models) then
+        data_all(:, ii) = (/data_temp/)
+      else
+        ; Loop over models in new data entry
+        do imod_temp = 0, dimsizes(data_temp&models) - 1
+          ; If current model is not already part of the model coordinate
+          if (.not.any(data_temp&models(imod_temp) .eq. data_all&models)) then
+            ; Append record for model(imod)
+            data_new = extend_var_at(data_all, 0, \
+                                     dimsizes(data_all&models))
+            data_new(dimsizes(data_all&models), ii) = (/data_temp(imod_temp)/)
+            data_new&models(dimsizes(data_all&models)) = \
+              (/data_temp&models(imod_temp)/)
+            delete(data_all)
+            data_all = data_new
+            delete(data_new)
+          else
+            ; Loop over models of data
+            do imod = 0, dimsizes(data_all&models)-1
+              ; if the new model matches an existing entry,
+              ; write the data into that entry
+              if (data_all&models(imod).eq. data_temp&models(imod_temp)) then
+                data_all(imod, ii) = (/data_temp(imod_temp)/)
+              end if
+            end do
+          end if
+        end do
+      end if
+      ; Append input file list for provenance
+      prov_files := array_append_record(prov_files, \
+                                        str_split(data_temp@input, ","), 0)
+    end if
+    delete(data_temp)
+  end do
+  data_all!1 = "vars"
+  data_all&vars = var_name
+  delete(var_name)
+
+  ; Get project for models
+  projects = new(dimsizes(data_all&models), string)
+
+  ; Loop over model-project files to complete project list
+  do ii = 0, dimsizes(mp_file_list) - 1
+    modproj = asciiread(mp_file_list(ii), -1, "string")
+    mods = modproj(:dimsizes(modproj)/2-1)
+    projs = modproj(dimsizes(modproj)/2:)
+
+    overlap_index = get1Dindex(data_all&models, mods)
+    projects(overlap_index) = projs
+    delete([/modproj, mods, projs, overlap_index/])
+  end do
+
+  data_all&models@project = projects
+  delete(projects)
+
+  ; Sort diagnostics in the order specified in the settings
+  if (isatt(diag_script_info, "diag_order")) then
+    l_ok = True
+    if (dimsizes(data_all&vars).ne. \
+        dimsizes(diag_script_info@diag_order)) then
+      error_msg("w", DIAG_SCRIPT, "", "specified order of diagnostics " + \
+                "cannot be applied, number of diagnostics does not match")
+      l_ok = False
+    end if
+    pid = new(dimsizes(diag_script_info@diag_order), integer)
+    do ii = 0, dimsizes(diag_script_info@diag_order) - 1
+      tmp = ind(var_diag.eq.diag_script_info@diag_order(ii))
+      if (any(ismissing(tmp)) .or.
dimsizes(tmp).gt.1) then + error_msg("w", DIAG_SCRIPT, "", "specified order of diagnostics " + \ + "cannot be applied, invalid entry in diag_order") + break + end if + pid(ii) = tmp + delete(tmp) + end do + if (l_ok) then + data_all := data_all(:, pid) + alt_obs := alt_obs(pid) + end if + end if + +; ------------------------------------------------------------------------- +; ----------------- Interim Functions ------------------------------------- +; ------------------------------------------------------------------------- + + undef("get_unique_entries") + function get_unique_entries(array) + ; + ; Arguments: + ; array: 1D array + ; + ; Return value: 1D array of unique entries in array + ; + ; Modification history: + ; 20170406-A_gier_bettina: written. + local dummy_array, unique_new, new_array, nodupes + begin + dummy_array = array + do while (dimsizes(dummy_array).ne.0) + if (.not.isdefined("unique")) then + unique = dummy_array(0) + else + unique_new = array_append_record(unique, dummy_array(0), 0) + delete(unique) + unique = unique_new + delete(unique_new) + end if + nodupes = ind(dummy_array.ne.dummy_array(0)) + ; Missing value index are dim 1 and would give an error + if (dimsizes(dummy_array).eq. \ + dimsizes(ind(dummy_array.eq.dummy_array(0)))) then + break + end if + new_array = dummy_array(nodupes) + delete(nodupes) + delete(dummy_array) + dummy_array = new_array + delete(new_array) + end do + return(unique) + end +; ----------------------------------------------------------------------------- +; ---------------------------- Plotting --------------------------------------- +; ----------------------------------------------------------------------------- + + ; Calculating necessary values + ; Number of Projects needed to determine span + ; For now just CMIP projects + c_projects = str_match_ic(data_all&models@project, "CMIP") + projects = get_unique_entries(c_projects) + projects = (/"CMIP3", "CMIP5", "CMIP6"/) + n_var = dimsizes(data_all&vars) + + nr_projects = dimsizes(projects) + if (nr_projects .eq. 2) then + x_val = new(n_var, float) + do i = 0, n_var-1 + x_val(i) = 1 + i * 3 + end do + elseif (nr_projects .eq. 3) then + x_val = new(n_var, float) + do i = 0, n_var-1 + x_val(i) = 1.5 + i * 4 + end do + else + x_val = ispan(1, n_var*nr_projects, nr_projects) + end if + + ; Mean and Median of Ensemble - without alt obs + obs_ind = get1Dindex(data_all&models, alt_obs) + if all(alt_obs.eq."none") then + mod_ind = ispan(0, dimsizes(data_all&models)-1, 1) + else + ex_ind = obs_ind(ind(.not.ismissing(obs_ind))) + mods_ind = ispan(0, dimsizes(data_all&models)-1, 1) + ex_ind@_FillValue = default_fillvalue("integer") + mods_ind@_FillValue = default_fillvalue("integer") + mod_ind = get1Dindex_Collapse(mods_ind, ex_ind) + delete(mods_ind) + delete(ex_ind) + end if + ; Split by project + means = new((/nr_projects, n_var/), float) + if (diag_script_info@plot_median) then + median = new((/nr_projects, n_var/), float) + end if + + do iproj = 0, nr_projects - 1 + mod_proj_ind = ind(data_all&models@project(mod_ind).eq.projects(iproj)) + means(iproj, :) = dim_avg_n(data_all(mod_proj_ind, :), 0) + if (diag_script_info@plot_median) then + median(iproj, :) = dim_median_n(data_all(mod_proj_ind, :), 0) + end if + delete(mod_proj_ind) + end do + + ; Create outfile directory + system("mkdir -p " + config_user_info@plot_dir) + + ; Plotting preparation + name = "" + outfile = config_user_info@plot_dir + name + "patterncor." 
+ file_type + wks = gsn_open_wks(file_type, outfile) + wks@fullname = outfile + + ; Calc limits + y_min = min(data_all) + y_min := decimalPlaces(y_min-0.05, 1, True) + x_max = max(x_val) + nr_projects * 0.5 + + ; Set half line length + l_length = 0.1 + + ; Project Colors - TODO: let them be specified in cfg + fcolors = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_line_03.rgb") + + res = True + res@gsnDraw = False + res@gsnFrame = False + res@vpWidthF = 0.8 + n_square = 16. + if x_max.le. n_square then + res@vpHeightF = 0.8 + else + res@vpHeightF = 0.8*(n_square/x_max) + end if + font_height = 0.02/0.6 * res@vpHeightF + res@gsnMaximize = True + res@tiYAxisString = "Correlation" + res@trYMinF = y_min + res@trYMaxF = 1 + yspan = res@trYMaxF - res@trYMinF + res@trXMinF = 0 + res@trXMaxF = x_max + res@tmXBLabels = data_all&vars + res@tmXBValues = x_val + res@tmXBMode = "Explicit" + res@tmXBLabelFontHeightF = font_height + if (n_var .gt. 3) then + res@tmXBLabelFontHeightF = font_height * 0.5 + end if + ; Set Marker Size to be half of line_length + marker_size = res@vpWidthF * 3 * l_length / (res@trXMaxF - res@trXMinF) + + ; Resources for model lines + res_lines = True ; polyline mods desired + res_lines@gsLineDashPattern = 0. ; solid line + res_lines@gsLineThicknessF = 2.5 ; line thickness + res_lines@tfPolyDrawOrder = "PreDraw" + + ; Resources for mean lines + res_mlines = True ; polyline mods desired + res_mlines@gsLineDashPattern = 0. ; solid line + res_mlines@gsLineThicknessF = 4. ; line thicker + res_mlines@tfPolyDrawOrder = "PreDraw" + + ; Resources for obs data markers + res_circ = True + res_circ@gsMarkerIndex = 16 + res_circ@gsMarkerColor = (/178, 178, 178, 255/) / 255. + res_circ@gsMarkerSizeF = 2 * marker_size + res_circ@gsMarkerOpacityF = 0.4 + + ; Resources for white markers below median + res_circw = True + res_circw@gsMarkerIndex = 16 + res_circw@gsMarkerColor = "white" + res_circw@gsMarkerSizeF = 0.95*marker_size + res_circw@tfPolyDrawOrder = "PreDraw" + + ; Resources for median markers if required + res_circm = True + res_circm@gsMarkerIndex = 4 + res_circm@gsMarkerSizeF = marker_size + res_circm@gsMarkerThicknessF = 3. + res_circm@tfPolyDrawOrder = "Draw" + + ; Resources for legend text + res_text = True ; text mods desired + res_text@txFontHeightF = font_height ; change text size + res_text@txJust = "CenterLeft" ; text justification + + ; New x_val according to median! + x_val_proj = new((/nr_projects, n_var/), float) + ; space between projects in graph + if (nr_projects.eq.1) then + d_proj = 1. ; offset + else + d_proj = 1 ; (nr_projects - 1.)/nr_projects + end if + do iproj = 0, nr_projects - 1 + do ivar = 0, n_var - 1 + x_val_proj(iproj, ivar) = ivar*(1 + nr_projects) - 0.5 \ + + d_proj*(iproj+1.) + end do + end do + ; Start with blank plot! gs and xy marker sizes are different.. + plot = gsn_csm_blank_plot(wks, res) + + if (diag_script_info@plot_median) then + do iproj = 0, nr_projects - 1 + res_circm@gsMarkerColor = fcolors(iproj, :) + plot@$unique_string("dum_median")$ = gsn_add_polymarker( \ + wks, plot, x_val_proj(iproj, :), median(iproj, :), res_circm) + end do + end if + + ; add lines for individual models + do ivar = 0, dimsizes(data_all(0, :))-1 + do iproj = 0, dimsizes(projects)-1 + ; Skip Project if no data for it + proj_mods = ind(data_all&models@project(mod_ind).eq.projects(iproj)) + if .not. 
all(ismissing(data_all(proj_mods, ivar))) then + proj_center = x_val_proj(iproj, ivar) + xx = (/proj_center-l_length, proj_center+l_length/) + ; Plot lines for mean + xx_mean = (/proj_center-l_length*3., proj_center+l_length*3./) + yy_mean = (/means(iproj, ivar), means(iproj, ivar)/) + res_mlines@gsLineColor = fcolors(iproj, :) + res_lines@gsLineColor = fcolors(iproj, :) + plot@$unique_string("dum")$ = gsn_add_polyline( \ + wks, plot, xx_mean, yy_mean, res_mlines) + do imod = 0, dimsizes(data_all(:, 0)) - 1 + ; Only plot if model in right project + if data_all&models@project(imod).eq.projects(iproj) then + ; Don't plot obs as lines + if (.not.ismissing(data_all(imod, ivar))) then + if (data_all&models(imod).ne.alt_obs(ivar)) then + yy = (/data_all(imod, ivar), data_all(imod, ivar)/) + plot@$unique_string("dum")$ = gsn_add_polyline( \ + wks, plot, xx, yy, res_lines) + end if + end if + end if + end do + if (diag_script_info@plot_median) then + plot@$unique_string("dum_ci")$ = gsn_add_polymarker( \ + wks, plot, x_val_proj(iproj, ivar), median(iproj, ivar), res_circw) + end if + end if + delete(proj_mods) + end do + if (alt_obs(ivar).ne."none") then + ; Plot obs as circles + plot@$unique_string("dum_circ")$ = gsn_add_polymarker( \ + wks, plot, x_val(ivar), data_all(obs_ind(ivar), ivar), res_circ) + end if + end do + + y_min_label = res@trYMinF + 0.1*yspan + lb_stride = yspan/res@vpHeightF * font_height * 2. + plabel = projects + ; Draw Legend + do iproj = 0, dimsizes(projects)-1 + res_text@txFontColor = fcolors(iproj, :) + ; CMIP5 label has to be reduced to CMIP5 sometimes + if str_match_ind_ic(plabel, "CMIP5").eq.iproj then + plabel(iproj) = "CMIP5" + end if + plot@$unique_string("dum_l")$ = gsn_add_text(wks, plot, plabel(iproj),\ + x_val(0)-0.5, y_min_label + \ + lb_stride*(iproj+1.2), \ + res_text) + end do + res_text@txFontColor = res_circ@gsMarkerColor + plot@$unique_string("dum_l")$ = gsn_add_text(wks, plot, \ + "alternative Observations", \ + x_val(0)-0.5, y_min_label, \ + res_text) + + draw(plot) + frame(wks) + ; Write output + system("mkdir -p " + config_user_info@work_dir) + workpath = config_user_info@work_dir + "pattern_cor.nc" + ncdf_outfile = ncdf_write(data_all, workpath) + + ; collect meta-data and call ESMValMD function + caption = "Centered pattern correlations between models and observations" \ + + " for the annual mean climatologies " \ + + "(similar to IPCC ch. 9 fig. 9.6)." + statistics = (/"corr", "clim"/) + domains = (/"global"/) + plottype = "other" + authors = (/"gier_bettina", "bock_lisa"/) + references = (/"flato13ipcc"/) + log_provenance(ncdf_outfile, outfile, caption, statistics, domains, \ + plottype, authors, references, prov_files) + leave_msg(DIAG_SCRIPT, "") +end diff --git a/esmvaltool/diag_scripts/bock20jgr/model_bias.ncl b/esmvaltool/diag_scripts/bock20jgr/model_bias.ncl new file mode 100644 index 0000000000..2e87d28f9d --- /dev/null +++ b/esmvaltool/diag_scripts/bock20jgr/model_bias.ncl @@ -0,0 +1,382 @@ +; MODEL_BIAS +; ############################################################################ +; Author: Lisa Bock (DLR, Germany), Axel Lauer (DLR, Germany) +; Crescendo, IPCC-AR6 +; ############################################################################ +; Description +; Calculates the multi-model mean bias of annual mean +; variables compared with a reference dataset (observations). 
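The two headline numbers this script annotates its plots with, the area-weighted global bias and the RMSD of the multi-model mean against the reference, are weighted statistics of a simple difference field. An illustrative numpy version with invented fields and grid (the script itself uses area_operations() and calculate_metric()):

import numpy as np

lat = np.linspace(-87.5, 87.5, 72)
weights = np.cos(np.deg2rad(lat))[:, None] * np.ones((72, 144))

rng = np.random.default_rng(0)
ref = 15.0 + rng.normal(size=(72, 144))   # invented reference climatology
mm = ref + 0.5                            # model 0.5 units too high everywhere

diff = mm - ref
bias = np.sum(weights * diff) / np.sum(weights)                # weighted mean
rmsd = np.sqrt(np.sum(weights * diff ** 2) / np.sum(weights))  # weighted RMSD
print(bias, rmsd)  # both 0.5 for this constant offset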
+;
+; Required diag_script_info attributes (diagnostic specific)
+;   none
+;
+; Optional diag_script_info attributes (diagnostic specific)
+;   projection: map projection, e.g., Mollweide, Mercator
+;   timemean: time averaging, i.e. "seasonalclim" (DJF, MAM, JJA, SON),
+;             "annualclim" (annual mean)
+;
+; Required variable_info attributes (variable specific)
+;   reference_dataset: name of reference dataset
+;
+; Optional variable_info attributes (variable specific)
+;   long_name: description of variable
+;
+; Caveats
+;   none
+;
+; Modification history
+;   20211006-lauer_axel: removed write_plots
+;   20190312-bock_lisa: adapted code (based on clouds/clouds_bias.ncl)
+;
+; ############################################################################
+
+load "$diag_scripts/../interface_scripts/interface.ncl"
+
+load "$diag_scripts/shared/scaling.ncl"
+load "$diag_scripts/shared/statistics.ncl"
+load "$diag_scripts/shared/plot/style.ncl"
+load "$diag_scripts/shared/plot/contour_maps.ncl"
+
+begin
+
+  enter_msg(DIAG_SCRIPT, "")
+
+  var0 = variable_info[0]@short_name
+  info0 = select_metadata_by_name(input_file_info, var0)
+  project = input_file_info[1]@project
+  dim_MOD = ListCount(info0)
+  if (isatt(variable_info[0], "reference_dataset")) then
+    refname = variable_info[0]@reference_dataset
+  end if
+  names = metadata_att_as_array(info0, "dataset")
+  infiles = metadata_att_as_array(info0, "filename")
+
+  log_info("++++++++++++++++++++++++++++++++++++++++++")
+  log_info(DIAG_SCRIPT + " (var: " + var0 + ")")
+  log_info("++++++++++++++++++++++++++++++++++++++++++")
+
+  ; time averaging: at the moment, only "annualclim" and "seasonalclim"
+  ; are supported
+
+  ; Set default values for non-required diag_script_info attributes
+  set_default_att(diag_script_info, "projection", "CylindricalEquidistant")
+  set_default_att(diag_script_info, "timemean", "annualclim")
+
+  timemean = diag_script_info@timemean
+
+  if (timemean.eq."seasonalclim") then
+    numseas = 4
+    season = (/"DJF", "MAM", "JJA", "SON"/)
+  else
+    numseas = 1  ; default
+    season = (/"annual"/)
+  end if
+
+  ; create string for caption (netcdf provenance)
+
+  allseas = season(0)
+  do is = 1, numseas - 1
+    allseas = allseas + "/" + season(is)
+  end do
+
+  ; make sure path for (mandatory) netcdf output exists
+
+  work_dir = config_user_info@work_dir + "/"
+  ; Create work dir
+  system("mkdir -p " + work_dir)
+
+end
+
+begin
+  ; ========================================================================
+  ; ========================== initialization ==============================
+  ; ========================================================================
+
+  ; check for reference dataset definition
+
+  if (.not.(isvar("refname"))) then
+    error_msg("f", DIAG_SCRIPT, "", "no reference dataset defined in recipe")
+  end if
+
+  ; get reference dataset
+
+  ref_ind = ind(names .eq. refname)
+  if (ismissing(ref_ind)) then
+    error_msg("f", DIAG_SCRIPT, "", "reference dataset (" \
+              + refname + ") is missing")
+  end if
+
+  ; get multi-model mean index
+
+  mm_ind = ind(names .eq.
"MultiModelMean") + + if (ismissing(mm_ind)) then + error_msg("f", DIAG_SCRIPT, "", "multi-model mean is missing (required)") + end if + + ; basename of diag_script + + diag_script_base = basename(DIAG_SCRIPT) + + ; ======================================================================== + ; =========================== calculations =============================== + ; ======================================================================== + + ; read data + + A0 = read_data(info0[mm_ind]) + mmdata = time_operations(A0, -1, -1, "average", timemean, True) + delete(A0) + + A0 = read_data(info0[ref_ind]) + refdata = time_operations(A0, -1, -1, "average", timemean, True) + delete(A0) + + ; convert units for plotting + if (var0.eq."pr") then + ; kg m-2 s-1 --> mm day-1 + mmdata = convert_units(mmdata, "mm/day") + refdata = convert_units(refdata, "mm/day") + elseif (var0.eq."tas") then + mmdata = convert_units(mmdata, "degC") + refdata = convert_units(refdata, "degC") + elseif (var0.eq."tos" .or. var0.eq."thetao") then + if (mmdata@units .eq. "K") then + mmdata = convert_units(mmdata, "degC") + end if + if (refdata@units .eq. "K") then + refdata = convert_units(refdata, "degC") + end if + elseif (var0.eq."so") then + refdata = refdata * 1000. + end if + + ; differences between multi-model mean and reference data set + ; (multi-model bias) + + diff = mmdata - refdata + mmdata@diag_script = DIAG_SCRIPT + copy_VarMeta(mmdata, diff) + + if any(var0 .eq. (/"tas", "tos", "thetao"/)) then + diff@units = "~F34~0~F~ C" + diff@res_cnLevels = ispan(-6, 6, 1) + elseif (var0 .eq. "pr") then + diff@res_cnLevels = ispan(-30, 30, 5) * 0.1 + elseif (var0 .eq. "so") then + diff@units = "0.001" + diff@res_cnLevels = ispan(-30, 30, 5) * 0.1 + end if + + rmsd = calculate_metric(refdata, mmdata, "RMSD") + bias = area_operations(diff, -90., 90., 0., 360., "average", True) + + ; ======================================================================== + ; ============================= plotting ================================= + ; ======================================================================== + + climofiles = new(2, string) + climofiles(0) = infiles(mm_ind) + climofiles(1) = infiles(ref_ind) + + diff@res_gsnMaximize = True ; use full page for the plot + diff@res_cnFillOn = True ; color plot desired + diff@res_cnLineLabelsOn = False ; contour lines + diff@res_cnLinesOn = False + diff@res_tiMainOn = True + diff@res_gsnLeftStringFontHeightF = 0.015 + diff@res_gsnRightStringFontHeightF = 0.015 + diff@res_cnLevelSelectionMode = "ExplicitLevels" + diff@res_mpOutlineOn = True + if (.not.isatt(diff, "res_cnLevels")) then + diff@res_cnLevels = fspan(min(diff), max(diff), 20) + end if + diff@res_mpFillOn = False + diff@res_lbLabelBarOn = True + diff@res_gsnRightString = "" + diff@res_gsnLeftString = "" + diff@res_mpFillDrawOrder = "PostDraw" ; draw map fill last + diff@res_cnMissingValFillColor = "Gray" + diff@res_tmYLLabelsOn = False + diff@res_tmYLOn = False + diff@res_tmYRLabelsOn = False + diff@res_tmYROn = False + diff@res_tmXBLabelsOn = False + diff@res_tmXBOn = False + diff@res_tmXTLabelsOn = False + diff@res_tmXTOn = False + diff@res_cnInfoLabelOn = False ; turn off cn info label + diff@res_mpProjection = diag_script_info@projection + + diff@var = var0 ; Overwrite existing entry + if (isatt(variable_info[0], "long_name")) then + diff@var_long_name = variable_info[0]@long_name + end if + diff@var_units = diff@units + + plots = new((/2, numseas/), graphic) + + ; 
-------------------------------------------------------------------- + ; plot contour map + + diff@res_gsnDraw = False ; Do not draw yet + diff@res_gsnFrame = False ; Don't advance frame. + diff@res_mpPerimOn = False + + diff@res_lbTitleString = "(" + diff@units + ")" + diff@res_lbTitlePosition = "Bottom" + + diff@res_lbLabelFontHeightF = 0.014 + diff@res_lbTopMarginF = 0.1 + diff@res_lbTitleFontHeightF = 0.014 + + diff@res_tiMainFontHeightF = 0.016 + + diff@res_tiMainString = project + " Multi-Model Mean Bias" + + copy_VarMeta(diff, mmdata) + delete(mmdata@res_cnLevels) + + mmdata@res_tiMainString = project + " Multi-Model Mean" + + mmdata@res_gsnLeftStringFontHeightF = 0.015 + mmdata@res_gsnRightStringFontHeightF = 0.015 + mmdata@res_gsnRightString = " " + mmdata@res_gsnLeftString = " " + + if (var0.eq."tas") then + mmdata@res_cnLevels = ispan(-35, 35, 5) + + pal = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_temperature_div.rgb") + mmdata@res_cnFillPalette = pal + diff@res_cnFillPalette = pal + elseif (var0.eq."pr") then + mmdata@res_cnLevels = ispan(1, 10, 1) + + pal = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_precipitation_seq.rgb") + mmdata@res_cnFillPalette = pal + pal2 = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_precipitation_div.rgb") + diff@res_cnFillPalette = pal2 + elseif (var0.eq."tos" .or. var0.eq."thetao") then + mmdata@res_tiMainString = "Sea Surface Temperature ~C~Multi Model Mean" + mmdata@res_cnLevels = ispan(-30, 30, 5) + + pal = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_temperature_div.rgb") + mmdata@res_cnFillPalette = pal + diff@res_tiMainString = "Sea Surface Temperature ~C~Multi Model Mean Bias" + diff@res_cnFillPalette = pal + elseif (var0.eq."so") then + mmdata@res_tiMainString = "Sea Surface Salinity ~C~Multi Model Mean" + mmdata@res_cnLevels = ispan(310, 370, 5) * 0.1 + + pal = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_misc_seq_1.rgb") + mmdata@res_cnFillPalette = pal + diff@res_tiMainString = "Sea Surface Salinity ~C~Multi Model Mean Bias" + pal2 = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_misc_div.rgb") + diff@res_cnFillPalette = pal2 + else + mmdata@res_cnLevels = fspan(min(mmdata), max(mmdata), 20) + end if + + plotsperline = (/2, 0/) + plotind = (/0, 1/) ; mmm and mean bias are always plotted + + ; absolute differences + + ; root mean square differences + + ; relative differences + + ; add global bias and rmsd value + diff@res_gsnRightString = "rmsd = " + sprintf("%6.3f", rmsd) + diff@res_gsnLeftString = "bias = " + sprintf("%6.3f", bias) + + ; panelling resources + pres = True + pres@gsnPanelCenter = False + pres@gsnPanelRowSpec = True ; tell panel what order to plot + pres@gsnPanelYWhiteSpacePercent = 5 + pres@gsnPanelXWhiteSpacePercent = 5 + pres@gsnPanelFigureStrings = (/"a)", "b)"/) + pres@gsnPanelFigureStringsPerimOn = False + pres@gsnPanelFigureStringsJust = "TopLeft" + pres@gsnPanelFigureStringsFontHeightF = 0.016 + + plotfile = new(numseas, string) + plotfile(:) = "" + + do is = 0, numseas - 1 + ; -------------------------------------------------------------------- + ; create workspace + + if (isvar("wks")) then + delete(wks) + end if + + wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "model_bias_" + var0 \ + + "_" + season(is) + "_" + project) + + plotfile(is) = wks@fullname + + if (numseas.gt.1) then + pres@txString = season(is) + plots(0, is) = contour_map(wks, mmdata(is, :, :), var0) + plots(1, 
is) = contour_map(wks, diff(is, :, :), var0) + gsn_panel(wks, plots(plotind, is), plotsperline, pres) + else + plots(0, 0) = contour_map(wks, mmdata, var0) + plots(1, 0) = contour_map(wks, diff, var0) + gsn_panel(wks, plots(plotind, 0), plotsperline, pres) + end if + + ; add meta data to plot (for reporting) + + caption = "Multi-model values, from top left to bottom right: " \ + + "mean, bias" + + end do ; is-loop (seasons) + + ; ########################################### + ; # output to netCDF # + ; ########################################### + + nc_filename = work_dir + "model_bias_" + var0 + "_" + project + ".nc" + nc_filename@existing = "overwrite" + + mmdata@var = var0 + "_mean" + mmdata@long_name = var0 + " (multi-model mean)" + nc_outfile = ncdf_write(mmdata, nc_filename) + + nc_filename@existing = "append" + refdata@var = var0 + "_ref" + refdata@long_name = var0 + " (reference data)" + nc_outfile = ncdf_write(refdata, nc_filename) + + diff@var = var0 + "_bias" + diff@long_name = var0 + " (multi-model bias)" + nc_outfile = ncdf_write(diff, nc_filename) + + ; ------------------------------------------------------------------------ + ; write provenance to netcdf output and plot file(s) (mean) + ; ------------------------------------------------------------------------ + + statistics = (/"clim", "diff"/) + domain = ("global") + plottype = ("geo") + prov_caption = caption + " for variable " + var0 \ + + " (" + allseas + "), reference = " + names(ref_ind) + "." + + do is = 0, numseas - 1 + log_provenance(nc_outfile, plotfile(is), prov_caption, statistics, \ + domain, plottype, "", "", climofiles) + end do + + leave_msg(DIAG_SCRIPT, "") + +end diff --git a/esmvaltool/diag_scripts/bock20jgr/tsline.ncl b/esmvaltool/diag_scripts/bock20jgr/tsline.ncl new file mode 100644 index 0000000000..f05f11da0d --- /dev/null +++ b/esmvaltool/diag_scripts/bock20jgr/tsline.ncl @@ -0,0 +1,556 @@ +; ############################################################################# +; PLOTS TIME SERIES +; Authors: Lisa Bock (DLR, Germany) +; ############################################################################# +; +; Description +; This script is for plotting a time series of the global mean (anomaly). +; +; Required diag_script_info attributes (diagnostic specific) +; styleset: as in diag_scripts/shared/plot/style.ncl functions +; +; Optional diag_script_info attributes (diagnostic specific) +; time_avg: type of time average (currently only "yearly" and "monthly" are +; available). 
+; ts_anomaly: calculates anomalies with respect to the defined reference
+; period for each grid point by removing the mean of the given
+; calendar month (requiring at least 50% of the data to be
+; non-missing)
+; ref_start: start year of reference period for anomalies
+; ref_end: end year of reference period for anomalies
+; ref_value: if true, right panel with mean values is attached
+; ref_mask: if true, model fields will be masked by reference fields
+; region: name of domain
+; plot_units: variable unit for plotting
+; y_min: set min of y-axis
+; y_max: set max of y-axis
+; mean_nh_sh: if true, calculate the NH and SH means first
+; volcanoes: if true, lines of main volcanic eruptions will be added
+; header: if true, region name as header
+; write_stat: if true, write multi-model statistics in nc-file
+;
+; Caveats
+;
+; Modification history
+; 20190911-bock_lisa: included method of Jones et al., 2013 and
+; added provenance
+; 20181112-bock_lisa: code rewritten for ESMValTool v2.0
+;
+; #############################################################################
+
+load "$diag_scripts/../interface_scripts/interface.ncl"
+
+load "$diag_scripts/shared/latlon.ncl"
+load "$diag_scripts/shared/statistics.ncl"
+load "$diag_scripts/shared/scaling.ncl"
+load "$diag_scripts/shared/ensemble.ncl"
+
+load "$diag_scripts/shared/plot/style.ncl"
+load "$diag_scripts/shared/plot/xy_line.ncl"
+
+
+begin
+
+  enter_msg(DIAG_SCRIPT, "")
+
+  var0 = variable_info[0]@short_name
+  project0 = input_file_info[0]@project
+  if (project0 .eq. "OBS") then
+    project0 = input_file_info[1]@project
+    exp0 = input_file_info[1]@exp
+    obs_first = True
+  else
+    exp0 = input_file_info[0]@exp
+    obs_first = False
+  end if
+  info_items = select_metadata_by_name(input_file_info, var0)
+  datasetnames = metadata_att_as_array(info_items, "dataset")
+  dim_MOD = ListCount(info_items)
+  dim_VAR = ListCount(variable_info)
+
+  log_info("++++++++++++++++++++++++++++++++++++++++++")
+  log_info(DIAG_SCRIPT + " (var: " + var0 + ")")
+  log_info("++++++++++++++++++++++++++++++++++++++++++")
+
+  ; Create output plot directory
+  plot_dir = config_user_info@plot_dir
+  system("mkdir -p " + plot_dir)
+
+  ; Plot file type
+  file_type = config_user_info@output_file_type
+  if (ismissing(file_type)) then
+    file_type = "ps"
+  end if
+
+  ; Time averaging
+  if (isatt(diag_script_info, "time_avg")) then
+    time_avg = diag_script_info@time_avg
+    if (all(time_avg.ne.(/"yearly", "monthly"/))) then
+      error_msg("f", DIAG_SCRIPT, "", \
+                "time averaging option " + time_avg + " not yet " + \
+                "implemented")
+    end if
+  else
+    time_avg = "monthly"
+  end if
+
+  ; Check for required settings
+  exit_if_missing_atts(diag_script_info, "styleset")
+
+  ; Set default values for non-required diag_script_info attributes
+  set_default_att(diag_script_info, "time_avg", "monthly")
+  set_default_att(diag_script_info, "ts_anomaly", "noanom")
+  set_default_att(diag_script_info, "ref_value", True)
+  set_default_att(diag_script_info, "ref_mask", False)
+  set_default_att(diag_script_info, "region", "Global")
+  set_default_att(diag_script_info, "mean_nh_sh", False)
+  set_default_att(diag_script_info, "header", False)
+  set_default_att(diag_script_info, "volcanoes", False)
+  set_default_att(diag_script_info, "write_stat", False)
+  set_default_att(diag_script_info, "stat", "MinMax")
+
+  ; Determine time range
+  start_year = min(metadata_att_as_array(info_items, "start_year"))
+  end_year = max(metadata_att_as_array(info_items, "end_year"))
+  all_years = ispan(start_year, end_year, 1)
+
+  ; 
Create time coordinate + if (time_avg.eq."monthly") then + ntime = 12 * (end_year - start_year + 1) + time = new(ntime, integer) + do yy = start_year, end_year + do mm = 1, 12 + time(12 * (yy - start_year) + mm - 1) = 100 * yy + mm + end do + end do + elseif (time_avg.eq."yearly") then + ntime = end_year - start_year + 1 + time = new(ntime, integer) + time = ispan(start_year, end_year, 1) + end if + ntime_old = ntime + + ; get multi-model mean index + mmm_ind = ind(datasetnames .eq. "MultiModelMean") + + ; Set index of the reference dataset and read it + if (isatt(variable_info[0], "reference_dataset")) then + ref_ind = ind(datasetnames.eq.variable_info[0]@reference_dataset) + A_ref = read_data(info_items[ref_ind]) + end if + + ; Anomaly + if (isatt(diag_script_info, "ts_anomaly")) then + anom = diag_script_info@ts_anomaly + if (anom .eq. "anom") then + if (isatt(diag_script_info, "ref_start") .and. \ + isatt(diag_script_info, "ref_end")) then + ref_start = diag_script_info@ref_start + ref_end = diag_script_info@ref_end + if (ref_start.lt.start_year) then + ref_start = start_year + end if + if ((ref_start.lt.start_year) .or. (ref_end.gt.end_year) .or. \ + (ref_end.lt.ref_start)) then + error_msg("f", DIAG_SCRIPT, "", \ + "period for reference years is not properly defined") + end if + else + error_msg("f", DIAG_SCRIPT, "", \ + "period for reference years is not defined " + \ + "(needed for anomaly)") + end if + + anom_ref = new((/dim_MOD/), double) + anom_ref!0 = "model" + anom_ref&model = datasetnames + end if + else + anom = "noanom" + end if + + ; Create model array + model_arr = new((/dim_MOD, ntime/), double) + model_arr!0 = "model" + model_arr!1 = "time" + model_arr&model = datasetnames + model_arr&time = time + model_arr@_FillValue = 1e+20 + model_arr = model_arr@_FillValue + + ; Loop over models + do imod = 0, dim_MOD - 1 + + log_info("Process dataset: " + datasetnames(imod)) + + ; Read data + A0 = read_data(info_items[imod]) + dnames = getVarDimNames(A0) + + ; Convert units for plotting (if required) + if (isatt(diag_script_info, "plot_units")) then + A0 = convert_units(A0, diag_script_info@plot_units) + end if + + if (isatt(variable_info[0], "reference_dataset")) then + + ; masking with reference dataset + if (diag_script_info@ref_mask .and. imod .ne. ref_ind) then + ; Determine start/end year + start_year = info_items[imod]@start_year + end_year = info_items[imod]@end_year + if (start_year .gt. diag_script_info@ref_start) then + ref_start = start_year + else + ref_start = diag_script_info@ref_start + end if + A_ref_mask = time_operations(A_ref, start_year, \ + end_year, "extract", "", 0) + A0_nomask_ref = time_operations(A0, ref_start, \ + ref_end, "extract", "", 0) + A0 = where(A_ref_mask.eq.A_ref_mask@_FillValue, \ + A_ref_mask@_FillValue, A0) + delete(A_ref_mask) + end if + + end if + + ; Anomaly + if (anom .eq. 
"anom") then + ; calculate monthly mean of ref period if 0.5 data points are available + tmp = time_operations(A0, ref_start, ref_end, "extract", "", 0) + limit = toint(0.5 * dimsizes(tmp&time)) + do i = 0, dimsizes(tmp!0)-1 + tmp(i, :, :) = where(dim_num_n(.not.ismissing(tmp), 0).ge.limit, \ + tmp(i, :, :), tmp@_FillValue) + end do + A0_monavg_ref = time_operations(tmp, ref_start, ref_end, "average", \ + "monthlyclim", True) + + ; calculate anomaly for each grid point + do i = 0, dimsizes(A0&time) - 1 + A0(i, :, :) = A0(i, :, :) - A0_monavg_ref(mod(i, 12), :, :) + end do + + if (diag_script_info@ref_value) then + ; Calculate time average of ref period + ; annual mean if at least 2 months of data is available + ; if masking then take original unmasked dataset for ref mean + if (diag_script_info@ref_mask .and. imod .ne. ref_ind) then + tmp = A0_nomask_ref + delete(A0_nomask_ref) + end if + date := cd_calendar(tmp&time, 0) + year := date(:, 0) + month := date(:, 1) + weights = days_in_month(toint(year), toint(month)) + A0_timavg_ref = dim_avg_wgt_n_Wrap(tmp, weights, 2, 0) + delete(year) + delete(month) + delete(date) + delete(weights) + delete(A0_monavg_ref) + + ; calculate global mean of reference period + ; first for each hemisphere - if choosen + if (diag_script_info@mean_nh_sh) then + tmp1 = area_operations(A0_timavg_ref, -90., 0., 0., 360., \ + "average", True) + tmp2 = area_operations(A0_timavg_ref, 0., 90., 0., 360., \ + "average", True) + anom_ref_tmp = (tmp1 + tmp2) / 2. + delete(tmp1) + delete(tmp2) + else + anom_ref_tmp = area_operations(A0_timavg_ref, -90., 90., \ + 0., 360., "average", True) + end if + delete(A0_timavg_ref) + + anom_ref(imod) = anom_ref_tmp + ; delete(anom_ref_tmp) + end if + + delete(tmp) + + end if + + ; Calculate time average of dataset + ; annual mean if at least 2 months of data is available + if (time_avg.eq."yearly") then + do i = 0, dimsizes(A0!0) - 1 + A0(i, :, :) = where(dim_num_n(.not.ismissing(A0), 0).ge.2, \ + A0(i, :, :), A0@_FillValue) + end do + A0_timavg = time_operations(A0, -1, -1, "average", \ + "yearly", True) + else + A0_timavg = A0 + end if + delete(A0) + + ; calculate global mean of complete time series + ; first for each hemisphere - if choosen + if (diag_script_info@mean_nh_sh) then + tmp1 = area_operations(A0_timavg, -90., 0., 0., 360., \ + "average", True) + tmp2 = area_operations(A0_timavg, 0., 90., 0., 360., \ + "average", True) + procmod = tmp1 + procmod = (tmp1 + tmp2) / 2. 
+      delete(tmp1)
+      delete(tmp2)
+    else
+      procmod = area_operations(A0_timavg, -90., 90., 0., 360., \
+                                "average", True)
+    end if
+    delete(A0_timavg)
+
+    if (.not.isdefined("procmod")) then
+      error_msg("f", DIAG_SCRIPT, "", "cannot process variable " + var0)
+    end if
+
+    ; Match time coordinate
+    if (time_avg.eq."monthly") then
+      date = cd_calendar(procmod&time, -1)
+    elseif (time_avg.eq."yearly") then
+      date = procmod&year
+    end if
+    idx1 = ind(date(0).eq.model_arr&time)
+    idx2 = ind(date(dimsizes(date) - 1).eq.model_arr&time)
+    model_arr(imod, idx1:idx2) = (/procmod/)
+    if (imod.eq.0) then
+      copy_VarAtts(procmod, model_arr)
+    end if
+    delete(procmod)
+    delete(date)
+
+  end do
+
+  ; if the OBS dataset is the first entry, move it to the second-to-last
+  ; position (just before the multi-model mean)
+  if (obs_first) then
+    tmp = model_arr(0, :)
+    tmp1 = info_items[0]
+    do i = 0, dim_MOD - 3
+      model_arr(i, :) = model_arr(i + 1, :)
+      model_arr&model(i) = model_arr&model(i + 1)
+      info_items[i] = info_items[i + 1]
+    end do
+    model_arr(dim_MOD - 2, :) = tmp
+    model_arr&model(dim_MOD - 2) = variable_info[0]@reference_dataset
+    info_items[dim_MOD - 2] = tmp1
+    ref_ind = dim_MOD - 2
+    delete(tmp)
+    delete(tmp1)
+  end if
+
+  ; Convert time coordinate to years (required by the plot routine)
+  if (time_avg.eq."monthly") then
+    year = model_arr&time / 100
+    xmin = toint(floor(min(year)))
+    xmax = toint(ceil(max(year)))
+    month = model_arr&time - 100 * year
+    time = todouble(year + month/12.)
+    delete(model_arr&time)
+    model_arr&time = time
+    delete(time)
+    delete(year)
+    delete(month)
+  elseif (time_avg.eq."yearly") then
+    xmin = toint(floor(min(model_arr&time)))
+    xmax = toint(ceil(max(model_arr&time)))
+    tmp = todouble(model_arr&time)
+    delete(model_arr&time)
+    model_arr&time = tmp
+    delete(tmp)
+  end if
+
+  ; special case for HadCRUT4 observations: the mean value comes from the
+  ; climatology file (absolute.nc), which, unlike the anomaly data,
+  ; contains no missing values
+  if (anom .eq. "anom" .and. isatt(variable_info[0], "reference_dataset") \
+      .and. variable_info[0]@reference_dataset .eq. "HadCRUT4") then
+    if(ref_start .eq. 1961 .and. ref_end .eq. 1990) then
+      anom_ref(ref_ind) = 14.0
+    else
+      anom_ref(ref_ind) = anom_ref@_FillValue
+    end if
+  end if
+
+  ; calculate multi-model statistics
+  if (diag_script_info@write_stat) then
+    if (isatt(variable_info[0], "reference_dataset")) then
+      ind_mod = ind(datasetnames.ne."MultiModelMean" .and. 
\ + datasetnames.ne.variable_info[0]@reference_dataset) + else + ind_mod = ind(datasetnames.ne."MultiModelMean") + end if + + model_arr_stat = new((/ntime, 10/), double) + model_arr_stat!1 = "stat" + model_arr_stat!0 = "time" + model_arr_stat&time = model_arr&time + model_arr_stat&stat = (/"mean", "stddev", "spread", "min", "5%", "10%", \ + "90%", "95%", "max", "ref"/) + + model_arr_stat(:, 0) = (/model_arr(mmm_ind, :)/) + do i = 0, ntime-1 + statb = stat_dispersion(model_arr(ind_mod, i), False) + ; standard deviation + model_arr_stat(i, 1) = (/statb(1)/) + ; spread + model_arr_stat(i, 2) = (/statb(14)-statb(2)/) + ; min + model_arr_stat(i, 3) = (/statb(2)/) + ; max + model_arr_stat(i, 8) = (/statb(14)/) + ; 5% quantile + model_arr_stat(i, 4) = (/statb(24)/) + ; 95% quantile + model_arr_stat(i, 7) = (/statb(25)/) + ; 10% quantile + model_arr_stat(i, 5) = (/statb(3)/) + ; 90% quantile + model_arr_stat(i, 6) = (/statb(13)/) + end do + if (isatt(variable_info[0], "reference_dataset")) then + model_arr_stat(:, 9) = (/model_arr(ref_ind, :)/) + end if + + end if + + ; ************************************* + ; output to NetCDF + ; ************************************* + out_path = config_user_info@work_dir + system("mkdir -p " + out_path) + out_path1 = out_path + "tsline_" + var0 + "_" + anom + ".nc" + model_arr@ncdf = out_path + model_arr@experiment = project0 + "_" + exp0 + model_arr@diag_script = DIAG_SCRIPT + model_arr@var = var0 + ncdf_outfile = ncdf_write(model_arr, out_path1) + ; statistics + if (diag_script_info@write_stat) then + out_path2 = out_path + "tsline_mm_" + var0 + "_" + anom + "_stat.nc" + model_arr_stat@ncdf = out_path + model_arr_stat@experiment = project0 + "_" + exp0 + model_arr_stat@diag_script = DIAG_SCRIPT + model_arr_stat@var = var0 + model_arr_stat@units = model_arr@units + ncdf_outfile = ncdf_write(model_arr_stat, out_path2) + end if + + ; Define workstation + outfile = config_user_info@plot_dir + var0 + "_" \ + + str_sub_str(diag_script_info@region, " ", "_") \ + + "_" + project0 + "_" + exp0 + "_" + anom + "_" \ + + start_year + "-" + end_year + wks = gsn_open_wks(file_type, outfile) + + ; Set resources + res = True + res@trXMinF = xmin + res@trXMaxF = xmax + ; res@trXMaxF = xmax + 0.25 * (xmax - xmin) + res@tmXBMode = "Explicit" + if (xmax - xmin.gt.20) then + res@tmXBValues = ispan(xmin, xmax, 10) + res@tmXBLabels = ispan(xmin, xmax, 10) + res@tmXBMinorValues = ispan(xmin, xmax, 5) + else + res@tmXBValues = ispan(xmin, xmax, 5) + res@tmXBLabels = ispan(xmin, xmax, 5) + res@tmXBMinorValues = ispan(xmin, xmax, 1) + end if + res@tmXBLabelAngleF = 45 + res@tmXBLabelJust = "CenterRight" + if (isatt(diag_script_info, "y_min")) then + res@trYMinF = diag_script_info@y_min + end if + if (isatt(diag_script_info, "y_max")) then + res@trYMaxF = diag_script_info@y_max + end if + + res@tmXBMode = "Manual" + res@tmXBTickSpacingF = 20 + + if (diag_script_info@header) then + res@tiMainString = diag_script_info@region + else + res@tiMainString = "" + end if + + if (isatt(variable_info[0], "long_name")) then + if (var0 .eq. "tas") then + varname = "Temperature" + else + varname = variable_info[0]@long_name + end if + else + varname = var0 + end if + + if (model_arr@units .eq. "degC") then + units = "~F34~0~F~ C" + else + units = model_arr@units + end if + + if (anom .eq. 
"anom") then + res@tiYAxisString = varname + " Anomaly" + " (" + units + ")" + else + res@tiYAxisString = varname + " (" + units + ")" + end if + + res0 = True + ref_start = diag_script_info@ref_start + ref_end = diag_script_info@ref_end + res0@tiYAxisString = tostring(ref_start) + "-" + tostring(ref_end) \ + + " Mean " + varname + " (" + units + ")" + + if (anom .eq. "anom") then + xy_line_anom(wks, anom_ref, model_arr, model_arr&time, \ + 0, ref_start, ref_end, res, res0, info_items) + else + xy_line(wks, model_arr, model_arr&time, model_arr_stddev, res, info_items) + end if + + log_info(" wrote " + outfile + "." + file_type) + + ; *************************************** + ; add meta data to plot (for reporting) + ; *************************************** + + if (diag_script_info@region .eq. "Global") then + domain = "global" + else + domain = "reg" + end if + + if (anom .eq. "anom") then + statistics = "anomaly" + else + statistics = "mean" + end if + + caption = "Time series of the " + statistics + " for variable " \ + + varname + ", similar to IPCC AR5, fig. 9.8." + contrib_authors = (/"cionni_irene", "righi_mattia", \ + "wenzel_sabrina", "bock_lisa"/) + + ; Call provenance logger + log_provenance(ncdf_outfile, \ + outfile + "." + file_type, \ + caption, \ + statistics, \ + domain, \ + "times", \ + contrib_authors, \ + (/"flato13ipcc", "jones13jgr"/), \ + metadata_att_as_array(input_file_info, "filename")) + + leave_msg(DIAG_SCRIPT, "") + +end diff --git a/esmvaltool/diag_scripts/bock20jgr/tsline_collect.ncl b/esmvaltool/diag_scripts/bock20jgr/tsline_collect.ncl new file mode 100644 index 0000000000..0cf08b109c --- /dev/null +++ b/esmvaltool/diag_scripts/bock20jgr/tsline_collect.ncl @@ -0,0 +1,382 @@ +; ############################################################################# +; WRAPPER SCRIPT FOR COLLECTING AND PLOTTING TIMESERIES +; Author: Lisa Bock (DLR, Germany) +; ############################################################################# +; Description +; Collects timeseries and their statistics previously calculated by +; tsline.ncl and passes them to a new tsline plot +; +; Required diag_script_info attributes +; styleset: as in diag_scripts/shared/plot/style.ncl functions +; +; Optional diag_script_info attributes +; time_avg: type of time average (currently only "yearly" and "monthly" are +; available). +; ts_anomaly: calculates anomalies with respect to the defined period +; ref_start: start year of reference period for anomalies +; ref_end: end year of reference period for anomalies +; region: name of domain +; plot_units: variable unit for plotting +; y_min: set min of y-axis +; y_max: set max of y-axis +; order: order in which experiments should be plotted +; header: if true, region name as header +; stat_shading: if true: shading of statistic range +; ref_shading: if true: shading of reference period +; +; Caveats +; +; Modification history +; 20190204-A_bock_lisa: written. 
+; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/latlon.ncl" +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/scaling.ncl" +load "$diag_scripts/shared/ensemble.ncl" + +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/xy_line.ncl" + +begin + + enter_msg(DIAG_SCRIPT, "") + diag_script_base = basename(DIAG_SCRIPT) + + var_names = metadata_att_as_array(variable_info, "variable_group") + var0 = "tas" + ind_tas = ind(var_names .eq. "tas") + info_items = select_metadata_by_name(input_file_info, var0) + units0 = info_items[0]@units + if(any(var_names .eq. "tasUnc1") .and. any(var_names .eq. "tasUnc2")) then + obs_unc = True + var1 = "tasUnc1" + var2 = "tasUnc2" + ind1 = ind(var_names .eq. var1) + ind2 = ind(var_names .eq. var2) + info_items1 = select_metadata_by_name(input_file_info, \ + variable_info[ind1]@short_name) + info_items2 = select_metadata_by_name(input_file_info, \ + variable_info[ind2]@short_name) + else + obs_unc = False + end if + + ; Create output plot directory + plot_dir = config_user_info@plot_dir + system("mkdir -p " + plot_dir) + + ; Check file type + file_type = config_user_info@output_file_type + if(ismissing(file_type)) then + file_type = "ps" + end if + + ; Time averaging + if (isatt(diag_script_info, "time_avg")) then + time_avg = diag_script_info@time_avg + if (all(time_avg.ne.(/"yearly", "monthly"/))) then + error_msg("f", DIAG_SCRIPT, "", \ + "time averaging option " + time_avg + " not yet " + \ + "implemented") + end if + else + time_avg = "monthly" + end if + + ; Set default values for non-required diag_script_info attributes + set_default_att(diag_script_info, "time_avg", "monthly") + set_default_att(diag_script_info, "ts_anomaly", "noanom") + set_default_att(diag_script_info, "region", "Global") + set_default_att(diag_script_info, "header", True) + set_default_att(diag_script_info, "stat_shading", False) + set_default_att(diag_script_info, "ref_shading", False) + + ; Determine time range + start_year = diag_script_info@start_year + end_year = diag_script_info@end_year + all_years = ispan(start_year, end_year, 1) + nyears = end_year - start_year + 1 + + ref_start = diag_script_info@ref_start + ref_end = diag_script_info@ref_end + + ; Anomaly + if (isatt(diag_script_info, "ts_anomaly")) then + anom = diag_script_info@ts_anomaly + if (anom .eq. "anom") then + if (isatt(diag_script_info, "ref_start") .and. \ + isatt(diag_script_info, "ref_end")) then + ref_start = diag_script_info@ref_start + ref_end = diag_script_info@ref_end + if ((ref_start.lt.start_year) .or. (ref_end.gt.end_year) .or. \ + (ref_end.lt.ref_start)) then + error_msg("f", DIAG_SCRIPT, "", \ + "period for reference years is not properly defined") + end if + else + error_msg("f", DIAG_SCRIPT, "", \ + "period for reference years is not defined " + \ + "(needed for anomaly)") + end if + anom_ref = 0. 
+
+    end if
+  else
+    anom = "noanom"
+  end if
+
+  ; -----------------------------------------------
+  ; Read pre-calculated multi-model statistics
+  ; -----------------------------------------------
+
+  input_files = diag_script_info@input_files + "/" + \
+                "tsline_mm_" + var0 + "_" + anom + "_stat.nc"
+  input_files := tostring(input_files)
+
+  ; Filter out non-existing files
+  input_files := input_files(ind(isfilepresent(input_files)))
+
+  nfiles = dimsizes(input_files)
+
+  ; Loop over files in the list, read and append data
+  do ii = 0, nfiles - 1
+
+    log_info("Read in " + input_files(ii))
+
+    data_temp = ncdf_read(input_files(ii), var0)
+
+    log_info("Experiment: " + data_temp@experiment)
+
+    if(isatt(diag_script_info, "order")) then
+      ii_n = ind(data_temp@experiment .eq. diag_script_info@order)
+      if (all(ismissing(ii_n))) then
+        error_msg("f", DIAG_SCRIPT, "", "Collected experiment " + \
+                  data_temp@experiment + " does not appear in " + \
+                  "diag_script_info@order")
+      elseif (ii_n .gt. nfiles-1) then
+        error_msg("f", DIAG_SCRIPT, "", "diag_script_info@order not " \
+                  + "well defined (fewer experiments available than listed)")
+      end if
+    else
+      ii_n = ii
+    end if
+
+    if (ii .eq. 0) then
+      time = data_temp&time
+      ntime = dimsizes(time)
+
+      dim_data = (/nfiles, ntime/)
+      data_mean = new(dim_data, double)
+      data_mean!0 = "experiment"
+      data_mean!1 = "time"
+      data_mean&experiment = new(nfiles, string, "exp")
+      data_mean&time = time
+      data_mean@var = var0
+      if (isatt(diag_script_info, "ref")) then
+        data_ref = new((/1, ntime/), double)
+        data_ref!0 = "dataset"
+        data_ref!1 = "time"
+        data_ref&time = time
+      end if
+      data_stat = new((/2, nfiles, ntime/), double)
+    end if
+
+    ; Match time coordinate
+    if (time_avg.eq."monthly") then
+      date = cd_calendar(data_temp&time, -1)
+    elseif (time_avg.eq."yearly") then
+      date = data_temp&time
+    end if
+    idx1 = ind(date(0).eq.data_mean&time)
+    idx2 = ind(date(dimsizes(date) - 1).eq.data_mean&time)
+    delete(date)
+
+    data_mean(ii_n, idx1:idx2) = (/data_temp(:, 0)/)
+    data_mean&experiment(ii_n) = data_temp@experiment
+
+    if (ii .eq. 0) then
+      data_mean@units = data_temp@units
+      if (isatt(diag_script_info, "ref")) then
+        data_ref(0, idx1:idx2) = (/data_temp(:, 9)/)
+        data_ref&dataset(0) = diag_script_info@ref
+      end if
+    end if
+
+    ; add statistics to multi-model mean
+    ; stddev
+    data_stat(0, ii_n, idx1:idx2) = data_temp(:, 0) - data_temp(:, 1)
+    data_stat(1, ii_n, idx1:idx2) = data_temp(:, 0) + data_temp(:, 1)
+
+    delete(data_temp)
+
+  end do
+
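+  ; note: data_stat(0, :, :) and data_stat(1, :, :) now hold the multi-model
+  ; mean minus/plus one standard deviation, i.e. the bounds of the range
+  ; that can be shaded via the stat_shading option
+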
"yearly") then + A_ref1 := time_operations(A_ref1, -1, -1, "average", "yearly", True) + A_ref2 := time_operations(A_ref2, -1, -1, "average", "yearly", True) + end if + + data_ref_stderr = new((/2, ntime/), double) + data_ref_stderr!0 = "statistics" + data_ref_stderr!1 = "time" + data_ref_stderr&time = time + + data_ref_stderr(0, :) = (/data_ref(0, :) - A_ref1/) + data_ref_stderr(1, :) = (/data_ref(0, :) + A_ref2/) + + delete(A_ref1) + delete(A_ref2) + + else + + data_ref_stderr = 0 + + end if + + ; ------------------------------------------- + ; NetCDF Output + ; ------------------------------------------- + + out_path = config_user_info@work_dir + system("mkdir -p " + out_path) + out_path1 = out_path + "tsline_collect_" + var0 + ".nc" + data_mean@ncdf = out_path + data_mean@diag_script = DIAG_SCRIPT + data_mean@var = var0 + ncdf_outfile = ncdf_write(data_mean, out_path1) + + if (isatt(diag_script_info, "ref")) then + out_path1 = out_path + "tsline_collect_" + var0 + "_ref.nc" + data_ref@ncdf = out_path + data_ref@diag_script = DIAG_SCRIPT + data_ref@var = var0 + ncdf_outfile_ref = ncdf_write(data_ref, out_path1) + end if + + ; ------------------------------------------- + ; Plotting + ; ------------------------------------------- + + ; Define workstation + outfile = config_user_info@plot_dir + data_mean@var + "_" + \ + str_sub_str(diag_script_info@region, " ", "_") + \ + "_multimodel_" + anom + "_" + start_year + "-" + end_year + wks = gsn_open_wks(file_type, outfile) + + ; Set resources + res = True + xmin = start_year + xmax = end_year + res@trXMinF = xmin + res@trXMaxF = xmax + res@tmXBMode = "Explicit" + if (xmax - xmin.gt.20) then + res@tmXBValues = ispan(xmin, xmax, 10) + res@tmXBLabels = ispan(xmin, xmax, 10) + res@tmXBMinorValues = ispan(xmin, xmax, 5) + else + res@tmXBValues = ispan(xmin, xmax, 5) + res@tmXBLabels = ispan(xmin, xmax, 5) + res@tmXBMinorValues = ispan(xmin, xmax, 1) + end if + if (isatt(diag_script_info, "y_min")) then + res@trYMinF = diag_script_info@y_min + end if + if (isatt(diag_script_info, "y_max")) then + res@trYMaxF = diag_script_info@y_max + end if + + if (xmin .eq. 1850) then + res@tmXBMode = "Manual" + res@tmXBTickSpacingF = 50 + elseif (xmin .eq. 1950) then + res@tmXBMode = "Manual" + res@tmXBTickSpacingF = 20 + end if + + if (diag_script_info@header) then + res@tiMainString = diag_script_info@region + else + res@tiMainString = "" + end if + + res@tiMainString = diag_script_info@region + + if (isatt(variable_info[0], "long_name")) then + if (var0 .eq. "tas") then + varname = "Temperature" + elseif (var0 .eq. "siconc") + varname = "Sea Ice" + else + varname = variable_info[0]@long_name + end if + else + varname = var0 + end if + + if (data_mean@units .eq. "degC") then + units = "~F34~0~F~ C" + else + units = data_mean@units + end if + + if (anom .eq. "anom") then + res@tiYAxisString = varname + " Anomaly" + " (" + units + ")" + else + res@tiYAxisString = varname + " (" + units + ")" + end if + + res0 = True + + if (isatt(diag_script_info, "ref")) then + xy_line_collect(wks, data_mean, data_ref, data_ref_stderr, data_ref&time, \ + data_stat, ref_start, ref_end, res, res0, info_items) + else + xy_line_collect(wks, data_mean, 0, 0, data_mean&time, \ + data_stat, ref_start, ref_end, res, res0, info_items) + end if + + log_info(" wrote " + outfile + "." + file_type) + + ; *************************************** + ; add meta data to plot (for reporting) + ; *************************************** + + if (diag_script_info@region .eq. 
"Global") then + domain = (/"global"/) + else + domain = (/"reg"/) + end if + + if (anom .eq. "anom") then + statistics = (/"anomaly"/) + else + statistics = (/"mean"/) + end if + + caption = "Multi model mean time series of the " + statistics + \ + " for variable " + varname + ", similar to IPCC AR5, fig. 10.21." + + ; Call provenance logger + log_provenance(ncdf_outfile, \ + outfile + "." + file_type, \ + caption, \ + statistics, \ + domain, \ + "times", \ + (/"bock_lisa"/), \ + (/"flato13ipcc", "jones13jgr"/), \ + metadata_att_as_array(input_file_info, "filename")) + + leave_msg(DIAG_SCRIPT, "") + +end diff --git a/esmvaltool/diag_scripts/carbon_cycle/main.ncl b/esmvaltool/diag_scripts/carbon_cycle/main.ncl new file mode 100644 index 0000000000..b0ab4220a5 --- /dev/null +++ b/esmvaltool/diag_scripts/carbon_cycle/main.ncl @@ -0,0 +1,622 @@ +; ############################################################################# +; carbon_cycle/main.ncl +; ############################################################################# +; +; Description: +; Calculates temporal and spatial averages and plots the variable as +; error-bar and seasonal cycle plots. +; +; Required diag_script_info attributes: +; region: the region to be averaged. +; legend_outside: plot legend in a separate page (not for errorbar plot). +; seasonal_cycle_plot: draw seasonal cycle plot [False/True]. +; errorbar_plot: draw errorbar plot [False/True]. +; mean_IAV_plot: draw Mean (x-axis), IAV (y-axis) plot [False/True]. +; evolution_plot: time evolution of a variable comparing obs to +; multi-dataset mean; requires ref_dataset in recipe and +; at least one model and one observation. +; +; Optional diag_script_info attributes: +; sort: sort dataset in alphabetical order. +; anav_month: Conversion of y-axis to PgC/month instead of /year, +; following Anav2013 fig 7, 9 (cycle plot). +; evolution_plot_ref_dataset: reference dataset for evolution_plot. +; evolution_plot_anomaly: makes evolution_plot an anomaly plot. +; evolution_plot_ignore: Datasets to ignore, obs are excluded +; automatically. +; evolution_plot_volcanoes: Turns on/off lines of volcano eruptions. +; evolution_plot_color: Hue of the contours; default: red = 0. +; ensemble_name: Name of ensemble for use in evolution plot legend +; +; For external dataset input: +; obsfile: Filename of external obs to read in. +; obsname: Name of ext dataset to read in. +; ref_varname: Varname in attributes of ext file. +; +; Caveats: +; If CRU is the reference dataset it is important to apply a landmask in +; the preprocessor section of the recipe. +; Regridding is not required since spatial averages are calculated for all +; plots. Performance metrics calculated with perfmetrics_main could be +; inconsistent with the plots produced by this script, since a consistent +; missing value mask is not applied here. However, for variable such NBP, +; GPP, LAI, FGCO2 for which this script is designed, there should be no +; inconsistencies as the missing value mask is similar for all datasets +; and observations and coincides with the land-sea mask. +; +; Modification history: +; 20191209-schlund_manuel: added new provenance tracking. +; 20180816-schlund_manuel: continued porting to v2.0. +; 20180619-wenzel_sabrina: adopted script to v2. +; 20170330-gier_bettina: added external data-read in (e.g. GCP). +; 20170316-gier_bettina: added cdo regridding, landmask for CRU obs. +; 20170308-gier_bettina: added "evolution_plot". 
+; 20151130-lauer_axel: corrected passing of file type from namelist to +; plotting functions; moved call to write_references +; to beginning of code. +; 20151105-righi_mattia: graphic part moved to external plot_scripts. +; 20151030-righi_mattia: regridding not working on ocean grids, removed +; since it is not necessary for these diagnostics. +; 20151029-righi_mattia: replacing area functions with ESMValTool +; functions. +; 20151021-righi_mattia: pep8 and header standardization. +; 201510??-wenzel_sabrina: adapted to ESMValTool structure. +; 201505??-anav_alessandro: written. +; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/latlon.ncl" +load "$diag_scripts/shared/scaling.ncl" +load "$diag_scripts/shared/set_operators.ncl" +load "$diag_scripts/shared/statistics.ncl" + +load "$diag_scripts/shared/plot/scatterplot.ncl" +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/xy_line.ncl" + +begin + + enter_msg(DIAG_SCRIPT, "") + AUTHORS = (/"anav_alessandro", "wenzel_sabrina", "righi_mattia", \ + "schlund_manuel"/) + REFERENCES = (/"anav13jclim"/) + DOMAIN = diag_script_info@region + + ; Variable + VAR0 = variable_info[0] + var0 = VAR0@short_name + DIM_VAR = ListCount(variable_info) + err = False + if (DIM_VAR .eq. 2) then + if (variable_info[1]@short_name .ne. var0) then + err = True + end if + elseif (DIM_VAR .gt. 2) then + err = True + end if + if (err) then + error_msg("w", DIAG_SCRIPT, "", "this diagnostic supports only one " + \ + "variable, processing " + var0) + end if + + ; Input data + INFO0 = select_metadata_by_name(input_file_info, var0) + DATASETS = metadata_att_as_array(INFO0, "dataset") + DIM_DAT = dimsizes(DATASETS) + DIM_DAT_ORIG = DIM_DAT + ALL_FILES = metadata_att_as_array(INFO0, "filename") + + log_info("++++++++++++++++++++++++++++++++++++++++++") + log_info(DIAG_SCRIPT + " (var: " + var0 + ")") + log_info("++++++++++++++++++++++++++++++++++++++++++") + +end + +begin + + ; --------------------------------------------------------------------------- + ; Read recipe and config data + ; --------------------------------------------------------------------------- + + ; Plot file type + file_type = config_user_info@output_file_type + if (ismissing(file_type)) then + file_type = "ps" + end if + + ; Output plot directory + plot_dir = config_user_info@plot_dir + system("mkdir -p " + plot_dir) + + ; Check required diag_script_info attributes + req_atts = (/"region", "legend_outside", "seasonal_cycle_plot", \ + "errorbar_plot", "evolution_plot", "mean_IAV_plot"/) + exit_if_missing_atts(diag_script_info, req_atts) + + ; Set region - supports 2D array for lat and strings + if isstring(DOMAIN) then + region = select_region(DOMAIN) + elseif dimsizes(DOMAIN) .eq. 2 then + region = (/DOMAIN(0), DOMAIN(1), \ + 0.0, 360.0/) + region@name = flt2string(DOMAIN(0)) + "_-_" + flt2string(DOMAIN(1)) + region@DM_tag = "DM_reg" + else + error_msg("f", DIAG_SCRIPT, "", DOMAIN + " is an invalid region type") + end if + + ; Set time range + start_year = 9999 + end_year = -9999 + do idat = 0, DIM_DAT - 1 + if (INFO0[idat]@start_year .lt. start_year) then + start_year = INFO0[idat]@start_year + end if + if (INFO0[idat]@end_year .gt. 
end_year) then
+      end_year = INFO0[idat]@end_year
+    end if
+  end do
+  all_years = ispan(start_year, end_year, 1)
+  DIM_TIME = dimsizes(all_years)
+
+  ; Only the evolution plot uses time ranges longer than 1986-2005
+  mean_minyear = 1986
+  mean_maxyear = 2005
+  minyear_ind = ind(ispan(start_year, end_year, 1) .eq. mean_minyear)
+  maxyear_ind = ind(ispan(start_year, end_year, 1) .eq. mean_maxyear)
+
+  ; Global arrays for collecting all data
+  all_data_yearly = new((/DIM_DAT, DIM_TIME, 2/), float)
+  all_data_yearly!0 = "dataset"
+  all_data_yearly!1 = "year"
+  all_data_yearly!2 = "statistic"
+  all_data_yearly&dataset = DATASETS
+  all_data_yearly&year = all_years
+  all_data_yearly&statistic = (/"mean", "stddev"/)
+
+  all_data_monthly = new((/DIM_DAT, 12, 2/), float)
+  all_data_monthly!0 = "dataset"
+  all_data_monthly!1 = "month"
+  all_data_monthly!2 = "statistic"
+  all_data_monthly&dataset = DATASETS
+  all_data_monthly&month = ispan(1, 12, 1)
+  all_data_monthly&statistic = (/"mean", "stddev"/)
+
+  ; ---------------------------------------------------------------------------
+  ; Read and preprocess data
+  ; ---------------------------------------------------------------------------
+
+  ; Iterate over datasets and calculate averages
+  do idat = 0, DIM_DAT - 1
+    data = INFO0[idat]
+    dataset_name = DATASETS(idat)
+    log_debug("Processing " + dataset_name)
+
+    ; Read data
+    data_var = read_data(data)
+    if (typeof(data_var) .eq. "double") then
+      data_var := dble2flt(data_var)
+    end if
+
+    ; Set fill value if necessary
+    if (.not. isatt(data_var, "_FillValue")) then
+      data_var@_FillValue = 1.0e20
+    end if
+
+    ; Check for irregular grids
+    if (.not. (isdim(data_var, "lat") .and. isdim(data_var, "lon"))) then
+      error_msg("f", DIAG_SCRIPT, "", "Irregular grids are not supported")
+    end if
+
+    ; Set type of area operation (flux vs. non-flux variables)
+    ; Flux variables need to be integrated and area weighted ("sum"), non-flux
+    ; variables need to be averaged, area weighted and normalized ("average")
+    if (data_var@units .eq. "kg m-2 s-1" .and. var0 .ne. "pr") then
+      sp_opt = "sum"
+    else
+      sp_opt = "average"
+    end if
+
+    ; Cut off Antarctica if necessary (for land-based observations)
+    if ((isatt(VAR0, "reference_dataset") .and. \
+         VAR0@reference_dataset .eq. "CRU")) then
+      if (dimsizes(data_var&lat) .gt. 1) then
+        log_info("Masking Antarctica")
+        data_var(:, {:-60}, :) = data_var@_FillValue
+      end if
+    end if
+
+    ; Calculate annual mean
+    data_yearly = time_operations(data_var, data@start_year, data@end_year, \
+                                  "average", "yearly", True)
+
+    ; Calculate mean annual cycle
+    data_monthly = time_operations( \
+      data_var, \
+      max((/mean_minyear, data@start_year/)), \
+      min((/mean_maxyear, data@end_year/)), \
+      "average", "monthlyclim", True)
+
+    ; Special case for lai:
+    ; Since datasets have different land covers, all sparsely vegetated
+    ; points must be masked
+    if (isStrSubset(var0, "lai")) then
+      data_yearly = where(data_yearly .ge. 0.025, data_yearly, \
+                          data_yearly@_FillValue)
+      data_monthly = where(data_monthly .ge. 0.025, data_monthly, \
+                           data_monthly@_FillValue)
+    end if
+
+    ; Calculate spatial mean/sum and total global area if necessary
+    dims = dimsizes(data_yearly)
+    rank = dimsizes(dims)
+    if ((rank .gt. 1) .and. .not. any(dims .eq. 
1)) then + y_mean = area_operations(data_yearly, region(0), region(1), \ + region(2), region(3), sp_opt, True) + m_mean = area_operations(data_monthly, region(0), region(1), \ + region(2), region(3), sp_opt, True) + area = map_area(data_var&lat, data_var&lon) + else + y_mean = data_yearly + m_mean = data_monthly + if (.not. isatt(data_var, "area")) then + error_msg("f", DIAG_SCRIPT, "", "Attribute 'area' not given for " + \ + "global dataset " + dataset_name) + end if + area = tofloat(data_var@area) + if (sp_opt .eq. "sum") then + y_mean = y_mean * area + m_mean = m_mean * area + end if + end if + + ; Assign to global arrays + idx_1 = ind(min(y_mean&year) .eq. all_data_yearly&year) + idx_2 = ind(max(y_mean&year) .eq. all_data_yearly&year) + all_data_yearly(idat, idx_1:idx_2, 0) = (/y_mean/) + all_data_yearly(idat, idx_1:idx_2, 1) = 0.0 + copy_VarAtts(y_mean, all_data_yearly) + all_data_monthly(idat, :, 0) = (/m_mean/) + all_data_monthly(idat, :, 1) = 0.0 + copy_VarAtts(m_mean, all_data_monthly) + delete([/y_mean, idx_1, idx_2, m_mean, data_var, data_yearly, \ + data_monthly, area/]) + end do + + ; Convert units if appropriate [kg m-2 s-1] --> [PgC y-1] + if ((isatt(VAR0, "plot_units")) .and. \ + (all_data_yearly@units .ne. VAR0@plot_units)) then + if (VAR0@plot_units .eq. "degC") then + all_data_yearly(:, :, 0) = convert_units(all_data_yearly(:, :, 0), \ + VAR0@plot_units) + all_data_monthly(:, :, 0) = convert_units(all_data_monthly(:, :, 0), \ + VAR0@plot_units) + else + all_data_yearly = convert_units(all_data_yearly, \ + VAR0@plot_units) + all_data_monthly = convert_units(all_data_monthly, \ + VAR0@plot_units) + end if + end if + + ; --------------------------------------------------------------------------- + ; Write NETCDF + ; --------------------------------------------------------------------------- + + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) + + ; Yearly data + new_path = work_dir + "yearly_" + var0 + "_" + DOMAIN + ".nc" + all_data_yearly@var = var0 + all_data_yearly@diag_script = DIAG_SCRIPT + all_data_yearly@ncdf = new_path + ncdf_outfile_yearly = ncdf_write(all_data_yearly, new_path) + + ; Monthly data + new_path = work_dir + "monthly_" + var0 + "_" + DOMAIN + ".nc" + all_data_monthly@var = var0 + all_data_monthly@diag_script = DIAG_SCRIPT + all_data_monthly@ncdf = new_path + ncdf_outfile_monthly = ncdf_write(all_data_monthly, new_path) + + ; --------------------------------------------------------------------------- + ; Plots + ; --------------------------------------------------------------------------- + + ; ------------------------------------------------------------------------- + ; Seasonal cycle plot + ; ------------------------------------------------------------------------- + + if (diag_script_info@seasonal_cycle_plot) then + data_arr = all_data_monthly + data_arr@legend_outside = diag_script_info@legend_outside + + ; Add option for PgC/month y-axis that is used in Anav2013 plots 7 and 9 + if (isatt(diag_script_info, "anav_month") .and. 
\ + diag_script_info@anav_month) then + data_arr = data_arr / 12.0 + data_arr@units = "PgC/month" + end if + + ; Draw plot + outfile = plot_dir + var0 + "_cycle_" + DOMAIN + wks = gsn_open_wks(file_type, outfile) + wks@legendfile = outfile + "_legend" + data_arr@res_tiYAxisString = var0 + " [" + \ + format_units(data_arr@units) + "]" + data_arr@res_tiMainString = (var0 + " - " + region@name + " (" + \ + mean_minyear + "-" + mean_maxyear + ")") + plot = cycle_plot(wks, data_arr, var0, INFO0) + plot_path = outfile + "." + file_type + log_info("Wrote " + plot_path) + draw(plot) + frame(wks) + + ; Provenance tracking + caption = ("Seasonal cycle plot for " + var0 + " over the period " + \ + start_year + "-" + end_year + ". Similar to Anav et al. " + \ + "(2013), Figure 9.") + statistics = (/"mean"/) + plot_type = "seas" + log_provenance(ncdf_outfile_monthly, plot_path, caption, statistics, \ + DOMAIN, plot_type, AUTHORS, REFERENCES, ALL_FILES) + delete([/plot, wks, data_arr, caption, statistics, plot_type/]) + end if + + ; ------------------------------------------------------------------------- + ; Error-bar plot + ; ------------------------------------------------------------------------- + + if (diag_script_info@errorbar_plot) then + annual_mean = dim_avg_Wrap(all_data_yearly( \ + :, minyear_ind:maxyear_ind, 0)) + annual_std = dim_stddev_Wrap(all_data_yearly( \ + :, minyear_ind:maxyear_ind, 0)) + + ; Sort dataset in alphabetical order (observations at the beginning) + if (isatt(diag_script_info, "sort")) then + projects = metadata_att_as_array(INFO0, "project") + if (diag_script_info@sort) then + pid = sort_alphabetically(annual_mean&dataset, \ + ind(projects .eq. "OBS"), \ + "begin") + annual_mean_s = annual_mean(pid) + annual_mean_s&dataset = annual_mean&dataset(pid) + annual_mean = annual_mean_s + annual_std_s = annual_std(pid) + annual_std_s&dataset = annual_std&dataset(pid) + annual_std = annual_std_s + delete([/annual_mean_s, annual_std_s, pid/]) + end if + end if + + ; Collect data and save it + new_path = work_dir + "errorbar_" + var0 + "_" + DOMAIN + ".nc" + data_arr = new((/2, dimsizes(annual_mean)/), float) + data_arr!0 = "statistic" + data_arr!1 = "dataset" + data_arr&statistic = (/"mean", "standard deviation"/) + data_arr&dataset = annual_mean&dataset + data_arr(0, :) = (/annual_mean/) + data_arr(1, :) = (/annual_std/) + data_arr@units = annual_mean@units + data_arr@var = var0 + data_arr@diag_script = DIAG_SCRIPT + data_arr@ncdf = new_path + ncdf_outfile_errorbar = ncdf_write(data_arr, new_path) + delete([/annual_mean, annual_std/]) + + ; Draw plot + outfile = plot_dir + var0 + "_errorbar_" + DOMAIN + wks = gsn_open_wks(file_type, outfile) + data_arr@res_tiMainString = (var0 + " - " + region@name + " (" + \ + mean_minyear + "-" + mean_maxyear + ")") + plot = errorbar_plot(wks, data_arr, var0) + plot_path = outfile + "." + file_type + log_info("Wrote " + plot_path) + draw(plot) + frame(wks) + + ; Provenance tracking + caption = ("Errorbar plot for " + var0 + " over the period " + \ + start_year + "-" + end_year + ". Similar to Anav et al. 
" + \ + "(2013), Figure 6.") + statistics = (/"mean", "stddev"/) + plot_type = "errorbar" + log_provenance(ncdf_outfile_errorbar, plot_path, caption, statistics, \ + DOMAIN, plot_type, AUTHORS, REFERENCES, ALL_FILES) + delete([/plot, wks, data_arr, caption, statistics, plot_type/]) + end if + + ; ------------------------------------------------------------------------- + ; Mean and IAV plot + ; ------------------------------------------------------------------------- + + if (diag_script_info@mean_IAV_plot) then + + ; Prepare data + annual_mean = new((/DIM_DAT/), float) + annual_std = new((/DIM_DAT/), float) + annual_mean@_FillValue = all_data_yearly@_FillValue + annual_std@_FillValue = all_data_yearly@_FillValue + annual_mean@units = all_data_yearly@units + annual_std@units = all_data_yearly@units + + ; Loop over datasets and compute means and standard deviations + do idat = 0, DIM_DAT - 1 + annual_mean(idat) = dim_avg( \ + all_data_yearly(idat, minyear_ind:maxyear_ind, 0)) + annual_std(idat) = dim_stddev( \ + all_data_yearly(idat, minyear_ind:maxyear_ind, 0)) + end do + + ; Collect data and save it + new_path = work_dir + "mean_iav_" + var0 + "_" + DOMAIN + ".nc" + data_arr = new((/2, DIM_DAT/), float) + data_arr!0 = "statistic" + data_arr!1 = "dataset" + data_arr&statistic = (/"mean", "interannual variability"/) + data_arr&dataset = DATASETS + data_arr(0, :) = (/annual_mean/) + data_arr(1, :) = (/annual_std/) + data_arr@units = (/annual_mean@units, annual_std@units/) + data_arr@legend_outside = diag_script_info@legend_outside + data_arr@var = var0 + data_arr@diag_script = DIAG_SCRIPT + data_arr@ncdf = new_path + ncdf_outfile_mean_iav = ncdf_write(data_arr, new_path) + delete([/annual_mean, annual_std/]) + + ; Draw plot + outfile = plot_dir + var0 + "_scatter_" + DOMAIN + wks = gsn_open_wks(file_type, outfile) + wks@legendfile = outfile + "_legend" + data_arr@res_tiMainString = (var0 + " - " + region@name + " (" + \ + mean_minyear + "-" + mean_maxyear + ")") + plot = scatterplot_markers(wks, data_arr, var0, INFO0) + plot_path = outfile + "." + file_type + log_info("Wrote " + plot_path) + draw(plot) + frame(wks) + + ; Provenance tracking + caption = ("Scatterplot for " + var0 + " over the period " + \ + start_year + "-" + end_year + ". Similar to Anav et al. " + \ + "(2013), Figure 16.") + statistics = (/"mean", "stddev", "var"/) + plot_type = "scatter" + log_provenance(ncdf_outfile_mean_iav, plot_path, caption, statistics, \ + DOMAIN, plot_type, AUTHORS, REFERENCES, ALL_FILES) + delete([/plot, wks, data_arr, caption, statistics, plot_type/]) + end if + + ; ------------------------------------------------------------------------- + ; Time evolution plot + ; ------------------------------------------------------------------------- + + if (diag_script_info@evolution_plot) then + + ; Check if it's supposed to be an anomaly plot + anomaly = False + if (isatt(diag_script_info, "evolution_plot_anomaly")) then + if (diag_script_info@evolution_plot_anomaly) then + anomaly = True + end if + end if + + ; Get index of reference dataset + obs_ind = ind(DATASETS .eq. \ + diag_script_info@evolution_plot_ref_dataset) + + ; Set list of observations/input_file_info to exclude in the statistics + exclude_data = ind(metadata_att_as_array(INFO0, "project") .eq. "OBS") + if (isatt(diag_script_info, "evolution_plot_ignore")) then + exclude_data := array_append_record( \ + exclude_data, ind(DATASETS .eq. 
\ + diag_script_info@evolution_plot_ignore), 0) + end if + + ; To not get a warning about fillvalues from get1Dindex_Exclude + ind_dats = ispan(0, dimsizes(DATASETS) - 1, 1) + ind_dats@_FillValue = default_fillvalue("integer") + model_ind = get1Dindex_Exclude(ind_dats, exclude_data) + delete([/exclude_data, ind_dats/]) + + ; data_arr = all_data_yearly -> all_data_yearly: dataset, year, std = 0 + data_arr = new((/6, dimsizes(all_data_yearly&year)/), float) + total_mean = dim_avg_n(all_data_yearly(model_ind, :, 0), 0) + total_std = new((/dimsizes(total_mean)/), float) + data_arr@ref_name = diag_script_info@evolution_plot_ref_dataset + data_arr@units = all_data_yearly@units + data_arr@dim_Mod = dimsizes(model_ind) + data_arr@project = diag_script_info@ensemble_name + + ; Subtract mean 1901-1930 (coded as "until 1930") if anomaly plot + ind_1930 = ind(all_data_yearly&year .eq. 1930) + if (anomaly) then + data_arr(0, :) = (/all_data_yearly(obs_ind, :, 0)/) - \ + avg(all_data_yearly(obs_ind, :ind_1930, 0)) + ref_mean = dim_avg_n(all_data_yearly(model_ind, :ind_1930, 0), 1) + data_arr(1, :) = (/total_mean/) - avg(ref_mean) + do time_i = 0, dimsizes(all_data_yearly&year) - 1 + data_arr(2, time_i) = dim_min_n( \ + all_data_yearly(model_ind, time_i, 0) - ref_mean, 0) + data_arr(3, time_i) = dim_max_n( \ + all_data_yearly(model_ind, time_i, 0) - ref_mean, 0) + total_std(time_i) = dim_stddev_n( \ + all_data_yearly(model_ind, time_i, 0) - ref_mean, 0) + end do + else + data_arr(0, :) = (/all_data_yearly(obs_ind, :, 0)/) + data_arr(1, :) = (/total_mean/) + data_arr(2, :) = dim_min_n(all_data_yearly(model_ind, :, 0), 0) + data_arr(3, :) = dim_max_n(all_data_yearly(model_ind, :, 0), 0) + total_std = dim_stddev_n(all_data_yearly(model_ind, :, 0), 0) + end if + data_arr(4, :) = (/all_data_yearly&year/) + data_arr(5, :) = (/total_std/) + + ; Get contour color + if (isatt(diag_script_info, "evolution_plot_color")) then + data_arr@contour_color = diag_script_info@evolution_plot_color + end if + + ; Draw plot + outfile = plot_dir + var0 + "_evolution_" + DOMAIN + wks = gsn_open_wks(file_type, outfile) + wks@legendfile = outfile + "_legend" + plot1 = evolution_base_plot(wks, data_arr, var0, anomaly) + + ; After overlaying the plots add titles and legend manually + wks_out = get_wks(wks, DIAG_SCRIPT, var0) + res_lines = True ; polyline mods desired + res_lines@gsLineColor = "black" + res_lines@gsLineDashPattern = 16 + res_lines@gsLineThicknessF = 3.0 + if (.not. anomaly) then + plot1@$unique_string("dum")$ = gsn_add_polyline(wks_out, plot1, \ + (/start_year, \ + end_year/), \ + (/0.0, 0.0/), \ + res_lines) + end if + + res = True + res@gsnDraw = False + res@gsnFrame = False + res@gsnMaximize = True + res@gsnPaperOrientation = "landscape" + res@xyLineThicknesses = (/2, 2/) + res@xyLineColors = (/"black", "white"/) + res@xyDashPatterns = (/0.0, 0.0/) ; make all lines solid + + plot3 = gsn_csm_xy(wks_out, data_arr(4, :), data_arr(:1, :), res) + + NhlAddOverlay(plot1, plot3, plot1) + draw(plot1) + frame(wks) + plot_path = outfile + "." + file_type + log_info("Wrote " + plot_path) + + ; Provenance tracking + if (anomaly) then + tagstr = " (anomalies)" + statistics = (/"mean", "stddev", "anomaly"/) + else + tagstr = "" + statistics = (/"mean", "stddev"/) + end if + caption = ("Time series of " + var0 + tagstr + ". Similar to Anav " + \ + "et al. 
(2013), Figure 1 (top).") + plot_type = "times" + log_provenance(ncdf_outfile_yearly, plot_path, caption, statistics, \ + DOMAIN, plot_type, AUTHORS, REFERENCES, ALL_FILES) + delete([/data_arr, caption, statistics, plot_type/]) + end if + + leave_msg(DIAG_SCRIPT, "") + +end diff --git a/esmvaltool/diag_scripts/carbon_cycle/mvi.ncl b/esmvaltool/diag_scripts/carbon_cycle/mvi.ncl new file mode 100644 index 0000000000..812ca16e8e --- /dev/null +++ b/esmvaltool/diag_scripts/carbon_cycle/mvi.ncl @@ -0,0 +1,534 @@ +; ############################################################################# +; carbon_cycle/mvi.ncl +; ############################################################################# +; +; Description: +; Calculates the model variability index (MVI), interannual variability +; (IAV) and mean, and draws them in a 3D scatter plot. +; +; Required diag_script_info attributes: +; region: region to be averaged. +; reference_dataset: reference for the MVI calculation. +; +; Optional diag_script_info attributes: +; mean_time_range: time period over which the mean is calculated (default: +; whole time span). +; trend_time_range: time period over which the trend is calculated +; (default: whole time span). +; mvi_time_range: time period over which the MVI is calculated (default: +; whole time span). +; stddev_threshold: ignore normalized (by mean) standard deviations smaller +; than this to avoid numerical problems in the MVI +; calculation (default: 1e-2). +; mask_below: mask input data with a normalized (by mean) absolute value +; below a threshold to avoid numerical problems in the MVI +; calculation. +; +; Caveats: +; If CRU is the reference dataset it is important to apply a landmask in +; the preprocessor section of the recipe. +; +; Modification history: +; 20191209-schlund_manuel: added new provenance tracking. +; 20180814-schlund_manuel: continued porting to v2.0. +; 20180619-wenzel_sabrina: adopted script to v2. +; 20170316-gier_bettina: Added cdo regridding, landmask for CRU obs. +; 20151130-lauer_axel: moved call to write_references to beginning of code. +; 20151104-righi_mattia: graphic part moved to external plot_scripts. +; 20151102-righi_mattia: replacing area functions with ESMValTool +; functions. +; 201507??-wenzel_sabrina: adapted to ESMValTool structure. +; 201505??-anav_alessandro: written. +; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/latlon.ncl" +load "$diag_scripts/shared/scaling.ncl" +load "$diag_scripts/shared/statistics.ncl" + +load "$diag_scripts/shared/plot/scatterplot.ncl" +load "$diag_scripts/shared/plot/style.ncl" + + +begin + + enter_msg(DIAG_SCRIPT, "") + DOMAIN = diag_script_info@region + + ; Variable + VAR0 = variable_info[0] + var0 = VAR0@short_name + DIM_VAR = ListCount(variable_info) + if (DIM_VAR .gt. 
1) then + error_msg("w", DIAG_SCRIPT, "", "this diagnostic supports only one " + \ + "variable, processing " + var0) + end if + + ; Input data + INFO0 = select_metadata_by_name(input_file_info, var0) + DATASETS = metadata_att_as_array(INFO0, "dataset") + DIM_DAT = dimsizes(DATASETS) + + log_info("++++++++++++++++++++++++++++++++++++++++++") + log_info(DIAG_SCRIPT + " (var: " + var0 + ")") + log_info("++++++++++++++++++++++++++++++++++++++++++") + +end + +begin + + ; --------------------------------------------------------------------------- + ; Read recipe and config data + ; --------------------------------------------------------------------------- + + ; Settings to avoid numerical problems + if (isatt(diag_script_info, "stddev_threshold")) then + TOLERANCE = diag_script_info@stddev_threshold + else + TOLERANCE = 1e-2 + end if + if (isatt(diag_script_info, "mask_below")) then + MASK = True + MASK_VALUE = diag_script_info@mask_below + else + MASK = False + MASK_VALUE = 0 + end if + + ; Plot file type + file_type = config_user_info@output_file_type + if (ismissing(file_type)) then + file_type = "ps" + end if + + ; Output plot directory + plot_dir = config_user_info@plot_dir + system("mkdir -p " + plot_dir) + + ; Check required diag_script_info attributes + req_atts = (/"region"/) + exit_if_missing_atts(diag_script_info, req_atts) + delete(req_atts) + + ; Set region + region = select_region(DOMAIN) + + ; Set time range + start_year = 9999 + end_year = -9999 + do idat = 0, DIM_DAT - 1 + if (INFO0[idat]@start_year .lt. start_year) then + start_year = INFO0[idat]@start_year + end if + if (INFO0[idat]@end_year .gt. end_year) then + end_year = INFO0[idat]@end_year + end if + end do + all_years = ispan(start_year, end_year, 1) + DIM_TIME = dimsizes(all_years) + + ; Specific time range for mean + if (isatt(diag_script_info, "mean_time_range")) then + custom_mean_time_range = True + mean_time_range = diag_script_info@mean_time_range + mean_time_ind = (/ind(all_years .eq. mean_time_range(0)), \ + ind(all_years .eq. mean_time_range(1))/) + log_info("Restricting mean calculation to time period " + \ + mean_time_range(0) + "-" + mean_time_range(1)) + else + custom_mean_time_range = False + end if + + ; Specific time ranges for trend + if (isatt(diag_script_info, "trend_time_range")) then + custom_trend_time_range = True + trend_time_range = diag_script_info@trend_time_range + trend_time_ind = (/ind(all_years .eq. trend_time_range(0)), \ + ind(all_years .eq. trend_time_range(1))/) + log_info("Restricting trend calculation to time period " + \ + trend_time_range(0) + "-" + trend_time_range(1)) + else + custom_trend_time_range = False + end if + + ; Specific time ranges for MVI + if (isatt(diag_script_info, "mvi_time_range")) then + custom_mvi_time_range = True + mvi_time_range = diag_script_info@mvi_time_range + mvi_time_ind = (/ind(all_years .eq. mvi_time_range(0)), \ + ind(all_years .eq. mvi_time_range(1))/) + log_info("Restricting mvi calculation to time period " + \ + mvi_time_range(0) + "-" + mvi_time_range(1)) + else + custom_mvi_time_range = False + end if + + ; --------------------------------------------------------------------------- + ; Read and preprocess data + ; --------------------------------------------------------------------------- + + ; Calculate yearly average + do idat = 0, DIM_DAT - 1 + data = INFO0[idat] + dataset_name = DATASETS(idat) + log_debug("Processing " + dataset_name) + + ; Read data + data_var = read_data(data) + if (typeof(data_var) .eq. 
"double") then + data_var := dble2flt(data_var) + end if + + ; Set fill value if necessary + if (.not. isatt(data_var, "_FillValue")) then + data_var@_FillValue = 1.0e20 + end if + + ; Mask data if desired + if (MASK) then + log_info("Masking data with normalized absolute values smaller than " + \ + MASK_VALUE) + norm_data = new(dimsizes(data_var), typeof(data_var)) + norm_data = abs(data_var / avg(abs(data_var))) + data_var = where(norm_data .le. MASK_VALUE, data_var@_FillValue, \ + data_var) + delete(norm_data) + end if + + ; Get dimensions + if (isdim(data_var, "lat") .and. isdim(data_var, "lon")) then + DIM_LAT = dimsizes(data_var&lat) + DIM_LON = dimsizes(data_var&lon) + else + error_msg("f", DIAG_SCRIPT, "", "Irregular grids are not supported") + end if + + ; Cut off Antarctica if necessary (for land-based observations) + if (isatt(VAR0, "reference_dataset") .and. \ + any(VAR0@reference_dataset .eq. (/"CRU", "MTE"/))) then + if (dimsizes(data_var&lat) .gt. 1) then + log_info("Masking Antarctica") + data_var(:, {:-60}, :) = data_var@_FillValue + end if + end if + + ; Computing annual mean + data_annual = time_operations(data_var, data@start_year, data@end_year, \ + "average", "yearly", True) + + ; Collect data in global array + if (.not. isvar("all_data_yearly")) then + all_data_yearly = \ + new((/DIM_DAT, DIM_TIME, DIM_LAT, DIM_LON/), float) + all_data_yearly!0 = "dataset" + all_data_yearly!1 = "year" + all_data_yearly!2 = "lat" + all_data_yearly!3 = "lon" + all_data_yearly&dataset = DATASETS + all_data_yearly&year = all_years + all_data_yearly&lat = data_var&lat + all_data_yearly&lon = data_var&lon + all_data_yearly@_FillValue = data_var@_FillValue + end if + + ; Check lat and lon dimensions + if (DIM_LAT .ne. dimsizes(all_data_yearly&lat) .or. \ + DIM_LON .ne. dimsizes(all_data_yearly&lon)) then + error_msg("f", DIAG_SCRIPT, "", "Not all datasets are on the same " + \ + "grid (" + dataset_name + "), select 'regrid' in " + \ + "preprocessor options") + end if + + ; Save data at correct place + idx_1 = data@start_year - start_year + idx_2 = dimsizes(all_years) - (end_year - data@end_year) - 1 + all_data_yearly(idat, idx_1:idx_2, :, :) = data_annual + copy_VarAtts(data_annual, all_data_yearly) + copy_VarCoords(data_annual, all_data_yearly(idat, :, :, :)) + delete([/data_var, data_annual, idx_1, idx_2/]) + end do + + ; --------------------------------------------------------------------------- + ; Compute means, trends and model variability indices (MVI) globally + ; --------------------------------------------------------------------------- + + ; MVI array + all_data_mvi = new((/DIM_DAT, DIM_LAT, DIM_LON/), float) + all_data_mvi!0 = "dataset" + all_data_mvi!1 = "lat" + all_data_mvi!2 = "lon" + all_data_mvi&dataset = DATASETS + all_data_mvi&lat = all_data_yearly&lat + all_data_mvi&lon = all_data_yearly&lon + all_data_mvi@_FillValue = all_data_yearly@_FillValue + + ; Trend array + all_data_trend = new((/DIM_DAT, DIM_LAT, DIM_LON/), float) + all_data_trend!0 = "dataset" + all_data_trend!1 = "lat" + all_data_trend!2 = "lon" + all_data_trend&dataset = DATASETS + all_data_trend&lat = all_data_yearly&lat + all_data_trend&lon = all_data_yearly&lon + all_data_trend@_FillValue = all_data_yearly@_FillValue + + ; Determine reference dataset + ref_idx = ind(DATASETS .eq. 
VAR0@reference_dataset) + ref_idx := ref_idx(0) + if (ismissing(ref_idx)) then + error_msg("f", DIAG_SCRIPT, "", "no adequate reference dataset provided") + end if + log_info("Reference dataset: " + DATASETS(ref_idx)) + + ; Restrict time range of reference dataset if desired + if (custom_mvi_time_range) then + data_ref = all_data_yearly(ref_idx, mvi_time_ind(0):mvi_time_ind(1), \ + :, :) + else + data_ref = all_data_yearly(ref_idx, :, :, :) + end if + ref_stddev = dim_stddev_n(data_ref, 0) + + ; Normalize std_dev with mean to avoid numerical problems and remove very + ; small values + norm = avg(ref_stddev) + ref_stddev = ref_stddev / norm + ref_stddev = where(ref_stddev .le. TOLERANCE, ref_stddev@_FillValue, \ + ref_stddev) + + ; Loop over datasets + do idat = 0, DIM_DAT - 1 + + ; Datasets for MVI (restrict time range if desired) + if (custom_mvi_time_range) then + data_temp = all_data_yearly(idat, mvi_time_ind(0):mvi_time_ind(1), \ + :, :) + else + data_temp = all_data_yearly(idat, :, :, :) + end if + dat_stddev = dim_stddev_n(data_temp, 0) + dat_stddev = where(ismissing(ref_stddev), dat_stddev@_FillValue, \ + dat_stddev) + dat_stddev = dat_stddev / norm + dat_stddev = where(dat_stddev .le. TOLERANCE, dat_stddev@_FillValue, \ + dat_stddev) + + ; Compute MVI + all_data_mvi(idat, :, :) = (dat_stddev / ref_stddev - \ + ref_stddev / dat_stddev) ^ 2 + delete([/dat_stddev, data_temp/]) + + ; Compute trend (restrict time range if desired) + if (custom_trend_time_range) then + dataset = all_data_yearly(idat, \ + trend_time_ind(0):trend_time_ind(1), :, :) + rc = regCoef_n( \ + 1.0 * ispan(trend_time_range(0), trend_time_range(1), 1), \ + dataset, 0, 0) + else + dataset = all_data_yearly(idat, :, :, :) + rc = regCoef_n(1.0 * all_years, dataset, 0, 0) + end if + rc!0 = "lat" + rc!1 = "lon" + rc&lat = dataset&lat + rc&lon = dataset&lon + all_data_trend(idat, :, :) = rc + delete([/dataset, rc/]) + end do + delete([/ref_stddev, norm/]) + copy_VarAtts(all_data_yearly, all_data_trend) + + ; Special case for lai: + ; Since datasets have different land covers, all sparse vegetated points must + ; be masked + if (isStrSubset(var0, "lai")) then + all_data_yearly = where(all_data_yearly .ge. 0.025, \ + all_data_yearly, all_data_yearly@_FillValue) + end if + + ; Compute total temporal mean + if (custom_mean_time_range) then + all_data_mean = dim_avg_n_Wrap(all_data_yearly(\ + :, mean_time_ind(0):mean_time_ind(1), :, :), 1) + else + all_data_mean = dim_avg_n_Wrap(all_data_yearly, 1) + end if + all_data_mean!0 = "dataset" + all_data_mean!1 = "lat" + all_data_mean!2 = "lon" + all_data_mean&dataset = DATASETS + all_data_mean&lat = all_data_yearly&lat + all_data_mean&lon = all_data_yearly&lon + all_data_mean@units = all_data_yearly@units + delete(all_data_yearly) + + ; --------------------------------------------------------------------------- + ; Compute means, trends and MVI regionally averaged + ; --------------------------------------------------------------------------- + + ; Setup arrays + regional_mean = new((/DIM_DAT/), typeof(all_data_mean)) + regional_mean!0 = "dataset" + regional_mean&dataset = DATASETS + regional_trend = new((/DIM_DAT/), typeof(all_data_trend)) + regional_trend!0 = "dataset" + regional_trend&dataset = DATASETS + regional_mvi = new((/DIM_DAT/), typeof(all_data_mvi)) + regional_mvi!0 = "dataset" + regional_mvi&dataset = DATASETS + + ; Set type of area operation (flux vs. 
non-flux variables) + ; Flux variables need to be integrated and area weighted ("sum"), non-flux + ; variables need to be averaged, area weighted and normalized ("average") + if (all_data_mean@units .eq. "kg m-2 s-1" .and. \ + var0 .ne. "pr") then + sp_opt = "sum" + else + sp_opt = "average" + end if + + ; Regional averages/sums for every dataset + do idat = 0, DIM_DAT - 1 + regional_mean(idat) = area_operations(all_data_mean(idat, :, :), \ + region(0), region(1), region(2), \ + region(3), sp_opt, True) + regional_trend(idat) = area_operations(all_data_trend(idat, :, :), \ + region(0), region(1), region(2), \ + region(3), sp_opt, True) + regional_mvi(idat) = area_operations(all_data_mvi(idat, :, :), \ + region(0), region(1), region(2), \ + region(3), "average", True) + end do + + ; Diagnostic- and variable-specific units conversions + regional_mvi@units = "1" + trend_tmp = regional_trend + if ((isatt(VAR0, "plot_units")) .and. \ + (regional_mean@units .ne. VAR0@plot_units)) then + new_units = VAR0@plot_units + regional_mean = convert_units(regional_mean, new_units) + if (new_units .eq. "degC") then + trend_tmp@units = "degC" + else + trend_tmp = convert_units(regional_trend, new_units) + end if + end if + + ; Units conversion for trend + if (INFO0[0]@mip .eq. "Lmon") then + regional_trend = trend_tmp + temp_unit = "yr-1" + else + regional_trend = trend_tmp * 10.0 + temp_unit = "decade-1" + end if + if (trend_tmp@units .ne. "1") then + regional_trend@units = trend_tmp@units + " " + temp_unit + else + regional_trend@units = temp_unit + end if + delete([/trend_tmp, temp_unit/]) + + ; --------------------------------------------------------------------------- + ; Write NETCDF + ; --------------------------------------------------------------------------- + + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) + + ; Mean + new_path = work_dir + "mean_" + var0 + "_" + DOMAIN + ".nc" + regional_mean@var = var0 + regional_mean@diag_script = DIAG_SCRIPT + regional_mean@ncdf = new_path + ncdf_outfile_mean = ncdf_write(regional_mean, new_path) + + ; Trend + new_path = work_dir + "trend_" + var0 + "_" + DOMAIN + ".nc" + regional_trend@var = var0 + regional_trend@diag_script = DIAG_SCRIPT + regional_trend@ncdf = new_path + ncdf_outfile_trend = ncdf_write(regional_trend, new_path) + + ; MVI + new_path = work_dir + "MVI_" + var0 + "_" + DOMAIN + ".nc" + regional_mvi@var = var0 + regional_mvi@diag_script = DIAG_SCRIPT + regional_mvi@ncdf = new_path + ncdf_outfile_mvi = ncdf_write(regional_mvi, new_path) + + ; --------------------------------------------------------------------------- + ; Plots + ; --------------------------------------------------------------------------- + + ; Set plot output file + outfile = plot_dir + var0 + "_" + DOMAIN + wks = gsn_open_wks(file_type, outfile) + gsn_define_colormap(wks, "BlAqGrYeOrReVi200") + gsn_reverse_colormap(wks) + + ; Axis labels + if (custom_mean_time_range) then + mean_label = mean_time_range(0) + "-" + mean_time_range(1) + " Mean" + else + mean_label = start_year + "-" + end_year + " Mean" + end if + if (custom_trend_time_range) then + trend_label = trend_time_range(0) + "-" + trend_time_range(1) + \ + " Linear trend" + else + trend_label = start_year + "-" + end_year + " Linear trend" + end if + if (custom_mvi_time_range) then + mvi_label = mvi_time_range(0) + "-" + mvi_time_range(1) + "~C~MVI" + else + mvi_label = start_year + "-" + end_year + "~C~MVI" + end if + + ; Pack data + data_arr = new((/3, dimsizes(regional_mean)/), float) + 
data_arr(0, :) = (/regional_mean/) + data_arr(1, :) = (/regional_trend/) + data_arr(2, :) = (/regional_mvi/) + data_arr!0 = "statistic" + data_arr!1 = "datasets" + data_arr&statistic = (/mean_label, trend_label, mvi_label/) + data_arr&datasets = DATASETS + data_arr@units = (/regional_mean@units, regional_trend@units, "1"/) + + ; Set levels for the color coding + tmp = regional_mvi + tmp(ref_idx) = tmp@_FillValue + nlevs = 11 + tmp := nice_mnmxintvl(min(tmp) - 0.01, max(tmp) + 0.01, nlevs, True) + levs = decimalPlaces(fspan(tmp(0), tmp(1), 11), 2, True) + delete(tmp) + + ; Draw a 3D scatterplot + data_arr@res_tiMainString = var0 + " - " + region@name + plot = scatterplot3D(wks, data_arr, var0, levs) + plot_path = outfile + "." + file_type + log_info("Wrote " + plot_path) + draw(plot) + frame(wks) + + ; Provenance tracking + caption = ("Scatterplot for multiyear average " + var0 + " in x " + \ + "axis, its linear trend in y axis, and MVI. Similar to " + \ + "Anav et al. (2013) Figure 1 (bottom).") + statistics = (/"mean", "trend", "mvi"/) + authors = (/"anav_alessandro", "wenzel_sabrina", "righi_mattia", \ + "schlund_manuel"/) + plot_type = "scatter" + references = (/"anav13jclim"/) + ancestors = metadata_att_as_array(INFO0, "filename") + log_provenance(ncdf_outfile_mvi, plot_path, caption, statistics, \ + DOMAIN, plot_type, authors, references, ancestors) + delete(data_arr) + + leave_msg(DIAG_SCRIPT, "") + +end diff --git a/esmvaltool/diag_scripts/carbon_cycle/two_variables.ncl b/esmvaltool/diag_scripts/carbon_cycle/two_variables.ncl new file mode 100644 index 0000000000..7c6642b48b --- /dev/null +++ b/esmvaltool/diag_scripts/carbon_cycle/two_variables.ncl @@ -0,0 +1,253 @@ +; ############################################################################# +; carbon_cycle/two_variables.ncl +; ############################################################################# +; +; Description: +; Draw a scatter plot with two variables. +; +; Required diag_script_info attributes: +; region: the region to be averaged. +; +; Modification history: +; 20191209-schlund_manuel: added new provenance tracking. +; 20180823-schlund_manuel: continued porting to v2.0. +; 20180619-wenzel_sabrina: adopted script to v2. +; 20170511-gier_bettina: added metadata to files. +; 20151130-lauer_axel: corrected passing of file type from namelist to +; plotting functions; moved call to write_references +; to beginning of code. +; 20151105-righi_mattia: graphic part moved to external plot_scripts. +; 20151104-righi_mattia: removing regridding, not necessary for this +; diagnostic. +; 201507??-wenzel_sabrina: adapted to ESMValTool structure. +; 201505??-anav_alessandro: written. +; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/latlon.ncl" +load "$diag_scripts/shared/scaling.ncl" +load "$diag_scripts/shared/set_operators.ncl" +load "$diag_scripts/shared/statistics.ncl" + +load "$diag_scripts/shared/plot/scatterplot.ncl" +load "$diag_scripts/shared/plot/style.ncl" + + +begin + + enter_msg(DIAG_SCRIPT, "") + DOMAIN = diag_script_info@region + + ; Variable + DIM_VAR = ListCount(variable_info) + if (DIM_VAR .ne. 
2) then + error_msg("f", DIAG_SCRIPT, "", "this diagnostic expects exactly two " + \ + "variables") + end if + var = new(DIM_VAR, string) + var(0) = variable_info[0]@short_name + var(1) = variable_info[1]@short_name + + ; Input data + info0 = select_metadata_by_name(input_file_info, var(0)) + info1 = select_metadata_by_name(input_file_info, var(1)) + projects0 = metadata_att_as_array(info0, "project") + projects1 = metadata_att_as_array(info1, "project") + DATASETS = NewList("lifo") + ListAppend(DATASETS, metadata_att_as_array(info0, "dataset")) + ListAppend(DATASETS, metadata_att_as_array(info1, "dataset")) + if any(DATASETS[0](ind(projects0 .ne. "OBS")) .ne. \ + DATASETS[1](ind(projects1 .ne. "OBS"))) then + error_msg("f", DIAG_SCRIPT, "", "the two variables do not cover the " + \ + "same datasets, this is not supported yet") + end if + if any(projects0 .ne. projects1) then + error_msg("f", DIAG_SCRIPT, "", "the two variables do not include the " + \ + "same number of observations") + end if + DIM_DAT = dimsizes(DATASETS[0]) + + log_info("++++++++++++++++++++++++++++++++++++++++++") + log_info(DIAG_SCRIPT + " (vars: " + var(0) + ", " + var(1) + ")") + log_info("++++++++++++++++++++++++++++++++++++++++++") + + ; --------------------------------------------------------------------------- + ; Read recipe and config data + ; --------------------------------------------------------------------------- + + ; Plot file type + file_type = config_user_info@output_file_type + if (ismissing(file_type)) then + file_type = "ps" + end if + + ; Output plot directory + plot_dir = config_user_info@plot_dir + system("mkdir -p " + plot_dir) + + ; Check required diag_script_info attributes + req_atts = (/"region"/) + exit_if_missing_atts(diag_script_info, req_atts) + delete(req_atts) + if (.not. isatt(diag_script_info, "legend_outside")) then + diag_script_info@legend_outside = False + end if + + ; Set region + region = select_region(DOMAIN) + + ; --------------------------------------------------------------------------- + ; Read and preprocess data + ; --------------------------------------------------------------------------- + + ; Loop over all variables and datasets + all_data = NewList("lifo") + do ivar = 0, DIM_VAR - 1 + INFO = select_metadata_by_name(input_file_info, var(ivar)) + + ; Array + ListAppend(all_data, new((/DIM_DAT/), float)) + all_data[ivar]!0 = "dataset" + all_data[ivar]&dataset = DATASETS[ivar] + + ; Time range + start_year = 9999 + end_year = -9999 + do idat = 0, DIM_DAT - 1 + if (INFO[idat]@start_year .lt. start_year) then + start_year = INFO[idat]@start_year + end if + if (INFO[idat]@end_year .gt. end_year) then + end_year = INFO[idat]@end_year + end if + end do + + ; Read data + do idat = 0, DIM_DAT - 1 + data = INFO[idat] + log_debug("Processing " + var(ivar) + " of " + DATASETS[ivar](idat)) + + ; Read variable + var_data = read_data(data) + + ; Compute annual mean + tmp = time_operations(var_data, data@start_year, data@end_year, \ + "average", "annualclim", True) + + ; Compute area-weighted sum + area_op = area_operations(tmp, region(0), region(1), region(2), \ + region(3), "sum", True) + + ; Assign to global array + all_data[ivar](idat) = area_op + delete([/var_data, tmp, area_op/]) + end do + + ; Convert units if appropriate + if ((isatt(variable_info[ivar], "plot_units")) .and. \ + (all_data[ivar]@units .ne. 
variable_info[ivar]@plot_units)) then + all_data[ivar] = convert_units(all_data[ivar], \ + variable_info[ivar]@plot_units) + end if + end do + + ; --------------------------------------------------------------------------- + ; Write NETCDF + ; --------------------------------------------------------------------------- + + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) + + ; Save individual variables + do ivar = 0, DIM_VAR - 1 + new_path = work_dir + var(ivar) + "_" + DOMAIN + ".nc" + all_data[ivar]@var = var(ivar) + all_data[ivar]@diag_script = DIAG_SCRIPT + all_data[ivar]@ncdf = new_path + ncdf_outfile = ncdf_write(all_data[ivar], new_path) + delete(ncdf_outfile) + end do + + ; Save both variables + new_path = work_dir + var(0) + "_" + var(1) + "_" + DOMAIN + ".nc" + both_vars = new((/DIM_VAR, DIM_DAT/), typeof(all_data[0])) + do ivar = 0, DIM_VAR - 1 + both_vars(ivar, :) = all_data[ivar](:) + end do + both_vars!0 = "variable" + both_vars!1 = "dataset" + both_vars&variable = var + both_vars&dataset = DATASETS[0] + both_vars&dataset(ind(projects0 .eq. "OBS")) = "OBS" + both_vars@var = "scatter" + both_vars@diag_script = DIAG_SCRIPT + both_vars@ncdf = new_path + ncdf_outfile_both_vars = ncdf_write(both_vars, new_path) + + ; --------------------------------------------------------------------------- + ; Plots + ; --------------------------------------------------------------------------- + + data_arr = new((/DIM_VAR, DIM_DAT(0)/), float) + + ; Collect data + do ivar = 0, DIM_VAR - 1 + ref_idx = ind(DATASETS[ivar] .eq. variable_info[ivar]@reference_dataset) + ref_idx := ref_idx(0) + if (ismissing(ref_idx)) then + error_msg("f", DIAG_SCRIPT, "", "no adequate reference dataset " + \ + "for " + var(ivar) + " provided") + end if + log_info("Reference dataset for " + var(ivar) + ": " + \ + DATASETS[ivar](ref_idx)) + + ; Replace FillValue in the dataset array with the value of the reference + all_data[ivar] = \ + where(ismissing(all_data[ivar]), all_data[ivar](ref_idx), \ + all_data[ivar]) + data_arr(ivar, :) = (/all_data[ivar]/) + end do + + ; Setup metadata of output array + data_arr!0 = "statistic" + data_arr!1 = "dataset" + data_arr&statistic = (/var(0), var(1)/) + data_arr&dataset = DATASETS[0] + data_arr&dataset(ind(projects0 .eq. "OBS")) = "OBS" + data_arr@units = (/all_data[0]@units, all_data[1]@units/) + data_arr@legend_outside = diag_script_info@legend_outside + data_arr@annots = data_arr&dataset + + ; Draw plot + outfile = plot_dir + var(0) + "-" + var(1) + "_scatter_" + DOMAIN + wks = gsn_open_wks(file_type, outfile) + wks@legendfile = outfile + "_legend" + data_arr@res_tiMainString = var(0) + "-" + var(1) + " - " + region@name + plot = scatterplot_markers(wks, data_arr, var(0), info0) + plot_path = outfile + "." + file_type + plot@outfile = plot_path + log_info("Wrote " + plot_path) + draw(plot) + frame(wks) + + ; Provenance tracking + caption = ("Scatterplot for " + var(0) + " and " + var(1) + \ + " over the period " + start_year + "-" + end_year + \ + ". Similar to Anav et al. 
(2013), Figure 12.")
+  statistics = (/"mean"/)
+  authors = (/"anav_alessandro", "wenzel_sabrina", "righi_mattia", \
+             "schlund_manuel"/)
+  plot_type = "scatter"
+  references = (/"anav13jclim"/)
+  ancestors = metadata_att_as_array(info0, "filename")
+  ancestors := array_append_record( \
+    ancestors, metadata_att_as_array(info1, "filename"), 0)
+  log_provenance(ncdf_outfile_both_vars, plot_path, caption, statistics, \
+                 DOMAIN, plot_type, authors, references, ancestors)
+  delete(data_arr)
+
+  leave_msg(DIAG_SCRIPT, "")
+
+end
diff --git a/esmvaltool/diag_scripts/carbon_ec/carbon_aux.ncl b/esmvaltool/diag_scripts/carbon_ec/carbon_aux.ncl
new file mode 100644
index 0000000000..048f891ee2
--- /dev/null
+++ b/esmvaltool/diag_scripts/carbon_ec/carbon_aux.ncl
@@ -0,0 +1,178 @@
+; #############################################################################
+; # EXTERNAL CARBON FUNCTIONS
+; #############################################################################
+; Please consider using or extending existing routines before adding new ones.
+; Check the header of each routine for documentation.
+;
+; Contents:
+;    function cumulate_carbon: cumulates the variable over time
+;    function Cond_PDF: calculates the normal and conditional PDF
+;
+; #############################################################################
+
+load "$diag_scripts/../interface_scripts/interface.ncl"
+load "$diag_scripts/shared/statistics.ncl"
+load "$diag_scripts/shared/latlon.ncl"
+
+; #############################################################################
+undef("cumulate_carbon")
+function cumulate_carbon(NBP:numeric)
+
+; Arguments:
+;    NBP : carbon flux variable (1-D or 3-D, time as leftmost dimension)
+;
+; Return value:
+;    array containing cumulated carbon fluxes
+;
+; Description:
+;    Calculates cumulated carbon fluxes.
+;
+; Caveats:
+;
+; Modification history:
+;    20191118-zechlau_sabrina: written.
+;
+local funcname, scriptname, idx_new, NBP_tmp, y
+
+begin
+
+  funcname = "cumulate_carbon"
+  scriptname = "diag_scripts/carbon_ec/carbon_aux.ncl"
+  enter_msg(scriptname, funcname)
+
+  ; Integrate: each element is the running sum of all previous fluxes
+  idx_new = dimsizes(NBP)
+  NBP_tmp = new(dimsizes(NBP), typeof(NBP))
+
+  if (dimsizes(idx_new).eq.3) then
+    NBP_tmp(0, :, :) = 0.0
+    do y = 0, dimsizes(NBP(:, 0, 0)) - 2
+      NBP_tmp(y + 1, :, :) = NBP_tmp(y, :, :) + NBP(y, :, :)
+    end do
+
+    copy_VarAtts(NBP, NBP_tmp)
+    copy_VarCoords(NBP, NBP_tmp)
+
+  elseif (dimsizes(idx_new).eq.1) then
+    NBP_tmp(0) = 0.0
+    do y = 0, dimsizes(NBP) - 2
+      NBP_tmp(y + 1) = NBP_tmp(y) + NBP(y)
+    end do
+    copy_VarAtts(NBP, NBP_tmp)
+    copy_VarCoords(NBP, NBP_tmp)
+
+  else
+    error_msg("f", scriptname, funcname, \
+              "rank " + dimsizes(idx_new) + " of NBP not supported")
+  end if
+  delete(y)
+
+  return(NBP_tmp)
+end
+
+; #######################################################################
+undef("Cond_PDF")
+function Cond_PDF(xfit[*]:numeric,
+                  yfit[*]:numeric,
+                  Xval[*]:numeric,
+                  Yval[*]:numeric,
+                  MSE[*]:numeric,
+                  obs_std[*]:numeric
+                  )
+
+; Arguments:
+;  * xfit : 1-D array of the X variable (from regression line y=a*x+b)
+;  * yfit : 1-D array of the Y variable (from regression line y=a*x+b)
+;  * Xval : 1-D array of the X variable (from models)
+;  * Yval : 1-D array of the Y variable (from models)
+;  * MSE : 1-D array of the mean squared error of the regression line
+;  * obs_std : 1-D array with mean and stddev of the observational constraint
+;
+; Description:
+;  * calculates the normal (Py_pr) and conditional (Py) distributions
+;    following Wilks, D.S.: Statistical Methods in the Atmospheric Sciences
+;
+; Return value (PDF):
+;    PDF@y = X values of distribution
+;    PDF@Py = Y values of conditional distribution
+;    PDF@Py_pr = Y values of normal (prior) distribution
+;
+; Modification history:
+;  * 20140128-zechlau_sabrina: written.
+
+local Pi, nfitx, mfity, miny, maxy, dy, y, sigma, \
+  Px, dx, Pxy, Py_given_x, Py, Py_pr, n, m, mn_pr, std_pr
+begin
+
+  ; Return variable as logical
+  PDF = True
+
+  Pi = 3.142
+  nfitx = dimsizes(xfit)
+
+  miny = min(Yval)-1.1*(max(Yval)-min(Yval))
+  maxy = max(Yval)+1.1*(max(Yval)-min(Yval))
+
+  mfity = fspan(0, 500, 500)
+  dy = (maxy-miny)/dimsizes(mfity)
+  y = new(dimsizes(mfity), "float")
+  y = miny+dy*mfity
+
+  ; calculate +/- 1 sigma confidence limits of the regression line
+  sigma = new(nfitx, "float")
+  do n = 0, nfitx-1
+    sigma(n) = sqrt(MSE) * sqrt(1.0 + \
+                                (1.0 / dimsizes(Xval)) + \
+                                (sum((xfit(n) - avg(Xval)) ^ 2) \
+                                 / (dimsizes(Xval) * sum(Xval ^ 2))))
+  end do
+
+  if (obs_std(0).ne.0) then
+    ; Calculate PDF for conditional (observed) values
+    Px = new(nfitx, "float")
+    Px!0 = "x"
+    Px&x = xfit
+    dx = xfit(1)-xfit(0)
+    Px = (1 / (obs_std(1) * sqrt(2 * Pi))) * exp(-0.5 * (((xfit - obs_std(0)) \
+         / obs_std(1)) ^ 2))
+
+    ; Calculate contours of probability in (x,y) space
+    Pxy = new((/nfitx, dimsizes(mfity)/), "float")
+    Pxy!0 = "x"
+    Pxy&x = xfit
+    Pxy!1 = "y"
+    Pxy&y = y
+    Py = new(dimsizes(mfity), "float")
+    Py!0 = "y"
+    Py&y = y
+
+    do m = 0, dimsizes(mfity)-1
+      Py(m) = 0.0
+      do n = 0, nfitx-1
+        Py_given_x = (1/(sigma(n) * sqrt(2 * Pi))) * \
+          exp(-0.5 * (((y(m) - yfit(n)) / sigma(n)) ^ 2))
+        Pxy(n, m) = Px(n) * Py_given_x
+        Py(m) = Py(m) + Pxy(n, m)*dx
+      end do
+    end do
+
+    ; Assign return values for conditional PDF
+    PDF@Py = Py
+  end if
+
+  ; Calculate mean and stdev of (equal model weight) prior
+  mn_pr = avg(Yval(:))
+  std_pr = stddev(Yval(:))
+
+  Py_pr = new(dimsizes(mfity), "float")
+  Py_pr = (1 / (std_pr * sqrt(2 * Pi))) * exp(-0.5 * (((y - mn_pr) / \
+          std_pr) ^ 2))
+
+  ; Assign return values for prior PDF
+  PDF@y = y
+  PDF@Py_pr = Py_pr
+  PDF@sigma = sigma
+
+  return(PDF)
+
+end
diff --git a/esmvaltool/diag_scripts/carbon_ec/carbon_beta.ncl b/esmvaltool/diag_scripts/carbon_ec/carbon_beta.ncl
new file mode 100644
index 0000000000..0d6319ea85
--- /dev/null
+++ b/esmvaltool/diag_scripts/carbon_ec/carbon_beta.ncl
@@ -0,0 +1,295 @@
+; #######################################################################
+; carbon_beta.ncl
+; Author: Sabrina Zechlau (DLR, Germany)
+; #######################################################################
+; Description:
+;    creates two panels to diagnose beta GPP at a 2x[CO2] increase
+;    Panel: a) correlation between GPP and [co2] increase,
+;           b) bar chart of the trend of the above correlation
+;
+; Required info attributes (scripts):
+;    - styleset       project for line, color, symbol styles
+;
+; Optional info attributes (scripts):
+;    - bc_xmax_year   end year (default = last year of all model datasets
+;                     available)
+;    - bc_xmin_year   start year (default = first year of all model datasets
+;                     available)
+;
+; Modification history
+;  * 20200406-zechlau_sabrina: code rewritten for ESMValTool v2.0
+;  * 2015xxxx-wenzel_sabrina: written
+; ########################################################################
+load "$diag_scripts/../interface_scripts/interface.ncl"
+
+load "$diag_scripts/carbon_ec/carbon_aux.ncl"
+
+load "$diag_scripts/shared/latlon.ncl"
+load "$diag_scripts/shared/statistics.ncl"
+load "$diag_scripts/shared/scaling.ncl"
+load "$diag_scripts/shared/ensemble.ncl"
+
+load
"$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/scatterplot.ncl" +load "$diag_scripts/shared/plot/xy_line.ncl" +load "$diag_scripts/shared/plot/legends.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" +load "$diag_scripts/shared/plot/carbon_plots.ncl" + +begin + + enter_msg(DIAG_SCRIPT, "") + + tmp = metadata_att_as_array(input_file_info, "short_name") + variables = get_unique_values(tmp) + delete(tmp) + + ; Load var_info + var = variable_info[0]@short_name + info = select_metadata_by_name(input_file_info, var) + datasetnames = metadata_att_as_array(info, "dataset") + dim_MOD = ListCount(info) + + log_info("+++++++++++++++++++++++++++++++++++++++++++++") + log_info(DIAG_SCRIPT + " (var: " + var + ")") + log_info("+++++++++++++++++++++++++++++++++++++++++++++") + + ; ****************************************************************** + ; Create output plot directory + plot_dir = config_user_info@plot_dir + system("mkdir -p " + plot_dir) + system("mkdir -p " + config_user_info@work_dir) + + ; Plot file type + file_type = config_user_info@output_file_type + if (ismissing(file_type)) then + file_type = "ps" + end if + + ; Check for required settings + exit_if_missing_atts(diag_script_info, "styleset") + + ; optional input parameters + if (isatt(diag_script_info, "bc_xmax_year")) then + xMax_year = toint(diag_script_info@bc_xmax_year) + else + xMax_year = max(metadata_att_as_array(info, "end_year")) + end if + if (isatt(diag_script_info, "bc_xmin_year")) then + xMin_year = toint(diag_script_info@bc_xmin_year) + else + xMin_year = max(metadata_att_as_array(info, "start_year")) + end if + + ; Call plot scripts + plot_file = "beta_" + xMin_year + "-" + xMax_year + wks = gsn_open_wks(file_type, plot_dir + plot_file) + colors = project_style(info, diag_script_info, "colors") + markers = project_style(info, diag_script_info, "markers") + thicks = project_style(info, diag_script_info, "avgstd") + lgLabels = datasetnames + + ; =============================================================== + ; setup data holders for plots + nyMax = max(metadata_att_as_array(info, "end_year")) + nyMin = max(metadata_att_as_array(info, "start_year")) + nMax = nyMax - nyMin + 1 + + pctco = new((/nMax+10/), double) + pctco(0) = 285 + do tt = 1, nMax+10-1 + pctco(tt) = pctco(tt-1) + 0.01 * pctco(tt-1) + end do + pctco!0 = "year" + pctco&year = ispan(nyMin-10, nyMax, 1) + + ; -------------------------------------------------------------- + ; define arrey + betaGPP = new((/dim_MOD/), float) + delta0 = new((/dim_MOD/), float) + delta0_std = new((/dim_MOD/), float) + rcgpp = new((/dim_MOD/), float) + aY0data = new((/dim_MOD, nMax/), double) + Yfitda = new((/dim_MOD, nMax/), double) + + ; loop for models + do imod = 0, dim_MOD-1 + + ; Read data + A0 = read_data(info[imod]) + A0&time = pctco&year(10:) + + ; calculate annual mean for VAR: + ymin = 0 + ymax = toint(dimsizes(A0)-1) + tmp0 = (A0 * 3600. * 24. 
* 365) / 1e12 + copy_VarMeta(A0, tmp0) + aY0data(imod, ymin:ymax) = (/tmp0/) + + rc = regline_stats(aY0data(imod, ymin:ymax), \ + pctco({year|nyMin:nyMax})) + Yfitda(imod, ymin:ymax) = rc@Yest + + rcgpp(imod) = tofloat(rc) + + if (xMin_year.ge.info[imod]@start_year) then + delta0(imod) = (avg(tofloat(tmp0({time|xMax_year-4:xMax_year})))/ \ + avg(tofloat(tmp0({time|xMin_year:xMin_year+4})))) + + delta0_std(imod) = sqrt( \ + (stddev(tofloat(tmp0( \ + {time|xMax_year-4:xMax_year})))/ \ + avg(tofloat(tmp0( \ + {time|xMin_year:xMin_year+4})))) ^ 2 + \ + (stddev(tofloat(tmp0( \ + {time|xMin_year:xMin_year+4}))) * \ + avg(tofloat(tmp0( \ + {time|xMax_year-4:xMax_year})))/ \ + avg(tofloat(tmp0( \ + {time|xMin_year:xMin_year+4}))) ^ 2) ^ 2) + else + delta0(imod) = (avg(tmp0({time|xMax_year-4:xMax_year}))/ \ + avg(tmp0(0:4))) + + delta0_std(imod) = sqrt( \ + (stddev(tmp0({time|xMax_year-4:xMax_year}))/ \ + avg(tmp0(0:4))) ^ 2 + (stddev(tmp0(0:4)) * \ + avg(tmp0({time|xMax_year-4:xMax_year}))/ \ + avg(tmp0(0:4)) ^ 2) ^ 2) + + end if + betaGPP(imod) = delta0(imod) * 1/0.9 + + delete([/tmp0, ymin, ymax, A0, rc/]) + end do ; model loop + + ; --------------------------------------------------------------------- + ; Output to NetCDF + new_path = config_user_info@work_dir + "/" + new_path = new_path + "beta_" + xMax_year + "-" + xMin_year + ".nc" + + ; Attach attributes to the results + betavar = (/betaGPP, delta0_std/) + betavar!0 = "case" + betavar&case = (/"mean", "stddev"/) + betavar!1 = "model" + betavar&model = datasetnames + betavar@ncdf = new_path + betavar@var = "beta" + betavar@diag_script = (/DIAG_SCRIPT/) + betavar@rcgpp = rcgpp + betavar@betaGPP = betaGPP + betavar@delta0_std = delta0_std + + ; Write NetCDF output + ncdf_outfile = ncdf_write(betavar, new_path) + + ; ----------------------------------------------------------- + ; creat plots + + ; PLOT - Panel A + XStg = "Annual CO~B~2~N~ [ppmv]" + YStg = "Annual GPP [GtC/yr]" + data_arr = (/pctco({year|nyMin:nyMax}), aY0data(0, :)/) + data_arr!0 = "datasets" + data_arr&datasets = (/XStg, YStg/) + data_arr@res_tiMainString = "" + data_arr@res_tiMainFontHeightF = 0.025 + data_arr@res_gsnCenterString = "" + data_arr@res_gsnMaximize = True + data_arr@res_xyMonoMarker = False + data_arr@res_xyMarkers = markers + data_arr@res_xyMarkerColors = "white" + data_arr@res_trXMinF = min(pctco({year|nyMin:nyMax})) + data_arr@res_trXMaxF = max(pctco({year|nyMin:nyMax})) + data_arr@res_trYMinF = 0 + data_arr@res_trYMaxF = max(aY0data) + 0.5*max(aY0data) + data_arr@res_vpWidthF = 0.5 + data_arr@res_vpHeightF = 0.4 + + data_arr@diag_script = DIAG_SCRIPT + diag_script_info@scatter_log = False + + plot = scatterplot(wks, data_arr, var, False, False, input_file_info) + + txres = True + MarkerSizeF = where(datasetnames.eq."CESM1-BGC", 20., 17.) 
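+  ; where() works element-wise here: CESM1-BGC is drawn with marker
+  ; size 20, all other datasets with size 17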
+ txres@gsMarkerThicknessF = 3 + + lineres = True + lineres@gsLineDashPattern = 0 + lineres@gsLineThicknessF = 3 + + ; Add year strings as markers + do imod = 0, dim_MOD-1 + marker = unique_string("marker") + txres@gsMarkerColor = colors(imod) + txres@gsMarkerSizeF = MarkerSizeF(imod) + txres@gsMarkerIndex = markers(imod) + add_markers(wks, plot, txres, pctco({year|nyMin:nyMax}), aY0data(imod, :)) + + lineres@gsLineColor = colors(imod) + plot@$marker$ = gsn_add_polyline(wks, plot, Yfitda(imod, :), \ + aY0data(imod, :), lineres) + end do + draw(plot) + frame(wks) + delete([/plot, XStg, YStg, data_arr, marker/]) + + ; PLOT - Panel B + print(datasetnames + " " + betaGPP) + + YStg = str_upper(var) + "(2xCO~B~2~N~)/" + str_upper(var) + "(1xCO~B~2~N~)" + + data_arr = new((/dimsizes(betaGPP), 1/), float) + data_arr(:, 0) = betaGPP + data_arr!0 = "datasets" + data_arr&datasets(0) = datasetnames + data_arr!1 = "yaxis" + data_arr&yaxis = YStg + data_arr@res_tiMainString = "" + data_arr@res_tiMainFontHeightF = 0.025 + data_arr@res_gsnCenterString = "" + + plot = barchart(wks, data_arr, var, info) + draw(plot) + frame(wks) + + delete([/betaGPP, plot, delta0, plot, Yfitda/]) + + ; create separate legend + marker_thicks = datasetnames + marker_sizes = datasetnames + + marker_thicks = 3 + marker_sizes = 0.02 + + leg = True + leg@txFontQuality = "High" + leg@txFont = 25 + leg@txFontHeightF = 0.02 + leg@diag_script = DIAG_SCRIPT + leg@annots = datasetnames(::-1) + leg@colors = colors(::-1) + leg@markers = markers(::-1) + leg@thicks = marker_thicks + leg@sizes = marker_sizes + leg@ncols = 1 + + create_legend_lines(leg@annots, leg, plot_dir + \ + DIAG_SCRIPT + "_legend", "markers") + + plotname = plot_dir + plot_file + "." + file_type + + ; Call provenance logger + log_provenance(ncdf_outfile, \ + plotname, \ + "Climate models vs " + YStg, \ + (/"anomaly", "corr", "stddev"/), \ + (/"global"/),\ + (/""/), \ + (/"zechlau_sabrina"/), \ + (/"wenzel16nat"/), \ + metadata_att_as_array(info, "filename")) + +end diff --git a/esmvaltool/diag_scripts/carbon_ec/carbon_co2_cycle.ncl b/esmvaltool/diag_scripts/carbon_ec/carbon_co2_cycle.ncl new file mode 100644 index 0000000000..f37b7bc23a --- /dev/null +++ b/esmvaltool/diag_scripts/carbon_ec/carbon_co2_cycle.ncl @@ -0,0 +1,510 @@ +; ####################################################################### +; carbon_co2_cycle.ncl +; Author: Sabrina Zechlau (DLR, Germany) +; ####################################################################### +; Description: +; Creates Figure 1 of Wenzel et al. 2016 Nature: +; Panel: a) correlation of the [co2] seasonal cycle amplitude and +; [co2] increase +; b) barchart of the trend of the upper correlation +; and Figure 3 of Wenzel et al. 
2016 Nature: +; Panel: a) Emergent Constraint between beta and the trend of the +; [co2] seasonal cycle amplitude +; b) conditional PDF of the EC +; +; Required info attributes (scripts): +; - styleset project for line, color, symbol styles +; - nc_infile path of netCDF file containing beta +; (output from carbon_beta.ncl) +; +; Optional info attributes (scripts): +; - bc_xmax_year end year (default = last year of all model datasets +; available) +; - bc_xmin_year start year (default = first year of all model datasets +; available) +; +; Required variable attributes (variables): +; - reference_dataset: name of reference datatset (observations) +; +; Modification history +; * 20200406-zechlau_sabrina: code rewritten for ESMValTool v2.0 +; * 2015xxxx-wenzel_sabrina: written +; ######################################################################## +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/carbon_ec/carbon_aux.ncl" + +load "$diag_scripts/shared/latlon.ncl" +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/scaling.ncl" +load "$diag_scripts/shared/ensemble.ncl" + +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/scatterplot.ncl" +load "$diag_scripts/shared/plot/xy_line.ncl" +load "$diag_scripts/shared/plot/legends.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" +load "$diag_scripts/shared/plot/carbon_plots.ncl" + +begin + + enter_msg(DIAG_SCRIPT, "") + + tmp = metadata_att_as_array(input_file_info, "variable_group") + co2s_ann = ind(tmp.eq."co2s") + co2s_amp = ind(tmp.eq."co2s_amp") + co2s_ann_obs = ind(tmp.eq."co2s_obs") + co2s_amp_obs = ind(tmp.eq."co2s_amp_obs") + delete(tmp) + + ; Load var_info + var = variable_info[0]@short_name + info = select_metadata_by_name(input_file_info, var) + info0 = info[co2s_ann] + ListPush(info0, info[co2s_ann_obs]) + info1 = info[co2s_amp] + ListPush(info1, info[co2s_amp_obs]) + + datasetnames = metadata_att_as_array(info0, "dataset") + dim_MOD = ListCount(info0) + + reference_dataset = variable_info[0]@reference_dataset + iref = ind(reference_dataset .eq. datasetnames) + idat = ind(reference_dataset .ne. 
datasetnames)
+
+  log_info("+++++++++++++++++++++++++++++++++++++++++++++")
+  log_info(DIAG_SCRIPT + " (var: " + var + ")")
+  log_info("+++++++++++++++++++++++++++++++++++++++++++++")
+
+  ; ******************************************************************
+  ; Create output plot directory
+  plot_dir = config_user_info@plot_dir
+  system("mkdir -p " + plot_dir)
+  system("mkdir -p " + config_user_info@work_dir)
+
+  ; Plot file type
+  file_type = config_user_info@output_file_type
+  if (ismissing(file_type)) then
+    file_type = "ps"
+  end if
+
+  ; Check for required settings
+  exit_if_missing_atts(diag_script_info, "styleset")
+
+  ; optional input parameters
+  if (isatt(diag_script_info, "bc_xmax_year")) then
+    xMax_year = toint(diag_script_info@bc_xmax_year)
+  else
+    xMax_year = max(metadata_att_as_array(info, "end_year"))
+  end if
+  if (isatt(diag_script_info, "bc_xmin_year")) then
+    xMin_year = toint(diag_script_info@bc_xmin_year)
+  else
+    xMin_year = min(metadata_att_as_array(info, "start_year"))
+  end if
+
+  ; Call plot scripts
+  plot_file = "amplitude_" + var + "_" + xMin_year + "-" + xMax_year
+  wks = gsn_open_wks(file_type, plot_dir + plot_file)
+  colors = project_style(info0, diag_script_info, "colors")
+  markers = project_style(info0, diag_script_info, "markers")
+  thicks = project_style(info0, diag_script_info, "avgstd")
+  lgLabels = datasetnames
+
+  ; path to beta file
+  temp_dir = config_user_info@work_dir + "/" + diag_script_info@nc_infile
+  temp_dir = temp_dir + "beta_" + xMax_year + "-" + xMin_year + ".nc"
+  ; ==================================================================
+  ; setup data holders for plots
+  nyMax = max(metadata_att_as_array(info0, "end_year"))
+  nyMin = min(metadata_att_as_array(info0, "start_year"))
+  nMax = nyMax - nyMin + 1
+
+  Ydata = new((/dim_MOD, nMax/), float)
+  Yfitd = new((/dim_MOD, nMax/), float)
+  Xdata = new((/dim_MOD, nMax/), float)
+  rcc = new((/dim_MOD, 2/), float)
+  stdrc = new((/dim_MOD, 2/), float)
+  Xdata!1 = "year"
+  Xdata&year = ispan(nyMin+1, nyMax + 1, 1)
+
+  ; ##############################################################
+  ; loop for models
+  do imod = 0, dim_MOD-1
+
+    ; Read data
+    A0 = read_data(info0[imod])
+    A1 = read_data(info1[imod])
+
+    ; Save input data with a one-year offset
+    Xdata(imod, :dimsizes(A0)-2) = tofloat(A0(1:))
+    Ydata(imod, :dimsizes(A1)-1) = tofloat(A1)
+
+    ; Regression between [CO2] annual mean (x) and [CO2] amplitude (y)
+    rc = regline_stats(Xdata(imod, :), Ydata(imod, :))
+    rcc(imod, 1) = rc
+    rcc(imod, 0) = rc@b(0)
+    stdrc(imod, :) = rc@stderr
+    Yfitd(imod, :dimsizes(rc@Yest)-1) = rc@Yest
+
+    delete([/rc, A1, A0/])
+  end do  ; model loop
+
+  ; read file with beta values
+  beta = ncdf_read(temp_dir, "beta")
+
+  ; Save in array w/o OBS
+  betaGPP = new((/2, dim_MOD-1/), float)
+  rcGPP = new((/(dim_MOD-1)*2/), float)
+
+  betaGPP(:, :) = beta
+  rcGPP(1::2) = betaGPP@rcgpp
+
+  rcGPP!0 = "models"
+  tmpm = new((/(dim_MOD-1)*2/), string)
+  tmpm(::2) = datasetnames(idat)
+  tmpm(1::2) = datasetnames(idat)
+  rcGPP&models = tmpm
+
+  delete(beta)
+  delete(tmpm)
+
+  ; Calculate constraint on BETA_GPP from the amplitude trends
+  nfit = fspan(0, 50, 50)
+
+  ; range of the amplitude trends for the regression line
+  minx = min(rcc(idat, 1))-1.1*(max(rcc(idat, 1)) - min(rcc(idat, 1)))
+  maxx = max(rcc(idat, 1))+1.1*(max(rcc(idat, 1)) - min(rcc(idat, 1)))
+
+  rc = regline_stats(rcc(idat, 1), betaGPP(0, :))
+  arc_bco2 = rc@r
+  prc_bco2 = rc@F_pval
+  xfit_bco2 = minx + (maxx - minx) * nfit/dimsizes(nfit)
+  yfit_bco2 = rc * xfit_bco2 + rc@yintercept
+  beta_mco2 = rc * rcc(iref, 1) + rc@yintercept
+
beta_eco2 = sqrt(stdrc(iref, 1) ^ 2 + rc@rstd ^ 2) + + ; Calculate prior distribution + PDF = Cond_PDF(xfit_bco2, yfit_bco2, rcc(idat, 1), \ + betaGPP(0, :), rc@MSE, \ + (/rcc(iref, 1), stdrc(iref, 1)/)) + + if (stdrc(iref, 1).ne.0) then + co2_Py = PDF@Py + else + co2_Py = 0. + end if + co2_Py_pr = PDF@Py_pr + co2_y = PDF@y + + ; this is for plotting the regression line with confidence intervalls + yfit_bco2_std = new((/2, dimsizes(yfit_bco2)/), typeof(yfit_bco2)) + yfit_bco2_std(0, :) = yfit_bco2 + PDF@sigma + yfit_bco2_std(1, :) = yfit_bco2 - PDF@sigma + + yBETA = new((/2, dim_MOD+1/), float) + yBETA(0, 0) = avg(betaGPP(0, :)) + yBETA(0, 1) = beta_mco2 + yBETA(0, 2:) = betaGPP(0, :) + yBETA(1, 0) = stddev(betaGPP(0, :)) + yBETA(1, 1) = beta_eco2 + yBETA(1, 2:) = betaGPP(1, :) + + xCO2 = new((/2, dim_MOD+1/), float) + xCO2(0, 1) = rcc(iref, 1) + xCO2(0, 2:) = rcc(idat, 1) + xCO2(1, 1) = stdrc(iref, 1) + xCO2(1, 2:) = stdrc(idat, 1) + + delete(rc) + delete(PDF) + delete(minx) + delete(maxx) + + ; Plot section + ; ----------------------------------------------------------- + + marker_thicks = new(dimsizes(datasetnames), float) + marker_sizes = new(dimsizes(datasetnames), float) + marker_thicks = 3 + marker_sizes = 0.02 + marker = new(dim_MOD, "graphic") + + ; Create Plots 1a: + ; ----------------------------------------------------------- + XStg = "Annual CO~B~2~N~ [ppmv]" + YStg = "CO~B~2~N~ Amplitude [ppmv]" + data_arr = (/Xdata(0, :), Ydata(0, :)/) + data_arr!0 = "datasets" + data_arr&datasets = (/XStg, YStg/) + data_arr@res_tiMainString = "" + data_arr@res_tiMainFontHeightF = 0.025 + data_arr@res_gsnCenterString = "" + data_arr@res_gsnMaximize = True + data_arr@res_xyMarkerSizeF = 1. + data_arr@res_trXMinF = min(Xdata(:, :)) + data_arr@res_trXMaxF = max(Xdata(:, :)) + data_arr@res_trYMinF = 0 + data_arr@res_trYMaxF = max(Ydata(:, :)) + 0.25 * max(Ydata(:, :)) + data_arr@res_vpWidthF = 0.5 + data_arr@res_vpHeightF = 0.4 + data_arr@diag_script = DIAG_SCRIPT + diag_script_info@scatter_log = False + + data_arr@res_xyMarkLineMode = "Markers" + + plot = scatterplot(wks, data_arr, var, False, False, input_file_info) + + txres = True + txres@gsMarkerSizeF = 10. 
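+  ; marker size and thickness are shared by all datasets; marker color
+  ; and index are overwritten per dataset in the loop below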
+ txres@gsMarkerThicknessF = 3 + + ; Add year strings as markers + lineres = True + do imod = 0, dim_MOD - 1 + idx := ind(.not.ismissing(Xdata(imod, :))) + idy := ind(.not.ismissing(Ydata(imod, :))) + if (datasetnames(imod).eq."OBS") then + lineres@gsLineThicknessF = 4 + else + lineres@gsLineThicknessF = 3 + end if + txres@gsMarkerColor = colors(imod) + txres@gsMarkerIndex = markers(imod) + add_markers(wks, plot, txres, Xdata(imod, idx), Ydata(imod, idy)) + + lineres@gsLineDashPattern = 0 + lineres@gsLineColor = colors(imod) + marker(imod) = gsn_add_polyline(wks, plot, Xdata(imod, :), \ + Yfitd(imod, :), lineres) + end do + + draw(plot) + frame(wks) + delete([/plot, XStg, data_arr/]) + + ; Create Plot 1b: + ; ---------------------------------------------------------- + rcc@info = info0 + + YStg = "Sensitivity of CO~B~2~N~ Amplitude to CO~B~2~N~" + + data_arr = new((/dim_MOD, 1/), float) + data_arr(:, 0) = rcc(:, 1) + data_arr!0 = "datasets" + data_arr&datasets(0) = datasetnames + data_arr!1 = "yaxis" + data_arr&yaxis = YStg + data_arr@res_tiMainString = "" + data_arr@res_tiMainFontHeightF = 0.025 + data_arr@res_gsnCenterString = "" + + plot = barchart(wks, data_arr, var, info0) + draw(plot) + frame(wks) + + delete([/plot, data_arr/]) + + ; Create Plots 3a: + ; ---------------------------------------------------------- + tmpm = datasetnames(idat) + delete(datasetnames) + datasetnames = new(dim_MOD+1, typeof(tmpm)) + datasetnames(0) = "OBS" + datasetnames(1) = "constraint mean" + datasetnames(2:) = tmpm + delete(tmpm) + + xmarkers = new(dim_MOD+1, typeof(markers)) + xmarkers(0) = 16 + xmarkers(1) = 3 + xmarkers(2:) = markers(idat) + + xcolors = new(dim_MOD+1, typeof(colors)) + xcolors(0) = "black" + xcolors(1) = "red" + xcolors(2:) = colors(idat) + + MainStg = "" + XStg = "Sensitivity of CO~B~2~N~ Amplitude to CO~B~2~N~" + YStg = "GPP(2xCO~B~2~N~)/GPP(1xCO~B~2~N~)" + + data_arr = (/xCO2(0, :), yBETA(0, :)/) + data_arr!0 = "datasets" + data_arr&datasets = (/XStg, YStg/) + data_arr@res_tiMainString = "" + data_arr@res_tiMainFontHeightF = 0.025 + data_arr@res_gsnCenterString = "" + data_arr@res_gsnMaximize = True + data_arr@res_xyMarkerSizeF = 1. 
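+  ; note that trXMaxF and trYMinF below are hard-coded for this figure;
+  ; the data-driven alternatives are kept as inline comments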
+ data_arr@res_trXMinF = min(xCO2(0, :)) + data_arr@res_trXMaxF = 0.1 ; max(xCO2(0, :)) + data_arr@res_trYMinF = 1.1 ; 0 + data_arr@res_trYMaxF = max(yBETA(0, :)) + 0.25 * max(yBETA(0, :)) + data_arr@res_vpWidthF = 0.5 + data_arr@res_vpHeightF = 0.4 + data_arr@diag_script = DIAG_SCRIPT + diag_script_info@scatter_log = False + + plot = scatterplot(wks, data_arr, var, False, False, input_file_info) + + ; add confidence intervall of regression line + peres = True + peres@tfPolyDrawOrder = "PreDraw" + peres@xyLineColor = "orange" + peres@gsFillColor = "orange" + xin = array_append_record(xfit_bco2, xfit_bco2(::-1), 0) + yin = array_append_record(yfit_bco2_std(0, :), yfit_bco2_std(1, ::-1), 0) + add_prediction_error(wks, plot, peres, xin, yin) + delete([/xin, yin/]) + + if (rcc(iref, 1).ne.0) then + ores = True + ores@tfPolyDrawOrder = "PreDraw" + ores@xyLineColor = "grey" + ores@gsFillColor = "grey" + xinobs = (/rcc(iref, 1) + stdrc(iref, 1), \ + rcc(iref, 1) + stdrc(iref, 1), \ + rcc(iref, 1) - stdrc(iref, 1), \ + rcc(iref, 1) - stdrc(iref, 1)/) + yinobs = (/500, -500, -500, 500/) + add_prediction_error(wks, plot, ores, xinobs, yinobs) + delete([/xinobs, yinobs, ores/]) + end if + + ; add regression line + lres = True + lres@gsLineColor = "red" + lres@gsLineThicknessF = 1.5 + fit_line = gsn_add_polyline(wks, plot, xfit_bco2, yfit_bco2, lres) + + ; add colored markers + mres = True + mres@gsMarkerSizeF = 20. + mres@gsMarkerThicknessF = 3. + do imod = 1, dim_MOD - 1 + mres@gsMarkerColor = xcolors(imod) + mres@gsMarkerIndex = xmarkers(imod) + add_markers(wks, plot, mres, xCO2(0, imod), yBETA(0, imod)) + end do + + ; add xy error bars + eres = True + eres@gsLineColor = colors(idat) + add_errorbar(wks, plot, eres, xCO2(:, 2:), yBETA(:, 2:)) + + draw(plot) + frame(wks) + + delete([/plot, peres, eres, mres, lres, data_arr/]) + + ; Create Plots 3b: + ; ----------------------------------------------- + plot_pdf = new(1, graphic) + + bres = True + bres@gsnDraw = False + bres@gsnFrame = False + bres@xyLineThicknessF = 2. + bres@xyLineColor = (/"black"/) + bres@tiXAxisString = "GPP(2xCO~B~2~N~)/GPP(1xCO~B~2~N~)" + bres@tiYAxisString = "Probability Density" + bres@trXMinF = 0.6 + bres@trXMaxF = 2.0 + bres@trYMinF = 0. 
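+  ; y axis runs from zero to 20% above the conditional PDF maximum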
+ bres@trYMaxF = max(co2_Py)*1.2 + bres@gsnXYBarChart = True + bres@tiMainString = "" + bres@vpWidthF = 0.5 + bres@vpHeightF = 0.4 + bres@tmXTLabelFontHeightF = 0.025 + bres@tmYLLabelFontHeightF = 0.025 + bres@tiXAxisFontHeightF = 0.025 + bres@tiYAxisFontHeightF = 0.022 + + ; create line plot with PDFs + diag_script_info@multi_model_mean = False + diag_script_info@scatter_log = False + diag_script_info@xy_line_legend = False + + pres = True + pres@tiMainString = "" + pres@tiXAxisString = XStg + pres@tiYAxisString = "Probability Density" + pres@vpWidthF = 0.4 + if (rcc(iref, 1).eq.0) then + pres@xyLineColors = "black" + pres@xyDashPatterns = 2 + pres@trYMinF = 0 + xy_line(wks, co2_Py_pr, co2_y, 0., pres, input_file_info) + else + pres@xyLineColors = (/"black", "red"/) + pres@xyDashPatterns = (/2, 0/) + pres@trYMinF = 0 + xy_line(wks, (/co2_Py_pr, co2_Py/), (/co2_y, co2_y/), \ + 0., pres, input_file_info) + end if + + obsres = True + obsres@gsLineColor = "red" + obsres@gsLineThicknessF = 2 + PDF3 = gsn_add_polyline(wks, plot_pdf, co2_y, co2_Py, obsres) + + draw(plot_pdf) + frame(wks) + delete(plot_pdf) + + ; create separate legend + + leg = True + leg@txFontQuality = "High" + leg@txFont = 25 + leg@txFontHeightF = 0.02 + leg@diag_script = DIAG_SCRIPT + leg@annots = lgLabels(::-1) + leg@markers = markers(::-1) + leg@thicks = marker_thicks + leg@sizes = marker_sizes + leg@ncols = 1 + leg@colors = colors(::-1) ; rgbcolors + + create_legend_lines(leg@annots, leg, plot_dir + \ + DIAG_SCRIPT + "_legend", "markers") + + ; --------------------------------------------------------------- + ; Output to NetCDF + new_path = config_user_info@work_dir + new_path = new_path + "amplitude_" + xMax_year + "-" + xMin_year + ".nc" + + ; Attach attributes to the results + CO2var = new((/2, dim_MOD+1/), float) + CO2var(0, :) = xCO2(0, :) + CO2var(1, :) = xCO2(1, :) + CO2var!0 = "case" + CO2var&case = (/"mean", "stddev"/) + CO2var!1 = "model" + CO2var&model = datasetnames + CO2var@ncdf = new_path + CO2var@var = var + + CO2var@diag_script = (/DIAG_SCRIPT/) + + ; Write NetCDF output + ncdf_outfile = ncdf_write(CO2var, new_path) + + plotname = plot_dir + plot_file + "." + file_type + + ; ----------------------------------------------------------- + ; Call provenance logger + log_provenance(ncdf_outfile, \ + plotname, \ + XStg + " vs " + YStg, \ + (/"anomaly", "corr", "stddev"/), \ + (/"global"/),\ + (/""/), \ + (/"zechlau_sabrina"/), \ + (/"wenzel16nat"/), \ + metadata_att_as_array(info, "filename")) + +end +; ############################################################### diff --git a/esmvaltool/diag_scripts/carbon_ec/carbon_constraint.ncl b/esmvaltool/diag_scripts/carbon_ec/carbon_constraint.ncl new file mode 100644 index 0000000000..61f1f5c5a3 --- /dev/null +++ b/esmvaltool/diag_scripts/carbon_ec/carbon_constraint.ncl @@ -0,0 +1,344 @@ +; ############################################################################# +; carbon_constraint.ncl +; Author: Sabrina Wenzel (DLR, Germany) +; ############################################################################# +; +; Description: +; Calculates the long term sensitivity GAMMA_LT according to Wenzel et al. +; 2014 with observations from GCP. 
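+;    The feedback is estimated as
+;        GAMMA_LT = (dNBP_coupled - dNBP_uncoupled) / dT,
+;    where dNBP_* is the change in cumulated carbon uptake between the
+;    first and the last decade of the 1pctCO2 (coupled) and esmFixClim1
+;    (uncoupled) runs and dT is the corresponding change in near-surface
+;    temperature (this is what the model loop below computes).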
+; This plot script calls a file which contains the results from calculating
+; GAMMA_IAV (carbon_corr_2var.ncl)
+;
+; Required diag_script_info attributes (diagnostics specific):
+;  * con_latrange   array of latitudes for time series plots if 2D or 3D
+;  * con_lonrange   array of longitudes for time series plots if 2D or 3D
+;  * gIAV_file      file path for gamma_IAV values calculated with
+;                   carbon_corr_2var.ncl
+;
+; Modification history
+;    20200101-zechlau_sabrina: code rewritten for ESMValTool v2.0
+;    20170822-bock_lisa: added tags for reporting
+;    20151027-lauer_axel: added call to 'write_references'
+;    201509-zechlau_sabrina: updated to new ESMValTool structure
+;    201303-wenzel_sabrina: written
+; #############################################################################
+load "$diag_scripts/../interface_scripts/interface.ncl"
+
+load "$diag_scripts/carbon_ec/carbon_aux.ncl"
+
+load "$diag_scripts/shared/latlon.ncl"
+load "$diag_scripts/shared/statistics.ncl"
+load "$diag_scripts/shared/scaling.ncl"
+load "$diag_scripts/shared/ensemble.ncl"
+
+load "$diag_scripts/shared/plot/style.ncl"
+load "$diag_scripts/shared/plot/scatterplot.ncl"
+load "$diag_scripts/shared/plot/xy_line.ncl"
+load "$diag_scripts/shared/plot/legends.ncl"
+load "$diag_scripts/shared/plot/aux_plotting.ncl"
+
+begin
+
+  enter_msg(DIAG_SCRIPT, "")
+
+  tmp = metadata_att_as_array(input_file_info, "short_name")
+  variables = get_unique_values(tmp)
+  delete(tmp)
+
+  ; get indices associated with specific variables
+  tas_idx = 0
+  nbp_idx = 1
+
+  ; Load var_info
+  var0 = variable_info[tas_idx]@short_name
+  info0 = select_metadata_by_name(input_file_info, var0)
+  dim_MOD = ListCount(info0)
+
+  var1 = variable_info[nbp_idx]@short_name
+  info = select_metadata_by_name(input_file_info, var1)
+
+  info1 = info[dim_MOD:]    ; 1pctCO2
+  info2 = info[:dim_MOD-1]  ; esmFixClim1
+
+  log_info("+++++++++++++++++++++++++++++++++++++++++++++")
+  log_info(DIAG_SCRIPT + " (var: " + var0 + " + " + var1 + ")")
+  log_info("+++++++++++++++++++++++++++++++++++++++++++++")
+
+  datasetnames0 = metadata_att_as_array(info0, "dataset")
+  datasetnames1 = metadata_att_as_array(info1, "dataset")
+  datasetnames2 = metadata_att_as_array(info2, "dataset")
+
+  ; ******************************************************************
+  ; Create output plot directory
+  plot_dir = config_user_info@plot_dir
+  system("mkdir -p " + plot_dir)
+  system("mkdir -p " + config_user_info@work_dir)
+
+  ; Plot file type
+  file_type = config_user_info@output_file_type
+  if (ismissing(file_type)) then
+    file_type = "ps"
+  end if
+
+  ; Check for required settings
+  exit_if_missing_atts(diag_script_info, "styleset")
+
+  ; Set default values for non-required diag_script_info attributes
+  set_default_att(diag_script_info, "ts_anom", False)
+  set_default_att(diag_script_info, "volcanoes", False)
+
+  if (isatt(diag_script_info, "ec_anom")) then
+    anom = diag_script_info@ec_anom
+  end if
+
+  if (isatt(diag_script_info, "con_units")) then
+    UNITS = diag_script_info@con_units
+  end if
+
+  temp_dir = config_user_info@run_dir + diag_script_info@nc_infile
+  temp_dir = temp_dir + "gIAV_" \
+             + diag_script_info@gIAV_start + "-" \
+             + diag_script_info@gIAV_end + ".nc"
+
+  ; ==================================================================
+  ; setup data holders for plots
+  xMax_year = max(metadata_att_as_array(info0, "end_year"))
+  xMin_year = min(metadata_att_as_array(info0, "start_year"))
+  nMax = xMax_year - xMin_year + 1
+
+  ; carbon cycle climate feedback
+  gamma_LT = new((/dim_MOD/), double)
+  gamma_LT_std = new((/dim_MOD/), double)
+
+  ; trend lines
+  Yreg = new((/dim_MOD-1/), double)
+
+  ; slope
+  arc = new((/dim_MOD/), double)
+
+  ; read uncoupled VAR0
+  climo_dir = getenv("ESMValTool_climo_dir")
+
+  ; ==================================================================
+  ; loop for models
+  do imod = 0, dim_MOD-1
+
+    ; Read data
+    A0 = read_data(info0[imod])
+    A1 = read_data(info1[imod])
+    A2 = read_data(info2[imod])
+
+    idx_mod := imod
+
+    ; average yearly
+    A0 := time_operations(A0, -1, -1, "average", "yearly", True)
+    A1 := time_operations(A1, -1, -1, "average", "yearly", True)
+    A2 := time_operations(A2, -1, -1, "average", "yearly", True)
+
+    ; Convert units if appropriate
+    if (isatt(variable_info[tas_idx], "plot_units")) then
+      A0 := convert_units(A0, variable_info[tas_idx]@plot_units)
+    end if
+    if (isatt(variable_info[nbp_idx+1], "plot_units")) then
+      A1 := convert_units(A1, variable_info[nbp_idx+1]@plot_units)
+    end if
+    if (isatt(variable_info[nbp_idx], "plot_units")) then
+      A2 := convert_units(A2, variable_info[nbp_idx]@plot_units)
+    end if
+
+    ; cumulate nbp over time series
+    tmpcA1 = new((/dimsizes(A1)+1/), double)
+    tmpcA2 = new((/dimsizes(A2)+1/), double)
+    tmpcA1(0) = 0.0
+    tmpcA2(0) = 0.0
+
+    do y = 0, dimsizes(A1&year)-1
+      tmpcA1(y+1) = tmpcA1(y)+A1(y)
+      tmpcA2(y+1) = tmpcA2(y)+A2(y)
+    end do
+    delete(y)
+
+    ; change between the first and last decade of the cumulated fluxes
+    ; and of the temperature
+    tidx := dimsizes(A0)-1
+    Y1data := avg(tmpcA1(tidx-9:tidx))-avg(tmpcA1(:9))
+    Y2data := avg(tmpcA2(tidx-9:tidx))-avg(tmpcA2(:9))
+    Y3data := avg(A0(tidx-9:tidx))-avg(A0(:9))
+
+    ; calculate gamma_LT
+    gamma_LT(imod) = ((Y1data-Y2data)/(Y3data))
+    gamma_LT_std(imod) = sqrt((stddev(A1(tidx-9:tidx)) / Y3data) ^ 2 \
+                              + (1 / stddev(A0(tidx-9:tidx))) ^ 2 \
+                              + (stddev(A2(tidx-9:tidx)) / Y3data) ^ 2)
+
+    delete([/A0, A1, A2, tidx/])
+    delete([/Y1data, Y2data, Y3data/])
+    delete([/tmpcA1, tmpcA2/])
+  end do  ; model loop
+
+  ; read file with GAMMA_IAV values
+  IAV = ncdf_read(temp_dir, "gIAV")
+
+  if (dimsizes(IAV(0, :)).eq.dim_MOD+1) then
+    Y6data = IAV(:, 1:dim_MOD)  ; Gamma_IAV from the models
+    obs_std = IAV(:, 0)         ; Gamma_IAV from the OBS
+  else
+    Y6data = IAV
+    obs_std = (/0, 0/)
+  end if
+
+  ; exclude models from regression if appropriate
+  aX = tofloat(Y6data)
+  aY = tofloat(gamma_LT)
+  if (isatt(diag_script_info, "reg_models")) then
+    ind_MOD = ind(datasetnames0.eq.diag_script_info@reg_models)
+    aX(:, ind_MOD) = aX@_FillValue
+    aY(ind_MOD) = aY@_FillValue
+  end if
+
+  ; Calculate confidence limits on fit
+  minx = min(aX(0, :))-1.1*(max(aX(0, :))-min(aX(0, :)))
+  maxx = max(aX(0, :))+1.1*(max(aX(0, :))-min(aX(0, :)))
+  nfit = fspan(0, 50, 50)
+
+  ; calculate regression line
+  rc = regline_stats(aX(0, :), aY(:))
+  arc = rc@r
+  prc = rc@F_pval
+  xfit = minx+(maxx-minx)*nfit/dimsizes(nfit)
+  yfit = rc*xfit + rc@yintercept
+
+  PDF = Cond_PDF(xfit, yfit, aX(0, :), aY(:), rc@MSE, obs_std)
+
+  if (obs_std(0).ne.0) then
+    Py = PDF@Py
+    gamma_mco2 = rc*obs_std(0)+rc@yintercept
+    gamma_eco2 = 0.5*cdft_t(0.05/2., 5)*sqrt(rc@rstd ^ 2+obs_std(1) ^ 2)
+  end if
+  Py_pr = PDF@Py_pr
+  y = PDF@y
+
+  ; this is for plotting the regression line with confidence intervals
+  yfit_std = new((/2, dimsizes(yfit)/), typeof(yfit))
+  yfit_std(0, :) = yfit+PDF@sigma
+  yfit_std(1, :) = yfit-PDF@sigma
+
+  ; ==================================================================
+  ; PLOTTING
+
+  ; Get environment variables
+  plot_dir = getenv("ESMValTool_plot_dir")
+  diag_script_base = DIAG_SCRIPT
+  output_dir = config_user_info@plot_dir
+
+  plot_file =
"constr_"+var0+"-"+var1+"_"+xMin_year+"-"+xMax_year + wks = gsn_open_wks(file_type, output_dir + plot_file) + + colors = project_style(info0, diag_script_info, "colors") + thicks = project_style(info0, diag_script_info, "thicks") + plot_num = integertochar(ispan(97, 122, 1)) + + XStg = "Sensitivity of land+ocean CO~B~2~N~ flux," \ + + "~F8~g~F21~~B~IAV~N~ (GtC/yr/K)" + YStg = "Carbon Cycle Climate Feedback, ~F8~g~F21~~B~LT~N~ (GtC/K)" + + data_arr = new((/2, dimsizes(gamma_LT)/), float) + data_arr!0 = "datasets" + data_arr!1 = "model" + data_arr&datasets = (/XStg, YStg/) + data_arr&model = datasetnames0 + data_arr(0, :) = tofloat(Y6data(0, :)) + data_arr(1, :) = tofloat(gamma_LT) + data_arr@diag_script = DIAG_SCRIPT + + data_arr@res_tiMainString = "" + data_arr@res_tiMainFontHeightF = 0.025 + data_arr@res_gsnCenterString = "" + data_arr@res_trXMinF = min(Y6data(0, :))-0.2*abs(min(Y6data(0, :))) + data_arr@res_trXMaxF = max(Y6data(0, :))+0.2*abs(max(Y6data(0, :))) + data_arr@res_trYMinF = min(gamma_LT)-0.2*abs(min(gamma_LT)) + data_arr@res_trYMaxF = max(gamma_LT)+0.2*abs(max(gamma_LT)) + plot = scatterplot(wks, data_arr, var0, False, False, input_file_info) + + peres = True + peres@tfPolyDrawOrder = "PreDraw" + peres@xyLineColor = "orange" + peres@gsFillColor = "orange" + xin = array_append_record(xfit, xfit(::-1), 0) + yin = array_append_record(yfit_std(0, :), yfit_std(1, ::-1), 0) + add_prediction_error(wks, plot, peres, xin, yin) + delete([/xin, yin/]) + + if (obs_std(0).ne.0) then + xin = (/obs_std(0)+obs_std(1), obs_std(0)+obs_std(1), \ + obs_std(0)-obs_std(1), obs_std(0)-obs_std(1)/) + yin = (/500, -500, -500, 500/) + add_prediction_error(wks, plot, False, xin, yin) + delete([/xin, yin/]) + end if + + ; add regression line + lres = True + lres@gsLineColor = "red" + lres@gsLineThicknessF = 1.5 + fit_line = gsn_add_polyline(wks, plot, xfit, yfit, lres) + + ; add colored markers + mres = True + mres@xyMarker = 0 + do imod = 0, dim_MOD-1 + mres@xyMarkerColor = colors(imod) + add_markers(wks, plot, mres, Y6data(0, imod), gamma_LT(imod)) + end do + + ; add xy error bars + eres = True + eres@gsLineColor = colors + add_errorbar(wks, plot, eres, Y6data, (/gamma_LT, gamma_LT_std/)) + draw(plot) + frame(wks) + + ; create line plot with PDFs + diag_script_info@multi_model_mean = False + diag_script_info@scatter_log = False + diag_script_info@xy_line_legend = False + pres = True + pres@tiMainString = plot_num(1)+") " + pres@tiXAxisString = XStg + pres@tiYAxisString = "Probability Density" + pres@vpWidthF = 0.4 + if (obs_std(0).eq.0) then + pres@xyLineColors = "black" + pres@xyDashPatterns = 2 + pres@trYMinF = 0 + xy_line(wks, Py_pr, y, 0., pres, False) + else + pres@xyLineColors = (/"black", "red"/) + pres@xyDashPatterns = (/2, 0/) + pres@trYMinF = 0 + xy_line(wks, (/Py_pr, Py/), (/y, y/), 0., pres, input_file_info) + end if + + ; ************************************* + ; output to NetCDF + out_path = config_user_info@work_dir + system("mkdir -p " + out_path) + out_path1 = out_path + "constraint_" + var1 + "_" \ + + diag_script_info@gIAV_start + "-" \ + + diag_script_info@gIAV_end + ".nc" + data_arr@ncdf = out_path + data_arr@var = var0 + ncdf_outfile = ncdf_write(data_arr, out_path1) + + ; Call provenance logger + log_provenance(ncdf_outfile, \ + output_dir + plot_file + "." 
+ file_type, \ + XStg + " vs " + YStg, \ + (/"anomaly", "corr", "stddev"/), \ + (/"trop", "global"/),\ + (/""/), \ + (/"wenzel_sabrina"/), \ + (/"wenzel14jgr"/), \ + metadata_att_as_array(info0, "filename")) + +end +; ########################################################################### diff --git a/esmvaltool/diag_scripts/carbon_ec/carbon_gammaHist.ncl b/esmvaltool/diag_scripts/carbon_ec/carbon_gammaHist.ncl new file mode 100644 index 0000000000..589b0d72e8 --- /dev/null +++ b/esmvaltool/diag_scripts/carbon_ec/carbon_gammaHist.ncl @@ -0,0 +1,349 @@ +; ############################################################################ +; carbon_corr_2var.ncl +; Author: Sabrina Wenzel (DLR, Germany) +; ############################################################################ +; Description +; Reads and correlates in time spatial averages of CO2 fluxes (derived +; from land and ocean carbon fluxes) and temperature +; +; Required diag_script_info attributes (diagnostics specific) +; * ec_anom (optional) plot anomalies. +; +; Caveats +; +; Modification history +; 20200115-zechlau_sabrina: code rewritten for ESMValTool v2.0 +; 20151027-lauer_axel: added call to 'write_references' +; 201404??-zechlau_sabrina: updated according to changes in ESMValTool +; 201303??-zechlau_sabrina: written +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/carbon_ec/carbon_aux.ncl" + +load "$diag_scripts/shared/latlon.ncl" +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/scaling.ncl" +load "$diag_scripts/shared/ensemble.ncl" + +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/scatterplot.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" + +begin + + enter_msg(DIAG_SCRIPT, "") + + tmp = metadata_att_as_array(input_file_info, "short_name") + variables = get_unique_values(tmp) + delete(tmp) + + ; get indices associated with specific variables + tas_idx = ind(variables.eq."tas") + nbp_idx = ind(variables.eq."nbp") + fgco2_idx = ind(variables.eq."fgco2") + + ; Load var_info + var0 = variables(tas_idx) + info0 = select_metadata_by_name(input_file_info, var0) + + var1 = variables(nbp_idx) + info1 = select_metadata_by_name(input_file_info, var1) + + var2 = variables(fgco2_idx) + info2 = select_metadata_by_name(input_file_info, var2) + + log_info("+++++++++++++++++++++++++++++++++++++++++++++") + log_info(DIAG_SCRIPT + " (var: " + var0 + " + " + var1 + ")") + log_info("+++++++++++++++++++++++++++++++++++++++++++++") + + dim_MOD = ListCount(info0) + datasetnames0 = metadata_att_as_array(info0, "dataset") + datasetnames1 = metadata_att_as_array(info1, "dataset") + datasetnames2 = metadata_att_as_array(info2, "dataset") + + ; ***************************************************************** + ; Create output plot directory + plot_dir = config_user_info@plot_dir + system("mkdir -p " + plot_dir) + system("mkdir -p " + config_user_info@work_dir) + + ; Plot file type + file_type = config_user_info@output_file_type + if (ismissing(file_type)) then + file_type = "ps" + end if + + ; Check for required settings + exit_if_missing_atts(diag_script_info, "styleset") + + ; Set default values for non-required diag_script_info attributes + set_default_att(diag_script_info, "ts_anom", False) + set_default_att(diag_script_info, "volcanoes", False) + + if(isatt(diag_script_info, "ec_anom")) then + anom = diag_script_info@ec_anom + end if + + ; 
================================================================== + ; time span + xMax_year = max(metadata_att_as_array(info0, "end_year")) + xMin_year = min(metadata_att_as_array(info0, "start_year")) + nMax = xMax_year - xMin_year + 1 + + ; setup data holders for time lines + aY1reg = new((/dim_MOD, nMax/), "float") + aY2reg = new((/dim_MOD, nMax/), "float") + aY1reg!1 = "year" + aY1reg&year = ispan(xMin_year, xMax_year, 1) + aY2reg!1 = "year" + aY2reg&year = ispan(xMin_year, xMax_year, 1) + + ; setup data holders for slope + aYreg = new((/dim_MOD, nMax/), "float") + arc = new((/2, dim_MOD/), "float") + ccr = new((/dim_MOD/), "float") + var = new((/2, dim_MOD/), "float") + + ; ================================================================== + ; loop for models + do imod = 0, dim_MOD-1 + + ; Read data + A0 = read_data(info0[imod]) + A1 = read_data(info1[imod]) + A2 = read_data(info2[imod]) + + idx_mod := imod + + ; special case for GCP data + if (datasetnames1(imod).eq."GCP") then + A1 = A1*148300000000000. ; A1@area + end if + if (datasetnames2(imod).eq."GCP") then + A2 = A2*360000000000000. ; A2@area + end if + + ; calculate spatial average + if (dimsizes(dimsizes(A1)).gt.1) then + A1_tmp = dim_sum_Wrap(dim_sum_Wrap(A1)) + A1_tmp!0 = "time" + A1_tmp&time = (/A1&time/) + else + A1_tmp = A1 + end if + + if (dimsizes(dimsizes(A2)).gt.1) then + A2_tmp = dim_sum_Wrap(dim_sum_Wrap(A2)) + A2_tmp!0 = "time" + A2_tmp&time = (/A2&time/) + else + A2_tmp = A2 + end if + + ; align timeseries first (there might be better ways to do it) + if (dimsizes(A1_tmp).ne.dimsizes(A2_tmp))then + maxt = max((/dimsizes(A1_tmp), dimsizes(A2_tmp)/)) + tmpA = new((/2, maxt/), typeof(A1_tmp&time)) + tmpA(0, :dimsizes(A1_tmp)-1) = A1_tmp&time + tmpA(1, :dimsizes(A2_tmp)-1) = A2_tmp&time + dum = tmpA(1, :) - tmpA(0, :) + ind_t = ind(dum.eq.0) + delete([/maxt, dum, tmpA/]) + else + ind_t = ispan(0, dimsizes(A1_tmp)-1, 1) + end if + + ; add land and ocean carbon fluxes + A1_co2flux = A1_tmp(ind_t) + A2_tmp(ind_t) + copy_VarAtts(A2, A1_co2flux) + copy_VarCoords(A2, A1_co2flux) + delete([/A1, A1_tmp, A2_tmp, ind_t/]) + + ; Convert units if appropriate + if (isatt(diag_script_info, "plot_units_y")) then + A1 := convert_units(A1_co2flux, diag_script_info@plot_units_y) + end if + + ; Compute time average + tmp_A0 := time_operations(A0, -1, -1, "average", "yearly", True) + tmp_A1 := time_operations(A1, -1, -1, "average", "yearly", False) + delete([/A0, A1, A2, A1_co2flux/]) + + ; set years with big vulcanic eruptions to _FillValues + if (diag_script_info@ec_volc .eq. True) then + + ; remove 2 yrs after Krakatoa, Santa Maria, Agung, El Chichon, Pinatubo + volc_years = (/1883, 1884, 1902, 1903, 1963, \ + 1964, 1982, 1983, 1991, 1992/) + + do y = 0, dimsizes(tmp_A0&year)-1 + tmp0 = where(any(tmp_A0&year(y).eq.volc_years), \ + tmp_A0@_FillValue, tofloat(tmp_A0(y))) + aY1reg(idx_mod, y) = tofloat(tmp0) + + tmp1 = where(any(tmp_A1&year(y).eq.volc_years), \ + tmp_A1@_FillValue, tofloat(tmp_A1(y))) + aY2reg(idx_mod, y) = tofloat(tmp1) + delete([/tmp0, tmp1/]) + end do + else + aY1reg(idx_mod, 0:dimsizes(tmp_A0&year)-1) = tofloat(tmp_A0) + aY2reg(idx_mod, 0:dimsizes(tmp_A1&year)-1) = tofloat(tmp_A1) + end if + delete([/tmp_A0, tmp_A1/]) + + ; remove 11-yr running mean for anomalies + if (anom .eq. 
True) then + nmeanh = 5 + tmp1 = new((/nMax/), "float") + tmp2 = new((/nMax/), "float") + do n = nmeanh, nMax-nmeanh-1 + tmp1(n) = (/avg(aY1reg(idx_mod, n-nmeanh:n+nmeanh))/) + tmp2(n) = (/avg(aY2reg(idx_mod, n-nmeanh:n+nmeanh))/) + end do + + tmp1(:nmeanh-1) = tmp1(nmeanh) + tmp1(nMax-nmeanh:) = (/tmp1(nMax-nmeanh-1)/) + aY1reg(idx_mod, :) = aY1reg(idx_mod, :) - tmp1 + delete(tmp1) + + tmp2(:nmeanh-1) = tmp2(nmeanh) + tmp2(nMax-nmeanh:) = (/tmp2(nMax-nmeanh-1)/) + aY2reg(idx_mod, :) = aY2reg(idx_mod, :) - tmp2 + delete(tmp2) + end if + + ; check time range + tmin := ind(aY1reg&year .eq. max((/diag_script_info@start_year, \ + toint(info0[imod]@start_year)/))) + tmax := ind(aY1reg&year .eq. min((/diag_script_info@end_year, \ + toint(info0[imod]@end_year)/))) + + ; calculate regression line: + rc := regline(aY1reg(idx_mod, tmin:tmax), aY2reg(idx_mod, tmin:tmax)) + df := (/rc@nptxy-2/) + + aYreg(idx_mod, tmin:tmax) = (/rc * aY1reg(idx_mod, tmin:tmax) \ + + rc@yintercept/) + arc(0, idx_mod) = (/rc/) + arc(1, idx_mod) = (/rc@rstd/) + ccr(idx_mod) = (/escorc(aY1reg(idx_mod, tmin:tmax), \ + aY2reg(idx_mod, tmin:tmax))/) + var(0, idx_mod) = (/stddev(aY1reg(idx_mod, tmin:tmax))/) + var(1, idx_mod) = (/stddev(aY2reg(idx_mod, tmin:tmax))/) + + delete([/rc, df, tmin, tmax/]) + end do ; model loop + + ; counting ref_models only as one model + datasetnames0 = where(datasetnames0.eq.variable_info[0]@reference_dataset, \ + "OBS", datasetnames0) + + ; Get environment variables for plotting + plot_dir = getenv("ESMValTool_plot_dir") + diag_script_base = DIAG_SCRIPT + output_dir = config_user_info@plot_dir + + ; define arrays for plotting + f = sqrt(dim_MOD) + mm = floattointeger(ceil(f)) + colors = project_style(info0, diag_script_info, "colors") + thicks = project_style(info0, diag_script_info, "thicks") + plot_num = integertochar(ispan(97, 122, 1)) + plot = new(dim_MOD, "graphic") + + ; Call plot scripts + plot_file = "corr_" + var0 + "-" + var1 + "_anom_" + \ + diag_script_info@start_year + "-" + diag_script_info@end_year + wks = gsn_open_wks(file_type, output_dir + plot_file) + + ; Create panel for each model + do imod = 0, dim_MOD-1 + + MainStg = plot_num(imod) + ") " + datasetnames0(imod) \ + + " ~F8~g~F21~~B~IAV~N~= " \ + + sprintf("%4.2f", arc(0, imod)) + " GtC/K/yr; r= " \ + + sprintf("%4.2f", ccr(imod)) + XStg = "Anomaly in Trop. Temp. 
(" \ + + diag_script_info@plot_units_x + ")" + YStg = "Anomaly in d(land+ocean CO~B~2~N~ flux)/dt (" \ + + diag_script_info@plot_units_y + ")" + + data_arr = new((/2, dimsizes(aY1reg(imod, :))/), float) + data_arr!0 = "datasets" + data_arr!1 = "time" + data_arr&datasets = (/XStg, YStg/) + data_arr(0, :) = (/aY1reg(imod, :)/) + data_arr(1, :) = (/aY2reg(imod, :)/) + data_arr@diag_script = DIAG_SCRIPT + data_arr@res_tiMainString = MainStg + data_arr@res_gsnCenterString = "" + data_arr@res_tiMainFontHeightF = 0.025 + data_arr@res_xyMarkerColors = "white" + data_arr@res_trXMinF = min(aY1reg(imod, :)) + data_arr@res_trXMaxF = max(aY1reg(imod, :)) + data_arr@res_trYMinF = min(aY2reg(imod, :)) + data_arr@res_trYMaxF = max(aY2reg(imod, :)) + + if (.not.all(ismissing(data_arr))) then + plot(imod) = scatterplot(wks, data_arr, var0, False, \ + False, input_file_info) + + ; Add year strings as markers + add_num_markers(wks, plot(imod), False, aY1reg(imod, :), \ + aY2reg(imod, :)) + lineres = True + lineres@gsLineColor = colors(imod) + str = unique_string("line") + plot@$str$ = gsn_add_polyline(wks, plot(imod), aY1reg(imod, :), \ + aYreg(imod, :), lineres) + end if + delete([/data_arr, MainStg/]) + end do + + ; Common resources for panel plots + pan = True + pan@gsnFrame = False + pan@gsnMaximize = True + pan@gsnPanelXWhiteSpacePercent = 10 + pan@gsnPanelYWhiteSpacePercent = 5 + gsn_panel(wks, plot, (/mm, mm/), pan) + frame(wks) + + delete([/plot, pan/]) + + ; Save data for later use + new_path = config_user_info@run_dir + new_path = new_path + "gIAV_" \ + + diag_script_info@start_year + "-" \ + + diag_script_info@end_year + ".nc" + + ; Attach attributes to the results + arc!0 = "case" + arc&case = (/"mean", "stddev"/) + arc!1 = "model" + arc&model = datasetnames0 + arc@ncdf = new_path + arc@var = "gIAV" + arc@diag_script = (/DIAG_SCRIPT/) + arc@varIAV = (/var/) + arc@rIAV = (/ccr/) + + ; Write NetCDF output + ncdf_outfile = ncdf_write(arc, new_path) + + ; Call provenance logger + log_provenance(ncdf_outfile, \ + output_dir + plot_file + "." + file_type, \ + XStg + " vs " + YStg, \ + (/"anomaly", "corr", "stddev"/), \ + (/"trop", "global"/),\ + (/""/), \ + (/"wenzel_sabrina"/), \ + (/"wenzel14jgr"/), \ + metadata_att_as_array(info0, "filename")) + +end +; ############################################################# diff --git a/esmvaltool/diag_scripts/carbon_ec/carbon_tsline.ncl b/esmvaltool/diag_scripts/carbon_ec/carbon_tsline.ncl new file mode 100644 index 0000000000..d1a20301ab --- /dev/null +++ b/esmvaltool/diag_scripts/carbon_ec/carbon_tsline.ncl @@ -0,0 +1,553 @@ +; ############################################################################# +; PLOTS TIME SERIES +; Authors: Lisa Bock (DLR, Germany), Sabrina Wenzel (DLR, Germany) and +; Irene Cionni (ENEA, Italy) +; ############################################################################# +; +; Description +; This script is for plotting a time series of the global mean (anomaly). +; +; Required diag_script_info attributes (diagnostic specific) +; styleset: as in diag_scripts/shared/plot/style.ncl functions +; +; Optional diag_script_info attributes (diagnostic specific) +; time_avg: type of time average (currently only "yearly" and "monthly" are +; available). 
+;   ts_anomaly: calculates anomalies with respect to the defined reference
+;               period for each grid point by removing the mean of the
+;               given calendar month (requiring at least 50% of the data
+;               to be non-missing)
+;   ref_start: start year of reference period for anomalies
+;   ref_end: end year of reference period for anomalies
+;   ridx_start: start index of reference period for anomalies
+;   ridx_end: end index of reference period for anomalies
+;   ref_value: if true, right panel with mean values is attached
+;   ref_mask: if true, model fields will be masked by reference fields
+;   region: name of domain
+;   plot_units: variable unit for plotting
+;   y_min: set min of y-axis
+;   y_max: set max of y-axis
+;   mean_nh_sh: if true, calculate the NH and SH means first and then
+;               average the two
+;   volcanoes: if true, lines of main volcanic eruptions will be added
+;   run_ave: if not equal 0, calculate running mean over this number
+;            of years
+;   header: if true, use the region name as header
+;
+; Caveats
+;
+; Modification history
+;   20191219-wenzel_sabrina: added option to choose index instead of
+;                            reference period
+;   20191019-wenzel_sabrina: copied tsline.ncl to carbon_tsline and
+;                            added the option to cumulate variables
+;   20190911-bock_lisa: included method of Jones et al., 2013 and
+;                       added provenance
+;   20181112-bock_lisa: code rewritten for ESMValTool v2.0
+;   20170623-lauer_axel: added tags for reporting
+;   20160905-bock_lisa: implemented multi-model mean and variable reference
+;                       period for anomaly with reference mean value
+;   20151027-lauer_axel: moved call to 'write_references' to the beginning
+;                        of the code
+;   20150622-wenzel_sabrina: added optional anomaly calculation, choice of
+;                            area operations (sum, average) and detrending
+;                            of time series
+;   20150420-righi_mattia: adapted to new structure. Some of the original
+;                          features not ported yet (seasonal average,
+;                          smoothing, multi-model mean, etc.)
+;   20??????-cionni_irene: written.
+; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/carbon_ec/carbon_aux.ncl" + +load "$diag_scripts/shared/latlon.ncl" +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/scaling.ncl" +load "$diag_scripts/shared/ensemble.ncl" + +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/xy_line.ncl" + + +begin + + enter_msg(DIAG_SCRIPT, "") + + var0 = variable_info[0]@short_name + exp0 = variable_info[0]@exp + project0 = input_file_info[0]@project + info_items = select_metadata_by_name(input_file_info, var0) + datasetnames = metadata_att_as_array(info_items, "dataset") + dim_MOD = ListCount(info_items) + dim_VAR = ListCount(variable_info) + + log_info("++++++++++++++++++++++++++++++++++++++++++") + log_info(DIAG_SCRIPT + " (var: " + var0 + ")") + log_info("++++++++++++++++++++++++++++++++++++++++++") + + ; Create output plot directory + plot_dir = config_user_info@plot_dir + system("mkdir -p " + plot_dir) + + ; Plot file type + file_type = config_user_info@output_file_type + if (ismissing(file_type)) then + file_type = "ps" + end if + + ; Time averaging + if (isatt(diag_script_info, "time_avg")) then + time_avg = diag_script_info@time_avg + if (all(time_avg.ne.(/"yearly", "monthly"/))) then + error_msg("f", DIAG_SCRIPT, "", \ + "time averaging option " + time_avg + " not yet " + \ + "implemented") + end if + else + time_avg = "monthly" + end if + + ; Check for required settings + exit_if_missing_atts(diag_script_info, "styleset") + + ; Set default values for non-required diag_script_info attributes + set_default_att(diag_script_info, "time_avg", "monthly") + set_default_att(diag_script_info, "ts_anomaly", "noanom") + set_default_att(diag_script_info, "ref_value", True) + set_default_att(diag_script_info, "ref_mask", False) + set_default_att(diag_script_info, "region", "Global") + set_default_att(diag_script_info, "mean_nh_sh", False) + set_default_att(diag_script_info, "run_ave", 0) + set_default_att(diag_script_info, "header", False) + set_default_att(diag_script_info, "volcanoes", False) + set_default_att(diag_script_info, "align", False) + + ; Determine time range + start_year = min(metadata_att_as_array(info_items, "start_year")) + end_year = max(metadata_att_as_array(info_items, "end_year")) + all_years = ispan(start_year, end_year, 1) + + ; Create time coordinate + if (time_avg.eq."monthly") then + ntime = 12 * (end_year - start_year + 1) + time = new(ntime, integer) + do yy = start_year, end_year + do mm = 1, 12 + time(12 * (yy - start_year) + mm - 1) = 100 * yy + mm + end do + end do + elseif (time_avg.eq."yearly") then + ntime = end_year - start_year + 1 + time = new(ntime, integer) + time = ispan(start_year, end_year, 1) + end if + + ; get multi-model mean index + mmm_ind = ind(datasetnames .eq. 
"MultiModelMean") + + ; Set index of the reference dataset and read it + if (isatt(variable_info[0], "reference_dataset")) then + ref_ind = ind(datasetnames.eq.variable_info[0]@reference_dataset) + A_ref = read_data(info_items[ref_ind]) + end if + + ; Area operation + if (isatt(diag_script_info, "area_opper")) then + area_opper = diag_script_info@area_opper + if (all(area_opper.ne.(/"sum", "average"/))) then + error_msg("f", diag_script, "", \ + "area opperation option " + area_opper + " not yet " + \ + "implemented") + end if + else + area_opper = "average" + end if + + ; Anomaly + if (isatt(diag_script_info, "ts_anomaly")) then + anom = diag_script_info@ts_anomaly + if (anom .eq. "anom") then + if ((isatt(diag_script_info, "ref_start") .and. \ + isatt(diag_script_info, "ref_end")) .and. \ + .not. (isatt(diag_script_info, "ridx_start") .and. \ + isatt(diag_script_info, "ridx_end"))) then + ref_start = diag_script_info@ref_start + ref_end = diag_script_info@ref_end + if (ref_start.lt.start_year) then + ref_start = start_year + end if + if ((ref_start.lt.start_year) .or. (ref_end.gt.end_year) .or. \ + (ref_end.lt.ref_start)) then + error_msg("f", DIAG_SCRIPT, "", \ + "period for reference years is not properly defined") + end if + + elseif (isatt(diag_script_info, "ridx_start") .and. \ + isatt(diag_script_info, "ridx_end")) then + ridx_start = diag_script_info@ridx_start + ridx_end = diag_script_info@ridx_end + if (ridx_start.lt.0) then + ridx_start = 0 + end if + if ((ridx_start.lt.0) .or. (ridx_end.gt.end_year) .or. \ + (ridx_end.lt.ridx_start)) then + error_msg("f", DIAG_SCRIPT, "", \ + "period for reference years is not properly defined") + end if + else + error_msg("f", DIAG_SCRIPT, "", \ + "period for reference years is not defined " + \ + "(needed for anomaly)") + end if + + anom_ref = new((/dim_MOD/), double) + anom_ref!0 = "model" + anom_ref&model = datasetnames + end if + else + anom = "noanom" + end if + + ; TODO + model_arr_stddev = 0 + + ; Create model array + model_arr = new((/dim_MOD, ntime/), double) + model_arr!0 = "model" + model_arr!1 = "time" + model_arr&model = datasetnames + model_arr&time = time + model_arr@_FillValue = 1e+20 + model_arr = model_arr@_FillValue + delete(time) + + ; Loop over models + do imod = 0, dim_MOD - 1 + + log_info("Process dataset: " + datasetnames(imod)) + + ; Read data + A0 = read_data(info_items[imod]) + dnames = getVarDimNames(A0) + + if (isatt(variable_info[0], "reference_dataset")) then + + ; masking with reference dataset + if (diag_script_info@ref_mask .and. imod .ne. ref_ind) then + ; Determine start/end year + start_year = info_items[imod]@start_year + end_year = info_items[imod]@end_year + A_ref_mask = time_operations(A_ref, start_year, \ + end_year, "extract", "", 0) + A0_nomask_ref = time_operations(A0, ref_start, \ + ref_end, "extract", "", 0) + A0 = where(A_ref_mask.eq.A_ref_mask@_FillValue, \ + A_ref_mask@_FillValue, A0) + delete(A_ref_mask) + end if + + end if + + ; Calcluate cumulated fluxes + if (isatt(variable_info[0], "cumulate")) then + A0 = cumulate_carbon(A0) + A0@units = "kg s-1" + end if + + ; Convert units for plotting (if required) + if (isatt(diag_script_info, "plot_units")) then + A0 = convert_units(A0, diag_script_info@plot_units) + end if + + ; Running Mean + if(diag_script_info@run_ave .gt. 0) then + run_ave = "running_mean" + tmp = runave_Wrap(A0, 12 * diag_script_info@run_ave, 0) + delete(A0) + A0 = tmp + delete(tmp) + else + run_ave = "" + end if + + ; Anomaly + if (anom .eq. 
"anom") then + if (isatt(diag_script_info, "ridx_start") .and. \ + isatt(diag_script_info, "ridx_end")) then + tmp_start = cd_calendar(A0&time(ridx_start), 0) + tmp_end = cd_calendar(A0&time(ridx_end), 0) + ref_start = toint(tmp_start(0, 0)) + ref_end = toint(tmp_end(0, 0)) + end if + + ; calculate monthly mean of ref period if 0.5 data points are available + tmp = time_operations(A0, ref_start, ref_end, "extract", "", 0) + limit = toint(0.5 * dimsizes(tmp&time)) + do i = 0, dimsizes(tmp!0)-1 + tmp(i, :, :) = where(dim_num_n(.not.ismissing(tmp), 0).ge.limit, \ + tmp(i, :, :), tmp@_FillValue) + end do + A0_monavg_ref = time_operations(tmp, ref_start, ref_end, "average", \ + "monthlyclim", True) + + ; calculate anomaly for each grid point + do i = 0, dimsizes(A0&time) - 1 + A0(i, :, :) = A0(i, :, :) - A0_monavg_ref(mod(i, 12), :, :) + end do + + if (diag_script_info@ref_value) then + ; Calculate time average of ref period + ; annual mean if at least 2 months of data is available + ; if masking then take original unmasked dataset for ref mean + if (diag_script_info@ref_mask .and. imod .ne. ref_ind) then + tmp = A0_nomask_ref + delete(A0_nomask_ref) + end if + date := cd_calendar(tmp&time, 0) + year := date(:, 0) + month := date(:, 1) + weights = days_in_month(toint(year), toint(month)) + A0_timavg_ref = dim_avg_wgt_n_Wrap(tmp, weights, 2, 0) + delete(year) + delete(month) + delete(date) + delete(tmp) + delete(A0_monavg_ref) + + ; calculate global mean of reference period + ; first for each hemisphere - if chosen + if (diag_script_info@mean_nh_sh) then + tmp1 = area_operations(A0_timavg_ref, -90., 0., 0., 360., \ + "average", True) + tmp2 = area_operations(A0_timavg_ref, 0., 90., 0., 360., \ + "average", True) + anom_ref_tmp = (tmp1 + tmp2) / 2. + delete(tmp1) + delete(tmp2) + else + anom_ref_tmp = area_operations(A0_timavg_ref, -90., 90., \ + 0., 360., "average", True) + end if + delete(A0_timavg_ref) + + ; if anom_ref(imod) .eq. anom_ref_tmp then + ; delete(anom_ref_tmp) + ; end if + + if (isatt(diag_script_info, "ridx_start")) then + delete(ref_start) + delete(ref_end) + delete(tmp_start) + delete(tmp_end) + end if + + end if + end if + + ; Calculate time average of dataset + ; annual mean if at least 2 months of data is available + if (time_avg.eq."yearly") then + do i = 0, dimsizes(A0!0) - 1 + A0(i, :, :) = where(dim_num_n(.not.ismissing(A0), 0).ge.2, \ + A0(i, :, :), A0@_FillValue) + end do + A0_timavg = time_operations(A0, -1, -1, "average", \ + "yearly", True) + else + A0_timavg = A0 + end if + delete(A0) + + ; calculate global mean of complete time series + ; first for each hemisphere - if chosen + if (diag_script_info@mean_nh_sh) then + tmp1 = area_operations(A0_timavg, -90., 0., 0., 360., \ + "average", True) + tmp2 = area_operations(A0_timavg, 0., 90., 0., 360., \ + "average", True) + procmod = tmp1 + procmod = (tmp1 + tmp2) / 2. 
+ delete(tmp1) + delete(tmp2) + else + procmod = area_operations(A0_timavg, diag_script_info@ts_minlat, \ + diag_script_info@ts_maxlat, \ + diag_script_info@ts_minlon, \ + diag_script_info@ts_maxlon, area_opper, True) + + end if + delete(A0_timavg) + + ; Match time coordinate + if (time_avg.eq."monthly") then + date = cd_calendar(procmod&time, -1) + else if (time_avg.eq."yearly") then + date = procmod&year + end if + end if + if (diag_script_info@align) then + model_arr(imod, :dimsizes(procmod)-1) = (/procmod/) + xmax = date(dimsizes(procmod)-1) + else + idx1 = ind(date(0).eq.model_arr&time) + idx2 = ind(date(dimsizes(date) - 1).eq.model_arr&time) + model_arr(imod, idx1:idx2) = (/procmod/) + end if + if (imod.eq.0) then + copy_VarAtts(procmod, model_arr) + end if + delete(procmod) + delete(date) + + end do + + ; individual case for HadCRUT4 observations + ; mean value comes from climatology file (absolute.nc). + ; There are no missing values as in the anomaly data. + if (anom .eq. "anom" .and. isatt(variable_info[0], "reference_dataset") \ + .and. variable_info[0]@reference_dataset .eq. "HadCRUT4") then + if(ref_start .eq. 1961 .and. ref_end .eq. 1990) then + anom_ref(ref_ind) = 14.0 + else + anom_ref(ref_ind) = anom_ref@_FillValue + end if + end if + + ; Convert time coordinate to years (required by the plot routine) + if (time_avg.eq."monthly") then + year = model_arr&time / 100 + xmin = min(year) + xmax = max(year) + month = model_arr&time - 100 * year + time = todouble(year + month/12.) + delete(model_arr&time) + model_arr&time = time + delete(time) + delete(year) + delete(month) + elseif (time_avg.eq."yearly") then + xmin = min(model_arr&time) + if (.not.diag_script_info@align) then + xmax = max(model_arr&time) + end if + tmp = todouble(model_arr&time) + delete(model_arr&time) + model_arr&time = tmp + delete(tmp) + end if + + ; ************************************* + ; output to NetCDF + ; ************************************* + out_path = config_user_info@work_dir + system("mkdir -p " + out_path) + out_path1 = out_path + "tsline_" + var0 + "_" + anom + "_" + run_ave + ".nc" + model_arr@ncdf = out_path + model_arr@experiment = project0 + "_" + exp0 + model_arr@diag_script = DIAG_SCRIPT + model_arr@var = var0 + ncdf_outfile = ncdf_write(model_arr, out_path1) + + ; Define workstation + outfile = config_user_info@plot_dir + var0 + "_" \ + + str_sub_str(diag_script_info@region, " ", "_") \ + + "_" + project0 + "_" + exp0 + "_" + anom + "_" + run_ave + "_" \ + + start_year + "-" + end_year + wks = gsn_open_wks(file_type, outfile) + + ; Set resources + res = True + res@trXMinF = xmin + res@trXMaxF = xmax + ; res@trXMaxF = xmax + 0.25 * (xmax - xmin) + res@tmXBMode = "Explicit" + if (xmax - xmin.gt.20) then + res@tmXBValues = ispan(xmin, xmax, 10) + res@tmXBLabels = ispan(xmin, xmax, 10) + res@tmXBMinorValues = ispan(xmin, xmax, 5) + else + res@tmXBValues = ispan(xmin, xmax, 5) + res@tmXBLabels = ispan(xmin, xmax, 5) + res@tmXBMinorValues = ispan(xmin, xmax, 1) + end if + res@tmXBLabelAngleF = 45 + res@tmXBLabelJust = "CenterRight" + if (isatt(diag_script_info, "y_min")) then + res@trYMinF = diag_script_info@y_min + end if + if (isatt(diag_script_info, "y_max")) then + res@trYMaxF = diag_script_info@y_max + end if + + res@tmXBMode = "Manual" + res@tmXBTickSpacingF = 20 + + if (diag_script_info@header) then + res@tiMainString = diag_script_info@region + else + res@tiMainString = "" + end if + + if (isatt(variable_info[0], "long_name")) then + if (var0 .eq. 
"tas") then + varname = "Temperature" + else + varname = variable_info[0]@short_name + end if + else + varname = var0 + end if + + if (model_arr@units .eq. "degC") then + units = "~F34~0~F~ C" + else + units = model_arr@units + end if + + if (anom .eq. "anom") then + res@tiYAxisString = varname + " Anomaly" + " (" + units + ")" + else + res@tiYAxisString = varname + " (" + units + ")" + end if + + xy_line(wks, model_arr, model_arr&time, model_arr_stddev, res, info_items) + + log_info(" wrote " + outfile + "." + file_type) + + ; *************************************** + ; add meta data to plot (for reporting) + ; *************************************** + + if (diag_script_info@region .eq. "Global") then + domain = "global" + else + domain = "reg" + end if + + if (anom .eq. "anom") then + statistics = "anomaly" + else + statistics = "mean" + end if + + caption = "Time series of the " + statistics + " for variable " \ + + varname + ", similar to IPCC AR5, fig. 9.8." + contrib_authors = (/"cionni_irene", "righi_mattia", \ + "wenzel_sabrina", "bock_lisa"/) + + ; Call provenance logger + log_provenance(ncdf_outfile, \ + outfile + "." + file_type, \ + caption, \ + statistics, \ + domain, \ + "times", \ + contrib_authors, \ + (/"flato13ipcc", "jones13jgr"/), \ + metadata_att_as_array(input_file_info, "filename")) + + leave_msg(DIAG_SCRIPT, "") + +end diff --git a/esmvaltool/diag_scripts/climate_metrics/create_barplot.py b/esmvaltool/diag_scripts/climate_metrics/create_barplot.py new file mode 100644 index 0000000000..18759449e0 --- /dev/null +++ b/esmvaltool/diag_scripts/climate_metrics/create_barplot.py @@ -0,0 +1,279 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +"""Diagnostic script to create simple multi-dataset barplots. + +Description +----------- +Create barplot for different datasets of a single variable. This diagnostic +needs preprocessed 1D cubes with single dimension ``dataset``. + +Author +------ +Manuel Schlund (DLR, Germany) + +Project +------- +CRESCENDO + +Configuration options in recipe +------------------------------- +add_mean : str, optional + Add a bar representing the mean for each class. +label_attribute : str, optional + Cube attribute which is used as label for different input files. +order : list of str, optional + Specify the order of the different classes in the barplot by giving the + ``label``, makes most sense when combined with ``label_attribute``. +patterns : list of str, optional + Patterns to filter list of input data. +savefig_kwargs : dict, optional + Keyword arguments for :func:`matplotlib.pyplot.savefig`. +seaborn_settings : dict, optional + Options for :func:`seaborn.set_theme` (affects all plots). +sort_ascending : bool, optional (default: False) + Sort bars in ascending order. +sort_descending : bool, optional (default: False) + Sort bars in descending order. +subplots_kwargs : dict, optional + Keyword arguments for :func:`matplotlib.pyplot.subplots`. +value_labels : bool, optional (default: False) + Label bars with value of that bar. +y_range : list of float, optional + Range for the Y axis in the plot. 
+ +""" + +import colorsys +import logging +import os +from collections import OrderedDict +from copy import deepcopy +from pprint import pformat + +import iris +import matplotlib.pyplot as plt +import numpy as np +import seaborn as sns + +from esmvaltool.diag_scripts.shared import ( + ProvenanceLogger, + get_diagnostic_filename, + get_plot_filename, + io, + run_diagnostic, +) + +logger = logging.getLogger(os.path.basename(__file__)) + + +def _adjust_lightness(rgb_color, amount=0.6): + """Adjust lightness of given RGB color.""" + hls = colorsys.rgb_to_hls(*rgb_color) + new_color = (hls[0], max(0.0, min(1.0, amount * hls[1])), hls[2]) + return colorsys.hls_to_rgb(*new_color) + + +def _get_data_for_label(cfg, cube): + """Extract data from :class:`iris.cube.Cube`.""" + datasets = cube.coord('dataset').points + values = cube.data + + # Add mean if desired + if 'add_mean' in cfg: + logger.debug("Adding mean") + datasets = np.hstack((datasets, 'Mean')) + values = np.hstack((values, np.ma.mean(values))) + + # Sort if desired + if cfg.get('sort_ascending'): + sort_idx = np.argsort(values) + elif cfg.get('sort_descending'): + sort_idx = np.argsort(values)[::-1] + else: + sort_idx = np.argsort(datasets) + datasets = datasets[sort_idx] + values = values[sort_idx] + + return (datasets, values) + + +def _get_ordered_dict(cfg, all_data): + """Get desired order of data.""" + if 'order' not in cfg: + return all_data + new_dict = [] + order = cfg['order'] + if len(order) != len(set(order)): + raise ValueError( + f"Expected unique elements for 'order' option, got {order}") + logger.info("Using order %s for barplot", order) + if len(order) != len(all_data): + raise ValueError( + f"Expected {len(all_data):d} unique elements for 'order' option " + f"(number of different labels for the barplot), got " + f"{len(order):d}") + for label in order: + if label not in all_data: + raise ValueError( + f"Got invalid label '{label}' in 'order' option, expected one " + f"of {list(all_data.keys())}") + new_dict.append((label, all_data[label])) + return OrderedDict(new_dict) + + +def get_all_data(cfg, input_files): + """Get all data.""" + metadata = None + all_data = {} + all_files = [] + for filename in input_files: + all_files.append(filename) + cube = iris.load_cube(filename) + try: + cube.coord('dataset') + except iris.exceptions.CoordinateNotFoundError: + raise iris.exceptions.CoordinateNotFoundError( + f"File '{filename}' does not contain necessary coordinate " + f"'dataset'") + logger.info("Processing '%s'", filename) + + # Add to data dictionary + if cfg.get('label_attribute') in cube.attributes: + label = cube.attributes[cfg.get('label_attribute')] + else: + label = filename + all_data[label] = _get_data_for_label(cfg, cube) + + # Check cube metadata + new_metadata = { + 'long_name': cube.long_name, + 'units': cube.units, + 'var_name': cube.var_name.upper(), + } + if metadata is None: + metadata = new_metadata + else: + if metadata != new_metadata: + raise ValueError( + f"Got differing metadata for the different input files, " + f"{metadata} and {new_metadata}") + return (all_data, all_files, metadata) + + +def get_provenance_record(caption, ancestor_files, **kwargs): + """Create a provenance record describing the diagnostic data and plot.""" + record = { + 'caption': caption, + 'authors': ['schlund_manuel'], + 'references': ['acknow_project'], + 'ancestors': ancestor_files, + } + record.update(kwargs) + return record + + +def plot_data(cfg, all_data, metadata): + """Create barplot.""" + logger.debug("Plotting 
barplot") + (_, axes) = plt.subplots(**cfg.get('subplots_kwargs', {})) + + # Plot + all_pos = [] + x_labels = [] + offset = 0.0 + all_data = _get_ordered_dict(cfg, all_data) + for (label, xy_data) in all_data.items(): + xy_data = (xy_data[0], xy_data[1]) + pos = np.arange(len(xy_data[0])) + offset + 0.5 + bars = axes.bar(pos, xy_data[1], align='center', label=label) + all_pos.extend(pos) + x_labels.extend(xy_data[0]) + offset += len(pos) + 1.0 + if 'Mean' in xy_data[0]: + mean_idx = np.nonzero(xy_data[0] == 'Mean')[0][0] + bars[mean_idx].set_facecolor( + _adjust_lightness(bars[mean_idx].get_facecolor()[:3])) + + # Plot appearance + axes.set_title(metadata['long_name']) + axes.set_xticks(all_pos) + axes.set_xticklabels(x_labels, rotation=45.0, ha='right', size=4.0) + axes.tick_params(axis='x', which='major', pad=-5.0) + axes.set_ylabel(f"{metadata['var_name']} / {metadata['units']}") + axes.set_ylim(cfg.get('y_range')) + if 'label_attribute' in cfg: + axes.legend(loc='upper right') + if cfg.get('value_labels'): + for rect in axes.patches: + axes.text(rect.get_x() + rect.get_width() / 2.0, + rect.get_height() + 0.05, + "{:.2f}".format(rect.get_height()), + rotation=90.0, + ha='center', + va='bottom', + size=5.0) + + # Save plot + plot_path = get_plot_filename(metadata['var_name'], cfg) + plt.savefig(plot_path, **cfg['savefig_kwargs']) + logger.info("Wrote %s", plot_path) + plt.close() + return plot_path + + +def write_data(cfg, all_data, metadata): + """Write netcdf file.""" + new_data = {} + for (label, xy_data) in all_data.items(): + for (idx, dataset_name) in enumerate(xy_data[0]): + key = f'{label}-{dataset_name}' + value = xy_data[1][idx] + new_data[key] = value + netcdf_path = get_diagnostic_filename(metadata['var_name'], cfg) + var_attrs = metadata.copy() + var_attrs['short_name'] = var_attrs.pop('var_name') + io.save_scalar_data(new_data, netcdf_path, var_attrs) + return netcdf_path + + +def main(cfg): + """Run the diagnostic.""" + cfg = deepcopy(cfg) + cfg.setdefault('savefig_kwargs', { + 'dpi': 300, + 'orientation': 'landscape', + 'bbox_inches': 'tight', + }) + sns.set_theme(**cfg.get('seaborn_settings', {})) + patterns = cfg.get('patterns') + if patterns is None: + input_files = io.get_all_ancestor_files(cfg) + else: + input_files = [] + for pattern in patterns: + input_files.extend(io.get_all_ancestor_files(cfg, pattern=pattern)) + if not input_files: + raise ValueError("No input files found") + logger.info("Found input files:\n%s", pformat(input_files)) + + # Iterate over all files and extract data + (all_data, all_files, metadata) = get_all_data(cfg, input_files) + + # Create plot and netcdf file + plot_path = plot_data(cfg, all_data, metadata) + netcdf_path = write_data(cfg, all_data, metadata) + + # Provenance + caption = f"{metadata['long_name']} for multiple datasets." 
+ provenance_record = get_provenance_record(caption, all_files) + provenance_record.update({ + 'plot_types': ['bar'], + }) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(netcdf_path, provenance_record) + provenance_logger.log(plot_path, provenance_record) + + +if __name__ == '__main__': + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/climate_metrics/create_scatterplot.py b/esmvaltool/diag_scripts/climate_metrics/create_scatterplot.py new file mode 100644 index 0000000000..71c9ed771c --- /dev/null +++ b/esmvaltool/diag_scripts/climate_metrics/create_scatterplot.py @@ -0,0 +1,160 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +"""Diagnostic script to create simple multi-dataset scatterplots. + +Description +----------- +Create scatterplot for different datasets of a single variable. This diagnostic +needs preprocessed 1D cubes with single dimension ``dataset``. + +Author +------ +Manuel Schlund (DLR, Germany) + +Project +------- +CRESCENDO + +Configuration options in recipe +------------------------------- +dataset_style : str, optional + Dataset style file (located in + :mod:`esmvaltool.diag_scripts.shared.plot.styles_python`). +pattern : str, optional + Pattern to filter list of input data. +seaborn_settings : dict, optional + Options for :func:`seaborn.set_theme` (affects all plots). +y_range : list of float, optional + Range for the y axis in the plot. + +""" + +import logging +import os + +import iris +import matplotlib.pyplot as plt +import seaborn as sns + +from esmvaltool.diag_scripts.shared import ( + ProvenanceLogger, + get_diagnostic_filename, + get_plot_filename, + io, + iris_helpers, + plot, + run_diagnostic, +) + +logger = logging.getLogger(os.path.basename(__file__)) + + +def get_provenance_record(caption, ancestor_files, **kwargs): + """Create a provenance record describing the diagnostic data and plot.""" + record = { + 'caption': caption, + 'authors': ['schlund_manuel'], + 'references': ['acknow_project'], + 'ancestors': ancestor_files, + } + record.update(kwargs) + return record + + +def plot_data(cfg, cube): + """Create scatterplot for cube.""" + logger.debug("Plotting scatterplot for cube %s", + cube.summary(shorten=True)) + (_, axes) = plt.subplots() + project = cube.attributes.get('project') + + # Plot + for (idx, dataset_name) in enumerate(cube.coord('dataset').points): + style = plot.get_dataset_style(dataset_name, cfg.get('dataset_style')) + y_data = cube.extract(iris.Constraint(dataset=dataset_name)).data + axes.plot(idx + 1, + y_data, + marker=style['mark'], + linestyle='none', + markeredgecolor=style['color'], + markerfacecolor=style['facecolor'], + label=dataset_name) + + # Plot appearance + title = cube.long_name + if project is not None: + title += f' for {project}' + axes.set_title(title) + axes.tick_params(axis='x', + which='both', + bottom=False, + top=False, + labelbottom=False) + axes.set_ylabel(f"{cube.var_name.upper()} / {cube.units}") + axes.set_ylim(cfg.get('y_range')) + legend = axes.legend(loc='center left', + bbox_to_anchor=[1.05, 0.5], + borderaxespad=0.0, + ncol=2) + + # Save plot + plot_path = get_plot_filename(cube.var_name, cfg) + plt.savefig(plot_path, + orientation='landscape', + bbox_inches='tight', + bbox_extra_artists=[legend]) + logger.info("Wrote %s", plot_path) + plt.close() + return plot_path + + +def write_data(cfg, cube): + """Write netcdf file.""" + cube.attributes.pop('provenance', None) + netcdf_path = get_diagnostic_filename(cube.var_name, cfg) + 
io.iris_save(cube, netcdf_path) + return netcdf_path + + +def main(cfg): + """Run the diagnostic.""" + sns.set_theme(**cfg.get('seaborn_settings', {})) + input_files = io.get_all_ancestor_files(cfg, pattern=cfg.get('pattern')) + if len(input_files) != 1: + raise ValueError(f"Expected exactly 1 file, got {len(input_files)}") + input_file = input_files[0] + logger.info("Found input file: %s", input_file) + + # Create plots + cube = iris.load_cube(input_file) + try: + cube.coord('dataset') + except iris.exceptions.CoordinateNotFoundError as exc: + logger.error( + "File '%s' does not contain necessary coordinate 'dataset'", + input_file) + raise exc + + # Sort coordinate 'dataset' + [cube] = iris_helpers.intersect_dataset_coordinates([cube]) + + # Create plot and netcdf file + plot_path = plot_data(cfg, cube) + netcdf_path = write_data(cfg, cube) + + # Provenance + project = cube.attributes.get('project') + caption = "{}{} for multiple datasets.".format( + cube.long_name, '' if project is None else f' for {project}') + provenance_record = get_provenance_record(caption, [input_file]) + provenance_record.update({ + 'plot_types': ['scatter'], + }) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(netcdf_path, provenance_record) + provenance_logger.log(plot_path, provenance_record) + + +if __name__ == '__main__': + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/climate_metrics/create_table.py b/esmvaltool/diag_scripts/climate_metrics/create_table.py new file mode 100644 index 0000000000..22645137bf --- /dev/null +++ b/esmvaltool/diag_scripts/climate_metrics/create_table.py @@ -0,0 +1,223 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +"""Diagnostic script to create table of scalar data. + +Description +----------- +Create CSV table for scalar data (per dataset). All input data must be 1D +arrays with single ``'dataset'`` coordinate. + +Author +------ +Manuel Schlund (DLR, Germany) + +Project +------- +CRESCENDO + +Configuration options in recipe +------------------------------- +calculate_mean : bool, optional (default: True) + Calculate mean over all datasets and add it to table. +calculate_std : bool, optional (default: True) + Calculate standard deviation over all datasets and add it to table. +exclude_datasets : list of str, optional (default: ['MultiModelMean']) + Exclude certain datasets when calculating statistics over all datasets and + for assigning an index. +patterns : list of str, optional + Patterns to filter list of input data. +round_output : int, optional + If given, round output to given number of decimals. 
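+
+Example
+-------
+A minimal, hypothetical recipe entry (the ``patterns`` values are purely
+illustrative)::
+
+    scripts:
+      table:
+        script: climate_metrics/create_table.py
+        patterns: ['ecs.nc', 'tcr.nc']
+        calculate_mean: true
+        calculate_std: false
+        round_output: 2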
+ +""" + +import logging +import os +from pprint import pformat + +import iris +import numpy as np +import pandas as pd + +from esmvaltool.diag_scripts.shared import ( + ProvenanceLogger, + get_diagnostic_filename, + io, + run_diagnostic, +) + +logger = logging.getLogger(os.path.basename(__file__)) + +EXCLUDE_VAL = 0 +PANDAS_PRINT_OPTIONS = [ + 'display.max_rows', None, + 'display.max_colwidth', None, +] + + +def _add_numerical_index(data_frame, exclude_datasets): + """Get numerical index.""" + data_frame.loc[:, 'idx'] = np.arange(len(data_frame.index)) + 1 + for exclude_dataset in exclude_datasets: + idx_exclude_dataset = (data_frame.index.get_level_values('dataset') == + exclude_dataset) + data_frame.loc[idx_exclude_dataset, 'idx'] = EXCLUDE_VAL + idx_exclude_vals = (data_frame['idx'] == EXCLUDE_VAL).to_numpy().nonzero() + for idx in idx_exclude_vals[0]: + rows_above_exclude = data_frame.index[idx + 1:] + row_to_exclude = data_frame.index[idx] + data_frame.loc[rows_above_exclude, 'idx'] -= 1 + data_frame.loc[row_to_exclude, 'idx'] = EXCLUDE_VAL + data_frame = data_frame.set_index('idx', append=True) + return data_frame + + +def _calculate_statistic(data_frame, stat_func, exclude_datasets): + """Calculate statistic.""" + projects = data_frame.index.get_level_values('project') + dfs_to_append = [] + for project in list(set(projects)): + sub_data_frame = data_frame.loc[projects == project] + datasets = sub_data_frame.index.get_level_values('dataset') + for exclude_dataset in exclude_datasets: + sub_data_frame = sub_data_frame.loc[datasets != exclude_dataset] + stat = stat_func(sub_data_frame, axis=0) + series = pd.Series( + stat, + index=data_frame.columns, + name=(project, f'--{stat_func.__name__.upper()}--', EXCLUDE_VAL), + ) + df_to_append = series.to_frame().T + df_to_append.index.names = data_frame.index.names + dfs_to_append.append(df_to_append) + data_frame = pd.concat([data_frame] + dfs_to_append).sort_index() + return data_frame + + +def calculate_statistics(data_frame, cfg): + """Calculate statistics over all datasets.""" + exclude_datasets = cfg['exclude_datasets'] + if cfg.get('calculate_mean', True): + logger.info("Calculating mean over all datasets excluding %s", + exclude_datasets) + data_frame = _calculate_statistic(data_frame, np.mean, + exclude_datasets) + if cfg.get('calculate_std', True): + logger.info("Calculating standard deviation over all datasets " + "excluding %s", exclude_datasets) + data_frame = _calculate_statistic(data_frame, np.std, exclude_datasets) + return data_frame + + +def check_cube(cube, filename): + """Check properties of cube.""" + if cube.ndim != 1: + raise ValueError( + f"Expected 1D data in file '{filename}', got {cube.ndim:d} cube") + try: + cube.coord('dataset') + except iris.exceptions.CoordinateNotFoundError: + raise iris.exceptions.CoordinateNotFoundError( + f"Necessary coordinate 'dataset' not found in file '{filename}'") + + +def create_data_frame(input_files, exclude_datasets): + """Create data frame.""" + data_frame = pd.DataFrame() + for input_file in input_files: + cube = iris.load_cube(input_file) + check_cube(cube, input_file) + project = cube.attributes.get('project', 'unknown project') + index = pd.MultiIndex.from_product([[project], + cube.coord('dataset').points], + names=['project', 'dataset']) + series = pd.Series(data=cube.data, index=index) + + # Expand index + rows_to_add = [ + pd.Series(name=row, dtype=cube.dtype).to_frame().T for row in + series.index.difference(data_frame.index) + ] + data_frame = pd.concat([data_frame] + 
rows_to_add) + + # Add new data + if cube.var_name in data_frame.columns: + for row in series.index: + if np.isnan(data_frame.loc[row, cube.var_name]): + data_frame.loc[row, cube.var_name] = series.loc[row] + else: + if not np.isclose(data_frame.loc[row, cube.var_name], + series.loc[row]): + raise ValueError( + f"Got duplicate data for '{cube.var_name}' of " + f"'{row}': {series.loc[row]:e} and " + f"{data_frame.loc[row, cube.var_name]:e}") + else: + data_frame.loc[:, cube.var_name] = series + + # Sort and add numerical index for labels + data_frame.index.names = ['project', 'dataset'] + data_frame = data_frame.sort_index() + data_frame = _add_numerical_index(data_frame, exclude_datasets) + + return data_frame + + +def write_provenance(cfg, filename, data_frame, ancestors): + """Write provenance information.""" + variables = ', '.join(data_frame.columns) + projects = ', '.join(list(set(data_frame.index.get_level_values(0)))) + caption = (f"Table including variable(s) {variables} for datasets of " + f"project(s) {projects}.") + provenance_record = { + 'caption': caption, + 'authors': ['schlund_manuel'], + 'references': ['acknow_project'], + 'ancestors': ancestors, + } + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(filename, provenance_record) + + +def main(cfg): + """Run the diagnostic.""" + cfg.setdefault('exclude_datasets', ['MultiModelMean']) + + # Get input files + patterns = cfg.get('patterns') + if patterns is None: + input_files = io.get_all_ancestor_files(cfg) + else: + input_files = [] + for pattern in patterns: + input_files.extend(io.get_all_ancestor_files(cfg, pattern=pattern)) + if not input_files: + raise ValueError("No input files found") + logger.info("Found input files:\n%s", pformat(input_files)) + + # Create data frame + data_frame = create_data_frame(input_files, cfg['exclude_datasets']) + + # Calculate statistics + data_frame = calculate_statistics(data_frame, cfg) + + # Round output if desired + if 'round_output' in cfg: + data_frame = data_frame.round(decimals=cfg['round_output']) + + # Save file + basename = '-'.join(data_frame.index.levels[0]) + '_' + basename += '-'.join(data_frame.columns) + csv_path = get_diagnostic_filename(basename, cfg).replace('.nc', '.csv') + data_frame.to_csv(csv_path) + logger.info("Wrote %s", csv_path) + with pd.option_context(*PANDAS_PRINT_OPTIONS): + logger.info("Data:\n%s", data_frame) + + # Provenance + write_provenance(cfg, csv_path, data_frame, input_files) + + +if __name__ == '__main__': + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/climate_metrics/ecs.py b/esmvaltool/diag_scripts/climate_metrics/ecs.py index 3bc1938fe7..ee015be568 100644 --- a/esmvaltool/diag_scripts/climate_metrics/ecs.py +++ b/esmvaltool/diag_scripts/climate_metrics/ecs.py @@ -1,11 +1,15 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -"""Diagnostic script to calculate ECS following Andrews et al. (2012). +"""Diagnostic script to calculate ECS following Gregory et al. (2004). Description ----------- Calculate the equilibrium climate sensitivity (ECS) using the regression method -proposed by Andrews et al. (2012). +proposed by Gregory et al. (2004). Further plots related to ECS can be found +in the script ``climate_metrics/feedback_parameters.py``. + +If datasets with different numbers of years are given, assume that all data +starts with year 1 in the MMM calculation. 
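+In the Gregory regression, the anomaly of the net radiative flux N of the
+abrupt-4xCO2 experiment relative to the piControl run is regressed against
+the corresponding near-surface temperature anomaly T, i.e. N = lambda T + F.
+Since the radiative forcing of a CO2 doubling is half that of a
+quadrupling, ECS is estimated from the regression parameters as
+ECS = -F / (2 lambda).
+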
Author ------ @@ -17,90 +21,321 @@ Configuration options in recipe ------------------------------- -plot_ecs_regression : bool, optional (default: False) - Plot the linear regression graph. +calculate_mmm : bool, optional (default: True) + Calculate multi-model mean ECS. +complex_gregory_plot : bool, optional (default: False) + Plot complex Gregory plot (also add response for first ``sep_year`` years + and last 150 - ``sep_year`` years, default: ``sep_year=20``) if ``True``. +output_attributes : dict, optional + Write additional attributes to netcdf files. read_external_file : str, optional - Read ECS from external file. + Read ECS and feedback parameters from external file. The path can be given + relative to this diagnostic script or as absolute path. +savefig_kwargs : dict, optional + Keyword arguments for :func:`matplotlib.pyplot.savefig`. +seaborn_settings : dict, optional + Options for :func:`seaborn.set_theme` (affects all plots). +sep_year : int, optional (default: 20) + Year to separate regressions of complex Gregory plot. Only effective if + ``complex_gregory_plot`` is ``True``. +x_lim : list of float, optional (default: [1.5, 6.0]) + Plot limits for X axis of Gregory regression plot (T). +y_lim : list of float, optional (default: [0.5, 3.5]) + Plot limits for Y axis of Gregory regression plot (N). """ import logging import os +from copy import deepcopy +from functools import partial from pprint import pformat import cf_units import iris +import iris.coord_categorisation +import matplotlib.pyplot as plt import numpy as np +import seaborn as sns import yaml from scipy import stats +from esmvaltool.diag_scripts.climate_metrics.feedback_parameters import ( + calculate_anomaly, +) from esmvaltool.diag_scripts.shared import ( - ProvenanceLogger, extract_variables, get_diagnostic_filename, - get_plot_filename, group_metadata, io, plot, run_diagnostic, - select_metadata, variables_available) + ProvenanceLogger, + get_diagnostic_filename, + get_plot_filename, + group_metadata, + io, + run_diagnostic, + select_metadata, + sorted_metadata, + variables_available, +) logger = logging.getLogger(os.path.basename(__file__)) +COLORS = sns.color_palette() EXP_4XCO2 = { 'CMIP5': 'abrupt4xCO2', 'CMIP6': 'abrupt-4xCO2', } +RTMT_DATASETS = set() + + +def _get_anomaly_data(input_data): + """Calculate anomaly data for all variables.""" + logger.info("Calculating anomaly data") + project = input_data[0]['project'] + new_input_data = [] + for (var, var_data) in group_metadata(input_data, 'short_name').items(): + grouped_data = group_metadata(var_data, 'dataset') + for (dataset_name, datasets) in grouped_data.items(): + logger.debug("Calculating '%s' anomaly for dataset '%s'", var, + dataset_name) + data_4x = select_metadata(datasets, exp=EXP_4XCO2[project]) + data_pic = select_metadata(datasets, exp='piControl') + + # Check if all experiments are available + if not data_4x: + raise ValueError( + f"No '{EXP_4XCO2[project]}' data available for '{var}' of " + f"'{dataset_name}'") + if not data_pic: + raise ValueError( + f"No 'piControl' data available for '{var}' of " + f"'{dataset_name}'") + + # Calculate anomaly, extract correct years and save it + cube = calculate_anomaly(data_4x, data_pic) + cube.attributes['project'] = data_4x[0]['project'] + cube.attributes['dataset'] = dataset_name + if cube.ndim != 1: + raise ValueError( + f"This diagnostic supports only 1D (time), input data, " + f"got {cube.ndim}D data") + new_input_data.append({ + **data_4x[0], + 'ancestors': [data_4x[0]['filename'], 
data_pic[0]['filename']], + 'cube': + cube, + }) + return new_input_data + + +def _get_data_time_last(cube): + """Get data of ``cube`` with time axis as last dimension.""" + return np.moveaxis(cube.data, cube.coord_dims('time')[0], -1) + + +@partial(np.vectorize, excluded=['x_arr'], signature='(n),(n)->()') +def _get_intercept(x_arr, y_arr): + """Get intercept of linear regression of two (masked) arrays.""" + if np.ma.is_masked(y_arr): + x_arr = x_arr[~y_arr.mask] + y_arr = y_arr[~y_arr.mask] + if len(y_arr) < 2: + return np.nan + reg = stats.linregress(x_arr, y_arr) + return reg.intercept + + +def _get_multi_model_mean(input_data): + """Get multi-model mean for all variables.""" + logger.info("Calculating multi-model means") + project = input_data[0]['project'] + mmm_data = [] + + # Iterate over all variables + for (var, datasets) in group_metadata(input_data, 'short_name').items(): + logger.debug("Calculating multi-model mean for variable '%s'", var) + ancestors = [] + dataset_names = [] + mmm = {} + + # Read data from every datasets + for dataset in datasets: + try: + cube = dataset['cube'] + except KeyError: + raise KeyError( + f"No data for '{var}' of dataset '{dataset['dataset']}' " + f"for multi-model mean calculation") + if cube.ndim > 1: + raise ValueError( + f"Calculation of multi-model mean not supported for input " + f"data with more than one dimension (which should be " + f"time), got {cube.ndim:d}-dimensional cube") + ancestors.extend(dataset['ancestors']) + dataset_names.append(dataset['dataset']) + mmm[dataset['dataset']] = cube.data + + # Adapt shapes if necessary + target_shape = max([d.shape[0] for d in mmm.values()]) + for (dataset_name, dataset_data) in mmm.items(): + if dataset_data.shape[0] != target_shape: + dataset_data = np.pad( + dataset_data, (0, target_shape - dataset_data.shape[0]), + constant_values=np.nan) + mmm[dataset_name] = dataset_data + + # Calculate MMM + mmm = np.ma.masked_invalid(list(mmm.values())) + mmm_cube = cube.copy(data=np.ma.mean(mmm, axis=0)) + attributes = { + 'ancestors': ancestors, + 'dataset': 'MultiModelMean', + 'datasets': '|'.join(dataset_names), + 'project': project, + 'short_name': var, + } + mmm_cube.attributes = attributes + mmm_data.append({**attributes, 'cube': mmm_cube}) + input_data.extend(mmm_data) + return input_data + + +@partial(np.vectorize, excluded=['x_arr'], signature='(n),(n)->()') +def _get_slope(x_arr, y_arr): + """Get slope of linear regression of two (masked) arrays.""" + if np.ma.is_masked(y_arr): + x_arr = x_arr[~y_arr.mask] + y_arr = y_arr[~y_arr.mask] + if len(y_arr) < 2: + return np.nan + reg = stats.linregress(x_arr, y_arr) + return reg.slope + + +def _plot_complex_gregroy_plot(cfg, axes, tas_cube, rtnt_cube, reg_all): + """Plot complex Gregory plot.""" + sep = cfg['sep_year'] + + # Regressions + x_reg = np.linspace(cfg['x_lim'][0] - 1.0, cfg['x_lim'][1] + 1.0, 2) + reg_first = stats.linregress(tas_cube.data[:sep], rtnt_cube.data[:sep]) + reg_last = stats.linregress(tas_cube.data[sep:], rtnt_cube.data[sep:]) + y_reg_first = reg_first.slope * x_reg + reg_first.intercept + y_reg_last = reg_last.slope * x_reg + reg_last.intercept + y_reg_all = reg_all.slope * x_reg + reg_all.intercept + ecs_first = -reg_first.intercept / (2.0 * reg_first.slope) + ecs_last = -reg_last.intercept / (2.0 * reg_last.slope) + ecs_all = -reg_all.intercept / (2.0 * reg_all.slope) + + # Plots + axes.scatter(tas_cube.data[:sep], + rtnt_cube.data[:sep], + color=COLORS[0], + marker='o', + s=8, + alpha=0.7, + label=f'first {sep:d} years: 
ECS = {ecs_first:.2f} K') + axes.scatter(tas_cube.data[sep:], + rtnt_cube.data[sep:], + color=COLORS[1], + marker='o', + s=8, + alpha=0.7, + label=f'last {tas_cube.shape[0] - sep:d} years: ECS = ' + f'{ecs_last:.2f} K') + axes.plot(x_reg, y_reg_first, color=COLORS[0], linestyle='-', alpha=0.6) + axes.plot(x_reg, y_reg_last, color=COLORS[1], linestyle='-', alpha=0.6) + axes.plot(x_reg, + y_reg_all, + color='k', + linestyle='-', + alpha=0.9, + label=r'all years: ECS = {:.2f} K ($R^2$ = {:.2f})'.format( + ecs_all, reg_all.rvalue**2)) + + # Legend + return axes.legend(loc='upper right') + + +def _write_ecs_regression(cfg, tas_cube, rtnt_cube, reg_stats, dataset_name): + """Write Gregory regression cube.""" + ecs = -reg_stats.intercept / (2.0 * reg_stats.slope) + attrs = { + 'anomaly': 'relative to piControl run', + 'regression_r_value': reg_stats.rvalue, + 'regression_slope': reg_stats.slope, + 'regression_interception': reg_stats.intercept, + 'Climate Feedback Parameter': reg_stats.slope, + 'ECS': ecs, + } + attrs.update(cfg.get('output_attributes', {})) + cubes = iris.cube.CubeList() + for cube in [tas_cube, rtnt_cube]: + cube.var_name += '_anomaly' + cube.long_name += ' Anomaly' + cube.attributes = attrs + cubes.append(cube) + netcdf_path = get_diagnostic_filename('ecs_regression_' + dataset_name, + cfg) + io.iris_save(cubes, netcdf_path) + return netcdf_path def check_input_data(cfg): """Check input data.""" - if not variables_available(cfg, ['tas', 'rtmt']): - raise ValueError("This diagnostic needs 'tas' and 'rtmt' " - "variables if 'read_external_file' is not given") + if not variables_available(cfg, ['tas']): + raise ValueError( + "This diagnostic needs variable 'tas' if 'read_external_file' is " + "not given") + if not (variables_available(cfg, ['rtnt']) + or variables_available(cfg, ['rtmt'])): + raise ValueError( + "This diagnostic needs the variable 'rtnt' or 'rtmt' if " + "'read_external_file' is not given") input_data = cfg['input_data'].values() project_group = group_metadata(input_data, 'project') projects = list(project_group.keys()) if len(projects) > 1: - raise ValueError("This diagnostic supports only unique 'project' " - "attributes, got {}".format(projects)) + raise ValueError( + f"This diagnostic supports only unique 'project' attributes, got " + f"{projects}") project = projects[0] if project not in EXP_4XCO2: - raise ValueError("Project '{}' not supported yet".format(project)) + raise ValueError(f"Project '{project}' not supported yet") exp_group = group_metadata(input_data, 'exp') exps = set(exp_group.keys()) if exps != {'piControl', EXP_4XCO2[project]}: - raise ValueError("This diagnostic needs 'piControl' and '{}' " - "experiments, got {}".format(EXP_4XCO2[project], - exps)) - - -def get_anomaly_data(tas_data, rtmt_data, dataset): - """Calculate anomaly data for both variables.""" - project = tas_data[0]['project'] - exp_4xco2 = EXP_4XCO2[project] - paths = { - 'tas_4x': select_metadata(tas_data, dataset=dataset, exp=exp_4xco2), - 'tas_pi': select_metadata(tas_data, dataset=dataset, exp='piControl'), - 'rtmt_4x': select_metadata(rtmt_data, dataset=dataset, exp=exp_4xco2), - 'rtmt_pi': select_metadata( - rtmt_data, dataset=dataset, exp='piControl'), - } - ancestor_files = [] - cubes = {} - for (key, [path]) in paths.items(): - ancestor_files.append(path['filename']) - cube = iris.load_cube(path['filename']) - cube = cube.aggregated_by('year', iris.analysis.MEAN) - cubes[key] = cube - - # Substract piControl run from abrupt4xCO2 experiment - shape = None - for cube in 
cubes.values():
-        if shape is None:
-            shape = cube.shape
-        else:
-            if cube.shape != shape:
-                raise ValueError(
-                    "Expected all cubes of dataset '{}' to have identical "
-                    "shapes, got {} and {}".format(dataset, shape, cube.shape))
-    cubes['tas_4x'].data -= cubes['tas_pi'].data
-    cubes['rtmt_4x'].data -= cubes['rtmt_pi'].data
-    return (cubes['tas_4x'], cubes['rtmt_4x'], ancestor_files)
+        raise ValueError(
+            f"This diagnostic needs 'piControl' and '{EXP_4XCO2[project]}' "
+            f"experiments, got {exps}")
+
+
+def preprocess_data(cfg):
+    """Extract input data."""
+    input_data = deepcopy(list(cfg['input_data'].values()))
+    input_data = sorted_metadata(input_data, ['short_name', 'exp', 'dataset'])
+    if not input_data:
+        return ([], [])
+
+    # Use 'rtmt' instead of 'rtnt' if necessary
+    for dataset in input_data:
+        if dataset['short_name'] == 'rtmt':
+            RTMT_DATASETS.add(dataset['dataset'])
+            dataset['short_name'] = 'rtnt'
+    if RTMT_DATASETS:
+        logger.info("Using 'rtmt' instead of 'rtnt' for datasets '%s'",
+                    RTMT_DATASETS)
+
+    # Calculate anomalies for every dataset
+    input_data = _get_anomaly_data(input_data)
+
+    # Calculate multi-model mean
+    if cfg.get('calculate_mmm', True):
+        input_data = _get_multi_model_mean(input_data)
+
+    # Group data in terms of dataset
+    tas_data = select_metadata(input_data, short_name='tas')
+    rtnt_data = select_metadata(input_data, short_name='rtnt')
+    tas_data = group_metadata(tas_data, 'dataset')
+    rtnt_data = group_metadata(rtnt_data, 'dataset')
+    return (tas_data, rtnt_data)


def get_provenance_record(caption):
@@ -109,8 +344,8 @@ def get_provenance_record(caption):
        'caption': caption,
        'statistics': ['mean', 'diff'],
        'domains': ['global'],
-        'authors': ['schl_ma'],
-        'references': ['andrews12grl'],
+        'authors': ['schlund_manuel'],
+        'references': ['gregory04grl'],
        'realms': ['atmos'],
        'themes': ['phys'],
    }
@@ -119,182 +354,208 @@ def get_provenance_record(caption):

def read_external_file(cfg):
    """Read external file to get ECS."""
-    ecs = {}
-    clim_sens = {}
-    if not cfg.get('read_external_file'):
-        return (ecs, clim_sens)
-    base_dir = os.path.dirname(__file__)
-    filepath = os.path.join(base_dir, cfg['read_external_file'])
-    if os.path.isfile(filepath):
-        with open(filepath, 'r') as infile:
-            external_data = yaml.safe_load(infile)
-    else:
-        logger.error("Desired external file %s does not exist", filepath)
-        return (ecs, clim_sens)
+    filepath = os.path.expanduser(os.path.expandvars(
+        cfg['read_external_file']))
+    if not os.path.isabs(filepath):
+        filepath = os.path.join(os.path.dirname(__file__), filepath)
+    if not os.path.isfile(filepath):
+        raise FileNotFoundError(
+            f"Desired external file '{filepath}' does not exist")
+    with open(filepath, 'r') as infile:
+        external_data = yaml.safe_load(infile)
    ecs = external_data.get('ecs', {})
-    clim_sens = external_data.get('climate_sensitivity', {})
-    logger.info("External file %s", filepath)
+    feedback_parameter = external_data.get('feedback_parameter', {})
+    logger.info("Reading external file '%s'", filepath)
    logger.info("Found ECS (K):")
    logger.info("%s", pformat(ecs))
-    logger.info("Found climate sensitivities (W m-2 K-1):")
-    logger.info("%s", pformat(clim_sens))
-    return (ecs, clim_sens, filepath)
+    logger.info("Found climate feedback parameters (W m-2 K-1):")
+    logger.info("%s", pformat(feedback_parameter))
+    return (ecs, feedback_parameter, filepath)


-def plot_ecs_regression(cfg, dataset_name, tas_cube, rtmt_cube, reg_stats):
+def plot_gregory_plot(cfg, dataset_name, tas_cube, rtnt_cube, reg_stats):
    """Plot linear
regression used to calculate ECS.""" - if not (cfg['write_plots'] and cfg.get('plot_ecs_regression')): - return (None, None) + (_, axes) = plt.subplots() ecs = -reg_stats.intercept / (2 * reg_stats.slope) + project = tas_cube.attributes['project'] # Regression line - x_reg = np.linspace(-1.0, 9.0, 2) + x_reg = np.linspace(cfg['x_lim'][0] - 1.0, cfg['x_lim'][1] + 1.0, 2) y_reg = reg_stats.slope * x_reg + reg_stats.intercept # Plot data - text = r'r = {:.2f}, $\lambda$ = {:.2f}, F = {:.2f}, ECS = {:.2f}'.format( - reg_stats.rvalue, -reg_stats.slope, reg_stats.intercept, ecs) + if cfg.get('complex_gregory_plot'): + legend = _plot_complex_gregroy_plot(cfg, axes, tas_cube, rtnt_cube, + reg_stats) + else: + axes.scatter(tas_cube.data, + rtnt_cube.data, + color=COLORS[0], + marker='o', + s=8, + alpha=0.7) + legend = None + axes.plot(x_reg, y_reg, color='k', linestyle='-', alpha=0.8) + axes.text( + 0.05, + 0.9, + r'R$^2$ = {:.2f}, ECS = {:.2f} K'.format(reg_stats.rvalue**2, ecs), + transform=axes.transAxes, + ) + axes.axhline(0.0, color='gray', linestyle=':') + + # Plot appearance + axes.set_title(f"Gregory regression for {dataset_name} ({project})") + axes.set_xlabel("ΔT [K]") + axes.set_ylabel(r"N [W m$^{-2}$]") + axes.set_xlim(cfg['x_lim']) + axes.set_ylim(cfg['y_lim']) + + # Save plot plot_path = get_plot_filename(dataset_name, cfg) - plot.scatterplot( - [tas_cube.data, x_reg], - [rtmt_cube.data, y_reg], - plot_path, - plot_kwargs=[{ - 'linestyle': 'none', - 'markeredgecolor': 'b', - 'markerfacecolor': 'none', - 'marker': 's', - }, { - 'color': 'k', - 'linestyle': '-', - }], - save_kwargs={ - 'bbox_inches': 'tight', - 'orientation': 'landscape', - }, - axes_functions={ - 'set_title': dataset_name, - 'set_xlabel': 'tas / ' + tas_cube.units.origin, - 'set_ylabel': 'rtmt / ' + rtmt_cube.units.origin, - 'set_xlim': [0.0, 8.0], - 'set_ylim': [-2.0, 10.0], - 'text': { - 'args': [0.05, 0.9, text], - 'kwargs': { - 'transform': 'transAxes' - }, - }, - }, - ) + savefig_kwargs = dict(cfg['savefig_kwargs']) + if legend is not None: + savefig_kwargs['bbox_extra_artists'] = [legend] + plt.savefig(plot_path, **savefig_kwargs) + logger.info("Wrote %s", plot_path) + plt.close() # Write netcdf file for every plot - tas_coord = iris.coords.AuxCoord( - tas_cube.data, - **extract_variables(cfg, as_iris=True)['tas']) - attrs = { - 'model': dataset_name, - 'regression_r_value': reg_stats.rvalue, - 'regression_slope': reg_stats.slope, - 'regression_interception': reg_stats.intercept, - 'climate_sensitivity': -reg_stats.slope, - 'ECS': ecs, - } - cube = iris.cube.Cube( - rtmt_cube.data, - attributes=attrs, - aux_coords_and_dims=[(tas_coord, 0)], - **extract_variables(cfg, as_iris=True)['rtmt']) - netcdf_path = get_diagnostic_filename('ecs_regression_' + dataset_name, - cfg) - io.save_iris_cube(cube, netcdf_path) + netcdf_path = _write_ecs_regression(cfg, tas_cube, rtnt_cube, reg_stats, + dataset_name) # Provenance provenance_record = get_provenance_record( - "Scatterplot between TOA radiance and global mean surface temperature " - "anomaly for 150 years of the abrupt 4x CO2 experiment including " - "linear regression to calculate ECS for {}.".format(dataset_name)) + f"Scatterplot between TOA radiance and global mean surface " + f"temperature anomaly for 150 years of the abrupt 4x CO2 experiment " + f"including linear regression to calculate ECS for {dataset_name} " + f"({project}).") provenance_record.update({ - 'plot_file': plot_path, 'plot_types': ['scatter'], }) - return (netcdf_path, provenance_record) + return 
(netcdf_path, plot_path, provenance_record) + + +def set_default_cfg(cfg): + """Set default values for cfg.""" + cfg = deepcopy(cfg) + cfg.setdefault('savefig_kwargs', { + 'dpi': 300, + 'orientation': 'landscape', + 'bbox_inches': 'tight', + }) + cfg.setdefault('sep_year', 20) + cfg.setdefault('x_lim', [0.0, 12.0]) + cfg.setdefault('y_lim', [-2.0, 10.0]) + return cfg -def write_data(ecs_data, clim_sens_data, ancestor_files, cfg): +def write_data(cfg, ecs_data, feedback_parameter_data, ancestor_files): """Write netcdf files.""" - data = [ecs_data, clim_sens_data] + data = [ecs_data, feedback_parameter_data] var_attrs = [ { 'short_name': 'ecs', - 'long_name': 'Equilibrium Climate Sensitivity (ECS)', + 'long_name': 'Equilibrium Climate Sensitivity (Gregory method)', 'units': cf_units.Unit('K'), }, { 'short_name': 'lambda', - 'long_name': 'Climate Sensitivity', + 'long_name': 'Climate Feedback Parameter', 'units': cf_units.Unit('W m-2 K-1'), }, ] + input_data = list(cfg['input_data'].values()) + if input_data: + attrs = { + 'project': input_data[0]['project'], + } + else: + attrs = {} + if RTMT_DATASETS: + rtmt_datasets = sorted(list(RTMT_DATASETS)) + attrs['net_toa_radiation'] = ( + f"For datasets {rtmt_datasets}, 'rtmt' (net top of model " + f"radiation) instead of 'rtnt' (net top of atmosphere radiation) " + f"is used due to lack of data. These two variables might differ.") + attrs.update(cfg.get('output_attributes', {})) + data_available = False for (idx, var_attr) in enumerate(var_attrs): + if not data[idx]: + logger.info( + "Skipping writing of '%s' for all models, no data available", + var_attr['short_name']) + continue + data_available = True path = get_diagnostic_filename(var_attr['short_name'], cfg) - io.save_scalar_data(data[idx], path, var_attr) + io.save_scalar_data(data[idx], path, var_attr, attributes=attrs) caption = "{long_name} for multiple climate models.".format(**var_attr) provenance_record = get_provenance_record(caption) provenance_record['ancestors'] = ancestor_files with ProvenanceLogger(cfg) as provenance_logger: provenance_logger.log(path, provenance_record) + if not data_available: + raise ValueError("No input data given") def main(cfg): """Run the diagnostic.""" - input_data = cfg['input_data'].values() + cfg = set_default_cfg(cfg) + sns.set_theme(**cfg.get('seaborn_settings', {})) # Read external file if desired if cfg.get('read_external_file'): - (ecs, clim_sens, external_file) = read_external_file(cfg) + (ecs, feedback_parameter, external_file) = read_external_file(cfg) else: check_input_data(cfg) ecs = {} - clim_sens = {} + feedback_parameter = {} external_file = None - # Read data - tas_data = select_metadata(input_data, short_name='tas') - rtmt_data = select_metadata(input_data, short_name='rtmt') - - # Iterate over all datasets and save ECS and climate sensitivity - for dataset in group_metadata(tas_data, 'dataset'): - logger.info("Processing %s", dataset) - (tas_cube, rtmt_cube, ancestor_files) = get_anomaly_data( - tas_data, rtmt_data, dataset) + # Read and preprocess data + all_ancestors = [] + (tas_data, rtnt_data) = preprocess_data(cfg) + + # Iterate over all datasets and save ECS and feedback parameter + for dataset_name in tas_data: + logger.info("Processing '%s'", dataset_name) + if dataset_name not in rtnt_data: + raise ValueError( + f"No 'rtmt' or 'rtnt' data for '{dataset_name}' available") + tas_cube = tas_data[dataset_name][0]['cube'] + rtnt_cube = rtnt_data[dataset_name][0]['cube'] + ancestor_files = (tas_data[dataset_name][0]['ancestors'] + + 
rtnt_data[dataset_name][0]['ancestors']) # Perform linear regression - reg = stats.linregress(tas_cube.data, rtmt_cube.data) + reg = stats.linregress(tas_cube.data, rtnt_cube.data) - # Plot ECS regression if desired - (path, provenance_record) = plot_ecs_regression( - cfg, dataset, tas_cube, rtmt_cube, reg) + # Plot Gregory plots + (path, plot_path, provenance_record) = plot_gregory_plot( + cfg, dataset_name, tas_cube, rtnt_cube, reg) # Provenance - if path is not None: - provenance_record['ancestors'] = ancestor_files - with ProvenanceLogger(cfg) as provenance_logger: - provenance_logger.log(path, provenance_record) + provenance_record['ancestors'] = ancestor_files + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(path, provenance_record) + provenance_logger.log(plot_path, provenance_record) # Save data - if cfg.get('read_external_file') and dataset in ecs: - logger.info( - "Overwriting external given ECS and climate " - "sensitivity for %s", dataset) - ecs[dataset] = -reg.intercept / (2 * reg.slope) - clim_sens[dataset] = -reg.slope + if cfg.get('read_external_file') and dataset_name in ecs: + logger.warning( + "Overwriting externally given ECS and climate feedback " + "parameter from file '%s' for '%s'", external_file, + dataset_name) + ecs[dataset_name] = -reg.intercept / (2 * reg.slope) + feedback_parameter[dataset_name] = reg.slope + all_ancestors.extend(ancestor_files) # Write data - ancestor_files = [d['filename'] for d in tas_data + rtmt_data] if external_file is not None: - ancestor_files.append(external_file) - write_data(ecs, clim_sens, ancestor_files, cfg) + all_ancestors.append(external_file) + all_ancestors = list(set(all_ancestors)) + write_data(cfg, ecs, feedback_parameter, all_ancestors) if __name__ == '__main__': diff --git a/esmvaltool/diag_scripts/climate_metrics/external_sources/ipcc_ar4.yml b/esmvaltool/diag_scripts/climate_metrics/external_sources/ipcc_ar4.yml new file mode 100644 index 0000000000..c6db39a81f --- /dev/null +++ b/esmvaltool/diag_scripts/climate_metrics/external_sources/ipcc_ar4.yml @@ -0,0 +1,21 @@ +# ECS (IPCC AR4 table 8.2, units: K) +ecs: + cccma_cgcm3_1: 3.4 + cccma_cgcm3_1_t63: 3.4 + csiro_mk3_0: 3.1 + gfdl_cm2_0: 2.9 + gfdl_cm2_1: 3.4 + giss_model_e_h: 2.7 + giss_model_e_r: 2.7 + iap_fgoals1_0_g: 2.3 + inmcm3_0: 2.1 + ipsl_cm4: 4.4 + miroc3_2_hires: 4.3 + miroc3_2_medres: 4.0 + miub_echo_g: 3.2 + mpi_echam5: 3.4 + mri_cgcm2_3_2a: 3.2 + ncar_ccsm3_0: 2.7 + ncar_pcm1: 2.1 + ukmo_hadcm3: 3.3 + ukmo_hadgem1: 4.4 diff --git a/esmvaltool/diag_scripts/climate_metrics/external_sources/ipcc_ar5.yml b/esmvaltool/diag_scripts/climate_metrics/external_sources/ipcc_ar5.yml index f1d77f8dfb..aa5233b176 100644 --- a/esmvaltool/diag_scripts/climate_metrics/external_sources/ipcc_ar5.yml +++ b/esmvaltool/diag_scripts/climate_metrics/external_sources/ipcc_ar5.yml @@ -23,8 +23,8 @@ ecs: MRI-CGCM3: 2.6 NorESM1-M: 2.8 -# Climate sensitivity lambda (IPCC AR5 table 9.5, units: W m-2 K-1) -climate_sensitivity: +# Absolute climate feedback parameter lambda (IPCC AR5 table 9.5, units: W m-2 K-1) +feedback_parameter: ACCESS1-0: 0.8 bcc-csm1-1-m: 1.2 bcc-csm1-1: 1.1 diff --git a/esmvaltool/diag_scripts/climate_metrics/feedback_parameters.py b/esmvaltool/diag_scripts/climate_metrics/feedback_parameters.py new file mode 100644 index 0000000000..d6bd28b0fb --- /dev/null +++ b/esmvaltool/diag_scripts/climate_metrics/feedback_parameters.py @@ -0,0 +1,1048 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +"""Diagnostic script to calculate several 
climate feedback parameters and ECS. + +Description +----------- +Calculate climate feedback parameters and ECS similar to Andrews et al. (2012) +using the regression methods proposed by Gregory et al. (2004). + +Author +------ +Manuel Schlund (DLR, Germany) + +Project +------- +CRESCENDO + +Configuration options in recipe +------------------------------- +calculate_mmm : bool, optional (default: True) + Calculate multi-model means. +only_consider_mmm : bool, optional (default: False) + Only consider multi-model mean dataset. This automatically sets + ``calculate_mmm`` to ``True``. For large multi-dimensional datasets, this + might significantly reduce the computation time if only the multi-model + mean dataset is relevant. +output_attributes : dict, optional + Write additional attributes to netcdf files. +seaborn_settings : dict, optional + Options for :func:`seaborn.set_theme` (affects all plots). + +""" + +import logging +import os +from collections import OrderedDict +from copy import deepcopy +from functools import partial + +import cf_units +import iris +import iris.plot as iplt +import matplotlib.pyplot as plt +import numpy as np +import seaborn as sns +from scipy import stats + +from esmvaltool.diag_scripts.shared import ( + ProvenanceLogger, + extract_variables, + get_diagnostic_filename, + get_plot_filename, + group_metadata, + io, + plot, + run_diagnostic, + select_metadata, + sorted_metadata, + variables_available, +) + +logger = logging.getLogger(os.path.basename(__file__)) + +EXP_4XCO2 = { + 'CMIP5': 'abrupt4xCO2', + 'CMIP6': 'abrupt-4xCO2', +} +FEEDBACK_PARAMETERS = { + 'rtnt': 'Net', + 'rtmt': 'Net (rtmt)', + 'rlntcs': 'LW (cs)', + 'rsntcs': 'SW (cs)', + 'netcre': 'Net CRE', + 'lwcre': 'LW CRE', + 'swcre': 'SW CRE', +} +VAR_NAMES = { + 'rtnt': 'lambda_net', + 'rtmt': 'lambda_net_rtmt', + 'rlntcs': 'lambda_lwcs', + 'rsntcs': 'lambda_swcs', + 'netcre': 'lambda_netcre', + 'lwcre': 'lambda_lwcre', + 'swcre': 'lambda_swcre', +} +UNITS = { + 'rtnt': 'W m-2 K-1', + 'rtmt': 'W m-2 K-1', + 'rlntcs': 'W m-2 K-1', + 'rsntcs': 'W m-2 K-1', + 'netcre': 'W m-2 K-1', + 'lwcre': 'W m-2 K-1', + 'swcre': 'W m-2 K-1', + 'F': 'W m-2', + 'ECS': 'K', +} +LONG_NAMES = { + 'rtnt': 'Net Climate Feedback Parameter', + 'rtmt': 'Net Climate Feedback Parameter (using rtmt)', + 'rlntcs': 'Long Wave (clear sky) Climate Feedback Parameter', + 'rsntcs': 'Short Wave (clear sky) Climate Feedback Parameter', + 'netcre': 'Net Cloud Radiative Effect Climate Feedback Parameter', + 'lwcre': 'Long Wave Cloud Radiative Effect Climate Feedback Parameter', + 'swcre': 'Short Wave Cloud Radiative Effect Climate Feedback Parameter', + 'F': 'Radiative Forcing', + 'ECS': 'Equilibrium Climate Sensitivity', +} +SHORTER_NAMES = { + 'rtnt': 'Net Feedback Parameter', + 'rtmt': 'Net Feedback Parameter (rtmt)', + 'rlntcs': 'LW (clear sky) Feedback Parameter', + 'rsntcs': 'SW (clear sky) Feedback Parameter', + 'netcre': 'Net CRE Feedback Parameter', + 'lwcre': 'LW CRE Feedback Parameter', + 'swcre': 'SW CRE Feedback Parameter', + 'F': 'Radiative Forcing', + 'ECS': 'Equilibrium Climate Sensitivity', +} +NICE_SYMBOLS = { + 'rtnt': r'$\lambda_{net}$', + 'rtmt': r'$\lambda_{net(rtmt)}$', + 'rlntcs': r'$\lambda_{lwcs}$', + 'rsntcs': r'$\lambda_{swcs}$', + 'netcre': r'$\lambda_{netcre}$', + 'lwcre': r'$\lambda_{lwcre}$', + 'swcre': r'$\lambda_{swcre}$', +} +NICE_UNITS = { + 'W m-2 K-1': r'$W m^{-2} K^{-1}$', + 'W m-2': r'$W m^{-2}$', + 'K': r'$K$', +} +RTMT_TEXT = ( + "For datasets {}, 'rtmt' (net top of model radiation) instead of 
'rtnt' " + "(net top of atmosphere radiation) is used due to lack of data. These two " + "variables might differ.") + +# Global variables which will be adapted during runtime +NDIMS = {} +COORDS = {} +RTMT_DATASETS = set() + + +def calculate_anomaly(data_4x, data_pic): + """Calculate anomaly cube for a dataset.""" + # Abrupt 4xCO2 + cube_4x = iris.load_cube(data_4x[0]['filename']) + iris.coord_categorisation.add_year(cube_4x, 'time') + cube_4x = cube_4x.aggregated_by('year', iris.analysis.MEAN) + + # PiControl + cube_pic = iris.load_cube(data_pic[0]['filename']) + iris.coord_categorisation.add_year(cube_pic, 'time') + cube_pic = cube_pic.aggregated_by('year', iris.analysis.MEAN) + + # Anomaly + x_data = cube_pic.coord('year').points + y_data = _get_data_time_last(cube_pic) + slope = _get_slope(x_data, y_data) + intercept = _get_intercept(x_data, y_data) + for _ in range(cube_pic.ndim - 1): + x_data = np.expand_dims(x_data, -1) + new_x_data = np.broadcast_to(x_data, cube_pic.shape) + new_data = slope * new_x_data + intercept + cube_4x.data -= np.ma.masked_invalid(new_data) + return cube_4x + + +def _check_array_shapes(list_of_arrays, var): + """Check if list of arrays has identical shapes.""" + shapes = {a.shape for a in list_of_arrays} + if len(shapes) > 1: + raise ValueError( + f"Expected cubes with identical shapes for multi-model mean " + f"calculation of '{var}', got {shapes}") + + +def _check_cube_dimensions(cube): + """Check if dimension of :class:`iris.cube.Cube` is valid.""" + var = 'tas' if cube.var_name == 'tas' else 'rad' + ndim = cube.ndim + if ndim not in (1, 2, 3): + raise ValueError( + f"This diagnostic supports only 1D (time), 2D (time + other " + f"dimension) or 3D (time + 2 other dimensions) input data, got " + f"{ndim}D data") + coord_0_name = cube.coord(dimensions=(0, ), dim_coords=True).name() + if coord_0_name != 'time': + raise ValueError( + f"This diagnostic expects 'time' as dimension 0 for every cube, " + f"got '{coord_0_name}' for cube\n{cube}") + if NDIMS.get(var) is None: + NDIMS[var] = ndim + else: + if ndim != NDIMS[var]: + raise ValueError( + "This diagnostic supports only '{}' data with consistent " + "numbers of dimension, got mixed data".format( + 'radiation' if var is None else var)) + if NDIMS.get('tas', 0) > NDIMS.get('rad', 4): + raise ValueError( + f"This diagnostic expects temperature data to have smaller number " + f"of dimensions than radiation data, got {NDIMS['tas']}D data for " + f"temperature and {NDIMS['rad']}D data for radiation") + if ndim < 2: + return + coords = [ + coord.name() for coord in cube.coords(dim_coords=True) + if coord.name() != 'time' + ] + if COORDS.get(var) is None: + COORDS[var] = coords + else: + if coords != COORDS[var]: + raise ValueError( + "This diagnostic expects identical coordinate names for all " + "'{}' data, got {} and {}".format( + 'radiation' if var is None else var, coords, COORDS[var])) + + +def _create_feedback_file(feedback_cube, dataset_name, cfg, description=None): + """Save feedback parameter plot vs. 
remaining dimensions.""" + var = feedback_cube.var_name + filename = ('{}_vs_{}_{}'.format(VAR_NAMES.get(var, var), + '-'.join(COORDS['rad']), dataset_name)) + attrs = { + 'dataset': dataset_name, + 'radiation_variable': var, + } + attrs.update(cfg.get('output_attributes', {})) + if description is not None: + attrs['description'] = description + filename += f"_{description.replace(' ', '_')}" + feedback_cube.var_name = VAR_NAMES.get(var, var) + feedback_cube.long_name = LONG_NAMES.get(var, var) + feedback_cube.units = UNITS.get(var, 'unknown') + feedback_cube.attributes = attrs + + # Write cube + netcdf_path = get_diagnostic_filename(filename, cfg) + io.iris_save(feedback_cube, netcdf_path) + + # Caption + caption = ( + 'Dependence of {} on {} for {}. The calculation follows Andrews et ' + 'al., Geophys. Res. Lett., 39 (2012): The {} is defined as the ' + 'slope of the linear regression between {}-dependent {} TOA radiance ' + 'and the {} surface temperature anomaly{} of the abrupt 4x CO2 ' + 'experiment.'.format( + LONG_NAMES.get(var, + var), ' and '.join(COORDS['rad']), dataset_name, + LONG_NAMES.get(var, var), ' and '.join(COORDS['rad']), + FEEDBACK_PARAMETERS.get(var, var), + ('global mean' if NDIMS.get('tas') == 1 else '{}-dependent'.format( + ' and '.join(COORDS['tas']))), + '' if description is None else f' for {description}')) + return (netcdf_path, caption) + + +def _create_feedback_plot(tas_cube, cube, dataset_name, cfg, description=None): + """Plot feedback parameter vs. remaining dimensions.""" + var = cube.var_name + logger.debug("Plotting '%s' vs. %s for '%s'", SHORTER_NAMES.get(var, var), + COORDS['rad'], dataset_name) + x_data = _get_data_time_last(tas_cube) + y_data = _get_data_time_last(cube) + coords = [(coord, idx - 1) + for (idx, coord) in enumerate(cube.coords(dim_coords=True)) + if coord.name() != 'time'] + feedback_cube = iris.cube.Cube(_get_slope(x_data, y_data), + var_name=var, + dim_coords_and_dims=coords, + units='W m-2 K-1') + + # Plot + if feedback_cube.ndim == 1: + iplt.plot(feedback_cube) + plt.xlabel(f"{COORDS['rad'][0]} / " + f"{cube.coord(COORDS['rad'][0]).units.origin}") + plt.ylabel(f"{NICE_SYMBOLS.get(var, var)} / " + f"{NICE_UNITS.get(feedback_cube.units.origin, 'unknown')}") + colorbar = None + elif feedback_cube.ndim == 2: + iplt.contourf(feedback_cube, cmap='bwr', levels=_get_levels()) + colorbar = plt.colorbar(orientation='horizontal') + colorbar.set_label( + f"{NICE_SYMBOLS.get(var, var)} / " + f"{NICE_UNITS.get(feedback_cube.units.origin, 'unknown')}") + ticks = [-8.0, -6.0, -4.0, -2.0, 0.0, 2.0, 4.0, 6.0, 8.0] + colorbar.set_ticks(ticks) + colorbar.set_ticklabels([str(tick) for tick in ticks]) + if COORDS['rad'] == ['latitude', 'longitude']: + plt.gca().coastlines() + else: + plt.xlabel(f"{COORDS['rad'][0]} / " + f"{cube.coord(COORDS['rad'][0]).units.origin}") + plt.ylabel(f"{COORDS['rad'][1]} / " + f"{cube.coord(COORDS['rad'][1]).units.origin}") + else: + raise ValueError(f"Cube dimension {feedback_cube.ndim} not supported") + + # Appearance + title = f'{SHORTER_NAMES.get(var, var)} for {dataset_name}' + filename = ('{}_vs_{}_{}'.format(VAR_NAMES.get(var, var), + '-'.join(COORDS['rad']), dataset_name)) + if description is not None: + title += f' ({description})' + filename += f"_{description.replace(' ', '_')}" + plt.title(title) + plot_path = get_plot_filename(filename, cfg) + savefig_kwargs = dict(bbox_inches='tight', orientation='landscape') + plt.savefig(plot_path, **savefig_kwargs) + logger.info("Wrote %s", plot_path) + plt.close() + + 
return (plot_path, feedback_cube) + + +def _create_regression_file(tas_cube, + cube, + dataset_name, + cfg, + description=None): + """Save regression plot as netcdf file for a given dataset.""" + var = cube.var_name + reg = stats.linregress(tas_cube.data, cube.data) + filename = f'{var}_regression_{dataset_name}' + attrs = { + 'dataset': dataset_name, + 'regression_r_value': reg.rvalue, + 'regression_slope': reg.slope, + 'regression_interception': reg.intercept, + 'feedback_parameter': reg.slope, + } + attrs.update(cfg.get('output_attributes', {})) + if description is not None: + attrs['description'] = description + filename += f"_{description.replace(' ', '_')}" + if var in ('rtmt', 'rtnt'): + attrs['ECS'] = -reg.intercept / (2.0 * reg.slope) + tas_coord = iris.coords.AuxCoord( + tas_cube.data, + **extract_variables(cfg, as_iris=True)['tas']) + cube = iris.cube.Cube(cube.data, + attributes=attrs, + aux_coords_and_dims=[(tas_coord, 0)], + **extract_variables(cfg, as_iris=True)[var]) + netcdf_path = get_diagnostic_filename(filename, cfg) + io.iris_save(cube, netcdf_path) + return netcdf_path + + +def _create_regression_plot(tas_cube, + cube, + dataset_name, + cfg, + description=None): + """Create regression plot.""" + var = cube.var_name + logger.debug("Plotting '%s' vs. 'tas' for '%s'", var, dataset_name) + reg = stats.linregress(tas_cube.data, cube.data) + + # Regression line + x_reg = np.linspace(-1.0, 9.0, 2) + y_reg = reg.slope * x_reg + reg.intercept + + # Plot data + title = (f'{FEEDBACK_PARAMETERS.get(var, var)} TOA radiance for ' + f'{dataset_name}') + filename = f'{var}_regression_{dataset_name}' + if description is not None: + title += f' ({description})' + filename += f"_{description.replace(' ', '_')}" + plot_path = get_plot_filename(filename, cfg) + text = r'r = {:.2f}, {} = {:.2f}'.format(reg.rvalue, + NICE_SYMBOLS.get(var, var), + reg.slope) + if var in ('rtmt', 'rtnt'): + text += ', F = {:.2f}, ECS = {:.2f}'.format( + reg.intercept, -reg.intercept / (2.0 * reg.slope)) + plot.scatterplot( + [tas_cube.data, x_reg], + [cube.data, y_reg], + plot_path, + plot_kwargs=[{ + 'linestyle': 'none', + 'markeredgecolor': 'b', + 'markerfacecolor': 'none', + 'marker': 's', + }, { + 'color': 'k', + 'linestyle': '-', + }], + save_kwargs={ + 'bbox_inches': 'tight', + 'orientation': 'landscape', + }, + axes_functions={ + 'axhline': { + 'args': [0.0], + 'kwargs': { + 'color': 'black', + 'linestyle': '--', + }, + }, + 'set_title': title, + 'set_xlabel': + f"tas / {NICE_UNITS.get(tas_cube.units.origin, 'K')}", + 'set_ylabel': + f"{var} / {NICE_UNITS.get(cube.units.origin, 'unknown')}", + 'set_xlim': [0.0, 8.0], + 'text': { + 'args': [0.05, 0.9, text], + 'kwargs': { + 'transform': 'transAxes', + }, + }, + }, + ) + return (plot_path, reg) + + +def _create_table(table, cfg, description=None): + """Create summary table containing all climate feedback parameters.""" + logger.debug("Creating summary table") + (cell_data, row_labels, col_labels, col_units) = _dict_to_array(table) + + # Create netcdf file + cubes = _get_cube_list_for_table(cell_data, row_labels, col_labels, + col_units) + filename = 'summary_table' + if description is not None: + filename += f"_{description.replace(' ', '_')}" + netcdf_path = get_diagnostic_filename(filename, cfg) + for cube in cubes: + cube.attributes.update(cfg.get('output_attributes', {})) + io.iris_save(cubes, netcdf_path) + + # Create plot + cell_text = np.vectorize('{:.2f}'.format)(cell_data) + col_labels = [f"{NICE_SYMBOLS.get(l, l)} / " + 
f"{NICE_UNITS.get(col_units[i], 'unknown')}" + for (i, l) in enumerate(col_labels)] + (_, axes) = plt.subplots() + axes.axis('off') + table = axes.table( + cellText=cell_text, + rowLabels=row_labels, + colLabels=col_labels, + loc='center', + fontsize=8.0, + ) + table.scale(1.7, 1.7) + + # Save plot + plot_path = os.path.join(cfg['plot_dir'], filename + '.pdf') + plt.savefig(plot_path, bbox_inches='tight', orientation='landscape') + logger.info("Wrote %s", plot_path) + plt.close() + + # Provenance + caption = ( + 'Forcing, Feedback and Equilibrium Climate Sensitivity (ECS) values. ' + 'SW = short wave, LW = long wave, cs = clear sky, CRE = cloud ' + 'radiative effect (similar to Andrews et al., Geophys. Res. Lett., ' + '39, 2012).') + _write_provenance( + netcdf_path, + plot_path, + caption, + sorted([d['filename'] for d in cfg['input_data'].values()]), + cfg, + ) + + +def _dict_to_array(dict_): + """Convert (2D) dictionary to table.""" + row_labels = list(dict_.keys()) + + # Columns (variables) + all_cols = set() + for row in dict_.values(): + all_cols |= set(row.keys()) + col_labels = list(all_cols) + col_labels.sort() + + # Data + cell_data = np.array( + [[dict_.get(row, {}).get(col, np.nan) for col in col_labels] + for row in row_labels]) + col_units = [UNITS.get(var, 'unknown') for var in col_labels] + return (cell_data, row_labels, col_labels, col_units) + + +def _get_anomaly_data(input_data, year_idx=None): + """Calculate anomaly data for all variables.""" + logger.info("Calculating anomaly data") + project = input_data[0]['project'] + new_input_data = [] + for (var, var_data) in group_metadata(input_data, 'short_name').items(): + grouped_data = group_metadata(var_data, 'dataset') + for (dataset_name, datasets) in grouped_data.items(): + logger.debug("Calculating '%s' anomaly for dataset '%s'", var, + dataset_name) + data_4x = select_metadata(datasets, exp=EXP_4XCO2[project]) + data_pic = select_metadata(datasets, exp='piControl') + + # Check if all experiments are available + if not data_4x: + raise ValueError( + f"No '{EXP_4XCO2[project]}' data available for '{var}' of " + f"'{dataset_name}'") + if not data_pic: + raise ValueError( + f"No 'piControl' data available for '{var}' of " + f"'{dataset_name}'") + + # Calculate anomaly, extract correct years and save it + cube = calculate_anomaly(data_4x, data_pic) + _check_cube_dimensions(cube) + cube = cube[year_idx] + new_input_data.append({ + **data_4x[0], + 'ancestors': [data_4x[0]['filename'], data_pic[0]['filename']], + 'cube': + cube, + }) + msg = '' if not COORDS else f" with additional coordinates {COORDS['rad']}" + logger.info("Found %iD 'tas' data and %iD radiation data%s", + NDIMS.get('tas'), NDIMS.get('rad'), msg) + return new_input_data + + +def _get_cube_list_for_table(cell_data, row_labels, col_labels, col_units): + """Create :class:`iris.cube.CubeList` representing a table.""" + aux_coord = iris.coords.AuxCoord(row_labels, long_name='dataset') + cubes = iris.cube.CubeList() + for (idx, label) in enumerate(col_labels): + if label in ('ECS', 'F', 'rtnt') and RTMT_DATASETS: + rtmt_datasets = sorted(list(RTMT_DATASETS)) + attrs = {'net_toa_radiation': RTMT_TEXT.format(rtmt_datasets)} + else: + attrs = {} + cube = iris.cube.Cube( + np.ma.masked_invalid(cell_data[:, idx]), + var_name=VAR_NAMES.get(label, label), + long_name=LONG_NAMES.get(label, label), + units=col_units[idx], + aux_coords_and_dims=[(aux_coord, 0)], + attributes=attrs, + ) + cubes.append(cube) + return cubes + + +def _get_data_time_last(cube): + """Get data of 
:class:`iris.cube.Cube` with time axis as last dimension.""" + return np.moveaxis(cube.data, cube.coord_dims('time')[0], -1) + + +@partial(np.vectorize, excluded=['x_arr'], signature='(n),(n)->()') +def _get_intercept(x_arr, y_arr): + """Get intercept of linear regression of two (masked) arrays.""" + if np.ma.is_masked(y_arr): + x_arr = x_arr[~y_arr.mask] + y_arr = y_arr[~y_arr.mask] + if len(y_arr) < 2: + return np.nan + reg = stats.linregress(x_arr, y_arr) + return reg.intercept + + +def _get_levels(): + """Get symmetric levels for contour plot. + + This function might be changed to consider cube in the future. + + """ + n_levels_per_sign = 50 + val_max = 8.0 + val_min = -8.0 + max_range = max([val_max, -val_min]) + range_ = np.linspace(0.0, max_range, n_levels_per_sign + 1)[1:] + levels = list(-range_[::-1]) + [0.0] + list(range_) + return levels + + +def _get_mmm_rad(rad_var, rad_datasets): + """Get multi-model mean for radiation data.""" + logger.debug("Calculating multi-model mean for variable '%s'", rad_var) + ancestors = [] + dataset_names = [] + mmm = [] + for dataset in rad_datasets: + cube = dataset['cube'] + ancestors.extend(dataset['ancestors']) + dataset_names.append(dataset['dataset']) + mmm.append(cube.data) + _check_array_shapes(mmm, rad_var) + mmm = np.ma.array(mmm) + mmm_cube = cube.copy(data=np.ma.mean(mmm, axis=0)) + attributes = { + 'ancestors': ancestors, + 'dataset': 'MultiModelMean', + 'datasets': '|'.join(dataset_names), + 'project': rad_datasets[0]['project'], + 'short_name': rad_var, + } + mmm_cube.attributes = attributes + return {**attributes, 'cube': mmm_cube} + + +def _get_mmm_tas(rad_var, rad_datasets, tas_datasets): + """Get multi-model mean for tas data.""" + logger.debug( + "Calculating multi-model mean 'tas' for radiation variable '%s'", + rad_var) + ancestors = [] + dataset_names = [] + mmm = [] + for dataset_name in [d['dataset'] for d in rad_datasets]: + tas_data = select_metadata(tas_datasets, dataset=dataset_name) + if not tas_data: + raise ValueError( + f"No 'tas' data for dataset '{dataset_name}' available for " + f"multi-model mean calculation") + cube = tas_data[0]['cube'] + ancestors.extend(tas_data[0]['ancestors']) + dataset_names.append(dataset_name) + mmm.append(cube.data) + _check_array_shapes(mmm, 'tas') + mmm = np.ma.array(mmm) + mmm_cube = cube.copy(data=np.ma.mean(mmm, axis=0)) + attributes = { + 'ancestors': ancestors, + 'dataset': 'MultiModelMean', + 'datasets': '|'.join(dataset_names), + 'project': rad_datasets[0]['project'], + 'short_name': _get_tas_var('MultiModelMean', rad_var), + } + mmm_cube.attributes = attributes + return {**attributes, 'cube': mmm_cube} + + +def _get_multi_model_mean(input_data): + """Get multi-model mean for all variables.""" + logger.info("Calculating multi-model means") + mmm_data = [] + tas_data = select_metadata(input_data, short_name='tas') + for (var, datasets) in group_metadata(input_data, 'short_name').items(): + if var == 'tas': + continue + mmm_rad = _get_mmm_rad(var, datasets) + mmm_tas = _get_mmm_tas(var, datasets, tas_data) + mmm_data.append(mmm_rad) + mmm_data.append(mmm_tas) + input_data.extend(mmm_data) + return input_data + + +def _get_provenance_record(caption): + """Create a provenance record.""" + record = { + 'caption': caption, + 'statistics': ['mean', 'diff'], + 'domains': ['global'], + 'authors': ['schlund_manuel'], + 'references': ['andrews12grl', 'gregory04grl'], + 'realms': ['atmos'], + 'themes': ['phys'], + } + return record + + +@partial(np.vectorize, excluded=['x_arr'], 
signature='(n),(n)->()')
+def _get_slope(x_arr, y_arr):
+    """Get slope of linear regression of two (masked) arrays."""
+    if np.ma.is_masked(y_arr):
+        x_arr = x_arr[~y_arr.mask]
+        y_arr = y_arr[~y_arr.mask]
+    if len(y_arr) < 2:
+        return np.nan
+    reg = stats.linregress(x_arr, y_arr)
+    return reg.slope
+
+
+def _get_tas_var(dataset_name, rad_var):
+    """Get correct tas data for a certain radiation variable."""
+    if dataset_name == 'MultiModelMean':
+        return f'tas_{rad_var}'
+    return 'tas'
+
+
+def _vectorized_linregress(x_arr, y_arr):
+    """Vectorized version of :func:`scipy.stats.linregress`."""
+    slope = np.vectorize(lambda x, y: stats.linregress(x, y).slope,
+                         signature='(n),(n)->()')
+    intercept = np.vectorize(lambda x, y: stats.linregress(x, y).intercept,
+                             signature='(n),(n)->()')
+    return (slope(x_arr, y_arr), intercept(x_arr, y_arr))
+
+
+def _write_scalar_data(data, ancestor_files, cfg, description=None):
+    """Write scalar data for multiple datasets."""
+    var_attrs = [
+        {
+            'short_name': 'ecs',
+            'long_name': 'Equilibrium Climate Sensitivity (Gregory method)',
+            'units': cf_units.Unit('K'),
+        },
+        {
+            'short_name': 'lambda',
+            'long_name': 'Climate Feedback Parameter',
+            'units': cf_units.Unit('W m-2 K-1'),
+        },
+    ]
+    global_attrs = {'project': list(cfg['input_data'].values())[0]['project']}
+    if RTMT_DATASETS:
+        rtmt_datasets = sorted(list(RTMT_DATASETS))
+        global_attrs['net_toa_radiation'] = RTMT_TEXT.format(rtmt_datasets)
+    for (idx, var_attr) in enumerate(var_attrs):
+        caption = '{long_name} for multiple climate models'.format(**var_attr)
+        if description is not None:
+            filename = '{}_{}'.format(var_attr['short_name'],
+                                      description.replace(' ', '_'))
+            attributes = {'Description': description}
+            caption += f' for {description}.'
+        else:
+            filename = var_attr['short_name']
+            attributes = {}
+            caption += '.'
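+        # Merge the shared global attributes (project name and, if 'rtmt' was
+        # used as a fallback for 'rtnt', the corresponding disclaimer) into
+        # this file's attribute dictionary before writing it below.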
+ attributes.update(global_attrs) + path = get_diagnostic_filename(filename, cfg) + if not data[idx]: + raise ValueError(f"Cannot write file {path}, no data for variable " + f"'{var_attr['short_name']}' given") + + # Scalar data + if NDIMS['rad'] == 1: + io.save_scalar_data({d: data[idx][d].data + for d in data[idx]}, + path, + var_attr, + attributes=attributes) + + # 1D data + elif NDIMS['rad'] == 2: + io.save_1d_data(data[idx], + path, + COORDS['rad'][0], + var_attr, + attributes=attributes) + + # Higher dimensions + else: + logger.info( + "Writing netcdf summary file including ECS and feedback " + "parameters for all datasets is not supported for %iD data " + "yet", NDIMS['rad']) + return + + # Provenance + provenance_record = _get_provenance_record(caption) + provenance_record['ancestors'] = ancestor_files + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(path, provenance_record) + + +def _write_provenance(netcdf_path, plot_path, caption, ancestors, cfg, + **kwargs): + """Write provenance information for a single dataset cube.""" + provenance_record = _get_provenance_record(caption) + provenance_record.update({ + 'ancestors': ancestors, + **kwargs, + }) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(netcdf_path, provenance_record) + provenance_logger.log(plot_path, provenance_record) + + +def calculate_ecs(input_data, cfg, description=None): + """Calculate ECS and net climate feedback parameters.""" + logger.info("Calculating ECS and net climate feedback parameter") + msg = '' if description is None else f' for {description}' + ancestors = [] + ecs = {} + feedback_parameter = {} + + # Iterate over all datasets and save ECS and feedback parameters + for dataset in select_metadata(input_data, short_name='tas'): + dataset_name = dataset['dataset'] + logger.debug("Calculating ECS%s of dataset '%s'", msg, dataset_name) + rtnt_data = select_metadata(input_data, + short_name='rtnt', + dataset=dataset_name) + if not rtnt_data: + logger.debug( + "No 'rtmt' or 'rtnt' data for '%s' available, skipping ECS " + "calculation for it", dataset_name) + continue + tas_cube = dataset['cube'] + rtnt_cube = rtnt_data[0]['cube'] + if rtnt_cube.ndim > 2: + raise ValueError( + f"Calculating ECS is only supported for cubes with less than " + f"3 dimensions, got {rtnt_cube.ndim:d}D cube") + ancestors.extend(dataset['ancestors'] + rtnt_data[0]['ancestors']) + coords = [(coord, idx - 1) + for (idx, + coord) in enumerate(rtnt_cube.coords(dim_coords=True)) + if coord.name() != 'time'] + + # Calculate ECS (using linear regression) + reg = _vectorized_linregress(_get_data_time_last(tas_cube), + _get_data_time_last(rtnt_cube)) + ecs[dataset_name] = iris.cube.Cube(-reg[1] / (2 * reg[0]), + dim_coords_and_dims=coords) + feedback_parameter[dataset_name] = iris.cube.Cube( + reg[0], dim_coords_and_dims=coords) + ancestors = list(set(ancestors)) + if not ecs: + logger.info( + "No 'rtmt' or 'rtnt' data available, skipping ECS calculation") + return + + # Write data + _write_scalar_data([ecs, feedback_parameter], ancestors, cfg, description) + + +def check_input_data(cfg): + """Check input data.""" + if not variables_available(cfg, ['tas']): + raise ValueError("This diagnostic needs the variable 'tas'") + input_data = cfg['input_data'].values() + project_group = group_metadata(input_data, 'project') + projects = list(project_group.keys()) + if len(projects) > 1: + raise ValueError( + f"This diagnostic supports only unique 'project' attributes, got " + f"{projects}") + 
project = projects[0] + if project not in EXP_4XCO2: + raise ValueError(f"Project '{project}' not supported yet") + exp_group = group_metadata(input_data, 'exp') + exps = set(exp_group.keys()) + if exps != {'piControl', EXP_4XCO2[project]}: + raise ValueError( + f"This diagnostic needs 'piControl' and '{EXP_4XCO2[project]}' " + f"experiments, got {exps}") + + +def plot_feedback_parameters(input_data, cfg, description=None): + """Plot feedback parameters vs. remaining dimension(s).""" + # Iterate over radiation quantities (y axis) + for (var, datasets) in group_metadata(input_data, 'short_name').items(): + if 'tas' in var: + continue + logger.info("Creating feedback parameter plots for variable '%s'", var) + + # Iterate over all available datasets + for dataset in datasets: + dataset_name = dataset['dataset'] + tas_data = select_metadata(input_data, + short_name=_get_tas_var( + dataset_name, var), + dataset=dataset_name) + if not tas_data: + raise ValueError( + f"No 'tas' data for '{dataset_name}' available") + cube = dataset['cube'] + tas_cube = tas_data[0]['cube'] + if cube.ndim not in (2, 3): + raise ValueError( + f"Feedback plots are not supported for {cube.ndim:d}D " + f"input data, this requires 2D or 3D data") + + # Create plot + (plot_path, + feedback_cube) = _create_feedback_plot(tas_cube, + cube, + dataset_name, + cfg, + description=description) + (netcdf_path, + caption) = _create_feedback_file(feedback_cube, + dataset_name, + cfg, + description=description) + + # Provenance + if 'latitude' in COORDS['rad'] and 'longitude' in COORDS['rad']: + plot_types = ['geo'] + elif 'latitude' in COORDS['rad']: + plot_types = ['zonal'] + else: + plot_types = ['other'] + _write_provenance( + netcdf_path, + plot_path, + caption, + dataset['ancestors'] + tas_data[0]['ancestors'], + cfg, + plot_types=plot_types, + ) + + +def plot_regressions(input_data, cfg, description=None): + """Plot linear regressions used to calculate feedback parameters.""" + table = OrderedDict() + + # Iterate over radiation quantities (y axis) + for (var, datasets) in group_metadata(input_data, 'short_name').items(): + if 'tas' in var: + continue + logger.info("Creating regression plots for variable '%s'", var) + + # Iterate over all available datasets + for dataset in datasets: + dataset_name = dataset['dataset'] + table.setdefault(dataset_name, {}) + tas_data = select_metadata(input_data, + short_name=_get_tas_var( + dataset_name, var), + dataset=dataset_name) + if not tas_data: + raise ValueError( + f"No 'tas' data for '{dataset_name}' available") + tas_cube = tas_data[0]['cube'] + if dataset['cube'].ndim > 1: + raise ValueError( + "Regression plots are not supported for input data with " + "more than one dimension (which should be time)") + + # Save plot and netcdf file + (plot_path, reg) = _create_regression_plot(tas_cube, + dataset['cube'], + dataset_name, + cfg, + description=description) + netcdf_path = _create_regression_file(tas_cube, + dataset['cube'], + dataset_name, + cfg, + description=description) + + # Expand table + table[dataset_name][var] = reg.slope + if var == 'rtnt': + table[dataset_name]['ECS'] = (-reg.intercept / 2.0 / reg.slope) + table[dataset_name]['F'] = reg.intercept + + # Provenance + caption = ( + 'Scatterplot between {} TOA radiance and global mean surface ' + 'temperature anomaly{} of the abrupt 4x CO2 experiment ' + 'including linear regression for {} (following Andrews et ' + 'al., Geophys. Res. 
Lett., 39, 2012).'.format(
+                    FEEDBACK_PARAMETERS.get(var, var),
+                    '' if description is None else f' for {description}',
+                    dataset_name))
+            _write_provenance(netcdf_path,
+                              plot_path,
+                              caption,
+                              dataset['ancestors'] + tas_data[0]['ancestors'],
+                              cfg,
+                              plot_types=['scatter'])
+
+    # Create summary table
+    _create_table(table, cfg, description=description)
+
+
+def preprocess_data(cfg, year_idx=None):
+    """Calculate anomalies and multi-model mean."""
+    input_data = deepcopy(list(cfg['input_data'].values()))
+    input_data = sorted_metadata(input_data, ['short_name', 'exp', 'dataset'])
+
+    # Use 'rtmt' instead of 'rtnt' if necessary
+    for dataset in input_data:
+        if dataset['short_name'] == 'rtmt':
+            RTMT_DATASETS.add(dataset['dataset'])
+            dataset['short_name'] = 'rtnt'
+    if RTMT_DATASETS:
+        logger.info("Using 'rtmt' instead of 'rtnt' for datasets '%s'",
+                    RTMT_DATASETS)
+
+    # Calculate anomalies for every dataset
+    input_data = _get_anomaly_data(input_data, year_idx)
+
+    # Calculate multi-model mean
+    if cfg['calculate_mmm']:
+        input_data = _get_multi_model_mean(input_data)
+
+    # Remove other datasets if desired
+    if cfg['only_consider_mmm']:
+        logger.info("Removing all datasets except for 'MultiModelMean'")
+        input_data = [d for d in input_data if d['dataset'] ==
+                      'MultiModelMean']
+
+    return input_data
+
+
+def set_default_cfg(cfg):
+    """Set default values for cfg."""
+    cfg = deepcopy(cfg)
+    cfg.setdefault('calculate_mmm', True)
+    cfg.setdefault('only_consider_mmm', False)
+    cfg.setdefault('seaborn_settings', {})
+    if cfg['only_consider_mmm'] and not cfg['calculate_mmm']:
+        logger.warning("Automatically setting 'calculate_mmm' to 'True' since "
+                       "'only_consider_mmm' is set to 'True'")
+        cfg['calculate_mmm'] = True
+    return cfg
+
+
+def main(cfg):
+    """Run the diagnostic."""
+    cfg = set_default_cfg(cfg)
+    sns.set_theme(**cfg['seaborn_settings'])
+    check_input_data(cfg)
+    year_indices = {
+        'all 150 years': slice(None),
+        'first 20 years': slice(None, 20),
+        'last 130 years': slice(20, None),
+    }
+    for (descr, year_idx) in year_indices.items():
+        logger.info("Considering %s for all datasets", descr)
+        input_data = preprocess_data(cfg, year_idx)
+
+        # Calculate and save ECS
+        if NDIMS['rad'] < 3:
+            calculate_ecs(input_data, cfg, description=descr)
+        else:
+            logger.info("No ECS calculation for %iD data available",
+                        NDIMS['rad'])
+
+        # Plots
+        if NDIMS['rad'] == 1:
+            plot_regressions(input_data, cfg, description=descr)
+        elif NDIMS['rad'] in (2, 3):
+            plot_feedback_parameters(input_data, cfg, description=descr)
+        else:
+            logger.info("No plots for %iD data available", NDIMS['rad'])
+
+
+if __name__ == '__main__':
+    with run_diagnostic() as config:
+        main(config)
diff --git a/esmvaltool/diag_scripts/climate_metrics/psi.py b/esmvaltool/diag_scripts/climate_metrics/psi.py
index 04238f9496..05f06e9614 100644
--- a/esmvaltool/diag_scripts/climate_metrics/psi.py
+++ b/esmvaltool/diag_scripts/climate_metrics/psi.py
@@ -17,10 +17,12 @@
Configuration options in recipe
-------------------------------
-window_length : int, optional (default: 55)
-    Number of years used for the moving window average.
lag : int, optional (default: 1)
    Lag (in years) for the autocorrelation function.
+output_attributes : dict, optional
+    Write additional attributes to netcdf files.
+window_length : int, optional (default: 55)
+    Number of years used for the moving window average.
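The quantity computed here is the ψ metric of Cox et al. (2018): within every moving window, ψ = σ_T / √(-ln r₁), where σ_T is the standard deviation and r₁ the lag-1 autocorrelation of the windowed temperature. A rough numpy sketch of that windowed calculation on synthetic data (illustrative only; the detrending and iris bookkeeping done by `calculate_psi` below are omitted):

```python
import numpy as np

rng = np.random.default_rng(seed=0)
# Synthetic stand-in for a 200-year annual-mean temperature anomaly series
tas = np.cumsum(rng.normal(0.0, 0.05, 200)) + rng.normal(0.0, 0.1, 200)

window_length = 55  # years per moving window (cf. the recipe option above)
lag = 1             # lag (in years) of the autocorrelation function
psis = []
for start in range(len(tas) - window_length + 1):
    window = tas[start:start + window_length]
    window = window - window.mean()
    # Lag-1 autocorrelation; must lie in (0, 1) for the log/sqrt to be defined
    autocorr = np.corrcoef(window[:-lag], window[lag:])[0, 1]
    psis.append(np.std(window) / np.sqrt(-np.log(autocorr)))
print(f"psi: mean = {np.mean(psis):.3f} over {len(psis)} windows")
```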
""" @@ -29,12 +31,19 @@ import cf_units import iris +import iris.coord_categorisation import numpy as np from scipy import stats from esmvaltool.diag_scripts.shared import ( - ProvenanceLogger, get_diagnostic_filename, group_metadata, io, - run_diagnostic, select_metadata) + ProvenanceLogger, + get_diagnostic_filename, + group_metadata, + io, + run_diagnostic, + select_metadata, + sorted_metadata, +) logger = logging.getLogger(os.path.basename(__file__)) @@ -65,18 +74,19 @@ def calculate_psi(cube, cfg): psis.append(np.std(tas) / np.sqrt(-np.log(autocorr))) # Return new cube - year_coord = iris.coords.DimCoord( - np.array(psi_years), - var_name='year', - long_name='year', - units=cf_units.Unit('year')) + year_coord = iris.coords.DimCoord(np.array(psi_years), + var_name='year', + long_name='year', + units=cf_units.Unit('year')) psi_cube = iris.cube.Cube( np.array(psis), dim_coords_and_dims=[(year_coord, 0)], attributes={ 'window_length': window_length, - 'lag': lag - }) + 'lag': lag, + **cfg.get('output_attributes', {}), + }, + ) return psi_cube @@ -86,7 +96,7 @@ def get_provenance_record(caption, ancestor_files): 'caption': caption, 'statistics': ['var', 'diff', 'corr', 'detrend'], 'domains': ['global'], - 'authors': ['schl_ma'], + 'authors': ['schlund_manuel'], 'references': ['cox18nature'], 'realms': ['atmos'], 'themes': ['phys'], @@ -95,11 +105,30 @@ def get_provenance_record(caption, ancestor_files): return record +def get_attributes(cfg, single_psi_cube, input_data): + """Get attributes for psi cube for all datasets.""" + datasets = sorted(list({str(d['dataset']) for d in input_data})) + projects = sorted(list({str(d['project']) for d in input_data})) + ref = sorted(list({str(d.get('reference_dataset')) for d in input_data})) + datasets = "|".join(datasets) + projects = "|".join(projects) + ref = "|".join(ref) + attrs = single_psi_cube.attributes + attrs.update({ + 'dataset': datasets, + 'project': projects, + 'reference_dataset': ref, + }) + attrs.update(cfg.get('output_attributes', {})) + return attrs + + def main(cfg): """Run the diagnostic.""" input_data = ( select_metadata(cfg['input_data'].values(), short_name='tas') + select_metadata(cfg['input_data'].values(), short_name='tasa')) + input_data = sorted_metadata(input_data, ['short_name', 'exp', 'dataset']) if not input_data: raise ValueError("This diagnostics needs 'tas' or 'tasa' variable") @@ -114,6 +143,7 @@ def main(cfg): for (dataset, [data]) in grouped_data.items(): logger.info("Processing %s", dataset) cube = iris.load_cube(data['filename']) + iris.coord_categorisation.add_year(cube, 'time') cube = cube.aggregated_by('year', iris.analysis.MEAN) psi_cube = calculate_psi(cube, cfg) data.update(psi_attrs) @@ -137,11 +167,11 @@ def main(cfg): # Save averaged psis for every dataset in one file out_path = get_diagnostic_filename('psi', cfg) - io.save_scalar_data( - psis, out_path, psi_attrs, attributes=psi_cube.attributes) + attrs = get_attributes(cfg, psi_cube, input_data) + io.save_scalar_data(psis, out_path, psi_attrs, attributes=attrs) # Provenance - caption = "{long_name} for mutliple climate models.".format(**psi_attrs) + caption = "{long_name} for multiple climate models.".format(**psi_attrs) ancestor_files = [d['filename'] for d in input_data] provenance_record = get_provenance_record(caption, ancestor_files) with ProvenanceLogger(cfg) as provenance_logger: diff --git a/esmvaltool/diag_scripts/climate_metrics/tcr.py b/esmvaltool/diag_scripts/climate_metrics/tcr.py new file mode 100644 index 0000000000..c227a6fa6e --- 
/dev/null
+++ b/esmvaltool/diag_scripts/climate_metrics/tcr.py
@@ -0,0 +1,370 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""Diagnostic script to calculate Transient Climate Response (TCR).
+
+Description
+-----------
+Calculate the transient climate response (see e.g. Gregory and Forster, 2008).
+
+Author
+------
+Manuel Schlund (DLR, Germany)
+
+Project
+-------
+CRESCENDO
+
+Configuration options in recipe
+-------------------------------
+calculate_mmm : bool, optional (default: True)
+    Calculate multi-model mean TCR.
+plot : bool, optional (default: True)
+    Plot temperature vs. time.
+read_external_file : str, optional
+    Read TCR from external file. The path can be given relative to this
+    diagnostic script or as absolute path.
+savefig_kwargs : dict, optional
+    Keyword arguments for :func:`matplotlib.pyplot.savefig`.
+seaborn_settings : dict, optional
+    Options for :func:`seaborn.set_theme` (affects all plots).
+
+"""
+
+import logging
+import os
+from copy import deepcopy
+from pprint import pformat
+
+import cf_units
+import iris
+import iris.coord_categorisation
+import matplotlib.pyplot as plt
+import numpy as np
+import seaborn as sns
+import yaml
+from scipy import stats
+
+from esmvaltool.diag_scripts.shared import (
+    ProvenanceLogger,
+    get_diagnostic_filename,
+    get_plot_filename,
+    group_metadata,
+    io,
+    run_diagnostic,
+    select_metadata,
+    sorted_metadata,
+    variables_available,
+)
+
+logger = logging.getLogger(os.path.basename(__file__))
+
+START_YEAR_IDX = 60
+END_YEAR_IDX = 80
+
+
+def _get_anomaly_cube(onepct_cube, pi_cube):
+    """Get anomaly cube."""
+    iris.coord_categorisation.add_year(onepct_cube, 'time')
+    onepct_cube = onepct_cube.aggregated_by('year', iris.analysis.MEAN)
+
+    iris.coord_categorisation.add_year(pi_cube, 'time')
+    pi_cube = pi_cube.aggregated_by('year', iris.analysis.MEAN)
+
+    # Check cube
+    if onepct_cube.ndim != 1:
+        raise ValueError(
+            f"This diagnostic needs 1D cubes, got {onepct_cube.ndim:d}D cube "
+            f"for '1pctCO2' experiment")
+    if pi_cube.ndim != 1:
+        raise ValueError(
+            f"This diagnostic needs 1D cubes, got {pi_cube.ndim:d}D cube for "
+            f"'piControl' experiment")
+    if onepct_cube.shape != pi_cube.shape:
+        raise ValueError(
+            f"Cube shapes of '1pctCO2' and 'piControl' are not identical, got "
+            f"{onepct_cube.shape} and {pi_cube.shape}")
+    if onepct_cube.shape[0] < END_YEAR_IDX:
+        raise ValueError(
+            f"Cubes need at least {END_YEAR_IDX:d} points for TCR "
+            f"calculation, got only {onepct_cube.shape[0]:d}")
+
+    # Calculate anomaly
+    reg = stats.linregress(pi_cube.coord('year').points, pi_cube.data)
+    onepct_cube.data -= (reg.slope * pi_cube.coord('year').points +
+                         reg.intercept)
+
+    # Adapt metadata
+    onepct_cube.standard_name = None
+    onepct_cube.var_name += '_anomaly'
+    onepct_cube.long_name += ' (Anomaly)'
+    onepct_cube.attributes['anomaly'] = ('relative to linear fit of piControl '
+                                         'run')
+    onepct_cube.convert_units('K')
+    return onepct_cube
+
+
+def _get_anomaly_cubes(cfg):
+    """Get all anomaly cubes."""
+    logger.info("Calculating anomalies")
+    cubes = {}
+    ancestors = {}
+    input_data = cfg['input_data'].values()
+    input_data = sorted_metadata(input_data, ['short_name', 'exp', 'dataset'])
+    onepct_data = select_metadata(input_data, short_name='tas', exp='1pctCO2')
+
+    # Process data
+    for dataset in onepct_data:
+        dataset_name = dataset['dataset']
+        pi_data = select_metadata(input_data,
+                                  short_name='tas',
+                                  exp='piControl',
+                                  dataset=dataset_name)
+        if not pi_data:
+            raise ValueError("No 'piControl' data available for dataset "
                             f"'{dataset_name}'")
+        onepct_cube = iris.load_cube(dataset['filename'])
+        pi_cube = iris.load_cube(pi_data[0]['filename'])
+        anomaly_cube = _get_anomaly_cube(onepct_cube, pi_cube)
+        cubes[dataset_name] = anomaly_cube
+        ancestors[dataset_name] = [dataset['filename'], pi_data[0]['filename']]
+
+    # Calculate multi-model mean if desired
+    if cfg.get('calculate_mmm', True):
+        (mmm_cube, mmm_ancestors) = _get_mmm_anomaly(cubes, ancestors, cfg)
+        cubes['MultiModelMean'] = mmm_cube
+        ancestors['MultiModelMean'] = mmm_ancestors
+
+    return (cubes, ancestors)
+
+
+def _get_mmm_anomaly(cubes, ancestors, cfg):
+    """Get multi-model mean anomaly."""
+    logger.info("Calculating multi-model mean anomaly")
+    mmm_ancestors = [f for sublist in ancestors.values() for f in sublist]
+    project = list(cfg['input_data'].values())[0]['project']
+    datasets = []
+    mmm_anomaly = []
+    for (dataset_name, cube) in cubes.items():
+        datasets.append(dataset_name)
+        mmm_anomaly.append(cube.data)
+    mmm_anomaly = np.ma.array(mmm_anomaly)
+    dataset_0 = list(cubes.keys())[0]
+    mmm_cube = cubes[dataset_0].copy(data=np.ma.mean(mmm_anomaly, axis=0))
+    mmm_cube.attributes = {
+        'ancestors': mmm_ancestors,
+        'dataset': 'MultiModelMean',
+        'datasets': '|'.join(datasets),
+        'project': project,
+        'short_name': mmm_cube.var_name,
+    }
+    time_coord = iris.coords.DimCoord(
+        np.arange(mmm_cube.coord('time').shape[0]),
+        var_name='time',
+        standard_name='time',
+        long_name='time',
+        units='years',
+    )
+    mmm_cube.remove_coord('time')
+    mmm_cube.add_dim_coord(time_coord, 0)
+    return (mmm_cube, mmm_ancestors)
+
+
+def _plot(cfg, cube, dataset_name, tcr):
+    """Create scatterplot of temperature anomaly vs. time."""
+    if not cfg.get('plot', True):
+        return (None, None, None)
+    logger.debug("Plotting temperature anomaly vs. time for '%s'",
+                 dataset_name)
+    (_, axes) = plt.subplots()
+
+    # Plot data
+    x_data = np.arange(cube.shape[0])
+    y_data = cube.data
+    axes.scatter(x_data, y_data, color='b', marker='o')
+
+    # Plot lines
+    line_kwargs = {'color': 'k', 'linewidth': 1.0, 'linestyle': '--'}
+    axes.axhline(tcr, **line_kwargs)
+    axes.axvline(START_YEAR_IDX, **line_kwargs)
+    axes.axvline(END_YEAR_IDX, **line_kwargs)
+
+    # Appearance
+    units_str = (cube.units.symbol
+                 if cube.units.origin is None else cube.units.origin)
+    axes.set_title(dataset_name)
+    axes.set_xlabel('Years after experiment start')
+    axes.set_ylabel(f'Temperature anomaly / {units_str}')
+    axes.set_xlim([x_data[0] - 1, x_data[-1] + 1])
+    axes.set_ylim([-1.0, 7.0])
+    axes.text(0.0, tcr + 0.1, 'TCR = {:.1f} {}'.format(tcr, units_str))
+
+    # Save cube
+    netcdf_path = get_diagnostic_filename(dataset_name, cfg)
+    io.iris_save(cube, netcdf_path)
+
+    # Save plot
+    plot_path = get_plot_filename(dataset_name, cfg)
+    plt.savefig(plot_path, **cfg['savefig_kwargs'])
+    logger.info("Wrote %s", plot_path)
+    plt.close()
+
+    # Provenance
+    provenance_record = get_provenance_record(
+        f"Time series of the global mean surface air temperature anomaly "
+        f"(relative to the linear fit of the pre-industrial control run) of "
+        f"{dataset_name} for the 1% CO2 increase per year experiment.
The " + f"horizontal dashed line indicates the transient climate response " + f"(TCR) defined as the 20 year average temperature anomaly centered " + f"at the time of CO2 doubling (vertical dashed lines).") + provenance_record.update({ + 'plot_types': ['times'], + }) + + return (netcdf_path, plot_path, provenance_record) + + +def calculate_tcr(cfg): + """Calculate transient climate response (TCR).""" + tcr = {} + + # Get anomaly cubes + (anomaly_cubes, ancestors) = _get_anomaly_cubes(cfg) + + # Iterate over cubes and calculate TCR + for (dataset_name, anomaly_cube) in anomaly_cubes.items(): + tas_2x = anomaly_cube[START_YEAR_IDX:END_YEAR_IDX].collapsed( + 'time', iris.analysis.MEAN).data + new_tcr = tas_2x + tcr[dataset_name] = new_tcr + logger.info("TCR (%s) = %.2f %s", dataset_name, new_tcr, + anomaly_cube.units) + + # Plot + (path, plot_path, provenance_record) = _plot(cfg, anomaly_cube, + dataset_name, new_tcr) + if path is not None: + provenance_record['ancestors'] = ancestors[dataset_name] + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(path, provenance_record) + provenance_logger.log(plot_path, provenance_record) + + return tcr + + +def check_input_data(cfg): + """Check input data.""" + if not variables_available(cfg, ['tas']): + raise ValueError( + "This diagnostic needs variable 'tas' if 'read_external_file' is " + "not given") + input_data = cfg['input_data'].values() + project_group = group_metadata(input_data, 'project') + projects = list(project_group.keys()) + if len(projects) > 1: + raise ValueError( + f"This diagnostic supports only unique 'project' attributes, got " + f"{projects}") + exp_group = group_metadata(input_data, 'exp') + exps = set(exp_group.keys()) + if exps != {'piControl', '1pctCO2'}: + raise ValueError( + f"This diagnostic needs '1pctCO2' and 'piControl' experiment, got " + f"{exps}") + + +def get_provenance_record(caption): + """Create a provenance record describing the diagnostic data and plot.""" + record = { + 'caption': caption, + 'statistics': ['mean', 'diff'], + 'domains': ['global'], + 'authors': ['schlund_manuel'], + 'references': ['gregory08jgr'], + 'realms': ['atmos'], + 'themes': ['phys'], + } + return record + + +def read_external_file(cfg): + """Read external file to get TCR.""" + filepath = os.path.expanduser(os.path.expandvars( + cfg['read_external_file'])) + if not os.path.isabs(filepath): + filepath = os.path.join(os.path.dirname(__file__), filepath) + if not os.path.isfile(filepath): + raise FileNotFoundError( + f"Desired external file '{filepath}' does not exist") + with open(filepath, 'r') as infile: + external_data = yaml.safe_load(infile) + tcr = external_data.get('tcr', {}) + logger.info("Reading external file '%s'", filepath) + logger.info("Found TCR (K):") + logger.info("%s", pformat(tcr)) + return (tcr, filepath) + + +def set_default_cfg(cfg): + """Set default values for cfg.""" + cfg = deepcopy(cfg) + cfg.setdefault('savefig_kwargs', { + 'dpi': 300, + 'orientation': 'landscape', + 'bbox_inches': 'tight', + }) + return cfg + + +def write_data(cfg, tcr, external_file=None): + """Write netcdf files.""" + var_attr = { + 'short_name': 'tcr', + 'long_name': 'Transient Climate Response (TCR)', + 'units': cf_units.Unit('K'), + } + path = get_diagnostic_filename(var_attr['short_name'], cfg) + project = list(cfg['input_data'].values())[0]['project'] + io.save_scalar_data(tcr, path, var_attr, attributes={'project': project}) + caption = "{long_name} for multiple climate models.".format(**var_attr) + 
provenance_record = get_provenance_record(caption) + ancestor_files = [] + for dataset_name in tcr.keys(): + datasets = select_metadata(cfg['input_data'].values(), + dataset=dataset_name) + ancestor_files.extend(sorted([d['filename'] for d in datasets])) + if external_file is not None: + ancestor_files.append(external_file) + provenance_record['ancestors'] = ancestor_files + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(path, provenance_record) + + +def main(cfg): + """Run the diagnostic.""" + cfg = set_default_cfg(cfg) + sns.set_theme(**cfg.get('seaborn_settings', {})) + + # Read external file if desired + if cfg.get('read_external_file'): + (tcr, external_file) = read_external_file(cfg) + else: + check_input_data(cfg) + tcr = {} + external_file = None + + # Calculate TCR directly + new_tcr = calculate_tcr(cfg) + for dataset_name in new_tcr: + if dataset_name in tcr: + logger.warning( + "Overwriting externally given TCR from file '%s' for '%s'", + external_file, dataset_name) + tcr.update(new_tcr) + + # Write TCR + write_data(cfg, tcr) + + +if __name__ == '__main__': + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/climate_patterns/climate_patterns.py b/esmvaltool/diag_scripts/climate_patterns/climate_patterns.py new file mode 100644 index 0000000000..bc265cda00 --- /dev/null +++ b/esmvaltool/diag_scripts/climate_patterns/climate_patterns.py @@ -0,0 +1,658 @@ +# (C) Crown Copyright 2022-2024, Met Office. +"""Diagnostic script to build climate patterns from CMIP6 models. + +Description +----------- +Builds patterns, anomaly and climatology cubes from CMIP6 models. +This diagnostic needs preprocessed mean monthly cubes, with no +gridding requirements. Default re-grid specification exists to +decrease CPU-load and run-time. + +Author +------ +Gregory Munday (Met Office, UK) + + +Configuration options in recipe +------------------------------- +jules_mode: bool, optional (default: false) + options: true, false + def: outputs extra data (anomaly, climatology) per variable + to drive JULES-IMOGEN configuration +parallelise: bool, optional (default: false) + options: true, false + def: parallelises code to run N models at once +area: str, optional (default: global) + options: global, land + def: area over which to calculate climate patterns +""" + +import logging +import os +from pathlib import Path + +import iris +import iris.coord_categorisation +import iris.cube +import numpy as np +import sklearn.linear_model +from esmvalcore.preprocessor import ( + area_statistics, + climate_statistics, + extract_time, +) + +import esmvaltool.diag_scripts.climate_patterns.sub_functions as sf +from esmvaltool.diag_scripts.climate_patterns.plotting import ( + plot_patterns, + plot_timeseries, +) +from esmvaltool.diag_scripts.shared import run_diagnostic + +logger = logging.getLogger(Path(__file__).stem) + + +def calculate_climatology(cube, syr=1850, eyr=1889): + """Handle aggregation to make climatology. 
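+
+    A minimal usage sketch (assumes a cube of monthly means covering the
+    requested climatology window)::
+
+        clim = calculate_climatology(cube, syr=1850, eyr=1889)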
+
+    Parameters
+    ----------
+    cube : cube
+        cube loaded from config dictionary
+    syr : int
+        set climatology start year
+    eyr : int
+        set climatology end year
+
+    Returns
+    -------
+    cube_aggregated : cube
+        40-year climatology cube from syr-eyr (default 1850-1889)
+    """
+    cube_40yr = extract_time(
+        cube,
+        start_year=syr,
+        start_month=1,
+        start_day=1,
+        end_year=eyr,
+        end_month=12,
+        end_day=31
+    )
+    cube_aggregated = climate_statistics(cube_40yr, 'mean', 'month')
+
+    return cube_aggregated
+
+
+def diurnal_temp_range(cubelist):
+    """Calculate diurnal range from monthly max and min temperatures.
+
+    Parameters
+    ----------
+    cubelist : cubelist
+        cubelist of tasmin and tasmax
+
+    Returns
+    -------
+    range_cube : cube
+        cube of calculated diurnal range
+    """
+    range_cube = cubelist[0] - cubelist[1]
+
+    # check in case cubes are wrong way around
+    if np.mean(range_cube.data) < 0:
+        range_cube = -range_cube
+
+    range_cube.rename("Diurnal Range")
+    range_cube.var_name = "range_tl1"
+
+    return range_cube
+
+
+def calculate_diurnal_range(clim_list, ts_list):
+    """Facilitate diurnal range calculation and appending.
+
+    Parameters
+    ----------
+    clim_list : cubelist
+        cubelist of climatology cubes
+    ts_list : cubelist
+        cubelist of standard timeseries cubes
+
+    Returns
+    -------
+    clim_list_final : cubelist
+        cubelist of climatology cubes including diurnal range
+    ts_list_final : cubelist
+        cubelist of standard timeseries cubes including diurnal range
+    """
+    temp_range_list_clim = iris.cube.CubeList([])
+    temp_range_list_ts = iris.cube.CubeList([])
+    comb_list = [clim_list, ts_list]
+
+    for cube_list in comb_list:
+        for cube in cube_list:
+            if (cube.var_name in ("tasmax", "tasmin")) and cube in clim_list:
+                temp_range_list_clim.append(cube)
+            elif (cube.var_name in ("tasmax", "tasmin")) and cube in ts_list:
+                temp_range_list_ts.append(cube)
+            else:
+                pass
+
+    derived_diurnal_clim = diurnal_temp_range(temp_range_list_clim)
+    derived_diurnal_ts = diurnal_temp_range(temp_range_list_ts)
+
+    # append diurnal range to lists
+    clim_list_final, ts_list_final = append_diurnal_range(
+        derived_diurnal_clim, derived_diurnal_ts, clim_list, ts_list
+    )
+
+    return clim_list_final, ts_list_final
+
+
+def append_diurnal_range(derived_diurnal_clim,
+                         derived_diurnal_ts,
+                         clim_list,
+                         ts_list):
+    """Append diurnal range to cubelists.
+
+    Parameters
+    ----------
+    derived_diurnal_clim : cube
+        derived diurnal climatology cube
+    derived_diurnal_ts : cube
+        derived diurnal timeseries cube
+    clim_list : cubelist
+        existing climatology cubelist, no range
+    ts_list : cubelist
+        existing timeseries cubelist, no range
+
+    Returns
+    -------
+    clim_list_final : cubelist
+        cubelist of climatology cubes including diurnal range
+    ts_list_final : cubelist
+        cubelist of standard timeseries cubes including diurnal range
+    """
+    # creating cube list without tasmax or tasmin
+    # (since we just wanted the diurnal range)
+    clim_list_final = iris.cube.CubeList([])
+    ts_list_final = iris.cube.CubeList([])
+
+    for cube in clim_list:
+        if cube.var_name not in ("tasmax", "tasmin"):
+            clim_list_final.append(cube)
+
+    for cube in ts_list:
+        if cube.var_name not in ("tasmax", "tasmin"):
+            ts_list_final.append(cube)
+
+    clim_list_final.append(derived_diurnal_clim)
+    ts_list_final.append(derived_diurnal_ts)
+
+    return clim_list_final, ts_list_final
+
+
+def calculate_anomaly(clim_list, ts_list):
+    """Calculate variables as anomalies, and add diurnal range as a variable.
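+
+    Anomalies are month-matched: for each cube, anom[t] = ts[t] -
+    clim[month(t)], with clim the monthly climatology computed above.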
+
+    Parameters
+    ----------
+    clim_list : cubelist
+        cubelist of climatology variables
+    ts_list : cubelist
+        cubelist of standard variable timeseries
+
+    Returns
+    -------
+    clim_list_final : cubelist
+        cubelist of clim. vars, inc. diurnal range
+    anom_list_final : cubelist
+        cubelist of anomaly vars, inc. diurnal range
+    """
+    # calculate diurnal temperature range cube
+    clim_list_final, ts_list_final = calculate_diurnal_range(
+        clim_list,
+        ts_list
+    )
+
+    anom_list_final = ts_list_final.copy()
+
+    # calc the anom by subtracting the monthly climatology from
+    # the time series
+    for i, _ in enumerate(ts_list_final):
+        i_months = (
+            anom_list_final[i].coord("month_number").points - 1
+        )  # -1 because months are numbered 1..12
+        anom_list_final[i].data -= clim_list_final[i][i_months].data
+
+    return clim_list_final, anom_list_final
+
+
+def regression(tas, cube_data, area, ocean_frac=None, land_frac=None):
+    """Calculate coeffs of regression between global surf temp and variable.
+
+    Parameters
+    ----------
+    tas : cube
+        near-surface air temperature
+    cube_data : arr
+        cube.data array of a variable
+    area: str
+        area over which to calculate patterns
+    ocean_frac: cube
+        gridded ocean fraction
+    land_frac: cube
+        gridded land fraction
+
+    Returns
+    -------
+    slope_array : arr
+        array of grid cells with same shape as initial cube,
+        containing the regression slope
+    """
+    if area == "land":
+        # calculate average warming over land
+        tas_data = sf.area_avg_landsea(
+            tas, ocean_frac, land_frac, land=True, return_cube=False
+        )
+    else:
+        # calculate global average warming
+        tas_data = area_statistics(tas, 'mean').data
+
+    # Reshape cube for regression
+    cube_reshaped = cube_data.reshape(cube_data.shape[0], -1)
+
+    # Perform linear regression on valid values
+    model = sklearn.linear_model.LinearRegression(
+        fit_intercept=False, copy_X=True
+    )
+    model.fit(tas_data.reshape(-1, 1), cube_reshaped)
+
+    # Extract regression coefficients
+    slopes = model.coef_
+
+    # Reshape the regression coefficients back to the shape of the grid cells
+    slope_array = slopes.reshape(cube_data.shape[1:])
+
+    return slope_array
+
+
+def create_cube(tas_cube, ssp_cube, array, month_number, units=None):
+    """Create a new cube from existing metadata, and new array data.
+
+    Parameters
+    ----------
+    tas_cube: cube
+        near-surface air temperature
+    ssp_cube: cube
+        cube of a given variable
+    array: array
+        output array from regression
+    month_number: int
+        month related to the regression array
+    units: str
+        units related to the regression variable
+
+    Returns
+    -------
+    cube: cube
+        cube filled with regression array and metadata
+
+    """
+    # assigning dim_coords
+    coord1 = tas_cube.coord(contains_dimension=1)
+    coord2 = tas_cube.coord(contains_dimension=2)
+    dim_coords_and_dims = [(coord1, 0), (coord2, 1)]
+
+    # assigning aux_coord
+    coord_month = iris.coords.AuxCoord(month_number, var_name="imogen_drive")
+    aux_coords_and_dims = [(coord_month, ())]
+
+    cube = sf.rename_variables(ssp_cube, has_orig_vars=False)
+
+    # creating cube
+    cube = iris.cube.Cube(
+        array,
+        units=units,
+        dim_coords_and_dims=dim_coords_and_dims,
+        aux_coords_and_dims=aux_coords_and_dims,
+        var_name=cube.var_name,
+        standard_name=cube.standard_name,
+    )
+
+    return cube
+
+
+def calculate_regressions(
+    anom_list,
+    area,
+    ocean_frac=None,
+    land_frac=None,
+    yrs=86
+):
+    """Facilitate the calculation of regression coeffs (climate patterns).
+
+    Also creates a new cube of patterns per variable.
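+
+    Each pattern is the least-squares slope of the local monthly anomaly
+    regressed on the mean warming; per month, the fit mirrors ``regression``
+    above (a sketch, names illustrative)::
+
+        model = sklearn.linear_model.LinearRegression(fit_intercept=False)
+        model.fit(tas_anom.reshape(-1, 1),
+                  var_anom.reshape(var_anom.shape[0], -1))
+        pattern = model.coef_.reshape(var_anom.shape[1:])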
+
+    Parameters
+    ----------
+    anom_list : cubelist
+        cube list of variables as anomalies
+    area: str
+        area over which to calculate patterns
+    ocean_frac: cube
+        gridded ocean fraction
+    land_frac: cube
+        gridded land fraction
+    yrs : int
+        int to specify length of scenario
+
+    Returns
+    -------
+    regr_var_list : cubelist
+        cube list of newly created regression slope value cubes, for each var
+    """
+    regr_var_list = iris.cube.CubeList([])
+
+    for cube in anom_list:
+        if cube.var_name == "tl1_anom":
+            # convert years to months when selecting
+            tas = cube[-yrs * 12:]
+
+    for cube in anom_list:
+        cube = cube[-yrs * 12:]
+        month_list = iris.cube.CubeList([])
+
+        # extracting months, regressing, and merging
+        for i in range(1, 13):
+            month_cube = cube.extract(iris.Constraint(imogen_drive=i))
+            month_tas = tas.extract(iris.Constraint(imogen_drive=i))
+
+            if area == 'land':
+                regr_array = regression(
+                    month_tas,
+                    month_cube.data,
+                    area=area,
+                    ocean_frac=ocean_frac,
+                    land_frac=land_frac,
+                )
+            else:
+                regr_array = regression(
+                    month_tas,
+                    month_cube.data,
+                    area=area,
+                )
+
+            if cube.var_name in ("swdown_anom", "lwdown_anom"):
+                units = "W m-2 K-1"
+            else:
+                units = cube.units / tas.units
+
+            # create, and append cube of regression values
+            month_list.append(
+                create_cube(tas, cube.copy(), regr_array, i, units=units)
+            )
+
+        month_list = month_list.merge_cube()
+        regr_var_list.append(month_list)
+
+    return regr_var_list
+
+
+def cube_saver(list_of_cubelists, work_path, name_list, jules_mode):
+    """Save desired cubelists to work_dir, depending on switch settings.
+
+    Parameters
+    ----------
+    list_of_cubelists : list
+        list containing desired cubelists
+    work_path : path
+        path to work_dir, to save cubelists
+    name_list : list
+        list of filename strings for saving
+    jules_mode : bool
+        switch option passed through by ESMValTool config dict
+
+    Returns
+    -------
+    None
+    """
+    if jules_mode:
+        for i in range(0, 3):
+            iris.save(
+                list_of_cubelists[i],
+                os.path.join(work_path, name_list[i])
+            )
+    else:
+        for i, cube in enumerate(list_of_cubelists[2]):
+            list_of_cubelists[2][i] = sf.rename_variables(
+                cube, has_orig_vars=False
+            )
+        iris.save(
+            list_of_cubelists[2],
+            os.path.join(work_path, name_list[2])
+        )
+
+
+def save_outputs(
+    cfg,
+    list_of_cubelists,
+    model
+):
+    """Save data and plots to relevant directories.
+
+    Parameters
+    ----------
+    cfg: dict
+        Dictionary passed in by ESMValTool preprocessors
+    list_of_cubelists: list
+        List of cubelists to save
+    model : str
+        model name
+
+    Returns
+    -------
+    None
+    """
+    work_path, plot_path = sf.make_model_dirs(
+        cfg, model
+    )
+
+    name_list = [
+        "climatology_variables.nc",
+        "anomaly_variables.nc",
+        "patterns.nc",
+    ]
+
+    # saving data + plotting
+    if cfg["jules_mode"] is True:
+        plot_timeseries(
+            list_of_cubelists[0],
+            plot_path,
+            "40 Year Climatologies, 1850-1889",
+            "Climatologies"
+        )
+        plot_timeseries(
+            list_of_cubelists[1],
+            plot_path,
+            "Anomaly Timeseries, 1850-2100",
+            "Anomalies"
+        )
+        plot_patterns(list_of_cubelists[2], plot_path)
+        cube_saver(
+            list_of_cubelists,
+            work_path,
+            name_list,
+            jules_mode=cfg["jules_mode"]
+        )
+
+    else:
+        plot_patterns(list_of_cubelists[2], plot_path)
+        cube_saver(
+            list_of_cubelists,
+            work_path,
+            name_list,
+            jules_mode=cfg["jules_mode"]
+        )
+
+
+def get_provenance_record():
+    """Create a provenance record describing the diagnostic data and plot.
+ + Parameters + ---------- + None + + Returns + ------- + record : dict + provenance record + """ + record = { + "caption": ["Generating Climate Patterns from CMIP6 Models"], + "statistics": ["mean", "other"], + "domains": ["global"], + "themes": ["carbon"], + "realms": ["atmos"], + "authors": ["munday_gregory"], + } + + return record + + +def extract_data_from_cfg(cfg, model): + """Extract model data from the cfg. + + Parameters + ---------- + cfg: dict + Dictionary passed in by ESMValTool preprocessors + model : str + model name + + Returns + ------- + clim_list: cubelist + cubelist of climatologies + ts_list: cubelist + cubelist of spatial timeseries + sftlf: cube + land fraction cube + """ + clim_list = iris.cube.CubeList([]) + ts_list = iris.cube.CubeList([]) + + for dataset in cfg["input_data"].values(): + if dataset["dataset"] == model: + input_file = dataset["filename"] + + # preparing single cube + cube = sf.load_cube(input_file) + + if dataset["exp"] != "historical-ssp585": + sftlf = cube + else: + # appending to timeseries list + ts_list.append(cube) + + # making climatology + clim_cube = calculate_climatology(cube) + clim_list.append(clim_cube) + + if cfg["area"] == 'land': + return clim_list, ts_list, sftlf + + return clim_list, ts_list, None + + +def patterns(model, cfg): + """Driving function for script, taking in model data and saving parameters. + + Parameters + ---------- + model : str + model name + cfg: dict + Dictionary passed in by ESMValTool preprocessors + + Returns + ------- + None + """ + clim_list, ts_list, sftlf = extract_data_from_cfg(cfg, model) + + if cfg["area"] == 'land': + # calculate land/ocean_fracs + ocean_frac, land_frac = sf.ocean_fraction_calc(sftlf) + + # calculate anomaly over historical + ssp timeseries + clim_list_final, anom_list_final = calculate_anomaly(clim_list, ts_list) + + for i, cube in enumerate(clim_list_final): + clim_list_final[i] = sf.rename_variables( + cube, has_orig_vars=True, new_extension="_clim" + ) + anom_list_final[i] = sf.rename_variables( + anom_list_final[i], has_orig_vars=True, new_extension="_anom" + ) + + if cfg["area"] == 'land': + regressions = calculate_regressions( + anom_list_final, + cfg["area"], + ocean_frac=ocean_frac, + land_frac=land_frac + ) + else: + regressions = calculate_regressions( + anom_list_final, cfg["area"] + ) + + list_of_cubelists = [clim_list_final, anom_list_final, regressions] + + save_outputs(cfg, list_of_cubelists, model) + + # Provenance Logging, removed due to sporadic errors. Fix later. + + # model_work_dir, _ = sf.make_model_dirs( + # cfg, + # model + # ) + + # provenance_record = get_provenance_record() + # path = os.path.join(model_work_dir, "patterns.nc") + # with ProvenanceLogger(cfg) as provenance_logger: + # provenance_logger.log(path, provenance_record) + + +def main(cfg): + """Take in driving data with parallelisation options. + + Parameters + ---------- + cfg : dict + the global config dictionary, passed by ESMValTool. 
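+        Relevant keys here are ``input_data`` (used to collect the unique
+        model names) and ``parallelise`` (if true, each model is processed
+        in a separate process via ``sub_functions.parallelise``).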
+ + Returns + ------- + None + """ + input_data = cfg["input_data"].values() + parallelise = cfg["parallelise"] + + models = [] + for mod in input_data: + model = mod["dataset"] + if model not in models: + models.append(model) + + if parallelise is True: + sf.parallelise(patterns)(models, cfg) + else: + for model in models: + patterns(model, cfg) + + +if __name__ == "__main__": + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/climate_patterns/plotting.py b/esmvaltool/diag_scripts/climate_patterns/plotting.py new file mode 100644 index 0000000000..447055a392 --- /dev/null +++ b/esmvaltool/diag_scripts/climate_patterns/plotting.py @@ -0,0 +1,128 @@ +# (C) Crown Copyright 2022-2024, Met Office. +"""Script containing plotting functions for driving scripts. + +Author +------ +Gregory Munday (Met Office, UK) +""" +import os + +import iris.quickplot as qplt +import matplotlib.pyplot as plt +import numpy as np + +from esmvalcore.preprocessor import area_statistics + + +def subplot_positions(j): + """Determine sub-plot positions in a 3x3 figure. + + Parameters + ---------- + j : int + index of cube position in cubelist + + Returns + ------- + x_pos : int + x subplot position + y_pos : int + y subplot position + """ + if j <= 2: + y_pos = j + x_pos = 0 + elif 2 < j <= 5: + y_pos = j - 3 + x_pos = 1 + else: + y_pos = j - 6 + x_pos = 2 + + return x_pos, y_pos + + +def plot_patterns(cube_list, plot_path): + """Plot climate patterns for jules_mode: off. + + Parameters + ---------- + cube_list : cubelist + input cubelist for plotting patterns per variable + plot_path : path + path to plot_dir + + Returns + ------- + None + """ + fig, axis = plt.subplots(3, 3, figsize=(14, 12), sharex=True) + fig.suptitle("Patterns from a random grid-cell", fontsize=18, y=0.98) + + plt.figure(figsize=(14, 12)) + plt.subplots_adjust(hspace=0.5) + plt.suptitle("Global Patterns, January", fontsize=18, y=0.95) + + for j, cube in enumerate(cube_list): + # determining plot positions + x_pos, y_pos = subplot_positions(j) + months = np.arange(1, 13) + # plots patterns for an arbitrary grid cell + axis[x_pos, y_pos].plot(months, cube[:, 50, 50].data) + axis[x_pos, + y_pos].set_ylabel(str(cube.var_name) + " / " + str(cube.units)) + if j > 5: + axis[x_pos, y_pos].set_xlabel("Time") + + # January patterns + plt.subplot(3, 3, j + 1) + qplt.pcolormesh(cube[0]) + + plt.tight_layout() + plt.savefig(os.path.join(plot_path, "Patterns"), dpi=300) + plt.close() + + fig.tight_layout() + fig.savefig(os.path.join(plot_path, "Patterns Timeseries"), dpi=300) + + +def plot_timeseries(cubelist, plot_path, title, save_name): + """Plot timeseries and maps of climatologies, anomalies and patterns. 
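+
+    Note: the 3x3 panel layout (via ``subplot_positions``) assumes at most
+    nine cubes in ``cubelist``.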
+
+    Parameters
+    ----------
+    cubelist : cubelist
+        input cubelist for plotting per variable
+    plot_path : path
+        path to plot_dir
+    title: str
+        title for the figure
+    save_name: str
+        name for the saved figure
+
+    Returns
+    -------
+    None
+    """
+    fig, axs = plt.subplots(3, 3, figsize=(14, 12), sharex=True)
+    fig.suptitle(f"{title}", fontsize=18, y=0.98)
+
+    for j, cube in enumerate(cubelist):
+        # determining plot positions
+        x_pos, y_pos = subplot_positions(j)
+        yrs = (1850 + np.arange(cube.shape[0])).astype("float")
+        months = np.arange(1, 13)
+
+        # anomaly timeseries
+        avg_cube = area_statistics(cube, 'mean').data
+        if save_name == "Climatologies":
+            axs[x_pos, y_pos].plot(months, avg_cube)
+        else:
+            axs[x_pos, y_pos].plot(yrs, avg_cube)
+        axs[x_pos,
+            y_pos].set_ylabel(cube.long_name + " / " + str(cube.units))
+        if j > 5:
+            axs[x_pos, y_pos].set_xlabel("Time")
+
+    fig.tight_layout()
+    fig.savefig(os.path.join(plot_path, f"{save_name}"), dpi=300)
diff --git a/esmvaltool/diag_scripts/climate_patterns/sub_functions.py b/esmvaltool/diag_scripts/climate_patterns/sub_functions.py
new file mode 100644
index 0000000000..4b3fe00141
--- /dev/null
+++ b/esmvaltool/diag_scripts/climate_patterns/sub_functions.py
@@ -0,0 +1,267 @@
+# (C) Crown Copyright 2022-2024, Met Office.
+"""Script containing relevant sub-functions for driving scripts.
+
+Author
+------
+Gregory Munday (Met Office, UK)
+"""
+
+import logging
+import multiprocessing as mp
+import os
+from functools import partial
+from pathlib import Path
+
+import dask.array as da
+import iris
+import iris.analysis.cartography
+import iris.coord_categorisation
+
+logger = logging.getLogger(Path(__file__).stem)
+
+
+def load_cube(filename):
+    """Load cube, remove any dimensions of length 1.
+
+    Parameters
+    ----------
+    filename : path
+        path to load cube file
+
+    Returns
+    -------
+    cube : cube
+        a cube
+    """
+    logger.debug("Loading %s", filename)
+    cube = iris.load_cube(filename)
+    cube = iris.util.squeeze(cube)
+
+    return cube
+
+
+def ocean_fraction_calc(sftlf):
+    """Calculate gridded land and ocean fractions.
+
+    Parameters
+    ----------
+    sftlf: cube
+        land-fraction cube from piControl experiment
+
+    Returns
+    -------
+    ocean_frac: cube
+        ocean_fraction cube for area-weights
+    land_frac: cube
+        land_fraction cube for area-weights
+    """
+    sftlf.coord("latitude").coord_system = iris.coord_systems.GeogCS(
+        6371229.0
+    )
+    sftlf.coord("longitude").coord_system = iris.coord_systems.GeogCS(
+        6371229.0
+    )
+    sftof = 100 - sftlf
+
+    ocean_frac = sftof / 100
+    land_frac = sftlf / 100
+
+    return ocean_frac, land_frac
+
+
+def area_avg_landsea(cube,
+                     ocean_frac,
+                     land_frac,
+                     land=True,
+                     return_cube=False):
+    """Calculate the global mean of a variable in a cube.
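+
+    The mean is fraction-weighted, i.e. sum(var * area_weights * frac) /
+    sum(area_weights * frac), where frac is the land (or ocean) fraction;
+    cells with a fraction below 0.01 are masked out.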
+
+    Parameters
+    ----------
+    cube : cube
+        input cube
+    ocean_frac : cube
+        ocean fraction cube, found from sftlf
+    land_frac : cube
+        land fraction cube, sftlf
+    land : bool
+        option to weight by land or ocean
+    return_cube : bool
+        option to return a cube or array
+
+    Returns
+    -------
+    cube2 : cube
+        cube with collapsed lat-lons, global mean over time
+    cube2.data : arr
+        array with collapsed lat-lons, global mean over time
+    """
+    if not cube.coord("latitude").has_bounds():
+        cube.coord("latitude").guess_bounds()
+    if not cube.coord("longitude").has_bounds():
+        cube.coord("longitude").guess_bounds()
+
+    global_weights = iris.analysis.cartography.area_weights(
+        cube,
+        normalize=False
+    )
+
+    if land is False:
+        ocean_frac.data = da.ma.masked_less(ocean_frac.core_data(), 0.01)
+        weights = iris.analysis.cartography.area_weights(
+            ocean_frac,
+            normalize=False
+        )
+        ocean_area = (
+            ocean_frac.collapsed(
+                ["latitude", "longitude"], iris.analysis.SUM, weights=weights
+            )
+            / 1e12
+        )
+        cube2 = cube * global_weights * ocean_frac
+
+        cube2 = (
+            cube2.collapsed(["latitude", "longitude"], iris.analysis.SUM)
+            / 1e12
+            / ocean_area
+        )
+
+    if land:
+        land_frac.data = da.ma.masked_less(land_frac.core_data(), 0.01)
+        weights = iris.analysis.cartography.area_weights(
+            land_frac,
+            normalize=False
+        )
+        land_area = (
+            land_frac.collapsed(
+                ["latitude", "longitude"], iris.analysis.SUM, weights=weights
+            )
+            / 1e12
+        )
+
+        # Iris is too strict so we need to use core_data in this calculation
+        cube2 = cube * global_weights * land_frac.core_data()
+        cube2 = (
+            cube2.collapsed(["latitude", "longitude"], iris.analysis.SUM)
+            / 1e12
+            / land_area
+        )
+
+    if return_cube:
+        return cube2
+
+    return cube2.data
+
+
+def make_model_dirs(cfg, model):
+    """Create directories for each input model for saving.
+
+    Parameters
+    ----------
+    cfg: dict
+        Dictionary passed in by ESMValTool preprocessors
+    model : str
+        model name
+
+    Returns
+    -------
+    model_work_dir : path
+        path to specific model directory in work_dir
+    model_plot_dir : path
+        path to specific plot directory in plot_dir
+    """
+    work_path = cfg["work_dir"]
+    plot_path = cfg["plot_dir"]
+    model_work_dir = os.path.join(work_path, model)
+    model_plot_dir = os.path.join(plot_path, model)
+
+    if not os.path.exists(model_work_dir):
+        os.mkdir(model_work_dir)
+    if not os.path.exists(model_plot_dir):
+        os.mkdir(model_plot_dir)
+
+    return model_work_dir, model_plot_dir
+
+
+def rename_variables(cube, has_orig_vars=True, new_extension=""):
+    """Rename variables and a coord to fit in JULES framework.
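+
+    For example, with ``has_orig_vars=True`` and ``new_extension="_anom"``,
+    a cube named ``tas`` becomes ``tl1_anom``; with ``has_orig_vars=False``,
+    ``tl1_anom`` becomes ``tl1_patt``.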
+
+    Parameters
+    ----------
+    cube : cube
+        input cube
+    has_orig_vars : bool
+        if True, rename to new var names with correct extension
+    new_extension : str
+        extension to add to variable names
+
+    Returns
+    -------
+    cube : cube
+        cube with renamed variables
+    """
+    original_var_names = ["tas", "range_tl1", "huss", "pr",
+                          "sfcWind", "ps", "rsds", "rlds"]
+    new_var_names = ["tl1", "range_tl1", "ql1", "precip",
+                     "wind", "pstar", "swdown", "lwdown"]
+    long_var_names = [
+        "Air Temperature",
+        "Diurnal Range",
+        "Specific Humidity",
+        "Precipitation",
+        "Wind Speed",
+        "Surface Pressure",
+        "Surface Downwelling Shortwave Radiation",
+        "Surface Downwelling Longwave Radiation"
+    ]
+    for orig_var, new_var, long_var in zip(
+        original_var_names, new_var_names, long_var_names
+    ):
+        if has_orig_vars:
+            if cube.var_name == orig_var:
+                cube.var_name = f"{new_var}{new_extension}"
+                cube.coord("month_number").rename("imogen_drive")
+                return cube
+        else:
+            if cube.var_name == f"{new_var}_anom":
+                cube.rename(long_var)
+                cube.var_name = f"{new_var}_patt"
+                return cube
+            if cube.var_name == f"{new_var}_patt":
+                cube.rename(long_var)
+                cube.var_name = orig_var
+                cube.coord("imogen_drive").rename("month_number")
+                return cube
+
+    return None
+
+
+def parallelise(function, processes=None):
+    """Parallelise any function, by George Ford, Met Office.
+
+    Parameters
+    ----------
+    function : function
+        function to be parallelised
+    processes : int
+        number of processes to be used in parallelisation
+
+    Returns
+    -------
+    result : any
+        results of parallelised elements
+    """
+    if processes is None:
+        processes = max(1, mp.cpu_count() - 1)
+    if processes <= 0:
+        processes = 1
+
+    def easy_parallise(func, sequence, cfg):
+        with mp.Pool(processes=processes) as pool:
+            config_wrapper = partial(func, cfg=cfg)
+            result = pool.map_async(config_wrapper, sequence).get()
+            pool.close()
+            pool.join()
+            return result
+
+    return partial(easy_parallise, function)
diff --git a/esmvaltool/diag_scripts/clouds/clouds.ncl b/esmvaltool/diag_scripts/clouds/clouds.ncl
index e10f3e7416..c05c091cf4 100644
--- a/esmvaltool/diag_scripts/clouds/clouds.ncl
+++ b/esmvaltool/diag_scripts/clouds/clouds.ncl
@@ -17,24 +17,35 @@
 ;   explicit_cn_levels:   explicit contour levels (array)
 ;   extralegend:          plot legend(s) to extra file(s)
 ;   filename_add:         optionally add this string to plot filenames
+;   multiobs_exclude:     list of *observational* datasets to be excluded when
+;                         calculating uncertainty estimates from multiple
+;                         observational datasets (see also multiobs_uncertainty)
+;   multiobs_uncertainty: calculate uncertainty estimates from multiple
+;                         observational datasets (true, false); by default,
+;                         all "obs", "obs6", "obs4mips" and "native6" datasets
+;                         are used; any such datasets can be explicitly
+;                         excluded when also specifying "multiobs_exclude"
 ;   panel_labels:         label individual panels (true, false)
 ;   PanelTop:             manual override for "@gsnPanelTop" used by panel
 ;                         plot(s)
 ;   projection:           map projection for plotting (default =
 ;                         "CylindricalEquidistant")
-;   showdiff              calculate and plot differences (default = False)
+;   showdiff:             calculate and plot differences (default = False)
+;   showyears:            add start and end years to the plot titles (default =
+;                         false)
 ;   rel_diff:             if showdiff = True, then plot relative differences (%)
 ;                         (default = False)
-;   ref_diff_min:         lower cutoff value in case of calculating relative
-;                         differences
-;                         (in units of input variable)
+;   rel_diff_min:         lower cutoff value in case of calculating relative
+;                         differences (in units of input variable)
 ;   region:               show only selected geographic region given as latmin,
 ;                         latmax, lonmin, lonmax
 ;   timemean:             time averaging - "seasonal" = (DJF, MAM, JJA, SON),
-;                         "annual" = annual mean
+;                         "annualclim" = annual mean
 ;   treat_var_as_error:   treat variable as error when averaging (true, false)
 ;                         true:  avg = sqrt(mean(var*var))
 ;                         false: avg = mean(var)
+;   var:                  short_name of variable to process (default = "" - use
+;                         first variable in variable list)
 ;
 ; Required variable attributes (variable specific)
 ;   none
@@ -49,15 +60,22 @@
 ;   none
 ;
 ; Modification history
-;   20190220-A_laue_ax: added output of provenance (v2.0)
-;   20181119-A_laue_ax: adapted code to multi-variable capable framework
-;   20180923-A_laue_ax: added writing of results to netcdf
-;   20180518-A_laue_ax: code rewritten for ESMValTool v2.0
-;   20170621-A_laue_ax: reworked code to add tags for reporting
-;   20160901-A_laue_ax: added regridding option 1 deg x 1 deg
-;   20151027-A_laue_ax: moved call to 'write_references' to the beginning
-;                       of the code
-;   20150415-A-laue_ax: written.
+;   20230117-lauer_axel: added support for ICON (code from Manuel)
+;   20211021-lauer_axel: added output of basic statistics as ascii files
+;   20211006-lauer_axel: removed write_plots
+;   20210325-lauer_axel: added option to estimate observational uncertainty
+;                        from multiple observational datasets
+;   20210318-lauer_axel: added option to specify variable if more than one
+;                        variable is present
+;   20190220-lauer_axel: added output of provenance (v2.0)
+;   20181119-lauer_axel: adapted code to multi-variable capable framework
+;   20180923-lauer_axel: added writing of results to netcdf
+;   20180518-lauer_axel: code rewritten for ESMValTool v2.0
+;   20170621-lauer_axel: reworked code to add tags for reporting
+;   20160901-lauer_axel: added regridding option 1 deg x 1 deg
+;   20151027-lauer_axel: moved call to 'write_references' to the beginning
+;                        of the code
+;   20150415-lauer_axel: written.
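+;
+; Note on multiobs_uncertainty (a summary of the calculation done in the
+; code below):
+;   ref_avg = mean over all observational datasets of their time averages
+;   ref_std = sqrt(mean_i(sigma2_i)), where sigma2_i is the interannual
+;             variance of dataset i about ref_avg, i.e.
+;             sigma2_i = sum_y((ref_avg - obs_i(y))^2) / (nyears - 1)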
; ; ############################################################################ @@ -66,39 +84,60 @@ load "$diag_scripts/../interface_scripts/interface.ncl" load "$diag_scripts/shared/statistics.ncl" load "$diag_scripts/shared/plot/style.ncl" load "$diag_scripts/shared/plot/contour_maps.ncl" +load "$diag_scripts/shared/dataset_selection.ncl" begin enter_msg(DIAG_SCRIPT, "") - var0 = variable_info[0]@short_name - info0 = select_metadata_by_name(input_file_info, var0) - dim_MOD = ListCount(info0) - if (isatt(variable_info[0], "reference_dataset")) then - refname = variable_info[0]@reference_dataset - end if - names = metadata_att_as_array(info0, "dataset") - projects = metadata_att_as_array(info0, "project") - - log_info("++++++++++++++++++++++++++++++++++++++++++") - log_info(DIAG_SCRIPT + " (var: " + var0 + ")") - log_info("++++++++++++++++++++++++++++++++++++++++++") - ; Set default values for non-required diag_script_info attributes set_default_att(diag_script_info, "embrace_setup", False) set_default_att(diag_script_info, "extralegend", False) set_default_att(diag_script_info, "filename_add", "") + set_default_att(diag_script_info, "multiobs_exclude", "") + set_default_att(diag_script_info, "multiobs_uncertainty", False) set_default_att(diag_script_info, "panel_labels", True) set_default_att(diag_script_info, "rel_diff", False) set_default_att(diag_script_info, "rel_diff_min", -1.0e19) set_default_att(diag_script_info, "showdiff", False) + set_default_att(diag_script_info, "showyears", False) set_default_att(diag_script_info, "timemean", "annualclim") set_default_att(diag_script_info, "treat_var_as_error", False) + set_default_att(diag_script_info, "var", "") + + if (diag_script_info@var .eq. "") then + var0 = variable_info[0]@short_name + else + var0 = diag_script_info@var + end if + + variables = metadata_att_as_array(variable_info, "short_name") + if (.not. any(variables .eq. var0)) then + errstr = "diagnostic " + diag + " requires the following variable: " + var0 + error_msg("f", DIAG_SCRIPT, "", errstr) + end if + + var0_info = select_metadata_by_name(variable_info, var0) + var0_info := var0_info[0] + info0 = select_metadata_by_name(input_file_info, var0) + dim_MOD = ListCount(info0) + + if (isatt(var0_info, "reference_dataset")) then + refname = var0_info@reference_dataset + end if + names = metadata_att_as_array(info0, "dataset") + projects = metadata_att_as_array(info0, "project") + + log_info("++++++++++++++++++++++++++++++++++++++++++") + log_info(DIAG_SCRIPT + " (var: " + var0 + ")") + log_info("++++++++++++++++++++++++++++++++++++++++++") flag_diff = diag_script_info@showdiff flag_rel_diff = diag_script_info@rel_diff flag_rel_diff_min = diag_script_info@rel_diff_min + flag_multiobs_unc = diag_script_info@multiobs_uncertainty + multiobs_exclude = diag_script_info@multiobs_exclude if (.not.flag_diff .and. flag_rel_diff) then log_info("flag_rel_diff = True has no effect until flag_diff is also " \ @@ -134,6 +173,38 @@ begin season = (/"DJF", "MAM", "JJA", "SON"/) end if + if (flag_multiobs_unc .and. timemean .ne. "annualclim") then + log_info("multiobs_uncertainty = True is currently supported for annual" \ + + " means only (timemean = annualclim). Setting " \ + + " multiobs_uncertainty to False.") + flag_multiobs_unc = False + end if + + if (flag_multiobs_unc) then + ; find indices of all OBS and obs4mips datasets (including "native6" ERA5) + idxobs = get_obs(names, projects, multiobs_exclude) + + if (idxobs(0) .eq. 
-1) then + flag_multiobs_unc = False + else + refname = "REF" + ref_ind = dimsizes(names) + names := array_append_record(names, (/refname/), 0) + end if + else + ref_ind = -1 ; set to invalid value + idxobs = -1 + ; if attribute is present, use it so correlations can be calculated + if (isvar("refname")) then + ; set reference model + ref_ind = ind(names .eq. refname) + if (ismissing(ref_ind)) then + log_info("warning: reference dataset (" + refname + ") not found.") + ref_ind = -1 + end if + end if + end if + ; create string for caption (netcdf provenance) allseas = season(0) @@ -153,24 +224,6 @@ begin ; Create work dir system("mkdir -p " + work_dir) - if (config_user_info@write_plots.eq."True") then - write_plots = True - else - write_plots = False - end if - - ref_ind = -1 ; set to invalid value - - ; if attribute is present, use it so correlations can be calculated - if (isvar("refname")) then - ; set reference model - ref_ind = ind(names .eq. refname) - if (ismissing(ref_ind)) then - log_info("warning: reference dataset (" + refname + ") not found.") - ref_ind = -1 - end if - end if - climofiles = metadata_att_as_array(info0, "filename") outfile = new(numseas, string) @@ -187,11 +240,9 @@ begin end if ; set reference model - - ref_ind = ind(names .eq. refname) - if (ismissing(ref_ind)) then - error_msg("f", DIAG_SCRIPT, "", "reference dataset (" \ - + refname + ") is missing") + if (ref_ind .lt. 0) then + error_msg("f", DIAG_SCRIPT, "", "cannot calculate differences as " \ + + "reference dataset (" + refname + ") is missing") end if end if @@ -202,21 +253,111 @@ begin ; # get data and average time # ; ########################################### - maps = new((/dim_MOD, 4/), graphic) - maps_d = new((/dim_MOD, 4/), graphic) + ; --------------------------------------------------------- + ; if requested, calculate multi-observational mean and standard deviation - ind_all_sorted = ispan(0, dim_MOD - 1, 1) ; create array + if (flag_multiobs_unc) then + nobs = dimsizes(idxobs) + + ; step 1: calculate multi-obs mean + + do i = 0, nobs - 1 + A0 = read_data(info0[idxobs(i)]) + + ; calculate time average + mean = time_operations(A0, -1, -1, "average", "annualclim", True) + delete(A0) + + ; if requested, extract geographical region + if (isatt(diag_script_info, "region")) then + region = diag_script_info@region + mean := area_operations(mean, region(0), region(1), region(2), \ + region(3), "extract", False) + end if + if (i .eq. 
0) then + dims = dimsizes(mean) + newdims = new(dimsizes(dims) + 1, integer) + newdims(0) = nobs + newdims(1:dimsizes(newdims) - 1) = dims + ref_tmp = new(newdims, float) + delete(dims) + end if + ref_tmp(i, :, :) = mean + end do + delete(mean) + ; note: we are using dim_avg_n_Wrap so missing values are ignored + ; when averaging + ref_avg = dim_avg_n_Wrap(ref_tmp, 0) + delete(ref_tmp) + + ; step 2: calculate standard deviation of all obs datasets using + ; the multi-obs mean + + sigma2 = new(dimsizes(ref_avg), float) + sig_tmp = new(newdims, float) + delete(newdims) + + do i = 0, nobs - 1 + A0 = read_data(info0[idxobs(i)]) + ; calculate yearly averages + ymean = time_operations(A0, -1, -1, "average", "yearly", True) + delete(A0) + ; if requested, extract geographical region + if (isatt(diag_script_info, "region")) then + region = diag_script_info@region + ymean := area_operations(ymean, region(0), region(1), region(2), \ + region(3), "extract", False) + end if + dims = dimsizes(ymean) + nyears = dims(0) + delete(dims) + sigma2 = 0.0 + do j = 0, nyears - 1 ; dimension 0 = time + sigma2 = sigma2 + (ref_avg - ymean(j, :, :)) ^ 2 + end do + if (nyears .gt. 1) then + sigma2 = sigma2 / tofloat(nyears - 1) + end if + sig_tmp(i, :, :) = sigma2 + delete(ymean) + end do + delete(sigma2) + ; note: we are using dim_avg_n so missing values are ignored + ; when averaging + ref_std = sqrt(dim_avg_n(sig_tmp, 0)) + delete(sig_tmp) + copy_VarCoords(ref_avg, ref_std) + ref_std@units = ref_avg@units + end if + +; system("rm debug.nc") +; debugfile = addfile("debug.nc", "c") +; debugfile->avg = ref_avg +; debugfile->std = ref_std + + ; --------------------------------------------------------- + + nplots = dim_MOD + if (flag_multiobs_unc) then + nplots = nplots + 1 + end if + + maps = new((/nplots, 4/), graphic) + maps_d = new((/nplots, 4/), graphic) + + ind_all_sorted = ispan(0, nplots, 1) ; create array if (ref_ind .ge. 0) then ind_wo_ref = ind(names .ne. refname) ind_all_sorted(0) = ref_ind - ind_all_sorted(1:dim_MOD - 1) = ind_wo_ref + n = dimsizes(names) + ind_all_sorted(1:n - 1) = ind_wo_ref end if - corr = new((/numseas/), float) - gavg = new((/numseas/), float) - rmsd = new((/numseas/), float) - bias = new((/numseas/), float) + corr = new((/nplots, numseas/), float) + gavg = new((/nplots, numseas/), float) + rmsd = new((/nplots, numseas/), float) + bias = new((/nplots, numseas/), float) ; filenames for netcdf output @@ -225,10 +366,12 @@ begin nc_filename_mean = work_dir + "clouds_" + var0 + "_mean.nc" nc_filename_mean@existing = "append" - do ii = 0, dim_MOD - 1 + res = True + + do ii = 0, nplots - 1 imod = ind_all_sorted(ii) - log_info("processing " + names(imod)) + log_info("processing " + names(imod) + " ***") if (isvar("data1")) then delete(data1) @@ -238,43 +381,47 @@ begin delete(A0) end if - A0 = read_data(info0[imod]) + if (imod .ne. ref_ind .or. .not.flag_multiobs_unc) then + A0 = read_data(info0[imod]) + ; check dimensions - ; check dimensions + dims = getvardims(A0) + if (dimsizes(dims) .lt. 2) then + error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + \ + " dimensions, need 2 or 3") + end if + idx = ind(dims .eq. "lat") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lat dimension") + end if + idx = ind(dims .eq. "lon") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lon dimension") + end if - dims = getvardims(A0) - if (dimsizes(dims) .lt. 
2) then - error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + \ - " dimensions, need 2 or 3") - end if - idx = ind(dims .eq. "lat") - if (ismissing(idx)) then - error_msg("f", DIAG_SCRIPT, "", "no lat dimension") - end if - idx = ind(dims .eq. "lon") - if (ismissing(idx)) then - error_msg("f", DIAG_SCRIPT, "", "no lon dimension") - end if + ; average over time - ; average over time + ; if variable is an error variable, we have to square it before + ; averaging and then calculate the square-root afterwards - ; if variable is an error variable, we have to square it before - ; averaging and then calculate the square-root afterwards + if (treat_var_as_error) then + log_info(" ++++++++++++++ Treating variable as error " + \ + "variable when averaging ") + A0 = A0 * A0 + end if - if (treat_var_as_error) then - log_info(" ++++++++++++++ Treating variable as error " + \ - "variable when averaging ") - A0 = A0 * A0 - end if + data1 = time_operations(A0, -1, -1, "average", timemean, True) - data1 = time_operations(A0, -1, -1, "average", timemean, True) + if (treat_var_as_error) then + data1 = sqrt(data1) + end if - if (treat_var_as_error) then - data1 = sqrt(data1) + delete(A0) + else + data1 = ref_avg + delete(ref_avg) end if - delete(A0) - ; if requested, select geographical region if (isatt(diag_script_info, "region")) then @@ -283,13 +430,13 @@ begin region(3), "extract", False) if (region(2).eq.0. .and. region(3).eq.360.) then else - data1@res_gsnAddCyclic = False + res@gsnAddCyclic = False end if - data1@res_mpMinLatF = region(0) ; range to zoom in on - data1@res_mpMaxLatF = region(1) - data1@res_mpMinLonF = region(2) - data1@res_mpMaxLonF = region(3) - data1@res_mpCenterLonF = 0.5 * (region(2) + region(3)) + res@mpMinLatF = region(0) ; range to zoom in on + res@mpMaxLatF = region(1) + res@mpMinLonF = region(2) + res@mpMaxLonF = region(3) + res@mpCenterLonF = 0.5 * (region(2) + region(3)) delete(region) end if @@ -303,106 +450,131 @@ begin ; # plot ressources # ; ########################################### - data1@res_cnFillOn = True ; color plot desired - data1@res_cnLineLabelsOn = False ; contour lines + res@cnFillOn = True ; color plot desired + res@cnLineLabelsOn = False ; contour lines ; colors ; http://www.ncl.ucar.edu/Document/Graphics/color_table_gallery.shtml ; annotation - data1@res_tiMainOn = False - data1@res_cnLevelSelectionMode = "ExplicitLevels" - data1@res_cnLinesOn = False + ; if desired, add years to plot title + years_str = "" + if (diag_script_info@showyears) then + years_str = " (" + var0_info@start_year + if (var0_info@start_year .ne. 
var0_info@end_year) then + years_str = years_str + "-" + var0_info@end_year + end if + years_str = years_str + ")" + end if + +; res@tiMainOn = False + res@tiMainString = names(imod) + years_str + res@tiMainFontHeightF = 0.025 + + res@cnLevelSelectionMode = "ExplicitLevels" + res@cnLinesOn = False - data1@res_mpOutlineOn = True - data1@res_mpFillOn = False + res@mpOutlineOn = True + res@mpFillOn = False ; variable specific plotting settings + if (isatt(var0_info, "units")) then + data1@units = var0_info@units + else + data1@units = "" + end if + if (var0.eq."pr") then - data1@res_cnLevels = fspan(0.5, 10, 20) + res@cnLevels = fspan(0.5, 10, 20) ; convert from kg m-2 s-1 to mm day-1 data1 = data1 * 86400.0 data1@units = "mm day-1" end if if (var0.eq."lwp") then - data1@res_cnLevels = ispan(10, 200, 10) * 0.001 - data1@res_mpOutlineOn = False - data1@res_mpFillOn = True - data1@res_mpLandFillColor = "Black" + res@cnLevels = ispan(10, 200, 10) * 0.001 +; res@mpOutlineOn = False +; res@mpFillOn = True +; res@mpLandFillColor = "Black" pal = read_colormap_file("$diag_scripts/shared/plot/rgb/qcm3.rgb") - data1@res_cnFillColors = pal + res@cnFillColors = pal end if if (var0.eq."tas") then - data1@res_cnLevels = ispan(-30, 30, 3) + res@cnLevels = ispan(-30, 30, 3) pal = read_colormap_file("$diag_scripts/shared/plot/rgb/ipcc-tas.rgb") - data1@res_cnFillColors = pal + res@cnFillColors = pal ; convert from K to degC data1 = data1 - 273.15 data1@units = "degC" end if - if (var0.eq."clt") then - data1@res_cnLevels = fspan(5, 100, 20) + if ((var0.eq."clt") .or. (var0.eq."cltisccp")) then + res@cnLevels = fspan(5, 100, 20) end if if (var0.eq."clivi") then - data1@res_cnLevels = ispan(10, 200, 10) * 0.001 + res@cnLevels = ispan(10, 200, 10) * 0.001 end if if (var0.eq."clwvi") then - data1@res_cnLevels = ispan(10, 300, 10) * 0.001 + res@cnLevels = ispan(10, 300, 10) * 0.001 end if if (var0.eq."swcre") then - data1@res_cnLevels = ispan(-100, 0, 10) + res@cnLevels = ispan(-100, 0, 10) end if if (var0.eq."lwcre") then - data1@res_cnLevels = ispan(0, 100, 10) + res@cnLevels = ispan(0, 100, 10) end if if (var0.eq."netcre") then - data1@res_cnLevels = ispan(-70, 70, 10) + res@cnLevels = ispan(-70, 70, 10) end if - data1@res_lbLabelBarOn = False - data1@res_gsnRightString = "" + if (var0.eq."prw") then + res@cnLevels = ispan(0, 60, 5) + end if + +; res@lbLabelBarOn = False + res@gsnRightString = "" - data1@res_mpFillDrawOrder = "PostDraw" ; draw map last - data1@res_cnMissingValFillColor = "Gray" + res@mpFillDrawOrder = "PostDraw" ; draw map last + res@cnMissingValFillColor = "Gray" ; no tickmarks and no labels - data1@res_tmYLLabelsOn = False - data1@res_tmYLOn = False - data1@res_tmYRLabelsOn = False - data1@res_tmYROn = False - data1@res_tmXBLabelsOn = False - data1@res_tmXBOn = False - data1@res_tmXTLabelsOn = False - data1@res_tmXTOn = False - data1@res_cnInfoLabelOn = False ; turn off cn info label - data1@res_mpPerimOn = perim ; draw line around map + res@tmYLLabelsOn = False + res@tmYLOn = False + res@tmYRLabelsOn = False + res@tmYROn = False + res@tmXBLabelsOn = False + res@tmXBOn = False + res@tmXTLabelsOn = False + res@tmXTOn = False + res@cnInfoLabelOn = False ; turn off cn info label + res@mpPerimOn = perim ; draw line around map + + res@gsnStringFontHeightF = 0.02 ; specified in namelist - data1@res_mpProjection = projection + res@mpProjection = projection ; set explicit contour levels if (isatt(diag_script_info, "explicit_cn_levels")) then - data1@res_cnLevelSelectionMode = "ExplicitLevels" - 
data1@res_cnLevels = diag_script_info@explicit_cn_levels + res@cnLevelSelectionMode = "ExplicitLevels" + res@cnLevels = diag_script_info@explicit_cn_levels end if - if (.not. isatt(data1, "res_cnLevels")) then + if (.not. isatt(res, "cnLevels")) then log_info(DIAG_SCRIPT + " (var: " + var0 + "):") log_info("info: using default contour levels") - data1@res_cnLevels = fspan(min(data1), max(data1), 20) + res@cnLevels = fspan(min(data1), max(data1), 20) end if ; ########################################### @@ -419,18 +591,12 @@ begin data1@diag_script = (/DIAG_SCRIPT/) end if - if (isatt(variable_info[0], "long_name")) then - data1@var_long_name = variable_info[0]@long_name + if (isatt(var0_info, "long_name")) then + data1@long_name = var0_info@long_name end if data1@var = var0 - if (isatt(variable_info[0], "units")) then - data1@var_units = variable_info[0]@units - else - data1@var_units = "" - end if - if (.not. isvar("ref_data")) then ref_data = data1 end if @@ -457,42 +623,72 @@ begin + "preprocessor settings in recipe).") end if - corr = corr@_FillValue - gavg = gavg@_FillValue + corr(imod, :) = corr@_FillValue + gavg(imod, :) = gavg@_FillValue if (.not.all(ismissing(data1))) then if (numseas.gt.1) then do is = 0, numseas - 1 if (same_grid .and. (ref_ind .ge. 0)) then - corr(is) = calculate_metric(ref_data(is, :, :), data1(is, :, :), \ - "correlation") + mask1 = ref_data(is, :, :) + mask2 = data1(is, :, :) + mask1 = where(.not.ismissing(mask1), 0., mask1@_FillValue) + mask2 = where(.not.ismissing(mask2), 0., mask2@_FillValue) + amask = mask1 + mask2 + delete(mask1) + delete(mask2) + refmasked = ref_data(is, :, :) + refmasked = refmasked + amask + datmasked = data1(is, :, :) + datmasked = datmasked + amask + corr(imod, is) = calculate_metric(refmasked, datmasked, \ + "correlation") +; corr(imod, is) = calculate_metric(ref_data(is, :, :), \ +; data1(is, :, :), "correlation") + delete(amask) + delete(refmasked) + delete(datmasked) end if - gavg(is) = area_operations(data1(is, :, :), -90., 90., 0., 360., \ - "average", True) + gavg(imod, is) = area_operations(data1(is, :, :), -90., 90., \ + 0., 360., "average", True) end do else if (same_grid .and. (ref_ind .ge. 
0)) then - corr(0) = calculate_metric(ref_data, data1, "correlation") + mask1 = ref_data + mask2 = data1 + mask1 = where(.not.ismissing(mask1), 0., mask1@_FillValue) + mask2 = where(.not.ismissing(mask2), 0., mask2@_FillValue) + amask = mask1 + mask2 + delete(mask1) + delete(mask2) + refmasked = ref_data + refmasked = refmasked + amask + datmasked = data1 + datmasked = datmasked + amask + corr(imod, 0) = calculate_metric(refmasked, datmasked, "correlation") +; corr(imod, 0) = calculate_metric(ref_data, data1, "correlation") + delete(amask) + delete(refmasked) + delete(datmasked) end if - gavg(0) = area_operations(data1, -90., 90., 0., 360., "average", True) + gavg(imod, 0) = area_operations(data1, -90., 90., 0., 360., \ + "average", True) end if end if - data1@res_gsnLeftStringFontHeightF = min((/0.025, 0.015 * 6.0 \ - / tofloat((dim_MOD + 1) / 2)/)) - data1@res_gsnRightStringFontHeightF = min((/0.025, 0.015 * 6.0 \ - / tofloat((dim_MOD + 1) / 2)/)) + res@lbTitleString = data1@units + res@lbTitlePosition = "Bottom" + res@lbTitleFontHeightF = 0.02 + res@lbLabelFontHeightF = 0.02 ; ########################################### ; # create the plot # ; ########################################### - data1@res_gsnDraw = False ; do not draw yet - data1@res_gsnFrame = False ; don't advance frame - ; function in aux_plotting.ncl if (ii.eq.0) then + ndframe = 0 ; note: an array of workspaces (i.e. wks(numseas)) does not work as ; attributes cannot be assigned to each array element ; individually @@ -524,51 +720,51 @@ begin if (numseas.gt.1) then do is = 0, numseas - 1 - if (.not.ismissing(corr(is))) then - data1@res_gsnRightString = "corr = " + sprintf("%6.3f", corr(is)) + if (.not.ismissing(corr(imod, is))) then + res@gsnRightString = "corr = " + sprintf("%6.3f", corr(imod, is)) else - data1@res_gsnRightString = "" + res@gsnRightString = "" end if - if (.not.ismissing(gavg(is))) then - data1@res_gsnLeftString = "mean = " + sprintf("%6.3f", gavg(is)) + if (.not.ismissing(gavg(imod, is))) then + res@gsnLeftString = "mean = " + sprintf("%6.3f", gavg(imod, is)) else - data1@res_gsnLeftString = "" + res@gsnLeftString = "" end if if (imod.eq.ref_ind) then ; remove corr. string for reference dataset - data1@res_gsnRightString = "" + res@gsnRightString = "" end if if (is.eq.0) then - maps(imod, is) = contour_map(wks0, data1(is, :, :), var0) + maps(imod, is) = gsn_csm_contour_map(wks0, data1(is, :, :), res) end if if (is.eq.1) then - maps(imod, is) = contour_map(wks1, data1(is, :, :), var0) + maps(imod, is) = gsn_csm_contour_map(wks1, data1(is, :, :), res) end if if (is.eq.2) then - maps(imod, is) = contour_map(wks2, data1(is, :, :), var0) + maps(imod, is) = gsn_csm_contour_map(wks2, data1(is, :, :), res) end if if (is.eq.3) then - maps(imod, is) = contour_map(wks3, data1(is, :, :), var0) + maps(imod, is) = gsn_csm_contour_map(wks3, data1(is, :, :), res) end if end do else - if (.not.ismissing(corr(0))) then - data1@res_gsnRightString = "corr = " + sprintf("%6.3f", corr(0)) + if (.not.ismissing(corr(imod, 0))) then + res@gsnRightString = "corr = " + sprintf("%6.3f", corr(imod, 0)) else - data1@res_gsnRightString = "" + res@gsnRightString = "" end if - if (.not.ismissing(gavg(0))) then - data1@res_gsnLeftString = "mean = " + sprintf("%6.3f", gavg(0)) + if (.not.ismissing(gavg(imod, 0))) then + res@gsnLeftString = "mean = " + sprintf("%6.3f", gavg(imod, 0)) else - data1@res_gsnLeftString = "" + res@gsnLeftString = "" end if if (imod.eq.ref_ind) then ; remove corr. 
string for reference dataset - data1@res_gsnRightString = "" + res@gsnRightString = "" end if - maps(imod, 0) = contour_map(wks0, data1, var0) + maps(imod, 0) = gsn_csm_contour_map(wks0, data1, res) end if ; mandatory netcdf output @@ -580,41 +776,66 @@ begin ; Create difference plots (if requested) ; ======================================================================= - if (flag_diff .and. (imod .ne. ref_ind)) then + if (flag_diff) then - diff = data1 - if (flag_rel_diff) then - diff = (diff - ref_data) / ref_data * 100.0 - diff = where(ref_data .le. rel_diff_min, diff@_FillValue, diff) - else - diff = diff - ref_data - end if + dres = True - diff@res_gsnLeftString = "" - diff@res_gsnRightString = "" + if (imod .ne. ref_ind) then + diff = data1 + if (flag_rel_diff) then + diff = (diff - ref_data) / ref_data * 100.0 + diff = where(ref_data .le. rel_diff_min, diff@_FillValue, diff) + diff@units = "%" + else + diff = diff - ref_data + end if - rmsd = rmsd@_FillValue - bias = bias@_FillValue + dres@gsnLeftString = "" + dres@gsnRightString = "" + dres@gsnCenterString = "" + dres@mpPerimOn = perim ; draw line around map + dres@gsnStringFontHeightF = 0.02 - if (numseas.gt.1) then - do is = 0, numseas - 1 + dres@tiMainString = names(imod) + " - " + refname + years_str + dres@tiMainFontHeightF = 0.025 + + rmsd(imod, :) = rmsd@_FillValue + bias(imod, :) = bias@_FillValue + + if (numseas.gt.1) then + do is = 0, numseas - 1 + if (.not. flag_rel_diff) then + if (same_grid) then + rmsd(imod, is) = calculate_metric(ref_data(is, :, :), \ + data1(is, :, :), "RMSD") + end if + bias(imod, is) = area_operations(diff(is, :, :), -90., 90., \ + 0., 360., "average", True) + end if + end do + else if (.not. flag_rel_diff) then if (same_grid) then - rmsd(is) = calculate_metric(ref_data(is, :, :), \ - data1(is, :, :), "RMSD") + rmsd(imod, 0) = calculate_metric(ref_data, data1, "RMSD") end if - bias(is) = area_operations(diff(is, :, :), -90., 90., 0., 360., \ - "average", True) + bias(imod, 0) = area_operations(diff, -90., 90., 0., 360., \ + "average", True) end if - end do - else - if (.not. 
flag_rel_diff) then - if (same_grid) then - rmsd(0) = calculate_metric(ref_data, data1, "RMSD") - end if - bias(0) = area_operations(diff, -90., 90., 0., 360., "average", \ - True) end if + else if (flag_multiobs_unc) then + diff = ref_std + if (.not.isatt(diff, "diag_script")) then + diff@diag_script = (/DIAG_SCRIPT/) + end if + dres@gsnLeftString = "" + dres@gsnRightString = "" + dres@gsnCenterString = "" + dres@tiMainString = refname + " uncertainty" + years_str + rmsd(imod, :) = rmsd@_FillValue + bias(imod, :) = bias@_FillValue + else + continue + end if end if ; ---------------------------------------------------------------------- @@ -623,25 +844,23 @@ begin ; # plot ressources # ; ########################################### - diff@res_gsnLeftStringFontHeightF = min((/0.025, 0.015 * 6.0 \ - / tofloat((dim_MOD + 1) / 2)/)) - diff@res_gsnRightStringFontHeightF = min((/0.025, 0.015 * 6.0 \ - / tofloat((dim_MOD + 1) / 2)/)) + dres@cnFillOn = True ; color plot desired + dres@cnLineLabelsOn = False ; contour lines + dres@cnLinesOn = False - diff@res_tiMainOn = False - - diff@res_cnFillOn = True ; color plot desired - diff@res_cnLineLabelsOn = False ; contour lines - diff@res_cnLinesOn = False + dres@lbTitleString = diff@units + dres@lbTitlePosition = "Bottom" + dres@lbTitleFontHeightF = 0.02 + dres@lbLabelFontHeightF = 0.02 ; colors ; http://www.ncl.ucar.edu/Document/Graphics/color_table_gallery.shtml ; annotation - diff@res_cnLevelSelectionMode = "ExplicitLevels" - diff@res_mpOutlineOn = True - diff@res_mpFillOn = False + dres@cnLevelSelectionMode = "ExplicitLevels" + dres@mpOutlineOn = True + dres@mpFillOn = False ; variable specific plotting settings @@ -649,64 +868,65 @@ begin if (.not.isvar("cnLevels")) then - if (isatt(diff, "res_cnLevels")) then - delete(diff@res_cnLevels) + if (isatt(dres, "cnLevels")) then + delete(dres@cnLevels) end if - if (isatt(diff, "res_cnFillColors")) then - delete(diff@res_cnFillColors) + if (isatt(dres, "cnFillColors")) then + delete(dres@cnFillColors) end if if (isvar("pal")) then delete(pal) end if if (var0.eq."pr") then - diff@res_cnLevels = ispan(-30, 30, 5) * 0.1 + dres@cnLevels = ispan(-30, 30, 5) * 0.1 pal = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + "ipcc-precip-delta.rgb") - diff@res_cnFillColors = pal - diff@res_lbOrientation = "horizontal" + dres@cnFillColors = pal + dres@lbOrientation = "horizontal" end if if ((var0.eq."tas") .or. 
(var0.eq."ts")) then pal = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + "ipcc-tas-delta.rgb") - diff@res_cnFillPalette = pal + dres@cnFillPalette = pal if (var0.eq."ts") then - diff@res_cnLevels = ispan(-5, 5, 1) * 0.5 + dres@cnLevels = ispan(-5, 5, 1) * 0.5 end if end if if (var0.eq."lwp") then - diff@res_cnLevels = ispan(-50, 50, 10) * 0.001 - diff@res_mpOutlineOn = False - diff@res_mpFillOn = True - diff@res_mpLandFillColor = "Black" + dres@cnLevels = ispan(-45, 45, 5) * 0.001 pal = read_colormap_file("$diag_scripts/shared/plot/rgb/qcm3.rgb") - diff@res_cnFillColors = pal + dres@cnFillColors = pal end if if (var0.eq."clt") then - diff@res_cnLevels = fspan(-25, 25, 11) + dres@cnLevels = fspan(-25, 25, 11) end if if (var0.eq."clivi") then - diff@res_cnLevels = ispan(-70, 70, 10) * 0.001 + dres@cnLevels = ispan(-70, 70, 10) * 0.001 end if if (var0.eq."clwvi") then - diff@res_cnLevels = ispan(-50, 50, 10) * 0.001 + dres@cnLevels = ispan(-50, 50, 10) * 0.001 end if if (var0.eq."swcre") then - data1@res_cnLevels = ispan(-30, 30, 5) + dres@cnLevels = ispan(-30, 30, 5) end if if (var0.eq."lwcre") then - data1@res_cnLevels = ispan(-30, 30, 5) + dres@cnLevels = ispan(-30, 30, 5) end if if (var0.eq."netcre") then - data1@res_cnLevels = ispan(-30, 30, 5) + dres@cnLevels = ispan(-30, 30, 5) + end if + + if (var0.eq."prw") then + dres@cnLevels = ispan(-14, 14, 2) end if ; ****************************************************** @@ -714,86 +934,86 @@ begin ; ****************************************************** if (flag_rel_diff) then - if (isatt(diff, "res_cnLevels")) then - delete(diff@res_cnLevels) + if (isatt(dres, "cnLevels")) then + delete(dres@cnLevels) end if - if (isatt(diff, "res_cnFillColors")) then - delete(diff@res_cnFillColors) + if (isatt(dres, "cnFillColors")) then + delete(dres@cnFillColors) end if - diff@res_cnLevels = fspan(-100, 100, 21) + dres@cnLevels = fspan(-100, 100, 21) if (isvar("pal")) then delete(pal) end if pal = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + "percent100.rgb") - diff@res_cnFillColors = pal + dres@cnFillColors = pal end if ; ****************************************************** - if (.not. isatt(diff, "res_cnLevels")) then + if (.not. 
isatt(dres, "cnLevels")) then log_info(DIAG_SCRIPT + " (var: " + var0 + "):") log_info("info: using default contour levels") - diff@res_cnLevels = fspan(min(diff), max(diff), 20) + dres@cnLevels = fspan(min(diff), max(diff), 20) end if - cnLevels = diff@res_cnLevels - if (isatt(diff, "res_cnFillColors")) then - cnFillColors = diff@res_cnFillColors + cnLevels = dres@cnLevels + if (isatt(dres, "cnFillColors")) then + cnFillColors = dres@cnFillColors end if else ; use previously defined colors and contour intervals - if (isatt(diff, "res_cnLevels")) then - delete(diff@res_cnLevels) + if (isatt(dres, "cnLevels")) then + delete(dres@cnLevels) end if - if (isatt(diff, "res_cnFillColors")) then - delete(diff@res_cnFillColors) + if (isatt(dres, "cnFillColors")) then + delete(dres@cnFillColors) end if - diff@res_cnLevels = cnLevels + dres@cnLevels = cnLevels if (isvar("cnFillColors")) then - diff@res_cnFillColors = cnFillColors + dres@cnFillColors = cnFillColors end if end if ; if .not.isvar("cnLevels") - if (imod.eq.ref_ind) then - diff@res_lbLabelBarOn = True - else - diff@res_lbLabelBarOn = False - end if +; if (imod.eq.ref_ind) then +; dres@lbLabelBarOn = True +; else +; dres@lbLabelBarOn = False +; end if ; map attributes - diff@res_mpFillDrawOrder = "PostDraw" ; draw map last - diff@res_cnMissingValFillColor = "Gray" + dres@mpFillDrawOrder = "PostDraw" ; draw map last + dres@cnMissingValFillColor = "Gray" ; no tickmarks and no labels - diff@res_tmYLLabelsOn = False - diff@res_tmYLOn = False - diff@res_tmYRLabelsOn = False - diff@res_tmYROn = False - diff@res_tmXBLabelsOn = False - diff@res_tmXBOn = False - diff@res_tmXTLabelsOn = False - diff@res_tmXTOn = False - diff@res_cnInfoLabelOn = False ; turn off cn info label + dres@tmYLLabelsOn = False + dres@tmYLOn = False + dres@tmYRLabelsOn = False + dres@tmYROn = False + dres@tmXBLabelsOn = False + dres@tmXBOn = False + dres@tmXTLabelsOn = False + dres@tmXTOn = False + dres@cnInfoLabelOn = False ; turn off cn info label ; specified in namelist - diff@res_mpProjection = projection + dres@mpProjection = projection ; set explicit contour levels if (isatt(diag_script_info, "explicit_cn_levels")) then - diff@res_cnLevelSelectionMode = "ExplicitLevels" - if (isatt(diff, "res_cnLevels")) then - delete(diff@res_cnLevels) + dres@cnLevelSelectionMode = "ExplicitLevels" + if (isatt(dres, "cnLevels")) then + delete(dres@cnLevels) end if - diff@res_cnLevels = diag_script_info@explicit_cn_levels + dres@cnLevels = diag_script_info@explicit_cn_levels end if ; ########################################### @@ -802,62 +1022,133 @@ begin ; add to diff as attributes without prefix if (isatt(variable_info, "long_name")) then - diff@var_long_name = variable_info@long_name + diff@long_name = variable_info@long_name end if if (isatt(variable_info, "units")) then - diff@var_units = variable_info@units + diff@units = variable_info@units else - diff@var_units = "" + diff@units = "" end if ; ########################################### ; # create the plot # ; ########################################### - diff@res_gsnDraw = False ; do not draw yet - diff@res_gsnFrame = False ; don't advance frame + if (flag_multiobs_unc) then + dres@gsnDraw = False ; do not draw yet + dres@gsnFrame = False ; don't advance frame + end if ; ---------------------------------------------------------------------- if (numseas.gt.1) then do is = 0, numseas - 1 - if (.not.ismissing(rmsd(is))) then - diff@res_gsnRightString = "rmsd = " + sprintf("%6.3f", rmsd(is)) + if (.not.ismissing(rmsd(imod, is))) 
then + dres@gsnRightString = "rmsd = " + sprintf("%6.3f", rmsd(imod, is)) + else + dres@gsnRightString = "" + end if + if (.not.ismissing(bias(imod, is))) then + dres@gsnLeftString = "bias = " + sprintf("%6.3f", bias(imod, is)) else - diff@res_gsnRightString = "" + dres@gsnLeftString = "" end if - if (.not.ismissing(bias(is))) then - diff@res_gsnLeftString = "bias = " + sprintf("%6.3f", bias(is)) + if (.not.ismissing(corr(imod, is))) then + dres@gsnCenterString = "corr = " + sprintf("%6.3f", corr(imod, is)) else - diff@res_gsnLeftString = "" + dres@gsnCenterString = "" end if if (is.eq.0) then - maps_d(imod, is) = contour_map(wks0d, diff(is, :, :), var0) + maps_d(imod, is) = gsn_csm_contour_map(wks0d, diff(is, :, :), dres) end if if (is.eq.1) then - maps_d(imod, is) = contour_map(wks1d, diff(is, :, :), var0) + maps_d(imod, is) = gsn_csm_contour_map(wks1d, diff(is, :, :), dres) end if if (is.eq.2) then - maps_d(imod, is) = contour_map(wks2d, diff(is, :, :), var0) + maps_d(imod, is) = gsn_csm_contour_map(wks2d, diff(is, :, :), dres) end if if (is.eq.3) then - maps_d(imod, is) = contour_map(wks3d, diff(is, :, :), var0) + maps_d(imod, is) = gsn_csm_contour_map(wks3d, diff(is, :, :), dres) end if end do else - if (.not.ismissing(rmsd(0))) then - diff@res_gsnRightString = "rmsd = " + sprintf("%6.3f", rmsd(0)) + if (.not.ismissing(rmsd(imod, 0))) then + dres@gsnRightString = "rmsd = " + sprintf("%6.3f", rmsd(imod, 0)) else - diff@res_gsnRightString = "" + dres@gsnRightString = "" end if - if (.not.ismissing(bias(0))) then - diff@res_gsnLeftString = "bias = " + sprintf("%6.3f", bias(0)) + if (.not.ismissing(bias(imod, 0))) then + dres@gsnLeftString = "bias = " + sprintf("%6.3f", bias(imod, 0)) else - diff@res_gsnLeftString = "" + dres@gsnLeftString = "" end if - maps_d(imod, 0) = contour_map(wks0d, diff, var0) - end if + if (.not.ismissing(corr(imod, 0))) then + dres@gsnCenterString = "corr = " + sprintf("%6.3f", corr(imod, 0)) + else + dres@gsnCenterString = "" + end if + + maps_d(imod, 0) = gsn_csm_contour_map(wks0d, diff, dres) + + ; obs uncertainty + + if (flag_multiobs_unc) then + xmask = where(abs(diff) .gt. ref_std, 1.0, 0.0) + copy_VarMeta(diff, xmask) + +; if (ii .eq. 0) then +; system("rm debug.nc") +; debugfile = addfile("debug.nc", "c") +; end if +; xm = "xmask" + names(imod) +; debugfile->$xm$ = xmask +; debugfile->obs_std = ref_std +; dn = "diff" + names(imod) +; debugfile->$dn$ = diff + + xres = True + xres@gsnDraw = False ; do not draw yet + xres@gsnFrame = False ; don't advance frame + xres@cnMissingValFillColor = -1 + xres@cnLevelSelectionMode = "ExplicitLevels" + xres@cnLevels = 0.5 + xres@cnFillColors = (/"black", "transparent"/) + xres@cnFillPattern = 17 ; 10 = hatching, 17 = stippling + xres@cnFillOn = True ; color plot desired + xres@cnInfoLabelOn = False + xres@cnLinesOn = False + xres@cnLineLabelsOn = False + xres@lbLabelBarOn = False + xres@gsnRightString = "" + xres@gsnLeftString = "" + xres@gsnCenterString = "" + xres@gsnCenterString = "" + xres@tiYAxisOn = False + xres@tmXBBorderOn = False + xres@tmXTBorderOn = False + xres@tmYLBorderOn = False + xres@tmYRBorderOn = False + xres@tmXBLabelsOn = False + xres@tmYLLabelsOn = False + xres@tmXBOn = False + xres@tmXTOn = False + xres@tmYLOn = False + xres@tmYROn = False + xres@cnConstFEnableFill = True + + if (imod .ne. 
ref_ind) then + plotmask = gsn_csm_contour(wks0d, xmask, xres) + overlay(maps_d(imod, 0), plotmask) + delete(plotmask) + end if + delete(xmask) + draw(maps_d(imod, 0)) + frame(wks0d) + end if ; if flag_multiobs_unc + end if ; if numseas .gt. 1 + + ndframe = ndframe + 1 ; mandatory netcdf output @@ -870,123 +1161,275 @@ begin end do ; ii-loop (models) - if (write_plots) then + ; save default color map in case it is needed later for optionally + ; plotting color bar to a separate file + + tmp_colors = gsn_retrieve_colormap(wks0) + cdims = dimsizes(tmp_colors) + nboxes = dimsizes(res@cnLevels) + clen = cdims(0) + stride = max((/1, ((clen(0)-1) - 2) / nboxes /)) + fill_colors = ispan(2, clen(0) - 1, stride) + mean_colors = tmp_colors(fill_colors, :) + delete(tmp_colors) + delete(fill_colors) + delete(cdims) + + ; sort plots if needed (observations go first) + + plottmp = ispan(0, dim_MOD - 1, 1) + plotind = plottmp + + ; move plots of observational datasets (if present) into the first line(s) + ; of the panel plot + + j = 0 + do i = 0, dimsizes(plottmp) - 1 + if (i.eq.ref_ind) then + plotind(j) = plottmp(i) + j = j + 1 + else if (plottmp(i) .lt. dimsizes(projects)) then + if (isStrSubset(str_lower(projects(plottmp(i))), "obs")) then + plotind(j) = plottmp(i) + j = j + 1 + end if + end if + end if + end do + + do i = 0, dimsizes(plottmp) - 1 + if ((isStrSubset(str_lower(projects(plottmp(i))), \ + "obs")).or.(i.eq.ref_ind)) then + else + plotind(j) = plottmp(i) + j = j + 1 + end if + end do + + pres = True ; needed to override + ; panelling defaults + if (panel_labels) then + ; print dataset name on each panel + pres@gsnPanelFigureStrings = names(plotind) + end if + pres@gsnPanelFigureStringsFontHeightF = min((/0.01, 0.01 * 6.0 \ + / tofloat((dim_MOD + 1) / 2)/)) + pres@lbLabelFontHeightF = min((/0.015, 0.01 * 6.0 \ + / tofloat((dim_MOD + 1) / 2)/)) + pres@lbAutoManage = False + pres@lbTopMarginF = 0.1 + pres@lbPerimOn = False ; draw line around label + ; bar area + pres@gsnPanelCenter = False + if (dim_MOD.le.8) then + pres@pmLabelBarOrthogonalPosF = -0.03 + else + pres@pmLabelBarOrthogonalPosF = -0.01 ; shift label bar a bit to + ; the bottom + end if + + if (embracesetup) then + if (numseas.gt.1) then + pres@txString = season(0) + outfile(0) = panelling(wks0, maps(plotind, 0), (dim_MOD + 3) / 4, \ + 4, pres) + + pres@txString = season(1) + outfile(1) = panelling(wks1, maps(plotind, 1), (dim_MOD + 3) / 4, \ + 4, pres) + + pres@txString = season(2) + outfile(2) = panelling(wks2, maps(plotind, 2), (dim_MOD + 3) / 4, \ + 4, pres) + + pres@txString = season(3) + outfile(3) = panelling(wks3, maps(plotind, 3), (dim_MOD + 3) / 4, \ + 4, pres) + log_info(" Wrote " + outfile) + else + pres@gsnPanelRowSpec = True ; tell panel what order to plt + pres@gsnPanelYWhiteSpacePercent = 5 + pres@gsnPanelXWhiteSpacePercent = 5 + if (isatt(diag_script_info, "PanelTop")) then + top = tofloat(diag_script_info@PanelTop) + else + top = 0.99 ; default + end if + pres@gsnPanelTop = top + + if (isvar("plotsperline")) then + delete(plotsperline) + end if + + plotsperline = new((dim_MOD + 1) / 2, integer) + plotsperline = 2 + + if ((isStrSubset(str_lower(projects(plotind(0))), "obs")).and. 
\ + .not.(isStrSubset(str_lower(projects(plotind(1))), "obs"))) then + plotsperline(0) = 1 + end if + + if (sum(plotsperline).gt.dimsizes(plotind)) then + plotsperline(dimsizes(plotsperline) - 1) = 1 + end if + + if (sum(plotsperline).lt.dimsizes(plotind)) then + xadd = 1 + xtmp = array_append_record(plotsperline, xadd, 0) + delete(plotsperline) + plotsperline = xtmp + delete(xtmp) + end if + + gsn_panel(wks0, maps(plotind, 0), plotsperline, pres) + outfile(0) = wks0@fullname + end if + else ; if embracesetup + if (numseas.gt.1) then + pres@txString = season(0) + outfile(0) = panelling(wks0, maps(plotind, 0), (dim_MOD + 3) / 4, \ + 4, pres) + + pres@txString = season(1) + outfile(1) = panelling(wks1, maps(plotind, 1), (dim_MOD + 3) / 4, \ + 4, pres) + + pres@txString = season(2) + outfile(2) = panelling(wks2, maps(plotind, 2), (dim_MOD + 3) / 4, \ + 4, pres) + + pres@txString = season(3) + outfile(3) = panelling(wks3, maps(plotind, 3), (dim_MOD + 3) / 4, \ + 4, pres) + else + outfile(0) = panelling(wks0, maps(plotind, 0), (dim_MOD + 3) / 4, \ + 4, pres) + end if + end if ; if embracesetup + + do is = 0, numseas - 1 + log_info("Wrote " + outfile(is)) + end do + + ; ------------------------------------------------------------------------ + ; write provenance to netcdf output and plot file(s) (mean) + ; ------------------------------------------------------------------------ + + statistics = (/"clim", "mean"/) + if (isatt(diag_script_info, "region")) then + domain = "reg" + else + domain = "global" + end if + plottype = "geo" + + do is = 0, numseas - 1 + caption = "Mean values for variable " + var0 \ + + " (" + allseas + ")." + log_provenance(nc_outfile_mean, outfile(is), caption, statistics, \ + domain, plottype, "", "", climofiles) + end do + + ; ======================================================================== + + if (flag_diff) then +; pres@lbTitleString = "~F33~D~F21~" + diff@long_name + " (" + \ +; diff@units + ")" + ; save default color map in case it is needed later for optionally ; plotting color bar to a separate file - tmp_colors = gsn_retrieve_colormap(wks0) + if (isvar("nboxes")) then + delete(nboxes) + end if + + tmp_colors = gsn_retrieve_colormap(wks0d) cdims = dimsizes(tmp_colors) - nboxes = dimsizes(data1@res_cnLevels) + nboxes = dimsizes(dres@cnLevels) clen = cdims(0) stride = max((/1, ((clen(0)-1) - 2) / nboxes /)) fill_colors = ispan(2, clen(0) - 1, stride) - mean_colors = tmp_colors(fill_colors, :) + diff_colors = tmp_colors(fill_colors, :) delete(tmp_colors) delete(fill_colors) delete(cdims) - ; sort plots if needed (observations go first) + if (isvar("plottmp")) then + delete(plottmp) + end if + + if (isvar("plotind")) then + delete(plotind) + end if - plottmp = ispan(0, dim_MOD - 1, 1) + plottmp = ind(ispan(0, dim_MOD - 1, 1).ne.ref_ind) plotind = plottmp - ; move plots of observational datasets (if present) into the first line(s) - ; of the panel plot + ; if there is a second observational dataset, move the corresponding + ; plot to the first line of the panel plot j = 0 do i = 0, dimsizes(plottmp) - 1 - if (i.eq.ref_ind) then + if (isStrSubset(str_lower(projects(plottmp(i))), "obs")) then plotind(j) = plottmp(i) j = j + 1 - else if (plottmp(i) .lt. 
dimsizes(projects)) then - if (isStrSubset(str_lower(projects(plottmp(i))), \ - "obs")) then - plotind(j) = plottmp(i) - j = j + 1 - end if - end if end if end do - do i = 0, dimsizes(plottmp) - 1 - if ((isStrSubset(str_lower(projects(plottmp(i))), \ - "obs")).or.(i.eq.ref_ind)) then + if (isStrSubset(str_lower(projects(plottmp(i))), "obs")) then else plotind(j) = plottmp(i) j = j + 1 end if end do - pres = True ; needed to override - ; panelling defaults - pres@gsnPanelLabelBar = True ; add common colorbar + if (isatt(pres, "gsnPanelFigureStrings")) then + delete(pres@gsnPanelFigureStrings) + end if if (panel_labels) then - ; print dataset name on each panel pres@gsnPanelFigureStrings = names(plotind) end if - pres@gsnPanelFigureStringsFontHeightF = min((/0.01, 0.01 * 6.0 \ - / tofloat((dim_MOD + 1) / 2)/)) - pres@lbLabelFontHeightF = min((/0.015, 0.01 * 6.0 \ - / tofloat((dim_MOD + 1) / 2)/)) - pres@lbAutoManage = False - pres@lbTopMarginF = 0.1 - pres@lbTitleOn = True - pres@lbTitleFontHeightF = min((/0.015, 0.01 * 6.0 \ - / tofloat((dim_MOD + 1) / 2)/)) - pres@lbTitlePosition = "Bottom" - pres@lbTitleString = data1@long_name + " (" \ - + data1@units + ")" - pres@lbPerimOn = False ; draw line around label - ; bar area - pres@gsnPanelCenter = False - if (dim_MOD.le.8) then - pres@pmLabelBarOrthogonalPosF = -0.03 - else - pres@pmLabelBarOrthogonalPosF = -0.01 ; shift label bar a bit to - ; the bottom + + if (dimsizes(plotind).eq.1) then + pres@gsnPanelRight = 0.5 end if if (embracesetup) then if (numseas.gt.1) then pres@txString = season(0) - outfile(0) = panelling(wks0, maps(plotind, 0), (dim_MOD + 3) / 4, \ - 4, pres) + outfile_d(0) = panelling(wks0d, maps_d(plotind, 0), \ + (dim_MOD + 3) / 4, 4, pres) pres@txString = season(1) - outfile(1) = panelling(wks1, maps(plotind, 1), (dim_MOD + 3) / 4, \ - 4, pres) + outfile_d(1) = panelling(wks1d, maps_d(plotind, 1), \ + (dim_MOD + 3) / 4, 4, pres) pres@txString = season(2) - outfile(2) = panelling(wks2, maps(plotind, 2), (dim_MOD + 3) / 4, \ - 4, pres) + outfile_d(2) = panelling(wks2d, maps_d(plotind, 2), \ + (dim_MOD + 3) / 4, 4, pres) pres@txString = season(3) - outfile(3) = panelling(wks3, maps(plotind, 3), (dim_MOD + 3) / 4, \ - 4, pres) - log_info(" Wrote " + outfile) + outfile_d(3) = panelling(wks3d, maps_d(plotind, 3), \ + (dim_MOD + 3) / 4, 4, pres) else - pres@gsnPanelRowSpec = True ; tell panel what order to plt + pres@gsnPanelRowSpec = True ; tell panel what order to plt pres@gsnPanelYWhiteSpacePercent = 5 pres@gsnPanelXWhiteSpacePercent = 5 - if (isatt(diag_script_info, "PanelTop")) then - top = tofloat(diag_script_info@PanelTop) - else - top = 0.99 ; default - end if - pres@gsnPanelTop = top + pres@gsnPanelTop = tofloat(diag_script_info@PanelTop) if (isvar("plotsperline")) then delete(plotsperline) end if - plotsperline = new((dim_MOD + 1) / 2, integer) + plotsperline = new(max((/1, dim_MOD / 2/)), integer) plotsperline = 2 - if ((isStrSubset(str_lower(projects(plotind(0))), \ - "obs")).and. \ - .not.(isStrSubset(str_lower(projects(plotind(1))), \ - "obs"))) then - plotsperline(0) = 1 + if (dimsizes(plotind).gt.1) then + if ((isStrSubset(str_lower(projects(plotind(0))), "obs")).and. \ + .not. 
\ + (isStrSubset(str_lower(projects(plotind(1))), "obs"))) then + plotsperline(0) = 1 + end if end if if (sum(plotsperline).gt.dimsizes(plotind)) then @@ -1001,239 +1444,113 @@ begin delete(xtmp) end if - gsn_panel(wks0, maps(plotind, 0), plotsperline, pres) - outfile(0) = wks0@fullname + gsn_panel(wks0d, maps_d(plotind, 0), plotsperline, pres) + outfile_d(0) = wks0d@fullname end if - else ; if embracesetup + else ; embracesetup = False if (numseas.gt.1) then - pres@txString = season(0) - outfile(0) = panelling(wks0, maps(plotind, 0), (dim_MOD + 3) / 4, \ - 4, pres) + pres@txString = season(0) + outfile_d(0) = panelling(wks0d, maps_d(plotind, 0), \ + (dim_MOD + 3) / 4, 4, pres) - pres@txString = season(1) - outfile(1) = panelling(wks1, maps(plotind, 1), (dim_MOD + 3) / 4, \ - 4, pres) + pres@txString = season(1) + outfile_d(1) = panelling(wks1d, maps_d(plotind, 1), \ + (dim_MOD + 3) / 4, 4, pres) - pres@txString = season(2) - outfile(2) = panelling(wks2, maps(plotind, 2), (dim_MOD + 3) / 4, \ - 4, pres) + pres@txString = season(2) + outfile_d(2) = panelling(wks2d, maps_d(plotind, 2), \ + (dim_MOD + 3) / 4, 4, pres) - pres@txString = season(3) - outfile(3) = panelling(wks3, maps(plotind, 3), (dim_MOD + 3) / 4, \ - 4, pres) + pres@txString = season(3) + outfile_d(3) = panelling(wks3d, maps_d(plotind, 3), \ + (dim_MOD + 3) / 4, 4, pres) else - outfile(0) = panelling(wks0, maps(plotind, 0), (dim_MOD + 3) / 4, \ - 4, pres) + outfile_d(0) = panelling(wks0d, maps_d(plotind, 0), \ + (dim_MOD + 3) / 4, 4, pres) end if - end if ; if embracesetup + end if ; end if embracesetup do is = 0, numseas - 1 - log_info("Wrote " + outfile(is)) - end do - - ; ------------------------------------------------------------------------ - ; write provenance to netcdf output and plot file(s) (mean) - ; ------------------------------------------------------------------------ - - statistics = (/"clim", "mean"/) - if (isatt(diag_script_info, "region")) then - domain = ("reg") - else - domain = ("glob") - end if - plottype = ("geo") - - do is = 0, numseas - 1 - caption = "Mean values for variable " + var0 \ - + " (" + allseas + ")." 
-      log_provenance(nc_outfile_mean, outfile(is), caption, statistics, \
-                     domain, plottype, "", "", climofiles)
-    end do
-
-    ; ========================================================================
-
-    if (flag_diff) then
-      pres@lbTitleString = "~F33~D~F21~" + diff@long_name + " (" + \
-                           diff@units + ")"
-
-      ; save default color map in case it is needed later for optionally
-      ; plotting color bar to a separate file
+    log_info(" Wrote " + outfile(is))
-      if (isvar("nboxes")) then
-        delete(nboxes)
-      end if
-
-      tmp_colors = gsn_retrieve_colormap(wks0d)
-      cdims = dimsizes(tmp_colors)
-      nboxes = dimsizes(diff@res_cnLevels)
-      clen = cdims(0)
-      stride = max((/1, ((clen(0)-1) - 2) / nboxes /))
-      fill_colors = ispan(2, clen(0) - 1, stride)
-      diff_colors = tmp_colors(fill_colors, :)
-      delete(tmp_colors)
-      delete(fill_colors)
-      delete(cdims)
-
-      if (isvar("plottmp")) then
-        delete(plottmp)
-      end if
-
-      if (isvar("plotind")) then
-        delete(plotind)
-      end if
-
-      plottmp = ind(ispan(0, dim_MOD - 1, 1).ne.ref_ind)
-      plotind = plottmp
-
-      ; if there is a second observational dataset, move the corresponding
-      ; plot to the first line of the panel plot
-
-      j = 0
-      do i = 0, dimsizes(plottmp) - 1
-        if (isStrSubset(str_lower(projects(plottmp(i))), "obs")) then
-          plotind(j) = plottmp(i)
-          j = j + 1
-        end if
-      end do
-      do i = 0, dimsizes(plottmp) - 1
-        if (isStrSubset(str_lower(projects(plottmp(i))), "obs")) then
-        else
-          plotind(j) = plottmp(i)
-          j = j + 1
-        end if
-      end do
+    ; --------------------------------------------------------------------
+    ; write provenance to netcdf output and plot file(s) (bias)
+    ; --------------------------------------------------------------------
-      if (isatt(pres, "gsnPanelFigureStrings")) then
-        delete(pres@gsnPanelFigureStrings)
-      end if
-      if (panel_labels) then
-        pres@gsnPanelFigureStrings = names(plotind)
-      end if
-
-      if (dimsizes(plotind).eq.1) then
-        pres@gsnPanelRight = 0.5
+    statistics = (/"clim", "diff"/)
+    if (isatt(diag_script_info, "region")) then
+      domain = "reg"
+    else
+      domain = "global"
     end if
+    plottype = "geo"
-      if (embracesetup) then
-        if (numseas.gt.1) then
-          pres@txString = season(0)
-          outfile_d(0) = panelling(wks0d, maps_d(plotind, 0), \
-                                   (dim_MOD + 3) / 4, 4, pres)
+    ; note: because function log_provenance does not yet support attaching
+    ;       different captions to netcdf (contains all seasons) and plots
+    ;       (contain one season each), the caption cannot specify the
+    ;       season plotted; using "annual" or "DJF/MAM/JJA/SON" instead.
-          pres@txString = season(1)
-          outfile_d(1) = panelling(wks1d, maps_d(plotind, 1), \
-                                   (dim_MOD + 3) / 4, 4, pres)
-
-          pres@txString = season(2)
-          outfile_d(2) = panelling(wks2d, maps_d(plotind, 2), \
-                                   (dim_MOD + 3) / 4, 4, pres)
-
-          pres@txString = season(3)
-          outfile_d(3) = panelling(wks3, maps_d(plotind, 3), \
-                                   (dim_MOD + 3) / 4, 4, pres)
-        else
-          pres@gsnPanelRowSpec = True  ; tell panel what order to plt
-          pres@gsnPanelYWhiteSpacePercent = 5
-          pres@gsnPanelXWhiteSpacePercent = 5
-          pres@gsnPanelTop = tofloat(diag_script_info@PanelTop)
+    caption = "Differences for variable " + var0 \
+              + " (" + allseas + "), reference = " + refname + "."
+    log_provenance(nc_outfile_bias, outfile_d(is), caption, statistics, \
+                   domain, plottype, "", "", climofiles)
+    end do
+  end if  ; if flag_diff
+
+  ; optionally save legend(s) to extra file(s)
+
+  if (extralegend) then
+    nboxes = dimsizes(res@cnLevels) + 1
+    wksleg = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_" + var0 \
+                     + "_legend")
+    pres@lbMonoFillPattern = True
+    pres@lbOrientation = "Horizontal"
+    pres@vpWidthF = 0.7
+    pres@vpHeightF = 0.1
+    pres@lbLabelFontHeightF = 0.015
+    pres@lbLabelAlignment = "InteriorEdges"
+;    pres@lbTitleFontHeightF = 0.015
+;    pres@lbTitleString = data1@long_name + " (" + data1@units + ")"
+
+    labels = tostring(res@cnLevels)
+
+    ; remove trailing zeros from strings
+
+    do i = 0, dimsizes(labels) - 1
+      i1 = str_index_of_substr(labels(i), ".", -1)
+      if (.not.ismissing(i1)) then
+        tmp = stringtochar(labels(i))
+        do j = dimsizes(tmp) - 2, i1, 1
+          if ((tmp(j).ne.".").and.(tmp(j).ne."0")) then
+            break
           end if
+        end do
+        labels(i) = chartostring(tmp(0:j))
+        delete(tmp)
+      end if
+    end do
-      if (isvar("plotsperline")) then
-        delete(plotsperline)
-      end if
-
-      plotsperline = new(max((/1, dim_MOD / 2/)), integer)
-      plotsperline = 2
-
-      if (dimsizes(plotind).gt.1) then
-        if ((isStrSubset(str_lower(projects(plotind(0))), "obs")).and. \
-          .not.
\ - (isStrSubset(str_lower(projects(plotind(1))), "obs"))) then - plotsperline(0) = 1 - end if - end if - - if (sum(plotsperline).gt.dimsizes(plotind)) then - plotsperline(dimsizes(plotsperline) - 1) = 1 - end if - - if (sum(plotsperline).lt.dimsizes(plotind)) then - xadd = 1 - xtmp = array_append_record(plotsperline, xadd, 0) - delete(plotsperline) - plotsperline = xtmp - delete(xtmp) + caption = "Differences for variable " + var0 \ + + " (" + allseas + "), reference = " + refname + "." + log_provenance(nc_outfile_bias, outfile_d(is), caption, statistics, \ + domain, plottype, "", "", climofiles) + end do + end if ; if flag_diff + + ; optionally save legend(s) to extra file(s) + + if (extralegend) then + nboxes = dimsizes(res@cnLevels) + 1 + wksleg = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_" + var0 \ + + "_legend") + pres@lbMonoFillPattern = True + pres@lbOrientation = "Horizontal" + pres@vpWidthF = 0.7 + pres@vpHeightF = 0.1 + pres@lbLabelFontHeightF = 0.015 + pres@lbLabelAlignment = "InteriorEdges" +; pres@lbTitleFontHeightF = 0.015 +; pres@lbTitleString = data1@long_name + " (" + data1@units + ")" + + labels = tostring(res@cnLevels) + + ; remove trailing zeros from strings + + do i = 0, dimsizes(labels) - 1 + i1 = str_index_of_substr(labels(i), ".", -1) + if (.not.ismissing(i1)) then + tmp = stringtochar(labels(i)) + do j = dimsizes(tmp) - 2, i1, 1 + if ((tmp(j).ne.".").and.(tmp(j).ne."0")) then + break end if + end do + labels(i) = chartostring(tmp(0:j)) + delete(tmp) + end if + end do - gsn_panel(wks0d, maps_d(plotind, 0), plotsperline, pres) - outfile_d(0) = wks0d@fullname - end if - else ; embracesetup = False - if (numseas.gt.1) then - pres@txString = season(0) - outfile_d(0) = panelling(wks0d, maps_d(plotind, 0), \ - (dim_MOD + 3) / 4, 4, pres) - - pres@txString = season(1) - outfile_d(1) = panelling(wks1d, maps_d(plotind, 1), \ - (dim_MOD + 3) / 4, 4, pres) - - pres@txString = season(2) - outfile_d(2) = panelling(wks2d, maps_d(plotind, 2), \ - (dim_MOD + 3) / 4, 4, pres) - - pres@txString = season(3) - outfile_d(3) = panelling(wks3d, maps_d(plotind, 3), \ - (dim_MOD + 3) / 4, 4, pres) - else - outfile_d(0) = panelling(wks0d, maps_d(plotind, 0), \ - (dim_MOD + 3) / 4, 4, pres) - end if - end if ; end if embracesetup - - do is = 0, numseas - 1 - log_info(" Wrote " + outfile(is)) - - ; -------------------------------------------------------------------- - ; write provenance to netcdf output and plot file(s) (bias) - ; -------------------------------------------------------------------- - - statistics = (/"clim", "diff"/) - if (isatt(diag_script_info, "region")) then - domain = ("reg") - else - domain = ("glob") - end if - plottype = ("geo") - - ; note: because function log_provenance does not yet support to attach - ; different captions to netcdf (contains all seasons) and plots - ; (contain one season each), the caption cannot specifiy the - ; season plotted; using "annual" or "DJF/MAM/JJA/SON" instead. - - caption = "Differences for variable " + var0 \ - + " (" + allseas + "), reference = " + refname + "." 
- log_provenance(nc_outfile_bias, outfile_d(is), caption, statistics, \ - domain, plottype, "", "", climofiles) - end do + if (isatt(res, "cnFillColors")) then + pres@lbFillColors = res@cnFillColors + else if (isatt(res, "cnFillPalette")) then + pres@lbFillColors = res@cnFillPalette + else + pres@lbFillColors = mean_colors ; default colors + end if + end if - end if ; if flag_diff + gsn_labelbar_ndc(wksleg, nboxes, labels, 0.1, 0.9, pres) - ; optionally save legend(s) to extra file(s) + delete(wksleg) + delete(labels) + delete(pres@lbFillColors) - if (extralegend) then - nboxes = dimsizes(data1@res_cnLevels) + 1 + if (flag_diff) then + nboxes = dimsizes(dres@cnLevels) + 1 wksleg = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_" + var0 \ - + "_legend") - pres@lbMonoFillPattern = True - pres@lbOrientation = "Horizontal" - pres@vpWidthF = 0.7 - pres@vpHeightF = 0.1 - pres@lbLabelFontHeightF = 0.015 - pres@lbLabelAlignment = "InteriorEdges" - pres@lbTitleFontHeightF = 0.015 - pres@lbTitleString = data1@long_name + " (" + data1@units + ")" - - labels = tostring(data1@res_cnLevels) + + "_diff_legend") + + labels = tostring(dres@cnLevels) ; remove trailing zeros from strings @@ -1251,64 +1568,142 @@ begin end if end do - if (isatt(data1, "res_cnFillColors")) then - pres@lbFillColors = data1@res_cnFillColors - else if (isatt(data1, "res_cnFillPalette")) then - pres@lbFillColors = data1@res_cnFillPalette +; if (flag_rel_diff) then +; pres@lbTitleString = "~F33~D~F21~" + data1@long_name + " (%)" +; else +; pres@lbTitleString = "~F33~D~F21~" + data1@long_name + " (" + \ +; data1@units + ")" +; end if + + if (isatt(dres, "cnFillColors")) then + pres@lbFillColors = dres@cnFillColors + else if (isatt(dres, "cnFillPalette")) then + pres@lbFillColors = dres@cnFillPalette else - pres@lbFillColors = mean_colors ; default colors + pres@lbFillColors = diff_colors ; default colors end if end if gsn_labelbar_ndc(wksleg, nboxes, labels, 0.1, 0.9, pres) + end if ; if (flag_diff) + end if ; if (extralegend) - delete(wksleg) - delete(labels) - delete(pres@lbFillColors) + ; ========================================================================== - if (flag_diff) then - nboxes = dimsizes(diff@res_cnLevels) + 1 - wksleg = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_" + var0 \ - + "_diff_legend") + ; output some statistics to ASCII files - labels = tostring(diff@res_cnLevels) + statLabel = (/ " mean", " stddev", " min", \ + " lowDec", " lowOct", " lowSex", \ + "lowQuart", " lowTri", " median", \ + " hiTri", " hiQuart", " hiSex", " hiOct", " hiDec", \ + " max", " range", " disper", " RMS", \ + " nTot", " nUse", " nMsg", " %Msg" /) - ; remove trailing zeros from strings + idxmod = get_mod(names, projects) ; model indices w/o MultiModelMean - do i = 0, dimsizes(labels) - 1 - i1 = str_index_of_substr(labels(i), ".", -1) - if (.not.ismissing(i1)) then - tmp = stringtochar(labels(i)) - do j = dimsizes(tmp) - 2, i1, 1 - if ((tmp(j).ne.".").and.(tmp(j).ne."0")) then - break - end if - end do - labels(i) = chartostring(tmp(0:j)) - delete(tmp) - end if - end do + if ((idxmod(0) .ne. -1) .and. (idxobs(0) .ne. 
-1)) then - if (flag_rel_diff) then - pres@lbTitleString = "~F33~D~F21~" + data1@long_name + " (%)" - else - pres@lbTitleString = "~F33~D~F21~" + data1@long_name + " (" + \ - data1@units + ")" - end if - - if (isatt(diff, "res_cnFillColors")) then - pres@lbFillColors = diff@res_cnFillColors - else if (isatt(diff, "res_cnFillPalette")) then - pres@lbFillColors = diff@res_cnFillPalette - else - pres@lbFillColors = diff_colors ; default colors - end if - end if + do is = 0, numseas - 1 - gsn_labelbar_ndc(wksleg, nboxes, labels, 0.1, 0.9, pres) - end if ; if (flag_diff) - end if ; if (extralegend) - end if ; if write_plots + ; *** mean *** + + ofile = work_dir + var0 + "_mean_" + season(is) + filename_add + ".txt" + statmod = stat_dispersion(gavg(idxmod, is), False) + statobs = stat_dispersion(gavg(idxobs, is), False) + + statselect = (/0, 1, 2, 14, 3, 6, 8, 10, 13/) + + write_table(ofile, "w", [/names + " " + tostring(gavg(:, is))/], "%s") + write_table(ofile, "a", [/"--------------------------------"/], "%s") + write_table(ofile, "a", [/"statistics models:"/], "%s") + write_table(ofile, "a", [/names(idxmod)/], "%s") + write_table(ofile, "a", [/"--------------------------------"/], "%s") + write_table(ofile, "a", [/statLabel(statselect) + " " + \ + tostring(statmod(statselect))/], "%s") + write_table(ofile, "a", [/"--------------------------------"/], "%s") + write_table(ofile, "a", [/"statistics observations:"/], "%s") + write_table(ofile, "a", [/names(idxobs)/], "%s") + write_table(ofile, "a", [/"--------------------------------"/], "%s") + write_table(ofile, "a", [/statLabel(statselect) + " " + \ + tostring(statobs(statselect))/], "%s") + + delete(statselect) + + ; *** bias *** + + ofile = work_dir + var0 + "_bias_" + season(is) + filename_add + ".txt" + statmod = stat_dispersion(bias(idxmod, is), False) + statobs = stat_dispersion(bias(idxobs, is), False) + + statselect = (/0, 1, 2, 14, 3, 6, 8, 10, 13/) + + write_table(ofile, "w", [/names + " " + tostring(bias(:, is))/], "%s") + write_table(ofile, "a", [/"--------------------------------"/], "%s") + write_table(ofile, "a", [/"statistics models:"/], "%s") + write_table(ofile, "a", [/names(idxmod)/], "%s") + write_table(ofile, "a", [/"--------------------------------"/], "%s") + write_table(ofile, "a", [/statLabel(statselect) + " " + \ + tostring(statmod(statselect))/], "%s") + write_table(ofile, "a", [/"--------------------------------"/], "%s") + write_table(ofile, "a", [/"statistics observations:"/], "%s") + write_table(ofile, "a", [/names(idxobs)/], "%s") + write_table(ofile, "a", [/"--------------------------------"/], "%s") + write_table(ofile, "a", [/statLabel(statselect) + " " + \ + tostring(statobs(statselect))/], "%s") + + delete(statselect) + + ; *** correlation *** + + ofile = work_dir + var0 + "_corr_" + season(is) + filename_add + ".txt" + statmod = stat_dispersion(corr(idxmod, is), False) + statobs = stat_dispersion(corr(idxobs, is), False) + + statselect = (/2, 14, 3, 6, 8, 10, 13/) + + write_table(ofile, "w", [/names + " " + tostring(corr(:, is))/], "%s") + write_table(ofile, "a", [/"--------------------------------"/], "%s") + write_table(ofile, "a", [/"statistics models:"/], "%s") + write_table(ofile, "a", [/names(idxmod)/], "%s") + write_table(ofile, "a", [/"--------------------------------"/], "%s") + write_table(ofile, "a", [/statLabel(statselect) + " " + \ + tostring(statmod(statselect))/], "%s") + write_table(ofile, "a", [/"--------------------------------"/], "%s") + write_table(ofile, "a", [/"statistics 
observations:"/], "%s") + write_table(ofile, "a", [/names(idxobs)/], "%s") + write_table(ofile, "a", [/"--------------------------------"/], "%s") + write_table(ofile, "a", [/statLabel(statselect) + " " + \ + tostring(statobs(statselect))/], "%s") + + delete(statselect) + + ; *** rmsd *** + + ofile = work_dir + var0 + "_rmsd_" + season(is) + filename_add + ".txt" + statmod = stat_dispersion(rmsd(idxmod, is), False) + statobs = stat_dispersion(rmsd(idxobs, is), False) + + statselect = (/2, 14, 3, 6, 8, 10, 13/) + + write_table(ofile, "w", [/names + " " + tostring(rmsd(:, is))/], "%s") + write_table(ofile, "a", [/"--------------------------------"/], "%s") + write_table(ofile, "a", [/"statistics models:"/], "%s") + write_table(ofile, "a", [/names(idxmod)/], "%s") + write_table(ofile, "a", [/"--------------------------------"/], "%s") + write_table(ofile, "a", [/statLabel(statselect) + " " + \ + tostring(statmod(statselect))/], "%s") + write_table(ofile, "a", [/"--------------------------------"/], "%s") + write_table(ofile, "a", [/"statistics observations:"/], "%s") + write_table(ofile, "a", [/names(idxobs)/], "%s") + write_table(ofile, "a", [/"--------------------------------"/], "%s") + write_table(ofile, "a", [/statLabel(statselect) + " " + \ + tostring(statobs(statselect))/], "%s") + + delete(statselect) + + end do ; loop over seasons + + end if ; if there are models and observations ; ========================================================================== diff --git a/esmvaltool/diag_scripts/clouds/clouds_bias.ncl b/esmvaltool/diag_scripts/clouds/clouds_bias.ncl index 7920fea535..5eeaaaded9 100644 --- a/esmvaltool/diag_scripts/clouds/clouds_bias.ncl +++ b/esmvaltool/diag_scripts/clouds/clouds_bias.ncl @@ -4,7 +4,7 @@ ; PROJECT-NAME EMBRACE ; ############################################################################ ; Description -; Calculates the multi-model mean bias, absolute difference and relative +; Calculates the (multi-model mean) bias, absolute difference and relative ; difference of annual mean 2-d cloud variables compared with a ; reference dataset (observations). ; @@ -28,15 +28,17 @@ ; none ; ; Modification history -; 20190222-A_laue_ax: added output of provenance (v2.0) -; 20181119-A_laue_ax: adapted code to multi-variable capable framework -; 20180923-A_laue_ax: added writing of results to netcdf -; 20180914-A_laue_ax: code rewritten for ESMValTool v2.0 -; 20170620-A_laue_ax: added tags for reporting -; 20160901-A_laue_ax: added regridding option 1 deg x 1 deg -; 20151027-A_laue_ax: moved call to 'write_references' to the beginning -; of the code -; 20150428-A-laue_ax: written. +; 20230118-lauer_axel: added support to plot just 1 model +; 20211006-lauer_axel: removed write_plots +; 20190222-lauer_axel: added output of provenance (v2.0) +; 20181119-lauer_axel: adapted code to multi-variable capable framework +; 20180923-lauer_axel: added writing of results to netcdf +; 20180914-lauer_axel: code rewritten for ESMValTool v2.0 +; 20170620-lauer_axel: added tags for reporting +; 20160901-lauer_axel: added regridding option 1 deg x 1 deg +; 20151027-lauer_axel: moved call to 'write_references' to the beginning +; of the code +; 20150428-lauer_axel: written. 
; ; ############################################################################ @@ -96,12 +98,6 @@ begin ; Create work dir system("mkdir -p " + work_dir) - if (config_user_info@write_plots.eq."True") then - write_plots = True - else - write_plots = False - end if - end begin @@ -131,7 +127,15 @@ begin mm_ind = ind(names .eq. "MultiModelMean") if (ismissing(mm_ind)) then - error_msg("f", DIAG_SCRIPT, "", "multi-model mean is missing (required)") + mod_ind = ind(names .ne. ref_ind) + if (all(ismissing(mod_ind))) then + error_msg("f", DIAG_SCRIPT, "", "no dataset besides reference " \ + + "dataset found. Cannot continue.") + end if + mm_ind = mod_ind(0) + log_info("multi-model mean is missing, using first dataset (" \ + + names(mm_ind) + ") instead") + delete(mod_ind) end if ; basename of diag_script @@ -238,7 +242,7 @@ begin diff@res_tiMainFontHeightF = 0.016 - diff@res_tiMainString = "Multi Model Mean Bias" + diff@res_tiMainString = names(mm_ind) + " Bias" copy_VarMeta(diff, mmdata) delete(mmdata@res_cnLevels) @@ -277,7 +281,7 @@ begin end if end if - mmdata@res_tiMainString = "Multi Model Mean" + mmdata@res_tiMainString = names(mm_ind) plotsperline = (/2, 0/) plotind = (/0, 1/) ; mmm and mean bias are always plotted @@ -303,7 +307,7 @@ begin absdiff@res_cnLevels = tmp(1:dimsizes(tmp)-1) delete(tmp) - absdiff@res_tiMainString = "Multi Model Mean of Absolute Error" + absdiff@res_tiMainString = names(mm_ind) + " Absolute Error" iadd = 2 itmp = array_append_record(plotind, iadd, 0) @@ -324,7 +328,7 @@ begin copy_VarMeta(diff, reldiff) delete(reldiff@res_cnLevels) reldiff@res_cnLevels = fspan(-90.0, 90.0, 13) - reldiff@res_tiMainString = "Multi Model Mean of Relative Error" + reldiff@res_tiMainString = names(mm_ind) + " Relative Error" reldiff@units = "%" reldiff@res_lbTitleString = "(" + reldiff@units + ")" if (isvar("pal4")) then @@ -351,56 +355,54 @@ begin plotfile = new(numseas, string) plotfile(:) = "" - if (write_plots) then - do is = 0, numseas - 1 - ; -------------------------------------------------------------------- - ; create workspace + do is = 0, numseas - 1 + ; -------------------------------------------------------------------- + ; create workspace - if (isvar("wks")) then - delete(wks) - end if + if (isvar("wks")) then + delete(wks) + end if - wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_bias_" + var0 \ - + "_" + season(is)) - - plotfile(is) = wks@fullname - - if (numseas.gt.1) then - pres@txString = season(is) - plots(0, is) = contour_map(wks, mmdata(is, :, :), var0) - plots(1, is) = contour_map(wks, diff(is, :, :), var0) - if (plot_abs_diff) then - plots(2, is) = contour_map(wks, absdiff(is, :, :), var0) - end if - if (plot_rel_diff) then - plots(3, is) = contour_map(wks, reldiff(is, :, :), var0) - end if - gsn_panel(wks, plots(plotind, is), plotsperline, pres) - else - plots(0, 0) = contour_map(wks, mmdata, var0) - plots(1, 0) = contour_map(wks, diff, var0) - if (plot_abs_diff) then - plots(2, 0) = contour_map(wks, absdiff, var0) - end if - if (plot_rel_diff) then - plots(3, 0) = contour_map(wks, reldiff, var0) - end if - gsn_panel(wks, plots(plotind, 0), plotsperline, pres) - end if + wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_bias_" + var0 \ + + "_" + season(is)) - ; add meta data to plot (for reporting) + plotfile(is) = wks@fullname - caption = "Multi model values, from top left to bottom right: " \ - + "mean, bias" + if (numseas.gt.1) then + pres@txString = season(is) + plots(0, is) = contour_map(wks, mmdata(is, :, :), var0) + plots(1, is) = 
contour_map(wks, diff(is, :, :), var0) if (plot_abs_diff) then - caption = caption + ", absolute error" + plots(2, is) = contour_map(wks, absdiff(is, :, :), var0) end if if (plot_rel_diff) then - caption = caption + ", relative error" + plots(3, is) = contour_map(wks, reldiff(is, :, :), var0) end if + gsn_panel(wks, plots(plotind, is), plotsperline, pres) + else + plots(0, 0) = contour_map(wks, mmdata, var0) + plots(1, 0) = contour_map(wks, diff, var0) + if (plot_abs_diff) then + plots(2, 0) = contour_map(wks, absdiff, var0) + end if + if (plot_rel_diff) then + plots(3, 0) = contour_map(wks, reldiff, var0) + end if + gsn_panel(wks, plots(plotind, 0), plotsperline, pres) + end if + + ; add meta data to plot (for reporting) + + caption = names(mm_ind) + " values, from top left to bottom right: " \ + + "mean, bias" + if (plot_abs_diff) then + caption = caption + ", absolute error" + end if + if (plot_rel_diff) then + caption = caption + ", relative error" + end if - end do ; is-loop (seasons) - end if ; if write_plots + end do ; is-loop (seasons) ; ########################################### ; # output to netCDF # @@ -410,22 +412,22 @@ begin nc_filename@existing = "append" mmdata@var = var0 + "_mean" - mmdata@long_name = var0 + " (multi-model mean)" + mmdata@long_name = var0 + " " + names(mm_ind) + " (mean)" nc_outfile = ncdf_write(mmdata, nc_filename) diff@var = var0 + "_bias" - diff@long_name = var0 + " (multi-model bias)" + diff@long_name = var0 + " " + names(mm_ind) + " (bias)" nc_outfile = ncdf_write(diff, nc_filename) if (isvar("absdiff")) then absdiff@var = var0 + "_abs_bias" - absdiff@long_name = var0 + " (multi-model absolute bias)" + absdiff@long_name = var0 + " " + names(mm_ind) + " (absolute bias)" nc_outfile = ncdf_write(absdiff, nc_filename) end if if (isvar("reldiff")) then reldiff@var = var0 + "_rel_bias" - reldiff@long_name = var0 + " (multi-model relative bias)" + reldiff@long_name = var0 + " " + names(mm_ind) + " (relative bias)" reldiff@units = reldiff@units nc_outfile = ncdf_write(reldiff, nc_filename) end if @@ -435,8 +437,8 @@ begin ; ------------------------------------------------------------------------ statistics = (/"clim", "diff"/) - domain = ("glob") - plottype = ("geo") + domain = "global" + plottype = "geo" prov_caption = caption + " for variable " + var0 \ + " (" + allseas + "), reference = " + names(ref_ind) + "." diff --git a/esmvaltool/diag_scripts/clouds/clouds_dyn_matrix.ncl b/esmvaltool/diag_scripts/clouds/clouds_dyn_matrix.ncl new file mode 100644 index 0000000000..c18da6fe23 --- /dev/null +++ b/esmvaltool/diag_scripts/clouds/clouds_dyn_matrix.ncl @@ -0,0 +1,837 @@ +; CLOUDS_DYN_MATRIX +; ############################################################################ +; Author: Axel Lauer (DLR, Germany) +; ############################################################################ +; Description +; Calculates mean values of variable z per bin of variable x and y. +; The results are displayed as a matrix. 
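+;
+; A minimal sketch of the binning idea (illustrative only; x1d, y1d and
+; z1d are assumed flattened copies of the input fields, e.g. created
+; with ndtooned, while nbins, bin_x0/bin_x1/bin_y0/bin_y1, result and
+; count are set up in the code below):
+;
+;   do ix = 0, nbins - 1
+;     do iy = 0, nbins - 1
+;       sel = ind((x1d .ge. bin_x0(ix)) .and. (x1d .lt. bin_x1(ix)) .and. \
+;                 (y1d .ge. bin_y0(iy)) .and. (y1d .lt. bin_y1(iy)))
+;       if (.not. any(ismissing(sel))) then
+;         result(imod, ix, iy) = avg(z1d(sel))  ; mean of z in this bin
+;         count(imod, ix, iy) = dimsizes(sel)   ; number of samples
+;       end if
+;       delete(sel)
+;     end do
+;   end do
+;
+; (imod indexes the dataset; bin-edge handling is simplified here.)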
+;
+; Required diag_script_info attributes (diagnostic specific)
+;   var_x: short name of variable on x-axis
+;   var_y: short name of variable on y-axis
+;   var_z: short name of variable to be binned
+;   xmin:  min x value for generating x bins
+;   xmax:  max x value for generating x bins
+;   ymin:  min y value for generating y bins
+;   ymax:  max y value for generating y bins
+;
+; Optional diag_script_info attributes (diagnostic specific)
+;   clevels: explicit values for probability labelbar (array)
+;   filename_add: optionally add this string to plot filenames
+;   nbins: number of equally spaced bins (var_x), default = 100
+;   sidepanels: show/hide side panels
+;   xlabel: label overriding variable name for x-axis (e.g. SST)
+;   ylabel: label overriding variable name for y-axis (e.g. omega500)
+;   zdmin: min z value for labelbar (difference plots)
+;   zdmax: max z value for labelbar (difference plots)
+;   zmin: min z value for labelbar
+;   zmax: max z value for labelbar
+;
+; Required variable attributes (variable specific)
+;   none
+;
+; Optional variable_info attributes (variable specific)
+;   reference_dataset: reference dataset
+;
+; Caveats
+;   none
+;
+; Modification history
+;   20230117-lauer_axel: added support for ICON (code from Manuel)
+;   20220126-lauer_axel: added optional variable labels for x- and y-axes
+;   20211118-lauer_axel: added output of frequency distributions
+;   20210607-lauer_axel: added multi-model-average (= average over all models)
+;                        as an individual plot
+;   20210408-lauer_axel: written
+;
+; ############################################################################
+
+load "$diag_scripts/../interface_scripts/interface.ncl"
+
+load "$diag_scripts/shared/plot/aux_plotting.ncl"
+load "$diag_scripts/shared/statistics.ncl"
+load "$diag_scripts/shared/plot/style.ncl"
+load "$diag_scripts/shared/dataset_selection.ncl"
+
+begin
+
+  enter_msg(DIAG_SCRIPT, "")
+
+  diag = "clouds_dyn_matrix.ncl"
+  variables = get_unique_values(metadata_att_as_array(variable_info, \
+                                                      "short_name"))
+
+  ; Check required diag_script_info attributes
+  exit_if_missing_atts(diag_script_info, (/"var_x", "var_y", "var_z", \
+                                           "xmin", "xmax", "ymin", "ymax"/))
+
+  file_type = output_type()
+
+  ; make sure required variables are available
+  var_x = diag_script_info@var_x
+  var_y = diag_script_info@var_y
+  var_z = diag_script_info@var_z
+
+  ; special case: columnicefrac = clivi / (clivi + lwp)
+  ; note: clwvi is not used since it contains lwp only for some models
+  ;       (by error)
+
+  calcicefrac = new(3, logical)
+  calctcwp = new(3, logical)
+  calcswclt = new(3, logical)
+  calclwclt = new(3, logical)
+
+  calcicefrac = False
+  calctcwp = False
+  calcswclt = False
+  calclwclt = False
+
+  ; if present, replace special variables "columnicefrac" and "totalcwp" in
+  ; list with variables used for variable derivation
+
+  tmplist = (/var_x, var_y, var_z/)
+  checklist = (/"columnicefrac", "totalcwp", "swcreclt", "lwcreclt"/)
+  substlist = (/(/"clivi", "lwp"/), (/"clivi", "lwp"/), (/"swcre", "clt"/), \
+                (/"lwcre", "clt"/)/)
+  varstart = new(3, integer)
+  varstart(0) = 0
+
+  do i = 0, dimsizes(tmplist) - 1
+    if (i .eq. 0) then
+      if (any(checklist .eq. tmplist(i))) then
+        j = ind(checklist .eq. tmplist(i))
+        varlist = substlist(j, :)
+        varstart(1) = 2
+      else
+        varlist = tmplist(i)
+        varstart(1) = 1
+      end if
+    else
+      if (any(checklist .eq. tmplist(i))) then
+        j = ind(checklist .eq. tmplist(i))
+        varlist := array_append_record(varlist, substlist(j, :), 0)
+        if (i .lt.
dimsizes(tmplist) - 1) then + varstart(i + 1) = varstart(i) + 2 + end if + else + varlist := array_append_record(varlist, tmplist(i), 0) + if (i .lt. dimsizes(tmplist) - 1) then + varstart(i + 1) = varstart(i) + 1 + end if + end if + end if + end do + + do i = 0, dimsizes(tmplist) - 1 + if (tmplist(i) .eq. checklist(0)) then + calcicefrac(i) = True + else if (tmplist(i) .eq. checklist(1)) then + calctcwp(i) = True + else if (tmplist(i) .eq. checklist(2)) then + calcswclt(i) = True + else if (tmplist(i) .eq. checklist(3)) then + calclwclt(i) = True + end if + end if + end if + end if + end do + + idx = new(dimsizes(varlist), integer) + + nVAR = dimsizes(varlist) + refname = new(nVAR, string) + + do i = 0, nVAR - 1 + idx(i) = ind(variables .eq. varlist(i)) + end do + + log_info("++++++++++++++++++++++++++++++++++++++++++") + log_info(DIAG_SCRIPT + " (var: " + variables(idx) + ")") + log_info("++++++++++++++++++++++++++++++++++++++++++") + + if (any(ismissing(idx))) then + errstr = "diagnostic " + diag + " requires the following variable(s): " \ + + str_join(varlist, ", ") + error_msg("f", DIAG_SCRIPT, "", errstr) + end if + + ; save input files for writing provenance + + infiles = metadata_att_as_array(input_file_info, "filename") + + ; get reference datasets (if present) and check that number of datasets + ; is equal for each variable + + do i = 0, nVAR - 1 + var = variables(idx(i)) + var_info = select_metadata_by_name(variable_info, var) + var_info := var_info[0] + if (isatt(var_info, "reference_dataset")) then + refname(i) = var_info@reference_dataset + end if + info = select_metadata_by_name(input_file_info, var) + if (i .eq. 0) then + dim_MOD = ListCount(info) + else + dim_test = ListCount(info) + if (dim_test .ne. dim_MOD) then + error_msg("f", DIAG_SCRIPT, "", "number of datasets for variable " \ + + var + " does not match number of datasets for " \ + + variables(idx(0))) + end if + end if + delete(info) + delete(var) + delete(var_info) + end do + + ; Set default values for non-required diag_script_info attributes + + set_default_att(diag_script_info, "filename_add", "") + set_default_att(diag_script_info, "nbins", 100) + set_default_att(diag_script_info, "sidepanels", False) + set_default_att(diag_script_info, "xlabel", var_x) + set_default_att(diag_script_info, "ylabel", var_y) + + if (diag_script_info@filename_add .ne. "") then + filename_add = "_" + diag_script_info@filename_add + else + filename_add = "" + end if + + if (diag_script_info@sidepanels) then + flag_sidepanels = True + else + flag_sidepanels = False + end if + + nbins = toint(diag_script_info@nbins) + + ; make sure path for (mandatory) netcdf output exists + + work_dir = config_user_info@work_dir + ; Create work dir + system("mkdir -p " + work_dir) + +end + +begin + ; ############ + ; # get data # + ; ############ + + info_x = select_metadata_by_name(input_file_info, varlist(varstart(0))) + names_x = metadata_att_as_array(info_x, "dataset") + projects_x = metadata_att_as_array(info_x, "project") + info_y = select_metadata_by_name(input_file_info, varlist(varstart(1))) + names_y = metadata_att_as_array(info_y, "dataset") + projects_y = metadata_att_as_array(info_y, "project") + info_z = select_metadata_by_name(input_file_info, varlist(varstart(2))) + names_z = metadata_att_as_array(info_z, "dataset") + projects_z = metadata_att_as_array(info_z, "project") + + refidx_x = ind(names_x .eq. refname(varstart(0))) + refidx_y = ind(names_y .eq. refname(varstart(1))) + refidx_z = ind(names_z .eq. 
refname(varstart(2))) + + if (ismissing(refidx_x) .or. ismissing(refidx_y) .or. ismissing(refidx_z)) \ + then + refidx_x = -1 + refidx_y = -1 + refidx_z = -1 + end if + + ref_ind = refidx_x + names = names_x + projects = projects_x + + if (ref_ind .ge. 0) then + ; if reference datasets for var_x, var_y, var_z are from different + ; sources + uninames = get_unique_values(refname) + names(ref_ind) = str_join(uninames, "/") + delete(uninames) + end if + + ; find all indices of models w/o MultiModelMean/MultiModelMedian (if present) + + idxmod = get_mod(names_x, projects_x) + + if (idxmod(0) .eq. -1) then ; no model found + flag_multimod = False + elseif (dimsizes(idxmod) .eq. 1) then ; one model found + flag_multimod = False + else ; more than one model found + flag_multimod = True + end if + + result = new((/dim_MOD, nbins, nbins/), float) + count = new((/dim_MOD, nbins, nbins/), float) + bincenter_x = new((/nbins/), float) + bincenter_y = new((/nbins/), float) + bin_x0 = new((/nbins/), float) + bin_x1 = new((/nbins/), float) + bin_y0 = new((/nbins/), float) + bin_y1 = new((/nbins/), float) + + xmax = diag_script_info@xmax + xmin = diag_script_info@xmin + ymax = diag_script_info@ymax + ymin = diag_script_info@ymin + binsize_x = tofloat(xmax - xmin) / nbins + binsize_y = tofloat(ymax - ymin) / nbins + + do n = 0, nbins - 1 + x0 = n * binsize_x + x1 = x0 + binsize_x + bincenter_x(n) = xmin + 0.5 * (x0 + x1) + bin_x0(n) = bincenter_x(n) - 0.5 * binsize_x + bin_x1(n) = bincenter_x(n) + 0.5 * binsize_x + y0 = n * binsize_y + y1 = y0 + binsize_y + bincenter_y(n) = ymin + 0.5 * (y0 + y1) + bin_y0(n) = bincenter_y(n) - 0.5 * binsize_y + bin_y1(n) = bincenter_y(n) + 0.5 * binsize_y + end do + + atts_x = True + atts_y = True + atts_z = True + + do ii = 0, dim_MOD - 1 + atts_x@short_name = varlist(varstart(0)) + atts_y@short_name = varlist(varstart(1)) + atts_z@short_name = varlist(varstart(2)) + + ; reference datasets may have different names + if (ii .eq. refidx_x) then + atts_x@dataset = refname(varstart(0)) + atts_y@dataset = refname(varstart(1)) + atts_z@dataset = refname(varstart(2)) + else ; all other datasets: force same dataset name for var_x, var_y, var_z + atts_x@dataset = names_x(ii) + atts_y@dataset = names_x(ii) + atts_z@dataset = names_x(ii) + end if + + ; read var_x + + info = select_metadata_by_atts(input_file_info, atts_x) + x = read_data(info[0]) + delete(info) + + ; read var_y + + info = select_metadata_by_atts(input_file_info, atts_y) + y = read_data(info[0]) + delete(info) + + ; read var_z + + info = select_metadata_by_atts(input_file_info, atts_z) + z = read_data(info[0]) + delete(info) + + atts_list = NewList("fifo") + + ListAppend(atts_list, atts_x) + ListAppend(atts_list, atts_y) + ListAppend(atts_list, atts_z) + + vars_list = NewList("fifo") + + ListAppend(vars_list, x) + ListAppend(vars_list, y) + ListAppend(vars_list, z) + + do i = 0, 2 + ; read second variable needed to derive icefrac/tcwp/swcreclt/lwcreclt + if (calcicefrac(i) .or. calctcwp(i) .or. calcswclt(i) .or. \ + calclwclt(i)) then + atts_list[i]@short_name = varlist(varstart(i) + 1) + if (ii .eq. refidx_x) then + atts_list[i]@dataset = refname(varstart(i) + 1) + end if + info = select_metadata_by_atts(input_file_info, atts_list[i]) + var2 = read_data(info[0]) + delete(info) + var2 = where(isnan_ieee(var2), var2@_FillValue, var2) + end if + + ; calculate column ice fraction + + if (calcicefrac(i)) then + min_mass = 1.0e-6 + ; filter invalid values (needed for some models) + vars_list[i] = where(vars_list[i] .lt. 
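+      ; The min_mass threshold (1.0e-6, same units as the input fields)
+      ; guards the division that follows: cells with less total condensate
+      ; are set to missing rather than producing unstable fractions.
+      ; Hypothetical numbers: clivi = 2.0e-7, lwp = 1.0e-7 -> masked;
+      ; clivi = 0.03, lwp = 0.01 -> icefrac = 100 * 0.03 / 0.04 = 75%.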
0.0, vars_list[i]@_FillValue, \ + vars_list[i]) + vars_list[i] = where(isnan_ieee(vars_list[i]), \ + vars_list[i]@_FillValue, vars_list[i]) + var2 = where(var2 .lt. 0.0, var2@_FillValue, var2) + mass = vars_list[i] + var2 + mass = where(mass .lt. min_mass, mass@_FillValue, mass) + + ; ice fraction = ice / (ice + lwp) * 100% + vars_list[i] = 100.0 * vars_list[i] / mass + delete(mass) + + vars_list[i]@units = "%" + vars_list[i]@long_name = "cloud ice fraction" + vars_list[i]@var = "columnicefrac" + end if + + ; calculate total cloud water path as sum of liquid water path (lwp) + ; and ice water path (clivi); + ; we do not use the CMOR variable clwvi directly as this variable + ; erroneously contains only cloud liquid water for some models + + if (calctcwp(i)) then + vars_list[i] = vars_list[i] + var2 + vars_list[i]@long_name = "Condensed Water Path" + vars_list[i]@var = "totalcwp" + end if + + ; calculate swcre divided by cloud fraction + + if (calcswclt(i)) then + var2 = where(var2 .le. 1.0e-6, var2@_FillValue, var2) + vars_list[i] = vars_list[i] / var2 * 100.0 + vars_list[i]@long_name = \ + "Shortwave cloud radiative effect / cloud cover" + vars_list[i]@var = "swcreclt" + end if + + ; calculate swcre divided by cloud fraction + + if (calclwclt(i)) then + var2 = where(var2 .le. 1.0e-6, var2@_FillValue, var2) + vars_list[i] = vars_list[i] / var2 * 100.0 + vars_list[i]@long_name = \ + "Longwave cloud radiative effect / cloud cover" + vars_list[i]@var = "lwcreclt" + end if + + if (isvar("var2")) then + delete(var2) + end if + end do + + delete(atts_list) + + ; check dimensions + + dims_x = dimsizes(x) + dims_y = dimsizes(y) + dims_z = dimsizes(z) + + dimerror = False + + if (dimsizes(dims_x) .eq. dimsizes(dims_y)) then + if (any(dims_x - dims_y .ne. 0)) then + dimerror = True + end if + else + dimerror = True + end if + + if (dimsizes(dims_x) .eq. dimsizes(dims_z)) then + if (any(dims_x - dims_z .ne. 0)) then + dimerror = True + end if + else + dimerror = True + end if + + if (dimerror) then + error_msg("f", DIAG_SCRIPT, "", "dimensions of datasets " \ + + atts_x@dataset + " (variable " + var_x + ") and " \ + + atts_y@dataset + " (variable " + var_y + ") and " \ + + atts_z@dataset + " (variable " + var_z + ") do not match.") + end if + + ; check dimensions + + if ((dimsizes(dims_x) .ne. dimsizes(dims_y)) .or. \ + (dimsizes(dims_x) .lt. 3) .or. (dimsizes(dims_x) .gt. 4)) then + dimerror = True + end if + + if ((dimsizes(dims_y) .ne. dimsizes(dims_z)) .or. \ + (dimsizes(dims_y) .lt. 3) .or. (dimsizes(dims_y) .gt. 4)) then + dimerror = True + end if + + if (dimerror) then + error_msg("f", DIAG_SCRIPT, "", "all variables need to have the " + \ + "same number of dimensions (time, [optional: level], " + \ + "latitude, longitude)") + end if + + do i = 0, 2 + dims = getvardims(vars_list[i]) + testidx = ind(dims .eq. "lon") + if (ismissing(testidx)) then + error_msg("f", DIAG_SCRIPT, "", var + ": no lon dimension") + end if + testidx = ind(dims .eq. "lat") + if (ismissing(testidx)) then + error_msg("f", DIAG_SCRIPT, "", var + ": no lat dimension") + end if + testidx = ind(dims .eq. 
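+    ; Scaling used for swcreclt/lwcreclt above: dividing the CRE by clt
+    ; (in %) and multiplying by 100 expresses the radiative effect per
+    ; fully overcast sky; e.g. (hypothetical values) swcre = -50 W m-2 at
+    ; clt = 50% gives -50 / 50 * 100 = -100 W m-2.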
"time") + if (ismissing(testidx)) then + error_msg("f", DIAG_SCRIPT, "", var + ": no time dimension") + end if + delete(dims) + end do + + delete(vars_list) + + delete(dims_x) + delete(dims_y) + delete(dims_z) + delete(testidx) + + ; save attributes long_name and units + long_name = z@long_name + xunits = x@units + yunits = y@units + zunits = z@units + + x1d = ndtooned(x) + delete(x) + y1d = ndtooned(y) + delete(y) + z1d = ndtooned(z) + delete(z) + + ; This approach to grid the data is significantly slower than + ; using "bin_avg" but still fine for moderate grids (e.g. 100x100 grid + ; cells). In NCL 6.6.2, the function bin_avg crashes after being + ; called several times (segmentation violation). This is the reason + ; for choosing this "manual" approach. + + do n = 0, nbins - 1 + selidx = ind((x1d .ge. bin_x0(n)) .and. (x1d .lt. bin_x1(n))) + if (all(ismissing(selidx))) then + delete(selidx) + result(ii, :, n) = result@_FillValue + count(ii, :, n) = count@_FillValue + continue + end if + xsel = x1d(selidx) + ysel = y1d(selidx) + zsel = z1d(selidx) + delete(selidx) + do m = 0, nbins - 1 + selidx = ind((ysel .ge. bin_y0(m)) .and. (ysel .lt. bin_y1(m))) + if (.not.all(ismissing(selidx))) then + result(ii, m, n) = avg(zsel(selidx)) + count(ii, m, n) = num(.not.ismissing(zsel(selidx))) + else + result(ii, m, n) = result@_FillValue + count(ii, m, n) = count@_FillValue + end if + delete(selidx) + end do + delete(xsel) + delete(ysel) + delete(zsel) + end do + +; r = bin_avg(x1d, y1d, z1d, bincenter_x, bincenter_y, False) +; result(ii, :, :) = r(0, :, :) + + delete(x1d) + delete(y1d) + delete(z1d) +; delete(r) + + count(ii, :, :) = count(ii, :, :) / sum(count(ii, :, :)) * 1.0e2 + + end do ; ii-loop (models) + + ; ########################################### + ; # netCDF output # + ; ########################################### + + nc_filename = work_dir + "clouds_scatter_" + var_x + "_" + var_y + "_" + \ + var_z + filename_add + ".nc" + nc_filename2 = work_dir + "clouds_scatter_prob_" + var_x + "_" + var_y + \ + filename_add + ".nc" + + result!0 = "model" + result!1 = "bin_y" + result!2 = "bin_x" + + result&model = str_sub_str(names, "/", "-") + result&bin_y = bincenter_y + result&bin_x = bincenter_x + + result@diag_script = (/DIAG_SCRIPT/) + result@var = var_z + result@var_long_name = long_name + result@var_units = zunits + result@_FillValue = 1.0e20 + + copy_VarCoords(result, count) + count@diag_script = (/DIAG_SCRIPT/) + count@var = "count" + count@var_units = "1e-3 %" + + nc_outfile = ncdf_write(result, nc_filename) + nc_outfile2 = ncdf_write(count, nc_filename2) + + ; ########################################### + ; # create the plots # + ; ########################################### + + nplots = dim_MOD + if (flag_multimod) then + nplots = nplots + 1 + end if + + plots = new((/nplots, 2/), graphic) + baseplots = new(nplots, graphic) + xaddplots = new(nplots, graphic) + yaddplots = new(nplots, graphic) + + dplots = new(nplots, graphic) + + plots_c = new((/nplots, 2/), graphic) + baseplots_c = new(nplots, graphic) + xaddplots_c = new(nplots, graphic) + yaddplots_c = new(nplots, graphic) + + res = True + res@cnFillOn = True ; color Fill + res@cnFillMode = "RasterFill" ; Raster Mode + res@cnLinesOn = False ; Turn off contour lines + res@tiXAxisString = diag_script_info@xlabel + " (" + xunits + ")" + res@tiYAxisString = diag_script_info@ylabel + " (" + yunits + ")" + res@lbOrientation = "Vertical" + res@lbTitleString = zunits + res@lbTitlePosition = "Right" + res@lbTitleFontHeightF = 0.0275 + 
res@lbLabelFontHeightF = 0.0275 + res@tmXMajorGrid = True + res@tmXMinorGrid = True + res@tmYMajorGrid = True + res@tmYMinorGrid = True + + dres = res + + if (flag_sidepanels) then + res@pmLabelBarOrthogonalPosF = 0.35 + end if + + cres = res + cres@lbTitleString = count@var_units + + if (isatt(diag_script_info, "zmin") .and. isatt(diag_script_info, "zmax")) \ + then + res@cnLevelSelectionMode = "ExplicitLevels" + res@cnLevels = fspan(diag_script_info@zmin, diag_script_info@zmax, 19) + end if + + if (isatt(diag_script_info, "zdmin") .and. \ + isatt(diag_script_info, "zdmax")) then + dres@cnLevelSelectionMode = "ExplicitLevels" + dres@cnLevels = fspan(diag_script_info@zdmin, diag_script_info@zdmax, 19) + end if + + if (isatt(diag_script_info, "clevels")) then + cres@cnLevelSelectionMode = "ExplicitLevels" + cres@cnLevels = diag_script_info@clevels + end if + + wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_dyn_matrix_" + \ + var_x + "_" + var_y + "_" + var_z + filename_add) + dwks = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_dyn_matrix_bias_" + \ + var_x + "_" + var_y + "_" + var_z + filename_add) + cwks = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_dyn_matrix_prob_" + \ + var_x + "_" + var_y + filename_add) + + res@gsnDraw = False ; don't draw yet + res@gsnFrame = False ; don't advance frame yet + + cres@gsnDraw = False ; don't draw yet + cres@gsnFrame = False ; don't advance frame yet + + xyres1 = True ; xy plot mods desired + xyres1@tmXBMinorOn = False ; no minor tickmarks + xyres1@tmXBLabelStride = 2 ; label stride + xyres1@gsnDraw = False ; don't draw yet + xyres1@gsnFrame = False ; don't advance frame yet + xyres1@xyLineThicknessF = 2.0 + xyres1@tmEqualizeXYSizes = True + xyres1@tmXBLabelFontHeightF = 0.0275 + + xyres2 = xyres1 + + xyres1@vpHeightF = .20 ; set width of second plot + xyres1@trXMinF = xmin + xyres1@trXMaxF = xmax + xyres1@tiXAxisString = diag_script_info@xlabel + " (" + xunits + ")" + + xyres2@vpWidthF = .20 ; set width of second plot + xyres2@trYMinF = ymin + xyres2@trYMaxF = ymax + xyres2@tiXAxisSide = "Top" + xyres2@tmXTLabelsOn = True + xyres2@tmXBLabelsOn = False + + do ii = 0, nplots - 1 + if (ii .lt. dim_MOD) then + plotdata = result(ii, :, :) + countdata = count(ii, :, :) + plotname = names(ii) + else + plotdata = dim_avg_n_Wrap(result(idxmod, :, :), 0) + countdata = dim_avg_n_Wrap(count(idxmod, :, :), 0) + plotname = "Multi-model average" + end if + + countdata = countdata * 1.0e3 + + res@tiMainString = plotname + res@tiMainFontHeightF = 0.03 + + if (ii .eq. ref_ind) then + if (refname(varstart(0)) .eq. refname(varstart(1))) then + plotname = refname(varstart(0)) + else + plotname = refname(varstart(0)) + "/" + refname(varstart(1)) + end if + end if + + cres@tiMainString = plotname + + ; z-values + + baseplots(ii) = gsn_csm_contour(wks, plotdata, res) + + if (flag_sidepanels) then + xyres1@tiYAxisString = zunits + xyres2@tiXAxisString = zunits + + xaddplots(ii) = gsn_csm_xy(wks, result&bin_x, \ + dim_avg_n(plotdata, 0), xyres1) + yaddplots(ii) = gsn_csm_xy(wks, dim_avg_n(plotdata, 1), \ + result&bin_y, xyres2) + xyres1@gsnAttachPlotsXAxis = True + plots(ii, 0) = gsn_attach_plots(baseplots(ii), xaddplots(ii), \ + res, xyres1) + delete(xyres1@gsnAttachPlotsXAxis) + plots(ii, 1) = gsn_attach_plots(baseplots(ii), yaddplots(ii), \ + res, xyres2) + end if + + draw(baseplots(ii)) + frame(wks) + + ; differences z-values + + if (isdefined("dresplot")) then + delete(dresplot) + end if + if (ii .eq. 
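+    ; Labelbar note: fspan(zmin, zmax, 19) above produces 19 evenly spaced
+    ; contour levels (20 color boxes); with hypothetical zmin = 0 and
+    ; zmax = 90 the levels are 0, 5, 10, ..., 90, spaced by
+    ; (zmax - zmin) / 18.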
ref_ind) then + dresplot = res + dresplot@tiMainString = "REF" + diff = plotdata + else + dresplot = dres + dresplot@tiMainString = plotname + " - REF" + diff = plotdata - result(ref_ind, :, :) + end if + copy_VarCoords(plotdata, diff) + dplots(ii) = gsn_csm_contour(dwks, diff, dresplot) + + ; probability values (%) + + xyres1@tiYAxisString = count@var_units + xyres2@tiXAxisString = count@var_units + + baseplots_c(ii) = gsn_csm_contour(cwks, countdata, cres) + + if (flag_sidepanels) then + xaddplots_c(ii) = gsn_csm_xy(cwks, count&bin_x, \ + dim_avg_n(countdata, 0), xyres1) + yaddplots_c(ii) = gsn_csm_xy(cwks, dim_avg_n(countdata, 1), \ + count&bin_y, xyres2) + + xyres1@gsnAttachPlotsXAxis = True + plots_c(ii, 0) = gsn_attach_plots(baseplots_c(ii), \ + xaddplots_c(ii), cres, xyres1) + delete(xyres1@gsnAttachPlotsXAxis) + plots_c(ii, 1) = gsn_attach_plots(baseplots_c(ii), \ + yaddplots_c(ii), cres, xyres2) + end if + + draw(baseplots_c(ii)) + frame(cwks) + + delete(plotdata) + delete(countdata) + end do + +; pres = True ; needed to override +; ; panelling defaults +; pres@gsnPanelCenter = False +; +; pres@gsnPanelFigureStrings = names +; pres@gsnPanelFigureStringsFontHeightF = min((/0.008, 0.008 * 6.0 \ +; / tofloat((dim_MOD + 1) / 2)/)) +; pres@lbLabelFontHeightF = min((/0.01, 0.01 * 6.0 \ +; / tofloat((dim_MOD + 1) / 2)/)) +; outfile = panelling(wks, baseplots, (dim_MOD + 3) / 4, 4, pres) +; doutfile = panelling(dwks, dplots, (dim_MOD + 3) / 4, 4, pres) + + outfile = wks@fullname + doutfile = dwks@fullname + coutfile = cwks@fullname + + log_info("Wrote " + outfile) + log_info("Wrote " + doutfile) + log_info("Wrote " + coutfile) + + ; ========================================================================== + + ; ---------------------------------------------------------------------- + ; write provenance to netcdf output (and plot file) + ; ---------------------------------------------------------------------- + + statistics = (/"clim", "mean"/) + domain = "reg" + plottype = "scatter" + caption = "Scatterplot of " + var_x + " (x) vs. " + var_y + " (y)." + log_provenance(nc_outfile, outfile, caption, statistics, \ + domain, plottype, "", "", infiles) + + ; ---------------------------------------------------------------------- + ; write relative differences of the probabilities (count) to netcdf + ; ---------------------------------------------------------------------- + + ref = where(count(ref_ind, :, :) .gt. 1.0e-3, count(ref_ind, :, :), \ + count@_FillValue) + if (dimsizes(idxmod) .gt. 1) then + modavg = dim_avg_n_Wrap(count(idxmod, :, :), 0) + else + modavg = count(idxmod, :, :) + end if + modavg = where(modavg .gt. 
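+  ; The 1.0e-3 floor applied to ref above (and to modavg here) excludes
+  ; nearly empty bins before the relative difference is formed, avoiding
+  ; division by vanishing reference counts. Hypothetical numbers:
+  ; modavg = 2.0, ref = 1.6 -> (2.0 - 1.6) / 1.6 * 100 = 25%.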
1.0e-3, modavg, modavg@_FillValue) + + diff = modavg + diff = (diff - ref) / ref * 100.0 + + nc_filename3 = work_dir + "clouds_scatter_prob_" + var_x + "_" + var_y + \ + "_mmm-ref-reldiff" + filename_add + ".nc" + + diff@var_long_name = "(count(MMM) - count(REF)) / count(REF) * 100%" + diff@var_units = "%" + + nc_outfile3 = ncdf_write(diff, nc_filename3) + + leave_msg(DIAG_SCRIPT, "") + +end diff --git a/esmvaltool/diag_scripts/clouds/clouds_interannual.ncl b/esmvaltool/diag_scripts/clouds/clouds_interannual.ncl index de9b751eeb..a3e318c556 100644 --- a/esmvaltool/diag_scripts/clouds/clouds_interannual.ncl +++ b/esmvaltool/diag_scripts/clouds/clouds_interannual.ncl @@ -13,10 +13,12 @@ ; ; Optional diag_script_info attributes (diagnostic specific) ; colormap: e.g., WhiteBlueGreenYellowRed, rainbow +; epsilon: "epsilon" value to be replaced with missing values ; explicit_cn_levels: use these contour levels for plotting -; extrafiles: write plots for individual models to separate files -; (True, False) +; filename_add: optionally add this string to plot filesnames ; projection: map projection, e.g., Mollweide, Mercator +; var: short_name of variable to process (default = "" - use +; first variable in variable list) ; ; Required variable_info attributes (variable specific) ; none @@ -29,15 +31,21 @@ ; none ; ; Modification history -; 20190220-A_laue_ax: added provenance to output (v2.0) -; 20181120-A_laue_ax: adapted code to multi-variable capable framework -; 20180923-A_laue_ax: added writing of results to netcdf -; 20180611-A_laue_ax: code rewritten for ESMValTool v2.0 -; 20170620-A_laue_ax: added tags for reporting -; 20160901-A_laue_ax: added regridding option 1 deg x 1 deg -; 20151027-A_laue_ax: moved call to 'write_references' to the beginning -; of the code -; 20150415-A-laue_ax: written. +; 20230117-lauer_axel: added support for ICON (code from Manuel) +; 20211006-lauer_axel: removed write_plots +; 20210413-lauer_axel: added multi-obs mean and average over all models +; as individual plots +; 20210407-lauer_axel: added option to speficfy variable if more than one +; variable is present +; 20190220-lauer_axel: added provenance to output (v2.0) +; 20181120-lauer_axel: adapted code to multi-variable capable framework +; 20180923-lauer_axel: added writing of results to netcdf +; 20180611-lauer_axel: code rewritten for ESMValTool v2.0 +; 20170620-lauer_axel: added tags for reporting +; 20160901-lauer_axel: added regridding option 1 deg x 1 deg +; 20151027-lauer_axel: moved call to 'write_references' to the beginning +; of the code +; 20150415-lauer_axel: written. ; ; ############################################################################ @@ -54,41 +62,52 @@ load "$diag_scripts/shared/plot/aux_plotting.ncl" load "$diag_scripts/shared/statistics.ncl" load "$diag_scripts/shared/plot/style.ncl" load "$diag_scripts/shared/plot/contour_maps.ncl" +load "$diag_scripts/shared/dataset_selection.ncl" begin enter_msg(DIAG_SCRIPT, "") - var0 = variable_info[0]@short_name + set_default_att(diag_script_info, "colormap", "WhiteBlueGreenYellowRed") + set_default_att(diag_script_info, "epsilon", 1.0e-4) + set_default_att(diag_script_info, "filename_add", "") + set_default_att(diag_script_info, "projection", "CylindricalEquidistant") + set_default_att(diag_script_info, "var", "") + + if (diag_script_info@var .eq. "") then + var0 = variable_info[0]@short_name + else + var0 = diag_script_info@var + end if + + variables = metadata_att_as_array(variable_info, "short_name") + if (.not. any(variables .eq. 
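+  ; Hypothetical recipe excerpt for the "var" attribute documented above,
+  ; selecting one of several variables provided to this diagnostic:
+  ;   scripts:
+  ;     interannual:
+  ;       script: clouds/clouds_interannual.ncl
+  ;       var: lwp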
var0)) then + errstr = "diagnostic " + diag + " requires the following variable: " + var0 + error_msg("f", DIAG_SCRIPT, "", errstr) + end if + + var0_info = select_metadata_by_name(variable_info, var0) + var0_info := var0_info[0] info0 = select_metadata_by_name(input_file_info, var0) dim_MOD = ListCount(info0) - if (isatt(variable_info[0], "reference_dataset")) then - refname = variable_info[0]@reference_dataset + + if (isatt(var0_info, "reference_dataset")) then + refname = var0_info@reference_dataset end if + names = metadata_att_as_array(info0, "dataset") + projects = metadata_att_as_array(info0, "project") infiles = metadata_att_as_array(info0, "filename") log_info("++++++++++++++++++++++++++++++++++++++++++") log_info(DIAG_SCRIPT + " (var: " + var0 + ")") log_info("++++++++++++++++++++++++++++++++++++++++++") - set_default_att(diag_script_info, "colormap", "WhiteBlueGreenYellowRed") - set_default_att(diag_script_info, "extrafiles", False) - set_default_att(diag_script_info, "projection", "CylindricalEquidistant") - - extrafiles = diag_script_info@extrafiles - ; make sure path for (mandatory) netcdf output exists work_dir = config_user_info@work_dir + "/" ; Create work dir system("mkdir -p " + work_dir) - if (config_user_info@write_plots.eq."True") then - write_plots = True - else - write_plots = False - end if - ; get multi-model mean index (if present) mm_ind = ind(names .eq. "MultiModelMean") @@ -104,6 +123,34 @@ begin ref_ind = ind(names .eq. refname) end if + if (diag_script_info@filename_add .ne. "") then + filename_add = "_" + diag_script_info@filename_add + else + filename_add = "" + end if + + ; find indices of all OBS and obs4mips datasets (including "native6" ERA5) + + idxobs = get_obs(names, projects, "") + + if (idxobs(0) .eq. -1) then + flag_multiobs = False + else + flag_multiobs = True + end if + + ; find all indices of models w/o MultiModelMean/MultiModelMedian (if present) + + idxmod = get_mod(names, projects) + + if (idxmod(0) .eq. -1) then ; no model found + flag_multimod = False + elseif (dimsizes(idxmod) .eq. 1) then ; one model found + flag_multimod = False + else ; more than one model found + flag_multimod = True + end if + end begin @@ -159,6 +206,34 @@ begin A0@units = "mm day-1" end if + ; check if time dimension is a multiple of 12 + ; if not, use only complete years + + idx = ind(dims .eq. "time") + if (.not.ismissing(idx)) then + dimsize = dimsizes(A0) + nt = dimsize(idx) + if (mod(nt, 12) .ne. 0) then + if (nt .lt. 12) then + error_msg("f", DIAG_SCRIPT, "", "time dimension < 12 (" \ + + names(imod) + ")") + else + tfull = nt - mod(nt, 12) + log_info("warning, using only first " + tostring(tfull) \ + + " time steps (" + names(imod) + ")") + if (dimsizes(dims) .eq. 2) then + A0 := A0(0:tfull - 1, :) + else if (dimsizes(dims) .eq. 3) then + A0 := A0(0:tfull - 1, :, :) + else + error_msg("f", DIAG_SCRIPT, "", "number of dimensions " \ + + "must be 2 or 3 (" + names(imod) + ")") + end if + end if + end if + end if + end if + ; subtract climatological seasonal cycle from time series if (isvar("timeseries")) then @@ -179,9 +254,67 @@ begin end if mean = time_operations(A0, -1, -1, "average", "annualclim", True) ; replace "epsilon" values with missing value - mean = where(abs(mean).lt.1.0e-4, mean@_FillValue, mean) + mean = where(abs(mean).lt.diag_script_info@epsilon, mean@_FillValue, mean) data1 = 100.0 * data1 / abs(mean) + ; create arrays for multi-obs and multi-model averages (if requested) + + if (ii .eq. 
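+    ; data1 now holds the interannual variability relative to the local
+    ; climatological mean, in percent; e.g. (hypothetical values) a
+    ; standard deviation of 0.5 mm day-1 over a mean of 2.5 mm day-1
+    ; gives 100 * 0.5 / 2.5 = 20%. Cells with |mean| < epsilon (default
+    ; 1.0e-4) were masked beforehand to avoid near-zero denominators.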
0) then + multidim = dimsizes(data1) + if (flag_multiobs) then + newdims = array_append_record(dimsizes(idxobs), multidim, 0) + multiobs_all = new(newdims, float) + end if + if (flag_multimod) then + newdims = array_append_record(dimsizes(idxmod), multidim, 0) + multimod_all = new(newdims, float) + end if + end if + + ; calculate multi-obs and multi-model averages (if requested) + + if (flag_multiobs) then + iidx = ind(idxobs .eq. imod) + if (.not.ismissing(iidx)) then + dims1 = dimsizes(data1) + dimerror = False + if (dimsizes(multidim) .eq. dimsizes(dims1)) then + if (any(multidim - dims1 .ne. 0)) then + dimerror = True + end if + else + dimerror = True + end if + if (dimerror) then + error_msg("f", DIAG_SCRIPT, "", "dimensions of datasets " \ + + "do not match. Use preprocessor to regrid data to " \ + + "common grid.") + end if + multiobs_all(iidx, :, :) = data1 + end if + end if + + if (flag_multimod) then + iidx = ind(idxmod .eq. imod) + if (.not.ismissing(iidx)) then + dims1 = dimsizes(data1) + dimerror = False + if (dimsizes(multidim) .eq. dimsizes(dims1)) then + if (any(multidim - dims1 .ne. 0)) then + dimerror = True + end if + else + dimerror = True + end if + if (dimerror) then + error_msg("f", DIAG_SCRIPT, "", "dimensions of datasets " \ + + "do not match. Use preprocessor to regrid data to " \ + + "common grid.") + end if + multimod_all(iidx, :, :) = data1 + end if + end if + ; ########################################### ; # Style dependent annotation # ; ########################################### @@ -194,10 +327,11 @@ begin ; # plot ressources # ; ########################################### - data1@res_gsnMaximize = True ; use full page for the plot - data1@res_cnFillOn = True ; color plot desired - data1@res_cnLineLabelsOn = False ; contour lines - data1@res_cnLinesOn = False + res = True + + res@cnFillOn = True ; color plot desired + res@cnLineLabelsOn = False ; contour lines + res@cnLinesOn = False ; colors ; http://www.ncl.ucar.edu/Document/Graphics/color_table_gallery.shtml @@ -209,68 +343,67 @@ begin ; annotation - data1@res_tiMainOn = False - data1@res_gsnLeftStringFontHeightF = 0.015 - data1@res_cnLevelSelectionMode = "ExplicitLevels" + res@tiMainString = names(imod) + res@tiMainFontHeightF = 0.025 + res@gsnStringFontHeightF = 0.02 + res@cnLevelSelectionMode = "ExplicitLevels" if (diag_script_info@projection.eq."Robinson") then - data1@res_mpPerimOn = False ; turn off perimeter around map - data1@res_mpGridLineColor = -1 - data1@res_mpGridAndLimbOn = True + res@mpPerimOn = False ; turn off perimeter around map + res@mpGridLineColor = -1 + res@mpGridAndLimbOn = True end if - data1@res_mpOutlineOn = True - data1@res_mpFillOn = False + res@mpOutlineOn = True + res@mpFillOn = False ; variable specific plotting settings - if (any((/"clt"/).eq.var0)) then - data1@res_cnLevels = ispan(5, 50, 5) + if (any((/"clt", "prw"/).eq.var0)) then + res@cnLevels = ispan(5, 50, 5) else - data1@res_cnLevels = ispan(5, 100, 5) - end if - - if (var0.eq."lwp") then - data1@res_mpOutlineOn = False - data1@res_mpFillOn = True - data1@res_mpLandFillColor = "Black" -; delete(pal) -; pal = read_colormap_file("$diag_scripts/shared/plot/rgb/qcm3.rgb") + res@cnLevels = ispan(5, 100, 5) end if - nboxes = dimsizes(data1@res_cnLevels) + nboxes = dimsizes(res@cnLevels) clen = dimsizes(pal) stride = max((/1, ((clen(0)-1) - 2) / nboxes /)) fill_colors = ispan(2, clen(0) - 1, stride) - data1@res_cnFillColors = fill_colors + res@cnFillColors = fill_colors + + res@gsnRightString = "" - 
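+    ; The common-grid requirement above is typically satisfied in the
+    ; recipe's preprocessor section, e.g. (hypothetical excerpt):
+    ;   preprocessors:
+    ;     clim:
+    ;       regrid:
+    ;         target_grid: 2x2
+    ;         scheme: linear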
data1@res_lbLabelBarOn = False - data1@res_gsnRightString = "" + gavg = area_operations(data1, -90., 90., 0., 360., "average", True) + if (.not.ismissing(gavg)) then + res@gsnLeftString = "mean = " + sprintf("%6.3f", gavg) + else + res@gsnLeftString = "" + end if ; map attributes - data1@res_mpFillDrawOrder = "PostDraw" ; draw map last - data1@res_cnMissingValFillColor = "Gray" + res@mpFillDrawOrder = "PostDraw" ; draw map last + res@cnMissingValFillColor = "Gray" ; no tickmarks and no labels - data1@res_tmYLLabelsOn = False - data1@res_tmYLOn = False - data1@res_tmYRLabelsOn = False - data1@res_tmYROn = False - data1@res_tmXBLabelsOn = False - data1@res_tmXBOn = False - data1@res_tmXTLabelsOn = False - data1@res_tmXTOn = False - data1@res_cnInfoLabelOn = False ; turn off cn info label + res@tmYLLabelsOn = False + res@tmYLOn = False + res@tmYRLabelsOn = False + res@tmYROn = False + res@tmXBLabelsOn = False + res@tmXBOn = False + res@tmXTLabelsOn = False + res@tmXTOn = False + res@cnInfoLabelOn = False ; turn off cn info label - data1@res_mpProjection = diag_script_info@projection + res@mpProjection = diag_script_info@projection ; set explicit contour levels if (isatt(diag_script_info, "explicit_cn_levels")) then - data1@res_cnLevelSelectionMode = "ExplicitLevels" - data1@res_cnLevels = diag_script_info@explicit_cn_levels + res@cnLevelSelectionMode = "ExplicitLevels" + res@cnLevels = diag_script_info@explicit_cn_levels end if ; ########################################### @@ -287,154 +420,151 @@ begin data1@diag_script = (/DIAG_SCRIPT/) end if data1@var = var0 ; Overwrite existing entry - if (isatt(variable_info[0], "long_name")) then - data1@var_long_name = variable_info[0]@long_name + if (isatt(var0_info, "long_name")) then + data1@long_name = var0_info@long_name else - data1@var_long_name = var0 + data1@long_name = var0 end if - data1@var_units = "%" + data1@units = "%" ; copy attributes for netCDF output - data1@long_name = "interannual variability " + data1@var_long_name - data1@units = data1@var_units + data1@long_name = "interannual variability " + data1@long_name ; ########################################### ; # create the plot # ; ########################################### - data1@res_gsnFrame = False ; don't advance frame - data1@res_gsnDraw = False + res@lbTitleString = data1@units + res@lbTitlePosition = "Bottom" + res@lbTitleFontHeightF = 0.02 + res@lbLabelFontHeightF = 0.02 ; function in aux_plotting.ncl if (ii.eq.0) then - if (.not.extrafiles) then - wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_interannual_" \ - + var0) - end if -; drawNDCGrid(wks) ; debugging option - end if - - if (extrafiles) then - if (isvar("wks")) then - delete(wks) - end if wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_interannual_" \ - + var0 + "_" + annots(imod)) + + var0 + filename_add) end if - maps(ii) = contour_map(wks, data1, var0) - - if (extrafiles) then - if (write_plots) then ; add labels - txres = True - txres@txFontHeightF = 0.03 - txres@txJust = "BottomRight" - txres@txPerimOn = True - txres@txBackgroundFillColor = "white" - text = gsn_add_text(wks, maps(ii), annots(imod), 170, -80, txres) - draw(maps(ii)) - frame(wks) - plotfile = maps@outfile - else - plotfile = "" - end if - - ; ########################################## - ; # output each dataset to separate netCDF # - ; ########################################## + maps(ii) = gsn_csm_contour_map(wks, data1, res) - nc_filename = work_dir + "clouds_interannual_" + var0 + "_" \ - + annots(imod) + ".nc" - nc_outfile = 
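+    ; All datasets are collected in one netCDF file here: the attribute
+    ; nc_filename@existing = "append" makes ncdf_write append to an
+    ; existing file, and renaming data1@var per dataset (e.g.
+    ; "clt_var_CESM2" for a hypothetical dataset CESM2) keeps one
+    ; variable per dataset.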
ncdf_write(data1, nc_filename) + ; ######################################### + ; # output all datasets to common netCDF # + ; ######################################### - ; ------------------------------------------------------------------- - ; write provenance info - ; ------------------------------------------------------------------- + nc_filename = work_dir + "clouds_interannual_" + var0 + ".nc" + nc_filename@existing = "append" + data1@var = var0 + "_var_" + annots(imod) + nc_outfile = ncdf_write(data1, nc_filename) - statistics = (/"clim", "var"/) - domain = ("glob") - plottype = ("geo") - climofile = infiles(imod) - caption = "Interannual variability of variable " + var0 + \ - " from dataset " + annots(imod) + "." - - log_provenance(nc_outfile, plotfile, caption, statistics, domain, \ - plottype, "", "", climofile) - - else ; extrafiles .eq. false + end do ; ii-loop (datasets) - ; ######################################### - ; # output all datasets to common netCDF # - ; ######################################### + ; create panel plot + + pres = True ; needed to override + ; panelling defaults + ; print dataset name on each panel + pres@gsnPanelFigureStrings = annots(ind_all_sorted) + pres@gsnPanelFigureStringsFontHeightF = 0.007 + pres@lbLabelFontHeightF = 0.01 + pres@lbAutoManage = False + pres@lbTopMarginF = 0.1 + pres@lbPerimOn = False ; draw line around label + ; bar area + pres@gsnPanelCenter = False + pres@pmLabelBarOrthogonalPosF = -0.01 ; shift label bar a bit to + ; the bottom + outfile = panelling(wks, maps, (dim_MOD + 3) / 4, 4, pres) + + ; plot multi-obs and multi-model average (if requested) + + if (flag_multiobs) then + multiobs = dim_avg_n(multiobs_all, 0) + delete(multiobs_all) + copy_VarMeta(data1, multiobs) + gavg = area_operations(multiobs, -90., 90., 0., 360., "average", True) + res@gsnLeftString = "mean = " + sprintf("%6.3f", gavg) + res@tiMainString = "Multi-obs average" + map_multiobs = gsn_csm_contour_map(wks, multiobs, res) + end if - nc_filename = work_dir + "clouds_interannual_" + var0 + ".nc" - nc_filename@existing = "append" - data1@var = var0 + "_var_" + annots(imod) - nc_outfile = ncdf_write(data1, nc_filename) + if (flag_multimod) then + multimod = dim_avg_n(multimod_all, 0) + delete(multimod_all) + copy_VarMeta(data1, multimod) + + mask1 = multiobs + mask2 = multimod + mask1 = where(.not.ismissing(mask1), 0., mask1@_FillValue) + mask2 = where(.not.ismissing(mask2), 0., mask2@_FillValue) + amask = mask1 + mask2 + delete(mask1) + delete(mask2) + refmasked = multiobs + refmasked = refmasked + amask + datmasked = multimod + datmasked = datmasked + amask + delete(amask) + corr = calculate_metric(refmasked, datmasked, "correlation") + gavg = area_operations(datmasked, -90., 90., 0., 360., \ + "average", True) + rmsd = calculate_metric(refmasked, datmasked, "RMSD") + +; system("rm debug.nc") +; debugfile = addfile("debug.nc", "c") +; debugfile->refmasked = refmasked +; debugfile->datmasked = datmasked + + delete(refmasked) + delete(datmasked) + + res@gsnLeftString = "mean = " + sprintf("%6.3f", gavg) + res@gsnCenterString = "corr = " + sprintf("%6.3f", corr) + res@gsnRightString = "rmsd = " + sprintf("%6.3f", rmsd) + +; gavg = area_operations(multimod, -90., 90., 0., 360., "average", True) +; res@gsnLeftString = "mean = " + sprintf("%6.3f", gavg) + res@tiMainString = "Multi-model average" + map_multimod = gsn_csm_contour_map(wks, multimod, res) - end if ; if extrafiles - end do ; ii-loop (datasets) + end if - if (write_plots) then - pres = True ; needed to 
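+  ; Masking logic above: mask1/mask2 are 0 where multiobs/multimod are
+  ; valid and _FillValue elsewhere, so amask = mask1 + mask2 is 0 only
+  ; where both fields are valid; adding amask to a field therefore
+  ; restricts it to the common valid domain before the correlation and
+  ; RMSD against the multi-obs mean are computed.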
override - ; panelling defaults - pres@gsnPanelLabelBar = True ; add common colorbar - ; print dataset name on each panel - pres@gsnPanelFigureStrings = annots(ind_all_sorted) - pres@gsnPanelFigureStringsFontHeightF = 0.007 - pres@lbLabelFontHeightF = 0.01 - pres@lbAutoManage = False - pres@lbTopMarginF = 0.1 - pres@lbTitleOn = True - pres@lbTitleFontHeightF = 0.009 - pres@lbTitlePosition = "Bottom" - pres@lbTitleString = "~F8~s~F21~" + var0 + " (%)" - pres@lbPerimOn = False ; draw line around label - ; bar area - pres@gsnPanelCenter = False - pres@pmLabelBarOrthogonalPosF = -0.01 ; shift label bar a bit to - ; the bottom -; pres@gsnPanelDebug = True - - if (extrafiles) then - if (isvar("wks")) then - delete(wks) - end if - ; plot legend - wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_interannual_" \ - + var0 + "_legend") - pres@lbMonoFillPattern = True - pres@lbOrientation = "Horizontal" - pres@vpWidthF = 0.7 - pres@vpHeightF = 0.1 - pres@lbLabelFontHeightF = 0.015 - pres@lbLabelAlignment = "InteriorEdges" - pres@lbTitleFontHeightF = 0.015 - labels = tostring(data1@res_cnLevels) - pres@lbFillColors = fill_colors - gsn_labelbar_ndc(wks, nboxes, labels, 0.1, 0.9, pres) - else - outfile = panelling(wks, maps, (dim_MOD + 3) / 4, 4, pres) - log_info(" Wrote " + outfile) - end if - else - outfile = "" - end if ; if write_plots + log_info(" Wrote " + outfile) ; ------------------------------------------------------------------------ ; write provenance to common netcdf and plot file ; ------------------------------------------------------------------------ - if (.not. extrafiles) then - statistics = (/"clim", "var"/) - domain = ("glob") - plottype = ("geo") - caption = "Interannual variability of variable " + var0 + "." - - log_provenance(nc_outfile, outfile, caption, statistics, domain, \ - plottype, "", "", infiles) + statistics = (/"clim", "var"/) + domain = "global" + plottype = "geo" + caption = "Interannual variability of variable " + var0 + "." + log_provenance(nc_outfile, outfile, caption, statistics, domain, \ + plottype, "", "", infiles) + + ; ---------------------------------------------------------------------- + ; write differences of the multi-model and multi-obs means to netcdf + ; ---------------------------------------------------------------------- + + if (flag_multimod .and. 
flag_multiobs) then + diff = multimod + diff = diff - multiobs + diff@var = "MMM-MOBSMEAN" + + nc_filename2 = work_dir + "clouds_interannual_" + var0 + \ + "_mmm-mobsmean-absdiff.nc" + nc_filename2@existing = "append" + + diff@var_long_name = "MMM - MOBSMEAN" + multimod@var = "MMM" + multimod@var_long_name = "multi-model mean (MMM)" + multiobs@var = "MOBSMEAN" + multiobs@var_long_name = "multi-obs mean (MOBSMEAN)" + + nc_outfile2 = ncdf_write(diff, nc_filename2) + nc_outfile2 = ncdf_write(multimod, nc_filename2) + nc_outfile2 = ncdf_write(multiobs, nc_filename2) end if leave_msg(DIAG_SCRIPT, "") diff --git a/esmvaltool/diag_scripts/clouds/clouds_ipcc.ncl b/esmvaltool/diag_scripts/clouds/clouds_ipcc.ncl index 9d138a2840..101ca0a080 100644 --- a/esmvaltool/diag_scripts/clouds/clouds_ipcc.ncl +++ b/esmvaltool/diag_scripts/clouds/clouds_ipcc.ncl @@ -17,6 +17,7 @@ ; ; Optional diag_script_info attributes (diagnostic specific) ; explicit_cn_levels: contour levels +; highlight_dataset: name of dataset to highlight ("MultiModelMean") ; mask_ts_sea_ice: - True = mask T < 272 K as sea ice (only for ; variable "ts") ; - False = no additional grid cells masked for @@ -50,17 +51,20 @@ ; plot is written to 2 separate netCDFs. ; ; Modification history -; 20190222-A_laue_ax: added output of provenance (v2.0) -; 20181119-A_laue_ax: adapted code to multi-variable capable framework -; 20180923-A_laue_ax: added writing of results to netcdf -; 20180529-A_laue_ax: code rewritten for ESMValTool v2.0 -; 20170620-A_laue_ax: added tags for reporting -; 20160920-A_laue_ax: added optional shading of observational uncertainties -; to the zonal mean plot -; 20160901-A_laue_ax: added regridding option 1 deg x 1 deg -; 20151027-A_laue_ax: moved call to 'write_references' to the beginning -; of the code -; 20150428A-laue_ax: written. +; 20230118-lauer_axel: added support to highlight other dataset than +; MultiModelMean +; 20211006-lauer_axel: removed write_plots +; 20190222-lauer_axel: added output of provenance (v2.0) +; 20181119-lauer_axel: adapted code to multi-variable capable framework +; 20180923-lauer_axel: added writing of results to netcdf +; 20180529-lauer_axel: code rewritten for ESMValTool v2.0 +; 20170620-lauer_axel: added tags for reporting +; 20160920-lauer_axel: added optional shading of observational uncertainties +; to the zonal mean plot +; 20160901-lauer_axel: added regridding option 1 deg x 1 deg +; 20151027-lauer_axel: moved call to 'write_references' to the beginning +; of the code +; 20150428-lauer_axel: written. ; ; ############################################################################ @@ -126,6 +130,7 @@ begin log_info("++++++++++++++++++++++++++++++++++++++++++") ; Set default values for non-required diag_script_info attributes + set_default_att(diag_script_info, "highlight_dataset", "MultiModelMean") set_default_att(diag_script_info, "mask_ts_sea_ice", False) set_default_att(diag_script_info, "projection", "CylindricalEquidistant") set_default_att(diag_script_info, "timemean", "annualclim") @@ -154,17 +159,10 @@ begin ; make sure path for (mandatory) netcdf output exists - write_nc = True work_dir = config_user_info@work_dir + "/" ; Create work dir system("mkdir -p " + work_dir) - if (config_user_info@write_plots.eq."True") then - write_plots = True - else - write_plots = False - end if - end begin @@ -198,12 +196,23 @@ begin ; get multi-model mean index - mm_ind = ind(names .eq. "MultiModelMean") + mm_ind = ind(names .eq. 
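+  ; Hypothetical recipe excerpt for the option documented above, e.g. to
+  ; highlight a single model instead of the multi-model mean:
+  ;   scripts:
+  ;     ipcc:
+  ;       script: clouds/clouds_ipcc.ncl
+  ;       highlight_dataset: CESM2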
diag_script_info@highlight_dataset) if (ismissing(mm_ind)) then - error_msg("f", DIAG_SCRIPT, "", "multi-model mean is missing (required)") + mod_ind = ind(names .ne. ref_ind) + if (all(ismissing(mod_ind))) then + error_msg("f", DIAG_SCRIPT, "", "no dataset besides reference " \ + + "dataset found. Cannot continue.") + end if + mm_ind = mod_ind(0) + log_info("highlight_dataset (" + diag_script_info@highlight_dataset \ + + " ) not found, using first dataset (" + names(mm_ind) \ + + ") instead.") + delete(mod_ind) end if + highlight_name = names(mm_ind) + mask_ts_sea_ice = diag_script_info@mask_ts_sea_ice if ((var0 .eq. "ts") .and. (mask_ts_sea_ice)) @@ -363,7 +372,8 @@ begin delete(tmp) - ; save maps of multi-model mean and reference data + ; save maps of highlight_dataset (default = multi-model mean) + ; and reference data if (imod.eq.mm_ind) then mmdata = data @@ -382,7 +392,8 @@ begin end do ; imod - ; differences between multi-model mean and reference data set + ; differences between highlight_dataset (default = multi-model mean) + ; and reference data set diff = mmdata - refdata copy_VarMeta(refdata, diff) @@ -390,17 +401,19 @@ begin ; debugfile->diff = diff ; we order the zonal mean array in a way so that - ; the lines for the multi-model mean and reference model will - ; be drawn on top of the lines for the individual models, i.e.: + ; the lines for the highligh_dataset (default = multi-model mean) + ; and reference model will be drawn on top of the lines for the individual + ; models, i.e.: ; (1) individual model(s) ; (2) reference model(s) (observations) - ; (3) multi-model mean + ; (3) highlight_dataset (default = multi-model mean) dims = dimsizes(zm) zonalmean = new(dims, float) copy_VarMeta(zm, zonalmean) - ; model indices with no reference model(s) and no multi-model mean + ; model indices with no reference model(s) and no highlight_dataset + ; (default = multi-model mean) model_ind = ispan(0, dim_MOD0 - 1, 1) model_ind(ref_ind) = -1 @@ -412,18 +425,22 @@ begin modelsonly_ind = ind(model_ind.ge.0) delete(model_ind) - n = dimsizes(modelsonly_ind) - 1 + if (.not.all(ismissing(modelsonly_ind))) then + n = dimsizes(modelsonly_ind) - 1 - ; first entries in "zonalmean" = individual models + ; first entries in "zonalmean" = individual models - if (numseas.gt.1) then - zonalmean(0:n, :, :) = zm(modelsonly_ind, :, :) + if (numseas.gt.1) then + zonalmean(0:n, :, :) = zm(modelsonly_ind, :, :) + else + zonalmean(0:n, :) = zm(modelsonly_ind, :) + end if + + zonalmean&model(0:n) = zm&model(modelsonly_ind) else - zonalmean(0:n, :) = zm(modelsonly_ind, :) + n = -1 end if - zonalmean&model(0:n) = zm&model(modelsonly_ind) - ; observation(s) n = n + 1 @@ -447,14 +464,14 @@ begin zonalmean&model(n) = zm&model(ref_ind2) end if - ; last entry in "zonalmean" = multi-model mean + ; last entry in "zonalmean" = highlight_dataset (default = multi-model mean) n = n + 1 if (numseas.gt.1) then - zonalmean(n, :, :) = zm(mm_ind, :, :) ; multi-model mean + zonalmean(n, :, :) = zm(mm_ind, :, :) ; highlight_dataset else - zonalmean(n, :) = zm(mm_ind, :) ; multi-model mean + zonalmean(n, :) = zm(mm_ind, :) ; highlight_dataset end if zonalmean&model(n) = zm&model(mm_ind) @@ -479,6 +496,8 @@ begin log_info(DIAG_SCRIPT + " (var: " + var0 + "):") log_info("info: using default contour levels") cnLevels = fspan(min(diff), max(diff), 20) + else + cnLevels = diag_script_info@explicit_cn_levels end if diff@diag_script = DIAG_SCRIPT @@ -558,6 +577,12 @@ begin diff@res_cnFillColors = pal end if + if (var0 .eq. 
"swcre") then + pal = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_misc_div.rgb") + diff@res_cnFillPalette = pal + end if + plots = new((/2, numseas/), graphic) plotfile = new(numseas, string) plotfile(:) = "" @@ -619,7 +644,7 @@ begin n = dimsizes(modelsonly_ind) - 1 ; settings for all models that have been used to calculate the - ; multi-model mean (= first entries in "zonalmean") + ; highligh_dataset (= first entries in "zonalmean") linethickness(0:n) = 1.0 linecolor(0:n) = "(/0.5, 0.5, 0.5/)" @@ -646,9 +671,9 @@ begin end do else linethickness(n+1:dim_MOD0-1) = 4.0 ; reference dataset(s) - linethickness(mm_ind) = 4.0 ; multi-model mean + linethickness(mm_ind) = 4.0 ; highlight_dataset linecolor(n+1:dim_MOD0-1) = "Black" ; reference data set - linecolor(mm_ind) = "Red" ; multi-model mean + linecolor(mm_ind) = "Red" ; highlight_dataset end if res = True @@ -703,17 +728,15 @@ begin ; -------------------------------------------------------------------- - if (write_plots) then - pres = True - pres@gsnPanelCenter = False - pres@gsnPanelXF = (/0.075, 0.625/) ; hor. pos. of sub-plots - pres@txString = season(is) + pres = True + pres@gsnPanelCenter = False + pres@gsnPanelXF = (/0.075, 0.625/) ; hor. pos. of sub-plots + pres@txString = highlight_name + " (" + season(is) + ")" - outfile = panelling(wks, plots(:, is), 1, 2, pres) - log_info("Wrote " + wks@fullname) + outfile = panelling(wks, plots(:, is), 1, 2, pres) + log_info("Wrote " + wks@fullname) - plotfile(is) = wks@fullname - end if ; if write_plots + plotfile(is) = wks@fullname end do ; is-loop (seasons) ; ########################################### @@ -742,12 +765,12 @@ begin ; write provenance to netcdf output and plot file(s) ; ------------------------------------------------------------------------ - statistics = ("clim") - domain = ("glob") + statistics = "clim" + domain = "global" plottype = (/"geo", "zonal"/) - caption = "Multi model mean bias (left) and zonal averages (right) " \ - + "for variable " + var0 + " (" + allseas \ - + "), reference = " + names(ref_ind) + "." + caption = names(mm_ind) + " bias (left) and zonal averages (right) " \ + + "for variable " + var0 + " (" + allseas \ + + "), reference = " + names(ref_ind) + "." do is = 0, numseas - 1 log_provenance(nc_outfile_bias, plotfile(is), caption, statistics, \ diff --git a/esmvaltool/diag_scripts/clouds/clouds_lifrac_scatter.ncl b/esmvaltool/diag_scripts/clouds/clouds_lifrac_scatter.ncl new file mode 100644 index 0000000000..b383753c90 --- /dev/null +++ b/esmvaltool/diag_scripts/clouds/clouds_lifrac_scatter.ncl @@ -0,0 +1,765 @@ +; CLOUDS_LIFRAC_SCATTER +; ############################################################################ +; Author: Axel Lauer (DLR, Germany) +; ############################################################################ +; Description +; Calculates average liquid / ice fraction vs. temperature. 
+;
+; Required diag_script_info attributes (diagnostic specific)
+; none
+;
+; Optional diag_script_info attributes (diagnostic specific)
+; filename_add: optionally add this string to plot filenames
+; min_mass: minimum cloud condensate (same units as clw, cli)
+; mm_mean_median: calculate multi-model mean and median
+; nbins: number of equally spaced bins (ta (x-axis)), default = 20
+; panel_labels: label individual panels (true, false)
+; PanelTop: manual override for "@gsnPanelTop" used by panel
+; plot(s)
+;
+; Required variable attributes (variable specific)
+; none
+;
+; Optional variable_info attributes (variable specific)
+; reference_dataset: reference dataset
+;
+; Caveats
+; none
+;
+; Modification history
+; 20230117-lauer_axel: added support for ICON (code from Manuel)
+; 20210302-lauer_axel: written.
+;
+; ############################################################################
+
+load "$diag_scripts/../interface_scripts/interface.ncl"
+
+load "$diag_scripts/shared/plot/aux_plotting.ncl"
+load "$diag_scripts/shared/statistics.ncl"
+load "$diag_scripts/shared/plot/style.ncl"
+load "$diag_scripts/shared/dataset_selection.ncl"
+
+begin
+
+  enter_msg(DIAG_SCRIPT, "")
+
+  diag = "clouds_lifrac_scatter.ncl"
+  variables = get_unique_values(metadata_att_as_array(variable_info, \
+                                                      "short_name"))
+  dim_VAR = dimsizes(variables)
+  refname = new(dim_VAR, string)
+
+  log_info("++++++++++++++++++++++++++++++++++++++++++")
+  log_info(DIAG_SCRIPT + " (var: " + variables + ")")
+  log_info("++++++++++++++++++++++++++++++++++++++++++")
+
+  ; make sure required variables are available
+
+  varlist = (/"clw", "cli", "ta"/)
+  idx = new(dimsizes(varlist), integer)
+
+  do i = 0, dimsizes(varlist) - 1
+    idx(i) = ind(variables .eq. varlist(i))
+  end do
+
+  if (any(ismissing(idx))) then
+    errstr = "diagnostic " + diag + " requires the following variable(s): " \
+             + str_join(varlist, ", ")
+    error_msg("f", DIAG_SCRIPT, "", errstr)
+  end if
+
+  ; save input files for writing provenance
+
+  infiles = metadata_att_as_array(input_file_info, "filename")
+
+  ; get reference datasets (if present) and check that number of datasets
+  ; is equal for each variable
+
+  do i = 0, dim_VAR - 1
+    var = variables(idx(i))
+    var_info = select_metadata_by_name(variable_info, var)
+    var_info := var_info[0]
+    if (isatt(var_info, "reference_dataset")) then
+      refname(i) = var_info@reference_dataset
+    end if
+    info = select_metadata_by_name(input_file_info, var)
+    if (i .eq. 0) then
+      dim_MOD = ListCount(info)
+    else
+      dim_test = ListCount(info)
+      if (dim_test .ne. dim_MOD) then
+        error_msg("f", DIAG_SCRIPT, "", "number of datasets for variable " \
+                  + variables(i) + " does not match number of datasets for " \
+                  + variables(0))
+      end if
+    end if
+    delete(info)
+    delete(var)
+    delete(var_info)
+  end do
+
+  delete(idx)
+
+  ; Set default values for non-required diag_script_info attributes
+
+  set_default_att(diag_script_info, "filename_add", "")
+  set_default_att(diag_script_info, "panel_labels", True)
+  set_default_att(diag_script_info, "min_mass", 1.0e-15)
+  set_default_att(diag_script_info, "mm_mean_median", True)
+  set_default_att(diag_script_info, "nbins", 20)
+
+  if (diag_script_info@filename_add .ne. 
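+  ; Hypothetical recipe excerpt providing the optional attributes above:
+  ;   scripts:
+  ;     lifrac_scatter:
+  ;       script: clouds/clouds_lifrac_scatter.ncl
+  ;       min_mass: 1.0e-6
+  ;       nbins: 20
+  ;       mm_mean_median: true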
"") then + filename_add = "_" + diag_script_info@filename_add + else + filename_add = "" + end if + + nbins = toint(diag_script_info@nbins) + + panel_labels = diag_script_info@panel_labels + min_mass = diag_script_info@min_mass + mm_mean_median = diag_script_info@mm_mean_median + + ; make sure path for (mandatory) netcdf output exists + + work_dir = config_user_info@work_dir + "/" + ; Create work dir + system("mkdir -p " + work_dir) + +end + +begin + ; ############ + ; # get data # + ; ############ + + info_clw = select_metadata_by_name(input_file_info, "clw") + names_clw = metadata_att_as_array(info_clw, "dataset") + projects_clw = metadata_att_as_array(info_clw, "project") + info_cli = select_metadata_by_name(input_file_info, "cli") + names_cli = metadata_att_as_array(info_cli, "dataset") + projects_cli = metadata_att_as_array(info_cli, "project") + info_ta = select_metadata_by_name(input_file_info, "ta") + names_ta = metadata_att_as_array(info_ta, "dataset") + projects_ta = metadata_att_as_array(info_ta, "project") + + refidx_clw = ind(names_clw .eq. refname(0)) + refidx_cli = ind(names_cli .eq. refname(1)) + refidx_ta = ind(names_ta .eq. refname(2)) + + if (ismissing(refidx_clw) .or. ismissing(refidx_cli) .or. \ + ismissing(refidx_ta)) then + if (ismissing(refidx_clw)) then + str = refname(0) + end if + if (ismissing(refidx_cli)) then + if (isdefined("str")) then + str = str + " + " + refname(1) + else + str = refname(1) + end if + end if + if (ismissing(refidx_ta)) then + if (isdefined("str")) then + str = str + " + " + refname(2) + else + str = refname(2) + end if + end if + + error_msg("f", DIAG_SCRIPT, "", "the following reference dataset(s) " \ + + "are not available: " + str) + end if + + add_dim_MOD = 0 + + if (mm_mean_median) then + ; check if enough model datasets are available to calculate multi-model + ; mean and median ice/liquid fractions + + ; find all indices of models w/o MultiModelMean/MultiModelMedian + ; (if present) + idxmod = get_mod(names_clw, projects_clw) + + if ((idxmod(0) .eq. -1) .or. (dimsizes(idxmod) .le. 2)) then + log_info("Not enough model datasets to calculate multi-model " \ + + "mean and median. Setting of 'mm_mean_median' will" \ + + " be ignored.") + mm_mean_median = False + else ; more than one model found + add_dim_MOD = 2 + end if + delete(idxmod) + end if + + resulti = new((/dim_MOD + add_dim_MOD, nbins/), float) + resultl = new((/dim_MOD + add_dim_MOD, nbins/), float) + bincenter = new((/nbins/), float) + + xmax = 300. ; Kelvin (27°C) + xmin = 230. ; Kelvin (-43°C) + binsize = tofloat(xmax - xmin) / nbins + + do n = 0, nbins - 1 + x0 = n * binsize + x1 = x0 + binsize + bincenter(n) = xmin + 0.5 * (x0 + x1) + end do + + ; ------------------------------------------------------------------------- + + ; create index vector with reference dataset as first entry (index 0) + ; so reference icefrac is calculated first + + idataset = ispan(0, dim_MOD - 1, 1) + idataset0 = idataset(0) + i1 = ind(idataset .eq. refidx_clw) + idataset(0) = idataset(i1) + idataset(i1) = idataset0 + + do ii = 0, dim_MOD - 1 + + i = idataset(ii) + + atts_cli = True + atts_cli@short_name = "cli" + + atts_clw = True + atts_clw@short_name = "clw" + + atts_ta = True + atts_ta@short_name = "ta" + + ; reference datasets may have different names + if (i .eq. 
refidx_clw) then + atts_clw@dataset = refname(0) + atts_cli@dataset = refname(1) + atts_ta@dataset = refname(2) + ; all other datasets: force same dataset name for clw, cli and ta + else + atts_clw@dataset = names_clw(i) + atts_cli@dataset = names_clw(i) + atts_ta@dataset = names_clw(i) + end if + + ; read cli + + info = select_metadata_by_atts(input_file_info, atts_cli) + cli = read_data(info[0]) + delete(info) + + ; read clw + + info = select_metadata_by_atts(input_file_info, atts_clw) + clw = read_data(info[0]) + delete(info) + + ; read ta + + info = select_metadata_by_atts(input_file_info, atts_ta) + ta = read_data(info[0]) + delete(info) + + ; check dimensions + + dims_clw = dimsizes(clw) + dims_cli = dimsizes(cli) + dims_ta = dimsizes(ta) + + dimerror = False + + if (dimsizes(dims_cli) .eq. dimsizes(dims_clw)) then + if (any(dims_cli - dims_clw .ne. 0)) then + dimerror = True + end if + else + dimerror = True + end if + + if (dimsizes(dims_cli) .eq. dimsizes(dims_ta)) then + if (any(dims_cli - dims_ta .ne. 0)) then + dimerror = True + end if + else + dimerror = True + end if + + if (dimerror) then + error_msg("f", DIAG_SCRIPT, "", "dimensions of datasets " \ + + atts_cli@dataset + " (variable cli), " \ + + atts_clw@dataset + " (variable clw), " \ + + atts_ta@dataset + " (variable ta) do not match.") + end if + + delete(dims_cli) + delete(dims_clw) + delete(dims_ta) + + ; calculate ice fraction + + ; filter valid values (needed for some models) + + cli = where(cli .lt. 0.0, cli@_FillValue, cli) + cli = where(isnan_ieee(cli), cli@_FillValue, cli) + clw = where(clw .lt. 0.0, clw@_FillValue, clw) + clw = where(isnan_ieee(clw), clw@_FillValue, clw) + + mass = cli + clw + mass = where(mass .lt. min_mass, mass@_FillValue, mass) + + icefrac = 100.0 * cli / mass + liqfrac = 100.0 * clw / mass + + ; ======================================================================== + + ; output zonal means of cli with max icefrac limited to observed icefrac + ; ---------------------------------------------------------------------- + ; as modeled and observed timeseries of icefrac may differ in length and + ; years covered, we use average seasonal cycles instead of the full time + ; series + + copy_VarMeta(cli, icefrac) + icedims = dimsizes(cli) + icedimnames = getvardims(cli) + lonidx = ind(icedimnames .eq. "lon") + if (ismissing(lonidx)) then + error_msg("f", DIAG_SCRIPT, "", "no lon dimension") + end if + + if (ii .eq. 0) then + system("rm " + config_user_info@work_dir + "zonal.nc") + zonalfile = addfile(config_user_info@work_dir + "zonal.nc", "c") + end if + + cli_avg = time_operations(cli, -1, -1, "average", "monthlyclim", True) + zcli = dim_avg_n_Wrap(cli_avg, lonidx) + delete(zcli&month) + zcli!0 = "time" + zcli&time = ispan(0, 11, 1) + + var = "f_" + tostring(atts_cli@dataset) + zonalfile->$var$ = zcli + + delete(cli_avg) + delete(zcli) + + ; ======================================================================== + + cli1d = ndtooned(cli) + + delete(clw) + delete(mass) + + icefrac_units = "%" + liqfrac_units = "%" + icefrac_long_name = "cloud ice fraction" + liqfrac_long_name = "cloud liquid fraction" + icefrac_var = "icefrac" + liqfrac_var = "liqfrac" + ref_ind = refidx_cli + if (ismissing(ref_ind)) then + ref_ind = -1 + end if + names = names_cli + projects = projects_cli + + ; if reference datasets for clw, cli, ta are from different sources + if (refname(0) .ne. refname(1)) then + names(refidx_cli) = refname(0) + "/" + refname(1) + end if + if (refname(0) .ne. refname(2) .and. refname(1) .ne. 
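+    ; By construction icefrac + liqfrac = 100% wherever mass >= min_mass;
+    ; hypothetical values cli = 1.0e-5 and clw = 3.0e-5 kg kg-1 give
+    ; mass = 4.0e-5, icefrac = 25% and liqfrac = 75%.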
refname(2)) then + names(refidx_cli) = names(refidx_cli) + "/" + refname(2) + end if + + ; ======================================================================== + + ; bin data + + ice1d = ndtooned(icefrac) + delete(icefrac) + liq1d = ndtooned(liqfrac) + delete(liqfrac) + ta1d = ndtooned(ta) + ta_units = ta@units + delete(ta) + + resulti@_FillValue = ice1d@_FillValue + resultl@_FillValue = ice1d@_FillValue + + cli_limited1d = cli1d + + do n = 0, nbins - 1 + resulti(i, n) = 0.0 + resultl(i, n) = resultl@_FillValue + x0 = xmin + n * binsize + x1 = x0 + binsize + idx0 = ind((ta1d .gt. x0) .and. (ta1d .le. x1)) + if (.not.all(ismissing(idx0))) then + nv = num(.not.ismissing(ice1d(idx0))) + if (nv .gt. 0) then + resulti(i, n) = avg(ice1d(idx0)) + ; ------------------------------------------------------------------ + + iref = idataset(0) + ifrac_ref = resulti(iref, n) + ifrac_mod = resulti(i, n) + + if (.not.ismissing(ifrac_ref) .and. .not.ismissing(ifrac_mod)) then + if (ifrac_mod .gt. 0.0) then + corr_fac = ifrac_ref / ifrac_mod + if (corr_fac .lt. 1.0) then + cli_limited1d(idx0) = cli1d(idx0) * corr_fac + end if + end if + end if + ; ------------------------------------------------------------------ + end if + nv = num(.not.ismissing(liq1d(idx0))) + if (nv .gt. 0) then + resultl(i, n) = avg(liq1d(idx0)) + end if + end if + delete(idx0) + end do + + cli_limited = onedtond(cli_limited1d, icedims) + copy_VarMeta(cli, cli_limited) + + cli_avg = time_operations(cli_limited, -1, -1, "average", \ + "monthlyclim", True) + zcli = dim_avg_n_Wrap(cli_avg, lonidx) + delete(zcli&month) + zcli!0 = "time" + zcli&time = ispan(0, 11, 1) + + var = "l_" + tostring(atts_cli@dataset) + zonalfile->$var$ = zcli + + delete(cli_avg) + delete(zcli) + delete(cli_limited) + delete(cli_limited1d) + + delete(ice1d) + delete(liq1d) + delete(ta1d) + delete(cli1d) + delete(icedims) + delete(icedimnames) + + delete(cli) + + ; ========================================================================== + + end do ; loop over all datasets (ii) + + ; calculate average and median over all models excluding possibly present + ; MultiModelMean, MultiModelMedian and reference dataset + + if (mm_mean_median) then + index_my_mm = dim_MOD + index_my_med = dim_MOD + 1 + + xnames = array_append_record(names, (/"Average", "Median"/), 0) + delete(names) + names = xnames + delete(xnames) + + mm_ind = ind(names .eq. "MultiModelMean") + mmed_ind = ind(names .eq. "MultiModelMedian") + + if (ismissing(mm_ind)) then + mm_ind = -1 + end if + if (ismissing(mmed_ind)) then + mmed_ind = -1 + end if + + imod = ispan(0, dim_MOD - 1, 1) + idx0 = ind((imod .ne. ref_ind) .and. (imod .ne. mm_ind) .and. \ + (imod .ne. mmed_ind)) + + mmed25i = new(nbins, float) + mmed75i = new(nbins, float) + mmed25l = new(nbins, float) + mmed75l = new(nbins, float) + + mmstdi = new(nbins, float) + mmstdl = new(nbins, float) + + if (.not.all(ismissing(idx0))) then + do n = 0, nbins - 1 + nv = num(.not.ismissing(resulti(idx0, n))) + if (nv .gt. 2) then + stat = stat_dispersion(resulti(idx0, n), False) + resulti(index_my_mm, n) = stat(0) ; mean + mmstdi(n) = stat(1) ; standard deviation + mmed25i(n) = stat(6) ; lower quartile + resulti(index_my_med, n) = stat(8) ; median + mmed75i(n) = stat(10) ; upper quartile + ; if number of datasets < 4, quartiles cannot be calculated + if (ismissing(mmed25i(n)) .or. 
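+        ; Correction-factor logic above: if the model's binned ice
+        ; fraction exceeds the reference, e.g. (hypothetical values)
+        ; ifrac_mod = 80% vs. ifrac_ref = 60%, then corr_fac = 60 / 80
+        ; = 0.75 and all cli values in that temperature bin are scaled
+        ; by 0.75 before the "limited" zonal means are written; bins
+        ; with corr_fac >= 1 are left unchanged.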
+
+    if (.not.all(ismissing(idx0))) then
+      do n = 0, nbins - 1
+        nv = num(.not.ismissing(resulti(idx0, n)))
+        if (nv .gt. 2) then
+          stat = stat_dispersion(resulti(idx0, n), False)
+          resulti(index_my_mm, n) = stat(0)   ; mean
+          mmstdi(n) = stat(1)                 ; standard deviation
+          mmed25i(n) = stat(6)                ; lower quartile
+          resulti(index_my_med, n) = stat(8)  ; median
+          mmed75i(n) = stat(10)               ; upper quartile
+          ; if number of datasets < 4, quartiles cannot be calculated
+          if (ismissing(mmed25i(n)) .or. ismissing(mmed75i(n))) then
+            mmed25i(n) = 0.0
+            mmed75i(n) = 0.0
+          end if
+        else
+          resulti(index_my_mm, n) = 0.0
+          mmstdi(n) = 0.0
+          mmed25i(n) = 0.0
+          resulti(index_my_med, n) = 0.0
+          mmed75i(n) = 0.0
+        end if
+        nv = num(.not.ismissing(resultl(idx0, n)))
+        if (nv .gt. 2) then
+          stat = stat_dispersion(resultl(idx0, n), False)
+          resultl(index_my_mm, n) = stat(0)   ; mean
+          mmstdl(n) = stat(1)                 ; standard deviation
+          mmed25l(n) = stat(6)                ; lower quartile
+          resultl(index_my_med, n) = stat(8)  ; median
+          mmed75l(n) = stat(10)               ; upper quartile
+          ; if number of datasets < 4, quartiles cannot be calculated
+          if (ismissing(mmed25l(n)) .or. ismissing(mmed75l(n))) then
+            mmed25l(n) = 0.0
+            mmed75l(n) = 0.0
+          end if
+        else
+          resultl(index_my_mm, n) = 0.0
+          mmstdl(n) = 0.0
+          mmed25l(n) = 0.0
+          resultl(index_my_med, n) = 0.0
+          mmed75l(n) = 0.0
+        end if
+      end do
+    end if
+
+    if (isdefined("stat")) then
+      delete(stat)
+    end if
+    delete(idx0)
+    delete(imod)
+  else
+    index_my_mm = -1
+    index_my_med = -1
+  end if
+
+  ; ###########################################
+  ; # netCDF output                           #
+  ; ###########################################
+
+  nc_filename_i = work_dir + "clouds_lifrac_scatter_ice" + \
+    filename_add + ".nc"
+  nc_filename_l = work_dir + "clouds_lifrac_scatter_liq" + \
+    filename_add + ".nc"
+
+  resulti!0 = "model"
+  resulti!1 = "bin"
+  resulti&model = str_sub_str(names, "/", "-")
+  resulti&bin = bincenter
+  resulti@diag_script = (/DIAG_SCRIPT/)
+
+  copy_VarMeta(resulti, resultl)
+
+  resulti@var = icefrac_var
+  resulti@var_long_name = icefrac_long_name
+  resulti@var_units = icefrac_units
+
+  resultl@var = liqfrac_var
+  resultl@var_long_name = liqfrac_long_name
+  resultl@var_units = liqfrac_units
+
+  nc_outfile_i = ncdf_write(resulti, nc_filename_i)
+  nc_outfile_l = ncdf_write(resultl, nc_filename_l)
+
+  ; ###########################################
+  ; # create the plots                        #
+  ; ###########################################
+
+  plots_i = new(dim_MOD + add_dim_MOD, graphic)
+  plots_l = new(dim_MOD + add_dim_MOD, graphic)
+  res = True
+
+  wks_i = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_lifrac_scatter_" + \
+                  "ice" + filename_add)
+  wks_l = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_lifrac_scatter_" + \
+                  "liq" + filename_add)
+
+  data_i = new((/2, nbins/), float)
+  data_l = new((/2, nbins/), float)
+  if (ref_ind .ge. 0) then
+    data_i(0, :) = resulti(ref_ind, :)
+    data_l(0, :) = resultl(ref_ind, :)
+  else
+    data_i(0, :) = data_i@_FillValue
+    data_l(0, :) = data_l@_FillValue
+  end if
+
+  do ii = 0, dim_MOD + add_dim_MOD - 1
+    if (ii .eq. refidx_ta) then
+      continue
+    end if
+
+    res@gsnDraw = False   ; do not draw yet
+    res@gsnFrame = False  ; don't advance frame
+
+    res@xyMarkLineMode = "Markers"
+    res@xyMarkers = (/4, 16/)
+    res@xyMonoMarkerColor = False
+    res@xyMarkerColors = (/"black", "red"/)
+    res@xyMarkerSizeF = 0.01
+    res@tmLabelAutoStride = True
+
+    res@tiMainFontHeightF = 0.025
+    res@tiYAxisFontHeightF = 0.025
+    res@tiXAxisFontHeightF = 0.025
+    res@tiXAxisString = varlist(2) + " (" + ta_units + ")"
+
+    data_i(1, :) = resulti(ii, :)
+    data_l(1, :) = resultl(ii, :)
+
+    res@tiMainString = names(ii)
+
+    res@tiYAxisString = icefrac_var + " (" + icefrac_units + ")"
+    plots_i(ii) = gsn_csm_xy(wks_i, resulti&bin, data_i, res)
+    res@tiYAxisString = liqfrac_var + " (" + liqfrac_units + ")"
+    plots_l(ii) = gsn_csm_xy(wks_l, resultl&bin, data_l, res)
+
+    if (ii .eq. index_my_mm) then
+      res_std = True
+      res_std@gsnDraw = False   ; do not draw yet
+      res_std@gsnFrame = False  ; don't advance frame
+      res_std@gsnXYFillColors = "lightpink"
+      res_std@xyLineColor = -1  ; Make lines transparent
+      mmstddev = new((/2, nbins/), float)
+      mmstddev(0, :) = resulti(ii, :) - mmstdi(:)
+      mmstddev(1, :) = resulti(ii, :) + mmstdi(:)
+      plotstd_i = gsn_csm_xy(wks_i, resulti&bin, mmstddev, res_std)
+      overlay(plots_i(ii), plotstd_i)
+      mmstddev(0, :) = resultl(ii, :) - mmstdl(:)
+      mmstddev(1, :) = resultl(ii, :) + mmstdl(:)
+      plotstd_l = gsn_csm_xy(wks_l, resultl&bin, mmstddev, res_std)
+      delete(mmstddev)
+      overlay(plots_l(ii), plotstd_l)
+    end if
+
+    if (ii .eq. index_my_med) then
+      res_med = True
+      res_med@gsnDraw = False   ; do not draw yet
+      res_med@gsnFrame = False  ; don't advance frame
+      res_med@gsnXYFillColors = "lightpink"
+      res_med@xyLineColor = -1  ; Make lines transparent
+      mmed = new((/2, nbins/), float)
+      mmed(0, :) = mmed25i(:)
+      mmed(1, :) = mmed75i(:)
+      plotmed_i = gsn_csm_xy(wks_i, resulti&bin, mmed, res_med)
+      overlay(plots_i(ii), plotmed_i)
+      mmed(0, :) = mmed25l(:)
+      mmed(1, :) = mmed75l(:)
+      plotmed_l = gsn_csm_xy(wks_l, resultl&bin, mmed, res_med)
+      delete(mmed)
+      overlay(plots_l(ii), plotmed_l)
+    end if
+
+    draw(plots_i(ii))
+    frame(wks_i)
+    draw(plots_l(ii))
+    frame(wks_l)
+  end do
+
+  pres = True  ; needed to override panelling defaults
+  pres@gsnPanelCenter = False
+
+  idx0 = ind(.not.ismissing(plots_i))
+  n = dimsizes(idx0)
+  pres@gsnPanelFigureStrings = names(idx0)
+  pres@gsnPanelFigureStringsFontHeightF = min((/0.008, 0.008 * 6.0 \
+                                              / tofloat((dim_MOD + 1) / 2)/))
+  pres@lbLabelFontHeightF = min((/0.01, 0.01 * 6.0 \
+                                / tofloat((dim_MOD + 1) / 2)/))
+
+  outfile_i = panelling(wks_i, plots_i(idx0), (n + 3) / 4, 4, pres)
+  log_info("Wrote " + outfile_i)
+
+  outfile_l = panelling(wks_l, plots_l(idx0), (n + 3) / 4, 4, pres)
+  log_info("Wrote " + outfile_l)
+  delete(idx0)
+
+  ; ==========================================================================
+
+  ; ----------------------------------------------------------------------
+  ; write provenance to netcdf output (and plot file)
+  ; ----------------------------------------------------------------------
+
+  statistics = (/"clim", "mean", "pdf"/)
+  domain = "reg"
+  plottype = "scatter"
+  caption_i = "Scatterplot of air temperature (x) vs. cloud ice fraction (y)."
+  caption_l = "Scatterplot of air temperature (x) vs. " + \
+              "cloud liquid fraction (y)."
+  log_provenance(nc_outfile_i, outfile_i, caption_i, statistics, \
+                 domain, plottype, "", "", infiles)
+  log_provenance(nc_outfile_l, outfile_l, caption_l, statistics, \
+                 domain, plottype, "", "", infiles)
+
+  leave_msg(DIAG_SCRIPT, "")
+
+end
diff --git a/esmvaltool/diag_scripts/clouds/clouds_lifrac_scatter_postproc.ncl b/esmvaltool/diag_scripts/clouds/clouds_lifrac_scatter_postproc.ncl
new file mode 100644
index 0000000000..92cc4b13e3
--- /dev/null
+++ b/esmvaltool/diag_scripts/clouds/clouds_lifrac_scatter_postproc.ncl
@@ -0,0 +1,371 @@
+; CLOUDS_LIFRAC_SCATTER_POSTPROC
+; ############################################################################
+; Author: Axel Lauer (DLR, Germany)
+; ############################################################################
+; Description
+;   Read ice fraction from clouds_lifrac_scatter.ncl and plot results for
+;   cmip5/cmip6 model pairs defined below.
+;
+; Required diag_script_info attributes (diagnostic specific)
+;   models: array of CMIP5/CMIP6 model pairs to be compared
+;   refname: name of reference dataset
+;
+; Optional diag_script_info attributes (diagnostic specific)
+;   nbins: number of bins used by clouds_lifrac_scatter.ncl (default = 20)
+;   reg: region (string) (default = "")
+;   t_int: array of temperatures for printing additional diagnostics
+;
+; Required variable attributes (variable specific)
+;   none
+;
+; Optional variable_info attributes (variable specific)
+;   none
+;
+; Caveats
+;   none
+;
+; Modification history
+;   20211113-lauer_axel: written.
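+;
+; Example
+;   A hypothetical configuration (model and reference names are placeholders,
+;   for illustration only) as seen by this script via diag_script_info:
+;
+;     diag_script_info@models = (/(/"CESM1-CAM5", "CESM2"/), \
+;                                 (/"MPI-ESM-LR", "MPI-ESM1-2-LR"/)/)
+;     diag_script_info@refname = "CLOUDSAT-L2"
+;     diag_script_info@nbins = 20
+;     diag_script_info@t_int = (/240.0, 250.0, 260.0, 270.0/)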
+;
+; ############################################################################
+
+load "$diag_scripts/../interface_scripts/interface.ncl"
+load "$diag_scripts/shared/plot/aux_plotting.ncl"
+
+begin
+
+  enter_msg(DIAG_SCRIPT, "")
+
+  log_info("++++++++++++++++++++++++++++++++++++++++++")
+  log_info(DIAG_SCRIPT)
+  log_info("++++++++++++++++++++++++++++++++++++++++++")
+
+  set_default_att(diag_script_info, "nbins", 20)
+  set_default_att(diag_script_info, "reg", "")
+  set_default_att(diag_script_info, "t_int", (/240.0, 250.0, 260.0, 270.0/))
+
+  models = diag_script_info@models
+  t_int = diag_script_info@t_int
+
+  if (isatt(diag_script_info, "refname")) then
+    refname = diag_script_info@refname
+  else
+    error_msg("f", DIAG_SCRIPT, "", "no reference dataset defined (refname).")
+  end if
+
+  dims = dimsizes(models)
+  nmod = dims(0)
+
+  nbins = diag_script_info@nbins
+  reg = diag_script_info@reg
+
+  test = str_lower(diag_script_info@input_files)
+
+  i = dimsizes(test)
+  if (i .ne. 2) then
+    error_msg("f", DIAG_SCRIPT, "", "number of datasets (ancestors) is " \
+              + tostring(i) + ", expected 2")
+  end if
+
+  if (reg .ne. "") then
+    reg = "_" + reg
+  end if
+
+  fname5 = ""
+  fname6 = ""
+
+  do i = 0, dimsizes(test) - 1
+    if (isStrSubset(test(i), "cmip5")) then
+      fname5 = diag_script_info@input_files(i) + \
+               "/clouds_lifrac_scatter_ice_cmip5" + reg + ".nc"
+    end if
+    if (isStrSubset(test(i), "cmip6")) then
+      fname6 = diag_script_info@input_files(i) + \
+               "/clouds_lifrac_scatter_ice_cmip6" + reg + ".nc"
+    end if
+  end do
+
+  if (fname5 .eq. "") then
+    error_msg("f", DIAG_SCRIPT, "", "no dataset (ancestor) for CMIP5 found.")
+  end if
+  if (fname6 .eq. "") then
+    error_msg("f", DIAG_SCRIPT, "", "no dataset (ancestor) for CMIP6 found.")
+  end if
+
+  results = new((/nmod, nbins, 2/), float)
+  refdata = new(nbins, float)
+  mmmedian = new((/nbins, 2/), float)
+  mmmean = new((/nbins, 2/), float)
+  mmp10 = new((/nbins, 2/), float)
+  mmp90 = new((/nbins, 2/), float)
+  mmstd = new((/nbins, 2/), float)
+
+  ; CMIP5
+  infile5 = addfile(fname5, "r")
+  icefrac5 = infile5->icefrac
+  ; CMIP6
+  infile6 = addfile(fname6, "r")
+  icefrac6 = infile6->icefrac
+
+  results!1 = "bin"
+  results&bin = icefrac5&bin
+
+  tmp = infile5->model
+  dims = dimsizes(tmp)
+  idxtmp = ispan(0, dims(0) - 1, 1)
+  models5 = new(dims(0), string)
+  do i = 0, dims(0) - 1
+    models5(i) = tostring(tmp(i, :))
+    if (isStrSubset(models5(i), "Median") .or. \
+        isStrSubset(models5(i), "Mean") .or. \
+        isStrSubset(models5(i), "Average") .or. \
+        isStrSubset(models5(i), refname)) then
+      idxtmp(i) = -1
+    end if
+  end do
+  delete(tmp)
+  delete(dims)
+
+  idx5 = ind(idxtmp .ge. 0)
+  delete(idxtmp)
+
+  tmp = infile6->model
+  dims = dimsizes(tmp)
+  idxtmp = ispan(0, dims(0) - 1, 1)
+  models6 = new(dims(0), string)
+  do i = 0, dims(0) - 1
+    models6(i) = tostring(tmp(i, :))
+    if (isStrSubset(models6(i), "Median") .or. \
+        isStrSubset(models6(i), "Mean") .or. \
+        isStrSubset(models6(i), "Average") .or. \
+        isStrSubset(models6(i), refname)) then
+      idxtmp(i) = -1
+    end if
+  end do
+  delete(tmp)
+  delete(dims)
+
+  idx6 = ind(idxtmp .ge. 0)
+  delete(idxtmp)
+
+  model_ok = new((/nmod, 2/), logical)
+  model_ok = False
+  ref_ok = False
+
+  do imod = 0, nmod - 1
+    idx = ind(models5 .eq. models(imod, 0))
+    if (.not. ismissing(idx)) then
+      results(imod, :, 0) = icefrac5(idx, :)
+      model_ok(imod, 0) = True
+    end if
+    idx = ind(models6 .eq. models(imod, 1))
+    if (.not. ismissing(idx)) then
+      results(imod, :, 1) = icefrac6(idx, :)
+      model_ok(imod, 1) = True
+    end if
+    idx = ind(models6 .eq. refname)
+    if (.not. ismissing(idx)) then
+      refdata(:) = icefrac6(idx, :)
+      ref_ok = True
+    end if
+  end do
+
+  if (any(.not. model_ok(:, 0))) then
+    inotok = ind(.not. model_ok(:, 0))
+    log_info("warning: could not find data for the following CMIP5 model(s) " \
+             + str_join(models(inotok, 0), ","))
+  end if
+  if (any(.not. model_ok(:, 1))) then
+    inotok := ind(.not. model_ok(:, 1))
+    log_info("warning: could not find data for the following CMIP6 model(s) " \
+             + str_join(models(inotok, 1), ","))
+  end if
+
+  ; calculate multi-model median, p10 and p90, mean and standard deviation
+
+  print("T (K), CMIP5-CMIP6 (abs), CMIP5-CMIP6 (rel), CMIP5, CMIP6, ref")
+  print("--------------------------------------------------------------")
+
+  do n = 0, nbins - 1
+    selection = icefrac5(idx5, n)
+    itmp = ind(.not.ismissing(selection))
+    if (.not. ismissing(itmp(0))) then
+      ; nearest-rank estimates of the 10th, 50th and 90th percentiles
+      sorted = selection(itmp)
+      qsort(sorted)
+      i10 = toint(dimsizes(sorted) * 0.1 + 0.5)
+      i50 = toint(dimsizes(sorted) * 0.5 + 0.5)
+      i90 = toint(dimsizes(sorted) * 0.9 - 0.5)
+      mmp10(n, 0) = sorted(i10)
+      mmmedian(n, 0) = sorted(i50)
+      mmp90(n, 0) = sorted(i90)
+      delete(sorted)
+      mmmean(n, 0) = avg(selection)
+      mmstd(n, 0) = stddev(selection)
+    else
+      mmp10(n, 0) = mmp10@_FillValue
+      mmmedian(n, 0) = mmmedian@_FillValue
+      mmp90(n, 0) = mmp90@_FillValue
+      mmmean(n, 0) = mmmean@_FillValue
+      mmstd(n, 0) = mmstd@_FillValue
+    end if
+
+    delete(selection)
+    delete(itmp)
+
+    selection = icefrac6(idx6, n)
+    itmp = ind(.not.ismissing(selection))
+    if (.not. ismissing(itmp(0))) then
+      sorted = selection(itmp)
+      qsort(sorted)
+      i10 = toint(dimsizes(sorted) * 0.1 + 0.5)
+      i50 = toint(dimsizes(sorted) * 0.5 + 0.5)
+      i90 = toint(dimsizes(sorted) * 0.9 - 0.5)
+      mmp10(n, 1) = sorted(i10)
+      mmmedian(n, 1) = sorted(i50)
+      mmp90(n, 1) = sorted(i90)
+      delete(sorted)
+      mmmean(n, 1) = avg(selection)
+      mmstd(n, 1) = stddev(selection)
+    else
+      mmp10(n, 1) = mmp10@_FillValue
+      mmmedian(n, 1) = mmmedian@_FillValue
+      mmp90(n, 1) = mmp90@_FillValue
+      mmmean(n, 1) = mmmean@_FillValue
+      mmstd(n, 1) = mmstd@_FillValue
+    end if
+
+    delete(selection)
+    delete(itmp)
+
+    delta = mmmean(n, 0) - mmmean(n, 1)
+
+    ; print values linearly interpolated to the requested temperatures
+    ; t_int as additional diagnostics
+
+    if (n .ge. 1) then
+      do it = 0, dimsizes(t_int) - 1
+        if ((results&bin(n - 1) .lt. t_int(it)) .and. \
+            (results&bin(n) .gt. t_int(it))) then
+          dx = results&bin(n) - results&bin(n - 1)
+          dy5 = mmmean(n, 0) - mmmean(n - 1, 0)
+          dy6 = mmmean(n, 1) - mmmean(n - 1, 1)
+          dyr = refdata(n) - refdata(n - 1)
+          r5 = mmmean(n - 1, 0) + dy5 / dx * (t_int(it) - results&bin(n - 1))
+          r6 = mmmean(n - 1, 1) + dy6 / dx * (t_int(it) - results&bin(n - 1))
+          rr = refdata(n - 1) + dyr / dx * (t_int(it) - results&bin(n - 1))
+
+          print(tostring(t_int(it)) + ", " + tostring(r5) + ", " \
+                + tostring(r6) + ", " + tostring(rr))
+        end if
+      end do
+    end if
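+    ; mask bins in which the multi-model mean is exactly zero so that the
+    ; relative difference printed below does not divide by zero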
+    mmmean = where(mmmean .eq. 0, mmmean@_FillValue, mmmean)
+
+    print(tostring(results&bin(n)) + ", " + tostring(delta) + " (" \
+          + tostring(delta / mmmean(n, 0) * 100.0) + "%)" \
+          + ", " + tostring(mmmean(n, 0)) + ", " + tostring(mmmean(n, 1)) \
+          + ", " + tostring(refdata(n)))
+  end do
+
+  ; ###########################################
+  ; # create the plots                        #
+  ; ###########################################
+
+  plots = new(nmod + 2, graphic)
+
+  res = True
+  res@gsnDraw = False   ; do not draw yet
+  res@gsnFrame = False  ; don't advance frame
+  res@xyMarkLineMode = "MarkLines"
+  res@xyMarkers = (/16, 16, 16/)
+  res@xyMonoMarkerColor = False
+  res@xyMarkerColors = (/"blue", "red", "black"/)
+  res@xyDashPatterns = (/0, 0, 0/)
+  res@xyMarkerSizeF = 0.01
+  res@tmLabelAutoStride = True
+  res@tiMainFontHeightF = 0.025
+  res@tiYAxisFontHeightF = 0.025
+  res@tiXAxisFontHeightF = 0.025
+  res@tiXAxisString = "temperature (K)"
+  res@tiYAxisString = "icefrac (%)"
+
+  lgres = True
+  lgres@vpWidthF = 0.2                       ; width of legend (NDC)
+  lgres@vpHeightF = 0.08                     ; height of legend (NDC)
+  lgres@lgAutoManage = False                 ; necessary to set font height
+  lgres@lgMonoLineColor = True
+  lgres@lgMonoMarkerIndex = False
+  lgres@lgMonoMarkerColor = False
+  lgres@lgLineColors = "black"
+  lgres@lgMonoItemType = True                ; one item type for all entries
+  lgres@lgItemType = res@xyMarkLineMode      ; line/marker
+  lgres@lgDashIndexes = res@xyDashPatterns   ; dash indexes
+  lgres@lgMarkerIndexes = res@xyMarkers      ; marker indexes
+  lgres@lgMarkerColors = res@xyMarkerColors  ; marker colors
+  lgres@lgLabelFontHeightF = 0.12
+  lgres@lgLineLabelsOn = False
+  lgres@lgMarkerSizeF = 0.01
+
+  wks = get_wks("dummy_for_wks", DIAG_SCRIPT, \
+                "clouds_lifrac_scatter_post_ice" + reg)
+
+  data = new((/3, nbins/), float)
+
+  do ii = 0, nmod + 1
+    if (ii .lt. nmod) then
+      data(0, :) = results(ii, :, 0)
+      data(1, :) = results(ii, :, 1)
+      data(2, :) = refdata(:)
+      labels = (/models(ii, 0), models(ii, 1), "observations"/)
+    else if (ii .eq. nmod) then
+      data(0, :) = (/mmmedian(:, 0)/)
+      data(1, :) = (/mmmedian(:, 1)/)
+      data(2, :) = refdata(:)
+      labels = (/"CMIP5 MMMedian", "CMIP6 MMMedian", "observations"/)
+    else if (ii .eq. nmod + 1) then
+      data(0, :) = (/mmmean(:, 0)/)
+      data(1, :) = (/mmmean(:, 1)/)
+      data(2, :) = refdata(:)
+      labels = (/"CMIP5 MMMean", "CMIP6 MMMean", "observations"/)
+    end if
+    end if
+    end if
+
+    plots(ii) = gsn_csm_xy(wks, results&bin, data, res)
+
+    gsn_legend_ndc(wks, 3, labels, 0.58, 0.78, lgres)
+
+    if (ii .ge. nmod) then
+      res2 = True
+      res2@gsnDraw = False   ; do not draw yet
+      res2@gsnFrame = False  ; don't advance frame
+      res2@xyLineColor = -1  ; Make lines transparent
+      mmvar = new((/2, nbins/), float)
+      if (ii .eq. nmod) then  ; median + percentiles
+        mmvar(0, :) = mmp10(:, 0)
+        mmvar(1, :) = mmp90(:, 0)
+      else if (ii .eq. nmod + 1) then  ; mean + stddev
+        mmvar(0, :) = mmmean(:, 0) - mmstd(:, 0)
+        mmvar(1, :) = mmmean(:, 0) + mmstd(:, 0)
+      end if
+      end if
+      res2@gsnXYFillColors = (/0.9, 0.9, 1.0, 1.0/)  ; "lightblue"
+      plotvar5 = gsn_csm_xy(wks, results&bin, mmvar, res2)
+      overlay(plots(ii), plotvar5)
+      if (ii .eq. nmod) then  ; median + percentiles
+        mmvar(0, :) = mmp10(:, 1)
+        mmvar(1, :) = mmp90(:, 1)
+      else if (ii .eq. nmod + 1) then  ; mean + stddev
+        mmvar(0, :) = mmmean(:, 1) - mmstd(:, 1)
+        mmvar(1, :) = mmmean(:, 1) + mmstd(:, 1)
+      end if
+      end if
+      res2@gsnXYFillColors = (/1.0, 0.9, 0.9, 0.3/)  ; "lightpink"
+      plotvar6 = gsn_csm_xy(wks, results&bin, mmvar, res2)
+      overlay(plots(ii), plotvar6)
+      delete(mmvar)
+    end if
+
+    draw(plots(ii))
+    frame(wks)
+  end do
+
+end
diff --git a/esmvaltool/diag_scripts/clouds/clouds_pdf.ncl b/esmvaltool/diag_scripts/clouds/clouds_pdf.ncl
new file mode 100644
index 0000000000..bc94f08fc2
--- /dev/null
+++ b/esmvaltool/diag_scripts/clouds/clouds_pdf.ncl
@@ -0,0 +1,546 @@
+; CLOUDS_PDF
+; ############################################################################
+; Author: Axel Lauer (DLR, Germany)
+; PROJECT-NAME ESA-CCI CMUG
+; ############################################################################
+; Description
+;   Calculates temporal/spatial PDFs of 2-d (cloud) parameters for selected
+;   regions.
+;
+; Required diag_script_info attributes (diagnostic specific)
+;   xmin: min value for bins (x axis)
+;   xmax: max value for bins (x axis)
+;
+; Optional diag_script_info attributes (diagnostic specific)
+;   filename_add: optionally add this string to output filenames
+;   plot_average: show average frequency per bin
+;   region: show only selected geographic region given as latmin,
+;           latmax, lonmin, lonmax
+;   styleset: "CMIP5", "DEFAULT"
+;   ymin: min value for frequencies (%) (y axis)
+;   ymax: max value for frequencies (%) (y axis)
+;
+; Required variable attributes (variable specific)
+;   none
+;
+; Optional variable_info attributes (variable specific)
+;   reference_dataset: reference dataset
+;
+; Caveats
+;   none
+;
+; Modification history
+;   20190916-lauer_axel: written.
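+;
+; Example
+;   A hypothetical configuration (values are placeholders, for illustration
+;   only) as seen by this script via diag_script_info:
+;
+;     diag_script_info@xmin = 0.0
+;     diag_script_info@xmax = 100.0
+;     diag_script_info@region = (/-30.0, 30.0, 0.0, 360.0/)
+;     diag_script_info@styleset = "CMIP5"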
"") then + filename_add = "_" + diag_script_info@filename_add + else + filename_add = "" + end if + + plot_average = diag_script_info@plot_average + + ; make sure path for (mandatory) netcdf output exists + + work_dir = config_user_info@work_dir + "/" + ; Create work dir + system("mkdir -p " + work_dir) + + ref_ind = -1 ; set to invalid value + + ; if attribute is present, use it so datasets can be sorted + if (isvar("refname")) then + ; set reference model + ref_ind = ind(names .eq. refname) + if (ismissing(ref_ind)) then + log_info("warning: reference dataset (" + refname + ") not found.") + ref_ind = -1 + end if + end if + + climofiles = metadata_att_as_array(info0, "filename") + + outfile = "" + +end + +begin + ; ############ + ; # get data # + ; ############ + + plot = new((/dim_MOD/), graphic) + + ind_all_sorted = ispan(0, dim_MOD - 1, 1) ; create array + + if (ref_ind .ge. 0) then + ind_wo_ref = ind(names .ne. refname) + ind_all_sorted(0) = ref_ind + ind_all_sorted(1:dim_MOD - 1) = ind_wo_ref + end if + + bins = 20 + + data_all = new((/dim_MOD, bins/), float) + data_all!0 = "models" + data_all&models = names + + ; filenames for netcdf output + + nc_filename = work_dir + "clouds_pdf_" + var0 + filename_add + ".nc" + nc_filename@existing = "append" + + do ii = 0, dim_MOD - 1 + + imod = ind_all_sorted(ii) + log_info("processing " + names(imod)) + + if (isvar("data1")) then + delete(data1) + end if + + data1 = read_data(info0[imod]) + + ; check dimensions + + dims = getvardims(data1) + if (dimsizes(dims) .lt. 2) then + error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + \ + " dimensions, need 2 or 3") + end if + idx = ind(dims .eq. "lat") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lat dimension") + end if + idx = ind(dims .eq. 
"lon") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lon dimension") + end if + + ; if requested, select geographical region + + if (isatt(diag_script_info, "region")) then + region = diag_script_info@region + data1 := area_operations(data1, region(0), region(1), region(2), \ + region(3), "extract", False) + delete(region) + end if + + ; variable specific plotting settings + + res = True + + if (var0.eq."pr") then + ; convert from kg m-2 s-1 to mm day-1 + data1 = data1 * 86400.0 + data1@units = "mm day-1" + end if + + if (var0.eq."tas") then + ; convert from K to degC + data1 = data1 - 273.15 + data1@units = "degC" + end if + + if (var0.eq."clt") then + res@gsnHistogramClassIntervals = ispan(0, 100, 10) + end if + + ; ########################################### + ; # other Metadata: diag_script, var # + ; ########################################### + ; add to data1 as attributes without prefix + + if (isatt(data1, "diag_script")) then ; add to existing entries + temp = data1@diag_script + delete(data1@diag_script) + data1@diag_script = array_append_record(temp, (/DIAG_SCRIPT/), 0) + delete(temp) + else ; add as new attribute + data1@diag_script = (/DIAG_SCRIPT/) + end if + + if (isatt(variable_info[0], "long_name")) then + data1@var_long_name = variable_info[0]@long_name + end if + + data1@var = var0 + + if (isatt(variable_info[0], "units")) then + data1@var_units = variable_info[0]@units + else + data1@var_units = "" + end if + + ; ########################################### + ; # create the plot # + ; ########################################### + + ; function in aux_plotting.ncl + + if (ii.eq.0) then + wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_pdf_" + var0 \ + + filename_add) + wks_line = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_pdf_" + var0 \ + + filename_add + "_line") + end if + + res@gsnHistogramComputePercentages = True + res@gsnHistogramPercentSign = True + res@tiXAxisString = data1@var + " (" + data1@units + ")" + res@tmYLOn = False + res@tiYAxisOn = False + res@tiMainString = names(imod) + + plot(ii) = gsn_histogram(wks, ndtooned(data1), res) + + ; mandatory netcdf output + + opt = True + opt@bin_min = xmin + opt@bin_max = xmax + + data_out = pdfx(data1, bins, opt) + data_out@diag_script = (/DIAG_SCRIPT/) + data_out@var = var0 + "_pdf_" + names(imod) + nc_outfile = ncdf_write(data_out, nc_filename) + + if (ii .eq. 0) then + bin_min = data_out@bin_bound_min + bin_max = data_out@bin_bound_max + data_all!1 = var0 + data_all&$var0$ = data_out@bin_center + data_all@units = data_out@units + end if + + data_all(imod, :) = tofloat(data_out) + + delete(data_out) + + ; ======================================================================= + + end do ; ii-loop (models) + + ; sort plots if needed (observations go first) + + plottmp = ispan(0, dim_MOD - 1, 1) + plotind = plottmp + + ; move plots of observational datasets (if present) into the first line(s) + ; of the panel plot + + j = 0 + do i = 0, dimsizes(plottmp) - 1 + if (i.eq.ref_ind) then + plotind(j) = plottmp(i) + j = j + 1 + else if (plottmp(i) .lt. 
+  do i = 0, dimsizes(plottmp) - 1
+    if (i.eq.ref_ind) then
+      plotind(j) = plottmp(i)
+      j = j + 1
+    else if (plottmp(i) .lt. dimsizes(projects)) then
+      if (isStrSubset(str_lower(projects(plottmp(i))), "obs")) then
+        plotind(j) = plottmp(i)
+        j = j + 1
+      end if
+    end if
+    end if
+  end do
+
+  do i = 0, dimsizes(plottmp) - 1
+    if (.not.(isStrSubset(str_lower(projects(plottmp(i))), "obs") .or. \
+        (i.eq.ref_ind))) then
+      plotind(j) = plottmp(i)
+      j = j + 1
+    end if
+  end do
+
+  pres = True  ; needed to override panelling defaults
+
+  pres@gsnPanelFigureStrings = names(plotind)
+
+  pres@gsnPanelFigureStringsFontHeightF = min((/0.01, 0.01 * 6.0 \
+                                              / tofloat((dim_MOD + 1) / 2)/))
+  pres@lbLabelFontHeightF = min((/0.015, 0.01 * 6.0 \
+                                / tofloat((dim_MOD + 1) / 2)/))
+  pres@lbAutoManage = False
+  pres@lbTopMarginF = 0.1
+  pres@lbTitleOn = True
+  pres@lbTitleFontHeightF = min((/0.015, 0.01 * 6.0 \
+                                / tofloat((dim_MOD + 1) / 2)/))
+  pres@lbTitlePosition = "Bottom"
+  pres@lbTitleString = data1@long_name + " (" \
+                       + data1@units + ")"
+  pres@lbPerimOn = False  ; draw line around label bar area
+  pres@gsnPanelCenter = False
+  if (dim_MOD.le.8) then
+    pres@pmLabelBarOrthogonalPosF = -0.03
+  else
+    pres@pmLabelBarOrthogonalPosF = -0.01  ; shift label bar a bit to
+                                           ; the bottom
+  end if
+
+  pres@gsnPanelRowSpec = True  ; tell panel what order to plot
+  pres@gsnPanelYWhiteSpacePercent = 5
+  pres@gsnPanelXWhiteSpacePercent = 5
+  if (isatt(diag_script_info, "PanelTop")) then
+    top = tofloat(diag_script_info@PanelTop)
+  else
+    top = 0.99  ; default
+  end if
+  pres@gsnPanelTop = top
+
+  if (isvar("plotsperline")) then
+    delete(plotsperline)
+  end if
+
+  n = toint((dim_MOD + 1) / 4)
+  if (n .lt. 1) then
+    n = 1
+  end if
+  plotsperline = new(n, integer)
+  plotsperline = 4
+
+  if ((isStrSubset(str_lower(projects(plotind(0))), \
+      "obs")).and. \
+      .not.(isStrSubset(str_lower(projects(plotind(1))), \
+      "obs"))) then
+    plotsperline(0) = 1
+  end if
+
+  if (sum(plotsperline).gt.dimsizes(plotind)) then
+    plotsperline(dimsizes(plotsperline) - 1) = 1
+  end if
+
+  if (sum(plotsperline).lt.dimsizes(plotind)) then
+    xadd = 1
+    xtmp = array_append_record(plotsperline, xadd, 0)
+    delete(plotsperline)
+    plotsperline = xtmp
+    delete(xtmp)
+  end if
+
+  gsn_panel(wks, plot(plotind), plotsperline, pres)
+  outfile = wks@fullname
+
+  ; ========================================================================
+
+  ; line plot
+
+  colors = project_style(info0, diag_script_info, "colors")
+  dashes = project_style(info0, diag_script_info, "dashes")
+  thicks = project_style(info0, diag_script_info, "thicks")
+
+  delete(res)
+  res = True
+  res@gsnDraw = False   ; do not draw yet
+  res@gsnFrame = False  ; don't advance frame
+  res@xyLineColors = colors       ; line colors per dataset
+  res@xyLineThicknesses = thicks  ; line thicknesses per dataset
+  res@xyMonoDashPattern = False
+  res@xyDashPatterns = dashes
+  res@tiMainFontHeightF = .025  ; font height
+  res@gsnMaximize = True
+  res@vpHeightF = 0.4
+  res@vpWidthF = 0.8
+  if (isatt(diag_script_info, "ymin")) then
+    res@trYMinF = diag_script_info@ymin
+  else
+    res@trYMinF = min(data_all)
+  end if
+  if (isatt(diag_script_info, "ymax")) then
+    res@trYMaxF = diag_script_info@ymax
+  else
+    res@trYMaxF = max(data_all)
+  end if
+  res@tiXAxisString = data1@var + " (" + data1@units + ")"
+  res@tiYAxisString = "Frequency (" + data_all@units + ")"
+
+  lineplot = gsn_csm_xy(wks_line, data_all&$var0$, data_all, res)
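+  ; if requested via plot_average, overlay the multi-model average as a red
+  ; line together with a shaded band spanning the 10th to 90th percentile of
+  ; the individual models in each bin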
+  if (plot_average) then
+    ; model indices w/o MultiModelMean / MultiModelMedian
+    idxmod = get_mod(names, projects)
+    ; calculate mean and stddev
+    result_avg = dim_avg_n(data_all(idxmod, :), 0)
+    result_std = dim_stddev_n(data_all(idxmod, :), 0)
+
+    ; plot average + 10th/90th percentiles
+    res_std = True
+    res_std@gsnDraw = False   ; do not draw yet
+    res_std@gsnFrame = False  ; don't advance frame
+    res_std@gsnXYFillColors = (/1.0, 0.9, 0.9/)
+    res_std@xyLineColor = -1  ; Make lines transparent
+    mmstddev = new((/2, bins/), float)
+    mmp10 = new((/bins/), float)
+    mmp90 = new((/bins/), float)
+
+    do n = 0, bins - 1
+      selection = data_all(idxmod, n)
+      itmp = ind(.not.ismissing(selection))
+      if (.not. ismissing(itmp(0))) then
+        sorted = selection(itmp)
+        qsort(sorted)
+        i10 = toint(dimsizes(sorted) * 0.1 + 0.5)
+        i90 = toint(dimsizes(sorted) * 0.9 - 0.5)
+        mmp10(n) = sorted(i10)
+        mmp90(n) = sorted(i90)
+        delete(sorted)
+      else
+        mmp10(n) = mmp10@_FillValue
+        mmp90(n) = mmp90@_FillValue
+      end if
+      delete(selection)
+      delete(itmp)
+    end do
+
+    mmstddev(0, :) = mmp10(:)
+    mmstddev(1, :) = mmp90(:)
+    plotstd = gsn_csm_xy(wks_line, data_all&$var0$, mmstddev, res_std)
+
+    delete(mmstddev)
+
+    overlay(lineplot, plotstd)
+
+    res_avg = True
+    res_avg@gsLineColor = "red"
+    res_avg@gsLineThicknessF = 3
+
+    avgline = gsn_add_polyline(wks_line, plotstd, data_all&$var0$, \
+                               result_avg, res_avg)
+  end if
+
+  draw(lineplot)
+  frame(wks_line)
+
+  outfile_line = wks_line@fullname
+
+  ; ========================================================================
+
+  log_info("Wrote " + outfile)
+  log_info("Wrote " + outfile_line)
+
+  ; ------------------------------------------------------------------------
+  ; write provenance to netcdf output and plot file(s) (mean)
+  ; ------------------------------------------------------------------------
+
+  statistics = (/"clim", "pdf"/)
+  if (isatt(diag_script_info, "region")) then
+    domain = "reg"
+  else
+    domain = "global"
+  end if
+  plottype = "geo"
+
+  caption = "PDF of variable " + var0 + "."
+  log_provenance(nc_outfile, outfile, caption, statistics, \
+                 domain, plottype, "", "", climofiles)
+
+  ; ----------------------------------------------------------------------
+  ; write mmm and ref to additional netcdf
+  ; ----------------------------------------------------------------------
+
+  if (isvar("idxmod")) then
+    delete(idxmod)
+  end if
+  idxmod = get_mod(names, projects)
+  ; calculate mmm
+  if (dimsizes(idxmod) .gt. 1) then
+    mmmpdf = dim_avg_n_Wrap(data_all(idxmod, :), 0)
+
+    mmmpdf@diag_script = (/DIAG_SCRIPT/)
+    mmmpdf@var = var0 + "_pdf_mmm"
+    mmmpdf@datasets = str_join(names(idxmod), ", ")
+
+    if (var0 .eq. "clt") then
+      x = mmmpdf&$var0$
+      idx60 = ind(x .ge. 60.)
+      idx80 = ind(x .ge. 80.)
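+      ; for total cloud cover (clt), additionally store the cumulative
+      ; frequencies of clt >= 60 % and clt >= 80 % as netCDF attributes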
+      mmmpdf@cumulative_freq_ge_60 = sum(mmmpdf(idx60))
+      mmmpdf@cumulative_freq_ge_80 = sum(mmmpdf(idx80))
+    end if
+
+    nc_filename2 = work_dir + "clouds_pdf_" + var0 + filename_add + \
+      "_mmm_ref.nc"
+
+    if (ref_ind .ge. 0) then
+      refpdf = data_all(ref_ind, :)
+      refpdf@diag_script = (/DIAG_SCRIPT/)
+      refpdf@var = var0 + "_pdf_ref"
+      refpdf@datasets = names(ref_ind)
+      if (var0 .eq. "clt") then
+        refpdf@cumulative_freq_ge_60 = sum(refpdf(idx60))
+        refpdf@cumulative_freq_ge_80 = sum(refpdf(idx80))
+      end if
+      nc_outfile2 = ncdf_write(refpdf, nc_filename2)
+      nc_filename2@existing = "append"
+    end if
+
+    nc_outfile2 = ncdf_write(mmmpdf, nc_filename2)
+  end if
+
+  ; ========================================================================
+
+  leave_msg(DIAG_SCRIPT, "")
+
+end
diff --git a/esmvaltool/diag_scripts/clouds/clouds_scatter.ncl b/esmvaltool/diag_scripts/clouds/clouds_scatter.ncl
new file mode 100644
index 0000000000..996fcd7121
--- /dev/null
+++ b/esmvaltool/diag_scripts/clouds/clouds_scatter.ncl
@@ -0,0 +1,600 @@
+; CLOUDS_SCATTER
+; ############################################################################
+; Author: Axel Lauer (DLR, Germany)
+; ############################################################################
+; Description
+;   Calculates mean values of variable y per bin of variable x and displays
+;   the results as scatter plot.
+;
+; Required diag_script_info attributes (diagnostic specific)
+;   var_x: short name of variable on x-axis
+;   var_y: short name of variable on y-axis
+;   xmin: min x value for generating bins
+;   xmax: max x value for generating bins
+;
+; Optional diag_script_info attributes (diagnostic specific)
+;   filename_add: optionally add this string to plot filenames
+;   nbins: number of equally spaced bins (var_x), default = 20
+;   ymin_mm: min y value for plotting MultiModelMean
+;   ymax_mm: max y value for plotting MultiModelMean
+;
+; Required variable attributes (variable specific)
+;   none
+;
+; Optional variable_info attributes (variable specific)
+;   reference_dataset: reference dataset
+;
+; Caveats
+;   none
+;
+; Modification history
+;   20230117-lauer_axel: added support for ICON (code from Manuel)
+;   20210210-lauer_axel: written.
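+;
+; Example
+;   A hypothetical configuration (variable names and limits are placeholders,
+;   for illustration only) as seen by this script via diag_script_info:
+;
+;     diag_script_info@var_x = "clt"
+;     diag_script_info@var_y = "swcre"
+;     diag_script_info@xmin = 0.0
+;     diag_script_info@xmax = 100.0
+;     diag_script_info@nbins = 20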
"totalcwp") then + calctcwp = True + varlist = (/var_x, "clivi", "lwp"/) + else + varlist = (/var_x, var_y/) + end if + end if + idx = new(dimsizes(varlist), integer) + + nVAR = dimsizes(varlist) + refname = new(nVAR, string) + + do i = 0, nVAR - 1 + idx(i) = ind(variables .eq. varlist(i)) + end do + + log_info("++++++++++++++++++++++++++++++++++++++++++") + log_info(DIAG_SCRIPT + " (var: " + variables(idx) + ")") + log_info("++++++++++++++++++++++++++++++++++++++++++") + + if (any(ismissing(idx))) then + errstr = "diagnostic " + diag + " requires the following variable(s): " \ + + str_join(varlist, ", ") + error_msg("f", DIAG_SCRIPT, "", errstr) + end if + + ; save input files for writing provenance + + infiles = metadata_att_as_array(input_file_info, "filename") + + ; get reference datasets (if present) and check that number of datasets + ; is equal for each variable + + do i = 0, nVAR - 1 + var = variables(idx(i)) + var_info = select_metadata_by_name(variable_info, var) + var_info := var_info[0] + if (isatt(var_info, "reference_dataset")) then + refname(i) = var_info@reference_dataset + end if + info = select_metadata_by_name(input_file_info, var) + if (i .eq. 0) then + dim_MOD = ListCount(info) + else + dim_test = ListCount(info) + if (dim_test .ne. dim_MOD) then + error_msg("f", DIAG_SCRIPT, "", "number of datasets for variable " \ + + var + " does not match number of datasets for " \ + + variables(idx(0))) + end if + end if + delete(info) + delete(var) + delete(var_info) + end do + + ; Set default values for non-required diag_script_info attributes + + set_default_att(diag_script_info, "filename_add", "") + set_default_att(diag_script_info, "nbins", 20) + + if (diag_script_info@filename_add .ne. "") then + filename_add = "_" + diag_script_info@filename_add + else + filename_add = "" + end if + + nbins = toint(diag_script_info@nbins) + + ; make sure path for (mandatory) netcdf output exists + + work_dir = config_user_info@work_dir + "/" + ; Create work dir + system("mkdir -p " + work_dir) + +end + +begin + ; ############ + ; # get data # + ; ############ + + info_x = select_metadata_by_name(input_file_info, varlist(0)) + names_x = metadata_att_as_array(info_x, "dataset") + projects_x = metadata_att_as_array(info_x, "project") + info_y = select_metadata_by_name(input_file_info, varlist(1)) + names_y = metadata_att_as_array(info_y, "dataset") + projects_y = metadata_att_as_array(info_y, "project") + + refidx_x = ind(names_x .eq. refname(0)) + refidx_y = ind(names_y .eq. refname(1)) + + if (ismissing(refidx_x) .or. ismissing(refidx_y)) then + refidx_x = -1 + refidx_y = -1 + end if + + result_avg = new((/dim_MOD, nbins/), float) + result_std = new((/dim_MOD, nbins/), float) + bincenter = new((/nbins/), float) + + xmax = diag_script_info@xmax + xmin = diag_script_info@xmin + binsize = tofloat(xmax - xmin) / nbins + + do n = 0, nbins - 1 + x0 = n * binsize + x1 = x0 + binsize + bincenter(n) = xmin + 0.5 * (x0 + x1) + end do + + do ii = 0, dim_MOD - 1 + atts_x = True + atts_x@short_name = varlist(0) + + atts_y = True + atts_y@short_name = varlist(1) + + ; reference datasets may have different names + if (ii .eq. 
+    if (ii .eq. refidx_x) then
+      atts_y@dataset = refname(1)
+      atts_x@dataset = refname(0)
+    ; all other datasets: force same dataset name for var_x and var_y
+    else
+      atts_y@dataset = names_x(ii)
+      atts_x@dataset = names_x(ii)
+    end if
+
+    ; read var_x
+
+    info = select_metadata_by_atts(input_file_info, atts_x)
+    x = read_data(info[0])
+    delete(info)
+
+    ; read var_y
+
+    info = select_metadata_by_atts(input_file_info, atts_y)
+    y = read_data(info[0])
+    delete(info)
+
+    if (calcicefrac) then
+      atts_y@short_name = varlist(2)
+      info = select_metadata_by_atts(input_file_info, atts_y)
+      z = read_data(info[0])
+      delete(info)
+      min_mass = 1.0e-6
+      ; filter valid values (needed for some models)
+      y = where(y .lt. 0.0, y@_FillValue, y)
+      y = where(isnan_ieee(y), y@_FillValue, y)
+      z = where(z .lt. 0.0, z@_FillValue, z)
+      z = where(isnan_ieee(z), z@_FillValue, z)
+      mass = y + z
+      delete(z)
+      mass = where(mass .lt. min_mass, mass@_FillValue, mass)
+
+      ; ice fraction = ice / (ice + lwp) * 100%
+      y = 100.0 * y / mass
+      delete(mass)
+
+      y@units = "%"
+      y@long_name = "cloud ice fraction"
+      y@var = "columnicefrac"
+    end if
+
+    ; calculate total cloud water path as sum of liquid water path (lwp)
+    ; and ice water path (clivi);
+    ; we do not use the CMOR variable clwvi directly as this variable
+    ; erroneously contains only cloud liquid water for some models
+
+    if (calctcwp) then
+      atts_y@short_name = varlist(2)
+      info = select_metadata_by_atts(input_file_info, atts_y)
+      z = read_data(info[0])
+      delete(info)
+      y = y + z
+      delete(z)
+      y@long_name = "Condensed Water Path"
+      y@var = "totalcwp"
+    end if
+
+    ; check dimensions
+
+    dims_x = dimsizes(x)
+    dims_y = dimsizes(y)
+
+    dimerror = False
+
+    if (dimsizes(dims_x) .eq. dimsizes(dims_y)) then
+      if (any(dims_x - dims_y .ne. 0)) then
+        dimerror = True
+      end if
+    else
+      dimerror = True
+    end if
+
+    if (dimerror) then
+      error_msg("f", DIAG_SCRIPT, "", "dimensions of datasets " \
+                + atts_x@dataset + " (variable " + var_x + ") and " \
+                + atts_y@dataset + " (variable " + var_y + ") do not match.")
+    end if
+
+    ; check number of dimensions
+
+    if ((dimsizes(dims_x) .ne. dimsizes(dims_y)) .or. \
+        (dimsizes(dims_x) .lt. 3) .or. (dimsizes(dims_x) .gt. 4)) then
+      error_msg("f", DIAG_SCRIPT, "", "all variables need to have the " + \
+                "same number of dimensions (time, [optional: level], " + \
+                "latitude, longitude)")
+    end if
+
+    do i = 0, nVAR - 1
+      var = variables(idx(i))
+      if (var .eq. varlist(0)) then
+        dims = getvardims(x)
+      else
+        dims = getvardims(y)
+      end if
+      testidx = ind(dims .eq. "lon")
+      if (ismissing(testidx)) then
+        error_msg("f", DIAG_SCRIPT, "", var + ": no lon dimension")
+      end if
+      testidx = ind(dims .eq. "lat")
+      if (ismissing(testidx)) then
+        error_msg("f", DIAG_SCRIPT, "", var + ": no lat dimension")
+      end if
+      testidx = ind(dims .eq. "time")
+      if (ismissing(testidx)) then
+        error_msg("f", DIAG_SCRIPT, "", var + ": no time dimension")
+      end if
+      delete(dims)
+    end do
+
+    delete(dims_x)
+    delete(dims_y)
+    delete(testidx)
+
+    ref_ind = refidx_x
+    if (ismissing(ref_ind)) then
+      ref_ind = -1
+    end if
+    names = names_x
+    projects = projects_x
+
+    if (refidx_x .ge. 0) then
+      ; if reference datasets for var_x and var_y are from different sources
+      if (refname(0) .ne. refname(1)) then
+        names(refidx_x) = refname(0) + "/" + refname(1)
+      end if
+    end if
+
+    ; save attributes long_name and units
+    long_name = y@long_name
+    units = y@units
+    xunits = x@units
+
+    x1d = ndtooned(x)
+    delete(x)
+    y1d = ndtooned(y)
+    delete(y)
+
+    do n = 0, nbins - 1
+      x0 = xmin + n * binsize
+      x1 = x0 + binsize
+      idx0 = ind((x1d .gt. x0) .and. (x1d .le. x1))
+      if (.not.all(ismissing(idx0))) then
+        result_avg(ii, n) = avg(y1d(idx0))
+        result_std(ii, n) = stddev(y1d(idx0))
+      else
+        result_avg(ii, n) = result_avg@_FillValue
+        result_std(ii, n) = result_std@_FillValue
+      end if
+      delete(idx0)
+    end do
+
+    delete(x1d)
+    delete(y1d)
+  end do  ; ii-loop (models)
+
+  ; if multiple models are present, calculate standard deviation of all models
+
+  ; find all indices of models w/o MultiModelMean/MultiModelMedian (if present)
+
+  idxmod = get_mod(names, projects)
+
+  if (idxmod(0) .eq. -1) then
+    flag_multimod = False
+    mm_ind = -1
+  elseif (dimsizes(idxmod) .eq. 1) then
+    flag_multimod = False
+    mm_ind = -1
+  else
+    flag_multimod = True
+    mmavg = new((/1, nbins/), float)
+    mmstd = new((/1, nbins/), float)
+    mmp10 = new((/1, nbins/), float)
+    mmp90 = new((/1, nbins/), float)
+    do n = 0, nbins - 1
+      mmavg(0, n) = avg(result_avg(idxmod, n))
+      mmstd(0, n) = stddev(result_avg(idxmod, n))
+      selection = result_avg(idxmod, n)
+      itmp = ind(.not.ismissing(selection))
+      if (.not. ismissing(itmp(0))) then
+        sorted = selection(itmp)
+        qsort(sorted)
+        i10 = toint(dimsizes(sorted) * 0.1 + 0.5)
+        i90 = toint(dimsizes(sorted) * 0.9 - 0.5)
+        mmp10(0, n) = sorted(i10)
+        mmp90(0, n) = sorted(i90)
+        delete(sorted)
+      else
+        mmp10(0, n) = mmp10@_FillValue
+        mmp90(0, n) = mmp90@_FillValue
+      end if
+      delete(selection)
+      delete(itmp)
+    end do
+    mm_ind = dim_MOD
+    dim_MOD = dim_MOD + 1
+    result_avg := array_append_record(result_avg, mmavg, 0)
+    result_std := array_append_record(result_std, mmstd, 0)
+    result_std(dim_MOD - 1, :) = 0.0
+    names := array_append_record(names, (/"Multi-model average"/), 0)
+  end if
+
+  ; ###########################################
+  ; # netCDF output                           #
+  ; ###########################################
+
+  nc_filename = work_dir + "clouds_scatter_" + var_x + "_" + var_y + \
+    filename_add + ".nc"
+
+  result_avg!0 = "model"
+  result_avg!1 = "bin"
+
+  result_avg&model = str_sub_str(names, "/", "-")
+  result_avg&bin = bincenter
+
+  result_avg@diag_script = (/DIAG_SCRIPT/)
+  result_avg@var = var_y
+  result_avg@var_long_name = long_name
+  result_avg@var_units = units
+
+  nc_outfile = ncdf_write(result_avg, nc_filename)
+
+  ; ###########################################
+  ; # create the plots                        #
+  ; ###########################################
+
+  plots = new(dim_MOD, graphic)
+  stdbar = new((/nbins, dim_MOD/), graphic)
+  centers = new((/nbins, dim_MOD/), graphic)
+  centersout = new((/nbins, dim_MOD/), graphic)
+  stdbarR = new((/nbins, dim_MOD/), graphic)
+  centersR = new((/nbins, dim_MOD/), graphic)
+  centersRout = new((/nbins, dim_MOD/), graphic)
+  res = True
+
+  wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_scatter_" + \
+                var_x + "_" + var_y + filename_add)
+
+  data = new((/2, nbins/), float)
+  if (ref_ind .ge. 0) then
+    data(0, :) = result_avg(ref_ind, :)
+  else
+    data(0, :) = data@_FillValue
+  end if
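+  ; loop over all datasets and create one panel per dataset; the reference
+  ; dataset is skipped here since it is shown in every panel (in black) for
+  ; comparison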
+  do ii = 0, dim_MOD - 1
+    if (ii .eq. refidx_x) then
+      continue
+    end if
+
+    res@gsnDraw = False   ; do not draw yet
+    res@gsnFrame = False  ; don't advance frame
+
+    res@xyMarkLineMode = "MarkLines"
+    res@xyDashPatterns = (/0., 0./)
+    res@tmLabelAutoStride = True
+    res@xyLineThicknesses = (/2.0, 2.0/)
+    res@xyLineColors = (/"black", "red"/)
+    res@tiMainFontHeightF = 0.025
+    res@tiYAxisFontHeightF = 0.025
+    res@tiXAxisFontHeightF = 0.025
+    res@tiXAxisString = var_x + " (" + xunits + ")"
+    res@tiYAxisString = var_y + " (" + units + ")"
+
+    if (ii .eq. mm_ind) then
+      if (isatt(diag_script_info, "ymin_mm")) then
+        res@trYMinF = diag_script_info@ymin_mm
+      end if
+      if (isatt(diag_script_info, "ymax_mm")) then
+        res@trYMaxF = diag_script_info@ymax_mm
+      end if
+    else
+      if (isatt(res, "trYMinF")) then
+        delete(res@trYMinF)
+      end if
+      if (isatt(res, "trYMaxF")) then
+        delete(res@trYMaxF)
+      end if
+    end if
+
+    polyres = True
+    polyres@gsMarkerSizeF = 0.01
+    polyres@gsLineColor = "red"
+    polyres@gsLineThicknessF = 1.0
+
+    polyresRef = True
+    polyresRef@gsMarkerSizeF = 0.01
+    polyresRef@gsLineColor = "black"
+    polyresRef@gsLineThicknessF = 1.0
+
+    data(1, :) = result_avg(ii, :)
+
+    res@tiMainString = names(ii)
+
+    plots(ii) = gsn_csm_xy(wks, result_avg&bin, data, res)
+
+    if (ii .eq. mm_ind) then
+      res_std = True
+      res_std@gsnDraw = False   ; do not draw yet
+      res_std@gsnFrame = False  ; don't advance frame
+      res_std@gsnXYFillColors = (/1.0, 0.9, 0.9/)  ; "lightpink"
+      res_std@xyLineColor = -1  ; Make lines transparent
+      mmstddev = new((/2, nbins/), float)
+      mmstddev(0, :) = mmp10(0, :)
+      mmstddev(1, :) = mmp90(0, :)
+      plotstd = gsn_csm_xy(wks, result_avg&bin, mmstddev, res_std)
+      delete(mmstddev)
+      overlay(plots(ii), plotstd)
+    end if
+
+    do i = 0, nbins - 1
+      y0 = result_avg(ii, i)
+      if (.not.ismissing(y0)) then
+        x0 = result_avg&bin(i)
+        stdbar(i, ii) = gsn_add_polyline(wks, plots(ii), (/x0, x0/), \
+                                         (/y0 + result_std(ii, i), y0 - \
+                                         result_std(ii, i)/), polyres)
+        polyres@gsMarkerIndex = 16
+        polyres@gsMarkerColor = "red"
+        centers(i, ii) = gsn_add_polymarker(wks, plots(ii), (/x0, x0/), \
+                                            (/y0, y0/), polyres)
+        polyres@gsMarkerIndex = 4
+        polyres@gsMarkerColor = "black"
+        centersout(i, ii) = gsn_add_polymarker(wks, plots(ii), (/x0, x0/), \
+                                               (/y0, y0/), polyres)
+      end if
+      ; reference dataset (if present): error bars and bin averages in black
+      if (ref_ind .ge. 0) then
+        y0 = result_avg(ref_ind, i)
+        if (.not.ismissing(y0)) then
+          x0 = result_avg&bin(i)
+          stdbarR(i, ii) = gsn_add_polyline(wks, plots(ii), (/x0, x0/), \
+                                            (/y0 + result_std(ref_ind, i), \
+                                            y0 - result_std(ref_ind, i)/), \
+                                            polyresRef)
+          polyresRef@gsMarkerIndex = 16
+          polyresRef@gsMarkerColor = "white"
+          centersR(i, ii) = gsn_add_polymarker(wks, plots(ii), (/x0, x0/), \
+                                               (/y0, y0/), polyresRef)
+          polyresRef@gsMarkerIndex = 4
+          polyresRef@gsMarkerColor = "black"
+          centersRout(i, ii) = gsn_add_polymarker(wks, plots(ii), \
+                                                  (/x0, x0/), \
+                                                  (/y0, y0/), polyresRef)
+        end if
+      end if
+    end do
+
+    draw(plots(ii))
+    frame(wks)
+
+  end do
+
+  pres = True  ; needed to override panelling defaults
+  pres@gsnPanelCenter = False
+
+  idx0 = ind(.not.ismissing(plots))
+  n = dimsizes(idx0)
+  pres@gsnPanelFigureStrings = names(idx0)
+  pres@gsnPanelFigureStringsFontHeightF = min((/0.008, 0.008 * 6.0 \
+                                              / tofloat((dim_MOD + 1) / 2)/))
+  pres@lbLabelFontHeightF = min((/0.01, 0.01 * 6.0 \
+                                / tofloat((dim_MOD + 1) / 2)/))
+  outfile = panelling(wks, plots(idx0), (n + 3) / 4, 4, pres)
+  delete(idx0)
+
+  log_info("Wrote " + outfile)
+
+  ; ==========================================================================
+
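+  ; ------------------------------------------------------------------------
+  ; Note: the shaded multi-model band above uses a simple nearest-rank
+  ; estimate of the 10th/90th percentiles. A minimal, self-contained sketch
+  ; of that estimator (the values are made up for illustration):
+  ;
+  ;   vals = (/3.0, 1.0, 2.0, 5.0, 4.0/)
+  ;   qsort(vals)                               ; -> (/1, 2, 3, 4, 5/)
+  ;   i10 = toint(dimsizes(vals) * 0.1 + 0.5)   ; = 1 -> vals(1) = 2.0
+  ;   i90 = toint(dimsizes(vals) * 0.9 - 0.5)   ; = 4 -> vals(4) = 5.0
+  ; ------------------------------------------------------------------------
+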
+  ; ----------------------------------------------------------------------
+  ; write provenance to netcdf output (and plot file)
+  ; ----------------------------------------------------------------------
+
+  statistics = (/"clim", "mean"/)
+  domain = "reg"
+  plottype = "scatter"
+  caption = "Scatterplot of " + var_x + " (x) vs. " + var_y + " (y)."
+  log_provenance(nc_outfile, outfile, caption, statistics, \
+                 domain, plottype, "", "", infiles)
+
+  ; ----------------------------------------------------------------------
+  ; write mmm and ref to additional netcdf
+  ; ----------------------------------------------------------------------
+
+  if ((mm_ind .ge. 0) .and. (ref_ind .ge. 0)) then
+    mmm = result_avg(mm_ind, :)
+    ref = result_avg(ref_ind, :)
+
+    mmm@var = var_y + "_mmm"
+    ref@var = var_y + "_ref"
+
+    ratio = mmm
+    ratio = ratio / ref
+    ratio@average = avg(ratio)
+    ratio@var = var_y + "_ratio"
+
+    nc_filename2 = work_dir + "clouds_scatter_" + var_x + "_" + var_y + \
+      filename_add + "_ref_mmm_ratio.nc"
+
+    nc_outfile2 = ncdf_write(mmm, nc_filename2)
+    nc_filename2@existing = "append"
+    nc_outfile2 = ncdf_write(ref, nc_filename2)
+    nc_outfile2 = ncdf_write(ratio, nc_filename2)
+  end if
+
+  ; ----------------------------------------------------------------------
+
+  leave_msg(DIAG_SCRIPT, "")
+
+end
diff --git a/esmvaltool/diag_scripts/clouds/clouds_seasonal_cycle.ncl b/esmvaltool/diag_scripts/clouds/clouds_seasonal_cycle.ncl
new file mode 100644
index 0000000000..1b6ae7fdad
--- /dev/null
+++ b/esmvaltool/diag_scripts/clouds/clouds_seasonal_cycle.ncl
@@ -0,0 +1,500 @@
+; CLOUDS_SEASONAL_CYCLE
+; ############################################################################
+; Author: Axel Lauer (DLR, Germany)
+; ############################################################################
+; Description
+;   Calculates the amplitude and month of maximum values from the
+;   climatological mean seasonal cycle.
+;
+; Required diag_script_info attributes (diagnostic specific)
+;   none
+;
+; Optional diag_script_info attributes (diagnostic specific)
+;   colormap: e.g., WhiteBlueGreenYellowRed, rainbow
+;   epsilon: "epsilon" value to be replaced with missing values
+;   explicit_cn_levels: use these contour levels for plotting
+;   filename_add: optionally add this string to plot filenames
+;   projection: map projection, e.g., Mollweide, Mercator
+;   var: short_name of variable to process (default = "" i.e. use
+;        first variable in variable list)
+;
+; Required variable_info attributes (variable specific)
+;   none
+;
+; Optional variable_info attributes (variable specific)
+;   long_name: description of variable
+;   reference_dataset: name of reference dataset
+;
+; Caveats
+;   none
+;
+; Modification history
+;   20230117-lauer_axel: added support for ICON (code from Manuel)
+;   20210415-lauer_axel: written.
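+;
+; Example
+;   A hypothetical configuration (values are illustrative only) as seen by
+;   this script via diag_script_info:
+;
+;     diag_script_info@colormap = "WhiteBlueGreenYellowRed"
+;     diag_script_info@projection = "Robinson"
+;     diag_script_info@var = "clt"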
+;
+; ############################################################################
+
+; #####################################
+; # load external NCL code, if needed #
+; #####################################
+
+; A temporary file written by the invoking Python script
+; Passes on a number of variables from Python to NCL
+
+load "$diag_scripts/../interface_scripts/interface.ncl"
+
+load "$diag_scripts/shared/plot/aux_plotting.ncl"
+load "$diag_scripts/shared/statistics.ncl"
+load "$diag_scripts/shared/plot/style.ncl"
+load "$diag_scripts/shared/plot/contour_maps.ncl"
+load "$diag_scripts/shared/dataset_selection.ncl"
+
+begin
+  enter_msg(DIAG_SCRIPT, "")
+
+  set_default_att(diag_script_info, "colormap", "BkBlAqGrYeOrReViWh200")
+  set_default_att(diag_script_info, "epsilon", 1.0e-4)
+  set_default_att(diag_script_info, "filename_add", "")
+  set_default_att(diag_script_info, "projection", "CylindricalEquidistant")
+  set_default_att(diag_script_info, "var", "")
+
+  if (diag_script_info@var .eq. "") then
+    var0 = variable_info[0]@short_name
+  else
+    var0 = diag_script_info@var
+  end if
+
+  variables = metadata_att_as_array(variable_info, "short_name")
+  if (.not. any(variables .eq. var0)) then
+    errstr = "diagnostic " + DIAG_SCRIPT + \
+             " requires the following variable: " + var0
+    error_msg("f", DIAG_SCRIPT, "", errstr)
+  end if
+
+  var0_info = select_metadata_by_name(variable_info, var0)
+  var0_info := var0_info[0]
+  info0 = select_metadata_by_name(input_file_info, var0)
+  dim_MOD = ListCount(info0)
+
+  if (isatt(var0_info, "reference_dataset")) then
+    refname = var0_info@reference_dataset
+  end if
+
+  names = metadata_att_as_array(info0, "dataset")
+  projects = metadata_att_as_array(info0, "project")
+  infiles = metadata_att_as_array(info0, "filename")
+
+  log_info("++++++++++++++++++++++++++++++++++++++++++")
+  log_info(DIAG_SCRIPT + " (var: " + var0 + ")")
+  log_info("++++++++++++++++++++++++++++++++++++++++++")
+
+  ; make sure path for (mandatory) netcdf output exists
+
+  work_dir = config_user_info@work_dir + "/"
+  ; Create work dir
+  system("mkdir -p " + work_dir)
+
+  ref_ind = -1  ; set to invalid value
+
+  ; if reference dataset has been defined, use it so plots can be sorted
+
+  if (isvar("refname")) then
+    ref_ind = ind(names .eq. refname)
+    if (ismissing(ref_ind)) then
+      ref_ind = -1
+    end if
+  end if
+
+  if (diag_script_info@filename_add .ne. "") then
+    filename_add = "_" + diag_script_info@filename_add
+  else
+    filename_add = ""
+  end if
+
+  ; find indices of all OBS and obs4mips datasets (including "native6" ERA5)
+
+  idxobs = get_obs(names, projects, "")
+
+  if (idxobs(0) .eq. -1) then
+    flag_multiobs = False
+  else
+    flag_multiobs = True
+  end if
+
+  ; find all indices of models w/o MultiModelMean/MultiModelMedian (if present)
+
+  idxmod = get_mod(names, projects)
+
+  if (idxmod(0) .eq. -1) then  ; no model found
+    flag_multimod = False
+  elseif (dimsizes(idxmod) .eq. 1) then  ; one model found
+    flag_multimod = False
+  else  ; more than one model found
+    flag_multimod = True
+  end if
+
+end
+
+begin
+  ind_all_sorted = ispan(0, dim_MOD - 1, 1)  ; create array
+
+  if (ref_ind .ge. 0) then
+    ind_wo_ref = ind(names .ne. refname)
+    ind_all_sorted(0) = ref_ind
+    ind_all_sorted(1:dim_MOD - 1) = ind_wo_ref
+  end if
+
+  maps = new(dim_MOD, graphic)  ; collect individual maps in a graphic array
+
+  ; ###########################################
+  ; # get data and average time               #
+  ; ###########################################
+
+  do ii = 0, dim_MOD - 1
+
+    imod = ind_all_sorted(ii)
+
+    if (isvar("data1")) then
+      delete(data1)
+    end if
+
+    log_info("processing " + names(imod))
+
+    if (isvar("A0")) then
+      delete(A0)
+    end if
+
+    A0 = read_data(info0[imod])
+
+    ; check dimensions
+
+    dims = getvardims(A0)
+    if (dimsizes(dims) .lt. 2) then
+      error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + \
+                " dimensions, need 2 or 3")
+    end if
+    idx = ind(dims .eq. "lat")
+    if (ismissing(idx)) then
+      error_msg("f", DIAG_SCRIPT, "", "no lat dimension")
+    end if
+    idx = ind(dims .eq. "lon")
+    if (ismissing(idx)) then
+      error_msg("f", DIAG_SCRIPT, "", "no lon dimension")
+    end if
+
+    ; calculate climatological seasonal cycle from time series
+
+    if (isvar("timeseries")) then
+      delete(timeseries)
+    end if
+
+    timeseries = time_operations(A0, -1, -1, "average", "monthlyclim", True)
+
+    ; calculate amplitude at each grid cell
+
+    monmean = dim_avg_n_Wrap(timeseries, 0)
+    monmean = where(abs(monmean) .le. diag_script_info@epsilon, \
+                    monmean@_FillValue, monmean)
+
+    monmin = monmean
+    monmax = monmin
+
+    do i = 0, 11
+      monmin = where(timeseries(i, :, :) .lt. monmin, timeseries(i, :, :), \
+                     monmin)
+      monmax = where(timeseries(i, :, :) .gt. monmax, timeseries(i, :, :), \
+                     monmax)
+    end do
+
+;    monind = dim_maxind(timeseries, 0)
+
+;    data1 = 1.0 + monind
+    data1 = 100.0 * abs((monmax - monmin) / monmean)
+    copy_VarMeta(monmin, data1)
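+    ; relative amplitude in percent: 100 * |(max - min) / annual mean|;
+    ; e.g., monthly climatological cloud cover ranging from 40 % to 60 %
+    ; around a 50 % annual mean gives an amplitude of 40 %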
Use preprocessor to regrid data to " \ + + "common grid.") + end if + multimod_all(iidx, :, :) = data1 + same_grid = True + end if + end if + + ; ########################################### + ; # Style dependent annotation # + ; ########################################### + ; retrieve unique strings describing the data + ; function in ./diag_scripts/lib/ncl/style.ncl + + annots = project_style(info0, diag_script_info, "annots") + + ; ########################################### + ; # plot ressources # + ; ########################################### + + res = True + + res@cnFillOn = True ; color plot desired + res@cnLineLabelsOn = False ; contour lines + res@cnLinesOn = False + + ; colors + ; http://www.ncl.ucar.edu/Document/Graphics/color_table_gallery.shtml + + if (isdefined("pal")) then + delete(pal) + end if + pal = read_colormap_file(diag_script_info@colormap) +; pal = read_colormap_file("Cat12") + ; annotation + + res@tiMainString = names(imod) + res@tiMainFontHeightF = 0.025 + res@gsnStringFontHeightF = 0.02 + res@cnLevelSelectionMode = "ExplicitLevels" + + if (diag_script_info@projection.eq."Robinson") then + res@mpPerimOn = False ; turn off perimeter around map + res@mpGridLineColor = -1 + res@mpGridAndLimbOn = True + end if + + res@mpOutlineOn = True + res@mpFillOn = False + + ; variable specific plotting settings + + res@cnLevels = ispan(10, 200, 10) +; res@cnLevels = ispan(2, 12, 1) +; res@lbLabelStrings = (/"J", "F", "M", "A", "M", "J", "J", "A", "S", \ +; "O", "N", "D"/) +; res@lbLabelAlignment = "BoxCenters" + + nboxes = dimsizes(res@cnLevels) + clen = dimsizes(pal) + stride = max((/1, ((clen(0) - 1) - 2) / nboxes /)) + fill_colors = ispan(1, clen(0) - 1, stride) + res@cnFillColors = pal(fill_colors, :) + + res@gsnRightString = "" + res@gsnLeftString = "" + res@gsnCenterString = "" + + gavg = area_operations(data1, -90., 90., 0., 360., "average", True) + + if (.not.ismissing(gavg)) then + res@gsnLeftString = "mean = " + sprintf("%6.3f", gavg) + else + res@gsnLeftString = "" + end if + + ; map attributes + + res@mpFillDrawOrder = "PostDraw" ; draw map last + res@cnMissingValFillColor = "Gray" +; res@cnFillMode = "RasterFill" ; Raster Mode + + ; no tickmarks and no labels + + res@tmYLLabelsOn = False + res@tmYLOn = False + res@tmYRLabelsOn = False + res@tmYROn = False + res@tmXBLabelsOn = False + res@tmXBOn = False + res@tmXTLabelsOn = False + res@tmXTOn = False + res@cnInfoLabelOn = False ; turn off cn info label + + res@mpProjection = diag_script_info@projection + + ; set explicit contour levels + + if (isatt(diag_script_info, "explicit_cn_levels")) then + res@cnLevelSelectionMode = "ExplicitLevels" + res@cnLevels = diag_script_info@explicit_cn_levels + end if + + ; ########################################### + ; # other Metadata: diag_script, var # + ; ########################################### + ; add to data1, as attributes without prefix + + if (isatt(data1, "diag_script")) then ; add to existing entries + temp = data1@diag_script + delete(data1@diag_script) + data1@diag_script = array_append_record(temp, (/DIAG_SCRIPT/), 0) + delete(temp) + else ; add as new attribute + data1@diag_script = (/DIAG_SCRIPT/) + end if + data1@var = var0 ; Overwrite existing entry + if (isatt(var0_info, "long_name")) then + data1@long_name = var0_info@long_name + else + data1@long_name = var0 + end if + data1@units = "%" +; data1@units = "month" + + ; copy attributes for netCDF output + + data1@long_name = "interannual variability " + data1@long_name + + ; 
########################################### + ; # create the plot # + ; ########################################### + + res@lbTitleString = data1@units + res@lbTitlePosition = "Bottom" + res@lbTitleFontHeightF = 0.02 + res@lbLabelFontHeightF = 0.02 + + ; function in aux_plotting.ncl + + if (ii.eq.0) then + wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_seasonal_cycle_" \ + + var0 + filename_add) + end if + + maps(ii) = gsn_csm_contour_map(wks, data1, res) + + ; ######################################### + ; # output all datasets to common netCDF # + ; ######################################### + + nc_filename = work_dir + "clouds_seasonal_cycle_" + var0 + ".nc" + nc_filename@existing = "append" + data1@var = var0 + "_var_" + annots(imod) + nc_outfile = ncdf_write(data1, nc_filename) + + end do ; ii-loop (datasets) + + ; create panel plot + + pres = True ; needed to override + ; panelling defaults + ; print dataset name on each panel + pres@gsnPanelFigureStrings = annots(ind_all_sorted) + pres@gsnPanelFigureStringsFontHeightF = 0.007 + pres@lbLabelFontHeightF = 0.01 + pres@lbAutoManage = False + pres@lbTopMarginF = 0.1 + pres@lbPerimOn = False ; draw line around label + ; bar area + pres@gsnPanelCenter = False + pres@pmLabelBarOrthogonalPosF = -0.01 ; shift label bar a bit to + ; the bottom + outfile = panelling(wks, maps, (dim_MOD + 3) / 4, 4, pres) + + ; plot multi-obs and multi-model average (if requested) + + if (flag_multiobs) then + multiobs = dim_avg_n(multiobs_all, 0) + delete(multiobs_all) + copy_VarMeta(data1, multiobs) + gavg = area_operations(multiobs, -90., 90., 0., 360., "average", True) + res@gsnLeftString = "mean = " + sprintf("%6.3f", gavg) + res@tiMainString = "Multi-obs average" + map_multiobs = gsn_csm_contour_map(wks, multiobs, res) + end if + + if (flag_multimod) then + multimod = dim_avg_n(multimod_all, 0) + delete(multimod_all) + copy_VarMeta(data1, multimod) + + mask1 = multiobs + mask2 = multimod + mask1 = where(.not.ismissing(mask1), 0., mask1@_FillValue) + mask2 = where(.not.ismissing(mask2), 0., mask2@_FillValue) + amask = mask1 + mask2 + delete(mask1) + delete(mask2) + refmasked = multiobs + refmasked = refmasked + amask + datmasked = multimod + datmasked = datmasked + amask + delete(amask) + corr = calculate_metric(refmasked, datmasked, "correlation") + gavg = area_operations(datmasked, -90., 90., 0., 360., "average", True) + rmsd = calculate_metric(refmasked, datmasked, "RMSD") + + delete(refmasked) + delete(datmasked) + +; gavg = area_operations(multimod, -90., 90., 0., 360., "average", True) + res@gsnLeftString = "mean = " + sprintf("%6.3f", gavg) + res@gsnCenterString = "corr = " + sprintf("%6.3f", corr) + res@gsnRightString = "rmsd = " + sprintf("%6.3f", rmsd) + res@tiMainString = "Multi-model average" + map_multimod = gsn_csm_contour_map(wks, multimod, res) + end if + + log_info(" Wrote " + outfile) + + ; ------------------------------------------------------------------------ + ; write provenance to common netcdf and plot file + ; ------------------------------------------------------------------------ + + statistics = (/"clim", "var"/) + domain = "global" + plottype = "geo" + caption = "Relative amplitude of the climatological mean seasonal cycle " \ + + "of variable " + var0 + "." 
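+
+  ; note: log_provenance attaches the caption and the statistics, domain
+  ; and plottype tags defined above to both the netcdf output and the plot
+  ; file, and records the input files (infiles) as ancestors; the two empty
+  ; string arguments leave the optional authors/references fields unset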
+ log_provenance(nc_outfile, outfile, caption, statistics, domain, \ + plottype, "", "", infiles) + + leave_msg(DIAG_SCRIPT, "") + +end diff --git a/esmvaltool/diag_scripts/clouds/clouds_taylor.ncl b/esmvaltool/diag_scripts/clouds/clouds_taylor.ncl index 5ba9ce64e3..794c559b2f 100644 --- a/esmvaltool/diag_scripts/clouds/clouds_taylor.ncl +++ b/esmvaltool/diag_scripts/clouds/clouds_taylor.ncl @@ -11,7 +11,7 @@ ; 1) perfmetrics_main.ncl ; 2) perfmetrics_taylor.ncl ; 3) perfmetrics_taylor_collect.ncl -; originally written by Franziska Frank (DLR, Germany). +; originally written by Franziska Winterstein (DLR, Germany). ; Note: This code requires that all data are provided on the same grid. ; ; Required diag_script_info attributes (diagnostic specific) @@ -34,10 +34,20 @@ ; filename_add: legacy feature: arbitrary string to be added to all ; filenames of plots and netcdf output produced ; (default = "") +; legend_filter: do not show individual datasets in legend that are of +; project "legend_filter" (default = "") ; mask_ts_sea_ice: - True = mask T < 272 K as sea ice (only for ; variable "ts") ; - False = no additional grid cells masked for ; variable "ts" +; multiobs_exclude: list of *observational* datasets to be excluded when +; calculating uncertainty estimates from multiple +; observational datasets (see also multiobs_uncertainty) +; multiobs_uncertainty: calculate uncertainty estimates from multiple +; observational datasets (true, false); by default, +; all "obs", "obs6", "obs4mips" and "native6" datasets +; are used; any of such datasets can be explicitely +; excluded when also specifying "multiobs_exclude" ; styleset: "CMIP5", "DEFAULT" ; (if not set, CLOUDS_TAYLOR will create a color table and ; symbols for plotting) @@ -47,6 +57,8 @@ ; valid_fraction: used for creating sea ice mask (mask_ts_sea_ice = true): ; fraction of valid time steps required to mask grid cell ; as valid data +; var: short_name of variable to process (default = "" - use +; first variable in variable list) ; ; Required variable_info attributes (variable specific) ; reference_dataset: name of reference data set @@ -67,20 +79,28 @@ ; variable has been calculated from) the *second* (third, ...) ; ; Modification history -; 20190221-A_laue_ax: added provenance to output (v2.0) -; 20181120-A_laue_ax: adapted code to multi-variable capable framework -; 20180923-A_laue_ax: added writing of results to netcdf -; 20180611-A_laue_ax: code rewritten for ESMValTool v2.0 -; 20170620-A_laue_ax: added tags for reporting -; 20161104-A_laue_ax: changed calculation of correlation and standard -; deviation ratio from unweighted to grid area weighted -; 20160901-A_laue_ax: added regridding option 1 deg x 1 deg -; 20160818-A_laue_ax: added support for variable attribute "ref_model" -; 20160404-A_laue_ax: added optional drawing of observational -; uncertainties -; 20151027-A_laue_ax: moved call to 'write_references' to the beginning -; of the code -; 20150415-A-laue_ax: written. 
+; 20230117-lauer_axel: added support for ICON (code from Manuel) +; 20211028-lauer_axel: added option to calculate tcwp as lwp + iwp +; 20211018-lauer_axel: added option to remove individual models from legend +; 20211006-lauer_axel: removed write_plots +; 20210407-lauer_axel: added option to estimate observational uncertainty +; from multiple observational datasets +; 20210318-lauer_axel: added option to speficfy variable if more than one +; variable is present +; 20190221-lauer_axel: added provenance to output (v2.0) +; 20181120-lauer_axel: adapted code to multi-variable capable framework +; 20180923-lauer_axel: added writing of results to netcdf +; 20180611-lauer_axel: code rewritten for ESMValTool v2.0 +; 20170620-lauer_axel: added tags for reporting +; 20161104-lauer_axel: changed calculation of correlation and standard +; deviation ratio from unweighted to grid area weighted +; 20160901-lauer_axel: added regridding option 1 deg x 1 deg +; 20160818-lauer_axel: added support for variable attribute "ref_model" +; 20160404-lauer_axel: added optional drawing of observational +; uncertainties +; 20151027-lauer_axel: moved call to 'write_references' to the beginning +; of the code +; 20150415-lauer_axel: written. ; ; ########################################################################### @@ -90,7 +110,8 @@ load "$diag_scripts/../interface_scripts/interface.ncl" load "$diag_scripts/shared/plot/aux_plotting.ncl" load "$diag_scripts/shared/statistics.ncl" load "$diag_scripts/shared/plot/style.ncl" -load "$diag_scripts/shared/plot/taylor_diagram_less_hardcoded.ncl" +load "$diag_scripts/shared/plot/taylor_plot.ncl" +load "$diag_scripts/shared/dataset_selection.ncl" begin @@ -99,12 +120,17 @@ begin set_default_att(diag_script_info, "embracelegend", False) set_default_att(diag_script_info, "estimate_obs_uncertainty", False) set_default_att(diag_script_info, "mask_ts_sea_ice", False) + set_default_att(diag_script_info, "legend_filter", "") + set_default_att(diag_script_info, "multiobs_exclude", "") + set_default_att(diag_script_info, "multiobs_uncertainty", False) set_default_att(diag_script_info, "timemean", "annualclim") set_default_att(diag_script_info, "valid_fraction", 0.5) + set_default_att(diag_script_info, "var", "") estimate_obs_uncertainty = diag_script_info@estimate_obs_uncertainty - variables = metadata_att_as_array(variable_info, "short_name") + variables = get_unique_values(metadata_att_as_array(variable_info, \ + "short_name")) numvars = dimsizes(variables) ; find "main" variable and if present uncertainty estimates or auxiliary @@ -113,18 +139,12 @@ begin mainvarind = 0 ; default = main variable is first (and only) variable errvarind = -1 ; default = no error estimate available - if (numvars.gt.1) then - ; uncertainty estimates are identified by "err" in their short_name - do i = 0, numvars - 1 - if (isStrSubset(variables(i), "err")) then - errvarind = i - break - end if - end do - ; now find the main variable: + if (diag_script_info@var .eq. "") then ; it is assumed that the main variable is either the first non-error ; variable or the first "derived" variable - idx = ind(ispan(0, numvars - 1, 1) .ne. errvarind) + + ivar = ispan(0, numvars - 1, 1) + idx = ind(.not.(isStrSubset(variables(ivar), "err"))) mainvarind = idx(0) ; first non-error variable ; now check for possibly derived variables do ii = 1, dimsizes(idx) - 1 @@ -141,25 +161,62 @@ begin end if end do end do + var0 = variables(mainvarind) else + mainvarind = ind(variables .eq. 
diag_script_info@var) + var0 = diag_script_info@var + end if + + if (ismissing(mainvarind)) then + errstr = "diagnostic " + diag + " requires the following variable: " + var0 + error_msg("f", DIAG_SCRIPT, "", errstr) + end if + + var0_info = select_metadata_by_name(variable_info, var0) + var0_info := var0_info[0] + + flag_multiobs_unc = diag_script_info@multiobs_uncertainty + multiobs_exclude = diag_script_info@multiobs_exclude + if (estimate_obs_uncertainty .and. flag_multiobs_unc) then + log_info("estimate_obs_uncertainty is not supported when " + \ + "multiobs_uncertainty is enabled. Setting " + \ + "estimate_obs_uncertainty to False") + estimate_obs_uncertainty = False + end if + + if (.not. flag_multiobs_unc) then + ; check if corresponding uncertainty estimates are available; + ; uncertainty estimates are identified by "err" in their short_name + + if (numvars.gt.1) then + ; uncertainty estimates are identified by "err" in their short_name + do i = 0, numvars - 1 + if (isStrSubset(variables(i), var0) .and. \ + isStrSubset(variables(i), "err")) then + errvarind = i + break + end if + end do + end if end if - var0 = variable_info[mainvarind]@short_name info0 = select_metadata_by_name(input_file_info, var0) dim_MOD = ListCount(info0) - if (isatt(variable_info[mainvarind], "reference_dataset")) then - refname = variable_info[mainvarind]@reference_dataset + if (isatt(var0_info, "reference_dataset")) then + refname = var0_info@reference_dataset end if - if (isatt(variable_info[mainvarind], "alternative_dataset")) then - refname2 = variable_info[mainvarind]@alternative_dataset + if (isatt(var0_info, "alternative_dataset")) then + refname2 = var0_info@alternative_dataset end if names = metadata_att_as_array(info0, "dataset") + projects = metadata_att_as_array(info0, "project") infiles = metadata_att_as_array(input_file_info, "filename") - ; check for reference model definition - - if (.not.isvar("refname")) then - error_msg("f", DIAG_SCRIPT, "", "no reference dataset defined in recipe") + if (.not. flag_multiobs_unc) then + ; check for reference model definition + if (.not.isvar("refname")) then + error_msg("f", DIAG_SCRIPT, "", "no reference dataset defined in recipe") + end if end if ; print info on variable(s) to log file @@ -213,13 +270,20 @@ begin delete(auxind) end if - ; time averaging: at the moment, only "annualclim" and "seasonalclim" - ; are supported - timemean = diag_script_info@timemean numseas = 1 ; default season = (/"annual"/) + ; time averaging: at the moment, only "annualclim" and "seasonalclim" + ; are supported + + if (flag_multiobs_unc .and. timemean .ne. "annualclim") then + log_info("multiobs_uncertainty = True is currently supported for annual" \ + + " means only (timemean = annualclim). Setting " \ + + " multiobs_uncertainty to False.") + flag_multiobs_unc = False + end if + if (timemean.eq."seasonalclim") then numseas = 4 delete(season) @@ -239,12 +303,6 @@ begin ; Create work dir system("mkdir -p " + work_dir) - if (config_user_info@write_plots.eq."True") then - write_plots = True - else - write_plots = False - end if - end begin @@ -254,10 +312,38 @@ begin ; get reference model - ref_ind = ind(names .eq. 
refname) - if (ismissing(ref_ind)) then - error_msg("f", DIAG_SCRIPT, "", "reference dataset (" \ - + refname + ") is missing") + ; find indices of all OBS and obs4mips datasets + ; (treat ERA5 as special case) + + if (flag_multiobs_unc) then + ; find indices of all OBS and obs4mips datasets (including "native6" ERA5) + + idxobs = get_obs(names, projects, multiobs_exclude) + + if (idxobs(0) .eq. -1) then + flag_multiobs_unc = False + log_info("No OBS or obs4mips datasets found. Setting " \ + + " multiobs_uncertainty to False.") + else + refname = "REF" + ref_ind = dimsizes(names) + names := array_append_record(names, (/refname/), 0) + dim_MOD = dim_MOD + 1 + end if + end if + + if (.not. flag_multiobs_unc) then + ; if attribute is present, use it so correlations can be calculated + if (isvar("refname")) then + ; set reference model + ref_ind = ind(names .eq. refname) + if (ismissing(ref_ind)) then + error_msg("f", DIAG_SCRIPT, "", "reference dataset (" + refname + \ + ") not found.") + end if + else + error_msg("f", DIAG_SCRIPT, "", "no reference dataset defined in recipe") + end if end if ; get multi-model mean index (if present) @@ -268,6 +354,12 @@ begin mm_ind = -1 end if + legend_filter = diag_script_info@legend_filter + + if ((legend_filter .ne. "") .and. mm_ind .gt. -1) then + projects(mm_ind) = "CMIP_MMM" + end if + mask_ts_sea_ice = diag_script_info@mask_ts_sea_ice if (isatt(diag_script_info, "filename_add")) then @@ -322,6 +414,38 @@ begin ; debugfile = addfile("debug.nc","c") ; debugfile->mask = global_mask + ; --------------------------------------------------------- + ; if requested, calculate multi-observational mean and standard deviation + + if (flag_multiobs_unc) then + nobs = dimsizes(idxobs) + + ; step 1: calculate multi-obs mean + + do i = 0, nobs - 1 + A0 = read_data(info0[idxobs(i)]) + + ; calculate time average + mean = time_operations(A0, -1, -1, "average", "annualclim", True) + delete(A0) + + if (i .eq. 0) then + dims = dimsizes(mean) + newdims = new(dimsizes(dims) + 1, integer) + newdims(0) = nobs + newdims(1:dimsizes(newdims) - 1) = dims + ref_tmp = new(newdims, float) + delete(dims) + end if + ref_tmp(i, :, :) = mean + end do + delete(mean) + ; note: we are using dim_avg_n_Warp so missing values are ignored + ; when averaging + ref_avg = dim_avg_n_Wrap(ref_tmp, 0) + delete(ref_tmp) + end if + ; read data and calculate time average ierridx = 0 @@ -349,26 +473,33 @@ begin info = select_metadata_by_name(input_file_info, variables(ivar)) do imod = modelf, modell - data_temp = read_data(info[imod]) - - ; The uncertainty estimates are assumed to be given as - ; 1-sigma of normally distributed error estimates - ; Note: uncertainty estimates must have "err" in their - ; variable name, otherwise variables are assumed - ; to be fields used for calculating derived variables - ; (in this case, fields are not squared before averaging) - ; ---> square sigma before averaging over time, then - ; calculate square-root to get average sigma - - if (ivar.eq.errvarind) then - data_temp = data_temp * data_temp - end if - data1 = time_operations(data_temp, -1, -1, "average", \ - timemean, True) + if (imod .ne. ref_ind .or. 
.not.flag_multiobs_unc) then + data_temp = read_data(info[imod]) + ; The uncertainty estimates are assumed to be given as + ; 1-sigma of normally distributed error estimates + ; Note: uncertainty estimates must have "err" in their + ; variable name, otherwise variables are assumed + ; to be fields used for calculating derived variables + ; (in this case, fields are not squared before averaging) + ; ---> square sigma before averaging over time, then + ; calculate square-root to get average sigma + + if (ivar.eq.errvarind) then + data_temp = data_temp * data_temp + end if + + data1 = time_operations(data_temp, -1, -1, "average", \ + timemean, True) + + delete(data_temp) - if (ivar.eq.errvarind) then - data1 = sqrt(data1) + if (ivar.eq.errvarind) then + data1 = sqrt(data1) + end if + else + data1 = ref_avg + delete(ref_avg) end if if (isdefined("global_mask")) then @@ -393,30 +524,29 @@ begin copy_VarCoords(data1, data(imod, :, :, :)) end if end if - dim_data = dimsizes(data) - rank = dimsizes(dim_data) - if (numseas.eq.1) then - data(imod, :, :) = data1 - else - data(imod, :, :, :) = data1 - end if - delete(data_temp) - delete(data1) + dim_data = dimsizes(data) + rank = dimsizes(dim_data) + if (numseas.eq.1) then + data(imod, :, :) = data1 else - if (.not.isdefined("err")) then - dim_data = array_append_record((/numvars - 1/), dimsizes(data1), 0) - err = new(dim_data, float) - err!0 = "var" - err&var = variables(ind(variables .ne. var0)) - end if - if (numseas.eq.1) then - err(ierridx, :, :) = data1 - else - err(ierridx, :, :, :) = data1 - end if + data(imod, :, :, :) = data1 end if + delete(data1) + else + if (.not.isdefined("err")) then + dim_data = array_append_record((/numvars - 1/), dimsizes(data1), 0) + err = new(dim_data, float) + err!0 = "var" + err&var = variables(ind(variables .ne. var0)) + end if + if (numseas.eq.1) then + err(ierridx, :, :) = data1 + else + err(ierridx, :, :, :) = data1 + end if + end if - ; debug output + ; debug output ; debugfile->$input_file_info@dataset(imod)$ = data1 end do ; imod loop @@ -762,7 +892,7 @@ begin end if ; override alternative obs (if "EMBRACE" legend) if (embracelegend) then - if ((isStrSubset(str_lower(input_file_info@project(i)), "obs"))) \ + if ((isStrSubset(str_lower(projects(i)), "obs"))) \ then if (mm_ind .ge. 0) then colors(i) = "(/1.00, 0.00, 0.00/)" ; red @@ -789,7 +919,7 @@ begin else if (embracelegend) then colors(i) = colortab(idx) - if ((isStrSubset(str_lower(input_file_info@project(i)), "obs"))) \ + if ((isStrSubset(str_lower(projects(i)), "obs"))) \ then if (mm_ind .ge. 0) then colors(i) = "(/1.00, 0.00, 0.00/)" ; red @@ -828,12 +958,15 @@ begin end if ropts = True - ropts@Colors = colors - ropts@Markers = markers ; marker styles ropts@gsMarkerSizeF = 0.0125 + ropts@Colors = colors + ropts@Markers = markers ; marker styles + ropts@caseLabels = val&models - modelnames = val&models - ropts@caseLabels = modelnames + if (legend_filter .ne. 
"") then + ropts@legend_filter = legend_filter + ropts@projects = projects + end if ropts@stnRad = (/ 0.5, 1.5, 2.0, 2.5 /) ropts@OneX = "1.00" @@ -868,23 +1001,19 @@ begin end if ; plot - if (write_plots) then - ropts@tiMainString = var0 + " (" + season(is) + ")" ; title - ; observational uncertainties as normalized RMS error - ropts@rmsobs = rmsobs(is) + ropts@tiMainString = var0 + " (" + season(is) + ")" ; title + ; observational uncertainties as normalized RMS error + ropts@rmsobs = rmsobs(is) - plot = taylor_diagram(wks, legendwks, ratio(:, :, is), cc(:, :, is), \ - ropts) + plot = taylor_diagram(wks, legendwks, ratio(:, :, is), cc(:, :, is), \ + ropts) - plotfile(is) = wks@fullname - log_info("Wrote " + plotfile) + plotfile(is) = wks@fullname + log_info("Wrote " + plotfile) - if (embracelegend.and.(is.eq.(numseas-1))) then - frame(legendwks) - end if - else - plotfile(is) = "" - end if ; if write_plots + if (embracelegend.and.(is.eq.(numseas-1))) then + frame(legendwks) + end if end do ; is-loop (seasons) ; write netCDF output @@ -900,9 +1029,9 @@ begin ; write provenance to netcdf output and plot file(s) ; ------------------------------------------------------------------------ - statistics = ("clim") - domain = ("glob") - plottype = ("taylor") + statistics = "clim" + domain = "global" + plottype = "taylor" do is = 0, numseas - 1 ; note: because function log_provenance does not yet support to attach diff --git a/esmvaltool/diag_scripts/clouds/clouds_taylor_double.ncl b/esmvaltool/diag_scripts/clouds/clouds_taylor_double.ncl new file mode 100644 index 0000000000..20b7027bb4 --- /dev/null +++ b/esmvaltool/diag_scripts/clouds/clouds_taylor_double.ncl @@ -0,0 +1,742 @@ +; ############################################################################ +; CLOUDS_TAYLOR_DOUBLE +; ############################################################################ +; Author: Axel Lauer (DLR, Germany) +; PROJECT-NAME EMBRACE +; ############################################################################ +; Description +; Calculates the performance of models in reproducing 2-d fields of annual +; mean or seasonal (DFJ, MAM, JJA, SON) mean cloud properties +; (Taylor diagramms). The code is based on +; 1) clouds_taylor.ncl +; 2) perfmetrics_main.ncl +; 3) perfmetrics_taylor.ncl +; 4) perfmetrics_taylor_collect.ncl +; Note: This code requires that all data are provided on the same grid. +; +; Required diag_script_info attributes (diagnostic specific) +; none +; +; Optional diag_script_info attributes (diagnostic specific) +; filename_add: legacy feature: arbitrary string to be added to all +; filenames of plots and netcdf output produced +; (default = "") +; multiobs_exclude: list of *observational* datasets to be excluded when +; calculating uncertainty estimates from multiple +; observational datasets (see also multiobs_uncertainty) +; multiobs_uncertainty: calculate uncertainty estimates from multiple +; observational datasets (true, false); by default, +; all "obs", "obs6", "obs4mips" and "native6" datasets +; are used; any of such datasets can be explicitely +; excluded when also specifying "multiobs_exclude" +; projectcolors: colors for each projectgroups +; (e.g. (/"(/0.0, 0.0, 1.0/)", "(/1.0, 0.0, 0.0/)"/) +; projectgroups: calculated mmm per "projectgroup" +; (e.g. 
(/"cmip5", "cmip6")/) +; styleset: "CMIP5", "DEFAULT" +; (if not set, CLOUDS_TAYLOR_DOUBLE will create a color +; table and symbols for plotting) +; timemean: time averaging +; - annualclim (default) = 1 plot annual mean +; - seasonalclim = 4 plots (DJF, MAM, JJA, SON) +; var: short_name of variable to process (default = "" - use +; first variable in variable list) +; +; Required variable_info attributes (variable specific) +; reference_dataset: name of reference data set +; +; Optional variable attributes (variable specific) +; none +; +; Caveats +; KNOWN LIMITATIONS +; 1) only 2-dim variables are currently supported +; +; Modification history +; 20211108-lauer_axel: written. +; +; ########################################################################### + + +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/plot/aux_plotting.ncl" +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/taylor_plot.ncl" +load "$diag_scripts/shared/dataset_selection.ncl" + +begin + + enter_msg(DIAG_SCRIPT, "") + + set_default_att(diag_script_info, "multiobs_exclude", "") + set_default_att(diag_script_info, "multiobs_uncertainty", False) + set_default_att(diag_script_info, "projectcolors", "(/0.0, 0.0, 1.0/)") + set_default_att(diag_script_info, "projectgroups", "") + set_default_att(diag_script_info, "timemean", "annualclim") + set_default_att(diag_script_info, "valid_fraction", 0.5) + set_default_att(diag_script_info, "var", "") + + projectgroups = diag_script_info@projectgroups + if (projectgroups(0) .ne. "") then + dim_GROUPS = dimsizes(projectgroups) + dim_COLORS = dimsizes(diag_script_info@projectcolors) + projectcolors_mmm = new(dim_COLORS, string) + projectcolors = new(dim_GROUPS, string) + newrgb = new(3, float) + do i = 0, dim_GROUPS - 1 + if (i .lt. dim_COLORS) then + projectcolors(i) = diag_script_info@projectcolors(i) + else + projectcolors(i) = "(/0.0, 0.0, 1.0/)" + end if + + do j = 0, 2 + rgb = str_get_field(projectcolors(i), j + 1, ",") + rgb = str_sub_str(rgb, "(", "") + rgb = str_sub_str(rgb, ")", "") + rgb = str_sub_str(rgb, "/", "") + newrgb(j) = 0.75 * tofloat(rgb) + end do + + projectcolors_mmm(i) = sprintf("(/%f, ", newrgb(0)) + \ + sprintf("%f, ", newrgb(1)) + sprintf("%f/)", newrgb(2)) + end do + groupnames = projectgroups + "_mmm" + + else + dim_GROUPS = 0 + groupnames = "" + end if + + variables = metadata_att_as_array(variable_info, "short_name") + numvars = dimsizes(variables) + + if (diag_script_info@var .eq. "") then + var0 = variable_info[0]@short_name + else + var0 = diag_script_info@var + end if + + varidx = ind(variables .eq. var0) + if (ismissing(varidx)) then + errstr = "diagnostic " + diag + " requires the following variable: var0" + error_msg("f", DIAG_SCRIPT, "", errstr) + end if + + flag_multiobs_unc = diag_script_info@multiobs_uncertainty + multiobs_exclude = diag_script_info@multiobs_exclude + + info0 = select_metadata_by_name(input_file_info, var0) + dim_MOD = ListCount(info0) + if (isatt(variable_info[varidx], "reference_dataset")) then + refname = variable_info[varidx]@reference_dataset + end if + if (isatt(variable_info[varidx], "alternative_dataset")) then + refname2 = variable_info[varidx]@alternative_dataset + end if + names = metadata_att_as_array(info0, "dataset") + projects = metadata_att_as_array(info0, "project") + infiles = metadata_att_as_array(input_file_info, "filename") + + if (.not. 
flag_multiobs_unc) then + ; check for reference model definition + if (.not.isvar("refname")) then + error_msg("f", DIAG_SCRIPT, "", "no reference dataset defined in recipe") + end if + end if + + ; print info on variable(s) to log file + + log_info("++++++++++++++++++++++++++++++++++++++++++") + log_info(DIAG_SCRIPT) + log_info("++++++++++++++++++++++++++++++++++++++++++") + log_info("variable: " + variables(varidx)) + log_info("++++++++++++++++++++++++++++++++++++++++++") + + timemean = diag_script_info@timemean + numseas = 1 ; default + season = (/"annual"/) + + ; time averaging: at the moment, only "annualclim" and "seasonalclim" + ; are supported + + if (flag_multiobs_unc .and. timemean .ne. "annualclim") then + log_info("multiobs_uncertainty = True is currently supported for annual" \ + + " means only (timemean = annualclim). Setting " \ + + " multiobs_uncertainty to False.") + flag_multiobs_unc = False + end if + + if (timemean.eq."seasonalclim") then + numseas = 4 + delete(season) + season = (/"DJF", "MAM", "JJA", "SON"/) + end if + + ; create string for caption (netcdf provenance) + + allseas = season(0) + do is = 1, numseas - 1 + allseas = allseas + "/" + season(i) + end do + + ; make sure path for (mandatory) netcdf output exists + + work_dir = config_user_info@work_dir + "/" + ; Create work dir + system("mkdir -p " + work_dir) + +end + +begin + ; ======================================================================== + ; ========================== initialization ============================== + ; ======================================================================== + + ; get reference model + + ; find indices of all OBS and obs4mips datasets + ; (treat ERA5 as special case) + + if (flag_multiobs_unc) then + ; find indices of all OBS and obs4mips datasets (including "native6" ERA5) + + idxobs = get_obs(names, projects, multiobs_exclude) + + if (idxobs(0) .eq. -1) then + flag_multiobs_unc = False + log_info("No OBS or obs4mips datasets found. Setting " \ + + " multiobs_uncertainty to False.") + else + refname = "REF" + ref_ind = dimsizes(names) + names := array_append_record(names, (/refname/), 0) + dim_MOD = dim_MOD + 1 + end if + end if + + if (.not. flag_multiobs_unc) then + ; if attribute is present, use it so correlations can be calculated + if (isvar("refname")) then + ; set reference model + ref_ind = ind(names .eq. refname) + if (ismissing(ref_ind)) then + error_msg("f", DIAG_SCRIPT, "", "reference dataset (" + refname + \ + ") not found.") + end if + else + error_msg("f", DIAG_SCRIPT, "", "no reference dataset defined in recipe") + end if + end if + + ; get multi-model mean index (if present) + + mm_ind = ind(names .eq. "MultiModelMean") + + if (ismissing(mm_ind)) then + mm_ind = -1 + end if + + if (isatt(diag_script_info, "filename_add")) then + filename_add = "_" + diag_script_info@filename_add + else + filename_add = "" + end if + + if (dim_GROUPS .gt. 
0) then + names := array_append_record(names, groupnames, 0) + projects := array_append_record(projects, "group_" + projectgroups, 0) + dim_MOD = dim_MOD + dim_GROUPS + end if + + ; ======================================================================== + ; ============================ statistics ================================ + ; ======================================================================== + + ; --------------------------------------------------------- + ; if requested, calculate multi-observational mean and standard deviation + + if (flag_multiobs_unc) then + nobs = dimsizes(idxobs) + + ; step 1: calculate multi-obs mean + + do i = 0, nobs - 1 + A0 = read_data(info0[idxobs(i)]) + + ; calculate time average + mean = time_operations(A0, -1, -1, "average", "annualclim", True) + delete(A0) + + if (i .eq. 0) then + dims = dimsizes(mean) + newdims = new(dimsizes(dims) + 1, integer) + newdims(0) = nobs + newdims(1:dimsizes(newdims) - 1) = dims + ref_tmp = new(newdims, float) + delete(dims) + end if + ref_tmp(i, :, :) = mean + end do + delete(mean) + ; note: we are using dim_avg_n_Warp so missing values are ignored + ; when averaging + ref_avg = dim_avg_n_Wrap(ref_tmp, 0) + delete(ref_tmp) + end if + + ; read data and calculate time average + + ivar = varidx + + info = select_metadata_by_name(input_file_info, variables(ivar)) + + ; loop over all datasets not including groups means (to be calculated + ; separately) + + do imod = 0, dim_MOD - 1 - dim_GROUPS + + if (imod .ne. ref_ind .or. .not.flag_multiobs_unc) then + data_temp = read_data(info[imod]) + data1 = time_operations(data_temp, -1, -1, "average", \ + timemean, True) + + delete(data_temp) + else + data1 = ref_avg + delete(ref_avg) + end if + + if (isdefined("global_mask")) then + if (numseas.eq.1) then + data1 = data1 + global_mask + else + do is = 0, numseas - 1 + data1(is, :, :) = data1(is, :, :) + global_mask + end do + end if + end if + + if (.not.isdefined("data")) then + dim_data = array_append_record((/dim_MOD/), dimsizes(data1), 0) + data = new(dim_data, float) + data!0 = "model" + data&model = names + if (numseas.eq.1) then + copy_VarCoords(data1, data(imod, :, :)) + else + copy_VarCoords(data1, data(imod, :, :, :)) + end if + end if + + dim_data = dimsizes(data) + rank = dimsizes(dim_data) + if (numseas.eq.1) then + data(imod, :, :) = data1 + else + data(imod, :, :, :) = data1 + end if + delete(data1) + + ; debug output + + ; debugfile->$input_file_info@dataset(imod)$ = data1 + end do ; imod loop + + ; ------------------------------------ + ; calculate group means (if requested) + ; ------------------------------------ + + j = 0 + do imod = dim_MOD - dim_GROUPS, dim_MOD - 1 + idxgroup = ind(projects .eq. projectgroups(j)) + if (.not.all(ismissing(idxgroup))) then + isize = dimsizes(idxgroup) + if (isize .gt. 1) then + if (numseas.eq.1) then + data(imod, :, :) = dim_avg_n(data(idxgroup, :, :), 0) + else + data(imod, :, :, :) = dim_avg_n(data(idxgroup, :, :, :), 0) + end if + else + if (numseas.eq.1) then + data(imod, :, :) = data(idxgroup, :, :) + else + data(imod, :, :, :) = data(idxgroup, :, :, :) + end if + end if + end if + delete(idxgroup) + j = j + 1 + end do + + ; ------------------------------------ + + ; number of used reference data sets + + dim_REF = 1 + if (isvar("refname2")) then + dim_REF = dim_REF + 1 + end if + + ; define result variable + + val = new((/dim_MOD - dim_REF, 2, numseas/), float) + val!0 = "models" + val!1 = "statistic" + val!2 = "time" + val&models = names(ind((names .ne. 
refname))) + val&statistic = (/"stddev_ratio", "correlation"/) + val&time = ispan(0, numseas - 1, 1) + + ; extract data + + do is = 0, numseas - 1 + + if (numseas.eq.1) then + ref = data(ref_ind, :, :) + else + ref = data(ref_ind, is, :, :) + end if + + ; loop over models (skip reference) + + mod_i = 0 + do imod = 0, dim_MOD - 1 + + ; skip reference + + if (imod.eq.ref_ind) then + log_info("Ref_dataset = " + data&model(imod)) + continue + end if + + if (numseas.eq.1) then + var = data(imod, :, :) + else + var = data(imod, is, :, :) + end if + + ; common mask + mask1 = where(.not.ismissing(var), 0., var@_FillValue) + mask2 = where(.not.ismissing(ref), 0., ref@_FillValue) + amask = mask1 + mask2 + delete(mask1) + delete(mask2) + refmasked = ref + refmasked = refmasked + amask + var = var + amask + delete(amask) + + ; calculate rate of standard deviations + + ; val(mod_i, 0, is) = calculate_metric(var, ref, "stddev_ratio_nowgt") + val(mod_i, 0, is) = calculate_metric(var, refmasked, "stddev_ratio") + + ; calculate pattern correlation + + ; val(mod_i, 1, is) = calculate_metric(var, ref, "correlation_nowgt") + val(mod_i, 1, is) = calculate_metric(var, refmasked, "correlation") + + delete(var) + delete(refmasked) + mod_i = mod_i + 1 + + end do ; loop over datasets + end do ; loop over seasons + + ; attach attributes to the results + + val@title = "taylor metrics" + val@long_name = "Taylor diagram" + val@diag_script = (/DIAG_SCRIPT/) + val@var = "var" + + ; ------------------- calculate/estimate RMS of observations --------------- + + rmsobs = new((/numseas/), float) + + do is = 0, numseas - 1 + if (numseas.eq.1) then + ref = data(ref_ind, :, :) + else + ref = data(ref_ind, is, :, :) + end if + + ; This code is equivalent to the function "calculate_metric" + ; (statistics.ncl) + weights = map_area(ref&lat, ref&lon) + ; optional: no weights --> reset weights + ; wgt1d = 1.0 + + ; convert to 1-D arrays + ref1d = ndtooned(ref) + wgt1d = ndtooned(weights) + wgt1d = wgt1d / dim_avg_n_Wrap(wgt1d, 0) + + rmsobs(is) = 0.0 + + ; normalize RMS by dividing by standard deviation of obs (ref) +; avg_ref = dim_avg_wgt_Wrap(ref1d, wgt1d, 1) +; rmsobs(is) = rmsobs(is) / sqrt(dim_avg_wgt_Wrap((ref1d - avg_ref) ^ 2, \ +; wgt1d, 1)) + + end do ; loop over seasons + + if (isvar("var")) then + delete(var) + end if + if (isvar("ref")) then + delete(ref) + end if + if (isvar("weights")) then + delete(weights) + end if + if (isvar("var1d")) then + delete(var1d) + end if + if (isvar("ref1d")) then + delete(ref1d) + end if + if (isvar("wgt1d")) then + delete(wgt1d) + end if + + ; ======================================================================== + ; ============================= plotting ================================= + ; ======================================================================== + + nummods = dim_MOD - dim_REF + + colors = new(nummods, string) + markers = new(nummods, integer) + + ratio = new((/nummods, 1, numseas/), float) ; standard deviation ratios + ratio = val(:, 0, :) + cc = new((/nummods, 1, numseas/), float) ; correlations + cc = val(:, 1, :) + + if (isatt(diag_script_info, "styleset")) then + colortab = project_style(info0, diag_script_info, "colors") + markertab = project_style(info0, diag_script_info, "markers") + else + colortab = (/"(/0.00, 0.00, 0.59/)", "(/0.00, 0.39, 1.00/)", \ + "(/0.20, 1.00, 1.00/)", "(/0.20, 0.88, 0.00/)", \ + "(/1.00, 0.88, 0.00/)", "(/1.00, 0.59, 0.00/)", \ + "(/1.00, 0.20, 0.00/)", "(/0.59, 0.00, 0.00/)", \ + "(/0.78, 0.00, 0.78/)", "(/0.59, 0.00, 0.59/)", \ 
+ "(/0.90, 0.90, 0.90/)", "(/0.70, 0.70, 0.70/)", \ + "(/0.50, 0.50, 0.50/)", "(/0.30, 0.30, 0.30/)"/) + markertab = (/16, 4, 5, 0/) + end if + + plotfile = new((/numseas/), string) + + do is = 0, numseas - 1 + if (isvar("wks")) then + delete(wks) + end if + + if (isvar("legendwks")) then + delete(legendwks) + end if + + if (isvar("plot")) then + delete(plot) + end if + + wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_taylor_" + var0 \ + + "_" + season(is) + filename_add) + + ; create new marker: filled star + + mstring = "z" + fontnum = 35 + size = 1.75 ; 1.5 + angle = 0.0 + + new_index = NhlNewMarker(wks, mstring, fontnum, 0.0, 0.0, \ + 1.0, size, angle) + + ; create new marker: filled dot + + mstring = "m" + fontnum = 37 + size = 1.0 + angle = 0.0 + + new_index_dot = NhlNewMarker(wks, mstring, fontnum, 0.0, 0.0, \ + 1.0, size, angle) + + legendwks = wks + + i = 0 + idx = 0 + + if (isatt(diag_script_info, "styleset")) then + do ii = 0, dim_MOD - 1 + + if (ii.eq.ref_ind) then + continue + end if + + ; define group means (if present) + testidx = ind(groupnames .eq. names(ii)) + if (.not.ismissing(testidx)) then + colors(i) = projectcolors_mmm(testidx) + markers(i) = new_index + i = i + 1 + continue + end if + + colors(i) = colortab(i) + markers(i) = markertab(i) + + ; override multi-model mean (if present) + if (ii .eq. mm_ind) then + colors(i) = "(/0.00, 0.00, 0.00/)" ; black + markers(i) = new_index + end if + + ; override colors and markers of all group members + ; (if groups are defined) + testidx = ind(projectgroups .eq. projects(ii)) + if (.not.ismissing(testidx)) then + colors(i) = projectcolors(testidx) + markers(i) = new_index_dot ; 16 + end if + + i = i + 1 + end do + else + do ii = 0, dim_MOD - 1 + + if (ii.eq.ref_ind) then + continue + end if + + ; define group means (if present) + testidx = ind(groupnames .eq. names(ii)) + if (.not.ismissing(testidx)) then + colors(i) = projectcolors_mmm(testidx) + markers(i) = new_index + i = i + 1 + continue + end if + + if (ii .eq. mm_ind) then + colors(i) = "(/0.00, 0.00, 0.00/)" ; black + markers(i) = new_index + i = i + 1 + else + do n = 0, dim_MOD / dimsizes(colortab) + colors(i) = colortab(idx) + markers(i) = markertab(n) + i = i + 1 + if (i.ge.dim_MOD) then + break + end if + end do + idx = idx + 1 + if (idx.ge.dimsizes(colortab)) then + idx = dimsizes(colortab) - 1 + end if + end if + + ; override colors and markers of all group members + ; (if groups are defined) + testidx = ind(projectgroups .eq. projects(ii)) + if (.not.ismissing(testidx)) then + colors(i) = projectcolors(testidx) + markers(i) = new_index_dot ; 16 + end if + + end do + end if + + ropts = True + ropts@gsMarkerSizeF = 0.0125 + ropts@Colors = colors + ropts@Markers = markers ; marker styles + ropts@caseLabels = val&models + + if (dim_GROUPS .gt. 0) then + ropts@legend_filter = projectgroups + ropts@projects = projects + end if + + ropts@stnRad = (/ 0.5, 1.5, 2.0, 2.5 /) + ropts@OneX = "1.00" + ropts@ccRays = (/ 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, \ + 0.9, 0.95, 0.99 /) + ropts@ccRays_color = "Black" + ropts@centerDiffRMS = True + ropts@centerDiffRMS_color = "PaleGreen1" ; "LightGray" + + ropts@printDataLabels = False ; print numeric label for each data pt. 
+ ropts@reverseOrder = True ; reverse order of items in legend + ropts@legendBox = True ; draw box around legend + + ; legend parameters + + ropts@legendXpos = 0.6 + ropts@legendYpos = -0.5 + ropts@legendWidth = 0.2 + + ropts@caseLabelsFontHeightF = 0.1 + ropts@plotSize = 0.6 + + ropts@legendExtraFile = False + if (dim_MOD.ge.20) then + ropts@caseLabelsFontHeightF = 0.06 + ropts@plotSize = 0.45 + end if + + ; plot + ropts@tiMainString = var0 + " (" + season(is) + ")" ; title + ropts@tiMainOffsetYF = 0.025 + ; observational uncertainties as normalized RMS error + ropts@rmsobs = rmsobs(is) + + plot = taylor_diagram(wks, legendwks, ratio(:, :, is), cc(:, :, is), \ + ropts) + + plotfile(is) = wks@fullname + log_info("Wrote " + plotfile) + end do ; is-loop (seasons) + + ; write netCDF output + + if (any(rmsobs .gt. 0.0)) then + val@RMSE_observations = rmsobs + end if + + nc_filename = work_dir + "clouds_taylor_" + var0 + filename_add + ".nc" + nc_outfile = ncdf_write(val, nc_filename) + + ; ------------------------------------------------------------------------ + ; write provenance to netcdf output and plot file(s) + ; ------------------------------------------------------------------------ + + statistics = "clim" + domain = "global" + plottype = "taylor" + + do is = 0, numseas - 1 + ; note: because function log_provenance does not yet support to attach + ; different captions to netcdf (contains all seasons) and plots + ; (contain one season each), the caption cannot specifiy the + ; season plotted; using "annual" or "DJF/MAM/JJA/SON" instead. + + caption = "Taylor diagram for variable " + var0 + " (" + allseas \ + + "), reference = " + refname + "." + + log_provenance(nc_outfile, plotfile, caption, statistics, domain, \ + plottype, "", "", infiles) + end do + + leave_msg(DIAG_SCRIPT, "") + +end diff --git a/esmvaltool/diag_scripts/clouds/clouds_zonal.ncl b/esmvaltool/diag_scripts/clouds/clouds_zonal.ncl new file mode 100644 index 0000000000..a9ab9848c4 --- /dev/null +++ b/esmvaltool/diag_scripts/clouds/clouds_zonal.ncl @@ -0,0 +1,1408 @@ +; CLOUDS_ZONAL +; ############################################################################ +; Author: Axel Lauer (DLR, Germany) +; ############################################################################ +; Description +; Calculates annual/seasonal means of zonally averaged 3-d (cloud) +; parameters for comparison with a reference data set. Optionally, +; differences to the reference data set are also plotted. 
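+;
+;   A minimal recipe entry using this diagnostic could look like the
+;   sketch below (illustrative only: the diagnostic/script wiring is
+;   assumed, the option keys are the ones documented under "Optional
+;   diag_script_info attributes"):
+;
+;     scripts:
+;       zonal:
+;         script: clouds/clouds_zonal.ncl
+;         showdiff: true
+;         timemean: seasonalclim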
+;
+; Required diag_script_info attributes (diagnostic specific)
+;   none
+;
+; Optional diag_script_info attributes (diagnostic specific)
+;   embrace_setup:       True = 2 plots per line, False = 4 plots per line
+;                        (default)
+;   explicit_cn_levels:  explicit contour levels for mean values (array)
+;   explicit_cn_dlevels: explicit contour levels for differences (array)
+;   extralegend:         plot legend(s) to extra file(s)
+;   filename_add:        optionally add this string to plot filenames
+;   panel_labels:        label individual panels (true, false)
+;   PanelTop:            manual override for "@gsnPanelTop" used by panel
+;                        plot(s)
+;   showdiff:            calculate and plot differences (default = False)
+;   showyears:           add start and end years to the plot titles
+;                        (default = False)
+;   rel_diff:            if showdiff = True, then plot relative differences
+;                        (%) (default = False)
+;   rel_diff_min:        lower cutoff value in case of calculating relative
+;                        differences
+;                        (in units of input variable)
+;   t_test:              perform t-test when calculating differences
+;                        (default = False)
+;   timemean:            time averaging - "seasonal" = (DJF, MAM, JJA, SON),
+;                        "annual" = annual mean
+;   units_to:            target units (automatic conversion)
+;
+; Required variable attributes (variable specific)
+;   none
+;
+; Optional variable_info attributes (variable specific)
+;   long_name:         variable description
+;   reference_dataset: reference dataset; REQUIRED when calculating
+;                      differences (showdiff = True)
+;   units:             variable units (for labeling plot only)
+;
+; Caveats
+;   none
+;
+; Modification history
+;   20230117-lauer_axel: added support for ICON (code from Manuel)
+;   20200211-lauer_axel: written.
+;
+; ############################################################################
+
+load "$diag_scripts/../interface_scripts/interface.ncl"
+
+load "$diag_scripts/shared/plot/aux_plotting.ncl"
+load "$diag_scripts/shared/scaling.ncl"
+load "$diag_scripts/shared/statistics.ncl"
+load "$diag_scripts/shared/plot/style.ncl"
+
+begin
+
+  enter_msg(DIAG_SCRIPT, "")
+
+  var0 = variable_info[0]@short_name
+  info0 = select_metadata_by_name(input_file_info, var0)
+  dim_MOD = ListCount(info0)
+  if (isatt(variable_info[0], "reference_dataset")) then
+    refname = variable_info[0]@reference_dataset
+  end if
+  names = metadata_att_as_array(info0, "dataset")
+  projects = metadata_att_as_array(info0, "project")
+
+  log_info("++++++++++++++++++++++++++++++++++++++++++")
+  log_info(DIAG_SCRIPT + " (var: " + var0 + ")")
+  log_info("++++++++++++++++++++++++++++++++++++++++++")
+
+  ; Set default values for non-required diag_script_info attributes
+
+  set_default_att(diag_script_info, "embrace_setup", False)
+  set_default_att(diag_script_info, "extralegend", False)
+  set_default_att(diag_script_info, "filename_add", "")
+  set_default_att(diag_script_info, "panel_labels", True)
+  set_default_att(diag_script_info, "rel_diff", False)
+  set_default_att(diag_script_info, "rel_diff_min", -1.0e19)
+  set_default_att(diag_script_info, "showdiff", False)
+  set_default_att(diag_script_info, "showyears", False)
+  set_default_att(diag_script_info, "t_test", False)
+  set_default_att(diag_script_info, "timemean", "annualclim")
+  set_default_att(diag_script_info, "units_to", "")
+
+  flag_diff = diag_script_info@showdiff
+  flag_rel_diff = diag_script_info@rel_diff
+  rel_diff_min = diag_script_info@rel_diff_min
+  t_test = diag_script_info@t_test
+
+  if (.not.flag_diff) then
+    if (flag_rel_diff) then
+      log_info("rel_diff = True has no effect until showdiff is also " \
+               + "set to True")
+    end if
+    if (t_test) then
+      log_info("t_test = True has no effect until showdiff is also " \
+               + "set to True")
+      t_test = False
+    end if
+  end if
+
+  if (diag_script_info@filename_add .ne. "") then
+    filename_add = "_" + diag_script_info@filename_add
+  else
+    filename_add = ""
+  end if
+
+  embracesetup = diag_script_info@embrace_setup
+
+  ; time averaging: at the moment, only "annualclim" and "seasonalclim"
+  ; are supported
+
+  timemean = diag_script_info@timemean
+  numseas = 1  ; default
+  season = (/"annual"/)
+
+  if (timemean.eq."seasonalclim") then
+    numseas = 4
+    delete(season)
+    season = (/"DJF", "MAM", "JJA", "SON"/)
+  end if
+
+  units_to = diag_script_info@units_to
+
+  ; create string for caption (netcdf provenance)
+
+  allseas = season(0)
+  do is = 1, numseas - 1
+    allseas = allseas + "/" + season(is)
+  end do
+
+  panel_labels = diag_script_info@panel_labels
+
+  extralegend = diag_script_info@extralegend
+
+  ; make sure path for (mandatory) netcdf output exists
+
+  work_dir = config_user_info@work_dir + "/"
+  ; Create work dir
+  system("mkdir -p " + work_dir)
+
+  ref_ind = -1  ; set to invalid value
+
+  ; if attribute is present, use it so correlations can be calculated
+  if (isvar("refname")) then
+    ; set reference model
+    ref_ind = ind(names .eq. refname)
+    if (ismissing(ref_ind)) then
+      log_info("warning: reference dataset (" + refname + ") not found.")
+      ref_ind = -1
+    end if
+  end if
+
+  climofiles = metadata_att_as_array(info0, "filename")
+
+  outfile = new(numseas, string)
+  outfile(:) = ""
+
+  if (flag_diff) then
+    outfile_d = new(numseas, string)
+    outfile_d(:) = ""
+
+    ; check for reference model definition
+    if (.not.isvar("refname")) then
+      error_msg("f", DIAG_SCRIPT, "", \
+                "no reference dataset defined in recipe")
+    end if
+
+    ; set reference model
+
+    ref_ind = ind(names .eq. refname)
+    if (ismissing(ref_ind)) then
+      error_msg("f", DIAG_SCRIPT, "", "reference dataset (" \
+                + refname + ") is missing")
+    end if
+  end if
+
+end
+
+begin
+  ; ###########################################
+  ; # get data and average time               #
+  ; ###########################################
+
+  maps = new((/dim_MOD, 4/), graphic)
+  maps_d = new((/dim_MOD, 4/), graphic)
+
+  ind_all_sorted = ispan(0, dim_MOD - 1, 1)  ; create array
+
+  if (ref_ind .ge. 0) then
+    ind_wo_ref = ind(names .ne. refname)
+    ind_all_sorted(0) = ref_ind
+    ind_all_sorted(1:dim_MOD - 1) = ind_wo_ref
+  end if
+
+  corr = new((/numseas/), float)
+  gavg = new((/numseas/), float)
+  rmsd = new((/numseas/), float)
+  bias = new((/numseas/), float)
+
+  ; filenames for netcdf output
+
+  nc_filename_bias = work_dir + "clouds_" + var0 + "_bias.nc"
+  nc_filename_bias@existing = "append"
+  nc_filename_mean = work_dir + "clouds_" + var0 + "_mean.nc"
+  nc_filename_mean@existing = "append"
+
+  do ii = 0, dim_MOD - 1
+
+    imod = ind_all_sorted(ii)
+    log_info("processing " + names(imod))
+
+    if (isvar("data1")) then
+      delete(data1)
+    end if
+
+    if (isvar("A0")) then
+      delete(A0)
+    end if
+
+    A0 = read_data(info0[imod])
+
+    ; check dimensions
+
+    dims = getvardims(A0)
+
+    if (dimsizes(dims) .lt. 3) then
+      error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + \
+                " dimensions, need 3")
+    end if
+    idx = ind(dims .eq. "lat")
+    if (ismissing(idx)) then
+      error_msg("f", DIAG_SCRIPT, "", "no lat dimension")
+    end if
+    idx = ind(dims .eq. 
"time") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no time dimension") + end if + +; ; if coordinate variables do not have the attribute "long_name", +; ; try to use attribute "standard_name" as a substitute +; +; do n = 0, dimsizes(dims) - 1 +; if (.not.isatt(A0&$dims(n)$, "long_name")) then +; if (isatt(A0&$dims(n)$, "standard_name")) then +; A0&$dims(n)$@long_name = A0&$dims(n)$@standard_name +; end if +; end if +; end do + + vcoord = dims(1) ; save name of vertical coordinate variable + if (ii .eq. 0) then + vcoord0_var = A0&$vcoord$ + vcoord0 = vcoord + if (isatt(vcoord0_var, "units")) then + vcoord0_units = vcoord0_var@units + else + vcoord0_units = "" + end if + end if + + delete(dims) + + ; vertical coordinate is assumed to be the dimension not being + ; "time" and "lat" + + ; average over time + + data1 = time_operations(A0, -1, -1, "average", timemean, True) + + if (t_test) then + start_year = info0[imod]@start_year + end_year = info0[imod]@end_year + nyears = end_year - start_year + 1 + + if (nyears .lt. 3) then + log_info("warning: cannot calculate t-test for dataset " \ + + names(imod) + "; need at least 3 years, dataset " \ + + "length = " + tostring(nyears) + " years; disabling t-test") + t_test = False + else + if (isvar("data1_t_stddev")) then + delete(data1_t_stddev) + end if + if (isvar("data1_t_mean")) then + delete(data1_t_mean) + end if + data1_t_stddev = interannual_variability(A0, -1, -1, timemean, "None") + data1_t_mean = data1 + end if + end if + + delete(A0) + + if (units_to .ne. "") then + data0 = convert_units(data1, units_to) + delete(data1) + data1 = data0 + delete(data0) + ; if attribute is present, overwrite with user specified units + if (isatt(variable_info[0], "units")) then + variable_info[0]@units = units_to + end if + end if + + ; ########################################### + ; # Style dependent annotation # + ; ########################################### + ; retrieve unique strings describing the data + ; function in ./diag_scripts/shared/plot/style.ncl + + ; ########################################### + ; # plot ressources # + ; ########################################### + + res = True + + res@cnFillOn = True ; color plot desired + res@cnLineLabelsOn = False ; contour lines + + ; colors + ; http://www.ncl.ucar.edu/Document/Graphics/color_table_gallery.shtml + + ; annotation + + ; if desired, add years to plot title + years_str = "" + if (diag_script_info@showyears) then + years_str = " (" + variable_info[0]@start_year + if (variable_info[0]@start_year .ne. 
variable_info[0]@end_year) then + years_str = years_str + "-" + variable_info[0]@end_year + end if + years_str = years_str + ")" + end if + +; res@tiMainOn = False + res@tiMainString = names(imod) + years_str + res@cnLevelSelectionMode = "ExplicitLevels" + res@cnLinesOn = False + +; res@lbLabelBarOn = False + res@gsnRightString = "" + + res@cnMissingValFillColor = "Gray" + + res@cnInfoLabelOn = False ; turn off cn info label + + if (isatt(data1&$vcoord$, "standard_name")) then + res@tiYAxisString = data1&$vcoord$@standard_name + if (isatt(data1&$vcoord$, "units")) then + res@tiYAxisString = res@tiYAxisString + " (" \ + + data1&$vcoord$@units + ")" + end if + end if + + if (isatt(data1&lat, "standard_name")) then + res@tiXAxisString = data1&lat@standard_name + end if + + ; set explicit contour levels + + if (isatt(diag_script_info, "explicit_cn_levels")) then + res@cnLevelSelectionMode = "ExplicitLevels" + res@cnLevels = diag_script_info@explicit_cn_levels + end if + + if (.not.isatt(res, "cnLevels")) then + if (var0.eq."clcalipso") then + res@cnLevels = fspan(5, 50, 10) + else + log_info(DIAG_SCRIPT + " (var: " + var0 + "):") + log_info("info: using default contour levels") + res@cnLevels = fspan(min(data1), max(data1), 20) + end if + end if + + ; ########################################### + ; # other Metadata: diag_script, var # + ; ########################################### + ; add to data1 as attributes without prefix + + if (isatt(data1, "diag_script")) then ; add to existing entries + temp = data1@diag_script + delete(data1@diag_script) + data1@diag_script = array_append_record(temp, (/DIAG_SCRIPT/), 0) + delete(temp) + else ; add as new attribute + data1@diag_script = (/DIAG_SCRIPT/) + end if + + if (isatt(variable_info[0], "long_name")) then + data1@var_long_name = variable_info[0]@long_name + end if + + data1@var = var0 + + if (isatt(variable_info[0], "units")) then + data1@var_units = variable_info[0]@units + else + data1@var_units = "" + end if + + res@lbTitleOn = True + res@lbTitleString = data1@var_units + res@lbTitlePosition = "Bottom" + res@lbTitleFontHeightF = 0.015 + + if (.not. isvar("ref_data")) then + ref_data = data1 + vcoord_ref = vcoord + if (t_test) then ; save mean in same units as stddev + ref_data_t_stddev = data1_t_stddev + ref_data_t_mean = data1_t_mean + nyears_ref = nyears + end if + end if + + ; check if data are on same grid (for calculating difference, RMSD, + ; correlation) + + same_grid = False + + if (all(dimsizes(ref_data) .eq. dimsizes(data1))) then + if (max(abs(ref_data&lat - data1&lat)) .le. 1.0e-6) then + if (max(abs(ref_data&$vcoord_ref$ - data1&$vcoord$)) .le. 1.0e-6) then + same_grid = True + end if + end if + end if + + if (flag_diff .and. .not.same_grid) then + flag_diff = False + error_msg("f", DIAG_SCRIPT, "", \ + "Data are not on same grid (horizontal and / or vertical), " \ + + "cannot calculate differences. " \ + + "Set showdiff to False in recipe or regrid data to " \ + + "common grid (check/adjust " \ + + "preprocessor settings in recipe).") + end if + + corr = corr@_FillValue + gavg = gavg@_FillValue + +; if (.not.all(ismissing(data1))) then +; if (numseas.gt.1) then +; do is = 0, numseas - 1 +; if (same_grid .and. (ref_ind .ge. 0)) then +; corr(is) = calculate_metric(ref_data(is, :, :), data1(is, :, :), \ +; "correlation") +; end if +; gavg(is) = dim_avg_n_Wrap(data1(is, :, :), (/0, 1/)) +; end do +; else +; if (same_grid .and. (ref_ind .ge. 
0)) then +; corr(0) = calculate_metric(ref_data, data1, "correlation") +; end if +; gavg(0) = dim_avg_n_Wrap(data1, (/0, 1/)) +; end if +; end if + + res@gsnLeftStringFontHeightF = min((/0.025, 0.015 * 6.0 \ + / tofloat((dim_MOD + 1) / 2)/)) + res@gsnRightStringFontHeightF = min((/0.025, 0.015 * 6.0 \ + / tofloat((dim_MOD + 1) / 2)/)) + + ; ########################################### + ; # create the plot # + ; ########################################### + + ; function in aux_plotting.ncl + + if (ii.eq.0) then + ; note: an array of workspaces (i.e. wks(numseas)) does not work as + ; attributes cannot be assigned to each array element + ; individually + wks0 = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_zonal_" + var0 + \ + "_" + season(0) + filename_add) + ; difference plots will be saved to a different file + if (flag_diff) then + wks0d = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_zonal_" + \ + var0 + "_bias_" + season(0) + filename_add) + ndframe = 0 + end if + if (numseas.gt.1) then + wks1 = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_zonal_" + var0 + \ + "_" + season(1) + filename_add) + wks2 = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_zonal_" + var0 + \ + "_" + season(2) + filename_add) + wks3 = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_zonal_" + var0 + \ + "_" + season(3) + filename_add) + ; difference plots will be saved to different files + if (flag_diff) then + wks1d = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_zonal_" + \ + var0 + "_bias_" + season(1) + filename_add) + wks2d = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_zonal_" + \ + var0 + "_bias_" + season(2) + filename_add) + wks3d = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_zonal_" + \ + var0 + "_bias_" + season(3) + filename_add) + end if + end if + end if + + if (numseas.gt.1) then + do is = 0, numseas - 1 + if (.not.ismissing(corr(is))) then + res@gsnRightString = "corr = " + sprintf("%6.3f", corr(is)) + else + res@gsnRightString = "" + end if + if (.not.ismissing(gavg(is))) then + res@gsnLeftString = "mean = " + sprintf("%6.3f", gavg(is)) + else + res@gsnLeftString = "" + end if + + if (imod.eq.ref_ind) then ; remove corr. string for reference dataset + res@gsnRightString = "" + end if + + if (vcoord0_units .eq. "Pa") then + if (is.eq.0) then + maps(imod, is) = gsn_csm_pres_hgt(wks0, data1(is, :, :), res) + end if + if (is.eq.1) then + maps(imod, is) = gsn_csm_pres_hgt(wks1, data1(is, :, :), res) + end if + if (is.eq.2) then + maps(imod, is) = gsn_csm_pres_hgt(wks2, data1(is, :, :), res) + end if + if (is.eq.3) then + maps(imod, is) = gsn_csm_pres_hgt(wks3, data1(is, :, :), res) + end if + else + if (is.eq.0) then + maps(imod, is) = gsn_csm_contour(wks0, data1(is, :, :), res) + end if + if (is.eq.1) then + maps(imod, is) = gsn_csm_contour(wks1, data1(is, :, :), res) + end if + if (is.eq.2) then + maps(imod, is) = gsn_csm_contour(wks2, data1(is, :, :), res) + end if + if (is.eq.3) then + maps(imod, is) = gsn_csm_contour(wks3, data1(is, :, :), res) + end if + end if + end do + else + if (.not.ismissing(corr(0))) then + res@gsnRightString = "corr = " + sprintf("%6.3f", corr(0)) + else + res@gsnRightString = "" + end if + if (.not.ismissing(gavg(0))) then + res@gsnLeftString = "mean = " + sprintf("%6.3f", gavg(0)) + else + res@gsnLeftString = "" + end if + + if (imod.eq.ref_ind) then ; remove corr. string for reference dataset + res@gsnRightString = "" + end if + if (vcoord0_units .eq. 
"Pa") then + maps(imod, 0) = gsn_csm_pres_hgt(wks0, data1, res) + else + maps(imod, 0) = gsn_csm_contour(wks0, data1, res) + end if + end if + + ; mandatory netcdf output + + data1@var = var0 + "_mean_" + names(imod) + + ; vertical coordinate might have a different name, which prevents + ; all data from being written to the same netcdf file + ; --> rename vertical coordinate to match that of the first dataset + ; written to the netcdf file + delete(data1&$vcoord$) + data1!0 = vcoord0 + data1&$vcoord0$ = vcoord0_var + + nc_outfile_mean = ncdf_write(data1, nc_filename_mean) + + ; ======================================================================= + ; Create difference plots (if requested) + ; ======================================================================= + + if (flag_diff .and. (imod .ne. ref_ind)) then + + diff = data1 + if (flag_rel_diff) then + ref_data = where(ref_data .le. 1.0e-19, ref_data@_FillValue, \ + ref_data) + diff = (diff - ref_data) / ref_data * 100.0 + diff = where(ref_data .le. rel_diff_min, diff@_FillValue, diff) + else + diff = diff - ref_data + end if + + dres = res + + dres@gsnLeftString = "" + dres@gsnRightString = "" + + rmsd = rmsd@_FillValue + bias = bias@_FillValue + +; if (numseas.gt.1) then +; do is = 0, numseas - 1 +; if (.not. flag_rel_diff) then +; if (same_grid) then +; rmsd(is) = calculate_metric(ref_data(is, :, :), \ +; data1(is, :, :), "RMSD") +; end if +; bias(is) = dim_avg_n_Wrap(diff(is, :, :), (/0, 1/)) +; end if +; end do +; else +; if (.not. flag_rel_diff) then +; if (same_grid) then +; rmsd(0) = calculate_metric(ref_data, data1, "RMSD") +; end if +; bias(0) = dim_avg_n_Wrap(diff, (/0, 1/)) +; end if +; end if + + ; ---------------------------------------------------------------------- + + ; ########################################### + ; # plot resources # + ; ########################################### + + dres@gsnLeftStringFontHeightF = min((/0.025, 0.015 * 6.0 \ + / tofloat((dim_MOD + 1) / 2)/)) + dres@gsnRightStringFontHeightF = min((/0.025, 0.015 * 6.0 \ + / tofloat((dim_MOD + 1) / 2)/)) + +; dres@tiMainOn = False + dres@tiMainString = names(imod) + " - " + refname + years_str + + dres@cnFillOn = True ; color plot desired + dres@cnLineLabelsOn = False ; no contour line labels + dres@cnLinesOn = False + + ; colors + ; http://www.ncl.ucar.edu/Document/Graphics/color_table_gallery.shtml + + ; annotation + + dres@cnLevelSelectionMode = "ExplicitLevels" + + ; variable specific plotting settings + + ; set contour levels / colors + + if (.not.isvar("cnLevels")) then + + if (isatt(dres, "cnLevels")) then + delete(dres@cnLevels) + end if + if (isatt(dres, "cnFillColors")) then + delete(dres@cnFillColors) + end if + if (isvar("pal")) then + delete(pal) + end if + + if (var0.eq."clcalipso") then + dres@cnLevels = fspan(-25, 25, 11) + end if + + ; ****************************************************** + ; *** relative differences: use specific color table *** + ; ****************************************************** + + if (flag_rel_diff) then + if (isatt(dres, "cnLevels")) then + delete(dres@cnLevels) + end if + if (isatt(dres, "cnFillColors")) then + delete(dres@cnFillColors) + end if + dres@cnLevels = fspan(-100, 100, 21) + if (isvar("pal")) then + delete(pal) + end if + pal = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "percent100.rgb") + dres@cnFillColors = pal + end if + + ; ****************************************************** + + if (.not. 
isatt(dres, "cnLevels")) then + log_info(DIAG_SCRIPT + " (var: " + var0 + "):") + log_info("info: using default contour levels") + dres@cnLevels = fspan(min(diff), max(diff), 20) + end if + + cnLevels = dres@cnLevels + if (isatt(dres, "cnFillColors")) then + cnFillColors = dres@cnFillColors + end if + + else ; use previously defined colors and contour intervals + + if (isatt(dres, "cnLevels")) then + delete(dres@cnLevels) + end if + if (isatt(dres, "cnFillColors")) then + delete(dres@cnFillColors) + end if + + dres@cnLevels = cnLevels + + if (isvar("cnFillColors")) then + dres@cnFillColors = cnFillColors + end if + + end if ; if .not.isvar("cnLevels") + +; if (imod.eq.ref_ind) then +; dres@lbLabelBarOn = True +; else +; dres@lbLabelBarOn = False +; end if + + ; map attributes + + dres@cnMissingValFillColor = "Gray" + + dres@cnInfoLabelOn = False ; turn off cn info label + + ; set explicit contour levels + + if (isatt(diag_script_info, "explicit_cn_dlevels")) then + dres@cnLevelSelectionMode = "ExplicitLevels" + if (isatt(dres, "cnLevels")) then + delete(dres@cnLevels) + end if + dres@cnLevels = diag_script_info@explicit_cn_dlevels + end if + + ; ########################################### + ; # other Metadata: diag_script, var # + ; ########################################### + ; add to diff as attributes without prefix + + if (isatt(variable_info[0], "long_name")) then + diff@var_long_name = variable_info[0]@long_name + end if + if (isatt(variable_info[0], "units")) then + diff@var_units = variable_info[0]@units + else + diff@var_units = "" + end if + + ; ########################################### + ; # create the plot # + ; ########################################### + + if (t_test) then + dres@gsnDraw = False ; do not draw yet + dres@gsnFrame = False ; don't advance frame + end if + + ; ---------------------------------------------------------------------- + + if (numseas.gt.1) then + do is = 0, numseas - 1 + if (.not.ismissing(rmsd(is))) then + dres@gsnRightString = "rmsd = " + sprintf("%6.3f", rmsd(is)) + else + dres@gsnRightString = "" + end if + if (.not.ismissing(bias(is))) then + dres@gsnLeftString = "bias = " + sprintf("%6.3f", bias(is)) + else + dres@gsnLeftString = "" + end if + + if (vcoord0_units .eq. "Pa") then + if (is.eq.0) then + maps_d(imod, is) = gsn_csm_pres_hgt(wks0d, diff(is, :, :), dres) + end if + if (is.eq.1) then + maps_d(imod, is) = gsn_csm_pres_hgt(wks1d, diff(is, :, :), dres) + end if + if (is.eq.2) then + maps_d(imod, is) = gsn_csm_pres_hgt(wks2d, diff(is, :, :), dres) + end if + if (is.eq.3) then + maps_d(imod, is) = gsn_csm_pres_hgt(wks3d, diff(is, :, :), dres) + end if + else + if (is.eq.0) then + maps_d(imod, is) = gsn_csm_contour(wks0d, diff(is, :, :), dres) + end if + if (is.eq.1) then + maps_d(imod, is) = gsn_csm_contour(wks1d, diff(is, :, :), dres) + end if + if (is.eq.2) then + maps_d(imod, is) = gsn_csm_contour(wks2d, diff(is, :, :), dres) + end if + if (is.eq.3) then + maps_d(imod, is) = gsn_csm_contour(wks3d, diff(is, :, :), dres) + end if + end if + end do + else + if (.not.ismissing(rmsd(0))) then + dres@gsnRightString = "rmsd = " + sprintf("%6.3f", rmsd(0)) + else + dres@gsnRightString = "" + end if + if (.not.ismissing(bias(0))) then + dres@gsnLeftString = "bias = " + sprintf("%6.3f", bias(0)) + else + dres@gsnLeftString = "" + end if + if (vcoord0_units .eq. 
"Pa") then + maps_d(imod, 0) = gsn_csm_pres_hgt(wks0d, diff, dres) + else + maps_d(imod, 0) = gsn_csm_contour(wks0d, diff, dres) + end if + end if + ndframe = ndframe + 1 + + ; mandatory netcdf output + + diff@var = var0 + "_bias_" + names(imod) + nc_outfile_bias = ncdf_write(diff, nc_filename_bias) + + ; --------------------------------------------------------------------- + ; optionally mask non-significant grid cells + ; --------------------------------------------------------------------- + + if (t_test) then + n1 = nyears + n2 = nyears_ref + + tres = True + + tres@gsnDraw = False ; do not draw yet + tres@gsnFrame = False ; don't advance frame + tres@cnMissingValFillColor = -1 + tres@cnLevelSelectionMode = "ExplicitLevels" + tres@cnLevels = 0.95 +; tres@cnFillColors = (/"gray70", "transparent"/) + tres@cnFillColors = (/"black", "transparent"/) + tres@cnFillPattern = 17 + tres@cnFillOn = True ; color plot desired + tres@cnInfoLabelOn = False + tres@cnLinesOn = False + tres@cnLineLabelsOn = False + tres@lbLabelBarOn = False + tres@gsnRightString = "" + tres@gsnLeftString = "" + tres@gsnCenterString = "" + tres@tiYAxisOn = False + tres@tmXBBorderOn = False + tres@tmXTBorderOn = False + tres@tmYLBorderOn = False + tres@tmYRBorderOn = False + tres@tmXBLabelsOn = False + tres@tmYLLabelsOn = False + tres@tmXBOn = False + tres@tmXTOn = False + tres@tmYLOn = False + tres@tmYROn = False + + if (numseas.gt.1) then + do is = 0, numseas - 1 + x1 = data1_t_mean(is, :, :) + x2 = ref_data_t_mean(is, :, :) + s1 = data1_t_stddev(is, :, :) ^ 2 + s2 = ref_data_t_stddev(is, :, :) ^ 2 + + prob = ttest(x1, s1, n1, x2, s2, n2, True, False) + plot_var = 1. - prob + copy_VarCoords(data1_t_mean, plot_var) + + if (vcoord0_units .eq. "Pa") then + if (is.eq.0) then + tmask = gsn_csm_pres_hgt(wks0d, plot_var, tres) + end if + if (is.eq.1) then + tmask = gsn_csm_pres_hgt(wks1d, plot_var, tres) + end if + if (is.eq.2) then + tmask = gsn_csm_pres_hgt(wks2d, plot_var, tres) + end if + if (is.eq.3) then + tmask = gsn_csm_pres_hgt(wks3d, plot_var, tres) + end if + else + if (is.eq.0) then + tmask = gsn_csm_contour(wks0d, plot_var, tres) + end if + if (is.eq.1) then + tmask = gsn_csm_contour(wks1d, plot_var, tres) + end if + if (is.eq.2) then + tmask = gsn_csm_contour(wks2d, plot_var, tres) + end if + if (is.eq.3) then + tmask = gsn_csm_contour(wks3d, plot_var, tres) + end if + end if + + overlay(maps_d(imod, is), tmask) + delete(tmask) + draw(maps_d(imod, is)) + + if (is.eq.0) then + frame(wks0d) + end if + if (is.eq.1) then + frame(wks1d) + end if + if (is.eq.2) then + frame(wks2d) + end if + if (is.eq.3) then + frame(wks3d) + end if + + end do + else + x1 = data1_t_mean + x2 = ref_data_t_mean + s1 = data1_t_stddev ^ 2 + s2 = ref_data_t_stddev ^ 2 + + prob = ttest(x1, s1, n1, x2, s2, n2, True, False) + plot_var = 1. - prob + copy_VarCoords(data1_t_mean, plot_var) + + if (vcoord0_units .eq. 
"Pa") then + tmask = gsn_csm_pres_hgt(wks0d, plot_var, tres) + else + tmask = gsn_csm_contour(wks0d, plot_var, tres) + end if + + overlay(maps_d(imod, 0), tmask) + delete(tmask) + draw(maps_d(imod, 0)) + frame(wks0d) + end if + + delete([/x1, x2, s1, s2, prob/]) + end if ; if t_test + + ; --------------------------------------------------------------------- + + end if ; if flag_diff + + ; ======================================================================= + + end do ; ii-loop (models) + + ; save default color map in case it is needed later for optionally + ; plotting color bar to a separate file + + tmp_colors = gsn_retrieve_colormap(wks0) + cdims = dimsizes(tmp_colors) + nboxes = dimsizes(res@cnLevels) + clen = cdims(0) + stride = max((/1, ((clen(0)-1) - 2) / nboxes /)) + fill_colors = ispan(2, clen(0) - 1, stride) + mean_colors = tmp_colors(fill_colors, :) + delete(tmp_colors) + delete(fill_colors) + delete(cdims) + + ; sort plots if needed (observations go first) + + plottmp = ispan(0, dim_MOD - 1, 1) + plotind = plottmp + + ; move plots of observational datasets (if present) into the first line(s) + ; of the panel plot + j = 0 + do i = 0, dimsizes(plottmp) - 1 + if (i.eq.ref_ind) then + plotind(j) = plottmp(i) + j = j + 1 + else if (plottmp(i) .lt. dimsizes(projects)) then + if (isStrSubset(str_lower(projects(plottmp(i))), \ + "obs")) then + plotind(j) = plottmp(i) + j = j + 1 + end if + end if + end if + end do + + do i = 0, dimsizes(plottmp) - 1 + if ((isStrSubset(str_lower(projects(plottmp(i))), \ + "obs")).or.(i.eq.ref_ind)) then + else + plotind(j) = plottmp(i) + j = j + 1 + end if + end do + + pres = True ; needed to override + ; panelling defaults +; pres@gsnPanelLabelBar = True ; add common colorbar + if (panel_labels) then + ; print dataset name on each panel + pres@gsnPanelFigureStrings = names(plotind) + end if + pres@gsnPanelFigureStringsJust = "TopRight" + pres@gsnPanelFigureStringsFontHeightF = min((/0.01, 0.01 * 6.0 \ + / tofloat((dim_MOD + 1) / 2)/)) + pres@lbLabelFontHeightF = min((/0.015, 0.01 * 6.0 \ + / tofloat((dim_MOD + 1) / 2)/)) + pres@lbAutoManage = False + pres@lbTopMarginF = 0.1 + pres@lbTitleOn = True + pres@lbTitleFontHeightF = min((/0.015, 0.01 * 6.0 \ + / tofloat((dim_MOD + 1) / 2)/)) + pres@lbTitlePosition = "Bottom" + pres@lbTitleString = data1@long_name + " (" \ + + data1@units + ")" + pres@gsnPanelCenter = False + if (dim_MOD.le.8) then + pres@pmLabelBarOrthogonalPosF = -0.03 + else + pres@pmLabelBarOrthogonalPosF = -0.01 ; shift label bar a bit to + ; the bottom + end if + + if (embracesetup) then + if (numseas.gt.1) then + pres@txString = season(0) + outfile(0) = panelling(wks0, maps(plotind, 0), (dim_MOD + 3) / 4, \ + 4, pres) + + pres@txString = season(1) + outfile(1) = panelling(wks1, maps(plotind, 1), (dim_MOD + 3) / 4, \ + 4, pres) + + pres@txString = season(2) + outfile(2) = panelling(wks2, maps(plotind, 2), (dim_MOD + 3) / 4, \ + 4, pres) + + pres@txString = season(3) + outfile(3) = panelling(wks3, maps(plotind, 3), (dim_MOD + 3) / 4, \ + 4, pres) + log_info(" Wrote " + outfile) + else + pres@gsnPanelRowSpec = True ; tell panel what order to plot + pres@gsnPanelYWhiteSpacePercent = 5 + pres@gsnPanelXWhiteSpacePercent = 5 + if (isatt(diag_script_info, "PanelTop")) then + top = tofloat(diag_script_info@PanelTop) + else + top = 0.99 ; default + end if + pres@gsnPanelTop = top + + if (isvar("plotsperline")) then + delete(plotsperline) + end if + + plotsperline = new((dim_MOD + 1) / 2, integer) + plotsperline = 2 + + if 
((isStrSubset(str_lower(projects(plotind(0))), \ + "obs")).and. \ + .not.(isStrSubset(str_lower(projects(plotind(1))), \ + "obs"))) then + plotsperline(0) = 1 + end if + + if (sum(plotsperline).gt.dimsizes(plotind)) then + plotsperline(dimsizes(plotsperline) - 1) = 1 + end if + + if (sum(plotsperline).lt.dimsizes(plotind)) then + xadd = 1 + xtmp = array_append_record(plotsperline, xadd, 0) + delete(plotsperline) + plotsperline = xtmp + delete(xtmp) + end if + gsn_panel(wks0, maps(plotind, 0), plotsperline, pres) + outfile(0) = wks0@fullname + end if + else ; if embracesetup + if (numseas.gt.1) then + pres@txString = season(0) + outfile(0) = panelling(wks0, maps(plotind, 0), (dim_MOD + 3) / 4, \ + 4, pres) + + pres@txString = season(1) + outfile(1) = panelling(wks1, maps(plotind, 1), (dim_MOD + 3) / 4, \ + 4, pres) + + pres@txString = season(2) + outfile(2) = panelling(wks2, maps(plotind, 2), (dim_MOD + 3) / 4, \ + 4, pres) + + pres@txString = season(3) + outfile(3) = panelling(wks3, maps(plotind, 3), (dim_MOD + 3) / 4, \ + 4, pres) + else + outfile(0) = panelling(wks0, maps(plotind, 0), (dim_MOD + 3) / 4, \ + 4, pres) + end if + end if ; if embracesetup + + do is = 0, numseas - 1 + log_info("Wrote " + outfile(is)) + end do + + ; ------------------------------------------------------------------------ + ; write provenance to netcdf output and plot file(s) (mean) + ; ------------------------------------------------------------------------ + + statistics = (/"clim", "mean"/) + domain = "global" + plottype = "zonal" + + do is = 0, numseas - 1 + caption = "Zonal mean values for variable " + var0 \ + + " (" + allseas + ")." + log_provenance(nc_outfile_mean, outfile(is), caption, statistics, \ + domain, plottype, "", "", climofiles) + end do + + ; ======================================================================== + + if (flag_diff) then + pres@lbTitleString = "~F33~D~F21~" + diff@long_name + " (" + \ + diff@units + ")" + + ; save default color map in case it is needed later for optionally + ; plotting color bar to a separate file + + if (isvar("nboxes")) then + delete(nboxes) + end if + + tmp_colors = gsn_retrieve_colormap(wks0d) + cdims = dimsizes(tmp_colors) + nboxes = dimsizes(dres@cnLevels) + clen = cdims(0) + stride = max((/1, ((clen(0)-1) - 2) / nboxes /)) + fill_colors = ispan(2, clen(0) - 1, stride) + diff_colors = tmp_colors(fill_colors, :) + delete(tmp_colors) + delete(fill_colors) + delete(cdims) + + if (isvar("plottmp")) then + delete(plottmp) + end if + + if (isvar("plotind")) then + delete(plotind) + end if + + plottmp = ind(ispan(0, dim_MOD - 1, 1).ne.ref_ind) + plotind = plottmp + + ; if there is a second observational dataset, move the corresponding + ; plot to the first line of the panel plot + + j = 0 + do i = 0, dimsizes(plottmp) - 1 + if (isStrSubset(str_lower(projects(plottmp(i))), "obs")) then + plotind(j) = plottmp(i) + j = j + 1 + end if + end do + do i = 0, dimsizes(plottmp) - 1 + if (isStrSubset(str_lower(projects(plottmp(i))), "obs")) then + else + plotind(j) = plottmp(i) + j = j + 1 + end if + end do + + if (isatt(pres, "gsnPanelFigureStrings")) then + delete(pres@gsnPanelFigureStrings) + end if + if (panel_labels) then + pres@gsnPanelFigureStrings = names(plotind) + end if + + if (dimsizes(plotind).eq.1) then + pres@gsnPanelRight = 0.5 + end if + + if (embracesetup) then + if (numseas.gt.1) then + pres@txString = season(0) + outfile_d(0) = panelling(wks0d, maps_d(plotind, 0), \ + (dim_MOD + 3) / 4, 4, pres) + + pres@txString = season(1) + outfile_d(1) = 
panelling(wks1d, maps_d(plotind, 1), \ + (dim_MOD + 3) / 4, 4, pres) + + pres@txString = season(2) + outfile_d(2) = panelling(wks2d, maps_d(plotind, 2), \ + (dim_MOD + 3) / 4, 4, pres) + + pres@txString = season(3) + outfile_d(3) = panelling(wks3d, maps_d(plotind, 3), \ + (dim_MOD + 3) / 4, 4, pres) + else + pres@gsnPanelRowSpec = True ; tell panel what order to plot + pres@gsnPanelYWhiteSpacePercent = 5 + pres@gsnPanelXWhiteSpacePercent = 5 + pres@gsnPanelTop = tofloat(diag_script_info@PanelTop) + + if (isvar("plotsperline")) then + delete(plotsperline) + end if + + plotsperline = new(max((/1, dim_MOD / 2/)), integer) + plotsperline = 2 + + if (dimsizes(plotind).gt.1) then + if ((isStrSubset(str_lower(projects(plotind(0))), "obs")).and. \ + .not. \ + (isStrSubset(str_lower(projects(plotind(1))), "obs"))) then + plotsperline(0) = 1 + end if + end if + + if (sum(plotsperline).gt.dimsizes(plotind)) then + plotsperline(dimsizes(plotsperline) - 1) = 1 + end if + + if (sum(plotsperline).lt.dimsizes(plotind)) then + xadd = 1 + xtmp = array_append_record(plotsperline, xadd, 0) + delete(plotsperline) + plotsperline = xtmp + delete(xtmp) + end if + + gsn_panel(wks0d, maps_d(plotind, 0), plotsperline, pres) + outfile_d(0) = wks0d@fullname + end if + else ; embracesetup = False + if (numseas.gt.1) then + pres@txString = season(0) + outfile_d(0) = panelling(wks0d, maps_d(plotind, 0), \ + (dim_MOD + 3) / 4, 4, pres) + + pres@txString = season(1) + outfile_d(1) = panelling(wks1d, maps_d(plotind, 1), \ + (dim_MOD + 3) / 4, 4, pres) + + pres@txString = season(2) + outfile_d(2) = panelling(wks2d, maps_d(plotind, 2), \ + (dim_MOD + 3) / 4, 4, pres) + + pres@txString = season(3) + outfile_d(3) = panelling(wks3d, maps_d(plotind, 3), \ + (dim_MOD + 3) / 4, 4, pres) + else + outfile_d(0) = panelling(wks0d, maps_d(plotind, 0), \ + (dim_MOD + 3) / 4, 4, pres) + end if + end if ; end if embracesetup + ndframe = ndframe + 1 + + do is = 0, numseas - 1 + log_info(" Wrote " + outfile(is)) + + ; -------------------------------------------------------------------- + ; write provenance to netcdf output and plot file(s) (bias) + ; -------------------------------------------------------------------- + + statistics = (/"clim", "diff"/) + domain = "global" + plottype = "zonal" + + ; note: because function log_provenance does not yet support attaching + ; different captions to netcdf (contains all seasons) and plots + ; (contain one season each), the caption cannot specify the + ; season plotted; using "annual" or "DJF/MAM/JJA/SON" instead. + + caption = "Differences for zonally averaged variable " + var0 \ + + " (" + allseas + "), reference = " + refname + "." + + if (t_test) then + caption = caption \ + + " Non-significant grid cells are hatched." 
+ end if + + log_provenance(nc_outfile_bias, outfile_d(is), caption, statistics, \ + domain, plottype, "", "", climofiles) + end do + + end if ; if flag_diff + + ; optionally save legend(s) to extra file(s) + + if (extralegend) then + nboxes = dimsizes(res@cnLevels) + 1 + wksleg = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_" + var0 \ + + "_legend") + pres@lbMonoFillPattern = True + pres@lbOrientation = "Horizontal" + pres@vpWidthF = 0.7 + pres@vpHeightF = 0.1 + pres@lbLabelFontHeightF = 0.015 + pres@lbLabelAlignment = "InteriorEdges" + pres@lbTitleFontHeightF = 0.015 + pres@lbTitleString = data1@long_name + " (" + data1@units + ")" + + labels = tostring(res@cnLevels) + + ; remove trailing zeros from strings + + do i = 0, dimsizes(labels) - 1 + i1 = str_index_of_substr(labels(i), ".", -1) + if (.not.ismissing(i1)) then + tmp = stringtochar(labels(i)) + do j = dimsizes(tmp) - 2, i1, 1 + if ((tmp(j).ne.".").and.(tmp(j).ne."0")) then + break + end if + end do + labels(i) = chartostring(tmp(0:j)) + delete(tmp) + end if + end do + + if (isatt(data1, "res_cnFillColors")) then + pres@lbFillColors = res@cnFillColors + else if (isatt(data1, "res_cnFillPalette")) then + pres@lbFillColors = res@cnFillPalette + else + pres@lbFillColors = mean_colors ; default colors + end if + end if + + gsn_labelbar_ndc(wksleg, nboxes, labels, 0.1, 0.9, pres) + + delete(wksleg) + delete(labels) + delete(pres@lbFillColors) + + if (flag_diff) then + nboxes = dimsizes(dres@cnLevels) + 1 + wksleg = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_" + var0 \ + + "_diff_legend") + + labels = tostring(dres@cnLevels) + + ; remove trailing zeros from strings + + do i = 0, dimsizes(labels) - 1 + i1 = str_index_of_substr(labels(i), ".", -1) + if (.not.ismissing(i1)) then + tmp = stringtochar(labels(i)) + do j = dimsizes(tmp) - 2, i1, 1 + if ((tmp(j).ne.".").and.(tmp(j).ne."0")) then + break + end if + end do + labels(i) = chartostring(tmp(0:j)) + delete(tmp) + end if + end do + + if (flag_rel_diff) then + pres@lbTitleString = "~F33~D~F21~" + data1@long_name + " (%)" + else + pres@lbTitleString = "~F33~D~F21~" + data1@long_name + " (" + \ + data1@units + ")" + end if + + if (isatt(diff, "res_cnFillColors")) then + pres@lbFillColors = dres@cnFillColors + else if (isatt(diff, "res_cnFillPalette")) then + pres@lbFillColors = dres@cnFillPalette + else + pres@lbFillColors = diff_colors ; default colors + end if + end if + + gsn_labelbar_ndc(wksleg, nboxes, labels, 0.1, 0.9, pres) + end if ; if (flag_diff) + end if ; if (extralegend) + + ; ========================================================================== + + leave_msg(DIAG_SCRIPT, "") + +end diff --git a/esmvaltool/diag_scripts/cmorizers/era5.py b/esmvaltool/diag_scripts/cmorizers/era5.py new file mode 100644 index 0000000000..97f410d54c --- /dev/null +++ b/esmvaltool/diag_scripts/cmorizers/era5.py @@ -0,0 +1,60 @@ +"""Rename preprocessor output files so they are named according to OBS6.""" + +import logging +import shutil +from pathlib import Path + +import iris +from esmvalcore.cmor.table import CMOR_TABLES + +from esmvaltool.diag_scripts.shared import (get_diagnostic_filename, + run_diagnostic) + +logger = logging.getLogger(Path(__file__).name) + + +def main(cfg): + """Rename preprocessed native6 file.""" + fixed_files = cfg['input_data'] + + for file, info in fixed_files.items(): + stem = Path(file).stem + basename = stem.replace('native', 'OBS') + + if info['diagnostic'] == 'daily': + for mip in ['day', 'Eday', 'CFday']: + if CMOR_TABLES['CMIP6'].get_variable(mip, 
info['short_name']): + basename = basename.replace('E1hr', mip) + basename = basename.replace('E1hr', 'day') + elif info['diagnostic'] == '3hourly': + for mip in ['3hr', 'E3hr', 'CF3hr']: + if CMOR_TABLES['CMIP6'].get_variable(mip, info['short_name']): + basename = basename.replace('E1hr', mip) + basename = basename.replace('E1hr', '3hr') + + cube = iris.load_cube(file) + try: + time = cube.coord('time') + except iris.exceptions.CoordinateNotFoundError: + pass + else: + if info['diagnostic'] == "monthly": + start = time.cell(0).point.strftime("%Y%m") + end = time.cell(-1).point.strftime("%Y%m") + elif "hourly" in info['diagnostic']: + start = time.cell(0).point.strftime("%Y%m%d%H%M") + end = time.cell(-1).point.strftime("%Y%m%d%H%M") + else: + start = time.cell(0).point.strftime("%Y%m%d") + end = time.cell(-1).point.strftime("%Y%m%d") + basename = f"{basename.rstrip('0123456789-')}{start}-{end}" + + outfile = get_diagnostic_filename(basename, cfg) + logger.info('Moving %s to %s', file, outfile) + shutil.move(file, outfile) + + +if __name__ == '__main__': + + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/cmug_h2o/convert_h2o.py b/esmvaltool/diag_scripts/cmug_h2o/convert_h2o.py new file mode 100644 index 0000000000..070478a801 --- /dev/null +++ b/esmvaltool/diag_scripts/cmug_h2o/convert_h2o.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + + +"""Functions to convert water vapour variables. + +############################################################################### +convert_h2o.py +Author: Katja Weigel +EVal4CMIP + ESA-CMUG project +############################################################################### + +Description +----------- + Functions for: + Conversion between vmr (H2O) [ppmV] and specific humidity (hus) [g/kg]. + Based on IDL routines from Dominik Brunner. + +Configuration options +--------------------- + None + +############################################################################### + +""" + + +def h2o_to_hus(h2o): + """Calculate H2O in specific humidity instead of vmr.""" + mda = 28.966 # molecular mass of dry air + mwv = 18.016 # molecular mass of water vapour + helpval = h2o * (mwv / mda) + hus = helpval / (1.0 + helpval) + + return hus + + +def hus_to_h2o(hus): + """Calculate H2O in vmr instead of specific humidity.""" + mda = 28.966 # molecular mass of dry air + mwv = 18.016 # molecular mass of water vapour + h2o = mda / mwv * hus / (1.0 - hus) + + return h2o diff --git a/esmvaltool/diag_scripts/cmug_h2o/diag_tropopause.py b/esmvaltool/diag_scripts/cmug_h2o/diag_tropopause.py new file mode 100644 index 0000000000..fc42026f3e --- /dev/null +++ b/esmvaltool/diag_scripts/cmug_h2o/diag_tropopause.py @@ -0,0 +1,279 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + + +"""Interpolate given variable to tropopause height. + +############################################################################### +cmug_h2o/diag_tropopause.py +Author: Katja Weigel (IUP, Uni Bremen, Germany) +ESA-CMUG project +############################################################################### + +Description +----------- + Interpolate given variable to tropopause height. + Currently only the cold point tropopause is used, based on ta and plev. 
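+ The cold point is identified for each profile as the pressure level + at which the air temperature (ta) reaches its minimum; the chosen + variable is then sampled at that level (see the find_min aggregator + defined below).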
+ +Configuration options +--------------------- + +############################################################################### + +""" + + +import logging +import os +from copy import deepcopy +from pprint import pformat + +import cartopy.crs as cart +import iris +import matplotlib.pyplot as plt +import numpy as np +from cartopy.util import add_cyclic_point +from iris.analysis import Aggregator + +from esmvaltool.diag_scripts.shared import ( + ProvenanceLogger, + group_metadata, + run_diagnostic, + select_metadata, + sorted_metadata, +) +from esmvaltool.diag_scripts.shared._base import ( + get_diagnostic_filename, + get_plot_filename, +) + +logger = logging.getLogger(os.path.basename(__file__)) + + +def _cube_to_save_plotted(var, lats, lons, names): + """Create cube to prepare plotted data for saving to netCDF.""" + new_cube = iris.cube.Cube(var, var_name=names['var_name'], + long_name=names['long_name'], + units=names['units']) + new_cube.add_dim_coord(iris.coords.DimCoord(lats, + var_name='lat', + long_name='latitude', + units='degrees_north'), 0) + new_cube.add_dim_coord(iris.coords.DimCoord(lons, + var_name='lon', + long_name='longitude', + units='degrees_east'), 1) + + return new_cube + + +def _get_provenance_record(ancestor_files, caption, statistics, + domains, plot_type='geo'): + """Get Provenance record.""" + record = { + 'caption': caption, + 'statistics': statistics, + 'domains': domains, + 'plot_type': plot_type, + 'projects': ['cmug'], + 'realms': ['atmos'], + 'themes': ['atmDyn'], + 'authors': [ + 'weigel_katja', + ], + 'references': [ + 'acknow_project', + ], + 'ancestors': ancestor_files, + } + return record + + +def _press_interp_cube(cube, pstart=60000, pend=2500, pgrid=221): + """Interpolate cube onto dense pressure grid.""" + # interpolate to dense grid, use equally spaced points in log(p) + logpr = np.log(np.array([pstart, pend])) + sample_points = [('air_pressure', np.exp(np.linspace(logpr[0], + logpr[1], + pgrid)))] + new_cube = cube.interpolate(sample_points, iris.analysis.Linear()) + + return new_cube + + +def _set_new_dims(new_tacube, new_svarcube): + """Reshape ta data to align with the variable cube for the aggregator.""" + dims_to_collapse = set() + dims_to_collapse.update(new_svarcube.coord_dims('air_pressure')) + untouched_dims = set(range(new_svarcube.ndim)) - set(dims_to_collapse) + dims = list(untouched_dims) + list(dims_to_collapse) + unrolled_data = np.moveaxis(new_tacube.data, dims, + range(new_svarcube.ndim)) + return unrolled_data + + +def cube_to_save_plotted_map(var, lats, lons, names): + """Create cube to prepare plotted data for saving to netCDF.""" + new_cube = iris.cube.Cube(var, var_name=names['var_name'], + long_name=names['long_name'], + units=names['units']) + new_cube.add_dim_coord(iris.coords.DimCoord(lats, + var_name='lat', + long_name='latitude', + units='degrees_north'), 0) + new_cube.add_dim_coord(iris.coords.DimCoord(lons, + var_name='lon', + long_name='longitude', + units='degrees_east'), 1) + + return new_cube + + +def find_min(data, data_min, axis): + """Function for an Iris Aggregator: pick values at the minimum of another cube.""" + if axis < 0: + # just cope with negative axis numbers + axis += data.ndim + + min_ind = np.expand_dims(np.argmin(data_min, axis=axis), axis=axis) + return_data = np.squeeze(np.take_along_axis(data, min_ind, axis=axis)) + + return return_data + + +def plot_tp_map(cfg, mean_cube, titlestr, variable, listdata): + """Plot contour map.""" + # create figure and axes instances + + subplot_kw = {'projection': cart.PlateCarree(central_longitude=0.0)} + fig, axx = 
plt.subplots(figsize=(7, 5), subplot_kw=subplot_kw) + axx.set_extent([-180, 180, -90, 90], cart.PlateCarree()) + + data_c, lon_c = add_cyclic_point(mean_cube.data, + coord=mean_cube.coord('longitude').points) + + if variable == "Air Temperature": + print_var = "Temperature [K]" + set_range = np.linspace(180, 230, 21) + elif variable == "Geopotential Height": + print_var = "Geopotential Height [m]" + set_range = np.linspace(10000, 20000, 21) + elif variable == "Relative Humidity": + print_var = "Relative Humidity [%]" + set_range = np.linspace(0, 100, 21) + elif variable == "Specific Humidity": + print_var = "Specific Humidity [kg/kg]" + set_range = np.linspace(0.1e-5, 2.5e-5, 25) + else: + print_var = mean_cube.long_name + set_range = np.linspace(np.nanmin(mean_cube.data), + np.nanmax(mean_cube.data), 21) + # draw filled contours + cnplot = plt.contourf( + lon_c, + mean_cube.coord('latitude').points, + data_c, + set_range, + transform=cart.PlateCarree(), + cmap='rainbow', + # cmap='RdBu_r', + extend='both') + + axx.coastlines() + + # add colorbar + cbar = fig.colorbar(cnplot, ax=axx, shrink=0.8) + # add colorbar title string + cbar.set_label(print_var) + + axx.set_xlabel('Longitude') + axx.set_ylabel('Latitude') + axx.set_title(titlestr + variable) + + fig.tight_layout() + figname = 'fig_' + titlestr.replace(" ", "_") +\ + variable.replace(" ", "_") + '_map' + fig.savefig(get_plot_filename(figname, cfg), dpi=300) + plt.close() + + logger.info("Saving analysis results to %s", + get_diagnostic_filename(figname, cfg)) + + iris.save(_cube_to_save_plotted(data_c, + mean_cube.coord('latitude').points, + lon_c, + {'var_name': mean_cube.var_name, + 'long_name': variable, + 'units': mean_cube.units}), + target=get_diagnostic_filename(figname, cfg)) + + logger.info("Recording provenance of %s:\n%s", + get_diagnostic_filename(figname, cfg), + pformat(_get_provenance_record(listdata, + titlestr + variable, + ['other'], ['global']))) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(get_diagnostic_filename(figname, cfg), + _get_provenance_record(listdata, + titlestr + variable, + ['other'], ['global'])) + provenance_logger.log(get_plot_filename(figname, cfg), + _get_provenance_record(listdata, + titlestr + variable, + ['other'], ['global'])) + + +def main(cfg): + """Read in data for tropopause calculation.""" + available_vars = list(group_metadata(cfg['input_data'].values(), + 'short_name')) + + logging.debug("Found variables in recipe:\n%s", available_vars) + available_vars_min_tas = deepcopy(available_vars) + available_vars_min_tas.remove('ta') + # Make an Iris aggregator that picks values at the minimum of a different cube. 
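+ # For each grid column, find_min takes np.argmin along the collapsed + # axis of the auxiliary array passed via data_min (here: ta) and + # returns the value of the aggregated cube at that index, e.g. + # cube.collapsed('air_pressure', min_pos, data_min=unrolled_data) + # yields the variable at the level of minimum temperature.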
+ min_pos = Aggregator('ag_find_min', find_min, + units_func=lambda units: 1) + # Get input data + data = {} + for varname in available_vars: + data[varname] = select_metadata(cfg['input_data'].values(), + short_name=varname) + data[varname] = sorted_metadata(data[varname], sort='dataset') + + for attributes in data['ta']: + logger.info("Processing dataset %s", attributes['dataset']) + dataset = attributes['dataset'] + logger.debug("Loading %s", attributes['filename']) + new_tacube = _press_interp_cube(iris.load_cube(attributes['filename'])) + for svar in available_vars_min_tas: + input_file_svar = attributes['filename'].replace('/ta/', + '/' + svar + '/') + input_file_svar = input_file_svar.replace('_ta_', '_' + svar + '_') + + # load, interpolate to dense grid, use equally spaced points in log(p) + logger.debug("Loading %s", input_file_svar) + new_svarcube = _press_interp_cube(iris.load_cube(input_file_svar)) + + # set new dims + unrolled_data = _set_new_dims(new_tacube, new_svarcube) + + trop_svar = new_svarcube.collapsed('air_pressure', min_pos, + data_min=unrolled_data) + plot_tp_map(cfg, trop_svar.collapsed('time', iris.analysis.MEAN), + dataset + " Cold point tropopause ", + new_svarcube.long_name, + [data['ta'][0]['filename'], data[svar][0]['filename']]) + + trop_ta = new_tacube.collapsed('air_pressure', min_pos, + data_min=unrolled_data) + + plot_tp_map(cfg, trop_ta.collapsed('time', iris.analysis.MEAN), + dataset + " Cold point tropopause ", + "Air Temperature", [data['ta'][0]['filename']]) + + +if __name__ == '__main__': + + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/cmug_h2o/diag_tropopause_zonalmean.py b/esmvaltool/diag_scripts/cmug_h2o/diag_tropopause_zonalmean.py new file mode 100644 index 0000000000..d342a6224c --- /dev/null +++ b/esmvaltool/diag_scripts/cmug_h2o/diag_tropopause_zonalmean.py @@ -0,0 +1,436 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + + +"""Interpolate given variable to tropopause height. + +############################################################################### +cmug_h2o/diag_tropopause_zonalmean.py +Author: Katja Weigel (IUP, Uni Bremen, Germany) +ESA-CMUG project +############################################################################### + +Description +----------- + Interpolate given variable to tropopause height. + Currently only the cold point tropopause is used, based on ta and plev. 
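+ The script produces zonal mean sections, the temporal development of + the zonal mean at the cold point tropopause, and average vertical + profiles between 250 and 1 hPa for each dataset.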
+ +Configuration options +--------------------- + +############################################################################### + +""" + + +import logging +import os +from pprint import pformat +from copy import deepcopy +import numpy as np +import matplotlib.pyplot as plt +import iris +import iris.coord_categorisation +from iris.analysis import Aggregator + +from esmvaltool.diag_scripts.shared import (group_metadata, run_diagnostic, + select_metadata, sorted_metadata) +from esmvaltool.diag_scripts.shared._base import ( + ProvenanceLogger, get_diagnostic_filename, get_plot_filename) + +logger = logging.getLogger(os.path.basename(__file__)) + + +def _get_data_for_agg(new_svarcube, new_tacube): + """Reshape data for use in iris aggregator based on two cubes.""" + dims_to_collapse = set() + dims_to_collapse.update(new_svarcube.coord_dims('air_pressure')) + untouched_dims = set(range(new_svarcube.ndim)) -\ + set(dims_to_collapse) + dims = list(untouched_dims) + list(dims_to_collapse) + unrolled_data = np.moveaxis(new_tacube.data, dims, + range(new_svarcube.ndim)) + return unrolled_data + + +def _get_range_and_pstring(variable, mean_cube, tropopause=False): + """Get range for color bar and print string.""" + if variable == "Air Temperature": + print_var = "Temperature [K]" + set_range = np.linspace(180, 230, 21) + elif variable == "Geopotential Height": + print_var = "Geopotential Height [m]" + set_range = np.linspace(8000, 22000, 25) + elif variable == "Relative Humidity": + print_var = "Relative Humidity [%]" + set_range = np.linspace(0, 100, 21) + elif variable == "Specific Humidity": + print_var = "Specific Humidity [kg/kg]" + if tropopause: + set_range = np.linspace(0.1e-5, 2.5e-5, 25) + else: + logval = np.log(np.array([1e-6, 1e-5])) + set_range = np.exp(np.linspace(logval[0], logval[1], 41)) + else: + print_var = mean_cube.long_name + set_range = np.linspace(np.nanmin(mean_cube.data), + np.nanmax(mean_cube.data), 21) + + return {'print_var': print_var, 'set_range': set_range} + + +def _get_sel_files(cfg, dataname, dim=2): + """Get filenames from cfg for single models or multi-model mean.""" + selection = [] + if dim == 2: + for hlp in select_metadata(cfg['input_data'].values(), + dataset=dataname): + selection.append(hlp['filename']) + else: + for hlp in cfg['input_data'].values(): + selection.append(hlp['filename']) + + return selection + + +def _get_sel_files_var(cfg, varnames): + """Get filenames from cfg for the all-model mean and different variables.""" + selection = [] + + for var in varnames: + for hlp in select_metadata(cfg['input_data'].values(), short_name=var): + selection.append(hlp['filename']) + + return selection + + +def _get_sel_lvardata(cfg, dataname, lvarnames): + """Get filenames from cfg for one model and different variable(s).""" + selection = [] + + for lvar in lvarnames: + for hlp in select_metadata(cfg['input_data'].values(), long_name=lvar, + dataset=dataname): + selection.append(hlp['filename']) + + return selection + + +def _read_data(attributes, svar): + """Read data for ta and another variable from files.""" + input_file_svar = attributes['filename'].replace('/ta/', '/' + svar + '/') + input_file_svar = input_file_svar.replace('_ta_', '_' + svar + '_') + + logger.debug("Loading %s", input_file_svar) + svarcube = iris.load_cube(input_file_svar) + svarcube = svarcube.collapsed('longitude', iris.analysis.MEAN) + + return svarcube + + +def cube_to_save_profile(var1, var2, names): + """Create cubes to prepare scatter plot data for saving to netCDF.""" + cubes = iris.cube.CubeList([iris.cube.Cube(var1, 
+ var_name=names['var_name1'], + long_name=names['long_name1'], + units=names['units1'])]) + cubes.append(iris.cube.Cube(var2, var_name=names['var_name2'], + long_name=names['long_name2'], + units=names['units2'])) + + return cubes + + +def find_min(data, data_min, axis): + """Function for an Iris Aggregator: pick values at the minimum of another cube.""" + if axis < 0: + # just cope with negative axis numbers + axis += data.ndim + + min_ind = np.expand_dims(np.argmin(data_min, axis=axis), axis=axis) + return_data = np.squeeze(np.take_along_axis(data, min_ind, axis=axis)) + + return return_data + + +def get_provenance_record(ancestor_files, caption, statistics, + domains, plot_type='zonal'): + """Get Provenance record.""" + record = { + 'caption': caption, + 'statistics': statistics, + 'domains': domains, + 'plot_type': plot_type, + 'projects': ['cmug'], + 'realms': ['atmos'], + 'themes': ['atmDyn'], + 'authors': [ + 'weigel_katja', + ], + 'references': [ + 'acknow_project', + ], + 'ancestors': ancestor_files, + } + return record + + +def get_prof_and_plt_data(cfg, data, available_vars_min_tas): + """Plot data for single datasets and get a profile for each.""" + profiles = {} + for svar in available_vars_min_tas: + profiles[svar] = {} + + # Make an Iris aggregator that picks values at the minimum of a different cube. + min_pos = Aggregator('ag_find_min', find_min, + units_func=lambda units: 1) + + # interpolate to dense grid, use equally spaced points in log(p) + logpr = np.log(np.array([25000, 2500])) + sample_points = [('air_pressure', np.exp(np.linspace(logpr[0], logpr[1], + 221)))] + + for attributes in data['ta']: + logger.info("Processing dataset %s", attributes['dataset']) + dataset = attributes['dataset'] + + tacube = _read_data(attributes, 'ta') + new_tacube = tacube.interpolate(sample_points, + iris.analysis.Linear()) + unrolled_data = _get_data_for_agg(new_tacube, new_tacube) + plot_zonal_timedev(cfg, new_tacube.collapsed('air_pressure', + min_pos, + data_min=unrolled_data), + dataset, "Cold point tropopause ", + "Air Temperature") + + for svar in available_vars_min_tas: + svarcube = _read_data(attributes, svar) + + profiles[svar][dataset] = svarcube.collapsed(['time', 'latitude'], + iris.analysis.MEAN) + + plot_zonal_mean(cfg, svarcube.collapsed(['time'], + iris.analysis.MEAN), + dataset, "Zonal mean ", + svarcube.long_name) + + new_svarcube = svarcube.interpolate(sample_points, + iris.analysis.Linear()) + + unrolled_data = _get_data_for_agg(new_svarcube, new_tacube) + + plot_zonal_timedev(cfg, + new_svarcube.collapsed('air_pressure', + min_pos, + data_min=unrolled_data), + dataset, "Cold point tropopause ", + new_svarcube.long_name) + + return profiles + + +def plot_zonal_mean(cfg, mean_cube, dataname, titlestr, variable): + """Plot zonal mean contour.""" + # Plot data + # create figure and axes instances + fig, axx = plt.subplots(figsize=(7, 5)) + + rps = _get_range_and_pstring(variable, mean_cube) + + # draw filled contours + cnplot = plt.contourf( + mean_cube.coord('latitude').points, + mean_cube.coord('air_pressure').points / 100.0, + mean_cube.data, + rps['set_range'], + cmap='jet', + extend='both') + + axx = plt.gca() + axx.invert_yaxis() + axx.set_ylim(250, 1) + + # add colorbar + cbar = fig.colorbar(cnplot, ax=axx, shrink=0.8) + # add colorbar title string + cbar.set_label(rps['print_var']) + + axx.set_ylabel('Pressure [hPa]') + axx.set_xlabel('Latitude') + axx.set_title(titlestr + variable) + + fig.tight_layout() + + caption = dataname + " " + titlestr + variable +\ + " between 250 and 1 
hPa." +\ + " The diagnostic averages the complete time series." + figname = 'fig_' + dataname + "_" + titlestr.replace(" ", "_") +\ + variable.replace(" ", "_") + fig.savefig(get_plot_filename(figname, cfg), dpi=300) + plt.close() + + provenance_record = get_provenance_record(_get_sel_lvardata(cfg, + dataname, + [variable]), + caption, ['mean'], ['global']) +# + diagnostic_file = get_diagnostic_filename(figname, cfg) +# + logger.info("Saving analysis results to %s", diagnostic_file) +# + iris.save(mean_cube, target=diagnostic_file) +# + logger.info("Recording provenance of %s:\n%s", diagnostic_file, + pformat(provenance_record)) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(diagnostic_file, provenance_record) + provenance_logger.log(get_plot_filename(figname, cfg), + provenance_record) + + +def plot_zonal_timedev(cfg, mean_cube, dataname, titlestr, variable): + """Plot the temporal development of the zonal mean at the tropopause.""" + # Plot data + # create figure and axes instances + fig, axx = plt.subplots(figsize=(7, 12)) + + rps = _get_range_and_pstring(variable, mean_cube, tropopause=True) + + iris.coord_categorisation.add_year(mean_cube, 'time', name='year') + iris.coord_categorisation.add_month_number(mean_cube, 'time', + name='month_number') + + # Adjust (ncdf) time to the format matplotlib expects + cnplot = plt.contourf( + mean_cube.coord('latitude').points, + mean_cube.coord('year').points + + (mean_cube.coord('month_number').points - 1.0) / 12.0, + mean_cube.data, + rps['set_range'], + cmap='rainbow', + # cmap='RdBu_r', + extend='both') + + # add colorbar + cbar = fig.colorbar(cnplot, ax=axx, shrink=0.8) + # add colorbar title string + cbar.set_label(rps['print_var']) + + axx.set_ylabel('Time') + axx.set_xlabel('Latitude') + axx.set_title(titlestr + variable) + + fig.tight_layout() + figname = 'fig_' + dataname + "_" + titlestr.replace(" ", "_") + \ + variable.replace(" ", "_") + caption = dataname + " " + titlestr + variable + fig.savefig(get_plot_filename(figname, cfg), dpi=300) + plt.close() + + provenance_record = get_provenance_record(_get_sel_lvardata(cfg, + dataname, + [variable]), + caption, ['mean'], ['global']) +# + diagnostic_file = get_diagnostic_filename(figname, cfg) +# + logger.info("Saving analysis results to %s", diagnostic_file) +# + iris.save(mean_cube, target=diagnostic_file) +# + logger.info("Recording provenance of %s:\n%s", diagnostic_file, + pformat(provenance_record)) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(diagnostic_file, provenance_record) + provenance_logger.log(get_plot_filename(figname, cfg), + provenance_record) + + +def plot_profiles(cfg, profiles, available_vars_min_tas, available_datasets): + """Plot average vertical profiles for each variable and dataset.""" + # Plot data + # create figure and axes instances + for svar in available_vars_min_tas: + fig, axx = plt.subplots(figsize=(7, 5)) + + for iii, dataset in enumerate(available_datasets): + plt.plot((profiles[svar][dataset]).data, + (profiles[svar][dataset]).coord('air_pressure').points / + 100.0, label=dataset) + if iii == 0: + profiles_save = iris.cube.CubeList([profiles[svar][dataset]]) + else: + profiles_save.append(profiles[svar][dataset]) + + axx = plt.gca() + axx.invert_yaxis() + + plt.legend(loc='upper right') + + axx.set_ylabel('Pressure [hPa]') + axx.set_ylim(250, 1) + onedat = profiles[svar][available_datasets[0]] + if onedat.long_name == "Specific Humidity": + unitstr = "kg/kg" + axx.set_xlim(0, 1e-4) + else: + unitstr = str(onedat.units) + + 
axx.set_xlabel(onedat.long_name + + ' [' + unitstr + ']') + axx.set_title('Average ' + + onedat.long_name + + ' profile') + fig.tight_layout() + figname = 'fig_profile_' +\ + onedat.long_name.replace(" ", "_") + fig.savefig(get_plot_filename(figname, cfg), dpi=300) + plt.close() + + provenance_record = get_provenance_record(_get_sel_files_var(cfg, + svar), + 'Average ' + + onedat.long_name + + ' profile', ['mean'], + ['global']) + + diagnostic_file = get_diagnostic_filename(figname, cfg) +# + logger.info("Saving analysis results to %s", diagnostic_file) +# + iris.save(profiles_save, target=diagnostic_file) +# + logger.info("Recording provenance of %s:\n%s", diagnostic_file, + pformat(provenance_record)) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(diagnostic_file, provenance_record) + provenance_logger.log(get_plot_filename(figname, cfg), + provenance_record) + + +def main(cfg): + """Read in data for tropopause calculation.""" + available_vars = list(group_metadata(cfg['input_data'].values(), + 'short_name')) + available_datasets = list(group_metadata(cfg['input_data'].values(), + 'dataset')) + + logging.debug("Found variables in recipe:\n%s", available_vars) + available_vars_min_tas = deepcopy(available_vars) + available_vars_min_tas.remove('ta') + # Get input data + data = {} + for varname in available_vars: + data[varname] = select_metadata(cfg['input_data'].values(), + short_name=varname) + data[varname] = sorted_metadata(data[varname], sort='dataset') + + profiles = get_prof_and_plt_data(cfg, data, available_vars_min_tas) + + plot_profiles(cfg, profiles, available_vars_min_tas, available_datasets) + + +if __name__ == '__main__': + + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/cos22esd/climate_change_hotspot.py b/esmvaltool/diag_scripts/cos22esd/climate_change_hotspot.py new file mode 100644 index 0000000000..8126894964 --- /dev/null +++ b/esmvaltool/diag_scripts/cos22esd/climate_change_hotspot.py @@ -0,0 +1,230 @@ +"""Main diagnostic script for the computation of climate change hotspots. + +A comparison between the global and hotspot-region tas and pr can be made. +Output: + - scatter plots relating large-scale vs regional changes. + - fields of the hotspot for DJF and JJA in CMIP5 and CMIP6. +""" +import iris +from esmvalcore.preprocessor import ( + annual_statistics, + anomalies, + area_statistics, + climate_statistics, + extract_region, + extract_season, + extract_time, + mask_landsea, +) + +from esmvaltool.diag_scripts.shared import run_diagnostic, save_data + + +class HotspotDiag: + """Hotspot diagnostics' class. + + Class that reads, post-processes and calls the plotting functions + necessary to obtain the hotspot figures. + """ + + def __init__(self, config): + """Variable definitions. + + config is a dictionary containing metadata regarding input files + and overall, as the name suggests, configuration options. 
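+ Large-scale changes are evaluated globally for tas and over a + latitudinal belt for pr (see self.regions); all anomalies are + computed with respect to the baseline_period given in the recipe.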
+ """ + self.cfg = config + self.anomaly_reference = { + "start_year": int(self.cfg["baseline_period"][0]), + "start_month": 1, + "start_day": 1, + "end_year": int(self.cfg["baseline_period"][1]), + "end_month": 12, + "end_day": 31, + } + self.regions = { + "tas": { + "large_scale": "global", + "regional": self.cfg["region_name"] + }, + "pr": { + "large_scale": "latitudinal belt", + "regional": self.cfg["region_name"] + }, + } + + def compute(self): + """Compute Hotspot diagnostics.""" + input_data_dict = list(self.cfg["input_data"].values())[0] + filename = input_data_dict['filename'] + large_scale_cube = iris.load_cube(filename) + regional_cube = self.extract_regional(large_scale_cube) + if large_scale_cube.var_name == "pr": + large_scale_cube = extract_region(large_scale_cube, -180, 180, + self.cfg["region"][2], + self.cfg["region"][3]) + + for season in ["jja", "djf", "annual"]: + time_extr_large_scale_cube = self.extract_annual_or_season( + large_scale_cube, season) + time_extr_regional_cube = self.extract_annual_or_season( + regional_cube, season) + time_subset_cubes = [ + time_extr_large_scale_cube, time_extr_regional_cube + ] + self.compute_hotspot_fields(time_subset_cubes, season, filename, + input_data_dict) + for key, cube in zip( + ["large_scale", "regional"], + [time_extr_large_scale_cube, time_extr_regional_cube]): + rolling_mean_cube = self.compute_rolling_means(cube) + basename = f"rolling_mean_{key}_{season}" + region = self.regions[regional_cube.var_name][key] + provenance_record = self.get_rolling_mean_record( + input_data_dict, season, region, filename) + save_data(basename, provenance_record, self.cfg, + rolling_mean_cube) + + def extract_regional(self, cube): + """Extract the hotspot region and mask its sea.""" + regional_cube = extract_region( + cube, + self.cfg["region"][0], # start_lon + self.cfg["region"][1], # end_lon + self.cfg["region"][2], # start_lat + self.cfg["region"][3], # end_lat + ) + regional_cube = mask_landsea(regional_cube, "sea") + + return regional_cube + + @staticmethod + def extract_annual_or_season(cube, season): + """Compute the statistics of the cube. + + Either compute annual statistics or extract a season (djf, mam, + jja or son). + """ + if season == "annual": + cube = annual_statistics(cube) + else: + cube = extract_season(cube, season) + return cube + + def compute_hotspot_fields(self, cubes, season, filename, input_data_dict): + """Compute the hotspot fields from the given cubes. + + The resulting hotspot fields are saved to netCDF via save_data. 
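+ The hotspot field is the regional anomaly minus the area mean of + the large-scale anomaly; for pr, both anomalies are first converted + to relative changes (in %) with respect to the baseline period.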
+ """ + regional_anomaly = anomalies(cubes[1], + "full", + reference=self.anomaly_reference) + large_scale_anomaly = anomalies(cubes[0], + "full", + reference=self.anomaly_reference) + if cubes[1].var_name == "pr": + regional_anomaly = self.relative_change(regional_anomaly, cubes[1]) + large_scale_anomaly = self.relative_change(large_scale_anomaly, + cubes[0]) + hotspot = regional_anomaly - \ + area_statistics(large_scale_anomaly, "mean") + hotspot.var_name = f"{regional_anomaly.var_name}_hotspot" + for period in self.cfg["future_periods"]: + start_year = int(period.split("-")[0]) + end_year = int(period.split("-")[-1]) + period_anomaly = extract_time(hotspot, start_year, 1, 1, end_year, + 12, 31) + period_anomaly = climate_statistics(period_anomaly) + + basename = f"hotspot_{season}_{period}" + provenance_record = self.get_hotspot_provenance_record( + input_data_dict, period, season, filename) + save_data(basename, provenance_record, self.cfg, period_anomaly) + + def compute_rolling_means(self, cube): + """Compute the 10yr rolling mean anomaly. + + A timeseries with respect to the baseline period is obtained for + the input cube. + """ + area_mean = area_statistics(cube, 'mean') + anomaly = anomalies(area_mean, + "full", + reference=self.anomaly_reference) + if cube.var_name == "pr": + anomaly = self.relative_change(anomaly, area_mean) + timeseries_cube = anomaly.rolling_window("time", iris.analysis.MEAN, + 10) + return timeseries_cube + + def relative_change(self, anomaly, cube): + """Compute relative anomaly.""" + baseline_cube = extract_time(cube, + int(self.cfg["baseline_period"][0]), 1, 1, + int(self.cfg["baseline_period"][1]), 12, + 31) + baseline_cube = climate_statistics(baseline_cube) + anomaly /= baseline_cube + return anomaly * 100 + + def get_hotspot_provenance_record(self, attributes, period, season, + ancestor_files): + """Create a provenance record. + + It describes the hotspot fields diagnostic data. + """ + baseline = self.cfg["baseline_period"] + caption = (f"{attributes['project']} {season.upper()} " + f"{attributes['long_name']} differences between global " + f"and {self.cfg['region_name']} anomalies of period " + f"{period}, against the baseline period " + f"{baseline[0]}-{baseline[1]}.") + + record = { + 'caption': caption, + 'statistics': ['mean', 'anomaly', 'diff'], + 'domains': ['global', 'reg'], + 'authors': [ + 'cos_josep', + ], + 'references': [ + 'cos22esd', + ], + 'ancestors': [ancestor_files], + } + return record + + def get_rolling_mean_record(self, attributes, season, region, + ancestor_files): + """Create a provenance record. + + It describes the rolling mean diagnostic data. 
+ """ + baseline = self.cfg["baseline_period"] + caption = (f"{attributes['project']} {season.upper()} 10 year " + f"{attributes['long_name']} rolling mean between " + f"{attributes['start_year']} and {attributes['end_year']}, " + f"against the baseline period {baseline[0]}-{baseline[1]}, " + f"according to the {region} mean") + + domain = "reg" + if region == "global": + domain = "global" + + record = { + 'caption': caption, + 'statistics': ['anomaly', "other"], + 'domains': [domain], + 'authors': [ + 'cos_josep', + ], + 'references': [ + 'cos22esd', + ], + 'ancestors': [ancestor_files], + } + return record + + +if __name__ == "__main__": + with run_diagnostic() as cfg: + HotspotDiag(cfg).compute() diff --git a/esmvaltool/diag_scripts/cos22esd/hotspot_plotter.py b/esmvaltool/diag_scripts/cos22esd/hotspot_plotter.py new file mode 100644 index 0000000000..e7de5764e8 --- /dev/null +++ b/esmvaltool/diag_scripts/cos22esd/hotspot_plotter.py @@ -0,0 +1,776 @@ +"""Functions to plot output from ancestor cos22esd/climate_change_hotspot.py. + +The plots produced reproduce Figs. 2, 3, S1, S2, S4 from Cos et al. +2022. +""" +import os +from copy import deepcopy + +import cartopy.crs as ccrs +import cartopy.feature as cf +import iris +import iris.plot as iplt +import matplotlib.path as mpath +import matplotlib.pyplot as plt +import numpy as np +from matplotlib import colors +from matplotlib.gridspec import GridSpec +from matplotlib.patches import Patch +from scipy import stats + +from esmvaltool.diag_scripts.shared import ( + get_cfg, + group_metadata, + io, + names, + run_diagnostic, + save_figure, +) + + +class HotspotPlot: + """Class that plots the results. + + The obtained plots correspond to the hotspot figures + in Cos et al. 2022, ESD. + """ + + def __init__(self, config): + """Variable definition. + + Config is a dictionary containing metadata regarding input files + and overall, as the name suggests, configuration options. 
+ """ + self.cfg = config + self.var_combinations = ["tas:tas", "pr:pr", "pr:tas"] + self.seasons = ["jja", "djf", "annual"] + self.projects = ["cmip5", "cmip6"] + self.variables = ["tas", "pr"] + self.scenarios = ["26", "45", "85"] + + # generate list of candidate bound limits + small = np.arange(0.1, 1, 0.1) + medium = np.arange(1, 11) + high = np.arange(20, 100, 10) + v_high = np.arange(150, 400, 50) + self.bound_candidates = np.concatenate( + (small, medium, high, v_high)) * 5 / 4 + + def compute(self): + """Collect datasets and call the plotting functions.""" + self.find_n() + + # call hotspot field plots + for scenario in self.scenarios: + fields_dict = {} + ancestor_files = [] + for filename in io.get_all_ancestor_files(self.cfg, + pattern='hotspot_*.nc'): + key = os.path.basename(os.path.dirname(filename)) + splitname = os.path.basename(filename).split("_") + if key.split("_")[-1] == scenario: + fields_dict[( + f"{splitname[-1].split('.nc')[0]}_" + f"{splitname[1]}_{key}")] = iris.load_cube(filename) + ancestor_files.append(filename) + fields_dict["scenario"] = scenario + fields_dict["ancestors"] = ancestor_files + self.hotspot_fields_plot(fields_dict) + + # call scatter plots + for season in self.seasons: + timeseries_dict = {"large_scale": {}, "regional": {}} + for region, value in timeseries_dict.items(): + for filename in io.get_all_ancestor_files( + self.cfg, + pattern=f'rolling_mean_{region}_{season}.nc'): + value[os.path.basename(os.path.dirname(filename))] = ( + iris.load_cube(filename)) + value[os.path.basename( + os.path.dirname(filename))] = (filename) + for var_combination in self.var_combinations: + self.timeseries_scatter_plot(deepcopy(timeseries_dict), season, + var_combination) + + def hotspot_fields_plot(self, results_dict, tas_bound=None, pr_bound=None): + """Regional climate change hotspot maps for TAS and PR. + + Local temperature and precipitation change differences + with respect to the mean global temperature change and the + mean latitudinal belt precipitation change. + The changes are computed with respect to the 1986-2005 mean + for the mid-term and long-term periods. + + The differences are shown for the CMIP5 and CMIP6 winter, + summer and annual mean projections. + + N indicates the number of models included in the ensemble mean. 
+ """ + sorted_keys = [(f"{period}_{season}_{variable}" + f"_{project}_{results_dict['scenario']}") + for variable in self.variables + for period in self.cfg["future_periods"] + for project in self.projects for season in self.seasons] + sorted_keys = [ + sorted_keys[:len(sorted_keys) // 2], + sorted_keys[len(sorted_keys) // 2:] + ] + ancestor_files_var = [[ + ancestor_file for ancestor_file in results_dict["ancestors"] + if f"/{var}_" in ancestor_file + ] for var in self.variables] + for ancestor_files, keys, variable in zip(ancestor_files_var, + sorted_keys, self.variables): + fig = plt.figure(figsize=(14.4, 3.4), + constrained_layout=True, + dpi=300) + plt.gcf().subplots_adjust() + # bound colorbar to abs(max) value on the map + style = self.cb_bounds(variable, results_dict, keys, + [tas_bound, pr_bound]) + # plot each panel + fill, frame = self._hotspot_fields_plot_panels( + results_dict, fig, keys, style) + # plot figtexts + self._hotspot_fields_plot_figtexts(results_dict['scenario'], frame) + # plot line + self._hotspot_fields_plot_line(fig, frame) + # plot colorbar + cbar = plt.colorbar(fill, + plt.gcf().add_axes([0.25, 0.125, 0.5, 0.04]), + orientation="horizontal", + extend="both") + if variable == "pr": + cbar.set_label("%") + against_region = ( + f"{self.cfg['region'][2]}$^o$ N-" + f"{self.cfg['region'][3]}$^o$ N latitudinal belt") + else: + cbar.set_label( + self.formatter(str(results_dict[keys[-1]].units))) + against_region = "global" + + # plot title and save + self._hotspot_fields_plot_save(against_region, variable, + results_dict['scenario'], + ancestor_files) + + def _hotspot_fields_plot_panels(self, results_dict, fig, keys, style): + """Plot field panels.""" + # define projection + proj, path_ext, plotextend = self.define_projection( + self.cfg["region"]) + # define axes and panels [top, bottom, left, right] + frame = [0.75, 0.2, 0.02, 0.99] + gspec = GridSpec( + len(self.cfg["future_periods"]), + len(self.seasons) * 2, + figure=fig, + hspace=0.005, + wspace=0.005, + top=frame[0], + bottom=frame[1], + left=frame[2], + right=frame[3], + ) + + for i, key in enumerate(keys): + if i < 6: + axes = fig.add_subplot(gspec[0, i], projection=proj) + plt.title(f"{key.split('_')[1].upper()}") + else: + axes = fig.add_subplot(gspec[1, i - 6], projection=proj) + self._add_axes_attributes(axes, path_ext, plotextend) + norm = colors.BoundaryNorm(boundaries=style[0], + ncolors=256, + extend="both") + fill = iplt.pcolormesh( + self.regrid_longitude_coord(results_dict[key]), + norm=norm, + coords=(names.LON, names.LAT), + cmap=style[1], + ) + return fill, frame + + @staticmethod + def _add_axes_attributes(axes, path_ext, plotextend): + axes.set_boundary(path_ext, transform=ccrs.PlateCarree()) + axes.set_facecolor("silver") + axes.set_extent(plotextend, crs=ccrs.PlateCarree()) + axes.coastlines("50m", linewidth=0.8) + axes.add_feature(cf.BORDERS, alpha=0.4) + + def _hotspot_fields_plot_figtexts(self, scenario, frame): + """Plot period and scenario labels.""" + top, bottom, left, right = frame + for p_ind, project in enumerate(self.projects): + n_models = self.cfg["N"][f"{project}_{scenario}"] + plt.figtext( + left + 0.18 + p_ind * (right - left) / 2, + 0.85, + (f"{self.formatter(project.upper())} " + f"{self.formatter(f'{project}-{scenario}')} " + f"(N={n_models})"), + fontsize="large", + ) + for row, period in enumerate(self.cfg["future_periods"]): + ypos = top - (top - bottom) / 2 * (1 + row * 1.1) + 0.05 + plt.figtext( + 0.005, + ypos, + period, + rotation="vertical", + fontsize="11", + ) 
+
+    @staticmethod
+    def _hotspot_fields_plot_line(fig, frame):
+        """Plot separator line."""
+        _, bottom, left, right = frame
+        mid = left + (right - left) / 2
+        line = plt.Line2D((mid, mid), (bottom, 0.9), color="k", linewidth=1)
+        fig.add_artist(line)
+
+    def _hotspot_fields_plot_save(self, against_region, variable, scenario,
+                                  ancestor_files):
+        """Plot title and save figure."""
+        suptitle = (f"{self.cfg['region_name']} {variable.upper()} "
+                    f"change against mean {against_region} future "
+                    f"climatology. Baseline period: "
+                    f"{self.cfg['baseline_period'][0]}-"
+                    f"{self.cfg['baseline_period'][1]}")
+        plt.suptitle(suptitle, fontsize=13)
+
+        basename = f"{variable}_{scenario}"
+        provenance_record = self.get_hotspot_provenance(
+            suptitle, scenario, ancestor_files)
+        save_figure(basename, provenance_record, self.cfg)
+
+    def timeseries_scatter_plot(self, results_dict, season, var_combination):
+        """Regional vs large-scale changes for three scenarios.
+
+        Computed for different seasons for the CMIP5 and CMIP6 ensemble means.
+        Each dot in the plot represents a 10-year mean change beginning from
+        1960-1969 (light coloring) until 2091-2100 (opaque coloring).
+        The changes are computed with 1986-2005 as a baseline.
+
+        An ordinary least squares linear regression is computed and the
+        slope and rvalue are shown. N indicates the number of models
+        included in the ensemble mean.
+        """
+        legend_elements = {}
+        fig = plt.figure(figsize=(12, 4), constrained_layout=True, dpi=300)
+        gspec = fig.add_gridspec(1, 3)
+        rvalue = {}
+        slope = {}
+        min_range, max_range = {}, {}
+        min_glob, max_glob = [], []
+        axes = []
+        ancestor_files = []
+        for panel, scen in enumerate(self.scenarios):
+            legend_elements = {scen: []}
+            axes.append(fig.add_subplot(gspec[0, panel]))
+            regional_keys = [
+                (f"{var_combination.split(':')[0]}_{proj}"
+                 f"_{scen}") for proj in self.projects
+            ]
+            large_scale_keys = [
+                (f"{var_combination.split(':')[1]}_"
+                 f"{project}_{scen}")
+                for project in self.projects
+            ]
+            for regional_key, large_scale_key in zip(regional_keys,
+                                                     large_scale_keys):
+                project, large_scale_signal_ts, regional_signal_ts = (
+                    self._timeseries_scatter_plot_data(results_dict,
+                                                       large_scale_key,
+                                                       regional_key))
+                # find linear regression
+                rvalue[project], slope[project], y_values = (
+                    self._timeseries_scatter_plot_reg(large_scale_signal_ts,
+                                                      regional_signal_ts,
+                                                      rvalue, slope))
+                # find max and min axis limits for project
+                min_range[project], max_range[
+                    project] = self._timeseries_scatter_plot_rges(
+                        large_scale_signal_ts, regional_signal_ts,
+                        var_combination)
+
+                title_format = {
+                    "26": "RCP2.6/SSP1-2.6",
+                    "45": "RCP4.5/SSP2-4.5",
+                    "60": "RCP6.0/SSP4-6.0",
+                    "85": "RCP8.5/SSP5-8.5",
+                }
+
+                # colors and scatter plot
+                color = self._timeseries_scatter_plot_panel(
+                    [large_scale_signal_ts, regional_signal_ts], axes[panel],
+                    project, y_values)
+
+                if len(legend_elements[scen]) < 2:
+                    legend_elements[scen].append(
+                        Patch(
+                            facecolor=color,
+                            edgecolor=color,
+                            label=(f"{self.formatter(project.upper())} (N="
+                                   f"{self.cfg['N'][f'{project}_{scen}']})"),
+                        ))
+
+                max_glob.append(max(large_scale_signal_ts))
+                min_glob.append(min(large_scale_signal_ts))
+                # collect used ancestor files
+                ancestor_files.append(
+                    results_dict["large_scale"][regional_key])
+                ancestor_files.append(results_dict["regional"][regional_key])
+
+            meta = {
+                "var_combination": var_combination,
+                "title_format": title_format[scen],
+                "legend_elements": legend_elements[scen],
+                "slope": slope,
+                "rvalue": rvalue,
+                "season": season,
+ } + suptitle = self._timeseries_scatter_plot_lbls( + results_dict, [large_scale_keys, regional_keys], axes[panel], + meta) + + lims = self._timeseries_scatter_plot_lims(min_range, max_range) + + self._timeseries_scatter_plot_lines(axes[panel]) + + mins_maxs = lims[0], lims[1], min_glob, max_glob + self._timeseries_scatter_plot_axlim(axes, var_combination, slope, + mins_maxs) + + provenance_record = self.get_rolling_mean_provenance( + suptitle, ancestor_files) + basename = ("scenario_combination_" + f"{var_combination.replace(':', '-')}_{season}") + save_figure(basename, provenance_record, self.cfg) + + @staticmethod + def _timeseries_scatter_plot_data(results_dict, large_scale_key, + regional_key): + """Read regional and large scale data.""" + project = regional_key.split("_")[1] + ls_cube = results_dict["large_scale"][large_scale_key] + large_scale_signal_ts = iris.load_cube(ls_cube).data + r_cube = results_dict["regional"][regional_key] + regional_signal_ts = iris.load_cube(r_cube).data + return project, large_scale_signal_ts, regional_signal_ts + + @staticmethod + def _timeseries_scatter_plot_reg(large_scale_signal_ts, regional_signal_ts, + rvalue, slope): + """Compute the linear regression.""" + res = stats.linregress(large_scale_signal_ts, regional_signal_ts) + y_values = res.intercept + res.slope * \ + np.array(large_scale_signal_ts) + rvalue = res.rvalue + slope = res.slope + return rvalue, slope, y_values + + @staticmethod + def _timeseries_scatter_plot_rges(large_scale_signal_ts, + regional_signal_ts, var_combination): + """Find the ranges for the x and y-axis.""" + if var_combination == "pr:tas": + max_range = max(regional_signal_ts) + min_range = min(regional_signal_ts) + else: + if max(regional_signal_ts) > max(large_scale_signal_ts): + max_range = max(regional_signal_ts) + else: + max_range = max(large_scale_signal_ts) + if min(regional_signal_ts) < min(large_scale_signal_ts): + min_range = min(regional_signal_ts) + else: + min_range = min(large_scale_signal_ts) + return min_range, max_range + + def _timeseries_scatter_plot_panel(self, data, axes, project, y_values): + """Plot the scatter and the adjusted slopes.""" + timesteps = np.linspace(0, 1, len(data[0])) + if project == "cmip6": + cb_colors = plt.cm.Reds(np.linspace(0, 1, len(data[1]))) + if project == "cmip5": + cb_colors = plt.cm.Blues(np.linspace(0, 1, len(data[1]))) + cb_colors[:, -1] = timesteps + + axes.scatter( + data[0], + data[1], + facecolors="none", + linewidths=0.8, + s=70, + color=cb_colors, + label=self.formatter(project.upper()), + ) + base_colors = {"cmip5": "#2161A6", "cmip6": "#BB3437"} + # plot regression + axes.plot(data[0], y_values, color=base_colors[project]) + return base_colors[project] + + def _timeseries_scatter_plot_lbls(self, results_dict, keys, axes, meta): + """Plot the titles, suptitles and legends.""" + if meta["var_combination"].partition(":")[-1] == "tas": + against_region = "Global" + else: + against_region = ( + f"{self.cfg['region'][2]}$^o$ N-{self.cfg['region'][3]}" + f"$^o$ N latitudinal belt") + large_scale_units = self.formatter( + str( + iris.load_cube( + results_dict['large_scale'][keys[0][-1]]).units)) + regional_units = self.formatter( + str(iris.load_cube(results_dict['regional'][keys[1][-1]]).units)) + xlabel = (f"{against_region} " + f"{meta['var_combination'].partition(':')[-1].upper()} " + f"[{large_scale_units}]") + axes.set_xlabel(xlabel) + ylabel = (f"{self.cfg['region_name']} " + f"{meta['var_combination'].partition(':')[0].upper()} " + f"[{regional_units}]") + 
axes.set_ylabel(ylabel) + + axes.set_title(f"Scenario: {meta['title_format']} \n CMIP5: rval=" + f"{meta['rvalue']['cmip5']:.3f}; " + f"slope={meta['slope']['cmip5']:.3f} " + f"\n CMIP6: rval={meta['rvalue']['cmip6']:.3f}; " + f"slope={meta['slope']['cmip6']:.3f}") + axes.legend(handles=meta["legend_elements"]) + + long_name_dict = {"pr": "precipitation", "tas": "temperature"} + if meta["var_combination"] == "pr:tas": + suptitle = (f"{self.cfg['region_name']} {meta['season'].upper()} " + f"precipitation vs global {meta['season'].upper()} " + f"temperature.\n 10yr rolling means 1960-2100, " + f"Baseline: 1986-2005") + plt.suptitle(suptitle) + else: + y_combination = meta["var_combination"].partition(':')[0] + suptitle = (f"{self.cfg['region_name']} vs {against_region} " + f"{meta['season'].upper()} " + f"{long_name_dict[y_combination]}" + f".\n 10yr rolling means 1960-2100, " + f"Baseline: 1986-2005") + plt.suptitle(suptitle) + return suptitle + + @staticmethod + def _timeseries_scatter_plot_lims(min_range, max_range): + max_lim = max(max_range.values()) + min_lim = min(min_range.values()) + delta_range = max_lim - min_lim + min_lim -= delta_range * 0.1 + max_lim += delta_range * 0.1 + return [min_lim, max_lim] + + @staticmethod + def _timeseries_scatter_plot_lines(axes): + """Draw the reference vertical and horizontal lines.""" + axes.axvline( + x=0, + ymin=-1000, + ymax=1000, + color="grey", + linestyle="dotted", + alpha=0.6, + ) + axes.axhline( + y=0, + xmin=-1000, + xmax=1000, + color="grey", + linestyle="dotted", + alpha=0.6, + ) + + @staticmethod + def _timeseries_scatter_plot_axlim(axes, var_combination, slope, + mins_maxs): + """Fix the x and y-axis limits.""" + min_lim, max_lim, min_glob, max_glob = mins_maxs + for box in range(3): + axes[box].set_ylim(min_lim, max_lim) + if var_combination == "pr:tas": + min_l = min(min_glob) - (max(max_glob) - min(min_glob)) * 0.1 + max_l = max(max_glob) + (max(max_glob) - min(min_glob)) * 0.1 + axes[box].set_xlim(min_l, max_l) + else: + axes[box].set_xlim(min_lim, max_lim) + + if (slope["cmip5"] + slope["cmip6"]) >= 0: + axes[box].plot( + [-1000, 1000], + [-1000, 1000], + color="gray", + alpha=0.6, + ) + else: + axes[box].plot( + [-1000, 1000], + [1000, -1000], + color="gray", + alpha=0.6, + ) + + @staticmethod + def formatter(text): + """Text definitions to format strings.""" + repl_map = { + "degC": "$^o$C", + "K": "$^o$C", + "month-1": "month$^{{-1}}$", + "day-1": "day$^{{-1}}$", + "d-1": "day$^{{-1}}$", + "decade-1": "decade$^{{-1}}$", + "year-1": "year$^{{-1}}$", + "rcp85": "RCP8.5", + "rcp45": "RCP4.5", + "rcp26": "RCP2.6", + "RCP85": "RCP8.5", + "RCP45": "RCP4.5", + "RCP26": "RCP2.6", + "cmip5-85": "RCP8.5", + "cmip5-60": "RCP6.0", + "cmip5-45": "RCP4.5", + "cmip5-26": "RCP2.6", + "ssp585": "SSP5-8.5", + "ssp245": "SSP2-4.5", + "ssp126": "SSP1-2.6", + "SSP585": "SSP5-8.5", + "SSP245": "SSP2-4.5", + "SSP126": "SSP1-2.6", + "cmip6-85": "SSP5-8.5", + "cmip6-70": "SSP3-7.0", + "cmip6-60": "SSP4-6.0", + "cmip6-34": "SSP4-3.4", + "cmip6-45": "SSP2-4.5", + "cmip6-26": "SSP1-2.6", + "cmip6-19": "SSP1-1.9", + "1": "%", + "era5": "ERA5", + "gpcc025x025_v8": "GPCC", + "cru": "CRU", + "jra55": "JRA55", + "HIGHRESMIP": "HighResMIP", + " ": "", + } + for key, val in repl_map.items(): + if key in text: + text = text.replace(key, val) + break + return text + + def cb_bounds(self, variable, results_dict, keys, fixed_bounds): + """Fix colorbar bounds and cmap.""" + tas_bound, pr_bound = fixed_bounds + if variable == "tas": + if tas_bound: + bound_limit = 
tas_bound
+            else:
+                bound_limit = self.find_abs_bound_range(results_dict, keys)
+            cmap = plt.cm.RdBu_r
+        else:
+            if pr_bound:
+                bound_limit = pr_bound
+            else:
+                bound_limit = self.find_abs_bound_range(results_dict,
+                                                        keys,
+                                                        avg_over=25)
+            cmap = plt.cm.BrBG
+        bounds = np.linspace(-1 * bound_limit, bound_limit, 11)
+        return [bounds, cmap]
+
+    def find_abs_bound_range(self, results_dict, keys, avg_over=5):
+        """Find suitable bounds for the colorbar.
+
+        It takes into account the absolute maximum value from all the
+        panels.
+        """
+        max_averages = []
+        min_averages = []
+        for key in keys:
+            result_data = results_dict[key].data
+            # compress to remove masked values
+            sorted_data = np.sort(result_data.compressed())
+            # select the "avg_over" extreme values from the array
+            # and find their average value
+            max_average_data = np.average(sorted_data[-avg_over:])
+            min_average_data = np.average(sorted_data[:avg_over])
+            max_averages.append(max_average_data)
+            min_averages.append(min_average_data)
+
+        # the maximum absolute value for the bound
+        abs_max = np.abs(np.max(max_averages))
+        abs_min = np.abs(np.min(min_averages))
+        max_bound = np.max([abs_min, abs_max])
+
+        # find the bound candidate suited for the bound range
+        index = np.argwhere(self.bound_candidates - max_bound > 0)[0, 0]
+
+        return self.bound_candidates[index]
+
+    @staticmethod
+    def region_to_square(region, dimension):
+        """Definition of the region polygon."""
+        if dimension == "latitude":
+            boundaries = [
+                region["start_latitude"],
+                region["start_latitude"],
+                region["end_latitude"],
+                region["end_latitude"],
+                region["start_latitude"],
+            ]
+        elif dimension == "longitude":
+            boundaries = [
+                region["start_longitude"],
+                region["end_longitude"],
+                region["end_longitude"],
+                region["start_longitude"],
+                region["start_longitude"],
+            ]
+        return boundaries
+
+    def define_projection(self, region):
+        """Projection definition to get LambertConformal borders."""
+        region = {
+            "start_longitude": region[0],
+            "end_longitude": region[1],
+            "start_latitude": region[2],
+            "end_latitude": region[3],
+        }
+        projection = "LambertConformal"
+        plotextend = [
+            region["start_longitude"],
+            region["end_longitude"],
+            region["start_latitude"],
+            region["end_latitude"],
+        ]
+        if projection == "LambertConformal":
+            # plotextend has to be a little larger so everything is on there
+            plotextend = [
+                plotextend[0] - 1.0,
+                plotextend[1] + 1.0,
+                plotextend[2] - 1.0,
+                plotextend[3] + 1.0,
+            ]
+            # path to cut out is exact though
+            lons = self.region_to_square(region, "longitude")
+            lats = self.region_to_square(region, "latitude")
+            path_ext = [[lon, lat] for lon, lat in zip(lons, lats)]
+            path_ext = mpath.Path(path_ext).interpolated(20)
+        # Southern Hemisphere
+        if region["start_latitude"] <= 0 and region["end_latitude"] <= 0:
+            proj = ccrs.LambertConformal(
+                central_longitude=np.sum(plotextend[:2]) / 2.0,
+                central_latitude=np.sum(plotextend[2:]) / 2.0,
+                cutoff=+30,
+                standard_parallels=(-33, -45),
+            )
+        # Northern Hemisphere
+        else:
+            proj = ccrs.LambertConformal(
+                central_longitude=np.sum(plotextend[:2]) / 2.0,
+                central_latitude=np.sum(plotextend[2:]) / 2.0,
+            )
+        return proj, path_ext, plotextend
+
+    @staticmethod
+    def sorted_dim(cube, coord="longitude"):
+        """Sorts the cube data according to the longitude coordinate values.
+
+        example: 180/-180 --> -180/180
+        """
+        coord_to_sort = cube.coord(coord)
+        assert coord_to_sort.ndim == 1, "Coord should be 1-dimensional."
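+        # ``argsort`` of the coordinate points gives the permutation that
+        # puts the longitudes in ascending order; indexing the cube with it
+        # below reorders the data along that dimension only.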
+ (dim, ) = cube.coord_dims(coord_to_sort) + index = [slice(None)] * cube.ndim + index[dim] = np.argsort(coord_to_sort.points) + cube = cube[tuple(index)] + coord = cube.coord(coord) + iris.util.promote_aux_coord_to_dim_coord(cube, "longitude") + return cube + + def regrid_longitude_coord(self, cube): + """Sorts the longitudes of the cubes from 0/360 degrees to -180/180.""" + # make a list with the 'longitude' coord in the form: 0/180/-180/0 + neg_lons = ((cube.coord("longitude").points + 180) % 360) - 180 + # interpolates the cube data to the new 'longitude' dimensions + cube = cube.interpolate([("longitude", neg_lons)], + iris.analysis.Linear()) + sorted_cube = self.sorted_dim(cube) + return sorted_cube + + def find_n(self): + """Find how many models are inside each multi-model mean.""" + metadata_files = [ + file for file in self.cfg["input_files"] + if "tas/metadata.yml" in file + ] + self.cfg["N"] = {} + for meta_file in metadata_files: + n_identifyer = meta_file.split("/tas/")[0].split("/tas_")[-1] + metadata = group_metadata(get_cfg(meta_file).values(), "dataset") + self.cfg["N"][n_identifyer] = len(metadata.keys()) - 1 + + def get_hotspot_provenance(self, suptitle, scenario, ancestor_files): + """Create a provenance record describing the hotspot fields plots.""" + caption = (f"{suptitle}. Calculated for seasons " + f"{self.seasons[0].upper()}, " + f"{self.seasons[1].upper()} and {self.seasons[2].upper()} " + f"in the future periods {self.cfg['future_periods'][0]} " + f"and {self.cfg['future_periods'][1]} " + f"for CMIP5 {self.formatter(f'cmip5-{scenario}')} " + f"and CMIP6 {self.formatter(f'cmip6-{scenario}')}") + + record = { + 'caption': caption, + 'statistics': ['anomaly', 'diff'], + 'domains': ['reg'], + 'plot_types': ['map'], + 'authors': [ + 'cos_josep', + ], + 'references': [ + 'cos22esd', + ], + 'ancestors': ancestor_files, + } + return record + + def get_rolling_mean_provenance(self, suptitle, ancestor_files): + """Create a provenance record with the rolling mean diagnostic data.""" + suptitle = suptitle.replace('\n', '') + caption = (f"{suptitle}. 
For CMIP5 (" + f"{self.formatter(f'cmip5-{self.scenarios[0]}')}, " + f"{self.formatter(f'cmip5-{self.scenarios[1]}')} and " + f"{self.formatter(f'cmip5-{self.scenarios[2]}')}) " + f"and CMIP6 " + f"({self.formatter(f'cmip6-{self.scenarios[0]}')}, " + f"{self.formatter(f'cmip6-{self.scenarios[1]}')} and " + f"{self.formatter(f'cmip6-{self.scenarios[2]}')})") + + record = { + 'caption': caption, + 'statistics': ['anomaly', "other"], + 'domains': ['reg', 'global'], + 'plot_types': ['scatter', 'line', 'times'], + 'authors': [ + 'cos_josep', + ], + 'references': [ + 'cos22esd', + ], + 'ancestors': ancestor_files, + } + return record + + +if __name__ == "__main__": + with run_diagnostic() as cfg: + HotspotPlot(cfg).compute() diff --git a/esmvaltool/diag_scripts/crem/ww09_esmvaltool.py b/esmvaltool/diag_scripts/crem/ww09_esmvaltool.py index 8fe9f0b0da..be53d50543 100644 --- a/esmvaltool/diag_scripts/crem/ww09_esmvaltool.py +++ b/esmvaltool/diag_scripts/crem/ww09_esmvaltool.py @@ -26,22 +26,22 @@ none Modification history - 20190216-A_laue_ax: outsourced regridding to preprocessor - 20190215-A_laue_ax: added metadata to netcdf output and plot - 20190213-A_laue_ax: made code more flexible to support CMIP6 data - 20181012-A_laue_ax: extended (optional) netCDF output - 20180920-A_laue_ax: code adapted for ESMValTool v2.0 - 20171128-A_laue_ax: added author and diagname to meta data - switched off "replacing of exact values" - in regridding function - 20170713-A_laue_ax: added tagging (for reporting) - 20151117-A_laue_ax: added parameters for call to "write_references" - 20151113-A_laue_ax: added creation of directory for plots if needed - (code was crashing if directory does not exist) - 20151029-A_laue_ax: added output of acknowledgements + processed files - to log-file - 20150903-A_laue_ax: ESMValTool implementation. - 20150521-A_will_ke: CREM routines written. + 20190216-lauer_axel: outsourced regridding to preprocessor + 20190215-lauer_axel: added metadata to netcdf output and plot + 20190213-lauer_axel: made code more flexible to support CMIP6 data + 20181012-lauer_axel: extended (optional) netCDF output + 20180920-lauer_axel: code adapted for ESMValTool v2.0 + 20171128-lauer_axel: added author and diagname to meta data + switched off "replacing of exact values" + in regridding function + 20170713-lauer_axel: added tagging (for reporting) + 20151117-lauer_axel: added parameters for call to "write_references" + 20151113-lauer_axel: added creation of directory for plots if needed + (code was crashing if directory does not exist) + 20151029-lauer_axel: added output of acknowledgements + processed files + to log-file + 20150903-lauer_axel: ESMValTool implementation. + 20150521-williams_keith: CREM routines written. """ import logging import os @@ -207,8 +207,8 @@ def main(cfg): 'domains': ['global'], 'plot_type': 'bar', 'authors': [ - 'will_ke', - 'laue_ax', + 'williams_keith', + 'lauer_axel', ], 'references': [ 'acknow_project', @@ -218,25 +218,22 @@ def main(cfg): # plot results - if cfg['write_plots']: - plotname = os.path.join( - cfg['plot_dir'], - 'ww09_metric_multimodel.' + cfg['output_file_type'], - ) - logger.debug("Plotting results to %s", plotname) + plotname = os.path.join( + cfg['plot_dir'], + 'ww09_metric_multimodel.' 
+ cfg['output_file_type'], + ) + logger.debug("Plotting results to %s", plotname) - plt.figure() - ypos = np.arange(nummod) - plt.barh(ypos, crems, align='center') - plt.yticks(ypos, models) - plt.xlabel('Cloud Regime Error Metric') + plt.figure() + ypos = np.arange(nummod) + plt.barh(ypos, crems, align='center') + plt.yticks(ypos, models) + plt.xlabel('Cloud Regime Error Metric') - # draw observational uncertainties (dashed red line) - plt.plot([0.96, 0.96], [-0.5, nummod - 0.5], 'r--') + # draw observational uncertainties (dashed red line) + plt.plot([0.96, 0.96], [-0.5, nummod - 0.5], 'r--') - plt.savefig(plotname, bbox_inches='tight') - - provenance_record['plot_file'] = plotname + plt.savefig(plotname, bbox_inches='tight') # save results to netcdf @@ -282,6 +279,7 @@ def main(cfg): with ProvenanceLogger(cfg) as provenance_logger: provenance_logger.log(oname, provenance_record) + provenance_logger.log(plotname, provenance_record) def read_and_check(srcfilename, varname, lons2, lats2, time2): diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/driver.ncl b/esmvaltool/diag_scripts/cvdp/cvdp/driver.ncl index 58a632832f..74dc29e402 100644 --- a/esmvaltool/diag_scripts/cvdp/cvdp/driver.ncl +++ b/esmvaltool/diag_scripts/cvdp/cvdp/driver.ncl @@ -211,4 +211,4 @@ png_scale,webpage_title,compute_modes_mon,met_files/]) print("Finished: Climate Variability Diagnostics Package ("+systemfunc("date")+")") - + \ No newline at end of file diff --git a/esmvaltool/diag_scripts/cvdp/cvdp_wrapper.py b/esmvaltool/diag_scripts/cvdp/cvdp_wrapper.py index a82291b56b..1c01213e10 100644 --- a/esmvaltool/diag_scripts/cvdp/cvdp_wrapper.py +++ b/esmvaltool/diag_scripts/cvdp/cvdp_wrapper.py @@ -5,13 +5,16 @@ import shutil import subprocess -from esmvaltool._task import DiagnosticError -from esmvaltool.diag_scripts.shared import (group_metadata, run_diagnostic, - select_metadata) +from esmvaltool.diag_scripts.shared import (group_metadata, run_diagnostic) +from esmvaltool.diag_scripts.shared import ProvenanceLogger logger = logging.getLogger(os.path.basename(__file__)) +class DiagnosticError(Exception): + """Error in diagnostic.""" + + def setup_driver(cfg): """Write the driver.ncl file of the cvdp package.""" cvdp_root = os.path.join(os.path.dirname(__file__), 'cvdp') @@ -34,8 +37,10 @@ def _update_settings(line): search_results = re.findall(pattern, line) if search_results == []: continue - return re.sub( - r'".+?"', '"{0}"'.format(value), search_results[0], count=1) + return re.sub(r'".+?"', + '"{0}"'.format(value), + search_results[0], + count=1) return line @@ -52,7 +57,7 @@ def _update_settings(line): new_driver_file.write("".join(content)) -def create_link(cfg, inpath): +def create_link(cfg, inpath, _name): """Create link for the input file. The link matches the naming convention of the cvdp package. 
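The hunk below reworks `_create_link_name`: the `YYYY-YYYY` year span in the
input filename is expanded to the `YYYYMM-YYYYMM` form that the CVDP package
expects, and the dataset alias is prepended. A condensed, self-contained
restatement of that renaming logic (the function name and the example
filename are illustrative only, not part of the patch):

    import re

    def sketch_link_name(alias, tail):
        # e.g. tail = 'tas_Amon_XYZ_historical_r1i1p1_1960-2005.nc'
        span = re.search(r'[0-9]{4}-[0-9]{4}', tail).group(0)  # '1960-2005'
        return alias + "_" + tail.replace(
            span, "{0}01-{1}12".format(*span.split('-')))

    # sketch_link_name('ABC', 'tas_Amon_XYZ_historical_r1i1p1_1960-2005.nc')
    # -> 'ABC_tas_Amon_XYZ_historical_r1i1p1_196001-200512.nc'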
@@ -61,22 +66,23 @@ def create_link(cfg, inpath): cfg: configuration dict inpath: path to infile """ - def _create_link_name(inpath): + def _create_link_name(): tail = os.path.split(inpath)[1] search_result = re.search(r'[0-9]{4}-[0-9]{4}', tail).group(0) - return tail.replace(search_result, - "{0}01-{1}12".format(*search_result.split('-'))) + return _name + "_" + tail.replace( + search_result, "{0}01-{1}12".format(*search_result.split('-'))) if not os.path.isfile(inpath): raise DiagnosticError("Path {0} does not exist".format(inpath)) - lnk_dir = os.path.join(cfg['run_dir'], "links") + lnk_dir = cfg['lnk_dir'] if not os.path.isdir(lnk_dir): os.mkdir(lnk_dir) - link = os.path.join(lnk_dir, _create_link_name(inpath)) - os.symlink(inpath, link) + link = os.path.join(lnk_dir, _create_link_name()) + if not os.path.exists(link): + os.symlink(inpath, link) return link @@ -84,18 +90,16 @@ def _create_link_name(inpath): def setup_namelist(cfg): """Set the namelist file of the cvdp package.""" input_data = cfg['input_data'].values() - selection = select_metadata(input_data, project='CMIP5') - grouped_selection = group_metadata(selection, 'dataset') + grouped_selection = group_metadata(input_data, 'alias') content = [] - for key, value in grouped_selection.items(): - links = [create_link(cfg, item["filename"]) for item in value] - head, tail = os.path.split(links[0]) - head, tail = os.path.split(head) - tail = "_".join(tail.split('_')[:-1]) - ppath = "{}*/".format(os.path.join(head, tail)) - content.append("{0} | {1} | {2} | {3}\n".format( - key, ppath, value[0]["start_year"], value[0]["end_year"])) + for _, attributes in grouped_selection.items(): + for item in attributes: + create_link(cfg, item["filename"], item['alias']) + ppath = "{0}/".format(cfg['lnk_dir']) + content.append("{0} | {1}{0} | {2} | {3}\n".format( + attributes[0]["alias"], ppath, attributes[0]["start_year"], + attributes[0]["end_year"])) namelist = os.path.join(cfg['run_dir'], "namelist") @@ -127,11 +131,182 @@ def _nco_available(): return ret +def _is_png(path): + exclude = ['cas-cvdp.png'] + filename = os.path.basename(path) + return filename.endswith('.png') and filename not in exclude + + +def _get_caption(filename): + caption = [] + stat = _get_stat(filename) + if stat is not None and stat != "other": + caption.append(stat) + season = _get_season(filename) + if season is not None: + caption.append(season) + long_name = _get_long_name(filename) + if long_name is not None: + caption.append(long_name) + mode = _get_mode(filename) + if mode is not None: + caption.append(mode) + return " ".join(caption) + + +def _get_plot_type(filename): + plot_type = { + 'timeseries': "times", + 'mean': "other", + 'stddev': "other", + 'trends': "other", + 'eight_yr_runtrend': "other", + 'sixteen_yr_runtrend': "other", + 'fourteen_yr_runtrend': "other", + 'twelve_yr_runtrend': "other", + 'ten_yr_runtrend': "other", + 'powspec': "other", + 'reg': "other", + 'hov': "other", + 'monstddev': "other", + 'runstddev': "other", + 'za': "zonal", + } + ans = _get_info(filename, plot_type) + return ans if ans is not None else 'other' + + +def _get_stat(filename): + stat = { + 'timeseries': "other", + 'mean': "mean", + 'stddev': "stddev", + 'trends': "trend", + 'eight_yr_runtrend': "trend", + 'sixteen_yr_runtrend': "trend", + 'fourteen_yr_runtrend': "trend", + 'twelve_yr_runtrend': "trend", + 'ten_yr_runtrend': "trend", + 'powspec': "spectrum", + 'reg': "other", + 'hov': "other", + 'monstddev': "stddev", + 'runstddev': "stddev", + 'za': "mean", + } + ans = 
_get_info(filename, stat) + return ans if ans is not None else 'other' + + +def _get_season(filename): + season = { + 'ann': "Annual", + 'djf': "DJF", + 'mam': "MAM", + 'jja': "JJA", + 'son': "SON", + } + return _get_info(filename, season) + + +def _get_long_name(filename): + variable = { + 'pr': "Precipitation", + 'tas': "Surface temperature", + 'psl': "Sea level pressure", + 'sst': "Sea surface temperature", + } + return _get_info(filename, variable) + + +def _get_mode(filename): + mode = { + 'iod': "iod", + 'ipo': "ipo", + 'nam': "nam", + 'nao': "nao", + 'lanina': "La nina", + 'nino12': "El nino 12", + 'nino3': "El nino 3", + 'nino34': "El nino 34", + 'nino4': "El nino 4", + 'npi': "npi", + 'npo': "npo", + 'pdo': "pdo", + 'pna': "pna", + 'psa1': "psa1", + 'psa2': "psa2", + 'sam': "sam", + 'socn': "socn", + 'tio': "tio", + 'tna': "tna", + 'tsa': "tsa", + } + return _get_info(filename, mode) + + +def _get_info(filename, dictionary): + intersection = list( + set(os.path.basename(filename).split('.')).intersection( + dictionary.keys())) + if len(intersection) != 1: + return None + return dictionary[intersection[0]] + + +def _get_global_ancestors(cfg): + input_data = cfg['input_data'].values() + grouped_selection = group_metadata(input_data, 'dataset') + ancestor = [] + for _, attributes in grouped_selection.items(): + ancestor += [item['filename'] for item in attributes] + return ancestor + + +def set_provenance(cfg): + """Add provenance to all image files that the cvdp package creates.""" + def _get_provenance_record(filename, ancestors): + return { + 'caption': _get_caption(filename), + 'statistics': [_get_stat(filename)], + 'domain': 'global', + 'plot_type': _get_plot_type(filename), + 'authors': [ + 'phillips_adam', + ], + 'references': [ + 'acknow_project', + 'phillips14eos', + ], + 'ancestors': ancestors, + } + + ancestors = _get_global_ancestors(cfg) + logger.info("Path to work_dir: %s", cfg['work_dir']) + with ProvenanceLogger(cfg) as provenance_logger: + for root, _, files in os.walk(cfg['work_dir']): + for datei in files: + path = os.path.join(root, datei) + if _is_png(path): + logger.info("Name of file: %s", path) + provenance_record = _get_provenance_record(path, ancestors) + logger.info("Recording provenance of %s:\n%s", path, + provenance_record) + provenance_logger.log(path, provenance_record) + + +def _execute_cvdp(cfg): + subprocess.check_call(["ncl", "driver.ncl"], + cwd=os.path.join(cfg['run_dir'])) + + def main(cfg): """Set and execute the cvdp package.""" + cfg['lnk_dir'] = os.path.join(cfg['run_dir'], "links") setup_driver(cfg) setup_namelist(cfg) - subprocess.run(["ncl", "driver.ncl"], cwd=os.path.join(cfg['run_dir'])) + _execute_cvdp(cfg) + set_provenance(cfg) if __name__ == '__main__': diff --git a/esmvaltool/diag_scripts/deangelis15nat/deangelisf1b.py b/esmvaltool/diag_scripts/deangelis15nat/deangelisf1b.py new file mode 100644 index 0000000000..6d525ef34b --- /dev/null +++ b/esmvaltool/diag_scripts/deangelis15nat/deangelisf1b.py @@ -0,0 +1,245 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + + +""". + +Calculates radiative constraint on hydrologic cycle intensification +following DeAngelis et al. (2015). 
+
+
+###############################################################################
+testkw/deangelis1b.py
+Author: Katja Weigel (IUP, Uni Bremen, Germany)
+EVal4CMIP project
+###############################################################################
+
+Description
+-----------
+
+    Calculates radiative constraint on hydrologic cycle intensification
+    following DeAngelis et al. (2015).
+    Based on diag_scripts/climate_metrics/ecs.py by Manuel Schlund
+
+Configuration options
+---------------------
+    output_name : Name of the output files.
+
+###############################################################################
+
+"""
+
+
+import logging
+import os
+from collections import OrderedDict
+from pprint import pformat
+
+import iris
+import matplotlib.pyplot as plt
+import numpy as np
+
+import esmvaltool.diag_scripts.shared as e
+import esmvaltool.diag_scripts.shared.names as n
+from esmvaltool.diag_scripts.shared import (ProvenanceLogger,
+                                            get_diagnostic_filename,
+                                            get_plot_filename, group_metadata,
+                                            select_metadata)
+
+logger = logging.getLogger(os.path.basename(__file__))
+
+
+def _get_sel_files_var(cfg, varnames):
+    """Get filenames from cfg for the model mean and different variables."""
+    selection = []
+
+    for var in varnames:
+        for hlp in select_metadata(cfg['input_data'].values(), short_name=var):
+            selection.append(hlp['filename'])
+
+    return selection
+
+
+def cube_to_save_vars(list_dict):
+    """Create cubes to prepare bar plot data for saving to netCDF."""
+    # cubes = iris.cube.CubeList()
+    for iii, var in enumerate(list_dict["data"]):
+        if iii == 0:
+            cubes = iris.cube.CubeList([
+                iris.cube.Cube(var,
+                               var_name=list_dict["name"][iii]['var_name'],
+                               long_name=list_dict["name"][iii]['long_name'],
+                               units=list_dict["name"][iii]['units'])])
+        else:
+            cubes.append(
+                iris.cube.Cube(var,
+                               var_name=list_dict["name"][iii]['var_name'],
+                               long_name=list_dict["name"][iii]['long_name'],
+                               units=list_dict["name"][iii]['units']))
+
+    return cubes
+
+
+def get_provenance_record(ancestor_files, caption, statistics,
+                          domains, plot_type='bar'):
+    """Get Provenance record."""
+    record = {
+        'caption': caption,
+        'statistics': statistics,
+        'domains': domains,
+        'plot_type': plot_type,
+        'themes': ['phys'],
+        'authors': [
+            'weigel_katja',
+        ],
+        'references': [
+            'deangelis15nat',
+        ],
+        'ancestors': ancestor_files,
+    }
+    return record
+
+
+def plot_bar_deangelis(cfg, data_var_sum, available_exp, available_vars):
+    """Plot bar chart of the global mean fluxes for each experiment."""
+    # Plot data
+    fig, axx = plt.subplots()
+
+    set_colors = ['cornflowerblue', 'orange', 'silver', 'limegreen',
+                  'rosybrown', 'orchid']
+    bar_width = 1.0 / float(len(available_vars))
+
+    for iii, iexp in enumerate(available_exp):
+        axx.bar(np.arange(len(available_vars)) + bar_width * float(iii),
+                data_var_sum[iexp],
+                bar_width, color=set_colors[iii], label=iexp)
+
+    axx.set_xlabel(' ')
+    axx.set_ylabel(r'Model mean (W m$^{-2}$)')
+    axx.set_title(' ')
+    axx.set_xticks(np.arange(len(available_vars)) + bar_width)
+    axx.set_xticklabels(available_vars)
+    axx.legend(loc=1)
+
+    fig.tight_layout()
+    plot_file = get_plot_filename('bar_all', cfg)
+    fig.savefig(plot_file, dpi=300)
+    plt.close()
+
+    caption = 'Global average multi-model mean comparing different ' + \
+        'model experiments and flux variables.'
+
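+    # The same provenance record is attached to both the plot file and the
+    # netCDF data file written below.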
+    provenance_record = get_provenance_record(
+        _get_sel_files_var(cfg, available_vars), caption, ['mean'], ['global'])
+
+    diagnostic_file = get_diagnostic_filename('bar_all', cfg)
+
+    logger.info("Saving analysis results to %s", diagnostic_file)
+
+    list_dict = {}
+    list_dict["data"] = []
+    list_dict["name"] = []
+    for iexp in available_exp:
+        list_dict["data"].append(data_var_sum[iexp])
+        list_dict["name"].append({'var_name': iexp + '_all',
+                                  'long_name': 'Fluxes for ' + iexp +
+                                  ' experiment',
+                                  'units': 'W m-2'})
+
+    iris.save(cube_to_save_vars(list_dict), target=diagnostic_file)
+
+    logger.info("Recording provenance of %s:\n%s", diagnostic_file,
+                pformat(provenance_record))
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(plot_file, provenance_record)
+        provenance_logger.log(diagnostic_file, provenance_record)
+
+
+###############################################################################
+# Setup diagnostic
+###############################################################################
+
+# Variables
+
+
+def main(cfg):
+    """Run the diagnostic.
+
+    Parameters
+    ----------
+    cfg : dict
+        Configuration dictionary of the recipe.
+
+    """
+    ###########################################################################
+    # Read recipe data
+    ###########################################################################
+
+    # Dataset data containers
+    data = e.Datasets(cfg)
+    logging.debug("Found datasets in recipe:\n%s", data)
+
+    # Variables
+    # var = e.Variables(cfg)
+    available_vars = list(group_metadata(cfg['input_data'].values(),
+                                         'short_name'))
+    logging.debug("Found variables in recipe:\n%s", available_vars)
+
+    available_exp = list(group_metadata(cfg['input_data'].values(), 'exp'))
+
+    if len(available_exp) > 6:
+        raise ValueError("The diagnostic can only plot up to 6 different " +
+                         "model experiments.")
+
+    ###########################################################################
+    # Read data
+    ###########################################################################
+
+    # Create iris cube for each dataset and save annual means
+    for dataset_path in data:
+        cube = iris.load(dataset_path)[0]
+        # cube = iris.load(dataset_path, var.standard_names())[0]
+        cube = cube.collapsed('time', iris.analysis.MEAN)
+
+        data.set_data(cube.data, dataset_path)
+
+    ###########################################################################
+    # Process data
+    ###########################################################################
+
+    data_var = OrderedDict()
+    for iexp in available_exp:
+        data_var[iexp] = OrderedDict()
+        for jvar in available_vars:
+            data_var[iexp][jvar] = 0.0
+
+    pathlist = data.get_path_list(short_name=available_vars[0],
+                                  exp=available_exp[0])
+
+    for dataset_path in pathlist:
+
+        # Subtract piControl experiment from abrupt4xCO2 experiment
+        dataset = data.get_info(n.DATASET, dataset_path)
+
+        for jvar in available_vars:
+            for iexp in available_exp:
+                (data_var[iexp])[jvar] = (data_var[iexp])[jvar] + \
+                    data.get_data(short_name=jvar, exp=iexp,
+                                  dataset=dataset)
+
+    data_var_sum = {}
+    for iexp in available_exp:
+        data_var_sum[iexp] = np.fromiter(
+            data_var[iexp].values(), dtype=np.float64) / float(len(pathlist))
+
+    # Plot the bar chart
+    plot_bar_deangelis(cfg, data_var_sum, available_exp, available_vars)
+
+
+if __name__ == '__main__':
+    with e.run_diagnostic() as config:
+        main(config)
diff --git a/esmvaltool/diag_scripts/deangelis15nat/deangelisf2ext.py b/esmvaltool/diag_scripts/deangelis15nat/deangelisf2ext.py
new file mode 100644
index 0000000000..634e513dd5
--- /dev/null
+++ b/esmvaltool/diag_scripts/deangelis15nat/deangelisf2ext.py
@@ -0,0 +1,815 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+
+""".
+
+Calculates radiative constraint on hydrologic cycle intensification
+following DeAngelis et al. (2015).
+
+###############################################################################
+testkw/deangelis2.py
+Author: Katja Weigel (IUP, Uni Bremen, Germany)
+EVal4CMIP project
+###############################################################################
+
+Description
+-----------
+    Calculates radiative constraint on hydrologic cycle intensification
+    following DeAngelis et al. (2015).
+    Creates figure 2 and extended data figure 1 and figure 2
+    Based on diag_scripts/climate_metrics/ecs.py by Manuel Schlund
+
+Configuration options
+---------------------
+
+###############################################################################
+
+"""
+
+
+import logging
+import os
+from collections import OrderedDict
+from pprint import pformat
+
+import iris
+import iris.coord_categorisation as cat
+import matplotlib.pyplot as plt
+import matplotlib.transforms as mpltrans
+import numpy as np
+from scipy import stats
+
+import esmvaltool.diag_scripts.shared as e
+import esmvaltool.diag_scripts.shared.names as n
+from esmvaltool.diag_scripts.shared import (ProvenanceLogger,
+                                            get_diagnostic_filename,
+                                            get_plot_filename, group_metadata,
+                                            select_metadata)
+
+logger = logging.getLogger(os.path.basename(__file__))
+
+
+def _set_list_dict1(sa_dict):
+    list_dict = {}
+    list_dict["data"] = [sa_dict["rsnstcsdt"], sa_dict["rsnstdt"]]
+    list_dict["name"] = [{'var_name': 'drsnstcs_divby_dtas',
+                          'long_name': 'Temperature mediated ' +
+                          'shortwave absorption for clear sky',
+                          'units': 'W m-2 K-1'},
+                         {'var_name': 'drsnst_divby_dtas',
+                          'long_name': 'Temperature mediated ' +
+                          'shortwave absorption for all sky',
+                          'units': 'W m-2 K-1'}]
+    return list_dict
+
+
+def _set_list_dict2(sa_dict):
+    list_dict = {}
+    list_dict["data"] = [sa_dict["lvpdt"], sa_dict["rsnstcsdt"]]
+    list_dict["name"] = [{'var_name': 'dlvp_divby_dtas',
+                          'long_name': 'Temperature mediated latent heat ' +
+                          'release from precipitation',
+                          'units': 'W m-2 K-1'},
+                         {'var_name': 'drsnstcs_divby_dtas',
+                          'long_name': 'Temperature mediated ' +
+                          'shortwave absorption for clear sky',
+                          'units': 'W m-2 K-1'}]
+
+    return list_dict
+
+
+def _calculate_regression_sa(sa_dict):
+    """Regression between dlvp/dtas, drsnstcs/dtas, drsnst/dtas."""
+    # Regression between LvdP/dtas and the clr-dSWA/dtas and all-dSWA/dtas
+    reg_dict = {}
+    reg_dict["sa"] = stats.linregress(sa_dict["rsnstcsdt"], sa_dict["lvpdt"])
+    reg_dict["sa_all"] = stats.linregress(sa_dict["rsnstdt"], sa_dict["lvpdt"])
+    reg_dict["y_sa"] = reg_dict["sa"].slope * np.linspace(0.2, 1.4, 2) + \
+        reg_dict["sa"].intercept
+
+    # Regression between clr-dSWA/dtas and all-dSWA/dtas
+    reg_dict["rsnst"] = stats.linregress(sa_dict["rsnstcsdt"],
+                                         sa_dict["rsnstdt"])
+    reg_dict["y_rsnst"] = reg_dict["rsnst"].slope * \
+        np.linspace(0.2, 1.4, 2) + reg_dict["rsnst"].intercept
+
+    return reg_dict
+
+
+def _set_axx_fig2a(cfg, axx, m_all, reg_dict, sa_dict):
+    """Text for fig2a."""
+    text_sa = '{:.2f}'.format(reg_dict["sa"].rvalue)
+    text_sa_all = '{:.2f}'.format(reg_dict["sa_all"].rvalue)
+
+    axx.plot(np.arange(len(m_all)) + 1, m_all,
+             linestyle='none', marker='x',
+             markersize=25,
markeredgewidth=4.0, markerfacecolor='r', + markeredgecolor='r', label='Model mean') + axx.plot(np.arange(len(m_all)) + 1, m_all, linestyle='none', marker='x', + markersize=25, markeredgewidth=4.0, markeredgecolor='r') + + if not cfg[n.OUTPUT_FILE_TYPE] == 'eps': + axx.plot(np.ones((len(sa_dict["lvpdt"]))), sa_dict["lvpdt"], + linestyle='none', marker='o', + markersize=15, markeredgewidth=1.0, fillstyle='none', + markeredgecolor='b', label='Individual models') + axx.plot(np.ones((len(sa_dict["lvpdt"]))) + 1, sa_dict["rsnstdt"], + linestyle='none', marker='o', + markersize=15, markeredgewidth=1.0, fillstyle='none', + markeredgecolor='b') + axx.plot(np.ones((len(sa_dict["lvpdt"]))) + 2, sa_dict["rsnstcsdt"], + linestyle='none', + marker='o', markersize=15, markeredgewidth=1.0, + fillstyle='none', markeredgecolor='b') + else: + axx.plot(np.ones((len(sa_dict["lvpdt"]))), sa_dict["lvpdt"], + linestyle='none', marker='o', + markersize=15, markeredgewidth=1.0, + markerfacecolor='w', markeredgecolor='b', + label='Individual models') + axx.plot(np.ones((len(sa_dict["lvpdt"]))) + 1, sa_dict["rsnstdt"], + linestyle='none', marker='o', + markersize=15, markeredgewidth=1.0, + markerfacecolor='w', markeredgecolor='b') + axx.plot(np.ones((len(sa_dict["lvpdt"]))) + 2, sa_dict["rsnstcsdt"], + linestyle='none', + marker='o', markersize=15, markeredgewidth=1.0, + markerfacecolor='w', markeredgecolor='b') + + axx.set_xlabel(' ') + axx.set_title(' ') + axx.set_ylabel(r'Temperature-mediated response (W m$^{-2}$ K$^{-1}$)') + axx.set_xlim([0.5, 3.5]) + axx.set_xticks(np.linspace(1.0, 3.0, 3)) + axx.set_xticklabels(("dlvp/dtas", "drsnst/dtas", "rsnstcs/dtas"), + rotation=45, ha='right', rotation_mode='anchor') + axx.set_ylim([0, 3.0]) + axx.set_yticks(np.linspace(0.5, 2.5, 5)) + axx.text(1.9, 0.2, text_sa) + axx.text(2.9, 0.2, text_sa_all) + axx.legend(loc=2) + + return axx + + +def _set_axx_fig2b(axx, cfg, reg_dict, datasets, sa_dict): + """Text for fig2b.""" + axx.plot(np.linspace(0.2, 1.4, 2), reg_dict["y_sa"], color='r') + + for iii, model in enumerate(datasets): + proj = (select_metadata(cfg['input_data'].values(), + dataset=model))[0]['project'] + style = e.plot.get_dataset_style(model, style_file=proj.lower()) + axx.plot( + sa_dict["rsnstcsdt"][iii], + sa_dict["lvpdt"][iii], + marker=style['mark'], + color=style['color'], + markerfacecolor=style['facecolor'], + linestyle='none', + markersize=10, + markeredgewidth=2.0, + label=model) + + axx.set_xlabel(r'drsnstcs/dtas (W m$^{-2}$ K$^{-1}$)') + axx.set_title(' ') + axx.set_ylabel(r'dlvp/dtas (W m$^{-2}$ K$^{-1}$)') + axx.set_xlim([0.3, 1.35]) + axx.set_xticks(np.linspace(0.4, 1.2, 5)) + axx.set_ylim([1.75, 2.8]) + axx.set_yticks(np.linspace(1.8, 2.8, 6)) + axx.text(0.9, 2.75, 'Fit (r={:.2f}, '.format(reg_dict["sa"].rvalue) + + ' slope = {:.2f}, '.format(reg_dict["sa"].slope) + + ')') + axx.legend(loc=3) + return axx + + +def _set_text_exfig2a(axx, text_dict): + """Text for exfig2a.""" + axx.set_xlabel(' ') + axx.set_title(' ') + axx.set_ylabel(r'Temperature-mediated response (W m$^{-2}$ K$^{-1}$)') + axx.set_xlim([0.5, 6.5]) + axx.set_xticks(np.linspace(1.0, 6.0, 6)) + axx.set_xticklabels(("dlvp/dtas", "drlnst/dtas", "drsnst/dtas", + "dhfss/dtas", "drlnstcs/dtas", "drsnstcs/dtas"), + rotation=45, ha='right', rotation_mode='anchor') + axx.set_ylim([-1.5, 4.5]) + axx.set_yticks(np.linspace(-1.0, 4.0, 11)) + axx.vlines([1.5, 4.5], -2, 5, colors='k', linestyle='solid') + axx.hlines(0, 0, 7, colors='k', linestyle='dashed') + axx.text(1.9, 0.2, 
text_dict["rlnstdt"])
+    axx.text(2.9, 0.2, text_dict["rsnstdt"])
+    axx.text(3.9, 0.2, text_dict["hfssdt"])
+    axx.text(4.9, 0.2, text_dict["rlnstcsdt"])
+    axx.text(5.9, 0.2, text_dict["rsnstcsdt"])
+    axx.legend(loc=2)
+
+    return axx
+
+
+def _set_axx_exfig2b(axx, cfg, datasets, reg_dict, sa_dict):
+    """Text for exfig2b."""
+    axx.plot(np.linspace(0.2, 1.4, 2), reg_dict["y_rsnst"], color='r')
+
+    for iii, model in enumerate(datasets):
+        proj = (select_metadata(cfg['input_data'].values(),
+                                dataset=model))[0]['project']
+        style = e.plot.get_dataset_style(model, style_file=proj.lower())
+        axx.plot(
+            sa_dict["rsnstcsdt"][iii],
+            sa_dict["rsnstdt"][iii],
+            marker=style['mark'],
+            color=style['color'],
+            markerfacecolor=style['facecolor'],
+            linestyle='none',
+            markersize=10,
+            markeredgewidth=2.0,
+            label=model)
+
+    axx.set_xlabel(r'drsnstcs/dtas (W m$^{-2}$ K$^{-1}$)')
+    axx.set_title(' ')
+    axx.set_ylabel(r'drsnst/dtas (W m$^{-2}$ K$^{-1}$)')
+    axx.set_xlim([0.45, 1.15])
+    axx.set_xticks(np.linspace(0.5, 1.1, 7))
+    axx.set_ylim([0.45, 1.15])
+    axx.set_yticks(np.linspace(0.5, 1.1, 7))
+    axx.text(0.85, 1.1, 'Fit (r={:.2f}, '.format(reg_dict["rsnst"].rvalue) +
+             ' slope = {:.2f}, '.format(reg_dict["rsnst"].slope) +
+             ')')
+    axx.legend(loc=2)
+
+    return axx
+
+
+def _get_sel_files_var(cfg, varnames):
+    """Get filenames from cfg for the model mean and different variables."""
+    selection = []
+
+    for var in varnames:
+        for hlp in select_metadata(cfg['input_data'].values(), short_name=var):
+            selection.append(hlp['filename'])
+
+    return selection
+
+
+def cube_to_save_matrix(var1, name):
+    """Create cubes to prepare scatter plot data for saving to netCDF."""
+    cubes = iris.cube.CubeList([iris.cube.Cube(var1,
+                                               var_name=name['var_name'],
+                                               long_name=name['long_name'],
+                                               units=name['units'])])
+
+    return cubes
+
+
+def cube_to_save_vars(list_dict):
+    """Create cubes to prepare bar plot data for saving to netCDF."""
+    # cubes = iris.cube.CubeList()
+    for iii, var in enumerate(list_dict["data"]):
+        if iii == 0:
+            cubes = iris.cube.CubeList([
+                iris.cube.Cube(var,
+                               var_name=list_dict["name"][iii]['var_name'],
+                               long_name=list_dict["name"][iii]['long_name'],
+                               units=list_dict["name"][iii]['units'])])
+        else:
+            cubes.append(
+                iris.cube.Cube(var,
+                               var_name=list_dict["name"][iii]['var_name'],
+                               long_name=list_dict["name"][iii]['long_name'],
+                               units=list_dict["name"][iii]['units']))
+
+    return cubes
+
+
+def get_provenance_record(ancestor_files, caption, statistics,
+                          plot_type='scatter'):
+    """Get Provenance record."""
+    record = {
+        'caption': caption,
+        'statistics': statistics,
+        'domains': ['global'],
+        'plot_type': plot_type,
+        'themes': ['phys'],
+        'authors': [
+            'weigel_katja',
+        ],
+        'references': [
+            'deangelis15nat',
+        ],
+        'ancestors': ancestor_files,
+    }
+    return record
+
+
+def plot_slope_regression(cfg, data_dict):
+    """Scatter plot of linear regression slope, some variables (fig2a)."""
+    sa_dict = {}
+    sa_dict["lvpdt"] = data_dict['regressions'][:, 3]
+    sa_dict["rsnstdt"] = data_dict['regressions'][:, 1]
+    sa_dict["rsnstcsdt"] = data_dict['regressions'][:, 5]
+    datasets = data_dict['datasets']
+
+    m_all = np.array([np.mean(sa_dict["lvpdt"]), np.mean(sa_dict["rsnstdt"]),
+                      np.mean(sa_dict["rsnstcsdt"])])
+
+    reg_dict = _calculate_regression_sa(sa_dict)
+
+    fig, axx = plt.subplots(figsize=(7, 7))
+
+    axx = _set_axx_fig2a(cfg, axx, m_all, reg_dict, sa_dict)
+
+    fig.tight_layout()
+    fig.savefig(get_plot_filename('fig2a', cfg), dpi=300)
+    plt.close()
+
+    caption = 'The temperature-mediated response of each atmospheric ' + \
+        'energy budget term for each model as blue circles and ' + \
+        'the model mean as a red cross. The numbers above the ' + \
+        'abscissa are the cross-model correlations between ' + \
+        'dlvp/dtas and each other temperature-mediated response.'
+
+    provenance_record = get_provenance_record(
+        _get_sel_files_var(cfg, ['lvp', 'rsnst', 'rsnstcs', 'tas']),
+        caption, ['corr', 'mean'])
+
+    diagnostic_file = get_diagnostic_filename('fig2a', cfg)
+
+    logger.info("Saving analysis results to %s", diagnostic_file)
+
+    list_dict = {}
+    list_dict["data"] = [sa_dict["lvpdt"], sa_dict["rsnstdt"],
+                         sa_dict["rsnstcsdt"]]
+    list_dict["name"] = [{'var_name': 'dlvp_divby_dtas',
+                          'long_name': 'Temperature mediated latent heat ' +
+                          'release from precipitation',
+                          'units': 'W m-2 K-1'},
+                         {'var_name': 'drsnst_divby_dtas',
+                          'long_name': 'Temperature mediated ' +
+                          'shortwave absorption',
+                          'units': 'W m-2 K-1'},
+                         {'var_name': 'drsnstcs_divby_dtas',
+                          'long_name': 'Temperature mediated ' +
+                          'shortwave absorption for clear sky',
+                          'units': 'W m-2 K-1'}]
+
+    iris.save(cube_to_save_vars(list_dict), target=diagnostic_file)
+
+    logger.info("Recording provenance of %s:\n%s", diagnostic_file,
+                pformat(provenance_record))
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(diagnostic_file, provenance_record)
+        provenance_logger.log(get_plot_filename('fig2a', cfg),
+                              provenance_record)
+
+    fig, axx = plt.subplots(figsize=(7, 7))
+
+    axx = _set_axx_fig2b(axx, cfg, reg_dict, datasets, sa_dict)
+
+    fig.tight_layout()
+    fig.savefig(get_plot_filename('fig2b', cfg), dpi=300)
+    plt.close()
+
+    caption = 'Scatterplot of dlvp/dtas versus drsnstcs/dtas with ' + \
+        'corresponding least-squares linear fit (red line).'
+
+    provenance_record = get_provenance_record(
+        _get_sel_files_var(cfg, ['lvp', 'rsnstcs', 'tas']),
+        caption, ['corr'])
+
+    diagnostic_file = get_diagnostic_filename('fig2b', cfg)
+
+    logger.info("Saving analysis results to %s", diagnostic_file)
+
+    list_dict = _set_list_dict2(sa_dict)
+
+    iris.save(cube_to_save_vars(list_dict), target=diagnostic_file)
+
+    logger.info("Recording provenance of %s:\n%s", diagnostic_file,
+                pformat(provenance_record))
+
+    fig, axx = plt.subplots(figsize=(7, 7))
+
+    axx = _set_axx_exfig2b(axx, cfg, datasets, reg_dict, sa_dict)
+
+    fig.tight_layout()
+    fig.savefig(get_plot_filename('exfig2b', cfg), dpi=300)
+    plt.close()
+
+    caption = 'Scatterplot of drsnstcs/dtas versus drsnst/dtas with ' + \
+        'corresponding least-squares linear fit (red line).'
+
+    provenance_record = get_provenance_record(
+        _get_sel_files_var(cfg, ['rsnstcs', 'rsnst', 'tas']),
+        caption, ['corr'])
+
+    diagnostic_file = get_diagnostic_filename('exfig2b', cfg)
+
+    logger.info("Saving analysis results to %s", diagnostic_file)
+
+    list_dict = _set_list_dict1(sa_dict)
+
+    iris.save(cube_to_save_vars(list_dict), target=diagnostic_file)
+
+    logger.info("Recording provenance of %s:\n%s", diagnostic_file,
+                pformat(provenance_record))
+
+
+def plot_slope_regression_all(cfg, data_dict, available_vars):
+    """Scatter plot of linear regression slope, all variables (exfig2a)."""
+    data_model = data_dict['regressions']
+    m_all = np.array([np.mean(data_model[:, 3]), np.mean(data_model[:, 0]),
+                      np.mean(data_model[:, 1]), np.mean(data_model[:, 2]),
+                      np.mean(data_model[:, 4]), np.mean(data_model[:, 5])])
+
+    reg_dict = {}
+    reg_dict["rsnstcsdt"] = stats.linregress(data_model[:, 5],
+                                             data_model[:, 3])
+    reg_dict["rsnstdt"] = stats.linregress(data_model[:, 1], data_model[:, 3])
+    reg_dict["rlnstcsdt"] = stats.linregress(data_model[:, 4],
+                                             data_model[:, 3])
+    reg_dict["rlnstdt"] = stats.linregress(data_model[:, 0], data_model[:, 3])
+    reg_dict["hfssdt"] = stats.linregress(data_model[:, 2], data_model[:, 3])
+
+    text_dict = {}
+    text_dict["rsnstcsdt"] = '{:.2f}'.format(reg_dict["rsnstcsdt"].rvalue)
+    text_dict["rsnstdt"] = '{:.2f}'.format(reg_dict["rsnstdt"].rvalue)
+    text_dict["rlnstcsdt"] = '{:.2f}'.format(reg_dict["rlnstcsdt"].rvalue)
+    text_dict["rlnstdt"] = '{:.2f}'.format(reg_dict["rlnstdt"].rvalue)
+    text_dict["hfssdt"] = '{:.2f}'.format(reg_dict["hfssdt"].rvalue)
+
+    fig, axx = plt.subplots(figsize=(7, 7))
+    axx.plot(np.arange(len(m_all)) + 1, m_all,
+             linestyle='none', marker='x',
+             markersize=25, markeredgewidth=4.0, markerfacecolor='r',
+             markeredgecolor='r', label='Model mean')
+
+    # Plot the individual models for each term (columns 3, 0, 1, 2, 4, 5 of
+    # the regression matrix); for eps output, white-filled markers are used
+    # instead of unfilled ones.
+    if cfg[n.OUTPUT_FILE_TYPE] != 'eps':
+        marker_kwargs = {'fillstyle': 'none'}
+    else:
+        marker_kwargs = {'markerfacecolor': 'w'}
+    for xpos, column in enumerate([3, 0, 1, 2, 4, 5]):
+        axx.plot(np.ones(len(data_model[:, 2])) + xpos,
+                 data_model[:, column],
+                 linestyle='none', marker='o',
+                 markersize=15, markeredgewidth=1.0,
+                 markeredgecolor='b',
+                 label='Individual models' if xpos == 0 else None,
+                 **marker_kwargs)
+
+    axx.plot(np.arange(len(m_all)) + 1, m_all, linestyle='none', marker='x',
+             markersize=25, markeredgewidth=4.0, markerfacecolor='r',
+             markeredgecolor='r')
+
+    axx = _set_text_exfig2a(axx, text_dict)
+
+    fig.tight_layout()
+    fig.savefig(get_plot_filename('exfig2a', cfg), dpi=300)
+    plt.close()
+
+    caption = 'The temperature-mediated response of each atmospheric ' + \
+              'energy budget term for each model as blue circles and ' + \
+              'the model mean as a red cross. The numbers above the ' + \
+              'abscissa are the cross-model correlations between ' + \
+              'dlvp/dtas and each other temperature-mediated response.'
+
+    provenance_record = get_provenance_record(
+        _get_sel_files_var(cfg, available_vars), caption, ['mean'])
+
+    diagnostic_file = get_diagnostic_filename('exfig2a', cfg)
+
+    logger.info("Saving analysis results to %s", diagnostic_file)
+
+    iris.save(cube_to_save_matrix(data_model, {'var_name': 'all',
+                                               'long_name': 'dlvp, ' +
+                                                            'drlnst, ' +
+                                                            'drsnst, ' +
+                                                            'dhfss, ' +
+                                                            'drlnstcs, and ' +
+                                                            'drsnstcs, ' +
+                                                            'divided by ' +
+                                                            'dtas',
+                                               'units': 'W m-2 K-1'}),
+              target=diagnostic_file)
+
+    logger.info("Recording provenance of %s:\n%s", diagnostic_file,
+                pformat(provenance_record))
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(diagnostic_file, provenance_record)
+        provenance_logger.log(get_plot_filename('exfig2a', cfg),
+                              provenance_record)
+
+
+def plot_rlnst_regression(cfg, dataset_name, data, variables, regs):
+    """Plot the linear regressions of the energy budget terms (Gregory)."""
+    filepath = get_plot_filename(dataset_name, cfg)
+
+    # Regression lines
+    x_reg = np.linspace(0.0, 7.0, 2)
+    yreg_dict = {}
+    yreg_dict["rlnst"] = regs["rlnst"].slope * x_reg + regs["rlnst"].intercept
+    yreg_dict["rsnst"] = regs["rsnst"].slope * x_reg + regs["rsnst"].intercept
+    yreg_dict["hfss"] = regs["hfss"].slope * x_reg + regs["hfss"].intercept
+    yreg_dict["lvp"] = regs["lvp"].slope * x_reg + regs["lvp"].intercept
+
+    yreg_dict["lab_lvp"] = 'dlvp/dtas = {:.2f}, '.format(regs["lvp"].slope) + \
+        'y-int = {:.2f}, '.format(regs["lvp"].intercept) + \
+        'r = {:.2f}'.format(regs["lvp"].rvalue)
+    yreg_dict["lab_rlnst"] = 'drlnst' + \
+        '/dtas = {:.2f}, '.format(regs["rlnst"].slope) + \
+        'y-int = {:.2f}, '.format(regs["rlnst"].intercept) + \
+        'r = {:.2f}'.format(regs["rlnst"].rvalue)
+    yreg_dict["lab_rsnst"] = 'drsnst' + \
+        '/dtas = {:.2f}, '.format(regs["rsnst"].slope) + \
+        'y-int = {:.2f}, '.format(regs["rsnst"].intercept) + \
+        'r = {:.2f}'.format(regs["rsnst"].rvalue)
+    yreg_dict["lab_hfss"] = 'dhfss' + \
+        '/dtas = {:.2f}, '.format(regs["hfss"].slope) + \
+        'y-int = {:.2f}, '.format(regs["hfss"].intercept) + \
+        'r = {:.2f}'.format(regs["hfss"].rvalue)
+
+    axhline_dict = {'y': 0, 'linestyle': 'dashed', 'color': 'k',
+                    'linewidth': 2.0}
+
+    e.plot.scatterplot(
+        [data["tas"], x_reg, data["tas"], x_reg,
+         data["tas"], x_reg, data["tas"], x_reg],
+        [data["lvp"], yreg_dict["lvp"], data["rlnst"], yreg_dict["rlnst"],
+         data["rsnst"], yreg_dict["rsnst"], data["hfss"], yreg_dict["hfss"]],
+        filepath,
+        plot_kwargs=[{'linestyle': 'none',
+                      'marker': 'o',
+                      'markerfacecolor': 'g',
+                      'markeredgecolor': 'g',
+                      'label': yreg_dict["lab_lvp"]},
+                     {'color': 'g',
+                      'linestyle': '-'},
+                     {'linestyle': 'none',
+                      'marker': '^',
+                      'markerfacecolor': 'b',
+                      'markeredgecolor': 'b',
+                      'label': yreg_dict["lab_rlnst"]},
+                     {'color': 'b',
+                      'linestyle': '-'},
+                     {'linestyle': 'none',
+                      'marker': 's',
+                      'markerfacecolor': 'r',
+                      'markeredgecolor': 'r',
+                      'label': yreg_dict["lab_rsnst"]},
+                     {'color': 'r',
+                      'linestyle': '-'},
+                     {'linestyle': 'none',
+                      'marker': '*',
+                      'markerfacecolor': 'tab:gray',
+                      'markeredgecolor': 'tab:gray',
+                      'label': yreg_dict["lab_hfss"]},
+                     {'color': 'tab:gray',
+                      'linestyle': '-'}],
+        save_kwargs={'bbox_inches': mpltrans.Bbox.from_extents(0, -1, 6.5, 6)},
+        axes_functions={'set_title': dataset_name,
+                        'set_xlabel': '2-m temperature (tas) ' +
+                                      'global-mean annual anomaly (' +
+                                      variables.units('tas') + ')',
+                        'set_ylabel': r'Energy budget term global-mean ' +
+                                      r'annual anomalies (W m$^{-2}$)',
+                        'set_xlim': [0, 7.0],
+                        'set_ylim': [-5.0, 17.0],
+                        'set_yticks': np.linspace(-4, 16, 11),
+                        'axhline': axhline_dict,
+                        'legend': {'loc': 2}})
+
+    caption = 'Demonstration of the Gregory method for ' + dataset_name + \
+              '. Global-mean annual anomalies (' + ABRUPT4XCO2 + ' - ' + \
+              PICONTROL + ') in atmospheric energy budget terms ' + \
+              '(latent heat release from precipitation (lvp), ' + \
+              'net longwave cooling (rlnst), shortwave absorption ' + \
+              '(rsnst), and sensible heating (hfss)) are regressed ' + \
+              'against those in 2-m air temperature. For lvp, ' + \
+              'precipitation anomalies are multiplied by the latent ' + \
+              'heat of vaporization. Radiative terms are computed with ' + \
+              'all-sky fluxes. The statistics of the linear regression ' + \
+              '(slope, y-intercept, and correlation coefficient, r) ' + \
+              'are displayed in the key.'
+
+    provenance_record = get_provenance_record(
+        _get_sel_files_var(cfg, ['lvp', 'rlnst', 'rsnst', 'hfss', 'tas']),
+        caption, ['corr'])
+
+    diagnostic_file = get_diagnostic_filename(dataset_name, cfg)
+
+    logger.info("Saving analysis results to %s", diagnostic_file)
+
+    list_dict = {}
+    list_dict["data"] = [data["tas"], data["lvp"], data["rlnst"],
+                         data["rsnst"], data["hfss"]]
+    list_dict["name"] = [{'var_name': 'tas',
+                          'long_name': '2-m air temperature',
+                          'units': 'K'},
+                         {'var_name': 'lvp',
+                          'long_name': 'Latent heat release ' +
+                                       'from precipitation',
+                          'units': 'W m-2'},
+                         {'var_name': 'rlnst',
+                          'long_name': 'Net longwave cooling',
+                          'units': 'W m-2'},
+                         {'var_name': 'rsnst',
+                          'long_name': 'Shortwave absorption',
+                          'units': 'W m-2'},
+                         {'var_name': 'hfss',
+                          'long_name': 'Sensible heating',
+                          'units': 'W m-2'}]
+
+    iris.save(cube_to_save_vars(list_dict), target=diagnostic_file)
+
+    logger.info("Recording provenance of %s:\n%s", diagnostic_file,
+                pformat(provenance_record))
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(diagnostic_file, provenance_record)
+        provenance_logger.log(filepath, provenance_record)
+
+
+def substract_and_reg_deangelis2(cfg, data, var):
+    """Subtract piControl from abrupt4xCO2 for all models and variables."""
+    pathlist = data.get_path_list(short_name='tas', exp=PICONTROL)
+    regressions = np.zeros((len(pathlist), 6))
+    datasets = []
+
+    data_var = OrderedDict()
+    reg_var = OrderedDict()
+    varvar = var.short_names()
+
+    for iii, dataset_path in enumerate(pathlist):
+
+        # Subtract the piControl experiment from the abrupt4xCO2 experiment
+        dataset = data.get_info(n.DATASET, dataset_path)
+        datasets.append(dataset)
+
+        for jvar in varvar:
+            data_var[jvar] = data.get_data(short_name=jvar, exp=ABRUPT4XCO2,
+                                           dataset=dataset) - \
+                data.get_data(short_name=jvar, exp=PICONTROL,
+                              dataset=dataset)
+
+        # Perform linear regression
+        for jvar in varvar:
+            if jvar != 'tas':
+                reg_var[jvar] = stats.linregress(data_var["tas"],
+                                                 data_var[jvar])
+
+        # Plot ECS regression if desired
+        plot_rlnst_regression(cfg, dataset, data_var, var, reg_var)
+
+        # Save data
+        regressions[iii] = [reg_var["rlnst"].slope, reg_var["rsnst"].slope,
+                            reg_var["hfss"].slope, reg_var["lvp"].slope,
+                            reg_var["rlnstcs"].slope,
+                            reg_var["rsnstcs"].slope]
+
+    return dict([('regressions', regressions), ('datasets', datasets)])
+
+
+###############################################################################
+# Setup diagnostic
+###############################################################################
+
+# Experiments
+PICONTROL = 'piControl'
+ABRUPT4XCO2 = 'abrupt4xCO2'
+
+
+def main(cfg):
+    """Run the diagnostic.
+
+    Parameters
+    ----------
+    cfg : dict
+        Configuration dictionary of the recipe.
+
+    """
+    ###########################################################################
+    # Read recipe data
+    ###########################################################################
+
+    # Dataset data containers
+    data = e.Datasets(cfg)
+    logging.debug("Found datasets in recipe:\n%s", data)
+
+    # Variables
+    var = e.Variables(cfg)
+
+    available_vars = list(group_metadata(cfg['input_data'].values(),
+                                         'short_name'))
+    logging.debug("Found variables in recipe:\n%s", available_vars)
+
+    available_exp = list(group_metadata(cfg['input_data'].values(), 'exp'))
+
+    # Check for available variables
+    required_vars = ('tas', 'lvp', 'rlnst', 'rsnst', 'rlnstcs',
+                     'rsnstcs', 'hfss')
+    if not e.variables_available(cfg, required_vars):
+        raise ValueError(f"This diagnostic needs the variables "
+                         f"{required_vars}")
+
+    # Check for experiments
+    if 'abrupt-4xCO2' not in available_exp:
+        if 'abrupt4xCO2' not in available_exp:
+            raise ValueError("The diagnostic needs an experiment with " +
+                             "4 times CO2.")
+
+    if 'piControl' not in available_exp:
+        raise ValueError("The diagnostic needs a pre-industrial control " +
+                         "experiment.")
+
+    ###########################################################################
+    # Read data
+    ###########################################################################
+
+    # Create an iris cube for each dataset and save the annual means
+    for dataset_path in data:
+        cube = iris.load(dataset_path)[0]
+        cat.add_year(cube, 'time', name='year')
+        cube = cube.aggregated_by('year', iris.analysis.MEAN)
+        experiment = data.get_info(n.EXP, dataset_path)
+        if experiment == PICONTROL:
+            # DeAngelis et al. (2015) use a 21 year running mean on piControl
+            # but the full extent (150 years) of abrupt4xCO2. It is unclear
+            # how they treat the edges; here the running mean is simply
+            # skipped for the first and last 10 years. This is not exactly
+            # what was done in the paper (small differences remain in
+            # Extended Data Fig. 1), but it is closer than the other methods
+            # tried. For most data sets it would also be possible to extend
+            # the piControl run by 20 years, but then the piControl means
+            # would no longer be centred on each year of abrupt4xCO2.
+            cube_new = cube.rolling_window('time', iris.analysis.MEAN, 21)
+            endm10 = len(cube.coord('time').points) - 10
+            cube.data[10:endm10] = cube_new.data
+
+        data.set_data(cube.data, dataset_path)
+
+    ###########################################################################
+    # Process data
+    ###########################################################################
+
+    data_dict = substract_and_reg_deangelis2(cfg, data, var)
+
+    plot_slope_regression(cfg, data_dict)
+    plot_slope_regression_all(cfg, data_dict, available_vars)
+
+
+if __name__ == '__main__':
+    with e.run_diagnostic() as config:
+        main(config)
diff --git a/esmvaltool/diag_scripts/deangelis15nat/deangelisf3f4.py b/esmvaltool/diag_scripts/deangelis15nat/deangelisf3f4.py
new file mode 100644
index 0000000000..a8014d20aa
--- /dev/null
+++ b/esmvaltool/diag_scripts/deangelis15nat/deangelisf3f4.py
@@ -0,0 +1,711 @@
+"""Calculate the radiative constraint on hydrologic cycle intensification.
+
+Calculates radiative constraint on hydrologic cycle intensification
+following DeAngelis et al. (2015).
+
+###############################################################################
+esmvaltool/diag_scripts/deangelis15nat/deangelisf3f4.py
+Author: Katja Weigel (IUP, Uni Bremen, Germany)
+EVal4CMIP project
+###############################################################################
+
+Description
+-----------
+    Calculates radiative constraint on hydrologic cycle intensification
+    following DeAngelis et al. (2015).
+    Creates figure 3b (with or without observations).
+    Based on diag_scripts/climate_metrics/ecs.py by Manuel Schlund.
+
+Configuration options
+---------------------
+
+###############################################################################
+
+"""
+
+
+import logging
+import os
+from collections import OrderedDict
+from pprint import pformat
+
+import iris
+import iris.coord_categorisation as cat
+import matplotlib.pyplot as plt
+import numpy as np
+import scipy.signal as scisi
+from scipy import stats
+
+from esmvaltool.diag_scripts.shared import (ProvenanceLogger,
+                                            get_diagnostic_filename,
+                                            get_plot_filename, group_metadata,
+                                            plot, run_diagnostic,
+                                            select_metadata,
+                                            variables_available)
+
+logger = logging.getLogger(os.path.basename(__file__))
+
+
+def main(cfg):
+    """Run the diagnostic.
+
+    Parameters
+    ----------
+    cfg : dict
+        Configuration dictionary of the recipe.
+
+    """
+    ###########################################################################
+    # Read recipe data
+    ###########################################################################
+    # Get input data
+    input_data = cfg['input_data'].values()
+    required_vars = ('tas', 'rsnstcs', 'rsnstcsnorm', 'prw')
+
+    if not variables_available(cfg, required_vars):
+        raise ValueError(f"This diagnostic needs the variables "
+                         f"{required_vars}")
+
+    available_exp = list(group_metadata(input_data, 'exp'))
+
+    if 'abrupt-4xCO2' not in available_exp:
+        if 'abrupt4xCO2' not in available_exp:
+            raise ValueError("The diagnostic needs an experiment with " +
+                             "4 times CO2.")
+
+    if 'piControl' not in available_exp:
+        raise ValueError("The diagnostic needs a pre-industrial control " +
+                         "experiment.")
+
+    ###########################################################################
+    # Read data
+    ###########################################################################
+
+    # Create an iris cube for each dataset and save the annual means
+    cubes = reform_data_iris_deangelis3b4(input_data)
+
+    meas_tub_rsnstcsnorm = []
+    meas_tub_prw = []
+    for ctub in cubes:
+        if ctub[2] == 'nomodel':
+            if ctub[1] == 'prw':
+                meas_tub_prw.append(ctub)
+            if ctub[1] == 'rsnstcsnorm':
+                meas_tub_rsnstcsnorm.append(ctub)
+
+    if len(meas_tub_rsnstcsnorm) > 1:
+        raise ValueError(
+            "This diagnostic expects one (or no) observational "
+            "dataset for rsnstcsnorm"
+        )
+
+    ###########################################################################
+    # Process data
+    ###########################################################################
+
+    [grid_pw, reg_prw_obs] = set_grid_pw_reg_obs(cubes, meas_tub_rsnstcsnorm,
+                                                 meas_tub_prw)
+
+    data_model = substract_and_reg_deangelis(cfg, cubes, grid_pw,
+                                             reg_prw_obs)
+
+    plot_deangelis_fig3b4(cfg, data_model, reg_prw_obs)
+
+
+def _get_sel_files_var(cfg, varnames):
+    """Get filenames from cfg for all model means and different variables."""
+    selection = []
+
+    for var in varnames:
+        for hlp in select_metadata(cfg['input_data'].values(), short_name=var):
+            selection.append(hlp['filename'])
+
+    return selection
+
+
+def set_grid_pw_reg_obs(cubes, meas_tub_rsnstcsnorm, meas_tub_prw):
+    """Set prw grid and calculate regression for observational data."""
+    # Observations (one rsnstcsnorm data set and possibly several prw data
+    # sets; DeAngelis et al. (2015) use three)
+    data_prw_obs = OrderedDict()
+    grid_pw = {}
+    grid_pw["x"] = np.arange(12.0, 59.0, 2, dtype=np.float64)
+    grid_pw["yobs"] = OrderedDict()
+    reg_prw_obs = OrderedDict()
+
+    if np.min(np.array([len(meas_tub_rsnstcsnorm), len(meas_tub_prw)])) > 0:
+        data_rsnstcsnorm_obs = cubes[meas_tub_rsnstcsnorm[0]]
+        for kmeas_tub_prw in meas_tub_prw:
+            data_prw_obs[kmeas_tub_prw[0]] = \
+                cubes[kmeas_tub_prw]
+            (grid_pw["yobs"])[kmeas_tub_prw[0]] = \
+                make_grid_prw(grid_pw["x"],
+                              data_prw_obs[kmeas_tub_prw[0]],
+                              data_rsnstcsnorm_obs)
+            reg_prw_obs[kmeas_tub_prw[0]] = \
+                stats.linregress(grid_pw["x"],
+                                 (grid_pw["yobs"])[kmeas_tub_prw[0]])
+    else:
+        logger.info('No observations, only model data used')
+
+    return [grid_pw, reg_prw_obs]
+
+
+def cube_to_save_matrix(var1, name):
+    """Create cubes to prepare scatter plot data for saving to netCDF."""
+    cubes = iris.cube.CubeList([iris.cube.Cube(var1,
+                                               var_name=name['var_name'],
+                                               long_name=name['long_name'],
+                                               units=name['units'])])
+
+    return cubes
+
+
+def cube_to_save_vars(list_dict):
+    """Create cubes to prepare bar plot data for saving to netCDF."""
+    for iii, var in enumerate(list_dict["data"]):
+        if iii == 0:
+            cubes = iris.cube.CubeList([
+                iris.cube.Cube(var,
+                               var_name=list_dict["name"][iii]['var_name'],
+                               long_name=list_dict["name"][iii]['long_name'],
+                               units=list_dict["name"][iii]['units'])])
+        else:
+            cubes.append(
+                iris.cube.Cube(var,
+                               var_name=list_dict["name"][iii]['var_name'],
+                               long_name=list_dict["name"][iii]['long_name'],
+                               units=list_dict["name"][iii]['units']))
+
+    return cubes
+
+
+def cube_to_save_scatter(var1, var2, names):
+    """Create cubes to prepare scatter plot data for saving to netCDF."""
+    cubes = iris.cube.CubeList([iris.cube.Cube(var1,
+                                               var_name=names['var_name1'],
+                                               long_name=names['long_name1'],
+                                               units=names['units1'])])
+    cubes.append(iris.cube.Cube(var2, var_name=names['var_name2'],
+                                long_name=names['long_name2'],
+                                units=names['units2']))
+
+    return cubes
+
+
+def get_provenance_record(ancestor_files, caption, statistics,
+                          domains, plot_type='other'):
+    """Get provenance record."""
+    record = {
+        'caption': caption,
+        'statistics': statistics,
+        'domains': domains,
+        'plot_type': plot_type,
+        'themes': ['phys'],
+        'authors': [
+            'weigel_katja',
+        ],
+        'references': [
+            'deangelis15nat',
+        ],
+        'ancestors': ancestor_files,
+    }
+    return record
+
+
+def set_axx_deangelis4(axx, ylen, ytickstrs, x_obs, dx_obs):
+    """Axis settings for DeAngelis Fig. 4."""
+    axx.set_xlabel(r'$\delta$rsnst / $\delta$prw (% kg$^{-1}$ m$^2$)')
+    axx.set_xlim([0.02, 0.13])
+    axx.set_xticks(np.linspace(0.02, 0.12, 6))
+    axx.set_ylim([-0.5, ylen + 0.5])
+    axx.set_yticks(np.linspace(0, ylen - 1, ylen))
+    axx.legend(loc=2)
+    axx.set_yticklabels(ytickstrs)
+
+    # Observations
+    if dx_obs != 0:
+        axx.text(x_obs - dx_obs * 0.95, 11.8, 'Obs.', color='k')
+        axx.vlines([x_obs - dx_obs, x_obs + dx_obs], -1, ylen + 1,
+                   colors='k', linestyle='dashed')
+        axx.arrow(x_obs - dx_obs, 11.5, 2 * dx_obs - 0.002, 0, head_width=0.2,
+                  head_length=0.002, facecolor='k', edgecolor='k')
+        axx.arrow(x_obs + dx_obs, 11.5, -2 * dx_obs + 0.002, 0,
+                  head_width=0.2, head_length=0.002, facecolor='k',
+                  edgecolor='k')
+
+    return axx
+
+
+def set_axx_deangelis3b(axx, x_obs, dx_obs):
+    """Axis settings for DeAngelis Fig. 3b."""
+    axx.set_xlabel(r'$\delta$rsnstcs / $\delta$prw (% kg$^{-1}$ m$^2$)')
+    axx.set_ylabel(r'$\delta$rsnstcs / $\delta$tas (W m$^{-2}$ K$^{-1}$)')
+    axx.set_xlim([0.0, 0.13])
+    axx.set_xticks(np.linspace(0.0, 0.12, 7))
+    axx.set_ylim([0.45, 1.15])
+    axx.set_yticks(np.linspace(0.5, 1.1, 7))
+    axx.legend(loc=2)
+
+    # Observations
+    if dx_obs != 0:
+        axx.vlines([x_obs - dx_obs, x_obs + dx_obs], 0.4, 1.2, colors='k',
+                   linestyle='dashed')
+        axx.text(x_obs - dx_obs * 0.95, 0.78, 'Obs.', color='k')
+        axx.arrow(x_obs - dx_obs, 0.75, 2 * dx_obs - 0.002, 0,
+                  head_width=0.02, head_length=0.002, facecolor='k',
+                  edgecolor='k')
+        axx.arrow(x_obs + dx_obs, 0.75, -2 * dx_obs + 0.002, 0,
+                  head_width=0.02, head_length=0.002, facecolor='k',
+                  edgecolor='k')
+
+    return axx
+
+
+def plot_deangelis_fig3a(cfg, dataset_name, data, reg_prw, reg_obs):
+    """Plot DeAngelis Fig. 3a."""
+    reg_prw_dict = {}
+    reg_prw_dict["x"] = np.linspace(0.0, 65, 2)
+    reg_prw_dict["y"] = reg_prw.slope * reg_prw_dict["x"] + reg_prw.intercept
+    reg_prw_dict["text"] = dataset_name + \
+        r', $\delta$rsnst / $\delta$prw = {:.2f}'.format(reg_prw.slope)
+
+    fig, axx = plt.subplots(figsize=(8, 8))
+    axx.plot(reg_prw_dict["x"], reg_prw_dict["y"], linestyle='solid',
+             color='k',
+             label=reg_prw_dict["text"])
+    axx.plot(data["x"], data["ypic"], linestyle='none',
+             color='k', marker='d')
+
+    ccc = 0.0
+    for kobs in reg_obs.keys():
+        reg_prw_dict["y_obs"] = (reg_obs[kobs].slope * reg_prw_dict["x"] +
+                                 reg_obs[kobs].intercept)
+        reg_prw_dict["text_obs"] = ('CERES-EBAF/' + kobs +
+                                    r', $\delta$rsnst / $\delta$' +
+                                    'prw = {:.2f}'.format(reg_obs[kobs].slope))
+        axx.plot(reg_prw_dict["x"], reg_prw_dict["y_obs"], linestyle='solid',
+                 color=(0.25 * ccc, 1.0 - 0.25 * ccc, 0.7),
+                 label=reg_prw_dict["text_obs"])
+        axx.plot(data["x"], (data["yobs"])[kobs], linestyle='none',
+                 color=(0.25 * ccc, 1.0 - 0.25 * ccc, 0.7), marker='<')
+        ccc = ccc + 1.0
+        if ccc > 4:
+            ccc = 0.5
+
+    axx.set_title(' ')
+    axx.set_xlabel(r'Water Vapor Path (prw) (kg m$^{-2}$)')
+    axx.set_ylabel('Normalized net shortwave radiation for clear sky (%)')
+    axx.set_xlim([9, 61])
+    axx.set_xticks(np.linspace(10, 60, 6))
+    axx.set_ylim([16.5, 26.5])
+    axx.set_yticks(np.linspace(17, 26, 10))
+    axx.legend(loc=1)
+
+    fig.tight_layout()
+    fig.savefig(get_plot_filename('fig3a_' + dataset_name, cfg), dpi=300)
+    plt.close()
+
+    caption = 'Scatter plot and regression lines between the net ' + \
+              'shortwave radiation for clear sky normalized by the ' + \
+              'incoming solar flux (rsnstcsnorm) and the ' + \
+              'Water Vapor Path (prw) in the pre-industrial climate.'
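+    # The "ypic" and "yobs" curves plotted above are produced by
+    # make_grid_prw(), which averages rsnstcsnorm within 2 kg m-2 wide prw
+    # bins (bin centres 12, 14, ..., 58 kg m-2). Sketch with hypothetical
+    # input arrays prw_sample and rsnstcsnorm_sample:
+    #
+    #     grid_x = np.arange(12.0, 59.0, 2, dtype=np.float64)
+    #     grid_y = make_grid_prw(grid_x, prw_sample, rsnstcsnorm_sample)
+    #     # grid_y[j] = mean of rsnstcsnorm_sample where
+    #     #             grid_x[j] - 1.0 <= prw_sample < grid_x[j] + 1.0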
+
+    provenance_record = get_provenance_record(
+        _get_sel_files_var(cfg, ['prw', 'rsnstcsnorm']), caption, ['other'],
+        ['global'])
+
+    diagnostic_file = get_diagnostic_filename('fig3a_' + dataset_name, cfg)
+
+    logger.info("Saving analysis results to %s", diagnostic_file)
+
+    list_dict = {}
+    list_dict["data"] = [data["x"], data["ypic"]]
+    list_dict["name"] = [{'var_name': 'prw_' + dataset_name,
+                          'long_name': 'Water Vapor Path ' + dataset_name,
+                          'units': 'kg m-2'},
+                         {'var_name': 'rsnstcsnorm',
+                          'long_name': 'Normalized net shortwave ' +
+                                       'radiation for clear sky',
+                          'units': 'percent'}]
+    for kobs in reg_obs.keys():
+        list_dict["data"].append((data["yobs"])[kobs])
+        list_dict["name"].append({'var_name': 'prw_' + kobs,
+                                  'long_name': 'Water Vapor Path ' +
+                                               'CERES-EBAF/' + kobs,
+                                  'units': 'kg m-2'})
+
+    iris.save(cube_to_save_vars(list_dict), target=diagnostic_file)
+
+    logger.info("Recording provenance of %s:\n%s", diagnostic_file,
+                pformat(provenance_record))
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(diagnostic_file, provenance_record)
+        provenance_logger.log(get_plot_filename('fig3a_' + dataset_name, cfg),
+                              provenance_record)
+
+
+def plot_deangelis_fig4(cfg, data_model, mdrsnstdts, prw):
+    """Plot DeAngelis Fig. 4."""
+    # Dictionary of model name vs. absorption scheme from DeAngelis et al.
+    # (2015); it covers all models in the paper, while the other dictionaries
+    # contain only the currently selected ones.
+    model_dict = {
+        'ACCESS1-0': ('Correlated-k-distribution, 10 < N < 20', 'limegreen'),
+        'ACCESS1-3': ('Correlated-k-distribution, 10 < N < 20', 'limegreen'),
+        'bcc-csm1-1': ('7-band parameterization (N = 7)', 'crimson'),
+        'bcc-csm1-1-m': ('7-band parameterization (N = 7)', 'crimson'),
+        'CanESM2': ('Correlated-k-distribution, 10 < N < 20', 'limegreen'),
+        'CCSM4': ('7-band parameterization (N = 7)', 'crimson'),
+        'CNRM-CM5': ('Pade approximants, higher res.', 'slategrey'),
+        'CNRM-CM5-2': ('Pade approximants, higher res.', 'slategrey'),
+        'GFDL-CM3': ('Pseudo-k-distribution, N >= 20', 'gold'),
+        'GFDL-ESM2G': ('Pseudo-k-distribution, N >= 20', 'gold'),
+        'GFDL-ESM2M': ('Pseudo-k-distribution, N >= 20', 'gold'),
+        'GISS-E2-H': ('Pseudo-k-distribution, 10 < N < 20', 'darkorange'),
+        'GISS-E2-R': ('Pseudo-k-distribution, 10 < N < 20', 'darkorange'),
+        'HadGEM2-ES': ('Correlated-k-distribution, N >= 20', 'dodgerblue'),
+        'inmcm4': ('7-band parameterization (N = 7)', 'crimson'),
+        'IPSL-CM5A-LR': ('Pade approximants, lower res.', 'silver'),
+        'IPSL-CM5A-MR': ('Pade approximants, lower res.', 'silver'),
+        'IPSL-CM5B-LR': ('Pade approximants, lower res.', 'silver'),
+        'MIROC-ESM': ('Correlated-k-distribution, N >= 20', 'dodgerblue'),
+        'MIROC5': ('Correlated-k-distribution, N >= 20', 'dodgerblue'),
+        'MPI-ESM-LR': ('Correlated-k-distribution, N >= 20', 'dodgerblue'),
+        'MPI-ESM-MR': ('Correlated-k-distribution, N >= 20', 'dodgerblue'),
+        'MPI-ESM-P': ('Correlated-k-distribution, N >= 20', 'dodgerblue'),
+        'MRI-CGCM3': ('7-band parameterization (N = 7)', 'crimson'),
+        'NorESM1-M': ('7-band parameterization (N = 7)', 'crimson'),
+        'default': ('unknown', 'xkcd:pale purple'),
+    }
+
+    ytickstrs_and_schemes = {}
+    ytickstrs_and_schemes["ytickstrs"] = []
+    ytickstrs_and_schemes["schemes"] = []
+    fig, axx = plt.subplots(figsize=(8, 8))
+
+    for iii, jjj in enumerate(np.argsort(mdrsnstdts[:, 1])):
+        modelkey = list(data_model.keys())[jjj]
+        if modelkey not in model_dict.keys():
+            modelkey = 'default'
+        if (model_dict[modelkey])[0] not in ytickstrs_and_schemes["schemes"]:
+            axx.fill([mdrsnstdts[jjj, 1] - 2.0 * mdrsnstdts[jjj, 2],
+                      mdrsnstdts[jjj, 1] + 2.0 * mdrsnstdts[jjj, 2],
+                      mdrsnstdts[jjj, 1] + 2.0 * mdrsnstdts[jjj, 2],
+                      mdrsnstdts[jjj, 1] - 2.0 * mdrsnstdts[jjj, 2]],
+                     [iii - 0.3, iii - 0.3, iii + 0.3, iii + 0.3],
+                     color=model_dict[modelkey][1],
+                     label=model_dict[modelkey][0])
+            ytickstrs_and_schemes["schemes"].append(model_dict[modelkey][0])
+        else:
+            axx.fill([mdrsnstdts[jjj, 1] - 2.0 * mdrsnstdts[jjj, 2],
+                      mdrsnstdts[jjj, 1] + 2.0 * mdrsnstdts[jjj, 2],
+                      mdrsnstdts[jjj, 1] + 2.0 * mdrsnstdts[jjj, 2],
+                      mdrsnstdts[jjj, 1] - 2.0 * mdrsnstdts[jjj, 2]],
+                     [iii - 0.3, iii - 0.3, iii + 0.3, iii + 0.3],
+                     color=model_dict[modelkey][1])
+
+        ytickstrs_and_schemes["ytickstrs"].append(list(data_model.keys())[jjj])
+
+    axx = set_axx_deangelis4(axx, len(data_model.keys()),
+                             ytickstrs_and_schemes["ytickstrs"],
+                             prw["mean"], prw["diff"])
+
+    fig.tight_layout()
+
+    fig.savefig(get_plot_filename('fig4', cfg), dpi=300)
+    plt.close()
+
+    caption = 'The relationship between the ratio of the change of ' + \
+              'net shortwave radiation (rsnst) and the change of the ' + \
+              'Water Vapor Path (prw) and characteristics of the ' + \
+              'parameterization scheme for solar absorption by water vapour ' + \
+              'in a cloud-free atmosphere, with colours for each model ' + \
+              'referring to different types of parameterizations as described ' + \
+              'in the key (N refers to the number of exponential terms ' + \
+              'representing water vapour absorption). The width of horizontal ' + \
+              'shading for models and the vertical dashed lines for ' + \
+              'observations (Obs.) represent statistical uncertainties of ' + \
+              'the ratio, as the 95% confidence interval (CI) of the regression ' + \
+              'slope to the rsnst versus prw curve. For the observations ' + \
+              'the minimum of the lower bounds of all CIs to the maximum of ' + \
+              'the upper bounds of all CIs is shown.'
+
+    provenance_record = get_provenance_record(_get_sel_files_var(cfg,
+                                                                 ['prw',
+                                                                  'rsnst']),
+                                              caption, ['other'], ['global'])
+
+    diagnostic_file = get_diagnostic_filename('fig4', cfg)
+
+    logger.info("Saving analysis results to %s", diagnostic_file)
+
+    iris.save(cube_to_save_matrix(mdrsnstdts, {'var_name': 'mdrsnstdts',
+                                               'long_name': 'Change of net ' +
+                                                            'shortwave ' +
+                                                            'radiation ' +
+                                                            'with change ' +
+                                                            'of the ' +
+                                                            'Water Vapor Path',
+                                               'units': '% kg-1 m2'}),
+              target=diagnostic_file)
+
+    logger.info("Recording provenance of %s:\n%s", diagnostic_file,
+                pformat(provenance_record))
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(diagnostic_file, provenance_record)
+        provenance_logger.log(get_plot_filename('fig4', cfg),
+                              provenance_record)
+
+
+def plot_deangelis_fig3b4(cfg, data_model, reg_prw_obs):
+    """Plot DeAngelis Fig. 3b and prepare Fig. 4."""
+    # Plot data
+    fig, axx = plt.subplots(figsize=(8, 8))
+
+    mdrsnstdts = np.zeros((len(data_model.keys()), 3))
+
+    for iii, modelkey in enumerate(data_model.keys()):
+        mdrsnstdts[iii, 0] = (list(data_model[modelkey]))[0]
+        mdrsnstdts[iii, 1] = (list(data_model[modelkey]))[2]
+        mdrsnstdts[iii, 2] = (list(data_model[modelkey]))[3]
+        axx.fill([mdrsnstdts[iii, 1] - 2.0 * mdrsnstdts[iii, 2],
+                  mdrsnstdts[iii, 1] + 2.0 * mdrsnstdts[iii, 2],
+                  mdrsnstdts[iii, 1] + 2.0 * mdrsnstdts[iii, 2],
+                  mdrsnstdts[iii, 1] - 2.0 * mdrsnstdts[iii, 2]],
+                 [mdrsnstdts[iii, 0] - 0.01, mdrsnstdts[iii, 0] - 0.01,
+                  mdrsnstdts[iii, 0] + 0.01, mdrsnstdts[iii, 0] + 0.01],
+                 color=(0.8, 0.8, 0.8))
+        proj = (select_metadata(cfg['input_data'].values(),
+                                dataset=modelkey))[0]['project']
+        style = plot.get_dataset_style(modelkey, style_file=proj.lower())
+        axx.plot(mdrsnstdts[iii, 1],
+                 mdrsnstdts[iii, 0],
+                 marker=style['mark'],
+                 color=style['color'],
+                 markerfacecolor=style['facecolor'], linestyle='none',
+                 markersize=10, markeredgewidth=2.0, label=modelkey)
+
+    prw = {}
+    if reg_prw_obs:
+        prw["min"] = np.zeros(len(reg_prw_obs.keys()))
+        prw["max"] = np.zeros(len(reg_prw_obs.keys()))
+        for iii, modelkey in enumerate(reg_prw_obs.keys()):
+            (prw["min"])[iii] = reg_prw_obs[modelkey].slope - 2.0 * \
+                reg_prw_obs[modelkey].stderr
+            (prw["max"])[iii] = reg_prw_obs[modelkey].slope + 2.0 * \
+                reg_prw_obs[modelkey].stderr
+        prw["mean"] = (np.min(prw["min"]) + np.max(prw["max"])) / 2.0
+        prw["diff"] = np.max(prw["max"]) - prw["mean"]
+    else:
+        prw["mean"] = 0.0
+        prw["diff"] = 0.0
+
+    axx = set_axx_deangelis3b(axx, prw["mean"], prw["diff"])
+
+    # Regression line
+    reg = stats.linregress(mdrsnstdts[:, 1], mdrsnstdts[:, 0])
+
+    axx.plot(np.linspace(0.0, 0.15, 2), reg.slope *
+             np.linspace(0.0, 0.15, 2) + reg.intercept,
+             linestyle='solid', color='r',
+             label='Fit (r = {:.2f})'.format(reg.rvalue))
+
+    axx.legend(ncol=2, loc=2)
+    fig.tight_layout()
+    fig.savefig(get_plot_filename('fig3b', cfg), dpi=300)
+    plt.close()
+
+    caption = 'Scatter plot and regression line between the ratio ' + \
+              'of the change of ' + \
+              'net shortwave radiation (rsnst) and the change of the ' + \
+              'Water Vapor Path (prw) against the ratio of the change of ' + \
+              'net shortwave radiation for clear sky (rsnstcs) and ' + \
+              'the change of surface temperature (tas). ' + \
+              'The width of horizontal ' + \
+              'shading for models and the vertical dashed lines for ' + \
+              'observations (Obs.) represent statistical uncertainties of ' + \
+              'the ratio, as the 95% confidence interval (CI) of the ' + \
+              'regression slope to the rsnst versus prw curve. For the ' + \
+              'observations the minimum of the lower bounds of all CIs to ' + \
+              'the maximum of the upper bounds of all CIs is shown.'
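+    # The shading widths and the observational range described in the caption
+    # are built from the linregress slope plus/minus two standard errors,
+    # i.e. an approximate 95% confidence interval assuming normal errors;
+    # schematically, for hypothetical arrays x_vals and y_vals:
+    #
+    #     reg = stats.linregress(x_vals, y_vals)
+    #     ci_low = reg.slope - 2.0 * reg.stderr
+    #     ci_high = reg.slope + 2.0 * reg.stderr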
+
+    provenance_record = get_provenance_record(_get_sel_files_var(cfg,
+                                                                 ['prw',
+                                                                  'tas',
+                                                                  'rsnst',
+                                                                  'rsnstcs']),
+                                              caption, ['other'], ['global'])
+
+    diagnostic_file = get_diagnostic_filename('fig3b', cfg)
+
+    logger.info("Saving analysis results to %s", diagnostic_file)
+
+    iris.save(cube_to_save_matrix(mdrsnstdts, {'var_name': 'mdrsnstdts',
+                                               'long_name': 'Change of net ' +
+                                                            'shortwave ' +
+                                                            'radiation ' +
+                                                            'with change ' +
+                                                            'of the ' +
+                                                            'Water Vapor Path',
+                                               'units': '% kg-1 m2'}),
+              target=diagnostic_file)
+
+    logger.info("Recording provenance of %s:\n%s", diagnostic_file,
+                pformat(provenance_record))
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(diagnostic_file, provenance_record)
+        provenance_logger.log(get_plot_filename('fig3b', cfg),
+                              provenance_record)
+
+    # Fig 4
+    plot_deangelis_fig4(cfg, data_model, mdrsnstdts, prw)
+
+
+def make_grid_prw(grid_pwx, data_prw_obs, data_rsnstcsnorm_obs):
+    """Grid rsnstcsnorm based on the prw grid."""
+    gridded_rsnstcsnorm_obs = np.zeros(len(grid_pwx), dtype=np.float64)
+
+    for jjj, bincenter in enumerate(grid_pwx):
+        index_obs = np.where((data_prw_obs >= bincenter - 1.0) &
+                             (data_prw_obs < bincenter + 1.0))
+        gridded_rsnstcsnorm_obs[jjj] = np.mean(data_rsnstcsnorm_obs[index_obs])
+
+    return gridded_rsnstcsnorm_obs
+
+
+def reform_data_iris_deangelis3b4(input_data):
+    """Extract data from iris cubes and average or reformat them."""
+    # Model data for 'tas' and 'rsnstcs'
+    cubes = {}
+    for my_short_name in ['tas', 'rsnstcs']:
+        # my_data: list of dictionaries
+        my_data = select_metadata(input_data, short_name=my_short_name)
+        # subdata: dictionary
+        for subdata in my_data:
+            cube = iris.load(subdata['filename'])[0]
+            cat.add_year(cube, 'time', name='year')
+            cube = cube.aggregated_by('year', iris.analysis.MEAN)
+            experiment = subdata['exp']
+            if experiment == 'abrupt-4xCO2':
+                experiment = 'abrupt4xCO2'
+            dataset = subdata['dataset']
+            cubetuple = (dataset, my_short_name, experiment)
+            if experiment == 'piControl':
+                # DeAngelis et al. (2015) use a 21 year running mean on
+                # piControl but the full extent (150 years) of abrupt4xCO2.
+                # It is unclear how they treat the edges; here the smoothing
+                # is handled as described below. This is not exactly what was
+                # done in the paper (small differences remain in Extended
+                # Data Fig. 1), but it is closer than the other methods
+                # tried. For most data sets it would also be possible to
+                # extend the piControl run by 20 years, but then the
+                # piControl means would no longer be centred on each year of
+                # abrupt4xCO2.
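+                # Note: scipy.signal.savgol_filter with polyorder=1 reduces
+                # to a centred moving average away from the series edges, so
+                # the call below acts (presumably by design) as a 21-year
+                # running mean that, unlike the commented rolling_window
+                # variant, also fills the edges; for an array x,
+                #
+                #     scisi.savgol_filter(x, 21, 1)[10:-10]
+                #
+                # equals np.convolve(x, np.ones(21) / 21, 'valid') up to
+                # floating-point rounding.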
+                # cube_new = cube.rolling_window('time',
+                #                                iris.analysis.MEAN, 21)
+                # endm10 = len(cube.coord('time').points) - 10
+                # cube.data[10:endm10] = cube_new.data
+                cube.data = scisi.savgol_filter(cube.data, 21, 1, axis=0)
+            cubes[cubetuple] = cube.data
+
+    # Model data and observations for 'rsnstcsnorm' and 'prw'
+    for my_short_name in ['rsnstcsnorm', 'prw']:
+        # my_data: list of dictionaries
+        my_data = select_metadata(input_data, short_name=my_short_name)
+        # subdata: dictionary
+        for subdata in my_data:
+            if 'exp' in subdata.keys():
+                experiment = subdata['exp']
+            else:
+                experiment = 'nomodel'
+            dataset = subdata['dataset']
+            cubetuple = (dataset, my_short_name, experiment)
+            if experiment in ['piControl', 'nomodel']:
+                cube = iris.load(subdata['filename'])[0]
+                total_len = len(cube.coord('time').points) * \
+                    len(cube.coord('latitude').points) * \
+                    len(cube.coord('longitude').points)
+                data_new = np.reshape(cube.data, total_len)
+                cubes[cubetuple] = data_new
+
+    return cubes
+
+
+def substract_and_reg_deangelis(cfg, cubes, grid_pw,
+                                reg_prw_obs):
+    """Subtract piControl from abrupt4xCO2 for all models and variables."""
+    data_model = OrderedDict()
+
+    model_tub_tas_pi = []
+
+    for tub in cubes.keys():
+        if tub[2] == 'piControl':
+            if tub[1] == 'tas':
+                model_tub_tas_pi.append(tub)
+
+    # Models
+    for model_run in model_tub_tas_pi:
+        # Subtract the piControl experiment from the abrupt4xCO2 experiment
+        data_prw_pic = (cubes[(model_run[0], 'prw', model_run[2])])
+        data_rsnstcsnorm_pic = (cubes[(model_run[0],
+                                       'rsnstcsnorm', model_run[2])])
+        data_tas = (cubes[(model_run[0],
+                           model_run[1], 'abrupt4xCO2')]).data - \
+            (cubes[model_run])
+        data_rsnstcs = (cubes[(model_run[0], 'rsnstcs', 'abrupt4xCO2')]) - \
+            (cubes[(model_run[0], 'rsnstcs', model_run[2])])
+
+        grid_pw["ypic"] = make_grid_prw(grid_pw["x"], data_prw_pic,
+                                        data_rsnstcsnorm_pic)
+
+        reg6 = stats.linregress(data_tas, data_rsnstcs)
+
+        reg_prw = stats.linregress(grid_pw["x"], grid_pw["ypic"])
+
+        data_model[model_run[0]] = [reg6.slope, reg6.stderr, reg_prw.slope,
+                                    reg_prw.stderr]
+
+        plot_deangelis_fig3a(cfg, model_run[0],
+                             grid_pw, reg_prw,
+                             reg_prw_obs)
+
+    return data_model
+
+
+###############################################################################
+# Setup diagnostic
+###############################################################################
+
+# Experiments: 'piControl' and 'abrupt4xCO2'
+
+if __name__ == '__main__':
+    with run_diagnostic() as config:
+        main(config)
diff --git a/esmvaltool/diag_scripts/droughtindex/collect_drought_func.py b/esmvaltool/diag_scripts/droughtindex/collect_drought_func.py
new file mode 100644
index 0000000000..908212c6c7
--- /dev/null
+++ b/esmvaltool/diag_scripts/droughtindex/collect_drought_func.py
@@ -0,0 +1,650 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+
+"""Drought characteristics and plots based on Martin (2018).
+
+###############################################################################
+droughtindex/collect_drought_func.py
+Author: Katja Weigel, Kemisola Adeniyi (IUP, Uni Bremen, Germany)
+EVal4CMIP project
+###############################################################################
+
+Description
+-----------
+    Functions for:
+    collect_drought_obs_multi.py and droughtindex/collect_drought_model.py.
+
+Configuration options
+---------------------
+    None
+
+###############################################################################
+
+"""
+
+
+import logging
+import os
+from pprint import pformat
+import numpy as np
+import iris
+from iris.analysis import Aggregator
+import cartopy.crs as cart
+import matplotlib.pyplot as plt
+import matplotlib.dates as mda
+from esmvaltool.diag_scripts.shared import (ProvenanceLogger,
+                                            get_diagnostic_filename,
+                                            get_plot_filename)
+
+logger = logging.getLogger(os.path.basename(__file__))
+
+
+def _get_data_hlp(axis, data, ilat, ilon):
+    """Get data_help dependent on the axis."""
+    if axis == 0:
+        data_help = (data[:, ilat, ilon])[:, 0]
+    elif axis == 1:
+        data_help = (data[ilat, :, ilon])[:, 0]
+    elif axis == 2:
+        data_help = data[ilat, ilon, :]
+
+    return data_help
+
+
+def _get_drought_data(cfg, cube):
+    """Prepare data and calculate characteristics."""
+    # Make a new cube to increase the size of the data array.
+    # Make an aggregator from the user function.
+    spell_no = Aggregator('spell_count',
+                          count_spells,
+                          units_func=lambda units: 1)
+    new_cube = _make_new_cube(cube)
+
+    # Calculate the number of drought events and their average duration
+    drought_show = new_cube.collapsed('time', spell_no,
+                                      threshold=cfg['threshold'])
+    drought_show.rename('Drought characteristics')
+    # Length of the time series
+    time_length = len(new_cube.coord('time').points) / 12.0
+    # Convert the number of drought events to a frequency (per year)
+    drought_show.data[:, :, 0] = drought_show.data[:, :,
+                                                   0] / time_length
+    return drought_show
+
+
+def _provenance_map_spei(cfg, name_dict, spei, dataset_name):
+    """Set provenance for plot_map_spei."""
+    caption = 'Global map of ' + \
+        name_dict['drought_char'] + \
+        ' [' + name_dict['unit'] + '] ' + \
+        'based on ' + cfg['indexname'] + '.'
+
+    if cfg['indexname'].lower() == "spei":
+        set_refs = ['martin18grl', 'vicente10jclim', ]
+    elif cfg['indexname'].lower() == "spi":
+        set_refs = ['martin18grl', 'mckee93proc', ]
+    else:
+        set_refs = ['martin18grl', ]
+
+    provenance_record = get_provenance_record([name_dict['input_filenames']],
+                                              caption,
+                                              ['global'],
+                                              set_refs)
+
+    diagnostic_file = get_diagnostic_filename(cfg['indexname'] + '_map' +
+                                              name_dict['add_to_filename'] +
+                                              '_' +
+                                              dataset_name, cfg)
+    plot_file = get_plot_filename(cfg['indexname'] + '_map' +
+                                  name_dict['add_to_filename'] +
+                                  '_' +
+                                  dataset_name, cfg)
+
+    logger.info("Saving analysis results to %s", diagnostic_file)
+
+    cubesave = cube_to_save_ploted(spei, name_dict)
+    iris.save(cubesave, target=diagnostic_file)
+
+    logger.info("Recording provenance of %s:\n%s", diagnostic_file,
+                pformat(provenance_record))
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(plot_file, provenance_record)
+        provenance_logger.log(diagnostic_file, provenance_record)
+
+
+def _provenance_map_spei_multi(cfg, data_dict, spei, input_filenames):
+    """Set provenance for plot_map_spei_multi."""
+    caption = 'Global map of the multi-model mean of ' + \
+        data_dict['drought_char'] + \
+        ' [' + data_dict['unit'] + '] ' + \
+        'based on ' + cfg['indexname'] + '.'
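+
+    # Illustrative result (assuming cfg['indexname'] == 'SPEI' and the
+    # frequency field from _plot_multi_model_maps): the caption above
+    # renders as 'Global map of the multi-model mean of Number of drought
+    # events per year [year-1] based on SPEI.'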
+
+    if cfg['indexname'].lower() == "spei":
+        set_refs = ['martin18grl', 'vicente10jclim', ]
+    elif cfg['indexname'].lower() == "spi":
+        set_refs = ['martin18grl', 'mckee93proc', ]
+    else:
+        set_refs = ['martin18grl', ]
+
+    provenance_record = get_provenance_record(input_filenames, caption,
+                                              ['global'],
+                                              set_refs)
+
+    diagnostic_file = get_diagnostic_filename(cfg['indexname'] + '_map' +
+                                              data_dict['filename'] + '_' +
+                                              data_dict['datasetname'], cfg)
+    plot_file = get_plot_filename(cfg['indexname'] + '_map' +
+                                  data_dict['filename'] + '_' +
+                                  data_dict['datasetname'], cfg)
+
+    logger.info("Saving analysis results to %s", diagnostic_file)
+
+    iris.save(cube_to_save_ploted(spei, data_dict), target=diagnostic_file)
+
+    logger.info("Recording provenance of %s:\n%s", diagnostic_file,
+                pformat(provenance_record))
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(plot_file, provenance_record)
+        provenance_logger.log(diagnostic_file, provenance_record)
+
+
+def _provenance_time_series_spei(cfg, data_dict):
+    """Set provenance for time series plots."""
+    caption = 'Time series of ' + \
+        data_dict['var'] + \
+        ' at ' + data_dict['area'] + '.'
+
+    if cfg['indexname'].lower() == "spei":
+        set_refs = ['vicente10jclim', ]
+    elif cfg['indexname'].lower() == "spi":
+        set_refs = ['mckee93proc', ]
+    else:
+        set_refs = ['martin18grl', ]
+
+    provenance_record = get_provenance_record([data_dict['filename']],
+                                              caption,
+                                              ['reg'], set_refs,
+                                              plot_type='times')
+
+    diagnostic_file = get_diagnostic_filename(cfg['indexname'] +
+                                              '_time_series_' +
+                                              data_dict['area'] +
+                                              '_' +
+                                              data_dict['dataset_name'], cfg)
+    plot_file = get_plot_filename(cfg['indexname'] +
+                                  '_time_series_' +
+                                  data_dict['area'] +
+                                  '_' +
+                                  data_dict['dataset_name'], cfg)
+    logger.info("Saving analysis results to %s", diagnostic_file)
+
+    cubesave = cube_to_save_ploted_ts(data_dict)
+    iris.save(cubesave, target=diagnostic_file)
+
+    logger.info("Recording provenance of %s:\n%s", diagnostic_file,
+                pformat(provenance_record))
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(plot_file, provenance_record)
+        provenance_logger.log(diagnostic_file, provenance_record)
+
+
+def cube_to_save_ploted(var, data_dict):
+    """Create a cube to prepare plotted data for saving to netCDF."""
+    plot_cube = iris.cube.Cube(var, var_name=data_dict['var'],
+                               long_name=data_dict['drought_char'],
+                               units=data_dict['unit'])
+    plot_cube.add_dim_coord(iris.coords.DimCoord(data_dict['latitude'],
+                                                 var_name='lat',
+                                                 long_name='latitude',
+                                                 units='degrees_north'), 0)
+    plot_cube.add_dim_coord(iris.coords.DimCoord(data_dict['longitude'],
+                                                 var_name='lon',
+                                                 long_name='longitude',
+                                                 units='degrees_east'), 1)
+
+    return plot_cube
+
+
+def cube_to_save_ploted_ts(data_dict):
+    """Create a cube to prepare a plotted time series for saving to netCDF."""
+    plot_cube = iris.cube.Cube(data_dict['data'], var_name=data_dict['var'],
+                               long_name=data_dict['var'],
+                               units=data_dict['unit'])
+    plot_cube.add_dim_coord(iris.coords.DimCoord(data_dict['time'],
+                                                 var_name='time',
+                                                 long_name='Time',
+                                                 units='month'), 0)
+
+    return plot_cube
+
+
+def get_provenance_record(ancestor_files, caption,
+                          domains, refs, plot_type='geo'):
+    """Get provenance record."""
+    record = {
+        'caption': caption,
+        'statistics': ['mean'],
+        'domains': domains,
+        'plot_type': plot_type,
+        'themes': ['phys'],
+        'authors': [
+            'weigel_katja',
+            'adeniyi_kemisola',
+        ],
+        'references': refs,
+        'ancestors': ancestor_files,
+    }
+    return record
+
+
+def _make_new_cube(cube):
+    """Make a
new cube with an extra dimension for result of spell count.""" + new_shape = cube.shape + (4,) + new_data = iris.util.broadcast_to_shape(cube.data, new_shape, [0, 1, 2]) + new_cube = iris.cube.Cube(new_data) + new_cube.add_dim_coord(iris.coords.DimCoord( + cube.coord('time').points, long_name='time'), 0) + new_cube.add_dim_coord(iris.coords.DimCoord( + cube.coord('latitude').points, long_name='latitude'), 1) + new_cube.add_dim_coord(iris.coords.DimCoord( + cube.coord('longitude').points, long_name='longitude'), 2) + new_cube.add_dim_coord(iris.coords.DimCoord( + [0, 1, 2, 3], long_name='z'), 3) + return new_cube + + +def _plot_multi_model_maps(cfg, all_drought_mean, lats_lons, input_filenames, + tstype): + """Prepare plots for multi-model mean.""" + data_dict = {'latitude': lats_lons[0], + 'longitude': lats_lons[1], + 'model_kind': tstype + } + if tstype == 'Difference': + # RCP85 Percentage difference + data_dict.update({'data': all_drought_mean[:, :, 0], + 'var': 'diffnumber', + 'datasetname': 'Percentage', + 'drought_char': 'Number of drought events', + 'unit': '%', + 'filename': 'Percentage_difference_of_No_of_Events', + 'drought_numbers_level': np.arange(-100, 110, 10)}) + plot_map_spei_multi(cfg, data_dict, input_filenames, + colormap='rainbow') + + data_dict.update({'data': all_drought_mean[:, :, 1], + 'var': 'diffduration', + 'drought_char': 'Duration of drought events', + 'filename': 'Percentage_difference_of_Dur_of_Events', + 'drought_numbers_level': np.arange(-100, 110, 10)}) + plot_map_spei_multi(cfg, data_dict, input_filenames, + colormap='rainbow') + + data_dict.update({'data': all_drought_mean[:, :, 2], + 'var': 'diffseverity', + 'drought_char': 'Severity Index of drought events', + 'filename': 'Percentage_difference_of_Sev_of_Events', + 'drought_numbers_level': np.arange(-50, 60, 10)}) + plot_map_spei_multi(cfg, data_dict, input_filenames, + colormap='rainbow') + + data_dict.update({'data': all_drought_mean[:, :, 3], + 'var': 'diff' + (cfg['indexname']).lower(), + 'drought_char': 'Average ' + cfg['indexname'] + + ' of drought events', + 'filename': 'Percentage_difference_of_Avr_of_Events', + 'drought_numbers_level': np.arange(-50, 60, 10)}) + plot_map_spei_multi(cfg, data_dict, input_filenames, + colormap='rainbow') + else: + data_dict.update({'data': all_drought_mean[:, :, 0], + 'var': 'frequency', + 'unit': 'year-1', + 'drought_char': 'Number of drought events per year', + 'filename': tstype + '_No_of_Events_per_year', + 'drought_numbers_level': np.arange(0, 0.4, 0.05)}) + if tstype == 'Observations': + data_dict['datasetname'] = 'Mean' + else: + data_dict['datasetname'] = 'MultiModelMean' + plot_map_spei_multi(cfg, data_dict, input_filenames, + colormap='gnuplot') + + data_dict.update({'data': all_drought_mean[:, :, 1], + 'var': 'duration', + 'unit': 'month', + 'drought_char': 'Duration of drought events [month]', + 'filename': tstype + '_Dur_of_Events', + 'drought_numbers_level': np.arange(0, 6, 1)}) + plot_map_spei_multi(cfg, data_dict, input_filenames, + colormap='gnuplot') + + data_dict.update({'data': all_drought_mean[:, :, 2], + 'var': 'severity', + 'unit': '1', + 'drought_char': 'Severity Index of drought events', + 'filename': tstype + '_Sev_index_of_Events', + 'drought_numbers_level': np.arange(0, 9, 1)}) + plot_map_spei_multi(cfg, data_dict, input_filenames, + colormap='gnuplot') + namehlp = 'Average ' + cfg['indexname'] + ' of drought events' + namehlp2 = tstype + '_Average_' + cfg['indexname'] + '_of_Events' + data_dict.update({'data': all_drought_mean[:, :, 
3],
+                          'var': (cfg['indexname']).lower(),
+                          'unit': '1',
+                          'drought_char': namehlp,
+                          'filename': namehlp2,
+                          'drought_numbers_level': np.arange(-2.8, -1.8,
+                                                             0.2)})
+        plot_map_spei_multi(cfg, data_dict, input_filenames,
+                            colormap='gnuplot')
+
+
+def _plot_single_maps(cfg, cube2, drought_show, tstype, input_filenames):
+    """Plot maps of drought characteristics for individual models/times."""
+    cube2.data = drought_show.data[:, :, 0]
+    name_dict = {'add_to_filename': tstype + '_No_of_Events_per_year',
+                 'name': tstype + ' Number of drought events per year',
+                 'var': 'frequency',
+                 'unit': 'year-1',
+                 'drought_char': 'Number of drought events per year',
+                 'input_filenames': input_filenames}
+    plot_map_spei(cfg, cube2, np.arange(0, 0.4, 0.05),
+                  name_dict)
+
+    # Plot the average duration of drought events
+    cube2.data = drought_show.data[:, :, 1]
+    name_dict.update({'add_to_filename': tstype + '_Dur_of_Events',
+                      'name': tstype + ' Duration of drought events (month)',
+                      'var': 'duration',
+                      'unit': 'month',
+                      'drought_char': 'Duration of drought events (month)',
+                      'input_filenames': input_filenames})
+    plot_map_spei(cfg, cube2, np.arange(0, 6, 1), name_dict)
+
+    # Plot the average severity index of drought events
+    cube2.data = drought_show.data[:, :, 2]
+    name_dict.update({'add_to_filename': tstype + '_Sev_index_of_Events',
+                      'name': tstype + ' Severity Index of drought events',
+                      'var': 'severity',
+                      'unit': '1',
+                      'drought_char': 'Severity Index of drought events',
+                      'input_filenames': input_filenames})
+    plot_map_spei(cfg, cube2, np.arange(0, 9, 1), name_dict)
+
+    # Plot the average SPI/SPEI of drought events
+    cube2.data = drought_show.data[:, :, 3]
+
+    namehlp = tstype + ' Average ' + cfg['indexname'] + ' of drought events'
+    namehlp2 = tstype + '_Average_' + cfg['indexname'] + '_of_Events'
+    name_dict.update({'add_to_filename': namehlp2,
+                      'name': namehlp,
+                      'var': (cfg['indexname']).lower(),
+                      'unit': '1',
+                      'drought_char': namehlp,
+                      'input_filenames': input_filenames})
+    plot_map_spei(cfg, cube2, np.arange(-2.8, -1.8, 0.2), name_dict)
+
+
+def runs_of_ones_array_spei(bits, spei):
+    """Set 1 at the beginning and -1 at the end of events."""
+    # Make sure all runs of ones are well-bounded
+    bounded = np.hstack(([0], bits, [0]))
+    # Get 1 at run starts and -1 at run ends
+    difs = np.diff(bounded)
+    run_starts, = np.where(difs > 0)
+    run_ends, = np.where(difs < 0)
+    spei_sum = np.full(len(run_starts), 0.5)
+    for iii, indexs in enumerate(run_starts):
+        spei_sum[iii] = np.sum(spei[indexs:run_ends[iii]])
+
+    return [run_ends - run_starts, spei_sum]
+
+
+def count_spells(data, threshold, axis):
+    """Count spells (user function for the iris aggregator)."""
+    if axis < 0:
+        # Just cope with negative axis numbers
+        axis += data.ndim
+    data = data[:, :, 0, :]
+    if axis > 2:
+        axis = axis - 1
+
+    listshape = []
+    inoax = []
+    for iii, ishape in enumerate(data.shape):
+        if iii != axis:
+            listshape.append(ishape)
+            inoax.append(iii)
+
+    listshape.append(4)
+    return_var = np.zeros(tuple(listshape))
+
+    for ilat in range(listshape[0]):
+        for ilon in range(listshape[1]):
+            data_help = _get_data_hlp(axis, data, ilat, ilon)
+
+            if data_help.count() == 0:
+                return_var[ilat, ilon, 0] = data_help[0]
+                return_var[ilat, ilon, 1] = data_help[0]
+                return_var[ilat, ilon, 2] = data_help[0]
+                return_var[ilat, ilon, 3] = data_help[0]
+            else:
+                data_hits = data_help < threshold
+                [events, spei_sum] = runs_of_ones_array_spei(data_hits,
+                                                             data_help)
+
+                return_var[ilat, ilon, 0] = np.count_nonzero(events)
+                return_var[ilat, ilon,
1] = np.mean(events) + return_var[ilat, ilon, 2] = np.mean((spei_sum * events) / + (np.mean(data_help + [data_hits]) + * np.mean(events))) + return_var[ilat, ilon, 3] = np.mean(spei_sum / events) + + return return_var + + +def get_latlon_index(coords, lim1, lim2): + """Get index for given values between two limits (1D), e.g. lats, lons.""" + index = (np.where(np.absolute(coords - (lim2 + lim1) + / 2.0) <= (lim2 - lim1) + / 2.0))[0] + return index + + +def plot_map_spei_multi(cfg, data_dict, input_filenames, colormap='jet'): + """Plot contour maps for multi model mean.""" + spei = np.ma.array(data_dict['data'], mask=np.isnan(data_dict['data'])) + + # Get latitudes and longitudes from cube + lons = data_dict['longitude'] + if max(lons) > 180.0: + lons = np.where(lons > 180, lons - 360, lons) + # sort the array + index = np.argsort(lons) + lons = lons[index] + spei = spei[np.ix_(range(data_dict['latitude'].size), index)] + + # Plot data + # Create figure and axes instances + subplot_kw = {'projection': cart.PlateCarree(central_longitude=0.0)} + fig, axx = plt.subplots(figsize=(6.5, 4), subplot_kw=subplot_kw) + axx.set_extent([-180.0, 180.0, -90.0, 90.0], + cart.PlateCarree(central_longitude=0.0)) + + # Draw filled contours + cnplot = plt.contourf(lons, data_dict['latitude'], spei, + data_dict['drought_numbers_level'], + transform=cart.PlateCarree(central_longitude=0.0), + cmap=colormap, extend='both', corner_mask=False) + # Draw coastlines + axx.coastlines() + + # Add colorbar + cbar = fig.colorbar(cnplot, ax=axx, shrink=0.6, orientation='horizontal') + + # Add colorbar title string + if data_dict['model_kind'] == 'Difference': + cbar.set_label(data_dict['model_kind'] + ' ' + + data_dict['drought_char'] + ' [%]') + else: + cbar.set_label(data_dict['model_kind'] + ' ' + + data_dict['drought_char']) + + # Set labels and title to each plot + axx.set_xlabel('Longitude') + axx.set_ylabel('Latitude') + axx.set_title(data_dict['datasetname'] + ' ' + data_dict['model_kind'] + + ' ' + data_dict['drought_char']) + + # Sets number and distance of x ticks + axx.set_xticks(np.linspace(-180, 180, 7)) + # Sets strings for x ticks + axx.set_xticklabels(['180°W', '120°W', '60°W', + '0°', '60°E', '120°E', + '180°E']) + # Sets number and distance of y ticks + axx.set_yticks(np.linspace(-90, 90, 7)) + # Sets strings for y ticks + axx.set_yticklabels(['90°S', '60°S', '30°S', '0°', + '30°N', '60°N', '90°N']) + + fig.tight_layout() + fig.savefig(get_plot_filename(cfg['indexname'] + '_map' + + data_dict['filename'] + '_' + + data_dict['datasetname'], cfg), dpi=300) + plt.close() + + _provenance_map_spei_multi(cfg, data_dict, spei, input_filenames) + + +def plot_map_spei(cfg, cube, levels, name_dict): + """Plot contour map.""" + mask = np.isnan(cube.data) + spei = np.ma.array(cube.data, mask=mask) + np.ma.masked_less_equal(spei, 0) + + # Get latitudes and longitudes from cube + name_dict.update({'latitude': cube.coord('latitude').points}) + lons = cube.coord('longitude').points + lons = np.where(lons > 180, lons - 360, lons) + # sort the array + index = np.argsort(lons) + lons = lons[index] + name_dict.update({'longitude': lons}) + spei = spei[np.ix_(range(len(cube.coord('latitude').points)), index)] + + # Get data set name from cube + try: + dataset_name = cube.metadata.attributes['model_id'] + except KeyError: + try: + dataset_name = cube.metadata.attributes['source_id'] + except KeyError: + dataset_name = 'Observations' + + # Plot data + # Create figure and axes instances + subplot_kw = {'projection': 
cart.PlateCarree(central_longitude=0.0)} + fig, axx = plt.subplots(figsize=(8, 4), subplot_kw=subplot_kw) + axx.set_extent([-180.0, 180.0, -90.0, 90.0], + cart.PlateCarree(central_longitude=0.0)) + + # np.set_printoptions(threshold=np.nan) + + # Draw filled contours + cnplot = plt.contourf(lons, cube.coord('latitude').points, spei, + levels, + transform=cart.PlateCarree(central_longitude=0.0), + cmap='gnuplot', extend='both', corner_mask=False) + # Draw coastlines + axx.coastlines() + + # Add colorbar + cbar = fig.colorbar(cnplot, ax=axx, shrink=0.6, orientation='horizontal') + + # Add colorbar title string + cbar.set_label(name_dict['name']) + + # Set labels and title to each plot + axx.set_xlabel('Longitude') + axx.set_ylabel('Latitude') + axx.set_title(dataset_name + ' ' + name_dict['name']) + + # Sets number and distance of x ticks + axx.set_xticks(np.linspace(-180, 180, 7)) + # Sets strings for x ticks + axx.set_xticklabels(['180°W', '120°W', '60°W', + '0°', '60°E', '120°E', + '180°E']) + # Sets number and distance of y ticks + axx.set_yticks(np.linspace(-90, 90, 7)) + # Sets strings for y ticks + axx.set_yticklabels(['90°S', '60°S', '30°S', '0°', + '30°N', '60°N', '90°N']) + + fig.tight_layout() + + fig.savefig(get_plot_filename(cfg['indexname'] + '_map' + + name_dict['add_to_filename'] + '_' + + dataset_name, cfg), dpi=300) + plt.close() + + _provenance_map_spei(cfg, name_dict, spei, dataset_name) + + +def plot_time_series_spei(cfg, cube, filename, add_to_filename=''): + """Plot time series.""" + # SPEI vector to plot + spei = cube.data + # Get time from cube + time = cube.coord('time').points + # Adjust (ncdf) time to the format matplotlib expects + add_m_delta = mda.datestr2num('1850-01-01 00:00:00') + time = time + add_m_delta + + # Get data set name from cube + try: + dataset_name = cube.metadata.attributes['model_id'] + except KeyError: + try: + dataset_name = cube.metadata.attributes['source_id'] + except KeyError: + dataset_name = 'Observations' + + data_dict = {'data': spei, + 'time': time, + 'var': cfg['indexname'], + 'dataset_name': dataset_name, + 'unit': '1', + 'filename': filename, + 'area': add_to_filename} + + fig, axx = plt.subplots(figsize=(16, 4)) + axx.plot_date(time, spei, '-', tz=None, xdate=True, ydate=False, + color='r', linewidth=4., linestyle='-', alpha=1., + marker='x') + axx.axhline(y=-2, color='k') + + # Plot labels and title + axx.set_xlabel('Time') + axx.set_ylabel(cfg['indexname']) + axx.set_title('Mean ' + cfg['indexname'] + ' ' + + data_dict['dataset_name'] + ' ' + + data_dict['area']) + + # Set limits for y-axis + axx.set_ylim(-4.0, 4.0) + + # Often improves the layout + fig.tight_layout() + # Save plot to file + fig.savefig(get_plot_filename(cfg['indexname'] + + '_time_series_' + + data_dict['area'] + + '_' + + data_dict['dataset_name'], cfg), dpi=300) + plt.close() + + _provenance_time_series_spei(cfg, data_dict) diff --git a/esmvaltool/diag_scripts/droughtindex/collect_drought_model.py b/esmvaltool/diag_scripts/droughtindex/collect_drought_model.py new file mode 100644 index 0000000000..da0c64bd40 --- /dev/null +++ b/esmvaltool/diag_scripts/droughtindex/collect_drought_model.py @@ -0,0 +1,147 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + + +"""Collects SPI or SPEI data comparing historic and future model scenarios. + +Applies drought characteristics based on Martin (2018). 
+
+###############################################################################
+droughtindex/collect_drought_model.py
+Author: Katja Weigel (IUP, Uni Bremen, Germany)
+EVal4CMIP project
+###############################################################################
+
+Description
+-----------
+    Collects data produced by diag_save_spi.R or diag_save_spei_all.R
+    to plot/process them further.
+
+Configuration options
+---------------------
+    indexname: "SPI" or "SPEI"
+    start_year: year, start of historical time series
+    end_year: year, end of future scenario
+    comparison_period: should be < (end_year - start_year)/2
+
+###############################################################################
+
+"""
+import os
+import glob
+import datetime
+import iris
+import numpy as np
+import esmvaltool.diag_scripts.shared as e
+import esmvaltool.diag_scripts.shared.names as n
+from esmvaltool.diag_scripts.droughtindex.collect_drought_func import (
+    _get_drought_data, _plot_multi_model_maps, _plot_single_maps)
+
+
+def _get_and_plot_multimodel(cfg, cube, all_drought, input_filenames):
+    """Calculate multi-model mean and compare historic and future."""
+    all_drought_mean = {}
+    for tstype in ['Historic', 'Future']:
+        all_drought_mean[tstype] = np.nanmean(all_drought[tstype], axis=-1)
+
+    all_drought_mean['Difference'] = ((all_drought_mean['Future'] -
+                                       all_drought_mean['Historic']) /
+                                      (all_drought_mean['Future'] +
+                                       all_drought_mean['Historic']) * 200)
+
+    # Plot multi model means
+    for tstype in ['Historic', 'Future', 'Difference']:
+        _plot_multi_model_maps(cfg, all_drought_mean[tstype],
+                               [cube.coord('latitude').points,
+                                cube.coord('longitude').points],
+                               input_filenames,
+                               tstype)
+
+
+def _set_tscube(cfg, cube, time, tstype):
+    """Time slice from a cube with start/end given by cfg."""
+    if tstype == 'Future':
+        # extract time series from rcp model data
+        # cfg['end_year'] - cfg['comparison_period'] to cfg['end_year']
+        start = datetime.datetime(cfg['end_year'] -
+                                  cfg['comparison_period'], 1, 15, 0, 0, 0)
+        end = datetime.datetime(cfg['end_year'], 12, 16, 0, 0, 0)
+    elif tstype == 'Historic':
+        # extract time series from historical model data
+        # cfg['start_year'] to cfg['start_year'] + cfg['comparison_period']
+        start = datetime.datetime(cfg['start_year'], 1, 15, 0, 0, 0)
+        end = datetime.datetime(cfg['start_year'] +
+                                cfg['comparison_period'], 12, 16, 0, 0, 0)
+    stime = time.nearest_neighbour_index(time.units.date2num(start))
+    etime = time.nearest_neighbour_index(time.units.date2num(end))
+    tscube = cube[stime:etime, :, :]
+    return tscube
+
+
+def main(cfg):
+    """Run the diagnostic.
+
+    Parameters
+    ----------
+    cfg : dict
+        Configuration dictionary of the recipe.
+
+    """
+    ######################################################################
+    # Read recipe data
+    ######################################################################
+
+    # Flag marking the first pass through the loop below
+    first_run = 1
+
+    # Get filenames of input files produced by diag_save_spi.R or
+    # diag_save_spei_all.R
+    input_filenames = (cfg[n.INPUT_FILES])[0] + "/*_" + \
+        (cfg['indexname']).lower() + "_*.nc"
+
+    for iii, spei_file in enumerate(glob.iglob(input_filenames)):
+        # Loads the file into a special structure (IRIS cube),
+        # which allows us to access the data and additional information
+        # with python.
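+        # (iris.load returns a CubeList; its first cube holds the index data.)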
+        cube = iris.load(spei_file)[0]
+        time = cube.coord('time')
+        # The data are 3D (time x latitude x longitude)
+        # To plot them, we need to reduce them to 2D or 1D
+        # First here is an average over time.
+        cube2 = cube.collapsed('time', iris.analysis.MEAN)  # 3D to 2D
+
+        if first_run == 1:
+            ncfiles = list(filter(lambda f: f.endswith('.nc'),
+                                  os.listdir((cfg[n.INPUT_FILES])[0])))
+            all_drought = {}
+            all_drought['Historic'] = np.full(cube2.data.shape + (4,) +
+                                              (len(ncfiles),), np.nan)
+            all_drought['Future'] = np.full(cube2.data.shape + (4,) +
+                                            (len(ncfiles),), np.nan)
+            first_run = 0
+        # Test if the time series reaches the end of cfg['end_year']
+        timecheck = time.units.date2num(datetime.datetime(cfg['end_year'],
+                                                          11, 30, 0, 0, 0))
+
+        if time.points[-1] > timecheck:
+            for tstype in ['Historic', 'Future']:
+                tscube = _set_tscube(cfg, cube, time, tstype)
+                drought_show = _get_drought_data(cfg, tscube)
+                all_drought[tstype][:, :, :, iii] = drought_show.data
+                _plot_single_maps(cfg, cube2, drought_show, tstype,
+                                  spei_file)
+
+    # Calculate the multi-model mean and plot it
+    _get_and_plot_multimodel(cfg, cube, all_drought,
+                             glob.glob(input_filenames))
+
+
+if __name__ == '__main__':
+    with e.run_diagnostic() as config:
+        main(config)
diff --git a/esmvaltool/diag_scripts/droughtindex/collect_drought_obs_multi.py b/esmvaltool/diag_scripts/droughtindex/collect_drought_obs_multi.py
new file mode 100644
index 0000000000..6091b507bf
--- /dev/null
+++ b/esmvaltool/diag_scripts/droughtindex/collect_drought_obs_multi.py
@@ -0,0 +1,155 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+
+"""Collects SPI or SPEI data comparing models and observations/reanalysis.
+
+Applies drought characteristics based on Martin (2018).
+
+###############################################################################
+droughtindex/collect_drought_obs_multi.py
+Author: Katja Weigel (IUP, Uni Bremen, Germany)
+EVal4CMIP project
+###############################################################################
+
+Description
+-----------
+    Collects data produced by diag_save_spi.R or diag_save_spei_all.R
+    to plot/process them further.
+
+Configuration options
+---------------------
+    indexname: "SPI" or "SPEI"
+
+###############################################################################
+
+"""
+import os
+import glob
+import iris
+import numpy as np
+import esmvaltool.diag_scripts.shared as e
+import esmvaltool.diag_scripts.shared.names as n
+from esmvaltool.diag_scripts.droughtindex.collect_drought_func import (
+    _get_drought_data, _plot_multi_model_maps, _plot_single_maps,
+    get_latlon_index, plot_time_series_spei)
+
+
+def _get_and_plot_obsmodel(cfg, cube, all_drought, all_drought_obs,
+                           input_filenames):
+    """Calculate multi-model mean and compare it to observations."""
+    lats = cube.coord('latitude').points
+    lons = cube.coord('longitude').points
+    all_drought_hist_mean = np.nanmean(all_drought, axis=-1)
+    perc_diff = ((all_drought_obs - all_drought_hist_mean)
+                 / (all_drought_obs + all_drought_hist_mean) * 200)
+
+    # Plot multi model means
+    _plot_multi_model_maps(cfg, all_drought_hist_mean, [lats, lons],
+                           input_filenames, 'Historic')
+    _plot_multi_model_maps(cfg, all_drought_obs, [lats, lons],
+                           input_filenames, 'Observations')
+    _plot_multi_model_maps(cfg, perc_diff, [lats, lons],
+                           input_filenames, 'Difference')
+
+
+def ini_time_series_plot(cfg, cube, area, filename):
+    """Set up cube for time series plot."""
+    coords = ('longitude', 'latitude')
+    if area == 'Bremen':
+        index_lat = get_latlon_index(cube.coord('latitude').points, 52, 53)
+        index_lon = get_latlon_index(cube.coord('longitude').points, 7, 9)
+    elif area == 'Nigeria':
+        index_lat = get_latlon_index(cube.coord('latitude').points, 7, 9)
+        index_lon = get_latlon_index(cube.coord('longitude').points, 8, 10)
+
+    cube_grid_areas = iris.analysis.cartography.area_weights(
+        cube[:, index_lat[0]:index_lat[-1] + 1,
+             index_lon[0]:index_lon[-1] + 1])
+    cube4 = ((cube[:, index_lat[0]:index_lat[-1] + 1,
+                   index_lon[0]:index_lon[-1] +
+                   1]).collapsed(coords, iris.analysis.MEAN,
+                                 weights=cube_grid_areas))
+
+    plot_time_series_spei(cfg, cube4, filename, area)
+
+
+def main(cfg):
+    """Run the diagnostic.
+
+    Parameters
+    ----------
+    cfg : dict
+        Configuration dictionary of the recipe.
+
+    """
+    #######################################################################
+    # Read recipe data
+    #######################################################################
+
+    # Get filenames of input files produced by diag_save_spi.R or
+    # diag_save_spei_all.R
+    # "cfg[n.INPUT_FILES]" is produced by the ESMValTool and contains
+    # information on the SPEI input files
+    input_filenames = (cfg[n.INPUT_FILES])[0] + "/*_" + \
+        (cfg['indexname']).lower() + "_*.nc"
+    first_run = 1
+    iobs = 0
+
+    # "glob.iglob" finds all files matching the pattern of
+    # "input_filenames"; the loop below runs once per matching
+    # file name (spei_file).
+    for iii, spei_file in enumerate(glob.iglob(input_filenames)):
+        # Load the file into an Iris cube
+        cube = iris.load(spei_file)[0]
+        cube.coord('latitude').guess_bounds()
+        cube.coord('longitude').guess_bounds()
+
+        # The data are 3D (time x latitude x longitude)
+        # To plot them, we need to reduce them to 2D or 1D
+        # First here is an average over time, i.e. the data needed to
+        # plot the time-mean SPEI on a map
+        cube2 = cube.collapsed('time', iris.analysis.MEAN)
+
+        # This is only possible because all data must be on the same grid
+        if first_run == 1:
+            files = os.listdir((cfg[n.INPUT_FILES])[0])
+            ncfiles = list(filter(lambda f: f.endswith('.nc'), files))
+            shape_all = cube2.data.shape + (4,) + \
+                (len(ncfiles) - 1, )
+            all_drought = np.full(shape_all, np.nan)
+            first_run = 0
+
+        ini_time_series_plot(cfg, cube, 'Bremen', spei_file)
+        ini_time_series_plot(cfg, cube, 'Nigeria', spei_file)
+
+        drought_show = _get_drought_data(cfg, cube)
+
+        # Distinguish between model and observations/reanalysis.
+        # Collect all model data in one array.
+        try:
+            dataset_name = cube.metadata.attributes['model_id']
+            all_drought[:, :, :, iii - iobs] = drought_show.data
+        except KeyError:
+            try:
+                dataset_name = cube.metadata.attributes['source_id']
+                all_drought[:, :, :, iii - iobs] = drought_show.data
+            except KeyError:
+                dataset_name = 'Observations'
+                all_drought_obs = drought_show.data
+                iobs = 1
+        print(dataset_name)
+        _plot_single_maps(cfg, cube2, drought_show, 'Historic', spei_file)
+
+    # Calculate the multi-model mean and plot it
+    _get_and_plot_obsmodel(cfg, cube, all_drought, all_drought_obs,
+                           glob.glob(input_filenames))
+
+
+if __name__ == '__main__':
+    with e.run_diagnostic() as config:
+        main(config)
diff --git a/esmvaltool/diag_scripts/droughtindex/diag_cdd.py b/esmvaltool/diag_scripts/droughtindex/diag_cdd.py
index 40d6edddea..972751444f 100644
--- a/esmvaltool/diag_scripts/droughtindex/diag_cdd.py
+++ b/esmvaltool/diag_scripts/droughtindex/diag_cdd.py
@@ -3,13 +3,16 @@
 import os
 from copy import deepcopy
 
+import cmocean.cm
 import iris
 import numpy as np
 
-from esmvaltool.diag_scripts.shared import (ProvenanceLogger,
-                                            get_diagnostic_filename,
-                                            get_plot_filename, run_diagnostic)
-from esmvaltool.diag_scripts.shared.plot import quickplot
+from esmvaltool.diag_scripts.shared import (
+    run_diagnostic,
+    save_data,
+    save_figure,
+)
+from esmvaltool.diag_scripts.shared.plot import global_pcolormesh
 
 logger = logging.getLogger(os.path.basename(__file__))
 
@@ -21,19 +24,17 @@ def save_results(cfg, cube, basename, ancestor_files):
         'caption': cube.long_name.replace('\n', ' '),
         'statistics': ['other'],
         'domains': ['global'],
-        'authors': ['berg_pe'],
+        'authors': ['berg_peter'],
         'references': ['acknow_project'],
         'ancestors': ancestor_files,
     }
-    if cfg['write_plots'] and cfg.get('quickplot'):
-        plot_file = get_plot_filename(basename, cfg)
-        quickplot(cube, plot_file, **cfg['quickplot'])
-        provenance['plot_file'] = plot_file
-    if cfg['write_netcdf']:
-        netcdf_file = get_diagnostic_filename(basename, cfg)
-        iris.save(cube, target=netcdf_file)
-        with ProvenanceLogger(cfg) as provenance_logger:
-            provenance_logger.log(netcdf_file, provenance)
+    save_data(basename, provenance, cfg, cube)
+    kwargs = dict(cfg.get('plot', {}))
+    cmap_name = kwargs.get('cmap', 'rain')
+    if cmap_name in cmocean.cm.cmap_d:
+        kwargs['cmap'] = cmocean.cm.cmap_d[cmap_name]
+    global_pcolormesh(cube, **kwargs)
+    save_figure(basename, provenance, cfg)
 
 
 def main(cfg):
diff --git a/esmvaltool/diag_scripts/droughtindex/diag_save_spi.R b/esmvaltool/diag_scripts/droughtindex/diag_save_spi.R
new file mode 100644
index 0000000000..151e2d5ed2
--- /dev/null
+++ b/esmvaltool/diag_scripts/droughtindex/diag_save_spi.R
@@ -0,0 +1,127 @@
+library(yaml)
+library(ncdf4)
+library(SPEI)
+library(RColorBrewer) # nolint
+
+getnc <- function(yml, m, lat = FALSE) {
+  id <- nc_open(yml[m][[1]]$filename, readunlim = FALSE)
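+  # Return the latitude axis if requested, otherwise the variable itself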
+  if (lat) {
+    v <- ncvar_get(id, "lat")
+  } else {
+    v <- ncvar_get(id, yml[m][[1]]$short_name)
+  }
+  nc_close(id)
+  return(v)
+}
+
+ncwritespi <- function(yml, m, data, wdir) {
+  fnam <- strsplit(yml[m][[1]]$filename, "/")[[1]]
+  pcs <- strsplit(fnam[length(fnam)], "_")[[1]]
+  pcs[which(pcs == yml[m][[1]]$short_name)] <- "spi"
+  onam <- paste0(wdir, "/", paste(pcs, collapse = "_"))
+  ncid_in <- nc_open(yml[m][[1]]$filename)
+  xdim <- ncid_in$dim[["lon"]]
+  ydim <- ncid_in$dim[["lat"]]
+  tdim <- ncid_in$dim[["time"]]
+  allatt <- ncatt_get(ncid_in, "pr")
+  fillvalue <- ncatt_get(ncid_in, "pr", "_FillValue")
+  globat <- ncatt_get(ncid_in, 0)
+  fillfloat <- 1.e+20
+  as.single(fillfloat)
+  var_spi <- ncvar_def("spi", "1", list(xdim, ydim, tdim), fillfloat)
+  idw <- nc_create(onam, var_spi)
+  ncvar_put(idw, "spi", data)
+  cntatt <- 1
+  for (thisattname in names(globat)) {
+    ncatt_put(idw, 0, thisattname, globat[[cntatt]])
+    cntatt <- cntatt + 1
+  }
+  nc_close(idw)
+  nc_close(ncid_in)
+  return(onam)
+}
+
+whfcn <- function(x, ilow, ihigh) {
+  return(length(which(x >= ilow & x < ihigh)))
+}
+
+args <- commandArgs(trailingOnly = TRUE)
+params <- read_yaml(args[1])
+metadata <- read_yaml(params$input_files)
+modfile <- names(metadata)
+wdir <- params$work_dir
+dir.create(wdir, recursive = TRUE)
+rundir <- params$run_dir
+pdir <- params$plot_dir
+dir.create(pdir, recursive = TRUE)
+var1_input <- read_yaml(params$input_files[1])
+
+nmods <- length(names(var1_input))
+
+fillfloat <- 1.e+20
+as.single(fillfloat)
+
+# setup provenance file and list
+provenance_file <- paste0(rundir, "/", "diagnostic_provenance.yml")
+provenance <- list()
+
+
+refnam <- var1_input[1][[1]]$reference_dataset
+n <- 1
+while (n <= nmods) {
+  if (var1_input[n][[1]]$dataset == refnam) break
+  n <- n + 1
+}
+nref <- n
+lat <- getnc(var1_input, nref, lat = TRUE)
+if (max(lat) > 90) {
+  print(paste0("Latitude must be [-90,90]: min=",
+               min(lat), " max=", max(lat)))
+  stop("Aborting!")
+}
+ref <- getnc(var1_input, nref, lat = FALSE)
+refmsk <- apply(ref, c(1, 2), FUN = mean, na.rm = TRUE)
+refmsk[refmsk > 10000] <- fillfloat
+refmsk[!is.na(refmsk)] <- 1
+
+xprov <- list(
+  ancestors = list(""),
+  authors = list("weigel_katja"),
+  references = list("mckee93proc"),
+  projects = list("eval4cmip"),
+  caption = "",
+  statistics = list("other"),
+  realms = list("atmos"),
+  themes = list("phys"),
+  domains = list("global")
+)
+
+for (mod in 1:nmods) {
+  v1 <- getnc(var1_input, mod)
+  d <- dim(v1)
+  v1_spi <- array(fillfloat, dim = d)
+  for (i in 1:d[1]) {
+    wh <- which(!is.na(refmsk[i, ]))
+    if (length(wh) > 0) {
+      tmp <- v1[i, wh, ]
+      v1_spi[i, wh, ] <- t(spi(t(tmp), params$smooth_month, na.rm = TRUE,
                             distribution = params$distribution)$fitted)
+    }
+  }
+  v1_spi[is.infinite(v1_spi)] <- fillfloat
+  v1_spi[is.na(v1_spi)] <- fillfloat
+  v1_spi[v1_spi > 10000] <- fillfloat
+  filename <- ncwritespi(var1_input, mod, v1_spi, wdir)
+  xprov$caption <- "SPI index per grid point."
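+  # Record provenance for the file that was just written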
+  xprov$ancestors <- list(modfile[mod])
+  provenance[[filename]] <- xprov
+}
+
+write_yaml(provenance, provenance_file)
diff --git a/esmvaltool/diag_scripts/droughtindex/diag_spei.R b/esmvaltool/diag_scripts/droughtindex/diag_spei.R
new file mode 100644
index 0000000000..7c1e90b53e
--- /dev/null
+++ b/esmvaltool/diag_scripts/droughtindex/diag_spei.R
@@ -0,0 +1,298 @@
+library(yaml)
+library(ncdf4)
+library(SPEI)
+library(RColorBrewer) # nolint
+
+leap_year <- function(year) {
+  return(ifelse((year %% 4 == 0 & year %% 100 != 0) |
+    year %% 400 == 0, TRUE, FALSE))
+}
+
+getnc <- function(yml, m, lat = FALSE) {
+  id <- nc_open(yml[m][[1]]$filename, readunlim = FALSE)
+  if (lat) {
+    v <- ncvar_get(id, "lat")
+  } else {
+    v <- ncvar_get(id, yml[m][[1]]$short_name)
+    if (yml[m][[1]]$short_name == "tas") {
+      v <- v - 273.15
+    }
+    if (yml[m][[1]]$short_name == "pr") {
+      time <- ncvar_get(id, "time")
+      tcal <- ncatt_get(id, "time", attname = "calendar")
+      tunits <- ncatt_get(id, "time", attname = "units")
+      tustr <- strsplit(tunits$value, " ")
+      stdate <- as.Date(time[1], origin = unlist(tustr)[3])
+      nddate <-
+        as.Date(time[length(time)], origin = unlist(tustr)[3])
+      if (tcal$value == "365_day") {
+        # Correct for missing leap years in nddate
+        diff <- as.numeric(nddate - stdate, units = "days")
+        dcorr <- floor((diff / 365 - diff / 365.25) * 365.25)
+        nddate <- nddate + dcorr
+      }
+      if (tcal$value == "360_day") {
+        v <- v * 30 * 24 * 3600.
+      } else {
+        cnt <- 1
+        monarr <- c(31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)
+        date <- stdate
+        while (date <= nddate) {
+          year <- as.numeric(substr(date, 1, 4))
+          lpyear <- leap_year(year)
+          month <- as.numeric(substr(date, 6, 7))
+          mdays <- monarr[month]
+          pdays <- mdays
+          if (month == 2 & lpyear == TRUE) {
+            pdays <- 29
+            if (tcal$value != "365_day") {
+              mdays <- 29
+            } else {
+              mdays <- 28
+            }
+          }
+          v[, , cnt] <- v[, , cnt] * mdays * 24 * 3600.
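+          # advance to the first day of the next month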
+ date <- date + pdays + cnt <- cnt + 1 + } + } + } + } + nc_close(id) + return(v) +} + +ncwritenew <- function(yml, m, hist, wdir, bins) { + fnam <- strsplit(yml[m][[1]]$filename, "/")[[1]] + pcs <- strsplit(fnam[length(fnam)], "_")[[1]] + pcs[which(pcs == yml[m][[1]]$short_name)] <- "spei" + onam <- paste(pcs, collapse = "_") + onam <- paste0(wdir, "/", strsplit(onam, ".nc"), "_hist.nc") + ncid_in <- nc_open(yml[m][[1]]$filename) + var <- ncid_in$var[[yml[m][[1]]$short_name]] + xdim <- ncid_in$dim[["lon"]] + ydim <- ncid_in$dim[["lat"]] + hdim <- ncdim_def("bins", "level", bins[1:(length(bins) - 1)]) + hdim2 <- ncdim_def("binsup", "level", bins[2:length(bins)]) + var_hist <- + ncvar_def("hist", "counts", list(xdim, ydim, hdim), NA) + idw <- nc_create(onam, var_hist) + ncvar_put(idw, "hist", hist) + nc_close(idw) + return(onam) +} + +whfcn <- function(x, ilow, ihigh) { + return(length(which(x >= ilow & x < ihigh))) +} + +dothornthwaite <- function(v, lat) { + print("Estimating PET with Thornthwaite method.") + dpet <- v * NA + d <- dim(dpet) + for (i in 1:d[2]) { + tmp <- v[, i, ] + tmp2 <- thornthwaite(t(tmp), rep(lat[i], d[1]), na.rm = TRUE) + d2 <- dim(tmp2) + tmp2 <- as.numeric(tmp2) + dim(tmp2) <- d2 + dpet[, i, ] <- t(tmp2) + } + return(dpet) +} + +args <- commandArgs(trailingOnly = TRUE) +params <- read_yaml(args[1]) +metadata1 <- read_yaml(params$input_files[1]) +metadata2 <- read_yaml(params$input_files[2]) +modfile1 <- names(metadata1) +modfile2 <- names(metadata2) +wdir <- params$work_dir +rundir <- params$run_dir +dir.create(wdir, recursive = TRUE) +pdir <- params$plot_dir +dir.create(pdir, recursive = TRUE) +var1_input <- read_yaml(params$input_files[1]) +var2_input <- read_yaml(params$input_files[2]) +nmods <- length(names(var1_input)) + +# setup provenance file and list +provenance_file <- paste0(rundir, "/", "diagnostic_provenance.yml") +provenance <- list() + +histbrks <- c(-99999, -2, -1.5, -1, 1, 1.5, 2, 99999) +histnams <- c( + "Extremely dry", + "Moderately dry", + "Dry", + "Neutral", + "Wet", + "Moderately wet", + "Extremely wet" +) +refnam <- var1_input[1][[1]]$reference_dataset +n <- 1 +while (n <= nmods) { + if (var1_input[n][[1]]$dataset == refnam) { + break + } + n <- n + 1 +} +nref <- n +lat <- getnc(var1_input, nref, lat = TRUE) +if (max(lat) > 90) { + print(paste0( + "Latitude must be [-90,90]: min=", + min(lat), " max=", max(lat) + )) + stop("Aborting!") +} +ref <- getnc(var1_input, nref, lat = FALSE) +refmsk <- apply(ref, c(1, 2), FUN = mean, na.rm = TRUE) +refmsk[refmsk > 10000] <- NA +refmsk[!is.na(refmsk)] <- 1 + +xprov <- list( + ancestors = list(""), + authors = list("berg_peter"), + references = list("vicente10jclim"), + projects = list("c3s-magic"), + caption = "", + statistics = list("other"), + realms = list("atmos"), + themes = list("phys"), + domains = list("global") +) + +histarr <- array(NA, c(nmods, length(histnams))) +for (mod in 1:nmods) { + lat <- getnc(var1_input, mod, TRUE) + v1 <- getnc(var1_input, mod, FALSE) + v2 <- getnc(var2_input, mod, FALSE) + if (var1_input[1][[1]]$short_name == "pr") { + prtas <- TRUE + } else { + prtas <- FALSE + } + if (prtas) { + pet <- dothornthwaite(v2, lat) + pme <- v1 - pet + } else { + pet <- dothornthwaite(v1, lat) + pme <- v2 - pet + } + print(var1_input[mod][[1]]$cmor_table) + d <- dim(pme) + pme_spei <- pme * NA + for (i in 1:d[1]) { + wh <- which(!is.na(refmsk[i, ])) + if (length(wh) > 0) { + tmp <- pme[i, wh, ] + pme_spei[i, wh, ] <- t(spei(t(tmp), 1, na.rm = TRUE)$fitted) + } + } + 
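+  # Mask infinite and out-of-range SPEI values before binning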
pme_spei[is.infinite(pme_spei)] <- NA + pme_spei[pme_spei > 10000] <- NA + hist_spei <- array(NA, c(d[1], d[2], length(histbrks) - 1)) + for (nnh in 1:(length(histbrks) - 1)) { + hist_spei[, , nnh] <- apply( + pme_spei, + c(1, 2), + FUN = whfcn, + ilow = histbrks[nnh], + ihigh = histbrks[nnh + 1] + ) + } + filename <- + ncwritenew(var1_input, mod, hist_spei, wdir, histbrks) + # Set provenance for output files + xprov$caption <- "Histogram of SPEI index per grid point." + xprov$ancestors <- list(modfile1[mod], modfile2[mod]) + provenance[[filename]] <- xprov + for (t in 1:d[3]) { + tmp <- pme_spei[, , t] + tmp[is.na(refmsk)] <- NA + pme_spei[, , t] <- tmp + } + pme_spei[is.infinite(pme_spei)] <- NA + pme_spei[pme_spei > 10000] <- NA + # Weight against latitude + h <- seq_along(histnams) * 0 + for (j in 1:d[2]) { + h <- h + hist(pme_spei[j, , ], + breaks = histbrks, + plot = FALSE + )$counts * cos(lat[j] * pi / 180.) + } + histarr[mod, ] <- h / sum(h, na.rm = TRUE) +} +filehist <- paste0(params$work_dir, "/", "histarr.rsav") +save(histarr, file = filehist) +plot_file <- paste0(params$plot_dir, "/", "histplot.png") +xprov$caption <- "Global latitude-weighted histogram of SPEI index." +xprov$ancestors <- c(modfile1, modfile2) +provenance[[plot_file]] <- xprov +provenance[[filehist]] <- xprov +write_yaml(provenance, provenance_file) + +bhistarr <- array(NA, c(nmods - 1, 7)) +marr <- c(1:nmods)[c(1:nmods) != nref] +cnt <- 1 +for (m in marr) { + bhistarr[cnt, ] <- histarr[m, ] - histarr[nref, ] + cnt <- cnt + 1 +} +parr <- c(nref, marr) + +mnam <- c(1:nmods) * NA +for (m in 1:nmods) { + mnam[m] <- var1_input[m][[1]]$dataset +} + +qual_col_pals <- + brewer.pal.info[brewer.pal.info$category == "qual", ] # nolint +col_vector <- + unlist(mapply( + brewer.pal, qual_col_pals$maxcolors, # nolint + rownames(qual_col_pals) + )) +cols <- c("black", sample(col_vector, nmods - 1)) + +png(plot_file, width = 1000, height = 500) +par( + mfrow = c(2, 1), + oma = c(3, 3, 3, 13), + mar = c(2, 1, 1, 1) +) +barplot( + histarr[parr, ], + beside = 1, + names.arg = histnams, + col = cols, + xaxs = "i" +) +box() +mtext("Probability", side = 2, line = 2.1) +barplot( + bhistarr, + beside = 1, + names.arg = histnams, + col = cols[2:nmods], + xaxs = "i" +) +box() +mtext("Absolute difference", side = 2, line = 2.1) +mtext( + "Standardized precipitation-evapotranspiration index", + outer = TRUE, + cex = 2, + font = 2 +) +par( + fig = c(0.8, .95, 0.1, 0.9), + new = T, + oma = c(1, 1, 1, 1) * 0, + mar = c(0, 0, 0, 0) +) +legend("topright", mnam[parr], fill = cols) +dev.off() diff --git a/esmvaltool/diag_scripts/droughtindex/diag_spei.r b/esmvaltool/diag_scripts/droughtindex/diag_spei.r deleted file mode 100644 index 6e79ef8466..0000000000 --- a/esmvaltool/diag_scripts/droughtindex/diag_spei.r +++ /dev/null @@ -1,243 +0,0 @@ -library(yaml) -library(ncdf4) -library(SPEI) -library(RColorBrewer) # nolint - -leap_year <- function(year) { - return(ifelse( (year %% 4 == 0 & year %% 100 != 0) | - year %% 400 == 0, TRUE, FALSE)) -} - -getnc <- function(yml, m, lat = FALSE) { - id <- nc_open(yml[m][[1]]$filename, readunlim = FALSE) - if (lat){ - v <- ncvar_get(id, "lat") - }else{ - v <- ncvar_get(id, yml[m][[1]]$short_name) - if (yml[m][[1]]$short_name == "tas") v <- v - 273.15 - if (yml[m][[1]]$short_name == "pr"){ - time <- ncvar_get(id, "time") - tcal <- ncatt_get(id, "time", attname = "calendar") - tunits <- ncatt_get(id, "time", attname = "units") - tustr <- strsplit(tunits$value, " ") - stdate <- as.Date(time[1], origin = 
unlist(tustr)[3]) - nddate <- as.Date(time[length(time)], origin = unlist(tustr)[3]) - if (tcal$value == "365_day"){ - # Correct for missing leap years in nddate - diff <- as.numeric(nddate - stdate, units = "days") - dcorr <- floor( (diff / 365 - diff / 365.25) * 365.25) - nddate <- nddate + dcorr - } - if (tcal$value == "360_day"){ - v <- v * 30 * 24 * 3600. - }else{ - cnt <- 1 - monarr <- c(31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31) - date <- stdate - while (date <= nddate){ - year <- as.numeric(substr(date, 1, 4)) - lpyear <- leap_year(year) - month <- as.numeric(substr(date, 6, 7)) - mdays <- monarr[month] - pdays <- mdays - if (month == 2 & lpyear == TRUE){ - pdays <- 29 - if (tcal$value != "365_day"){ - mdays <- 29 - }else{ - mdays <- 28 - } - } - v[,,cnt] <- v[,,cnt] * mdays * 24 * 3600. - date <- date + pdays - cnt <- cnt + 1 - } - } - } - } - nc_close(id) - return(v) -} - -ncwritenew <- function(yml, m, hist, wdir, bins){ - fnam <- strsplit(yml[m][[1]]$filename, "/")[[1]] - pcs <- strsplit(fnam[length(fnam)], "_")[[1]] - pcs[which(pcs == yml[m][[1]]$short_name)] <- "spei" - onam <- paste(pcs, collapse = "_") - onam <- paste0(wdir, "/", strsplit(onam, ".nc"), "_hist.nc") - ncid_in <- nc_open(yml[m][[1]]$filename) - var <- ncid_in$var[[yml[m][[1]]$short_name]] - xdim <- ncid_in$dim[["lon"]] - ydim <- ncid_in$dim[["lat"]] - hdim <- ncdim_def("bins", "level", bins[1:(length(bins) - 1)]) - hdim2 <- ncdim_def("binsup", "level", bins[2:length(bins)]) - var_hist <- ncvar_def("hist", "counts", list(xdim, ydim, hdim), NA) - idw <- nc_create(onam, var_hist) - ncvar_put(idw, "hist", hist) - nc_close(idw) - return(onam) -} - -whfcn <- function(x, ilow, ihigh){ - return(length(which(x >= ilow & x < ihigh))) -} - -dothornthwaite <- function(v, lat){ - print("Estimating PET with Thornthwaite method.") - dpet <- v * NA - d <- dim(dpet) - for (i in 1:d[2]){ - tmp <- v[,i,] - tmp2 <- thornthwaite(t(tmp), rep(lat[i], d[1]), na.rm = TRUE) - d2 <- dim(tmp2) - tmp2 <- as.numeric(tmp2) - dim(tmp2) <- d2 - dpet[,i,] <- t(tmp2) - } - return(dpet) -} - -args <- commandArgs(trailingOnly = TRUE) -params <- read_yaml(args[1]) -metadata1 <- read_yaml(params$input_files[1]) -metadata2 <- read_yaml(params$input_files[2]) -modfile1 <- names(metadata1) -modfile2 <- names(metadata2) -wdir <- params$work_dir -rundir <- params$run_dir -dir.create(wdir, recursive = TRUE) -pdir <- params$plot_dir -dir.create(pdir, recursive = TRUE) -var1_input <- read_yaml(params$input_files[1]) -var2_input <- read_yaml(params$input_files[2]) -nmods <- length(names(var1_input)) - -# setup provenance file and list -provenance_file <- paste0(rundir, "/", "diagnostic_provenance.yml") -provenance <- list() - -histbrks <- c(-99999, -2, -1.5, -1, 1, 1.5, 2, 99999) -histnams <- c("Extremely dry", "Moderately dry", "Dry", - "Neutral", - "Wet", "Moderately wet", "Extremely wet") -refnam <- var1_input[1][[1]]$reference_dataset -n <- 1 -while (n <= nmods){ - if (var1_input[n][[1]]$dataset == refnam) break - n <- n + 1 -} -nref <- n -lat <- getnc(var1_input, nref, lat = TRUE) -if (max(lat) > 90){ - print(paste0("Latitude must be [-90,90]: min=", - min(lat), " max=", max(lat))) - stop("Aborting!") -} -ref <- getnc(var1_input, nref, lat = FALSE) -refmsk <- apply(ref, c(1, 2), FUN = mean, na.rm = TRUE) -refmsk[refmsk > 10000] <- NA -refmsk[!is.na(refmsk)] <- 1 - -xprov <- list(ancestors = list(""), - authors = list("berg_pe"), - references = list("vicente10jclim"), - projects = list("c3s-magic"), - caption = "", - statistics = list("other"), - 
realms = list("atmos"), - themes = list("phys"), - domains = list("global")) - -histarr <- array(NA, c(nmods, length(histnams))) -for (mod in 1:nmods){ - lat <- getnc(var1_input, mod, TRUE) - v1 <- getnc(var1_input, mod, FALSE) - v2 <- getnc(var2_input, mod, FALSE) - if (var1_input[1][[1]]$short_name == "pr") prtas <- TRUE else prtas <- FALSE - if (prtas){ - pet <- dothornthwaite(v2, lat) - pme <- v1 - pet - }else{ - pet <- dothornthwaite(v1, lat) - pme <- v2 - pet - } - print(var1_input[mod][[1]]$cmor_table) - d <- dim(pme) - pme_spei <- pme * NA - for (i in 1:d[1]){ - wh <- which(!is.na(refmsk[i,])) - if (length(wh) > 0){ - tmp <- pme[i,wh,] - pme_spei[i,wh,] <- t(spei(t(tmp), 1, na.rm = TRUE)$fitted) - } - } - pme_spei[is.infinite(pme_spei)] <- NA - pme_spei[pme_spei > 10000] <- NA - hist_spei <- array(NA, c(d[1], d[2], length(histbrks) - 1)) - for (nnh in 1:(length(histbrks) - 1)){ - hist_spei[,,nnh] <- apply(pme_spei, c(1, 2), FUN = whfcn, - ilow = histbrks[nnh], - ihigh = histbrks[nnh + 1]) - } - filename <- ncwritenew(var1_input, mod, hist_spei, wdir, histbrks) - # Set provenance for output files - xprov$caption <- "Histogram of SPEI index per grid point." - xprov$ancestors <- list(modfile1[mod], modfile2[mod]) - provenance[[filename]] <- xprov - for (t in 1:d[3]){ - tmp <- pme_spei[,,t] - tmp[is.na(refmsk)] <- NA - pme_spei[,,t] <- tmp - } - pme_spei[is.infinite(pme_spei)] <- NA - pme_spei[pme_spei > 10000] <- NA - # Weight against latitude - h <- c(1:length(histnams)) * 0 - for (j in 1:d[2]){ - h <- h + hist(pme_spei[j,,], breaks = histbrks, - plot = FALSE)$counts * cos(lat[j] * pi / 180.) - } - histarr[mod,] <- h / sum(h, na.rm = TRUE) -} -filehist <- paste0(params$work_dir, "/", "histarr.rsav") -save(histarr, file = filehist) -plot_file <- paste0(params$plot_dir, "/", "histplot.png") -xprov$caption <- "Global latitude-weighted histogram of SPEI index." 
-xprov$ancestors <- list(modfile1, modfile2) -xprov[["plot_file"]] <- plot_file -provenance[[filehist]] <- xprov -write_yaml(provenance, provenance_file) - -bhistarr <- array(NA, c(nmods - 1, 7)) -marr <- c(1:nmods)[c(1:nmods) != nref] -cnt <- 1 -for (m in marr){ - bhistarr[cnt,] <- histarr[m,] - histarr[nref,] - cnt <- cnt + 1 -} -parr <- c(nref, marr) - -mnam <- c(1:nmods) * NA -for (m in 1:nmods) mnam[m] <- var1_input[m][[1]]$dataset - -qual_col_pals <- brewer.pal.info[brewer.pal.info$category == "qual",] # nolint -col_vector <- unlist(mapply(brewer.pal, qual_col_pals$maxcolors, # nolint - rownames(qual_col_pals))) -cols <- c("black", sample(col_vector, nmods - 1)) - -png(plot_file, width = 1000, height = 500) - par(mfrow = c(2, 1), oma = c(3, 3, 3, 13), mar = c(2, 1, 1, 1)) - barplot(histarr[parr,], beside = 1, names.arg = histnams, - col = cols, xaxs = "i") - box() - mtext("Probability", side = 2, line = 2.1) - barplot(bhistarr, beside = 1, names.arg = histnams, - col = cols[2:nmods], xaxs = "i") - box() - mtext("Absolute difference", side = 2, line = 2.1) - mtext("Standardized precipitation-evapotranspiration index", - outer = TRUE, cex = 2, font = 2) - par(fig = c(0.8, .95, 0.1, 0.9), new = T, oma = c(1, 1, 1, 1) * 0, - mar = c(0, 0, 0, 0)) - legend("topright", mnam[parr], fill = cols) -dev.off() diff --git a/esmvaltool/diag_scripts/droughtindex/diag_spi.R b/esmvaltool/diag_scripts/droughtindex/diag_spi.R new file mode 100644 index 0000000000..b35c2b21e9 --- /dev/null +++ b/esmvaltool/diag_scripts/droughtindex/diag_spi.R @@ -0,0 +1,212 @@ +library(yaml) +library(ncdf4) +library(SPEI) +library(RColorBrewer) # nolint + +getnc <- function(yml, m, lat = FALSE) { + id <- nc_open(yml[m][[1]]$filename, readunlim = FALSE) + if (lat) { + v <- ncvar_get(id, "lat") + } else { + v <- ncvar_get(id, yml[m][[1]]$short_name) + } + nc_close(id) + return(v) +} + +ncwritenew <- function(yml, m, hist, wdir, bins) { + fnam <- strsplit(yml[m][[1]]$filename, "/")[[1]] + pcs <- strsplit(fnam[length(fnam)], "_")[[1]] + pcs[which(pcs == yml[m][[1]]$short_name)] <- "spi" + onam <- paste(pcs, collapse = "_") + onam <- paste0(wdir, "/", strsplit(onam, ".nc"), "_hist.nc") + ncid_in <- nc_open(yml[m][[1]]$filename) + var <- ncid_in$var[[yml[m][[1]]$short_name]] + xdim <- ncid_in$dim[["lon"]] + ydim <- ncid_in$dim[["lat"]] + hdim <- ncdim_def("bins", "level", bins[1:(length(bins) - 1)]) + hdim2 <- ncdim_def("binsup", "level", bins[2:length(bins)]) + var_hist <- + ncvar_def("hist", "counts", list(xdim, ydim, hdim), NA) + idw <- nc_create(onam, var_hist) + ncvar_put(idw, "hist", hist) + nc_close(idw) + return(onam) +} + +whfcn <- function(x, ilow, ihigh) { + return(length(which(x >= ilow & x < ihigh))) +} + +args <- commandArgs(trailingOnly = TRUE) +params <- read_yaml(args[1]) +metadata <- read_yaml(params$input_files) +modfile <- names(metadata) +wdir <- params$work_dir +rundir <- params$run_dir +dir.create(wdir, recursive = TRUE) +pdir <- params$plot_dir +dir.create(pdir, recursive = TRUE) +var1_input <- read_yaml(params$input_files[1]) +nmods <- length(names(var1_input)) + +# setup provenance file and list +provenance_file <- paste0(rundir, "/", "diagnostic_provenance.yml") +provenance <- list() + +histbrks <- c(-99999, -2, -1.5, -1, 1, 1.5, 2, 99999) +histnams <- c( + "Extremely dry", + "Moderately dry", + "Dry", + "Neutral", + "Wet", + "Moderately wet", + "Extremely wet" +) +refnam <- var1_input[1][[1]]$reference_dataset +n <- 1 +while (n <= nmods) { + if (var1_input[n][[1]]$dataset == refnam) { + break + } + 
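+  # not the reference dataset, so check the next one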
n <- n + 1 +} +nref <- n +lat <- getnc(var1_input, nref, lat = TRUE) +if (max(lat) > 90) { + print(paste0( + "Latitude must be [-90,90]: min=", + min(lat), " max=", max(lat) + )) + stop("Aborting!") +} +ref <- getnc(var1_input, nref, lat = FALSE) +refmsk <- apply(ref, c(1, 2), FUN = mean, na.rm = TRUE) +refmsk[refmsk > 10000] <- NA +refmsk[!is.na(refmsk)] <- 1 + +xprov <- list( + ancestors = list(""), + authors = list("berg_peter"), + references = list("mckee93proc"), + projects = list("c3s-magic"), + caption = "", + statistics = list("other"), + realms = list("atmos"), + themes = list("phys"), + domains = list("global") +) + +histarr <- array(NA, c(nmods, length(histnams))) +for (mod in 1:nmods) { + v1 <- getnc(var1_input, mod) + print(var1_input[mod][[1]]$cmor_table) + d <- dim(v1) + v1_spi <- v1 * NA + for (i in 1:d[1]) { + wh <- which(!is.na(refmsk[i, ])) + if (length(wh) > 0) { + tmp <- v1[i, wh, ] + v1_spi[i, wh, ] <- t(spi(t(tmp), 1, + na.rm = TRUE, + distribution = "PearsonIII" + )$fitted) + } + } + v1_spi[is.infinite(v1_spi)] <- NA + v1_spi[v1_spi > 10000] <- NA + hist_spi <- array(NA, c(d[1], d[2], length(histbrks) - 1)) + for (nnh in 1:(length(histbrks) - 1)) { + hist_spi[, , nnh] <- apply(v1_spi, + c(1, 2), + FUN = whfcn, + ilow = histbrks[nnh], + ihigh = histbrks[nnh + 1] + ) + } + filename <- ncwritenew(var1_input, mod, hist_spi, wdir, histbrks) + # Set provenance for output files + xprov$caption <- "Histogram of SPI index per grid point." + xprov$ancestors <- modfile[mod] + provenance[[filename]] <- xprov + # Weight against latitude + h <- seq_along(histnams) * 0 + for (j in 1:d[2]) { + h <- h + hist(v1_spi[j, , ], + breaks = histbrks, + plot = FALSE + )$counts * cos(lat[j] * pi / 180.) + } + histarr[mod, ] <- h / sum(h, na.rm = TRUE) +} +filehist <- paste0(params$work_dir, "/", "histarr.rsav") +save(histarr, file = filehist) +plot_file <- paste0(params$plot_dir, "/", "histplot.png") +xprov$caption <- "Global latitude-weighted histogram of SPI index." 
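+# The histogram plot and the saved array share this provenance record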
+xprov$ancestors <- modfile +provenance[[plot_file]] <- xprov +provenance[[filehist]] <- xprov +write_yaml(provenance, provenance_file) + +bhistarr <- array(NA, c(nmods - 1, 7)) +marr <- c(1:nmods)[c(1:nmods) != nref] +cnt <- 1 +for (m in marr) { + bhistarr[cnt, ] <- histarr[m, ] - histarr[nref, ] + cnt <- cnt + 1 +} +parr <- c(nref, marr) + +mnam <- c(1:nmods) * NA +for (m in 1:nmods) { + mnam[m] <- var1_input[m][[1]]$dataset +} + +qual_col_pals <- + brewer.pal.info[brewer.pal.info$category == "qual", ] # nolint +col_vector <- + unlist(mapply( + brewer.pal, qual_col_pals$maxcolors, # nolint + rownames(qual_col_pals) + )) +cols <- c("black", sample(col_vector, nmods - 1)) + +png(plot_file, width = 1000, height = 500) +par( + mfrow = c(2, 1), + oma = c(3, 3, 3, 13), + mar = c(2, 1, 1, 1) +) +barplot( + histarr[parr, ], + beside = 1, + names.arg = histnams, + col = cols, + xaxs = "i" +) +box() +mtext("Probability", side = 2, line = 2.1) +barplot( + bhistarr, + beside = 1, + names.arg = histnams, + col = cols[2:nmods], + xaxs = "i" +) +box() +mtext("Absolute difference", side = 2, line = 2.1) +mtext( + "Standardized precipitation index", + outer = TRUE, + cex = 2, + font = 2 +) +par( + fig = c(0.8, .95, 0.1, 0.9), + new = T, + oma = c(0, 0, 0, 0), + mar = c(0, 0, 0, 0) +) +legend("topright", mnam[parr], fill = cols) +dev.off() diff --git a/esmvaltool/diag_scripts/droughtindex/diag_spi.r b/esmvaltool/diag_scripts/droughtindex/diag_spi.r deleted file mode 100644 index 5319d541de..0000000000 --- a/esmvaltool/diag_scripts/droughtindex/diag_spi.r +++ /dev/null @@ -1,164 +0,0 @@ -library(yaml) -library(ncdf4) -library(SPEI) -library(RColorBrewer) # nolint - -getnc <- function(yml, m, lat = FALSE) { - id <- nc_open(yml[m][[1]]$filename, readunlim = FALSE) - if (lat){ - v <- ncvar_get(id, "lat") - }else{ - v <- ncvar_get(id, yml[m][[1]]$short_name) - } - nc_close(id) - return(v) -} - -ncwritenew <- function(yml, m, hist, wdir, bins){ - fnam <- strsplit(yml[m][[1]]$filename, "/")[[1]] - pcs <- strsplit(fnam[length(fnam)], "_")[[1]] - pcs[which(pcs == yml[m][[1]]$short_name)] <- "spi" - onam <- paste(pcs, collapse = "_") - onam <- paste0(wdir, "/", strsplit(onam, ".nc"), "_hist.nc") - ncid_in <- nc_open(yml[m][[1]]$filename) - var <- ncid_in$var[[yml[m][[1]]$short_name]] - xdim <- ncid_in$dim[["lon"]] - ydim <- ncid_in$dim[["lat"]] - hdim <- ncdim_def("bins", "level", bins[1:(length(bins) - 1)]) - hdim2 <- ncdim_def("binsup", "level", bins[2:length(bins)]) - var_hist <- ncvar_def("hist", "counts", list(xdim, ydim, hdim), NA) - idw <- nc_create(onam, var_hist) - ncvar_put(idw, "hist", hist) - nc_close(idw) - return(onam) -} - -whfcn <- function(x, ilow, ihigh){ - return(length(which(x >= ilow & x < ihigh))) -} - -args <- commandArgs(trailingOnly = TRUE) -params <- read_yaml(args[1]) -metadata <- read_yaml(params$input_files) -modfile <- names(metadata) -wdir <- params$work_dir -rundir <- params$run_dir -dir.create(wdir, recursive = TRUE) -pdir <- params$plot_dir -dir.create(pdir, recursive = TRUE) -var1_input <- read_yaml(params$input_files[1]) -nmods <- length(names(var1_input)) - -# setup provenance file and list -provenance_file <- paste0(rundir, "/", "diagnostic_provenance.yml") -provenance <- list() - -histbrks <- c(-99999, -2, -1.5, -1, 1, 1.5, 2, 99999) -histnams <- c("Extremely dry", "Moderately dry", "Dry", - "Neutral", - "Wet", "Moderately wet", "Extremely wet") -refnam <- var1_input[1][[1]]$reference_dataset -n <- 1 -while (n <= nmods){ - if (var1_input[n][[1]]$dataset == refnam) break - n <- 
n + 1 -} -nref <- n -lat <- getnc(var1_input, nref, lat = TRUE) -if (max(lat) > 90){ - print(paste0("Latitude must be [-90,90]: min=", - min(lat), " max=", max(lat))) - stop("Aborting!") -} -ref <- getnc(var1_input, nref, lat = FALSE) -refmsk <- apply(ref, c(1, 2), FUN = mean, na.rm = TRUE) -refmsk[refmsk > 10000] <- NA -refmsk[!is.na(refmsk)] <- 1 - -xprov <- list(ancestors = list(""), - authors = list("berg_pe"), - references = list("mckee93"), - projects = list("c3s-magic"), - caption = "", - statistics = list("other"), - realms = list("atmos"), - themes = list("phys"), - domains = list("global")) - -histarr <- array(NA, c(nmods, length(histnams))) -for (mod in 1:nmods){ - v1 <- getnc(var1_input, mod) - print(var1_input[mod][[1]]$cmor_table) - d <- dim(v1) - v1_spi <- v1 * NA - for (i in 1:d[1]){ - wh <- which(!is.na(refmsk[i,])) - if (length(wh) > 0){ - tmp <- v1[i,wh,] - v1_spi[i,wh,] <- t(spi(t(tmp), 1, na.rm = TRUE, - distribution = "PearsonIII")$fitted) - } - } - v1_spi[is.infinite(v1_spi)] <- NA - v1_spi[v1_spi > 10000] <- NA - hist_spi <- array(NA, c(d[1], d[2], length(histbrks) - 1)) - for (nnh in 1:(length(histbrks) - 1)){ - hist_spi[,,nnh] <- apply(v1_spi, c(1, 2), FUN = whfcn, - ilow = histbrks[nnh], - ihigh = histbrks[nnh + 1]) - } - filename <- ncwritenew(var1_input, mod, hist_spi, wdir, histbrks) - # Set provenance for output files - xprov$caption <- "Histogram of SPI index per grid point." - xprov$ancestors <- modfile[mod] - provenance[[filename]] <- xprov - # Weight against latitude - h <- c(1:length(histnams)) * 0 - for (j in 1:d[2]){ - h <- h + hist(v1_spi[j,,], breaks = histbrks, - plot = FALSE)$counts * cos(lat[j] * pi / 180.) - } - histarr[mod, ] <- h / sum(h, na.rm = TRUE) -} -filehist <- paste0(params$work_dir, "/", "histarr.rsav") -save(histarr, file = filehist) -plot_file <- paste0(params$plot_dir, "/", "histplot.png") -xprov$caption <- "Global latitude-weighted histogram of SPI index." 
-xprov$ancestors <- list(modfile) -xprov[["plot_file"]] <- plot_file -provenance[[filehist]] <- xprov -write_yaml(provenance, provenance_file) - -bhistarr <- array(NA, c(nmods - 1, 7)) -marr <- c(1:nmods)[c(1:nmods) != nref] -cnt <- 1 -for (m in marr){ - bhistarr[cnt, ] <- histarr[m, ] - histarr[nref, ] - cnt <- cnt + 1 -} -parr <- c(nref, marr) - -mnam <- c(1:nmods) * NA -for (m in 1:nmods) mnam[m] <- var1_input[m][[1]]$dataset - -qual_col_pals <- brewer.pal.info[brewer.pal.info$category == "qual", ] # nolint -col_vector <- unlist(mapply(brewer.pal, qual_col_pals$maxcolors, # nolint - rownames(qual_col_pals))) -cols <- c("black", sample(col_vector, nmods - 1)) - -png(plot_file, width = 1000, height = 500) - par(mfrow = c(2, 1), oma = c(3, 3, 3, 13), mar = c(2, 1, 1, 1)) - barplot(histarr[parr, ], beside = 1, names.arg = histnams, - col = cols, xaxs = "i") - box() - mtext("Probability", side = 2, line = 2.1) - barplot(bhistarr, beside = 1, names.arg = histnams, - col = cols[2:nmods], xaxs = "i") - box() - mtext("Absolute difference", side = 2, line = 2.1) - mtext("Standardized precipitation index", outer = TRUE, - cex = 2, font = 2) - par(fig = c(0.8, .95, 0.1, 0.9), new = T, oma = c(0, 0, 0, 0), - mar = c(0, 0, 0, 0)) - legend("topright", mnam[parr], fill = cols) -dev.off() diff --git a/esmvaltool/diag_scripts/emergent_constraints/__init__.py b/esmvaltool/diag_scripts/emergent_constraints/__init__.py index d9015ad44b..5a03750bbe 100644 --- a/esmvaltool/diag_scripts/emergent_constraints/__init__.py +++ b/esmvaltool/diag_scripts/emergent_constraints/__init__.py @@ -1,47 +1,1578 @@ """Convenience functions for emergent constraints diagnostics.""" import logging +import os +from copy import deepcopy +from pprint import pformat +import iris +import iris.pandas +import matplotlib.pyplot as plt import numpy as np -from scipy import integrate, stats +import pandas as pd +import scipy +import seaborn as sns +import yaml +from packaging.version import Version +from scipy import integrate +from scipy.stats import linregress +if Version(scipy.version.version) < Version('1.14.0'): + from scipy.integrate import simps as simpson +else: + from scipy.integrate import simpson + +from esmvaltool.diag_scripts.shared import ( + ProvenanceLogger, + get_diagnostic_filename, + get_plot_filename, + io, +) logger = logging.getLogger(__name__) +ALLOWED_VAR_TYPES = [ + 'feature', + 'label', + 'prediction_input', + 'prediction_input_error', +] +COLOR_COMBINED_GROUPS = 'gray' +LEGEND_KWARGS = { + 'loc': 'center left', + 'bbox_to_anchor': [1.05, 0.5], + 'borderaxespad': 0.0, +} +PANDAS_PRINT_OPTIONS = [ + 'display.max_rows', None, + 'display.max_colwidth', None, +] + + +def _check_x_y_arrays(x_array, y_array): + """Ensure that the X and Y arrays have correct shapes.""" + x_array = np.ma.array(x_array) + y_array = np.ma.array(y_array) + + # Check shapes + if x_array.ndim != 1: + raise ValueError( + f"Expected 1D array for X training data, got {x_array.ndim:d}D " + f"array") + if y_array.ndim != 1: + raise ValueError( + f"Expected 1D array for Y training data, got {y_array.ndim:d}D " + f"array") + if x_array.shape != y_array.shape: + raise ValueError( + f"Expected identical shapes for X and Y training data, got " + f"{x_array.shape} and {y_array.shape}, respectively") + + # Remove masked values + mask = np.ma.getmaskarray(x_array) + mask |= np.ma.getmaskarray(y_array) + x_array = np.array(x_array[~mask]) + y_array = np.array(y_array[~mask]) + + return (x_array, y_array) + + +def _add_column(data_frame, series, 
column_name): + """Add column to :class:`pandas.DataFrame` (expands index if necessary).""" + rows_to_add = [ + pd.Series(name=row, dtype=np.float64).to_frame().T for row in + series.index.difference(data_frame.index) + ] + data_frame = pd.concat([data_frame] + rows_to_add) + if column_name in data_frame.columns: + for row in series.index: + if np.isnan(data_frame.loc[row, column_name]): + data_frame.loc[row, column_name] = series.loc[row] + else: + if not np.isclose(data_frame.loc[row, column_name], + series.loc[row]): + raise ValueError( + f"Got duplicate data for tag '{column_name}' of " + f"'{row}': {series.loc[row]:e} and " + f"{data_frame.loc[row, column_name]:e}") + else: + data_frame[column_name] = series + return data_frame + + +def _crop_data_frame(data_frame, ref_data_frame, data_name, ref_data_name): + """Crop columns of a data_frame so that it matches a given reference.""" + diff_not_in_data_frame = list( + ref_data_frame.columns.difference(data_frame.columns)) + if diff_not_in_data_frame: + raise ValueError( + f"No '{data_name}' given for tags {diff_not_in_data_frame}") + diff_not_in_ref = list( + data_frame.columns.difference(ref_data_frame.columns)) + if diff_not_in_ref: + logger.warning( + "Ignoring '%s' of tags %s: no corresponding '%s' data available", + data_name, diff_not_in_ref, ref_data_name) + data_frame = data_frame[ref_data_frame.columns] + return data_frame + + +def _check_data_frames(features, label, pred_input, pred_input_err): + """Check indexes and columns of the input data.""" + if not list(features.columns): + raise ValueError("Expected at least one feature") + if len(label.columns) != 1: + raise ValueError( + f"Expected exactly 1 'label' variable, got {len(label.columns):d}") + + # Compare features and label + if list(features.index) != list(label.index): + raise ValueError( + f"Expected identical datasets (climate models; independent " + f"observations) for 'feature' and 'label', got " + f"{features.index.values} and {label.index.values}") + if len(features.index) < 2: + raise ValueError("Expected at least two training points for features") + + # Compare features and prediction input data + pred_input = _crop_data_frame(pred_input, features, 'prediction_input', + 'feature') + pred_input_err = _crop_data_frame(pred_input_err, features, + 'prediction_input_error', 'feature') + + # Compare prediction_input and prediction_input_error + if not list(pred_input.index): + raise ValueError("Expected at least one prediction input point") + if list(pred_input.index) != list(pred_input_err.index): + raise ValueError( + f"Expected identical training points for 'prediction_input' and " + f"'prediction_input_error', got {pred_input.index.values} " + f"and {pred_input_err.index.values}") + + return (features, label, pred_input, pred_input_err) + + +def _combine_dicts(old_dict, new_dict): + """Combine two :obj:`dict` by adding values for identical keys to lists.""" + old_dict = deepcopy(old_dict) + new_dict = deepcopy(new_dict) + for (key, val) in new_dict.items(): + if key not in old_dict: + old_dict[key] = val + continue + if isinstance(old_dict[key], list): + if not isinstance(val, list): + val = [val] + old_dict[key] = list(set([*old_dict[key], *val])) + else: + if not isinstance(val, list): + if not np.array_equal(val, old_dict[key]): + old_dict[key] = [old_dict[key], val] + else: + old_dict[key] = list(set([old_dict[key], *val])) + return old_dict + + +def _get_additional_data(additional_data, recipe): + """Get :class:`iris.cube.CubeList` from additional data.""" + 
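+    # 'additional_data' is an optional list of metadata dicts from the
+    # recipe; each entry is converted to a cube tagged with the recipe file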
if additional_data is None: + return iris.cube.CubeList() + cubes = _metadata_list_to_cube_list(additional_data, 'additional_data') + for cube in cubes: + cube.attributes['filename'] = recipe + return cubes + + +def _get_attributes(cubes): + """Extract attributes for features and labels.""" + attributes = {} + + # Extract attributes + for cube in cubes: + cube_attrs = cube.attributes + tag = cube_attrs['tag'] + attributes.setdefault(tag, {}) + if cube_attrs['var_type'] in ('feature', 'label'): + attributes[tag] = _combine_dicts(attributes[tag], + _metadata_to_dict(cube.metadata)) + elif cube_attrs['var_type'] in ('prediction_input', + 'prediction_input_error'): + attributes[tag] = _combine_dicts( + attributes[tag], {'filename': cube_attrs['filename']}) + else: + raise ValueError( + f"File '{cube_attrs['filename']}' has invalid var_type " + f"'{cube_attrs['var_type']}'") + + # Set default attributes and remove lengthy 'provenance' entry + for tag in attributes: + attributes[tag].pop('provenance', None) + attributes[tag].setdefault('plot_title', f"Emergent constraint {tag}") + if 'units' in attributes[tag]: + axis_label = f"{tag} [{attributes[tag]['units']}]" + attributes[tag].setdefault('plot_xlabel', axis_label) + attributes[tag].setdefault('plot_ylabel', axis_label) + else: + attributes[tag].setdefault('plot_xlabel', tag) + attributes[tag].setdefault('plot_ylabel', tag) + attributes[tag].setdefault('plot_xlim', None) + attributes[tag].setdefault('plot_ylim', None) + + return attributes + + +def _get_cube_list(input_files, + recipe, + additional_data=None, + external_file=None, + merge_identical_pred_input=True): + """Get :class:`iris.cube.CubeList` of input files.""" + cubes = iris.cube.CubeList() + + # Input files + for filename in input_files: + logger.info("Loading '%s'", filename) + cube = _load_cube_with_dataset_coord(filename) + cube.attributes['filename'] = filename + (feature_cube, + prediction_cube) = _split_cube(cube, merge_identical_pred_input) + if feature_cube is not None: + cubes.append(feature_cube) + if prediction_cube is not None: + cubes.append(prediction_cube) + + # Additional data + cubes.extend(_get_additional_data(additional_data, recipe)) + + # External file + cubes.extend(_get_external_cube_list(external_file)) + + # Check metadata of cubes + for cube in cubes: + check_metadata(cube.attributes) + + return cubes + + +def _get_external_cube_list(external_file): + """Get external :class:`iris.cube.CubeList`.""" + if external_file is None: + return iris.cube.CubeList() + with open(external_file, 'r') as infile: + metadata_list = yaml.safe_load(infile) + cubes = _metadata_list_to_cube_list(metadata_list, external_file) + for cube in cubes: + cube.attributes['filename'] = external_file + return cubes + + +def _get_external_file(filepath, auxiliary_data_dir): + """Get full path to external file (if available).""" + if not filepath: + return None + filepath = os.path.expanduser(os.path.expandvars(filepath)) + if not os.path.isabs(filepath): + filepath = os.path.join(auxiliary_data_dir, filepath) + if not os.path.isfile(filepath): + raise FileNotFoundError( + f"Desired external file '{filepath}' does not exist") + logger.info("Found external file '%s'", filepath) + return filepath + + +def _get_data_frame(var_type, cubes, label_all_data, group_by=None): + """Extract :class:`pandas.DataFrame` for a given ``var_type``.""" + data_frame = pd.DataFrame() + for cube in cubes: + cube_attrs = cube.attributes + if var_type != cube_attrs['var_type']: + continue + if var_type in 
('feature', 'label'): + if group_by is not None and group_by not in cube_attrs: + raise AttributeError( + f"Group attribute '{group_by}' not available in input " + f"file '{cube_attrs['filename']}'") + group = cube_attrs.get(group_by, label_all_data) + index = pd.MultiIndex.from_product( + [[group], cube.coord('dataset').points], + names=[group_by, 'dataset']) + else: + index = cube.coord('dataset').points + series = pd.Series(data=cube.data, index=index) + data_frame = _add_column(data_frame, series, cube_attrs['tag']) + return data_frame + + +def _metadata_to_dict(metadata): + """Convert :class:`iris.cube.CubeMetadata` to :obj:`dict`.""" + new_dict = dict(metadata.attributes) + other_keys = [ + 'standard_name', + 'long_name', + 'var_name', + 'units', + 'cell_methods', + ] + for key in other_keys: + new_dict[key] = getattr(metadata, key) + return new_dict + + +def _split_cube(cube, merge_identical_pred_input=True): + """Split cube in features and prediction_input.""" + if not cube.attributes.get('reference_dataset'): + return (cube, None) + + # Get feature and prediction_input datasets + features_datasets = [] + predicton_datasets = [] + references = cube.attributes['reference_dataset'] + for dataset in cube.coord('dataset').points: + if dataset in references: + predicton_datasets.append(dataset) + logger.info( + "Using dataset '%s' as prediction_input for variable '%s' " + "with index %d", dataset, cube.var_name, + len(predicton_datasets) - 1) + else: + features_datasets.append(dataset) + + # Extract cubes + feature_cube = cube.extract(iris.Constraint(dataset=features_datasets)) + prediction_cube = cube.extract(iris.Constraint(dataset=predicton_datasets)) + feature_cube.attributes['var_type'] = 'feature' + prediction_cube.attributes['var_type'] = 'prediction_input' + + # Merge identical prediction_input if desired + if merge_identical_pred_input: + (_, unique_idx) = np.unique(prediction_cube.data, return_index=True) + diff = len(prediction_cube.coord('dataset').points) - len(unique_idx) + if diff > 0: + prediction_cube = prediction_cube[unique_idx] + logger.info( + "Removed %d identical prediction_input points for variable " + "'%s'", diff, prediction_cube.var_name) + + # Set new index for prediction input + prediction_cube.coord('dataset').points = np.arange( + len(prediction_cube.coord('dataset').points)) + return (feature_cube, prediction_cube) + + +def _cube_to_dataset_coord(cube): + """Convert :class:`iris.cube.Cube` to :class:`iris.coords.AuxCoord`.""" + if cube.ndim == 1: + datasets = cube.data + elif cube.ndim == 2: + cube.data = cube.data.astype(str, casting='same_kind') + datasets = [''.join(d.compressed()) for d in cube.data] + else: + raise ValueError( + f"Only 1D and 2D cubes supported, got {cube.ndim:d}D cube") + return iris.coords.AuxCoord(datasets, + var_name='dataset', + long_name='dataset') + + +def _get_first_cube_with_coord(cubes, accepted_coord_names): + """Load single cube of :class:`iris.cube.CubeList` with specific coords.""" + returned_cube = None + returned_coord = None + for cube in cubes: + for coord_name in accepted_coord_names: + try: + coord = cube.coord(coord_name) + returned_cube = cube + returned_coord = coord + break + except iris.exceptions.CoordinateNotFoundError: + pass + if returned_cube is not None: + break + else: + raise ValueError( + f"No cube of {cubes} contains 'dataset' coordinate (i.e. 
one of " + f"{accepted_coord_names})") + return (returned_cube, returned_coord) + + +def _load_cube_with_dataset_coord(filename): + """Load cube with single ``dataset``-like coordinate. + + Files created by NCL cannot be read using a simple + :func:`iris.load_cube`. + + """ + cubes = iris.load(filename) + accepted_coord_names = ('dataset', 'model') + + # Handle single cube + if len(cubes) == 1: + (cube, coord) = _get_first_cube_with_coord(cubes, accepted_coord_names) + if cube.ndim != 1: + raise ValueError( + f"Only 1D cubes supported, got {cube.ndim:d}D cube in file " + f"'{filename}'") + coord.var_name = 'dataset' + coord.standard_name = None + coord.long_name = 'dataset' + return cube + + # At most two cubes are supported + if len(cubes) > 2: + raise ValueError( + f"Loading NCL file '{filename}' failed, at most 2 cubes are " + f"supported, got {len(cubes):d}") + + # Get 'model' or 'dataset' cube + dataset_cube = None + for cube in cubes: + if cube.var_name in accepted_coord_names: + logger.debug("Found coordinate cube '%s'", cube.var_name) + dataset_cube = cube + else: + data_cube = cube + if dataset_cube is None: + raise ValueError( + f"No 'dataset' coordinate (one of {accepted_coord_names}) in " + f"file '{filename}' available") + + # Create new coordinate + if data_cube.ndim != 1: + raise ValueError( + f"Only 1D cubes supported, got {data_cube.ndim:d}D cube in file " + f"'{filename}'") + if data_cube.shape[0] != dataset_cube.shape[0]: + raise ValueError( + f"Got differing sizes for first dimension of data cube " + f"({data_cube.shape[0]:d}) and dataset cube " + f"({dataset_cube.shape[0]:d}) in file '{filename}'") + aux_coord = _cube_to_dataset_coord(dataset_cube) + data_cube.add_aux_coord(aux_coord, 0) + return data_cube + + +def _create_scatterplot(x_data, + y_data, + numbers_as_markers=True, + plot_regression_line_mean=False, + axes=None, + **kwargs): + """Create single scatterplot including regression line.""" + if axes is None: + (_, axes) = plt.subplots() + + # Scatterplots + scatter_kwargs = dict(kwargs) + scatter_kwargs.pop('label', None) + for (idx, _) in enumerate(x_data): + if numbers_as_markers: + axes.text(x_data[idx], + y_data[idx], + x_data.index.get_level_values(-1)[idx], + size=7, + **scatter_kwargs) + else: + axes.scatter(x_data[idx], y_data[idx], **scatter_kwargs) + + # Regression line + line_kwargs = {**kwargs, 'linestyle': '-'} + fill_between_kwargs = {**kwargs, 'alpha': 0.3} + fill_between_kwargs.pop('label', None) + if plot_regression_line_mean: + mean_kwargs = {**kwargs, 'marker': 'o'} + mean_kwargs.pop('label', None) + mean_kwargs.pop('linestyle', None) + else: + mean_kwargs = None + axes = _create_regplot(x_data, + y_data, + axes=axes, + line_kwargs=line_kwargs, + fill_between_kwargs=fill_between_kwargs, + mean_kwargs=mean_kwargs) + return axes + + +def _create_pred_input_plot(x_pred, + x_pred_error, + axes, + vline_kwargs=None, + vspan_kwargs=None): + """Create plot for prediction input data (vertical lines).""" + if vline_kwargs is None: + vline_kwargs = {'color': 'k', 'linestyle': ':', 'label': 'Observation'} + if vspan_kwargs is None: + vspan_kwargs = {'color': 'k', 'alpha': 0.1} + x_pred = x_pred[0] + x_pred_error = x_pred_error[0] + axes.axvline(x_pred, **vline_kwargs) + axes.axvspan(x_pred - x_pred_error, x_pred + x_pred_error, **vspan_kwargs) + return axes + + +def _create_pred_output_plot(x_data, + y_data, + x_pred, + x_pred_error, + axes, + hline_kwargs=None): + """Create plot for prediction input data (vertical lines).""" + if hline_kwargs is None: 
+ + +def _create_pred_output_plot(x_data, + y_data, + x_pred, + x_pred_error, + axes, + hline_kwargs=None): + """Create plot for prediction output data (horizontal line).""" + if hline_kwargs is None: + hline_kwargs = {'color': 'k', 'linestyle': ':'} + (_, y_mean, _) = get_constraint(x_data, y_data, x_pred, x_pred_error) + axes.axhline(y_mean, **hline_kwargs) + return axes + + +def _create_regplot(x_data, + y_data, + axes=None, + line_kwargs=None, + fill_between_kwargs=None, + mean_kwargs=None): + """Create single regression line plot.""" + if axes is None: + (_, axes) = plt.subplots() + if line_kwargs is None: + line_kwargs = {'linestyle': '-', 'label': 'Linear regression'} + if fill_between_kwargs is None: + fill_between_kwargs = {'alpha': 0.3} + + # Create regression line + reg = regression_line(x_data, y_data) + + # Add R2 and p-value to label if possible + text = rf"$R^2={reg['rvalue']**2:.2f}, p={reg['pvalue']:.4f}$" + if 'label' in line_kwargs: + line_kwargs['label'] += rf' ({text})' + else: + if reg['rvalue'] < 0.0: + axes.text(0.62, 0.93, text, transform=axes.transAxes) + else: + axes.text(0.02, 0.93, text, transform=axes.transAxes) + + # Plot regression + axes.plot(reg['x'], reg['y'], **line_kwargs) + axes.fill_between(reg['x'], reg['y_minus_err'], reg['y_plus_err'], + **fill_between_kwargs) + + # Plot means if desired + if mean_kwargs is not None: + x_mean = np.mean(reg['x']) + y_mean = np.mean(reg['y']) + axes.scatter(x_mean, y_mean, **mean_kwargs) + return axes + + +def _get_pandas_cube(pandas_object): + """Convert :mod:`pandas` object to cube and fix coordinates.""" + cube = iris.pandas.as_cube(pandas_object) + for coord_name in ('index', 'columns'): + try: + names = getattr(pandas_object, coord_name).names + except AttributeError: + continue + coord = cube.coord(coord_name) + if not np.issubdtype(coord.dtype, np.number): + coord.points = coord.points.astype(str) + if coord.bounds is not None: + coord.bounds = coord.bounds.astype(str) + names = [n for n in names if n is not None] + if not names: + continue + new_coord_name = '-'.join(names) + coord.var_name = new_coord_name + coord.long_name = new_coord_name + return cube + + +def _metadata_list_to_cube_list(metadata_list, source): + """Convert :obj:`list` of :obj:`dict` to :class:`iris.cube.CubeList`.""" + cubes = iris.cube.CubeList() + for metadata in metadata_list: + for attr in ('data', 'dataset'): + if attr not in metadata: + raise AttributeError( + f"Entry {metadata} from source '{source}' does not " + f"contain necessary attribute '{attr}'") + aux_coord = iris.coords.AuxCoord(metadata.pop('dataset'), + var_name='dataset', + long_name='dataset') + data_of_cube = metadata.pop('data') + if data_of_cube is None: + data_of_cube = np.nan + cube = iris.cube.Cube(data_of_cube, + aux_coords_and_dims=[(aux_coord, ())]) + for key in ('var_name', 'standard_name', 'long_name', 'units'): + if key in metadata: + setattr(cube, key, metadata.pop(key)) + cube.attributes = metadata + cubes.append(cube) + return cubes + + +def _gaussian_pdf(x_val, x_mean, x_std): + """Return Gaussian probability density.""" + norm = np.sqrt(2.0 * np.pi * x_std**2) + return np.exp(-(x_val - x_mean)**2 / 2.0 / x_std**2) / norm
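``_gaussian_pdf`` evaluates the normal density exp(-(x - mean)**2 / (2 * std**2)) / sqrt(2 * pi * std**2). A short sanity check (a sketch, assuming SciPy is available and the private helper is importable) against the reference implementation in :mod:`scipy.stats`:

    import numpy as np
    from scipy.stats import norm

    x = np.linspace(-3.0, 3.0, 7)
    # Should agree with scipy's normal PDF to numerical precision.
    np.testing.assert_allclose(_gaussian_pdf(x, 0.0, 1.0),
                               norm.pdf(x, loc=0.0, scale=1.0))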
+ + +def _get_target_pdf(x_data, + y_data, + obs_mean, + obs_std, + n_points=1000, + necessary_p_value=None): + """Get PDF of target variable including linear regression information.""" + (x_data, y_data) = _check_x_y_arrays(x_data, y_data) + spe = standard_prediction_error(x_data, y_data) + reg = linregress(x_data, y_data) + + # Get evenly spaced range of y + y_range = 1.5 * (np.max(y_data) - np.min(y_data)) + y_lin = np.linspace( + np.min(y_data) - y_range, + np.max(y_data) + y_range, n_points) + + # Use unconstrained value if desired and necessary + if necessary_p_value is not None: + if reg.pvalue > necessary_p_value: + y_pdf = _gaussian_pdf(y_lin, np.mean(y_data), np.std(y_data)) + return (y_lin, y_pdf, reg) + + # Helper functions for calculation of constrained target variable + def obs_pdf(x_new): + """Return PDF of observations P(x).""" + return _gaussian_pdf(x_new, obs_mean, obs_std) + + def cond_pdf(x_new, y_new): + """Return conditional PDF P(y|x).""" + y_pred = reg.slope * x_new + reg.intercept + y_std = spe(x_new) + return _gaussian_pdf(y_new, y_pred, y_std) + + def comb_pdf(x_new, y_new): + """Return combined PDF P(y,x).""" + return obs_pdf(x_new) * cond_pdf(x_new, y_new) + + # PDF of target variable P(y) + x_range = 3 * obs_std + y_pdf = [ + integrate.quad(comb_pdf, + obs_mean - x_range, + obs_mean + x_range, + args=(y, ))[0] for y in y_lin + ] + return (y_lin, np.array(y_pdf), reg) + + +def check_metadata(metadata, allowed_var_types=None): + """Check metadata. + + Parameters + ---------- + metadata : dict + Metadata to check. + allowed_var_types : list of str, optional + Allowed var_types, defaults to ``ALLOWED_VAR_TYPES``. + + Raises + ------ + KeyError + Metadata does not contain necessary keys ``'var_type'`` and ``'tag'``. + ValueError + Got invalid value for key ``'var_type'``. + + """ + if allowed_var_types is None: + allowed_var_types = ALLOWED_VAR_TYPES + filename = metadata.get('filename', metadata) + for key in ('var_type', 'tag'): + if key not in metadata: + raise KeyError( + f"Necessary key '{key}' not given in metadata of file " + f"'{filename}'") + if metadata['var_type'] not in allowed_var_types: + raise ValueError( + f"Expected one of {allowed_var_types} for 'var_type' of file " + f"'{filename}', got '{metadata['var_type']}'") + + +def get_input_files(cfg, patterns=None, ignore_patterns=None): + """Get input files. + + Parameters + ---------- + cfg : dict + Recipe configuration. + patterns : list of str, optional + Use only ancestor files that match these patterns as input files. + ignore_patterns : list of str, optional + Ignore input files that match these patterns. + + Returns + ------- + list of str + Input files. + + """ + input_files = [] + + # Include only files that match patterns + if patterns is None: + patterns = [] + if not patterns: + patterns.append('*.nc') + for pattern in patterns: + logger.debug("Looking for files matching the pattern '%s'", pattern) + input_files.extend(io.get_all_ancestor_files(cfg, pattern=pattern)) + + # Ignore files + if not ignore_patterns: + return input_files + ignore_files = [] + for pattern in ignore_patterns: + logger.debug("Ignoring files matching the pattern '%s'", pattern) + ignore_files.extend(io.get_all_ancestor_files(cfg, pattern=pattern)) + valid_files = [] + for filename in input_files: + if filename not in ignore_files: + valid_files.append(filename) + return valid_files + + +def get_xy_data_without_nans(data_frame, feature, label): + """Get (X, Y) data for ``(feature, label)`` combination without nans. + + Parameters + ---------- + data_frame : pandas.DataFrame + Training data. + feature : str + Name of the feature data. + label : str + Name of the label data. + + Returns + ------- + tuple + Tuple containing a :class:`pandas.DataFrame` for the X axis (feature) + and a :class:`pandas.DataFrame` for the Y axis (label) without + missing values.
+ + """ + idx_slice = pd.IndexSlice[:, [feature, label]] + data_frame_xy = data_frame.loc[:, idx_slice] + data_frame_xy.columns = data_frame_xy.columns.droplevel() + data_frame_xy = data_frame_xy.dropna() + x_data = data_frame_xy[feature] + y_data = data_frame_xy[label] + return (x_data, y_data) + + +def get_input_data(cfg): + """Extract input data. + + Return training data, prediction input data and corresponding attributes. + + Parameters + ---------- + cfg : dict + Recipe configuration. + + Returns + ------- + tuple + A tuple containing the training data (:class:`pandas.DataFrame`), the + prediction input data (:class:`pandas.DataFrame`) and the + corresponding attributes (:obj:`dict`). + + """ + input_files = get_input_files(cfg, + patterns=cfg.get('patterns'), + ignore_patterns=cfg.get('ignore_patterns')) + logger.debug("Found files:\n%s", pformat(input_files)) + + # Get cubes + external_file = _get_external_file(cfg.get('read_external_file'), + cfg['auxiliary_data_dir']) + cubes = _get_cube_list( + input_files, + recipe=cfg['recipe'], + additional_data=cfg.get('additional_data'), + external_file=external_file, + merge_identical_pred_input=cfg.get('merge_identical_pred_input', True), + ) + + # Extract attributes for features and labels + attributes = _get_attributes(cubes) + + # Extract DataFrames + label_all_data = cfg.get('all_data_label', 'all') + group_by = cfg.get('group_by') + if group_by: + logger.info("Grouping features and labels by '%s'", group_by) + else: + logger.info("Using label '%s' to label data in plots", label_all_data) + features = _get_data_frame('feature', cubes, label_all_data, group_by) + label = _get_data_frame('label', cubes, label_all_data, group_by) + pred_input = _get_data_frame('prediction_input', cubes, label_all_data, + group_by) + pred_input_err = _get_data_frame('prediction_input_error', cubes, + label_all_data, group_by) + + # Unify indices of features and label + rows_to_add_to_label = [ + pd.Series(name=row, dtype=np.float64).to_frame().T for row in + features.index.difference(label.index) + ] + label = pd.concat([label] + rows_to_add_to_label) + rows_to_add_to_features = [ + pd.Series(name=row, dtype=np.float64).to_frame().T for row in + label.index.difference(features.index) + ] + features = pd.concat([features] + rows_to_add_to_features) + + # Sort data frames + for data_frame in (features, label, pred_input, pred_input_err): + data_frame.sort_index(axis=0, inplace=True) + data_frame.sort_index(axis=1, inplace=True) + + # Check data + (features, label, pred_input, + pred_input_err) = _check_data_frames(features, label, pred_input, + pred_input_err) + training_data = pd.concat([features, label], axis=1, keys=['x', 'y']) + training_data['idx'] = np.arange(len(training_data.index)) + 1 + training_data.set_index('idx', append=True, inplace=True) + training_data.index.names = [group_by, 'dataset', 'idx'] + prediction_data = pd.concat([pred_input, pred_input_err], + axis=1, + keys=['mean', 'error']) + if training_data.dropna().shape[0] < 2: + logger.error("Invalid training data:\n%s", training_data) + raise ValueError( + f"Expected at least 2 independent observations (=climate models) " + f"where all training data (features and target label) is " + f"available, got {training_data.dropna().shape[0]:d}") + + # Logger output + with pd.option_context(*PANDAS_PRINT_OPTIONS): + logger.info("Found training data:\n%s", training_data) + logger.info("Found prediction data:\n%s", prediction_data) + return (training_data, prediction_data, attributes) + + +def 
combine_groups(groups): + """Combine :obj:`list` of groups to a single :obj:`str`. + + Parameters + ---------- + groups : list of str + List of group names. + + Returns + ------- + str + Combined :obj:`str`. + + """ + new_str = ', '.join(groups) + return new_str + + +def pandas_object_to_cube(pandas_object, + index_droplevel=None, + columns_droplevel=None, + **kwargs): + """Convert pandas object to :class:`iris.cube.Cube`. + + Parameters + ---------- + pandas_object : pandas.DataFrame or pandas.Series + Data to convert. + index_droplevel : int or list of int, optional + Drop levels of index if not ``None``. + columns_droplevel : int or list of int, optional + Drop levels of columns if not ``None``. Can only be used if + ``pandas_object`` is a :class:`pandas.DataFrame`. + **kwargs : Keyword arguments + Keyword arguments used for the cube metadata, e.g. ``standard_name``, + ``var_name``, etc. + + Returns + ------- + iris.cube.Cube + Data cube. + + Raises + ------ + TypeError + ``columns_droplevel`` is used when ``pandas_object`` is not a + :class:`pandas.DataFrame`. + + """ + pandas_object = pandas_object.copy() + if index_droplevel is not None: + pandas_object.index = pandas_object.index.droplevel(index_droplevel) + if columns_droplevel is not None: + try: + pandas_object.columns = pandas_object.columns.droplevel( + columns_droplevel) + except AttributeError: + raise TypeError( + f"'columns_droplevel' only supported for pandas.DataFrame " + f"object, got {type(pandas_object)}") + cube = _get_pandas_cube(pandas_object) + for (key, val) in kwargs.items(): + setattr(cube, key, val) + return cube + + +def set_plot_appearance(axes, attributes, **kwargs): + """Set appearance of a plot. + + Parameters + ---------- + axes : matplotlib.axes.Axes + Matplotlib Axes object which contains the plot. + attributes : dict + Plot attributes. + **kwargs : Keyword arguments + Keyword arguments of the form ``plot_option=tag`` where ``plot_option`` + is something like ``plot_title``, ``plot_xlabel``, ``plot_xlim``, etc. + and ``tag`` a key for the plot attributes :obj:`dict` that describes + which attributes should be considered for that ``plot_option``. + + """ + for (plot_option, tag) in kwargs.items(): + plot_func = plot_option.replace('plot_', 'set_') + value = attributes[tag][plot_option] + getattr(axes, plot_func)(value) + + +def get_caption(attributes, feature, label, group=None): + """Construct caption from plotting attributes for (feature, label) pair. + + Parameters + ---------- + attributes : dict + Plot attributes. + feature : str + Feature. + label : str + Label. + group : str, optional + Group. + + Returns + ------- + str + Caption. + + Raises + ------ + KeyError + ``attributes`` does not include necessary keys. 
+ + """ + group_str = '' if group is None else f' ({group})' + if feature not in attributes: + raise KeyError( + f"Attributes do not include necessary key for feature '{feature}'") + if label not in attributes: + raise KeyError( + f"Attributes do not include necessary key for label '{label}'") + feature_attrs = attributes[feature] + label_attrs = attributes[label] + if 'plot_title' not in feature_attrs: + raise KeyError( + f"Attributes for feature '{feature}' does not include necessary " + f"key 'plot_title'") + if 'plot_xlabel' not in feature_attrs: + raise KeyError( + f"Attributes for feature '{feature}' does not include necessary " + f"key 'plot_xlabel'") + if 'plot_ylabel' not in label_attrs: + raise KeyError( + f"Attributes for label '{label}' does not include necessary " + f"key 'plot_ylabel'") + caption = (f"{attributes[feature]['plot_title']}: " + f"{attributes[label]['plot_ylabel']} vs. " + f"{attributes[feature]['plot_xlabel']}{group_str}.") + return caption + + +def get_provenance_record(attributes, tags, **kwargs): + """Get provenance record. + + Parameters + ---------- + attributes : dict + Plot attributes. All provenance keys need to start with + ``'provenance_'``. + tags : list of str + Tags used to retrieve data from the ``attributes`` :obj:`dict`, i.e. + features and/or label. + **kwargs : Keyword arguments + Additional ``key:value`` pairs directly passed to the provenance record + :obj:`dict`. All values may include the format strings ``{feature}`` + and ``{label}``. + + Returns + ------- + dict + Provenance record. + + """ + record = {} + for tag in tags: + for (key, value) in attributes[tag].items(): + if key.startswith('provenance_'): + key = key.replace('provenance_', '') + record.setdefault(key, []) + if isinstance(value, str): + record[key].append(value) + else: + record[key].extend(value) + record.setdefault('ancestors', []) + if key == 'filename': + if isinstance(value, str): + record['ancestors'].append(value) + else: + record['ancestors'].extend(value) + for (key, value) in record.items(): + if isinstance(value, list): + record[key] = list(set(value)) + record.update(kwargs) + return record + + +def get_colors(cfg, groups=None): + """Get color palette. + + Parameters + ---------- + cfg : dict + Recipe configuration. + groups : list, optional + Use to check whether color for combining groups has to be added. + + Returns + ------- + list + List of colors that can be used for :mod:`matplotlib`. + + """ + palette = cfg.get('seaborn_settings', {}).get('palette') + colors = sns.color_palette(palette=palette) + if groups is None: + return colors + if len(groups) > 1 and cfg.get('combine_groups', False): + return [COLOR_COMBINED_GROUPS] + colors + return colors -def _check_input_arrays(*arrays): - """Check the shapes of multiple arrays.""" - shape = None - for array in arrays: - if shape is None: - shape = array.shape + +def get_groups(training_data, add_combined_group=False): + """Extract groups from training data. + + Parameters + ---------- + training_data : pandas.DataFrame + Training data (features, label). + add_combined_group : bool, optional (default: False) + Add combined group of all other groups at the beginning of the + returned :obj:`list`. + + Returns + ------- + list of str + Groups. 
+ + """ + groups = list(set(training_data.index.get_level_values(0))) + groups.sort() + if add_combined_group and len(groups) > 1: + groups.insert(0, combine_groups(groups)) + return groups + + +def plot_individual_scatterplots(training_data, pred_input_data, attributes, + basename, cfg): + """Plot individual scatterplots for the different groups. + + Plot scatterplots for all pairs of ``(feature, label)`` data (Separate plot + for each group). + + Parameters + ---------- + training_data : pandas.DataFrame + Training data (features, label). + pred_input_data : pandas.DataFrame + Prediction input data (mean and error). + attributes : dict + Plot attributes for the different features and the label data. + basename : str + Basename for the name of the file. + cfg : dict + Recipe configuration. + + """ + logger.info("Plotting individual scatterplots") + label = training_data.y.columns[0] + groups = get_groups(training_data, + add_combined_group=cfg.get('combine_groups', False)) + + # Iterate over features + for feature in training_data.x.columns: + (x_data, y_data) = get_xy_data_without_nans(training_data, feature, + label) + + # Individual plots + colors = get_colors(cfg, groups=groups) + for (idx, group) in enumerate(groups): + try: + x_sub_data = x_data.loc[group] + y_sub_data = y_data.loc[group] + index_droplevel = 1 + except KeyError: + x_sub_data = x_data + y_sub_data = y_data + index_droplevel = [0, 2] + axes = _create_scatterplot( + x_sub_data, + y_sub_data, + numbers_as_markers=cfg.get('numbers_as_markers', False), + plot_regression_line_mean=cfg.get('plot_regression_line_mean', + False), + color=colors[idx], + label=group) + axes = _create_pred_input_plot( + pred_input_data['mean'][feature].values, + pred_input_data['error'][feature].values, axes) + axes = _create_pred_output_plot( + x_sub_data, + y_sub_data, + pred_input_data['mean'][feature].values, + pred_input_data['error'][feature].values, + axes, + hline_kwargs={ + 'color': colors[idx], + 'linestyle': ':' + }, + ) + set_plot_appearance(axes, + attributes, + plot_title=feature, + plot_xlabel=feature, + plot_ylabel=label, + plot_xlim=feature, + plot_ylim=label) + plt.legend(**LEGEND_KWARGS) + filename = (f"scatterplot_{basename}_{feature}_" + f"{group.replace(', ', '-')}") + plot_path = get_plot_filename(filename, cfg) + plt.savefig(plot_path, + **cfg.get('savefig_kwargs', {})) + logger.info("Wrote %s", plot_path) + plt.close() + + # Provenance + provenance_record = get_provenance_record( + attributes, [feature, label], + caption=get_caption(attributes, feature, label, group=group), + plot_type='scatter') + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(plot_path, provenance_record) + + # Write netCDF file + cubes = iris.cube.CubeList([ + pandas_object_to_cube( + x_sub_data, + index_droplevel=index_droplevel, + var_name=feature, + long_name=attributes[feature]['plot_xlabel'], + units=attributes[feature]['units']), + pandas_object_to_cube( + y_sub_data, + index_droplevel=index_droplevel, + var_name=label, + long_name=attributes[label]['plot_ylabel'], + units=attributes[label]['units']), + ]) + netcdf_path = get_diagnostic_filename(filename, cfg) + io.iris_save(cubes, netcdf_path) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(netcdf_path, provenance_record) + + +def plot_merged_scatterplots(training_data, pred_input_data, attributes, + basename, cfg): + """Plot merged scatterplots (all groups in one plot). 
+ + Plot scatterplots for all pairs of ``(feature, label)`` data (all groups in + one plot). + + Parameters + ---------- + training_data : pandas.DataFrame + Training data (features, label). + pred_input_data : pandas.DataFrame + Prediction input data (mean and error). + attributes : dict + Plot attributes for the different features and the label data. + basename : str + Basename for the name of the file. + cfg : dict + Recipe configuration. + + """ + logger.info("Plotting merged scatterplots") + label = training_data.y.columns[0] + groups = get_groups(training_data, + add_combined_group=cfg.get('combine_groups', False)) + numbers_as_markers = cfg.get('numbers_as_markers', False) + plot_regression_line_mean = cfg.get('plot_regression_line_mean', False) + colors = get_colors(cfg) + + # Iterate over features + for feature in training_data.x.columns: + (x_data, y_data) = get_xy_data_without_nans(training_data, feature, + label) + (_, axes) = plt.subplots() + if len(groups) > 1 and cfg.get('combine_groups', False): + axes = _create_regplot( + x_data, + y_data, + axes=axes, + line_kwargs={ + 'color': COLOR_COMBINED_GROUPS, + 'label': groups[0], + 'linestyle': '-' + }, + fill_between_kwargs={ + 'color': COLOR_COMBINED_GROUPS, + 'alpha': 0.3 + }, + mean_kwargs=(None + if not cfg.get('plot_regression_line_mean') else { + 'color': COLOR_COMBINED_GROUPS, + 'marker': 'o' + }), + ) + axes = _create_pred_output_plot( + x_data, + y_data, + pred_input_data['mean'][feature].values, + pred_input_data['error'][feature].values, + axes, + hline_kwargs={ + 'color': COLOR_COMBINED_GROUPS, + 'linestyle': ':' + }, + ) + for (idx, group) in enumerate(groups[1:]): + axes = _create_scatterplot( + x_data.loc[group], + y_data.loc[group], + numbers_as_markers=numbers_as_markers, + plot_regression_line_mean=plot_regression_line_mean, + axes=axes, + color=colors[idx], + label=group, + ) + axes = _create_pred_output_plot( + x_data.loc[group], + y_data.loc[group], + pred_input_data['mean'][feature].values, + pred_input_data['error'][feature].values, + axes, + hline_kwargs={ + 'color': colors[idx], + 'linestyle': ':' + }, + ) else: - if array.shape != shape: - raise ValueError("Expected input arrays with identical shapes") + for (idx, group) in enumerate(groups): + axes = _create_scatterplot( + x_data.loc[group], + y_data.loc[group], + numbers_as_markers=numbers_as_markers, + plot_regression_line_mean=plot_regression_line_mean, + axes=axes, + color=colors[idx], + label=group, + ) + axes = _create_pred_output_plot( + x_data.loc[group], + y_data.loc[group], + pred_input_data['mean'][feature].values, + pred_input_data['error'][feature].values, + axes, + hline_kwargs={ + 'color': colors[idx], + 'linestyle': ':' + }, + ) + axes = _create_pred_input_plot( + pred_input_data['mean'][feature].values, + pred_input_data['error'][feature].values, axes) + set_plot_appearance(axes, + attributes, + plot_title=feature, + plot_xlabel=feature, + plot_ylabel=label, + plot_xlim=feature, + plot_ylim=label) + plt.legend(**LEGEND_KWARGS) + filename = f'scatterplot_merged_{basename}_{feature}' + plot_path = get_plot_filename(filename, cfg) + plt.savefig(plot_path, + **cfg.get('savefig_kwargs', {})) + logger.info("Wrote %s", plot_path) + plt.close() + + # Provenance + provenance_record = get_provenance_record(attributes, [feature, label], + caption=get_caption( + attributes, feature, + label), + plot_type='scatter') + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(plot_path, provenance_record) + + # Write netCDF file + 
cubes = iris.cube.CubeList([ + pandas_object_to_cube(x_data, + index_droplevel=[0, 2], + var_name=feature, + long_name=attributes[feature]['plot_xlabel'], + units=attributes[feature]['units']), + pandas_object_to_cube(y_data, + index_droplevel=[0, 2], + var_name=label, + long_name=attributes[label]['plot_ylabel'], + units=attributes[label]['units']), + ]) + netcdf_path = get_diagnostic_filename(filename, cfg) + io.iris_save(cubes, netcdf_path) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(netcdf_path, provenance_record) + + +def create_simple_scatterplot(x_data, y_data, obs_mean, obs_std): + """Create simple scatterplot of an emergent relationship (without saving). + + Parameters + ---------- + x_data : numpy.ndarray + X data of the emergent constraint. + y_data : numpy.ndarray + Y data of the emergent constraint. + obs_mean : float + Mean of observational data. + obs_std : float + Standard deviation of observational data. + + """ + logger.debug("Plotting simple scatterplot") + (fig, axes) = plt.subplots() + axes.scatter(x_data, y_data, color='k', marker='o') + line_kwargs = {'color': 'C1', 'linestyle': '-'} + fill_between_kwargs = {**line_kwargs, 'alpha': 0.3} + axes = _create_regplot(x_data, y_data, axes=axes, line_kwargs=line_kwargs, + fill_between_kwargs=fill_between_kwargs) + axes = _create_pred_input_plot(obs_mean, obs_std, axes) + return (fig, axes) + + +def plot_target_distributions(training_data, pred_input_data, attributes, + basename, cfg): + """Plot distributions of target variable for every feature. + + Parameters + ---------- + training_data : pandas.DataFrame + Training data (features, label). + pred_input_data : pandas.DataFrame + Prediction input data (mean and error). + attributes : dict + Plot attributes for the different features and the label data. + basename : str + Basename for the name of the file. + cfg : dict + Recipe configuration. 
+ + """ + logger.info("Plotting distributions of target variable") + label = training_data.y.columns[0] + groups = get_groups(training_data, + add_combined_group=cfg['combine_groups']) + summary_columns = pd.MultiIndex.from_product( + [groups, ['best estimate', 'range', 'min', 'max']]) + summaries = [] + + # Iterate over features + for feature in training_data.x.columns: + (x_data, y_data) = get_xy_data_without_nans(training_data, feature, + label) + colors = get_colors(cfg, groups=groups) + summary_for_feature = pd.Series( + index=summary_columns, name=feature, dtype=np.float64 + ) + + # Iterate over groups + for (idx, group) in enumerate(groups): + try: + x_sub_data = x_data.loc[group] + y_sub_data = y_data.loc[group] + except KeyError: + x_sub_data = x_data + y_sub_data = y_data + (y_lin, y_pdf) = target_pdf( + x_sub_data, + y_sub_data, + pred_input_data['mean'][feature].values, + pred_input_data['error'][feature].values, + ) + + # Plots + axes = sns.histplot(y_sub_data, + bins=7, + stat='density', + color=colors[idx], + alpha=0.4) + axes.plot(y_lin, + y_pdf, + color=colors[idx], + linestyle='-', + label=group) + + # Print results + (y_min, y_mean, y_max) = get_constraint( + x_sub_data, + y_sub_data, + pred_input_data['mean'][feature].values, + pred_input_data['error'][feature].values, + confidence_level=cfg['confidence_level'], + ) + y_error = np.max([y_max - y_mean, y_mean - y_min]) + reg = linregress(x_sub_data.values, y_sub_data.values) + logger.info( + "Constrained %s for feature '%s' and group '%s': %.2f ± %.2f " + "(%i%% confidence level), R2 = %f, p = %f", label, + feature, group, y_mean, y_error, + int(100.0 * cfg['confidence_level']), reg.rvalue**2, + reg.pvalue) + + # Save results of group + summary_for_feature[(group, 'best estimate')] = y_mean + summary_for_feature[(group, 'range')] = y_max - y_min + summary_for_feature[(group, 'min')] = y_min + summary_for_feature[(group, 'max')] = y_max + + # Save results for feature + summaries.append(summary_for_feature.to_frame().T) + + # Plot appearance + set_plot_appearance(axes, attributes, plot_title=feature) + axes.set_xlabel(attributes[label]['plot_ylabel']) + axes.set_ylabel('Probability density') + if attributes[label]['plot_ylim'] is not None: + axes.set_xlim(attributes[label]['plot_ylim']) + axes.set_ylim([0.0, 1.0]) + plt.legend(loc='best') + + # Save plot + plot_path = get_plot_filename( + f'target_distribution_{basename}_{feature}', cfg) + plt.savefig(plot_path, + **cfg['savefig_kwargs']) + logger.info("Wrote %s", plot_path) + plt.close() + + # Provenance + provenance_record = get_provenance_record( + attributes, [feature, label], + caption=(f"{attributes[feature]['plot_title']}: Probability " + f"densitiy of {label}."), + plot_type='probability') + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(plot_path, provenance_record) + + # Print mean results + summary = pd.concat(summaries) + with pd.option_context(*PANDAS_PRINT_OPTIONS): + logger.info("Constrained ranges:\n%s", summary) + summary = summary.mean(axis=0) + logger.info("Mean of constrained ranges:\n%s", summary) + + +def export_csv(data_frame, attributes, basename, cfg, tags=None): + """Export CSV file. + + Parameters + ---------- + data_frame : pandas.DataFrame + Data to export. + attributes : dict + Plot attributes for the different features and the label data. Used to + retrieve provenance information. + basename : str + Basename for the name of the file. + cfg : dict + Recipe configuration. 
+ + +def export_csv(data_frame, attributes, basename, cfg, tags=None): + """Export CSV file. + + Parameters + ---------- + data_frame : pandas.DataFrame + Data to export. + attributes : dict + Plot attributes for the different features and the label data. Used to + retrieve provenance information. + basename : str + Basename for the name of the file. + cfg : dict + Recipe configuration. + tags : iterable of str, optional + Tags for which provenance information should be retrieved (using + ``attributes``). If not specified, use (last level of) columns of the + given ``data_frame``. + + Returns + ------- + str + Path to the new CSV file. + + """ + logger.info("Exporting CSV file for '%s'", basename) + csv_path = get_diagnostic_filename(basename, cfg).replace('.nc', '.csv') + data_frame.to_csv(csv_path) + logger.info("Wrote %s", csv_path) + if tags is None: + tags = data_frame.columns.get_level_values(-1) + provenance_record = get_provenance_record(attributes, + tags, + caption=basename) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(csv_path, provenance_record) + return csv_path def standard_prediction_error(x_data, y_data): - """Return function to calculate standard prediction error. + """Return a function to calculate standard prediction error. The standard prediction error of a linear regression is the error when - predicting a new value which is not in the original data. + predicting a data point which was not used to fit the regression line in + the first place. Parameters ---------- - x_data : numpy.array - x coordinates of the points. - y_data : numpy.array - y coordinates of the points. + x_data : numpy.ndarray + X data used to fit the linear regression. + y_data : numpy.ndarray + Y data used to fit the linear regression. Returns ------- callable - Standard prediction error function for new x values. + Function that takes a :obj:`float` as a single argument (representing + the X value of a new data point) and returns the standard prediction + error for that point. """ - _check_input_arrays(x_data, y_data) - reg = stats.linregress(x_data, y_data) - y_estim = reg.slope * x_data + reg.intercept + (x_data, y_data) = _check_x_y_arrays(x_data, y_data) + reg = linregress(x_data, y_data) + y_pred = reg.slope * x_data + reg.intercept n_data = x_data.shape[0] - see = np.sqrt(np.sum(np.square(y_data - y_estim)) / (n_data - 2)) + see = np.sqrt(np.sum(np.square(y_data - y_pred)) / (n_data - 2)) x_mean = np.mean(x_data) ssx = np.sum(np.square(x_data - x_mean)) @@ -49,111 +1580,239 @@ def spe(x_new): """Return standard prediction error.""" return see * np.sqrt(1.0 + 1.0 / n_data + (x_new - x_mean)**2 / ssx) - return np.vectorize(spe) + return spe -def regression_line(x_data, y_data, n_points=100): +def regression_line(x_data, y_data, n_points=1000): """Return x and y coordinates of the regression line (mean and error). Parameters ---------- - x_data : numpy.array - x coordinates of the points. - y_data : numpy.array - y coordinates of the points. - n_points : int, optional (default: 100) + x_data : numpy.ndarray + X data used to fit the linear regression. + y_data : numpy.ndarray + Y data used to fit the linear regression. + n_points : int, optional (default: 1000) Number of points for the regression lines. Returns ------- dict - `numpy.array`s for the keys `x`, `y_best_estim`, `y_minus_err`, - `y_plus_err', 'rvalue', 'slope' and 'intercept'. + :class:`numpy.ndarray` s for the keys ``'x'``, ``'y'``, + ``'y_minus_err'``, ``'y_plus_err'``, ``'slope'``, ``'intercept'``, + ``'pvalue'`` and ``'rvalue'``.
""" - _check_input_arrays(x_data, y_data) - spe = standard_prediction_error(x_data, y_data) + (x_data, y_data) = _check_x_y_arrays(x_data, y_data) + spe = np.vectorize(standard_prediction_error(x_data, y_data)) out = {} - reg = stats.linregress(x_data, y_data) - x_range = max(x_data) - min(x_data) - x_lin = np.linspace(min(x_data) - x_range, max(x_data) + x_range, n_points) - out['y_best_estim'] = reg.slope * x_lin + reg.intercept - out['y_minus_err'] = out['y_best_estim'] - spe(x_lin) - out['y_plus_err'] = out['y_best_estim'] + spe(x_lin) + reg = linregress(x_data, y_data) + x_range = np.max(x_data) - np.min(x_data) + x_lin = np.linspace( + np.min(x_data) - x_range, + np.max(x_data) + x_range, n_points) out['x'] = x_lin - out['rvalue'] = reg.rvalue + out['y'] = reg.slope * x_lin + reg.intercept + out['y_minus_err'] = out['y'] - spe(x_lin) + out['y_plus_err'] = out['y'] + spe(x_lin) out['slope'] = reg.slope out['intercept'] = reg.intercept + out['pvalue'] = reg.pvalue + out['rvalue'] = reg.rvalue return out -def gaussian_pdf(x_data, y_data, obs_mean, obs_std, n_points=100): - """Calculate Gaussian probability densitiy function for target variable. +def target_pdf(x_data, + y_data, + obs_mean, + obs_std, + n_points=1000, + necessary_p_value=None): + """Calculate probability density function (PDF) for target variable. Parameters ---------- - x_data : numpy.array - x coordinates of the points. - y_data : numpy.array - y coordinates of the points. + x_data : numpy.ndarray + X data of the emergent constraint. + y_data : numpy.ndarray + Y data of the emergent constraint. obs_mean : float Mean of observational data. obs_std : float Standard deviation of observational data. - n_points : int, optional (default: 100) - Number of points for the regression lines. + n_points : int, optional (default: 1000) + Number of sampled points for PDF of target variable. + necessary_p_value : float, optional + If given, return unconstrained PDF (using Gaussian distribution with + unconstrained mean and standard deviation) when `p`-value of emergent + relationship is greater than the given necessary `p`-value. Returns ------- - tuple of numpy.array + tuple of numpy.ndarray x and y values for the PDF. """ - _check_input_arrays(x_data, y_data) - spe = standard_prediction_error(x_data, y_data) - reg = stats.linregress(x_data, y_data) - - def obs_pdf(x_new): - """Return PDF of observations P(x).""" - norm = np.sqrt(2.0 * np.pi * obs_std**2) - return np.exp(-(x_new - obs_mean)**2 / 2.0 / obs_std**2) / norm - - def cond_pdf(x_new, y_new): - """Return conditional PDF P(y|x).""" - y_estim = reg.slope * x_new + reg.intercept - norm = np.sqrt(2.0 * np.pi * spe(x_new)**2) - return np.exp(-(y_new - y_estim)**2 / 2.0 / spe(x_new)**2) / norm - - def comb_pdf(x_new, y_new): - """Return combined PDF P(y,x).""" - return obs_pdf(x_new) * cond_pdf(x_new, y_new) - - # PDF of target variable P(y) - y_range = max(y_data) - min(y_data) - y_lin = np.linspace(min(y_data) - y_range, max(y_data) + y_range, n_points) - y_pdf = [ - integrate.quad(comb_pdf, -np.inf, +np.inf, args=(y, ))[0] - for y in y_lin - ] - return (y_lin, np.array(y_pdf)) + (y_lin, y_pdf, _) = _get_target_pdf(x_data, + y_data, + obs_mean, + obs_std, + n_points=n_points, + necessary_p_value=necessary_p_value) + return (y_lin, y_pdf) def cdf(data, pdf): - """Calculate cumulative distribution function for a PDF. + """Calculate cumulative distribution function for a 1-dimensional PDF. Parameters ---------- - data : numpy.array - Data points (x axis). 
- pdf : numpy.array + data : numpy.ndarray + Data points (1D array). + pdf : numpy.ndarray Corresponding probability density function (PDF). Returns ------- - numpy.array + numpy.ndarray Corresponding cumulative distribution function (CDF). """ idx_range = range(1, len(data) + 1) - cum_dens = [integrate.simps(pdf[:idx], data[:idx]) for idx in idx_range] + cum_dens = [simpson(pdf[:idx], x=data[:idx]) for idx in idx_range] return np.array(cum_dens) + + +def constraint_info_array(x_data, + y_data, + obs_mean, + obs_std, + n_points=1000, + necessary_p_value=None): + """Get array with all relevant parameters of emergent constraint. + + Parameters + ---------- + x_data : numpy.ndarray + X data of the emergent constraint. + y_data : numpy.ndarray + Y data of the emergent constraint. + obs_mean : float + Mean of observational data. + obs_std : float + Standard deviation of observational data. + n_points : int, optional (default: 1000) + Number of sampled points for PDF of target variable. + necessary_p_value : float, optional + If given, replace constrained mean and standard deviation with + unconstrained values when `p`-value of emergent relationship is greater + than the given necessary `p`-value. + + Returns + ------- + numpy.ndarray + Array of shape (8,) with the elements: + 0. Constrained mean of target variable. + 1. Constrained standard deviation of target variable. + 2. Unconstrained mean of target variable. + 3. Unconstrained standard deviation of target variable. + 4. Slope of emergent relationship. + 5. Intercept of emergent relationship. + 6. Correlation coefficient `r` of emergent relationship. + 7. `p`-value of emergent relationship. + + """ + (y_lin, y_pdf, reg) = _get_target_pdf(x_data, + y_data, + obs_mean, + obs_std, + n_points=n_points, + necessary_p_value=necessary_p_value) + norm = np.sum(y_pdf) + y_mean = np.sum(y_lin * y_pdf) / norm + y_std = np.sqrt(np.sum((y_lin - y_mean)**2 * y_pdf) / norm) + info = [ + y_mean, y_std, + np.ma.mean(y_data), + np.ma.std(y_data), reg.slope, reg.intercept, reg.rvalue, reg.pvalue + ] + return np.array(info) + + +def get_constraint(x_data, y_data, obs_mean, obs_std, confidence_level=0.66): + """Get constraint on target variable. + + Parameters + ---------- + x_data : numpy.ndarray + X data of the emergent constraint. + y_data : numpy.ndarray + Y data of the emergent constraint. + obs_mean : float + Mean of observational data. + obs_std : float + Standard deviation of observational data. + confidence_level : float, optional (default: 0.66) + Confidence level to estimate the range of the target variable. + + Returns + ------- + tuple of float + Lower confidence limit, best estimate and upper confidence limit of + target variable. + + """ + (x_data, y_data) = _check_x_y_arrays(x_data, y_data) + (y_lin, y_pdf) = target_pdf(x_data, y_data, obs_mean, obs_std) + y_mean = np.sum(y_lin * y_pdf) / np.sum(y_pdf) + y_cdf = cdf(y_lin, y_pdf) + y_index_range = np.nonzero((y_cdf >= (1.0 - confidence_level) / 2.0) + & (y_cdf <= (1.0 + confidence_level) / 2.0)) + y_range = y_lin[y_index_range] + return (np.min(y_range), y_mean, np.max(y_range))
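The confidence interval above is read off the CDF by masking out the central probability mass; equivalently, as a standalone sketch (not part of the patch) with a toy standard normal PDF, where ``cdf`` is the helper defined above:

    import numpy as np

    y_lin = np.linspace(-5.0, 5.0, 2001)
    y_pdf = np.exp(-y_lin**2 / 2.0) / np.sqrt(2.0 * np.pi)  # standard normal
    y_cdf = cdf(y_lin, y_pdf)

    confidence_level = 0.66
    mask = ((y_cdf >= (1.0 - confidence_level) / 2.0)
            & (y_cdf <= (1.0 + confidence_level) / 2.0))
    # Central 66% interval of a standard normal: roughly (-0.95, 0.95)
    print(y_lin[mask].min(), y_lin[mask].max())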
+ + +def get_constraint_from_df(training_data, + pred_input_data, + confidence_level=0.66): + """Get constraint on target variable from :class:`pandas.DataFrame`. + + Parameters + ---------- + training_data : pandas.DataFrame + Training data (features, label). + pred_input_data : pandas.DataFrame + Prediction input data (mean and error). + confidence_level : float, optional (default: 0.66) + Confidence level to estimate the range of the target variable. + + Returns + ------- + tuple of float + Lower confidence limit, best estimate and upper confidence limit of + target variable. + + """ + if len(training_data.columns) != 2: + raise ValueError( + f"Expected exactly two columns for training data (feature and " + f"label), got {len(training_data.columns):d}") + if len(pred_input_data.columns) != 2: + raise ValueError( + f"Expected exactly two columns for prediction input data (mean " + f"and error), got {len(pred_input_data.columns):d}") + + # Extract data + label = training_data.y.columns[0] + feature = training_data.x.columns[0] + (x_data, y_data) = get_xy_data_without_nans(training_data, feature, label) + x_pred = pred_input_data['mean'][feature].values[0] + x_pred_error = pred_input_data['error'][feature].values[0] + + # Calculate constraint + constraint = get_constraint(x_data, + y_data, + x_pred, + x_pred_error, + confidence_level=confidence_level) + return constraint diff --git a/esmvaltool/diag_scripts/emergent_constraints/cox18nature.py b/esmvaltool/diag_scripts/emergent_constraints/cox18nature.py index 5a7c7f79bc..9318e76a30 100644 --- a/esmvaltool/diag_scripts/emergent_constraints/cox18nature.py +++ b/esmvaltool/diag_scripts/emergent_constraints/cox18nature.py @@ -1,10 +1,10 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -"""Diagnostic script to reproduce Cox et al. (2018). +"""Diagnostic script to reproduce emergent constraint of Cox et al. (2018). Description ----------- -Plot equilibrium climate sensitivity ECS vs. temperature variability metric psi +Plot equilibrium climate sensitivity ECS vs. temperature variability metric Ψ to establish an emergent relationship for ECS. Author ------ @@ -17,7 +17,7 @@ Configuration options in recipe ------------------------------- -confidence_level : float, optional (default: 0.66) +confidence_level: float, optional (default: 0.66) Confidence level for ECS error estimation.
""" @@ -26,6 +26,7 @@ import os import iris +import iris.coord_categorisation import matplotlib.lines as mlines import matplotlib.pyplot as plt import numpy as np @@ -33,8 +34,16 @@ import esmvaltool.diag_scripts.emergent_constraints as ec import esmvaltool.diag_scripts.shared.iris_helpers as ih from esmvaltool.diag_scripts.shared import ( - ProvenanceLogger, get_diagnostic_filename, get_plot_filename, - group_metadata, io, plot, run_diagnostic, select_metadata) + ProvenanceLogger, + get_diagnostic_filename, + get_plot_filename, + group_metadata, + io, + plot, + run_diagnostic, + select_metadata, + sorted_metadata, +) logger = logging.getLogger(os.path.basename(__file__)) plt.style.use(plot.get_path_to_mpl_style()) @@ -72,11 +81,11 @@ def _get_ancestor_files(cfg, obs_name, projects=None): select_metadata(cfg['input_data'].values(), project=project)) datasets.extend( select_metadata(cfg['input_data'].values(), dataset=obs_name)) - return [d['filename'] for d in datasets] + return sorted([d['filename'] for d in datasets]) def _get_model_color(model, lambda_cube): - """Get color of model dependent on climate sensitivity.""" + """Get color of model dependent on climate feedback parameter.""" clim_sens = lambda_cube.extract(iris.Constraint(dataset=model)).data if clim_sens < 1.0: col = COLOR_SMALL_LAMBDA @@ -89,14 +98,13 @@ def _plot_model_point(model, psi_cube, ecs_cube, lambda_cube): """Plot a single model point for emergent relationship.""" col = _get_model_color(model, lambda_cube) style = plot.get_dataset_style(model, 'cox18nature') - AXES.plot( - psi_cube.extract(iris.Constraint(dataset=model)).data, - ecs_cube.extract(iris.Constraint(dataset=model)).data, - linestyle='none', - marker=style['mark'], - markeredgecolor=col, - markerfacecolor=col, - markersize=style['size']) + AXES.plot(psi_cube.extract(iris.Constraint(dataset=model)).data, + ecs_cube.extract(iris.Constraint(dataset=model)).data, + linestyle='none', + marker=style['mark'], + markeredgecolor=col, + markerfacecolor=col, + markersize=style['size']) def _get_line_plot_legend(): @@ -126,21 +134,16 @@ def _get_project(cfg): projects = [p for p in projects if 'obs' not in p.lower()] if len(projects) == 1: return projects[0] - return projects + return sorted(projects) def _save_fig(cfg, basename, legend=None): """Save matplotlib figure.""" path = get_plot_filename(basename, cfg) - if legend is None: - legend = [] - else: - legend = [legend] - FIG.savefig( - path, - additional_artists=legend, - bbox_inches='tight', - orientation='landscape') + savefig_kwargs = dict(bbox_inches='tight', orientation='landscape') + if legend is not None: + savefig_kwargs['bbox_extra_artists'] = [legend] + FIG.savefig(path, **savefig_kwargs) logger.info("Wrote %s", path) AXES.cla() return path @@ -148,14 +151,16 @@ def _save_fig(cfg, basename, legend=None): def get_external_cubes(cfg): """Get external cubes for psi, ECS and lambda.""" - cubes = [] + cubes = iris.cube.CubeList() + input_data = list(cfg['input_data'].values()) + input_data = sorted_metadata(input_data, ['short_name', 'exp', 'dataset']) for filename in ('psi.nc', 'ecs.nc', 'lambda.nc'): filepath = io.get_ancestor_file(cfg, filename) cube = iris.load_cube(filepath) cube = cube.extract( - ih.iris_project_constraint(['OBS'], cfg, negate=True)) + ih.iris_project_constraint(['OBS'], input_data, negate=True)) cubes.append(cube) - cubes = ih.match_dataset_coordinates(cubes) + cubes = ih.intersect_dataset_coordinates(cubes) return (cubes[0], cubes[1], cubes[2]) @@ -163,7 +168,7 @@ def 
get_provenance_record(caption, statistics, plot_type, ancestor_files): """Create a provenance record describing the diagnostic data and plot.""" record = { 'ancestors': ancestor_files, - 'authors': ['schl_ma'], + 'authors': ['schlund_manuel'], 'caption': caption, 'domains': ['global'], 'plot_type': plot_type, @@ -175,6 +180,34 @@ def get_provenance_record(caption, statistics, plot_type, ancestor_files): return record +def get_psi(cfg): + """Get time-dependent ``psi`` data.""" + psi_cubes = {} + psi_obs = [] + for (dataset, [data]) in group_metadata( + io.netcdf_to_metadata(cfg, pattern='psi_*.nc'), 'dataset').items(): + cube = iris.load_cube(data['filename']) + cube = cube.aggregated_by('year', iris.analysis.MEAN) + psi_cubes[dataset] = cube + if data['project'] == 'OBS': + psi_obs.append(dataset) + return (psi_cubes, psi_obs) + + +def get_tas(input_data): + """Get time-dependent ``tas`` data.""" + tas_cubes = {} + tas_obs = [] + for (dataset, [data]) in group_metadata(input_data, 'dataset').items(): + cube = iris.load_cube(data['filename']) + iris.coord_categorisation.add_year(cube, 'time') + cube = cube.aggregated_by('year', iris.analysis.MEAN) + tas_cubes[dataset] = cube + if data['project'] == 'OBS': + tas_obs.append(dataset) + return (tas_cubes, tas_obs) + + def plot_temperature_anomaly(cfg, tas_cubes, lambda_cube, obs_name): """Plot temperature anomaly versus time.""" for cube in tas_cubes.values(): @@ -182,7 +215,7 @@ def plot_temperature_anomaly(cfg, tas_cubes, lambda_cube, obs_name): cube.extract( iris.Constraint(year=lambda cell: 1961 <= cell <= 1990)).data) - # Save netcdf file and provencance + # Save netcdf file and provenance filename = 'temperature_anomaly_{}'.format(obs_name) netcdf_path = get_diagnostic_filename(filename, cfg) io.save_1d_data(tas_cubes, netcdf_path, 'year', TASA_ATTRS) @@ -195,38 +228,36 @@ def plot_temperature_anomaly(cfg, tas_cubes, lambda_cube, obs_name): ['times'], _get_ancestor_files(cfg, obs_name)) # Plot - if cfg['write_plots']: - models = lambda_cube.coord('dataset').points - - # Plot lines - for model in models: - cube = tas_cubes[model] - AXES.plot( - cube.coord('year').points, - cube.data, - color=_get_model_color(model, lambda_cube)) - obs_style = plot.get_dataset_style('OBS', 'cox18nature') - obs_cube = tas_cubes[obs_name] - AXES.plot( - obs_cube.coord('year').points, - obs_cube.data, - linestyle='none', - marker='o', - markeredgecolor=obs_style['color'], - markerfacecolor=obs_style['color']) - - # Plot appearance - AXES.set_title('Simulation of global warming record') - AXES.set_xlabel('Year') - AXES.set_ylabel('Temperature anomaly / K') - legend = _get_line_plot_legend() - - # Save plot - provenance_record['plot_file'] = _save_fig(cfg, filename, legend) + models = lambda_cube.coord('dataset').points + + # Plot lines + for model in models: + cube = tas_cubes[model] + AXES.plot(cube.coord('year').points, + cube.data, + color=_get_model_color(model, lambda_cube)) + obs_style = plot.get_dataset_style('OBS', 'cox18nature') + obs_cube = tas_cubes[obs_name] + AXES.plot(obs_cube.coord('year').points, + obs_cube.data, + linestyle='none', + marker='o', + markeredgecolor=obs_style['color'], + markerfacecolor=obs_style['color']) + + # Plot appearance + AXES.set_title('Simulation of global warming record') + AXES.set_xlabel('Year') + AXES.set_ylabel('Temperature anomaly / K') + legend = _get_line_plot_legend() + + # Save plot + plot_path = _save_fig(cfg, filename, legend) # Write provenance with ProvenanceLogger(cfg) as provenance_logger: 
provenance_logger.log(netcdf_path, provenance_record) + provenance_logger.log(plot_path, provenance_record) def plot_psi(cfg, psi_cubes, lambda_cube, obs_name): @@ -245,38 +276,36 @@ def plot_psi(cfg, psi_cubes, lambda_cube, obs_name): ['corr', 'var'], ['times'], _get_ancestor_files(cfg, obs_name)) # Plot - if cfg['write_plots']: - models = lambda_cube.coord('dataset').points - - # Plot lines - for model in models: - cube = psi_cubes[model] - AXES.plot( - cube.coord('year').points, - cube.data, - color=_get_model_color(model, lambda_cube)) - obs_style = plot.get_dataset_style('OBS', 'cox18nature') - obs_cube = psi_cubes[obs_name] - AXES.plot( - obs_cube.coord('year').points, - obs_cube.data, - linestyle='none', - marker='o', - markeredgecolor=obs_style['color'], - markerfacecolor=obs_style['color']) - - # Plot appearance - AXES.set_title('Metric of variability versus time') - AXES.set_xlabel('Year') - AXES.set_ylabel(r'$\Psi$ / K') - legend = _get_line_plot_legend() - - # Save plot - provenance_record['plot_file'] = _save_fig(cfg, filename, legend) + models = lambda_cube.coord('dataset').points + + # Plot lines + for model in models: + cube = psi_cubes[model] + AXES.plot(cube.coord('year').points, + cube.data, + color=_get_model_color(model, lambda_cube)) + obs_style = plot.get_dataset_style('OBS', 'cox18nature') + obs_cube = psi_cubes[obs_name] + AXES.plot(obs_cube.coord('year').points, + obs_cube.data, + linestyle='none', + marker='o', + markeredgecolor=obs_style['color'], + markerfacecolor=obs_style['color']) + + # Plot appearance + AXES.set_title('Metric of variability versus time') + AXES.set_xlabel('Year') + AXES.set_ylabel(r'$\Psi$ / K') + legend = _get_line_plot_legend() + + # Save plot + plot_path = _save_fig(cfg, filename, legend) # Write provenance with ProvenanceLogger(cfg) as provenance_logger: provenance_logger.log(netcdf_path, provenance_record) + provenance_logger.log(plot_path, provenance_record) def plot_emergent_relationship(cfg, psi_cube, ecs_cube, lambda_cube, obs_cube): @@ -288,7 +317,7 @@ def plot_emergent_relationship(cfg, psi_cube, ecs_cube, lambda_cube, obs_cube): iris.coords.AuxCoord(psi_cube.data, **ih.convert_to_iris(PSI_ATTRS)), 0) netcdf_path = get_diagnostic_filename(filename, cfg) - io.save_iris_cube(cube, netcdf_path) + io.iris_save(cube, netcdf_path) provenance_record = get_provenance_record( "Emergent relationship between ECS and the psi metric. 
The black dot-" "dashed line shows the best-fit linear regression across the model " @@ -300,190 +329,173 @@ def plot_emergent_relationship(cfg, psi_cube, ecs_cube, lambda_cube, obs_cube): ['scatter'], _get_ancestor_files(cfg, obs_cube.attributes['dataset'])) # Plot - if cfg['write_plots']: - obs_mean = np.mean(obs_cube.data) - obs_std = np.std(obs_cube.data) - - # Calculate regression line - lines = ec.regression_line(psi_cube.data, ecs_cube.data) - logger.info("Found emergent relationship with slope %.2f (r = %.2f)", - lines['slope'], lines['rvalue']) - - # Plot points - for model in psi_cube.coord('dataset').points: - _plot_model_point(model, psi_cube, ecs_cube, lambda_cube) - - # Plot lines - AXES.set_xlim(auto=False) - AXES.set_ylim(auto=False) - AXES.plot( - lines['x'], - lines['y_best_estim'], - color='black', - linestyle='dashdot', - label='Linear regression') - AXES.plot( - lines['x'], - lines['y_minus_err'], - color='black', - linestyle='dashed') - AXES.plot( - lines['x'], lines['y_plus_err'], color='black', linestyle='dashed') - AXES.axvline( - obs_mean, - color='blue', - linestyle='dashdot', - label='Observational constraint') - AXES.axvline(obs_mean - obs_std, color='blue', linestyle='dashed') - AXES.axvline(obs_mean + obs_std, color='blue', linestyle='dashed') - - # Plot appearance - AXES.set_title('Emergent relationship fit') - AXES.set_xlabel(r'$\Psi$ / K') - AXES.set_ylabel('ECS / K') - legend = AXES.legend(loc='upper left') - - # Save plot - provenance_record['plot_file'] = _save_fig(cfg, filename, legend) + obs_mean = np.mean(obs_cube.data) + obs_std = np.std(obs_cube.data) + + # Calculate regression line + lines = ec.regression_line(psi_cube.data, ecs_cube.data) + logger.info("Found emergent relationship with slope %.2f (R2 = %.2f)", + lines['slope'], lines['rvalue']**2) + + # Plot points + for model in psi_cube.coord('dataset').points: + _plot_model_point(model, psi_cube, ecs_cube, lambda_cube) + + # Plot lines + AXES.set_xlim(auto=False) + AXES.set_ylim(auto=False) + AXES.plot(lines['x'], + lines['y'], + color='black', + linestyle='dashdot', + label='Linear regression') + AXES.plot(lines['x'], + lines['y_minus_err'], + color='black', + linestyle='dashed') + AXES.plot(lines['x'], + lines['y_plus_err'], + color='black', + linestyle='dashed') + AXES.axvline(obs_mean, + color='blue', + linestyle='dashdot', + label='Observational constraint') + AXES.axvline(obs_mean - obs_std, color='blue', linestyle='dashed') + AXES.axvline(obs_mean + obs_std, color='blue', linestyle='dashed') + + # Plot appearance + AXES.set_title('Emergent relationship fit') + AXES.set_xlabel(r'$\Psi$ / K') + AXES.set_ylabel('ECS / K') + legend = AXES.legend(loc='upper left') + + # Save plot + plot_path = _save_fig(cfg, filename, legend) # Write provenance with ProvenanceLogger(cfg) as provenance_logger: provenance_logger.log(netcdf_path, provenance_record) + provenance_logger.log(plot_path, provenance_record) -def plot_pdf(cfg, psi_cube, ecs_cube, obs_cube): +def plot_pdf(cfg, ecs_lin, ecs_pdf, ecs_cube, obs_name): """Plot probability density function of ECS.""" - obs_mean = np.mean(obs_cube.data) - obs_std = np.std(obs_cube.data) - (ecs_lin, ecs_pdf) = ec.gaussian_pdf(psi_cube.data, ecs_cube.data, - obs_mean, obs_std) - - # Provenance - filename = 'pdf_{}'.format(obs_cube.attributes['dataset']) + filename = 'pdf_{}'.format(obs_name) netcdf_path = get_diagnostic_filename(filename, cfg) - cube = iris.cube.Cube( - ecs_pdf, - var_name='pdf', - long_name='Probability density function', - units='K-1') + 
cube = iris.cube.Cube(ecs_pdf, + var_name='pdf', + long_name='Probability density function', + units='K-1') cube.add_aux_coord( iris.coords.AuxCoord(ecs_lin, **ih.convert_to_iris(ECS_ATTRS)), 0) - io.save_iris_cube(cube, netcdf_path) + io.iris_save(cube, netcdf_path) project = _get_project(cfg) provenance_record = get_provenance_record( "The PDF for ECS. The orange histograms show the prior distributions " "that arise from equal weighting of the {} models in 0.5 K bins.". format(project), ['mean'], ['other'], - _get_ancestor_files(cfg, obs_cube.attributes['dataset'])) + _get_ancestor_files(cfg, obs_name)) # Plot - if cfg['write_plots']: - AXES.plot( - ecs_lin, - ecs_pdf, - color='black', - linewidth=2.0, - label='Emergent constraint') - AXES.hist( - ecs_cube.data, - bins=6, - range=(2.0, 5.0), - density=True, - color='orange', - label='{} models'.format(project)) - - # Plot appearance - AXES.set_title('PDF of emergent constraint') - AXES.set_xlabel('ECS / K') - AXES.set_ylabel('Probability density') - legend = AXES.legend(loc='upper left') - - # Save plot - provenance_record['plot_file'] = _save_fig(cfg, filename, legend) + AXES.plot(ecs_lin, + ecs_pdf, + color='black', + linewidth=2.0, + label='Emergent constraint') + AXES.hist(ecs_cube.data, + bins=6, + range=(2.0, 5.0), + density=True, + color='orange', + label='{} models'.format(project)) + + # Plot appearance + AXES.set_title('PDF of emergent constraint') + AXES.set_xlabel('ECS / K') + AXES.set_ylabel('Probability density') + legend = AXES.legend(loc='upper left') + + # Save plot + plot_path = _save_fig(cfg, filename, legend) # Write provenance with ProvenanceLogger(cfg) as provenance_logger: provenance_logger.log(netcdf_path, provenance_record) + provenance_logger.log(plot_path, provenance_record) -def plot_cdf(cfg, psi_cube, ecs_cube, obs_cube): +def plot_cdf(cfg, ecs_lin, ecs_pdf, ecs_cube, obs_name): """Plot cumulative distribution function of ECS.""" confidence_level = cfg.get('confidence_level', 0.66) - (ecs_lin, ecs_pdf) = ec.gaussian_pdf(psi_cube.data, ecs_cube.data, - np.mean(obs_cube.data), - np.std(obs_cube.data)) ecs_cdf = ec.cdf(ecs_lin, ecs_pdf) # Provenance - filename = 'cdf_{}'.format(obs_cube.attributes['dataset']) + filename = 'cdf_{}'.format(obs_name) netcdf_path = get_diagnostic_filename(filename, cfg) - cube = iris.cube.Cube( - ecs_cdf, - var_name='cdf', - long_name='Cumulative distribution function', - units='1') + cube = iris.cube.Cube(ecs_cdf, + var_name='cdf', + long_name='Cumulative distribution function', + units='1') cube.add_aux_coord( iris.coords.AuxCoord(ecs_lin, **ih.convert_to_iris(ECS_ATTRS)), 0) - io.save_iris_cube(cube, netcdf_path) + io.iris_save(cube, netcdf_path) project = _get_project(cfg) provenance_record = get_provenance_record( "The CDF for ECS. The horizontal dot-dashed lines show the {}% " "confidence limits. 
The orange histograms show the prior " "distributions that arise from equal weighting of the {} models in " "0.5 K bins.".format(int(confidence_level * 100), project), ['mean'], - ['other'], _get_ancestor_files(cfg, obs_cube.attributes['dataset'])) + ['other'], _get_ancestor_files(cfg, obs_name)) # Plot - if cfg['write_plots']: - AXES.plot( - ecs_lin, - ecs_cdf, - color='black', - linewidth=2.0, - label='Emergent constraint') - AXES.hist( - ecs_cube.data, - bins=6, - range=(2.0, 5.0), - cumulative=True, - density=True, - color='orange', - label='{} models'.format(project)) - AXES.axhline( - (1.0 - confidence_level) / 2.0, color='black', linestyle='dashdot') - AXES.axhline( - (1.0 + confidence_level) / 2.0, color='black', linestyle='dashdot') - - # Plot appearance - AXES.set_title('CDF of emergent constraint') - AXES.set_xlabel('ECS / K') - AXES.set_ylabel('CDF') - legend = AXES.legend(loc='upper left') - - # Save plot - provenance_record['plot_file'] = _save_fig(cfg, filename, legend) + AXES.plot(ecs_lin, + ecs_cdf, + color='black', + linewidth=2.0, + label='Emergent constraint') + AXES.hist(ecs_cube.data, + bins=6, + range=(2.0, 5.0), + cumulative=True, + density=True, + color='orange', + label='{} models'.format(project)) + AXES.axhline((1.0 - confidence_level) / 2.0, + color='black', + linestyle='dashdot') + AXES.axhline((1.0 + confidence_level) / 2.0, + color='black', + linestyle='dashdot') + + # Plot appearance + AXES.set_title('CDF of emergent constraint') + AXES.set_xlabel('ECS / K') + AXES.set_ylabel('CDF') + legend = AXES.legend(loc='upper left') + + # Save plot + plot_path = _save_fig(cfg, filename, legend) # Write provenance with ProvenanceLogger(cfg) as provenance_logger: provenance_logger.log(netcdf_path, provenance_record) + provenance_logger.log(plot_path, provenance_record) -def get_ecs_range(cfg, psi_cube, ecs_cube, obs_cube): +def get_ecs_range(cfg, ecs_lin, ecs_pdf): """Get constrained ecs range.""" confidence_level = cfg.get('confidence_level', 0.66) conf_low = (1.0 - confidence_level) / 2.0 conf_high = (1.0 + confidence_level) / 2.0 - # Calculate PDF and CDF - (ecs_lin, ecs_pdf) = ec.gaussian_pdf(psi_cube.data, ecs_cube.data, - np.mean(obs_cube.data), - np.std(obs_cube.data)) - ecs_cdf = ec.cdf(ecs_lin, ecs_pdf) + # Mean ECS + ecs_mean = np.sum(ecs_lin * ecs_pdf) / np.sum(ecs_pdf) # Calculate constrained ECS range - ecs_mean = ecs_lin[np.argmax(ecs_pdf)] - ecs_index_range = np.where((ecs_cdf >= conf_low) & - (ecs_cdf <= conf_high))[0] + ecs_cdf = ec.cdf(ecs_lin, ecs_pdf) + ecs_index_range = np.where((ecs_cdf >= conf_low) + & (ecs_cdf <= conf_high))[0] ecs_range = ecs_lin[ecs_index_range] ecs_low = min(ecs_range) ecs_high = max(ecs_range) @@ -495,31 +507,15 @@ def main(cfg): input_data = ( select_metadata(cfg['input_data'].values(), short_name='tas') + select_metadata(cfg['input_data'].values(), short_name='tasa')) + input_data = sorted_metadata(input_data, ['short_name', 'exp', 'dataset']) if not input_data: raise ValueError("This diagnostics needs 'tas' or 'tasa' variable") - # Get tas data - tas_cubes = {} - tas_obs = [] - for (dataset, [data]) in group_metadata(input_data, 'dataset').items(): - cube = iris.load_cube(data['filename']) - cube = cube.aggregated_by('year', iris.analysis.MEAN) - tas_cubes[dataset] = cube - if data['project'] == 'OBS': - tas_obs.append(dataset) - - # Get time-dependent psi data - psi_cubes = {} - psi_obs = [] - psi_data = io.netcdf_to_metadata(cfg, pattern='psi_*.nc') - for (dataset, [data]) in group_metadata(psi_data, 'dataset').items(): 
- cube = iris.load_cube(data['filename']) - cube = cube.aggregated_by('year', iris.analysis.MEAN) - psi_cubes[dataset] = cube - if data['project'] == 'OBS': - psi_obs.append(dataset) + # Get time-dependent data + (tas_cubes, tas_obs) = get_tas(input_data) + (psi_cubes, psi_obs) = get_psi(cfg) - # Get psi, ECS and psi for models + # Get scalar psi, ECS and climate feedback parameter for models (psi_cube, ecs_cube, lambda_cube) = get_external_cubes(cfg) # Plots @@ -532,11 +528,14 @@ def main(cfg): obs_cube = psi_cubes[obs_name] plot_emergent_relationship(cfg, psi_cube, ecs_cube, lambda_cube, obs_cube) - plot_pdf(cfg, psi_cube, ecs_cube, obs_cube) - plot_cdf(cfg, psi_cube, ecs_cube, obs_cube) + (ecs_lin, ecs_pdf) = ec.target_pdf(psi_cube.data, ecs_cube.data, + np.mean(obs_cube.data), + np.std(obs_cube.data)) + plot_pdf(cfg, ecs_lin, ecs_pdf, ecs_cube, obs_name) + plot_cdf(cfg, ecs_lin, ecs_pdf, ecs_cube, obs_name) # Print ECS range - ecs_range = get_ecs_range(cfg, psi_cube, ecs_cube, obs_cube) + ecs_range = get_ecs_range(cfg, ecs_lin, ecs_pdf) logger.info("Observational constraint: Ψ = (%.2f ± %.2f) K", np.mean(obs_cube.data), np.std(obs_cube.data)) logger.info( diff --git a/esmvaltool/diag_scripts/emergent_constraints/ecs_cmip.cdl b/esmvaltool/diag_scripts/emergent_constraints/ecs_cmip.cdl new file mode 100644 index 0000000000..0f803d3e3c --- /dev/null +++ b/esmvaltool/diag_scripts/emergent_constraints/ecs_cmip.cdl @@ -0,0 +1,52 @@ +netcdf ECS { + +dimensions: + +model = 74, mchar = 17 ; + +variables: + +float ecs(model); +char model(model, mchar); + +// variable attributes + +ecs:long_name = "Equilibrium Climate Sensitivity" ; +ecs:standard_name = "ECS" ; +ecs:units = "K" ; +ecs:_FillValue = -999. ; + +model:named_coordinate = 1 ; + +:reference1_cmip3 = "Sherwood, S. C., S. Bony, J.-L. 
Dufresne: Spread in model climate sensitivity traced to atmospheric convective mixing, Nature, 505, 37-42, doi:10.1038/nature12829, 2014" ; +: reference2_cmip3 = "IPCC AR4, table 8.2" ; +: reference_cmip5 = "IPCC AR5" ; +: reference_cmip6 = "ESMValTool v2.0.0b2 (recipe_ecs.yml)" ; + +data: + +model = "ACCESS1-0", "ACCESS1-3", "bcc-csm1-1", "bcc-csm1-1-m", "BNU-ESM", "CanESM2", + "CCSM4", "CESM1-BGC", "CESM1-CAM5", "CESM1-WACCM", "CMCC-CESM", "CMCC-CM", + "CMCC-CMS", "CNRM-CM5", "CSIRO-Mk3-6-0", "FGOALS-g2", "FIO-ESM", "GFDL-CM3", + "GFDL-ESM2G", "GFDL-ESM2M", "GISS-E2-H", "GISS-E2-R", "HadGEM2-AO", + "HadGEM2-CC", "HadGEM2-ES", "inmcm4", "IPSL-CM5A-LR", "IPSL-CM5A-MR", + "IPSL-CM5B-LR", "MIROC-ESM", "MIROC-ESM-CHEM", "MIROC5", "MPI-ESM-LR", + "MPI-ESM-MR", "MRI-CGCM3", "NorESM1-M", "NorESM1-ME", + "bccr_bcm2_0", "cccma_cgcm3_1", "cccma_cgcm3_1_t63", "cnrm_cm3", + "csiro_mk3_0", "csiro_mk3_5", "gfdl_cm2_0", "gfdl_cm2_1", "giss_aom", + "giss_model_e_h", "giss_model_e_r", "iap_fgoals1_0_g", "ingv_echam4", + "inmcm3_0", "ipsl_cm4", "miroc3_2_hires", "miroc3_2_medres", + "mpi_echam5", "mri_cgcm2_3_2a", "ncar_ccsm3_0", "ncar_pcm1", + "ukmo_hadcm3", "ukmo_hadgem1", + "BCC-CSM2-MR", "BCC-ESM1", "CAMS-CSM1-0", "CESM2", "CESM2-WACCM", "CNRM-CM6-1", + "CNRM-ESM2-1", "GFDL-CM4", "GISS-E2-1-G", "GISS-E2-1-H", "IPSL-CM6A-LR", + "MIROC6", "MPI-ESM1-2-HR", "MRI-ESM2-0" ; + + +ecs = 3.8, -999., 2.8, 2.9, 4.1, 3.7, 2.9, -999., -999., -999., -999., -999., -999., 3.3, + 4.1, -999., -999., 4, 2.4, 2.4, 2.3, 2.1, -999., -999., 4.6, 2.1, 4.1, -999., 2.6, + 4.7, -999., 2.7, 3.6, -999., 2.6, 2.8, -999., + -999., 3.4, 3.4, -999., 3.1, -999., 2.9, 3.4, -999., 2.7, 2.7, 2.3, -999., 2.1, + 4.4, 4.3, 4.0, 3.4, 3.2, 2.7, 2.1, 3.3, 4.4, + 3.04, 3.26, 2.31, 6.29, 4.75, 4.83, 4.76, 3.87, 2.72, 3.11, 4.55, 2.61, 2.98, 3.15 ; +} diff --git a/esmvaltool/diag_scripts/emergent_constraints/ecs_cmip.nc b/esmvaltool/diag_scripts/emergent_constraints/ecs_cmip.nc new file mode 100644 index 0000000000..82c63fbfcb Binary files /dev/null and b/esmvaltool/diag_scripts/emergent_constraints/ecs_cmip.nc differ diff --git a/esmvaltool/diag_scripts/emergent_constraints/ecs_scatter.ncl b/esmvaltool/diag_scripts/emergent_constraints/ecs_scatter.ncl new file mode 100644 index 0000000000..a66e987c96 --- /dev/null +++ b/esmvaltool/diag_scripts/emergent_constraints/ecs_scatter.ncl @@ -0,0 +1,1949 @@ +; ECS_SCATTER +; ############################################################################# +; Author: Axel Lauer (DLR, Germany) +; PROJECT-NAME CRESCENDO +; ############################################################################# +; Description +; Calculates equilibrium climate sensitivity (ECS) versus: +; - southern ITCZ index (Tian, 2015) +; - tropical mid-tropospheric humidity asymmetry index (Tian, 2015) +; - lower tropospheric mixing index (LTMI) (Sherwood et al., 2014) +; - covariance of shortwave cloud reflection (Brient and Schneider, 2016) +; - climatological Hadley cell extent (Lipat et al., 2017) +; +; References: +; - Brient, F., and T. Schneider, J. Climate, 29, 5821-5835, +; doi: 10.1175/JCLIM-D-15-0897.1 (2016) +; - Lipat, B. R., et al., Geophys. Res. Lett., 44, 5739-5748, +; doi: 10.1002/2017GL073151 (2017) +; - Sherwood et al., nature, 505, 37-42, doi: 10.1038/nature12829 (2014) +; - Tian, B., Geophys. Res. 
Lett., 42, 4133-4141,
+;     doi: 10.1002/2015GL064119 (2015)
+;
+; Required diag_script_info attributes (diagnostic specific)
+;     diag: emergent constraint to calculate ("itczidx", "humidx", "ltmi",
+;           "covrefl", "shhc", "sherwood_s", "sherwood_d")
+;     ecs_file: path + file name of netCDF containing precalculated
+;               ECS values (only necessary if 'output_diag_only' is not True)
+;
+; Optional diag_script_info attributes (diagnostic specific)
+;     calcmm: - False (default) = do not calculate multi-model mean
+;             - True = calculate multi-model mean
+;     legend_outside: True, False
+;     output_diag_only: - False (default) = output full emergent constraint
+;                         (X and Y axis, plots)
+;                       - True = output only X axis (no plots)
+;     output_attributes: Additional attributes for the output files
+;     output_models_only: Only write models (no reference datasets) to netcdf
+;                         files.
+;     predef_minmax: True, False
+;     styleset: "CMIP5" (if not set, ECS_SCATTER will create a color table
+;               and symbols for plotting)
+;     suffix: string to add to output filenames (e.g. "cmip3")
+;
+; Required variable attributes
+;     reference_dataset: name of reference data set
+;
+; Optional variable_info attributes
+;     none
+;
+; Caveats
+;     none
+;
+; Modification history
+;     20200102-schlund_manuel: added Sherwood S and D index.
+;     20190226-lauer_axel: added output of provenance (v2.0)
+;     20181022-lauer_axel: code rewritten for ESMValTool v2.0
+;     20170818-lauer_axel: preparation of code for reporting (tags)
+;     20170810-lauer_axel: added emergent constraint "covariance of shortwave
+;                          cloud reflection" (Brient and Schneider, 2016)
+;     20161007-lauer_axel: written.
+;
+; #############################################################################
+; List of functions
+;     function covrefl: covariance of shortwave cloud reflectivity
+;     function humidx: tropical mid-tropospheric humidity asymmetry index
+;     function itczidx: southern ITCZ index
+;     function ltmi: lower tropospheric mixing index (LTMI)
+;     function sherwood_d: Sherwood D index
+;     function sherwood_s: Sherwood S index
+;     function shhc: Southern hemisphere Hadley cell extent
+; #############################################################################
+
+load "$diag_scripts/../interface_scripts/interface.ncl"
+
+load "$diag_scripts/shared/statistics.ncl"
+load "$diag_scripts/shared/plot/style.ncl"
+load "$diag_scripts/shared/plot/contour_maps.ncl"
+load "$diag_scripts/shared/plot/scatterplot.ncl"
+
+; =============================================================================
+; Function covrefl
+; Calculate covariance of shortwave cloud reflectivity.
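+;
+; Algorithm (as implemented below): tropical low-cloud (TLC) regions are
+; identified as the tropical (30S-30N) ocean grid cells whose 500 hPa
+; relative humidity lies in the lowest quartile of each monthly field;
+; the shortwave cloud reflectivity alpha = -100 * SWCRE / rsdt (in %,
+; with SWCRE = rsutcs - rsut) is area-averaged over these regions, and
+; the regression slope of the deseasonalized alpha time series on the
+; deseasonalized SST time series is returned.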
+; +; Arguments +; - dataset: names of datasets (must be of same size as variables) +; - variables: names of available variables (must be of same size as dataset) +; - ldebug: output debug data as netCDF (true, false) +; +; Reference +; - Brient and Schneider (2016) +; ============================================================================= + +function covrefl(dataset[*]:string, variables[*]:string, ldebug:logical) +begin + ; get indices associated with specific variables + ; sst is special since it can be "ts" or alternatively "tos" + sst_idx = ind(variables.eq."ts") + if (ismissing(sst_idx)) then + sst_idx = ind(variables.eq."tos") + end if + hur_idx = ind(variables.eq."hur") + rsut_idx = ind(variables.eq."rsut") + rsutcs_idx = ind(variables.eq."rsutcs") + rsdt_idx = ind(variables.eq."rsdt") + + all_idx = (/sst_idx, hur_idx, rsut_idx, rsutcs_idx, rsdt_idx/) + + ; ------------------------------------------------------------------- + ; if needed, regrid all variables to common grid + ; (e.g. needed if observations are taken from different sources) + ; for definition of the common grid, the grid used for relative + ; humidity is used + ; ------------------------------------------------------------------- + + ; get grid of variable 'relative humidity' (hur) + + atts = True + atts@short_name = variables(hur_idx) + atts@dataset = dataset(hur_idx) + info = select_metadata_by_atts(input_file_info, atts) + delete(atts) + + rh500 = read_data(info[0]) + delete(info) + + if (.not.isatt(rh500, "_FillValue")) then + rh500@_FillValue = -999. + end if + + tsteps0 = dimsizes(rh500&time) + + do iv = 0, dimsizes(variables) - 1 + + ; relative humidity has already been read and does not need to be + ; processed because rh500 defines the reference grid + + if (iv.eq.hur_idx) then + continue + end if + + ; do not process any (possibly present) variables that we do not need + ; for the calculations + + if (.not.any(all_idx .eq. iv)) then + continue + end if + + ; extract data of variable #'iv' + atts = True + atts@short_name = variables(iv) + atts@dataset = dataset(iv) + info = select_metadata_by_atts(input_file_info, atts) + delete(atts) + + var = read_data(info[0]) + delete(info) + + ; determine dimensions to regrid + xregrid = False ; lon + yregrid = False ; lat + dnames = getVarDimNames(var) + if (any(dnames.eq."lon")) then + xregrid = True + if (dimsizes(var&lon).eq.dimsizes(rh500&lon)) then + if (max(abs(var&lon - rh500&lon)).lt.1.0e-3) then + xregrid = False + end if + end if + end if + if (any(dnames.eq."lat")) then + yregrid = True + if (dimsizes(var&lat).eq.dimsizes(rh500&lat)) then + if (max(abs(var&lat - rh500&lat)).lt.1.0e-3) then + yregrid = False + end if + end if + end if + ; regrid in the horizontal (if necessary) + if (xregrid.and.yregrid) then + var_hreg = linint2_Wrap(var&lon, var&lat, var, True, \ + rh500&lon, rh500&lat, 0) + else if (xregrid.and..not.yregrid) then + dx = ind(dnames.eq."lon") + var_hreg = linint1_n_Wrap(var&lon, var, True, rh500&lon, 0, dx) + else if (.not.xregrid.and.yregrid) then + dx = ind(dnames.eq."lat") + var_hreg = linint1_n_Wrap(var&lat, var, False, rh500&lat, 0, dx) + else if (.not.xregrid .and. 
.not.yregrid) then
+      var_hreg = var
+    end if
+    end if
+    end if
+    end if
+
+    delete(var)
+    delete(dnames)
+
+    ; check if number of time steps are identical for all variables
+
+    tsteps = dimsizes(var_hreg&time)
+    if (tsteps.ne.tsteps0) then
+      infostr = variables(hur_idx) + ": " + sprinti("%i", tsteps0) + ", " \
+                + variables(iv) + ": " + sprinti("%i", tsteps)
+      model1 = dataset(hur_idx)
+      model2 = dataset(iv)
+      error_msg("f", DIAG_SCRIPT, "", "number of time steps for " \
+                + "different variables not consistent (model = " \
+                + model1 + ", " + model2 + "). " + infostr)
+    end if
+
+    ; rename (regridded) field to something meaningful
+
+    if (iv.eq.rsut_idx) then
+      rsut = var_hreg
+    end if
+    if (iv.eq.rsutcs_idx) then
+      rsutcs = var_hreg
+    end if
+    if (iv.eq.rsdt_idx) then
+      rsdt = var_hreg
+    end if
+    if (iv.eq.sst_idx) then
+      sst = var_hreg
+    end if
+
+    delete(var_hreg)
+
+  end do  ; iv-loop (regridding if needed)
+
+  ; -------------------------------------------------------------------
+
+  ; calculate shortwave cloud forcing
+
+  sw_cre = rsutcs
+  sw_cre = sw_cre - rsut
+
+  delete(rsut)
+  delete(rsutcs)
+
+  ; generate land-/sea-mask
+  a = addfile("$NCARG_ROOT/lib/ncarg/data/cdf/landsea.nc", "r")
+  lsdata = a->LSMASK
+  lsm0 = landsea_mask(lsdata, rh500&lat, rh500&lon)
+  lsm = where(lsm0.eq.0, 0, lsm0@_FillValue)
+  copy_VarMeta(lsm0, lsm)
+  delete(lsdata)
+  delete(lsm0)
+
+  ; lower quartile of the mid-tropospheric (500 hPa) relative humidity
+  ; over the tropical ocean, i.e. 30S-30N
+  rh500 = rh500 + conform(rh500, lsm, (/1, 2/))
+  delete(lsm)
+  rh500(:, {-90.:-30.}, :) = rh500@_FillValue
+  rh500(:, {30.:90.}, :) = rh500@_FillValue
+
+  if (ldebug) then
+    tmp = get_unique_values(dataset)
+    if (dimsizes(tmp) .eq. 1) then
+      refstr = tmp
+    else
+      refstr = tmp(0)
+      do i = 1, dimsizes(tmp) - 1
+        refstr = refstr + "_" + tmp(i)
+      end do
+    end if
+    fname = config_user_info@work_dir + "/debug_" + refstr + ".nc"
+    system("rm " + fname)
+    debugfile = addfile(fname, "c")
+    debugout = time_operations(rh500, -1, -1, "average", "annualclim", True)
+    statb = stat_dispersion(debugout, False)
+    q25 = statb(6)
+    debugout = where(debugout.le.q25, 0.0, debugout@_FillValue)
+    debugfile->mask = debugout
+    debugfile->rh500mask = rh500
+  end if
+
+  dims = dimsizes(rh500)
+  do t = 0, dims(0) - 1
+    statb = stat_dispersion(rh500(t, :, :), False)
+    q25 = statb(6)
+    ; mask all grid cells with relative humidity values larger than the
+    ; lower quartile
+    rh500(t, :, :) = where(rh500(t, :, :).le.q25, 0.0, rh500@_FillValue)
+  end do
+  delete(dims)
+
+  if (ldebug) then
+    debugfile->rh500mask_final = rh500
+  end if
+
+  ; apply rh500 mask to shortwave cloud forcing, solar insolation and sst
+  sw_cre = sw_cre + rh500(:, :, :)
+  sst = sst + rh500(:, :, :)
+  rsdt = rsdt + rh500(:, :, :)
+  delete(rh500)
+
+  if (ldebug) then
+    debugfile->sst = sst
+    debugfile->sw_cre = sw_cre
+    debugfile->rsdt = rsdt
+  end if
+
+  ; calculate averages (missing values will be ignored and grid boxes
+  ; area-weighted) over TLC areas (determined by rh500 mask)
+  t1 = area_operations(sw_cre, -90., 90., 0., 360., "average", True)
+  t2 = area_operations(rsdt, -90., 90., 0., 360., "average", True)
+  t3 = area_operations(sst, -90., 90., 0., 360., "average", True)
+
+  delete(sw_cre)
+  delete(rsdt)
+  delete(sst)
+
+  ; shortwave cloud reflectivity (in %)
+  alpha = t1
+  alpha = -100. * alpha / t2
+
+  ; deseasonalize time series
+  ; calculate climatological seasonal cycles
+  seasclim0 = time_operations(alpha, -1, -1, "average", "monthlyclim", True)
+  seasclim1 = time_operations(t3, -1, -1, "average", "monthlyclim", True)
+
+  ; subtract climatological seasonal cycles
+
+  alpha_dseas = alpha
+  sst_dseas = t3
+
+  do i = 0, 11
+    alpha_dseas(i::12) = alpha_dseas(i::12) - seasclim0(i)
+    sst_dseas(i::12) = sst_dseas(i::12) - seasclim1(i)
+  end do
+
+  delete(seasclim0)
+  delete(seasclim1)
+  delete(alpha)
+
+  ; calculate regression coefficient alpha_dseas vs. sst_dseas
+  rc = regline(sst_dseas, alpha_dseas)
+
+  if (ldebug) then
+    debugfile->alpha_dseas = alpha_dseas
+    debugfile->sst_dseas = sst_dseas
+    t1 = -t1 / t2
+    debugfile->alpha = t1
+  end if
+
+  delete(t1)
+  delete(t2)
+  delete(t3)
+
+  delete(alpha_dseas)
+  delete(sst_dseas)
+
+  return(rc)
+end
+
+; =============================================================================
+; Function humidx
+; Calculate tropical mid-tropospheric humidity asymmetry index.
+;
+; Arguments
+;   - dataset: name of dataset
+;   - variables: names of available variables
+;   - ldebug: output debug data as netCDF (true, false)
+;   - nh_ref: average NH tropical Pacific, reference dataset
+;   - sh_ref: average SH tropical Pacific, reference dataset
+;
+; Reference
+;   - Tian (2015)
+; =============================================================================
+
+function humidx(dataset:string, variables[*]:string, ldebug:logical, \
+                nh_ref:numeric, sh_ref:numeric)
+begin
+  hus_idx = ind(variables.eq."hus")
+
+  atts = True
+  atts@short_name = variables(hus_idx)
+  atts@dataset = dataset
+  info = select_metadata_by_atts(input_file_info, atts)
+  delete(atts)
+
+  A0 = read_data(info[0])
+
+  ; calculate climatological annual mean
+  q500 = time_operations(A0, -1, -1, "average", "annualclim", True)
+
+  ; average SH tropical Pacific (120E-80W, 30S-0)
+  sh_avg = area_operations(q500, -30., 0., 120., 280., "average", True)
+  ; average NH tropical Pacific (120E-80W, 0-20N)
+  nh_avg = area_operations(q500, 0., 20., 120., 280., "average", True)
+
+  delete(q500)
+
+  result = (sh_avg - sh_ref) / sh_ref * 100.0 \
+           - (nh_avg - nh_ref) / nh_ref * 100.0
+  result@standard_name = "Tropical midtropospheric humidity asymmetry index"
+  result@units = "%"
+
+  ; process error estimate if available (for observations)
+
+  minmax = new((/2/), float)
+  minmax@standard_name = result@standard_name + " error estimate"
+  minmax@units = result@units
+
+  ierr = ind(variables.eq."husStderr")
+
+  if (.not.ismissing(ierr)) then
+    atts = True
+    atts@short_name = variables(ierr)
+    atts@dataset = dataset
+    info = select_metadata_by_atts(input_file_info, atts)
+    delete(atts)
+    n = ListCount(info)
+
+    if (n .eq.
1) then + A0err = read_data(info[0]) + delete(info) + + ; ********************* + ; *** mean + stderr *** + ; ********************* + + A0 = A0 + A0err + + ; calculate climatological annual mean + q500 = time_operations(A0, -1, -1, "average", "annualclim", True) + + ; average SH tropical Pacific (120E-80W, 30S-0) + sh1 = area_operations(q500, -30., 0., 120., 280., "average", True) + ; average NH tropical Pacific (120E-80W, 0-20N) + nh1 = area_operations(q500, 0., 20., 120., 280., "average", True) + delete(q500) + + minmax(1) = (sh1 - sh_ref) / sh_ref * 100.0 \ + - (nh1 - nh_ref) / nh_ref * 100.0 + + ; ********************* + ; *** mean - stderr *** + ; ********************* + + A0 = A0 - 2.0 * A0err + delete(A0err) + A0 = where(A0.ge.0.0, A0, 0.0) + + ; calculate climatological annual mean + q500 = time_operations(A0, -1, -1, "average", "annualclim", True) + + ; average SH tropical Pacific (120E-80W, 30S-0) + sh0 = area_operations(q500, -30., 0., 120., 280., "average", True) + ; average NH tropical Pacific (120E-80W, 0-20N) + nh0 = area_operations(q500, 0., 20., 120., 280., "average", True) + delete(q500) + + minmax(0) = (sh0 - sh_ref) / sh_ref * 100.0 \ + - (nh0 - nh_ref) / nh_ref * 100.0 + end if ; if dataset for error estimate available + end if ; if error variable present + + delete(A0) + + return([/ result, nh_avg, sh_avg, minmax /]) +end + +; ============================================================================= +; Function itczidx +; Calculate southern ITCZ index. +; +; Arguments +; - dataset: name of dataset +; - variables: names of available variables +; - ldebug: output debug data as netCDF (true, false) +; - SEPavg_ref: average over southeastern Pacific for reference dataset +; in mm/day +; +; Reference +; - Tian (2015) +; ============================================================================= + +function itczidx(dataset:string, variables[*]:string, ldebug:logical, \ + SEPavg_ref:numeric) +begin + pr_idx = ind(variables.eq."pr") + + atts = True + atts@short_name = variables(pr_idx) + atts@dataset = dataset + info = select_metadata_by_atts(input_file_info, atts) + delete(atts) + + pr = read_data(info[0]) + delete(info) + + ; average over time (annual mean) + pr_mean = time_operations(pr, -1, -1, "average", "annualclim", True) + + ; average over southeastern Pacific (150W-100W, 30S-0) + SEPavg = area_operations(pr_mean, -30., 0., 210., 260., "average", True) + delete(pr_mean) + SEPavg = SEPavg * 3600.0 * 24.0 ; convert from kg/m2/s to mm/day + result = SEPavg - SEPavg_ref + + result@standard_name = "Southern ITCZ index" + result@units = "mm day-1" + + ; process error estimate if available (for observations) + + minmax = new((/2/), float) + minmax@standard_name = result@standard_name + " error estimate" + minmax@units = result@units + + ierr = ind(variables.eq."prStderr") + + if (.not.ismissing(ierr)) then + atts = True + atts@short_name = variables(ierr) + atts@dataset = dataset + info = select_metadata_by_atts(input_file_info, atts) + delete(atts) + n = ListCount(info) + + if (n .eq. 
1) then
+      pr_err = read_data(info[0])
+      delete(info)
+
+      ; mean + stderr
+      pr = pr + pr_err
+      tmp1 = time_operations(pr, -1, -1, "average", "annualclim", True)
+      minmax(1) = area_operations(tmp1, -30., 0., 210., 260., "average", \
+                                  True)
+      delete(tmp1)
+
+      ; max(0, mean - stderr)
+      pr = pr - 2.0 * pr_err
+      delete(pr_err)
+      pr = where(pr.ge.0.0, pr, 0.0)
+      tmp0 = time_operations(pr, -1, -1, "average", "annualclim", True)
+      minmax(0) = area_operations(tmp0, -30., 0., 210., 260., "average", \
+                                  True)
+      delete(tmp0)
+
+      minmax = minmax * 3600.0 * 24.0  ; convert from kg/m2/s to mm/day
+      minmax = minmax - SEPavg_ref
+
+    end if  ; if dataset for error estimate available
+  end if  ; if error variable present
+
+  delete(pr)
+
+  return([/ result, minmax /])
+
+end
+
+; =============================================================================
+; Function sherwood_d
+; Calculate Sherwood D index.
+;
+; Arguments
+;   - dataset: names of datasets
+;   - variables: names of available variables
+;   - ldebug: output debug data as netCDF (true, false)
+;
+; Reference
+;   - Sherwood et al. (2014)
+; =============================================================================
+
+function sherwood_d(dataset[*]:string, variables[*]:string, ldebug:logical)
+begin
+  wap_idx = ind(variables.eq."wap")
+
+  ; vertical velocity (at 850, 700, 600, 500, 400 hPa)
+
+  atts = True
+  atts@short_name = variables(wap_idx)
+  atts@dataset = dataset(wap_idx)
+  info = select_metadata_by_atts(input_file_info, atts)
+  delete(atts)
+
+  omega = read_data(info[0])
+  delete(info)
+
+  omega_mean = time_operations(omega, -1, -1, "average", "annualclim", True)
+
+  ; grid boxes with upward motion at 500 hPa (annual mean)
+  ascend = where(omega_mean({50000}, :, :).lt.0, \
+                 -1.0 * omega_mean({50000}, :, :), omega_mean@_FillValue)
+  ascend@_FillValue = omega_mean@_FillValue
+  ascend!0 = "lat"
+  ascend!1 = "lon"
+  ascend&lat = omega_mean&lat
+  ascend&lon = omega_mean&lon
+
+  ; generate land-/sea-mask
+  a = addfile("$NCARG_ROOT/lib/ncarg/data/cdf/landsea.nc", "r")
+  lsdata = a->LSMASK
+  lsm0 = landsea_mask(lsdata, omega_mean&lat, omega_mean&lon)
+  lsm = where(lsm0.eq.0, 0, lsm0@_FillValue)
+  copy_VarMeta(lsm0, lsm)
+  delete(lsdata)
+  delete(lsm0)
+  delete(omega_mean)
+
+  ; upper quartile of the annual mean mid-tropospheric (500 hPa) ascent rate
+  ; in ascending regions (over the tropical ocean 30S-30N only)
+
+  ascend = ascend + lsm
+  ascend({-90.:-30.}, :) = ascend@_FillValue
+  ascend({30.:90.}, :) = ascend@_FillValue
+
+  statb = stat_dispersion(ascend, False)
+  q25 = statb(10)
+
+  ; mask all grid cells with ascent rates lower than the upper quartile
+  omgmask = where(ascend.ge.q25, 0.0, ascend@_FillValue)
+  omgmask@_FillValue = ascend@_FillValue
+
+  ; average omega at 850 hPa, 700 hPa
+  omega1 = (omega(:, {85000}, :, :) + omega(:, {70000}, :, :)) / 2.0
+  ; average omega at 600 hPa, 500 hPa, 400 hPa
+  omega2 = (omega(:, {60000}, :, :) + omega(:, {50000}, :, :) \
+            + omega(:, {40000}, :, :)) / 3.0
+  delta = omega2 - omega1
+
+  D1 = where((delta.gt.0.).and.(omega1.lt.0.), delta, 0.)
+  D2 = where(omega2.lt.0., -1. * omega2, 0.)
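+
+  ; D1 is nonzero where air rises at low levels (omega1 < 0) but the
+  ; ascent weakens with height (delta > 0), i.e. where there is outflow
+  ; below the mid-troposphere; D2 measures the strength of mid- to
+  ; upper-tropospheric ascent. The D index computed below is the ratio
+  ; of their averages over tropical ocean regions.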
+  D1!0 = "time"
+  D1!1 = "lat"
+  D1!2 = "lon"
+  D1&time = omega&time
+  D1&lat = omega&lat
+  D1&lon = omega&lon
+
+  D2!0 = "time"
+  D2!1 = "lat"
+  D2!2 = "lon"
+  D2&time = omega&time
+  D2&lat = omega&lat
+  D2&lon = omega&lon
+
+  ; average over tropical ocean regions: 160W-30E, 30S-30N
+  ; (ocean grid cells only)
+
+  lsmC = conform_dims(dimsizes(D1), lsm, (/1, 2/))
+  D1 = D1 + lsmC
+  D2 = D2 + lsmC
+  delete(lsmC)
+
+  D1_avg = avg(area_operations(D1, -30., 30., 200., 30., "average", True))
+  D2_avg = avg(area_operations(D2, -30., 30., 200., 30., "average", True))
+
+  if (D2_avg.ne.0.) then
+    result = D1_avg / D2_avg
+  else
+    result = default_fillvalue("float")
+  end if
+
+  result@standard_name = "Sherwood_D"
+  result@long_name = "Sherwood D index"
+  result@units = "1"
+
+  ; optionally write debug output
+
+  if (ldebug) then
+    tmp = get_unique_values(dataset)
+    if (dimsizes(tmp) .eq. 1) then
+      refstr = tmp
+    else
+      refstr = tmp(0)
+      do i = 1, dimsizes(tmp) - 1
+        refstr = refstr + "_" + tmp(i)
+      end do
+    end if
+    fname = config_user_info@work_dir + "/debug_D_" + refstr + ".nc"
+    system("rm " + fname)
+    debugfile = addfile(fname, "c")
+    debugfile->omgmask = omgmask
+    debugfile->ascend = ascend
+    debugfile->lsm = lsm
+    debugfile->delta = delta
+    debugfile->D1 = D1
+    debugfile->D2 = D2
+  end if
+
+  return(result)
+end
+
+; =============================================================================
+; Function sherwood_s
+; Calculate Sherwood S index.
+;
+; Arguments
+;   - dataset: names of datasets
+;   - variables: names of available variables
+;   - ldebug: output debug data as netCDF (true, false)
+;
+; Reference
+;   - Sherwood et al. (2014)
+; =============================================================================
+
+function sherwood_s(dataset[*]:string, variables[*]:string, ldebug:logical)
+begin
+  hur_idx = ind(variables.eq."hur")
+  ta_idx = ind(variables.eq."ta")
+  wap_idx = ind(variables.eq."wap")
+
+  ; vertical velocity (at 850, 700, 600, 500, 400 hPa)
+
+  atts = True
+  atts@short_name = variables(wap_idx)
+  atts@dataset = dataset(wap_idx)
+  info = select_metadata_by_atts(input_file_info, atts)
+  delete(atts)
+
+  omega = read_data(info[0])
+  delete(info)
+
+  omega_mean = time_operations(omega, -1, -1, "average", "annualclim", True)
+
+  ; grid boxes with upward motion at 500 hPa (annual mean)
+  ascend = where(omega_mean({50000}, :, :).lt.0, \
+                 -1.0 * omega_mean({50000}, :, :), omega_mean@_FillValue)
+  ascend@_FillValue = omega_mean@_FillValue
+  ascend!0 = "lat"
+  ascend!1 = "lon"
+  ascend&lat = omega_mean&lat
+  ascend&lon = omega_mean&lon
+
+  ; generate land-/sea-mask
+  a = addfile("$NCARG_ROOT/lib/ncarg/data/cdf/landsea.nc", "r")
+  lsdata = a->LSMASK
+  lsm0 = landsea_mask(lsdata, omega_mean&lat, omega_mean&lon)
+  lsm = where(lsm0.eq.0, 0, lsm0@_FillValue)
+  copy_VarMeta(lsm0, lsm)
+  delete(lsdata)
+  delete(lsm0)
+  delete(omega_mean)
+
+  ; upper quartile of the annual mean mid-tropospheric (500 hPa) ascent rate
+  ; in ascending regions (over the tropical ocean 30S-30N only)
+
+  ascend = ascend + lsm
+  ascend({-90.:-30.}, :) = ascend@_FillValue
+  ascend({30.:90.}, :) = ascend@_FillValue
+
+  statb = stat_dispersion(ascend, False)
+  q25 = statb(10)
+
+  ; mask all grid cells with ascent rates lower than the upper quartile
+  omgmask = where(ascend.ge.q25, 0.0, ascend@_FillValue)
+  omgmask@_FillValue = ascend@_FillValue
+
+  ; temperature
+  atts = True
+  atts@short_name = variables(ta_idx)
+  atts@dataset = dataset(ta_idx)
+  info = select_metadata_by_atts(input_file_info, atts)
+  delete(atts)
+
+  ta = read_data(info[0])
+  delete(info)
+
+  ta_mean = time_operations(ta, -1, -1, "average", "annualclim", True)
+  delete(ta)
+
+  ; relative humidity
+  atts = True
+  atts@short_name = variables(hur_idx)
+  atts@dataset = dataset(hur_idx)
+  info = select_metadata_by_atts(input_file_info, atts)
+  delete(atts)
+
+  rh = read_data(info[0])
+  delete(info)
+
+  rh_mean = time_operations(rh, -1, -1, "average", "annualclim", True)
+  delete(rh)
+
+  ; -------------------------------------------------------------------------
+  ; if needed, regrid (horizontally) all fields to common grid
+  ; (needed since some models provide variables on inconsistent grids)
+  ; for definition of the common grid, the grid used for omega (mask) is used
+  ; -------------------------------------------------------------------------
+
+  do i = 0, 1
+    if (i.eq.0) then
+      var = ta_mean
+      delete(ta_mean)
+    else
+      var = rh_mean
+      delete(rh_mean)
+    end if
+
+    xregrid = False  ; lon
+    yregrid = False  ; lat
+    dnames = getVarDimNames(var)
+    if (any(dnames.eq."lon")) then
+      xregrid = True
+      if (dimsizes(var&lon).eq.dimsizes(omega&lon)) then
+        if (max(abs(var&lon - omega&lon)).lt.1.0d-5) then
+          xregrid = False
+        end if
+      end if
+    end if
+    if (any(dnames.eq."lat")) then
+      yregrid = True
+      if (dimsizes(var&lat).eq.dimsizes(omega&lat)) then
+        if (max(abs(var&lat - omega&lat)).lt.1.0d-5) then
+          yregrid = False
+        end if
+      end if
+    end if
+    ; regrid horizontally (if necessary)
+    if (xregrid.and.yregrid) then
+      var_hreg = linint2_Wrap(var&lon, var&lat, var, True, \
+                              omega&lon, omega&lat, 0)
+    else if (xregrid.and..not.yregrid) then
+      dx = ind(dnames.eq."lon")
+      var_hreg = linint1_n_Wrap(var&lon, var, True, omega&lon, 0, dx)
+    else if (.not.xregrid.and.yregrid) then
+      dx = ind(dnames.eq."lat")
+      var_hreg = linint1_n_Wrap(var&lat, var, False, omega&lat, 0, dx)
+    else if (.not.xregrid .and. .not.yregrid) then
+      var_hreg = var
+    end if
+    end if
+    end if
+    end if
+
+    delete(var)
+    delete(dnames)
+
+    if (i.eq.0) then
+      ta_mean = var_hreg
+    else
+      rh_mean = var_hreg
+    end if
+
+    delete(var_hreg)
+  end do
+
+  ; -------------------------------------------------------------------------
+
+  ; calculate "S" = small-scale lower-tropospheric mixing
+  ; (CMOR standard unit for relative humidity (hur) is percent (%))
+
+  ; ((rh(700) - rh(850)) / 100% - (T(700) - T(850)) / 9) / 2
+
+  S = ((rh_mean({70000}, :, :) - rh_mean({85000}, :, :)) / 100.0 \
+       - (ta_mean({70000}, :, :) - ta_mean({85000}, :, :)) / 9.0) / 2.0
+
+  ; apply updraft mask + land-/sea-mask
+
+  S = S + omgmask + lsm
+  S!0 = "lat"
+  S!1 = "lon"
+  S&lat = ta_mean&lat
+  S&lon = ta_mean&lon
+
+  result = area_operations(S, -90., 90., 0., 360., "average", True)
+
+  result@standard_name = "Sherwood_S"
+  result@long_name = "Sherwood S index"
+  result@units = "1"
+
+  ; optionally write debug output
+
+  if (ldebug) then
+    tmp = get_unique_values(dataset)
+    if (dimsizes(tmp) .eq. 1) then
+      refstr = tmp
+    else
+      refstr = tmp(0)
+      do i = 1, dimsizes(tmp) - 1
+        refstr = refstr + "_" + tmp(i)
+      end do
+    end if
+    fname = config_user_info@work_dir + "/debug_S_" + refstr + ".nc"
+    system("rm " + fname)
+    debugfile = addfile(fname, "c")
+    debugfile->S = S
+    debugfile->omgmask = omgmask
+    debugfile->ascend = ascend
+    debugfile->lsm = lsm
+  end if
+
+  return(result)
+end
+
+; =============================================================================
+; Function ltmi
+; Calculate lower tropospheric mixing index (LTMI) for one dataset.
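+; The index is simply the sum of the small-scale component S and the
+; large-scale component D computed by the two functions above:
+; LTMI = S + D (Sherwood et al., 2014).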
+;
+; Arguments
+;   - dataset: names of datasets
+;   - variables: names of available variables
+;   - ldebug: output debug data as netCDF (true, false)
+;
+; Reference
+;   - Sherwood et al. (2014)
+; =============================================================================
+
+function ltmi(dataset[*]:string, variables[*]:string, ldebug:logical)
+begin
+
+  D_idx = sherwood_d(dataset, variables, ldebug)
+  S_idx = sherwood_s(dataset, variables, ldebug)
+
+  result = D_idx + S_idx  ; lower tropospheric mixing index
+
+  result@standard_name = "LTMI"
+  result@long_name = "lower_tropospheric_mixing_index"
+  result@units = "1"
+  result@S = S_idx
+  result@D = D_idx
+
+  return(result)
+end
+
+; =============================================================================
+; Function shhc
+; Calculate Southern hemisphere Hadley cell extent for one dataset.
+;
+; Arguments
+;   - dataset: name of dataset
+;   - variables: names of available variables
+;   - ldebug: output debug data as netCDF (true, false)
+;
+; Reference
+;   - Lipat et al. (2017)
+; =============================================================================
+
+function shhc(dataset:string, variables[*]:string, ldebug:logical)
+begin
+  va_idx = ind(variables.eq."va")
+
+  atts = True
+  atts@short_name = variables(va_idx)
+  atts@dataset = dataset
+  info = select_metadata_by_atts(input_file_info, atts)
+  delete(atts)
+
+  va = read_data(info[0])
+  delete(info)
+
+  ; check units of meridional wind
+  if (va@units.ne."m s-1") then
+    error_msg("f", DIAG_SCRIPT, "", "meridional wind 'va' not in expected " \
+              + "units (m s-1)")
+  end if
+
+  ; average over DJF
+  va_seas = time_operations(va, -1, -1, "average", "DJF", True)
+
+  delete(va)
+
+  ; if needed, invert levels
+  n = dimsizes(va_seas&plev)
+  if (va_seas&plev(0) .gt. va_seas&plev(n-1)) then
+    tmp = va_seas(::-1, :, :)
+    tmp&plev = va_seas&plev(::-1)
+    delete(va_seas)
+    va_seas = tmp
+    delete(tmp)
+  end if
+  delete(n)
+
+  ; create dummy surface pressure field (meridional wind is on pressure levels)
+  n = dimsizes(va_seas)
+  ps = new((/n(1), n(2)/), float)
+  ps = 101325.  ; Pa
+  delete(n)
+
+  ; calculate zonal average stream function
+  ; note: pressure must be between 5 and 1005 hPa (exclusive) and
+  ;       monotonically increasing
+  i0 = ind((va_seas&plev .gt. 500.) .and. (va_seas&plev .lt. 100500.))
+  psi = zonal_mpsi_Wrap(va_seas(i0, :, :), va_seas&lat, va_seas&plev(i0), ps)
+  delete(i0)
+  delete(va_seas)
+  delete(ps)
+  ; 500 hPa stream function
+  psi500 = int2p_n_Wrap(psi&plev, psi, (/50000./), 1, 0)
+
+  ; optionally write debug output
+
+  if (ldebug) then
+    fname = config_user_info@work_dir + "/debug_" + dataset + ".nc"
+    system("rm " + fname)
+    debugfile = addfile(fname, "c")
+    debugfile->psi = psi
+  end if
+
+  delete(psi)
+
+  ; calculate Hadley cell (HC) edge:
+  ; - identify first two grid cells from the equator where psi500 changes
+  ;   sign (note: sign change from minus to plus as we are interested in the
+  ;   downward branch)
+  ; - interpolate linearly between these two grid points to find the latitude
+  ;   of the zero crossing
+
+  n = dimsizes(psi500&lat)
+  if (psi500&lat(0).le.psi500&lat(n-1)) then
+    istart = max(ind(psi500&lat .lt. 0.))
+    iend = 0
+    idelta = -1
+  else
+    istart = min(ind(psi500&lat .lt. 0.))
+    iend = n - 1
+    idelta = 1
+  end if
+  delete(n)
+
+  if (psi500(0, istart) .le. 0) then
+    sgn0 = -1
+  else
+    sgn0 = 1
+  end if
+
+  i = istart
+  do j = min((/istart, iend/)), max((/istart, iend/)) - 1
+    if (psi500(0, i) .le. 0) then
+      sgn1 = -1
+    else
+      sgn1 = 1
+    end if
+
+    if ((sgn0 .eq. -1) .and. (sgn1 .eq. 1)) then
+      break
+    end if
+
+    sgn0 = sgn1
+
+    i = i + idelta
+  end do
+
+  delta_y = tofloat(psi500(0, i) - psi500(0, i - idelta))
+  delta_x = tofloat(psi500&lat(i) - psi500&lat(i - idelta))
+
+  result = -1.0 * psi500(0, i - idelta) * delta_x / delta_y \
+           + tofloat(psi500&lat(i - idelta))
+
+  if (ldebug) then
+    debugfile->psi500 = psi500
+  end if
+
+  return(result)
+end
+
+; =============================================================================
+; =============================================================================
+; =============================================================================
+; main program
+; =============================================================================
+; =============================================================================
+; =============================================================================
+
+begin
+
+  enter_msg(DIAG_SCRIPT, "")
+
+  variables = metadata_att_as_array(input_file_info, "short_name")
+  variables := get_unique_values(variables)
+  dim_VAR = dimsizes(variables)
+
+  log_info("++++++++++++++++++++++++++++++++++++++++++")
+  log_info(DIAG_SCRIPT + " (var: " + variables + ")")
+  log_info("++++++++++++++++++++++++++++++++++++++++++")
+
+  tmp = metadata_att_as_array(input_file_info, "dataset")
+
+  names = get_unique_values(tmp)  ; note: get_unique_values sorts array!
+
+  delete(tmp)
+  dim_MOD = dimsizes(names)
+
+  infiles = metadata_att_as_array(input_file_info, "filename")
+  var0 = variables(0)
+
+  ; get selected diagnostic
+
+  if (isatt(diag_script_info, "diag")) then
+    diag = diag_script_info@diag
+  else
+    error_msg("f", DIAG_SCRIPT, "", "no diagnostic selected in recipe " + \
+              "('diag' not set).")
+  end if
+
+  caption = "Scatterplot of equilibrium climate sensitivity (ECS) vs. "
+
+  ; check that required variables are there
+
+  ; southern ITCZ index (Tian, Geophys. Res. Lett., 2015)
+
+  if (diag .eq. "itczidx") then
+    varlist = (/"pr"/)
+    caption = caption + "southern ITCZ index (Tian et al., 2015)."
+  end if
+
+  ; tropical mid-tropospheric humidity asymmetry
+  ; (Tian, Geophys. Res. Lett., 2015)
+  if (diag .eq. "humidx") then
+    varlist = (/"hus"/)
+    caption = caption + "tropical mid-tropospheric humidity asymmetry " \
+              + "(Tian et al., 2015)."
+  end if
+
+  ; lower tropospheric mixing index (LTMI) (Sherwood et al., Nature, 2014)
+  if (diag .eq. "ltmi") then
+    varlist = (/"ta", "hur", "wap"/)
+    caption = caption + "lower tropospheric mixing index (LTMI) " \
+              + "(Sherwood et al., 2014)."
+  end if
+
+  ; Sherwood D index (Sherwood et al., Nature, 2014)
+  if (diag .eq. "sherwood_d") then
+    varlist = (/"wap"/)
+    caption = caption + "Sherwood D index (Sherwood et al., 2014)."
+  end if
+
+  ; Sherwood S index (Sherwood et al., Nature, 2014)
+  if (diag .eq. "sherwood_s") then
+    varlist = (/"ta", "hur", "wap"/)
+    caption = caption + "Sherwood S index (Sherwood et al., 2014)."
+  end if
+
+  ; SH Hadley cell extent (SHHC) (Lipat et al., 2017, Geophys. Res. Lett.)
+  if (diag .eq. "shhc") then
+    varlist = (/"va"/)
+    caption = caption + "Southern Hemisphere Hadley cell extent " \
+              + "(Lipat et al., 2017)."
+  end if
+
+  ; covariance of shortwave cloud reflection
+  ; (Brient and Schneider, J. Climate, 2016)
+  if (diag .eq. "covrefl") then
+    varlist = (/"ts", "hur", "rsut", "rsutcs", "rsdt"/)
+    sst_idx = ind(variables.eq.varlist(0))
+    if (ismissing(sst_idx)) then
+      varlist(0) = "tos"
+    end if
+    caption = caption + "tropical low clouds (TLC) reflection feedback " \
+              + "(Brient and Schneider, 2016)."
+  end if
+
+  if (.not. isvar("varlist")) then
+    error_msg("f", DIAG_SCRIPT, "", "selected diagnostic not implemented")
+  end if
+
+  idx = new(dimsizes(varlist), integer)
+
+  do i = 0, dimsizes(varlist) - 1
+    idx(i) = ind(variables.eq.varlist(i))
+  end do
+
+  if (any(ismissing(idx))) then
+    errstr = "diagnostic " + diag + " requires the following variable(s): " \
+             + str_join(varlist, ", ")
+    error_msg("f", DIAG_SCRIPT, "", errstr)
+  end if
+
+  delete(idx)
+
+  ; make sure path for netcdf output exists
+  work_dir = config_user_info@work_dir + "/"
+  system("mkdir -p " + work_dir)
+
+  flag_mean = False
+  mmm_ind = -1
+  if (isatt(diag_script_info, "calcmm")) then
+    if (diag_script_info@calcmm) then
+      flag_mean = True
+      modelsatname = array_append_record(names, \
+                                         (/"MultiModelMean"/), 0)
+      mmm_ind = dim_MOD
+      dim_MOD = dim_MOD + 1
+      delete(names)
+      names = modelsatname
+      delete(modelsatname)
+    end if
+  end if
+
+  ; reference dataset(s)
+
+  refname = new(dim_VAR, string)
+  do i = 0, dim_VAR - 1
+    short_name = variables(i)
+    short_name_data = select_metadata_by_name(input_file_info, short_name)
+    do idat = 0, ListCount(short_name_data) - 1
+      if (.not. isatt(short_name_data[idat], "reference_dataset")) then
+        error_msg("f", DIAG_SCRIPT, "", "reference dataset not defined " + \
+                  "for dataset " + short_name_data[idat]@filename)
+      end if
+      if (isdefined("short_name_ref")) then
+        if (short_name_ref .ne. short_name_data[idat]@reference_dataset) then
+          error_msg("f", DIAG_SCRIPT, "", "Differing reference datasets " + \
+                    "for variable " + short_name + ", got " + short_name_ref + \
+                    " and " + short_name_data[idat]@reference_dataset)
+        end if
+      else
+        short_name_ref = short_name_data[idat]@reference_dataset
+        log_info("Using reference dataset " + short_name_ref + " for " + \
+                 "variable " + short_name)
+      end if
+    end do
+    refname(i) = short_name_ref
+    delete(short_name_ref)
+  end do
+  dim_REF = dimsizes(refname)
+
+  ; dataset indices with no reference dataset(s) and no multi-model mean
+
+  model_ind = ispan(0, dim_MOD - 1, 1)
+  n = dimsizes(refname)
+  ref_ind = ispan(0, n - 1, 1)
+
+  do i = 0, n - 1
+    ri = ind(names .eq. refname(i))
+    model_ind(ri) = -1
+    ref_ind(i) = ri
+  end do
+
+  model_ind_woref = ind(model_ind.ge.0)
+  if (flag_mean) then
+    model_ind_worefmmm = model_ind_woref(ind(model_ind_woref.ne.mmm_ind))
+  else
+    model_ind_worefmmm = model_ind_woref
+  end if
+
+  delete(model_ind)
+
+  ; ========================== initialization ==============================
+
+  ; Set default values for non-required diag_script_info attributes
+
+  set_default_att(diag_script_info, "calcmm", False)
+  set_default_att(diag_script_info, "ecs_file", "ECS.nc")
+  set_default_att(diag_script_info, "legend_outside", False)
+  set_default_att(diag_script_info, "predef_minmax", False)
+  set_default_att(diag_script_info, "output_diag_only", False)
+  set_default_att(diag_script_info, "output_models_only", False)
+
+  legend_outside = diag_script_info@legend_outside
+  ecsfile = diag_script_info@ecs_file
+
+  ; ============================ calculations ==============================
+
+  diagdata = new(dim_MOD, float)
+  diagminmax = new((/2/), float)
+  debug_out = False
+
+  ; southern ITCZ index (Tian, 2015)
+
+  if (diag.eq."itczidx") then
+    pr_idx = ind(variables.eq."pr")
+    ref0 = ref_ind(pr_idx)
+
+    result = itczidx(names(ref0), variables, debug_out, 0.0)
+
+    SEPavg_ref = result[0]
+    diagminmax = result[1]
+    delete(result)
+
+    do imod = 0, dim_MOD - 1
+      ; skip multi-model mean
+
+      if (imod.eq.mmm_ind) then
+        continue
+      end if
+
+      result = itczidx(names(imod), variables, debug_out, SEPavg_ref)
+
+      diagdata(imod) = result[0]
+      if (.not.all(ismissing(result[1]))) then
+        diagminmax = result[1]
+      end if
+      delete(result)
+    end do
+
+  end if  ; diag.eq."itczidx"
+
+  ; tropical mid-tropospheric humidity asymmetry index (Tian, 2015)
+
+  if (diag.eq."humidx") then
+    sh_avg = new(dim_MOD, float)
+    nh_avg = new(dim_MOD, float)
+
+    hus_idx = ind(variables.eq."hus")
+    ref0 = ref_ind(hus_idx)
+    result = humidx(names(ref0), variables, debug_out, 1.0, 1.0)
+    nh_avg(ref0) = result[1]
+    sh_avg(ref0) = result[2]
+    delete(result)
+
+    do imod = 0, dim_MOD - 1
+      ; skip multi-model mean entry
+
+      if (imod.eq.mmm_ind) then
+        continue
+      end if
+
+      result = humidx(names(imod), variables, debug_out, nh_avg(ref0), \
+                      sh_avg(ref0))
+
+      diagdata(imod) = result[0]
+
+      if (.not.all(ismissing(result[3]))) then
+        diagminmax = result[3]
+      end if
+      delete(result)
+
+    end do  ; imod-loop
+  end if  ; diag.eq."humidx"
+
+  ; LTMI (Sherwood et al., 2014)
+
+  if (diag.eq."ltmi") then
+    obs_done = False
+    datasets = new(dim_VAR, string)
+    do imod = 0, dim_MOD - 1
+      ; skip multi-model mean entry
+      if (imod.eq.mmm_ind) then
+        continue
+      end if
+
+      if (any(ref_ind.eq.imod)) then
+        do_obs = True
+        datasets = names(ref_ind)
+        if (.not.isvar("ref0")) then
+          ref0 = imod
+        end if
+      else
+        do_obs = False
+        datasets(:) = names(imod)
+      end if
+
+      ; all observations (possibly from different sources = different datasets)
+      ; are processed at the same time --> skip all observations if
+      ; observations have been processed already
+
+      if (do_obs.and.obs_done) then
+        continue
+      end if
+
+      diagdata(imod) = ltmi(datasets, variables, debug_out)
+
+    end do  ; imod-loop
+
+    diagdata@standard_name = "Lower tropospheric mixing index"
+    diagdata@units = "1"
+  end if
+
+  ; Sherwood D index (Sherwood et al., 2014)
+
+  if (diag.eq."sherwood_d") then
+    obs_done = False
+    datasets = new(dim_VAR, string)
+    do imod = 0, dim_MOD - 1
+      ; skip multi-model mean entry
+      if (imod.eq.mmm_ind) then
+        continue
+      end if
+
+      if (any(ref_ind.eq.imod)) then
+        do_obs = True
+        datasets = names(ref_ind)
+        if (.not.isvar("ref0")) then
+          ref0 = imod
+        end if
+      else
+        do_obs = False
+        datasets(:) = names(imod)
+      end if
+
+      ; all observations (possibly from different sources = different datasets)
+      ; are processed at the same time --> skip all observations if
+      ; observations have been processed already
+
+      if (do_obs.and.obs_done) then
+        continue
+      end if
+
+      diagdata(imod) = sherwood_d(datasets, variables, debug_out)
+
+    end do  ; imod-loop
+
+    diagdata@standard_name = "Sherwood_D_index"
+    diagdata@units = "1"
+  end if
+
+  ; Sherwood S index (Sherwood et al., 2014)
+
+  if (diag.eq."sherwood_s") then
+    obs_done = False
+    datasets = new(dim_VAR, string)
+    do imod = 0, dim_MOD - 1
+      ; skip multi-model mean entry
+      if (imod.eq.mmm_ind) then
+        continue
+      end if
+
+      if (any(ref_ind.eq.imod)) then
+        do_obs = True
+        datasets = names(ref_ind)
+        if (.not.isvar("ref0")) then
+          ref0 = imod
+        end if
+      else
+        do_obs = False
+        datasets(:) = names(imod)
+      end if
+
+      ; all observations (possibly from different sources = different datasets)
+      ; are processed at the same time --> skip all observations if
+      ; observations have been processed already
+
+      if (do_obs.and.obs_done) then
+        continue
+      end if
+
+      diagdata(imod) = sherwood_s(datasets, variables, debug_out)
+
+    end do  ; imod-loop
+
+    diagdata@standard_name = "Sherwood_S_index"
+    diagdata@units = "1"
+  end if
+
+  ; covariance of shortwave cloud reflectivity (Brient and Schneider, 2016)
+
+  if (diag.eq."covrefl") then
+    obs_done = False
+    datasets = new(dim_VAR, string)
+
+    do imod = 0, dim_MOD - 1
+
+      ; skip multi-model mean entry
+
+      if (imod.eq.mmm_ind) then
+        continue
+      end if
+
+      if (any(ref_ind.eq.imod)) then
+        do_obs = True
+        datasets = names(ref_ind)
+        if (.not.isvar("ref0")) then
+          ref0 = imod
+        end if
+      else
+        do_obs = False
+        datasets(:) = names(imod)
+      end if
+
+      ; all observations (possibly from different sources = different datasets)
+      ; are processed at the same time --> skip all observations if
+      ; observations have been processed already
+
+      if (do_obs.and.obs_done) then
+        continue
+      end if
+
+      diagdata(imod) = covrefl(datasets, variables, debug_out)
+
+    end do
+
+    diagdata@standard_name = "Regression sw cloud reflectivity - sst"
+    diagdata@units = "%/K"
+  end if
+
+  ; Southern Hemisphere Hadley cell extent (Lipat et al., 2017)
+
+  if (diag.eq."shhc") then
+    va_idx = ind(variables.eq."va")
+    ref0 = ref_ind(va_idx)
+    do imod = 0, dim_MOD - 1
+      ; skip multi-model mean entry
+      if (imod.eq.mmm_ind) then
+        continue
+      end if
+      diagdata(imod) = shhc(names(imod), variables, debug_out)
+    end do  ; imod-loop
+
+    ; this is the "Southern Hemisphere Hadley cell extent"
+
+    diagdata@standard_name = "SH Hadley cell extent"
+    diagdata@units = "degrees"
+  end if
+
+  ; ==================== calculate MMM if desired ==================
+
+  if (flag_mean) then
+    diagdata(mmm_ind) = avg(diagdata(model_ind_worefmmm))
+  end if
+
+  ; ------------------------------------------------------------------------
+  ; Skip further calculation/output if desired
+  ; ------------------------------------------------------------------------
+
+  if (diag_script_info@output_diag_only) then
+    log_info("Returning only X axis of emergent constraint because " + \
+             "'output_diag_only' is set to True")
+
+    outfile = str_sub_str(diagdata@standard_name, " ", "_") + "_ref"
+    n = dimsizes(ref_ind)
+    do i = 0, n - 1
+      outfile = outfile + "_" + names(ref_ind(i))
+    end do
+    if (isatt(diag_script_info, "suffix")) then
+      outfile = outfile + "_" + diag_script_info@suffix
+    end if
+
"model" + diagdata&model = names + diagdata@var = diag + diagdata@diag_script = DIAG_SCRIPT + + ; Additional attributes + if (isdefined("output_attributes")) then + atts = getvaratts(output_attributes) + do iatt = 0, dimsizes(atts) - 1 + diagdata@$atts(iatt)$ = output_attributes@$atts(iatt)$ + end do + end if + + ; Reference datasets + do iref = 0, dim_REF - 1 + if (.not. isvar("ref_str")) then + ref_str = refname(iref) + else + ref_str = ref_str + "|" + refname(iref) + end if + end do + if (isvar("ref_str")) then + diagdata@reference_dataset = ref_str + end if + + nc_filename = work_dir + outfile + ".nc" + if (diag_script_info@output_models_only) then + nc_outfile = ncdf_write(diagdata(model_ind_woref), nc_filename) + else + nc_outfile = ncdf_write(diagdata, nc_filename) + end if + + log_provenance(nc_outfile, "n/a", caption, "other", "reg", "", "", "", \ + infiles) + + leave_msg(DIAG_SCRIPT, "") + exit + + end if + + ; ==================== read precalculated ECS from file ================== + + if (isfilepresent(ecsfile)) then + log_info("Read in: " + ecsfile) + ecs_tmp = ncdf_read(ecsfile, "ecs") + else + error_msg("f", DIAG_SCRIPT, "", "file with precalculated ECS data (" + \ + ecsfile + ") not found.") + end if + + ; now copy the precalculated ECS values read from the external file to the + ; array ECS + + ECS = new(dim_MOD, float) + ECS = ecs_tmp@_FillValue + ECS@_FillValue = ecs_tmp@_FillValue + ECS@units = ecs_tmp@units + ECS@standard_name = ecs_tmp@standard_name + + exit_flag = False + + do i = 0, dim_MOD - 1 + if (any(i.eq.ref_ind).or.(i.eq.mmm_ind)) then + continue + end if + name = names(i) + idx = ind(name.eq.ecs_tmp&model) + if (ismissing(idx)) then + log_info("no pre-calcuated ECS value for model " + name + " available") + exit_flag = True + else + ECS(i) = ecs_tmp(idx) + log_info("pre-calcuated ECS value for model " + name + " = " + ECS(i)) + end if + delete(idx) + end do + + if (exit_flag) then + error_msg("f", DIAG_SCRIPT, "", "precalculated ECS data (" + \ + ecsfile + ") not available for all models.") + end if + + f = addfile(ecsfile, "r") + climofiles = array_append_record(infiles, getfilepath(f), 0) + + ; ==================== calculate MMM if desired ================== + + if (flag_mean) then + ECS(mmm_ind) = avg(ECS(model_ind_worefmmm)) + end if + + ; ============================ plot results ============================== + + outfile = str_sub_str(diagdata@standard_name, " ", "_") + \ + "-" + str_sub_str(ECS@standard_name, " ", "_") + "_ref" + n = dimsizes(ref_ind) + do i = 0, n - 1 + outfile = outfile + "_" + names(ref_ind(i)) + end do + + if (isatt(diag_script_info, "suffix")) then + outfile = outfile + "_" + diag_script_info@suffix + end if + + wks = get_wks("dummy_for_wks", DIAG_SCRIPT, outfile) + wks@legendfile = outfile + "_legend" + + if (isatt(diag_script_info, "styleset")) then + my_info = NewList("lifo") + do i = 0, dim_MOD - 1 + ListAppend(my_info, new(1, logical)) + my_info[i]@dataset = names(i) + end do + colortab = project_style(my_info, diag_script_info, "colors") + markertab = project_style(my_info, diag_script_info, "markers") + else + colortab = (/"(/0.00, 0.00, 0.59/)", "(/0.00, 0.39, 1.00/)", \ + "(/0.20, 1.00, 1.00/)", "(/0.20, 0.88, 0.00/)", \ + "(/1.00, 0.88, 0.00/)", "(/1.00, 0.59, 0.00/)", \ + "(/1.00, 0.20, 0.00/)", "(/0.59, 0.00, 0.00/)", \ + "(/0.78, 0.00, 0.78/)", "(/0.59, 0.00, 0.59/)", \ + "(/0.90, 0.90, 0.90/)", "(/0.70, 0.70, 0.70/)", \ + "(/0.50, 0.50, 0.50/)", "(/0.30, 0.30, 0.30/)"/) + markertab = (/16, 4, 5, 0/) + end if + + nmod 
= dimsizes(model_ind_woref) + + colors = new(nmod, string) + markers = new(nmod, integer) + + ; create new marker: filled star + + mstring = "z" + fontnum = 35 + size = 1.5 + angle = 0.0 + + new_index = NhlNewMarker(wks, mstring, fontnum, 0.0, 0.0, 1.0, size, angle) + + if (isatt(diag_script_info, "styleset")) then + colors = colortab(model_ind_woref) + markers = markertab(model_ind_woref) + i = ind(model_ind_woref.eq.mmm_ind) + if (.not. all(ismissing(i))) then + colors(i) = "(/0.00, 0.00, 0.00/)" ; black + markers(i) = new_index + end if + else + i = 0 + idx = 0 + do while (i.lt.nmod) + imod = model_ind_woref(i) + if (imod.eq.mmm_ind) then + colors(i) = "(/0.00, 0.00, 0.00/)" ; black + markers(i) = new_index + i = i + 1 + else + do n = 0, (nmod - 1) / dimsizes(colortab) + colors(i) = colortab(idx) + markers(i) = markertab(n) + i = i + 1 + if (i.ge.nmod) then + break + end if + end do + idx = idx + 1 + if (idx.ge.dimsizes(colortab)) then + idx = dimsizes(colortab) - 1 + end if + end if + end do + end if + + ; Collect data + + data_arr = new((/2, nmod/), float) + data_arr!0 = "statistic" + data_arr!1 = "model" + data_arr&statistic = (/diagdata@standard_name, ECS@standard_name/) + data_arr&model = names(model_ind_woref) + data_arr(0, :) = (/diagdata(model_ind_woref)/) + data_arr(1, :) = (/ECS(model_ind_woref)/) + data_arr@units = (/diagdata@units, ECS@units/) + data_arr@legend_outside = legend_outside + data_arr@colors = colors + data_arr@markers = markers + data_arr@annots = names(model_ind_woref) + + yregmin = 999.9 + yregmax = -999.9 + + xmin = min(data_arr(0, :)) + xmax = max(data_arr(0, :)) + xdelta = xmax - xmin + + if (diagdata(ref0) .lt. xmin) then + data_arr@trXMinF = diagdata(ref0) - 0.02 * xdelta + end if + + if (diagdata(ref0) .gt. xmax) then + data_arr@trXMaxF = diagdata(ref0) + 0.02 * xdelta + end if + + nx = dimsizes(diagdata(model_ind_worefmmm)) + ymin = min(ECS) + ymax = max(ECS) + + xmin = min(diagdata) + xmax = max(diagdata) + xdelta = xmax - xmin + x0 = xmin - 0.5 * xdelta + x1 = xmax + 0.5 * xdelta + + if (nx.ge.3) then + x = diagdata(model_ind_worefmmm) + y = ECS(model_ind_worefmmm) + ii = dim_pqsort_n(x, 1, 0) + xx = x(ii) ; temporary 'work' arrays + yy = y(ii) + + rc = regline_stats(xx, yy) ; requires NCL 6.2.0 or higher + + ; calculate confidence intervals (25%, 75%) of regression + + xxx = fspan(x0, x1, 50) + yyy = rc@b(0) + xxx * rc@b(1) + ; t-value given the one-sided probability and the degrees of freedom + tval = cdft_t(0.25, nx - 2) + + mean_se_fit = 1.0 / nx + (xxx - rc@xave) ^ 2 / sum((x - rc@xave) ^ 2) + mean_conf_upper = yyy + tval * sqrt(rc@MSE * mean_se_fit) + mean_conf_lower = yyy - tval * sqrt(rc@MSE * mean_se_fit) + + ; calculate prediction intervals (25%, 75%) + + prediction_upper = yyy + tval * sqrt(rc@MSE * (1.0 + mean_se_fit)) + prediction_lower = yyy - tval * sqrt(rc@MSE * (1.0 + mean_se_fit)) + + yregmin = min(mean_conf_lower) + yregmax = max(mean_conf_upper) + + data_arr@trYMinF = min((/ymin, yregmin/)) + data_arr@trYMaxF = max((/ymax, yregmax/)) + + data_arr@res_gsnRightString = "R~S~2~N~ = " \ + + tostring_with_format(rc@r2, "%3.2f") + data_arr@res_gsnRightStringFontHeightF = 0.025 + + delete(yyy) + delete(mean_se_fit) + delete(x) + delete(y) + delete(ii) + end if + + ; If requested by user, use predefined min/max for x- and y-axis + ; depending on diagnostic + + if (diag_script_info@predef_minmax) then + data_arr@trYMinF = 2.0 ; CMIP3/CMIP5 + data_arr@trYMaxF = 5.0 ; 6.5 ; CMIP3/CMIP5: 5.0, CMIP6: 6.5 + if (diag.eq."itczidx") then + 
data_arr@trXMinF = -1.0 ; southern ITCZ index + data_arr@trXMaxF = 3.0 ; southern ITCZ index + end if + if (diag.eq."humidx") then + data_arr@trXMinF = -15.0 ; tropical mid-tropos. humidity asym. index + data_arr@trXMaxF = 40.0 ; tropical mid-tropos. humidity asym. index + end if + if (diag.eq."sherwood_d") then + data_arr@trXMinF = 0.0 ; Sherwood D index + data_arr@trXMaxF = 0.6 ; Sherwood D index + end if + if (diag.eq."sherwood_s") then + data_arr@trXMinF = 0.1 ; Sherwood S index + data_arr@trXMaxF = 0.8 ; Sherwood S index + end if + if (diag.eq."ltmi") then + data_arr@trXMinF = 0.4 ; lower tropospheric mixing index (LTMI) + data_arr@trXMaxF = 1.2 ; lower tropospheric mixing index (LTMI) + end if + if (diag.eq."shhc") then + data_arr@trXMinF = -40.0 ; SH HC extent + data_arr@trXMaxF = -30.0 ; SH HC extent + end if + if (diag.eq."covrefl") then + data_arr@trXMinF = -2.0 ; covariance of TLC sw reflection with SST + data_arr@trXMaxF = 1.0 ; covariance of TLC sw reflection with SST + end if + + ymin = min((/ymin, data_arr@trYMinF/)) + ymax = max((/ymax, data_arr@trYMaxF/)) + end if + + ; draw scatter plot + data_arr@res_tiMainString = "" + plot = scatterplot_markers(wks, data_arr, var0, input_file_info) + + ydelta = ymax - ymin + y0 = min((/ymin - 0.5 * ydelta, yregmin/)) + y1 = max((/yregmax, ymax + 0.5 * ydelta/)) + + ; plot observational uncertainty (if available) + + if (.not.all(ismissing(diagminmax))) then +; if (isdefined("diagminmax")) then + x = (/diagminmax(0), diagminmax(0), diagminmax(1), diagminmax(1), \ + diagminmax(0)/) + y = (/y0, y1, y1, y0, y0/) + res = True + res@tfPolyDrawOrder = "Draw" + res@gsFillColor = (/0.9, 0.9, 0.9/) + referr = gsn_add_polygon(wks, plot, x, y, res) + delete(x) + delete(y) + delete(res) + end if + + ; draw line for reference data + + x = (/diagdata(ref0), diagdata(ref0)/) + y = (/y0, y1/) + res = True + res@gsLineColor = (/0.75, 0.75, 0.75/) + res@gsLineThicknessF = 4.0 + res@tfPolyDrawOrder = "Draw" + + ref = gsn_add_polyline(wks, plot, x, y, res) + + delete(x) + delete(y) + + ; add regression line (drawn in the background) + ; (can only be calculated if there are at least 3 models) + + if (nx.ge.3) then + x = (/x0, x1/) + y = (/x0 * rc + rc@yintercept, x1 * rc + rc@yintercept/) + res = True + res@gsLineColor = (/1.00, 0.00, 0.00/) + res@gsLineThicknessF = 4.0 + res@tfPolyDrawOrder = "Draw" + regl = gsn_add_polyline(wks, plot, x, y, res) + + res@gsLineThicknessF = 2.0 + res@gsLineDashPattern = 1 + reglc1 = gsn_add_polyline(wks, plot, xxx, mean_conf_upper, res) + reglc2 = gsn_add_polyline(wks, plot, xxx, mean_conf_lower, res) + res@gsLineDashPattern = 2 + reglp1 = gsn_add_polyline(wks, plot, xxx, prediction_upper, res) + reglp2 = gsn_add_polyline(wks, plot, xxx, prediction_lower, res) + + delete(xxx) + delete(mean_conf_upper) + delete(mean_conf_lower) + delete(prediction_upper) + delete(prediction_lower) + end if + + draw(plot) + frame(wks) + + plotfile = wks@fullname + + log_info("Wrote " + plotfile) + + ; ======================== netcdf output =========================== + + outfile = str_sub_str(diagdata@standard_name, " ", "_") + "_ref" + n = dimsizes(ref_ind) + do i = 0, n - 1 + outfile = outfile + "_" + names(ref_ind(i)) + end do + if (isatt(diag_script_info, "suffix")) then + outfile = outfile + "_" + diag_script_info@suffix + end if + + diagdata!0 = "model" + diagdata&model = names + diagdata@var = diag + diagdata@diag_script = DIAG_SCRIPT + + ; Additional attributes + if (isdefined("output_attributes")) then + atts = 
getvaratts(output_attributes) + do iatt = 0, dimsizes(atts) - 1 + diagdata@$atts(iatt)$ = output_attributes@$atts(iatt)$ + end do + end if + + ; Reference datasets + do iref = 0, dim_REF - 1 + if (.not. isvar("ref_str")) then + ref_str = refname(iref) + else + ref_str = ref_str + "|" + refname(iref) + end if + end do + if (isvar("ref_str")) then + diagdata@reference_dataset = ref_str + end if + + nc_filename = work_dir + outfile + ".nc" + if (diag_script_info@output_models_only) then + nc_outfile = ncdf_write(diagdata(model_ind_woref), nc_filename) + else + nc_outfile = ncdf_write(diagdata, nc_filename) + end if + + nc_filename@existing = "append" + ECS!0 = "model" + ECS&model = names + ECS@var = "ECS" + ECS@diag_script = DIAG_SCRIPT + + if (diag_script_info@output_models_only) then + nc_outfile = ncdf_write(ECS(model_ind_woref), nc_filename) + else + nc_outfile = ncdf_write(ECS, nc_filename) + end if + + ; ------------------------------------------------------------------------ + ; write provenance to netcdf output and plot file + ; ------------------------------------------------------------------------ + + statistics = ("other") + domain = ("reg") + plottype = ("scatter") + + log_provenance(nc_outfile, plotfile, caption, statistics, domain, \ + plottype, "", "", climofiles) + + leave_msg(DIAG_SCRIPT, "") + +end diff --git a/esmvaltool/diag_scripts/emergent_constraints/ecs_scatter.py b/esmvaltool/diag_scripts/emergent_constraints/ecs_scatter.py new file mode 100644 index 0000000000..e0976306bc --- /dev/null +++ b/esmvaltool/diag_scripts/emergent_constraints/ecs_scatter.py @@ -0,0 +1,825 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +"""Diagnostic script to calculate various emergent constraints for ECS. + +Description +----------- +Calculate the X axis of various emergent constraints for the equilibrium +climate sensitivity (ECS). + +Author +------ +Manuel Schlund (DLR, Germany) + +Project +------- +CRESCENDO + +Configuration options in recipe +------------------------------- +diag: str + Emergent constraint to calculate (must be one of ``'brient_shal'``, ``'su'``, + ``'volodin'``, ``'zhai'``). +metric: str, optional (default: 'regression_slope') + Metric to measure model error. Only relevant for Su et al. (2014) + constraint. Must be one of ``'regression_slope'``, + ``'correlation_coefficient'``. +n_jobs: int, optional (default: 1) + Maximum number of jobs spawned by this class. +output_attributes: dict, optional + Write additional attributes to netcdf files. +pattern: str, optional + Pattern matched against ancestor file names. +savefig_kwargs: dict + Keyword arguments for :func:`matplotlib.pyplot.savefig`. +seaborn_settings: dict + Options for :func:`seaborn.set_theme` (affects all plots). 
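+
+Example
+-------
+An illustrative recipe entry using the options above (the script path follows
+this file's location; the option values are hypothetical choices, not
+defaults)::
+
+    scripts:
+      ecs_predictor:
+        script: emergent_constraints/ecs_scatter.py
+        diag: zhai
+        n_jobs: 2
+        output_attributes:
+          comment: example run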
+ +""" + +import logging +import os +from copy import deepcopy +from inspect import isfunction +from pprint import pformat + +import dask.array as da +import iris +import matplotlib.pyplot as plt +import numpy as np +import pandas as pd +import seaborn as sns +from esmvalcore.cmor.fixes import add_plev_from_altitude +from iris import NameConstraint +from joblib import Parallel, delayed +from scipy.interpolate import interp1d +from scipy.stats import linregress + +import esmvaltool.diag_scripts.emergent_constraints as ec +from esmvaltool.diag_scripts.shared import ( + ProvenanceLogger, + get_diagnostic_filename, + get_plot_filename, + group_metadata, + io, + run_diagnostic, + select_metadata, + sorted_metadata, +) + +logger = logging.getLogger(os.path.basename(__file__)) + + +def _check_variables(datasets, necessary_short_names): + """Check if ``datasets`` contain necessary variables.""" + dataset_name = datasets[0]['dataset'] + necessary_short_names = set(necessary_short_names) + short_names = set(group_metadata(datasets, 'short_name').keys()) + if short_names != necessary_short_names: + raise ValueError( + f"Expected variables {necessary_short_names} for dataset " + f"'{dataset_name}', got {short_names}") + + +# TODO: Remove when bug in iris is fixed +def _fix_iris_bug_derived_coord(cube): + """Fix iris bug concerning derived coords and aggregation.""" + air_pressure_coord = cube.coord('air_pressure') + try: + altitude_coord = cube.coord('altitude') + except iris.exceptions.CoordinateNotFoundError: + altitude_coord = None + for aux_factory in cube.aux_factories: + cube.remove_aux_factory(aux_factory) + try: + cube.coord('air_pressure') + except iris.exceptions.CoordinateNotFoundError: + cube.add_aux_coord(air_pressure_coord, np.arange(cube.ndim)) + if altitude_coord is not None: + cube.add_aux_coord(altitude_coord, (1, 2, 3)) + + +def _get_cube(datasets, short_name): + """Get cube with specific ``'short_name'`` from datasets.""" + datasets = select_metadata(datasets, short_name=short_name) + if len(datasets) != 1: + raise ValueError( + f"Expected exactly one dataset with short_name '{short_name}', " + f"got {len(datasets):d}:\n{datasets}") + return iris.load_cube(datasets[0]['filename'], + NameConstraint(var_name=short_name)) + + +def _get_level_width(air_pressure_bounds, ref_lev, ref_zg): + """Get level widths of a single grid cell.""" + ref_lev = ref_lev.compressed() + ref_zg = ref_zg.compressed() + if len(ref_lev) < 2: + return np.full(air_pressure_bounds.shape[0], np.nan) + func = interp1d(ref_lev, ref_zg, kind='cubic', fill_value='extrapolate') + level_widths = [] + for bounds in air_pressure_bounds: + level_width = abs(func(bounds[0]) - func(bounds[1])) + level_widths.append(level_width) + return np.array(level_widths) + + +def _get_level_widths(cube, zg_cube, n_jobs=1): + """Get all level widths for whole :class:`iris.cube.Cube`.""" + logger.info("Calculating level widths from 'air_pressure' coordinate") + + # Get air_pressure bounds + (_, z_idx) = _get_z_coord(cube) + air_pressure_coord = cube.coord('air_pressure') + if air_pressure_coord.bounds is None: + raise ValueError( + f"Derived coordinate 'air_pressure' of cube " + f"{cube.summary(shorten=True)} does not have bounds") + if air_pressure_coord.shape == cube.shape: + air_pressure_bounds = air_pressure_coord.bounds + else: + air_pressure_bounds = np.expand_dims(air_pressure_coord.bounds, 0) + air_pressure_bounds = np.broadcast_to(air_pressure_bounds, + cube.shape + (2, )) + air_pressure_bounds = 
np.moveaxis(air_pressure_bounds, z_idx, -2) + air_pressure_shape = air_pressure_bounds.shape[:-1] + air_pressure_bounds = air_pressure_bounds.reshape(-1, cube.shape[z_idx], 2) + + # Geopotential height (pressure level -> altitude) + (z_coord_zg, z_idx_zg) = _get_z_coord(zg_cube) + ref_zg = np.moveaxis(zg_cube.data, z_idx_zg, + -1).reshape(-1, zg_cube.shape[z_idx_zg]) + mask = np.ma.getmaskarray(ref_zg) + ref_lev = np.expand_dims(z_coord_zg.points, 0) + ref_lev = np.ma.array(np.broadcast_to(ref_lev, ref_zg.shape), mask=mask) + + # Check shapes + if air_pressure_bounds.shape[0] != ref_zg.shape[0]: + raise ValueError(f"Expected identical first dimensions for cubes " + f"{cube.summary(shorten=True)} and " + f"{zg_cube.summary(shorten=True)}, got shapes " + f"{air_pressure_bounds.shape} and {ref_zg.shape}") + + # Calculate level widths in parallel + parallel = Parallel(n_jobs=n_jobs) + level_widths = parallel( + [delayed(_get_level_width)(b, l, z) for (b, l, z) in zip( + air_pressure_bounds, ref_lev, ref_zg)] + ) + level_widths = np.ma.masked_invalid(level_widths) + level_widths = level_widths.reshape(air_pressure_shape) + level_widths = np.moveaxis(level_widths, -1, z_idx) + return level_widths + + +def _get_level_width_coord(cube, zg_cube, n_jobs=1): + """Get auxiliary coordinate which describes vertical level widths [m].""" + try: + altitude_coord = cube.coord('altitude') + except iris.exceptions.CoordinateNotFoundError: + level_widths = _get_level_widths(cube, zg_cube, n_jobs=n_jobs) + else: + logger.info("Calculating level widths from 'altitude' coordinate") + if altitude_coord.bounds is None: + raise ValueError( + f"Height coordinate 'altitude' of cube " + f"{cube.summary(shorten=True)} does not have bounds") + level_widths = np.abs(altitude_coord.bounds[..., 1] - + altitude_coord.bounds[..., 0]) + if level_widths.shape != cube.shape: + level_widths = np.expand_dims(level_widths, 0) + level_widths = np.broadcast_to(level_widths, cube.shape) + + # Create coordinate + aux_coord = iris.coords.AuxCoord(level_widths, + var_name='level_width', + long_name='Width of vertical layer', + units='m') + return aux_coord + + +def _get_mean_over_subsidence(cube, wap_cube, lat_constraint=None): + """Get mean over subsidence regions.""" + if lat_constraint is not None: + cube = cube.intersection(latitude=lat_constraint, + longitude=(0.0, 360.0), + ignore_bounds=True) + wap_cube = wap_cube.intersection(latitude=lat_constraint, + longitude=(0.0, 360.0), + ignore_bounds=True) + else: + cube = cube.copy() + wap_cube = wap_cube.copy() + + # Get monthly climatologies + iris.coord_categorisation.add_month_number(cube, 'time') + iris.coord_categorisation.add_month_number(wap_cube, 'time') + cube = cube.aggregated_by('month_number', iris.analysis.MEAN) + wap_cube = wap_cube.aggregated_by('month_number', iris.analysis.MEAN) + + # Mask subsidence regions (positive wap at 500 hPa) + mask = da.where(wap_cube.core_data() > 0, False, True) + cube.data = da.ma.masked_array(cube.core_data(), mask=mask) + area_weights = iris.analysis.cartography.area_weights(cube) + cube = cube.collapsed(['latitude', 'longitude'], + iris.analysis.MEAN, + weights=area_weights) + return cube + + +def _get_seasonal_mblc_fraction(cl_cube, wap_cube, lat_constraint): + """Calculate MBLC fraction.""" + cl_cube = cl_cube.intersection(latitude=lat_constraint, + longitude=(0.0, 360.0), + ignore_bounds=True) + wap_cube = wap_cube.intersection(latitude=lat_constraint, + longitude=(0.0, 360.0), + ignore_bounds=True) + + # Calculate total cloud area 
fraction below 700 hPa + levs = cl_cube.coord('air_pressure').core_points() + mask = np.where(levs >= 70000, False, True) + if mask.shape != cl_cube.shape: + mask = np.broadcast_to(np.expand_dims(mask, 0), cl_cube.shape) + cl_cube.data = da.ma.masked_array(cl_cube.core_data(), mask=mask) + inv_cl_cube = cl_cube.copy(data=1.0 - cl_cube.core_data() / 100.0) + (z_coord, z_idx) = _get_z_coord(inv_cl_cube) + total_cl = (1.0 - inv_cl_cube.core_data().prod(axis=z_idx)) * 100.0 + clt_cube = inv_cl_cube.collapsed(z_coord, iris.analysis.MEAN) # dummy + clt_cube.data = total_cl + clt_cube.cell_methods = clt_cube.cell_methods[:-1] + + # TODO: Remove when bug in iris is fixed + for aux_factory in clt_cube.aux_factories: + clt_cube.remove_aux_factory(aux_factory) + + # Get mean over subsidence regions + return _get_mean_over_subsidence(clt_cube, wap_cube) + + +def _get_su_cube_dict(grouped_data, var_name, reference_datasets): + """Extract cubes for Su et al. (2014) constraint.""" + ref_data = None + + # Reference data + ref_filenames = [] + for ref_dataset_name in reference_datasets.split('|'): + if ref_dataset_name not in grouped_data: + raise ValueError( + f"Reference dataset '{ref_dataset_name}' not found for " + f"variable '{var_name}'") + cube = iris.load_cube(grouped_data[ref_dataset_name][0]['filename']) + if ref_data is None: + ref_data = np.ma.array(cube.data) + else: + ref_data = np.ma.where(np.ma.getmaskarray(ref_data), + np.ma.array(cube.data), ref_data) + ref_filenames.append(grouped_data[ref_dataset_name][0]['filename']) + ref_cube = cube.copy(ref_data) + ref_cube.attributes['dataset'] = reference_datasets + ref_cube.attributes['ancestors'] = '|'.join(sorted(ref_filenames)) + ref_cube.coord('air_pressure').attributes['positive'] = 'down' + + # All other cubes + cube_dict = {reference_datasets: ref_cube} + for (dataset_name, datasets) in grouped_data.items(): + if dataset_name in reference_datasets: + continue + cube = iris.load_cube(datasets[0]['filename']) + cube.attributes['ancestors'] = datasets[0]['filename'] + cube_dict[dataset_name] = cube + + return cube_dict + + +def _get_su_variable(grouped_data): + """Get variable and reference datasets of Su et al. (2014) constraint.""" + var_name = None + reference_datasets = None + for (dataset_name, datasets) in grouped_data.items(): + if len(datasets) != 1: + raise ValueError( + f"Expected exactly one file for dataset '{dataset_name}', got " + f"{len(datasets):d}") + new_var_name = datasets[0]['short_name'] + new_reference_datasets = datasets[0].get('reference_dataset') + if var_name is None: + var_name = new_var_name + else: + if new_var_name != var_name: + raise ValueError( + f"Expected identical 'short_name' for all datasets of Su " + f"et al. (2014) constraint, got '{var_name}' and " + f"'{new_var_name}'") + if reference_datasets is None: + reference_datasets = new_reference_datasets + else: + if new_reference_datasets != reference_datasets: + raise ValueError( + f"Expected identical 'reference_dataset' for all datasets " + f"of Su et al. (2014) constraint, got " + f"'{reference_datasets}' and '{new_reference_datasets}'") + if reference_datasets is None: + raise ValueError(f"'reference_dataset' not given for variable " + f"'{var_name}'") + logger.info( + "Found variable '%s' for Su et al. 
(2014) constraint", var_name) + logger.info("Found reference datasets '%s'", reference_datasets) + return (var_name, reference_datasets) + + +def _get_weighted_cloud_fractions(cl_cube, zg_cube, level_limits, n_jobs=1): + """Calculate mass-weighted cloud fraction.""" + level_width_coord = _get_level_width_coord(cl_cube, zg_cube, n_jobs=n_jobs) + cl_cube.add_aux_coord(level_width_coord, np.arange(cl_cube.ndim)) + + # Mask data appropriately + levs = cl_cube.coord('air_pressure') + cloud_fractions = [] + for limits in level_limits: + clt_cube = cl_cube.copy() + mask = np.where(levs.points <= limits[0], False, True) + mask |= np.where(levs.points >= limits[1], False, True) + if mask.shape != clt_cube.shape: + mask = np.broadcast_to(np.expand_dims(mask, 0), clt_cube.shape) + clt_cube.data = da.ma.masked_array(clt_cube.core_data(), mask=mask) + (z_coord, _) = _get_z_coord(clt_cube) + + # (Mass-weighted) vertical averaging + clt_cube = clt_cube.collapsed( + z_coord, + iris.analysis.MEAN, + weights=clt_cube.coord(var_name='level_width').points) + + # Temporal averaging + clt_cube = clt_cube.collapsed('time', iris.analysis.MEAN) + + # (Area-weighted) horizontal averaging + area_weights = iris.analysis.cartography.area_weights(clt_cube) + clt_cube = clt_cube.collapsed(['latitude', 'longitude'], + iris.analysis.MEAN, + weights=area_weights) + cloud_fractions.append(clt_cube.data) + return cloud_fractions + + +def _get_z_coord(cube): + """Get index of Z coordinate.""" + for coord in cube.coords(dim_coords=True): + if iris.util.guess_coord_axis(coord) == 'Z': + z_coord = coord + break + else: + raise ValueError(f"Cannot determine height axis (Z) of cube " + f"{cube.summary(shorten=True)}") + return (z_coord, cube.coord_dims(z_coord)[0]) + + +def _get_zhai_data_frame(datasets, lat_constraint): + """Get :class:`pandas.DataFrame` including the data for ``zhai``.""" + cl_cube = _get_cube(datasets, 'cl') + wap_cube = _get_cube(datasets, 'wap') + tos_cube = _get_cube(datasets, 'tos') + + # Add air_pressure coordinate if necessary + if not cl_cube.coords('air_pressure'): + if cl_cube.coords('altitude'): + add_plev_from_altitude(cl_cube) + else: + raise ValueError( + f"No 'air_pressure' coord available in cube " + f"{cl_cube.summary(shorten=True)}") + + # Apply common mask (only ocean) + mask_2d = da.ma.getmaskarray(tos_cube.core_data()) + mask_3d = mask_2d[:, np.newaxis, ...] 
+    mask_3d = da.broadcast_to(mask_3d, cl_cube.shape)
+    wap_cube.data = da.ma.masked_array(wap_cube.core_data(), mask=mask_2d)
+    cl_cube.data = da.ma.masked_array(cl_cube.core_data(), mask=mask_3d)
+
+    # Calculate SST mean and MBLC fraction
+    tos_cube = _get_mean_over_subsidence(tos_cube, wap_cube, lat_constraint)
+    mblc_cube = _get_seasonal_mblc_fraction(cl_cube, wap_cube, lat_constraint)
+    return pd.DataFrame(
+        {'tos': tos_cube.data, 'mblc_fraction': mblc_cube.data},
+        index=pd.Index(np.arange(12) + 1, name='month'),
+    )
+
+
+def _pearson_correlation_coeff(x_data, y_data):
+    """Similarity metric using the Pearson correlation coefficient."""
+    reg = linregress(x_data, y_data)
+    return reg.rvalue
+
+
+def _regression_slope_metric(x_data, y_data):
+    """Similarity metric using the slope of a linear regression."""
+    reg = linregress(x_data, y_data)
+    return reg.slope
+
+
+def _similarity_metric(cube, ref_cube, metric):
+    """Calculate similarity metric between two cubes."""
+    if metric == 'regression_slope':
+        metric_func = _regression_slope_metric
+    elif metric == 'correlation_coefficient':
+        metric_func = _pearson_correlation_coeff
+    else:
+        raise ValueError(
+            f"Expected one of 'regression_slope', 'correlation_coefficient' "
+            f"as similarity metric for diagnostic 'su', got '{metric}'")
+    new_data = np.ma.array(cube.data, copy=True).ravel()
+    ref_data = np.ma.array(ref_cube.data, copy=True).ravel()
+    mask = np.ma.getmaskarray(ref_data) | np.ma.getmaskarray(new_data)
+    return metric_func(np.ma.array(new_data, mask=mask).compressed(),
+                       np.ma.array(ref_data, mask=mask).compressed())
+
+
+def brient_shal(grouped_data, cfg):
+    """Brient et al. (2016) constraint."""
+    diag_data = {}
+
+    # Variable attributes
+    var_attrs = {
+        'short_name': 'gamma',
+        'long_name': 'Fraction of tropical (30°S - 30°N) low clouds with tops '
+                     'below 850 hPa whose tops are also below 950 hPa (over '
+                     'oceanic weak subsidence regions)',
+        'units': '%',
+    }
+    attrs = {
+        'plot_xlabel': r'Cloud shallowness index $\gamma$ [%]',
+        'plot_title': 'Brient et al. 
(2016) constraint', + 'provenance_authors': ['schlund_manuel'], + 'provenance_domains': ['trop'], + 'provenance_realms': ['atmos'], + 'provenance_references': ['brient16climdyn'], + 'provenance_statistics': ['mean'], + 'provenance_themes': ['EC'], + + } + + # Calculate constraint + for (dataset_name, datasets) in grouped_data.items(): + logger.info("Processing dataset '%s'", dataset_name) + _check_variables(datasets, {'cl', 'wap', 'zg'}) + + # Load cubes + cl_cube = _get_cube(datasets, 'cl') + wap_cube = _get_cube(datasets, 'wap') + zg_cube = _get_cube(datasets, 'zg') + + # Add air_pressure coordinate if necessary + if not cl_cube.coords('air_pressure'): + if cl_cube.coords('altitude'): + add_plev_from_altitude(cl_cube) + else: + raise ValueError( + f"No 'air_pressure' coord available in cube " + f"{cl_cube.summary(shorten=True)}") + + # TODO: Remove when bug in iris is fixed + _fix_iris_bug_derived_coord(cl_cube) + + # Calculate monthly climatologies + iris.coord_categorisation.add_month_number(cl_cube, 'time') + iris.coord_categorisation.add_month_number(wap_cube, 'time') + iris.coord_categorisation.add_month_number(zg_cube, 'time') + cl_cube = cl_cube.aggregated_by('month_number', iris.analysis.MEAN) + wap_cube = wap_cube.aggregated_by('month_number', iris.analysis.MEAN) + zg_cube = zg_cube.aggregated_by('month_number', iris.analysis.MEAN) + + # Mask weak subsidence regions + wap_cube.convert_units('hPa day-1') + mask = np.where(wap_cube.data >= 10.0, False, True) + mask |= np.where(wap_cube.data <= 30.0, False, True) + cl_mask = np.broadcast_to(np.expand_dims(mask, 1), cl_cube.shape) + cl_cube.data = da.ma.masked_array(cl_cube.core_data(), mask=cl_mask) + wap_cube.data = da.ma.masked_array(wap_cube.core_data(), mask=mask) + zg_mask = np.broadcast_to(np.expand_dims(mask, 1), zg_cube.shape) + zg_cube.data = da.ma.masked_array(zg_cube.core_data(), mask=zg_mask) + + # Get mass-weighted cloud fractions + [cf_950, + cf_850] = _get_weighted_cloud_fractions(cl_cube, zg_cube, + [(100000, 90000), + (90000, 80000)], + n_jobs=cfg['n_jobs']) + diag_data[dataset_name] = 100.0 * cf_950 / (cf_950 + cf_850) + + return (diag_data, var_attrs, attrs) + + +def su(grouped_data, cfg): + """Su et al. (2014) constraint.""" + metric = cfg['metric'] + logger.info("Found metric '%s' for Su et al. (2014) constraint", metric) + + # Extract cubes + (var_name, reference_datasets) = _get_su_variable(grouped_data) + cube_dict = _get_su_cube_dict(grouped_data, var_name, reference_datasets) + diag_data = {} + ref_cube = cube_dict[reference_datasets] + + # Variable attributes + var_attrs = { + 'short_name': 'alpha' if metric == 'regression_slope' else 'rho', + 'long_name': f"Error in vertically-resolved tropospheric " + f"zonal-average {ref_cube.long_name} between 40°N and " + f"45°S expressed as {metric.replace('_', ' ')} between " + f"model data and observations", + 'units': '1', + } + attrs = { + 'plot_xlabel': f'Model performance in {ref_cube.long_name} [1]', + 'plot_title': 'Su et al. 
(2014) constraint', + 'provenance_authors': ['schlund_manuel'], + 'provenance_domains': ['trop', 'midlat'], + 'provenance_realms': ['atmos'], + 'provenance_references': ['su14jgr'], + 'provenance_statistics': ['corr'], + 'provenance_themes': ['EC'], + } + + # Calculate constraint + for (dataset_name, cube) in cube_dict.items(): + logger.info("Processing dataset '%s'", dataset_name) + + # Plot cube + if cube.ndim == 2: + iris.quickplot.contourf(cube) + filename = f"su_{dataset_name.replace('|', '_')}" + plot_path = get_plot_filename(filename, cfg) + plt.savefig(plot_path, **cfg['savefig_kwargs']) + logger.info("Wrote %s", plot_path) + plt.close() + + # Provenance + ancestors = cube.attributes.pop('ancestors').split('|') + provenance_record = ec.get_provenance_record( + {'su': attrs}, ['su'], + caption=f'{cube.long_name} for {dataset_name}.', + plot_type='zonal', ancestors=ancestors) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(plot_path, provenance_record) + + # Write netCDF file + netcdf_path = get_diagnostic_filename(filename, cfg) + io.iris_save(cube, netcdf_path) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(netcdf_path, provenance_record) + + # Similarity metric + diag_data[dataset_name] = _similarity_metric(cube, ref_cube, metric) + + return (diag_data, var_attrs, attrs) + + +def volodin(grouped_data, _): + """Volodin (2008) constraint.""" + diag_data = {} + + # Variable attributes + var_attrs = { + 'short_name': 'clt_diff', + 'long_name': 'Difference in total cloud fraction between tropics ' + '(28°S - 28°N) and Southern midlatitudes (56°S - 36°S)', + 'units': '%', + } + attrs = { + 'plot_xlabel': 'Difference in tropical and midlatitude cloud fraction ' + '[%]', + 'plot_title': 'Volodin (2008) constraint', + 'provenance_authors': ['schlund_manuel'], + 'provenance_domains': ['trop', 'shmidlat'], + 'provenance_realms': ['atmos'], + 'provenance_references': ['volodin08izvestiya'], + 'provenance_statistics': ['diff', 'mean'], + 'provenance_themes': ['EC'], + } + + # Calculate constraint + for (dataset_name, datasets) in grouped_data.items(): + logger.info("Processing dataset '%s'", dataset_name) + _check_variables(datasets, {'clt'}) + + # Check if tropical and midlatitudes clt is present + if len(datasets) != 2: + raise ValueError( + f"Expected exactly two 'clt' datasets for dataset " + f"'{dataset_name}', got {len(datasets):d}") + for dataset in datasets: + if ('trop' in dataset['variable_group'] + or 'trop' in dataset['preprocessor']): + trop = iris.load_cube(dataset['filename']) + if trop.shape != (): + raise ValueError( + f"Expected scalar data for tropical 'clt' of dataset " + f"'{dataset_name}', got shape {trop.shape}") + break + else: + raise ValueError( + f"Expected exactly one dataset for tropical 'clt' (defined " + f"by the string 'trop' in the variable group name or the " + f"preprocessor name) for dataset '{dataset_name}', got none") + for dataset in datasets: + if ('midlat' in dataset['variable_group'] + or 'midlat' in dataset['preprocessor']): + midlat = iris.load_cube(dataset['filename']) + if midlat.shape != (): + raise ValueError( + f"Expected scalar data for Southern midlatitudes " + f"'clt' of dataset '{dataset_name}', got shape " + f"{midlat.shape}") + break + else: + raise ValueError( + f"Expected exactly one dataset for Southern midlatitudes " + f"'clt' (defined by the string 'midlat' in the variable group " + f"name or the preprocessor name) for dataset " + f"'{dataset_name}', got none") + + # Cloud 
fraction difference + diag_data[dataset_name] = trop.data - midlat.data + + return (diag_data, var_attrs, attrs) + + +def zhai(grouped_data, cfg): + """Zhai et al. (2015) constraint.""" + diag_data = {} + + # Variable attributes + var_attrs = { + 'short_name': 'mblc_sst_response', + 'long_name': 'Response of seasonal Marine Boundary Layer Cloud (MBLC) ' + 'fraction to change in Sea Surface Temperature (SST) ', + 'units': '% K-1', + } + attrs = { + 'plot_xlabel': r'Response of MBLC fraction to SST changes ' + r'[% K$^{-1}$]', + 'plot_title': 'Zhai et al. (2015) constraint', + 'provenance_authors': ['schlund_manuel'], + 'provenance_domains': ['trop', 'shmidlat'], + 'provenance_realms': ['atmos'], + 'provenance_references': ['zhai15grl'], + 'provenance_statistics': ['mean'], + 'provenance_themes': ['EC'], + } + + # Calculate constraint + for (dataset_name, datasets) in grouped_data.items(): + diag_data[dataset_name] = [] + logger.info("Processing dataset '%s'", dataset_name) + _check_variables(datasets, {'cl', 'wap', 'tos'}) + + # Consider both hemispheres separately + n_h = (20.0, 40.0) + s_h = (-40.0, -20.0) + for lat_constraint in (n_h, s_h): + data_frame = _get_zhai_data_frame(datasets, lat_constraint) + + # MBLC fraction response to SST changes + reg = linregress(data_frame['tos'].values, + data_frame['mblc_fraction'].values) + diag_data[dataset_name].append(reg.slope) + + # Plot regression + axes = sns.regplot(x='tos', y='mblc_fraction', data=data_frame) + axes.text( + 0.05, + 0.05, + rf"$\alpha={reg.slope:.3f}$ %/K ($R^2={reg.rvalue**2:.2f}$, " + rf"$p={reg.pvalue:.4f}$)", + transform=axes.transAxes) + if lat_constraint == n_h: + hem = 'Northern hemisphere' + filename = f'zhai_{dataset_name}_nh' + else: + hem = 'Southern hemisphere' + filename = f'zhai_{dataset_name}_sh' + plot_path = get_plot_filename(filename, cfg) + axes.set_title(f'{dataset_name} ({hem})') + plt.savefig(plot_path, **cfg['savefig_kwargs']) + logger.info("Wrote %s", plot_path) + plt.close() + + # Provenance + provenance_record = ec.get_provenance_record( + {'zhai': attrs}, ['zhai'], + caption=f"Regression plot of 'mblc_fraction' vs 'tos' ({hem})", + plot_type='scatter', + ancestors=[d['filename'] for d in datasets]) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(plot_path, provenance_record) + + # Write netCDF file + netcdf_path = get_diagnostic_filename(filename, cfg) + cubes = iris.cube.CubeList([ + ec.pandas_object_to_cube( + data_frame['tos'], var_name='tos', + standard_name='sea_surface_temperature', units='K', + attributes={'region': hem}), + ec.pandas_object_to_cube( + data_frame['mblc_fraction'], var_name='mblc_fraction', + long_name='Marine Boundary Layer Cloud fraction', + units='%', attributes={'region': hem}), + ]) + io.iris_save(cubes, netcdf_path) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(netcdf_path, provenance_record) + + # Mean over both hemispheres + diag_data[dataset_name] = np.mean(diag_data[dataset_name]) + + return (diag_data, var_attrs, attrs) + + +def check_cfg(cfg): + """Check configuration :obj:`dict`.""" + necessary_options = ['diag'] + for opt in necessary_options: + if opt not in cfg: + raise KeyError(f"Necessary option '{opt}' not given") + diag = cfg['diag'] + if diag not in globals() or not isfunction(globals()[diag]): + raise ValueError( + f"Selected diagnostic '{diag}' not available, it needs to be " + f"implemented as a function of this diagnostic script") + logger.info("Calculating constraint '%s'", diag) + return diag + 
+
+def check_input_data(input_data):
+    """Check input data."""
+    if not input_data:
+        raise ValueError("No input data found")
+
+
+def get_default_settings(cfg):
+    """Get default configuration settings."""
+    cfg = deepcopy(cfg)
+    cfg.setdefault('metric', 'regression_slope')
+    cfg.setdefault('n_jobs', 1)
+    cfg.setdefault('savefig_kwargs', {
+        'bbox_inches': 'tight',
+        'dpi': 600,
+        'orientation': 'landscape',
+    })
+    logger.info("Using at most %i processes", cfg['n_jobs'])
+    return cfg
+
+
+def get_global_attributes(input_data, cfg):
+    """Get global attributes covering all datasets for the output cube."""
+    datasets = sorted(list({str(d['dataset']) for d in input_data}))
+    projects = sorted(list({str(d['project']) for d in input_data}))
+    ref = sorted(list({str(d.get('reference_dataset')) for d in input_data}))
+    datasets = "|".join(datasets)
+    projects = "|".join(projects)
+    ref = "|".join(ref)
+    attrs = {
+        'dataset': datasets,
+        'project': projects,
+        'reference_dataset': ref,
+    }
+    attrs.update(cfg.get('output_attributes', {}))
+    return attrs
+
+
+def main(cfg):
+    """Run the diagnostic."""
+    cfg = get_default_settings(cfg)
+    diag = check_cfg(cfg)
+    sns.set_theme(**cfg.get('seaborn_settings', {}))
+
+    # Get input data
+    input_data = list(cfg['input_data'].values())
+    input_data.extend(io.netcdf_to_metadata(cfg, pattern=cfg.get('pattern')))
+    input_data = deepcopy(input_data)
+    input_data = sorted_metadata(input_data, ['short_name', 'exp', 'dataset'])
+    check_input_data(input_data)
+    grouped_data = group_metadata(input_data, 'dataset')
+
+    # Calculate X-axis of emergent constraint
+    diag_func = globals()[diag]
+    (diag_data, var_attrs, attrs) = diag_func(grouped_data, cfg)
+    attrs.update(get_global_attributes(input_data, cfg))
+
+    # Save data
+    netcdf_path = get_diagnostic_filename(diag, cfg)
+    io.save_scalar_data(diag_data, netcdf_path, var_attrs, attributes=attrs)
+    logger.info("Found data:\n%s", pformat(diag_data))
+
+    # Provenance
+    provenance_record = ec.get_provenance_record(
+        {diag: attrs}, [diag], caption=attrs['plot_xlabel'],
+        ancestors=[d['filename'] for d in input_data])
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(netcdf_path, provenance_record)
+
+
+if __name__ == '__main__':
+    with run_diagnostic() as config:
+        main(config)
diff --git a/esmvaltool/diag_scripts/emergent_constraints/lif1f2.py b/esmvaltool/diag_scripts/emergent_constraints/lif1f2.py
new file mode 100644
index 0000000000..edd7d67663
--- /dev/null
+++ b/esmvaltool/diag_scripts/emergent_constraints/lif1f2.py
@@ -0,0 +1,856 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""Calculates emergent constraint on Indian summer monsoon.
+
+###############################################################################
+testkw/lif1.py
+Author: Katja Weigel (IUP, Uni Bremen, Germany)
+EVal4CMIP project
+###############################################################################
+
+Description
+-----------
+    Calculates emergent constraint on Indian summer monsoon
+    following Li et al. (2017).
+
+Configuration options
+---------------------
+    output_name : Name of the output files.
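+
+    An illustrative recipe entry using this option (the script path follows
+    this file's location; the option value is a hypothetical choice)::
+
+        scripts:
+          ism_constraint:
+            script: emergent_constraints/lif1f2.py
+            output_name: li17natcc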
+
+###############################################################################
+
+"""
+
+import logging
+import os
+from pprint import pformat
+
+import cartopy.crs as cart
+import iris
+import iris.coord_categorisation as cat
+import matplotlib.pyplot as plt
+import numpy as np
+from scipy import stats
+
+import esmvaltool.diag_scripts.shared as e
+import esmvaltool.diag_scripts.shared.names as n
+from esmvaltool.diag_scripts.shared import (ProvenanceLogger,
+                                            get_diagnostic_filename,
+                                            get_plot_filename, group_metadata,
+                                            select_metadata)
+
+logger = logging.getLogger(os.path.basename(__file__))
+
+
+def _get_sel_files(cfg, dataname, dim=2):
+    """Get filenames from cfg for single models or multi-model mean."""
+    selection = []
+    if dim == 2:
+        for hlp in select_metadata(cfg['input_data'].values(),
+                                   dataset=dataname):
+            selection.append(hlp['filename'])
+    else:
+        for hlp in cfg['input_data'].values():
+            selection.append(hlp['filename'])
+
+    return selection
+
+
+def _get_sel_files_var(cfg, varnames):
+    """Get filenames from cfg for all datasets and different variables."""
+    selection = []
+
+    for var in varnames:
+        for hlp in select_metadata(cfg['input_data'].values(), short_name=var):
+            selection.append(hlp['filename'])
+
+    return selection
+
+
+def get_provenance_record(ancestor_files, caption, statistics,
+                          domains, plot_type='geo'):
+    """Get provenance record."""
+    record = {
+        'caption': caption,
+        'statistics': statistics,
+        'domains': domains,
+        'plot_type': plot_type,
+        'themes': ['atmDyn', 'monsoon', 'EC'],
+        'authors': [
+            'weigel_katja',
+        ],
+        'references': [
+            'li17natcc',
+        ],
+        'ancestors': ancestor_files,
+    }
+    return record
+
+
+def get_latlon_index(coords, lim1, lim2):
+    """Get index for given 1D vector, e.g. lats or lons between 2 limits."""
+    index = (np.where(
+        np.absolute(coords - (lim2 + lim1) / 2.0) <= (lim2 - lim1) / 2.0))[0]
+    return index
+
+
+def cube_to_save_ploted(var, lats, lons, names):
+    """Create cube to prepare plotted data for saving to netCDF."""
+    new_cube = iris.cube.Cube(var, var_name=names['var_name'],
+                              long_name=names['long_name'],
+                              units=names['units'])
+    new_cube.add_dim_coord(iris.coords.DimCoord(lats,
+                                                var_name='lat',
+                                                long_name='latitude',
+                                                units='degrees_north'), 0)
+    new_cube.add_dim_coord(iris.coords.DimCoord(lons,
+                                                var_name='lon',
+                                                long_name='longitude',
+                                                units='degrees_east'), 1)
+
+    return new_cube
+
+
+def cube_to_save_scatter(var1, var2, names):
+    """Create cubes to prepare scatter plot data for saving to netCDF."""
+    cubes = iris.cube.CubeList([iris.cube.Cube(var1,
+                                               var_name=names['var_name1'],
+                                               long_name=names['long_name1'],
+                                               units=names['units1'])])
+    cubes.append(iris.cube.Cube(var2, var_name=names['var_name2'],
+                                long_name=names['long_name2'],
+                                units=names['units2']))
+
+    return cubes
+
+
+def plot_rain_and_wind(cfg, dataname, data, future_exp):
+    """Plot contour map."""
+    plotdata = {}
+    if data['ar_diff_rain'].ndim == 3:
+        plotdata['pr'] = np.mean(data['ar_diff_rain'], axis=2)
+        plotdata['ua'] = np.mean(data['ar_diff_ua'], axis=2)
+        plotdata['va'] = np.mean(data['ar_diff_va'], axis=2)
+    else:
+        plotdata['pr'] = data['ar_diff_rain']
+        plotdata['ua'] = data['ar_diff_ua']
+        plotdata['va'] = data['ar_diff_va']
+
+    # Plot data
+    # create figure and axes instances
+    subplot_kw = {'projection': cart.PlateCarree()}
+    fig, axx = plt.subplots(figsize=(8, 5), subplot_kw=subplot_kw)
+    axx.set_extent([45, 120, -15, 30], cart.PlateCarree())
+
+    # draw filled contours
+    cnplot = plt.contourf(
+        data['lons'],
+        data['lats'], 
+        plotdata['pr'],
+        np.linspace(-0.75, 0.75, 11),
+        transform=cart.PlateCarree(),
+        cmap='BrBG',
+        # cmap='RdBu_r',
+        extend='both')
+
+    if data['ar_diff_rain'].ndim == 3:
+        plt.contour(data['lons'],
+                    data['lats'],
+                    np.std(data['ar_diff_rain'], axis=2), [0.2, 0.4, 0.6],
+                    transform=cart.PlateCarree(),
+                    colors='w')
+    axx.coastlines()
+    axx.quiver(
+        data['lons'][::2],
+        data['lats'][::2],
+        plotdata['ua'][::2, ::2],
+        plotdata['va'][::2, ::2],
+        scale=5.0,
+        color='indigo',
+        transform=cart.PlateCarree())
+
+    # add colorbar
+    cbar = fig.colorbar(cnplot, ax=axx, shrink=0.8)
+    # add colorbar title string
+    cbar.set_label(r'Rainfall change, mm d$^{-1}$')
+
+    axx.set_xlabel('Longitude')
+    axx.set_ylabel('Latitude')
+    axx.set_title(dataname + ' changes')
+    axx.set_xticks([40, 65, 90, 115])
+    axx.set_xticklabels(['40°E', '65°E', '90°E', '115°E'])
+    axx.set_yticks([-10, 0, 10, 20, 30])
+    axx.set_yticklabels(['10°S', '0°', '10°N', '20°N', '30°N'])
+
+    fig.tight_layout()
+    fig.savefig(get_plot_filename(dataname + '_li17natcc_fig1a', cfg),
+                dpi=300)
+    plt.close()
+
+    if data['ar_diff_rain'].ndim == 2:
+        caption = dataname + ': Changes in precipitation (colour shade, ' + \
+            'mm d-1) ' + \
+            'and 850-hPa wind (m s-1 scaled with 0.5) during ' + \
+            'the Indian summer monsoon season (May to September) from ' + \
+            str((select_metadata(cfg['input_data'].values(), dataset=dataname,
+                                 exp='historical'))[0]['start_year']) + '-' + \
+            str((select_metadata(cfg['input_data'].values(), dataset=dataname,
+                                 exp='historical'))[0]['end_year']) + \
+            ' to ' + \
+            str((select_metadata(cfg['input_data'].values(), dataset=dataname,
+                                 exp=future_exp))[0]['start_year']) + '-' + \
+            str((select_metadata(cfg['input_data'].values(), dataset=dataname,
+                                 exp=future_exp))[0]['end_year']) + \
+            ' projected ' + \
+            'under the ' + future_exp + ' scenario. All climatology ' + \
+            'changes are normalized by the corresponding global mean ' + \
+            'SST increase for each model.'
+    else:
+        caption = dataname + ': Changes in precipitation (colour shade, ' + \
+            'mm d-1) and 850-hPa wind' + \
+            ' (m s-1 scaled with 0.5) ' + \
+            'during the Indian summer monsoon season (May to September) ' + \
+            'under the ' + future_exp + ' scenario. All climatology ' + \
+            'changes are normalized by the corresponding global mean ' + \
+            'SST increase for each model. ' + \
+            'The white contours display the inter-model ' + \
+            'standard deviations of precipitation changes.' 
+ + provenance_record = get_provenance_record( + _get_sel_files(cfg, dataname, dim=data['ar_diff_rain'].ndim), + caption, ['diff'], ['reg']) + + diagnostic_file = get_diagnostic_filename(dataname + '_rain_wind_change', + cfg) + + logger.info("Saving analysis results to %s", diagnostic_file) + + cubelist = iris.cube.CubeList([cube_to_save_ploted(plotdata['pr'], + data['lats'], + data['lons'], + {'var_name': 'd_pr', + 'long_name': 'Prec' + + 'ipita' + + 'tion ' + + 'Change', + 'units': 'mm d-1'})]) + + if data['ar_diff_rain'].ndim == 3: + cubelist.append(cube_to_save_ploted(np.std(data['ar_diff_rain'], + axis=2), + data['lats'], + data['lons'], + {'var_name': 'std_pr', + 'long_name': 'Standard ' + + 'Deviation ' + + 'of the Prec' + + 'ipitation ', + 'units': 'mm d-1'})) + + cubelist.append(cube_to_save_ploted(plotdata['ua'][::2, ::2], + data['lats'][::2], + data['lons'][::2], + {'var_name': 'd_ua', + 'long_name': 'Eastward Wind Change', + 'units': 'm s-1'})) + + cubelist.append(cube_to_save_ploted(plotdata['va'][::2, ::2], + data['lats'][::2], + data['lons'][::2], + {'var_name': 'd_va', + 'long_name': 'Northward Wind Change', + 'units': 'm s-1'})) + + iris.save(cubelist, target=diagnostic_file) + + logger.info("Recording provenance of %s:\n%s", diagnostic_file, + pformat(provenance_record)) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(get_plot_filename(dataname + + '_li17natcc_fig1a', + cfg), + provenance_record) + provenance_logger.log(diagnostic_file, provenance_record) + + +def plot_rain(cfg, titlestr, data, lats, lons): + """Plot contour map.""" + # Plot data + # create figure and axes instances + subplot_kw = {'projection': cart.PlateCarree()} + fig, axx = plt.subplots(figsize=(7, 5), subplot_kw=subplot_kw) + axx.set_extent([45, 120, -15, 35], cart.PlateCarree()) + + # draw filled contours + cnplot = plt.contourf( + lons, + lats, + data, + np.linspace(-0.75, 0.75, 11), + transform=cart.PlateCarree(), + cmap='BrBG', + # cmap='RdBu_r', + extend='both') + + axx.coastlines() + # ISM (60◦ –95◦ E, 10◦ –30◦ N) + axx.plot( + [60, 95, 95, 60, 60], [10, 10, 30, 30, 10], + transform=cart.PlateCarree(), + color='k') + + # add colorbar + cbar = fig.colorbar(cnplot, ax=axx, shrink=0.8) + # add colorbar title string + cbar.set_label(r'Rainfall change, mm d$^{-1}$') + + axx.set_xlabel('Longitude') + axx.set_ylabel('Latitude') + axx.set_title(titlestr) + axx.set_xticks([40, 65, 90, 115]) + axx.set_xticklabels(['40°E', '65°E', '90°E', '115°E']) + axx.set_yticks([-10, 0, 10, 20, 30]) + axx.set_yticklabels(['10°S', '0°', '10°N', '20°N', '30°N']) + + fig.tight_layout() + if titlestr == 'Multi-model mean rainfall change due to model error': + figname = 'li17natcc_fig2c' + else: + figname = 'li17natcc_fig2d' + + fig.savefig(get_plot_filename(figname, cfg), dpi=300) + plt.close() + + titlestr = titlestr + ' Box displays the area used to define the ' + \ + 'average ISM (Indian Summer Monsoon) rainfall. Precipitation ' + \ + 'changes are normalized by the corresponding global ' + \ + 'mean SST increase for each model.' 
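+
+    # Provenance: record all 'pr' and 'ts' input files as ancestors of this
+    # figure and its netCDF output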
+ + selection = _get_sel_files_var(cfg, ['pr', 'ts']) + + provenance_record = get_provenance_record(selection, + titlestr, ['diff'], ['reg']) + + diagnostic_file = get_diagnostic_filename(figname, cfg) + + logger.info("Saving analysis results to %s", diagnostic_file) + + iris.save(cube_to_save_ploted(data, lats, lons, {'var_name': 'd_pr', + 'long_name': 'Prec' + + 'ipita' + + 'tion ' + + 'Change', + 'units': 'mm d-1'}), + target=diagnostic_file) + + logger.info("Recording provenance of %s:\n%s", diagnostic_file, + pformat(provenance_record)) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(get_plot_filename(figname, cfg), + provenance_record) + provenance_logger.log(diagnostic_file, provenance_record) + + +def plot_2dcorrelation_li(cfg, reg2d, lats, lons): + """Plot contour map.""" + # Set mask for pvalue > 0.005 + mask = reg2d[:, :, 1] > 0.05 + zzz = np.ma.array(reg2d[:, :, 0], mask=mask) + + # Plot data + # create figure and axes instances + subplot_kw = {'projection': cart.PlateCarree(central_longitude=180)} + fig, axx = plt.subplots(figsize=(8, 4), subplot_kw=subplot_kw) + axx.set_extent( + [-150, 60, -15, 30], cart.PlateCarree(central_longitude=180)) + + # draw filled contours + cnplot = plt.contourf( + lons, + lats, + zzz, [-0.7, -0.6, -0.5, -0.4, 0.4, 0.5, 0.6, 0.7], + transform=cart.PlateCarree(), + cmap='BrBG', + # cmap='RdBu_r', + extend='both', + corner_mask=False) + + axx.coastlines() + # Western pacific (140◦ E–170◦ W, 12◦ S–12◦ N) + axx.plot( + [-40, 10, 10, -40, -40], [-12, -12, 12, 12, -12], + transform=cart.PlateCarree(central_longitude=180), + color='k') + # Indian Ocean (SEIO; 70◦ –100◦ E, 8◦ S–2◦ N) + axx.plot( + [70, 100, 100, 70, 70], [-8, -8, 2, 2, -8], + transform=cart.PlateCarree(), + color='k') + + # add colorbar + cbar = fig.colorbar(cnplot, ax=axx, shrink=0.6, orientation='horizontal') + # add colorbar title string + cbar.set_label('Correlation') + + axx.set_xlabel('Longitude') + axx.set_ylabel('Latitude') + axx.set_title('Inter-model relationship between ISM rainfall changes' + + ' and mean precip.') + axx.set_xticks(np.linspace(-150, 60, 8)) + axx.set_xticklabels( + ['30°E', '60°E', '90°E', '120°E', '150°E', '180°E', '150°W', '120°W']) + axx.set_yticks([-15, 0, 15, 30]) + axx.set_yticklabels(['15°S', '0°', '15°N', '30°N']) + + fig.tight_layout() + fig.savefig(get_plot_filename('li17natcc_fig1b', cfg), dpi=300) + plt.close() + + caption = 'Inter-model relationship between ISM ' + \ + '(Indian Summer Monsoon) region (60◦ –95◦ E, 10◦ –30◦ N) ' + \ + 'rainfall changes and in simulated present-day precipitation ' + \ + 'over the Indo-Pacific. Solid boxes denote the tropical ' + \ + 'western Pacific (140◦ E–170◦ W, 12◦ S–12◦ N) and southeastern ' + \ + 'Indian Ocean (SEIO; 70◦ –100◦ E, 8◦ S–2◦ N) and colour shading ' + \ + 'indicates regions of significance at the 95% level according to ' + \ + 't-test. 
All the precipitation changes are normalized by the ' + \ + 'corresponding global mean SST increase for each model' + + provenance_record = get_provenance_record(_get_sel_files_var(cfg, + ['pr', + 'ts']), + caption, ['corr'], ['reg']) + + diagnostic_file = get_diagnostic_filename('li17natcc_fig1b', cfg) + + logger.info("Saving analysis results to %s", diagnostic_file) + + iris.save(cube_to_save_ploted(zzz, lats, lons, {'var_name': 'corr', + 'long_name': 'Correlation', + 'units': '1'}), + target=diagnostic_file) + + logger.info("Recording provenance of %s:\n%s", diagnostic_file, + pformat(provenance_record)) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(get_plot_filename('li17natcc_fig1b', cfg), + provenance_record) + provenance_logger.log(diagnostic_file, provenance_record) + + +def plot_reg_li(cfg, data_ar, future_exp): + """Plot scatter plot and regression.""" + # data_ar {"datasets": datasets, "ar_diff_rain": ar_diff_rain, + # "ar_diff_ua": ar_diff_ua, "ar_diff_va": ar_diff_va, + # "ar_hist_rain": ar_hist_rain, "mism_diff_rain": mism_diff_rain, + # "mwp_hist_rain": mwp_hist_rain} + reg = stats.linregress(data_ar["mwp_hist_rain"], data_ar["mism_diff_rain"]) + y_reg = reg.slope * np.linspace(5.5, 8.5, 2) + reg.intercept + + fig, axx = plt.subplots(figsize=(7, 7)) + + axx.plot(np.linspace(5.5, 8.8, 2), y_reg, color='k') + + for iii, model in enumerate(data_ar["datasets"]): + proj = (select_metadata(cfg['input_data'].values(), + dataset=model))[0]['project'] + style = e.plot.get_dataset_style(model, style_file=proj.lower()) + axx.plot( + data_ar["mwp_hist_rain"][iii], + data_ar["mism_diff_rain"][iii], + marker=style['mark'], + color=style['color'], + markerfacecolor=style['facecolor'], + linestyle='none', + markersize=10, + markeredgewidth=2.0, + label=model) + + axx.set_xlim([5.5, 8.8]) + axx.set_ylim([-0.01, 0.55]) + axx.text(8.1, 0.01, 'r = {:.2f}'.format(reg.rvalue)) + axx.set_xticks(np.linspace(6, 8, 3)) + axx.set_yticks(np.linspace(0.0, 0.5, 6)) + axx.vlines(6, 0, 0.5, colors='r', linestyle='solid') + axx.set_xlabel('Western Pacific precip.') + axx.set_ylabel('ISM rainfall change') + axx.legend(ncol=2, loc=0, framealpha=1) + + fig.tight_layout() + fig.savefig(get_plot_filename('li17natcc_fig2a', cfg), dpi=300) + plt.close() + + caption = ' Scatter plot of the simulated tropical western Pacific ' + \ + 'precipitation (mm d−1 ) versus projected average ISM ' + \ + '(Indian Summer Monsoon) rainfall changes under the ' + future_exp + \ + ' scenario. The red line denotes the observed present-day ' + \ + 'western Pacific precipitation and the inter-model ' + \ + 'correlation (r) is shown.' 
+ + provenance_record = get_provenance_record(_get_sel_files_var(cfg, + ['pr', 'ts']), + caption, ['corr'], ['reg'], + plot_type='scatter') + + diagnostic_file = get_diagnostic_filename('li17natcc_fig2a', cfg) + + logger.info("Saving analysis results to %s", diagnostic_file) + + iris.save(cube_to_save_scatter(data_ar["mwp_hist_rain"], + data_ar["mism_diff_rain"], + {'var_name1': 'm_pr', + 'long_name1': 'Mean Precipitation', + 'units1': 'mm d-1', + 'var_name2': 'd_pr', + 'long_name2': 'Precipitation Change', + 'units2': 'mm d-1'}), + + target=diagnostic_file) + + logger.info("Recording provenance of %s:\n%s", diagnostic_file, + pformat(provenance_record)) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(get_plot_filename('li17natcc_fig2a', cfg), + provenance_record) + provenance_logger.log(diagnostic_file, provenance_record) + + +def plot_reg_li2(cfg, datasets, mdiff_ism, mdiff_ism_cor, hist_ism): + """Plot scatter plot and regression.""" + fig, axx = plt.subplots(figsize=(7, 7)) + + axx.plot(np.linspace(-2, 21, 2), 0.5 * np.linspace(-2, 21, 2), color='k') + + axx.plot( + np.mean((mdiff_ism / hist_ism) * 100.0), + np.mean((mdiff_ism_cor / hist_ism) * 100.0), + color='k', + marker='v', + linestyle='none', + markersize=12, + markeredgewidth=3.0, + label='multi-model mean') + + for iii, model in enumerate(datasets): + + proj = (select_metadata(cfg['input_data'].values(), + dataset=model))[0]['project'] + style = e.plot.get_dataset_style(model, style_file=proj.lower()) + axx.plot( + mdiff_ism[iii] / hist_ism[iii] * 100.0, + mdiff_ism_cor[iii] / hist_ism[iii] * 100.0, + marker=style['mark'], + color=style['color'], + markerfacecolor=style['facecolor'], + linestyle='none', + markersize=10, + markeredgewidth=2.0, + label=model) + + axx.errorbar( + np.mean((mdiff_ism / hist_ism) * 100.0), + np.mean((mdiff_ism_cor / hist_ism) * 100.0), + xerr=np.std((mdiff_ism / hist_ism) * 100.0), + yerr=np.std((mdiff_ism_cor / hist_ism) * 100.0), + color='k', + marker='v', + linestyle='-', + markersize=10, + markeredgewidth=3.0, + capsize=10) + + axx.set_xlim([-2, 21]) + axx.set_ylim([-2, 21]) + axx.text( + 15, + 7.1, + 'y = {:.1f}x'.format(0.5), + rotation=np.rad2deg(np.arctan(0.5)), + horizontalalignment='center', + verticalalignment='center') + axx.set_xticks(np.linspace(0, 20, 5)) + axx.set_yticks(np.linspace(0, 20, 5)) + axx.vlines(0, -2, 21, colors='k', linestyle='solid') + axx.hlines(0, -2, 21, colors='k', linestyle='solid') + axx.set_xlabel('Uncorrected ISM rainfall change ratio') + axx.set_ylabel('Corrected ISM rainfall change ratio (% per °C)') + axx.legend(ncol=2, loc=2, framealpha=1) + + fig.tight_layout() + fig.savefig(get_plot_filename('li17natcc_fig2b', cfg), dpi=300) + plt.close() + + caption = ' Scatter plot of the uncorrected versus corrected average ' + \ + 'ISM (Indian Summer Monsoon) rainfall change ratios (% per degree ' + \ + 'Celsius of global SST warming). The error bars for the ' + \ + 'Multi-model mean indicate the standard deviation spread among ' + \ + 'models and the 2:1 line (y = 0.5x) is used to illustrate the ' + \ + 'Multi-model mean reduction in projected rainfall increase.' 
+ + provenance_record = get_provenance_record(_get_sel_files_var(cfg, + ['pr', 'ts']), + caption, ['corr'], ['reg'], + plot_type='scatter') + + diagnostic_file = get_diagnostic_filename('li17natcc_fig2b', cfg) + + logger.info("Saving analysis results to %s", diagnostic_file) + + iris.save(cube_to_save_scatter(np.mean((mdiff_ism / hist_ism) * 100.0), + np.mean((mdiff_ism_cor / hist_ism) * 100.0), + {'var_name1': 'rd_pr', + 'long_name1': 'Relative Precipitation ' + + 'Change', + 'units1': 'percent K-1', + 'var_name2': 'corr_pr', + 'long_name2': 'Precipitation Correction', + 'units2': 'percent K-1'}), + + target=diagnostic_file) + + logger.info("Recording provenance of %s:\n%s", diagnostic_file, + pformat(provenance_record)) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(get_plot_filename('li17natcc_fig2b', cfg), + provenance_record) + provenance_logger.log(diagnostic_file, provenance_record) + + +############################################################################### +# Setup diagnostic +############################################################################### + + +def get_reg_2d_li(mism_diff_rain, ar_hist_rain, lats, lons): + """Linear regression of 1D and 2D array, returns 2D array of p and r.""" + reg2d = np.zeros((len(lats), len(lons), 4)) + for iii in range(len(lats)): + for jjj in range(len(lons)): + reg = stats.linregress(mism_diff_rain, ar_hist_rain[iii, jjj, :]) + reg2d[iii, jjj, 0] = reg.rvalue + reg2d[iii, jjj, 1] = reg.pvalue + reg2d[iii, jjj, 2] = reg.slope + reg2d[iii, jjj, 3] = reg.intercept + + return reg2d + + +def substract_li(cfg, data, lats, lons, future_exp): + """Difference between historical and future fields.""" + pathlist = data.get_path_list(short_name='pr', exp='historical') + + ar_diff_rain = np.zeros((len(lats), len(lons), len(pathlist))) + mism_diff_rain = np.zeros(len(pathlist)) + mwp_hist_rain = np.zeros(len(pathlist)) + ar_hist_rain = np.zeros((len(lats), len(lons), len(pathlist))) + ar_diff_ua = np.zeros((len(lats), len(lons), len(pathlist))) + ar_diff_va = np.zeros((len(lats), len(lons), len(pathlist))) + datasets = [] + for iii, dataset_path in enumerate(pathlist): + + # Substract historical experiment from rcp85 experiment + datasets.append(data.get_info(n.DATASET, dataset_path)) + ar_diff_rain[:, :, iii] = (data.get_data(short_name='pr', + exp=future_exp, + dataset=datasets[iii]) - + data.get_data(short_name='pr', + exp='historical', + dataset=datasets[iii])) / \ + (data.get_data(short_name='ts', + exp=future_exp, dataset=datasets[iii]) - + data.get_data(short_name='ts', + exp='historical', dataset=datasets[iii])) + # ISM (60◦ –95◦ E, 10◦ –30◦ N) + mism_diff_rain[iii] = \ + np.mean((ar_diff_rain[:, + get_latlon_index(lons, 60, 95), + iii])[get_latlon_index(lats, 10, 30), :]) + ar_hist_rain[:, :, iii] = data.get_data( + short_name='pr', exp='historical', dataset=datasets[iii]) + # Western pacific (140◦ E–170◦ W, 12◦ S–12◦ N) + mwp_hist_rain[iii] = \ + np.mean((ar_hist_rain[:, + get_latlon_index(lons, 140, 170), + iii])[get_latlon_index(lats, -12, 12), :]) + + ar_diff_ua[:, :, iii] = (data.get_data(short_name='ua', + exp=future_exp, + dataset=datasets[iii]) - + data.get_data(short_name='ua', + exp='historical', + dataset=datasets[iii])) / \ + (data.get_data(short_name='ts', + exp=future_exp, dataset=datasets[iii]) - + data.get_data(short_name='ts', + exp='historical', dataset=datasets[iii])) + + ar_diff_va[:, :, iii] = (data.get_data(short_name='va', + exp=future_exp, + dataset=datasets[iii]) - + 
data.get_data(short_name='va',
+                                               exp='historical',
+                                               dataset=datasets[iii])) / \
+            (data.get_data(short_name='ts',
+                           exp=future_exp, dataset=datasets[iii]) -
+             data.get_data(short_name='ts',
+                           exp='historical', dataset=datasets[iii]))
+
+        plot_rain_and_wind(cfg, datasets[iii],
+                           {'ar_diff_rain': ar_diff_rain[:, :, iii],
+                            'ar_diff_ua': ar_diff_ua[:, :, iii],
+                            'ar_diff_va': ar_diff_va[:, :, iii],
+                            'lats': lats, 'lons': lons}, future_exp)
+
+    return {
+        "datasets": datasets,
+        "ar_diff_rain": ar_diff_rain,
+        "ar_diff_ua": ar_diff_ua,
+        "ar_diff_va": ar_diff_va,
+        "ar_hist_rain": ar_hist_rain,
+        "mism_diff_rain": mism_diff_rain,
+        "mwp_hist_rain": mwp_hist_rain
+    }
+
+
+def correct_li(data, lats, lons, reg):
+    """Correct mean western Pacific rain to the measured value (6 mm d-1)."""
+    # Prec bias for each model
+    mwp_hist_cor = data["mwp_hist_rain"] - 6.0
+
+    proj_err = np.zeros((len(lats), len(lons), len(data["datasets"])))
+    ar_diff_cor = np.zeros((len(lats), len(lons), len(data["datasets"])))
+    mism_hist_rain = np.zeros((len(data["datasets"])))
+    mism_diff_cor = np.zeros((len(data["datasets"])))
+
+    for iii in range(0, len(data["datasets"])):
+
+        # Errors of climate projection
+        proj_err[:, :, iii] = mwp_hist_cor[iii] * reg[:, :, 2]
+
+        # Correction for prec difference
+        ar_diff_cor[:, :, iii] = data["ar_diff_rain"][:, :, iii] - \
+            proj_err[:, :, iii]
+        mism_hist_rain[iii] = \
+            np.mean((data["ar_hist_rain"][:,
+                                          get_latlon_index(lons, 60, 95),
+                                          iii])[get_latlon_index(lats,
+                                                                 10, 30),
+                                                :])
+        mism_diff_cor[iii] = \
+            np.mean((ar_diff_cor[:,
+                                 get_latlon_index(lons, 60, 95),
+                                 iii])[get_latlon_index(lats, 10, 30), :])
+
+    return {
+        "datasets": data["datasets"],
+        "ar_diff_cor": ar_diff_cor,
+        "proj_err": proj_err,
+        "mism_diff_cor": mism_diff_cor,
+        "mism_hist_rain": mism_hist_rain,
+        "mwp_hist_cor": mwp_hist_cor
+    }
+
+
+def main(cfg):
+    """Run the diagnostic."""
+    ###########################################################################
+    # Read recipe data
+    ###########################################################################
+
+    # Dataset data containers
+    data = e.Datasets(cfg)
+    logging.debug("Found datasets in recipe:\n%s", data)
+
+    # Variables
+    var = e.Variables(cfg)
+    logging.debug("Found variables in recipe:\n%s", var)
+
+    # Check for pr, ua, va and ts
+    if not var.vars_available('pr', 'ua', 'va', 'ts'):
+        raise ValueError("This diagnostic needs 'pr', 'ua', " +
+                         "'va', and 'ts'")
+
+    available_exp = list(group_metadata(cfg['input_data'].values(), 'exp'))
+
+    if 'historical' not in available_exp:
+        raise ValueError("The diagnostic needs a historical experiment " +
+                         "and one other experiment.")
+
+    if len(available_exp) != 2:
+        raise ValueError("The diagnostic needs two model experiments: " +
+                         "one historical and one other.")
+
+    available_exp.remove('historical')
+    future_exp = available_exp[0]
+    ###########################################################################
+    # Read data
+    ###########################################################################
+
+    # Create iris cube for each dataset and save annual means
+    for dataset_path in data:
+        cube = iris.load(dataset_path)[0]
+        cat.add_month_number(cube, 'time', name='month_number')
+        # MJJAS mean (monsoon season)
+        cube = cube[np.where(
+            np.absolute(cube.coord('month_number').points - 7) <= 2)]
+        cube = cube.collapsed('time', iris.analysis.MEAN)
+
+        short_name = data.get_info(n.SHORT_NAME, dataset_path)
+        if short_name == 'pr':
+            # convert from kg m-2 s-1 to mm d-1
+            # cube.convert_units('mm 
d-1') doesn't work. + cube.data = cube.data * (60.0 * 60.0 * 24.0) + cube.units = 'mm d-1' + # Possible because all data must be interpolated to the same grid. + if 'lats' not in locals(): + lats = cube.coord('latitude').points + lons = cube.coord('longitude').points + + data.set_data(cube.data, dataset_path) + ########################################################################### + # Process data + ########################################################################### + + data_ar = substract_li(cfg, data, lats, lons, future_exp) + + # data_ar {"datasets": datasets, "ar_diff_rain": ar_diff_rain, + # "ar_diff_ua": ar_diff_ua, "ar_diff_va": ar_diff_va, + # "ar_hist_rain": ar_hist_rain, "mism_diff_rain": mism_diff_rain, + # "mwp_hist_rain": mwp_hist_rain} + + plot_rain_and_wind(cfg, 'Multi-model_mean', + {'ar_diff_rain': data_ar["ar_diff_rain"], + 'ar_diff_ua': data_ar["ar_diff_ua"], + 'ar_diff_va': data_ar["ar_diff_va"], + 'lats': lats, 'lons': lons}, future_exp) + + # Regression between mean ISM rain difference and historical rain + reg2d = get_reg_2d_li(data_ar["mism_diff_rain"], data_ar["ar_hist_rain"], + lats, lons) + + plot_2dcorrelation_li(cfg, reg2d, lats, lons) + + plot_reg_li(cfg, data_ar, future_exp) + + # Regression between mean WP rain and rain difference for each location + reg2d_wp = get_reg_2d_li(data_ar["mwp_hist_rain"], data_ar["ar_diff_rain"], + lats, lons) + + data_ar2 = correct_li(data_ar, lats, lons, reg2d_wp) + # return {"datasets": data["datasets"], "ar_diff_cor": ar_diff_cor, + # "proj_err": proj_err, "mism_diff_cor": mism_diff_cor, + # "mism_hist_rain": mism_hist_rain, "mwp_hist_cor": mwp_hist_cor} + + plot_reg_li2(cfg, data_ar["datasets"], data_ar["mism_diff_rain"], + data_ar2["mism_diff_cor"], data_ar2["mism_hist_rain"]) + + plot_rain(cfg, 'Multi-model mean rainfall change due to model error', + np.mean(data_ar2["proj_err"], axis=2), lats, lons) + plot_rain(cfg, 'Corrected multi-model mean rainfall change', + np.mean(data_ar2["ar_diff_cor"], axis=2), lats, lons) + + +if __name__ == '__main__': + with e.run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/emergent_constraints/multiple_constraints.py b/esmvaltool/diag_scripts/emergent_constraints/multiple_constraints.py new file mode 100644 index 0000000000..bdf621a697 --- /dev/null +++ b/esmvaltool/diag_scripts/emergent_constraints/multiple_constraints.py @@ -0,0 +1,131 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +"""Diagnostic script to evaluate multiple emergent constraints simultaneously. + +Description +----------- +Establish multiple emergent constraints for arbitrary input variables and an +arbitrary target variable. All input datasets need to be one-dimensional and +must include a coordinate ``'dataset'`` or ``'model'`` (thus, the data +describes a single scalar value for each dataset). All input datasets must be +marked with a ``var_type`` (either ``feature``, ``label``, ``prediction_input`` +or ``prediction_input_error``) and a ``tag``, which describes the type of data. +This diagnostic supports only a single ``tag`` for ``label`` and an arbitrary +number of ``tag`` s for ``feature``. For every ``tag``, a +``'reference_dataset'`` can be specified, which will be automatically +considered as ``prediction_input``. If ``reference_dataset`` contains ``'|'`` +(e.g. ``'OBS1|OBS2'``), multiple datasets are considered as +``prediction_input`` (in this case ``'OBS1'`` and ``'OBS2'``). 
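+
+Example
+-------
+A minimal, purely illustrative set of input datasets (the dataset and tag
+names below are hypothetical, not prescribed by this script) could be
+marked like this::
+
+    - {dataset: MODEL_A, var_type: feature, tag: constraint_x}
+    - {dataset: MODEL_A, var_type: label, tag: target_y}
+    - {dataset: OBS, var_type: prediction_input, tag: constraint_x}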
+ +Author +------ +Manuel Schlund (DLR, Germany) + +Project +------- +CRESCENDO + +Configuration options in recipe +------------------------------- +additional_data: list of dict, optional + Additional datasets given as list of metadata. +all_data_label: str, optional (default: 'all') + Label used in plots when all input data is considered. Only relevant if + ``group_by`` is not used. +combine_groups: bool, optional (default: False) + Add results to plots for data generated by combining the data of all + individual groups. +confidence_level: float, optional (default: 0.66) + Confidence level for estimation of target variable. +group_by: str, optional + Group input data by an attribute (e.g. produces separate plots for the + individual groups, etc.). +ignore_patterns: list of str, optional + Patterns matched against ancestor files. Those files are ignored. +merge_identical_pred_input: bool, optional (default: True) + Use identical prediction_input values as single value. +numbers_as_markers: bool, optional (default: False) + Use numbers as markers in scatterplots. +patterns: list of str, optional + Patterns matched against ancestor files. +plot_regression_line_mean: bool, optional (default: False) + Plot means of regression lines in scatterplots. +read_external_file: str, optional + Read input datasets from external file given as absolute path or relative + path. In the latter case, ``'auxiliary_data_dir'`` from the user + configuration file is used as base directory. +savefig_kwargs: dict + Keyword arguments for :func:`matplotlib.pyplot.savefig`. +seaborn_settings: dict + Options for :func:`seaborn.set_theme` (affects all plots). + +""" + +import logging +import os +from copy import deepcopy + +import pandas as pd +import seaborn as sns + +import esmvaltool.diag_scripts.emergent_constraints as ec +from esmvaltool.diag_scripts.shared import run_diagnostic + +logger = logging.getLogger(os.path.basename(__file__)) + + +def get_default_settings(cfg): + """Get default configuration settings.""" + cfg = deepcopy(cfg) + cfg.setdefault('all_data_label', 'all') + cfg.setdefault('combine_groups', False) + cfg.setdefault('confidence_level', 0.66) + cfg.setdefault('merge_identical_pred_input', True) + cfg.setdefault('patterns', []) + cfg.setdefault('savefig_kwargs', { + 'bbox_inches': 'tight', + 'dpi': 600, + 'orientation': 'landscape', + }) + cfg.setdefault('seaborn_settings', {}) + return cfg + + +def main(cfg): + """Run the diagnostic.""" + cfg = get_default_settings(cfg) + sns.set_theme(**cfg['seaborn_settings']) + + # Load data and perform PCA + (training_data, prediction_data, attributes) = ec.get_input_data(cfg) + training_data_no_nans = training_data.dropna() + + # Plots + with pd.option_context(*ec.PANDAS_PRINT_OPTIONS): + logger.info( + "Correlation of training data (considering all available data):\n" + "%s", training_data.corr()) + logger.info( + "Correlation of training data (considering only climate models " + "where data for all constraints is available):\n%s", + training_data_no_nans.corr()) + ec.plot_individual_scatterplots(training_data, + prediction_data, + attributes, + 'training_data', + cfg) + ec.plot_merged_scatterplots(training_data, prediction_data, attributes, + 'training_data', cfg) + ec.plot_target_distributions(training_data, prediction_data, attributes, + 'training_data', cfg) + + # Export CSV + ec.export_csv(training_data, attributes, 'training_data', cfg) + ec.export_csv(training_data_no_nans, attributes, 'training_data_no_nans', + cfg) + ec.export_csv(prediction_data, 
attributes, 'prediction_data', cfg) + + +if __name__ == '__main__': + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/emergent_constraints/single_constraint.py b/esmvaltool/diag_scripts/emergent_constraints/single_constraint.py new file mode 100644 index 0000000000..9a95adb321 --- /dev/null +++ b/esmvaltool/diag_scripts/emergent_constraints/single_constraint.py @@ -0,0 +1,144 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +"""Diagnostic script to evaluate a single emergent constraint. + +Description +----------- +Establish a single emergent constraint for an arbitrary input variable and an +arbitrary target variable. All input datasets need to be one-dimensional and +must include a coordinate ``'dataset'`` or ``'model'`` (thus, the data +describes a single scalar value for each dataset). All input datasets must be +marked with a ``var_type`` (either ``feature``, ``label``, ``prediction_input`` +or ``prediction_input_error``) and a ``tag``, which describes the type of data. +This diagnostic supports only a single ``tag`` for ``label`` and ``feature``. +For every ``tag``, a ``'reference_dataset'`` can be specified, which will be +automatically considered as ``prediction_input``. If ``reference_dataset`` +contains ``'|'`` (e.g. ``'OBS1|OBS2'``), multiple datasets are considered as +``prediction_input`` (in this case ``'OBS1'`` and ``'OBS2'``). + +Author +------ +Manuel Schlund (DLR, Germany) + +Project +------- +CRESCENDO + +Configuration options in recipe +------------------------------- +additional_data: list of dict, optional + Additional datasets given as list of metadata. +all_data_label: str, optional (default: 'all') + Label used in plots when all input data is considered. Only relevant if + ``group_by`` is not used. +combine_groups: bool, optional (default: False) + Add results to plots for data generated by combining the data of all + individual groups. +confidence_level: float, optional (default: 0.66) + Confidence level for estimation of target variable. +group_by: str, optional + Group input data by an attribute (e.g. produces separate plots for the + individual groups, etc.). +ignore_patterns: list of str, optional + Patterns matched against ancestor files. Those files are ignored. +merge_identical_pred_input: bool, optional (default: True) + Use identical prediction_input values as single value. +numbers_as_markers: bool, optional (default: False) + Use numbers as markers in scatterplots. +patterns: list of str, optional + Patterns matched against ancestor files. +plot_regression_line_mean: bool, optional (default: False) + Plot means of regression lines in scatterplots. +read_external_file: str, optional + Read input datasets from external file given as absolute path or relative + path. In the latter case, ``'auxiliary_data_dir'`` from the user + configuration file is used as base directory. +savefig_kwargs: dict + Keyword arguments for :func:`matplotlib.pyplot.savefig`. +seaborn_settings: dict + Options for :func:`seaborn.set_theme` (affects all plots). 
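+
+Example
+-------
+Sketch of a recipe entry using this script (the option values shown are
+illustrative assumptions, not requirements)::
+
+    scripts:
+      constraint:
+        script: emergent_constraints/single_constraint.py
+        confidence_level: 0.66
+        numbers_as_markers: true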
+ +""" + +import logging +import os +from copy import deepcopy + +import pandas as pd +import seaborn as sns + +import esmvaltool.diag_scripts.emergent_constraints as ec +from esmvaltool.diag_scripts.shared import run_diagnostic + +logger = logging.getLogger(os.path.basename(__file__)) + + +def check_training_data(training_data): + """Check training data.""" + features = training_data.x + if len(features.columns) != 1: + raise ValueError( + f"Expected exactly 1 'feature' variable, got " + f"{len(features.columns):d}") + + +def get_default_settings(cfg): + """Get default configuration settings.""" + cfg = deepcopy(cfg) + cfg.setdefault('all_data_label', 'all') + cfg.setdefault('combine_groups', False) + cfg.setdefault('confidence_level', 0.66) + cfg.setdefault('merge_identical_pred_input', True) + cfg.setdefault('savefig_kwargs', { + 'bbox_inches': 'tight', + 'dpi': 600, + 'orientation': 'landscape', + }) + cfg.setdefault('seaborn_settings', {}) + return cfg + + +def main(cfg): + """Run the diagnostic.""" + cfg = get_default_settings(cfg) + sns.set_theme(**cfg['seaborn_settings']) + + # Load data + (training_data, prediction_data, attributes) = ec.get_input_data(cfg) + check_training_data(training_data) + + # Plots + with pd.option_context(*ec.PANDAS_PRINT_OPTIONS): + logger.info( + "Correlation of training data (considering all available data):\n" + "%s", training_data.corr()) + ec.plot_individual_scatterplots(training_data, + prediction_data, + attributes, + 'training_data', + cfg) + ec.plot_merged_scatterplots(training_data, + prediction_data, + attributes, + 'training_data', + cfg) + + # Export CSV + ec.export_csv(training_data, attributes, 'training_data', cfg) + ec.export_csv(prediction_data, attributes, 'prediction_data', cfg) + + # Print constraint + label = training_data.y.columns[0] + units = attributes[label]['units'] + constrained_target = ec.get_constraint_from_df(training_data, + prediction_data, + cfg['confidence_level']) + logger.info( + "Constraint on target variable '%s': [%.2f, %.2f] %s with best " + "estimate %.2f %s", label, constrained_target[0], + constrained_target[2], units, constrained_target[1], units) + + +if __name__ == '__main__': + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/emergent_constraints/snowalbedo.ncl b/esmvaltool/diag_scripts/emergent_constraints/snowalbedo.ncl new file mode 100644 index 0000000000..9be5793f50 --- /dev/null +++ b/esmvaltool/diag_scripts/emergent_constraints/snowalbedo.ncl @@ -0,0 +1,709 @@ +; ############################################################################# +; snowalbedo (ipcc ar5 fig. 9.45a) +; ############################################################################# +; Author: Axel Lauer (DLR, Germany) +; PROJECT-NAME CRESCENDO +; ############################################################################# +; Description +; Calculates springtime snow-albedo feedback values in climate change +; versus springtime values in the seasonal cycle in transient climate +; change experiments. +; Figure resembles IPCC AR5 Fig. 9.45a (Flato et al., 2013). +; +; References: +; - Flato et al., Evaluation of climate models, in: Climate Change 2013: +; the Physical Science Basis, 2013. +; - Hall, A., and X. Qu, Using the current seasonal cycle to constrain +; snow albedo feedback in future climate change, Geophys. Res. Lett., +; 33, L03502, doi:10.1029/2005GL025127, 2006. +; +; Required settings (scripts) +; exp_presentday: name of present-day experiment (e.g. 
"historical") +; exp_future: name of climate change experiment (e.g. "rcp45") +; +; Optional settings (scripts) +; diagminmax: observational uncertainty (min and max) +; legend_outside: True, False +; styleset: "CMIP5" (if not set, this diagnostic will create its own +; color table and symbols for plotting) +; suffix: string to be added to output filenames +; xmax: upper limit of x-axis (default = automatic) +; xmin: lower limit of x-axis (default = automatic) +; ymax: upper limit of y-axis (default = automatic) +; ymin: lower limit of y-axis (default = automatic) +; +; Required settings (variables) +; ref_model: name of reference data set +; +; Optional settings (variables) +; none +; +; Caveats +; - requires NCL version 6.4 or higher to plot confidence bands of +; regression +; - no regridding: rsdscs and albsfc fields are required to be on the same +; grid +; - it remains unclear which experiments and time periods have to be used +; to exactly reproduce fig. 9.45a in IPCC AR5 +; +; Modification history +; 20191104-lauer_axel: rewritten for v2.0 +; 20171220-lauer_axel: written +; +; ############################################################################ + +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/scatterplot.ncl" + +; ============================================================================= +; Function snowalbedo_precalc +; Calculate variables needed to obtain springtime snow-albedo feedback values: +; - seasonal cycle of surface albedo weighted by April incoming solar +; radiation and averaged over the region 30N-90N, 0-360 (land grid +; cells only) +; - seasonal cycle of 2m temperature averaged over the region 30N-90N, 0-360 +; (land grid cells only) +; +; Arguments +; - tas: 2-m temperature +; - alb: surface albedo +; - rad: downwelling shortwave radiation at surface +; - lsdata: NCL build-in land-sea mask +; +; Reference +; - Hall et al. 
(2006) +; ============================================================================= + +function snowalbedo_precalc(tas, alb, rad, lsdata) +begin + ; check that dimensions of "rsdt" and "alb" are identical + + dims_r = dimsizes(rad) + dims_a = dimsizes(alb) + + dimensions_fine = False + + if (dimsizes(dims_r).eq.dimsizes(dims_a)) then + if (all(dims_r.eq.dims_a)) then + dimensions_fine = True + end if + end if + + delete(dims_r) + delete(dims_a) + + if (.not.dimensions_fine) then + error_msg("f", DIAG_SCRIPT, "", "dimensions of variables alb and rsdt " \ + + "do not match.") + end if + + rad_seas = time_operations(rad, -1, -1, "average", "monthlyclim", True) + rad_apr = rad_seas(3, :, :) + delete(rad_seas) + + ; ------------------------------------------------------------------- + + ; generate and apply land-mask + + lsm0 = landsea_mask(lsdata, alb&lat, alb&lon) + lsm = where(lsm0.eq.1, 0, lsm0@_FillValue) + copy_VarMeta(lsm0, lsm) + delete(lsm0) + alb = alb + conform(alb, lsm, (/1, 2/)) + + ; --------------------------------------------------------------------- + ; weight surface albedo by April incoming solar radiation + + ; apply land-mask to April incoming solar radiation + rad_apr = rad_apr + lsm + + ; extract region 30N-90N, 0-360 + rad_reg = area_operations(rad_apr, 30., 90., 0., 360., "extract", False) + delete(rad_apr) + alb_reg = area_operations(alb, 30., 90., 0., 360., "extract", False) + + ; calculate grid box areas + area_reg = map_area(rad_reg&lat, rad_reg&lon) + + ; calculate weights + weights = rad_reg * area_reg / sum(rad_reg * area_reg) + delete(area_reg) + delete(rad_reg) + + ; calculate weighted mean albedo + + albmean = dim_sum_n_Wrap(alb_reg * conform(alb_reg, weights, (/1, 2/)), \ + (/1, 2/)) + albmean!0 = "time" + albmean&time = alb_reg&time + delete(weights) + delete(alb_reg) + + ; --------------------------------------------------------------------- + + delete(lsm) + + lsm0 = landsea_mask(lsdata, tas&lat, tas&lon) + lsm = where(lsm0.eq.1, 0, lsm0@_FillValue) + copy_VarMeta(lsm0, lsm) + delete(lsm0) + tas = tas + conform(tas, lsm, (/1, 2/)) + delete(lsm) + + ; calculate average temperature (missing values will be ignored and grid + ; boxes area-weighted) over NH land grid cells (polewards of 30 deg N) + t2 = area_operations(tas, 30., 90., 0., 360., "average", True) + + alb_seas = time_operations(albmean, -1, -1, "average", "monthlyclim", True) + t2_seas = time_operations(t2, -1, -1, "average", "monthlyclim", True) + + delete(t2) + + return([/alb_seas, t2_seas/]) + +end + +; ============================================================================= +; ============================================================================= +; ============================================================================= +; main program +; ============================================================================= +; ============================================================================= +; ============================================================================= + +begin + + enter_msg(DIAG_SCRIPT, "") + + log_info("++++++++++++++++++++++++++++++++++++++++++") + log_info(DIAG_SCRIPT) + log_info("++++++++++++++++++++++++++++++++++++++++++") + + variables = metadata_att_as_array(variable_info, "short_name") + dim_VAR = dimsizes(variables) + + tmp_dim_MOD = new(dim_VAR, integer) + do i = 0, dim_VAR - 1 + var0 = variable_info[i]@short_name + info0 = select_metadata_by_name(input_file_info, var0) + tmp_dim_MOD(i) = ListCount(info0) + delete(info0) + end do + + 
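+  ; Informal sketch of the two Hall and Qu (2006) metrics evaluated below
+  ; (April = month index 3, May = month index 4; units %/K, NH land only):
+  ;   seasonal cycle:  (alb_Apr - alb_May) * 100 / (T2_Apr - T2_May)
+  ;   climate change:  (alb_Apr_future - alb_Apr_present) * 100 / delta_T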
dim_MOD = tmp_dim_MOD(0) + + if (any(tmp_dim_MOD.ne.dim_MOD)) then + error_msg("f", DIAG_SCRIPT, "", "number of datasets has to be " \ + + "identical for all variables.") + end if + + names = new((/dim_VAR, dim_MOD/), string) + exps = new((/dim_VAR, dim_MOD/), string) + ensembles = new((/dim_VAR, dim_MOD/), string) + + do i = 0, dim_VAR - 1 + var0 = variable_info[i]@short_name + info0 = select_metadata_by_name(input_file_info, var0) + names(i, :) = metadata_att_as_array(info0, "dataset") + exps(i, :) = metadata_att_as_array(info0, "exp") + ensembles(i, :) = metadata_att_as_array(info0, "ensemble") + delete(info0) + end do + + ; check if required variables are available + + tas_idx = ind(variables.eq."tas") + rad_idx = ind(variables.eq."rsdt") + alb_idx = ind(variables.eq."alb") + + if (ismissing(tas_idx)) then + error_msg("f", DIAG_SCRIPT, "", "variable tas required but not found.") + end if + if (ismissing(alb_idx)) then + error_msg("f", DIAG_SCRIPT, "", "variable albsfc required but not found.") + end if + if (ismissing(rad_idx)) then + error_msg("f", DIAG_SCRIPT, "", "variable rsdt required but not found.") + end if + + ; check if present-day and future experiment names are defined + + if (isatt(diag_script_info, "exp_presentday")) then + exp_presentday = diag_script_info@exp_presentday + else + error_msg("f", DIAG_SCRIPT, "", "'exp_presentday' not defined in recipe" \ + + " (scripts).") + end if + + if (isatt(diag_script_info, "exp_future")) then + exp_future = diag_script_info@exp_future + else + error_msg("f", DIAG_SCRIPT, "", "'exp_future' not defined in recipe" \ + + " (scripts).") + end if + + ; Set default values for non-required diag_script_info attributes + + set_default_att(diag_script_info, "legend_outside", False) + + ; reference datasets + + refname = new(dim_VAR, string) + ref_ind = new(dim_VAR, integer) + do i = 0, dim_VAR - 1 + if (isatt(variable_info[i], "reference_dataset")) then + refname(i) = variable_info[i]@reference_dataset + ref_ind(i) = ind(names(i, :).eq.refname(i)) + else + error_msg("f", DIAG_SCRIPT, "", "reference dataset not defined for " \ + + "variable " + variables(i)) + end if + end do + + ; check that each model provides two datasets: exp_presentday and exp_future + + ; model indices with no reference dataset + + do i = 0, dim_VAR - 1 + ind_tmp = ind(exps(i, :).eq.exp_presentday) + ind_tmp = where(ind_tmp.eq.ref_ind(i), -1, ind_tmp) + idx = ind(ind_tmp.ge.0) + ind_present = ind_tmp(idx) + delete(ind_tmp) + delete(idx) + ind_tmp = ind(exps(i, :).eq.exp_future) + ind_tmp = where(ind_tmp.eq.ref_ind(i), -1, ind_tmp) + idx = ind(ind_tmp.ge.0) + ind_future = ind_tmp(idx) + delete(ind_tmp) + delete(idx) + + if ((dimsizes(ind_present).ne.dim_MOD / 2) .or. 
\ + (dimsizes(ind_future).ne.dim_MOD / 2)) then + error_msg("f", DIAG_SCRIPT, "", "variable " + variables(i) \ + + ": number of model datasets for exp " + exp_presentday \ + + " and exp " + exp_future + " is not identical.") + end if + + do j = 0, dimsizes(ind_present) - 1 + itest = ind(names(i, ind_future).eq.names(i, ind_present(j))) + if (ismissing(itest)) then + error_msg("f", DIAG_SCRIPT, "", "variable " + variables(i) \ + + ", dataset " + names(i, ind_present(j)) \ + + ": no data for experiment " + exp_future + " found.") + end if + end do + end do + + num_models_present = dimsizes(ind_present) + delete(ind_future) + + ; make sure path for (mandatory) netcdf output exists + + work_dir = config_user_info@work_dir + "/" + ; Create work dir + system("mkdir -p " + work_dir) + + climofiles = metadata_att_as_array(input_file_info, "filename") +end + +begin + + ; ======================================================================== + ; ============================ calculations ============================== + ; ======================================================================== + + ydata = new(num_models_present + 1, float) + xdata = new(num_models_present + 1, float) + ds_names = new(num_models_present + 1, string) + + obs_done = False + + a = addfile("$NCARG_ROOT/lib/ncarg/data/cdf/landsea.nc", "r") + lsdata = a->LSMASK + + do ii = 0, num_models_present + + ; -------------------------------------------------- + ; calculate seasonal cycle delta_albedo_s / delta_T_s + ; (differences between April and May) + ; unit = %/K + ; -------------------------------------------------- + + do jj = 0, dim_VAR - 1 + atts = True + atts@short_name = variables(jj) + + if (ii .eq. num_models_present) then + ; all observations (possibly from different sources) + ; are processed at the same time (at ii .eq. num_models_present) + + atts@dataset = refname(jj) + ds_names(ii) = "ref" + else ; model dataset + imod = ind_present(ii) + atts@dataset = names(0, imod) + atts@exp = exp_presentday + atts@ensemble = ensembles(0, imod) + ds_names(ii) = names(0, imod) + "_" + ensembles(0, imod) + end if + + info = select_metadata_by_atts(input_file_info, atts) + delete(atts) + + if (variables(jj) .eq. "tas") then + tas = read_data(info[0]) + end if + if (variables(jj) .eq. "alb") then + alb = read_data(info[0]) + end if + if (variables(jj) .eq. "rsdt") then + rad = read_data(info[0]) + end if + + delete(info) + end do + + result = snowalbedo_precalc(tas, alb, rad, lsdata) + alb_seas = result[0] + t2_seas = result[1] + delete(result) + + xdata(ii) = (alb_seas(3) - alb_seas(4)) * 100.0 / (t2_seas(3) - t2_seas(4)) + + delete(tas) + delete(alb) + delete(rad) + + ; -------------------------------------------------- + ; calculate climate change delta_alpha_s / delta_T_s + ; note: models only, no observations + ; -------------------------------------------------- + + if (ii .lt. num_models_present) then ; i.e. model dataset + do jj = 0, dim_VAR - 1 + atts = True + atts@short_name = variables(jj) + imod = ind_present(ii) + atts@dataset = names(0, imod) + atts@exp = exp_future + atts@ensemble = ensembles(0, imod) + + info = select_metadata_by_atts(input_file_info, atts) + delete(atts) + + if (variables(jj) .eq. "tas") then + tas = read_data(info[0]) + end if + if (variables(jj) .eq. "alb") then + alb = read_data(info[0]) + end if + if (variables(jj) .eq. 
"rsdt") then + rad = read_data(info[0]) + end if + + delete(info) + end do + + result = snowalbedo_precalc(tas, alb, rad, lsdata) + + delete(tas) + delete(alb) + delete(rad) + + alb_seas_future = result[0] + t2_seas_future = result[1] + delete(result) + + delta_t = t2_seas_future(3) - t2_seas(3) + if (delta_t.ne.0.0) then + ydata(ii) = (alb_seas_future(3) - alb_seas(3)) * 100.0 / delta_t + else + ydata(ii) = ydata@_FillValue + end if + + delete(alb_seas_future) + delete(t2_seas_future) + end if + + delete(alb_seas) + delete(t2_seas) + + print("-------------------------------------------") + print(ds_names(ii) + ": " + xdata(ii) + "%/K, " + ydata(ii) + "%/K") + + end do ; loop over datasets + + delete(lsdata) + + xdata@standard_name = "seasonal cycle" + ydata@standard_name = "climate change" + xdata@units = "%/K" + ydata@units = "%/K" + + ; ======================================================================== + ; ============================ plot results ============================== + ; ======================================================================== + + outfile = "snow_albedo_feedback_ref" + do i = 0, dim_VAR - 1 + outfile = outfile + "_" + names(i, ref_ind(i)) + end do + + if (isatt(diag_script_info, "suffix")) then + outfile = outfile + "_" + diag_script_info@suffix + end if + + wks = get_wks("dummy_for_wks", DIAG_SCRIPT, outfile) + plot_filename = wks@fullname + wks@legendfile = outfile + "_legend" + + if (isatt(diag_script_info, "styleset")) then + info0 = select_metadata_by_name(input_file_info, var0) + colortab = project_style(info0, diag_script_info, "colors") + markertab = project_style(info0, diag_script_info, "markers") + else + colortab = (/"(/0.00, 0.00, 0.59/)", "(/0.00, 0.39, 1.00/)", \ + "(/0.20, 1.00, 1.00/)", "(/0.20, 0.88, 0.00/)", \ + "(/1.00, 0.88, 0.00/)", "(/1.00, 0.59, 0.00/)", \ + "(/1.00, 0.20, 0.00/)", "(/0.59, 0.00, 0.00/)", \ + "(/0.78, 0.00, 0.78/)", "(/0.59, 0.00, 0.59/)", \ + "(/0.90, 0.90, 0.90/)", "(/0.70, 0.70, 0.70/)", \ + "(/0.50, 0.50, 0.50/)", "(/0.30, 0.30, 0.30/)"/) + markertab = (/16, 4, 5, 0/) + end if + + nx = dimsizes(xdata) + + colors = new(nx, string) + markers = new(nx, integer) + + ; create new marker: filled star + + mstring = "z" + fontnum = 35 + size = 1.5 + angle = 0.0 + + new_index = NhlNewMarker(wks, mstring, fontnum, 0.0, 0.0, 1.0, size, angle) + + if (isatt(diag_script_info, "styleset")) then + colors(0:nx - 2) = colortab(ind_present) + markers(0:nx - 2) = markertab(ind_present) + i = ind(ds_names.eq."MultiModelMean") + if (.not. 
ismissing(i)) then + colors(i) = "(/0.00, 0.00, 0.00/)" ; black + markers(i) = new_index + end if + else + i = 0 + j = 0 + do n = 0, nx - 2 + if (ds_names(n).eq."MultiModelMean") then + colors(n) = "(/0.00, 0.00, 0.00/)" ; black + markers(n) = new_index + else + colors(n) = colortab(i) + markers(n) = markertab(j) + i = i + 1 + if (i.ge.dimsizes(colortab)) then + i = 0 + j = j + 1 + if (j.ge.dimsizes(markertab)) then + log_info(DIAG_SCRIPT + " (info): not enough colors/symbols " \ + + "for all datasets, reusing symbols/colors") + j = 0 + end if + end if + end if + end do + end if + + ; reference data: red star + colors(nx - 1) = "(/1.00, 0.00, 0.00/)" ; red + markers(nx - 1) = new_index + + ; Collect data (annotations, colors and markers are defined for + ; model datasets only) + + data_arr = new((/2, nx/), float) + data_arr!0 = "statistic" + data_arr!1 = "model" + data_arr&statistic = (/xdata@standard_name, ydata@standard_name/) + data_arr&model = ds_names + + data_arr(0, :) = (/xdata/) + data_arr(1, :) = (/ydata/) + data_arr@units = (/xdata@units, ydata@units/) + data_arr@legend_outside = diag_script_info@legend_outside + data_arr@colors = colors(0: nx - 2) + data_arr@markers = markers(0: nx - 2) + data_arr@annots = ds_names(0: nx - 2) + + yregmin = 999.9 + yregmax = -999.9 + + ymin = min(ydata) + ymax = max(ydata) + + xmin = min(xdata) + xmax = max(xdata) + xdelta = xmax - xmin + x0 = xmin - 0.5 * xdelta + x1 = xmax + 0.5 * xdelta + + delete(ii) + + if (nx.ge.3) then + x = xdata(0:nx - 2) ; w/o reference dataset + y = ydata(0:nx - 2) ; w/o reference dataset + ii = dim_pqsort_n(x, 1, 0) + xx = x(ii) ; temporary 'work' arrays + yy = y(ii) + + rc = regline_stats(xx, yy) ; requires NCL 6.2.0 or higher + + ; calculate confidence intervals (25%, 75%) of regression + + xxx = fspan(x0, x1, 50) + yyy = rc@b(0) + xxx * rc@b(1) + ; t-value given the one-sided probability and the degrees of freedom + tval = cdft_t(0.25, nx - 2) + + mean_se_fit = 1.0 / nx + (xxx - rc@xave) ^ 2 / sum((x - rc@xave) ^ 2) + mean_conf_upper = yyy + tval * sqrt(rc@MSE * mean_se_fit) + mean_conf_lower = yyy - tval * sqrt(rc@MSE * mean_se_fit) + + ; calculate prediction intervals (25%, 75%) + + prediction_upper = yyy + tval * sqrt(rc@MSE * (1.0 + mean_se_fit)) + prediction_lower = yyy - tval * sqrt(rc@MSE * (1.0 + mean_se_fit)) + + yregmin = min(mean_conf_lower) + yregmax = max(mean_conf_upper) + + data_arr@trYMinF = min((/ymin, yregmin/)) + data_arr@trYMaxF = max((/ymax, yregmax/)) + + delete(yyy) + delete(mean_se_fit) + delete(x) + delete(y) + delete(ii) + end if + + ; if requested by user, use predefined min/max for x- and y-axis + + if (isatt(diag_script_info, "xmax")) then + data_arr@trXMaxF = diag_script_info@xmax + end if + if (isatt(diag_script_info, "xmin")) then + data_arr@trXMinF = diag_script_info@xmin + end if + if (isatt(diag_script_info, "ymax")) then + data_arr@trYMaxF = diag_script_info@ymax + end if + if (isatt(diag_script_info, "ymin")) then + data_arr@trYMinF = diag_script_info@ymin + end if + + data_arr@trXReverse = True ; flip x axis + data_arr@trYReverse = True ; flip y axis + + ; draw scatter plot + data_arr@res_tiMainString = "" + plot = scatterplot_markers(wks, data_arr(:, 0:nx - 2), var0, input_file_info) + + ydelta = ymax - ymin + y0 = min((/ymin - 0.5 * ydelta, yregmin/)) + y1 = max((/yregmax, ymax + 0.5 * ydelta/)) + + ; plot observational uncertainty (if available) + + if (isdefined("diagminmax")) then + x = (/diagminmax(0), diagminmax(0), diagminmax(1), diagminmax(1), \ + diagminmax(0)/) + y = (/y0, y1, y1, y0, y0/) + res = True + res@tfPolyDrawOrder = "Draw" 
+ res@gsFillColor = (/0.9, 0.9, 0.9/) + referr = gsn_add_polygon(wks, plot, x, y, res) + delete(x) + delete(y) + delete(res) + end if + + ; draw line for reference data + + x = (/xdata(nx - 1), xdata(nx - 1)/) + y = (/y0, y1/) + res = True + res@gsLineColor = (/0.75, 0.75, 0.75/) + res@gsLineThicknessF = 4.0 + res@tfPolyDrawOrder = "Draw" + + ref = gsn_add_polyline(wks, plot, x, y, res) + + delete(x) + delete(y) + + ; add regression line (drawn in the background) + ; (can only be calculated if there are at least 3 models) + + if (nx.ge.3) then + x = (/x0, x1/) + y = (/x0 * rc + rc@yintercept, x1 * rc + rc@yintercept/) + res = True + res@gsLineColor = (/1.00, 0.00, 0.00/) + res@gsLineThicknessF = 4.0 + res@tfPolyDrawOrder = "Draw" + regl = gsn_add_polyline(wks, plot, x, y, res) + + res@gsLineThicknessF = 2.0 + res@gsLineDashPattern = 1 + reglc1 = gsn_add_polyline(wks, plot, xxx, mean_conf_upper, res) + reglc2 = gsn_add_polyline(wks, plot, xxx, mean_conf_lower, res) + res@gsLineDashPattern = 2 + reglp1 = gsn_add_polyline(wks, plot, xxx, prediction_upper, res) + reglp2 = gsn_add_polyline(wks, plot, xxx, prediction_lower, res) + + delete(xxx) + delete(mean_conf_upper) + delete(mean_conf_lower) + delete(prediction_upper) + delete(prediction_lower) + end if + + draw(plot) + frame(wks) + + ; mandatory netCDF output + + nc_filename = work_dir + outfile + ".nc" + + data_arr@diag_script = (/DIAG_SCRIPT/) + data_arr@var = "snowalbedofeedback" + nc_outfile = ncdf_write(data_arr, nc_filename) + + ; ------------------------------------------------------------------------ + ; write provenance to netCDF output (and plot file) + ; ------------------------------------------------------------------------ + + statistics = (/"clim", "other"/) + domain = "nhext" + plottype = "scatter" + caption = "Scatterplot of simulated springtime snow-albedo feedback " \ + + "values versus springtime values in the seasonal cycle. " \ + + "Resembles IPCC AR5 fig. 9.45a." 
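+  ; For reference, an informal sketch of the standard OLS band formulas
+  ; used for the confidence/prediction curves drawn above:
+  ;   se_fit(x) = 1/n + (x - xave)^2 / sum((x_i - xave)^2)
+  ;   mean band = yfit +/- t * sqrt(MSE * se_fit(x))
+  ;   prediction band = yfit +/- t * sqrt(MSE * (1 + se_fit(x)))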
+ + log_provenance(nc_outfile, plot_filename, caption, statistics, \ + domain, plottype, "", "", climofiles) + + ; ========================================================================== + + leave_msg(DIAG_SCRIPT, "") + +end diff --git a/esmvaltool/diag_scripts/ensclus/ens_anom.py b/esmvaltool/diag_scripts/ensclus/ens_anom.py index a3b664d11a..ef9c303786 100644 --- a/esmvaltool/diag_scripts/ensclus/ens_anom.py +++ b/esmvaltool/diag_scripts/ensclus/ens_anom.py @@ -1,12 +1,19 @@ """Computation of ensemble anomalies based on a desired value.""" import os + import numpy as np from scipy import stats # User-defined packages -from read_netcdf import read_iris, save_n_2d_fields -from sel_season_area import sel_area, sel_season +from esmvaltool.diag_scripts.ensclus.read_netcdf import ( + read_iris, + save_n_2d_fields, +) +from esmvaltool.diag_scripts.ensclus.sel_season_area import ( + sel_area, + sel_season, +) def ens_anom(filenames, dir_output, name_outputs, varname, numens, season, diff --git a/esmvaltool/diag_scripts/ensclus/ens_eof_kmeans.py b/esmvaltool/diag_scripts/ensclus/ens_eof_kmeans.py index 9f9bedc861..940af71278 100644 --- a/esmvaltool/diag_scripts/ensclus/ens_eof_kmeans.py +++ b/esmvaltool/diag_scripts/ensclus/ens_eof_kmeans.py @@ -10,8 +10,8 @@ from sklearn.cluster import KMeans # User-defined libraries -from eof_tool import eof_computation -from read_netcdf import read_n_2d_fields +from esmvaltool.diag_scripts.ensclus.eof_tool import eof_computation +from esmvaltool.diag_scripts.ensclus.read_netcdf import read_n_2d_fields def ens_eof_kmeans(dir_output, name_outputs, numens, numpcs, perc, numclus): @@ -33,17 +33,13 @@ def ens_eof_kmeans(dir_output, name_outputs, numens, numpcs, perc, numclus): print('Model: {0}'.format(model)) # Either perc (cluster analysis is applied on a number of PCs # such as they explain 'perc' of total variance) or numpcs - # (number of PCs to retain) is set: - if numpcs != 'no': + # (number of PCs to retain) is set. 
+ # numpcs has priority over perc, ignored if it is set to 0 + if numpcs: numpcs = int(numpcs) print('Number of principal components: {0}'.format(numpcs)) - - if perc != 'no': - perc = int(perc) - print('Percentage of explained variance: {0}%'.format(perc)) - - if (perc == 'no' and numpcs == 'no') or (perc != 'no' and numpcs != 'no'): - raise ValueError('You have to specify either "perc" or "numpcs".') + else: + print('Percentage of variance explained: {0}'.format(perc)) print('Number of clusters: {0}'.format(numclus)) @@ -61,7 +57,9 @@ def ens_eof_kmeans(dir_output, name_outputs, numens, numpcs, perc, numclus): _, _, _, pcs_unscal0, eofs_unscal0, varfrac = eof_computation(var, lat) acc = np.cumsum(varfrac * 100) - if perc != 'no': + if numpcs: + exctperc = acc[numpcs - 1] + else: # Find how many PCs explain a certain percentage of variance # (find the mode relative to the percentage closest to perc, # but bigger than perc) @@ -70,8 +68,6 @@ def ens_eof_kmeans(dir_output, name_outputs, numens, numpcs, perc, numclus): 'to {0}% of variance (but grater than {0}%) is {1}' .format(perc, numpcs)) exctperc = min(enumerate(acc), key=lambda x: x[1] <= perc)[1] - if numpcs != 'no': - exctperc = acc[numpcs - 1] print('(the first {0} PCs explain exactly the {1}% of variance)' .format(numpcs, "%.2f" % exctperc)) @@ -81,8 +77,9 @@ def ens_eof_kmeans(dir_output, name_outputs, numens, numpcs, perc, numclus): pcs = pcs_unscal0[:, :numpcs] - clus = KMeans(n_clusters=numclus, n_init=600, max_iter=1000) - + clus = KMeans(n_clusters=numclus, n_init=2000, + init='k-means++', tol=1e-4, + max_iter=1000, random_state=42) start = datetime.datetime.now() clus.fit(pcs) end = datetime.datetime.now() @@ -129,43 +126,12 @@ def ens_eof_kmeans(dir_output, name_outputs, numens, numpcs, perc, numclus): 'the same as the member #1 PC vector dim {1}\n' .format(centroids[1, :].shape, pcs[1, :].shape)) - norm = np.empty([numclus, numens]) - final_output = [] - repres = [] - for nclus in range(numclus): - for ens in range(numens): - normens = centroids[nclus, :] - pcs[ens, :] - norm[nclus, ens] = math.sqrt(sum(normens**2)) - print('The distances between centroid of cluster {0} and ' - 'member #0 to #{1} are:\n{2}' - .format(nclus, numens - 1, np.round(norm[nclus], 3))) - print('MINIMUM DISTANCE FOR CLUSTER {0} IS {1} --> member #{2}' - .format(nclus, round(norm[nclus].min(), 3), - list(np.where(norm[nclus] == norm[nclus].min())[0]))) - repres.append(np.where(norm[nclus] == norm[nclus].min())[0][0]) - print('MAXIMUM DISTANCE FOR CLUSTER {0} IS {1} --> member #{2}\n' - .format(nclus, round(norm[nclus].max(), 3), - list(np.where(norm[nclus] == norm[nclus].max())[0]))) - - txt = ('Closest ensemble member/members ' - 'to centroid of cluster {0} is/are {1}\n' - .format(nclus, list(np.where(norm[nclus] == - norm[nclus].min())[0]))) - final_output.append(txt) - with open(os.path.join(dir_output, 'RepresentativeEnsembleMembers_{0}.txt' - .format(name_outputs)), "w") as text_file: - text_file.write(''.join(str(e) for e in final_output)) - - # ____________Save the most representative ensemble members - namef = os.path.join(dir_output, 'repr_ens_{0}.txt'.format(name_outputs)) - np.savetxt(namef, repres, fmt='%i') - outfiles.append(namef) - print('_________________________________________________________') print('In order to study the spread of each cluster,') print('the standard deviation of the distances between each member ' 'in a cluster and the cluster centroid is computed in the PC space') stat_output = [] + repres = [] for nclus in 
range(numclus): members = clusters[nclus][2] norm = np.empty([numclus, len(members)]) @@ -179,6 +145,8 @@ def ens_eof_kmeans(dir_output, name_outputs, numens, numpcs, perc, numclus): .format(nclus, round(norm[nclus].min(), 3), members[np.where(norm[nclus] == norm[nclus].min())[0][0]])) + repres.append(members[np.where(norm[nclus] == + norm[nclus].min())[0][0]]) print('MAXIMUM DISTANCE WITHIN CLUSTER {0} IS {1} --> member #{2}' .format(nclus, round(norm[nclus].max(), 3), members[np.where(norm[nclus] == @@ -196,6 +164,12 @@ def ens_eof_kmeans(dir_output, name_outputs, numens, numpcs, perc, numclus): d_stat['freq(%)'] = round(clusters[nclus][1], 3) stat = pd.DataFrame(d_stat) stat_output.append(stat) + + # ____________Save the most representative ensemble members + namef = os.path.join(dir_output, 'repr_ens_{0}.txt'.format(name_outputs)) + np.savetxt(namef, repres, fmt='%i') + outfiles.append(namef) + stat_output = pd.concat(stat_output, axis=0) # ____________Save statistics of cluster analysis namef = os.path.join(dir_output, 'statistics_clustering_{0}.txt' diff --git a/esmvaltool/diag_scripts/ensclus/ens_plots.py b/esmvaltool/diag_scripts/ensclus/ens_plots.py index 7dfa21a42c..1b06acd671 100644 --- a/esmvaltool/diag_scripts/ensclus/ens_plots.py +++ b/esmvaltool/diag_scripts/ensclus/ens_plots.py @@ -3,16 +3,16 @@ import math import os +import cartopy.crs as ccrs import matplotlib.pyplot as plt import numpy as np -import cartopy.crs as ccrs # User-defined libraries -from read_netcdf import read_n_2d_fields +from esmvaltool.diag_scripts.ensclus.read_netcdf import read_n_2d_fields def ens_plots(dir_output, dir_plot, name_outputs, numclus, - field_to_plot, plot_type): + field_to_plot, plot_type, season, area, extreme, numensmax): """Plot the chosen field for each ensemble.""" print('Number of clusters: {0}'.format(numclus)) @@ -29,7 +29,15 @@ def ens_plots(dir_output, dir_plot, name_outputs, numclus, # ____________Load labels namef = os.path.join(dir_output, 'labels_{0}.txt'.format(name_outputs)) - labels = np.loadtxt(namef, dtype=int) + labels = np.loadtxt(namef, dtype=np.int64) + + namef = os.path.join(dir_output, 'repr_ens_{0}.txt'.format(name_outputs)) + reprens = np.loadtxt(namef, dtype=np.int64) + + namef = os.path.join(dir_output, 'legend_{0}.txt'.format(name_outputs)) + legends = np.loadtxt(namef, dtype=str) + years = np.loadtxt(namef, dtype=str)[:, 6] + legends = np.loadtxt(namef, dtype=str)[:, 1] vmi = round_down(np.nanpercentile(vartoplot, 0.1)) vma = round_up(np.nanpercentile(vartoplot, 99.9)) @@ -51,60 +59,86 @@ def ens_plots(dir_output, dir_plot, name_outputs, numclus, clevels = np.arange(rangecbarmin, rangecbarmax + delta, delta) colors = ['b', 'g', 'r', 'c', 'm', 'y', 'DarkOrange', 'grey'] proj = ccrs.PlateCarree() - xpos = int(np.ceil(np.sqrt(numens * 1.6))) - ypos = int(np.ceil(numens / xpos)) - fig = plt.figure(figsize=(24, 14)) if min(lon) < 180. < max(lon): clon = 180. else: clon = 0. 
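+    # Note on the choice above: a central longitude of 180 keeps domains
+    # that straddle the dateline contiguous in the plot; otherwise the
+    # default Greenwich-centred projection (clon = 0) is used.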
- for nens in range(numens): - axes = plt.subplot(xpos, ypos, nens + 1, - projection=ccrs.PlateCarree(central_longitude=clon)) - axes.set_extent([min(lon), max(lon), min(lat), max(lat)], - crs=ccrs.PlateCarree()) - axes.coastlines("110m") - - # Plot Data - if field_to_plot == 'anomalies': - map_plot = plt.contourf(lon, lat, vartoplot[nens], clevels, - cmap=plt.cm.RdBu_r, - transform=proj, extend='both') - else: - map_plot = plt.contourf(lon, lat, vartoplot[nens], clevels, - transform=proj, extend='both') - - # Add Title - title_obj = plt.title(nens, fontsize=32, fontweight='bold') - for nclus in range(numclus): - if nens in np.where(labels == nclus)[0]: - title_obj.set_backgroundcolor(colors[nclus]) - - cax = plt.axes([0.1, 0.03, 0.8, 0.03]) # horizontal - cbar = plt.colorbar(map_plot, cax=cax, orientation='horizontal') - cbar.ax.tick_params(labelsize=18) - cbar.set_ticks(np.arange(rangecbarmin, rangecbarmax + delta, delta * 20)) - - plt.suptitle(exp + ' ' + kind + ' ' + varname + ' ' + field_to_plot + - ' (' + varunits + ')', fontsize=45, fontweight='bold') - - top = 0.89 # the top of the subplots of the figure - bottom = 0.12 # the bottom of the subplots of the figure - left = 0.02 # the left side of the subplots of the figure - right = 0.98 # the right side of the subplots of the figure - hspace = 0.36 # amount of height reserved for white space between subplots - wspace = 0.14 # amount of width reserved for blank space between subplots - plt.subplots_adjust(left=left, bottom=bottom, right=right, top=top, - wspace=wspace, hspace=hspace) - - # plot the selected fields - namef = os.path.join(dir_plot, ('{0}_{1}.' + plot_type) - .format(field_to_plot, name_outputs)) - fig.savefig(namef) # bbox_inches='tight') - print('A ', plot_type, ' figure for the selected fields saved in {0}' - .format(dir_plot)) - - return namef + + numens0 = min(numens, numensmax) + nfigs = int(np.ceil(numens / numens0)) + + ypos = int(np.ceil(np.sqrt(numens0 / 2.0))) + xpos = int(np.ceil(numens0 / ypos)) + fsize = int(min(max(4. 
/ ypos * 15, 10), 32)) + + namef_list = [] + for ifig in range(nfigs): + fig = plt.figure(figsize=(24, 14)) + for iens in range(ifig * numens0, min((ifig + 1) * numens0, numens)): + axes = plt.subplot( + ypos, xpos, iens + 1 - ifig * numens0, + projection=ccrs.PlateCarree(central_longitude=clon) + ) + axes.set_extent([min(lon), max(lon), min(lat), max(lat)], + crs=ccrs.PlateCarree()) + axes.coastlines("110m") + + # Plot Data + if field_to_plot == 'anomalies': + map_plot = plt.contourf(lon, lat, vartoplot[iens], clevels, + cmap=plt.cm.RdBu_r, + transform=proj, extend='both') + else: + map_plot = plt.contourf(lon, lat, vartoplot[iens], clevels, + transform=proj, extend='both') + + if iens in reprens: + rect = plt.Rectangle((-0.01, -0.01), 1.02, 1.02, fill=False, + transform=axes.transAxes, clip_on=False, + zorder=10) + rect.set_edgecolor(colors[labels[iens]]) + rect.set_linewidth(6.0) + axes.add_artist(rect) + + # Add Title + title_obj = plt.title(iens, fontsize=int(fsize * 1.8), + fontweight='bold', loc='left') + title_obj.set_backgroundcolor(colors[labels[iens]]) + title_obj = plt.title(legends[iens], fontsize=fsize, loc='right') + + cax = plt.axes([0.1, 0.03, 0.8, 0.03]) # horizontal + cbar = plt.colorbar(map_plot, cax=cax, orientation='horizontal') + cbar.ax.tick_params(labelsize=24) + cbar.set_ticks(np.arange(rangecbarmin, + rangecbarmax + delta, delta * 20)) + cbar.ax.set_ylabel(varname + '\n[' + varunits + ']', fontsize=24, + fontweight='bold', rotation='horizontal', + verticalalignment='center') + cbar.ax.yaxis.set_label_position('right') + cbar.ax.yaxis.set_label_coords(1.05, 1.4) + + plt.suptitle(field_to_plot.capitalize() + ' ' + varname + ' ' + + extreme + ' ' + area + ' ' + season + ' ' + + kind + ' ' + exp + ' ' + years[0], + fontsize=40, fontweight='bold') + + top = 0.89 # the top of the subplots of the figure + bottom = 0.12 # the bottom of the subplots of the figure + left = 0.02 # the left side of the subplots of the figure + right = 0.98 # the right side of the subplots of the figure + hspace = 0.36 # height reserved for white space between subplots + wspace = 0.14 # width reserved for blank space between subplots + plt.subplots_adjust(left=left, bottom=bottom, right=right, top=top, + wspace=wspace, hspace=hspace) + + # plot the selected fields + namef = os.path.join(dir_plot, ('{0}_{1}.fig{2}.' + plot_type) + .format(field_to_plot, name_outputs, ifig + 1)) + fig.savefig(namef) # bbox_inches='tight') + print('A ', plot_type, ' figure for the selected fields saved in {0}' + .format(dir_plot)) + namef_list.append(namef) + return namef_list def round_up(x, sig=2): diff --git a/esmvaltool/diag_scripts/ensclus/ensclus.py b/esmvaltool/diag_scripts/ensclus/ensclus.py index 5279881373..df06ea411c 100644 --- a/esmvaltool/diag_scripts/ensclus/ensclus.py +++ b/esmvaltool/diag_scripts/ensclus/ensclus.py @@ -8,21 +8,26 @@ Cluster analysis tool based on the k-means algorithm for ensembles of climate model simulations Modification history - 20181202-hard_jo: cleanup, style, provenance and finalising - 20181002-arno_en: updating to version2_develpment (recipe/dataset) - 20170710-mavi_ir: Routines written. + 20181202-vonhardenberg_jost: cleanup, style, provenance and finalising + 20181002-arnone_enrico: updating to version2_develpment (recipe/dataset) + 20170710-mavilia_irene: routines written. 
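+
+Example recipe settings (illustrative values only; the keys match those
+read in main() below):
+    season: JJA
+    area: EU
+    extreme: 75th_percentile
+    numclus: 3
+    numpcs: 0
+    perc: 80
+    max_plot_panels: 72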
""" -import os import logging +import os + import numpy as np -from esmvaltool.diag_scripts.shared import group_metadata, run_diagnostic -from esmvaltool.diag_scripts.shared import ProvenanceLogger, sorted_metadata # Import user diagnostic routines -from ens_anom import ens_anom -from ens_eof_kmeans import ens_eof_kmeans -from ens_plots import ens_plots +from esmvaltool.diag_scripts.ensclus.ens_anom import ens_anom +from esmvaltool.diag_scripts.ensclus.ens_eof_kmeans import ens_eof_kmeans +from esmvaltool.diag_scripts.ensclus.ens_plots import ens_plots +from esmvaltool.diag_scripts.shared import ( + ProvenanceLogger, + group_metadata, + run_diagnostic, + sorted_metadata, +) logger = logging.getLogger(os.path.basename(__file__)) @@ -35,7 +40,7 @@ def get_provenance_record(gatt, vatt, ancestor_files): print(gatt) record = { 'caption': caption, - 'authors': ['hard_jo', 'arno_en', 'mavi_ir'], + 'authors': ['vonhardenberg_jost', 'arnone_enrico', 'mavilia_irene'], 'projects': ['c3s-magic'], 'references': ['straus07jcli'], 'plot_types': ['other'], @@ -49,7 +54,6 @@ def get_provenance_record(gatt, vatt, ancestor_files): def main(cfg): """Ensemble Clustering Diagnostics.""" out_dir = cfg['work_dir'] - write_plots = cfg['write_plots'] input_data = cfg['input_data'].values() input_data = sorted_metadata(input_data, sort='recipe_dataset_index') files_dict = group_metadata(input_data, 'filename', @@ -65,6 +69,9 @@ def main(cfg): logger.info('The name of the output files will be _%s.txt', name_outputs) variable_name = element['short_name'] + max_plot_panels = cfg.get('max_plot_panels', 72) + numpcs = cfg.get('numpcs', 0) + perc = cfg.get('numpcs', 80) filenames_cat = [] legend_cat = [] @@ -89,21 +96,20 @@ def main(cfg): numens, cfg['season'], cfg['area'], cfg['extreme']) # ###################### EOF AND K-MEANS ANALYSES ####################### - outfiles2 = ens_eof_kmeans(out_dir, name_outputs, numens, cfg['numpcs'], - cfg['perc'], cfg['numclus']) + outfiles2 = ens_eof_kmeans(out_dir, name_outputs, numens, numpcs, + perc, cfg['numclus']) outfiles = outfiles + outfiles2 provenance_record = get_provenance_record( cfg, list(files_dict.values())[0][0], ancestor_files=filenames_cat) # ###################### PLOT AND SAVE FIGURES ########################## - if write_plots: - plot_file = ens_plots(out_dir, cfg['plot_dir'], name_outputs, - cfg['numclus'], 'anomalies', - cfg['output_file_type']) - provenance_record['plot_file'] = plot_file + plotfiles = ens_plots(out_dir, cfg['plot_dir'], name_outputs, + cfg['numclus'], 'anomalies', + cfg['output_file_type'], cfg['season'], + cfg['area'], cfg['extreme'], max_plot_panels) - for file in outfiles: + for file in outfiles + plotfiles: with ProvenanceLogger(cfg) as provenance_logger: provenance_logger.log(file, provenance_record) diff --git a/esmvaltool/diag_scripts/ensclus/eof_tool.py b/esmvaltool/diag_scripts/ensclus/eof_tool.py index 068c18dadd..a2d92569f7 100644 --- a/esmvaltool/diag_scripts/ensclus/eof_tool.py +++ b/esmvaltool/diag_scripts/ensclus/eof_tool.py @@ -54,8 +54,8 @@ def eof_plots(neof, pcs_scal1, eofs_scal2, var, varunits, lat, lon, """ print('_________________________________________________________') print('Plotting the EOFs and PCs') - print('Variable: {1} Units: {2}'.format(var, varunits)) - print('Ensemble members: {1}'.format(numens)) + print('Variable: {0} Units: {1}'.format(var, varunits)) + print('Ensemble members: {0}'.format(numens)) # ------------------------------------------PCs scaled (case 1 of scaling) figpc_scal1 = 
plt.figure(figsize=(24, 14)) diff --git a/esmvaltool/diag_scripts/examples/correlate.py b/esmvaltool/diag_scripts/examples/correlate.py index fc54375803..052f3d2bdc 100644 --- a/esmvaltool/diag_scripts/examples/correlate.py +++ b/esmvaltool/diag_scripts/examples/correlate.py @@ -6,7 +6,7 @@ from iris.analysis import MEAN from iris.analysis.stats import pearsonr -from diagnostic import plot_diagnostic +from esmvaltool.diag_scripts.examples.diagnostic import plot_diagnostic from esmvaltool.diag_scripts.shared import group_metadata, run_diagnostic logger = logging.getLogger(os.path.basename(__file__)) @@ -23,7 +23,7 @@ def get_provenance_record(attributes, ancestor_files, plot_type): 'domains': ['global'], 'plot_type': plot_type, 'authors': [ - 'ande_bo', + 'andela_bouwe', ], 'references': [ 'acknow_project', @@ -36,7 +36,10 @@ def get_provenance_record(attributes, ancestor_files, plot_type): def main(cfg): """Compute the time average for each input dataset.""" input_data = group_metadata( - cfg['input_data'].values(), 'standard_name', sort='dataset') + cfg['input_data'].values(), + 'standard_name', + sort='dataset', + ) for standard_name in input_data: logger.info("Processing variable %s", standard_name) @@ -67,6 +70,13 @@ def main(cfg): "Computing correlation with settings %s between " "reference and cube:\n%s\n%s", kwargs, filename, dataset) dataset = dataset.collapsed('time', MEAN) + # Fix issue with losing vertical bounds in extract_level + # preprocessor + if reference.coords(axis='Z'): + ref_coord = reference.coord(axis='Z') + coord = dataset.coord(ref_coord) + if not coord.has_bounds(): + coord.bounds = ref_coord.bounds cube = pearsonr(dataset, reference, **kwargs) name = '{}_correlation_with_{}'.format( diff --git a/esmvaltool/diag_scripts/examples/decadal_example.py b/esmvaltool/diag_scripts/examples/decadal_example.py new file mode 100644 index 0000000000..ecbfd9dfe9 --- /dev/null +++ b/esmvaltool/diag_scripts/examples/decadal_example.py @@ -0,0 +1,102 @@ +"""Diagnostic to reproduce figures in IS-ENES D9.4.""" +import os + +import iris +import matplotlib.pyplot as plt + +from esmvaltool.diag_scripts.shared import ( + ProvenanceLogger, + group_metadata, + names, + run_diagnostic, +) + + +class DecadalExample: + """Class used to create plots comparing OBS data with DCPP data.""" + def __init__(self, config): + """ + Set diagnostic parameters and constants. + Parameters + ---------- + config : dict + Dictionary containing configuration settings. 
+ """ + self.cfg = config + + @staticmethod + def get_provenance_record(title, ancestor_files): + """Create a provenance record describing the diagnostic data and + plot.""" + caption = (f"Comparison of {title} between a DCPP experiment " + "and an observational dataset.") + + record = { + 'caption': caption, + 'statistics': ['mean'], + 'domains': ['global'], + 'plot_types': ['times'], + 'authors': [ + 'loosveldt-tomas_saskia', + ], + 'references': [ + 'acknow_project', + ], + 'ancestors': ancestor_files, + } + return record + + def compute(self): + """Plot time series of a decadal experiment against observations.""" + data = group_metadata(self.cfg['input_data'].values(), 'project') + ancestors = [] + for dataset in data['OBS6']: + cube = iris.load_cube(dataset['filename']) + name = dataset['dataset'] + iris.coord_categorisation.add_year(cube, 'time') + cube.coord('time').bounds = None + plt.plot(cube.coord('time').points, cube.data, label=f'{name}') + ancestors.append(dataset['filename']) + + for dataset in data['CMIP6']: + cube = iris.load_cube(dataset['filename']) + iris.coord_categorisation.add_year(cube, 'time') + name = dataset['dataset'] + sub_exp = dataset['sub_experiment'] + cube.coord('time').bounds = None + plt.plot(cube.coord('time').points, + cube.data, + label=f'{name}-{sub_exp}') + ancestors.append(dataset['filename']) + + plt.rcParams["figure.figsize"] = (40, 6) + plt.legend(loc='center left', + bbox_to_anchor=(1, 0.5), + prop={'size': 5.5}) + plt.xlabel("time (days since 01-01-1850)") + plt.ylabel("Temperature (K)") + title = 'Global mean of Near-Surface Air Temperature (tas)' + plt.title(title) + plt.tight_layout() + + plt.grid(True) + + extension = self.cfg['output_file_type'] + plot_name = 'decadal_test' + f'.{extension}' + plot_path = os.path.join(self.cfg[names.PLOT_DIR], plot_name) + plt.savefig(plot_path) + + provenance_record = self.get_provenance_record(title, ancestors) + with ProvenanceLogger(self.cfg) as provenance_logger: + provenance_logger.log(plot_path, provenance_record) + plt.close() + + +def main(): + """Run Decadal Example diagnostic.""" + with run_diagnostic() as config: + DecadalExample(config).compute() + + +if __name__ == "__main__": + main() diff --git a/esmvaltool/diag_scripts/examples/diagnostic.R b/esmvaltool/diag_scripts/examples/diagnostic.R new file mode 100644 index 0000000000..61e5084044 --- /dev/null +++ b/esmvaltool/diag_scripts/examples/diagnostic.R @@ -0,0 +1,65 @@ +# ############################################################################# +# diagnostic.R +# Authors: E. 
Arnone (ISAC-CNR, Italy) +# ############################################################################# +# Description +# Example of ESMValTool diagnostic written in R +# +# Required +# +# Optional +# +# Caveats +# +# Modification history +# 20180620-A_arnone_e: written for v2.0 +# +# ############################################################################ +library(tools) +library(yaml) +# get path to script and source subroutines (if needed) +diag_scripts_dir <- Sys.getenv("diag_scripts") +# source paste0(diag_scripts_dir,"/subroutine.r") +print(file.path("source ", diag_scripts_dir, "subroutine.r")) +# read settings and metadata files (assuming one variable only) +args <- commandArgs(trailingOnly = TRUE) +settings <- yaml::read_yaml(args[1]) +for (myname in names(settings)) { + temp <- get(myname, settings) + assign(myname, temp) +} +metadata <- yaml::read_yaml(settings$input_files) +# get name of climofileis for first variable and list +# associated to first climofile +climofiles <- names(metadata) +climolist <- get(climofiles[1], metadata) +# get diagnostic name from metadata file +diag_base <- climolist$diagnostic +print(paste0(diag_base, ": starting routine")) +# create work and plot directories if they do not exist +print(paste0(diag_base, ": creating work and plot directories")) +dir.create(work_dir, recursive = T, showWarnings = F) +dir.create(plot_dir, recursive = T, showWarnings = F) +# extract metadata +models_name <- unname(sapply(metadata, "[[", "dataset")) +reference_model <- + unname(sapply(metadata, "[[", "reference_dataset"))[1] +models_start_year <- unname(sapply(metadata, "[[", "start_year")) +models_end_year <- unname(sapply(metadata, "[[", "end_year")) +models_experiment <- unname(sapply(metadata, "[[", "exp")) +models_ensemble <- unname(sapply(metadata, "[[", "ensemble")) +## Loop through input models +for (model_idx in c(1:(length(models_name)))) { + # Setup parameters and path + model <- models_name[model_idx] + year1 <- models_start_year[model_idx] + year2 <- models_end_year[model_idx] + infile <- climofiles[model_idx] + model_exp <- models_experiment[model_idx] + model_ens <- models_ensemble[model_idx] + print(paste0(diag_base, ": working on file ", infile)) + print(paste0(diag_base, ": calling diagnostic with following parameters")) + print(paste(model, model_exp, model_ens, year1, year2)) + ## Call actual diagnostic + print(paste0(diag_base, ": I am your R diagnostic")) +} diff --git a/esmvaltool/diag_scripts/examples/diagnostic.jl b/esmvaltool/diag_scripts/examples/diagnostic.jl new file mode 100644 index 0000000000..9ce2009a4d --- /dev/null +++ b/esmvaltool/diag_scripts/examples/diagnostic.jl @@ -0,0 +1,128 @@ +# ############################################################################# +# diagnostic.jl +# Authors: J. 
diff --git a/esmvaltool/diag_scripts/examples/diagnostic.jl b/esmvaltool/diag_scripts/examples/diagnostic.jl
new file mode 100644
index 0000000000..9ce2009a4d
--- /dev/null
+++ b/esmvaltool/diag_scripts/examples/diagnostic.jl
@@ -0,0 +1,128 @@
+# #############################################################################
+# diagnostic.jl
+# Authors: J. von Hardenberg (ISAC-CNR, Italy)
+# #############################################################################
+# Description
+#    Example of ESMValTool diagnostic written in Julia
+#
+# Modification history
+#    20190807-vonhardenberg_jost: written for v2.0
+#    20191117-vonhardenberg_jost: added more realistic writing of file and plot
+# ############################################################################
+
+import YAML
+import JSON
+using NetCDF
+# Used to write output NetCDF file with original attributes
+using RainFARM
+using Statistics
+
+using PyPlot
+# Avoid plotting to screen
+pygui(false)
+
+# Provides the plotmap() function
+include(joinpath(dirname(@__DIR__), "shared/external.jl"))
+
+function provenance_record(infile)
+    xprov = Dict("ancestors" => [infile],
+                 "authors" => ["vonhardenberg_jost", "arnone_enrico"],
+                 "references" => ["zhang11wcc"],
+                 "projects" => ["crescendo", "c3s-magic"],
+                 "caption" => "Example diagnostic in Julia",
+                 "statistics" => ["other"],
+                 "realms" => ["atmos"],
+                 "themes" => ["phys"],
+                 "domains" => ["global"])
+    return xprov
+end
+
+function compute_diagnostic(metadata, varname, diag_base, parameter,
+                            work_dir, plot_dir)
+    provenance = Dict()
+    for (infile, value) in metadata
+        dataset = value["dataset"]
+        reference_dataset = value["reference_dataset"]
+        start_year = value["start_year"]
+        end_year = value["end_year"]
+        exp = value["exp"]
+        ensemble = value["ensemble"]
+        println(diag_base, ": working on file ", infile)
+        println(diag_base, ": calling diagnostic with following parameters")
+        println(dataset, " ", reference_dataset, " ", start_year, " ",
+                end_year, " ", exp, " ", ensemble, " ", parameter)
+        # Call the actual diagnostic here
+        println(diag_base, ": I am your Julia diagnostic")
+
+        # Read the variable, lon and lat
+        var = ncread(infile, varname)
+        lon = ncread(infile, "lon")
+        lat = ncread(infile, "lat")
+
+        units = ncgetatt(infile, varname, "units")
+
+        # Compute time average and add parameter
+        varm = mean(var, dims = 3) .+ parameter
+
+        # Create provenance record for the output files
+        xprov = provenance_record(infile)
+
+        # Output filename
+        outfile = string(work_dir, "/", varname, "_", dataset, "_", exp, "_",
+                         ensemble, "_", start_year, "-",
+                         end_year, "_timmean.nc")
+
+        # Use the RainFARM function write_netcdf2d to write variable to
+        # output file copying original attributes from infile
+        write_netcdf2d(outfile, varm, lon, lat, varname, infile)
+        provenance[outfile] = xprov
+
+        # Plot the time-averaged field
+        plotfile = string(plot_dir, "/", varname, "_", dataset, "_", exp, "_",
+                          ensemble, "_", start_year, "-",
+                          end_year, "_timmean.png")
+        title = string("Mean ", varname, " ", dataset, " ", exp, " ", ensemble,
+                       " ", start_year, "-", end_year)
+        plotmap(lon, lat, varm[:, :, 1], title = title, proj = "robinson", clabel = units)
+        savefig(plotfile)
+        provenance[plotfile] = xprov
+    end
+    return provenance
+end
+
+function main(settings)
+
+    metadata = YAML.load_file(settings["input_files"][1])
+    climofiles = collect(keys(metadata))
+    climolist = metadata[climofiles[1]]
+    varname = climolist["short_name"]
+    diag_base = climolist["diagnostic"]
+
+    println(diag_base, ": starting routine")
+    println(diag_base, ": creating work and plot directories")
+    work_dir = settings["work_dir"]
+    plot_dir = settings["plot_dir"]
+    run_dir = settings["run_dir"]
+    mkpath(work_dir)
+    mkpath(run_dir)
+    mkpath(plot_dir)
+    cd(run_dir)
+
+    # Read an example parameter from the settings
+    parameter = settings["parameter1"]
+
+    # Compute the main diagnostic
+    provenance = compute_diagnostic(metadata, varname, diag_base,
+                                    parameter, work_dir, plot_dir)
+
+    # Path of the provenance file
+    provenance_file = string(run_dir, "/diagnostic_provenance.yml")
+
+    # Write provenance file (JSON is a subset of YAML, so YAML parsers read it)
+    open(provenance_file, "w") do io
+        JSON.print(io, provenance, 4)
+    end
+end
+
+settings = YAML.load_file(ARGS[1])
+main(settings)
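The Julia script writes its provenance dictionary with JSON.print even though the file name ends in .yml; that is safe because JSON is a subset of YAML, so the YAML parser on the ESMValTool side can read it back. A quick Python check of that round trip (the record content is hypothetical):

    import json

    import yaml

    record = {'output.nc': {'ancestors': ['input.nc'], 'statistics': ['other']}}
    with open('diagnostic_provenance.yml', 'w') as file:
        json.dump(record, file, indent=4)

    with open('diagnostic_provenance.yml') as file:
        assert yaml.safe_load(file) == record  # JSON parses as valid YAML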
diff --git a/esmvaltool/diag_scripts/examples/diagnostic.ncl b/esmvaltool/diag_scripts/examples/diagnostic.ncl
index 28c38be062..a4ceda9b7b 100644
--- a/esmvaltool/diag_scripts/examples/diagnostic.ncl
+++ b/esmvaltool/diag_scripts/examples/diagnostic.ncl
@@ -34,15 +34,14 @@
 ; Caveats
 ;
 ; Modification history
-;    20181107-A_righ_ma: revised and extended to include new metadata handling
-;    20151027-A_laue_ax: moved call to 'write_references' to the beginning
-;                        of the code
-;    20150325-A_laue_ax: modified reference tags used for acknowledgements
-;                        (projects, observations, etc)
-;    20140312-A_righ_ma: adapted to new time_operations structure
-;    20130618-A_gott_kl: adjusted to r738 of
-;                        https://svn.dlr.de/ESM-Diagnostic/sources/trunk
-;    20121130-A_gott_kl: written
+;    20181107-righi_mattia: revised and extended to include new metadata
+;                           handling
+;    20151027-lauer_axel: moved call to 'write_references' to the beginning
+;                         of the code
+;    20150325-lauer_axel: modified reference tags used for acknowledgements
+;                         (projects, observations, etc)
+;    20140312-righi_mattia: adapted to new time_operations structure
+;    20121130-gottschaldt_klaus-dirk: written
 ;
 ; #############################################################################
@@ -92,13 +91,6 @@ begin
   dataset = read_data(info[0])
   printVarSummary(dataset)
 
-  ; Read fx-variable for the second dataset in the list
-  ; fx-variable are specified in the recipe, the field is returned as a list
-  ; where also auxiliary coordinates are stored (if available).
-  ; If no fx-variable is found a missing value is returned
-  fx_var = read_fx_data(info[1], "sftlf")
-  printVarSummary(fx_var)
-
   ; Retrieve metadata item for the reference dataset
   atts = True
   atts@short_name = var0
@@ -184,17 +176,15 @@ begin
   ; -> useful for non-NCL routines & metrics
   ; map = contour_map("dummy_for_wks", ncdf_outfile,"dummy_for_var")
 
-  ; collect meta-data and call ESMValMD function
-  nc_file = ncdf_outfile
-  plot_file = map@outfile
-  caption = "Mean of variable: " + var0
-  statistics = ("mean")
-  domain = ("glob")
-  plottype = ("geo")
-  authors = (/"righ_ma", "gott_kl"/)
-  references = (/"acknow_author"/)
-  infiles = metadata_att_as_array(info0, "filename")
-  log_provenance(nc_file, plot_file, caption, statistics, domain, plottype, \
-                 authors, references, infiles)
+  ; Call provenance logger
+  log_provenance(ncdf_outfile, \
+                 map@outfile, \
+                 "Mean of variable: " + var0, \
+                 "mean", \
+                 "global", \
+                 "geo", \
+                 (/"righi_mattia", "gottschaldt_klaus-dirk"/), \
+                 (/"acknow_author"/), \
+                 metadata_att_as_array(info0, "filename"))
 
 end
diff --git a/esmvaltool/diag_scripts/examples/diagnostic.py b/esmvaltool/diag_scripts/examples/diagnostic.py
index d49939b3df..a9e4f8667e 100644
--- a/esmvaltool/diag_scripts/examples/diagnostic.py
+++ b/esmvaltool/diag_scripts/examples/diagnostic.py
@@ -1,32 +1,38 @@
 """Python example diagnostic."""
 import logging
-import os
+from pathlib import Path
 from pprint import pformat
 
 import iris
 
-from esmvaltool.diag_scripts.shared import (group_metadata, run_diagnostic,
-                                            select_metadata, sorted_metadata)
-from esmvaltool.diag_scripts.shared._base import (
-    ProvenanceLogger, get_diagnostic_filename, get_plot_filename)
+from esmvaltool.diag_scripts.shared import (
+    group_metadata,
+    run_diagnostic,
+    save_data,
+    save_figure,
+    select_metadata,
+    sorted_metadata,
+)
 from esmvaltool.diag_scripts.shared.plot import quickplot
 
-logger = logging.getLogger(os.path.basename(__file__))
+logger = logging.getLogger(Path(__file__).stem)
 
 
 def get_provenance_record(attributes, ancestor_files):
     """Create a provenance record describing the diagnostic data and plot."""
-    caption = ("Average {long_name} between {start_year} and {end_year} "
-               "according to {dataset}.".format(**attributes))
+    # The associated recipe contains a caption string with placeholders
+    # like {long_name} that are populated from the attributes dictionary.
+    # Note that for simple recipes, the caption can be set here as a string.
+    caption = attributes['caption'].format(**attributes)
 
     record = {
         'caption': caption,
         'statistics': ['mean'],
         'domains': ['global'],
-        'plot_type': 'zonal',
+        'plot_types': ['zonal'],
         'authors': [
-            'ande_bo',
-            'righ_ma',
+            'andela_bouwe',
+            'righi_mattia',
         ],
         'references': [
             'acknow_project',
@@ -42,26 +48,21 @@ def compute_diagnostic(filename):
     cube = iris.load_cube(filename)
 
     logger.debug("Running example computation")
-    return cube.collapsed('time', iris.analysis.MEAN)
+    cube = iris.util.squeeze(cube)
+    return cube
 
 
 def plot_diagnostic(cube, basename, provenance_record, cfg):
     """Create diagnostic data and plot it."""
-    diagnostic_file = get_diagnostic_filename(basename, cfg)
-    logger.info("Saving analysis results to %s", diagnostic_file)
-    iris.save(cube, target=diagnostic_file)
+    # Save the data used for the plot
+    save_data(basename, provenance_record, cfg, cube)
 
-    if cfg['write_plots'] and cfg.get('quickplot'):
-        plot_file = get_plot_filename(basename, cfg)
-        logger.info("Plotting analysis results to %s", plot_file)
-        provenance_record['plot_file'] = plot_file
-        quickplot(cube, filename=plot_file, **cfg['quickplot'])
-
-    logger.info("Recording provenance of %s:\n%s", diagnostic_file,
-                pformat(provenance_record))
-    with ProvenanceLogger(cfg) as provenance_logger:
-        provenance_logger.log(diagnostic_file, provenance_record)
+    if cfg.get('quickplot'):
+        # Create the plot
+        quickplot(cube, **cfg['quickplot'])
+        # And save the plot
+        save_figure(basename, provenance_record, cfg)
 
 
 def main(cfg):
@@ -70,30 +71,35 @@ def main(cfg):
     input_data = cfg['input_data'].values()
 
     # Demonstrate use of metadata access convenience functions.
-    selection = select_metadata(input_data, short_name='pr', project='CMIP5')
-    logger.info("Example of how to select only CMIP5 precipitation data:\n%s",
+    selection = select_metadata(input_data, short_name='tas', project='CMIP5')
+    logger.info("Example of how to select only CMIP5 temperature data:\n%s",
                 pformat(selection))
 
     selection = sorted_metadata(selection, sort='dataset')
     logger.info("Example of how to sort this selection by dataset:\n%s",
                 pformat(selection))
 
-    grouped_input_data = group_metadata(
-        input_data, 'standard_name', sort='dataset')
+    grouped_input_data = group_metadata(input_data,
+                                        'variable_group',
+                                        sort='dataset')
     logger.info(
-        "Example of how to group and sort input data by standard_name:"
-        "\n%s", pformat(grouped_input_data))
+        "Example of how to group and sort input data by variable groups from "
+        "the recipe:\n%s", pformat(grouped_input_data))
 
     # Example of how to loop over variables/datasets in alphabetical order
-    for standard_name in grouped_input_data:
-        logger.info("Processing variable %s", standard_name)
-        for attributes in grouped_input_data[standard_name]:
+    groups = group_metadata(input_data, 'variable_group', sort='dataset')
+    for group_name in groups:
+        logger.info("Processing variable %s", group_name)
+        for attributes in groups[group_name]:
             logger.info("Processing dataset %s", attributes['dataset'])
             input_file = attributes['filename']
             cube = compute_diagnostic(input_file)
 
-            output_basename = os.path.splitext(
-                os.path.basename(input_file))[0] + '_mean'
+            output_basename = Path(input_file).stem
+            if group_name != attributes['short_name']:
+                output_basename = group_name + '_' + output_basename
+            if "caption" not in attributes:
+                attributes['caption'] = input_file
 
             provenance_record = get_provenance_record(
                 attributes, ancestor_files=[input_file])
             plot_diagnostic(cube, output_basename, provenance_record, cfg)
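The caption mechanism above is plain str.format applied to the metadata dictionary, so any attribute delivered by the recipe or preprocessor can appear as a placeholder. A minimal sketch, reusing the caption template that this diff removes from the hard-coded version (attribute values are hypothetical):

    attributes = {
        'caption': "Average {long_name} between {start_year} and {end_year} "
                   "according to {dataset}.",
        'long_name': 'Air Temperature',
        'start_year': 2000,
        'end_year': 2005,
        'dataset': 'EC-Earth3',
    }
    caption = attributes['caption'].format(**attributes)
    # "Average Air Temperature between 2000 and 2005 according to EC-Earth3."
    print(caption)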
diff --git a/esmvaltool/diag_scripts/examples/diagnostic.r b/esmvaltool/diag_scripts/examples/diagnostic.r
deleted file mode 100644
index ffcc8dd70e..0000000000
--- a/esmvaltool/diag_scripts/examples/diagnostic.r
+++ /dev/null
@@ -1,63 +0,0 @@
-# #############################################################################
-# diagnostic.r
-# Authors: E. Arnone (ISAC-CNR, Italy)
-# #############################################################################
-# Description
-#    Example of ESMValTool diagnostic written in R
-#
-# Required
-#
-# Optional
-#
-# Caveats
-#
-# Modification history
-#    20180620-A_arnone_e: written for v2.0
-#
-# ############################################################################
-library(tools)
-library(yaml)
- # get path to script and source subroutines (if needed)
-diag_scripts_dir <- Sys.getenv("diag_scripts")
-# source paste0(diag_scripts_dir,"/subroutine.r")
-print(file.path("source ", diag_scripts_dir, "subroutine.r"))
- # read settings and metadata files (assuming one variable only)
-args <- commandArgs(trailingOnly = TRUE)
-settings <- yaml::read_yaml(args[1])
-for (myname in names(settings)) {
-  temp <- get(myname, settings); assign(myname, temp)
-}
-metadata <- yaml::read_yaml(settings$input_files)
-# get name of climofileis for first variable and list
-# associated to first climofile
-climofiles <- names(metadata)
-climolist <- get(climofiles[1], metadata)
- # get diagnostic name from metadata file
-diag_base <- climolist$diagnostic
-print(paste0(diag_base, ": starting routine"))
- # create work and plot directories if they do not exist
-print(paste0(diag_base, ": creating work and plot directories"))
-dir.create(work_dir, recursive = T, showWarnings = F)
-dir.create(plot_dir, recursive = T, showWarnings = F)
- # extract metadata
-models_name <- unname(sapply(metadata, "[[", "dataset"))
-reference_model <- unname(sapply(metadata, "[[", "reference_dataset"))[1]
-models_start_year <- unname(sapply(metadata, "[[", "start_year"))
-models_end_year <- unname(sapply(metadata, "[[", "end_year"))
-models_experiment <- unname(sapply(metadata, "[[", "exp"))
-models_ensemble <- unname(sapply(metadata, "[[", "ensemble"))
- ## Loop through input models
-for (model_idx in c(1:(length(models_name)))) {
-  # Setup parameters and path
-  model <- models_name[model_idx]
-  year1 <- models_start_year[model_idx]
-  year2 <- models_end_year[model_idx]
-  infile <- climofiles[model_idx]
-  model_exp <- models_experiment[model_idx]
-  model_ens <- models_ensemble[model_idx]
-  print(paste0(diag_base, ": working on file ", infile))
-  print(paste0(diag_base, ": calling diagnostic with following parameters"))
-  print(paste(model, model_exp, model_ens, year1, year2))
-  ## Call actual diagnostic
-  print(paste0(diag_base, ": I am your R diagnostic"))
-}
diff --git a/esmvaltool/diag_scripts/examples/diagnostic_object_oriented.py b/esmvaltool/diag_scripts/examples/diagnostic_object_oriented.py
deleted file mode 100644
index 8903e932c8..0000000000
--- a/esmvaltool/diag_scripts/examples/diagnostic_object_oriented.py
+++ /dev/null
@@ -1,47 +0,0 @@
-"""Python example diagnostic using general object-based functions."""
-import logging
-import os
-
-import iris
-
-import esmvaltool.diag_scripts.shared as e
-import esmvaltool.diag_scripts.shared.names as n
-
-logger = logging.getLogger(os.path.basename(__file__))
-
-
-def main(cfg):
-    """Compute the time average for each input dataset."""
-    datasets = e.Datasets(cfg)
-    variables = e.Variables(cfg)
-    logger.debug("Found datasets: %s", datasets)
-    logger.debug("Found variables: %s", variables)
-
-    for path in datasets:
-        logger.info("Processing variable %s from dataset %s",
-                    datasets.get_info(n.STANDARD_NAME, path),
-                    datasets.get_info(n.DATASET, path))
-
-        logger.debug("Loading %s", path)
-        cube = iris.load_cube(path)
-
-        logger.debug("Running example computation")
-        cube = cube.collapsed(n.TIME, iris.analysis.MEAN)
-
-        name = os.path.splitext(os.path.basename(path))[0] + '_mean'
-        if cfg[n.WRITE_NETCDF]:
-            filepath = os.path.join(cfg[n.WORK_DIR], name + '.nc')
-            logger.debug("Saving analysis results to %s", filepath)
-            iris.save(cube, target=filepath)
-
-        if cfg[n.WRITE_PLOTS] and cfg.get('quickplot'):
-            filepath = os.path.join(cfg[n.PLOT_DIR],
-                                    name + '.' + cfg[n.OUTPUT_FILE_TYPE])
-            logger.debug("Plotting analysis results to %s", filepath)
-            e.plot.quickplot(cube, filename=filepath, **cfg['quickplot'])
-
-
-if __name__ == '__main__':
-
-    with e.run_diagnostic() as config:
-        main(config)
diff --git a/esmvaltool/diag_scripts/examples/make_plot.py b/esmvaltool/diag_scripts/examples/make_plot.py
new file mode 100644
index 0000000000..796d55cd49
--- /dev/null
+++ b/esmvaltool/diag_scripts/examples/make_plot.py
@@ -0,0 +1,77 @@
+"""Python example diagnostic."""
+import logging
+from pathlib import Path
+
+import iris
+import iris.quickplot
+import matplotlib.pyplot as plt
+
+from esmvaltool.diag_scripts.shared import run_diagnostic, save_figure
+
+logger = logging.getLogger(Path(__file__).stem)
+
+
+def main(cfg):
+    """Plot part of figure_9.3a from IPCC AR6."""
+    colors = {
+        'historical-ssp126': '#2a3652',
+        'historical-ssp585': '#78333a',
+    }
+    fill_colors = {
+        'historical-ssp126': '#d2d5dc',
+        'historical-ssp585': '#ddced2',
+    }
+    labels = {
+        'historical-ssp126': 'Historical and SSP1-2.6',
+        'historical-ssp585': 'Historical and SSP5-8.5',
+    }
+
+    # Group input data by experiment
+    groups = {}
+    for filename, attributes in cfg['input_data'].items():
+        exp = attributes['exp']
+        if exp not in groups:
+            groups[exp] = {}
+        groups[exp][attributes['dataset']] = filename
+
+    # Loop over experiments to populate plot
+    for exp, group in groups.items():
+        mean = iris.load_cube(group['MultiModelMean'])
+        iris.quickplot.plot(
+            mean,
+            color=colors.get(exp),
+            label=labels.get(exp, exp),
+        )
+
+        p17 = iris.load_cube(group['MultiModelPercentile17'])
+        p83 = iris.load_cube(group['MultiModelPercentile83'])
+        time_coord = mean.coord('time')
+        time_axis = time_coord.units.num2date(time_coord.core_points())
+        plt.fill_between(
+            time_axis,
+            p17.core_data(),
+            p83.core_data(),
+            color=fill_colors.get(exp),
+            label='Likely (17% - 83%) ranges',
+        )
+
+    plt.title('Sea surface temperature anomaly')
+    plt.legend(loc='upper left')
+
+    filename = 'IPCC_AR6_figure_9.3a_1850-2100'
+    provenance_record = {
+        'caption': "Part of figure 9.3a from IPCC AR6.",
+        'authors': [
+            'kalverla_peter',
+            'andela_bouwe',
+        ],
+        'references': ['fox-kemper21ipcc'],
+        'ancestors': list(cfg['input_data'].keys()),
+    }
+    save_figure(filename, provenance_record, cfg, dpi=300)
+
+
+if __name__ == '__main__':
+
+    with run_diagnostic() as config:
+        main(config)
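make_plot.py builds its x-axis by converting the time coordinate's numeric points to datetimes through the coordinate's units object; this is the usual way to get a matplotlib-friendly axis from an iris time coordinate. A minimal sketch using cf_units directly (the unit string and values are illustrative):

    from cf_units import Unit

    units = Unit('days since 1850-01-01', calendar='standard')
    # Convert numeric time values to datetime-like objects for plotting.
    time_axis = units.num2date([0, 365, 730])
    print(time_axis)  # 1850-01-01, 1851-01-01, 1852-01-01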
diff --git a/esmvaltool/diag_scripts/examples/my_little_diagnostic.py b/esmvaltool/diag_scripts/examples/my_little_diagnostic.py
index 9fdbf372c5..84b75d5bf6 100644
--- a/esmvaltool/diag_scripts/examples/my_little_diagnostic.py
+++ b/esmvaltool/diag_scripts/examples/my_little_diagnostic.py
@@ -6,7 +6,7 @@
 Module for personal diagnostics (example).
 Internal imports from esmvaltool work e.g.:
 
-from esmvaltool.preprocessor import regrid
+from esmvalcore.preprocessor import regrid
 from esmvaltool.diag_scripts.shared.supermeans import get_supermean
 
 Pipe output through logger;
@@ -21,10 +21,11 @@
 
 # to manipulate iris cubes
 import iris
+import matplotlib.pyplot as plt
 
 # import internal esmvaltool modules here
 from esmvaltool.diag_scripts.shared import group_metadata, run_diagnostic
-from esmvaltool.preprocessor import average_region
+from esmvalcore.preprocessor import area_statistics
 
 
 def _plot_time_series(cfg, cube, dataset):
@@ -77,7 +78,7 @@ def run_my_diagnostic(cfg):
     Before plotting, we grab the squared result (not all operations on
     cubes) and apply an area average on it. This is a useful example of how
-    to use standard esmvaltool-preprocessor functionality within a diagnostic, and
+    to use standard esmvalcore.preprocessor functionality within a diagnostic, and
     especially after a certain (custom) diagnostic has been run and the user
     needs to perform an operation that is already part of the preprocessor
     standard library of functions.
@@ -118,7 +119,7 @@ def run_my_diagnostic(cfg):
     # compute an area average over the squared cube
     # to apply the area average use a preprocessor function
     # rather than writing your own function
-    area_avg_cube = average_region(squared_cube, 'latitude', 'longitude')
+    area_avg_cube = area_statistics(squared_cube, 'mean')
 
     # finalize your analysis by plotting a time series of the
     # diffed, squared and area averaged cube; call the plot function:
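The switch from average_region to area_statistics reflects the preprocessor moving into ESMValCore; the same function can be reused inside a diagnostic on any cube whose latitude and longitude coordinates carry bounds. A minimal sketch under that assumption, using a synthetic cube rather than the diagnostic's real input:

    import iris
    import numpy as np
    from esmvalcore.preprocessor import area_statistics

    # Synthetic global field with bounded latitude/longitude coordinates.
    lat = iris.coords.DimCoord(np.linspace(-85, 85, 18),
                               standard_name='latitude', units='degrees')
    lon = iris.coords.DimCoord(np.linspace(5, 355, 36),
                               standard_name='longitude', units='degrees')
    cube = iris.cube.Cube(np.random.rand(18, 36), var_name='tas', units='K',
                          dim_coords_and_dims=[(lat, 0), (lon, 1)])
    for name in ('latitude', 'longitude'):
        cube.coord(name).guess_bounds()

    squared_cube = cube ** 2  # a custom diagnostic step
    area_avg_cube = area_statistics(squared_cube, 'mean')  # preprocessor reuse
    print(area_avg_cube.data)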
+# +# Modification history +# 2018 0725-mohr_christianwilhelm: created +############################################################################### + +idx_df <- data.frame( + idx_etccdi = c( + "altcdd", + "altcsdi", + "altcwd", + "altwsdi", + "cdd", + "csdi", + "cwd", + "dtr", + "dtr", + "fd", + "gsl", + "id", + "prcptot", + "r10mm", + "r1mm", + "r20mm", + "r95p", + "r99p", + "rx1day", + "rx1day", + "rx5day", + "rx5day", + "sdii", + "su", + "tn10p", + "tn10p", + "tn90p", + "tn90p", + "tnn", + "tnn", + "tnx", + "tnx", + "tr", + "tx10p", + "tx10p", + "tx90p", + "tx90p", + "txn", + "txn", + "txx", + "txx", + "wsdi" + ), + time = c( + "yr", + "yr", + "yr", + "yr", + "yr", + "yr", + "yr", + "mon", + "yr", + "yr", + "yr", + "yr", + "yr", + "yr", + "yr", + "yr", + "yr", + "yr", + "mon", + "yr", + "mon", + "yr", + "yr", + "yr", + "mon", + "yr", + "mon", + "yr", + "mon", + "yr", + "mon", + "yr", + "yr", + "mon", + "yr", + "mon", + "yr", + "mon", + "yr", + "mon", + "yr", + "yr" + ), + unit = c( + "days", + "days", + "days", + "days", + "days", + "days", + "days", + "deg C", + "deg C", + "days", + "days", + "days", + "mm", + "days", + "days", + "days", + "mm", + "mm", + "mm", + "mm", + "mm", + "mm", + "mm/day", + "days", + "Exceedance rate, %", + "Exceedance rate, %", + "Exceedance rate, %", + "Exceedance rate, %", + "deg C", + "deg C", + "deg C", + "deg C", + "days", + "Exceedance rate, %", + "Exceedance rate, %", + "Exceedance rate, %", + "Exceedance rate, %", + "deg C", + "deg C", + "deg C", + "deg C", + "days" + ), + name = c( + "Consecutive Dry Days per Year (altCDD)", + "Cold Spell Duration Index Spanning Years (altCSDI)", + "Consecutive Wet Days per Year (altCWD)", + "Warm Spell Duration Index Spanning Years (altWSDI)", + "Consecutive Dry Days (CDD)", + "Cold Spell Duration Index (CSDI)", + "Consecutive Wet Days (CWD)", + "Monthly Diurnal Temperature Range (DTR)", + "Annual Diurnal Temperature Range (DTR)", + "Frost Days (FD)", + "Growing Season Length (GSL)", + "Icing Days (ID)", + "Annual Total Wet-Day Precipitation (PRCPTOT)", + "Heavy Precipitation Days (R10)", + "Precipitation Days (R1)", + "Very Heavy Precipitation Days (R20)", + "Very Wet Days (R95p)", + "Extremely Wet Days (R99p)", + "Monthly Max 1-day Precipitation (RX1day)", + "Annual Max 1-day Precipitation (RX1day)", + "Monthly Max 5-day Precipitation (RX5day)", + "Annual Max 5-day Precipitation (RX5day)", + "Simple Daily Intensity Index (SDII)", + "Summer Days (SD)", + "Monthly Cold Nights (TN10p)", + "Annual Cold Nights (TN10p)", + "Monthly Warm Nights (TN90p)", + "Annual Warm Nights (TN90p)", + "Monthly Minimum Tmin (TNn)", + "Annual Minimum Tmin (TNn)", + "Monthly Maximum Tmin (TNx)", + "Annual Maximum Tmin (TNx)", + "Tropical Nights (TR)", + "Monthly Cool Days (TX10p)", + "Annual Cool Days (TX10p)", + "Monthly Warm Days (TX90p)", + "Annual Warm Days (TX90p)", + "Monthly Minimum Tmax (TXn)", + "Annual Minimum Tmax (TXn)", + "Monthly Maximum Tmax (TXn)", + "Annual Maximum Tmax (TXn)", + "Warm Spell Duration Index (WSDI)" + ), + stringsAsFactors = FALSE +) + +idx_df$idx_etccdi_time <- paste(idx_df$idx_etccdi, "ETCCDI_", + idx_df$time, + sep = "" +) + +# Unfortunatley expressions cannot be added to dataframes. +# These expreesion are required for the timeseries. 
+idx_ylab <- c(
+  expression(
+    "days",
+    "days",
+    "days",
+    "days",
+    "days",
+    "days",
+    "days",
+    paste(degree, "C"),
+    paste(degree, "C"),
+    "days",
+    "days",
+    "days",
+    "mm",
+    "days",
+    "days",
+    "days",
+    "mm",
+    "mm",
+    "mm",
+    "mm",
+    "mm",
+    "mm",
+    "mm day^-1",
+    "days",
+    "Exceedance rate, %",
+    "Exceedance rate, %",
+    "Exceedance rate, %",
+    "Exceedance rate, %",
+    paste(degree, "C"),
+    paste(degree, "C"),
+    paste(degree, "C"),
+    paste(degree, "C"),
+    "days",
+    "Exceedance rate, %",
+    "Exceedance rate, %",
+    "Exceedance rate, %",
+    "Exceedance rate, %",
+    paste(degree, "C"),
+    paste(degree, "C"),
+    paste(degree, "C"),
+    paste(degree, "C"),
+    "days"
+  )
+)
diff --git a/esmvaltool/diag_scripts/extreme_events/cfg_extreme.R b/esmvaltool/diag_scripts/extreme_events/cfg_extreme.R
new file mode 100644
index 0000000000..791a5fefae
--- /dev/null
+++ b/esmvaltool/diag_scripts/extreme_events/cfg_extreme.R
@@ -0,0 +1,51 @@
+# These are default values, loaded before the recipe is read
+regrid_dataset <- NA
+base_range <- NA
+analysis_range <- NA
+climdex_parallel <- 4
+mip_name <- "cmip"
+ts_col_list <- c(
+  "dodgerblue2",
+  "darkgreen",
+  "firebrick2",
+  "darkorchid",
+  "aquamarine3"
+)
+ts_png_width <- 640
+ts_png_height <- 480
+ts_png_units <- "px"
+ts_png_pointsize <- 12
+ts_png_bg <- "white"
+ts_lty_list <- c(1, 4, 2, 3, 5)
+ts_lwd_list <- c(2, 2, 2, 2, 2)
+ts_data <- TRUE
+normalize <- FALSE
+timeseries_idx <- c(
+  "tn10pETCCDI_yr",
+  "tn90pETCCDI_yr",
+  "tx10pETCCDI_yr",
+  "tx90pETCCDI_yr"
+)
+gleckler_idx <- c(
+  "tn10pETCCDI_yr",
+  "tn90pETCCDI_yr",
+  "tx10pETCCDI_yr",
+  "tx90pETCCDI_yr"
+)
+
+ts_plt <- TRUE
+glc_plt <- TRUE
+glc_arr <- FALSE
+gl_mar_par <- c(7, 4, 3, 11)
+gl_png_res <- 480
+gl_png_units <- "px"
+gl_png_pointsize <- 14
+gl_png_bg <- "white"
+gl_rmsespacer <- 0.01
+gl_scaling_factor <- 1.0
+gl_text_scaling_factor <- 1.0
+gl_xscale_spacer_rmse <- 1.5
+gl_xscale_spacer_rmsestd <- 4.5
+gl_symb_scaling_factor <- 1.5
+gl_symb_yshift <- 2.5
+gl_text_symb_scaling_factor <- 0.6
diff --git a/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/CHANGELOG b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/CHANGELOG
new file mode 100644
index 0000000000..6ec5d5cee8
--- /dev/null
+++ b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/CHANGELOG
@@ -0,0 +1,7 @@
+0.5-4: Updated maintainer and license and added users and contributors guides.
+
+0.5-3: Fix issue with data in rotated pole projection where attempting to run the code produces the error "squared eccentricity < 0".
+
+0.5-2: Fix problems when only tavg supplied.
+
+0.5-1: Initial release version.
diff --git a/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/CONTRIBUTING.rst b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/CONTRIBUTING.rst
new file mode 100644
index 0000000000..0aadc75919
--- /dev/null
+++ b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/CONTRIBUTING.rst
@@ -0,0 +1,171 @@
+Contributing to the climdex.pcic.ncdf R package
+===============================================
+
+Getting Started
+---------------
+
+- Create a `Github account`_.
+- Fork the repository on Github at https://github.com/pacificclimate/climdex.pcic.ncdf.
+- Work on the code (see the `next section`_)
+- Send us a `pull request`_.
+
+.. _Github account: https://github.com/signup/free
+.. _pull request: https://help.github.com/articles/using-pull-requests/
+.. _next section: #how-to-set-up-a-development-environment
+
+How to set up a development environment
+---------------------------------------
+
+You don't need much to get started for development. You'll need to have installed:
+
+- R (ensure that all of the "Depends", "Imports", and "Suggests" packages are also installed)
+- Any C++ build environment supported by `CRAN package checking`_
+- git
+- your text editor of choice
+
+That's it!
+
+Once you have the required software installed, create a local clone of the repository.
+::
+
+    $ git clone https://github.com/[your_user]/climdex.pcic.ncdf.git
+
+Build the docs (which generates the auto-generated NAMESPACE file needed for the build). See `below <#how-to-build-the-docs>`_.
+
+Then make sure that everything builds out of the box
+::
+
+    $ R CMD build climdex.pcic.ncdf/
+
+.. _CRAN package checking: http://cran.r-project.org/web/checks/check_flavors.html
+
+How to run the tests
+--------------------
+
+Running the tests can be done with one command:
+::
+
+    james@basalt ~/code/git $ R CMD check climdex.pcic.ncdf/
+
+You'll see a bunch of package building spew that has nothing to do with the tests. But towards the end, you see something like this:
+::
+
+    * checking for unstated dependencies in tests ... OK
+    * checking tests ...
+      Running ‘bootstrap.R’
+      Running ‘test_basic_file_funcs.R’
+      Running ‘test_var_meta.R’
+     OK
+
+Bug reports
+-----------
+
+If there are problems with our package or bugs in the code, please let us know! We welcome bug reports. To submit one:
+
+- `Create a new issue`_ on our GitHub page.
+- Tag/label the issue as a bug
+- Leave it unassigned
+
+Then please follow these guidelines for writing your report:
+
+- Please describe the problem in as much detail as possible
+- Include a complete description of:
+
+  - Exactly what you did (i.e. "steps to reproduce")
+  - What you expected to happen
+  - What actually happened
+
+- Include *all* output from the terminal.
+- Run R's ``sessionInfo()`` function and include the full output.
+
+I cannot stress enough how important it is to contrast what you expected to happen with what actually happened. When executing the code does not produce the *advertised* result, there is a bug in the package. When the code does not produce the result that you *wished* it had, this is *not* a bug. We receive far too many reports in the latter category.
+
+.. _Create a new issue: https://github.com/pacificclimate/climdex.pcic.ncdf/issues/new
+
+.. _build-the-docs:
+
+How to build the docs
+---------------------
+
+The package documentation is inline in the code. All of the manual pages are built using ``roxygen2``. Make sure that you have ``roxygen2`` installed and loaded:
+::
+
+    james@basalt ~/code/git/climdex.pcic.ncdf $ R
+
+    R version 3.0.3 (2014-03-06) -- "Warm Puppy"
+    Copyright (C) 2014 The R Foundation for Statistical Computing
+    Platform: x86_64-pc-linux-gnu (64-bit)
+
+    R is free software and comes with ABSOLUTELY NO WARRANTY.
+    You are welcome to redistribute it under certain conditions.
+    Type 'license()' or 'licence()' for distribution details.
+
+      Natural language support but running in an English locale
+
+    R is a collaborative project with many contributors.
+    Type 'contributors()' for more information and
+    'citation()' on how to cite R or R packages in publications.
+
+    Type 'demo()' for some demos, 'help()' for on-line help, or
+    'help.start()' for an HTML browser interface to help.
+    Type 'q()' to quit R.
+
+    > library(roxygen2)
+
+Then call ``roxygenize()`` to build the docs.
+::
+
+    > roxygenize()
+    First time using roxygen2 4.0. Upgrading automatically...
+    Loading required package: PCICt
+    Loading required package: ncdf4
+    Loading required package: climdex.pcic
+    Loading required package: ncdf4.helpers
+    Loading required package: snow
+    Loading required package: udunits2
+    Loading required package: functional
+    Loading required package: proj4
+    Writing NAMESPACE
+    Writing climdex.pcic.ncdf.Rd
+    Writing create.climdex.cmip5.filenames.Rd
+    Writing get.climdex.variable.list.Rd
+    Writing get.climdex.functions.Rd
+    Writing get.climdex.variable.metadata.Rd
+    Writing create.ncdf.output.files.Rd
+    Writing compute.climdex.indices.Rd
+    Writing flatten.dims.Rd
+    Writing get.data.Rd
+    Writing get.northern.hemisphere.booleans.Rd
+    Writing get.quantiles.object.Rd
+    Writing compute.indices.for.stripe.Rd
+    Writing get.thresholds.chunk.Rd
+    Writing write.climdex.results.Rd
+    Writing get.quantiles.for.stripe.Rd
+    Writing create.thresholds.file.Rd
+    Writing get.var.file.idx.Rd
+    Writing create.file.metadata.Rd
+    Writing get.thresholds.metadata.Rd
+    Writing create.thresholds.from.file.Rd
+    Writing thresholds.open.Rd
+    Writing thresholds.close.Rd
+    Writing create.indices.from.files.Rd
+
+
+Submitting pull requests
+------------------------
+
+We would love help from the greater climate community in developing the package, and we welcome contributions to the climdex.pcic.ncdf package.
+
+- Please write tests for any functionality that you may add.
+- Please modify tests for any functionality that you change.
+- In short, please make sure that all of the tests pass.
+
+After you are *positive* that everything is completely tested with a passing test suite, we would love to see your pull request. If you are not familiar with the process, please follow GitHub's help page on submitting a `pull request`_.
+
+Don't code? No problem!
+-----------------------
+
+Even if you don't program for a living, there are plenty of ways to help. Not only is the code open and collaborative, but so are the documentation and issue tracking. Anyone can help with these. If you can't program, consider helping with the following:
+
+- If the documentation doesn't answer your questions, it probably doesn't answer many people's questions. Help us all out and write something that does.
+- Take a look through the outstanding `"help wanted" issues`_, and see if you know any of the answers.
+- If there are `open bug reports`_, see if you can reproduce the problem and verify that it exists. Having bug reports validated and/or clarified by multiple parties is extremely valuable.
+- Tell us your story. If ``climdex.pcic.ncdf`` has helped your project to better understand climate extremes, we would love to hear about it. Write a blog post and/or send an e-mail to the `package maintainer`_.
+
+.. _"help wanted" issues: https://github.com/pacificclimate/climdex.pcic.ncdf/labels/help%20wanted
+.. _open bug reports: https://github.com/pacificclimate/climdex.pcic.ncdf/labels/bug
+.. _package maintainer: mailto:hiebert@uvic.ca
diff --git a/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/COPYING b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/COPYING
new file mode 100644
index 0000000000..94a9ed024d
--- /dev/null
+++ b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/COPYING
@@ -0,0 +1,674 @@
+                    GNU GENERAL PUBLIC LICENSE
+                       Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc.
+ Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. 
+ + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. 
For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. 
+ + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. 
You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. 
+ + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. 
+ + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. 
+ + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. 
+
+  THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+  16. Limitation of Liability.
+
+  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+  17. Interpretation of Sections 15 and 16.
+
+  If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs. If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License. But first, please read
+<https://www.gnu.org/philosophy/why-not-lgpl.html>.
diff --git a/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/DESCRIPTION b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/DESCRIPTION
new file mode 100644
index 0000000000..85ad0b5fc6
--- /dev/null
+++ b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/DESCRIPTION
@@ -0,0 +1,26 @@
+Package: climdex.pcic.ncdf
+Version: 0.5-4
+Date: 2014-11-03
+Title: Functions to compute CLIMDEX indices over a NetCDF grid
+Author: David Bronaugh for the Pacific Climate Impacts
+    Consortium (PCIC)
+Maintainer: James Hiebert
+Depends:
+    R (>= 3.0),
+    PCICt (>= 0.5-4)
+Imports:
+    ncdf4 (>= 1.10),
+    climdex.pcic (>= 1.1-1),
+    ncdf4.helpers (>= 0.3-3),
+    snow (>= 0.3-13),
+    udunits2 (>= 0.6),
+    functional (>= 0.4),
+    proj4 (>= 1.0-8)
+Suggests:
+    RUnit
+Description: This package contains functions which can be used to compute
+    CLIMDEX indices using NetCDF input files, writing to NetCDF output files.
+    Code allows for parallel computation of indices using either a SOCK or MPI
+    cluster.
+License: GPL-3
+URL: http://www.r-project.org
diff --git a/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/R/ncdf.R b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/R/ncdf.R
new file mode 100644
index 0000000000..549b53c366
--- /dev/null
+++ b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/R/ncdf.R
@@ -0,0 +1,1469 @@
+# nolint start
+#' climdex.pcic.ncdf, a package to calculate Climdex indices from NetCDF files.
+#'
+#' This package implements code to facilitate computation of Climdex indices
+#' from NetCDF input files.
+#'
+#' The Climdex climate extremes indices have historically been calculated using
+#' Fortran code. This has a number of problems:\itemize{
+#' \item{Difficult to test}
+#' \item{Difficult to modify (for instance, to add NetCDF file I/O)}
+#' \item{Difficult to parallelize}
+#' }
+#' The \code{climdex.pcic} package provides an easy interface to efficient
+#' computation of Climdex indices. This package is complementary to it, providing
+#' easy access to functions to compute indices in parallel, using NetCDF files as
+#' input and output. It implements chunked processing of input files to keep memory
+#' usage reasonable; it implements parallel computation using the \code{snow}
+#' library; and it includes a test suite to verify correctness of the implementation.
+#' Furthermore, the package has a modular design, allowing for easy extension to
+#' allow for adaptation to changing or custom requirements.
+#'
+#' Users of this package should pay particular attention to the
+#' \code{\link{create.indices.from.files}} function, which computes Climdex indices
+#' given NetCDF input files; and \code{\link{create.thresholds.from.file}}, which
+#' computes thresholds for use with threshold-based indices given NetCDF input files.
+#' Many of the other functions exposed by the package are intended to provide for +#' extensibility, but are unlikely to be routinely used by users of this package. +#' +#' @name climdex.pcic.ncdf +#' @aliases climdex.pcic.ncdf-package +#' @docType package +#' @seealso \code{\link{create.indices.from.files}}, \code{\link{create.thresholds.from.file}} +#' @references \url{http://etccdi.pacificclimate.org/list_27_indices.shtml} +#' +#' Karl, T.R., N. Nicholls, and A. Ghazi, 1999: CLIVAR/GCOS/WMO workshop on +#' indices and indicators for climate extremes: Workshop summary. Climatic +#' Change, 42, 3-7. +#' +#' Peterson, T.C., and Coauthors: Report on the Activities of the Working Group +#' on Climate Change Detection and Related Rapporteurs 1998-2001. WMO, Rep. +#' WCDMP-47, WMO-TD 1071, Geneve, Switzerland, 143pp. +#' +#' Zhang, X., 2005: Avoiding inhomogeneity in percentile-based indices of +#' temperature extremes. Journal of Climate 18.11 (2005):1641-. +#' @keywords climate ts +#' @importClassesFrom climdex.pcic climdexInput +#' @import snow PCICt +NULL + +## Parallel lapply across 'x', running remote.func, and filtering with local.filter.func . +## Processing is incremental, not batch, to improve parallel throughput and reduce memory consumption. +parLapplyLBFiltered <- function(cl, x, remote.func, ..., local.filter.func=NULL) { + snow::checkCluster(cl) + cluster.size <- length(cl) + num.tasks <- length(x) + if(num.tasks == 0) + return(list()) + if(cluster.size == 0) + stop("Impossible happened; cluster size = 0") + + data.to.return <- vector("list", num.tasks) + + submit.job <- function(cluster.id, task.id) { + snow::sendCall(cl[[cluster.id]], remote.func, args=c(x[task.id], list(...)), tag=task.id) + } + + ## Fire off jobs, filling in the cur.task table as we go. + for(i in 1:min(cluster.size, num.tasks)) + submit.job(i, i) + + next.task <- min(cluster.size, num.tasks) + + ## Stalk and feed jobs + for(i in 1:num.tasks) { + d <- snow::recvOneResult(cl) + next.task <- next.task + 1 + + ## Feed the finished node another task if we have one. 
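+    ## (Results come back in completion order: d$node identifies the worker
+    ## that just finished and d$tag the index of the task it was given, so
+    ## each result can be slotted back into its original position below.)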
+ if(next.task <= num.tasks) + submit.job(d$node, next.task) + + if(!is.null(local.filter.func)) + data.to.return[d$tag] <- list(local.filter.func(d$value, x[[d$tag]])) + else + data.to.return[d$tag] <- list(d$value) + + rm(d) + } + + ## Return data when complete + return(data.to.return) +} + +put.history.att <- function(f, v, definemode=FALSE) { + history.string <- paste("Created by climdex.pcic", packageVersion("climdex.pcic"), "on", date()) + ncdf4::ncatt_put(f, v, "history", history.string, definemode=definemode) + invisible(0) +} + +put.ETCCDI.atts <- function(f, freq, orig.title, author.data, definemode=FALSE) { + if("institution" %in% names(author.data)) + ncdf4::ncatt_put(f, 0, "ETCCDI_institution", author.data$institution, definemode=definemode) + if("institution_id" %in% names(author.data)) + ncdf4::ncatt_put(f, 0, "ETCCDI_institution_id", author.data$institution_id, definemode=definemode) + if("indices_archive" %in% names(author.data)) + ncdf4::ncatt_put(f, 0, "ETCCDI_indices_archive", author.data$indices_archive, definemode=definemode) + + ncdf4::ncatt_put(f, 0, "ETCCDI_software", "climdex.pcic", definemode=definemode) + ncdf4::ncatt_put(f, 0, "ETCCDI_software_version", as.character(packageVersion("climdex.pcic")), definemode=definemode) + + if("contact" %in% names(author.data)) + ncdf4::ncatt_put(f, 0, "contact", author.data$contact, definemode=definemode) + if("references" %in% names(author.data)) + ncdf4::ncatt_put(f, 0, "references", author.data$references, definemode=definemode) + + ncdf4::ncatt_put(f, 0, "frequency", freq, definemode=definemode) + ncdf4::ncatt_put(f, 0, "creation_date", format(Sys.time(), "%Y-%m-%dT%H:%M:%SZ", tz="GMT"), definemode=definemode) + ncdf4::ncatt_put(f, 0, "title", paste("ETCCDI indices computed on", orig.title), definemode=definemode) + invisible(0) +} + +all.the.same <- function(dat) { + ifelse(length(dat) == 1, TRUE, all(unlist(lapply(dat, identical, dat[[1]])))) +} + +#' Creates a list of CMIP5-compliant filenames reflecting the input data. +#' +#' Creates a list of CMIP5-compliant filenames reflecting the input data. +#' +#' This function takes a split filename (as created by \code{get.split.filename.cmip5}) and a list of variables and creates corresponding filenames for the given variables. +#' +#' @param fn.split A vector containing named components, as created by \code{get.split.filename.cmip5}. +#' @param vars.list A vector containing names of variables, as created by \code{\link{get.climdex.variable.list}}. +#' @return A vector containing filenames corresponding to the variables and filename bits supplied. +#' +#' @examples +#' \dontrun{ +#' library(ncdf4.helpers) +#' ## Split out filename bits for use below... +#' fn <- "pr_day_BCCAQ+ANUSPLIN300+MRI-CGCM3_historical+rcp85_r1i1p1_19500101-21001231.nc" +#' fn.split <- get.split.filename.cmip5(fn) +#' +#' ## Create filenames with time data and variable appropriately replaced. 
+#' filenames <- create.climdex.cmip5.filenames(fn.split, c("rx5dayETCCDI_mon", "tn90pETCCDI_yr")) +#' } +#' +#' @export +create.climdex.cmip5.filenames <- function(fn.split, vars.list) { + time.res <- c("yr", "mon")[grepl("_mon$", vars.list) + 1] + time.range <- substr(fn.split[c('tstart', 'tend')], 1, 4) + + paste(paste(vars.list, fn.split['model'], fn.split['emissions'], fn.split['run'], sapply(time.res, function(x) { paste(time.range, switch(x, yr=c("", ""), mon=c("01", "12")), sep="", collapse="-") }), sep="_"), ".nc", sep="") +} + +#' Returns a list of Climdex variables given constraints +#' +#' Returns a list of Climdex variables given constraints. +#' +#' This function takes a character vector which specifies what source data is present and a time resolution, and generates a list of names consisting of the variable and the time resolution, separated by an underscore. +#' +#' @param source.data.present A vector of strings naming the data that's present; at least one of (tmin, tmax, prec, tavg). +#' @param time.resolution The time resolutions to compute indices at. See \code{\link{create.indices.from.files}}. +#' @param climdex.vars.subset A character vector of lower-case names of Climdex indices to calculate (eg: tr, fd, rx5day). See \code{\link{create.indices.from.files}}. +#' @return A character vector containing variable names with time resolutions appended. +#' +#' @seealso \code{\link{create.indices.from.files}} +#' @examples +#' ## Get all variables which require tmin and/or tmax, for all time resolutions. +#' var.list1 <- get.climdex.variable.list(c("tmax", "tmin")) +#' +#' ## Get all variables which require prec with an annual time resolution. +#' var.list2 <- get.climdex.variable.list("prec", time.resolution="annual") +#' +#' ## Get the intersection of a set list of vars and available data. 
+#' sub.vars <- c("su", "id", "tr", "fd", "gsl", "csdi", "wsdi", "r10mm") +#' var.list3 <- get.climdex.variable.list("tmax", climdex.vars.subset=sub.vars) +#' +#' @export +get.climdex.variable.list <- function(source.data.present, time.resolution=c("all", "annual", "monthly"), climdex.vars.subset=NULL) { + time.res <- match.arg(time.resolution) + annual.only <- c("fdETCCDI", "suETCCDI", "idETCCDI", "trETCCDI", "gslETCCDI", "wsdiETCCDI", "csdiETCCDI", "sdiiETCCDI", "r10mmETCCDI", "r20mmETCCDI", "r1mmETCCDI", "cddETCCDI", "cwdETCCDI", "r95pETCCDI", "r99pETCCDI", "prcptotETCCDI", "altcddETCCDI", "altcwdETCCDI", "altcsdiETCCDI", "altwsdiETCCDI") + vars.by.src.data.reqd <- list(tmax=c("suETCCDI", "idETCCDI", "txxETCCDI", "txnETCCDI", "tx10pETCCDI", "tx90pETCCDI", "wsdiETCCDI", "altwsdiETCCDI"), + tmin=c("fdETCCDI", "trETCCDI", "tnxETCCDI", "tnnETCCDI", "tn10pETCCDI", "tn90pETCCDI", "csdiETCCDI", "altcsdiETCCDI"), + prec=c("rx1dayETCCDI", "rx5dayETCCDI", "sdiiETCCDI", "r10mmETCCDI", "r20mmETCCDI", "r1mmETCCDI", "cddETCCDI", "cwdETCCDI", "r95pETCCDI", "r99pETCCDI", "prcptotETCCDI", "altcddETCCDI", "altcwdETCCDI"), + tavg=c("gslETCCDI", "dtrETCCDI") ) + + if(any(!(source.data.present %in% c("tmin", "tmax", "tavg", "prec")))) + stop("Invalid variable listed in source.data.present.") + + if(all(c("tmax", "tmin") %in% source.data.present) && !("tavg" %in% source.data.present)) + source.data.present <- c(source.data.present, "tavg") + + climdex.vars <- unlist(vars.by.src.data.reqd[source.data.present]) + if(!is.null(climdex.vars.subset)) + climdex.vars <- climdex.vars[climdex.vars %in% paste(climdex.vars.subset, "ETCCDI", sep="")] + + freq.lists <- list(c("mon", "yr"), c("yr")) + dat <- switch(time.res, + all=unlist(lapply(climdex.vars, function(x) { paste(x, freq.lists[[(x %in% annual.only) + 1]], sep="_") })), + annual=paste(climdex.vars, "yr", sep="_"), + monthly=paste(climdex.vars[!(climdex.vars %in% annual.only)], "mon", sep="_")) + + names(dat) <- NULL + + return(dat) +} + +#' Returns a list of Climdex functions, with parameters curried in. +#' +#' Returns a list of Climdex functions, with parameters curried in. +#' +#' This function takes a variable list (as created by \code{\link{get.climdex.variable.list}}) and creates a list of functions corresponding to the specified indices, with parameters such as time resolution curried in. This allows for these functions to be called with just the \code{climdexInput} object as an argument, easing the automation of computing indices. +#' +#' @param vars.list The variable list, as created by \code{\link{get.climdex.variable.list}}. +#' @param fclimdex.compatible Whether to create fclimdex compatible functions. +#' @return A list of functions, named by the variable they compute. +#' +#' @examples +#' ## Get Climdex functions for a variable list with all appropriate params +#' ## curried in, so that all they take is a ClimdexInput object. 
+#' cdx.funcs <- get.climdex.functions(get.climdex.variable.list(c("tmax", "tmin"))) +#' +#' @export +get.climdex.functions <- function(vars.list, fclimdex.compatible=TRUE) { + func.names <- c("climdex.fd", "climdex.su", "climdex.id", "climdex.tr", "climdex.gsl", + "climdex.txx", "climdex.tnx", "climdex.txn", "climdex.tnn", "climdex.tn10p", "climdex.tx10p", "climdex.tn90p", "climdex.tx90p", + "climdex.txx", "climdex.tnx", "climdex.txn", "climdex.tnn", "climdex.tn10p", "climdex.tx10p", "climdex.tn90p", "climdex.tx90p", + "climdex.wsdi", "climdex.csdi", + "climdex.dtr", "climdex.rx1day", "climdex.rx5day", + "climdex.dtr", "climdex.rx1day", "climdex.rx5day", + "climdex.sdii", "climdex.r10mm", "climdex.r20mm", "climdex.rnnmm", "climdex.cdd", "climdex.cwd", "climdex.r95ptot", "climdex.r99ptot", "climdex.prcptot", + "climdex.cdd", "climdex.cwd", "climdex.csdi", "climdex.wsdi") + + el <- list() + af <- list(freq="annual") + mf <- list(freq="monthly") + cwdd.opts <- list(spells.can.span.years=TRUE) + altcwdd.opts <- list(spells.can.span.years=FALSE) + wcsdi.opts <- list(spells.can.span.years=FALSE) + altwcsdi.opts <- list(spells.can.span.years=TRUE) + rx5day.opts <- list(center.mean.on.last.day=fclimdex.compatible) + r1mm.opts <- list(threshold=1) + options <- list(el, el, el, el, el, + mf, mf, mf, mf, mf, mf, mf, mf, + af, af, af, af, af, af, af, af, + wcsdi.opts, wcsdi.opts, + mf, mf, c(mf, rx5day.opts), + af, af, c(af, rx5day.opts), + el, el, el, r1mm.opts, cwdd.opts, cwdd.opts, el, el, el, + altcwdd.opts, altcwdd.opts, altwcsdi.opts, altwcsdi.opts) + + func <- lapply(1:length(func.names), function(n) do.call(functional::Curry, c(list(getFromNamespace(func.names[n], 'climdex.pcic')), options[[n]]))) + names(func) <- c("fdETCCDI_yr", "suETCCDI_yr", "idETCCDI_yr", "trETCCDI_yr", "gslETCCDI_yr", + "txxETCCDI_mon", "tnxETCCDI_mon", "txnETCCDI_mon", "tnnETCCDI_mon", "tn10pETCCDI_mon", "tx10pETCCDI_mon", "tn90pETCCDI_mon", "tx90pETCCDI_mon", + "txxETCCDI_yr", "tnxETCCDI_yr", "txnETCCDI_yr", "tnnETCCDI_yr", "tn10pETCCDI_yr", "tx10pETCCDI_yr", "tn90pETCCDI_yr", "tx90pETCCDI_yr", + "wsdiETCCDI_yr", "csdiETCCDI_yr", + "dtrETCCDI_mon", "rx1dayETCCDI_mon", "rx5dayETCCDI_mon", + "dtrETCCDI_yr", "rx1dayETCCDI_yr", "rx5dayETCCDI_yr", + "sdiiETCCDI_yr", "r10mmETCCDI_yr", "r20mmETCCDI_yr", "r1mmETCCDI_yr", "cddETCCDI_yr", "cwdETCCDI_yr", "r95pETCCDI_yr", "r99pETCCDI_yr", "prcptotETCCDI_yr", + "altcddETCCDI_yr", "altcwdETCCDI_yr", "altcsdiETCCDI_yr", "altwsdiETCCDI_yr") + + return(func[vars.list]) +} + +#' Returns metadata for specified Climdex variables +#' +#' Returns metadata for specified Climdex variables. +#' +#' This function returns metadata suitable for use in NetCDF files for the specified variables. +#' +#' @param vars.list The list of variables, as returned by \code{\link{get.climdex.variable.list}}. +#' @param template.filename The filename template to be used when generating filenames. +#' @return A data frame containing the following: +#' \itemize{ +#' \item{long.name}{Long names for the variable} +#' \item{var.name}{Variable name for use in the file} +#' \item{units}{Units for the variable} +#' \item{annual}{Whether the variable is annual} +#' \item{base.period.attr}{Whether to include a base period attribute} +#' \item{standard.name}{Standard name to use for the variable} +#' \item{filename}{Filename to be written out} +#' } +#' +#' @examples +#' ## Get metadata (including filenames) for specified variables. 
+#' fn <- "pr_day_BCCAQ+ANUSPLIN300+MRI-CGCM3_historical+rcp85_r1i1p1_19500101-21001231.nc" +#' var.list2 <- get.climdex.variable.list("prec", time.resolution="annual") +#' md <- get.climdex.variable.metadata(var.list2, fn) +#' +#' @export +get.climdex.variable.metadata <- function(vars.list, template.filename) { + all.data <- data.frame(long.name=c("Number of Frost Days", "Number of Summer Days", "Number of Icing Days", "Number of Tropical Nights", "Growing Season Length", + "Monthly Maximum of Daily Maximum Temperature", "Monthly Maximum of Daily Minimum Temperature", + "Monthly Minimum of Daily Maximum Temperature", "Monthly Minimum of Daily Minimum Temperature", + "Percentage of Days when Daily Minimum Temperature is Below the 10th Percentile", "Percentage of Days when Daily Maximum Temperature is Below the 10th Percentile", + "Percentage of Days when Daily Minimum Temperature is Above the 90th Percentile", "Percentage of Days when Daily Maximum Temperature is Above the 90th Percentile", + "Annual Maximum of Daily Maximum Temperature", "Annual Maximum of Daily Minimum Temperature", + "Annual Minimum of Daily Maximum Temperature", "Annual Minimum of Daily Minimum Temperature", + "Percentage of Days when Daily Minimum Temperature is Below the 10th Percentile", "Percentage of Days when Daily Maximum Temperature is Below the 10th Percentile", + "Percentage of Days when Daily Minimum Temperature is Above the 90th Percentile", "Percentage of Days when Daily Maximum Temperature is Above the 90th Percentile", + "Warm Spell Duration Index", "Cold Spell Duration Index", + "Mean Diurnal Temperature Range", "Monthly Maximum 1-day Precipitation", "Monthly Maximum Consecutive 5-day Precipitation", + "Mean Diurnal Temperature Range", "Annual Maximum 1-day Precipitation", "Annual Maximum Consecutive 5-day Precipitation", + "Simple Precipitation Intensity Index", "Annual Count of Days with At Least 10mm of Precipitation", + "Annual Count of Days with At Least 20mm of Precipitation", "Annual Count of Days with At Least 1mm of Precipitation", + "Maximum Number of Consecutive Days with Less Than 1mm of Precipitation", "Maximum Number of Consecutive Days with At Least 1mm of Precipitation", + "Annual Total Precipitation when Daily Precipitation Exceeds the 95th Percentile of Wet Day Precipitation", + "Annual Total Precipitation when Daily Precipitation Exceeds the 99th Percentile of Wet Day Precipitation", "Annual Total Precipitation in Wet Days", + "Maximum Number of Consecutive Days Per Year with Less Than 1mm of Precipitation", "Maximum Number of Consecutive Days Per Year with At Least 1mm of Precipitation", + "Cold Spell Duration Index Spanning Years", "Warm Spell Duration Index Spanning Years"), + var.name=c("fdETCCDI", "suETCCDI", "idETCCDI", "trETCCDI", "gslETCCDI", + "txxETCCDI", "tnxETCCDI", "txnETCCDI", "tnnETCCDI", "tn10pETCCDI", "tx10pETCCDI", "tn90pETCCDI", "tx90pETCCDI", + "txxETCCDI", "tnxETCCDI", "txnETCCDI", "tnnETCCDI", "tn10pETCCDI", "tx10pETCCDI", "tn90pETCCDI", "tx90pETCCDI", + "wsdiETCCDI", "csdiETCCDI", + "dtrETCCDI", "rx1dayETCCDI", "rx5dayETCCDI", + "dtrETCCDI", "rx1dayETCCDI", "rx5dayETCCDI", + "sdiiETCCDI", "r10mmETCCDI", "r20mmETCCDI", "r1mmETCCDI", "cddETCCDI", "cwdETCCDI", "r95pETCCDI", "r99pETCCDI", "prcptotETCCDI", + "altcddETCCDI", "altcwdETCCDI", "altcsdiETCCDI", "altwsdiETCCDI"), + units=c("days", "days", "days", "days", "days", + "degrees_C", "degrees_C", "degrees_C", "degrees_C", "%", "%", "%", "%", + "degrees_C", "degrees_C", "degrees_C", "degrees_C", "%", "%", "%", 
"%", + "days", "days", + "degrees_C", "mm", "mm", + "degrees_C", "mm", "mm", + "mm d-1", "days", "days", "days", "days", "days", "mm", "mm", "mm", + "days", "days", "days", "days"), + annual=c(T, T, T, T, T, + F, F, F, F, F, F, F, F, + T, T, T, T, T, T, T, T, + T, T, + F, F, F, + T, T, T, + T, T, T, T, T, T, T, T, T, + T, T, T, T), + base.period.attr=c(F, F, F, F, F, + F, F, F, F, T, T, T, T, + F, F, F, F, T, T, T, T, + T, T, + F, F, F, + F, F, F, + F, F, F, F, F, F, T, T, F, + F, F, T, T), + row.names=c("fdETCCDI_yr", "suETCCDI_yr", "idETCCDI_yr", "trETCCDI_yr", "gslETCCDI_yr", + "txxETCCDI_mon", "tnxETCCDI_mon", "txnETCCDI_mon", "tnnETCCDI_mon", "tn10pETCCDI_mon", "tx10pETCCDI_mon", "tn90pETCCDI_mon", "tx90pETCCDI_mon", + "txxETCCDI_yr", "tnxETCCDI_yr", "txnETCCDI_yr", "tnnETCCDI_yr", "tn10pETCCDI_yr", "tx10pETCCDI_yr", "tn90pETCCDI_yr", "tx90pETCCDI_yr", + "wsdiETCCDI_yr", "csdiETCCDI_yr", + "dtrETCCDI_mon", "rx1dayETCCDI_mon", "rx5dayETCCDI_mon", + "dtrETCCDI_yr", "rx1dayETCCDI_yr", "rx5dayETCCDI_yr", + "sdiiETCCDI_yr", "r10mmETCCDI_yr", "r20mmETCCDI_yr", "r1mmETCCDI_yr", "cddETCCDI_yr", "cwdETCCDI_yr", "r95pETCCDI_yr", "r99pETCCDI_yr", "prcptotETCCDI_yr", + "altcddETCCDI_yr", "altcwdETCCDI_yr", "altcsdiETCCDI_yr", "altwsdiETCCDI_yr"), + stringsAsFactors=FALSE) + + standard.name.lookup <- c(fdETCCDI="number_frost_days", suETCCDI="number_summer_days", idETCCDI="number_icing_days", trETCCDI="number_tropical_nights", gslETCCDI="growing_season_length", + txxETCCDI="maximum_daily_maximum_temperature", tnxETCCDI="maximum_daily_minimum_temperature", txnETCCDI="minimum_daily_maximum_temperature", tnnETCCDI="minimum_daily_minimum_temperature", + tn10pETCCDI="percent_days_when_daily_minimum_temperature_below_10p", tx10pETCCDI="percent_days_when_daily_maximum_temperature_below_10p", + tn90pETCCDI="percent_days_when_daily_minimum_temperature_above_90p", tx90pETCCDI="percent_days_when_daily_maximum_temperature_above_90p", + wsdiETCCDI="warm_spell_duration_index", csdiETCCDI="cold_spell_duration_index", dtrETCCDI="diurnal_temperature_range", + altwsdiETCCDI="warm_spell_duration_index", altcsdiETCCDI="cold_spell_duration_index", + rx1dayETCCDI="maximum_1day_precipitation", rx5dayETCCDI="maximum_5day_precipitation", sdiiETCCDI="simple_precipitation_intensity_index", + r10mmETCCDI="count_days_more_than_10mm_precipitation", r20mmETCCDI="count_days_more_than_20mm_precipitation", r1mmETCCDI="count_days_more_than_1mm_precipitation", + cddETCCDI="maximum_number_consecutive_dry_days", cwdETCCDI="maximum_number_consecutive_wet_days", + altcddETCCDI="maximum_number_consecutive_dry_days", altcwdETCCDI="maximum_number_consecutive_wet_days", + r95pETCCDI="total_precipitation_exceeding_95th_percentile", r99pETCCDI="total_precipitation_exceeding_99th_percentile", prcptotETCCDI="total_wet_day_precipitation") + + all.data$standard.name <- standard.name.lookup[all.data$var.name] + + all.data$filename <- create.climdex.cmip5.filenames(ncdf4.helpers::get.split.filename.cmip5(template.filename), rownames(all.data)) + return(all.data[vars.list,]) +} + +get.output.time.data <- function(ts, time.origin.PCICt, time.units, time.dim.name, time.bnds.name, bnds.dim, res=c("year", "month"), origin="1970-01-01") { + res <- match.arg(res) + time.bounds <- ncdf4.helpers::nc.make.time.bounds(ts, res) + time.series <- PCICt::as.PCICt.numeric((unclass(time.bounds[1,]) + unclass(time.bounds[2,])) / 2, cal=attr(time.bounds, "cal"), origin=origin) + time.bounds.days <- as.numeric(julian(time.bounds, origin=time.origin.PCICt)) + time.days 
<- as.numeric(julian(time.series, origin=time.origin.PCICt)) + time.dim <- ncdf4::ncdim_def(time.dim.name, units=time.units, vals=time.days, unlim=TRUE, longname='') + time.bnds.var <- ncdf4::ncvar_def(time.bnds.name, '', list(bnds.dim, time.dim), longname='', prec="double") + return(list(time.dim=time.dim, time.bnds.var=time.bnds.var, time.bnds.data=time.bounds.days)) +} + +#' Creates output files for Climdex variables. +#' +#' Creates output files for Climdex variables. +#' +#' This function creates a set of output files for the set of variable parameters passed in \code{cdx.dat}, as created by \code{\link{get.climdex.variable.metadata}}. It copies metadata from input files as appropriate and adds new metadata as required. +#' +#' @param cdx.dat The variable description data, as created by \code{\link{get.climdex.variable.metadata}}. +#' @param f The file(s) being used as input. +#' @param v.f.idx A mapping from variables to files, as created by \code{\link{get.var.file.idx}}. +#' @param variable.name.map A mapping from standardized names (tmax, tmin, prec) to NetCDF variable names. +#' @param ts The associated time data, as created by \code{nc.get.time.series}. +#' @param time.origin The time origin, as specified in the source NetCDF file(s). +#' @param base.range The base range; a vector of two numeric years. +#' @param out.dir The output directory name. +#' @param author.data A vector containing named elements describing the author; see \code{\link{create.indices.from.files}}. +#' @return A list of objects of type \code{ncdf4}. +#' +#' @examples +#' \donttest{ +#' ## Establish basic inputs. +#' author.data <- list(institution="Looney Bin", institution_id="LBC") +#' input.files <- c("pr_NAM44_CanRCM4_ERAINT_r1i1p1_1989-2009.nc") +#' +#' ## Prepare derived inputs. +#' f <- lapply(input.files, ncdf4::nc_open) +#' variable.name.map <- c(tmax="tasmax", tmin="tasmin", prec="pr") +#' f.meta <- create.file.metadata(f, variable.name.map) +#' climdex.var.list <- get.climdex.variable.list(names(f.meta$v.f.idx), "all", NULL) +#' cdx.meta <- get.climdex.variable.metadata(climdex.var.list, input.files[1]) +#' +#' ## Create output files +#' cdx.ncfile <- create.ncdf.output.files(cdx.meta, f, f.meta$v.f.idx, variable.name.map, +#' f.meta$ts, get.time.origin(f, f.meta$dim.axes), +#' c(1981,1990), "/foo", author.data) +#' } +#' +#' @export +create.ncdf.output.files <- function(cdx.dat, f, v.f.idx, variable.name.map, ts, time.origin, base.range, out.dir, author.data) { + f.example <- f[[v.f.idx[1]]] + v.example <- variable.name.map[names(v.f.idx)[1]] + time.dim.name <- ncdf4.helpers::nc.get.dim.for.axis(f.example, v.example, "T")$name + old.time.bnds.att <- ncdf4::ncatt_get(f.example, time.dim.name, "bounds") + time.bnds.name <- if(old.time.bnds.att$hasatt) old.time.bnds.att$value else paste(time.dim.name, "bnds", sep="_") + + ## Create new time dimensions + time.origin.PCICt <- PCICt::as.PCICt.default(time.origin, cal=attr(ts, "cal")) + time.units <- paste("days since", time.origin) + + input.bounds <- ncdf4.helpers::nc.get.dim.bounds.var.list(f.example, v.example) + ## FIXME: I'm not sure how solid the assumption about the location of bnds here is. 
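+  ## (In CF-style files, ncdf4 lists the size-2 vertex dimension first on the
+  ## bounds variable, which is what $dim[[1]] picks up below; a file ordering
+  ## its bounds dimensions differently would defeat this lookup.)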
+ bnds <- if(length(input.bounds) > 0) f.example$var[[input.bounds[1]]]$dim[[1]] else ncdf4::ncdim_def("bnds", "", 1:2, create_dimvar=FALSE) + time.dat <- list(annual=get.output.time.data(ts, time.origin.PCICt, time.units, time.dim.name, time.bnds.name, bnds, res="year"), + monthly=get.output.time.data(ts, time.origin.PCICt, time.units, time.dim.name, time.bnds.name, bnds, res="month")) + + grid.mapping.att <- ncdf4::ncatt_get(f.example, v.example, "grid_mapping") + vars.to.copy <- c(input.bounds[input.bounds != time.bnds.name], names(ncdf4.helpers::nc.get.coordinate.axes(f.example, v.example)), if(grid.mapping.att$hasatt) grid.mapping.att$value) + vars.to.clone.atts.for <- c(vars.to.copy, ncdf4.helpers::nc.get.dim.names(f.example, v.example)) + vars.ncvars <- sapply(vars.to.copy, function(x) { f.example$var[[x]] }, simplify=FALSE) + vars.data <- lapply(vars.ncvars, function(ncvar) { if(length(ncvar$dim) == 0) NULL else ncdf4::ncvar_get(f.example, ncvar) }) + + return(lapply(1:length(cdx.dat$var.name), function(x) { + annual <- cdx.dat$annual[x] + time.for.file <- time.dat[[c("monthly", "annual")[1 + annual]]] + + ## Establish variables, create file + nc.var.list <- c(vars.ncvars, list(time.for.file$time.bnds.var, ncdf4::ncvar_def(name=cdx.dat$var.name[x], units=cdx.dat$units[x], dim=c(f.example$var[[v.example]]$dim[1:2], list(time.for.file$time.dim)), missval=1e20, longname=cdx.dat$long.name[x]))) + new.file <- ncdf4::nc_create(paste(out.dir, cdx.dat$filename[x], sep="/"), nc.var.list, force_v4=TRUE) + + ## Copy attributes for all variables plus global attributes + att.rename <- c("frequency"="input_frequency", "creation_date"="input_creation_date", "title"="input_title", "tracking_id"="input_tracking_id") + inst.id <- ncdf4::ncatt_get(f.example, 0, "institution_id") + if(inst.id$hasatt) { + att.rename.inst <- c("contact"="contact", "references"="references") + names(att.rename.inst) <- paste(inst.id$value, names(att.rename.inst), sep="_") + att.rename <- c(att.rename, att.rename.inst) + } + + ## Copy attributes with renaming and exclusions. + ncdf4.helpers::nc.copy.atts(f.example, 0, new.file, 0, definemode=TRUE, rename.mapping=att.rename) + ncdf4.helpers::nc.copy.atts(f.example, v.example, new.file, cdx.dat$var.name[x], definemode=TRUE, exception.list=c("units", "long_name", "standard_name", "base_period", "missing_value", "_FillValue", "add_", "valid_min", "valid_max", "valid_range", "scale_factor", "add_offset", "signedness", "history")) + for(v in vars.to.clone.atts.for) { + ncdf4.helpers::nc.copy.atts(f.example, v, new.file, v, definemode=TRUE) + } + ncdf4::ncatt_put(new.file, time.dim.name, "units", time.units, definemode=TRUE) + + ## Put additional attributes. + put.history.att(new.file, cdx.dat$var.name[x], definemode=TRUE) + put.ETCCDI.atts(new.file, c("mon", "yr")[1 + annual], ncdf4::ncatt_get(f.example, 0, "title")$value, author.data, definemode=TRUE) + if(cdx.dat$base.period.attr[x]) + ncdf4::ncatt_put(new.file, cdx.dat$var.name[x], "base_period", paste(base.range, collapse="-"), definemode=TRUE) + ncdf4::nc_enddef(new.file) + + ## Copy data from vars.to.copy and put time bounds. + ncdf4::ncvar_put(new.file, time.bnds.name, time.for.file$time.bnds.data) + for(v in vars.to.copy) + if(!is.null(vars.data[[v]])) + ncdf4::ncvar_put(new.file, v, vars.data[[v]]) + + new.file + })) +} + +## Get dim sizes, with checking to make sure sizes are all the same. 
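+## For illustration: with a 128x64 lon-lat grid and 3650 time steps this
+## returns c(128, 64, 3650) for every input variable (sizes hypothetical).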
+get.dim.size <- function(f, v.f.idx, variable.name.map) { + dim.size.list <- lapply(names(v.f.idx), function(i) { f[[v.f.idx[i]]]$var[[variable.name.map[i]]]$varsize }) + stopifnot(all.the.same(dim.size.list)) + dim.size.list[[1]] +} + +## Get dim axes, with checking to make sure they all have same axes. +get.dim.axes <- function(f, v.f.idx, variable.name.map) { + dim.axes.list <- lapply(names(v.f.idx), function(i) { ncdf4.helpers::nc.get.dim.axes(f[[v.f.idx[i]]], variable.name.map[i]) }) + stopifnot(all.the.same(dim.axes.list)) + dim.axes.list[[1]] +} + +## Get timeseries (as PCICt), with error checking to ensure input files have same TS. +## FIXME: This will need to be revised for fixed time dimensions. Should be identified by axis. +get.ts <- function(f) { + ts.list <- lapply(lapply(f, ncdf4.helpers::nc.get.time.series), trunc, "days") + stopifnot(all.the.same(ts.list)) + ts.list[[1]] +} + +## Compute all indices for a single grid box +#' Compute Climdex indices using provided data. +#' +#' Compute Climdex indices using provided data. +#' +#' Given the provided data and functions, compute the Climdex indices defined by the functions. +#' +#' @param in.dat The input data to compute indices on. +#' @param cdx.funcs The functions to be applied to the data, as created by \code{\link{get.climdex.functions}}. +#' @param ts The associated time data, as created by \code{nc.get.time.series}. +#' @param base.range The base range; a vector of two numeric years. +#' @param fclimdex.compatible Whether to make the results identical to those of fclimdex; this affects how the data in the base period is padded. +#' @return A list of data for each index. +#' +#' @examples +#' library(climdex.pcic) +#' +#' ## Prepare input data +#' in.dat <- list(tmax=ec.1018935.tmax$MAX_TEMP) +#' cdx.funcs <- get.climdex.functions(get.climdex.variable.list(names(in.dat))) +#' in.dat$northern.hemisphere <- TRUE +#' ts <- as.PCICt(do.call(paste, ec.1018935.tmax[,c("year", "jday")]), +#' format="%Y %j", cal="gregorian") +#' +#' ## Compute indices +#' res <- compute.climdex.indices(in.dat, cdx.funcs, ts, c(1981, 1990), FALSE) +#' +#' @export +compute.climdex.indices <- function(in.dat, cdx.funcs, ts, base.range, fclimdex.compatible) { + ci <- climdex.pcic::climdexInput.raw( + in.dat$tmax, in.dat$tmin, in.dat$prec, + if(is.null(in.dat$tmax)) NULL else ts, + if(is.null(in.dat$tmin)) NULL else ts, + if(is.null(in.dat$prec)) NULL else ts, + tavg=in.dat$tavg, tavg.dates=if(is.null(in.dat$tavg)) NULL else ts, + base.range=base.range, northern.hemisphere=in.dat$northern.hemisphere, + quantiles=in.dat$quantiles) + + ## NOTE: Names must be stripped here because it increases memory usage on the head by a factor of 8-9x (!) + return(lapply(cdx.funcs, function(f) { d <- f(ci=ci); names(d) <- NULL; d })) +} + +#' Flatten the X and Y dimensions down to a space dimension. +#' +#' Flatten the X and Y dimensions down to a space dimension. +#' +#' This function takes input data, a vector of dimensions to reduce to 1 dimension, and optionally a subset of dimnames to copy. It returns the data with the specified dimensions shrunk down to 1 dimension. +#' +#' @param dat The data to operate on. +#' @param reduce.dims The names or indices of the dimensions to reduce to 1 dimension. +#' @param names.subset Optionally, a subset of dimension names to copy. +#' @return The data with the specified dimensions reduced to 1 dimension. +#' +#' @note The dimensions to reduce must be adjoining dimensions. 
+#' +#' @examples +#' ## Take example data and flatten the last two dims down to one. +#' dat <- structure(1:8, .Dim=c(2, 2, 2)) +#' dat.flat <- flatten.dims(dat, 2:3) +#' +#' @export +flatten.dims <- function(dat, reduce.dims, names.subset) { + stopifnot(all(diff(reduce.dims) == 1)) + dat.dim <- dim(dat) + if(!missing(names.subset)) + dat.dimnames <- dimnames(dat) + before.reduce <- 1:length(dat.dim) < min(reduce.dims) + after.reduce <- 1:length(dat.dim) > max(reduce.dims) + new.dims <- c(dat.dim[before.reduce], prod(dat.dim[reduce.dims]), dat.dim[after.reduce]) + dim(dat) <- new.dims + if(!missing(names.subset)) + dimnames(dat) <- dat.dimnames[names.subset] + return(dat) +} + +## FIXME: Handle time-minor data gracefully. +#' Retrieve and convert data to correct units and dimensions. +#' +#' Retrieve and convert data to correct units and dimensions. +#' +#' This function retrieves NetCDF data for the specified subset from the specified file and variable; converts from \code{src.units} to \code{dest.units}, transposes the data to (T, S) dimensionality, and returns the result. +#' +#' @param f The NetCDF file to retrieve data from; an object of class \code{ncdf4}. +#' @param v The variable to retrieve data from. +#' @param subset The subset to retrieve. +#' @param src.units The source units to convert data from. +#' @param dest.units The destination units to convert to. +#' @param dim.axes The dimension axes to be used. +#' @return The retrieved and massaged data. +#' +#' @examples +#' \donttest{get.data(f, "pr", list(Y=3), "kg m-2 s-1", "kg m-2 s-1", c(X="lon",Y="lat",T="time"))} +#' +#' @export +get.data <- function(f, v, subset, src.units, dest.units, dim.axes) { + stopifnot(inherits(f, "ncdf4")) + dat <- if(!missing(src.units) && !missing(dest.units)) + udunits2::ud.convert(ncdf4.helpers::nc.get.var.subset.by.axes(f, v, subset), src.units, dest.units) + else + ncdf4.helpers::nc.get.var.subset.by.axes(f, v, subset) + + reduce.dims <- which(dim.axes %in% c("X", "Y", "Z")) + return(t(flatten.dims(dat, reduce.dims=reduce.dims))) +} + +## Produce slab of northern.hemisphere booleans of the same shape as the data. +#' Determine what portions of a subset are within the northern hemisphere. +#' +#' Determine what portions of a subset are within the northern hemisphere. +#' +#' Given a subset, a file, a variable, and a projection, determine what positions are within the northern hemisphere, returning the result as an array of booleans. +#' +#' @param subset The subset to use. +#' @param f The NetCDF file to use; an object of class \code{ncdf4}. +#' @param v The variable in question. +#' @param projection The proj4 string to use; NULL if the data is not in a projected coordinate space. +#' @return An array of booleans corresponding to the subset containing TRUE if the point is within the northern hemisphere, and FALSE otherwise. +#' +#' @examples +#' \donttest{ +#' ## Open files, etc. 
+#' input.files <- c("tasmax_NAM44_CanRCM4_ERAINT_r1i1p1_1989-2009.nc") +#' f <- list(nc_open(input.files)) +#' f.v <- lapply(f, ncdf4.helpers::nc.get.variable.list, min.dims=2) +#' bools <- get.northern.hemisphere.booleans(list(X=1:2, Y=1:2), f[[1]], f.v[[1]], NULL) +#' } +#' +#' @export +get.northern.hemisphere.booleans <- function(subset, f, v, projection) { + y.dim <- ncdf4.helpers::nc.get.dim.for.axis(f, v, "Y") + x.dim <- ncdf4.helpers::nc.get.dim.for.axis(f, v, "X") + y.subset.vals <- rep(y.dim$vals[if(is.null(subset$Y)) 1:y.dim$len else subset$Y], + each=(if(is.null(subset$X)) x.dim$len else length(subset$X))) + if(!is.null(projection)) { + x.subset.vals <- rep(x.dim$vals[if(is.null(subset$X)) 1:x.dim$len else subset$X], + (if(is.null(subset$Y)) y.dim$len else length(subset$Y))) + dat <- proj4::project(list(x=x.subset.vals, y=y.subset.vals), projection, inverse=TRUE, ellps.default=NA) + return(dat$y >= 0) + } else + return(y.subset.vals >= 0) +} + +#' Extract a single quantiles object from a set of thresholds. +#' +#' Extract a single quantiles object from a set of thresholds. +#' +#' From a set of thresholds as retrieved from one or more NetCDF files containing thresholds, this function extracts a single point and converts the format to one suitable for passing to \code{climdexInput.raw}. +#' +#' @param thresholds The thresholds, as extracted by \code{\link{get.thresholds.chunk}}. +#' @param idx The index to extract. +#' @return A quantiles object suitable for passing to \code{climdexInput.raw} as the \code{quantiles} argument. +#' +#' @examples +#' \donttest{ +#' ## Define mappings and filenames. +#' thresholds.name.map <- c(tx10thresh="tx10thresh", tn10thresh="tn10thresh", tx90thresh="tx90thresh", +#' tn90thresh="tn90thresh", r95thresh="r95thresh", r99thresh="r99thresh") +#' thresh.files <- "thresholds.nc" +#' +#' ## Open files, etc. +#' cdx.funcs <- get.climdex.functions(get.climdex.variable.list("tmax")) +#' thresholds.netcdf <- lapply(thresh.files, nc_open) +#' t.f.idx <- get.var.file.idx(thresholds.name.map, lapply(thresholds.netcdf, +#' ncdf4.helpers::nc.get.variable.list, min.dims=2)) +#' +#' ## Get thresholds chunk. 
+#' dat <- get.thresholds.chunk(list(Y=1), cdx.funcs, thresholds.netcdf, t.f.idx, thresholds.name.map) +#' +#' ## Get quantiles object for index 2 +#' q <- get.quantiles.object(dat, 2) +#' } +#' +#' @export +get.quantiles.object <- function(thresholds, idx) { + if(is.null(thresholds)) + return(NULL) + + thresh.path.2d <- list(tx10thresh=c("tmax", "outbase", "q10"), + tx90thresh=c("tmax", "outbase", "q90"), + tn10thresh=c("tmin", "outbase", "q10"), + tn90thresh=c("tmin", "outbase", "q90")) + thresh.path.1d <- list(r95thresh=c("prec", "q95"), + r99thresh=c("prec", "q99")) + result <- list() + + + recursive.append <- function(x, l, data) { + if(length(x) == 0) return(data) + if(is.null(l)) l <- list() + return(c(l[!(names(l) %in% x[1])], structure(list(recursive.append(tail(x, n=-1), l[[x[1]]], data)), .Names=x[1]))) + } + + + for(threshold.var in names(thresh.path.2d)[names(thresh.path.2d) %in% names(thresholds)]) + result <- recursive.append(thresh.path.2d[[threshold.var]], result, thresholds[[threshold.var]][,idx]) + + for(threshold.var in names(thresh.path.1d)[names(thresh.path.1d) %in% names(thresholds)]) { + thresh.path <- thresh.path.1d[[threshold.var]] + result[[thresh.path[1]]] <- c(result[[thresh.path[1]]], structure(thresholds[[threshold.var]][idx], .Names=thresh.path[2])) + } + + return(result) +} + +#' Compute Climdex indices for a subset / stripe +#' +#' Compute Climdex indices for a subset / stripe +#' +#' Given a subset, a set of Climdex functions (as created by \code{\link{get.climdex.functions}}), and ancillary data, load and convert data, create a climdexInput object for each point, run all of the functions in \code{cdx.funcs} on that data, and return the result. +#' +#' @param subset The subset to use. +#' @param cdx.funcs The functions to be applied to the data, as created by \code{\link{get.climdex.functions}}. +#' @param ts The associated time data, as created by \code{nc.get.time.series}. +#' @param base.range The base range; a vector of two numeric years. +#' @param dim.axes The dimension axes for the input data. +#' @param v.f.idx A mapping from variables to files, as created by \code{\link{get.var.file.idx}}. +#' @param variable.name.map A mapping from standardized names (tmax, tmin, prec) to NetCDF variable names. +#' @param src.units The source units to convert data from. +#' @param dest.units The destination units to convert to. +#' @param t.f.idx A mapping from threshold variables to threshold files, as created by \code{\link{get.var.file.idx}}. +#' @param thresholds.name.map A mapping from standardized names (tx10thresh, tn90thresh, etc) to NetCDF variable names. +#' @param fclimdex.compatible Whether to make the results identical to those of fclimdex; this affects how the data in the base period is padded. +#' @param projection A proj4 string representing the projection the data is in. +#' @param f A list of objects of type \code{ncdf4}, consisting of the open input files. If missing, will be pulled from the global namespace. +#' @param thresholds.netcdf A list of objects of type \code{ncdf4}, consisting of the open threshold files. If missing, will be pulled from the global namespace. +#' +#' @note This function relies on an object named 'f' and containing the opened NetCDF files being part of the global namespace. +#' +#' @examples +#' \donttest{ +#' ## Define mappings and filenames. 
+#' author.data <- list(institution="Looney Bin", institution_id="LBC") +#' input.files <- c("pr_NAM44_CanRCM4_ERAINT_r1i1p1_1989-2009.nc") +#' variable.name.map <- c(tmax="tasmax", tmin="tasmin", prec="pr") +#' +#' ## Open files, etc. +#' cdx.funcs <- get.climdex.functions(get.climdex.variable.list("tmax")) +#' f <- lapply(input.files, ncdf4::nc_open) +#' f.meta <- create.file.metadata(f, variable.name.map) +#' climdex.var.list <- get.climdex.variable.list(names(f.meta$v.f.idx), "all", NULL) +#' cdx.meta <- get.climdex.variable.metadata(climdex.var.list, input.files[1]) +#' +#' ## Compute indices for stripe +#' cdx <- compute.indices.for.stripe(list(Y=1), cdx.funcs, f.meta$ts, c(1981, 1990), f.meta$dim.axes, +#' f.meta$v.f.idx, variable.name.map, f.meta$src.units, f.meta$dest.units, +#' t.f.idx, NULL, f=f, thresholds.netcdf=NULL) +#' } +#' +#' @export +compute.indices.for.stripe <- function(subset, cdx.funcs, ts, base.range, dim.axes, v.f.idx, variable.name.map, src.units, dest.units, t.f.idx, thresholds.name.map, fclimdex.compatible=TRUE, projection=NULL, f, thresholds.netcdf) { + f <- if(missing(f)) get("f", .GlobalEnv) else f + thresholds.netcdf <- if(missing(thresholds.netcdf)) get("thresholds.netcdf", .GlobalEnv) else thresholds.netcdf + + ## Dimension order: Time, Space for each Var in list + data.list <- sapply(names(v.f.idx), function(x) { gc(); get.data(f[[v.f.idx[x]]], variable.name.map[x], subset, src.units[x], dest.units[x], dim.axes) }, simplify=FALSE) + gc() + + northern.hemisphere <- get.northern.hemisphere.booleans(subset, f[[v.f.idx[1]]], variable.name.map[names(v.f.idx)[1]], projection) + + thresholds <- if(is.null(thresholds.netcdf)) NULL else get.thresholds.chunk(subset, cdx.funcs, thresholds.netcdf, t.f.idx, thresholds.name.map) + return(lapply(1:(dim(data.list[[1]])[2]), function(x) { + dat.list <- sapply(names(data.list), function(name) { data.list[[name]][,x] }, simplify=FALSE) + ## Fast-path the all-NA case. + if(all(sapply(dat.list, function(x) { all(is.na(x)) }))) { + ## We don't need to pad this out to full length; cbind will do that for us. + return(structure(as.list(rep(NA, length(cdx.funcs))), .Names=names(cdx.funcs))) + } else { + indices.input <- c(dat.list, northern.hemisphere=northern.hemisphere[x], list(quantiles=get.quantiles.object(thresholds, x))) + return(compute.climdex.indices(indices.input, cdx.funcs, ts, base.range, fclimdex.compatible)) + } + })) +} + +#' Retrieve thresholds for a subset +#' +#' Retrieve thresholds for a subset +#' +#' Given a subset, a set of Climdex functions (as created by \code{\link{get.climdex.functions}}), and ancillary data, load the thresholds required for the functions being called and return them. +#' +#' @param subset The subset to use. +#' @param cdx.funcs The functions to be applied to the data, as created by \code{\link{get.climdex.functions}}. +#' @param thresholds.netcdf One or more NetCDF files containing thresholds. +#' @param t.f.idx A mapping from threshold variables to threshold files, as created by \code{\link{get.var.file.idx}}. +#' @param thresholds.name.map A mapping from standardized names (tx10thresh, tn90thresh, etc) to NetCDF variable names. +#' +#' @examples +#' \donttest{ +#' ## Define mappings and filenames. +#' thresholds.name.map <- c(tx10thresh="tx10thresh", tn10thresh="tn10thresh", tx90thresh="tx90thresh", +#' tn90thresh="tn90thresh", r95thresh="r95thresh", r99thresh="r99thresh") +#' thresh.files <- "thresholds.nc" +#' +#' ## Open files, etc. 
+#' cdx.funcs <- get.climdex.functions(get.climdex.variable.list("tmax")) +#' thresholds.netcdf <- lapply(thresh.files, nc_open) +#' t.f.idx <- get.var.file.idx(thresholds.name.map, lapply(thresholds.netcdf, +#' ncdf4.helpers::nc.get.variable.list, min.dims=2)) +#' +#' ## Get thresholds chunk. +#' dat <- get.thresholds.chunk(list(Y=1), cdx.funcs, thresholds.netcdf, t.f.idx, thresholds.name.map) +#' } +#' +#' @export +get.thresholds.chunk <- function(subset, cdx.funcs, thresholds.netcdf, t.f.idx, thresholds.name.map) { + var.thresh.map <- list(tx10thresh=c("tx10p"), tx90thresh=c("tx90p", "WSDI"), tn10thresh=c("tn10p", "CSDI"), tn90thresh=c("tn90p"), r95thresh=c("r95p"), r99thresh=c("r99p")) + + cdx.names <- names(cdx.funcs) + thresh.var.needed <- names(var.thresh.map)[sapply(var.thresh.map, function(x) { any(unlist(lapply(x, function(substr) { any(grepl(substr, cdx.names)) }))) })] + stopifnot(all(thresh.var.needed %in% names(t.f.idx))) + return(sapply(thresh.var.needed, function(threshold.var) { + dim.axes <- ncdf4.helpers::nc.get.dim.axes(thresholds.netcdf[[t.f.idx[threshold.var]]], thresholds.name.map[threshold.var]); + return(get.data(thresholds.netcdf[[t.f.idx[threshold.var]]], thresholds.name.map[threshold.var], subset, dim.axes=dim.axes)) + }, simplify=FALSE)) +} + +## Write out results for variables computed +#' Write out computed climdex results +#' +#' Write out computed climdex results +#' +#' Given a set of Climdex results, a subset, a set of files, and dimension sizes, write out the data to the appropriate files. +#' +#' @param climdex.results The results to write out. +#' @param chunk.subset The corresponding subset. +#' @param cdx.ncfile The list of NetCDF files to write the results out to. +#' @param dim.size The overall size of the input data. +#' @param cdx.varname The list of NetCDF variable names for the files in \code{cdx.ncfile}. +#' +#' @examples +#' \donttest{ +#' ## Define mappings and filenames. +#' author.data <- list(institution="Looney Bin", institution_id="LBC") +#' input.files <- c("pr_NAM44_CanRCM4_ERAINT_r1i1p1_1989-2009.nc") +#' variable.name.map <- c(tmax="tasmax", tmin="tasmin", prec="pr") +#' +#' ## Open files, etc. 
+#' cdx.funcs <- get.climdex.functions("tmax") +#' f <- lapply(input.files, ncdf4::nc_open) +#' f.meta <- create.file.metadata(f, variable.name.map) +#' climdex.var.list <- get.climdex.variable.list(names(f.meta$v.f.idx), "all", NULL) +#' cdx.meta <- get.climdex.variable.metadata(climdex.var.list, input.files[1]) +#' +#' ## Create output files +#' cdx.ncfile <- create.ncdf.output.files(cdx.meta, f, f.meta$v.f.idx, variable.name.map, +#' f.meta$ts, get.time.origin(f, f.meta$dim.axes), +#' c(1981,1990), "/foo", author.data) +#' +#' ## Compute indices for stripe +#' cdx <- compute.indices.for.stripe(list(Y=1), cdx.funcs, f.meta$ts, c(1991, 2000), f.meta$dim.axes, +#' f.meta$v.f.idx, variable.name.map, f.meta$src.units, f.meta$dest.units, +#' t.f.idx, NULL, f=f, thresholds.netcdf=NULL) +#' +#' ## Write out indices +#' write.climdex.results(cdx, list(Y=1), cdx.ncfile, f.meta$dim.size, cdx.meta$varname) +#' } +#' +#' @export +write.climdex.results <- function(climdex.results, chunk.subset, cdx.ncfile, dim.size, cdx.varname) { + xy.dims <- dim.size[1:2] + if(!is.null(chunk.subset$X)) + xy.dims[1] <- length(chunk.subset$X) + if(!is.null(chunk.subset$Y)) + xy.dims[2] <- length(chunk.subset$Y) + + ## Write out results, variable by variable + lapply(1:length(cdx.ncfile), function(v) { + dat <- t(do.call(cbind, lapply(climdex.results, function(cr) { cr[[v]] }))) + t.dim.len <- ncdf4.helpers::nc.get.dim.for.axis(cdx.ncfile[[v]], cdx.varname[v], "T")$len + + ## If data is of length 1, it's an error. + if(length(dat) == 1) + stop(dat) + + ## Special case of an entire slab missing values... repeat such that we have full data. + if(prod(dim(dat)) != prod(c(xy.dims, t.dim.len))) + dat <- rep(dat, t.dim.len) + + dim(dat) <- c(xy.dims, t.dim.len) + ncdf4.helpers::nc.put.var.subset.by.axes(cdx.ncfile[[v]], cdx.varname[v], dat, chunk.subset) + }) + invisible(0) +} + +#' Compute Climdex thresholds for a subset / stripe +#' +#' Compute Climdex thresholds for a subset / stripe +#' +#' Given a subset and ancillary data, load and convert data, get the out-of-base quantiles for the data for each point, and return the result. +#' +#' @param subset The subset to use. +#' @param ts The associated time data, as created by \code{nc.get.time.series}. +#' @param base.range The base range; a vector of two numeric years. +#' @param dim.axes The dimension axes for the input data. +#' @param v.f.idx A mapping from variables to files, as created by \code{\link{get.var.file.idx}}. +#' @param variable.name.map A mapping from standardized names (tmax, tmin, prec) to NetCDF variable names. +#' @param src.units The source units to convert data from. +#' @param dest.units The destination units to convert to. +#' @param f A list of objects of type \code{ncdf4}, consisting of the open input files. If missing, will be pulled from the global namespace. +#' +#' @note This function relies on an object named 'f' and containing the opened NetCDF files being part of the global namespace. +#' +#' @examples +#' \donttest{ +#' ## Establish basic inputs. +#' author.data <- list(institution="Looney Bin", institution_id="LBC") +#' input.files <- c("pr_NAM44_CanRCM4_ERAINT_r1i1p1_1989-2009.nc") +#' +#' ## Prepare derived inputs. 
+#' f <- lapply(input.files, ncdf4::nc_open)
+#' variable.name.map <- c(tmax="tasmax", tmin="tasmin", prec="pr")
+#' f.meta <- create.file.metadata(f, variable.name.map)
+#' threshold.dat <- get.thresholds.metadata(names(f.meta$v.f.idx))
+#'
+#' ## Create output file
+#' thresh.file <- create.thresholds.file("thresh.nc", f, f.meta$ts, f.meta$v.f.idx, variable.name.map,
+#'                                       c(1991, 2000), f.meta$dim.size, f.meta$dim.axes,
+#'                                       threshold.dat, author.data)
+#'
+#' ## Compute threshold quantiles for stripe
+#' q <- get.quantiles.for.stripe(list(Y=1), f.meta$ts, c(1991, 2000), f.meta$dim.axes,
+#'                               f.meta$v.f.idx, variable.name.map, f.meta$src.units,
+#'                               f.meta$dest.units, f)
+#' }
+#'
+#' @export
+get.quantiles.for.stripe <- function(subset, ts, base.range, dim.axes, v.f.idx, variable.name.map, src.units, dest.units, f) {
+  f <- if(missing(f)) get("f", .GlobalEnv) else f
+  data.list <- sapply(names(v.f.idx), function(x) { gc(); get.data(f[[v.f.idx[x]]], variable.name.map[x], subset, src.units[x], dest.units[x], dim.axes) }, simplify=FALSE)
+  gc()
+
+  r <- 1:(dim(data.list[[1]])[2])
+  if(!is.null(data.list$tmax)) {
+    if(!is.null(data.list$tmin)) {
+      if(!is.null(data.list$prec)) {
+        return(lapply(r, function(x) climdex.pcic::get.outofbase.quantiles(data.list$tmax[,x], data.list$tmin[,x], data.list$prec[,x], ts, ts, ts, base.range)))
+      } else {
+        return(lapply(r, function(x) climdex.pcic::get.outofbase.quantiles(data.list$tmax[,x], data.list$tmin[,x], NULL, ts, ts, NULL, base.range)))
+      }
+    } else {
+      if(!is.null(data.list$prec)) {
+        return(lapply(r, function(x) climdex.pcic::get.outofbase.quantiles(data.list$tmax[,x], NULL, data.list$prec[,x], ts, NULL, ts, base.range)))
+      } else {
+        return(lapply(r, function(x) climdex.pcic::get.outofbase.quantiles(data.list$tmax[,x], NULL, NULL, ts, NULL, NULL, base.range)))
+      }
+    }
+  } else {
+    if(!is.null(data.list$tmin)) {
+      if(!is.null(data.list$prec)) {
+        return(lapply(r, function(x) climdex.pcic::get.outofbase.quantiles(NULL, data.list$tmin[,x], data.list$prec[,x], NULL, ts, ts, base.range)))
+      } else {
+        return(lapply(r, function(x) climdex.pcic::get.outofbase.quantiles(NULL, data.list$tmin[,x], NULL, NULL, ts, NULL, base.range)))
+      }
+    } else {
+      if(!is.null(data.list$prec)) {
+        return(lapply(r, function(x) climdex.pcic::get.outofbase.quantiles(NULL, NULL, data.list$prec[,x], NULL, NULL, ts, base.range)))
+      } else {
+        stop("None of the expected input variables (tmax, tmin, prec) were found; cannot compute quantiles.")
+      }
+    }
+  }
+}
+
+set.up.cluster <- function(parallel, type="SOCK", src) {
+  ## Fire up the cluster...
+  cluster <- NULL
+
+  if(!is.logical(parallel)) {
+    cat(paste("Creating cluster of", parallel, "nodes of type", type, "\n"))
+    cat(paste("SRC:", src, "\n"))
+    cluster <- snow::makeCluster(parallel, type)
+    snow::clusterCall(cluster, function() { source(src) })
+    ##snow::clusterEvalQ(cluster, library(climdex.pcic.ncdf))
+    ##snow::clusterEvalQ(cluster, try(getFromNamespace('nc_set_chunk_cache', 'ncdf4')(1024 * 2048, 1009), silent=TRUE))
+  }
+  cluster
+}
+
+#' Creates Climdex thresholds output file.
+#'
+#' Creates Climdex thresholds output file.
+#'
+#' This function creates a file suitable for outputting thresholds to, with all variables that can be created with the input data present in the file.
+#'
+#' @param thresholds.file The filename to be used for the thresholds file.
+#' @param f The file(s) being used as sources for metadata.
+#' @param ts The associated time data, as created by \code{nc.get.time.series}.
+#' @param v.f.idx A mapping from variables to files, as created by \code{\link{get.var.file.idx}}. +#' @param variable.name.map A mapping from standardized names (tmax, tmin, prec) to NetCDF variable names. +#' @param base.range The base range; a vector of two numeric years. +#' @param dim.size Dimension sizes for the input. +#' @param dim.axes Dimension axes for the input. +#' @param threshold.dat Threshold metadata, as provided by \code{\link{get.thresholds.metadata}}. +#' @param author.data A vector containing named elements describing the author; see \code{\link{create.indices.from.files}}. +#' @return An object of class \code{ncdf4}. +#' +#' @examples +#' \donttest{ +#' ## Establish basic inputs. +#' author.data <- list(institution="Looney Bin", institution_id="LBC") +#' input.files <- c("pr_NAM44_CanRCM4_ERAINT_r1i1p1_1989-2009.nc") +#' +#' ## Prepare derived inputs. +#' f <- lapply(input.files, ncdf4::nc_open) +#' variable.name.map <- c(tmax="tasmax", tmin="tasmin", prec="pr") +#' f.meta <- create.file.metadata(f, variable.name.map) +#' threshold.dat <- get.thresholds.metadata(names(f.meta$v.f.idx)) +#' +#' ## Create output file +#' thresh.file <- create.thresholds.file("thresh.nc", f, f.meta$ts, f.meta$v.f.idx, variable.name.map, +#' c(1981,1990), f.meta$dim.size, f.meta$dim.axes, +#' threshold.dat, author.data) +#' } +#' +#' @export +create.thresholds.file <- function(thresholds.file, f, ts, v.f.idx, variable.name.map, base.range, dim.size, dim.axes, threshold.dat, author.data) { + exemplar.file <- f[[v.f.idx[1]]] + exemplar.var.name <- variable.name.map[names(v.f.idx)[1]] + exemplar.var <- exemplar.file$var[[exemplar.var.name]] + num.thresholds <- ifelse(is.null(attr(ts, "dpy")), 365, attr(ts, "dpy")) + cal <- attr(ts, "cal") + + ## Get time metadata... 
+ old.time.dim <- exemplar.var$dim[[which(dim.axes == "T")]] + time.units <- old.time.dim$units + time.units.split <- strsplit(time.units, " ")[[1]] + time.origin <- if(time.units.split[2] == "as") format(trunc(min(ts), units="days"), "%Y-%m-%d") else time.units.split[3] + time.dim.name <- old.time.dim$name + old.time.bnds.att <- ncdf4::ncatt_get(exemplar.file, time.dim.name, "bounds") + time.bnds.name <- if(old.time.bnds.att$hasatt) old.time.bnds.att$value else "time_bnds" + + ## Set up time variables + out.time <- as.numeric(julian(as.PCICt(paste(floor(mean(base.range)), 1:num.thresholds, sep="-"), attr(ts, "cal"), format="%Y-%j"), as.PCICt(time.origin, cal)), units="days") + out.time.dim <- ncdf4::ncdim_def("time", paste("days since", time.origin), out.time, unlim=TRUE, calendar=cal, longname="time") + + ## Set up bounds + input.bounds <- ncdf4.helpers::nc.get.dim.bounds.var.list(exemplar.file) + input.bounds <- input.bounds[input.bounds != time.bnds.name] + input.dim.names <- ncdf4.helpers::nc.get.dim.names(exemplar.file, exemplar.var.name) + input.varname.list <- c(input.bounds, input.dim.names) + + bnds.dim <- ncdf4::ncdim_def("bnds", "", 1:2, create_dimvar=FALSE) + if(length(input.bounds) > 0) + bnds.dim <- exemplar.file$var[[input.bounds[1]]]$dim[[1]] + out.time.bnds <- as.numeric(julian(as.PCICt(c(paste(base.range[1], 1:num.thresholds, sep="-"), paste(base.range[2], 1:num.thresholds, sep="-")), attr(ts, "cal"), format="%Y-%j"), as.PCICt(time.origin, cal)), units="days") + dim(out.time.bnds) <- c(num.thresholds, 2) + out.time.bnds <- t(out.time.bnds) + out.time.bnds.var <- ncdf4::ncvar_def(time.bnds.name, '', list(bnds.dim, out.time.dim), longname='', prec="double") + + input.bounds.vars <- c(lapply(input.bounds, function(x) { exemplar.file$var[[x]] }), list(out.time.bnds.var)) + input.bounds.data <- c(lapply(input.bounds, function(x) { ncdf4::ncvar_get(exemplar.file, x) }), list(out.time.bnds)) + all.bounds <- c(input.bounds, time.bnds.name) + names(input.bounds.data) <- names(input.bounds.vars) <- all.bounds + + ## Set up 2d and 3d dims + out.dims.3d <- list(exemplar.var$dim[[which(dim.axes == 'X')]], exemplar.var$dim[[which(dim.axes == 'Y')]], out.time.dim) + out.dims.2d <- list(exemplar.var$dim[[which(dim.axes == 'X')]], exemplar.var$dim[[which(dim.axes == 'Y')]]) + out.vars <- sapply(names(threshold.dat), function(n) { + tinfo <- threshold.dat[[n]] + if(tinfo$has.time) + ncdf4::ncvar_def(n, tinfo$units, out.dims.3d, 1e20, tinfo$longname, prec="double") + else + ncdf4::ncvar_def(n, tinfo$units, out.dims.2d, 1e20, tinfo$longname, prec="double") + }, simplify=FALSE) + + ## Tack bounds vars onto var list so they get created... + all.vars <- c(input.bounds.vars, out.vars) + + ## Create file + thresholds.netcdf <- ncdf4::nc_create(thresholds.file, all.vars, force_v4=TRUE) + out.dim.axes <- c("X", "Y", "T") + + ## Copy attributes for all variables plus global attributes + ncdf4::nc_redef(thresholds.netcdf) + ncdf4.helpers::nc.copy.atts(exemplar.file, 0, thresholds.netcdf, 0, definemode=TRUE) + for(v in input.varname.list) { + ncdf4.helpers::nc.copy.atts(exemplar.file, v, thresholds.netcdf, v, definemode=TRUE) + } + + put.ETCCDI.atts(thresholds.netcdf, "monClim", ncdf4::ncatt_get(exemplar.file, 0, "title")$value, author.data, definemode=TRUE) + + ## Attach history data to threshold data. 
+ lapply(out.vars, function(v) { + put.history.att(thresholds.netcdf, v, definemode=TRUE) + ncdf4::ncatt_put(thresholds.netcdf, v, "base_period", paste(base.range, collapse="-"), definemode=TRUE) + }) + ncdf4::nc_enddef(thresholds.netcdf) + + ## Put bounds data. + for(v in all.bounds) { + ncdf4::ncvar_put(thresholds.netcdf, v, input.bounds.data[[v]]) + } + + return(thresholds.netcdf) +} + +#' Create mapping from variables to files. +#' +#' Create mapping from variables to files. +#' +#' Given a variable name map and list of variables in each file, determine a mapping from variables to files. +#' +#' @param variable.name.map A mapping from standardized names (tmax, tmin, prec) to NetCDF variable names. +#' @param v.list A list containing a vector of variables in each file. +#' @return A vector mapping standardized variable names (tmax, tmin, prec) to indices in the file list. +#' +#' @examples +#' \dontrun{ +#' ## Get mapping for a single file. +#' input.files <- c("pr_NAM44_CanRCM4_ERAINT_r1i1p1_1989-2009.nc") +#' f <- lapply(input.files, ncdf4::nc_open) +#' v.list <- lapply(f, ncdf4.helpers::nc.get.variable.list, min.dims=2) +#' v.f.idx <- get.var.file.idx(variable.name.map, v.list) +#' } +#' +#' @export +get.var.file.idx <- function(variable.name.map, v.list) { + v.f.idx <- sapply(variable.name.map, function(v) { which(sapply(v.list, function(vl) { v %in% vl })) }, simplify=FALSE) + v.f.idx <- unlist(v.f.idx[sapply(v.f.idx, length) > 0]) + return(v.f.idx) +} + +#' Retrieve metadata about NetCDF-format files. +#' +#' Retrieve metadata about NetCDF-format files. +#' +#' Given a list of NetCDF files and a mapping from standard variable names (tmax, tmin, prec) to NetCDF variable names, retrieve a set of standardized metadata. +#' +#' @param f The list of NetCDF files. +#' @param variable.name.map A named character vector mapping standard variable names (tmax, tmin, prec) to NetCDF variable names. +#' @return A list containing time data (ts), dimension sizes (dim.size), dimension axes (dim.axes), source units (src.units), destination units (dest.units), a mapping from variables to files (v.f.idx), and a projection, if available. +#' +#' @examples +#' \dontrun{ +#' ## Get metadata about a single input file. 
+#' input.files <- c("pr_NAM44_CanRCM4_ERAINT_r1i1p1_1989-2009.nc") +#' f <- lapply(input.files, ncdf4::nc_open) +#' f.meta <- create.file.metadata(f, variable.name.map) +#' } +#' +#' @export +create.file.metadata <- function(f, variable.name.map) { + v.list <- lapply(f, ncdf4.helpers::nc.get.variable.list, min.dims=2) + v.f.idx <- get.var.file.idx(variable.name.map, v.list) + + if(any(sapply(v.list, function(vl) { sum(variable.name.map %in% vl) }) == 0)) + stop("At least one input file doesn't contain any of the named variables.") + if(anyDuplicated(unlist(names(v.f.idx)))) + stop("Variable(s) present in more than one input file.") + + ## Get units and specify destination units + dest.units <- c(prec="kg m-2 d-1", tmax="degrees_C", tmin="degrees_C", tavg="degrees_C") + dest.units <- dest.units[names(dest.units) %in% names(v.f.idx)] + + ## Get projection + projection <- ncdf4.helpers::nc.get.proj4.string(f[[1]], v.list[[1]][1]) + stopifnot(!is.null(projection)) + if(projection == "") + projection <- NULL + + return(list(ts=get.ts(f), dim.size=get.dim.size(f, v.f.idx, variable.name.map), dim.axes=get.dim.axes(f, v.f.idx, variable.name.map), + src.units=sapply(names(v.f.idx), function(i) { f[[v.f.idx[i]]]$var[[variable.name.map[i]]]$units }), + dest.units=dest.units, v.f.idx=v.f.idx, projection=projection)) +} + +#' Retrieve threshold metadata +#' +#' Retrieve threshold metadata +#' +#' Returns units, long names, locations within the climdexInput data structure, and whether time data should be included given the variable information available. +#' +#' @param var.names A vector containing names of available variables (tmax, tmin, prec). +#' @return A list containing metadata for each of the six thresholds. +#' +#' @examples +#' thresholds.meta <- get.thresholds.metadata("prec") +#' +#' @export +get.thresholds.metadata <- function(var.names) { + threshold.dat <- list(tx10thresh=list(units="degrees_C", longname="10th_percentile_running_baseline_tasmax", has.time=TRUE, q.path=c("tmax", "outbase", "q10")), + tx90thresh=list(units="degrees_C", longname="90th_percentile_running_baseline_tasmax", has.time=TRUE, q.path=c("tmax", "outbase", "q90")), + tn10thresh=list(units="degrees_C", longname="10th_percentile_running_baseline_tasmin", has.time=TRUE, q.path=c("tmin", "outbase", "q10")), + tn90thresh=list(units="degrees_C", longname="90th_percentile_running_baseline_tasmin", has.time=TRUE, q.path=c("tmin", "outbase", "q90")), + r95thresh=list(units="kg m-2 d-1", longname="95th_percentile_baseline_wet_day_pr", has.time=FALSE, q.path=c("prec", "q95")), + r99thresh=list(units="kg m-2 d-1", longname="99th_percentile_baseline_wet_day_pr", has.time=FALSE, q.path=c("prec", "q99"))) + return(threshold.dat[sapply(threshold.dat, function(x) { x$q.path[1] %in% var.names })]) +} + +unsquash.dims <- function(dat.dim, subset, f, n) { + dim.axes <- ncdf4.helpers::nc.get.dim.axes(f, n) + return(sapply(dim.axes, function(x) { if(any(names(subset) == x)) length(subset[[x]]) else f$dim[[names(dim.axes)[dim.axes == x]]]$len })) +} + +## Run Climdex to generate indices for computing Climdex on future data +#' Create Climdex thresholds used for computing threshold-based indices +#' +#' Create Climdex thresholds used for computing threshold-based indices +#' +#' For many applications, one may want to compute thresholds on one data set, then apply them to another. 
This is usually the case when comparing GCM (Global Climate Model) results for future time periods to either historical reanalysis data or historical / pre-industrial control runs from models. The purpose of this function is to compute these thresholds on the data supplied, saving them to the file specified. These thresholds can then be passed to \code{\link{create.indices.from.files}} to compute indices.
+#'
+#' @param input.files A list of filenames of NetCDF files to be used as input. A NetCDF file may contain one or more variables.
+#' @param output.file The name of the file to be created.
+#' @param author.data A vector containing named elements describing the author; see \code{\link{create.indices.from.files}}.
+#' @param variable.name.map A character vector mapping from standardized names (tmax, tmin, prec) to NetCDF variable names.
+#' @param axis.to.split.on The axis to split up the data on for parallel / incremental processing.
+#' @param fclimdex.compatible Whether the thresholds should be created to match fclimdex thresholds; affects padding at the ends of the base period.
+#' @param base.range Vector of two numeric years specifying the start and end years.
+#' @param parallel The number of parallel processing threads, or FALSE if no parallel processing is desired.
+#' @param verbose Whether to be chatty.
+#' @param max.vals.millions The number of data values to process at one time (length of time dim * number of values * number of variables).
+#' @param cluster.type The cluster type, as used by the \code{snow} library.
+#' @param src The path to this source file; it is \code{source}d on each cluster worker when running in parallel.
+#'
+#' @note NetCDF input files may contain one or more variables, named as per \code{variable.name.map}. The code will search the files for the named variables.
+#'
+#' @examples
+#' \dontrun{
+#' ## Prepare input data and calculate thresholds for file.
+#' input.files <- c("pr_NAM44_CanRCM4_ERAINT_r1i1p1_1989-2009.nc")
+#' author.data <- list(institution="Looney Bin", institution_id="LBC")
+#' create.thresholds.from.file(input.files, "thresh.nc", author.data,
+#'                             base.range=c(1991, 2000), parallel=FALSE)
+#' }
+#'
+#' @export
+create.thresholds.from.file <- function(input.files, output.file, author.data, variable.name.map=c(tmax="tasmax", tmin="tasmin", prec="pr", tavg="tas"), axis.to.split.on="Y", fclimdex.compatible=TRUE, base.range=c(1961, 1990), parallel=4, verbose=FALSE, max.vals.millions=10, cluster.type="SOCK", src="ncdf.R") {
+  if(!(is.logical(parallel) || is.numeric(parallel)))
+    stop("'parallel' option must be logical or numeric.")
+
+  if(length(input.files) == 0)
+    stop("Require at least one input file.")
+
+  f <- lapply(input.files, ncdf4::nc_open)
+  f.meta <- create.file.metadata(f, variable.name.map)
+
+  ## Define what the threshold indices will look like...
+ threshold.dat <- get.thresholds.metadata(names(f.meta$v.f.idx)) + + ## Create the output file + thresholds.netcdf <- create.thresholds.file(output.file, f, f.meta$ts, f.meta$v.f.idx, variable.name.map, base.range, f.meta$dim.size, f.meta$dim.axes, threshold.dat, author.data) + + cluster <- set.up.cluster(parallel, cluster.type, src) + subsets <- ncdf4.helpers::get.cluster.worker.subsets(max.vals.millions * 1000000, f.meta$dim.size, f.meta$dim.axes, axis.to.split.on) + + write.thresholds.data <- function(out.list, out.sub) { + lapply(names(threshold.dat), function(n) { + d <- threshold.dat[[n]] + if(d$has.time) + dat <- t(sapply(out.list, function(y) { return(y[[d$q.path]]) })) + else + dat <- sapply(out.list, function(y) { return(y[[d$q.path[1]]][d$q.path[2]]) }) + dim(dat) <- unsquash.dims(dim(dat), out.sub, thresholds.netcdf, n) + ncdf4.helpers::nc.put.var.subset.by.axes(thresholds.netcdf, n, dat, out.sub) + }) + gc() + } + + if(!is.null(cluster)) { + lapply(f, ncdf4::nc_close) + rm(f) + + snow::clusterExport(cluster, "input.files", environment()) + snow::clusterEvalQ(cluster, f <<- lapply(input.files, ncdf4::nc_open, readunlim=FALSE)) + + ## Compute subsets and fire jobs off; collect and write out chunk-at-a-time + parLapplyLBFiltered(cluster, subsets, get.quantiles.for.stripe, f.meta$ts, base.range, f.meta$dim.axes, f.meta$v.f.idx, variable.name.map, f.meta$src.units, f.meta$dest.units, local.filter.func=write.thresholds.data) + + snow::stopCluster(cluster) + } else { + ##try(getFromNamespace('nc_set_chunk_cache', 'ncdf4')(1024 * 2048, 1009), silent=TRUE) + + lapply(subsets, function(x) { write.thresholds.data(get.quantiles.for.stripe(x, f.meta$ts, base.range, f.meta$dim.axes, f.meta$v.f.idx, variable.name.map, f.meta$src.units, f.meta$dest.units, f), x) }) + + lapply(f, ncdf4::nc_close) + } + + ## Close all the files + ncdf4::nc_close(thresholds.netcdf) + + cat("Finished computing thresholds\n") + invisible(0) +} + +#' Open thresholds file(s) +#' +#' Open thresholds file(s) +#' +#' This function opens one or more thresholds files and returns the \code{ncdf4} objects as a list. +#' +#' @param thresholds.files A character vector containing the names of thresholds files. +#' @return A list of objects of class \code{ncdf4}, or NULL if thresholds.files is NULL. +#' +#' @examples +#' \dontrun{ +#' ## Open a single thresholds file +#' thresholds.files <- c("thresh.nc") +#' thresh <- thresholds.open(thresholds.files) +#' } +#' +#' @export +thresholds.open <- function(thresholds.files) { + return(if(is.null(thresholds.files)) NULL else lapply(thresholds.files, ncdf4::nc_open)) +} + +#' Close thresholds file(s) +#' +#' Close thresholds file(s) +#' +#' This function closes one or more thresholds files. +#' +#' @param thresholds.nc A list of objects of class \code{ncdf4}, or NULL +#' +#' @examples +#' \dontrun{ +#' ## Open a single thresholds file, then close it. 
+#' thresholds.files <- c("thresh.nc") +#' thresh <- thresholds.open(thresholds.files) +#' thresholds.close(thresh) +#' } +#' +#' @export +thresholds.close <- function(thresholds.nc) { + if(!is.null(thresholds.nc)) lapply(thresholds.nc, ncdf4::nc_close) + invisible(0) +} + + +get.time.origin <- function(f, dim.axes) { + time.units <- f[[1]]$dim[[names(dim.axes)[which(dim.axes == "T")]]]$units + time.units.split <- strsplit(gsub("[ ]+", " ", time.units), " ")[[1]] + time.origin <- if(time.units.split[2] == "as") format(trunc(min(ts), units="days"), "%Y-%m-%d") else time.units.split[3] + return(time.origin) +} + +get.thresholds.f.idx <- function(thresholds.files, thresholds.name.map) { + if(is.null(thresholds.files)) { + return(NULL) + } else { + thresh <- thresholds.open(thresholds.files) + t.f.idx <- get.var.file.idx(thresholds.name.map, lapply(thresh, ncdf4.helpers::nc.get.variable.list, min.dims=2)) + thresholds.close(thresh) + return(t.f.idx) + } +} + +## Run Climdex and populate the output files +#' Create Climdex indices from NetCDF input files. +#' +#' Create Climdex indices from NetCDF input files. +#' +#' This function computes Climdex indices from NetCDF input files, writing out one file per variable named like the \code{template.filename}, which must follow the CMIP5 file naming conventions (this is a deficiency which will be corrected in later versions). +#' +#' The indices to be calculated can be specified; if not, they will be determined by data availability. Thresholds can be supplied (via \code{thresholds.files}) or, if there is data within the base period, calculated and used as part of the process. Note that in-base thresholds are separate from out-of-base thresholds; this is covered in more detail in the help for the \code{climdex.pcic} package. +#' +#' @param input.files A list of filenames of NetCDF files to be used as input. A NetCDF file may contain one or more variables. +#' @param out.dir The directory to put the output files in. +#' @param output.filename.template The output filename to be used as a template, which must follow the CMIP5 file naming conventions. +#' @param author.data Data describing the author; a character vector with 0 or more of the following named values:\describe{ +#' \item{institution}{The institution generating the data.} +#' \item{institution_id}{An abbreviation for the institution generating the data.} +#' \item{indices_archive}{The URL the data is published at, if applicable.} +#' \item{contact}{The email address or contact info for the author.} +#' \item{references}{What to reference when citing this work.} +#' } +#' @param climdex.vars.subset A character vector of lower-case names of Climdex indices to calculate (eg: tr, fd, rx5day). See the list of 27 indices in the References section. +#' @param climdex.time.resolution The time resolution to compute indices at; one of "all" (both monthly and annual), "annual" (only annual), or "monthly" (only monthly). +#' @param variable.name.map A character vector mapping from standardized names (tmax, tmin, prec) to NetCDF variable names. +#' @param axis.to.split.on The axis to split up the data on for parallel / incremental processing. +#' @param fclimdex.compatible Whether the thresholds should be created to match fclimdex thresholds; affects padding at the ends of the base period. +#' @param base.range Vector of two numeric years specifying the start and end years. +#' @param parallel The number of parallel processing threads, or FALSE if no parallel processing is desired. 
+#' @param verbose Whether to be chatty.
+#' @param thresholds.files A character vector of files containing thresholds to be used.
+#' @param thresholds.name.map A mapping from threshold names to NetCDF variable names. The following names will be used: \describe{
+#' \item{tx10thresh}{10th percentile for a 5-day running window of baseline daily maximum temperature.}
+#' \item{tn10thresh}{10th percentile for a 5-day running window of baseline daily minimum temperature.}
+#' \item{tx90thresh}{90th percentile for a 5-day running window of baseline daily maximum temperature.}
+#' \item{tn90thresh}{90th percentile for a 5-day running window of baseline daily minimum temperature.}
+#' \item{r95thresh}{95th percentile of daily precipitation in wet days (>=1 mm of rain).}
+#' \item{r99thresh}{99th percentile of daily precipitation in wet days (>=1 mm of rain).}
+#' }
+#' @param max.vals.millions The number of data values to process at one time (length of time dim * number of values * number of variables).
+#' @param cluster.type The cluster type, as used by the \code{snow} library.
+#' @param src The path to this source file; it is \code{source}d on each cluster worker when running in parallel.
+#'
+#' @note NetCDF input files may contain one or more variables, named as per \code{variable.name.map}. The code will search the files for the named variables. The same is true of thresholds files: one or more files may be supplied via the \code{thresholds.files} argument, and the name mapping via the \code{thresholds.name.map} argument.
+#'
+#' @references \url{http://etccdi.pacificclimate.org/list_27_indices.shtml}
+#' @examples
+#' \dontrun{
+#' ## Prepare input data and calculate indices for a single file
+#' ## with a single thread (no parallelism).
+#' input.files <- c("pr_NAM44_CanRCM4_ERAINT_r1i1p1_1989-2009.nc")
+#' author.data <- list(institution="Looney Bin", institution_id="LBC")
+#' create.indices.from.files(input.files, "out_dir/", input.files[1], author.data,
+#'                           base.range=c(1991, 2000), parallel=FALSE)
+#'
+#' ## Prepare input data and calculate indices for two files
+#' ## in parallel given thresholds.
+#' input.files <- c("pr_NAM44_CanRCM4_ERAINT_r1i1p1_1989-2009.nc",
+#'                  "tasmax_NAM44_CanRCM4_ERAINT_r1i1p1_1989-2009.nc")
+#' author.data <- list(institution="Looney Bin", institution_id="LBC")
+#' create.indices.from.files(input.files, "out_dir/", input.files[1], author.data,
+#'                           base.range=c(1991, 2000), parallel=8, thresholds.files="thresh.nc")
+#' }
+#'
+#' @export
+create.indices.from.files <- function(input.files, out.dir, output.filename.template, author.data, climdex.vars.subset=NULL, climdex.time.resolution=c("all", "annual", "monthly"), variable.name.map=c(tmax="tasmax", tmin="tasmin", prec="pr", tavg="tas"), axis.to.split.on="Y", fclimdex.compatible=TRUE, base.range=c(1961, 1990), parallel=4, verbose=FALSE, thresholds.files=NULL, thresholds.name.map=c(tx10thresh="tx10thresh", tn10thresh="tn10thresh", tx90thresh="tx90thresh", tn90thresh="tn90thresh", r95thresh="r95thresh", r99thresh="r99thresh"), max.vals.millions=10, cluster.type="SOCK", src="ncdf.R") {
+  if(!(is.logical(parallel) || is.numeric(parallel)))
+    stop("'parallel' option must be logical or numeric.")
+
+  if(length(input.files) == 0)
+    stop("Require at least one input file.")
+
+  ## Open files, determine mapping between files and variables.
+  f <- lapply(input.files, ncdf4::nc_open)
+  f.meta <- create.file.metadata(f, variable.name.map)
+
+  ## Get thresholds variable-file mapping
+  t.f.idx <- get.thresholds.f.idx(thresholds.files, thresholds.name.map)
+
+  ## Get variable list, subset if necessary
+  climdex.time.resolution <- match.arg(climdex.time.resolution)
+  climdex.var.list <- get.climdex.variable.list(names(f.meta$v.f.idx), climdex.time.resolution, climdex.vars.subset)
+
+  cdx.meta <- get.climdex.variable.metadata(climdex.var.list, output.filename.template)
+  cdx.ncfile <- create.ncdf.output.files(cdx.meta, f, f.meta$v.f.idx, variable.name.map, f.meta$ts, get.time.origin(f, f.meta$dim.axes), base.range, out.dir, author.data)
+  cdx.funcs <- get.climdex.functions(climdex.var.list)
+
+  ## Compute indices, either single process or multi-process using 'parallel'
+  subsets <- ncdf4.helpers::get.cluster.worker.subsets(max.vals.millions * 1000000, f.meta$dim.size, f.meta$dim.axes, axis.to.split.on)
+  if(is.numeric(parallel)) {
+    ## Setup...
+    lapply(f, ncdf4::nc_close)
+    rm(f)
+    cluster <- set.up.cluster(parallel, cluster.type, src)
+    snow::clusterExport(cluster, list("input.files", "thresholds.files"), environment())
+    snow::clusterEvalQ(cluster, f <<- lapply(input.files, ncdf4::nc_open, readunlim=FALSE))
+    snow::clusterEvalQ(cluster, thresholds.netcdf <<- thresholds.open(thresholds.files))
+
+    ## Meat...
+    parLapplyLBFiltered(cluster, subsets, compute.indices.for.stripe, cdx.funcs, f.meta$ts, base.range, f.meta$dim.axes, f.meta$v.f.idx, variable.name.map, f.meta$src.units, f.meta$dest.units, t.f.idx, thresholds.name.map, fclimdex.compatible, f.meta$projection, local.filter.func=function(x, x.sub) {
+      write.climdex.results(x, x.sub, cdx.ncfile, f.meta$dim.size, cdx.meta$var.name)
+    })
+
+    ## Clean-up.
+    snow::stopCluster(cluster)
+  } else {
+    ## Setup...
+    thresholds.netcdf <- thresholds.open(thresholds.files)
+    ##try(getFromNamespace('nc_set_chunk_cache', 'ncdf4')(1024 * 2048, 1009), silent=TRUE)
+
+    ## Meat...
+    lapply(subsets, function(x) { write.climdex.results(compute.indices.for.stripe(x, cdx.funcs, f.meta$ts, base.range, f.meta$dim.axes, f.meta$v.f.idx, variable.name.map, f.meta$src.units, f.meta$dest.units, t.f.idx, thresholds.name.map, fclimdex.compatible, f.meta$projection, f, thresholds.netcdf), x, cdx.ncfile, f.meta$dim.size, cdx.meta$var.name) })
+
+    ## Clean-up.
+    thresholds.close(thresholds.netcdf)
+    lapply(f, ncdf4::nc_close)
+  }
+
+  ## Close all the output files
+  lapply(cdx.ncfile, ncdf4::nc_close)
+
+  cat("Finished computing indices\n")
+  invisible(0)
+}
+# nolint end
diff --git a/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/README.rst b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/README.rst
new file mode 100644
index 0000000000..026ac1ae11
--- /dev/null
+++ b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/README.rst
@@ -0,0 +1,66 @@
+What is climdex.pcic.ncdf?
+==========================
+
+* `climdex.pcic.ncdf` is a companion library for `climdex.pcic` that helps with reading NetCDF input grids and writing NetCDF output files when computing the `27 core indices of extreme climate`_. The code allows for parallel computation of indices using either a SOCK or MPI cluster. It was written for the `R statistical programming language`_ by the `Pacific Climate Impacts Consortium`_.
+
+.. _27 core indices of extreme climate: http://etccdi.pacificclimate.org/list_27_indices.shtml
+.. _R statistical programming language: http://www.r-project.org/
+.. _Pacific Climate Impacts Consortium: http://pacificclimate.org/
+
+Getting Help
+============
+
+New to programming or to R?
+---------------------------
+
+* Read the `Software Carpentry`_ `Programming in R`_ lessons.
+* Read one of the many `R Manuals`_.
+* Attend an `R Users Group`_ meeting.
+
+.. _Software Carpentry: http://software-carpentry.org/index.html
+.. _Programming in R: http://software-carpentry.org/v5/novice/r/index.html
+.. _R Manuals: http://cran.r-project.org/manuals.html
+.. _R Users Group: http://r-users-group.meetup.com/
+
+Looking for code?
+-----------------
+
+* Get the latest `climdex.pcic.ncdf release from our website`_.
+* Explore the `development repository`_.
+* Install it with devtools ::
+
+   > library(devtools)
+   > install_github('pacificclimate/climdex.pcic.ncdf', ref='release')
+
+.. _climdex.pcic.ncdf release from our website: http://www.pacificclimate.org/sites/default/files/climdex.pcic_.ncdf_0.5-4.tar_.gz
+.. _development repository: https://github.com/pacificclimate/climdex.pcic.ncdf/
+
+Need help using the package?
+----------------------------
+
+* Read the manual ::
+
+   > library(climdex.pcic.ncdf)
+   Loading required package: PCICt
+   > ?climdex.pcic.ncdf
+
+* Create a `new issue`_ on the `package issue tracker`_ and label it "help wanted" [1]_.
+
+.. _new issue: https://github.com/pacificclimate/climdex.pcic.ncdf/issues/new
+
+Want to contribute?
+-------------------
+
+* To report a bug in climdex.pcic.ncdf, use the `package issue tracker`_ (after you've read the `bug reporting guide`_).
+* To help with development, read through the `contributor's guide`_.
+
+.. _bug reporting guide: https://github.com/pacificclimate/climdex.pcic.ncdf/blob/master/CONTRIBUTING.rst#bug-reports
+.. _package issue tracker: https://github.com/pacificclimate/climdex.pcic.ncdf/issues
+.. _contributor's guide: https://github.com/pacificclimate/climdex.pcic.ncdf/blob/master/CONTRIBUTING.rst
+
+Still need help?
+----------------
+
+* Contact climate@uvic.ca and let us know what we can do.
+
+.. [1] Please know that the pool of people who can provide support for the package is extremely small and time is limited. We don't necessarily have the capacity for long, open-ended user support. If you keep your questions short, specific and direct, there's a greater probability that someone will take on the ticket.
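+
+Example usage
+-------------
+
+The following is a minimal, hypothetical sketch assembled from the examples in the package documentation; the input filename and author metadata are placeholders, not shipped test data. It first computes baseline thresholds with ``create.thresholds.from.file`` and then reuses them when computing the indices, so the expensive quantile step runs only once ::
+
+   > library(climdex.pcic.ncdf)
+   > input.files <- c("pr_NAM44_CanRCM4_ERAINT_r1i1p1_1989-2009.nc")
+   > author.data <- list(institution="Looney Bin", institution_id="LBC")
+   > ## Compute baseline thresholds and save them to thresh.nc.
+   > create.thresholds.from.file(input.files, "thresh.nc", author.data,
+                                 base.range=c(1991, 2000), parallel=FALSE)
+   > ## Compute the indices, reusing the saved thresholds.
+   > create.indices.from.files(input.files, "out_dir/", input.files[1], author.data,
+                               base.range=c(1991, 2000), parallel=FALSE,
+                               thresholds.files="thresh.nc")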
diff --git a/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/tests/bootstrap.R b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/tests/bootstrap.R new file mode 100644 index 0000000000..78cbca6644 --- /dev/null +++ b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/tests/bootstrap.R @@ -0,0 +1,14 @@ +#nolint start +load("exemplar_data.rda") + +if(require("RUnit", quietly=TRUE)) { + ## Run all the tests + library(climdex.pcic.ncdf) + library(ncdf4) + wd <- getwd() + testsuite <- defineTestSuite("climdex.pcic.ncdf", dirs=wd, testFileRegexp = "^test_.+.R$", testFuncRegexp = "^test.+") + climdex.pcic.ncdf.test.result <- runTestSuite(testsuite) + printTextProtocol(climdex.pcic.ncdf.test.result) + stopifnot(climdex.pcic.ncdf.test.result$climdex.pcic.ncdf$nFail == 0 && climdex.pcic.ncdf.test.result$climdex.pcic.ncdf$nErr == 0) +} +#nolint end diff --git a/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/tests/exemplar_data.rda b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/tests/exemplar_data.rda new file mode 100644 index 0000000000..bafe972068 Binary files /dev/null and b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/tests/exemplar_data.rda differ diff --git a/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/tests/test_basic_file_funcs.R b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/tests/test_basic_file_funcs.R new file mode 100644 index 0000000000..1e403d1176 --- /dev/null +++ b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/tests/test_basic_file_funcs.R @@ -0,0 +1,225 @@ +#nolint start +author.data <- list(institution="Pacific Climate Impacts Consortium", + institution_id="PCIC", + indices_archive="Please check http://www.cccma.ec.gc.ca/data/climdex/climdex.shtml for errata or updates.", + contact="bronaugh@uvic.ca", + references="http://www.cccma.ec.gc.ca/data/climdex/" + ) +x.subset <- 33:34 +y.subset <- 17:18 +correct.data.dir <- "correct_output/" +correct.thresh.file.6190 <- paste(correct.data.dir, "thresholds_monClim_CanESM2_historical_r1i1p1_1961-1990.nc", sep="") +thresh.omit.list <- c("tx10p", "tn10p", "tx10p", "tx90p", "wsdi", "csdi") + +test.get.thresholds.chunk <- function() { + ## Define mappings and filenames. + thresholds.name.map <- c(tx10thresh="tx10thresh", tn10thresh="tn10thresh", tx90thresh="tx90thresh", + tn90thresh="tn90thresh", r95thresh="r95thresh", r99thresh="r99thresh") + thresh.files <- "correct_output/thresholds_monClim_CanESM2_historical_r1i1p1_1961-1990.nc" + + if(all(file.exists(thresh.files))) { + ## Open files, etc. + cdx.funcs <- get.climdex.functions(get.climdex.variable.list("tmax")) + thresholds.netcdf <- lapply(thresh.files, nc_open) + t.f.idx <- get.var.file.idx(thresholds.name.map, lapply(thresholds.netcdf, + ncdf4.helpers::nc.get.variable.list, min.dims=2)) + + ## Get thresholds chunk. + dat <- get.thresholds.chunk(list(), cdx.funcs, thresholds.netcdf, t.f.idx, thresholds.name.map) + checkEquals(thresholds.chunk.tmax.only, dat) + + lapply(thresholds.netcdf, nc_close) + } +} + +test.compute.indices.for.stripe <- function() { + ## Define mappings and filenames. + author.data <- list(institution="Looney Bin", institution_id="LBC") + input.files <- list.files("test1/", full.names=TRUE) + variable.name.map <- c(tmax="tasmax", tmin="tasmin", prec="pr") + thresh.files <- "correct_output/thresholds_monClim_CanESM2_historical_r1i1p1_1961-1990.nc" + + if(length(input.files) > 0 && all(file.exists(input.files)) && all(file.exists(thresh.files))) { + ## Open files, etc. 
+ cdx.funcs <- get.climdex.functions(get.climdex.variable.list("prec")) + f <- lapply(input.files, ncdf4::nc_open) + f.meta <- create.file.metadata(f, variable.name.map) + climdex.var.list <- get.climdex.variable.list(names(f.meta$v.f.idx), "all", NULL) + cdx.meta <- get.climdex.variable.metadata(climdex.var.list, input.files[1]) + + thresholds.name.map <- c(tx10thresh="tx10thresh", tn10thresh="tn10thresh", tx90thresh="tx90thresh", + tn90thresh="tn90thresh", r95thresh="r95thresh", r99thresh="r99thresh") + thresholds.netcdf <- lapply(thresh.files, nc_open) + t.f.idx <- get.var.file.idx(thresholds.name.map, lapply(thresholds.netcdf, + ncdf4.helpers::nc.get.variable.list, min.dims=2)) + thresh.dat <- get.thresholds.chunk(list(), cdx.funcs, thresholds.netcdf, t.f.idx, thresholds.name.map) + + ## Compute indices for stripe + cdx <- compute.indices.for.stripe(list(X=1:2, Y=1:2), cdx.funcs, f.meta$ts, c(1981, 1990), f.meta$dim.axes, + f.meta$v.f.idx, variable.name.map, f.meta$src.units, f.meta$dest.units, + t.f.idx, thresholds.name.map, f=f, thresholds.netcdf=thresholds.netcdf) + + lapply(thresholds.netcdf, nc_close) + + res <- lapply(names(cdx[[1]]), function(x) { + fn <- list.files("correct_output/", pattern=paste("^", x, sep=""), full.names=TRUE) + f.valid <- nc_open(fn, readunlim=FALSE) + d.input <- ncvar_get(f.valid, strsplit(x, "_")[[1]][1]) + nc_close(f.valid) + d.comparison <- t(do.call(cbind, lapply(cdx, function(cr) { cr[[x]] }))) + dim(d.comparison) <- dim(d.input) + + ## Apparently there are differences at the 3e-6 level between calculated and saved data... who knew? + checkEquals(d.input, d.comparison, tolerance=1e-5) + mean(abs(d.input - d.comparison)) + }) + + lapply(f, nc_close) + } + invisible(0) +} + +test.get.quantiles.for.stripe <- function() { + historical.files <- list.files("historical/", full.names=TRUE) + if(length(historical.files) > 0) { + ## FIXME: This is untestable with the current input data. + + ## Establish basic inputs. + author.data <- list(institution="Looney Bin", institution_id="LBC") + input.files <- list.files("test1/", full.names=TRUE) + + ## Prepare derived inputs. + f <- lapply(input.files, ncdf4::nc_open) + variable.name.map <- c(tmax="tasmax", tmin="tasmin", prec="pr") + f.meta <- create.file.metadata(f, variable.name.map) + threshold.dat <- get.thresholds.metadata(names(f.meta$v.f.idx)) + + ## Compute threshold quantiles for stripe + q <- get.quantiles.for.stripe(list(Y=1), f.meta$ts, c(1981, 1990), f.meta$dim.axes, + f.meta$v.f.idx, variable.name.map, f.meta$src.units, + f.meta$dest.units, f) + + ## FIXME: Compare to valid data. + + lapply(f, nc_close) + } +} + +test.get.quantiles.object <- function() { + ## Define mappings and filenames. + thresholds.name.map <- c(tx10thresh="tx10thresh", tn10thresh="tn10thresh", tx90thresh="tx90thresh", + tn90thresh="tn90thresh", r95thresh="r95thresh", r99thresh="r99thresh") + thresh.files <- "correct_output/thresholds_monClim_CanESM2_historical_r1i1p1_1961-1990.nc" + + if(all(file.exists(thresh.files))) { + ## Open files, etc. + cdx.funcs <- get.climdex.functions(get.climdex.variable.list("tmax")) + thresholds.netcdf <- lapply(thresh.files, nc_open) + t.f.idx <- get.var.file.idx(thresholds.name.map, lapply(thresholds.netcdf, + ncdf4.helpers::nc.get.variable.list, min.dims=2)) + ## Get thresholds chunk. 
+ dat <- get.thresholds.chunk(list(Y=1), cdx.funcs, thresholds.netcdf, t.f.idx, thresholds.name.map) + + ## Get quantiles object for index 2 + q <- get.quantiles.object(dat, 2) + + ## FIXME: Compare to a correct object. + + lapply(thresholds.netcdf, nc_close) + } +} + +test.get.northern.hemisphere.booleans <- function() { + test.get.nh <- function(test.dir) { + input.files <- list.files(test.dir, full.names=TRUE) + f <- lapply(input.files, ncdf4::nc_open) + f.v <- lapply(f, ncdf4.helpers::nc.get.variable.list, min.dims=2) + bools <- get.northern.hemisphere.booleans(list(X=1:2, Y=1:2), f[[1]], f.v[[1]], NULL) + lapply(f, ncdf4::nc_close) + return(bools) + } + ## FIXME: Need test data. + browser() + if(file.exists("test3/")) + checkEquals(test.get.nh("test3/"), rep(FALSE, 4)) + ## FIXME: Need test data, and a projected dataset. + ##checkEquals(test.get.nh("test7/"), correct.data) +} + +## FIXME: Needs proper test data. This is just a framework... +test.get.data <- function() { + test.dir <- "test3/" + variable.name.map <- c(tmax="tasmax", tmin="tasmin", prec="pr") + if(file.exists(test.dir)) { + input.files <- list.files(test.dir, full.names=TRUE) + f <- lapply(input.files, ncdf4::nc_open) + f.meta <- create.file.metadata(f, variable.name.map) + d <- get.data(f[[f.meta$v.f.idx['prec']]], "pr", list(Y=2), "kg m-2 s-1", "kg m-2 s-1", c(lon="X",lat="Y",time="T")) + lapply(f, ncdf4::nc_close) + } +} + +## FIXME: Needs proper test data. This is just a framework... +test.file.funcs <- function() { + test.dir <- "test3/" + if(file.exists(test.dir)) { + input.files <- list.files(test.dir, full.names=TRUE) + variable.name.map <- c(tmax="tasmax", tmin="tasmin", prec="pr") + f <- lapply(input.files, ncdf4::nc_open) + f.meta <- create.file.metadata(f, variable.name.map) + lapply(f, ncdf4::nc_close) + } +} + +test.thresholds.create.and.indices <- function() { + test.set <- paste("test", 1:6, "/", sep="") + lapply(test.set[file.exists(test.set)], function(test) { + input.file.list <- list.files(test, full.names=TRUE) + print(file.exists(input.file.list)) + print(input.file.list) + thresh.file <- tempfile() + indices.dir.thresh <- tempdir() + indices.dir.nothresh <- tempdir() + create.thresholds.from.file(input.file.list, thresh.file, author.data, parallel=FALSE, base.range=c(2010, 2019)) + create.indices.from.files(input.file.list, indices.dir.thresh, input.file.list[1], author.data, parallel=FALSE, thresholds.files=correct.thresh.file.6190) + + ## Compare to base data. 
+ test.file.list <- list.files(indices.dir.thresh, pattern="ETCCDI") + lapply(test.file.list, function(fn) { + print(fn) + f.test <- nc_open(paste(indices.dir.thresh, fn, sep="/")) + f.correct <- nc_open(paste(correct.data.dir, fn, sep="/")) + + d.test <- ncvar_get(f.test, ncdf4.helpers::nc.get.variable.list(f.test)[1]) + d.correct <- ncvar_get(f.correct, ncdf4.helpers::nc.get.variable.list(f.correct)[1]) + + checkEquals(d.test, d.correct) + + nc_close(f.test) + nc_close(f.correct) + }) + + create.indices.from.files(input.file.list, indices.dir.thresh, input.file.list[1], author.data, parallel=FALSE, thresholds.files=thresh.file) + create.indices.from.files(input.file.list, indices.dir.nothresh, input.file.list[1], author.data, parallel=FALSE, base.range=c(2010, 2019)) + + unlink(paste(indices.dir.nothresh, "*", sep="/")) + unlink(paste(indices.dir.thresh, "*", sep="/")) + gc() + }) +} + +parallel.thresholds.create.and.indices <- function() { + test.set <- paste("test", 1:6, "/", sep="") + lapply(test.set[file.exists(test.set)], function(test) { + input.file.list <- list.files(test, full.names=TRUE) + print(file.exists(input.file.list)) + print(input.file.list) + thresh.file <- tempfile() + indices.dir.thresh <- tempdir() + indices.dir.nothresh <- tempdir() + create.thresholds.from.file(input.file.list, thresh.file, author.data, parallel=4, base.range=c(2010, 2029)) + create.indices.from.files(input.file.list, indices.dir.thresh, input.file.list[1], author.data, parallel=4, thresholds.files=thresh.file) + create.indices.from.files(input.file.list, indices.dir.nothresh, input.file.list[1], author.data, parallel=4, base.range=c(2010, 2029)) + }) +} +#nolint end diff --git a/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/tests/test_var_meta.R b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/tests/test_var_meta.R new file mode 100644 index 0000000000..e682fa3037 --- /dev/null +++ b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/tests/test_var_meta.R @@ -0,0 +1,51 @@ +#nolint start +test.get.climdex.variable.list <- function() { + checkEquals(climdex.var.list$tavg.all, get.climdex.variable.list(c("tavg"))) + checkEquals(climdex.var.list$tmax.all, get.climdex.variable.list(c("tmax"))) + checkEquals(climdex.var.list$tmax.mon, get.climdex.variable.list(c("tmax"), time.resolution="monthly")) + checkEquals(climdex.var.list$tmax.yr, get.climdex.variable.list(c("tmax"), time.resolution="annual")) + checkEquals(climdex.var.list$tmax.tmin.all, get.climdex.variable.list(c("tmax", "tmin"))) + checkEquals(climdex.var.list$tmax.prec.yr, get.climdex.variable.list(c("tmax", "prec"), time.resolution="annual")) + checkEquals(climdex.var.list$prec.mon, get.climdex.variable.list(c("prec"), time.resolution="monthly")) + checkEquals(climdex.var.list$prec.yr, get.climdex.variable.list(c("prec"), time.resolution="annual")) + checkEquals(climdex.var.list$tmax.tmin.prec.all, get.climdex.variable.list(c("tmax", "tmin", "prec"))) + checkEquals(climdex.var.list$tmax.tmin.prec.sub, get.climdex.variable.list(c("tmax", "tmin", "prec"), climdex.vars.subset=c("su", "tr", "cdd", "gsl"))) + checkEquals(climdex.var.list$prec.sub, get.climdex.variable.list(c("prec"), climdex.vars.subset=c("su", "tr", "cdd", "gsl"))) + checkEquals(NULL, get.climdex.variable.list(c())) +} + +test.get.climdex.variable.metadata <- function() { + fn1 <- "tasmax_NAM44_CanRCM4_ERAINT_r1i1p1_1989-2009.nc" + fn2 <- "pr_day_CanESM2_rcp85_r2i1p1_20060101-21001231.nc" + checkEquals(climdex.var.meta$tavg.all.1, 
get.climdex.variable.metadata(climdex.var.list$tavg.all, fn1))
+  checkEquals(climdex.var.meta$prec.yr.2, get.climdex.variable.metadata(climdex.var.list$prec.yr, fn2))
+}
+
+test.get.climdex.functions <- function() {
+  checkEquals(climdex.functions$tmax.yr, get.climdex.functions(climdex.var.list$tmax.yr))
+  checkEquals(climdex.functions$tmax.tmin.prec.all.fclimdex, get.climdex.functions(climdex.var.list$tmax.tmin.prec.all))
+  checkEquals(climdex.functions$tmax.tmin.prec.all.notfclimdex, get.climdex.functions(climdex.var.list$tmax.tmin.prec.all, FALSE))
+}
+
+test.create.climdex.cmip5.filenames <- function() {
+  fn.split <- c(model="CanESM2", emissions="rcp45", run="r1i1p1", tstart="20100101", tend="20991231")
+
+  valid.tmax.mon.fn <- c("txxETCCDI_mon_CanESM2_rcp45_r1i1p1_201001-209912.nc", "txnETCCDI_mon_CanESM2_rcp45_r1i1p1_201001-209912.nc",
+                         "tx10pETCCDI_mon_CanESM2_rcp45_r1i1p1_201001-209912.nc", "tx90pETCCDI_mon_CanESM2_rcp45_r1i1p1_201001-209912.nc")
+  valid.tmax.all.fn <- c("suETCCDI_yr_CanESM2_rcp45_r1i1p1_2010-2099.nc", "idETCCDI_yr_CanESM2_rcp45_r1i1p1_2010-2099.nc",
+                         "txxETCCDI_mon_CanESM2_rcp45_r1i1p1_201001-209912.nc", "txxETCCDI_yr_CanESM2_rcp45_r1i1p1_2010-2099.nc",
+                         "txnETCCDI_mon_CanESM2_rcp45_r1i1p1_201001-209912.nc", "txnETCCDI_yr_CanESM2_rcp45_r1i1p1_2010-2099.nc",
+                         "tx10pETCCDI_mon_CanESM2_rcp45_r1i1p1_201001-209912.nc", "tx10pETCCDI_yr_CanESM2_rcp45_r1i1p1_2010-2099.nc",
+                         "tx90pETCCDI_mon_CanESM2_rcp45_r1i1p1_201001-209912.nc", "tx90pETCCDI_yr_CanESM2_rcp45_r1i1p1_2010-2099.nc",
+                         "wsdiETCCDI_yr_CanESM2_rcp45_r1i1p1_2010-2099.nc", "altwsdiETCCDI_yr_CanESM2_rcp45_r1i1p1_2010-2099.nc")
+
+  checkEquals(valid.tmax.mon.fn, create.climdex.cmip5.filenames(fn.split, climdex.var.list$tmax.mon))
+  checkEquals(valid.tmax.all.fn, create.climdex.cmip5.filenames(fn.split, climdex.var.list$tmax.all))
+}
+
+test.flatten.dims <- function() {
+  dat <- structure(1:8, .Dim=c(2, 2, 2))
+  valid.flat <- structure(1:8, .Dim = c(2L, 4L))
+  checkEquals(flatten.dims(dat, 2:3), valid.flat)
+}
+#nolint end
diff --git a/esmvaltool/diag_scripts/extreme_events/common_climdex_preprocessing_for_plots.R b/esmvaltool/diag_scripts/extreme_events/common_climdex_preprocessing_for_plots.R
new file mode 100755
index 0000000000..91916405ef
--- /dev/null
+++ b/esmvaltool/diag_scripts/extreme_events/common_climdex_preprocessing_for_plots.R
@@ -0,0 +1,275 @@
+# #############################################################################
+# common_climdex_preprocessing_for_plots.R
+#
+# Author: Marit Sandstad (CICERO, Norway)
+#       : Christian Wilhelm Mohr (CICERO, Norway)
+#
+#
+# #############################################################################
+# Description
+#    Common code to preprocess climdex files from multiple
+#    sources for plotting. This includes creating a common grid,
+#    cropping files to the same time span, regridding,
+#    land-sea masking and producing time means.
+#
+# Modification history
+#    20190506-vonhardenberg_jost: conversion to ESMValTool2
+#    20180725-mohr_christianwilhelm: modification of setTimeForFilesEqual()
+#                                    function
+#    20170920-sandstad_marit: creation
+#
+# #############################################################################

+##
+##
+## Method to create an ASCII grid file
+## to use as the target grid for regridding
+## @param idx_dir path of directory containing
+## files from which to create the grid
+##
+create_grid <- function(path = idx_dir, loc = "./gridDef") {
+  ## Picking the grid found in the first file to regrid over
+  first_file <- list.files(path,
+    pattern = paste0(".*", regrid_dataset, ".*\\.nc"),
+    full.names = TRUE
+  )[1]
+  cdo(
+    "griddes -delvar,time_bnds",
+    input = first_file,
+    stdout = loc,
+    options = "-s -O"
+  )
+}
+
+#
+# Method to create a landSeaMask on a suitable grid
+# @param regrid name w/path of gridfile to use
+# to put the landseamask on
+#
+create_land_sea_mask <- function(regrid = "./gridDef",
+                                 loc = "./",
+                                 landmask = "./landSeaMask.nc") {
+  # Test if gridfile exists
+  # otherwise call function to generate one
+  if (!file.exists(regrid)) {
+    create_grid(path = loc, loc = regrid)
+  }
+
+  ## Making topographic map
+  topof <- cdo("topo", options = "-O -f nc")
+
+  ## Regridding the topographic map to chosen grid
+  rtopof <-
+    cdo("remapcon",
+      args = regrid,
+      input = topof,
+      options = "-O"
+    )
+
+  # Set above sea-level gridpoints to missing
+  rtopomissf <- cdo("setrtomiss",
+    args = "0,9000",
+    input = rtopof,
+    options = "-O"
+  )
+
+  # Set above sea-level gridpoints to 1
+  rtopo1posf <- cdo("setmisstoc",
+    args = "1",
+    input = rtopomissf,
+    options = "-O"
+  )
+
+  # Set below sea-level gridpoints to missing
+  cdo(
+    "setrtomiss",
+    args = "-9000,0",
+    input = rtopo1posf,
+    output = landmask,
+    options = "-O"
+  )
+
+  unlink(c(topof, rtopof, rtopomissf, rtopo1posf))
+}
+
+##
+## Method to crop all index files for a single index
+## to the same time period.
+## The smallest common time period is chosen
+## @param path gives path to location of index files
+## @param idx lists the index under consideration
+## @param model_list provides the list of selected models for time cropping
+## @param time_cropped is the directory to put the time cropped files
+## @param max_start is an optional crop start
+## @param min_end is an optional crop end
+##
+set_time_for_files_equal <- function(path, # nolint
+                                     idx,
+                                     model_list,
+                                     time_cropped = "./timeCropped",
+                                     max_start = 0,
+                                     min_end = 2500) {
+  ## Getting a list of all the files for the index
+  models_avail <- basename(Sys.glob(file.path(
+    path,
+    paste(idx, "*.nc", sep = "")
+  )))
+
+  ## Selecting only the files from the model list
+  models <- vector(mode = "character", length = length(model_list))
+  for (i in seq_along(model_list)) {
+    models[i] <- models_avail[grep(
+      pattern = model_list[i],
+      x = models_avail
+    )]
+  }
+
+  print(models)
+
+  ## Checking if the folder exists and making it if not
+  print(time_cropped)
+  if (!file.exists(time_cropped)) {
+    dir.create(time_cropped)
+  }
+
+  ## Arrays to record original start and end years
+  start <- integer(length(models))
+  end <- integer(length(models))
+
+  i <- 1
+  # For-loop to find the minimum time interval
+  # so we can crop all files to this time interval
+  for (m in models) {
+    start[i] <- strtoi(substr(m, nchar(m) - 11, nchar(m) - 8))
+    end[i] <- strtoi(substr(m, nchar(m) - 6, nchar(m) - 3))
+
+    if (start[i] > max_start) {
+      max_start <- start[i]
+    }
+
+    if (end[i] < min_end) {
+      min_end <- end[i]
+    }
+    i <- i + 1
+  }
+  if (max_start >= min_end) {
+    print("No time overlap for files")
+    print(c(max_start, min_end))
+    for (m in models) {
+      file.copy(paste0(path, "/", m), paste0(time_cropped, "/", m))
+    }
+    return(c(max_start, min_end))
+  }
+
+  i <- 1
+  # For-loop to crop the files
+  for (m in models) {
+    ## If the file is already of appropriate length,
+    ## then just copy it over
+    if (start[i] == max_start && end[i] == min_end) {
+      file.copy(paste0(path, "/", m), paste0(time_cropped, "/", m))
+      ## Otherwise do the time cropping
+    } else {
+      beg <- max_start - start[i]
+      sto <- min_end - max_start + beg
+      newname <- paste(substr(m, 1, nchar(m) - 12),
+        max_start,
+        "-",
+        min_end,
+        ".nc",
+        sep = ""
+      )
+      nco(
+        "ncks",
+        paste0(
+          "-d time,",
+          beg,
+          ",",
+          sto,
+          " ",
+          path,
+          "/",
+          m,
+          " ",
+          time_cropped,
+          "/",
+          newname
+        )
+      )
+    }
+    i <- i + 1
+  }
+  return(c(max_start, min_end))
+}
+
+##
+##
+## Method that regrids and land-sea masks a file.
+## Time-meaned versions are also produced.
+## @param idx_raw gives the full path name of the file
+## @param regrid gives the file of the grid to regrid on
+## @param landmask gives the file that defines the landseamask to be used
+##
+##
+regrid_and_land_sea_mask <- function(idx_raw,
+                                     regrid = "./gridDef",
+                                     landmask = "./landSeaMask.nc",
+                                     regridded = "./Regridded",
+                                     land = "./Land",
+                                     loc = "./") {
+  ## Getting just the raw name of the file
+  idx_name <- basename(idx_raw)
+
+  ## If the landmask does not exist, we create one.
+  if (!file.exists(landmask)) {
+    create_land_sea_mask(
+      regrid = regrid,
+      loc = loc,
+      landmask = landmask
+    )
+  }
+
+  ## Checking if directories are present and creating them if not:
+  if (!dir.exists(regridded)) {
+    dir.create(regridded)
+  }
+  if (!dir.exists(land)) {
+    dir.create(land)
+  }
+
+  ## Regridding file:
+  varname <- strsplit(idx_name, "_")[[1]][1]
+  tmpsel <-
+    cdo("selvar",
+      args = varname,
+      input = idx_raw,
+      options = "-O"
+    )
+  cdo(
+    "remapcon",
+    args = regrid,
+    input = tmpsel,
+    output = paste0(regridded, "/", idx_name),
+    options = "-O"
+  )
+  unlink(tmpsel)
+
+  ## Applying landseamask:
+  cdo(
+    "div",
+    input = c(paste0(regridded, "/", idx_name), landmask),
+    output = paste0(land, "/", idx_name),
+    options = "-O"
+  )
+
+  ## Also produce timemean:
+  ## !! check whether this should be subject to some reference period
+  ## or time change
+  cdo(
+    "timmean",
+    input = paste0(land, "/", idx_name),
+    output = paste0(land, "/tm_", idx_name),
+    options = "-O" # nolint
+  )
+}
diff --git a/esmvaltool/diag_scripts/extreme_events/extreme_events.R b/esmvaltool/diag_scripts/extreme_events/extreme_events.R
new file mode 100644
index 0000000000..6412886d72
--- /dev/null
+++ b/esmvaltool/diag_scripts/extreme_events/extreme_events.R
@@ -0,0 +1,498 @@
+# #############################################################################
+# extreme_events.R
+#
+# Authors: Björn Brötz (DLR, Germany)
+#          Marit Sandstad (CICERO, Norway)
+#          Christian W. Mohr (CICERO, Norway)
+# #############################################################################
+# Description
+#    Calculate extreme events with plotting functionality
+#
+# Modification history
+#    20190506-vonhardenberg_jost: conversion to ESMValTool2
+#    20181006-mohr_christianwilhelm: observation read and sorting fixes
+#    20181003-mohr_christianwilhelm: correcting r.interface output for
+#                                    observation data.
diff --git a/esmvaltool/diag_scripts/extreme_events/extreme_events.R b/esmvaltool/diag_scripts/extreme_events/extreme_events.R
new file mode 100644
index 0000000000..6412886d72
--- /dev/null
+++ b/esmvaltool/diag_scripts/extreme_events/extreme_events.R
@@ -0,0 +1,498 @@
+# #############################################################################
+# extreme_events.R
+#
+# Authors: Björn Brötz (DLR, Germany)
+#          Marit Sandstad (CICERO, Norway)
+#          Christian W. Mohr (CICERO, Norway)
+# #############################################################################
+# Description
+#    Calculate extreme events with plotting functionality
+#
+# Modification history
+#    20190506-vonhardenberg_jost: conversion to ESMValTool2
+#    20181006-mohr_christianwilhelm: observation read and sorting fixes
+#    20181003-mohr_christianwilhelm: correcting r.interface output for
+#                                    observation data
+#    20180725-mohr_christianwilhelm: modification of timeseries_main() and
+#                                    climdex selection
+#    20180615-mohr_christianwilhelm: more clean-up of code
+#    20180131-lauer_axel: clean-up of code, adaptation to ESMValTool standards,
+#                         added tagging support
+#    20170920-sandstad_marit: modification to include plotting
+#    20160414-broetz_bjoern: written
+# #############################################################################
+
+library(tools)
+library(yaml)
+library(ncdf4)
+library(ncdf4.helpers)
+library(scales)
+library(RColorBrewer) # nolint
+
+# function to flatten nested lists
+flatten_lists <- function(x) {
+  if (!inherits(x, "list")) {
+    return(x)
+  } else {
+    return(unlist(c(lapply(x, flatten_lists)), recursive = FALSE))
+  }
+}
+
+provenance_record <- function(infile) {
+  xprov <- list(
+    ancestors = flatten_lists(as.list(infile)),
+    authors = list(
+      "broetz_bjoern",
+      "sandstad_marit",
+      "mohr_christianwilhelm",
+      "vonhardenberg_jost"
+    ),
+    references = list("zhang11wcc"),
+    projects = list("crescendo", "c3s-magic"),
+    caption = "Extreme events indices",
+    statistics = list("other"),
+    realms = list("atmos"),
+    themes = list("phys"),
+    domains = list("global")
+  )
+  return(xprov)
+}
+
+diag_scripts_dir <- Sys.getenv("diag_scripts")
+climdex_src <-
+  paste0(
+    diag_scripts_dir,
+    "/extreme_events/climdex.pcic.ncdf/R/ncdf.R"
+  ) # nolint
+source(paste0(
+  diag_scripts_dir,
+  "/extreme_events/climdex.pcic.ncdf/R/ncdf.R"
+)) # nolint
+source(paste0(diag_scripts_dir, "/shared/external.R")) # nolint
+source(paste0(diag_scripts_dir, "/extreme_events/cfg_climdex.R")) # nolint
+source(paste0(diag_scripts_dir, "/extreme_events/cfg_extreme.R")) # nolint
+source(
+  paste0(
+    diag_scripts_dir,
+    "/extreme_events/common_climdex_preprocessing_for_plots.R"
+  )
+) # nolint
+source(paste0(
+  diag_scripts_dir,
+  "/extreme_events/make_timeseries_plot.R"
+)) # nolint
+source(paste0(
+  diag_scripts_dir,
+  "/extreme_events/make_glecker_plot.R"
+)) # nolint
+
+# read settings and metadata files
+args <- commandArgs(trailingOnly = TRUE)
+settings <- yaml::read_yaml(args[1])
+for (myname in names(settings)) {
+  temp <- get(myname, settings)
+  assign(myname, temp)
+}
+
+list0 <- yaml::read_yaml(settings$input_files[1])
+# extract metadata
+models_name <- unname(sapply(list0, "[[", "dataset"))
+models_ensemble <- unname(sapply(list0, "[[", "ensemble"))
+models_start_year <- unname(sapply(list0, "[[", "start_year"))
+models_end_year <- unname(sapply(list0, "[[", "end_year"))
+models_experiment <- unname(sapply(list0, "[[", "exp"))
+models_project <- unname(sapply(list0, "[[", "project"))
+# the diagnostic name is taken from the first input file
+diag_base <- unname(sapply(list0, "[[", "diagnostic"))[1]
+#### Correct the r.interface output ####
+models_experiment[models_experiment == "No_value"] <- "No-values"
+
+variables <- c()
+climofiles <- c()
+models <- c()
+metadata <- c()
+
+# loop over variables
+for (i in seq_along(settings$input_files)) {
+  metadata <- yaml::read_yaml(settings$input_files[i])
+  models_name <- unname(sapply(metadata, "[[", "dataset"))
+  short_name <- unname(sapply(metadata, "[[", "short_name"))
+  variables <- c(variables, short_name)
+  models <- c(models, models_name)
+  climofiles <- c(climofiles, names(metadata))
+}
+
+print(paste(diag_base, ": starting routine"))
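For readers unfamiliar with the R diagnostic interface: a minimal sketch of how the settings and metadata files above fit together (paths are hypothetical; the real files are written by the tool at run time):

    settings <- yaml::read_yaml("settings.yml")           # this is args[1]
    metadata <- yaml::read_yaml(settings$input_files[1])  # one file per variable
    climofile <- names(metadata)[1]     # keys are preprocessed NetCDF paths
    dataset <- metadata[[climofile]]$dataset              # e.g. "MPI-ESM-LR"
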
+# create working dirs if they do not exist
+work_dir <- settings$work_dir
+regridding_dir <- settings$run_dir
+plot_dir <- settings$plot_dir
+dir.create(work_dir, recursive = TRUE, showWarnings = FALSE)
+dir.create(regridding_dir,
+  recursive = TRUE,
+  showWarnings = FALSE
+)
+dir.create(plot_dir, recursive = TRUE, showWarnings = FALSE)
+
+# setup provenance file and list
+provenance_file <-
+  paste0(regridding_dir, "/", "diagnostic_provenance.yml")
+provenance <- list()
+
+if (anyNA(base_range)) {
+  stop("Please choose a base_range!")
+}
+model_range <- c(
+  max(strtoi(models_start_year)),
+  min(strtoi(models_end_year))
+)
+if ((base_range[1] < max(strtoi(models_start_year))) |
+  (base_range[2] > min(strtoi(models_end_year)))) {
+  stop(
+    paste(
+      "Base range",
+      base_range[1],
+      "-",
+      base_range[2],
+      "outside available model data period",
+      model_range[1],
+      "-",
+      model_range[2]
+    )
+  )
+}
+print(paste("Base range:", base_range[1], "-", base_range[2]))
+
+if (anyNA(regrid_dataset)) {
+  regrid_dataset <- reference_datasets[1]
+  print(paste(
+    "Regrid dataset not set, choosing first reference dataset:",
+    regrid_dataset
+  ))
+}
+
+## Find earlier climdex indices in work folder
+climdex_files <- list.files(path = work_dir, pattern = "ETCCDI")
+
+# Fix input files by removing bounds variables
+print("Removing bounds from preprocessed files")
+for (i in seq_along(climofiles)) {
+  tmp <- tempfile()
+  nco(
+    "ncks",
+    paste(
+      "-C -O -x -v lat_bnds,lon_bnds,time_bnds",
+      climofiles[i], tmp
+    )
+  )
+  nco("ncatted", paste("-O -a bounds,time,d,,", tmp))
+  nco("ncatted", paste("-O -a bounds,lat,d,,", tmp))
+  nco("ncatted", paste("-O -a bounds,lon,d,,", tmp))
+  nco(
+    "ncatted",
+    paste0("-O -a coordinates,", variables[i], ",d,, ", tmp)
+  )
+  file.copy(tmp, climofiles[i], overwrite = TRUE)
+  unlink(tmp)
+}
+
+##
+## At this stage the climdex indices are calculated. This process is extremely
+## tedious, so checkpoints are in place to check whether the indices have
+## already been produced. If the climdex files are there, this process is
+## skipped. Delete the climdex files from the work folder if you wish to have
+## the climdex indices recalculated.
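A small sketch of the checkpoint idea used in the loop below (the file name is hypothetical; the real candidates are assembled from the model metadata): an index is only recomputed when no matching file is found in the work folder.

    expected <- "tnnETCCDI_yr_MODEL_historical_r1i1p1_1950-2000.nc"
    climdex_files <- list.files(path = work_dir, pattern = "ETCCDI")
    if (!any(grepl(expected, climdex_files, fixed = TRUE))) {
      print("climdex index must be (re)calculated")
    }
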
+## +for (model_idx in c(1:length(models_name))) { # nolint + author.data <- list(institution = "None", institution_id = "None") + template <- paste( + "var_timeres_", + models_name[model_idx], + "_", + models_experiment[model_idx], + "_", + models_ensemble[model_idx], + "_", + models_start_year[model_idx], + "01-", + models_end_year[model_idx], + "12.nc", + sep = "", + collapse = "" + ) + print("") + print(paste0(">>>>>>>> Template name: ", template)) + print("") + + idx_select <- unique(c(timeseries_idx, gleckler_idx)) + + ## Check point for existing files + climdex_file_check <- c() + for (idx in idx_select) { + if (grepl("mon", idx)) { + climdex_file_check <- c( + climdex_file_check, + paste0( + idx, + "_", + models_name[model_idx], + "_", + models_experiment[model_idx], + "_", + models_ensemble[model_idx], + "_", + models_start_year[model_idx], + "01-", + models_end_year[model_idx], + "12.nc" + ) + ) + } else { + climdex_file_check <- c( + climdex_file_check, + paste0( + idx, + "_", + models_name[model_idx], + "_", + models_experiment[model_idx], + "_", + models_ensemble[model_idx], + "_", + models_start_year[model_idx], + "-", + models_end_year[model_idx], + ".nc" + ) + ) + } + } + check_control <- vector("logical", length(climdex_file_check)) + n <- 0 + for (chck in climdex_file_check) { + n <- n + 1 + tmp <- length(grep(chck, climdex_files)) + check_control[n] <- (tmp > 0) + } + + if (!any(grepl("yr", idx_select))) { + timeres <- "mon" + write_plots <- FALSE + } else if (!any(grepl("mon", idx_select))) { + timeres <- "annual" + write_plots <- TRUE + } else { + timeres <- "all" + write_plots <- FALSE + } + + if (!all(check_control)) { + print("") + print(paste0(">>>>>>> Producing Indices for ", models_name[model_idx])) + print(climofiles[models == models_name[model_idx]]) + print("") + infiles <- climofiles[models == models_name[model_idx]] + indices <- sub("ETCCDI.*", "", idx_select) + # Find best chunk size + chunk <- 10 + if (!(is.logical(climdex_parallel))) { + nc <- nc_open(infiles[1]) + chunk <- + floor( + (nc$dim$time$len * nc$dim$lon$len * nc$dim$lat$len + + 1000.0) / (climdex_parallel * 1000000) + ) + chunk <- max(min(100, chunk), 1) + nc_close(nc) + print(paste("Chunk size:", chunk)) + } + create.indices.from.files( + infiles, + # nolint + work_dir, + template, + author.data, + base.range = base_range, + parallel = climdex_parallel, + verbose = TRUE, + climdex.vars.subset = indices, + climdex.time.resolution = timeres, + max.vals.millions = chunk, + src = climdex_src + ) + + # Set provenance for output files + # Get new list of files after computation + infiles <- climofiles[models == models_name[model_idx]] + print("Computing xprov") + xprov <- provenance_record(infiles) + climdex_files <- list.files( + path = work_dir, + pattern = paste0("ETCCDI.*", models_name[model_idx], ".*\\.nc"), + full.names = TRUE + ) + for (fname in climdex_files) { + print(paste("Provenance for ", fname)) + provenance[[fname]] <- xprov + } + } +} + +if (write_plots) { # nolint + ############################# + # A climdex processing section is needed here for observation data. 
+ # CMORized observation data found in the obs directory, + # has it's climdex indices calculated, + # which are then placed in the work/extreme_events directory + ############################# + + ## Splitting models from observations + + ################################### + #### Produce time series plots #### + ################################### + + if (anyNA(analysis_range)) { + analysis_range[1] <- max(strtoi(models_start_year)) + analysis_range[2] <- min(strtoi(models_end_year)) + print( + paste( + "Analysis range not defined, assigning model range:", + analysis_range[1], + "-", + analysis_range[2] + ) + ) + } + if ((analysis_range[1] < max(strtoi(models_start_year))) | + (analysis_range[2] > min(strtoi(models_end_year)))) { + stop( + paste( + "Analysis range", + analysis_range[1], + "-", + analysis_range[2], + "outside available model data period", + model_range[1], + "-", + model_range[2] + ) + ) + } + print(paste("Analysis range:", analysis_range[1], "-", analysis_range[2])) + + # These are forced here for testing + + print("------ Model datasets ------") + print(setdiff(models_name, reference_datasets)) + print("---- Reference datasets ----") + print(reference_datasets) + print("----------------------------") + if (ts_plt) { + print("") + print(paste0(">>>>>>>> TIME SERIES PROCESSING INITIATION")) + plotfiles <- timeseries_main( + path = work_dir, + idx_list = timeseries_idx, + model_list = setdiff(models_name, reference_datasets), + obs_list = reference_datasets, + plot_dir = plot_dir, + normalize = normalize, + start_yr = analysis_range[1], + end_yr = analysis_range[2] + ) + xprov <- provenance_record(list(climofiles)) + for (fname in plotfiles) { + provenance[[fname]] <- xprov + } + # Each timeseries file gets provenance from its reference dataset + for (model in reference_datasets) { + ncfiles <- list.files( + file.path(work_dir, "timeseries"), + pattern = model, + full.names = TRUE + ) + xprov <- provenance_record(list(climofiles[models == model])) + for (fname in ncfiles) { + provenance[[fname]] <- xprov + } + } + # The ensemble timeseries get provenance from all model datasets + ncfiles <- list.files(file.path(work_dir, "timeseries"), + pattern = "ETCCDI.*ens", + full.names = TRUE + ) + + ancestors <- sapply(setdiff(models_name, reference_datasets), + grep, + climofiles, + value = TRUE + ) + xprov <- provenance_record(ancestors) + for (fname in ncfiles) { + provenance[[fname]] <- xprov + } + } + + ############################### + #### Produce Gleckler plot #### + ############################### + if (glc_plt) { + print("") + print(paste0(">>>>>>>> GLECKLER PROCESSING INITIATION")) + + ## Check if Gleckler Array already exists + nidx <- length(gleckler_idx) # number of indices + nmodel <- length(models_name) # number of models + nobs <- length(reference_datasets) # number of observations + arrayname <- paste0( + "Gleckler-Array_", + nidx, + "-idx_", + nmodel, + "-models_", + nobs, + "-obs", + ".RDS" + ) + arraydirname <- paste0(plot_dir, "/", diag_base, "/", arrayname) + if (glc_arr) { + if (file.exists(arraydirname)) { + file.remove(arraydirname) + } + promptinput <- "y" + } + + if (file.exists(arraydirname)) { + promptinput <- "n" + } else { + promptinput <- "y" + } + + #### Running gleckler_main #### + plotfiles <- gleckler_main( + path = work_dir, + idx_list = gleckler_idx, + model_list = setdiff(models_name, reference_datasets), + obs_list = reference_datasets, + plot_dir = plot_dir, + promptinput = promptinput, + start_yr = analysis_range[1], + end_yr = 
analysis_range[2] + ) + + xprov <- provenance_record(list(climofiles)) + for (fname in plotfiles) { + provenance[[fname]] <- xprov + } + ncfiles <- list.files(file.path(work_dir, "gleckler/Gleck*")) + xprov <- provenance_record(list(climofiles)) + for (fname in ncfiles) { + provenance[[fname]] <- xprov + } + } +} + +# Write provenance to file +write_yaml(provenance, provenance_file) diff --git a/esmvaltool/diag_scripts/extreme_events/make_glecker_plot.R b/esmvaltool/diag_scripts/extreme_events/make_glecker_plot.R new file mode 100755 index 0000000000..3ca6e4a409 --- /dev/null +++ b/esmvaltool/diag_scripts/extreme_events/make_glecker_plot.R @@ -0,0 +1,852 @@ +# ############################################################################ +# make_glecker_plot.R +# +# Author: Christian W. Mohr (CICERO, Norway) +# Marit Sandstad (CICERO, Norway) +# +# +# ############################################################################ +# Description: +# Code to plot Glecker polygon diagram to compare climdex index +# performance between models and reanalysis. +# +# Modification history +# +# 20190506-vonhardenberg_jost: conversion to ESMValTool2 +# 20180601-mohr_christianwilhelm: re-creation (complete new script +# incorparating segments from +# "make_timeseries_plot.r" and +# "make_Glecker_plot.r") +# +# ############################################################################ + +gleckler_main <- + function(path = "./", + idx_list, + model_list, + obs_list, + plot_dir = "../plot/extreme_events/", + promptinput = promptinput, + start_yr = 2000, + end_yr = 2009) { + #### CLIMDEX PREPROCESSING #### + + ## For file structure and files + tsgrid <- paste(path, "/tsGridDef", sep = "") # nolint + time_cropped <- paste(path, "/timeCropped", sep = "") # nolint + landmask <- paste(path, "/landSeaMask.nc", sep = "") # nolint + regridded <- paste(path, "/regridded", sep = "") # nolint + land <- paste(path, "/Land", sep = "") # nolint + + nmodel <- length(model_list) # number of models + nidx <- length(idx_list) # number of indices + nobs <- length(obs_list) # number of observations + + if (file.exists( + paste0( + path, + "/gleckler/Gleckler-Array_", + nidx, + "-idx_", + nmodel, + "-models_", + nobs, + "-obs", + ".RDS" + ) + )) { + promptinput <- "n" + } + + if (promptinput == "y") { + # Initial nc-file time crop, regrid, land and plot purge + unlink(c( + time_cropped, regridded, land, + landmask, tsgrid + ), + recursive = TRUE + ) + + ## Initial grid and landmask creation reset + grid_and_landmask <- TRUE + + ## Combine model and observation list + modelandobs_list <- unique(c(model_list, obs_list)) + + ## Loop over the indices to produce a plot for each index + for (idx in idx_list) { + ## Time crop + returnvalue <- set_time_for_files_equal( + path = path, + idx = idx, + model_list = modelandobs_list, + time_cropped = time_cropped, + max_start = start_yr, + min_end = end_yr + ) + + max_start <- returnvalue[1] + min_end <- returnvalue[2] + + ## If there is no overlap in the files the index + ## should be skipped + if (max_start >= min_end) { + print(paste("No time overlap in files for index", idx)) + break + } + + ## Find the new model and observation names (after time cropping) + modelsandobs <- basename(Sys.glob(file.path( + time_cropped, + paste0(idx, "*.nc") + ))) + split_modelsandobs <- strsplit(modelsandobs, split = "_") + modelsandobs_index <- + unlist(lapply(split_modelsandobs, function(x) { + x[3] + })) + + ## new models + models <- + modelsandobs[which(modelsandobs_index %in% model_list)] + + ## 
new observations + obs <- modelsandobs[which(modelsandobs_index %in% obs_list)] + + ## Find the start year (to be used in plotting) + # start_yr <- strtoi(substr(models[1], nchar(models[1]) - 11, + # nchar(models[1]) - 8)) + # !New Grid and landseamask for each idx + # !(or just the first idx set) should be + # !produced here + if (grid_and_landmask) { + create_grid(path = path, loc = tsgrid) + create_land_sea_mask( + regrid = tsgrid, + loc = path, + landmask = landmask + ) + grid_and_landmask <- FALSE + } + + ## Loop over each file so it can be regridded + ## and landseaMasked + for (mo in modelsandobs) { + print(paste(time_cropped, "/", mo, sep = "")) + regrid_and_land_sea_mask( + idx_raw = paste(time_cropped, "/", mo, sep = ""), + regrid = tsgrid, + landmask = landmask, + regridded = regridded, + land = land, + loc = path + ) + } + } + + #### Gleckler Array Processing #### + rmserelarr <- gleckler_array( + path = land, + idx_list = idx_list, + model_list = model_list, + obs_list = obs_list + ) + + ## Save Array + glecdir <- paste0(path, "/gleckler") # nolint + if (!file.exists(glecdir)) { + dir.create(glecdir) + } + saveRDS( + object = rmserelarr, + file = paste0( + path, + "/gleckler/Gleckler-Array_", + nidx, + "-idx_", + nmodel, + "-models_", + nobs, + "-obs", + ".RDS" + ) + ) + saveRDS( + object = returnvalue, + file = paste0( + path, + "/gleckler/Gleckler-years.RDS" + ) + ) + + # Final cleanup + unlink(c( + time_cropped, regridded, land, + landmask, tsgrid + ), + recursive = TRUE + ) + } + + #### Gleckler Plotting #### + rmserelarr <- readRDS( + file = paste0( + path, + "/gleckler/Gleckler-Array_", + nidx, + "-idx_", + nmodel, + "-models_", + nobs, + "-obs", + ".RDS" + ) + ) + year_range <- readRDS(file = paste0( + path, + "/gleckler/Gleckler-years.RDS" + )) + + plotfile <- + gleckler_plotting( + arr = rmserelarr, + idx_list = idx_list, + model_list = model_list, + obs_list = obs_list, + plot_dir = plot_dir, + syear = year_range[1], + eyear = year_range[2] + ) + return(plotfile) + } + +#### Computing the RMSEs #### + +gleckler_array <- function(path = land, + idx_list = gleckler_idx, + model_list = model_list, + obs_list = obs_list) { + ## Produce an array to hold all the model and reanalysis means + + ## Input data for testing the plotting routine + nidx <- length(idx_list) # number of indices + nmodel <- length(model_list) # number of models + nobs <- length(obs_list) # number of reanalyses + + ## Check point for reanalysis data + if (nobs == 0) { + print("No reanalysis datasets provided") + break + } + + ## Function to calculate area mean + area.mean <- function(x, lat) { + nlon <- dim(x)[1] + nlat <- dim(x)[2] + + meanlat <- apply(x, 2, function(x) { + mean(x, na.rm = TRUE) + }) + + fi <- lat * 3.14159 / 180 + + wgt.prod <- meanlat * cos(fi) + + # At some latitudes there is no land and therfore no data. + nan.check <- is.nan(wgt.prod) + # The mean of missing data is not a number, and hench results in NaNs. + # These NaN must be removed in order to calculate the correct area mean. + + gl <- sum(wgt.prod[!nan.check]) + sumcos <- sum(cos(fi)[!nan.check]) + ar.m <- gl / sumcos + return(ar.m) + } + + ## Function to calculate the RMSE between the model and + ## observed climatology (RMSExy) + ## Equation 1, from Sillmann et. 
al 2013 + RMSE <- + function(model = tm_model_idx, + obs = tm_obs_idx, + lat = model_lat) { + RMSE <- sqrt(area.mean((model - obs)^2, lat)) + return(RMSE) + } + + # Array for the RMSE spaces in the array are created so that the + # RSMEall, ENSmean, ENSmedian and CMIP RMSE can be created + rmsearr <- array(NA, dim = c(nidx + 1, nmodel + 3, nobs)) + rmserelarr <- rmsearr + ensmodel_list <- list() + + i <- 2 + m <- 1 + o <- 1 + lat_collect <- TRUE + + for (i in seq_along(idx_list)) { + for (m in seq_along(model_list)) { + ## Read in model annual climatology + + tm_model <- nc_open(Sys.glob(file.path( + path, paste0( + "tm_", idx_list[i], + "_", model_list[m], + "*.nc" + ) + ))) + idxs <- unlist(strsplit(idx_list[i], split = "_"))[1] + tm_model_idx <- ncvar_get(tm_model, idxs) + + # extract latitudes for area mean calculations + if (lat_collect) { + model_lat <- ncvar_get(tm_model, "lat") + lat_collect <- FALSE + } + + nc_close(tm_model) + ensmodel_list[[m]] <- tm_model_idx + } + ## Create a new array for adding the time mean model matices + ensarr <- array(NA, dim = c( + nrow(tm_model_idx), + ncol(tm_model_idx), + length(ensmodel_list) + 2 + )) + + # Copy each matrix from the multimodel list to the array "ensarr". + # Notice the "+2" on the 3rd dimension. This is so later the model + # ensemble mean and median matrices can be added to the array. + for (n in seq_along(ensmodel_list)) { + ensarr[, , n + 2] <- ensmodel_list[[n]] + } + + ## Calculate the ensemble mean and median of + ## all the model time mean matrices + ensmean <- apply(ensarr, c(1, 2), function(x) { + mean(na.omit(x)) + }) + ensmedian <- apply(ensarr, c(1, 2), function(x) { + median(na.omit(x)) + }) + + # Place the ensemble model mean and medians into the + # first two matrices (3-dimention) of the array "ensarr" + ensarr[, , 1] <- ensmean + ensarr[, , 2] <- ensmedian + + j <- 1 + ## Calculate the RMSE for all the models and the ensemble mean and median + for (j in 1:dim(ensarr)[3]) { + ## Read in reannalysis annual climatology + for (o in seq_along(obs_list)) { + tm_obs <- nc_open(Sys.glob(file.path( + path, paste0( + "tm_", idx_list[i], + "_", obs_list[o], + "*.nc" + ) + ))) + tm_obs_idx <- ncvar_get(tm_obs, idxs) + nc_close(tm_obs) + rmsearr[i + 1, j, o] <- RMSE( + model = ensarr[, , j], + obs = tm_obs_idx, + lat = model_lat + ) # Calculate each RMSE and place value in RMSE-array + + ## Calculate the model standard deviation. + ## Later used for calculating the rmsemedian,std. + ## Denominator in equation 3, from Sillmann et. al 2013 + rmsearr[i + 1, ncol(rmsearr), o] <- + sqrt(area.mean(( + tm_obs_idx - area.mean(tm_obs_idx, + lat = model_lat + ) + )^2, + lat = model_lat + )) + } + } + } + + ## Calculate the RMSE median for the models + tmprmsearr <- rmsearr[, -c(1, 2, ncol(rmsearr)), ] + if (length(dim(tmprmsearr)) == 3) { + rmsemed <- apply(tmprmsearr, c(1, 3), function(x) { + median(x, na.rm = TRUE) + }) + } else { + rmsemed <- apply(tmprmsearr, 1, function(x) { + median(x, na.rm = TRUE) + }) + } + + ## Function to calculate the relative RMSE (RMSE'xy) + ## between the model and observed climatology + ## Equation 2, from Sillmann et. al 2013 + rmserel <- function(rmse, rmsemed) { + rmserel <- (rmse - rmsemed) / rmsemed + return(rmserel) + } + + ## Calculating the relative RMSE (RMSE'xy) + m <- 1 + for (m in 1:(ncol(rmsearr) - 1)) { + rmserelarr[, m, ] <- + rmserel(rmse = rmsearr[, m, ], rmsemed = rmsemed) + } + + ## Calculating the RMSE median,std. Equation 3, from Sillmann et. 
al 2013 + rmserelarr[, ncol(rmserelarr), ] <- + rmsemed / rmsearr[, ncol(rmsearr), ] + + ## Calculating the RSME mean + tmprmsearr <- rmserelarr[, -ncol(rmserelarr), ] + if (length(dim(tmprmsearr)) == 3) { + rmserelarr[1, -ncol(rmserelarr), ] <- apply( + tmprmsearr, c(2, 3), + function(x) { + mean(x, na.rm = TRUE) + } + ) + } else { + rmserelarr[1, -ncol(rmserelarr), ] <- apply( + tmprmsearr, c(2), + function(x) { + mean(x, na.rm = TRUE) + } + ) + } + print(rmserelarr) + return(rmserelarr) +} + +#### Plotting Routine #### +gleckler_plotting <- + function(arr = rmserelarr, + idx_list, + model_list, + obs_list, + plot_dir = "../plots/extreme_events/", + syear = max_start, + eyear = min_end) { + nidx <- length(idx_list) # number of indices + nmodel <- length(model_list) # number of models + nobs <- length(obs_list) # number of reanalyses + + ## Numbers for color scale + sclseq <- seq(-0.55, 0.55, 0.1) + + ## Colour scale + glc <- brewer.pal(length(sclseq) - 2, "RdYlBu") # nolint + glc <- c("#662506", glc, "#3f007d") + glc <- rev(glc) + + # Numbers for black & white scale + sclseq_bw <- seq(0.05, 1.15, 0.1) + sclseq_bw + glbw <- gray(seq(0, 1, length.out = length(sclseq_bw))) + glbw <- rev(glbw) + + ## Determining what shapes should be plotted, based on number of + ## observations + if (nobs == 1) { + # One reanalysis references + x1 <- c(0, 1, 1, 0) + y1 <- c(0, 0, 1, 1) + xs <- list(x1) + ys <- list(y1) + + # text coordinates + xtx <- 0.50 + ytx <- -0.25 + rotx <- 0 # text rotation in degrees + } + + if (nobs == 2) { + # Two reanalysis references + x1 <- c(0, 1, 1) # lower triangle + y1 <- c(0, 0, 1) # lower triangle + x2 <- c(0, 1, 0) # upper triangle + y2 <- c(0, 1, 1) # upper triangle + + xs <- list(x1, x2) + ys <- list(y1, y2) + + # text coordinates + xtx <- c(0.75, 0.25) + ytx <- c(-0.25, 1.25) + rotx <- c(0, 0) # text rotation in degrees + } + + if (nobs == 3) { + # Three reanalysis references + x1 <- c(0, 0.5, 0.5, 0) # bottom left + y1 <- c(0, 0, 0.5, 1) # bottom left + x2 <- c(0.5, 1, 1, 0.5) # bottom right + y2 <- c(0, 0, 1, 0.5) # bottom right + x3 <- c(0, 0, 0.5, 1, 1) # top + y3 <- c(1, 0.75, 0.5, 0.75, 1) # top + + xs <- list(x1, x2, x3) + ys <- list(y1, y2, y3) + + # text coordinates + xtx <- c(-0.25, 1.25, 0.5) + ytx <- c(0.25, 0.25, 1.25) + rotx <- c(90, 90, 0) # text rotation in degrees + } + + if (nobs == 4) { + # Four reanalysis references + x1 <- c(0, 0.5, 1) # bottom triangle + y1 <- c(0, 0.5, 0) # bottom triangle + x2 <- c(0, 0.5, 0) # left triangle + y2 <- c(0, 0.5, 1) # left triangle + x3 <- c(0, 0.5, 1) # top triangle + y3 <- c(1, 0.5, 1) # top triangle + x4 <- c(1, 0.5, 1) # right triangle + y4 <- c(1, 0.5, 0) # right triangle + + xs <- list(x1, x2, x3, x4) + ys <- list(y1, y2, y3, y4) + + # text coordinates + xtx <- c(0.5, -0.25, 0.5, 1.25) + ytx <- c(-0.25, 0.5, 1.25, 0.5) + rotx <- c(0, 90, 0, 90) # text rotation in degrees + } + + if (!(nobs %in% c(1, 2, 3, 4))) { + if (nobs == 0) { + print("No reanalysis dataset provided") + break + } else { + print( + paste( + "Too many reanalysis datasets provided.", + "Please choose between 1 and 4 datasets" + ) + ) + break + } + } + + print("--- Creating Gleckler plot ---") + img.adj <- gl_mar_par * 0.05 + width.fct <- ((nmodel + 3) / (nidx + 1)) + sum(img.adj[c(2, 4)]) + height.fct <- 1 + sum(img.adj[c(1, 3)]) + + figure_filename <- + paste( + plot_dir, + "/Gleckler_", + mip_name, + "_", + nmodel, + "-models_", + nidx, + "-idx_", + nobs, + "-obs_", + syear, + "-", + eyear, + ".", + output_file_type, + sep = "" + ) + + ## 
Choose output format for figure
+    if (tolower(output_file_type) == "png") {
+      png(
+        filename = figure_filename,
+        width = gl_png_res * (width.fct / height.fct),
+        height = gl_png_res,
+        units = gl_png_units,
+        pointsize = gl_png_pointsize,
+        bg = gl_png_bg
+      )
+    } else if (tolower(output_file_type) == "pdf") {
+      pdf(file = figure_filename)
+    } else if (tolower(output_file_type) == "eps") {
+      setEPS()
+      postscript(figure_filename)
+    }
+
+    par(
+      mfrow = c(1, 1),
+      mar = gl_mar_par,
+      xpd = FALSE,
+      oma = rep(0, 4)
+    )
+    plot(
+      x = c(0, 1 + gl_rmsespacer),
+      y = c(0, 1),
+      type = "n",
+      ann = FALSE,
+      xaxs = "i",
+      yaxs = "i",
+      bty = "n",
+      xaxt = "n",
+      yaxt = "n"
+    )
+
+    ## Array dimensions
+    xn <- ncol(arr)
+    yn <- nrow(arr)
+
+    ## Plotting RMSE of models, ensemble mean and median and RMSEall
+    for (xi in 1:(xn - 1)) {
+      for (yj in 1:yn) {
+        for (zk in 1:nobs) {
+          polygon(
+            x = (xs[[zk]] / xn) + ((xi - 1) / xn),
+            y = (ys[[zk]] / yn) + ((yn - yj) / yn),
+            col = glc[which.min(abs(sclseq - arr[yj, xi, zk]))]
+          )
+        }
+      }
+    }
+
+    ## Plotting the RMSE median standard deviation
+    for (yj in 2:yn) {
+      for (zk in 1:nobs) {
+        polygon(
+          x = (xs[[zk]] / xn) + ((xn - 1) / xn) + gl_rmsespacer,
+          y = (ys[[zk]] / yn) + ((yn - yj) / yn),
+          col = glbw[which.min(abs(sclseq_bw - arr[yj, xn, zk]))]
+        )
+      }
+    }
+
+    ## Produce the borders for the Gleckler plot
+    par(xpd = TRUE)
+    rect(
+      xleft = 0,
+      ybottom = 0,
+      xright = (1 - 1 / xn),
+      ytop = (1 - 1 / yn),
+      density = NULL,
+      angle = 45,
+      col = NA,
+      border = 1,
+      lty = par("lty"),
+      lwd = 4
+    )
+    rect(
+      xleft = 0,
+      ybottom = (1 - 1 / yn),
+      xright = (1 - 1 / xn),
+      ytop = 1,
+      density = NULL,
+      angle = 45,
+      col = NA,
+      border = 1,
+      lty = par("lty"),
+      lwd = 4
+    )
+
+    ## Scale for the Gleckler plot
+    gleckler_scale <- function(sclseq,
+                               glc,
+                               xn,
+                               scaling_factor,
+                               text.scaling_factor,
+                               xscale_spacer) {
+      par(xpd = TRUE)
+      ## Square legend
+      sqrxs <- c(0, 1, 1, 0)
+      sqrys <- c(0, 0, 1, 1)
+
+      # up-triangle legend
+      utrixs <- c(0, 1, 0.5)
+      utriys <- c(0, 0, 1)
+
+      # down-triangle legend
+      dtrixs <- c(0.5, 1, 0)
+      dtriys <- c(0, 1, 1)
+
+      # Legend number shifter
+      seq_shift <-
+        mean(diff(sclseq) / 2) # Shifts the legend numbers so that
+      # they represent the border values
+
+      # y-scale spacer
+      yscale_spacer <- (1 - scaling_factor) / 2
+
+      exlen <- length(glc)
+      for (a in 1:exlen) {
+        if (a == 1) {
+          xtmp <- scaling_factor * (dtrixs / xn) + 1 + xscale_spacer / xn
+          ytmp <-
+            (scaling_factor * (dtriys / exlen + (a - 1) / exlen) +
+              yscale_spacer)
+          polygon(
+            x = xtmp,
+            y = ytmp,
+            col = glc[a]
+          )
+          text(
+            x = max(xtmp),
+            y = max(ytmp),
+            round(sclseq[a] + seq_shift, 1),
+            cex = text.scaling_factor,
+            pos = 4
+          )
+        } else if (a == exlen) {
+          xtmp <- scaling_factor * (utrixs / xn) + 1 + xscale_spacer / xn
+          ytmp <-
+            (scaling_factor * (utriys / exlen + (a - 1) / exlen) +
+              yscale_spacer)
+          polygon(
+            x = xtmp,
+            y = ytmp,
+            col = glc[a]
+          )
+        } else {
+          xtmp <- scaling_factor * (sqrxs / xn) + 1 + xscale_spacer / xn
+          ytmp <-
+            (scaling_factor * (sqrys / exlen + (a - 1) / exlen) +
+              yscale_spacer)
+          polygon(
+            x = xtmp,
+            y = ytmp,
+            col = glc[a]
+          )
+          text(
+            x = max(xtmp),
+            y = max(ytmp),
+            round(sclseq[a] + seq_shift, 1),
+            cex = text.scaling_factor,
+            pos = 4
+          )
+        }
+      }
+    }
+
+    ## Plot scales
+    gleckler_scale(
+      sclseq,
+      glc,
+      xn,
+      scaling_factor = gl_scaling_factor,
+      text.scaling_factor = gl_text_scaling_factor,
+      xscale_spacer = gl_xscale_spacer_rmse
+    )
+
+    
gleckler_scale( + sclseq_bw, + glbw, + xn, + scaling_factor = gl_scaling_factor, + text.scaling_factor = gl_text_scaling_factor, + xscale_spacer = gl_xscale_spacer_rmsestd + ) + + ## Plotting symbol legend + exlen <- length(glc) + xsym1 <- + gl_scaling_factor * (0.5 / xn) + 1 + gl_xscale_spacer_rmse / xn + exlen <- length(glbw) + xsym2 <- + gl_scaling_factor * (0.5 / xn) + 1 + gl_xscale_spacer_rmsestd / xn + x.max_adj <- max(gl_symb_scaling_factor * (xs[[zk]] / xn)) + x.min_adj <- min(gl_symb_scaling_factor * (xs[[zk]] / xn)) + xmidadj <- (x.max_adj - x.min_adj) / 2 + + gl_symb_xshift <- (xsym1 + xsym2) / 2 - xmidadj + + for (zk in 1:nobs) { + xsym <- gl_symb_scaling_factor * (xs[[zk]] / xn) + gl_symb_xshift + ysym <- (gl_symb_scaling_factor * (ys[[zk]] / xn) + - gl_symb_yshift / xn) * width.fct / height.fct + print(paste("xs:", xsym)) + print(paste("ys:", ysym)) + polygon( + x = xsym, + y = ysym, + col = "white", + border = 1 + ) + + xtxsym <- + gl_symb_scaling_factor * (xtx[[zk]] / xn) + gl_symb_xshift + ytxsym <- (gl_symb_scaling_factor * (ytx[[zk]] / xn) + - gl_symb_yshift / xn) * width.fct / height.fct + + text( + x = xtxsym, + y = ytxsym, + labels = obs_list[zk], + adj = 0.5, + cex = gl_text_symb_scaling_factor, + srt = rotx[zk] + ) + } + + ## Label adjusting parameters + axlabsize <- 0.8 + lineadj <- -0.5 + + ## Add model labels + col_names <- c("ENSMEAN", "ENSMEDIAN", model_list) + xtcks1 <- seq((0.5 / xn), ((xn - 1) / xn), by = (1 / xn)) + axis( + side = 1, + at = xtcks1, + labels = col_names, + las = 2, + cex.axis = axlabsize, + tick = FALSE, + line = lineadj + ) + + xtcks2 <- ((xn - 1) / xn) + gl_rmsespacer + (0.5 / xn) + axis( + side = 1, + at = xtcks2, + labels = expression("RMSE"["std"]), + las = 2, + cex.axis = axlabsize, + tick = FALSE, + line = lineadj + ) + + ## Add index labels + row_names <- + vector(mode = "character", length = length(idx_list)) + for (i in seq_along(idx_list)) { + row_names[i] <- idx_df$idx_etccdi[which(idx_df$idx_etccdi_time + %in% idx_list[i])] + } + row_names <- rev(c(expression("RSME"["all"]), row_names)) + ytcks1 <- seq((1 / yn) * 0.5, 1, by = (1 / yn)) + axis( + side = 2, + at = ytcks1, + labels = row_names, + las = 2, + cex.axis = axlabsize, + tick = FALSE, + line = lineadj + ) + + mtext( + text = paste(mip_name, " global land ", syear, "-", eyear, sep = ""), + side = 3, + line = 1, + font = 2, + cex = 1.1 + ) + + dev.off() + return(figure_filename) + } diff --git a/esmvaltool/diag_scripts/extreme_events/make_timeseries_plot.R b/esmvaltool/diag_scripts/extreme_events/make_timeseries_plot.R new file mode 100755 index 0000000000..dd5cc89c98 --- /dev/null +++ b/esmvaltool/diag_scripts/extreme_events/make_timeseries_plot.R @@ -0,0 +1,726 @@ +# ############################################################################# +# make_timeseries_plot.R +# +# Author: Marit Sandstad (CICERO, Norway) +# : Christian W. 
Mohr (CICERO, Norway) +# +# ############################################################################# +# Description +# Code to plot a timeseries plot for a set of climdex indices +# +# Modification history +# 20190506-vonhardenberg_jost: conversion to ESMValTool2 +# 20180816-mohr_christianwilhelm: adding input procedure and plotting for/of +# observation data +# 20180725-mohr_christianwilhelm: modification of time croppin +# 20180618-mohr_christianwilhelm: alpha levels for polygon plotting, +# second y-axis, +# 20180131-lauer_axel: clean-up of code, adaptation to ESMValTool +# standards, added tagging, bugfixes: time +# axis, cdo, filenames +# 2017 0920-sandstad_marit: creation +# +# ############################################################################# + +## +## +## Method to call all preprocessing and loop through +## all models and indices then call plotting script +## to produce time series plots for a list of indices +## @param path is the path to where the original indices +## are stored +## @param idx_list lists the indices to be considered in +## this run. Defaults are the indices from the IPCC +## report. +## + +########################### + + +timeseries_main <- function(path = "../work/extreme_events", + idx_list, + model_list, + obs_list, + plot_dir = "./plot", + normalize = FALSE, + start_yr = 2000, + end_yr = 2006) { + ## For file structure and files + tsgrid <- paste(path, "/tsGridDef", sep = "") # nolint + time_cropped <- paste(path, "/timeCropped", sep = "") # nolint + landmask <- paste(path, "/landSeaMask.nc", sep = "") # nolint + regridded <- paste(path, "/regridded", sep = "") # nolint + land <- paste(path, "/Land", sep = "") # nolint + + # Initial nc-file time crop, regrid, land and plot purge + unlink(c(time_cropped, regridded, land, landmask, tsgrid), + recursive = TRUE + ) + + # Initial grid and landmask creation reset + gridandlandmask <- TRUE + + ## Loop over the indices to produce a plot for each index + plotfiles <- list() + idx <- idx_list[1] + for (idx in idx_list) { + ## Combine the list of models and observations + modelobs_list <- unique(c(model_list, obs_list)) + + ## Find the model files + modelandobs <- basename(Sys.glob(file.path( + path, + paste( + idx, "*.nc", + sep = "" + ) + ))) + + if (ts_data) { + ## Time crop + returnvalue <- set_time_for_files_equal( + path = path, + idx = idx, + model_list = modelobs_list, + time_cropped = time_cropped + ) # This is a temporary solution + + max_start <- returnvalue[1] + min_end <- returnvalue[2] + + ## If there is no overlap in the files the index + ## should be skipped + if (max_start >= min_end) { + print(paste("No time overlap in files for index", idx)) + break + } + + ## Find the new model files after time cropping + modelandobs <- basename(Sys.glob(file.path( + time_cropped, + paste0(idx, "*.nc") + ))) + + # !New Grid and landseamask for each idx + # !(or just the first idx set) should be + # !produced here + if (gridandlandmask) { + create_grid(path = path, loc = tsgrid) + create_land_sea_mask( + regrid = tsgrid, + loc = path, + landmask = landmask + ) + gridandlandmask <- FALSE + } + + ## Loop over each file so it can be regridded + ## and landseaMasked + for (m in modelandobs) { + print(paste(time_cropped, "/", m, sep = "")) + regrid_and_land_sea_mask( + idx_raw = paste0(time_cropped, "/", m), + regrid = tsgrid, + landmask = landmask, + regridded = regridded, + land = land, + loc = path + ) + } + + ## Then do the preprocessing + + time_series_preprocessing( + land = land, + idx = idx, + 
model_list = model_list,
+        obs_list = obs_list,
+        plot_dir = plot_dir,
+        work_dir = path,
+        normalize = normalize
+      )
+    }
+
+    ## Produce plot for this index
+    fname <- timeseries_plot(
+      plot_dir = plot_dir,
+      idx = idx,
+      obs_list = obs_list,
+      start_yr = start_yr,
+      end_yr = end_yr,
+      normalize = normalize
+    )
+    plotfiles <- c(plotfiles, fname)
+  }
+
+  # Final cleanup
+  unlink(c(time_cropped, regridded, land, landmask, tsgrid),
+    recursive = TRUE
+  )
+
+  return(plotfiles)
+}
+
+#
+# Method that preprocesses the files for a single index
+# in order to produce the data for its time series plot
+# @param land is the directory holding the land-masked index files
+# @param idx is the index to be processed.
+#
+time_series_preprocessing <- # nolint
+  function(land = "./Land",
+           idx = "tnnETCCDI_yr",
+           model_list = model_list,
+           obs_list = obs_list,
+           plot_dir = "./plot",
+           work_dir = "./work",
+           normalize = FALSE) {
+    tseriesdir <- paste0(work_dir, "/timeseries")
+    if (!file.exists(tseriesdir)) {
+      dir.create(tseriesdir)
+    }
+
+    ## List of indices which are never normalized:
+    pidx <- c(
+      "tn10pETCCDI_yr",
+      "tx10pETCCDI_yr",
+      "tn90pETCCDI_yr",
+      "tx90pETCCDI_yr",
+      "csdiETCCDI_yr",
+      "wsdiETCCDI_yr",
+      "tn10pETCCDI_mon",
+      "tx10pETCCDI_mon",
+      "tn90pETCCDI_mon",
+      "tx90pETCCDI_mon",
+      "csdiETCCDI_mon",
+      "wsdiETCCDI_mon"
+    )
+
+    # Getting a list of all the files for the index
+    modelsandobs <-
+      basename(Sys.glob(file.path(land, paste0(idx, "*.nc"))))
+    modelsandobssplitlist <- strsplit(modelsandobs, split = "_")
+    modelsandobssplit <-
+      unlist(lapply(modelsandobssplitlist, function(x) {
+        x[3]
+      }))
+
+    # Extracting only the model files
+    models <- modelsandobs[which(modelsandobssplit %in% model_list)]
+    print("These are the models:")
+    print(models)
+
+    ## Extracting only the observation files
+    obs_order <- which(modelsandobssplit %in% obs_list)
+    obs <- modelsandobs[obs_order]
+    print("These are the observations:")
+    print(obs)
+
+    #### NORMALIZE VALUES ####
+    if (normalize) {
+      # File string to be filled with the file names used for the
+      # aggregated statistics (ensmean, enspctl)
+      file_string_models <- ""
+      for (m in models) {
+        print(m)
+        if (idx %in% pidx) {
+          # Fieldmean results
+          cdo(
+            "fldmean",
+            input = paste0(land, "/", m),
+            output = paste0(land, "/", "fldm_", m),
+            options = "-O"
+          )
+
+          ## add the preprocessed file to the filestring
+          file_string_models <- paste(file_string_models, land,
+            "/fldm_", m, " ",
+            sep = ""
+          ) # nolint
+        } else {
+          # Subtract the time mean from the land files:
+          cdo(
+            "sub",
+            input = c(paste0(land, "/", m), paste0(land, "/tm_", m)), # nolint
+            output = paste0(land, "/", "norm_", m),
+            options = "-O"
+          )
+
+          # Detrended results:
+          cdo(
+            "detrend",
+            input = paste0(land, "/norm_", m), # nolint
+            output = paste0(land, "/", "detrend_", m),
+            options = "-O"
+          )
+
+          # Time standard deviation of the detrended data
+          cdo(
+            "timstd",
+            input = paste0(land, "/detrend_", m), # nolint
+            output = paste0(land, "/", "detrend_std_", m),
+            options = "-O"
+          )
+
+          # Divide the normalized data by the detrended standard deviation
+          cdo(
+            "div",
+            input = c(
+              paste0(land, "/norm_", m), # nolint
+              paste0(land, "/detrend_std_", m) # nolint
+            ),
+            output = paste0(land, "/detrend_standard_", m),
+            options = "-O" # nolint
+          )
+
+          # Fieldmean results
+          cdo(
+            "fldmean",
+            input = paste0(land, "/detrend_standard_", m), # nolint
+            output = paste0(land, "/detrend_std_fldm_", m),
+            options = "-O" # nolint
+          )
+
+          ## add the preprocessed file to the filestring
+          file_string_models <- paste(file_string_models, land,
+            "/detrend_std_fldm_", m, " ",
+            sep = ""
+          ) # nolint
+        }
+      }
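A toy check of the standardization implemented above with CDO, in plain R on a single series (hypothetical data; the diagnostic applies this per grid cell): each series is centred on its time mean and scaled by the standard deviation of its detrended anomalies, following Sillmann et al. (2013).

    x <- rnorm(50) + 0.03 * seq_len(50)           # series with a linear trend
    norm <- x - mean(x)                           # the "sub ... tm_" step
    detr <- residuals(lm(norm ~ seq_along(norm))) # the "detrend" step
    standardized <- norm / sd(detr)               # the "div ... detrend_std_" step
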
+      # Find model ensemble mean
+      cdo(
+        "ensmean",
+        input = file_string_models,
+        output = paste0(tseriesdir, "/", idx, "_ensmean_for_timeseries.nc"),
+        options = "-O"
+      )
+
+      # Find ensemble 25th percentile
+      cdo(
+        "enspctl",
+        args = "25",
+        input = file_string_models,
+        output = paste0(tseriesdir, "/", idx, "_25enspctl_for_timeseries.nc"),
+        options = "-O"
+      )
+
+      # Find ensemble 75th percentile
+      cdo(
+        "enspctl",
+        args = "75",
+        input = file_string_models,
+        output = paste0(tseriesdir, "/", idx, "_75enspctl_for_timeseries.nc"),
+        options = "-O"
+      )
+
+      n <- 0
+      for (o in obs) {
+        print(o)
+
+        if (idx %in% pidx) {
+          # Fieldmean results
+          cdo(
+            "fldmean",
+            input = paste0(land, "/", o),
+            output = paste0(land, "/", "fldm_", o),
+            options = "-O"
+          )
+
+          # Copy obs file to plot
+          n <- n + 1
+          file.copy(
+            paste0(land, "/fldm_", o), # nolint
+            paste0(
+              tseriesdir,
+              "/",
+              idx,
+              "_",
+              modelsandobssplit[obs_order[n]],
+              "_for_timeseries.nc"
+            )
+          )
+        } else {
+          # Subtract the time mean from the land files:
+          cdo(
+            "sub",
+            input = c(paste0(land, "/", o), paste0(land, "/tm_", o)), # nolint
+            output = paste0(land, "/norm_", o),
+            options = "-O" # nolint
+          )
+
+          # Detrended results:
+          cdo(
+            "detrend",
+            input = paste0(land, "/norm_", o), # nolint
+            output = paste0(land, "/detrend_", o),
+            options = "-O" # nolint
+          )
+
+          # Time standard deviation of the detrended data
+          cdo(
+            "timstd",
+            input = paste0(land, "/detrend_", o), # nolint
+            output = paste0(land, "/detrend_std_", o),
+            options = "-O" # nolint
+          )
+
+          # Divide the normalized data by the detrended standard deviation
+          cdo(
+            "div",
+            input = c(
+              paste0(land, "/norm_", o), # nolint
+              paste0(land, "/detrend_std_", o) # nolint
+            ),
+            output = paste0(land, "/detrend_standard_", o),
+            options = "-O" # nolint
+          )
+
+          # Fieldmean results
+          cdo(
+            "fldmean",
+            input = paste0(land, "/detrend_standard_", o), # nolint
+            output = paste0(land, "/detrend_std_fldm_", o),
+            options = "-O" # nolint
+          )
+
+          # Copy obs file to plot
+          n <- n + 1
+          file.copy(
+            paste0(land, "/detrend_std_fldm_", o), # nolint
+            paste0(
+              tseriesdir,
+              "/",
+              idx,
+              "_",
+              modelsandobssplit[obs_order[n]],
+              "_for_timeseries.nc"
+            )
+          )
+        }
+      }
+    }
+
+    #### ABSOLUTE VALUES ####
+    # Non-normalized values fieldmeans
+    if (!normalize) {
+      file_string_models <- ""
+      for (m in models) {
+        print(m)
+        # Fieldmean results
+        cdo(
+          "fldmean",
+          input = paste0(land, "/", m),
+          output = paste0(land, "/fldm_", m),
+          options = "-O" # nolint
+        )
+
+        ## add the preprocessed file to the filestring
+        file_string_models <- paste(file_string_models, land,
+          "/fldm_", m, " ",
+          sep = ""
+        ) # nolint
+      }
+      # Find model ensemble mean
+      cdo(
+        "ensmean",
+        input = file_string_models,
+        output = paste0(tseriesdir, "/", idx, "_ensmean_for_timeseries.nc"),
+        options = "-O"
+      )
+
+      # Find ensemble 25th percentile
+      cdo(
+        "enspctl",
+        args = "25",
+        input = file_string_models,
+        output = paste0(tseriesdir, "/", idx, "_25enspctl_for_timeseries.nc"),
+        options = "-O"
+      )
+
+      # Find ensemble 75th percentile
+      cdo(
+        "enspctl",
+        args = "75",
+        input = file_string_models,
+        output = paste0(tseriesdir, "/", idx, "_75enspctl_for_timeseries.nc"),
+        options = "-O"
+      )
+
+      ## Extracting only the observation files
+      obs_order <- which(modelsandobssplit %in% obs_list)
+      obs <- modelsandobs[obs_order]
+
+      print("These are the observations:")
+      print(obs)
+      n <- 0
+      for (o in obs) {
+        print(o)
+        # Fieldmean results
+        cdo(
+          "fldmean",
+          
input = paste0(land, "/", o), + output = paste0(land, "/fldm_", o), + options = "-O" # nolint + ) + # Copy obs file to plot + n <- n + 1 + file.copy( + paste0(land, "/fldm_", o), + # nolint + paste0( + tseriesdir, + "/", + idx, + "_", + modelsandobssplit[obs_order[n]], + "_for_timeseries.nc" + ) + ) + } + } + } + +# +# +# Method to plot the time series plot +# of single idx for already preprocessed data +# yearly data is assumed +# @param path - path to directory containing ensemble mean +# and percentile data. +# @param idx name of index to be processed +# @param start_yr start year for data to be used to convert +# values from days after start year format to +# year. +# + +timeseries_plot <- + function(plot_dir = "./plot", + idx = "tn10pETCCDI_yr", + obs_list, + start_yr = 2006, + end_yr = 2010, + normalize = FALSE) { + # Drawing parameters + leg_names <- c(mip_name, obs_list) + + ## Reading the netcdf data files into R + ## First ensemble mean file + ensm <- nc_open(paste( + work_dir, + "/timeseries/", + idx, + "_ensmean_for_timeseries.nc", + sep = "" + )) + + ## Then 25th percentile file + enspctl25 <- nc_open(paste( + work_dir, + "/timeseries/", + idx, + "_25enspctl_for_timeseries.nc", + sep = "" + )) + ## Finally 75th percentile file + enspctl75 <- nc_open(paste( + work_dir, + "/timeseries/", + idx, + "_75enspctl_for_timeseries.nc", + sep = "" + )) + + ## Reading in time variable and converting to years: + ts <- nc.get.time.series(ensm) # nolint + time_conv <- format(ts, "%Y") # extract years + + ## Stripping off the _yr tail to the index name + idx_no <- which(idx_df$idx_etccdi_time == idx) + idx_name <- paste(idx_df$idx_etccdi[idx_no], "ETCCDI", sep = "") + + # Reading in the y-variables to be plotted + # First the ensemble mean + idx_ensm <- ncvar_get(ensm, idx_name) + # Then the 25th percentile + idx_ens25 <- ncvar_get(enspctl25, idx_name) + # Finally the 75th percentile + idx_ens75 <- ncvar_get(enspctl75, idx_name) + + # Maximum and minimum x and y values + max.x <- end_yr + min.x <- start_yr + irange <- ((time_conv >= min.x) & (time_conv <= max.x)) + max.y <- + max(idx_ensm[irange], idx_ens25[irange], idx_ens75[irange]) + min.y <- + min(idx_ensm[irange], idx_ens25[irange], idx_ens75[irange]) + + # Reading in the observations and plotting via a loop + obsdata_list <- list() + n <- 0 + for (o in obs_list) { + n <- n + 1 + nc_obs <- nc_open(paste( + work_dir, + "/timeseries/", + idx, + "_", + o, + "_for_timeseries.nc", + sep = "" + )) + ts_obs <- nc.get.time.series(nc_obs) # nolint + time_conv_obs <- format(ts_obs, "%Y") # extract years + idx_obs <- ncvar_get(nc_obs, idx_name) + nc_close(nc_obs) + obsdata_list[[n]] <- list(o, as.numeric(time_conv_obs), idx_obs) + irange <- ((time_conv_obs >= min.x) & (time_conv_obs <= max.x)) + max.y <- max(max.y, idx_obs[irange]) + min.y <- min(min.y, idx_obs[irange]) + + if (n > length(ts_col_list)) { + print( + paste( + "Error: There are more observations,", + "than available color plotting parameters." 
+            )
+          )
+          print("Update the cfg_extreme.R file.")
+          dev.off()
+          break
+        }
+      }
+
+      # Setting the x- and y-range limits for plotting
+      xrng <- as.numeric(c(min.x, max.x))
+      yrng <- c(min.y, max.y)
+      print(xrng)
+      print(yrng)
+
+      ## Making name string for the plot
+      plotname <- paste(plot_dir,
+        "/",
+        idx,
+        "_",
+        length(obs_list),
+        "-obs_ensmean_timeseriesplot",
+        sep = ""
+      )
+
+      ## Setting device to write the plot to
+      figure_filename <- paste(plotname, output_file_type, sep = ".")
+
+      ## Choose output format for figure
+      if (tolower(output_file_type) == "png") {
+        png(
+          filename = figure_filename,
+          width = ts_png_width,
+          height = ts_png_height,
+          units = ts_png_units,
+          pointsize = ts_png_pointsize,
+          bg = ts_png_bg
+        )
+      } else if (tolower(output_file_type) == "pdf") {
+        pdf(file = figure_filename)
+      } else if (tolower(output_file_type) == "eps") {
+        setEPS()
+        postscript(figure_filename)
+      }
+
+      n <- 1
+      # Parameters for plot
+      par(mfrow = c(1, 1), mar = c(4.5, 4.5, 2, 3))
+      # Plotting first the ensemble mean
+      plot(
+        time_conv,
+        idx_ensm,
+        type = "l",
+        col = ts_col_list[n],
+        lty = ts_lty_list[n],
+        xlim = xrng,
+        ylim = yrng,
+        lwd = ts_lwd_list[n],
+        ann = FALSE,
+        xaxs = "i",
+        yaxt = "n"
+      )
+      # Then making a transparent polygon between the 25th and 75th percentile
+      polygon(
+        c(time_conv, rev(time_conv)),
+        c(idx_ens75, rev(idx_ens25)),
+        col = alpha(ts_col_list[n], 0.1),
+        border = NA
+      )
+
+      # Plotting the observations via a loop
+      n <- 0
+      for (o in obs_list) {
+        n <- n + 1
+        lines(
+          obsdata_list[[n]][[2]],
+          obsdata_list[[n]][[3]],
+          col = ts_col_list[n + 1],
+          lty = ts_lty_list[n + 1],
+          lwd = ts_lwd_list[n + 1]
+        ) # plot observation
+      }
+
+      # Produce a legend
+      legend(
+        "top",
+        legend = leg_names,
+        col = ts_col_list,
+        lty = ts_lty_list,
+        lwd = ts_lwd_list,
+        bty = "n",
+        ncol = 3
+      )
+
+      # Produce a first y-axis
+      axis(side = 2, at = pretty(yrng, 5))
+
+      # Produce a second y-axis
+      axis(side = 4, at = pretty(yrng, 5))
+
+      # Producing a title from info in netcdf file
+      title(main = idx_df$name[idx_no], font.main = 2)
+
+      # Choosing x-label
+      title(xlab = "Year")
+
+      # Choosing y-label from idx_ylab list
+      title(ylab = idx_ylab[idx_no])
+      # Resetting plotting device to default
+      dev.off()
+
+      # Close ensemble files
+      nc_close(ensm)
+      nc_close(enspctl25)
+      nc_close(enspctl75)
+
+      return(figure_filename)
+    }
diff --git a/esmvaltool/diag_scripts/eyring06jgr/eyring06jgr_fig01.ncl b/esmvaltool/diag_scripts/eyring06jgr/eyring06jgr_fig01.ncl
new file mode 100644
index 0000000000..9ba69fa6e2
--- /dev/null
+++ b/esmvaltool/diag_scripts/eyring06jgr/eyring06jgr_fig01.ncl
@@ -0,0 +1,367 @@
+; #############################################################################
+; SCRIPT TO REPRODUCE FIG. 1 OF EYRING ET AL. (JGR, 2006)
+; Authors: Irene Cionni (ENEA, Italy) and Veronika Eyring (DLR, Germany)
+; CCMI Project
+; #############################################################################
+;
+; Description:
+;    Vertical profile of the climatological mean bias and the climatological
+;    mean for selected seasons and latitudinal regions.
+;
+; Required diag_script_info attributes:
+;    latrange: minimum and maximum latitude for regional average,
+;              e.g. (/60, 90/)
+;    season: season for temporal average (e.g., "DJF", "MAM", "JJA", "SON")
+;    multimean: calculate multi-model mean (True or False)
+;    refModel: name of the reference model or observation for
+;              bias calculation (e.g. 
(/"ERA-Interim"/)) +; +; Optional diag_script_info attributes: +; latmin: minimum latitude for region selection +; latmax: maximum latitude for region selection +; Xmin: lower limit for the X axis +; Xmax: upper limit for the X axis +; Ymin: lower limit Y axis +; Ymax: upper limit Y axis +; start_year: start year for the climatology calculation +; end_year: end year for the climatology calculation +; +; Required variable_info attributes: +; +; Optional variable_info attributes: +; +; Modification history +; 20200320-righi_mattia: cleaned code +; 20180320-lauer_axel: added tags for reporting and call to write_references +; 20170315-cionni_irene: written +; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" +load "$diag_scripts/shared/plot/style.ncl" + +begin + + enter_msg(DIAG_SCRIPT, "") + + ; Get variables and datasets + var0 = variable_info[0]@short_name + info0 = select_metadata_by_name(input_file_info, var0) + dim_MOD1 = ListCount(info0) + names = metadata_att_as_array(info0, "dataset") + projects = metadata_att_as_array(info0, "project") + exps = metadata_att_as_array(info0, "exp") + ensembles = metadata_att_as_array(info0, "ensemble") + y1 = metadata_att_as_array(info0, "start_year") + y2 = metadata_att_as_array(info0, "end_year") + + ; Check for reference dataset definition + if (variable_info[0]@reference_dataset.eq."None") then + error_msg("f", DIAG_SCRIPT, "", "no reference dataset is specified") + end if + iref = ind(names.eq.variable_info[0]@reference_dataset) + + ; Check variable attributes + if (isatt(variable_info, "units")) then + units = variable_info@units + else + units = "K" + end if + + if (isatt(variable_info, "long_name")) then + long_name = variable_info@long_name + else + long_name = var0 + end if + + ; Set default values for optional diag_script_info attributes + set_default_att(diag_script_info, "season", "ANN") + set_default_att(diag_script_info, "latmin", -90.) + set_default_att(diag_script_info, "latmax", 90.) + set_default_att(diag_script_info, "Xmin", -30.) + set_default_att(diag_script_info, "Xmax", 20.) + set_default_att(diag_script_info, "Ymin", 1.) + set_default_att(diag_script_info, "Ymax", 1000.) + set_default_att(diag_script_info, "multimean", False) + set_default_att(diag_script_info, "start_year", max(y1)) + set_default_att(diag_script_info, "end_year", min(y2)) + + seasons = diag_script_info@season + styleset = diag_script_info@styleset + nseasons = dimsizes(seasons) + latrange = new((/nseasons, 2/), "float") + latrange(:, 0) = diag_script_info@latmin + latrange(:, 1) = diag_script_info@latmax + flag_mod = where(projects.ne."OBS" .and. 
projects.ne."obs4MIPs", 1, 0) + index_mod = ind(flag_mod.gt.0) + index_obs = ind(flag_mod.eq.0) + dim_MOD = dimsizes(index_mod) + dim_OBS = 0 + + if (.not.all(ismissing(index_obs))) then + dim_OBS = dimsizes(index_obs) + list_obs_mod = names(index_obs) + end if + + multimean = diag_script_info@multimean + if (multimean) then + dimTot = dim_MOD + dim_OBS + else + dimTot = dim_MOD + dim_OBS - 1 + end if + + year_start = toint(diag_script_info@start_year) + year_end = toint(diag_script_info@end_year) + + ; Styles + colors = project_style(info0, diag_script_info, "colors") + dashes = project_style(info0, diag_script_info, "dashes") + thicks = tofloat(project_style(info0, diag_script_info, "thicks")) + markers = project_style(info0, diag_script_info, "markers") + + ; Create output directories + system("mkdir -p " + config_user_info@work_dir) + system("mkdir -p " + config_user_info@plot_dir) + + ; Plot file type + file_type = config_user_info@output_file_type + if (ismissing(file_type)) then + file_type = "ps" + end if + +end + +begin + + nPanel = nseasons + plot = new(nPanel, "graphic") + plot_num = integertochar(ispan(97, 122, 1)) + rad = acos(-1.) / 180.0 + A0 = read_data(info0[iref]) + lev0 = A0&plev + nlev0 = dimsizes(lev0) + delete(A0) + + tmpData = new((/dim_MOD + dim_OBS, nPanel, nlev0, 2/), "float") + tmpData!0 = "line" + tmpData!1 = "season" + tmpData!2 = "plev" + tmpData!3 = "stat" + + Xdata = new((/dimTot, nPanel, nlev0, 2/), "float") + Xdata!0 = "line" + Xdata!1 = "season" + Xdata!2 = "plev" + Xdata!3 = "stat" + Xdata&plev = lev0 + + Xref = new((/nPanel, nlev0, 2/), "float") + plotMod = new(dimTot, typeof(names)) + plotColor = new(dimTot, typeof(names)) + plotDash = new(dimTot, typeof(names)) + plotThicks = new(dimTot, "float") + plotScatters = new(dimTot, typeof(names)) + LineScatters = new(dimTot, typeof(names)) + ik = 0 + + ; Start datasets loop + do imod = 0, dim_MOD1 - 1 + + ; Start seasons loop + do ipan = 0, nseasons - 1 + + ; Extract data + A0 = read_data(info0[imod]) + + ; Select latitude range + coslat = cos(A0&lat * rad) + coslat!0 = "lat" + coslat&lat = A0&lat + A0 := \ + dim_avg_wgt_Wrap(A0(:, :, {latrange(ipan, 0):latrange(ipan, 1)}), \ + coslat({latrange(ipan, 0):latrange(ipan, 1)}), 1) + delete(coslat) + + if (seasons(ipan).ne."ANN") then + tmp = time_operations(A0, year_start, year_end, \ + "extract", seasons(ipan), True) + dummy2 = time_operations(tmp, year_start, year_end, \ + "average", "yearly", True) + delete(tmp) + else + dummy2 = time_operations(A0, year_start, year_end, \ + "average", "yearly", True) + end if + delete(A0) + tmp = dim_avg_n_Wrap(dummy2, 0) + tmp_st = dim_stddev_n_Wrap(dummy2, 0) + delete(dummy2) + tmpData(imod, ipan, :, 0) = (/tmp/) + tmpData(imod, ipan, :, 1) = (/tmp_st/) + delete(tmp) + delete(tmp_st) + end do ; end seasons loop + + if (imod.ne.iref) then + Xdata(ik, :, :, :) = (/tmpData(imod, :, :, :)/) + plotMod(ik) = names(imod) + plotColor(ik) = colors(imod) + plotDash(ik) = dashes(imod) + plotThicks(ik) = thicks(imod) + plotScatters(ik) = 0. + LineScatters(ik) = "Lines" + ik = ik + 1 + else + Xref = tmpData(imod, :, :, :) + end if + + end do + + ; Calculate multimodel mean + if (multimean) then + Xdata(dimTot - 1, :, :, :) = (/dim_avg_n(Xdata(index_mod, :, :, :), 0)/) + plotMod(dimTot - 1) = "MMM" + plotColor(dimTot - 1) = "black" + plotDash(dimTot - 1) = 0 + LineScatters(dimTot - 1) = "Lines" + plotThicks(dimTot - 1) = 4. 
+ end if + + X_MMM_std = (/dim_stddev_n(Xdata(index_mod, :, :, :), 0)/) + + Xdata = (/Xdata - conform_dims(dimsizes(Xdata), Xref, (/1, 2, 3/))/) + Ydata = lev0 / 100. + + plot_d = new(nseasons, "graphic") + plot_xp_R_d = new(nseasons, "graphic") + plot_xp_M_d = new(nseasons, "graphic") + xp_R = new((/2 * nlev0, nseasons/), typeof(Xref)) + yp_R = new((/2 * nlev0, nseasons/), typeof(Xref)) + xp_M = new((/2 * nlev0, nseasons/), typeof(Xref)) + yp_M = new((/2 * nlev0, nseasons/), typeof(Xref)) + do j = 0, nlev0 - 1 + xp_R(j, :) = -1. * Xref(:, j, 1) + yp_R(j, :) = tofloat(lev0(j) / 100.) + yp_R(nlev0 + j, :) = tofloat(lev0(nlev0 - 1 - j) / 100) + xp_R(nlev0 + j, :) = (Xref(:, (nlev0 - 1 - j), 1)) + end do + + if (multimean) then + do j = 0, nlev0 - 1 + xp_M(j, :) = Xdata(dimTot - 1, :, j, 0) - X_MMM_std(:, j, 0) + yp_M(j, :) = tofloat(lev0(j) / 100.) + yp_M(nlev0 + j, :) = tofloat(lev0(nlev0 - 1 - j) / 100.) + xp_M(nlev0 + j, :) = Xdata(dimTot - 1, :, (nlev0 - 1 - j), 0) + \ + X_MMM_std(:, (nlev0 - 1 - j), 0) + end do + end if + + ; Plotting + plotpath = config_user_info@plot_dir + "eyring06f01_" + var0 + wks = gsn_open_wks(file_type, plotpath) + plotpath1 = config_user_info@plot_dir + "eyring06f01_legend_" + var0 + wks1 = gsn_open_wks(file_type, plotpath1) + + do ipanel = 0, nseasons - 1 + res = True + res@gsnDraw = False + res@gsnFrame = False + res@tfPolyDrawOrder = "Predraw" + res@trXMinF = diag_script_info@Xmin(ipanel) + res@trXMaxF = diag_script_info@Xmax(ipanel) + res@trYMinF = diag_script_info@Ymin(ipanel) + res@trYMaxF = diag_script_info@Ymax(ipanel) + res@tmYLMode = "Explicit" + res@tmYLValues = (/1, 3, 10, 30, 100, 300, 1000/) + res@tmYLLabels = (/1, 3, 10, 30, 100, 300, 1000/) + res@tmLabelAutoStride = True + + res@tiXAxisString = "Model " + long_name + " bias " + "(" + units + ")" + res@tiYAxisString = "Pressure (hPa)" + res@tiMainString = \ + seasons(ipanel) + " " + \ + lat_names(latrange(ipanel, 0)) + " - " + \ + lat_names(latrange(ipanel, 1)) + res@xyMarkLineModes = LineScatters + res@trYAxisType = "LogAxis" + res@trYReverse = True + res@xyLineThicknessF = 2. + res@xyLineDashSegLenF = 0.13 + res@xyLineColors = plotColor + res@xyDashPatterns = plotDash + res@xyMarkers = plotDash + res@xyMarkerSizeF = 0.004 + res@xyMarkerThicknessF = 1.8 + res@tmXMajorGrid = True + res@tmXMajorGridThicknessF = 1. 
+ res@tmXMajorGridLineDashPattern = 2 + + Xdata&plev = lev0 + Ydata!0 = "plev" + Ydata&plev = lev0 + plot_d(ipanel) = \ + gsn_csm_xy(wks, \ + Xdata(line|:, season|ipanel, {plev|80000:}, stat|0), \ + Ydata({plev|80000:}), \ + res) + + gsres = True + gsres@gsFillColor = "Snow3" + plot_xp_R_d(ipanel) = gsn_add_polygon(wks, plot_d(ipanel), \ + xp_R(:, ipanel), \ + yp_R(:, ipanel), gsres) + if (multimean) then + gsres1 = True + gsres1@gsFillColor = "cyan" + gsres1@gsFillOpacityF = 0.2 + plot_xp_M_d(ipanel) = gsn_add_polygon(wks, plot_d(ipanel), \ + xp_M(:, ipanel), \ + yp_M(:, ipanel), gsres1) + end if + end do + + ; Set panel resources + Pres = True + Pres@gsnFrame = False + Pres@gsnPanelBottom = 0.2 + Pres@gsnPanelFigureStrings = plot_num(0:nseasons - 1) + Pres@gsnPanelFigureStringsFontHeightF = 0.025 + Pres@gsnPanelFigureStringsPerimOn = False + Pres@amJust = "TopLeft" + + ; Call panel routine + if (nseasons.gt.1) then + gsn_panel(wks, plot_d, (/2, nseasons/2/), Pres) + else + gsn_panel(wks, plot_d, (/1, 1/), Pres) + end if + + pos_leg = "0.9" + add_legend(wks1, plotMod, plotColor, tofloat(plotDash), \ + LineScatters, plotThicks, pos_leg) + frame(wks1) + frame(wks) + + nc_filename = config_user_info@work_dir + "eyring06f01_" + var0 + ".nc" + Xdata@var = var0 + Xdata@diag_script = "eyring06f01" + Xdata&stat = (/"mean", "stddev"/) + Xdata&season = seasons + Xdata&line = plotMod + nc_outfile = ncdf_write(Xdata, nc_filename) + + ; Call provenance logger + log_provenance(nc_filename, \ + plotpath + "." + file_type, \ + "Seasonal vertical profiles of " + var0 + " bias", \ + (/"mean", "stddev"/), \ + (/"nhpolar", "shpolar"/), \ + "vert", \ + (/"cionni_irene", "eyring_veronika"/), \ + (/"eyring06jgr"/), \ + metadata_att_as_array(info0, "filename")) + +end diff --git a/esmvaltool/diag_scripts/eyring06jgr/eyring06jgr_fig05a.ncl b/esmvaltool/diag_scripts/eyring06jgr/eyring06jgr_fig05a.ncl new file mode 100644 index 0000000000..0aa8895a2e --- /dev/null +++ b/esmvaltool/diag_scripts/eyring06jgr/eyring06jgr_fig05a.ncl @@ -0,0 +1,401 @@ +; ############################################################################ +; SCRIPT TO REPRODUCE FIG. 5a OF EYRING ET AL. (JGR, 2006) +; Authors: Irene Cionni (ENEA, Italy) and Veronika Eyring (DLR, Germany) +; CCMI PROJECT +; ############################################################################ + +; Description +; Vertical profile of the climatological mean for a selected month +; and latitudinal region +; +; Required diag_script_info attributes (diagnostic specific) +; latrange: min lat and max lat over which the variable is averaged +; (e.g. (/60, 90/)); the script reads them as latmin/latmax +; season: month over which the variable is averaged +; (e.g. "03", "04", "09") +; multimean: calculate multi-model mean (True or False) +; refModel: name of the reference model or observation +; (e.g. (/"ERA-Interim"/))
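+; +; Example of hypothetical recipe settings for this script (illustrative +; values only, not taken from a shipped recipe): +; fig05a: +; script: eyring06jgr/eyring06jgr_fig05a.ncl +; latmin: 60. +; latmax: 90. +; month: "03" +; multimean: false +; refModel: ERA-Interim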
(/"ERA-Interim"/)) +; +; Optional diag_script_info attributes (diagnostic specific) +; XMax: max limit X axis +; XMin: min limit X axis +; levmax: max limit Y axis +; levmin: min limit Y axis +; start_year: year when start the climatology calculation +; start_year: year when end the climatology calculation +; +; Required variable_info attributes (variable specific) +; +; Caveats +; units for the variable are now handled in the pre-processor which +; means that the preprocessor must be adjusted if the variable is +; changed in the recipe from ozone to something else +; +; Modification history +; 20200403-hassler_birgit: cleaned code +; 20180320-lauer_axel: added tags for reporting and call to write_references +; 20170315-cionni_irene: written +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" +load "$diag_scripts/shared/plot/style.ncl" + +begin + + enter_msg(DIAG_SCRIPT, "") + + ; Get variables and datasets + var0 = variable_info[0]@short_name + info0 = select_metadata_by_name(input_file_info, var0) + dim_MOD1 = ListCount(info0) + names = metadata_att_as_array(info0, "dataset") + projects = metadata_att_as_array(info0, "project") + exps = metadata_att_as_array(info0, "exp") + ensembles = metadata_att_as_array(info0, "ensemble") + y1 = metadata_att_as_array(info0, "start_year") + y2 = metadata_att_as_array(info0, "end_year") + + ; Check for reference dataset definition + if (isatt(variable_info[0], "reference_dataset")) then + refname = variable_info[0]@reference_dataset + do imod = 0, dimsizes(names) - 1 + if (names(imod) .eq. refname) then + iref = imod + end if + end do + else + error_msg("f", DIAG_SCRIPT, "", "no reference dataset " + \ + "(variable_info[0]@reference_dataset) needs to be defined.") + end if + + if (isatt(variable_info, "long_name")) then + LONG_NAME = variable_info@long_name + else + LONG_NAME = var0 + end if + + ; Set default values for optional diag_script_info attributes + set_default_att(diag_script_info, "month", 03) + set_default_att(diag_script_info, "latmin", 75.) + set_default_att(diag_script_info, "latmax", 85.) + set_default_att(diag_script_info, "Xmax", 10.) + set_default_att(diag_script_info, "Xmin", 0.) + set_default_att(diag_script_info, "start_year", max(y1)) + set_default_att(diag_script_info, "end_year", min(y2)) + set_default_att(diag_script_info, "multimean", False) + + seasons = diag_script_info@month + month_names = \ + (/"JAN", "FEB", "MAR", "APR", "MAY", "JUN", \ + "JUL", "AUG", "SEP", "OCT", "NOV", "DEC"/) + seasons_name = month_names(toint(seasons) - 1) + + latrange = new((/dimsizes(seasons), 2/), "float") + latrange(:, 0) = diag_script_info@latmin + latrange(:, 1) = diag_script_info@latmax + seasons = diag_script_info@month + flag_mod = where(projects.ne."OBS" .and. 
projects.ne."obs4MIPs", 1, 0) + index_mod = ind(flag_mod.gt.0) + index_obs = ind(flag_mod.eq.0) + dim_MOD = dimsizes(index_mod) + dim_OBS = 0 + + ; Styles + styleset = diag_script_info@styleset + colors = project_style(info0, diag_script_info, "colors") + dashes = project_style(info0, diag_script_info, "dashes") + thicks = tofloat(project_style(info0, diag_script_info, "thicks")) + markers = project_style(info0, diag_script_info, "markers") + + if (.not.all(ismissing(index_obs))) then + dim_OBS = dimsizes(index_obs) + list_obs_mod = names(index_obs) + end if + + Max_lev = diag_script_info@levmax + Min_lev = diag_script_info@levmin + XMax = diag_script_info@Xmax + XMin = diag_script_info@Xmin + + multimean = diag_script_info@multimean + if (multimean) then + dimTot = dim_MOD + dim_OBS + 1 + else + dimTot = dim_MOD + dim_OBS + end if + + year_start = toint(diag_script_info@start_year) + year_end = toint(diag_script_info@end_year) + + ; make sure path for (mandatory) netcdf output exists + work_dir = config_user_info@work_dir + "/" + + ; Create work dir + system("mkdir -p " + work_dir) + + ; Plot file type + file_type = config_user_info@output_file_type + if (ismissing(file_type)) then + file_type = "ps" + end if + +end + +begin + + aux_title_info = "" + plotpath = config_user_info@plot_dir + "eyring06f05a_" + var0 + wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "eyring06f05a_" + var0) + wks = gsn_open_wks(file_type, plotpath) + plotpath1 = config_user_info@plot_dir + "eyring06f05a_legend_" + var0 + wks1 = get_wks("dummy_for_wks", DIAG_SCRIPT, "eyring06f05a_legend_" + var0) + wks1 = gsn_open_wks(file_type, plotpath1) + nPanel = dimsizes(seasons) + plot = new(nPanel, "graphic") + plot_num = integertochar(ispan(97, 122, 1)) + rad = 4.0 * atan(1.0) / 180.0 + A0 = read_data(info0[iref]) + lev0 = A0&plev + dimLev = dimsizes(lev0) + delete(A0) + + tmpData = new((/dim_MOD + dim_OBS, nPanel, dimsizes(lev0), 2/), "float") + tmpData!0 = "line" + tmpData!1 = "season" + tmpData!2 = "plev" + tmpData!3 = "stat" + + Xdata = new((/dimTot, nPanel, dimsizes(lev0), 2/), "float") + Xdata!0 = "line" + Xdata!1 = "season" + Xdata!2 = "plev" + Xdata!3 = "stat" + Xdata&plev = lev0 + + Xref = new((/ nPanel, dimsizes(lev0), 2/), "float") + plotMod = new(dimTot, typeof(names)) + plotColor = new(dimTot, typeof(names)) + plotDash = new(dimTot, typeof(names)) + plotThicks = new(dimTot, "float") + plotScatters = new(dimTot, typeof(names)) + LineScatters = new(dimTot, typeof(names)) + ik = 0 + + ; Start datasets loop + do imod = 0, dim_MOD1 - 1 + + ; Start seasons loop + do ipan = 0, dimsizes(seasons) - 1 + + ; Extract data + A0 = read_data(info0[imod]) + + printVarSummary(A0) + coslat = cos(A0&lat * rad) + coslat!0 = "lat" + coslat&lat = A0&lat + dummy1 = \ + dim_avg_wgt_Wrap(A0(:, :, {latrange(ipan, 0):latrange(ipan, 1)}), \ + coslat({latrange(ipan, 0):latrange(ipan, 1)}), 1) + delete(coslat) + delete(A0) + unit_string = dummy1@units + + if (seasons(ipan).ne."ANN") then + dummy2 = time_operations(dummy1, year_start, year_end, \ + "extract", seasons(ipan), True) + else + time = dummy1&time + years1 = cd_calendar(time, 0) + dummy2a = month_to_annual(dummy1(:, :), 1) + dummy2a&year = years1(::12, 0) + dummy2 = dummy2a({year_start:year_end}, :) + delete(time) + delete(years1) + delete(dummy2a) + end if + delete(dummy1) + tmp = dim_avg_n_Wrap(dummy2, 0) + tmp_st = dim_stddev_n_Wrap(dummy2, 0) + delete(dummy2) + tmpData(imod, ipan, :, 0) = (/tmp/) + tmpData(imod, ipan, :, 1) = (/tmp_st/) + delete(tmp) + delete(tmp_st) + end do ; 
end seasons loop + + Xdata(imod, :, :, :) = (/tmpData(imod, :, :, :)/) + plotMod(imod) = names(imod) + plotColor(imod) = colors(imod) + plotDash(imod) = dashes(imod) + plotThicks(imod) = thicks(imod) + plotScatters(imod) = 0. + LineScatters(imod) = "Lines" + + if (imod.eq.iref) then + Xref = tmpData(imod, :, :, :) + plotDash(imod) = 16. + plotScatters(imod) = 16. + LineScatters(imod) = "Markers" + end if + + end do ; end model loop + + if (multimean) then + Xdata(dimTot - 1, :, :, :) = \ + (/dim_avg_n(Xdata(index_mod, :, :, :), 0)/) + plotMod(dimTot - 1) = "MMM" + plotColor(dimTot - 1) = "Black" + plotDash(dimTot - 1) = 0. + plotThicks(dimTot - 1) = 4. + LineScatters(dimTot - 1) = "Lines" + end if + + ; Calculate multimodel mean + X_MMM_std = (/dim_stddev_n(Xdata(index_mod, :, :, :), 0)/) + + Ydata = lev0 / 100. + + plot_d = new(dimsizes(seasons), "graphic") + plot_xp_R_d = new(dimsizes(seasons), "graphic") + plot_xp_M_d = new(dimsizes(seasons), "graphic") + xp_R = new((/2 * dimsizes(lev0), dimsizes(seasons)/), typeof(Xref)) + yp_R = new((/2 * dimsizes(lev0), dimsizes(seasons)/), typeof(Xref)) + + xp_M = new((/2 * dimsizes(lev0), dimsizes(seasons)/), typeof(Xref)) + yp_M = new((/2 * dimsizes(lev0), dimsizes(seasons)/), typeof(Xref)) + do j = 0, dimsizes(lev0) - 1 + xp_R(j, :) = (Xref(:, j, 0) - Xref(:, j, 1)) + yp_R(j, :) = tofloat(lev0(j) / 100.) + yp_R(dimsizes(lev0) + j, :) = \ + tofloat(lev0(dimsizes(lev0) - 1 - j) / 100) + xp_R(dimsizes(lev0) + j, :) = \ + Xref(:, (dimsizes(lev0) - 1 - j), 0) + \ + Xref(:, (dimsizes(lev0) - 1 - j), 1) + end do + + if (multimean) then + do j = 0, dimsizes(lev0) - 1 + xp_M(j, :) = Xdata(dimTot - 1, :, j, 0) - X_MMM_std(:, j, 0) + yp_M(j, :) = tofloat(lev0(j) / 100.) + yp_M(dimsizes(lev0) + j, :) = \ + tofloat(lev0(dimsizes(lev0) - 1 - j) / 100.) 
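+ ; trace the upper (mean + stddev) edge in reverse level order so that + ; the shaded polygon closes on itself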
+ xp_M(dimsizes(lev0) + j, :) = \ + Xdata(dimTot - 1, :, dimsizes(lev0) - 1 - j, 0) + \ + X_MMM_std(:, dimsizes(lev0) - 1 - j, 0) + end do + end if + + ; Plotting + do ipanel = 0, dimsizes(seasons) - 1 + res = True + res@gsnDraw = False + res@gsnFrame = False + res@tfPolyDrawOrder = "Predraw" + + res@trXMaxF = XMax(ipanel) + res@trXMinF = XMin(ipanel) + res@trYMaxF = Max_lev(ipanel) + res@trYMinF = Min_lev(ipanel) + res@tmYLMode = "Explicit" + res@tmYLValues = (/1, 2, 3, 5, 7, 10, 20, 30, 50, 70, 100, 200/) + res@tmYLLabels = \ + (/"1", "2", "3", "5", "7", "10", \ + "20", "30", "50", "70", "100", "200"/) + res@tmLabelAutoStride = True + + res@vpHeightF = 0.6 ; change aspect ratio of plot + res@vpWidthF = 0.45 + + ; y-axis label + res@tiYAxisString = "Pressure (hPa)" + ; x-axis label + res@tiXAxisString = LONG_NAME + " " + "(" + unit_string + ")" + + res@tiMainString = \ + LONG_NAME + " " + \ + lat_names(latrange(ipanel, 0)) + " - " + \ + lat_names(latrange(ipanel, 1)) + " " + \ + seasons_name(ipanel) + + res@xyMarkLineModes = LineScatters + + res@trYAxisType = "LogAxis" + res@trYReverse = True + res@xyLineThicknessF = 4.0 + res@xyLineDashSegLenF = 0.13 + res@xyLineColors = plotColor ; line color + res@xyDashPatterns = plotDash + res@xyMarkers = plotDash + res@xyMarkerSizeF = 0.01 + res@xyMarkerThicknessF = 1.8 + + Xdata&plev = lev0 + Ydata!0 = "plev" + Ydata&plev = lev0 + plot_d(ipanel) = \ + gsn_csm_xy(wks, \ + Xdata(line|:, season|ipanel, plev|:, stat|0), \ + Ydata(plev|:), \ + res) + + gsres = True + + gsres@gsFillColor = "Snow3" + plot_xp_R_d(ipanel) = gsn_add_polygon(wks, plot_d(ipanel), \ + xp_R(:, ipanel), \ + yp_R(:, ipanel), gsres) + if (multimean) then + gsres1 = True + gsres1@gsFillColor = "cyan" + gsres1@gsFillOpacityF = 0.2 + plot_xp_M_d(ipanel) = gsn_add_polygon(wks, plot_d(ipanel), \ + xp_M(:, ipanel), \ + yp_M(:, ipanel), gsres1) + end if + end do + + ; Set panel resources + Pres = True + Pres@gsnFrame = False + Pres@gsnPanelBottom = 0.2 + Pres@gsnPanelFigureStrings = plot_num(0:dimsizes(seasons) - 1) + Pres@gsnPanelFigureStringsFontHeightF = 0.025 + Pres@gsnPanelFigureStringsPerimOn = False + Pres@amJust = "TopLeft" + + gsn_panel(wks, plot_d, (/1, dimsizes(seasons)/), Pres) + frame(wks) + + add_legend(wks1, plotMod, plotColor, tofloat(plotDash), \ + LineScatters, plotThicks + 2., "0.9") + frame(wks1) + + nc_filename = work_dir + "eyring06f05a_" + var0 + ".nc" + Xdata@var = var0 + Xdata@diag_script = "eyring06f05a" + Xdata&stat = (/"mean", "stddev"/) + Xdata&season = seasons + Xdata&line = plotMod + Xdata&plev = lev0 + nc_outfile = ncdf_write(Xdata, nc_filename) + + ; Call provenance logger + log_provenance(nc_filename, \ + plotpath + "." + file_type, \ + "Vertical profile climatological means of " + var0, \ + (/"mean"/), \ + (/"nhpolar", "shpolar", "trop"/), \ + "vert", \ + (/"cionni_irene", "eyring_veronika"/), \ + (/"eyring06jgr"/), \ + metadata_att_as_array(info0, "filename")) + + +end diff --git a/esmvaltool/diag_scripts/eyring06jgr/eyring06jgr_fig05b.ncl b/esmvaltool/diag_scripts/eyring06jgr/eyring06jgr_fig05b.ncl new file mode 100644 index 0000000000..5c60744068 --- /dev/null +++ b/esmvaltool/diag_scripts/eyring06jgr/eyring06jgr_fig05b.ncl @@ -0,0 +1,373 @@ +; ############################################################################ +; SCRIPT TO REPRODUCE FIG. 5b OF EYRING ET AL. (JGR, 2006)
+; Authors: Irene Cionni (ENEA, Italy) and Veronika Eyring (DLR, Germany) +; CCMI PROJECT +; ############################################################################ + +; Description +; Latitudinal profile of the climatological mean at a selected pressure +; level for a selected month +; +; Required diag_script_info attributes (diagnostic specific) +; lev: pressure level(s) at which the variable is selected; must be +; levels of the reference dataset +; month: month over which the variable is averaged +; (e.g. "03", "04", "09") +; multimean: calculate multi-model mean (True or False) +; +; Optional diag_script_info attributes (diagnostic specific) +; YMax: max limit of the Y axis +; YMin: min limit of the Y axis +; start_year: first year of the climatology calculation +; end_year: last year of the climatology calculation +; +; Required variable_info attributes (variable specific) +; +; Caveats +; +; Modification history +; 20200320-hassler_birgit: cleaned code +; 20190228-cionni_irene: written +; ############################################################################ + +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" +load "$diag_scripts/shared/plot/style.ncl" + +begin + + enter_msg(DIAG_SCRIPT, "") + + ; Get variables and datasets + var0 = variable_info[0]@short_name + info0 = select_metadata_by_name(input_file_info, var0) + dim_MOD1 = ListCount(info0) + names = metadata_att_as_array(info0, "dataset") + projects = metadata_att_as_array(info0, "project") + exps = metadata_att_as_array(info0, "exp") + ensembles = metadata_att_as_array(info0, "ensemble") + y1 = metadata_att_as_array(info0, "start_year") + y2 = metadata_att_as_array(info0, "end_year") + + ; Check for reference dataset definition + if (isatt(variable_info[0], "reference_dataset")) then + refname = variable_info[0]@reference_dataset + do imod = 0, dimsizes(names) - 1 + if (names(imod) .eq. refname) then + iref = imod + end if + end do + else + iref = 0 + end if + + ; Check variable attributes + if (isatt(variable_info, "units")) then + UNITS = variable_info@units + else + UNITS = "ppmv" + end if + + if (isatt(variable_info, "long_name")) then + LONG_NAME = variable_info@long_name + else + LONG_NAME = var0 + end if + + ; Set default values for optional diag_script_info attributes + set_default_att(diag_script_info, "month", 03) + set_default_att(diag_script_info, "styleset", "CMIP5") + set_default_att(diag_script_info, "lev", 5000.) + set_default_att(diag_script_info, "YMax", 10.) + set_default_att(diag_script_info, "YMin", 0.) + set_default_att(diag_script_info, "start_year", max(y1)) + set_default_att(diag_script_info, "end_year", min(y2)) + set_default_att(diag_script_info, "multimean", False) + + seasons = diag_script_info@month + month_names = \ + (/"JAN", "FEB", "MAR", "APR", "MAY", "JUN", \ + "JUL", "AUG", "SEP", "OCT", "NOV", "DEC"/) + seasons_name = month_names(toint(seasons) - 1) + + lev = tofloat(diag_script_info@lev) + colors = project_style(info0, diag_script_info, "colors") + dashes = project_style(info0, diag_script_info, "dashes") + thicks = tofloat(project_style(info0, diag_script_info, "thicks")) + markers = project_style(info0, diag_script_info, "markers") + + flag_mod = where(projects.ne."OBS" .and. 
projects.ne."obs4MIPs", 1, 0) + index_mod = ind(flag_mod.gt.0) + index_obs = ind(flag_mod.eq.0) + dim_MOD = dimsizes(index_mod) + dim_OBS = 0 + + if (.not.all(ismissing(index_obs))) then + dim_OBS = dimsizes(index_obs) + list_obs_mod = names(index_obs) + end if + + YMax = diag_script_info@YMax + YMin = diag_script_info@YMin + + multimean = diag_script_info@multimean + if (multimean) then + dimTot = dim_MOD + dim_OBS + 1 + else + dimTot = dim_MOD + dim_OBS + end if + year_start = toint(diag_script_info@start_year) + year_end = toint(diag_script_info@end_year) + + ; make sure path for (mandatory) netcdf output exists + work_dir = config_user_info@work_dir + "/" + + ; Create work dir + system("mkdir -p " + work_dir) + + ; Plot file type + file_type = config_user_info@output_file_type + if (ismissing(file_type)) then + file_type = "ps" + end if + +end + +begin + + aux_title_info = "" + plotpath = config_user_info@plot_dir + "eyring06f05b_" + var0 + wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "eyring06f05b_" + var0) + wks = gsn_open_wks(file_type, plotpath) + plotpath1 = config_user_info@plot_dir + "eyring06f05b_legend_" + var0 + wks1 = get_wks("dummy_for_wks", DIAG_SCRIPT, "eyring06f05b_legend_" + var0) + wks1 = gsn_open_wks(file_type, plotpath1) + nPanel = dimsizes(seasons) + plot = new(nPanel, "graphic") + plot_num = integertochar(ispan(97, 122, 1)) + rad = 4.0 * atan(1.0) / 180.0 + A0 = read_data(info0[iref]) + lev0 = A0&plev + lat0 = A0&lat + dimLev = dimsizes(lev0) + delete(A0) + + tmpData = new((/dim_MOD + dim_OBS, nPanel, dimsizes(lat0), 2/), "float") + tmpData!0 = "line" + tmpData!1 = "season" + tmpData!2 = "lat" + tmpData!3 = "stat" + + Xdata = new((/dimTot, nPanel, dimsizes(lat0), 2/), "float") + Xdata!0 = "line" + Xdata!1 = "season" + Xdata!2 = "lat" + Xdata!3 = "stat" + Xdata&lat = lat0 + + Xref = new((/ nPanel, dimsizes(lat0), 2/), "float") + plotMod = new(dimTot, typeof(names)) + plotColor = new(dimTot, typeof(names)) + plotDash = new(dimTot, typeof(names)) + plotThicks = new(dimTot, "float") + plotScatters = new(dimTot, typeof(names)) + LineScatters = new(dimTot, typeof(names)) + ik = 0 + + ; Start datasets loop + do imod = 0, dim_MOD1 - 1 + + ; Start seasons loop + do ipan = 0, dimsizes(seasons) - 1 + + ; Extract data + A0 = read_data(info0[imod]) + dummy1 = A0(time|:, {plev|lev(ipan)}, lat|:) + delete(A0) + + if (seasons(ipan).ne."ANN") then + dummy2 = \ + time_operations(dummy1, year_start, year_end, \ + "extract", seasons(ipan), True) + else + time = dummy1&time + years1 = cd_calendar(time, 0) + dummy2a = month_to_annual(dummy1(:, :), 1) + dummy2a&year = years1(::12, 0) + dummy2 = dummy2a({year_start:year_end}, :) + delete(time) + delete(years1) + delete(dummy2a) + end if + delete(dummy1) + tmp = dim_avg_n_Wrap(dummy2, 0) + tmp_st = dim_stddev_n_Wrap(dummy2, 0) + delete(dummy2) + tmpData(imod, ipan, :, 0) = (/tmp/) + tmpData(imod, ipan, :, 1) = (/tmp_st/) + delete(tmp) + delete(tmp_st) + end do ; end seasons loop + + Xdata(imod, :, :, :) = (/tmpData(imod, :, :, :)/) + plotMod(imod) = names(imod) + plotColor(imod) = colors(imod) + plotDash(imod) = dashes(imod) + plotThicks(imod) = thicks(imod) + plotScatters(imod) = 0. + LineScatters(imod) = "Lines" + + if (imod.eq.iref) then + Xref = tmpData(imod, :, :, :) + plotDash(imod) = 16. + plotScatters(imod) = 16. 
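+ ; the reference dataset is drawn as filled-circle markers + ; (style index 16) rather than as a line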
+ LineScatters(imod) = "Markers" + end if + + end do ; end model loop + + if (multimean) then + Xdata(dimTot-1, :, :, :) = \ + (/dim_avg_n(Xdata(index_mod, :, :, :), 0)/) + plotMod(dimTot - 1) = "MMM" + plotColor(dimTot - 1) = "Black" + plotDash(dimTot - 1) = 0. + plotThicks(dimTot - 1) = 4. + LineScatters(dimTot - 1) = "Lines" + end if + + ; Calculate multi-model standard deviation + X_MMM_std = (/dim_stddev_n(Xdata(index_mod, :, :, :), 0)/) + + Ydata = lev0 / 100. + + plot_d = new(dimsizes(seasons), "graphic") + plot_xp_R_d = new(dimsizes(seasons), "graphic") + plot_xp_M_d = new(dimsizes(seasons), "graphic") + xp_R = new((/2 * dimsizes(lat0), dimsizes(seasons)/), typeof(Xref)) + yp_R = new((/2 * dimsizes(lat0), dimsizes(seasons)/), typeof(Xref)) + + xp_M = new((/2 * dimsizes(lat0), dimsizes(seasons)/), typeof(Xref)) + yp_M = new((/2 * dimsizes(lat0), dimsizes(seasons)/), typeof(Xref)) + do j = 0, dimsizes(lat0) - 1 + yp_R(j, :) = (Xref(:, j, 0) - Xref(:, j, 1)) + xp_R(j, :) = tofloat(lat0(j)) + xp_R(dimsizes(lat0) + j, :) = tofloat(lat0(dimsizes(lat0) - 1 - j)) + yp_R(dimsizes(lat0) + j, :) = \ + (Xref(:, (dimsizes(lat0) - 1 - j), 0) + \ + Xref(:, (dimsizes(lat0) - 1 - j), 1)) + end do + if (multimean) then + do j = 0, dimsizes(lat0) - 1 + yp_M(j, :) = Xdata(dimTot - 1, :, j, 0) - ((X_MMM_std(:, j, 0))) + xp_M(j, :) = tofloat(lat0(j)) + xp_M(dimsizes(lat0) + j, :) = tofloat(lat0(dimsizes(lat0) - 1 - j)) + yp_M(dimsizes(lat0) + j, :) = \ + Xdata(dimTot - 1, :, (dimsizes(lat0) - 1 - j), 0) + \ + (X_MMM_std(:, (dimsizes(lat0) - 1 - j), 0)) + end do + end if + + ; Plotting + do ipanel = 0, dimsizes(seasons) - 1 + res = True + res@gsnDraw = False + res@gsnFrame = False + res@tfPolyDrawOrder = "Predraw" + res@tmXBMode = "Explicit" + res@tmXBValues = (/-90., -60., -30., 0., 30., 60., 90./) + res@tmXBLabels = (/-90., -60., -30., 0., 30., 60., 90./) + res@trYMaxF = YMax(ipanel) + res@trYMinF = YMin(ipanel) + res@tmLabelAutoStride = True + + res@vpHeightF = 0.4 ; change aspect ratio of plot + res@vpWidthF = 1. / dimsizes(seasons)
+ + res@tiYAxisString = \ + LONG_NAME + " " + "(" + UNITS + ")" ; y-axis label + lev_nam = (lev(ipanel) / 100) + res@tiMainString = \ + LONG_NAME + " " + lev_nam + " hPa " + \ + seasons_name(ipanel) + + res@xyMarkLineModes = LineScatters + + res@xyLineThicknessF = 4.0 + res@xyLineDashSegLenF = 0.13 + res@xyLineColors = plotColor ; line color + res@xyDashPatterns = plotDash + res@xyMarkers = plotDash + res@xyMarkerSizeF = 0.01 + res@xyMarkerThicknessF = 1.8 + + Xdata&lat = lat0 + Xdata!0 = "line" + Xdata!1 = "season" + Xdata!2 = "lat" + Xdata!3 = "stat" + plot_d(ipanel) = \ + gsn_csm_xy(wks, \ + Xdata&lat, \ + Xdata(line|:, season|(ipanel), lat|:, stat|0), \ + res) + + gsres = True + gsres@gsFillColor = "Snow3" + if (isatt(variable_info[0], "reference_dataset")) then + plot_xp_R_d(ipanel) = gsn_add_polygon(wks, plot_d(ipanel), \ + xp_R(:, ipanel), \ + yp_R(:, ipanel), gsres) + end if + if (multimean) then + gsres1 = True + gsres1@gsFillColor = "cyan" ; color chosen + gsres1@gsFillOpacityF = 0.2 + plot_xp_M_d(ipanel) = gsn_add_polygon(wks, plot_d(ipanel), \ + xp_M(:, ipanel), \ + yp_M(:, ipanel), gsres1) + end if + end do ; end loop panel + + Pres = True + Pres@gsnFrame = False + Pres@gsnPanelBottom = 0.2 + Pres@amJust = "TopLeft" + + gsn_panel(wks, plot_d, (/1, dimsizes(seasons)/), Pres) + frame(wks) + + add_legend(wks1, plotMod, plotColor, tofloat(plotDash), \ + LineScatters, plotThicks + 2., "0.9") + frame(wks1) + + nc_filename = work_dir + "eyring06f05b_" + var0 + ".nc" + Xdata@var = var0 + Xdata@diag_script = "eyring06f05b" + Xdata&stat = (/"mean", "stddev"/) + Xdata&season = lev + Xdata&lat = lat0 + Xdata&line = plotMod + nc_outfile = ncdf_write(Xdata, nc_filename) + + ; Call provenance logger + log_provenance(nc_filename, \ + plotpath + "." + file_type, \ + "Latitudinal profiles at selected pressures of " \ + + var0 + " mean", \ + (/"mean"/), \ + (/"global"/), \ + "pro", \ + (/"cionni_irene", "eyring_veronika"/), \ + (/"eyring06jgr"/), \ + metadata_att_as_array(info0, "filename")) + +end diff --git a/esmvaltool/diag_scripts/eyring06jgr/eyring06jgr_fig15.ncl b/esmvaltool/diag_scripts/eyring06jgr/eyring06jgr_fig15.ncl new file mode 100644 index 0000000000..3930fe9a26 --- /dev/null +++ b/esmvaltool/diag_scripts/eyring06jgr/eyring06jgr_fig15.ncl @@ -0,0 +1,536 @@ +; ############################################################################ +; SCRIPT TO REPRODUCE FIG. 15 OF EYRING ET AL. (JGR, 2006) +; Authors: Irene Cionni (ENEA, Italy) and Veronika Eyring (DLR, Germany) +; CCMI Project +; ############################################################################ +; +; Description: +; Time series of seasonal mean anomalies of total column ozone, +; calculated with the methodology described in Appendix A of Eyring06. +; The annual cycle for the reference years is also shown. +; +; Required diag_script_info attributes: (diagnostic specific) +; latrange: minimum and maximum latitude for regional average, +; e.g. (/60, 90/); the script reads them as latmin/latmax +; season: season for temporal average (e.g. "DJF", "MAM", "JJA", "SON") +; multimean: calculate multi-model mean (True or False) +; refModel: name of the reference model or observation for +; bias calculation (e.g. (/"ERA-Interim"/))
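+; +; Example of hypothetical recipe settings (illustrative values only): +; fig15: +; script: eyring06jgr/eyring06jgr_fig15.ncl +; season: ["DJF"] +; latmin: [60.] +; latmax: [90.] +; multimean: true +; refModel: <reference dataset name>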
(/"ERA-Interim"/)) +; +; Optional diag_script_info attributes: +; latmin: minimum latitude for region selection +; latmax: maximum latitude for region selection +; Xmin: lower limit for the X axis +; Xmax: upper limit for the X axis +; Ymin: lower limit Y axis +; Ymax: upper limit Y axis +; start_year: start year for the climatology calculation +; end_year: end year for the climatology calculation +; Cycle_Ymin: lower limit Y axis (for right figures) +; Cycle_Ymax: upper limit Y axis (for right figures) +; +; Required variable_info attributes: +; +; Optional variable_info attributes: +; +; Modification history +; 20200323-hassler_birgit: cleaned code +; 20180320-lauer_axel: added tags for reporting and call to write_references +; 20170315-cionni_irene: written +; +; ############################################################################ + +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" +load "$diag_scripts/shared/plot/style.ncl" + +begin + + enter_msg(DIAG_SCRIPT, "") + + ; Get variables and datasets + var0 = variable_info[0]@short_name + info0 = select_metadata_by_name(input_file_info, var0) + dim_MOD1 = ListCount(info0) + names = metadata_att_as_array(info0, "dataset") + projects = metadata_att_as_array(info0, "project") + exps = metadata_att_as_array(info0, "exp") + ensembles = metadata_att_as_array(info0, "ensemble") + y1 = metadata_att_as_array(info0, "start_year") + y2 = metadata_att_as_array(info0, "end_year") + + ; Check variable attributes + if (isatt(variable_info, "units")) then + UNITS = variable_info@units + else + UNITS = "DU" + end if + + if (isatt(variable_info, "long_name")) then + LONG_NAME = variable_info@long_name + else + LONG_NAME = var0 + end if + + ; Set default values for optional diag_script_info attributes + set_default_att(diag_script_info, "season", "ANN") + set_default_att(diag_script_info, "latmin", -90.) + set_default_att(diag_script_info, "latmax", 90.) + set_default_att(diag_script_info, "multimean", False) + set_default_att(diag_script_info, "cycle_yearmin", max(y1)) + set_default_att(diag_script_info, "cycle_yearmax", min(y2)) + set_default_att(diag_script_info, "Xmin", 1960) + set_default_att(diag_script_info, "Xmax", 2005) + set_default_att(diag_script_info, "Ymin", -10) + set_default_att(diag_script_info, "Ymax", 10) + set_default_att(diag_script_info, "cycle_Ymin", 260.) + set_default_att(diag_script_info, "cycle_Ymax", 380.) + + year_cycle_max = toint(diag_script_info@cycle_yearmax) + year_cycle_min = toint(diag_script_info@cycle_yearmin) + + seasons = diag_script_info@season + latrange = new((/dimsizes(seasons), 2/), "float") + latrange(:, 0) = diag_script_info@latmin + latrange(:, 1) = diag_script_info@latmax + + ; Styles + colors = project_style(info0, diag_script_info, "colors") + dashes = project_style(info0, diag_script_info, "dashes") + thicks = tofloat(project_style(info0, diag_script_info, "thicks")) + markers = project_style(info0, diag_script_info, "markers") + + flag_mod = where(projects.ne."OBS" .and. 
+ index_mod = ind(flag_mod.gt.0) + index_obs = ind(flag_mod.eq.0) + dim_MOD = dimsizes(index_mod) + dim_OBS = 0 + + if (.not.all(ismissing(index_obs))) then + dim_OBS = dimsizes(index_obs) + list_obs_mod = names(index_obs) + end if + + XMax = diag_script_info@Xmax + XMin = diag_script_info@Xmin + YMax = diag_script_info@Ymax + YMin = diag_script_info@Ymin + cycle_max = toint(diag_script_info@cycle_Ymax) + cycle_min = toint(diag_script_info@cycle_Ymin) + + multimean = diag_script_info@multimean + if (multimean) then + dimTot = dim_MOD + dim_OBS + 1 + else + dimTot = dim_MOD + dim_OBS + end if + + ; make sure path for (mandatory) netcdf output exists + work_dir = config_user_info@work_dir + "/" + + ; Create output directory + system("mkdir -p " + work_dir) + + ; Plot file type + file_type = config_user_info@output_file_type + if (ismissing(file_type)) then + file_type = "ps" + end if + +end + +begin + + aux_title_info = "" + wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "eyring06f15_" + var0) + output_filename = wks@fullname + nPanel = dimsizes(seasons) + plot = new(nPanel, "graphic") + plot_num = integertochar(ispan(97, 122, 1)) + rad = 4.0 * atan(1.0) / 180.0 + + min_y1 = min(y1) + max_y2 = max(y2) + years = ispan(min_y1, max_y2, 1) + years!0 = "years" + years&years = years + nyears = dimsizes(years) + + tmpData = new((/dim_MOD1, nPanel, nyears, 2/), "float") + tmpData!0 = "line" + tmpData!1 = "season" + tmpData!2 = "years" + tmpData!3 = "stat" + tmpData&years = years + + Xdata = new((/dimTot, nPanel, nyears, 2/), "float") + Xdata!0 = "line" + Xdata!1 = "season" + Xdata!2 = "years" + Xdata!3 = "stat" + Xdata&years = years + Xref = new((/ nPanel, nyears, 2/), "float") + + Xref!0 = "season" + Xref!1 = "years" + Xref!2 = "stat" + Xref&years = years + + cytmpData = new((/dim_MOD + dim_OBS, nPanel, 366, 2/), "float") + cytmpData!0 = "line" + cytmpData!1 = "season" + cytmpData!2 = "day" + cytmpData!3 = "stat" + cytmpData&day = ispan(1, 366, 1) + + cyXdata = new((/dimTot, nPanel, 366, 2/), "float") + cyXdata!0 = "line" + cyXdata!1 = "season" + cyXdata!2 = "day" + cyXdata!3 = "stat" + cyXdata&day = ispan(1, 366, 1) + + plotMod = new(dimTot, typeof(names)) + plotColor = new(dimTot, typeof(names)) + plotDash = new(dimTot, typeof(names)) + plotThicks = new(dimTot, "float") + plotScatters = new(dimTot, typeof(names)) + LineScatters = new(dimTot, typeof(names)) + ik = 0 + + ; Start datasets loop + do imod = 0, dim_MOD1 - 1 + + ; Start seasons loop + do ipan = 0, dimsizes(seasons) - 1 + + ; Extract data + A0 = read_data(info0[imod]) + + if (all(isdim(A0, (/"time", "lat", "lon"/)))) then + dummy1 = area_operations(A0(time|:, lat|:, lon|:), \ + latrange(ipan, 0), \ + latrange(ipan, 1), 0., 360., \ + "average", True) + delete(A0) + else + if (all(isdim(A0, (/"time", "lat"/)))) then + coslat = cos(A0&lat * rad) + coslat!0 = "lat" + coslat&lat = A0&lat + dummy1 = \ + dim_avg_wgt_Wrap(A0(:, \ + {latrange(ipan, 0):latrange(ipan, 1)}), \ + coslat({latrange(ipan, 0):latrange(ipan, 1)}), 1) + delete(coslat) + delete(A0) + end if + end if + + ; Appendix A methodology + ; (function calcRegCoeffs; + ; function calcMeanAnnCycleAnnual + ; procedure rmMeanAnnCycle) + ; These functions and procedures are stored in aux_plotting.ncl + tim = dummy1&time + tim_min = doubletointeger(ut_inv_calendar(y1(imod), \ + 1, 1, 0, 0, 0, tim@units, 0)) + tim_max = doubletointeger(ut_inv_calendar(y2(imod), \ + 12, 31, 23, 59, 0, tim@units, 0))
+ delete(tim) + + coeff_ab = calcRegCoeffs(dummy1({tim_min:tim_max})) + coeff_a = coeff_ab(0:8) + delete(coeff_ab) + FMeanAnnCycleDay = calcMeanAnnCycleAnnual(tofloat(coeff_a)) ; daily + FMeanAnnCycleDay@units = dummy1@units + FMeanAnnCycleDay@_FillValue = 10e21 + + ; Anomalies + coeff_a1 = coeff_a(0) + FMeanAnnCycleMon = calcMeanAnnCycleMonthly(tofloat(coeff_a)) ; monthly + FMeanAnnCycleMon@_FillValue = 10e21 + + ; remove mean annual cycle and calculate anomalies + ; set input for rmMeanAnnCycle + nmon = 12 + ; set outputs for rmMeanAnnCycle + dimx = dimsizes(dummy1) + ntim = dimx(0) + nyr = ntim/nmon + + FMonthlyAnomalies = new((/ntim/), typeof(dummy1)) ; monthly anomalies + FAnnualAnomalies = new((/nyr/), typeof(dummy1)) ; annual anomalies + + rmMeanAnnCycle(dummy1, FMeanAnnCycleMon, coeff_a1, \ + FMonthlyAnomalies, FAnnualAnomalies) + + FMonthlyAnomalies@_FillValue = dummy1@_FillValue + FAnnualAnomalies@_FillValue = dummy1@_FillValue + FMonthlyAnomalies!0 = "time" + FMonthlyAnomalies&time = dummy1&time + delete(dummy1) + + if (seasons(ipan).ne."ANN") then + FSeasAnomalies = month_to_season(FMonthlyAnomalies, seasons(ipan)) + date = ut_calendar(FSeasAnomalies&time, 0) + year = date(:, 0) + FSeasAnomalies!0 = "year" + FSeasAnomalies&year = year + else + FSeasAnomalies = month_to_annual(FMonthlyAnomalies, 1) + date = ut_calendar(FMonthlyAnomalies&time, 0) + year = date(::12, 0) + FSeasAnomalies&year = year + end if ; end of Appendix A methodology + + tmpData(imod, ipan, {y1(imod):y2(imod)}, 0) = (/tofloat(FSeasAnomalies)/) + Xdata(imod, ipan, {y1(imod):y2(imod)}, 0) = (/tofloat(FSeasAnomalies)/) + + cytmpData(imod, ipan, :, 0) = (/tofloat(FMeanAnnCycleDay)/) + cyXdata(imod, ipan, :, 0) = (/tofloat(FMeanAnnCycleDay)/) + + delete(coeff_a) + delete(FMeanAnnCycleDay) + delete(FSeasAnomalies) + delete(date) + delete(year) + delete(FMonthlyAnomalies) + delete(FAnnualAnomalies) + delete(tim_min) + delete(tim_max) + + end do ; ipan + + plotMod(imod) = names(imod) + plotColor(imod) = colors(imod) + plotDash(imod) = dashes(imod) + plotThicks(imod) = thicks(imod) + plotScatters(imod) = markers(imod) + + end do ; imod + + LineScatters(index_obs) = "Markers" + ind_mon = (/0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 304, 335, 365/) + save_obs = cyXdata(index_obs, :, ind_mon, 0) + cyXdata(index_obs, :, :, 0) = cyXdata@_FillValue + cyXdata(index_obs, :, ind_mon, 0) = save_obs + save_obs = cyXdata(index_obs, :, ind_mon, 1) + cyXdata(index_obs, :, :, 1) = cyXdata@_FillValue + cyXdata(index_obs, :, ind_mon, 1) = save_obs + delete(save_obs) + + ; (the obs dataset is reduced to monthly values because it is plotted + ; as scatter points) + LineScatters(index_mod) = "Lines" + + ; Calculate multimodel mean + if (multimean) then + Xdata(dimTot-1, :, :, 0) = \ + (/dim_avg_n(Xdata(index_mod, :, :, 0), 0)/) + Xdata(dimTot-1, :, :, 1) = \ + (/dim_stddev_n(Xdata(index_mod, :, :, 0), 0)/) + cyXdata(dimTot-1, :, :, 0) = \ + (/dim_avg_n_Wrap(cyXdata(index_mod, :, :, 0), 0)/) + cyXdata(dimTot-1, :, :, 1) = \ + (/dim_stddev_n_Wrap(cyXdata(index_mod, :, :, 0), 0)/) + plotMod(dimTot-1) = "MMM" + plotColor(dimTot-1) = "Black" + plotDash(dimTot-1) = 0 + plotThicks(dimTot-1) = 4.
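+ ; slot dimTot-1 holds the multi-model mean; stat index 1 carries the + ; inter-model standard deviation used below for the shaded envelope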
+ LineScatters(dimTot-1) = "Lines" + plotScatters(dimTot-1) = 0 + end if + + ; Plotting + plot_d = new(dimTot, "graphic") + + plot_diff = new((/dimsizes(seasons)/), "graphic") + plot_ann = new((/dimsizes(seasons)/), "graphic") + plot_xp_M_d = new((/dimsizes(seasons)/), "graphic") + plot_Mult = new((/dimsizes(seasons)/), "graphic") + plot_ann_Mult = new((/dimsizes(seasons)/), "graphic") + baseplot = new((/dimsizes(seasons) * 2/), "graphic") + + plot_xp_r_d = new(dimsizes(seasons), "graphic") + plot_xp_m_d = new(dimsizes(seasons), "graphic") + + if (multimean) then + xp_M = new((/2 * dimsizes(Xdata&years), \ + dimsizes(seasons)/), typeof(Xdata)) + yp_M = new((/2 * dimsizes(Xdata&years), \ + dimsizes(seasons)/), typeof(Xdata)) + do j = 0, dimsizes(years) - 1 + yp_M(j, :) = Xdata(dimTot-1, :, j, 0) - Xdata(dimTot-1, :, j, 1) + xp_M(j, :) = tofloat(years(j)) + xp_M(dimsizes(years) + j, :) = tofloat(years(dimsizes(years) - 1 - j)) + yp_M(dimsizes(years) + j, :) = \ + Xdata(dimTot - 1, :, (dimsizes(years) - 1 - j), 0) + \ + Xdata(dimTot - 1, :, (dimsizes(years) - 1 - j), 1) + end do + xp_M2 = new((/2 * dimsizes(cyXdata&day), \ + dimsizes(seasons)/), typeof(cyXdata)) + yp_M2 = new((/2 * dimsizes(cyXdata&day), \ + dimsizes(seasons)/), typeof(cyXdata)) + do j = 0, 366 - 1 + yp_M2(j, :) = cyXdata(dimTot - 1, :, j, 0) - \ + cyXdata(dimTot - 1, :, j, 1) + xp_M2(j, :) = j + 1 + xp_M2(dimsizes(cyXdata&day) + j, :) = 366 - j + yp_M2(dimsizes(cyXdata&day) + j, :) = \ + cyXdata(dimTot - 1, :, (dimsizes(cyXdata&day) - 1 - j), 0) + \ + cyXdata(dimTot - 1, :, (dimsizes(cyXdata&day) - 1 - j), 1) + end do + end if + + ; Plotting + plotpath_anom = config_user_info@plot_dir + "eyring06f15_Anomalies_" + var0 + wks = gsn_open_wks(file_type, plotpath_anom) + plotpath1_anom = config_user_info@plot_dir + "eyring06f15_legend_" + var0 + wks1 = gsn_open_wks(file_type, plotpath1_anom) + + do ipanel = 0, dimsizes(seasons) - 1 + res = True + res@gsnDraw = False + res@gsnFrame = False + res@tfPolyDrawOrder = "Predraw" + res@trXMaxF = XMax + res@trXMinF = XMin + res@trYMaxF = YMax(ipanel) + res@trYMinF = YMin(ipanel) + res@tmLabelAutoStride = True + res@vpHeightF = 0.3 ; change aspect ratio of plot + res@vpWidthF = 0.7 + res@tiYAxisString = "Anomaly (%)" ; y-axis label + res@tiMainFontHeightF = 0.023 + res@tiMainString = seasons(ipanel) + " " + \ + lat_names(latrange(ipanel, 0)) + "-" + \ + lat_names(latrange(ipanel, 1)) + res@xyMarkLineModes = LineScatters + res@xyLineThicknessF = 3.0 + res@xyLineDashSegLenF = 0.13 + res@xyLineColors = plotColor ; line color + res@xyDashPatterns = plotDash + res@xyMarkers = plotScatters + res@xyMarkerSizeF = 0.004 + res@xyMarkerThicknessF = 1.8 + plot_d(ipanel) = \ + gsn_csm_xy(wks, Xdata&years, \ + Xdata(line|:, season|ipanel, years|:, stat|0), \ + res) + + add_line(wks, plot_d(ipanel), \ + (/years(0), years(dimsizes(years) - 1)/), \ + (/0., 0./), "gray", 1) + + if (multimean) then + gsres = True + gsres@gsFillColor = "cyan" ; color chosen + plot_xp_M_d(ipanel) = \ + gsn_add_polygon(wks, plot_d(ipanel), \ + xp_M(:, ipanel), \ + yp_M(:, ipanel), gsres) + end if + + res2 = True + res2@gsnDraw = False + res2@gsnFrame = False + res2@tfPolyDrawOrder = "Predraw" + res2@trXMaxF = 366 + res2@trXMinF = 1 + res2@trYMaxF = cycle_max(ipanel) + res2@trYMinF = cycle_min(ipanel) + res2@tmXBMode = "Explicit" + res2@tmXBValues = (/1, 32, 61, 92, 122, 153, \ + 183, 214, 245, 275, 305, 336/) + res2@tmXBLabels = (/"J", "F", "M", "A", "M", "J", \ + "J", "A", "S", "O", "N", "D"/) + + res2@vpHeightF = 0.3 ; change aspect ratio of plot
+ res2@vpWidthF = 0.3 + ; y-axis label + res2@tiYAxisString = "Detrended mean annual cycle (" + UNITS + ")" + + res2@xyMarkLineModes = LineScatters(:) + res2@xyLineThicknessF = 3.0 + res2@xyLineDashSegLenF = 0.13 + res2@xyLineColors = plotColor(:) ; line color + res2@xyDashPatterns = plotDash(:) + res2@xyMarkers = plotScatters(:) + res2@xyMarkerSizeF = 0.004 + res2@xyMarkerThicknessF = 1.8 + plot_ann(ipanel) = \ + gsn_csm_xy(wks, cyXdata&day, \ + cyXdata(line|:, season|ipanel, day|:, stat|0), \ + res2) + + if (multimean) then + gsres2 = True + gsres2@gsFillColor = "cyan" + plot_ann_Mult(ipanel) = \ + gsn_add_polygon(wks, plot_ann(ipanel), \ + xp_M2(:, ipanel), \ + yp_M2(:, ipanel), \ + gsres2) + end if + + end do ; end ipanel + + do j = 0, dimsizes(seasons) - 1 + baseplot(2 * j) = plot_d(j) + baseplot(2 * j + 1) = plot_ann(j) + end do + + ; Call panel routine + Pres = True + Pres@gsnFrame = False + Pres@gsnPanelXWhiteSpacePercent = 0 + Pres@gsnPanelBottom = 0.15 + Pres@gsnPanelLeft = 0.15 + gsn_panel(wks, baseplot, (/dimsizes(seasons), 2/), Pres) + + plotThicks = plotThicks + 1 + plotDash(index_obs) = plotScatters(index_obs) + add_legend(wks1, plotMod, plotColor, tofloat(plotDash), \ + LineScatters, plotThicks, "0.9") + frame(wks) + frame(wks1) + + ; Anomalies output + nc_filename = work_dir + "eyring06f15_Anomalies_" + var0 + ".nc" + Xdata@var = var0 + Xdata@diag_script = "eyring06f15" + Xdata&stat = (/"mean", "stddev"/) + Xdata&season = seasons + Xdata&line = plotMod + nc_outfile = ncdf_write(Xdata, nc_filename) + + ; AnnualCycle output + nc_filename1 = work_dir + "eyring06f15_AnnualCycle_" + var0 + ".nc" + cyXdata@var = var0 + cyXdata@diag_script = "eyring06f15" + cyXdata&stat = (/"mean", "stddev"/) + cyXdata&season = seasons + cyXdata&line = plotMod + nc_outfile1 = ncdf_write(cyXdata, nc_filename1) + + ; Call provenance logger + log_provenance(nc_filename, \ + plotpath_anom + "." + file_type, \
+ "Anomaly time series and seasonal anomalies of " + var0, \ + (/"anomaly", "mean"/), \ + (/"nhpolar", "shpolar", "global"/), \ + "pro", \ + (/"cionni_irene", "eyring_veronika"/), \ + (/"eyring06jgr"/), \ + metadata_att_as_array(info0, "filename")) + +end diff --git a/esmvaltool/diag_scripts/eyring13jgr/eyring13jgr_fig12.ncl b/esmvaltool/diag_scripts/eyring13jgr/eyring13jgr_fig12.ncl new file mode 100644 index 0000000000..efb75b2e2c --- /dev/null +++ b/esmvaltool/diag_scripts/eyring13jgr/eyring13jgr_fig12.ncl @@ -0,0 +1,371 @@ +; EYRING13JGR_FIG12 +; Author: Irene Cionni (ENEA, Italy), Veronika Eyring (DLR, Germany) +; PROJECT-NAME CCMI +; ############################################################################# +; Description +; Calculation of the long-term mean and trend of the zonal wind +; +; +; Optional diag_script_info attributes (diagnostic specific) +; @e13fig12_start_year: first year of the climatology calculation +; @e13fig12_end_year: last year of the climatology calculation +; @e13fig12_multimean: calculate the multi-model mean (True or False) +; @e13fig12_season: season +; @e13fig12_exp_MMM: names of the experiments for the MMM +; (no default; must be set in the recipe) +; Caveats +; +; Modification history +; ############################################################################# +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" +load "$diag_scripts/shared/plot/style.ncl" + +begin + + enter_msg(DIAG_SCRIPT, "") + var0 = variable_info[0]@short_name + info0 = select_metadata_by_name(input_file_info, var0) + dim_MOD1 = ListCount(info0) + names = metadata_att_as_array(info0, "dataset") + projects = metadata_att_as_array(info0, "project") + exps = metadata_att_as_array(info0, "exp") + ensembles = metadata_att_as_array(info0, "ensemble") + y1 = (/ metadata_att_as_array(info0, "start_year")/) + y2 = (/ metadata_att_as_array(info0, "end_year") /) + work_dir = config_user_info@work_dir + "/" + ; Create work dir + + exps_name = where(ismissing(exps), "OBS", exps) + system("mkdir -p " + work_dir) + + if (isatt(variable_info[0], "reference_dataset")) then + refname = variable_info[0]@reference_dataset + do imod = 0, dimsizes(names) - 1 + if (names(imod) .eq. refname) then + iref = imod + end if + end do + end if + + if(isatt(variable_info, "units")) then + UNITS = variable_info@units + else + UNITS = "m s~S~-1~N~" + end if + + if(isatt(variable_info, "long_name")) then + LONG_NAME = variable_info@long_name + else + LONG_NAME = var0 + end if + + ; Plot file type + file_type = config_user_info@output_file_type + if (ismissing(file_type)) then + file_type = "ps" + end if + + log_info("++++++++++++++++++++++++++++++++++++++++++") + log_info(DIAG_SCRIPT + " (var: " + var0 + ")") + log_info("++++++++++++++++++++++++++++++++++++++++++") + + ; Set default values for optional diag_script_info attributes + + set_default_att(diag_script_info, "e13fig12_multimean", False) + set_default_att(diag_script_info, "e13fig12_start_year", max(y1)) + set_default_att(diag_script_info, "e13fig12_end_year", min(y2)) + set_default_att(diag_script_info, "e13fig12_season", "ANN") + flag_mod = where(projects.ne."OBS6" .and. 
projects.ne."OBS" .and.\ + projects.ne."obs4MIPs", 1, 0) + + exp_MMM = diag_script_info@e13fig12_exp_MMM + flag_exp = new((/dimsizes(projects), dimsizes(exp_MMM)/), "integer") + do id = 0, dimsizes(exp_MMM) - 1 + flag_exp(:, id) = where(projects.eq.exp_MMM(id), 0, 1) + end do + index_mod = ind(flag_mod.gt.0) + index_obs = ind(flag_mod.eq.0) + dim_MOD = dimsizes(index_mod) + dim_OBS = 0 + + if (.not.all(ismissing(index_obs))) then + dim_OBS = dimsizes(index_obs) + list_obs_mod = names(index_obs) + end if + + exp_MMM = diag_script_info@e13fig12_exp_MMM + multimean = diag_script_info@e13fig12_multimean + season = diag_script_info@e13fig12_season + if (multimean) then + dimTot = dim_MOD + dim_OBS + 1 + if (dimsizes(exp_MMM).gt.1) then + dimTot = dim_MOD + dim_OBS + dimsizes(exp_MMM) + end if + else + dimTot = dim_MOD + dim_OBS + end if + + ; make sure path for (mandatory) netcdf output exists + year_start = toint(diag_script_info@e13fig12_start_year) + year_end = toint(diag_script_info@e13fig12_end_year) + +end + +begin + + aux_title_info = "" + ; wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "eyring13f12_" + var0 +\ + ; "_" + plotMod) + A0 = read_data(info0[iref]) + lat0 = A0&lat + plev0 = A0&plev + delete(A0) + + tmpData = new((/dim_MOD + dim_OBS, dimsizes(plev0), dimsizes(lat0), 3/),\ + "double") + tmpData!0 = "line" + tmpData!1 = "plev" + tmpData!2 = "lat" + tmpData!3 = "stat" + + XXdata = new((/dimTot, dimsizes(plev0), dimsizes(lat0), 3/), "double") + XXdata!0 = "line" + XXdata!1 = "plev" + XXdata!2 = "lat" + XXdata!3 = "stat" + + XXref = new((/dimsizes(plev0), dimsizes(lat0), 3/), "double") + XXref!0 = "plev" + XXref!1 = "lat" + XXref!2 = "stat" + + years1 = new(dimTot, "integer") + years2 = new(dimTot, "integer") + plot = new(dimTot, "graphic") + plot_num = integertochar(ispan(97, 122, 1)) + + plotMod = new(dimTot, typeof(names)) + + ik = 0 + +; ****************************************************************** + do imod = 0, dim_MOD1 - 1 + years1(imod) = y1(imod) + years2(imod) = y2(imod) + years = tofloat(ispan(years1(imod), years2(imod), 1)) + plotMod(imod) = names(imod) + "_" + exps_name(imod) + ; Extract data + A0 = read_data(info0[imod]) + if (season.eq."ANN") then + A1 = time_operations(A0, year_start, year_end, "average",\ + "annualclim", True) + A2 = time_operations(A0, -1, -1, "extract", "yearly", True) + A2trend = regCoef(years, A2(plev|:, lat|:, time|:)) + tval_A2 = A2trend@tval + b_A2 = 0.5 + df_A2 = A2trend@nptxy - 2 + prob_A2 = (/1 - betainc(df_A2 / (df_A2 + tval_A2 ^ 2),\ + df_A2 / 2.0, b_A2)/) + else + A1 = time_operations(A0, -1, -1, "average", season, True) + A2 = month_to_season(A0, season) + A2trend = regCoef(years, A2(plev|:, lat|:, time|:)) + tval_A2 = A2trend@tval + b_A2 = tval_A2 + b_A2 = (/0.5/) + df_A2 = A2trend@nptxy - 2 + prob_A2 = A2(0, :, :) + prob_A2 = onedtond((/1 - betainc(df_A2 / (df_A2 + tval_A2 ^ 2), \ + df_A2 / 2.0, b_A2)/), dimsizes(A2(0, :, :))) + delete(df_A2) + delete(tval_A2) + delete(b_A2) + end if + delete(A0) + tmpData(imod, :, :, 0) = (/A1/) + tmpData(imod, :, :, 1) = (/A2trend * 10/) + tmpData(imod, :, :, 2) = (/prob_A2/) + end do + XXdata(:dim_MOD1 - 1, :, :, :) = (/tmpData/) + + XXref = (/tmpData(iref, :, :, :)/) + + XXdata!0 = "line" + XXdata!1 = "plev" + XXdata!2 = "lat" + XXdata!3 = "stat" + XXdata&lat = lat0 + plev0@long_name = "Pressure [hPa]" + XXdata&plev = plev0 + + ; ***************************************************** + if (multimean) then + do id = 0, dimsizes(exp_MMM) - 1 + flag_exp(:, id) = where(projects.eq.exp_MMM(id), 0, 
1) + index_exp = ind(flag_exp(:, id).gt.0) + XXdata(dimTot - 1, :, :, :) = \ + (/dim_avg_n(XXdata(index_exp, :, :, :), 0)/) + plotMod(dimTot - 1) = "MMM_" + exps_name(id) + years1(dimTot - 1) = max(y1) + years2(dimTot - 1) = min(y2) + delete(index_exp) + end do + end if + + X_MMM_std = (/dim_stddev_n(XXdata(index_mod, :, :, :), 0)/) + plot_d = new(dimTot, "graphic") + + ; *************************plotting****************************************** + do ipanel = 0, dimTot - 1 + wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "eyring13f12_" + \ + var0 + "_" + plotMod(ipanel)) + gsn_define_colormap(wks, "BlueYellowRed") + res = True ; plot mods desired + res@gsnDraw = False + res@gsnFrame = False + ; res@lbLabelBarOn = False + + res@tiMainString = plotMod(ipanel) ; title + res@tiYAxisFont = "helvetica" + res@tiMainFont = "helvetica" + res@tmXBLabelFont = "helvetica" + res@tmYLLabelFont = "helvetica" + res@lbLabelFont = "helvetica" + res@cnLevelSelectionMode = "ExplicitLevels" + res@cnLevels = (/-1.6, -1.4, -1.2, -1.0, -0.8, -0.6, -0.4, \ + -0.2, 0., 0.2, 0.4, 0.6, 0.8, 1.0, 1.2, \ + 1.4, 1.6/) ; scale bar from Eyring et al. (2013) + res@tmXBMode = "Explicit" + res@tmXBValues = (/-60., -30., 0., 30., 60./) + res@tmXBLabels = (/"60S", "30S", "0", "30N", "60N"/) + res@tmYLMode = "Explicit" + res@tmYLValues = (/1000., 800., 600., 400., 200./) + res@tmYLLabels = (/"1000", "800", "600", "400", "200"/) + res@trYMinF = 100. + res@trYMaxF = 1000. + res@cnLineLabelsOn = False ; turn off line labels + res@cnFillOn = True ; turn on color fill + res@cnLinesOn = False + res@cnInfoLabelOn = False ; turn off contour label + res@gsnSpreadColors = True ; use full range of colors + res@lbLabelAutoStride = True ; optimal labels + res@gsnLeftString = LONG_NAME + " [" + UNITS + "/decade]" + + res@gsnRightString = years1(ipanel) + "-" + years2(ipanel) + res@gsnLeftStringFontHeightF = 0.017 + res@gsnRightStringFontHeightF = 0.017 + res@tmXBMode = "Explicit" ; explicit labels + ; location of labels + ; res@vpHeightF = 0.4 ; change aspect ratio of plot + ; res@vpWidthF = 0.75 + ; res@lbLabelFontHeightF = 0.011 + ; res@tiYAxisString = "Latitude" ; y-axis label + ; plot_d(ipanel) = gsn_csm_lat_time(wks, XXdata(line|ipanel, \ + ; lat|:, month|:, stat|0), res) + + plotu = gsn_csm_pres_hgt(wks, XXdata(ipanel, :, :, 1), res) + res2 = True + res2@gsnDraw = False + res2@gsnFrame = False + res2@cnLevelSelectionMode = "ManualLevels" ; set manual contour levels + res2@cnMinLevelValF = -20. ; set min contour level + res2@cnMaxLevelValF = 40. ; set max contour level + res2@cnLevelSpacingF = 10. ; set contour spacing + res2@cnLineLabelsOn = True + res2@gsnContourZeroLineThicknessF = 2. ; doubles thickness of zero contour + res2@gsnContourNegLineDashPattern = 1 + res2@cnFillOn = False + res2@cnInfoLabelOn = False + res2@cnLinesOn = True + res2@cnLineColor = "Black" ; color of second contours + res2@cnLineThicknessF = 2. ; line thickness + res2@tmXBMode = "Explicit" + res2@tmXBValues = (/-60., -30., 0., 30., 60./) + res2@tmXBLabels = (/"60S", "30S", "0", "30N", "60N"/) + res2@tmYLMode = "Explicit" + res2@tmYLValues = (/1000., 800., 600., 400., 200./) + res2@tmYLLabels = (/"1000", "800", "600", "400", "200"/) + ; res2@trYMinF = 100. + ; res2@trYMaxF = 1000. 
+ ; res2@cnInfoLabelOn = False + res2@lbTitleFontHeightF = 0.01 ; default: 0.025 + res2@lbTitleOffsetF = -0.25 ; default: 0.03 (negative values move it down) + res2@lbTopMarginF = 0.1 ; default: 0.05 (negative values move it up) + res2@lbTitleJust = "BottomCenter" ; default: CenterCenter + res2@lbLabelFontHeightF = 0.025 + ; res2@lbTitleString = variable_info@long_name + ; Data to plot + ; model specific resources + res2@tiMainFontHeightF = 0.03 + res2@tiMainString = plotMod(ipanel) + ; res2@gsnLeftString = LONG_NAME + \ + ; " [" + UNITS + "]" + ; res2@gsnRightString = fmt + " " + variable_info@units + + plotv = gsn_csm_pres_hgt(wks, XXdata(ipanel, :, :, 0), res2) + delete(res) + res3 = True + res3@gsnDraw = False + res3@gsnFrame = False + res3@cnLevelSelectionMode = "ManualLevels" ; set manual contour levels + res3@cnMinLevelValF = 0. ; set min contour level + res3@cnMaxLevelValF = 1. ; set max contour level + res3@cnLevelSpacingF = 0.005 ; set contour spacing + res3@cnLineLabelsOn = False + res3@cnInfoLabelOn = False + res3@cnFillOn = False + res3@cnLinesOn = False + + plotz = gsn_csm_pres_hgt(wks, XXdata(ipanel, :, :, 2), res3) + opt = True + opt@gsnShadeFillType = "pattern" ; pattern fill + opt@gsnShadeHigh = 17 + plotz = gsn_contour_shade(plotz, 1., 0.95, opt) + overlay(plotu, plotv) + overlay(plotu, plotz) + draw(plotu) + frame(wks) + delete(wks) + end do + + ; Call legend-plotting routine + ; Pres = True + ; Pres@gsnFrame = False + ; Pres@gsnPanelBottom = 0.2 + ; Pres@amJust = "TopLeft" + ; Call panel routine + ; if (dimTot.gt.3) then + ; gsn_panel(wks, plot_d, (/1 + dimTot / 3, 3/), Pres) + ; else + + ; gsn_panel(wks, plot_d, (/1, 3/), Pres) + ; end if + ; frame(wks) + + ; *******************OUTPUT FILE****************************************** + nc_filename = work_dir + "eyring13f12_" + var0 + ".nc" + Xdata = XXdata(line|:, plev|:, lat|:, stat|:) + Xdata@var = var0 + Xdata@diag_script = "eyring13f12" + Xdata&stat = (/"mean", "trend", "prob"/) + Xdata&line = plotMod + Xdata&plev = plev0 + Xdata&lat = lat0 + nc_outfile = ncdf_write(Xdata, nc_filename) + + ; Call provenance logger + plotpath = config_user_info@plot_dir + "eyring13f12_" + var0 + "_" \ + + plotMod(dimTot - 1) + log_provenance(nc_filename, \ + plotpath + "." + file_type, \ + "Long-term mean and trend of " + var0, \ + (/"mean", "trend"/), \ + (/"global"/), \ + "pro", \ + (/"cionni_irene", "eyring_veronika"/), \ + (/"eyring13jgr"/), \ + metadata_att_as_array(info0, "filename")) + +end diff --git a/esmvaltool/diag_scripts/galytska23/select_variables_for_tigramite.py b/esmvaltool/diag_scripts/galytska23/select_variables_for_tigramite.py new file mode 100644 index 0000000000..424f089d69 --- /dev/null +++ b/esmvaltool/diag_scripts/galytska23/select_variables_for_tigramite.py @@ -0,0 +1,308 @@ +""" +Arctic-midlatitude teleconnections Diagnostics. + +This diagnostic calculates the time series needed for Causal Model +Evaluation of Arctic-midlatitude teleconnections. + + Description: + This diagnostic calculates time series of the variables that represent + Arctic-midlatitude teleconnections that are further used for the + Causal Model Evaluation of CMIP6 (Galytska et al., 2023). The output of + this diagnostic is one .nc file per data source. Optionally this + diagnostic plots the time series of each selected variable. If the
+ Possible options for "variable_to_plot:" are: + Arctic_temperature + Psl_Ural + Psl_Sib + Psl_Aleut + PV + heat_flux + BK_sic + Ok_sic + Author: Evgenia Galytska, IUP-UB + egalytska@iup.physik.uni-bremen.de + Project: USMILE +""" +import logging +from pathlib import Path +import iris +import numpy as np +import seaborn as sns +from matplotlib import pyplot as plt +from esmvalcore.preprocessor import ( + anomalies, + area_statistics, + meridional_statistics, + zonal_statistics, +) + +import esmvaltool.diag_scripts.shared.iris_helpers as ih +from esmvaltool.diag_scripts.shared import ( + group_metadata, + run_diagnostic, + save_data, +) +from esmvaltool.diag_scripts.shared._base import ( + get_plot_filename, ) + +logger = logging.getLogger(Path(__file__).stem) + +# Fixed parameters +# list of variables to be ignored per model +ignored_variables = {"HadISST": ["heat_flux"]} + +# list of variables per dataset that will be processed +proc_vars = { + "ERA5": [ + 'PV', 'Arctic_temperature', 'Psl_Ural', 'Psl_Sib', 'Psl_Aleut', + 'heat_flux' + ], + "HadISST": ['BK_sic', 'Ok_sic'], + "all_other_datasets": [ + 'PV', 'Arctic_temperature', 'Psl_Ural', 'Psl_Sib', 'Psl_Aleut', + 'heat_flux', 'BK_sic', 'Ok_sic' + ], +} + + +def get_provenance_record(ancestor_files): + """Create a provenance record describing the diagnostic data and plot.""" + record = { + 'authors': ['galytska_evgenia'], + 'ancestors': ancestor_files, + 'projects': ['usmile'], + 'references': [ + 'galytska23jgr', + ], + } + return record + + +def calculate_polar_vortex(dict_item): + """Calculate polar vortex.""" + var = iris.load_cube(dict_item['filename']) + var = var.collapsed('air_pressure', iris.analysis.MEAN) + # change the sign of polar vortex so the positive values + # (negative geopotential height anomalies) stand for + # the strong polar vortex, similarly to + # Kretschmer et al., 2016 and Galytska et al., 2023 + var.data *= -1 + var.var_name = 'PV' + return var + + +def calculate_arctic_tas(dict_item): + """Read Arctic temperature data.""" + var = iris.load_cube(dict_item['filename']) + var.var_name = 'Arctic_temperature' + return var + + +def calculate_slp(dict_item): + """Get surface pressure.""" + var = iris.load_cube(dict_item['filename']) + # calculate hPa from Pa. 
+    var.data /= 100
+    return var
+
+
+def finalize_bk_ice(dict_item):
+    """Read sea ice data (Barents-Kara seas)."""
+    var = iris.load_cube(dict_item['filename'])
+    var.var_name = 'BK_sic'
+    return var
+
+
+def finalize_ok_ice(dict_item):
+    """Read sea ice data (Sea of Okhotsk)."""
+    var = iris.load_cube(dict_item['filename'])
+    var.var_name = 'Ok_sic'
+    return var
+
+
+def prepare_heat_flux(dict_item):
+    """Prepare variables for the heat flux calculations."""
+    var = iris.load_cube(dict_item['filename'])
+    var_avg = area_statistics(var, operator='mean')
+    var_mermean = meridional_statistics(var, operator='mean')
+    deviation = var_mermean - var_avg
+    return deviation
+
+
+def calculate_heat_flux(list_va_ta):
+    """Calculate eddy poleward heat flux."""
+    heat_flux = list_va_ta[0] * list_va_ta[1]
+    hf_anom = anomalies(heat_flux, period='monthly')
+    hf_anom_zm = zonal_statistics(hf_anom, operator='mean')
+    hf_anom_zm.var_name = 'heat_flux'
+    return hf_anom_zm
+
+
+def variable_cases(var_name, var):
+    """Match preprocessor name and corresponding calculations."""
+    if var_name == 'pv':
+        out_var = calculate_polar_vortex(var)
+    elif var_name == 'pre_tas':
+        out_var = calculate_arctic_tas(var)
+    elif var_name == 'pressure_ural':
+        out_var = calculate_slp(var)
+        out_var.var_name = 'Psl_Ural'
+    elif var_name == 'pressure_sib':
+        out_var = calculate_slp(var)
+        out_var.var_name = 'Psl_Sib'
+    elif var_name == 'pressure_aleut':
+        out_var = calculate_slp(var)
+        out_var.var_name = 'Psl_Aleut'
+    elif var_name == 'bk_ice':
+        out_var = finalize_bk_ice(var)
+    elif var_name == 'ok_ice':
+        out_var = finalize_ok_ice(var)
+    elif var_name == 'heat_flux':
+        out_var = prepare_heat_flux(var)
+    else:
+        raise NotImplementedError(f"Variable '{var_name}' not yet supported.")
+    return out_var
+
+
+def calculate_variables(dataset_dict):
+    """Calculate all necessary variables."""
+    logger.debug("Variables are calculated for the following datasources: %s",
+                 dataset_dict.keys())
+    processed_vars = {}
+    for dataset, variables in dataset_dict.items():
+        processed_vars[dataset] = {}
+
+        logger.debug("Calculating final variables %s for %s dataset",
+                     variables, dataset)
+
+        if dataset in ignored_variables:
+            to_ignore_vars = ignored_variables.get(dataset, None)
+            for var in variables:
+                var_name = var['preprocessor']
+                if var_name not in to_ignore_vars:
+                    new_var = variable_cases(var_name, var)
+                    new_var_name = new_var.var_name
+                    processed_vars[dataset][new_var_name] = new_var
+        else:
+            tmp_list = []
+            for var in variables:
+                var_name = var['preprocessor']
+                if var_name == "heat_flux":
+                    tmp_list.append(variable_cases(var_name, var))
+                else:
+                    new_var = variable_cases(var_name, var)
+                    new_var_name = new_var.var_name
+                    processed_vars[dataset][new_var_name] = new_var
+            if len(tmp_list) != 2:
+                raise IndexError("The heat_flux preprocessor requires two "
+                                 "variables in the recipe: va and ta")
+            heat_flux = calculate_heat_flux(tmp_list)
+            processed_vars[dataset][heat_flux.var_name] = heat_flux
+
+    return processed_vars
+
+
+def plotting_support(cube, key, **kwargs):
+    """Helper for pretty timeseries plotting."""
+    if cube.coords('time', dim_coords=True):
+        ih.unify_time_coord(cube)
+    iris.quickplot.plot(cube, label=key, **kwargs)
+    plt.legend(loc="center left", bbox_to_anchor=(1, 0.5))
+    plt.ylabel("Anomalies, " + str(cube.units))
+    plt.title(f"Time series of monthly mean {cube.var_name.upper()} anomalies")
+    plt.xticks(rotation=45, ha="right", rotation_mode="anchor")
+
+
+def plot_timeseries(dictionary, var, cfg):
+    """Timeseries plot."""
+    fig = plt.figure(figsize=(10, 4))
+    sns.set_style('whitegrid')
+    colors = plt.cm.viridis(np.linspace(0, 1, len(dictionary.keys())))
+    baseplotname = f"Timeseries_{var}_anomalies"
+    filename = get_plot_filename(baseplotname, cfg)
+    for idx, dataset in enumerate(dictionary.keys()):
+        if var not in proc_vars["HadISST"]:
+            if dataset == "HadISST":
+                continue
+            if dataset != 'ERA5':
+                plotting_support(dictionary[dataset][var],
+                                 dataset, color=colors[idx])
+            else:
+                plotting_support(dictionary[dataset][var],
+                                 dataset,
+                                 color='k',
+                                 linewidth=2)
+        else:
+            if dataset == "ERA5":
+                continue
+            if dataset != 'HadISST':
+                plotting_support(dictionary[dataset][var],
+                                 dataset, color=colors[idx])
+            else:
+                plotting_support(dictionary[dataset][var],
+                                 dataset,
+                                 color='blue',
+                                 linewidth=2)
+    fig.savefig(filename, bbox_inches='tight')
+
+
+def assemble_cube_list(dataset, var, special_datasets):
+    """
+    Assemble a list of processed variable cubes.
+
+    The list of needed variables differs per analyzed dataset. The dict
+    holding the needed variables per dataset needs updating every time a
+    new dataset or variable gets included.
+
+    Parameters
+    ----------
+    dataset: str
+        dataset name.
+    var: dict
+        variable dictionary.
+    special_datasets: list of str
+        list of datasets to be treated separately,
+        with restricted variables.
+
+    Returns
+    -------
+    iris.cube.CubeList
+        list of cubes.
+    """
+    if dataset not in special_datasets:
+        cube_list = iris.cube.CubeList(
+            [var[proc_var] for proc_var in proc_vars["all_other_datasets"]])
+    else:
+        cube_list = iris.cube.CubeList(
+            [var[proc_var] for proc_var in proc_vars[dataset]])
+
+    return cube_list
+
+
+def main(cfg):
+    """Calculate and save final variables into .nc files."""
+    special_datasets = ["ERA5", "HadISST"]
+
+    my_files_dict = group_metadata(cfg['input_data'].values(), 'dataset')
+    all_variables = calculate_variables(my_files_dict)
+
+    # Check if the timeseries should be plotted
+    if cfg['plot_timeseries']:
+        plot_timeseries(all_variables, cfg['variable_to_plot'], cfg)
+    for dataset in my_files_dict:
+        logger.info("Processing final calculations in dataset %s", dataset)
+        prov_record = get_provenance_record([dataset])
+        var = all_variables[dataset]
+        cube_list = assemble_cube_list(dataset, var, special_datasets)
+        save_data(dataset, prov_record, cfg, cube_list)
+        logger.info("%s data is saved in .nc", dataset)
+    logger.info("Done.")
+
+
+if __name__ == '__main__':
+    with run_diagnostic() as config:
+        main(config)
diff --git a/esmvaltool/diag_scripts/hydrology/__init__.py b/esmvaltool/diag_scripts/hydrology/__init__.py
new file mode 100644
index 0000000000..0956a04e64
--- /dev/null
+++ b/esmvaltool/diag_scripts/hydrology/__init__.py
@@ -0,0 +1 @@
+"""Collection of scripts to prepare hydrological model forcings."""
diff --git a/esmvaltool/diag_scripts/hydrology/compute_chunks.py b/esmvaltool/diag_scripts/hydrology/compute_chunks.py
new file mode 100644
index 0000000000..56b3f7b43d
--- /dev/null
+++ b/esmvaltool/diag_scripts/hydrology/compute_chunks.py
@@ -0,0 +1,40 @@
+"""Re-chunk the time dimension, to be used by the regrid processor.
+
+For large cubes, regridding to a high resolution grid increases the size
+of the data. To reduce memory use, we re-chunk the time dimension.
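+For example, regridding a 0.5 x 0.5 degree dataset onto a 0.1 x 0.1 degree
+grid makes each field roughly 25 times larger.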
+
+Related iris issue:
+https://github.com/SciTools/iris/issues/3808
+"""
+import numpy as np
+
+
+def compute_chunks(src, tgt):
+    """Compute the chunk sizes needed to regrid src to tgt."""
+    block_bytes = 50 * (1 << 20)  # 50 MB block size
+
+    if src.dtype == np.float32:
+        dtype_bytes = 4  # size of float32 in bytes
+    else:
+        dtype_bytes = 8  # size of float64 in bytes
+
+    ntime = src.coord('time').shape[0]
+    tgt_nlat = tgt.coord('latitude').shape[0]
+    tgt_nlon = tgt.coord('longitude').shape[0]
+
+    # Define blocks along the time dimension
+    min_nblocks = int(ntime * tgt_nlat * tgt_nlon * dtype_bytes / block_bytes)
+    min_nblocks = max(min_nblocks, 1)
+    timefull = ntime // min_nblocks
+    timepart = ntime % timefull
+
+    nfullblocks = ntime // timefull
+    npartblocks = int(timepart > 0)
+
+    time_chunks = (timefull, ) * nfullblocks + (timepart, ) * npartblocks
+    src_chunks = (
+        time_chunks,
+        (src.coord('latitude').shape[0], ),
+        (src.coord('longitude').shape[0], ),
+    )
+    return src_chunks
diff --git a/esmvaltool/diag_scripts/hydrology/derive_evspsblpot.py b/esmvaltool/diag_scripts/hydrology/derive_evspsblpot.py
new file mode 100644
index 0000000000..60c8bee355
--- /dev/null
+++ b/esmvaltool/diag_scripts/hydrology/derive_evspsblpot.py
@@ -0,0 +1,113 @@
+"""Derive Potential Evapotranspiration (evspsblpot) using De Bruin (2016).
+
+De Bruin, H. A. R., Trigo, I. F., Bosveld, F. C., Meirink, J. F.: A
+Thermodynamically Based Model for Actual Evapotranspiration of an Extensive
+Grass Field Close to FAO Reference, Suitable for Remote Sensing Application,
+American Meteorological Society, 17, 1373-1382, DOI: 10.1175/JHM-D-15-0006.1,
+2016.
+"""
+import numpy as np
+import iris
+
+
+def tetens_derivative(tas):
+    """Compute the derivative of the Tetens formula for saturated vapor
+    pressure.
+
+    Tetens formula (https://en.wikipedia.org/wiki/Tetens_equation) :=
+    es(T) = e0 * exp(a * T / (T + b))
+
+    Derivative (checked with Wolfram Alpha):
+    des / dT = a * b * e0 * exp(a * T / (b + T)) / (b + T)^2
+    """
+    # Ensure temperature is in degC
+    tas.convert_units('degC')
+
+    # Saturated vapour pressure at 273 Kelvin
+    e0_const = iris.coords.AuxCoord(np.float32(6.112),
+                                    long_name='Saturated vapour pressure',
+                                    units='hPa')
+    emp_a = np.float32(17.67)  # empirical constant a
+
+    # Empirical constant b in Tetens formula
+    emp_b = iris.coords.AuxCoord(np.float32(243.5),
+                                 long_name='Empirical constant b',
+                                 units='degC')
+    exponent = iris.analysis.maths.exp(emp_a * tas / (tas + emp_b))
+    return (exponent * e0_const / (tas + emp_b)**2) * (emp_a * emp_b)
+
+
+def get_constants(psl):
+    """Define constants to compute De Bruin (2016) reference evaporation.
+
+    The definition of the rv and rd constants is provided in
+    Wallace and Hobbs (2006), 2.6 equation 3.14.
+    The definition of lambda and cp is provided in Wallace and Hobbs (2006).
+    The definition of beta and cs is provided in De Bruin (2016), section 4a.
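+
+    Returns, in order: the psychrometric constant gamma (hPa K-1), the
+    empirical constant cs (W m-2), the correction constant beta (W m-2),
+    and the latent heat of vaporization lambda (J kg-1).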
+ """ + # Ensure psl is in hPa + psl.convert_units('hPa') + + # Definition of constants + # source='Wallace and Hobbs (2006), 2.6 equation 3.14', + rv_const = iris.coords.AuxCoord(np.float32(461.51), + long_name='Gas constant water vapour', + units='J K-1 kg-1') + # source='Wallace and Hobbs (2006), 2.6 equation 3.14', + rd_const = iris.coords.AuxCoord(np.float32(287.0), + long_name='Gas constant dry air', + units='J K-1 kg-1') + + # Latent heat of vaporization in J kg-1 (or J m-2 day-1) + # source='Wallace and Hobbs 2006' + lambda_ = iris.coords.AuxCoord(np.float32(2.5e6), + long_name='Latent heat of vaporization', + units='J kg-1') + + # Specific heat of dry air constant pressure + # source='Wallace and Hobbs 2006', + cp_const = iris.coords.AuxCoord(np.float32(1004), + long_name='Specific heat of dry air', + units='J K-1 kg-1') + + # source='De Bruin (2016), section 4a', + beta = iris.coords.AuxCoord(np.float32(20), + long_name='Correction Constant', + units='W m-2') + + # source = 'De Bruin (2016), section 4a', + cs_const = iris.coords.AuxCoord(np.float32(110), + long_name='Empirical constant', + units='W m-2') + + # source = De Bruin (10.1175/JHM-D-15-0006.1), page 1376 + # gamma = (rv/rd) * (cp*msl/lambda_) + # iris.exceptions.NotYetImplementedError: coord / coord + rv_rd_const = rv_const.points[0] / rd_const.points[0] + gamma = rv_rd_const * (psl * cp_const / lambda_) + return gamma, cs_const, beta, lambda_ + + +def debruin_pet(psl, rsds, rsdt, tas): + """Compute De Bruin (2016) reference evaporation. + + Implement equation 6 from De Bruin (10.1175/JHM-D-15-0006.1) + """ + # Variable derivation + delta_svp = tetens_derivative(tas) + gamma, cs_const, beta, lambda_ = get_constants(psl) + + # the definition of the radiation components according to the paper: + kdown = rsds + kdown_ext = rsdt + # Equation 5 + rad_factor = np.float32(1 - 0.23) + net_radiation = (rad_factor * kdown) - (kdown * cs_const / kdown_ext) + # Equation 6 + # the unit is W m-2 + ref_evap = ((delta_svp / (delta_svp + gamma)) * net_radiation) + beta + + pet = ref_evap / lambda_ + pet.var_name = 'evspsblpot' + pet.standard_name = 'water_potential_evaporation_flux' + pet.long_name = 'Potential Evapotranspiration' + return pet diff --git a/esmvaltool/diag_scripts/hydrology/globwat.py b/esmvaltool/diag_scripts/hydrology/globwat.py new file mode 100755 index 0000000000..1ac1e85c77 --- /dev/null +++ b/esmvaltool/diag_scripts/hydrology/globwat.py @@ -0,0 +1,321 @@ +"""Globwat diagnostic.""" +import logging +from pathlib import Path + +import numpy as np +import xarray as xr +import pandas as pd +import dask.array as da +import iris + +from esmvalcore.preprocessor import regrid +from esmvaltool.diag_scripts.hydrology.derive_evspsblpot import debruin_pet +from esmvaltool.diag_scripts.hydrology.compute_chunks import compute_chunks +from esmvaltool.diag_scripts.shared import (ProvenanceLogger, + get_diagnostic_filename, + group_metadata, + run_diagnostic) + + +logger = logging.getLogger(Path(__file__).name) + + +def create_provenance_record(): + """Create a provenance record.""" + record = { + 'caption': "Forcings for the GlobWat hydrological model.", + 'domains': ['global'], + 'authors': [ + 'abdollahi_banafsheh', + 'alidoost_sarah', + ], + 'projects': [ + 'ewatercycle', + ], + 'references': [ + 'acknow_project', + 'debruin16ams', + 'hoogeveen15hess', + 'langbein1949usgs', + ], + 'ancestors': [], + } + return record + + +def rechunk_and_regrid(src, tgt, scheme): + """Rechunk cube src and regrid it onto the grid of cube tgt.""" 
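+    # Worked example of the chunking arithmetic (hypothetical sizes): for a
+    # float32 cube with 3600 time steps regridded onto a 1800 x 3600 target
+    # grid, 3600 * 1800 * 3600 * 4 bytes is roughly 93 GB, so compute_chunks
+    # aims for int(93 GB / 50 MB) = 1779 blocks, i.e. time chunks of
+    # 3600 // 1779 = 2 time steps each.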
+    src_chunks = compute_chunks(src, tgt)
+    src.data = src.lazy_data().rechunk(src_chunks)
+    return regrid(src, tgt, scheme)
+
+
+def change_data_type(cube):
+    """Change data type to float32."""
+    cube.data = cube.core_data().astype('float32')
+    for coord_name in 'latitude', 'longitude', 'time':
+        coord = cube.coord(coord_name)
+        coord.points = coord.core_points().astype('float32')
+        coord.bounds = None
+        coord.guess_bounds()
+    return cube
+
+
+def _convert_units(cube):
+    """Convert the unit of the cube, used only for water variables.
+
+    From kg m-2 s-1 to kg m-2 month-1 or kg m-2 day-1.
+    Note that the unit kg m-2 s-1 is equivalent to mm s-1.
+    """
+    mip = cube.attributes['mip']
+
+    if mip == 'Amon':
+        cube.convert_units('kg m-2 month-1')  # equivalent to mm/month
+    elif mip == 'day':
+        cube.convert_units('kg m-2 day-1')  # equivalent to mm/day
+    return cube
+
+
+def _fix_negative_values(cube):
+    """Change negative values to zero."""
+    cube.data = da.where(cube.core_data() < 0, 0, cube.core_data())
+    return cube
+
+
+def get_input_cubes(metadata):
+    """Return a dictionary with all (preprocessed) input files."""
+    provenance = create_provenance_record()
+    all_vars = {}
+    for attributes in metadata:
+        short_name = attributes['short_name']
+        filename = attributes['filename']
+        logger.info("Loading variable %s", short_name)
+        cube = iris.load_cube(filename)
+        all_vars[short_name] = change_data_type(cube)
+        cube.attributes['mip'] = attributes['mip']
+        provenance['ancestors'].append(filename)
+    return all_vars, provenance
+
+
+def load_target(cfg):
+    """Load target grid."""
+    filename = Path(cfg['auxiliary_data_dir']) / cfg['target_grid_file']
+    cube = iris.load_cube(str(filename))
+    for coord in 'longitude', 'latitude':
+        if not cube.coord(coord).has_bounds():
+            cube.coord(coord).guess_bounds()
+    return cube
+
+
+def langbein_pet(tas):
+    """Calculate potential ET using the Langbein method.
+
+    The Langbein curve represents an empirical relationship between
+    temperature and potential ET (pet), where T is the annual average
+    temperature in degrees Celsius, pet is in mm per year, and a, b, c
+    are unitless empirical constants.
+    Reference: https://doi.org/10.3133/cir52 page 8, figure 1.
+    An example of using the Langbein method can be found at:
+    https://doi.org/10.1080/02626667.2017.1332416 page 1472, equation 7.
+    """
+    tas.convert_units('degC')
+    constant_a = iris.coords.AuxCoord(np.float32(325),
+                                      long_name='first constant', units=None)
+    constant_b = iris.coords.AuxCoord(np.float32(21),
+                                      long_name='second constant', units=None)
+    constant_c = iris.coords.AuxCoord(np.float32(0.9),
+                                      long_name='third constant', units=None)
+
+    # Assumption here: tas is constant over time, so the monthly/daily
+    # average value is equal to the annual average.
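+    # The Langbein curve as a worked equation (constants from above):
+    # pet [mm yr-1] = 325 + 21 * T + 0.9 * T^2, with T in degC; e.g.
+    # T = 10 degC gives 325 + 210 + 90 = 625 mm per year.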
+    pet = (tas) * constant_b + (tas ** 2) * constant_c + constant_a
+    pet.units = 'kg m-2 year-1'  # equivalent to mm year-1
+    pet.convert_units('kg m-2 s-1')  # convert to a cmor compatible unit
+    pet.var_name = 'evspsblpot'
+    pet.standard_name = 'water_potential_evaporation_flux'
+    pet.long_name = 'Potential Evapotranspiration'
+    return pet
+
+
+def get_cube_time_info(cube):
+    """Return the first time step of the cube formatted as YYYYMMDD."""
+    coord_time = cube.coord('time')
+    time = coord_time.cell(0).point
+    time_step = time.strftime("%Y%m%d")
+    return time_step
+
+
+def get_cube_data_info(cube):
+    """Return short_name and mip from the cube."""
+    short_name = cube.var_name
+    mip = cube.attributes['mip']
+    return short_name, mip
+
+
+def _swap_western_hemisphere(cube):
+    """Set longitude values in the range -180, 180.
+
+    Western hemisphere longitudes should be negative.
+    """
+    array = xr.DataArray.from_iris(cube)
+
+    # Set longitude values in range -180, 180.
+    array['lon'] = (array['lon'] + 180) % 360 - 180
+
+    # Re-index data along longitude values
+    west = array.where(array.lon < 0, drop=True)
+    east = array.where(array.lon >= 0, drop=True)
+    return west.combine_first(east)
+
+
+def _flip_latitudes(array):
+    """Flip latitudes for writing as ascii.
+
+    Latitudes should be ordered from 90 to -90 (decreasing).
+    """
+    flipped = array[::-1, ...]
+    flipped['lat'] = array['lat'] * -1
+    return flipped
+
+
+def save_to_ascii(cube, file_name):
+    """Save data to an ascii file.
+
+    Data with index [0, 0] should be at -180, 90 lon/lat.
+    """
+    # Re-index data
+    array = _swap_western_hemisphere(cube)
+    array = _flip_latitudes(array)
+
+    # Set nodata values
+    array = array.fillna(-9999)
+
+    xmin = array['lon'].min().values
+    ymin = array['lat'].min().values
+    xres = array['lon'].values[1] - array['lon'].values[0]
+    output = open(file_name, "w")
+    output.write(f"ncols {array.shape[1]}\n")
+    output.write(f"nrows {array.shape[0]}\n")
+    output.write(f"xllcorner {xmin}\n")
+    output.write(f"yllcorner {ymin}\n")
+    output.write(f"cellsize {xres}\n")
+    output.write(f"NODATA_value {np.int32(-9999)}\n")
+    output.close()
+
+    data_frame = pd.DataFrame(array.values, dtype=array.dtype)
+    data_frame.to_csv(file_name, sep=' ', na_rep='-9999', float_format=None,
+                      header=False, index=False, mode='a')
+
+
+def make_filename(dataset_name, cfg, cube, extension='asc'):
+    """Return a valid path for saving a diagnostic data file.
+
+    Filenames are specific to GlobWat.
+    """
+    time_stamp = get_cube_time_info(cube)
+    short_name, mip = get_cube_data_info(cube)
+    if cfg['evaporation_method'] == 'langbein':
+        pet_method_name = 'langbein_'
+    else:
+        pet_method_name = 'debruin_'
+
+    if short_name == 'pet':
+        pet_method = pet_method_name
+    else:
+        pet_method = ''
+
+    base_name = (f"globwat_{dataset_name}_{mip}_{short_name}_{pet_method}"
+                 f"{time_stamp}")
+    filename = get_diagnostic_filename(base_name, cfg, extension=extension)
+    return filename
+
+
+def _shift_era5_time_coordinate(cube):
+    """Shift instantaneous variables 30 minutes forward in time.
+
+    After this shift, as an example:
+    time format [1990, 1, 1, 11, 30, 0] will be [1990, 1, 1, 12, 0, 0].
+    For aggregated variables, the time format is already
+    [1990, 1, 1, 12, 0, 0].
+    """
+    if not cube.attributes['mip'] == 'Amon':
+        time = cube.coord(axis='T')
+        time.points = time.points + 30 / (24 * 60)
+        time.bounds = None
+        time.guess_bounds()
+    return cube
+
+
+def main(cfg):
+    """Process data for the GlobWat hydrological model.
+
+    These variables are needed in all_vars:
+    pr (precipitation_flux)
+    psl (air_pressure_at_mean_sea_level)
+    rsds (surface_downwelling_shortwave_flux_in_air)
+    rsdt (toa_incoming_shortwave_flux)
+    tas (air_temperature)
+    """
+    # Load target grid to be used in re-gridding
+    target_cube = load_target(cfg)
+
+    input_metadata = cfg['input_data'].values()
+    for dataset_name, metadata in group_metadata(input_metadata,
+                                                 'dataset').items():
+        all_vars, provenance = get_input_cubes(metadata)
+
+        # Fix time coordinate of ERA5 instantaneous variables
+        if dataset_name == 'ERA5':
+            _shift_era5_time_coordinate(all_vars['tas'])
+
+        if cfg['evaporation_method'] == 'langbein':
+            logger.info("Calculating PET using the langbein method")
+            all_vars.update(pet=langbein_pet(all_vars['tas']))
+        else:
+            logger.info("Calculating PET using the debruin method")
+            # Fix time coordinate of ERA5 instantaneous variables
+            if dataset_name == 'ERA5':
+                _shift_era5_time_coordinate(all_vars['psl'])
+            all_vars.update(pet=debruin_pet(
+                psl=all_vars['psl'],
+                rsds=all_vars['rsds'],
+                rsdt=all_vars['rsdt'],
+                tas=all_vars['tas']))
+
+        # Add mip to pet cube attribute
+        all_vars['pet'].attributes['mip'] = all_vars['pr'].attributes['mip']
+        all_vars['pet'].var_name = 'pet'
+
+        # Change negative values for pr to zero
+        _fix_negative_values(all_vars['pr'])
+
+        for key in ['pr', 'pet']:
+            cube = all_vars[key]
+
+            # Convert unit
+            _convert_units(cube)
+
+            # Re-grid data according to the target cube
+            cube = rechunk_and_regrid(cube, target_cube, cfg['regrid_scheme'])
+
+            # Save data as an ascii file per each time step
+            for sub_cube in cube.slices_over('time'):
+
+                # Remove time dimension of length one
+                new_cube = iris.util.squeeze(sub_cube)
+
+                # Make a file name
+                filename = make_filename(
+                    dataset_name, cfg, new_cube, extension='asc'
+                )
+
+                # Save to ascii
+                save_to_ascii(new_cube, filename)
+
+                # Store provenance
+                with ProvenanceLogger(cfg) as provenance_logger:
+                    provenance_logger.log(filename, provenance)
+
+
+if __name__ == '__main__':
+    with run_diagnostic() as config:
+        main(config)
diff --git a/esmvaltool/diag_scripts/hydrology/hydro_forcing.py b/esmvaltool/diag_scripts/hydrology/hydro_forcing.py
new file mode 100644
index 0000000000..57c4c61e66
--- /dev/null
+++ b/esmvaltool/diag_scripts/hydrology/hydro_forcing.py
@@ -0,0 +1,191 @@
+"""Hydro forcing diagnostic."""
+import logging
+from pathlib import Path
+
+import matplotlib.pyplot as plt
+import numpy as np
+import xarray as xr
+
+from esmvaltool.diag_scripts.shared import (
+    ProvenanceLogger,
+    get_diagnostic_filename,
+    get_plot_filename,
+    group_metadata,
+    run_diagnostic,
+)
+
+logger = logging.getLogger(Path(__file__).name)
+
+
+def get_provenance_record(caption: str, ancestors: list):
+    """Create a provenance record describing the diagnostic data and plots."""
+    record = {
+        'caption': caption,
+        'domains': ['global'],
+        'authors': [
+            'smeets_stef',
+            'aerts_jerom',
+        ],
+        'projects': [
+            'ewatercycle',
+        ],
+        'references': [
+            'acknow_project',
+        ],
+        'ancestors': ancestors,
+    }
+    return record
+
+
+def log_provenance(caption: str, filename: str, cfg: dict, ancestors: list):
+    """Log provenance info."""
+    provenance_record = get_provenance_record(caption, ancestors=ancestors)
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(filename, provenance_record)
+
+    logger.info('Output stored as %s', filename)
+
+
+def plot_data(*, cfg: dict, datasets: dict, xaxis: str, yaxis: str,
+              xlabel: str, ylabel: str, caption: str, name: str,
+              ancestors: list):
+    """Plot data."""
+    figure, _ = plt.subplots(dpi=300)
+
+    for label in datasets.dataset:
+        label = str(label.data)
+        dataset = datasets.sel(dataset=label)
+        if 'time' in dataset:
+            dataset = dataset.dropna(dim='time')  # remove nan
+            figure.autofmt_xdate()  # rotate date labels
+        plt.plot(dataset[xaxis], dataset[yaxis], label=label)
+
+    plt.xlabel(xlabel)
+    plt.ylabel(ylabel)
+    plt.title(caption)
+    plt.legend()
+    plt.show()
+
+    filename_plot = get_plot_filename(name + '_plot', cfg)
+    figure.savefig(filename_plot, dpi=300, bbox_inches='tight')
+    plt.close(figure)
+
+    # Store provenance
+    log_provenance(caption, filename_plot, cfg, ancestors)
+
+
+def plot_timeseries(cfg, metadata):
+    """Plot timeseries data."""
+    short_name = 'pr'
+    xaxis = 'time'
+
+    datasets = read_input_data(metadata)
+    ancestors = [info['filename'] for info in metadata]
+
+    time_period = cfg['time_period']
+
+    var = datasets[short_name]
+
+    time_unit = time_period[0].upper()
+    start_date = np.datetime_as_string(datasets.time.min(), unit=time_unit)
+    end_date = np.datetime_as_string(datasets.time.max(), unit=time_unit)
+
+    name = f'{var.long_name}_{time_period}'
+    caption = f"{var.long_name} per {time_period} for {start_date}:{end_date}"
+
+    plot_data(
+        cfg=cfg,
+        datasets=datasets,
+        xaxis=xaxis,
+        yaxis=short_name,
+        xlabel=f'{xaxis.capitalize()} / {time_period}',
+        ylabel=f'{var.long_name} / {var.units}',
+        caption=caption,
+        name=name,
+        ancestors=ancestors,
+    )
+
+    filename_data = get_diagnostic_filename(name, cfg, extension='nc')
+    datasets.to_netcdf(filename_data)
+    log_provenance(caption, filename_data, cfg, ancestors)
+
+
+def plot_climatology(cfg, metadata):
+    """Plot climatology data."""
+    short_name = 'pr'
+
+    datasets = read_input_data(metadata)
+    var = datasets[short_name]
+
+    xaxis = var.dims[-1]  # i.e. month_number / day_of_year
+    xlabel = xaxis.replace('_', ' ')
+    caption = f'{var.long_name} climatology statistics per {xlabel}'
+
+    ancestors = [info['filename'] for info in metadata]
+
+    name = f'{var.long_name}_climatology_{xaxis}'
+
+    plot_data(
+        cfg=cfg,
+        datasets=datasets,
+        xaxis=xaxis,
+        yaxis=short_name,
+        xlabel=xlabel.capitalize(),
+        ylabel=f'{var.long_name} / {var.units}',
+        caption=caption,
+        name=name,
+        ancestors=ancestors,
+    )
+
+    filename_data = get_diagnostic_filename(name, cfg, extension='nc')
+    datasets.to_netcdf(filename_data)
+    log_provenance(caption, filename_data, cfg, ancestors)
+
+
+def read_input_data(metadata: list, dim: str = 'dataset'):
+    """Load data from metadata.
+
+    Read the input data from the list of given data sets. `metadata` is
+    a list of metadata containing the filenames to load. The datasets
+    are stacked along the `dim` dimension. Returns an xarray.Dataset.
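+
+    A minimal sketch of the expected input (hypothetical filenames):
+
+        metadata = [
+            {'filename': 'ERA5_pr.nc', 'dataset': 'ERA5'},
+            {'filename': 'ERA-Interim_pr.nc', 'dataset': 'ERA-Interim'},
+        ]
+        datasets = read_input_data(metadata)  # stacked along 'dataset'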
+ """ + identifiers = [] + datasets = [] + for info in metadata: + filename = info['filename'] + dataset = xr.load_dataset(filename) + datasets.append(dataset) + + identifier = info[dim] + identifiers.append(identifier) + + stacked_datasets = xr.concat(datasets, dim=dim) + stacked_datasets[dim] = identifiers + + return stacked_datasets + + +def main(cfg): + """Load and plot hydro forcing data.""" + plot_type = cfg['plot_type'] + + input_data = cfg['input_data'].values() + variable_groups = group_metadata(input_data, 'variable_group') + + plot_func_mapping = { + 'climatology': plot_climatology, + 'timeseries': plot_timeseries, + } + + for metadata in variable_groups.values(): + try: + plot_func = plot_func_mapping[plot_type] + except KeyError as err: + raise ValueError(f'Unknown plot_type: {plot_type!r}') from err + + plot_func(cfg, metadata=metadata) + + +if __name__ == '__main__': + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/hydrology/hype.py b/esmvaltool/diag_scripts/hydrology/hype.py new file mode 100644 index 0000000000..c3a995900b --- /dev/null +++ b/esmvaltool/diag_scripts/hydrology/hype.py @@ -0,0 +1,111 @@ +"""HYPE diagnostic.""" +import logging +from pathlib import Path + +import dask.array as da +import iris +import numpy +import pandas + +from esmvaltool.diag_scripts.shared import (ProvenanceLogger, + get_diagnostic_filename, + group_metadata, run_diagnostic) + +logger = logging.getLogger(Path(__file__).name) + + +def get_provenance_record(attributes): + """Create a provenance record.""" + ancestor_file = attributes['filename'] + + record = { + 'caption': "Forcings for the Hype hydrological model.", + 'domains': ['global'], + 'authors': [ + 'pelupessy_inti', + ], + 'projects': [ + 'ewatercycle', + ], + 'references': [ + 'acknow_project', + ], + 'ancestors': [ancestor_file], + } + return record + + +def get_output_stem(attributes): + """Get output file stem, specific to HYPE.""" + short_to_stem = dict(tas="Tobs", + tasmin="TMINobs", + tasmax="TMAXobs", + pr="Pobs") + + shortname = attributes["short_name"] + if shortname in short_to_stem: + stem = Path(short_to_stem[shortname]) + else: + stem = Path(attributes['filename']).stem + '_hype' + + stem = attributes['alias'] / stem + + return stem + + +def get_data_times_and_ids(attributes): + """Get the data table to be written and the times and indices.""" + input_file = attributes['filename'] + + cube = iris.load_cube(input_file) + + data = numpy.array(cube.core_data()) + + # Round times to integer number of days + time_coord = cube.coord('time') + time_coord.points = da.floor(time_coord.core_points()) + time_coord.bounds = None + + times = [x.point.strftime("%Y-%m-%d") for x in time_coord.cells()] + + # first string to float, then to int + ids = cube.coord('shape_id').core_points().astype(float).astype(int) + + return data, times, ids + + +def main(cfg): + """Process data for use as input to the HYPE hydrological model.""" + input_data = cfg['input_data'].values() + grouped_input_data = group_metadata(input_data, + 'long_name', + sort='dataset') + + for long_name in grouped_input_data: + logger.info("Processing variable %s", long_name) + for attributes in grouped_input_data[long_name]: + logger.info("Processing dataset %s", attributes['dataset']) + + output_file = get_diagnostic_filename(get_output_stem(attributes), + cfg, 'txt') + Path(output_file).parent.mkdir(exist_ok=True) + + data, times, ids = get_data_times_and_ids(attributes) + + frame = pandas.DataFrame(data, index=times, 
columns=ids) + + frame.to_csv(output_file, + sep=' ', + index_label="DATE", + float_format='%.3f') + + # Store provenance + provenance_record = get_provenance_record(attributes) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(output_file, provenance_record) + + +if __name__ == '__main__': + + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/hydrology/lisflood.py b/esmvaltool/diag_scripts/hydrology/lisflood.py new file mode 100644 index 0000000000..29af76343f --- /dev/null +++ b/esmvaltool/diag_scripts/hydrology/lisflood.py @@ -0,0 +1,166 @@ +"""LISFLOOD diagnostic.""" +import logging +from pathlib import Path + +import iris +import numpy as np +import xarray as xr +from iris.analysis.maths import exp as iris_exp + +from esmvaltool.diag_scripts.shared import (ProvenanceLogger, + get_diagnostic_filename, + group_metadata, run_diagnostic) + +logger = logging.getLogger(Path(__file__).name) + + +def get_provenance_record(ancestor_files): + """Create a provenance record.""" + record = { + 'caption': "Forcings for the LISFLOOD hydrological model.", + 'domains': ['global'], + 'authors': [ + 'verhoeven_stefan', + 'kalverla_peter', + 'camphuijsen_jaro', + ], + 'projects': [ + 'ewatercycle', + ], + 'references': [ + 'acknow_project', + ], + 'ancestors': ancestor_files, + } + return record + + +def get_input_cubes(metadata): + """Create a dict with all (preprocessed) input files.""" + inputs = {} + ancestors = {} + for attributes in metadata: + short_name = attributes['short_name'] + if short_name in inputs: + raise ValueError(f"Multiple input files found for variable " + f"'{short_name}'.") + filename = attributes['filename'] + logger.info("Loading variable %s", short_name) + cube = iris.load_cube(filename) + cube.attributes.clear() + inputs[short_name] = cube + ancestors[short_name] = [filename] + + return inputs, ancestors + + +def shift_era5_time_coordinate(cube, shift=30): + """Shift instantaneous variables (default = 30 minutes forward in time). + + After this shift, as an example: + time format [1990, 1, 1, 11, 30, 0] will be [1990, 1, 1, 12, 0, 0]. + For aggregated variables, already time format is [1990, 1, 1, 12, 0, 0]. + """ + time = cube.coord(axis='T') + time.points = time.points + shift / (24 * 60) + time.bounds = None + time.guess_bounds() + return cube + + +def compute_vapour_pressure(tdps): + """Compute vapour pressure using tetens formula.""" + # taken from Eq. 3.21 of Goudriaan (1977; + # https://library.wur.nl/WebQuery/wurpubs/70980) + if tdps.units != 'degC': + raise Exception('tdps should be in degC') + esat = 6.10588 * iris_exp(17.32491 * tdps / (tdps + 238.102)) + esat.var_name = 'e' + esat.long_name = 'Daily Actual Water Vapour Pressure' + esat.standard_name = 'water_vapor_pressure' + esat.units = 'hPa' + esat.attributes['comment'] = ''.join( + ('Actual water vapour pressure of air near the surface calculated', + ' from tdps using Tetens formula')) + return esat + + +def compute_windspeed(uas, vas): + """Compute absolute wind speed from horizontal components.""" + sfc_wind = (uas**2 + vas**2)**.5 + sfc_wind.var_name = 'sfcWind' + sfc_wind.long_name = 'Daily-Mean Near-Surface Wind Speed' + sfc_wind.standard_name = 'wind_speed' + comment = 'near-surface (usually, 10 meters) wind speed.' 
+    sfc_wind.attributes['comment'] = comment
+    return sfc_wind
+
+
+def save(xrds, var_name, dataset, cfg):
+    """Save processed cube to a lisflood-compatible file."""
+    start_year = int(xrds.time[0].dt.year)
+    end_year = int(xrds.time[-1].dt.year)
+    basename = '_'.join([
+        'lisflood',
+        dataset,
+        cfg['catchment'],
+        var_name,
+        str(start_year),
+        str(end_year),
+    ])
+    output_file = get_diagnostic_filename(basename, cfg)
+    xrds.to_netcdf(output_file, encoding={var_name: {'_FillValue': 1.e20}})
+    return output_file
+
+
+def main(cfg):
+    """Process data for use as input to the LISFLOOD hydrological model."""
+    input_metadata = cfg['input_data'].values()
+    logger.info(input_metadata)
+
+    for dataset, metadata in group_metadata(input_metadata, 'dataset').items():
+        cubes, ancestors = get_input_cubes(metadata)
+
+        if dataset == 'ERA5':
+            shift_era5_time_coordinate(cubes['tas'])
+            shift_era5_time_coordinate(cubes['tdps'])
+            shift_era5_time_coordinate(cubes['uas'])
+            shift_era5_time_coordinate(cubes['vas'])
+
+        # Compute additional variables as input for lisvap
+        tdps = cubes.pop('tdps')
+        uas = cubes.pop('uas')
+        vas = cubes.pop('vas')
+        cubes['e'] = compute_vapour_pressure(tdps)
+        ancestors['e'] = ancestors['tdps']
+        cubes['sfcWind'] = compute_windspeed(uas, vas)
+        ancestors['sfcWind'] = ancestors['uas'] + ancestors['vas']
+
+        cubes['pr'].units = 'mm d-1'
+
+        for var_name, cube in cubes.items():
+            # Western hemisphere longitudes should be negative
+            points = cube.coord('longitude').points
+            cube.coord('longitude').points = (points + 180) % 360 - 180
+            # latitudes decreasing
+            cube = cube[:, ::-1, ...]
+
+            # Convert to xarray dataset (xrds),
+            # remove coordinate bounds, drop extra coordinates and reorder
+            xrds = xr.DataArray.from_iris(cube).to_dataset()
+            ordered_coords = ['lon', 'lat', 'time']
+            extra_coords = np.setdiff1d(xrds.coords, ordered_coords)
+            xrds = xrds.drop(extra_coords)[ordered_coords + [var_name]]
+
+            output_file = save(xrds, var_name, dataset, cfg)
+
+            # Store provenance
+            provenance_record = get_provenance_record(ancestors[var_name])
+            with ProvenanceLogger(cfg) as provenance_logger:
+                provenance_logger.log(output_file, provenance_record)
+
+
+if __name__ == '__main__':
+
+    with run_diagnostic() as config:
+        main(config)
diff --git a/esmvaltool/diag_scripts/hydrology/marrmot.py b/esmvaltool/diag_scripts/hydrology/marrmot.py
new file mode 100644
index 0000000000..5362552641
--- /dev/null
+++ b/esmvaltool/diag_scripts/hydrology/marrmot.py
@@ -0,0 +1,175 @@
+"""Marrmot diagnostic."""
+import logging
+from pathlib import Path
+
+import iris
+import numpy as np
+import scipy.io as sio
+
+from esmvalcore import preprocessor as preproc
+from esmvaltool.diag_scripts.hydrology.derive_evspsblpot import debruin_pet
+from esmvaltool.diag_scripts.shared import (ProvenanceLogger,
+                                            get_diagnostic_filename,
+                                            group_metadata, run_diagnostic)
+
+logger = logging.getLogger(Path(__file__).name)
+
+
+def create_provenance_record():
+    """Create a provenance record."""
+    record = {
+        'caption': "Forcings for the Marrmot hydrological model.",
+        'domains': ['global'],
+        'authors': [
+            'kalverla_peter',
+            'camphuijsen_jaro',
+            'alidoost_sarah',
+        ],
+        'projects': [
+            'ewatercycle',
+        ],
+        'references': [
+            'acknow_project',
+        ],
+        'ancestors': [],
+    }
+    return record
+
+
+def get_input_cubes(metadata):
+    """Return a dict with all (preprocessed) input files."""
+    provenance = create_provenance_record()
+    all_vars = {}
+    for attributes in metadata:
+        short_name = attributes['short_name']
+        if short_name in
all_vars: + raise ValueError( + f"Multiple input files found for variable '{short_name}'.") + filename = attributes['filename'] + logger.info("Loading variable %s", short_name) + all_vars[short_name] = iris.load_cube(filename) + provenance['ancestors'].append(filename) + return all_vars, provenance + + +def _get_extra_info(cube): + """Get start/end time and origin of cube. + + Get the start and end times as an array with length 6 + and get latitude and longitude as an array with length 2 + """ + coord = cube.coord('time') + time_start_end = [] + for index in 0, -1: + time_val = coord.cell(index).point + time_val = [ + time_val.year, + time_val.month, + time_val.day, + time_val.hour, + time_val.minute, + time_val.second, + ] + time_val = [float(time) for time in time_val] + time_start_end.append(time_val) + + # Add data_origin + lat_lon = [ + cube.coord(name).points[0] for name in ('latitude', 'longitude') + ] + return time_start_end, lat_lon + + +def _shift_era5_time_coordinate(cube): + """Shift instantaneous variables 30 minutes forward in time. + + After this shift, as an example: + time format [1990, 1, 1, 11, 30, 0] will be [1990, 1, 1, 12, 0, 0]. + For aggregated variables, already time format is [1990, 1, 1, 12, 0, 0]. + """ + time = cube.coord(axis='T') + time.points = time.points + 30 / (24 * 60) + time.bounds = None + time.guess_bounds() + return cube + + +def main(cfg): + """Process data for use as input to the marrmot hydrological model. + + These variables are needed in all_vars: + tas (air_temperature) + pr (precipitation_flux) + psl (air_pressure_at_mean_sea_level) + rsds (surface_downwelling_shortwave_flux_in_air) + rsdt (toa_incoming_shortwave_flux) + """ + input_metadata = cfg['input_data'].values() + for dataset, metadata in group_metadata(input_metadata, 'dataset').items(): + all_vars, provenance = get_input_cubes(metadata) + + # Fix time coordinate of ERA5 instantaneous variables + if dataset == 'ERA5': + _shift_era5_time_coordinate(all_vars['psl']) + _shift_era5_time_coordinate(all_vars['tas']) + + # Processing variables and unit conversion + # Unit of the fluxes in marrmot should be in kg m-2 day-1 (or mm/day) + logger.info("Processing variable PET") + pet = debruin_pet( + psl=all_vars['psl'], + rsds=all_vars['rsds'], + rsdt=all_vars['rsdt'], + tas=all_vars['tas'], + ) + pet = preproc.area_statistics(pet, operator='mean') + pet.convert_units('kg m-2 day-1') # equivalent to mm/day + + logger.info("Processing variable tas") + temp = preproc.area_statistics(all_vars['tas'], operator='mean') + temp.convert_units('celsius') + + logger.info("Processing variable pr") + precip = preproc.area_statistics(all_vars['pr'], operator='mean') + precip.convert_units('kg m-2 day-1') # equivalent to mm/day + + # Get the start and end times and latitude longitude + time_start_end, lat_lon = _get_extra_info(temp) + + # make data structure + # use astype(np.float64) to make sure data is in + # double-Precision Floating Point + # delta_t_days could also be extracted from the cube + output_data = { + 'forcing': { + 'precip': precip.data.astype(np.float64), + 'temp': temp.data.astype(np.float64), + 'pet': pet.data.astype(np.float64), + 'delta_t_days': float(1), + 'time_unit': 'day', + }, + 'time_start': time_start_end[0], + 'time_end': time_start_end[1], + 'data_origin': lat_lon, + } + + # Save to matlab structure + basename = '_'.join([ + 'marrmot', + dataset, + cfg['basin'], + str(int(output_data['time_start'][0])), + str(int(output_data['time_end'][0])), + ]) + output_name = 
get_diagnostic_filename(basename, cfg, extension='mat') + sio.savemat(output_name, output_data) + + # Store provenance + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(output_name, provenance) + + +if __name__ == '__main__': + + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/hydrology/pcrglobwb.py b/esmvaltool/diag_scripts/hydrology/pcrglobwb.py new file mode 100644 index 0000000000..932e62cd24 --- /dev/null +++ b/esmvaltool/diag_scripts/hydrology/pcrglobwb.py @@ -0,0 +1,151 @@ +"""PCR-GLOBWB diagnostic.""" +import logging +from pathlib import Path + +import dask.array as da +import iris +from esmvalcore.preprocessor import concatenate + +from esmvaltool.diag_scripts.shared import (ProvenanceLogger, + get_diagnostic_filename, + group_metadata, run_diagnostic, + select_metadata) + +logger = logging.getLogger(Path(__file__).name) + + +def get_provenance_record(ancestor_files): + """Create a provenance record.""" + record = { + 'caption': + "Forcings for the PCR-GLOBWB hydrological model.", + 'domains': ['global'], + 'authors': [ + 'aerts_jerom', + 'andela_bouwe', + 'alidoost_sarah', + 'kalverla_peter', + ], + 'projects': [ + 'ewatercycle', + ], + 'references': [ + 'acknow_project', + ], + 'ancestors': ancestor_files, + } + return record + + +def add_spinup_year(cube, cube_climatology): + """Prepend the climatology to the cube. + + To reach the equilibrium, the model was spun up using + the average climatological forcing over each year. + """ + # Remove leap year day from climatology + cube_climatology = cube_climatology.extract( + iris.Constraint(day_of_year=lambda cell: cell < 366)) + + # Set climatology year in front of regular startyear + points = cube.coord('time').points[0] - 366 + points += cube_climatology.coord('day_of_year').points + time = cube.coord('time').copy(points) + + # Drop dimension day_of_year + iris.util.demote_dim_coord_to_aux_coord(cube_climatology, 'day_of_year') + cube_climatology.remove_coord('day_of_year') + + # Add dimension time + cube_climatology.add_dim_coord(time, 0) + + # Round times to integer number of days + time_coord = cube_climatology.coord('time') + time_coord.points = da.floor(time_coord.core_points()) + time_coord.bounds = None + + # Set cube cell_methods to None + cube.cell_methods = () + cube_climatology.cell_methods = () + + # fix dtype + cube.data = cube.core_data().astype('float32') + cube_climatology.data = cube_climatology.core_data().astype('float32') + + for coord_name in 'latitude', 'longitude', 'time': + coord = cube.coord(coord_name) + coord.points = coord.core_points().astype('float32') + coord.bounds = None + coord.guess_bounds() + coord_climatology = cube_climatology.coord(coord_name) + coord_climatology.points = ( + coord_climatology.core_points().astype('float32')) + coord_climatology.bounds = None + coord_climatology.guess_bounds() + + # Create CubeList and concatenate + new_cube = concatenate([cube, cube_climatology]) + + return new_cube + + +def main(cfg): + """Process data for use as input to the PCR-GLOBWB hydrological model.""" + for dataset, metadata in group_metadata(cfg['input_data'].values(), + 'dataset').items(): + for short_name in "pr", "tas": + logger.info("Processing variable %s for dataset %s", short_name, + dataset) + + # Load preprocessed cubes for normal data and climatology + var = select_metadata(metadata, variable_group=short_name)[0] + cube = iris.load_cube(var['filename']) + var_climatology = select_metadata( + metadata, + 
variable_group=short_name + '_climatology', + )[0] + cube_climatology = iris.load_cube(var_climatology['filename']) + + # Create a spin-up year for pcrglob based on the climatology data + cube = add_spinup_year(cube, cube_climatology) + + # Round times to integer number of days + time_coord = cube.coord('time') + time_coord.points = da.floor(time_coord.core_points()) + time_coord.bounds = None + time_coord.guess_bounds() + + # Set lat from highest to lowest value + cube = cube[:, ::-1, ...] + + # Workaround for bug in PCRGlob + # (see https://github.com/UU-Hydro/PCR-GLOBWB_model/pull/13) + for coord_name in ['latitude', 'longitude']: + coord = cube.coord(coord_name) + coord.points = coord.points + 0.001 + + # Unit conversion 'kg m-3 day-1' to 'm' precip (divide by density) + if short_name == "pr": + cube.units = cube.units / 'kg m-3 day-1' + cube.data = cube.core_data() / 1000 + + # Save data + basename = '_'.join([ + 'pcrglobwb', + Path(var['filename']).stem, + cfg['basin'], + ]) + output_file = get_diagnostic_filename(basename, cfg) + iris.save(cube, output_file, fill_value=1.e20) + + # Store provenance + provenance_record = get_provenance_record( + [var['filename'], var_climatology['filename']]) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(output_file, provenance_record) + + +if __name__ == '__main__': + + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/hydrology/wflow.py b/esmvaltool/diag_scripts/hydrology/wflow.py new file mode 100644 index 0000000000..102aadf17c --- /dev/null +++ b/esmvaltool/diag_scripts/hydrology/wflow.py @@ -0,0 +1,268 @@ +"""wflow diagnostic.""" +import logging +from pathlib import Path + +import numpy as np +from osgeo import gdal +import iris + +from esmvalcore.preprocessor import regrid +from esmvaltool.diag_scripts.hydrology.derive_evspsblpot import debruin_pet +from esmvaltool.diag_scripts.shared import (ProvenanceLogger, + get_diagnostic_filename, + group_metadata, run_diagnostic) + +logger = logging.getLogger(Path(__file__).name) + + +def create_provenance_record(): + """Create a provenance record.""" + record = { + 'caption': + "Forcings for the wflow hydrological model.", + 'domains': ['global'], + 'authors': [ + 'kalverla_peter', + 'camphuijsen_jaro', + 'alidoost_sarah', + 'aerts_jerom', + 'andela_bouwe', + ], + 'projects': [ + 'ewatercycle', + ], + 'references': [ + 'acknow_project', + ], + 'ancestors': [], + } + return record + + +def get_input_cubes(metadata): + """Create a dict with all (preprocessed) input files.""" + provenance = create_provenance_record() + all_vars = {} + for attributes in metadata: + short_name = attributes['short_name'] + if short_name in all_vars: + raise ValueError( + f"Multiple input files found for variable '{short_name}'.") + filename = attributes['filename'] + logger.info("Loading variable %s", short_name) + cube = iris.load_cube(filename) + cube.attributes.clear() + all_vars[short_name] = cube + provenance['ancestors'].append(filename) + + return all_vars, provenance + + +def save(cubes, dataset, provenance, cfg): + """Save cubes to file. 
+ + Output format: "wflow_local_forcing_ERA5_Meuse_1990_2018.nc" + """ + time_coord = cubes[0].coord('time') + start_year = time_coord.cell(0).point.year + end_year = time_coord.cell(-1).point.year + basename = '_'.join([ + 'wflow', + dataset, + cfg['basin'], + str(start_year), + str(end_year), + ]) + output_file = get_diagnostic_filename(basename, cfg) + logger.info("Saving cubes to file %s", output_file) + iris.save(cubes, output_file, fill_value=1.e20) + + # Store provenance + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(output_file, provenance) + + +def lapse_rate_correction(height): + """Temperature correction over a given height interval.""" + gamma = iris.coords.AuxCoord(np.float32(0.0065), + long_name='Environmental lapse rate', + units='K m-1') + return height * gamma + + +def regrid_temperature(src_temp, src_height, target_height, scheme): + """Convert temperature to target grid with lapse rate correction.""" + # Convert 2m temperature to sea-level temperature (slt) + src_dtemp = lapse_rate_correction(src_height) + src_slt = src_temp.copy(data=src_temp.core_data() + src_dtemp.core_data()) + + # Interpolate sea-level temperature to target grid + target_slt = regrid(src_slt, target_height, scheme) + + # Convert sea-level temperature to new target elevation + target_dtemp = lapse_rate_correction(target_height) + target_temp = target_slt + target_temp.data = target_slt.core_data() - target_dtemp.core_data() + + return target_temp + + +def load_dem(filename): + """Load DEM into iris cube.""" + logger.info("Reading digital elevation model from %s", filename) + if filename.suffix.lower() == '.nc': + cube = iris.load_cube(str(filename)) + elif filename.suffix.lower() == '.map': + cube = _load_pcraster_dem(filename) + else: + raise ValueError(f"Unknown file format {filename}. 
Supported formats " + "are '.nc' and '.map'.") + for coord in 'longitude', 'latitude': + if not cube.coord(coord).has_bounds(): + logger.warning("Guessing DEM %s bounds", coord) + cube.coord(coord).guess_bounds() + return cube + + +def _load_pcraster_dem(filename): + """Load DEM from a PCRASTER .map file.""" + dataset = gdal.Open(str(filename)) + lon_offset, lon_step, _, lat_offset, _, lat_step = dataset.GetGeoTransform( + ) + lon_size, lat_size = dataset.RasterXSize, dataset.RasterYSize + data = dataset.ReadAsArray() + data = np.ma.masked_less(data, -1e8) + dataset = None + + lons = lon_offset + lon_step * (np.arange(lon_size) + 0.5) + lats = lat_offset + lat_step * (np.arange(lat_size) + 0.5) + + lon_coord = iris.coords.DimCoord( + lons, + var_name='lon', + standard_name='longitude', + units='degrees', + ) + lat_coord = iris.coords.DimCoord( + lats, + var_name='lat', + standard_name='latitude', + units='degrees', + ) + + cube = iris.cube.Cube( + data, + var_name='height', + units='m', + dim_coords_and_dims=[ + (lat_coord, 0), + (lon_coord, 1), + ], + ) + return cube + + +def check_dem(dem, cube): + """Check that the DEM and extract_region parameters match.""" + for coord in ('longitude', 'latitude'): + start_dem_coord = dem.coord(coord).cell(0).point + end_dem_coord = dem.coord(coord).cell(-1).point + start_cube_coord = cube.coord(coord).cell(0).point + end_cube_coord = cube.coord(coord).cell(-1).point + if start_dem_coord < start_cube_coord: + logger.warning( + "Insufficient data available, input data starts at %s " + "degrees %s, but should be at least one grid " + "cell larger than the DEM start at %s degrees %s.", + start_cube_coord, coord, start_dem_coord, coord) + if end_dem_coord > end_cube_coord: + logger.warning( + "Insufficient data available, input data ends at %s " + "degrees %s, but should be at least one grid " + "cell larger than the DEM end at %s degrees %s.", + end_cube_coord, coord, end_dem_coord, coord) + + +def shift_era5_time_coordinate(cube, shift=30): + """Shift instantaneous variables (default = 30 minutes forward in time). + + After this shift, as an example: + time format [1990, 1, 1, 11, 30, 0] will be [1990, 1, 1, 12, 0, 0]. + For aggregated variables, already time format is [1990, 1, 1, 12, 0, 0]. 
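+
+    With the default shift of 30 minutes, 30 / (24 * 60) = 1/48 day
+    (about 0.0208 days) is added to each time point.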
+ """ + time = cube.coord(axis='T') + time.points = time.points + shift / (24 * 60) + time.bounds = None + time.guess_bounds() + return cube + + +def main(cfg): + """Process data for use as input to the wflow hydrological model.""" + input_metadata = cfg['input_data'].values() + + for dataset, metadata in group_metadata(input_metadata, 'dataset').items(): + all_vars, provenance = get_input_cubes(metadata) + + if dataset == 'ERA5': + shift_era5_time_coordinate(all_vars['tas']) + shift_era5_time_coordinate(all_vars['psl']) + + # Interpolating variables onto the dem grid + # Read the target cube, which contains target grid and target elevation + dem_path = Path(cfg['auxiliary_data_dir']) / cfg['dem_file'] + dem = load_dem(dem_path) + check_dem(dem, all_vars['pr']) + + logger.info("Processing variable precipitation_flux") + scheme = cfg['regrid'] + pr_dem = regrid(all_vars['pr'], dem, scheme) + + logger.info("Processing variable temperature") + tas_dem = regrid_temperature( + all_vars['tas'], + all_vars['orog'], + dem, + scheme, + ) + + logger.info("Processing variable potential evapotranspiration") + if 'evspsblpot' in all_vars: + pet = all_vars['evspsblpot'] + pet_dem = regrid(pet, dem, scheme) + else: + logger.info("Potential evapotransporation not available, deriving") + psl_dem = regrid(all_vars['psl'], dem, scheme) + rsds_dem = regrid(all_vars['rsds'], dem, scheme) + rsdt_dem = regrid(all_vars['rsdt'], dem, scheme) + pet_dem = debruin_pet( + tas=tas_dem, + psl=psl_dem, + rsds=rsds_dem, + rsdt=rsdt_dem, + ) + pet_dem.var_name = 'pet' + + logger.info("Converting units") + pet_dem.units = pet_dem.units / 'kg m-3' + pet_dem.data = pet_dem.core_data() / 1000. + pet_dem.convert_units('mm day-1') + + pr_dem.units = pr_dem.units / 'kg m-3' + pr_dem.data = pr_dem.core_data() / 1000. + pr_dem.convert_units('mm day-1') + + tas_dem.convert_units('degC') + + # Adjust longitude coordinate to wflow convention + for cube in [tas_dem, pet_dem, pr_dem]: + cube.coord('longitude').points = (cube.coord('longitude').points + + 180) % 360 - 180 + + cubes = iris.cube.CubeList([pr_dem, tas_dem, pet_dem]) + save(cubes, dataset, provenance, cfg) + + +if __name__ == '__main__': + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/hyint/hyint.R b/esmvaltool/diag_scripts/hyint/hyint.R new file mode 100644 index 0000000000..81d4379198 --- /dev/null +++ b/esmvaltool/diag_scripts/hyint/hyint.R @@ -0,0 +1,294 @@ +# ############################################################################# +# hyint.R +# Authors: E. Arnone (ISAC-CNR, Italy) +# J. von Hardenberg (ISAC-CNR, Italy) +# ############################################################################# +# Description +# HyInt is a tool for calculation of the HY-INT index (Giorgi et al. 2011) +# and additional hydroclimatic indices (Giorgi et al. 2014) +# which allow an estimate of the overall behaviour of the hydroclimatic cycle. +# The tool calculates also timeseries and trends over selected regions and +# produces a variety of types of plots including maps and timeseries. The +# timeseries/trend and plotting modules handle also ETCCDI indices data +# calculated with the climdex library through an ad hoc pre-processing. 
+#
+# Details
+# The following indices are calculated based on input daily precipitation data:
+# PRY = mean annual precipitation
+# INT = mean annual precipitation intensity (intensity during wet days, or
+#   simple precipitation intensity index SDII)
+# WSL = mean annual wet spell length (number of consecutive days
+# during each wet spell)
+# DSL = mean annual dry spell length (number of consecutive days
+# during each dry spell)
+# PA = precipitation area (area over which on any given day
+# precipitation occurs)
+# R95 = heavy precipitation index (percent of total precipitation above the
+# 95th percentile of the reference distribution)
+# HY-INT = hydroclimatic intensity. HY-INT = normalized(INT) x normalized(DSL).
+#
+# Initially developed for EC-Earth data and then extended to any model and
+# observational data, the tool produces plots of data vs. a reference
+# dataset (e.g. ERA-INTERIM). Indices are normalized over a reference
+# period. Both absolute and normalized values are made available: users can
+# select the indices to be stored and plotted.
+# The tool makes extensive use of the cfg_hyint configuration file for
+# user-selectable options and to ease providing the needed inputs (e.g.
+# region boundaries for timeseries, or value ranges and labels for figures).
+#
+# Required
+# It reads daily precipitation data through ESMValTool. If requested, input
+# precipitation data are pre-processed by interpolating on a common grid set
+# by the user in the hyint_parameters file.
+# R libraries: "tools", "PCICt", "ncdf4", "maps"
+#
+# Optional
+# Several options can be selected via the configuration file, e.g. provision
+# of external normalization functions for the indices; a reference
+# climatology for the R95 index; type of plots; etc.
+#
+# Caveats
+#
+# Modification history
+# 20181001-arnone_enrico: converted to latest v2.0
+# 20180302-arnone_enrico: converted to ESMValTool2
+# 20171206-arnone_enrico: modularized version accepting climdex indices
+# 20171010-arnone_enrico: modularized version
+# 20170901-arnone_enrico: 1st github version
+#
+# ############################################################################
+
+
+library(tools)
+library(yaml)
+library(ncdf4)
+
+# get path to script and source subroutines
+diag_scripts_dir <- Sys.getenv("diag_scripts")
+spath <- paste0(diag_scripts_dir, "/", "hyint", "/")
+
+source(paste0(spath, "hyint_functions.R"))
+source(paste0(spath, "hyint_metadata.R"))
+source(paste0(spath, "hyint_preproc.R"))
+source(paste0(spath, "hyint_diagnostic.R"))
+source(paste0(spath, "hyint_etccdi_preproc.R"))
+source(paste0(spath, "hyint_trends.R"))
+source(paste0(spath, "hyint_plot_maps.R"))
+source(paste0(spath, "hyint_plot_trends.R"))
+source(paste0(spath, "hyint_parameters.R"))
+
+diag_script_cfg <- paste0(spath, "hyint_parameters.R")
+
+# Read settings and metadata files
+args <- commandArgs(trailingOnly = TRUE)
+settings_file <- args[1]
+settings <- yaml::read_yaml(settings_file)
+# load data from settings
+for (myname in names(settings)) {
+  temp <- get(myname, settings)
+  assign(myname, temp)
+}
+metadata <- yaml::read_yaml(settings$input_files[1])
+
+## check required settings
+if (!all(plot_type %in% c(1, 2, 3, 11, 12, 13, 14, 15))) {
+  stop("requested plot_type not available")
+}
+
+# setup provenance file and list
+provenance_file <- paste0(run_dir, "/", "diagnostic_provenance.yml")
+provenance <- list()
+prov_info <- list()
+
+# get name of climofile for selected variable and
+# list associated to first climofile
+climofiles <- names(metadata)
+climolist <-
+  get(climofiles[1], metadata)
+climolist0 <- climolist
+
+# get variable name
+varname <- paste0("'", climolist$short_name, "'")
+var0 <- "pr"
+
+diag_base <- climolist0$diagnostic
+print(paste0(diag_base, ": starting routine"))
+
+if (etccdi_preproc & !exists("etccdi_dir")) {
+  etccdi_dir <- settings$input_files[2]
+}
+
+dir.create(plot_dir, recursive = T, showWarnings = F)
+dir.create(work_dir, recursive = T, showWarnings = F)
+
+# Set dir
+setwd(run_dir)
+
+# extract metadata
+models_name <- unname(sapply(metadata, "[[", "dataset"))
+reference_model <-
+  unname(sapply(metadata, "[[", "reference_dataset"))[1]
+models_start_year <- unname(sapply(metadata, "[[", "start_year"))
+models_end_year <- unname(sapply(metadata, "[[", "end_year"))
+models_experiment <- unname(sapply(metadata, "[[", "exp"))
+models_ensemble <- unname(sapply(metadata, "[[", "ensemble"))
+
+# select reference dataset; if not available, use the last in the list
+ref_idx <- which(models_name == reference_model)
+if (length(ref_idx) == 0) {
+  ref_idx <- length(models_name)
+}
+
+# check requested time intervals
+if (!anyNA(match(
+  models_start_year[ref_idx]:models_end_year[ref_idx],
+  norm_years[1]:norm_years[2]
+))) {
+  stop(
+    paste0(
+      "normalization period covers the entire dataset: ",
+      "reduce it to calculate meaningful results"
+    )
+  )
+}
+if (trend_years != F) {
+  if (anyNA(match(
+    trend_years[1]:trend_years[2],
+    models_start_year[ref_idx]:models_end_year[ref_idx]
+  ))) {
+    stop("trend period outside available data")
+  }
+  if (trend_years[2] - trend_years[1] < 2) {
+    stop("set at least a 3-year interval for trend calculation")
+  }
+}
+
+# Select regions and indices to be adopted and test selection
+selregions <- match(select_regions, region_codes)
+if (anyNA(selregions)) {
+  stop("requested region not available")
+}
+selfields <- match(select_indices, field_names)
+if (anyNA(selfields)) {
+  stop("requested field not available")
+}
+
+## Run regridding and diagnostic
+if (write_netcdf) {
+  # loop through models
+  for (model_idx in seq_along(models_name)) {
+    # Setup filenames
+    climofile <- climofiles[model_idx]
+    sgrid <- "noregrid"
+    if (rgrid != F) {
+      sgrid <- rgrid
+    }
+    regfile <-
+      getfilename_regridded(run_dir, sgrid, var0, model_idx)
+
+    # If needed, pre-process file and add absolute time axis
+    if (run_regridding) {
+      if (!file.exists(regfile) | force_regridding) {
+        dummy <- hyint_preproc(
+          work_dir, model_idx, ref_idx, climofile,
+          regfile, rgrid
+        )
+      } else {
+        gridfile <- getfilename_indices(work_dir, diag_base, model_idx,
+          grid = T
+        )
+        cdo("griddes", input = regfile, stdout = gridfile)
+        print(paste0(diag_base, ": data file exists: ", regfile))
+        print(paste0(diag_base, ": corresponding grid: ", gridfile))
+      }
+    }
+
+    if (run_diagnostic) {
+      # Loop through seasons and call diagnostic
+      for (seas in seasons) {
+        prov_info <- hyint_diagnostic(work_dir, regfile, model_idx,
+          climofiles,
+          seas,
+          prov_info,
+          rewrite = force_diagnostic
+        )
+      }
+    }
+  }
+}
+
+## Preprocess ETCCDI input files and merge them with HyInt indices
+if (write_netcdf & etccdi_preproc) {
+  for (model_idx in seq_along(models_name)) {
+    prov_info <-
+      hyint_etccdi_preproc(work_dir,
+        etccdi_dir,
+        etccdi_list_import,
+        model_idx,
+        "ALL",
+        prov_info,
+        yrmon = "yr"
+      )
+  }
+}
+
+## Calculate timeseries and trends
+if (write_netcdf & run_timeseries) {
+  for (model_idx in seq_along(models_name)) {
+    for (seas in seasons) {
+      prov_info <- hyint_trends(work_dir, model_idx, seas, prov_info)
+    }
+  }
+}
+
+## Create figures
+plot_type_list <- plot_type
+for (plot_type in plot_type_list) {
+  print(paste0("******** PLOT TYPE: ", plot_type, " *********"))
+  for (seas in seasons) {
+    if (plot_type <= 10) {
+      # Plot maps
+      prov_info <- hyint_plot_maps(
+        work_dir,
+        plot_dir,
+        work_dir,
+        ref_idx,
+        seas,
+        prov_info
+      )
+    } else {
+      # Plot timeseries and trends
+      prov_info <- hyint_plot_trends(
+        work_dir,
+        plot_dir,
+        ref_idx,
+        seas,
+        prov_info
+      )
+    }
+  }
+}
+
+# Assign provenance information for timeseries & trends figures
+for (fname in names(prov_info)) {
+  anc_list <- flatten_lists(prov_info[[fname]]$ancestors)
+  xprov <-
+    list(
+      ancestors = anc_list,
+      authors = list("arnone_enrico", "vonhardenberg_jost"),
+      references = list("giorgi11jc", "giorgi14jgr"),
+      projects = list("c3s-magic"),
+      caption = prov_info[[fname]]$caption,
+      statistics = list("other"),
+      realms = list("atmos"),
+      themes = list("phys"),
+      domains = list("global")
+    )
+  provenance[[fname]] <- xprov
+}
+
+# Write provenance to file
+write_yaml(provenance, provenance_file)
+
+# Closing message
+print(paste0(diag_base, ": done."))
diff --git a/esmvaltool/diag_scripts/hyint/hyint_diagnostic.R b/esmvaltool/diag_scripts/hyint/hyint_diagnostic.R
new file mode 100644
index 0000000000..4dafeaba99
--- /dev/null
+++ b/esmvaltool/diag_scripts/hyint/hyint_diagnostic.R
@@ -0,0 +1,519 @@
+######################################################
+#-----Hydroclimatic Intensity (HyInt) diagnostic-----#
+#-------------E. Arnone (June 2017)------------------#
+######################################################
+
+hyint_diagnostic <- function(work_dir, # nolint
+                             infile,
+                             model_idx,
+                             climofiles,
+                             season,
+                             prov_info,
+                             rewrite = FALSE) {
+  # setting up path and parameters
+  year1 <- models_start_year[model_idx]
+  year2 <- models_end_year[model_idx]
+
+  outfile <-
+    getfilename_indices(work_dir, diag_base, model_idx, season)
+
+  # If diagnostic output file already exists, skip calculation
+  if (file.exists(outfile) & !rewrite) {
+    print(paste0(diag_base, ": output file already exists: ", outfile))
+    print(paste0(diag_base, ": skipping calculation"))
+    return()
+  }
+
+  # Test input file exists
+  print(infile)
+  if (!file.exists(infile)) {
+    stop("HyInt: missing regridded input file. Run HyInt pre-processing.")
+  }
+
+  # setting up time domain
+  years <- year1:year2
+  timeseason <- season2timeseason(season)
+
+  # file opening
+  pr_list <- ncdf_opener_universal(
+    infile,
+    namevar = "pr",
+    tmonths = timeseason,
+    tyears = years,
+    rotate = rotlongitude
+  )
+
+  # extract calendar and time unit from the original file
+  tcal <- attributes(pr_list$time)$cal
+  tunit <- attributes(pr_list$time)$units
+
+  etime <- power_date_new(pr_list$time)
+
+  # declare and convert variable
+  pr <- pr_list$field * 86400. # convert (kg m-2 s-1) to (mm day-1)
+
+  #############################################################
+  #--------HyInt calculation (Giorgi et al. 2011/14)----------#
+  #############################################################
+
+  # Setup useful arrays and parameters
+  nyear <- length(years)
+  pry <-
+    pr[, , 1:nyear] * NA # annual mean precipitation (over all days)
+  int <-
+    pr[, , 1:nyear] * NA # mean prec.
intensity (over wet days == SDII)
+  dsl <- pr[, , 1:nyear] * NA # mean dry spell length (DSL)
+  wsl <- pr[, , 1:nyear] * NA # mean wet spell length (WSL)
+  pa <- pr[, , 1:nyear] * NA # precipitation area (PA)
+  r95 <- pr[, , 1:nyear] * NA # heavy precipitation index (R95)
+  pry_norm <- pry
+  int_norm <- int
+  dsl_norm <- dsl
+  wsl_norm <- wsl
+  pa_norm <- pa
+  r95_norm <- r95
+
+  # Evaluate r95_threshold over normalization period (or load it if requested)
+  if (external_r95[1] == F) {
+    r95_threshold <-
+      apply(pr, c(1, 2), quantile, probs = 0.95, na.rm = T)
+  } else {
+    # if required, use HyInt file from historical period
+    if (external_r95[1] == "HIST") {
+      external_r95 <- getfilename_indices(
+        work_dir,
+        diag_base,
+        model_idx,
+        season,
+        hist = T,
+        hist_years = norm_years
+      )
+    }
+    r95_idx <- model_idx # assume each model has its r95_threshold file
+    if (length(external_r95) == 1) {
+      # if list of files with r95_threshold has only 1 entry,
+      # use that for all models
+      r95_idx <- 1
+    }
+    print(paste(
+      diag_base,
+      ": loading external r95_threshold data from ",
+      external_r95[r95_idx]
+    ))
+    r95_threshold <-
+      ncdf_opener(external_r95[r95_idx], "r95_threshold",
+        rotate = "no"
+      )
+  }
+  r95_threshold360 <- replicate(360, r95_threshold)
+  r95_threshold365 <- replicate(365, r95_threshold)
+  r95_threshold366 <- replicate(366, r95_threshold)
+
+  # Calculate indices
+  print(paste0(diag_base, ": calculating indices"))
+  for (iyear in 1:nyear) {
+    ret_year <- which(etime$year == years[iyear])
+    pr_year <- pr[, , ret_year]
+
+    r95_thresh_year <- r95_threshold365
+    if (length(pr_year[1, 1, ]) == 360) {
+      r95_thresh_year <- r95_threshold360
+    }
+    if (length(pr_year[1, 1, ]) == 366) {
+      r95_thresh_year <- r95_threshold366
+    }
+
+    # Identify dry and wet days (Salinger and Griffiths 2001)
+    ret_dry <- (pr_year < 1) # Dry days when pr < 1 mm
+    ret_wet <- (pr_year >= 1) # Rainy days when pr >= 1 mm
+    # Rainy days when pr < reference 95% quantile
+    ret_below_r95 <- (pr_year < r95_thresh_year)
+    pr_year_dry <- pr_year * 0.
+    pr_year_dry[ret_dry] <- 1 # mask with 1 for dry day
+    pr_year_wet <- pr_year * 0.
+    pr_year_wet[ret_wet] <- 1 # mask with 1 for rainy day
+    # actual precipitation but with NA on dry days
+    pr_year_int <- pr_year
+    pr_year_int[ret_dry] <- NA
+    # actual precipitation but with NA on days with
+    # pr < reference 95% quantile
+    pr_year_r95 <- pr_year
+    pr_year_r95[ret_below_r95] <- NA
+
+    # Mean annual precipitation
+    pry_year <- apply(pr_year, c(1, 2), mean, na.rm = T)
+
+    # Mean annual precipitation intensity (INT/SDII; intensity during wet days)
+    int_year <- apply(pr_year_int, c(1, 2), mean, na.rm = T)
+
+    # Mean annual dry spell length (DSL:
+    # number of consecutive dry days during each dry spell).
+    dsl_year <- mean_spell_length(pr_year_dry)
+
+    # Mean annual wet spell length (WSL:
+    # number of consecutive wet days during each wet spell).
+    wsl_year <- mean_spell_length(pr_year_wet)
+
+    # Precipitation area (PA: number of rainy days * area of grid box)
+    area_size <- area_size(ics, ipsilon)
+    pa_year <-
+      (apply(pr_year_wet, c(1, 2), sum, na.rm = T)) * area_size
+
+    # Heavy precipitation index (R95: percent of total precipitation above
+    # the 95th percentile of the reference distribution)
+    r95_year <- apply(pr_year_r95, c(1, 2), sum, na.rm = T) /
+      apply(pr_year, c(1, 2), sum, na.rm = T) * 100.
+
+    # Assign in-loop variables to storage array
+    pry[, , iyear] <- pry_year
+    dsl[, , iyear] <- dsl_year
+    wsl[, , iyear] <- wsl_year
+    int[, , iyear] <- int_year
+    pa[, , iyear] <- pa_year
+    r95[, , iyear] <- r95_year
+  }
+
+  # remove desert areas if required
+  # (mean annual precipitation <0.5 mm, Giorgi et al. 2014)
+  if (removedesert) {
+    retdes <- which(pry < 0.5)
+    pry[retdes] <- NA
+    # create mask with NAs for deserts and 1's for non-desert points
+    retdes2D <- apply(pry * 0, c(1, 2), sum) + 1
+    retdes3D <- replicate(nyear, retdes2D)
+    pry <- pry * retdes3D
+    dsl <- dsl * retdes3D
+    wsl <- wsl * retdes3D
+    int <- int * retdes3D
+    pa <- pa * retdes3D
+    r95 <- r95 * retdes3D
+  }
+
+  # Normalize to available data in reference period
+  # NOTE: take care of normalization by 0: when the normalizing function is 0
+  # (e.g. short dataset in reference period), the resulting normalized
+  # index will be NA.
+
+  # calculate normalization function
+  if (external_norm[1] == F) {
+    ret_years <- which(years >= norm_years[1] & years <= norm_years[2])
+    if (length(ret_years) == 0) {
+      stop(
+        paste0(
+          diag_base,
+          ": no data over selected normalization period,
+          unable to normalize"
+        )
+      )
+    }
+    pry_mean <- apply(pry[, , ret_years], c(1, 2), mean, na.rm = T)
+    dsl_mean <- apply(dsl[, , ret_years], c(1, 2), mean, na.rm = T)
+    wsl_mean <- apply(wsl[, , ret_years], c(1, 2), mean, na.rm = T)
+    int_mean <- apply(int[, , ret_years], c(1, 2), mean, na.rm = T)
+    pa_mean <- apply(pa[, , ret_years], c(1, 2), mean, na.rm = T)
+    r95_mean <- apply(r95[, , ret_years], c(1, 2), mean, na.rm = T)
+    pry_mean_sd <- apply(pry[, , ret_years], c(1, 2), sd, na.rm = T)
+    dsl_mean_sd <- apply(dsl[, , ret_years], c(1, 2), sd, na.rm = T)
+    wsl_mean_sd <- apply(wsl[, , ret_years], c(1, 2), sd, na.rm = T)
+    int_mean_sd <- apply(int[, , ret_years], c(1, 2), sd, na.rm = T)
+    pa_mean_sd <- apply(pa[, , ret_years], c(1, 2), sd, na.rm = T)
+    r95_mean_sd <- apply(r95[, , ret_years], c(1, 2), sd, na.rm = T)
+  } else {
+    # load normalization data from file
+    mean_idx <- model_idx # assume each model has its normalization file
+    if (external_norm[1] == "HIST") {
+      # if required, use HyInt file from historical period
+      external_norm <-
+        getfilename_indices(
+          work_dir,
+          diag_base,
+          model_idx,
+          season,
+          hist = T,
+          hist_years = norm_years
+        )
+    }
+    # if list of files with normalization functions has only 1 entry,
+    # use that for all models
+    if (length(external_norm) == 1) {
+      mean_idx <- 1
+    }
+    print(paste(
+      diag_base,
+      ": loading external normalization data from ",
+      external_norm[mean_idx]
+    ))
+    pry_mean <-
+      ncdf_opener(external_norm[mean_idx], "pry_mean", rotate = "no")
+    dsl_mean <-
+      ncdf_opener(external_norm[mean_idx], "dsl_mean", rotate = "no")
+    wsl_mean <-
+      ncdf_opener(external_norm[mean_idx], "wsl_mean", rotate = "no")
+    int_mean <-
+      ncdf_opener(external_norm[mean_idx], "int_mean", rotate = "no")
+    pa_mean <-
+      ncdf_opener(external_norm[mean_idx], "pa_mean", rotate = "no")
+    r95_mean <-
+      ncdf_opener(external_norm[mean_idx], "r95_mean", rotate = "no")
+    pry_mean_sd <-
+      ncdf_opener(external_norm[mean_idx], "pry_mean_sd",
+        rotate = "no"
+      )
+    dsl_mean_sd <-
+      ncdf_opener(external_norm[mean_idx], "dsl_mean_sd",
+        rotate = "no"
+      )
+    wsl_mean_sd <-
+      ncdf_opener(external_norm[mean_idx], "wsl_mean_sd",
+        rotate = "no"
+      )
+    int_mean_sd <-
+      ncdf_opener(external_norm[mean_idx], "int_mean_sd",
+        rotate = "no"
+      )
+    pa_mean_sd <- ncdf_opener(external_norm[mean_idx], "pa_mean_sd",
+      rotate = "no"
+    )
+    r95_mean_sd <-
+      ncdf_opener(external_norm[mean_idx], "r95_mean_sd",
+        rotate = "no"
+      )
+  }
+
+  # remove 0s from normalizing functions
+  pry_mean[pry_mean == 0] <- NA
+  dsl_mean[dsl_mean == 0] <- NA
+  wsl_mean[wsl_mean == 0] <- NA
+  int_mean[int_mean == 0] <- NA
+  pa_mean[pa_mean == 0] <- NA
+  r95_mean[r95_mean == 0] <- NA
+
+  # perform normalization
+  for (iyear in 1:nyear) {
+    pry_norm[, , iyear] <- pry[, , iyear] / pry_mean
+    dsl_norm[, , iyear] <- dsl[, , iyear] / dsl_mean
+    wsl_norm[, , iyear] <- wsl[, , iyear] / wsl_mean
+    int_norm[, , iyear] <- int[, , iyear] / int_mean
+    pa_norm[, , iyear] <- pa[, , iyear] / pa_mean
+    r95_norm[, , iyear] <- r95[, , iyear] / r95_mean
+  }
+
+  # Calculate HY-INT index
+  hyint <- dsl_norm * int_norm
+
+  # Calculate mean and mean_sd for hyint
+  if (external_norm[1] == F) {
+    # calculate or load hyint_mean from file for consistency with
+    # other indices
+    ret_years <-
+      which(years >= norm_years[1] & years <= norm_years[2])
+    hyint_mean <-
+      apply(hyint[, , ret_years], c(1, 2), mean, na.rm = T)
+    hyint_mean_sd <-
+      apply(hyint[, , ret_years], c(1, 2), sd, na.rm = T)
+  } else {
+    # load normalization data from file
+    mean_idx <- model_idx # assume each model has its normalization file
+    if (length(external_norm) == 1) {
+      # if list of files with normalization functions has only 1 entry,
+      # use that for all models
+      mean_idx <- 1
+    }
+    hyint_mean <- ncdf_opener(external_norm[mean_idx], "hyint_mean",
+      rotate = "no"
+    )
+    hyint_mean_sd <-
+      ncdf_opener(external_norm[mean_idx], "hyint_mean_sd",
+        rotate = "no"
+      )
+  }
+
+  # HyInt list
+  hyint_list <- list(
+    pry = pry,
+    dsl = dsl,
+    wsl = wsl,
+    int = int,
+    pa = pa,
+    r95 = r95,
+    hyint = hyint,
+    pry_mean = pry_mean,
+    dsl_mean = dsl_mean,
+    wsl_mean = wsl_mean,
+    int_mean = int_mean,
+    pa_mean = pa_mean,
+    r95_mean = r95_mean,
+    hyint_mean = hyint_mean,
+    pry_mean_sd = pry_mean_sd,
+    dsl_mean_sd = dsl_mean_sd,
+    wsl_mean_sd = wsl_mean_sd,
+    int_mean_sd = int_mean_sd,
+    pa_mean_sd = pa_mean_sd,
+    r95_mean_sd = r95_mean_sd,
+    hyint_mean_sd = hyint_mean_sd,
+    pry_norm = pry_norm,
+    dsl_norm = dsl_norm,
+    wsl_norm = wsl_norm,
+    int_norm = int_norm,
+    pa_norm = pa_norm,
+    r95_norm = r95_norm,
+    r95_threshold = r95_threshold
+  )
+
+  print(
+    paste(
+      diag_base,
+      ": calculation done. Returning mean precipitation,
+      sdii, dsl, wsl, pa, r95 (absolute and normalized values)
+      and hyint indices"
+    )
+  )
+
+  ##########################################################
+  #------------------------Save to NetCDF------------------#
+  ##########################################################
+
+  # saving output to netcdf files
+  print(paste0(diag_base, ": saving data to NetCDF file:"))
+
+  # define fields to be saved
+  field_list <- c(
+    "pry",
+    "dsl",
+    "wsl",
+    "int",
+    "pa",
+    "r95",
+    "hyint",
+    "pry_mean",
+    "dsl_mean",
+    "wsl_mean",
+    "int_mean",
+    "pa_mean",
+    "r95_mean",
+    "hyint_mean",
+    "pry_mean_sd",
+    "dsl_mean_sd",
+    "wsl_mean_sd",
+    "int_mean_sd",
+    "pa_mean_sd",
+    "r95_mean_sd",
+    "hyint_mean_sd",
+    "pry_norm",
+    "dsl_norm",
+    "wsl_norm",
+    "int_norm",
+    "pa_norm",
+    "r95_norm",
+    "r95_threshold"
+  )
+
+  TIME <-
+    paste(tunit, " since ", year1, "-", timeseason[1], "-01 00:00:00",
+      sep = ""
+    )
+
+  # dimensions definition
+  x <- ncdim_def("lon", "degrees_east", ics, longname = "longitude")
+  y <-
+    ncdim_def("lat", "degrees_north", ipsilon, longname = "latitude")
+  t <- ncdim_def(
+    timedimname,
+    "years",
+    years,
+    unlim = T,
+    calendar = tcal,
+    longname = timedimname
+  )
+  # timedim <- ncdim_def( timedimname,
+  #    "years since 1950-01-01 00:00:00", (years-1950),unlim=T)
+
+  # t <- ncdim_def( timedimname, TIME, years,
+  #    unlim=T, calendar=tcal, longname=timedimname)
+
+  for (var in field_list) {
+    field <- get(var, hyint_list)
+    field[is.nan(field)] <- NA
+    metadata <- setmetadata_indices(var)
+    longvar <- metadata$longvar
+    unit <- metadata$unit
+    # variable definitions
+    var_ncdf <- ncvar_def(
+      var,
+      unit,
+      list(x, y, t),
+      -999,
+      longname = longvar,
+      prec = "single",
+      compression = 1
+    )
+    # mean, sd and threshold fields are 2D (no time dimension)
+    if (var %in% c(
+      "pry_mean", "int_mean", "dsl_mean", "wsl_mean", "pa_mean",
+      "r95_mean", "hyint_mean", "pry_mean_sd", "int_mean_sd",
+      "dsl_mean_sd", "wsl_mean_sd", "pa_mean_sd", "r95_mean_sd",
+      "hyint_mean_sd", "r95_threshold"
+    )) {
+      var_ncdf <- ncvar_def(
+        var,
+        unit,
+        list(x, y),
+        -999,
+        longname = longvar,
+        prec = "single",
+        compression = 1
+      )
+    }
+    assign(paste0("var", var), var_ncdf)
+    assign(paste0("field", var), field)
+  }
+
+  # Netcdf file creation
+  print(paste(diag_base, ": saving output to ", outfile))
+  namelist <- paste0("var", field_list)
+  nclist <- mget(namelist)
+  ncfile <- nc_create(outfile, nclist)
+  for (var in field_list) {
+    # put variables into the ncdf file
+    ndims <- get(paste0("var", var))$ndims
+    ncvar_put(
+      ncfile,
+      var,
+      get(paste0("field", var)),
+      start = rep(1, ndims),
+      count = rep(-1, ndims)
+    )
+  }
+  nc_close(ncfile)
+
+  # Set provenance for this output file
+  caption <-
+    paste0(
+      "Hyint indices for years ",
+      year1,
+      " to ",
+      year2,
+      " according to ",
+      models_name[model_idx]
+    )
+  anc_list <- flatten_lists(climofiles[model_idx])
+  xprov <- list(
+    ancestors = anc_list,
+    model_idx = list(model_idx),
+    caption = caption
+  )
+
+  # Store provenance in main provenance list
+  prov_info[[outfile]] <- xprov
+
+  print(paste(diag_base, ": diagnostic netCDF files saved"))
+  return(prov_info)
+}
diff --git a/esmvaltool/diag_scripts/hyint/hyint_etccdi_preproc.R b/esmvaltool/diag_scripts/hyint/hyint_etccdi_preproc.R
new file mode 100644
index 0000000000..36c09f952d
--- /dev/null
+++ b/esmvaltool/diag_scripts/hyint/hyint_etccdi_preproc.R
@@ -0,0 +1,64 @@
+######################################################
+#-------------ETCCDI preprocessing for HyInt---------#
+#-------------E. Arnone (Oct 2017)-------------------#
+############################################################################
+# ABOUT: This function pre-processes ETCCDI files obtained with the
+# extreme_events recipe, remapping the data from a
+# Gaussian to a lonlat grid, changing the longitude range from 0/360 to
+# -180/180 and merging all indices into the HyInt indices file.
+
+hyint_etccdi_preproc <-
+  function(work_dir,
+           etccdi_dir,
+           etccdi_list_import,
+           model_idx,
+           season,
+           prov_info,
+           yrmon = "yr") {
+    year1 <- toString(models_start_year[model_idx])
+    year2 <- toString(models_end_year[model_idx])
+    hyint_file <-
+      getfilename_indices(work_dir, diag_base, model_idx, season)
+    etccdi_files <-
+      getfilename_etccdi(etccdi_dir, etccdi_list_import, model_idx,
+        yrmon = yrmon
+      )
+    etccdi_files_tmp <- c()
+    for (sfile in etccdi_files) {
+      print(paste0("HyInt: pre-processing ", sfile))
+      sfile_tmp0 <- cdo("delvar", args = "time_bnds", input = sfile)
+      gridf <- tempfile()
+      cdo("griddes", input = hyint_file, stdout = gridf)
+      sfile_tmp1 <- cdo("remapscon2",
+        args = gridf,
+        input = sfile_tmp0
+      )
+      sfile_tmp <- cdo("sellonlatbox",
+        args = "-180,180,-90,90",
+        input = sfile_tmp1
+      )
+      etccdi_files_tmp <- c(etccdi_files_tmp, sfile_tmp)
+      unlink(c(sfile_tmp0, sfile_tmp1))
+    }
+    hyint_file_tmp <- tempfile()
+    mv_command <- paste("mv -n ", hyint_file, hyint_file_tmp)
+    system(mv_command)
+    print(paste0("HyInt: merging ", length(etccdi_files), " ETCCDI files"))
+    hyint_file_tmp_sel <-
+      cdo("sellonlatbox",
+        args = "-180,180,-90,90",
+        input = hyint_file_tmp
+      )
+    cdo("merge",
+      options = "-O",
+      input = c(hyint_file_tmp_sel, etccdi_files_tmp),
+      output = hyint_file
+    )
+    unlink(c(etccdi_files_tmp, hyint_file_tmp, hyint_file_tmp_sel))
+
+    # Update provenance with etccdi files
+    prov_info[[hyint_file]]$ancestors <-
+      c(prov_info[[hyint_file]]$ancestors, etccdi_files)
+
+    return(prov_info)
+  }
diff --git a/esmvaltool/diag_scripts/hyint/hyint_functions.R b/esmvaltool/diag_scripts/hyint/hyint_functions.R
new file mode 100644
index 0000000000..a1595c2a72
--- /dev/null
+++ b/esmvaltool/diag_scripts/hyint/hyint_functions.R
@@ -0,0 +1,1699 @@
+# #############################################################################
+# hyint_functions.R
+#
+# Author: Enrico Arnone (ISAC-CNR, Italy)
+#
+# #############################################################################
+# Description
+# Functions used in HyInt routines
+#
+# Modification history
+# 20170901-arnone_enrico: adapted to HyInt and extended
+# 20170522-davini_paolo: creation for MiLES
+# #############################################################################
+
+# basis functions
+
+##########################################################
+#------------------------Packages------------------------#
+##########################################################
+
+# loading packages
+library("maps")
+library("ncdf4")
+library("PCICt")
+
+# check if fast linear fit is operative (after R 3.1):
+# 3x faster than lm.fit, 36x faster than lm
+if (exists(".lm.fit")) {
+  lin.fit <- .lm.fit
+} else {
+  lin.fit <- lm.fit
+}
+
+
+##########################################################
+#----------------Naming functions------------------------#
+##########################################################
+
+# function to flatten nested lists
+flatten_lists <- function(x) {
+  if (!inherits(x, "list")) return(list(x))
+  else
+    return(unlist(c(lapply(x, flatten_lists)), recursive = FALSE))
+}
+
+getfilename_regridded <- function(spath, rgrid, var0, model_idx) {
+  exp <- models_name[model_idx]
+  year1 <- models_start_year[model_idx]
+  year2 <- models_end_year[model_idx]
+  model_exp <- models_experiment[model_idx]
+  model_ens <- models_ensemble[model_idx]
+  filename <- paste0(
+    spath,
+    "/",
+    exp,
+    "_",
+    model_exp,
+    "_",
+    model_ens,
+    "_",
+    toString(year1),
+    "-",
+    toString(year2),
+    "_",
+    var0,
+    "_",
+    rgrid,
+    ".nc"
+  )
+  return(filename)
+}
+
+getfilename_indices <-
+  function(spath,
+           label,
+           model_idx,
+           season,
+           hist = F,
+           hist_years = hist_years,
+           grid = F,
+           topo = F) {
+    exp <- models_name[model_idx]
+    model_exp <- models_experiment[model_idx]
+    model_ens <- models_ensemble[model_idx]
+    if (grid) {
+      filename <- paste0(
+        spath,
+        "/",
+        label,
+        "_",
+        exp,
+        "_",
+        model_exp,
+        "_",
+        model_ens,
+        ".grid"
+      )
+    } else if (topo) {
+      filename <- paste0(
+        spath,
+        "/",
+        label,
+        "_",
+        exp,
+        "_",
+        model_exp,
+        "_",
+        model_ens,
+        "_topo.nc"
+      )
+    } else {
+      year1 <- models_start_year[model_idx]
+      year2 <- models_end_year[model_idx]
+      if (hist) {
+        model_exp <- "historical"
+        year1 <- hist_years[1]
+        year2 <- hist_years[2]
+      }
+      filename <- paste0(
+        spath,
+        "/",
+        label,
+        "_",
+        exp,
+        "_",
+        model_exp,
+        "_",
+        model_ens,
+        "_",
+        toString(year1),
+        "_",
+        toString(year2),
+        "_",
+        season,
+        ".nc"
+      )
+    }
+    return(filename)
+  }
+
+getfilename_etccdi <-
+  function(spath, var, model_idx, yrmon = "yr") {
+    # Function to get names of files of ETCCDI indices
+    # If input 'var' is an array of names, 'filename' will be an array as well
+
+    filename <- ""
+    for (svar in var) {
+      exp <- models_name[model_idx]
+      model_exp <- models_experiment[model_idx]
+      model_ens <- models_ensemble[model_idx]
+      year1 <- toString(models_start_year[model_idx])
+      year2 <- toString(models_end_year[model_idx])
+      if (yrmon == "mon") {
+        year1 <- paste0(year1, "01")
+        year2 <- paste0(year2, "12")
+      }
+      filenametmp <- paste0(
+        spath,
+        "/",
+        svar,
+        "_",
+        yrmon,
+        "_",
+        exp,
+        "_",
+        model_exp,
+        "_",
+        model_ens,
+        "_",
+        year1,
+        "-",
+        year2,
+        ".nc"
+      )
+      filename <- c(filename, filenametmp)
+    }
+    filename <- filename[2:length(filename)]
+    return(filename)
+  }
+
+getfilename_trends <- function(spath, label, model_idx, season) {
+  exp <- models_name[model_idx]
+  year1 <- models_start_year[model_idx]
+  year2 <- models_end_year[model_idx]
+  model_exp <- models_experiment[model_idx]
+  model_ens <- models_ensemble[model_idx]
+  filename <- paste0(
+    spath,
+    "/",
+    diag_base,
+    "_",
+    exp,
+    "_",
+    model_exp,
+    "_",
+    model_ens,
+    "_",
+    toString(year1),
+    "_",
+    toString(year2),
+    "_",
+    season,
+    "_tseries_",
+    label,
+    ".nc"
+  )
+  return(filename)
+}
+
+getfilename_figure <-
+  function(spath,
+           var,
+           year1,
+           year2,
+           model_idx,
+           season,
+           syears,
+           sregion,
+           label,
+           map,
+           output_file_type,
+           multimodel = F) {
+    if (nchar(var) > 10) {
+      var <- substr(var, 1, 10)
+    }
+    exp <- models_name[model_idx]
+    year1 <- models_start_year[model_idx]
+    year2 <- models_end_year[model_idx]
+    model_exp <- models_experiment[model_idx]
+    model_ens <- models_ensemble[model_idx]
+    model_tag <- paste(exp, model_exp, model_ens, sep = "_")
+    if (multimodel) {
+      model_tag <- "multimodel"
+    }
+    figname <- paste0(
+      spath,
+      "/",
+      paste(
+        var,
+        model_tag,
+        paste(year1, year2, sep = "-"),
+        season,
+        syears,
+        sregion,
+        map,
+        sep = "_"
+      ),
+      ".",
+      output_file_type
+    )
+    if (!(label == "") & !(label == F)) {
+      figname <- paste0(
+        spath,
+        "/",
+        paste(
+          var,
+          model_tag,
+          paste(year1, year2, sep = "-"),
+          season,
+          syears,
+          sregion,
+          label,
+          map,
+          sep = "_"
+        ),
+        ".",
+        output_file_type
+      )
+    }
+    return(figname)
+  }
+
+
+##########################################################
+#-----------------Basic functions------------------------#
+##########################################################
+
+# standardize a time series (subtract the mean, divide by the
+# standard deviation)
+standardize <- function(timeseries) {
+  out <-
+    (timeseries - mean(timeseries, na.rm = T)) / sd(timeseries, na.rm = T)
+  return(out)
+}
+
+
+# find the index of the axis element closest to a given value
+# (e.g. detect ics/ipsilon indices for a given lat-lon point)
+whicher <- function(axis, number) {
+  out <- which.min(abs(axis - number))
+  return(out)
+}
+
+
+# area of longitude/latitude rectangle
+area_lonlat <- function(lon1, lon2, lat1, lat2) {
+  R <- 6378
+  return(2 * pi * R^2 * abs(sin(lat1 / 180. * pi) - sin(lat2 / 180. * pi))
+  * abs(lon1 - lon2) / 360)
+}
+
+
+# produce a 2d matrix of area size for given longitude/latitude grid points
+area_size <- function(ics,
+                      ipsilon,
+                      resolution = NA,
+                      norm = F) {
+  if (is.na(resolution) &
+    (length(ics) == 1) & (length(ipsilon) == 1)) {
+    stop("Provide either resolution or two adjacent elements")
+  }
+  if (is.na(resolution) & (length(ics) != 1)) {
+    resolution <- ics[2] - ics[1]
+  }
+  field <- array(NA, dim = c(length(ics), length(ipsilon)))
+  for (j in seq_along(ipsilon)) {
+    field[, j] <- area_lonlat(
+      0,
+      resolution,
+      ipsilon[j] - 0.5 * resolution,
+      ipsilon[j] + 0.5 * resolution
+    )
+  }
+  if (norm) {
+    field <- field / sum(field)
+  }
+
+  return(field)
+}
+
+
+
+# produce a 2d matrix of area weight
+area_weight <- function(ics,
+                        ipsilon,
+                        root = T,
+                        norm = F) {
+  field <- array(NA, dim = c(length(ics), length(ipsilon)))
+  if (root == T) {
+    for (j in seq_along(ipsilon)) {
+      field[, j] <- sqrt(cos(pi / 180 * ipsilon[j]))
+    }
+  }
+
+  if (root == F) {
+    for (j in seq_along(ipsilon)) {
+      field[, j] <- cos(pi / 180 * ipsilon[j])
+    }
+  }
+  if (norm) {
+    field <- field / mean(field)
+  }
+  return(field)
+}
+
+# normalize a 2D or 3D field by a 2d matrix of area weight
+area_weight_norm <-
+  function(ics,
+           ipsilon,
+           field,
+           root = T,
+           norm = F) {
+    timedim <- dim(field)[length(dim(field))]
+    weights <- replicate(timedim, area_weight(ics, ipsilon,
+      root = root,
+      norm = norm
+    ))
+    field <- field * weights
+    return(field)
+  }
+
+##########################################################
+#--------------Time Based functions----------------------#
+##########################################################
+
+# check number of days for each month
+
+number_days_month <- function(datas) {
+  # evaluate the number of days in a defined month of a year
+  datas <- as.Date(datas)
+  m <- format(datas, format = "%m")
+  while (format(datas, format = "%m") == m) {
+    datas <- datas + 1
+  }
+  return(as.integer(format(datas - 1, format = "%d")))
+}
+
+
+# to convert season charname to months number
+season2timeseason <- function(season) {
+  if (season == "ALL") {
+    timeseason <- 1:12
+  }
+  if (season == "JJA") {
+    timeseason <- 6:8
+  }
+  if (season == "DJF") {
+    timeseason <- c(1, 2, 12)
+  }
+  if (season == "MAM") {
+    timeseason <- 3:5
+  }
+  if (season == "SON") {
+    timeseason <- 9:11
+  }
+  if (!exists("timeseason")) {
+    stop("wrong season selected!")
+  }
+  return(timeseason)
+}
+
+# leap year true/false function
+is_leapyear <- function(year) {
+  return(((year %% 4 == 0) & (year %% 100 != 0)) | (year %% 400 == 0))
+}
+
+power_date_new <- function(datas) {
+  whichdays <- as.numeric(format(datas, "%m"))
+  # create a "season" for continuous time
+  seas <- whichdays * 1
+  ss <- 1
+  for (i in 1:(length(whichdays) - 1)) {
+    if (diff(whichdays)[i] > 1) {
+      ss <- ss + 1
+    }
+    seas[i + 1] <- ss
+  }
+  etime <- list(
+    day = as.numeric(format(datas, "%d")),
+    month = as.numeric(format(datas, "%m")),
+    year = as.numeric(format(datas, "%Y")),
+    data = datas,
+    season = seas
+  )
+  return(etime)
+}
+
+power_date <- function(season, ANNO1, ANNO2) {
+  # evaluate the number of days to be analyzed in order
+  # to create arrays of the needed dimensions
+
+  # create continuous calendar
+  p1 <- as.Date(paste0(ANNO1, "-01-01"))
+  p2 <- as.Date(paste0(ANNO2, "-12-31"))
+  datas <- seq(p1, p2, by = "day")
+
+  # select only days corresponding to the needed season
+  timeseason <- season2timeseason(season)
+  month <- as.numeric(format(datas, "%m"))
+  whichdays <- which(month %in% timeseason)
+
+  # create a "season" for continuous time, used by persistence tracking
+  seas <- whichdays * 1
+  ss <- 1
+  for (i in 1:(length(whichdays) - 1)) {
+    if (diff(whichdays)[i] > 1) {
+      ss <- ss + 1
+    }
+    seas[i + 1] <- ss
+  }
+  # produce a final timeseries of dates
+  datas <- datas[whichdays]
+  dataline <- list(
+    day = as.numeric(format(datas, "%d")),
+    month = as.numeric(format(datas, "%m")),
+    year = as.numeric(format(datas, "%Y")),
+    season = seas,
+    data = datas
+  )
+  print("Time Array Built")
+  print(paste("Length:", length(seas), "days for", season, "season"))
+  print(paste("From", datas[1], "to", datas[length(seas)]))
+
+  return(dataline)
+}
+
+power_date_no_leap <- function(season, ANNO1, ANNO2) {
+  # apply to power_date object to clean out elements for leap years
+  e <- power_date(season, ANNO1, ANNO2)
+  leap.days <- which(e$month == 2 & e$day == 29)
+  dataline.leap <- list(
+    day = e$day[-leap.days],
+    month = e$month[-leap.days],
+    year = e$year[-leap.days],
+    season = e$season[-leap.days],
+    data = e$data[-leap.days]
+  )
+  print("FIXED FOR NO LEAP CALENDAR: Time Array Built")
+  print(paste(
+    "Length:",
+    length(dataline.leap$season),
+    "days for",
+    season,
+    "season"
+  ))
+  print(paste(
+    "From", dataline.leap$data[1], "to",
+    dataline.leap$data[length(dataline.leap$season)]
+  ))
+  return(dataline.leap)
+}
+
+power_date_30day <- function(season, ANNO1, ANNO2) {
+  # build a simplified time array for 30-day-per-month (360-day) calendars
+  nmonths <- length(season2timeseason(season))
+  nyears <- as.numeric(ANNO2) - as.numeric(ANNO1) + 1
+  dd <- rep(seq(1, 30), nmonths * nyears)
+  mm <- rep(rep(season2timeseason(season), each = 30), nyears)
+  # create a "season" for continuous time, used by persistence tracking
+  seas <- mm * 0 + 1
+  ss <- 1
+  for (i in 1:(length(mm) - 1)) {
+    if (diff(mm)[i] > 1) {
+      ss <- ss + 1
+    }
+    seas[i + 1] <- ss
+  }
+  dataline_30day <- list(
+    day = dd,
+    month = mm,
+    season = seas
+  )
+  print("SIMPLIFIED CALENDAR FOR 30-day CALENDAR: Time Array Built")
+  print(paste(
+    "Length:",
+    length(dataline_30day$season),
+    "days for",
+    season,
+    "season"
+  ))
+  return(dataline_30day)
+}
+
+calc_region_timeseries <-
+  function(x,
+           y,
+           indata,
+           region,
+           calc_sd = F,
+           weighted_mean = T,
+           root = F,
+           norm = T,
+           ...) {
+    # This function subsets a lon/lat/time array based on an input
+    # region(lon1,lon2,lat1,lat2) and returns its timeseries.
+    # Area weights are applied if requested.
+    # The function also returns the standard deviation of the
+    # averaging elements (currently excluding weights)
+
+    idimtimedata <- length(dim(indata))
+    dimtimedata <- (dim(indata))[idimtimedata]
+    retx <- which(region[1] <= x & x <= region[2])
+    rety <- which(region[3] <= y & y <= region[4])
+    if (!calc_sd) {
+      print(paste(
+        "calc_region_timeseries: averaging over",
+        length(retx) * length(rety), "grid points"
+      ))
+    }
+    if (is.na(retx[1]) | is.na(rety[1])) {
+      cat(
+        "calc_region_timeseries: no data in selected region.",
+        "Returning NA."
+      )
+      outdata <- array(dim = dimtimedata)
+    } else {
+      retdata <- indata[retx, rety, , drop = F]
+      if (weighted_mean & !calc_sd) {
+        retdata <- area_weight_norm(x[retx], y[rety], retdata,
+          root = root, norm = norm
+        )
+      }
+      outdata <- apply(retdata, idimtimedata, mean, na.rm = T)
+      if (calc_sd) {
+        outdata <- apply(retdata, idimtimedata, sd, na.rm = T)
+      }
+    }
+    return(outdata)
+  }
+
+##########################################################
+#----------------Data preprocessing----------------------#
+##########################################################
+
+#
+# Method to create an ASCII grid file to regrid onto
+# @param idx_dir path of directory containing
+# files from which to create the grid
+# Adapted from 20170920-sandstad_marit
+#
+create_grid <- function(ref_file = "./reffile",
+                        path = idx_dir,
+                        out_file = "./gridDef") {
+  ## Picking the grid found in reference file to regrid over
+  if (!file.exists(ref_file)) {
+    ## Picking the grid found in the first file to regrid over
+    ref_file <-
+      list.files(path, pattern = "*.nc", full.names = TRUE)[1]
+  }
+  cdo("griddes", input = ref_file, stdout = out_file)
+}
+
+#
+# Method to create a landSeaMask on a suitable grid
+# @param regrid name w/path of gridfile to use
+# to put the landseamask on
+# Adapted from 20170920-sandstad_marit
+#
+create_landseamask <-
+  function(regrid = "./gridDef",
+           ref_file = ref_file,
+           loc = "./",
+           regridded_topo = paste0("./", "regridded_topo.nc"),
+           landmask = "./landSeaMask.nc",
+           topo_only = F) {
+    # Test if gridfile exists
+    # otherwise call function to generate one
+    if (!file.exists(regrid)) {
+      if (length(ref_file) == 0) {
+        stop("Unable to access grid file")
+      }
+      create_grid(ref_file = ref_file, out_file = regrid)
+    }
+
+    ## Making topographic map
+    ftopo <- cdo("topo", options = "-f nc")
+
+    ## Regridding the topographic map to chosen grid
+    cdo(
+      "remapscon2",
+      args = paste0("'", regrid, "'"),
+      input = ftopo,
+      output = regridded_topo
+    )
+
+    if (!topo_only) {
+      # Set above sea-level gridpoints to missing
+      ftopomiss1 <-
+        cdo("setrtomiss", args = "0,9000", input = regridded_topo)
+
+      # Set above sea-level gridpoints to 1
+      ftopo1pos <- cdo("setmisstoc", args = "1", input = ftopomiss1)
+
+      # Set below sea-level gridpoints to missing
+      cdo("setrtomiss",
+        args = "-9000,0",
+        input = ftopo1pos,
+        output = landmask
+      )
+      unlink(c(ftopomiss1, ftopo1pos))
+    }
+    unlink(ftopo)
+  }
+
+##
+## Read seaLandElevationMask and mask data
+##
+apply_elevation_mask <- function(rfield,
+                                 relevation,
+                                 el_threshold,
+                                 reverse = F) {
+  if (!reverse) {
+    if (el_threshold >= 0) {
+      # mountains
+      relevation[relevation < el_threshold] <- NA
+      relevation <- relevation * 0 + 1
+    } else {
+      # oceans
+      relevation[relevation > el_threshold] <- NA
+      relevation <- relevation * 0 + 1
+    }
+  } else {
+    if (el_threshold >= 0) {
+      # mountains
+      relevation[relevation > el_threshold] <- NA
+      relevation <- relevation * 0 + 1
+    } else {
+      # oceans
+      relevation[relevation < el_threshold] <- NA
+      relevation <- relevation * 0 + 1
+    }
+  }
+  itimedim <- dim(rfield)[length(dim(rfield))]
+  myear_relevation <-
+    replicate(itimedim, relevation)
+  if (any(dim(myear_relevation) != dim(rfield))) {
+    stop(
+      "STOP - dimension of topography does not match
+      dimension of field: remove old topography files if needed"
+    )
+  }
+  rfield <- rfield * myear_relevation
+
+  return(rfield)
+}
+
+
+
+##########################################################
+#-------------------Data analysis------------------------#
+##########################################################
+
+
+###################################
+# Function: Annual mean spell length
+#
+# About: This function calculates the annual mean spell length of a given
+# field (lon x lat x time) reporting 1's for active parameter and
+# 0's for non active parameter. In order to reduce memory usage only
+# the annual mean spell length is returned. E.g. calculation of dry
+# spell length needs input fields with 1 for dry days, 0 for wet ones.
+#
+# Author: E. Arnone ( ISAC-CNR, Torino)
+# Last update: 14 June 2017
+
+mean_spell_length <- function(m) {
+  # Setup useful arrays and parameters
+  nlon <- dim(m)[1]
+  nlat <- dim(m)[2]
+  ntime <- dim(m)[3]
+  mean_spell_length_year <- m[, , 1] * NA
+
+  # Loop through grid points
+  for (ilon in 1:nlon) {
+    for (ilat in 1:nlat) {
+      spell_point <- (m[ilon, ilat, ])
+      # Look for variations along time axis
+      diff_spell_point <-
+        spell_point[2:ntime] - spell_point[1:(ntime - 1)]
+      # select when variation is positive (starting spell)
+      spell_start <- which(diff_spell_point == 1) + 1
+      if (!is.na(spell_point[1])) {
+        if (spell_point[1] == 1) {
+          spell_start <- c(1, spell_start)
+        }
+      } # if first day is active add it to list
+      # select when variation is negative (ending spell)
+      spell_stop <- which(diff_spell_point == -1)
+      if (!is.na(spell_point[ntime])) {
+        if (spell_point[ntime] == 1) {
+          spell_stop <- c(spell_stop, ntime)
+        }
+      } # if last day is active add it to list
+      # difference between stop and start gives spell length
+      spell_length <- spell_stop - spell_start + 1
+      # assign annual mean spell length to output array
+      mean_spell_length_year[ilon, ilat] <-
+        mean(spell_length, na.rm = T)
+    }
+  }
+  return(mean_spell_length_year)
+}
+
+get_elevation <-
+  function(filename = NULL,
+           elev_range = c(-1000, 10000),
+           mask = F,
+           elev_plot = F) {
+    # get elevation data from a high-resolution topography file.
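+    # If no filename is given, a global topography file is generated on
+    # the fly with "cdo topo" and removed again after use. A hypothetical
+    # call (argument values are illustrative, not defaults used elsewhere
+    # in HyInt) returning a 1/NA land mask on the topography grid:
+    #   land <- get_elevation(elev_range = c(0, 9000), mask = TRUE)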
+
+    funlink <- F
+    if (is.null(filename)) {
+      filename <- cdo("topo", options = "-f nc")
+      funlink <- T
+    }
+    elevation <- ncdf_opener(
+      filename,
+      namevar = "elevation",
+      namelon = "longitude",
+      namelat = "latitude",
+      rotate = "no"
+    )
+    lon_el <-
+      ncdf_opener(filename, namevar = "longitude", rotate = "no")
+    lat_el <-
+      ncdf_opener(filename, namevar = "latitude", rotate = "no")
+    elevation[which(elevation < elev_range[1] |
+      elevation > elev_range[2])] <- NA
+    if (mask) {
+      elevation[which(elevation >= elev_range[1] &
+        elevation <= elev_range[2])] <- 1
+    }
+    if (elev_plot) {
+      filled_contour3(lon_el, lat_el, elevation, color.palette = rainbow)
+      map(
+        "world",
+        regions = ".",
+        interior = F,
+        exact = F,
+        boundary = T,
+        add = T,
+        col = "gray",
+        lwd = 1.5
+      )
+    }
+    el_list <-
+      list(
+        elevation = elevation,
+        lon_el = lon_el,
+        lat_el = lat_el
+      )
+    if (funlink) {
+      unlink(filename)
+    }
+    return(el_list)
+  }
+
+
+##########################################################
+#--------------NetCDF loading function-------------------#
+##########################################################
+
+# universal function to open single-var 3D (x,y,time) ncdf files: it includes
+# rotation, y-axis flipping, time selection and CDO-based interpolation
+# to replace both ncdf.opener.time and ncdf.opener (deprecated and removed)
+# automatically rotate matrix to place Greenwich at the center (flag "rotate")
+# and flip the latitudes in order to have increasing values
+# if required (flag "interp2grid") additional interpolation with CDO is used.
+# "grid" can be used to specify the target grid name
+# time selection based on package PCICt must be specified with both "tmonths"
+# and "tyears" flags. It returns a list including its own dimensions
+ncdf_opener_universal <- # nolint
+  function(namefile,
+           namevar = NULL,
+           namelon = NULL,
+           namelat = NULL,
+           tmonths = NULL,
+           tyears = NULL,
+           rotate = "full",
+           interp2grid = F,
+           grid = "r144x73",
+           remap_method = "remapscon2",
+           exportlonlat = TRUE,
+           verbose = F) {
+    # load package
+    require(ncdf4)
+
+    # verbose-only printing function
+    printv <- function(value) {
+      if (verbose) {
+        print(value)
+      }
+    }
+
+    # check if timeflag is activated or full file must be loaded
+    if (is.null(tyears) | is.null(tmonths)) {
+      timeflag <- FALSE
+      printv("No time and months specified, loading all the data")
+    } else {
+      timeflag <- TRUE
+      printv("tyears and tmonths are set!")
+      require(PCICt)
+    }
+
+    if (rotate == "full") {
+      rot <- T
+      move1 <- move2 <- 1 / 2
+    } # 180 degrees rotation of longitude
+    if (rotate == "half") {
+      rot <- T
+      move1 <- 1 / 4
+      move2 <- 3 / 4
+    } # 90 degree rotation (useful for TM90)
+    if (rotate == "no") {
+      rot <- F
+    } # keep as it is
+
+    # interpolation made with CDO: second order conservative remapping
+    if (interp2grid) {
+      print(paste("Remapping with CDO on", grid, "grid"))
+      if (is.null(namevar)) {
+        namefile <- cdo(remap_method,
+          args = paste0("'", grid, "'"),
+          input = namefile
+        )
+      } else {
+        selectf <- cdo("selvar", args = namevar, input = namefile)
+        gridf <- tempfile()
+        cdo("griddes", input = grid, stdout = gridf)
+        namefile <- cdo(remap_method, args = gridf, input = selectf)
+        unlink(c(selectf, gridf))
+      }
+    }
+
+    # define rotate function (faster than with apply)
+    rotation <- function(line) {
+      vettore <- line
+      dims <- length(dim(vettore))
+      # for longitudes
+      if (dims == 1) {
+        ll <- length(line)
+        line[(ll * move1):ll] <- vettore[1:(ll * move2 + 1)]
+        line[1:(ll * move1 - 1)] <- vettore[(ll * move2 + 2):ll] - 360
+      }
+      # for x,y data
+      if (dims == 2) {
+        ll <- length(line[, 1])
+        line[(ll * move1):ll, ] <- vettore[1:(ll * move2 + 1), ]
+        line[1:(ll * move1 - 1), ] <- vettore[(ll * move2 + 2):ll, ]
+      }
+      # for x,y,t data
+      if (dims == 3) {
+        ll <- length(line[, 1, 1])
+        line[(ll * move1):ll, , ] <- vettore[1:(ll * move2 + 1), , ]
+        line[1:(ll * move1 - 1), , ] <-
+          vettore[(ll * move2 + 2):ll, , ]
+      }
+      return(line)
+    }
+
+    # define flip function ('cos rev/apply is not working)
+    flipper <- function(field) {
+      dims <- length(dim(field))
+      if (dims == 2) {
+        ll <- length(field[1, ])
+        field <- field[, ll:1]
+      } # for x,y data
+      if (dims == 3) {
+        ll <- length(field[1, , 1])
+        field <- field[, ll:1, ]
+      } # for x,y,t data
+      return(field)
+    }
+
+    # opening file: getting variable (if namevar is given, that variable
+    # is extracted)
+    printv(paste("opening file:", namefile))
+    a <- nc_open(namefile)
+
+    # if no name provided load the only variable available
+    if (is.null(namevar)) {
+      namevar <- names(a$var)
+      if (length(namevar) > 1) {
+        print(namevar)
+        stop("More than one var in the file, please select it
+          with namevar=yourvar")
+      }
+    }
+
+    # load axis: updated version, looking for dimension directly stored
+    # inside the variable
+    naxis <-
+      unlist(lapply(a$var[[namevar]]$dim, function(x)
+        x["name"]))
+    for (axis in naxis) {
+      assign(axis, ncvar_get(a, axis))
+      printv(paste(axis, ":", length(get(axis)), "records"))
+    }
+
+    if (timeflag) {
+      printv("selecting years and months")
+
+      # based on preprocessing of CDO time format: get calendar type and
+      # use PCICt package for irregular data
+      caldata <- ncatt_get(a, "time", "calendar")$value
+      timeline <-
+        as.PCICt(as.character(time), format = "%Y%m%d", cal = caldata)
+
+      # break if the calendar has not been recognized
+      if (any(is.na(timeline))) {
+        stop("Calendar from NetCDF is unsupported or not present. Stopping!!!")
+      }
+
+      # break if the data requested is not there
+      lastday_base <- paste0(max(tyears), "-", max(tmonths), "-28")
+      maxdays <- number_days_month(lastday_base)
+      if (caldata == "360_day") {
+        maxdays <- 30
+      }
+      # uses number_days_month, which loops to get the month change
+      lastday <- as.PCICt(paste0(
+        max(tyears), "-", max(tmonths), "-",
+        maxdays
+      ),
+      cal = caldata,
+      format = "%Y-%m-%d"
+      )
+      firstday <-
+        as.PCICt(paste0(min(tyears), "-", min(tmonths), "-01"),
+          cal = caldata,
+          format = "%Y-%m-%d"
+        )
+      if (max(timeline) < lastday | min(timeline) > firstday) {
+        stop("You requested a time interval that is not present in the NetCDF")
+      }
+    }
+
+    # time selection and variable loading
+    printv("loading full field...")
+    field <- ncvar_get(a, namevar)
+
+    if (timeflag) {
+      # select data we need
+      select <- which(as.numeric(format(timeline, "%Y")) %in% tyears &
+        as.numeric(format(timeline, "%m")) %in% tmonths)
+      field <- field[, , select]
+      time <- timeline[select]
+
+      printv(paste("This is a", caldata, "calendar"))
+      printv(paste(
+        length(time), "days selected from", time[1],
+        "to", time[length(time)]
+      ))
+
+      printv(paste("Months that have been loaded are.. "))
")) + printv(unique(format(time, "%Y-%m"))) + } + + # check for dimensions (presence or not of time dimension) + dimensions <- length(dim(field)) + + # if dimensions are multiple, get longitude, latitude + # if needed, rotate and flip the array + xlist <- c("lon", "Lon", "longitude", "Longitude") + ylist <- c("lat", "Lat", "latitude", "Latitude") + if (dimensions > 1) { + # assign ics and ipsilon + if (is.null(namelon)) { + if (any(xlist %in% naxis)) { + ics <- get(naxis[naxis %in% xlist], a$dim)$vals + } else { + print("WARNING: No lon found") + ics <- NA + } + } else { + ics <- ncvar_get(a, namelon) + } + if (is.null(namelat)) { + if (any(ylist %in% naxis)) { + ipsilon <- get(naxis[naxis %in% ylist], a$dim)$vals + } else { + print("WARNING: No lat found") + ipsilon <- NA + } + } else { + ipsilon <- ncvar_get(a, namelat) + } + + # longitute rotation around Greenwich + if (rot) { + printv("rotating...") + ics <- rotation(ics) + field <- rotation(field) + } + if (ipsilon[2] < ipsilon[1] & length(ipsilon) > 1) { + if (length(ics) > 1) { + print("flipping...") + ipsilon <- sort(ipsilon) + field <- flipper(field) + } + } + + # exporting variables to the main program + if (exportlonlat) { + assign("ics", ics, envir = .GlobalEnv) + assign("ipsilon", ipsilon, envir = .GlobalEnv) + } + assign(naxis[naxis %in% c(xlist, namelon)], ics) + assign(naxis[naxis %in% c(ylist, namelat)], ipsilon) + } + + if (dimensions > 3) { + stop("This file is more than 3D file") + } + + # close connection + nc_close(a) + + # remove interpolated file + if (interp2grid) { + unlink(namefile) + } + + # showing array properties + printv(paste(dim(field))) + if (timeflag) { + printv(paste("From", time[1], "to", time[length(time)])) + } + + # returning file list + return(mget(c("field", naxis))) + } + +# ncdf.opener is a simplified wrapper for ncdf.opener.universal which returns +# only the field, ignoring the list +ncdf_opener <- function(namefile, + namevar = NULL, + namelon = NULL, + namelat = NULL, + tmonths = NULL, + tyears = NULL, + rotate = "full", + interp2grid = F, + grid = "r144x73", + remap_method = "remapscon2", + exportlonlat = T) { + field <- + ncdf_opener_universal( + namefile, + namevar, + namelon, + namelat, + tmonths, + tyears, + rotate, + interp2grid, + grid, + remap_method, + exportlonlat = exportlonlat + ) + return(field$field) +} + + +# function to open ncdf files (much more refined, with CDO-based interpolation) +ncdf_opener_time <- # nolint + function(namefile, + namevar = NULL, + namelon = NULL, + namelat = NULL, + tmonths = NULL, + tyears = NULL, + ics = ics, + ipsilon = ipsilon, + rotate = "full", + interp2grid = F, + grid = "r144x73", + remap_method = "remapscon2") { + # function to open netcdf files. It uses ncdf4 library + # time selection of month and years needed automatically rotate matrix + # to place greenwich at the center (flag "rotate") + # and flip the latitudes in order to have increasing + # if require (flag "interp2grid") additional interpolation with CDO + # can be used. 
"grid" can be used to specify the grid name + require(ncdf4) + require(PCICt) + + if (is.null(tyears) | is.null(tmonths)) { + stop("Please specify both months and years to load") + } + + if (rotate == "full") { + rot <- T + move1 <- move2 <- 1 / 2 + } # 180 degrees rotation of longitude + if (rotate == "half") { + rot <- T + move1 <- 1 / 4 + move2 <- 3 / 4 + } # 90 degree rotation (useful for TM90) + if (rotate == "no") { + rot <- F + } # keep as it is, breaking at Greemwich + + # interpolation made with CDO: second order conservative remapping + if (interp2grid) { + print(paste("Remapping with CDO on", grid, "grid")) + namefile <- cdo(remap_method, + args = paste0("'", grid, "'"), + input = namefile + ) + } + + # define rotate function (faster than with apply) + rotation <- function(line) { + vettore <- line + dims <- length(dim(vettore)) + if (dims == 1) { + # for longitudes + ll <- length(line) + line[(ll * move1):ll] <- vettore[1:(ll * move2 + 1)] + line[1:(ll * move1 - 1)] <- vettore[(ll * move2 + 2):ll] - 360 + } + if (dims == 2) { + # for x,y data + ll <- length(line[, 1]) + line[(ll * move1):ll, ] <- vettore[1:(ll * move2 + 1), ] + line[1:(ll * move1 - 1), ] <- vettore[(ll * move2 + 2):ll, ] + } + if (dims == 3) { + # for x,y,t data + ll <- length(line[, 1, 1]) + line[(ll * move1):ll, , ] <- vettore[1:(ll * move2 + 1), , ] + line[1:(ll * move1 - 1), , ] <- + vettore[(ll * move2 + 2):ll, , ] + } + return(line) + } + + # define flip function ('cos rev/apply is not working) + flipper <- function(field) { + dims <- length(dim(field)) + if (dims == 2) { + ll <- length(field[1, ]) + field <- field[, ll:1] + } # for x,y data + if (dims == 3) { + ll <- length(field[1, , 1]) + field <- field[, ll:1, ] + } # for x,y,t data + return(field) + } + + + # opening file: getting variable (if namevar is given, + # that variable is extracted) + print(paste("opening file:", namefile)) + a <- nc_open(namefile) + + # load axis: old version, loading the variable dimensions with a max of + # 4 dimensions. It showed some issues with the time_bnds variable appearing + # in some NetCDF file. naxis=names(a$dim)[1:min(c(4,length(a$dim)))] + # load axis: updated version, looking for dimension directly stored inside + # the variable + naxis <- + unlist(lapply(a$var[[namevar]]$dim, function(x) + x["name"])) + for (axis in naxis) { + print(axis) + assign(axis, ncvar_get(a, axis)) + } + # based on preprocessing of CDO time format: get calendar type and + # use PCICt package for irregular data + caldata <- ncatt_get(a, "time", "calendar")$value + timeline <- + as.PCICt(as.character(time), format = "%Y%m%d", cal = caldata) + str(timeline) + + # break if the calendar has not been recognized + if (any(is.na(timeline))) { + stop("Calendar from NetCDF is unsupported or not present. 
Stopping!!!") + } + + # break if the data requested is not there + lastday_base <- paste0(max(tyears), "-", max(tmonths), "-28") + # uses number_days_month, which loops to get the month change + lastday <- as.PCICt( + paste0( + max(tyears), + "-", + max(tmonths), + "-", + number_days_month(lastday_base) + ), + cal = caldata, + format = "%Y-%m-%d" + ) + firstday <- + as.PCICt(paste0(min(tyears), "-", min(tmonths), "-01"), + cal = caldata, + format = "%Y-%m-%d" + ) + if (max(timeline) < lastday | min(timeline) > firstday) { + stop("You requested a time interval that is not present in the NetCDF") + } + + # time selection and variable loading + # if no name provided load the only variable available + if (is.null(namevar)) { + namevar <- names(a$var) + } + field <- ncvar_get(a, namevar) + + # select data we need + select <- which(as.numeric(format(timeline, "%Y")) %in% tyears & + as.numeric(format(timeline, "%m")) %in% tmonths) + + field <- field[, , select] + time <- timeline[select] + + # check for dimensions (presence or not of time dimension) + dimensions <- length(dim(field)) + + # if dimensions are multiple, get longitude, latitude + # if needed, rotate and flip the array + if (dimensions > 1) { + # assign ics and ipsilon + if (is.null(namelon)) { + xlist <- c("lon", "Lon", "longitude", "Longitude") + if (any(xlist %in% naxis)) { + ics <- get(naxis[(naxis %in% xlist)], a$dim)$vals + } else { + stop("No lon found") + } + } else { + ics <- ncvar_get(a, namelon) + } + if (is.null(namelat)) { + ylist <- c("lat", "Lat", "latitude", "Latitude") + if (any(ylist %in% naxis)) { + ipsilon <- get(naxis[(naxis %in% ylist)], a$dim)$vals + } else { + stop("No lon found") + } + } else { + ipsilon <- ncvar_get(a, namelat) + } + + print("flipping and rotating") + # longitute rotation around Greenwich + if (rot) { + ics <- rotation(ics) + field <- rotation(field) + } + if (ipsilon[2] < ipsilon[1] & length(ipsilon) > 1) { + if (length(ics) > 1) { + ipsilon <- sort(ipsilon) + field <- flipper(field) + } + } + + # exporting variables to the main program + assign("ics", ics, envir = .GlobalEnv) + assign("ipsilon", ipsilon, envir = .GlobalEnv) + assign(naxis[naxis %in% xlist], ics) + assign(naxis[naxis %in% ylist], ipsilon) + } + + + if (dimensions > 3) { + stop("This file is more than 3D file") + } + + # close connection + nc_close(a) + + # remove interpolated file + if (interp2grid) { + unlink(namefile) + } + + # showing array properties + print(paste(dim(field))) + print(paste("From", time[1], "to", time[length(time)])) + + return(mget(c("field", naxis))) + } + + +########################################################## +#--------------Plotting functions------------------------# +########################################################## + + +# Figure functions +scale_figure <- function(plot_type, + diag_script_cfg, + nfields, + npancol, + npanrow) { + source(diag_script_cfg) + if (plot_type == 1 || plot_type == 11) { + npancol <- 1 + npanrow <- 1 + } + if (plot_type == 2) { + npancol <- 1 + npanrow <- 3 + } + if (plot_type == 3) { + npancol <- 3 + napnrow <- nfields + } + npanels <- npancol * npanrow + if (npancol > 1) { + png_width <- png_width_multi * npancol + pdf_width <- pdf_width_multi * npancol + x11_width <- x11_width_multi * npancol + } + png_width <- png_width * figure_rel_width[plot_type] + pdf_width <- pdf_width * figure_rel_width[plot_type] + x11_width <- x11_width * figure_rel_width[plot_type] + + figure_aspect_ratio[plot_type] <- (figure_aspect_ratio[plot_type] + * npancol / npanrow) + + 
+  plot_size <-
+    c(png_width, png_width / figure_aspect_ratio[plot_type])
+  if (tolower(output_file_type) == "pdf") {
+    plot_size[1] <- pdf_width
+    plot_size[2] <- pdf_width / figure_aspect_ratio[plot_type]
+  } else if ((tolower(output_file_type) == "eps") |
+    (tolower(output_file_type) == "epsi") |
+    (tolower(output_file_type) == "ps")) {
+    plot_size[1] <- pdf_width
+    plot_size[2] <- pdf_width / figure_aspect_ratio[plot_type]
+  } else if (tolower(output_file_type) == "x11") {
+    plot_size[1] <- x11_width
+    plot_size[2] <- x11_width / figure_aspect_ratio[plot_type]
+  }
+  print(plot_size)
+  return(plot_size)
+}
+
+graphics_startup <- function(figname, output_file_type, plot_size) {
+  source(diag_script_cfg)
+  # choose output format for figure - by JvH
+  if (tolower(output_file_type) == "png") {
+    png(
+      filename = figname,
+      width = plot_size[1],
+      height = plot_size[2]
+    )
+  } else if (tolower(output_file_type) == "pdf") {
+    pdf(
+      file = figname,
+      width = plot_size[1],
+      height = plot_size[2],
+      onefile = T
+    )
+  } else if ((tolower(output_file_type) == "eps") |
+    (tolower(output_file_type) == "epsi") |
+    (tolower(output_file_type) == "ps")) {
+    setEPS(
+      width = plot_size[1],
+      height = plot_size[2],
+      onefile = T,
+      paper = "special"
+    )
+    postscript(figname)
+  } else if (tolower(output_file_type) == "x11") {
+    x11(width = plot_size[1], height = plot_size[2])
+  }
+  return()
+}
+
+graphics_close <- function(figname) {
+  print(figname)
+  dev.off()
+  return()
+}
+
+# extensive filled.contour function
+filled_contour3 <- # nolint
+  function(x = seq(0, 1, length.out = nrow(z)),
+           y = seq(0, 1, length.out = ncol(z)),
+           z,
+           xlim = range(x, finite = TRUE),
+           ylim = range(y, finite = TRUE),
+           zlim = range(z, finite = TRUE),
+           levels = pretty(zlim, nlevels),
+           nlevels = 20,
+           color.palette = cm.colors,
+           col = color.palette(length(levels) - 1),
+           extend = TRUE,
+           plot.title,
+           plot.axes,
+           key.title,
+           key.axes,
+           asp = NA,
+           xaxs = "i",
+           yaxs = "i",
+           las = 1,
+           axes = TRUE,
+           frame.plot = axes,
+           mar,
+           ...) {
+    # modification by Ian Taylor of the filled.contour function
+    # to remove the key and facilitate overplotting with contour()
+    # further modified by Carey McGilliard and Bridget Ferris
+    # to allow multiple plots on one page
+    # modification to allow plot outside boundaries
+
+    if (missing(z)) {
+      if (!missing(x)) {
+        if (is.list(x)) {
+          z <- x$z
+          y <- x$y
+          x <- x$x
+        }
+        else {
+          z <- x
+          x <- seq.int(0, 1, length.out = nrow(z))
+        }
+      }
+      else {
+        stop("no 'z' matrix specified")
+      }
+    }
+    else if (is.list(x)) {
+      y <- x$y
+      x <- x$x
+    }
+    if (any(diff(x) <= 0) || any(diff(y) <= 0)) {
+      stop("increasing 'x' and 'y' values expected")
+    }
+
+    # trim extremes for nicer plots
+    if (extend) {
+      z[z < min(levels)] <- min(levels)
+      z[z > max(levels)] <- max(levels)
+    }
+
+    plot.new()
+    plot.window(xlim,
+      ylim,
+      "",
+      xaxs = xaxs,
+      yaxs = yaxs,
+      asp = asp
+    )
+    if (!is.matrix(z) || nrow(z) <= 1 || ncol(z) <= 1) {
+      stop("no proper 'z' matrix specified")
+    }
+    if (!is.double(z)) {
+      storage.mode(z) <- "double"
+    }
+    .filled.contour(as.double(x), as.double(y), z, as.double(levels),
+      col = col
+    )
+    if (missing(plot.axes)) {
+      if (axes) {
+        title(
+          main = "",
+          xlab = "",
+          ylab = ""
+        )
+        Axis(x, side = 1, ...)
+        Axis(y, side = 2, ...)
+      }
+    }
+    else {
+      plot.axes
+    }
+    if (frame.plot) {
+      box()
+    }
+    if (missing(plot.title)) {
+      title(...)
+ } else { + plot.title + } + invisible() + } + +image_scale3 <- function(z, + levels, + color.palette = heat.colors, + colorbar.label = "image.scale", + extend = T, + line.label = 2, + line.colorbar = 0, + cex.label = 1, + cex.colorbar = 1, + colorbar.width = 1, + new_fig_scale = c(-0.07, -0.03, 0.1, -0.1), + ...) { + # save properties from main plotting region + old.par <- par(no.readonly = TRUE) + mfg.save <- par()$mfg + old.fig <- par()$fig + + # defining plotting region with proper scaling + xscal <- (old.fig[2] - old.fig[1]) + yscal <- (old.fig[4] - old.fig[3]) + lw <- colorbar.width + lp <- line.colorbar / 100 + new.fig <- c( + old.fig[2] + new_fig_scale[1] * xscal * lw - lp, + old.fig[2] + new_fig_scale[2] * xscal - lp, + old.fig[3] + new_fig_scale[3] * yscal, + old.fig[4] + new_fig_scale[4] * yscal + ) + + if (missing(levels)) { + levels <- seq(min(z), max(z), , 12) + } + # fixing color palette + col <- color.palette(length(levels) - 1) + + # starting plot + par( + mar = c(1, 1, 1, 1), + fig = new.fig, + new = TRUE + ) + + # creating polygons for legend + poly <- vector(mode = "list", length(col)) + for (i in seq(poly)) { + poly[[i]] <- c(levels[i], levels[i + 1], levels[i + 1], levels[i]) + } + + xlim <- c(0, 1) + if (extend) { + longer <- 1.5 + dl <- diff(levels)[1] * longer + ylim <- c(min(levels) - dl, max(levels) + dl) + } else { + ylim <- range(levels) + } + plot( + 1, + 1, + t = "n", + ylim = ylim, + xlim = xlim, + axes = FALSE, + xlab = "", + ylab = "", + xaxs = "i", + yaxs = "i", + ... + ) + for (i in seq(poly)) { + polygon(c(0, 0, 1, 1), poly[[i]], col = col[i], border = NA) + } + if (extend) { + polygon(c(0, 1, 1 / 2), + c(levels[1], levels[1], levels[1] - dl), + col = col[1], + border = NA + ) + polygon(c(0, 1, 1 / 2), + c( + levels[length(levels)], levels[length(levels)], + levels[length(levels)] + dl + ), + col = col[length(col)], + border = NA + ) + polygon( + c(0, 0, 1 / 2, 1, 1, 1 / 2), + c( + levels[1], levels[length(levels)], levels[length(levels)] + dl, + levels[length(levels)], levels[1], levels[1] - dl + ), + border = "black", + lwd = 2 + ) + ylim0 <- range(levels) + prettyspecial <- pretty(ylim0) + prettyspecial <- prettyspecial[prettyspecial <= max(ylim0) & + prettyspecial >= min(ylim0)] + axis( + 4, + las = 1, + cex.axis = cex.colorbar, + at = prettyspecial, + labels = prettyspecial, + ... + ) + } else { + box() + axis(4, las = 1, cex.axis = cex.colorbar, ...) + } + + # box, axis and leged + mtext(colorbar.label, + line = line.label, + side = 4, + cex = cex.label, + ... 
+ ) + + # resetting properties for starting a new plot (mfrow style) + par(old.par) + par(mfg = mfg.save, new = FALSE) + invisible() +} + +cdo <- + function(command, + args = "", + input = "", + options = "", + output = "", + stdout = "", + noout = F) { + if (args != "") { + args <- paste0(",", args) + } + if (stdout != "") { + stdout <- paste0(" > '", stdout, "'") + noout <- T + } + if (input[1] != "") { + for (i in seq_along(input)) { + input[i] <- paste0("'", input[i], "'") + } + input <- paste(input, collapse = " ") + } + output0 <- output + if (output != "") { + output <- paste0("'", output, "'") + } else if (!noout) { + output <- tempfile() + output0 <- output + } + argstr <- + paste0( + options, " ", command, args, " ", input, " ", output, + " ", stdout + ) + print(paste("cdo", argstr)) + ret <- system2("cdo", args = argstr) + if (ret != 0) { + stop(paste("Failed (", ret, "): cdo", argstr)) + } + return(output0) + } diff --git a/esmvaltool/diag_scripts/hyint/hyint_metadata.R b/esmvaltool/diag_scripts/hyint/hyint_metadata.R new file mode 100644 index 0000000000..93ee29e8e7 --- /dev/null +++ b/esmvaltool/diag_scripts/hyint/hyint_metadata.R @@ -0,0 +1,161 @@ +# HyInt metadata + +########################################################## +#----------------Metadata functions------------------------# +########################################################## + +getmetadata_indices <- function(var, sfile) { + ncfile <- nc_open(sfile) + long_name <- (ncatt_get(ncfile, var, "long_name"))$value + units <- (ncatt_get(ncfile, var, "units"))$value + missval <- (ncatt_get(ncfile, var, "missing_value"))$value + if (units == 0) { + units <- "" + } + nc_close(ncfile) + metadata <- + list( + long_name = long_name, + units = units, + missval = missval + ) + + return(metadata) +} + +setmetadata_indices <- function(var) { # nolint + longvar <- "" + unit <- "" + + # name of the var + if (var == "pry") { + longvar <- "Annual mean precipitation" + unit <- "days" + } + if (var == "pry_mean") { + longvar <- + "Normalization function: Annual mean precipitation averaged over + available XX century data" + unit <- "days" + } + if (var == "pry_mean_sd") { + longvar <- + "Normalization function: Standard deviation of the annual mean + precipitation averaged over available XX century data" + unit <- "days" + } + if (var == "pry_norm") { + longvar <- "Normalized annual mean precipitation" + unit <- "" + } + + if (var == "dsl") { + longvar <- "Annual mean dry spell length" + unit <- "days" + } + if (var == "dsl_mean") { + longvar <- + "Normalization function: Annual mean dry spell length averaged + over available XX century data" + unit <- "days" + } + if (var == "dsl_mean_sd") { + longvar <- + "Normalization function: Standard deviation of the annual mean + dry spell length averaged over available XX century data" + unit <- "days" + } + if (var == "dsl_norm") { + longvar <- "Normalized annual mean dry spell length" + unit <- "" + } + + + if (var == "dsl_tseries") { + longvar <- "dsl timeseries over selected regions" + unit <- "" + } + if (var == "dsl_tseries_sd") { + longvar <- "standard deviation about the mean dsl timeseries" + unit <- "" + } + if (var == "dsl_trend") { + longvar <- "trend coefficients over selected time period" + unit <- "" + } + if (var == "dsl_trend_stat") { + longvar <- "statistics of trend over selected time period" + unit <- "" + } + + if (var == "wsl") { + longvar <- "Annual mean wet spell length" + unit <- "days" + } + if (var == "wsl_mean") { + longvar <- + "Normalization function: 
Annual mean wet spell length averaged
+      over available XX century data"
+    unit <- "days"
+  }
+  if (var == "wsl_mean_sd") {
+    longvar <-
+      "Normalization function: Standard deviation of the annual mean
+      wet spell length averaged over available XX century data"
+    unit <- "days"
+  }
+  if (var == "wsl_norm") {
+    longvar <- "Normalized annual mean wet spell length"
+    unit <- ""
+  }
+
+  if (var == "int") {
+    longvar <- "Annual mean precipitation intensity"
+    unit <- "mm day-1"
+  }
+  if (var == "int_mean") {
+    longvar <-
+      "Normalization function: Annual mean precipitation intensity
+      averaged over available XX century data"
+    unit <- "mm day-1"
+  }
+  if (var == "int_mean_sd") {
+    longvar <-
+      "Normalization function: Standard deviation of the annual mean
+      precipitation intensity averaged over available XX century data"
+    unit <- "mm day-1"
+  }
+  if (var == "int_norm") {
+    longvar <- "Normalized annual mean precipitation intensity"
+    unit <- ""
+  }
+
+  if (var == "pa") {
+    longvar <- "Precipitation area: area over which precipitation
+    occurs on any given day."
+    unit <- "mm day-1 km2"
+  }
+  if (var == "pa_mean") {
+    longvar <- "Normalization function: Precipitation area averaged over
+    available XX century data"
+    unit <- "mm day-1"
+  }
+  if (var == "pa_mean_sd") {
+    longvar <- "Normalization function: Standard deviation of the
+    precipitation area averaged over available XX century data"
+    unit <- "mm day-1"
+  }
+  if (var == "pa_norm") {
+    longvar <- "Normalized precipitation area"
+    unit <- ""
+  }
+
+  if (var == "hyint") {
+    longvar <- "Hydroclimatic intensity index"
+    unit <- ""
+  }
+
+  metadata <- list(longvar = longvar, unit = unit)
+
+  return(metadata)
+}
diff --git a/esmvaltool/diag_scripts/hyint/hyint_parameters.R b/esmvaltool/diag_scripts/hyint/hyint_parameters.R
new file mode 100644
index 0000000000..48f4006234
--- /dev/null
+++ b/esmvaltool/diag_scripts/hyint/hyint_parameters.R
@@ -0,0 +1,418 @@
+#####################################################################
+#
+# HyInt parameters file
+#
+# About: fixed parameters for the ESMValTool HyInt tool.
+# Please refer to hyint_recipe for user-friendly settings.
+# Only advanced users should change the settings below.
+#
+#####################################################################
+
+run_regridding <- T
+force_regridding <- F
+run_diagnostic <- T
+force_diagnostic <- F
+run_timeseries <- T
+
+if (!exists("write_netcdf")) {
+  write_netcdf <- T
+}
+if (!exists("write_plots")) {
+  write_plots <- T
+}
+if (!exists("etccdi_preproc")) {
+  etccdi_preproc <- F
+}
+
+# basic settings
+seasons <- c("ALL") # seasons to be analysed: "ALL", "DJF", ...
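+# Example (values are illustrative only): to also analyse boreal winter one
+# could set
+#   seasons <- c("ALL", "DJF")
+# and each listed season would then be processed and plotted separately.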
+
+timedimname <- "time"
+
+rotlongitude <-
+  "full" # a) "full" to convert input arrays from 0/360 to
+# -180/180 longitude grid
+# b) "no" to leave input data on its original grid
+
+grid_file <- "grid_file" # suffix for grid file
+topography_file <-
+  "topo" # suffix for topography file (needed for filtering
+# ocean/land or land elevation)
+
+# Diagnostic options
+# norm_years set in recipe
+external_norm <- F # a) F=use internal data to normalize
+# b) list of names of normalization files
+# (one per input data file or one for all)
+# c) "HIST" to automatically generate the name of the
+# historical run associated with the model name
+
+external_r95 <-
+  external_norm # a) F=use internal data for r95 threshold
+# over the norm_years period
+# b) list of names of files (one per input
+# data file or one for all)
+# c) "HIST" to automatically generate the name of
+# the historical experiment associated with the
+# model name
+
+masksealand <-
+  F # T to mask depending on seaLandElevation threshold
+sealandelevation <-
+  0 # a) 0 land; b) positive value: land above given
+# elevation; c) negative value: sea below given depth.
+# The topography/bathymetry file is generated with cdo
+# from ETOPO data.
+reverse_masksealand <- F # T to reject the selected areas,
+# F to keep them
+topography_highres <- ""
+
+# Plotting options
+# Plot_type set in namelist
+map_continents <- -2 # thickness of continents:
+# positive values in white, negative values in gray
+map_continents_regions <- F # also plot regional boundaries
+ryearplot <- 2006 # years to be plotted for experiments
+# (maps over individual years):
+# a) actual years, b) "FIRST" = first year in dataset
+# c) "ALL" = all years in dataset. E.g. c(1998,2000)
+rmultiyear_mean <- T # plot multiyear mean (this overrides ryearplot)
+
+
+
+ryearplot_ref <-
+  c("EXP") # year to be plotted for reference dataset: options
+# a) "EXP" == same as experiments,
+# b) one year only, e.g. 
c(1998) +force_ref <- F # set TRUE to force plotting of reference data +# as any other experiment + +# settings for trend calculation +lm_trend <- T # calculate linear trend + +# user defined extra label for figure file name +label <- "" + +# colorbar +add_colorbar <- T # T to add colorbar + +# timeseries options +highreselevation <- + F # a) F: neglect; b) value: threshold of minimum elevation +# to be overplotted with contour lines of elevation +highreselevation_only <- + F # T to plot only high resolution elevation contours + +# timeseries and trend plotting options +add_legend <- + 4 # a) F=no legend; b) n>0 list disposed in n column; +# c) <0 horizontal legend +xy_legend <- + c(0.01, 0.98) # position of legend in fraction of plotting panel +tag_legend <- + c(T, F, F) # 1=model name, 2=model experiment, 3=model ensemble +# (select one or more) + +# define fields for timeseries calculation and plotting +hyint_list <- c( + "int_norm", + "dsl_norm", + "wsl_norm", + "hyint", + "int", + "dsl", + "wsl", + "pa_norm", + "r95_norm" +) +etccdi_yr_list <- c( + "altcddETCCDI", + "altcsdiETCCDI", + "altcwdETCCDI", + "altwsdiETCCDI", + "cddETCCDI", + "csdiETCCDI", + "cwdETCCDI", + "dtrETCCDI", + "fdETCCDI", + "gslETCCDI", + "idETCCDI", + "prcptotETCCDI", + "r10mmETCCDI", + "r1mmETCCDI", + "r20mmETCCDI", + "r95pETCCDI", + "r99pETCCDI", + "rx1dayETCCDI", + "rx5dayETCCDI", + "sdiiETCCDI", + "suETCCDI", + "tn10pETCCDI", + "tn90pETCCDI", + "tnnETCCDI", + "tnxETCCDI", + "trETCCDI", + "tx10pETCCDI", + "tx90pETCCDI", + "txnETCCDI", + "txxETCCDI", + "wsdiETCCDI" +) +# Select one or more fields to be plotted (with the required order) +# through the selfields key above +etccdi_list_import <- etccdi_yr_list +field_names <- c(hyint_list, etccdi_yr_list) + +# region box matrix (predefined following Giorgi et al. 2011,2014): +# add here further regions and select those needed through iregion +region_names <- c( + "World", + "World60", + "Tropics", + "South-America", + "Africa", + "North-America", + "India", + "Europe", + "East-Asia", + "Australia" +) +region_codes <- c( + "GL", "GL60", "TR", "SA", "AF", + "NA", "IN", "EU", "EA", "AU" +) +# Select one or more index values through selregions in the standard +# settings above to define regions to be used. Default c(1) == global. + +regions <- matrix(nrow = length(region_names), ncol = 4) +# c(lon1,lon2,lat1,lat2) NOTE: lon(-180/180) +regions[1, ] <- c(-180, 180, -90, 90) +regions[2, ] <- c(-180, 180, -60, 60) +regions[3, ] <- c(-180, 180, -30, 30) +regions[4, ] <- c(-90, -30, -60, 10) +regions[5, ] <- c(-20, 60, -40, 35) +regions[6, ] <- c(-140, -60, 10, 60) +regions[7, ] <- c(60, 100, 0, 35) +regions[8, ] <- c(-10, 30, 35, 70) +regions[9, ] <- c(100, 150, 20, 50) +regions[10, ] <- c(110, 160, -40, -10) + +# define titles and units +title_unit_m <- matrix(nrow = length(field_names), ncol = 4) +title_unit_m[1, ] <- c( + "SDII", + "Norm. annual mean INT", + "Norm. annual mean precipitation intensity", + "" +) +title_unit_m[2, ] <- c( + "DSL", + "Norm. annual mean DSL", + "Norm. annual mean dry spell length", + "" +) +title_unit_m[3, ] <- c( + "WSL", + "Norm. annual mean WSL", + "Norm. 
annual mean wet spell length", + "" +) +title_unit_m[4, ] <- + c("HY-INT", "HY-INT", "Hydroclimatic intensity", "") +title_unit_m[5, ] <- c( + "ABS_INT", + "Annual mean INT", + "Annual mean precipitation intensity", + "(mm/day)" +) +title_unit_m[6, ] <- c( + "ABS_DSL", + "Annual mean DSL", + "Annual mean dry spell length", + "(days)" +) +title_unit_m[7, ] <- c( + "ABS_WSL", + "Annual mean WSL", + "Annual mean wet spell length", + "(days)" +) +title_unit_m[8, ] <- c( + "PA", + " Normalized precipitation area", + "Norm. precipitation area", + "" +) +title_unit_m[9, ] <- c( + "R95", + "Norm. heavy precipitation index", + "Norm. % of total precip. above 95% percentile of reference distribution", + "" +) + + +# define levels for contour/yrange for abs. values: +# (minlev,maxlev,minlev_diff,maxlev_diff) and nlev +nlev <- 24 +levels_m <- matrix(nrow = length(field_names), ncol = 4) + +levels_m[1, ] <- c(0.9, 1.1, -1.2, 1.2) +levels_m[1, ] <- c(0.5, 1.3, -1.2, 1.2) +levels_m[2, ] <- c(0.9, 1.1, -1.2, 1.2) +levels_m[2, ] <- c(0.6, 1.4, -1.2, 1.2) +levels_m[3, ] <- c(0.9, 1.1, -1.2, 1.2) +levels_m[3, ] <- c(0.7, 1.3, -1.2, 1.2) +levels_m[4, ] <- c(0.5, 1.5, -1.2, 1.2) +levels_m[5, ] <- c(0, 10, -5, 5) +levels_m[6, ] <- c(0, 20, -5, 5) +levels_m[7, ] <- c(0, 10, -3, 3) +levels_m[8, ] <- c(0.5, 1.5, -1.2, 1.2) +levels_m[9, ] <- c(0.5, 1.5, -2, 2) +levels_m[10, ] <- c(0, 200, -5, 5) +levels_m[11, ] <- c(-5, 15, -5, 5) +levels_m[12, ] <- c(0, 20, -5, 5) +levels_m[13, ] <- c(0, 20, -5, 5) +levels_m[14, ] <- c(0, 200, -5, 5) +levels_m[15, ] <- c(-10, 30, -5, 5) +levels_m[16, ] <- c(0, 80, -5, 5) +levels_m[17, ] <- c(0, 15, -4, 4) +levels_m[18, ] <- c(0, 200, -10, 10) +levels_m[19, ] <- c(0, 400, -10, 10) +levels_m[20, ] <- c(-10, 200, -10, 10) +levels_m[21, ] <- c(0, 3000, -100, 100) +levels_m[22, ] <- c(0, 80, -10, 10) +levels_m[23, ] <- c(0, 300, -10, 10) +levels_m[24, ] <- c(0, 50, -2, 2) +levels_m[25, ] <- c(0, 800, -20, 20) +levels_m[26, ] <- c(0, 300, -10, 10) +levels_m[27, ] <- c(0, 100, -10, 10) +levels_m[28, ] <- c(0, 200, -10, 10) +levels_m[29, ] <- c(0, 15, -5, 5) +levels_m[30, ] <- c(0, 300, -20, 20) +levels_m[31, ] <- c(-5, 25, -5, 5) +levels_m[32, ] <- c(0, 300, -5, 5) +levels_m[33, ] <- c(-40, 40, -5, 5) +levels_m[34, ] <- c(0, 40, -5, 5) +levels_m[35, ] <- c(-20, 300, -5, 5) +levels_m[36, ] <- c(-5, 25, -2, 2) +levels_m[37, ] <- c(-20, 140, -4, 4) +levels_m[38, ] <- c(-30, 30, -5, 5) +levels_m[39, ] <- c(0, 50, -2, 2) +levels_m[40, ] <- c(-20, 320, -2, 2) + +# define levels for contour/yrange for trends (minlev,maxlev) +ntlev <- 24 +tlevels_m <- matrix(nrow = length(field_names), ncol = 2) +tlevels_m[1, ] <- c(-0.05, 0.2) * 0.01 +tlevels_m[2, ] <- c(-0.1, 0.4) * 0.01 +tlevels_m[3, ] <- c(-0.1, 0.1) * 0.01 +tlevels_m[4, ] <- c(0, 0.4) * 0.01 +tlevels_m[5, ] <- c(0, 1.5) * 0.01 +tlevels_m[6, ] <- c(-1, 6) * 0.01 +tlevels_m[7, ] <- c(-0.8, 0.8) * 0.01 +tlevels_m[8, ] <- c(-0.3, 0.5) * 0.01 +tlevels_m[9, ] <- c(0, 0.6) * 0.01 +tlevels_m[10, ] <- c(0, 200) * 0.01 +tlevels_m[11, ] <- c(0, 12) * 0.01 +tlevels_m[12, ] <- c(0, 20) * 0.01 +tlevels_m[13, ] <- c(0, 20) * 0.01 +tlevels_m[14, ] <- c(0, 15) * 0.01 +tlevels_m[15, ] <- c(-70, 0) * 0.01 +tlevels_m[16, ] <- c(-4, 4) * 0.01 +tlevels_m[17, ] <- c(-1, 0) * 0.01 +tlevels_m[18, ] <- c(-70, 10) * 0.01 +tlevels_m[19, ] <- c(-10, 90) * 0.01 +tlevels_m[20, ] <- c(-60, 0) * 0.01 +tlevels_m[21, ] <- c(-20, 120) * 0.01 +tlevels_m[22, ] <- c(0, 10) * 0.01 +tlevels_m[23, ] <- c(-15, 5) * 0.01 +tlevels_m[24, ] <- c(0, 6) * 0.01 +tlevels_m[25, ] <- c(0, 
100) * 0.01 +tlevels_m[26, ] <- c(0, 60) * 0.01 +tlevels_m[27, ] <- c(0, 15) * 0.01 +tlevels_m[28, ] <- c(0, 50) * 0.01 +tlevels_m[29, ] <- c(0, 15) * 0.01 +tlevels_m[30, ] <- c(0, 140) * 0.01 +tlevels_m[31, ] <- c(-30, 0) * 0.01 +tlevels_m[32, ] <- c(0, 100) * 0.01 +tlevels_m[33, ] <- c(0, 8) * 0.01 +tlevels_m[34, ] <- c(0, 8) * 0.01 +tlevels_m[35, ] <- c(0, 150) * 0.01 +tlevels_m[36, ] <- c(-30, 0) * 0.01 +tlevels_m[37, ] <- c(0, 160) * 0.01 +tlevels_m[38, ] <- c(2, 8) * 0.01 +tlevels_m[39, ] <- c(0, 8) * 0.01 +tlevels_m[40, ] <- c(-100, 300) * 0.01 + +# Figure details + +# Aspect ratio (width:height) +# (depending on plot_type) +figure_aspect_ratio <- c( + 1.6, 1.6, 1, 1.3, 1, 1, 1, 1, 1, 1, + 1.6, 1.8, 1.8, 1.8, 1.8, 1, 1, 1, 1, 1 +) + +figure_rel_width <- c( + 0.7, 0.7, 0.7, 1, 1, 1, 1, 1, 1, 1, + 0.7, 1, 1, 1, 1, 1, 1, 1, 1, 1 +) + +# Figure width +png_width <- 960 +pdf_width <- 10 +x11_width <- 7 + +#  Panel width when adopting multi-panel +# (this overrides figure width) +png_width_multi <- 480 +pdf_width_multi <- 5 +x11_width_multi <- 4 + +# color palette to be used +palette1 <- colorRampPalette(c("white", "orange", "darkred")) +palette2 <- colorRampPalette(c("blue", "white", "red")) +palette3 <- colorRampPalette(c( + "darkblue", + "blue", + "dodgerblue", + "white", + "orange", + "red", + "darkred" +)) +palette_giorgi2011 <- colorRampPalette( + c( + "white", + "khaki1", + "darkseagreen2", + "mediumseagreen", + "lightskyblue1", + "lightskyblue", + "deepskyblue2", + "dodgerblue2", + "dodgerblue3", + "royalblue4" + ) +) +palette_ts <- + c( + "#377EB8", + "#4DAF4A", + "#984EA3", + "#FF7F00", + "#A65628", + "#F781BF", + "#E41A1C", + "#8DD3C7", + "#BEBADA", + "#FB8072", + "#80B1D3", + "#FDB462", + "#B3DE69", + "#FCCDE5", + "#D9D9D9", + "#BC80BD", + "#CCEBC5", + "#FFED6F" + ) diff --git a/esmvaltool/diag_scripts/hyint/hyint_plot_maps.R b/esmvaltool/diag_scripts/hyint/hyint_plot_maps.R new file mode 100644 index 0000000000..08da0c2dae --- /dev/null +++ b/esmvaltool/diag_scripts/hyint/hyint_plot_maps.R @@ -0,0 +1,781 @@ +###################################################### +#---------Maps plotting routine for HyInt------------# +#-------------E. 
Arnone (September 2017)-------------# +###################################################### + +# DECLARING THE FUNCTION: EXECUTION IS AT THE BOTTOM OF THE SCRIPT + +hyint_plot_maps <- # nolint + function(work_dir, + plot_dir, + ref_dir, + ref_idx, + season, + prov_info) { + # setting up path and parameters + dataset_ref <- models_name[ref_idx] + year1_ref <- models_start_year[ref_idx] + year2_ref <- models_end_year[ref_idx] + years_ref <- year1_ref:year2_ref + + # set main paths + work_dir_exp <- work_dir + plot_dir_exp <- plot_dir + dir.create(plot_dir_exp, recursive = T) + + # Define fields to be used + if (selfields[1] != F) { + field_names <- field_names[selfields, drop = F] + levels_m <- levels_m[selfields, , drop = F] + title_unit_m <- title_unit_m[selfields, , drop = F] + } + nfields <- length(field_names) + + # Define quantity (exp, ref, exp-ref) to be plotted depending on plot_type + # 1=exp_only, 2=ref_only, 3=exp/ref/exp-ref + nquantity <- c(1, 3, 3, 1) + + # Define regions to be used + nregions <- length(selregions) + if (nregions > dim(regions)[1]) { + stop(paste(diag_base, ": requesting regions outside list")) + } + + if (autolevels) { + levels_m[] <- NA + } + + # ------- loading reference data ---------- + # load topography if needed + if (masksealand) { + topofile <- + getfilename_indices(work_dir, diag_base, ref_idx, topo = T) + gridfile <- + getfilename_indices(work_dir, diag_base, ref_idx, grid = T) + if (!file.exists(topofile)) { + create_landseamask( + regrid = gridfile, + loc = run_dir, + regridded_topo = topofile, + topo_only = T + ) + } + relevation <- ncdf_opener(topofile, "topo", "lon", "lat", + rotate = "no" + ) + } + if (highreselevation) { + highresel <- get_elevation(elev_range = c(highreselevation, 9000)) + } + + # produce desert areas map if required from reference file + # (mean annual precipitation <0.5 mm, Giorgi et al. 
2014) + if (removedesert) { + # reference model + ref_filename <- + getfilename_indices(ref_dir, diag_base, ref_idx, season) + pry <- + ncdf_opener(ref_filename, "pry", "lon", "lat", rotate = "no") + retdes <- which(pry < 0.5) + pry[retdes] <- NA + # create mask with NAs for deserts and 1's for non-desert + ref_retdes2D <- apply(pry * 0, c(1, 2), sum) + 1 + ref_retdes3D <- + replicate(dim(pry)[length(dim(pry))], ref_retdes2D) + } + + # open reference field + ref_filename <- + getfilename_indices(ref_dir, diag_base, ref_idx, season) + print(paste("Reading reference ", ref_filename)) + for (field in field_names) { + field_ref <- + ncdf_opener(ref_filename, field, "lon", "lat", rotate = "no") + ics_ref <- ics + ipsilon_ref <- ipsilon + + if (removedesert) { + field_ref <- field_ref * ref_retdes3D + } + if (masksealand) { + field_ref <- apply_elevation_mask( + field_ref, relevation, + sealandelevation + ) + } + # if requested calculate multiyear average and store at time=1 + # in this case skip multi-year plot_type 4 + if (rmultiyear_mean) { + if (plot_type == 4) { + print("skipping multi-year plot_type 4 with multiyear mean") + return(0) + } + # exclude normalization years from multiyear mean + retyears <- seq_along(years_ref) + skipyears <- which(as.logical(match( + years_ref, + norm_years[1]:norm_years[2] + ))) + retyears[skipyears] <- NA + retyears <- retyears[which(is.finite(retyears))] + field_ref[, , 1] <- apply(field_ref[, , retyears], + c(1, 2), mean, + na.rm = T + ) + } + assign(paste(field, "_ref", sep = ""), field_ref) + } + + # Loop over models + for (model_idx in c(1:(length(models_name)))) { + # Do not compare reference with itself + if ((model_idx == ref_idx) && + ((plot_type == 2) || (plot_type == 3))) { + if (length(models_name) == 1) { + print("skipping comparison plots because + only one dataset was requested") + } + next + } + + # setting up path and parameters + exp <- models_name[model_idx] + year1 <- models_start_year[model_idx] + year2 <- models_end_year[model_idx] + + # Years to be considered based on namelist and cfg_file + years <- year1:year2 + if (ryearplot[1] == "ALL") { + years <- year1:year2 + } else if (ryearplot[1] == "FIRST") { + years <- year1 + } else { + years <- years[match(ryearplot, years)] + years <- years[!is.na(years)] + } + nyears <- length(years) + + # Remove deserts if required + if (removedesert) { + filename <- getfilename_indices( + work_dir_exp, diag_base, model_idx, + season + ) + if ((rgrid == F) & ((plot_type == 2) | (plot_type == 3))) { + # regrid when comparing + pry <- + ncdf_opener( + filename, + "pry", + "lon", + "lat", + rotate = "no", + interp2grid = T, + grid = ref_filename + ) + } else { + pry <- ncdf_opener(filename, "pry", "lon", "lat", rotate = "no") + } + retdes <- which(pry < 0.5) + pry[retdes] <- NA + # create mask with NAs for deserts and 1's for non-desert + exp_retdes2D <- apply(pry * 0, c(1, 2), sum) + 1 + exp_retdes3D <- + replicate(dim(pry)[length(dim(pry))], exp_retdes2D) + } + + #-----------------Loading data-----------------------# + # open experiment field + for (field in field_names) { + infile <- getfilename_indices( + work_dir_exp, diag_base, model_idx, + season + ) + print(paste("Reading ", field, " from experiment ", infile)) + if ((rgrid == F) & ((plot_type == 2) | (plot_type == 3))) { + # regrid when comparing + field_exp <- + ncdf_opener( + infile, + field, + "lon", + "lat", + rotate = "no", + interp2grid = T, + grid = ref_filename + ) + } else { + field_exp <- ncdf_opener(infile, field, "lon", "lat", 
rotate = "no") + } + if (removedesert) { + field_exp <- field_exp * exp_retdes3D + } + if (masksealand) { + field_exp <- apply_elevation_mask( + field_exp, relevation, + sealandelevation + ) + } + # if requested calculate multiyear average and store it at time=1 + if (rmultiyear_mean) { + years <- year1:year2 + retyears <- seq_along(years) + skipyears <- which(as.logical(match( + years, + norm_years[1]:norm_years[2] + ))) + retyears[skipyears] <- NA + retyears <- retyears[which(is.finite(retyears))] + field_exp[, , 1] <- apply(field_exp[, , retyears], + c(1, 2), mean, + na.rm = T + ) + } + if (highreselevation_only) { + field_exp[] <- NA + } + assign(paste(field, "_exp", sep = ""), field_exp) + } + + #---------------Multiyear mean-----# + if (rmultiyear_mean) { + nyears <- 1 + } + + #-----------------Producing figures------------------------# + + print(paste0(diag_base, ": starting figures")) + + # Set figure dimensions + plot_size <- + scale_figure( + plot_type, + diag_script_cfg, + length(selfields), + npancol, + npanrow + ) + if (boxregion != 0) { + # boxregion will plot region boxes over a global map of selected field + nregions <- 1 + } + + # LOOP over selected regions + for (iselregion in 1:nregions) { + iregion <- selregions[iselregion] + print(paste("region: ", region_names[iregion])) + + # Startup graphics for multiple years in one figure + if (plot_type == 4) { + field_label <- "multiindex" + figname <- getfilename_figure( + plot_dir_exp, + field_label, + year1, + year2, + model_idx, + season, + "multiyear", + region_codes[iregion], + label, + "map", + output_file_type + ) + graphics_startup(figname, output_file_type, plot_size) + par( + mfrow = c(nyears, nfields), + cex.main = 1.3, + cex.axis = 1.2, + cex.lab = 1.2, + mar = c(2, 2, 2, 2), + oma = c(1, 1, 1, 1) + ) + } + # LOOP over years defined in parameter file + for (iyear in c(1:nyears)) { + if (ryearplot_ref[1] == "EXP") { + iyear_ref <- iyear + } else { + iyear_ref <- match(ryearplot_ref, years_ref) + } + time_label <- years[iyear] + time_label_ref <- years[iyear_ref] + time_label_fig <- time_label + if (rmultiyear_mean) { + time_label <- paste(year1, year2, sep = "-") + time_label_ref <- paste(year1_ref, year2_ref, sep = "-") + time_label_fig <- "myearmean" + } + print(paste0( + diag_base, + ": plotting data for ", + region_names[iregion], + "-", + time_label + )) + + # standard properties + info_exp <- paste(exp, time_label) # ,season) + info_ref <- paste(dataset_ref, time_label_ref) # ,season) + + #  Startup graphics for multiple fields/quantities in one figure + if (plot_type == 3) { + field_label <- "multiindex" + figname <- getfilename_figure( + plot_dir_exp, + field_label, + year1, + year2, + model_idx, + season, + time_label_fig, + region_codes[iregion], + label, + "map", + output_file_type + ) + graphics_startup(figname, output_file_type, plot_size) + par( + mfrow = c(nfields, 3), + cex.main = 1.3, + cex.axis = 1.2, + cex.lab = 1.2, + mar = c(2, 2, 2, 6), + oma = c(1, 1, 1, 1) + ) + } + # LOOP over fields + for (field in field_names) { + ifield <- which(field == field_names) + if (anyNA(title_unit_m[ifield, 1:3])) { + title_unit_m[ifield, 1:3] <- field + title_unit_m[ifield, 4] <- "" + } + + # get fields + field_ref <- get(paste(field, "_ref", sep = "")) + field_exp <- get(paste(field, "_exp", sep = "")) + + # MAPS: select required year (if requested, multiyear average + # is stored at iyear=1) + field_ref <- field_ref[, , iyear] + field_exp <- field_exp[, , iyear_ref] + tmp_field <- field_exp + + # define 
quantity-dependent properties (exp, ref, exp-ref) + tmp.colorbar <- c(F, T, T) + if (plot_type == 1) { + tmp.colorbar <- T + } + tmp.palette <- palette_giorgi2011 + if (is.na(levels_m[ifield, 1]) | + is.na(levels_m[ifield, 2])) { + print("No value for range: assigning min and max") + tmp.levels <- seq(min(field_ref, na.rm = T), + max(field_ref, na.rm = T), + len = nlev + ) + } else { + tmp.levels <- seq(levels_m[ifield, 1], levels_m[ifield, 2], + len = nlev + ) + } + if (highreselevation_only) { + title_unit_m[ifield, 1] <- "Elevation" + } + tmp.titles <- paste0( + title_unit_m[ifield, 1], + ": ", + region_names[iregion], + "-", + c(info_exp, info_ref, "Difference") + ) + if (plot_type == 4) { + tmp.titles <- paste(title_unit_m[ifield, 1], time_label) + } + + # Startup graphics for individual fields and multi + # quantities in each figure + if (plot_type == 2) { + figname <- getfilename_figure( + plot_dir_exp, + field, + year1, + year2, + model_idx, + season, + time_label_fig, + region_codes[iregion], + label, + "comp_map", + output_file_type + ) + graphics_startup(figname, output_file_type, plot_size) + par( + mfrow = c(3, 1), + cex.main = 2, + cex.axis = 1.5, + cex.lab = 1.5, + mar = c(5, 5, 4, 8), + oma = c(1, 1, 1, 1) + ) + } + + # --- MAPS ---- + # LOOP over quantity (exp,ref,exp-ref difference) to be plotted + for (iquantity in c(1:nquantity[plot_type])) { + if (iquantity == 2) { + tmp_field <- field_ref + ipsilon <- ipsilon_ref + ics <- ics_ref + } + if (iquantity == 3) { + tmp.palette <- palette2 + tmp_field <- field_exp - field_ref + if (is.na(levels_m[ifield, 3]) | + is.na(levels_m[ifield, 4])) { + tmp_field_max <- max(abs(tmp_field), na.rm = T) + tmp.levels <- seq(-tmp_field_max, tmp_field_max, + len = nlev + ) + } else { + tmp.levels <- seq(levels_m[ifield, 3], levels_m[ifield, 4], + len = nlev + ) + } + } + # Startup graphics for individual field in each figure + if (plot_type == 1) { + figname <- getfilename_figure( + plot_dir_exp, + field, + year1, + year2, + model_idx, + season, + time_label_fig, + region_codes[iregion], + label, + "map", + output_file_type + ) + graphics_startup(figname, output_file_type, plot_size) + lonlat_aratio <- (max(ics) - min(ics)) / + (max(ipsilon) - min(ipsilon)) + par( + mfrow = c(1, 1), + cex.main = 2, + cex.axis = 1.5, + cex.lab = 1.5, + mar = c(5, 5, 4, 8), + oma = c(1, 1, 1, 1) + ) + # mar = c(3, 3, 4, 8), oma = c(1, 1, 1, 1)) + } + + # set active panel + if (plot_type == 3) { + par(mfg = c(ifield, iquantity, nfields, 3)) + } + if (plot_type == 4) { + par(mfg = c(iyear, ifield, nyears, nfields)) + } + + # scale autolevels if required + if (autolevels && (autolevels_scale != 1)) { + autorange <- max(tmp.levels) - min(tmp.levels) + meanrange <- mean(tmp.levels) + tmp.levels <- + seq( + meanrange - autorange * autolevels_scale, + meanrange + autorange * autolevels_scale, + len = nlev + ) + } + + cex_main <- 1.4 + if (plot_type == 1) { + cex_main <- 1.3 + } + + # drop data outside region limits + retlon <- which(ics < regions[iregion, 1] + | ics > regions[iregion, 2]) + retlat <- which(ipsilon < regions[iregion, 3] + | ipsilon > regions[iregion, 4]) + mask_field <- tmp_field + mask_field[retlon, ] <- NA + mask_field[, retlat] <- NA + tmp_field <- mask_field + + # contours + filled_contour3( + ics, + ipsilon, + tmp_field, + xlab = "Longitude", + ylab = "Latitude", + main = tmp.titles[iquantity], + levels = tmp.levels, + color.palette = tmp.palette, + xlim = c(regions[iregion, 1], regions[iregion, 2]), + ylim = c(regions[iregion, 3], 
regions[iregion, 4]), + axes = F, + asp = 1, + cex.main = cex_main + ) + # continents + continents_col <- "white" + if (map_continents <= 0) { + continents_col <- "gray30" + } + map( + "world", + regions = ".", + interior = map_continents_regions, + exact = F, + boundary = T, + add = T, + col = continents_col, + lwd = abs(map_continents) + ) + # rect(regions[iregion, 1], regions[iregion, 3], + # regions[iregion, 2], regions[iregion, 4], + # border = "grey90", lwd = 3) + # grid points + if (oplot_grid) { + # build up grid if needed + ics2 <- replicate(length(ipsilon), ics) + ipsilon2 <- t(replicate(length(ics), ipsilon)) + points( + ics2, + ipsilon2, + pch = 1, + col = "grey40", + cex = oplot_grid + ) + } + # add highres elevation contours + if (highreselevation) { + palette(terrain.colors(10)) + contour( + highresel$lon_el, + highresel$lat_el, + highresel$elevation, + levels = seq(500, 5000, length.out = 10), + col = 1:10, + add = T + ) + } + # boxes + box(col = "grey60") + if (boxregion != 0) { + box_col <- "white" + if (boxregion <= 0) { + box_col <- "grey30" + } + for (ireg in 2:length(selregions)) { + iselreg <- selregions[ireg] + rect( + regions[iselreg, 1], + regions[iselreg, 3], + regions[iselreg, 2], + regions[iselreg, 4], + border = box_col, + lwd = abs(boxregion) + ) + text( + regions[iselreg, 1], + regions[iselreg, 3], + paste0(" ", region_codes[iselreg]), + col = box_col, + pos = 3, + offset = 0.5 + ) + } + } + # axis + if (plot_type <= 2) { + if ((regions[iregion, 2] - regions[iregion, 1] > 90) + | + (regions[iregion, 4] - regions[iregion, 3] > 90)) { + axis(1, + col = "grey40", + at = seq(-180, 180, 45) + ) + axis(2, col = "grey40", at = seq(-90, 90, 30)) + } else { + axis(1, col = "grey40") + axis(2, col = "grey40") + } + } else if (plot_type == 3) { + if (iquantity == 1) { + if ((regions[iregion, 2] - regions[iregion, 1] > 90) + | + (regions[iregion, 4] - regions[iregion, 3] > 90)) { + axis(2, + col = "grey40", + at = seq(-90, 90, 30) + ) + } else { + axis(2, col = "grey40") + } + } + if (ifield == length(field_names)) { + if ((regions[iregion, 2] - regions[iregion, 1] > 90) + | + (regions[iregion, 4] - regions[iregion, 3] > 90)) { + axis(1, + col = "grey40", + at = seq(-180, 180, 45) + ) + } else { + axis(1, col = "grey40") + } + } + } else if (plot_type == 4) { + if (iyear == nyears) { + if ((regions[iregion, 2] - regions[iregion, 1] > 90) + | + (regions[iregion, 4] - regions[iregion, 3] > 90)) { + axis(1, + col = "grey40", + at = seq(-180, 180, 45) + ) + } else { + axis(1, col = "grey40") + } + } + if (field == "int_norm") { + if ((regions[iregion, 2] - regions[iregion, 1] > 90) + | + (regions[iregion, 4] - regions[iregion, 3] > 90)) { + axis(2, + col = "grey40", + at = seq(-90, 90, 30) + ) + } else { + axis(2, col = "grey40") + } + } + } + + # colorbar + new_fig_scale <- c(-0.11, -0.04, 0.1, -0.1) + line_label <- 2.7 + cex_label <- 1.2 + cex_colorbar <- 1 + if (plot_type == 2) { + new_fig_scale <- c(-0.07, -0.02, 0.1, -0.1) + line_label <- 2.7 + cex_label <- 1 + cex_colorbar <- 1.5 + } + if (plot_type == 3) { + new_fig_scale <- c(-0.11, -0.03, 0.1, -0.1) + line_label <- 3 + cex_label <- 1 + cex_colorbar <- 1.2 + } + if ((tmp.colorbar[iquantity]) & add_colorbar) { + image_scale3( + volcano, + levels = tmp.levels, + new_fig_scale = new_fig_scale, + color.palette = tmp.palette, + colorbar.label = + paste( + title_unit_m[ifield, 1], + title_unit_m[ifield, 4] + ), + cex.colorbar = cex_colorbar, + cex.label = cex_label, + colorbar.width = 1, + line.label = line_label, + 
line.colorbar = 1.0
+                  )
+                }
+              } # close loop over quantity
+              if (plot_type == 1) {
+                graphics_close(figname)
+                # Store data for provenance
+                caption <-
+                  paste0(
+                    "Map for index ",
+                    field,
+                    " over region ",
+                    region_codes[iregion],
+                    " according to ",
+                    models_name[model_idx]
+                  )
+                anc_list <- flatten_lists(prov_info[[infile]]$ancestors)
+                prov_fig_now <- list(
+                  figname = figname,
+                  caption = caption,
+                  model_idx = list(model_idx),
+                  ancestors = anc_list
+                )
+                prov_info[[figname]] <- prov_fig_now
+              }
+              if (plot_type == 2) {
+                graphics_close(figname)
+                # Store data for provenance
+                caption <-
+                  paste0(
+                    "Map for index ",
+                    field,
+                    " over region ",
+                    region_codes[iregion],
+                    " according to ",
+                    models_name[model_idx],
+                    " in comparison to reference dataset"
+                  )
+                anc_list <- flatten_lists(c(prov_info[[infile]]$ancestors,
+                  prov_info[[ref_filename]]$ancestors))
+                prov_fig_now <- list(
+                  figname = figname,
+                  caption = caption,
+                  model_idx = list(model_idx, ref_idx),
+                  ancestors = anc_list
+                )
+                prov_info[[figname]] <- prov_fig_now
+              }
+            } # close loop over field
+            if (plot_type == 3) {
+              graphics_close(figname)
+              # Store data for provenance
+              caption <- paste0(
+                "Comparison maps for multiple indices",
+                " over region ",
+                region_codes[iregion]
+              )
+
+              anc_list <- flatten_lists(c(prov_info[[infile]]$ancestors,
+                prov_info[[ref_filename]]$ancestors))
+              prov_fig_now <- list(
+                figname = figname,
+                caption = caption,
+                model_idx = list(model_idx, ref_idx),
+                ancestors = anc_list
+              )
+              prov_info[[figname]] <- prov_fig_now
+            }
+          } # close loop over years
+          if (plot_type == 4) {
+            graphics_close(figname)
+            # Store data for provenance
+            caption <-
+              paste0("Maps for multiple indices over selected years")
+            anc_list <- flatten_lists(prov_info[[infile]]$ancestors)
+            prov_fig_now <- list(
+              figname = figname,
+              caption = caption,
+              model_idx = list(model_idx),
+              ancestors = anc_list
+            )
+            prov_info[[figname]] <- prov_fig_now
+          }
+        } # close loop over regions
+      } # close loop over models
+      return(prov_info)
+    } # close function
diff --git a/esmvaltool/diag_scripts/hyint/hyint_plot_trends.R b/esmvaltool/diag_scripts/hyint/hyint_plot_trends.R
new file mode 100644
index 0000000000..ba6acfbadf
--- /dev/null
+++ b/esmvaltool/diag_scripts/hyint/hyint_plot_trends.R
@@ -0,0 +1,902 @@
+######################################################
+#--------Trend plotting routine for HyInt------------#
+#-------------E. Arnone (September 2017)-------------#
+######################################################
+
+hyint_plot_trends <- function(work_dir, # nolint
+                              plot_dir,
+                              ref_idx,
+                              season,
+                              prov_info) {
+  # Define subscripts for variable names
+  var_type <- c("tseries", "tseries-sd", "trend", "trend-stat")
+
+  # Set main paths
+  dir.create(plot_dir, recursive = T)
+
+  # Load palette
+  palette(palette_ts)
+
+  # Number of models
+  nmodels <- length(models_name)
+
+  # Define regions to be used
+  nregions <- length(selregions)
+  if ((plot_type == 13) | (plot_type == 15)) {
+    # if plotting multiple models use only first region of list
+    nregions <- 1
+  }
+
+  # Check whether enough panels are allocated for plotting the
+  # indices requested. 
If not, drop extra indices
+  npanels <- npancol * npanrow
+  if (npanels < length(selfields)) {
+    selfields <- selfields[1:npanels]
+  }
+
+  # Update number of panels and columns if selfields has one element only
+  if (length(selfields) == 1) {
+    npancol <- 1
+    npanrow <- 1
+  }
+
+  # Define fields to be used (note that the routine is
+  # optimized for 6 fields in 3x2 panels per multi-panel figures)
+  if (selfields[1] != F) {
+    field_names <- field_names[selfields, drop = F]
+    levels_m <- levels_m[selfields, , drop = F]
+    tlevels_m <- tlevels_m[selfields, , drop = F]
+    title_unit_m <- title_unit_m[selfields, , drop = F]
+  }
+
+  # Define field label for filenames
+  field_label <- "multiindex"
+  if (length(selfields) == 1) {
+    field_label <- field_names
+  }
+
+  # Remove preset range of values for plotting if needed
+  nyears <- models_end_year[ref_idx] - models_start_year[ref_idx]
+  if (autolevels) {
+    tlevels_m[] <- NA
+    levels_m[] <- NA
+  }
+
+  # If on, switch shade option off and lines on for plot_type 13
+  if (plot_type == 13 & add_trend_sd_shade) {
+    add_trend_sd_shade <- F
+    add_trend_sd_lines <- T
+  }
+
+  # Define array to store plotting limits for each panel of multi-panel figures
+  plot_limits <- array(NaN, c(4, length(field_names)))
+
+  # Load parameters for reference dataset
+  year1_ref <- models_start_year[ref_idx]
+  year2_ref <- models_end_year[ref_idx]
+
+  # Handle label tag when overplotting data from tseries
+  # files with different labels in plot_type 14 and 15
+  label_figname <- label[1]
+  if (length(label) > 1 & plot_type >= 10) {
+    label_figname <- paste0(label[1], "-plus")
+  }
+
+  # Set figure dimensions
+  plot_size <-
+    scale_figure(
+      plot_type,
+      diag_script_cfg,
+      length(selfields),
+      npancol,
+      npanrow
+    )
+
+  # Startup graphics for multi-model timeseries or trends
+  plot_type_now <- (plot_type == 13) | (plot_type == 15)
+  if (plot_type_now == T) {
+    tseries_trend_tag <- "timeseries"
+    if (plot_type == 15) {
+      tseries_trend_tag <- "trend_summary"
+    }
+    figname <- getfilename_figure(
+      plot_dir,
+      field_label,
+      year1_ref,
+      year2_ref,
+      ref_idx,
+      season,
+      "",
+      region_codes[selregions[1]],
+      label_figname,
+      tseries_trend_tag,
+      output_file_type,
+      multimodel = T
+    )
+    graphics_startup(figname, output_file_type, plot_size)
+    par(
+      mfrow = c(npanrow, npancol),
+      cex.main = 1.3,
+      cex.axis = 1.2,
+      cex.lab = 1.2,
+      mar = c(5, 5, 5, 5),
+      oma = c(1, 1, 1, 1)
+    )
+  }
+
+  n_noplot <- 1
+  if ((plot_type == 13 | plot_type == 15) & autolevels) {
+    n_noplot <- 2
+  }
+  minmax_levels <- c(NA, NA)
+
+  # if requested, loop twice over all models to get range of values for plots
+  for (noplot in n_noplot:1) {
+    # Loop over models
+    for (model_idx in 1:nmodels) {
+      # setting up path and parameters
+      year1 <- models_start_year[model_idx]
+      year2 <- models_end_year[model_idx]
+
+      # Years to be considered based on namelist and cfg_file
+      years <- year1:year2
+      years_ref <- year1_ref:year2_ref
+      if (ryearplot[1] == "ALL") {
+        years <- year1:year2
+      } else if (ryearplot[1] == "FIRST") {
+        years <- year1
+      } else {
+        years <- years[match(ryearplot, years)]
+        years <- years[!is.na(years)]
+      }
+      if (plot_type >= 14) {
+        add_trend <- F
+      } # do not plot trend line for plot 14 or 15
+
+      # Startup graphics for multi-region timeseries
+      if (plot_type == 12) {
+        figname <- getfilename_figure(
+          plot_dir,
+          field_label,
+          year1,
+          year2,
+          model_idx,
+          season,
+          "",
+          "multiregion",
+          label_figname,
+          "timeseries",
+          output_file_type
+        )
+        graphics_startup(figname, output_file_type, plot_size)
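+        # Note: the mfrow grid opened just below must be consistent with the
+        # npancol/npanrow values, since individual panels are re-addressed
+        # later through par(mfg = c(row, col, npanrow, npancol)).
+        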
par( + mfrow = c(npanrow, npancol), + cex.main = 1.3, + cex.axis = 1.2, + cex.lab = 1.2, + mar = c(5, 5, 5, 5), + oma = c(1, 1, 1, 1) + ) + } + #  Startup graphics for bar plot of trend coefficients + if (plot_type == 14) { + figname <- getfilename_figure( + plot_dir, + field_label, + year1, + year2, + model_idx, + season, + "", + "multiregion", + label_figname, + "trend_summary", + output_file_type + ) + graphics_startup(figname, output_file_type, plot_size) + par( + mfrow = c(npanrow, npancol), + cex.main = 1.3, + cex.axis = 1.2, + cex.lab = 1.2, + mar = c(8, 8, 2, 2), + oma = c(1, 1, 1, 1) + ) + } + + if (model_idx == 1) { + store_label <- label + } + + # ----- Loop over label when plotting more files in the same panel ---- + for (ilabel in seq_along(store_label)) { + label <- store_label[ilabel] + #-----------------Loading data-----------------------# + + # open timeseries and trends for exp and ref + infile <- + getfilename_trends(work_dir, label, model_idx, season) + print(paste("HyInt_trends: reading file ", infile)) + field_long_names <- array(NaN, length(field_names)) + field_units <- array(NaN, length(field_names)) + + if ((plot_type == 13) | (plot_type == 15)) { + # Store data for provenance + caption <- + paste0( + "Hyint timeseries for selected indices and regions ", + "according to selected datasets" + ) + if (plot_type == 15) { + caption <- paste0( + "Hyint trends for multiple indices and regions ", + "according to selected datasets" + ) + } + if (length(prov_info[[figname]]) == 0) { + anc_list <- flatten_lists(prov_info[[infile]]$ancestors) + prov_fig_now <- list( + figname = figname, + caption = caption, + model_idx = list(model_idx), + ancestors = anc_list + ) + prov_info[[figname]] <- prov_fig_now + } else { + if (is.na(match(model_idx, prov_info[[figname]]$model_idx))) { + prov_info[[figname]]$model_idx <- + c(prov_info[[figname]]$model_idx, model_idx) + prov_info[[figname]]$ancestors <- + c(prov_info[[figname]]$ancestors, + prov_info[[infile]]$ancestors) + } + } + } + + for (var in field_names) { + ivar <- which(field_names == var) + for (stype in var_type[1:2]) { + svar <- paste0(var, "_", stype) + rfield <- ncdf_opener(infile, svar, "region", timedimname, + rotate = "no" + ) + assign(svar, rfield) # assign field data to field name + nc <- nc_open(infile) + dlname <- ncatt_get(nc, svar, "long_name") + dunits <- ncatt_get(nc, svar, "units") + field_long_names[ivar] <- dlname$value + field_units[ivar] <- dunits$value + nc_close(nc) + } + for (stype in var_type[3:4]) { + svar <- paste0(var, "_", stype) + rfield <- + ncdf_opener(infile, svar, "region", "coefficients", + rotate = "no" + ) + assign(svar, rfield) # assign field data to field name + } + } + + # store size of time and region arrays + time <- ncdf_opener(infile, timedimname, + timedimname, + rotate = "no" + ) + 1950 + regions <- + ncdf_opener(infile, "regions", "region", "boundaries", + rotate = "no" + ) + # setup time selection for trends + rettimes <- which(!is.na(time)) + if (trend_years[1] != F) { + # apply trend to limited time interval if required + rettimes_tmp <- + (time >= trend_years[1]) & time <= trend_years[2] + rettimes <- which(rettimes_tmp) + if (length(trend_years) == 4) { + # apply trend also to second time interval if required + rettime2_tmp <- + (time >= trend_years[3]) & time <= trend_years[4] + rettimes2 <- which(rettime2_tmp) + } + } + xlim <- c(min(time), max(time)) + if (trend_years_only & (trend_years[1] != F)) { + xlim <- trend_years[1:2] + } + + + #-----------------Producing 
figures------------------------#
+
+        print(paste0(diag_base, ": starting figures"))
+
+        # LOOP over fields
+        for (field in field_names) {
+          ifield <- which(field == field_names)
+
+          if (noplot == 2 & model_idx == 1) {
+            minmax_levels <- c(NA, NA)
+            minmax_tlevels <- c(NA, NA)
+            assign(paste0(field, "_levels"), minmax_levels)
+            assign(paste0(field, "_tlevels"), minmax_tlevels)
+          }
+
+          if (anyNA(title_unit_m[ifield, 1:3])) {
+            title_unit_m[ifield, 1] <- field_names[ifield]
+            title_unit_m[ifield, 2:3] <- field_long_names[ifield]
+            title_unit_m[ifield, 4] <- field_units[ifield]
+          }
+
+          # TIMESERIES: get timeseries and trends
+          tfield_exp <- get(paste0(field, "_", var_type[1]))
+          tfield_exp_sd <- get(paste0(field, "_", var_type[2]))
+          trend_exp <- get(paste0(field, "_", var_type[3]))
+          trend_exp_stat <- get(paste0(field, "_", var_type[4]))
+
+          if (length(dim(tfield_exp)) < 2) {
+            # reshape data to matrix if regions has only one element
+            tfield_exp <- array(tfield_exp, c(1, length(tfield_exp)))
+            tfield_exp_sd <-
+              array(tfield_exp_sd, c(1, length(tfield_exp_sd)))
+            trend_exp <- array(trend_exp, c(1, length(trend_exp)))
+            trend_exp_stat <-
+              array(trend_exp_stat, c(1, length(trend_exp_stat)))
+          }
+          if (plot_type == 13 | plot_type == 15) {
+            # get only first region if working on multimodel
+            tfield_exp <- tfield_exp[1, , drop = F]
+            tfield_exp_sd <- tfield_exp_sd[1, , drop = F]
+            trend_exp <- trend_exp[1, , drop = F]
+            trend_exp_stat <- trend_exp_stat[1, , drop = F]
+          }
+
+          if (is.na(levels_m[ifield, 1]) |
+            is.na(levels_m[ifield, 2])) {
+            print("No value for range: assigning min and max")
+            tmp.levels <- c(
+              min(tfield_exp, na.rm = T),
+              max(tfield_exp, na.rm = T)
+            )
+            if (add_trend_sd | add_trend_sd_shade) {
+              tmp.levels <- c(
+                min(tfield_exp - tfield_exp_sd, na.rm = T),
+                max(tfield_exp + tfield_exp_sd, na.rm = T)
+              )
+            }
+          } else {
+            tmp.levels <- c(levels_m[ifield, 1], levels_m[ifield, 2])
+          }
+
+          if (nyears < 20 & (!autolevels)) {
+            levrange <- max(tmp.levels, na.rm = T) - min(tmp.levels, na.rm = T)
+            meanrange <- mean(tmp.levels, na.rm = T)
+            tmp.levels <- c(
+              meanrange - levrange * 1.5,
+              meanrange + levrange * 1.5
+            )
+          }
+
+          # Startup graphics for one timeseries in one figure
+          if (plot_type == 11) {
+            figname <- getfilename_figure(
+              plot_dir,
+              field,
+              year1,
+              year2,
+              model_idx,
+              season,
+              "",
+              "multiregion",
+              label_figname,
+              "timeseries_single",
+              output_file_type
+            )
+            graphics_startup(figname, output_file_type, plot_size)
+            par(
+              cex.main = 1.3,
+              cex.axis = 1.2,
+              cex.lab = 1.2,
+              mar = c(4, 4, 2, 2),
+              oma = c(1, 1, 1, 1)
+            )
+          }
+
+          # Actual plotting
+          if ((plot_type == 11) |
+            (plot_type == 12) | (plot_type == 13)) {
+            if (plot_type != 11) {
+              # set active panel
+              par_row <- (ifield - 1) %/% npancol + 1
+              par_col <- (ifield - 1) %% npancol + 1
+              par(mfg = c(par_row, par_col, npanrow, npancol))
+            }
+
+            # scale autolevels if required
+            if (autolevels && (autolevels_scale != 1)) {
+              autorange <- max(tmp.levels, na.rm = T) -
+                min(tmp.levels, na.rm = T)
+              meanrange <- mean(tmp.levels, na.rm = T)
+              tmp.levels <-
+                c(
+                  meanrange - autorange * autolevels_scale,
+                  meanrange + autorange * autolevels_scale
+                )
+            }
+
+            if (noplot == 2 & autolevels & plot_type == 13) {
+              # Recursively store min and max values to be plotted
+              # NOTE: this works as long as only one region at a time is used
+              minmax_levels <- get(paste0(field, "_levels"))
+              minmax_levels[1] <-
+                min(c(minmax_levels[1], tmp.levels[1]),
+                  na.rm = T
+                )
+              minmax_levels[2] <-
+                max(c(minmax_levels[2], 
tmp.levels[2]), + na.rm = T + ) + assign(paste0(field, "_levels"), minmax_levels) + next + } + + if (noplot == 1 & autolevels & plot_type == 13) { + tmp.levels[1] <- (get(paste0(field, "_levels")))[1] + tmp.levels[2] <- (get(paste0(field, "_levels")))[2] + } + + # Base plot + if (!(plot_type == 13 & model_idx > 1) & ilabel == 1) { + ylab <- paste0(title_unit_m[ifield, 1]) + if (title_unit_m[ifield, 4] != "") { + ylab <- paste0(ylab, title_unit_m[ifield, 4]) + } + plot( + time, + type = "n", + ylim = c(tmp.levels[1], tmp.levels[2]), + xlim = xlim, + xlab = "Year", + ylab = ylab, + main = title_unit_m[ifield, 3], + xaxs = "i" + ) + # store panel plot limits + plot_limits[, ifield] <- par("usr") + } + + # Update plot limits in case panel has changed + par(usr = plot_limits[, ifield]) + + # LOOP over regions to plot timeseries + if (add_trend_sd_shade) { + for (ireg in 1:nregions) { + iselreg <- selregions[ireg] + shade_area <- c( + tfield_exp[ireg, ] + tfield_exp_sd[ireg, ], + rev(tfield_exp[ireg, ] - tfield_exp_sd[ireg, ]) + ) + shade_area[shade_area < tmp.levels[1]] <- + tmp.levels[1] + polygon(c(time, rev(time)), + shade_area, + col = "grey95", + border = NA + ) + } + } + for (ireg in 1:nregions) { + iselreg <- selregions[ireg] + col_ts <- ireg + if (length(label) > 1) { + col_ts <- c( + "dodgerblue4", + "darkseagreen4", + "goldenrod4", + "coral4", + "grey", + "mediumorchid1", + "black" + )[ilabel] + } + if (plot_type == 13) { + col_ts <- model_idx + } + if (add_trend_sd) { + lines(time, + tfield_exp[ireg, ] + tfield_exp_sd[ireg, ], + lty = 3, + col = col_ts + ) + lines(time, + tfield_exp[ireg, ] - tfield_exp_sd[ireg, ], + lty = 3, + col = col_ts + ) + } + if (add_tseries_lines) { + lines(time, tfield_exp[ireg, ], col = col_ts) + } + points(time, tfield_exp[ireg, ], col = col_ts) + if (add_trend) { + lines( + time[rettimes], + trend_exp[ireg, 1] + trend_exp[ireg, 2] + * time[rettimes], + col = col_ts, + lwd = 2 + ) + if (length(trend_years) == 4) { + # apply trend also to second time interval if required + lines( + time[rettimes2], + trend_exp[ireg, 3] + trend_exp[ireg, 4] + * time[rettimes2], + col = col_ts, + lwd = 2 + ) + } + } + } + if (abs(add_legend) & ((plot_type == 11) | + (plot_type == 12)) & (ifield == 1)) { + pos_legend <- c( + plot_limits[1, ifield] + (plot_limits[2, ifield] + - plot_limits[1, ifield]) * xy_legend[1], + plot_limits[3, ifield] + (plot_limits[4, ifield] + - plot_limits[3, ifield]) * xy_legend[2] + ) + ncol <- 1 + if (add_legend < 0) { + ncol <- nregions + } + if (add_legend > 1) { + ncol <- add_legend + } + legend( + pos_legend[1], + pos_legend[2], + region_codes[selregions], + text.col = (1:nregions), + ncol = ncol + ) + } + box(lwd = 2) + if (plot_type == 11) { + graphics_close(figname) + # Store data for provenance + caption <- paste0( + "Hyint timeseries for index ", + field, + " over selected regions according to ", + models_name[model_idx] + ) + anc_list <- flatten_lists(prov_info[[infile]]$ancestors) + prov_fig_now <- list( + figname = figname, + caption = caption, + model_idx = list(model_idx), + ancestors = anc_list + ) + prov_info[[figname]] <- prov_fig_now + } + } + if ((plot_type == 14) | (plot_type == 15)) { + # plot trend coefficients for different regions, + # one panel per field + if (anyNA(tlevels_m[ifield, ])) { + print("No value for range: assigning min and max") + ylim <- c( + min(trend_exp_stat[, 1] - trend_exp_stat[, 2], na.rm = T), + max(trend_exp_stat[, 1] + trend_exp_stat[, 2], na.rm = T) + ) + # scale autolevels if required + if 
(autolevels && (autolevels_scale_t != 1)) { + autorange <- max(ylim, na.rm = T) - min(ylim, na.rm = T) + meanrange <- mean(ylim, na.rm = T) + ylim <- c( + meanrange - autorange * autolevels_scale_t, + meanrange + autorange * autolevels_scale_t + ) + } + } else { + ylim <- tlevels_m[ifield, ] + } + + if (trend_years[1] != F) { + xlim <- trend_years[1:2] + } + ylab <- paste0("Avg trend") + # change y scale to % and 1/100 years + if (scalepercent & (field != "hyint")) { + trend_exp <- trend_exp * 100 # trend coefficients + trend_exp_stat[, 2] <- + trend_exp_stat[, 2] * 100 # standard error + ylab <- paste0(ylab, " (%)") + ylim <- ylim * 100 + } + if (scale100years) { + trend_exp <- trend_exp * 100 # trend coefficients + trend_exp_stat[, 2] <- + trend_exp_stat[, 2] * 100 # standard error + ylab <- paste0(ylab, " (1/100 years)") + ylim <- ylim * 100 + } + nx <- nregions + xlab <- "Regions" + xlabels <- region_codes[selregions] + if (plot_type == 15) { + nx <- nmodels + xlab <- "" # "Models" + xlabels <- models_name + } + # hereafter xregions is the x which also holds models + # for plottype 15 + xregions <- 1:nx + + # Actual plotting + # set active panel + par_row <- (ifield - 1) %/% npancol + 1 + par_col <- (ifield - 1) %% npancol + 1 + par(mfg = c(par_row, par_col, npanrow, npancol)) + + if (noplot == 2 & autolevels & plot_type == 15) { + # Recursively store min and max values to be plotted + # NOTE: this works as long as only one region at the time is used + minmax_tlevels <- get(paste0(field, "_tlevels")) + minmax_tlevels[1] <- min(c(minmax_tlevels[1], ylim[1]), + na.rm = T + ) + minmax_tlevels[2] <- max(c(minmax_tlevels[2], ylim[2]), + na.rm = T + ) + assign(paste0(field, "_tlevels"), minmax_tlevels) + next + } + if (noplot == 1 & autolevels & plot_type == 15) { + ylim[1] <- (get(paste0(field, "_tlevels")))[1] + ylim[2] <- (get(paste0(field, "_tlevels")))[2] + } + + # Base plot + if (!(plot_type == 15 & model_idx > 1) & ilabel == 1) { + plot( + xregions, + xregions, + type = "n", + pch = 22, + axes = F, + xlab = xlab, + ylab = ylab, + ylim = ylim, + main = ( + paste0( + title_unit_m[ifield, 1], + " trend (", xlim[1], "-", xlim[2], ")" + ) + ) + ) + box() + # store panel plot limits + plot_limits[, ifield] <- par("usr") + } + + # Update plot limits in case panel has changed + par(usr = plot_limits[, ifield]) + for (ireg in 1:nregions) { + iregion <- ireg + ixregion <- ireg + if (plot_type == 15) { + ixregion <- model_idx + } + # add errorbar (standard error) + if (!anyNA(trend_exp_stat[iregion, ])) { + arrows( + xregions[ixregion], + trend_exp[iregion, 2] - + trend_exp_stat[iregion, 2], + xregions[ixregion], + trend_exp[iregion, 2] + trend_exp_stat[iregion, 2], + length = 0.05, + angle = 90, + code = 3 + ) + points( + xregions[ixregion], + trend_exp[iregion, 2], + pch = 22, + col = "grey40", + bg = "white", + cex = 2 + ) + # add filled points for significant (95% level) + col90 <- "grey70" + col95 <- "dodgerblue3" + if (length(label) > 1) { + col90 <- c( + "dodgerblue3", + "darkseagreen3", + "goldenrod3", + "coral3", + "grey", + "mediumorchid1", + "black" + ) + col95 <- + c( + "dodgerblue4", + "darkseagreen4", + "goldenrod4", + "coral4", + "grey", + "mediumorchid1", + "black" + ) + } + if (trend_exp_stat[iregion, 4] <= 0.1) { + points( + xregions[ixregion], + trend_exp[iregion, 2], + pch = 22, + col = col90[ilabel], + bg = col90[ilabel], + cex = 2 + ) + } + if (trend_exp_stat[iregion, 4] <= 0.05) { + points( + xregions[ixregion], + trend_exp[iregion, 2], + pch = 22, + col = col95[ilabel], + 
bg = col95[ilabel], + cex = 2 + ) + } + } else { + print(paste( + "MISSING VALUES in index ", + field, + ", region ", + region_codes[iregion] + )) + print(trend_exp_stat[iregion, ]) + } + } + if (length(label) > 1) { + retsig90 <- which(trend_exp_stat[, 4] < 0.1) + if (!is.na(retsig90[1])) { + points( + xregions[retsig90], + trend_exp[retsig90, 2], + pch = 22, + col = "grey70", + bg = "grey70", + cex = 2 + ) + } + retsig95 <- which(trend_exp_stat[, 4] < 0.05) + if (!is.na(retsig95[1])) { + points( + xregions[retsig95], + trend_exp[retsig95, 2], + pch = 22, + col = "dodgerblue3", + bg = "dodgerblue3", + cex = 2 + ) + } + } + box() + if (!((plot_type == 15) & (model_idx > 1))) { + if (add_zeroline & (ylim[1] != 0)) { + lines( + c(-1, nx + 1), + c(0, 0), + lty = 2, + lwd = 1.5, + col = "grey40" + ) + } + las <- 1 + cex.axis <- 1 + if (plot_type == 15) { + las <- 2 + cex.axis <- 0.8 + } + axis( + 1, + labels = xlabels, + at = xregions, + las = las, + cex.axis = cex.axis + ) + axis(2) + } + } # close if on plot_type 14 and 15 + } # close loop over field + } # close loop over label + if ((plot_type == 12) | (plot_type == 14)) { + graphics_close(figname) + # Store data for provenance + caption <- + paste0( + "Hyint timeseries for selected indices and regions ", + "according to ", + models_name[model_idx] + ) + if (plot_type == 14) { + caption <- paste0( + "Hyint trends for multiple indices and regions ", + "according to ", + models_name[model_idx] + ) + } + anc_list <- flatten_lists(prov_info[[infile]]$ancestors) + prov_fig_now <- list( + figname = figname, + caption = caption, + model_idx = list(model_idx), + ancestors = anc_list + ) + prov_info[[figname]] <- prov_fig_now + } + } # close loop over model + } # close miniloop over noplot + + # Legend for plot_type 13 + if (abs(add_legend) & (plot_type == 13)) { + ncol <- 1 + if (add_legend > 1) { + ncol <- add_legend + } + if (add_legend < 0) { + ncol <- nmodels + } + # for (ifield in 1:length(field_names)) { + ifield <- 1 + # set active panel + par_row <- (ifield - 1) %/% npancol + 1 + par_col <- (ifield - 1) %% npancol + 1 + par( + mfg = c(par_row, par_col, npanrow, npancol), + usr = plot_limits[, ifield] + ) + pos_legend <- c( + plot_limits[1, ifield] + (plot_limits[2, ifield] - + plot_limits[1, ifield]) * xy_legend[1], + plot_limits[3, ifield] + (plot_limits[4, ifield] - + plot_limits[3, ifield]) * xy_legend[2] + ) + legend_label <- "" + if (tag_legend[1]) { + legend_label <- models_name + } + if (tag_legend[2]) { + legend_label <- paste(legend_label, + models_experiments, + sep = " " + ) + } + if (tag_legend[3]) { + legend_label <- paste(legend_label, + models_ensemble, + sep = " " + ) + } + legend( + pos_legend[1], + pos_legend[2], + legend = legend_label, + text.col = (1:nmodels), + ncol = ncol, + cex = 0.9 + ) + print(legend_label) + print("legend_label") + } + if ((plot_type == 13) | (plot_type == 15)) { + graphics_close(figname) + } + return(prov_info) +} # close function diff --git a/esmvaltool/diag_scripts/hyint/hyint_preproc.R b/esmvaltool/diag_scripts/hyint/hyint_preproc.R new file mode 100644 index 0000000000..c8350a061d --- /dev/null +++ b/esmvaltool/diag_scripts/hyint/hyint_preproc.R @@ -0,0 +1,47 @@ +###################################################### +#---------Regridding preprocessing for HyInt---------# +#-------------E. 
Arnone (Oct 2017)-------------------# +###################################################### + +hyint_preproc <- function(work_dir, + model_idx, + ref_idx, + climofile, + regfile, + rgrid) { + print(paste0(diag_base, ": pre-processing file: ", climofile)) + + # add absolute axis, remove leap year days, regrid if needed + # cdo delete and copy do not like files with whitespace + + if (rgrid != F) { + if (rgrid == "REF") { + rgrid <- climofiles[ref_idx] + gridf <- tempfile() + cdo("griddes", input = rgrid, stdout = gridf) + } else { + gridf <- rgrid + } + tempf <- cdo("remapscon2", args = gridf, input = climofile) + unlink(gridf) + } else { + tempf <- cdo("addc", args = "0", input = climofile) + } + + cdo("-copy", + options = "-L -f nc -a", + input = tempf, + output = regfile + ) + + unlink(tempf) + + # generate grid file + gridfile <- + getfilename_indices(work_dir, diag_base, model_idx, grid = T) + cdo("griddes", input = regfile, stdout = gridfile) + + print(paste0(diag_base, ": pre-processed file: ", regfile)) + + return(0) +} diff --git a/esmvaltool/diag_scripts/hyint/hyint_trends.R b/esmvaltool/diag_scripts/hyint/hyint_trends.R new file mode 100644 index 0000000000..b9e5e518ac --- /dev/null +++ b/esmvaltool/diag_scripts/hyint/hyint_trends.R @@ -0,0 +1,335 @@ +###################################################### +#-------------Trends routine for HyInt---------------# +#-------------E. Arnone (June 2017)------------------# +###################################################### + +# MAIN TRENDS FUNCTION +hyint_trends <- function(work_dir, model_idx, season, prov_info) { # nolint + # setup useful strings + var_type <- c("tseries", "tseries-sd", "trend", "trend-stat") + var_type_long <- c( + "Timeseries", + "St.dev of timeseries", + "Trend coeff. for two intervals ", + "Trend statistics for trend 1 (Estimate, Std. 
Error, t value, Pr(>|t|))" + ) + + # setup parameters + year1 <- models_start_year[model_idx] + year2 <- models_end_year[model_idx] + + # set main paths + outfile <- getfilename_trends(work_dir, label, model_idx, season) + + # Define regions to be used + nregions <- length(selregions) + + # Define fields to be used (main list loaded from cfg_file) + if (selfields[1] != F) { + field_names <- field_names[selfields] + } + + # Years to be considered based on namelist and cfg_file + years <- year1:year2 + nyears <- length(years) + + print(paste0(diag_base, ": starting timeseries calculation")) + + #-----------------Loading data-----------------------# + # open experiment field + + gridfile <- + getfilename_indices(work_dir, diag_base, model_idx, grid = T) + infile <- + getfilename_indices(work_dir, diag_base, model_idx, season) + # test if file contains all requested variables and + # keep file open for reading attributes + nc <- nc_open(infile) + nc_att_glob <- ncatt_get(nc, 0) + if (!all(is.element(field_names, names(nc$var)))) { + missing <- (field_names)[!is.element(field_names, names(nc$var))] + print(paste( + "HyInt_trends: missing variable in input indices file: ", + missing + )) + nc_close(nc) + stop("HyInt: check field_names list in configuration file") + } + nc_close(nc) + + # Get seaLandElevation mask + if (masksealand) { + topofile <- + getfilename_indices(work_dir, diag_base, ref_idx, topo = T) + if (!file.exists(topofile)) { + create_landseamask( + regrid = gridfile, + ref_file = infile, + loc = run_dir, + regridded_topo = topofile, + topo_only = T + ) + } + relevation <- ncdf_opener(topofile, "topo", rotate = "no") + } + + # remove desert areas if required + # (mean annual precipitation <0.5 mm, Giorgi et al. 2014) + if (removedesert) { + pry <- ncdf_opener(infile, "pry", rotate = "no") + retdes <- which(pry < 0.5) + pry[retdes] <- NA + # create mask with NAs for deserts and 1's for not-desert + retdes2D <- apply(pry * 0, c(1, 2), sum) + 1 + retdes3D <- replicate(dim(pry)[length(dim(pry))], retdes2D) + } + + for (var in field_names) { + rfield <- ncdf_opener(infile, var, rotate = "no") + print("===========================================") + print(paste(infile, var)) + + if (removedesert) { + rfield <- rfield * retdes3D + } + if (masksealand) { + rfield <- apply_elevation_mask(rfield, relevation, sealandelevation, + reverse = reverse_masksealand + ) + } + # store size of time array + ntime <- length(rfield[1, 1, ]) + + #-----------------Calculating timeseries and trends-----------------------# + + # TIMESERIES: + # - select required region and calculate timeseries + # - timeseries are temporarily stored as a "region x time" matrix + # - trends are temporarily stored as a "region x coefficient" matrix + tfield <- matrix(nrow = nregions, ncol = ntime) + tfield_sd <- matrix(nrow = nregions, ncol = ntime) + rtrend <- matrix(nrow = nregions, ncol = 4) + rtrend_stat <- matrix(nrow = nregions, ncol = 4) + for (ireg in 1:nregions) { + iselreg <- selregions[ireg] + # extract data and perform averages + print(paste("Working on ", region_names[iselreg])) + + tfield[ireg, ] <- calc_region_timeseries(ics, ipsilon, rfield, + regions[iselreg, ], + weighted_mean = weight_tseries + ) + tfield_sd[ireg, ] <- + calc_region_timeseries(ics, ipsilon, rfield, + regions[iselreg, ], + calc_sd = T + ) + } + + # setup time array + times <- as.numeric(year1) + 1:ntime - 1 + rettimes <- seq_along(times) + if (trend_years[1] != F) { + # apply trend to limited time interval if required + rettimes <- 
which((times >= trend_years[1]) &
+ times <= trend_years[2])
+ if (length(trend_years) == 4) {
+ # apply trend also to second time interval if required
+ rettimes2 <- which((times >= trend_years[3]) &
+ times <= trend_years[4])
+ }
+ }
+
+ # LOOP through regions to calculate trends as required
+ for (ireg in 1:nregions) {
+ iselreg <- selregions[ireg]
+ if (lm_trend) {
+ # linear regression
+ print("-----------------------------------------------------")
+ print(paste(var, region_names[iselreg]))
+ temp.tfield <- tfield[ireg, rettimes]
+ if (length(which(!is.na(temp.tfield))) < 2) {
+ print("less than 2 points in selected region - skipping")
+ } else {
+ lm_fit <- lm(temp.tfield ~ times[rettimes])
+ lm_sum <- summary(lm_fit)
+ # store trend coefficients (intercept and linear coef.)
+ rtrend[ireg, 1:2] <- lm_fit$coefficients
+ # store trend coef., standard error, t value, Pr(>|t|)
+ rtrend_stat[ireg, ] <- lm_sum$coefficients[2, ]
+ print(lm_sum$coefficients[2, ])
+ if (length(trend_years) == 4) {
+ # apply trend also to second time interval if required
+ temp_tfield2 <- tfield[ireg, rettimes2]
+ if (length(which(!is.na(temp_tfield2))) < 2) {
+ print("less than 2 points in second trend over selected region - skipping")
+ } else {
+ lm_fit2 <- lm(temp_tfield2 ~ times[rettimes2])
+ # store 2nd interval trend coefficients
+ rtrend[ireg, 3:4] <- lm_fit2$coefficients
+ }
+ }
+ }
+ }
+ }
+
+ # assign timeseries and trends to named field variables
+ assign(paste0(var, "_tseries"), tfield)
+ assign(paste0(var, "_tseries-sd"), tfield_sd)
+ assign(paste0(var, "_trend"), rtrend)
+ assign(paste0(var, "_trend-stat"), rtrend_stat)
+ } # close loop over fields
+
+ # store field variables in named lists
+ stseries_list <- c(
+ paste0(field_names, "_tseries"),
+ paste0(field_names, "_tseries-sd"),
+ paste0(field_names, "_trend"),
+ paste0(field_names, "_trend-stat")
+ )
+ rtseries_list <- mget(stseries_list)
+ names(rtseries_list) <- stseries_list
+
+ ##########################################################
+ #------------------------Save to NetCDF------------------#
+ ##########################################################
+
+ # saving output to netcdf files
+ print(paste0(diag_base, "_timeseries: saving data to NetCDF file:"))
+
+ # dimensions definition
+ var_region <- 1:nregions
+ regiondim <- ncdim_def("region", "number", var_region)
+ coeffdim <- ncdim_def("coefficients", "number", 1:4)
+ boundarydim <- ncdim_def("boundaries", "degrees", 1:4)
+ timedim <-
+ ncdim_def(timedimname,
+ "years since 1950-01-01 00:00:00",
+ (years - 1950),
+ unlim = T
+ )
+
+ # variables definition
+ for (var in field_names) {
+ for (itype in seq_along(var_type)) {
+ svar <- paste0(var, "_", var_type[itype])
+ rfield <- get(svar, rtseries_list)
+ rfield[is.nan(rfield)] <- NA
+ # copy and update attributes
+ metadata <- getmetadata_indices(var, infile)
+ long_name <- metadata$long_name
+ description <-
+ paste0(var_type_long[itype], " of ", metadata$long_name)
+ units <- metadata$units
+ missval <- metadata$missing_value
+ # variable definitions
+ var_ncdf <- ncvar_def(
+ svar,
+ units,
+ list(regiondim, timedim),
+ missval,
+ longname = long_name,
+ prec = "single",
+ compression = 1
+ )
+ if (itype > 2) {
+ # trends
+ var_ncdf <- ncvar_def(
+ svar,
+ units,
+ list(regiondim, coeffdim),
+ missval,
+ longname = long_name,
+ prec = "single",
+ compression = 1
+ )
+ }
+ assign(paste0("var", svar), var_ncdf)
+ assign(paste0("field", svar), rfield)
+ assign(paste0(svar, "_", "description"), description)
+ }
+ }
+
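+
+ # The assign()/get() pattern above creates one ncvar_def() per
+ # "<field>_<var_type>" combination; the mget() call further below
+ # collects them into the list handed to nc_create().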
+ varregions <- ncvar_def(
+ "regions",
+ "degrees",
+ list(regiondim, boundarydim),
+ -999,
+ "region boundaries",
+ prec = "single",
+ compression = 1
+ )
+ regions_description <- "regions over which averages are performed"
+ fieldregions <- regions[selregions, ]
+ fieldregion_names <- region_names[selregions]
+ fieldregion_codes <- region_codes[selregions]
+
+ # Netcdf file creation
+ print(paste0(diag_base, ": saving output to ", outfile))
+ namelist <- c("regions", stseries_list)
+ varnamelist <- paste0("var", c(namelist))
+ nclist <- mget(varnamelist)
+ ncfile <- nc_create(outfile, nclist)
+
+ # put variables into the ncdf file
+ for (var in namelist) {
+ ndims <- get(paste0("var", var))$ndims
+ tmp.field <- get(paste0("field", var))
+ ncvar_put(ncfile,
+ var,
+ tmp.field,
+ start = rep(1, ndims),
+ count = rep(-1, ndims)
+ )
+ ncatt_put(ncfile, var, "description", get(paste0(var, "_description")))
+ }
+
+ # put additional attributes into dimension and data variables
+ ncatt_put(
+ ncfile,
+ "regions",
+ "regionnames",
+ paste(fieldregion_names,
+ collapse = " "
+ )
+ )
+ ncatt_put(
+ ncfile,
+ "regions",
+ "region_codes",
+ paste(fieldregion_codes, collapse = " ")
+ )
+
+ nc_close(ncfile)
+
+ # Set provenance for this output file
+ caption <- paste0(
+ "Hyint timeseries and trends for years ",
+ year1,
+ " to ",
+ year2,
+ " according to ",
+ models_name[model_idx]
+ )
+
+ anc_list <- flatten_lists(prov_info[[infile]]$ancestors)
+ xprov <- list(
+ ancestors = anc_list,
+ model_idx = list(model_idx),
+ caption = caption
+ )
+
+ # Store provenance in main provenance list
+ prov_info[[outfile]] <- xprov
+
+ print(paste(diag_base, ": timeseries netCDF file saved"))
+ return(prov_info)
+}
diff --git a/esmvaltool/diag_scripts/iht_toa/poisson_solver.py b/esmvaltool/diag_scripts/iht_toa/poisson_solver.py
new file mode 100644
index 0000000000..886d5fc616
--- /dev/null
+++ b/esmvaltool/diag_scripts/iht_toa/poisson_solver.py
@@ -0,0 +1,293 @@
+# (C) Crown Copyright 2023, the Met Office.
+"""Poisson solver for the full ocean-atmosphere column.
+
+The Poisson equation is solved numerically using the bi-conjugate
+gradient stabilized (BiCGSTAB) method.
+
+The solution is achieved when the difference between the input field (radiative
+flux) and the Laplacian of the output field is less than the stated tolerance.
+If the solver fails to converge, the tolerance can be increased.
+
+Convergence is achieved faster by using a preconditioner on the output field.
+
+The heat transport is calculated as the gradient of the energy flux potential,
+the output of the Poisson solver.
+"""
+
+import numpy as np
+from numba import jit
+
+
+def swap_bounds(array):
+ """Extend the array by one in all directions.
+
+ As the array is periodic it allows for easier computations at
+ boundaries.
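+
+ A sketch of the intended use (shapes are illustrative)::
+
+ field = np.zeros((nlat, nlon)) # hypothetical interior field
+ halo = np.zeros((nlat + 2, nlon + 2))
+ halo[1:-1, 1:-1] = field
+ halo = swap_bounds(halo) # halo[:, 0] now equals halo[:, -2]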
+ """ + shape0, shape1 = np.array(array.shape) - 2 + wrap_point = int(shape1 / 2 + 1) + for i in range(1, shape1 + 1): + array[0, i] = array[1, wrap_point] + array[shape0 + 1, i] = array[shape0, wrap_point] + wrap_point += 1 + if wrap_point > shape1: + wrap_point = 1 + + array[:, 0] = array[:, shape1] + array[:, shape1 + 1] = array[:, 1] + + return array + + +def dot_prod(a_matrix, b_matrix): + """Calculate dot product of two matrices only over source term size.""" + shape0, shape1 = np.array(a_matrix.shape) - 2 + return (a_matrix[1:shape0 + 1, 1:shape1 + 1] * + b_matrix[1:shape0 + 1, 1:shape1 + 1]).sum() + + +def precon(x_matrix, m_matrix): + """Preconditioner. + + This is a wrapper to two steps that are optimised using jit. + It implements the preconditioning step of van der Vorst, H. A., 1992. + https://doi.org/10.1137/0913035. + """ + cx_matrix = np.zeros(np.array(x_matrix.shape)) + precon_a(x_matrix, m_matrix[1], m_matrix[2], m_matrix[4], cx_matrix) + cx_matrix = swap_bounds(cx_matrix) + precon_b(m_matrix[0], m_matrix[3], cx_matrix) + cx_matrix = swap_bounds(cx_matrix) + return cx_matrix + + +@jit(nopython=True) +def precon_a(x_matrix, m_w, m_s, m_p, cx_matrix): + """First step of preconditioner.""" + shape0, shape1 = np.array(cx_matrix.shape) - 2 + for j in range(1, shape0 + 1): + for i in range(1, shape1 + 1): + cx_matrix[j, i] = m_p[j, i] * (x_matrix[j, i] - + m_s[j, i] * cx_matrix[j - 1, i] - + m_w[j, i] * cx_matrix[j, i - 1]) + + +@jit(nopython=True) +def precon_b(m_e, m_n, cx_matrix): + """Second step of preconditioner.""" + shape0, shape1 = np.array(cx_matrix.shape) - 2 + for j in range(shape0, 0, -1): + for i in range(shape1, 0, -1): + cx_matrix[j, i] = (cx_matrix[j, i] - + m_e[j, i] * cx_matrix[j, i + 1] - + m_n[j, i] * cx_matrix[j + 1, i]) + + +class SphericalPoisson: + """Poisson solver over the sphere. + + Solve Poisson equation for a given source term (forcing) and + calculate meridional heat transport (MHT). + """ + + def __init__(self, logger, source, tolerance=2.0e-4): + """Initialise solver with source field, metrics and matrices.""" + self.logger = logger + self.source = source + self.tolerance = tolerance + self.energy_flux_potential = None + self.meridional_heat_transport = None + logger.info("Initialising Poisson solver.") + self.set_matrices() + + def set_matrices(self): + """Calculate A and M matrices. + + A is the matrix that defines the five-point stencil (Eq. 8). The + A_matrix are the values are the contributions from each of the + four neighbouring cells: e,w,s,n,p. + """ + # Calculate metrics hpi and hvj + src_shape = np.array(self.source.shape) + hpi = np.zeros(src_shape[0]) + hvj = np.zeros(src_shape[0] + 1) + deltay = np.pi / src_shape[0] + yyy = -0.5 * np.pi + 0.5 * deltay + hvj[0] = 0.0 + for j in range(0, src_shape[0]): + hpi[j] = np.cos(yyy) + hvj[j + 1] = np.cos(yyy + 0.5 * deltay) + yyy += deltay + hvj[-1] = 0.0 + + # Storing the full matrix + a_matrix = np.zeros((5, *src_shape)) + + # ILU factors + m_matrix = np.zeros((5, *(src_shape + 1))) + + # Spherical Laplacian variables + aaa = 1.0 / ((2.0 * np.pi / src_shape[1])**2.) + bbb = 1.0 / ((np.pi / src_shape[0])**2.) 
+
+ # First calculate the Poisson equations 5-point stencil
+ # A_w is the contribution from i-1, A_e is from i+1,
+ # A_s is j-1, A_n is j+1, and A_p is the diagonal
+ for j in range(0, src_shape[0]):
+ txa = aaa / hpi[j]**2.0
+ tyb = bbb / hpi[j]
+
+ for i in range(0, src_shape[1]):
+ a_matrix[0, j, i] = txa
+ a_matrix[1, j, i] = txa
+ a_matrix[2, j, i] = tyb * hvj[j]
+ a_matrix[3, j, i] = tyb * hvj[j + 1]
+ a_matrix[4, j, i] = -a_matrix[0:4, j, i].sum()
+
+ # ILU/SIP preconditioner factors: alf = 0.0 is ILU
+ alf = 0.9
+ m_matrix[4] += 1.0
+
+ for j in range(1, src_shape[0] + 1):
+ for i in range(1, src_shape[1] + 1):
+ m_matrix[2, j, i] = (a_matrix[2, j - 1, i - 1] /
+ (1.0 + alf * m_matrix[0, j - 1, i]))
+
+ m_matrix[1, j, i] = (a_matrix[1, j - 1, i - 1] /
+ (1.0 + alf * m_matrix[3, j, i - 1]))
+
+ m_matrix[4, j, i] = (a_matrix[4, j - 1, i - 1] -
+ m_matrix[2, j, i] *
+ (m_matrix[3, j - 1, i] -
+ alf * m_matrix[0, j - 1, i]) -
+ m_matrix[1, j, i] *
+ (m_matrix[0, j, i - 1] -
+ alf * m_matrix[3, j, i - 1]))
+
+ m_matrix[4, j, i] = 1.0 / m_matrix[4, j, i]
+
+ m_matrix[0, j, i] = ((a_matrix[0, j - 1, i - 1] -
+ alf * m_matrix[2, j, i] *
+ m_matrix[0, j - 1, i]) *
+ m_matrix[4, j, i])
+
+ m_matrix[3, j, i] = ((a_matrix[3, j - 1, i - 1] -
+ alf * m_matrix[1, j, i] *
+ m_matrix[3, j, i - 1]) *
+ m_matrix[4, j, i])
+
+ self.a_matrix = a_matrix
+ self.m_matrix = m_matrix
+
+ def solve(self, max_iterations=1000):
+ """Solve equation for the source term.
+
+ Bi-conjugate gradient stabilized numerical solver: van der
+ Vorst, H. A., 1992: Bi-cgstab: A fast and smoothly converging
+ variant of bi-cg for the solution of nonsymmetric linear
+ systems. SIAM Journal on Scientific and Statistical Computing,
+ https://doi.org/10.1137/0913035.
+ This solver implements the preconditioned Bi-CGSTAB algorithm,
+ described on page 638 of that paper.
+ """
+ bbb = np.zeros(np.array(self.source.shape) + 2)
+ xxx = np.zeros(np.array(self.source.shape) + 2)
+ bbb[1:-1, 1:-1] = self.source
+ bbb = swap_bounds(bbb)
+
+ sc_err = dot_prod(bbb, bbb)
+
+ # Group some temporary variables
+ stv = {
+ 'alf': 1.0,
+ 'omg': 1.0,
+ 'nrm': 1.0,
+ 'rrr': bbb - self.calc_ax(xxx)
+ }
+ stv['crrr'] = stv['rrr'].copy()
+
+ ppp = np.zeros(np.array(self.source.shape) + 2)
+ vvv = np.zeros(np.array(self.source.shape) + 2)
+
+ iteration = 0
+ while iteration < max_iterations:
+ rho = dot_prod(stv['rrr'], stv['crrr'])
+
+ bet = (rho / stv['nrm']) * (stv['alf'] / stv['omg'])
+
+ ttt = stv['rrr'] - bet * stv['omg'] * vvv
+
+ sss = precon(ttt, self.m_matrix)
+ ppp = sss + bet * ppp
+
+ vvv = self.calc_ax(ppp)
+ stv['nrm'] = dot_prod(stv['crrr'], vvv)
+
+ stv['alf'] = rho / stv['nrm']
+ sss = stv['rrr'] - stv['alf'] * vvv
+
+ csss = precon(sss, self.m_matrix)
+ ttt = self.calc_ax(csss)
+
+ stv['omg'] = dot_prod(ttt, sss) / dot_prod(ttt, ttt)
+
+ xxx = xxx + stv['alf'] * ppp + stv['omg'] * csss
+ stv['rrr'] = sss - stv['omg'] * ttt
+
+ stv['nrm'] = rho
+
+ if abs(stv['omg']) < 1.0e-16:
+ self.logger.info('Terminating Poisson solver.')
+ break
+
+ err = np.sqrt(dot_prod(stv['rrr'], stv['rrr']) / sc_err)
+ if err < self.tolerance:
+ self.logger.info('Poisson solver has converged.')
+ break
+
+ iteration += 1
+
+ if iteration == max_iterations:
+ raise RuntimeError('Poisson solver has not converged.')
+
+ self.energy_flux_potential = xxx
+
+ def calc_meridional_heat_transport(self):
+ """Meridional heat transport of energy flux potential.
+
+ Calculate the meridional heat transport using the gradient of
+ the energy flux potential.
Equation (11) in Pearce and
+ Bodas-Salcedo (2023).
+ """
+ deltax = 2.0 * np.pi / self.source.shape[1]
+ deltay = np.pi / self.source.shape[0]
+ yvalues = np.arange(-0.5 * np.pi + 0.5 * deltay, 0.5 * np.pi, deltay)
+ grad_phi = np.gradient(self.energy_flux_potential, deltay, axis=0)
+ grad_phi = grad_phi[1:-1, 1:-1]
+ self.meridional_heat_transport = np.sum((grad_phi.T *
+ np.cos(yvalues) *
+ deltax).T, axis=1)
+
+ def calc_ax(self, x_matrix):
+ """Matrix calculation of the Laplacian equation.
+
+ LHS of Eq. (9) in Pearce and Bodas-Salcedo (2023).
+ """
+ # Laplacian equation
+ src_shape = np.array(self.source.shape)
+ ax_matrix = np.zeros(src_shape + 2)
+ x_matrix = swap_bounds(x_matrix)
+ shape0, shape1 = src_shape
+ ax_matrix[1:shape0 + 1, 1:shape1 + 1] = (
+ self.a_matrix[2, 0:shape0, 0:shape1] *
+ x_matrix[0:shape0, 1:shape1 + 1] +
+ self.a_matrix[1, 0:shape0, 0:shape1] *
+ x_matrix[1:shape0 + 1, 0:shape1] +
+ self.a_matrix[0, 0:shape0, 0:shape1] *
+ x_matrix[1:shape0 + 1, 2:shape1 + 2] +
+ self.a_matrix[3, 0:shape0, 0:shape1] *
+ x_matrix[2:shape0 + 2, 1:shape1 + 1] +
+ self.a_matrix[4, 0:shape0, 0:shape1] *
+ x_matrix[1:shape0 + 1, 1:shape1 + 1])
+ ax_matrix = swap_bounds(ax_matrix)
+ return ax_matrix
diff --git a/esmvaltool/diag_scripts/iht_toa/single_model_diagnostics.py b/esmvaltool/diag_scripts/iht_toa/single_model_diagnostics.py
new file mode 100644
index 0000000000..e56240c67a
--- /dev/null
+++ b/esmvaltool/diag_scripts/iht_toa/single_model_diagnostics.py
@@ -0,0 +1,993 @@
+# (C) Crown Copyright 2023, the Met Office.
+"""Single model diagnostics.
+
+Apply Poisson solver to input fluxes and produce plots.
+"""
+
+import datetime
+import logging
+from copy import deepcopy
+
+import cartopy.crs as ccrs
+import iris
+import iris.plot as iplt
+import matplotlib.dates as mdates
+import matplotlib.pyplot as plt
+import numpy as np
+from iris import NameConstraint
+from matplotlib import gridspec, rcParams
+
+from esmvaltool.diag_scripts.iht_toa.poisson_solver import SphericalPoisson
+from esmvaltool.diag_scripts.shared import (
+ group_metadata,
+ run_diagnostic,
+ save_figure,
+)
+
+# Initialise logger
+logger = logging.getLogger(__name__)
+
+rcParams.update({
+ 'font.size': 14,
+ 'xtick.major.pad': 10,
+ 'ytick.major.pad': 10,
+ 'xtick.major.size': 10,
+ 'ytick.major.size': 10,
+ 'xtick.minor.size': 5,
+ 'ytick.minor.size': 5,
+ 'axes.linewidth': 2,
+ 'lines.markersize': 8,
+ 'lines.linewidth': 2
+})
+
+# Figure captions
+caption = {
+ 'F1': 'Figure 1. The implied heat transport due to TOA net flux (blue), '
+ 'split into the contributions from SW (orange) and LW (green).',
+ 'F2': 'Figure 2. The TOA energy flux potentials for (a) TOT, (c) '
+ 'SW, and (e) LW net fluxes, alongside maps of the spatial anomalies '
+ 'of the input fluxes [(b),(d),(f)]. The implied heat transport is '
+ 'the gradient of the energy flux potential, shown by the white '
+ 'vector arrows (with the same magnitude scale across all subplots). '
+ 'Heat is directed from the blue minima of the potential field to '
+ 'yellow maxima, with the magnitude implied by the density of '
+ 'contours. All maps of the same type share the same color bar at '
+ 'the bottom of the column so that it is possible to directly '
+ 'compare the results from different fluxes.',
+ 'F3': 'Figure 3. Direct radiative effects of clouds on the meridional '
+ 'heat transport. (a) Contributions from TOT CRE (blue), SW CRE '
+ '(orange), and LW CRE (green). (b) Contributions from all-sky and '
+ 'clear-sky OSR.
Both curves have been multiplied by -1 such that ' + 'positive heat transport is northward.', + 'F4': 'Figure 4. As in Figure 2, but for cloud radiative effects.', + 'F5': 'Figure 5. As in Figure 2, but for energy flux potentials and ' + 'spatial radiative anomalies associated with all-sky and clear-sky ' + 'outgoing shortwave radiation. ', + 'F6': 'Figure 6. A measure of the symmetry between heat transport in the ' + 'Northern and Southern Hemispheres, calculated for the 12-month ' + 'running mean of MHT in (a) the full hemisphere, (b) from the ' + 'equator to 30 deg latitude, and (c) between 30 and 90 deg ' + 'latitude. Symmetry values obtained when including (blue) and ' + 'excluding (orange) the effect of clouds are shown. The ' + 'climatological symmetry values for the two cases are shown as ' + 'black lines in each subplot. The standard deviations of the ' + 'time series are shown in each subplot.', +} + + +def get_provenance_record(filenames, figure_caption): + """Return a provenance record describing the plot. + + Parameters + ---------- + filenames : list of strings + The filenames containing the data used to create the plot. + figure_caption : string + Detailed description of the figure. + + Returns + ------- + dictionary + The provenance record describing the plot. + """ + record = { + 'ancestors': filenames, + 'caption': figure_caption, + 'references': ['pearce23jclim'] + } + return record + + +def matching_strings(list_of_strings, substrings): + """Return subset of ``list_of_strings`` with matches in ``substrings``. + + Parameters + ---------- + list_of_strings : list of strings + List of strings to be searched. + substrings : list of strings + The list of search strings. + + Returns + ------- + list + The elements in ``list_of_strings`` that contain + any of the substrings. + """ + matches = [] + for element in list_of_strings: + for var in substrings: + if var in element: + matches.append(element) + return matches + + +def area_average(cube, latitude='latitude', longitude='longitude', mdtol=1): + """Return area-weighted average of a cube. + + Parameters + ---------- + cube : :class:`iris.cube.Cube` + Input cube. + latitude : string + Name of latitude coordinate in ``cube``. + longitude : string + Name of longitude coordinate in ``cube``. + mdtol : float + Tolerance to missing data, between 0 and 1. + + + Returns + ------- + :class:`iris.cube.Cube` + Collapsed cube with the weighted average. + """ + if cube.coord(latitude).bounds is None: + cube.coord(latitude).guess_bounds() + if cube.coord(longitude).bounds is None: + cube.coord(longitude).guess_bounds() + grid_areas = iris.analysis.cartography.area_weights(cube) + cube_avg = cube.collapsed([longitude, latitude], + iris.analysis.MEAN, + weights=grid_areas, + mdtol=mdtol) + return cube_avg + + +def weight_zm(cube, latitude=None): + """Weight zonal-mean by normalised gridbox areas. + + Parameters + ---------- + cube : :class:`iris.cube.Cube` + Input cube. + latitude : tuple + Four-element tuple defining the latitude range. + The last two elements must be False, e.g. + latitude=(-90, 0, False, False). + + Returns + ------- + :class:`numpy.array` + Zonal-mean in the selected latitude range, weighted + by the normalised areas. 
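+
+ For example, ``symmetry_metric`` below builds its hemisphere metric
+ from two such weighted zonal means (a sketch, ``zm`` is a
+ hypothetical zonal-mean cube)::
+
+ north = weight_zm(zm, latitude=(0, 90, False, False))[::-1]
+ south = weight_zm(zm, latitude=(-90, 0, False, False))
+ asymmetry = np.abs(north + south).sum()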
+ """ + if cube.coord('latitude').bounds is None: + cube.coord('latitude').guess_bounds() + areas_data = iris.analysis.cartography.area_weights(cube, normalize=True) + cube_areas = iris.cube.Cube(areas_data, + long_name="normalised_area", + var_name="area", units="1", + dim_coords_and_dims=[(cube.coords()[0], 0)]) + if latitude is not None: + cube = cube.intersection(latitude=latitude) + cube_areas = cube_areas.intersection(latitude=latitude) + return cube.data * cube_areas.data + + +def call_poisson(flux_cube, latitude='latitude', longitude='longitude'): + """Call the Poisson solver with the data in ``flux_cube`` as source term. + + Return the energy flux potential and implied meridional heat transport + as cubes. + + Parameters + ---------- + flux_cube : :class:`iris.cube.Cube` + Input cube. + latitude : string + Name of latitude coordinate in ``cube``. + longitude : string + Name of longitude coordinate in ``cube``. + + Returns + ------- + efp_cube: :class:`iris.cube.Cube` + Energy flux potential cube. + mht_cube: :class:`iris.cube.Cube` + Implied meridional heat transport associated + with the source flux field. + """ + earth_radius = 6371e3 # Earth's radius in m + if flux_cube.coord(latitude).bounds is None: + flux_cube.coord(latitude).guess_bounds() + if flux_cube.coord(longitude).bounds is None: + flux_cube.coord(longitude).guess_bounds() + + # Remove average of flux field to account for storage term + grid_areas = iris.analysis.cartography.area_weights(flux_cube) + data_mean = flux_cube.collapsed(["longitude", "latitude"], + iris.analysis.MEAN, + weights=grid_areas).data + data = flux_cube.data - data_mean + + logger.info("Calling spherical_poisson") + sphpo = SphericalPoisson(logger, + source=data * (earth_radius**2.0), + tolerance=2.0e-4) + sphpo.solve() + sphpo.calc_meridional_heat_transport() + logger.info("Ending spherical_poisson") + + # Energy flux potential + efp_cube = iris.cube.Cube(sphpo.energy_flux_potential[1:-1, 1:-1], + long_name=f"energy_flux_potential" + f"_of_{flux_cube.var_name}", + var_name=f"{flux_cube.var_name}_efp", + units='J s-1', + dim_coords_and_dims=[(flux_cube.coords()[0], 0), + (flux_cube.coords()[1], 1)]) + + # MHT data cube + collapsed_longitude = iris.coords.AuxCoord(180.0, + bounds=(0.0, 360.0), + long_name='longitude', + standard_name='longitude', + units='degrees') + dim_coords_and_dims = [(flux_cube.coord('latitude'), 0)] + aux_coords_and_dims = [(flux_cube.coord('time'), None), + (collapsed_longitude, None)] + mht_cube = iris.cube.Cube(sphpo.meridional_heat_transport, + long_name=f"meridional_heat_transport_of" + f"_{flux_cube.var_name}", + var_name=f"{flux_cube.var_name}_mht", + units='W', + dim_coords_and_dims=dim_coords_and_dims, + aux_coords_and_dims=aux_coords_and_dims) + return efp_cube, mht_cube + + +def symmetry_metric(cube): + """Calculate symmetry metrics for a zonal-mean cube. + + It returns the symmetry metric S, as defined in Pearce and + Bodas-Salcedo, JClim, 2023, for 3 regions: entire hemisphere, + tropics (0 to 30 deg latitude) and extratropics + (30 to 90 degrees latitude). Perfectly symmetrical latitude + bands give S=0. + + Parameters + ---------- + cube : :class:`iris.cube.Cube` + Input cube. + + Returns + ------- + hemisphere: float + Metric for the whole hemisphere. + tropics: float + Metric for the tropics. + extra_tropics: float + Metric for the extra-tropics. 
+ """ + hemisphere = np.abs( + weight_zm(cube, latitude=(0, 90, False, False))[::-1] + + weight_zm(cube, latitude=(-90, 0, False, False))).sum() + tropics = np.abs( + weight_zm(cube, latitude=(0, 30, False, False))[::-1] + + weight_zm(cube, latitude=(-30, 0, False, False))).sum() + extra_tropics = np.abs( + weight_zm(cube, latitude=(30, 90, False, False))[::-1] + + weight_zm(cube, latitude=(-90, -30, False, False))).sum() + return hemisphere, tropics, extra_tropics + + +def format_plot(axes, label, title): + """Format plots in quiver panel. + + Parameters + ---------- + axes : :class:`matplotlib.axes.Axes` + Input axes. + label : string + Top-left plot label. + title : string + Plot title. + """ + axes.set_xticks(np.arange(-180, 190, 60)) + axes.set_xticklabels(['180', '120W', '60W', '0', '60E', '120E', '180']) + axes.set_yticks(np.arange(-90, 100, 30)) + axes.set_yticklabels(['90S', '60S', '30S', 'Eq', '30N', '60N', '90N']) + axes.annotate(label, xy=(0, 1.05), xycoords='axes fraction', color='k') + axes.set_title(title) + + +class ImpliedHeatTransport: + """Class that solves implied heat transport for an input dataset. + + These are the physical meanings of the main acronyms + used in the variable names: + FLX: radiative flux + EFP: energy flux potential + MHT: meridional heat transport + """ + + def __init__(self, flx_files): + """Calculate all the diagnostics for all fluxes in ``flx_files``. + + Parameters + ---------- + flx_files : list + List of files with input data. + """ + self.flx_files = flx_files + + # Create cube lists for the different datasets + self.flx_clim = iris.cube.CubeList() + self.mht_clim = iris.cube.CubeList() + self.efp_clim = iris.cube.CubeList() + self.mht_rolling_mean = iris.cube.CubeList() + self.symmetry_metric = iris.cube.CubeList() + + # Calculate 12-month rolling means for time series. + self.flx_rolling_mean = iris.cube.CubeList() + for flx_file in flx_files: + flx = iris.load_cube(flx_file) + if len(flx.shape) == 3: + self.flx_rolling_mean.append( + flx.rolling_window('time', iris.analysis.MEAN, 12)) + else: + self.flx_clim.append(flx) + + # Compute derived fluxes + self.derived_fluxes() + + # Calculate Energy Flux Potential and Meridional Heat Transport + # for each flux component + self.compute_efp_and_mht() + + # Times series of MHT symmetry metric + self.mht_symmetry_metrics() + + self.print() + + def compute_efp_and_mht(self): + """Calculate Energy Flux Potential and meridional heat transport. + + Loop over input data and calculate EFP and MHT of the + climatologies of radiative fluxes and the 12-month + rolling means of radiative fluxes. + """ + # Loop over climatologies + for flx in self.flx_clim: + efp, mht = call_poisson(flx) + self.efp_clim.append(efp) + self.mht_clim.append(mht) + # Loop over rolling means + for flx_rm in self.flx_rolling_mean: + mht_series = iris.cube.CubeList() + for flx in flx_rm.slices_over('time'): + efp, mht = call_poisson(flx) + mht_series.append(mht) + # Append MHT rolling mean after merging time series. + self.mht_rolling_mean.append(mht_series.merge_cube()) + + def derived_fluxes(self): + """Calculate derived radiative fluxes. 
+ + rlnt_clim: climatology of net LW TOA + rtntcs_clim: climatology of clear-sky net TOA + rtntcs_rolling_mean: 12-month rolling mean of rtntcs + """ + # Derived TOA climatologies: rlnt_clim, rtntcs_clim + rlnt_clim = -1.0 * self.flx_clim.extract_cube( + NameConstraint(var_name="rlut")) + rlnt_clim.var_name = "rlnt" + rlnt_clim.long_name = "radiative_flux_of_rlnt" + self.flx_clim.append(rlnt_clim) + rtntcs_clim = (self.flx_clim.extract_cube( + NameConstraint(var_name="rsdt")) - + self.flx_clim.extract_cube( + NameConstraint(var_name="rsutcs")) - + self.flx_clim.extract_cube( + NameConstraint(var_name="rlutcs"))) + rtntcs_clim.var_name = "rtntcs" + rtntcs_clim.long_name = "radiative_flux_of_rtntcs" + self.flx_clim.append(rtntcs_clim) + # Annual rolling means clear-sky net total TOA + rtntcs_rolling_mean = (self.flx_rolling_mean.extract_cube( + NameConstraint(var_name="rsdt")) - + self.flx_rolling_mean.extract_cube( + NameConstraint(var_name="rsutcs")) - + self.flx_rolling_mean.extract_cube( + NameConstraint(var_name="rlutcs"))) + rtntcs_rolling_mean.var_name = "rtntcs" + rtntcs_rolling_mean.long_name = "radiative_flux_of_rtntcs" + self.flx_rolling_mean.append(rtntcs_rolling_mean) + + def print(self): + """Print variable names of all cubes in an IHT object.""" + logger.info("=== implied_heat_transport object ===") + logger.info(self.mht_clim) + info_message = "Long name: %s; Variable: %s." + for climatology in self.mht_clim: + logger.info(info_message, + climatology.long_name, + climatology.var_name) + + logger.info(self.efp_clim) + for climatology in self.efp_clim: + logger.info(info_message, + climatology.long_name, + climatology.var_name) + + logger.info(self.flx_clim) + for climatology in self.flx_clim: + logger.info(info_message, + climatology.long_name, + climatology.var_name) + + logger.info(self.mht_rolling_mean) + for rolling_mean in self.mht_rolling_mean: + logger.info(info_message, + rolling_mean.long_name, + rolling_mean.var_name) + + logger.info(self.symmetry_metric) + for metric in self.symmetry_metric: + logger.info(info_message, + metric.long_name, + metric.var_name) + + logger.info(self.flx_files) + + def mht_symmetry_metrics(self): + """Calculate symmetry metrics. + + Produce 12-month rolling means for all monthly time series + of MHT. 
+ """ + petaunit = 1.0e15 + for mht_series in self.mht_rolling_mean: + time_coord = mht_series.coord('time') + ntime = time_coord.shape[0] + hemisphere = np.zeros(ntime) + tropics = np.zeros(ntime) + extra_tropics = np.zeros(ntime) + for i in np.arange(ntime): + hemisphere[i], tropics[i], extra_tropics[i] = ( + symmetry_metric(mht_series[i])) + # Create the cubes for each metric + long_name = f"symmetry_hemisphere_of_{mht_series.long_name}" + var_name = f"s_hem_{mht_series.var_name}" + cube_h = iris.cube.Cube(hemisphere / petaunit, + long_name=long_name, + var_name=var_name, + units="PW", + dim_coords_and_dims=[(time_coord, 0)]) + long_name = f"symmetry_tropics_of_{mht_series.long_name}" + var_name = f"s_tro_{mht_series.var_name}" + cube_t = iris.cube.Cube(tropics / petaunit, + long_name=long_name, + var_name=var_name, + units="PW", + dim_coords_and_dims=[(time_coord, 0)]) + long_name = f"symmetry_extratropics_of_{mht_series.long_name}" + var_name = f"s_ext_{mht_series.var_name}" + cube_e = iris.cube.Cube(extra_tropics / petaunit, + long_name=long_name, + var_name=var_name, + units="PW", + dim_coords_and_dims=[(time_coord, 0)]) + self.symmetry_metric.append(cube_h) + self.symmetry_metric.append(cube_t) + self.symmetry_metric.append(cube_e) + + def mht_plot(self, var_names, legend_label, ylim=(-10, 10)): + """Produce a single multi-line plot of MHT components. + + MHT is presented in PW. Up to three variables are on each plot. + + Parameters + ---------- + var_names : list + Variable names to plot, e.g. ["rtnt_mht", "rsnt_mht"]. + legend_label : list + List of labels for each line. + ylim : tuple + y axis limits. + """ + plt.figure() + for i, vname in enumerate(var_names): + mht = self.mht_clim.extract_cube(NameConstraint(var_name=vname)) + mht.convert_units('PW') + plt.plot(mht.coord('latitude').points, + mht.data, + label=legend_label[i]) + plt.hlines(0, -90, 90, color='k', linestyles=':') + plt.vlines(0, -10, 10, color='k', linestyles=':') + plt.xlim(-90, 90) + plt.ylim(ylim[0], ylim[1]) + plt.xticks(np.arange(-90, 120, 30)) + plt.xlabel('Latitude') + plt.ylabel('MHT (PW)') + plt.legend() + plt.tight_layout() + + def cre_mht_plot(self, left, right, ylim=(-1.5, 1.5)): + """Produce two multiline plots of MHT components. + + Parameters + ---------- + left : dictionary + Dictionary with variable names and labels for + the LHS plot, e.g. + {'vname': ['netcre_mht', 'swcre_mht', 'lwcre_mht'], + 'legend': ['Net CRE', 'SW CRE', 'LW CRE']} + right : dictionary + As ``left`` but for the RHS plot + ylim : tuple + y axis limits. 
+ """ + plt.figure(figsize=(11, 5)) + ax1 = plt.subplot(121) + for i, vname in enumerate(left['vname']): + mht = self.mht_clim.extract_cube(NameConstraint(var_name=vname)) + mht.convert_units('PW') + ax1.plot(mht.coord('latitude').points, + mht.data, + label=left['legend'][i]) + ax1.axhline(0, color='k', ls=':') + ax1.axvline(0, color='k', ls=':') + ax1.set_xlim(-90, 90) + ax1.set_xticks(np.arange(-90, 120, 30)) + ax1.set_xlabel('Latitude') + ax1.set_ylim(ylim[0], ylim[1]) + ax1.set_ylabel('MHT (PW)') + ax1.annotate('(a)', + xy=(0.01, 0.95), + xycoords='axes fraction', + color='k') + plt.legend() + + ax2 = plt.subplot(122) + col = ['C3', 'C7'] + for i, vname in enumerate(right['vname']): + mht = self.mht_clim.extract_cube(NameConstraint(var_name=vname)) + mht.convert_units('PW') + ax2.plot(mht.coord('latitude').points, + -mht.data, + label=right['legend'][i], + color=col[i]) + ax2.axhline(0, color='k', ls=':') + ax2.axvline(0, color='k', ls=':') + ax2.set_xlim(-90, 90) + ax2.set_xticks(np.arange(-90, 120, 30)) + ax2.set_xlabel('Latitude') + ax2.set_ylim(ylim[0], ylim[1]) + ax2.set_ylabel('MHT (PW)') + ax2.annotate('(b)', + xy=(0.01, 0.95), + xycoords='axes fraction', + color='k') + plt.legend(loc='lower right') + plt.tight_layout() + + def quiver_start(self, ntot, step): + """Calculate start point for quiver plot. + + Parameters + ---------- + ntot : int + Total number of points of the full vector. + step : int + Sampling step. + """ + start = (ntot - 2 - ((ntot - 2) // step) * step) // 2 + return start + + def quiver_maps_data(self, vnames, change_sign): + """Obtain data for one row of plots. + + Parameters + ---------- + vnames : list + Two-element list with the names of the EFP and + flux variables. + change_sign : list + Two-element list of booleans to indicate if + the variable has to be plotted with the sign changed. + """ + efp = self.efp_clim.extract_cube(NameConstraint(var_name=vnames[0])) + flx = self.flx_clim.extract_cube(NameConstraint(var_name=vnames[1])) + # The choice of origin for efp is arbitrary, + # we choose the unweighted mean. + efp = efp - efp.collapsed(efp.coords(), iris.analysis.MEAN) + flx = flx - area_average(flx) + if change_sign[0]: + efp = -efp + if change_sign[1]: + flx = -flx + efp.convert_units("PW") + v_component, u_component = np.gradient(efp.data) + u_component = u_component[1:-1, 1:-1] + v_component = v_component[1:-1, 1:-1] + return {'efp': efp, 'flx': flx, 'uuu': u_component, 'vvv': v_component} + + def quiver_subplot(self, dargs): + """Produce panel with maps of EFPs and fluxes. + + Plot figures with energy flux potential and gradient in the left-hand + column and the corresponding source term in the right-hand column. + + Parameters + ---------- + dargs : dictionary + Dictionary with variable names and plot configuration + data. + """ + mshgrd = np.meshgrid(self.flx_clim[0].coord('longitude').points, + self.flx_clim[0].coord('latitude').points) + nrows = len(dargs['var_name']) + # Calculate sampling for vector plot + dxy = [mshgrd[0].shape[1] // 20, mshgrd[0].shape[0] // 10] + startx = self.quiver_start(mshgrd[0].shape[1], dxy[0]) + starty = self.quiver_start(mshgrd[0].shape[0], dxy[1]) + + # Set grid layout depending on number of rows. + # Place figures every grid_step rows in the grid. 
+ grid_step = 7 + if nrows == 3: + plt.figure(figsize=(10, 10)) + grds = gridspec.GridSpec(22, 2) + grds.update(wspace=0.25, hspace=1.5) + elif nrows == 2: + plt.figure(figsize=(10, 6.5)) + grds = gridspec.GridSpec(15, 2) + grds.update(wspace=0.25, hspace=1.5) + elif nrows == 1: + plt.figure(figsize=(12, 4)) + grds = gridspec.GridSpec(8, 2) + grds.update(wspace=0.25, hspace=1.5) + + cbs = [] + for i in range(nrows): + data = self.quiver_maps_data(dargs['var_name'][i], + dargs['change_sign'][i]) + plt.subplot(grds[i * grid_step:(i * grid_step) + grid_step, 0], + projection=ccrs.PlateCarree(central_longitude=0)) + cbs.append( + iplt.contourf(data['efp'], + levels=np.linspace(dargs['vmin'], dargs['vmax'], + dargs['nlevs']))) + plt.gca().coastlines() + if i == 0: + qqq = plt.quiver(mshgrd[0][starty::dxy[1], startx::dxy[0]], + mshgrd[1][starty::dxy[1], startx::dxy[0]], + data['uuu'][starty::dxy[1], startx::dxy[0]], + data['vvv'][starty::dxy[1], startx::dxy[0]], + pivot='mid', + color='w', + width=0.005) + else: + plt.quiver(mshgrd[0][starty::dxy[1], startx::dxy[0]], + mshgrd[1][starty::dxy[1], startx::dxy[0]], + data['uuu'][starty::dxy[1], startx::dxy[0]], + data['vvv'][starty::dxy[1], startx::dxy[0]], + pivot='mid', + scale=qqq.scale, + color='w') + format_plot(plt.gca(), dargs['label'][i][0], dargs['title'][i][0]) + + plt.subplot(grds[i * grid_step:(i * grid_step) + grid_step, 1], + projection=ccrs.PlateCarree(central_longitude=0)) + cbs.append( + iplt.contourf(data['flx'], + levels=np.linspace(dargs['wmin'], dargs['wmax'], + dargs['nwlevs']), + cmap='RdBu_r')) + plt.gca().coastlines() + format_plot(plt.gca(), dargs['label'][i][1], dargs['title'][i][1]) + + plt.subplot(grds[-1, 0]) + plt.colorbar(cbs[0], + cax=plt.gca(), + orientation='horizontal', + label='Energy flux potential (PW)') + plt.subplot(grds[-1, 1]) + plt.colorbar(cbs[1], + cax=plt.gca(), + orientation='horizontal', + label=r'Flux (Wm$^{-2}$)', + ticks=np.linspace(dargs['wmin'], dargs['wmax'], + dargs['nwlevs'])[1::dargs['wlevstep']]) + + if nrows == 3: + plt.subplots_adjust(left=0.1, right=0.94, top=1.0, bottom=0.11) + elif nrows == 2: + plt.subplots_adjust(left=0.11, right=0.9, top=1.0, bottom=0.13) + elif nrows == 1: + plt.subplots_adjust(left=0.11, right=0.9, top=1.0, bottom=0.20) + + def plot_symmetry_time_series(self): + """Produce Figure 6. + + All-sky and clear-sky time series of the symmetry metrics for + three regions: globe, tropics and extra-tropics. 
+ """ + var_list = [["s_hem_rtnt_mht", "s_hem_rtntcs_mht"], + ["s_tro_rtnt_mht", "s_tro_rtntcs_mht"], + ["s_ext_rtnt_mht", "s_ext_rtntcs_mht"]] + col = ['C0', 'C1'] + label = [ + r'Global: 0$^\mathrm{o}$ - 90$^\mathrm{o}$', + r'Tropics: 0$^\mathrm{o}$ - 30$^\mathrm{o}$', + r'Extratropics: 30$^\mathrm{o}$ - 90$^\mathrm{o}$' + ] + legend_label = ["TOA net all-sky", "TOA net clear-sky"] + + plt.figure(figsize=(6, 12)) + for count, (var_name_1, var_name_2) in enumerate(var_list): + yy0 = self.symmetry_metric.extract_cube( + NameConstraint(var_name=var_name_1)) + yy1 = self.symmetry_metric.extract_cube( + NameConstraint(var_name=var_name_2)) + axx = plt.subplot(3, 1, count + 1) + dtx = [ + datetime.datetime.strptime(str(cell[0]), '%Y-%m-%d %H:%M:%S') + for cell in yy0.coord('time').cells() + ] + plt.plot(dtx, yy0.data, lw=4, linestyle='-', label=legend_label[0]) + plt.plot(dtx, yy1.data, lw=4, linestyle='-', label=legend_label[1]) + axx.annotate(rf'$\sigma$: {np.std(yy0.data):5.3f}', (0.05, 0.55), + xycoords='axes fraction', + color=col[0]) + axx.annotate(rf'$\sigma$: {np.std(yy1.data):5.3f}', (0.05, 0.45), + xycoords='axes fraction', + color=col[1]) + axx.set_ylim(0, 0.8) + axx.set_ylabel(r'$S$ (PW)') + axx.xaxis.set_major_locator(mdates.YearLocator(3, month=1, day=1)) + axx.xaxis.set_major_formatter(mdates.DateFormatter('%Y')) + axx.xaxis.set_minor_locator(mdates.YearLocator()) + axx.set_title(label[count]) + if count == 0: + plt.legend(loc=5) + plt.tight_layout() + + +def efp_maps(iht, model, experiment, config): + """Produce Figures 2, 4, and 5. + + Parameters + ---------- + iht : :class: ImpliedHeatTransport + Object with the recipe datasets. + model : string + Model name. + experiment : string + Experiment name. + config : dict + The ESMValTool configuration. 
+ """ + # Figure 2 + iht.quiver_subplot( + { + 'var_name': [['rtnt_efp', 'rtnt'], ['rsnt_efp', 'rsnt'], + ['rlnt_efp', 'rlnt']], + 'title': [['$P_{TOA}^{TOT}$', r'$\Delta F_{TOA}^{TOT}$'], + ['$P_{TOA}^{SW}$', r'$\Delta F_{TOA}^{SW}$'], + ['$P_{TOA}^{LW}$', r'$\Delta F_{TOA}^{LW}$']], + 'label': [['(a)', '(b)'], ['(c)', '(d)'], ['(e)', '(f)']], + 'change_sign': [[False, False], [False, False], [False, False]], + 'wmin': + -180, + 'wmax': + 180, + 'nwlevs': + 19, + 'wlevstep': + 4, + 'vmin': + -1.2, + 'vmax': + 1.2, + 'nlevs': + 11 + }) + flx_files = matching_strings(iht.flx_files, ['rtnt/', + 'rsut/', + 'rlut/']) + provenance_record = get_provenance_record(flx_files, caption['F2']) + figname = f"figure2_{model}_{experiment}" + save_figure(figname, provenance_record, config) + # Figure 4 + iht.quiver_subplot( + { + 'var_name': [['netcre_efp', 'netcre'], ['swcre_efp', 'swcre'], + ['lwcre_efp', 'lwcre']], + 'title': [['$P_{TOA}^{TOTCRE}$', r'$\Delta CRE_{TOA}^{TOT}$'], + ['$P_{TOA}^{SWCRE}$', r'$\Delta CRE_{TOA}^{SW}$'], + ['$P_{TOA}^{LWCRE}$', r'$\Delta CRE_{TOA}^{LW}$']], + 'label': [['(a)', '(b)'], ['(c)', '(d)'], ['(e)', '(f)']], + 'change_sign': [[False, False], [False, False], [False, False]], + 'wmin': + -60, + 'wmax': + 60, + 'nwlevs': + 13, + 'wlevstep': + 2, + 'vmin': + -0.3, + 'vmax': + 0.3, + 'nlevs': + 11 + }) + flx_files = matching_strings(iht.flx_files, ['netcre/', + 'swcre/', + 'lwcre/']) + provenance_record = get_provenance_record(flx_files, caption['F4']) + figname = f"figure4_{model}_{experiment}" + save_figure(figname, provenance_record, config) + # Figure 5 + iht.quiver_subplot( + { + 'var_name': [['rsutcs_efp', 'rsutcs'], ['rsut_efp', 'rsut']], + 'title': [['$P_{TOA}^{SWup, clr}$', + r'$\Delta F_{TOA}^{SWup, clr}$'], + ['$P_{TOA}^{SWup, all}$', + r'$\Delta F_{TOA}^{SWup, all}$']], + 'label': [['(a)', '(b)'], ['(c)', '(d)']], + 'change_sign': [[True, True], [True, True]], + 'wmin': -100, + 'wmax': 100, + 'nwlevs': 21, + 'wlevstep': 3, + 'vmin': -0.35, + 'vmax': 0.35, + 'nlevs': 11 + }) + flx_files = matching_strings(iht.flx_files, ['rsut/', 'rsutcs/']) + provenance_record = get_provenance_record(flx_files, caption['F5']) + figname = f"figure5_{model}_{experiment}" + save_figure(figname, provenance_record, config) + + +def mht_plots(iht, model, experiment, config): + """Produce Figures 1 and 3. + + Parameters + ---------- + iht : :class: ImpliedHeatTransport + Object with the recipe datasets. + model : string + Model name. + experiment : string + Experiment name. + config : dict + The ESMValTool configuration. + """ + # Figure 1 + iht.mht_plot(["rtnt_mht", "rsnt_mht", "rlnt_mht"], ['Net', 'SW', 'LW']) + flx_files = matching_strings(iht.flx_files, ['rtnt/', 'rsut/', 'rlut/']) + provenance_record = get_provenance_record(flx_files, caption['F1']) + figname = f"figure1_{model}_{experiment}" + save_figure(figname, provenance_record, config) + # Figure 3 + iht.cre_mht_plot( + { + 'vname': ['netcre_mht', 'swcre_mht', 'lwcre_mht'], + 'legend': ['Net CRE', 'SW CRE', 'LW CRE'] + }, { + 'vname': ['rsut_mht', 'rsutcs_mht'], + 'legend': ['-1 x OSR (all-sky)', '-1 x OSR (clear-sky)'] + }) + flx_files = matching_strings(iht.flx_files, ['netcre/', 'swcre/', 'lwcre/', + 'rsut/', 'rsutcs/']) + provenance_record = get_provenance_record(flx_files, caption['F3']) + figname = f"figure3_{model}_{experiment}" + save_figure(figname, provenance_record, config) + + +def symmetry_plots(iht, model, experiment, config): + """Produce Figure 6. 
+ + Parameters + ---------- + iht : :class: ImpliedHeatTransport + Object with the recipe datasets. + model : string + Model name. + experiment : string + Experiment name. + config : dict + The ESMValTool configuration. + """ + iht.plot_symmetry_time_series() + flx_files = matching_strings(iht.flx_files, ['rtnt_monthly', + 'rsutcs_monthly', + 'rlutcs_monthly', + 'rsdt_monthly']) + provenance_record = get_provenance_record(flx_files, caption['F6']) + figname = f"figure6_{model}_{experiment}" + save_figure(figname, provenance_record, config) + + +def plot_single_model_diagnostics(iht_dict, config): + """Produce plots for a single model and experiment. + + Parameters + ---------- + iht_dict : dict + iht_dict is a two-level dictionary: iht_dict[model][experiment] + config : dict + The ESMValTool configuration. + """ + for model, iht_model in iht_dict.items(): + logger.info("Plotting model: %s", model) + for experiment, iht_experiment in iht_model.items(): + logger.info("Plotting experiment: %s", experiment) + mht_plots(iht_experiment, model, experiment, config) + efp_maps(iht_experiment, model, experiment, config) + symmetry_plots(iht_experiment, model, experiment, config) + + +def main(config): + """Produce all the recipe's plots. + + Produce Figures 1 to 6 of Pearce and Bodas-Salcedo (2023) for each + model and dataset combination. + + Parameters + ---------- + config : dict + The ESMValTool configuration. + """ + input_data = deepcopy(list(config['input_data'].values())) + input_data = group_metadata(input_data, 'dataset', sort='variable_group') + + # Arrange input flux files in a 2-level dictionary [model_name][dataset] + flux_files = {} + for model_name, datasets in input_data.items(): + flux_files[model_name] = {} + for dataset in datasets: + if dataset['dataset'] in flux_files[model_name]: + flux_files[model_name][dataset['dataset']].append( + dataset['filename']) + else: + flux_files[model_name][dataset['dataset']] = [ + dataset['filename'] + ] + + # Create dictionary of implied_heat_transport objects. + # It's a 2-level dictionary like flux_files. + # This is where all the calculations are done. 
+ iht = {} + for model_name, datasets in flux_files.items(): + logger.info("Model %s", model_name) + iht[model_name] = {} + for dataset_name, files in datasets.items(): + logger.info("Dataset %s", dataset_name) + iht[model_name][dataset_name] = ImpliedHeatTransport(files) + + # Produce plots + plot_single_model_diagnostics(iht, config) + + +if __name__ == '__main__': + + with run_diagnostic() as configuration: + main(configuration) diff --git a/esmvaltool/diag_scripts/impact/bias_and_change.py b/esmvaltool/diag_scripts/impact/bias_and_change.py new file mode 100644 index 0000000000..00e156edf2 --- /dev/null +++ b/esmvaltool/diag_scripts/impact/bias_and_change.py @@ -0,0 +1,266 @@ +"""Calculate and plot bias and change for each model.""" +import logging +from datetime import datetime +from pathlib import Path + +import matplotlib.pyplot as plt +import numpy as np +import pandas as pd +import seaborn as sns +import xarray as xr + +from esmvaltool.diag_scripts.shared import ( + ProvenanceLogger, + get_diagnostic_filename, + get_plot_filename, + group_metadata, + run_diagnostic, + select_metadata, +) + +logger = logging.getLogger(Path(__file__).stem) + + +def log_provenance(filename, ancestors, caption, cfg): + """Create a provenance record for the output file.""" + provenance = { + 'caption': caption, + 'domains': ['reg'], + 'authors': ['kalverla_peter'], + 'projects': ['isenes3'], + 'ancestors': ancestors, + } + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(filename, provenance) + + +def make_standard_calendar(xrda: 'xr.DataArray'): + """Make sure time coordinate uses the default calendar. + + Workaround for incompatible calendars 'standard' and 'no-leap'. + Assumes yearly data. + """ + try: + years = xrda.time.dt.year.values + xrda['time'] = [datetime(year, 7, 1) for year in years] + except TypeError: + # Time dimension is 0-d array + pass + except AttributeError: + # Time dimension does not exist + pass + + +def load_data(metadata: list): + """Load all files from metadata into an Xarray dataset. + + ``metadata`` is a list of dictionaries with dataset descriptors. 
+ """ + + data_arrays = [] + identifiers = [] + ancestors = [] + + for infodict in metadata: + if infodict.get('ensemble') is not None: + alias = "{project}_{dataset}_{ensemble}".format(**infodict) + else: + alias = infodict['alias'] + input_file = infodict['filename'] + short_name = infodict['short_name'] + + xrds = xr.open_dataset(input_file) + xrda = xrds[short_name] + + # Make sure datasets can be combined + make_standard_calendar(xrda) + redundant_dims = np.setdiff1d(xrda.coords, xrda.dims) + xrda = xrda.drop(redundant_dims) + + data_arrays.append(xrda) + identifiers.append(alias) + ancestors.append(input_file) + + # Combine along a new dimension + data_array = xr.concat(data_arrays, dim='dataset') + data_array['dataset'] = identifiers + + return data_array, ancestors + + +def area_weighted_mean(data_array: 'xr.DataArray') -> 'xr.DataArray': + """Calculate area mean weighted by the latitude.""" + weights_lat = np.cos(np.radians(data_array.lat)) + means = data_array.weighted(weights_lat).mean(dim=['lat', 'lon', 'time']) + + return means + + +def calculate_bias(model_data: 'xr.DataArray', + obs_data: 'xr.DataArray') -> 'xr.DataArray': + """Calculate area weighted RMSD with respect to (mean of) observations.""" + if len(obs_data['dataset']) > 1: + obs_data = obs_data.mean(dim='dataset') + else: + obs_data = obs_data.squeeze() + + diff = model_data - obs_data + bias = area_weighted_mean(diff**2)**0.5 + + bias.attrs = model_data.attrs + return bias + + +def plot_scatter(tidy_df, ancestors, cfg): + """Plot bias on one axis and change on the other.""" + grid = sns.relplot( + data=tidy_df, + x="Bias (RMSD of all gridpoints)", + y="Mean change (Future - Reference)", + hue="dataset", + col="variable", + facet_kws=dict(sharex=False, sharey=False), + kind='scatter', + ) + + filename = get_plot_filename('bias_vs_change', cfg) + grid.fig.savefig(filename, bbox_inches='tight') + + caption = "Bias and change for each variable" + log_provenance(filename, ancestors, caption, cfg) + + +def plot_table(dataframe, ancestors, cfg): + """Render pandas table as a matplotlib figure.""" + fig, axes = plt.subplots() + pd.plotting.table(axes, dataframe.reset_index().round(2)) + axes.set_axis_off() + + filename = get_plot_filename('table', cfg) + fig.savefig(filename, bbox_inches='tight') + + caption = "Bias and change for each variable" + log_provenance(filename, ancestors, caption, cfg) + + +def plot_htmltable(dataframe, ancestors, cfg): + """Render pandas table as html output. 
+ + # https://pandas.pydata.org/pandas-docs/stable/user_guide/style.html + """ + styles = [ + { + "selector": ".index_name", + "props": [("text-align", "right")] + }, + { + "selector": ".row_heading", + "props": [("text-align", "right")] + }, + { + "selector": "td", + "props": [("padding", "3px 25px")] + }, + ] + + styled_table = dataframe\ + .unstack('variable')\ + .style\ + .set_table_styles(styles)\ + .background_gradient(cmap='RdYlGn', low=0, high=1, axis=0)\ + .format("{:.2e}", na_rep="-")\ + .to_html() + + filename = get_diagnostic_filename('bias_vs_change', cfg, extension='html') + with open(filename, 'w') as htmloutput: + htmloutput.write(styled_table) + + caption = "Bias and change for each variable" + log_provenance(filename, ancestors, caption, cfg) + + +def make_tidy(dataset): + """Convert xarray data to tidy dataframe.""" + dataframe = dataset.rename( + tas='Temperature (K)', + pr='Precipitation (kg/m2/s)', + ).to_dataframe() + dataframe.columns.name = 'variable' + tidy_df = dataframe.stack('variable').unstack('metric') + + return tidy_df + + +def save_csv(dataframe, ancestors, cfg): + """Save output for use in Climate4Impact preview page.""" + # modify dataframe columns + dataframe = dataframe.unstack('variable') + dataframe.columns = ['tas_bias', 'pr_bias', 'tas_change', 'pr_change'] + project_model_member = np.array( + [x.split('_') for x in dataframe.index.values]) + + # metadata in separate columns + dataframe[['project', 'member', + 'model']] = project_model_member[:, [0, -1, 1]] + + # kg/m2/s to mm/day + dataframe[['pr_bias', 'pr_change']] *= 24 * 60 * 60 + + # save + filename = get_diagnostic_filename('recipe_output', cfg, extension='csv') + caption = "Bias and change for each variable" + dataframe.to_csv(filename) + log_provenance(filename, ancestors, caption, cfg) + + +def main(cfg): + """Calculate, visualize and save the bias and change for each model.""" + metadata = cfg['input_data'].values() + grouped_metadata = group_metadata(metadata, 'variable_group') + + biases = {} + changes = {} + ancestors = [] + for group, metadata in grouped_metadata.items(): + + model_metadata = select_metadata(metadata, tag='model') + model_data, model_ancestors = load_data(model_metadata) + ancestors.extend(model_ancestors) + + variable = model_data.name + + if group.endswith('bias'): + obs_metadata = select_metadata(metadata, tag='observations') + obs_data, obs_ancestors = load_data(obs_metadata) + ancestors.extend(obs_ancestors) + + bias = calculate_bias(model_data, obs_data) + biases[variable] = bias + + elif group.endswith('change'): + changes[variable] = model_data + + else: + logger.warning( + "Got input for variable group %s" + " but I don't know what to do with it.", group) + + # Combine all variables + bias = xr.Dataset(biases) + change = xr.Dataset(changes) + combined = xr.concat([bias, change], dim='metric') + combined['metric'] = [ + 'Bias (RMSD of all gridpoints)', 'Mean change (Future - Reference)' + ] + + tidy_df = make_tidy(combined) + plot_scatter(tidy_df, ancestors, cfg) + plot_table(tidy_df, ancestors, cfg) + plot_htmltable(tidy_df, ancestors, cfg) + save_csv(tidy_df, ancestors, cfg) + return + + +if __name__ == '__main__': + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/ipcc_ar5/ch09_fig09_14.py b/esmvaltool/diag_scripts/ipcc_ar5/ch09_fig09_14.py new file mode 100644 index 0000000000..d28b093ae8 --- /dev/null +++ b/esmvaltool/diag_scripts/ipcc_ar5/ch09_fig09_14.py @@ -0,0 +1,432 @@ +# -*- coding: utf-8 -*- +"""Diagnostic 
script to plot figure 9.14 of IPCC AR5 chapter 9.
+
+Description
+-----------
+Calculate and plot the following quantities with regard to sea
+surface temperature: zonal mean error, equatorial mean error,
+equatorial mean. The errors are calculated against the reference given
+in the recipe. Equatorial here means between 5 degrees north and 5
+degrees south. This has been modelled after IPCC AR5 WG1 Ch. 9,
+Fig. 9.14.
+
+Author
+------
+Klaus Zimmermann (SMHI, Sweden)
+
+Project
+-------
+CRESCENDO
+"""
+
+import logging
+import os
+
+import iris
+import iris.plot as iplt
+import matplotlib
+import matplotlib.pyplot as plt
+import matplotlib.ticker as mticker
+import numpy as np
+from iris.exceptions import CoordinateNotFoundError
+from iris.util import equalise_attributes
+from matplotlib.ticker import MultipleLocator
+
+from esmvaltool.diag_scripts.shared import (
+    ProvenanceLogger,
+    get_diagnostic_filename,
+    get_plot_filename,
+    group_metadata,
+    run_diagnostic,
+)
+
+matplotlib.rcParams.update({'font.size': 9})
+
+logger = logging.getLogger(os.path.basename(__file__))
+
+
+def get_provenance_record(ancestor_files):
+    """Create a provenance record describing the diagnostic data and plot."""
+    record = {
+        'caption':
+        ('(a) Zonally averaged sea surface temperature (SST) error in CMIP5 '
+         'models. (b) Equatorial SST error in CMIP5 models. (c) Zonally '
+         'averaged multi-model mean SST error for CMIP5 (red line) together '
+         'with inter-model standard deviation (shading). (d) Equatorial '
+         'multi-model mean SST in CMIP5 (red line) together with inter-model '
+         'standard deviation (shading) and observations (black). Model '
+         'climatologies are derived from the 1979-1999 mean of the '
+         'historical simulations. The Hadley Centre Sea Ice and Sea Surface '
+         'Temperature (HadISST) (Rayner et al., 2003) observational '
+         'climatology for 1979-1999 is used as reference for the error '
+         'calculation in (a), (b), and (c); and for observations in (d).'),
+        'statistics': ['anomaly', 'mean', 'stddev', 'clim'],
+        'domains': ['eq', 'global'],
+        'plot_types': ['geo', 'sect', 'zonal'],
+        'authors': ['zimmermann_klaus'],
+        'projects': ['crescendo'],
+        'references': ['flato13ipcc', 'hadisst'],
+        'realms': ['ocean'],
+        'themes': ['phys'],
+        'ancestors': ancestor_files,
+    }
+    return record
+
+
+DEGREE_SYMBOL = u'\u00B0'
+
+
+def _fix_lons(lons):
+    """Fix the given longitudes into the range ``[-180, 180]``."""
+    lons = np.array(lons, copy=False, ndmin=1)
+    fixed_lons = ((lons + 180) % 360) - 180
+    # Make the positive 180s positive again.
+    fixed_lons[(fixed_lons == -180) & (lons > 0)] *= -1
+    return fixed_lons
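The two-step wrap in `_fix_lons` is worth a quick sanity check; a self-contained NumPy sketch with synthetic inputs (not part of the diagnostic):

```python
import numpy as np

lons = np.array([0, 180, 190, 360])
fixed = ((lons + 180) % 360) - 180         # -> [0, -180, -170, 0]
fixed[(fixed == -180) & (lons > 0)] *= -1  # keep +180 positive -> [0, 180, -170, 0]
print(fixed)                               # 190° E ends up printed as 170° W below
```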
+def _lon_hemisphere(longitude):
+    """Return the hemisphere (E, W or '' for 0 or 180) for the longitude."""
+    longitude = _fix_lons(longitude)
+    if longitude in (0, 180):
+        hemisphere = ''
+    elif longitude > 0:
+        hemisphere = ' E'
+    elif longitude < 0:
+        hemisphere = ' W'
+    else:
+        hemisphere = ''
+    return hemisphere
+
+
+def _lat_hemisphere(latitude):
+    """Return the hemisphere (N, S or '' for 0) for the given latitude."""
+    if latitude > 0:
+        hemisphere = ' N'
+    elif latitude < 0:
+        hemisphere = ' S'
+    else:
+        hemisphere = ''
+    return hemisphere
+
+
+def _east_west_formatted(longitude, num_format='g'):
+    fmt_string = u'{longitude:{num_format}}{degree}{hemisphere}'
+    longitude = _fix_lons(longitude)[0]
+    return fmt_string.format(longitude=abs(longitude),
+                             num_format=num_format,
+                             hemisphere=_lon_hemisphere(longitude),
+                             degree=DEGREE_SYMBOL)
+
+
+def _north_south_formatted(latitude, num_format='g'):
+    fmt_string = u'{latitude:{num_format}}{degree}{hemisphere}'
+    return fmt_string.format(latitude=abs(latitude),
+                             num_format=num_format,
+                             hemisphere=_lat_hemisphere(latitude),
+                             degree=DEGREE_SYMBOL)
+
+
+#: A formatter which turns longitude values into nice longitudes such as 110W
+LONGITUDE_FORMATTER = mticker.FuncFormatter(
+    lambda v, pos: _east_west_formatted(v))
+#: A formatter which turns latitude values into nice latitudes such as 45S
+LATITUDE_FORMATTER = mticker.FuncFormatter(
+    lambda v, pos: _north_south_formatted(v))
+
+CM_PER_INCH = 2.54
+
+
+def cm_to_inch(cms):
+    """Convert cm to inch."""
+    return cms / CM_PER_INCH
+
+
+def calc_error(data, reference=None):
+    """Calculate the error against a reference."""
+    if reference is None:
+        return None
+    error = data - reference
+    error.metadata = data.metadata
+    name = data.long_name
+    if name is None:
+        name = data.name()
+    error.long_name = '{} error'.format(name)
+    time_coordinates = error.coords('time')
+    if len(time_coordinates) > 1:
+        # If data and reference times differ (usually in calendar),
+        # keep the reference time coordinate for easy merging.
+        data_time_coordinate = data.coord('time')
+        error.remove_coord(data_time_coordinate)
+    return error
+
+
+def multi_model_merge(cubes):
+    """Merge cubes of different models into one cube.
+
+    This merges cubes from different models/datasets into one big cube
+    by promoting the cmip model_id attribute to a scalar coordinate and
+    then performing a merge along that coordinate. Conflicting
+    attributes and coordinates are simply removed.
+    """
+    def promote_model_name(cube):
+        """Promote model_id attribute to scalar variable."""
+        new_cube = cube.copy()
+        model_name = new_cube.attributes['model_id']
+        coord = iris.coords.AuxCoord(np.array([model_name]),
+                                     standard_name=None,
+                                     units='no_unit',
+                                     long_name=u'model',
+                                     var_name='model')
+        new_cube.add_aux_coord(coord)
+        return new_cube
+
+    cube_list = iris.cube.CubeList([promote_model_name(m) for m in cubes])
+    equalise_attributes(cube_list)
+    for cube in cube_list:
+        cube.cell_methods = tuple()
+        for coord in ['day_of_year', 'day_of_month', 'month_number', 'year']:
+            try:
+                cube.remove_coord(coord)
+            except CoordinateNotFoundError:
+                pass
+    return cube_list.merge_cube()
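To make the merge pattern concrete, here is a minimal, self-contained sketch of what `multi_model_merge` relies on, using hypothetical model names; iris refuses to merge cubes whose attributes conflict, which is why `equalise_attributes` is called first:

```python
import iris.coords
import iris.cube
import numpy as np
from iris.util import equalise_attributes

cubes = iris.cube.CubeList()
for model, value in [('MODEL-A', 1.0), ('MODEL-B', 2.0)]:
    # Scalar cubes standing in for two models' fields; the differing
    # 'history' attributes would block a plain merge.
    cube = iris.cube.Cube(np.array(value), long_name='sst_error',
                          attributes={'history': 'made for ' + model})
    cube.add_aux_coord(iris.coords.AuxCoord([model], long_name='model'))
    cubes.append(cube)

equalise_attributes(cubes)   # drops attributes that differ between cubes
merged = cubes.merge_cube()  # one cube with a new 'model' coordinate
print(merged.coord('model').points)  # ['MODEL-A' 'MODEL-B']
```

Cell methods and the auxiliary time coordinates are cleared in the function above for the same reason: anything that differs between the cubes prevents the merge.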
+ """ + def promote_model_name(cube): + """Promote model_id attribute to scalar variable.""" + new_cube = cube.copy() + model_name = new_cube.attributes['model_id'] + coord = iris.coords.AuxCoord(np.array([model_name]), + standard_name=None, + units='no_unit', + long_name=u'model', + var_name='model') + new_cube.add_aux_coord(coord) + return new_cube + + cube_list = iris.cube.CubeList([promote_model_name(m) for m in cubes]) + equalise_attributes(cube_list) + for cube in cube_list: + cube.cell_methods = tuple() + for coord in ['day_of_year', 'day_of_month', 'month_number', 'year']: + try: + cube.remove_coord(coord) + except CoordinateNotFoundError: + pass + return cube_list.merge_cube() + + +def load_data(config): + """Load cubes into config dict.""" + for key in config['input_data'].keys(): + filename = config['input_data'][key]['filename'] + config['input_data'][key]['cube'] = iris.load_cube(filename) + + +def prepare_reference(group): + """Prepare reference cube and remove from the group.""" + ref_name = group[0]['reference_dataset'] + reference_candidates = [ds for ds in group if ds['dataset'] == ref_name] + assert len(reference_candidates) == 1 + reference = reference_candidates[0] + group.remove(reference) + return reference + + +def mask_equatorial(equ): + """Mask out Indonesian island area.""" + lon = equ.coord('longitude').points + equ.data.mask = equ.data.mask | np.logical_and(lon >= 98., lon <= 121.) + return equ + + +def prepare_data(config): + """Perform data calculations.""" + groups = group_metadata(config['input_data'].values(), 'variable_group') + zm_g = groups["tos_zm"] + zm_ref = prepare_reference(zm_g)['cube'] + zm_errors = [calc_error(dataset['cube'], zm_ref) for dataset in zm_g] + eq_g = groups["tos_eq"] + eq_ref = mask_equatorial(prepare_reference(eq_g)['cube']) + eqs = [mask_equatorial(ds['cube']) for ds in eq_g] + eq_errors = [calc_error(eq, eq_ref) for eq in eqs] + data = { + 'zonal_mean_errors': zm_errors, + 'equatorials': eqs, + 'equatorial_ref': eq_ref, + 'equatorial_errors': eq_errors, + } + return data + + +def setup_figure(): + """Setup basic figure.""" + fig = plt.figure(figsize=(cm_to_inch(18), cm_to_inch(15))) + axes = np.array([ + [ + fig.add_axes([0.10, 0.56, 0.30, 0.35]), + fig.add_axes([0.50, 0.56, 0.30, 0.35]) + ], + [ + fig.add_axes([0.10, 0.10, 0.30, 0.35]), + fig.add_axes([0.50, 0.10, 0.30, 0.35]) + ], + ]) + return fig, axes + + +def plot_zonal_mean_errors_ensemble(axes, zonal_mean_errors, ref_line_style): + """Plot zonal mean error plot (subfigure a).""" + axes.set_title('(a) Zonal mean SST error CMIP5') + axes.yaxis.set_label_text(u'SST error (°C)') + axes.yaxis.set_minor_locator(MultipleLocator(.5)) + axes.xaxis.set_minor_locator(MultipleLocator(10)) + axes.xaxis.set_major_locator(MultipleLocator(30)) + axes.yaxis.set_major_locator(MultipleLocator(2)) + axes.xaxis.set_major_formatter(LATITUDE_FORMATTER) + axes.set_ylim(-5., 5.) + axes.set_xlim(-90., 90.) + axes.tick_params(which='both', + direction='in', + top=True, + right=True, + labelsize=7.) 
+ axes.xaxis.set_label_text(u'Latitude') + lines = [] + labels = [] + cube_list = multi_model_merge(zonal_mean_errors) + for error in zonal_mean_errors: + lines.append(iplt.plot(error.coord('latitude'), error, axes=axes)[0]) + labels.append(error.attributes['model_id']) + ensemble_mean = cube_list.collapsed('model', iris.analysis.MEAN) + mean_line = iplt.plot(ensemble_mean.coord('latitude'), + ensemble_mean, + axes=axes, + color='#e61f25', + **ref_line_style)[0] + lines = [mean_line] + lines + labels = ['CMIP5 mean'] + labels + return (lines, labels) + + +def plot_equatorial_errors(axes, equatorial_errors, ref_line_style): + """Plot equatorial errors (subfigure b).""" + axes.set_title('(b) Equatorial SST error CMIP5') + axes.yaxis.set_label_text(u'SST error (°C)') + axes.yaxis.set_minor_locator(MultipleLocator(.5)) + axes.xaxis.set_minor_locator(MultipleLocator(30)) + axes.xaxis.set_major_locator(MultipleLocator(60)) + axes.yaxis.set_major_locator(MultipleLocator(2)) + axes.xaxis.set_major_formatter(LONGITUDE_FORMATTER) + axes.set_ylim(-5., 5.) + axes.set_xlim(25., 360.) + axes.tick_params(which='both', + direction='in', + top=True, + right=True, + labelsize=7.) + axes.xaxis.set_label_text(u'Longitude') + for error in equatorial_errors: + iplt.plot(error, label=error.attributes['model_id'], axes=axes) + cube_list = multi_model_merge(equatorial_errors) + ensemble_mean = cube_list.collapsed('model', iris.analysis.MEAN) + iplt.plot(ensemble_mean, + label='CMIP5 mean', + axes=axes, + color='#e61f25', + **ref_line_style) + + +def plot_zonal_mean_errors_project(axes, zonal_mean_errors, ref_line_style): + """Plot zonal error multi model mean (subfigure c).""" + axes.set_title('(c) Zonal mean SST error CMIP5') + axes.yaxis.set_label_text(u'SST error (°C)') + axes.yaxis.set_minor_locator(MultipleLocator(.5)) + axes.xaxis.set_minor_locator(MultipleLocator(10)) + axes.xaxis.set_major_locator(MultipleLocator(30)) + axes.yaxis.set_major_locator(MultipleLocator(2)) + axes.xaxis.set_major_formatter(LATITUDE_FORMATTER) + axes.set_ylim(-5., 5.) + axes.set_xlim(-90., 90.) + axes.tick_params(which='both', + direction='in', + top=True, + right=True, + labelsize=7.) + axes.xaxis.set_label_text(u'Latitude') + lat = zonal_mean_errors[0].coord('latitude').points + data = np.ma.vstack([m.data for m in zonal_mean_errors]) + std = data.std(axis=0) + avg = data.mean(axis=0) + axes.fill_between(lat, avg - std, avg + std, facecolor='#e61f25', alpha=.5) + axes.plot(lat, avg, color='#e61f25', **ref_line_style) + + +def plot_equatorials(axes, reference, equatorials, ref_line_style): + """Plot equatorial multi model mean (subfigure d).""" + axes.set_title('(d) Equatorial SST CMIP5') + axes.yaxis.set_label_text(u'SST (°C)') + axes.yaxis.set_minor_locator(MultipleLocator(.5)) + axes.xaxis.set_minor_locator(MultipleLocator(30)) + axes.xaxis.set_major_locator(MultipleLocator(60)) + axes.yaxis.set_major_locator(MultipleLocator(2)) + axes.xaxis.set_major_formatter(LONGITUDE_FORMATTER) + axes.set_ylim(22., 31.) + axes.set_xlim(25., 360.) + axes.tick_params(which='both', + direction='in', + top=True, + right=True, + labelsize=7.) 
+ axes.xaxis.set_label_text(u'Longitude') + lon = reference.coord('longitude').points + data = np.ma.vstack([m.data for m in equatorials[1:]]) + std = data.std(axis=0) + avg = data.mean(axis=0) + axes.fill_between(lon, avg - std, avg + std, facecolor='#e61f25', alpha=.5) + axes.plot(lon, avg, color='#e61f25', **ref_line_style) + lines = axes.plot(lon, reference.data, 'k', **ref_line_style) + return (lines, ['HadISST']) + + +def draw_legend(fig, lines, labels): + """Draw the legend.""" + return fig.legend(lines, + labels, + loc='upper left', + fontsize=6., + bbox_to_anchor=(.81, .92)) + + +def produce_plots(config, data): + """Produce all elements of the full plot.""" + ref_line_style = {'linestyle': '-', 'linewidth': 2.} + fig, axes = setup_figure() + lines, labels = plot_zonal_mean_errors_ensemble(axes[0, 0], + data['zonal_mean_errors'], + ref_line_style) + plot_equatorial_errors(axes[0, 1], data['equatorial_errors'], + ref_line_style) + plot_zonal_mean_errors_project(axes[1, 0], data['zonal_mean_errors'], + ref_line_style) + ref_ls, ref_labels = plot_equatorials(axes[1, 1], data['equatorial_ref'], + data['equatorials'], ref_line_style) + all_lines = ref_ls + lines + all_labels = ref_labels + labels + legend = draw_legend(fig, all_lines, all_labels) + path = get_plot_filename('fig-9-14', config) + fig.savefig(path, bbox_extra_artists=(legend, )) + return path + + +def write_data(config, data): + """Write all the calculated data to output file.""" + cubes = iris.cube.CubeList([data['equatorial_ref']] + + data['zonal_mean_errors'] + + data['equatorials'] + data['equatorial_errors']) + path = get_diagnostic_filename('fig-9-14', config) + iris.save(cubes, path) + return path + + +def main(config): + """Run sst zonal mean and equatorial errors diagnostic. + + Arguments + config: Dictionary containing project information + + Description + This is the main routine of the diagnostic. + """ + load_data(config) + data = prepare_data(config) + plot_path = produce_plots(config, data) + ancestor_files = list(config['input_data'].keys()) + provenance_record = get_provenance_record(ancestor_files) + netcdf_path = write_data(config, data) + with ProvenanceLogger(config) as provenance_logger: + provenance_logger.log(netcdf_path, provenance_record) + provenance_logger.log(plot_path, provenance_record) + + +if __name__ == '__main__': + with run_diagnostic() as cfg: + main(cfg) diff --git a/esmvaltool/diag_scripts/ipcc_ar5/ch09_fig09_3.ncl b/esmvaltool/diag_scripts/ipcc_ar5/ch09_fig09_3.ncl new file mode 100644 index 0000000000..6a461b9cc5 --- /dev/null +++ b/esmvaltool/diag_scripts/ipcc_ar5/ch09_fig09_3.ncl @@ -0,0 +1,297 @@ +; ch09_fig09-3 +; ############################################################################ +; Author: Axel Lauer (DLR, Germany) +; PROJECT-NAME CRESCENDO +; ############################################################################ +; Description +; Calculates the multi-model mean seasonality (DJF - JJA), multi-model mean +; of absolute seasonality, multi-model mean bias in seasonality, and multi- +; model mean bias in absolute seasonality (similar to IPCC AR5 ch. 9 +; fig. 9.3). 
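Put briefly, the seasonality used in this script is the DJF climatology minus the JJA climatology, and the bias fields compare that quantity (or its absolute value) against the reference dataset. A rough xarray analogue, with hypothetical file and variable names:

```python
import xarray as xr

# Hypothetical inputs; the real script reads preprocessed data
# provided by the ESMValTool backend.
model = xr.open_dataset('model_tas.nc')['tas']
ref = xr.open_dataset('reference_tas.nc')['tas']


def seasonality(darray):
    """Return the DJF climatology minus the JJA climatology."""
    clim = darray.groupby('time.season').mean('time')
    return clim.sel(season='DJF') - clim.sel(season='JJA')


seas_mm = seasonality(model)
seas_ref = seasonality(ref)
abs_seas = abs(seas_mm)                   # absolute seasonality
bias_seas = seas_mm - seas_ref            # bias in seasonality
bias_abs_seas = abs_seas - abs(seas_ref)  # bias in absolute seasonality
```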
+;
+; Required diag_script_info attributes (diagnostics specific)
+;   none
+;
+; Optional diag_script_info attributes (diagnostic specific)
+;   projection: map projection, e.g., Mollweide, Mercator (default = Robinson)
+
+; Required variable_info attributes (variable specific)
+;   reference_dataset: name of reference dataset
+;
+; Optional variable_info attributes (variable specific)
+;   map_diff_levels: explicit contour levels for plotting
+;
+; Caveats
+;   none
+;
+; Modification history
+;   20211006-lauer_axel: removed write_plots
+;   20200714-lauer_axel: code rewritten for ESMValTool v2.0, renamed to
+;                        ch09_fig09-3.ncl (was seasonality_mm.ncl)
+;   20170622-lauer_axel: added tags for reporting
+;   20170320-lauer_axel: written.
+;
+; ############################################################################
+
+load "$diag_scripts/../interface_scripts/interface.ncl"
+
+load "$diag_scripts/shared/statistics.ncl"
+load "$diag_scripts/shared/plot/contour_maps.ncl"
+
+begin
+
+  enter_msg(DIAG_SCRIPT, "")
+
+  var0 = variable_info[0]@short_name
+  info0 = select_metadata_by_name(input_file_info, var0)
+  dim_MOD = ListCount(info0)
+  if (isatt(variable_info[0], "reference_dataset")) then
+    refname = variable_info[0]@reference_dataset
+  end if
+  names = metadata_att_as_array(info0, "dataset")
+  infiles = metadata_att_as_array(info0, "filename")
+
+  log_info("++++++++++++++++++++++++++++++++++++++++++")
+  log_info(DIAG_SCRIPT + " (var: " + var0 + ")")
+  log_info("++++++++++++++++++++++++++++++++++++++++++")
+
+  ; check for reference dataset definition
+  if (.not.(isvar("refname"))) then
+    error_msg("f", DIAG_SCRIPT, "", "no reference dataset defined in recipe")
+  end if
+
+  ; Set default values for non-required diag_script_info attributes
+  set_default_att(diag_script_info, "projection", "Robinson")
+
+  ; make sure path for (mandatory) netCDF output exists
+
+  work_dir = config_user_info@work_dir + "/"
+  ; Create work dir
+  system("mkdir -p " + work_dir)
+
+end
+
+begin
+  ; get reference dataset
+
+  ref_ind = ind(names .eq. refname)
+  if (ismissing(ref_ind)) then
+    error_msg("f", DIAG_SCRIPT, "", "reference dataset (" \
+              + refname + ") is missing")
+  end if
+
+  ; get multi-model mean index
+
+  mm_ind = ind(names .eq.
"MultiModelMean") + + if (ismissing(mm_ind)) then + error_msg("f", DIAG_SCRIPT, "", "multi-model mean is missing (required)") + end if + + climofiles = new(2, string) + climofiles(0) = infiles(mm_ind) + climofiles(1) = infiles(ref_ind) + + ; ======================================================================== + ; =========================== calculations =============================== + ; ======================================================================== + + ; note: 1) masking is handled by the backend + ; 2) multi-model mean is calculated by the backend + + ; read data and calculate seasonal means + + A0 = read_data(info0[mm_ind]) + mmdata_djf = time_operations(A0, -1, -1, "average", "DJF", True) + mmdata_jja = time_operations(A0, -1, -1, "average", "JJA", True) + delete(A0) + + A0 = read_data(info0[ref_ind]) + refdata_djf = time_operations(A0, -1, -1, "average", "DJF", True) + refdata_jja = time_operations(A0, -1, -1, "average", "JJA", True) + delete(A0) + + ; seasonality = DJF - JJA + + seas_ref = refdata_djf - refdata_jja + seas_mm = mmdata_djf - mmdata_jja + seas_mm@diag_script = (/DIAG_SCRIPT/) + copy_VarMeta(refdata_djf, seas_ref) + copy_VarMeta(seas_ref, seas_mm) + delete(refdata_djf) + delete(refdata_jja) + delete(mmdata_djf) + delete(mmdata_jja) + + abs_seas = abs(seas_mm) + bias_seas = seas_mm - seas_ref + bias_abs_seas = abs_seas - abs(seas_ref) + + ; ======================================================================== + ; ============================= plotting ================================= + ; ======================================================================== + + seas_mm@res_gsnMaximize = True ; use full page for the plot + seas_mm@res_cnFillOn = True ; color plot desired + seas_mm@res_cnLineLabelsOn = False ; contour lines + seas_mm@res_cnLinesOn = False + seas_mm@res_tiMainOn = True + seas_mm@res_gsnLeftStringFontHeightF = 0.015 + seas_mm@res_cnLevelSelectionMode = "ExplicitLevels" + seas_mm@res_mpOutlineOn = True + seas_mm@res_mpFillOn = False + seas_mm@res_lbLabelBarOn = True + seas_mm@res_gsnRightString = "" + seas_mm@res_mpFillDrawOrder = "PostDraw" ; draw map fill last + seas_mm@res_cnMissingValFillColor = "Gray" + seas_mm@res_tmYLLabelsOn = False + seas_mm@res_tmYLOn = False + seas_mm@res_tmYRLabelsOn = False + seas_mm@res_tmYROn = False + seas_mm@res_tmXBLabelsOn = False + seas_mm@res_tmXBOn = False + seas_mm@res_tmXTLabelsOn = False + seas_mm@res_tmXTOn = False + seas_mm@res_cnInfoLabelOn = False ; turn off cn info label + seas_mm@res_mpProjection = diag_script_info@projection + + ; -------------------------------------------------------------------- + ; create workspace + + wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "seasonality_" + var0) +; drawNDCGrid(wks) ; debugging option + + plots = new((/4/), graphic) + + ; -------------------------------------------------------------------- + ; plot contour map + + seas_mm@res_gsnDraw = False ; Do not draw yet + seas_mm@res_gsnFrame = False ; Don't advance frame. 
+ seas_mm@res_mpPerimOn = False + + seas_mm@res_lbTitleString = "(" + seas_mm@units + ")" + seas_mm@res_lbTitlePosition = "Bottom" + + seas_mm@res_lbLabelFontHeightF = 0.014 + seas_mm@res_lbTopMarginF = 0.1 + seas_mm@res_lbTitleFontHeightF = 0.014 + + seas_mm@res_tiMainFontHeightF = 0.016 + + seas_mm@res_tiMainString = "Multi Model Mean Bias" + + if (isatt(seas_mm, "res_cnLevels")) then + delete(seas_mm@res_cnLevels) + end if + + if (isatt(variable_info[0], "map_diff_levels")) then + seas_mm@res_cnLevels = variable_info@map_diff_levels + end if + + copy_VarMeta(seas_mm, abs_seas) + copy_VarMeta(seas_mm, bias_seas) + copy_VarMeta(seas_mm, bias_abs_seas) + + if (var0.eq."tas") then + pal = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-tas.rgb") + seas_mm@res_cnFillColors = pal + pal2 = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-tas-absdelta.rgb") + abs_seas@res_cnFillColors = pal2 + pal3 = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-tas-seasdelta.rgb") + bias_seas@res_cnFillColors = pal3 + bias_abs_seas@res_cnFillColors = pal3 + + if (isatt(seas_mm, "res_cnLevels")) then + delete(seas_mm@res_cnLevels) + end if + if (isatt(abs_seas, "res_cnLevels")) then + delete(abs_seas@res_cnLevels) + end if + if (isatt(bias_seas, "res_cnLevels")) then + delete(bias_seas@res_cnLevels) + end if + if (isatt(bias_abs_seas, "res_cnLevels")) then + delete(bias_abs_seas@res_cnLevels) + end if + + ; IPCC ch.9 fig. 9.3 scales for tas + seas_mm@res_cnLevels = fspan(-50, 50, 21) + abs_seas@res_cnLevels = fspan(5, 50, 10) + bias_seas@res_cnLevels = fspan(-8, 8, 17) + bias_abs_seas@res_cnLevels = fspan(-8, 8, 17) + end if + + seas_mm@res_tiMainString = "Multi Model Mean" + abs_seas@res_tiMainString = "Multi Model Mean of Absolute Seasonality" + bias_seas@res_tiMainString = "Multi Model Mean Bias in Seasonality" + bias_abs_seas@res_tiMainString = "Multi Model Mean Bias in Absolute " \ + + "Seasonality" + + ; panelling resources + + pres = True + pres@gsnPanelCenter = False + pres@gsnPanelRowSpec = True + pres@gsnPanelYWhiteSpacePercent = 5 + pres@gsnPanelXWhiteSpacePercent = 5 + + plots(0) = contour_map(wks, seas_mm, var0) + plots(1) = contour_map(wks, abs_seas, var0) + plots(2) = contour_map(wks, bias_seas, var0) + plots(3) = contour_map(wks, bias_abs_seas, var0) + + gsn_panel(wks, plots, (/2, 2/), pres) + + plotfile = wks@fullname + + ; ########################################### + ; # output to netCDF # + ; ########################################### + + seas_mm@var = "seas_" + var0 + seas_mm@var_long_name = "Multi-model mean seasonality" + seas_mm@var_units = seas_mm@units + + abs_seas@var = "abs_seas_" + var0 + abs_seas@var_long_name = "Multi-model mean of absolute seasonality" + abs_seas@var_units = abs_seas@units + + bias_seas@var = "bias_seas_" + var0 + bias_seas@var_long_name = "Multi-model mean bias in seasonality" + bias_seas@var_units = bias_seas@units + + bias_abs_seas@var = "bias_abs_seas_" + var0 + bias_abs_seas@var_long_name = "Multi-model mean bias in absolute seasonality" + bias_abs_seas@var_units = bias_abs_seas@units + + nc_filename = work_dir + "seasonality_" + var0 + ".nc" + nc_filename@existing = "append" + + nc_outfile = ncdf_write(seas_mm, nc_filename) + nc_outfile = ncdf_write(abs_seas, nc_filename) + nc_outfile = ncdf_write(bias_seas, nc_filename) + nc_outfile = ncdf_write(bias_abs_seas, nc_filename) + + ; ------------------------------------------------------------------------ + ; write provenance to netcdf output and plot file(s) 
(mean) + ; ------------------------------------------------------------------------ + + statistics = (/"clim", "diff", "mean"/) + domain = "global" + plottype = "geo" + + caption = "Multi model values for variable " + var0 + ", from top left " \ + + "to bottom right: mean, mean of absolute seasonality, mean bias in " \ + + "seasonality, mean bias in absolute seasonality. Reference dataset: " \ + + names(ref_ind) + ". Similar to IPCC AR5, fig. 9.3." + + log_provenance(nc_outfile, plotfile, caption, statistics, \ + domain, plottype, "", "", climofiles) + + leave_msg(DIAG_SCRIPT, "") + +end diff --git a/esmvaltool/diag_scripts/ipcc_ar5/ch09_fig09_42a.py b/esmvaltool/diag_scripts/ipcc_ar5/ch09_fig09_42a.py index cd8d0e70d1..d0e23f4008 100644 --- a/esmvaltool/diag_scripts/ipcc_ar5/ch09_fig09_42a.py +++ b/esmvaltool/diag_scripts/ipcc_ar5/ch09_fig09_42a.py @@ -17,8 +17,6 @@ Configuration options in recipe ------------------------------- -save : dict, optional - Keyword arguments for the `fig.saveplot()` function. axes_functions : dict, optional Keyword arguments for the plot appearance functions. dataset_style : str, optional @@ -27,19 +25,32 @@ matplotlib_style : str, optional Dataset style file (located in :mod:`esmvaltool.diag_scripts.shared.plot.styles_python.matplotlib`). +save : dict, optional + Keyword arguments for :func:`matplotlib.pyplot.savefig`. +seaborn_settings : dict, optional + Options for :func:`seaborn.set_theme` (affects all plots). """ import logging import os +from copy import deepcopy import iris -from iris import Constraint +import seaborn as sns from esmvaltool.diag_scripts.shared import ( - ProvenanceLogger, extract_variables, get_diagnostic_filename, - get_plot_filename, group_metadata, io, plot, run_diagnostic, - variables_available) + ProvenanceLogger, + extract_variables, + get_diagnostic_filename, + get_plot_filename, + group_metadata, + io, + plot, + run_diagnostic, + sorted_metadata, + variables_available, +) logger = logging.getLogger(os.path.basename(__file__)) @@ -54,8 +65,7 @@ def get_provenance_record(project, ancestor_files): 'pre-industrial control runs (smaller symbols).'.format(project)), 'statistics': ['mean'], 'domains': ['global'], - 'plot_types': ['scatter'], - 'authors': ['schl_ma'], + 'authors': ['schlund_manuel'], 'references': ['flato13ipcc'], 'realms': ['atmos'], 'themes': ['phys'], @@ -67,8 +77,6 @@ def get_provenance_record(project, ancestor_files): def plot_data(cfg, hist_cubes, pi_cubes, ecs_cube): """Plot data.""" - if not cfg['write_plots']: - return None x_data = [] y_data = [] dataset_names = [] @@ -76,9 +84,12 @@ def plot_data(cfg, hist_cubes, pi_cubes, ecs_cube): # Collect data for dataset in hist_cubes: + ecs = ecs_cube.extract(iris.Constraint(dataset=dataset)) + if ecs is None: + raise ValueError(f"No ECS data for '{dataset}' available") # Historical data - x_data.append(ecs_cube.extract(Constraint(dataset=dataset)).data) + x_data.append(ecs.data) y_data.append(hist_cubes[dataset].data) dataset_names.append(dataset) plot_kwargs.append({ @@ -88,7 +99,7 @@ def plot_data(cfg, hist_cubes, pi_cubes, ecs_cube): }) # PiControl data - x_data.append(ecs_cube.extract(Constraint(dataset=dataset)).data) + x_data.append(ecs.data) y_data.append(pi_cubes[dataset].data) dataset_names.append(dataset) plot_kwargs.append({ @@ -115,43 +126,47 @@ def plot_data(cfg, hist_cubes, pi_cubes, ecs_cube): def write_data(cfg, hist_cubes, pi_cubes, ecs_cube): """Write netcdf file.""" - datasets = list(hist_cubes) - - # Collect data + datasets = [] data_ecs = [] 
data_hist = [] data_pi = [] - for dataset in datasets: - data_ecs.append(ecs_cube.extract(Constraint(dataset=dataset)).data) + for dataset in list(hist_cubes): + ecs = ecs_cube.extract(iris.Constraint(dataset=dataset)) + if ecs is None: + raise ValueError(f"No ECS data for '{dataset}' available") + datasets.append(dataset) + data_ecs.append(ecs.data) data_hist.append(hist_cubes[dataset].data) data_pi.append(pi_cubes[dataset].data) # Create cube dataset_coord = iris.coords.AuxCoord(datasets, long_name='dataset') - tas_hist_coord = iris.coords.AuxCoord( - data_hist, - attributes={'exp': 'historical'}, - **extract_variables(cfg, as_iris=True)['tas']) - tas_picontrol_coord = iris.coords.AuxCoord( - data_pi, - attributes={'exp': 'piControl'}, - **extract_variables(cfg, as_iris=True)['tas']) - cube = iris.cube.Cube( - data_ecs, - var_name='ecs', - long_name='equilibrium_climate_sensitivity', - aux_coords_and_dims=[(dataset_coord, 0), (tas_hist_coord, 0), - (tas_picontrol_coord, 0)]) + tas_hist_coord = iris.coords.AuxCoord(data_hist, + attributes={'exp': 'historical'}, + **extract_variables( + cfg, as_iris=True)['tas']) + tas_picontrol_coord = iris.coords.AuxCoord(data_pi, + attributes={'exp': 'piControl'}, + **extract_variables( + cfg, as_iris=True)['tas']) + cube = iris.cube.Cube(data_ecs, + var_name='ecs', + long_name='Equilibrium Climate Sensitivity (ECS)', + aux_coords_and_dims=[(dataset_coord, 0), + (tas_hist_coord, 0), + (tas_picontrol_coord, 0)]) # Save file path = get_diagnostic_filename('ch09_fig09_42a', cfg) - io.save_iris_cube(cube, path) + io.iris_save(cube, path) return path def main(cfg): """Run the diagnostic.""" - input_data = cfg['input_data'].values() + sns.set_theme(**cfg.get('seaborn_settings', {})) + input_data = deepcopy(list(cfg['input_data'].values())) + input_data = sorted_metadata(input_data, ['short_name', 'exp', 'dataset']) project = list(group_metadata(input_data, 'project').keys()) project = [p for p in project if 'obs' not in p.lower()] if len(project) == 1: @@ -193,11 +208,14 @@ def main(cfg): # Provenance ancestor_files = [d['filename'] for d in input_data] + ancestor_files.append(ecs_filepath) provenance_record = get_provenance_record(project, ancestor_files) - if plot_path is not None: - provenance_record['plot_file'] = plot_path + provenance_record.update({ + 'plot_types': ['scatter'], + }) with ProvenanceLogger(cfg) as provenance_logger: provenance_logger.log(netcdf_path, provenance_record) + provenance_logger.log(plot_path, provenance_record) if __name__ == '__main__': diff --git a/esmvaltool/diag_scripts/ipcc_ar5/ch09_fig09_42b.py b/esmvaltool/diag_scripts/ipcc_ar5/ch09_fig09_42b.py new file mode 100644 index 0000000000..ba49c8afa5 --- /dev/null +++ b/esmvaltool/diag_scripts/ipcc_ar5/ch09_fig09_42b.py @@ -0,0 +1,303 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +"""Diagnostic script to plot figure 9.42b of IPCC AR5 chapter 9. + +Description +----------- +Calculate and plot the transient climate response (TCR) vs. the equilibrium +climate sensitivity (ECS) (see IPCC AR5 WG1 ch.9, fig. 9.42b). + +Author +------ +Manuel Schlund (DLR, Germany) + +Project +------- +CRESCENDO + +Configuration options in recipe +------------------------------- +dataset_style : str, optional + Dataset style file (located in + :mod:`esmvaltool.diag_scripts.shared.plot.styles_python`). The entry + ``marker`` is ignored when ``marker_file`` is given. +log_x : bool, optional (default: False) + Apply logarithm to X axis (ECS). 
+log_y : bool, optional (default: False) + Apply logarithm to Y axis (TCR). +marker_column : str, optional (default: 'marker') + Name of the column to look up markers in ``marker_file``. +marker_file : str, optional + CSV file with markers (can also be integers). Must have the columns + ``dataset`` and ``marker`` (or the column specified by ``marker_column``). + If a relative path is given, assumes that this is a pattern to search for + ancestor files. +savefig_kwargs : dict, optional + Keyword arguments for :func:`matplotlib.pyplot.savefig`. +seaborn_settings : dict, optional + Options for :func:`seaborn.set_theme` (affects all plots). +x_lim : list of float, optional (default: [1.5, 6.0]) + Plot limits for X axis (ECS). +y_lim : list of float, optional (default: [0.5, 3.5]) + Plot limits for Y axis (TCR). + +""" + +import logging +import os +from copy import deepcopy + +import iris +import matplotlib.pyplot as plt +import matplotlib.ticker as ticker +import numpy as np +import pandas as pd +import seaborn as sns +from scipy import stats + +from esmvaltool.diag_scripts.shared import ( + ProvenanceLogger, + get_diagnostic_filename, + get_plot_filename, + io, + iris_helpers, + plot, + run_diagnostic, +) + +logger = logging.getLogger(os.path.basename(__file__)) + +COLORS = sns.color_palette() + + +def _get_reg_line(x_cube, y_cube, cfg, n_points=100): + """Get regression line and means for two cubes.""" + x_lim = [cfg['x_lim'][0] - 1.0, cfg['x_lim'][1] + 1.0] + x_mean = np.mean(x_cube.data) + y_mean = np.mean(y_cube.data) + + # Apply logarithms if desired + if cfg.get('log_x'): + x_cube = x_cube.copy(np.ma.log(x_cube.data)) + if cfg.get('log_y'): + y_cube = y_cube.copy(np.ma.log(y_cube.data)) + + # Regression + reg = stats.linregress(x_cube.data, y_cube.data) + logger.info("Regression stats") + logger.info("Slope: %.2f", reg.slope) + logger.info("Intercept: %.2f", reg.intercept) + logger.info("R2: %.2f", reg.rvalue**2) + + # Regression line + x_reg = np.linspace(x_lim[0], x_lim[1], n_points) + y_reg = reg.slope * x_reg + reg.intercept + if cfg.get('log_x'): + x_reg = np.exp(x_reg) + if cfg.get('log_y'): + y_reg = np.exp(y_reg) + + return ((x_reg, y_reg), (x_mean, y_mean), reg.rvalue) + + +def _get_style(dataset_name, cfg): + """Get style for individual data points.""" + style = plot.get_dataset_style(dataset_name, cfg.get('dataset_style')) + if not cfg.get('marker_file'): + return style + marker_file = os.path.expanduser(cfg['marker_file']) + if not os.path.isabs(marker_file): + marker_file = io.get_ancestor_file(cfg, marker_file) + data_frame = pd.read_csv(marker_file) + marker_column = cfg['marker_column'] + for column in ('dataset', marker_column): + if column not in data_frame.columns: + raise ValueError( + f"Marker file '{marker_file}' does not contain necessary " + f"column '{column}'") + marker = data_frame[marker_column][data_frame['dataset'] == dataset_name] + if len(marker) != 1: + raise ValueError( + f"Expected exactly one entry for marker of '{dataset_name}' in " + f"file '{marker_file}', got {len(marker):d}") + style['mark'] = marker.values[0] + return style + + +def get_provenance_record(project, ancestor_files): + """Create a provenance record describing the diagnostic data and plot.""" + record = { + 'caption': + (f'Transient climate response (TCR) against equilibrium climate ' + f'sensitivity (ECS) for {project} models.'), + 'statistics': ['mean'], + 'domains': ['global'], + 'authors': ['schlund_manuel'], + 'references': ['flato13ipcc'], + 'realms': ['atmos'], + 'themes': 
['phys'], + 'ancestors': + ancestor_files, + } + return record + + +def plot_data(cfg, ecs_cube, tcr_cube): + """Plot data.""" + logger.debug("Plotting Fig. 9.42b of IPCC AR5") + (_, axes) = plt.subplots() + project = ecs_cube.attributes['project'] + + # Plot scatterplot + plot_types = [] + for dataset_name in ecs_cube.coord('dataset').points: + if dataset_name == 'MultiModelMean': + continue + style = _get_style(dataset_name, cfg) + + # Plot single point + if isinstance(style['mark'], str): + axes.plot( + ecs_cube.extract(iris.Constraint(dataset=dataset_name)).data, + tcr_cube.extract(iris.Constraint(dataset=dataset_name)).data, + marker=style['mark'], + linestyle='none', + markeredgecolor=style['color'], + markerfacecolor=style['facecolor'], + label=dataset_name, + ) + plot_types.append(0) + else: + axes.text( + ecs_cube.extract(iris.Constraint(dataset=dataset_name)).data, + tcr_cube.extract(iris.Constraint(dataset=dataset_name)).data, + str(int(style['mark'])), + size=7, + ha='center', + va='center', + ) + plot_types.append(1) + plot_type = np.mean(plot_types) + + # Plot regression line and MMM + (reg_line, mmm, r_value) = _get_reg_line(ecs_cube, tcr_cube, cfg) + if plot_type <= 0.5: + axes.plot(reg_line[0], reg_line[1], 'k-') + axes.plot(mmm[0], mmm[1], 'ro', label=f'{project} mean') + else: + axes.plot(reg_line[0], reg_line[1], linestyle='-', color=COLORS[0]) + + # Plot appearance + title = f"Linear TCR vs. ECS for {project} models" + axes.set_xlim(cfg['x_lim']) + axes.set_ylim(cfg['y_lim']) + if cfg.get('log_x'): + axes.set_xscale('log') + axes.get_xaxis().set_major_formatter(ticker.ScalarFormatter()) + axes.get_xaxis().set_minor_formatter(ticker.ScalarFormatter()) + axes.grid(True, which='both', axis='x') + title = f"Non-linear TCR vs. ECS for {project} models" + if cfg.get('log_y'): + axes.set_yscale('log') + axes.get_yaxis().set_major_formatter(ticker.ScalarFormatter()) + axes.get_yaxis().set_minor_formatter(ticker.ScalarFormatter()) + axes.grid(True, which='both', axis='y') + title = f"Non-linear TCR vs. 
ECS for {project} models" + axes.set_title(title) + axes.set_xlabel('ECS [K]') + axes.set_ylabel('TCR [K]') + if plot_type <= 0.5: + legend = axes.legend(loc='center left', + bbox_to_anchor=[1.05, 0.5], + borderaxespad=0.0, + ncol=2) + else: + legend = None + axes.text(0.05, 0.9, f'$R^2$ = {r_value**2:.2f}', transform=axes.transAxes) + + # Save plot + plot_path = get_plot_filename(f'{project}_ch09_fig09_42b', cfg) + savefig_kwargs = dict(cfg['savefig_kwargs']) + if legend is not None: + savefig_kwargs['bbox_extra_artists'] = [legend] + plt.savefig(plot_path, **savefig_kwargs) + logger.info("Wrote %s", plot_path) + plt.close() + return plot_path + + +def set_default_cfg(cfg): + """Set default values for cfg.""" + cfg = deepcopy(cfg) + cfg.setdefault('marker_column', 'marker') + cfg.setdefault('savefig_kwargs', { + 'dpi': 300, + 'orientation': 'landscape', + 'bbox_inches': 'tight', + }) + cfg.setdefault('x_lim', [1.5, 6.0]) + cfg.setdefault('y_lim', [0.5, 3.5]) + cfg['x_lim'] = np.array(cfg['x_lim']) + cfg['y_lim'] = np.array(cfg['y_lim']) + return cfg + + +def write_data(cfg, ecs_cube, tcr_cube): + """Write netcdf file.""" + project = ecs_cube.attributes['project'] + ecs_attrs = { + 'var_name': ecs_cube.var_name, + 'long_name': ecs_cube.long_name, + 'units': ecs_cube.units, + } + + # Write data to netcdf + ecs_coord = iris.coords.AuxCoord(ecs_cube.data, **ecs_attrs) + tcr_cube.add_aux_coord(ecs_coord, 0) + tcr_cube.attributes.pop('provenance', None) + netcdf_path = get_diagnostic_filename(f'{project}_ch09_fig09_42b', cfg) + io.iris_save(tcr_cube, netcdf_path) + return netcdf_path + + +def main(cfg): + """Run the diagnostic.""" + cfg = set_default_cfg(cfg) + sns.set_theme(**cfg.get('seaborn_settings', {})) + ecs_file = io.get_ancestor_file(cfg, 'ecs.nc') + tcr_file = io.get_ancestor_file(cfg, 'tcr.nc') + ecs_cube = iris.load_cube(ecs_file) + tcr_cube = iris.load_cube(tcr_file) + + # Project + if (ecs_cube.attributes.get('project', 'a') != tcr_cube.attributes.get( + 'project', 'b')): + raise ValueError( + "ECS and TCR input files have either no 'project' attribute or " + "differ in it") + project = ecs_cube.attributes['project'] + + # Remove missing data and use equal coordinate + [ecs_cube, tcr_cube + ] = iris_helpers.intersect_dataset_coordinates([ecs_cube, tcr_cube]) + + # Create plot + plot_path = plot_data(cfg, ecs_cube, tcr_cube) + + # Write netcdf file + netcdf_path = write_data(cfg, ecs_cube, tcr_cube) + + # Provenance + ancestor_files = [ecs_file, tcr_file] + provenance_record = get_provenance_record(project, ancestor_files) + provenance_record.update({ + 'plot_types': ['scatter'], + }) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(netcdf_path, provenance_record) + provenance_logger.log(plot_path, provenance_record) + + +if __name__ == '__main__': + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/ipcc_ar5/ch09_fig09_6.ncl b/esmvaltool/diag_scripts/ipcc_ar5/ch09_fig09_6.ncl new file mode 100644 index 0000000000..39af3a66ee --- /dev/null +++ b/esmvaltool/diag_scripts/ipcc_ar5/ch09_fig09_6.ncl @@ -0,0 +1,151 @@ +; ############################################################################# +; DIAGNOSTIC SCRIPT for reproducing IPCC ch. 9 fig. 9.6 +; Author: Bettina Gier (Uni Bremen & DLR, Germany) +; CRESCENDO project +; ############################################################################# +; +; Description +; Calculates centred pattern correlations for annual mean climatologies +; and plots them. Like IPCC ch. 
9 fig 9.6 +; +; Required diag_script_info attributes (diagnostics specific) +; +; Optional diag_script_info attributes (diagnostic specific) +; +; Required variable_info attributes (variable specific) +; none +; +; Optional variable_info attributes (variable specific) +; none +; +; Caveats +; +; +; Modification history +; 20190205-gier_bettina: Adapted to new ncl structure +; 20181012-gier_bettina: Ported to v2 +; 20171020-lauer_axel: added tags for reporting +; 20170404-gier_bettina: written. +; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" +load "$diag_scripts/shared/latlon.ncl" +load "$diag_scripts/shared/statistics.ncl" + +begin + enter_msg(DIAG_SCRIPT, "") + + var0 = variable_info[0]@short_name + info_items = select_metadata_by_name(input_file_info, var0) + infiles = metadata_att_as_array(info_items, "filename") + datasetnames = metadata_att_as_array(info_items, "dataset") + projectnames = metadata_att_as_array(info_items, "project") + dim_MOD = ListCount(info_items) + dim_VAR = ListCount(variable_info) + + ; Check and set reference dataset + ref_model = variable_info[0]@reference_dataset + if (variable_info[0]@reference_dataset.eq."None") then + error_msg("f", DIAG_SCRIPT, "", "no reference dataset is specified") + end if + log_info("reference model = " + ref_model) + ref_ind = ind(datasetnames.eq.ref_model) + if isatt(variable_info[0], "alternative_dataset") then + alt_ref = True + aref_ind = ind(datasetnames.eq.variable_info[0]@alternative_dataset) + else + alt_ref = False + end if + +end + +begin + ; Output directories + ncdf_dir = config_user_info@work_dir + "pattern_cor.nc" + modproj_dir = config_user_info@work_dir + "modprojnames.txt" + system("mkdir -p " + config_user_info@work_dir) +end + +begin +; ----------------------------------------------------------------------------- +; -------------------- Compute correlation for one var ------------------------ +; ----------------------------------------------------------------------------- + + ; Save list of preproc files for provenance in collect.ncl + preproc_files = metadata_att_as_array(info_items, "filename") + + ; Reference model + mod_idx = ispan(0, dim_MOD - 1, 1) + mod_ind_woref = mod_idx(ind(mod_idx.ne.ref_ind)) + delete(mod_idx) + + ; Make output array + all_cor = new((/dim_MOD-1/), float) + all_cor!0 = "models" + all_cor&models = datasetnames(mod_ind_woref) + ; Pass on alt models + if alt_ref then + all_cor@alt_obs = variable_info[0]@alternative_dataset + else + all_cor@alt_obs = "none" + end if + + ; Loop over models, with ref model processed first + model_ind = array_append_record(ref_ind, mod_ind_woref, 0) + do iloop = 0, dim_MOD - 1 + imod = model_ind(iloop) + log_info("Processing " + datasetnames(imod)) + + ; Extract model corresponding data + var = read_data(info_items[imod]) + + ; Calculate annual mean + data_yearly = time_operations(var, \ + toint(info_items[imod]@start_year), \ + toint(info_items[imod]@end_year), \ + "average", "yearly", True) + + ; Mean over the years + mean_years = dim_avg_n_Wrap(data_yearly, 0) + delete(data_yearly) + + ; Compute centred pattern correlation (ref_model will be first so no error) + if datasetnames(imod).eq.ref_model then + data_ref = mean_years + else + ; Since ref model processed first, move all models up one spot + all_cor(iloop-1) = pattern_cor(data_ref, mean_years, 1.0, 0) + end if + delete(mean_years) + delete(var) + end do + + ; Write data + all_cor@corvar = var0 + 
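+  ; the attributes written here and below (corvar, diagnostics, alt_obs,
+  ; input) are read back by ch09_fig09_6_collect.ncl when the
+  ; correlations of all variables are gathered into one plot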
all_cor@corvar_long = variable_info[0]@long_name + all_cor@var = "cor" + all_cor@diag_script = DIAG_SCRIPT + all_cor@diagnostics = variable_info[0]@diagnostic + all_cor@ncdf = ncdf_dir + all_cor@input = str_join(infiles, ",") + ncdf_outfile = ncdf_write(all_cor, ncdf_dir) + + ; Write provenance + statistics = (/"corr", "clim"/) + domains = (/"global"/) + plottype = "other" + authors = (/"gier_bettina", "bock_lisa"/) + references = (/"flato13ipcc"/) + log_provenance(ncdf_outfile, "n/a", "n/a", statistics, domains, "other", \ + authors, references, preproc_files) + + ; Write list of models with project (arrays get squished as attributes) + modnames = datasetnames(mod_ind_woref) + projnames = projectnames(mod_ind_woref) + modproj = (/modnames, projnames/) + asciiwrite(modproj_dir, modproj) + + leave_msg(DIAG_SCRIPT, "") + +end diff --git a/esmvaltool/diag_scripts/ipcc_ar5/ch09_fig09_6_collect.ncl b/esmvaltool/diag_scripts/ipcc_ar5/ch09_fig09_6_collect.ncl new file mode 100644 index 0000000000..3f6131469a --- /dev/null +++ b/esmvaltool/diag_scripts/ipcc_ar5/ch09_fig09_6_collect.ncl @@ -0,0 +1,426 @@ +; ############################################################################# +; DIAGNOSTIC PLOT SCRIPT for reproducing IPCC ch. 9 fig. 9.6 +; Author: Bettina Gier (DLR, Germany) +; CRESCENDO project +; ############################################################################# +; +; Description +; Calculated centred pattern correlations for annual mean climatologies +; and plots them. Like IPCC ch. 9 fig 9.6 +; +; Required diag_script_info attributes (diagnostics specific) +; +; Optional diag_script_info attributes (diagnostic specific) +; diag_script_info@diag_order: give order of plotting variables on the +; x-axis +; +; Required variable_info attributes (variable specific) +; none +; +; Optional variable_info attributes (variable specific) +; none +; +; Required variable attributes (defined in namelist) +; reference_dataset: name of reference data set (observations) +; +; Caveats +; Effect of different regridding methods not yet determined +; +; Modification history +; 20191011-A_bock_lisa: Add customizable order of variables +; 20190205-A_gier_bettina: Adapted to new ncl structures +; 20181101-A_gier_bettina: moved collect from main script +; +; ############################################################################# +load "$diag_scripts/../interface_scripts/interface.ncl" +load "$diag_scripts/shared/plot/style.ncl" + +begin + enter_msg(DIAG_SCRIPT, "") + + ; Define file type + file_type = config_user_info@output_file_type + if (ismissing(file_type)) then + file_type = "ps" + end if + +; ----------------------------------------------------------------------------- +; ------------------- Collecting Data ----------------------------------------- +; ----------------------------------------------------------------------------- + + ; List of correlation files and project names + file_list = tostring(diag_script_info@input_files) + "/pattern_cor.nc" + mp_file_list = tostring(diag_script_info@input_files) + "/modprojnames.txt" + + ; Filter non-existing files (mp files only useful if cor file exists) + file_list := file_list(ind(isfilepresent(file_list))) + mp_file_list := mp_file_list(ind(isfilepresent(file_list))) + + ; Set up auxiliary variables + var_collect = new(dimsizes(file_list), string) + var_diag = new(dimsizes(file_list), string) + alt_obs = new(dimsizes(file_list), string) + + ; Loop over files in list, read and append data + do ii = 0, dimsizes(file_list) - 1 + data_temp = 
ncdf_read(file_list(ii), "cor") + var_collect(ii) = data_temp@corvar + var_diag(ii) = data_temp@diagnostics + alt_obs(ii) = data_temp@alt_obs + + ; Make 2D array to store all data + if (.not.isdefined("data_all")) then + data_all = new((/dimsizes(data_temp), dimsizes(file_list)/), float) + data_all(:, ii) = data_temp + data_all!0 = "models" + data_all&models = data_temp&models + + ; Input file list for provenance + prov_files = str_split(data_temp@input, ",") + else + ; If model coordinates are identical + if (dimsizes(data_temp&models).eq.dimsizes(data_all&models)) \ + .and. all(data_temp&models.eq.data_all&models) then + data_all(:, ii) = (/data_temp/) + else + ; Loop over models in new data entry + do imod_temp = 0, dimsizes(data_temp&models) - 1 + ; If current model is not already part of the model coordinate + if (.not.any(data_temp&models(imod_temp) .eq. data_all&models)) then + ; Append record for model(imod) + data_new = extend_var_at(data_all, 0, \ + dimsizes(data_all&models)) + data_new(dimsizes(data_all&models), ii) = (/data_temp(imod_temp)/) + data_new&models(dimsizes(data_all&models)) = \ + (/data_temp&models(imod_temp)/) + delete(data_all) + data_all = data_new + delete(data_new) + else + ; Loop over models of data + do imod = 0, dimsizes(data_all&models)-1 + ; if neq data model is similar to current + ; entry, write data entry + if (data_all&models(imod).eq. data_temp&models(imod_temp)) then + data_all(imod, ii) = (/data_temp(imod_temp)/) + end if + end do + end if + end do + end if + ; Append input file list for provenance + prov_files := array_append_record(prov_files, \ + str_split(data_temp@input, ","), 0) + end if + delete(data_temp) + end do + data_all!1 = "vars" + data_all&vars = var_collect + delete(var_collect) + + ; Get project for models + projects = new(dimsizes(data_all&models), string) + + ; Loop over model-project files to complete project list + do ii = 0, dimsizes(mp_file_list) - 1 + modproj = asciiread(mp_file_list(ii), -1, "string") + mods = modproj(:dimsizes(modproj)/2-1) + projs = modproj(dimsizes(modproj)/2:) + + overlap_index = get1Dindex(data_all&models, mods) + projects(overlap_index) = projs + delete([/modproj, mods, projs, overlap_index/]) + end do + + data_all&models@project = projects + delete(projects) + + ; Sort diagnostics in the order specified in the settings + if (isatt(diag_script_info, "diag_order")) then + l_ok = True + if (dimsizes(data_all&vars).ne. \ + dimsizes(diag_script_info@diag_order)) then + error_msg("w", DIAG_SCRIPT, "", "specified order of diagnostics " + \ + "cannot be applied, number of diagnostics does not match") + l_ok = False + end if + pid = new(dimsizes(diag_script_info@diag_order), integer) + do ii = 0, dimsizes(diag_script_info@diag_order) - 1 + tmp = ind(var_diag.eq.diag_script_info@diag_order(ii)) + if (any(ismissing(tmp)) .or. 
dimsizes(tmp).gt.1) then + error_msg("w", DIAG_SCRIPT, "", "specified order of diagnostics " + \ + "cannot be applied, invalid entry in diag_order") + break + end if + pid(ii) = tmp + delete(tmp) + end do + if (l_ok) then + data_all := data_all(:, pid) + alt_obs := alt_obs(pid) + end if + end if + +; ------------------------------------------------------------------------- +; ----------------- Interim Functions ------------------------------------- +; ------------------------------------------------------------------------- + + undef("get_unique_entries") + function get_unique_entries(array) + ; + ; Arguments: + ; array: 1D array + ; + ; Return value: 1D array of unique entries in array + ; + ; Modification history: + ; 20170406-A_gier_bettina: written. + local dummy_array, unique_new, new_array, nodupes + begin + dummy_array = array + do while (dimsizes(dummy_array).ne.0) + if (.not.isdefined("unique")) then + unique = dummy_array(0) + else + unique_new = array_append_record(unique, dummy_array(0), 0) + delete(unique) + unique = unique_new + delete(unique_new) + end if + nodupes = ind(dummy_array.ne.dummy_array(0)) + ; Missing value index are dim 1 and would give an error + if (dimsizes(dummy_array).eq. \ + dimsizes(ind(dummy_array.eq.dummy_array(0)))) then + break + end if + new_array = dummy_array(nodupes) + delete(nodupes) + delete(dummy_array) + dummy_array = new_array + delete(new_array) + end do + return(unique) + end +; ----------------------------------------------------------------------------- +; ---------------------------- Plotting --------------------------------------- +; ----------------------------------------------------------------------------- + + ; Calculating necessary values + ; Number of Projects needed to determine span + ; For now just CMIP projects + c_projects = str_match_ic(data_all&models@project, "CMIP") + projects = get_unique_entries(c_projects) + n_var = dimsizes(data_all&vars) + + nr_projects = dimsizes(projects) + x_val = ispan(1, n_var*nr_projects, nr_projects) + + ; Mean and Median of Ensemble - without alt obs + obs_ind = get1Dindex(data_all&models, alt_obs) + if all(alt_obs.eq."none") then + mod_ind = ispan(0, dimsizes(data_all&models)-1, 1) + else + ex_ind = obs_ind(ind(.not.ismissing(obs_ind))) + mods_ind = ispan(0, dimsizes(data_all&models)-1, 1) + ex_ind@_FillValue = default_fillvalue("integer") + mods_ind@_FillValue = default_fillvalue("integer") + mod_ind = get1Dindex_Collapse(mods_ind, ex_ind) + delete(mods_ind) + delete(ex_ind) + end if + ; Split by project + means = new((/nr_projects, n_var/), float) + median = new((/nr_projects, n_var/), float) + + do iproj = 0, nr_projects - 1 + mod_proj_ind = ind(data_all&models@project(mod_ind).eq.projects(iproj)) + means(iproj, :) = dim_avg_n(data_all(mod_proj_ind, :), 0) + median(iproj, :) = dim_median_n(data_all(mod_proj_ind, :), 0) + delete(mod_proj_ind) + end do + + ; Create outfile directory + system("mkdir -p " + config_user_info@plot_dir) + + ; Plotting preparation + name = "" + outfile = config_user_info@plot_dir + name + "patterncor." + file_type + wks = gsn_open_wks(file_type, outfile) + wks@fullname = outfile + + ; Calc limits + y_min = min(data_all) + y_min := decimalPlaces(y_min-0.05, 1, True) + x_max = max(x_val) + nr_projects + + ; Set half line length + l_length = 0.3 + + ; Project Colors - TODO: let them be specified in cfg + fcolors = (/"black", "blue", "red"/) + + res = True + res@gsnDraw = False + res@gsnFrame = False + res@vpWidthF = 0.8 + n_square = 16. + if x_max.le. 
n_square then + res@vpHeightF = 0.8 + else + res@vpHeightF = 0.8*(n_square/x_max) + end if + font_height = 0.02/0.6 * res@vpHeightF + res@gsnMaximize = True + res@tiYAxisString = "Correlation" + res@trYMinF = y_min + res@trYMaxF = 1 + yspan = res@trYMaxF - res@trYMinF + res@trXMinF = 0 + res@trXMaxF = x_max + res@tmXBLabels = data_all&vars + res@tmXBValues = x_val + res@tmXBMode = "Explicit" + res@tmXBLabelAngleF = 90. + res@tmXBLabelJust = "CenterRight" + res@tmXBLabelFontHeightF = font_height + ; Set Marker Size to be half of line_length + marker_size = res@vpWidthF * l_length / (res@trXMaxF - res@trXMinF) + + ; Resources for model lines + res_lines = True ; polyline mods desired + res_lines@gsLineDashPattern = 0. ; solid line + res_lines@gsLineThicknessF = 2.5 ; line thickness + res_lines@tfPolyDrawOrder = "PreDraw" + + ; Resources for mean lines + res_mlines = True ; polyline mods desired + res_mlines@gsLineDashPattern = 0. ; solid line + res_mlines@gsLineThicknessF = 4. ; line thicker + res_mlines@tfPolyDrawOrder = "PreDraw" + + ; Resources for obs data markers + res_circ = True + res_circ@gsMarkerIndex = 16 + res_circ@gsMarkerColor = "green" + res_circ@gsMarkerSizeF = marker_size + res_circ@gsMarkerOpacityF = 0.4 + + ; Resources for white markers below median + res_circw = True + res_circw@gsMarkerIndex = 16 + res_circw@gsMarkerColor = "white" + res_circw@gsMarkerSizeF = 0.95*marker_size + res_circw@tfPolyDrawOrder = "PreDraw" + + ; Resources for median markers if required + res_circm = True + res_circm@gsMarkerIndex = 4 + res_circm@gsMarkerSizeF = marker_size + res_circm@gsMarkerThicknessF = 3. + res_circm@tfPolyDrawOrder = "Draw" + + ; Resources for legend text + res_text = True ; text mods desired + res_text@txFontHeightF = font_height ; change text size + res_text@txJust = "CenterLeft" ; text justification + + ; New x_val according to median! + x_val_proj = new((/nr_projects, n_var/), float) + ; space between projects in graph + if nr_projects.eq.1 then + d_proj = 1.5 ; offset + else + d_proj = 1 ; (nr_projects - 1.)/nr_projects + end if + do iproj = 0, nr_projects - 1 + do ivar = 0, n_var - 1 + x_val_proj(iproj, ivar) = ivar*nr_projects - 0.5 + d_proj*(iproj+1) + end do + end do + ; Start with blank plot! gs and xy marker sizes are different.. + plot = gsn_csm_blank_plot(wks, res) + + do iproj = 0, nr_projects - 1 + res_circm@gsMarkerColor = fcolors(iproj) + plot@$unique_string("dum_median")$ = gsn_add_polymarker( \ + wks, plot, x_val_proj(iproj, :), median(iproj, :), res_circm) + end do + + ; add lines for individual models + do ivar = 0, dimsizes(data_all(0, :))-1 + do iproj = 0, dimsizes(projects)-1 + ; Skip Project if no data for it + proj_mods = ind(data_all&models@project(mod_ind).eq.projects(iproj)) + if .not. 
all(ismissing(data_all(proj_mods, ivar))) then + proj_center = x_val_proj(iproj, ivar) + xx = (/proj_center-l_length, proj_center+l_length/) + ; Plot lines for mean + xx_mean = (/proj_center-l_length*1.5, proj_center+l_length*1.5/) + yy_mean = (/means(iproj, ivar), means(iproj, ivar)/) + res_mlines@gsLineColor = fcolors(iproj) + res_lines@gsLineColor = fcolors(iproj) + plot@$unique_string("dum")$ = gsn_add_polyline( \ + wks, plot, xx_mean, yy_mean, res_mlines) + do imod = 0, dimsizes(data_all(:, 0)) - 1 + ; Only plot if model in right project + if data_all&models@project(imod).eq.projects(iproj) then + ; Don't plot obs as lines + if (.not.ismissing(data_all(imod, ivar))) then + if (data_all&models(imod).ne.alt_obs(ivar)) then + yy = (/data_all(imod, ivar), data_all(imod, ivar)/) + plot@$unique_string("dum")$ = gsn_add_polyline( \ + wks, plot, xx, yy, res_lines) + end if + end if + end if + end do + plot@$unique_string("dum_ci")$ = gsn_add_polymarker( \ + wks, plot, x_val_proj(iproj, ivar), median(iproj, ivar), res_circw) + end if + delete(proj_mods) + end do + if (alt_obs(ivar).ne."none") then + ; Plot obs as circles + plot@$unique_string("dum_circ")$ = gsn_add_polymarker( \ + wks, plot, x_val(ivar), data_all(obs_ind(ivar), ivar), res_circ) + end if + end do + + y_min_label = res@trYMinF + 0.1*yspan + lb_stride = yspan/res@vpHeightF * font_height * 1.2 ; font_height*3 + plabel = projects + ; Draw Legend + do iproj = 0, dimsizes(projects)-1 + res_text@txFontColor = fcolors(iproj) + ; CMIP5 label has to be reduced to CMIP5 sometimes + if str_match_ind_ic(plabel, "CMIP5").eq.iproj then + plabel(iproj) = "CMIP5" + end if + plot@$unique_string("dum_l")$ = gsn_add_text(wks, plot, plabel(iproj),\ + x_val(0), y_min_label + \ + lb_stride*(iproj+1), res_text) + end do + res_text@txFontColor = "green" + plot@$unique_string("dum_l")$ = gsn_add_text(wks, plot, "OBS", \ + x_val(0), y_min_label, res_text) + + draw(plot) + frame(wks) + ; Write output + system("mkdir -p " + config_user_info@work_dir) + workpath = config_user_info@work_dir + "pattern_cor.nc" + ncdf_outfile = ncdf_write(data_all, workpath) + + ; collect meta-data and call ESMValMD function + caption = "Centered pattern correlations between models and observations" \ + + " for the annual mean climatologies " \ + + "(similar to IPCC ch. 9 fig. 9.6)." + statistics = (/"corr", "clim"/) + domains = (/"global"/) + plottype = "other" + authors = (/"gier_bettina", "bock_lisa"/) + references = (/"flato13ipcc"/) + log_provenance(ncdf_outfile, outfile, caption, statistics, domains, \ + plottype, authors, references, prov_files) + leave_msg(DIAG_SCRIPT, "") +end diff --git a/esmvaltool/diag_scripts/ipcc_ar5/ch12_calc_IAV_for_stippandhatch.ncl b/esmvaltool/diag_scripts/ipcc_ar5/ch12_calc_IAV_for_stippandhatch.ncl new file mode 100644 index 0000000000..e92d60ab43 --- /dev/null +++ b/esmvaltool/diag_scripts/ipcc_ar5/ch12_calc_IAV_for_stippandhatch.ncl @@ -0,0 +1,425 @@ +; ############################################################################# +; INTERANNUAL VARIABILITY MULTI-MODEL MEAN FOR STIPPLING SIGNIFICANCE IPCCCH12 +; Author: Ruth Lorenz (ETH Zurich, Switzerland) +; CRESCENDO project +; ############################################################################# +; +; Description +; Calculate Interannual variability from piControl runs for plotting +; significance with stippling and hatching, save for each model +; (iavmode: "each") or multi-model mean only. 
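In outline, the IAV estimate is a standard deviation across (optionally period-averaged) segments of the piControl series; a compact NumPy sketch of the annual-mean case, with synthetic data and a hypothetical period length:

```python
import numpy as np

rng = np.random.default_rng(0)
monthly = rng.normal(size=500 * 12)   # synthetic 500-yr piControl series
monthly = monthly[100 * 12:]          # drop the first 100 years (spin-up)

period_years = 20                     # cf. the periodlength attribute
n_periods = monthly.size // (period_years * 12)
periods = monthly[:n_periods * period_years * 12]
period_means = periods.reshape(n_periods, -1).mean(axis=1)

iav = period_means.std()              # spread across the period means
print(n_periods, iav)
```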
+;    Can either be calculated over the full time period of the piControl
+;    run (no periodlength given) or first averaged annually/seasonally over
+;    periodlength, with the standard deviation then calculated from the
+;    averaged periods.
+;
+; Required diag_script_info attributes (diagnostic specific)
+;    time_avg: time period to calculate IAV, e.g. annualclim, seasonalclim
+;
+; Optional diag_script_info attributes (diagnostic specific)
+;    iavmode: calculate multi-model mean of IAV over all models or calculate
+;             and save IAV for each individual model? (mmm, each)
+;    periodlength: length of periods to calculate IAV across, depends on
+;                  period lengths used in
+;                  ch12_calc_map_diff_mmm_stippandhatch.ncl;
+;                  if not given, the whole time period is used at once
+;
+; Caveats
+;    Needs lots of memory for 3D ocean variables
+;
+; Modification history
+;    20161219-A_lorenz_ruth: remove seasonal cycle before std if seasonal
+;    20161024-A_lorenz_ruth: adapted to ESMValTool
+;    20130501-A_sedlacek_jan: written for IPCC AR5 as get_natvar.ncl.
+;
+; #############################################################################
+
+load "$diag_scripts/../interface_scripts/interface.ncl"
+load "$diag_scripts/shared/statistics.ncl"
+
+begin
+  ; ##############################################################
+  ; # Fetch general parameters, set in namelist_collins13ipcc.xml#
+  ; # passed via environment variables by python code            #
+  ; ##############################################################
+  enter_msg(DIAG_SCRIPT, "")
+
+  ; 'datasets', 'variables' are fetched from the above 'interface.ncl' file
+  var0 = variable_info[0]@short_name
+  info_items = select_metadata_by_name(input_file_info, var0)
+  dim_MOD = ListCount(info_items)
+  dim_VAR = ListCount(variable_info)
+
+  ; Check required diag_script_info attributes
+  req_atts = (/"time_avg"/)
+  exit_if_missing_atts(diag_script_info, req_atts)
+end
+
+begin
+  ; Output netcdf directory
+  work_dir = config_user_info@work_dir
+  system("mkdir -p " + work_dir)
+end
+
+begin
+  ; #############################################
+  ; # Get parameters from ./variable_defs/*.ncl #
+  ; # passed via the 'info' attribute           #
+  ; #############################################
+  if (isvar("MyParam")) then
+    delete(MyParam)
+  end if
+  if (isatt(variable_info[0], "long_name")) then
+    MyParam = variable_info[0]@long_name
+    log_debug(" MyParam = " + MyParam)
+  else
+    error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \
+              "'long_name'")
+  end if
+end
+
+begin
+  ; ###########################################
+  ; # Get data and average time               #
+  ; ###########################################
+  ; get data from first dataset
+  imod = 0  ; NCL array indices start from zero
+  log_debug("processing " + info_items[imod]@dataset + "_" \
+            + info_items[imod]@exp + "_" \
+            + info_items[imod]@ensemble)
+
+  ; See ./interface_scripts/data_handling.ncl
+  A0 = read_data(info_items[imod])
+
+  ; Check dimensions
+  dims = getvardims(A0)
+  ndim = dimsizes(dims)
+
+  if (ndim .lt. 3) then
+    error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + " dimensions, " + \
+              "need 3 or 4")
+  end if
+  idx = ind(dims .eq. "lat" .or. dims .eq. "rlat" .or. dims .eq. "j")
+  if (ismissing(idx)) then
+    error_msg("f", DIAG_SCRIPT, "", "no lat dimension")
+  end if
+  lat_name = dims(idx)
+
+  idx = ind(dims .eq. "lon" .or. dims .eq. "rlon" .or. dims .eq. "i")
+  if (ismissing(idx)) then
+    error_msg("f", DIAG_SCRIPT, "", "no lon dimension")
+  end if
+  lon_name = dims(idx)
+
+  if (ndim .gt. 3) then
+    idx = ind(dims .eq. "lev" .or. dims .eq.
"plev") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no level dimension") + end if + lev_name = dims(idx) + end if + + index = ispan(0, dim_MOD - 1, 1) + if (diag_script_info@time_avg .eq. "seasonalclim") then + dim_seas = 4 + diag_script_info@seasons = (/0, 1, 2, 3/) + else + dim_seas = 1 + diag_script_info@seasons = (/0/) + end if + + Fill = default_fillvalue(typeof(A0)) + if (ndim .eq. 3) then + data1 = new((/dim_MOD, dim_seas, dimsizes(A0&lat), \ + dimsizes(A0&lon)/), typeof(A0), Fill) + if ((.not. isatt(diag_script_info, "iavmode")) .or. \ + (diag_script_info@iavmode .ne. "each")) then + IAV_mmm = new((/dim_seas, dimsizes(A0&lat), \ + dimsizes(A0&lon)/), typeof(A0), Fill) + end if + elseif (ndim .eq. 4) then + data1 = new((/dim_MOD, dim_seas, dimsizes(A0&$lev_name$), \ + dimsizes(A0&lat), dimsizes(A0&lon)/), \ + typeof(A0), Fill) + if ((.not. isatt(diag_script_info, "iavmode")) .or. \ + (diag_script_info@iavmode .ne. "each")) then + IAV_mmm = new((/dim_seas, dimsizes(A0&$lev_name$), \ + dimsizes(A0&lat)/), \ + typeof(A0), Fill) + end if + end if + + do imod = 0, dim_MOD - 1 + if (imod .ne. 0) then + delete(A0) + A0 = read_data(info_items[imod]) + dims = getvardims(A0) + idx = ind(dims .eq. "lat" .or. dims .eq. "rlat" .or. dims .eq. "j") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lat dimension") + end if + lat_name = dims(idx) + + idx = ind(dims .eq. "lon" .or. dims .eq. "rlon" .or. dims .eq. "i") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lon dimension") + end if + lon_name = dims(idx) + + if (ndim .eq. 4) then + idx = ind(dims .eq. "lev" .or. dims .eq. "plev") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no level dimension") + end if + lev_name = dims(idx) + end if + end if + + ; Calculate IAV (function in ./diag_scripts/lib/ncl/statistics.ncl) + ; cut the first 100 yr + time = A0&time + start_yr = cd_calendar(time(0), 0) + ind_end = dimsizes(time) - 1 + end_yr = cd_calendar(time(ind_end), 0) + if ((end_yr(0, 0) - start_yr(0, 0) + 1) .lt. 500) then + log_info("warning: Control run is less than 500 years.") + end if + if ((isatt(diag_script_info, "not_cut_100_years") .and. \ + diag_script_info@not_cut_100_years .eq. True) .or. \ + (end_yr(0, 0) - start_yr(0, 0) + 1) .lt. 500) then + new_start_yr = toint(start_yr(0, 0)) + else + new_start_yr = toint((start_yr(0, 0) + 100)) + end if + end_yr_int = toint(end_yr(0, 0)) + if (isatt(diag_script_info, "periodlength")) then + length_of_period = toint(diag_script_info@periodlength) + if ((end_yr(0, 0) - start_yr(0, 0) + 1) .lt. 500) then + nr_periods = toint(floor((dimsizes(time)) / \ + (length_of_period * 12.))) + else + nr_periods = toint(floor((dimsizes(time) - 12 * 100) / \ + (length_of_period * 12.))) + end if + if (nr_periods .lt. 1) then + error_msg("w", DIAG_SCRIPT, "", "time range too short, " + \ + "less than 1 period covered, continue with " + \ + "next model") + delete(start_yr) + delete(end_yr) + delete(time) + continue + end if + yr_possible = (dimsizes(time) - 12 * 100) / (length_of_period * 12.) + rest = yr_possible - nr_periods + start_yrs = ispan(new_start_yr, end_yr_int, length_of_period) + if (ndim .eq. 3) then + data_tmp = new((/nr_periods, dim_seas, dimsizes(A0&$lat_name$), \ + dimsizes(A0&$lon_name$)/), typeof(A0), Fill) + elseif (ndim .eq. 
4) then + data_tmp = new((/nr_periods, dim_seas, dimsizes(A0&$lev_name$), \ + dimsizes(A0&$lat_name$), dimsizes(A0&$lon_name$)/), \ + typeof(A0), Fill) + end if + + do per = 0, nr_periods - 1 + if ((rest .gt. 0) .and. (per .eq. nr_periods - 1)) then + new_end_yr = end_yr_int + else + new_end_yr = toint(start_yrs(per) + length_of_period - 1.0) + end if + ; calculate seasonal/annual averages over periods + if ((dim_seas .eq. 1) .and. \ + (diag_script_info@time_avg .eq. "annualclim")) then + if (ndim .eq. 3) then + data_tmp(per, 0, :, :) = \ + time_operations(A0, start_yrs(per), new_end_yr, "average", \ + diag_script_info@time_avg, True) + elseif (ndim .eq. 4) then + data_tmp(per, 0, :, :, :) = \ + time_operations(A0, start_yrs(per), new_end_yr, "average", \ + diag_script_info@time_avg, True) + end if + else + if (ndim .eq. 3) then + data_tmp(per, :, :, :) = \ + time_operations(A0, start_yrs(per), new_end_yr, "average", \ + diag_script_info@time_avg, True) + elseif (ndim .eq. 4) then + data_tmp(per, :, :, :, :) = \ + time_operations(A0, start_yrs(per), new_end_yr, "average", \ + diag_script_info@time_avg, True) + end if + end if + end do + data_dtr = dtrend_quadratic_msg_n(data_tmp, False, False, 0) + if (typeof(data_dtr) .ne. typeof(data1)) then + if ((typeof(data_dtr) .eq. "double") .and. \ + (typeof(data1) .eq. "float")) then + tmp = data_dtr + delete(data_dtr) + data_dtr = doubletofloat(tmp) + elseif ((typeof(data_dtr) .eq. "float") .and. \ + (typeof(data1) .eq. "double")) then + tmp = data_dtr + delete(data_dtr) + data_dtr = floattodouble(tmp) + else + error_msg("f", DIAG_SCRIPT, "", "Type conversion issue, " + \ + "data_dtr has a different type than data1 which is " + \ + "neither float or double.") + end if + end if + if (ndim .eq. 3) then + data1(imod, :, :, :) = dim_stddev_n(data_dtr, 0) + elseif (ndim .eq. 4) then + data1(imod, :, :, :, :) = dim_stddev_n(data_dtr, 0) + end if + delete(start_yrs) + delete(data_tmp) + delete(data_dtr) + else + data1_tmp = interannual_variability(A0, new_start_yr, end_yr_int, \ + diag_script_info@time_avg, \ + "quadratic") + if (ndim .eq. 3) then + data1(imod, :, :, :) = data1_tmp + elseif (ndim .eq. 4) then + data1(imod, :, :, :, :) = data1_tmp + end if + end if + delete(start_yr) + delete(end_yr) + delete(time) + +; if ((isatt(diag_script_info, "iavmode")) .and. \ +; (diag_script_info@iavmode .eq. "each")) then + outfile = "IAV_" + info_items[imod]@exp + "_" + \ + info_items[imod]@dataset + "_" + info_items[imod]@ensemble + "_" + \ + var0 + "_" + diag_script_info@time_avg + ".nc" + file_exist = isfilepresent(outfile) + if (file_exist .and. diag_script_info@overwrite .eq. False) then + continue + end if + if (imod .eq.0) then + if (ndim .eq. 3) then + data1!0 = "models" + data1!1 = "season" + data1!2 = "lat" + data1!3 = "lon" + data1&lon = A0&lon + elseif (ndim .eq. 4) then + data1!0 = "models" + data1!1 = "season" + data1!2 = lev_name + data1&$lev_name$ = A0&$lev_name$ + data1!3 = "lat" + data1!4 = "lon" + data1&lon = A0&lon + end if + if (diag_script_info@time_avg .eq. 
"seasonalclim") then + data1&season = (/ispan(0, dim_seas - 1, 1)/) + else + data1&season = (/0/) + end if + data1&lat = A0&lat + data1@diag_script = (/DIAG_SCRIPT/) + data1@var = "iav" + if (isatt(variable_info[0], "long_name")) then + data1@var_long_name = "iav of " + variable_info[0]@long_name + end if + if (isatt(variable_info[0], "units")) then + data1@units = variable_info[0]@units + end if + end if + ; ########################################### + ; # Output to netCDF # + ; ########################################### + if (dim_seas .eq. 1) then + if (ndim .eq. 3) then + write_data = data1(imod, 0, :, :) + elseif (ndim .eq. 4) then + write_data = data1(imod, 0, :, :, :) + end if + else + if (ndim .eq. 3) then + write_data = data1(imod, :, :, :) + elseif (ndim .eq. 4) then + write_data = data1(imod, :, :, :, :) + end if + end if + ; Function in ~/interface_scripts/auxiliary.ncl + ncdf_outfile = ncdf_write(write_data, work_dir + outfile) +; end if + end do ; imod + + if ((.not. isatt(diag_script_info, "iavmode")) .or. \ + (diag_script_info@iavmode .ne. "each")) then + ; ########################################### + ; # Calculate multi-model mean of IAV # + ; ########################################### + do s = 0, dim_seas - 1 + if (ndim .eq. 3) then + IAV_mmm(s, :, :) = \ + rm_single_dims(dim_avg_n(data1(:, s, :, :), 0) * sqrt(2.)) + elseif (ndim .eq. 4) then + ; Calculate the zonal average + tmp_zon = dim_avg_n_Wrap(data1, 4) ; data1(mon, seas, lev, lat, lon) + ; Calculate multi-model mean + IAV_mmm(s, :, :) = \ + rm_single_dims(dim_avg_n(tmp_zon(:, s, :, :), 0) * sqrt(2.)) + end if + end do + + if (ndim .eq. 3) then + IAV_mmm!0 = "season" + IAV_mmm!1 = "lat" + IAV_mmm!2 = "lon" + IAV_mmm&lon = A0&lon + elseif (ndim .eq. 4) then + IAV_mmm!0 = "season" + IAV_mmm!1 = lev_name + IAV_mmm&$lev_name$ = A0&$lev_name$ + IAV_mmm!2 = "lat" + end if + if (isatt(diag_script_info, "seasons")) then + IAV_mmm&season = (/ispan(0, dim_seas - 1, 1)/) + else + IAV_mmm&season = (/0/) + end if + IAV_mmm&lat = A0&lat + + ; ########################################### + ; # Output to netCDF # + ; ########################################### + outfile = "IAV_mmm_piControl_" + \ + var0 + "_" + diag_script_info@time_avg + ".nc" + + IAV_mmm@diag_script = (/DIAG_SCRIPT/) + IAV_mmm@var = "iav" + if (isatt(variable_info[0], "long_name")) then + IAV_mmm@var_long_name = "iav of " + variable_info[0]@long_name + end if + if (isatt(variable_info[0], "units")) then + IAV_mmm@units = variable_info[0]@units + end if + IAV_mmm@comment = metadata_att_as_array(info_items, "dataset") + ; Function in ~/interface_scripts/auxiliary.ncl + ncdf_outfile = ncdf_write(IAV_mmm, work_dir + outfile) + end if + + ; collect meta-data + nc_file = ncdf_outfile + caption = "Inter-annual variability based on piControl runs." 
+  statistics = ("var")
+  domains = ("global")
+  plot_types = ("other")
+  authors = (/"lorenz_ruth"/)
+  references = (/"collins13ipcc"/)
+  infiles = metadata_att_as_array(info_items, "filename")
+  log_provenance(nc_file, "n/a", caption, statistics, domains, \
+                 plot_types, authors, references, infiles)
+
+  leave_msg(DIAG_SCRIPT, "")
+
+end
diff --git a/esmvaltool/diag_scripts/ipcc_ar5/ch12_calc_map_diff_mmm_stippandhatch.ncl b/esmvaltool/diag_scripts/ipcc_ar5/ch12_calc_map_diff_mmm_stippandhatch.ncl
new file mode 100644
index 0000000000..38ec2debab
--- /dev/null
+++ b/esmvaltool/diag_scripts/ipcc_ar5/ch12_calc_map_diff_mmm_stippandhatch.ncl
@@ -0,0 +1,479 @@
+; #############################################################################
+; Multi-model mean change map with significance
+; Author: Ruth Lorenz (ETH, Switzerland)
+; CRESCENDO project
+; #############################################################################
+;
+; Caveats
+;    Only handles one variable at a time. Appends multiple output variables
+;    (variable to_plot, significance to_plot_signif, not significant
+;    to_plot_not_signif, and number of models in mmm to model_nr)
+;    to the same file; if the file already exists, it is overwritten.
+;
+; Modification history
+;    20181307-A_lorenz_ruth: Ported to version 2
+;    20170120-A_lorenz_ruth: separated plotting from calculations
+;    20161027-A_lorenz_ruth: added stippling and hatching for significance
+;    20161024-A_lorenz_ruth: adjusted to plot multiple figures
+;                            with one diagnostic script
+;    20160621-A_lorenz_ruth: adapted to ESMValTool
+;    20130501-A_sedlacek_jan: written for IPCC AR5.
+;
+; #############################################################################
+
+; A temporary file written by the invoking Python script
+; Passes on a number of variables from Python to NCL
+load "$diag_scripts/../interface_scripts/interface.ncl"
+
+load "$diag_scripts/shared/statistics.ncl"
+load "$diag_scripts/shared/ensemble.ncl"
+load "$diag_scripts/shared/scaling.ncl"
+
+load "$diag_scripts/shared/plot/style.ncl"
+
+begin
+
+  ; ##############################################################
+  ; # Fetch general parameters, set in namelist_collins13ipcc.xml#
+  ; # passed via environment variables by python code            #
+  ; ##############################################################
+  enter_msg(DIAG_SCRIPT, "")
+
+  ; 'datasets', 'variables' are fetched from the above 'interface.ncl' file
+  var0 = variable_info[0]@short_name
+  info_items = select_metadata_by_name(input_file_info, var0)
+  dim_MOD = ListCount(info_items)
+  dim_VAR = ListCount(variable_info)
+
+  ; Save some dataset attributes as arrays for index-based selection below
+  dataset_names = metadata_att_as_array(info_items, "dataset")
+  dataset_exps = metadata_att_as_array(info_items, "exp")
+  dataset_ensembles = metadata_att_as_array(info_items, "ensemble")
+  dataset_startyears = metadata_att_as_array(info_items, "start_year")
+
+  ; Check required diag_script_info attributes
+  req_atts = (/"scenarios", "periods", "time_avg", "label"/)
+  exit_if_missing_atts(diag_script_info, req_atts)
+
+end
+
+begin
+
+  ; Output netcdf directory
+  work_dir = config_user_info@work_dir
+  system("mkdir -p " + work_dir)
+
+  input_dir = diag_script_info@input_files(1)
+
+end
+
+begin
+
+  ; #############################################
+  ; # Get parameters from ./variable_defs/*.ncl #
+  ; # passed via the 'info' attribute           #
+  ; #############################################
+
+  if(isvar("MyParam")) then
+    delete(MyParam)
+  end if
+  if(isatt(variable_info[0],
"long_name")) then + MyParam = variable_info[0]@long_name + log_info(" MyParam = " + MyParam) + else + error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \ + "'long_name'") + end if + + ; ########################################### + ; # Get data and average time # + ; ########################################### + + ; Get data from first dataset + imod = 0 ; NCL array indicies start from zero + log_debug("processing " + info_items[imod]@dataset + "_" \ + + info_items[imod]@exp + "_" \ + + info_items[imod]@ensemble) + + ; See ./interface_scripts/data_handling.ncl + A0 = read_data(info_items[imod]) + + ; Check dimensions + dims = getvardims(A0) + if (dimsizes(dims) .lt. 2) then + error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + " dimensions, " + \ + "need 2 or 3") + end if + idx = ind(dims .eq. "lat" .or. dims .eq. "rlat" .or. dims .eq. "j") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lat dimension") + end if + lat_name = dims(idx) + idx = ind(dims .eq. "lon" .or. dims .eq. "rlon" .or. dims .eq. "i") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lon dimension") + end if + lon_name = dims(idx) + + if (isatt(diag_script_info, "seasons")) then + dim_seas = dimsizes(diag_script_info@seasons) + else + dim_seas = 1 + end if + + ; Check if diag_script_info@label has correct size, otherwise error message + dim_periods = dimsizes(diag_script_info@periods) + dim_scenarios = dimsizes(diag_script_info@scenarios) + dim_label = dim_seas * dim_scenarios * dim_periods + if (dim_label .ne. dimsizes(diag_script_info@label)) then + error_msg("f", DIAG_SCRIPT, "", "wrong number of labels, needs to be " + \ + "seasons * scenarios * periods") + end if + + annots = project_style(info_items, diag_script_info, "annots") + Fill = default_fillvalue(typeof(A0)) + if (isatt(diag_script_info, "iavmode")) then + if (diag_script_info@iavmode .eq. "each") then + iav = new((/dim_MOD, dim_seas, dimsizes(A0&lat), \ + dimsizes(A0&lon)/), typeof(A0), Fill) + end if + end if + data2 = new((/dim_MOD, dim_seas, dimsizes(A0&lat), \ + dimsizes(A0&lon)/), typeof(A0), Fill) + + do imod = 0, dim_MOD - 1 + log_info(info_items[imod]@dataset) + ; Average over time + if (imod .eq. 0) then + if ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .ne. 4)) then + tmp_seas = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + data1 = tmp_seas(diag_script_info@seasons, :, :) + delete(tmp_seas) + else + data1 = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + end if + else + if isvar((/"A1"/)) then + delete(A1) + end if + A1 = read_data(info_items[imod]) + dims = getvardims(A1) + idx = ind(dims .eq. "lat" .or. dims .eq. "rlat" .or. dims .eq. "j") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lat dimension") + end if + lat_name = dims(idx) + idx = ind(dims .eq. "lon" .or. dims .eq. "rlon" .or. dims .eq. "i") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lon dimension") + end if + lon_name = dims(idx) + ; Average over time + if ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .ne. 4)) then + tmp_seas = time_operations(A1, -1, -1, "average", \ + diag_script_info@time_avg, True) + data1 = tmp_seas(diag_script_info@seasons, :, :) + delete(tmp_seas) + else + data1 = time_operations(A1, -1, -1, "average", \ + diag_script_info@time_avg, True) + end if + end if + if (dim_seas .eq. 
1) then
+      data2(imod, 0, :, :) = data1
+    else
+      data2(imod, :, :, :) = data1
+    end if
+
+    if (isatt(diag_script_info, "iavmode")) then
+      if (diag_script_info@iavmode .eq. "each") then
+        ; #########################################################
+        ; # Read natural variability for stippling for each model #
+        ; #########################################################
+        log_info("Read natural variability for stippling for each model")
+        iav_file = addfile(input_dir + "/" + "IAV_piControl_" + \
+                           info_items[imod]@dataset + "_" + \
+                           info_items[imod]@ensemble + "_" + \
+                           var0 + "_" + diag_script_info@time_avg + \
+                           ".nc", "r")
+        if ((dim_seas .eq. 1) .and. \
+            (diag_script_info@time_avg .eq. "annualclim")) then
+          iav(imod, 0, :, :) = iav_file->iav
+        elseif ((dim_seas .eq. 1) .and. \
+                (diag_script_info@time_avg .eq. "seasonalclim")) then
+          tmp_iav = iav_file->iav
+          iav(imod, 0, :, :) = tmp_iav(diag_script_info@seasons, :, :)
+        else
+          iav(imod, :, :, :) = iav_file->iav
+        end if
+      end if
+    end if
+  end do
+
+  if (isatt(diag_script_info, "iavmode")) then
+    if (diag_script_info@iavmode .eq. "each") then
+      ; Calculate multi-model mean of iav
+      log_info("Calculate multi-model mean of natural variability")
+      natvar_mmm = rm_single_dims(dim_avg_n(iav, 0) * sqrt(2.))
+    elseif (diag_script_info@iavmode .eq. "mmm") then
+      log_info("Read precalculated natural variability for multi-model")
+      natvar_file = addfile(input_dir + "/" + "IAV_mmm_piControl_" + \
+                            var0 + "_" + \
+                            diag_script_info@time_avg + ".nc", "r")
+      natvar_mmm = natvar_file->iav
+      if ((dim_seas .ne. 4) .and. \
+          (diag_script_info@time_avg .ne. "annualclim")) then
+        tmp = natvar_mmm
+        delete(natvar_mmm)
+        natvar_mmm = rm_single_dims(tmp(diag_script_info@seasons, :, :))
+      elseif (diag_script_info@time_avg .eq. "annualclim") then
+        tmp = natvar_mmm
+        delete(natvar_mmm)
+        natvar_mmm = rm_single_dims(tmp)
+      end if
+    else
+      error_msg("f", DIAG_SCRIPT, "", "Error: This iavmode is not known")
+    end if
+  else
+    ; Read already calculated mmm iav as default
+    log_info("Read precalculated natural variability for multi-model")
+    natvar_file = addfile(input_dir + "/" + "IAV_mmm_piControl_" + \
+                          var0 + "_" + \
+                          diag_script_info@time_avg + ".nc", "r")
+    natvar_mmm = natvar_file->iav
+    if ((dim_seas .ne. 4) .and. \
+        (diag_script_info@time_avg .ne. "annualclim")) then
+      tmp = natvar_mmm
+      delete(natvar_mmm)
+      natvar_mmm = rm_single_dims(tmp(diag_script_info@seasons, :, :))
+    elseif (diag_script_info@time_avg .eq.
"annualclim") then + tmp = natvar_mmm + delete(natvar_mmm) + natvar_mmm = rm_single_dims(tmp) + end if + end if + data2!0 = "models" + data2!1 = "season" + data2!2 = "lat" + data2!3 = "lon" + data2&models = annots + if (isatt(diag_script_info, "seasons")) then + data2&season = (/ispan(0, dim_seas - 1, 1)/) + else + data2&season = (/0/) + end if + + ; ############################################## + ; # Calculate change from hist to periods # + ; ############################################## + if (isatt(diag_script_info, "reference_run")) then + reference_run_name = diag_script_info@reference_run + else + log_info("No reference run name given, set to default 'historical'") + reference_run_name = "historical" + end if + + ; Loop over rcps and periods + dim_scen = dimsizes(diag_script_info@scenarios) + dim_per = dimsizes(diag_script_info@periods) + + to_plot = new((/dim_scen * dim_per * dim_seas, dimsizes(A0&lat), \ + dimsizes(A0&lon)/), typeof(data1), Fill) + if (isatt(diag_script_info, "percent")) then + percent = diag_script_info@percent + ref = new((/dim_scen * dim_per * dim_seas, dimsizes(A0&lat), \ + dimsizes(A0&lon)/), typeof(data1), Fill) + else + percent = 0 + end if + to_plot_signif = new((/dim_scen * dim_per * dim_seas, dimsizes(A0&lat), \ + dimsizes(A0&lon)/), typeof(data1), Fill) + to_plot_not_signif = new((/dim_scen * dim_per * dim_seas, dimsizes(A0&lat), \ + dimsizes(A0&lon)/), typeof(data1), Fill) + model_number = new((/dim_scen * dim_per * dim_seas/), integer) + i = 0 + do seas = 0, dim_seas - 1 ; loop over seasons + do rcp = 0, dim_scen - 1 ; loop over rcps + do per = 0, dim_per - 1 ; loop over periods + idx_rcp = ind(dataset_exps .eq. diag_script_info@scenarios(rcp) .and. \ + dataset_startyears .eq. diag_script_info@periods(per)) + proj = data2(idx_rcp, seas, :, :) + + ; Cut low values + proj = where(abs(proj) .gt. 1.e-14, proj, proj@_FillValue) + proj_avg = dim_avg_n_Wrap(proj, 0) + + ; Find historical runs from same datasets as in rcp + do jj = 0, dimsizes(idx_rcp) - 1 + tmp_idx = ind(dataset_names .eq. dataset_names(idx_rcp(jj)) .and. \ + dataset_exps .eq. reference_run_name .and. \ + dataset_ensembles .eq. dataset_ensembles(idx_rcp(jj))) + if (isdefined("idx_hist")) then + idx_hist := array_append_record(idx_hist, tmp_idx, 0) + else + idx_hist = tmp_idx + end if + delete(tmp_idx) + end do + + base = data2(idx_hist, seas, :, :) + + ; Cut low values + base = where(abs(base) .gt. 1.e-14, base, base@_FillValue) + base_avg = dim_avg_n_Wrap(base, 0) + + var_diff = proj - base + delete([/base, proj/]) + to_plot(i, :, :) = proj_avg - base_avg + if (isatt(diag_script_info, "percent")) then + ref(i, :, :) = base_avg + end if + + ; Determine sigma and sign for significance + var_pos_signif = dim_num_n(where(var_diff .gt. 0., 1., \ + to_plot@_FillValue) .eq. 1., 0) + var_neg_signif = dim_num_n(where(var_diff .lt. 0., 1., \ + to_plot@_FillValue) .eq. 1., 0) + var_pos_signif_tmp = \ + var_pos_signif / where((var_pos_signif + var_neg_signif) \ + .ne. 0., var_pos_signif + \ + var_neg_signif, to_plot@_FillValue) + var_neg_signif_tmp = \ + var_neg_signif / where((var_pos_signif + var_neg_signif) \ + .ne. 0., var_pos_signif + \ + var_neg_signif, to_plot@_FillValue) + signif_and_sign = where(var_pos_signif_tmp .ge. 0.9 .or. \ + var_neg_signif_tmp .ge. 0.9, 1., 0.) + signif_and_sign = where(ismissing(signif_and_sign), 0.,\ + signif_and_sign) + if (dim_seas .ne. 1) then + sigma_and_sign = where(abs(to_plot(i, :, :)) .gt. \ + abs(2. * natvar_mmm(seas, :, :)), 1., 0.) 
+ to_plot_not_signif(i, :, :) = where(abs(to_plot(i, :, :)) .lt. \ + abs(natvar_mmm(seas, :, :)),\ + 1., 0.) + else + sigma_and_sign = where(abs(to_plot(i, :, :)) .gt. \ + abs(2. * natvar_mmm), 1., 0.) + to_plot_not_signif(i, :, :) = where(abs(to_plot(i, :, :)) .lt. \ + abs(natvar_mmm),\ + 1., 0.) + end if + to_plot_signif(i, :, :) = where(signif_and_sign .eq. 1, \ + sigma_and_sign, 0.) + model_number(i) = dimsizes(idx_rcp) + i = i + 1 + delete([/idx_hist, idx_rcp, var_diff/]) + end do ; per + end do ; rcp + end do ; seas + + to_plot!0 = "panel" + to_plot&panel = diag_script_info@label + to_plot!1 = "lat" + to_plot&lat = A0&lat + to_plot!2 = "lon" + to_plot&lon = A0&lon + copy_VarMeta(to_plot, to_plot_signif) + copy_VarMeta(to_plot, to_plot_not_signif) + if (isatt(diag_script_info, "percent")) then + percent = diag_script_info@percent + if (percent .eq. 1) then + to_plot = 100 * to_plot / where(ref .ne. 0., ref, ref@_FillValue) + to_plot@units = "%" + delete(ref) + end if + end if + + ; ########################################### + ; # Other Metadata: diag_script, var # + ; ########################################### + + ; Add to to_plot, as attributes without prefix + if (isatt(to_plot, "diag_script")) then ; add to existing entries + temp = to_plot@diag_script + delete(to_plot@diag_script) + to_plot@diag_script = array_append_record(temp, (/DIAG_SCRIPT/), 0) + delete(temp) + else ; Add as new attribute + to_plot@diag_script = (/DIAG_SCRIPT/) + end if + to_plot@var = var0 ; Overwrite existing entry + if (isatt(variable_info[0], "long_name")) then + to_plot@var_long_name = variable_info[0]@long_name + end if + + ; Check units and adjust for plotting if necessary + if (percent .eq. 0) then + to_plot@units = variable_info[0]@units + end if + ; difference in K is the same as difference in degC, only change label + if (to_plot@units.eq."K") then + to_plot@units = "degC" + end if + if (isatt(diag_script_info, "plot_units")) then + to_plot = convert_units(to_plot, diag_script_info@plot_units) + end if + + if(isatt(to_plot_signif, "diag_script")) then ; add to existing entries + temp = to_plot_signif@diag_script + delete(to_plot_signif@diag_script) + to_plot_signif@diag_script = array_append_record(temp, (/DIAG_SCRIPT/), 0) + delete(temp) + else ; Add as new attribute + to_plot_signif@diag_script = (/DIAG_SCRIPT/) + end if + + to_plot_signif@var = "signif" + to_plot_signif@var_long_name = "significance: mean change larger than " \ + + "2*natvar and 90% of models have the " \ + + "same sign" + to_plot_signif@units = 1 + + if(isatt(to_plot_not_signif, "diag_script")) then + temp = to_plot_not_signif@diag_script + delete(to_plot_not_signif@diag_script) + to_plot_not_signif@diag_script = array_append_record(temp, \ + (/DIAG_SCRIPT/), 0) + delete(temp) + else ; Add as new attribute + to_plot_not_signif@diag_script = (/DIAG_SCRIPT/) + end if + to_plot_not_signif@var = "not_signif" + to_plot_not_signif@var_long_name = "not significant: mean change " \ + + "smaller than 1*natvar" + to_plot_not_signif@units = 1 + + model_number!0 = "panel" + model_number&panel = diag_script_info@label + if(isatt(model_number, "diag_script")) then ; add to existing entries + temp = model_number@diag_script + delete(model_number@diag_script) + model_number@diag_script = array_append_record(temp, (/DIAG_SCRIPT/), 0) + delete(temp) + else ; Add as new attribute + model_number@diag_script = (/DIAG_SCRIPT/) + end if + model_number@var = "model_nr" + model_number@var_long_name = "number of models in mmm" + model_number@units = 1 
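+
+  ; The significance masks written below encode a two-part test at each
+  ; grid cell: "significant" (stippled) where the multi-model mean change
+  ; exceeds twice the natural variability AND at least 90% of models agree
+  ; on the sign of the change; "not significant" (hatched) where the mean
+  ; change stays below one natural-variability unit. A minimal commented
+  ; sketch with hypothetical scalar values (illustrative names only, not
+  ; executed here):
+  ;   change = 1.5    ; multi-model mean change at one cell
+  ;   natvar = 0.6    ; natural variability at that cell
+  ;   agree  = 0.92   ; fraction of models sharing the majority sign
+  ;   signif = where(abs(change) .gt. 2. * natvar .and. agree .ge. 0.9, \
+  ;                  1., 0.)                              ; -> 1, stippled
+  ;   not_signif = where(abs(change) .lt. natvar, 1., 0.) ; -> 0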
+ + ; ########################################### + ; # Optional output to netCDF # + ; ########################################### + ; Optional output (controlled by diag_script_info) + to_plot@ncdf = variable_info[0]@diagnostic + ".nc" + ncdf_file = work_dir + "/" + to_plot@ncdf + ncdf_file@existing = "overwrite" + ncdf_outfile = ncdf_write(to_plot, ncdf_file) + ncdf_file@existing = "append" + ncdf_outfile = ncdf_write(to_plot_signif, ncdf_file) + ncdf_outfile = ncdf_write(to_plot_not_signif, ncdf_file) + ncdf_outfile = ncdf_write(model_number, ncdf_file) + + leave_msg(DIAG_SCRIPT, "") +end diff --git a/esmvaltool/diag_scripts/ipcc_ar5/ch12_calc_map_diff_scaleT_mmm_stipp.ncl b/esmvaltool/diag_scripts/ipcc_ar5/ch12_calc_map_diff_scaleT_mmm_stipp.ncl new file mode 100644 index 0000000000..4861af08c0 --- /dev/null +++ b/esmvaltool/diag_scripts/ipcc_ar5/ch12_calc_map_diff_scaleT_mmm_stipp.ncl @@ -0,0 +1,381 @@ +; ############################################################################# +; Multi-model mean change scaled by global T change map with significance +; Author: Ruth Lorenz (ETH, Switzerland) +; CRESCENDO project +; ############################################################################# +; +; Description +; Pattern scaling. Annual mean or seasonal change scaled by global T +; change per model multi-model mean with significance in different +; periods (e.g. 2081-2100 and 2181-2200 with respect to 1986-2005) +; +; Required diag_script_info attributes (diagnostics specific) +; scenarios: list with scenarios to be included in the +; figure, e.g (/"rcp26","rcp45","rcp60","rcp85"/) +; periods: list with start years of periods to be included +; e.g. (/"2046","2081","2180"/) +; time_avg: list with seasons or annual to be included +; e.g (/"annualclim"/) +; +; Optional diag_script_info attributes (diagnostic specific) +; seasons: list with seasons index to be included if +; averagetime is "seasonalclim" (required for +; seasonalclim), DJF:0, MAM:1, JJA:2, SON:3 +; percent: 0 or 1, difference in percent = 1, default = 0 +; explicit_cn_levels: explicit levels for all contour plots +; max_vert: maximum number of plots in vertical +; default determined by number of scenarios +; max_hori: maximum number of plots in horizontal +; default determined by number of periods +; title: Figure title +; label: labels on top of each panel, e.g. 
scenario + rcp, +; loop to plot is seasons, scenarios, periods +; colormap: specify colormap for contour plots +; units: unit string in ncl formatting for legend title +; +; Caveats +; Only handles one variable at a time +; +; Modification history +; 20180618-A_lorenz_ruth: refactored for v2, fixed calculation of stippling +; 20161027-A_lorenz_ruth: written for ESMValTool +; +; ############################################################################# +; A temporary file written by the invoking Python script +; Passes on a number of variables from Python to NCL +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/ensemble.ncl" +load "$diag_scripts/shared/scaling.ncl" + +load "$diag_scripts/shared/plot/style.ncl" + +begin + ; ############################################################## + ; # Fetch general parameters, set in namelist_collins13ipcc.xml# + ; # passed via environment variables by python code # + ; ############################################################## + enter_msg(DIAG_SCRIPT, "") + + ; 'datasets', 'variables' are fetched from the above 'interface.ncl' file + var0 = variable_info[0]@short_name + info_items = select_metadata_by_name(input_file_info, var0) + dim_MOD = ListCount(info_items) + dim_VAR = ListCount(variable_info) + + if (dim_VAR .eq. 2) then + var0 = variable_info[0]@short_name + var1 = variable_info[1]@short_name + info_items2 = select_metadata_by_name(input_file_info, var1) + else + var0 = variable_info[0]@short_name + var1 = var0 + info_items2 = info_items + end if + + if (var1 .ne. "tas") then + error_msg("f", DIAG_SCRIPT, "", "first variable must be tas " + \ + "to scale by global mean tas change") + end if + + ; Save some dataset attributes as arrays for index-based selection below + dataset_names = metadata_att_as_array(info_items, "dataset") + dataset_exps = metadata_att_as_array(info_items, "exp") + dataset_ensembles = metadata_att_as_array(info_items, "ensemble") + dataset_startyears = metadata_att_as_array(info_items, "start_year") + + if (isatt(diag_script_info, "seasons")) then + dim_seas = dimsizes(diag_script_info@seasons) + else + dim_seas = 1 + end if + + ; Check required diag_script_info attributes + req_atts = (/"scenarios", "periods", "time_avg"/) + exit_if_missing_atts(diag_script_info, req_atts) + +end + +begin + ; Output netcdf directory + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) +end + +begin + ; ############################################# + ; # Get parameters from ./variable_defs/*.ncl # + ; # passed via the 'info' attribute # + ; ############################################# + if(isvar("MyParam")) then + delete(MyParam) + end if + if(isatt(variable_info[0], "long_name")) then + MyParam = variable_info[0]@long_name + log_info(" MyParam = " + MyParam) + else + error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \ + "'long_name'") + end if + + ; ########################################### + ; # Get data and average time # + ; ########################################### + ; get data from first model + imod = 0 ; NCL array indices start from zero + log_info("processing " + info_items[imod]@dataset + "_" \ + + info_items[imod]@exp + "_" \ + + info_items[imod]@ensemble) + + ; See ./interface_scripts/data_handling.ncl + A0 = read_data(info_items[imod]) + Fill = default_fillvalue(typeof(A0)) ; 1e20 + + ; Check dimensions + dims = getvardims(A0) + if (dimsizes(dims) .lt. 
2) then + error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + " dimensions, " + \ + "need 2 or 3") + end if + idx = ind(dims .eq. "lat") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lat dimension") + end if + nlat = dimsizes(A0&lat) + idx = ind(dims .eq. "lon") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lon dimension") + end if + nlon = dimsizes(A0&lon) + + temp_avg = new((/dim_MOD/), typeof(A0), Fill) + temp_avg!0 = "models" + data1 = new((/dim_MOD, dim_seas, nlat, nlon/), \ + typeof(A0), Fill) + ; Average over time (function in ./diag_scripts/lib/ncl/statistics.ncl) + if ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .ne. 4)) then + tmp_seas = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + if (dim_seas .eq. 1) then + data1(imod, 0, :, :) = tmp_seas(diag_script_info@seasons, :, :) + else + data1(imod, :, :, :) = tmp_seas(diag_script_info@seasons, :, :) + end if + elseif ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .eq. 4)) then + data1(imod, :, :, :) = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + else + data1(imod, 0, :, :) = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + end if + rad = (4.0 * atan(1.0) / 180.0) + do imod = 0, dim_MOD - 1 + A0_temp = read_data(info_items2[imod]) + temp_tavg = dim_avg_n_Wrap(A0_temp, 0) + latw = cos(data1&lat * rad) + temp_avg(imod) = wgt_areaave_Wrap(temp_tavg, latw, 1.0, 0) + delete(temp_tavg) + delete(latw) + if (imod .ne. 0) then + A0 = read_data(info_items[imod]) + ; Average over time, + ; function in ./diag_scripts/lib/ncl/statistics.ncl) + if ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .ne. 4)) then + tmp_seas = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + if (dim_seas .eq. 1) then + data1(imod, 0, :, :) = tmp_seas(diag_script_info@seasons, :, :) + else + data1(imod, :, :, :) = tmp_seas(diag_script_info@seasons, :, :) + end if + delete(tmp_seas) + elseif ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .eq. 4)) then + data1(imod, :, :, :) = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + else + data1(imod, 0, :, :) = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + end if + end if + delete(A0_temp) + delete(A0) + end do + delete(imod) + + ; ############################################## + ; # Calculate change from hist to periods # + ; ############################################## + if (isatt(diag_script_info, "reference_run")) then + reference_run_name = diag_script_info@reference_run + else + log_info("No reference run name given, set to default 'historical'") + reference_run_name = "historical" + end if + + ; loop over rcps and periods + dim_scen = dimsizes(diag_script_info@scenarios) + dim_per = dimsizes(diag_script_info@periods) + + to_plot = new((/dim_per * dim_seas, nlat, nlon/), typeof(data1), Fill) + if (isatt(diag_script_info, "percent")) then + percent = diag_script_info@percent + else + percent = 0 + end if + to_plot_signif = new((/dim_per * dim_seas, nlat, nlon/), typeof(data1), Fill) + perc95 = new((/dim_per * dim_seas, nlat, nlon/), typeof(data1), Fill) + i = 0 + do seas = 0, dim_seas - 1 ; loop over seasons + do per = 0, dim_per - 1 ; loop over periods + do rcp = 0, dim_scen - 1 ; loop over rcps + idx_rcp = ind((dataset_exps .eq. diag_script_info@scenarios(rcp)) \ + .and. (dataset_startyears .eq. 
\ + diag_script_info@periods(per))) + proj = data1(idx_rcp, seas, :, :) + temp_proj = temp_avg(idx_rcp) + ; Cut low values + proj = where(abs(proj) .gt. 1.e-14, proj, proj@_FillValue) + ; find historical runs from same models as in rcp + do jj = 0, dimsizes(idx_rcp) - 1 + tmp_idx = ind(dataset_names .eq. dataset_names(idx_rcp(jj)) .and. \ + dataset_exps .eq. reference_run_name .and. \ + dataset_ensembles .eq. dataset_ensembles(idx_rcp(jj))) + if (isdefined("idx_hist")) then + idx_hist := array_append_record(idx_hist, tmp_idx, 0) + else + idx_hist = tmp_idx + end if + delete(tmp_idx) + end do + + base = data1(idx_hist, seas, :, :) + temp_base = temp_avg(idx_hist) + ; Cut low values + base = where(abs(base) .gt. 1.e-14, base, base@_FillValue) + ; scale each model by global T change + log_debug("Scale each model by global T change") + dim_mod = dimsizes(idx_rcp) + if (dim_mod .lt. 2) then + error_msg("f", DIAG_SCRIPT, "", \ + "Only one model found, multi-model needs at least two.") + end if + var_diff_scal = new((/dim_mod, nlat, nlon/), typeof(data1), Fill) + do imod = 0, dim_mod - 1 + if (percent .eq. 1) then + var_diff_scal(imod, :, :) = \ + ((100 * (proj(imod, :, :) - base(imod, :, :))) / \ + where(base(imod, :, :) .ne. 0.,\ + base(imod, :, :), base@_FillValue)) / \ + (temp_proj(imod) - temp_base(imod)) + else + var_diff_scal(imod, :, :) = (proj(imod, :, :) - \ + base(imod, :, :)) / \ + (temp_proj(imod) - temp_base(imod)) + end if + end do + delete([/base, proj, temp_proj, temp_base/]) + if rcp .eq. 0 then + var_diff_scal_all_mod = var_diff_scal + else + tmp_scal_all_mod = var_diff_scal_all_mod + delete(var_diff_scal_all_mod) + var_diff_scal_all_mod = \ + array_append_record(tmp_scal_all_mod, var_diff_scal, 0) + delete(tmp_scal_all_mod) + end if + delete([/idx_hist, idx_rcp, var_diff_scal/]) + end do ; rcp + var_diff_scal_all_mod!0 = "models" + var_diff_scal_all_mod!1 = "lat" + var_diff_scal_all_mod!2 = "lon" + var_diff_scal_all_mod&lat = data1&lat + var_diff_scal_all_mod&lon = data1&lon + ; average over rcps + log_debug("Average over models and rcps") + to_plot(i, :, :) = dim_avg_n_Wrap(var_diff_scal_all_mod, 0) + log_debug("determine significance") + ; calculate standard deviation and 95-percentile (assuming gaussian + ; distribution) over all models and scenarios + nvalid = dim_num_n(.not.ismissing(var_diff_scal_all_mod(:, nlat / 2, \ + nlon / 2)), 0) + perc95(i, :, :) = dim_stddev_n_Wrap(var_diff_scal_all_mod, 0) * 1.96 / \ + sqrt(nvalid) ; standard error + ; compare change to perc95 + to_plot_signif(i, :, :) = where(abs(to_plot(i, :, :)) .gt. \ + abs(perc95(i, :, :)), 1., 0) + i = i + 1 + delete([/var_diff_scal_all_mod/]) + end do ; per + end do ; seas + to_plot!0 = "panel" + to_plot&panel = diag_script_info@label + to_plot!1 = "lat" + to_plot&lat = data1&lat + to_plot!2 = "lon" + to_plot&lon = data1&lon + to_plot@units = variable_info[0]@units + if (percent .eq. 
1) then
+    to_plot@units = "%"
+  end if
+
+  ; ###########################################
+  ; # Other Metadata: diag_script, var        #
+  ; ###########################################
+  ; Add to to_plot, as attributes without prefix
+  if (isatt(to_plot, "diag_script")) then  ; Add to existing entries
+    tmp = to_plot@diag_script
+    delete(to_plot@diag_script)
+    to_plot@diag_script = array_append_record(tmp, (/DIAG_SCRIPT/), 0)
+    delete(tmp)
+  else  ; Add as new attribute
+    to_plot@diag_script = (/DIAG_SCRIPT/)
+  end if
+  to_plot@var = var0  ; Overwrite existing entry
+  if (isatt(variable_info, "long_name")) then
+    to_plot@var_long_name = variable_info@long_name
+  end if
+
+  ; Check units and adjust for plotting if necessary
+  ; difference in K is the same as difference in degC, only change label
+  if (to_plot@units.eq."K") then
+    to_plot@units = "degC"
+  end if
+  if (isatt(diag_script_info, "plot_units")) then
+    to_plot = convert_units(to_plot, diag_script_info@plot_units)
+  end if
+  tmp_unit = to_plot@units
+  to_plot@units = tmp_unit + " per degC"
+
+  copy_VarMeta(to_plot, to_plot_signif)
+  to_plot_signif@diag_script = (/DIAG_SCRIPT/)
+  to_plot_signif@var = "signif"
+  to_plot_signif@var_long_name = "significance: where average change over " + \
+                                 "all realizations is larger than " + \
+                                 "95-percentile of distribution of models."
+  to_plot_signif@units = 1
+
+  copy_VarCoords(to_plot, perc95)
+  perc95@diag_script = (/DIAG_SCRIPT/)
+  perc95@var = "95perc"
+  perc95@var_long_name = "95-percentile of distribution of models"
+  perc95@units = 1
+
+  ; ###########################################
+  ; # Output to netCDF                        #
+  ; ###########################################
+  to_plot@ncdf = variable_info[0]@diagnostic + ".nc"
+  ncdf_file = work_dir + "/" + to_plot@ncdf
+  ncdf_file@existing = "overwrite"
+  ; Function in ~/interface_scripts/auxiliary.ncl
+  ncdf_outfile = ncdf_write(to_plot, ncdf_file)
+  ncdf_file@existing = "append"
+  ncdf_outfile = ncdf_write(to_plot_signif, ncdf_file)
+  ncdf_outfile = ncdf_write(perc95, ncdf_file)
+
+  leave_msg(DIAG_SCRIPT, "")
+end
diff --git a/esmvaltool/diag_scripts/ipcc_ar5/ch12_calc_zonal_cont_diff_mmm_stippandhatch.ncl b/esmvaltool/diag_scripts/ipcc_ar5/ch12_calc_zonal_cont_diff_mmm_stippandhatch.ncl
new file mode 100644
index 0000000000..dbe89529b6
--- /dev/null
+++ b/esmvaltool/diag_scripts/ipcc_ar5/ch12_calc_zonal_cont_diff_mmm_stippandhatch.ncl
@@ -0,0 +1,490 @@
+; #############################################################################
+; Annual mean zonal mean multi-model changes
+; Author: Ruth Lorenz (ETH Zurich, Switzerland)
+; CRESCENDO project
+; #############################################################################
+;
+; Description
+;    Multi-model annual or seasonal mean zonal mean changes
+;    e.g. 2081-2100 relative to base period (1986-2005 in CMIP5)
+;    for multiple RCPs
+;
+; Required diag_script_info attributes (diagnostic specific)
+;    ncdf: filename for netcdf to write
+;    scenarios: which scenarios to include
+;    periods: list with start years of periods to be included
+;             e.g.
(/"2046", "2081", "2180"/) +; time_avg: list with seasons or annual to be included e.g (/"annualclim"/) +; +; Optional diag_script_info attributes (diagnostic specific) +; title: title of whole figure +; label: labels of individual panels +; max_hori: maximum number of panels in horizontal +; max_vert: maximum number of panels in vertical +; colormap: colortable different from default +; explicit_cn_levels: range and levels for contour plot +; base_cn: flag indicating if base period plotted in +; contour lines (as in ipcc Fig. 12.19 for wind) +; base_cnLevelSpacing: spacing for contour levels (e.g. 10) +; if base_cn is True +; base_cnMinLevel: min value for base contours +; base_cnMaxLevel: max level for base contours +; +; Modification history +; 20181024-A_lorenz_ruth: ported to v2, regridding in preprocessor +; 20170517-A_lorenz_ruth: changed to regrid to common grid first before +; calculating zonal average and added ability to use +; irregular grids (e.g. thetao) +; 20170127-A_lorenz_ruth: changed routine for plotting stippling/hatching to +; zonalmean_profile to get dots and hatch +; 20161031-A_lorenz_ruth: increased flexibility and added +; stippling and hatching +; 20160831-A_lorenz_ruth: adapted for ESMValTool +; 20130503-A_sedlacek_jan: written for IPCC AR5 +; +; ############################################################################# +; A temporary file written by the invoking Python script +; Passes on a number of variables from Python to NCL +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/ensemble.ncl" +load "$diag_scripts/shared/scaling.ncl" + +load "$diag_scripts/shared/plot/style.ncl" + +begin + ; ############################################################## + ; # Fetch general parameters, set in namelist_collins13ipcc.xml# + ; # passed via environment variables by python code # + ; ############################################################## + enter_msg(DIAG_SCRIPT, "") + + ; 'datasets', 'variables' are fetched from the above 'interface.ncl' file + var0 = variable_info[0]@short_name + info_items = select_metadata_by_name(input_file_info, var0) + dim_MOD = ListCount(info_items) + dim_VAR = ListCount(variable_info) + + ; Save some dataset attributes as arrays for index-based selection below + dataset_names = metadata_att_as_array(info_items, "dataset") + dataset_exps = metadata_att_as_array(info_items, "exp") + dataset_ensembles = metadata_att_as_array(info_items, "ensemble") + dataset_startyears = metadata_att_as_array(info_items, "start_year") + + ; Check required diag_script_info attributes + req_atts = (/"scenarios", "periods", "time_avg", "label"/) + exit_if_missing_atts(diag_script_info, req_atts) +end + +begin + ; Output netcdf directory + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) + + input_dir = diag_script_info@input_files(1) +end + +begin + ; ############################################# + ; # Get parameters from ./variable_defs/*.ncl # + ; # passed via the 'info' attribute # + ; ############################################# + if(isvar("MyParam")) then + delete(MyParam) + end if + if(isatt(variable_info[0], "long_name")) then + MyParam = variable_info[0]@long_name + log_info(" MyParam = " + MyParam) + else + error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \ + "'long_name'") + end if + + ; ########################################### + ; # Get data and average time # + ; ########################################### + ; get data from 
first model + imod = 0 ; NCL array indicies start from zero + log_info("processing " + info_items[imod]@dataset + "_"\ + + info_items[imod]@exp + "_"\ + + info_items[imod]@ensemble) + + ; See ./interface_scripts/data_handling.ncl + A0 = read_data(info_items[imod]) + + ; Check dimensions + dims = getvardims(A0) + if (dimsizes(dims) .lt. 2) then + error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + " dimensions, " + \ + "need 2 or 3") + end if + idx = ind(dims .eq. "lat" .or. dims .eq. "rlat" .or. dims .eq. "j") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lat dimension") + end if + lat_name = dims(idx) + + idx = ind(dims .eq. "lev" .or. dims .eq. "plev") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no level dimension") + end if + lev_name = dims(idx) + + if (isatt(diag_script_info, "seasons")) then + dim_seas = dimsizes(diag_script_info@seasons) + else + dim_seas = 1 + end if + + ; check if diag_script_info@label has correct size, otherwise error message + dim_periods = dimsizes(diag_script_info@periods) + dim_scenarios = dimsizes(diag_script_info@scenarios) + dim_label = dim_seas * dim_scenarios * dim_periods + if (dim_label .ne. dimsizes(diag_script_info@label)) then + error_msg("f", DIAG_SCRIPT, "", "wrong number of labels, needs to be " + \ + "seasons * scenarios * periods") + end if + ; Average over time (function in ./diag_scripts/lib/ncl/statistics.ncl) + data1 = time_operations(A0, -1, -1, "average", diag_script_info@time_avg, \ + True) + + annots = project_style(info_items, diag_script_info, "annots") + Fill = default_fillvalue(typeof(data1)) + + var_reg = new((/dim_MOD, dim_seas, dimsizes(A0&$lev_name$), \ + dimsizes(A0&lat)/), typeof(data1), Fill) + + var_reg!0 = "models" + var_reg!1 = "season" + var_reg!3 = "lat" + var_reg&models = annots + if (isatt(diag_script_info, "seasons")) then + var_reg&season = (/ispan(0, dim_seas - 1, 1)/) + else + var_reg&season = (/0/) + end if + var_reg!2 = lev_name + var_reg&$lev_name$ = A0&$lev_name$ + var_reg&lat = A0&lat + if (isatt(diag_script_info, "iavmode")) then + if (diag_script_info@iavmode .eq. "each") then + iav = new((/dim_MOD, dim_seas, dimsizes(A0&$lev_name$), \ + dimsizes(A0&lat)/), typeof(A0), Fill) + copy_VarMeta(var_reg, iav) + end if + end if + do imod = 0, dim_MOD - 1 + if (imod .ne. 0) then + A0 = read_data(info_items[imod]) + data1 = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + dims = getvardims(A0) + idx = ind(dims .eq. "lat" .or. dims .eq. "rlat" .or. dims .eq. "j") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lat dimension") + end if + lat_name = dims(idx) + idx = ind(dims .eq. "lev" .or. dims .eq. "plev") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no level dimension") + end if + lev_name = dims(idx) + end if + if ((dim_seas .eq. 1) .and. \ + (diag_script_info@time_avg .eq. "annualclim")) then + var_reg(imod, 0, :, :) = (/data1/) + elseif ((dim_seas .eq. 1) .and. \ + (diag_script_info@time_avg .eq. "seasonalclim")) then + var_reg(imod, 0, :, :) = (/data1(diag_script_info@seasons, :, :)/) + else + var_reg(imod, :, :, :) = (/data1(diag_script_info@seasons, :, :)/) + end if + if (isatt(diag_script_info, "iavmode")) then + if (diag_script_info@iavmode .eq. 
"each") then + ; ######################################################### + ; # Read natural variability for stippling for each model # + ; ######################################################### + iav_file = addfile(input_dir + "/" + \ + "IAV_piControl_" + \ + info_items[imod]@dataset + "_" + \ + info_items[imod]@ensemble + "_" \ + + "_" + var0 + "_" + \ + diag_script_info@time_avg + ".nc", "r") + iav_native = iav_file->iav + if ((dim_seas .eq. 1) .and. \ + (diag_script_info@time_avg .eq. "annualclim")) then + ; Average over longitudes for zonal mean + iav_zon = dim_avg_n_Wrap(iav_native, 2) + iav(imod, 0, :, :) = iav_zon + elseif ((dim_seas .eq. 1) .and. \ + (diag_script_info@time_avg .eq. "seasonalclim")) then + ; Average over longitudes for zonal mean + iav_zon = dim_avg_n_Wrap(iav_native, 3) + iav(imod, 0, :, :) = iav_zon(diag_script_info@seasons, :, :) + else + ; Average over longitudes for zonal mean + iav_zon = dim_avg_n_Wrap(iav_native, 3) + iav(imod, :, :, :) = iav_zon(diag_script_info@seasons, :, :) + end if + end if + end if + delete(A0) + delete(data1) + end do + if (isatt(diag_script_info, "iavmode")) then + if (diag_script_info@iavmode .eq. "each") then + ; calculate multi-model mean of iav + tmp_pow = dim_avg_n(iav ^ 2, 0) + natvar_mmm = rm_single_dims(tmp_pow * sqrt(2.)) + elseif (diag_script_info@iavmode .eq. "mmm") then + log_info("Read precalculated natural variability for multi-model") + ; read already calculated mmm iav + natvar_file = addfile(input_dir + "/" + \ + "IAV_mmm_piControl_" + \ + var0 + "_" + diag_script_info@time_avg + \ + ".nc", "r") + natvar_mmm = natvar_file->iav + if ((dim_seas .ne. 4) .and. \ + (diag_script_info@time_avg .ne. "annualclim")) then + tmp = natvar_mmm + delete(natvar_mmm) + natvar_mmm = rm_single_dims(tmp(diag_script_info@seasons, :, :)) + elseif (diag_script_info@time_avg .eq. "annualclim") then + tmp = natvar_mmm + delete(natvar_mmm) + natvar_mmm = rm_single_dims(tmp) + end if + else + error_msg("f", DIAG_SCRIPT, "", "Error: This iavmode is not know") + end if + else + log_info("Read precalculated natural variability for multi-model") + ; read already calculated mmm iav + natvar_file = addfile(input_dir + "/" + \ + "IAV_mmm_piControl_" + \ + var0 + "_" + diag_script_info@time_avg + \ + ".nc", "r") + natvar_mmm = natvar_file->iav + if ((dim_seas .ne. 4) .and. \ + (diag_script_info@time_avg .ne. "annualclim")) then + tmp = natvar_mmm + delete(natvar_mmm) + natvar_mmm = rm_single_dims(tmp(diag_script_info@seasons, :, :)) + elseif (diag_script_info@time_avg .eq. 
"annualclim") then + tmp = natvar_mmm + delete(natvar_mmm) + natvar_mmm = rm_single_dims(tmp) + end if + end if + ; ############################################## + ; # Calculate change from hist to periods # + ; ############################################## + if (isatt(diag_script_info, "reference_run")) then + reference_run_name = diag_script_info@reference_run + else + log_info("No reference run name given, set to default 'historical'") + reference_run_name = "historical" + end if + ; loop over rcps and periods + dim_scen = dimsizes(diag_script_info@scenarios) + dim_per = dimsizes(diag_script_info@periods) + + to_plot = new((/dim_scen * dim_per * dim_seas, \ + dimsizes(var_reg&$lev_name$), dimsizes(var_reg&lat)/), \ + typeof(var_reg), Fill) + if (isatt(diag_script_info, "base_cn")) then + to_plot_base = new((/dim_scen * dim_per * dim_seas, \ + dimsizes(var_reg&$lev_name$), \ + dimsizes(var_reg&lat)/), typeof(var_reg), Fill) + end if + to_plot_signif = new((/dim_scen * dim_per * dim_seas, \ + dimsizes(var_reg&$lev_name$), \ + dimsizes(var_reg&lat)/), \ + typeof(var_reg), Fill) + to_plot_not_signif = new((/dim_scen * dim_per * dim_seas, \ + dimsizes(var_reg&$lev_name$), \ + dimsizes(var_reg&lat)/), \ + typeof(var_reg), Fill) + model_number = new((/dim_scen * dim_per * dim_seas/), integer) + i = 0 + do seas = 0, dim_seas - 1 ; loop over seasons + do rcp = 0, dim_scen - 1 ; loop over rcps + do per = 0, dim_per - 1 ; loop over periods + idx_rcp = ind(dataset_exps .eq. diag_script_info@scenarios(rcp) .and. \ + dataset_startyears .eq. diag_script_info@periods(per)) + proj = var_reg(idx_rcp, seas, :, :) + proj_avg = dim_avg_n_Wrap(proj, 0) + ; Find historical runs from same models as in rcp + do jj = 0, dimsizes(idx_rcp) - 1 + tmp_idx = ind(dataset_names .eq. dataset_names(idx_rcp(jj)) .and. \ + dataset_exps .eq. reference_run_name .and. \ + dataset_ensembles .eq. dataset_ensembles(idx_rcp(jj))) + if (isdefined("idx_hist")) then + idx_hist := array_append_record(idx_hist, tmp_idx, 0) + else + idx_hist = tmp_idx + end if + delete(tmp_idx) + end do + base = var_reg(idx_hist, seas, :, :) + base_avg = dim_avg_n_Wrap(base, 0) + var_diff = proj - base + delete([/base, proj/]) + to_plot(i, :, :) = proj_avg - base_avg + if (isatt(diag_script_info, "base_cn")) then + to_plot_base(i, :, :) = base_avg + end if + + ; Determine sigma and sign for significance + log_debug("determine sigma and sign for significance") + var_pos_signif = dim_num_n(where(var_diff .gt. 0., 1., \ + to_plot@_FillValue) .eq. 1., 0) + var_neg_signif = dim_num_n(where(var_diff .lt. 0., 1., \ + to_plot@_FillValue) .eq. 1., 0) + var_pos_signif_tmp = \ + var_pos_signif / where((var_pos_signif + var_neg_signif) \ + .ne. 0., var_pos_signif + var_neg_signif, \ + to_plot@_FillValue) + var_neg_signif_tmp = \ + var_neg_signif / where((var_pos_signif + var_neg_signif) \ + .ne. 0., var_pos_signif + var_neg_signif, \ + to_plot@_FillValue) + signif_and_sign = where(var_pos_signif_tmp .ge. 0.9 .or. \ + var_neg_signif_tmp .ge. 0.9, 1., 0.) + signif_and_sign = where(ismissing(signif_and_sign), 0., \ + signif_and_sign) + if (dim_seas .ne. 1) then + sigma_and_sign = where(abs(to_plot(i, :, :)) .gt. \ + abs(2. * natvar_mmm(seas, :, :)), 1., 0.) + to_plot_not_signif(i, :, :) = where(abs(to_plot(i, :, :)) .lt. \ + abs(natvar_mmm(seas, :, :)), \ + 1., 0.) + else + sigma_and_sign = where(abs(to_plot(i, :, :)) .gt. \ + abs(2. * natvar_mmm), 1., 0.) + to_plot_not_signif(i, :, :) = where(abs(to_plot(i, :, :)) .lt. \ + abs(natvar_mmm), \ + 1., 0.) 
+ end if + to_plot_signif(i, :, :) = where(signif_and_sign .eq. 1, \ + sigma_and_sign, 0.) + model_number(i) = dimsizes(idx_rcp) + i = i + 1 + delete([/idx_rcp, idx_hist, var_diff/]) + end do ; per + end do ; rcp + end do ; seas + to_plot!0 = "panel" + to_plot&panel = diag_script_info@label + + to_plot!1 = lev_name + to_plot&$lev_name$ = var_reg&$lev_name$ + if (isatt(diag_script_info, "base_cn")) then + to_plot_base!1 = lev_name + to_plot_base&$lev_name$ = var_reg&$lev_name$ + end if + + to_plot!2 = "lat" + to_plot&lat = var_reg&lat + if (isatt(diag_script_info, "base_cn")) then + copy_VarMeta(to_plot, to_plot_base) + end if + copy_VarMeta(to_plot, to_plot_signif) + copy_VarMeta(to_plot, to_plot_not_signif) + + ; ########################################### + ; # Other Metadata: diag_script, var # + ; ########################################### + ; Add to to_plot, as attributes without prefix + if(isatt(to_plot, "diag_script")) then ; Add to existing entries + temp = to_plot@diag_script + delete(to_plot@diag_script) + to_plot@diag_script = array_append_record(temp, (/DIAG_SCRIPT/), 0) + delete(temp) + else ; Add as new attribute + to_plot@diag_script = (/DIAG_SCRIPT/) + end if + to_plot@var = var0 ; Overwrite existing entry + if(isatt(variable_info[0], "long_name")) then + to_plot@var_long_name = "change in " + variable_info[0]@long_name + end if + if (isatt(diag_script_info, "plot_units")) then + to_plot = convert_units(to_plot, diag_script_info@plot_units) + end if + + if (isatt(diag_script_info, "base_cn")) then + if(isatt(to_plot_base, "diag_script")) then ; Add to existing entries + temp = to_plot_base@diag_script + delete(to_plot_base@diag_script) + to_plot_base@diag_script = array_append_record(temp, (/DIAG_SCRIPT/), 0) + delete(temp) + else + to_plot_base@diag_script = (/DIAG_SCRIPT/) + end if + to_plot_base@var = var0 + "_base" + to_plot_base@var_long_name = variable_info[0]@long_name + to_plot_base@units = variable_info[0]@units + end if + if(isatt(to_plot_signif, "diag_script")) then ; Add to existing entries + temp = to_plot_signif@diag_script + delete(to_plot_signif@diag_script) + to_plot_signif@diag_script = array_append_record(temp, (/DIAG_SCRIPT/), 0) + delete(temp) + else ; Add as new attribute + to_plot_signif@diag_script = (/DIAG_SCRIPT/) + end if + to_plot_signif@var = "signif" + to_plot_signif@var_long_name = "significance: mean change larger than" +\ + " 2*natvar and 90% of models have the same"\ + + " sign" + to_plot_signif@units = 1 + + if(isatt(to_plot_not_signif, "diag_script")) then ; Add to existing entries + temp = to_plot_not_signif@diag_script + delete(to_plot_not_signif@diag_script) + to_plot_not_signif@diag_script = array_append_record(temp, \ + (/DIAG_SCRIPT/), 0) + delete(temp) + else ; Add as new attribute + to_plot_not_signif@diag_script = (/DIAG_SCRIPT/) + end if + to_plot_not_signif@var = "not_signif" + to_plot_not_signif@var_long_name = "not significant: mean change smaller"\ + + " than 1*natvar" + to_plot_not_signif@units = 1 + + model_number!0 = "panel" + model_number&panel = diag_script_info@label + if(isatt(model_number, "diag_script")) then ; Add to existing entries + temp = model_number@diag_script + delete(model_number@diag_script) + model_number@diag_script = array_append_record(temp, (/DIAG_SCRIPT/), 0) + delete(temp) + else ; Add as new attribute + model_number@diag_script = (/DIAG_SCRIPT/) + end if + model_number@var = "model_nr" + model_number@var_long_name = "number of models in mmm" + model_number@units = 1 + + ; 
###########################################
+  ; # Output to netCDF                      #
+  ; ###########################################
+  ; Output (controlled by diag_script_info)
+  to_plot@ncdf = variable_info[0]@diagnostic + ".nc"
+  ncdf_file = work_dir + "/" + to_plot@ncdf
+  ncdf_file@existing = "overwrite"
+  ; Function in ~/interface_scripts/auxiliary.ncl
+  ncdf_outfile = ncdf_write(to_plot, ncdf_file)
+  ncdf_file@existing = "append"
+  ncdf_outfile = ncdf_write(to_plot_signif, ncdf_file)
+  ncdf_outfile = ncdf_write(to_plot_not_signif, ncdf_file)
+  ncdf_outfile = ncdf_write(model_number, ncdf_file)
+  if ((isatt(diag_script_info, "base_cn")) .and. \
+      (diag_script_info@base_cn .eq. True)) then
+    ncdf_outfile = ncdf_write(to_plot_base, ncdf_file)
+  end if
+
+  leave_msg(DIAG_SCRIPT, "")
+end
diff --git a/esmvaltool/diag_scripts/ipcc_ar5/ch12_map_diff_each_model_fig12-9.ncl b/esmvaltool/diag_scripts/ipcc_ar5/ch12_map_diff_each_model_fig12-9.ncl
new file mode 100644
index 0000000000..65f2dbf50c
--- /dev/null
+++ b/esmvaltool/diag_scripts/ipcc_ar5/ch12_map_diff_each_model_fig12-9.ncl
@@ -0,0 +1,301 @@
+; #############################################################################
+; Mean variable change in one RCP for individual models
+; Author: Ruth Lorenz (ETH, Switzerland)
+; CRESCENDO project
+; #############################################################################
+; Caveats
+;    All models are on different grids, so it is not possible to collect all
+;    data in one array. Therefore, historical and rcp data are read for each
+;    model in turn, all panels are drawn, and the panels are combined at the
+;    end. Only include models in the namelist which exist for both historical
+;    and rcp.
+;
+; Modification history
+;    20180618-A_lorenz_ruth: ported to version 2
+;    20171002-A_lorenz_ruth: Added tags for searching
+;    20160428-A_lorenz_ruth: adapted to ESMValTool
+;    20130501-A_sedlacek_jan: written for IPCC AR5.
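+;
+; Required diag_script_info attributes
+;    experiment: name of the experiment used to select the scenario runs
+;                (e.g. "rcp85"; see select_metadata_by_atts below)
+;    time_avg: type of time average (e.g. "annualclim")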
+;
+; #############################################################################
+
+load "$diag_scripts/../interface_scripts/interface.ncl"
+
+load "$diag_scripts/shared/statistics.ncl"
+load "$diag_scripts/shared/ensemble.ncl"
+load "$diag_scripts/shared/scaling.ncl"
+
+load "$diag_scripts/shared/plot/contour_maps.ncl"
+load "$diag_scripts/shared/plot/style.ncl"
+
+begin
+  ; ##############################################################
+  ; # Fetch general parameters, set in namelist_collins13ipcc.yml#
+  ; # passed via environment variables by python code            #
+  ; ##############################################################
+  enter_msg(DIAG_SCRIPT, "")
+
+  ; 'datasets', 'variables' are fetched from the above 'interface.ncl' file
+  var0 = variable_info[0]@short_name
+  info_items = select_metadata_by_name(input_file_info, var0)
+  dim_MOD = ListCount(info_items)
+  dim_VAR = ListCount(variable_info)
+
+  ; Check required diag_script_info attributes
+  exit_if_missing_atts(diag_script_info, (/"experiment", "time_avg"/))
+end
+
+begin
+  ; Output netcdf directory
+  work_dir = config_user_info@work_dir
+  system("mkdir -p " + work_dir)
+end
+
+begin
+  ; #############################################
+  ; # Get parameters from ./variable_defs/*.ncl #
+  ; # passed via the 'info' attribute           #
+  ; #############################################
+  if(isvar("MyParam")) then
+    delete(MyParam)
+  end if
+  if(isatt(variable_info[0], "long_name")) then
+    MyParam = variable_info[0]@long_name
+    log_debug(" MyParam = " + MyParam)
+  else
+    error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \
+              "'long_name'")
+  end if
+
+  ; ###########################################
+  ; # Get data and average time               #
+  ; ###########################################
+  ; Set dataset indexes
+  modidx = ispan(0, dim_MOD / 2 - 1, 1)
+  ; get data from first dataset
+  imod = modidx(0)  ; NCL array indices start from zero
+  log_debug("processing " + info_items[imod]@dataset + "_" \
+            + info_items[imod]@exp + "_" \
+            + info_items[imod]@ensemble)
+
+  ; See ./interface_scripts/data_handling.ncl
+  A0 = read_data(info_items[imod])
+
+  ; Check dimensions
+  dims = getvardims(A0)
+  if (dimsizes(dims) .lt. 2) then
+    error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + " dimensions, " + \
+              "need 2 or 3")
+  end if
+  idx = ind(dims .eq. "lat")
+  if (ismissing(idx)) then
+    error_msg("f", DIAG_SCRIPT, "", "no lat dimension")
+  end if
+  idx = ind(dims .eq. "lon")
+  if (ismissing(idx)) then
+    error_msg("f", DIAG_SCRIPT, "", "no lon dimension")
+  end if
+
+  ; Average over time (function in ./diag_scripts/lib/ncl/statistics.ncl)
+  data1 = \
+    time_operations(A0, -1, -1, "average", diag_script_info@time_avg, True)
+
+  climofiles = new(dim_MOD, string)
+
+  jj = 0
+  do ii = 0, dimsizes(modidx) - 1
+    imod = modidx(ii)
+    if (imod .eq.
0) then + data2 = data1 + else + A0 = read_data(info_items[imod]) + data2 = \ + time_operations(A0, -1, -1, "average", diag_script_info@time_avg, True) + end if + climofiles = info_items[imod]@filename + + atts = True + atts@dataset = info_items[imod]@dataset + atts@exp = diag_script_info@experiment + atts@ensemble = info_items[imod]@ensemble + item_rcp = select_metadata_by_atts(info_items, atts) + A1 = read_data(item_rcp[0]) + delete(atts) + delete(item_rcp) + data3 = \ + time_operations(A1, -1, -1, "average", diag_script_info@time_avg, True) + + ; ############################################## + ; # Calculate change from period 1 to period 2 # + ; ############################################## + to_plot = data3 - data2 + copy_VarMeta(data2, to_plot) + + ; ########################################### + ; # Other Metadata: diag_script, var # + ; ########################################### + ; Add to to_plot, as attributes without prefix + if (isatt(to_plot, "diag_script")) then ; Add to existing entries + temp = to_plot@diag_script + delete(to_plot@diag_script) + to_plot@diag_script = array_append_record(temp, (/DIAG_SCRIPT/), 0) + delete(temp) + else ; Add as new attribute + to_plot@diag_script = (/DIAG_SCRIPT/) + end if + to_plot@var = var0 ; Overwrite existing entry + if (isatt(variable_info[0], "long_name")) then + to_plot@var_long_name = variable_info[0]@long_name + end if + if(isatt(variable_info[0], "units")) then + to_plot@units = variable_info[0]@units + ; difference in K is the same as difference in degC, only change label + if (to_plot@units.eq."K") then + to_plot@units = "degC" + end if + if (isatt(diag_script_info, "plot_units")) then + to_plot = convert_units(to_plot, diag_script_info@plot_units) + end if + plot_units = to_plot@units + end if + + ; ########################################### + ; # Output to netCDF # + ; ########################################### + to_plot@ncdf = work_dir + "/" + info_items[imod]@dataset + "_" + \ + info_items[imod]@ensemble + "_" + variable_info[0]@diagnostic + ".nc" + to_plot@ncdf_existing = "overwrite" + ; Function in ~/interface_scripts/auxiliary.ncl + ncdf_outfile = ncdf_write(to_plot, to_plot@ncdf) + + if (.not.isatt(diag_script_info, "styleset")) then + diag_script_info@styleset = "DEFAULT" + end if + + ; Select plotting attributes from the project style files + ; See ./diag_scripts/shared/plot/style.ncl + annots = project_style(input_file_info, diag_script_info, "annots") + + ; ###################################################### + ; # Separate output to a graphics file, Panelling # + ; ###################################################### + to_plot@res_gsnDraw = False ; Do not draw yet + to_plot@res_gsnFrame = False ; Don't advance frame. 
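+    ; the individual plots are collected in the graphic array "maps" and
+    ; drawn together by panelling() at the end, so drawing is deferred here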
+ to_plot@res_cnFillOn = True ; Color plot desired + to_plot@res_cnLineLabelsOn = False ; Contour lines + to_plot@res_cnLinesOn = False + if (isatt(diag_script_info, "colormap")) then ; Set colormap + col = read_colormap_file(diag_script_info@colormap) + else + col = read_colormap_file("./diag_scripts/shared/plot/rgb/" + \ + "ipcc_temperature_with_grey.rgb") + end if + to_plot@res_cnFillColors = col + if (isatt(diag_script_info, "span")) then + to_plot@res_cnSpanFillPalette = diag_script_info@span + else + to_plot@res_cnSpanFillPalette = False ; use full colormap + end if + to_plot@res_lbLabelBarOn = False + ; function in aux_plotting.ncl + if (imod.eq.0) then + wks = get_wks("dummy_for_wks", DIAG_SCRIPT, variable_info[0]@diagnostic) + nmap = dim_MOD / 2 + + if (isatt(diag_script_info, "max_vert")) then + ; Maximum allowed number of plots per page (vertical) + nvert = toint(diag_script_info@max_vert) + else + nvert = 10 + end if + if (isatt(diag_script_info, "max_hori")) then + ; Maximum allowed number of plots per page (horizontal) + nhori = toint(diag_script_info@max_hori) + else + nhori = 5 + end if + maps = new(nmap, graphic) ; collect individual maps in a graphic array + end if + annots_split = str_split(annots(imod), "_") + to_plot@res_tiMainString = annots_split(0) + " " + annots_split(1) + to_plot@res_gsnRightString = "" + to_plot@res_tiMainFontHeightF = 0.035 + + if(isatt(diag_script_info, "projection")) then + to_plot@res_mpProjection = diag_script_info@projection + else + to_plot@res_mpProjection = "Robinson" + end if + ; Set explicit contour levels + if(isatt(diag_script_info, "diff_levs")) then + to_plot@res_cnLevelSelectionMode = "ExplicitLevels" + to_plot@res_cnLevels = diag_script_info@diff_levs + end if + + to_plot@res_mpPerimOn = False + maps(jj) = contour_map(wks, to_plot(:, :), var0) + jj = jj + 1 + + ; clean up + delete(A0) + delete(A1) + delete(data2) + delete(data3) + delete(to_plot) + + end do + + pres = True ; needed to override panelling defaults + pres@gsnPanelLabelBar = True ; no general label bar desired here + + a4_height = 29.7 ; in centimeters + a4_width = 23.0 ; reference is correct + cm_per_inch = 2.54 + + pres@gsnPaperWidth = a4_width / cm_per_inch + pres@gsnPaperHeight = a4_height / cm_per_inch + pres@gsnPaperOrientation = "portrait" + pres@gsnPanelTop = 0.96 + + if (isatt(diag_script_info, "units")) then + pres@lbTitleString = diag_script_info@units + else + unit_string = format_units(plot_units) + pres@lbTitleString = "(" + unit_string + ")" + end if + pres@lbTitleFontHeightF = 0.017 + pres@lbTitleDirection = "across" + pres@lbTitlePosition = "Right" + pres@lbTitleJust = "CenterLeft" + pres@lbLabelFontHeightF = 0.014 + pres@lbLabelJust = "CenterCenter" + pres@lbLabelAutoStride = False + + pres@txFontHeightF = 0.015 + if (isatt(diag_script_info, "title")) then + pres@txString = diag_script_info@title + end if + pres@pmLabelBarParallelPosF = 0.06 + + outfile = panelling(wks, maps, nvert, nhori, pres) + + log_info(" Wrote " + outfile) + + ; collect meta-data + nc_file = ncdf_outfile + plot_file = outfile + caption = "Change in " + var0 + " in " + info_items[dim_MOD-1]@start_year + \ + "-" + info_items[dim_MOD - 1]@end_year + \ + " displayed as anomalies with respect to " + info_items[0]@start_year + \ + "-" + info_items[0]@end_year + " for " + \ + diag_script_info@experiment + "." 
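+  ; the caption takes the scenario period from the last dataset in
+  ; info_items and the reference period from the first one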
+ statistics = ("mean") + domains = ("global") + plot_types = ("geo") + authors = (/"lorenz_ruth"/) + references = (/"collins13ipcc"/) + infiles = metadata_att_as_array(info_items, "filename") + log_provenance(nc_file, plot_file, caption, statistics, domains, \ + plot_types, authors, references, infiles) + + leave_msg(DIAG_SCRIPT, "") + +end diff --git a/esmvaltool/diag_scripts/ipcc_ar5/ch12_plot_map_diff_mmm_stipp.ncl b/esmvaltool/diag_scripts/ipcc_ar5/ch12_plot_map_diff_mmm_stipp.ncl new file mode 100644 index 0000000000..77f6441f59 --- /dev/null +++ b/esmvaltool/diag_scripts/ipcc_ar5/ch12_plot_map_diff_mmm_stipp.ncl @@ -0,0 +1,424 @@ +; ############################################################################# +; Multi-model mean change map with significance +; Author: Ruth Lorenz (ETH, Switzerland) +; CRESCENDO project +; ############################################################################# +; +; Caveats +; Loop to plot is variables, seasons, scenarios, periods +; Can either plot seasonal means or annual means in one panel, but not both +; +; Modification history +; 20181307-A_lorenz_ruth: Ported to version 2 +; 20171002-A_lorenz_ruth: Added tags for searching +; 20170530-A_lorenz_ruth: multiple variables possible +; 20170120-A_lorenz_ruth: adapted towritten for ESMValTool +; based on IPCC AR5 scripts. +; +; ############################################################################# + +; A temporary file written by the invoking Python script +; Passes on a number of variables from Python to NCL +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/contour_maps.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" + +begin + ; ############################################################## + ; # Fetch general parameters, set in namelist_collins13ipcc.xml# + ; # passed via environment variables by python code # + ; ############################################################## + enter_msg(DIAG_SCRIPT, "") + + ; 'datasets', 'variables' are fetched from the above 'interface.ncl' file + var0 = variable_info[0]@short_name + info_items = select_metadata_by_name(input_file_info, var0) + dim_MOD = ListCount(info_items) + dim_VAR = ListCount(variable_info) + + ; check if diagnostic is change scaled by global temperature + ; in this case even if two variables present only one to be plotted + tf = isStrSubset(variable_info[0]@diagnostic, "scaleT") + if (tf) then + dim_VAR = 1 + end if + + ; Check required diag_script_info attributes + req_atts = (/"scenarios", "periods"/) + exit_if_missing_atts(diag_script_info, req_atts) +end + +begin + ; Output netcdf directory + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) + + netcdf_dir = diag_script_info@input_files(1) + + dim_scen = dimsizes(diag_script_info@scenarios) + dim_per = dimsizes(diag_script_info@periods) + if (isatt(diag_script_info, "seasons")) then + dim_seas = dimsizes(diag_script_info@seasons) + else + dim_seas = 1 + end if +end + +begin + ; ############################################# + ; # Get parameters from ./variable_defs/*.ncl # + ; # passed via the 'info' attribute # + ; ############################################# + if(isvar("MyParam")) then + delete(MyParam) + end if + if(isatt(variable_info[0], "long_name")) then + MyParam = variable_info[0]@long_name + log_info(" MyParam = " + MyParam) + else + error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \ + "'long_name'") + end if + + ; 
########################################### + ; # Get data # + ; ########################################### + if (dim_VAR .eq. 1) then + datapath = netcdf_dir + "/" + variable_info[0]@diagnostic + ".nc" + else + datapath = netcdf_dir + "/" + var0 + "_" + variable_info[0]@diagnostic + \ + ".nc" + end if + tmp_plot = ncdf_read(datapath, var0) + + ; Check dimensions + dims = getvardims(tmp_plot) + if (dimsizes(dims) .lt. 2) then + error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + " dimensions, " + \ + "need 2 or 3") + end if + idx = ind(dims .eq. "lat") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lat dimension") + end if + nlat = dimsizes(tmp_plot&$dims(idx)$) + idx = ind(dims .eq. "lon") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lon dimension") + end if + nlon = dimsizes(tmp_plot&$dims(idx)$) + + ; predefine data arrays + to_plot = new((/dim_VAR * dim_scen * dim_per * dim_seas, nlat, nlon/), \ + float) + if (isatt(diag_script_info, "sig") .and. \ + diag_script_info@sig .eq. True) then + to_plot_signif = new((/dim_VAR * dim_scen * dim_per * dim_seas, nlat, \ + nlon/), float) + end if + if (isatt(diag_script_info, "not_sig") .and. \ + diag_script_info@not_sig .eq. True) then + to_plot_not_signif = new((/dim_VAR * dim_scen * dim_per * dim_seas, \ + nlat, nlon/), float) + end if + if (isatt(diag_script_info, "model_nr") .and. \ + diag_script_info@model_nr .eq. True) then + model_number = new((/dim_VAR * dim_scen * dim_per * dim_seas/), float) + end if + if (tf) then + pan_var = dim_per * dim_seas + else + pan_var = dim_scen * dim_per * dim_seas + end if + + do v = 0, dim_VAR - 1 + if (v .eq. 0) then + ind1 = 0 + ind2 = pan_var - 1 + else + datapath = netcdf_dir + "/" + variable_info[v]@short_name + "_" + \ + variable_info[v]@diagnostic + ".nc" + tmp_plot = ncdf_read(datapath, variable_info[v]@short_name) + ind1 = v * pan_var + ind2 = ind1 + pan_var - 1 + end if + to_plot(ind1 : ind2, :, :) = tmp_plot + + if (isatt(diag_script_info, "sig") .and. \ + diag_script_info@sig .eq. True) then + ; read already calculated significance + to_plot_signif(ind1 : ind2, :, :) = ncdf_read(datapath, "signif") + end if + if (isatt(diag_script_info, "not_sig") .and. \ + diag_script_info@not_sig .eq. True) then + ; read already calculated significance + to_plot_not_signif(ind1 : ind2, :, :) = ncdf_read(datapath, "not_signif") + end if + if (isatt(diag_script_info, "model_nr") .and. \ + diag_script_info@model_nr .eq. True) then + ; read already calculated number of models included in plot + model_number(ind1 : ind2) = ncdf_read(datapath, "model_nr") + end if + + end do ; loop over variables + ; ###################################################### + ; # Separate output to a graphics file, Panelling # + ; ###################################################### + to_plot@res_gsnDraw = False ; Do not draw yet + to_plot@res_gsnFrame = False ; Don't advance frame. + to_plot@res_cnFillOn = True ; Color plot desired + to_plot@res_cnLineLabelsOn = False ; Contour lines + to_plot@res_cnLinesOn = False + if (isatt(diag_script_info, "colormap")) then + col = read_colormap_file(diag_script_info@colormap) + else + col = "BlueYellowRed" + end if + to_plot@res_cnFillPalette = col + if (isatt(diag_script_info, "span")) then + to_plot@res_cnSpanFillPalette = diag_script_info@span + else + to_plot@res_cnSpanFillPalette = True ; use full colormap + end if + to_plot@res_lbLabelBarOn = False + + if (isatt(diag_script_info, "sig") .and. \ + diag_script_info@sig .eq. 
True) then
+    ; resources dots
+    ress = True
+    ress@gsnDraw = False
+    ress@gsnFrame = False
+    ress@gsnAddCyclic = True
+    ress@cnLinesOn = False
+    ress@cnLineLabelsOn = False
+    ress@cnLevelSelectionMode = "ExplicitLevels"
+    ress@cnLevels = (/.3, 1.3/)
+    ress@cnInfoLabelOn = False
+    ress@tiMainOn = False
+    ress@cnFillOn = True
+    ress@cnFillMode = "AreaFill"
+    ress@cnMonoFillPattern = False
+    ress@cnFillPatterns = (/-1, 17, 17/)
+    ress@cnMonoFillColor = True
+    ress@lbLabelBarOn = False
+    ress@gsnLeftString = ""
+    ress@gsnRightString = ""
+    ress@cnFillDotSizeF = 0.001
+    ress@cnFillScaleF = .6  ; add extra density
+  end if
+  if (isatt(diag_script_info, "not_sig") .and. \
+      diag_script_info@not_sig .eq. True) then
+    ; resources hatch
+    resb = True
+    resb@gsnDraw = False
+    resb@gsnFrame = False
+    resb@gsnAddCyclic = True
+    resb@cnLinesOn = False
+    resb@cnLineLabelsOn = False
+    resb@cnLevelSelectionMode = "ExplicitLevels"
+    resb@cnLevels = (/.3, 1.3/)
+    resb@cnInfoLabelOn = False
+    resb@tiMainOn = False
+    resb@cnFillOn = True
+    resb@cnFillMode = "AreaFill"
+    resb@cnRasterSmoothingOn = False
+    resb@cnMonoFillPattern = False
+    resb@cnFillPatterns = (/-1, 10, 10/)
+    resb@cnMonoFillColor = True
+    resb@lbLabelBarOn = False
+    resb@gsnLeftString = ""
+    resb@gsnRightString = ""
+    resb@cnFillScaleF = 0.25
+  end if
+  if (isatt(diag_script_info, "seasons")) then
+    dim_seas = dimsizes(diag_script_info@seasons)
+  else
+    dim_seas = 1
+  end if
+  if ((isatt(diag_script_info, "label")) .and. \
+      (dimsizes(diag_script_info@label) .ne. \
+       (dim_scen * dim_per * dim_seas))) then
+    dim_scen = 1
+  end if
+
+  nmap = dim_VAR * dim_scen * dim_per * dim_seas
+
+  ; function in aux_plotting.ncl
+  if (isatt(diag_script_info, "pltname")) then
+    wks = get_wks(0, variable_info[0]@diagnostic, \
+                  diag_script_info@pltname + "_" + \
+                  diag_script_info@time_avg)
+  else
+    wks = get_wks(0, variable_info[0]@diagnostic, \
+                  var0 + "_" + diag_script_info@time_avg)
+  end if
+  if (isatt(diag_script_info, "max_vert")) then
+    ; Maximum allowed number of plots per page (vertical)
+    nvert = toint(diag_script_info@max_vert)
+  else
+    nvert = dim_scen
+  end if
+  if (isatt(diag_script_info, "max_hori")) then
+    ; Maximum allowed number of plots per page (horizontal)
+    nhori = toint(diag_script_info@max_hori)
+  else
+    nhori = dim_per
+  end if
+  if ((tf) .and. (nhori * nvert .lt. nmap)) then
+    nhori = nmap
+  end if
+  maps = new(nmap, graphic)  ; collect individual maps in a graphic array
+  do i = 0, nmap - 1  ; loop over panels, creating one map per panel
+    if(isatt(diag_script_info, "label")) then
+      to_plot@res_tiMainString = diag_script_info@label(i)
+    end if
+    to_plot@res_gsnRightString = ""
+    to_plot@res_gsnLeftString = ""
+    if(isatt(diag_script_info, "projection")) then
+      to_plot@res_mpProjection = diag_script_info@projection
+    else
+      to_plot@res_mpProjection = "Robinson"
+    end if
+    ; Set explicit contour levels
+    if (isatt(diag_script_info, "diff_levs")) then
+      to_plot@res_cnLevelSelectionMode = "ExplicitLevels"
+      to_plot@res_cnLevels = diag_script_info@diff_levs
+    end if
+
+    to_plot@res_mpPerimOn = False
+
+    maps(i) = contour_map(wks, to_plot(i, :, :), var0)
+
+    if (isatt(diag_script_info, "sig") .and. \
+        diag_script_info@sig .eq. True) then
+      ; plot dots
+      if all(to_plot_signif(i, :, :) .eq. 1) then
+        to_plot_signif(i, 0, 0) = 0
+      end if
+      if (max(to_plot_signif(i, :, :)) .gt. \
+          min(to_plot_signif(i, :, :))) then
+        plot1 = gsn_csm_contour(wks, to_plot_signif(i, :, :), ress)
+        overlay(maps(i), plot1)
+      end if
+    end if
+    if (isatt(diag_script_info, "not_sig") .and. \
+        diag_script_info@not_sig .eq. True) then
+      ; plot hatch
+      if all(to_plot_not_signif(i, :, :) .eq. 1) then
+        to_plot_not_signif(i, 0, 0) = 0
+      end if
+      if (max(to_plot_not_signif(i, :, :)) .gt. \
+          min(to_plot_not_signif(i, :, :))) then
+        plot2 = gsn_csm_contour(wks, to_plot_not_signif(i, :, :), resb)
+        overlay(maps(i), plot2)
+      end if
+    end if
+  end do
+
+  txres = True
+  txres@txFontHeightF = 0.02
+  amres = True
+  amres@amParallelPosF = 0.48     ; This is the right edge of the plot.
+  amres@amOrthogonalPosF = -0.48  ; This is the top edge of the plot.
+  amres@amJust = "TopRight"
+  if (isatt(diag_script_info, "model_nr") .and. \
+      diag_script_info@model_nr .eq. True) then
+    do i = 0, dimsizes(maps) - 1
+      txid1 = gsn_create_text(wks, "" + model_number(i), txres)
+      annoid1 = gsn_add_annotation(maps(i), txid1, amres)
+    end do
+  end if
+  pres = True                   ; needed to override panelling defaults
+  pres@gsnPanelLabelBar = True  ; add a common label bar for all panels
+
+  a4_height = 29.7  ; page height in centimeters (A4)
+  a4_width = 23.0   ; page width in centimeters
+  cm_per_inch = 2.54
+
+  pres@gsnPaperWidth = a4_width / cm_per_inch
+  pres@gsnPaperHeight = a4_height / cm_per_inch
+  pres@gsnPaperOrientation = "portrait"
+  pres@gsnPanelTop = 0.96
+  if (isatt(diag_script_info, "units")) then
+    pres@lbTitleString = diag_script_info@units
+  else
+    unit_string = format_units(to_plot@units)
+    pres@lbTitleString = "(" + unit_string + ")"
+  end if
+  pres@lbTitleFontHeightF = 0.017
+  pres@lbTitleDirection = "across"
+  pres@lbTitlePosition = "Right"
+  pres@lbTitleJust = "CenterLeft"
+  pres@lbLabelFontHeightF = 0.014
+  pres@lbLabelJust = "CenterCenter"
+  pres@lbLabelAutoStride = True
+
+  pres@txFontHeightF = 0.02
+  if (isatt(diag_script_info, "title")) then
+    pres@txString = diag_script_info@title
+  end if
+  pres@pmLabelBarParallelPosF = 0.06
+  pres@pmLabelBarWidthF = .6
+
+  outfile = panelling(wks, maps, nvert, nhori, pres)
+
+  log_info("Wrote " + outfile)
+
+  ; collect meta-data
+  nc_file = datapath
+  plot_file = outfile
+  if ((isatt(diag_script_info, "diagscript") .and. \
+      diag_script_info@diagscript .eq. \
+      "ch12_calc_map_diff_scaleT_mmm_stipp")) then
+    string1 = variable_info[0]@long_name + \
+              " change patterns derived from transient " + \
+              "simulations from the CMIP ensemble, scaled to 1C of " + \
+              "global mean surface temperature change. The patterns " + \
+              "have been calculated by computing 20-year averages " + \
+              "starting in " + str_join(diag_script_info@periods, ", ") + \
+              " and over the period " + info_items[0]@start_year + "-" + \
+              info_items[0]@end_year + " for the available simulations " + \
+              "under all RCPs, taking their difference "
+    if (var0 .eq. "pr") then
+      string2 = "(percentage difference in the case of precipitation) "
+    else
+      string2 = ""
+    end if
+    string3 = "and normalizing it, grid-point by grid-point, by the " + \
+              "corresponding value of global average temperature change " + \
+              "for each model and scenario. The normalized patterns have " + \
+              "then been averaged across models and scenarios. Stippling " + \
+              "indicates where the mean change averaged over all " + \
+              "realizations is larger than the 95th percentile of the " + \
+              "distribution of models. Similar to Figure 12.10 of " + \
+              "Collins et al. (2013)."
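+    ; assemble the caption below from the three parts above; string2 is
+    ; only non-empty for precipitation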
+ caption = string1 + string2 + string3 + else + caption = "Multi-model ensemble average of " + \ + variable_info[0]@long_name + \ + " change (compared to " + info_items[0]@start_year + "-" + \ + info_items[0]@end_year + " base period) for 20 year periods " + \ + "starting in " + str_join(diag_script_info@periods, ", ") + \ + " for " + str_join(diag_script_info@scenarios, ", ") + \ + ". Hatching indicates " + \ + "regions where the multi-model mean change is less than one " + \ + "standard deviation of internal variability. Stippling " + \ + "indicates regions where the multi-model change is greater " + \ + "than two standard deviations of internal variability and " + \ + "where at least 90% of the models agree on the sign of " + \ + "change. The number of models used is indicated in the" + \ + " upper right corner of each plot. Similar to Figure " + \ + "12.11 of Collins et al. (2013)." + end if + statistics = ("mean") + domains = ("global") + plot_types = ("geo") + authors = (/"lorenz_ruth"/) + references = (/"collins13ipcc"/) + infiles = metadata_att_as_array(info_items, "filename") + log_provenance(nc_file, plot_file, caption, statistics, domains, \ + plot_types, authors, references, infiles) + + leave_msg(DIAG_SCRIPT, "") +end diff --git a/esmvaltool/diag_scripts/ipcc_ar5/ch12_plot_ts_line_mean_spread.ncl b/esmvaltool/diag_scripts/ipcc_ar5/ch12_plot_ts_line_mean_spread.ncl new file mode 100644 index 0000000000..20a5e277ea --- /dev/null +++ b/esmvaltool/diag_scripts/ipcc_ar5/ch12_plot_ts_line_mean_spread.ncl @@ -0,0 +1,278 @@ +; ############################################################################# +; Plot global (or regional) timeseries for historical and RCPs 1850-2?00 +; Author: Ruth Lorenz (ETH, Switzerland) +; PROJECT-NAME CRESCENDO +; ############################################################################# +; +; Description +; Plot global or regional timeseries in historical and RCPs +; 1850-2?00 for all available models precalculated by +; ts_line_collins_ipcc_ch12.ncl +; +; Caveats +; Variable number of scenarios? So far hardcoded 4 scenarios (rcp2.6, +; rcp4.5, rcp6.0, rcp8.5) +; +; Modification history +; 20171002-A_lorenz_ruth: Added tags for searching +; 20170523-A_lorenz_ruth: separated plotting from calculations +; 20160625-A_lorenz_ruth: adapted to ESMValTool in ts_line_collins_ipcc_ch12 +; 20130506-A_sedlacek_jan: written for IPCC AR5. 
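+;
+; Required diag_script_info attributes
+;    syears, eyears: start and end years of the time periods shown,
+;    matching those used by ts_line_collins_ipcc_ch12.ncl when
+;    precalculating the plotted time series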
+; +; ############################################################################# + +; A temporary file written by the invoking Python script +; Passes on a number of variables from Python to NCL +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" +begin + ; ############################################################## + ; # Fetch general parameters, set in namelist_collins13ipcc.xml# + ; # passed via environment variables by python code # + ; ############################################################## + enter_msg(DIAG_SCRIPT, "") + + ; 'models', 'variables' are fetched from the above 'interface.ncl' file + var0 = variable_info[0]@short_name + info_items = select_metadata_by_name(input_file_info, var0) + dim_MOD = ListCount(info_items) + dim_VAR = ListCount(variable_info) + + ; Check required diag_script_info attributes + req_atts = (/"syears", "eyears"/) + exit_if_missing_atts(diag_script_info, req_atts) + +end + + +begin + ; Output netcdf directory + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) + + netcdf_dir = diag_script_info@input_files(1) + + ; Output netcdf directory + plot_dir = config_user_info@plot_dir + system("mkdir -p " + plot_dir) +end + +begin + ; ############################################# + ; # Get parameters from ./variable_defs/*.ncl # + ; # passed via the 'info' attribute # + ; ############################################# + if(isvar("MyParam")) then + delete(MyParam) + end if + if(isatt(variable_info[0], "long_name")) then + MyParam = variable_info[0]@long_name + log_debug(" MyParam = " + MyParam) + else + error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \ + "'long_name'") + end if + + ; ########################################### + ; # Get data # + ; ########################################### + datapath = netcdf_dir + "/" + variable_info[0]@diagnostic + ".nc" + to_plot = ncdf_read(datapath, var0) + if(isatt(diag_script_info, "model_nr") .and. \ + diag_script_info@model_nr .eq. 
True) then + datapath2 = netcdf_dir + "/nr_runs_" + variable_info[0]@diagnostic + ".nc" + runs = ncdf_read(datapath2, "nr") + end if + ; start and end years of timeperiods + syears = diag_script_info@syears + eyears = diag_script_info@eyears + nperiods = dimsizes(syears) + + dims_data = (dimsizes(to_plot&metric) - 1) / 3 + + ; ########################################### + ; *********** PLOTTING ************ + ; Create plot variables + outfile = var0 + "_ts_line_" + syears(0) + "_" + eyears(nperiods - 1) + wks = get_wks(0, DIAG_SCRIPT, outfile) + if(isatt(diag_script_info, "colormap")) then + colormap = RGBtoCmap(diag_script_info@colormap) + else + colormap = "StepSeq25" + end if + gsn_define_colormap(wks, colormap) + + colors_main = (/1, 3, 4, 5, 6/) + colors_spread = (/2, 3, 4, 5, 6/) + + res = True + + res@gsnDraw = False + res@gsnFrame = False + res@txFont = 25 + res@vpHeightF = 0.5 ; change aspect ratio of plot + res@vpWidthF = 0.8 + res@vpXF = 0.15 ; start plot at x ndc coord + + if (isatt(diag_script_info, "title")) then + res@tiMainString = diag_script_info@title + end if + if(isatt(diag_script_info, "yaxis")) then + res@tiYAxisString = diag_script_info@yaxis + end if + res@tiXAxisString = "Year" + + res@xyDashPatterns = 0 + res@trXMinF = toint(syears(0)) + res@trXMaxF = toint(eyears(nperiods - 1)) + if(isatt(diag_script_info, "ymin")) then + res@trYMinF = diag_script_info@ymin + end if + if(isatt(diag_script_info, "ymax")) then + res@trYMaxF = diag_script_info@ymax + end if + + res@tmXBMode = "Explicit" + res@tmXBValues = ispan(toint(syears(0)), toint(eyears(nperiods - 1)), 50) + res@tmXBLabels = ispan(toint(syears(0)), toint(eyears(nperiods - 1)), 50) + res@tmXBMinorValues = ispan(toint(syears(0)), \ + toint(eyears(nperiods - 1)), 10) + + res@xyLineThicknesses = 3.5 + res@xyLineColor = colors_main(0) + + ; create plot + plot = gsn_csm_xy(wks, to_plot&year, to_plot(0, :), res) + + res@gsnXYFillColors = colors_spread(0) + res@xyLineColor = -1 + spread_plot = gsn_csm_xy(wks, to_plot&year, to_plot(1 : 2, :), res) + overlay(plot, spread_plot) + + delete(res@gsnXYFillColors) + + do j = 1, dims_data + if(all(ismissing(to_plot(j * 3, :)))) then + error_msg("w", DIAG_SCRIPT, "", "all missing values for " + \ + "dimension " + j * 3 + " in to_plot, need at least 5" + \ + " models, go to next") + continue + end if + res@xyLineColor = colors_main(j) + plot1 = gsn_csm_xy(wks, to_plot&year, to_plot(j * 3, :), res) + overlay(plot, plot1) + + res@gsnXYFillColors = colors_spread(j) + res@gsnXYFillOpacities = (/0.2, 0.2, 0.2/) + res@xyLineColor = -1 + spread_plot = gsn_csm_xy(wks, to_plot&year,\ + to_plot((j * 3) + 1 : (j * 3) + 2, :), res) + overlay(plot, spread_plot) + delete(res@gsnXYFillColors) + delete(res@gsnXYFillOpacities) + end do + + polyres = True + polyres@gsLineThicknessF = 1.5 + ; add polylines + dum = new(nperiods, graphic) + do n = 0, nperiods - 1 + dum(n) = gsn_add_polyline(wks, plot, (/toint(eyears(n)), \ + toint(eyears(n))/), (/-20, 20/), polyres) + end do + delete(res@xyLineColor) + + ; Attach a legend + lgres = True + lgres@lgLineColors = colors_main ; (/1, 3, 4, 5, 6/) + lgres@lgItemType = "Lines" ; show lines only (default) + lgres@lgLabelFontHeightF = .08 ; legend label font thickness + lgres@vpWidthF = 0.15 ; width of legend (NDC) + lgres@vpHeightF = 0.16 ; height of legend (NDC) + lgres@lgMonoDashIndex = True + lgres@lgDashIndex = 0 + lgres@lgLineThicknessF = 4 + lgres@lgPerimOn = False + lgres@lgItemOrder = (/4, 3, 2, 1, 0/) + if (isatt(diag_script_info, "label")) then + 
labels = " " + diag_script_info@label + legend = gsn_create_legend(wks, 5, labels, lgres) + amres = True + amres@amJust = "TopLeft" ; Use bottom right corner of box + ; for determining its location. + amres@amParallelPosF = -0.46 ; Move legend to right + if (var0 .ne. "snw") then + amres@amParallelPosF = -0.45 ; Move legend to right + amres@amOrthogonalPosF = -0.45 ; Move legend up. + else + amres@amParallelPosF = -0.47 ; Move legend to right + amres@amOrthogonalPosF = 0.12 ; Move legend down. + end if + annoid = gsn_add_annotation(plot, legend, amres) ; add legend to plot + end if + if(isatt(diag_script_info, "model_nr")) then + getvalues plot + "tmYLLabelFontHeightF" : fheight + "trYMinF" : trYMinF + "trYMaxF" : trYMaxF + end getvalues + text = new(1 + (dims_data - 1) * nperiods, graphic) + txres = True + txres@txFontHeightF = fheight + label = tostring(runs(0, 0)) + xloc = (toint(syears(0)) + (toint(eyears(0)) - toint(syears(0))) / 2) + yloc = (trYMaxF + trYMinF) / 2 + ydiff = (trYMaxF + trYMinF) / 7 + if yloc .le. 0 then + ydiff = - ydiff + end if + text(0) = gsn_add_text(wks, plot, label, xloc, yloc, txres) + i = 1 + do nscen = 1, dims_data + do nper = 1, nperiods - 1 + txres@txFontColor = colors_main(nscen) + label = tostring(runs(nscen, nper)) + xloc = (toint(eyears(nper)) - toint(syears(nper))) / 2 + text(i) = gsn_add_text(wks, plot, label, toint(syears(nper)) + xloc, \ + yloc - nscen * ydiff, txres) + i = i + 1 + end do + end do + end if + draw(wks) + frame(wks) + + log_info(" Wrote " + wks@fullname) + + ; collect meta-data + nc_file = datapath + + if (isatt(diag_script_info, "spread")) then + spread_str = diag_script_info@spread + else + spread_str = "1.0" + end if + caption = "Time series of global annual mean " + \ + variable_info[0]@long_name + " anomalies (relative to " + \ + diag_script_info@begin_ref_year + "-" + diag_script_info@end_ref_year + \ + ") from concentration driven experiments. " + \ + "Projections are shown for each RCP for the " + \ + "multi-model mean (solid lines) and the 5 to 95% range (+-" \ + + spread_str + " standard deviation) across the" + \ + " distribution of individual models (shading). " + \ + "Only one ensemble member is used from each model and numbers in" + \ + " the figure indicate the number of different models " + \ + "contributing to the different time periods." 
+ statistics = ("mean") + domains = ("global") + plot_types = ("times") + authors = (/"lorenz_ruth"/) + references = (/"collins13ipcc"/) + infiles = metadata_att_as_array(info_items, "filename") + log_provenance(nc_file, wks@fullname, caption, statistics, domains, \ + plot_types, authors, references, infiles) + + leave_msg(DIAG_SCRIPT, "") +end diff --git a/esmvaltool/diag_scripts/ipcc_ar5/ch12_plot_zonal_diff_mmm_stipp.ncl b/esmvaltool/diag_scripts/ipcc_ar5/ch12_plot_zonal_diff_mmm_stipp.ncl new file mode 100644 index 0000000000..261082f79f --- /dev/null +++ b/esmvaltool/diag_scripts/ipcc_ar5/ch12_plot_zonal_diff_mmm_stipp.ncl @@ -0,0 +1,392 @@ +; ############################################################################# +; Multi-model mean change zonal plot (with significance and non-significance) +; Author: Ruth Lorenz (ETH, Switzerland) +; CRESCENDO project +; ############################################################################# +; +; Description +; Annual mean or seasonal change in multi-model mean with signficance +; and non-significance +; lev-lon field precalculated, plotting only +; +; Caveats +; Only handles one variable at a time +; +; Modification history +; 20181105-A_lorenz_ruth: ported to v2 +; 20171002-A_lorenz_ruth: Added tags for searching +; 20170523-A_lorenz_ruth: written for ESMValTool +; based on IPCC AR5 scripts. +; +; ############################################################################# +; A temporary file written by the invoking Python script +; Passes on a number of variables from Python to NCL +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/zonalmean_profile.ncl" +load "$diag_scripts/shared/plot/contourplot.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" + +begin + ; ############################################################## + ; # Fetch general parameters, set in namelist_collins13ipcc.xml# + ; # passed via environment variables by python code # + ; ############################################################## + enter_msg(DIAG_SCRIPT, "") + + ; 'datasets', 'variables' are fetched from the + ; above 'interface.ncl' file + var0 = variable_info[0]@short_name + info_items = select_metadata_by_name(input_file_info, var0) + dim_MOD = ListCount(info_items) + dim_VAR = ListCount(variable_info) + + ; Check required diag_script_info attributes + req_atts = (/"scenarios", "periods"/) + exit_if_missing_atts(diag_script_info, req_atts) +end + +begin + ; Output netcdf directory + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) + + netcdf_dir = diag_script_info@input_files(1) + + dim_scen = dimsizes(diag_script_info@scenarios) + dim_per = dimsizes(diag_script_info@periods) + if (isatt(diag_script_info, "seasons")) then + dim_seas = dimsizes(diag_script_info@seasons) + else + dim_seas = 1 + end if +end + +begin + ; ############################################# + ; # Get parameters from ./variable_defs/*.ncl # + ; # passed via the 'info' attribute # + ; ############################################# + if(isvar("MyParam")) then + delete(MyParam) + end if + if(isatt(variable_info[0], "long_name")) then + MyParam = variable_info[0]@long_name + log_info(" MyParam = " + MyParam) + else + error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \ + "'long_name'") + end if + + ; ########################################### + ; # Get data # + ; ########################################### + datapath = netcdf_dir + "/" + 
variable_info[0]@diagnostic + ".nc" + to_plot = ncdf_read(datapath, var0) + + ; Check dimensions + dims = getvardims(to_plot) + if (dimsizes(dims) .lt. 2) then + error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + " dimensions, " + \ + "need 2 or 3") + end if + idx = ind(dims .eq. "lat") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lat dimension") + end if + idx = ind(dims .eq. "lev" .or. dims .eq. "plev") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lon dimension") + end if + lev_name = dims(idx) + + if (isatt(diag_script_info, "sig") .and. \ + diag_script_info@sig .eq. True) then + ; read already calculated significance + to_plot_signif = ncdf_read(datapath, "signif") + end if + if (isatt(diag_script_info, "not_sig") .and. \ + diag_script_info@not_sig .eq. True) then + ; read already calculated significance + to_plot_not_signif = ncdf_read(datapath, "not_signif") + end if + if (isatt(diag_script_info, "model_nr") .and. \ + diag_script_info@model_nr .eq. True) then + ; read already calculated number of models included in plot + model_number = ncdf_read(datapath, "model_nr") + end if + if (isatt(diag_script_info, "base_cn")) then + to_plot_base = ncdf_read(datapath, var0 + "_base") + end if + ; ###################################################### + ; # Separate output to a graphics file, Panelling # + ; ###################################################### + ; function in aux_plotting.ncl + wks = get_wks(0, variable_info[0]@diagnostic, \ + var0 + "_" + diag_script_info@time_avg) + + if (isatt(diag_script_info, "max_vert")) then + ; Maximum allowed number of plots per page (vertical) + nvert = toint(diag_script_info@max_vert) + else + nvert = dim_scen + end if + if (isatt(diag_script_info, "max_hori")) then + ; Maximum allowed number of plots per page (horizontal) + nhori = toint(diag_script_info@max_hori) + else + nhori = 1 + end if + + to_plot@res_gsnDraw = False ; Do not draw yet + to_plot@res_gsnFrame = False ; Don't advance frame. + to_plot@res_cnFillOn = True ; Color plot desired + to_plot@res_cnLineLabelsOn = False ; Contour lines + to_plot@res_cnLinesOn = False + to_plot@res_cnInfoLabelOn = False + to_plot@res_tiMainOn = False + if(isatt(diag_script_info, "colormap")) then + colormap = read_colormap_file(diag_script_info@colormap) + else + colormap = "BlueYellowRed" + end if + to_plot@res_cnFillPalette = colormap + if (isatt(diag_script_info, "span")) then + to_plot@res_cnSpanFillPalette = diag_script_info@span + else + to_plot@res_cnSpanFillPalette = True ; use full colormap + end if + to_plot@res_lbLabelBarOn = False + if (lev_name .eq. "lev") then + to_plot@res_trYReverse = True + to_plot@res_gsnYAxisIrregular2Linear = True + else + to_plot@res_trYMinF = 50. + to_plot@res_trYMaxF = 1000. + end if + if (isatt(diag_script_info, "base_cn")) then + to_plot_base@res_gsnFrame = False + to_plot_base@res_gsnDraw = False + to_plot_base@res_gsnStringFontHeightF = 0.019 + to_plot_base@res_cnInfoLabelOn = True + to_plot_base@res_cnFillOn = False + to_plot_base@res_cnLinesOn = True + to_plot_base@res_cnLineLabelsOn = False + to_plot_base@res_lbLabelBarOn = False + to_plot_base@res_trYReverse = True + to_plot_base@res_trYMinF = 50. + to_plot_base@res_trYMaxF = 1000. + to_plot_base@res_gsnContourNegLineDashPattern = 1 + to_plot_base@res_gsnContourZeroLineThicknessF = 2. 
+ + to_plot_base@res_gsnCenterString = "" + to_plot_base@res_gsnLeftString = "" + to_plot_base@res_gsnLeftStringOrthogonalPosF = -.003 + to_plot_base@res_gsnRightString = "" ; avg_period + if (isatt(diag_script_info, "base_cnMaxLevel")) then + to_plot_base@res_cnMaxLevelValF = diag_script_info@base_cnMaxLevel + else + to_plot_base@res_cnMaxLevelValF = 40 + end if + if (isatt(diag_script_info, "base_cnMinLevel")) then + to_plot_base@res_cnMinLevelValF = diag_script_info@base_cnMinLevel + else + to_plot_base@res_cnMinLevelValF = - (to_plot_base@res_cnMaxLevelValF) + end if + if (isatt(diag_script_info, "base_cnLevelSpacing")) then + to_plot_base@res_cnLevelSpacingF = diag_script_info@base_cnLevelSpacing + else + to_plot_base@res_cnLevelSpacingF = 10 + end if + end if + if (isatt(diag_script_info, "sig") .and. \ + diag_script_info@sig .eq. True) then + ; resources dots + to_plot_signif@res_gsnDraw = False + to_plot_signif@res_gsnFrame = False + to_plot_signif@res_gsnAddCyclic = True + to_plot_signif@res_cnLinesOn = False + to_plot_signif@res_cnLineLabelsOn = False + to_plot_signif@res_cnLevelSelectionMode = "ExplicitLevels" + to_plot_signif@res_cnLevels = (/.3, 1.3/) + to_plot_signif@res_cnInfoLabelOn = False + to_plot_signif@res_tiMainOn = False + to_plot_signif@res_cnFillOn = True + to_plot_signif@res_cnFillMode = "AreaFill" + to_plot_signif@res_cnMonoFillPattern = False + to_plot_signif@res_cnFillPatterns = (/-1, 17, 17/) + to_plot_signif@res_cnMonoFillColor = True + to_plot_signif@res_lbLabelBarOn = False + to_plot_signif@res_gsnLeftString = "" + to_plot_signif@res_gsnRightString = "" + to_plot_signif@res_cnFillDotSizeF = 0.001 + to_plot_signif@res_cnFillScaleF = .6 + end if + if (isatt(diag_script_info, "not_sig") .and. \ + diag_script_info@not_sig .eq. True) then + ; resources hatch + to_plot_not_signif@res_gsnDraw = False + to_plot_not_signif@res_gsnFrame = False + to_plot_not_signif@res_gsnAddCyclic = True + to_plot_not_signif@res_cnLinesOn = False + to_plot_not_signif@res_cnLineLabelsOn = False + to_plot_not_signif@res_cnLevelSelectionMode = "ExplicitLevels" + to_plot_not_signif@res_cnLevels = (/.3, 1.3/) + to_plot_not_signif@res_cnInfoLabelOn = False + to_plot_not_signif@res_tiMainOn = False + to_plot_not_signif@res_cnFillOn = True + to_plot_not_signif@res_cnFillMode = "AreaFill" + to_plot_not_signif@res_cnRasterSmoothingOn = False + to_plot_not_signif@res_cnMonoFillPattern = False + to_plot_not_signif@res_cnFillPatterns = (/-1, 10, 10/) + to_plot_not_signif@res_cnMonoFillColor = True + to_plot_not_signif@res_lbLabelBarOn = False + to_plot_not_signif@res_gsnLeftString = "" + to_plot_not_signif@res_gsnRightString = "" + end if + + nmap = dim_scen * dim_per * dim_seas + + maps = new(nmap, graphic) ; collect individual "map"s in a graphic array + do i = 0, nmap - 1 + to_plot@res_gsnLeftString = "" + to_plot@res_gsnRightString = "" + if(isatt(diag_script_info, "label")) then + to_plot@res_gsnCenterString = diag_script_info@label(i) + end if + ; Set explicit contour levels + if(isatt(diag_script_info, "diff_levs")) then + to_plot@res_cnLevelSelectionMode = "ExplicitLevels" + to_plot@res_cnLevels = diag_script_info@diff_levs + end if + if (lev_name .eq. "plev") then + maps(i) = zonalmean_profile(wks, to_plot(i, :, :), var0) + else + to_plot@res_tiYAxisString = "Depth [m]" + maps(i) = contourplot(wks, to_plot(i, :, :), var0) + end if + if (isatt(diag_script_info, "sig") .and. \ + diag_script_info@sig .eq. True) then + ; plot dots + if all(to_plot_signif(i, :, :) .eq. 
1) then + to_plot_signif(i, 0, 0) = 0 + end if + if (max(to_plot_signif(i, :, :)) .gt. \ + min(to_plot_signif(i, :, :))) then + to_plot_signif@res_gsnAddCyclic = False + if (lev_name .eq. "plev") then + plot1 = zonalmean_profile(wks, to_plot_signif(i, :, :), var0) + else + plot1 = contourplot(wks, to_plot_signif(i, :, :), var0) + end if + overlay(maps(i), plot1) + end if + end if + if (isatt(diag_script_info, "not_sig") .and. \ + diag_script_info@not_sig .eq. True) then + ; plot hatch + if all(to_plot_not_signif(i, :, :) .eq. 1) then + to_plot_not_signif(i, 0, 0) = 0 + end if + if (max(to_plot_not_signif(i, :, :)) .gt. \ + min(to_plot_not_signif(i, :, :))) then + to_plot_not_signif@res_gsnAddCyclic = False + to_plot_not_signif@res_cnFillScaleF = 0.45 + if (lev_name .eq. "plev") then + plot2 = zonalmean_profile(wks, to_plot_not_signif(i, :, :), var0) + else + plot2 = contourplot(wks, to_plot_not_signif(i, :, :), var0) + end if + overlay(maps(i), plot2) + end if + end if + + if (isatt(diag_script_info, "base_cn")) then + if (lev_name .eq. "plev") then + cont1 = zonalmean_profile(wks, to_plot_base(i, :, :), var0) + else + cont1 = contourplot(wks, to_plot_base(i, :, :), var0) + end if + overlay(maps(i), cont1) + end if + end do + + txres = True + txres@txFontHeightF = 0.03 + amres = True + amres@amParallelPosF = 0.48 ; This is the right edge of the plot. + amres@amOrthogonalPosF = -0.48 ; This is the bottom edge of the plot. + amres@amJust = "TopRight" + if ((isatt(diag_script_info, "model_nr")) .and. \ + (diag_script_info@model_nr .eq. True)) then + do i = 0, dimsizes(maps) - 1 + txid1 = gsn_create_text(wks, "" + model_number(i), txres) + annoid1 = gsn_add_annotation(maps(i), txid1, amres) + end do + end if + psles = True ; needed to override panelling defaults + psles@gsnPanelLabelBar = True ; general label bar desired here + + a4_height = 29.7 ; in centimeters, if my + a4_width = 23.0 ; reference is correct + cm_per_inch = 2.54 + + psles@gsnPaperWidth = a4_width / cm_per_inch + psles@gsnPaperHeight = a4_height / cm_per_inch + psles@gsnPaperOrientation = "portrait" + psles@gsnPanelTop = 0.96 + + if (isatt(diag_script_info, "units")) then + psles@lbTitleString = diag_script_info@units + else + unit_string = format_units(to_plot@units) + psles@lbTitleString = "(" + unit_string + ")" + end if + psles@lbTitleFontHeightF = 0.017 + psles@lbTitleDirection = "across" + psles@lbTitlePosition = "Right" + psles@lbTitleJust = "CenterLeft" + psles@lbLabelFontHeightF = 0.014 + psles@lbLabelJust = "CenterCenter" + psles@lbLabelAutoStride = False + + psles@txFontHeightF = 0.02 + if(isatt(diag_script_info, "title")) then + psles@txString = diag_script_info@title + end if + psles@pmLabelBarParallelPosF = 0.06 + psles@pmLabelBarOrthogonalPosF = -0.01 + + outfile = panelling(wks, maps, nvert, nhori, psles) + + log_info(" Wrote " + outfile) + + ; collect meta-data + nc_file = datapath + plot_file = outfile + caption = "Multi-model changes in zonal mean " + \ + variable_info[0]@long_name + \ + " relative to " + info_items[0]@start_year + "-" + \ + info_items[0]@end_year + " for 20 year periods starting in " + \ + str_join(diag_script_info@periods, ", ") + " under the " + \ + str_join(diag_script_info@scenarios, ", ") + \ + " forcing scenarios. Hatching indicates regions where the " + \ + "multi-model mean change is less than one standard " + \ + "deviation of internal variability. 
Stippling indicates " + \
+            "regions where the multi-model mean change is greater " + \
+            "than two standard deviations of internal variability and " + \
+            "where at least 90% of the models agree on the sign of " + \
+            "change. Similar to Figure 12.12" + \
+            " of Collins et al. (2013)."
+  statistics = ("mean")
+  domains = ("global")
+  plot_types = ("zonal")
+  authors = (/"lorenz_ruth"/)
+  references = (/"collins13ipcc"/)
+  infiles = metadata_att_as_array(info_items, "filename")
+  log_provenance(nc_file, plot_file, caption, statistics, domains, \
+                 plot_types, authors, references, infiles)
+
+  leave_msg(DIAG_SCRIPT, "")
+end
diff --git a/esmvaltool/diag_scripts/ipcc_ar5/ch12_snw_area_change_fig12-32.ncl b/esmvaltool/diag_scripts/ipcc_ar5/ch12_snw_area_change_fig12-32.ncl
new file mode 100644
index 0000000000..4834f5ccf0
--- /dev/null
+++ b/esmvaltool/diag_scripts/ipcc_ar5/ch12_snw_area_change_fig12-32.ncl
@@ -0,0 +1,596 @@
+; #############################################################################
+; SNOW AREA CHANGE
+; Author: Ruth Lorenz (ETH Zurich, Switzerland)
+; CRESCENDO project
+; #############################################################################
+;
+; Description
+;    Calculate Snow area in a region (e.g. Northern Hemisphere)
+;    and season (e.g. NH spring March & April)
+;    relative to reference period (e.g. 1986-2005) as in Fig. 12.32 of
+;    IPCC AR5
+;
+; Modification history
+;    20180718-A_lorenz_ruth: ported to v2 and bugs in area calculation fixed
+;    20170120-A_lorenz_ruth: written.
+;
+; #############################################################################
+
+; A temporary file written by the invoking Python script
+; Passes on a number of variables from Python to NCL
+load "$diag_scripts/../interface_scripts/interface.ncl"
+
+load "$diag_scripts/shared/latlon.ncl"
+load "$diag_scripts/shared/statistics.ncl"
+
+begin
+  ; ##############################################################
+  ; # Fetch general parameters, set in namelist_collins13ipcc.xml#
+  ; # passed via environment variables by python code            #
+  ; ##############################################################
+  enter_msg(DIAG_SCRIPT, "")
+
+  ; 'datasets', 'variables' are fetched from the above 'interface.ncl' file
+  dim_VAR = ListCount(variable_info)
+  if (dim_VAR .eq. 3) then
+    var0 = variable_info[0]@short_name
+    info_items0 = select_metadata_by_name(input_file_info, var0)
+    var1 = variable_info[1]@short_name
+    info_items1 = select_metadata_by_name(input_file_info, var1)
+    var2 = variable_info[2]@short_name
+    info_items2 = select_metadata_by_name(input_file_info, var2)
+  else
+    error_msg("f", DIAG_SCRIPT, "", "3 variables are needed, " + \
+              "'snw', 'sftlf' and 'sftgif'")
+  end if
+
+  ; Match snw variable to correct variable
+  if (var0 .eq. "snw") then
+    var_snw = var0
+    info_items = info_items0
+    idx_snw = 0
+  elseif (var1 .eq. "snw") then
+    var_snw = var1
+    info_items = info_items1
+    idx_snw = 1
+  elseif (var2 .eq.
"snw") then + var_snw = var2 + info_items = info_items2 + idx_snw = 2 + end if + dim_MOD = ListCount(info_items) + + ; Check required diag_script_info attributes + req_atts = (/"scenarios", "syears", "eyears", "begin_ref_year", \ + "end_ref_year", "months"/) + exit_if_missing_atts(diag_script_info, req_atts) +end + +begin + ; Output netcdf directory + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) + +end +begin + ; ############################################# + ; # Get parameters from ./variable_defs/*.ncl # + ; # passed via the 'info' attribute # + ; ############################################# + if(isvar("MyParam")) then + delete(MyParam) + end if + if(isatt(variable_info[0], "long_name")) then + MyParam = variable_info[0]@long_name + log_info(" MyParam = " + MyParam) + else + error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \ + "'long_name'") + end if + ; ########################################### + ; # Get data and average annual and globally# + ; ########################################### + ; get data from first model for historical period + imod = 0 ; NCL array indices start from zero + log_info("processing " + info_items[imod]@dataset + "_" \ + + info_items[imod]@exp + "_" \ + + info_items[imod]@ensemble) + + ; See ./interface_scripts/data_handling.ncl + A0 = read_data(info_items[imod]) + + ; Check dimensions + dims = getvardims(A0) + if (dimsizes(dims) .lt. 2) then + error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + " dimensions, " + \ + "need 2 or 3") + end if + idx = ind(dims .eq. "lat") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lat dimension") + end if + idx = ind(dims .eq. "lon") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lon dimension") + end if + + ; how many months included in analysis? + months = diag_script_info@months + dim_SEAS = dimsizes(months) + + ; we need to calculate the snow area using sftgif and sftlf + ; read sftgif and sftlf (land sea mask) + if (var0 .eq. "sftlf") then + land_sea_sftlf = read_data(info_items0[imod]) + var_sftlf = var0 + info_items_sftlf = info_items0 + elseif (var1 .eq. "sftlf") then + land_sea_sftlf = read_data(info_items1[imod]) + var_sftlf = var1 + info_items_sftlf = info_items1 + elseif (var2 .eq. "sftlf") then + land_sea_sftlf = read_data(info_items2[imod]) + var_sftlf = var2 + info_items_sftlf = info_items2 + end if + + if (var0 .eq. "sftgif") then + land_ice_sftgif = read_data(info_items0[imod]) + var_sftgif = var0 + info_items_sftgif = info_items0 + elseif (var1 .eq. "sftgif") then + land_ice_sftgif = read_data(info_items1[imod]) + var_sftgif = var1 + info_items_sftgif = info_items1 + elseif (var2 .eq. "sftgif") then + land_ice_sftgif = read_data(info_items2[imod]) + var_sftgif = var2 + info_items_sftgif = info_items2 + end if + sftifl = land_sea_sftlf - land_ice_sftgif + + sftifl_conf = conform(A0, sftifl, (/1, 2/)) + tmp2 = (A0 * sftifl_conf) / 100 ; sftifl in percent -> / 100 + ; where > 5 + tmp3 = where(tmp2 .gt. 
5., 1.0, 0)
+  ; Add coordinates
+  copy_VarCoords(A0, tmp3)
+  ; Average over globe/region
+  ; (function in ./diag_scripts/lib/ncl/latlon.ncl)
+  if (isatt(diag_script_info, "ts_minlat")) then
+    tmp4 = area_operations(tmp3, diag_script_info@ts_minlat, \
+                           diag_script_info@ts_maxlat, \
+                           diag_script_info@ts_minlon, \
+                           diag_script_info@ts_maxlon, "average", True)
+  else
+    tmp4 = coswgt_areaave(tmp3)
+  end if
+  ; convert the area-mean snow-cover fraction to an area:
+  ; 5.10067e14 m2 is the total surface area of the Earth, so dividing
+  ; by 2 gives the area of one hemisphere (the analysed region, e.g. NH)
+  tmp5 = (tmp4 * 5.10067e14) / 2.0
+  ; Add coordinates
+  tmp5!0 = "time"
+  tmp5&time = A0&time
+  tmp6 = extract_season(tmp5, months)
+  data2_glob = time_operations(tmp6, -1, -1, "average", "yearly", False)
+  delete([/tmp2, tmp3, tmp4, tmp5, tmp6, sftifl_conf/])
+
+  dim_file = dimsizes(data2_glob)  ; file size of historical file
+
+  ; How many historical model runs?
+  if (isatt(diag_script_info, "reference_run")) then
+    reference_run_name = diag_script_info@reference_run
+  else
+    log_info("No reference run name given, set to default 'historical'")
+    reference_run_name = "historical"
+  end if
+
+  attsh = True
+  attsh@exp = reference_run_name
+  item_hist = select_metadata_by_atts(info_items, attsh)
+  dim_MOD_hist = ListCount(item_hist)
+
+  ref_data2 = new((/dim_MOD_hist/), float, A0@_FillValue)
+  data2_rel = new((/dim_MOD_hist, dim_file(0)/), float, \
+                  A0@_FillValue)
+
+  ; How many scenarios?
+  dim_scen = dimsizes(diag_script_info@scenarios)
+  ; start and end years of timeperiods
+  syears = diag_script_info@syears
+  eyears = diag_script_info@eyears
+  nperiods = dimsizes(syears)
+  nyears_full = toint(eyears(nperiods - 1)) - toint(syears(0)) + 1
+  nyearsp1 = toint(eyears(nperiods - 1)) - toint(eyears(0))
+
+  ; How many model runs in 1st scenario?
+  atts1 = True
+  atts1@exp = diag_script_info@scenarios(0)
+  item_rcp26 = select_metadata_by_atts(info_items, atts1)
+  dim_MOD_26 = ListCount(item_rcp26)
+  ; initialize array for scenarios
+  data26_rel = new((/dim_MOD_26, nyearsp1/), float, \
+                   A0@_FillValue)
+
+  if (dim_scen .gt. 1) then
+    ; How many model runs in 2nd scenario?
+    atts2 = True
+    atts2@exp = diag_script_info@scenarios(1)
+    item_rcp45 = select_metadata_by_atts(info_items, atts2)
+    dim_MOD_45 = ListCount(item_rcp45)
+    data45_rel = new((/dim_MOD_45, nyearsp1/), float, \
+                     A0@_FillValue)
+  end if
+  if (dim_scen .gt. 2) then
+    ; How many model runs in 3rd scenario?
+    atts3 = True
+    atts3@exp = diag_script_info@scenarios(2)
+    item_rcp60 = select_metadata_by_atts(info_items, atts3)
+    dim_MOD_60 = ListCount(item_rcp60)
+    data60_rel = new((/dim_MOD_60, nyearsp1/), float, \
+                     A0@_FillValue)
+  end if
+  if (dim_scen .gt. 3) then
+    ; How many model runs in 4th scenario?
+    atts4 = True
+    atts4@exp = diag_script_info@scenarios(3)
+    item_rcp85 = select_metadata_by_atts(info_items, atts4)
+    dim_MOD_85 = ListCount(item_rcp85)
+    data85_rel = new((/dim_MOD_85, nyearsp1/), float, \
+                     A0@_FillValue)
+  end if
+  if (dim_scen .gt. 4) then
+    error_msg("f", DIAG_SCRIPT, "", "Too many different scenarios, " + \
+              "code not set up for more than 4.")
+  end if
+  delete([/attsh, atts1, atts2, atts3, atts4/])
+  delete([/item_rcp26, item_rcp45, item_rcp60, item_rcp85/])
+
+  i = 0
+  i26 = 0
+  i45 = 0
+  i60 = 0
+  i85 = 0
+  do imod = 0, dim_MOD_hist - 1
+    if (imod .eq.
0) then + delete(A0) + else + A0 = read_data(info_items[imod]) + ; we need to calculate the snow area using sftgif (glacier) and sftlf + land_ice_sftgif = read_data(info_items_sftgif[imod]) + ; read sftlf (land sea mask) + land_sea_sftlf = read_data(info_items_sftlf[imod]) + sftifl = land_sea_sftlf - land_ice_sftgif + + sftifl_conf = conform(A0, sftifl, (/1, 2/)) + tmp2 = (A0 * sftifl_conf) / 100 + ; where > 5 kg m**-2 considered snow covered + tmp3 = where(tmp2 .gt. 5., 1., 0) + ; Add coordinates + copy_VarCoords(A0, tmp3) + ; Average over globe/region + ; (function in ./diag_scripts/lib/ncl/latlon.ncl) + if (isatt(diag_script_info, "ts_minlat")) then + tmp4 = area_operations(tmp3, diag_script_info@ts_minlat, \ + diag_script_info@ts_maxlat, \ + diag_script_info@ts_minlon, \ + diag_script_info@ts_maxlon, "average", \ + True) + else + tmp4 = coswgt_areaave(tmp3) + end if + tmp5 = (tmp4 * 5.10067e14) / 2.0 + ; Add coordinates + tmp5!0 = "time" + tmp5&time = A0&time + tmp6 = extract_season(tmp5, months) + data2_glob = time_operations(tmp6, -1, -1, "average", "yearly", False) + delete([/tmp2, tmp3, tmp4, tmp5, tmp6, sftifl_conf, A0/]) + end if + + ind_start = ind(data2_glob&year .eq. diag_script_info@begin_ref_year) + ind_end = ind(data2_glob&year .eq. diag_script_info@end_ref_year) + ref_data2(imod) = dim_avg(data2_glob(ind_start : ind_end)) + dim_data2g = dimsizes(data2_glob) + if (dim_data2g .ne. dim_file(0)) then + tdiff = dim_file(0) - dim_data2g + data2_rel(imod, 0 : tdiff - 1) = data2_rel@_FillValue + data2_rel(imod, tdiff :) = (data2_glob / ref_data2(imod)) - 1 + else + data2_rel(imod, :) = (data2_glob / ref_data2(imod)) - 1 + end if + ; clean up + delete([/data2_glob/]) + + do s = 0, dim_scen - 1 + ; find all other runs from this model and calculate relative ts + atts = True + atts@dataset = info_items[imod]@dataset + atts@exp = diag_script_info@scenarios(s) + atts@ensemble = info_items[imod]@ensemble + item_rcp = select_metadata_by_atts(info_items, atts) ; matching list + if (ListCount(item_rcp) .ne. 0) then + A1 = read_data(item_rcp[0]) + ; we need to calculate the snow area using sftgif (glacier) and sftlf + ; these are fx files and should not change between runs, use same + ; sftifl as for historical + sftifl_conf = conform(A1, sftifl, (/1, 2/)) + tmp2 = (A1 * sftifl_conf) / 100 ; sftifl in percent -> / 100 + ; where > 5 kg m**-2 considered snow covered + tmp3 = where(tmp2 .gt. 5., 1., 0) + ; Add coordinates + copy_VarCoords(A1, tmp3) + ; Average over globe/region + ; (function in ./diag_scripts/lib/ncl/latlon.ncl) + if (isatt(diag_script_info, "ts_minlat")) then + tmp4 = area_operations(tmp3, diag_script_info@ts_minlat, \ + diag_script_info@ts_maxlat, \ + diag_script_info@ts_minlon, \ + diag_script_info@ts_maxlon, "average", \ + True) + else + tmp4 = coswgt_areaave(tmp3) + end if + tmp5 = (tmp4 * 5.10067e14) / 2.0 + ; Add coordinates + tmp5!0 = "time" + tmp5&time = A1&time + tmp6 = extract_season(tmp5, months) + data2_glob = time_operations(tmp6, -1, -1, "average", "yearly", False) + delete([/tmp2, tmp3, tmp4, tmp5, tmp6, sftifl_conf/]) + dim_data2g = dimsizes(data2_glob) + if (s .eq. 0) then + if (dim_data2g .ne. nyearsp1) then + if (dim_data2g .gt. 
nyearsp1) then + error_msg("w", DIAG_SCRIPT, "", "Length of dataset " + \ + info_items[imod]@dataset + " in recipe is " + \ + "longer than period given in recipe (" + nyearsp1 + \ + " years)") + else + data26_rel(i26, 0 : dim_data2g - 1) = \ + (data2_glob / ref_data2(imod)) - 1 + data26_rel(i26, dim_data2g :) = data26_rel@_FillValue + end if + else + data26_rel(i26, :) = (data2_glob / ref_data2(imod)) - 1 + end if + i26 = i26 + 1 + end if + if (s .eq. 1) then + if (dim_data2g .ne. nyearsp1) then + if (dim_data2g .gt. nyearsp1) then + error_msg("w", DIAG_SCRIPT, "", "Length of dataset " + \ + info_items[imod]@dataset + " in recipe is " + \ + "longer than period given in recipe (" + nyearsp1 + \ + " years)") + else + data45_rel(i45, 0 : dim_data2g - 1) = \ + (data2_glob / ref_data2(imod)) - 1 + data45_rel(i45, dim_data2g:) = data45_rel@_FillValue + end if + else + data45_rel(i45, :) = (data2_glob / ref_data2(imod)) - 1 + end if + i45 = i45 + 1 + end if + if (s .eq. 2) then + if (dim_data2g.ne.nyearsp1) then + if (dim_data2g .gt. nyearsp1) then + error_msg("w", DIAG_SCRIPT, "", "Length of dataset " + \ + info_items[imod]@dataset + " in recipe is " + \ + "longer than period given in recipe (" + nyearsp1 + \ + " years)") + else + data60_rel(i60, 0 : dim_data2g - 1) = \ + (data2_glob / ref_data2(imod)) - 1 + data60_rel(i60, dim_data2g:) = data60_rel@_FillValue + end if + else + data60_rel(i60, :) = (data2_glob / ref_data2(imod)) - 1 + end if + i60 = i60 + 1 + end if + if (s .eq. 3) then + if (dim_data2g.ne.nyearsp1) then + if (dim_data2g .gt. nyearsp1) then + error_msg("w", DIAG_SCRIPT, "", "Length of dataset " + \ + info_items[imod]@dataset + " in recipe is " + \ + "longer than period given in recipe (" + nyearsp1 + \ + " years)") + else + data85_rel(i85, 0 : dim_data2g - 1) = \ + (data2_glob / ref_data2(imod)) - 1 + data85_rel(i85, dim_data2g :) = data85_rel@_FillValue + end if + else + data85_rel(i85, :) = (data2_glob / ref_data2(imod)) - 1 + end if + i85 = i85 + 1 + end if + ; clean up + delete([/A1, data2_glob/]) + end if + delete(item_rcp) + end do ; scenario + delete([/land_ice_sftgif, land_sea_sftlf, sftifl/]) + end do ; imod + + ; average and standard deviation over all datasets + datahist_avg = dim_avg_n_Wrap(data2_rel, 0) + datahist_std = dim_stddev_n_Wrap(data2_rel, 0) + + ; put all data from 1850-2100 into mean_val and std_val + mean_val = new((/dim_scen + 1, nyears_full/), typeof(data2_rel),\ + data2_rel@_FillValue) + std_val = new((/dim_scen + 1, nyears_full/), typeof(data2_rel),\ + data2_rel@_FillValue) + p0 = toint(eyears(0)) - toint(syears(0)) + p1 = toint(eyears(0)) - toint(syears(0)) + 1 + mean_val(0, 0 : p0) = datahist_avg + std_val(0, 0 : p0) = datahist_std + + data26_avg = dim_avg_n_Wrap(data26_rel, 0) + data26_std = dim_stddev_n_Wrap(data26_rel, 0) + + mean_val(1, p1 :) = data26_avg + std_val(1, p1 :) = data26_std + + if (dim_scen .gt. 1) then + data45_avg = dim_avg_n_Wrap(data45_rel, 0) + data45_std = dim_stddev_n_Wrap(data45_rel, 0) + mean_val(2, p1 :) = data45_avg + std_val(2, p1 :) = data45_std + end if + if (dim_scen .gt. 2) then + data60_avg = dim_avg_n_Wrap(data60_rel, 0) + data60_std = dim_stddev_n_Wrap(data60_rel, 0) + mean_val(3, p1 :) = data60_avg + std_val(3, p1 :) = data60_std + end if + if (dim_scen .gt. 
3) then
+    data85_avg = dim_avg_n_Wrap(data85_rel, 0)
+    data85_std = dim_stddev_n_Wrap(data85_rel, 0)
+    mean_val(4, p1 :) = data85_avg
+    std_val(4, p1 :) = data85_std
+  end if
+  ; check number of datasets for timeperiods in scenarios
+  ; if there are less than 5 datasets set to missing
+  runs = new((/1 + dim_scen, nperiods/), integer)
+  atts_runs = True
+  atts_runs@exp = reference_run_name
+  item_runs = select_metadata_by_atts(info_items, atts_runs)
+  runs(0, 0) = ListCount(item_runs)
+  do scen = 0, dim_scen - 1
+    do p = 1, nperiods - 1
+      atts_runs@exp = diag_script_info@scenarios(scen)
+      item_runs = select_metadata_by_atts(info_items, atts_runs)
+      nrp1 = NewList("fifo")
+      nrp2 = NewList("fifo")
+      do r = 0, ListCount(item_runs) - 1
+        if (item_runs[r]@end_year .gt. toint(diag_script_info@eyears(p))) then
+          ListAppend(nrp1, item_runs[r])
+        elseif (item_runs[r]@end_year .eq. \
+                toint(diag_script_info@eyears(p))) then
+          ListAppend(nrp2, item_runs[r])
+        end if
+      end do
+      tmp = ListCount(nrp1)
+      ; if only missing values were found, set tmp to zero
+      if (ismissing(tmp)) then
+        tmp = 0
+      end if
+      tmp2 = ListCount(nrp2)
+      ; if only missing values were found, set tmp2 to zero
+      if (ismissing(tmp2)) then
+        tmp2 = 0
+      end if
+      runs(scen + 1, p) = tmp + tmp2
+      delete([/tmp, tmp2, nrp1, nrp2/])
+      if (runs(scen + 1, p) .le. 4) .and. (p .ne. 0) then
+        p2 = toint(eyears(p - 1)) - toint(syears(0))
+        p3 = toint(eyears(p)) - toint(syears(0))
+        mean_val(scen + 1, p2 : p3) = mean_val@_FillValue
+        std_val(scen + 1, p2 : p3) = std_val@_FillValue
+        log_info("Scenario " + diag_script_info@scenarios(scen) + \
+                 " in period ending " + diag_script_info@eyears(p) + \
+                 " has less than 5 models, set to missing.")
+      end if
+    end do
+  end do
+
+  dims_data = dimsizes(mean_val)
+  to_plot = new((/3 * dims_data(0), dims_data(1)/), float)
+
+  if (isatt(diag_script_info, "spread")) then
+    spread = diag_script_info@spread
+  else
+    spread = 1.0
+  end if
+
+  do j = 0, dims_data(0) - 1
+    to_plot(j * 3, :) = mean_val(j, :)
+    to_plot((j * 3) + 1, :) = to_plot(j * 3, :) + spread * std_val(j, :)
+    to_plot((j * 3) + 2, :) = to_plot(j * 3, :) - spread * std_val(j, :)
+  end do
+  copy_VarMeta(mean_val, to_plot)
+  to_plot!0 = "metric"
+  str_spread = sprintf("%5.2f", spread)
+  if (dim_scen .eq. 1) then
+    to_plot&metric = (/"mean", "+" + str_spread + "std", \
+                       "-" + str_spread + "std", \
+                       "mean", "+" + str_spread + "std", \
+                       "-" + str_spread + "std"/)
+  elseif (dim_scen .eq. 2) then
+    to_plot&metric = (/"mean", "+" + str_spread + "std", \
+                       "-" + str_spread + "std", \
+                       "mean", "+" + str_spread + "std", \
+                       "-" + str_spread + "std", \
+                       "mean", "+" + str_spread + "std", \
+                       "-" + str_spread + "std"/)
+  elseif (dim_scen .eq. 3) then
+    to_plot&metric = (/"mean", "+" + str_spread + "std", \
+                       "-" + str_spread + "std", \
+                       "mean", "+" + str_spread + "std", \
+                       "-" + str_spread + "std", \
+                       "mean", "+" + str_spread + "std", \
+                       "-" + str_spread + "std", \
+                       "mean", "+" + str_spread + "std", \
+                       "-" + str_spread + "std"/)
+  elseif (dim_scen .eq.
4) then
+    to_plot&metric = (/"mean", "+" + str_spread + "std", \
+                       "-" + str_spread + "std", \
+                       "mean", "+" + str_spread + "std", \
+                       "-" + str_spread + "std", \
+                       "mean", "+" + str_spread + "std", \
+                       "-" + str_spread + "std", \
+                       "mean", "+" + str_spread + "std", \
+                       "-" + str_spread + "std", \
+                       "mean", "+" + str_spread + "std", \
+                       "-" + str_spread + "std"/)
+  end if
+  to_plot!1 = "year"
+  to_plot&year = ispan(toint(syears(0)), toint(eyears(nperiods - 1)), 1)
+  ; ###########################################
+  ; # Other Metadata: diag_script, var        #
+  ; ###########################################
+  ; Add to to_plot, as attributes without prefix
+  if(isatt(to_plot, "diag_script")) then  ; Add to existing entries
+    temp = to_plot@diag_script
+    delete(to_plot@diag_script)
+    to_plot@diag_script = array_append_record(temp, (/DIAG_SCRIPT/), 0)
+    delete(temp)
+  else  ; Add as new attribute
+    to_plot@diag_script = (/DIAG_SCRIPT/)
+  end if
+  to_plot@var = var_snw  ; Overwrite existing entry
+  if(isatt(variable_info[idx_snw], "long_name")) then
+    to_plot@var_long_name = variable_info[idx_snw]@long_name
+  end if
+  if(isatt(variable_info[idx_snw], "units")) then
+    to_plot@units = variable_info[idx_snw]@units
+  end if
+
+  runs@var = "nr"
+  runs!0 = "scenario"
+  runs&scenario = diag_script_info@label
+  runs!1 = "period"
+  runs&period = diag_script_info@syears
+  runs@diag_script = (/DIAG_SCRIPT/)
+  runs@var_long_name = "number of model runs per scenario and period"
+
+  ; ###########################################
+  ; # Output to netCDF                        #
+  ; ###########################################
+  ; Output (controlled by diag_script_info)
+  to_plot@ncdf = variable_info[0]@diagnostic + ".nc"
+  ncdf_file = work_dir + "/" + to_plot@ncdf
+  ncdf_file@existing = "overwrite"
+  ; Function in ~/interface_scripts/auxiliary.ncl
+  ncdf_outfile = ncdf_write(to_plot, ncdf_file)
+  if(isatt(diag_script_info, "model_nr") .and. \
+     diag_script_info@model_nr .eq. True) then
+    ncdf_file2 = work_dir + "/nr_runs_" + variable_info[0]@diagnostic + ".nc"
+    ncdf_outfile2 = ncdf_write(runs, ncdf_file2)
+  end if
+
+  leave_msg(DIAG_SCRIPT, "")
+end
diff --git a/esmvaltool/diag_scripts/ipcc_ar5/ch12_ts_line_mean_spread.ncl b/esmvaltool/diag_scripts/ipcc_ar5/ch12_ts_line_mean_spread.ncl
new file mode 100644
index 0000000000..e488d3a07f
--- /dev/null
+++ b/esmvaltool/diag_scripts/ipcc_ar5/ch12_ts_line_mean_spread.ncl
@@ -0,0 +1,535 @@
+; #############################################################################
+; Global timeseries for historical and RCPs 1850-2?00
+; Author: Ruth Lorenz (ETH, Switzerland)
+; PROJECT-NAME CRESCENDO
+; #############################################################################
+; Caveats
+;    Needs lots of memory, in particular if lots of models and scenarios
+;    are included
+;    The number of scenarios is variable, but currently hardcoded to at
+;    most 4 (rcp2.6, rcp4.5, rcp6.0, rcp8.5)
+;    Every model (ensemble) in an RCP needs a corresponding historical run
+;    as reference; if no corresponding run is found in a scenario, a
+;    log_info message is issued
+;
+; Modification history
+;    20180620-A_lorenz_ruth: ported to version2
+;    20170523-A_lorenz_ruth: separated plotting from calculation
+;    20160625-A_lorenz_ruth: adapted to ESMValTool
+;    20130506-A_sedlacek_jan: written for IPCC AR5.
+; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/latlon.ncl" + +load "$diag_scripts/shared/statistics.ncl" + +load "$diag_scripts/shared/plot/aux_plotting.ncl" +load "$diag_scripts/shared/plot/style.ncl" + +begin + ; ############################################################## + ; # Fetch general parameters, set in namelist_collins13ipcc.xml# + ; # passed via environment variables by python code # + ; ############################################################## + enter_msg(DIAG_SCRIPT, "") + + ; 'datasets', 'variables' are fetched from the above 'interface.ncl' file + var0 = variable_info[0]@short_name + info_items = select_metadata_by_name(input_file_info, var0) + dim_MOD = ListCount(info_items) + dim_VAR = ListCount(variable_info) + + ; Check required diag_script_info attributes + exit_if_missing_atts(diag_script_info, (/"scenarios", "syears", "eyears", \ + "begin_ref_year", "end_ref_year"/)) +end + +begin + ; Output netcdf directory + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) +end + +begin + ; ############################################# + ; # Get parameters from ./variable_defs/*.ncl # + ; # passed via the 'info' attribute # + ; ############################################# + if(isvar("MyParam")) then + delete(MyParam) + end if + if(isatt(variable_info[0], "long_name")) then + MyParam = variable_info[0]@long_name + log_debug(" MyParam = " + MyParam) + else + error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \ + "'long_name'") + end if + ; ########################################### + ; # Get data and average annual and globally# + ; ########################################### + ; get data from first dataset for historical period + imod = 0 ; NCL array indices start from zero + log_debug("processing " + info_items[imod]@dataset + "_" \ + + info_items[imod]@exp + "_" \ + + info_items[imod]@ensemble) + + ; See ./interface_scripts/data_handling.ncl + A0 = read_data(info_items[imod]) + + ; Check dimensions + dims = getvardims(A0) + if (dimsizes(dims) .lt. 2) then + error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + " dimensions, " + \ + "need 2 or 3") + end if + idx = ind(dims .eq. "lat") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lat dimension") + end if + idx = ind(dims .eq. "lon") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lon dimension") + end if + + ; Average over time (function in ./diag_scripts/shared/statistics.ncl) + data1 = time_operations(A0, -1, -1, "average", "yearly", True) + + dim_file = dimsizes(data1) ; file size of historical file + ; Average over globe/region + ; (function in ./diag_scripts/shared/latlon.ncl) + if (isatt(diag_script_info, "ts_minlat")) then + data1_glob = area_operations(data1, diag_script_info@ts_minlat, \ + diag_script_info@ts_maxlat, \ + diag_script_info@ts_minlon, \ + diag_script_info@ts_maxlon, "average", \ + True) + else + data1_glob = coswgt_areaave(data1) + end if + + ; How many historical model runs? 
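+  ; (each scenario run is later matched against the historical run of the
+  ;  same model/ensemble as its reference, see the caveats in the header,
+  ;  so the reference experiment is identified and counted first)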
+  if (isatt(diag_script_info, "reference_run")) then
+    reference_run_name = diag_script_info@reference_run
+  else
+    log_info("No reference run name given, set to default 'historical'")
+    reference_run_name = "historical"
+  end if
+
+  attsh = True
+  attsh@exp = reference_run_name
+  item_hist = select_metadata_by_atts(info_items, attsh)
+  dim_MOD_hist = ListCount(item_hist)
+
+  ref_data2 = new((/dim_MOD_hist/), float, data1_glob@_FillValue)
+  data2_rel = new((/dim_MOD_hist, dim_file(0)/), float, \
+                  data1_glob@_FillValue)
+
+  ; How many scenarios?
+  dim_scen = dimsizes(diag_script_info@scenarios)
+  ; start and end years of timeperiods
+  syears = diag_script_info@syears
+  eyears = diag_script_info@eyears
+  nperiods = dimsizes(syears)
+  nyears = toint(eyears(nperiods - 1)) - toint(syears(0)) + 1
+  nyearsp1 = toint(eyears(nperiods - 1)) - toint(eyears(0))
+
+  ; How many rcp2.6 model runs?
+  atts1 = True
+  atts1@exp = diag_script_info@scenarios(0)
+  item_rcp26 = select_metadata_by_atts(info_items, atts1)
+  dim_MOD_26 = ListCount(item_rcp26)
+  ; initialize array for scenarios
+  data26_rel = new((/dim_MOD_26, nyearsp1/), float, \
+                   data1_glob@_FillValue)
+  i26 = 0
+
+  if (dim_scen .gt. 1) then
+    ; How many rcp4.5 model runs?
+    atts2 = True
+    atts2@exp = diag_script_info@scenarios(1)
+    item_rcp45 = select_metadata_by_atts(info_items, atts2)
+    dim_MOD_45 = ListCount(item_rcp45)
+    data45_rel = new((/dim_MOD_45, nyearsp1/), float, \
+                     data1_glob@_FillValue)
+    i45 = 0
+  end if
+  if (dim_scen .gt. 2) then
+    ; How many rcp6.0 model runs?
+    atts3 = True
+    atts3@exp = diag_script_info@scenarios(2)
+    item_rcp60 = select_metadata_by_atts(info_items, atts3)
+    dim_MOD_60 = ListCount(item_rcp60)
+    data60_rel = new((/dim_MOD_60, nyearsp1/), float, \
+                     data1_glob@_FillValue)
+    i60 = 0
+  end if
+  if (dim_scen .gt. 3) then
+    ; How many rcp8.5 model runs?
+    atts4 = True
+    atts4@exp = diag_script_info@scenarios(3)
+    item_rcp85 = select_metadata_by_atts(info_items, atts4)
+    dim_MOD_85 = ListCount(item_rcp85)
+    data85_rel = new((/dim_MOD_85, nyearsp1/), float, \
+                     data1_glob@_FillValue)
+    i85 = 0
+  end if
+  if (dim_scen .gt. 4) then
+    error_msg("f", DIAG_SCRIPT, "", "Too many different scenarios, " + \
+              "code not set up for more than 4.")
+  end if
+  do imod = 0, dim_MOD_hist - 1
+    if (imod .eq. 0) then
+      data2 = data1
+      delete(A0)
+    else
+      A0 = read_data(info_items[imod])
+      data2 = time_operations(A0, -1, -1, "average", "yearly", True)
+      delete(A0)
+    end if
+    ; Average over globe (function in
+    ; ./diag_scripts/shared/statistics.ncl)
+    data2_glob = coswgt_areaave(data2)
+    ind_start = ind(data2&year .eq. diag_script_info@begin_ref_year)
+    ind_end = ind(data2&year .eq. diag_script_info@end_ref_year)
+    ref_data2(imod) = dim_avg(data2_glob(ind_start : ind_end))
+    dim_data2g = dimsizes(data2_glob)
+    if (dim_data2g .ne. dim_file(0)) then
+      tdiff = dim_file(0) - dim_data2g
+      data2_rel(imod, 0 : tdiff - 1) = data2_glob@_FillValue
+      data2_rel(imod, tdiff :) = data2_glob - ref_data2(imod)
+    else
+      data2_rel(imod, :) = data2_glob - ref_data2(imod)
+    end if
+    ; clean up
+    delete([/data2, data2_glob/])
+
+    ; find all other runs from this model and calculate relative ts
+    atts = True
+    atts@dataset = info_items[imod]@dataset
+    atts@exp = diag_script_info@scenarios(0)
+    atts@ensemble = info_items[imod]@ensemble
+    item_26 = select_metadata_by_atts(info_items, atts)
+    if (ListCount(item_26) .ne.
0) then
+      A1 = read_data(item_26[0])
+      data3 = time_operations(A1, -1, -1, "average", "yearly", True)
+      data3_glob = coswgt_areaave(data3)
+      dim_data3g = dimsizes(data3_glob)
+      if (dim_data3g .ne. nyearsp1) then
+        if (dim_data3g .gt. nyearsp1) then
+          error_msg("w", DIAG_SCRIPT, "", "Length of dataset " + \
+                    info_items[imod]@dataset + " in recipe is " + \
+                    "longer than period given in recipe (" + nyearsp1 + \
+                    " years)")
+        else
+          data26_rel(i26, 0 : dim_data3g - 1) = data3_glob - ref_data2(imod)
+          data26_rel(i26, dim_data3g :) = data3_glob@_FillValue
+        end if
+      else
+        data26_rel(i26, :) = data3_glob - ref_data2(imod)
+      end if
+      i26 = i26 + 1
+      ; clean up
+      delete([/A1, data3, data3_glob/])
+    else
+      log_info("No corresponding model for historical run " + \
+               info_items[imod]@dataset + " found in scenario " + \
+               diag_script_info@scenarios(0) + \
+               ", cannot calculate relative change.")
+    end if
+    delete(item_26)
+
+    if (dim_scen .gt. 1) then
+      atts@exp = diag_script_info@scenarios(1)
+      item_45 = select_metadata_by_atts(info_items, atts)
+
+      if (ListCount(item_45) .ne. 0) then
+        A1 = read_data(item_45[0])
+        data3 = time_operations(A1, -1, -1, "average", "yearly", True)
+        data3_glob = coswgt_areaave(data3)
+        dim_data3g = dimsizes(data3_glob)
+        if (dim_data3g .ne. nyearsp1) then
+          if (dim_data3g .gt. nyearsp1) then
+            error_msg("w", DIAG_SCRIPT, "", "Length of dataset " + \
+                      info_items[imod]@dataset + " in recipe is " + \
+                      "longer than period given in recipe (" + nyearsp1 + \
+                      " years)")
+          else
+            data45_rel(i45, 0:dim_data3g - 1) = data3_glob - ref_data2(imod)
+            data45_rel(i45, dim_data3g:) = data3_glob@_FillValue
+          end if
+        else
+          data45_rel(i45, :) = data3_glob - ref_data2(imod)
+        end if
+        i45 = i45 + 1
+        ; clean up
+        delete([/A1, data3, data3_glob/])
+      else
+        log_info("No corresponding model ensemble for historical run " + \
+                 info_items[imod]@dataset + " found in scenario " + \
+                 diag_script_info@scenarios(1) + \
+                 ", cannot calculate relative change.")
+      end if
+      delete(item_45)
+    end if
+
+    if (dim_scen .gt. 2) then
+      atts@exp = diag_script_info@scenarios(2)
+      item_60 = select_metadata_by_atts(info_items, atts)
+
+      if (ListCount(item_60) .ne. 0) then
+        A1 = read_data(item_60[0])
+        data3 = time_operations(A1, -1, -1, "average", "yearly", True)
+        data3_glob = coswgt_areaave(data3)
+        dim_data3g = dimsizes(data3_glob)
+        if (dim_data3g .ne. nyearsp1) then
+          if (dim_data3g .gt. nyearsp1) then
+            error_msg("w", DIAG_SCRIPT, "", "Length of dataset " + \
+                      info_items[imod]@dataset + " in recipe is " + \
+                      "longer than period given in recipe (" + nyearsp1 + \
+                      " years)")
+          else
+            data60_rel(i60, 0 : dim_data3g - 1) = data3_glob - ref_data2(imod)
+            data60_rel(i60, dim_data3g :) = data3_glob@_FillValue
+          end if
+        else
+          data60_rel(i60, :) = data3_glob - ref_data2(imod)
+        end if
+        i60 = i60 + 1
+        ; clean up
+        delete([/A1, data3, data3_glob/])
+      else
+        log_info("No corresponding model ensemble for historical run " + \
+                 info_items[imod]@dataset + " found in scenario " + \
+                 diag_script_info@scenarios(2) + \
+                 ", cannot calculate relative change.")
+      end if
+      delete(item_60)
+    end if
+
+    if (dim_scen .gt. 3) then
+      atts@exp = diag_script_info@scenarios(3)
+      item_85 = select_metadata_by_atts(info_items, atts)
+
+      if (ListCount(item_85) .ne. 0) then
+        A1 = read_data(item_85[0])
+        data3 = time_operations(A1, -1, -1, "average", "yearly", True)
+        data3_glob = coswgt_areaave(data3)
+        dim_data3g = dimsizes(data3_glob)
+        if (dim_data3g .ne. nyearsp1) then
+          if (dim_data3g .gt.
nyearsp1) then
+            error_msg("w", DIAG_SCRIPT, "", "Length of dataset " + \
+                      info_items[imod]@dataset + " in recipe is " + \
+                      "longer than period given in recipe (" + nyearsp1 + \
+                      " years)")
+          else
+            data85_rel(i85, 0:dim_data3g - 1) = data3_glob - ref_data2(imod)
+            data85_rel(i85, dim_data3g:) = data3_glob@_FillValue
+          end if
+        else
+          data85_rel(i85, :) = data3_glob - ref_data2(imod)
+        end if
+        i85 = i85 + 1
+        ; clean up
+        delete(A1)
+        delete(data3)
+        delete(data3_glob)
+      else
+        log_info("No corresponding model ensemble for historical run " + \
+                 info_items[imod]@dataset + " found in scenario " + \
+                 diag_script_info@scenarios(3) + \
+                 ", cannot calculate relative change, set to missing.")
+      end if
+      delete(item_85)
+    end if
+  end do  ; imod
+
+  ; average and standard deviation over all models
+  datahist_avg = dim_avg_n_Wrap(data2_rel, 0)
+  datahist_std = dim_stddev_n_Wrap(data2_rel, 0)
+
+  ; put all data from 1850-2100 into mean_val and std_val
+  mean_val = new((/dim_scen + 1, nyears/), typeof(data2_rel), \
+                 data2_rel@_FillValue)
+  std_val = new((/dim_scen + 1, nyears/), typeof(data2_rel), \
+                data2_rel@_FillValue)
+  p0 = toint(eyears(0)) - toint(syears(0))
+  p1 = toint(eyears(0)) - toint(syears(0)) + 1
+  mean_val(0, 0 : p0) = datahist_avg
+  std_val(0, 0 : p0) = datahist_std
+
+  data26_avg = dim_avg_n_Wrap(data26_rel, 0)
+  data26_std = dim_stddev_n_Wrap(data26_rel, 0)
+  mean_val(1, p1 :) = data26_avg
+  std_val(1, p1 :) = data26_std
+
+  if (dim_scen .gt. 1) then
+    data45_avg = dim_avg_n_Wrap(data45_rel, 0)
+    data45_std = dim_stddev_n_Wrap(data45_rel, 0)
+    mean_val(2, p1 :) = data45_avg
+    std_val(2, p1 :) = data45_std
+  end if
+  if (dim_scen .gt. 2) then
+    data60_avg = dim_avg_n_Wrap(data60_rel, 0)
+    data60_std = dim_stddev_n_Wrap(data60_rel, 0)
+    mean_val(3, p1 :) = data60_avg
+    std_val(3, p1 :) = data60_std
+  end if
+  if (dim_scen .gt. 3) then
+    data85_avg = dim_avg_n_Wrap(data85_rel, 0)
+    data85_std = dim_stddev_n_Wrap(data85_rel, 0)
+    mean_val(4, p1 :) = data85_avg
+    std_val(4, p1 :) = data85_std
+  end if
+
+  ; check number of models for timeperiods in scenarios
+  ; if there are less than 5 models set to missing
+  runs = new((/1 + dim_scen, nperiods/), integer)
+  atts_runs = True
+  atts_runs@exp = reference_run_name
+  item_runs = select_metadata_by_atts(info_items, atts_runs)
+  runs(0, 0) = ListCount(item_runs)
+  do scen = 0, dim_scen - 1
+    do p = 1, nperiods - 1
+      atts_runs@exp = diag_script_info@scenarios(scen)
+      item_runs = select_metadata_by_atts(info_items, atts_runs)
+      nrp1 = NewList("fifo")
+      nrp2 = NewList("fifo")
+      do r = 0, ListCount(item_runs) - 1
+        if (item_runs[r]@end_year .gt. toint(diag_script_info@eyears(p))) then
+          ListAppend(nrp1, item_runs[r])
+        elseif (item_runs[r]@end_year .eq. \
+                toint(diag_script_info@eyears(p))) then
+          ListAppend(nrp2, item_runs[r])
+        end if
+      end do
+      tmp = ListCount(nrp1)
+      ; if only missing values were found, set tmp to zero
+      if (ismissing(tmp)) then
+        tmp = 0
+      end if
+      tmp2 = ListCount(nrp2)
+      ; if only missing values were found, set tmp2 to zero
+      if (ismissing(tmp2)) then
+        tmp2 = 0
+      end if
+      runs(scen + 1, p) = tmp2 + tmp
+      delete([/tmp, tmp2, nrp1, nrp2/])
+      if (runs(scen + 1, p) .le. 4) .and. (p .ne.
0) then + p2 = toint(eyears(p - 1)) - toint(syears(0)) + p3 = toint(eyears(p)) - toint(syears(0)) + mean_val(scen + 1, p2 : p3) = mean_val@_FillValue + std_val(scen + 1, p2 : p3) = std_val@_FillValue + log_info("Scenario " + diag_script_info@scenarios(scen) + \ + " in period ending " + diag_script_info@eyears(p) + \ + " has less than 5 models, set to missing.") + end if + end do + end do + dims_data = dimsizes(mean_val) + to_plot = new((/3 * dims_data(0), dims_data(1)/), float) + + if (isatt(diag_script_info, "spread")) then + spread = diag_script_info@spread + else + spread = 1.0 + end if + do j = 0, dims_data(0) - 1 + ; if variable sw or lw TOA flux: change direction + if (var0 .eq. "rlut" .or. var0 .eq. "rsut") then + to_plot(j * 3, :) = - mean_val(j, :) + else + to_plot(j * 3, :) = mean_val(j, :) + end if + to_plot((j * 3) + 1, :) = to_plot(j * 3, :) + spread * std_val(j, :) + to_plot((j * 3) + 2, :) = to_plot(j * 3, :) - spread * std_val(j, :) + end do + copy_VarMeta(mean_val, to_plot) + to_plot!0 = "metric" + str_spread = sprintf("%5.2f", spread) + if (dim_scen .eq. 1) then + to_plot&metric = (/"mean", "+" + str_spread + "std", "-" + \ + str_spread + "std", "mean", "+" + \ + str_spread + "std", "-" + str_spread + \ + "std"/) + elseif (dim_scen .eq. 2) then + to_plot&metric = (/"mean", "+" + str_spread + "std", "-" + \ + str_spread + "std", "mean", "+" + \ + str_spread + "std", "-" + str_spread + \ + "std", "mean", "+" + str_spread + "std", "-" \ + + str_spread + "std"/) + elseif (dim_scen .eq. 3) then + to_plot&metric = (/"mean", "+" + str_spread + "std", "-" + \ + str_spread + "std", "mean", "+" + \ + str_spread + "std", "-" + str_spread + \ + "std", "mean", "+" + str_spread + "std", "-" + \ + str_spread + "std", "mean", "+" + \ + str_spread + "std", "-" + str_spread + \ + "std"/) + elseif (dim_scen .eq. 
4) then + to_plot&metric = (/"mean", "+" + str_spread + "std", "-" + \ + str_spread + "std", "mean", "+" + \ + str_spread + "std", "-" + str_spread + \ + "std", "mean", "+" + str_spread + "std", "-" + \ + str_spread + "std", "mean", "+" + \ + str_spread + "std", "-" + str_spread + \ + "std", "mean", "+" + str_spread + "std", "-" + \ + str_spread + "std"/) + end if + + to_plot!1 = "year" + to_plot&year = ispan(toint(syears(0)), toint(eyears(nperiods - 1)), 1) + + ; ########################################### + ; # Other Metadata: diag_script, var # + ; ########################################### + ; Add to to_plot, as attributes without prefix + if(isatt(to_plot, "diag_script")) then ; Add to existing entries + temp = to_plot@diag_script + delete(to_plot@diag_script) + to_plot@diag_script = array_append_record(temp, (/DIAG_SCRIPT/), 0) + delete(temp) + else ; Add as new attribute + to_plot@diag_script = (/DIAG_SCRIPT/) + end if + to_plot@var = var0 ; Overwrite existing entry + if(isatt(variable_info[0], "long_name")) then + to_plot@var_long_name = variable_info[0]@long_name + end if + if(isatt(variable_info[0], "units")) then + to_plot@units = variable_info[0]@units + end if + + runs!0 = "scenario" + runs&scenario = diag_script_info@label + runs!1 = "period" + runs&period = diag_script_info@syears + if(isatt(runs, "diag_script")) then ; Add to existing entries + temp = runs@diag_script + delete(runs@diag_script) + runs@diag_script = array_append_record(temp, (/DIAG_SCRIPT/), 0) + delete(temp) + else ; Add as new attribute + runs@diag_script = (/DIAG_SCRIPT/) + end if + runs@var = "nr" + runs@var_long_name = "number of model runs per scenario and period" + runs@units = 1 + ; ########################################### + ; # Output to netCDF # + ; ########################################### + to_plot@ncdf = variable_info[0]@diagnostic + ".nc" + ncdf_file = work_dir + "/" + to_plot@ncdf + ncdf_file@existing = "overwrite" + ; Function in ~/interface_scripts/auxiliary.ncl + ncdf_outfile = ncdf_write(to_plot, ncdf_file) + if(isatt(diag_script_info, "model_nr") .and. \ + diag_script_info@model_nr .eq. True) then + ncdf_file2 = work_dir + "/nr_runs_" + variable_info[0]@diagnostic + ".nc" + ncdf_outfile2 = ncdf_write(runs, ncdf_file2) + end if + + leave_msg(DIAG_SCRIPT, "") +end diff --git a/esmvaltool/diag_scripts/ipcc_ar5/tsline.ncl b/esmvaltool/diag_scripts/ipcc_ar5/tsline.ncl index de81fd4692..679e7b7563 100644 --- a/esmvaltool/diag_scripts/ipcc_ar5/tsline.ncl +++ b/esmvaltool/diag_scripts/ipcc_ar5/tsline.ncl @@ -1,12 +1,11 @@ ; ############################################################################# ; PLOTS TIME SERIES -; Author: Irene Cionni (ENEA, Italy), Veronika Eyring (DLR, Germany) -; ESMVal project +; Authors: Lisa Bock (DLR, Germany), Sabrina Wenzel (DLR, Germany) and +; Irene Cionni (ENEA, Italy) ; ############################################################################# ; ; Description -; This script is based on the original CCMVal script tsline.ncl and has -; been further developed as part of the ESMValTool. +; This script is for plotting a time series of the global mean (anomaly). ; ; Required diag_script_info attributes (diagnostic specific) ; styleset: as in diag_scripts/shared/plot/style.ncl functions @@ -14,28 +13,42 @@ ; Optional diag_script_info attributes (diagnostic specific) ; time_avg: type of time average (currently only "yearly" and "monthly" are ; available). 
-;    ts_anomaly: calculates anomalies with respect to the first 10-year
-;                average
+;    ts_anomaly: calculates anomalies with respect to the defined period
+;                for each grid point by removing the mean for the given
+;                calendar month (requiring at least 50% of the data to be
+;                non-missing)
 ;    ref_start: start year of reference period for anomalies
 ;    ref_end: end year of reference period for anomalies
+;    ref_value: if true, right panel with mean values is attached
+;    ref_mask: if true, model fields will be masked by reference fields
+;    region: name of domain
 ;    plot_units: variable unit for plotting
+;    y_min: set min of y-axis
+;    y_max: set max of y-axis
+;    mean_nh_sh: if true, calculate first NH and SH mean
+;    volcanoes: if true, lines of main volcanic eruptions will be added
+;    run_ave: if not equal 0 then calculate running mean over this number
+;             of years
+;    header: if true, region name as header
 ;
 ;    Caveats
 ;
 ;    Modification history
-;    20181112-A_bock_ls: code rewritten for ESMValTool v2.0
-;    20170623-A_laue_ax: added tags for reporting
-;    20160905-A-Bock_li: implemented multi-model mean and variable refence
-;                        period for anomaly with refence mean value.
-;    20151027-A_laue_ax: moved call to 'write_references' to the beginning
-;                        of the code
-;    20150622-A_wenz_sa: added optional anomaly calculation, choise of
-;                        area opperations (sum, average) and detrending of
-;                        time series.
-;    20150420-A_righ_ma: adapted to new structure. Some of the original
-;                        features not ported yet (seasonal average, smoothing,
-;                        multi-model mean, etc.).
-;    20??????-A_cion_ir: written.
+;    20190911-bock_lisa: included method of Jones et al., 2013 and
+;                        added provenance
+;    20181112-bock_lisa: code rewritten for ESMValTool v2.0
+;    20170623-lauer_axel: added tags for reporting
+;    20160905-bock_lisa: implemented multi-model mean and variable reference
+;                        period for anomaly with reference mean value
+;    20151027-lauer_axel: moved call to 'write_references' to the beginning
+;                         of the code
+;    20150622-wenzel_sabrina: added optional anomaly calculation, choice of
+;                             area operations (sum, average) and detrending
+;                             of time series.
+;    20150420-righi_mattia: adapted to new structure. Some of the original
+;                           features not ported yet (seasonal average,
+;                           smoothing, multi-model mean, etc.).
+;    20??????-cionni_irene: written.
 ;
 ; #############################################################################
@@ -55,25 +68,21 @@ begin
   enter_msg(DIAG_SCRIPT, "")

   var0 = variable_info[0]@short_name
-  field_type0 = variable_info[0]@field
+  exp0 = variable_info[0]@exp
+  project0 = input_file_info[0]@project
   info_items = select_metadata_by_name(input_file_info, var0)
   datasetnames = metadata_att_as_array(info_items, "dataset")
   dim_MOD = ListCount(info_items)
   dim_VAR = ListCount(variable_info)

+  log_info("++++++++++++++++++++++++++++++++++++++++++")
+  log_info(DIAG_SCRIPT + " (var: " + var0 + ")")
+  log_info("++++++++++++++++++++++++++++++++++++++++++")
+
   ; Create output plot directory
   plot_dir = config_user_info@plot_dir
   system("mkdir -p " + plot_dir)

-  ; Check field type
-  plot_fields = (/"T2Ms"/)
-  ind_f = any(field_type0 .eq.
plot_fields) - if (all(ismissing(ind_f))) then - error_msg("f", DIAG_SCRIPT, "", "can't use field " + field_type0) - end if - delete(plot_fields) - delete(ind_f) - ; Plot file type file_type = config_user_info@output_file_type if (ismissing(file_type)) then @@ -98,6 +107,13 @@ begin ; Set default values for non-required diag_script_info attributes set_default_att(diag_script_info, "time_avg", "monthly") set_default_att(diag_script_info, "ts_anomaly", "noanom") + set_default_att(diag_script_info, "ref_value", True) + set_default_att(diag_script_info, "ref_mask", False) + set_default_att(diag_script_info, "region", "Global") + set_default_att(diag_script_info, "mean_nh_sh", False) + set_default_att(diag_script_info, "run_ave", 0) + set_default_att(diag_script_info, "header", False) + set_default_att(diag_script_info, "volcanoes", False) ; Determine time range start_year = min(metadata_att_as_array(info_items, "start_year")) @@ -113,28 +129,21 @@ begin time(12 * (yy - start_year) + mm - 1) = 100 * yy + mm end do end do - else if (time_avg.eq."yearly") then + elseif (time_avg.eq."yearly") then ntime = end_year - start_year + 1 time = new(ntime, integer) time = ispan(start_year, end_year, 1) end if - end if - ; Set index of the reference (and alternative) dataset + ; get multi-model mean index + mmm_ind = ind(datasetnames .eq. "MultiModelMean") + + ; Set index of the reference dataset and read it if (isatt(variable_info[0], "reference_dataset")) then ref_ind = ind(datasetnames.eq.variable_info[0]@reference_dataset) A_ref = read_data(info_items[ref_ind]) end if - ; individual case for HadCRUT4 observations - ; FIX-ME: mean value comes from climatology file (absolute.nc). - ; There are no missing values as in the anomaly data. - clim_ind = -1 - if (any(datasetnames.eq."HadCRUT4-clim")) then - clim_ind = ind(datasetnames.eq."HadCRUT4-clim") - ind_wo_clim = ind(datasetnames.ne."HadCRUT4-clim") - end if - ; Anomaly if (isatt(diag_script_info, "ts_anomaly")) then anom = diag_script_info@ts_anomaly @@ -143,6 +152,9 @@ begin isatt(diag_script_info, "ref_end")) then ref_start = diag_script_info@ref_start ref_end = diag_script_info@ref_end + if (ref_start.lt.start_year) then + ref_start = start_year + end if if ((ref_start.lt.start_year) .or. (ref_end.gt.end_year) .or. \ (ref_end.lt.ref_start)) then error_msg("f", DIAG_SCRIPT, "", \ @@ -171,167 +183,169 @@ begin model_arr!1 = "time" model_arr&model = datasetnames model_arr&time = time + model_arr@_FillValue = 1e+20 + model_arr = model_arr@_FillValue delete(time) - delete(ntime) ; Loop over models do imod = 0, dim_MOD - 1 - if(imod.ne.clim_ind) then - - ; Read data - A0 = read_data(info_items[imod]) - dnames = getVarDimNames(A0) + log_info("Process dataset: " + datasetnames(imod)) - ; Convert units for plotting (if required) - if (isatt(diag_script_info, "plot_units")) then - A0 = convert_units(A0, diag_script_info@plot_units) - end if - - ; time, lat, lon - if (field_type0.eq."T2Ms") then - - if (isatt(variable_info[0], "reference_dataset")) then - - if(imod .ne. 
ref_ind) then
-
-          ; Determine start/end year
-          start_year = info_items[imod]@start_year
-          end_year = info_items[imod]@end_year
-          A_ref_mask = time_operations(A_ref, start_year, \
-                                       end_year, "extract", "", 0)
-
-          A0_mask = A0
-          A0_mask = where(A_ref_mask.eq.A_ref_mask@_FillValue, \
-                          A_ref_mask@_FillValue, A0)
+    ; Read data
+    A0 = read_data(info_items[imod])
+    dnames = getVarDimNames(A0)

-          delete(A_ref_mask)
-        end if
+    ; Convert units for plotting (if required)
+    if (isatt(diag_script_info, "plot_units")) then
+      A0 = convert_units(A0, diag_script_info@plot_units)
+    end if

-      end if
+    if (isatt(variable_info[0], "reference_dataset")) then
+
+      ; masking with reference dataset
+      if (diag_script_info@ref_mask .and. imod .ne. ref_ind) then
+        ; Determine start/end year
+        start_year = info_items[imod]@start_year
+        end_year = info_items[imod]@end_year
+        A_ref_mask = time_operations(A_ref, start_year, \
+                                     end_year, "extract", "", 0)
+        A0_nomask_ref = time_operations(A0, ref_start, \
+                                        ref_end, "extract", "", 0)
+        A0 = where(A_ref_mask.eq.A_ref_mask@_FillValue, \
+                   A_ref_mask@_FillValue, A0)
+        delete(A_ref_mask)
+      end if

-      ; Anomaly
-      if(anom .eq. "anom") then
-        A0_timavg_ref = time_operations(A0, ref_start, ref_end, \
-                                        "average", "annualclim", True)
-        if(imod .ne. ref_ind) then
-          do i = 0, dimsizes(A0&time) - 1
-            A0_mask(i, :, :) = A0_mask(i, :, :) - A0_timavg_ref(:, :)
-          end do
-        else
-          do i = 0, dimsizes(A0&time) - 1
-            A0(i, :, :) = A0(i, :, :) - A0_timavg_ref(:, :)
-          end do
-        end if
-
-        anom_ref(imod) = area_operations(A0_timavg_ref, -90., 90., \
-                                         0., 360., "average", True)
-        delete(A0_timavg_ref)
+    end if
+    ; Anomaly
+    if (anom .eq. "anom") then
+      ; calculate monthly climatology over the reference period,
+      ; requiring at least 50% of the data points to be non-missing
+      tmp = time_operations(A0, ref_start, ref_end, "extract", "", 0)
+      limit = toint(0.5 * dimsizes(tmp&time))
+      do i = 0, dimsizes(tmp&time) - 1
+        tmp(i, :, :) = where(dim_num_n(.not.ismissing(tmp), 0).ge.limit, \
+                             tmp(i, :, :), tmp@_FillValue)
+      end do
+      A0_monavg_ref = time_operations(tmp, ref_start, ref_end, "average", \
+                                      "monthlyclim", True)
+
+      ; calculate anomaly for each grid point
+      do i = 0, dimsizes(A0&time) - 1
+        A0(i, :, :) = A0(i, :, :) - A0_monavg_ref(mod(i, 12), :, :)
+      end do
+
+      if (diag_script_info@ref_value) then
+        ; Calculate time average of ref period
+        ; annual mean if at least 2 months of data are available
+        ; if masking then take original unmasked dataset for ref mean
+        if (diag_script_info@ref_mask .and. imod .ne. ref_ind) then
+          tmp = A0_nomask_ref
+          delete(A0_nomask_ref)
         end if
-
-        if(imod .ne. ref_ind) then
-          procmod = area_operations(A0_mask, -90., 90., 0., \
-                                    360., "average", True)
+        date := cd_calendar(tmp&time, 0)
+        year := date(:, 0)
+        month := date(:, 1)
+        weights = days_in_month(toint(year), toint(month))
+        A0_timavg_ref = dim_avg_wgt_n_Wrap(tmp, weights, 2, 0)
+        delete(year)
+        delete(month)
+        delete(date)
+        delete(tmp)
+        delete(A0_monavg_ref)
+
+        ; calculate global mean of reference period
+        ; first for each hemisphere - if chosen
+        if (diag_script_info@mean_nh_sh) then
+          tmp1 = area_operations(A0_timavg_ref, -90., 0., 0., 360., \
+                                 "average", True)
+          tmp2 = area_operations(A0_timavg_ref, 0., 90., 0., 360., \
+                                 "average", True)
+          anom_ref_tmp = (tmp1 + tmp2) / 2.
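+          ; note: averaging the two hemispheric means, rather than taking
+          ; a single area-weighted global mean, is consistent with the
+          ; Jones et al. (2013) method noted in the modification history;
+          ; it gives both hemispheres equal weight regardless of
+          ; observational coverage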
+          delete(tmp1)
+          delete(tmp2)
         else
-          procmod = area_operations(A0, -90., 90., 0., 360., \
-                                    "average", True)
+          anom_ref_tmp = area_operations(A0_timavg_ref, -90., 90., \
+                                         0., 360., "average", True)
         end if
+        delete(A0_timavg_ref)
+        anom_ref(imod) = anom_ref_tmp
       end if

-      ; time
-      if (field_type0.eq."T0M") then
-
-        ; Anomaly
-        if(anom .eq. "anom") then
-          A0_timavg_ref = time_operations(A0, ref_start, ref_end, \
-                                          "average", "annualclim", \
-                                          True)
-          do i = 0, dimsizes(A0&time) - 1
-            A0(i) = A0(i) - A0_timavg_ref
-          end do
-        end if
-
-        procmod = A0
-
-      end if
+    end if

-      if (.not.isdefined("procmod")) then
-        error_msg("f", diag_script_info, "", "cannot process this " + \
-                  "field type " + field_type0)
-      end if
-      delete(A0)
-
-      ; Detrend
-      if (isatt(diag_script_info, "ts_detrend")) then
-        detr = diag_script_info@ts_detrend
-        tmp = runave_Wrap(procmod, 2, 0)
-        delete(procmod)
-        procmod = tmp
-        delete(tmp)
-      else
-        detr = "nodetr"
-      end if
+    ; Calculate time average of dataset
+    ; annual mean if at least 2 months of data are available
+    if (time_avg.eq."yearly") then
+      do i = 0, dimsizes(A0&time) - 1
+        A0(i, :, :) = where(dim_num_n(.not.ismissing(A0), 0).ge.2, \
+                            A0(i, :, :), A0@_FillValue)
+      end do
+      A0_timavg = time_operations(A0, -1, -1, "average", \
+                                  "yearly", True)
+    else
+      A0_timavg = A0
+    end if
+    delete(A0)
+
+    ; calculate global mean of complete time series
+    ; first for each hemisphere - if chosen
+    if (diag_script_info@mean_nh_sh) then
+      tmp1 = area_operations(A0_timavg, -90., 0., 0., 360., \
+                             "average", True)
+      tmp2 = area_operations(A0_timavg, 0., 90., 0., 360., \
+                             "average", True)
+      ; assign tmp1 first to keep its metadata, then overwrite the values
+      ; with the average of the two hemispheric means
+      procmod = tmp1
+      procmod = (tmp1 + tmp2) / 2.
+      delete(tmp1)
+      delete(tmp2)
+    else
+      procmod = area_operations(A0_timavg, -90., 90., 0., 360., \
+                                "average", True)
+    end if
+    delete(A0_timavg)

-      ; Calculate time average
-      if (time_avg.ne."monthly") then
-        A0_timavg = time_operations(procmod, -1, -1, "average", \
-                                    time_avg, True)
-      else
-        A0_timavg = procmod
-      end if
-      delete(procmod)
-      procmod = A0_timavg
-      delete(A0_timavg)
-
-      ; Match time coordinate
-      if (time_avg.eq."monthly") then
-        date = cd_calendar(procmod&time, -1)
-      else if (time_avg.eq."yearly") then
-        date = procmod&year
-      end if
-      end if
-      idx1 = ind(date(0).eq.model_arr&time)
-      idx2 = ind(date(dimsizes(date) - 1).eq.model_arr&time)
-      model_arr(imod, idx1:idx2) = (/procmod/)
-      if (imod.eq.0) then
-        copy_VarAtts(procmod, model_arr)
-      end if
+    ; Running Mean
+    if (diag_script_info@run_ave .gt. 0) then
+      run_ave = "running_mean"
+      tmp = runave_Wrap(procmod, 12 * diag_script_info@run_ave, 0)
       delete(procmod)
-      delete(date)
+      procmod = tmp
+      delete(tmp)
+    else
+      run_ave = ""
+    end if

+    ; Match time coordinate
+    if (time_avg.eq."monthly") then
+      date = cd_calendar(procmod&time, -1)
+    elseif (time_avg.eq."yearly") then
+      date = procmod&year
     end if
+    idx1 = ind(date(0).eq.model_arr&time)
+    idx2 = ind(date(dimsizes(date) - 1).eq.model_arr&time)
+    model_arr(imod, idx1:idx2) = (/procmod/)
+    if (imod.eq.0) then
+      copy_VarAtts(procmod, model_arr)
+    end if
+    delete(procmod)
+    delete(date)
   end do

   ; individual case for HadCRUT4 observations
-  ; FIX-ME: mean value comes from climatology file (absolute.nc).
+  ; mean value comes from climatology file (absolute.nc).
   ; There are no missing values as in the anomaly data.
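+  ; (the hard-coded value of 14.0 degC used below for the 1961-1990
+  ;  reference period is the commonly cited estimate of the global-mean
+  ;  absolute surface temperature for that period)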
-  if (any(datasetnames.eq."HadCRUT4-clim")) then
-    ; Read data
-    A0 = read_data(info_items[clim_ind])
-    dnames = getVarDimNames(A0)
-
-    ; Convert units for plotting (if required)
-    if (isatt(diag_script_info, "plot_units")) then
-      A0 = convert_units(A0, diag_script_info@plot_units)
+  if (anom .eq. "anom" .and. isatt(variable_info[0], "reference_dataset") \
+      .and. variable_info[0]@reference_dataset .eq. "HadCRUT4") then
+    if(ref_start .eq. 1961 .and. ref_end .eq. 1990) then
+      anom_ref(ref_ind) = 14.0
+    else
+      anom_ref(ref_ind) = anom_ref@_FillValue
     end if
-
-    A0_timavg_clim = time_operations(A0, 1990, 1990, "average", \
-                                     "annualclim", True)
-    clim_ref = area_operations(A0_timavg_clim, -90., 90., 0., 360., \
-                               "average", True)
-    anom_ref(ref_ind) = clim_ref
-
-    tmp = model_arr(ind_wo_clim, :)
-    delete(model_arr)
-    model_arr = tmp
-    delete(tmp)
-    tmp = anom_ref(ind_wo_clim)
-    delete(anom_ref)
-    anom_ref = tmp
-    delete(tmp)
   end if

   ; Convert time coordinate to years (required by the plot routine)
@@ -346,7 +360,7 @@ begin
     delete(time)
     delete(year)
     delete(month)
-  else if (time_avg.eq."yearly")
+  elseif (time_avg.eq."yearly") then
     xmin = min(model_arr&time)
     xmax = max(model_arr&time)
     tmp = todouble(model_arr&time)
@@ -354,23 +368,24 @@ begin
     model_arr&time = tmp
     delete(tmp)
   end if
-  end if

-  ; Optional output to NetCDF
-  if (config_user_info@write_netcdf.eq."True") then
-    out_path = config_user_info@work_dir
-    system("mkdir -p " + out_path)
-    out_path = out_path + "tsline_" + var0 + "_" + anom + "_" + detr + "_" \
-               + start_year + "-" + end_year + ".nc"
-    model_arr@ncdf = out_path
-    model_arr@diag_script = DIAG_SCRIPT
-    model_arr@var = var0
-    ncdf_outfile = ncdf_write(model_arr, out_path)
-  end if
+  ; *************************************
+  ; output to NetCDF
+  ; *************************************
+  out_path = config_user_info@work_dir
+  system("mkdir -p " + out_path)
+  out_path1 = out_path + "tsline_" + var0 + "_" + anom + "_" + run_ave + ".nc"
+  model_arr@ncdf = out_path1
+  model_arr@experiment = project0 + "_" + exp0
+  model_arr@diag_script = DIAG_SCRIPT
+  model_arr@var = var0
+  ncdf_outfile = ncdf_write(model_arr, out_path1)

   ; Define workstation
-  outfile = config_user_info@plot_dir + var0 + "_" + anom + "_" + \
-    detr + "_" + start_year + "-" + end_year
+  outfile = config_user_info@plot_dir + var0 + "_" \
+            + str_sub_str(diag_script_info@region, " ", "_") \
+            + "_" + project0 + "_" + exp0 + "_" + anom + "_" + run_ave + "_" \
+            + start_year + "-" + end_year
   wks = gsn_open_wks(file_type, outfile)

   ; Set resources
@@ -390,24 +405,51 @@ begin
   end if
   res@tmXBLabelAngleF = 45
   res@tmXBLabelJust = "CenterRight"
+  if (isatt(diag_script_info, "y_min")) then
+    res@trYMinF = diag_script_info@y_min
+  end if
+  if (isatt(diag_script_info, "y_max")) then
+    res@trYMaxF = diag_script_info@y_max
+  end if
+
+  res@tmXBMode = "Manual"
+  res@tmXBTickSpacingF = 20

-  if isatt(model_arr, "long_name")
-    res@tiMainString = model_arr@long_name
+  if (diag_script_info@header) then
+    res@tiMainString = diag_script_info@region
+  else
+    res@tiMainString = ""
   end if

-  if (isatt(diag_script_info, "ts_anomaly")) then
-    res@tiYAxisString = var0 + " Anomaly" + " [" + model_arr@units + "]"
+  if (isatt(variable_info[0], "long_name")) then
+    if (var0 .eq. "tas") then
+      varname = "Temperature"
+    else
+      varname = variable_info[0]@long_name
+    end if
   else
-    res@tiYAxisString = var0 + " [" + model_arr@units + "]"
+    varname = var0
+  end if
+
+  if (model_arr@units .eq.
"degC") then + units = "~F34~0~F~ C" + else + units = model_arr@units + end if + + if (anom .eq. "anom") then + res@tiYAxisString = varname + " Anomaly" + " (" + units + ")" + else + res@tiYAxisString = varname + " (" + units + ")" end if res0 = True ref_start = diag_script_info@ref_start ref_end = diag_script_info@ref_end res0@tiYAxisString = tostring(ref_start) + "-" + tostring(ref_end) \ - + " " + var0 + " Mean" + " [" + model_arr@units + "]" + + " Mean " + varname + " (" + units + ")" - if(anom .eq. "anom") then + if (anom .eq. "anom") then xy_line_anom(wks, anom_ref, model_arr, model_arr&time, \ model_arr_stddev, ref_start, ref_end, res, res0, info_items) else @@ -416,39 +458,37 @@ begin log_info(" wrote " + outfile + "." + file_type) -; ; add meta data to plot (for reporting) -; -; climofiles = new(dim_MOD, string) -; climofiles = input_file_info@filename -; -; domain = "DM_global" -; -; if (anom .eq. "anom") then -; stat = "ST_anomaly" -; captionadd = "anomalies" -; else -; stat = "ST_mean" -; captionadd = "means" -; end if -; -; alltags = array_append_record(tags, (/"PT_time", stat, domain/), 0) -; -; if (diag_script_info@multi_model_mean.eq."y") then -; allmodelnames = array_append_record(dataset_info@dataset, \ -; (/"multi-model-mean"/), 0) -; else -; allmodelnames = dataset_info@dataset -; end if -; -; caption = "Time series of the " + captionadd + " for variable " \ -; + variables(0) + ", similar to IPCC AR5, fig. 9.8." -; -; id = DIAG_SCRIPT + "_" + variables(0) -; -; contrib_authors = (/"A_cion_ir", "A_righ_ma", "A_wenz_sa", "A_bock_ls"/) -; -; ; ESMValMD(wks@fullname, alltags, caption, id, variables(0), \ -; ; allmodelnames, climofiles, DIAG_SCRIPT, contrib_authors) + ; *************************************** + ; add meta data to plot (for reporting) + ; *************************************** + + if (diag_script_info@region .eq. "Global") then + domain = "global" + else + domain = "reg" + end if + + if (anom .eq. "anom") then + statistics = "anomaly" + else + statistics = "mean" + end if + + caption = "Time series of the " + statistics + " for variable " \ + + varname + ", similar to IPCC AR5, fig. 9.8." + contrib_authors = (/"cionni_irene", "righi_mattia", \ + "wenzel_sabrina", "bock_lisa"/) + + ; Call provenance logger + log_provenance(ncdf_outfile, \ + outfile + "." + file_type, \ + caption, \ + statistics, \ + domain, \ + "times", \ + contrib_authors, \ + (/"flato13ipcc", "jones13jgr"/), \ + metadata_att_as_array(input_file_info, "filename")) leave_msg(DIAG_SCRIPT, "") diff --git a/esmvaltool/diag_scripts/ipcc_ar6/corr_pattern.ncl b/esmvaltool/diag_scripts/ipcc_ar6/corr_pattern.ncl new file mode 100644 index 0000000000..98724696ae --- /dev/null +++ b/esmvaltool/diag_scripts/ipcc_ar6/corr_pattern.ncl @@ -0,0 +1,193 @@ +; ############################################################################# +; DIAGNOSTIC SCRIPT for pattern correlation figure +; Author: Bettina Gier (Uni Bremen & DLR, Germany) +; IPCC-AR6 project +; ############################################################################# +; +; Description +; Calculates centred pattern correlations for annual mean climatologies +; and plots them. Like IPCC AR5 Ch. 
9 Fig 9.6 +; +; Required diag_script_info attributes (diagnostics specific) +; none +; +; Optional diag_script_info attributes (diagnostic specific) +; none +; +; Required variable_info attributes (variable specific) +; reference_dataset: name of reference observation +; +; Optional variable_info attributes (variable specific) +; alternative_dataset: name of alternative observations +; +; Caveats +; +; +; Modification history +; 20210226-A_bock_lisa: Modified diagnostic based on +; ipcc_ar5/ch09_fig09_6.ncl +; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" +load "$diag_scripts/shared/latlon.ncl" +load "$diag_scripts/shared/statistics.ncl" + +begin + enter_msg(DIAG_SCRIPT, "") + + var0 = variable_info[0]@short_name + info_items = select_metadata_by_name(input_file_info, var0) + infiles = metadata_att_as_array(info_items, "filename") + datasetnames = metadata_att_as_array(info_items, "dataset") + projectnames = metadata_att_as_array(info_items, "project") + dim_MOD = ListCount(info_items) + dim_VAR = ListCount(variable_info) + + ; Check and set reference dataset + ref_model = variable_info[0]@reference_dataset + if (variable_info[0]@reference_dataset.eq."None") then + error_msg("f", DIAG_SCRIPT, "", "no reference dataset is specified") + end if + log_info("reference model = " + ref_model) + ref_ind = ind(datasetnames.eq.ref_model) + if isatt(variable_info[0], "alternative_dataset") then + alt_ref = True + aref_ind = ind(datasetnames.eq.variable_info[0]@alternative_dataset) + else + alt_ref = False + end if + +end + +begin + ; Output directories + ncdf_dir = config_user_info@work_dir + "pattern_cor.nc" + modproj_dir = config_user_info@work_dir + "modprojnames.txt" + system("mkdir -p " + config_user_info@work_dir) +end + +begin +; ----------------------------------------------------------------------------- +; -------------------- Compute correlation for one var ------------------------ +; ----------------------------------------------------------------------------- + + ; Save list of preproc files for provenance in collect.ncl + preproc_files = metadata_att_as_array(info_items, "filename") + + ; Reference model + mod_idx = ispan(0, dim_MOD - 1, 1) + mod_ind_woref = mod_idx(ind(mod_idx.ne.ref_ind)) + delete(mod_idx) + + ; Make output array + all_cor = new((/dim_MOD-1/), float) + all_cor@_FillValue = 1.e20 + all_cor!0 = "models" + all_cor&models = datasetnames(mod_ind_woref) + ; Pass on alt models + if alt_ref then + all_cor@alt_obs = variable_info[0]@alternative_dataset + else + all_cor@alt_obs = "none" + end if + + ; Loop over models, with ref model processed first + model_ind = array_append_record(ref_ind, mod_ind_woref, 0) + do iloop = 0, dim_MOD - 1 + imod = model_ind(iloop) + log_info("Processing " + datasetnames(imod)) + + ; Extract model corresponding data + var = read_data(info_items[imod]) + + ; Calculate annual mean + data_yearly = time_operations(var, \ + toint(info_items[imod]@start_year), \ + toint(info_items[imod]@end_year), \ + "average", "yearly", True) + + ; Mean over the years + mean_years = dim_avg_n_Wrap(data_yearly, 0) + delete(data_yearly) + + ; Compute centred pattern correlation (ref_model will be first so no error) + if datasetnames(imod).eq.ref_model then + data_ref = mean_years + else + ; Since ref model processed first, move all models up one spot + all_cor(iloop-1) = pattern_cor(data_ref, mean_years, 1.0, 0) + end if + delete(mean_years) + delete(var) + end do + + ; 
************************************* + ; calculate means over ensemble members + ; ************************************* + + datasetnames := datasetnames(mod_ind_woref) + projectnames := projectnames(mod_ind_woref) + + ndim := dimsizes(datasetnames) + ensemble_assign = new(ndim, integer) + + unique_models = get_unique_values(datasetnames) + do gg = 0, dimsizes(unique_models) - 1 + ensemble_assign = where(datasetnames.eq.unique_models(gg), gg + 1, \ + ensemble_assign) + end do + ensemble_assign@model = datasetnames + + all_cor_em = all_cor(:max(ensemble_assign)-1) + all_cor_em = all_cor_em@_FillValue + datasetnames_em = datasetnames(:max(ensemble_assign)-1) + projectnames_em = projectnames(:max(ensemble_assign)-1) + + do gg = 1, max(ensemble_assign) ; calculate ensemble means + wind := ind(ensemble_assign.eq.gg) + if (dimsizes(wind).eq.1) then + all_cor_em(gg-1) = (/ all_cor(wind) /) + all_cor_em&models(gg-1) = datasetnames(wind) + datasetnames_em(gg-1) = datasetnames(wind) + projectnames_em(gg-1) = projectnames(wind) + else + all_cor_em(gg-1) = (/ dim_avg_n(all_cor(wind), 0) /) + all_cor_em&models(gg-1) = datasetnames(wind(0)) + datasetnames_em(gg-1) = datasetnames(wind(0)) + projectnames_em(gg-1) = projectnames(wind(0)) + end if + end do + + delete(all_cor) + all_cor = all_cor_em + delete(all_cor_em) + + ; Write data + all_cor@corvar = var0 + all_cor@corvar_long = variable_info[0]@long_name + all_cor@var = "cor" + all_cor@diag_script = DIAG_SCRIPT + all_cor@diagnostics = variable_info[0]@diagnostic + all_cor@ncdf = ncdf_dir + all_cor@input = str_join(infiles, ",") + ncdf_outfile = ncdf_write(all_cor, ncdf_dir) + + ; Write provenance + statistics = (/"corr", "clim"/) + domains = (/"global"/) + plottype = "other" + authors = (/"gier_bettina", "bock_lisa"/) + references = (/"flato13ipcc"/) + log_provenance(ncdf_outfile, "n/a", "n/a", statistics, domains, "other", \ + authors, references, preproc_files) + + ; Write list of models with project (arrays get squished as attributes) + modnames = datasetnames_em + projnames = projectnames_em + modproj = (/modnames, projnames/) + asciiwrite(modproj_dir, modproj) + + leave_msg(DIAG_SCRIPT, "") + +end diff --git a/esmvaltool/diag_scripts/ipcc_ar6/corr_pattern_collect.ncl b/esmvaltool/diag_scripts/ipcc_ar6/corr_pattern_collect.ncl new file mode 100644 index 0000000000..28aaab2410 --- /dev/null +++ b/esmvaltool/diag_scripts/ipcc_ar6/corr_pattern_collect.ncl @@ -0,0 +1,492 @@ +; ############################################################################# +; DIAGNOSTIC PLOT SCRIPT for correlation pattern figure +; Author: Bettina Gier (DLR, Germany), Lisa Bock (DLR, Germany) +; IPCC-AR6 project +; ############################################################################# +; +; Description +; Calculated centred pattern correlations for annual mean climatologies +; and plots them. Like IPCC AR5 Ch. 
9 Fig 9.6
+;
+; Required diag_script_info attributes (diagnostic specific)
+;   none
+;
+; Optional diag_script_info attributes (diagnostic specific)
+;   diag_order: give order of plotting variables on the x-axis
+;   labels: list of labels for each variable on the x-axis
+;   model_spread: if True, model spread is shaded
+;   plot_median: if True, median is plotted
+;   project_order: give order of projects
+;
+; Required variable_info attributes (variable specific)
+;   none
+;
+; Optional variable_info attributes (variable specific)
+;   none
+;
+; Required variable attributes (defined in namelist)
+;   reference_dataset: name of reference data set (observations)
+;
+; Caveats
+;   Effect of different regridding methods not yet determined
+;
+; Modification history
+;   20210226-A_bock_lisa: Modified diagnostic based on
+;                         ipcc_ar5/ch09_fig09_6_collect.ncl
+;
+; #############################################################################
+load "$diag_scripts/../interface_scripts/interface.ncl"
+load "$diag_scripts/shared/plot/style.ncl"
+
+begin
+  enter_msg(DIAG_SCRIPT, "")
+
+  ; Define file type
+  file_type = config_user_info@output_file_type
+  if (ismissing(file_type)) then
+    file_type = "ps"
+  end if
+
+  ; Set default values for non-required diag_script_info attributes
+  set_default_att(diag_script_info, "plot_median", False)
+
+; -----------------------------------------------------------------------------
+; ------------------- Collecting Data -----------------------------------------
+; -----------------------------------------------------------------------------
+
+  ; List of correlation files and project names
+  file_list = tostring(diag_script_info@input_files) + "/pattern_cor.nc"
+  mp_file_list = tostring(diag_script_info@input_files) + "/modprojnames.txt"
+
+  ; Filter out non-existing files; compute the mask once and apply it to
+  ; both lists, so each modprojnames.txt stays paired with its
+  ; pattern_cor.nc (mp files are only useful if the cor file exists)
+  present = ind(isfilepresent(file_list))
+  mp_file_list := mp_file_list(present)
+  file_list := file_list(present)
+  delete(present)
+
+  ; Set up auxiliary variables
+  var_name = new(dimsizes(file_list), string)
+  var_collect = new(dimsizes(file_list), string)
+  var_diag = new(dimsizes(file_list), string)
+  alt_obs = new(dimsizes(file_list), string)
+
+  ; Loop over files in list, read and append data
+  do ii = 0, dimsizes(file_list) - 1
+
+    data_temp = ncdf_read(file_list(ii), "cor")
+    var_name(ii) = data_temp@corvar_long
+    var_collect(ii) = data_temp@corvar
+    var_diag(ii) = data_temp@diagnostics
+    alt_obs(ii) = data_temp@alt_obs
+
+    ; Make 2D array to store all data
+    if (.not.isdefined("data_all")) then
+      data_all = new((/dimsizes(data_temp), dimsizes(file_list)/), float)
+      data_all(:, ii) = data_temp
+      data_all!0 = "models"
+      data_all&models = data_temp&models
+
+      ; Input file list for provenance
+      prov_files = str_split(data_temp@input, ",")
+    else
+      ; If model coordinates are identical
+      if (dimsizes(data_temp&models).eq.dimsizes(data_all&models)) \
+        .and. all(data_temp&models.eq.data_all&models) then
+        data_all(:, ii) = (/data_temp/)
+      else
+        ; Loop over models in new data entry
+        do imod_temp = 0, dimsizes(data_temp&models) - 1
+          ; If current model is not already part of the model coordinate
+          if (.not.any(data_temp&models(imod_temp) .eq.
data_all&models)) then + ; Append record for model(imod) + data_new = extend_var_at(data_all, 0, \ + dimsizes(data_all&models)) + data_new(dimsizes(data_all&models), ii) = (/data_temp(imod_temp)/) + data_new&models(dimsizes(data_all&models)) = \ + (/data_temp&models(imod_temp)/) + delete(data_all) + data_all = data_new + delete(data_new) + else + ; Loop over models of data + do imod = 0, dimsizes(data_all&models)-1 + ; if neq data model is similar to current + ; entry, write data entry + if (data_all&models(imod).eq. data_temp&models(imod_temp)) then + data_all(imod, ii) = (/data_temp(imod_temp)/) + end if + end do + end if + end do + end if + ; Append input file list for provenance + prov_files := array_append_record(prov_files, \ + str_split(data_temp@input, ","), 0) + end if + delete(data_temp) + end do + data_all!1 = "vars" + data_all&vars = var_name + delete(var_name) + + ; Get project for models + projects = new(dimsizes(data_all&models), string) + + ; Loop over model-project files to complete project list + do ii = 0, dimsizes(mp_file_list) - 1 + modproj = asciiread(mp_file_list(ii), -1, "string") + mods = modproj(:dimsizes(modproj)/2-1) + projs = modproj(dimsizes(modproj)/2:) + + overlap_index = get1Dindex(data_all&models, mods) + projects(overlap_index) = projs + delete([/modproj, mods, projs, overlap_index/]) + end do + + data_all&models@project = projects + delete(projects) + + ; Sort diagnostics in the order specified in the settings + if (isatt(diag_script_info, "diag_order")) then + l_ok = True + if (dimsizes(data_all&vars).ne. \ + dimsizes(diag_script_info@diag_order)) then + error_msg("w", DIAG_SCRIPT, "", "specified order of diagnostics " + \ + "cannot be applied, number of diagnostics does not match") + l_ok = False + end if + pid = new(dimsizes(diag_script_info@diag_order), integer) + do ii = 0, dimsizes(diag_script_info@diag_order) - 1 + tmp = ind(var_diag.eq.diag_script_info@diag_order(ii)) + if (any(ismissing(tmp)) .or. dimsizes(tmp).gt.1) then + error_msg("w", DIAG_SCRIPT, "", "specified order of diagnostics " + \ + "cannot be applied, invalid entry in diag_order") + break + end if + pid(ii) = tmp + delete(tmp) + end do + if (l_ok) then + data_all := data_all(:, pid) + alt_obs := alt_obs(pid) + end if + end if + +; ------------------------------------------------------------------------- +; ----------------- Interim Functions ------------------------------------- +; ------------------------------------------------------------------------- + + undef("get_unique_entries") + function get_unique_entries(array) + ; + ; Arguments: + ; array: 1D array + ; + ; Return value: 1D array of unique entries in array + ; + ; Modification history: + ; 20170406-A_gier_bettina: written. + local dummy_array, unique_new, new_array, nodupes + begin + dummy_array = array + do while (dimsizes(dummy_array).ne.0) + if (.not.isdefined("unique")) then + unique = dummy_array(0) + else + unique_new = array_append_record(unique, dummy_array(0), 0) + delete(unique) + unique = unique_new + delete(unique_new) + end if + nodupes = ind(dummy_array.ne.dummy_array(0)) + ; Missing value index are dim 1 and would give an error + if (dimsizes(dummy_array).eq. 
\ + dimsizes(ind(dummy_array.eq.dummy_array(0)))) then + break + end if + new_array = dummy_array(nodupes) + delete(nodupes) + delete(dummy_array) + dummy_array = new_array + delete(new_array) + end do + return(unique) + end +; ----------------------------------------------------------------------------- +; ---------------------------- Plotting --------------------------------------- +; ----------------------------------------------------------------------------- + + ; Calculating necessary values + ; Number of Projects needed to determine span + ; For now just CMIP projects + c_projects = str_match_ic(data_all&models@project, "CMIP") + projects = get_unique_entries(c_projects) + if (isatt(diag_script_info, "project_order")) then + project_order = diag_script_info@project_order + if (dimsizes(project_order) .eq. dimsizes(projects)) then + projects := project_order + else + error_msg("w", DIAG_SCRIPT, "", "specified order of projects " + \ + "cannot be applied, invalid number of projects") + end if + end if + n_var = dimsizes(data_all&vars) + + nr_projects = dimsizes(projects) + if (nr_projects .eq. 2) then + x_val = new(n_var, float) + do i = 0, n_var-1 + x_val(i) = 1 + i * 3 + end do + elseif (nr_projects .eq. 3) then + x_val = new(n_var, float) + do i = 0, n_var-1 + x_val(i) = 1.5 + i * 4 + end do + else + x_val = ispan(1, n_var*nr_projects, nr_projects) + end if + + ; Mean and Median of Ensemble - without alt obs + obs_ind = get1Dindex(data_all&models, alt_obs) + if all(alt_obs.eq."none") then + mod_ind = ispan(0, dimsizes(data_all&models)-1, 1) + else + ex_ind = obs_ind(ind(.not.ismissing(obs_ind))) + mods_ind = ispan(0, dimsizes(data_all&models)-1, 1) + ex_ind@_FillValue = default_fillvalue("integer") + mods_ind@_FillValue = default_fillvalue("integer") + mod_ind = get1Dindex_Collapse(mods_ind, ex_ind) + delete(mods_ind) + delete(ex_ind) + end if + ; Split by project + means = new((/nr_projects, n_var/), float) + if (diag_script_info@plot_median) then + median = new((/nr_projects, n_var/), float) + end if + + do iproj = 0, nr_projects - 1 + mod_proj_ind = ind(data_all&models@project(mod_ind).eq.projects(iproj)) + means(iproj, :) = dim_avg_n(data_all(mod_proj_ind, :), 0) + if (diag_script_info@plot_median) then + median(iproj, :) = dim_median_n(data_all(mod_proj_ind, :), 0) + end if + delete(mod_proj_ind) + end do + + ; Create outfile directory + system("mkdir -p " + config_user_info@plot_dir) + + ; Plotting preparation + name = "" + outfile = config_user_info@plot_dir + name + "patterncor." + file_type + wks = gsn_open_wks(file_type, outfile) + wks@fullname = outfile + + ; Calc limits + y_min = min(data_all) + y_min := decimalPlaces(y_min-0.05, 1, True) + x_max = max(x_val) + nr_projects * 0.5 + + ; Set half line length + l_length = 0.1 + + ; Project Colors - TODO: let them be specified in cfg + fcolors = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_cmip_line.rgb") + + res = True + res@gsnDraw = False + res@gsnFrame = False + res@tiMainString = "Pattern Correlation With Observational Reference" + res@tiMainFontHeightF = 0.01 + res@vpWidthF = 0.8 + n_square = 16. + if x_max.le. 
n_square then
+    res@vpHeightF = 0.8
+  else
+    res@vpHeightF = 0.8*(n_square/x_max)
+  end if
+  font_height = 0.02/0.6 * res@vpHeightF
+  res@gsnMaximize = True
+  res@tiYAxisString = "Correlation"
+  res@trYMinF = y_min
+  res@trYMaxF = 1
+  yspan = res@trYMaxF - res@trYMinF
+  res@trXMinF = 0
+  res@trXMaxF = x_max
+
+  if (isatt(diag_script_info, "labels")) then
+    res@tmXBLabels = diag_script_info@labels
+  else
+    res@tmXBLabels = data_all&vars
+  end if
+
+  res@tmXBValues = x_val
+  res@tmXBMode = "Explicit"
+  if (n_var .gt. 3) then
+    res@tmXBLabelFontHeightF = font_height * 0.6
+  else
+    res@tmXBLabelFontHeightF = font_height
+  end if
+  ; Derive the marker size from the half line length l_length
+  marker_size = res@vpWidthF * 3 * l_length / (res@trXMaxF - res@trXMinF)
+
+  ; Resources for model lines
+  res_lines = True                      ; polyline mods desired
+  res_lines@gsLineDashPattern = 0.      ; solid line
+  res_lines@gsLineThicknessF = 2.5      ; line thickness
+  res_lines@tfPolyDrawOrder = "PreDraw"
+
+  ; Resources for mean lines
+  res_mlines = True                     ; polyline mods desired
+  res_mlines@gsLineDashPattern = 0.     ; solid line
+  res_mlines@gsLineThicknessF = 4.      ; thicker line
+  res_mlines@tfPolyDrawOrder = "PreDraw"
+
+  ; Resources for obs data markers
+  res_circ = True
+  res_circ@gsMarkerIndex = 16
+  res_circ@gsMarkerColor = (/120, 120, 120, 255/) / 255.
+  res_circ@gsMarkerSizeF = 4 * marker_size
+  res_circ@gsMarkerOpacityF = 0.4
+
+  ; Resources for white markers below median
+  res_circw = True
+  res_circw@gsMarkerIndex = 16
+  res_circw@gsMarkerColor = "white"
+  res_circw@gsMarkerSizeF = 0.95*marker_size
+  res_circw@tfPolyDrawOrder = "PreDraw"
+
+  ; Resources for median markers if required
+  res_circm = True
+  res_circm@gsMarkerIndex = 4
+  res_circm@gsMarkerSizeF = marker_size
+  res_circm@gsMarkerThicknessF = 3.
+  res_circm@tfPolyDrawOrder = "Draw"
+
+  ; Resources for legend text
+  res_text = True                       ; text mods desired
+  res_text@txFontHeightF = font_height  ; change text size
+  res_text@txJust = "CenterLeft"        ; text justification
+
+  ; x positions of the markers: one slot per variable, with the projects
+  ; offset around the slot centre
+  x_val_proj = new((/nr_projects, n_var/), float)
+  d_proj = 1.  ; offset between projects within a slot
+  do iproj = 0, nr_projects - 1
+    do ivar = 0, n_var - 1
+      x_val_proj(iproj, ivar) = ivar*(1 + nr_projects) - 0.5 \
+                                + d_proj*(iproj+1.)
+    end do
+  end do
+
+  ; Start from a blank plot and attach lines/markers as annotations
+  ; (gsn and xy marker sizes are not directly comparable)
+  plot = gsn_csm_blank_plot(wks, res)
+
+  if (diag_script_info@plot_median) then
+    do iproj = 0, nr_projects - 1
+      res_circm@gsMarkerColor = fcolors(iproj, :)
+      plot@$unique_string("dum_median")$ = gsn_add_polymarker( \
+        wks, plot, x_val_proj(iproj, :), median(iproj, :), res_circm)
+    end do
+  end if
+
+  ; Add lines for individual models
+  do ivar = 0, dimsizes(data_all(0, :))-1
+    do iproj = 0, dimsizes(projects)-1
+      ; Skip project if there is no data for it
+      proj_mods = ind(data_all&models@project(mod_ind).eq.projects(iproj))
+      if .not. all(ismissing(data_all(proj_mods, ivar))) then
+        proj_center = x_val_proj(iproj, ivar)
+        xx = (/proj_center-l_length, proj_center+l_length/)
+        ; Plot lines for mean
+        xx_mean = (/proj_center-l_length*3., proj_center+l_length*3./)
+        yy_mean = (/means(iproj, ivar), means(iproj, ivar)/)
+        res_mlines@gsLineColor = fcolors(iproj, :)
+        res_lines@gsLineColor = fcolors(iproj, :)
+        plot@$unique_string("dum")$ = gsn_add_polyline( \
+          wks, plot, xx_mean, yy_mean, res_mlines)
+        ymin = 1.
+        ymax = 0.
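+        ; What follows is the per-member drawing: each model of this
+        ; project is added as a short horizontal line at its correlation
+        ; value, while ymin/ymax track the member range for the optional
+        ; model_spread shading further below.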
+ do imod = 0, dimsizes(data_all(:, 0)) - 1 + ; Only plot if model in right project + if data_all&models@project(imod).eq.projects(iproj) then + ; Don't plot obs as lines + if (.not.ismissing(data_all(imod, ivar))) then + if (data_all&models(imod).ne.alt_obs(ivar)) then + yy = (/data_all(imod, ivar), data_all(imod, ivar)/) + plot@$unique_string("dum")$ = gsn_add_polyline( \ + wks, plot, xx, yy, res_lines) + end if + if (yy(0) .lt. ymin) then + ymin = yy(0) + end if + if (yy(0) .gt. ymax) then + ymax = yy(0) + end if + end if + end if + end do + if (diag_script_info@plot_median) then + plot@$unique_string("dum_ci")$ = gsn_add_polymarker( \ + wks, plot, x_val_proj(iproj, ivar), median(iproj, ivar), res_circw) + end if + if (isatt(diag_script_info, "model_spread")) then + if (diag_script_info@model_spread) then + yyyy = (/ymin, ymin, ymax, ymax/) + xxxx = (/xx(0), xx(1), xx(1), xx(0)/) + res_shading = res_lines + res_shading@gsFillColor = fcolors(iproj, :) + res_shading@gsFillOpacityF = 0.2 + plot@$unique_string("dum")$ = gsn_add_polygon( \ + wks, plot, xxxx, yyyy, res_shading) + end if + end if + end if + delete(proj_mods) + end do + if (alt_obs(ivar).ne."none") then + ; Plot obs as circles + plot@$unique_string("dum_circ")$ = gsn_add_polymarker( \ + wks, plot, x_val(ivar), data_all(obs_ind(ivar), ivar), res_circ) + end if + end do + + y_min_label = res@trYMinF + 0.1*yspan + lb_stride = yspan/res@vpHeightF * font_height * 2. + plabel = projects + ; Draw Legend + do iproj = 0, dimsizes(projects)-1 + res_text@txFontColor = fcolors(iproj, :) + ; CMIP5 label has to be reduced to CMIP5 sometimes + if str_match_ind_ic(plabel, "CMIP5").eq.iproj then + plabel(iproj) = "CMIP5" + end if + plot@$unique_string("dum_l")$ = gsn_add_text(wks, plot, plabel(iproj),\ + x_val(0)-0.5, y_min_label + \ + lb_stride*(iproj+1.2), \ + res_text) + end do + res_text@txFontColor = res_circ@gsMarkerColor + plot@$unique_string("dum_l")$ = gsn_add_text(wks, plot, \ + "additional Observations", \ + x_val(0)-0.5, y_min_label, \ + res_text) + + draw(plot) + frame(wks) + ; Write output + system("mkdir -p " + config_user_info@work_dir) + workpath = config_user_info@work_dir + "pattern_cor.nc" + ncdf_outfile = ncdf_write(data_all, workpath) + + ; collect meta-data and call ESMValMD function + caption = "Centered pattern correlations between models and observations" \ + + " for the annual mean climatologies " \ + + "(similar to IPCC ch. 9 fig. 9.6)." + statistics = (/"corr", "clim"/) + domains = (/"global"/) + plottype = "other" + authors = (/"gier_bettina", "bock_lisa"/) + references = (/"flato13ipcc"/) + log_provenance(ncdf_outfile, outfile, caption, statistics, domains, \ + plottype, authors, references, prov_files) + leave_msg(DIAG_SCRIPT, "") +end diff --git a/esmvaltool/diag_scripts/ipcc_ar6/model_bias.ncl b/esmvaltool/diag_scripts/ipcc_ar6/model_bias.ncl new file mode 100644 index 0000000000..9d7c6b6f7f --- /dev/null +++ b/esmvaltool/diag_scripts/ipcc_ar6/model_bias.ncl @@ -0,0 +1,777 @@ +; MODEL_BIAS +; ############################################################################ +; Author: Lisa Bock (DLR, Germany), Axel Lauer (DLR, Germany) +; PROJECT-NAME IPCC-AR6 +; ############################################################################ +; Description +; Calculates the multi-model mean bias, absolute difference, relative +; difference and root mean square difference of annual mean 2-d cloud +; variables compared with a reference dataset (observations). 
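+;    In short, the calculations below amount to: bias = MMM - ref on the
+;    common grid; per-model RMSD maps are computed from the monthly
+;    climatologies via calculate_metric(..., "RMSDxy") and then averaged
+;    over the models.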
+; +; Required diag_script_info attributes (diagnostic specific) +; none +; +; Optional diag_script_info attributes (diagnostic specific) +; plot_abs_diff: additionally also plot absolute differences (True, False) +; plot_rel_diff: additionally also plot relative differences (True, False) +; plot_rms_diff: additionally also plot root mean square differences +; (True, False) +; projection: map projection, e.g., Mollweide, Mercator +; +; Required variable_info attributes (variable specific) +; reference_dataset: name of reference datatset +; +; Optional variable_info attributes (variable specific) +; long_name: description of variable +; +; Caveats +; none +; +; Modification history +; 20190312-bock_lisa: added calculation of RMSD and IPCC AR6 color scheme +; 20190312-bock_lisa: adapted code (based on clouds/clouds_bias.ncl) +; +; ############################################################################ + +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/scaling.ncl" +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/contour_maps.ncl" + +begin + + enter_msg(DIAG_SCRIPT, "") + + var0 = variable_info[0]@short_name + info0 = select_metadata_by_name(input_file_info, var0) + dim_MOD = ListCount(info0) + if (isatt(variable_info[0], "reference_dataset")) then + refname = variable_info[0]@reference_dataset + end if + names = metadata_att_as_array(info0, "dataset") + infiles = metadata_att_as_array(info0, "filename") + + log_info("++++++++++++++++++++++++++++++++++++++++++") + log_info(DIAG_SCRIPT + " (var: " + var0 + ")") + log_info("++++++++++++++++++++++++++++++++++++++++++") + + ; Set default values for non-required diag_script_info attributes + set_default_att(diag_script_info, "plot_abs_diff", False) + set_default_att(diag_script_info, "plot_rms_diff", False) + set_default_att(diag_script_info, "plot_rel_diff", False) + set_default_att(diag_script_info, "stippandhatch", False) + set_default_att(diag_script_info, "projection", "Robinson") + + ; make sure path for (mandatory) netcdf output exists + + work_dir = config_user_info@work_dir + "/" + ; Create work dir + system("mkdir -p " + work_dir) + + input_dir = diag_script_info@input_files(1) + +end + +begin + ; ======================================================================== + ; ========================== initialization ============================== + ; ======================================================================== + + ; check for reference dataset definition + + if (.not.(isvar("refname"))) then + error_msg("f", DIAG_SCRIPT, "", "no reference dataset defined in recipe") + end if + + plot_abs_diff = diag_script_info@plot_abs_diff + plot_rel_diff = diag_script_info@plot_rel_diff + plot_rms_diff = diag_script_info@plot_rms_diff + + stippandhatch = diag_script_info@stippandhatch + + ; get reference dataset + + ref_ind = ind(names .eq. refname) + if (ismissing(ref_ind)) then + error_msg("f", DIAG_SCRIPT, "", "reference dataset (" \ + + refname + ") is missing") + end if + + ; get multi-model mean index + + mm_ind = ind(names .eq. 
"MultiModelMean") + + if (ismissing(mm_ind)) then + error_msg("f", DIAG_SCRIPT, "", "multi-model mean is missing (required)") + end if + + ; basename of diag_script + + diag_script_base = basename(DIAG_SCRIPT) + + ; ======================================================================== + ; =========================== Read data ================================== + ; ======================================================================== + + ; note: 1) masking is handled by the backend + ; 2) multi-model mean is calculated by the backend + + ; read data + + A0 = read_data(info0[mm_ind]) + mmdata = time_operations(A0, -1, -1, "average", "annualclim", True) + if (dimsizes(A0&time).gt.12) then + mm_mon = time_operations(A0, -1, -1, "average", "monthlyclim", True) + else + mm_mon = A0 + end if + delete(A0) + + A0 = read_data(info0[ref_ind]) + refdata = time_operations(A0, -1, -1, "average", "annualclim", True) + if (dimsizes(A0&time).gt.12) then + ref_mon = time_operations(A0, -1, -1, "average", "monthlyclim", True) + else + ref_mon = A0 + end if + delete(A0) + + dim = dimsizes(mmdata) + data = new((/dim_MOD-2, dim(0), dim(1)/), float) + data_mon = new((/dim_MOD-2, 12, dim(0), dim(1)/), float) + + ii = 0 + + do imod = 0, dim_MOD - 1 + + if(imod .ne. mm_ind .and. imod .ne. ref_ind) then + log_info("processing " + names(imod)) + + if (isvar("data1")) then + delete(data1) + end if + + if (isvar("A0")) then + delete(A0) + end if + + A0 = read_data(info0[imod]) + + data(ii, :, :) = time_operations(A0, -1, -1, "average", "annualclim", \ + True) + data_mon(ii, :, :, :) = time_operations(A0, -1, -1, "average", \ + "monthlyclim", True) + + ii = ii + 1 + + delete(A0) + + end if + + end do + + ; ----------------------------------------------------------- + ; read precalculated natural variability for multi-model-mean + ; ----------------------------------------------------------- + + if (stippandhatch) then + log_info("Read precalculated natural variability for multi-model") + natvar_file = input_dir + "/" + "IAV_mmm_piControl_" + \ + var0 + "_annualclim.nc" + natvar = addfile(natvar_file, "r") + natvar_mmm = natvar->iav + tmp = natvar_mmm + delete(natvar_mmm) + natvar_mmm = rm_single_dims(tmp) + delete(tmp) + end if + + ; ----------------------------------------------------------- + + ; convert units for plotting + if (var0.eq."pr") then + ; kg m-2 s-1 --> mm day-1 + mmdata = convert_units(mmdata, "mm/day") + refdata = convert_units(refdata, "mm/day") + data = convert_units(data, "mm/day") + ref_mon = convert_units(ref_mon, "mm/day") + data_mon = convert_units(data_mon, "mm/day") + elseif (var0.eq."tas") then + mmdata = convert_units(mmdata, "degC") + data = convert_units(data, "degC") + refdata = convert_units(refdata, "degC") + elseif (var0.eq."tos" .or. var0.eq."thetao") then + if (mmdata@units .eq. "K") then + mmdata = convert_units(mmdata, "degC") + data_mon = convert_units(data_mon, "degC") + end if + if (refdata@units .eq. "K") then + refdata = convert_units(refdata, "degC") + ref_mon = convert_units(ref_mon, "degC") + end if + elseif (var0.eq."sos") then + refdata = refdata * 1000. 
+  end if
+
+  ; ========================================================================
+  ; =========================== Calculations ===============================
+  ; ========================================================================
+
+  ; differences between multi-model mean and reference data set
+  ; (multi-model bias)
+
+  diff = mmdata - refdata
+  mmdata@diag_script = DIAG_SCRIPT
+  copy_VarMeta(mmdata, diff)
+
+  if any(var0 .eq. (/"tas", "tos", "thetao"/)) then
+    diff@units = "~F34~0~F~ C"
+    diff@res_cnLevels = ispan(-6, 6, 1)
+  elseif (var0 .eq. "pr") then
+    diff@res_cnLevels = ispan(-30, 30, 5) * 0.1
+  elseif any(var0 .eq. (/"sos", "so"/)) then
+    diff@units = "PSS-78"
+    diff@res_cnLevels = ispan(-30, 30, 5) * 0.1
+  end if
+
+  diff_i = new((/dim_MOD-2, dim(0), dim(1)/), float)
+  absdiff_i = new((/dim_MOD-2, dim(0), dim(1)/), float)
+  rmsdiff_i = new((/dim_MOD-2, dim(0), dim(1)/), float)
+  reldiff_i = new((/dim_MOD-2, dim(0), dim(1)/), float)
+
+  if (stippandhatch .or. plot_rms_diff .or. plot_abs_diff \
+      .or. plot_rel_diff) then
+
+    ii = 0
+
+    do imod = 0, dim_MOD - 1
+
+      if (imod .ne. mm_ind .and. imod .ne. ref_ind) then
+        log_info("processing " + names(imod))
+
+        ; difference
+        diff_i(ii, :, :) = data(ii, :, :) - refdata
+
+        ; absolute difference
+        absdiff_i(ii, :, :) = abs(diff_i(ii, :, :))
+
+        ; root mean square error
+        if (plot_rms_diff) then
+          rmsdiff_i(ii, :, :) = calculate_metric(data_mon(ii, :, :, :), \
+                                                 ref_mon, "RMSDxy")
+        end if
+
+        ; relative difference
+        ; replace "epsilon" values with missing value
+        datam = where(abs(data) .lt. 1.e-6, data@_FillValue, data)
+        reldiff_i(ii, :, :) = 100.0 * (diff_i(ii, :, :) / datam(ii, :, :))
+
+        ii = ii + 1
+
+      end if
+
+    end do
+
+  end if
+
+  ; multi-model mean of absolute difference between models and reference
+  ; data set
+
+  absdiff = dim_avg_n_Wrap(absdiff_i, 0)
+  copy_VarMeta(diff, absdiff)
+
+  rmsdiff = dim_avg_n_Wrap(rmsdiff_i, 0)
+  copy_VarMeta(diff, rmsdiff)
+
+  reldiff = dim_avg_n_Wrap(reldiff_i, 0)
+  copy_VarMeta(diff, reldiff)
+
+  rmsd = calculate_metric(refdata, mmdata, "RMSD")
+  bias = area_operations(diff, -90., 90., 0., 360., "average", True)
+  mean = area_operations(mmdata, -90., 90., 0., 360., "average", True)
+
+  ; ----------------------------------------------
+  ; Determine sigma and sign for significance
+  ; ----------------------------------------------
+  if (stippandhatch) then
+    var_pos_signif = dim_num_n(where(diff_i .gt. 0., 1., \
+                                     diff@_FillValue) .eq. 1., 0)
+    var_neg_signif = dim_num_n(where(diff_i .lt. 0., 1., \
+                                     diff@_FillValue) .eq. 1., 0)
+    var_pos_signif_tmp = \
+      var_pos_signif / where((var_pos_signif + var_neg_signif) \
+                             .ne. 0., var_pos_signif + \
+                             var_neg_signif, diff@_FillValue)
+    var_neg_signif_tmp = \
+      var_neg_signif / where((var_pos_signif + var_neg_signif) \
+                             .ne. 0., var_pos_signif + \
+                             var_neg_signif, diff@_FillValue)
+    ; 1 where at least 80% of the models agree on the sign of the bias
+    signif_and_sign = where(var_pos_signif_tmp .ge. 0.8 .or. \
+                            var_neg_signif_tmp .ge. 0.8, 1., 0.)
+    signif_and_sign = where(ismissing(signif_and_sign), 0., \
+                            signif_and_sign)
+    ; variability threshold: sqrt(2) * 1.645 = ~2.3264 times the
+    ; precalculated internal variability
+    var_thres = abs(2.3264 * natvar_mmm)
+    tmp := new(dimsizes(diff_i), "float")
+    tmp = 0
+    do imod = 0, dim_MOD - 3
+      tmp(imod, :, :) = where(abs(diff_i(imod, :, :)) .gt. var_thres(:, :), \
+                              1, 0)
+    end do
+    var_pos_signif := dim_sum_n(tmp, 0)
+    delete(tmp)
+    var_pos_signif_tmp := var_pos_signif / (dim_MOD - 2)
+    ; 1 where at least 66% of the models exceed the variability threshold
+    sigma_and_sign = where(var_pos_signif_tmp .ge. 0.66, 1., 0.)
+
+    to_plot_not_robust = where(sigma_and_sign .eq. 0, 1., 0.)
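+
+    ; The two masks drive the AR6-style overlays: hatching where fewer
+    ; than 66% of the models exceed the variability threshold (change not
+    ; robust), cross-hatching where at least 66% exceed it but fewer than
+    ; 80% agree on the sign (conflicting signals).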
+ + to_plot_confl_sign = where(signif_and_sign .eq. 0, sigma_and_sign, 0.) + + copy_VarMeta(diff, to_plot_confl_sign) + copy_VarMeta(diff, to_plot_not_robust) + + if (plot_rms_diff) then + to_plot_not_signif_rmsd = where(rmsdiff(:, :) .lt. \ + abs(natvar_mmm), 1., 0.) + copy_VarMeta(diff, to_plot_not_signif_rmsd) + end if + end if + + ; ======================================================================== + ; ============================= plotting ================================= + ; ======================================================================== + + diff@res_gsnMaximize = True ; use full page for the plot + diff@res_cnFillOn = True ; color plot desired + diff@res_cnLineLabelsOn = False ; contour lines + diff@res_cnLinesOn = False + diff@res_tiMainOn = True + diff@res_gsnLeftStringFontHeightF = 0.015 + diff@res_gsnRightStringFontHeightF = 0.015 + diff@res_cnLevelSelectionMode = "ExplicitLevels" + diff@res_mpOutlineOn = True + if (.not.isatt(diff, "res_cnLevels")) then + diff@res_cnLevels = fspan(min(diff), max(diff), 20) + end if + diff@res_mpFillOn = False + diff@res_lbLabelBarOn = True + diff@res_gsnRightString = "" + diff@res_gsnLeftString = "" + diff@res_mpFillDrawOrder = "PostDraw" ; draw map fill last + diff@res_cnMissingValFillColor = "Gray" + diff@res_tmYLLabelsOn = False + diff@res_tmYLOn = False + diff@res_tmYRLabelsOn = False + diff@res_tmYROn = False + diff@res_tmXBLabelsOn = False + diff@res_tmXBOn = False + diff@res_tmXTLabelsOn = False + diff@res_tmXTOn = False + diff@res_cnInfoLabelOn = False ; turn off cn info label + diff@res_mpProjection = diag_script_info@projection + + if (var0 .eq. "tos" .or. var0 .eq. "sos") then + diff@res_mpCenterLonF = 180 + end if + + diff@var = var0 ; Overwrite existing entry + if (isatt(variable_info[0], "long_name")) then + diff@var_long_name = variable_info[0]@long_name + end if + diff@var_units = diff@units + + plots = new((/5/), graphic) + + ; -------------------------------------------------------------------- + ; plot contour map + + diff@res_gsnDraw = False ; Do not draw yet + diff@res_gsnFrame = False ; Don't advance frame. + diff@res_mpPerimOn = False + + diff@res_mpGeophysicalLineColor = "gray42" + diff@res_mpPerimOn = False + diff@res_mpGridLineColor = "transparent" + diff@res_mpGridAndLimbOn = True + diff@res_mpOutlineOn = True + + diff@res_lbTitleString = "(" + diff@units + ")" + diff@res_lbTitlePosition = "Bottom" + + diff@res_lbLabelFontHeightF = 0.014 + diff@res_lbTopMarginF = 0.1 + diff@res_lbTitleFontHeightF = 0.014 + + diff@res_lbBoxEndCapStyle = "TriangleBothEnds" + diff@res_lbBoxLineColor = "gray10" + + diff@res_tiMainFontHeightF = 0.016 + + diff@res_gsnRightStringParallelPosF = 0.8 + + if(isatt(diag_script_info, "bias_caption")) then + diff@res_tiMainString = diag_script_info@bias_caption + else + diff@res_tiMainString = "Multi-Model Mean Bias" + end if + + copy_VarMeta(diff, mmdata) + delete(mmdata@res_cnLevels) + + if(isatt(diag_script_info, "mean_caption")) then + mmdata@res_tiMainString = diag_script_info@mean_caption + else + mmdata@res_tiMainString = "Multi-Model Mean" + end if + + mmdata@res_gsnLeftStringFontHeightF = 0.015 + mmdata@res_gsnRightStringFontHeightF = 0.015 + mmdata@res_gsnRightString = " " + mmdata@res_gsnLeftString = " " + + if (var0 .eq. 
"tas") then + mmdata@res_cnLevels = ispan(-35, 35, 5) + + pal = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_temperature_div.rgb") + mmdata@res_cnFillPalette = pal + diff@res_cnFillPalette = pal + if (plot_abs_diff) then + pal3 = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_temperature_div.rgb") + end if + if (plot_rel_diff) then + pal4 = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_temperature_10.rgb") + end if + if (plot_rms_diff) then + pal5 = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_temperature_seq.rgb") + end if + elseif (var0.eq."pr") then + mmdata@res_cnLevels = ispan(1, 10, 1) + + pal = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_precipitation_seq.rgb") + mmdata@res_cnFillPalette = pal + mmdata@res_lbBoxEndCapStyle = "TriangleHighEnd" + pal2 = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_precipitation_div.rgb") + diff@res_cnFillPalette = pal2 + if (plot_abs_diff) then + pal3 = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_precipitation_10.rgb") + end if + if (plot_rel_diff) then + pal4 = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_precipitation_10.rgb") + end if + if (plot_rms_diff) then + pal5 = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_precipitation_seq.rgb") + end if + elseif (var0.eq."tos" .or. var0.eq."thetao") then + mmdata@res_cnLevels = ispan(-30, 30, 5) + + pal = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_temperature_div.rgb") + mmdata@res_cnFillPalette = pal + diff@res_cnFillPalette = pal + if (plot_abs_diff) then + pal3 = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_temperature_10.rgb") + end if + if (plot_rel_diff) then + pal4 = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_temperature_10.rgb") + end if + if (plot_rms_diff) then + pal5 = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_temperature_seq.rgb") + end if + elseif (var0.eq."sos" .or. var0 .eq. "so") then + mmdata@res_cnLevels = ispan(26, 37, 1) + + pal = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_misc_seq_1.rgb") + mmdata@res_cnFillPalette = pal + pal2 = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_misc_div.rgb") + diff@res_cnFillPalette = pal2 + else + mmdata@res_cnLevels = fspan(min(mmdata), max(mmdata), 20) + end if + + plotsperline = (/2, 0/) + plotind = (/0, 1/) ; mmm and mean bias are always plotted + + ; absolute differences + + if (plot_abs_diff) then + copy_VarMeta(diff, absdiff) + + absdiff@res_tiMainString = "Multi Model Mean of Absolute Error" + + iadd = 2 + itmp = array_append_record(plotind, iadd, 0) + delete(plotind) + plotind = itmp + delete(itmp) + plotsperline(1) = plotsperline(1) + 1 + end if + + ; root mean square differences + + if (plot_rms_diff) then + copy_VarMeta(diff, rmsdiff) + if (isvar("pal5")) then + rmsdiff@res_cnFillPalette = pal5 + rmsdiff@res_lbBoxEndCapStyle = "TriangleHighEnd" + end if + if (var0 .eq. "tas") then + delete(rmsdiff@res_cnLevels) + rmsdiff@res_cnLevels = ispan(1, 12, 1) * 0.5 + elseif (var0 .eq. 
"pr") + delete(rmsdiff@res_cnLevels) + rmsdiff@res_cnLevels = ispan(1, 8, 1) * 0.5 + end if + + if(isatt(diag_script_info, "rmsd_caption")) then + rmsdiff@res_tiMainString = diag_script_info@rmsd_caption + else + rmsdiff@res_tiMainString = "Multi Model Mean of Root Mean Square Error" + end if + + iadd = 3 + itmp = array_append_record(plotind, iadd, 0) + delete(plotind) + plotind = itmp + delete(itmp) + plotsperline(1) = plotsperline(1) + 1 + end if + + ; relative differences + + if (plot_rel_diff) then + copy_VarMeta(diff, reldiff) + delete(reldiff@res_cnLevels) + reldiff@res_cnLevels = fspan(-80.0, 80.0, 9) + reldiff@res_tiMainString = "Multi-Model Mean of Relative Error" + reldiff@units = "%" + reldiff@res_lbTitleString = "(" + reldiff@units + ")" + if (isvar("pal4")) then + delete(reldiff@res_cnFillColors) + reldiff@res_cnFillColors = pal4 + end if + + iadd = 4 + itmp = array_append_record(plotind, iadd, 0) + delete(plotind) + plotind = itmp + delete(itmp) + plotsperline(1) = plotsperline(1) + 1 + end if + + ; panelling resources + + pres = True + pres@gsnPanelCenter = False + pres@gsnPanelRowSpec = True ; tell panel what order to plot + pres@gsnPanelYWhiteSpacePercent = 5 + pres@gsnPanelXWhiteSpacePercent = 5 + if (isatt(diag_script_info, "panel_num")) then + pres@gsnPanelFigureStrings = diag_script_info@panel_num + else + pres@gsnPanelFigureStrings = (/"a)", "b)", "c)", "d)"/) + end if + pres@gsnPanelFigureStringsPerimOn = False + pres@gsnPanelFigureStringsBackgroundFillColor = "transparent" + pres@gsnPanelFigureStringsJust = "TopLeft" + pres@gsnPanelFigureStringsFontHeightF = 0.016 + + if (isatt(diag_script_info, "panel_title")) then + pres@gsnPanelMainString = diag_script_info@panel_title + end if + + ; -------------------------------------------------------------------- + ; create workspace + + if (isvar("wks")) then + delete(wks) + end if + + wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "model_bias_" + var0 \ + + "_annualclim_" + input_file_info[1]@project) + + plotfile = wks@fullname + + plots(0) = contour_map(wks, mmdata, var0) + plots(1) = contour_map(wks, diff, var0) + if (plot_abs_diff) then + plots(2) = contour_map(wks, absdiff, var0) + end if + if (plot_rms_diff) then + plots(3) = contour_map(wks, rmsdiff, var0) + end if + if (plot_rel_diff) then + plots(4) = contour_map(wks, reldiff, var0) + end if + + ; add significance + if (stippandhatch) then + ; resources cross-hatching + ress = True + ress@gsnDraw = False + ress@gsnFrame = False + ress@gsnAddCyclic = True + ress@cnLinesOn = False + ress@cnLineLabelsOn = False + ress@cnLevelSelectionMode = "ExplicitLevels" + ress@cnLevels = (/.3, 1.3/) + ress@cnInfoLabelOn = False + ress@tiMainOn = False + ress@cnFillOn = True + ress@cnFillMode = "AreaFill" + ress@cnMonoFillPattern = False + ress@cnFillPatterns = (/-1, 6, 6/) + ress@cnMonoFillColor = True + ress@lbLabelBarOn = False + ress@gsnLeftString = "" + ress@gsnRightString = "" + ress@cnFillDotSizeF = 0.001 + ress@cnFillScaleF = 1. ; add extra density + + ; plot cross-hatching + if (max(to_plot_confl_sign(:, :)) .gt. 
\ + min(to_plot_confl_sign(:, :))) then + plot1 = gsn_csm_contour(wks, to_plot_confl_sign(:, :), ress) + overlay(plots(1), plot1) + end if + + ; resources hatching + resb = True + resb@gsnDraw = False + resb@gsnFrame = False + resb@gsnAddCyclic = True + resb@cnLinesOn = False + resb@cnLineLabelsOn = False + resb@cnLevelSelectionMode = "ExplicitLevels" + resb@cnLevels = (/.3, 1.3/) + resb@cnInfoLabelOn = False + resb@tiMainOn = False + resb@cnFillOn = True + resb@cnFillMode = "AreaFill" + resb@cnRasterSmoothingOn = False + resb@cnMonoFillPattern = False + resb@cnFillPatterns = (/-1, 4, 4/) + resb@cnMonoFillColor = True + resb@lbLabelBarOn = False + resb@gsnLeftString = "" + resb@gsnRightString = "" + resb@cnFillScaleF = 0.5 + + ; plot hatching + if (max(to_plot_not_robust(:, :)) .gt. \ + min(to_plot_not_robust(:, :))) then + plot2 = gsn_csm_contour(wks, to_plot_not_robust(:, :), resb) + overlay(plots(1), plot2) + end if + + end if + + gsn_panel(wks, plots(plotind), plotsperline, pres) + + ; add meta data to plot (for reporting) + + caption = "Multi model values, from top left to bottom right: " \ + + "mean, bias" + if (plot_abs_diff) then + caption = caption + ", absolute error" + end if + if (plot_rms_diff) then + caption = caption + ", root mean square error" + end if + if (plot_rel_diff) then + caption = caption + ", relative error" + end if + + ; ########################################### + ; output to netCDF + ; ########################################### + + nc_filename = work_dir + "model_bias_" + var0 + "_annualclim_" \ + + input_file_info[1]@project + ".nc" + + nc_filename@existing = "overwrite" + + mmdata@var = var0 + "_mean" + mmdata@long_name = var0 + " (multi-model mean)" + nc_outfile = ncdf_write(mmdata, nc_filename) + + nc_filename@existing = "append" + + refdata@var = var0 + "_ref" + refdata@long_name = var0 + " (reference data)" + nc_outfile = ncdf_write(refdata, nc_filename) + + diff@var = var0 + "_bias" + diff@long_name = var0 + " (multi-model bias)" + nc_outfile = ncdf_write(diff, nc_filename) + + if (isvar("absdiff")) then + absdiff@var = var0 + "_abs_bias" + absdiff@long_name = var0 + " (multi-model absolute bias)" + nc_outfile = ncdf_write(absdiff, nc_filename) + end if + + if (isvar("rmsdiff")) then + rmsdiff@var = var0 + "_rms_bias" + rmsdiff@long_name = var0 + " (multi-model root mean square difference)" + nc_outfile = ncdf_write(rmsdiff, nc_filename) + end if + + if (isvar("reldiff")) then + reldiff@var = var0 + "_rel_bias" + reldiff@long_name = var0 + " (multi-model relative bias)" + reldiff@units = reldiff@units + nc_outfile = ncdf_write(reldiff, nc_filename) + end if + + if (stippandhatch) then + to_plot_confl_sign@diag_script = (/DIAG_SCRIPT/) + to_plot_confl_sign@var = "confl_sign" + to_plot_confl_sign@var_long_name = "more than 66% of models show change " \ + + "greater than variability threshold" \ + + " and <80% agree on sign of change" + to_plot_confl_sign@units = 1 + ncdf_outfile = ncdf_write(to_plot_confl_sign, nc_filename) + + to_plot_not_robust@diag_script = (/DIAG_SCRIPT/) + to_plot_not_robust@var = "not_robust" + to_plot_not_robust@var_long_name = "more than 66% of models show change " \ + + "greater than variability threshold" + to_plot_not_robust@units = 1 + ncdf_outfile = ncdf_write(to_plot_not_robust, nc_filename) + end if + + ; ------------------------------------------------------------------------ + ; write provenance to netcdf output and plot file(s) (mean) + ; ------------------------------------------------------------------------ + + 
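+  ; log_provenance argument order: netCDF file, plot file, caption,
+  ; statistics, domain, plot type, authors, references, ancestor files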
authors = (/"bock_lisa"/) + statistics = (/"clim", "diff"/) + domain = ("global") + plottype = ("geo") + references = (/"eyring21ipcc"/) + prov_caption = caption + " for variable " + var0 \ + + ", reference = " + names(ref_ind) + "." + infiles := array_append_record(infiles, natvar_file, 0) + + log_provenance(nc_filename, plotfile, prov_caption, statistics, \ + domain, plottype, authors, references, infiles) + + leave_msg(DIAG_SCRIPT, "") + +end diff --git a/esmvaltool/diag_scripts/ipcc_ar6/percentiles.ncl b/esmvaltool/diag_scripts/ipcc_ar6/percentiles.ncl new file mode 100644 index 0000000000..449b56d3f4 --- /dev/null +++ b/esmvaltool/diag_scripts/ipcc_ar6/percentiles.ncl @@ -0,0 +1,59 @@ +function weighted_mean(x, w) + local x, w, y +begin + y = dim_sum_n(x*conform(x, w, 0), 0) + y = y / dim_sum_n(where(ismissing(x), w@_FillValue, conform(x, w, 0)), 0) + return(y) +end + +function weighted_percentile(x, w, t) +local x, w, xx, ww, wsum, j, k, dims, idx +begin + xx = x + dims = dimsizes(xx) + if (dimsizes(dims).eq.1) then + idx = dim_pqsort(xx, 2) + wsum = sum(where(ismissing(x), w@_FillValue, w)) + ww = 0. + do j = 0, dims(0)-1 + if (.not.ismissing(xx(j))) then + ww = ww + w(idx(j)) + if (ww/wsum.eq.t) then + y = xx(j) + break + elseif (ww/wsum.gt.t) then + if (j.eq.0) then + y = xx(j) + else + y = (xx(j) + xx(j-1))/2. + end if + break + end if + end if + end do + else + y = new(dims(1), typeof(x)) + idx = dim_pqsort_n(xx, 2, 0) + do k = 0, dims(1)-1 + wsum = sum(where(ismissing(x(:, k)), w@_FillValue, w)) + ww = 0. + do j = 0, dims(0)-1 + if (.not.ismissing(xx(j, k))) then + ww = ww + w(idx(j, k)) + if (ww/wsum.eq.t) then + y(k) = xx(j, k) + break + elseif (ww/wsum.gt.t) then + if (j .gt. 0) then + y(k) = (xx(j, k) + xx(j-1, k))/2. + else + y(k) = xx(j, k) + end if + break + end if + end if + end do + end do + end if + return(y) +end diff --git a/esmvaltool/diag_scripts/ipcc_ar6/precip_anom.ncl b/esmvaltool/diag_scripts/ipcc_ar6/precip_anom.ncl new file mode 100644 index 0000000000..15a476bd4e --- /dev/null +++ b/esmvaltool/diag_scripts/ipcc_ar6/precip_anom.ncl @@ -0,0 +1,702 @@ +; ############################################################################# +; PLOTS TIME SERIES OF PRECIPITATION ANOMALY +; Authors: Lisa Bock (DLR, Germany) +; ############################################################################# +; +; Description +; This script is for plotting a time series of the global and regional +; anomaly of precipitation. 
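+; The multi-model mean and the 5-95% ranges shown in the panels are
+; computed with the helpers loaded from ipcc_ar6/percentiles.ncl. A
+; minimal usage sketch (hypothetical values):
+;   x = (/1., 3., 2., 4./)    ; one value per model
+;   w = (/1., 1., 1., 1./)    ; equal weights
+;   p95 = weighted_percentile(x, w, 0.95)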
+; +; Required diag_script_info attributes (diagnostic specific) +; panels: list of variables plotted in each panel +; start_year: start of time coordinate +; end_year: end of time coordinate +; +; Optional diag_script_info attributes (diagnostic specific) +; anomaly: true if anomaly should be calculated +; ref_start: start year of reference period for anomalies +; ref_end: end year of reference period for anomalies +; ref_mask: if true, model fields will be masked by reference fields +; region: name of domain +; plot_units: variable unit for plotting +; header: if true, region name as header +; stat: statistics for multi model nc-file (MinMax,5-95,10-90) +; y_min: set min of y-axis +; y_max: set max of y-axis +; +; Caveats +; +; Modification history +; 20190911-bock_lisa: written +; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/latlon.ncl" +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/scaling.ncl" +load "$diag_scripts/shared/ensemble.ncl" + +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/xy_line.ncl" + +load "$diag_scripts/ipcc_ar6/percentiles.ncl" + + +begin + + enter_msg(DIAG_SCRIPT, "") + + ; Check required diag_script_info attributes + exit_if_missing_atts(diag_script_info, (/"panels", "start_year", \ + "end_year"/)) + + ; Set default values for non-required diag_script_info attributes + set_default_att(diag_script_info, "anomaly", True) + set_default_att(diag_script_info, "ref_mask", False) + set_default_att(diag_script_info, "region", "Global") + set_default_att(diag_script_info, "header", False) + set_default_att(diag_script_info, "stat", "MinMax") + + ; Create output directories + plot_dir = config_user_info@plot_dir + system("mkdir -p " + plot_dir) + out_path = config_user_info@work_dir + system("mkdir -p " + out_path) + + if (diag_script_info@anomaly) then + anom = "anom" + end if + + ; Plot file type + file_type = config_user_info@output_file_type + if (ismissing(file_type)) then + file_type = "ps" + end if + + ; Determine time range + start_year = diag_script_info@start_year + end_year = diag_script_info@end_year + all_years = ispan(start_year, end_year, 1) + + ; Create time coordinate + ntime = end_year - start_year + 1 + time = new(ntime, integer) + time = ispan(start_year, end_year, 1) + + ; ************************************* + ; General settings for the plot + ; ************************************* + + ; Define workstation + outfile = config_user_info@plot_dir + "precip_" + anom + "_" \ + + start_year + "-" + end_year + wks = gsn_open_wks(file_type, outfile) + + ; Set resources + res = True + res@gsnDraw = False + res@gsnFrame = False + + res@vpXF = 0.05 + res@vpYF = 0.7 + res@vpHeightF = 0.4 + res@vpWidthF = 0.7 + res@pmLegendDisplayMode = "Never" + res@tmYLLabelFontHeightF = 0.03 + res@tmXBLabelFontHeightF = 0.03 + res@tiXAxisFontHeightF = 0.018 + res@tiYAxisFontHeightF = 0.018 + + xmin = start_year + xmax = end_year + res@trXMinF = xmin + res@trXMaxF = xmax + res@tmXBMode = "Explicit" + if (xmax - xmin.gt.20) then + res@tmXBValues = ispan(xmin, xmax, 10) + res@tmXBLabels = ispan(xmin, xmax, 10) + res@tmXBMinorValues = ispan(xmin, xmax, 5) + else + res@tmXBValues = ispan(xmin, xmax, 5) + res@tmXBLabels = ispan(xmin, xmax, 5) + res@tmXBMinorValues = ispan(xmin, xmax, 1) + end if + + res@tmXBMode = "Manual" + res@tmXBTickSpacingF = 10 + + if (isatt(diag_script_info, "y_min")) then + 
res@trYMinF = diag_script_info@y_min + end if + if (isatt(diag_script_info, "y_max")) then + res@trYMaxF = diag_script_info@y_max + end if + res@tiYAxisOn = True + res@tiXAxisString = "" + + resb = True + resb@vpWidthF = 0.2 + resb@trXMinF = -0.8 + resb@trXMaxF = 2.5 + resb@trYMinF = -0.002 + resb@trYMaxF = 0.003 + resb@tmXTOn = False + resb@tmXBOn = False + resb@tmYRBorderOn = True + resb@tmXTBorderOn = True + resb@tmYROn = True + resb@tmYUseLeft = False + resb@tmYRLabelsOn = True + resb@tmYRLabelFontHeightF = 0.03 + resb@tiYAxisOn = True + resb@tiYAxisSide = "Right" + resb@tiYAxisAngleF = 90. + resb@tiYAxisFontHeightF = 0.018 + + llres = True + llres@gsLineThicknessF = 2.5 + + cmap = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_damip_line.rgb") + opti = True + opti@boxColors = (/"peru", "darkgreen"/) + + ; ------------------------------------------------------------------ + ; Panel settings + ; ------------------------------------------------------------------ + + nvar = ListCount(variable_info) + + panels = diag_script_info@panels + tmp := dimsizes(panels) + if(dimsizes(tmp).gt.1) then + npanels = tmp(0) + nexp = tmp(1) + else + npanels = 1 + nexp = tmp(0) + end if + delete(tmp) + plots = new((/npanels/), graphic) + + ; ------------------------------------------------------------------ + ; Loop over panels + ; ------------------------------------------------------------------ + + do ivar = 0, npanels-1 + + ; create array for statistics + model_arr_stat = new((/3, 3, ntime/), double) + model_arr_stat!0 = "exp" + model_arr_stat!1 = "stat" + model_arr_stat!2 = "time" + model_arr_stat&exp = (/"exp1", "exp2", "ref"/) + model_arr_stat&time = time + model_arr_stat&stat = (/"mean", "min", "max"/) + + model_arr_ref = new((/3, ntime/), double) + model_arr_ref!0 = "ref" + model_arr_ref!1 = "time" + model_arr_ref&ref = (/"ref1", "ref2", "ref3"/) + + model_arr_box = new((/2, 5/), double) + model_arr_box!0 = "exp" + model_arr_box!1 = "stat" + model_arr_box&exp = (/"exp1", "exp2"/) + + do iexp = 0, 1 + + if(npanels .gt. 1) then + var = panels(ivar, iexp) + else + var = panels(iexp) + end if + + log_info("processing " + var) + + ; Retrieve metadata item + atts = True + atts@variable_group = var + info_items = select_metadata_by_atts(input_file_info, atts) + delete(atts) + + var0 = info_items[0]@short_name + exp0 = info_items[0]@exp + datasetnames := metadata_att_as_array(info_items, "dataset") + ensembles := metadata_att_as_array(info_items, "ensemble") + dim_MOD = ListCount(info_items) + + ; get multi-model mean index + mmm_ind = ind(datasetnames .eq. "MultiModelMean") + + ; Set index of the reference dataset and read it + if (isatt(info_items[0], "reference_dataset")) then + ref_ind = ind(datasetnames.eq.info_items[0]@reference_dataset) + A_ref = read_data(info_items[ref_ind]) + end if + + ; Anomaly + if (isdefined(anom)) then + if (isatt(diag_script_info, "ref_start") .and. \ + isatt(diag_script_info, "ref_end")) then + ref_start = diag_script_info@ref_start + ref_end = diag_script_info@ref_end + if (ref_start.lt.start_year) then + ref_start = start_year + end if + if ((ref_start.lt.start_year) .or. (ref_end.gt.end_year) .or. 
\ + (ref_end.lt.ref_start)) then + error_msg("f", DIAG_SCRIPT, "", \ + "period for reference years is not properly defined") + end if + else + error_msg("f", DIAG_SCRIPT, "", \ + "period for reference years is not defined " + \ + "(needed for anomaly)") + end if + end if + + ; Create model array + model_arr = new((/dim_MOD, ntime/), double) + model_arr!0 = "model" + model_arr!1 = "time" + model_arr&model = datasetnames + model_arr&time = time + model_arr@_FillValue = 1e+20 + model_arr = model_arr@_FillValue + + model_arr_trend := new(dim_MOD, double) + model_arr_trend!0 = "model" + model_arr_trend&model = datasetnames + model_arr_trend@_FillValue = 1e+20 + model_arr_trend = model_arr_trend@_FillValue + + ; Loop over models + do imod = 0, dim_MOD - 1 + + log_info("Process dataset: " + datasetnames(imod)) + + ; Read data + A0 = read_data(info_items[imod]) + dnames = getVarDimNames(A0) + + ; Convert units for plotting (if required) + if (isatt(diag_script_info, "plot_units")) then + A0 = convert_units(A0, diag_script_info@plot_units) + end if + + ; masking with reference dataset + if (isatt(info_items[0], "reference_dataset")) then + if (diag_script_info@ref_mask .and. imod .ne. ref_ind) then + ; Determine start/end year + start_year = info_items[imod]@start_year + end_year = info_items[imod]@end_year + A_ref_mask = time_operations(A_ref, start_year, \ + end_year, "extract", "", 0) + A0 = where(A_ref_mask.eq.A_ref_mask@_FillValue, \ + A_ref_mask@_FillValue, A0) + delete(A_ref_mask) + end if + end if + + ; Anomaly + if (anom .eq. "anom") then + if (ref_start.lt.start_year) then + ref_start = start_year + end if + tmp = time_operations(A0, ref_start, ref_end, "extract", "", 0) + A0_monavg_ref = time_operations(tmp, ref_start, ref_end, "average", \ + "monthlyclim", True) + delete(tmp) + + ; calculate anomaly for each grid point + do i = 0, dimsizes(A0&time) - 1 + A0(i, :, :) = A0(i, :, :) - A0_monavg_ref(mod(i, 12), :, :) + end do + + delete(A0_monavg_ref) + + end if + + ; Calculate time average of dataset + A0_timavg = time_operations(A0, -1, -1, "average", \ + "yearly", True) + delete(A0) + + ; calculate global mean of complete time series + procmod = area_operations(A0_timavg, -90., 90., 0., 360., \ + "average", True) + delete(A0_timavg) + + ; Match time coordinate + date = procmod&year + idx1 = ind(date(0).eq.model_arr&time) + idx2 = ind(date(dimsizes(date) - 1).eq.model_arr&time) + model_arr(imod, idx1:idx2) = (/procmod/) + if (imod.eq.0) then + copy_VarAtts(procmod, model_arr) + end if + delete(procmod) + delete(date) + + ; calculate trend + tmp = dtrend_msg_n(time, model_arr(imod, :), True, True, 0) + model_arr_trend(imod) = tmp@slope + delete(tmp) + + end do + + ; ************************************* + ; calculate means over ensemble members + ; ************************************* + + ndim := dimsizes(datasetnames) + ensemble_assign := new(ndim, integer) + ensemble_size := new(ndim, integer) + + enum = 1 + temp = datasetnames(0) + do gg = 0, ndim - 1 + ; does the model name match what's in temp? 
+ if (temp.eq.datasetnames(gg)) then + ensemble_assign(gg) = enum ; if so, assign it the same number + else + enum = enum + 1 ; if not, assign it the next number + ensemble_assign(gg) = enum + temp = datasetnames(gg) + end if + end do + ensemble_assign@model = datasetnames + delete(temp) + ensemble_assign@models = str_join(datasetnames+"/"+ensembles, ", ") + + model_arr_em := model_arr(:max(ensemble_assign)-1, :) + model_arr_em = model_arr_em@_FillValue + model_arr_trend_em := model_arr_trend(:max(ensemble_assign)-1) + model_arr_trend_em = model_arr_em@_FillValue + datasetnames_em := datasetnames(:max(ensemble_assign)-1) + + do gg = 1, max(ensemble_assign) ; calculate ensemble means + wind := ind(ensemble_assign.eq.gg) + ensemble_size(wind) = dimsizes(wind) + if (dimsizes(wind).eq.1) then + model_arr_em(gg-1, :) = (/ model_arr(wind, :) /) + model_arr_trend_em(gg-1) = (/ model_arr_trend(wind) /) + datasetnames_em(gg-1) = datasetnames(wind) + else + model_arr_em(gg-1, :) = (/ dim_avg_n(model_arr(wind, :), 0) /) + model_arr_trend_em(gg-1) = (/ dim_avg_n(model_arr_trend(wind), 0) /) + datasetnames_em(gg-1) = datasetnames(wind(0)) + end if + end do + + ; -------------------------------------------------------------------- + ; calculate multi model statistics + ; -------------------------------------------------------------------- + + if (isatt(info_items[0], "reference_dataset")) then + ind_mod := ind(datasetnames_em.ne."MultiModelMean" .and. \ + datasetnames_em.ne.info_items[0]@reference_dataset \ + .and. datasetnames_em.ne."GHCN" .and. \ + datasetnames_em.ne."GPCP-SG" .and. \ + datasetnames_em.ne."CRU") + else + ind_mod := ind(datasetnames_em.ne."MultiModelMean" .and. \ + datasetnames_em.ne."GHCN" .and. \ + datasetnames_em.ne."GPCP-SG" .and. \ + datasetnames_em.ne."CRU") + end if + + model_arr_stat&exp(iexp) = var + weight := new(dimsizes(ind_mod), float) + weight = 1 + weight@_FillValue = 1e+20 + model_arr_stat(iexp, 0, :) = (/weighted_mean(model_arr_em(ind_mod, :), \ + weight)/) + model_arr_stat(iexp, 1, :) = \ + (/weighted_percentile(model_arr_em(ind_mod, :), weight, 0.05)/) + model_arr_stat(iexp, 2, :) = \ + (/weighted_percentile(model_arr_em(ind_mod, :), weight, 0.95)/) + + if (iexp .eq. 1 .and. isatt(info_items[0], "reference_dataset")) then + ind_ref = new(3, integer) + ind_ref(0) = ind(datasetnames_em .eq. "GHCN") + ind_ref(1) = ind(datasetnames_em .eq. "GPCP-SG") + ind_ref(2) = ind(datasetnames_em .eq. "CRU") + model_arr_ref = (/model_arr_em(ind_ref, :)/) + end if + + delete(model_arr) + delete(A_ref) + + ; -------------------------------------------------------------------- + ; Low pass filter + ; -------------------------------------------------------------------- + + ; create filter + nwt = 9 ; number of weights + fca = 0.2 + nsigma = 1. + wgt = filwgts_lanczos(nwt, 0, fca, 0, nsigma) + + ; apply filter + model_arr_stat(iexp, 0, :) = \ + wgt_runave_n_Wrap(model_arr_stat(iexp, 0, :), wgt, 1, 0) + model_arr_stat(iexp, 1, :) = \ + wgt_runave_n_Wrap(model_arr_stat(iexp, 1, :), wgt, 1, 0) + model_arr_stat(iexp, 2, :) = \ + wgt_runave_n_Wrap(model_arr_stat(iexp, 2, :), wgt, 1, 0) + if (iexp .eq. 0 .and. 
isatt(info_items[0], "reference_dataset")) then + model_arr_ref(0, :) = wgt_runave_n_Wrap(model_arr_ref(0, :), wgt, 1, 0) + model_arr_ref(1, :) = wgt_runave_n_Wrap(model_arr_ref(1, :), wgt, 1, 0) + model_arr_ref(2, :) = wgt_runave_n_Wrap(model_arr_ref(2, :), wgt, 1, 0) + + model_arr_stat(2, :, :) = (/model_arr_ref(:, :)/) + end if + + ; -------------------------------------------------------------------- + ; calculate statistics for boxplot of trends + ; -------------------------------------------------------------------- + + model_arr_box&exp(iexp) = var + + weight := new(dimsizes(ind_mod), float) + weight = 1 + weight@_FillValue = 1e+20 + + model_arr_box(iexp, 2) = \ + weighted_mean(model_arr_trend_em(ind_mod), weight) + model_arr_box(iexp, 0) = \ + weighted_percentile(model_arr_trend_em(ind_mod), weight, 0.05) + model_arr_box(iexp, 1) = \ + weighted_percentile(model_arr_trend_em(ind_mod), weight, 0.25) + model_arr_box(iexp, 3) = \ + weighted_percentile(model_arr_trend_em(ind_mod), weight, 0.75) + model_arr_box(iexp, 4) = \ + weighted_percentile(model_arr_trend_em(ind_mod), weight, 0.95) + + end do + + model_arr_stat(2, :, :) = (/model_arr_ref(:, :)/) + + ; ************************************* + ; output to NetCDF + ; ************************************* + + out_path1 = out_path + "precip_" + anom + "_" + ivar + ".nc" + model_arr_stat@ncdf = out_path + model_arr_stat@diag_script = DIAG_SCRIPT + model_arr_stat@var = var0 + ncdf_outfile = ncdf_write(model_arr_stat, out_path1) + + ; ************************************* + ; Plotting + ; ************************************* + + cmap = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_damip_line.rgb") + + res@xyDashPatterns := (/0, 0/) + res@xyLineThicknesses := (/3, 3/) + res@xyLineColors := cmap(:, :) + + ymax = max((/max(model_arr_stat), max(model_arr_ref)/)) + ymin = min((/min(model_arr_stat), min(model_arr_ref)/)) + res@trYMaxF := max((/0.2, ymax + 0.05 * (ymax - ymin)/)) + res@trYMinF := min((/-0.2, ymin - 0.05 * (ymax - ymin)/)) + + if (isatt(diag_script_info, "header")) then + res@tiMainString = diag_script_info@header(ivar) + else + res@tiMainString = "" + end if + + if (isatt(diag_script_info, "plot_units")) then + units = diag_script_info@plot_units + else + units = info_items[0]@units + end if + + varname = "Precipitation" + + if (anom .eq. 
"anom") then + res@tiYAxisString = varname + " Anomaly" + " (" + units + ")" + else + res@tiYAxisString = varname + " (" + units + ")" + end if + + plots(ivar) = gsn_csm_xy(wks, time, model_arr_stat(0:1, 0, :), res) + + res@pmLegendDisplayMode = "Never" + + ; --------------------------------------------------------------------- + ; Add multi model statistics + ; --------------------------------------------------------------------- + res_stat = True + res_stat@gsnXYFillOpacities = 0.2 + cmap = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_damip_shading.rgb") + copy_VarMeta(res, res_stat) + delete(res_stat@xyLineColors) + res_stat@gsnXYFillColors = cmap(0, :) + res_stat@xyLineColor := cmap(0, :) + res_stat@xyLineThicknesses := (/1, 1/) + shading_plot = gsn_csm_xy(wks, time, model_arr_stat(0, 1:2, :), \ + res_stat) + overlay(plots(ivar), shading_plot) + + res_stat@gsnXYFillColors = cmap(1, :) + res_stat@xyLineColor = cmap(1, :) + shading_plot = gsn_csm_xy(wks, time, model_arr_stat(1, 1:2, :), \ + res_stat) + overlay(plots(ivar), shading_plot) + + ; --------------------------------------------------------------------- + ; add reference datasets + ; --------------------------------------------------------------------- + res@xyDashPatterns := (/0, 1, 2, 3/) + res@xyLineThicknesses := (/3, 3, 3, 3/) + res@xyLineColors := "black" ; change line color + + ref_p = gsn_csm_xy(wks, time, model_arr_ref, res) ; create plot + overlay(plots(ivar), ref_p) + + ; --------------------------------------------------------------------- + ; Create legend + ; --------------------------------------------------------------------- + if (ivar .eq. 0) then + res_text = True ; text mods desired + res_text@txFontHeightF = 0.018 ; change text size + res_text@txJust = "CenterLeft" ; text justification + + res_lines = True ; polyline mods desired + res_lines@gsLineDashPattern = 0. ; solid line + res_lines@gsLineThicknessF = 4. 
; line thicker + cmap = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_damip_line.rgb") + res_lines@gsLineColor = cmap(0, :) + xx = (/1975, 1981/) + yy = (/-.150, -.15/) + dum1 = gsn_add_polyline(wks, plots(ivar), xx, yy, res_lines) + dum2 = gsn_add_text(wks, plots(ivar), "Anthropogenic + Natural",\ + 1983, -.150, res_text) + + yy = (/-.18, -.18/) + res_lines@gsLineColor = cmap(1, :) + dum3 = gsn_add_polyline(wks, plots(ivar), xx, yy, res_lines) + dum4 = gsn_add_text(wks, plots(ivar), "Natural", 1983, -.18, res_text) + + xx = (/1952, 1960/) + yy = (/-.12, -.12/) + res_lines@gsLineColor := "black" ; change to black + res_lines@gsLineDashPattern := 0 + dum5 = gsn_add_polyline(wks, plots(ivar), xx, yy, res_lines) + dum6 = gsn_add_text(wks, plots(ivar), datasetnames(ind_ref(0)), 1962, \ + -.12, res_text) + yy = (/-.15, -.15/) + res_lines@gsLineDashPattern := 1 + dum7 = gsn_add_polyline(wks, plots(ivar), xx, yy, res_lines) + dum8 = gsn_add_text(wks, plots(ivar), datasetnames(ind_ref(1)), 1962, \ + -.15, res_text) + yy = (/-.18, -.18/) + res_lines@gsLineDashPattern := 2 + dum9 = gsn_add_polyline(wks, plots(ivar), xx, yy, res_lines) + dum10 = gsn_add_text(wks, plots(ivar), datasetnames(ind_ref(2)), \ + 1962, -.18, res_text) + end if + + ; --------------------------------------------------------------------- + ; Add inset with trend boxplot + ; --------------------------------------------------------------------- + + resb@tiYAxisString = "Precipitation Trend (" + units + "/yr)" + + box = boxplot(wks, (/0, 2/), model_arr_box, opti, resb, llres) + + getvalues box + "trXMinF" : bxmin + "trYMinF" : bymin + "trXMaxF" : bxmax + "trYMaxF" : bymax + end getvalues + + xbox1 = (/bxmin, bxmax, bxmax, bxmin, bxmin/) + ybox1 = (/bymin, bymin, bymax, bymax, bymin/) + + gnres = True + gnres@gsLineColor = "black" + gnres@gsLineThicknessF = 2.0 + gnres@gsFillColor = "Gray70" + gnres@gsFillOpacityF = 0.1 + gnres@tfPolyDrawOrder = "PreDraw" + plots@$unique_string("box")$ = gsn_add_polygon(wks, box, xbox1, ybox1, \ + gnres) + + ; add reference dataset + gnres@gsLineThicknessF = 4.0 + gnres@gsLineDashPattern := 0 + plots@$unique_string("box")$ = \ + gsn_add_polyline(wks, box, (/bxmin, bxmax/), \ + (/model_arr_trend(ind_ref(0)), \ + model_arr_trend(ind_ref(0))/), gnres) + gnres@gsLineDashPattern := 1 + plots@$unique_string("box")$ = \ + gsn_add_polyline(wks, box, (/bxmin, bxmax/), \ + (/model_arr_trend(ind_ref(1)), \ + model_arr_trend(ind_ref(1))/), gnres) + gnres@gsLineDashPattern := 2 + plots@$unique_string("box")$ = \ + gsn_add_polyline(wks, box, (/bxmin, bxmax/), \ + (/model_arr_trend(ind_ref(2)), \ + model_arr_trend(ind_ref(2))/), gnres) + + newplot = gsn_attach_plots(plots(ivar), box, res, resb) + + end do + + ; Plotting panels + + pres = True + + pres@gsnPanelMainString = "Precipitation Anomaly" + + pres@gsnPanelRowSpec = True + pres@gsnPanelCenter = False + + pres@gsnPanelFigureStrings = (/"a)", "c)", "d)", "e)", "f)"/) + pres@gsnPanelFigureStringsPerimOn = False + pres@gsnPanelFigureStringsBackgroundFillColor = "transparent" + pres@gsnPanelFigureStringsJust = "TopLeft" + pres@gsnPanelFigureStringsFontHeightF = 0.016 + + gsn_panel(wks, plots, (/1, 2, 2/), pres) + + log_info(" wrote " + outfile + "." 
+ file_type)
+
+  ; ***************************************
+  ; add meta data to plot (for reporting)
+  ; ***************************************
+
+  out_path1 = out_path + "precip_" + anom + "_0" + ".nc"
+
+  domain = "reg"
+  statistics = (/"anomaly", "trend"/)
+  caption = "Global and zonal average annual mean precipitation (mm day-1)"
+  contrib_authors = (/"bock_lisa"/)
+
+  ; Call provenance logger
+  log_provenance(out_path1, \
+                 outfile + "." + file_type, \
+                 caption, \
+                 statistics, \
+                 domain, \
+                 "times", \
+                 contrib_authors, \
+                 (/"eyring21ipcc"/), \
+                 metadata_att_as_array(input_file_info, "filename"))
+
+  leave_msg(DIAG_SCRIPT, "")
+
+end
diff --git a/esmvaltool/diag_scripts/ipcc_ar6/tas_anom.ncl b/esmvaltool/diag_scripts/ipcc_ar6/tas_anom.ncl
new file mode 100644
index 0000000000..f052b80ae0
--- /dev/null
+++ b/esmvaltool/diag_scripts/ipcc_ar6/tas_anom.ncl
@@ -0,0 +1,683 @@
+; #############################################################################
+; PLOTS TIME SERIES OF NEAR-SURFACE AIR TEMPERATURE ANOMALY
+; Authors: Lisa Bock (DLR, Germany)
+; #############################################################################
+;
+; Description
+;   This script plots a time series of the global mean surface temperature
+;   (GMST) anomaly.
+;
+; Required diag_script_info attributes (diagnostic specific)
+;   styleset: as in diag_scripts/shared/plot/style.ncl functions
+;   ref_start: start year of reference period for anomalies
+;   ref_end: end year of reference period for anomalies
+;
+; Optional diag_script_info attributes (diagnostic specific)
+;   blending: if true, calculates blended surface temperature
+;   ref_value: if true, right panel with mean values is attached
+;   ref_mask: if true, model fields will be masked by reference fields
+;   region: name of domain
+;   plot_units: variable unit for plotting
+;   y_min: set min of y-axis
+;   y_max: set max of y-axis
+;   header: if true, region name as header
+;   volcanoes: if true, adds volcanoes to the plot
+;   write_stat: if true, writes multi-model statistics to a netCDF file
+;
+; Caveats
+;
+; Modification history
+;   20191030-bock_lisa: written.
+;
+; #############################################################################
+
+load "$diag_scripts/../interface_scripts/interface.ncl"
+
+load "$diag_scripts/shared/latlon.ncl"
+load "$diag_scripts/shared/statistics.ncl"
+load "$diag_scripts/shared/scaling.ncl"
+load "$diag_scripts/shared/ensemble.ncl"
+
+load "$diag_scripts/shared/plot/style.ncl"
+load "$diag_scripts/shared/plot/xy_line.ncl"
+
+load "$diag_scripts/ipcc_ar6/percentiles.ncl"
+
+begin
+
+  enter_msg(DIAG_SCRIPT, "")
+
+  ; Check for required settings
+  exit_if_missing_atts(diag_script_info, "styleset")
+
+  ; Set default values for non-required diag_script_info attributes
+  set_default_att(diag_script_info, "blending", False)
+  set_default_att(diag_script_info, "ref_value", True)
+  set_default_att(diag_script_info, "ref_mask", False)
+  set_default_att(diag_script_info, "region", "Global")
+  set_default_att(diag_script_info, "header", False)
+  set_default_att(diag_script_info, "volcanoes", False)
+  set_default_att(diag_script_info, "write_stat", False)
+
+  var = metadata_att_as_array(variable_info, "short_name")
+
+  info_tas = select_metadata_by_name(input_file_info, "tas")
+  project0 = info_tas[0]@project
+  exp0 = info_tas[0]@exp
+
+  datasetnames = metadata_att_as_array(info_tas, "dataset")
+  ensembles = metadata_att_as_array(info_tas, "ensemble")
+  dim_MOD = ListCount(info_tas)
+
+  if (any(var .eq.
"tasa")) then + info_tasa = select_metadata_by_name(input_file_info, "tasa") + end if + + if (diag_script_info@blending) then + info_tos = select_metadata_by_name(input_file_info, "tos") + if (project0 .eq. "CMIP6") then + info_sic = select_metadata_by_name(input_file_info, "siconc") + else + info_sic = select_metadata_by_name(input_file_info, "sic") + end if + info_sftlf = select_metadata_by_name(input_file_info, "sftlf") + end if + + ; Create output directories + plot_dir = config_user_info@plot_dir + system("mkdir -p " + plot_dir) + out_path = config_user_info@work_dir + system("mkdir -p " + out_path) + + ; Plot file type + file_type = config_user_info@output_file_type + if (ismissing(file_type)) then + file_type = "ps" + end if + + ; Determine time range + start_year = min(metadata_att_as_array(info_tas, "start_year")) + end_year = max(metadata_att_as_array(info_tas, "end_year")) + all_years = ispan(start_year, end_year, 1) + + ; Create time coordinate + ntime = end_year - start_year + 1 + time = new(ntime, integer) + time = ispan(start_year, end_year, 1) + + anom_ref = new((/dim_MOD/), double) + anom_ref!0 = "model" + anom_ref&model = datasetnames + + ref_start = diag_script_info@ref_start + ref_end = diag_script_info@ref_end + + ; Set index of the reference dataset and read it + if (isvar("info_tasa")) then + ref_ind = -1 + ref_dataset = metadata_att_as_array(info_tasa, "dataset") + datasetnames := array_append_record(datasetnames, ref_dataset, 0) + ensembles := array_append_record(ensembles, "1", 0) + anom_ref := array_append_record(anom_ref, anom_ref@_FillValue, 0) + anom_ref&model = datasetnames + ListAppend(info_tas, info_tasa[0]) + tas_ref = read_data(info_tasa[0]) + ndim = dimsizes(tas_ref) + if (ref_start .ne. 1961 .or. ref_end .ne. 1990) then + tmp = time_operations(tas_ref, ref_start, ref_end, "average", \ + "monthlyclim", True) + do i = 0, ndim(0) - 1 + tas_ref(i, :, :) = tas_ref(i, :, :) - tmp(mod(i, 12), :, :) + end do + delete(tmp) + end if + delete(ndim) + ; calculate field mean + gmst_mon := area_operations(tas_ref, -90., 90., \ + 0., 360., "average", True) + ; yearly average + diag_ref := time_operations(gmst_mon, -1, -1, "average", \ + "yearly", True) + elseif (isatt(info_tas[0], "reference_dataset")) + ref_dataset = metadata_att_as_array(info_tas, "reference_dataset") + ref_ind = ind(datasetnames.eq.ref_dataset(0)) + tas_ref = read_data(info_tas[ref_ind]) + else + error_msg("f", DIAG_SCRIPT, "", \ + "no reference dataset defined") + end if + + ; Create model array + if (ref_ind .ge. 0) then + model_arr = new((/dim_MOD, ntime/), double) + else + model_arr = new((/dim_MOD+1, ntime/), double) + end if + model_arr!0 = "model" + model_arr!1 = "time" + model_arr&model = datasetnames + model_arr&time = time + model_arr@_FillValue = 1e+20 + model_arr = model_arr@_FillValue + model_arr@units = "degC" + model_arr@ref = ref_dataset + delete(time) + + if (ref_ind .ge. 0) then + model_fullspan = new(dim_MOD, logical) + else + model_fullspan = new(dim_MOD+1, logical) + end if + model_fullspan = 1 + + ; get multi-model mean index + mmm_ind = ind(datasetnames .eq. "MultiModelMean") + + if (ref_ind .eq. -1) then + model_arr(dim_MOD, :) = (/diag_ref/) + end if + + model_arr_tas = model_arr + + ; Loop over models + do imod = 0, dim_MOD - 1 + + log_info("Process dataset: " + datasetnames(imod)) + + ; Read data + tas0 := read_data(info_tas[imod]) + dnames = getVarDimNames(tas0) + ndim = dimsizes(tas0) + + ; Convert units from K to degC + if (info_tas[imod]@units .eq. 
"K") then + tas0 = convert_units(tas0, "degC") + end if + + if (diag_script_info@blending .and. \ + all(imod .ne. (/ref_ind, mmm_ind/))) then + + ; Read data + tos0 := read_data(info_tos[imod]) + sic0 := read_data(info_sic[imod]) + sftlf0 := read_data(info_sftlf[imod]) + ndim_sftlf = dimsizes(sftlf0) + + if (info_tos[imod]@units .eq. "K") then + tos0 = convert_units(tos0, "degC") + end if + + ; force missing cells to open water/land and scale if stored as + ; percentage + sic0 = where(sic0.lt.0.0, 0.0, sic0) + sic0 = where(sic0.gt.100.0, 0.0, sic0) + if (max(sic0) .gt. 90.0) then + sic0 = 0.01 * sic0 + end if + + sftof0 = 1. - sftlf0 + sftof0 = where(sftof0.lt.0.0, 0.0, sftof0) + sftof0 = where(sftof0.gt.100.0, 0.0, sftof0) + if (max(sftof0) .gt. 90.0) then + sftof0 = 0.01 * sftof0 + end if + + ; combine land/ice masks + ndim_sic = dimsizes(sic0) + do m = 0, ndim_sic(0) - 1 + sic0(m, :, :) = (1.0 - sic0(m, :, :)) * sftof0(:, :) + end do + + ; deal with missing tos through sic + sic0 = where(tos0 .lt. -500.00, 0.0, sic0) + sic0 = where(tos0 .gt. 500.00, 0.0, sic0) + sic0 = where(tos0 .eq. tos0@_FillValue, 0.0, sic0) + + ; prepare missing + tos0 = where(abs(tos0) .gt. 500.0, tos0@_FillValue, tos0) + + end if + + ; calculate tas anomaly for each grid point + ; Determine start/end year + start_year = info_tas[imod]@start_year + end_year = info_tas[imod]@end_year + if (start_year .gt. diag_script_info@ref_start) then + ref_start = start_year + else + ref_start = diag_script_info@ref_start + end if + + base_mon = time_operations(tas0, 1961, 1990, "average", \ + "monthlyclim", True) + + tas0_anom := tas0 + do i = 0, ndim(0) - 1 + tas0_anom(i, :, :) = tas0(i, :, :) - base_mon(mod(i, 12), :, :) + end do + + if (diag_script_info@blending .and. \ + all(imod .ne. (/ref_ind, mmm_ind/))) then + + ; calculate tos anomaly for each grid point + base_mon = time_operations(tos0, 1961, 1990, "average", \ + "monthlyclim", True) + + tos0_anom := tos0 + do i = 0, ndim(0) - 1 + tos0_anom(i, :, :) = tos0(i, :, :) - base_mon(mod(i, 12), :, :) + end do + + ; blend + tas0 = tas0 * (1.0 - sic0) + tos0 * sic0 + tos0_anom = tas0_anom * (1.0 - sic0) + tos0_anom * sic0 + + ; deal with any remaining NaNs + tos0 = where(tos0.eq.tos0@_FillValue, tas0, tos0) + tos0_anom = where(tos0_anom.eq.tos0@_FillValue, tas0_anom, tos0_anom) + + end if + + ; Determine start/end year + start_year = info_tas[imod]@start_year + end_year = info_tas[imod]@end_year + + ; masking anomaly with reference dataset + if (diag_script_info@ref_mask) then + tas_ref_mask := time_operations(tas_ref, start_year, \ + end_year, "extract", "", 0) + tas0_anom = where(tas_ref_mask.eq.tas_ref_mask@_FillValue, \ + tas_ref_mask@_FillValue, tas0_anom) + end if + + ; calculate field mean + tas_mon := area_operations(tas0, -90., 90., \ + 0., 360., "average", True) + tas_mon_anom := area_operations(tas0_anom, -90., 90., \ + 0., 360., "average", True) + + ; yearly average + diag_tas := time_operations(tas_mon_anom, -1, -1, "average", \ + "yearly", True) + + if (ref_start .ne. 1961 .or. ref_end .ne. 1990) then + ref_diag_tas = avg(diag_tas(ref_start-start_year:ref_end-start_year)) + diag_tas = diag_tas - ref_diag_tas + ref_diag_tas := time_operations(tas_mon, ref_start, ref_end, "average", \ + "annualclim", True) + else + ref_diag_tas = time_operations(tas_mon, 1961, 1990, "average", \ + "annualclim", True) + end if + + ; save mean ref value + anom_ref(imod) = ref_diag_tas + + procmod := diag_tas + + if (diag_script_info@blending .and. \ + all(imod .ne. 
(/ref_ind, mmm_ind/))) then + + if (diag_script_info@ref_mask) then + ; masking with reference dataset + tos0_anom = where(tas_ref_mask.eq.tas_ref_mask@_FillValue, \ + tas_ref_mask@_FillValue, tos0_anom) + end if + + ; calculate field mean + gmst_mon := area_operations(tos0, -90., 90., \ + 0., 360., "average", True) + gmst_mon_anom := area_operations(tos0_anom, -90., 90., \ + 0., 360., "average", True) + + ; yearly average + diag := time_operations(gmst_mon_anom, -1, -1, "average", \ + "yearly", True) + + if (ref_start .ne. 1961 .or. ref_end .ne. 1990) then + ref_diag = avg(diag(ref_start-start_year:ref_end-start_year)) + diag = diag - ref_diag + ref_diag := time_operations(gmst_mon, ref_start, ref_end, "average", \ + "annualclim", True) + else + ref_diag = time_operations(gmst_mon, 1961, 1990, "average", \ + "annualclim", True) + end if + + ; save mean ref value + anom_ref(imod) = ref_diag + + procmod := diag + + end if + + ; Match time coordinate + date = procmod&year + idx1 = ind(date(0).eq.model_arr&time) + idx2 = ind(date(dimsizes(date) - 1).eq.model_arr&time) + if (idx1 .ne. 0) then + model_fullspan(imod) = 0 + end if + if (idx2 .ne. (ntime - 1)) then + model_fullspan(imod) = 0 + end if + model_arr(imod, idx1:idx2) = (/procmod/) + + delete(procmod) + delete(date) + + end do + + ; ************************************* + ; calculate means over ensemble members + ; ************************************* + + ndim := dimsizes(datasetnames) + ensemble_assign = new(ndim, integer) + ensemble_size = new(ndim, integer) + + enum = 1 + temp = datasetnames(0) + do gg = 0, ndim - 1 + ; does the model name match what's in temp? + if (temp.eq.datasetnames(gg)) then + ensemble_assign(gg) = enum ; if so, assign it the same number + else + enum = enum + 1 ; if not, assign it the next number + ensemble_assign(gg) = enum + temp = datasetnames(gg) + end if + end do + ensemble_assign@model = datasetnames + delete(temp) + ensemble_assign@models = str_join(datasetnames + "/" + ensembles, ", ") + + model_arr_em = model_arr(:max(ensemble_assign)-1, :) + model_arr_em = model_arr_em@_FillValue + anom_ref_em = anom_ref(:max(ensemble_assign)-1) + anom_ref_em = anom_ref_em@_FillValue + datasetnames_em = datasetnames(:max(ensemble_assign)-1) + model_fullspan_em = model_fullspan(:max(ensemble_assign)-1) + + do gg = 1, max(ensemble_assign) ; calculate ensemble means + wind := ind(ensemble_assign.eq.gg) + ensemble_size(wind) = dimsizes(wind) + if (dimsizes(wind).eq.1) then + model_arr_em(gg-1, :) = (/ model_arr(wind, :) /) + anom_ref_em(gg-1) = (/ anom_ref(wind) /) + datasetnames_em(gg-1) = datasetnames(wind) + model_fullspan_em(gg-1) = model_fullspan(wind) + else + model_arr_em(gg-1, :) = (/ dim_avg_n(model_arr(wind, :), 0) /) + anom_ref_em(gg-1) = (/ dim_avg_n(anom_ref(wind), 0) /) + datasetnames_em(gg-1) = datasetnames(wind(0)) + model_fullspan_em(gg-1) = model_fullspan(wind(0)) + end if + end do + + ; ************************************* + ; calculate multi-model mean + ; ************************************* + + if (ref_ind .ge. 0) then + ind_mod := ind(datasetnames_em.ne."MultiModelMean" .and. \ + model_fullspan_em .and. \ + datasetnames_em.ne.variable_info[0]@reference_dataset) + elseif (ref_ind .eq. -1) + ind_mod := ind(datasetnames_em.ne."MultiModelMean" .and. \ + model_fullspan_em .and. \ + datasetnames_em.ne.ref_dataset) + else + ind_mod := ind(datasetnames_em.ne."MultiModelMean" .and. 
\ + model_fullspan_em) + end if + model_arr(mmm_ind, :) = dim_avg_n(model_arr_em(ind_mod, :), 0) + anom_ref(mmm_ind) = avg(anom_ref_em(ind_mod)) + + ; ************************************* + ; calculate 5% and 95% quantiles + ; ************************************* + + q_05 = new(ntime, double) + q_95 = new(ntime, double) + + do itime = 0, ntime - 1 + model_num = num(.not.ismissing(model_arr_em(ind_mod, itime))) + if (ref_ind .ge. 0) then + ind_m := ind(.not.ismissing(model_arr(:, itime)) .and. \ + model_fullspan .and. \ + datasetnames.ne."MultiModelMean" .and. \ + datasetnames.ne.variable_info[0]@reference_dataset) + elseif (ref_ind .eq. -1) + ind_m := ind(.not.ismissing(model_arr(:, itime)) .and. \ + model_fullspan .and. \ + datasetnames.ne."MultiModelMean" .and. \ + datasetnames.ne.ref_dataset) + else + ind_m := ind(.not.ismissing(model_arr(:, itime)) .and. \ + model_fullspan .and. \ + datasetnames.ne."MultiModelMean") + end if + ind_sort := dim_pqsort(model_arr(ind_m, itime), 1) + count = 0. + do imodel = 0, dimsizes(ind_m) - 1 + count = count + 1./(model_num*ensemble_size(ind_m(ind_sort(imodel)))) + if(count .gt. 0.95) then + break + end if + if(ismissing(q_05(itime)) .and. count .gt. 0.05) then + q_05(itime) = model_arr(ind_m(ind_sort(imodel)), itime) + end if + q_95(itime) = model_arr(ind_m(ind_sort(imodel)), itime) + end do + end do + + ; ************************************* + ; Convert time coordinate to years (required by the plot routine) + ; ************************************* + + xmin = min(model_arr&time) + xmax = max(model_arr&time) + tmp = todouble(model_arr&time) + delete(model_arr&time) + model_arr&time = tmp + delete(tmp) + + ; ************************************* + ; output to NetCDF + ; ************************************* + + if (diag_script_info@blending) then + var0 = "gmst" + else + var0 = "gsat" + end if + + out_path1 = out_path + "tsline_gmst_anom_" + project0 + "_" \ + + exp0 + ".nc" + out_path1@existing = "overwrite" + model_arr_em@ncdf = out_path + model_arr_em@experiment = project0 + "_" + exp0 + model_arr_em@diag_script = DIAG_SCRIPT + model_arr_em@var = var0 + model_arr_em@long_name = "Global Mean Surface Temperature" + ncdf_outfile = ncdf_write(model_arr_em, out_path1) + + ; ************************************* + ; optional output with statistics + ; ************************************* + + if (diag_script_info@write_stat) then + + model_arr_stat = new((/ntime, 10/), double) + model_arr_stat!0 = "time" + model_arr_stat&time = model_arr&time + do i = 0, ntime-1 + statb = stat_dispersion(model_arr_em(ind_mod, i), False) + ; mean + model_arr_stat(i, 0) = (/statb(0)/) + ; standard deviation + model_arr_stat(i, 1) = (/statb(1)/) + ; spread + model_arr_stat(i, 2) = (/statb(14)-statb(2)/) + ; min + model_arr_stat(i, 3) = (/statb(2)/) + ; max + model_arr_stat(i, 8) = (/statb(14)/) + ; 5% quantile + model_arr_stat(i, 4) = (/statb(24)/) + ; 95% quantile + model_arr_stat(i, 7) = (/statb(25)/) + ; 10% quantile + model_arr_stat(i, 5) = (/statb(3)/) + ; 90% quantile + model_arr_stat(i, 6) = (/statb(13)/) + end do + if (isatt(variable_info[0], "reference_dataset")) then + model_arr_stat(:, 9) = (/model_arr(ref_ind, :)/) + end if + + model_arr_stat(:, 0) = (/model_arr(mmm_ind, :)/) + model_arr_stat(:, 4) = (/q_05/) + model_arr_stat(:, 7) = (/q_95/) + + out_path2 = out_path + "tsline_gmst_anom_stat.nc" + + out_path2@existing = "overwrite" + + model_arr_stat@ncdf = out_path + model_arr_stat@experiment = project0 + "_" + exp0 + model_arr_stat@diag_script = DIAG_SCRIPT 
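+    ; Note on the layout of model_arr_stat written out below (one record
+    ; per column): 0 = multi-model mean, 1 = standard deviation,
+    ; 2 = spread (max - min), 3 = minimum, 4 = 5% quantile,
+    ; 5 = 10% quantile, 6 = 90% quantile, 7 = 95% quantile, 8 = maximum,
+    ; 9 = reference dataset (if present)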
+    model_arr_stat@var = var0 + "_mean"
+    model_arr_stat@long_name = var0 + " multi-model mean"
+    ncdf_outfile = ncdf_write(model_arr_stat(:, 0), out_path2)
+
+    out_path2@existing = "append"
+
+    model_arr_stat@var = var0 + "_stddev"
+    model_arr_stat@long_name = var0 + " standard deviation"
+    ncdf_outfile = ncdf_write(model_arr_stat(:, 1), out_path2)
+
+    model_arr_stat@var = var0 + "_spread"
+    model_arr_stat@long_name = var0 + " spread"
+    ncdf_outfile = ncdf_write(model_arr_stat(:, 2), out_path2)
+
+    model_arr_stat@var = var0 + "_min"
+    model_arr_stat@long_name = var0 + " minimum"
+    ncdf_outfile = ncdf_write(model_arr_stat(:, 3), out_path2)
+
+    model_arr_stat@var = var0 + "_5quantile"
+    model_arr_stat@long_name = var0 + " 5% quantile"
+    ncdf_outfile = ncdf_write(model_arr_stat(:, 4), out_path2)
+
+    model_arr_stat@var = var0 + "_10quantile"
+    model_arr_stat@long_name = var0 + " 10% quantile"
+    ncdf_outfile = ncdf_write(model_arr_stat(:, 5), out_path2)
+
+    model_arr_stat@var = var0 + "_90quantile"
+    model_arr_stat@long_name = var0 + " 90% quantile"
+    ncdf_outfile = ncdf_write(model_arr_stat(:, 6), out_path2)
+
+    model_arr_stat@var = var0 + "_95quantile"
+    model_arr_stat@long_name = var0 + " 95% quantile"
+    ncdf_outfile = ncdf_write(model_arr_stat(:, 7), out_path2)
+
+    model_arr_stat@var = var0 + "_max"
+    model_arr_stat@long_name = var0 + " maximum"
+    ncdf_outfile = ncdf_write(model_arr_stat(:, 8), out_path2)
+
+    if (isatt(variable_info[0], "reference_dataset")) then
+      model_arr_stat@var = var0 + "_ref"
+      model_arr_stat@long_name = var0 + " reference dataset"
+      ncdf_outfile = ncdf_write(model_arr_stat(:, 9), out_path2)
+    end if
+
+  end if
+
+  ; *************************************
+  ; plotting
+  ; *************************************
+
+  ; Define workstation
+  outfile = config_user_info@plot_dir + var0 + "_" \
+    + str_sub_str(diag_script_info@region, " ", "_") \
+    + "_" + project0 + "_" + exp0 + "_anom_" \
+    + start_year + "-" + end_year
+  wks = gsn_open_wks(file_type, outfile)
+
+  ; Set resources
+  res = True
+  res@trXMinF = xmin
+  res@trXMaxF = xmax
+  ; res@trXMaxF = xmax + 0.25 * (xmax - xmin)
+  res@tmXBMode = "Explicit"
+  if (xmax - xmin.gt.20) then
+    res@tmXBValues = ispan(xmin, xmax, 10)
+    res@tmXBLabels = ispan(xmin, xmax, 10)
+    res@tmXBMinorValues = ispan(xmin, xmax, 5)
+  else
+    res@tmXBValues = ispan(xmin, xmax, 5)
+    res@tmXBLabels = ispan(xmin, xmax, 5)
+    res@tmXBMinorValues = ispan(xmin, xmax, 1)
+  end if
+  res@tmXBLabelAngleF = 45
+  res@tmXBLabelJust = "CenterRight"
+  if (isatt(diag_script_info, "y_min")) then
+    res@trYMinF = diag_script_info@y_min
+  end if
+  if (isatt(diag_script_info, "y_max")) then
+    res@trYMaxF = diag_script_info@y_max
+  end if
+
+  res@tmXBMode = "Manual"
+  res@tmXBTickSpacingF = 20
+
+  if (diag_script_info@header) then
+    res@tiMainString = diag_script_info@region
+  else
+    res@tiMainString = "Anomaly of Near-Surface Air Temperature"
+  end if
+
+  res@tiYAxisString = "Change in Temperature (~F34~0~F~ C)"
+
+  res0 = True
+  ref_start = diag_script_info@ref_start
+  ref_end = diag_script_info@ref_end
+  res0@tiYAxisString = tostring(ref_start) + "-" + tostring(ref_end) \
+    + " Mean Temperature (~F34~0~F~ C)"
+
+  xy_line_anom(wks, anom_ref, model_arr, model_arr&time, \
+               model_arr_stat, ref_start, ref_end, res, res0, info_tas)
+
+  log_info(" wrote " + outfile + "." + file_type)
+
+  ; ***************************************
+  ; add meta data to plot (for reporting)
+  ; ***************************************
+
+  if (diag_script_info@region .eq. "Global") then
"Global") then + domain = "global" + else + domain = "reg" + end if + + statistics = "anomaly" + + caption = "Time series of the " + statistics + " for Surface Temperature " \ + + "with blending and masking as Cowtan et al., 2015." + contrib_authors = (/"bock_lisa"/) + + ; Call provenance logger + log_provenance(ncdf_outfile, \ + outfile + "." + file_type, \ + caption, \ + statistics, \ + domain, \ + "times", \ + contrib_authors, \ + (/"eyring21ipcc"/), \ + metadata_att_as_array(input_file_info, "filename")) + + leave_msg(DIAG_SCRIPT, "") + +end diff --git a/esmvaltool/diag_scripts/ipcc_ar6/tas_anom_damip.ncl b/esmvaltool/diag_scripts/ipcc_ar6/tas_anom_damip.ncl new file mode 100644 index 0000000000..21cbe4992a --- /dev/null +++ b/esmvaltool/diag_scripts/ipcc_ar6/tas_anom_damip.ncl @@ -0,0 +1,624 @@ +; ############################################################################# +; PLOTS TIME SERIES +; Authors: Lisa Bock (DLR, Germany) +; ############################################################################# +; +; Description +; This script is for plotting a time series of the global mean (anomaly). +; +; Required diag_script_info attributes (diagnostic specific) +; start_year: start year in figure +; end_year: end year in figure +; panels: list of variable blocks for each panel +; +; Optional diag_script_info attributes (diagnostic specific) +; ref_start: start year of reference period for anomalies +; ref_end: end year of reference period for anomalies +; ref_mask: if true, model fields will be masked by reference fields +; plot_units: variable unit for plotting +; y-min: set min of y-axis +; y-max: set max of y-axis +; header: title for each panel +; title: name of region as part of filename +; legend: set labels for optional output of a legend in an extra file +; +; Caveats +; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/latlon.ncl" +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/scaling.ncl" +load "$diag_scripts/shared/ensemble.ncl" + +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/xy_line.ncl" + + +begin + + enter_msg(DIAG_SCRIPT, "") + + ; Check required diag_script_info attributes + exit_if_missing_atts(diag_script_info, (/"panels", "start_year", \ + "end_year"/)) + + ; Set default values for non-required diag_script_info attributes + set_default_att(diag_script_info, "anomaly", True) + set_default_att(diag_script_info, "ref_mask", False) + + if (isatt(diag_script_info, "title")) then + header = diag_script_info@title + else + header = "" + end if + + ; Create output directories + plot_dir = config_user_info@plot_dir + system("mkdir -p " + plot_dir) + out_path = config_user_info@work_dir + system("mkdir -p " + out_path) + + if (diag_script_info@anomaly) then + anom = "anom" + if (isatt(diag_script_info, "ref_start") .and. 
\ + isatt(diag_script_info, "ref_end")) then + ref_start = diag_script_info@ref_start + ref_end = diag_script_info@ref_end + else + error_msg("f", DIAG_SCRIPT, "", \ + "period for reference years is not defined " + \ + "(needed for anomaly)") + end if + else + anom = "abs" + end if + + ; Plot file type + file_type = config_user_info@output_file_type + if (ismissing(file_type)) then + file_type = "ps" + end if + + ; Determine time range + start_year = diag_script_info@start_year + end_year = diag_script_info@end_year + all_years = ispan(start_year, end_year, 1) + + ; Create time coordinate + ntime = end_year - start_year + 1 + time = new(ntime, integer) + time = ispan(start_year, end_year, 1) + + ; ************************************* + ; General settings for the plot + ; ************************************* + + ; Define workstation + outfile = config_user_info@plot_dir + "tas_" + anom + "_damip_" \ + + header + "_" + start_year \ + + "-" + end_year + wks = gsn_open_wks(file_type, outfile) + + ; Set resources + res = True + res@gsnDraw = False + res@gsnFrame = False + + res@vpXF = 0.05 + res@vpYF = 0.7 + res@vpHeightF = 0.4 + res@vpWidthF = 0.7 + res@pmLegendDisplayMode = "Never" + res@tmYRLabelFontHeightF = 0.016 + res@tmYLLabelFontHeightF = 0.016 + res@tiXAxisFontHeightF = 0.016 + res@tiYAxisFontHeightF = 0.016 + res@tmXBMajorOutwardLengthF = 0.006 + res@tmYLMajorOutwardLengthF = 0.006 + res@tmXBMinorOutwardLengthF = 0.003 + res@tmYLMinorOutwardLengthF = 0.003 + res@tmXBMajorLengthF = 0.006 + res@tmYLMajorLengthF = 0.006 + res@tmXBMinorLengthF = 0.003 + res@tmYLMinorLengthF = 0.003 + + xmin = start_year + xmax = end_year + res@trXMinF = xmin + res@trXMaxF = xmax + ; res@trXMaxF = xmax + 0.25 * (xmax - xmin) + res@tmXBMode = "Explicit" + if (xmax - xmin.gt.20) then + res@tmXBValues = ispan(xmin, xmax, 10) + res@tmXBLabels = ispan(xmin, xmax, 10) + res@tmXBMinorValues = ispan(xmin, xmax, 5) + else + res@tmXBValues = ispan(xmin, xmax, 5) + res@tmXBLabels = ispan(xmin, xmax, 5) + res@tmXBMinorValues = ispan(xmin, xmax, 1) + end if + res@tmXBLabelAngleF = 45 + res@tmXBLabelJust = "CenterRight" + + res@tmXBMode = "Manual" + res@tmXBTickSpacingF = 20 + + if (isatt(diag_script_info, "y_min")) then + res@trYMinF = diag_script_info@y_min + end if + if (isatt(diag_script_info, "y_max")) then + res@trYMaxF = diag_script_info@y_max + end if + res@tiYAxisOn = True + res@tiXAxisString = "" + res@gsnStringFontHeightF = 0.016 + + ; ------------------------------------------------------------------ + ; Panel settings + ; ------------------------------------------------------------------ + + nvar = ListCount(variable_info) + + panels = diag_script_info@panels + reference = diag_script_info@reference + tmp := dimsizes(panels) + if(dimsizes(tmp).gt.1) then + npanels = tmp(0) + nexp = tmp(1) + else + npanels = 1 + nexp = tmp(0) + end if + delete(tmp) + plots = new((/npanels/), graphic) + + ; ------------------------------------------------------------------ + ; Loop over panels + ; ------------------------------------------------------------------ + + do ivar = 0, npanels-1 + + ; ------------------------------------------------------------------ + ; Read reference dataset + ; ------------------------------------------------------------------ + + var = reference(ivar) + + log_info("processing " + var) + + ; Retrieve metadata item + atts = True + atts@variable_group = var + info_ref = select_metadata_by_atts(input_file_info, atts) + delete(atts) + + ref_dataset = metadata_att_as_array(info_ref, "dataset") + + 
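+    ; Read the reference dataset for this panel; if anomalies are
+    ; requested, its monthly climatology over ref_start-ref_end is
+    ; subtracted below, where mod(i, 12) maps each monthly time step
+    ; to the matching climatology month (this assumes the time series
+    ; starts in January)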
A_ref = read_data(info_ref[0]) + + ; Anomaly + if (anom .eq. "anom") then + tmp = time_operations(A_ref, ref_start, ref_end, "average", \ + "monthlyclim", True) + ndim := dimsizes(A_ref) + do i = 0, ndim(0) - 1 + A_ref(i, :, :) = A_ref(i, :, :) - tmp(mod(i, 12), :, :) + end do + delete(tmp) + end if + + ; yearly average + tmp := time_operations(A_ref, -1, -1, "average", \ + "yearly", True) + + ; calculate field mean + diag_ref := area_operations(tmp, -90., 90., \ + 0., 360., "average", True) + + ref_arr = new((/ntime/), double) + ref_arr!0 = "time" + ref_arr&time = time + ref_arr@_FillValue = 1e+20 + ref_arr = ref_arr@_FillValue + + ; Match time coordinate + date = diag_ref&year + idx1 = ind(date(0).eq.ref_arr&time) + idx2 = ind(date(dimsizes(date) - 1).eq.ref_arr&time) + ref_arr(idx1:idx2) = (/diag_ref/) + copy_VarAtts(diag_ref, ref_arr) + delete(diag_ref) + delete(date) + + ; ------------------------------------------------------------------ + ; Read model datasets + ; ------------------------------------------------------------------ + + ; create array for statistics + model_arr_stat = new((/5, 3, ntime/), double) + model_arr_stat!0 = "exp" + model_arr_stat!1 = "stat" + model_arr_stat!2 = "time" + model_arr_stat&exp = (/"exp1", "exp2", "exp3", "exp4", "ref"/) + model_arr_stat&time = time + model_arr_stat&stat = (/"mean", "min", "max"/) + + model_arr_stat(4, 0, :) = (/ref_arr/) + + do iexp = 0, nexp - 1 + + if(npanels .gt. 1) then + var = panels(ivar, iexp) + else + var = panels(iexp) + end if + + log_info("processing " + var) + + ; Retrieve metadata item + atts = True + atts@variable_group = var + info_items = select_metadata_by_atts(input_file_info, atts) + delete(atts) + + var0 = info_items[0]@short_name + exp0 = info_items[0]@exp + datasetnames := metadata_att_as_array(info_items, "dataset") + ensembles := metadata_att_as_array(info_items, "ensemble") + dim_MOD = ListCount(info_items) + + ; get multi-model mean index + mmm_ind = ind(datasetnames .eq. "MultiModelMean") + + ; Create model array + model_arr = new((/dim_MOD, ntime/), double) + model_arr!0 = "model" + model_arr!1 = "time" + model_arr&model = datasetnames + model_arr&time = time + model_arr@_FillValue = 1e+20 + model_arr = model_arr@_FillValue + + ; Loop over models + do imod = 0, dim_MOD - 1 + + log_info("Process dataset: " + datasetnames(imod)) + + ; Read data + A0 = read_data(info_items[imod]) + dnames = getVarDimNames(A0) + + ; Convert units for plotting (if required) + if (isatt(diag_script_info, "plot_units")) then + A0 = convert_units(A0, diag_script_info@plot_units) + end if + + ; Anomaly + if (anom .eq. "anom") then + if (ref_start.lt.start_year) then + ref_start = start_year + end if + if ((ref_start.lt.start_year) .or. (ref_end.gt.end_year) .or. \ + (ref_end.lt.ref_start)) then + error_msg("f", DIAG_SCRIPT, "", \ + "period for reference years is not properly defined") + end if + end if + + ; masking with reference dataset + if (diag_script_info@ref_mask) then + ; Determine start/end year + start_year = info_items[imod]@start_year + end_year = info_items[imod]@end_year + A_ref_mask = time_operations(A_ref, start_year, \ + end_year, "extract", "", 0) + A0 = where(A_ref_mask.eq.A_ref_mask@_FillValue, \ + A_ref_mask@_FillValue, A0) + delete(A_ref_mask) + end if + + ; Anomaly + if (anom .eq. 
"anom") then + tmp := time_operations(A0, ref_start, ref_end, "extract", "", 0) + A0_monavg_ref = time_operations(tmp, ref_start, ref_end, "average", \ + "monthlyclim", True) + delete(tmp) + + ; calculate anomaly for each grid point + do i = 0, dimsizes(A0&time) - 1 + A0(i, :, :) = A0(i, :, :) - A0_monavg_ref(mod(i, 12), :, :) + end do + + delete(A0_monavg_ref) + + end if + + ; Calculate time average of dataset + A0_timavg = time_operations(A0, -1, -1, "average", \ + "yearly", True) + delete(A0) + + ; calculate global mean of complete time series + procmod = area_operations(A0_timavg, -90., 90., 0., 360., \ + "average", True) + delete(A0_timavg) + + ; Match time coordinate + date = procmod&year + idx1 = ind(date(0).eq.model_arr&time) + idx2 = ind(date(dimsizes(date) - 1).eq.model_arr&time) + model_arr(imod, idx1:idx2) = (/procmod/) + if (imod.eq.0) then + copy_VarAtts(procmod, model_arr) + end if + delete(procmod) + delete(date) + + end do + + ; ************************************* + ; calculate means over ensemble members + ; ************************************* + + ndim := dimsizes(datasetnames) + ensemble_assign := new(ndim, integer) + ensemble_size := new(ndim, integer) + + enum = 1 + temp = datasetnames(0) + do gg = 0, ndim - 1 + if (temp.eq.datasetnames(gg)) then + ensemble_assign(gg) = enum + else + enum = enum + 1 + ensemble_assign(gg) = enum + temp = datasetnames(gg) + end if + end do + ensemble_assign@model = datasetnames + delete(temp) + ensemble_assign@models = str_join(datasetnames + "/" + ensembles, ",") + + model_arr_em := model_arr(:max(ensemble_assign)-1, :) + model_arr_em = model_arr_em@_FillValue + datasetnames_em := datasetnames(:max(ensemble_assign)-1) + + do gg = 1, max(ensemble_assign) ; calculate ensemble means + wind := ind(ensemble_assign.eq.gg) + ensemble_size(wind) = dimsizes(wind) + if (dimsizes(wind).eq.1) then + model_arr_em(gg-1, :) = (/ model_arr(wind, :) /) + datasetnames_em(gg-1) = datasetnames(wind) + else + model_arr_em(gg-1, :) = (/ dim_avg_n(model_arr(wind, :), 0) /) + datasetnames_em(gg-1) = datasetnames(wind(0)) + end if + end do + + ; ************************************* + ; calculate multi-model mean + ; ************************************* + + ind_mod := ind(datasetnames_em.ne."MultiModelMean") + model_arr(mmm_ind, :) = dim_avg_n(model_arr_em(ind_mod, :), 0) + + ; ************************************* + ; calculate 5% and 95% quantiles + ; ************************************* + + q_05 = new(ntime, double) + q_95 = new(ntime, double) + + do itime = 0, ntime - 1 + if (any(.not.ismissing(model_arr(:, itime)))) then + model_num = num(.not.ismissing(model_arr_em(ind_mod, itime))) + ind_m := ind(.not.ismissing(model_arr(:, itime)) .and. \ + datasetnames .ne. "MultiModelMean") + ind_sort := dim_pqsort(model_arr(ind_m, itime), 1) + count = 0. + do imodel = 0, dimsizes(ind_m) - 1 + count = count + 1./(model_num * \ + ensemble_size(ind_m(ind_sort(imodel)))) + if(count .gt. 0.95) then + break + end if + if(ismissing(q_05(itime)) .and. count .gt. 
0.05) then + q_05(itime) = model_arr(ind_m(ind_sort(imodel)), itime) + end if + q_95(itime) = model_arr(ind_m(ind_sort(imodel)), itime) + end do + end if + end do + + model_arr_stat&exp(iexp) = var + model_arr_stat(iexp, 0, :) = (/model_arr(mmm_ind, :)/) + model_arr_stat(iexp, 1, :) = (/q_05/) + model_arr_stat(iexp, 2, :) = (/q_95/) + + delete(model_arr) + + end do + + ; ************************************* + ; output to NetCDF + ; ************************************* + + out_path1 = out_path + "tas_" + anom + "_damip_" + header + "_" \ + + ivar + ".nc" + model_arr_stat@ncdf = out_path + model_arr_stat@diag_script = DIAG_SCRIPT + model_arr_stat@var = var0 + ncdf_outfile = ncdf_write(model_arr_stat, out_path1) + + ; ************************************* + ; Plotting + ; ************************************* + + cmap = read_colormap_file("$diag_scripts/shared/plot/rgb/" + \ + "ipcc-ar6_damip_line.rgb") + + res@xyDashPatterns := (/0, 0, 0, 0/) + res@xyLineThicknesses := (/3, 3, 3, 3/) + res@xyLineColors := cmap(:, :) + + ymax = max(model_arr_stat) + ymin = min(model_arr_stat) + res@trYMaxF := max((/0.2, ymax + 0.05 * (ymax - ymin)/)) + res@trYMinF := min((/-0.2, ymin - 0.05 * (ymax - ymin)/)) + + if (isatt(diag_script_info, "header")) then + res@tiMainString = diag_script_info@header(ivar) + else + res@tiMainString = "" + end if + + if (isatt(diag_script_info, "plot_units")) then + units = diag_script_info@plot_units + else + units = info_items[0]@units + end if + + if (ivar .eq. 0) then + res@tiYAxisString = "Change in Temperature (~F34~0~F~ C)" + else + res@tiYAxisString = "" + end if + + plots(ivar) = gsn_csm_xy(wks, time, model_arr_stat(0:4, 0, :), res) + + res@pmLegendDisplayMode = "Never" + + ; --------------------------------------------------------------------- + ; Add multi model statistics + ; --------------------------------------------------------------------- + res_stat = True + res_stat@gsnXYFillOpacities = 0.1 + cmap = read_colormap_file("$diag_scripts/shared/plot/rgb/" + \ + "ipcc-ar6_damip_shading.rgb") + copy_VarMeta(res, res_stat) + delete(res_stat@xyLineColors) + res_stat@gsnXYFillColors = cmap(3, :) + res_stat@xyLineColor := cmap(3, :) + res_stat@xyLineThicknesses := (/1, 1/) + shading_plot = gsn_csm_xy(wks, time, model_arr_stat(3, 1:2, :), \ + res_stat) + overlay(plots(ivar), shading_plot) + + res_stat@gsnXYFillColors = cmap(2, :) + res_stat@xyLineColor = cmap(2, :) + shading_plot = gsn_csm_xy(wks, time, model_arr_stat(2, 1:2, :), \ + res_stat) + overlay(plots(ivar), shading_plot) + + res_stat@gsnXYFillColors = cmap(1, :) + res_stat@xyLineColor = cmap(1, :) + shading_plot = gsn_csm_xy(wks, time, model_arr_stat(1, 1:2, :), \ + res_stat) + overlay(plots(ivar), shading_plot) + + res_stat@gsnXYFillColors = cmap(0, :) + res_stat@xyLineColor = cmap(0, :) + shading_plot = gsn_csm_xy(wks, time, model_arr_stat(0, 1:2, :), \ + res_stat) + overlay(plots(ivar), shading_plot) + + ; --------------------------------------------------------------------- + ; add reference datasets + ; --------------------------------------------------------------------- + res@xyDashPatterns := (/0/) + res@xyLineThicknesses := (/3/) ; make 2nd lines thicker + res@xyLineColors := "black" ; change line color + + ref_p = gsn_csm_xy(wks, time, ref_arr, res) ; create plot + overlay(plots(ivar), ref_p) + + end do + + ; Plotting panels + + pres = True + + pres@gsnPanelMainString = "Anomaly of Near-Surface Air Temperature" + + pres@gsnPanelRowSpec = True + pres@gsnPanelCenter = False + + 
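+  ; with gsnPanelRowSpec = True, the dimension argument of gsn_panel
+  ; below gives the number of plots per row, i.e. (/3, 3, 3, 1/)
+  ; arranges the panels in rows of 3, 3, 3 and 1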
pres@gsnPanelFigureStrings = diag_script_info@panel_figure_strings
+  pres@gsnPanelFigureStringsPerimOn = False
+  pres@gsnPanelFigureStringsBackgroundFillColor = "transparent"
+  pres@gsnPanelFigureStringsJust = "TopLeft"
+  pres@gsnPanelFigureStringsFontHeightF = 0.014
+
+  gsn_panel(wks, plots, (/3, 3, 3, 1/), pres)
+
+  log_info(" wrote " + outfile + "." + file_type)
+
+  ; ***************************************
+  ; add file with legend
+  ; ***************************************
+
+  if isatt(diag_script_info, "legend") then
+
+    labels = diag_script_info@legend
+    labels := array_append_record(labels, "Observations", 0)
+    cmap = read_colormap_file("$diag_scripts/shared/plot/rgb/" + \
+                              "ipcc-ar6_damip_line.rgb")
+    cmap_l := new((/5, 4/), float)
+    cmap_l(0:3, :) = cmap
+    cmap_l(4, :) = (/0., 0., 0., 1/)
+
+    wks = gsn_open_wks(file_type, config_user_info@plot_dir + "legend")
+
+    lgres = True
+
+    lgres@vpWidthF = 0.7                 ; width of legend (NDC)
+    lgres@vpHeightF = 0.4                ; height of legend (NDC)
+
+    lgres@lgTitleString = "Legend"
+    lgres@lgTitleFontHeightF = 0.03
+    lgres@lgAutoManage = True
+
+    lgres@lgLineColors = cmap_l(::-1, :)
+    lgres@lgLineThicknessF = 4.
+    lgres@lgLineLabelsOn = False
+    lgres@lgMonoDashIndex = True
+
+    gsn_legend_ndc(wks, 5, labels(::-1), 0.1, 0.95, lgres)
+
+    frame(wks)
+
+  end if
+
+  ; ***************************************
+  ; add meta data to plot (for reporting)
+  ; ***************************************
+
+  domain = "global"
+
+  if (anom .eq. "anom") then
+    statistics = "anomaly"
+  else
+    statistics = "mean"
+  end if
+
+  caption = "Time series of the " + statistics + " for " \
+            + info_items[0]@long_name
+  contrib_authors = (/"bock_lisa"/)
+
+  ; Call provenance logger
+  log_provenance(ncdf_outfile, \
+                 outfile + "." + file_type, \
+                 caption, \
+                 statistics, \
+                 domain, \
+                 "times", \
+                 contrib_authors, \
+                 (/"eyring21ipcc"/), \
+                 metadata_att_as_array(input_file_info, "filename"))
+
+  leave_msg(DIAG_SCRIPT, "")
+
+end
diff --git a/esmvaltool/diag_scripts/ipcc_ar6/tsline_collect.ncl b/esmvaltool/diag_scripts/ipcc_ar6/tsline_collect.ncl
new file mode 100644
index 0000000000..65a70300ee
--- /dev/null
+++ b/esmvaltool/diag_scripts/ipcc_ar6/tsline_collect.ncl
@@ -0,0 +1,538 @@
+; #############################################################################
+; WRAPPER SCRIPT FOR COLLECTING AND PLOTTING TIME SERIES
+; Author: Lisa Bock (DLR, Germany)
+; IPCC AR6
+; #############################################################################
+; Description
+;   Collects time series and their statistics previously calculated by
+;   tas_anom.ncl and passes them to a new time series line plot
+;
+; Required diag_script_info attributes
+;   blending: if true, then var="gmst", otherwise "gsat"
+;   start_year: start year in figure
+;   end_year: end year in figure
+;   ref_start: start year of reference period for anomalies
+;   ref_end: end year of reference period for anomalies
+;
+; Optional diag_script_info attributes
+;   region: name of domain
+;   plot_units: variable unit for plotting
+;   y_min: set min of y-axis
+;   y_max: set max of y-axis
+;   order: order in which experiments should be plotted
+;   stat_shading: if true, shading of the statistical range
+;   ref_shading: if true, shading of the reference period
+;
+; Caveats
+;
+; Modification history
+;   20190204-bock_lisa: written.
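+;
+; Example (hypothetical recipe excerpt, for illustration only; the
+; attribute values shown are assumptions, not defaults of this script):
+;   scripts:
+;     tsline_collect:
+;       script: ipcc_ar6/tsline_collect.ncl
+;       blending: true
+;       start_year: 1850
+;       end_year: 2020
+;       ref_start: 1961
+;       ref_end: 1990
+;       order: ["CMIP6_historical-ssp245", "CMIP5_historical-rcp45"]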
+; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/latlon.ncl" +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/scaling.ncl" +load "$diag_scripts/shared/ensemble.ncl" + +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/xy_line.ncl" + +begin + + enter_msg(DIAG_SCRIPT, "") + diag_script_base = basename(DIAG_SCRIPT) + + var_names = metadata_att_as_array(variable_info, "short_name") + + var0 = variable_info[0]@short_name + info_var = select_metadata_by_name(input_file_info, var0) + units0 = info_var[0]@units + datasets = metadata_att_as_array(info_var, "dataset") + dim_MOD = ListCount(info_var) + + if (diag_script_info@blending) then + var = "gmst" + units0 = "degC" + else + var = "gsat" + end if + + ; Create output plot directory + plot_dir = config_user_info@plot_dir + system("mkdir -p " + plot_dir) + + ; Check file type + file_type = config_user_info@output_file_type + if(ismissing(file_type)) then + file_type = "ps" + end if + + ; Set default values for non-required diag_script_info attributes + set_default_att(diag_script_info, "region", "Global") + set_default_att(diag_script_info, "stat_shading", False) + set_default_att(diag_script_info, "ref_shading", False) + + ; Determine time range + start_year = diag_script_info@start_year + end_year = diag_script_info@end_year + all_years = ispan(start_year, end_year, 1) + + ; Create time coordinate + ntime = end_year - start_year + 1 + time = new(ntime, integer) + time = ispan(start_year, end_year, 1) + + ref_start = diag_script_info@ref_start + ref_end = diag_script_info@ref_end + + ; ------------------------------------------- + ; Read additional reference datasets + ; ------------------------------------------- + + if (dim_MOD .gt. 0) then + + data_ref = new((/dim_MOD, ntime/), double) + data_ref!0 = "dataset" + data_ref!1 = "time" + data_ref&dataset = datasets + data_ref&time = time + + ; Loop over datasets + do imod = 0, dim_MOD - 1 + + log_info("Process dataset: " + datasets(imod)) + + ; Read data + A0 = read_data(info_var[imod]) + + ndim = dimsizes(A0) + + ; calculate anomaly for each grid point + start_year_0 = info_var[imod]@start_year + end_year_0 = info_var[imod]@end_year + if (start_year_0 .gt. diag_script_info@ref_start) then + ref_start = start_year_0 + else + ref_start = diag_script_info@ref_start + end if + if (ref_start .ne. 1961 .or. ref_end .ne. 
1990) then + tmp = time_operations(A0, ref_start, ref_end, "average", \ + "monthlyclim", True) + do i = 0, ndim(0) - 1 + A0(i, :, :) = A0(i, :, :) - tmp(mod(i, 12), :, :) + end do + end if + + ; calculate field mean + A0_mean := area_operations(A0, -90., 90., \ + 0., 360., "average", True) + + ; yearly average + diag = time_operations(A0_mean, -1, -1, "average", \ + "yearly", True) + + ; Match time coordinate + date = diag&year + idx1 = ind(date(0).eq.data_ref&time) + idx2 = ind(date(dimsizes(date) - 1).eq.data_ref&time) + delete(date) + + data_ref(imod, idx1:idx2) = (/diag/) + + delete(A0) + delete(A0_mean) + delete(diag) + + end do + + end if + + ; ----------------------------------------------- + ; Read pre-calculated multi model statistics + ; ----------------------------------------------- + + input_files = diag_script_info@input_files + "/" + \ + "tsline_gmst_anom" + "_stat.nc" + input_files := tostring(input_files) + + ; Filter out non-existing files + input_files := input_files(ind(isfilepresent(input_files))) + + nfiles = dimsizes(input_files) + + dim_data = (/nfiles, 3, ntime/) + data_mean = new(dim_data, double) + data_mean!0 = "experiment" + data_mean!1 = "stat" + data_mean!2 = "time" + data_mean&experiment = new(nfiles, string, "exp") + data_mean&stat = (/"mean", "stat1", "stat2"/) + data_mean&time = time + data_mean@var = var + + ; Loop over files in the list, read and append data + do ii = 0, nfiles - 1 + + log_info("Read in " + input_files(ii)) + + var_mean = var + "_mean" + var_stddev = var + "_stddev" + + data_temp = ncdf_read(input_files(ii), var_mean) + data_stat = ncdf_read(input_files(ii), var_stddev) + data_stat1 = ncdf_read(input_files(ii), var+"_5quantile") + data_stat2 = ncdf_read(input_files(ii), var+"_95quantile") + + log_info("Experiment: " + data_temp@experiment) + + if(isatt(diag_script_info, "order")) then + ii_n = ind(data_temp@experiment .eq. diag_script_info@order) + if (all(ismissing(ii_n))) then + error_msg("f", DIAG_SCRIPT, "", "Collected experiment " + \ + data_temp@experiment + " does not appear in " + \ + "diag_script_info@order") + elseif (ii_n .gt. nfiles-1) + error_msg("f", DIAG_SCRIPT, "", "diag_script_info@order not " \ + + "well defined (less experiments available)") + end if + else + ii_n = ii + end if + + ; Match time coordinate + date = data_temp&time + idx1 = ind(date(0).eq.data_mean&time) + idx2 = ind(date(dimsizes(date) - 1).eq.data_mean&time) + delete(date) + + data_mean(ii_n, 0, idx1:idx2) = (/data_temp(:)/) + data_mean(ii_n, 1, idx1:idx2) = (/data_stat1(:)/) + data_mean(ii_n, 2, idx1:idx2) = (/data_stat2(:)/) + data_mean&experiment(ii_n) = data_temp@experiment + data_mean@units = units0 + + delete(data_temp) + delete(data_stat) + delete(data_stat1) + delete(data_stat2) + + end do + + ; ------------------------------------------- + ; NetCDF Output + ; ------------------------------------------- + + do i = 0, dimsizes(data_mean&experiment) - 1 + if (any(data_mean&experiment(i) .eq. (/"CMIP6_historical", \ + "CMIP6_historical-ssp245"/))) then + data_mean&experiment(i) = "CMIP6" + elseif (any(data_mean&experiment(i) .eq. 
(/"CMIP5_historical", \ + "CMIP5_historical-rcp45"/))) + data_mean&experiment(i) = "CMIP5" + end if + end do + + out_path = config_user_info@work_dir + system("mkdir -p " + out_path) + out_path1 = out_path + "tsline_collect_" + var0 + ".nc" + data_mean@ncdf = out_path + data_mean@diag_script = DIAG_SCRIPT + data_mean@var = var + ncdf_outfile = ncdf_write(data_mean, out_path1) + + if (isatt(diag_script_info, "ref")) then + out_path1 = out_path + "tsline_collect_" + var0 + "_ref.nc" + data_ref@ncdf = out_path + data_ref@diag_script = DIAG_SCRIPT + data_ref@var = var + ncdf_outfile_ref = ncdf_write(data_ref, out_path1) + end if + + ; ------------------------------------------- + ; Plotting + ; ------------------------------------------- + + ; Define workstation + outfile = config_user_info@plot_dir + data_mean@var + "_" + \ + str_sub_str(diag_script_info@region, " ", "_") + \ + "_multimodel_anom_" + start_year + "-" + end_year + wks = gsn_open_wks(file_type, outfile) + + ; Select colors and other plotting attributes + ; (see ./diag_scripts/lib/ncl/style.ncl) + colors = project_style(info_var, diag_script_info, "colors") + dashes = project_style(info_var, diag_script_info, "dashes") + thicks = project_style(info_var, diag_script_info, "thicks") + annots = project_style(info_var, diag_script_info, "annots") + + ; ************************************************ + ; plotting parameters + ; ************************************************ + + plot = new(1, graphic) + + res = True + res@gsnDraw = False + res@gsnFrame = False + + res@vpXF = 0.05 + res@vpYF = 0.7 + res@vpHeightF = 0.4 + res@vpWidthF = 0.7 + res@pmLegendDisplayMode = "Never" + res@tmYRLabelFontHeightF = 0.016 + res@tmYLLabelFontHeightF = 0.016 + res@tiXAxisFontHeightF = 0.016 + res@tiYAxisFontHeightF = 0.016 + res@tmXBMajorOutwardLengthF = 0.006 + res@tmYLMajorOutwardLengthF = 0.006 + res@tmXBMinorOutwardLengthF = 0.003 + res@tmYLMinorOutwardLengthF = 0.003 + res@tmXBMajorLengthF = 0.006 + res@tmYLMajorLengthF = 0.006 + res@tmXBMinorLengthF = 0.003 + res@tmYLMinorLengthF = 0.003 + + if (isatt(diag_script_info, "ref")) then + min_tmp = (/min(data_mean), min(data_ref)/) + max_tmp = (/max(data_mean), max(data_ref)/) + else + min_tmp = (/min(data_mean), min(data_stat)/) + max_tmp = (/max(data_mean), max(data_stat)/) + end if + res@trYMinF = min(min_tmp) - 0.05 * (max(max_tmp) - min(min_tmp)) + res@trYMaxF = max(max_tmp) + 0.05 * (max(max_tmp) - min(min_tmp)) + res@tiYAxisOn = True + res@tiXAxisString = "" + res@gsnStringFontHeightF = 0.016 + + xmin = start_year + xmax = end_year + res@trXMinF = xmin + res@trXMaxF = xmax + res@tmXBMode = "Explicit" + if (xmax - xmin.gt.20) then + res@tmXBValues = ispan(xmin, xmax, 10) + res@tmXBLabels = ispan(xmin, xmax, 10) + res@tmXBMinorValues = ispan(xmin, xmax, 5) + else + res@tmXBValues = ispan(xmin, xmax, 5) + res@tmXBLabels = ispan(xmin, xmax, 5) + res@tmXBMinorValues = ispan(xmin, xmax, 1) + end if + res@tmXBLabelAngleF = 45 + res@tmXBLabelJust = "CenterRight" + if (isatt(diag_script_info, "y_min")) then + res@trYMinF = diag_script_info@y_min + end if + if (isatt(diag_script_info, "y_max")) then + res@trYMaxF = diag_script_info@y_max + end if + + res@tmXBMode = "Manual" + res@tmXBTickSpacingF = 20 + + res@tiMainString = "" + + res@tiYAxisString = "Change in Temperature (~F34~0~F~ C)" + + ; --------------------------------------------------------------------- + ; add multi model mean of different experiments + ; --------------------------------------------------------------------- + ; number of 
different experiments + nexp = dim_data(0) + + cmap = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_cmip_line.rgb") + res@xyDashPatterns = (/0, 0, 0, 0, 0, 0/) + res@xyLineThicknesses = (/5, 5, 5, 5, 5, 5/) + res@xyLineColors = cmap(::-1, :) + + if (nexp .gt. 6) then + error_msg("w", scriptname, funcname, "Color palette not defined for " \ + + "more than " + nexp + " experiments") + end if + + res@pmLegendDisplayMode = "Never" + res@xyExplicitLegendLabels = data_mean&experiment + res@lgBoxMinorExtentF = 0.2 ; Shorten the legend lines + + plot(0) = gsn_csm_xy(wks, data_ref&time, data_mean(:, 0, :), res) + + res@pmLegendDisplayMode = "Never" + + ; --------------------------------------------------------------------- + ; Add multi model statistics (5% and 95% quantile) + ; --------------------------------------------------------------------- + if (isatt(diag_script_info, "stat_shading")) then + if (diag_script_info@stat_shading .ne. False) then + res_stat = True + res_stat@gsnXYFillOpacities = 0.2 + cmap = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_cmip_shading.rgb") + copy_VarMeta(res, res_stat) + res_stat@gsnXYFillColors = cmap(2, :) + delete(res_stat@xyLineColors) + ; We don't want the line, so make it transparent. + res_stat@xyLineColor = cmap(2, :) + delete(res_stat@xyLineThicknesses) + res_stat@xyLineThicknesses = (/1, 1/) + shading_plot = gsn_csm_xy(wks, data_ref&time, data_mean(0, 1:2, :), \ + res_stat) + overlay(plot(0), shading_plot) + if(nexp .ge. 2) then + res_stat@gsnXYFillColors = cmap(1, :) + res_stat@xyLineColor = cmap(1, :) + shading_plot = gsn_csm_xy(wks, data_ref&time, data_mean(1, 1:2, :), \ + res_stat) + overlay(plot(0), shading_plot) + end if + if(nexp .ge. 3) then + res_stat@gsnXYFillColors = cmap(0, :) + res_stat@xyLineColor = cmap(0, :) + shading_plot = gsn_csm_xy(wks, data_ref&time, data_mean(2, 1:2, :), \ + res_stat) + overlay(plot(0), shading_plot) + end if + if(nexp .ge. 4) then + res_stat@gsnXYFillColors = cmap(3, :) + res_stat@xyLineColor = cmap(3, :) + shading_plot = gsn_csm_xy(wks, data_ref&time, data_mean(3, 1:2, :), \ + res_stat) + overlay(plot(0), shading_plot) + end if + if(nexp .gt. 
4) then + error_msg("w", scriptname, funcname, "Color palette not defined for " \ + + nexp + " experiments") + end if + end if + end if + + ; --------------------------------------------------------------------- + ; add reference datasets + ; --------------------------------------------------------------------- + if (isatt(diag_script_info, "ref")) then + delete(res@xyDashPatterns) + delete(res@xyLineThicknesses) + delete(res@xyLineColors) + res@xyDashPatterns = dashes + res@xyLineThicknesses = (/3, 3, 3, 3, 3/) ; make lines thicker + res@xyLineColors = colors ; change line color + + ref_p = gsn_csm_xy(wks, data_ref&time, data_ref, res) ; create plot + overlay(plot(0), ref_p) + end if + + if (diag_script_info@ref_shading) then + + ; --------------------------------------------------------------------- + ; grey shading of reference period + ; --------------------------------------------------------------------- + gsres = True + gsres@gsFillColor = "Grey70" + gsres@gsFillOpacityF = 0.1 + xbox = (/ref_start, ref_end, ref_end, ref_start, ref_start/) + ybox = (/res@trYMinF, res@trYMinF, res@trYMaxF, res@trYMaxF, res@trYMinF/) + newplot00 = gsn_add_polygon(wks, plot(0), xbox, ybox, gsres) + + txtres = True + txtres@txFont = "helvetica-bold" + txtres@txFontColor = "Grey40" + txtres@txFontHeightF = 0.013 + ref_txt = gsn_add_text(wks, plot, "reference period", \ + 0.5*(ref_start + ref_end), \ + res@trYMaxF - 0.05 * (res@trYMaxF - res@trYMinF), \ + txtres) + + end if + + ; --------------------------------------------------------------------- + ; Draw some lines to create a legend + ; --------------------------------------------------------------------- + res_lines = True ; polyline mods desired + res_lines@tfPolyDrawOrder = "Predraw" + res_lines@gsLineColor = "grey" ; line color + res_lines@gsLineThicknessF = 1. ; line thicker + res_lines@gsLineDashPattern = 1. 
; dash pattern + + xx = (/res@trXMinF, res@trXMaxF/) + yy = (/0.0, 0.0/) + dum0 = gsn_add_polyline(wks, plot(0), xx, yy, res_lines) + + ; *********************************************** + ; legend resources + ; *********************************************** + + lgres = True + lgres@lgItemType = "Lines" ; show lines only (default) + lgres@lgLabelFontHeightF = .06 ; set the legend label font height + lgres@vpWidthF = 0.2 ; width of legend (NDC) + lgres@vpHeightF = 0.15 ; height of legend (NDC) + lgres@lgPerimColor = "gray" ; draw the box perimeter in gray + lgres@lgPerimThicknessF = 1.0 ; thicken the box perimeter + lgres@lgPerimFill = 0 + lgres@lgPerimFillColor = "white" + + delete(cmap) + cmap = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_cmip_line.rgb") + tmp := tostring(cmap) + tmp1 = (/"(/" + tmp(2, 0) + "," + tmp(2, 1) + "," + tmp(2, 2) + "/)", \ + "(/" + tmp(1, 0) + "," + tmp(1, 1) + "," + tmp(1, 2) + "/)", \ + "(/" + tmp(0, 0) + "," + tmp(0, 1) + "," + tmp(0, 2) + "/)"/) + lgres@lgLineColors = array_append_record(colors, tmp1, 0) + lgres@lgDashIndexes = array_append_record(dashes, (/0, 0, 0/), 0) + lgres@lgLineThicknessF = 3 + labels = array_append_record(annots, data_mean&experiment, 0) + nitems = dimsizes(labels) + lgres@lgItemOrder = ispan(nitems - 1, 0, 1) + + ; Create legend + lbid = gsn_create_legend(wks, nitems, labels, lgres) + + amres = True + amres@amParallelPosF = 0.0 + amres@amOrthogonalPosF = -0.25 + annoid1 = gsn_add_annotation(plot(0), lbid, amres) + + resP = True + resP@gsnMaximize = True + resP@gsnPaperOrientation = "portrait" + resP@gsnPaperMargin = 0.8 + + gsn_panel(wks, plot, (/1, 1/), resP) + + log_info(" wrote " + outfile + "." + file_type) + + ; *************************************** + ; add meta data to plot (for reporting) + ; *************************************** + + if (diag_script_info@region .eq. "Global") then + domain = "global" + else + domain = "reg" + end if + + caption = "Multi-model mean time series of the anomaly of near-surface air " \ + + "temperature" + + infiles = metadata_att_as_array(input_file_info, "filename") + infiles := array_append_record(infiles, input_files, 0) + + ; Call provenance logger + log_provenance(ncdf_outfile, \ + outfile + "." + file_type, \ + caption, \ + "anomaly", \ + domain, \ + "times", \ + (/"bock_lisa"/), \ + (/"eyring21ipcc"/), \ + infiles) + + leave_msg(DIAG_SCRIPT, "") + +end diff --git a/esmvaltool/diag_scripts/ipcc_ar6/zonal_st_dev.ncl b/esmvaltool/diag_scripts/ipcc_ar6/zonal_st_dev.ncl new file mode 100644 index 0000000000..81045d80a8 --- /dev/null +++ b/esmvaltool/diag_scripts/ipcc_ar6/zonal_st_dev.ncl @@ -0,0 +1,361 @@ +; ############################################################################# +; PLOTS ZONAL MEAN STANDARD DEVIATION +; Author: Lisa Bock (lisa.bock@dlr.de) +; IPCC AR6 +; ############################################################################# +; +; Description +; Global climate variability of surface temperature as represented by the +; standard deviation of zonal-mean values +; This script has been developed to reproduce Fig. 9.33 in +; IPCC AR5 Chapter 9.
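+; Here the standard deviation is taken over the detrended, yearly-averaged +; zonal means of each dataset (see the calculation section below).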
+; +; Optional variable_info attributes (variable specific) +; reference_dataset: reference dataset; REQUIRED when calculating +; anomalies +; +; Required diag_script_info attributes (diagnostic specific) +; styleset: as in diag_scripts/shared/plot/style.ncl functions +; +; Optional diag_script_info attributes (diagnostic specific) +; plot_legend: if true, a legend will be plotted +; plot_units: variable unit for plotting +; multi_model_mean: if true, multi-model mean and uncertainty will be +; plotted +; +; Modification history +; 20190304-bock_lisa: written +; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/latlon.ncl" +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/scaling.ncl" +load "$diag_scripts/shared/ensemble.ncl" + +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/xy_line.ncl" + + +begin + + enter_msg(DIAG_SCRIPT, "") + + ; Check for required settings + exit_if_missing_atts(diag_script_info, "styleset") + + var0 = "tas" + info_items = select_metadata_by_name(input_file_info, var0) + datasetnames = metadata_att_as_array(info_items, "dataset") + dim_MOD = ListCount(info_items) + dim_VAR = ListCount(variable_info) + + if (dim_VAR .gt. 1) then + var1 = "tasa" + info_items1 = select_metadata_by_name(input_file_info, var1) + datasetnames1 = metadata_att_as_array(info_items1, "dataset") + dim_MOD1 = ListCount(info_items1) + do i = 0, dim_MOD1-1 + ListAppend(info_items, info_items1[i]) + end do + datasetnames := array_append_record(datasetnames, datasetnames1, 0) + dim_MOD = dim_MOD + dim_MOD1 + end if + + ; Set default values for non-required diag_script_info attributes + set_default_att(diag_script_info, "plot_legend", True) + set_default_att(diag_script_info, "multi_model_mean", True) + + ; Create output plot directory + plot_dir = config_user_info@plot_dir + system("mkdir -p " + plot_dir) + + ; Plot file type + file_type = config_user_info@output_file_type + if (ismissing(file_type)) then + file_type = "ps" + end if + + ; Set index of the reference (and alternative) dataset + if (isatt(variable_info[0], "reference_dataset")) then + ref_ind = ind(datasetnames.eq.variable_info[0]@reference_dataset) + A_ref = read_data(info_items[ref_ind]) + else + A_ref = read_data(info_items[0]) + end if + + ; Determine time range + start_year = min(metadata_att_as_array(info_items, "start_year")) + end_year = max(metadata_att_as_array(info_items, "end_year")) + all_years = ispan(start_year, end_year, 1) + + ; ------------------------------------------------------------------ + ; Calculation of standard deviation + ; ------------------------------------------------------------------ + + ; Create model array + model_arr = new((/dim_MOD, dimsizes(A_ref&lat)/), float) + model_arr!0 = "model" + model_arr!1 = "lat" + model_arr&model = datasetnames + model_arr&lat = A_ref&lat + + ; Loop over models + do imod = 0, dim_MOD - 1 + + ; Read data + A0 = read_data(info_items[imod]) + + ; Convert units for plotting (if required) + if (isatt(diag_script_info, "plot_units")) then + if (info_items[imod]@short_name .ne. "tasa" .and. \ + info_items[imod]@units .ne.
diag_script_info@plot_units) then + A0 = convert_units(A0, diag_script_info@plot_units) + end if + end if + + ; detrending all timeseries + A0_detrend = dtrend_quadratic_msg_n(A0, False, False, 0) + + A0 = (/A0_detrend/) + + procmod = dim_avg_Wrap(A0) + delete(A0) + delete(A0_detrend) + + A0_timavg = time_operations(procmod, -1, -1, "average", "yearly", True) + delete(procmod) + + ; calculate zonal standard deviation + A0_stddev = dim_stddev_n_Wrap(A0_timavg, 0) + delete(A0_timavg) + if(any(A0_stddev.eq."-nan")) then + A0_stddev = where(A0_stddev.eq."-nan", A0_stddev@_FillValue, A0_stddev) + end if + + if (dimsizes(A0_stddev&lat).ne.dimsizes(A_ref&lat)) then + dnames = getVarDimNames(A0_stddev) + dx = ind(dnames.eq."lat") + var_hreg = linint1_n_Wrap(A0_stddev&lat, A0_stddev, False, \ + A_ref&lat, 0, dx) + else + var_hreg = A0_stddev + copy_VarAtts(A0_stddev, var_hreg) + end if + + model_arr(imod, :) = (/var_hreg/) + if (imod.eq.0) then + copy_VarAtts(var_hreg, model_arr) + end if + delete(A0_stddev) + + end do + + ; ------------------------------------------------------------------ + ; Calculate multi model mean and stddev + ; ------------------------------------------------------------------ + + model_arr_stddev = new((/4, dimsizes(A_ref&lat)/), float) + model_arr_stddev!1 = "lat" + model_arr_stddev&lat = A_ref&lat + + model_arr_stddev(0, :) = dim_avg_n_Wrap(model_arr, 0) + model_arr_stddev(1, :) = dim_stddev_n_Wrap(model_arr, 0) + model_arr_stddev(2, :) = model_arr_stddev(0, :) - model_arr_stddev(1, :) + model_arr_stddev(3, :) = model_arr_stddev(0, :) + model_arr_stddev(1, :) + + ; ------------------------------------------------------------------ + ; Output to NetCDF + ; ------------------------------------------------------------------ + + out_path = config_user_info@work_dir + system("mkdir -p " + out_path) + out_path = out_path + var0 + "_std_dev_zonmean.nc" + model_arr@ncdf = out_path + model_arr@diag_script = DIAG_SCRIPT + model_arr@var = var0 + ncdf_outfile = ncdf_write(model_arr, out_path) + + ; ------------------------------------------------------------------ + ; Plotting + ; ------------------------------------------------------------------ + + ; Define workstation + outfile = config_user_info@plot_dir + var0 + "_std_dev_zonmean" + wks = gsn_open_wks(file_type, outfile) + + ; Select colors and other plotting attributes + ; (see ./diag_scripts/shared/plot/style.ncl) + colors := project_style(info_items, diag_script_info, "colors") + dashes := project_style(info_items, diag_script_info, "dashes") + thicks := project_style(info_items, diag_script_info, "thicks") + annots := project_style(info_items, diag_script_info, "annots") + + ; Select colors and other plotting attributes for multi-model mean + if (diag_script_info@multi_model_mean) then + items_mmm = NewList("fifo") + mmm_var = True + mmm_var@dataset = "MultiModelMean" + ListPush(items_mmm, mmm_var) + colors_mmm = project_style(items_mmm, diag_script_info, "colors") + dashes_mmm = project_style(items_mmm, diag_script_info, "dashes") + thicks_mmm = project_style(items_mmm, diag_script_info, "thicks") + annots_mmm = project_style(items_mmm, diag_script_info, "annots") + end if + + ; Set/copy resources + res = True + res@xyLineColors = colors ; change line color + res@xyLineThicknesses = thicks ; make 2nd lines thicker + res@xyMonoDashPattern = False + res@xyDashPatterns = dashes + res@gsnDraw = False ; don't draw yet + res@gsnFrame = False ; don't advance frame yet + res@tiMainFontHeightF = .02 ; font height + 
res@tiXAxisFontHeightF = .015 + res@tiYAxisFontHeightF = .015 + res@gsnMaximize = True + res@vpHeightF = 0.4 + res@vpWidthF = 0.8 + + res@tmYROn = False + res@tmXTOn = False + res@tmYRBorderOn = False + res@tmXTBorderOn = False + + res@tmXBMode = "Explicit" + d2r = get_d2r("f") + res@tmXBValues = sin(d2r * ispan(-60, 60, 30)) + res@tmXBLabels = (/"60~F34~0~F~S", "30~F34~0~F~S", "0~F34~0~F~", \ + "30~F34~0~F~N", "60~F34~0~F~N"/) + res@tmXBMinorValues = sin(d2r * ispan(-90, 90, 10)) + res@trXMinF = -1. + res@trXMaxF = 1. + + res@trYMinF = min(model_arr) - 0.05 * (max(model_arr) - min(model_arr)) + res@trYMaxF = max(model_arr) + 0.05 * (max(model_arr) - min(model_arr)) + + res@tiMainString = "Temporal variability of near-surface air temperature" + res@tiXAxisString = "Latitude" + + if (model_arr@units .eq. "degC") then + units = "~F34~0~F~ C" + else + units = model_arr@units + end if + res@tiYAxisString = "Standard deviation of temperature (" + units + ")" + + model_arr&lat = sin(d2r * model_arr&lat) + + ; Plot + plot = gsn_csm_xy(wks, model_arr&lat, model_arr, res) + + ; Add multi model mean and stddev + if (diag_script_info@multi_model_mean) then + ; Stddev + res_stddev = True + copy_VarMeta(res, res_stddev) + res_stddev@gsnXYFillColors = "LightGrey" + delete(res_stddev@xyLineColors) + res_stddev@xyLineColor = -1 ; Make lines transparent + shading_plot = gsn_csm_xy(wks, model_arr&lat, model_arr_stddev(2:3, :), \ + res_stddev) + overlay(plot, shading_plot) + ; MMM + delete([/res@xyLineThicknesses, res@xyLineColors, res@xyDashPatterns/]) + res@xyLineThicknesses = thicks_mmm + res@xyLineColors = colors_mmm + res@xyDashPatterns = dashes_mmm + mmm = gsn_csm_xy(wks, model_arr&lat, model_arr_stddev(0, :), res) + overlay(plot, mmm) + end if + + ; *********************************************** + ; legend resources + ; *********************************************** + + if (diag_script_info@plot_legend) then + + lgres = True + lgres@lgItemType = "Lines" ; show lines only (default) + lgres@lgLabelFontHeightF = 1.0 ; set the legend label font height + lgres@vpWidthF = 0.15 ; width of legend (NDC) + lgres@vpHeightF = 0.7 ; height of legend (NDC) + lgres@lgPerimColor = "gray" ; draw the box perimeter in gray + lgres@lgPerimThicknessF = 1.0 ; thicken the box perimeter + + lgres@lgLineColors = colors + lgres@lgDashIndexes = dashes + lgres@lgLineThicknesses = thicks + 3 + labels = annots + nitems = dimsizes(labels) + lgres@lgItemOrder = ispan(nitems - 1, 0, 1) + + ; Create legend + lbid = gsn_create_legend(wks, nitems, labels, lgres) + + amres = True + amres@amParallelPosF = 0.65 + amres@amOrthogonalPosF = 0.0 + annoid1 = gsn_add_annotation(plot, lbid, amres) + + else + + colors := project_style(info_items1, diag_script_info, "colors") + dashes := project_style(info_items1, diag_script_info, "dashes") + annots := project_style(info_items1, diag_script_info, "annots") + colors := array_append_record(colors, (/"red"/), 0) + dashes := array_append_record(dashes, (/0/), 0) + annots := array_append_record(annots, (/"MultiModelMean"/), 0) + + lgres = True + lgres@lgItemType = "Lines" ; show lines only (default) + lgres@lgLabelFontHeightF = .06 ; set the legend label font height + lgres@vpWidthF = 0.2 ; width of legend (NDC) + lgres@vpHeightF = 0.15 ; height of legend (NDC) + lgres@lgPerimColor = "gray" ; draw the box perimeter in gray + lgres@lgPerimThicknessF = 1.0 ; thicken the box perimeter + lgres@lgPerimFill = 0 + lgres@lgPerimFillColor = "white" + + lgres@lgLineColors = colors +
lgres@lgDashIndexes = dashes + lgres@lgLineThicknessF = 3 + + ; Create legend + lbid = gsn_create_legend(wks, dimsizes(annots), annots, lgres) + + amres = True + amres@amParallelPosF = -0.25 + amres@amOrthogonalPosF = -0.25 + annoid1 = gsn_add_annotation(plot, lbid, amres) + + end if + + resP = True + resP@gsnMaximize = True + + gsn_panel(wks, plot, (/1, 1/), resP) + + log_info(" wrote " + outfile + "." + file_type) + + ; ------------------------------------------------------------------------ + ; write provenance to netcdf output and plot file(s) (mean) + ; ------------------------------------------------------------------------ + + authors = (/"bock_lisa"/) + statistics = (/"stddev"/) + domain = ("global") + plottype = ("line") + references = (/"eyring21ipcc"/) + prov_caption = "Temporal variability of Near-Surface Air Temperature" + infiles = metadata_att_as_array(info_items, "filename") + + log_provenance(out_path, outfile + "." + file_type, prov_caption, \ + statistics, domain, plottype, authors, references, infiles) + + leave_msg(DIAG_SCRIPT, "") + +end diff --git a/esmvaltool/diag_scripts/ipcc_ar6/zonal_westerly_winds.ncl b/esmvaltool/diag_scripts/ipcc_ar6/zonal_westerly_winds.ncl new file mode 100644 index 0000000000..7219db221e --- /dev/null +++ b/esmvaltool/diag_scripts/ipcc_ar6/zonal_westerly_winds.ncl @@ -0,0 +1,392 @@ +; Based on EYRING13JGR_FIG12 +; Author: Irene Cionni (ENEA, Italy), Veronika Eyring (DLR, Germany), +; Lisa Bock (DLR, Germany), Birgit Hassler (DLR, Germany) +; ############################################################################# +; Description +; Calculation of the long-term mean and trend of the zonal wind +; +; +; Optional diag_script_info attributes (diagnostic specific) +; @e13fig12_start_year: year when the climatology calculation starts +; @e13fig12_end_year: year when the climatology calculation ends +; @e13fig12_multimean: multimodel mean +; @e13fig12_season: season +; @e13fig12_exp_MMM: name of the experiments for the MMM +; Caveats +; +; Modification history +; ############################################################################# +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" +load "$diag_scripts/shared/plot/style.ncl" + +begin + + enter_msg(DIAG_SCRIPT, "") + var0 = variable_info[0]@short_name + info0 = select_metadata_by_name(input_file_info, var0) + dim_MOD1 = ListCount(info0) + names = metadata_att_as_array(info0, "dataset") + projects = metadata_att_as_array(info0, "project") + exps = metadata_att_as_array(info0, "exp") + ensembles = metadata_att_as_array(info0, "ensemble") + y1 = (/ metadata_att_as_array(info0, "start_year")/) + y2 = (/ metadata_att_as_array(info0, "end_year") /) + work_dir = config_user_info@work_dir + "/" + + exps_name = where(ismissing(exps), "OBS", exps) + + ; Create work dir + system("mkdir -p " + work_dir) + + if (isatt(variable_info[0], "reference_dataset")) then + refname = variable_info[0]@reference_dataset + do imod = 0, dimsizes(names) - 1 + if (names(imod) .eq.
refname) then + iref = imod + end if + end do + end if + + if(isatt(variable_info, "units")) then + UNITS = variable_info@units + else + UNITS = "m s~S~-1~N~" + end if + + if(isatt(variable_info, "long_name")) then + LONG_NAME = variable_info@long_name + else + LONG_NAME = var0 + end if + + ; Plot file type + file_type = config_user_info@output_file_type + if (ismissing(file_type)) then + file_type = "ps" + end if + + log_info("++++++++++++++++++++++++++++++++++++++++++") + log_info(DIAG_SCRIPT + " (var: " + var0 + ")") + log_info("++++++++++++++++++++++++++++++++++++++++++") + + ; Set default values for optional diag_script_info attributes + + set_default_att(diag_script_info, "e13fig12_multimean", False) + set_default_att(diag_script_info, "e13fig12_start_year", max(y1)) + set_default_att(diag_script_info, "e13fig12_end_year", min(y2)) + set_default_att(diag_script_info, "e13fig12_season", "ANN") + flag_mod = where(projects.ne."OBS6" .and. projects.ne."OBS" .and.\ + projects.ne."obs4mips", 1, 0) + + index_mod = ind(flag_mod.gt.0) + index_obs = ind(flag_mod.eq.0) + dim_MOD = dimsizes(index_mod) + dim_OBS = 0 + + if (.not.all(ismissing(index_obs))) then + dim_OBS = dimsizes(index_obs) + list_obs_mod = names(index_obs) + end if + + multimean = diag_script_info@e13fig12_multimean + season = diag_script_info@e13fig12_season + if (multimean) then + exp_MMM = diag_script_info@e13fig12_exp_MMM + flag_exp = new((/dimsizes(projects), dimsizes(exp_MMM)/), "integer") + do id = 0, dimsizes(exp_MMM) - 1 + flag_exp(:, id) = where(projects.eq.exp_MMM(id), 0, 1) + end do + dimTot = dim_MOD + dim_OBS + 1 + if (dimsizes(exp_MMM).gt.1) then + dimTot = dim_MOD + dim_OBS + dimsizes(exp_MMM) + end if + else + dimTot = dim_MOD + dim_OBS + end if + + ; make sure path for (mandatory) netcdf output exists + year_start = toint(diag_script_info@e13fig12_start_year) + year_end = toint(diag_script_info@e13fig12_end_year) + +end + +begin + + aux_title_info = "" + ; wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "eyring13f12_" + var0 +\ + ; "_" + plotMod) + A0 = read_data(info0[iref]) + lat0 = A0&lat + plev0 = A0&plev + delete(A0) + + tmpData = new((/dim_MOD + dim_OBS, dimsizes(plev0), dimsizes(lat0), 3/),\ + "double") + tmpData!0 = "line" + tmpData!1 = "plev" + tmpData!2 = "lat" + tmpData!3 = "stat" + + XXdata = new((/dimTot, dimsizes(plev0), dimsizes(lat0), 3/), "double") + XXdata!0 = "line" + XXdata!1 = "plev" + XXdata!2 = "lat" + XXdata!3 = "stat" + + XXref = new((/dimsizes(plev0), dimsizes(lat0), 3/), "double") + XXref!0 = "plev" + XXref!1 = "lat" + XXref!2 = "stat" + + years1 = new(dimTot, "integer") + years2 = new(dimTot, "integer") + plot = new(dimTot, "graphic") + plot_num = integertochar(ispan(97, 122, 1)) + + plotMod = new(dimTot, typeof(names)) + + ik = 0 + +; ****************************************************************** + do imod = 0, dim_MOD1 - 1 + years1(imod) = y1(imod) + years2(imod) = y2(imod) + years = tofloat(ispan(years1(imod), years2(imod), 1)) + plotMod(imod) = names(imod) + ; Extract data + A0 = read_data(info0[imod]) + if (season.eq."ANN") then + A1 = time_operations(A0, year_start, year_end, "average",\ + "annualclim", True) + A2 = time_operations(A0, -1, -1, "extract", "yearly", True) + A2trend = regCoef(years, A2(plev|:, lat|:, time|:)) + tval_A2 = A2trend@tval + b_A2 = 0.5 + df_A2 = A2trend@nptxy - 2 + prob_A2 = (/1 - betainc(df_A2 / (df_A2 + tval_A2 ^ 2),\ + df_A2 / 2.0, b_A2)/) + else + A1 = time_operations(A0, -1, -1, "average", season, True) + A2 = month_to_season(A0, season) + A2trend 
= regCoef(years, A2(plev|:, lat|:, time|:)) + tval_A2 = A2trend@tval + b_A2 = tval_A2 + b_A2 = (/0.5/) + df_A2 = A2trend@nptxy - 2 + prob_A2 = A2(0, :, :) + prob_A2 = onedtond((/1 - betainc(df_A2 / (df_A2 + tval_A2 ^ 2), \ + df_A2 / 2.0, b_A2)/), dimsizes(A2(0, :, :))) + delete(df_A2) + delete(tval_A2) + delete(b_A2) + end if + delete(A0) + tmpData(imod, :, :, 0) = (/A1/) + tmpData(imod, :, :, 1) = (/A2trend * 10/) + tmpData(imod, :, :, 2) = (/prob_A2/) + end do + XXdata(:dim_MOD1 - 1, :, :, :) = (/tmpData/) + + XXref = (/tmpData(iref, :, :, :)/) + + XXdata!0 = "line" + XXdata!1 = "plev" + XXdata!2 = "lat" + XXdata!3 = "stat" + XXdata&lat = lat0 + plev0@long_name = "Pressure [hPa]" + XXdata&plev = plev0 + + ; ***************************************************** + if (multimean) then + do id = 0, dimsizes(exp_MMM) - 1 + flag_exp(:, id) = where(projects.eq.exp_MMM(id), 0, 1) + index_exp = ind(flag_exp(:, id).gt.0) + XXdata(dimTot - 1, :, :, :) = \ + (/dim_avg_n(XXdata(index_exp, :, :, :), 0)/) + ; plotMod(dimTot - 1) = "MMM_" + exps_name(id) + plotMod(dimTot - 1) = projects(flag_mod(0)) + years1(dimTot - 1) = max(y1) + years2(dimTot - 1) = min(y2) + delete(index_exp) + end do + end if + + X_MMM_std = (/dim_stddev_n(XXdata(index_mod, :, :, :), 0)/) + plot_d = new(2, "graphic") + + ; *************************plotting****************************************** + wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "zonal_westerly_winds") + res = True ; plot mods desired + res@gsnDraw = False + res@gsnFrame = False + + res@lbLabelBarOn = False + + pal = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-ar6_wind_div.rgb") + res@cnFillPalette = pal + + res@tiYAxisFont = "helvetica" + res@tiMainFont = "helvetica" + res@tmXBLabelFont = "helvetica" + res@tmYLLabelFont = "helvetica" + res@lbLabelFont = "helvetica" + res@cnLevelSelectionMode = "ExplicitLevels" + res@cnLevels = (/-5., -2., -1., -0.5, -0.2, -0.1, 0., 0.1, 0.2, 0.5, 1., \ + 2., 5./) + res@tmXBMode = "Explicit" + res@tmXBValues = (/-90., -60., -30., 0./) + res@tmXBLabels = (/"90~F34~0~F~S", "60~F34~0~F~S", "30~F34~0~F~S", \ + "0~F34~0~F~"/) + res@trXMinF = -90. + res@trXMaxF = 0. + res@tmYRMode = "Automatic" + res@tmYRLabelsOn = False + res@tmYLMode = "Explicit" + res@tmYLValues = (/1000., 500., 100., 50., 10., 5., 1./) + res@tmYLLabels = (/"1000", "500", "100", "50", "10", "5", "1"/) + res@trYMinF = 1. + res@trYMaxF = 1000. + res@cnLineLabelsOn = False ; turn off line labels + res@cnFillOn = True ; turn on color fill + res@cnLinesOn = False + res@cnInfoLabelOn = False ; turn off contour label + res@gsnSpreadColors = True ; use full range of colors + res@lbLabelAutoStride = True ; optimal labels + res@gsnLeftString = "" + + res@gsnLeftStringFontHeightF = 0.017 + res@gsnRightStringFontHeightF = 0.017 + res@tmXBMode = "Explicit" ; explicit labels + + res@gsnRightString = "" + + res@tiMainString = plotMod(iref) ; title + + plot_d(0) = gsn_csm_pres_hgt(wks, XXdata(iref, :, :, 1), res) + + res@tiMainString = plotMod(dimTot - 1) ; title + res@tiYAxisString = "" + + plot_d(1) = gsn_csm_pres_hgt(wks, XXdata(dimTot - 1, :, :, 1), res) + + res2 = True + res2@gsnDraw = False + res2@gsnFrame = False + res2@cnLevelSelectionMode = "ManualLevels" ; set manual contour levels + res2@cnMinLevelValF = -20. ; set min contour level + res2@cnMaxLevelValF = 40. ; set max contour level + res2@cnLevelSpacingF = 10. ; set contour spacing + res2@cnLineLabelsOn = True + res2@gsnContourZeroLineThicknessF = 2. 
; doubles thickness of zero contour + res2@gsnContourNegLineDashPattern = 1 + res2@cnFillOn = False + res2@cnInfoLabelOn = False + res2@cnLinesOn = True + res2@cnLineColor = "Black" ; color of second contours + res2@cnLineThicknessF = 2. ; line thickness + res2@tmYRMode = "Automatic" + res2@tmXBMode = "Explicit" + res2@tmXBValues = (/-90., -60., -30., 0./) + res2@tmYLMode = "Explicit" + res2@tmYLValues = (/1000., 500., 100., 50., 10., 5., 1./) + res2@lbTitleFontHeightF = 0.01 ; default: 0.025 + res2@lbTitleOffsetF = -0.25 ; default: 0.03 (negative values move it down) + res2@lbTopMarginF = 0.1 ; default: 0.05 (negative values move it up) + res2@lbTitleJust = "BottomCenter" ; default: CenterCenter + res2@lbLabelFontHeightF = 0.025 + ; model specific resources + res2@tiMainFontHeightF = 0.03 + + res2@tiMainString = plotMod(iref) + plotv_0 = gsn_csm_pres_hgt(wks, XXdata(iref, :, :, 0), res2) + + res2@tiMainString = plotMod(dimTot - 1) + plotv_1 = gsn_csm_pres_hgt(wks, XXdata(dimTot - 1, :, :, 0), res2) + + delete(res) + res3 = True + res3@gsnDraw = False + res3@gsnFrame = False + res3@cnLevelSelectionMode = "ManualLevels" ; set manual contour levels + res3@cnMinLevelValF = 0. ; set min contour level + res3@cnMaxLevelValF = 1. ; set max contour level + res3@cnLevelSpacingF = 0.005 ; set contour spacing + res3@cnLineLabelsOn = False + res3@cnInfoLabelOn = False + res3@cnFillOn = False + res3@cnLinesOn = False + res3@tmYRMode = "Automatic" + + plotz_0 = gsn_csm_pres_hgt(wks, XXdata(iref, :, :, 2), res3) + + plotz_1 = gsn_csm_pres_hgt(wks, XXdata(dimTot - 1, :, :, 2), res3) + + opt = True + opt@gsnShadeFillType = "pattern" ; pattern fill + opt@gsnShadeHigh = 17 + plotz_0 = gsn_contour_shade(plotz_0, 1., 0.95, opt) + plotz_1 = gsn_contour_shade(plotz_1, 1., 0.95, opt) + + overlay(plot_d(0), plotv_0) + + overlay(plot_d(1), plotv_1) + + ; Call legend-plotting routine + Pres = True + Pres@gsnFrame = False + Pres@gsnPanelBottom = 0.2 + Pres@amJust = "TopLeft" + + Pres@gsnPanelMainString = "Speed-up of zonal-mean wind (" + years1(iref) \ + + "-" + years2(iref) + ")" + Pres@gsnPanelXWhiteSpacePercent = 0.2 + + Pres@gsnPanelFigureStrings = (/"a)", "b)"/) + Pres@gsnPanelFigureStringsPerimOn = False + Pres@gsnPanelFigureStringsBackgroundFillColor = "transparent" + Pres@gsnPanelFigureStringsJust = "TopLeft" + Pres@gsnPanelFigureStringsFontHeightF = 0.016 + + Pres@gsnPanelLabelBar = True ; add common colorbar + Pres@lbBoxEndCapStyle = "TriangleBothEnds" + Pres@lbBoxLineColor = "gray10" + Pres@lbTitleString = "zonal wind (" + UNITS + "/decade)" + Pres@lbTitlePosition = "Bottom" + Pres@lbLabelFontHeightF = 0.012 ; make labels smaller + Pres@pmLabelBarWidthF = 0.7 + Pres@lbTitleFontHeightF = 0.016 + + gsn_panel(wks, plot_d, (/1, 2/), Pres) + + frame(wks) + + ; *******************OUTPUT FILE****************************************** + nc_filename = work_dir + "eyring13f12_" + var0 + ".nc" + Xdata = XXdata(line|:, plev|:, lat|:, stat|:) + Xdata@var = var0 + Xdata@diag_script = "eyring13f12" + Xdata&stat = (/"mean", "trend", "prob"/) + Xdata&line = plotMod + Xdata&plev = plev0 + Xdata&lat = lat0 + nc_outfile = ncdf_write(Xdata, nc_filename) + + ; Call provenance logger + plotpath = config_user_info@plot_dir + "zonal_westerly_winds" + log_provenance(nc_filename, \ + plotpath + "."
+ file_type, \ + "Long-term mean and trend of " + var0, \ + (/"mean", "trend"/), \ + (/"global"/), \ + "pro", \ + (/"bock_lisa", "cionni_irene", "eyring_veronika", \ + "hassler_birgit"/), \ + (/"eyring13jgr"/), \ + metadata_att_as_array(info0, "filename")) + +end diff --git a/esmvaltool/diag_scripts/kcs/global_matching.py b/esmvaltool/diag_scripts/kcs/global_matching.py new file mode 100644 index 0000000000..7737c448f9 --- /dev/null +++ b/esmvaltool/diag_scripts/kcs/global_matching.py @@ -0,0 +1,193 @@ +"""Align the target model with the CMIP ensemble.""" +import logging +from itertools import product +from pathlib import Path + +import matplotlib.pyplot as plt +import pandas as pd +import xarray as xr + +from esmvaltool.diag_scripts.shared import ( + ProvenanceLogger, + get_diagnostic_filename, + get_plot_filename, + run_diagnostic, + select_metadata, +) + +logger = logging.getLogger(Path(__file__).name) + + +def create_provenance_record(ancestor_files): + """Create a provenance record.""" + record = { + 'caption': + "Match temperature anomaly in target model to CMIP ensemble", + 'domains': ['global'], + 'authors': [ + 'kalverla_peter', + 'alidoost_sarah', + 'rol_evert', + ], + 'ancestors': ancestor_files, + } + return record + + +def mean_of_target_models(metadata): + """Get the average delta T of the target model ensemble members.""" + target_model_data = select_metadata(metadata, variable_group='tas_target') + files = [ + tmd['filename'] for tmd in target_model_data + if 'MultiModel' not in tmd['filename'] + ] + datasets = xr.open_mfdataset(files, combine='nested', concat_dim='ens') + provenance = create_provenance_record(files) + return datasets.tas.mean(dim='ens'), provenance + + +def get_cmip_dt(metadata, year, percentile): + """Compute target delta T for KNMI scenarios.""" + attribute = f'MultiModel{percentile}' + multimodelstat = select_metadata(metadata, alias=attribute)[0] + dataset = xr.open_dataset(multimodelstat['filename']) + return dataset.tas.sel(time=str(year)).values[0] + + +def get_resampling_period(target_dts, cmip_dt): + """Return 30-year time bounds of the resampling period. + + This is the period for which the target model delta T matches the + cmip delta T for a specific year. Uses a 30-year rolling window to + get the best match. + """ + target_dts = target_dts.rolling(time=30, center=True, + min_periods=30).mean() + time_idx = abs(target_dts - cmip_dt).argmin(dim='time').values + year = target_dts.isel(time=time_idx).year.values.astype(int) + target_dt = target_dts.isel(time=time_idx).values.astype(float) + return [year - 14, year + 15], target_dt + + +def _timeline(axes, yloc, interval): + """Plot an interval near the bottom of the plot.""" + xmin, xmax = interval + + # Later years should be located slightly higher: + # yloc is relative to the axes, not in data coordinates. + yloc = 0.05 + yloc / 20 + + plot_args = dict(transform=axes.get_xaxis_transform(), + linewidth=2, + color='red') + + axes.plot([xmin, xmax], [yloc] * 2, **plot_args, label='Selected periods') + axes.plot([xmin] * 2, [yloc - 0.01, yloc + 0.01], **plot_args) + axes.plot([xmax] * 2, [yloc - 0.01, yloc + 0.01], **plot_args) + + +def make_plot(metadata, scenarios, cfg, provenance): + """Make figure 3, left graph. + + Multimodel values as line, reference value in black square, steering + variables in dark dots. 
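+ Duplicate legend entries are collapsed by building a dict keyed on the + label before calling legend.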
+ """ + fig, axes = plt.subplots() + for member in select_metadata(metadata, variable_group='tas_cmip'): + filename = member['filename'] + dataset = xr.open_dataset(filename) + if 'MultiModel' not in filename: + axes.plot(dataset.time.dt.year, + dataset.tas.values, + c='grey', + alpha=0.3, + lw=.5, + label='CMIP members') + else: + # Only display stats for the future period: + dataset = dataset.sel(time=slice('2010', None, None)) + axes.plot(dataset.time.dt.year, + dataset.tas.values, + color='k', + linewidth=2, + label='CMIP ' + member['alias'][10:]) + + for member in select_metadata(metadata, variable_group='tas_target'): + filename = member['filename'] + dataset = xr.open_dataset(filename) + if 'MultiModel' not in filename: + axes.plot(dataset.time.dt.year, + dataset.tas.values, + color='blue', + linewidth=1, + label=member['dataset']) + + # Add the scenario's with dots at the cmip dt and bars for the periods + for i, scenario in enumerate(scenarios): + axes.scatter(scenario['year'], + scenario['cmip_dt'], + s=50, + zorder=10, + color='r', + label=r"Scenarios' steering $\Delta T_{CMIP}$") + _timeline(axes, i, scenario['period_bounds']) + + handles, labels = plt.gca().get_legend_handles_labels() + by_label = dict(zip(labels, handles)) # dict removes dupes + axes.legend(by_label.values(), by_label.keys()) + axes.set_xlabel('Year') + axes.set_ylabel(r'Global mean $\Delta T$ (K) w.r.t. reference period') + + # Save figure + filename = get_plot_filename('global_matching', cfg) + fig.savefig(filename, bbox_inches='tight', dpi=300) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(filename, provenance) + + +def save(output, cfg, provenance): + """Save the output as csv file.""" + scenarios = pd.DataFrame(output) + filename = get_diagnostic_filename('scenarios', cfg, extension='csv') + scenarios.to_csv(filename) + print(scenarios.round(2)) + print(f"Output written to {filename}") + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(filename, provenance) + + +def main(cfg): + """Return scenarios tables.""" + # A list of dictionaries describing all datasets passed on to the recipe + metadata = cfg['input_data'].values() + + # Get the average delta T of the target model + target_dts, provenance = mean_of_target_models(metadata) + + # Define the different scenario's + scenarios = [] + combinations = product(cfg['scenario_years'], cfg['scenario_percentiles']) + for year, percentile in combinations: + cmip_dt = get_cmip_dt(metadata, year, percentile) + bounds, target_dt = get_resampling_period(target_dts, cmip_dt) + + scenario = { + 'year': year, + 'percentile': percentile, + 'cmip_dt': cmip_dt, + 'period_bounds': bounds, + 'target_dt': float(target_dt), + 'pattern_scaling_factor': cmip_dt / target_dt + } + scenarios.append(scenario) + + # Save scenarios tables as csv file + save(scenarios, cfg, provenance) + + # Plot the results + make_plot(metadata, scenarios, cfg, provenance) + + +if __name__ == '__main__': + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/kcs/local_resampling.py b/esmvaltool/diag_scripts/kcs/local_resampling.py new file mode 100644 index 0000000000..0bf6260d65 --- /dev/null +++ b/esmvaltool/diag_scripts/kcs/local_resampling.py @@ -0,0 +1,504 @@ +"""Resample the target model for the selected time periods.""" +import logging +from itertools import product +from pathlib import Path + +import matplotlib.pyplot as plt +import numpy as np +import pandas as pd +import xarray as xr + +from 
esmvaltool.diag_scripts.shared import ( + ProvenanceLogger, + get_diagnostic_filename, + get_plot_filename, + run_diagnostic, + select_metadata, +) + +LOGGER = logging.getLogger(Path(__file__).name) + + +def _create_provenance_record(ancestor_files): + """Create a provenance record.""" + record = { + 'caption': "Resampling of local climate model.", + 'domains': ['global'], + 'authors': [ + 'kalverla_peter', + 'alidoost_sarah', + 'rol_evert', + 'daniels_emma', + ], + 'ancestors': ancestor_files, + } + return record + + +def _get_data_target_model(cfg): + """Load ensembles for each variable and merge.""" + LOGGER.info("Reading input data for target model") + dataset_dicts = cfg['input_data'].values() + dataset = [] + ancestor_files = [] + for short_name in "pr", "tas": + group = f'{short_name}_target' + var = select_metadata(dataset_dicts, variable_group=group) + files = [metadata['filename'] for metadata in var] + dataset.append( + xr.open_mfdataset(files, + concat_dim='ensemble_member', + combine='nested')) + ancestor_files.extend(files) + provenance = _create_provenance_record(ancestor_files) + return xr.merge(dataset).load(), provenance + + +def _segment(dataset, period, step=5): + """Compute season means for each n-year segment of input dataset.""" + segments = [] + for year in range(*period, step): + segments.append( + dataset.sel(time=slice(str(year), str(year + step - 1)))) + segmented_dataset = xr.concat(segments, dim='segment') + return segmented_dataset + + +def get_segment_season_means(cfg): + """Return a dict with segment season means for control and all futures. + + Read input data for the target model; extract segmented subsets for both + the control and future periods; precompute seasonal means for each segment. + + Store intermediate results; if files already exist, load them instead. + """ + dataset, provenance = _get_data_target_model(cfg) + + # Combine the future scenarios (name and resampling period) + # with the control period in a single dictionary + periods = { + name: info['resampling_period'] + for name, info in cfg['scenarios'].items() + } + periods['control'] = cfg['control_period'] + + # Get the segment season means for each of these periods + segments_season_means = {} + for name, period in periods.items(): + LOGGER.info("Get segment season means for %s", name) + filename = f"{cfg['run_dir']}/season_means_{name}.nc" + if Path(filename).exists(): + LOGGER.info("Found intermediate file %s", filename) + else: + LOGGER.info("Computing seasonal means for segmented dataset") + means = _segment(dataset, period).groupby('time.season').mean() + means.to_netcdf(filename) + LOGGER.info("Intermediate results stored as %s", filename) + segments_season_means[name] = xr.open_dataset(filename) + return segments_season_means, provenance + + +def _find_single_top1000(segment_means, target): + """Select n_combinations that are closest to the target. + + First, get all possible combinations, then select n_combinations. 
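+ (With, for example, 8 ensemble members and 6 segments this already + means 8**6 = 262144 candidate combinations.)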
+ Store the result under 'filename' + """ + n_segments = len(segment_means.segment) + n_members = len(segment_means.ensemble_member) + segment_indices = range(n_segments) + segment_means = segment_means.values # much faster indexing + + all_possible_combinations = product(range(n_members), repeat=n_segments) + results = [] + + for combination in all_possible_combinations: + results.append( + list(combination) + + [abs(segment_means[segment_indices, combination].mean() - target)]) + + # Create a pandas dataframe with the combinations and distance to target + dataframe = pd.DataFrame(results, + columns=list(segment_indices) + ['distance']) + top1000 = dataframe.sort_values('distance').head(1000) + return top1000 + + +def get_all_top1000s(cfg, segment_season_means): + """Return a dict with 1000 combinations for control and all futures. + + For control, these samples should have the same mean winter + precipitation as the overall mean of the x ensemble members. + + For the future periods, the target value is a relative change + with respect to the overall mean of the control period. + """ + # Create a dict of target values for control and all futures + control_segments = segment_season_means['control'].pr.sel(season='DJF') + control_mean = control_segments.mean().values + target_values = {'control': control_mean} + for name, info in cfg['scenarios'].items(): + target_values[name] = control_mean * (1 + info['dpr_winter'] / 100) + + # Find the 1000 recombinations that are closest to the target values + top1000s = {} + for name, target in target_values.items(): + LOGGER.info('Get 1000 recombinations for %s', name) + filename = f"{cfg['run_dir']}/top1000_{name}.csv" + if Path(filename).exists(): + LOGGER.info("Found intermediate file %s", filename) + else: + segments = segment_season_means[name].pr.sel(season='DJF') + top1000 = _find_single_top1000(segments, target) + top1000.to_csv(filename, index=False) + LOGGER.info("Intermediate results stored as %s.", filename) + top1000s[name] = pd.read_csv(filename) + return top1000s + + +def _index_with_xarray(combinations): + """Generate indexer for all selected combinations of segmented dataset. + + combinations: numpy 2d array with shape (n_combinations, n_segments) + + Note that numpy indexing is much faster than xarray labelled indexing, + but in this case it's nice to keep working with xarray labelled arrays. + """ + n_segments = len(combinations[0]) + + # Create a DataArray once... 
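+ # (reusing a single indexer and mutating it in place avoids + # reallocating coordinates for every combination)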
+ indices = xr.DataArray(data=np.zeros(n_segments), + dims=['segment'], + coords={'segment': np.arange(n_segments)}) + + # ...and update its values for each selected combination + for combination in combinations: + indices.values = combination + yield indices + + +def _season_means(combinations, segment_means): + """Compute summer pr, and summer and winter tas for recombined climates.""" + interesting_variables = [] + columns = ['combination', 'pr_summer', 'tas_winter', 'tas_summer'] + for combination in _index_with_xarray(combinations): + recombined_segments = segment_means.sel(ensemble_member=combination) + season_means = recombined_segments.mean('segment') + + interesting_variables.append([ + combination.values, + season_means.pr.sel(season='JJA').values, + season_means.tas.sel(season='DJF').values, + season_means.tas.sel(season='JJA').values + ]) + return pd.DataFrame(interesting_variables, columns=columns) + + +def _within_bounds(values, bounds): + """Return True where values are within the percentile bounds.""" + low, high = np.percentile(values, bounds) + return values.between(low, high) + + +def _get_subset(top1000, info, period): + """Select samples based on the percentile bounds. + + Select samples for which summer pr, and summer and winter tas are + within the percentile bounds specified in the recipe. + """ + pr_summer = _within_bounds(top1000['pr_summer'], + info[f'pr_summer_{period}']) + tas_winter = _within_bounds(top1000['tas_winter'], + info[f'tas_winter_{period}']) + tas_summer = _within_bounds(top1000['tas_summer'], + info[f'tas_summer_{period}']) + subset = top1000[np.all([pr_summer, tas_winter, tas_summer], axis=0)] + return subset + + +def get_percentile_subsets(cfg, segment_season_means, top1000s): + """Get subsets based on percentile ranges. + + For each set of 1000 samples, compute summer precipitation, and + summer and winter temperature. Then, for each scenario, select + samples for which summer precipitation, and summer and winter + temperature are within the percentile bounds specified in the + recipe. + """ + # Overwrite top1000s with seasonal mean characteristics + for name, dataframe in top1000s.items(): + LOGGER.info( + "Compute summer mean pr and summer and winter " + "mean tas for 1000 selected combinations for %s", name) + segment_means = segment_season_means[name] + combinations = dataframe.drop('distance', axis=1).values + top1000s[name] = _season_means(combinations, segment_means) + + # For each scenario, get a subset for the control and future period. + subsets = {} + for scenario, info in cfg['scenarios'].items(): + LOGGER.info("Get percentile-based subsets for scenario %s", scenario) + subsets[scenario] = { + 'control': _get_subset(top1000s['control'], info, 'control'), + 'future': _get_subset(top1000s[scenario], info, 'future') + } + return subsets + + +def _penalties(overlap): + """Determine penalties dependent on the number of overlaps.""" + return np.piecewise( + overlap, + condlist=[overlap < 3, overlap == 3, overlap == 4, overlap > 4], + funclist=[0, 1, 5, 100]) + + +def _best_subset(combinations, n_sample=8): + """Find n_sample samples with minimal reuse of ensemble members per segment. + + combinations: a pandas series with the remaining candidates + n_sample: the final number of samples drawn from the remaining set. + """ + # Convert series of 1d arrays to 2d array (much faster!)
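+ # (each row is one candidate sample: one ensemble-member index per segment)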
+ combinations = np.array( + [list(combination) for combination in combinations]) + + # Store the indices in a nice dataframe + n_segments = combinations.shape[1] + best_subset = pd.DataFrame( + data=np.empty((n_sample, n_segments), dtype=np.int64), + columns=[f'Segment {x}' for x in range(n_segments)], + index=[f'Combination {x}' for x in range(n_sample)]) + + # Random number generator + rng = np.random.default_rng() + + lowest_penalty = 500 # arbitrarily high starting value + for _ in range(10000): + subset = rng.choice(combinations, size=n_sample) + penalty = 0 + for segment in subset.T: + _, counts = np.unique(segment, return_counts=True) + penalty += _penalties(counts).sum() + if penalty < lowest_penalty: + lowest_penalty = penalty + best_subset.loc[:, :] = subset + + return best_subset + + +def select_final_subset(cfg, subsets, prov=None): + """Select samples with minimal reuse of ensemble segments. + + The final set of eight samples should have minimal reuse of the + same ensemble member for the same period. From 10,000 randomly + selected sets of 8 samples, count and penalize reused segments + (penalty 1 for threefold reuse, 5 for fourfold reuse, 100 beyond). + Choose the set with the lowest penalty. + """ + n_samples = cfg['n_samples'] + all_scenarios = {} + for scenario, dataframes in subsets.items(): + # Make a table with the final indices + LOGGER.info("Selecting %s final samples for scenario %s", n_samples, + scenario) + control = _best_subset(dataframes['control'].combination, n_samples) + future = _best_subset(dataframes['future'].combination, n_samples) + table = pd.concat([control, future], + axis=1, + keys=['control', 'future']) + all_scenarios[scenario] = table + + # Store the output + filename = get_diagnostic_filename(f'indices_{scenario}', + cfg, + extension='csv') + table.to_csv(filename) + LOGGER.info("Selected recombinations for scenario %s: \n %s", scenario, + table) + LOGGER.info('Output stored as %s', filename) + + # Write provenance information + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(filename, prov) + return all_scenarios + + +def _cmip_envelope(datasetlist, variable, target_year, control_period): + """Determine the change in PDF of each CMIP model. + + Note: using open_mfdataset is not possible due to the different calendars.
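+ Each model is therefore opened individually with xr.open_dataset.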
+ """ + cmip = select_metadata(datasetlist, variable_group=f'{variable}_cmip') + envelope = [] + ancestors = [] + for data_dict in cmip: + dataset = xr.open_dataset(data_dict['filename'])[variable] + control = dataset.sel(time=slice(str(control_period[0]), + str(control_period[1]))) + future = dataset.sel(time=slice(str(target_year - + 15), str(target_year + 15))) + + quantiles = [.05, .1, .25, .5, .75, .90, .95] + qcontrol = control.groupby('time.season').quantile(quantiles) + qfuture = future.groupby('time.season').quantile(quantiles) + + if variable == 'tas': + # absolute diff + envelope.append(qfuture - qcontrol) + else: + # pr; relative diff + envelope.append((qfuture - qcontrol) / qcontrol * 100) + ancestors.append(data_dict['filename']) + + cmip = xr.concat(envelope, dim='multimodel') + provenance = _create_provenance_record(ancestors) + + # Prevent confusion between dimension 'quantile' and method 'quantile' + return cmip.rename({'quantile': 'percentile'}), provenance + + +def _recombine(segments, combinations): + """Recombine segments according to the final combinations.""" + n_segments = len(segments.segment) + new_climates = [] + for _, indices in combinations.iterrows(): + # Create indexer array + indexer = xr.DataArray(indices, + dims=['segment'], + coords={'segment': range(n_segments)}) + + # Recombine the segments using the indexer + resample = segments.sel(ensemble_member=indexer).mean('segment', + keep_attrs=True) + new_climates.append(resample) + return xr.concat(new_climates, dim='sample') + + +def _get_climatology(cfg, scenario_name, table, prov=None): + """Determine the change in PDF of each scenario. + + Save the resampled climates of each scenario to nc files. + """ + dataset, _ = _get_data_target_model(cfg) + + future = cfg['scenarios'][scenario_name]['resampling_period'] + segments_control = _segment(dataset, cfg['control_period']) + segments_future = _segment(dataset, future) + + resampled_control = _recombine(segments_control, table['control']) + resampled_future = _recombine(segments_future, table['future']) + # Store the resampled control climates + filename = get_diagnostic_filename(f'resampled_control_{scenario_name}', + cfg, + extension='nc') + resampled_control.to_netcdf(filename) + LOGGER.info("Created control resamples for scenario %s: \n %s", + scenario_name, table) + LOGGER.info('Output stored as %s', filename) + # # Write provenance information + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(filename, prov) + + # Store the resampled future climates + filename = get_diagnostic_filename(f'resampled_future_{scenario_name}', + cfg, + extension='nc') + resampled_future.to_netcdf(filename) + LOGGER.info("Created future resamples for scenario %s: \n %s", + scenario_name, table) + LOGGER.info('Output stored as %s', filename) + # # Write provenance information + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(filename, prov) + + quantiles = [.05, .1, .25, .5, .75, .90, .95] + qcontrol = resampled_control.groupby('time.season').quantile( + quantiles, dim=['sample', 'time']) + qfuture = resampled_future.groupby('time.season').quantile( + quantiles, dim=['sample', 'time']) + + qchange_tas = (qfuture - qcontrol).tas + qchange_pr = ((qfuture - qcontrol) / qcontrol * 100).pr + return xr.merge([qchange_tas, qchange_pr]) + + +def get_climatologies(cfg, scenario_tables, prov=None): + """Determine the changes in PDF of all scenarios.""" + climates = {} + for name in cfg['scenarios'].keys(): + climatology = 
_get_climatology(cfg, name, table=scenario_tables[name], + prov=prov) + climates[name] = climatology + return climates + + +def make_plots(cfg, climates): + """Reproduce figure 5 from the paper.""" + # Note that quantile is applied twice! Once to get the pdf's of seasonal + # tas/pr and once to get the multimodel pdf of the quantile changes + metadata = cfg['input_data'].values() + + years = np.unique([scenario['scenario_year'] + for scenario in cfg['scenarios'].values()]) + for year in years: + fig, subplots = plt.subplots(2, 2, figsize=(12, 8)) + + for row, variable in zip(subplots, ['pr', 'tas']): + cmip, prov = _cmip_envelope(metadata, variable, year, + cfg['control_period']) + + for axes, season in zip(row, ['DJF', 'JJA']): + percentiles = cmip.percentile.values + xlocs = np.arange(len(percentiles)) + + # Plot the cmip envelope + seasondata = cmip.sel(season=season) + for high, low in [[0.9, 0.1], [0.75, 0.25]]: + upper = seasondata.quantile(high, dim='multimodel') + lower = seasondata.quantile(low, dim='multimodel') + axes.fill_between(xlocs, upper, lower, color='k', alpha=.3) + axes.set_title(f'{variable} / {season}') + + # Plot the recombined scenarios + for name, info in cfg['scenarios'].items(): + if year == info['scenario_year']: + climate = climates[name].sel(season=season)[variable] + axes.plot(xlocs, climate, lw=3, label=name) + + axes.set_xticks(xlocs) + axes.set_xticklabels([f'P{100*x:02.0f}' for x in percentiles]) + subplots[0, 0].set_ylabel('change (%)') + subplots[1, 0].set_ylabel('change (K)') + subplots[1, 1].legend() + filename = get_plot_filename(f'local_validation_{year}', cfg) + fig.suptitle(f'Year: {year}') + fig.savefig(filename, bbox_inches='tight', dpi=300) + LOGGER.info("Envelope figure stored as %s", filename) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(filename, prov) + + +def main(cfg): + """Resample the model of interest.""" + # Step 0: extract segmented subsets and precompute seasonal means + segment_season_means, provenance = get_segment_season_means(cfg) + + # Step 1: get 1000 combinations + top1000s = get_all_top1000s(cfg, segment_season_means) + + # Step 2: select samples based on the the percentile bounds + subsets = get_percentile_subsets(cfg, segment_season_means, top1000s) + + # Step 3: select final set of eight samples + scenario_tables = select_final_subset(cfg, subsets, prov=provenance) + + # Step 4: create the resampled climates + climates = get_climatologies(cfg, scenario_tables, prov=provenance) + + # Step 5: plot the results + make_plots(cfg, climates) + + +if __name__ == '__main__': + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/land_carbon_cycle/diag_global_turnover.py b/esmvaltool/diag_scripts/land_carbon_cycle/diag_global_turnover.py new file mode 100644 index 0000000000..d703c87a24 --- /dev/null +++ b/esmvaltool/diag_scripts/land_carbon_cycle/diag_global_turnover.py @@ -0,0 +1,822 @@ +"""Function to compare global distributions of turnover time.""" + +import os.path + +import cartopy.crs as ccrs +import iris +import matplotlib as mpl +import matplotlib.pyplot as plt +import numpy as np +import scipy.stats as stats +from iris import NameConstraint + +import esmvaltool.diag_scripts.land_carbon_cycle.plot_utils as plut +from esmvaltool.diag_scripts.land_carbon_cycle.provenance import ( + _get_ancestor_files, + _get_provenance_record, +) +from esmvaltool.diag_scripts.land_carbon_cycle.shared import ( + _apply_common_mask, + _load_variable, + _remove_invalid, +) +from 
esmvaltool.diag_scripts.shared import ( + ProvenanceLogger, + get_diagnostic_filename, + get_plot_filename, + group_metadata, + run_diagnostic, +) + +# set the properties of the lines used for hatching +mpl.rcParams['hatch.color'] = 'yellow' +mpl.rcParams['hatch.linewidth'] = 0.7 + + +# Figure settings and colorbar info +def _get_diagonal_colorbar_info(): + """ + Get dictionary of colormap and colorbar information for diagonal maps. + + needed for plotting the maps along the diagonal, i.e., the maps of turnover + time + """ + cb_info_diagonal = {} + cb_name = 'plasma_r' + cb_info_diagonal['tickBounds'] = np.concatenate( + ([1], np.linspace(8, 16, num=10)[:-1], np.linspace(16, 32, + num=10)[:-1], + np.linspace(32, 64, num=10)[:-1], np.linspace(64, 128, num=10)[:-1], + np.linspace(128, 256, + num=10)[:-1], np.linspace(256, 1000, num=2, + endpoint=True))) + cb_info_diagonal['ticksLoc'] = np.array([1, 8, 16, 32, 64, 128, 256]) + clist_ = plut.get_colomap(cb_name, + cb_info_diagonal['tickBounds'], + lowp=0., + hip=1) + cb_info_diagonal['colMap'] = mpl.colors.ListedColormap(clist_) + return cb_info_diagonal + + +def _get_fig_config(diag_config): + """ + Get figure setting and configurations. + + default settings of the figure, and replace default with + runtime settings from recipe + + Argument: + -------- + diag_config - nested dictionary of metadata + + Return: + ------ + a dictionary of settings + """ + nmodels = len(group_metadata(diag_config['input_data'].values(), + 'dataset')) + 1 + w_pl = 1. / nmodels + h_pl = w_pl + aspect_map = 0.5 + + fig_config = { + # generic settings + 'ax_fs': 7.1, + 'fill_value': np.nan, + # settings of the figure and maps + 'x0': 0.02, + 'y0': 1.0, + 'wp': w_pl, + 'hp': h_pl, + 'xsp': 0.0, + 'ysp': -0.03, + 'aspect_map': aspect_map, + # settings for the location of scatterplots + 'xsp_sca': w_pl / 3 * aspect_map, + 'ysp_sca': h_pl / 3 * aspect_map, + # colorbar specific settings + 'hcolo': 0.0123, + 'wcolo': 0.25, + 'cb_off_y': 0.06158, + 'x_colo_d': 0.02, + 'x_colo_r': 0.76, + 'y_colo_single': 0.1086, + # the correlation method for metric + # given in the title of the scatterplot + 'correlation_method': 'spearman', + 'tx_y_corr': 1.075, + # define the range of data and masks + 'valrange_sc': (2, 256), + 'obs_global': 23, + 'gpp_threshold': 0.01 + } + # replace default values with those provided in recipe + fig_config.update(diag_config.get('fig_config')) + return fig_config + + +def _get_ratio_colorbar_info(): + """ + Get dictionary of colormap and colorbar information for off-diagonal maps. + + The maps of ratios above the diagonal. 
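+ (Blues mark ratios below one, a grey band marks ratios near one, and + reds mark ratios above one.)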
+ """ + cb_info_ratio = {} + border = 0.9 + ncolo = 128 + num_gr = int(ncolo // 4) + num_col = num_gr - 4 + # get the colormap + cb_info_ratio['tickBounds'] = np.concatenate( + (np.geomspace(0.2, 0.25, + num=num_col), np.geomspace(0.25, 0.33, num=num_col), + np.geomspace(0.33, 0.5, + num=num_col), np.geomspace(0.5, border, num=num_col), + np.linspace(border, 1 / border, + num=num_gr), np.geomspace(1 / border, 2, num=num_col), + np.geomspace(2, 3, num=num_col), np.geomspace(3, 4, num=num_col), + np.geomspace(4, 5, num=num_col))) + colors1 = plt.cm.Blues(np.linspace(0.15, 0.998, (num_col) * 4))[::-1] + colorsgr = np.tile(np.array([0.8, 0.8, 0.8, 1]), + num_gr).reshape(num_gr, -1) + colors2 = plt.cm.Reds(np.linspace(0.15, 0.998, (num_col) * 4)) + + # combine them and build a new colormap + colors1g = np.vstack((colors1, colorsgr)) + colors = np.vstack((colors1g, colors2)) + cb_info_ratio['colMap'] = mpl.colors.LinearSegmentedColormap.from_list( + 'my_colormap', colors) + cb_info_ratio['ticksLoc'] = [0.2, 0.25, 0.33, 0.5, 0.9, 1.1, 2, 3, 4, 5] + cb_info_ratio['ticksLab'] = [ + '$\\dfrac{1}{5}$', '$\\dfrac{1}{4}$', '$\\dfrac{1}{3}$', + '$\\dfrac{1}{2}$', '$\\dfrac{1}{1.1}$', '$1.1$', '$2$', '$3$', '$4$', + '$5$' + ] + return cb_info_ratio + + +def _get_agreement_mask(mmdat, dat_5, dat_95, nmodel_reject=2): + """ + Get mask of multimodel agreement. + + Finds regions where fewer than one quarter of the model + simulations are outside the range of observational uncertainty. + """ + _maskf = np.zeros_like(mmdat) + _maskf[(mmdat < dat_95) & (mmdat > dat_5)] = 1 + num_count = _maskf.sum(0) + agreement_mask = np.zeros_like(num_count) + agreement_mask[num_count < nmodel_reject] = 1 + wnan = np.ma.masked_invalid(dat_5).mask + agreement_mask[wnan] = 0. + return agreement_mask + + +def _get_hex_data(dat_1, dat_2, fig_config): + """ + Get data for density plots. + + Requires that both the arrays have the same mask with regards to valid data + points + """ + dat_1[(dat_1 < fig_config['valrange_sc'][0] * 0.5)] = np.nan + dat_1[(dat_1 > fig_config['valrange_sc'][1] * 1.5)] = np.nan + dat_2[(dat_2 < fig_config['valrange_sc'][0] * 0.5)] = np.nan + dat_2[(dat_2 > fig_config['valrange_sc'][1] * 1.5)] = np.nan + dat_1, dat_2 = _apply_common_mask(dat_1, dat_2) + dat_1mc = np.ma.masked_equal(dat_1, np.nan).compressed() + dat_2mc = np.ma.masked_equal(dat_2, np.nan).compressed() + return dat_1mc, dat_2mc + + +def _get_obs_data(diag_config): + """ + Get and handle the observations of turnover time from Carvalhais 2014. 
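+
+    The observation files are expected in ``auxiliary_data_dir`` under
+    ``obs_info['obs_data_subdir']``, with names composed from the
+    ``obs_info`` keys (variable, frequency, source_label, variant_label,
+    grid_label), e.g. a hypothetical
+    ``tau_ctotal_fx_Carvalhais2014_BE_gn.nc`` (an illustrative name only;
+    the actual values come from the recipe).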
+ + Argument: + -------- + diag_config - nested dictionary of metadata + + Return: + ------ + dictionary with observation data with different variables as keys + """ + if not diag_config.get('obs_variable'): + raise ValueError('The observation variable needs to be specified in ' + 'the recipe (see recipe description for details)') + + obs_dir = os.path.join(diag_config['auxiliary_data_dir'], + diag_config['obs_info']['obs_data_subdir']) + + all_data = {} + all_data['global'] = {} + all_data['grid'] = {} + fig_config = _get_fig_config(diag_config) + var_list = diag_config.get('obs_variable') + + input_files = [] + for _var in var_list: + var_list = np.append(var_list, '{var}_{perc:d}'.format(var=_var, + perc=5)) + var_list = np.append(var_list, '{var}_{perc:d}'.format(var=_var, + perc=95)) + obs_filename = (f'{_var}_{{frequency}}_{{source_label}}_' + f'{{variant_label}}_{{grid_label}}.nc'.format( + **diag_config['obs_info'])) + input_files = np.append(input_files, + os.path.join(obs_dir, obs_filename)) + nvars = len(var_list) + for v_ind in range(nvars): + var_obs = var_list[v_ind] + all_data['coords'] = {} + variable_constraint = NameConstraint(var_name=var_obs) + cube = iris.load_cube(input_files, constraint=variable_constraint) + all_data['grid'][var_obs] = cube + all_data['global'][var_obs] = fig_config['obs_global'] + for coord in cube.coords(): + all_data['coords'][coord.name()] = coord.points + + all_data['input_files'] = input_files + return all_data + + +def _calc_turnover(ctotal, gpp, _model): + """ + Calculate the turnover time from ctotal and gpp. + + Argument: + -------- + ctotal- iris cube of total carbon stock + gpp - iris cube of gross primary productivity + + Return: + ------ + tau_ctotal - iris cube of turnover time in years + """ + # calculate turnover and convert units to yr + tau_ctotal = (ctotal / gpp) + tau_ctotal.convert_units('yr') + + # set the attributes + tau_ctotal.var_name = 'tau_ctotal' + tau_ctotal.standard_name = None + tau_ctotal.long_name = 'ecosystem_carbon_turnover_time' + tau_ctotal.units = 'yr' + + return tau_ctotal + + +def _fix_map(axis_obj): + """ + Beautify map object. + + Clean boundaries, coast lines, and removes the outline box/circle. + """ + axis_obj.set_global() + axis_obj.coastlines(linewidth=0.4, color='grey') + plt.gca().spines["geo"].set_visible(False) + return axis_obj + + +def _get_data_to_plot(_data): + """ + Get data to plot on map. + + Correct for the rotations of latitude and longitude. + """ + xroll = _data.shape[1] / 2 + _data = np.roll(np.flipud(_data), int(xroll), axis=1) + return _data + + +def _get_matrix_map_axes(_row_m, _col_m, _fig_config): + """ + Get the axes object for matrix maps. 
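+
+    The cell position decides the axes type: the diagonal and the cells
+    above it (``_row_m > _col_m``) get a Robinson map projection, whereas
+    the cells below it (``_row_m < _col_m``) get plain rectangular axes
+    that hold the density scatterplots.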
+ + Argument: + -------- + _row_m - row location in the matrix + _col_m - column location in the matrix + _fig_config - figure settings + + Return: + ------ + _ax - an axes object + """ + if _row_m == _col_m: + _ax = plt.axes([ + _fig_config['x0'] + _row_m * _fig_config['wp'] + + _row_m * _fig_config['xsp'], _fig_config['y0'] - + (_col_m * _fig_config['hp'] + _col_m * _fig_config['ysp']), + _fig_config['wp'], _fig_config['hp'] + ], projection=ccrs.Robinson(central_longitude=0), frameon=False) + if _row_m < _col_m: + _ax = plt.axes([ + _fig_config['x0'] + _row_m * _fig_config['wp'] + + _row_m * _fig_config['xsp'] + _fig_config['xsp_sca'], + _fig_config['y0'] - + (_col_m * _fig_config['hp'] + _col_m * _fig_config['ysp']) + + _fig_config['ysp_sca'], + _fig_config['wp'] * _fig_config['aspect_map'], + _fig_config['hp'] * _fig_config['aspect_map'] + ]) + + if _row_m > _col_m: + _ax = plt.axes([ + _fig_config['x0'] + _row_m * _fig_config['wp'] + + _row_m * _fig_config['xsp'], _fig_config['y0'] - + (_col_m * _fig_config['hp'] + _col_m * _fig_config['ysp']), + _fig_config['wp'], _fig_config['hp'] + ], projection=ccrs.Robinson(central_longitude=0), frameon=False) + return _ax + + +def _fix_matrix_axes(row_m, col_m, models, nmodels, diag_config, fig_config): + """Fix the axes lines and titles in matrix maps.""" + row_mod = models[row_m] + col_mod = models[col_m] + if row_m != 0 and col_m != nmodels - 1: + plut.ax_clr() + plut.rotate_labels(which_ax='x', axfs=fig_config['ax_fs'], rot=90) + elif row_m == 0 and col_m != nmodels - 1: + plut.ax_clr_x(axfs=fig_config['ax_fs']) + plut.rotate_labels(which_ax='x', axfs=fig_config['ax_fs'], rot=90) + elif col_m == nmodels - 1 and row_m != 0: + plut.ax_clr_y(axfs=fig_config['ax_fs']) + plut.rotate_labels(which_ax='x', axfs=fig_config['ax_fs'], rot=90) + if row_m == 0 and col_m == nmodels - 1: + plut.ax_orig(axfs=fig_config['ax_fs']) + plut.rotate_labels(which_ax='x', axfs=fig_config['ax_fs'], rot=90) + plt.ylabel('$model_{column}$', fontsize=fig_config['ax_fs']) + plt.xlabel('$model_{row}$', fontsize=fig_config['ax_fs']) + if col_m == 0: + if row_mod == 'obs': + _title_sp = diag_config['obs_info']['source_label'] + else: + _title_sp = row_mod + plt.title(str(row_m + 1) + ': ' + _title_sp, + fontsize=0.809 * fig_config['ax_fs']) + if row_m == nmodels - 1: + if col_mod == 'obs': + _title_sp = diag_config['obs_info']['source_label'] + else: + _title_sp = col_mod + _title_sp = str(col_m + 1) + t_x = plt.gca().text(1.1, + 0.5, + _title_sp, + fontsize=0.809 * fig_config['ax_fs'], + va='center', + ha='center', + transform=plt.gca().transAxes) + else: + t_x = '' + + return t_x + + +def _draw_121_line(): + """Draw 1:1 line on the current axis.""" + ymin, ymax = plt.ylim() + xmin, xmax = plt.xlim() + plt.plot((xmin, xmax), (ymin, ymax), 'k', lw=0.1) + + +def _plot_matrix_map(plot_path_matrix, global_tau_mod, global_tau_obs, + diag_config): + """ + Plot the matrix of maps model-observation full factorial comparison. 
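+
+    The matrix shows the maps of turnover time on the diagonal, hexbin
+    density plots of one dataset against another below the diagonal, and
+    maps of the ratio between two datasets above the diagonal, with one
+    row/column per dataset (observation first).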
+ + Argument: + -------- + diag_config - nested dictionary of metadata + cube - the cube to plot + dataset - name of the dataset to plot + """ + fig_config = _get_fig_config(diag_config) + models = list(global_tau_mod['grid'].keys()) + models = sorted(models, key=str.casefold) + multimodel_stats = 'MultiModelMedian MultiModelMean'.split() + for _mm in multimodel_stats: + if _mm in models: + models.append(models.pop(models.index(_mm))) + models.insert(0, 'obs') + + global_tau_mod['grid']['obs'] = global_tau_obs['grid']['tau_ctotal'] + global_tau_mod['global']['obs'] = global_tau_obs['global']['tau_ctotal'] + nmodels = len(models) + + # define the data and information for plotting ratios + cb_info_ratio = _get_ratio_colorbar_info() + + # get the colormap for diagonal maps + cb_info_diagonal = _get_diagonal_colorbar_info() + + plt.figure(figsize=(9, 6)) + for row_m in range(nmodels): + dat_row = global_tau_mod['grid'][models[row_m]].data + for col_m in range(nmodels): + dat_col = global_tau_mod['grid'][models[col_m]].data + _ax = _get_matrix_map_axes(row_m, col_m, fig_config) + # plot the maps along the diagonal + if row_m == col_m: + plt.imshow(_get_data_to_plot(dat_row), + norm=mpl.colors.BoundaryNorm( + cb_info_diagonal['tickBounds'], + len(cb_info_diagonal['tickBounds'])), + cmap=cb_info_diagonal['colMap'], + origin='upper', + transform=ccrs.PlateCarree()) + _fix_map(_ax) + + # plot the scatterplot/density plot below the diagonal + if row_m < col_m: + dat1h, dat2h = _get_hex_data(dat_col, dat_row, fig_config) + _ax.hexbin(dat1h, + dat2h, + bins='log', + mincnt=3, + gridsize=40, + cmap='viridis_r', + linewidths=0) + plt.ylim(fig_config['valrange_sc'][0], + fig_config['valrange_sc'][1] * 1.05) + plt.xlim(fig_config['valrange_sc'][0], + fig_config['valrange_sc'][1] * 1.05) + _draw_121_line() + if fig_config['correlation_method'] == 'pearson': + corr = (stats.pearsonr(dat1h, dat2h)[0])**2 + else: + corr = (stats.spearmanr(dat1h, dat2h)[0])**2 + plt.title('$R^2$={corr:.2f}'.format(corr=corr), + fontsize=fig_config['ax_fs'] * 0.953, + ma='left', + y=fig_config['tx_y_corr'], + va="top") + + # plot the maps of ratio of models and observation above the + # diagonal + if row_m > col_m: + plot_dat = _remove_invalid(dat_row / dat_col, + fill_value=fig_config['fill_value']) + _ax.imshow(_get_data_to_plot(plot_dat), + norm=mpl.colors.BoundaryNorm( + cb_info_ratio['tickBounds'], + len(cb_info_ratio['tickBounds'])), + interpolation='none', + cmap=cb_info_ratio['colMap'], + origin='upper', + transform=ccrs.PlateCarree()) + _fix_map(_ax) + t_x = _fix_matrix_axes(row_m, col_m, models, nmodels, diag_config, + fig_config) + + # plot the colorbar for maps along the diagonal + y_colo = fig_config['y0'] + fig_config['hp'] + fig_config['cb_off_y'] + _axcol_dia = [ + fig_config['x_colo_d'], y_colo, fig_config['wcolo'], + fig_config['hcolo'] + ] + cb_tit_d = '{name} ({unit})'.format( + name=global_tau_mod['grid'][models[col_m]].long_name, + unit=global_tau_mod['grid'][models[col_m]].units) + col_bar = plut.mk_colo_tau(_axcol_dia, + cb_info_diagonal['tickBounds'], + cb_info_diagonal['colMap'], + tick_locs=cb_info_diagonal['ticksLoc'], + cbfs=0.86 * fig_config['ax_fs'], + cbtitle=cb_tit_d, + cbrt=90) + + # plot the colorbar for maps above the diagonal + y_colo = fig_config['y0'] + fig_config['hp'] + fig_config['cb_off_y'] + _axcol_rat = [ + fig_config['x_colo_r'], y_colo, fig_config['wcolo'], + fig_config['hcolo'] + ] + col_bar = plut.mk_colo_cont( + _axcol_rat, + cb_info_ratio['tickBounds'], + 
cb_info_ratio['colMap'], + cbfs=0.7 * fig_config['ax_fs'], + cbrt=90, + col_scale='log', + cbtitle='ratio ($model_{column}$/$model_{row}$)', + tick_locs=cb_info_ratio['ticksLoc']) + col_bar.ax.set_xticklabels(cb_info_ratio['ticksLab'], + fontsize=0.86 * fig_config['ax_fs'], + ha='center', + rotation=0) + + # save and close figure + plut.save_figure(plot_path_matrix, _extr_art=[t_x]) + plt.close() + + +def _plot_multimodel_agreement(plot_path_multimodel, global_tau_mod, + global_tau_obs, diag_config): + """ + Plot map of multimodel bias and multimodel agreement. + + Argument: + -------- + global_tau_mod - dictionary of all model data + global_tau_obs - dictionary of observed data + diag_config - nested dictionary of metadata + """ + # get the settings for plotting figure + fig_config = _get_fig_config(diag_config) + + # get the observation data needed to calculate the bias and multimodel + # agreement + obs_var = diag_config.get('obs_variable')[0] + tau_obs = global_tau_obs['grid'][obs_var].data + tau_obs_5 = global_tau_obs['grid'][obs_var + '_5'].data + tau_obs_95 = global_tau_obs['grid'][obs_var + '_95'].data + + # set the information of the colormap used for plotting bias + cb_info = _get_ratio_colorbar_info() + + # calculate the bias of multimodel median turnover time + models = list(global_tau_mod['grid'].keys()) + + # remove multimodel estimates from the list of models + multimodel_stats = 'MultiModelMedian MultiModelMean'.split() + for _mm in multimodel_stats: + if _mm in models: + models.remove(_mm) + + nmodels = len(models) + dat_tau_full = np.ones((nmodels, np.shape(tau_obs)[0], + np.shape(tau_obs)[1])) * fig_config['fill_value'] + for row_m in range(nmodels): + row_mod = models[row_m] + dat_tau = global_tau_mod['grid'][row_mod] + dat_tau_full[row_m] = _remove_invalid( + dat_tau.data, fill_value=fig_config['fill_value']) + + mm_tau = _remove_invalid(np.nanmedian(dat_tau_full, axis=0), + fill_value=fig_config['fill_value']) + mm_bias_tau = mm_tau / tau_obs + mm_bias_tau = _remove_invalid(mm_bias_tau, + fill_value=fig_config['fill_value']) + + # define figure and main axis to plot the map + plt.figure(figsize=(5, 3)) + _ax = plt.axes([0.1, 0.1, 0.9, 0.9], + projection=ccrs.Robinson(central_longitude=0), + frameon=False) + + # plot the data of multimodel bias (=bias of multimodel median turnover + # time) + _ax.imshow(_get_data_to_plot(mm_bias_tau), + norm=mpl.colors.BoundaryNorm(cb_info['tickBounds'], + len(cb_info['tickBounds'])), + interpolation='none', + cmap=cb_info['colMap'], + origin='upper', + transform=ccrs.PlateCarree()) + _fix_map(_ax) + + # get the model agreement mask (less than quarter of the model within the + # observational uncertainty) + agreement_mask_tau = _get_agreement_mask(dat_tau_full, + tau_obs_5, + tau_obs_95, + nmodel_reject=int(nmodels / 4)) + + # plot the hatches for uncertainty/multimodel agreement + lats = global_tau_obs['coords']['latitude'] + lons = global_tau_obs['coords']['longitude'] + latint = abs(lats[1] - lats[0]) + lonint = abs(lons[1] - lons[0]) + x_lat, y_lon = np.meshgrid(lons - lonint / 2, lats - latint / 2) + + _ax.contourf(x_lat, + y_lon, + agreement_mask_tau, + levels=[0, 0.5, 1], + alpha=0., + hatches=['', '//////'], + linewidth=0.2, + transform=ccrs.PlateCarree()) + + title_str = ('multimodel bias and agreement (-)\n{title}'.format( + title=global_tau_obs['grid']['tau_ctotal'].long_name)) + plt.title(title_str, fontsize=0.98 * fig_config['ax_fs']) + + # plot colorbar using extraUtils + _axcol_rat = [0.254, fig_config['y_colo_single'], 
0.6, 0.035]
+
+    col_bar = plut.mk_colo_cont(_axcol_rat,
+                                cb_info['tickBounds'],
+                                cb_info['colMap'],
+                                cbfs=0.8 * fig_config['ax_fs'],
+                                cbrt=90,
+                                col_scale='log',
+                                cbtitle='',
+                                tick_locs=cb_info['ticksLoc'])
+    col_bar.ax.set_xticklabels(cb_info['ticksLab'],
+                               fontsize=0.9586 * fig_config['ax_fs'],
+                               ha='center',
+                               rotation=0)
+
+    # save and close figure
+    t_x = plt.figtext(0.5, 0.5, ' ', transform=plt.gca().transAxes)
+    plut.save_figure(plot_path_multimodel, _extr_art=[t_x])
+    plt.close()
+
+
+def _plot_single_map(plot_path, _dat, _datglobal, _name, provenance_record,
+                     diag_config):
+    """
+    Plot a map for a given variable.
+
+    Argument:
+    --------
+    plot_path - path where the figure is saved
+    _dat - cube of the variable to plot
+    _datglobal - global value of the variable
+    _name - name of the dataset to plot
+    provenance_record - provenance record of the plot
+    diag_config - nested dictionary of metadata
+    """
+    # figure configuration
+    fig_config = _get_fig_config(diag_config)
+
+    # colormap configuration
+    cb_info = _get_diagonal_colorbar_info()
+
+    # define the figure and axis
+    plt.figure(figsize=(5, 3))
+    _ax = plt.axes([0.1, 0.1, 0.9, 0.9],
+                   projection=ccrs.Robinson(central_longitude=0),
+                   frameon=False)
+    # plot data over the map
+    plt.imshow(_get_data_to_plot(_dat.data),
+               norm=mpl.colors.BoundaryNorm(cb_info['tickBounds'],
+                                            len(cb_info['tickBounds'])),
+               cmap=cb_info['colMap'],
+               origin='upper',
+               transform=ccrs.PlateCarree())
+    _fix_map(_ax)
+
+    # get the data and set the title of the map
+    _dat_median = np.nanmedian(
+        _remove_invalid(_dat.data, fill_value=fig_config['fill_value']))
+    title_str = (f'{_dat.long_name} ({_dat.units}), {_name},\n'
+                 f'global = {_datglobal:.2f}, median = {_dat_median:.2f}')
+
+    plt.title(title_str, fontsize=0.98 * fig_config['ax_fs'])
+
+    # draw the colorbar
+    _axcol_dia = [0.254, fig_config['y_colo_single'], 0.6, 0.035]
+    plut.mk_colo_tau(_axcol_dia,
+                     cb_info['tickBounds'],
+                     cb_info['colMap'],
+                     tick_locs=cb_info['ticksLoc'],
+                     cbfs=0.86 * fig_config['ax_fs'],
+                     cbtitle='',
+                     cbrt=90)
+
+    # save and close figure
+    t_x = plt.figtext(0.5, 0.5, ' ', transform=plt.gca().transAxes)
+    plut.save_figure(plot_path, _extr_art=[t_x])
+    plt.close()
+    with ProvenanceLogger(diag_config) as provenance_logger:
+        provenance_logger.log(plot_path, provenance_record)
+
+
+def main(diag_config):
+    """
+    Evaluate global distribution of ecosystem carbon turnover time.
+
+    Argument:
+    --------
+    diag_config - nested dictionary of metadata
+    """
+    model_data_dict = group_metadata(diag_config['input_data'].values(),
+                                     'dataset')
+
+    # get the data from the observation
+    global_tau_obs = _get_obs_data(diag_config)
+    base_name = ('{title}_{source_label}_'
+                 '{grid_label}'.format(
+                     title=global_tau_obs['grid']['tau_ctotal'].long_name,
+                     source_label=diag_config['obs_info']['source_label'],
+                     grid_label=diag_config['obs_info']['grid_label']))
+
+    global_tau_mod = {}
+    global_tau_mod['grid'] = {}
+    global_tau_mod['global'] = {}
+
+    provenance_record_matrix = _get_provenance_record(
+        "Matrix comparison of global distributions of turnover time of"
+        " carbon",
+        ['mean', 'perc'], ['global'],
+        _get_ancestor_files(diag_config, 'tau_ctotal'))
+
+    provenance_record_multimodel = _get_provenance_record(
+        "Multimodel bias and agreement of global distributions of turnover"
+        " time of carbon. Reproduces figure 3 in Carvalhais et al. (2014).",
+        ['mean', 'perc'], ['global'],
+        _get_ancestor_files(diag_config, 'tau_ctotal'))
+
+    for model_name, model_dataset in model_data_dict.items():
+        global_tau_mod[model_name] = {}
+
+        # load the data
+        ctotal = _load_variable(model_dataset, 'ctotal')
+        gpp = _load_variable(model_dataset, 'gpp')
+        tau_ctotal = _calc_turnover(ctotal, gpp, model_name)
+        global_tau_mod['grid'][model_name] = tau_ctotal
+
+        # apply the GPP threshold and set the data in dictionary
+        gpp_global = gpp.collapsed(['latitude', 'longitude'],
+                                   iris.analysis.SUM)
+        ctotal_global = ctotal.collapsed(['latitude', 'longitude'],
+                                         iris.analysis.SUM)
+        tau_global = ctotal_global / gpp_global
+        tau_global.convert_units('yr')
+
+        # since dask=2023.3 there is an issue with converting the core_data()
+        # to float; I have not managed to pinpoint the issue neither in dask
+        # nor in iris, since minimal test cases are not reproducing it
+        # this is a scalar cube so no big mem issue by realizing the data
+        # global_tau_mod['global'][model_name] = float(tau_global.core_data())
+        global_tau_mod['global'][model_name] = float(tau_global.data)
+
+        base_name_mod = (
+            'global_{title}_{source_label}_'
+            '{grid_label}'.format(
+                title=global_tau_obs['grid']['tau_ctotal'].long_name,
+                source_label=model_name,
+                grid_label=diag_config['obs_info']['grid_label']))
+        plot_path_mod = get_plot_filename(base_name_mod, diag_config)
+        # plot_path_list.append(plot_path_mod)
+        provenance_record_mod = _get_provenance_record(
+            "Map of global distribution of turnover time of carbon",
+            ['mean', 'perc'],
+            ['global'],
+            {model_name: model_dataset})
+        _plot_single_map(plot_path_mod, tau_ctotal,
+                         global_tau_mod['global'][model_name],
+                         model_name,
+                         provenance_record_mod,
+                         diag_config)
+
+        model_cubes = [
+            c for c in global_tau_mod['grid'].values()
+            if isinstance(c, iris.cube.Cube)
+        ]
+        obs_cubes = [
+            c for c in global_tau_obs['grid'].values()
+            if isinstance(c, iris.cube.Cube)
+        ]
+        netcdf_path = get_diagnostic_filename(base_name_mod, diag_config)
+        save_cubes = iris.cube.CubeList(model_cubes + obs_cubes)
+        iris.save(save_cubes, netcdf_path)
+
+        with ProvenanceLogger(diag_config) as provenance_logger:
+            provenance_logger.log(netcdf_path, provenance_record_mod)
+
+    # multimodel agreement
+    base_name_multimodel = '{prefix}_{base_name}'.format(
+        prefix='global_multimodelAgreement', base_name=base_name)
+    plot_path_multimodel = get_plot_filename(base_name_multimodel,
+                                             diag_config)
+    _plot_multimodel_agreement(plot_path_multimodel, global_tau_mod,
+                               global_tau_obs, diag_config)
+    with ProvenanceLogger(diag_config) as provenance_logger:
+        provenance_logger.log(plot_path_multimodel,
+                              provenance_record_multimodel)
+
+    # map of observation
+    base_name_obs = '{prefix}_{base_name}'.format(prefix='global',
+                                                  base_name=base_name)
+    plot_path_obs = get_plot_filename(base_name_obs, diag_config)
+    provenance_record_obs = _get_provenance_record(
+        "Map of observed global distribution of turnover time of carbon",
+        ['mean', 'perc'],
+        ['global'],
+        global_tau_obs['input_files'].tolist())
+
+    _plot_single_map(plot_path_obs,
+                     global_tau_obs['grid']['tau_ctotal'],
+                     global_tau_obs['global']['tau_ctotal'],
+                     diag_config['obs_info']['source_label'],
+                     provenance_record_obs,
+                     diag_config)
+
+    # matrix of maps
+    base_name_matrix = '{prefix}_{base_name}'.format(
+        prefix='global_matrix_map', base_name=base_name)
+    plot_path_matrix = get_plot_filename(base_name_matrix, diag_config)
+    _plot_matrix_map(plot_path_matrix, global_tau_mod, global_tau_obs,
+                     diag_config)
+
+    with
ProvenanceLogger(diag_config) as provenance_logger: + provenance_logger.log(plot_path_matrix, provenance_record_matrix) + + +if __name__ == '__main__': + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/land_carbon_cycle/diag_zonal_correlation.py b/esmvaltool/diag_scripts/land_carbon_cycle/diag_zonal_correlation.py new file mode 100644 index 0000000000..e4d6a2b6eb --- /dev/null +++ b/esmvaltool/diag_scripts/land_carbon_cycle/diag_zonal_correlation.py @@ -0,0 +1,449 @@ +""" +Evaluate the correlation between turnover time of carbon and climate. + +Use partial correlations between turnover time with precipitation and +temperature +""" + +import sys +import iris +import matplotlib.pyplot as plt +import numpy as np +import scipy.stats as stats + +from esmvaltool.diag_scripts.shared import ( + ProvenanceLogger, + get_diagnostic_filename, + get_plot_filename, + group_metadata, + run_diagnostic, +) + +import esmvaltool.diag_scripts.land_carbon_cycle.plot_utils as plut +from esmvaltool.diag_scripts.land_carbon_cycle.shared import ( + _apply_common_mask, + _get_obs_data_zonal, + _load_variable, + _remove_invalid, +) +from esmvaltool.diag_scripts.land_carbon_cycle.provenance import ( + _get_ancestor_files, + _get_provenance_record, +) + + +def _get_fig_config(diag_config): + """ + Get figure setting and configurations. + + default settings of the figure, and replace default with + runtime settings from recipe + + Argument: + -------- + diag_config - nested dictionary of metadata + + Return: + ------ + a dictionary of settings + """ + fig_config = { + 'fill_value': np.nan, + 'correlation_method': 'pearson', + 'min_points_frac': 0.125, + # define the data and information for plotting ratios + 'ax_fs': 7.1, + 'valrange_x': (-1, 1), + 'valrange_y': (-70, 90), + 'bandsize': 9.5, + 'gpp_threshold': 0.01 + } + fig_config.update(diag_config.get('fig_config')) + return fig_config + + +def partial_corr(dat_columns, fig_config): + """ + Calculate the linear partial correlation. + + The correlation between variables in the first and second column of + dat_columns is controlled for the covariation with that in the third + column. + + Argument: + -------- + dat_columns - an array with different variables in different columns + fig_config - configuration with correlation_method. Uses the scipy + stats module to calculate correlation using either pearsons linear + (http://tiny.cc/pearsonr) or spearmans rank (http://tiny.cc/spearmanr) + correlation coefficients. + + Return: + ------ + r123 - correlation between variables 1 and 2 controlled for 3 + """ + dat_x = dat_columns[:, 0] + dat_y = dat_columns[:, 1] + dat_z = dat_columns[:, 2] + if fig_config['correlation_method'] == 'pearson': + r12 = stats.pearsonr(dat_x, dat_y)[0] + r13 = stats.pearsonr(dat_x, dat_z)[0] + r23 = stats.pearsonr(dat_y, dat_z)[0] + elif fig_config['correlation_method'] == 'spearman': + r12 = stats.spearmanr(dat_x, dat_y)[0] + r13 = stats.spearmanr(dat_x, dat_z)[0] + r23 = stats.spearmanr(dat_y, dat_z)[0] + else: + sys.exit('set a valid correlation_method [pearson/spearman]') + # calculate the partial correlation coefficient as, + # rxy,z = (rxy - rxz * ryz) / sqrt((1 - rxz^2) * (1 - ryz^2)) + # https://en.wikipedia.org/wiki/Partial_correlation + r123 = (r12 - r13 * r23) / np.sqrt((1 - r13**2) * (1 - r23**2)) + return r123 + + +def _calc_zonal_correlation(dat_tau, dat_pr, dat_tas, dat_lats, fig_config): + """ + Calculate zonal partial correlations for sliding windows. 
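+
+    The half-width of the sliding window follows from ``bandsize`` (in
+    degrees) and the latitude spacing of the grid; a small sketch of the
+    arithmetic with illustrative numbers:
+
+    >>> lat_int = 2.0    # latitude spacing of the grid, in degrees
+    >>> bandsize = 9.5   # from fig_config['bandsize']
+    >>> round(bandsize / (lat_int * 2.))
+    2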
+
+    Argument:
+    --------
+    dat_tau - data of global tau
+    dat_pr - precipitation
+    dat_tas - air temperature
+    dat_lats - latitude of the given model
+    fig_config - figure/diagnostic configurations
+
+    Return:
+    ------
+    corr_dat - zonal correlations
+    """
+    # get the interval of latitude and create array for partial correlation
+    lat_int = abs(dat_lats[1] - dat_lats[0])
+    corr_dat = np.ones((np.shape(dat_tau)[0], 2)) * np.nan
+
+    # get the size of the sliding window based on the bandsize in degrees
+    window_size = round(fig_config['bandsize'] / (lat_int * 2.))
+
+    dat_tau, dat_pr, dat_tas = _apply_common_mask(dat_tau, dat_pr, dat_tas)
+    # minimum 1/8 of the given window has valid data points
+    min_points = np.shape(dat_tau)[1] * fig_config['min_points_frac']
+    for lat_index in range(len(corr_dat)):
+        istart = int(max(0, lat_index - window_size))
+        iend = int(min(np.size(dat_lats), lat_index + window_size + 1))
+        dat_tau_zone = dat_tau[istart:iend, :]
+        dat_pr_zone = dat_pr[istart:iend, :]
+        dat_tas_zone = dat_tas[istart:iend, :]
+        dat_x = np.ma.masked_invalid(dat_tau_zone).compressed().flatten()
+        dat_y = np.ma.masked_invalid(dat_pr_zone).compressed().flatten()
+        dat_z = np.ma.masked_invalid(dat_tas_zone).compressed().flatten()
+        num_valid_points = sum(~np.isnan(dat_x + dat_y + dat_z))
+        if num_valid_points > min_points:
+            corr_dat[lat_index, 1] = partial_corr(
+                np.vstack((dat_x, dat_y, dat_z)).T, fig_config)
+            corr_dat[lat_index, 0] = partial_corr(
+                np.vstack((dat_x, dat_z, dat_y)).T, fig_config)
+    return corr_dat


def _get_multimodel_stats(r_multimodel):
+    """
+    Compute mean, low and high correlations of all models.
+
+    Uses Fisher's z-transformation.
+
+    Argument:
+    --------
+    r_multimodel - zonal correlation from the models in the column
+    dimensions
+
+    Return:
+    ------
+    mean, mean - std, and mean + std correlations
+    """
+    # set the threshold of correlation to avoid infinities
+    r_multimodel[r_multimodel > 0.99] = 0.99
+    r_multimodel[r_multimodel < -0.99] = -0.99
+
+    # z-transform the correlation
+    z_multimodel = 0.5 * (np.log(1 + r_multimodel) - np.log(1 - r_multimodel))
+    z_multimodel[np.isinf(z_multimodel)] = np.nan
+    zmm_ens = np.nanmean(z_multimodel, axis=1)
+    zmm_ens_std = np.nanstd(z_multimodel, axis=1)
+
+    # get the mean correlation using the inverse of Fisher's z-transformation
+    r_mean = (np.exp(2 * zmm_ens) - 1) / (np.exp(2 * zmm_ens) + 1)
+
+    # get the lower bound of correlation using the inverse of Fisher's
+    # z-transformation
+    z_low = zmm_ens - zmm_ens_std
+    r_low = (np.exp(2 * z_low) - 1) / (np.exp(2 * z_low) + 1)
+
+    # get the upper bound of correlation using the inverse of Fisher's
+    # z-transformation
+    z_high = zmm_ens + zmm_ens_std
+    r_hi = (np.exp(2 * z_high) - 1) / (np.exp(2 * z_high) + 1)
+    return r_mean, r_low, r_hi


def _fix_axis(x_lab, fig_config, axlw=0.4, rem_list=('top', 'right')):
+    """
+    Fix the axis limits, labels and lines.
+
+    Argument:
+    --------
+    x_lab - axis labels
+    fig_config - figure configurations (provides the fontsize 'ax_fs')
+    axlw - linewidth of axis lines
+    rem_list - list of axis lines to remove
+    """
+    plt.xlim(fig_config['valrange_x'][0], fig_config['valrange_x'][1])
+    plt.ylim(fig_config['valrange_y'][0], fig_config['valrange_y'][1])
+    plt.axhline(y=0, lw=0.48, color='grey')
+    plt.axvline(x=0, lw=0.48, color='grey')
+    plt.xlabel(x_lab, fontsize=fig_config['ax_fs'])
+    _ax = plt.gca()
+    for loc, spine in _ax.spines.items():
+        if loc in rem_list:
+            spine.set_position(('outward', 0))
+            spine.set_linewidth(0.)
+ else: + spine.set_linewidth(axlw) + + +def _plot_zonal_correlation(plot_path, zonal_correlation_mod, + zonal_correlation_obs, diag_config): + """ + Make the line plots of zonal correlations from all models. + + Argument: + -------- + diag_config - nested dictionary of metadata + zonal_correlation_mod - dictionary of correlations from all models + zonal_correlation_obs - dictionary of correlations and ranges from + observation + """ + fig_config = _get_fig_config(diag_config) + models = list(zonal_correlation_mod.keys()) + nmodels = len(models) + models = sorted(models, key=str.casefold) + multimodel_stats = 'MultiModelMedian MultiModelMean'.split() + for _mm in multimodel_stats: + if _mm in models: + models.append(models.pop(models.index(_mm))) + + plt.figure(figsize=(5, 4)) + # tau-tas correlations + sp1 = plt.subplot(1, 2, 1) + + # get the observations out of the dictionary + lats_obs = zonal_correlation_obs['latitude'] + obs_var = diag_config.get('obs_variable')[0] + r_tau_ctotal_tas = zonal_correlation_obs[obs_var] + r_tau_ctotal_tas_5 = zonal_correlation_obs[obs_var + '_5'] + r_tau_ctotal_tas_95 = zonal_correlation_obs[obs_var + '_95'] + # plot the correlations from observation + + _fix_axis(obs_var, fig_config) + plt.ylabel('{name}\n({unit})'.format(name=lats_obs.long_name, + unit=lats_obs.units), + fontsize=fig_config['ax_fs'], + ma='center') + + sp1.plot(r_tau_ctotal_tas.data, + lats_obs.points, + color='k', + lw=1.1, + label=diag_config['obs_info']['source_label']) + sp1.fill_betweenx(lats_obs.points, + r_tau_ctotal_tas_5.data, + r_tau_ctotal_tas_95.data, + facecolor='grey', + alpha=0.40) + + # tau-pr correlations + sp2 = plt.subplot(1, 2, 2) + + # get the observations out of the dictionary + obs_var = diag_config.get('obs_variable')[1] + r_tau_ctotal_pr = zonal_correlation_obs[obs_var] + r_tau_ctotal_pr_5 = zonal_correlation_obs[obs_var + '_5'] + r_tau_ctotal_pr_95 = zonal_correlation_obs[obs_var + '_95'] + _fix_axis(obs_var, fig_config) + + # plot the correlations from observation + sp2.plot(r_tau_ctotal_pr.data, + lats_obs.points, + color='k', + lw=1.1, + label=diag_config['obs_info']['source_label']) + sp2.fill_betweenx(lats_obs.points, + r_tau_ctotal_pr_5.data, + r_tau_ctotal_pr_95.data, + facecolor='grey', + alpha=0.40) + + # PLOTTING for models + + # loop over models and plot zonal correlations + for row_m in range(nmodels): + row_mod = models[row_m] + r_mod = zonal_correlation_mod[row_mod]['data'] + lats_mod = zonal_correlation_mod[row_mod]['latitude'] + r_tau_tas_c_pr_mod = r_mod[:, 0] + r_tau_pr_c_tas_mod = r_mod[:, 1] + if row_mod in ['MultiModelMedian', 'MultiModelMean']: + sp1.plot(np.ma.masked_equal(r_tau_tas_c_pr_mod, np.nan), + lats_mod.points, + lw=1.1, + color='blue', + label=row_mod) + sp2.plot(np.ma.masked_equal(r_tau_pr_c_tas_mod, np.nan), + lats_mod.points, + lw=1.1, + color='blue', + label=row_mod) + else: + sp1.plot(np.ma.masked_equal(r_tau_tas_c_pr_mod, np.nan), + lats_mod.points, + lw=0.3, + label=row_mod) + sp2.plot(np.ma.masked_equal(r_tau_pr_c_tas_mod, np.nan), + lats_mod.points, + lw=0.3, + label=row_mod) + + # normalized mean correlations from model + + # remove the multimodel estimates + models = list(zonal_correlation_mod.keys()) + for _mm in multimodel_stats: + if _mm in models: + models.remove(_mm) + + nmodels = len(models) + + r_tau_pr_c_tas_all = np.ones((len(lats_obs.points), nmodels)) * np.nan + r_tau_tas_c_pr_all = np.ones((len(lats_obs.points), nmodels)) * np.nan + for row_m in range(nmodels): + row_mod = models[row_m] + r_mod = 
zonal_correlation_mod[row_mod]['data'] + lats_mod = zonal_correlation_mod[row_mod]['latitude'] + r_tau_tas_c_pr_all[:, row_m] = r_mod[:, 0] + r_tau_pr_c_tas_all[:, row_m] = r_mod[:, 1] + + r_mmod, r_mmod_std_low, r_mmod_std_hi = _get_multimodel_stats( + r_tau_tas_c_pr_all) + sp1.plot(np.ma.masked_equal(r_mmod, np.nan), + lats_mod.points, + color='red', + ls='--', + lw=1, + label='Norm. Mean r') + sp1.fill_betweenx(lats_mod.points, + np.ma.masked_equal(r_mmod_std_low, np.nan), + np.ma.masked_equal(r_mmod_std_hi, np.nan), + facecolor='#42d4f4', + alpha=0.25) + + r_mmod, r_mmod_std_low, r_mmod_std_hi = _get_multimodel_stats( + r_tau_pr_c_tas_all) + + sp2.plot(np.ma.masked_equal(r_mmod, np.nan), + lats_mod.points, + color='red', + ls='--', + lw=1, + label='Norm. Mean r') + sp2.fill_betweenx(lats_mod.points, + np.ma.masked_equal(r_mmod_std_low, np.nan), + np.ma.masked_equal(r_mmod_std_hi, np.nan), + facecolor='#42d4f4', + alpha=0.25) + + plt.gca().yaxis.set_label_position("right") + + # draw the legend + leg = plut.draw_line_legend(ax_fs=fig_config['ax_fs']) + + plut.save_figure(plot_path, _extr_art=[leg]) + plt.close() + + +def main(diag_config): + """ + Diagnostic to evaluate zonal correlation between turnover time and climate. + + Argument: + -------- + diag_config - nested dictionary of metadata + """ + model_data_dict = group_metadata(diag_config['input_data'].values(), + 'dataset') + fig_config = _get_fig_config(diag_config) + zonal_correlation_mod = {} + for model_name, model_dataset in model_data_dict.items(): + zonal_correlation_mod[model_name] = {} + mod_coords = {} + ctotal = _load_variable(model_dataset, 'ctotal') + gpp = _load_variable(model_dataset, 'gpp') + precip = _load_variable(model_dataset, 'pr') + tas = _load_variable(model_dataset, 'tas') + tau_ctotal = (ctotal / gpp) + tau_ctotal.convert_units('yr') + # set the attributes + tau_ctotal.var_name = 'tau_ctotal' + for coord in gpp.coords(): + mod_coords[coord.name()] = coord + + _tau_dat = _remove_invalid(tau_ctotal.data, fill_value=np.nan) + _precip_dat = _remove_invalid(precip.data, fill_value=np.nan) + _tas_dat = _remove_invalid(tas.data, fill_value=np.nan) + zon_corr = _calc_zonal_correlation(_tau_dat, _precip_dat, _tas_dat, + mod_coords['latitude'].points, + fig_config) + zonal_correlation_mod[model_name]['data'] = zon_corr + zonal_correlation_mod[model_name]['latitude'] = mod_coords['latitude'] + zonal_correlation_obs = _get_obs_data_zonal(diag_config) + + base_name = '{title}_{corr}_{source_label}_{grid_label}z'.format( + title='r_tau_ctotal_climate', + corr=fig_config['correlation_method'], + source_label=diag_config['obs_info']['source_label'], + grid_label=diag_config['obs_info']['grid_label']) + + provenance_record = _get_provenance_record( + "Comparison of latitudinal (zonal) variations of pearson" + " correlation between turnover time and climate: turnover" + " time and precipitation, controlled for temperature" + " (left) and vice-versa (right). Reproduces figures 2c" + " and 2d in Carvalhais et al. 
(2014).", ['corr', 'perc'], ['zonal'],
+        _get_ancestor_files(diag_config, 'tau_ctotal'))
+
+    model_cubes = [
+        c for c in zonal_correlation_mod.values()
+        if isinstance(c, iris.cube.Cube)
+    ]
+    obs_cubes = [
+        c for c in zonal_correlation_obs.values()
+        if isinstance(c, iris.cube.Cube)
+    ]
+    netcdf_path = get_diagnostic_filename(base_name, diag_config)
+    save_cubes = iris.cube.CubeList(model_cubes + obs_cubes)
+    iris.save(save_cubes, netcdf_path)
+
+    with ProvenanceLogger(diag_config) as provenance_logger:
+        provenance_logger.log(netcdf_path, provenance_record)
+
+    plot_path = get_plot_filename(base_name, diag_config)
+    _plot_zonal_correlation(plot_path, zonal_correlation_mod,
+                            zonal_correlation_obs, diag_config)
+
+    with ProvenanceLogger(diag_config) as provenance_logger:
+        provenance_logger.log(plot_path, provenance_record)
+
+
+if __name__ == '__main__':
+    with run_diagnostic() as config:
+        main(config)
diff --git a/esmvaltool/diag_scripts/land_carbon_cycle/diag_zonal_turnover.py b/esmvaltool/diag_scripts/land_carbon_cycle/diag_zonal_turnover.py
new file mode 100644
index 0000000000..9f93115754
--- /dev/null
+++ b/esmvaltool/diag_scripts/land_carbon_cycle/diag_zonal_turnover.py
@@ -0,0 +1,223 @@
+"""
+Evaluate the zonal distribution of turnover time.
+
+Compare the model simulations with the observations from
+Carvalhais et al. (2014).
+"""
+
+import iris
+import matplotlib.pyplot as plt
+import numpy as np
+
+from esmvaltool.diag_scripts.shared import (
+    ProvenanceLogger,
+    get_diagnostic_filename,
+    get_plot_filename,
+    group_metadata,
+    run_diagnostic,
+)
+
+import esmvaltool.diag_scripts.land_carbon_cycle.plot_utils as plut
+from esmvaltool.diag_scripts.land_carbon_cycle.shared import (
+    _get_obs_data_zonal,
+    _load_variable,
+)
+from esmvaltool.diag_scripts.land_carbon_cycle.provenance import (
+    _get_ancestor_files,
+    _get_provenance_record,
+)
+
+
+def _get_fig_config(diag_config):
+    """
+    Get figure settings and configurations.
+
+    Default settings of the figure are updated with runtime settings
+    from the recipe.
+
+    Argument:
+    --------
+    diag_config - nested dictionary of metadata
+
+    Return:
+    ------
+    a dictionary of settings
+    """
+    fig_config = {
+        'fill_value': np.nan,
+        'ax_fs': 7.1,
+        'valrange_x': (2, 1000),
+        'valrange_y': (-70, 90),
+        'bandsize': 2.5,
+        'gpp_threshold': 0.01
+    }
+    fig_config.update(diag_config.get('fig_config', {}))
+    return fig_config
+
+
+def _calc_zonal_tau(gpp, ctotal, fig_config):
+    """
+    Calculate zonal turnover time.
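+
+    The width of the rolling window follows from ``bandsize`` (in degrees)
+    and the latitude spacing of the grid, with a minimum of two rows; a
+    small sketch of the arithmetic with illustrative numbers:
+
+    >>> import numpy as np
+    >>> lat_int = 2.0   # latitude spacing of the grid, in degrees
+    >>> bandsize = 2.5  # from fig_config['bandsize']
+    >>> int(max(2, np.round(bandsize / lat_int)))
+    2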
+ + Argument: + -------- + gpp - cube of global gpp + ctotal - cube of total carbon content + fig_config - figure/diagnostic configurations + + Return: + ------ + zonal_tau - zonal turnover time of carbon + """ + gpp_zs = gpp.collapsed('longitude', iris.analysis.SUM) + ctotal_zs = ctotal.collapsed('longitude', iris.analysis.SUM) + + # get the size of the sliding window based on the bandsize in degrees + if fig_config['bandsize'] is not None: + # get the interval of latitude and create array for partial correlation + dat_lats = gpp.coord('latitude').points + lat_int = abs(dat_lats[1] - dat_lats[0]) + window_size = int( + max(2, np.round(fig_config['bandsize'] / lat_int)) + ) + gpp_z = gpp_zs.rolling_window('latitude', iris.analysis.SUM, + window_size) + ctotal_z = ctotal_zs.rolling_window('latitude', iris.analysis.SUM, + window_size) + else: + gpp_z = gpp_zs + ctotal_z = ctotal_zs + + zonal_tau = ctotal_z / gpp_z + zonal_tau.convert_units('yr') + + return zonal_tau + + +def _plot_zonal_tau(plot_path, all_mod_dat, all_obs_dat, diag_config): + """ + Plot the zonal distribution of turnover time. + + Argument: + -------- + diag_config - nested dictionary of metadata + cube - the cube to plot + dataset - name of the dataset to plot + """ + fig_config = _get_fig_config(diag_config) + models = list(all_mod_dat.keys()) + models = sorted(models, key=str.casefold) + for _mm in ['MultiModelMedian', 'MultiModelMean']: + if _mm in models: + models.append(models.pop(models.index(_mm))) + nmodels = len(models) + + lats_obs = all_obs_dat['latitude'] + obs_var = diag_config.get('obs_variable')[0] + tau_obs = all_obs_dat[obs_var] + tau_obs_5 = all_obs_dat[obs_var + '_5'] + tau_obs_95 = all_obs_dat[obs_var + '_95'] + + plt.figure(figsize=(3, 5)) + + sp0 = plt.subplot(1, 1, 1) + sp0.plot(tau_obs.data, + lats_obs.points, + color='k', + lw=1.5, + label=diag_config['obs_info']['source_label']) + sp0.fill_betweenx(lats_obs.points, + tau_obs_5.data, + tau_obs_95.data, + facecolor='grey', + alpha=0.40) + + for row_m in range(nmodels): + row_mod = models[row_m] + dat_mod_tau = all_mod_dat[row_mod] + if row_mod in ['MultiModelMedian', 'MultiModelMean']: + sp0.plot(dat_mod_tau.data, + dat_mod_tau.coord('latitude').points, + lw=1.5, + color='blue', + label=row_mod) + else: + sp0.plot(dat_mod_tau.data, + dat_mod_tau.coord('latitude').points, + lw=0.5, + label=row_mod) + + leg = plut.draw_line_legend(ax_fs=fig_config['ax_fs']) + + plt.gca().set_xscale('log') + plt.xlim(fig_config['valrange_x'][0], fig_config['valrange_x'][1]) + plt.ylim(fig_config['valrange_y'][0], fig_config['valrange_y'][1]) + plt.axhline(y=0, lw=0.48, color='grey') + plt.xlabel(f'{tau_obs.long_name} ({tau_obs.units})', + fontsize=fig_config['ax_fs'], + ma='center') + plt.ylabel(f'{lats_obs.long_name} ({lats_obs.units})', + fontsize=fig_config['ax_fs'], + ma='center') + plut.rem_ax_line(['top', 'right']) + + plut.save_figure(plot_path, _extr_art=[leg]) + plt.close() + + +def main(diag_config): + """ + Diagnostic function to compare the zonal turnover time. 
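+
+    For every model the diagnostic loads ``ctotal`` and ``gpp``, derives
+    the zonal turnover time from their zonal sums, and plots it against
+    the observation-based estimate; the cubes and the figure are saved
+    with provenance attached.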
+ + Argument: + -------- + diag_config - nested dictionary of metadata + """ + model_data_dict = group_metadata(diag_config['input_data'].values(), + 'dataset') + + fig_config = _get_fig_config(diag_config) + zonal_tau_mod = {} + for model_name, model_dataset in model_data_dict.items(): + zonal_tau_mod[model_name] = {} + ctotal = _load_variable(model_dataset, 'ctotal') + gpp = _load_variable(model_dataset, 'gpp') + zonal_tau_mod[model_name] = _calc_zonal_tau(gpp, ctotal, fig_config) + + zonal_tau_obs = _get_obs_data_zonal(diag_config) + + obs_var = diag_config.get('obs_variable')[0] + tau_obs = zonal_tau_obs[obs_var] + base_name = '{title}_{source_label}_{grid_label}z'.format( + title=tau_obs.long_name, + source_label=diag_config['obs_info']['source_label'], + grid_label=diag_config['obs_info']['grid_label']) + + provenance_record = _get_provenance_record( + "Comparison of latitudinal (zonal) variations of observation-based and" + " modelled ecosystem carbon turnover time. The zonal turnover time is" + " calculated as the ratio of zonal `ctotal` and `gpp`. Reproduces " + " figure 2a and 2b in Carvalhais et al. (2014).", ['mean', 'perc'], + ['zonal'], _get_ancestor_files(diag_config, obs_var)) + + model_cubes = [ + c for c in zonal_tau_mod.values() if isinstance(c, iris.cube.Cube) + ] + obs_cubes = [ + c for c in zonal_tau_obs.values() if isinstance(c, iris.cube.Cube) + ] + netcdf_path = get_diagnostic_filename(base_name, diag_config) + save_cubes = iris.cube.CubeList(model_cubes + obs_cubes) + iris.save(save_cubes, netcdf_path) + with ProvenanceLogger(diag_config) as provenance_logger: + provenance_logger.log(netcdf_path, provenance_record) + + plot_path = get_plot_filename(base_name, diag_config) + _plot_zonal_tau(plot_path, zonal_tau_mod, zonal_tau_obs, diag_config) + with ProvenanceLogger(diag_config) as provenance_logger: + provenance_logger.log(plot_path, provenance_record) + + +if __name__ == '__main__': + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/land_carbon_cycle/plot_utils.py b/esmvaltool/diag_scripts/land_carbon_cycle/plot_utils.py new file mode 100644 index 0000000000..cafdb91034 --- /dev/null +++ b/esmvaltool/diag_scripts/land_carbon_cycle/plot_utils.py @@ -0,0 +1,291 @@ +"""Provide additional plotting utilities.""" + +import numpy as np +from matplotlib import pyplot as plt +import matplotlib as mpl + + +def ax_clr(): + """Remove all the axis lines.""" + rem_ax_line(rem_list=['top', 'right', 'left', 'bottom']) + rem_ticks(which_ax='both') + + +def ax_clr_x(axfs=7, axlw=0.3, nticks=3): + """Remove all the axis lines except the left one.""" + rem_ax_line(rem_list=['top', 'right', 'bottom']) + rem_ticks(which_ax='x') + plt.gca().tick_params(axis='y', labelsize=axfs) + put_ticks(which_ax='y', axlw=axlw, nticks=nticks) + + +def ax_clr_y(axfs=7, axlw=0.3, nticks=3): + """Remove all the axis lines except the bottom one.""" + rem_ax_line(rem_list=['top', 'right', 'left']) + rem_ticks(which_ax='y') + put_ticks(which_ax='x', axlw=axlw, nticks=nticks) + plt.gca().tick_params(axis='x', labelsize=axfs) + + +def ax_clr_xy(axfs=7, axlw=0.3, nticks=3): + """Remove the top and right axis.""" + rem_ax_line(rem_list=['top', 'right']) + put_ticks(which_ax='y', axlw=axlw, nticks=nticks) + plt.gca().tick_params(axis='both', labelsize=axfs) + + +def ax_orig(axfs=7, axlw=0.3, nticks=3): + """ + Remove the top and right axis line. + + Set the axis linewidth to axlw. 
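+
+    Typically called once, right after the plotting commands on the
+    current axes (a sketch; the fontsize is illustrative)::
+
+        plt.plot(x_vals, y_vals)
+        ax_orig(axfs=7)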
+ """ + rem_ax_line(rem_list=('top', 'right'), axlw=axlw) + put_ticks(which_ax='both', axlw=axlw, nticks=nticks) + plt.gca().tick_params(axis='both', labelsize=axfs) + + +def draw_line_legend(ax_fs=8): + """ + Draws a legend for line plots. + + It puts it outside the plot area in + x-direction + """ + leg = plt.legend(loc=(1.00974, .06), + fontsize=ax_fs, + ncol=1, + columnspacing=0.05, + fancybox=True, + handlelength=1.5) + leg.get_frame().set_linewidth(0) + leg.get_frame().set_facecolor('#eeeeee') + leg.legendPatch.set_alpha(0.45) + texts = leg.get_texts() + plt.setp(texts, fontsize=ax_fs * 0.9) + return leg + + +def get_colomap(cmap_nm, bounds__, lowp=0.05, hip=0.95): + """ + Get the list of colors from any official colormaps in matplotlib. + + It returns the number of colors based on the number of items in the bounds. + Bounds is a list of boundary for each color. + """ + cmap__ = mpl.cm.get_cmap(cmap_nm) + clist_v = np.linspace(lowp, hip, len(bounds__) - 1) + rgba_ = [cmap__(_cv) for _cv in clist_v] + return rgba_ + + +def mk_colo_tau(axcol_, + bounds__, + cm2, + cblw=0.1, + cbrt=0, + cbfs=9, + cbtitle='', + tick_locs=(), + ex_tend='both', + cb_or='horizontal', + spacing='uniform'): + """ + Plot the colorbar to the axis given by axcol_. + + Uses arrows on two sides. + """ + axco1 = plt.axes(axcol_) + col_bar = mpl.colorbar.ColorbarBase(axco1, + cmap=cm2, + norm=mpl.colors.BoundaryNorm( + bounds__, cm2.N), + boundaries=bounds__, + orientation=cb_or, + drawedges=False, + extend=ex_tend, + ticks=tick_locs, + spacing=spacing) + col_bar.ax.tick_params(labelsize=cbfs, size=2, width=0.3) + # hack the lines of the colorbar to make them white, the same color of + # background so that the colorbar looks broken. + col_bar.outline.set_alpha(0.) + col_bar.outline.set_color('white') + col_bar.outline.set_linewidth(0 * cblw) + for ti_ck in col_bar.ax.get_yticklabels(): + ti_ck.set_fontsize(cbfs) + ti_ck.set_rotation(cbrt) + + for ti_ck in col_bar.ax.get_xticklabels(): + ti_ck.set_fontsize(cbfs) + ti_ck.set_rotation(cbrt) + ti_ck.set_y(-0.02) + if cbtitle != '': + col_bar.ax.set_title(cbtitle, fontsize=1.3 * cbfs) + col_bar.update_ticks() + return col_bar + + +def mk_colo_cont(axcol_, + bounds__, + cm2, + cblw=0.1, + cbrt=0, + cbfs=9, + nticks=10, + cbtitle='', + col_scale='linear', + tick_locs=(), + ex_tend='both', + cb_or='horizontal', + spacing='uniform'): + """ + Plot the colorbar to the axis given by axcol_. + + Uses arrows on two sides. 
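+
+    A minimal usage sketch (the axis rectangle, bounds, and tick
+    locations are illustrative, not taken from any recipe)::
+
+        bounds = np.geomspace(0.2, 5., num=10)
+        col_bar = mk_colo_cont([0.1, 0.1, 0.8, 0.05], bounds,
+                               mpl.cm.get_cmap('viridis'),
+                               col_scale='log', tick_locs=(0.25, 1, 4))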
+ """ + axco1 = plt.axes(axcol_) + if col_scale == 'linear': + col_bar = mpl.colorbar.ColorbarBase(axco1, + cmap=cm2, + norm=mpl.colors.BoundaryNorm( + bounds__, cm2.N), + boundaries=bounds__, + orientation=cb_or, + drawedges=False, + extend=ex_tend, + ticks=bounds__[1:-1], + spacing=spacing) + if not tick_locs: + tick_locator = mpl.ticker.MaxNLocator(nbins=nticks, + min_n_ticks=nticks) + else: + tick_locator = mpl.ticker.FixedLocator(tick_locs) + col_bar.locator = tick_locator + col_bar.update_ticks() + if col_scale == 'log': + col_bar = mpl.colorbar.ColorbarBase(axco1, + cmap=cm2, + norm=mpl.colors.BoundaryNorm( + bounds__, cm2.N), + boundaries=bounds__, + orientation=cb_or, + extend=ex_tend, + drawedges=False, + ticks=bounds__[1:-1], + spacing=spacing) + if cb_or == 'horizontal': + tick_locs_ori = col_bar.ax.xaxis.get_ticklocs() + else: + tick_locs_ori = col_bar.ax.yaxis.get_ticklocs() + tick_locs_bn = [] + for _tl in tick_locs: + tl_ind = np.argmin(np.abs(bounds__[1:-1] - _tl)) + tick_locs_bn = np.append(tick_locs_bn, tick_locs_ori[tl_ind]) + if cb_or == 'horizontal': + col_bar.ax.xaxis.set_ticks(tick_locs_bn) + col_bar.ax.xaxis.set_ticklabels(tick_locs) + else: + col_bar.ax.yaxis.set_ticks(tick_locs_bn) + col_bar.ax.yaxis.set_ticklabels(tick_locs) + col_bar.ax.tick_params(labelsize=cbfs, size=2, width=0.3) + col_bar.outline.set_alpha(0.) + col_bar.outline.set_color('white') + col_bar.outline.set_linewidth(0 * cblw) + for ti_ck in col_bar.ax.get_yticklabels(): + ti_ck.set_fontsize(cbfs) + ti_ck.set_rotation(cbrt) + + for ti_ck in col_bar.ax.get_xticklabels(): + ti_ck.set_fontsize(cbfs) + ti_ck.set_rotation(cbrt) + ti_ck.set_y(-0.02) + if cbtitle != '': + col_bar.ax.set_title(cbtitle, fontsize=1.3 * cbfs) + return col_bar + + +def put_ticks(nticks=5, which_ax='both', axlw=0.3): + """Put the ticks on given locations and sets the width of axis lines.""" + if which_ax == 'x': + plt.gca().xaxis.set_ticks_position('bottom') + lines = plt.gca().get_xticklines() + labels = plt.gca().get_xticklabels() + for line in lines: + line.set_marker(mpl.lines.TICKDOWN) + for label in labels: + label.set_y(-0.02) + plt.gca().xaxis.set_major_locator( + plt.MaxNLocator(nbins=nticks, min_n_ticks=nticks)) + + if which_ax == 'y': + plt.gca().yaxis.set_ticks_position('left') + lines = plt.gca().get_yticklines() + labels = plt.gca().get_yticklabels() + for line in lines: + line.set_marker(mpl.lines.TICKLEFT) + line.set_linewidth(axlw) + plt.gca().yaxis.set_major_locator( + plt.MaxNLocator(nbins=nticks, min_n_ticks=nticks)) + if which_ax == 'both': + plt.gca().yaxis.set_ticks_position('left') + lines = plt.gca().get_yticklines() + labels = plt.gca().get_yticklabels() + for line in lines: + line.set_marker(mpl.lines.TICKLEFT) + line.set_linewidth(axlw) + plt.gca().yaxis.set_major_locator( + plt.MaxNLocator(nbins=nticks, min_n_ticks=nticks)) + plt.gca().xaxis.set_ticks_position('bottom') + lines = plt.gca().get_xticklines() + for line in lines: + line.set_marker(mpl.lines.TICKDOWN) + plt.gca().xaxis.set_major_locator( + plt.MaxNLocator(nbins=nticks, min_n_ticks=nticks)) + + +def rem_ticks(which_ax='both'): + """Remove ticks from either x or y axis and preserves the lines.""" + if which_ax in ('x', 'both'): + plt.gca().set_xticklabels([]) + plt.gca().xaxis.set_ticks_position("none") + if which_ax in ('y', 'both'): + plt.gca().set_yticklabels([]) + plt.gca().yaxis.set_ticks_position("none") + + +def rem_ax_line(rem_list=('top', 'right'), axlw=0.4): + """ + Remove the axis lines. 
+ + It uses the list of which lines to remove + rem_list can be 'left', 'right', 'top', 'bottom' + """ + for loc, spine in plt.gca().spines.items(): + if loc in rem_list: + spine.set_position(('outward', 0)) + spine.set_linewidth(0.) + else: + spine.set_linewidth(axlw) + + +def rotate_labels(which_ax='both', rot=0, axfs=6): + """ + Rotate the ticks labels to rot. + + Also sets it fontsize to axfs + """ + if which_ax in ('x', 'both'): + _, labels = plt.xticks() + plt.setp(labels, rotation=rot, fontsize=axfs) + if which_ax in ('y', 'both'): + _, labels = plt.yticks() + plt.setp(labels, rotation=rot, fontsize=axfs) + + +def save_figure(plot_path, _extr_art=None): + """Write the figure to a file.""" + plt.savefig(plot_path, + bbox_inches='tight', + bbox_extra_artists=_extr_art, + dpi=450) diff --git a/esmvaltool/diag_scripts/land_carbon_cycle/provenance.py b/esmvaltool/diag_scripts/land_carbon_cycle/provenance.py new file mode 100644 index 0000000000..465bf0a1e4 --- /dev/null +++ b/esmvaltool/diag_scripts/land_carbon_cycle/provenance.py @@ -0,0 +1,47 @@ +"""Handle provenance record of land_carbon_cycle diagnostic.""" + +from esmvaltool.diag_scripts.shared import ( + group_metadata, + select_metadata, +) + + +def _get_project(cfg): + """Extract project from cfg.""" + input_data = cfg['input_data'].values() + projects = list(group_metadata(input_data, 'project').keys()) + projects = [p for p in projects if 'obs' not in p.lower()] + if len(projects) == 1: + return projects[0] + return projects + + +def _get_ancestor_files(cfg, obs_name, projects=None): + """Get ancestor files for provenance.""" + if projects is None: + projects = _get_project(cfg) + if isinstance(projects, str): + projects = [projects] + datasets = [] + for project in projects: + datasets.extend( + select_metadata(cfg['input_data'].values(), project=project)) + datasets.extend( + select_metadata(cfg['input_data'].values(), dataset=obs_name)) + return [d['filename'] for d in datasets] + + +def _get_provenance_record(caption, statistics, plot_type, ancestor_files): + """Create a provenance record describing the diagnostic data and plot.""" + record = { + 'ancestors': ancestor_files, + 'authors': ['koirala_sujan'], + 'caption': caption, + 'domains': ['global'], + 'plot_type': plot_type, + 'realms': ['land'], + 'references': ['carvalhais14nature'], + 'statistics': statistics, + 'themes': ['bgchem', 'carbon', 'chem', 'ghg'], + } + return record diff --git a/esmvaltool/diag_scripts/land_carbon_cycle/shared.py b/esmvaltool/diag_scripts/land_carbon_cycle/shared.py new file mode 100644 index 0000000000..66d1e8240b --- /dev/null +++ b/esmvaltool/diag_scripts/land_carbon_cycle/shared.py @@ -0,0 +1,127 @@ +"""Provide shared functions for land carbon cycle diagnostic.""" +import os + +import iris +import numpy as np +from iris import NameConstraint + +from esmvaltool.diag_scripts.shared import select_metadata + + +def _apply_common_mask(*args): + """ + Apply common mask to all arrays passed as argument. 
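+
+    A small illustration of the behaviour (positions that are invalid in
+    either input end up as NaN in both outputs)::
+
+        a = np.array([[1., np.nan], [3., 4.]])
+        b = np.array([[1., 2.], [np.nan, 4.]])
+        a_m, b_m = _apply_common_mask(a, b)
+        # both a_m and b_m are now NaN at (0, 1) and (1, 0)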
+ + Argument: + -------- + arrays + + Return: + ------ + an array with size of nargs x common size of all input arrays + """ + nargs = len(args) + for arg_ in range(nargs): + _dat = args[arg_] + vars()['dat_mask' + str(arg_)] = np.ones((np.shape(_dat))) + vars()['dat_mask_inv' + str(arg_)] = np.ma.masked_invalid(_dat).mask + vars()['dat_mask' + str(arg_)][vars()['dat_mask_inv' + str(arg_)]] = 0 + dat_mask = vars()['dat_mask0'] + for arg_ in range(nargs): + dat_mask = dat_mask * vars()['dat_mask' + str(arg_)] + mask_where = np.ma.getmask(np.ma.masked_less(dat_mask, 1.)) + odat = [] + for arg_ in range(nargs): + _dat = args[arg_].astype(np.float64) + _dat[mask_where] = np.nan + odat = np.append(odat, np.ma.masked_invalid(_dat)) + odat = odat.reshape(nargs, _dat.shape[0], _dat.shape[1]) + return odat + + +def _apply_gpp_threshold(gpp_dat, fig_config): + """Mask the gpp array below threshold.""" + # converting gC m-2 yr-1 to kgC m-2 s-1 + gpp_thres = fig_config["gpp_threshold"] / (86400.0 * 365 * 1000.) + gpp_dat = np.ma.masked_less(gpp_dat, + gpp_thres).filled(fig_config["fill_value"]) + return gpp_dat + + +def _get_obs_data_zonal(diag_config): + """ + Get and handle the observations of turnover time from Carvalhais 2014. + + Argument: + -------- + diag_config - nested dictionary of metadata + + Return: + ------ + dictionary with observation data with different variables as keys + """ + if not diag_config.get('obs_variable'): + raise ValueError('The observation variable needs to be specified in ' + 'the recipe (see recipe description for details)') + obs_dir = os.path.join(diag_config['auxiliary_data_dir'], + diag_config['obs_info']['obs_data_subdir']) + + all_data = {} + var_list = diag_config.get('obs_variable') + + input_files = [] + for _var in var_list: + var_list = np.append(var_list, '{var}_{perc:d}'.format(var=_var, + perc=5)) + var_list = np.append(var_list, '{var}_{perc:d}'.format(var=_var, + perc=95)) + obs_filename = (f'{_var}_{{frequency}}_{{source_label}}_' + f'{{variant_label}}_{{grid_label}}z.nc'.format( + **diag_config['obs_info'])) + input_files = np.append(input_files, + os.path.join(obs_dir, obs_filename)) + + nvars = len(var_list) + for v_ind in range(nvars): + var_obs = var_list[v_ind] + variable_constraint = NameConstraint(var_name=var_obs) + cube = iris.load_cube(input_files, constraint=variable_constraint) + all_data[var_obs] = cube + for coord in cube.coords(): + all_data[coord.name()] = coord + return all_data + + +def _load_variable(metadata, var_name): + """ + Load data for the variable listed in metadata of the diagnostic variable. + + Argument: + -------- + metadata - nested dictionary of metadata + + Return: + ------ + iris cube of the data + """ + candidates = select_metadata(metadata, short_name=var_name) + assert len(candidates) == 1 + filename = candidates[0]['filename'] + cube = iris.load_cube(filename) + return cube + + +def _remove_invalid(tmp, fill_value=-9999.): + """ + Remove the invalid non-numeric values from the input array. + + Fill it with fill_value. 
+ Remove all large and small values with magnitude + beyond 1e15 + """ + tmp = np.ma.masked_outside(tmp, -1e15, 1e15).filled(fill_value) + where_nan = np.isnan(tmp) + tmp[where_nan] = fill_value + where_inf = np.isinf(tmp) + tmp[where_inf] = fill_value + return tmp diff --git a/esmvaltool/diag_scripts/landcover/albedolandcover.py b/esmvaltool/diag_scripts/landcover/albedolandcover.py new file mode 100644 index 0000000000..96e7792039 --- /dev/null +++ b/esmvaltool/diag_scripts/landcover/albedolandcover.py @@ -0,0 +1,361 @@ +"""Landcover analysis plots. + +############################################################### +landcover/landcover.py +Authors ESMValToolV1 Version + lejeune_quentin +Port to ESMValTool Version 2 + crezee_bas +############################################################### +Description +----------- + Computes relationship between landcover and albedo for models + and compares this to observations. + +Projects +-------- + CMIP5 + CMIP6 (experimental) +""" + + +import copy +import glob +import itertools as it +import logging +import os + +from cartopy import crs # This line causes a segmentation fault in prospector +import cartopy.feature as cfeature +import iris +import matplotlib.pyplot as plt +import numpy as np +# specific imports for this diagnostic +from sklearn import linear_model + +from esmvaltool.diag_scripts.shared import (group_metadata, + run_diagnostic, + ProvenanceLogger, + get_plot_filename) + +# This part sends debug statements to stdout +logger = logging.getLogger(os.path.basename(__file__)) + + +def _add_masks_albedolandcover(model_data, this_models_xxfracs, cfg): + + total_frac = sum([model_data[key] for key in this_models_xxfracs]) + + # Mask out regions where total_frac is too low + fracmask = (total_frac.data.data < cfg['params']['threshold_sumpred']) + + # Start masking operations. Remember that a True means masked out. + basemask = model_data['snc'].data.mask + + # Mask out regions where there is little snow + snowmask = model_data['snc'].data.data < 0.1 + snowfreemask = model_data['snc'].data.data > 0.9 + + # Update the masks + snowmask |= basemask + snowmask |= fracmask + snowfreemask |= basemask + snowfreemask |= fracmask + + # Plotting intermezzo for the masks + masksavedir = os.path.join(cfg['plot_dir'], 'masks/') + if not os.path.exists(masksavedir): + os.mkdir(masksavedir) + + template_time = model_data['snc'].coord('time') + month_string = template_time.units.num2date( + template_time.points)[0].strftime('%b') + if 'source_id' in model_data['snc'].attributes: + model_attr_name = model_data['snc'].attributes['source_id'] + elif 'model_id' in model_data['snc'].attributes: + model_attr_name = model_data['snc'].attributes['model_id'] + else: + logger.warning("Could not find attribute that describes model name") + + masksavename = '{0}-{1}'.format( + month_string, model_attr_name) + plt.imshow(total_frac.data[::-1]) + plt.savefig(os.path.join(masksavedir, masksavename + + 'total_frac.' + cfg['output_file_type'])) + plt.imshow(fracmask[::-1]) + plt.savefig(os.path.join(masksavedir, masksavename + + 'fracmask.' + cfg['output_file_type'])) + plt.imshow(snowmask[::-1]) + plt.title('snowmask') + plt.savefig(os.path.join(masksavedir, masksavename + + 'snowmask.' + cfg['output_file_type'])) + plt.imshow(snowfreemask[::-1]) + plt.title('snowfreemask') + plt.savefig(os.path.join(masksavedir, masksavename + + 'snowfreemask.' 
+ cfg['output_file_type'])) + + # Distinguish between snowfree and snow areas + if cfg['params']['snowfree']: + mymask = copy.deepcopy(snowfreemask) + else: + mymask = copy.deepcopy(snowmask) + + for varkey in model_data: + model_data[varkey].data.mask = mymask + + return model_data + + +def _reconstruct_albedo_pixel(x_0, y_0, lc_logical): + # Check that the system is not over_parameterised + alb_lc_pixel = np.zeros((3, )) + alb_lc_pixel[...] = np.nan + + if len(y_0) > np.sum(lc_logical.astype(int)) + 1 and\ + np.sum(lc_logical.astype(int)) > 0: + # Do multiple linear regression + linreg = linear_model.LinearRegression().fit(x_0, y_0) + intercept = linreg.intercept_ + coefficients = linreg.coef_ + + # Now loop again and reconstruct albedo's + lc_reg = 0 + for i_0 in range(3): + if lc_logical[i_0]: + alb_lc_pixel[i_0] = intercept\ + + coefficients[lc_reg] * 100. + lc_reg = lc_reg + 1 + return alb_lc_pixel + + +def _prepare_data_for_linreg(model_data, islice, jslice): + lc_logical = np.full((3, ), True) + lc_classes = [config['params']['lc1_class'], + config['params']['lc2_class'], + config['params']['lc3_class']] + lc_data = [] + # Loop over lc_classes + for i_0 in range(3): + current_class = lc_classes[i_0] + # First flatten the array + lc_flattened = {} + for varkey in current_class: + lc_flattened[varkey] = model_data[varkey].data[ + islice, jslice].compressed() + lc_sum = sum([lc_flattened[varkey] + for varkey in current_class]) + # Now check thresholds + if (np.var(lc_sum) > 0. and + len(lc_sum) >= config['params']['mingc']): + lc_data.append(lc_sum) + lc_logical[i_0] = True + else: + logger.info("Variance zero or not enough\ + valid data for this landcover class") + lc_logical[i_0] = False + # Now the multiple lin reg part + x_0 = np.stack(lc_data) + x_0 = x_0.swapaxes(0, 1) + # Same mask, so shape is fine + y_0 = model_data['alb'].data[islice, jslice].compressed() + + return x_0, y_0, lc_logical + + +def _get_reconstructed_albedos(model_data, cfg): + alb_lc = np.zeros((3, ) + model_data['alb'].shape) + alb_lc[...] 
= np.nan
+
+    # Now loop over these arrays and do the math
+    for (indices, maskbool) in np.ndenumerate(model_data['alb'].data.mask):
+        if not maskbool:  # Only if not masked we need to check neighbourhood
+            i, j = indices
+            # Create the neighbourhood as bbox
+            islice = slice(int(i - (cfg['params']['lonsize_BB'] - 1) / 2),
+                           int(i + (cfg['params']['lonsize_BB'] - 1) / 2 + 1))
+            jslice = slice(int(j - (cfg['params']['latsize_BB'] - 1) / 2),
+                           int(j + (cfg['params']['latsize_BB'] - 1) / 2 + 1))
+            bbox_mask = model_data['alb'].data.mask[islice, jslice]
+
+            # Check if there are enough valid data points
+            # in the neighbourhood bbox
+            if (np.sum((~bbox_mask).astype(int)) >
+                    cfg['params']['minnum_gc_bb']):
+                x_0, y_0, lc_logical = _prepare_data_for_linreg(model_data,
+                                                                islice,
+                                                                jslice)
+                alb_lc[:, i, j] = _reconstruct_albedo_pixel(x_0, y_0,
+                                                            lc_logical)
+
+    return alb_lc
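The pixel loop above feeds `_prepare_data_for_linreg` and `_reconstruct_albedo_pixel`, which regress albedo on the landcover fractions inside each bounding box and extrapolate to a hypothetical 100 % pure class. A self-contained sketch of that regression step, using synthetic data and invented coefficients rather than the diagnostic's real inputs:

```python
import numpy as np
from sklearn import linear_model

# Synthetic neighbourhood: albedo as a linear mix of two landcover fractions.
rng = np.random.default_rng(0)
fracs = rng.uniform(0, 100, size=(50, 2))        # % cover per grid cell
albedo = 0.1 + 0.002 * fracs[:, 0] + 0.004 * fracs[:, 1]

reg = linear_model.LinearRegression().fit(fracs, albedo)
# Albedo of a hypothetical 100 % pure landcover class, as in the diagnostic:
pure_albedo = reg.intercept_ + reg.coef_ * 100.
print(pure_albedo)   # [0.3, 0.5] for this noise-free example
```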
+
+
+def _write_albedochanges_to_disk(alb_lc, template_cube,
+                                 datadict, cfg):
+    transition_cube = template_cube
+    # Remove attributes that are not applicable to derived data
+    for att in ['comment', 'modeling_realm', 'table_id']:
+        if att in transition_cube.attributes:
+            transition_cube.attributes.pop(att)
+    # Set correct unit
+    transition_cube.units = '1'
+    result_dict = {'lc1': alb_lc[0, :, :], 'lc2': alb_lc[1, :, :],
+                   'lc3': alb_lc[2, :, :]}
+    names = {'lc1': '-'.join(cfg['params']['lc1_class']),
+             'lc2': '-'.join(cfg['params']['lc2_class']),
+             'lc3': '-'.join(cfg['params']['lc3_class'])}
+    for ikey, jkey in it.product(result_dict.keys(), result_dict.keys()):
+        if not ikey == jkey:
+            # Take out Frac for readability
+            transition_cube.data = result_dict[jkey] - result_dict[ikey]
+            transition_cube.rename("albedo_change_from_{0}_to_{1}".format(
+                names[ikey], names[jkey]).replace('Frac', ''))
+            logger.info("Calculated: %s", transition_cube.name())
+            # Get some useful info for constructing the filenames
+            month_string = template_cube.coord('time').units.num2date(
+                template_cube.coord('time').points)[0].strftime('%b')
+            basename = '{0}-{1}-{2}'.format(month_string,
+                                            datadict['alb']['dataset'],
+                                            transition_cube.name())
+            transition_cube.attributes['plottitle'] = month_string + '-'\
+                + datadict['alb']['dataset']
+            transition_cube.attributes['plotsuptitle'] = transition_cube.name()
+            savename_nc = os.path.join(cfg['work_dir'],
+                                       '{0}.nc'.format(basename))
+            logger.info("Saving file as: %s", savename_nc)
+            iris.save(transition_cube, savename_nc)
+
+            # Create provenance record
+            # Create caption
+            prov_rec = {
+                'caption': '{0} {1}'.format(
+                    transition_cube.attributes['plottitle'],
+                    transition_cube.attributes['plotsuptitle']),
+                'statistics': ['other'],
+                'domains': ['global'],
+                'plot_type': 'other',
+                'authors': [
+                    'lejeune_quentin',
+                    'crezee_bas',
+                ],
+                'project': [
+                    'crescendo',
+                ],
+            }
+            with ProvenanceLogger(cfg) as provenance_logger:
+                provenance_logger.log(os.path.join(cfg['work_dir'], basename),
+                                      prov_rec)
+
+
+def _plot_cube(cube, cfg):
+    """Plot the transition cube."""
+    # Also plot the transition_cube
+    if not cube.ndim == 2:
+        raise ValueError("Cube should be two-dimensional")
+    plt.clf()
+    cow = plt.axes(projection=crs.PlateCarree())
+    cow.add_feature(cfeature.LAND)
+    iris.quickplot.pcolormesh(cube, vmin=-.24, vmax=.24, cmap='bwr')
+    # Set title/suptitle for plot
+    if 'plottitle' in cube.attributes:
+        plt.title(cube.attributes['plottitle'])
+    if 'plotsuptitle' in cube.attributes:
+        plt.suptitle(cube.attributes['plotsuptitle'])
+    # Draw coast lines
+    plt.gca().coastlines()
+    # Get the right path for saving plots from the cfg dictionary.
+    if 'parent_mip_era' in cube.attributes:
+        model_attr_name = 'source_id' if\
+            cube.attributes['parent_mip_era'] == 'CMIP6'\
+            else 'model_id'
+    else:  # In this case it must be OBS, and we set it to model_id explicitly
+        model_attr_name = 'model_id'
+    basename = cube.attributes[model_attr_name] + '_'\
+        + cube.name().replace(' ', '_')
+    savename_fig = get_plot_filename(basename, cfg)
+    logger.info("Saving figure as: %s", savename_fig)
+    plt.savefig(savename_fig)
+
+
+def main(cfg):
+    """Calculate linear regression between albedo and xxfrac.
+
+    Arguments:
+    ---------
+    cfg - nested dictionary of metadata
+    """
+    # Assemble the data dictionary keyed by dataset name
+    my_files_dict = group_metadata(cfg['input_data'].values(), 'dataset')
+    all_short_names = ['alb', 'snc', 'cropFrac', 'treeFrac', 'grassFrac',
+                       'shrubFrac', 'pastureFrac']
+
+    # Loop over all datasets
+    for dataset_name in my_files_dict:
+        dataset_dict = my_files_dict[dataset_name]
+
+        if dataset_name == 'Duveiller2018':
+            logger.info("Only do plotting for dataset %s", dataset_name)
+            cube = iris.load_cube(dataset_dict[0]['filename'])
+            # Set plot title and plot suptitle
+            cube.attributes['plottitle'] = cube.coord('time').units.num2date(
+                cube.coord('time').points)[0].strftime('%b') + '-'\
+                + 'Duveiller2018'
+            cube.attributes['model_id'] = 'Duveiller2018'
+
+            _plot_cube(cube, cfg)
+            continue
+
+        logger.info("Starting diagnostic for dataset %s", dataset_name)
+
+        # Now reorder the dictionary in a meaningful way, making data
+        # accessible by short name
+        datadict = {}
+        for file_dict in dataset_dict:
+            if file_dict['short_name'] in all_short_names:
+                datadict[file_dict['short_name']] = file_dict
+
+        # Define the different lc classes
+        this_models_xxfracs = [key for key in datadict if 'Frac' in key]
+        # Note that lc3 class depends on the classes available for this model
+        lc3_class = cfg['params']['lc3_class']
+        cfg['params']['lc3_class'] = [key for key in this_models_xxfracs
+                                      if key in lc3_class]
+
+        # Load all data
+        model_data = {frac_key: iris.load_cube(datadict[frac_key]['filename'])
+                      for frac_key in this_models_xxfracs}
+        # Load albedo and snow cover
+        model_data['alb'] = iris.load_cube(datadict['alb']['filename'])
+        model_data['snc'] = iris.load_cube(datadict['snc']['filename'])
+
+        # Make sure that for each cube the dimension equals 2
+        assert {c.ndim for _, c in model_data.items()} == {2}
+
+        # Add the appropriate masks to model_data
+        model_data = _add_masks_albedolandcover(model_data,
+                                                this_models_xxfracs,
+                                                cfg)
+
+        # Now get albedo change due to landcover change
+        alb_lc = _get_reconstructed_albedos(model_data, cfg)
+
+        # Now mask where albedo values are physically impossible
+        alb_lc[alb_lc < 0] = np.nan
+        alb_lc[alb_lc > 1] = np.nan
+
+        # Calculate differences between them and save
+        _write_albedochanges_to_disk(alb_lc, model_data['snc'],
+                                     datadict, cfg)
+
+    # Loop through all nc files and plot them
+    for ncfile in glob.glob(os.path.join(cfg['work_dir'], '*.nc')):
+        transition_cube = iris.load_cube(ncfile)
+        _plot_cube(transition_cube, cfg)
+
+
+if __name__ == '__main__':
+    # always use run_diagnostic() to get the config (the preprocessor
+    # nested dictionary holding all the needed information)
+    with run_diagnostic() as config:
+        # list here the functions that need to run
+        main(config)
diff --git a/esmvaltool/diag_scripts/landcover/landcover.py b/esmvaltool/diag_scripts/landcover/landcover.py
index 8b6424e227..1ad01812a2 100644
--- a/esmvaltool/diag_scripts/landcover/landcover.py
+++ b/esmvaltool/diag_scripts/landcover/landcover.py
@@ -309,7 +309,12 @@ def get_timmeans(attr, cubes, refset, prov_rec):
         'statistics': ['mean'],
         'domains': ['global'],
         'plot_type': 'regional averages',
-        'authors': ['hage_st', 'loew_al', 'muel_bn', 'stac_to'],
+        'authors': [
+            'hagemann_stefan',
+            'loew_alexander',
+            'mueller_benjamin',
+            'stacke_tobias',
+        ],
         'references': [
             'acknow_project',
         ],
@@ -339,9 +344,8 @@ def write_data(cfg, cubes, var, prov_rec):
     # Join cubes in one list with ref being the last entry
     outcubes = cubes['exp'][var] + cubes['ref'][var]
 
-    if cfg[diag.names.WRITE_NETCDF]:
-        iris.save(outcubes, filepath)
-        logger.info("Writing %s", filepath)
+    iris.save(outcubes, filepath)
+    logger.info("Writing %s", filepath)
 
     # provenance tracking
     with ProvenanceLogger(cfg) as provenance_logger:
diff --git a/esmvaltool/diag_scripts/lst/lst.py b/esmvaltool/diag_scripts/lst/lst.py
new file mode 100644
index 0000000000..904f15d9fe
--- /dev/null
+++ b/esmvaltool/diag_scripts/lst/lst.py
@@ -0,0 +1,232 @@
+"""
+ESMValTool diagnostic for ESA CCI LST data.
+
+The code uses the all-time-average monthly data.
+The output is a timeseries plot of the mean difference of
+CCI LST to the model ensemble average, with the ensemble spread
+represented by a standard deviation either side of the mean.
+"""
+
+import logging
+
+import iris
+import matplotlib.pyplot as plt
+import numpy as np
+
+from esmvaltool.diag_scripts.shared import (
+    ProvenanceLogger,
+    get_plot_filename,
+    group_metadata,
+    run_diagnostic,
+)
+
+logger = logging.getLogger(__name__)
+
+
+def _get_input_cubes(metadata):
+    """Load the data files into cubes.
+
+    Based on the hydrology diagnostic.
+
+    Inputs:
+    metadata = List of dictionaries made from the preprocessor config
+
+    Outputs:
+    inputs = Dictionary of cubes
+    ancestors = Dictionary of filename information
+    """
+    inputs = {}
+    ancestors = {}
+    for attributes in metadata:
+        short_name = attributes['short_name']
+        filename = attributes['filename']
+        logger.info("Loading variable %s", short_name)
+        cube = iris.load_cube(filename)
+        cube.attributes.clear()
+        inputs[short_name] = cube
+        ancestors[short_name] = [filename]
+
+    return inputs, ancestors
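For orientation, `_get_input_cubes` is driven by the metadata list that the ESMValTool preprocessor hands to the diagnostic. A minimal, illustrative call; the file path and metadata entries here are made up:

```python
# Illustrative metadata, mimicking what the preprocessor passes in;
# the file path is hypothetical.
metadata = [
    {'short_name': 'ts', 'filename': '/work/ESACCI-LST_ts.nc'},
]
cubes, ancestors = _get_input_cubes(metadata)
print(cubes['ts'])        # iris cube with its attributes cleared
print(ancestors['ts'])    # ['/work/ESACCI-LST_ts.nc']
```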
+
+
+def _make_plots(lst_diff_data, lst_diff_data_low, lst_diff_data_high, config):
+    """Create and save the output figure.
+
+    The plot is a mean difference with +/- one standard deviation
+    of the model spread.
+
+    Inputs:
+    lst_diff_data = cube of the mean difference
+    lst_diff_data_low = cube of the mean difference
+                        with model minus standard deviation
+    lst_diff_data_high = cube of the mean difference
+                         with model plus standard deviation
+    config = The config dictionary from the preprocessor
+
+    Outputs:
+    Saved figure
+    """
+    fig, ax = plt.subplots(figsize=(20, 15))
+
+    ax.plot(lst_diff_data.data, color='black', linewidth=4)
+    ax.plot(lst_diff_data_low.data, '--', color='blue', linewidth=3)
+    ax.plot(lst_diff_data_high.data, '--', color='blue', linewidth=3)
+    ax.fill_between(range(len(lst_diff_data.data)),
+                    lst_diff_data_low.data,
+                    lst_diff_data_high.data,
+                    color='blue',
+                    alpha=0.25)
+
+    # make X ticks
+    x_tick_list = []
+    time_list = lst_diff_data.coord('time').units.num2date(
+        lst_diff_data.coord('time').points)
+    for item in time_list:
+        if item.month == 1:
+            x_tick_list.append(item.strftime('%Y %b'))
+        elif item.month == 7:
+            x_tick_list.append(item.strftime('%b'))
+        else:
+            x_tick_list.append('')
+
+    ax.set_xticks(range(len(lst_diff_data.data)))
+    ax.set_xticklabels(x_tick_list, fontsize=18, rotation=45)
+
+    # make Y ticks
+    y_lower = np.floor(lst_diff_data_low.data.min())
+    y_upper = np.ceil(lst_diff_data_high.data.max())
+    ax.set_yticks(np.arange(y_lower, y_upper + 0.1, 2))
+    ax.set_yticklabels(np.arange(y_lower, y_upper + 0.1, 2), fontsize=18)
+    ax.set_ylim((y_lower - 0.1, y_upper + 0.1))
+
+    ax.set_xlabel('Date', fontsize=20)
+    ax.set_ylabel('Difference / K', fontsize=20)
+
+    ax.grid()
+
+    lons = lst_diff_data.coord('longitude').bounds
+    lats = lst_diff_data.coord('latitude').bounds
+
+    ax.set_title('Area: lon %s lat %s' % (lons[0], lats[0]), fontsize=22)
+
+    fig.suptitle('ESACCI LST - CMIP6 Historical Ensemble Mean', fontsize=24)
+
+    plot_path = get_plot_filename('timeseries', config)
+    plt.savefig(plot_path)
+    plt.close('all')  # close all figures once the plot has been saved
+
+
+def _get_provenance_record(attributes, ancestor_files):
+    """Create the provenance record dictionary.
+
+    Inputs:
+    attributes = dictionary of ensembles/models used, the region bounds
+                 and years of data used.
+    ancestor_files = list of data files used by the diagnostic.
+
+    Outputs:
+    record = dictionary of provenance records.
+    """
+    caption = "Timeseries of ESA CCI LST difference to mean of "\
+        "model ensembles calculated over region bounded by latitude "\
+        "{lat_south} to {lat_north}, longitude {lon_west} to {lon_east} "\
+        "and for model/ensembles {ensembles}. "\
+        "Shown for years {start_year} to {end_year}.".format(**attributes)
+
+    record = {
+        'caption': caption,
+        'statistics': ['mean', 'stddev'],
+        'domains': ['reg'],
+        'plot_types': ['times'],
+        'authors': ['king_robert'],
+        # 'references': [],
+        'ancestors': ancestor_files
+    }
+
+    return record
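The heart of `_diagnostic` below is plain cube arithmetic: observations minus the ensemble mean, bracketed by one standard deviation of the ensemble spread. The same pattern in bare numpy, with invented values standing in for the real cubes:

```python
import numpy as np

obs = np.array([280.0, 281.5, 283.0])        # stand-in for the CCI series
model_mean = np.array([279.0, 281.0, 284.0])
model_std = np.array([0.5, 0.4, 0.6])

diff = obs - model_mean                      # lst_diff_cube analogue
diff_low = obs - (model_mean + model_std)    # lst_diff_cube_low analogue
diff_high = obs - (model_mean - model_std)   # lst_diff_cube_high analogue
# diff always lies between diff_low and diff_high, which is what
# produces the shaded band in the timeseries plot.
```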
+
+
+def _diagnostic(config):
+    """Perform the control for the ESA CCI LST diagnostic.
+
+    Parameters
+    ----------
+    config: dict
+        the preprocessor nested dictionary holding
+        all the needed information.
+
+    Returns
+    -------
+    figures made by _make_plots.
+    """
+    # this loading function is based on the hydrology diagnostic
+    input_metadata = config['input_data'].values()
+
+    loaded_data = {}
+    ancestor_list = []
+    for dataset, metadata in group_metadata(input_metadata, 'dataset').items():
+        cubes, ancestors = _get_input_cubes(metadata)
+        loaded_data[dataset] = cubes
+        ancestor_list.append(ancestors['ts'][0])
+
+    # loaded_data is a nested dictionary
+    # KEY1 model ESACCI-LST or something else
+    # KEY2 is ts, the surface temperature
+    # ie loaded_data['ESACCI-LST']['ts'] is the CCI cube
+    # loaded_data['MultiModelMean']['ts'] is CMIP6 data, ensemble means
+    # similarly for Std, see preprocessor
+
+    # The diagnostic uses CCI - MODEL
+
+    # CMIP data had a 360 day calendar, CCI data has a 365 day calendar
+    # Assume the loaded data is all the same shape
+    loaded_data['MultiModelMean']['ts'].remove_coord('time')
+    loaded_data['MultiModelMean']['ts'].add_dim_coord(
+        loaded_data['ESACCI-LST']['ts'].coord('time'), 0)
+    loaded_data['MultiModelStd_Dev']['ts'].remove_coord('time')
+    loaded_data['MultiModelStd_Dev']['ts'].add_dim_coord(
+        loaded_data['ESACCI-LST']['ts'].coord('time'), 0)
+
+    # Make a cube of the LST difference, and with +/- std of model variation
+    lst_diff_cube = loaded_data['ESACCI-LST']['ts'] - loaded_data[
+        'MultiModelMean']['ts']
+    lst_diff_cube_low = loaded_data['ESACCI-LST']['ts'] - (
+        loaded_data['MultiModelMean']['ts'] +
+        loaded_data['MultiModelStd_Dev']['ts'])
+    lst_diff_cube_high = loaded_data['ESACCI-LST']['ts'] - (
+        loaded_data['MultiModelMean']['ts'] -
+        loaded_data['MultiModelStd_Dev']['ts'])
+
+    # Plotting
+    _make_plots(lst_diff_cube, lst_diff_cube_low, lst_diff_cube_high, config)
+
+    # Provenance
+    # Get this information from the data cubes
+    data_attributes = {}
+    data_attributes['start_year'] = lst_diff_cube.coord('time').units.num2date(
+        lst_diff_cube.coord('time').points)[0].year
+    data_attributes['end_year'] = lst_diff_cube.coord('time').units.num2date(
+        lst_diff_cube.coord('time').points)[-1].year
+    data_attributes['lat_south'] = lst_diff_cube.coord('latitude').bounds[0][0]
+    data_attributes['lat_north'] = lst_diff_cube.coord('latitude').bounds[0][1]
+    data_attributes['lon_west'] = lst_diff_cube.coord('longitude').bounds[0][0]
+    data_attributes['lon_east'] = lst_diff_cube.coord('longitude').bounds[0][1]
+    data_attributes['ensembles'] = ''
+
+    for item in input_metadata:
+        if 'ESACCI' in item['alias'] or 'MultiModel' in item[
+                'alias'] or 'OBS' in item['alias']:
+            continue
+        data_attributes['ensembles'] += "%s " % item['alias']
+
+    record = _get_provenance_record(data_attributes, ancestor_list)
+    plot_file = get_plot_filename('timeseries', config)
+    with ProvenanceLogger(config) as provenance_logger:
+        provenance_logger.log(plot_file, record)
+
+
+if __name__ == '__main__':
+    # always use run_diagnostic() to get the config (the preprocessor
+    # nested dictionary holding all the needed information)
+    with run_diagnostic() as config:
+        _diagnostic(config)
diff --git a/esmvaltool/diag_scripts/magic_bsc/PC.R b/esmvaltool/diag_scripts/magic_bsc/PC.R
new file mode 100644
index 0000000000..3207c689a5
--- /dev/null
+++ b/esmvaltool/diag_scripts/magic_bsc/PC.R
@@ -0,0 +1,137 @@
+library(ggplot2)
+library(plyr)
+library(XML) # nolint; needed by read_xml_pc() below
+
+read_pc <- function(file) {
+  pc <- list()
+  pc$points <- rbind(c(0, 0), read.delim(file, comment.char = "#"))
+  pc$fun <- approxfun(
+    pc$points$WindSpeed,
+    pc$points$Power,
+    method = "linear",
+    yleft = NA,
+    yright = 0
+  )
+  attr <- strsplit(
+    trimws(system(
+      paste(
+        "perl -e 'open FH,\"",
+        file,
+        "\";while(<FH>){@parts= /^# (.+): (.+) /;print \"@parts \";}'", # nolint
+        sep = ""
+      ),
+      intern = TRUE
+    )),
+    "\\s+"
+  )
+  attr <- matrix(unlist(attr), ncol = 2, byrow = T)
+  pc$attr <- as.list(attr[, 2])
+  names(pc$attr) <- attr[, 1]
+  pc$attr$Filename <- file # nolint
+  pc$attr$RatedPower <- as.numeric(pc$attr$RatedPower) # nolint
+  return(pc)
+}
+read_xml_pc <- function(file) {
+  xml <- xmlTreeParse(file, useInternalNodes = TRUE) # nolint
+  xml_data <- xmlToList(xml) # nolint
+  pc <- list()
+  pcs <- xml_data$wind_turbine_properties$power_curves
+  for (i in seq_along(pcs)) {
+    if (pcs[[i]]$air_density == 1.225) {
+      pc$points <- ldply(
+        pcs[[i]]$power_curve_table, # nolint
+        data.frame
+      )[, c(2, 3)]
+      colnames(pc$points) <- c("WindSpeed", "Power") # nolint
+      pc$points <- transform(
+        pc$points,
+        WindSpeed = as.numeric(as.character(WindSpeed)),
+        # nolint
+        Power = as.numeric(as.character(Power))
+      )
+      pc$points <- rbind(c(0, 0), pc$points)
+      break
+    }
+  }
+  pc$fun <- approxfun(
+    pc$points$WindSpeed,
+    # nolint
+    pc$points$Power,
+    # nolint
+    method = "linear",
+    yleft = NA,
+    yright = 0,
+    ties = "ordered"
+  )
+  pc$attr$Diameter <-
+    xml_data$wind_turbine_properties$rotor_diameter # nolint
+  pc$attr$CutIn <- NA # nolint
+  pc$attr$CutOut <- NA # nolint
+  pc$attr$ReCutIn <- NA # nolint
+  pc$attr$RatedSpeed <- NA # nolint
+  pc$attr$RatedPower <-
+    xml_data$wind_turbine_properties$rated_power # nolint
+  pc$attr$IECClass <- NA # nolint
+  pc$attr$Control <- NA # nolint
+  pc$attr$Density <- 1.225 # nolint
+  pc$attr$Name <- file # nolint
+  pc$attr$Filename <- file # nolint
+  pc$attr$RatedPower <- as.numeric(pc$attr$RatedPower) # nolint
+  return(pc)
+}
+plot_pc <- function(pc) {
+  plot <- ggplot(pc$points, aes(x = WindSpeed, y = Power)) + # nolint
+    geom_point() +
+    stat_function(fun = pc$fun) +
+    xlim(0, 35)
+  return(plot)
+}
+plot_pc_list <- function(list_pcs) {
+  list_funs <- lapply(list_pcs, function(x) {
+    function(y) {
+      x$fun(y) / x$attr$RatedPower
+    } # nolint
+  })
+  names <- lapply(list_pcs, function(x)
+    x$attr$Name) # nolint
+  plot <- ggplot(NULL, aes(x = x, colour = Turbine)) # nolint
+  for (i in seq_along(list_pcs)) {
+    plot <- plot + stat_function(
      data = data.frame(
+        x = 0:30,
+        Turbine = factor(names[[i]])
+      ),
+      fun = list_funs[[i]]
+    )
+  }
+  plot <-
+    plot + xlab("Wind speed (m/s)") + ylab("Capacity Factor (%)") +
+    ggtitle("Selected power curves")
+  return(plot)
+}
+get_list_turbines <- function() {
+  files <- list.files()
+  turb_list <- list()
+  for (i in seq(files)) {
+    file <- files[i]
+    turb_list[[i]] <- read_xml_pc(file)
+  }
+  names(turb_list) <- files
+  return(turb_list)
+}
+wind2power <- function(wind, pc) {
+  power <- pc$fun(wind)
+}
+wind2CF <- function(wind, pc) {
+  power <- pc$fun(wind)
+  CF <- power / pc$attr$RatedPower # nolint
+}
+WPD <- function(wind, ro) {
+  return(0.5 * ro * wind^3)
+}
+bump <- function(x) {
+  f <- function(y) {
+    exp(-1 / y^2)
+  }
+  return(f(x) / (f(x) + f(1 - x)))
+}
diff --git a/esmvaltool/diag_scripts/magic_bsc/PC.r b/esmvaltool/diag_scripts/magic_bsc/PC.r
deleted file mode 100644
index 3f25ecd619..0000000000
--- a/esmvaltool/diag_scripts/magic_bsc/PC.r
+++ /dev/null
@@ -1,112 +0,0 @@
-library(ggplot2)
-library(XML) # nolint
-library(plyr)
-
-read_pc <- function(file) {
-  pc <- list()
-  pc$points <- rbind(c(0, 0), read.delim(file, comment.char = "#"))
-  pc$fun <- approxfun(pc$points$WindSpeed, pc$points$Power, # nolint
-                      method = "linear",
-                      yleft = NA, yright = 0)
-  attr <- strsplit(trimws(system(paste(
-    "perl -e 'open FH,\"", file,
- "\";while(){@parts= /^# (.+): (.+) /;print \"@parts \";}'", - sep = ""), - intern = TRUE)), - "\\s+") - attr <- matrix(unlist(attr), ncol = 2, byrow = T) - pc$attr <- as.list(attr[, 2]) - names(pc$attr) <- attr[, 1] - pc$attr$Filename <- file # nolint - pc$attr$RatedPower <- as.numeric(pc$attr$RatedPower) # nolint - return(pc) -} -read_xml_pc <- function(file) { - xml <- xmlTreeParse(file, useInternalNodes = TRUE) # nolint - xml_data <- xmlToList(xml) # nolint - pc <- list() - pcs <- xml_data$wind_turbine_properties$power_curves - for (i in 1 : length(pcs)) { - if (pcs[[i]]$air_density == 1.225) { - pc$points <- ldply(pcs[[i]]$power_curve_table, #nolint - data.frame)[, c(2, 3)] - colnames(pc$points) <- c("WindSpeed", "Power") #nolint - pc$points <- transform( - pc$points, - WindSpeed = as.numeric(as.character(WindSpeed)), # nolint - Power = as.numeric(as.character(Power)) - ) - pc$points <- rbind(c(0, 0), pc$points) - break - } - } - pc$fun <- approxfun( - pc$points$WindSpeed, #nolint - pc$points$Power, # nolint - method = "linear", - yleft = NA, - yright = 0, - ties = "ordered" - ) - pc$attr$Diameter <- xml_data$wind_turbine_properties$rotor_diameter # nolint - pc$attr$CutIn <- NA # nolint - pc$attr$CutOut <- NA # nolint - pc$attr$ReCutIn <- NA # nolint - pc$attr$RatedSpeed <- NA # nolint - pc$attr$RatedPower <- xml_data$wind_turbine_properties$rated_power # nolint - pc$attr$IECClass <- NA # nolint - pc$attr$Control <- NA # nolint - pc$attr$Density <- 1.225 # nolint - pc$attr$Name <- file # nolint - pc$attr$Filename <- file # nolint - pc$attr$RatedPower <- as.numeric(pc$attr$RatedPower) # nolint - return(pc) -} -plot_pc <- function(pc) { - plot <- ggplot(pc$points, aes(x = WindSpeed, y = Power)) + # nolint - geom_point() + - stat_function(fun = pc$fun) + - xlim(0, 35) -return(plot) -} -plot_pc_list <- function(list_pcs) { - list_funs <- lapply(list_pcs, function(x) { - function(y) { - x$fun(y) / x$attr$RatedPower} # nolint - }) - names <- lapply(list_pcs, function(x) x$attr$Name ) # nolint - plot <- ggplot(NULL, aes(x = x, colour = Turbine)) #nolint - for (i in 1 : length(list_pcs)) { - plot <- plot + stat_function(data = data.frame(x = 0 : 30, - Turbine = factor(names[[i]])), fun = list_funs[[i]]) - } - plot <- plot + xlab("Wind speed (m/s)") + ylab("Capacity Factor (%)") + - ggtitle("Selected power curves") - return(plot) -} -get_list_turbines <- function() { - files <- list.files() - turb_list <- list() - for (i in seq(files)) { - file <- files[i] - turb_list[[i]] <- read_xml_pc(file) - } - names(turb_list) <- files - return(turb_list) -} -wind2power <- function(wind, pc) { - power <- pc$fun(wind) -} -wind2CF <- function(wind, pc) { - power <- pc$fun(wind) - CF <- power / pc$attr$RatedPower #nolint -} -WPD <- function(wind, ro) { - return(0.5 * ro * wind ^ 3) -} -bump <- function(x) { - f <- function(y) { - exp(-1 / y ^ 2) - } - return(f(x) / (f(x) + f(1 - x))) -} diff --git a/esmvaltool/diag_scripts/magic_bsc/RegimesAssign.R b/esmvaltool/diag_scripts/magic_bsc/RegimesAssign.R new file mode 100644 index 0000000000..fad807914d --- /dev/null +++ b/esmvaltool/diag_scripts/magic_bsc/RegimesAssign.R @@ -0,0 +1,148 @@ +anom2regime <- function(ref, target, method = "distance", lat) { # nolint + posdim <- which(names(dim(ref)) == "nclust") + poslat <- which(names(dim(ref)) == "lat") + poslon <- which(names(dim(ref)) == "lon") + + nclust <- dim(ref)[posdim] + + if (all(dim(ref)[-posdim] != dim(target))) { + stop("The target should have the same dimensions [lat,lon] that + the reference ") + } 
+  if (is.null(names(dim(ref))) | is.null(names(dim(target)))) {
+    stop(
+      "The arrays should include dimensions names ref[nclust,lat,lon]
+      and target [lat,lon]"
+    )
+  }
+
+  if (length(lat) != dim(ref)[poslat]) {
+    stop("latitudes do not match with the maps")
+  }
+
+  # These dimensions are reorganized
+  ref <- aperm(ref, c(posdim, poslat, poslon))
+  target <-
+    aperm(target, c(which(names(dim(
+      target
+    )) == "lat"), which(names(dim(
+      target
+    )) == "lon")))
+
+  # weights are defined
+  latWeights <-
+    InsertDim(sqrt(cos(lat * pi / 180)), 2, dim(ref)[3]) # nolint
+
+
+  rmsdiff <- function(x, y) {
+    dims <- dim(x)
+    ndims <- length(dims)
+    if (ndims != 2 | ndims != length(dim(y))) {
+      stop("x and y should be maps")
+    }
+    map_diff <- NA * x
+    for (i in 1:dims[1]) {
+      for (j in 1:dims[2]) {
+        map_diff[i, j] <- (x[i, j] - y[i, j])^2
+      }
+    }
+    rmsdiff <- sqrt(mean(map_diff, na.rm = TRUE))
+    return(rmsdiff)
+  }
+
+  if (method == "ACC") {
+    corr <- rep(NA, nclust)
+    for (i in 1:nclust) {
+      corr[i] <-
+        ACC(
+          InsertDim(InsertDim(
+            # nolint
+            InsertDim(ref[i, , ] * latWeights, 1, 1), 2, 1 # nolint
+          ), 3, 1),
+          InsertDim(InsertDim(
+            # nolint
+            InsertDim(target * latWeights, 1, 1), 2, 1 # nolint
+          ), 3, 1)
+        )$ACC[2]
+    }
+    assign <- which(corr == max(corr))
+  }
+
+  if (method == "distance") {
+    rms <- rep(NA, nclust)
+    for (i in 1:nclust) {
+      rms[i] <-
+        rmsdiff(ref[i, , ] * latWeights, target * latWeights) # nolint
+    }
+    assign <- which(rms == min(rms, na.rm = TRUE))
+  }
+  return(assign)
+}
+
+RegimesAssign <- function(var_ano, ref_maps, lats, # nolint
+                          method = "distance") {
+  posdim <- which(names(dim(ref_maps)) == "nclust")
+  poslat <- which(names(dim(ref_maps)) == "lat")
+  poslon <- which(names(dim(ref_maps)) == "lon")
+  poslat_ano <- which(names(dim(var_ano)) == "lat")
+  poslon_ano <- which(names(dim(var_ano)) == "lon")
+
+  nclust <- dim(ref_maps)[posdim]
+  nlat <- dim(ref_maps)[poslat]
+  nlon <- dim(ref_maps)[poslon]
+
+
+  if (is.null(names(dim(ref_maps))) |
+    is.null(names(dim(var_ano)))) {
+    stop(
+      "The arrays should include dimensions names ref[nclust,lat,lon]
+      and target [lat,lon]"
+    )
+  }
+
+  if (length(lats) != dim(ref_maps)[poslat]) {
+    stop("latitudes do not match with the maps")
+  }
+  print(str(var_ano))
+  assign <-
+    Apply(
+      data = list(target = var_ano),
+      margins = c((seq_along(dim(
+        var_ano
+      )))[-c(poslat_ano, poslon_ano)]),
+      fun = "anom2regime",
+      ref = ref_maps,
+      lat = lats,
+      method = method
+    )
+
+  if (poslat_ano < poslon_ano) {
+    dim_order <- c(nlat, nlon)
+  } else {
+    dim_order <- c(nlon, nlat)
+  }
+
+  anom_array <-
+    array(var_ano, dim = c(
+      prod(dim(var_ano)[-c(poslat_ano, poslon_ano)]),
+      dim_order
+    ))
+
+  rm(var_ano)
+
+  index <- as.vector(assign$output1)
+  recon <-
+    Composite(var = aperm(anom_array, c(3, 2, 1)), occ = index)
+  freqs <- rep(NA, nclust)
+  for (n in 1:nclust) {
+    freqs[n] <- (length(which(index == n)) / length(index)) * 100
+  }
+  output <-
+    list(
+      composite = recon$composite,
+      pvalue = recon$pvalue,
+      cluster = assign$output1,
+      frequency = freqs
+    )
+  return(output)
+}
diff --git a/esmvaltool/diag_scripts/magic_bsc/RegimesAssign.r b/esmvaltool/diag_scripts/magic_bsc/RegimesAssign.r
deleted file mode 100644
index e0c6319a1a..0000000000
--- a/esmvaltool/diag_scripts/magic_bsc/RegimesAssign.r
+++ /dev/null
@@ -1,136 +0,0 @@
-anom2regime <- function(ref, target, method = "distance", lat) {
-  posdim <- which(names(dim(ref)) == "nclust")
-  poslat <- which(names(dim(ref)) == "lat")
-  poslon <- which(names(dim(ref)) == "lon")
-
-  nclust <-
dim(ref)[posdim] - - if (all(dim(ref)[-posdim] != dim(target))) { - stop("The target should have the same dimensions [lat,lon] that - the reference ") - } - if (is.null(names(dim(ref))) | is.null(names(dim(target)))) { - stop( - "The arrays should include dimensions names ref[nclust,lat,lon] - and target [lat,lon]" - ) - } - - if (length(lat) != dim(ref)[poslat]) { - stop("latitudes do not match with the maps") - } - - # This dimensions are reorganized - ref <- aperm(ref, c(posdim, poslat, poslon)) - target <- - aperm(target, c(which(names(dim( - target - )) == "lat"), which(names(dim( - target - )) == "lon"))) - - # weights are defined - latWeights <- InsertDim(sqrt(cos(lat * pi / 180)), 2, dim(ref)[3]) #nolint - - - rmsdiff <- function(x, y) { - dims <- dim(x) - ndims <- length(dims) - if (ndims != 2 | ndims != length(dim(y))) { - stop("x and y should be maps") - } - map_diff <- NA * x - for (i in 1 : dims[1]) { - for (j in 1 : dims[2]) { - map_diff[i, j] <- (x[i, j] - y[i, j]) ^ 2 - } - } - rmsdiff <- sqrt(mean(map_diff, na.rm = TRUE)) - return(rmsdiff) - } - - if (method == "ACC") { - corr <- rep(NA, nclust) - for (i in 1:nclust) { - corr[i] <- - ACC(InsertDim(InsertDim( #nolint - InsertDim(ref[i, , ] * latWeights, 1, 1), 2, 1 #nolint - ), 3, 1), - InsertDim(InsertDim( #nolint - InsertDim(target * latWeights, 1, 1), 2, 1 #nolint - ), 3, 1))$ACC[2] - } - assign <- which(corr == max(corr)) - } - - if (method == "distance") { - rms <- rep(NA, nclust) - for (i in 1 : nclust) { - rms[i] <- rmsdiff(ref[i, , ] * latWeights, target * latWeights)#nolint - } - assign <- which(rms == min(rms, na.rm = TRUE)) - } - return(assign) -} - -RegimesAssign <- function(var_ano, ref_maps, lats, #nolint - method = "distance") { - posdim <- which(names(dim(ref_maps)) == "nclust") - poslat <- which(names(dim(ref_maps)) == "lat") - poslon <- which(names(dim(ref_maps)) == "lon") - poslat_ano <- which(names(dim(var_ano)) == "lat") - poslon_ano <- which(names(dim(var_ano)) == "lon") - - nclust <- dim(ref_maps)[posdim] - nlat <- dim(ref_maps)[poslat] - nlon <- dim(ref_maps)[poslon] - - - if (is.null(names(dim(ref_maps))) | is.null(names(dim(var_ano)))) { - stop( - "The arrays should include dimensions names ref[nclust,lat,lon] - and target [lat,lon]" - ) - } - - if (length(lats) != dim(ref_maps)[poslat]) { - stop("latitudes do not match with the maps") - } -print(str(var_ano)) - assign <- - Apply( - data = list(target = var_ano), - margins = c( (1 : length(dim(var_ano)) )[-c(poslat_ano, poslon_ano)]), - fun = "anom2regime", - ref = ref_maps, - lat = lats, - method = method - ) - - if (poslat_ano < poslon_ano) { - dim_order <- c(nlat, nlon) - } else { - dim_order <- c(nlon, nlat) - } - - anom_array <- - array(var_ano, dim = c(prod(dim(var_ano)[-c(poslat_ano, poslon_ano)]), - dim_order)) - - rm(var_ano) - - index <- as.vector(assign$output1) - recon <- Composite(var = aperm(anom_array, c(3, 2, 1)), occ = index) - freqs <- rep(NA, nclust) - for (n in 1 : nclust) { - freqs[n] <- (length(which(index == n)) / length(index)) * 100 - } - output <- - list( - composite = recon$composite, - pvalue = recon$pvalue, - cluster = assign$output1, - frequency = freqs - ) - return(output) -} diff --git a/esmvaltool/diag_scripts/magic_bsc/WeatherRegime.R b/esmvaltool/diag_scripts/magic_bsc/WeatherRegime.R new file mode 100644 index 0000000000..042fc764bf --- /dev/null +++ b/esmvaltool/diag_scripts/magic_bsc/WeatherRegime.R @@ -0,0 +1,163 @@ +AtomicWeatherRegime <- function( # nolint + data, + EOFS = TRUE, + neofs = 30, + threshold = 
NULL, + lon = NULL, + lat = NULL, + ncenters = NULL, + method = "kmeans", + nstart = 30) { + names(dim(data)) <- c("sdate", "ftime", "lat", "lon") + sdate <- which(names(dim(data)) == "sdate") + ftime <- which(names(dim(data)) == "ftime") + nftimes <- dim(data)[ftime] + nsdates <- dim(data)[sdate] + lon2 <- which(names(dim(data)) == "lon") + lat2 <- which(names(dim(data)) == "lat") + data <- aperm(data, c(ftime, sdate, lat2, lon2)) + nlon <- dim(data)[lon2] + nlat <- dim(data)[lat2] + dim(data) <- c(nftimes * nsdates, nlat, nlon) + + if (is.null(ncenters)) { + stop("ncenters must be specified") + } + if (EOFS == TRUE && is.null(lon)) { + stop("longitudes must be specified") + } + if (EOFS == TRUE && is.null(lat)) { + stop("latitudes must be specified") + } + + if (EOFS == TRUE) { + data_pc <- EOF( # nolint + data, + lat = as.vector(lat), + lon = as.vector(lon), + neofs = neofs + ) + if (is.null(threshold)) { + threshold <- sum(data_pc$var) + cluster_input <- data_pc$PC + } else { + threshold <- threshold + min_pc <- + head(as.numeric(which(cumsum(data_pc$var) > threshold)), 1) + cluster_input <- data_pc$PC[, 1:min_pc] + } + } else { + cluster_input <- data + latWeights <- InsertDim( # nolint + InsertDim(cos(lat * pi / 180), 1, nftimes * nsdates), # nolint + 3, + nlon + ) + cluster_input <- cluster_input * latWeights # nolint + dim(cluster_input) <- c(nftimes * nsdates, nlat * nlon) + } + if (method == "kmeans") { + result <- kmeans( + cluster_input, + centers = ncenters, + iter.max = 100, + nstart = nstart, + trace = FALSE + ) + reconstructed <- array(0, c(ncenters, nlat, nlon)) + data <- aperm(data, c(2, 3, 1)) + reconstructed <- Composite(data, result$cluster) # nolint + names(dim(reconstructed$composite)) <- + c("lon", "lat", "cluster") + cluster_timeseries <- list(lengths = c(), values = c()) + frequency <- persistence <- matrix(NA, nsdates, ncenters) + for (i in 1:nsdates) { + occurences <- + rle(result$cluster[((i * nftimes) + 1 - nftimes):(i * nftimes)]) + cluster_timeseries <- list( + lengths = c(cluster_timeseries$lengths, occurences$lengths), + values = c(cluster_timeseries$values, occurences$values) + ) + for (j in 1:ncenters) { + total <- sum(occurences$lengths[occurences$values == j]) + frequency[i, j] <- + (total / nftimes) * 100 + persistence[i, j] <- + mean(occurences$lengths[occurences$values == j]) + } + } + } else { + result <- hclust(dist(cluster_input), method = method) + clusterCut <- cutree(result, ncenters) # nolint + data <- aperm(data, c(3, 2, 1)) + result <- Composite(data, clusterCut) # nolint + } + if (method == "kmeans") { + return( + list( + composite = reconstructed$composite, + pvalue = reconstructed$pvalue, + cluster = as.array(result$cluster), + center = as.array(result$center), + cluster_lengths = as.array(cluster_timeseries$lengths), + cluster_values = as.array(cluster_timeseries$values), + persistence = as.array(persistence), + frequency = frequency + ) + ) + } else { + return(list( + composite = result$composite, + pvalue = result$pvalue, + cluster = as.array(clusterCut) # nolint + )) + } +} + +WeatherRegime <- function( # nolint + data, + EOFS = TRUE, + neofs = 30, + threshold = NULL, + lon = NULL, + lat = NULL, + ncenters = NULL, + method = "kmeans", + nstart = 30, + iter.max = 100, + ncores = NULL) { + if (length(dim(data)) > 4) { + sdate <- which(names(dim(data)) == "sdate") + ftime <- which(names(dim(data)) == "ftime") + lon_dim <- which(names(dim(data)) == "lon") + lat_dim <- which(names(dim(data)) == "lat") + dims <- + 
c(seq_along(dim(data)))[-c(sdate, ftime, lon_dim, lat_dim)] + data <- aperm(data, c(sdate, ftime, lat_dim, lon_dim, dims)) + margins <- 5:length(dim(data)) + result <- Apply( + data = list(data), + margins = list(margins), + fun = "AtomicWeatherRegime", + EOFS = EOFS, + neofs = neofs, + threshold = threshold, + lon = lon, + lat = lat, + ncenters = ncenters, + method = method, + ncores = ncores + ) + } else { + result <- AtomicWeatherRegime( + data, + EOFS = EOFS, + neofs = neofs, + threshold = threshold, + lon = lon, + lat = lat, + ncenters = ncenters, + method = method + ) + } +} diff --git a/esmvaltool/diag_scripts/magic_bsc/WeatherRegime.r b/esmvaltool/diag_scripts/magic_bsc/WeatherRegime.r deleted file mode 100644 index aab9823911..0000000000 --- a/esmvaltool/diag_scripts/magic_bsc/WeatherRegime.r +++ /dev/null @@ -1,135 +0,0 @@ -AtomicWeatherRegime <- function( # nolint - data, EOFS = TRUE, neofs = 30, threshold = NULL, lon = NULL, lat = NULL, - ncenters = NULL, method = "kmeans", nstart = 30) { - names(dim(data)) <- c("sdate", "ftime", "lat", "lon") - sdate <- which(names(dim(data)) == "sdate") - ftime <- which(names(dim(data)) == "ftime") - nftimes <- dim(data)[ftime] - nsdates <- dim(data)[sdate] - lon2 <- which(names(dim(data)) == "lon") - lat2 <- which(names(dim(data)) == "lat") - data <- aperm(data, c(ftime, sdate, lat2, lon2)) - nlon <- dim(data)[lon2] - nlat <- dim(data)[lat2] - dim(data) <- c(nftimes * nsdates, nlat, nlon) - - if (is.null(ncenters)) { - stop("ncenters must be specified") - } - if (EOFS == TRUE && is.null(lon)) { - stop("longitudes must be specified") - } - if (EOFS == TRUE && is.null(lat)) { - stop("latitudes must be specified") - } - - if (EOFS == TRUE) { - data_pc <- EOF( # nolint - data, - lat = as.vector(lat), - lon = as.vector(lon), - neofs = neofs - ) - if (is.null(threshold)){ - threshold <- sum(data_pc$var) - cluster_input <- data_pc$PC - } else { - threshold <- threshold - min_pc <- head(as.numeric(which(cumsum(data_pc$var) > threshold)), 1) - cluster_input <- data_pc$PC[, 1 : min_pc] - } - } else { - cluster_input <- data - latWeights <- InsertDim( #nolint - InsertDim(cos(lat * pi / 180), 1, nftimes * nsdates), #nolint - 3, - nlon - ) - cluster_input <- cluster_input * latWeights #nolint - dim(cluster_input) <- c(nftimes * nsdates, nlat * nlon) - } - if (method == "kmeans") { - result <- kmeans(cluster_input, centers = ncenters, - iter.max = 100, nstart = nstart, trace = FALSE) - reconstructed <- array(0, c(ncenters, nlat, nlon)) - data <- aperm(data, c(2, 3, 1)) - reconstructed <- Composite(data, result$cluster) # nolint - names(dim(reconstructed$composite)) <- c("lon", "lat", "cluster") - cluster_timeseries <- list(lengths = c(), values = c()) - frequency <- persistence <- matrix(NA, nsdates, ncenters) - for (i in 1 : nsdates) { - occurences <- rle( - result$cluster[((i * nftimes) + 1 - nftimes) : (i * nftimes)] #nolint - ) - cluster_timeseries <- list( - lengths = c(cluster_timeseries$lengths, occurences$lengths), - values = c(cluster_timeseries$values, occurences$values) - ) - for (j in 1 : ncenters) { - total <- sum(occurences$lengths[occurences$values == j]) - frequency[i, j] <- (total / nftimes) * 100 - persistence[i, j] <- mean(occurences$lengths[occurences$values == j]) - } - } - } else { - result <- hclust(dist(cluster_input), method = method) - clusterCut <- cutree(result, ncenters) #nolint - data <- aperm(data, c(3, 2, 1)) - result <- Composite(data, clusterCut) # nolint - } - if (method == "kmeans") { - return(list( - composite = 
reconstructed$composite, - pvalue = reconstructed$pvalue, - cluster = as.array(result$cluster), - center = as.array(result$center), - cluster_lengths = as.array(cluster_timeseries$lengths), - cluster_values = as.array(cluster_timeseries$values), - persistence = as.array(persistence), frequency = frequency)) - } else { - return(list( - composite = result$composite, - pvalue = result$pvalue, - cluster = as.array(clusterCut) # nolint - )) - } -} - -WeatherRegime <- function( # nolint - data, EOFS = TRUE, neofs = 30, threshold = NULL, lon = NULL, lat = NULL, - ncenters = NULL, method = "kmeans", nstart = 30, iter.max = 100, - ncores = NULL) { - if (length(dim(data)) > 4) { - sdate <- which(names(dim(data)) == "sdate") - ftime <- which(names(dim(data)) == "ftime") - lon_dim <- which(names(dim(data)) == "lon") - lat_dim <- which(names(dim(data)) == "lat") - dims <- c(1 : length(dim(data)))[-c(sdate, ftime, lon_dim, lat_dim)] - data <- aperm(data, c(sdate, ftime, lat_dim, lon_dim, dims)) - margins <- 5 : length(dim(data)) - result <- Apply( - data = list(data), - margins = list(margins), - fun = "AtomicWeatherRegime", - EOFS = EOFS, - neofs = neofs, - threshold = threshold, - lon = lon, - lat = lat, - ncenters = ncenters, - method = method, - ncores = ncores - ) - } else { - result <- AtomicWeatherRegime( # nolint - data, - EOFS = EOFS, - neofs = neofs, - threshold = threshold, - lon = lon, - lat = lat, - ncenters = ncenters, - method = method - ) - } -} diff --git a/esmvaltool/diag_scripts/magic_bsc/capacity_factor.R b/esmvaltool/diag_scripts/magic_bsc/capacity_factor.R new file mode 100644 index 0000000000..f6756a8d88 --- /dev/null +++ b/esmvaltool/diag_scripts/magic_bsc/capacity_factor.R @@ -0,0 +1,249 @@ +library(abind) +library(climdex.pcic) +library(ggplot2) +library(multiApply) # nolint +library(ncdf4) +library(RColorBrewer) # nolint +library(s2dverification) +library(yaml) + +# Parsing input file paths and creating output dirs +args <- commandArgs(trailingOnly = TRUE) +params <- read_yaml(args[1]) +initial_options <- commandArgs(trailingOnly = FALSE) +file_arg_name <- "--file=" +script_name <- sub( + file_arg_name, "", + initial_options[grep(file_arg_name, initial_options)] +) +script_dirname <- dirname(script_name) + +source(file.path(script_dirname, "PC.R")) +plot_dir <- params$plot_dir +run_dir <- params$run_dir +work_dir <- params$work_dir + +# setup provenance file and list +provenance_file <- paste0(run_dir, "/", "diagnostic_provenance.yml") +provenance <- list() + +## Create working dirs if they do not exist +dir.create(plot_dir, recursive = TRUE) +dir.create(run_dir, recursive = TRUE) +dir.create(work_dir, recursive = TRUE) + + +input_files_per_var <- yaml::read_yaml(params$input_files) +var_names <- names(input_files_per_var) +model_names <- lapply(input_files_per_var, function(x) x$dataset) +model_names <- unname(model_names) +var0 <- lapply(input_files_per_var, function(x) x$short_name) +fullpath_filenames <- names(var0) +var0 <- unname(var0)[1] +start_year <- lapply(input_files_per_var, function(x) x$start_year) +start_year <- c(unlist(unname(start_year)))[1] +end_year <- lapply(input_files_per_var, function(x) x$end_year) +end_year <- c(unlist(unname(end_year)))[1] +seasons <- params$seasons +power_curves <- params$power_curves + + +no_of_years <- length(seq(start_year, end_year, 1)) +var0 <- unlist(var0) +for (i in seq(1, length(model_names), 1)) { + data_nc <- nc_open(fullpath_filenames[i]) + data <- ncvar_get(data_nc, var0) + + names(dim(data)) <- c("lon", "lat", "time") + 
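A few lines further down, the capacity-factor script drops 29 February so that every year has the same number of days before reshaping to (year, day). A rough numpy analogue of that filtering, on synthetic daily data:

```python
import numpy as np

# Illustrative stand-in for the leap-day filtering: drop Feb 29 from a
# daily time axis so every year has the same length.
dates = np.arange('2000-01-01', '2001-01-01', dtype='datetime64[D]')
data = np.random.default_rng(2).normal(size=dates.size)

months = dates.astype('datetime64[M]').astype(int) % 12 + 1
days = (dates - dates.astype('datetime64[M]')).astype(int) + 1
keep = ~((months == 2) & (days == 29))

dates, data = dates[keep], data[keep]
print(dates.size)  # 365 instead of 366 for the leap year 2000
```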
lat <- ncvar_get(data_nc, "lat") + lon <- ncvar_get(data_nc, "lon") + units <- ncatt_get(data_nc, var0, "units")$value + calendar <- ncatt_get(data_nc, "time", "calendar")$value + long_names <- ncatt_get(data_nc, var0, "long_name")$value + time <- ncvar_get(data_nc, "time") + start_date <- as.POSIXct(substr(ncatt_get( + data_nc, "time", + "units" + )$value, 11, 29)) + nc_close(data_nc) + time <- as.Date(time, + origin = substr(start_date, 1, 10), + calendar = calendar + ) + time <- as.POSIXct(time, format = "%Y-%m-%d") + time_dim <- which(names(dim(data)) == "time") + time <- as.PCICt(time, cal = calendar) + if (calendar != "360_day" & calendar != "365_day") { + time <- as.character(time) + jdays <- as.numeric(strftime(time, format = "%j")) + pos <- which(substr(time, 6, 10) == "02-29") + if (length(pos) > 0) { + time <- time[-pos] + data <- apply( + data, c(seq(1, length(dim(data)), 1))[-time_dim], + function(x) { + x[-pos] + } + ) + data <- aperm(data, c(2, 3, 1)) + names(dim(data)) <- c("lon", "lat", "time") + } + } + dims <- dim(data) + dims <- append(dims[-time_dim], c(no_of_years, dims[time_dim] / + no_of_years), after = 2) + dim(data) <- dims + # Convert to 100 m wind: + data <- data * 1.39 + data <- aperm(data, c(3, 4, 2, 1)) + names(dim(data)) <- c("year", "day", "lat", "lon") + ##################################### + # Cross with PC + #################################### + + #--------------------------- + # Load PC to use and compute CF for 6h values + #--------------------------- + seas_data <- Mean1Dim(data, 2) + pc1 <- read_pc(file.path(script_dirname, power_curves[1])) + pc2 <- read_pc(file.path(script_dirname, power_curves[2])) + pc3 <- read_pc(file.path(script_dirname, power_curves[3])) + pc4 <- read_pc(file.path(script_dirname, power_curves[4])) + pc5 <- read_pc(file.path(script_dirname, power_curves[5])) + + + data_cf1 <- wind2CF(data, pc1) + dim(data_cf1) <- dim(data) + data_cf2 <- wind2CF(data, pc2) + dim(data_cf2) <- dim(data) + data_cf3 <- wind2CF(data, pc3) + dim(data_cf3) <- dim(data) + data_cf4 <- wind2CF(data, pc4) + dim(data_cf4) <- dim(data) + data_cf5 <- wind2CF(data, pc5) + dim(data_cf5) <- dim(data) + + #--------------------------- + # Aggregate daily data to seasonal means + #--------------------------- + + seas_data_cf1 <- Mean1Dim(data_cf1, 2) + seas_data_cf2 <- Mean1Dim(data_cf2, 2) + seas_data_cf3 <- Mean1Dim(data_cf3, 2) + seas_data_cf4 <- Mean1Dim(data_cf4, 2) + seas_data_cf5 <- Mean1Dim(data_cf5, 2) + + ############################## + # Make some plots + ############################## + #--------------------------- + # Prepare data, labels and colorscales + #--------------------------- + p <- colorRampPalette(brewer.pal(9, "YlOrRd")) + q <- colorRampPalette(rev(brewer.pal(11, "RdBu"))) + years <- seq(start_year, end_year) + turb_types <- c("IEC I", "IEC I/II", "IEC II", "IEC II/III", "IEC III") + + seas_data_cf_all <- abind(seas_data_cf1, seas_data_cf2, seas_data_cf3, + seas_data_cf4, seas_data_cf5, + along = 0 + ) + mean_data_cf_all <- Mean1Dim(seas_data_cf_all, 2) + anom_data_cf_all <- seas_data_cf_all - InsertDim( # nolint + Mean1Dim(seas_data_cf_all, 2), 2, dim(data)[1] + ) # nolint + pct_anom_data_cf_all <- (seas_data_cf_all / InsertDim( # nolint + Mean1Dim(seas_data_cf_all, 2), 2, dim(data)[1] + )) - 1 # nolint + #--------------------------- + # Plot seasonal CF maps + #--------------------------- + filepng <- paste0( + plot_dir, "/", "capacity_factor_", model_names[i], "_", + start_year, "-", end_year, ".png" + ) + title <- paste0( + seasons, 
" CF from ", model_names[i], + " (", start_year, "-", end_year, ")" + ) + + pw_names <- c( + "Enercon E70", "Gamesa G80", "Gamesa G87", + "Vestas V100", "Vestas V110" + ) + PlotLayout(PlotEquiMap, # nolint + c(3, 2), Mean1Dim(seas_data_cf_all, 2), lon, lat, + colNA = "white", + brks = seq( + from = 0, to = max(seas_data_cf_all, na.rm = TRUE), + length.out = 10 + ), color_fun = clim.palette("yellowred"), + filled.continents = FALSE, toptitle = title, + titles = pw_names, fileout = filepng + ) + + filencdf <- paste0( + work_dir, "/", "capacity_factor_", model_names[i], "_", + start_year, "-", end_year, ".nc" + ) + dimlon <- ncdim_def( + name = "lon", units = "degrees_east", + vals = as.vector(lon), longname = "longitude" + ) + dimlat <- ncdim_def( + name = "lat", units = "degrees_north", + vals = as.vector(lat), longname = "latitude" + ) + dimtime <- ncdim_def( + name = "season", units = "season", + vals = start_year:end_year, + longname = "season of the year: DJF, MAM, JJA, SON" + ) + dimcurve <- ncdim_def( + name = "curve", units = "name", vals = seq(1, 5, 1), + longname = "Power curves of considered turbines" + ) + names(dim(seas_data_cf_all)) <- c("curve", "time", "lat", "lon") + defdata <- ncvar_def( + name = "CapacityFactor", units = "%", + dim = list( + season = dimcurve, dimtime, lat = dimlat, + lon = dimlon + ), + longname = paste( + "Capacity Factor of wind on", + "different turbines" + ) + ) + file <- nc_create(filencdf, list(defdata)) + ncvar_put(file, defdata, seas_data_cf_all) + nc_close(file) + + # Set provenance for output files + xprov <- list( + ancestors = list( + fullpath_filenames[i], + file.path(script_dirname, power_curves[1]), + file.path(script_dirname, power_curves[2]), + file.path(script_dirname, power_curves[3]), + file.path(script_dirname, power_curves[4]), + file.path(script_dirname, power_curves[5]) + ), + authors = list( + "hunter_alasdair", "perez-zanon_nuria", + "manubens_nicolau", "lledo_llorenc", + "caron_louis-philippe", "bojovic_dragana", + "gonzalez-reviriego_nube" + ), + projects = list("c3s-magic"), + caption = title, + statistics = list("other"), + realms = list("atmos"), + themes = list("phys") + ) + provenance[[filepng]] <- xprov + provenance[[filencdf]] <- xprov +} + +# Write provenance to file +write_yaml(provenance, provenance_file) diff --git a/esmvaltool/diag_scripts/magic_bsc/capacity_factor.r b/esmvaltool/diag_scripts/magic_bsc/capacity_factor.r deleted file mode 100644 index 0d4d037efb..0000000000 --- a/esmvaltool/diag_scripts/magic_bsc/capacity_factor.r +++ /dev/null @@ -1,244 +0,0 @@ - -Sys.setenv(TAR = "/bin/tar") # nolint - -library(multiApply) # nolint -library(ggplot2) -library(yaml) -library(s2dverification) -library(climdex.pcic) -library(ncdf4) -library("XML") -#Parsing input file paths and creating output dirs -args <- commandArgs(trailingOnly = TRUE) -params <- read_yaml(args[1]) -print(args) -initial.options <- commandArgs(trailingOnly = FALSE) -file_arg_name <- "--file=" -script_name <- sub( - file_arg_name, "", initial.options[grep(file_arg_name, initial.options)] -) -script_dirname <- dirname(script_name) - -source(file.path(script_dirname, "PC.r")) -plot_dir <- params$plot_dir -run_dir <- params$run_dir -work_dir <- params$work_dir - -# setup provenance file and list -provenance_file <- paste0(run_dir, "/", "diagnostic_provenance.yml") -provenance <- list() - -## Create working dirs if they do not exist -dir.create(plot_dir, recursive = TRUE) -dir.create(run_dir, recursive = TRUE) -dir.create(work_dir, recursive = TRUE) - - 
-input_files_per_var <- yaml::read_yaml(params$input_files) -var_names <- names(input_files_per_var) -model_names <- lapply(input_files_per_var, function(x) x$dataset) -model_names <- unname(model_names) -var0 <- lapply(input_files_per_var, function(x) x$short_name) -fullpath_filenames <- names(var0) -var0 <- unname(var0)[1] -start_year <- lapply(input_files_per_var, function(x) x$start_year) -start_year <- c(unlist(unname(start_year)))[1] -end_year <- lapply(input_files_per_var, function(x) x$end_year) -end_year <- c(unlist(unname(end_year)))[1] -seasons <- params$seasons -power_curves <- params$power_curves - - -no_of_years <- length(start_year : end_year) -var0 <- unlist(var0) -data_nc <- nc_open(fullpath_filenames) -data <- ncvar_get(data_nc, var0) - -names(dim(data)) <- c("lon", "lat", "time") -lat <- ncvar_get(data_nc, "lat") -lon <- ncvar_get(data_nc, "lon") -units <- ncatt_get(data_nc, var0, "units")$value -calendar <- ncatt_get(data_nc, "time", "calendar")$value -long_names <- ncatt_get(data_nc, var0, "long_name")$value -time <- ncvar_get(data_nc, "time") -start_date <- as.POSIXct( - substr(ncatt_get(data_nc, "time", "units")$value, 11, 29 ) -) -nc_close(data_nc) -time <- as.Date(time, origin = start_date, calendar = calendar) -time <- as.POSIXct(time, format = "%Y-%m-%d") - -print(calendar) -print(str(data)) -time_dim <- which(names(dim(data)) == "time") -time <- as.PCICt(time, cal = calendar) -time <- as.character(time) -jdays <- as.numeric(strftime(time, format = "%j")) -if (calendar == "gregorian" | calendar == "standard" | - calendar == "proleptic_gregorian") { - year <- as.numeric(strftime(time, format = "%Y")) - pos <- ( (year / 100) %% 1 == 0) + ( (year / 4) %% 1 == 0) - + ( (year / 400) %% 1 == 0) - pos <- which(pos == 1) - bisiesto <- which(jdays == 60) - if ( length(intersect(pos, bisiesto)) > 0) { - time <- time[-intersect(pos, bisiesto)] - data <- apply(data, c(1 : length(dim(data)))[-time_dim], - function(x) { - x[-intersect(pos, bisiesto)] - }) - data <- aperm(data, c(2, 3, 1)) - names(dim(data)) <- c("lon", "lat", "time") - } -} - -dims <- dim(data) -dims <- append( - dims[-time_dim], c(no_of_years, dims[time_dim] / no_of_years), after = 2 -) - -dim(data) <- dims - -data <- aperm(data, c(3, 4, 2, 1)) -names(dim(data)) <- c("year", "day", "lat", "lon") -##################################### -# Cross with PC -#################################### - -#--------------------------- -# Load PC to use and compute CF for 6h values -#--------------------------- -seas_data <- Mean1Dim(data, 2) -print(power_curves[1]) -pc1 <- read_pc(file.path(script_dirname, power_curves[1])) -pc2 <- read_pc(file.path(script_dirname, power_curves[2])) -pc3 <- read_pc(file.path(script_dirname, power_curves[3])) -pc4 <- read_pc(file.path(script_dirname, power_curves[4])) -pc5 <- read_pc(file.path(script_dirname, power_curves[5])) - - -data_cf1 <- wind2CF(data, pc1) -dim(data_cf1) <- dim(data) -data_cf2 <- wind2CF(data, pc2) -dim(data_cf2) <- dim(data) -data_cf3 <- wind2CF(data, pc3) -dim(data_cf3) <- dim(data) -data_cf4 <- wind2CF(data, pc4) -dim(data_cf4) <- dim(data) -data_cf5 <- wind2CF(data, pc5) -dim(data_cf5) <- dim(data) - -#--------------------------- -# Aggregate daily data to seasonal means -#--------------------------- - -seas_data_cf1 <- Mean1Dim(data_cf1, 2) -seas_data_cf2 <- Mean1Dim(data_cf2, 2) -seas_data_cf3 <- Mean1Dim(data_cf3, 2) -seas_data_cf4 <- Mean1Dim(data_cf4, 2) -seas_data_cf5 <- Mean1Dim(data_cf5, 2) - - - -############################## -# Make some plots 
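The capacity-factor computation in the new script interpolates a turbine power curve at each wind speed and divides by the rated power (`read_pc` plus `wind2CF`). A rough Python sketch of the same idea; the curve points and rated power here are invented:

```python
import numpy as np
from scipy.interpolate import interp1d

# Invented power-curve points: wind speed (m/s) -> power (kW).
wind_speed = np.array([0.0, 3.0, 10.0, 13.0, 25.0])
power = np.array([0.0, 0.0, 1500.0, 2000.0, 2000.0])
rated_power = 2000.0

# Mirrors read_pc()/wind2CF(): linear interpolation, NA below the curve's
# range, zero beyond cut-out.
power_curve = interp1d(wind_speed, power, kind='linear',
                       bounds_error=False, fill_value=(np.nan, 0.0))

wind = np.array([4.0, 8.0, 12.0, 30.0])        # sample 6-hourly winds
capacity_factor = power_curve(wind) / rated_power
print(capacity_factor)   # roughly [0.11, 0.54, 0.92, 0.0]
```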
-############################## -#--------------------------- -# Prepare data, labels and colorscales -#--------------------------- -library(RColorBrewer) # nolint -library(abind) -p <- colorRampPalette(brewer.pal(9, "YlOrRd")) -q <- colorRampPalette(rev(brewer.pal(11, "RdBu"))) -years <- seq(start_year, end_year) -turb_types <- c("IEC I", "IEC I/II", "IEC II", "IEC II/III", "IEC III") - -seas_data_cf_all <- abind( - seas_data_cf1, seas_data_cf2, seas_data_cf3, seas_data_cf4, seas_data_cf5, - along = 0 -) -mean_data_cf_all <- Mean1Dim(seas_data_cf_all, 2) -anom_data_cf_all <- seas_data_cf_all - InsertDim( # nolint - Mean1Dim(seas_data_cf_all, 2), 2, dim(data)[1] # nolint -) -pct_anom_data_cf_all <- (seas_data_cf_all / InsertDim( # nolint - Mean1Dim(seas_data_cf_all, 2), 2, dim(data)[1] # nolint -)) - 1 - -#--------------------------- -# Plot seasonal CF maps -#--------------------------- -filepng <- paste0( - plot_dir, "/", "capacity_factor_", - model_names, "_", start_year, "-", end_year, ".png") -title <- paste0(seasons, " CF from ", - model_names, " (", start_year, "-", end_year, ")") -PlotLayout( # nolint - PlotEquiMap, # nolint - c(3, 2), - Mean1Dim(seas_data_cf_all, 2), - lon, - lat, - filled.continents = F, - toptitle = title, - fileout = filepng) - -filencdf <- paste0(work_dir, "/", "capacity_factor_", - model_names, "_", start_year, "-", end_year, ".nc") - -dimlon <- ncdim_def( - name = "lon", - units = "degrees_east", - vals = as.vector(lon), - longname = "longitude" -) -dimlat <- ncdim_def( - name = "lat", - units = "degrees_north", - vals = as.vector(lat), - longname = "latitude" -) -dimtime <- ncdim_def( - name = "season", - units = "season", - vals = start_year : end_year, - longname = "season of the year: DJF, MAM, JJA, SON" -) -dimcurve <- ncdim_def( - name = "curve", - units = "name", - vals = 1 : 5, - longname = "Power curves of considered turbines" -) - -names(dim(seas_data_cf_all)) <- c("curve", "time", "lat", "lon") -defdata <- ncvar_def( - name = "CapacityFactor", - units = "%", - dim = list(season = dimcurve, dimtime, lat = dimlat, lon = dimlon), - longname = paste0("Capacity Factor of wind on different turbines") -) -file <- nc_create(filencdf, list(defdata)) -ncvar_put(file, defdata, seas_data_cf_all) -nc_close(file) - - # Set provenance for output files - xprov <- list(ancestors = list(fullpath_filenames, - file.path(script_dirname, power_curves[1]), - file.path(script_dirname, power_curves[2]), - file.path(script_dirname, power_curves[3]), - file.path(script_dirname, power_curves[4]), - file.path(script_dirname, power_curves[5])), - authors = list("hunt_al", "manu_ni", "lled_ll", "caro_lo", - "bojo_dr", "gonz_nu"), - projects = list("c3s-magic"), - caption = title, - statistics = list("other"), - realms = list("atmos"), - themes = list("phys"), - plot_file = filepng) - - provenance[[filencdf]] <- xprov - -# Write provenance to file -write_yaml(provenance, provenance_file) diff --git a/esmvaltool/diag_scripts/magic_bsc/combined_indices.R b/esmvaltool/diag_scripts/magic_bsc/combined_indices.R new file mode 100644 index 0000000000..8a9d828636 --- /dev/null +++ b/esmvaltool/diag_scripts/magic_bsc/combined_indices.R @@ -0,0 +1,191 @@ +library(s2dverification) +library(multiApply) # nolint +library(ggplot2) +library(yaml) +library(ncdf4) +library(ClimProjDiags) #nolint +library(abind) +library(climdex.pcic) + +#Parsing input file paths and creating output dirs +args <- commandArgs(trailingOnly = TRUE) +params <- read_yaml(args[1]) + +plot_dir <- params$plot_dir 
+run_dir <- params$run_dir
+work_dir <- params$work_dir
+
+## Create working dirs if they do not exist
+dir.create(plot_dir, recursive = TRUE)
+dir.create(run_dir, recursive = TRUE)
+dir.create(work_dir, recursive = TRUE)
+
+
+# setup provenance file and list
+provenance_file <- paste0(run_dir, "/", "diagnostic_provenance.yml")
+provenance <- list()
+
+input_files_per_var <- yaml::read_yaml(params$input_files)
+var_names <- names(input_files_per_var)
+model_names <- lapply(input_files_per_var, function(x) x$dataset)
+model_names <- unique(unlist(unname(model_names)))
+var0 <- lapply(input_files_per_var, function(x) x$short_name)
+scenario <- unlist(lapply(input_files_per_var, function(x) x$exp))
+ensemble <- unlist(lapply(input_files_per_var, function(x) x$ensemble))
+fullpath_filenames <- names(var0)
+var0 <- unname(var0)[1]
+var0 <- unlist(var0)
+
+start_year <- lapply(input_files_per_var, function(x) x$start_year)
+starting <- c(unlist(unname(start_year)))[1]
+end_year <- lapply(input_files_per_var, function(x) x$end_year)
+ending <- c(unlist(unname(end_year)))[1]
+start_year <- as.POSIXct(as.Date(paste0(starting, "-01-01"), "%Y-%m-%d"))
+end_year <- as.POSIXct(as.Date(paste0(ending, "-12-31"), "%Y-%m-%d"))
+
+# Parameters for the Season() function
+monini <- 1
+moninf <- params$moninf
+monsup <- params$monsup
+months <- ""
+region <- params$region
+running_mean <- params$running_mean
+timestamp <- ""
+standardized <- params$standardized
+data_frame <- NULL
+if (region == "Nino3") {
+  lon_min <- 360 - 150
+  lon_max <- 360 - 90
+  lat_min <- -5
+  lat_max <- 5
+} else if (region == "Nino3.4") {
+  lon_min <- 360 - 170
+  lon_max <- 360 - 120
+  lat_min <- -5
+  lat_max <- 5
+} else if (region == "Nino4") {
+  lon_min <- 160 # Nino4 box spans 160E-150W
+  lon_max <- 360 - 150
+  lat_min <- -5
+  lat_max <- 5
+} else if (region == "NAO") {
+  lon_min <- 360 + c(-90, -90)
+  lon_max <- c(40, 40)
+  lat_min <- c(25, 60)
+  lat_max <- c(45, 80)
+} else if (region == "SOI") {
+  lon_min <- c(90, 360 - 130)
+  lon_max <- c(140, 360 - 80)
+  lat_min <- c(-5, -5)
+  lat_max <- c(5, 5)
+}
+### Load data
+for (i in seq(1, length(model_names), 1)) {
+  data_nc <- nc_open(fullpath_filenames[i])
+  lat <- as.vector(ncvar_get(data_nc, "lat"))
+  lon <- as.vector(ncvar_get(data_nc, "lon"))
+  units <- ncatt_get(data_nc, var0, "units")$value
+  long_names <- ncatt_get(data_nc, var0, "long_name")$value
+
+  data <- InsertDim(ncvar_get(data_nc, var0), 1, 1) # nolint
+  names(dim(data)) <- c("model", "lon", "lat", "time")
+  time <- seq(start_year, end_year, "month")
+  nc_close(data_nc)
+  if (standardized) {
+    data <- Apply(list(data), target_dims = c("time"),
+                  fun = function(x) {(x - mean(x)) / sqrt(var(x))}) #nolint
+    data <- aperm(data$output1, c(2, 3, 4, 1))
+    names(dim(data)) <- c("model", "lon", "lat", "time")
+  }
+  if (!is.null(running_mean)) {
+    data <- Smoothing(data, runmeanlen = running_mean, numdimt = 4) #nolint
+    timestamp <- paste0(running_mean, "-month-running-mean-")
+  }
+  if (!is.null(moninf)) {
+    data <- Season(data, posdim = 4, monini = monini, #nolint
+                   moninf = moninf, monsup = monsup)
+    months <- paste0(month.abb[moninf], "-", month.abb[monsup])
+  }
+  if (length(lon_min) == 1) {
+    data <- WeightedMean(data, lon = lon, lat = lat, #nolint
+                         region = c(lon_min, lon_max, lat_min, lat_max),
+                         londim = 2, latdim = 3, mask = NULL)
+
+    data <- drop(data)
+  } else {
+    data1 <- WeightedMean(data, lon = lon, lat = lat, #nolint
+                 region = c(lon_min[1], lon_max[1], lat_min[1], lat_max[1]),
+                 londim = 2, latdim = 3, mask = NULL)
+    data2 <-
WeightedMean(data, lon = lon, lat = lat, #nolint + region = c(lon_min[2], lon_max[2], lat_min[2], lat_max[2]), + londim = 2, latdim = 3, mask = NULL) + data1 <- drop(data1) + data2 <- drop(data2) + data <- CombineIndices(list(data1, data2), weights = c(1, -1), #nolint + operation = "add") + } + if (moninf > monsup) { + period <- (starting : ending)[-1] + } else { + period <- starting : ending + } + data_frame <- cbind(data_frame, data) +} # close for loop i +# convert to data frame for ggplot +data_frame_plot <- as.data.frame.table(data_frame) +data_frame_plot$year <- rep(period, length(model_names)) +names(data_frame_plot)[2] <- "model" +data_frame_plot$model <- as.factor(sort(rep(seq(1, length(model_names), 1), + length(period)))) +for (i in seq(1, length(levels(data_frame_plot$model)), 1)) { + levels(data_frame_plot$model)[i] <- paste(model_names[i], scenario[i], + ensemble[i]) +} +font_size <- 12 +g <- ggplot(data_frame_plot, aes(x = year, y = Freq, color = model)) + + theme_bw() + + geom_line() + ylab(paste0("Anomaly (", units, ")")) + xlab("Year") + + theme(text = element_text(size = font_size), + legend.text = element_text(size = font_size), + axis.title = element_text(size = font_size)) + + stat_summary(data = data_frame_plot, fun.y = "mean", + mapping = aes(x = data_frame_plot$year, y = data_frame_plot$Freq, + group = interaction(data_frame_plot[2, 3]), + color = data_frame_plot$model), geom = "line", size = 1) + + ggtitle(paste0(region, " index for ", var0, " on ", months, + " (", starting, "-", ending, ")")) + +filepng <- paste0(plot_dir, "/", region, "_", var0, "_", months, + "_running-mean_", running_mean, "_", + starting, "-", ending, ".png") +ggsave(filename = filepng, g, device = NULL) +# Save ncdf data_frame +dimtime <- ncdim_def(name = "Time", units = "years", + vals = period, longname = "Time") +dimmodel <- ncdim_def(name = "Models", units = "names", + vals = seq(1, length(model_names), 1), + longname = paste(model_names, scenario)) +defdata <- ncvar_def(name = "data", units = units, + dim = list(time = dimtime, model = dimmodel), + longname = paste("Index for region", region, + "Variable", var0)) +filencdf <- paste0(work_dir, "/", var0, "_", timestamp, "_", months, "_", + paste(model_names, collapse = ""), + starting, ending, "_", ".nc") +file <- nc_create(filencdf, list(defdata)) +ncvar_put(file, defdata, data_frame) +nc_close(file) + +# Set provenance for output files +xprov <- list(ancestors = fullpath_filenames, + authors = list("perez-zanon_nuria", "hunter_alasdair", + "manubens_nicolau"), + projects = list("c3s-magic"), + caption = "Combined selection", + statistics = list("other"), + realms = list("atmos"), + themes = list("phys")) +provenance[[filencdf]] <- xprov + +# Write provenance to file +write_yaml(provenance, provenance_file) diff --git a/esmvaltool/diag_scripts/magic_bsc/combined_indices_wp6.r b/esmvaltool/diag_scripts/magic_bsc/combined_indices_wp6.r deleted file mode 100644 index c35bc6e4f5..0000000000 --- a/esmvaltool/diag_scripts/magic_bsc/combined_indices_wp6.r +++ /dev/null @@ -1,350 +0,0 @@ -Sys.setenv(TAR = "/bin/tar") # nolint -library(s2dverification) -library(multiApply) # nolint -library(ggplot2) -library(yaml) -library(ncdf4) -library(ClimProjDiags) #nolint -library(abind) -library(climdex.pcic) - -#Parsing input file paths and creating output dirs -args <- commandArgs(trailingOnly = TRUE) -params <- read_yaml(args[1]) -print(params) -plot_dir <- params$plot_dir -run_dir <- params$run_dir -work_dir <- params$work_dir - -## Create working 
dirs if they do not exist -dir.create(plot_dir, recursive = TRUE) -dir.create(run_dir, recursive = TRUE) -dir.create(work_dir, recursive = TRUE) - - -# setup provenance file and list -provenance_file <- paste0(run_dir, "/", "diagnostic_provenance.yml") -provenance <- list() - -input_files_per_var <- yaml::read_yaml(params$input_files[1]) -var_names <- names(input_files_per_var) -model_names <- lapply(input_files_per_var, function(x) x$dataset) -model_names <- unique(unlist(unname(model_names))) - -var0 <- lapply(input_files_per_var, function(x) x$short_name) -fullpath_filenames <- names(var0) -var0 <- unname(var0)[1] - -var0 <- unlist(var0) - - -start_year <- lapply(input_files_per_var, function(x) x$start_year) -start_year <- c(unlist(unname(start_year)))[1] -end_year <- lapply(input_files_per_var, function(x) x$end_year) -end_year <- c(unlist(unname(end_year)))[1] -print(end_year) -print("H") -start_year <- as.POSIXct(as.Date(paste0(start_year, "-01-01"), "%Y-%m-%d")) -end_year <- as.POSIXct(as.Date(paste0(end_year, "-12-31"), "%Y-%m-%d")) - -#Parameters for Season() function -monini <- 1 -moninf <- params$moninf -monsup <- params$monsup -region <- params$region -print("MESES") -print(moninf) -print(monsup) -mask <- NULL ### How can we pass masks from ESMValTool? -running_mean <- params$running_mean -multi_year_average <- params$multi_year_average -weights <- params$weights -### Load data -print(running_mean) -print(multi_year_average) -print(weights) -data_nc <- nc_open(fullpath_filenames) -lat <- ncvar_get(data_nc, "lat") -lon <- ncvar_get(data_nc, "lon") -units <- ncatt_get(data_nc, var0, "units")$value -calendario <- ncatt_get(data_nc, "time", "calendar")$value -long_names <- ncatt_get(data_nc, var0, "long_name")$value -time <- ncvar_get(data_nc, "time") -print(calendario) - -data <- InsertDim(ncvar_get(data_nc, var0), 1, 1) # nolint -print(dim(data)) -start_date <- as.POSIXct(substr(ncatt_get(data_nc, "time", - "units")$value, 11, 29)) -data_type <- ifelse(grepl("day", fullpath_filenames[1]), "day", "month") -print(start_year) -print(end_year) -print(data_type) -time <- seq(start_year, end_year, data_type) -print(length(time)) -print(calendario) -print(time) -print(dim(data)) - -if (dim(data)[4] != length(time)) { - print("AS") - if ( - calendario == "365" | calendario == "365_days" | - calendario == "365_day" | calendario == "noleap" - ) { -time <- time[-which(substr(time, 6, 10) == "02-29")]#nolint - } -} - -print(head(time)) -time <- as.Date(time, origin = start_date, calendar = calendario) -print(head(time)) -time <- as.POSIXct(time, format = "%Y-%m-%d") -print(head(time)) - -time <- as.PCICt(time, cal = calendario) -time <- as.character(time) -jdays <- as.numeric(strftime(time, format = "%j")) -if (data_type == "day") { -if (calendario == "gregorian" | calendario == "standard" | - calendario == "proleptic_gregorian") { - year <- as.numeric(strftime(time, format = "%Y")) - pos <- ( (year / 100) %% 1 == 0) + ( (year / 4) %% 1 == 0) - + ( (year / 400) %% 1 == 0) - pos <- which(pos == 1) - bisiesto <- which(jdays == 60) - if ( length(intersect(pos, bisiesto)) > 0) { - time <- time[-intersect(pos, bisiesto)] - data <- apply(data, c(1 : length(dim(data)))[-3], - function(x) { - x[-intersect(pos, bisiesto)] - }) - data <- aperm(data, c(2, 3, 1)) - names(dim(data)) <- c("lon", "lat", "time") - } -} -} -projection <- "NULL" -nc_close(data_nc) -if (length(params$input_files) >= 2) { - for (i in 2 : length(params$input_files)) { - input_files_per_var <- 
yaml::read_yaml(params$input_files[i]) - var_names <- names(input_files_per_var) - model_names <- lapply(input_files_per_var, function(x) x$dataset) - model_names <- unique(unlist(unname(model_names))) - var0 <- lapply(input_files_per_var, function(x) x$short_name) - fullpath_filenames <- names(var0) - var0 <- unname(var0)[1] - var0 <- unlist(var0) - data_nc <- nc_open(fullpath_filenames) - data <- abind(data, - InsertDim(ncvar_get(data_nc, var0), 1, 1), along = 1) # nolint - nc_close(data_nc) - } -} -names(dim(data)) <- c("model", "lon", "lat", "time") -region <- c(min(lon), max(lon), min(lat), max(lat)) -attributes(lon) <- NULL -attributes(lat) <- NULL -dim(lon) <- c(lon = length(lon)) -dim(lat) <- c(lat = length(lat)) -time_dim <- which(names(dim(data)) == "time") -timestamp <- "" -attributes(time) <- NULL -dim(time) <- c(time = length(time)) -metadata <- list(time = list( - standard_name = "time", - long_name = "time", - units = "days since 1970-01-01 00:00:00", - prec = "double", - dim = list(list(name = "time", unlim = FALSE)) -)) -attr(time, "variables") <- metadata -if (!is.null(region)) { - dim_names <- names(dim(data)) - londim <- which(names(dim(data)) == "lon") - latdim <- which(names(dim(data)) == "lat") - data <- WeightedMean( # nolint - data, - lon = as.vector(lon), - lat = as.vector(lat), - region = region, - mask = NULL - ) - names(dim(data)) <- dim_names[-c(londim, latdim)] - time_dim <- which(names(dim(data)) == "time") -} - -if (!is.null(running_mean)) { - data <- Smoothing(data, runmeanlen = running_mean, numdimt = time_dim) - timestamp <- paste0(running_mean, "-month-running-mean-") -} - -print(paste("moninf", moninf)) -if (!is.null(moninf)) { - months <- paste0(month.abb[moninf], "-", month.abb[monsup]) - print(months) -print(length(time)) -print(dim(data)) - dims <- dim(data) - dims <- append(dims, c(12, dims[time_dim] / 12), after = time_dim) - dims <- dims[-time_dim] - -print(dims) - dim(data) <- dims - names(dim(data))[c(time_dim, time_dim + 1)] <- c("month", "year") - margins <- list(c(1 : length(dim(data)))[-c(time_dim + 1)]) - data <- Season( - data, - posdim = time_dim, - monini = monini, - moninf = moninf, - monsup = monsup - ) - margins <- list(c(1 : length(dim(data)))[-c(time_dim + 1)]) - years_dim <- which(names(dim(data)) == "year") - timestamp <- "Mar-May" - month_dim <- which(names(dim(data)) == "month") - data <- adrop(data, month_dim) - if (multi_year_average == "TRUE") { - time <- time[1] - data <- Mean1Dim(data, c(years_dim - 1)) - } -} - -if (!is.null(weights)) { - indices_dim <- which(names(dim(data)) == "model") - indices <- list() - print(indices_dim) - for (i in 1 : dim(data)[indices_dim]) { - indices[[i]] <- Subset(data, along = indices_dim, indices = i) - } - print(str(indices)) - if (!is.numeric(weights)) { - weights <- "NULL" - print("AQUI") - data <- CombineIndices(indices, weights = NULL) # nolint - print(dim(data)) - } else { - data <- CombineIndices(indices, weights = weights) # nolint - } -} -print(region) -if (!is.null(region)) { - data <- data[1, ] - attributes(data) <- NULL - dim(data) <- c(time = length(data)) - metadata <- list( - index = list( - dim = list(list(name = "time", unlim = FALSE, prec = "double")) - ) - ) - names(metadata)[1] <- var0 - attr(data, "variables") <- metadata - variable_list <- list(variable = data, time = time) - names(variable_list)[1] <- var0 - - model_names_filename <- paste(model_names, collapse = "_") - - print( - paste( - "Attribute projection from climatological data is saved and,", - "if it's 
correct, it can be added to the final output:", - projection)) - dimlon <- ncdim_def( - name = "lon", - units = "degrees_east", - vals = as.vector(lon), - longname = "longitude") - dimlat <- ncdim_def( - name = "lat", - units = "degrees_north", - vals = as.vector(lat), - longname = "latitude") - dimtime <- ncdim_def( - name = "years", - units = "years", - vals = substr(start_year, 1, 4) : substr(end_year, 1, 4), - longname = "time") - defdata <- ncvar_def( - name = "data", - units = units, - dim = list(time = dimtime), - longname = paste("Combination", long_names) - ) - filencdf <- paste0( - work_dir, "/", var0, "_", paste0(model_names, collapse = "_"), - "_", timestamp, "_", model_names_filename, "_", start_year, - "_", end_year, "_", ".nc") -print(dim(data)) -print(str(defdata)) - file <- nc_create(filencdf, list(defdata)) - ncvar_put(file, defdata, data) - nc_close(file) -} else { - data <- data[1, 1, , , ] # nolint - data <- aperm(data, c(3, 2, 1)) - names(dim(data)) <- c("lon", "lat", "time") - metadata <- list( - index = list( - dim = list(list(name = "time", unlim = FALSE, prec = "double")))) - names(metadata)[1] <- var0 - attr(data, "variables") <- metadata - variable_list <- list(variable = data, lat = lat, lon = lon, time = time) - names(variable_list)[1] <- var0 - - print(paste( - "Attribute projection from climatological data is saved and,", - "if it's correct, it can be added to the final output:", - projection) - ) - - dimlon <- ncdim_def( - name = "lon", - units = "degrees_east", - vals = as.vector(lon), - longname = "longitude" - ) - dimlat <- ncdim_def( - name = "lat", - units = "degrees_north", - vals = as.vector(lat), - longname = "latitude") - dimtime <- ncdim_def( - name = "time", - units = "days since 1970-01-01 00:00:00", - vals = as.vector(as.numeric(time)), - longname = "time") - defdata <- ncvar_def( - name = "data", - units = units, - dim = list(time = dimtime), - longname = paste("Combination", long_names)) - filencdf <- paste0( - plot_dir, "/", var0, "_", paste0(model_names, collapse = "_"), - "_", timestamp, "_", model_names_filename, "_", start_year, "_", - end_year, "_", ".nc") - file <- nc_create(filencdf, list(defdata)) - ncvar_put(file, defdata, data) - nc_close(file) - -} - # Set provenance for output files - xprov <- list(ancestors = list(fullpath_filenames), - authors = list("hunt_al", "manu_ni"), - projects = list("c3s-magic"), - caption = "Combined selection", - statistics = list("other"), - moninf = params$moninf, - monsup = params$monsup, - region = list(params$region), - running_mean = params$running_mean, - multi_year_average = params$multi_year_average, - weights = params$weights, - realms = list("atmos"), - themes = list("phys")) -print("OJ") - provenance[[filencdf]] <- xprov -print("Aag") -# Write provenance to file -write_yaml(provenance, provenance_file) diff --git a/esmvaltool/diag_scripts/magic_bsc/diurnal_temp_index.R b/esmvaltool/diag_scripts/magic_bsc/diurnal_temp_index.R new file mode 100644 index 0000000000..eac3da8ba6 --- /dev/null +++ b/esmvaltool/diag_scripts/magic_bsc/diurnal_temp_index.R @@ -0,0 +1,285 @@ +library(yaml) +library(s2dverification) +library(multiApply) # nolint +library(climdex.pcic) +library(ClimProjDiags) # nolint +library(parallel) +library(ncdf4) + +## Insurance products +args <- commandArgs(trailingOnly = TRUE) +params <- read_yaml(args[1]) + +plot_dir <- params$plot_dir +run_dir <- params$run_dir +work_dir <- params$work_dir +## Create working dirs if they do not exist +dir.create(plot_dir, recursive = TRUE) 
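+# Here params$input_files is assumed to hold two metadata files, the first
+# describing the tasmax datasets and the second the tasmin datasets (read
+# in that order below). A commented, illustrative sketch of the quantity
+# behind the DTRRef and DTRIndicator calls further down (the real
+# computation runs on the full lon-lat-time grids via ClimProjDiags):
+#   dtr <- tasmax - tasmin  # daily diurnal temperature range per grid cell
+#   # DTRRef averages dtr over the reference period by season, and
+#   # DTRIndicator counts the days whose dtr exceeds that reference
+#   # by 5 degrees.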
+dir.create(run_dir, recursive = TRUE) +dir.create(work_dir, recursive = TRUE) + +# setup provenance file and list +provenance_file <- paste0(run_dir, "/", "diagnostic_provenance.yml") +provenance <- list() + +# FOR THE FIRST METADATA.yml +input_files_tasmax <- yaml::read_yaml(params$input_files[1]) +model_names <- input_files_tasmax[[1]]$dataset +var_names_tmax <- input_files_tasmax[[1]]$short_name +experiment <- lapply(input_files_tasmax, function(x) { + x$exp +}) # nolint +filename_tasmax <- + lapply(input_files_tasmax, function(x) { + x$filename + }) # nolint + +input_files_tasmin <- yaml::read_yaml(params$input_files[2]) +var_names_tmin <- input_files_tasmin[[1]]$short_name +filename_tasmin <- + lapply(input_files_tasmin, function(x) { + x$filename + }) # nolint + +reference_files <- which(experiment == "historical") +projection_files <- which(experiment != "historical") + +start_historical <- input_files_tasmax[[reference_files]]$start_year +end_historical <- input_files_tasmax[[reference_files]]$end_year +start_projection <- + input_files_tasmax[[projection_files[1]]]$start_year +end_projection <- input_files_tasmax[[projection_files[1]]]$end_year + + +fullpath_hist_tasmax <- filename_tasmax[[reference_files]] +file <- nc_open(fullpath_hist_tasmax) +historical_tasmax <- ncvar_get(file, "tasmax") +names(dim(historical_tasmax)) <- rev(names(file$dim))[-1] +lat <- ncvar_get(file, "lat") +lon <- ncvar_get(file, "lon") +units <- ncatt_get(file, "tasmax", "units")$value +calendario <- ncatt_get(file, "time", "calendar")$value +long_names <- ncatt_get(file, "tasmax", "long_name")$value +time <- ncvar_get(file, "time") +start_date <- as.POSIXct(substr(ncatt_get( + file, "time", + "units" +)$value, 11, 29)) +nc_close(file) + +fullpath_hist_tasmin <- filename_tasmin[[reference_files]] +file <- nc_open(fullpath_hist_tasmin) +historical_tasmin <- ncvar_get(file, "tasmin") +names(dim(historical_tasmin)) <- rev(names(file$dim))[-1] +lat <- ncvar_get(file, "lat") +lon <- ncvar_get(file, "lon") +units <- ncatt_get(file, "tasmin", "units")$value +calendario <- ncatt_get(file, "time", "calendar")$value +long_names <- ncatt_get(file, "tasmin", "long_name")$value +tunits <- ncatt_get(file, "time", "units")$value +time <- ncvar_get(file, "time") +start_date <- as.POSIXct(substr(ncatt_get( + file, "time", + "units" +)$value, 11, 29)) +nc_close(file) +dia <- as.Date(strsplit(tunits, " ")[[1]][3], format = "%Y-%m-%d") +time <- time + dia + + +dtr_base <- DTRRef( + tmax = historical_tasmax, + # nolint + tmin = historical_tasmin, + by.seasons = TRUE, + ncores = NULL, + dates = time, + calendar = calendario +) + +for (i in seq_along(projection_files)) { + fullpath_projection_tasmax <- filename_tasmax[[projection_files[i]]] + file <- nc_open(fullpath_projection_tasmax) + rcp_tasmax <- ncvar_get(file, "tasmax") + names(dim(rcp_tasmax)) <- rev(names(file$dim))[-1] + lat <- ncvar_get(file, "lat") + lon <- ncvar_get(file, "lon") + units <- ncatt_get(file, "tasmax", "units")$value + calendario <- ncatt_get(file, "time", "calendar")$value + long_names <- ncatt_get(file, "tasmax", "long_name")$value + time <- ncvar_get(file, "time") + start_date <- as.POSIXct(substr(ncatt_get( + file, "time", + "units" + )$value, 11, 29)) + nc_close(file) + + fullpath_projection_tasmin <- + filename_tasmin[[projection_files[i]]] + file <- nc_open(fullpath_projection_tasmin) + rcp_tasmin <- ncvar_get(file, "tasmin") + names(dim(rcp_tasmin)) <- rev(names(file$dim))[-1] + lat <- ncvar_get(file, "lat") + lon <- ncvar_get(file, "lon") + 
+  units <- ncatt_get(file, "tasmin", "units")$value
+  calendario <- ncatt_get(file, "time", "calendar")$value
+  long_names <- ncatt_get(file, "tasmin", "long_name")$value
+  tunits <- ncatt_get(file, "time", "units")$value
+  time <- ncvar_get(file, "time")
+  start_date <- as.POSIXct(substr(ncatt_get(
+    file, "time",
+    "units"
+  )$value, 11, 29))
+  nc_close(file)
+
+  dia <-
+    as.Date(strsplit(tunits, " ")[[1]][3], format = "%Y-%m-%d")
+  time <- time + dia
+
+  dtr_indicator <-
+    DTRIndicator(
+      rcp_tasmax,
+      rcp_tasmin,
+      ref = dtr_base,
+      by.seasons = TRUE,
+      ncores = NULL,
+      dates = time,
+      calendar = calendario
+    )
+
+  dtr_rcp <- array(dim = c(4, length(lon), length(lat)))
+  for (j in 1:4) {
+    dtr_rcp[j, , ] <-
+      Mean1Dim(dtr_indicator$indicator[, j, , ], 1)
+  }
+  names(dim(dtr_rcp)) <- c("season", "lon", "lat")
+  title <- paste0(
+    "Number of days exceeding the DTR by 5 degrees\n",
+    "during the period ",
+    start_projection,
+    "-",
+    end_projection
+  )
+  plot_file <- file.path(
+    plot_dir,
+    paste0(
+      "Seasonal_DTRindicator_",
+      model_names,
+      "_",
+      start_projection,
+      "_",
+      end_projection,
+      "_",
+      start_historical,
+      "_",
+      end_historical,
+      ".png"
+    )
+  )
+
+  PlotLayout(
+    PlotEquiMap,
+    plot_dims = c("lon", "lat"),
+    # nolint
+    var = dtr_rcp,
+    colNA = "white",
+    lon = lon,
+    lat = lat,
+    titles = c("DJF", "MAM", "JJA", "SON"),
+    toptitle = title,
+    filled.continents = FALSE,
+    units = "Days",
+    axelab = FALSE,
+    draw_separators = TRUE,
+    subsampleg = 1,
+    brks = seq(0, max(dtr_rcp, na.rm = TRUE), 2),
+    color_fun = clim.palette("yellowred"),
+    extra_margin = c(0, 0, 1, 0),
+    bar_extra_labels = c(2, 0, 0, 0),
+    title_scale = 0.7,
+    fileout = plot_file,
+    col_inf = "white",
+    col_sup = "darkred"
+  )
+
+  dimlon <- ncdim_def(
+    name = "lon",
+    units = "degrees_east",
+    vals = as.vector(lon),
+    longname = "longitude"
+  )
+  dimlat <- ncdim_def(
+    name = "lat",
+    units = "degrees_north",
+    vals = as.vector(lat),
+    longname = "latitude"
+  )
+  dimseason <-
+    ncdim_def(
+      name = "season",
+      units = "season",
+      vals = 1:4,
+      longname = "season of the year: DJF, MAM, JJA, SON"
+    )
+  defdata <-
+    ncvar_def(
+      name = "VulnerabilityIndex",
+      units = "number_of_days",
+      dim = list(
+        season = dimseason,
+        lat = dimlat,
+        lon = dimlon
+      ),
+      longname = paste0(
+        "Number of days exceeding the Diurnal ",
+        "Temperature Range of the reference ",
+        "period by 5 degrees"
+      )
+    )
+  filencdf <-
+    paste0(
+      work_dir,
+      "/",
+      "Seasonal_DTRindicator_",
+      model_names,
+      "_",
+      start_projection,
+      "_",
+      end_projection,
+      "_",
+      start_historical,
+      "_",
+      end_historical,
+      ".nc"
+    )
+  file <- nc_create(filencdf, list(defdata))
+  ncvar_put(file, defdata, dtr_rcp)
+  nc_close(file)
+
+
+  # Set provenance for output files
+  xprov <-
+    list(
+      ancestors = list(
+        filename_tasmin[[reference_files]],
+        filename_tasmax[[reference_files]],
+        filename_tasmin[[projection_files[i]]],
+        filename_tasmax[[projection_files[i]]]
+      ),
+      authors = list(
+        "hunter_alasdair",
+        "manubens_nicolau",
+        "caron_louis-philippe"
+      ),
+      projects = list("c3s-magic"),
+      caption = title,
+      statistics = list("other"),
+      realms = list("atmos"),
+      themes = list("phys")
+    )
+  provenance[[plot_file]] <- xprov
+  provenance[[filencdf]] <- xprov
+}
+
+# Write provenance to file
+write_yaml(provenance, provenance_file)
diff --git a/esmvaltool/diag_scripts/magic_bsc/diurnal_temp_index.r b/esmvaltool/diag_scripts/magic_bsc/diurnal_temp_index.r
deleted file mode 100644
index 531e53cb95..0000000000
---
a/esmvaltool/diag_scripts/magic_bsc/diurnal_temp_index.r +++ /dev/null @@ -1,221 +0,0 @@ -library(yaml) -library(s2dverification) -library(multiApply) # nolint -library(climdex.pcic) -library(ClimProjDiags) #nolint -library(parallel) -library(ncdf4) - -## Insurance products -args <- commandArgs(trailingOnly = TRUE) -params <- read_yaml(args[1]) - -plot_dir <- params$plot_dir -run_dir <- params$run_dir -work_dir <- params$work_dir -## Create working dirs if they do not exist -dir.create(plot_dir, recursive = TRUE) -dir.create(run_dir, recursive = TRUE) -dir.create(work_dir, recursive = TRUE) - -# setup provenance file and list -provenance_file <- paste0(run_dir, "/", "diagnostic_provenance.yml") -provenance <- list() - -#FOR THE FIRST METADATA.yml -input_files_tasmax <- yaml::read_yaml(params$input_files[1]) -model_names <- input_files_tasmax[[1]]$dataset -var_names_tmax <- input_files_tasmax[[1]]$short_name -experiment <- lapply( - input_files_tasmax, - function(x){ - x$exp - } -) -filename_tasmax <- lapply( - input_files_tasmax, - function(x){ - x$filename - } -) - -input_files_tasmin <- yaml::read_yaml(params$input_files[2]) -var_names_tmin <- input_files_tasmin[[1]]$short_name -filename_tasmin <- lapply( - input_files_tasmin, - function(x){ - x$filename - } -) - -reference_files <- which(experiment == "historical") -projection_files <- which(experiment != "historical") - -start_historical <- input_files_tasmax[[reference_files]]$start_year -end_historical <- input_files_tasmax[[reference_files]]$end_year -start_projection <- input_files_tasmax[[projection_files[1]]]$start_year -end_projection <- input_files_tasmax[[projection_files[1]]]$end_year - -#Regime parameters -metric <- params$metric -rcp8.5 <- params$rcp8.5 -rcp2.6 <- params$rcp2.6 -rcp_scenario <- c(rcp8.5, rcp2.6) - -fullpath_hist_tasmax <- filename_tasmax[[reference_files]] -file <- nc_open(fullpath_hist_tasmax) -historical_tasmax <- ncvar_get(file, "tasmax") -names(dim(historical_tasmax)) <- rev(names(file$dim))[-1] -lat <- ncvar_get(file, "lat") -lon <- ncvar_get(file, "lon") -units <- ncatt_get(file, "tasmax", "units")$value -calendario <- ncatt_get(file, "time", "calendar")$value -long_names <- ncatt_get(file, "tasmax", "long_name")$value -time <- ncvar_get(file, "time") -start_date <- as.POSIXct(substr(ncatt_get(file, "time", - "units")$value, 11, 29)) -nc_close(file) - -fullpath_hist_tasmin <- filename_tasmin[[reference_files]] -file <- nc_open(fullpath_hist_tasmin) -historical_tasmin <- ncvar_get(file, "tasmin") -names(dim(historical_tasmin)) <- rev(names(file$dim))[-1] -lat <- ncvar_get(file, "lat") -lon <- ncvar_get(file, "lon") -units <- ncatt_get(file, "tasmin", "units")$value -calendario <- ncatt_get(file, "time", "calendar")$value -long_names <- ncatt_get(file, "tasmin", "long_name")$value -tunits <- ncatt_get(file, "time", "units")$value -time <- ncvar_get(file, "time") -start_date <- as.POSIXct(substr(ncatt_get(file, "time", - "units")$value, 11, 29)) -nc_close(file) -dia <- as.Date(strsplit(tunits, " ")[[1]][3], format = "%Y-%m-%d") -time <- time + dia - - -dtr_base <- DTRRef( # nolint - tmax = historical_tasmax, - tmin = historical_tasmin, - by.seasons = TRUE, - ncores = NULL, - dates = time, - calendar = calendario -) - -for (i in 1 : length(projection_files)) { - fullpath_projection_tasmax <- filename_tasmax[[projection_files[i]]] - file <- nc_open(fullpath_projection_tasmax) - rcp_tasmax <- ncvar_get(file, "tasmax") - names(dim(rcp_tasmax)) <- rev(names(file$dim))[-1] - lat <- ncvar_get(file, "lat") - lon 
<- ncvar_get(file, "lon") - units <- ncatt_get(file, "tasmax", "units")$value - calendario <- ncatt_get(file, "time", "calendar")$value - long_names <- ncatt_get(file, "tasmax", "long_name")$value - time <- ncvar_get(file, "time") - start_date <- as.POSIXct(substr(ncatt_get(file, "time", - "units")$value, 11, 29)) - nc_close(file) - - fullpath_projection_tasmin <- filename_tasmin[[projection_files[i]]] - file <- nc_open(fullpath_projection_tasmin) - rcp_tasmin <- ncvar_get(file, "tasmin") - names(dim(rcp_tasmin)) <- rev(names(file$dim))[-1] - lat <- ncvar_get(file, "lat") - lon <- ncvar_get(file, "lon") - units <- ncatt_get(file, "tasmin", "units")$value - calendario <- ncatt_get(file, "time", "calendar")$value - long_names <- ncatt_get(file, "tasmin", "long_name")$value - tunits <- ncatt_get(file, "time", "units")$value - time <- ncvar_get(file, "time") - start_date <- as.POSIXct(substr(ncatt_get(file, "time", - "units")$value, 11, 29)) - nc_close(file) - - dia <- as.Date(strsplit(tunits, " ")[[1]][3], format = "%Y-%m-%d") - time <- time + dia - -dtr_indicator <- DTRIndicator( - rcp_tasmax, rcp_tasmin, ref = dtr_base, by.seasons = TRUE, ncores = NULL, - dates = time, calendar = calendario - ) - - -dtr_rcp <- array(dim = c(4, length(lon), length(lat))) -for (j in 1 : 4){ - dtr_rcp[j , , ] <- Mean1Dim(dtr_indicator$indicator[, j, , ], 1)#nolint -} -names(dim(dtr_rcp)) <- c("season", "lon", "lat") -title <- paste( - "Number of days exceeding the DTR in 5 degrees during the period", - start_projection, "-", end_projection) -PlotLayout( # nolint - PlotEquiMap, # nolint - plot_dims = c("lon", "lat"), - var = dtr_rcp, - lon = lon, - lat = lat, - titles = c("DJF", "MAM", "JJA", "SON"), - toptitle = title, - filled.continents = FALSE, units = "Days", - axelab = FALSE, draw_separators = TRUE, subsampleg = 1, - brks = seq(0, max(dtr_rcp), 2), color_fun = clim.palette("yellowred"), - bar_extra_labels = c(2, 0, 0, 0), title_scale = 0.7, - fileout = file.path(plot_dir, "rcp85.png") -) - -dimlon <- ncdim_def( - name = "lon", - units = "degrees_east", - vals = as.vector(lon), - longname = "longitude" -) -dimlat <- ncdim_def( - name = "lat", - units = "degrees_north", - vals = as.vector(lat), - longname = "latitude" -) -dimseason <- ncdim_def( - name = "season", - units = "season", - vals = 1 : 4, - longname = "season of the year: DJF, MAM, JJA, SON" -) -defdata <- ncvar_def( - name = "VulnerabilityIndex", - units = "number_of_days", - dim = list(season = dimseason, lat = dimlat, lon = dimlon), - longname = paste0( - "Number of days exceeding in 5 degrees the Diurnal ", - "Temeprature Range for the reference period") -) - -filencdf <- paste0( - work_dir, "/", "Seasonal_DTRindicator_", model_names, "_", - start_projection, "_", end_projection, "_", - start_historical, "_", end_historical, ".nc") -file <- nc_create(filencdf, list(defdata)) -ncvar_put(file, defdata, dtr_rcp) -nc_close(file) - - - # Set provenance for output files - xprov <- list(ancestors = list(filename_tasmin[[reference_files]], - filename_tasmax[[reference_files]], - filename_tasmin[[projection_files[i]]], - filename_tasmax[[projection_files[i]]]), - authors = list("hunt_al", "manu_ni", "caro_lo"), - projects = list("c3s-magic"), - caption = title, - statistics = list("other"), - realms = list("atmos"), - themes = list("phys"), - plot_file = file.path(plot_dir, "rcp85.png")) - - provenance[[filencdf]] <- xprov -} - -# Write provenance to file -write_yaml(provenance, provenance_file) diff --git 
a/esmvaltool/diag_scripts/magic_bsc/extreme_index.R b/esmvaltool/diag_scripts/magic_bsc/extreme_index.R new file mode 100644 index 0000000000..1ef29901ca --- /dev/null +++ b/esmvaltool/diag_scripts/magic_bsc/extreme_index.R @@ -0,0 +1,539 @@ +library(s2dverification) +library(multiApply) # nolint +library(yaml) +library(ncdf4) +library(abind) +library(parallel) +library(ClimProjDiags) # nolint + +# function to flatten nested lists +flatten_lists <- function(x) { + if (!inherits(x, "list")) { + return(list(x)) + } else { + return(unlist(c(lapply(x, flatten_lists)), recursive = FALSE)) + } +} + +args <- commandArgs(trailingOnly = TRUE) +params <- read_yaml(args[1]) +plot_dir <- params$plot_dir +run_dir <- params$run_dir +work_dir <- params$work_dir + +dir.create(plot_dir, recursive = TRUE) +dir.create(run_dir, recursive = TRUE) +dir.create(work_dir, recursive = TRUE) + +weights <- c( + t90p = params$weight_t90p, + t10p = params$weight_t10p, + Wx = params$weight_Wx, + rx5day = params$weight_rx5day, + cdd = params$weight_cdd +) +running_mean <- params$running_mean + +# setup provenance file and list +provenance_file <- paste0(run_dir, "/", "diagnostic_provenance.yml") +provenance <- list() + +wdata <- NULL +for (j in 1:4) { # nolint + input_files_per_var <- yaml::read_yaml(params$input_files[j]) + var0 <- lapply(input_files_per_var, function(x) { + x$short_name + }) + fullpath_filenames <- names(var0) + var0 <- unname(var0)[1] + experiment <- lapply(input_files_per_var, function(x) { + x$exp + }) + experiment <- unlist(unname(experiment)) + + reference_files <- which(unname(experiment) == "historical") + projection_files <- which(unname(experiment) != "historical") + + rcp_scenario <- unique(experiment[projection_files]) + model_names <- lapply(input_files_per_var, function(x) { + x$dataset + }) + model_names <- unlist(unname(model_names))[projection_files] + + start_reference <- + lapply(input_files_per_var, function(x) { + x$start_year + }) + start_reference <- + c(unlist(unname(start_reference))[reference_files])[1] + end_reference <- + lapply(input_files_per_var, function(x) { + x$end_year + }) + end_reference <- + c(unlist(unname(end_reference))[reference_files])[1] + + start_projection <- + lapply(input_files_per_var, function(x) { + x$start_year + }) + start_projection <- + c(unlist(unname(start_projection))[projection_files])[1] + end_projection <- + lapply(input_files_per_var, function(x) { + x$end_year + }) + end_projection <- + c(unlist(unname(end_projection))[projection_files])[1] + + var0 <- unlist(var0) + projection <- "NULL" + reference_filenames <- fullpath_filenames[reference_files] + hist_nc <- nc_open(reference_filenames) + historical_data <- ncvar_get(hist_nc, var0) + + names(dim(historical_data)) <- rev(names(hist_nc$dim))[-1] + lat <- ncvar_get(hist_nc, "lat") + lon <- ncvar_get(hist_nc, "lon") + units <- ncatt_get(hist_nc, var0, "units")$value + calendar <- ncatt_get(hist_nc, "time", "calendar")$value + long_names <- ncatt_get(hist_nc, var0, "long_name")$value + time <- ncvar_get(hist_nc, "time") + # Time correction: + start_date <- + as.POSIXct(paste0(start_reference, "-01-01"), + tz = "UTC", + format = "%Y-%m-%d" + ) + end_date <- + as.POSIXct(paste0(end_reference, "-12-31"), + tz = "UTC", + format = "%Y-%m-%d" + ) + nc_close(hist_nc) + time <- seq(start_date, end_date, "day") + if (calendar == "noleap" | + calendar == "365_day" | calendar == "365") { + time <- time[format(time, "%m-%d") != "02-29"] + } else if (calendar == "360_day" | calendar == "360") { + time <- 
time[format(time, "%m-%d") != "02-29"] + time <- time[format(time, "%m-%d") != "01-31"] + time <- time[format(time, "%m-%d") != "05-31"] + time <- time[format(time, "%m-%d") != "07-31"] + time <- time[format(time, "%m-%d") != "10-31"] + time <- time[format(time, "%m-%d") != "12-31"] + } + # nolint start + # hist_names <- names(dim(historical_data)) + # jpeg(paste0(plot_dir, "/plot1.jpg")) + # PlotEquiMap(historical_data[1,1,1,,], lon = lon, lat = lat, filled = F) + # dev.off() + # ------------------------------ + # Provisional solution to error in dimension order: + # nolint end + historical_data <- as.vector(historical_data) + dim(historical_data) <- c( + model = 1, + var = 1, + lon = length(lon), + lat = length(lat), + time = length(time) + ) + historical_data <- aperm(historical_data, c(1, 2, 5, 3, 4)) + # nolint start + # ------------------------------ + # jpeg(paste0(plot_dir, "/plot2.jpg")) + # PlotEquiMap(historical_data[1,1,1,,], lon = lon, lat = lat, filled = F) + # dev.off() + # nolint end + names(dim(historical_data)) <- + c("model", "var", "time", "lon", "lat") + time_dimension <- which(names(dim(historical_data)) == "time") + attributes(lon) <- NULL + attributes(lat) <- NULL + + dim(lon) <- c(lon = length(lon)) + dim(lat) <- c(lat = length(lat)) + model_dim <- which(names(dim(historical_data)) == "model") + ### Compute the quantiles and standard deviation for the historical period. + if (var0 == "tasmin") { + quantile <- 0.1 + metric <- "t10p" + } else if (var0 == "tasmax") { + quantile <- 0.9 + metric <- "t90p" + } else if (var0 == "sfcWind") { + historical_data <- 0.5 * 1.23 * (historical_data**3) + quantile <- 0.9 + metric <- "Wx" + } else if (var0 == "pr") { + historical_data <- historical_data * 60 * 60 * 24 + metric <- c("rx5day", "cdd") + } + attr(historical_data, "Variables")$dat1$time <- time + + base_sd <- base_sd_historical <- base_mean <- list() + for (m in seq_along(metric)) { + if (var0 != "pr") { + thresholds <- Threshold( + # nolint + historical_data, + calendar = calendar, + qtiles = quantile, + ncores = detectCores() - 1, + na.rm = TRUE + ) + str(thresholds) + base_index <- Climdex( + # nolint + data = historical_data, + calendar = calendar, + metric = metric[m], + threshold = thresholds, + ncores = detectCores() - 1 + ) + } else { + base_index <- Climdex( + # nolint + data = historical_data, + calendar = calendar, + metric = metric[m], + ncores = detectCores() - 1 + ) + } + base_sd[[m]] <- Apply( # nolint + list(base_index$result), + target_dims = list(c(1)), + "sd" + )$output1 + base_sd_historical[[m]] <- InsertDim( # nolint + base_sd[[m]], 1, dim(base_index$result)[1] + ) + + if (var0 != "pr") { + base_mean[[m]] <- 10 + base_mean_historical <- 10 + } else { + base_mean[[m]] <- Apply( # nolint + list(base_index$result), + target_dims = list(c(1)), + "mean" + )$output1 + base_mean_historical <- InsertDim( # nolint + base_mean[[m]], 1, dim(base_index$result)[1] + ) + } + } + # Compute the time series of the relevant index, using the quantiles + # and standard deviation from the index + projection_filenames <- fullpath_filenames[projection_files] + + for (i in seq_along(projection_filenames)) { + proj_nc <- nc_open(projection_filenames[i]) + projection_data <- ncvar_get(proj_nc, var0) + time <- ncvar_get(proj_nc, "time") + # Time correction: + start_date <- + as.POSIXct(paste0(start_projection, "-01-01"), + tz = "UTC", + format = "%Y-%m-%d" + ) + end_date <- + as.POSIXct(paste0(end_projection, "-12-31"), + tz = "UTC", + format = "%Y-%m-%d" + ) + 
nc_close(proj_nc) + time <- seq(start_date, end_date, "day") + if (calendar == "noleap" | + calendar == "365_day" | calendar == "365") { + time <- time[format(time, "%m-%d") != "02-29"] + } else if (calendar == "360_day" | calendar == "360") { + time <- time[format(time, "%m-%d") != "02-29"] + time <- time[format(time, "%m-%d") != "01-31"] + time <- time[format(time, "%m-%d") != "05-31"] + time <- time[format(time, "%m-%d") != "07-31"] + time <- time[format(time, "%m-%d") != "10-31"] + time <- time[format(time, "%m-%d") != "12-31"] + } + projection_data <- as.vector(projection_data) + dim(projection_data) <- c( + model = 1, + var = 1, + lon = length(lon), + lat = length(lat), + time = length(time) + ) + projection_data <- aperm(projection_data, c(1, 2, 5, 3, 4)) + attr(projection_data, "Variables")$dat1$time <- time + names(dim(projection_data)) <- + c("model", "var", "time", "lon", "lat") + num_model <- dim(projection_data)["model"] + print(num_model) + # nolint start + # ------------------------------ + # jpeg(paste0(plot_dir, "/plot4.jpg")) + # PlotEquiMap(projection_data[1,1,1,,], lon = lon, lat = lat, filled = F) + # dev.off() + # nolint end + if (var0 == "pr") { + projection_data <- projection_data * 60 * 60 * 24 + } else if (var0 == "sfcWind") { + projection_data <- 0.5 * 1.23 * (projection_data**3) + } + + for (m in seq_along(metric)) { + if (var0 != "pr") { + projection_index <- + Climdex( + data = projection_data, + metric = metric[m], + calendar = calendar, + threshold = thresholds, + ncores = detectCores() - 1 + ) + projection_mean <- 10 + } else { + # nolint + projection_index <- + Climdex( + data = projection_data, + metric = metric[m], + calendar = calendar, + ncores = detectCores() - 1 + ) + projection_mean <- InsertDim( + base_mean[[m]], 1, # nolint + dim(projection_index$result)[1] + ) + } + base_sd_proj <- InsertDim( + base_sd[[m]], 1, # nolint + dim(projection_index$result)[1] + ) + projection_index_standardized <- + (projection_index$result - projection_mean) / base_sd_proj + for (mod in 1:num_model) { + model_dim <- + which(names(dim(projection_index_standardized)) == "model") + if (length(model_dim) == 0) { + data <- drop(projection_index_standardized) + } else { + print(dim(projection_index_standardized)) + data <- drop(projection_index_standardized[, mod, , , ]) + } + print( + paste( + "Attribute projection from climatological data is saved and,", + "if it's correct, it can be added to the final output:", + projection + ) + ) + dimlon <- ncdim_def( + name = "lon", + units = "degrees_east", + vals = as.vector(lon), + longname = "longitude" + ) + dimlat <- ncdim_def( + name = "lat", + units = "degrees_north", + vals = as.vector(lat), + longname = "latitude" + ) + dimtime <- ncdim_def( + name = "time", + units = "Years", + vals = start_projection:end_projection, + longname = "Time in years" + ) + defdata <- ncvar_def( + name = "data", + units = units, + dim = list( + year = dimtime, + lon = dimlon, + lat = dimlat + ), + longname = paste("Annual", metric[m], long_names) + ) + filencdf <- paste0( + work_dir, + "/", + var0, + "_", + metric[m], + "_risk_insurance_index_", + model_names, + "_", + start_projection, + "_", + end_projection, + "_", + start_reference, + "_", + end_reference, + ".nc" + ) + file <- nc_create(filencdf, list(defdata)) + ncvar_put(file, defdata, projection_index_standardized) + nc_close(file) + + # Plottings + data <- drop(Mean1Dim(projection_index_standardized, 1)) + title <- paste0( + "Index for ", + metric[m], + " ", + substr(start_projection, 
1, 4), + "-", + substr(end_projection, 1, 4), + " ", + " (", + rcp_scenario[i], + " ", + model_names, + ")" + ) + + breaks <- + seq( + -1 * ceiling(max(abs(data), na.rm = TRUE)), + ceiling(max(abs(data), na.rm = TRUE)), + 2 * ceiling(max(abs(data), na.rm = TRUE)) / 16 + ) + filepng <- paste0( + plot_dir, + "/", + metric[m], + "_", + model_names[mod], + "_", + rcp_scenario[i], + "_", + start_projection, + "_", + end_projection, + ".png" + ) + PlotEquiMap( + # nolint + data, + lon = lon, + lat = lat, + filled.continents = FALSE, + toptitle = title, + sizetit = 0.5, + brks = breaks, + fileout = filepng, + colNA = "white" + ) + # Set provenance for output files + xprov <- + list( + ancestors = flatten_lists( + list( + projection_filenames, + reference_filenames + ) + ), + authors = list( + "hunter_alasdair", + "manubens_nicolau", + "caron_louis-philippe" + ), + projects = list("c3s-magic"), + caption = title, + statistics = list("other"), + weight = weights[j + (m - 1)], + realms = list("atmos"), + themes = list("phys") + ) + provenance[[filepng]] <- xprov + provenance[[filencdf]] <- xprov + # compute weights in the data + lon <- as.vector(lon) + lat <- as.vector(lat) + temporal <- + WeightedMean(projection_index_standardized, + # nolint + lon = lon, + lat = lat + ) + time_dim <- which(names(dim(temporal)) == "year") + if (!is.null(running_mean)) { + temporal <- Smoothing(temporal, + runmeanlen = running_mean, + # nolint + numdimt = time_dim + ) + timestamp <- + paste0(running_mean, "-month-running-mean-") + } + wdata[[j + (m - 1)]] <- temporal + } # model index + } # metric index + } # number of projections +} # variable index +if (!is.numeric(weights)) { + data <- CombineIndices(wdata, weights = NULL) # nolint +} else { + data <- CombineIndices(wdata, weights = weights) # nolint +} + +# Plotting time series: +xprov <- list( + ancestors = fullpath_filenames, + authors = list( + "hunter_alasdair", + "manubens_nicolau", + "perez-zanon_nuria" + ), + projects = list("c3s-magic"), + caption = "Combined selection", + statistics = list("other"), + realms = list("atmos"), + themes = list("phys") +) + +data <- drop(data) +if (length(data) >= 5) { + plot_file <- paste0(plot_dir, "/", "CombinedIndices.png") + png(plot_file) + plot( + start_projection:end_projection, + data, + type = "l", + lwd = 2, + col = "darkblue", + xlab = "Time (years)", + ylab = "Combined indices" + ) + dev.off() + provenance[[plot_file]] <- xprov +} +dimtime <- ncdim_def( + name = "time", + units = "years", + vals = start_projection:end_projection, + longname = "time" +) +defdata <- ncvar_def( + name = "data", + units = "adimensional", + dim = list(time = dimtime), + longname = paste("Combination", long_names) +) +filencdf <- + paste0( + work_dir, "/", "_", paste0(model_names, collapse = "_"), + ".nc" + ) +file <- nc_create(filencdf, list(defdata)) +ncvar_put(file, defdata, data) +nc_close(file) + +provenance[[filencdf]] <- xprov + +# Write provenance to file +write_yaml(provenance, provenance_file) diff --git a/esmvaltool/diag_scripts/magic_bsc/extreme_index.r b/esmvaltool/diag_scripts/magic_bsc/extreme_index.r deleted file mode 100644 index 4d2324f40c..0000000000 --- a/esmvaltool/diag_scripts/magic_bsc/extreme_index.r +++ /dev/null @@ -1,326 +0,0 @@ -library(s2dverification) -library(multiApply) #nolint -library(yaml) -library(ncdf4) - -library(parallel) -library(ClimProjDiags) #nolint - -args <- commandArgs(trailingOnly = TRUE) -params <- read_yaml(args[1]) -plot_dir <- params$plot_dir -run_dir <- params$run_dir -work_dir <- 
params$work_dir - -dir.create(plot_dir, recursive = TRUE) -dir.create(run_dir, recursive = TRUE) -dir.create(work_dir, recursive = TRUE) - -# setup provenance file and list -provenance_file <- paste0(run_dir, "/", "diagnostic_provenance.yml") -provenance <- list() - -input_files_per_var <- yaml::read_yaml(params$input_files) - -model_names <- lapply(input_files_per_var, function(x) x$model) -model_names <- unname(model_names) -var0 <- lapply(input_files_per_var, function(x) x$short_name) -fullpath_filenames <- names(var0) -var0 <- unname(var0)[1] -experiment <- lapply(input_files_per_var, function(x) x$exp) -experiment <- unlist(unname(experiment)) - - -reference_files <- which(unname(experiment) == "historical") -projection_files <- which(unname(experiment) != "historical") - -rcp_scenario <- unique(experiment[projection_files]) -model_names <- lapply(input_files_per_var, function(x) x$dataset) -model_names <- unlist(unname(model_names))[projection_files] - -start_reference <- lapply(input_files_per_var, function(x) x$start_year) -start_reference <- c(unlist(unname(start_reference))[reference_files])[1] -end_reference <- lapply(input_files_per_var, function(x) x$end_year) -end_reference <- c(unlist(unname(end_reference))[reference_files])[1] - -start_projection <- lapply(input_files_per_var, function(x) x$start_year) -start_projection <- c(unlist(unname(start_projection))[projection_files])[1] -end_projection <- lapply(input_files_per_var, function(x) x$end_year) -end_projection <- c(unlist(unname(end_projection))[projection_files])[1] - - - - - -metric <- params$metric -var0 <- unlist(var0) -projection <- "NULL" -reference_filenames <- fullpath_filenames[reference_files] -hist_nc <- nc_open(reference_filenames) -historical_data <- ncvar_get(hist_nc, var0) - -names(dim(historical_data)) <- rev(names(hist_nc$dim))[-1] -lat <- ncvar_get(hist_nc, "lat") -lon <- ncvar_get(hist_nc, "lon") -units <- ncatt_get(hist_nc, var0, "units")$value -calendar <- ncatt_get(hist_nc, "time", "calendar")$value -long_names <- ncatt_get(hist_nc, var0, "long_name")$value -time <- ncvar_get(hist_nc, "time") -start_date <- as.POSIXct(substr(ncatt_get(hist_nc, "time", "units")$value, - 11, 29)) -nc_close(hist_nc) -time <- as.Date(time, origin = start_date, calendar = calendar) - - -# nolint start -#hist_names <- names(dim(historical_data)) -#jpeg(paste0(plot_dir, "/plot1.jpg")) -#PlotEquiMap(historical_data[1,1,1,,], lon = lon, lat = lat, filled = F) -#dev.off() -# ------------------------------ -# Provisional solution to error in dimension order: -# nolint end -if ( (end_reference - start_reference + 1) * 12 == length(time) ) { - time <- seq( - as.Date( - paste(start_reference, "01", "01", sep = "-"), - format = "%Y-%m-%d" - ), - as.Date( - paste(end_reference, "12", "01", sep = "-"), - format = "%Y-%m-%d" - ), - "month" - ) -} -historical_data <- as.vector(historical_data) -dim(historical_data) <- c( - model = 1, - var = 1, - lon = length(lon), - lat = length(lat), - time = length(time) -) -historical_data <- aperm(historical_data, c(1, 2, 5, 3, 4)) -# nolint start -# ------------------------------ -#jpeg(paste0(plot_dir, "/plot2.jpg")) -#PlotEquiMap(historical_data[1,1,1,,], lon = lon, lat = lat, filled = F) -#dev.off() -# nolint end - -names(dim(historical_data)) <- c("model", "var", "time", "lon", "lat") -time_dimension <- which(names(dim(historical_data)) == "time") - -attributes(lon) <- NULL -attributes(lat) <- NULL - -dim(lon) <- c(lon = length(lon)) -dim(lat) <- c(lat = length(lat)) -model_dim <- 
which(names(dim(historical_data)) == "model") -###Compute the quantiles and standard deviation for the historical period. - -if (var0 == "tasmin") { - quantile <- 0.1 -} else if (var0 == "tasmax") { - quantile <- 0.9 -} else if (var0 == "sfcWind") { - historical_data <- 0.5 * 1.23 * (historical_data ** 3) - quantile <- 0.9 -} else if (var0 == "pr") { - historical_data <- historical_data * 60 * 60 * 24 - -} -attr(historical_data, "Variables")$dat1$time <- time - -base_sd <- base_sd_historical <- base_mean <- list() -for (m in 1 : length(metric)) { - if (var0 != "pr") { - thresholds <- Threshold( #nolint - historical_data, - calendar = calendar, - qtiles = quantile, - ncores = detectCores() - 1 - ) - str(thresholds) - base_index <- Climdex( #nolint - data = historical_data, - calendar = calendar, - metric = metric[m], - threshold = thresholds, - ncores = detectCores() - 1 - ) - } else { - base_index <- Climdex( #nolint - data = historical_data, - calendar = calendar, - metric = metric[m], - ncores = detectCores() - 1 - ) - } - base_sd[[m]] <- Apply( #nolint - list(base_index$result), - target_dims = list(c(1)), - "sd" - )$output1 - base_sd_historical[[m]] <- InsertDim( #nolint - base_sd[[m]], 1, dim(base_index$result)[1] - ) - - if (var0 != "pr") { - base_mean[[m]] <- 10 - base_mean_historical <- 10 - } else { - base_mean[[m]] <- Apply( #nolint - list(base_index$result), - target_dims = list(c(1)), - "mean" - )$output1 - base_mean_historical <- InsertDim( #nolint - base_mean[[m]], 1, dim(base_index$result)[1] - ) - } -} -# Compute the time series of the relevant index, using the quantiles -# and standard deviation from the index -projection_filenames <- fullpath_filenames[projection_files] - -for (i in 1 : length(projection_filenames)) { - proj_nc <- nc_open(projection_filenames[i]) - projection_data <- ncvar_get(proj_nc, var0) - time <- ncvar_get(proj_nc, "time") - start_date <- as.POSIXct(substr(ncatt_get(proj_nc, "time", "units")$value, - 11, 29)) - calendar <- ncatt_get(hist_nc, "time", "calendar")$value - time <- as.Date(time, origin = start_date, calendar = calendar) - nc_close(proj_nc) - - if ( (end_projection - start_projection + 1) * 12 == length(time) ) { - time <- seq( - as.Date( - paste(start_projection, "01", "01", sep = "-"), - format = "%Y-%m-%d" - ), - as.Date( - paste(end_projection, "12", "01", sep = "-"), - format = "%Y-%m-%d" - ), - "month" - ) - } - projection_data <- as.vector(projection_data) - dim(projection_data) <- c( - model = 1, - var = 1, - lon = length(lon), - lat = length(lat), - time = length(time) - ) - projection_data <- aperm(projection_data, c(1, 2, 5, 3, 4)) - attr(projection_data, "Variables")$dat1$time <- time - names(dim(projection_data)) <- c("model", "var", "time", "lon", "lat") - # nolint start - # ------------------------------ - #jpeg(paste0(plot_dir, "/plot4.jpg")) - #PlotEquiMap(projection_data[1,1,1,,], lon = lon, lat = lat, filled = F) - #dev.off() - # nolint end - - if (var0 == "pr") { - projection_data <- projection_data * 60 * 60 * 24 - } else if (var0 == "sfcWind") { - projection_data <- 0.5 * 1.23 * (projection_data ** 3) - } - - for (m in 1 : length(metric)) { - - if (var0 != "pr") { - projection_index <- Climdex(data = projection_data, metric = metric[m], - calendar = calendar, threshold = thresholds, - ncores = detectCores() - 1) - projection_mean <- 10 - } else { - projection_index <- Climdex(data = projection_data, metric = metric[m], - calendar = calendar, - ncores = detectCores() - 1) - projection_mean <- InsertDim(base_mean[[m]], 
1, #nolint - dim(projection_index$result)[1]) - } - - base_sd_proj <- InsertDim(base_sd[[m]], 1, #nolint - dim(projection_index$result)[1]) - projection_index_standardized <- - (projection_index$result - projection_mean) / base_sd_proj - for (mod in 1 : dim(projection_data)[model_dim]) { - data <- drop(Mean1Dim(projection_index_standardized, 1)) - print(paste( - "Attribute projection from climatological data is saved and,", - "if it's correct, it can be added to the final output:", - projection - )) - dimlon <- ncdim_def( - name = "lon", - units = "degrees_east", - vals = as.vector(lon), - longname = "longitude" - ) - dimlat <- ncdim_def( - name = "lat", - units = "degrees_north", - vals = as.vector(lat), - longname = "latitude" - ) - defdata <- ncvar_def( - name = "data", - units = units, - dim = list(lat = dimlat, lon = dimlon), - longname = paste("Mean", metric[m], long_names) - ) - filencdf <- paste0( - work_dir, "/", var0, "_", metric[m], "_risk_insurance_index_", - model_names, "_", start_projection, "_", end_projection, "_", - start_reference, "_", end_reference, ".nc") - file <- nc_create(filencdf, list(defdata)) - ncvar_put(file, defdata, data) - nc_close(file) - - - title <- paste0( - "Index for ", metric[m], " ", substr(start_projection, 1, 4), "-", - substr(end_projection, 1, 4), " ", " (", rcp_scenario[i], - " ", model_names, ")") - - breaks <- seq(-1 * ceiling(max(abs(data))), ceiling(max(abs(data))), - 2 * ceiling(max(abs(data))) / 16) - filepng <- paste0( - plot_dir, "/", metric[m], "_", model_names[mod], "_", - rcp_scenario[i], - "_", start_projection, "_", end_projection, ".png") - PlotEquiMap( #nolint - data, - lon = lon, - lat = lat, - filled.continents = FALSE, - toptitle = title, - brks = breaks, - fileout = filepng) - - # Set provenance for output files - xprov <- list(ancestors = list(projection_filenames, reference_filenames), - authors = list("hunt_al", "manu_ni", "caro_lo"), - projects = list("c3s-magic"), - caption = title, - statistics = list("other"), - metric = params$metric, - realms = list("atmos"), - themes = list("phys"), - plot_file = filepng) - - provenance[[filencdf]] <- xprov - } - } - -} - -# Write provenance to file -write_yaml(provenance, provenance_file) diff --git a/esmvaltool/diag_scripts/magic_bsc/extreme_spells.R b/esmvaltool/diag_scripts/magic_bsc/extreme_spells.R new file mode 100644 index 0000000000..a738768b61 --- /dev/null +++ b/esmvaltool/diag_scripts/magic_bsc/extreme_spells.R @@ -0,0 +1,421 @@ +library(yaml) +library(s2dverification) +library(multiApply) # nolint +library(ncdf4) +library(climdex.pcic) +library(parallel) +library(ClimProjDiags) # nolint +library(ggplot2) + +args <- commandArgs(trailingOnly = TRUE) +params <- read_yaml(args[1]) +plot_dir <- params$plot_dir +run_dir <- params$run_dir +work_dir <- params$work_dir + +dir.create(plot_dir, recursive = TRUE) +dir.create(run_dir, recursive = TRUE) +dir.create(work_dir, recursive = TRUE) + +# setup provenance file and list +provenance_file <- paste0(run_dir, "/", "diagnostic_provenance.yml") +provenance <- list() + +input_files_per_var <- yaml::read_yaml(params$input_files) +var_names <- names(input_files_per_var) +model_names <- lapply(input_files_per_var, function(x) { + x$dataset +}) +model_names <- unname(model_names) +var0 <- lapply(input_files_per_var, function(x) { + x$short_name +}) +fullpath_filenames <- names(var0) +var0 <- unname(var0)[1] + +experiment <- lapply(input_files_per_var, function(x) { + x$exp +}) +experiment <- unlist(unname(experiment)) + +reference_files 
<- which(unname(experiment) == "historical") +projection_files <- which(unname(experiment) != "historical") + +rcp_scenario <- unique(experiment[projection_files]) +model_names <- lapply(input_files_per_var, function(x) { + x$dataset +}) +model_names <- unlist(unname(model_names))[projection_files] + +start_reference <- + lapply(input_files_per_var, function(x) { + x$start_year + }) +start_reference <- + c(unlist(unname(start_reference))[reference_files])[1] +end_reference <- lapply(input_files_per_var, function(x) { + x$end_year +}) +end_reference <- + c(unlist(unname(end_reference))[reference_files])[1] + +start_projection <- + lapply(input_files_per_var, function(x) { + x$start_year + }) +start_projection <- + c(unlist(unname(start_projection))[projection_files])[1] +end_projection <- + lapply(input_files_per_var, function(x) { + x$end_year + }) +end_projection <- + c(unlist(unname(end_projection))[projection_files])[1] + + +op <- as.character(params$operator) +qtile <- params$quantile +spell_length <- params$min_duration +season <- params$season + +reference_filenames <- fullpath_filenames[reference_files] +projection <- "NULL" +reference_filenames <- fullpath_filenames[reference_files] +hist_nc <- nc_open(reference_filenames) +var0 <- unlist(var0) +historical_data <- ncvar_get(hist_nc, var0) + +names(dim(historical_data)) <- rev(names(hist_nc$dim))[-1] +lat <- ncvar_get(hist_nc, "lat") +lon <- ncvar_get(hist_nc, "lon") +units <- ncatt_get(hist_nc, var0, "units")$value +calendar <- ncatt_get(hist_nc, "time", "calendar")$value +long_names <- ncatt_get(hist_nc, var0, "long_name")$value +time <- ncvar_get(hist_nc, "time") +start_date <- as.POSIXct(substr(ncatt_get( + hist_nc, "time", + "units" +)$value, 11, 29)) +nc_close(hist_nc) +time <- as.Date(time, origin = start_date, calendar = calendar) + + +historical_data <- as.vector(historical_data) +dim(historical_data) <- c( + model = 1, + var = 1, + lon = length(lon), + lat = length(lat), + time = length(time) +) +historical_data <- aperm(historical_data, c(1, 2, 5, 4, 3)) +attr(historical_data, "Variables")$dat1$time <- time +names(dim(historical_data)) <- + c("model", "var", "time", "lon", "lat") +time_dimension <- which(names(dim(historical_data)) == "time") + +base_range <- c( + as.numeric(substr(start_reference, 1, 4)), + as.numeric(substr(end_reference, 1, 4)) +) +threshold <- + Threshold( + historical_data, + base.range = base_range, + # nolint + calendar = calendar, + qtiles = qtile, + ncores = NULL, + na.rm = TRUE + ) + +projection_filenames <- fullpath_filenames[projection_files] +for (i in seq_along(projection_filenames)) { + proj_nc <- nc_open(projection_filenames[i]) + projection_data <- ncvar_get(proj_nc, var0) + time <- ncvar_get(proj_nc, "time") + start_date <- as.POSIXct(substr(ncatt_get( + proj_nc, "time", + "units" + )$value, 11, 29)) + calendar <- ncatt_get(hist_nc, "time", "calendar")$value + time <- as.Date(time, origin = start_date, calendar = calendar) + nc_close(proj_nc) + projection_data <- as.vector(projection_data) + dim(projection_data) <- c( + model = 1, + var = 1, + lon = length(lon), + lat = length(lat), + time = length(time) + ) + projection_data <- aperm(projection_data, c(1, 2, 5, 4, 3)) + attr(projection_data, "Variables")$dat1$time <- time + names(dim(projection_data)) <- + c("model", "var", "time", "lon", "lat") + # ------------------------------ + heatwave <- WaveDuration( + projection_data, + threshold, + # nolint + calendar = calendar, + op = op, + spell.length = spell_length, + by.seasons = TRUE, + 
ncores = NULL + ) + if (season == "summer") { + heatwave_season <- + heatwave$result[seq(2, dim(heatwave$result)[1] - 2, + by = 4 + ), 1, 1, , ] # nolint + years <- + heatwave$years[seq(2, length(heatwave$years) - 2, by = 4)] + } else if (season == "winter") { + heatwave_season <- + heatwave$result[seq(1, dim(heatwave$result)[1] - 2, + by = 4 + ), 1, 1, , ] # nolint + years <- + heatwave$years[seq(1, length(heatwave$years) - 1, by = 4)] + } else if (season == "spring") { + heatwave_season <- + heatwave$result[seq(3, dim(heatwave$result)[1] - 2, + by = 4 + ), 1, 1, , ] # nolint + years <- + heatwave$years[seq(3, length(heatwave$years) - 2, by = 4)] + } else { + heatwave_season <- + heatwave$result[seq(4, dim(heatwave$result)[1] - 2, + by = 4 + ), 1, 1, , ] # nolint + years <- + heatwave$years[seq(4, length(heatwave$years) - 2, by = 4)] + } + + data <- heatwave_season + names(dim(data)) <- c("time", "lon", "lat") + attributes(lon) <- NULL + attributes(lat) <- NULL + dim(lon) <- c(lon = length(lon)) + dim(lat) <- c(lat = length(lat)) + time <- as.numeric(substr(years, 1, 4)) + attributes(time) <- NULL + dim(time) <- c(time = length(time)) + print( + paste( + "Attribute projection from climatological data is saved and,", + "if it's correct, it can be added to the final output:", + projection + ) + ) + + dimlon <- ncdim_def( + name = "lon", + units = "degrees_east", + vals = as.vector(lon), + longname = "longitude" + ) + dimlat <- ncdim_def( + name = "lat", + units = "degrees_north", + vals = as.vector(lat), + longname = "latitude" + ) + dimtime <- + ncdim_def( + name = "time", + units = "years since 0-0-0 00:00:00", + vals = time, + longname = "time" + ) + defdata <- ncvar_def( + name = "duration", + units = "days", + dim = list( + season = dimtime, + lat = dimlat, + lon = dimlon + ), + longname = paste( + "Number of days during the period", + start_projection, + "-", + end_projection, + "for", + season, + "in which", + var0, + "is", + op, + "than the", + qtile, + "quantile obtained from", + start_reference, + "-", + end_reference + ) + ) + filencdf <- + paste0( + work_dir, + "/", + var0, + "_extreme_spell_duration", + season, + "_", + model_names, + "_", + rcp_scenario[i], + "_", + start_projection, + "_", + end_projection, + ".nc" + ) + file <- nc_create(filencdf, list(defdata)) + ncvar_put(file, defdata, data) + nc_close(file) + + # Check dimension order: + if (length(lat) != dim(data)["lat"] | + length(lon) != dim(data)["lon"]) { + if (length(lat) == dim(data)["lon"] & + length(lon) == dim(data)["lat"]) { + poslat <- which(names(dim(data)) == "lat") + poslon <- which(names(dim(data)) == "lon") + names(dim(data))[poslat] <- "lon" + names(dim(data))[poslon] <- "lat" + } + } + timeseries <- WeightedMean(data, + lon = as.vector(lon), + # nolint + lat = as.vector(lat), + mask = NULL + ) + data_frame <- data.frame(Experiment = timeseries) + years <- rep(start_projection:end_projection) + data_frame$year <- c(years) + + title <- + paste( + "Days ", + season, + var0, + paste0( + substr(start_projection, 1, 4), + "-", + substr(end_projection, 1, 4) + ), + op, + "the", + qtile * 100, + "th quantile for", + paste0( + substr(start_reference, 1, 4), + "-", + substr(end_reference, 1, 4), + " (", + rcp_scenario[i], + ")" + ) + ) + filepng1 <- file.path( + plot_dir, + paste0( + "Time_", + var0, + "_extreme_spell_duration", + season, + "_", + model_names, + "_", + rcp_scenario[i], + "_", + start_projection, + "_", + end_projection, + ".png" + ) + ) + + g <- ggplot(data_frame, aes(x = year, y = Experiment)) 
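+# Aside (comments only; ClimProjDiags::WeightedMean is assumed to apply
+# cosine-of-latitude weights): the WeightedMean() call above collapses the
+# named lon/lat dimensions of `data`, leaving one area-averaged value per
+# time step, which is what fills the Experiment column plotted here:
+#   ts <- WeightedMean(data, lon = as.vector(lon), lat = as.vector(lat),
+#                      mask = NULL) # remaining dimension: time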
+ + theme_bw() + + geom_line() + + ylab(paste0("Number of Days")) + + xlab("Year") + + theme( + text = element_text(size = 12), + legend.text = element_text(size = 12), + axis.title = element_text(size = 12) + ) + + ggtitle(title) + ggsave( + filename = filepng1, + g, + device = NULL, + dpi = "print", + width = 10, + height = 6 + ) + + brks <- seq(0, max(Mean1Dim(data, 1), na.rm = TRUE), 4) + filepng <- + paste0( + plot_dir, + "/", + var0, + "_extreme_spell_duration", + season, + "_", + model_names, + "_", + rcp_scenario[i], + "_", + start_projection, + "_", + end_projection, + ".png" + ) + PlotEquiMap( + Mean1Dim(data, 1), + lat = lat, + lon = lon, + # nolint + filled.continents = FALSE, + brks = brks, + color_fun = clim.palette("yellowred"), + units = "Days", + toptitle = title, + fileout = filepng, + title_scale = 0.5 + ) + + # Set provenance for output files + + xprov <- + list( + ancestors = list(projection_filenames, reference_filenames), + authors = list( + "hunter_alasdair", + "manubens_nicolau", + "caron_louis-philippe" + ), + projects = list("c3s-magic"), + caption = title, + statistics = list("other"), + op = as.character(params$operator), + qtile = params$quantile, + spell_length = params$min_duration, + season = params$season, + realms = list("atmos"), + themes = list("phys") + ) + provenance[[filepng]] <- xprov + provenance[[filencdf]] <- xprov +} + +# Write provenance to file +write_yaml(provenance, provenance_file) diff --git a/esmvaltool/diag_scripts/magic_bsc/extreme_spells.r b/esmvaltool/diag_scripts/magic_bsc/extreme_spells.r deleted file mode 100644 index 8db805157a..0000000000 --- a/esmvaltool/diag_scripts/magic_bsc/extreme_spells.r +++ /dev/null @@ -1,236 +0,0 @@ -library(yaml) -library(s2dverification) -library(multiApply) # nolint -library(ncdf4) -library(climdex.pcic) -library(parallel) -library(ClimProjDiags) # nolint - - -args <- commandArgs(trailingOnly = TRUE) -params <- read_yaml(args[1]) -plot_dir <- params$plot_dir -run_dir <- params$run_dir -work_dir <- params$work_dir - -dir.create(plot_dir, recursive = TRUE) -dir.create(run_dir, recursive = TRUE) -dir.create(work_dir, recursive = TRUE) - -# setup provenance file and list -provenance_file <- paste0(run_dir, "/", "diagnostic_provenance.yml") -provenance <- list() - -input_files_per_var <- yaml::read_yaml(params$input_files) -var_names <- names(input_files_per_var) -model_names <- lapply(input_files_per_var, function(x) x$dataset) -model_names <- unname(model_names) -var0 <- lapply(input_files_per_var, function(x) x$short_name) -fullpath_filenames <- names(var0) -var0 <- unname(var0)[1] - -experiment <- lapply(input_files_per_var, function(x) x$exp) -experiment <- unlist(unname(experiment)) - -reference_files <- which(unname(experiment) == "historical") -projection_files <- which(unname(experiment) != "historical") - -rcp_scenario <- unique(experiment[projection_files]) -model_names <- lapply(input_files_per_var, function(x) x$dataset) -model_names <- unlist(unname(model_names))[projection_files] - -start_reference <- lapply(input_files_per_var, function(x) x$start_year) -start_reference <- c(unlist(unname(start_reference))[reference_files])[1] -end_reference <- lapply(input_files_per_var, function(x) x$end_year) -end_reference <- c(unlist(unname(end_reference))[reference_files])[1] - -start_projection <- lapply(input_files_per_var, function(x) x$start_year) -start_projection <- c(unlist(unname(start_projection))[projection_files])[1] -end_projection <- lapply(input_files_per_var, function(x) x$end_year) 
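For reference, the provenance mechanism shared by all of these diagnostics keys the `provenance` list on each output file path and serialises the whole list at the end of the run. A minimal, self-contained sketch of what the final write_yaml() call emits (the file and ancestor names here are hypothetical):

  library(yaml)
  provenance <- list()
  provenance[["/work/tasmax_extreme_spell_durationsummer.nc"]] <- list(
    ancestors = list("historical.nc", "rcp85.nc"),
    authors = list("hunter_alasdair", "manubens_nicolau"),
    projects = list("c3s-magic"),
    caption = "example caption",
    statistics = list("other")
  )
  cat(as.yaml(provenance)) # the YAML that write_yaml() writes to disk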
-end_projection <- c(unlist(unname(end_projection))[projection_files])[1] - - -op <- as.character(params$operator) -qtile <- params$quantile -spell_length <- params$min_duration -season <- params$season - -reference_filenames <- fullpath_filenames[reference_files] -projection <- "NULL" -reference_filenames <- fullpath_filenames[reference_files] -hist_nc <- nc_open(reference_filenames) -var0 <- unlist(var0) -historical_data <- ncvar_get(hist_nc, var0) - -names(dim(historical_data)) <- rev(names(hist_nc$dim))[-1] -lat <- ncvar_get(hist_nc, "lat") -lon <- ncvar_get(hist_nc, "lon") -units <- ncatt_get(hist_nc, var0, "units")$value -calendar <- ncatt_get(hist_nc, "time", "calendar")$value -long_names <- ncatt_get(hist_nc, var0, "long_name")$value -time <- ncvar_get(hist_nc, "time") -start_date <- as.POSIXct(substr(ncatt_get(hist_nc, "time", - "units")$value, 11, 29)) -nc_close(hist_nc) -time <- as.Date(time, origin = start_date, calendar = calendar) - - -historical_data <- as.vector(historical_data) -dim(historical_data) <- c( - model = 1, - var = 1, - lon = length(lon), - lat = length(lat), - time = length(time) -) -historical_data <- aperm(historical_data, c(1, 2, 5, 4, 3)) -attr(historical_data, "Variables")$dat1$time <- time -print(dim(historical_data)) - -names(dim(historical_data)) <- c("model", "var", "time", "lon", "lat") -time_dimension <- which(names(dim(historical_data)) == "time") - -base_range <- c( - as.numeric(substr(start_reference, 1, 4)), - as.numeric(substr(end_reference, 1, 4)) -) -threshold <- Threshold(historical_data, base.range = base_range, #nolint - calendar = calendar, qtiles = qtile, ncores = NULL) - -projection_filenames <- fullpath_filenames[projection_files] -for (i in 1 : length(projection_filenames)) { - proj_nc <- nc_open(projection_filenames[i]) - projection_data <- ncvar_get(proj_nc, var0) - time <- ncvar_get(proj_nc, "time") - start_date <- as.POSIXct(substr(ncatt_get(proj_nc, "time", - "units")$value, 11, 29)) - calendar <- ncatt_get(hist_nc, "time", "calendar")$value - time <- as.Date(time, origin = start_date, calendar = calendar) - nc_close(proj_nc) - projection_data <- as.vector(projection_data) - dim(projection_data) <- c( - model = 1, - var = 1, - lon = length(lon), - lat = length(lat), - time = length(time) - ) - projection_data <- aperm(projection_data, c(1, 2, 5, 4, 3)) - attr(projection_data, "Variables")$dat1$time <- time - names(dim(projection_data)) <- c("model", "var", "time", "lon", "lat") - # ------------------------------ - heatwave <- WaveDuration( # nolint - projection_data, - threshold, - calendar = calendar, - op = op, - spell.length = spell_length, - by.seasons = TRUE, - ncores = NULL - ) - - if (season == "summer") { - heatwave_season <- heatwave$result[seq(2, dim(heatwave$result)[1] - 2, - by = 4), 1, 1, , ]#nolint - years <- heatwave$years[seq(2, length(heatwave$years) - 2, by = 4)] - } else if (season == "winter") { - heatwave_season <- heatwave$result[seq(1, dim(heatwave$result)[1] - 2, - by = 4), 1, 1, , ]#nolint - years <- heatwave$years[seq(1, length(heatwave$years) - 1, by = 4)] - } else if (season == "spring") { - heatwave_season <- heatwave$result[seq(3, dim(heatwave$result)[1] - 2, - by = 4), 1, 1, , ]#nolint - years <- heatwave$years[seq(3, length(heatwave$years) - 2, by = 4)] - } else { - heatwave_season <- heatwave$result[seq(4, dim(heatwave$result)[1] - 2, - by = 4), 1, 1, , ]#nolint - years <- heatwave$years[seq(4, length(heatwave$years) - 2, by = 4)] - } - - data <- heatwave_season - names(dim(data)) <- c("time", 
"lon", "lat") - attributes(lon) <- NULL - attributes(lat) <- NULL - dim(lon) <- c(lon = length(lon)) - dim(lat) <- c(lat = length(lat)) - time <- as.numeric(substr(years, 1, 4)) - attributes(time) <- NULL - dim(time) <- c(time = length(time)) - print(paste( - "Attribute projection from climatological data is saved and,", - "if it's correct, it can be added to the final output:", - projection - )) - - dimlon <- ncdim_def( - name = "lon", units = "degrees_east", - vals = as.vector(lon), longname = "longitude") - dimlat <- ncdim_def( - name = "lat", units = "degrees_north", - vals = as.vector(lat), longname = "latitude") - dimtime <- ncdim_def( - name = "time", units = "years since 0-0-0 00:00:00", - vals = time, longname = "time") - defdata <- ncvar_def( - name = "duration", units = "days", - dim = list(season = dimtime, lat = dimlat, lon = dimlon), - longname = paste( - "Number of days during the period", start_projection, "-", end_projection, - "for", season, "in which", var0, "is", op, "than the", qtile, - "quantile obtained from", start_reference, "-", end_reference - ) - ) - filencdf <- paste0(work_dir, "/", var0, "_extreme_spell_duration", season, - "_", model_names, "_", rcp_scenario[i], "_", start_projection, "_", - end_projection, ".nc") - - file <- nc_create(filencdf, list(defdata)) - ncvar_put(file, defdata, data) - nc_close(file) - - - - - brks <- seq(0, 40, 4) - title <- paste0( - "Days ", season, " ", var0, " ", substr(start_projection, 1, 4), "-", - substr(end_projection, 1, 4), " ", op, " the ", qtile * 100, - "th quantile for ", substr(start_reference, 1, 4), "-", - substr(end_reference, 1, 4), " (", rcp_scenario[i], ")" - ) - filepng <- paste0( - plot_dir, "/", var0, "_extreme_spell_duration", season, "_", - model_names, "_", rcp_scenario[i], "_", start_projection, "_", - end_projection, ".png") - PlotEquiMap( Mean1Dim(data, 1), # nolint - lat = lat, - lon = lon, - filled.continents = FALSE, - brks = brks, - color_fun = clim.palette("yellowred"), - units = "Days", - toptitle = title, - fileout = filepng, - title_scale = 0.5 - ) - - # Set provenance for output files - - xprov <- list(ancestors = list(projection_filenames, reference_filenames), - authors = list("hunt_al", "manu_ni", "caro_lo"), - projects = list("c3s-magic"), - caption = title, - statistics = list("other"), - op = as.character(params$operator), - qtile = params$quantile, - spell_length = params$min_duration, - season = params$season, - realms = list("atmos"), - themes = list("phys"), - plot_file = filepng) - - provenance[[filencdf]] <- xprov -} - -# Write provenance to file -write_yaml(provenance, provenance_file) diff --git a/esmvaltool/diag_scripts/magic_bsc/multimodel_products.R b/esmvaltool/diag_scripts/magic_bsc/multimodel_products.R new file mode 100644 index 0000000000..f6fc8a79be --- /dev/null +++ b/esmvaltool/diag_scripts/magic_bsc/multimodel_products.R @@ -0,0 +1,746 @@ + + +Sys.setenv(TAR = "/bin/tar") # nolint +library(s2dverification) +library(ClimProjDiags) # nolint +library(abind) +library(ggplot2) +library(yaml) +library(ncdf4) +library(multiApply) # nolint + +# Parsing input file paths and creating output dirs +args <- commandArgs(trailingOnly = TRUE) +params <- read_yaml(args[1]) +plot_dir <- params$plot_dir +run_dir <- params$run_dir +work_dir <- params$work_dir + +## Create working dirs if they do not exist +dir.create(plot_dir, recursive = TRUE) +dir.create(run_dir, recursive = TRUE) +dir.create(work_dir, recursive = TRUE) + +# setup provenance file and list +provenance_file <- 
paste0(run_dir, "/", "diagnostic_provenance.yml") +provenance <- list() + +input_files_per_var <- yaml::read_yaml(params$input_files) + +var0 <- lapply(input_files_per_var, function(x) { + x$short_name +}) +fullpath_filenames <- names(var0) +var0 <- unname(var0)[1] +experiment <- lapply(input_files_per_var, function(x) { + x$exp +}) +experiment <- unlist(unname(experiment)) + +climatology_files <- which(unname(experiment) == "historical") +projection_files <- which(unname(experiment) != "historical") + +rcp_scenario <- unique(experiment[projection_files]) +model_names <- lapply(input_files_per_var, function(x) { + x$dataset +}) +model_names <- unlist(unname(model_names))[projection_files] + +start_climatology <- + lapply(input_files_per_var, function(x) { + x$start_year + }) +start_climatology <- + c(unlist(unname(start_climatology))[climatology_files])[1] +end_climatology <- + lapply(input_files_per_var, function(x) { + x$end_year + }) +end_climatology <- + c(unlist(unname(end_climatology))[climatology_files])[1] + +start_projection <- + lapply(input_files_per_var, function(x) { + x$start_year + }) +start_projection <- + c(unlist(unname(start_projection))[projection_files])[1] +end_projection <- + lapply(input_files_per_var, function(x) { + x$end_year + }) +end_projection <- + c(unlist(unname(end_projection))[projection_files])[1] + + +agreement_threshold <- params$agreement_threshold + +font_size <- 12 + +# Parameters for Season() function +monini <- 1 +moninf <- params$moninf +monsup <- params$monsup +if (is.null(moninf) & !is.null(monsup)) { + moninf <- monsup +} else if (!is.null(moninf) & is.null(monsup)) { + monsup <- moninf +} +month_names <- c( + "JAN", + "FEB", + "MAR", + "APR", + "MAY", + "JUN", + "JUL", + "AGO", + "SEP", + "OCT", + "NOV", + "DEC" +) +if (moninf == monsup) { + months <- month_names[moninf] +} else { + months <- + paste0(month_names[moninf], month_names[monsup], sep = "-") +} + +time_series_plot <- params$time_series_plot +### Load data and compute climatologies and anomalies +var0 <- unlist(var0) +climatology_filenames <- fullpath_filenames[climatology_files] +ref_nc <- nc_open(fullpath_filenames[climatology_files][1]) +lat <- ncvar_get(ref_nc, "lat") +lon <- ncvar_get(ref_nc, "lon") +units <- ncatt_get(ref_nc, var0, "units")$value +calendar <- ncatt_get(ref_nc, "time", "calendar")$value +long_names <- ncatt_get(ref_nc, var0, "long_name")$value +time <- ncvar_get(ref_nc, "time") +reference_data <- InsertDim(ncvar_get(ref_nc, var0), 1, 1) # nolint +start_date <- as.POSIXct(substr(ncatt_get( + ref_nc, "time", + "units" +)$value, 11, 29)) +time <- as.Date(time, origin = start_date, calendar = calendar) +projection <- "NULL" +nc_close(ref_nc) +for (i in 2:length(fullpath_filenames[climatology_files])) { + ref_nc <- nc_open(fullpath_filenames[climatology_files][i]) + reference_data <- abind(reference_data, + InsertDim(ncvar_get(ref_nc, var0), 1, 1), + along = 1 + ) # nolint + nc_close(ref_nc) +} +attr(reference_data, "Variables")$dat1$time <- time + +names(dim(reference_data)) <- c("model", "lon", "lat", "time") +# nolint start +# jpeg(paste0(plot_dir, "/plot.jpg")) +# PlotEquiMap(reference_data[1,1,1,,], lon = lon, lat = lat, filled = F) +# dev.off() +# ------------------------------ +# Provisional solution to error in dimension order and time values: +# nolint end +time <- attr(reference_data, "Variables")$dat1$time +attributes(time)$variables$time$calendar <- calendar +if ((end_climatology - start_climatology + 1) * 12 == length(time)) { + time <- seq( + 
as.Date(paste(start_climatology, "01", "01", sep = "-"), + format = "%Y-%m-%d" + ), + as.Date(paste(end_climatology, "12", "01", sep = "-"), + format = "%Y-%m-%d" + ), + "month" + ) +} + +num_models <- + dim(reference_data)[which(names(dim(reference_data)) == "model")] +reference_data <- as.vector(reference_data) +dim(reference_data) <- c( + num_models, + var = 1, + lon = length(lon), + lat = length(lat), + time = length(time) +) +reference_data <- aperm(reference_data, c(1, 2, 5, 4, 3)) +attr(reference_data, "Variables")$dat1$time <- time +names(dim(reference_data)) <- + c("model", "var", "time", "lat", "lon") +# nolint start +# ------------------------------ +# jpeg(paste0(plot_dir, "/plot1.jpg")) +# PlotEquiMap(reference_data[1,1,1,,], lon = lon, lat = lat, filled = F) +# dev.off() +#--------------------------------------------- +# MONTHLY - SEASONAL - ANNUAL +# MONTH: moninf = monsup +# SEASONAL: specify the moninf and monsup; +# if winter: moninf = 12 monsup = 2; +# any other moninf > monsup allowed +#--------------------------------------------- +# nolint end + +dims <- dim(reference_data) +time_dim <- which(names(dim(reference_data)) == "time") +if (moninf <= monsup) { + dims <- append(dims, c(12, dims[time_dim] / 12), after = time_dim) + dims <- dims[-time_dim] + dim(reference_data) <- dims + names(dim(reference_data))[c(time_dim, time_dim + 1)] <- + c("month", "year") + reference_seasonal_mean <- Season( + reference_data, + posdim = time_dim, + monini = monini, + moninf = moninf, + monsup = monsup + ) + reference_seasonal_mean <- + adrop(adrop(reference_seasonal_mean, 2), 2) +} else { + if (monsup == 2 & moninf == 12) { + reference_seasonal_mean <- SeasonSelect( + # nolint + reference_data, + season = "DJF", + dates = time, + calendar = calendar + )$data + # Adding one NA december at the beginning + time_dim <- which(names(dim(reference_seasonal_mean)) == "time") + dims <- dim(reference_seasonal_mean) + empty_array <- rep(NA, prod(dims[-time_dim])) + dims[time_dim] <- 1 + dim(empty_array) <- dims[-time_dim] + nom <- names(dim(reference_seasonal_mean)) + reference_seasonal_mean <- abind( + reference_seasonal_mean, + empty_array, + along = time_dim + ) + # and removing the last december + names(dim(reference_seasonal_mean)) <- nom + dimensiones <- seq_along(dim(reference_seasonal_mean)) + reference_seasonal_mean <- Apply( + reference_seasonal_mean, + target_dims = time_dim, + fun = function(x) { + x[1:(length(x) - 1)] + } + )$output1 + dims <- dim(reference_seasonal_mean) + time_dim <- which(names(dim(reference_seasonal_mean)) == "time") + dims <- append(dims, c(3, dims[time_dim] / 3), after = time_dim) + dims <- dims[-time_dim] + dim(reference_seasonal_mean) <- dims + names(dim(reference_seasonal_mean))[c(time_dim, time_dim + 1)] <- + c("season", "year") + reference_seasonal_mean <- Mean1Dim(reference_seasonal_mean, + posdim = time_dim + ) + } +} + +margins <- + list(seq_along(dim( + reference_seasonal_mean + ))[-c(time_dim + 1)]) +years_dim <- which(names(dim(reference_seasonal_mean)) == "year") +climatology <- Mean1Dim(reference_seasonal_mean, years_dim) # nolint +projection_filenames <- fullpath_filenames[projection_files] +rcp_nc <- nc_open(projection_filenames[1]) +lat <- ncvar_get(rcp_nc, "lat") +lon <- ncvar_get(rcp_nc, "lon") +units <- ncatt_get(rcp_nc, var0, "units")$value +calendar <- ncatt_get(rcp_nc, "time", "calendar")$value +long_names <- ncatt_get(rcp_nc, var0, "long_name")$value +time <- ncvar_get(rcp_nc, "time") +rcp_data <- InsertDim(ncvar_get(rcp_nc, var0), 1, 1) 
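+# A note on the stacking idiom above (comments only; InsertDim behaviour per
+# s2dverification is assumed): each file's lon x lat x time field gets a
+# leading singleton dimension via InsertDim(x, 1, 1), and abind(..., along = 1)
+# then binds the models along it, e.g. with toy sizes:
+#   a <- array(1, dim = c(1, 4, 3, 12)) # model A: 1 x lon x lat x time
+#   b <- array(2, dim = c(1, 4, 3, 12)) # model B
+#   stacked <- abind(a, b, along = 1)   # 2 x 4 x 3 x 12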
# nolint +start_date <- as.POSIXct(substr(ncatt_get( + rcp_nc, "time", + "units" +)$value, 11, 29)) +time <- as.Date(time, origin = start_date, calendar = calendar) + +nc_close(rcp_nc) +for (i in 2:length(projection_filenames)) { + rcp_nc <- nc_open(projection_filenames[i]) + rcp_data <- + abind(rcp_data, InsertDim(ncvar_get(rcp_nc, var0), 1, 1), # nolint + along = 1 + ) + nc_close(rcp_nc) +} +attr(rcp_data, "Variables")$dat1$time <- time + +names(dim(rcp_data)) <- c("model", "lon", "lat", "time") +# nolint start +# jpeg(paste0(plot_dir, "/plot2.jpg")) +# PlotEquiMap(rcp_data[1,1,1,,], lon = lon, lat = lat, filled = F) +# dev.off() +# ------------------------------ +# Provisional solution to error in dimension order +# if (attributes(time)$variables$time$calendar != calendar) { +# print("Different calendars between climatology and anomaly.") +# } +# nolint end +if ((end_projection - start_projection + 1) * 12 == length(time)) { + time <- seq( + as.Date(paste(start_projection, "01", "01", sep = "-"), + format = "%Y-%m-%d" + ), + as.Date(paste(end_projection, "12", "01", sep = "-"), + format = "%Y-%m-%d" + ), + "month" + ) +} +num_models <- dim(rcp_data)[which(names(dim(rcp_data)) == "model")] +rcp_data <- as.vector(rcp_data) +dim(rcp_data) <- c( + num_models, + var = 1, + lon = length(lon), + lat = length(lat), + time = length(time) +) +rcp_data <- aperm(rcp_data, c(1, 2, 5, 4, 3)) +names(dim(rcp_data)) <- c("model", "var", "time", "lat", "lon") +attr(rcp_data, "Variables")$dat1$time <- time + +# nolint start +# ------------------------------ +# jpeg(paste0(plot_dir, "/plot3.jpg")) +# PlotEquiMap(rcp_data[1,1,1,,], lon = lon, lat = lat, filled = F) +# dev.off() + + +#--------------------------------------------- +# MONTHLY - SEASONAL - ANNUAL +# MONTH: moninf = monsup +# SEASONAL: specify the moninf and monsup; +# if winter: moninf = 12 monsup = 2; +# any other moninf > monsup allowed +#--------------------------------------------- +# nolint end + +time_dim <- which(names(dim(rcp_data)) == "time") +dims <- dim(rcp_data) +mes <- as.numeric(substr(time, 6, 7)) + +if (moninf <= monsup) { + dims <- append(dims, c(12, dims[time_dim] / 12), after = time_dim) + dims <- dims[-time_dim] + dim(rcp_data) <- dims + names(dim(rcp_data))[c(time_dim, time_dim + 1)] <- + c("month", "year") + rcp_seasonal_mean <- Season( + rcp_data, + posdim = time_dim, + monini = monini, + moninf = moninf, + monsup = monsup + ) + rcp_seasonal_mean <- adrop(adrop(rcp_seasonal_mean, 2), 2) +} else { + if (monsup == 2 & moninf == 12) { + rcp_seasonal_mean <- SeasonSelect( # nolint + rcp_data, + season = "DJF", + dates = time, + calendar = calendar + )$data + time_dim <- which(names(dim(rcp_seasonal_mean)) == "time") + dims <- dim(rcp_seasonal_mean) + empty_array <- rep(NA, prod(dims[-time_dim])) + dims[time_dim] <- 1 + dim(empty_array) <- dims[-time_dim] + nom <- names(dim(rcp_seasonal_mean)) + rcp_seasonal_mean <- abind( + rcp_seasonal_mean, + empty_array, + along = time_dim + ) + borrar <- dim(rcp_seasonal_mean)[time_dim] + names(dim(rcp_seasonal_mean)) <- nom + dimensiones <- seq_along(dim(rcp_seasonal_mean)) + rcp_seasonal_mean <- Apply( + # nolint + rcp_seasonal_mean, + target_dims = time_dim, + fun = function(x) { + x[1:(length(x) - 1)] + } + )$output1 + dims <- dim(rcp_seasonal_mean) + time_dim <- which(names(dim(rcp_seasonal_mean)) == "time") + dims <- append(dims, c(3, dims[time_dim] / 3), after = time_dim) + dims <- dims[-time_dim] + dim(rcp_seasonal_mean) <- dims + names(dim(rcp_seasonal_mean))[c(time_dim, time_dim 
+ 1)] <- + c("season", "year") + rcp_seasonal_mean <- + Mean1Dim(rcp_seasonal_mean, posdim = time_dim) + rcp_seasonal_mean <- aperm(rcp_seasonal_mean, c(2, 1, 3, 4)) + } +} +years_dim <- which(names(dim(rcp_seasonal_mean)) == "year") +climatology <- InsertDim( # nolint + climatology, + years_dim, + lendim = dim(rcp_seasonal_mean)[years_dim] +) +anomaly <- rcp_seasonal_mean - climatology +multi_year_anomaly <- Mean1Dim(anomaly, years_dim) + +time <- seq(start_projection, end_projection, by = 1) +month <- moninf +if (month <= 9) { + month <- paste0(as.character(0), as.character(month)) +} +month <- paste0("-", month, "-") +day <- "01" +time <- as.POSIXct(paste0(time, month, day), tz = "CET") +time <- julian(time, origin = as.POSIXct("1970-01-01")) + +attributes(time) <- NULL +dim(time) <- c(time = length(time)) +metadata <- list( + time = list( + standard_name = "time", + long_name = "time", + units = "days since 1970-01-01 00:00:00", + prec = "double", + dim = list(list(name = "time", unlim = FALSE)) + ) +) +attr(time, "variables") <- metadata + +# Save the single model anomalies +for (mod in seq_along(model_names)) { + data <- anomaly[mod, , , ] # nolint + data <- aperm(data, c(2, 3, 1)) + names(dim(data)) <- c("lat", "lon", "time") + metadata <- list(variable = list( + dim = list(list( + name = "time", unlim = FALSE + )), + units = units + )) + names(metadata)[1] <- var0 + attr(data, "variables") <- metadata + attributes(lat) <- NULL + attributes(lon) <- NULL + dim(lat) <- c(lat = length(lat)) + dim(lon) <- c(lon = length(lon)) + variable_list <- + list( + variable = data, + lat = lat, + lon = lon, + time = time + ) + names(variable_list)[1] <- var0 + + # ArrayToNetCDF( # nolint + # variable_list, + # paste0( + # plot_dir, "/", var0, "_", months, "_anomaly_", model_names[mod], + # "_", start_anomaly, "_", end_anomaly, "_", start_climatology, "_", + # end_climatology, ".nc" + # ) + # ) +} + +model_anomalies <- WeightedMean( # nolint + anomaly, + lon = as.vector(lon), + lat = as.vector(lat), + mask = NULL +) +if (!is.null(params$running_mean)) { + model_anomalies <- Smoothing( # nolint + model_anomalies, + runmeanlen = params$running_mean, + numdimt = 2 + ) +} +data_frame <- as.data.frame.table(t(model_anomalies[, ])) +years <- + rep(start_projection:end_projection, dim(model_anomalies)[1]) +data_frame$year <- c(years) +names(data_frame)[2] <- "Model" + +for (i in seq_along(levels(data_frame$Model))) { + levels(data_frame$Model)[i] <- model_names[i] +} + +if (time_series_plot == "single") { + g <- ggplot( + data_frame, + aes(x = year, y = Freq, color = Model) + ) + + theme_bw() + + geom_line() + + ylab(paste0("Anomaly (", units, ")")) + + xlab("Year") + + theme( + text = element_text(size = font_size), + legend.text = element_text(size = font_size), + axis.title = element_text(size = font_size) + ) + + stat_summary( + data = data_frame, + fun.y = "mean", + mapping = aes( + x = data_frame$year, + y = data_frame$Freq, + group = interaction(data_frame[2, 3]), + color = data_frame$Model + ), + geom = "line", + size = 1 + ) + + ggtitle( + paste0( + months, + " ", + var0, + " anomaly (", + start_projection, + "-", + end_projection, + ") - ", + "(", + start_climatology, + "-", + end_climatology, + ")" + ) + ) +} else { + g <- ggplot(data_frame, aes(x = year, y = Freq)) + + theme_bw() + + ylab(paste0("Anomaly (", units, ")")) + + xlab("Year") + + theme( + text = element_text(size = font_size), + legend.text = element_text(size = font_size), + axis.title = element_text(size = font_size) + ) + + 
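+# Note (comments only): stat_summary(fun.y = "mean", geom = "line") overlays
+# the across-model mean at each year on top of the per-model values; fun.y,
+# fun.ymin and fun.ymax are the ggplot2 (< 3.3.0) spellings, renamed in
+# later releases:
+#   stat_summary(fun = "mean", geom = "line")                   # >= 3.3.0
+#   stat_summary(fun.min = min, fun.max = max, geom = "ribbon") # >= 3.3.0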
stat_summary(
+      data = data_frame,
+      fun.y = "mean",
+      mapping = aes(
+        x = data_frame$year,
+        y = data_frame$Freq,
+        group = interaction(data_frame[2, 3]),
+        color = data_frame$Model
+      ),
+      geom = "line",
+      size = 0.8
+    ) +
+    stat_summary(
+      data = data_frame,
+      geom = "ribbon",
+      fun.ymin = "min",
+      fun.ymax = "max",
+      mapping = aes(
+        x = data_frame$year,
+        y = data_frame$Freq,
+        group = interaction(data_frame[2, 3])
+      ),
+      alpha = 0.3,
+      color = "red",
+      fill = "red"
+    ) +
+    ggtitle(
+      paste0(
+        months,
+        " ",
+        var0,
+        " anomaly (",
+        start_projection,
+        "-",
+        end_projection,
+        ") - ",
+        "(",
+        start_climatology,
+        "-",
+        end_climatology,
+        ")"
+      )
+    )
+}
+filepng1 <- paste0(
+  plot_dir,
+  "/",
+  "Area-averaged_",
+  var0,
+  "_",
+  months,
+  "_multimodel-anomaly_",
+  start_projection,
+  "_",
+  end_projection,
+  "_",
+  start_climatology,
+  "_",
+  end_climatology,
+  ".png"
+)
+ggsave(
+  filename = filepng1,
+  g,
+  device = NULL
+)
+
+if (!is.null(agreement_threshold)) {
+  model_dim <- which(names(dim(multi_year_anomaly)) == "model")
+  agreement <- AnoAgree(multi_year_anomaly + # nolint
+    rnorm(length(unique(model_names)) * length(lat) * length(lon)),
+  membersdim = model_dim
+  )
+} else {
+  agreement_threshold <- 1000
+  agreement <- NULL
+}
+
+colorbar_lim <- params$colorbar_lim
+
+if (colorbar_lim == 0) {
+  colorbar_lim <-
+    ceiling(max(abs(max(multi_year_anomaly)), abs(min(data))))
+}
+
+brks <- seq(-colorbar_lim, colorbar_lim, length.out = 21)
+
+title <- paste0(
+  months,
+  " ",
+  var0,
+  " anomaly (",
+  start_projection,
+  "-",
+  end_projection,
+  ") - (",
+  start_climatology,
+  "-",
+  end_climatology,
+  ")"
+)
+data <- drop(Mean1Dim(multi_year_anomaly, model_dim))
+
+filepng2 <-
+  paste0(
+    plot_dir,
+    "/",
+    var0,
+    "_",
+    months,
+    "_multimodel-anomaly_",
+    start_projection,
+    "_",
+    end_projection,
+    "_",
+    start_climatology,
+    "_",
+    end_climatology,
+    ".png"
+  )
+PlotEquiMap(
+  # nolint
+  data,
+  lat = lat,
+  lon = lon,
+  brks = brks,
+  units = units,
+  toptitle = title,
+  filled.continents = FALSE,
+  dots = drop(agreement) >= agreement_threshold,
+  fileout = filepng2
+)
+model_names_filename <- paste(model_names, collapse = "_")
+print(
+  paste(
+    "Attribute projection from climatological data is saved and,",
+    "if it's correct, it can be added to the final output:",
+    projection
+  )
+)
+
+dimlon <- ncdim_def(
+  name = "lon",
+  units = "degrees_east",
+  vals = as.vector(lon),
+  longname = "longitude"
+)
+dimlat <- ncdim_def(
+  name = "lat",
+  units = "degrees_north",
+  vals = as.vector(lat),
+  longname = "latitude"
+)
+defdata <- ncvar_def(
+  name = "data",
+  units = units,
+  dim = list(lat = dimlat, lon = dimlon),
+  longname = paste("Mean", long_names)
+)
+defagreement <- ncvar_def(
+  name = "agreement",
+  units = "%",
+  dim = list(lat = dimlat, lon = dimlon),
+  longname = "Agreement between models"
+)
+filencdf <- paste0(
+  work_dir,
+  "/",
+  var0,
+  "_",
+  months,
+  "_multimodel-anomaly_",
+  model_names_filename,
+  "_",
+  start_projection,
+  "_",
+  end_projection,
+  "_",
+  start_climatology,
+  "_",
+  end_climatology,
+  ".nc"
+)
+file <- nc_create(filencdf, list(defdata, defagreement))
+ncvar_put(file, defdata, data)
+ncvar_put(file, defagreement, agreement)
+nc_close(file)
+
+
+# Set provenance for output files
+xprov <- list(
+  ancestors = fullpath_filenames,
+  authors = list("hunter_alasdair", "manubens_nicolau"),
+  projects = list("c3s-magic"),
+  caption = title,
+  statistics = list("other"),
+  agreement_threshold = params$agreement_threshold,
+  moninf = params$moninf,
+ monsup = params$monsup, + runmena = params$running_mean, + time_series_plot = params$time_series_plot, + realms = list("atmos"), + themes = list("phys") +) +provenance[[filepng1]] <- xprov +provenance[[filencdf]] <- xprov + + +# Write provenance to file +write_yaml(provenance, provenance_file) diff --git a/esmvaltool/diag_scripts/magic_bsc/multimodel_products.r b/esmvaltool/diag_scripts/magic_bsc/multimodel_products.r deleted file mode 100644 index 85f2784bbb..0000000000 --- a/esmvaltool/diag_scripts/magic_bsc/multimodel_products.r +++ /dev/null @@ -1,560 +0,0 @@ -# nolint start -####REQUIRED SYSTEM LIBS -####ŀibssl-dev -####libnecdf-dev -####cdo - -# conda install -c conda-forge r-ncdf4 - -#R package dependencies installation script -#install.packages("yaml") -#install.packages("devtools") -#library(devtools) -#Sys.setenv(TAR = "/bin/tar") -#install_git("https://earth.bsc.es/gitlab/es/startR", branch = "develop-hotfixes-0.0.2") -#install_git("https://earth.bsc.es/gitlab/es/easyNCDF", branch = "master") -# nolint end - - -Sys.setenv(TAR = "/bin/tar") # nolint -library(s2dverification) -library(ClimProjDiags) # nolint -library(abind) -library(ggplot2) -library(yaml) -library(ncdf4) -library(multiApply)# nolint - -#Parsing input file paths and creating output dirs -args <- commandArgs(trailingOnly = TRUE) -params <- read_yaml(args[1]) -plot_dir <- params$plot_dir -run_dir <- params$run_dir -work_dir <- params$work_dir - -## Create working dirs if they do not exist -dir.create(plot_dir, recursive = TRUE) -dir.create(run_dir, recursive = TRUE) -dir.create(work_dir, recursive = TRUE) - -# setup provenance file and list -provenance_file <- paste0(run_dir, "/", "diagnostic_provenance.yml") -provenance <- list() - -input_files_per_var <- yaml::read_yaml(params$input_files) -var_names <- names(input_files_per_var) -model_names <- lapply(input_files_per_var, function(x) x$dataset) -model_names <- unique(unlist(unname(model_names))) - -var0 <- lapply(input_files_per_var, function(x) x$short_name) -fullpath_filenames <- names(var0) -var0 <- unname(var0)[1] -experiment <- lapply(input_files_per_var, function(x) x$exp) -experiment <- unlist(unname(experiment)) - -climatology_class <- params$climatology_class -anomaly_class <- params$anomaly_class -climatology_files <- which( - unname(experiment) == as.character(climatology_class) -) -anomaly_files <- which(unname(experiment) == as.character(anomaly_class)) - -agreement_threshold <- params$agreement_threshold -start_climatology <- params$climatology_start_year -end_climatology <- params$climatology_end_year -start_anomaly <- params$anomaly_start_year -end_anomaly <- params$anomaly_end_year -font_size <- 12 - -#Parameters for Season() function -monini <- 1 -moninf <- params$moninf -monsup <- params$monsup -if (is.null(moninf) & !is.null(monsup)){ - moninf <- monsup -} else if (!is.null(moninf) & is.null(monsup)) { - monsup <- moninf -} -month_names <- c( - "JAN", "FEB", "MAR", "APR", "MAY", "JUN", "JUL", "AGO", "SEP", - "OCT", "NOV", "DEC" -) -if (moninf == monsup) { - months <- month_names[moninf] -} else { - months <- paste0(month_names[moninf], month_names[monsup], sep = "-") -} - -time_series_plot <- params$time_series_plot -### Load data and compute climatologies and anomalies -var0 <- unlist(var0) -climatology_filenames <- fullpath_filenames[climatology_files] -ref_nc <- nc_open(fullpath_filenames[climatology_files][1]) -lat <- ncvar_get(ref_nc, "lat") -lon <- ncvar_get(ref_nc, "lon") -units <- ncatt_get(ref_nc, var0, "units")$value -calendar <- 
ncatt_get(ref_nc, "time", "calendar")$value -long_names <- ncatt_get(ref_nc, var0, "long_name")$value -time <- ncvar_get(ref_nc, "time") -reference_data <- InsertDim(ncvar_get(ref_nc, var0), 1, 1) # nolint -start_date <- as.POSIXct(substr(ncatt_get(ref_nc, "time", - "units")$value, 11, 29)) -time <- as.Date(time, origin = start_date, calendar = calendar) -projection <- "NULL" -nc_close(ref_nc) -for (i in 2 : length(fullpath_filenames[climatology_files])) { - ref_nc <- nc_open(fullpath_filenames[climatology_files][i]) - reference_data <- abind(reference_data, - InsertDim(ncvar_get(ref_nc, var0), 1, 1), along = 1) # nolint - nc_close(ref_nc) -} -attr(reference_data, "Variables")$dat1$time <- time - -names(dim(reference_data)) <- c("model", "lon", "lat", "time") -# nolint start -#jpeg(paste0(plot_dir, "/plot.jpg")) -#PlotEquiMap(reference_data[1,1,1,,], lon = lon, lat = lat, filled = F) -#dev.off() -# ------------------------------ -# Provisional solution to error in dimension order and time values: -# nolint end -time <- attr(reference_data, "Variables")$dat1$time -attributes(time)$variables$time$calendar <- calendar -if ( (end_climatology - start_climatology + 1) * 12 == length(time) ) { - time <- seq( - as.Date( - paste(start_climatology, "01", "01", sep = "-"), - format = "%Y-%m-%d" - ), - as.Date( - paste(end_climatology, "12", "01", sep = "-"), - format = "%Y-%m-%d" - ), - "month" - ) -} - -num_models <- dim(reference_data)[which(names(dim(reference_data)) == "model")] -reference_data <- as.vector(reference_data) -dim(reference_data) <- c( - num_models, - var = 1, - lon = length(lon), - lat = length(lat), - time = length(time) -) -reference_data <- aperm(reference_data, c(1, 2, 5, 4, 3)) -attr(reference_data, "Variables")$dat1$time <- time -names(dim(reference_data)) <- c("model", "var", "time", "lat", "lon") -# nolint start -# ------------------------------ -#jpeg(paste0(plot_dir, "/plot1.jpg")) -#PlotEquiMap(reference_data[1,1,1,,], lon = lon, lat = lat, filled = F) -#dev.off() -#--------------------------------------------- -# MONTHLY - SEASONAL - ANNUAL -# MONTH: moninf = monsup -# SEASONAL: specify the moninf and monsup; -# if winter: moninf = 12 monsup = 2; -# any other moninf > monsup allowed -#--------------------------------------------- -# nolint end - -dims <- dim(reference_data) -time_dim <- which(names(dim(reference_data)) == "time") -if (moninf <= monsup) { - dims <- append(dims, c(12, dims[time_dim] / 12), after = time_dim) - dims <- dims[-time_dim] - dim(reference_data) <- dims - names(dim(reference_data))[c(time_dim, time_dim + 1)] <- c("month", "year") - reference_seasonal_mean <- Season( - reference_data, - posdim = time_dim, - monini = monini, - moninf = moninf, - monsup = monsup - ) - reference_seasonal_mean <- adrop(adrop(reference_seasonal_mean, 2), 2) -} else { - if (monsup == 2 & moninf == 12) { - reference_seasonal_mean <- SeasonSelect( #nolint - reference_data, season = "DJF", dates = time, calendar = calendar - )$data - # Adding one NA december at the begining - time_dim <- which(names(dim(reference_seasonal_mean)) == "time") - dims <- dim(reference_seasonal_mean) - empty_array <- rep(NA, prod(dims[-time_dim])) - dims[time_dim] <- 1 - dim(empty_array) <- dims[-time_dim] - nom <- names(dim(reference_seasonal_mean)) - reference_seasonal_mean <- abind( - reference_seasonal_mean, empty_array, along = time_dim - ) - # and removing the last december - names(dim(reference_seasonal_mean)) <- nom - dimensiones <- 1 : length(dim(reference_seasonal_mean)) - 
reference_seasonal_mean <- Apply( - reference_seasonal_mean, - target_dims = time_dim, - fun = function (x) { - x[1 : (length(x) - 1)] - } - )$output1 - dims <- dim(reference_seasonal_mean) - time_dim <- which(names(dim(reference_seasonal_mean)) == "time") - dims <- append(dims, c(3, dims[time_dim] / 3), after = time_dim) - dims <- dims[-time_dim] - dim(reference_seasonal_mean) <- dims - names(dim(reference_seasonal_mean))[c(time_dim, time_dim + 1)] <- - c("season", "year") - reference_seasonal_mean <- Mean1Dim(reference_seasonal_mean, - posdim = time_dim) - } -} - -margins <- list(c(1 : length(dim(reference_seasonal_mean)))[-c(time_dim + 1)]) -years_dim <- which(names(dim(reference_seasonal_mean)) == "year") -climatology <- Mean1Dim(reference_seasonal_mean, years_dim) #nolint -anomaly_filenames <- fullpath_filenames[anomaly_files] -rcp_nc <- nc_open(anomaly_filenames[1]) -lat <- ncvar_get(rcp_nc, "lat") -lon <- ncvar_get(rcp_nc, "lon") -units <- ncatt_get(rcp_nc, var0, "units")$value -calendar <- ncatt_get(rcp_nc, "time", "calendar")$value -long_names <- ncatt_get(rcp_nc, var0, "long_name")$value -time <- ncvar_get(rcp_nc, "time") -rcp_data <- InsertDim(ncvar_get(rcp_nc, var0), 1, 1) # nolint -start_date <- as.POSIXct(substr(ncatt_get(rcp_nc, "time", - "units")$value, 11, 29)) -time <- as.Date(time, origin = start_date, calendar = calendar) - -nc_close(rcp_nc) -for (i in 2 : length(anomaly_filenames)) { - rcp_nc <- nc_open(anomaly_filenames[i]) - rcp_data <- abind(rcp_data, InsertDim(ncvar_get(rcp_nc, var0), 1, 1), #nolint - along = 1) - nc_close(rcp_nc) -} -attr(rcp_data, "Variables")$dat1$time <- time - -names(dim(rcp_data)) <- c("model", "lon", "lat", "time") -# nolint start -#jpeg(paste0(plot_dir, "/plot2.jpg")) -#PlotEquiMap(rcp_data[1,1,1,,], lon = lon, lat = lat, filled = F) -#dev.off() -# ------------------------------ -# Provisional solution to error in dimension order -#if (attributes(time)$variables$time$calendar != calendar) { -# print("Different calendars between climatology and anomaly.") -#} -# nolint end -if ( (end_anomaly - start_anomaly + 1) * 12 == length(time)) { - time <- seq( - as.Date( - paste(start_anomaly, "01", "01", sep = "-"), - format = "%Y-%m-%d" - ), - as.Date( - paste(end_anomaly, "12", "01", sep = "-"), - format = "%Y-%m-%d" - ), - "month" - ) -} -num_models <- dim(rcp_data)[which(names(dim(rcp_data)) == "model")] -rcp_data <- as.vector(rcp_data) -dim(rcp_data) <- c( - num_models, - var = 1, - lon = length(lon), - lat = length(lat), - time = length(time) -) -rcp_data <- aperm(rcp_data, c(1, 2, 5, 4, 3)) -names(dim(rcp_data)) <- c("model", "var", "time", "lat", "lon") -attr(rcp_data, "Variables")$dat1$time <- time - -# nolint start -# ------------------------------ -#jpeg(paste0(plot_dir, "/plot3.jpg")) -#PlotEquiMap(rcp_data[1,1,1,,], lon = lon, lat = lat, filled = F) -#dev.off() - - -#--------------------------------------------- -# MONTHLY - SEASONAL - ANNUAL -# MONTH: moninf = monsup -# SEASONAL: specify the moninf and monsup; -# if winter: moninf = 12 monsup = 2; -# any other moninf > monsup allowed -#--------------------------------------------- -# nolint end - -time_dim <- which(names(dim(rcp_data)) == "time") -dims <- dim(rcp_data) -mes <- as.numeric(substr(time, 6, 7)) - -if (moninf <= monsup) { - dims <- append(dims, c(12, dims[time_dim] / 12), after = time_dim) - dims <- dims[-time_dim] - dim(rcp_data) <- dims - names(dim(rcp_data))[c(time_dim, time_dim + 1)] <- c("month", "year") - rcp_seasonal_mean <- Season( - rcp_data, - posdim = time_dim, - 
monini = monini, - moninf = moninf, - monsup = monsup - ) - rcp_seasonal_mean <- adrop(adrop(rcp_seasonal_mean, 2), 2) -} else { - if (monsup == 2 & moninf == 12) { - rcp_seasonal_mean <- SeasonSelect( #nolint - rcp_data, season = "DJF", - dates = time, - calendar = calendar - )$data - time_dim <- which(names(dim(rcp_seasonal_mean)) == "time") - dims <- dim(rcp_seasonal_mean) - empty_array <- rep(NA, prod(dims[-time_dim])) - dims[time_dim] <- 1 - dim(empty_array) <- dims[-time_dim] - nom <- names(dim(rcp_seasonal_mean)) - rcp_seasonal_mean <- abind( - rcp_seasonal_mean, empty_array, along = time_dim - ) - borrar <- dim(rcp_seasonal_mean)[time_dim] - names(dim(rcp_seasonal_mean)) <- nom - dimensiones <- 1 : length(dim(rcp_seasonal_mean)) - rcp_seasonal_mean <- Apply( # nolint - rcp_seasonal_mean, - target_dims = time_dim, - fun = function (x) { - x[1 : (length(x) - 1)] - } - )$output1 - dims <- dim(rcp_seasonal_mean) - time_dim <- which(names(dim(rcp_seasonal_mean)) == "time") - dims <- append(dims, c(3, dims[time_dim] / 3), after = time_dim) - dims <- dims[-time_dim] - dim(rcp_seasonal_mean) <- dims - names(dim(rcp_seasonal_mean))[c(time_dim, time_dim + 1)] <- - c("season", "year") - rcp_seasonal_mean <- Mean1Dim(rcp_seasonal_mean, posdim = time_dim) - rcp_seasonal_mean <- aperm(rcp_seasonal_mean, c(2, 1, 3, 4)) - } -} -years_dim <- which(names(dim(rcp_seasonal_mean)) == "year") -climatology <- InsertDim( # nolint - climatology, - years_dim, - lendim = dim(rcp_seasonal_mean)[years_dim] -) -anomaly <- rcp_seasonal_mean - climatology -multi_year_anomaly <- Mean1Dim(anomaly, years_dim) - -time <- seq(start_anomaly, end_anomaly, by = 1) -month <- moninf - if (month <= 9) { - month <- paste0(as.character(0), as.character(month)) - } - month <- paste0("-", month, "-") - day <- "01" - time <- as.POSIXct(paste0(time, month, day), tz = "CET") - time <- julian(time, origin = as.POSIXct("1970-01-01")) - -attributes(time) <- NULL -dim(time) <- c(time = length(time)) -metadata <- list(time = list( - standard_name = "time", - long_name = "time", - units = "days since 1970-01-01 00:00:00", - prec = "double", - dim = list(list(name = "time", unlim = FALSE)) -)) -attr(time, "variables") <- metadata - -#Save the single model anomalies -for (mod in 1 : length(model_names)) { - data <- anomaly[mod, , , ] # nolint - data <- aperm(data, c(2, 3, 1)) - names(dim(data)) <- c("lat", "lon", "time") - metadata <- list(variable = list( - dim = list(list(name = "time", unlim = FALSE)), - units = units - )) - names(metadata)[1] <- var0 - attr(data, "variables") <- metadata - attributes(lat) <- NULL - attributes(lon) <- NULL - dim(lat) <- c(lat = length(lat)) - dim(lon) <- c(lon = length(lon)) - variable_list <- list(variable = data, lat = lat, lon = lon, time = time) - names(variable_list)[1] <- var0 - - #ArrayToNetCDF( # nolint - # variable_list, - # paste0( - # plot_dir, "/", var0, "_", months, "_anomaly_", model_names[mod], - # "_", start_anomaly, "_", end_anomaly, "_", start_climatology, "_", - # end_climatology, ".nc" - # ) - #) -} - -model_anomalies <- WeightedMean( # nolint - anomaly, - lon = as.vector(lon), - lat = as.vector(lat), - mask = NULL -) -if (!is.null(params$running_mean)) { - model_anomalies <- Smoothing( # nolint - model_anomalies, - runmeanlen = params$running_mean, - numdimt = 2 - ) -} -data_frame <- as.data.frame.table(t(model_anomalies[, ])) -years <- rep(start_anomaly : end_anomaly, dim(model_anomalies)[1]) -data_frame$Year <- c(years) -names(data_frame)[2] <- "Model" - -for (i in 1 : 
length(levels(data_frame$Model))) { - levels(data_frame$Model)[i] <- model_names[i] -} - -if (time_series_plot == "single") { - g <- ggplot( - data_frame, - aes(x = Year, y = Freq, color = Model)) + theme_bw() + - geom_line() + ylab(paste0("Anomaly (", units, ")")) + xlab("Year") + - theme(text = element_text(size = font_size), - legend.text = element_text(size = font_size), - axis.title = element_text(size = font_size)) + - stat_summary(data = data_frame, fun.y = "mean", - mapping = aes(x = data_frame$Year, y = data_frame$Freq, - group = interaction(data_frame[2, 3]), - color = data_frame$Model), geom = "line", size = 1) + - ggtitle(paste0(months, " ", var0, " anomaly (", start_anomaly, "-", - end_anomaly, ") - ", "(", start_climatology, "-", end_climatology, - ")")) -} else { - g <- ggplot(data_frame, aes(x = Year, y = Freq)) + theme_bw() + - ylab(paste0("Anomaly (", units, ")")) + xlab("Year") + - theme(text = element_text(size = font_size), - legend.text = element_text(size = font_size), - axis.title = element_text(size = font_size)) + - stat_summary(data = data_frame, fun.y = "mean", - mapping = aes(x = data_frame$Year, y = data_frame$Freq, - group = interaction(data_frame[2, 3]), - color = data_frame$Model), geom = "line", size = 0.8) + - stat_summary(data = data_frame, geom = "ribbon", fun.ymin = "min", - fun.ymax = "max", mapping = aes(x = data_frame$Year, - y = data_frame$Freq, group = interaction(data_frame[2, 3])), - alpha = 0.3, color = "red", fill = "red") + - ggtitle(paste0(months, " ", var0, " anomaly (", start_anomaly, "-", - end_anomaly, ") - ", "(", start_climatology, "-", end_climatology, - ")")) -} -filepng1 <- paste0( - plot_dir, "/", "Area-averaged_", var0, "_", months, "_multimodel-anomaly_", - start_anomaly, "_", end_anomaly, "_", start_climatology, "_", - end_climatology, ".png") -ggsave( - filename = filepng1, - g, - device = NULL -) - -if (!is.null(agreement_threshold)) { - model_dim <- which(names(dim(multi_year_anomaly)) == "model") - agreement <- AnoAgree(multi_year_anomaly + # nolint - rnorm(length(unique(model_names)) * length(lat) * length(lon)), - membersdim = model_dim - ) -} else { - agreement_threshold <- 1000 - agreement <- NULL -} - -colorbar_lim <- ceiling(max(abs(max(multi_year_anomaly)), abs(min(data)))) -brks <- seq(-colorbar_lim, colorbar_lim, length.out = 21) -title <- paste0( - months, " ", var0, " anomaly (", start_anomaly, "-", end_anomaly, - ") - (", start_climatology, "-", end_climatology, ")") -data <- drop(Mean1Dim(multi_year_anomaly, model_dim)) - -filepng2 <- paste0(plot_dir, "/", var0, "_", months, "_multimodel-anomaly_", - start_anomaly, - "_", end_anomaly, "_", start_climatology, "_", end_climatology, ".png") -PlotEquiMap( # nolint - data, - lat = lat, - lon = lon, - brks = brks, - units = units, - toptitle = title, - filled.continents = FALSE, - dots = drop(agreement) >= agreement_threshold, - fileout = filepng2) -model_names_filename <- paste(model_names, collapse = "_") -print(paste( - "Attribute projection from climatological data is saved and,", - "if it's correct, it can be added to the final output:", - projection -)) - -dimlon <- ncdim_def( - name = "lon", - units = "degrees_east", - vals = as.vector(lon), - longname = "longitude" -) -dimlat <- ncdim_def( - name = "lat", - units = "degrees_north", - vals = as.vector(lat), - longname = "latitude" -) -defdata <- ncvar_def( - name = "data", - units = units, - dim = list(lat = dimlat, lon = dimlon), - longname = paste("Mean", long_names) -) -defagreement <- ncvar_def( - name = 
"agreement", - units = "%", - dim = list(lat = dimlat, lon = dimlon), - longname = "Agremeent between models") -filencdf <- paste0( - work_dir, "/", var0, "_", months, "_multimodel-anomaly_", - model_names_filename, "_", start_anomaly, "_", end_anomaly, "_", - start_climatology, "_", end_climatology, ".nc") -file <- nc_create(filencdf, list(defdata, defagreement)) -ncvar_put(file, defdata, data) -ncvar_put(file, defagreement, agreement) -nc_close(file) - - - # Set provenance for output files - xprov <- list(ancestors = list(fullpath_filenames), - authors = list("hunt_al", "manu_ni"), - projects = list("c3s-magic"), - caption = title, - statistics = list("other"), - agreement_threshold = params$agreement_threshold, - moninf = params$moninf, - monsup = params$monsup, - runmena = params$running_mean, - time_series_plot = params$time_series_plot, - realms = list("atmos"), - themes = list("phys"), - plot_file = filepng1) - - provenance[[filencdf]] <- xprov - - -# Write provenance to file -write_yaml(provenance, provenance_file) diff --git a/esmvaltool/diag_scripts/magic_bsc/toymodel.R b/esmvaltool/diag_scripts/magic_bsc/toymodel.R new file mode 100644 index 0000000000..c4fa8f15a1 --- /dev/null +++ b/esmvaltool/diag_scripts/magic_bsc/toymodel.R @@ -0,0 +1,264 @@ +library(s2dverification) +library(ncdf4) +library(multiApply) # nolint +library(yaml) +library(abind) +library(ClimProjDiags) # nolint +library(RColorBrewer) # nolint + +args <- commandArgs(trailingOnly = TRUE) +params <- read_yaml(args[1]) +plot_dir <- params$plot_dir +run_dir <- params$run_dir +work_dir <- params$work_dir + +dir.create(plot_dir, recursive = TRUE) +dir.create(run_dir, recursive = TRUE) +dir.create(work_dir, recursive = TRUE) + +# setup provenance file and list +provenance_file <- paste0(run_dir, "/", "diagnostic_provenance.yml") +provenance <- list() + +input_files_per_var <- yaml::read_yaml(params$input_files) + +model_names <- lapply(input_files_per_var, function(x) { + x$model +}) +model_names <- unname(model_names) +var0 <- lapply(input_files_per_var, function(x) { + x$short_name +}) +fullpath_filenames <- names(var0) +var0 <- unname(var0)[1] +a <- 1 +b <- params$beta +g <- 0.1 +nm <- params$number_of_members +nstartd <- 1 +nleadt <- params$no_of_lead_times + + +var0 <- unlist(var0) +data_nc <- nc_open(fullpath_filenames) +data <- ncvar_get(data_nc, var0) +data <- InsertDim(InsertDim(data, 1, 1), 1, 1) # nolint +names(dim(data)) <- c("model", "var", "lon", "lat", "time") +lat <- ncvar_get(data_nc, "lat") +lon <- ncvar_get(data_nc, "lon") +lon <- unlist(lon) +lat <- unlist(lat) +attributes(lon) <- NULL +attributes(lat) <- NULL +units <- ncatt_get(data_nc, var0, "units")$value +calendar <- ncatt_get(data_nc, "time", "calendar")$value +long_names <- ncatt_get(data_nc, var0, "long_name")$value +time <- ncvar_get(data_nc, "time") +start_date <- + as.POSIXct(substr( + ncatt_get(data_nc, "time", "units")$value, + 11, 29 + )) +nc_close(data_nc) +time <- as.Date(time, origin = start_date, calendar = calendar) + +dim_names <- names(dim(data)) +lon_dim <- which(names(dim(data)) == "lon") +lat_dim <- which(names(dim(data)) == "lat") +data <- WeightedMean( + data, + lat = lat, + lon = lon, + # nolint + londim = lon_dim, + latdim = lat_dim +) +names(dim(data)) <- dim_names[-c(lon_dim, lat_dim)] +time_dim <- which(names(dim(data)) == "time") + +ToyModel <- # nolint + function(alpha = 0.1, + beta = 0.4, + gamma = 1, + sig = 1, + # nolint + trend = 0, + nstartd = 30, + nleadt = 4, + nmemb = 10, + obsini = NULL, + fxerr = NULL) { + 
if (any(!is.numeric(c( + alpha, beta, gamma, sig, trend, nstartd, + nleadt, nmemb + )))) { + stop( + paste( + "Parameters alpha, beta, gamma, sig, trend, nstartd,", + "nleadt and nmemb must be numeric." + ) + ) + } + nstartd <- round(nstartd) + nleadt <- round(nleadt) + nmemb <- round(nmemb) + if (!is.null(obsini)) { + if (!is.numeric(obsini) || !is.array(obsini)) { + stop("Parameter obsini must be a numeric array.") + } + if (length(dim(obsini)) != 4) { + stop( + paste( + "Parameter obsini must be an array with dimensions", + "c(1, 1, nleadt, nstartd)." + ) + ) + } + if (dim(obsini)[3] != nstartd || dim(obsini)[4] != nleadt) { + stop( + paste0( + "The dimensions of parameter obsini and the parameters ", + "nleadt and nstartd must match:\n dim(obsini) = c(", + dim(obsini)[3], + ", ", + dim(obsini)[4], + ")\n nstartd = ", + nstartd, + " nleadt = ", + nleadt + ) + ) + } + } + if (!is.null(fxerr)) { + if (!is.numeric(fxerr)) { + stop("Parameter fxerr must be numeric.") + } + } + if (nstartd < 0) { + stop("Number of start dates must be positive") + } + if (nleadt < 0) { + stop("Number of lead-times must be positive") + } + if (nmemb < 0) { + stop("Number of members must be positive") + } + + obs_ano <- obsini + + forecast <- array(dim = c(length(gamma), nmemb, nstartd, nleadt)) + for (j in seq_len(nstartd)) { + for (f in seq_len(nleadt)) { + for (g in seq_along(gamma)) { + auto_term <- obs_ano[1, 1, j, f] + if (is.numeric(fxerr)) { + conf_term <- fxerr + } else { + conf_term <- rnorm(nmemb, mean = 0, sd = beta) + } + trend_term <- gamma[g] * trend * j + var_corr <- rnorm(nmemb, + mean = 0, + sd = sqrt(sig - alpha^2 - beta^2) + ) + forecast[g, , j, f] <- + matrix(auto_term, c(nmemb, 1)) + # nolint + matrix(conf_term, c(nmemb, 1)) + matrix(trend_term, c(nmemb, 1)) + } + } + } + list(mod = forecast, obs = obs_ano) + } + +forecast <- + ToyModel( + alpha = a, + beta = b, + gamma = g, + nmemb = nm, + # nolint + obsini = InsertDim(data, 1, 1), + # nolint + nstartd = 1, + nleadt = dim(data)[time_dim] + ) + +ymin <- min(forecast$mod, na.rm = TRUE) +ymax <- max(forecast$mod, na.rm = TRUE) + +filepng <- paste0(plot_dir, "/", "synthetic_", gsub( + ".nc", "", + basename(fullpath_filenames) +), ".jpg") +jpeg( + filepng, + height = 15, + width = 20, + res = 300, + units = "cm" +) +title <- paste(nm, "synthetic members generated") +plot( + time, + forecast$obs, + type = "l", + ylab = paste(var0, "(", units, ")"), + main = title, + bty = "n", + ylim = c(ymin, ymax) +) +matlines(time, t(forecast$mod[1, , 1, ]), # nolint + col = brewer.pal(n = nm, name = "Blues") +) +lines(time, forecast$obs, lwd = 2) +dev.off() + + +obs_data <- forecast$obs +data <- forecast$mod[1, , 1, ] # nolint +names(dim(data))[c(1, 2)] <- c("number", "time") + +attributes(time) <- NULL +dim(time) <- c(time = length(time)) +metadata <- + list( + time = list( + standard_name = "time", + long_name = "time", + units = "days since 1970-01-01 00:00:00", + prec = "double", + dim = list(list(name = "time", unlim = FALSE)) + ) + ) +attr(time, "variables") <- metadata +metadata <- + list(index = list(dim = list( + list( + name = "time", unlim = FALSE, + prec = "double" + ) + ))) +names(metadata)[1] <- var0 +attr(data, "variables") <- metadata +variable_list <- list(variable = data, time = time) +names(variable_list)[1] <- var0 +filencdf <- + paste0(work_dir, "/", "synthetic_", basename(fullpath_filenames)) +ArrayToNetCDF(variable_list, filencdf) # nolint + +# Set provenance for output files +xprov <- list( + ancestors = list(fullpath_filenames), + 
authors = list("bellprat_omar"), + projects = list("c3s-magic"), + caption = title, + statistics = list("other"), + realms = list("atmos"), + themes = list("phys") +) +provenance[[filepng]] <- xprov +provenance[[filencdf]] <- xprov + +# Write provenance to file +write_yaml(provenance, provenance_file) diff --git a/esmvaltool/diag_scripts/magic_bsc/toymodel.r b/esmvaltool/diag_scripts/magic_bsc/toymodel.r deleted file mode 100644 index 2e5622628e..0000000000 --- a/esmvaltool/diag_scripts/magic_bsc/toymodel.r +++ /dev/null @@ -1,205 +0,0 @@ -library(s2dverification) -library(ncdf4) -library(multiApply) #nolint -library(yaml) -library(abind) -library(ClimProjDiags) #nolint -library(RColorBrewer) #nolint - -args <- commandArgs(trailingOnly = TRUE) -params <- read_yaml(args[1]) -plot_dir <- params$plot_dir -run_dir <- params$run_dir -work_dir <- params$work_dir - -dir.create(plot_dir, recursive = TRUE) -dir.create(run_dir, recursive = TRUE) -dir.create(work_dir, recursive = TRUE) - -# setup provenance file and list -provenance_file <- paste0(run_dir, "/", "diagnostic_provenance.yml") -provenance <- list() - -input_files_per_var <- yaml::read_yaml(params$input_files) - -model_names <- lapply(input_files_per_var, function(x) x$model) -model_names <- unname(model_names) -var0 <- lapply(input_files_per_var, function(x) x$short_name) -fullpath_filenames <- names(var0) -var0 <- unname(var0)[1] -a <- 1 -b <- params$beta -g <- 0.1 -nm <- params$number_of_members -nstartd <- 1 -nleadt <- params$no_of_lead_times - - -var0 <- unlist(var0) -data_nc <- nc_open(fullpath_filenames) -data <- ncvar_get(data_nc, var0) -data <- InsertDim(InsertDim(data, 1, 1), 1, 1) #nolint -names(dim(data)) <- c("model", "var", "lon", "lat", "time") -lat <- ncvar_get(data_nc, "lat") -lon <- ncvar_get(data_nc, "lon") -lon <- unlist(lon) -lat <- unlist(lat) -print(lon) -print(lat) -attributes(lon) <- NULL -attributes(lat) <- NULL -units <- ncatt_get(data_nc, var0, "units")$value -calendar <- ncatt_get(data_nc, "time", "calendar")$value -long_names <- ncatt_get(data_nc, var0, "long_name")$value -time <- ncvar_get(data_nc, "time") -start_date <- as.POSIXct(substr(ncatt_get(data_nc, "time", "units")$value, - 11, 29)) -nc_close(data_nc) -time <- as.Date(time, origin = start_date, calendar = calendar) - -dim_names <- names(dim(data)) -lon_dim <- which(names(dim(data)) == "lon") -lat_dim <- which(names(dim(data)) == "lat") -data <- WeightedMean(data, lat = lat, lon = lon, #nolint - londim = lon_dim, latdim = lat_dim) -names(dim(data)) <- dim_names[-c(lon_dim, lat_dim)] -time_dim <- which(names(dim(data)) == "time") - -ToyModel <- function ( #nolint - alpha = 0.1, beta = 0.4, gamma = 1, sig = 1, trend = 0, - nstartd = 30, nleadt = 4, nmemb = 10, obsini = NULL, fxerr = NULL -) { - if (any(!is.numeric(c(alpha, beta, gamma, sig, trend, nstartd, - nleadt, nmemb)))) { - stop(paste("Parameters alpha, beta, gamma, sig, trend, nstartd,", - "nleadt and nmemb must be numeric.")) - } - nstartd <- round(nstartd) - nleadt <- round(nleadt) - nmemb <- round(nmemb) - if (!is.null(obsini)) { - if (!is.numeric(obsini) || !is.array(obsini)) { - stop("Parameter obsini must be a numeric array.") - } - if (length(dim(obsini)) != 4) { - stop(paste( - "Parameter obsini must be an array with dimensions", - "c(1, 1, nleadt, nstartd)." 
- )) - } - if (dim(obsini)[3] != nstartd || dim(obsini)[4] != nleadt) { - stop(paste0( - "The dimensions of parameter obsini and the parameters ", - "nleadt and nstartd must match:\n dim(obsini) = c(", - dim(obsini)[3], ", ", dim(obsini)[4], ")\n nstartd = ", - nstartd, " nleadt = ", nleadt - )) - } - } - if (!is.null(fxerr)) { - if (!is.numeric(fxerr)) { - stop("Parameter fxerr must be numeric.") - } - } - if (nstartd < 0) { - stop("Number of start dates must be positive") - } - if (nleadt < 0) { - stop("Number of lead-times must be positive") - } - if (nmemb < 0) { - stop("Number of members must be positive") - } - - obs_ano <- obsini - - forecast <- array(dim = c(length(gamma), nmemb, nstartd, nleadt)) - for (j in 1 : nstartd) { - for (f in 1 : nleadt) { - for (g in 1 : length(gamma)) { - auto_term <- obs_ano[1, 1, j, f] - if (is.numeric(fxerr)) { - conf_term <- fxerr - } - else { - conf_term <- rnorm(1, mean = 0, sd = beta) - } - trend_term <- gamma[g] * trend * j - var_corr <- rnorm( - nmemb, - mean = 0, - sd = sqrt(sig - alpha ^ 2 - beta ^ 2) - ) - forecast[g, , j, f] <- matrix(auto_term, c(nmemb,1)) + #nolint - matrix(conf_term, c(nmemb, 1)) + matrix(trend_term, c(nmemb, 1)) - } - } - } - list(mod = forecast, obs = obs_ano) -} - -forecast <- ToyModel(#nolint - alpha = a, - beta = b, - gamma = g, - nmemb = nm, - obsini = InsertDim(data, 1, 1), # nolint - nstartd = 1, - nleadt = dim(data)[time_dim] -) - - -print(brewer.pal(n = nm, name = "Reds")) -filepng <- paste0(plot_dir, "/", "synthetic_", gsub(".nc", "", - basename(fullpath_filenames)), ".jpg") -jpeg(filepng, height = 460, width = 600) -title <- paste(nm, "synthetic members generated") -plot(time, forecast$obs, type = "l", - ylab = paste(var0, "(", units, ")"), - main = title, - bty = "n" -) -matlines( - time, - t(forecast$mod[1, , 1, ]), #nolint - col = brewer.pal(n = nm, name = "Blues") -) -lines(time, forecast$obs, lwd = 2) -dev.off() - - -obs_data <- forecast$obs -data <- forecast$mod[1, , 1, ] #nolint -names(dim(data))[c(1, 2)] <- c("number", "time") - -attributes(time) <- NULL -dim(time) <- c(time = length(time)) -metadata <- list(time = list(standard_name = "time", long_name = "time", - units = "days since 1970-01-01 00:00:00", prec = "double", - dim = list(list(name = "time", unlim = FALSE)))) -attr(time, "variables") <- metadata -metadata <- list(index = list(dim = list(list(name = "time", unlim = FALSE, - prec = "double")))) -names(metadata)[1] <- var0 -attr(data, "variables") <- metadata -variable_list <- list(variable = data, time = time) -names(variable_list)[1] <- var0 -print(str(data)) -filencdf <- paste0(work_dir, "/", "synthetic_", - basename(fullpath_filenames)) -ArrayToNetCDF(variable_list, filencdf) #nolint - - # Set provenance for output files - xprov <- list(ancestors = list(fullpath_filenames), - authors = list("bell_om"), - projects = list("c3s-magic"), - caption = title, - statistics = list("other"), - realms = list("atmos"), - themes = list("phys"), - plot_file = filepng) - - provenance[[filencdf]] <- xprov - -# Write provenance to file -write_yaml(provenance, provenance_file) diff --git a/esmvaltool/diag_scripts/magic_bsc/weather_regime.R b/esmvaltool/diag_scripts/magic_bsc/weather_regime.R new file mode 100644 index 0000000000..12d2acdc9a --- /dev/null +++ b/esmvaltool/diag_scripts/magic_bsc/weather_regime.R @@ -0,0 +1,934 @@ +library(s2dverification) +library(ggplot2) +library(multiApply) # nolint +library(ncdf4) +library(gridExtra) # nolint +library(ClimProjDiags) # nolint +library(yaml) + 
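The opening lines of weather_regime.R below rely on Rscript passing the script's own path as a --file= argument, which lets helper files be sourced relative to the script. A standalone sketch of the idiom (file_arg is empty in an interactive session):

initial_options <- commandArgs(trailingOnly = FALSE)
file_arg <- grep("^--file=", initial_options, value = TRUE)
script_dir <- dirname(sub("^--file=", "", file_arg))
# source(file.path(script_dir, "WeatherRegime.R"))  # as done below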
+initial_options <- commandArgs(trailingOnly = FALSE) +file_arg_name <- "--file=" +script_name <- + sub(file_arg_name, "", initial_options[grep( + file_arg_name, + initial_options + )]) +script_dirname <- dirname(script_name) +source(file.path(script_dirname, "WeatherRegime.R")) +source(file.path(script_dirname, "RegimesAssign.R")) + +## Regimes namelist +args <- commandArgs(trailingOnly = TRUE) +params <- yaml::read_yaml(args[1]) + +plot_dir <- params$plot_dir +run_dir <- params$run_dir +work_dir <- params$work_dir +## Create working dirs if they do not exist +dir.create(plot_dir, recursive = TRUE) +dir.create(run_dir, recursive = TRUE) +dir.create(work_dir, recursive = TRUE) + +# setup provenance file and list +provenance_file <- paste0(run_dir, "/", "diagnostic_provenance.yml") +provenance <- list() + + +input_files_per_var <- yaml::read_yaml(params$input_files) + +model_names <- lapply(input_files_per_var, function(x) { + x$dataset +}) +model_names <- unique(unlist(unname(model_names))) + + +var0 <- lapply(input_files_per_var, function(x) { + x$short_name +}) +fullpath_filenames <- names(var0) +var0 <- unname(var0)[1] + +experiment <- lapply(input_files_per_var, function(x) { + x$exp +}) +experiment <- unlist(unname(experiment)) + +reference_files <- which(unname(experiment) == "historical") +projection_files <- which(unname(experiment) != "historical") + + +# Region considered to select the plot +region <- params$plot_type + + +# Start and end periods for the historical and projection periods +start_historical <- lapply( + input_files_per_var, + function(x) { + x$start_year + } +)[reference_files] +starting1 <- c(unlist(unname(start_historical)))[1] +end_historical <- lapply( + input_files_per_var, + function(x) { + x$end_year + } +)[reference_files] +ending1 <- c(unlist(unname(end_historical)))[1] +start_historical <- as.POSIXct(as.Date( + paste0(starting1, "-01-01"), + "%Y-%m-%d" +)) +end_historical <- + as.POSIXct(as.Date(paste0(ending1, "-12-31"), "%Y-%m-%d")) + +start_projection <- lapply( + input_files_per_var, + function(x) { + x$start_year + } +)[projection_files] +starting2 <- c(unlist(unname(start_projection)))[1] +end_projection <- lapply( + input_files_per_var, + function(x) { + x$end_year + } +)[projection_files] +ending2 <- c(unlist(unname(end_projection)))[1] +start_projection <- as.POSIXct(as.Date( + paste0(starting2, "-01-01"), + "%Y-%m-%d" +)) +end_projection <- as.POSIXct(as.Date( + paste0(ending2, "-12-31"), + "%Y-%m-%d" +)) + + +# Regime parameters +ncenters <- params$ncenters +cluster_method <- params$cluster_method +if (cluster_method != "kmeans") { + cluster_method <- "complete" +} +frequency <- params$frequency +detrend_order <- params$detrend_order + +if (grepl("OImon", fullpath_filenames[1])) { + var_lat <- "rlat" + var_lon <- "rlon" +} else { + var_lat <- "lat" + var_lon <- "lon" +} +# --------------------------- +# Reading and formatting +# --------------------------- +ref_nc <- nc_open(fullpath_filenames[reference_files]) +var0 <- unlist(var0) +reference_data <- ncvar_get(ref_nc, var0) +if (var0 == "psl") { + names(dim(reference_data)) <- rev(names(ref_nc$dim))[-1] +} +lat <- ncvar_get(ref_nc, var_lat) +lon <- ncvar_get(ref_nc, var_lon) +units <- ncatt_get(ref_nc, var0, "units")$value +calendario <- ncatt_get(ref_nc, "time", "calendar")$value +long_names <- ncatt_get(ref_nc, var0, "long_name")$value +time <- ncvar_get(ref_nc, "time") +start_date <- as.POSIXct(substr(ncatt_get( + ref_nc, "time", + "units" +)$value, 11, 29)) +nc_close(ref_nc) + +data_type 
<- + ifelse(grepl("day", fullpath_filenames[1]), "day", "month") +dates_historical <- seq(start_historical, end_historical, data_type) + +if (dim(reference_data)[3] != length(dates_historical)) { + if (calendario == "365" | calendario == "365_days" | + calendario == "365_day" | calendario == "noleap") { + dates_historical <- dates_historical[-which(substr( + dates_historical, + 6, 10 + ) == "02-29")] # nolint + } +} + +reference_data <- as.vector(reference_data) +dim(reference_data) <- c( + model = 1, + var = 1, + lon = length(lon), + lat = length(lat), + time = length(dates_historical) +) + +reference_data <- aperm(reference_data, c(1, 2, 5, 3, 4)) +attr(reference_data, "Variables")$dat1$time <- dates_historical + +names(dim(reference_data)) <- + c("model", "var", "time", "lon", "lat") +time_dimension <- which(names(dim(reference_data)) == "time") + + +# ------------------------------- +## Selecting the season or month +# ------------------------------- +time_dim <- which(names(dim(reference_data)) == "time") + +months <- + c( + "JAN", + "FEB", + "MAR", + "APR", + "MAY", + "JUN", + "JUL", + "AUG", + "SEP", + "OCT", + "NOV", + "DEC" + ) +seasons <- c("DJF", "MAM", "JJA", "SON") +mes <- match(frequency, months) +sea <- match(frequency, seasons) +if (!is.na(mes)) { + dims <- dim(reference_data) + ind <- which(as.numeric(substr(dates_historical, 6, 7)) == mes) + years <- unique(as.numeric(substr(dates_historical, 1, 4))) + reference_data <- reference_data[, , ind, , ] # nolint + dims <- + append(dims, c(length(ind) / length(years), length(years)), + after = time_dim + ) +} else if (!is.na(sea)) { + reference_data <- + SeasonSelect( + reference_data, + season = frequency, + # nolint + dates = dates_historical, + calendar = calendario + ) + time <- reference_data$dates + years <- unique(as.numeric(substr(time, 1, 4))) + reference_data <- reference_data$data + reference_data <- + InsertDim(reference_data, posdim = 1, lendim = 1) # nolint + reference_data <- + InsertDim(reference_data, posdim = 1, lendim = 1) # nolint + names(dim(reference_data))[c(1, 2)] <- c("model", "var") + dims <- dim(reference_data) + dims <- + append(dims, c(length(time) / length(years), length(years)), + after = time_dim + ) +} +dims <- dims[-time_dim] +dim(reference_data) <- dims + +names(dim(reference_data))[c(time_dim, time_dim + 1)] <- + c("sdate", "ftime") + +compute_loess <- function(clim, loess_span) { + if (sum(is.na(clim)) != length(clim)) { + data <- data.frame(ensmean = clim, day = seq_along(clim)) + loess_filt <- loess(ensmean ~ day, data, + span = loess_span, + degree = detrend_order + ) + output <- predict(loess_filt) + } else { + output <- rep(NA, length(clim)) + } + return(output) +} +# ------------------------------- +## Computing the WR_obs +# ------------------------------- + +clim_obs <- array(apply(reference_data, c(1, 2, 3, 5, 6), mean), + dim = dim(reference_data)[-4] +) +if (data_type == "day" | !is.na(sea)) { + clim_obs <- aperm(apply(clim_obs, + c(seq_along(dim( + clim_obs + )))[-which(names(dim(clim_obs)) == "sdate")], + compute_loess, + loess_span = 1 + ), c(2, 3, 1, 4, 5)) +} +names(dim(clim_obs))[3] <- "sdate" +anom_obs <- Ano(reference_data, clim_obs) +print(dim(anom_obs)) +print(length(lon)) +wr_obs <- + WeatherRegime( + data = anom_obs, + EOFS = params$EOFS, + lat = lat, + # nolint + lon = lon, + ncenters = ncenters, + method = cluster_method + ) +if (cluster_method != "kmeans" & data_type == "day") { + wr_obs$composite <- aperm(wr_obs$composite, c(2, 1, 3, 4, 5)) +} 
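The calendar fix above deserves a note: models on a 365-day ("noleap") calendar store no 29 February, so the expected daily date axis must drop that day before the lengths can match. Standalone sketch with an illustrative calendar value:

start <- as.POSIXct("2000-01-01", tz = "UTC")
end <- as.POSIXct("2001-12-31", tz = "UTC")
dates <- seq(start, end, by = "day")
calendar <- "noleap"  # illustrative; read from the file attributes in practice
if (calendar %in% c("365", "365_days", "365_day", "noleap")) {
  dates <- dates[substr(dates, 6, 10) != "02-29"]
}
length(dates)  # 730: two full years with the 2000 leap day removed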
+names(dim(wr_obs$composite)) <- + c("lat", "lon", "Cluster", "Mod", "exp") +names(dim(wr_obs$cluster))[1] <- "Evolution" +# ------------------------------- +## Plotting the WR_obs output +# ------------------------------- + +if (cluster_method == "kmeans") { + clim_frequencies <- paste0( + "freq = ", + round(Mean1Dim(wr_obs$frequency, 1), 1), "%" + ) +} else { + freq_clus <- NULL + for (i in 1:ncenters) { + freq_clus <- c(freq_clus, round(sum(wr_obs$cluster[, 1, 1] == i) / + length(wr_obs$cluster[, 1, 1]) * 100, 1)) + } + clim_frequencies <- paste0("freq = ", freq_clus, "%") + wr_obs$frequency <- freq_clus +} + +cosa <- aperm(drop(wr_obs$composite), c(3, 1, 2)) +lim <- max(abs(cosa / 100), na.rm = TRUE) +if (lim < 1) { + x <- floor(log10(lim)) + 1 + lim <- 10^x +} else { + lim <- ceiling(lim) +} + +if (region == "polar") { + filepng1 <- paste0( + plot_dir, "/", frequency, "-", var0, + "_reference_regimes.png" + ) + title <- + paste0(paste0("Cluster ", 1:4), " (", clim_frequencies, " )") + PlotLayout( + PlotStereoMap, + c(2, 3), + lon = lon, + lat = lat, + # nolint + var = cosa / 100, + titles = title, + filled.continents = FALSE, + axelab = FALSE, + draw_separators = TRUE, + subsampleg = 1, + brks = seq(-1 * lim, lim, by = lim / 10), + fileout = filepng1 + ) +} else { + filepng1 <- paste0( + plot_dir, "/", frequency, "-", var0, + "_observed_regimes.png" + ) + title <- + paste0(paste0("Cluster ", 1:4), " (", clim_frequencies, " )") + if (length(lon) != dim(cosa)["lon"]) { + pos_lon <- which(names(dim(cosa)) == "lon") + pos_lat <- which(names(dim(cosa)) == "lat") + names(dim(cosa))[pos_lon] <- "lat" + names(dim(cosa))[pos_lat] <- "lon" + } + PlotLayout( + PlotEquiMap, + c(2, 3), + lon = lon, + lat = lat, + # nolint + var = cosa / 100, + titles = title, + filled.continents = FALSE, + axelab = FALSE, + draw_separators = TRUE, + subsampleg = 1, + brks = seq(-1 * lim, lim, by = lim / 10), + fileout = filepng1 + ) +} + +# ------------------------------- +## Save the WR_obs output to ncdf +# ------------------------------- +time <- dates_historical +time <- julian(time, origin = as.POSIXct("1970-01-01")) +attributes(time) <- NULL +dim(time) <- c(time = length(time)) +metadata <- + list( + time = list( + standard_name = "time", + long_name = "time", + units = "days since 1970-01-01 00:00:00", + prec = "double", + dim = list(list(name = "time", unlim = FALSE)) + ) + ) +attr(time, "variables") <- metadata + +attributes(lon) <- NULL +attributes(lat) <- NULL +dim(lon) <- c(lon = length(lon)) +dim(lat) <- c(lat = length(lat)) +metadata <- list(variable = list(dim = list(list( + name = "time", + unlim = FALSE +)))) +dim(wr_obs$frequency) <- c(frequency = length(wr_obs$frequency)) +dim(wr_obs$pvalue) <- c(pvalue = length(wr_obs$pvalue)) +dim(wr_obs$cluster) <- c(cluster = length(wr_obs$cluster)) +if (cluster_method != "kmeans") { + names(dim(wr_obs$composite))[c(1, 2)] <- c("lon", "lat") +} +if (length(lon) != dim(wr_obs$composite)["lon"]) { + pos_lon <- which(names(dim(wr_obs$composite)) == "lon") + pos_lat <- which(names(dim(wr_obs$composite)) == "lat") + names(dim(wr_obs$composite))[pos_lon] <- "lat" + names(dim(wr_obs$composite))[pos_lat] <- "lon" +} +variable_list <- + list( + variable = wr_obs$composite, + pvalue = wr_obs$pvalue, + cluster = wr_obs$cluster, + frequency = wr_obs$frequency, + lat = lat, + lon = lon, + time = time + ) +names(variable_list)[1] <- var0 +attributes(variable_list) <- NULL +filencdf1 <- + paste0( + work_dir, + "/", + var0, + "_", + frequency, + "_WR_ref_", + model_names, + 
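The colour-scale logic above (repeated later for the projected regimes) picks a limit symmetric around zero, rounded up to the next power of ten for small anomalies and to the next integer otherwise. The same logic as a reusable sketch (assumes a field that is not all zero):

symmetric_breaks <- function(x, nsteps = 10) {
  lim <- max(abs(x), na.rm = TRUE)
  lim <- if (lim < 1) 10^(floor(log10(lim)) + 1) else ceiling(lim)
  seq(-lim, lim, by = lim / nsteps)
}
symmetric_breaks(c(-0.037, 0.012))  # -0.1 ... 0.1 in steps of 0.01
symmetric_breaks(c(-2.3, 1.7))      # -3 ... 3 in steps of 0.3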
"_", + start_projection, + "_", + end_projection, + "_", + start_historical, + "_", + end_historical, + ".nc" + ) + +ArrayToNetCDF(variable_list, filencdf1) # nolint +# Set provenance for output files +xprov <- + list( + ancestors = list(fullpath_filenames[reference_files]), + authors = list( + "torralba_veronica", + "fuckar_neven", + "cortesi_nicola", + "guemas_virginie", + "hunter_alasdair", + "manubens_nicolau" + ), + projects = list("c3s-magic"), + caption = "Reference modes of variability", + statistics = list("eof"), + ncenters = params$ncenters, + cluster_method = cluster_method, + EOFS = params$EOFS, + frequency = params$frequency, + detrend_order = params$detrend_order, + realms = list("atmos"), + themes = list("phys") + ) +provenance[[filepng1]] <- xprov +provenance[[filencdf1]] <- xprov +# --------------------------- +# --------------------------- +# Reading and formatting +# --------------------------- +proj_nc <- nc_open(fullpath_filenames[projection_files]) +projection_data <- ncvar_get(proj_nc, var0) +names(dim(projection_data)) <- rev(names(proj_nc$dim))[-1] +time <- ncvar_get(proj_nc, "time") +start_date <- as.POSIXct(substr(ncatt_get( + proj_nc, "time", + "units" +)$value, 11, 29)) +nc_close(proj_nc) + +dates_projection <- seq(start_projection, end_projection, data_type) + +if (dim(projection_data)[3] != length(dates_projection)) { + if (calendario == "365" | calendario == "365_days" | + calendario == "365_day" | calendario == "noleap") { + dates_projection <- dates_projection[-which(substr( + dates_projection, + 6, 10 + ) == "02-29")] # nolint + } +} +data <- as.vector(projection_data) +dim(projection_data) <- c( + model = 1, + var = 1, + lon = length(lon), + lat = length(lat), + time = length(time) +) +projection_data <- aperm(projection_data, c(1, 2, 5, 3, 4)) +attr(projection_data, "Variables")$dat1$time <- dates_projection +names(dim(projection_data)) <- + c("model", "var", "time", "lon", "lat") +time_dimension <- which(names(dim(projection_data)) == "time") + +# --------------------------- +# Selecting the period +# --------------------------- +time_dim <- which(names(dim(projection_data)) == "time") + +if (!is.na(mes)) { + dims <- dim(projection_data) + ind <- which(as.numeric(substr(dates_projection, 6, 7)) == mes) + years <- unique(as.numeric(substr(dates_projection, 1, 4))) + projection_data <- projection_data[, , ind, , ] # nolint + dims <- + append(dims, c(length(ind) / length(years), length(years)), + after = time_dim + ) +} else if (!is.na(sea)) { + projection_data <- SeasonSelect( + projection_data, + # nolint + season = frequency, + dates = dates_projection, + calendar = calendario + ) + time <- projection_data$dates + years <- unique(as.numeric(substr(time, 1, 4))) + projection_data <- projection_data$data + projection_data <- + InsertDim(projection_data, posdim = 1, lendim = 1) # nolint + projection_data <- + InsertDim(projection_data, posdim = 1, lendim = 1) # nolint + + names(dim(projection_data))[c(1, 2)] <- c("model", "var") + dims <- dim(projection_data) + dims <- append(dims, + c(length(time) / length(years), length(years)), + after = time_dim + ) +} +dims <- dims[-time_dim] +dim(projection_data) <- dims +names(dim(projection_data))[c(time_dim, time_dim + 1)] <- + c("sdate", "ftime") + +clim_ref <- array(apply(projection_data, c(1, 2, 3, 5, 6), mean), + dim = dim(projection_data)[-4] +) +if (data_type == "day" | !is.na(sea)) { + clim_ref <- aperm(apply(clim_ref, c(seq_along(dim( + clim_ref + ))) + [-which(names(dim(clim_ref)) == "sdate")], + 
compute_loess, + loess_span = 1 + ), c(2, 3, 1, 4, 5)) +} +names(dim(clim_ref))[3] <- "sdate" +anom_exp <- Ano(projection_data, clim_ref) +reference <- drop(wr_obs$composite) +if (cluster_method == "kmeans") { + names(dim(reference)) <- c("lat", "lon", "nclust") + reference <- aperm(reference, c(3, 2, 1)) + names(dim(reference)) <- c("nclust", "lon", "lat") +} else { + names(dim(reference)) <- c("lon", "lat", "nclust") +} +if (length(lon) != dim(reference)["lon"]) { + pos_lon <- which(names(dim(reference)) == "lon") + pos_lat <- which(names(dim(reference)) == "lat") + names(dim(reference))[pos_lon] <- "lat" + names(dim(reference))[pos_lat] <- "lon" +} +if (length(lon) != dim(anom_exp)["lon"]) { + pos_lon <- which(names(dim(anom_exp)) == "lon") + pos_lat <- which(names(dim(anom_exp)) == "lat") + names(dim(anom_exp))[pos_lon] <- "lat" + names(dim(anom_exp))[pos_lat] <- "lon" +} + +if (any(names(dim(reference)) == "Cluster")) { + pos <- which(names(dim(reference)) == "Cluster") + names(dim(reference)) <- "nclust" # nolint +} +if (( + which(names(dim(reference)) == "lon") < # nolint + which(names(dim(reference)) == "lat") & + which(names(dim(anom_exp)) == "lon") > + which(names(dim(anom_exp)) == "lat") +) | + ( + which(names(dim(reference)) == "lon") > + which(names(dim(reference)) == "lat") & + which(names(dim(anom_exp)) == "lon") < + which(names(dim(anom_exp)) == "lat") + )) { + # nolint + dim_names <- names(dim(reference)) + pos_lon <- which(names(dim(reference)) == "lon") + pos_lat <- which(names(dim(reference)) == "lat") + pos <- seq_along(dim(reference)) + pos[pos_lon] <- pos_lat + pos[pos_lat] <- pos_lon + reference <- aperm(reference, pos) + names(dim(reference))[pos_lon] == "lat" + names(dim(reference))[pos_lat] == "lon" +} +wr_exp <- + RegimesAssign( + var_ano = anom_exp, + ref_maps = reference, + # nolint + lats = lat, + method = "distance" + ) + + +# --------------------------- +# Plotting WR projection: +# --------------------------- + +if (cluster_method == "kmeans") { + cosa <- aperm(wr_exp$composite, c(3, 2, 1)) + names(dim(wr_exp$composite))[3] <- "nclust" +} else { + names(dim(wr_exp$composite))[3] <- "nclust" +} +lim <- max(abs(cosa / 100), na.rm = TRUE) +if (lim < 1) { + x <- floor(log10(lim)) + 1 + lim <- 10^x +} else { + lim <- ceiling(lim) +} +if (region == "polar") { + filepng2 <- paste0( + plot_dir, "/", frequency, "-", var0, + "_predicted_regimes.png" + ) + title <- paste0(paste0( + "Cluster ", + 1:dim(cosa)[1], + " (", + paste0("freq = ", round(wr_exp$frequency, 1), "%"), + " )" + )) + PlotLayout( + PlotStereoMap, + c(2, 3), + lon = lon, + lat = lat, + # nolint + var = cosa / 100, + titles = title, + filled.continents = FALSE, + draw_separators = TRUE, + subsampleg = 1, + brks = seq(-1 * lim, lim, by = lim / 10), + fileout = filepng2 + ) +} else { + filepng2 <- paste0( + plot_dir, "/", frequency, "-", var0, + "_predicted_regimes.png" + ) + title <- paste0(paste0( + "Cluster ", + 1:dim(cosa)[1], + " (", + paste0("freq = ", round(wr_exp$frequency, 1), "%"), + " )" + )) + PlotLayout( + PlotEquiMap, + c(2, 3), + lon = lon, + lat = lat, + # nolint + var = cosa / 100, + titles = title, + filled.continents = FALSE, + axelab = FALSE, + draw_separators = TRUE, + subsampleg = 1, + brks = seq(-1 * lim, lim, by = lim / 10), + fileout = filepng2 + ) +} + + +# ------------------------------- +## Save the WR_exp output to ncdf +# ------------------------------- +time <- dates_projection +time <- julian(time, origin = as.POSIXct("1970-01-01")) +attributes(time) <- NULL +dim(time) <- 
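The repeated lon/lat bookkeeping in this part guards against composites arriving with transposed dimension names: if the dimension labelled "lon" does not have the length of the longitude vector, the two labels are swapped back. The same check as a sketch helper (not part of the script):

fix_lonlat_names <- function(x, lon) {
  if (length(lon) != dim(x)["lon"]) {
    nm <- names(dim(x))
    nm[match(c("lon", "lat"), names(dim(x)))] <- c("lat", "lon")
    names(dim(x)) <- nm
  }
  x
}
a <- array(0, dim = c(lon = 3, lat = 5))  # names attached the wrong way round
names(dim(fix_lonlat_names(a, lon = seq(0, 40, 10))))  # "lat" "lon"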
c(time = length(time)) +metadata <- + list( + time = list( + standard_name = "time", + long_name = "time", + units = "days since 1970-01-01 00:00:00", + prec = "double", + dim = list(list(name = "time", unlim = FALSE)) + ) + ) +attr(time, "variables") <- metadata + +attributes(lon) <- NULL +attributes(lat) <- NULL +dim(lon) <- c(lon = length(lon)) +dim(lat) <- c(lat = length(lat)) +metadata <- list(variable = list(dim = list(list( + name = "time", + unlim = FALSE +)))) + +dim(wr_exp$frequency) <- c(frequency = length(wr_exp$frequency)) +dim(wr_exp$pvalue) <- c(pvalue = length(wr_exp$pvalue)) +dim(wr_exp$cluster) <- c(cluster = length(wr_exp$cluster)) + +variable_list <- + list( + variable = wr_exp$composite, + pvalue = wr_exp$pvalue, + cluster = wr_exp$cluster, + frequency = wr_exp$frequency, + lat = lat, + lon = lon, + time = time + ) +names(variable_list)[1] <- var0 + +attributes(variable_list) <- NULL +filencdf2 <- + paste0( + work_dir, + "/", + var0, + "_", + frequency, + "_WR_exp_", + model_names, + "_", + start_projection, + "_", + end_projection, + "_", + start_historical, + "_", + end_historical, + ".nc" + ) +ArrayToNetCDF(variable_list, filencdf2) # nolint + +# Set provenance for output files +xprov <- list( + ancestors = list( + filencdf1, + fullpath_filenames[projection_files] + ), + authors = list( + "torralba_veronica", + "fuckar_neven", + "cortesi_nicola", + "guemas_virginie", + "hunter_alasdair", + "manubens_nicolau" + ), + projects = list("c3s-magic"), + caption = "Predicted modes of variability", + statistics = list("other"), + ncenters = params$ncenters, + cluster_method = cluster_method, + EOFS = params$EOFS, + frequency = params$frequency, + detrend_order = params$detrend_order, + realms = list("atmos"), + themes = list("phys") +) +provenance[[filepng2]] <- xprov +provenance[[filencdf2]] <- xprov +# --------------------------- +# Computing the RMSE: +# --------------------------- +if (cluster_method != "kmeans") { + cosa <- aperm(cosa, c(2, 3, 1)) + reference <- aperm(reference, c(3, 1, 2)) +} else { + cosa <- aperm(wr_exp$composite, c(2, 1, 3)) +} +if (length(lon) != dim(cosa)["lon"]) { + pos_lon <- which(names(dim(cosa)) == "lon") + pos_lat <- which(names(dim(cosa)) == "lat") + names(dim(cosa))[pos_lon] <- "lat" + names(dim(cosa))[pos_lat] <- "lon" +} +if (( + which(names(dim(reference)) == "lon") < # nolint + which(names(dim(reference)) == "lat") & + which(names(dim(cosa)) == "lon") > + which(names(dim(cosa)) == "lat") +) | + ( + which(names(dim(reference)) == "lon") > + which(names(dim(reference)) == "lat") & + which(names(dim(cosa)) == "lon") < + which(names(dim(cosa)) == "lat") + )) { + # nolint + dim_names <- names(dim(reference)) + pos_lon <- which(names(dim(reference)) == "lon") + pos_lat <- which(names(dim(reference)) == "lat") + pos <- seq_along(dim(reference)) + pos[pos_lon] <- pos_lat + pos[pos_lat] <- pos_lon + reference <- aperm(reference, pos) + names(dim(reference))[pos_lon] == "lat" + names(dim(reference))[pos_lat] == "lon" +} + +rmse <- NULL +for (i in 1:ncenters) { + for (j in 1:ncenters) { + rmse <- c(rmse, sqrt(mean(( + reference[i, , ] - cosa[, , j] + )^2, + na.rm = T + ))) + } +} +dim(rmse) <- c(ncenters, ncenters) +print(rmse) + +dimpattern <- ncdim_def( + name = "pattern", + units = "undim", + vals = 1:ncenters, + longname = "Pattern" +) +title <- paste0( + "Root Mean Squared Error between reference and ", + "future projected patterns" +) +defrmse <- ncvar_def( + name = "rmse", + units = "undim", + dim = list( + observed = dimpattern, + 
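The nested loop above fills an ncenters x ncenters matrix of pattern RMSEs, reference regimes in rows and projected regimes in columns. Equivalent standalone sketch with toy fields:

set.seed(2)
ncenters <- 2
reference <- array(rnorm(ncenters * 6), dim = c(ncenters, 3, 2))  # clust x lon x lat
projected <- array(rnorm(6 * ncenters), dim = c(3, 2, ncenters))  # lon x lat x clust
rmse <- matrix(NA_real_, ncenters, ncenters)
for (i in seq_len(ncenters)) {
  for (j in seq_len(ncenters)) {
    rmse[i, j] <- sqrt(mean((reference[i, , ] - projected[, , j])^2, na.rm = TRUE))
  }
}
round(rmse, 2)  # row = reference pattern, column = projected pattern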
experiment = dimpattern + ), + longname = title +) +filencdf3 <- paste0( + work_dir, + "/", + var0, + "_", + frequency, + "_rmse_", + model_names, + "_", + start_projection, + "_", + end_projection, + "_", + start_historical, + "_", + end_historical, + ".nc" +) +file <- nc_create(filencdf3, list(defrmse)) +ncvar_put(file, defrmse, rmse) + +nc_close(file) + +colnames(rmse) <- paste("Ref", 1:ncenters) +rownames(rmse) <- paste("Pre", 1:ncenters) +filepng3 <- + paste0( + file.path(plot_dir, "Table_"), + var0, + "_", + frequency, + "_rmse_", + model_names, + "_", + start_projection, + "_", + end_projection, + "_", + start_historical, + "_", + end_historical, + ".png" + ) +png( + filepng3, + height = 6, + width = 18, + units = "cm", + res = 100 +) +grid.table(round(rmse, 2)) +dev.off() + +# Set provenance for output files +xprov <- list( + ancestors = list(filencdf1, filencdf2), + authors = list( + "torralba_veronica", + "fuckar_neven", + "cortesi_nicola", + "guemas_virginie", + "hunter_alasdair", + "manubens_nicolau" + ), + projects = list("c3s-magic"), + caption = title, + statistics = list("rmsd"), + ncenters = params$ncenters, + cluster_method = cluster_method, + EOFS = params$EOFS, + frequency = params$frequency, + detrend_order = params$detrend_order, + realms = list("atmos"), + themes = list("phys") +) +provenance[[filepng3]] <- xprov +provenance[[filencdf3]] <- xprov + +# Write provenance to file +write_yaml(provenance, provenance_file) diff --git a/esmvaltool/diag_scripts/magic_bsc/weather_regime.r b/esmvaltool/diag_scripts/magic_bsc/weather_regime.r deleted file mode 100644 index 592840a720..0000000000 --- a/esmvaltool/diag_scripts/magic_bsc/weather_regime.r +++ /dev/null @@ -1,673 +0,0 @@ -library(s2dverification) -library(ggplot2) -library(multiApply) #nolint -library(ncdf4) -library(gridExtra) #nolint -library(ClimProjDiags) #nolint -library(yaml) - -initial_options <- commandArgs(trailingOnly = FALSE) -file_arg_name <- "--file=" -script_name <- sub(file_arg_name, "", initial_options[grep(file_arg_name, - initial_options)]) -script_dirname <- dirname(script_name) -source(file.path(script_dirname, "WeatherRegime.r")) -source(file.path(script_dirname, "RegimesAssign.r")) - -## Regimes namelist -args <- commandArgs(trailingOnly = TRUE) -params <- yaml::read_yaml(args[1]) - -plot_dir <- params$plot_dir -run_dir <- params$run_dir -work_dir <- params$work_dir -## Create working dirs if they do not exist -dir.create(plot_dir, recursive = TRUE) -dir.create(run_dir, recursive = TRUE) -dir.create(work_dir, recursive = TRUE) - -# setup provenance file and list -provenance_file <- paste0(run_dir, "/", "diagnostic_provenance.yml") -provenance <- list() - -input_files_per_var <- yaml::read_yaml(params$input_files) - - -model_names <- lapply(input_files_per_var, function(x) x$dataset) -model_names <- unique(unlist(unname(model_names))) - - -var0 <- lapply(input_files_per_var, function(x) x$short_name) -fullpath_filenames <- names(var0) -var0 <- unname(var0)[1] - -experiment <- lapply(input_files_per_var, function(x) x$exp) -experiment <- unlist(unname(experiment)) - -reference_files <- which(unname(experiment) == "historical") -projection_files <- which(unname(experiment) != "historical") - - -#Region considered to select the plot -region <- params$region - - -#Start and end periods for the historical and projection periods -start_historical <- as.POSIXct(params$start_historical) -end_historical <- as.POSIXct(params$end_historical) -start_projection <- as.POSIXct(params$start_projection) 
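The table plot above uses gridExtra::grid.table(), which draws a matrix as a graphical table into the open PNG device. Condensed sketch:

library(gridExtra)  # nolint
rmse <- matrix(c(0.12, 0.95, 0.88, 0.10), nrow = 2,
               dimnames = list(paste("Ref", 1:2), paste("Pre", 1:2)))
png("rmse_table.png", height = 6, width = 18, units = "cm", res = 100)
grid.table(round(rmse, 2))
dev.off()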
-end_projection <- as.POSIXct(params$end_projection) - -#Regime parameters -ncenters <- params$ncenters -cluster_method <- params$cluster_method -if (cluster_method != "kmeans") { - cluster_method <- "complete" -} -EOFS <- params$EOFS -frequency <- params$frequency -detrend_order <- params$detrend_order - -if (grepl("OImon", fullpath_filenames[1])) { - var_lat <- "rlat" - var_lon <- "rlon" -} else { - var_lat <- "lat" - var_lon <- "lon" -} -# --------------------------- -# Reading and formating -# --------------------------- -ref_nc <- nc_open(fullpath_filenames[reference_files]) -var0 <- unlist(var0) -reference_data <- ncvar_get(ref_nc, var0) -print(str(reference_data)) -names(dim(reference_data)) <- rev(names(ref_nc$dim))[-1] -lat <- ncvar_get(ref_nc, var_lat) -lon <- ncvar_get(ref_nc, var_lon) -units <- ncatt_get(ref_nc, var0, "units")$value -calendario <- ncatt_get(ref_nc, "time", "calendar")$value -long_names <- ncatt_get(ref_nc, var0, "long_name")$value -time <- ncvar_get(ref_nc, "time") -start_date <- as.POSIXct(substr(ncatt_get(ref_nc, "time", - "units")$value, 11, 29)) -nc_close(ref_nc) - -data_type <- ifelse(grepl("day", fullpath_filenames[1]), "day", "month") -dates_historical <- seq(start_historical, end_historical, data_type) - -print(calendario) -if (dim(reference_data)[3] != length(dates_historical)) { - if ( - calendario == "365" | calendario == "365_days" | - calendario == "365_day" | calendario == "noleap" - ) { -dates_historical <- -dates_historical[-which(substr(dates_historical, 6, 10) == "02-29")]#nolint - } -} - -reference_data <- as.vector(reference_data) -dim(reference_data) <- c( - model = 1, - var = 1, - lon = length(lon), - lat = length(lat), - time = length(dates_historical) -) - -reference_data <- aperm(reference_data, c(1, 2, 5, 3, 4)) -attr(reference_data, "Variables")$dat1$time <- dates_historical - - -names(dim(reference_data)) <- c("model", "var", "time", "lon", "lat") -time_dimension <- which(names(dim(reference_data)) == "time") - - -# ------------------------------- -## Selecting the season or month -# ------------------------------- -time_dim <- which(names(dim(reference_data)) == "time") - -months <- c( - "JAN", "FEB", "MAR", "APR", "MAY", "JUN", "JUL", "AUG", "SEP", - "OCT", "NOV", "DEC" -) -seasons <- c("DJF", "MAM", "JJA", "SON") -mes <- match(frequency, months) -sea <- match(frequency, seasons) -print(mes) -print(frequency) -if (!is.na(mes)) { - print("MONTHLY") - dims <- dim(reference_data) - ind <- which(as.numeric(substr(dates_historical, 6, 7)) == mes) - years <- unique(as.numeric(substr(dates_historical, 1, 4))) - reference_data <- reference_data[ , , ind , , ] #nolint - print(dims) - dims <- append( - dims, c(length(ind) / length(years), length(years)), after = time_dim - ) - print(dims) -} else if (!is.na(sea)) { - print("Seasonal") - reference_data <- SeasonSelect( #nolint - reference_data, - season = frequency, - dates = dates_historical, - calendar = calendario - ) - time <- reference_data$dates - years <- unique(as.numeric(substr(time, 1, 4))) - reference_data <- reference_data$data - reference_data <- InsertDim(reference_data, posdim = 1, lendim = 1) #nolint - reference_data <- InsertDim(reference_data, posdim = 1, lendim = 1) #nolint - names(dim(reference_data))[c(1, 2)] <- c("model", "var") - dims <- dim(reference_data) -dims <- append( - dims, c(length(time) / length(years), length(years)), after = time_dim - ) - } - - -dims <- dims[-time_dim] -dim(reference_data) <- dims - -names(dim(reference_data))[c(time_dim, time_dim + 1)] <- 
c("sdate", "ftime") - -Loess <- function(clim, loess_span) { - if (sum(is.na(clim)) != length(clim)) { - data <- data.frame(ensmean = clim, day = 1 : length(clim)) - loess_filt <- loess( - ensmean ~ day, data, span = loess_span, degree = detrend_order - ) - output <- predict(loess_filt) - } else { - output <- rep(NA, length(clim)) - } - return(output) -} -# ------------------------------- -## Computing the WR_obs -# ------------------------------- - -clim_obs <- array( - apply(reference_data, c(1, 2, 3, 5, 6), mean), - dim = dim(reference_data)[-4] -) -print(sea) -print(data_type) -save(clim_obs, clim_obs, file = file.path(plot_dir, "ClimObs.RData")) -print(data_type == "day" | !is.na(sea)) -if (data_type == "day" | !is.na(sea)) { -clim_obs <- aperm( - apply( - clim_obs, - c(1 : length(dim(clim_obs)))[-which(names(dim(clim_obs)) == "sdate")], - Loess, - loess_span = 1), - c(2, 3, 1, 4, 5) -) -} - -anom_obs <- Ano(reference_data, clim_obs) -print(head(anom_obs[which(anom_obs > 0)])) -print(dim(anom_obs)) -WR_obs <- WeatherRegime( #nolint - data = anom_obs, - EOFS = EOFS, - lat = lat, - lon = lon, - ncenters = ncenters, - method = cluster_method -) -print(str(WR_obs)) -print(cluster_method) -if (cluster_method != "kmeans" & data_type == "day") { - WR_obs$composite <- aperm(WR_obs$composite, c(2, 1, 3, 4, 5)) -} -names(dim(WR_obs$composite)) <- c("lat", "lon", "Cluster", "Mod", "exp") -names(dim(WR_obs$cluster))[1] <- "Evolution" - print(dim(WR_obs$composite)) -# ------------------------------- -## Plotting the WR_obs output -# ------------------------------- - -if (cluster_method == "kmeans") { - clim_frequencies <- paste0( - "freq = ", - round(Mean1Dim(WR_obs$frequency, 1), 1), "%") -} else { - freq_clus <- NULL - for (i in 1 : ncenters) { - freq_clus <- c(freq_clus, round(sum(WR_obs$cluster[, 1, 1] == i) / - length(WR_obs$cluster[ , 1, 1]) * 100, 1)) - } - clim_frequencies <- paste0( - "freq = ", freq_clus, "%") - WR_obs$frequency <- freq_clus -} - -cosa <- aperm(drop(WR_obs$composite), c(3, 1, 2)) -print(dim(cosa)) -lim <- max(abs(cosa / 100), na.rm = TRUE) -print(lim) -if (lim < 1) { - x <- floor(log10(lim)) + 1 - lim <- 10 ^ x -} else { - lim <- ceiling(lim) -} - -if (region == "Polar") { - filepng1 <- paste0( - plot_dir, "/", frequency, "-", var0, "_observed_regimes.png") - title <- paste0(paste0("Cluster ", 1 : 4), " (", clim_frequencies, " )") - PlotLayout( #nolint - PlotStereoMap, #nolint - c(2, 3), - lon = lon, - lat = lat, - var = cosa / 100, - titles = title, - filled.continents = FALSE, - axelab = FALSE, - draw_separators = TRUE, - subsampleg = 1, - brks = seq(-1 * lim, lim, by = lim / 10), - fileout = filepng1) -} else { - filepng1 <- paste0( - plot_dir, "/", frequency, "-", var0, "_observed_regimes.png") - title <- paste0(paste0("Cluster ", 1 : 4), " (", clim_frequencies, " )") - PlotLayout( #nolint - PlotEquiMap, #nolint - c(2, 3), - lon = lon, - lat = lat, - var = cosa / 100, - titles = title, - filled.continents = FALSE, - axelab = FALSE, - draw_separators = TRUE, - subsampleg = 1, - brks = seq(-1 * lim, lim, by = lim / 10), - fileout = filepng1) -} - -# ------------------------------- -## Save the WR_obs output to ncdf -# ------------------------------- -time <- dates_historical -time <- julian(time, origin = as.POSIXct("1970-01-01")) -attributes(time) <- NULL -dim(time) <- c(time = length(time)) -metadata <- list(time = list( - standard_name = "time", long_name = "time", - units = "days since 1970-01-01 00:00:00", prec = "double", - dim = list(list(name = "time", unlim = 
FALSE)))) -attr(time, "variables") <- metadata - -attributes(lon) <- NULL -attributes(lat) <- NULL -dim(lon) <- c(lon = length(lon)) -dim(lat) <- c(lat = length(lat)) -metadata <- list(variable = list(dim = list(list(name = "time", - unlim = FALSE)))) -dim(WR_obs$frequency) <- c(frequency = length(WR_obs$frequency)) -dim(WR_obs$pvalue) <- c(pvalue = length(WR_obs$pvalue)) -dim(WR_obs$cluster) <- c(cluster = length(WR_obs$cluster)) -print("dimensions") -print(dim(WR_obs$composite)) -print(dim(WR_obs$pvalue)) -print(dim(WR_obs$cluster)) -print(dim(WR_obs$frequency)) -print(dim(lat)) -print(cluster_method) -if (cluster_method != "kmeans") { -print("IN") - names(dim(WR_obs$composite))[c(1, 2)] <- c("lon", "lat") -print(dim(WR_obs$composite)) -} -variable_list <- list( - variable = WR_obs$composite, - pvalue = WR_obs$pvalue, - cluster = WR_obs$cluster, - frequency = WR_obs$frequency, - lat = lat, - lon = lon, - time = time) - -names(variable_list)[1] <- var0 -attributes(variable_list) <- NULL -filencdf1 <- paste0( - work_dir, "/", var0, "_", frequency, "_WR_obs_", model_names, "_", - start_projection, "_", end_projection, "_", start_historical, "_", - end_historical, ".nc") - -ArrayToNetCDF( #nolint - variable_list, filencdf1) - # Set provenance for output files - xprov <- list(ancestors = list(fullpath_filenames[reference_files]), - authors = list("torr_ve", "fuck_ne","cort_ni", "guem_vi", - "hunt_al", "manu_ni"), - projects = list("c3s-magic"), - caption = "Observed modes of variability", - statistics = list("eof"), - ncenters = params$ncenters, - cluster_method = cluster_method, - EOFS = params$EOFS, - frequency = params$frequency, - detrend_order = params$detrend_order, - realms = list("atmos"), - themes = list("phys"), - plot_file = filepng1) - - provenance[[filencdf1]] <- xprov -# --------------------------- -# --------------------------- -# Reading and formating -# --------------------------- -proj_nc <- nc_open(fullpath_filenames[projection_files]) -projection_data <- ncvar_get(proj_nc, var0) -names(dim(projection_data)) <- rev(names(proj_nc$dim))[-1] -time <- ncvar_get(proj_nc, "time") -start_date <- as.POSIXct(substr(ncatt_get(proj_nc, "time", - "units")$value, 11, 29)) -nc_close(proj_nc) - -dates_projection <- seq(start_projection, end_projection, data_type) - -print(calendario) -if (dim(projection_data)[3] != length(dates_projection)) { - if ( - calendario == "365" | calendario == "365_days" | - calendario == "365_day" | calendario == "noleap" - ) { -dates_projection <- -dates_projection[-which(substr(dates_projection, 6, 10) == "02-29")]#nolint - } -} - - - -data <- as.vector(projection_data) -dim(projection_data) <- c( - model = 1, - var = 1, - lon = length(lon), - lat = length(lat), - time = length(time) -) -print(dim(projection_data)) -projection_data <- aperm(projection_data, c(1, 2, 5, 3, 4)) -attr(projection_data, "Variables")$dat1$time <- dates_projection -names(dim(projection_data)) <- c("model", "var", "time", "lon", "lat") -time_dimension <- which(names(dim(projection_data)) == "time") - -# --------------------------- -# Selecting the period -# --------------------------- -time_dim <- which(names(dim(projection_data)) == "time") - -if (!is.na(mes)) { - print("MONTHLY") - dims <- dim(projection_data) - ind <- which(as.numeric(substr(dates_projection, 6, 7)) == mes) - years <- unique(as.numeric(substr(dates_projection, 1, 4))) - projection_data <- projection_data[ , , ind , , ] #nolint - dims <- append( - dims, - c(length(ind) / length(years), length(years)), - after = 
time_dim) -} else if (!is.na(sea)) { - projection_data <- SeasonSelect( #nolint - projection_data, - season = frequency, - dates = dates_projection, - calendar = calendario - ) - time <- projection_data$dates - years <- unique(as.numeric(substr(time, 1, 4))) - projection_data <- projection_data$data - projection_data <- InsertDim(projection_data, posdim = 1, lendim = 1)#nolint - projection_data <- InsertDim(projection_data, posdim = 1, lendim = 1)#nolint - - names(dim(projection_data))[c(1, 2)] <- c("model", "var") - dims <- dim(projection_data) - dims <- append( - dims, - c(length(time) / length(years), length(years)), - after = time_dim - ) -} -dims <- dims[-time_dim] -dim(projection_data) <- dims -names(dim(projection_data))[c(time_dim, time_dim + 1)] <- c("sdate", "ftime") - -clim_ref <- array( - apply(projection_data, c(1, 2, 3, 5, 6), mean), - dim = dim(projection_data)[-4] -) - - -if (data_type == "day" | !is.na(sea)) { -clim_ref <- aperm( - apply( - clim_ref, - c(1 : length(dim(clim_ref)))[-which(names(dim(clim_ref)) == "sdate")], - Loess, - loess_span = 1), - c(2, 3, 1, 4, 5)) -} -print(dim(clim_ref)) -print("NOHERE") -anom_exp <- Ano(projection_data, clim_ref) -print(dim(anom_exp)) -reference <- drop(WR_obs$composite) -print(dim(reference)) -if (cluster_method == "kmeans") { - names(dim(reference)) <- c("lat", "lon", "nclust") - - reference <- aperm(reference, c(3, 2, 1)) - names(dim(reference)) <- c("nclust", "lon", "lat") -} else { - names(dim(reference)) <- c("lon", "lat", "nclust") -} -print(head(reference[which(reference > 0)])) -print(head(anom_exp[which(anom_exp > 0)])) - -WR_exp <- RegimesAssign( #nolint - var_ano = anom_exp, ref_maps = reference, lats = lat, method = "distance" -) - - -# --------------------------- -# Plotting WR projection: -# --------------------------- - -if (cluster_method == "kmeans") { - cosa <- aperm(WR_exp$composite, c(3, 2, 1)) - names(dim(WR_exp$composite))[3] <- "nclust" -} else { - names(dim(WR_exp$composite))[3] <- "nclust" -} -print("AAET") -print(dim(cosa)) -print(dim(WR_exp$composite)) -lim <- max(abs(cosa / 100), na.rm = TRUE) -if (lim < 1) { - x <- floor(log10(lim)) + 1 - lim <- 10 ^ x -} else { - lim <- ceiling(lim) -} -if (region == "Polar") { - filepng2 <- paste0( - plot_dir, "/", frequency, "-", var0, "_predicted_regimes.png") - title <- paste0(paste0("Cluster ", 1 : dim(cosa)[1], " (", - paste0("freq = ", round(WR_exp$frequency, 1), "%"), " )")) - PlotLayout( #nolint - PlotStereoMap, #nolint - c(2, 3), - lon = lon, - lat = lat, - var = cosa / 100, - titles = title, - filled.continents = FALSE, - draw_separators = TRUE, subsampleg = 1, - brks = seq(-1 * lim, lim, by = lim / 10), - fileout = filepng2) -} else { - filepng2 <- paste0( - plot_dir, "/", frequency, "-", var0, "_predicted_regimes.png") - title <- paste0(paste0("Cluster ", 1 : dim(cosa)[1], " (", - paste0("freq = ", round(WR_exp$frequency, 1), "%"), " )")) - PlotLayout( #nolint - PlotEquiMap, #nolint - c(2, 3), - lon = lon, - lat = lat, - var = cosa / 100, - titles = title, - filled.continents = FALSE, - axelab = FALSE, draw_separators = TRUE, subsampleg = 1, - brks = seq(-1 * lim, lim, by = lim / 10), - fileout = filepng2) -} - - -# ------------------------------- -## Save the WR_exp output to ncdf -# ------------------------------- -time <- dates_projection -time <- julian(time, origin = as.POSIXct("1970-01-01")) -attributes(time) <- NULL -dim(time) <- c(time = length(time)) -metadata <- list(time = list(standard_name = "time", long_name = "time", - units = "days since 
1970-01-01 00:00:00", prec = "double", - dim = list(list(name = "time", unlim = FALSE)))) -attr(time, "variables") <- metadata - -attributes(lon) <- NULL -attributes(lat) <- NULL -dim(lon) <- c(lon = length(lon)) -dim(lat) <- c(lat = length(lat)) -metadata <- list(variable = list(dim = list(list(name = "time", - unlim = FALSE)))) - -dim(WR_exp$frequency) <- c(frequency = length(WR_exp$frequency)) -dim(WR_exp$pvalue) <- c(pvalue = length(WR_exp$pvalue)) -dim(WR_exp$cluster) <- c(cluster = length(WR_exp$cluster)) - -variable_list <- list( - variable = WR_exp$composite, - pvalue = WR_exp$pvalue, - cluster = WR_exp$cluster, - frequency = WR_exp$frequency, - lat = lat, - lon = lon, - time = time) -names(variable_list)[1] <- var0 - -attributes(variable_list) <- NULL -filencdf2 <- paste0( - work_dir, "/", var0, "_", frequency, "_WR_exp_", model_names, "_", - start_projection, "_", end_projection, "_", start_historical, "_", - end_historical, ".nc") -ArrayToNetCDF(variable_list, filencdf2) #nolint - - # Set provenance for output files - xprov <- list(ancestors = list(filencdf1, - fullpath_filenames[projection_files]), - authors = list("torr_ve", "fuck_ne","cort_ni", "guem_vi", - "hunt_al", "manu_ni"), - projects = list("c3s-magic"), - caption = "Predicted modes of variability", - statistics = list("other"), - ncenters = params$ncenters, - cluster_method = cluster_method, - EOFS = params$EOFS, - frequency = params$frequency, - detrend_order = params$detrend_order, - realms = list("atmos"), - themes = list("phys"), - plot_file = filepng2) - - provenance[[filencdf2]] <- xprov -# --------------------------- -# Computing the RMSE: -# --------------------------- - if (cluster_method != "kmeans") { - print("DOME") - cosa <- aperm(cosa, c(2, 3, 1)) - reference <- aperm(reference, c(3, 1, 2)) -} else { - cosa <- aperm(WR_exp$composite, c(2, 1, 3)) -} -print("JEH") -print(dim(cosa)) -print(dim(reference)) -rmse <- NULL -for (i in 1 : ncenters) { - for (j in 1 : ncenters) { - rmse <- c(rmse, sqrt(mean( (reference[i, , ] - cosa[, , j]) ^ 2, #nolint - na.rm = T))) - } -} -dim(rmse) <- c(ncenters, ncenters) -print(rmse) - -dimpattern <- ncdim_def( - name = "pattern", - units = "undim", - vals = 1 : ncenters, - longname = "Pattern" -) -title <- paste0( - "Root Mean Squared Error between observed and ", - "future projected patterns") -defrmse <- ncvar_def( - name = "rmse", - units = "undim", - dim = list(observed = dimpattern, experiment = dimpattern), - longname = title) -filencdf3 <- paste0(work_dir, "/", var0, "_", frequency, "_rmse_", - model_names, "_", start_projection, "_", - end_projection, "_", start_historical, "_", - end_historical, ".nc") -file <- nc_create(filencdf3, list(defrmse)) -ncvar_put(file, defrmse, rmse) - -nc_close(file) - -colnames(rmse) <- paste("Obs", 1 : ncenters) -rownames(rmse) <- paste("Pre", 1 : ncenters) -filepng3 <- paste0(file.path(plot_dir, "Table_"), var0, "_", frequency, - "_rmse_", model_names, - "_", start_projection, "_", end_projection, "_", start_historical, - "_", end_historical, ".png") -png(filepng3, height = 6, width = 18, units = "cm", res = 100) -grid.table(round(rmse, 2)) -dev.off() - - # Set provenance for output files - xprov <- list(ancestors = list(filencdf1, filencdf2), - authors = list("torr_ve", "fuck_ne","cort_ni", "guem_vi", - "hunt_al", "manu_ni"), - projects = list("c3s-magic"), - caption = title, - statistics = list("rmsd"), - ncenters = params$ncenters, - cluster_method = cluster_method, - EOFS = params$EOFS, - frequency = params$frequency, - 
detrend_order = params$detrend_order, - realms = list("atmos"), - themes = list("phys"), - plot_file = filepng3) - - provenance[[filencdf3]] <- xprov - -# Write provenance to file -write_yaml(provenance, provenance_file) diff --git a/esmvaltool/diag_scripts/mder/absolute_correlation.ncl b/esmvaltool/diag_scripts/mder/absolute_correlation.ncl new file mode 100644 index 0000000000..682d6086e7 --- /dev/null +++ b/esmvaltool/diag_scripts/mder/absolute_correlation.ncl @@ -0,0 +1,492 @@ +; ############################################################################# +; mder/absolute_correlation.ncl +; ############################################################################# +; +; Description: +; This routine calculates absolute correlation coefficients between all +; diagnostics, calculated for a present-day period. +; +; Calls one plotscript: +; + Plots the absolute correlation coefficient between the variable of +; interest and present-day diagnostic. +; +; Required diag_script_info attributes: +; p_time: start years for future projections. +; p_step: time range for future projections. +; scal_time: Time range for base period used in anomaly calculations when +; "calc_type = 'trend'". +; time_oper: operation in time_operation function. +; time_opt: option in time_operation function. +; calc_type: trend/pos/int. +; domain: domain of the diagnostic (used for provenance tracking). +; +; Optional diag_script_info attributes: +; average_ens: average over ensemble members of the same model (default: +; False). +; region: the region to be averaged (required for multidimensional input). +; area_oper: operation in area_operation function (required for +; multidimensional input). +; +; Optional variable_info attributes: +; plot_units: convert variable to these units. +; +; Caveats: +; "warning: in unique_labels_min (diag_scripts/shared/plot/style.ncl), Add +; more attributes to prio to make labels unique! Continuing with non-unique +; labels" is normal when using "average_ens = true". +; +; Modification history: +; 20191121-schlund_manuel: added new provenance tracking. +; 20180726-schlund_manuel: ported to v2.0. +; 201307??-wenzel_sabrina: written. +; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/scaling.ncl" +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/set_operators.ncl" +load "$diag_scripts/shared/latlon.ncl" + +load "$diag_scripts/shared/plot/mder.ncl" +load "$diag_scripts/shared/plot/style.ncl" + +load "$diag_scripts/shared/mder.ncl" + + +begin + + enter_msg(DIAG_SCRIPT, "") + + ; Variable + VAR0 = variable_info[0] + var0 = VAR0@short_name + DIM_VAR = ListCount(variable_info) + if (DIM_VAR .gt. 1) then + error_msg("w", DIAG_SCRIPT, "", "this diagnostic supports only one " + \ + "variable, processing " + VAR0@short_name) + end if + + ; Input data + INFO0 = select_metadata_by_name(input_file_info, var0) + ALL_DATASETS = metadata_att_as_array(INFO0, "dataset") + + ; Rank of input data + dummy_data = read_data(INFO0[0]) + rank = dimsizes(dimsizes(dummy_data)) + if (rank .gt. 1) then + is_multi_dim = True + else + is_multi_dim = False + end if + delete([/dummy_data, rank/]) + + ; CMIP5 models + atts := True + atts@project = "CMIP5" + CMIP5 = select_metadata_by_atts(INFO0, atts) + exps = metadata_att_as_array(CMIP5, "exp") + future_exp = get_unique_values(exps(ind(exps .ne. "historical"))) + if (dimsizes(future_exp) .ne. 
1) then + error_msg("f", DIAG_SCRIPT, "", "this diagnostic supports only one " + \ + "projection experiment, got " + future_exp) + end if + + ; Historical experiment + atts@exp = "historical" + HIST = select_metadata_by_atts(CMIP5, atts) + hist_datasets = metadata_att_as_array(HIST, "dataset") + + ; Future experiment + atts@exp = future_exp + FUT = select_metadata_by_atts(CMIP5, atts) + fut_datasets = metadata_att_as_array(FUT, "dataset") + fail = False + if (dimsizes(hist_datasets) .ne. dimsizes(fut_datasets)) then + fail = True + else + if (all(hist_datasets .ne. fut_datasets)) then + fail = True + end if + end if + if (fail) then + error_msg("f", DIAG_SCRIPT, "", "historical and future experiment do " + \ + "not cover the same models, got " + dimsizes(hist_datasets) + \ + " historical models and " + dimsizes(fut_datasets) + \ + " projection models") + else + MODELS = fut_datasets + end if + + ; Ensemble averaging (if desired) + avgens = False + if (isatt(diag_script_info, "average_ens")) then + avgens = diag_script_info@average_ens + end if + if (avgens) then + MODELS := get_unique_values(MODELS) + else + MODELS := unique_labels_min(FUT, (/"dataset", "ensemble"/)) + end if + DIM_MOD = dimsizes(MODELS) + + ; Directories + work_dir = config_user_info@work_dir + plot_dir = config_user_info@plot_dir + system("mkdir -p " + work_dir) + system("mkdir -p " + config_user_info@plot_dir) + + log_info("++++++++++++++++++++++++++++++++++++++++++") + log_info(DIAG_SCRIPT + " (var: " + var0 + ")") + log_info("++++++++++++++++++++++++++++++++++++++++++") + +end + +begin + + ; --------------------------------------------------------------------------- + ; Read recipe and config data + ; --------------------------------------------------------------------------- + + ; Plot file type + file_type = config_user_info@output_file_type + if (ismissing(file_type)) then + file_type = "ps" + end if + + ; Required attributes + req_atts = (/"p_time", "p_step", "scal_time", "time_oper", "time_opt", \ + "calc_type", "domain"/) + exit_if_missing_atts(diag_script_info, req_atts) + p_time = diag_script_info@p_time + p_step = diag_script_info@p_step + scal_time = diag_script_info@scal_time + time_oper = diag_script_info@time_oper + time_opt = diag_script_info@time_opt + calc_type = diag_script_info@calc_type + dim_p_time = dimsizes(p_time) + + ; Provenance + AUTHORS = (/"wenzel_sabrina", "schlund_manuel"/) + DOMAIN = diag_script_info@domain + PLOT_TYPE = "errorbar" + STATISTICS = (/"corr"/) + REFERENCES = (/"wenzel16jclim"/) + + ; For multi-dimensional variables, 'region' and 'area_oper' are required + if (is_multi_dim) then + exit_if_missing_atts(diag_script_info, (/"region", "area_oper"/)) + area_oper = diag_script_info@area_oper + if (isstring(diag_script_info@region)) then + region = select_region(diag_script_info@region) + else if (dimsizes(diag_script_info@region .eq. 
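The region handling here accepts either a named region or a pair of latitudes; a two-element vector is completed to a latitude belt closed in longitude (0 to 360 degrees). A sketch of the same normalisation in R (named regions are resolved by select_region in the NCL):

normalise_region <- function(region) {
  if (is.numeric(region) && length(region) == 2) {
    return(c(lat0 = region[1], lat1 = region[2], lon0 = 0, lon1 = 360))
  }
  region  # named regions would be resolved by a lookup such as select_region
}
normalise_region(c(-30, 30))  # tropical latitude belt, all longitudes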
2)) then + region = (/diag_script_info@region(0), \ + diag_script_info@region(1), \ + 0.0, \ + 360.0/) + region@name = flt2string(region(0)) + "_-_" + flt2string(region(1)) + region@DM_tag = "DM_reg" + else + error_msg("f", DIAG_SCRIPT, "", diag_script_info@region + \ + " is an invalid region type") + end if + end if + end if + + ; Plot numbering + plot_num = integertochar(ispan(97, 122, 1)) + + ; Output arrays + xx = new((/dim_p_time, DIM_MOD/), float) + future_diag = new((/DIM_MOD/), float) + + ; --------------------------------------------------------------------------- + ; Read and preprocess data + ; --------------------------------------------------------------------------- + + ; Call the variable of interest and merge to one timeline + ANCESTORS = new(DIM_MOD * 2, string) + do imod = 0, DIM_MOD - 1 + log_info(MODELS(imod)) + + ; Average over ensemble members if desired + if (avgens) then + atts := True + atts@dataset = MODELS(imod) + info_hist := select_metadata_by_atts(HIST, atts) + info_fut := select_metadata_by_atts(FUT, atts) + else + info_hist := NewList("lifo") + info_fut := NewList("lifo") + ListAppend(info_hist, HIST[imod]) + ListAppend(info_fut, FUT[imod]) + end if + + ; Get data + data_hist = get_average(info_hist) + data_fut = get_average(info_fut) + info_hist := info_hist[0] + info_fut := info_fut[0] + + ; Get filenames + ANCESTORS(2 * imod) = info_hist@filename + ANCESTORS(2 * imod + 1) = info_fut@filename + + ; Process multi-dimensional variables + if (is_multi_dim) then + data_hist := area_operations(data_hist, region(0), region(1), \ + region(2), region(3), area_oper, True) + data_fut := area_operations(data_fut, region(0), region(1), \ + region(2), region(3), area_oper, True) + end if + + ; Append future array to historical array + data_y = array_append_record(data_hist, data_fut, 0) + delete([/data_hist, data_fut/]) + + ; Convert units if desired + if (isatt(VAR0, "plot_units")) then + data_y := convert_units(data_y, VAR0@plot_units) + VAR0@units = VAR0@plot_units + end if + + ; Calculate future diagnostic for diagnostic scatter plot + future_temp = time_operations(data_y, p_time(0), \ + (/p_time(dim_p_time - 1) + p_step/), \ + time_oper, time_opt, True) + if (time_oper .eq. "extract") then + future_diag(imod) = tofloat(avg(time_operations( \ + future_temp, p_time(0), (/p_time(dim_p_time - 1) + p_step/), \ + "average", "yearly", True))) + else + future_diag(imod) = tofloat(avg(future_temp)) + end if + + ; Adapt scal_time(1) (necessary of larger than end_year) + if (scal_time(1) .gt. info_hist@end_year) then + scal_time(1) = info_hist@end_year + end if + + ; Calculate average over starting period for trend calculation + trend_base = time_operations(data_y, scal_time(0), scal_time(1), \ + time_oper, time_opt, True) + if (time_oper .eq. "extract") then + trend_base := time_operations(trend_base, scal_time(0), scal_time(1), \ + "average", "yearly", True) + end if + trend_base = tofloat(avg(trend_base)) + + ; Loop over different target periods + do pt = 0, dim_p_time - 1 + ; Average over var time series + y_mean = time_operations(data_y, p_time(pt), p_time(pt) + p_step, \ + time_oper, time_opt, True) + if (time_oper .eq. "extract") then + y_mean := time_operations(y_mean, p_time(pt), \ + p_time(pt) + p_step, "average", \ + "yearly", True) + end if + + ; Perform calculation type + if (calc_type .eq. "trend") then + xx(pt, imod) = tofloat(avg(y_mean)) - trend_base + else if (calc_type .eq. "pos") then + xx(pt, imod) = tofloat(avg(y_mean)) + else if (calc_type .eq. 
"int") then + integrated_vals = y_mean + do tt = 0, dimsizes(y_mean) - 2 + integrated_vals(tt + 1) = integrated_vals(tt) + y_mean(tt + 1) + end do + xx(pt, imod) = tofloat(avg(integrated_vals)) + delete([/integrated_vals, tt/]) + else + error_msg("f", DIAG_SCRIPT, "", "calc_type " + calc_type + \ + " not supported") + end if + end if + end if + delete(y_mean) + end do + delete([/data_y, trend_base/]) + end do + + ; Consider ensemble averaging + if (avgens) then + INFO0 := get_unique_items(INFO0, "dataset") + HIST := get_unique_items(HIST, "dataset") + FUT := get_unique_items(FUT, "dataset") + end if + + ; Adapt units for "int" calculation + if (calc_type .eq. "int") then + VAR0@units = integrate_time_units(VAR0@units, 0) + end if + + ; --------------------------------------------------------------------------- + ; Call diagnostics + ; --------------------------------------------------------------------------- + + ; Get input directory (using ancestor diagnostics) + input_files := diag_script_info@input_files + var_files = str_match(input_files, "_info.ncl") + do ivar = 0, dimsizes(var_files) - 1 + input_files := input_files(ind(input_files .ne. var_files(ivar))) + end do + input_dirs = input_files + if (dimsizes(input_dirs) .lt. 1) then + error_msg("f", DIAG_SCRIPT, "", "no diagnositcs given (use ancestors " + \ + "key in recipe") + end if + + ; Get diagnostics + file_list = new(1, string) + do idx_dir = 0, dimsizes(input_dirs) - 1 + file_list := array_append_record(\ + file_list, \ + systemfunc("ls " + input_dirs(idx_dir) + "/*.nc"), \ + 0) + end do + file_list := file_list(1:) + ANCESTORS := array_append_record(ANCESTORS, file_list, 0) + + ; Create diagnostic arrays + DIM_DIAG = dimsizes(file_list) + diag_all = new((/DIM_DIAG, DIM_MOD/), float) + wdiag = new((/DIM_DIAG/), string) + + ; Loop over files in the list and append data + do idiag = 0, DIM_DIAG - 1 + log_info("Reading " + file_list(idiag)) + data_temp = ncdf_read(file_list(idiag), "diag") + + ; Loop over datasets and save diagnostic data + do imod = 0, DIM_MOD - 1 + do idat = 0, dimsizes(data_temp&datasets) - 1 + + ; Add data if datasets match + if (MODELS(imod) .eq. data_temp&datasets(idat)) then + diag_all(idiag, imod) = data_temp(0, idat, 0) + if (isdim(data_temp, "diagnostics")) then + wdiag(idiag) = tostring(data_temp&diagnostics(0)) + else + error_msg("f", DIAG_SCRIPT, "", "Cannot read diagnostic name " + \ + "from data " + data_temp + ", need dimension " + \ + "'diagnostics'") + end if + end if + + end do + end do + delete(data_temp) + end do + diag_all!0 = "diag" + diag_all!1 = "dataset" + diag_all&diag = wdiag + diag_all&dataset = MODELS + + ; Create scatterplots for each diagnostic + res_scat = True + future_diag@future_exp = future_exp + var0@units = VAR0@units + scatterplots = diag_scatter(res_scat, diag_all, future_diag, var0, FUT) + + ; --------------------------------------------------------------------------- + ; Correlation calculations + ; --------------------------------------------------------------------------- + + ; New arrays + corr = new((/dim_p_time, DIM_DIAG/), float) + diag_sign = new((/dim_p_time, DIM_DIAG/), string) + + ; Loop over different target periods + do pt = 0, dim_p_time - 1 + corr_sig = new((/2, DIM_DIAG/), float) + r_sig0 = new((/DIM_DIAG/), float) + r_sig1 = new((/DIM_DIAG/), float) + r_wdiag = new((/DIM_DIAG/), string) + + ; Iterate over diagnostics + do idiag = 0, DIM_DIAG - 1 + hlp = diag_all(idiag, :) + + ; Only use models with data (filter missing values) + good = ind(hlp .ne. 
+      ; Calculate correlations
+      corr(pt, idiag) = escorc(hlp(good), xx(pt, good))
+      rc = regline(hlp(good), xx(pt, good))
+      df = rc@nptxy - 2
+      z = 0.5 * log((1.0 + abs(corr(pt, idiag))) / \
+                    (1.0 - abs(corr(pt, idiag))))
+      corr_sig(0, idiag) = tanh(z - 1.96 / (sqrt(ngood - 3.0)))
+      corr_sig(1, idiag) = tanh(z + 1.96 / (sqrt(ngood - 3.0)))
+
+      t0 = abs(corr(pt, idiag)) / sqrt((1 - corr(pt, idiag) ^ 2) / \
+                                       (ngood - 2))
+      sign0 = 1.0 - betainc(df / (df + rc@tval ^ 2), df / 2.0, 0.5)
+
+      log_info(wdiag(idiag) + " corr: " + corr(pt, idiag) + \
+               " datasets: " + ngood + " signif: " + sign0(0))
+      delete([/hlp, good, ngood, rc, df/])
+    end do
+
+    ; Sort abs(corr. coef.) to decreasing order in array
+    acorr = abs(corr(pt, :))
+    qsort(acorr)
+    acorr = acorr(::-1)
+    do idiag = 0, DIM_DIAG - 1
+      idx_diag = ind(abs(corr(pt, :)) .eq. acorr(idiag))
+      r_wdiag(idiag) = wdiag(idx_diag)
+      r_sig0(idiag) = corr_sig(0, idx_diag)
+      r_sig1(idiag) = corr_sig(1, idx_diag)
+      diag_sign(pt, idiag) = where(corr(pt, idx_diag) .gt. 0.0, "red", "blue")
+      delete(idx_diag)
+    end do
+    backup_wdiag = wdiag
+    delete([/wdiag, corr_sig/])
+    wdiag = r_wdiag
+    corr_sig = new((/2, DIM_DIAG/), float)
+    corr_sig(0, :) = r_sig0
+    corr_sig(1, :) = r_sig1
+    delete([/r_wdiag, r_sig0, r_sig1/])
+    log_info(wdiag + ": " + acorr)
+    acorr!0 = "diag"
+    acorr&diag = wdiag
+
+    ; -------------------------------------------------------------------------
+    ; Plot
+    ; -------------------------------------------------------------------------
+
+    filename = var0 + "-" + calc_type + "_" + future_exp + "_" + \
+      p_time(pt) + "-" + (/p_time(pt) + p_step/)
+    wks = gsn_open_wks(file_type, plot_dir + filename)
+    MainStg = plot_num(pt) + ") " + p_time(pt) + "-" + \
+      (/p_time(pt) + p_step/) + " (" + future_exp + ")"
+    YStg = "Absolute Correlation Coefficient"
+    plot = error_bar_plot(acorr, corr_sig, wdiag, diag_sign(pt, :), \
+                          MainStg, YStg, wks, "draw")
+
+    ; Write ncdf file
+    new_path = work_dir + filename + ".nc"
+    acorr@var = var0
+    acorr@diag_script = DIAG_SCRIPT
+    acorr@ncdf = new_path
+    outfile_acorr = ncdf_write(acorr, new_path)
+
+    ; Provenance tracking
+    plot_path = plot_dir + filename + "." + file_type
+    caption = "Absolute values of the correlation coefficient between " + \
+      var0 + " and multiple present-day diagnostics."
+    log_provenance(outfile_acorr, plot_path, caption, STATISTICS, \
+                   DOMAIN, PLOT_TYPE, AUTHORS, REFERENCES, ANCESTORS)
+
+    ; Restore old order
+    wdiag = backup_wdiag
+    delete([/corr_sig, acorr, backup_wdiag/])
+  end do
+
+  leave_msg(DIAG_SCRIPT, "")
+
+end
diff --git a/esmvaltool/diag_scripts/mder/regression_stepwise.ncl b/esmvaltool/diag_scripts/mder/regression_stepwise.ncl
new file mode 100644
index 0000000000..6abfb48f06
--- /dev/null
+++ b/esmvaltool/diag_scripts/mder/regression_stepwise.ncl
@@ -0,0 +1,1175 @@
+; #############################################################################
+; mder/regression_stepwise.ncl
+; #############################################################################
+;
+; Description:
+;     This routine computes weights for a set of climate models following the
+;     multiple diagnostic ensemble regression (MDER) described by Karpechko et
+;     al. 2013. The script calls a variable of interest from different models
+;     (which shall be weighted) and additionally several diagnostics which are
+;     specified in the recipe for computing the weights.
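+;     In short, MDER fits the linear model y = a0 + a1 * x1 + a2 * x2 + ...,
+;     where y is the projected future quantity and the x's are present-day
+;     diagnostics selected by stepwise regression; evaluating the fit with
+;     the observed diagnostics then constrains the projection and yields the
+;     model weights.
+;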
+; Calls three plotscripts:
+;     + Timeseries of the variable of interest including the multi-model
+;       mean, the weighted and unweighted model ensemble.
+;     + Scatter plot of the climatological mean of the variable of interest
+;       and the result of the MDER.
+;     + Boxplot of RMSE in a pseudo-reality out-of-sample testing.
+;
+; Required diag_script_info attributes:
+;     p_time: start years for future projections.
+;     p_step: time range for future projections.
+;     scal_time: Time range for base period used in anomaly calculations
+;         when "calc_type = 'trend'".
+;     time_oper: select for operation in time_operation function.
+;     time_opt: select for option in time_operation function.
+;     calc_type: trend/pos/int.
+;     domain: domain of the diagnostic (used for provenance tracking).
+;
+; Optional diag_script_info attributes:
+;     average_ens: average over ensemble members of the same model (default:
+;         False).
+;     smooth: smooths the time series with a 1-2-1 filter.
+;     iter: number of iterations for smoothing (required when "smooth" is
+;         set).
+;     cross_validation_mode: perform cross validation.
+;     region: the region to be averaged (required for multidimensional
+;         input).
+;     area_oper: operation in area_operation function (required for
+;         multidimensional input).
+;
+; Optional variable_info attributes:
+;     plot_units: convert variable to these units.
+;
+; Caveats:
+;     "warning: in unique_labels_min (diag_scripts/shared/plot/style.ncl),
+;     Add more attributes to prio to make labels unique! Continuing with
+;     non-unique labels" is normal when using "average_ens = true".
+;
+; Modification history:
+;     20191121-schlund_manuel: added new provenance tracking.
+;     20180806-schlund_manuel: ported to v2.0.
+;     201303??-wenzel_sabrina: written.
+;
+; #############################################################################
+
+load "$diag_scripts/../interface_scripts/interface.ncl"
+
+load "$diag_scripts/shared/scaling.ncl"
+load "$diag_scripts/shared/statistics.ncl"
+load "$diag_scripts/shared/set_operators.ncl"
+load "$diag_scripts/shared/latlon.ncl"
+
+load "$diag_scripts/shared/plot/aux_plotting.ncl"
+load "$diag_scripts/shared/plot/mder.ncl"
+load "$diag_scripts/shared/plot/style.ncl"
+
+load "$diag_scripts/shared/mder.ncl"
+
+
+begin
+
+  enter_msg(DIAG_SCRIPT, "")
+
+  ; Variable
+  VAR0 = variable_info[0]
+  var0 = VAR0@short_name
+  DIM_VAR = ListCount(variable_info)
+  if (DIM_VAR .gt. 1) then
+    error_msg("w", DIAG_SCRIPT, "", "this diagnostic supports only one " + \
+              "variable, processing " + VAR0@short_name)
+  end if
+
+  ; Input data
+  INFO0 = select_metadata_by_name(input_file_info, var0)
+  ALL_DATASETS = metadata_att_as_array(INFO0, "dataset")
+
+  ; Rank of input data
+  dummy_data = read_data(INFO0[0])
+  rank = dimsizes(dimsizes(dummy_data))
+  if (rank .gt. 1) then
+    is_multi_dim = True
+  else
+    is_multi_dim = False
+  end if
+  delete([/dummy_data, rank/])
+
+  ; OBS
+  OBS = get_obs_list(INFO0)
+  if (ListCount(OBS) .lt. 1) then
+    error_msg("f", DIAG_SCRIPT, "", "this diagnostic needs at least one " + \
+              "OBS dataset")
+  end if
+  OBS_DATASETS = metadata_att_as_array(OBS, "dataset")
+  DIM_OBS = dimsizes(OBS_DATASETS)
+
+  ; CMIP5 models
+  atts := True
+  atts@project = "CMIP5"
+  CMIP5 = select_metadata_by_atts(INFO0, atts)
+  exps = metadata_att_as_array(CMIP5, "exp")
+  future_exp = get_unique_values(exps(ind(exps .ne. "historical")))
+  if (dimsizes(future_exp) .ne. 1) then
+    error_msg("f", DIAG_SCRIPT, "", "this diagnostic supports only one " + \
+              "projection experiment, got " + future_exp)
+  end if
+
+  ; Historical experiment
+  atts@exp = "historical"
+  HIST = select_metadata_by_atts(CMIP5, atts)
+  hist_datasets = metadata_att_as_array(HIST, "dataset")
+
+  ; Future experiment
+  atts@exp = future_exp
+  FUT = select_metadata_by_atts(CMIP5, atts)
+  fut_datasets = metadata_att_as_array(FUT, "dataset")
+  fail = False
+  if (dimsizes(hist_datasets) .ne. dimsizes(fut_datasets)) then
+    fail = True
+  else
+    if (all(hist_datasets .ne. fut_datasets)) then
+      fail = True
+    end if
+  end if
+  if (fail) then
+    error_msg("f", DIAG_SCRIPT, "", "historical and future experiment do " + \
+              "not cover the same models, got " + dimsizes(hist_datasets) + \
+              " historical models and " + dimsizes(fut_datasets) + \
+              " projection models")
+  else
+    MODELS = fut_datasets
+  end if
+
+  ; Ensemble averaging (if desired)
+  avgens = False
+  if (isatt(diag_script_info, "average_ens")) then
+    avgens = diag_script_info@average_ens
+  end if
+  if (avgens) then
+    MODELS := get_unique_values(MODELS)
+  else
+    MODELS := unique_labels_min(FUT, (/"dataset", "ensemble"/))
+  end if
+  DIM_MOD = dimsizes(MODELS)
+
+  ; Directories
+  work_dir = config_user_info@work_dir
+  plot_dir = config_user_info@plot_dir
+  system("mkdir -p " + work_dir)
+  system("mkdir -p " + config_user_info@plot_dir)
+
+  log_info("++++++++++++++++++++++++++++++++++++++++++")
+  log_info(DIAG_SCRIPT + " (var: " + var0 + ")")
+  log_info("++++++++++++++++++++++++++++++++++++++++++")
+
+end
+
+begin
+
+  ; ---------------------------------------------------------------------------
+  ; Read recipe and config data
+  ; ---------------------------------------------------------------------------
+
+  ; Read years
+  min_year = 9999
+  max_year = -9999
+  do idat = 0, ListCount(CMIP5) - 1
+    if (CMIP5[idat]@start_year .lt. min_year) then
+      min_year = CMIP5[idat]@start_year
+    end if
+    if (CMIP5[idat]@end_year .gt. max_year) then
+      max_year = CMIP5[idat]@end_year
+    end if
+  end do
+  time_span = max_year - min_year + 1
+
+  ; Plot file type
+  file_type = config_user_info@output_file_type
+  if (ismissing(file_type)) then
+    file_type = "ps"
+  end if
+
+  ; Required attributes
+  req_atts = (/"p_time", "p_step", "scal_time", "time_oper", "time_opt", \
+               "calc_type", "domain"/)
+  exit_if_missing_atts(diag_script_info, req_atts)
+  p_time = diag_script_info@p_time
+  p_step = diag_script_info@p_step
+  scal_time = diag_script_info@scal_time
+  time_oper = diag_script_info@time_oper
+  time_opt = diag_script_info@time_opt
+  calc_type = diag_script_info@calc_type
+  dim_p_time = dimsizes(p_time)
+
+  ; Provenance
+  AUTHORS = (/"wenzel_sabrina", "schlund_manuel"/)
+  DOMAIN = diag_script_info@domain
+  STATISTICS = (/"mder"/)
+  REFERENCES = (/"wenzel16jclim"/)
+
+  ; For multi-dimensional variables, 'region' and 'area_oper' are required
+  if (is_multi_dim) then
+    exit_if_missing_atts(diag_script_info, (/"region", "area_oper"/))
+    area_oper = diag_script_info@area_oper
+    if (isstring(diag_script_info@region)) then
+      region = select_region(diag_script_info@region)
+    else if (dimsizes(diag_script_info@region) .eq. 2) then
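+      ; A two-element 'region' is interpreted as a latitude range; all
+      ; longitudes (0-360 degrees) are included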
+      region = (/diag_script_info@region(0), \
+                 diag_script_info@region(1), \
+                 0.0, \
+                 360.0/)
+      region@name = flt2string(region(0)) + "_-_" + flt2string(region(1))
+      region@DM_tag = "DM_reg"
+    else
+      error_msg("f", DIAG_SCRIPT, "", diag_script_info@region + \
+                " is an invalid region type")
+    end if
+    end if
+  end if
+
+  ; Optional attributes
+  smooth = False
+  if (isatt(diag_script_info, "smooth")) then
+    smooth = diag_script_info@smooth
+    if (isatt(diag_script_info, "iter")) then
+      iter = diag_script_info@iter
+    else
+      error_msg("f", DIAG_SCRIPT, "", "this diagnostic requires " + \
+                "attribute 'iter' in recipe when 'smooth' is set to True")
+    end if
+  end if
+  cross_validation_mode = False
+  if (isatt(diag_script_info, "cross_validation_mode")) then
+    cross_validation_mode = diag_script_info@cross_validation_mode
+  end if
+
+  ; Plot numbering
+  plot_num = integertochar(ispan(97, 122, 1))
+  filename = "CMIP5_" + var0 + \
+    "-" + calc_type + "_" + future_exp + "_" + (p_step + 1) + "ystep"
+
+  ; Output arrays
+  xx = new((/DIM_MOD, dimsizes(p_time)/), float)
+  xx_val = new((/DIM_MOD, dimsizes(p_time), 2/), float)
+  YY = new((/DIM_MOD + 3, time_span/), float)  ; incl. uMMM and wMMM
+  t_mder_errors = new((/2, time_span/), float)
+  mder_errors = new((/2, time_span/), float)
+  yy_obs = new((/DIM_OBS, time_span/), float)
+  weights_t = new((/1, DIM_MOD, dimsizes(p_time)/), float)
+  cfint_t = new((/DIM_MOD, dimsizes(p_time)/), float)
+  coeff_y_est = new((/DIM_MOD, dimsizes(p_time)/), double)  ; Type of coef
+  Y_WEIGHT = new((/dimsizes(p_time)/), double)  ; Type of coef
+  MCORR = new((/dimsizes(p_time)/), double)  ; Type of coef
+
+  ; ---------------------------------------------------------------------------
+  ; Read and preprocess model data
+  ; ---------------------------------------------------------------------------
+
+  ; Call the variable of interest and save in one array YY
+  ANCESTORS = new(DIM_MOD * 2, string)
+  do imod = 0, DIM_MOD - 1
+    log_info(MODELS(imod))
+
+    ; Average over ensemble members if desired
+    if (avgens) then
+      atts := True
+      atts@dataset = MODELS(imod)
+      info_hist := select_metadata_by_atts(HIST, atts)
+      info_fut := select_metadata_by_atts(FUT, atts)
+    else
+      info_hist := NewList("lifo")
+      info_fut := NewList("lifo")
+      ListAppend(info_hist, HIST[imod])
+      ListAppend(info_fut, FUT[imod])
+    end if
+
+    ; Get data
+    data_hist = get_average(info_hist)
+    data_fut = get_average(info_fut)
+    info_hist := info_hist[0]
+    info_fut := info_fut[0]
+
+    ; Get filenames
+    ANCESTORS(2 * imod) = info_hist@filename
+    ANCESTORS(2 * imod + 1) = info_fut@filename
+
+    ; Process multi-dimensional variables
+    if (is_multi_dim) then
+      data_hist := area_operations(data_hist, region(0), region(1), \
+                                   region(2), region(3), area_oper, True)
+      data_fut := area_operations(data_fut, region(0), region(1), \
+                                  region(2), region(3), area_oper, True)
+    end if
+
+    ; Append future array to historical array
+    data_y = array_append_record(data_hist, data_fut, 0)
+    delete([/data_hist, data_fut/])
+
+    ; Convert units if desired
+    if (isatt(VAR0, "plot_units")) then
+      data_y := convert_units(data_y, VAR0@plot_units)
+    end if
+
+    ; Adapt scal_time(1) (necessary if larger than end_year)
+    if (scal_time(1) .gt. info_hist@end_year) then
+      scal_time(1) = info_hist@end_year
+    end if
+
+    ; Calculate average over starting period for trend calculation
+    trend_base = time_operations(data_y, scal_time(0), scal_time(1), \
+                                 time_oper, time_opt, True)
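+    ; For time_oper "extract", the selected season is first reduced to
+    ; yearly means before averaging over the base period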
"extract") then + trend_base := time_operations(trend_base, scal_time(0), scal_time(1), \ + "average", "yearly", True) + end if + trend_base = tofloat(avg(trend_base)) + + ; Loop over different target periods + do pt = 0, dim_p_time - 1 + ; Average over var time series + y_mean = time_operations(data_y, p_time(pt), p_time(pt) + p_step, \ + time_oper, time_opt, True) + if (time_oper .eq. "extract") then + y_mean := time_operations(y_mean, p_time(pt), \ + p_time(pt) + p_step, "average", \ + "yearly", True) + end if + + ; Perform calculation type + if (calc_type .eq. "trend") then + xx(imod, pt) = tofloat(avg(y_mean) - trend_base) + xx_val(imod, pt, 0) = tofloat(avg(y_mean) - trend_base) + + ; FIXME: Why substracting base value from error? + xx_val(imod, pt, 1) = tofloat(stddev(y_mean) / \ + sqrt(dimsizes(y_mean)) - trend_base) + else if (calc_type .eq. "pos") then + xx(imod, pt) = tofloat(avg(y_mean)) + xx_val(imod, pt, 0) = tofloat(avg(y_mean)) + xx_val(imod, pt, 1) = tofloat(stddev(y_mean) / \ + sqrt(dimsizes(y_mean))) + else if (calc_type .eq. "int") then + integrated_vals = y_mean + do tt = 0, dimsizes(y_mean) - 2 + integrated_vals(tt + 1) = integrated_vals(tt) + y_mean(tt + 1) + end do + xx(imod, pt) = tofloat(avg(integrated_vals)) + xx_val(imod, pt, 0) = tofloat(avg(integrated_vals)) + xx_val(imod, pt, 1) = tofloat(stddev(integrated_vals) / \ + sqrt(dimsizes(integrated_vals))) + delete([/integrated_vals, tt/]) + else + error_msg("f", diag_script, "", "calc_type " + calc_type + \ + " not supported") + end if + end if + end if + delete(y_mean) + end do + + ; Extract season for var time series + y_mean = time_operations(data_y, -1, -1, time_oper, time_opt, True) + if (time_oper .eq. "extract") then + y_mean := time_operations(y_mean, -1, -1, "average", "yearly", \ + True) + end if + + ; Smoothing var time series with 1-2-1 filter + if (smooth) then + y_mean := filter121(y_mean, iter) + end if + if (calc_type .eq. "trend") then + YY(imod + 3, 0:dimsizes(y_mean) - 1) = tofloat(y_mean - trend_base) + else if (calc_type .eq. "pos") then + YY(imod + 3, 0:dimsizes(y_mean) - 1) = tofloat(y_mean) + else if (calc_type .eq. 
"int") then + YY(imod + 3, 0) = y_mean(0) + do tt = 0, dimsizes(y_mean) - 2 + YY(imod + 3, tt + 1) = YY(i + 3, tt) + y_mean(tt + 1) + end do + end if + end if + end if + delete([/trend_base, data_y, y_mean/]) + end do + + ; Consider ensemble averaging + if (avgens) then + INFO0 := get_unique_items(INFO0, "dataset") + CMIP5 := get_unique_items(CMIP5, "dataset") + HIST := get_unique_items(HIST, "dataset") + FUT := get_unique_items(FUT, "dataset") + end if + + ; Add attributes to models time series + YY!1 = "time" + YY&time = ispan(min_year, max_year, 1) ; y_mean&year + YY&time@calendar = "standard" + + ; --------------------------------------------------------------------------- + ; Read and preprocess observational data + ; --------------------------------------------------------------------------- + + ; Call OBS of the variable of interest and save in one array yy_obs + do iobs = 0, DIM_OBS - 1 + log_info(OBS_DATASETS(iobs)) + info = OBS[iobs] + ANCESTORS := array_append_record(ANCESTORS, info@filename, 0) + + ; Read observational data + data_obs = read_data(info) + + ; Process multi-dimensional variables + if (is_multi_dim) then + data_obs := area_operations(data_obs, region(0), region(1), region(2), \ + region(3), area_oper, True) + end if + + ; Convert units if desired + if (isatt(VAR0, "plot_units")) then + data_obs := convert_units(data_obs, VAR0@plot_units) + VAR0@units = VAR0@plot_units + end if + + ; Perform calculation type + ymin = min(info@start_year) - min_year + obs_mean = time_operations(data_obs, -1, -1, time_oper, time_opt, True) + if (time_oper .eq. "extract") then + obs_mean := time_operations(obs_mean, -1, -1, "average", "yearly", True) + end if + if (calc_type .eq. "trend") then + VAR0@units = VAR0@units + "/dec" + obs_base = time_operations(data_obs, scal_time(0), -1, "average", \ + "yearly", True) + obs_base = tofloat(avg(obs_base)) + yy_obs(iobs, ymin:ymin + dimsizes(obs_mean) - 1) = tofloat(obs_mean - \ + obs_base) + delete(obs_base) + else if (calc_type .eq. "pos") then + yy_obs(iobs, ymin:ymin + dimsizes(obs_mean) - 1) = tofloat(obs_mean) + else if (calc_type .eq. "int") then + yy_obs(iobs, ymin) = obs_mean(0) + do tt = 0, dimsizes(obs_mean) - 2 + yy_obs(iobs, ymin + tt + 1) = yy_obs(iobs, ymin + tt) + \ + obs_mean(tt + 1) + end do + VAR0@units = integrate_time_units(VAR0@units, 0) + delete(tt) + end if + end if + end if + delete([/data_obs, ymin, obs_mean/]) + end do + + ; Add attributes to observations time series + yy_obs!0 = "name" + yy_obs!1 = "time" + yy_obs&name := OBS_DATASETS + yy_obs&time = YY&time + yy_obs&time@calendar = "standard" + + ; --------------------------------------------------------------------------- + ; Call diagnostics + ; --------------------------------------------------------------------------- + + ; Get input directory (using ancestor diagnostics) + input_files := diag_script_info@input_files + var_files = str_match(input_files, "_info.ncl") + do ivar = 0, dimsizes(var_files) - 1 + input_files := input_files(ind(input_files .ne. var_files(ivar))) + end do + input_dirs = input_files + if (dimsizes(input_dirs) .lt. 
+  if (dimsizes(input_dirs) .lt. 1) then
+    error_msg("f", DIAG_SCRIPT, "", "no diagnostics given (use " + \
+              "'ancestors' key in recipe)")
+  end if
+
+  ; Get diagnostics
+  file_list = new(1, string)
+  do idx_dir = 0, dimsizes(input_dirs) - 1
+    file_list := array_append_record(\
+      file_list, \
+      systemfunc("ls " + input_dirs(idx_dir) + "/*.nc"), \
+      0)
+  end do
+  file_list := file_list(1:)
+  ANCESTORS := array_append_record(ANCESTORS, file_list, 0)
+
+  ; Create diagnostic arrays
+  DIM_DIAG = dimsizes(file_list)
+  diag_a = new((/DIM_DIAG, DIM_MOD/), float)
+  diag_a_std = new((/DIM_DIAG, DIM_MOD/), float)
+  obs0 = new((/DIM_DIAG/), float)
+  obs0_std = new((/DIM_DIAG/), float)
+  wdiag = new((/DIM_DIAG/), string)
+
+  log_info("")
+  ; Loop over files in the list and append data
+  do idiag = 0, DIM_DIAG - 1
+    log_info("Reading " + file_list(idiag))
+    data_temp = ncdf_read(file_list(idiag), "diag")
+
+    ; Loop over datasets and save diagnostic data
+    do imod = 0, DIM_MOD - 1
+      do idat = 0, dimsizes(data_temp&datasets) - 1
+
+        ; Add data if datasets match
+        if (MODELS(imod) .eq. data_temp&datasets(idat)) then
+          diag_a(idiag, imod) = data_temp(0, idat, 0)
+          diag_a_std(idiag, imod) = data_temp(0, idat, 1)
+          if (isdim(data_temp, "diagnostics")) then
+            wdiag(idiag) = tostring(data_temp&diagnostics(0))
+          else
+            error_msg("f", DIAG_SCRIPT, "", "Cannot read diagnostic name " + \
+                      "from data " + data_temp + ", need dimension " + \
+                      "'diagnostics'")
+          end if
+        end if
+
+      end do
+    end do
+
+    ; Save reference data
+    if (isatt(data_temp, "ref_dataset")) then
+      if (any(data_temp&datasets .eq. data_temp@ref_dataset)) then
+        idx_ref = ind(data_temp&datasets .eq. data_temp@ref_dataset)
+        obs0(idiag) = data_temp(0, idx_ref, 0)
+        obs0_std(idiag) = data_temp(0, idx_ref, 1)
+        delete(idx_ref)
+      end if
+    end if
+    delete(data_temp)
+
+    ; FIXME: degrees of freedom are hardcoded (26)?
+    log_info(wdiag(idiag) + ": OBS: +/- 95% conf.: " + obs0(idiag) + \
+             " +/- " + (cdft_t(0.05 / 2.0, 26) * obs0_std(idiag)) + \
+             " MODELS: " + (avg(diag_a(idiag, :))) + " +/- " + \
+             (stddev(diag_a(idiag, :))))
+
+    ; Only account for good diagnostics where no model is missing
+    if (.not. any(ismissing(diag_a(idiag, :)))) then
+      if (.not. isvar("good_diag_idx")) then
+        good_diag_idx = idiag
+      else
+        tmp_gd = good_diag_idx
+        good_diag_idx := new((/dimsizes(tmp_gd) + 1/), integer)
+        good_diag_idx(:dimsizes(tmp_gd) - 1) = tmp_gd
+        good_diag_idx(dimsizes(tmp_gd)) = idiag
+        delete(tmp_gd)
+      end if
+    else
+      missing_datasets := ind(ismissing(diag_a(idiag, :)))
+      error_msg("w", DIAG_SCRIPT, "", "in diagnostic " + wdiag(idiag) + \
+                ", some datasets are missing: " + \
+                MODELS(missing_datasets))
+    end if
+  end do
+  if (.not.
isvar("good_diag_idx")) then + error_msg("f", DIAG_SCRIPT, "", "no diagnostic without missing " + \ + "datasets given") + end if + + log_info("") + + ; Save "good" diagnostics without missing datasets + wdiag := wdiag(good_diag_idx) + diag_all = diag_a(good_diag_idx, :) + diag_all_std = diag_a_std(good_diag_idx, :) + diag_all&diagnostics = wdiag + diag_all_std&diagnostics = wdiag + DIM_DIAG = dimsizes(good_diag_idx) + DIM_DIAG_ALL = dimsizes(diag_a(:, 0)) + delete([/diag_a, diag_a_std, good_diag_idx/]) + + ; --------------------------------------------------------------------------- + ; Regression calculations + ; --------------------------------------------------------------------------- + + ; Loop over different target periods + do pt = 0, dimsizes(p_time) - 1 + log_info("Starting calculations for target period " + p_time(pt) + "-" + \ + (p_time(pt) + p_step)) + + ; Skip calculation if values are missing + if (any(ismissing(xx(:, pt)))) then + continue + end if + + ; Regression model: cross-validation cycle + if (cross_validation_mode) then + do imod = 0, DIM_MOD - 1 + model = MODELS(imod) + log_debug("Starting cross-validation cycle for dataset " + model) + good = ind(MODELS .ne. model) + coef = regress_stepwise(diag_all(:, good), xx(good, pt)) + const = coef@const + used_diags = toint(coef@idx_used_diags) + n_terms = coef@n_terms + YFIT = coef@YFIT + mcorr = coef@MCORRELATION + corr = coef@correlation + log_debug("Cross-validation mode. Dataset: " + model + \ + ", constant: " + const) + if (n_terms .gt. 0) then + do it = 0, dimsizes(coef) - 1 + log_debug(wdiag(used_diags(it)) + ", " + coef(it) + ", " + \ + corr(it)) + end do + end if + log_debug("Multiple correlation: " + mcorr) + log_debug("") + delete([/const, used_diags, n_terms, YFIT, mcorr, corr, coef, \ + good/]) + end do + end if + + ; Building the final regression + coef := regress_stepwise(diag_all, xx(:, pt)) + const = coef@const + used_diags := toint(coef@idx_used_diags) + n_terms := coef@n_terms + YFIT := coef@YFIT + mcorr := coef@MCORRELATION + corr := coef@correlation + + ; Print final regression model + log_info("The final model (" + scal_time(0) + "-" + \ + (p_time(pt) + p_step) + "):") + log_info(" Constant: " + const) + if (n_terms .gt. 0) then + do it = 0, dimsizes(coef) - 1 + log_info(" " + wdiag(used_diags(it)) + ": " + coef(it) + \ + ", (corr = " + corr(it) + ")") + end do + end if + log_info(" " + "Multiple correlation: " + mcorr) + log_info("") + + ; ------------------------------------------------------------------------- + ; Model weighting + ; ------------------------------------------------------------------------- + + x_diag := transpose(diag_all(used_diags, :)) + good_diag := ind(.not. 
ismissing(x_diag(:, 0))) + x := x_diag(good_diag, :) + + ; tmp_dat = DIM_MOD + tmp_dat := dimsizes(x(:, 0)) + + ; Create Design Matrix D + D := new((/tmp_dat, dimsizes(used_diags) + 1/), typeof(diag_all)) + D = 1 + do id = 0, dimsizes(x(:, 0)) - 1 + D(id, 1:) = x(id, :) + end do + + ; Observations + xobs := new((/1, dimsizes(used_diags) + 1/), typeof(diag_all), \ + getFillValue(diag_all)) + xobs = 1.0 + xobs(0, 1:) = obs0(used_diags) + df = tmp_dat - dimsizes(xobs(0, :)) + + ; Identity matrix + Id := new((/tmp_dat, tmp_dat/), typeof(diag_all)) + Id = 0.0 + do idat = 0, tmp_dat - 1 + Id(idat, idat) = 1.0 + end do + + ; Auxiliary vectors + m := new((/tmp_dat, 1/), float) ; column vector + m = 1.0 + n := new((/tmp_dat, 1/), float) ; column vector + n = (/1.0 / tmp_dat/) + + ; Future projections + y := xx(good_diag, pt) + + ; Calculate model weights (see Karpechko et al. 2013, eq. (4)) + weights := (transpose(n) + (xobs(:, 1:) - transpose(n) # x) # \ + inverse_matrix(transpose(x) # x - transpose(x) # m # \ + transpose(n) # x) # \ + (transpose(x) - transpose(x) # m # transpose(n))) + + ; Confidence interval + cfint1 = cdft_t(0.05 / 2.0, df) ^ 2 * \ + (1.0 + xobs # inverse_matrix(transpose(D) # D) # transpose(xobs)) + SSE1 := y # (Id - D # inverse_matrix(transpose(D) # D) # transpose(D)) + sigmae1 := (1.0 / df) * SSE1 + tmp_cf := cfint1(0, 0) * sigmae1 + delete([/SSE1, sigmae1, cfint1/]) + + ; Weighted mean and std + y_weight := (weights # y) + cf := sqrt(tmp_cf # y) + uM = avg(y) + uM_std = stddev(y) / sqrt(DIM_MOD) + log_info("uMMM = " + uM + " +/- " + uM_std) + log_info("y_weight = " + y_weight) + log_info("cf = " + cf) + log_info("") + + ; Save weights and confidence intervals in one array + do imod = 0, DIM_MOD - 1 + do gdiad = 0, dimsizes(good_diag) - 1 + if (imod .eq. 
good_diag(gdiad)) then
+          weights_t(:, imod, pt) = weights(:, gdiad)
+          cfint_t(imod, pt) = tmp_cf(gdiad)
+        end if
+      end do
+    end do
+
+    ; Output
+    log_info("Model weights:")
+    do imod = 0, DIM_MOD - 1
+      log_info(MODELS(imod) + ": " + weights_t(:, imod, pt))
+    end do
+    log_info("")
+
+    ; Save Y-estimates and multiple correlation coefficients
+    coeff_y_est(:, pt) = coef@Yest
+    MCORR(pt) = mcorr
+    Y_WEIGHT(pt) = y_weight
+
+    ; Save coefficients and terms of regression model in array
+    ; Array sizes are first known at runtime (-> build at runtime)
+    if (isvar("mder_coefs")) then
+      max_dim = max((/dimsizes(coef), dimsizes(mder_coefs(pt - 1, 1:))/))
+      tmp_mder_coefs = mder_coefs
+      tmp_names = names_used_diags
+      tmpt = idx_used_diags
+      delete([/mder_coefs, names_used_diags, idx_used_diags/])
+
+      mder_coefs = new((/pt + 1, max_dim + 1/), typeof(tmp_mder_coefs))
+      names_used_diags = new((/pt + 1, max_dim + 1/), typeof(tmp_names))
+      idx_used_diags = new((/pt + 1, max_dim + 1/), typeof(tmpt))
+
+      mder_coefs(:pt - 1, :dimsizes(tmp_mder_coefs(0, :)) - 1) = tmp_mder_coefs
+      names_used_diags(:pt - 1, :dimsizes(tmp_names(0, :)) - 1) = tmp_names
+      idx_used_diags(:pt - 1, :dimsizes(tmpt(0, :)) - 1) = tmpt
+      mder_coefs(pt, 0) = const
+      mder_coefs(pt, 1:dimsizes(coef)) = coef
+      names_used_diags(pt, 1:dimsizes(wdiag(used_diags))) = wdiag(used_diags)
+      idx_used_diags(pt, 1:dimsizes(used_diags)) = used_diags
+
+      delete([/tmp_mder_coefs, tmp_names, tmpt, max_dim/])
+    else
+      mder_coefs = new((/1, dimsizes(coef) + 1/), typeof(coef))
+      names_used_diags = new((/1, dimsizes(coef) + 1/), string)
+      idx_used_diags = new((/1, dimsizes(coef) + 1/), typeof(used_diags))
+
+      mder_coefs(pt, 0) = const
+      mder_coefs(pt, 1:) = coef
+      names_used_diags(pt, 1:) = wdiag(used_diags)
+      idx_used_diags(pt, 1:) = used_diags
+    end if
+
+    delete([/y, corr, mcorr, x, x_diag, good_diag, xobs, cf, tmp_cf, \
+             weights, uM, uM_std/])
+  end do  ; Loop over different target periods
+
+  ; Iterate over target periods and save MDER results
+  do pt = 0, dimsizes(p_time) - 1
+    tmp_yy = new(dimsizes(YY), typeof(YY))
+    tmp_mder_errors = new(dimsizes(mder_errors), typeof(mder_errors))
+    do tt = 0, time_span - 2
+      YY(0, tt) = avg(YY(3:, tt))
+
+      ; MDER errors for time periods
+      if (tt .ge. p_time(pt) - min_year .and. \
+          tt .lt. (p_time(pt) + p_step + 1 - min_year)) then
+        tmp_yy(1, tt) = sum(YY(3:, tt) * weights_t(0, :, pt))
+        tmp_mder_errors(0, tt) = tmp_yy(1, tt) + \
+          sqrt(abs(sum(YY(3:, tt) * cfint_t(:, pt))))
+        tmp_mder_errors(1, tt) = tmp_yy(1, tt) - \
+          sqrt(abs(sum(YY(3:, tt) * cfint_t(:, pt))))
+      end if
+      if (tt .eq.
(p_time(pt) + p_step - min_year)) then + YY(1, tt - (p_step + 1) / 2) = avg(tmp_yy(1, tt-p_step:tt)) + mder_errors(0, tt - (p_step + 1) / 2) = \ + avg(tmp_mder_errors(0, tt - p_step:tt)) + mder_errors(1, tt - (p_step + 1) / 2) = \ + avg(tmp_mder_errors(1, tt - p_step:tt)) + end if + + ; MDER errors for whole time span + YY(2, tt) = sum(YY(3:, tt) * weights_t(0, :, pt)) + t_mder_errors(0, tt) = YY(2, tt) + \ + sqrt(abs(sum(YY(3:, tt) * cfint_t(:, pt)))) + t_mder_errors(1, tt) = YY(2, tt) - \ + sqrt(abs(sum(YY(3:, tt) * cfint_t(:, pt)))) + end do + end do + + ; --------------------------------------------------------------------------- + ; Plot preparations + ; --------------------------------------------------------------------------- + + ; Plot regression line + ; y_mder_results(:, 0): const + ; y_mder_results(:, 1): fitted observations (with MDER coefficients) + ; y_mder_results(:, 2:): fitted models (with model weights) + y_mder_results = new((/dimsizes(p_time), DIM_MOD + 2/), typeof(mder_coefs)) + y_mder_results_std = new((/dimsizes(p_time), DIM_MOD + 2/), \ + typeof(mder_coefs)) + + nfit = fspan(0, dimsizes(y_mder_results(0, 2:)) - 1, \ + dimsizes(y_mder_results(0, 2:))) + ; nfit = fspan(0, 49, 50) + wy = new((/dimsizes(p_time), DIM_MOD + 2/), typeof(mder_coefs)) + wy_std = new((/dimsizes(p_time), DIM_MOD + 2/), float) + yfit_reg = new((/dimsizes(p_time), dimsizes(nfit)/), typeof(mder_coefs)) + yfit_std = new((/dimsizes(p_time), 2, dimsizes(nfit)/), typeof(mder_coefs)) + + ; Iterate over target periods + do ppt = 0, dimsizes(p_time) - 1 + xgood = ind(.not. ismissing(idx_used_diags(ppt, :))) + hlp_tmp = new((/dimsizes(mder_coefs(ppt, xgood)), dimsizes(nfit)/), float) + hlp = new((/1, dimsizes(mder_coefs(ppt, xgood)) + 1/), float) + y_mder_results(ppt, :) = mder_coefs(ppt, 0) ; const + y_mder_results_std(ppt, :) = 0.0 + yfit_reg(ppt, :) = mder_coefs(ppt, 0) ; const + hlp = 1.0 + + wy(ppt, 0) = avg(xx(:, ppt)) + wy(ppt, 1) = Y_WEIGHT(ppt) + wy(ppt, 2:) = xx(:, ppt) + wy_std(ppt, 2:) = xx_val(:, ppt, 1) ; cfint_t(:, ppt) + + do ii = 1, dimsizes(mder_coefs(ppt, :)) - 1 + if (.not. ismissing(mder_coefs(ppt, ii))) then + y_mder_results(ppt, 1) = y_mder_results(ppt, 1) + \ + mder_coefs(ppt, ii) * obs0(idx_used_diags(ppt, ii)) + y_mder_results_std(ppt, 1) = y_mder_results_std(ppt, 1) + \ + (mder_coefs(ppt, ii) * (obs0_std(idx_used_diags(ppt, ii)))) ^ 2 + + y_mder_results(ppt, 2:) = coeff_y_est(:, ppt) + ; = y_mder_results(2:) + coef(ii) * diag_all(terms(ii), :) + y_mder_results_std(ppt, 2:) = y_mder_results_std(ppt, 2:) + \ + (mder_coefs(ppt, ii) * \ + (diag_all_std(idx_used_diags(ppt, ii), :))) ^ 2 + + minx = min(diag_all(idx_used_diags(ppt, ii), :)) - 1.1 * \ + (max(diag_all(idx_used_diags(ppt, ii), :)) - \ + min(diag_all(idx_used_diags(ppt, ii), :))) + maxx = max(diag_all(idx_used_diags(ppt, ii), :)) + 1.1 * \ + (max(diag_all(idx_used_diags(ppt, ii), :)) - \ + min(diag_all(idx_used_diags(ppt, ii), :))) + diag_var = minx + (maxx - minx) * nfit / dimsizes(nfit) + hlp_tmp(ii - 1, :) = diag_var + delete([/diag_var, minx, maxx/]) + end if + end do + y_mder_results_std = sqrt(y_mder_results_std) + + x_diag = transpose(diag_all(idx_used_diags(ppt, xgood), :)) + good_diag = ind(.not. 
ismissing(x_diag(:, 0))) + x = x_diag(good_diag, :) + + ; Create Design matrix D and Identity matrix Id + D := new((/dimsizes(x(:, 0)), dimsizes(idx_used_diags(ppt, xgood)) + 1/), \ + typeof(diag_all)) + D = 1.0 + do id = 0, dimsizes(x(:, 0)) - 1 + D(id, 1:) = x(id, :) + end do + df = tmp_dat - dimsizes(obs0(idx_used_diags(ppt, xgood))) + 1 + Id = new((/dimsizes(x(:, 0)), dimsizes(x(:, 0))/), typeof(diag_all)) + Id = 0.0 + do idat = 0, tmp_dat - 1 + Id(idat, idat) = 1.0 + end do + + ; Error + SSE2 = xx(:, ppt) # (Id - D # inverse_matrix(transpose(D) # D) # \ + transpose(D)) + sigmae2 = (1.0 / df) * SSE2 + delete(SSE2) + + ; Calculate 95% confidence interval bounds + miny = min(xx(:, ppt)) - 1.1 * (max(xx(:, ppt)) - min(xx(:, ppt))) + maxy = max(xx(:, ppt)) + 1.1 * (max(xx(:, ppt)) - min(xx(:, ppt))) + yfit_reg(ppt, :) = miny + (maxy - miny) * nfit / dimsizes(nfit) + delete([/miny, maxy/]) + do ti = 0, dimsizes(nfit) - 1 + hlp(0, 1:) = (/hlp_tmp(:, ti)/) + cfint2 = cdft_t(0.05 / 2.0, df) ^ 2 * ( \ + 1.0 + hlp # inverse_matrix(transpose(D) # D) # transpose(hlp)) + cfint2 := cfint2(0, 0) * sigmae2 + cfint2 := sqrt(abs(cfint2 # xx(:, ppt))) + + yfit_std(ppt, 0, ti) = yfit_reg(ppt, ti) + cfint2 + yfit_std(ppt, 1, ti) = yfit_reg(ppt, ti) - cfint2 + + delete(cfint2) + end do + + ; Clean up + delete([/xgood, hlp_tmp, hlp, x_diag, good_diag, x, D, df, sigmae2/]) + end do + + ; Plot squared differences between ensemble mean and the total change in + ; pseudoreality + ; 1. #grades, 2. #pseudorealities + grade_all = new((/DIM_DIAG_ALL, DIM_MOD - 1, DIM_MOD/), float) + xx_pre = new((/DIM_MOD, dimsizes(p_time)/), float) + grade_trans = dim_avg_n_Wrap(grade_all(:, :, :), 0) + do imod = 0, DIM_MOD - 1 + good = ind(MODELS(imod) .ne. MODELS) + do pt = 0, dimsizes(p_time) - 1 + xx_pre(imod, pt) = avg(xx(good, pt)) + end do + delete(good) + end do + + ; Calculating statistics + x25 = round(0.25 * DIM_MOD, 3) - 1 + x75 = round(0.75 * DIM_MOD, 3) - 1 + + presq = new((/dimsizes(p_time), DIM_MOD/), typeof(mder_coefs)) + premdersq = new((/dimsizes(p_time), DIM_MOD/), typeof(mder_coefs)) + BSS = new((/dimsizes(p_time)/), float) + + ; Iterate over target periods + do pt = 0, dimsizes(p_time) - 1 + presq(pt, :) = (xx(:, pt) - xx_pre(:, pt)) ^ 2 + premdersq(pt, :) = (xx(:, pt) - coeff_y_est(:, pt)) ^ 2 + qsort(presq(pt, :)) + qsort(premdersq(pt, :)) + + tmpRDS = (/(/presq(pt, x25), presq(pt, x25), dim_median(presq(pt, :)), \ + presq(pt, x75), presq(pt, x75)/), \ + (/premdersq(pt, x25), premdersq(pt, x25), \ + dim_median(premdersq(pt, :)), premdersq(pt, x75), \ + premdersq(pt, x75)/)/) + tmpAVG = (/avg(presq(pt, :)), avg(premdersq(pt, :))/) + tmpPRE = (/presq(pt, :), premdersq(pt, :)/) + + if (all(isvar((/"preRDS", "preAVG", "prePRE"/)))) then + preRDS := array_append_record(preRDS, sqrt(tmpRDS), 0) + preAVG := array_append_record(preAVG, sqrt(tmpAVG), 0) + prePRE := array_append_record(prePRE, sqrt(tmpPRE), 0) + else + preRDS = sqrt(tmpRDS) + preAVG = sqrt(tmpAVG) + prePRE = sqrt(tmpPRE) + end if + delete([/tmpRDS, tmpAVG, tmpPRE/]) + log_info("RMSE (" + p_time(pt) + "-" + (p_time(pt) + p_step) + \ + "), uMMM: " + sqrt(avg(presq(pt, :))) + ", MDER: " + \ + sqrt(avg(premdersq(pt, :)))) + end do + + ; --------------------------------------------------------------------------- + ; Write regression model equations to ASCII table + ; --------------------------------------------------------------------------- + + log_info("") + + ; Create arrays + times_str = new((/dimsizes(p_time) + 1/), string) + terms_str = 
new((/dimsizes(p_time) + 1/), string) + mcorr_str = new((/dimsizes(p_time) + 1/), string) + + ; Header + times_str(0) = "Target period" + terms_str(0) = "Model equation" + mcorr_str(0) = "Correlation R^2" + + ; Contents + do pt = 0, dimsizes(p_time) - 1 + terms_str(pt + 1) = sprintf("%4.2f", mder_coefs(pt, 0)) + do ii = 1, dimsizes(mder_coefs(pt, :)) - 1 + if (.not. ismissing(mder_coefs(pt, ii))) then + if (mder_coefs(pt, ii) .gt. 0.0) then + terms_str(pt + 1) = terms_str(pt + 1) + " +" + \ + sprintf("%4.2f", mder_coefs(pt, ii)) + " x " + \ + names_used_diags(pt, ii) + else + terms_str(pt + 1) = terms_str(pt + 1) + " " + \ + sprintf("%4.2f", mder_coefs(pt, ii)) + " x " + \ + names_used_diags(pt, ii) + end if + end if + end do + p1_time = p_time(pt) + (p_step - 2000) + times_str(pt + 1) = p_time(pt) + "-" + sprinti("%0.2hi", p1_time) + mcorr_str(pt + 1) = sprintf("%.3f", MCORR(pt)) + end do + + ; --------------------------------------------------------------------------- + ; Write files + ; --------------------------------------------------------------------------- + + ; Ascii + table_path = work_dir + filename + ".txt" + ascii_path = work_dir + var0 + "_" + future_exp + "_" + \ + (p_step + 1) + "ystep_diagnostics.txt" + write_table(table_path, "w", [/times_str, terms_str, mcorr_str/], \ + "%s %s %s") + asciiwrite(ascii_path, names_used_diags) + log_info("Wrote " + ascii_path) + log_info("Wrote " + table_path) + delete([/table_path, ascii_path/]) + + ; NetCDF + do pt = 0, dimsizes(p_time) - 1 + weights_out = weights_t(0, :, pt) + weights_path = work_dir + filename + "_weights_" + \ + p_time(pt) + "-" + (p_time(pt) + p_step) + ".nc" + weights_out!0 = "dataset" + weights_out&dataset = MODELS + weights_out@diag_script = DIAG_SCRIPT + weights_out@var = "weight" + weights_out@description = "Weights from MDER" + weights_file = ncdf_write(weights_out, weights_path) + + ; Provenance tracking + plot_path = "n/a" + plot_type = "" + caption = "MDER model weights for target variable " + var0 + log_provenance(weights_file, plot_path, caption, STATISTICS, \ + DOMAIN, plot_type, AUTHORS, REFERENCES, ANCESTORS) + delete([/weights_out, weights_path, weights_file/]) + end do + + ; --------------------------------------------------------------------------- + ; Plots + ; --------------------------------------------------------------------------- + + ; Add uMMM and MDER to array + datasets := new(DIM_MOD + 3, string) + datasets(0) = "uMMM" + datasets(1) = "MDER time dependent" + datasets(2) = "MDER" + datasets(3:) = MODELS + YY!0 = "dataset" + YY&dataset := datasets + NEW_INFO = NewList("lifo") + do i = 0, 2 + ListAppend(NEW_INFO, True) + NEW_INFO[i]@project = "CMIP5" + NEW_INFO[i]@dataset = datasets(i) + end do + do imod = 0, DIM_MOD - 1 + ListAppend(NEW_INFO, FUT[imod]) + end do + do iobs = 0, DIM_OBS - 1 + ListAppend(NEW_INFO, OBS[iobs]) + end do + delete(datasets) + + ; Line plot showing the time series including MDER weights + plot_name_1 = filename + "_FIG1" + path_1 = plot_dir + plot_name_1 + "." 
+ file_type
+  wks = gsn_open_wks(file_type, plot_dir + plot_name_1)
+  MainStg = future_exp
+  YStg = var0 + " [" + VAR0@units + "]"
+  plot = weight_lin_plot(YY, mder_errors, t_mder_errors, yy_obs, MainStg, \
+                         YStg, wks, "draw", NEW_INFO)
+  log_info("Wrote " + path_1)
+
+  ; Write ncdf file
+  new_path = work_dir + plot_name_1 + ".nc"
+  YY@var = var0
+  YY@diag_script = DIAG_SCRIPT
+  YY@ncdf = new_path
+  outfile = ncdf_write(YY, new_path)
+
+  ; Provenance tracking
+  caption = "Time series of " + var0 + " for " + future_exp + "."
+  plot_type = (/"times"/)
+  log_provenance(outfile, path_1, caption, STATISTICS, DOMAIN, plot_type, \
+                 AUTHORS, REFERENCES, ANCESTORS)
+
+  ; MDER scatter plots for all time periods
+  y_mder_results!0 = "time"
+  y_mder_results!1 = "dataset"
+  y_mder_results&time = p_time
+  data_dim = (/"constant", "OBS"/)
+  data_dim := array_append_record(data_dim, MODELS, 0)
+  y_mder_results&dataset = data_dim
+  delete(data_dim)
+  do ppt = 0, dimsizes(p_time) - 1
+    plot_name_2 = filename + "_FIG2" + plot_num(ppt)
+    path_2 = plot_dir + plot_name_2 + "." + file_type
+    wks = gsn_open_wks(file_type, plot_dir + plot_name_2)
+    MainStg = plot_num(ppt) + ") " + p_time(ppt) + "-" + \
+      (p_time(ppt) + p_step) + " (" + future_exp + ")"
+    XStg = terms_str(ppt + 1) + " [" + VAR0@units + "]"
+    YStg = var0 + " [" + VAR0@units + "]"
+    plot2 = mder_scatter_plot(y_mder_results(ppt, :), \
+                              y_mder_results_std(ppt, :), \
+                              wy(ppt, :), wy_std(ppt, :), \
+                              yfit_reg(ppt, :), \
+                              yfit_std(ppt, :, :), \
+                              MainStg, XStg, YStg, wks, "draw", INFO0)
+    log_info("Wrote " + path_2)
+
+    ; Write ncdf file
+    new_path = work_dir + plot_name_2 + ".nc"
+    y_mder_results@var = var0
+    y_mder_results@diag_script = DIAG_SCRIPT
+    y_mder_results@ncdf = new_path
+    outfile = ncdf_write(y_mder_results, new_path)
+
+    ; Provenance tracking
+    caption = "Scatterplot showing the correlation between " + \
+      "future " + var0 + " and the quantity resulting from MDER " + \
+      "analysis on the X axis for " + future_exp + "."
+    plot_type = (/"scatter"/)
+    log_provenance(outfile, path_2, caption, STATISTICS, DOMAIN, plot_type, \
+                   AUTHORS, REFERENCES, ANCESTORS)
+  end do
+
+  ; Boxplot of RMSE of different weighting methods incl. pseudo-reality check
+  plot_name_3 = filename + "_FIG3"
+  path_3 = plot_dir + plot_name_3 + "." + file_type
+  YStg = "RMSE projection error [" + VAR0@units + "]"
+  XBname = new((dimsizes(times_str) - 1) * 2, string)
+  if (dimsizes(XBname) .eq. 2) then
+    XBname = (/"uMMM", "MDER"/)
+  else if (dimsizes(XBname) .eq.
4) then + XBname(0::2) = (/"uMMM~C~" + times_str(1:)/) + XBname(1::2) = (/"MDER~C~" + times_str(1:)/) + else + XBname(0::2) = (/" "/) + XBname(1::2) = times_str(1:) + end if + end if + + wks = gsn_open_wks(file_type, plot_dir + plot_name_3) + MainStg = future_exp + plot3 = squared_error_plot(preRDS, preAVG, prePRE, MainStg, XBname, YStg, \ + wks, "draw") + log_info("Wrote " + path_3) + + ; Write ncdf file + new_path = work_dir + plot_name_3 + ".nc" + prePRE!0 = "type" + prePRE!1 = "pseudo_reality_obs" + do pt = 0, dimsizes(p_time) - 1 + if (isvar("dim_type")) then + dim_type := array_append_record(dim_type, (/"uMMM " + p_time(pt), \ + "wMMM " + p_time(pt)/), 0) + else + dim_type := (/"uMMM " + p_time(pt), "wMMM " + p_time(pt)/) + end if + end do + prePRE&type = dim_type + dim_PRE = dimsizes(prePRE) + prePRE&pseudo_reality_obs = ispan(0, dim_PRE(1) - 1, 1) + prePRE@var = var0 + prePRE@diag_script = DIAG_SCRIPT + prePRE@ncdf = new_path + outfile = ncdf_write(prePRE, new_path) + + ; Provenance tracking + caption = "RMSE difference between the ensemble mean future " + \ + "climatological mean " + var0 + " and its equivalent in " + \ + "pseudo-reality under " + future_exp + "." + plot_type = (/"box"/) + log_provenance(outfile, path_3, caption, STATISTICS, DOMAIN, plot_type, \ + AUTHORS, REFERENCES, ANCESTORS) + + leave_msg(DIAG_SCRIPT, "") + +end diff --git a/esmvaltool/diag_scripts/mder/select_for_mder.ncl b/esmvaltool/diag_scripts/mder/select_for_mder.ncl new file mode 100644 index 0000000000..d07cda657f --- /dev/null +++ b/esmvaltool/diag_scripts/mder/select_for_mder.ncl @@ -0,0 +1,255 @@ +; ############################################################################# +; mder/select_for_mder.ncl +; ############################################################################# +; +; Description: +; Collects output from other diagnostics and saves them in a temporary +; file. It requires precalculation by other diagnostic scripts. +; +; Required diag_script_info attributes: +; wdiag: diagnostic names. +; domain: domain of the diagnostic (used for provenance tracking). +; ref_dataset: reference dataset. +; +; Optional diag_script_info attributes: +; average_ens: average over ensemble members of the same model (default: +; False). +; derive_var: derive variable. +; +; Caveats: +; "warning: in unique_labels_min (diag_scripts/shared/plot/style.ncl), Add +; more attributes to prio to make labels unique! Continuing with non-unique +; labels" is normal when using "average_ens = true". +; +; Modification history: +; 20191121-schlund_manuel: added new provenance tracking. +; 20180718-schlund_manuel: ported to v2.0. +; 20131203-wenzel_sabrina: written based on perfmetrics_grading.ncl. +; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/latlon.ncl" +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/set_operators.ncl" + +load "$diag_scripts/shared/plot/style.ncl" + +load "$diag_scripts/shared/mder.ncl" + + +begin + + enter_msg(DIAG_SCRIPT, "") + + if (.not. isdefined("input_file_info")) then + error_msg("f", DIAG_SCRIPT, "", "no input_file_info available, " + \ + "variable has to be selected as ancestor in the recipe") + end if + + ; Variable + VAR0 = variable_info[0] + DIM_VAR = ListCount(variable_info) + if (DIM_VAR .gt. 
2) then + error_msg("w", DIAG_SCRIPT, "", "this diagnostic supports at most two " + \ + "variables, got " + DIM_VAR) + end if + + ; Input data + INFO0 = select_metadata_by_name(input_file_info, VAR0@short_name) + DATASETS = metadata_att_as_array(INFO0, "dataset") + + ; Get correct variable + if (isatt(diag_script_info, "derive_var")) then + var0 = diag_script_info@derive_var + else + var0 = VAR0@short_name + end if + + log_info("++++++++++++++++++++++++++++++++++++++++++") + log_info(DIAG_SCRIPT + " (var: " + var0 + ")") + log_info("++++++++++++++++++++++++++++++++++++++++++") + + ; Ensemble averaging (if desired) + avgens = False + if (isatt(diag_script_info, "average_ens")) then + avgens = diag_script_info@average_ens + end if + if (avgens) then + DATASETS := get_unique_values(DATASETS) + INFO0 := get_unique_items(INFO0, "dataset") + else + DATASETS := unique_labels_min(INFO0, (/"dataset", "ensemble"/)) + end if + DIM_DAT = dimsizes(DATASETS) + +end + +begin + + ; Required attributes + req_atts = (/"wdiag", "domain", "ref_dataset"/) + exit_if_missing_atts(diag_script_info, req_atts) + DIM_DIAG = dimsizes(diag_script_info@wdiag) + + ; Provenance + AUTHORS = (/"wenzel_sabrina", "schlund_manuel"/) + DOMAIN = diag_script_info@domain + PLOT_TYPE = "" + PLOT_FILE = "n/a" + REFERENCES = (/"wenzel16jclim"/) + + ; Get reference dataset(s) + ref_dataset = diag_script_info@ref_dataset + +end + +begin + + opt = diag_script_info + + ; Iterate over all diagnostics + do dd = 0, DIM_DIAG - 1 + ANCESTORS := new(DIM_DAT, string) + opt@wdiag := diag_script_info@wdiag(dd) + var0_new = opt@wdiag + + ; Caption and statistics + if (isStrSubset(opt@wdiag, "_t")) then + statistics := (/"trend", "stddev"/) + caption = "Trend in " + var0 + "." + else if(isStrSubset(opt@wdiag, "_c")) then + statistics := (/"mean", "clim", "stddev"/) + caption = "Climatological mean in " + var0 + "." + else + statistics := (/"mean", "stddev"/) + caption = "Mean in " + var0 + "." + end if + end if + + ; Output file + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) + ncdf_file = work_dir + var0_new + ".nc" + + ; Read data + do idat = 0, DIM_DAT - 1 + log_info(DATASETS(idat)) + info = INFO0[idat] + + ; Get input directory (using ancestor diagnostic) + input_files := diag_script_info@input_files + var_files = str_match(input_files, "_info.ncl") + do ivar = 0, dimsizes(var_files) - 1 + input_files := input_files(ind(input_files .ne. var_files(ivar))) + end do + input_dir = input_files + if (dimsizes(input_dir) .gt. 1) then + error_msg("f", DIAG_SCRIPT, "", "multiple input directories " + \ + "(ancestors in recipe) given, only one supported") + end if + + ; Check if dataset is available + new_path = input_dir + "/" + diag_file_name(info@filename, opt) + if (fileexists(new_path)) then + log_info("Reading " + new_path) + data_temp = ncdf_read(new_path, var0) + ANCESTORS(idat) = new_path + + ; Define data array in the first iteration + if (.not. isdefined("data")) then + dim_data = array_append_record((/DIM_DAT/), dimsizes(data_temp), 0) + data = new(dim_data, float) + data!0 = "dataset" + data&dataset = DATASETS + end if + rank = dimsizes(dim_data) + + ; Add data + if (rank .eq. 2) then + data(idat, :) = data_temp + else if (rank .eq. 3) then + data(idat, :, :) = data_temp + else if (rank .eq. 4) then + data(idat, :, :, :) = data_temp + else if (rank .eq. 
5) then + data(idat, :, :, :, :) = data_temp + else + error_msg("f", DIAG_SCRIPT, "", "rank " + rank + \ + " data not supported") + end if + end if + end if + end if + delete(data_temp) + + ; Skip dataset if no data is available + else + log_info("No data available for dataset: " + DATASETS(idat)) + log_info("Skipping") + end if + end do + + ; Define result variable (always one diagnostic) + val = new((/1, DIM_DAT, 2/), float) + val!0 = "diagnostics" + val!1 = "datasets" + val!2 = "stat" + val&diagnostics = var0_new + val&datasets = DATASETS + val&stat = (/"mean", "stddev"/) + val@ncdf_file = ncdf_file + + if (dimsizes(ref_dataset) .gt. 1) then + val@reference = (/ref_dataset(dd)/) + else + val@reference = (/ref_dataset/) + end if + + ; Loop over all datasets + do idat = 0, DIM_DAT - 1 + + ; Extract data of given dataset + if (rank .eq. 2) then + var = data(idat, 0) + var_std = data(idat, 1) + else if (rank .eq. 3) then + var = data(idat, :, 0) + var_std = data(idat, :, 1) + else if (rank .eq. 4) then + var = data(idat, :, :, 0) + var_std = data(idat, :, :, 1) + else if (rank .eq. 5) then + var = data(idat, :, :, :, 0) + var_std = data(idat, :, :, :, 1) + else + error_msg("f", DIAG_SCRIPT, "", "rank " + rank + " data not supported") + end if + end if + end if + end if + val(0, idat, 0) = var + val(0, idat, 1) = var_std + delete(var) + end do + delete(data) + + ; Attach attributes to the results + val@title = "selected diagnostic" + val@long_name = "Diagnostic for Multiple Diagnostic Ensemble Regression" + val@diag_script = (/DIAG_SCRIPT/) + val@var = "diag" + + ; Write NetCDF output + ncdf_outfile = ncdf_write(val, val@ncdf_file) + delete(val) + + ; Provenance tracking + log_provenance(ncdf_outfile, PLOT_FILE, caption, statistics, DOMAIN, \ + PLOT_TYPE, AUTHORS, REFERENCES, ANCESTORS) + end do + + leave_msg(DIAG_SCRIPT, "") + +end diff --git a/esmvaltool/diag_scripts/miles/basis_functions.R b/esmvaltool/diag_scripts/miles/basis_functions.R index 69c10a7ec7..adab0c0d6a 100644 --- a/esmvaltool/diag_scripts/miles/basis_functions.R +++ b/esmvaltool/diag_scripts/miles/basis_functions.R @@ -10,7 +10,8 @@ library("maps") library("ncdf4") library("PCICt") -# check if fast linear fit is operative (after R 3.1): 3x faster than lm.fit, 36x faster than lm +# check if fast linear fit is operative (after R 3.1): 3x faster +# than lm.fit, 36x faster than lm if (exists(".lm.fit")) { lin.fit <- .lm.fit } else { @@ -18,7 +19,8 @@ if (exists(".lm.fit")) { } # check R version as numeric -R_version <- as.numeric(R.Version()$major) + as.numeric(R.Version()$minor) / 10 +R_version <- + as.numeric(R.Version()$major) + as.numeric(R.Version()$minor) / 10 ########################################################## #-----------------Basic functions------------------------# @@ -26,7 +28,8 @@ R_version <- as.numeric(R.Version()$major) + as.numeric(R.Version()$minor) / 10 # normalize a time series standardize <- function(timeseries) { - out <- (timeseries - mean(timeseries, na.rm = T)) / sd(timeseries, na.rm = T) + out <- + (timeseries - mean(timeseries, na.rm = T)) / sd(timeseries, na.rm = T) return(out) } @@ -41,13 +44,13 @@ whicher <- function(axis, number) { area_weight <- function(ics, ipsilon, root = T) { field <- array(NA, dim = c(length(ics), length(ipsilon))) if (root == T) { - for (j in 1:length(ipsilon)) { + for (j in seq_along(ipsilon)) { field[, j] <- sqrt(cos(pi / 180 * ipsilon[j])) } } if (root == F) { - for (j in 1:length(ipsilon)) { + for (j in seq_along(ipsilon)) { field[, j] <- cos(pi / 180 * 
ipsilon[j]) } } @@ -95,7 +98,10 @@ sector_details <- function(SECTOR) { lonssel <- left1:right1 } out <- list( - lons = lons, lonssel = lonssel, lats = lats, latssel = latssel, + lons = lons, + lonssel = lonssel, + lats = lats, + latssel = latssel, name = namesec ) return(out) @@ -118,61 +124,79 @@ weighted_cor <- function(x, y, w) { weighted_sd <- function(x, w) { w_mean <- sum(w * x) / sum(w) v1 <- sum(w) - v2 <- sum(w ^ 2) - var <- v1 / (v1 ^ 2 - v2) * sum(w * (x - w_mean) ^ 2) + v2 <- sum(w^2) + var <- v1 / (v1^2 - v2) * sum(w * (x - w_mean)^2) sdd <- sqrt(var) return(sdd) } # info string creator -info_builder <- function(dataset, expid, ens, year1, year2, season) { - - # loop on descriptors that are concatenated to create info string - descriptors <- c(dataset, expid, ens, paste0(year1, "-", year2), season) - info <- NULL - for (dcode in descriptors) { - if (length(dcode) > 0) { - info <- paste(info, dcode) +info_builder <- + function(dataset, expid, ens, year1, year2, season) { + # loop on descriptors that are concatenated to create info string + descriptors <- + c(dataset, expid, ens, paste0(year1, "-", year2), season) + info <- NULL + for (dcode in descriptors) { + if (length(dcode) > 0) { + info <- paste(info, dcode) + } } + return(info) } - return(info) -} -# basic switch to create NetCDF file names and folders (use recursive structure from v0.6) -file_builder <- function(DATADIR, dir_name, file_name, dataset, expid, ens, - year1, year2, season) { - - # loop on descriptors that are concatenated to create dir and file name - descriptors <- c(dataset, expid, ens, paste0(year1, "-", year2), season) - for (dcode in descriptors) { - if (length(dcode) > 0) { - DATADIR <- file.path(DATADIR, dcode) - file_name <- paste0(file_name, "_", dcode) +# basic switch to create NetCDF file names and folders +# (use recursive structure from v0.6) +file_builder <- + function(DATADIR, + dir_name, + file_name, + dataset, + expid, + ens, + year1, + year2, + season) { + # loop on descriptors that are concatenated to create dir and file name + descriptors <- + c(dataset, expid, ens, paste0(year1, "-", year2), season) + for (dcode in descriptors) { + if (length(dcode) > 0) { + DATADIR <- file.path(DATADIR, dcode) + file_name <- paste0(file_name, "_", dcode) + } } - } - # add directory name descriptor - DATADIR <- file.path(DATADIR, dir_name) + # add directory name descriptor + DATADIR <- file.path(DATADIR, dir_name) - # actually dir.exists is in devtools only for R < 3.2, - # then is included in base package - if (exists("dir.exists")) { - if (!dir.exists(DATADIR)) { - dir.create(DATADIR, recursive = T) + # actually dir.exists is in devtools only for R < 3.2, + # then is included in base package + if (exists("dir.exists")) { + if (!dir.exists(DATADIR)) { + dir.create(DATADIR, recursive = T) + } + } else { + dir.create(DATADIR, recursive = T, showWarnings = F) } - } else { - dir.create(DATADIR, recursive = T, showWarnings = F) + return(file.path(DATADIR, paste0(file_name, ".nc"))) } - return(file.path(DATADIR, paste0(file_name, ".nc"))) -} # basic switch to create figures names and folders # (use recursive structure from v0.6) -fig_builder <- function(FIGDIR, dir_name, file_name, dataset, expid, - ens, year1, year2, season, output_file_type) { - +fig_builder <- function(FIGDIR, + dir_name, + file_name, + dataset, + expid, + ens, + year1, + year2, + season, + output_file_type) { # loop on descriptors that are concatenated to create dir and file name - descriptors <- c(dataset, expid, ens, paste0(year1, "-", 
year2), season) + descriptors <- + c(dataset, expid, ens, paste0(year1, "-", year2), season) for (dcode in descriptors) { if (dcode != "NO") { FIGDIR <- file.path(FIGDIR, dcode) @@ -237,7 +261,8 @@ season2timeseason <- function(season) { } } print(timeseason) - if (length(timeseason) == 0 | min(timeseason) < 0 | max(timeseason) > 13) { + if (length(timeseason) == 0 | + min(timeseason) < 0 | max(timeseason) > 13) { stop("wrong season selected!") } return(timeseason) @@ -245,7 +270,8 @@ season2timeseason <- function(season) { # leap year treu/false function is_leapyear <- function(year) { - return( ( (year %% 4 == 0) & (year %% 100 != 0) ) | (year %% 400 == 0) ) + return(((year %% 4 == 0) & + (year %% 100 != 0)) | (year %% 400 == 0)) } # check number of days for each month @@ -274,7 +300,9 @@ power_date_new <- function(datas) { etime <- list( day = as.numeric(format(datas, "%d")), month = as.numeric(format(datas, "%m")), - year = as.numeric(format(datas, "%Y")), data = datas, season = seas + year = as.numeric(format(datas, "%Y")), + data = datas, + season = seas ) print("Time Array Built") print(paste("Length:", length(seas))) @@ -297,293 +325,343 @@ power_date_new <- function(datas) { # time selection based on package PCICt must be specifed with # both "tmonths" and "tyears" flags # it returns a list including its own dimensions -ncdf_opener_universal <- function(namefile, namevar = NULL, namelon = NULL, - namelat = NULL, tmonths = NULL, - tyears = NULL, rotate = "full", - interp2grid = F, grid = "r144x73", - remap_method = "remapcon2", - exportlonlat = TRUE, verbose = TRUE) { - - # load package - require(ncdf4) - - # verbose-only printing function - printv <- function(value) { - if (verbose) { - print(value) +ncdf_opener_universal <- # nolint + function(namefile, + namevar = NULL, + namelon = NULL, + namelat = NULL, + tmonths = NULL, + tyears = NULL, + rotate = "full", + interp2grid = FALSE, + fillmiss = FALSE, + grid = "r144x73", + remap_method = "remapscon2", + exportlonlat = TRUE, + verbose = TRUE) { + # load package + require(ncdf4) + + # verbose-only printing function + printv <- function(value) { + if (verbose) { + print(value) + } } - } - - # check if timeflag is activated or full file must be loaded - if (is.null(tyears) | is.null(tmonths)) { - timeflag <- FALSE - printv("No time and months specified, loading all the data") - } else { - timeflag <- TRUE - printv("tyears and tmonths are set!") - require(PCICt) - } - - if (rotate == "full") { - rot <- T - move1 <- move2 <- 1 / 2 - } # 180 degrees rotation of longitude - if (rotate == "half") { - rot <- T - move1 <- 1 / 4 - move2 <- 3 / 4 - } # 90 degree rotation (useful for TM90) - if (rotate == "no") { - rot <- F - } # keep as it is - - # interpolation made with CDO: second order conservative remapping - if (interp2grid) { - print(paste("Remapping with CDO on", grid, "grid")) - filename <- basename(normalizePath(namefile)) - filedir <- dirname(normalizePath(namefile)) - cdo <- Sys.which("cdo") - tempfile <- paste0(file.path(filedir, paste0("tempfile_", filename))) - system2(cdo, args = c(paste0(remap_method, ",", grid), namefile, tempfile)) - namefile <- tempfile - } - # define rotate function (faster than with apply) - rotation <- function(line) { - vettore <- line - dims <- length(dim(vettore)) - # for longitudes - if (dims == 1) { - ll <- length(line) - line[(ll * move1):ll] <- vettore[1:(ll * move2 + 1)] - line[1:(ll * move1 - 1)] <- vettore[(ll * move2 + 2):ll] - 360 - } - # for x,y data - if (dims == 2) { - ll <- length(line[, 
1]) - line[(ll * move1):ll, ] <- vettore[1:(ll * move2 + 1), ] - line[1:(ll * move1 - 1), ] <- vettore[(ll * move2 + 2):ll, ] - } - # for x,y,t data - if (dims == 3) { - ll <- length(line[, 1, 1]) - line[(ll * move1):ll, , ] <- vettore[1:(ll * move2 + 1), , ] - line[1:(ll * move1 - 1), , ] <- vettore[(ll * move2 + 2):ll, , ] - } - # for x,y,z,t data - if (dims == 4) { - ll <- length(line[, 1, 1, 1]) - line[(ll * move1):ll, , , ] <- vettore[1:(ll * move2 + 1), , , ] - line[1:(ll * move1 - 1), , , ] <- vettore[(ll * move2 + 2):ll, , , ] + # check if timeflag is activated or full file must be loaded + if (is.null(tyears) | is.null(tmonths)) { + timeflag <- FALSE + printv("No time and months specified, loading all the data") + } else { + timeflag <- TRUE + printv("tyears and tmonths are set!") + require(PCICt) } - return(line) - } - - # define flip function ('cos rev/apply is not working) - flipper <- function(field) { - dims <- length(dim(field)) - if (dims == 2) { - ll <- length(field[1, ]) - field <- field[, ll:1] - } # for x,y data - if (dims == 3) { - ll <- length(field[1, , 1]) - field <- field[, ll:1, ] - } # for x,y,t data - if (dims == 4) { - ll <- length(field[1, , 1, 1]) - field <- field[, ll:1, , ] - } # for x,y,z,t data - return(field) - } - # opening file: getting variable (if namevar is given, that variable - # is extracted) - printv(paste("opening file:", namefile)) - a <- nc_open(namefile) - print(paste("Loading", namevar, "...")) - - # if no name provided load the only variable available - if (is.null(namevar)) { - namevar <- names(a$var) - if (length(namevar) > 1) { - print(namevar) - stop(paste("More than one var in the files, please select it with"), - " namevar=yourvar", - sep = "" - ) + if (rotate == "full") { + rot <- T + move1 <- move2 <- 1 / 2 + } # 180 degrees rotation of longitude + if (rotate == "half") { + rot <- T + move1 <- 1 / 4 + move2 <- 3 / 4 + } # 90 degree rotation (useful for TM90) + if (rotate == "no") { + rot <- F + } # keep as it is + + # interpolation made with CDO: second order conservative remapping + if (interp2grid) { + print(paste("Remapping with CDO on", grid, "grid")) + if (is.null(namevar)) { + namefile <- cdo(remap_method, + args = paste0("'", grid, "'"), + input = namefile + ) + } else { + selectf <- cdo("selvar", args = namevar, input = namefile) + gridf <- tempfile() + cdo("griddes", input = grid, stdout = gridf) + namefile <- cdo(remap_method, args = gridf, input = selectf) + unlink(c(selectf, gridf)) + } } - } - - # load axis: updated version, looking for dimension directly - # stored inside the variable - naxis <- unlist(lapply(a$var[[namevar]]$dim, function(x) x["name"])) - for (axis in naxis) { - assign(axis, ncvar_get(a, axis)) - printv(paste(axis, ":", length(get(axis)), "records")) - } - if (timeflag) { - printv("selecting years and months") - - # based on preprocessing of CDO time format: get calendar type and - # use PCICt package for irregular data - units <- ncatt_get(a, "time", "units")$value - caldata <- ncatt_get(a, "time", "calendar")$value - if (grepl("day as", units, fixed = TRUE) | - grepl("days as", units, fixed = TRUE)) { - timeline <- as.PCICt(as.character(time), - format = "%Y%m%d", - cal = caldata - ) - } else if (grepl("day since", units, fixed = TRUE) | - grepl("days since", units, fixed = TRUE)) { - origin <- unlist(strsplit(units, "[a-zA-Z ]+"))[2] - origin.pcict <- as.PCICt(origin, cal = caldata, format = "%Y-%m-%d") - timeline <- origin.pcict + (floor(time) * 86400) - } else { - printv(units) - stop("Time 
units from NetCDF unsupported. Stopping!!!") + if (fillmiss) { + namefile <- cdo("fillmiss", input = namefile) } - # break if the calendar has not been recognized - if (any(is.na(timeline))) { - stop("Calendar from NetCDF is unsupported or not present. Stopping!!!") + # define rotate function (faster than with apply) + rotation <- function(line) { + vettore <- line + dims <- length(dim(vettore)) + # for longitudes + if (dims == 1) { + ll <- length(line) + line[(ll * move1):ll] <- vettore[1:(ll * move2 + 1)] + line[1:(ll * move1 - 1)] <- vettore[(ll * move2 + 2):ll] - 360 + } + # for x,y data + if (dims == 2) { + ll <- length(line[, 1]) + line[(ll * move1):ll, ] <- vettore[1:(ll * move2 + 1), ] + line[1:(ll * move1 - 1), ] <- vettore[(ll * move2 + 2):ll, ] + } + # for x,y,t data + if (dims == 3) { + ll <- length(line[, 1, 1]) + line[(ll * move1):ll, , ] <- vettore[1:(ll * move2 + 1), , ] + line[1:(ll * move1 - 1), , ] <- + vettore[(ll * move2 + 2):ll, , ] + } + # for x,y,z,t data + if (dims == 4) { + ll <- length(line[, 1, 1, 1]) + line[(ll * move1):ll, , , ] <- + vettore[1:(ll * move2 + 1), , , ] + line[1:(ll * move1 - 1), , , ] <- + vettore[(ll * move2 + 2):ll, , , ] + } + return(line) } - # break if the data requested is not there - lastday_base <- paste0(max(tyears), "-", max(tmonths), "-28") - # uses number.days.month, which loops to get the month change - lastday <- as.PCICt(paste0( - max(tyears), "-", max(tmonths), "-", - number_days_month(lastday_base) - ), cal = caldata, format = "%Y-%m-%d") - firstday <- as.PCICt(paste0(min(tyears), "-", min(tmonths), "-01"), - cal = caldata, format = "%Y-%m-%d" - ) + # define flip function ('cos rev/apply is not working) + flipper <- function(field) { + dims <- length(dim(field)) + if (dims == 2) { + ll <- length(field[1, ]) + field <- field[, ll:1] + } # for x,y data + if (dims == 3) { + ll <- length(field[1, , 1]) + field <- field[, ll:1, ] + } # for x,y,t data + if (dims == 4) { + ll <- length(field[1, , 1, 1]) + field <- field[, ll:1, , ] + } # for x,y,z,t data + return(field) + } - if (max(timeline) < lastday | min(timeline) > firstday) { - print(firstday) - print(lastday) - print(min(timeline)) - print(max(timeline)) - stop("You requested a time interval that is not present in the NetCDF") + # opening file: getting variable (if namevar is given, that variable + # is extracted) + printv(paste("opening file:", namefile)) + a <- nc_open(namefile) + print(paste("Loading", namevar, "...")) + + # if no name provided load the only variable available + if (is.null(namevar)) { + namevar <- names(a$var) + if (length(namevar) > 1) { + print(namevar) + stop( + paste("More than one var in the files, please select it with"), + " namevar=yourvar", + sep = "" + ) + } } - } - # time selection and variable loading - printv("loading full field...") - field <- ncvar_get(a, namevar) + # load axis: updated version, looking for dimension directly + # stored inside the variable + naxis <- + unlist(lapply(a$var[[namevar]]$dim, function(x) + x["name"])) + for (axis in naxis) { + assign(axis, ncvar_get(a, axis)) + printv(paste(axis, ":", length(get(axis)), "records")) + } - if (timeflag) { + if (timeflag) { + printv("selecting years and months") + + # based on preprocessing of CDO time format: get calendar type and + # use PCICt package for irregular data + units <- ncatt_get(a, "time", "units")$value + caldata <- ncatt_get(a, "time", "calendar")$value + if (grepl("day as", units, fixed = TRUE) | + grepl("days as", units, fixed = TRUE)) { + timeline <- 
as.PCICt(as.character(time), + format = "%Y%m%d", + cal = caldata + ) + } else if (grepl("day since", units, fixed = TRUE) | + grepl("days since", units, fixed = TRUE)) { + origin <- unlist(strsplit(units, "[a-zA-Z ]+"))[2] + origin.pcict <- + as.PCICt(origin, cal = caldata, format = "%Y-%m-%d") + timeline <- origin.pcict + (floor(time) * 86400) + } else { + printv(units) + stop("Time units from NetCDF unsupported. Stopping!!!") + } - # select data we need - select <- which(as.numeric(format(timeline, "%Y")) %in% - tyears & as.numeric(format(timeline, "%m")) %in% tmonths) - field <- field[, , select] - time <- timeline[select] + # break if the calendar has not been recognized + if (any(is.na(timeline))) { + stop("Calendar from NetCDF is unsupported or not present. Stopping!!!") + } - printv(paste("This is a", caldata, "calendar")) - printv(paste( - length(time), "days selected from", time[1], "to", - time[length(time)] - )) + # break if the data requested is not there + lastday_base <- paste0(max(tyears), "-", max(tmonths), "-28") + # uses number.days.month, which loops to get the month change + lastday <- as.PCICt( + paste0( + max(tyears), + "-", + max(tmonths), + "-", + number_days_month(lastday_base) + ), + cal = caldata, + format = "%Y-%m-%d" + ) + firstday <- + as.PCICt(paste0(min(tyears), "-", min(tmonths), "-01"), + cal = caldata, + format = "%Y-%m-%d" + ) + + if (max(timeline) < lastday | min(timeline) > firstday) { + print(firstday) + print(lastday) + print(min(timeline)) + print(max(timeline)) + stop("You requested a time interval that is not present in the NetCDF") + } + } - printv(paste("Months that have been loaded are.. ")) - printv(unique(format(time, "%Y-%m"))) - } + # time selection and variable loading + printv("loading full field...") + field <- ncvar_get(a, namevar) + + if (timeflag) { + # select data we need + select <- which(as.numeric(format(timeline, "%Y")) %in% + tyears & as.numeric(format(timeline, "%m")) %in% tmonths) + field <- field[, , select] + time <- timeline[select] + + printv(paste("This is a", caldata, "calendar")) + printv(paste( + length(time), "days selected from", time[1], "to", + time[length(time)] + )) + + printv(paste("Months that have been loaded are.. 
")) + printv(unique(format(time, "%Y-%m"))) + } - # check for dimensions (presence or not of time dimension) - dimensions <- length(dim(field)) - - # if dimensions are multiple, get longitude, latitude - # if needed, rotate and flip the array - xlist <- c("lon", "Lon", "longitude", "Longitude") - ylist <- c("lat", "Lat", "latitude", "Latitude") - if (dimensions > 1) { - # assign ics and ipsilon - if (is.null(namelon)) { - if (any(xlist %in% naxis)) { - ics <- get(naxis[naxis %in% xlist], a$dim)$vals + # check for dimensions (presence or not of time dimension) + dimensions <- length(dim(field)) + + # if dimensions are multiple, get longitude, latitude + # if needed, rotate and flip the array + xlist <- c("lon", "Lon", "longitude", "Longitude") + ylist <- c("lat", "Lat", "latitude", "Latitude") + if (dimensions > 1) { + # assign ics and ipsilon + if (is.null(namelon)) { + if (any(xlist %in% naxis)) { + ics <- get(naxis[naxis %in% xlist], a$dim)$vals + } else { + printv("WARNING: No lon found") + ics <- NA + } } else { - printv("WARNING: No lon found") - ics <- NA + ics <- ncvar_get(a, namelon) } - } else { - ics <- ncvar_get(a, namelon) - } - if (is.null(namelat)) { - if (any(ylist %in% naxis)) { - ipsilon <- get(naxis[naxis %in% ylist], a$dim)$vals + if (is.null(namelat)) { + if (any(ylist %in% naxis)) { + ipsilon <- get(naxis[naxis %in% ylist], a$dim)$vals + } else { + printv("WARNING: No lat found") + ipsilon <- NA + } } else { - printv("WARNING: No lat found") - ipsilon <- NA + ipsilon <- ncvar_get(a, namelat) } - } else { - ipsilon <- ncvar_get(a, namelat) - } - # longitute rotation around Greenwich - if (rot) { - printv("rotating...") - ics <- rotation(ics) - field <- rotation(field) - } - if (ipsilon[2] < ipsilon[1] & length(ipsilon) > 1) { - if (length(ics) > 1) { - print("flipping...") - ipsilon <- sort(ipsilon) - field <- flipper(field) + # longitute rotation around Greenwich + if (rot) { + printv("rotating...") + ics <- rotation(ics) + field <- rotation(field) + } + if (ipsilon[2] < ipsilon[1] & length(ipsilon) > 1) { + if (length(ics) > 1) { + print("flipping...") + ipsilon <- sort(ipsilon) + field <- flipper(field) + } } - } - # exporting variables to the main program - if (exportlonlat) { - assign("ics", ics, envir = .GlobalEnv) - assign("ipsilon", ipsilon, envir = .GlobalEnv) - } - # if ics and ipsilon exists, assign the rearranged values - if (!is.na(ics[1])) { - assign(naxis[naxis %in% c(xlist, namelon)], ics) + # exporting variables to the main program + if (exportlonlat) { + assign("ics", ics, envir = .GlobalEnv) + assign("ipsilon", ipsilon, envir = .GlobalEnv) + } + # if ics and ipsilon exists, assign the rearranged values + if (!is.na(ics[1])) { + assign(naxis[naxis %in% c(xlist, namelon)], ics) + } + if (!is.na(ipsilon[1])) { + assign(naxis[naxis %in% c(ylist, namelat)], ipsilon) + } } - if (!is.na(ipsilon[1])) { - assign(naxis[naxis %in% c(ylist, namelat)], ipsilon) + + if (dimensions > 4) { + stop("This file is more than 4D file") } - } - if (dimensions > 4) { - stop("This file is more than 4D file") - } + # close connection + nc_close(a) - # close connection - nc_close(a) + # remove interpolated file + if (interp2grid) { + system2("rm", tempfile) + } - # remove interpolated file - if (interp2grid) { - system2("rm", tempfile) - } + # showing array properties + printv(paste(dim(field))) + if (timeflag) { + printv(paste("From", time[1], "to", time[length(time)])) + } - # showing array properties - printv(paste(dim(field))) - if (timeflag) { - printv(paste("From", 
time[1], "to", time[length(time)])) + # returning file list + return(mget(c("field", naxis))) } - # returning file list - return(mget(c("field", naxis))) -} - # ncdf_opener is a simplified wrapper for ncdf_opener_universal which returns # only the field, ignoring the list and no verbosity -ncdf_opener <- function(namefile, namevar = NULL, namelon = NULL, - namelat = NULL, tmonths = NULL, tyears = NULL, - rotate = "full", interp2grid = F, grid = "r144x73", - remap_method = "remapcon2", - exportlonlat = TRUE, verbose = FALSE) { +ncdf_opener <- function(namefile, + namevar = NULL, + namelon = NULL, + namelat = NULL, + tmonths = NULL, + tyears = NULL, + rotate = "full", + interp2grid = FALSE, + fillmiss = FALSE, + grid = "r144x73", + remap_method = "remapscon2", + exportlonlat = TRUE, + verbose = FALSE) { field <- ncdf_opener_universal( - namefile, namevar, namelon, namelat, - tmonths, tyears, rotate, interp2grid, - grid, remap_method, exportlonlat, verbose + namefile, + namevar, + namelon, + namelat, + tmonths, + tyears, + rotate, + interp2grid, + fillmiss, + grid, + remap_method, + exportlonlat, + verbose ) return(field$field) } @@ -593,59 +671,89 @@ ncdf_opener <- function(namefile, namevar = NULL, namelon = NULL, ########################################################## # function to open devices -open_plot_device <- function(figname, output_file_type, special = FALSE) { - # Choose output format for figure - output_file_type <- tolower(output_file_type) - if (special == FALSE) { - if (output_file_type == "png") { - png(filename = figname, width = png_width, height = png_height) - } else if (output_file_type == "pdf") { - pdf(file = figname, width = pdf_width, height = pdf_height, onefile = T) - } else if ( (output_file_type == "eps") | - (output_file_type == "epsi") | - (output_file_type == "ps") ) { - setEPS( - width = pdf_width, height = pdf_height, onefile = T, - paper = "special" - ) - postscript(figname) - } - } else { - if (output_file_type == "png") { - png( - filename = figname, width = png_width / af, - height = png_height * af / 2 - ) - } else if (output_file_type == "pdf") { - pdf( - file = figname, width = pdf_width / af, - height = pdf_height * af / 2, onefile = T - ) - } else if ( (output_file_type == "eps") | - (output_file_type == "epsi") | - (output_file_type == "ps") ) { - setEPS( - width = pdf_width / af, - height = pdf_height * af / 2, onefile = T, paper = "special" - ) - postscript(figname) +open_plot_device <- + function(figname, output_file_type, special = FALSE) { + # Choose output format for figure + output_file_type <- tolower(output_file_type) + if (special == FALSE) { + if (output_file_type == "png") { + png( + filename = figname, + width = png_width, + height = png_height + ) + } else if (output_file_type == "pdf") { + pdf( + file = figname, + width = pdf_width, + height = pdf_height, + onefile = T + ) + } else if ((output_file_type == "eps") | + (output_file_type == "epsi") | + (output_file_type == "ps")) { + setEPS( + width = pdf_width, + height = pdf_height, + onefile = T, + paper = "special" + ) + postscript(figname) + } + } else { + if (output_file_type == "png") { + png( + filename = figname, + width = png_width / af, + height = png_height * af / 2 + ) + } else if (output_file_type == "pdf") { + pdf( + file = figname, + width = pdf_width / af, + height = pdf_height * af / 2, + onefile = T + ) + } else if ((output_file_type == "eps") | + (output_file_type == "epsi") | + (output_file_type == "ps")) { + setEPS( + width = pdf_width / af, + height = 
pdf_height * af / 2, + onefile = T, + paper = "special" + ) + postscript(figname) + } } } -} # extensive filled_contour function -filled_contour3 <- +filled_contour3 <- # nolint function(x = seq(0, 1, length.out = nrow(z)), - y = seq(0, 1, length.out = ncol(z)), z, + y = seq(0, 1, length.out = ncol(z)), + z, xlim = range(x, finite = TRUE), - ylim = range(y, finite = TRUE), zlim = range(z, finite = TRUE), - levels = pretty(zlim, nlevels), nlevels = 20, + ylim = range(y, finite = TRUE), + zlim = range(z, finite = TRUE), + levels = pretty(zlim, nlevels), + nlevels = 20, color.palette = cm.colors, - col = color.palette(length(levels) - 1), extend = TRUE, - plot.title, plot.axes, key.title, key.axes, asp = NA, - xaxs = "i", yaxs = "i", las = 1, - axes = TRUE, frame.plot = axes, mar, ...) { + col = color.palette(length(levels) - 1), + extend = TRUE, + plot.title, + plot.axes, + key.title, + key.axes, + asp = NA, + xaxs = "i", + yaxs = "i", + las = 1, + axes = TRUE, + frame.plot = axes, + mar, + ...) { # modification by Ian Taylor of the filled_contour function # to remove the key and facilitate overplotting with contour() # further modified by Carey McGilliard and Bridget Ferris @@ -682,7 +790,13 @@ filled_contour3 <- } plot.new() - plot.window(xlim, ylim, "", xaxs = xaxs, yaxs = yaxs, asp = asp) + plot.window(xlim, + ylim, + "", + xaxs = xaxs, + yaxs = yaxs, + asp = asp + ) if (!is.matrix(z) || nrow(z) <= 1 || ncol(z) <= 1) { stop("no proper 'z' matrix specified") } @@ -694,7 +808,11 @@ filled_contour3 <- ) if (missing(plot.axes)) { if (axes) { - title(main = "", xlab = "", ylab = "") + title( + main = "", + xlab = "", + ylab = "" + ) Axis(x, side = 1, ...) Axis(y, side = 2, ...) } @@ -713,11 +831,17 @@ filled_contour3 <- invisible() } -image_scale3 <- function(z, levels, color.palette = heat.colors, - colorbar.label = "image.scale", extend = T, - line.label = 2, line.colorbar = 0, cex.label = 1, - cex.colorbar = 1, colorbar.width = 1, ...) { - +image_scale3 <- function(z, + levels, + color.palette = heat.colors, + colorbar.label = "image.scale", + extend = T, + line.label = 2, + line.colorbar = 0, + cex.label = 1, + cex.colorbar = 1, + colorbar.width = 1, + ...) { # save properties from main plotting region old.par <- par(no.readonly = TRUE) mfg.save <- par()$mfg @@ -730,7 +854,8 @@ image_scale3 <- function(z, levels, color.palette = heat.colors, lp <- line.colorbar / 100 new.fig <- c( old.fig[2] - 0.07 * xscal * lw - lp, - old.fig[2] - 0.03 * xscal - lp, old.fig[3] + 0.1 * yscal, + old.fig[2] - 0.03 * xscal - lp, + old.fig[3] + 0.1 * yscal, old.fig[4] - 0.1 * yscal ) @@ -741,7 +866,11 @@ image_scale3 <- function(z, levels, color.palette = heat.colors, col <- color.palette(length(levels) - 1) # starting plot - par(mar = c(1, 1, 1, 1), fig = new.fig, new = TRUE) + par( + mar = c(1, 1, 1, 1), + fig = new.fig, + new = TRUE + ) # creating polygons for legend poly <- vector(mode = "list", length(col)) @@ -757,34 +886,58 @@ image_scale3 <- function(z, levels, color.palette = heat.colors, } else { ylim <- range(levels) } - plot(1, 1, - t = "n", ylim = ylim, xlim = xlim, axes = FALSE, - xlab = "", ylab = "", xaxs = "i", yaxs = "i", ... + plot( + 1, + 1, + t = "n", + ylim = ylim, + xlim = xlim, + axes = FALSE, + xlab = "", + ylab = "", + xaxs = "i", + yaxs = "i", + ... 
) for (i in seq(poly)) { polygon(c(0, 0, 1, 1), poly[[i]], col = col[i], border = NA) } if (extend) { - polygon(c(0, 1, 1 / 2), c(levels[1], levels[1], levels[1] - dl), - col = col[1], border = NA + polygon(c(0, 1, 1 / 2), + c(levels[1], levels[1], levels[1] - dl), + col = col[1], + border = NA + ) + polygon(c(0, 1, 1 / 2), + c( + levels[length(levels)], levels[length(levels)], + levels[length(levels)] + dl + ), + col = col[length(col)], + border = NA + ) + polygon( + c(0, 0, 1 / 2, 1, 1, 1 / 2), + c( + levels[1], levels[length(levels)], + levels[length(levels)] + dl, levels[length(levels)], levels[1], + levels[1] - dl + ), + border = "black", + lwd = 2 ) - polygon(c(0, 1, 1 / 2), c( - levels[length(levels)], levels[length(levels)], - levels[length(levels)] + dl ), - col = col[length(col)], border = NA) - polygon(c(0, 0, 1 / 2, 1, 1, 1 / 2), c( - levels[1], levels[length(levels)], - levels[length(levels)] + dl, levels[length(levels)], levels[1], - levels[1] - dl - ), border = "black", lwd = 2) ylim0 <- range(levels) prettyspecial <- pretty(ylim0) prettyspecial <- prettyspecial[prettyspecial <= max(ylim0) & prettyspecial >= min(ylim0)] - axis(4, - las = 1, cex.axis = cex.colorbar, at = prettyspecial, - labels = prettyspecial, ... + axis( + 4, + las = 1, + cex.axis = cex.colorbar, + at = prettyspecial, + labels = prettyspecial, + ... ) } else { box() @@ -792,7 +945,12 @@ image_scale3 <- function(z, levels, color.palette = heat.colors, } # box, axis and leged - mtext(colorbar.label, line = line.label, side = 4, cex = cex.label, ...) + mtext(colorbar.label, + line = line.label, + side = 4, + cex = cex.label, + ... + ) # resetting properties for starting a new plot (mfrow style) par(old.par) @@ -802,73 +960,110 @@ image_scale3 <- function(z, levels, color.palette = heat.colors, # function for interpolation and projection of a 2D field on a # mapproj R projection -proj_plot <- function(lon, lat, field, lmin = NULL, proj = "azequalarea", - param = NULL, orient = c(90, 0, 0), npoints = 201) { +proj_plot <- + function(lon, + lat, + field, + lmin = NULL, + proj = "azequalarea", + param = NULL, + orient = c(90, 0, 0), + npoints = 201) { + # default is azimuthal equal area map + + # required packages + require(mapproj) + require(akima) + + # it provides lower latitude limit for plots + if (is.null(lmin)) { + lmin <- min(lat) + } - # default is azimuthal equal area map + # build grids + lon.grid <- rep(lon, length(lat)) + lat.grid <- sort(rep(ipsilon, length(lon))) + + # project grid + proj.grid <- mapproject( + lon.grid, + lat.grid, + projection = proj, + parameters = param, + orientation = orient + ) - # required packages - require(mapproj) - require(akima) + # provide limits for future plots (for polar projection) + limiter <- mapproject(c(0, 90, 180, 270), + rep(lmin, 4), + proj = "", + orientation = orient + ) + xlims <- sort(c(limiter$x[2], limiter$x[4])) + ylims <- sort(c(limiter$y[1], limiter$y[3])) + + # plot grid + lon.plot <- + seq(min(proj.grid$x, na.rm = T), + max(proj.grid$x, na.rm = T), + length.out = npoints + ) + lat.plot <- + seq(min(proj.grid$y, na.rm = T), + max(proj.grid$y, na.rm = T), + length.out = npoints + ) - # it provides lower latitude limit for plots - if (is.null(lmin)) { - lmin <- min(lat) + # interpolation (akima needed) + good <- + is.finite(field) & is.finite(proj.grid$x) & is.finite(proj.grid$y) + projected <- + interp(proj.grid$x[good], + proj.grid$y[good], + field[good], + lon.plot, + lat.plot, + duplicate = "strip" + ) + return(projected = list( + x = projected$x, 
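      # (Aside: a minimal sketch of the projection step used here, assuming
      # only the mapproj package already required above. mapproject() turns
      # lon/lat pairs into projection coordinates, which akima::interp()
      # then resamples onto the regular npoints x npoints plotting grid:
      #   p <- mapproject(c(0, 90, 180, 270), rep(60, 4),
      #     projection = "azequalarea", orientation = c(90, 0, 0)
      #   )
      # p$x and p$y hold the projected coordinates, with NA for points that
      # cannot be projected.)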
+ y = projected$y, + z = projected$z, + xlim = xlims, + ylim = ylims + )) } - # build grids - lon.grid <- rep(lon, length(lat)) - lat.grid <- sort(rep(ipsilon, length(lon))) - - # project grid - proj.grid <- mapproject(lon.grid, lat.grid, - projection = proj, - parameters = param, orientation = orient - ) - - # provide limits for future plots (for polar projection) - limiter <- mapproject(c(0, 90, 180, 270), rep(lmin, 4), - proj = "", orientation = orient - ) - xlims <- sort(c(limiter$x[2], limiter$x[4])) - ylims <- sort(c(limiter$y[1], limiter$y[3])) - - # plot grid - lon.plot <- seq(min(proj.grid$x, na.rm = T), max(proj.grid$x, na.rm = T), - length.out = npoints - ) - lat.plot <- seq(min(proj.grid$y, na.rm = T), max(proj.grid$y, na.rm = T), - length.out = npoints - ) - - # interpolation (akima needed) - good <- is.finite(field) & is.finite(proj.grid$x) & is.finite(proj.grid$y) - projected <- interp(proj.grid$x[good], proj.grid$y[good], field[good], - lon.plot, lat.plot, - duplicate = "strip" - ) - return(projected = list( - x = projected$x, y = projected$y, - z = projected$z, xlim = xlims, ylim = ylims - )) -} - # addland function based on map which can handle projections -proj_addland <- function(proj = "no", orient = c(90, 0, 0), - param = NULL, color = "black") { - +proj_addland <- function(proj = "no", + orient = c(90, 0, 0), + param = NULL, + color = "black") { # required packages require(maps) require(mapproj) if (proj == "no") { - map("world", regions = ".", interior = F, exact = F, boundary = T, add = T) + map( + "world", + regions = ".", + interior = F, + exact = F, + boundary = T, + add = T + ) } else { # get map, project and do the lines box() - map("world", - add = T, projection = proj, orientation = orient, - parameter = param, interior = F, exact = F, boundary = T + map( + "world", + add = T, + projection = proj, + orientation = orient, + parameter = param, + interior = F, + exact = F, + boundary = T ) # default lines for northern hemisphere @@ -893,18 +1088,36 @@ proj_addland <- function(proj = "no", orient = c(90, 0, 0), plot_prepare <- function(ics, ipsilon, field, proj, lat_lim) { if (proj == "no") { outfile <- list( - x = ics, y = ipsilon, z = field, xlim = range(ics), - ylim = lat_lim, xlab = "Longitude", ylab = "Latitude", axes = T + x = ics, + y = ipsilon, + z = field, + xlim = range(ics), + ylim = lat_lim, + xlab = "Longitude", + ylab = "Latitude", + axes = T ) } else { field[is.na(field)] <- 0 - p <- proj_plot(ics, ipsilon, field, - lmin = lat_lim[1], proj = proj, - param = NULL, orient = c(90, 0, 0), npoints = 80 + p <- proj_plot( + ics, + ipsilon, + field, + lmin = lat_lim[1], + proj = proj, + param = NULL, + orient = c(90, 0, 0), + npoints = 80 ) outfile <- list( - x = p$x, y = p$y, z = p$z, xlim = p$xlim, - ylim = p$ylim, xlab = "", ylab = "", axes = F + x = p$x, + y = p$y, + z = p$z, + xlim = p$xlim, + ylim = p$ylim, + xlab = "", + ylab = "", + axes = F ) } return(outfile) @@ -912,7 +1125,6 @@ plot_prepare <- function(ics, ipsilon, field, proj, lat_lim) { # function that provides labels and names for Blocking Plots field_details <- function(field) { - # default value legend_distance <- 3 lev_hist <- NULL @@ -942,7 +1154,8 @@ field_details <- function(field) { lev_field <- seq(0, 36, 3) lev_diff <- seq(-10.5, 10.5, 1) legend_unit <- "Blocked Days (%)" - title_name <- "Instantaneous Blocking frequency (GHGS2 condition):" + title_name <- + "Instantaneous Blocking frequency (GHGS2 condition):" } if (field == "BlockEvents") { @@ -1028,9 +1241,13 @@ field_details 
<- function(field) { out <- list( - color_field = color_field, color_diff = color_diff, - lev_field = lev_field, lev_diff = lev_diff, lev_hist = lev_hist, - legend_unit = legend_unit, legend_distance = legend_distance, + color_field = color_field, + color_diff = color_diff, + lev_field = lev_field, + lev_diff = lev_diff, + lev_hist = lev_hist, + legend_unit = legend_unit, + legend_distance = legend_distance, title_name = title_name ) return(out) @@ -1050,58 +1267,71 @@ time_persistence <- function(timeseries, persistence = 5) { # blocking 5 days tracking -blocking_persistence <- function(field, minduration = 5, time.array) { - - # function for persistence - pers2 <- function(timeseries, persistence, time.array) { - dd <- min(time.array$season):max(time.array$season) - nn <- sapply(dd, function(x) { - time_persistence(timeseries[which(time.array$season == x)], persistence) - }) - xx <- c(unlist(nn)) - return(xx) - } +blocking_persistence <- + function(field, minduration = 5, time.array) { + # function for persistence + pers2 <- function(timeseries, persistence, time.array) { + dd <- min(time.array$season):max(time.array$season) + nn <- sapply(dd, function(x) { + time_persistence( + timeseries[which(time.array$season == x)], + persistence + ) + }) + xx <- c(unlist(nn)) + return(xx) + } - # check for etime - if (length(time.array$month) != length(field[1, 1, ])) { - stop("Wrong time array! Exiting...") - } + # check for etime + if (length(time.array$month) != length(field[1, 1, ])) { + stop("Wrong time array! Exiting...") + } - print("Time filtering...") - newfield <- apply(field, c(1, 2), function(x) pers2(x, - persistence = minduration, time.array + print("Time filtering...") + newfield <- apply(field, c(1, 2), function(x) + pers2(x, + persistence = minduration, time.array + )) + newfield <- aperm(newfield, c(2, 3, 1)) + print("Mean field...") + meanfield <- apply(newfield, c(1, 2), mean, na.rm = T) * 100 + + + print("Events detection...") + maxdim <- max(apply( + newfield, c(1, 2), + function(x) + length(rle(x)$length[which(rle(x)$values == 1)]) )) - newfield <- aperm(newfield, c(2, 3, 1)) - print("Mean field...") - meanfield <- apply(newfield, c(1, 2), mean, na.rm = T) * 100 - - - print("Events detection...") - maxdim <- max(apply( - newfield, c(1, 2), - function(x) length(rle(x)$length[which(rle(x)$values == 1)]) - )) - events <- apply( - newfield, c(1, 2), - function(x) c( - rle(x)$lengths[which(rle(x)$values == 1)], - rep(NA, maxdim - length(rle(x)$length[which(rle(x)$values == 1)])) - ) - ) - events <- aperm(events, c(2, 3, 1)) - print("Mean Duration...") - duration <- apply(events, c(1, 2), mean, na.rm = T) - print("Number of Events...") - nevents <- apply(events, c(1, 2), function(x) length(x[!is.na(x)])) - - out <- list( - track = newfield, percentage = meanfield, duration = duration, - events = events, nevents = nevents - ) - print(quantile(meanfield)) - print(min(duration, na.rm = T)) - return(out) -} + events <- apply( + newfield, c(1, 2), + function(x) + c( + rle(x)$lengths[which(rle(x)$values == 1)], + rep(NA, maxdim - length(rle( + x + )$length[which(rle(x)$values == 1)])) + ) + ) + events <- aperm(events, c(2, 3, 1)) + print("Mean Duration...") + duration <- apply(events, c(1, 2), mean, na.rm = T) + print("Number of Events...") + nevents <- + apply(events, c(1, 2), function(x) + length(x[!is.na(x)])) + + out <- list( + track = newfield, + percentage = meanfield, + duration = duration, + events = events, + nevents = nevents + ) + print(quantile(meanfield)) + 
print(min(duration, na.rm = T)) + return(out) + } # large scale extension with further implementation @@ -1113,21 +1343,27 @@ largescale_extension_if <- function(ics, ipsilon, field) { xreso <- ics[2] - ics[1] passo <- 5 / xreso # horizontal movemenent vertical <- 2.5 / yreso # vertical movement - time <- which(apply(field, 3, max) != 0) # elements length of the dataset + time <- + which(apply(field, 3, max) != 0) # elements length of the dataset # (removing not blocked days) print(paste( - "Box dimension:", passo * 2 * xreso, "° lon x ", - vertical * 2 * yreso, "° lat" + "Box dimension:", + passo * 2 * xreso, + "° lon x ", + vertical * 2 * yreso, + "° lat" )) short <- function(ics, ipsilon, field, passo, vertical) { control <- field - range <- which.min(abs(ipsilon - fimin)):which.min(abs(ipsilon - fimax)) + range <- + which.min(abs(ipsilon - fimin)):which.min(abs(ipsilon - fimax)) # check range for latitude excursion # reduce range considering border effect - new <- rbind(field, field, field) # bind domain for cross-date line - for (i in 1:length(ics)) { + new <- + rbind(field, field, field) # bind domain for cross-date line + for (i in seq_along(ics)) { ii <- i + length(ics) # check to speed up if (!all(new[(ii - passo):(ii + passo), ] == 0)) { @@ -1147,7 +1383,8 @@ largescale_extension_if <- function(ics, ipsilon, field) { tt <- length(time) for (t in time) { progression_bar(t, tt) - field[, , t] <- short(ics, ipsilon, field[, , t], passo, vertical) + field[, , t] <- + short(ics, ipsilon, field[, , t], passo, vertical) } return(field) } @@ -1169,10 +1406,10 @@ longitude_filter <- function(ics, ipsilon, field) { progression_bar(t, tt) new <- rbind(field[, , t], field[, , t], field[, , t]) - for (j in startipsilon:( (startipsilon + estension))) { + for (j in startipsilon:((startipsilon + estension))) { new[, j] <- time_persistence(new[, j], persistence = passo) } - field[, , t] <- new[length(ics) + (1:length(ics)), ] + field[, , t] <- new[length(ics) + (seq_along(ics)), ] } return(field) } @@ -1182,147 +1419,177 @@ longitude_filter <- function(ics, ipsilon, field) { #------------EOFs and regims functions-------------------# ########################################################## -eofs <- function(lon, lat, field, neof = 4, xlim, ylim, method = "SVD", - do_standardize = F, do_regression = F) { - # R tool for computing EOFs based on Singular Value Decomposition - # ("SVD", default) - # or with the eigenvectors of the covariance matrix ("covariance", slower) - # If requested, computes linear regressions and standardizes the PCs - # If you want to use the regressions, remember to standardize the PCs - # Take as input a 3D anomaly field. 
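# A minimal, self-contained sketch of the SVD route described here, using a
# synthetic anomaly matrix (the sizes and names below are illustrative only):
toy_anom <- matrix(rnorm(100 * 30), nrow = 100) # 100 grid points x 30 days
toy_svd <- svd(toy_anom, nu = 4, nv = 4) # leading 4 EOFs
toy_variance <- (toy_svd$d[1:4])^2 / sum((toy_svd$d)^2) # explained variance
toy_pcs <- sweep(toy_svd$v, 2, sqrt(toy_variance), "*") # unstandardized PCs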
- # Requires "personal" functions area_weight, whicher and standardize - - # area weighting, based on the root of cosine - print("Area Weighting...") - ww <- area_weight(lon, lat, root = T) - wwfield <- sweep(field, c(1, 2), ww, "*") - - # selection of the box - box <- wwfield[ - whicher(lon, xlim[1]):whicher(lon, xlim[2]), - whicher(lat, ylim[1]):whicher(lat, ylim[2]), - ] - slon <- lon[whicher(lon, xlim[1]):whicher(lon, xlim[2])] - slat <- lat[whicher(lat, ylim[1]):whicher(lat, ylim[2])] - - # transform 3D field in a matrix - new_box <- array(box, dim = c(dim(box)[1] * dim(box)[2], dim(box)[3])) - - # calling SVD - if (method == "SVD") { - print("Calling SVD...") - SVD <- svd(new_box, nu = neof, nv = neof) - - # extracting EOFs (loading pattern), - # expansions coefficient and variance explained - pattern <- array(SVD$u, dim = c(dim(box)[1], dim(box)[2], neof)) - coefficient <- SVD$v - variance <- (SVD$d[1:neof]) ^ 2 / sum( (SVD$d) ^ 2) - if (do_standardize) { - coefficient <- apply(coefficient, c(2), standardize) - } else { - coefficient <- sweep(coefficient, c(2), sqrt(variance), "*") +eofs <- + function(lon, + lat, + field, + neof = 4, + xlim, + ylim, + method = "SVD", + do_standardize = F, + do_regression = F) { + # R tool for computing EOFs based on Singular Value Decomposition + # ("SVD", default) + # or with the eigenvectors of the covariance matrix ("covariance", slower) + # If requested, computes linear regressions and standardizes the PCs + # If you want to use the regressions, remember to standardize the PCs + # Take as input a 3D anomaly field. + # Requires "personal" functions area_weight, whicher and standardize + + # area weighting, based on the root of cosine + print("Area Weighting...") + ww <- area_weight(lon, lat, root = T) + wwfield <- sweep(field, c(1, 2), ww, "*") + + # selection of the box + box <- wwfield[ + whicher(lon, xlim[1]):whicher(lon, xlim[2]), + whicher(lat, ylim[1]):whicher(lat, ylim[2]), + ] + slon <- lon[whicher(lon, xlim[1]):whicher(lon, xlim[2])] + slat <- lat[whicher(lat, ylim[1]):whicher(lat, ylim[2])] + + # transform 3D field in a matrix + new_box <- + array(box, dim = c(dim(box)[1] * dim(box)[2], dim(box)[3])) + + # calling SVD + if (method == "SVD") { + print("Calling SVD...") + SVD <- svd(new_box, nu = neof, nv = neof) + + # extracting EOFs (loading pattern), + # expansions coefficient and variance explained + pattern <- array(SVD$u, dim = c(dim(box)[1], dim(box)[2], neof)) + coefficient <- SVD$v + variance <- (SVD$d[1:neof])^2 / sum((SVD$d)^2) + if (do_standardize) { + coefficient <- apply(coefficient, c(2), standardize) + } else { + coefficient <- sweep(coefficient, c(2), sqrt(variance), "*") + } } - } - # calling covariance matrix - if (method == "covariance") { - print("Calling eigenvectors of the covariance matrix...") - covma <- cov(t(new_box)) - eig <- eigen(covma) - coef <- (t(new_box) %*% eig$vector)[, 1:neof] - pattern <- array(eig$vectors, dim = c( - dim(box)[1], dim(box)[2], - dim(box)[3] - ))[, , 1:neof] - variance <- eig$values[1:neof] / sum(eig$values) - if (do_standardize) { - coefficient <- apply(coef, c(2), standardize) - } else { - coefficient <- coef + # calling covariance matrix + if (method == "covariance") { + print("Calling eigenvectors of the covariance matrix...") + covma <- cov(t(new_box)) + eig <- eigen(covma) + coef <- (t(new_box) %*% eig$vector)[, 1:neof] + pattern <- array(eig$vectors, dim = c( + dim(box)[1], dim(box)[2], + dim(box)[3] + ))[, , 1:neof] + variance <- eig$values[1:neof] / sum(eig$values) + if 
(do_standardize) { + coefficient <- apply(coef, c(2), standardize) + } else { + coefficient <- coef + } } - } - # linear regressions on anomalies - regression <- NULL - if (do_regression) { - print("Linear Regressions (it can takes a while)... ") - regression <- array(NA, dim = c(length(lon), length(lat), neof)) - # for (i in 1:neof) {regression[,,i]=apply(field,c(1,2), - # function(x) coef(lm(x ~ coefficient[,i]))[2])} - for (i in 1:neof) { - regression[, , i] <- apply( - field, c(1, 2), - function(x) lin.fit(as.matrix(coefficient[, i], - ncol = 1 - ), x)$coefficients - ) + # linear regressions on anomalies + regression <- NULL + if (do_regression) { + print("Linear Regressions (it can takes a while)... ") + regression <- array(NA, dim = c(length(lon), length(lat), neof)) + for (i in 1:neof) { + regression[, , i] <- apply( + field, c(1, 2), + function(x) + lin.fit(as.matrix(coefficient[, i], + ncol = 1 + ), x)$coefficients + ) + } } - } - - # preparing output - print("Finalize...") - pattern <- list(x = slon, y = slat, z = pattern) - out <- list( - pattern = pattern, coeff = coefficient, - variance = variance, regression = regression - ) - return(out) -} -eofs_coeff <- function(lon, lat, field, eof_object, do_standardize = F) { - # Computes expansion coefficient (i.e. PCs) of a given dataset on the - # loading pattern of EOF previously computed - # Works only on eof_object obtained with "eofs" function - - # Area weighting, based on the root of cosine - print("Area Weighting...") - ww <- area_weight(lon, lat, root = T) - wwfield <- sweep(field, c(1, 2), ww, "*") - - # selection of the box - xlim <- c(min(eof_object$pattern$x), max(eof_object$pattern$x)) - ylim <- c(min(eof_object$pattern$y), max(eof_object$pattern$y)) - box <- wwfield[ - whicher(lon, xlim[1]):whicher(lon, xlim[2]), - whicher(lat, ylim[1]):whicher(lat, ylim[2]), - ] - - # transform 3D field in a matrix - new_box <- array(box, dim = c(dim(box)[1] * dim(box)[2], dim(box)[3])) - new_pattern <- array(eof_object$pattern$z, - dim = c( - dim(eof_object$pattern$z)[1] * dim(eof_object$pattern$z)[2], - dim(eof_object$pattern$z)[3] + # preparing output + print("Finalize...") + pattern <- list(x = slon, y = slat, z = pattern) + out <- list( + pattern = pattern, + coeff = coefficient, + variance = variance, + regression = regression + ) + return(out) + } + +eofs_coeff <- + function(lon, + lat, + field, + eof_object, + do_standardize = F) { + # Computes expansion coefficient (i.e. 
PCs) of a given dataset on the + # loading pattern of EOF previously computed + # Works only on eof_object obtained with "eofs" function + + # Area weighting, based on the root of cosine + print("Area Weighting...") + ww <- area_weight(lon, lat, root = T) + wwfield <- sweep(field, c(1, 2), ww, "*") + + # selection of the box + xlim <- c(min(eof_object$pattern$x), max(eof_object$pattern$x)) + ylim <- c(min(eof_object$pattern$y), max(eof_object$pattern$y)) + box <- wwfield[ + whicher(lon, xlim[1]):whicher(lon, xlim[2]), + whicher(lat, ylim[1]):whicher(lat, ylim[2]), + ] + + # transform 3D field in a matrix + new_box <- + array(box, dim = c(dim(box)[1] * dim(box)[2], dim(box)[3])) + new_pattern <- array(eof_object$pattern$z, + dim = c( + dim(eof_object$pattern$z)[1] * dim(eof_object$pattern$z)[2], + dim(eof_object$pattern$z)[3] + ) ) - ) - # projects the coefficients - coef <- (t(new_box) %*% new_pattern) + # projects the coefficients + coef <- (t(new_box) %*% new_pattern) - # standardize - if (do_standardize) { - coefficient <- apply(coef, c(2), standardize) - } else { - coefficient <- coef - } + # standardize + if (do_standardize) { + coefficient <- apply(coef, c(2), standardize) + } else { + coefficient <- coef + } - print("Finalize...") - return(coefficient) -} + print("Finalize...") + return(coefficient) + } -regimes <- function(lon, lat, field, ncluster = 4, ntime = 1000, - neof = 10, xlim, ylim, alg = "Hartigan-Wong") { +regimes <- function(lon, + lat, + field, + ncluster = 4, + ntime = 1000, + neof = 10, + xlim, + ylim, + alg = "Hartigan-Wong") { # R tool to compute cluster analysis based on k-means. # Requires "personal" function eofs # Take as input a 3D anomaly field # Reduce the phase space with EOFs: use SVD and do not standardize PCs print("Launching EOFs...") - reducedspace <- eofs(lon, lat, field, - neof = neof, xlim = xlim, ylim = ylim, - method = "SVD", do_regression = F, do_standardize = F + reducedspace <- eofs( + lon, + lat, + field, + neof = neof, + xlim = xlim, + ylim = ylim, + method = "SVD", + do_regression = F, + do_standardize = F ) # extract the principal components @@ -1332,9 +1599,12 @@ regimes <- function(lon, lat, field, ncluster = 4, ntime = 1000, # k-means computation repeat for ntime to find best solution. 
print("Computing k-means...") print(str(ncluster)) - regimes <- kmeans(PC, as.numeric(ncluster), + regimes <- kmeans( + PC, + as.numeric(ncluster), nstart = ntime, - iter.max = 1000, algorithm = alg + iter.max = 1000, + algorithm = alg ) # Extract regimes frequencyr and timeseries of occupation @@ -1343,7 +1613,8 @@ regimes <- function(lon, lat, field, ncluster = 4, ntime = 1000, print(frequencies[order(frequencies, decreasing = T)]) print("Creating Composites...") - compose <- aperm(apply(field, c(1, 2), by, cluster, mean), c(2, 3, 1)) + compose <- + aperm(apply(field, c(1, 2), by, cluster, mean), c(2, 3, 1)) # sorting from the more frequent to the less frequent kk <- order(frequencies, decreasing = T) @@ -1355,68 +1626,95 @@ regimes <- function(lon, lat, field, ncluster = 4, ntime = 1000, # prepare output print("Finalize...") out <- list( - cluster = cluster, frequencies = frequencies[kk], - regimes = compose[, , kk], tot.withinss = regimes$tot.withinss + cluster = cluster, + frequencies = frequencies[kk], + regimes = compose[, , kk], + tot.withinss = regimes$tot.withinss ) return(out) } -regimes2 <- function(lon, lat, field, ncluster = 4, ntime = 1000, minvar = 0.8, - xlim, ylim, alg = "Hartigan-Wong") { - - # R tool to compute cluster analysis based on k-means. - # Requires "personal" function eofs (see above) - # Take as input a 3D anomaly field - - # Reduce the phase space with EOFs: use SVD and do not standardize PCs - print("Launching EOFs...") - reducedspace <- eofs(lon, lat, field, - neof = 20, xlim = xlim, ylim = ylim, - method = "SVD", do_regression = F, do_standardize = F - ) - reqpc <- which(cumsum(reducedspace$variance) > minvar)[1] - print(paste( - "Retaining", reqpc, - "PCs to fullfil minimum explained variance required (", minvar * 100, "%)" - )) +regimes2 <- + function(lon, + lat, + field, + ncluster = 4, + ntime = 1000, + minvar = 0.8, + xlim, + ylim, + alg = "Hartigan-Wong") { + # R tool to compute cluster analysis based on k-means. + # Requires "personal" function eofs (see above) + # Take as input a 3D anomaly field + + # Reduce the phase space with EOFs: use SVD and do not standardize PCs + print("Launching EOFs...") + reducedspace <- eofs( + lon, + lat, + field, + neof = 20, + xlim = xlim, + ylim = ylim, + method = "SVD", + do_regression = F, + do_standardize = F + ) + reqpc <- which(cumsum(reducedspace$variance) > minvar)[1] + print( + paste( + "Retaining", + reqpc, + "PCs to fullfil minimum explained variance required (", + minvar * 100, + "%)" + ) + ) - # extract the principal components - PC <- reducedspace$coeff[, 1:reqpc] - print(str(PC)) + # extract the principal components + PC <- reducedspace$coeff[, 1:reqpc] + print(str(PC)) + + # k-means computation repeat for ntime to find best solution. + print("Computing k-means...") + print(str(ncluster)) + regimes <- kmeans( + PC, + as.numeric(ncluster), + nstart = ntime, + iter.max = 100, + algorithm = alg + ) - # k-means computation repeat for ntime to find best solution. 
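# A minimal sketch of the kmeans() call above on a toy PC matrix (the sizes
# here are illustrative; kmeans() is base R, from the stats package):
toy_pc <- matrix(rnorm(300 * 10), ncol = 10) # 300 days x 10 leading PCs
toy_km <- kmeans(toy_pc, centers = 4, nstart = 100, iter.max = 100)
print(toy_km$size / nrow(toy_pc) * 100) # regime frequencies in percent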
- print("Computing k-means...") - print(str(ncluster)) - regimes <- kmeans(PC, as.numeric(ncluster), - nstart = ntime, - iter.max = 100, algorithm = alg - ) + # Extract regimes frequencyr and timeseries of occupation + cluster <- regimes$cluster + frequencies <- regimes$size / dim(field)[3] * 100 + print(frequencies[order(frequencies, decreasing = T)]) - # Extract regimes frequencyr and timeseries of occupation - cluster <- regimes$cluster - frequencies <- regimes$size / dim(field)[3] * 100 - print(frequencies[order(frequencies, decreasing = T)]) + print("Creating Composites...") + compose <- + aperm(apply(field, c(1, 2), by, cluster, mean), c(2, 3, 1)) - print("Creating Composites...") - compose <- aperm(apply(field, c(1, 2), by, cluster, mean), c(2, 3, 1)) + # sorting from the more frequent to the less frequent + kk <- order(frequencies, decreasing = T) + cluster <- cluster + 10 + for (ss in 1:ncluster) { + cluster[cluster == (ss + 10)] <- which(kk == ss) + } - # sorting from the more frequent to the less frequent - kk <- order(frequencies, decreasing = T) - cluster <- cluster + 10 - for (ss in 1:ncluster) { - cluster[cluster == (ss + 10)] <- which(kk == ss) + # prepare output + print("Finalize...") + out <- list( + cluster = cluster, + frequencies = frequencies[kk], + regimes = compose[, , kk], + tot.withinss = regimes$tot.withinss + ) + return(out) } - # prepare output - print("Finalize...") - out <- list( - cluster = cluster, frequencies = frequencies[kk], - regimes = compose[, , kk], tot.withinss = regimes$tot.withinss - ) - return(out) -} - ########################################################## #-------------------Time Avg functions-------------------# ########################################################## @@ -1456,7 +1754,8 @@ run_mean <- function(field, n = 5) { run_mean5 <- function(field) { newfield <- rowMeans(cbind( c(field[3:length(field)], NA, NA), - c(field[2:length(field)], NA), field, + c(field[2:length(field)], NA), + field, c(NA, field[1:(length(field) - 1)]), c(NA, NA, field[1:(length(field) - 2)]) ), diff --git a/esmvaltool/diag_scripts/miles/block_fast.R b/esmvaltool/diag_scripts/miles/block_fast.R index 6cb78e134c..6dbc589a26 100644 --- a/esmvaltool/diag_scripts/miles/block_fast.R +++ b/esmvaltool/diag_scripts/miles/block_fast.R @@ -2,419 +2,510 @@ #-----Blocking routines computation for MiLES--------# #-------------P. Davini (Oct 2014)-------------------# ###################################################### -miles_block_fast <- function(dataset, expid, ens, year1, year2, season, - z500filename, FILESDIR, doforce) { - t0 <- proc.time() - - # setting up time domain - years <- year1:year2 - timeseason <- season2timeseason(season) - - # define folders using file.builder function (takes care of ensembles) - savefile1 <- file_builder( - FILESDIR, "Block", "BlockClim", dataset, - expid, ens, year1, year2, season - ) - savefile2 <- file_builder( - FILESDIR, "Block", "BlockFull", dataset, - expid, ens, year1, year2, season - ) - - # check if data is already there to avoid re-run - if (file.exists(savefile1) & file.exists(savefile2)) { - print("Actually requested blocking data is already there!") - print(savefile1) - print(savefile2) - if (doforce == TRUE) { - print("Running with doforce=true... re-run!") - } else { - print("Skipping... 
activate doforce=true if you want to re-run it") - q() - } - } +miles_block_fast <- # nolint + function(dataset, + expid, + ens, + year1, + year2, + season, + z500filename, + FILESDIR, + doforce) { + t0 <- proc.time() + + # setting up time domain + years <- year1:year2 + timeseason <- season2timeseason(season) + + # define folders using file.builder function (takes care of ensembles) + savefile1 <- file_builder( + FILESDIR, + "Block", + "BlockClim", + dataset, + expid, + ens, + year1, + year2, + season + ) + savefile2 <- file_builder( + FILESDIR, + "Block", + "BlockFull", + dataset, + expid, + ens, + year1, + year2, + season + ) - # new file opening - nomefile <- z500filename - fieldlist <- ncdf_opener_universal(nomefile, - namevar = "zg", - tmonths = timeseason, tyears = years, - rotate = "full" - ) - print(str(fieldlist)) - - # extract calendar and time unit from the original file - tcal <- attributes(fieldlist$time)$cal - tunit <- attributes(fieldlist$time)$units - - # time array to simplify time filtering - etime <- power_date_new(fieldlist$time) - totdays <- length(fieldlist$time) - - # declare variable - z500 <- fieldlist$field - - # grid resolution - yreso <- ipsilon[2] - ipsilon[1] - xreso <- ics[2] - ics[1] - - # reso checks: this are not needed with default 2.5 grid, - # but they may be relevant with - # future envisaged power up to finer grids - # xcritical factor is due to RWB longitudinal jump of 7.5 - # ycritical factor is due to Large Scale Extension of 2.5 - xcritical <- 2.5 - ycritical <- 2.5 - if (ycritical %% yreso != 0) { - stop("Latitudinal resolution is not a factor of 5 deg") - } + # check if data is already there to avoid re-run + if (file.exists(savefile1) & file.exists(savefile2)) { + print("Actually requested blocking data is already there!") + print(savefile1) + print(savefile2) + if (doforce == TRUE) { + print("Running with doforce=true... re-run!") + } else { + print("Skipping... 
activate doforce=true if you want to re-run it") + q() + } + } - if (xcritical %% xreso != 0) { - stop("Longitudinal resolution is not a factor of 5 deg") - } + # new file opening + nomefile <- z500filename + fieldlist <- ncdf_opener_universal( + nomefile, + namevar = "zg", + tmonths = timeseason, + tyears = years, + rotate = "full", + fillmiss = TRUE + ) + print(str(fieldlist)) + + # extract calendar and time unit from the original file + tcal <- attributes(fieldlist$time)$cal + tunit <- attributes(fieldlist$time)$units + + # time array to simplify time filtering + etime <- power_date_new(fieldlist$time) + totdays <- length(fieldlist$time) + + # declare variable + z500 <- fieldlist$field + + # grid resolution + yreso <- ipsilon[2] - ipsilon[1] + xreso <- ics[2] - ics[1] + + # reso checks: this are not needed with default 2.5 grid, + # but they may be relevant with + # future envisaged power up to finer grids + # xcritical factor is due to RWB longitudinal jump of 7.5 + # ycritical factor is due to Large Scale Extension of 2.5 + xcritical <- 2.5 + ycritical <- 2.5 + if (ycritical %% yreso != 0) { + stop("Latitudinal resolution is not a factor of 5 deg") + } - ########################################################## - #--------------Tibaldi and Molteni 1990------------------# - ########################################################## - - print("Tibaldi and Molteni (1990) index...") - # TM90: parametres for blocking detection - tm90_fi0 <- 60 # central_lat - tm90_fin <- tm90_fi0 + 20 - tm90_fis <- tm90_fi0 - 20 # south and north lat, 80N and 40N - tm90_central <- whicher(ipsilon, tm90_fi0) - tm90_south <- whicher(ipsilon, tm90_fis) - tm90_north <- whicher(ipsilon, tm90_fin) - tm90_range <- seq(-5, 5, yreso) / yreso # 5 degrees to the north, - # 5 to the south (larger than TM90 or D'Andrea et al 1998) - - # TM90: beta version, the amazing power of R vectorization! - # 6 lines to get the climatology - tm90_ghgn <- (z500[, tm90_north + tm90_range, ] - - z500[, tm90_central + tm90_range, ]) / (tm90_fin - tm90_fi0) - tm90_ghgs <- (z500[, tm90_central + tm90_range, ] - - z500[, tm90_south + tm90_range, ]) / (tm90_fi0 - tm90_fis) - tm90_check <- (tm90_ghgs > 0 & tm90_ghgn < (-10)) # TM90 conditions - tm90_check[tm90_check == T] <- 1 - tm90_check[tm90_check == F] <- 0 - tottm90 <- apply(tm90_check, c(1, 3), max, na.rm = T) - tm90 <- apply(tottm90, 1, mean) * 100 - print("Done!") - - ########################################################## - #--------------Davini et al. 2012------------------------# - ########################################################## - - # decleare main variables to be computed (considerable speed up!) - totrwb <- totmeridional <- totbi <- z500 * NA - totblocked <- totblocked2 <- z500 * 0 - - # Davini et al. 2012: parameters to be set for blocking detection - fi0 <- 30 # lowest latitude to be analyzed - jump <- 15 # distance on which compute gradients - step0 <- jump / yreso # number of grid points to be used - central <- which.min(abs(ipsilon - fi0)) # lowest starting latitude - north <- central + step0 # lowest north latitude - south <- central - step0 # lowest sourth latitude - maxsouth <- central - 2 * step0 - fin <- ipsilon[north] - fis <- ipsilon[south] - range <- (90 - fi0 - jump) / yreso # escursion to the north for - # computing blocking (from 30 up to 75) - - print("--------------------------------------------------") - print("Davini et al. 
(2012) index and diagnostics...") - print(c("distance for gradients:", step0 * diff(ics)[1])) - print(paste("range of latitudes ", fi0, "-", 90 - step0 * diff(ics)[1], - " N", - sep = "" - )) - - ########################################################## - #--------------Istantaneous Blocking---------------------# - ########################################################## - - #----COMPUTING BLOCKING INDICES----- - for (t in 1:totdays) { - progression_bar(t, totdays) - - # multidim extension - new_field <- rbind(z500[, , t], z500[, , t], z500[, , t]) - - # computing blocking for different latitudes - for (delta in 0:range) { - ghgn <- (z500[, north + delta, t] - - z500[, central + delta, t]) / (fin - fi0) - ghgs <- (z500[, central + delta, t] - - z500[, south + delta, t]) / (fi0 - fis) - gh2gs <- (z500[, south + delta, t] - - z500[, maxsouth + delta, t]) / (fi0 - fis) - check1 <- which(ghgs > 0 & ghgn < (-10)) - check2 <- which(ghgs > 0 & ghgn < (-10) & gh2gs < (-5)) - # supplementary condition - - if (length(check2) > 0) { - totblocked2[check2, central + delta, t] <- 1 - } + if (xcritical %% xreso != 0) { + stop("Longitudinal resolution is not a factor of 5 deg") + } - if (length(check1) > 0) { - # 1-MATRIX FOR INSTANTANEOUS BLOCKING - totblocked[check1, central + delta, t] <- 1 - - - # 2-PART ON COMPUTATION OF ROSSBY WAVEBREAKING - r <- check1 + length(ics) - rwb_jump <- jump / 2 - steprwb <- rwb_jump / xreso - rwb_west <- new_field[(r - steprwb), south + delta + steprwb] - rwb_east <- new_field[(r + steprwb), south + delta + steprwb] - fullgh <- (rwb_west - rwb_east) - - totrwb[check1[fullgh < 0], central + delta, t] <- (-10) - # gradient decreasing: cyclonic RWB - totrwb[check1[fullgh > 0], central + delta, t] <- 10 - # gradient increasing: anticyclonic RWB - - # 4-part about adapted version of blocking intensity - # by Wiedenmann et al. (2002) - step <- 60 / xreso - ii <- check1 + length(ics) - zu <- zd <- NULL - for (ll in ii) { - zu <- c(zu, min(new_field[(ll - step):ll, central + delta])) - zd <- c(zd, min(new_field[ll:(ll + step), central + delta])) + ########################################################## + #--------------Tibaldi and Molteni 1990------------------# + ########################################################## + + print("Tibaldi and Molteni (1990) index...") + # TM90: parametres for blocking detection + tm90_fi0 <- 60 # central_lat + tm90_fin <- tm90_fi0 + 20 + tm90_fis <- tm90_fi0 - 20 # south and north lat, 80N and 40N + tm90_central <- whicher(ipsilon, tm90_fi0) + tm90_south <- whicher(ipsilon, tm90_fis) + tm90_north <- whicher(ipsilon, tm90_fin) + tm90_range <- seq(-5, 5, yreso) / yreso # 5 degrees to the north, + # 5 to the south (larger than TM90 or D'Andrea et al 1998) + + # TM90: beta version, the amazing power of R vectorization! + # 6 lines to get the climatology + tm90_ghgn <- (z500[, tm90_north + tm90_range, ] - + z500[, tm90_central + tm90_range, ]) / (tm90_fin - tm90_fi0) + tm90_ghgs <- (z500[, tm90_central + tm90_range, ] - + z500[, tm90_south + tm90_range, ]) / (tm90_fi0 - tm90_fis) + tm90_check <- + (tm90_ghgs > 0 & tm90_ghgn < (-10)) # TM90 conditions + tm90_check[tm90_check == T] <- 1 + tm90_check[tm90_check == F] <- 0 + tottm90 <- apply(tm90_check, c(1, 3), max, na.rm = T) + tm90 <- apply(tottm90, 1, mean) * 100 + print("Done!") + + ########################################################## + #--------------Davini et al. 
2012------------------------#
+    ##########################################################
+
+    # declare main variables to be computed (considerable speed up!)
+    totrwb <- totmeridional <- totbi <- z500 * NA
+    totblocked <- totblocked2 <- z500 * 0
+
+    # Davini et al. 2012: parameters to be set for blocking detection
+    fi0 <- 30 # lowest latitude to be analyzed
+    jump <- 15 # distance on which compute gradients
+    step0 <- jump / yreso # number of grid points to be used
+    central <-
+      which.min(abs(ipsilon - fi0)) # lowest starting latitude
+    north <- central + step0 # lowest north latitude
+    south <- central - step0 # lowest south latitude
+    maxsouth <- central - 2 * step0
+    if (maxsouth <= 0) {
+      stop("Latitude=0 has to be included in the domain.")
+    }
+    fin <- ipsilon[north]
+    fis <- ipsilon[south]
+    range <- (90 - fi0 - jump) / yreso # excursion to the north for
+    # computing blocking (from 30 up to 75)
+
+    print("--------------------------------------------------")
+    print("Davini et al. (2012) index and diagnostics...")
+    print(c("distance for gradients:", step0 * diff(ics)[1]))
+    print(paste("range of latitudes ", fi0, "-", 90 - step0 * diff(ics)[1],
+      " N",
+      sep = ""
+    ))
+
+    ##########################################################
+    #--------------Instantaneous Blocking--------------------#
+    ##########################################################
+
+    #----COMPUTING BLOCKING INDICES-----
+    for (t in 1:totdays) {
+      progression_bar(t, totdays)
+
+      # multidim extension
+      new_field <- rbind(z500[, , t], z500[, , t], z500[, , t])
+
+      # computing blocking for different latitudes
+      for (delta in 0:range) {
+        ghgn <- (z500[, north + delta, t] -
+          z500[, central + delta, t]) / (fin - fi0)
+        ghgs <- (z500[, central + delta, t] -
+          z500[, south + delta, t]) / (fi0 - fis)
+        gh2gs <- (z500[, south + delta, t] -
+          z500[, maxsouth + delta, t]) / (fi0 - fis)
+        check1 <- which(ghgs > 0 & ghgn < (-10))
+        check2 <- which(ghgs > 0 & ghgn < (-10) & gh2gs < (-5))
+        # supplementary condition
+
+        if (length(check2) > 0) {
+          totblocked2[check2, central + delta, t] <- 1
+        }
+
+        if (length(check1) > 0) {
+          # 1-MATRIX FOR INSTANTANEOUS BLOCKING
+          totblocked[check1, central + delta, t] <- 1
+
+
+          # 2-PART ON COMPUTATION OF ROSSBY WAVEBREAKING
+          r <- check1 + length(ics)
+          rwb_jump <- jump / 2
+          steprwb <- rwb_jump / xreso
+          rwb_west <-
+            new_field[(r - steprwb), south + delta + steprwb]
+          rwb_east <-
+            new_field[(r + steprwb), south + delta + steprwb]
+          fullgh <- (rwb_west - rwb_east)
+
+          totrwb[check1[fullgh < 0], central + delta, t] <- (-10)
+          # gradient decreasing: cyclonic RWB
+          totrwb[check1[fullgh > 0], central + delta, t] <- 10
+          # gradient increasing: anticyclonic RWB
+
+          # 4-part about adapted version of blocking intensity
+          # by Wiedenmann et al. 
(2002) + step <- 60 / xreso + ii <- check1 + length(ics) + zu <- zd <- NULL + for (ll in ii) { + zu <- c(zu, min(new_field[(ll - step):ll, central + delta])) + zd <- + c(zd, min(new_field[ll:(ll + step), central + delta])) + } + mz <- z500[check1, central + delta, t] + rc <- 0.5 * ((zu + mz) / 2 + (zd + mz) / 2) + totbi[check1, central + delta, t] <- 100 * (mz / rc - 1) + + # 5 - part about meridional gradient index + totmeridional[check1, central + delta, t] <- ghgs[check1] + } } } - } - print(paste("Total # of days:", t)) - print("-------------------------") - - ########################################################## - #--------------------Mean Values-------------------------# - ########################################################## - - # compute mean values (use rowMeans, faster when there are no NA values) - frequency <- rowMeans(totblocked, dims = 2) * 100 - # frequency of Instantaneous Blocking days - frequency2 <- rowMeans(totblocked2, dims = 2) * 100 - # frequency of Instantaneous Blocking days with GHGS2 - z500mean <- rowMeans(z500, dims = 2) # Z500 mean value - bi <- apply(totbi, c(1, 2), mean, na.rm = T) - # Blocking Intensity Index as Wiedenmann et al. (2002) - mgi <- apply(totmeridional, c(1, 2), mean, na.rm = T) - # Value of meridional gradient inversion - - # anticyclonic and cyclonic averages RWB - cn <- apply(totrwb, c(1, 2), function(x) sum(x[x == (-10)], na.rm = T)) / - (totdays) * (-10) - acn <- apply(totrwb, c(1, 2), function(x) sum(x[x == (10)], na.rm = T)) / - (totdays) * (10) - - t1 <- proc.time() - t0 - print(t1) - - print("Instantaneous blocking and diagnostics done!") - - ########################################################## - #--------------------Time filtering----------------------# - ########################################################## - - # spatial filtering on fixed longitude distance - spatial <- longitude_filter(ics, ipsilon, totblocked) - - # large scale extension on 10x5 box - large <- largescale_extension_if(ics, ipsilon, spatial) - - # 5-day persistence filter - block <- blocking_persistence(large, minduration = 5, time.array = etime) - - # 10-day persistence for extreme long block - longblock <- blocking_persistence(large, - minduration = 10, - time.array = etime - ) - - tf <- proc.time() - t1 - print(tf) - - - ########################################################## - #------------------------Save to NetCDF------------------# - ########################################################## - - # saving output to netcdf files - print("saving NetCDF climatologies...") - - # which fieds to plot/save - fieldlist <- c( - "TM90", "InstBlock", "ExtraBlock", "Z500", "MGI", "BI", - "CN", "ACN", "BlockEvents", "LongBlockEvents", - "DurationEvents", "NumberEvents" - ) - full_fieldlist <- c( - "TM90", "InstBlock", "ExtraBlock", "Z500", "MGI", "BI", - "CN", "ACN", "BlockEvents", "LongBlockEvents" - ) - - # dimensions definition - fulltime <- as.numeric(etime$data) - as.numeric(etime$data)[1] - TIME <- paste(tunit, " since ", year1, "-", timeseason[1], - "-01 00:00:00", - sep = "" - ) - LEVEL <- 50000 - x <- ncdim_def("lon", "degrees_east", ics, longname = "longitude") - y <- ncdim_def("lat", "degrees_north", ipsilon, longname = "latitude") - z <- ncdim_def("plev", "Pa", LEVEL, longname = "pressure") - t1 <- ncdim_def("time", TIME, 0, - unlim = T, calendar = tcal, - longname = "time" - ) - t2 <- ncdim_def("time", TIME, fulltime, - unlim = T, calendar = tcal, - longname = "time" - ) - - for (var in fieldlist) { - # name of the var - if (var == 
"TM90") { - longvar <- "Tibaldi-Molteni 1990 Instantaneous Blocking frequency" - unit <- "%" - field <- tm90 - full_field <- tottm90 - } - if (var == "InstBlock") { - longvar <- "Instantaneous Blocking frequency" - unit <- "%" - field <- frequency - full_field <- totblocked - } - if (var == "ExtraBlock") { - longvar <- "Instantaneous Blocking frequency (GHGS2)" - unit <- "%" - field <- frequency2 - full_field <- totblocked2 - } - if (var == "Z500") { - longvar <- "Geopotential Height" - unit <- "m" - field <- z500mean - full_field <- z500 - } - if (var == "BI") { - longvar <- "BI index" - unit <- "" - field <- bi - full_field <- totbi - } - if (var == "MGI") { - longvar <- "MGI index" - unit <- "" - field <- mgi - full_field <- totmeridional - } - if (var == "ACN") { - longvar <- "Anticyclonic RWB frequency" - unit <- "%" - field <- acn - full_field <- totrwb / 10 - full_field[full_field == (-1)] <- NA - } - if (var == "CN") { - longvar <- "Cyclonic RWB frequency" - unit <- "%" - field <- cn - full_field <- totrwb / 10 - full_field[full_field == (1)] <- NA - } - if (var == "BlockEvents") { - longvar <- "Blocking Events frequency" - unit <- "%" - field <- block$percentage - full_field <- block$track - } - if (var == "LongBlockEvents") { - longvar <- "10-day Blocking Events frequency" - unit <- "%" - field <- longblock$percentage - full_field <- longblock$track - } - if (var == "DurationEvents") { - longvar <- "Blocking Events duration" - unit <- "days" - field <- block$duration - } - if (var == "NumberEvents") { - longvar <- "Blocking Events number" - unit <- "" - field <- block$nevents - } + print(paste("Total # of days:", t)) + print("-------------------------") + + ########################################################## + #--------------------Mean Values-------------------------# + ########################################################## + + # compute mean values (use rowMeans, faster when there are no NA values) + frequency <- rowMeans(totblocked, dims = 2) * 100 + # frequency of Instantaneous Blocking days + frequency2 <- rowMeans(totblocked2, dims = 2) * 100 + # frequency of Instantaneous Blocking days with GHGS2 + z500mean <- rowMeans(z500, dims = 2) # Z500 mean value + bi <- apply(totbi, c(1, 2), mean, na.rm = T) + # Blocking Intensity Index as Wiedenmann et al. 
+  mgi <- apply(totmeridional, c(1, 2), mean, na.rm = T)
+  # Value of meridional gradient inversion
+
+  # anticyclonic and cyclonic averages RWB
+  cn <-
+    apply(totrwb, c(1, 2), function(x)
+      sum(x[x == (-10)], na.rm = T)) /
+    (totdays) * (-10)
+  acn <-
+    apply(totrwb, c(1, 2), function(x)
+      sum(x[x == (10)], na.rm = T)) /
+    (totdays) * (10)
+
+  t1 <- proc.time() - t0
+  print(t1)
+
+  print("Instantaneous blocking and diagnostics done!")
+
+  ##########################################################
+  #--------------------Time filtering----------------------#
+  ##########################################################
+
+  # spatial filtering on fixed longitude distance
+  spatial <- longitude_filter(ics, ipsilon, totblocked)
+
+  # large scale extension on 10x5 box
+  large <- largescale_extension_if(ics, ipsilon, spatial)
+
+  # 5-day persistence filter
+  block <-
+    blocking_persistence(large, minduration = 5, time.array = etime)
+
+  # 10-day persistence for extreme long block
+  longblock <- blocking_persistence(large,
+    minduration = 10,
+    time.array = etime
+  )

-    # fix eventual NaN
-    field[is.nan(field)] <- NA
+  tf <- proc.time() - t1
+  print(tf)
+
+
+  ##########################################################
+  #------------------------Save to NetCDF------------------#
+  ##########################################################
+
+  # saving output to netcdf files
+  print("saving NetCDF climatologies...")
+
+  # which fields to plot/save
+  fieldlist <- c(
+    "TM90",
+    "InstBlock",
+    "ExtraBlock",
+    "Z500",
+    "MGI",
+    "BI",
+    "CN",
+    "ACN",
+    "BlockEvents",
+    "LongBlockEvents",
+    "DurationEvents",
+    "NumberEvents"
+  )
+  full_fieldlist <- c(
+    "TM90",
+    "InstBlock",
+    "ExtraBlock",
+    "Z500",
+    "MGI",
+    "BI",
+    "CN",
+    "ACN",
+    "BlockEvents",
+    "LongBlockEvents"
+  )

-  # variable definitions
-  if (var == "TM90") {
-    var_ncdf <- ncvar_def(var, unit, list(x, t = t1), -999,
-      longname = longvar, prec = "single", compression = 1
-    )
-    full_var_ncdf <- ncvar_def(var, unit, list(x, t = t2), -999,
-      longname = longvar, prec = "single", compression = 1
-    )
-  } else {
-    var_ncdf <- ncvar_def(var, unit, list(x, y, z, t = t1), -999,
-      longname = longvar, prec = "single", compression = 1
-    )
-    full_var_ncdf <- ncvar_def(var, unit, list(x, y, z, t = t2), -999,
-      longname = longvar, prec = "single", compression = 1
-    )
+  # dimensions definition
+  fulltime <- as.numeric(etime$data) - as.numeric(etime$data)[1]
+  TIME <- paste(tunit, " since ", year1, "-", timeseason[1],
+    "-01 00:00:00",
+    sep = ""
+  )
+  LEVEL <- 50000
+  x <- ncdim_def("lon", "degrees_east", ics, longname = "longitude")
+  y <-
+    ncdim_def("lat", "degrees_north", ipsilon, longname = "latitude")
+  z <- ncdim_def("plev", "Pa", LEVEL, longname = "pressure")
+  t1 <- ncdim_def(
+    "time",
+    TIME,
+    0,
+    unlim = T,
+    calendar = tcal,
+    longname = "time"
+  )
+  t2 <- ncdim_def(
+    "time",
+    TIME,
+    fulltime,
+    unlim = T,
+    calendar = tcal,
+    longname = "time"
+  )
+
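The save step that follows is the standard ncdf4 define/create/put/close sequence. A self-contained toy version (hypothetical variable, toy grid, and a temporary file rather than the diagnostic's real output path):

# Toy ncdf4 round trip mirroring the pattern used below
library(ncdf4)
lon_dim <- ncdim_def("lon", "degrees_east", seq(0, 357.5, 2.5))
lat_dim <- ncdim_def("lat", "degrees_north", seq(-90, 90, 2.5))
var_toy <- ncvar_def("InstBlock", "%", list(lon_dim, lat_dim), -999,
  longname = "Instantaneous Blocking frequency",
  prec = "single", compression = 1
)
nc_toy <- nc_create(tempfile(fileext = ".nc"), list(var_toy))
ncvar_put(nc_toy, var_toy, matrix(0, nrow = 144, ncol = 73)) # all-zero field
nc_close(nc_toy)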
+  for (var in fieldlist) {
+    # name of the var
+    if (var == "TM90") {
+      longvar <- "Tibaldi-Molteni 1990 Instantaneous Blocking frequency"
+      unit <- "%"
+      field <- tm90
+      full_field <- tottm90
+    }
+    if (var == "InstBlock") {
+      longvar <- "Instantaneous Blocking frequency"
+      unit <- "%"
+      field <- frequency
+      full_field <- totblocked
+    }
+    if (var == "ExtraBlock") {
+      longvar <- "Instantaneous Blocking frequency (GHGS2)"
+      unit <- "%"
+      field <- frequency2
+      full_field <- totblocked2
+    }
+    if (var == "Z500") {
+      longvar <- "Geopotential Height"
+      unit <- "m"
+      field <- z500mean
+      full_field <- z500
+    }
+    if (var == "BI") {
+      longvar <- "BI index"
+      unit <- ""
+      field <- bi
+      full_field <- totbi
+    }
+    if (var == "MGI") {
+      longvar <- "MGI index"
+      unit <- ""
+      field <- mgi
+      full_field <- totmeridional
+    }
+    if (var == "ACN") {
+      longvar <- "Anticyclonic RWB frequency"
+      unit <- "%"
+      field <- acn
+      full_field <- totrwb / 10
+      full_field[full_field == (-1)] <- NA
+    }
+    if (var == "CN") {
+      longvar <- "Cyclonic RWB frequency"
+      unit <- "%"
+      field <- cn
+      full_field <- totrwb / 10
+      full_field[full_field == (1)] <- NA
+    }
+    if (var == "BlockEvents") {
+      longvar <- "Blocking Events frequency"
+      unit <- "%"
+      field <- block$percentage
+      full_field <- block$track
+    }
+    if (var == "LongBlockEvents") {
+      longvar <- "10-day Blocking Events frequency"
+      unit <- "%"
+      field <- longblock$percentage
+      full_field <- longblock$track
+    }
+    if (var == "DurationEvents") {
+      longvar <- "Blocking Events duration"
+      unit <- "days"
+      field <- block$duration
+    }
+    if (var == "NumberEvents") {
+      longvar <- "Blocking Events number"
+      unit <- ""
+      field <- block$nevents
+    }
+
+    # replace any NaN with NA
+    field[is.nan(field)] <- NA
+
+    # variable definitions
+    if (var == "TM90") {
+      var_ncdf <- ncvar_def(
+        var,
+        unit,
+        list(x, t = t1),
+        -999,
+        longname = longvar,
+        prec = "single",
+        compression = 1
+      )
+      full_var_ncdf <- ncvar_def(
+        var,
+        unit,
+        list(x, t = t2),
+        -999,
+        longname = longvar,
+        prec = "single",
+        compression = 1
+      )
+    } else {
+      var_ncdf <- ncvar_def(
+        var,
+        unit,
+        list(x, y, z, t = t1),
+        -999,
+        longname = longvar,
+        prec = "single",
+        compression = 1
+      )
+      full_var_ncdf <-
+        ncvar_def(
+          var,
+          unit,
+          list(x, y, z, t = t2),
+          -999,
+          longname = longvar,
+          prec = "single",
+          compression = 1
+        )
+    }
+
+    assign(paste0("var", var), var_ncdf)
+    assign(paste0("full_var", var), full_var_ncdf)
+    assign(paste0("field", var), field)
+    assign(paste0("full_field", var), full_field)
   }

-  assign(paste0("var", var), var_ncdf)
-  assign(paste0("full_var", var), full_var_ncdf)
-  assign(paste0("field", var), field)
-  assign(paste0("full_field", var), full_field)
-  }

+  # Climatologies Netcdf file creation
+  print(savefile1)
+  namelist1 <- paste0("var", fieldlist)
+  nclist1 <- mget(namelist1)
+  ncfile1 <- nc_create(savefile1, nclist1)
+  for (var in fieldlist) {
+    # put variables into the ncdf file
+    ndims <- get(paste0("var", var))$ndims
+    ncvar_put(
+      ncfile1,
+      var,
+      get(paste0("field", var)),
+      start = rep(1, ndims),
+      count = rep(-1, ndims)
+    )
+  }
+  nc_close(ncfile1)

-  # Climatologies Netcdf file creation
-  print(savefile1)
-  namelist1 <- paste0("var", fieldlist)
-  nclist1 <- mget(namelist1)
-  ncfile1 <- nc_create(savefile1, nclist1)
-  for (var in fieldlist) {
-    # put variables into the ncdf file
-    # ncvar_put(ncfile1, fieldlist[which(var==fieldlist)],
-    # get(paste0("field",var)), start = c(1, 1, 1, 1), count = c(-1,-1,-1,-1))
-    ndims <- get(paste0("var", var))$ndims
-    ncvar_put(ncfile1, var, get(paste0("field", var)),
-      start = rep(1, ndims),
-      count = rep(-1, ndims)
-    )
-  }
-  nc_close(ncfile1)
-
-  # Fullfield Netcdf file creation
-  print(savefile2)
-  namelist2 <- paste0("full_var", full_fieldlist)
-  nclist2 <- mget(namelist2)
-  ncfile2 <- nc_create(savefile2, nclist2)
-  for (var in full_fieldlist) {
-    # put variables into the ncdf file
-    # ncvar_put(ncfile2, full_fieldlist[which(var==full_fieldlist)],
-    # get(paste0("full_field",var)), start = c(1, 1, 1, 1),
-    # count = c(-1,-1,-1,-1))
-    ndims <- get(paste0("full_var", var))$ndims
-    ncvar_put(ncfile2, var, get(paste0("full_field", var)), 
- start = rep(1, ndims), count = rep(-1, ndims) - ) + # Fullfield Netcdf file creation + print(savefile2) + namelist2 <- paste0("full_var", full_fieldlist) + nclist2 <- mget(namelist2) + ncfile2 <- nc_create(savefile2, nclist2) + for (var in full_fieldlist) { + # put variables into the ncdf file + ndims <- get(paste0("full_var", var))$ndims + ncvar_put( + ncfile2, + var, + get(paste0("full_field", var)), + start = rep(1, ndims), + count = rep(-1, ndims) + ) + } + nc_close(ncfile2) + return(c(savefile1, savefile2)) } - nc_close(ncfile2) - return(c(savefile1, savefile2)) -} diff --git a/esmvaltool/diag_scripts/miles/block_figures.R b/esmvaltool/diag_scripts/miles/block_figures.R index 203e7060c5..0e33680e76 100644 --- a/esmvaltool/diag_scripts/miles/block_figures.R +++ b/esmvaltool/diag_scripts/miles/block_figures.R @@ -3,16 +3,34 @@ #-------------P. Davini (May 2017)-------------------# ###################################################### -miles_block_figures <- function(dataset, expid, ens, year1, year2, - dataset_ref, expid_ref, ens_ref, year1_ref, +miles_block_figures <- function(dataset, + expid, + ens, + year1, + year2, + dataset_ref, + expid_ref, + ens_ref, + year1_ref, year2_ref, - season, FIGDIR, FILESDIR, REFDIR) { - + season, + FIGDIR, + FILESDIR, + REFDIR) { # which fieds to load/plot fieldlist <- c( - "InstBlock", "ExtraBlock", "Z500", "MGI", "BI", "CN", "ACN", - "BlockEvents", "LongBlockEvents", "DurationEvents", - "NumberEvents", "TM90" + "InstBlock", + "ExtraBlock", + "Z500", + "MGI", + "BI", + "CN", + "ACN", + "BlockEvents", + "LongBlockEvents", + "DurationEvents", + "NumberEvents", + "TM90" ) ########################################################## @@ -21,36 +39,58 @@ miles_block_figures <- function(dataset, expid, ens, year1, year2, # open field for (field in fieldlist) { - # use file.builder function nomefile <- file_builder( - FILESDIR, "Block", "BlockClim", dataset, expid, - ens, year1, year2, season + FILESDIR, + "Block", + "BlockClim", + dataset, + expid, + ens, + year1, + year2, + season ) - field_exp <- ncdf_opener(nomefile, namevar = field, rotate = "no") + field_exp <- + ncdf_opener(nomefile, namevar = field, rotate = "no") assign(paste(field, "_exp", sep = ""), field_exp) } # open reference field for (field in fieldlist) { - # check for REFDIR==FILESDIR, i.e. 
if we are using the climatology # provided by MiLES or another dataset MiLES-generated if (REFDIR != FILESDIR) { nomefile_ref <- paste0( - file.path(REFDIR, "Block"), "/BlockClim_", # nolint - dataset_ref, "_", year1_ref, "_", year2_ref, "_", season, ".nc" + file.path(REFDIR, "Block"), + "/BlockClim_", + # nolint + dataset_ref, + "_", + year1_ref, + "_", + year2_ref, + "_", + season, + ".nc" ) } else { - # use file.builder to create the path of the blocking files nomefile_ref <- file_builder( - FILESDIR, "Block", "BlockClim", - dataset_ref, expid_ref, ens_ref, year1_ref, year2_ref, season + FILESDIR, + "Block", + "BlockClim", + dataset_ref, + expid_ref, + ens_ref, + year1_ref, + year2_ref, + season ) } - field_ref <- ncdf_opener(nomefile_ref, namevar = field, rotate = "no") + field_ref <- + ncdf_opener(nomefile_ref, namevar = field, rotate = "no") assign(paste(field, "_ref", sep = ""), field_ref) } @@ -59,15 +99,16 @@ miles_block_figures <- function(dataset, expid, ens, year1, year2, ########################################################## # standard properties - info_exp <- info_builder(dataset, expid, ens, year1, year2, season) + info_exp <- + info_builder(dataset, expid, ens, year1, year2, season) info_ref <- info_builder( dataset_ref, expid_ref, ens_ref, year1_ref, year2_ref, season ) + filenames <- c() # loop on fields for (field in fieldlist) { - # define field-dependent properties fp <- field_details(field) @@ -77,10 +118,19 @@ miles_block_figures <- function(dataset, expid, ens, year1, year2, # create figure names with ad-hoc function figname <- fig_builder( - FIGDIR, "Block", field, dataset, expid, ens, year1, - year2, season, output_file_type + FIGDIR, + "Block", + field, + dataset, + expid, + ens, + year1, + year2, + season, + output_file_type ) print(figname) + filenames <- c(filenames, figname) # special treatment for TM90: it is a 1D field! if (field == "TM90") { @@ -88,7 +138,10 @@ miles_block_figures <- function(dataset, expid, ens, year1, year2, # panels option par( - cex.main = 2, cex.axis = 1.5, cex.lab = 1.5, mar = c(5, 5, 4, 3), + cex.main = 2, + cex.axis = 1.5, + cex.lab = 1.5, + mar = c(5, 5, 4, 3), oma = c(0, 0, 0, 0) ) @@ -101,19 +154,35 @@ miles_block_figures <- function(dataset, expid, ens, year1, year2, # plot properties lwdline <- 4 tm90cols <- fp$color_field - plot(ics2, field_exp2, - type = "l", lwd = lwdline, ylim = fp$lev_field, - main = fp$title_name, xlab = "Longitude", ylab = fp$legend_unit, + plot( + ics2, + field_exp2, + type = "l", + lwd = lwdline, + ylim = fp$lev_field, + main = fp$title_name, + xlab = "Longitude", + ylab = fp$legend_unit, col = tm90cols[1] ) - points(ics2, field_ref2, - type = "l", lwd = lwdline, lty = 1, + points( + ics2, + field_ref2, + type = "l", + lwd = lwdline, + lty = 1, col = tm90cols[2] ) grid() - legend(100, 30, - legend = c(info_exp, info_ref), lwd = lwdline, - lty = c(1, 1), col = tm90cols, bg = "white", cex = 1. + legend( + 100, + 30, + legend = c(info_exp, info_ref), + lwd = lwdline, + lty = c(1, 1), + col = tm90cols, + bg = "white", + cex = 1. 
) dev.off() @@ -129,55 +198,100 @@ miles_block_figures <- function(dataset, expid, ens, year1, year2, par(plotpar) # main experiment plot - im <- plot_prepare(ics, ipsilon, field_exp, + im <- plot_prepare(ics, + ipsilon, + field_exp, proj = map_projection, lat_lim = lat_lim ) - filled_contour3(im$x, im$y, im$z, - xlab = im$xlab, ylab = im$ylab, - main = paste(info_exp), levels = fp$lev_field, - color.palette = fp$color_field, xlim = im$xlim, - ylim = im$ylim, axes = im$axes + filled_contour3( + im$x, + im$y, + im$z, + xlab = im$xlab, + ylab = im$ylab, + main = paste(info_exp), + levels = fp$lev_field, + color.palette = fp$color_field, + xlim = im$xlim, + ylim = im$ylim, + axes = im$axes + ) + mtext( + fp$title_name, + side = 3, + line = .5, + outer = TRUE, + cex = 2, + font = 2 ) - mtext(fp$title_name, side = 3, line = .5, outer = TRUE, cex = 2, font = 2) proj_addland(proj = map_projection) # reference field plot - im <- plot_prepare(ics, ipsilon, field_ref, - proj = map_projection, lat_lim = lat_lim + im <- plot_prepare(ics, + ipsilon, + field_ref, + proj = map_projection, + lat_lim = lat_lim ) - filled_contour3(im$x, im$y, im$z, - xlab = im$xlab, ylab = im$ylab, - main = paste(info_ref), levels = fp$lev_field, - color.palette = fp$color_field, xlim = im$xlim, - ylim = im$ylim, axes = im$axes + filled_contour3( + im$x, + im$y, + im$z, + xlab = im$xlab, + ylab = im$ylab, + main = paste(info_ref), + levels = fp$lev_field, + color.palette = fp$color_field, + xlim = im$xlim, + ylim = im$ylim, + axes = im$axes ) proj_addland(proj = map_projection) - image_scale3(volcano, + image_scale3( + volcano, levels = fp$lev_field, color.palette = fp$color_field, colorbar.label = fp$legend_unit, - cex.colorbar = imgscl_colorbar, cex.label = imgscl_label, - colorbar.width = 1 * af, line.label = fp$legend_distance + cex.colorbar = imgscl_colorbar, + cex.label = imgscl_label, + colorbar.width = 1 * af, + line.label = fp$legend_distance ) # delta field plot - im <- plot_prepare(ics, ipsilon, field_exp - field_ref, - proj = map_projection, lat_lim = lat_lim) - filled_contour3(im$x, im$y, im$z, - xlab = im$xlab, ylab = im$ylab, - main = paste("Difference"), levels = fp$lev_diff, - color.palette = fp$color_diff, xlim = im$xlim, - ylim = im$ylim, axes = im$axes + im <- plot_prepare(ics, + ipsilon, + field_exp - field_ref, + proj = map_projection, + lat_lim = lat_lim + ) + filled_contour3( + im$x, + im$y, + im$z, + xlab = im$xlab, + ylab = im$ylab, + main = paste("Difference"), + levels = fp$lev_diff, + color.palette = fp$color_diff, + xlim = im$xlim, + ylim = im$ylim, + axes = im$axes ) proj_addland(proj = map_projection) - image_scale3(volcano, - levels = fp$lev_diff, color.palette = fp$color_diff, + image_scale3( + volcano, + levels = fp$lev_diff, + color.palette = fp$color_diff, colorbar.label = fp$legend_unit, - cex.colorbar = imgscl_colorbar, cex.label = imgscl_label, - colorbar.width = 1 * af, line.label = fp$legend_distance + cex.colorbar = imgscl_colorbar, + cex.label = imgscl_label, + colorbar.width = 1 * af, + line.label = fp$legend_distance ) dev.off() } + return(list(figs = filenames, mod = nomefile, ref = nomefile_ref)) } diff --git a/esmvaltool/diag_scripts/miles/eof_fast.R b/esmvaltool/diag_scripts/miles/eof_fast.R index 716175dac4..5b7692cf2f 100644 --- a/esmvaltool/diag_scripts/miles/eof_fast.R +++ b/esmvaltool/diag_scripts/miles/eof_fast.R @@ -2,236 +2,313 @@ #-----EOFs routines computation for MiLES--------# #-------------P. 
Davini (Feb 2018)-------------------# ###################################################### -miles_eofs_fast <- function(dataset, expid, ens, year1, year2, season, - tele, z500filename, FILESDIR, PROGDIR, doforce) { - - # standard defined 4 EOFs - neofs <- 4 - - # t0 - t0 <- proc.time() - - # setting up time domain - years <- year1:year2 - timeseason <- season2timeseason(season) - - # define folders using file.builder function (takes care of ensembles) - print(".....") - print(dataset) - print(expid) - print(ens) - savefile1 <- file_builder( - FILESDIR, paste0("EOFs/", tele), "EOFs", dataset, - expid, ens, year1, year2, season - ) - - # select teleconnection region - if (tele == "NAO") { - xlim <- c(-90, 40) - ylim <- c(20, 85) - rotation <- "full" - } else if (tele == "AO") { - xlim <- c(-180, 180) - ylim <- c(20, 85) - rotation <- "full" - } else if (tele == "PNA") { - xlim <- c(140, 280) - ylim <- c(20, 85) - rotation <- "no" # 140E-80W: use trick of rotation for cross-dateline - } else { - # use non standard region, detect region with strsplit - splitter <- as.numeric(strsplit(tele, "_")[[1]]) - if (length(splitter) == 4) { - xlim <- c(splitter[1], splitter[2]) - ylim <- c(splitter[3], splitter[4]) - if (xlim[2] > 180) { - rotation <- "no" +miles_eofs_fast <- # nolint + function(dataset, + expid, + ens, + year1, + year2, + season, + tele, + z500filename, + FILESDIR, + PROGDIR, + doforce) { + # standard defined 4 EOFs + neofs <- 4 + + # t0 + t0 <- proc.time() + + # setting up time domain + years <- year1:year2 + timeseason <- season2timeseason(season) + + # define folders using file.builder function (takes care of ensembles) + print(".....") + print(dataset) + print(expid) + print(ens) + savefile1 <- file_builder( + FILESDIR, + paste0("EOFs/", tele), + "EOFs", + dataset, + expid, + ens, + year1, + year2, + season + ) + + # select teleconnection region + if (tele == "NAO") { + xlim <- c(-90, 40) + ylim <- c(20, 85) + rotation <- "full" + } else if (tele == "AO") { + xlim <- c(-180, 180) + ylim <- c(20, 85) + rotation <- "full" + } else if (tele == "PNA") { + xlim <- c(140, 280) + ylim <- c(20, 85) + rotation <- + "no" # 140E-80W: use trick of rotation for cross-dateline + } else { + # use non standard region, detect region with strsplit + splitter <- as.numeric(strsplit(tele, "_")[[1]]) + if (length(splitter) == 4) { + xlim <- c(splitter[1], splitter[2]) + ylim <- c(splitter[3], splitter[4]) + if (xlim[2] > 180) { + rotation <- "no" + } else { + rotation <- "full" + } } else { - rotation <- "full" + stop("Wrong teleconnection region!") } - } else { - stop("Wrong teleconnection region!") } - } - # check if data is already there to avoid re-run - if (file.exists(savefile1)) { - print("Actually requested EOFs data is already there!") - print(savefile1) - if (doforce == TRUE) { - print("Running with doforce=true... re-run!") - } else { - print("Skipping... activate doforce=true if you want to re-run it") - q() + # check if data is already there to avoid re-run + if (file.exists(savefile1)) { + print("Actually requested EOFs data is already there!") + print(savefile1) + if (doforce == TRUE) { + print("Running with doforce=true... re-run!") + } else { + print("Skipping... 
activate doforce=true if you want to re-run it") + q() + } } - } - # new file opening - nomefile <- z500filename - fieldlist <- ncdf_opener_universal(nomefile, "zg", - tmonths = timeseason, - tyears = years, rotate = rotation - ) - print(str(fieldlist)) - - # extract calendar and time unit from the original file - tcal <- attributes(fieldlist$time)$cal - tunit <- attributes(fieldlist$time)$units - - # time array - etime <- power_date_new(fieldlist$time) - - # declare variable - z500 <- fieldlist$field - - # monthly averaging - print("monthly mean...") - - # new faster monthly mean function - z500monthly <- monthly_mean(ics, ipsilon, z500, etime) - - # climatology - print("climatological mean...") - z500clim <- apply(z500monthly, c(1, 2), ave, rep(timeseason, length(years))) - z500clim <- aperm(z500clim, c(2, 3, 1)) - - # monthly anomalies - print("anomalies...") - z500anom <- z500monthly - z500clim - - # compute EOFs - print("EOFs...") - EOFS <- eofs(ics, ipsilon, z500anom, - neof = neofs, xlim, ylim, - method = "SVD", do_standardize = T, do_regression = T - ) - # COEFF=eofs.coeff(ics,ipsilon,z500anom,EOFS, - # do_standardize=T) #do we really need this? - - # flip signs of patterns and regressions for NAO and AO - print("checking signs...") - for (i in 1:neofs) { - posreg <- NULL - - # define regions for sign control: boxes where values should be positive - if (tele == "NAO") { - if (i == 1) { - posreg <- c(-30, 30, 40, 50) - } # NAO - if (i == 2) { - posreg <- c(-60, 0, 40, 60) - } # East Atlantic Pattern - if (i == 3) { - posreg <- c(-30, 30, 50, 70) - } # Scandinavian Blocking - } + # new file opening + nomefile <- z500filename + fieldlist <- ncdf_opener_universal( + nomefile, + "zg", + tmonths = timeseason, + tyears = years, + rotate = rotation, + fillmiss = T + ) + print(str(fieldlist)) + + # extract calendar and time unit from the original file + tcal <- attributes(fieldlist$time)$cal + tunit <- attributes(fieldlist$time)$units + + # time array + etime <- power_date_new(fieldlist$time) + + # declare variable + z500 <- fieldlist$field + + # monthly averaging + print("monthly mean...") + + # new faster monthly mean function + z500monthly <- monthly_mean(ics, ipsilon, z500, etime) + # climatology + print("climatological mean...") + z500clim <- + apply(z500monthly, c(1, 2), ave, rep(timeseason, length(years))) + z500clim <- aperm(z500clim, c(2, 3, 1)) + + # monthly anomalies + print("anomalies...") + z500anom <- z500monthly - z500clim + + # compute EOFs + print("EOFs...") + EOFS <- eofs( + ics, + ipsilon, + z500anom, + neof = neofs, + xlim, + ylim, + method = "SVD", + do_standardize = T, + do_regression = T + ) + # COEFF=eofs.coeff(ics,ipsilon,z500anom,EOFS, + # do_standardize=T) #do we really need this? 
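eofs() is a MiLES helper, but the operation underneath is a principal component analysis of the monthly anomaly field. A minimal stand-alone equivalent on a toy time x space matrix (plain prcomp() instead of the helper's weighted SVD and regression maps, so the numbers are only indicative):

# EOF decomposition of a toy anomaly matrix (120 months x 50 grid points)
anom_toy <- matrix(rnorm(120 * 50), nrow = 120)
pca <- prcomp(anom_toy, center = TRUE, scale. = FALSE)
patterns <- pca$rotation[, 1:4] # loading patterns (EOFs)
pcs <- pca$x[, 1:4] # principal component time series
variance <- 100 * pca$sdev[1:4]^2 / sum(pca$sdev^2) # explained variance, %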
+ + # flip signs of patterns and regressions for NAO and AO + print("checking signs...") + for (i in 1:neofs) { + posreg <- NULL + + # define regions for sign control: boxes where values should be positive + if (tele == "NAO") { + if (i == 1) { + posreg <- c(-30, 30, 40, 50) + } # NAO + if (i == 2) { + posreg <- c(-60, 0, 40, 60) + } # East Atlantic Pattern + if (i == 3) { + posreg <- c(-30, 30, 50, 70) + } # Scandinavian Blocking + } - if (tele == "AO") { - if (i == 1) { - posreg <- c(-180, 180, 20, 50) - } # Arctic Oscillation - if (i == 2) { - posreg <- c(-120, -60, 40, 60) - } # PNA + if (tele == "AO") { + if (i == 1) { + posreg <- c(-180, 180, 20, 50) + } # Arctic Oscillation + if (i == 2) { + posreg <- c(-120, -60, 40, 60) + } # PNA + } + + # if definition of region exists + if (!is.null(posreg)) { + # convert into indices + xbox <- whicher(EOFS$pattern$x, posreg[1]):whicher( + EOFS$pattern$x, + posreg[2] + ) + ybox <- whicher(EOFS$pattern$y, posreg[3]):whicher( + EOFS$pattern$y, + posreg[4] + ) + valuereg <- mean(EOFS$pattern$z[xbox, ybox, i]) + + # if negative in the box, flip all signs! + if (valuereg < 0) { + EOFS$pattern$z[, , i] <- -EOFS$pattern$z[, , i] + EOFS$regression <- -EOFS$regression + } + } } - # if definition of region exists - if (!is.null(posreg)) { - # convert into indices - xbox <- whicher(EOFS$pattern$x, posreg[1]):whicher( - EOFS$pattern$x, - posreg[2] + # expand EOF pattern to save it + expanded_pattern <- EOFS$regression * NA + expanded_pattern[ + whicher(ics, xlim[1]):whicher(ics, xlim[2]), + whicher(ipsilon, ylim[1]):whicher(ipsilon, ylim[2]), + ] <- + EOFS$pattern$z + + t1 <- proc.time() - t0 + print(t1) + + + ########################################################## + #------------------------Save to NetCDF------------------# + ########################################################## + + # saving output to netcdf files + print("saving NetCDF climatologies...") + print(savefile1) + + # monthly specific time + monthtime <- as.numeric(etime$data[etime$day == 15]) + + # dimensions definition + TIME <- paste(tunit, " since ", year1, "-", timeseason[1], + "-01 00:00:00", + sep = "" + ) + LEVEL <- 50000 + x <- ncdim_def("lon", "degrees_east", ics, longname = "longitude") + y <- + ncdim_def("lat", "degrees_north", ipsilon, longname = "latitude") + z <- ncdim_def("plev", "Pa", LEVEL, longname = "pressure") + ef <- ncdim_def("PC", "-", 1:neofs) + t <- ncdim_def( + "time", + TIME, + monthtime, + calendar = tcal, + longname = "time", + unlim = T + ) + + # defining vars + unit <- "m" + longvar <- "EOFs Loading Pattern" + pattern_ncdf <- + ncvar_def( + "Patterns", + unit, + list(x, y, z, ef), + -999, + longname = longvar, + prec = "single", + compression = 1 ) - ybox <- whicher(EOFS$pattern$y, posreg[3]):whicher( - EOFS$pattern$y, - posreg[4] + + unit <- "m" + longvar <- "EOFs Linear Regressions" + regression_ncdf <- + ncvar_def( + "Regressions", + unit, + list(x, y, z, ef), + -999, + longname = longvar, + prec = "single", + compression = 1 ) - valuereg <- mean(EOFS$pattern$z[xbox, ybox, i]) - # if negative in the box, flip all signs! 
- if (valuereg < 0) { - EOFS$pattern$z[, , i] <- -EOFS$pattern$z[, , i] - EOFS$regression <- -EOFS$regression - } - } + unit <- paste0("0-", neofs) + longvar <- "PCs timeseries" + pc_ncdf <- ncvar_def( + "PCs", + unit, + list(ef, t), + -999, + longname = longvar, + prec = "single", + compression = 1 + ) + + unit <- "%" + longvar <- "EOFs variance" + variance_ncdf <- ncvar_def( + "Variances", + unit, + list(ef), + -999, + longname = longvar, + prec = "single", + compression = 1 + ) + + # saving files + ncfile1 <- nc_create( + savefile1, + list(pattern_ncdf, pc_ncdf, variance_ncdf, regression_ncdf) + ) + ncvar_put( + ncfile1, + "Patterns", + expanded_pattern, + start = c(1, 1, 1, 1), + count = c(-1, -1, -1, -1) + ) + ncvar_put( + ncfile1, + "Regressions", + EOFS$regression, + start = c(1, 1, 1, 1), + count = c(-1, -1, -1, -1) + ) + ncvar_put(ncfile1, + "PCs", + EOFS$coeff, + start = c(1, 1), + count = c(-1, -1) + ) + ncvar_put(ncfile1, + "Variances", + EOFS$variance, + start = c(1), + count = c(-1) + ) + nc_close(ncfile1) + return(savefile1) } - - # expand EOF pattern to save it - expanded_pattern <- EOFS$regression * NA - expanded_pattern[ - whicher(ics, xlim[1]):whicher(ics, xlim[2]), - whicher(ipsilon, ylim[1]):whicher(ipsilon, ylim[2]), - ] <- - EOFS$pattern$z - - t1 <- proc.time() - t0 - print(t1) - - - ########################################################## - #------------------------Save to NetCDF------------------# - ########################################################## - - # saving output to netcdf files - print("saving NetCDF climatologies...") - print(savefile1) - - # monthly specific time - monthtime <- as.numeric(etime$data[etime$day == 15]) - - # dimensions definition - TIME <- paste(tunit, " since ", year1, "-", timeseason[1], - "-01 00:00:00", - sep = "" - ) - LEVEL <- 50000 - x <- ncdim_def("lon", "degrees_east", ics, longname = "longitude") - y <- ncdim_def("lat", "degrees_north", ipsilon, longname = "latitude") - z <- ncdim_def("plev", "Pa", LEVEL, longname = "pressure") - ef <- ncdim_def("PC", "-", 1:neofs) - t <- ncdim_def("time", TIME, monthtime, - calendar = tcal, - longname = "time", unlim = T - ) - - # defining vars - unit <- "m" - longvar <- "EOFs Loading Pattern" - pattern_ncdf <- ncvar_def("Patterns", unit, list(x, y, z, ef), -999, - longname = longvar, prec = "single", compression = 1 - ) - - unit <- "m" - longvar <- "EOFs Linear Regressions" - regression_ncdf <- ncvar_def("Regressions", unit, list(x, y, z, ef), -999, - longname = longvar, prec = "single", compression = 1 - ) - - unit <- paste0("0-", neofs) - longvar <- "PCs timeseries" - pc_ncdf <- ncvar_def("PCs", unit, list(ef, t), -999, - longname = longvar, prec = "single", compression = 1 - ) - - unit <- "%" - longvar <- "EOFs variance" - variance_ncdf <- ncvar_def("Variances", unit, list(ef), -999, - longname = longvar, prec = "single", compression = 1 - ) - - # saving files - ncfile1 <- nc_create( - savefile1, - list(pattern_ncdf, pc_ncdf, variance_ncdf, regression_ncdf) - ) - ncvar_put(ncfile1, "Patterns", expanded_pattern, - start = c(1, 1, 1, 1), count = c(-1, -1, -1, -1) - ) - ncvar_put(ncfile1, "Regressions", EOFS$regression, - start = c(1, 1, 1, 1), count = c(-1, -1, -1, -1) - ) - ncvar_put(ncfile1, "PCs", EOFS$coeff, start = c(1, 1), count = c(-1, -1)) - ncvar_put(ncfile1, "Variances", EOFS$variance, start = c(1), count = c(-1)) - nc_close(ncfile1) - return(savefile1) -} diff --git a/esmvaltool/diag_scripts/miles/eof_figures.R b/esmvaltool/diag_scripts/miles/eof_figures.R index 
7e4612f203..e03d0b797c 100644 --- a/esmvaltool/diag_scripts/miles/eof_figures.R +++ b/esmvaltool/diag_scripts/miles/eof_figures.R @@ -5,30 +5,63 @@ # DECLARING THE FUNCTION: EXECUTION IS AT THE BOTTOM OF THE SCRIPT -miles_eof_figures <- function(dataset, expid, ens, year1, year2, - dataset_ref, expid_ref, ens_ref, - year1_ref, year2_ref, - season, FIGDIR, FILESDIR, - REFDIR, PROGDIR, tele) { - +miles_eof_figures <- function(dataset, + expid, + ens, + year1, + year2, + dataset_ref, + expid_ref, + ens_ref, + year1_ref, + year2_ref, + season, + FIGDIR, + FILESDIR, + REFDIR, + PROGDIR, + tele) { # use filebuilding script to access to file nomefile_exp <- file_builder( - FILESDIR, paste0("EOFs/", tele), "EOFs", - dataset, expid, ens, year1, year2, season + FILESDIR, + paste0("EOFs/", tele), + "EOFs", + dataset, + expid, + ens, + year1, + year2, + season ) # check for REFDIR==FILESDIR, i.e. if we are using the # climatology provided by MiLES or another dataset MiLES-generated if (REFDIR != FILESDIR) { nomefile_ref <- paste0( - file.path(REFDIR, paste0("EOFs/", tele)), "/EOFs_", # nolint - dataset_ref, "_", year1_ref, "_", year2_ref, "_", season, ".nc" + file.path(REFDIR, paste0("EOFs/", tele)), + "/EOFs_", + # nolint + dataset_ref, + "_", + year1_ref, + "_", + year2_ref, + "_", + season, + ".nc" ) } else { # use file.builder to create the path of the blocking files nomefile_ref <- file_builder( - FILESDIR, paste0("EOFs/", tele), "EOFs", - dataset_ref, expid_ref, ens_ref, year1_ref, year2_ref, season + FILESDIR, + paste0("EOFs/", tele), + "EOFs", + dataset_ref, + expid_ref, + ens_ref, + year1_ref, + year2_ref, + season ) } @@ -65,7 +98,8 @@ miles_eof_figures <- function(dataset, expid, ens, year1, year2, ########################################################## # plot properties - info_exp <- info_builder(dataset, expid, ens, year1, year2, season) + info_exp <- + info_builder(dataset, expid, ens, year1, year2, season) info_ref <- info_builder( dataset_ref, expid_ref, ens_ref, year1_ref, year2_ref, season @@ -73,6 +107,7 @@ miles_eof_figures <- function(dataset, expid, ens, year1, year2, lev_field <- seq(-150, 150, 20) lev_diff <- seq(-95, 95, 10) + filenames <- c() # loop on number of EOFs for (neof in 1:neofs) { linear_exp <- regressions_exp[, , neof] @@ -100,10 +135,19 @@ miles_eof_figures <- function(dataset, expid, ens, year1, year2, # define figure figname <- fig_builder( - FIGDIR, paste0("EOFs/", tele), paste0("EOF", neof), - dataset, expid, ens, year1, year2, season, output_file_type + FIGDIR, + paste0("EOFs/", tele), + paste0("EOF", neof), + dataset, + expid, + ens, + year1, + year2, + season, + output_file_type ) print(figname) + filenames <- c(filenames, figname) # Chose output format for figure - by JvH open_plot_device(figname, output_file_type) @@ -118,60 +162,125 @@ miles_eof_figures <- function(dataset, expid, ens, year1, year2, # plot properties par(plotpar) - im <- plot_prepare(ics, ipsilon, linear_exp, proj = map_projection, - lat_lim = lat_lim) - filled_contour3(im$x, im$y, im$z, - xlab = im$xlab, ylab = im$ylab, - main = paste(info_exp), levels = lev_field, - color.palette = palette3, xlim = im$xlim, ylim = im$ylim, + im <- + plot_prepare(ics, + ipsilon, + linear_exp, + proj = map_projection, + lat_lim = lat_lim + ) + filled_contour3( + im$x, + im$y, + im$z, + xlab = im$xlab, + ylab = im$ylab, + main = paste(info_exp), + levels = lev_field, + color.palette = palette3, + xlim = im$xlim, + ylim = im$ylim, axes = im$axes ) - mtext(title_name, side = 3, line = .5, outer = TRUE, 
cex = 2, font = 2) + mtext( + title_name, + side = 3, + line = .5, + outer = TRUE, + cex = 2, + font = 2 + ) proj_addland(proj = map_projection) - text(varpoints[1], varpoints[2], paste("Variance Explained: ", - round(variance_exp[neof], 2), "%", - sep = "" - ), cex = 2) - - im <- plot_prepare(ics, ipsilon, linear_ref, proj = map_projection, - lat_lim = lat_lim) - filled_contour3(im$x, im$y, im$z, - xlab = im$xlab, ylab = im$ylab, - main = paste(info_ref), levels = lev_field, - color.palette = palette3, xlim = im$xlim, ylim = im$ylim, + text(varpoints[1], + varpoints[2], + paste("Variance Explained: ", + round(variance_exp[neof], 2), "%", + sep = "" + ), + cex = 2 + ) + + im <- + plot_prepare(ics, + ipsilon, + linear_ref, + proj = map_projection, + lat_lim = lat_lim + ) + filled_contour3( + im$x, + im$y, + im$z, + xlab = im$xlab, + ylab = im$ylab, + main = paste(info_ref), + levels = lev_field, + color.palette = palette3, + xlim = im$xlim, + ylim = im$ylim, axes = im$axes ) - mtext(title_name, side = 3, line = .5, outer = TRUE, cex = 2, font = 2) + mtext( + title_name, + side = 3, + line = .5, + outer = TRUE, + cex = 2, + font = 2 + ) proj_addland(proj = map_projection) - image_scale3(volcano, - levels = lev_field, color.palette = palette3, - colorbar.label = "m", cex.colorbar = imgscl_colorbar, - cex.label = imgscl_label, colorbar.width = 1 * af, + image_scale3( + volcano, + levels = lev_field, + color.palette = palette3, + colorbar.label = "m", + cex.colorbar = imgscl_colorbar, + cex.label = imgscl_label, + colorbar.width = 1 * af, line.label = imgscl_line ) - text(varpoints[1], varpoints[2], paste("Variance Explained: ", - round(variance_ref[neof], 2), "%", - sep = "" - ), cex = 2) + text(varpoints[1], + varpoints[2], + paste("Variance Explained: ", + round(variance_ref[neof], 2), "%", + sep = "" + ), + cex = 2 + ) # delta field plot - im <- plot_prepare(ics, ipsilon, linear_exp - linear_ref, - proj = map_projection, lat_lim = lat_lim + im <- plot_prepare(ics, + ipsilon, + linear_exp - linear_ref, + proj = map_projection, + lat_lim = lat_lim ) - filled_contour3(im$x, im$y, im$z, - xlab = im$xlab, ylab = im$ylab, - main = paste("Difference"), levels = lev_diff, - color.palette = palette2, xlim = im$xlim, ylim = im$ylim, + filled_contour3( + im$x, + im$y, + im$z, + xlab = im$xlab, + ylab = im$ylab, + main = paste("Difference"), + levels = lev_diff, + color.palette = palette2, + xlim = im$xlim, + ylim = im$ylim, axes = im$axes ) proj_addland(proj = map_projection) - image_scale3(volcano, - levels = lev_diff, color.palette = palette2, - colorbar.label = "m", cex.colorbar = imgscl_colorbar, - cex.label = imgscl_label, colorbar.width = 1 * af, + image_scale3( + volcano, + levels = lev_diff, + color.palette = palette2, + colorbar.label = "m", + cex.colorbar = imgscl_colorbar, + cex.label = imgscl_label, + colorbar.width = 1 * af, line.label = imgscl_line ) dev.off() } + return(list(figs = filenames, mod = nomefile_exp, ref = nomefile_ref)) } diff --git a/esmvaltool/diag_scripts/miles/miles_block.R b/esmvaltool/diag_scripts/miles/miles_block.R index 0a28be0d20..ed8b06d153 100644 --- a/esmvaltool/diag_scripts/miles/miles_block.R +++ b/esmvaltool/diag_scripts/miles/miles_block.R @@ -5,34 +5,50 @@ # E. Arnone (ISAC-CNR, Italy) (ESMValTool v2.0 adaptation) # ############################################################################# # Description -# MiLES is a tool for estimating properties of mid-latitude climate originally -# thought for EC-Earth output and then extended to any model data. 
+# MiLES is a tool for estimating properties of mid-latitude climate. # It works on daily 500hPa geopotential height data and it produces # climatological figures for the chosen time period. Data are interpolated # on a common 2.5x2.5 grid. -# Model data are compared against ECMWF ERA-INTERIM reanalysis -# for a standard period (1989-2010). -# It supports analysis for the 4 standard seasons.# -# Required -# -# Optional -# -# Caveats +# Model data are compared against a reference field such as the +# ECMWF ERA-Interim reanalysis. # # Modification history -# 20180525-arno_en: Conversion to v2.0 -# 20181203 hard_jo: Completed conversion, rlint compliant +# 20180525-arnone_enrico: Conversion to v2.0 +# 20181203-vonhardenberg_jost: Completed conversion, rlint compliant # # ############################################################################ library(tools) library(yaml) +provenance_record <- function(infile) { + xprov <- list( + ancestors = infile, + authors = list( + "vonhardenberg_jost", "davini_paolo", + "arnone_enrico" + ), + references = list( + "davini18", "davini12jclim", + "tibaldi90tel" + ), + projects = list("c3s-magic"), + caption = "MiLES blocking statistics", + statistics = list("other"), + realms = list("atmos"), + themes = list("phys"), + domains = list("nh") + ) + return(xprov) +} + diag_scripts_dir <- Sys.getenv("diag_scripts") + source(paste0(diag_scripts_dir, "/miles/basis_functions.R")) source(paste0(diag_scripts_dir, "/miles/block_figures.R")) source(paste0(diag_scripts_dir, "/miles/block_fast.R")) source(paste0(diag_scripts_dir, "/miles/miles_parameters.R")) +source(paste0(diag_scripts_dir, "/shared/external.R")) # nolint # read settings and metadata files args <- commandArgs(trailingOnly = TRUE) @@ -62,18 +78,25 @@ work_dir <- settings$work_dir regridding_dir <- settings$run_dir plot_dir <- settings$plot_dir dir.create(work_dir, recursive = T, showWarnings = F) -dir.create(regridding_dir, recursive = T, showWarnings = F) +dir.create(regridding_dir, + recursive = T, + showWarnings = F +) dir.create(plot_dir, recursive = T, showWarnings = F) # setup provenance file and list -provenance_file <- paste0(regridding_dir, "/", "diagnostic_provenance.yml") +provenance_file <- + paste0(regridding_dir, "/", "diagnostic_provenance.yml") provenance <- list() # extract metadata models_dataset <- unname(sapply(list0, "[[", "dataset")) models_ensemble <- unname(sapply(list0, "[[", "ensemble")) models_exp <- unname(sapply(list0, "[[", "exp")) -reference_model <- unname(sapply(list0, "[[", "reference_dataset"))[1] +reference_dataset <- + unname(sapply(list0, "[[", "reference_dataset"))[1] +reference_exp <- + unname(sapply(list0, "[[", "reference_exp"))[1] models_start_year <- unname(sapply(list0, "[[", "start_year")) models_end_year <- unname(sapply(list0, "[[", "end_year")) models_experiment <- unname(sapply(list0, "[[", "exp")) @@ -92,63 +115,77 @@ for (model_idx in c(1:(length(models_dataset)))) { infile <- climofiles[model_idx] for (seas in seasons) { filenames <- miles_block_fast( - year1 = year1, year2 = year2, expid = exp, ens = ensemble, - dataset = dataset, season = seas, z500filename = infile, - FILESDIR = work_dir, doforce = TRUE + year1 = year1, + year2 = year2, + expid = exp, + ens = ensemble, + dataset = dataset, + season = seas, + z500filename = infile, + FILESDIR = work_dir, + doforce = TRUE ) # Set provenance for output files - caption <- paste0("MiLES blocking statistics") - xprov <- list(ancestors = list(infile), - authors = list("hard_jo", "davi_pa", "arno_en"), 
-          references = list("davini18", "davini12jclim",
-                            "tibaldi90tel"),
-          projects = list("c3s-magic"),
-          caption = caption,
-          statistics = list("other"),
-          realms = list("atmos"),
-          themes = list("phys"),
-          domains = list("nh"))
+      xprov <- provenance_record(list(infile))
       for (fname in filenames) {
         provenance[[fname]] <- xprov
       }
     }
   }

-# Write provenance to file
-write_yaml(provenance, provenance_file)
-
 ##
 ## Make the plots
 ##
-if (write_plots) {
-  ref_idx <- which(models_dataset == reference_model)
-  if (length(ref_idx) == 0) {
-    ref_idx <- length(models_dataset)
-  }
-  dataset_ref <- models_dataset[ref_idx]
-  exp_ref <- models_exp[ref_idx]
-  ensemble_ref <- models_ensemble[ref_idx]
-  year1_ref <- models_start_year[ref_idx]
-  year2_ref <- models_end_year[ref_idx]
-
-  for (model_idx in c(1:(length(models_dataset)))) {
-    if (model_idx != ref_idx) {
-      exp <- models_exp[model_idx]
-      dataset <- models_dataset[model_idx]
-      ensemble <- models_ensemble[model_idx]
-      year1 <- models_start_year[model_idx]
-      year2 <- models_end_year[model_idx]
-      for (seas in seasons) {
-        miles_block_figures(
-          year1 = year1, year2 = year2, expid = exp,
-          dataset = dataset, ens = ensemble,
-          dataset_ref = dataset_ref, year1_ref = year1_ref,
-          year2_ref = year2_ref, expid_ref = exp_ref,
-          ens_ref = ensemble_ref, season = seas,
-          FIGDIR = plot_dir, FILESDIR = work_dir,
-          REFDIR = work_dir
-        )
+if (!is.null(reference_exp)) {
+  ref_idx <- which((models_dataset == reference_dataset) &
+    (models_exp == reference_exp))
+} else {
+  ref_idx <- which(models_dataset == reference_dataset)
+}
+if (length(ref_idx) == 0) {
+  ref_idx <- length(models_dataset)
+}
+dataset_ref <- models_dataset[ref_idx]
+exp_ref <- models_exp[ref_idx]
+ensemble_ref <- models_ensemble[ref_idx]
+year1_ref <- models_start_year[ref_idx]
+year2_ref <- models_end_year[ref_idx]
+
+for (model_idx in c(1:(length(models_dataset)))) {
+  if (model_idx != ref_idx) {
+    exp <- models_exp[model_idx]
+    dataset <- models_dataset[model_idx]
+    ensemble <- models_ensemble[model_idx]
+    year1 <- models_start_year[model_idx]
+    year2 <- models_end_year[model_idx]
+    for (seas in seasons) {
+      filenames <- miles_block_figures(
+        year1 = year1,
+        year2 = year2,
+        expid = exp,
+        dataset = dataset,
+        ens = ensemble,
+        dataset_ref = dataset_ref,
+        year1_ref = year1_ref,
+        year2_ref = year2_ref,
+        expid_ref = exp_ref,
+        ens_ref = ensemble_ref,
+        season = seas,
+        FIGDIR = plot_dir,
+        FILESDIR = work_dir,
+        REFDIR = work_dir
+      )
+      # Set provenance for output files (same as diagnostic files)
+      xprov <- provenance_record(list(
+        climofiles[model_idx],
+        climofiles[ref_idx]
+      ))
+      for (fname in filenames$figs) {
+        provenance[[fname]] <- xprov
       }
     }
   }
 }
+
+# Write provenance to file
+write_yaml(provenance, provenance_file)
diff --git a/esmvaltool/diag_scripts/miles/miles_eof.R b/esmvaltool/diag_scripts/miles/miles_eof.R
index f8b114fd22..2467a488c8 100644
--- a/esmvaltool/diag_scripts/miles/miles_eof.R
+++ b/esmvaltool/diag_scripts/miles/miles_eof.R
@@ -1,36 +1,47 @@
 # #############################################################################
-# miles_eof.r
+# miles_eof.R
 # Authors: P. Davini (ISAC-CNR, Italy) (author of MiLES)
 #          J. von Hardenberg (ISAC-CNR, Italy) (ESMValTool adaptation)
 # #############################################################################
 # Description
-# MiLES is a tool for estimating properties of mid-latitude climate originally
-# thought for EC-Earth output and then extended to any model data.
+# MiLES is a tool for estimating properties of mid-latitude climate.
# It works on daily 500hPa geopotential height data and it produces # climatological figures for the chosen time period. Data are interpolated # on a common 2.5x2.5 grid. -# Model data are compared against ECMWF ERA-INTERIM reanalysis -# for a standard period (1989-2010). -# It supports analysis for the 4 standard seasons.# -# Required -# -# Optional -# -# Caveats -# -# Modification history +# Model data are compared against a reference field such as the +# ECMWF ERA-Interim reanalysis. +# It supports analysis for the 4 standard seasons. # # ############################################################################ library(tools) library(yaml) +provenance_record <- function(infile) { + xprov <- list( + ancestors = infile, + authors = list( + "vonhardenberg_jost", "davini_paolo", + "arnone_enrico" + ), + references = list("davini18"), + projects = list("c3s-magic"), + caption = "MiLES EOF statistics", + statistics = list("other"), + realms = list("atmos"), + themes = list("phys"), + domains = list("nh") + ) + return(xprov) +} + diag_scripts_dir <- Sys.getenv("diag_scripts") source(paste0(diag_scripts_dir, "/miles/basis_functions.R")) source(paste0(diag_scripts_dir, "/miles/eof_figures.R")) source(paste0(diag_scripts_dir, "/miles/eof_fast.R")) source(paste0(diag_scripts_dir, "/miles/miles_parameters.R")) +source(paste0(diag_scripts_dir, "/shared/external.R")) # nolint # read settings and metadata files args <- commandArgs(trailingOnly = TRUE) @@ -60,18 +71,25 @@ work_dir <- settings$work_dir regridding_dir <- settings$run_dir plot_dir <- settings$plot_dir dir.create(work_dir, recursive = T, showWarnings = F) -dir.create(regridding_dir, recursive = T, showWarnings = F) +dir.create(regridding_dir, + recursive = T, + showWarnings = F +) dir.create(plot_dir, recursive = T, showWarnings = F) # setup provenance file and list -provenance_file <- paste0(regridding_dir, "/", "diagnostic_provenance.yml") +provenance_file <- + paste0(regridding_dir, "/", "diagnostic_provenance.yml") provenance <- list() # extract metadata models_dataset <- unname(sapply(list0, "[[", "dataset")) models_ensemble <- unname(sapply(list0, "[[", "ensemble")) models_exp <- unname(sapply(list0, "[[", "exp")) -reference_model <- unname(sapply(list0, "[[", "reference_dataset"))[1] +reference_dataset <- + unname(sapply(list0, "[[", "reference_dataset"))[1] +reference_exp <- + unname(sapply(list0, "[[", "reference_exp"))[1] models_start_year <- unname(sapply(list0, "[[", "start_year")) models_end_year <- unname(sapply(list0, "[[", "end_year")) models_experiment <- unname(sapply(list0, "[[", "exp")) @@ -91,23 +109,19 @@ for (model_idx in c(1:(length(models_dataset)))) { for (tele in teles) { for (seas in seasons) { filenames <- miles_eofs_fast( - dataset = dataset, expid = exp, ens = ensemble, - year1 = year1, year2 = year2, season = seas, tele = tele, - z500filename = infile, FILESDIR = work_dir, + dataset = dataset, + expid = exp, + ens = ensemble, + year1 = year1, + year2 = year2, + season = seas, + tele = tele, + z500filename = infile, + FILESDIR = work_dir, doforce = TRUE ) # Set provenance for output files - caption <- paste0("MiLES eof statistics") - xprov <- list(ancestors = list(infile), - authors = list("hard_jo", "davi_pa", "arno_en"), - references = list("davini18", "davini12jclim", - "tibaldi90tel"), - projects = list("c3s-magic"), - caption = caption, - statistics = list("other"), - realms = list("atmos"), - themes = list("phys"), - domains = list("nh")) + xprov <- provenance_record(list(infile)) for (fname in 
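filenames) {
        provenance[[fname]] <- xprov
      }

Each output file is keyed to the metadata list returned by provenance_record(), and the whole map is serialized once at the end of the script. A small sketch of that round trip (hypothetical file names, temporary output path; provenance_record() is the helper defined above in this script):

# Provenance bookkeeping in miniature
prov_toy <- list()
prov_toy[["EOFs_toy.nc"]] <- provenance_record(list("zg_toy.nc"))
write_yaml(prov_toy, tempfile(fileext = ".yml")) # one YAML map per run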
@@ -115,41 +129,62 @@ for (model_idx in c(1:(length(models_dataset)))) {
   }
 }

-# Write provenance to file
-write_yaml(provenance, provenance_file)
-
 ##
 ## Make the plots
 ##
-if (write_plots) {
-  ref_idx <- which(models_dataset == reference_model)
-  if (length(ref_idx) == 0) {
-    ref_idx <- length(models_dataset)
-  }
-  dataset_ref <- models_dataset[ref_idx]
-  exp_ref <- models_exp[ref_idx]
-  ensemble_ref <- models_ensemble[ref_idx]
-  year1_ref <- models_start_year[ref_idx]
-  year2_ref <- models_end_year[ref_idx]
-
-  for (model_idx in c(1:(length(models_dataset)))) {
-    if (model_idx != ref_idx) {
-      exp <- models_exp[model_idx]
-      dataset <- models_dataset[model_idx]
-      ensemble <- models_ensemble[model_idx]
-      year1 <- models_start_year[model_idx]
-      year2 <- models_end_year[model_idx]
-      for (tele in teles) {
-        for (seas in seasons) {
-          miles_eof_figures(
-            expid = exp, year1 = year1, year2 = year2, dataset = dataset,
-            ens = ensemble, dataset_ref = dataset_ref, expid_ref = exp_ref,
-            year1_ref = year1_ref, ens_ref = ensemble_ref,
-            year2_ref = year2_ref, season = seas, tele = tele,
-            FIGDIR = plot_dir, FILESDIR = work_dir, REFDIR = work_dir
-          )
+if (!is.null(reference_exp)) {
+  ref_idx <- which((models_dataset == reference_dataset) &
+    (models_exp == reference_exp))
+} else {
+  ref_idx <- which(models_dataset == reference_dataset)
+}
+if (length(ref_idx) == 0) {
+  ref_idx <- length(models_dataset)
+}
+dataset_ref <- models_dataset[ref_idx]
+exp_ref <- models_exp[ref_idx]
+ensemble_ref <- models_ensemble[ref_idx]
+year1_ref <- models_start_year[ref_idx]
+year2_ref <- models_end_year[ref_idx]
+
+for (model_idx in c(1:(length(models_dataset)))) {
+  if (model_idx != ref_idx) {
+    exp <- models_exp[model_idx]
+    dataset <- models_dataset[model_idx]
+    ensemble <- models_ensemble[model_idx]
+    year1 <- models_start_year[model_idx]
+    year2 <- models_end_year[model_idx]
+    for (tele in teles) {
+      for (seas in seasons) {
+        filenames <- miles_eof_figures(
+          expid = exp,
+          year1 = year1,
+          year2 = year2,
+          dataset = dataset,
+          ens = ensemble,
+          dataset_ref = dataset_ref,
+          expid_ref = exp_ref,
+          year1_ref = year1_ref,
+          ens_ref = ensemble_ref,
+          year2_ref = year2_ref,
+          season = seas,
+          tele = tele,
+          FIGDIR = plot_dir,
+          FILESDIR = work_dir,
+          REFDIR = work_dir
+        )
+        # Set provenance for output files (same as diagnostic files)
+        xprov <- provenance_record(list(
+          climofiles[model_idx],
+          climofiles[ref_idx]
+        ))
+        for (fname in filenames$figs) {
+          provenance[[fname]] <- xprov
         }
       }
     }
   }
 }
+
+# Write provenance to file
+write_yaml(provenance, provenance_file)
diff --git a/esmvaltool/diag_scripts/miles/miles_parameters.R b/esmvaltool/diag_scripts/miles/miles_parameters.R
index 40dbdb5ead..210a52b364 100644
--- a/esmvaltool/diag_scripts/miles/miles_parameters.R
+++ b/esmvaltool/diag_scripts/miles/miles_parameters.R
@@ -23,7 +23,7 @@ af <- 1
 # All projection from mapproj package should be supported
 # but error may arise for non-polar plots
 # DEFAULT IS POLAR PLOT
-map_projection <- "azequalarea" # Alternative: "azequidistant"
+map_projection <- "azequalarea" # Alternative: "azequidistant"
 
 # Number of panels per figure (rows and column): default for polar plots
 panels <- c(3, 1)
@@ -42,8 +42,15 @@ if (map_projection != "no") {
 }
 
 # Custom parameteres for plots
-plotpar <- list(mfrow = panels, cex.main = 2.5, cex.axis = 1.5, cex.lab = 1.5,
-  mar = c(5, 5, 5, 7), oma = c(1, 1, 3, 2))
+plotpar <-
+  list(
+    mfrow = panels,
+    cex.main = 2.5,
+    cex.axis = 1.5,
+    cex.lab = 1.5,
+    
mar = c(5, 5, 5, 7), + oma = c(1, 1, 3, 2) + ) # imagescale3 color bar details imgscl_colorbar <- 1.4 @@ -52,26 +59,94 @@ imgscl_line <- 3 # color palette to be used # palette0 is taken from tim.colors of field to avoid library dependencies... -palette0 <- colorRampPalette(c( - "#00008F", "#00009F", "#0000AF", "#0000BF", "#0000CF", - "#0000DF", "#0000EF", "#0000FF", "#0010FF", "#0020FF", - "#0030FF", "#0040FF", "#0050FF", "#0060FF", "#0070FF", - "#0080FF", "#008FFF", "#009FFF", "#00AFFF", "#00BFFF", - "#00CFFF", "#00DFFF", "#00EFFF", "#00FFFF", "#10FFEF", - "#20FFDF", "#30FFCF", "#40FFBF", "#50FFAF", "#60FF9F", - "#70FF8F", "#80FF80", "#8FFF70", "#9FFF60", "#AFFF50", - "#BFFF40", "#CFFF30", "#DFFF20", "#EFFF10", "#FFFF00", - "#FFEF00", "#FFDF00", "#FFCF00", "#FFBF00", "#FFAF00", - "#FF9F00", "#FF8F00", "#FF8000", "#FF7000", "#FF6000", - "#FF5000", "#FF4000", "#FF3000", "#FF2000", "#FF1000", - "#FF0000", "#EF0000", "#DF0000", "#CF0000", "#BF0000", - "#AF0000", "#9F0000", "#8F0000", "#800000" -)) +palette0 <- colorRampPalette( + c( + "#00008F", + "#00009F", + "#0000AF", + "#0000BF", + "#0000CF", + "#0000DF", + "#0000EF", + "#0000FF", + "#0010FF", + "#0020FF", + "#0030FF", + "#0040FF", + "#0050FF", + "#0060FF", + "#0070FF", + "#0080FF", + "#008FFF", + "#009FFF", + "#00AFFF", + "#00BFFF", + "#00CFFF", + "#00DFFF", + "#00EFFF", + "#00FFFF", + "#10FFEF", + "#20FFDF", + "#30FFCF", + "#40FFBF", + "#50FFAF", + "#60FF9F", + "#70FF8F", + "#80FF80", + "#8FFF70", + "#9FFF60", + "#AFFF50", + "#BFFF40", + "#CFFF30", + "#DFFF20", + "#EFFF10", + "#FFFF00", + "#FFEF00", + "#FFDF00", + "#FFCF00", + "#FFBF00", + "#FFAF00", + "#FF9F00", + "#FF8F00", + "#FF8000", + "#FF7000", + "#FF6000", + "#FF5000", + "#FF4000", + "#FF3000", + "#FF2000", + "#FF1000", + "#FF0000", + "#EF0000", + "#DF0000", + "#CF0000", + "#BF0000", + "#AF0000", + "#9F0000", + "#8F0000", + "#800000" + ) +) palette1 <- colorRampPalette(c("white", "orange", "darkred")) palette2 <- colorRampPalette(c("blue", "white", "red")) -palette3 <- colorRampPalette(c("darkblue", "blue", "dodgerblue", - "white", "orange", "red", "darkred")) +palette3 <- colorRampPalette(c( + "darkblue", + "blue", + "dodgerblue", + "white", + "orange", + "red", + "darkred" +)) # additional color palette used for extradiagnostics histogram -KOL <- c("black", "darkgreen", "blue", "darkorange", "red", - "violet", "grey50", "black") +KOL <- c( + "black", + "darkgreen", + "blue", + "darkorange", + "red", + "violet", + "grey50", + "black" +) diff --git a/esmvaltool/diag_scripts/miles/miles_regimes.R b/esmvaltool/diag_scripts/miles/miles_regimes.R index 3bdeb6624a..2d10bddc74 100644 --- a/esmvaltool/diag_scripts/miles/miles_regimes.R +++ b/esmvaltool/diag_scripts/miles/miles_regimes.R @@ -4,33 +4,44 @@ # J. von Hardenberg (ISAC-CNR, Italy) (ESMValTool adaptation) # ############################################################################# # Description -# MiLES is a tool for estimating properties of mid-latitude climate originally -# thought for EC-Earth output and then extended to any model data. +# MiLES is a tool for estimating properties of mid-latitude climate. # It works on daily 500hPa geopotential height data and it produces # climatological figures for the chosen time period. Data are interpolated # on a common 2.5x2.5 grid. -# Model data are compared against ECMWF ERA-INTERIM reanalysis -# for a standard period (1989-2010). 
-# It supports analysis for the 4 standard seasons.# -# Required -# -# Optional -# -# Caveats -# -# Modification history +# Model data are compared against a reference field such as the +# ECMWF ERA-Interim reanalysis. +# It supports analysis for the 4 standard seasons. # # ############################################################################ library(tools) library(yaml) +provenance_record <- function(infile) { + xprov <- list( + ancestors = infile, + authors = list( + "vonhardenberg_jost", "davini_paolo", + "arnone_enrico" + ), + references = list("davini18", "corti99nat"), + projects = list("c3s-magic"), + caption = "MiLES regimes statistics", + statistics = list("other"), + realms = list("atmos"), + themes = list("phys"), + domains = list("nh") + ) + return(xprov) +} + diag_scripts_dir <- Sys.getenv("diag_scripts") source(paste0(diag_scripts_dir, "/miles/basis_functions.R")) source(paste0(diag_scripts_dir, "/miles/regimes_figures.R")) source(paste0(diag_scripts_dir, "/miles/regimes_fast.R")) source(paste0(diag_scripts_dir, "/miles/miles_parameters.R")) +source(paste0(diag_scripts_dir, "/shared/external.R")) # nolint # read settings and metadata files args <- commandArgs(trailingOnly = TRUE) @@ -60,18 +71,25 @@ work_dir <- settings$work_dir regridding_dir <- settings$run_dir plot_dir <- settings$plot_dir dir.create(work_dir, recursive = T, showWarnings = F) -dir.create(regridding_dir, recursive = T, showWarnings = F) +dir.create(regridding_dir, + recursive = T, + showWarnings = F +) dir.create(plot_dir, recursive = T, showWarnings = F) # setup provenance file and list -provenance_file <- paste0(regridding_dir, "/", "diagnostic_provenance.yml") +provenance_file <- + paste0(regridding_dir, "/", "diagnostic_provenance.yml") provenance <- list() # extract metadata models_dataset <- unname(sapply(list0, "[[", "dataset")) models_ensemble <- unname(sapply(list0, "[[", "ensemble")) models_exp <- unname(sapply(list0, "[[", "exp")) -reference_model <- unname(sapply(list0, "[[", "reference_dataset"))[1] +reference_dataset <- + unname(sapply(list0, "[[", "reference_dataset"))[1] +reference_exp <- + unname(sapply(list0, "[[", "reference_exp"))[1] models_start_year <- unname(sapply(list0, "[[", "start_year")) models_end_year <- unname(sapply(list0, "[[", "end_year")) models_experiment <- unname(sapply(list0, "[[", "exp")) @@ -90,62 +108,79 @@ for (model_idx in c(1:(length(models_dataset)))) { infile <- climofiles[model_idx] for (seas in seasons) { filenames <- miles_regimes_fast( - dataset = dataset, expid = exp, ens = ensemble, - year1 = year1, year2 = year2, season = seas, - z500filename = infile, FILESDIR = work_dir, nclusters = nclusters, + dataset = dataset, + expid = exp, + ens = ensemble, + year1 = year1, + year2 = year2, + season = seas, + z500filename = infile, + FILESDIR = work_dir, + nclusters = nclusters, doforce = T ) -# Set provenance for output files - caption <- paste0("MiLES regimes statistics") - xprov <- list(ancestors = list(infile), - authors = list("hard_jo", "davi_pa", "arno_en"), - references = list("davini18", "corti99nat"), - projects = list("c3s-magic"), - caption = caption, - statistics = list("other"), - realms = list("atmos"), - themes = list("phys"), - domains = list("nh")) + # Set provenance for output files + xprov <- provenance_record(list(infile)) for (fname in filenames) { provenance[[fname]] <- xprov } } } -# Write provenance to file -write_yaml(provenance, provenance_file) - ## ## Make the plots ## -if (write_plots) { - ref_idx <- which(models_dataset == 
reference_model)
-  if (length(ref_idx) == 0) {
-    ref_idx <- length(models_dataset)
-  }
-  dataset_ref <- models_dataset[ref_idx]
-  exp_ref <- models_exp[ref_idx]
-  ensemble_ref <- models_ensemble[ref_idx]
-  year1_ref <- models_start_year[ref_idx]
-  year2_ref <- models_end_year[ref_idx]
-
-  for (model_idx in c(1:(length(models_dataset)))) {
-    if (model_idx != ref_idx) {
-      exp <- models_exp[model_idx]
-      dataset <- models_dataset[model_idx]
-      ensemble <- models_ensemble[model_idx]
-      year1 <- models_start_year[model_idx]
-      year2 <- models_end_year[model_idx]
-      for (seas in seasons) {
-        miles_regimes_figures(
-          expid = exp, year1 = year1, year2 = year2, dataset = dataset,
-          ens = ensemble, dataset_ref = dataset_ref, expid_ref = exp_ref,
-          year1_ref = year1_ref, ens_ref = ensemble_ref,
-          year2_ref = year2_ref, season = seas,
-          FIGDIR = plot_dir, FILESDIR = work_dir, REFDIR = work_dir,
-          nclusters
-        )
+if (!is.null(reference_exp)) {
+  ref_idx <- which((models_dataset == reference_dataset) &
+    (models_exp == reference_exp))
+} else {
+  ref_idx <- which(models_dataset == reference_dataset)
+}
+if (length(ref_idx) == 0) {
+  ref_idx <- length(models_dataset)
+}
+dataset_ref <- models_dataset[ref_idx]
+exp_ref <- models_exp[ref_idx]
+ensemble_ref <- models_ensemble[ref_idx]
+year1_ref <- models_start_year[ref_idx]
+year2_ref <- models_end_year[ref_idx]
+
+for (model_idx in c(1:(length(models_dataset)))) {
+  if (model_idx != ref_idx) {
+    exp <- models_exp[model_idx]
+    dataset <- models_dataset[model_idx]
+    ensemble <- models_ensemble[model_idx]
+    year1 <- models_start_year[model_idx]
+    year2 <- models_end_year[model_idx]
+    for (seas in seasons) {
+      filenames <- miles_regimes_figures(
+        expid = exp,
+        year1 = year1,
+        year2 = year2,
+        dataset = dataset,
+        ens = ensemble,
+        dataset_ref = dataset_ref,
+        expid_ref = exp_ref,
+        year1_ref = year1_ref,
+        ens_ref = ensemble_ref,
+        year2_ref = year2_ref,
+        season = seas,
+        FIGDIR = plot_dir,
+        FILESDIR = work_dir,
+        REFDIR = work_dir,
+        nclusters
+      )
+      # Set provenance for output files (same as diagnostic files)
+      xprov <- provenance_record(list(
+        climofiles[model_idx],
+        climofiles[ref_idx]
+      ))
+      for (fname in filenames$figs) {
+        provenance[[fname]] <- xprov
      }
    }
  }
}
+
+# Write provenance to file
+write_yaml(provenance, provenance_file)
diff --git a/esmvaltool/diag_scripts/miles/regimes_fast.R b/esmvaltool/diag_scripts/miles/regimes_fast.R
index 464bd7c26b..ed46b26498 100644
--- a/esmvaltool/diag_scripts/miles/regimes_fast.R
+++ b/esmvaltool/diag_scripts/miles/regimes_fast.R
@@ -3,174 +3,271 @@
 #-------------P. Davini (May 2017)-------------------#
 ######################################################
 
-miles_regimes_fast <- function(dataset, expid, ens, year1, year2, season,
-                               z500filename, FILESDIR, nclusters, doforce) {
-
-  # t0
-  t0 <- proc.time()
-
-  if (nclusters != 4 | season != "DJF") {
-    stop("Beta version: unsupported season and/or number of clusters")
-  }
-
-  # test function to smooth seasonal cycle:
-  # it does not work fine yet, keep it false
-  smoothing <- T
-
-  # region boundaries for North Atlantic
-  xlim <- c(-80, 40)
-  ylim <- c(30, 87.5)
-
-  # define file where save data
-  savefile1 <- file_builder(FILESDIR, "Regimes", "RegimesPattern", dataset,
-                            expid, ens, year1, year2, season)
-
-  # check if data is already there to avoid re-run
-  if (file.exists(savefile1)) {
-    print("Actually requested weather regimes data is already there!")
-    print(savefile1)
-    if (doforce == T) {
-      print("Running with doforce=true... 
re-run!") - } else { - print("Skipping... activate doforce=true if you want to re-run it") - q() +miles_regimes_fast <- + function(dataset, + expid, + ens, + year1, + year2, + season, + z500filename, + FILESDIR, + nclusters, + doforce) { + # t0 + t0 <- proc.time() + + if (nclusters != 4 | season != "DJF") { + stop("Beta version: unsupported season and/or number of clusters") } - } - - # setting up time domain - years <- year1:year2 - timeseason <- season2timeseason(season) - - # new file opening - fieldlist <- ncdf_opener_universal(z500filename, namevar = "zg", - tmonths = timeseason, tyears = years, - rotate = "full") - # extract calendar and time unit from the original file - tcal <- attributes(fieldlist$time)$cal - tunit <- attributes(fieldlist$time)$units - - # time array - etime <- power_date_new(fieldlist$time) - - # declare variable - Z500 <- fieldlist$field - - print("Compute anomalies based on daily mean") - # smoothing flag and daily anomalies - if (smoothing) { - Z500anom <- daily_anom_run_mean(ics, ipsilon, Z500, etime) - } else { - Z500anom <- daily_anom_mean(ics, ipsilon, Z500, etime) - } + # test function to smooth seasonal cycle: + # it does not work fine yet, keep it false + smoothing <- T + + # region boundaries for North Atlantic + xlim <- c(-80, 40) + ylim <- c(30, 87.5) + + # define file where save data + savefile1 <- + file_builder( + FILESDIR, + "Regimes", + "RegimesPattern", + dataset, + expid, + ens, + year1, + year2, + season + ) + + # check if data is already there to avoid re-run + if (file.exists(savefile1)) { + print("Actually requested weather regimes data is already there!") + print(savefile1) + if (doforce == T) { + print("Running with doforce=true... re-run!") + } else { + print("Skipping... activate doforce=true if you want to re-run it") + q() + } + } - # compute weather regimes: new regimes2 function with minimum - # variance evaluation - weather_regimes <- regimes2(ics, ipsilon, Z500anom, ncluster = nclusters, - ntime = 1000, minvar = 0.8, xlim, ylim, - alg = "Hartigan-Wong") - - # Cluster assignation: based on the position of the absolute maximum/minimum - # negative value for NAO-, maximum for the other 3 regimes - compose <- weather_regimes$regimes - names <- paste("Regimes", 1:nclusters) - position <- rbind(c(-45, 65), c(-35, 50), c(10, 60), c(-20, 60)) - rownames(position) <- c("NAO-", "Atlantic Ridge", - "Scandinavian Blocking", "NAO+") - - # minimum distance in degrees to assign a regime name - min_dist_in_deg <- 20 - - # loop - for (i in 1:nclusters) { - - # find position of max and minimum values - MM <- which(compose[, , i] == max(compose[, , i], na.rm = T), arr.ind = T) - mm <- which(compose[, , i] == min(compose[, , i], na.rm = T), arr.ind = T) - - # use maximum or minimum (use special vector to alterate - # distance when needed) - if (max(compose[, , i], na.rm = T) > abs(min(compose[, , i], na.rm = T))) { - distmatrix <- rbind(c(ics[MM[1]], ipsilon[MM[2]]), - position + c(0, 0, 0, 1000)) + # setting up time domain + years <- year1:year2 + timeseason <- season2timeseason(season) + + # new file opening + fieldlist <- ncdf_opener_universal( + z500filename, + namevar = "zg", + tmonths = timeseason, + tyears = years, + rotate = "full", + fillmiss = TRUE + ) + + # extract calendar and time unit from the original file + tcal <- attributes(fieldlist$time)$cal + tunit <- attributes(fieldlist$time)$units + + # time array + etime <- power_date_new(fieldlist$time) + + # declare variable + Z500 <- fieldlist$field + + print("Compute anomalies based on 
daily mean") + # smoothing flag and daily anomalies + if (smoothing) { + Z500anom <- daily_anom_run_mean(ics, ipsilon, Z500, etime) } else { - distmatrix <- rbind(c(ics[mm[1]], ipsilon[mm[2]]), - position + c(1000, 1000, 1000, 0)) + Z500anom <- daily_anom_mean(ics, ipsilon, Z500, etime) } - # compute distances and names assignation - distmm <- dist(distmatrix)[1:nclusters] - print(distmm) - - # minimum distance for correct assignation of 15 deg - if (min(distmm) < min_dist_in_deg) { - names[i] <- rownames(position)[which.min(distmm)] + # compute weather regimes: new regimes2 function with minimum + # variance evaluation + weather_regimes <- + regimes2( + ics, + ipsilon, + Z500anom, + ncluster = nclusters, + ntime = 1000, + minvar = 0.8, + xlim, + ylim, + alg = "Hartigan-Wong" + ) + + # Cluster assignation: based on the position of the absolute + # maximum/minimum negative value for NAO-, maximum for the other + # 3 regimes + compose <- weather_regimes$regimes + names <- paste("Regimes", 1:nclusters) + position <- rbind(c(-45, 65), c(-35, 50), c(10, 60), c(-20, 60)) + rownames(position) <- c( + "NAO-", "Atlantic Ridge", + "Scandinavian Blocking", "NAO+" + ) + + # minimum distance in degrees to assign a regime name + min_dist_in_deg <- 20 + + # loop + for (i in 1:nclusters) { + # find position of max and minimum values + MM <- + which(compose[, , i] == max(compose[, , i], na.rm = T), arr.ind = T) + mm <- + which(compose[, , i] == min(compose[, , i], na.rm = T), arr.ind = T) + + # use maximum or minimum (use special vector to alterate + # distance when needed) + if (max(compose[, , i], na.rm = T) + > abs(min(compose[, , i], na.rm = T))) { + distmatrix <- rbind( + c(ics[MM[1]], ipsilon[MM[2]]), + position + c(0, 0, 0, 1000) + ) + } else { + distmatrix <- rbind( + c(ics[mm[1]], ipsilon[mm[2]]), + position + c(1000, 1000, 1000, 0) + ) + } - # avoid double assignation - if (i > 1 & any(names[i] == names[1:max(c(1, i - 1))])) { - print(paste("Warning: double assignation of the same regime.", - "Avoiding last assignation...")) - names[i] <- paste("Regime", i) + # compute distances and names assignation + distmm <- dist(distmatrix)[1:nclusters] + print(distmm) + + # minimum distance for correct assignation of 15 deg + if (min(distmm) < min_dist_in_deg) { + names[i] <- rownames(position)[which.min(distmm)] + + # avoid double assignation + if (i > 1 & any(names[i] == names[1:max(c(1, i - 1))])) { + print( + paste( + "Warning: double assignation of the same regime.", + "Avoiding last assignation..." 
+ ) + ) + names[i] <- paste("Regime", i) + } } + print(names[i]) } - print(names[i]) - } - t1 <- proc.time() - t0 - print(t1) - - ########################################################## - #------------------------Save to NetCDF------------------# - ########################################################## - - # saving output to netcdf files - print("saving NetCDF climatologies...") - - # dimensions definition - fulltime <- as.numeric(etime$data) - as.numeric(etime$data)[1] - TIME <- paste(tunit, " since ", year1, "-", timeseason[1], - "-01 00:00:00", sep = "") - x <- ncdim_def("lon", "degrees_east", ics, longname = "longitude") - y <- ncdim_def("lat", "degrees_north", ipsilon, longname = "latitude") - t <- ncdim_def("time", TIME, fulltime, calendar = tcal, - longname = "time", unlim = T) - - # extra dimensions definition - cl <- ncdim_def("lev", "cluster index", 1:nclusters, longname = "pressure") - - # var definition - unit <- "m" - longvar <- "Weather Regimes Pattern" - pattern_ncdf <- ncvar_def("Regimes", unit, list(x, y, cl), -999, - longname = longvar, prec = "single", - compression = 1) - - unit <- paste0("0-", nclusters) - longvar <- "Weather Regimes Cluster Index" - cluster_ncdf <- ncvar_def("Indices", unit, list(t), -999, longname = longvar, - prec = "single", compression = 1) - - unit <- "%" - longvar <- "Weather Regimes Frequencies" - frequencies_ncdf <- ncvar_def("Frequencies", unit, list(cl), -999, - longname = longvar, prec = "single", - compression = 1) - - # testnames - dimnchar <- ncdim_def("nchar", "", 1:max(nchar(names)), - create_dimvar = FALSE) - names_ncdf <- ncvar_def("Names", "", list(dimnchar, cl), prec = "char") - - # saving file - ncfile1 <- nc_create(savefile1, list(pattern_ncdf, cluster_ncdf, - frequencies_ncdf, names_ncdf)) - ncvar_put(ncfile1, "Regimes", weather_regimes$regimes, start = c(1, 1, 1), - count = c(-1, -1, -1)) - ncvar_put(ncfile1, "Indices", weather_regimes$cluster, start = c(1), - count = c(-1)) - ncvar_put(ncfile1, "Frequencies", weather_regimes$frequencies, - start = c(1), count = c(-1)) - ncvar_put(ncfile1, "Names", names) - nc_close(ncfile1) - return(savefile1) -} + t1 <- proc.time() - t0 + print(t1) + + ########################################################## + #------------------------Save to NetCDF------------------# + ########################################################## + + # saving output to netcdf files + print("saving NetCDF climatologies...") + + # dimensions definition + fulltime <- as.numeric(etime$data) - as.numeric(etime$data)[1] + TIME <- paste(tunit, " since ", year1, "-", timeseason[1], + "-01 00:00:00", + sep = "" + ) + x <- ncdim_def("lon", "degrees_east", ics, longname = "longitude") + y <- + ncdim_def("lat", "degrees_north", ipsilon, longname = "latitude") + t <- ncdim_def( + "time", + TIME, + fulltime, + calendar = tcal, + longname = "time", + unlim = T + ) + + # extra dimensions definition + cl <- + ncdim_def("lev", "cluster index", 1:nclusters, longname = "pressure") + + # var definition + unit <- "m" + longvar <- "Weather Regimes Pattern" + pattern_ncdf <- ncvar_def( + "Regimes", + unit, + list(x, y, cl), + -999, + longname = longvar, + prec = "single", + compression = 1 + ) + + unit <- paste0("0-", nclusters) + longvar <- "Weather Regimes Cluster Index" + cluster_ncdf <- + ncvar_def( + "Indices", + unit, + list(t), + -999, + longname = longvar, + prec = "single", + compression = 1 + ) + + unit <- "%" + longvar <- "Weather Regimes Frequencies" + frequencies_ncdf <- ncvar_def( + "Frequencies", + unit, + 
list(cl), + -999, + longname = longvar, + prec = "single", + compression = 1 + ) + + # testnames + dimnchar <- ncdim_def("nchar", "", 1:max(nchar(names)), + create_dimvar = FALSE + ) + names_ncdf <- + ncvar_def("Names", "", list(dimnchar, cl), prec = "char") + + # saving file + ncfile1 <- nc_create( + savefile1, + list( + pattern_ncdf, cluster_ncdf, + frequencies_ncdf, names_ncdf + ) + ) + ncvar_put( + ncfile1, + "Regimes", + weather_regimes$regimes, + start = c(1, 1, 1), + count = c(-1, -1, -1) + ) + ncvar_put( + ncfile1, + "Indices", + weather_regimes$cluster, + start = c(1), + count = c(-1) + ) + ncvar_put( + ncfile1, + "Frequencies", + weather_regimes$frequencies, + start = c(1), + count = c(-1) + ) + ncvar_put(ncfile1, "Names", names) + nc_close(ncfile1) + return(savefile1) + } diff --git a/esmvaltool/diag_scripts/miles/regimes_figures.R b/esmvaltool/diag_scripts/miles/regimes_figures.R index dfae1920ee..242d4d9c20 100644 --- a/esmvaltool/diag_scripts/miles/regimes_figures.R +++ b/esmvaltool/diag_scripts/miles/regimes_figures.R @@ -3,11 +3,21 @@ #-------------P. Davini (May 2017)-------------------# ###################################################### -miles_regimes_figures <- function(dataset, expid, ens, year1, year2, - dataset_ref, expid_ref, ens_ref, - year1_ref, year2_ref, season, - FIGDIR, FILESDIR, REFDIR, nclusters) { - +miles_regimes_figures <- function(dataset, + expid, + ens, + year1, + year2, + dataset_ref, + expid_ref, + ens_ref, + year1_ref, + year2_ref, + season, + FIGDIR, + FILESDIR, + REFDIR, + nclusters) { if (nclusters != 4 | season != "DJF") { stop("Beta version: unsupported season and/or number of clusters") } @@ -17,10 +27,21 @@ miles_regimes_figures <- function(dataset, expid, ens, year1, year2, ########################################################## # loading anomalies and variances of experiment - nomefile <- file_builder(FILESDIR, "Regimes", "RegimesPattern", dataset, - expid, ens, year1, year2, season) + nomefile <- + file_builder( + FILESDIR, + "Regimes", + "RegimesPattern", + dataset, + expid, + ens, + year1, + year2, + season + ) frequencies_exp <- ncdf_opener(nomefile, "Frequencies") - regimes_exp <- ncdf_opener(nomefile, namevar = "Regimes", rotate = "no") + regimes_exp <- + ncdf_opener(nomefile, namevar = "Regimes", rotate = "no") # loading names p <- nc_open(nomefile) @@ -32,18 +53,38 @@ miles_regimes_figures <- function(dataset, expid, ens, year1, year2, # check for REFDIR==FILESDIR, i.e. 
if we are using the climatology # provided by MiLES or another dataset MiLES-generated if (REFDIR != FILESDIR) { - nomefile_ref <- paste0(file.path(REFDIR, "Regimes"), "/RegimesPattern_", - dataset_ref, "_", year1_ref, "_", year2_ref, - "_", season, ".nc") + nomefile_ref <- + paste0( + file.path(REFDIR, "Regimes"), + "/RegimesPattern_", + dataset_ref, + "_", + year1_ref, + "_", + year2_ref, + "_", + season, + ".nc" + ) } else { # use file.builder to create the path of the blocking files - nomefile_ref <- file_builder(FILESDIR, "Regimes", "RegimesPattern", - dataset_ref, expid_ref, ens_ref, - year1_ref, year2_ref, season) + nomefile_ref <- + file_builder( + FILESDIR, + "Regimes", + "RegimesPattern", + dataset_ref, + expid_ref, + ens_ref, + year1_ref, + year2_ref, + season + ) } frequencies_ref <- ncdf_opener(nomefile_ref, "Frequencies") - regimes_ref <- ncdf_opener(nomefile_ref, namevar = "Regimes", rotate = "no") + regimes_ref <- + ncdf_opener(nomefile_ref, namevar = "Regimes", rotate = "no") # loading names p <- nc_open(nomefile_ref) @@ -56,10 +97,14 @@ miles_regimes_figures <- function(dataset, expid, ens, year1, year2, lev_diff <- seq(-150, 150, 20) # standard properties - info_exp <- info_builder(dataset, expid, ens, year1, year2, season) - info_ref <- info_builder(dataset_ref, expid_ref, ens_ref, - year1_ref, year2_ref, season) + info_exp <- + info_builder(dataset, expid, ens, year1, year2, season) + info_ref <- info_builder( + dataset_ref, expid_ref, ens_ref, + year1_ref, year2_ref, season + ) + filenames <- c() kk0 <- 1 # loop on regimes for (name in names_ref) { @@ -77,10 +122,20 @@ miles_regimes_figures <- function(dataset, expid, ens, year1, year2, print(name) # final plot production - figname <- fig_builder(FIGDIR, "Regimes", paste0("Regime", ii), - dataset, expid, ens, year1, year2, - season, output_file_type) + figname <- fig_builder( + FIGDIR, + "Regimes", + paste0("Regime", ii), + dataset, + expid, + ens, + year1, + year2, + season, + output_file_type + ) print(figname) + filenames <- c(filenames, figname) # Chose output format for figure - by JvH open_plot_device(figname, output_file_type) @@ -95,45 +150,113 @@ miles_regimes_figures <- function(dataset, expid, ens, year1, year2, # plot properties par(plotpar) - im <- plot_prepare(ics, ipsilon, regimes_exp[, , ii], - proj = map_projection, lat_lim = lat_lim) - filled_contour3(im$x, im$y, im$z, xlab = im$xlab, ylab = im$ylab, - main = paste(info_exp), levels = lev_field, - color.palette = palette3, xlim = im$xlim, - ylim = im$ylim, axes = im$axes) - mtext(name, side = 3, line = .5, outer = TRUE, cex = 2, font = 2) + im <- plot_prepare(ics, + ipsilon, + regimes_exp[, , ii], + proj = map_projection, + lat_lim = lat_lim + ) + filled_contour3( + im$x, + im$y, + im$z, + xlab = im$xlab, + ylab = im$ylab, + main = paste(info_exp), + levels = lev_field, + color.palette = palette3, + xlim = im$xlim, + ylim = im$ylim, + axes = im$axes + ) + mtext( + name, + side = 3, + line = .5, + outer = TRUE, + cex = 2, + font = 2 + ) proj_addland(proj = map_projection) - text(varpoints[1], varpoints[2], - paste("Frequencies: ", round(frequencies_exp[ii], 2), "%", sep = ""), - cex = 2) - - im <- plot_prepare(ics, ipsilon, regimes_ref[, , jj], - proj = map_projection, lat_lim = lat_lim) - filled_contour3(im$x, im$y, im$z, xlab = im$xlab, ylab = im$ylab, - main = paste(info_ref), levels = lev_field, - color.palette = palette3, xlim = im$xlim, - ylim = im$ylim, axes = im$axes) + text(varpoints[1], + varpoints[2], + paste("Frequencies: ", 
round(frequencies_exp[ii], 2), "%", sep = ""), + cex = 2 + ) + + im <- plot_prepare(ics, + ipsilon, + regimes_ref[, , jj], + proj = map_projection, + lat_lim = lat_lim + ) + filled_contour3( + im$x, + im$y, + im$z, + xlab = im$xlab, + ylab = im$ylab, + main = paste(info_ref), + levels = lev_field, + color.palette = palette3, + xlim = im$xlim, + ylim = im$ylim, + axes = im$axes + ) proj_addland(proj = map_projection) - text(varpoints[1], varpoints[2], paste("Frequencies: ", - round(frequencies_ref[ii], 2), "%", sep = ""), cex = 2) - image_scale3(volcano, levels = lev_field, color.palette = palette3, - colorbar.label = "m", cex.colorbar = imgscl_colorbar, - cex.label = imgscl_label, colorbar.width = 1 * af, - line.label = imgscl_line) + text(varpoints[1], + varpoints[2], + paste("Frequencies: ", + round(frequencies_ref[ii], 2), "%", + sep = "" + ), + cex = 2 + ) + image_scale3( + volcano, + levels = lev_field, + color.palette = palette3, + colorbar.label = "m", + cex.colorbar = imgscl_colorbar, + cex.label = imgscl_label, + colorbar.width = 1 * af, + line.label = imgscl_line + ) # delta field plot - im <- plot_prepare(ics, ipsilon, regimes_exp[, , ii] - regimes_ref[, , jj], - proj = map_projection, lat_lim = lat_lim) - filled_contour3(im$x, im$y, im$z, xlab = im$xlab, ylab = im$ylab, - main = paste("Difference"), levels = lev_diff, - color.palette = palette2, xlim = im$xlim, - ylim = im$ylim, axes = im$axes) + im <- + plot_prepare(ics, + ipsilon, + regimes_exp[, , ii] - regimes_ref[, , jj], + proj = map_projection, + lat_lim = lat_lim + ) + filled_contour3( + im$x, + im$y, + im$z, + xlab = im$xlab, + ylab = im$ylab, + main = paste("Difference"), + levels = lev_diff, + color.palette = palette2, + xlim = im$xlim, + ylim = im$ylim, + axes = im$axes + ) proj_addland(proj = map_projection) - image_scale3(volcano, levels = lev_diff, color.palette = palette2, - colorbar.label = "m", cex.colorbar = imgscl_colorbar, - cex.label = imgscl_label, colorbar.width = 1 * af, - line.label = imgscl_line) + image_scale3( + volcano, + levels = lev_diff, + color.palette = palette2, + colorbar.label = "m", + cex.colorbar = imgscl_colorbar, + cex.label = imgscl_label, + colorbar.width = 1 * af, + line.label = imgscl_line + ) dev.off() } + return(list(figs = filenames, mod = nomefile, ref = nomefile_ref)) } diff --git a/esmvaltool/diag_scripts/mlr/__init__.py b/esmvaltool/diag_scripts/mlr/__init__.py new file mode 100644 index 0000000000..aa89303896 --- /dev/null +++ b/esmvaltool/diag_scripts/mlr/__init__.py @@ -0,0 +1,867 @@ +"""Convenience functions for MLR diagnostics.""" + +import logging +import os +import re +import warnings +from copy import deepcopy +from functools import lru_cache +from pprint import pformat + +import esmvalcore.preprocessor +import iris +import numpy as np +import shapely.vectorized as shp_vect +from cartopy.io import shapereader +from cf_units import Unit +from iris.fileformats.netcdf import UnknownCellMethodWarning + +from esmvaltool.diag_scripts.shared import ( + get_diagnostic_filename, + io, + select_metadata, + sorted_metadata, +) + +logger = logging.getLogger(os.path.basename(__file__)) + +NECESSARY_KEYS = io.NECESSARY_KEYS + [ + 'tag', + 'var_type', +] +VAR_TYPES = [ + 'feature', + 'label', + 'label_to_rescale', + 'prediction_input', + 'prediction_input_error', + 'prediction_output', + 'prediction_output_error', + 'prediction_output_misc', + 'prediction_reference', + 'prediction_residual', +] +WARNINGS_TO_IGNORE = [ + { + 'message': ".* contains unknown cell method 'trend'", + 
'category': UnknownCellMethodWarning,
+        'module': 'iris',
+    },
+    {
+        'message': "Using DEFAULT_SPHERICAL_EARTH_RADIUS",
+        'category': UserWarning,
+        'module': 'iris',
+    },
+]
+
+
+def _check_coords(cube, coords, weights_type):
+    """Check coordinates prior to weights calculations."""
+    cube_str = cube.summary(shorten=True)
+    for coord_name in coords:
+        try:
+            coord = cube.coord(coord_name)
+        except iris.exceptions.CoordinateNotFoundError:
+            logger.error(
+                "Calculation of %s for cube %s failed, coordinate "
+                "'%s' not found", weights_type, cube_str, coord_name)
+            raise
+        if not coord.has_bounds():
+            logger.debug(
+                "Guessing bounds of coordinate '%s' of cube %s for "
+                "calculation of %s", coord_name, cube_str, weights_type)
+            coord.guess_bounds()
+
+
+def _get_datasets(input_data, **kwargs):
+    """Get datasets according to ``**kwargs``."""
+    datasets = []
+    for dataset in input_data:
+        dataset_copy = deepcopy(dataset)
+        for key in kwargs:
+            if key not in dataset_copy:
+                dataset_copy[key] = None
+        if select_metadata([dataset_copy], **kwargs):
+            datasets.append(dataset)
+    return datasets
+
+
+@lru_cache
+def _get_ne_land_mask_cube(n_lats=1000, n_lons=2000):
+    """Get Natural Earth land mask."""
+    ne_dir = os.path.join(
+        os.path.dirname(os.path.realpath(esmvalcore.preprocessor.__file__)),
+        'ne_masks',
+    )
+    ne_file = os.path.join(ne_dir, 'ne_10m_land.shp')
+    reader = shapereader.Reader(ne_file)
+    geometries = list(reader.geometries())
+
+    # Setup grid
+    lat_coord = iris.coords.DimCoord(
+        np.linspace(-90.0, 90.0, n_lats), var_name='lat',
+        standard_name='latitude', long_name='latitude', units='degrees')
+    lon_coord = iris.coords.DimCoord(
+        np.linspace(-180.0, 180.0, n_lons), var_name='lon',
+        standard_name='longitude', long_name='longitude', units='degrees')
+    (lats, lons) = np.meshgrid(lat_coord.points, lon_coord.points)
+
+    # Setup mask (1: land, 0: sea)
+    mask = np.full(lats.shape, False, dtype=bool)
+    for geometry in geometries:
+        mask |= shp_vect.contains(geometry, lons, lats)
+    land_mask = np.swapaxes(np.where(mask, 1, 0), 0, 1)
+
+    # Setup cube
+    cube = iris.cube.Cube(land_mask,
+                          var_name='land_mask',
+                          long_name='Land mask (1: land, 0: sea)',
+                          units='no_unit',
+                          dim_coords_and_dims=[(lat_coord, 0), (lon_coord, 1)])
+    return cube
+
+
+def check_predict_kwargs(predict_kwargs):
+    """Check keyword argument for ``predict()`` functions.
+
+    Parameters
+    ----------
+    predict_kwargs : keyword arguments, optional
+        Keyword arguments for a ``predict()`` function.
+
+    Raises
+    ------
+    RuntimeError
+        ``return_var`` and ``return_cov`` are both set to ``True`` in the
+        keyword arguments.
+
+    """
+    return_var = predict_kwargs.get('return_var', False)
+    return_cov = predict_kwargs.get('return_cov', False)
+    if return_var and return_cov:
+        raise RuntimeError(
+            "Cannot return variance (return_var=True) and full covariance "
+            "matrix (return_cov=True) simultaneously")
+
+
+def create_alias(dataset, attributes, delimiter='-'):
+    """Create alias key of a dataset using a list of attributes.
+
+    Parameters
+    ----------
+    dataset : dict
+        Metadata dictionary representing a single dataset.
+    attributes : list of str
+        List of attributes used to create the alias.
+    delimiter : str, optional (default: '-')
+        Delimiter used to separate different attributes in the alias.
+
+    Returns
+    -------
+    str
+        Dataset alias.
+
+    Raises
+    ------
+    AttributeError
+        ``dataset`` does not contain one of the ``attributes``.
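+
+    Examples
+    --------
+    A minimal sketch (the metadata values are hypothetical):
+
+    >>> create_alias({'dataset': 'DATASET', 'exp': 'historical'},
+    ...              ['dataset', 'exp'])
+    'DATASET-historical'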
+ + """ + alias = [] + if not attributes: + raise ValueError( + "Expected at least one element for attributes, got empty list") + for attribute in attributes: + if attribute not in dataset: + raise AttributeError( + f"Dataset {dataset} does not contain attribute '{attribute}' " + f"for alias creation") + alias.append(dataset[attribute]) + return delimiter.join(alias) + + +def datasets_have_mlr_attributes(datasets, log_level='debug', mode='full'): + """Check (MLR) attributes of ``datasets``. + + Parameters + ---------- + datasets : list of dict + Datasets to check. + log_level : str, optional (default: 'debug') + Verbosity level of the logger. + mode : str, optional (default: 'full') + Checking mode. Must be one of ``'only_missing'`` (only check if + attributes are missing), ``'only_var_type'`` (check only `var_type`) or + ``'full'`` (check both). + + Returns + ------- + bool + ``True`` if all required attributes are available, ``False`` if not. + + Raises + ------ + ValueError + Invalid value for argument ``mode`` is given. + + """ + output = True + accepted_modes = ('full', 'only_missing', 'only_var_type') + if mode not in accepted_modes: + raise ValueError( + f"'mode' must be one of {accepted_modes}, got '{mode}'") + for dataset in datasets: + if mode != 'only_var_type': + for key in NECESSARY_KEYS: + if key not in dataset: + getattr(logger, log_level)( + "Dataset '%s' does not have necessary (MLR) attribute " + "'%s'", dataset, key) + output = False + if mode != 'only_missing' and dataset.get('var_type') not in VAR_TYPES: + getattr(logger, log_level)( + "Dataset '%s' has invalid var_type '%s', must be one of %s", + dataset, dataset.get('var_type'), VAR_TYPES) + output = False + return output + + +def get_1d_cube(x_data, y_data, x_kwargs=None, y_kwargs=None): + """Convert 2 arrays to :class:`iris.cube.Cube` (with single coordinate). + + Parameters + ---------- + x_data : numpy.ndarray + Data for coordinate. + y_data : numpy.ndarray + Data for cube. + x_kwargs : dict + Keyword arguments passed to :class:`iris.coords.AuxCoord`. + y_kwargs : dict + Keyword arguments passed to :class:`iris.cube.Cube`. + + Returns + ------- + iris.cube.Cube + 1D cube with single auxiliary coordinate. + + Raises + ------ + ValueError + Arrays are not 1D and do not have matching shapes. + + """ + if x_kwargs is None: + x_kwargs = {} + if y_kwargs is None: + y_kwargs = {} + x_data = np.ma.array(x_data) + y_data = np.ma.array(y_data) + if x_data.ndim != 1: + raise ValueError( + f"Expected 1D array for 'x_data', got {x_data.ndim:d}D array") + if y_data.ndim != 1: + raise ValueError( + f"Expected 1D array for 'y_data', got {y_data.ndim:d}D array") + if x_data.shape != y_data.shape: + raise ValueError( + f"Expected identical shapes for 'x_data' and 'y_data', got " + f"{x_data.shape} and {y_data.shape}, respectively") + aux_coord = iris.coords.AuxCoord(x_data, **x_kwargs) + cube = iris.cube.Cube(y_data, aux_coords_and_dims=[(aux_coord, 0)], + **y_kwargs) + return cube + + +def get_absolute_time_units(units): + """Convert time reference units to absolute ones. + + This function converts reference time units (like ``'days since YYYY'``) to + absolute ones (like ``'days'``). + + Parameters + ---------- + units : cf_units.Unit + Time units to convert. + + Returns + ------- + cf_units.Unit + Absolute time units. + + Raises + ------ + ValueError + If conversion failed (e.g. input units are not time units). 
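+
+    Examples
+    --------
+    Illustrative sketch of the intended behaviour:
+
+    >>> get_absolute_time_units(Unit('days since 1850-01-01')) == Unit('days')
+    True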
+
+    """
+    if units.is_time_reference():
+        units = Unit(units.symbol.split()[0])
+    if not units.is_time():
+        raise ValueError(
+            f"Cannot convert units '{units}' to reasonable time units")
+    return units
+
+
+def get_alias(dataset):
+    """Get alias for dataset.
+
+    Parameters
+    ----------
+    dataset : dict
+        Dataset metadata.
+
+    Returns
+    -------
+    str
+        Alias.
+
+    """
+    alias = f"{dataset['project']} dataset {dataset['dataset']}"
+    additional_info = []
+    for key in ('mip', 'exp', 'ensemble'):
+        if key in dataset:
+            additional_info.append(dataset[key])
+    if additional_info:
+        alias += f" ({', '.join(additional_info)})"
+    if 'start_year' in dataset and 'end_year' in dataset:
+        alias += f" from {dataset['start_year']:d} to {dataset['end_year']:d}"
+    return alias
+
+
+def get_all_weights(cube, area_weighted=True, time_weighted=True,
+                    landsea_fraction_weighted=None, normalize=False):
+    """Get all desired weights for a cube.
+
+    Parameters
+    ----------
+    cube : iris.cube.Cube
+        Input cube.
+    area_weighted : bool, optional (default: True)
+        Use area weights calculated from grid cell areas using
+        :func:`iris.analysis.cartography.area_weights`. Only works for regular
+        grids.
+    time_weighted : bool, optional (default: True)
+        Use time weights calculated from time bounds.
+    landsea_fraction_weighted : str, optional
+        If given, use land/sea fraction weights calculated from Natural Earth
+        files. Must be one of ``'land'``, ``'sea'``. Only works for regular
+        grids.
+    normalize : bool, optional (default: False)
+        Normalize weights with total area and total time range.
+
+    Returns
+    -------
+    numpy.ndarray
+        Combined weights.
+
+    Raises
+    ------
+    iris.exceptions.CoordinateMultiDimError
+        Dimension of ``latitude`` or ``longitude`` coordinate is greater than
+        1.
+    iris.exceptions.CoordinateNotFoundError
+        Cube does not contain the coordinates ``latitude`` and ``longitude``
+        (if used with ``area_weighted`` or ``landsea_fraction_weighted``) or
+        cube does not contain the coordinate ``time`` (if used with
+        ``time_weighted``).
+    ValueError
+        ``landsea_fraction_weighted`` is not one of ``None``, ``'land'``,
+        ``'sea'`` or coordinates ``latitude`` and ``longitude`` share
+        dimensions.
+
+    """
+    logger.debug("Calculating all weights of cube %s",
+                 cube.summary(shorten=True))
+    weights = np.ones(cube.shape)
+
+    # Horizontal weights
+    horizontal_weights = get_horizontal_weights(
+        cube, area_weighted=area_weighted,
+        landsea_fraction_weighted=landsea_fraction_weighted,
+        normalize=normalize)
+    weights *= horizontal_weights
+
+    # Time weights
+    if time_weighted:
+        time_weights = get_time_weights(cube, normalize=normalize)
+        weights *= time_weights
+
+    return weights
+
+
+def get_area_weights(cube, normalize=False):
+    """Get area weights calculated from grid cell areas.
+
+    Note
+    ----
+    Only works for regular grids. Uses
+    :func:`iris.analysis.cartography.area_weights` for an approximate
+    calculation of the grid cell areas.
+
+    Parameters
+    ----------
+    cube : iris.cube.Cube
+        Input cube.
+    normalize : bool, optional (default: False)
+        Normalize weights with total area.
+
+    Returns
+    -------
+    numpy.ndarray
+        Area weights.
+
+    Raises
+    ------
+    iris.exceptions.CoordinateNotFoundError
+        Cube does not contain the coordinates ``latitude`` and ``longitude``.
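+
+    Examples
+    --------
+    Hypothetical sketch of a weighted global mean (``cube`` is any cube
+    with regular, bounded horizontal coordinates)::
+
+        weights = get_area_weights(cube, normalize=True)
+        global_mean = np.ma.average(cube.data, weights=weights)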
+ + """ + logger.debug("Calculating area weights") + _check_coords(cube, ['latitude', 'longitude'], 'area weights') + area_weights = iris.analysis.cartography.area_weights(cube, + normalize=normalize) + return area_weights + + +def get_horizontal_weights(cube, area_weighted=True, + landsea_fraction_weighted=None, normalize=False): + """Get horizontal (latitude/longitude) weights of cube. + + Parameters + ---------- + cube : iris.cube.Cube + Input cube. + area_weighted : bool, optional (default: True) + Use area weights calculated from grid cell areas using + :func:`iris.analysis.cartography.area_weights`. Only works for regular + grids. + landsea_fraction_weighted : str, optional + If given, use land/sea fraction weights calculated from Natural Earth + files. Must be one of ``'land'``, ``'sea'``. Only works for regular + grids. + normalize : bool, optional (default: False) + Normalize weights with sum of weights over latitude and longitude (i.e. + if only ``area_weighted`` is given, this is equal to the total area). + + Returns + ------- + numpy.ndarray + Horizontal (latitude/longitude) weights. + + Raises + ------ + iris.exceptions.CoordinateMultiDimError + Dimension of ``latitude`` or ``longitude`` coordinate is greater than + 1. + iris.exceptions.CoordinateNotFoundError + Cube does not contain the coordinates ``latitude`` and ``longitude``. + ValueError + ``landsea_fraction_weighted`` is not one of ``'land'``, ``'sea'``. + + """ + logger.debug("Calculating horizontal weights") + weights = np.ones(cube.shape) + if not (area_weighted or landsea_fraction_weighted): + return weights + + # Get weights + if area_weighted: + weights *= get_area_weights(cube, normalize=False) + if landsea_fraction_weighted is not None: + weights *= get_landsea_fraction_weights( + cube, landsea_fraction_weighted, normalize=False) + + # No normalization + if not normalize: + return weights + + # Get horizontal dimensions + horizontal_dims = [] + if cube.coord_dims('latitude'): + horizontal_dims.append(cube.coord_dims('latitude')[0]) + if cube.coord_dims('longitude'): + horizontal_dims.append(cube.coord_dims('longitude')[0]) + + # Normalization + horizontal_dims = tuple(horizontal_dims) + if not horizontal_dims: + norm = np.ravel(weights)[0] + else: + norm = np.ravel(np.sum(weights, axis=horizontal_dims))[0] + return weights / norm + + +def get_input_data(cfg, pattern=None, check_mlr_attributes=True, ignore=None): + """Get input data and check MLR attributes if desired. + + Use ``input_data`` and ancestors to get all relevant input files. + + Parameters + ---------- + cfg : dict + Recipe configuration. + pattern : str, optional + Pattern matched against ancestor file names. + check_mlr_attributes : bool, optional (default: True) + If ``True``, only returns datasets with valid MLR attributes. If + ``False``, returns all found datasets. + ignore : list of dict, optional + Ignore specific datasets by specifying multiple :obj:`dict`s of + metadata. By setting an attribute to ``None``, ignore all datasets + which do not have that attribute. + + Returns + ------- + list of dict + List of input datasets. + + Raises + ------ + ValueError + No input data found or at least one dataset has invalid attributes. 
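+
+    Examples
+    --------
+    A typical call at the top of an MLR diagnostic (the file pattern and
+    the ignored metadata are hypothetical)::
+
+        input_data = get_input_data(
+            cfg,
+            pattern='*.nc',
+            ignore=[{'var_type': 'prediction_output_misc'}],
+        )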
+ + """ + logger.debug("Extracting input files") + input_data = list(cfg['input_data'].values()) + input_data.extend(io.netcdf_to_metadata(cfg, pattern=pattern)) + input_data = deepcopy(input_data) + if ignore is not None: + valid_data = [] + ignored_datasets = [] + logger.info("Ignoring files with %s", ignore) + for kwargs in ignore: + ignored_datasets.extend(_get_datasets(input_data, **kwargs)) + for dataset in input_data: + if dataset not in ignored_datasets: + valid_data.append(dataset) + else: + valid_data = input_data + if not valid_data: + raise ValueError("No input data found") + if check_mlr_attributes: + if not datasets_have_mlr_attributes(valid_data, log_level='error'): + raise ValueError("At least one input dataset does not have valid " + "MLR attributes") + valid_data = sorted_metadata(valid_data, ['var_type', 'tag', 'dataset']) + logger.debug("Found files:") + logger.debug(pformat([d['filename'] for d in valid_data])) + return valid_data + + +def get_landsea_fraction_weights(cube, area_type, normalize=False): + """Get land/sea fraction weights calculated from Natural Earth files. + + Note + ---- + The implementation of this feature is not optimal. For large cubes, + calculating the land/sea fraction weights might be very slow. Only works + for regular grids. + + Parameters + ---------- + cube : iris.cube.Cube + Input cube. + area_type : str + Area type. Must be one of ``'land'`` (land fraction weighting) or + ``'sea'`` (sea fraction weighting). + normalize : bool, optional (default: False) + Normalize weights with total land/sea fraction. + + Raises + ------ + iris.exceptions.CoordinateMultiDimError + Dimension of ``latitude`` or ``longitude`` coordinate is greater than + 1. + iris.exceptions.CoordinateNotFoundError + Cube does not contain the coordinates ``latitude`` and ``longitude``. + ValueError + ``area_type`` is not one of ``'land'``, ``'sea'`` or coordinates + ``latitude`` and ``longitude`` share dimensions. 
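+
+    Returns
+    -------
+    numpy.ndarray
+        Land/sea fraction weights.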
+
+    """
+    allowed_types = ('land', 'sea')
+    if area_type not in allowed_types:
+        raise ValueError(
+            f"Expected one of {allowed_types} for 'area_type' of land/sea "
+            f"fraction weighting, got '{area_type}'")
+    logger.debug("Calculating %s fraction weights", area_type)
+    _check_coords(cube, ['latitude', 'longitude'],
+                  f'{area_type} fraction weights')
+    lat_coord = cube.coord('latitude')
+    lon_coord = cube.coord('longitude')
+    for coord in (lat_coord, lon_coord):
+        if coord.ndim > 1:
+            raise iris.exceptions.CoordinateMultiDimError(
+                f"Calculating {area_type} fraction weights for "
+                f"multidimensional coordinate '{coord.name()}' is not "
+                f"supported")
+    if cube.coord_dims(lat_coord) != ():
+        if cube.coord_dims(lat_coord) == cube.coord_dims(lon_coord):
+            raise ValueError(
+                f"1D latitude and longitude coordinates share dimensions "
+                f"(this usually happens with unstructured grids) - "
+                f"calculating {area_type} fraction weights for latitude and "
+                "longitude that share dimensions is not possible")
+
+    # Calculate land fractions on coordinate grid of cube
+    ne_land_mask_cube = _get_ne_land_mask_cube()
+    land_fraction = np.empty((lat_coord.shape[0], lon_coord.shape[0]),
+                             dtype=np.float64)
+    for lat_idx in range(lat_coord.shape[0]):
+        for lon_idx in range(lon_coord.shape[0]):
+            lat_bounds = lat_coord.bounds[lat_idx]
+            lon_bounds = lon_coord.bounds[lon_idx]
+            submask = ne_land_mask_cube.intersection(latitude=lat_bounds,
+                                                     longitude=lon_bounds)
+            land_fraction[lat_idx, lon_idx] = (submask.data.sum() /
+                                               submask.data.size)
+    if area_type == 'sea':
+        fraction_weights = 1.0 - land_fraction
+    else:
+        fraction_weights = land_fraction
+    if normalize:
+        fraction_weights /= np.ma.sum(fraction_weights)
+
+    # Broadcast to original shape
+    coord_dims = []
+    if cube.coord_dims(lon_coord):
+        coord_dims.append(cube.coord_dims(lon_coord)[0])
+    else:
+        fraction_weights = np.squeeze(fraction_weights, axis=1)
+    if cube.coord_dims(lat_coord):
+        coord_dims.insert(0, cube.coord_dims(lat_coord)[0])
+    else:
+        fraction_weights = np.squeeze(fraction_weights, axis=0)
+    fraction_weights = iris.util.broadcast_to_shape(fraction_weights,
+                                                    cube.shape,
+                                                    tuple(coord_dims))
+
+    return fraction_weights
+
+
+def get_new_path(cfg, old_path):
+    """Convert old path to new diagnostic path.
+
+    Parameters
+    ----------
+    cfg : dict
+        Recipe configuration.
+    old_path : str
+        Old path.
+
+    Returns
+    -------
+    str
+        New diagnostic path.
+
+    """
+    basename = os.path.splitext(os.path.basename(old_path))[0]
+    new_path = get_diagnostic_filename(basename, cfg)
+    return new_path
+
+
+def get_squared_error_cube(ref_cube, error_datasets):
+    """Get array of squared errors.
+
+    Parameters
+    ----------
+    ref_cube : iris.cube.Cube
+        Reference cube (determines mask, coordinates and attributes of output).
+    error_datasets : list of dict
+        List of metadata dictionaries where each dictionary represents a single
+        dataset.
+
+    Returns
+    -------
+    iris.cube.Cube
+        Cube containing squared errors.
+
+    Raises
+    ------
+    ValueError
+        Shape of a dataset does not match shape of reference cube.
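+
+    Examples
+    --------
+    Hedged sketch for aggregating prediction error datasets::
+
+        error_datasets = select_metadata(
+            input_data, var_type='prediction_output_error')
+        squared_error_cube = get_squared_error_cube(ref_cube, error_datasets)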
+ + """ + squared_error_cube = ref_cube.copy() + + # Fill cube with zeros + squared_error_cube.data = np.ma.array( + np.full(squared_error_cube.shape, 0.0), + mask=np.ma.getmaskarray(squared_error_cube.data), + ) + + # Adapt cube metadata + if 'error' in squared_error_cube.attributes.get('var_type', ''): + if not squared_error_cube.attributes.get('squared'): + squared_error_cube.var_name += '_squared' + squared_error_cube.long_name += ' (squared)' + squared_error_cube.units = units_power(squared_error_cube.units, 2) + else: + if squared_error_cube.attributes.get('squared'): + squared_error_cube.var_name += '_error' + squared_error_cube.long_name += ' (error)' + else: + squared_error_cube.var_name += '_squared_error' + squared_error_cube.long_name += ' (squared error)' + squared_error_cube.units = units_power(squared_error_cube.units, 2) + squared_error_cube.attributes['squared'] = 1 + squared_error_cube.attributes['var_type'] = 'prediction_output_error' + + # Aggregate errors + filenames = [] + for dataset in error_datasets: + path = dataset['filename'] + cube = iris.load_cube(path) + filenames.append(path) + + # Check shape + if cube.shape != ref_cube.shape: + raise ValueError( + f"Expected shape {ref_cube.shape} for error cubes, got " + f"{cube.shape} for dataset '{path}'") + + # Add squared error + new_data = cube.data + if not cube.attributes.get('squared'): + new_data **= 2 + squared_error_cube.data += new_data + logger.debug("Added '%s' to squared error datasets", path) + squared_error_cube.attributes['filename'] = '|'.join(filenames) + return squared_error_cube + + +def get_time_weights(cube, normalize=False): + """Get time weights of cube calculated from time bounds. + + Parameters + ---------- + cube : iris.cube.Cube + Input cube. + normalize : bool, optional (default: False) + Normalize weights with total time range. + + Returns + ------- + numpy.ndarray + Time weights. + + Raises + ------ + iris.exceptions.CoordinateNotFoundError + Cube does not contain the coordinate ``time``. + + """ + logger.debug("Calculating time weights") + _check_coords(cube, ['time'], 'time weights') + coord = cube.coord('time') + time_weights = coord.bounds[:, 1] - coord.bounds[:, 0] + time_weights = time_weights.squeeze() + if normalize: + time_weights /= np.ma.sum(time_weights) + if time_weights.shape == (): + time_weights = np.broadcast_to(time_weights, cube.shape) + else: + time_weights = iris.util.broadcast_to_shape(time_weights, cube.shape, + cube.coord_dims('time')) + return time_weights + + +def ignore_warnings(): + """Ignore warnings given by ``WARNINGS_TO_IGNORE``.""" + for warning_kwargs in WARNINGS_TO_IGNORE: + warning_kwargs.setdefault('action', 'ignore') + warnings.filterwarnings(**warning_kwargs) + + +def square_root_metadata(cube): + """Take the square root of the cube metadata. + + Parameters + ---------- + cube : iris.cube.Cube + Cube (will be modified in-place). 
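+
+    Examples
+    --------
+    Illustrative effect on a hypothetical cube::
+
+        # before: cube.var_name == 'tas_squared', cube.units == Unit('K2')
+        square_root_metadata(cube)
+        # after:  cube.var_name == 'tas', cube.units == Unit('K')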
+ + """ + if 'squared_' in cube.var_name: + cube.var_name = cube.var_name.replace('squared_', '') + elif '_squared' in cube.var_name: + cube.var_name = cube.var_name.replace('_squared', '') + else: + cube.var_name = 'root_' + cube.var_name + if 'squared ' in cube.long_name: + cube.long_name = cube.long_name.replace('squared ', '') + elif 'Squared ' in cube.long_name: + cube.long_name = cube.long_name.replace('Squared ', '') + elif ' squared' in cube.long_name: + cube.long_name = cube.long_name.replace(' squared', '') + elif ' Squared' in cube.long_name: + cube.long_name = cube.long_name.replace(' Squared', '') + elif ' (squared)' in cube.long_name: + cube.long_name = cube.long_name.replace(' (squared)', '') + elif ' (Squared)' in cube.long_name: + cube.long_name = cube.long_name.replace(' (Squared)', '') + else: + cube.long_name = 'Root ' + cube.long_name + cube.units = cube.units.root(2) + if cube.attributes.get('squared'): + cube.attributes.pop('squared') + + +def units_power(units, power): + """Raise a :class:`cf_units.Unit` to given power preserving symbols. + + Raise :class:`cf_units.Unit` to given power without expanding it first. For + example, using ``units_power(Unit('J'), 2)`` gives ``Unit('J2')``. In + contrast, simply using ``Unit('J')**2`` would yield ``'kg2 m4 s-4'``. + + Parameters + ---------- + units : cf_units.Unit + Input units. + power : int + Desired exponent. + + Returns + ------- + cf_units.Unit + Input units raised to given power. + + Raises + ------ + TypeError + Argument ``power`` is not :obj:`int`-like. + ValueError + Invalid unit given. + + """ + if round(power) != power: + raise TypeError( + f"Expected integer-like power for units exponentiation, got " + f"{power}") + power = int(power) + if any([units.is_no_unit(), units.is_unknown()]): + raise ValueError( + f"Cannot raise units '{units.name}' to power {power:d}") + if units.origin is None: + logger.warning( + "Symbol-preserving exponentiation of units '%s' is not " + "supported, origin is not given", units) + return units**power + if units.origin.isdigit(): + return units**power + if units.origin.split()[0][0].isdigit(): + logger.warning( + "Symbol-preserving exponentiation of units '%s' is not " + "supported yet because of leading numbers", units) + return units**power + new_units_list = [] + for split in units.origin.split(): + for elem in split.split('.'): + if elem[-1].isdigit(): + exp = [int(d) for d in re.findall(r'-?\d+', elem)][0] + val = ''.join(list(re.findall(r'[A-Za-z]', elem))) + new_units_list.append(f'{val}{exp * power}') + else: + new_units_list.append(f'{elem}{power}') + new_units = ' '.join(new_units_list) + return Unit(new_units) diff --git a/esmvaltool/diag_scripts/mlr/custom_sklearn.py b/esmvaltool/diag_scripts/mlr/custom_sklearn.py new file mode 100644 index 0000000000..f1b99f4c5c --- /dev/null +++ b/esmvaltool/diag_scripts/mlr/custom_sklearn.py @@ -0,0 +1,1098 @@ +"""Custom expansions of :mod:`sklearn` functionalities. + +Note +---- +This module provides custom expansions of some :mod:`sklearn` classes and +functions which are necessary to fit the purposes for the desired +functionalities of the :ref:`MLR module `. As +long-term goal we would like to include these functionalities to the +:mod:`sklearn` package since we believe these additions might be helpful for +everyone. This module serves as interim solution. To ensure that all features +are properly working this module is also covered by extensive tests. + +Parts of this code have been copied from :mod:`sklearn`. 
+ +License: BSD 3-Clause License + +Copyright (c) 2007-2020 The scikit-learn developers. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from this + software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +""" + +# pylint: disable=arguments-differ +# pylint: disable=attribute-defined-outside-init +# pylint: disable=protected-access +# pylint: disable=super-init-not-called +# pylint: disable=too-many-arguments +# pylint: disable=too-many-instance-attributes +# pylint: disable=too-many-locals +# pylint: disable=too-many-return-statements + +import itertools +import logging +import numbers +import os +import warnings +from contextlib import suppress +from copy import deepcopy +from inspect import getfullargspec +from traceback import format_exc + +import numpy as np +import scipy.sparse as sp +from joblib import Parallel, delayed, effective_n_jobs +from sklearn.base import BaseEstimator, clone, is_classifier +from sklearn.compose import ColumnTransformer, TransformedTargetRegressor +from sklearn.exceptions import FitFailedWarning, NotFittedError +from sklearn.feature_selection import RFE, SelectorMixin +from sklearn.linear_model import LinearRegression +from sklearn.metrics import check_scoring +from sklearn.model_selection import check_cv +from sklearn.pipeline import Pipeline +from sklearn.preprocessing import FunctionTransformer +from sklearn.utils import check_array, check_X_y, indexable, safe_sqr +from sklearn.utils.fixes import np_version, parse_version +from sklearn.utils.metaestimators import available_if +from sklearn.utils.validation import check_is_fitted + +from esmvaltool.diag_scripts import mlr + +logger = logging.getLogger(os.path.basename(__file__)) + + +_DEFAULT_TAGS = { + 'array_api_support': False, + 'non_deterministic': False, + 'requires_positive_X': False, + 'requires_positive_y': False, + 'X_types': ['2darray'], + 'poor_score': False, + 'no_validation': False, + 'multioutput': False, + "allow_nan": False, + 'stateless': False, + 'multilabel': False, + '_skip_test': False, + '_xfail_checks': False, + 'multioutput_only': False, + 'binary_only': False, + 'requires_fit': True, + 'preserves_dtype': [np.float64], + 'requires_y': False, + 
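+    # (descriptive note: these defaults appear to mirror sklearn's private
+    # _DEFAULT_TAGS, so that _safe_tags() below can fall back to them for
+    # estimators that define neither _get_tags nor _more_tags)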
'pairwise': False, +} + + +def _estimator_has(attr): + """Check if we can delegate a method to the underlying estimator. + + First, we check the first fitted estimator if available, otherwise we + check the unfitted estimator. + + """ + return lambda self: ( + hasattr(self.estimator_, attr) + if hasattr(self, "estimator_") + else hasattr(self.estimator, attr) + ) + + +def _determine_key_type(key, accept_slice=True): + """Determine the data type of key.""" + err_msg = ("No valid specification of the columns. Only a scalar, list or " + "slice of all integers or all strings, or boolean mask is " + "allowed") + + dtype_to_str = {int: 'int', str: 'str', bool: 'bool', np.bool_: 'bool'} + array_dtype_to_str = {'i': 'int', 'u': 'int', 'b': 'bool', 'O': 'str', + 'U': 'str', 'S': 'str'} + + if key is None: + return None + if isinstance(key, tuple(dtype_to_str.keys())): + try: + return dtype_to_str[type(key)] + except KeyError as exc: + raise ValueError(err_msg) from exc + if isinstance(key, slice): + if not accept_slice: + raise TypeError( + 'Only array-like or scalar are supported. ' + 'A Python slice was given.' + ) + if key.start is None and key.stop is None: + return None + key_start_type = _determine_key_type(key.start) + key_stop_type = _determine_key_type(key.stop) + if key_start_type is not None and key_stop_type is not None: + if key_start_type != key_stop_type: + raise ValueError(err_msg) + if key_start_type is not None: + return key_start_type + return key_stop_type + if isinstance(key, (list, tuple)): + unique_key = set(key) + key_type = {_determine_key_type(elt) for elt in unique_key} + if not key_type: + return None + if len(key_type) != 1: + raise ValueError(err_msg) + return key_type.pop() + if hasattr(key, 'dtype'): + try: + return array_dtype_to_str[key.dtype.kind] + except KeyError as exc: + raise ValueError(err_msg) from exc + raise ValueError(err_msg) + + +def _array_indexing(array, key, key_dtype, axis): + """Index an array or scipy.sparse consistently across numpy version.""" + if np_version < parse_version('1.12') or sp.issparse(array): + if key_dtype == 'bool': + key = np.asarray(key) + if isinstance(key, tuple): + key = list(key) + return array[key] if axis == 0 else array[:, key] + + +def _list_indexing(x_data, key, key_dtype): + """Index a python list.""" + if np.isscalar(key) or isinstance(key, slice): + # key is a slice or a scalar + return x_data[key] + if key_dtype == 'bool': + # key is a boolean array-like + return list(itertools.compress(x_data, key)) + # key is a integer array-like of key + return [x_data[idx] for idx in key] + + +def _pandas_indexing(x_data, key, key_dtype, axis): + """Index a pandas dataframe or a series.""" + if hasattr(key, 'shape'): + key = np.asarray(key) + key = key if key.flags.writeable else key.copy() + elif isinstance(key, tuple): + key = list(key) + # check whether we should index with loc or iloc + indexer = x_data.iloc if key_dtype == 'int' else x_data.loc + return indexer[:, key] if axis else indexer[key] + + +def _safe_indexing(x_data, indices, *_, axis=0): + """Return rows, items or columns of x_data using indices.""" + if indices is None: + return x_data + + if axis not in (0, 1): + raise ValueError( + f"'axis' should be either 0 (to index rows) or 1 (to index " + f"column). Got {axis} instead." 
+ ) + + indices_dtype = _determine_key_type(indices) + + if axis == 0 and indices_dtype == 'str': + raise ValueError( + "String indexing is not supported with 'axis=0'" + ) + + if axis == 1 and x_data.ndim != 2: + raise ValueError( + f"'x_data' should be a 2D NumPy array, 2D sparse matrix or pandas " + f"dataframe when indexing the columns (i.e. 'axis=1'). " + f"Got {type(x_data)} instead with {x_data.ndim} dimension(s)." + ) + + if axis == 1 and indices_dtype == 'str' and not hasattr(x_data, 'loc'): + raise ValueError( + "Specifying the columns using strings is only supported for " + "pandas DataFrames" + ) + + if hasattr(x_data, "iloc"): + return _pandas_indexing(x_data, indices, indices_dtype, axis=axis) + if hasattr(x_data, "shape"): + return _array_indexing(x_data, indices, indices_dtype, axis=axis) + return _list_indexing(x_data, indices, indices_dtype) + + +def _is_arraylike(input_array): + """Check whether the input is array-like.""" + return (hasattr(input_array, '__len__') or + hasattr(input_array, 'shape') or + hasattr(input_array, '__array__')) + + +def _make_indexable(iterable): + """Ensure iterable supports indexing or convert to an indexable variant.""" + if sp.issparse(iterable): + return iterable.tocsr() + if hasattr(iterable, "__getitem__") or hasattr(iterable, "iloc"): + return iterable + if iterable is None: + return iterable + return np.array(iterable) + + +def _num_samples(x_data): + """Return number of samples in array-like x_data.""" + message = f"Expected sequence or array-like, got {type(x_data)}" + if hasattr(x_data, 'fit') and callable(x_data.fit): + # Don't get num_samples from an ensembles length! + raise TypeError(message) + + if not hasattr(x_data, '__len__') and not hasattr(x_data, 'shape'): + if hasattr(x_data, '__array__'): + x_data = np.asarray(x_data) + else: + raise TypeError(message) + + if hasattr(x_data, 'shape') and x_data.shape is not None: + if len(x_data.shape) == 0: + raise TypeError( + f"Singleton array {x_data!r} cannot be considered a valid " + f"collection." + ) + # Check that shape is returning an integer or default to len + # Dask dataframes may not return numeric shape[0] value + if isinstance(x_data.shape[0], numbers.Integral): + return x_data.shape[0] + + try: + return len(x_data) + except TypeError as type_error: + raise TypeError(message) from type_error + + +def _check_fit_params(x_data, fit_params, indices=None): + """Check and validate the parameters passed during ``fit``.""" + fit_params_validated = {} + for param_key, param_value in fit_params.items(): + if (not _is_arraylike(param_value) or + _num_samples(param_value) != _num_samples(x_data)): + # Non-indexable pass-through (for now for backward-compatibility). + # https://github.com/scikit-learn/scikit-learn/issues/15805 + fit_params_validated[param_key] = param_value + else: + # Any other fit_params should support indexing + # (e.g. for cross-validation). 
+ fit_params_validated[param_key] = _make_indexable(param_value) + fit_params_validated[param_key] = _safe_indexing( + fit_params_validated[param_key], indices + ) + + return fit_params_validated + + +def _safe_tags(estimator, key=None): + """Safely get estimator tags.""" + if hasattr(estimator, "_get_tags"): + tags_provider = "_get_tags()" + tags = estimator._get_tags() + elif hasattr(estimator, "_more_tags"): + tags_provider = "_more_tags()" + tags = {**_DEFAULT_TAGS, **estimator._more_tags()} + else: + tags_provider = "_DEFAULT_TAGS" + tags = _DEFAULT_TAGS + + if key is not None: + if key not in tags: + raise ValueError( + f"The key {key} is not defined in {tags_provider} for the " + f"class {estimator.__class__.__name__}." + ) + return tags[key] + return tags + + +def _is_pairwise(estimator): + """Return ``True`` if estimator is pairwise.""" + with warnings.catch_warnings(): + warnings.filterwarnings('ignore', category=FutureWarning) + has_pairwise_attribute = hasattr(estimator, '_pairwise') + pairwise_attribute = getattr(estimator, '_pairwise', False) + pairwise_tag = _safe_tags(estimator, key="pairwise") + + if has_pairwise_attribute: + if pairwise_attribute != pairwise_tag: + warnings.warn( + "_pairwise attribute is inconsistent with tags. Set the " + "estimator tags of your estimator instead", FutureWarning, + ) + return pairwise_attribute + + # Use pairwise tag when the attribute is not present + return pairwise_tag + + +def _safe_split(estimator, x_data, y_data, indices, train_indices=None): + """Create subset of dataset and properly handle kernels.""" + if _is_pairwise(estimator): + if not hasattr(x_data, "shape"): + raise ValueError("Precomputed kernels or affinity matrices have " + "to be passed as arrays or sparse matrices.") + # x_data is a precomputed square kernel matrix + if x_data.shape[0] != x_data.shape[1]: + raise ValueError("x_data should be a square kernel matrix") + if train_indices is None: + x_subset = x_data[np.ix_(indices, indices)] + else: + x_subset = x_data[np.ix_(indices, train_indices)] + else: + x_subset = _safe_indexing(x_data, indices) + + if y_data is not None: + y_subset = _safe_indexing(y_data, indices) + else: + y_subset = None + + return (x_subset, y_subset) + + +def _fit_and_score_weighted(estimator, x_data, y_data, scorer, train, test, + parameters, fit_params, error_score=np.nan, + sample_weights=None): + """Expand :func:`sklearn.model_selection._validation._fit_and_score`.""" + # Adjust length of sample weights + fit_params = fit_params if fit_params is not None else {} + fit_params = _check_fit_params(x_data, fit_params, train) + + if parameters is not None: + # clone after setting parameters in case any parameters + # are estimators (like pipeline steps) + # because pipeline doesn't clone steps in fit + cloned_parameters = {} + for (key, val) in parameters.items(): + cloned_parameters[key] = clone(val, safe=False) + + estimator = estimator.set_params(**cloned_parameters) + + (x_train, y_train) = _safe_split(estimator, x_data, y_data, train) + (x_test, y_test) = _safe_split(estimator, x_data, y_data, test, train) + if sample_weights is not None: + sample_weights_test = sample_weights[test] + else: + sample_weights_test = None + + try: + if y_train is None: + estimator.fit(x_train, **fit_params) + else: + estimator.fit(x_train, y_train, **fit_params) + except Exception as exc: + if error_score == 'raise': + raise + if isinstance(error_score, numbers.Number): + test_score = error_score + warnings.warn( + f"Estimator fit failed. 
The score on this train-test " + f"partition for these parameters will be set to " + f"{error_score:f}. Details: \n{format_exc()}", + FitFailedWarning) + else: + raise ValueError( + "error_score must be the string 'raise' or a " + "numeric value. (Hint: if using 'raise', please " + "make sure that it has been spelled correctly.)") from exc + else: + test_score = _score_weighted(estimator, x_test, y_test, scorer, + sample_weights=sample_weights_test) + + return test_score + + +def _get_fit_parameters(fit_kwargs, steps, cls): + """Retrieve fit parameters from ``fit_kwargs``.""" + params = {name: {} for (name, step) in steps if step is not None} + step_names = list(params.keys()) + for (param_name, param_val) in fit_kwargs.items(): + param_split = param_name.split('__', 1) + if len(param_split) != 2: + raise ValueError( + f"Fit parameters for {cls} have to be given in the form " + f"'s__p', where 's' is the name of the step and 'p' the name " + f"of the parameter, got '{param_name}'") + try: + params[param_split[0]][param_split[1]] = param_val + except KeyError as exc: + raise ValueError( + f"Expected one of {step_names} for step of fit parameter, got " + f"'{param_split[0]}' for parameter '{param_name}'") from exc + return params + + +def _score_weighted(estimator, x_test, y_test, scorer, sample_weights=None): + """Expand :func:`sklearn.model_selection._validation._score`.""" + if y_test is None: + score = scorer(estimator, x_test, sample_weight=sample_weights) + else: + score = scorer(estimator, x_test, y_test, sample_weight=sample_weights) + + error_msg = ("Scoring must return a number, got %s (%s) instead. " + "(scorer=%s)") + if hasattr(score, 'item'): + with suppress(ValueError): + # e.g. unwrap memmapped scalars + score = score.item() + if not isinstance(score, numbers.Number): + raise ValueError(error_msg % (score, type(score), scorer)) + return score + + +def _split_fit_kwargs(fit_kwargs, train_idx, test_idx): + """Get split fit kwargs for single CV step.""" + fit_kwargs_train = {} + fit_kwargs_test = {} + for (key, val) in fit_kwargs.items(): + if 'sample_weight' in key and 'sample_weight_eval_set' not in key: + fit_kwargs_train[key] = deepcopy(val)[train_idx] + fit_kwargs_test[key] = deepcopy(val)[test_idx] + else: + fit_kwargs_train[key] = deepcopy(val) + fit_kwargs_test[key] = deepcopy(val) + return (fit_kwargs_train, fit_kwargs_test) + + +def _rfe_single_fit(rfe, estimator, x_data, y_data, train, test, scorer, + **fit_kwargs): + """Return the score for a fit across one fold.""" + (x_train, y_train) = _safe_split(estimator, x_data, y_data, train) + (x_test, y_test) = _safe_split(estimator, x_data, y_data, test, train) + (fit_kwargs_train, fit_kwargs_test) = _split_fit_kwargs(fit_kwargs, train, + test) + if 'sample_weight' in fit_kwargs_test: + fit_kwargs_test['sample_weights'] = fit_kwargs_test.pop( + 'sample_weight') + + def step_score(estimator, features): + """Score for a single step in the recursive feature elimination.""" + return _score_weighted(estimator, x_test[:, features], y_test, scorer, + **fit_kwargs_test) + + return rfe._fit(x_train, y_train, step_score=step_score, + **fit_kwargs_train).scores_ + + +def _map_features(features, support): + """Map old features indices to new ones using boolean mask.""" + feature_mapping = {} + new_idx = 0 + for (old_idx, supported) in enumerate(support): + if supported: + val = new_idx + new_idx += 1 + else: + val = None + feature_mapping[old_idx] = val + new_features = [] + for feature in features: + new_feature = 
feature_mapping[feature] + if new_feature is not None: + new_features.append(new_feature) + return new_features + + +def _update_transformers_param(estimator, support): + """Update ``transformers`` argument of ``ColumnTransformer`` steps.""" + all_params = estimator.get_params() + params = [] + for key in all_params: + if key.endswith('transformers'): + params.append(key) + if isinstance(estimator, (Pipeline, AdvancedPipeline)): + step = estimator.named_steps[key.split('__')[0]] + if not isinstance(step, ColumnTransformer): + raise TypeError( + f"Found 'transformers' parameter ('{key}'), but the " + f"corresponding pipeline step is not a " + f"ColumnTransformer (got '{type(step)}')") + else: + raise TypeError( + f"Found 'transformers' parameter ('{key}'), but the " + f"corresponding estimator is not a Pipeline or " + f"AdvancedPipeline") + new_params = {} + for param in params: + new_transformers = [] + for transformer in all_params[param]: + new_columns = _map_features(transformer[2], support) + new_transformers.append( + (transformer[0], transformer[1], new_columns)) + new_params[param] = new_transformers + estimator.set_params(**new_params) + + +def cross_val_score_weighted(estimator, x_data, y_data=None, groups=None, + scoring=None, cv=None, n_jobs=None, verbose=0, + fit_params=None, pre_dispatch='2*n_jobs', + error_score=np.nan, sample_weights=None): + """Expand :func:`sklearn.model_selection.cross_val_score`.""" + scorer = check_scoring(estimator, scoring=scoring) + (x_data, y_data, groups) = indexable(x_data, y_data, groups) + + cv = check_cv(cv, y_data, classifier=is_classifier(estimator)) + + # We clone the estimator to make sure that all the folds are + # independent, and that it is pickle-able. + parallel = Parallel(n_jobs=n_jobs, verbose=verbose, + pre_dispatch=pre_dispatch) + scores = parallel( + delayed(_fit_and_score_weighted)( + clone(estimator), x_data, y_data, scorer, train, test, None, + fit_params, error_score=error_score, sample_weights=sample_weights) + for train, test in cv.split(x_data, y_data, groups)) + return np.array(scores) + + +def get_rfecv_transformer(rfecv_estimator): + """Get transformer step of RFECV estimator.""" + try: + check_is_fitted(rfecv_estimator) + except NotFittedError as exc: + raise NotFittedError( + "RFECV instance used to initialize FeatureSelectionTransformer " + "must be fitted") from exc + transformer = FeatureSelectionTransformer( + grid_scores=rfecv_estimator.grid_scores_, + n_features=rfecv_estimator.n_features_, + ranking=rfecv_estimator.ranking_, + support=rfecv_estimator.support_, + ) + return transformer + + +def perform_efecv(estimator, x_data, y_data, **kwargs): + """Perform exhaustive feature selection.""" + x_data, y_data = check_X_y( + x_data, y_data, ensure_min_features=2, force_all_finite='allow-nan') + n_all_features = x_data.shape[1] + + # Iterate over all possible feature combinations + supports = list(itertools.product([False, True], repeat=n_all_features)) + supports.remove(tuple([False] * n_all_features)) + logger.info( + "Testing all %i possible feature combinations for exhaustive feature " + "selection", len(supports)) + grid_scores = [] + for support in supports: + support = np.array(support) + features = np.arange(n_all_features)[support] + + # Evaluate estimator on new subset of features + new_estimator = clone(estimator) + _update_transformers_param(new_estimator, support) + scores = cross_val_score_weighted(new_estimator, x_data[:, features], + y_data, **kwargs) + grid_scores.append(np.mean(scores)) + 
logger.debug("Fitted estimator with %i features, CV score was %.5f",
+                     support.sum(), np.mean(scores))
+
+    # Final parameters
+    grid_scores = np.array(grid_scores)
+    best_idx = np.argmax(grid_scores)
+    support = np.array(supports[best_idx])
+    features = np.arange(n_all_features)[support]
+    n_features = support.sum()
+    ranking = np.where(support, 1, 2)
+    transformer = FeatureSelectionTransformer(
+        grid_scores=grid_scores, n_features=n_features, ranking=ranking,
+        support=support)
+
+    # Get final estimator
+    best_estimator = clone(estimator)
+    _update_transformers_param(best_estimator, support)
+    best_estimator.fit(x_data[:, features], y_data,
+                       **kwargs.get('fit_params', {}))
+
+    logger.info("Found optimal score %.5f for %i features",
+                grid_scores[best_idx], n_features)
+    return (best_estimator, transformer)
+
+
+class AdvancedPipeline(Pipeline):
+    """Expand :class:`sklearn.pipeline.Pipeline`."""
+
+    @property
+    def coef_(self):
+        """numpy.ndarray: Model coefficients."""
+        return self.steps[-1][1].coef_
+
+    @property
+    def feature_importances_(self):
+        """numpy.ndarray: Feature importances."""
+        return self.steps[-1][1].feature_importances_
+
+    def _check_final_step(self):
+        """Check type of final step of pipeline."""
+        final_step = self.steps[-1][1]
+        if not isinstance(final_step, AdvancedTransformedTargetRegressor):
+            raise TypeError(
+                f"Expected estimator of type "
+                f"{AdvancedTransformedTargetRegressor} for final step of "
+                f"pipeline, got {final_step.__class__}")
+
+    def fit_target_transformer_only(self, y_data, **fit_kwargs):
+        """Fit only ``transform`` step of target regressor."""
+        self._check_final_step()
+        reg = self.steps[-1][1]
+        fit_params = _get_fit_parameters(fit_kwargs, self.steps,
+                                         self.__class__)
+        reg_fit_params = fit_params[self.steps[-1][0]]
+        reg.fit_transformer_only(y_data, **reg_fit_params)
+
+    def fit_transformers_only(self, x_data, y_data, **fit_kwargs):
+        """Fit only ``transform`` steps of Pipeline."""
+        # Temporarily set the final estimator to 'passthrough' to avoid
+        # fitting it
+        final_step = self.steps[-1]
+        self.steps[-1] = (final_step[0], 'passthrough')
+
+        # This will now fit all transformers, but not the final estimator
+        self.fit(x_data, y_data, **fit_kwargs)
+
+        # Re-assign the original (non-fitted) final estimator
+        self.steps[-1] = final_step
+
+        return self
+
+    def transform_only(self, x_data):
+        """Only perform ``transform`` steps of Pipeline."""
+        for (_, transformer) in self.steps[:-1]:
+            x_data = transformer.transform(x_data)
+        return x_data
+
+    def transform_target_only(self, y_data):
+        """Only perform ``transform`` steps of target regressor."""
+        self._check_final_step()
+        reg = self.steps[-1][1]
+        if not hasattr(reg, 'transformer_'):
+            raise NotFittedError(
+                "Transforming target not possible, final regressor is not "
+                "fitted yet, call fit() or fit_target_transformer_only() "
+                "first")
+        if y_data.ndim == 1:
+            y_data = y_data.reshape(-1, 1)
+        y_trans = reg.transformer_.transform(y_data)
+        if y_trans.ndim == 2 and y_trans.shape[1] == 1:
+            y_trans = y_trans.squeeze(axis=1)
+        return y_trans
+
+
+class AdvancedRFE(RFE):
+    """Expand :class:`sklearn.feature_selection.RFE`."""
+
+    def fit(self, x_data, y_data, **fit_kwargs):
+        """Expand :meth:`fit` to accept kwargs."""
+        return self._fit(x_data, y_data, **fit_kwargs)
+
+    def _fit(self, x_data, y_data, step_score=None, **fit_kwargs):
+        """Expand :meth:`_fit` to accept kwargs."""
+        # Parameter step_score controls the calculation of self.scores_
+        # step_score is not exposed to users
+        # and 
is used when implementing AdvancedRFECV
+        # self.scores_ will not be calculated when calling _fit through fit
+        x_data, y_data = check_X_y(x_data, y_data, "csc",
+                                   ensure_min_features=2,
+                                   force_all_finite=False)
+
+        # Initialization
+        n_features = x_data.shape[1]
+        if self.n_features_to_select is None:
+            n_features_to_select = n_features // 2
+        else:
+            n_features_to_select = self.n_features_to_select
+
+        if 0.0 < self.step < 1.0:
+            step = int(max(1, self.step * n_features))
+        else:
+            step = int(self.step)
+        if step <= 0:
+            raise ValueError("Step must be >0")
+
+        support_ = np.ones(n_features, dtype=bool)
+        ranking_ = np.ones(n_features, dtype=np.int64)
+
+        if step_score:
+            self.scores_ = []
+
+        # Elimination
+        while np.sum(support_) > n_features_to_select:
+            # Remaining features
+            features = np.arange(n_features)[support_]
+
+            # Rank the remaining features
+            estimator = clone(self.estimator)
+            if self.verbose > 0:
+                print(f"Fitting estimator with {np.sum(support_):d} features.")
+
+            _update_transformers_param(estimator, support_)
+            estimator.fit(x_data[:, features], y_data, **fit_kwargs)
+
+            # Get coefs (hasattr(estimator, 'coef_') raises a KeyError for
+            # XGBRegressor models)
+            try:
+                coefs = estimator.coef_
+            except (AttributeError, KeyError):
+                coefs = getattr(estimator, 'feature_importances_', None)
+            if coefs is None:
+                raise RuntimeError("The classifier does not expose "
+                                   "'coef_' or 'feature_importances_' "
+                                   "attributes")
+
+            # Get ranks
+            if coefs.ndim > 1:
+                ranks = np.argsort(safe_sqr(coefs).sum(axis=0))
+            else:
+                ranks = np.argsort(safe_sqr(coefs))
+
+            # Transformer steps that reduce the number of features are not
+            # supported
+            if len(ranks) != len(features):
+                raise NotImplementedError(
+                    f"Estimators that contain transforming steps that reduce "
+                    f"the number of features are not supported in "
+                    f"{self.__class__}, got {len(features):d} features for "
+                    f"fit(), but only {len(ranks):d} elements for 'coefs_' / "
+                    f"'feature_importances_' are provided. Estimator:\n"
+                    f"{estimator}")
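+
+            # Each iteration removes at most 'step' features, but never more
+            # than are left above 'n_features_to_select': the threshold below
+            # is min(step, n_remaining - n_features_to_select), e.g.
+            # min(2, 5 - 4) = 1 for step=2 with 5 surviving features and
+            # n_features_to_select=4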
+
+            # For the sparse case, 'ranks' is a matrix
+            ranks = np.ravel(ranks)
+
+            # Eliminate the worst features
+            threshold = min(step, np.sum(support_) - n_features_to_select)
+
+            # Compute step score on the previous selection iteration
+            # because 'estimator' must use features
+            # that have not been eliminated yet
+            if step_score:
+                self.scores_.append(step_score(estimator, features))
+            support_[features[ranks][:threshold]] = False
+            ranking_[np.logical_not(support_)] += 1
+
+        # Set final attributes
+        features = np.arange(n_features)[support_]
+        self.estimator_ = clone(self.estimator)
+        _update_transformers_param(self.estimator_, support_)
+        self.estimator_.fit(x_data[:, features], y_data, **fit_kwargs)
+
+        # Compute step score when only n_features_to_select features left
+        if step_score:
+            self.scores_.append(step_score(self.estimator_, features))
+        self.n_features_ = support_.sum()
+        self.support_ = support_
+        self.ranking_ = ranking_
+
+        return self
+
+    @available_if(_estimator_has("predict"))
+    def predict(self, x_data, **predict_kwargs):
+        """Expand :meth:`predict()` to accept kwargs."""
+        check_is_fitted(self)
+        return self.estimator_.predict(self.transform(x_data),
+                                       **predict_kwargs)
+
+
+class AdvancedRFECV(AdvancedRFE):
+    """Expand :class:`sklearn.feature_selection.RFECV`."""
+
+    def __init__(self, estimator, step=1, min_features_to_select=1, cv=None,
+                 scoring=None, verbose=0, n_jobs=None):
+        """Original constructor of :class:`sklearn.feature_selection.RFECV`."""
+        self.estimator = estimator
+        self.step = step
+        self.min_features_to_select = min_features_to_select
+        self.cv = cv
+        self.scoring = scoring
+        self.verbose = verbose
+        self.n_jobs = n_jobs
+
+    def fit(self, x_data, y_data, groups=None, **fit_kwargs):
+        """Expand :meth:`fit` to accept kwargs."""
+        x_data, y_data = check_X_y(
+            x_data, y_data, "csr", ensure_min_features=2,
+            force_all_finite=False)
+
+        # Initialization
+        cv = check_cv(self.cv, y_data,
+                      classifier=is_classifier(self.estimator))
+        scorer = check_scoring(self.estimator, scoring=self.scoring)
+        n_features = x_data.shape[1]
+
+        if 0.0 < self.step < 1.0:
+            step = int(max(1, self.step * n_features))
+        else:
+            step = int(self.step)
+        if step <= 0:
+            raise ValueError("Step must be >0")
+
+        # Build an AdvancedRFE object, which will evaluate and score each
+        # possible feature count, down to self.min_features_to_select
+        rfe = AdvancedRFE(estimator=self.estimator,
+                          n_features_to_select=self.min_features_to_select,
+                          step=self.step, verbose=self.verbose)
+
+        # Determine the number of subsets of features by fitting across
+        # the train folds and choosing the "features_to_select" parameter
+        # that gives the least averaged error across all folds.
+
+        # Note that joblib raises a non-picklable error for bound methods
+        # even if n_jobs is set to 1 with the default multiprocessing
+        # backend.
+        # This branching is done to make sure that user code that sets
+        # n_jobs to 1 and provides bound methods as scorers is not broken
+        # with the addition of the n_jobs parameter.
+ + if effective_n_jobs(self.n_jobs) == 1: + (parallel, func) = (list, _rfe_single_fit) + else: + parallel = Parallel(n_jobs=self.n_jobs) + func = delayed(_rfe_single_fit) + + scores = parallel( + func(rfe, self.estimator, x_data, y_data, train, test, scorer, + **fit_kwargs) + for train, test in cv.split(x_data, y_data, groups)) + + scores = np.sum(scores, axis=0) + scores_rev = scores[::-1] + argmax_idx = len(scores) - np.argmax(scores_rev) - 1 + n_features_to_select = max( + n_features - (argmax_idx * step), + self.min_features_to_select) + + # Re-execute an elimination with best_k over the whole set + rfe = AdvancedRFE(estimator=self.estimator, + n_features_to_select=n_features_to_select, + step=self.step, verbose=self.verbose) + + rfe.fit(x_data, y_data, **fit_kwargs) + + # Set final attributes + self.support_ = rfe.support_ + self.n_features_ = rfe.n_features_ + self.ranking_ = rfe.ranking_ + self.estimator_ = clone(self.estimator) + _update_transformers_param(self.estimator_, self.support_) + self.estimator_.fit(self.transform(x_data), y_data, **fit_kwargs) + + # Fixing a normalization error, n is equal to + # get_n_splits(x_data, y_data) - 1 here, the scores are normalized by + # get_n_splits(x_data, y_data) + self.grid_scores_ = scores[::-1] / cv.get_n_splits(x_data, y_data, + groups) + return self + + +class AdvancedTransformedTargetRegressor(TransformedTargetRegressor): + """Expand :class:`sklearn.compose.TransformedTargetRegressor`.""" + + @property + def coef_(self): + """numpy.ndarray: Model coefficients.""" + return self.regressor_.coef_ + + @property + def feature_importances_(self): + """numpy.ndarray: Feature importances.""" + return self.regressor_.feature_importances_ + + def fit(self, x_data, y_data, **fit_kwargs): + """Expand :meth:`fit` to accept kwargs.""" + (y_2d, + regressor_kwargs) = self.fit_transformer_only(y_data, **fit_kwargs) + + # Transform y and convert back to 1d array if necessary + y_trans = self.transformer_.transform(y_2d) + if y_trans.ndim == 2 and y_trans.shape[1] == 1: + y_trans = y_trans.squeeze(axis=1) + + # Perform linear regression if regressor is not given + if self.regressor is None: + self.regressor_ = LinearRegression() + else: + self.regressor_ = clone(self.regressor) + + # Fit regressor with kwargs + self.regressor_.fit(x_data, y_trans, **regressor_kwargs) + return self + + def fit_transformer_only(self, y_data, **fit_kwargs): + """Fit only ``transformer`` step.""" + y_data = check_array(y_data, + accept_sparse=False, + force_all_finite=True, + ensure_2d=False, + dtype='numeric') + self._training_dim = y_data.ndim + + # Process kwargs + (_, regressor_kwargs) = self._get_fit_params(fit_kwargs) + + # Transformers are designed to modify X which is 2D, modify y_data + # FIXME: Transformer does NOT use transformer_kwargs + if y_data.ndim == 1: + y_2d = y_data.reshape(-1, 1) + else: + y_2d = y_data + self._fit_transformer(y_2d) + return (y_2d, regressor_kwargs) + + def predict(self, x_data, always_return_1d=True, **predict_kwargs): + """Expand :meth:`predict()` to accept kwargs.""" + check_is_fitted(self) + if not hasattr(self, 'regressor_'): + raise NotFittedError( + f"Regressor of {self.__class__} is not fitted yet, call fit() " + f"first") + + # Kwargs for returning variance or covariance + if ('return_std' in predict_kwargs and 'return_std' in getfullargspec( + self.regressor_.predict).args): + raise NotImplementedError( + f"Using keyword argument 'return_std' for final regressor " + f"{self.regressor_.__class__} is not supported yet, only " 
+ f"'return_var' is allowed. Expand the regressor to accept " + f"'return_var' instead (see 'esmvaltool/diag_scripts/mlr" + f"/models/gpr_sklearn.py' for an example)") + mlr.check_predict_kwargs(predict_kwargs) + return_var = predict_kwargs.get('return_var', False) + return_cov = predict_kwargs.get('return_cov', False) + + # Prediction + prediction = self.regressor_.predict(x_data, **predict_kwargs) + if return_var or return_cov: + pred = prediction[0] + else: + pred = prediction + if pred.ndim == 1: + pred_trans = self.transformer_.inverse_transform( + pred.reshape(-1, 1)) + else: + pred_trans = self.transformer_.inverse_transform(pred) + if self._to_be_squeezed(pred_trans, always_return_1d=always_return_1d): + pred_trans = pred_trans.squeeze(axis=1) + if not (return_var or return_cov): + return pred_trans + + # Return scaled variance or covariance if desired + err = prediction[1] + if not hasattr(self.transformer_, 'scale_'): + raise NotImplementedError( + f"Transforming of additional prediction output (e.g. by " + f"'return_var' or 'return_cov') is not supported for " + f"transformer {self.transformer_.__class__} yet, the " + f"necessary attribute 'scale_' is missing") + scale = self.transformer_.scale_ + if scale is not None: + err *= scale**2 + if self._to_be_squeezed(err, always_return_1d=always_return_1d): + err = err.squeeze(axis=1) + return (pred_trans, err) + + def _get_fit_params(self, fit_kwargs): + """Separate ``transformer`` and ``regressor`` kwargs.""" + steps = [ + ('transformer', self.transformer), + ('regressor', self.regressor), + ] + fit_params = _get_fit_parameters(fit_kwargs, steps, self.__class__) + fit_params.setdefault('transformer', {}) + fit_params.setdefault('regressor', {}) + + # FIXME + if fit_params['transformer']: + raise NotImplementedError( + f"Fit parameters {fit_params['transformer']} for transformer " + f"{self.transformer.__class__} of {self.__class__} are not " + f"supported at the moment") + + return (fit_params['transformer'], fit_params['regressor']) + + def _fit_transformer(self, y_data): + """Check transformer and fit transformer.""" + if (self.transformer is not None and + (self.func is not None or self.inverse_func is not None)): + raise ValueError("'transformer' and functions 'func'/" + "'inverse_func' cannot both be set.") + if self.transformer is not None: + self.transformer_ = clone(self.transformer) + else: + if self.func is not None and self.inverse_func is None: + raise ValueError( + "When 'func' is provided, 'inverse_func' must also be " + "provided") + self.transformer_ = FunctionTransformer( + func=self.func, inverse_func=self.inverse_func, validate=True, + check_inverse=self.check_inverse) + self.transformer_.fit(y_data) + if self.check_inverse: + idx_selected = slice(None, None, max(1, y_data.shape[0] // 10)) + y_sel = _safe_indexing(y_data, idx_selected) + y_sel_t = self.transformer_.transform(y_sel) + if not np.allclose(y_sel, + self.transformer_.inverse_transform(y_sel_t)): + warnings.warn("The provided functions or transformer are " + "not strictly inverse of each other. 
If " + "you are sure you want to proceed regardless, " + "set 'check_inverse=False'", UserWarning) + + def _to_be_squeezed(self, array, always_return_1d=True): + """Check if ``array`` should be squeezed or not.""" + squeeze = array.ndim == 2 and array.shape[1] == 1 + if not always_return_1d: + squeeze = squeeze and self._training_dim == 1 + return squeeze + + +class FeatureSelectionTransformer(BaseEstimator, SelectorMixin): + """Transformer step of a feature selection estimator.""" + + def __init__(self, grid_scores, n_features, ranking, support): + """Initialize feature selection transformer.""" + self.grid_scores = grid_scores + self.n_features = n_features + self.ranking = ranking + self.support = support + + def fit(self, *_, **__): + """Empty method.""" + return self + + def _get_support_mask(self): + """Get support mask.""" + return self.support + + def _more_tags(self): + """Additional estimator tags.""" + more_tags = deepcopy(_DEFAULT_TAGS) + more_tags['allow_nan'] = True + return more_tags diff --git a/esmvaltool/diag_scripts/mlr/evaluate_residuals.py b/esmvaltool/diag_scripts/mlr/evaluate_residuals.py new file mode 100644 index 0000000000..e76f7c9eab --- /dev/null +++ b/esmvaltool/diag_scripts/mlr/evaluate_residuals.py @@ -0,0 +1,205 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +"""Simple evaluation of residuals (coming from MLR model output). + +Description +----------- +This diagnostic evaluates residuals created by MLR models. + +Author +------ +Manuel Schlund (DLR, Germany) + +Project +------- +CRESCENDO + +Configuration options in recipe +------------------------------- +ignore: list of dict, optional + Ignore specific datasets by specifying multiple :obj:`dict` s of metadata. +mse_plot: dict, optional + Additional options for plotting the mean square errors (MSE). Specify + additional keyword arguments for :func:`seaborn.boxplot` by ``plot_kwargs`` + and plot appearance options by ``pyplot_kwargs`` (processed as functions of + :mod:`matplotlib.pyplot`). +pattern: str, optional + Pattern matched against ancestor file names. +rmse_plot: dict, optional + Additional options for plotting the root mean square errors (RMSE). + Specify additional keyword arguments for :func:`seaborn.boxplot` by + ``plot_kwargs`` and plot appearance options by ``pyplot_kwargs`` (processed + as functions of :mod:`matplotlib.pyplot`). +savefig_kwargs: dict, optional + Keyword arguments for :func:`matplotlib.pyplot.savefig`. +seaborn_settings: dict, optional + Options for :func:`seaborn.set_theme` (affects all plots). +weighted_samples: dict + If specified, use weighted root mean square error. The given keyword + arguments are directly passed to + :func:`esmvaltool.diag_scripts.mlr.get_all_weights` to calculate the sample + weights. By default, area weights and time weights are used. 
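+
+For orientation, a hypothetical script section using some of these options
+could look as follows (all values are illustrative, not defaults):
+
+.. code-block:: yaml
+
+   scripts:
+     evaluate_residuals:
+       script: mlr/evaluate_residuals.py
+       rmse_plot:
+         pyplot_kwargs:
+           title: RMSE of residuals
+       seaborn_settings:
+         style: ticks
+       weighted_samples:
+         area_weighted: true
+         time_weighted: false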
+ +""" + +import logging +import os +from copy import deepcopy + +import iris +import matplotlib.pyplot as plt +import numpy as np +import pandas as pd +import seaborn as sns + +import esmvaltool.diag_scripts.emergent_constraints as ec +import esmvaltool.diag_scripts.mlr.plot as mlr_plot +from esmvaltool.diag_scripts import mlr +from esmvaltool.diag_scripts.shared import ( + ProvenanceLogger, + get_plot_filename, + group_metadata, + io, + run_diagnostic, + select_metadata, +) + +logger = logging.getLogger(os.path.basename(__file__)) + + +def _plot_boxplot(cfg, data_frame, plot_name): + """Plot boxplot.""" + boxplot_kwargs = { + 'color': 'b', + 'data': data_frame, + 'showfliers': False, + 'showmeans': True, + 'meanprops': { + 'marker': 'x', + 'markeredgecolor': 'k', + 'markerfacecolor': 'k', + 'markersize': 8, + }, + 'whis': [0, 100], + } + boxplot_kwargs.update(mlr_plot.get_plot_kwargs(cfg, plot_name)) + sns.boxplot(**boxplot_kwargs) + sns.swarmplot(data=data_frame, color='k', alpha=0.6) + + # Plot appearance + plt.ylim(0.0, plt.ylim()[1]) + mlr_plot.process_pyplot_kwargs(cfg, plot_name) + + # Save plot + plot_path = get_plot_filename(plot_name, cfg) + plt.savefig(plot_path, **mlr_plot.get_savefig_kwargs(cfg)) + logger.info("Wrote %s", plot_path) + plt.close() + return plot_path + + +def _write_provenance(cfg, data_frame, plot_path, title, ancestors, + **cube_kwargs): + """Write provenance information.""" + cube = ec.pandas_object_to_cube(data_frame, **cube_kwargs) + netcdf_path = mlr.get_new_path(cfg, plot_path) + io.iris_save(cube, netcdf_path) + record = { + 'ancestors': ancestors, + 'authors': ['schlund_manuel'], + 'caption': f"Boxplot of {title}.", + 'plot_types': ['box'], + 'references': ['schlund20jgr'], + } + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(netcdf_path, record) + provenance_logger.log(plot_path, record) + + +def get_residual_data(cfg): + """Get residual data.""" + input_data = mlr_plot.get_input_datasets(cfg) + residual_data = select_metadata(input_data, var_type='prediction_residual') + if not residual_data: + raise ValueError("No 'prediction_residual' data found") + return group_metadata(residual_data, 'mlr_model_name') + + +def plot_mse(cfg, residual_data): + """Plot distribution of mean square error (MSE).""" + logger.info("Plotting mean square error (MSE) distribution") + mlr_models_mse = [] + + # Collect data for every statistical model + ancestors = [] + for (model_name, datasets) in residual_data.items(): + mse_data = [] + for dataset in datasets: + cube = iris.load_cube(dataset['filename']) + ancestors.append(dataset['filename']) + weights = mlr.get_all_weights(cube, **cfg['weighted_samples']) + mse = np.ma.average(cube.data**2, weights=weights) + mse_data.append(mse) + data_frame = pd.DataFrame(mse_data, columns=[model_name]) + mlr_models_mse.append(data_frame) + boxplot_data_frame = pd.concat(mlr_models_mse, axis=1) + boxplot_data_frame.columns.name = 'mlr_model' + + # Plot + plot_path = _plot_boxplot(cfg, boxplot_data_frame, 'mse_plot') + logger.info("MSEs:\n%s", boxplot_data_frame.describe()) + + # Provenance + _write_provenance(cfg, boxplot_data_frame, plot_path, 'MSE', ancestors, + var_name='mse', long_name='Mean Square Error') + + +def plot_rmse(cfg, residual_data): + """Plot distribution of root mean square error (RMSE).""" + logger.info("Plotting root mean square error (RMSE) distribution") + mlr_models_rmse = [] + + # Collect data for every statistical model + ancestors = [] + for (model_name, datasets) in 
residual_data.items():
+        rmse_data = []
+        for dataset in datasets:
+            cube = iris.load_cube(dataset['filename'])
+            ancestors.append(dataset['filename'])
+            weights = mlr.get_all_weights(cube, **cfg['weighted_samples'])
+            mse = np.ma.average(cube.data**2, weights=weights)
+            rmse_data.append(np.ma.sqrt(mse))
+        data_frame = pd.DataFrame(rmse_data, columns=[model_name])
+        mlr_models_rmse.append(data_frame)
+    boxplot_data_frame = pd.concat(mlr_models_rmse, axis=1)
+    boxplot_data_frame.columns.name = 'mlr_model'
+
+    # Plot
+    plot_path = _plot_boxplot(cfg, boxplot_data_frame, 'rmse_plot')
+    logger.info("RMSEs:\n%s", boxplot_data_frame.describe())
+
+    # Provenance
+    _write_provenance(cfg, boxplot_data_frame, plot_path, 'RMSE', ancestors,
+                      var_name='rmse', long_name='Root Mean Square Error')
+
+
+def main(cfg):
+    """Run the diagnostic."""
+    cfg = deepcopy(cfg)
+    cfg.setdefault('weighted_samples',
+                   {'area_weighted': True, 'time_weighted': True})
+    sns.set_theme(**cfg.get('seaborn_settings', {}))
+
+    # Extract data
+    residual_data = get_residual_data(cfg)
+
+    # Plots
+    plot_mse(cfg, residual_data)
+    plot_rmse(cfg, residual_data)
+
+
+# Run main function when this script is called
+if __name__ == '__main__':
+    mlr.ignore_warnings()
+    with run_diagnostic() as config:
+        main(config)
diff --git a/esmvaltool/diag_scripts/mlr/main.py b/esmvaltool/diag_scripts/mlr/main.py
new file mode 100644
index 0000000000..76566f66a5
--- /dev/null
+++ b/esmvaltool/diag_scripts/mlr/main.py
@@ -0,0 +1,349 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""Main diagnostic script to create MLR models.
+
+Description
+-----------
+This diagnostic script creates Machine Learning Regression (MLR) models which
+use inter-model relations between process-based predictors (usually from the
+past/present climate) and a target variable (usually a projection of the
+future climate) to get a constrained prediction of the target variable. It
+provides an interface for using MLR models (subclasses of
+:class:`esmvaltool.diag_scripts.mlr.models.MLRModel`).
+
+
+Author
+------
+Manuel Schlund (DLR, Germany)
+
+Project
+-------
+CRESCENDO
+
+Configuration options in recipe
+-------------------------------
+efecv_kwargs: dict, optional
+    If specified, use these additional keyword arguments to perform an
+    exhaustive feature elimination using cross-validation. May not be used
+    together with ``grid_search_cv_param_grid`` or ``rfecv_kwargs``.
+grid_search_cv_kwargs: dict, optional
+    Keyword arguments for the grid search cross-validation, see
+    ``_.
+grid_search_cv_param_grid: dict or list of dict, optional
+    If specified, perform exhaustive parameter search using cross-validation
+    instead of simply calling
+    :meth:`esmvaltool.diag_scripts.mlr.models.MLRModel.fit`. Contains
+    parameters (keys) and ranges (values) for the exhaustive parameter search.
+    They have to be given for each step of the pipeline separated by two
+    underscores, i.e. ``s__p`` is the parameter ``p`` for step ``s``. May not
+    be used together with ``efecv_kwargs`` or ``rfecv_kwargs``.
+group_metadata: str, optional
+    Group input data by an attribute. For every group element (set of
+    datasets), an individual MLR model is calculated. Only affects ``feature``
+    and ``label`` datasets. May not be used together with the option
+    ``pseudo_reality``.
+ignore: list of dict, optional
+    Ignore specific datasets by specifying multiple :obj:`dict` s of metadata.
+mlr_model_type: str
+    MLR model type. The given model has to be defined in
+    :mod:`esmvaltool.diag_scripts.mlr.models`. 
+only_predict: bool, optional (default: False)
+    If ``True``, only use
+    :meth:`esmvaltool.diag_scripts.mlr.models.MLRModel.predict` and do not
+    create any other output (CSV files, plots, etc.).
+pattern: str, optional
+    Pattern matched against ancestor file names.
+plot_partial_dependences: bool, optional (default: False)
+    Plot partial dependence of every feature in the MLR model
+    (computationally expensive).
+predict_kwargs: dict, optional
+    Optional keyword arguments for the final regressor's ``predict()``
+    function.
+pseudo_reality: list of str, optional
+    List of dataset attributes which are used to group input data for a
+    pseudo-reality test (also known as `model-as-truth` or `perfect-model`
+    setup). For every element of the group a single MLR model is fitted on
+    all data **except** for that of the specified group element. This group
+    element is then used as additional ``prediction_input`` and
+    ``prediction_reference``. This allows a direct assessment of the
+    predictive power of the MLR model by comparing the MLR prediction output
+    and the true labels (similar to splitting the input data into a training
+    and a test set, except that the data are not divided randomly but by
+    specific datasets, e.g. the different climate models). May not be used
+    together with the option ``group_metadata``.
+rfecv_kwargs: dict, optional
+    If specified, use these additional keyword arguments to perform a
+    recursive feature elimination using cross-validation, see
+    ``_. May not be used together with
+    ``efecv_kwargs`` or ``grid_search_cv_param_grid``.
+save_lime_importance: bool, optional (default: False)
+    Additionally save local feature importance given by LIME (Local
+    Interpretable Model-agnostic Explanations).
+save_mlr_model_error: str or int, optional
+    Additionally save the estimated squared MLR model error. This error
+    represents the uncertainty of the prediction caused by the MLR model
+    itself and not by errors in the prediction input data (those can be
+    considered by including datasets with ``var_type`` set to
+    ``prediction_input_error`` and setting ``save_propagated_errors`` to
+    ``True``). If the option is set to ``'test'``, the (constant) error is
+    estimated as RMSEP using a (hold-out) test data set. Only possible if
+    test data is available, i.e. the option ``test_size`` is not set to
+    ``False`` during class initialization. If the option is set to ``'logo'``,
+    the (constant) error is estimated as RMSEP using leave-one-group-out
+    cross-validation with the ``group_attributes``. Only possible if
+    ``group_datasets_by_attributes`` is given. If the option is set to an
+    integer ``n`` (!= 0), the (constant) error is estimated as RMSEP using
+    n-fold cross-validation.
+save_propagated_errors: bool, optional (default: False)
+    Additionally save propagated errors from ``prediction_input_error``
+    datasets.
+select_metadata: dict, optional
+    Pre-select input data by specifying (key, value) pairs. Affects all
+    datasets regardless of ``var_type``.
+
+Additional optional parameters are the optional parameters of
+:class:`esmvaltool.diag_scripts.mlr.models.MLRModel` given :ref:`here
+` or the optional parameters of
+:mod:`esmvaltool.diag_scripts.mlr.mmm` if ``mlr_model_type='mmm'``. 
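+
+As an illustration, several of these options could be combined in a recipe's
+script section as follows (all values are examples, not defaults):
+
+.. code-block:: yaml
+
+   scripts:
+     mlr:
+       script: mlr/main.py
+       mlr_model_type: gbr_sklearn
+       group_metadata: dataset
+       grid_search_cv_param_grid:
+         final__regressor__n_estimators: [50, 100, 200]
+       grid_search_cv_kwargs:
+         cv: 5
+       save_mlr_model_error: test
+
+Here, ``final__regressor__n_estimators`` follows the ``s__p`` convention
+described above for the pipeline step ``final``.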
+ +""" + +import logging +import os +from copy import deepcopy +from pprint import pformat + +from sklearn.gaussian_process import kernels as sklearn_kernels + +from esmvaltool.diag_scripts import mlr +from esmvaltool.diag_scripts.mlr.mmm import main as create_mmm_model +from esmvaltool.diag_scripts.mlr.models import MLRModel +from esmvaltool.diag_scripts.shared import ( + group_metadata, + run_diagnostic, + select_metadata, +) + +logger = logging.getLogger(os.path.basename(__file__)) + + +def _get_grouped_data(cfg, input_data): + """Group input data to create individual MLR models for each group.""" + group_attribute = cfg['group_metadata'] + logger.info( + "Grouping training data by attribute '%s' and creating individual MLR " + "model for each group member", group_attribute) + + # Group data using var types + var_types = group_metadata(input_data, 'var_type') + training_data = var_types.get('feature', []) + var_types.get('label', []) + prediction_data = [] + for pred_type in var_types: + if 'prediction_' in pred_type: + prediction_data.extend(var_types[pred_type]) + + # Create groups of dataset using training data + grouped_datasets = group_metadata(training_data, group_attribute) + grouped_input_data = {} + for (group_val, datasets) in grouped_datasets.items(): + datasets.extend(prediction_data) + grouped_input_data[group_val] = datasets + return (group_attribute, grouped_input_data) + + +def _get_pseudo_reality_data(cfg, input_data): + """Get input data groups for pseudo-reality experiment.""" + pseudo_reality_attrs = cfg['pseudo_reality'] + logger.info( + "Grouping input data for pseudo-reality experiment using attributes " + "%s", pseudo_reality_attrs) + + # Extract training data + var_types = group_metadata(input_data, 'var_type') + training_data = var_types.get('feature', []) + var_types.get('label', []) + + # Extract given prediction datasets + original_prediction_data = [] + for pred_type in var_types: + if 'prediction_' in pred_type: + original_prediction_data.extend(var_types[pred_type]) + original_prediction_data = deepcopy(original_prediction_data) + + # Add aliases and group datasets + for dataset in training_data: + dataset['pseudo_reality_group'] = mlr.create_alias( + dataset, pseudo_reality_attrs) + grouped_datasets = group_metadata(training_data, 'pseudo_reality_group') + grouped_input_data = {} + for (group_val, datasets) in grouped_datasets.items(): + logger.debug("Found pseudo reality group '%s'", group_val) + pred_datasets = deepcopy(datasets) + for dataset in pred_datasets: + dataset['prediction_name'] = group_val + if dataset['var_type'] == 'feature': + dataset['var_type'] = 'prediction_input' + else: + dataset['var_type'] = 'prediction_reference' + remaining_datasets = [] + for data in training_data: + if data['pseudo_reality_group'] != group_val: + remaining_datasets.append(deepcopy(data)) + grouped_input_data[group_val] = (pred_datasets + remaining_datasets + + original_prediction_data) + return ('pseudo-reality', grouped_input_data) + + +def _get_raw_input_data(cfg): + """Extract all input datasets.""" + input_data = mlr.get_input_data(cfg, + pattern=cfg.get('pattern'), + ignore=cfg.get('ignore')) + select_kwargs = cfg.get('select_metadata', {}) + if select_kwargs: + logger.info("Only selecting files matching %s", select_kwargs) + input_data = select_metadata(input_data, **select_kwargs) + paths = [d['filename'] for d in input_data] + logger.debug("Remaining files:") + logger.debug(pformat(paths)) + return input_data + + +def _update_mlr_model(mlr_model_type, 
mlr_model):
+    """Update MLR model parameters during run time."""
+    if mlr_model_type == 'gpr_sklearn':
+        new_kernel = (sklearn_kernels.ConstantKernel(1.0, (1e-5, 1e5)) *
+                      sklearn_kernels.RBF(1.0, (1e-5, 1e5)))
+        mlr_model.update_parameters(final__regressor__kernel=new_kernel)
+
+
+def check_cfg(cfg):
+    """Check recipe configuration for invalid options."""
+    if 'mlr_model_type' not in cfg:
+        raise ValueError(
+            "Necessary configuration option 'mlr_model_type' not given")
+    if cfg.get('group_metadata') and cfg.get('pseudo_reality'):
+        raise ValueError(
+            "The options 'group_metadata' and 'pseudo_reality' may not be "
+            "used together")
+    mutually_exclusive_options = [
+        int('efecv_kwargs' in cfg),
+        int('grid_search_cv_param_grid' in cfg),
+        int('rfecv_kwargs' in cfg),
+    ]
+    if sum(mutually_exclusive_options) > 1:
+        raise ValueError(
+            "The options 'efecv_kwargs', 'grid_search_cv_param_grid' and "
+            "'rfecv_kwargs' may not be used together")
+
+
+def get_grouped_data(cfg):
+    """Get (grouped) input datasets according to given settings."""
+    input_data = _get_raw_input_data(cfg)
+    if cfg.get('group_metadata'):
+        return _get_grouped_data(cfg, input_data)
+    if cfg.get('pseudo_reality'):
+        return _get_pseudo_reality_data(cfg, input_data)
+    logger.info("Creating single MLR model")
+    return (None, {None: input_data})
+
+
+def run_mlr_model(cfg, mlr_model_type, group_attribute, grouped_datasets):
+    """Run MLR model(s) of desired type on input data."""
+    for (descr, datasets) in grouped_datasets.items():
+        if descr is not None:
+            attr = '' if group_attribute is None else f'{group_attribute} '
+            logger.info("Creating MLR model '%s' for %s'%s'", mlr_model_type,
+                        attr, descr)
+            cfg['sub_dir'] = descr
+        mlr_model = MLRModel.create(mlr_model_type, datasets, **cfg)
+
+        # Update MLR model parameters dynamically
+        _update_mlr_model(mlr_model_type, mlr_model)
+
+        # Fit and predict
+        if ('grid_search_cv_param_grid' in cfg and
+                cfg['grid_search_cv_param_grid']):
+            cv_param_grid = cfg['grid_search_cv_param_grid']
+            cv_kwargs = cfg.get('grid_search_cv_kwargs', {})
+            mlr_model.grid_search_cv(cv_param_grid, **cv_kwargs)
+        elif 'efecv_kwargs' in cfg:
+            mlr_model.efecv(**cfg['efecv_kwargs'])
+        elif 'rfecv_kwargs' in cfg:
+            mlr_model.rfecv(**cfg['rfecv_kwargs'])
+        else:
+            mlr_model.fit()
+        predict_args = {
+            'save_mlr_model_error': cfg.get('save_mlr_model_error'),
+            'save_lime_importance': cfg.get('save_lime_importance'),
+            'save_propagated_errors': cfg.get('save_propagated_errors'),
+            **cfg.get('predict_kwargs', {}),
+        }
+        mlr_model.predict(**predict_args)
+
+        # Print further information
+        mlr_model.print_correlation_matrices()
+        mlr_model.print_regression_metrics()
+        mlr_model.test_normality_of_residuals()
+
+        # Skip further output if desired
+        if not cfg.get('only_predict'):
+            mlr_model.export_training_data()
+            mlr_model.export_prediction_data()
+            run_mlr_model_plots(cfg, mlr_model, mlr_model_type)
+
+
+def run_mlr_model_plots(cfg, mlr_model, mlr_model_type):
+    """Run MLR model plotting functions."""
+    mlr_model.plot_residuals()
+    mlr_model.plot_residuals_histogram()
+    mlr_model.plot_residuals_distribution()
+    mlr_model.plot_prediction_errors()
+    mlr_model.plot_scatterplots()
+    if not cfg.get('accept_only_scalar_data') and cfg.get(
+            'plot_partial_dependences'):
+        mlr_model.plot_partial_dependences()
+    if 'gbr' in mlr_model_type:
+        mlr_model.plot_feature_importance()
+        if ('rfecv_kwargs' not in cfg and 'efecv_kwargs' not in cfg):
+            mlr_model.plot_training_progress()
+    if 'gpr' in mlr_model_type and not 
cfg.get('accept_only_scalar_data'): + mlr_model.print_kernel_info() + is_linear_model = any([ + 'lasso' in mlr_model_type, + 'linear' in mlr_model_type, + 'ridge' in mlr_model_type, + mlr_model_type == 'huber', + ]) + if is_linear_model: + mlr_model.plot_coefs() + mlr_model.plot_feature_importance() + if mlr_model.features.size == 1: + mlr_model.plot_1d_model() + + +def run_mmm_model(cfg, group_attribute, grouped_datasets): + """Run simple MMM model(s) on input data.""" + for (descr, datasets) in grouped_datasets.items(): + if descr is not None: + attr = '' if group_attribute is None else f'{group_attribute} ' + logger.info("Creating MMM model for %s'%s'", attr, descr) + create_mmm_model(cfg, input_data=datasets, description=descr) + + +def main(cfg): + """Run the diagnostic.""" + check_cfg(cfg) + mlr_model_type = cfg.pop('mlr_model_type') + logger.info("Found MLR model type '%s'", mlr_model_type) + (group_attr, grouped_datasets) = get_grouped_data(cfg) + if mlr_model_type == 'mmm': + run_mmm_model(cfg, group_attr, grouped_datasets) + else: + run_mlr_model(cfg, mlr_model_type, group_attr, grouped_datasets) + + +# Run main function when this script is called +if __name__ == '__main__': + mlr.ignore_warnings() + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/mlr/mmm.py b/esmvaltool/diag_scripts/mlr/mmm.py new file mode 100644 index 0000000000..03aef8d144 --- /dev/null +++ b/esmvaltool/diag_scripts/mlr/mmm.py @@ -0,0 +1,350 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +"""Use simple multi-model mean for predictions. + +Description +----------- +This diagnostic calculates the (unweighted) mean over all given datasets for a +given target variable. + +Author +------ +Manuel Schlund (DLR, Germany) + +Project +------- +CRESCENDO + +Configuration options in recipe +------------------------------- +convert_units_to: str, optional + Convert units of the input data. Can also be given as dataset option. +dtype: str (default: 'float64') + Internal data type which is used for all calculations, see + ``_ for a list of + allowed values. +ignore: list of dict, optional + Ignore specific datasets by specifying multiple :obj:`dict` s of metadata. +mlr_model_name: str, optional (default: 'MMM') + Human-readable name of the MLR model instance (e.g used for labels). +mmm_error_type: str, optional + If given, additionally saves estimated squared MMM model error. If the + option is set to ``'loo'``, the (constant) error is estimated as RMSEP + using leave-one-out cross-validation. No other options are supported at the + moment. +pattern: str, optional + Pattern matched against ancestor file names. +prediction_name: str, optional + Default ``prediction_name`` of output cubes if no 'prediction_reference' + dataset is given. +weighted_samples: dict + If specified, use weighted mean square error to estimate prediction error. + The given keyword arguments are directly passed to + :func:`esmvaltool.diag_scripts.mlr.get_all_weights` to calculate the sample + weights. By default, area weights and time weights are used. 
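+
+A hypothetical script section using these options (values are illustrative
+only) could read:
+
+.. code-block:: yaml
+
+   scripts:
+     mmm:
+       script: mlr/mmm.py
+       mlr_model_name: MMM
+       mmm_error_type: loo
+       convert_units_to: g kg-1
+       weighted_samples:
+         area_weighted: true
+         time_weighted: true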
+ +""" + +import logging +import os +from copy import deepcopy +from pprint import pformat + +import iris +import numpy as np +from sklearn.metrics import mean_squared_error +from sklearn.model_selection import LeaveOneOut + +import esmvaltool.diag_scripts.shared.iris_helpers as ih +from esmvaltool.diag_scripts import mlr +from esmvaltool.diag_scripts.shared import ( + ProvenanceLogger, + get_diagnostic_filename, + group_metadata, + io, + run_diagnostic, + select_metadata, +) + +logger = logging.getLogger(os.path.basename(__file__)) + + +def _add_dataset_attributes(cube, datasets, cfg): + """Add dataset-related attributes to cube.""" + dataset_names = sorted(list({d['dataset'] for d in datasets})) + projects = sorted(list({d['project'] for d in datasets})) + start_years = list({d['start_year'] for d in datasets}) + end_years = list({d['end_year'] for d in datasets}) + cube.attributes['dataset'] = '|'.join(dataset_names) + cube.attributes['description'] = 'MMM prediction' + cube.attributes['end_year'] = min(end_years) + cube.attributes['mlr_model_name'] = cfg['mlr_model_name'] + cube.attributes['mlr_model_type'] = 'mmm' + cube.attributes['project'] = '|'.join(projects) + cube.attributes['start_year'] = min(start_years) + cube.attributes['var_type'] = 'prediction_output' + + +def _load_cube(cfg, dataset): + """Load single :class:`iris.cube.Cube`.""" + path = dataset['filename'] + cube = iris.load_cube(path) + cube.data = cube.core_data().astype(cfg['dtype'], casting='same_kind') + convert_units(cfg, cube, dataset) + return (cube, path) + + +def add_general_attributes(cube, **kwargs): + """Add general attributes to cube.""" + for (key, val) in kwargs.items(): + if val is not None: + cube.attributes[key] = val + + +def convert_units(cfg, cube, data): + """Convert units if desired.""" + cfg_settings = cfg.get('convert_units_to') + data_settings = data.get('convert_units_to') + if cfg_settings or data_settings: + units_to = cfg_settings + if data_settings: + units_to = data_settings + logger.info("Converting units from '%s' to '%s'", cube.units, units_to) + cube.convert_units(units_to) + + +def get_loo_error_cube(cfg, label_datasets): + """Estimate prediction error using cross-validation.""" + loo = LeaveOneOut() + logger.info("Estimating prediction error using cross-validator %s", + str(loo.__class__)) + label_datasets = np.array(label_datasets) + errors = [] + for (train_idx, test_idx) in loo.split(label_datasets): + ref_cube = get_mmm_cube(cfg, label_datasets[test_idx]) + mmm_cube = get_mmm_cube(cfg, label_datasets[train_idx]) + + # Apply mask + mask = np.ma.getmaskarray(ref_cube.data).ravel() + mask |= np.ma.getmaskarray(mmm_cube.data).ravel() + + y_true = ref_cube.data.ravel()[~mask] + y_pred = mmm_cube.data.ravel()[~mask] + weights = mlr.get_all_weights(ref_cube, **cfg['weighted_samples']) + weights = weights.ravel()[~mask] + + # Calculate mean squared error + error = mean_squared_error(y_true, y_pred, sample_weight=weights) + errors.append(error) + + # Get error cube + error_cube = get_mmm_cube(cfg, label_datasets) + error_array = np.empty(error_cube.shape).ravel() + mask = np.ma.getmaskarray(error_cube.data).ravel() + error_array[mask] = np.nan + error_array[~mask] = np.mean(errors) + error_array = np.ma.masked_invalid(error_array) + error_cube.data = error_array.reshape(error_cube.shape) + + # Cube metadata + error_cube.attributes['error_type'] = 'loo' + error_cube.attributes['squared'] = 1 + error_cube.attributes['var_type'] = 'prediction_output_error' + error_cube.var_name += 
'_squared_mmm_error_estim' + error_cube.long_name += ' (squared MMM error estimation using CV)' + error_cube.units = mlr.units_power(error_cube.units, 2) + return error_cube + + +def get_grouped_data(cfg, input_data=None): + """Get input files.""" + if input_data is None: + logger.debug("Loading input data from 'cfg' argument") + input_data = mlr.get_input_data(cfg, + pattern=cfg.get('pattern'), + ignore=cfg.get('ignore')) + else: + logger.debug("Loading input data from 'input_data' argument") + if not mlr.datasets_have_mlr_attributes(input_data, log_level='error'): + raise ValueError("At least one input dataset does not have valid " + "MLR attributes") + if not input_data: + raise ValueError("No input data found") + paths = [d['filename'] for d in input_data] + logger.debug("Found files") + logger.debug(pformat(paths)) + + # Extract necessary data + label_data = select_metadata(input_data, var_type='label') + if not label_data: + raise ValueError("No data with var_type 'label' found") + prediction_reference_data = select_metadata( + input_data, var_type='prediction_reference') + extracted_data = label_data + prediction_reference_data + logger.debug("Found 'label' data") + logger.debug(pformat([d['filename'] for d in label_data])) + logger.debug("Found 'prediction_reference' data") + logger.debug(pformat([d['filename'] for d in prediction_reference_data])) + + # Return grouped data + return group_metadata(extracted_data, 'tag') + + +def get_mmm_cube(cfg, label_datasets): + """Get multi-model mean data.""" + cubes = iris.cube.CubeList() + paths = [] + (ref_cube, _) = _load_cube(cfg, label_datasets[0]) + for dataset in label_datasets: + (cube, path) = _load_cube(cfg, dataset) + ih.prepare_cube_for_merging(cube, path) + cubes.append(cube) + paths.append(path) + mmm_cube = cubes.merge_cube() + if len(paths) > 1: + mmm_cube = mmm_cube.collapsed(['cube_label'], iris.analysis.MEAN) + for aux_coord in ref_cube.coords(dim_coords=False): + mmm_cube.add_aux_coord(aux_coord, ref_cube.coord_dims(aux_coord)) + mmm_cube.remove_coord('cube_label') + _add_dataset_attributes(mmm_cube, label_datasets, cfg) + return mmm_cube + + +def get_reference_dataset(datasets, tag): + """Get ``prediction_reference`` dataset.""" + ref_datasets = select_metadata(datasets, var_type='prediction_reference') + if not ref_datasets: + logger.warning( + "Calculating residuals for '%s' not possible, no " + "'prediction_reference' dataset given", tag) + return (None, None) + if len(ref_datasets) > 1: + filenames = [d['filename'] for d in ref_datasets] + raise ValueError( + f"Expected at most one 'prediction_reference' dataset for " + f"'{tag}', got {len(ref_datasets):d}:\n{pformat(filenames)}") + return (ref_datasets[0], ref_datasets[0].get('prediction_name')) + + +def get_residual_cube(mmm_cube, ref_cube): + """Calculate residuals.""" + if mmm_cube.shape != ref_cube.shape: + raise ValueError( + f"Expected identical shapes for 'label' and " + f"'prediction_reference' datasets, got {mmm_cube.shape} and " + f"{ref_cube.shape}, respectively") + res_cube = ref_cube.copy() + res_cube.data -= mmm_cube.data + res_cube.attributes = mmm_cube.attributes + res_cube.attributes['residuals'] = 'true minus predicted values' + res_cube.attributes['var_type'] = 'prediction_residual' + res_cube.var_name += '_residual' + res_cube.long_name += ' (residual)' + return res_cube + + +def save_error(cfg, label_datasets, mmm_path, **cube_attrs): + """Save estimated error of MMM.""" + if len(label_datasets) < 2: + logger.warning( + "Estimating MMM prediction 
error not possible, at least 2 'label' "
+            "datasets are needed, only %i given", len(label_datasets))
+        return
+    error_type = cfg['mmm_error_type']
+    allowed_error_types = ['loo']
+    logger.info("Calculating error using error type '%s'", error_type)
+    if error_type == 'loo':
+        err_cube = get_loo_error_cube(cfg, label_datasets)
+    else:
+        raise NotImplementedError(
+            f"mmm_error_type '{error_type}' is currently not supported, "
+            f"supported types are {allowed_error_types}")
+    add_general_attributes(err_cube, **cube_attrs)
+    err_path = mmm_path.replace('_prediction', '_squared_prediction_error')
+    io.iris_save(err_cube, err_path)
+    write_provenance(cfg, err_path,
+                     [d['filename'] for d in label_datasets],
+                     f"{err_cube.long_name} of MMM model "
+                     f"{cfg['mlr_model_name']} using error type {error_type}.")
+
+
+def save_residuals(cfg, mmm_cube, ref_dataset, label_datasets, **cube_attrs):
+    """Save residuals."""
+    logger.info("Calculating residuals")
+    (ref_cube, _) = _load_cube(cfg, ref_dataset)
+    res_cube = get_residual_cube(mmm_cube, ref_cube)
+    add_general_attributes(res_cube, **cube_attrs)
+    mmm_path = mmm_cube.attributes['filename']
+    res_path = mmm_path.replace('_prediction', '_prediction_residual')
+    io.iris_save(res_cube, res_path)
+    ancestors = ([d['filename'] for d in label_datasets] +
+                 [ref_dataset['filename']])
+    caption = (f"Residuals of predicted {res_cube.long_name} of MMM model "
+               f"{cfg['mlr_model_name']}")
+    if 'prediction_name' in cube_attrs:
+        caption += f" for prediction {cube_attrs['prediction_name']}"
+    caption += '.'
+    write_provenance(cfg, res_path, ancestors, caption)
+
+
+def write_provenance(cfg, netcdf_path, ancestors, caption):
+    """Write provenance information."""
+    record = {
+        'ancestors': ancestors,
+        'authors': ['schlund_manuel'],
+        'caption': caption,
+        'references': ['schlund20jgr'],
+    }
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(netcdf_path, record)
+
+
+def main(cfg, input_data=None, description=None):
+    """Run the diagnostic."""
+    cfg = deepcopy(cfg)
+    cfg.setdefault('dtype', 'float64')
+    cfg.setdefault('mlr_model_name', 'MMM')
+    cfg.setdefault('weighted_samples',
+                   {'area_weighted': True, 'time_weighted': True})
+
+    # Get data
+    grouped_data = get_grouped_data(cfg, input_data=input_data)
+    description = '' if description is None else f'_for_{description}'
+
+    # Loop over all tags
+    for (tag, datasets) in grouped_data.items():
+        logger.info("Processing label '%s'", tag)
+
+        # Get label datasets and reference dataset if possible
+        label_datasets = select_metadata(datasets, var_type='label')
+        (ref_dataset, pred_name) = get_reference_dataset(datasets, tag)
+        if pred_name is None:
+            pred_name = cfg.get('prediction_name')
+
+        # Calculate multi-model mean
+        logger.info("Calculating multi-model mean")
+        mmm_cube = get_mmm_cube(cfg, label_datasets)
+        add_general_attributes(mmm_cube, tag=tag, prediction_name=pred_name)
+        mmm_path = get_diagnostic_filename(
+            f"mmm_{tag}_prediction{description}", cfg)
+        io.iris_save(mmm_cube, mmm_path)
+        write_provenance(cfg, mmm_path,
+                         [d['filename'] for d in label_datasets],
+                         f"Predicted {mmm_cube.long_name} of MMM model "
+                         f"{cfg['mlr_model_name']}.")
+
+        # Estimate prediction error using cross-validation
+        if 'mmm_error_type' in cfg:
+            save_error(cfg, label_datasets, mmm_path, tag=tag,
+                       prediction_name=pred_name)
+
+        # Calculate residuals
+        if ref_dataset is not None:
+            save_residuals(cfg, mmm_cube, ref_dataset, label_datasets, tag=tag,
+                           prediction_name=pred_name)
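
# --- Editor's note: illustrative sketch, not part of this patch -----------
# The 'loo' error type handled by save_error() above estimates the squared
# MMM error by cross-validation: each 'label' dataset is left out once, the
# mean of the remaining datasets is compared against it, and the squared
# differences are averaged. The helper below is a hypothetical numpy
# stand-in for the real logic in get_loo_error_cube():

import numpy as np

def loo_squared_error(members):
    """Return the grid-point-wise squared LOO error estimate.

    ``members`` is an (n_models, ...) array holding the 'label' data.
    """
    errors = []
    for i in range(members.shape[0]):
        rest = np.delete(members, i, axis=0)  # MMM without model i
        errors.append((rest.mean(axis=0) - members[i]) ** 2)
    return np.mean(errors, axis=0)

# --- end editor's note -----------------------------------------------------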
+
+
+# Run main function when this script is called
+if __name__ == '__main__':
+    with run_diagnostic() as config:
+        main(config)
diff --git a/esmvaltool/diag_scripts/mlr/models/__init__.py b/esmvaltool/diag_scripts/mlr/models/__init__.py
new file mode 100644
index 0000000000..2cd7730298
--- /dev/null
+++ b/esmvaltool/diag_scripts/mlr/models/__init__.py
@@ -0,0 +1,3580 @@
+"""Base class for MLR models.
+
+Example recipe
+--------------
+The :ref:`MLR main diagnostic script` provides an interface for using MLR
+models in recipes. The following recipe shows a typical example of how to
+set up MLR recipes/diagnostics with the following properties:
+
+#. Set up an MLR model with target variable ``y`` (using the tag ``Y``) and
+   three predictors ``x1``, ``x2`` and ``latitude`` (with tags ``X1``, ``X2``
+   and ``latitude``, respectively). The target variable needs the attribute
+   ``var_type: label``; the predictors ``x1`` and ``x2`` need the attribute
+   ``var_type: feature``. The coordinate feature ``latitude`` is added via
+   the option ``coords_as_features: [latitude]``.
+#. Suppose ``y`` and ``x1`` are 3D fields (pressure, latitude, longitude);
+   ``x2`` is a 2D field (latitude, longitude). Thus, it is necessary to add
+   the attribute ``broadcast_from: [1, 2]`` to it (see ``dim_map`` parameter
+   in :func:`iris.util.broadcast_to_shape` for details). In order to consider
+   multiple climate models (``A``, ``B`` and ``C``) at once, the option
+   ``group_datasets_by_attributes: [dataset]`` is necessary. Otherwise, the
+   diagnostic will complain about duplicate data.
+#. For the prediction, data from dataset ``D`` is used (with
+   ``var_type: prediction_input``). For the feature ``X1``, an additional
+   input error (with ``var_type: prediction_input_error``) is used.
+
+   .. code-block:: yaml
+
+     diag_feature_x1:
+       variables:
+         feature:
+           ... # specify project, mip, start_year, end_year, etc.
+           short_name: x1
+           var_type: feature
+           tag: X1
+           additional_datasets:
+             - {dataset: A, ...}
+             - {dataset: B, ...}
+             - {dataset: C, ...}
+         prediction_input:
+           ... # specify project, mip, start_year, end_year, etc.
+           short_name: x1
+           var_type: prediction_input
+           tag: X1
+           additional_datasets:
+             - {dataset: D, ...}
+         prediction_input_error:
+           ... # specify project, mip, start_year, end_year, etc.
+           short_name: x1Stderr
+           var_type: prediction_input_error
+           tag: X1
+           additional_datasets:
+             - {dataset: D, ...}
+       scripts:
+         null
+
+     diag_feature_x2:
+       variables:
+         feature:
+           ... # specify project, mip, start_year, end_year, etc.
+           short_name: x2
+           var_type: feature
+           broadcast_from: [1, 2]
+           tag: X2
+           additional_datasets:
+             - {dataset: A, ...}
+             - {dataset: B, ...}
+             - {dataset: C, ...}
+         prediction_input:
+           ... # specify project, mip, start_year, end_year, etc.
+           short_name: x2
+           var_type: prediction_input
+           broadcast_from: [1, 2]
+           tag: X2
+           additional_datasets:
+             - {dataset: D, ...}
+       scripts:
+         null
+
+     diag_label:
+       variables:
+         label:
+           ... # specify project, mip, start_year, end_year, etc.
+           short_name: y
+           var_type: label
+           tag: Y
+           additional_datasets:
+             - {dataset: A, ...}
+             - {dataset: B, ...}
+             - {dataset: C, ...}
+       scripts:
+         null
+
+#. In this example, a `GBRT model `_ (with ``mlr_model_type: gbr_sklearn``)
+   is used. Parameters for this are specified via
+   ``parameters_final_regressor``. Apart from the best-estimate prediction,
+   the estimated MLR model error (``save_mlr_model_error: test``) and the
+   propagated prediction input error (``save_propagated_errors: true``) are
+   returned.
+#. With ``postprocess.py``, the global mean of the best-estimate prediction
+   and the corresponding errors (MLR model + propagated input error) are
+   calculated.
+
+   .. code-block:: yaml
+
+     diag_mlr_gbrt:
+       scripts:
+         mlr:
+           script: mlr/main.py
+           ancestors: [
+             'diag_label/y',
+             'diag_feature_*/*',
+           ]
+           coords_as_features: [latitude]
+           group_datasets_by_attributes: [dataset]
+           mlr_model_name: GBRT
+           mlr_model_type: gbr_sklearn
+           parameters_final_regressor:
+             learning_rate: 0.1
+             n_estimators: 100
+           save_mlr_model_error: test
+           save_propagated_errors: true
+         postprocess:
+           script: mlr/postprocess.py
+           ancestors: ['diag_mlr_gbrt/mlr']
+           ignore:
+             - {var_type: null}
+           mean: [pressure, latitude, longitude]
+
+#. Plots of the global distribution (latitude, longitude) are created with
+   ``plot.py`` after calculating the mean over the pressure coordinate using
+   ``preprocess.py``.
+
+   .. code-block:: yaml
+
+     diag_plot:
+       scripts:
+         preprocess:
+           script: mlr/preprocess.py
+           ancestors: ['diag_mlr_gbrt/mlr']
+           collapse: [pressure]
+           ignore:
+             - {var_type: null}
+         plot:
+           script: mlr/plot.py
+           ancestors: ['diag_plot/preprocess']
+           plot_map:
+             plot_kwargs:
+               cbar_label: 'Y'
+               cbar_ticks: [0, 1, 2, 3]
+               vmin: 0
+               vmax: 3
+
+All datasets must have the attribute ``var_type``, which specifies the type
+of the dataset. Possible values are ``feature`` (independent variables used
+for training/testing), ``label`` (dependent variables, y-axis),
+``prediction_input`` (independent variables used for prediction of dependent
+variables, usually observational data), ``prediction_input_error`` (standard
+error of the ``prediction_input`` data, optional) or ``prediction_reference``
+(`true` values for the ``prediction_input`` data, optional). In addition, all
+datasets must have the attribute ``tag``, which specifies the name of the
+variable/diagnostic. All datasets can be converted to new units in the
+loading step by specifying the key ``convert_units_to`` in the respective
+dataset(s).
+
+Training data
+-------------
+All groups (specified in ``group_datasets_by_attributes``, if desired) given
+for ``label`` datasets must also be given for the ``feature`` datasets.
+Within these groups, all ``feature`` and ``label`` datasets must have the
+same shape, unless the attribute ``broadcast_from`` is set to a list of
+suitable coordinate indices to map this dataset to regular datasets (see
+parameter ``dim_map`` in :func:`iris.util.broadcast_to_shape`).
+
+Prediction data
+---------------
+All ``tag`` s specified for ``prediction_input`` datasets must also be given
+for the ``feature`` datasets (unless ``allow_missing_features`` is set to
+``True``). Multiple predictions can be specified by ``prediction_name``.
+Within these predictions, all ``prediction_input`` datasets must have the
+same shape, unless the attribute ``broadcast_from`` is given. Errors in the
+prediction input data can be specified by ``prediction_input_error``. If
+given, these errors are used to calculate errors in the final prediction
+using linear error propagation given by `LIME `_. Additionally, `true`
+values for ``prediction_input`` can be specified with
+``prediction_reference`` datasets (together with the respective
+``prediction_name``). This allows an evaluation of the performance of the
+MLR model by calculating residuals (`true` minus predicted values).
+
+Available MLR models
+--------------------
+MLR models are subclasses of this base class. A list of all available MLR
+models can be found :ref:`here `. To add a new MLR model, create a new file
+in ``esmvaltool/diag_scripts/mlr/models/`` with a child class of
+:class:`esmvaltool.diag_scripts.mlr.models.MLRModel` decorated with
+:meth:`esmvaltool.diag_scripts.mlr.models.MLRModel.register_mlr_model`.
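
(Editor's note: the following sketch is illustrative and not part of this
patch; the model type name ``my_linear`` and the regressor choice are
hypothetical.) Registering a new model then boils down to:

.. code-block:: python

    from sklearn.linear_model import LinearRegression

    from esmvaltool.diag_scripts.mlr.models import MLRModel


    @MLRModel.register_mlr_model('my_linear')
    class MyLinearModel(MLRModel):
        """MLR model wrapping a plain linear regressor."""

        _CLF_TYPE = LinearRegression

Instances are then created via the factory method, e.g.
``MLRModel.create('my_linear', input_datasets, **options)``.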
+
+.. _MLRModeloptionalparameters:
+
+Optional parameters for class initialization
+--------------------------------------------
+accept_only_scalar_data: bool (default: False)
+    If set to ``True``, only accept scalar input data. Should be used
+    together with the option ``group_datasets_by_attributes``.
+allow_missing_features: bool (default: False)
+    Allow missing features in the training data.
+cache_intermediate_results: bool (default: True)
+    Cache the intermediate results of the pipeline's transformers.
+categorical_features: list of str
+    Names of features which are interpreted as categorical features (in
+    contrast to numerical features).
+coords_as_features: list of str
+    If given, specify a list of coordinates which should be used as features.
+dtype: str (default: 'float64')
+    Internal data type which is used for all calculations, see
+    ``_ for a list of allowed values.
+fit_kwargs: dict
+    Optional keyword arguments for the pipeline's ``fit()`` function. These
+    arguments have to be given for each step of the pipeline separated by
+    two underscores, i.e. ``s__p`` is the parameter ``p`` for step ``s``.
+group_datasets_by_attributes: list of str
+    List of dataset attributes which are used to group input data for
+    ``feature`` s and ``label`` s. For example, this is necessary if the MLR
+    model should consider multiple climate models in the training phase. If
+    this option is not given, specifying multiple datasets with identical
+    ``var_type`` and ``tag`` entries results in an error. If given, all the
+    input data is first grouped by the given attributes and then checked for
+    uniqueness within this group. After that, all groups are stacked to form
+    a single set of training data.
+imputation_strategy: str (default: 'remove')
+    Strategy for the imputation of missing values in the features. Must be
+    one of ``'remove'``, ``'mean'``, ``'median'``, ``'most_frequent'`` or
+    ``'constant'``.
+log_level: str (default: 'info')
+    Verbosity for the logger. Must be one of ``'debug'``, ``'info'``,
+    ``'warning'`` or ``'error'``.
+mlr_model_name: str
+    Human-readable name of the MLR model instance (e.g., used for labels).
+n_jobs: int (default: 1)
+    Maximum number of jobs spawned by this class. Use ``-1`` to use all
+    processors. More details are given `here `_.
+output_file_type: str (default: 'png')
+    File type for the plots.
+parameters: dict
+    Parameters used for the whole pipeline. Have to be given for each step
+    of the pipeline separated by two underscores, i.e. ``s__p`` is the
+    parameter ``p`` for step ``s``. ``random_state`` parameters are
+    explicitly allowed here (in contrast to
+    ``parameters_final_regressor``).
+parameters_final_regressor: dict
+    Parameters used for the **final** regressor. If these parameters are
+    updated using the function :meth:`update_parameters`, the new names have
+    to be given for each step of the pipeline separated by two underscores,
+    i.e. ``s__p`` is the parameter ``p`` for step ``s``. Note: to pass an
+    argument for ``random_state``, use the option ``random_state`` of this
+    class.
+pca: bool (default: False)
+    Preprocess numerical input features using PCA. Parameters for this
+    pipeline step can be given via the ``parameters`` argument.
+plot_dir: str (default: ~/plots)
+    Root directory to save plots.
+plot_units: dict + Replace specific units (keys) with other text (values) in plots. +random_state: int or None (default: None) + Random seed for :class:`numpy.random.RandomState` that is used by all + functionalities of this class that require randomness (e.g., probabilistic + ML algorithms like Gradient Boosting Regression models, random train test + splits, etc.). If ``None``, use a random seed. Use an :obj:`int` to get + reproducible results. See ``__ for more details. +savefig_kwargs: dict + Keyword arguments for :func:`matplotlib.pyplot.savefig`. +seaborn_settings: dict + Options for :func:`seaborn.set_theme` (affects all plots). +standardize_data: bool (default: True) + Linearly standardize numerical input data by removing mean and scaling to + unit variance. +sub_dir: str + Create additional subdirectory for output in ``work_dir`` and ``plot_dir``. +test_size: float (default: 0.25) + If given, randomly exclude the desired fraction of input data from training + and use it as test data. +weighted_samples: dict + If specified, use weighted samples in the loss function used for the + training of the MLR model. The given keyword arguments are directly passed + to :func:`esmvaltool.diag_scripts.mlr.get_all_weights` to calculate the + sample weights. By default, no weights are used. Raises errors if the + desired weights cannot be calculated for the data, e.g., when + ``time_weighted=True`` is used but the data does not contain a dimension + ``time``. +work_dir: str (default: ~/work) + Root directory to save all other files (mainly ``*.nc`` files). + +""" + +import importlib +import logging +import os +import warnings +from copy import deepcopy +from inspect import getfullargspec +from pprint import pformat + +import iris +import matplotlib.pyplot as plt +import numpy as np +import pandas as pd +import seaborn as sns +from cf_units import Unit +from joblib import Parallel, delayed +from lime.lime_tabular import LimeTabularExplainer +from matplotlib.ticker import ScalarFormatter +from scipy.stats import shapiro +from sklearn import metrics +from sklearn.compose import ColumnTransformer +from sklearn.decomposition import PCA +from sklearn.exceptions import NotFittedError +from sklearn.impute import SimpleImputer +from sklearn.inspection import PartialDependenceDisplay +from sklearn.model_selection import ( + GridSearchCV, + LeaveOneGroupOut, + LeaveOneOut, + train_test_split, +) +from sklearn.preprocessing import StandardScaler + +from esmvaltool.diag_scripts import mlr +from esmvaltool.diag_scripts.mlr.custom_sklearn import ( + AdvancedPipeline, + AdvancedRFECV, + AdvancedTransformedTargetRegressor, + cross_val_score_weighted, + get_rfecv_transformer, + perform_efecv, +) +from esmvaltool.diag_scripts.shared import ( + ProvenanceLogger, + group_metadata, + io, + select_metadata, +) + +logger = logging.getLogger(os.path.basename(__file__)) + + +class MLRModel(): + """Base class for MLR models.""" + + _CLF_TYPE = None + _MODELS = {} + _MLR_MODEL_TYPE = None + + @staticmethod + def _load_mlr_models(): + """Load MLR models from :mod:`esmvaltool.diag_scripts.mlr.models`.""" + current_path = os.path.dirname(os.path.realpath(__file__)) + models_path = os.path.join(current_path) + for (root, _, model_files) in os.walk(models_path): + for model_file in model_files: + rel_path = ('' if root == models_path else os.path.relpath( + root, models_path)) + module = os.path.join(rel_path, + os.path.splitext(model_file)[0]) + try: + importlib.import_module( + f"esmvaltool.diag_scripts.mlr.models." 
+ f"{module.replace(os.sep, '.')}" + ) + except ImportError: + pass + + @classmethod + def register_mlr_model(cls, mlr_model_type): + """Add MLR model (subclass of this class) (decorator).""" + logger.debug("Found available MLR model '%s'", mlr_model_type) + + def decorator(subclass): + """Decorate subclass.""" + subclass._MLR_MODEL_TYPE = mlr_model_type + cls._MODELS[mlr_model_type] = subclass + return subclass + + return decorator + + @classmethod + def create(cls, mlr_model_type, *args, **kwargs): + """Create desired MLR model subclass (factory method).""" + cls._load_mlr_models() + if not cls._MODELS: + raise NotImplementedError( + f"Cannot initialize new MLR model with type " + f"'{mlr_model_type}', no MLR models found. Please add " + f"subclasses of {cls} in new files under 'esmvaltool/" + f"diag_scripts/mlr/models/' decorated by 'esmvaltool." + f"diag_scripts.mlr.models.{cls.__name__}." + f"register_mlr_model()'") + if mlr_model_type not in cls._MODELS: + raise NotImplementedError( + f"MLR model type '{mlr_model_type}' not found in 'esmvaltool/" + f"diag_scripts/mlr/models/'") + subclass = cls._MODELS[mlr_model_type] + logger.info( + "Initialized MLR model with type '%s' and final regressor %s", + mlr_model_type, subclass._CLF_TYPE) + return subclass(*args, **kwargs) + + def __init__(self, input_datasets, **kwargs): + """Initialize class members. + + Parameters + ---------- + input_datasets : list of dict + List of dataset metadata used as data for the MLR model. + **kwargs + Optional keyword arguments, see next sections. + + Raises + ------ + NotImplementedError + Class is initialized directly without the use of its factory + function ``create()``. + ValueError + Invalid data given. + + """ + self._check_clf() + + # Private attributes + self._cfg = deepcopy(kwargs) + self._clf = None + self._lime_explainer = None + self._data = {} + self._data['pred'] = {} + self._datasets = {} + self._classes = {} + self._parameters = {} + + # Set default settings + self._set_default_settings() + + # Random state + self._random_state = np.random.RandomState(self._cfg['random_state']) + + # Seaborn + sns.set_theme(**self._cfg.get('seaborn_settings', {})) + + # Adapt output directories + self._cfg['mlr_work_dir'] = os.path.join(self._cfg['work_dir'], + self._cfg['sub_dir']) + self._cfg['mlr_plot_dir'] = os.path.join(self._cfg['plot_dir'], + self._cfg['sub_dir']) + if not os.path.exists(self._cfg['mlr_work_dir']): + os.makedirs(self._cfg['mlr_work_dir']) + logger.info("Created %s", self._cfg['mlr_work_dir']) + if not os.path.exists(self._cfg['mlr_plot_dir']): + os.makedirs(self._cfg['mlr_plot_dir']) + logger.info("Created %s", self._cfg['mlr_plot_dir']) + + # Load datasets, classes and training data + self._load_input_datasets(input_datasets) + self._load_classes() + self._load_data() + + # Create pipeline (with all preprocessor steps and final regressor) + self.reset_pipeline() + if self._cfg['parameters']: + logger.debug("Using parameter(s): %s", self._cfg['parameters']) + self.update_parameters(**self._cfg['parameters']) + + # Log successful initialization + logger.info("Initialized MLR model (using at most %i processes)", + self._cfg['n_jobs']) + logger.debug("With parameters") + logger.debug(pformat(self.parameters)) + + @property + def categorical_features(self): + """numpy.ndarray: Categorical features.""" + return self.features[self._classes['features'].categorical] + + @property + def data(self): + """dict: Input data of the MLR model.""" + return self._data + + @property + def 
features(self): + """numpy.ndarray: Features of the input data.""" + return self._classes['features'].index.values + + @property + def features_after_preprocessing(self): + """numpy.ndarray: Features of the input data after preprocessing.""" + x_train = self.data['train'].x + y_train = self.get_y_array('train') + try: + self._check_fit_status('Calculating features after preprocessing') + except NotFittedError: + self._clf.fit_transformers_only(x_train, y_train, + **self.fit_kwargs) + x_trans = self._clf.transform_only(x_train) + features = self.features + n_features_may_drop = False + if 'feature_selection' in self._clf.named_steps: + support = self._clf.named_steps['feature_selection'].support + features = features[support] + n_features_may_drop = True + if 'pca' in self._clf.named_steps: + categorical_features = np.array([ + f for f in features if f in self.categorical_features]) + n_numerical_features = x_trans.shape[1] - categorical_features.size + features = [ + f'Principal component {idx}' + for idx in range(n_numerical_features) + ] + features.extend(categorical_features) + n_features_may_drop = True + if not n_features_may_drop and x_trans.shape[1] != self.features.size: + logger.warning( + "Number of features decreased from %i to %i during " + "preprocessing for unknown reasons (neither feature selection " + "using recursive feature elimination nor PCA is performed)", + self.features.size, x_trans.shape[1]) + features = [ + f'Unknown feature {idx}' for idx in range(x_trans.shape[1]) + ] + return np.array(features, dtype='str') + + @property + def features_types(self): + """pandas.Series: Types of the features.""" + return self._classes['features'].types + + @property + def features_units(self): + """pandas.Series: Units of the features.""" + return self._classes['features'].units + + @property + def fit_kwargs(self): + """dict: Keyword arguments for :meth:`fit`.""" + fit_kwargs = self._cfg['fit_kwargs'] + fit_kwargs = self._update_fit_kwargs(fit_kwargs) + verbosity_kwargs = self._get_verbosity_parameters(self._clf.fit) + for (key, val) in verbosity_kwargs.items(): + fit_kwargs.setdefault(key, val) + return fit_kwargs + + @property + def group_attributes(self): + """numpy.ndarray: Group attributes of the input data.""" + return self._classes['group_attributes'] + + @property + def label(self): + """str: Label of the input data.""" + return self._classes['label'].index.values[0] + + @property + def label_units(self): + """str: Units of the label.""" + return self._classes['label'].units.values[0] + + @property + def mlr_model_type(self): + """str: MLR model type.""" + return self._MLR_MODEL_TYPE + + @property + def numerical_features(self): + """numpy.ndarray: Numerical features.""" + return self.features[~self._classes['features'].categorical] + + @property + def parameters(self): + """dict: Parameters of the complete MLR model pipeline.""" + return self._parameters + + @property + def random_state(self): + """numpy.random.RandomState: Random state instance.""" + return self._random_state + + def efecv(self, **kwargs): + """Perform exhaustive feature elimination using cross-validation. + + Parameters + ---------- + **kwargs : keyword arguments, optional + Additional options for :func:`esmvaltool.diag_scripts.mlr. + custom_sklearn.cross_val_score_weighted`. 
+ + """ + logger.info( + "Performing exhaustive feature elimination using cross-validation " + "with final regressor %s on %i training points (thiy may take a " + "while...)", self._CLF_TYPE, + len(self.data['train'].index)) + + # Get fit parameters + fit_kwargs = deepcopy(self.fit_kwargs) + keys_to_remove = [] + for key in fit_kwargs: + if key.endswith('eval_set'): + keys_to_remove.append(key) + for key in keys_to_remove: + logger.warning( + "Fit parameter '%s' is not supported for efecv()", key) + fit_kwargs.pop(key) + + # Get other keyword arguments + kwargs = deepcopy(kwargs) + verbosity_kwargs = self._get_verbosity_parameters( + cross_val_score_weighted) + for (key, val) in verbosity_kwargs.items(): + kwargs.setdefault(key, val) + kwargs.setdefault('n_jobs', self._cfg['n_jobs']) + kwargs['fit_params'] = fit_kwargs + kwargs['sample_weights'] = self._get_sample_weights('train') + if kwargs.get('cv') == 'logo': + kwargs.update(self._get_logo_cv_kwargs()) + + # Exhaustive feature selection + (self._clf, transformer) = perform_efecv( + self._clf, self.data['train'].x, self.get_y_array('train'), + **kwargs) + self._clf.steps.insert(0, ('feature_selection', transformer)) + + # Log results + new_features = self.features[transformer.support] + logger.info( + "Exhaustive feature elimination was successful, %i of the %i " + "features remain", new_features.size, self.features.size) + logger.info("Old features: %s", self.features) + logger.info("New features: %s", new_features) + logger.info("Successfully fitted MLR model on %i training point(s)", + len(self.data['train'].index)) + logger.debug("Pipeline steps:") + logger.debug(pformat(list(self._clf.named_steps.keys()))) + logger.debug("Parameters:") + logger.debug(pformat(self.parameters)) + + # LIME + self._load_lime_explainer() + + def export_prediction_data(self, filename=None): + """Export all prediction data contained in `self._data`. + + Parameters + ---------- + filename : str, optional (default: '{data_type}_{pred_name}.csv') + Name of the exported files. + + """ + for pred_name in self.data['pred']: + self._save_csv_file('pred', filename, pred_name=pred_name) + + def export_training_data(self, filename=None): + """Export all training data contained in `self._data`. + + Parameters + ---------- + filename : str, optional (default: '{data_type}.csv') + Name of the exported files. + + """ + for data_type in ('all', 'train', 'test'): + self._save_csv_file(data_type, filename) + + def fit(self): + """Fit MLR model. + + Note + ---- + Specifying keyword arguments for this function is not allowed here + since :attr:`features_after_preprocessing` might be altered by + that. Use the keyword argument ``fit_kwargs`` during class + initialization instead. + + """ + logger.info( + "Fitting MLR model with final regressor %s on %i training " + "point(s)", self._CLF_TYPE, len(self.data['train'].index)) + + # Create MLR model with desired parameters and fit it + self._clf.fit(self.data['train'].x, self.data['train'].y, + **self.fit_kwargs) + self._parameters = self._get_clf_parameters() + logger.info("Successfully fitted MLR model on %i training point(s)", + len(self.data['train'].index)) + logger.debug("Pipeline steps:") + logger.debug(pformat(list(self._clf.named_steps.keys()))) + logger.debug("Parameters:") + logger.debug(pformat(self.parameters)) + + # LIME + self._load_lime_explainer() + + def get_ancestors(self, label=True, features=None, prediction_names=None, + prediction_reference=False): + """Return ancestor files. 
+ + Parameters + ---------- + label : bool, optional (default: True) + Return ``label`` files. + features : list of str, optional (default: None) + Features for which files should be returned. If ``None``, return + files for all features. + prediction_names : list of str, optional (default: None) + Prediction names for which files should be returned. If ``None``, + return files for all prediction names. + prediction_reference : bool, optional (default: False) + Return ``prediction_reference`` files if available for given + ``prediction_names``. + + Returns + ------- + list of str + Ancestor files. + + Raises + ------ + ValueError + Invalid ``feature`` or ``prediction_name`` given. + + """ + ancestors = [] + + # Label files + if label: + ancestors.extend([d['filename'] for d in self._datasets['label']]) + + # Feature files + if features is None: + features = self.features + for feature in features: + if feature not in self.features: + raise ValueError( + f"Got invalid feature '{feature}', expected one of " + f"{self.features}") + ancestors.extend( + [d['filename'] for d in self._datasets['feature'] + if d['tag'] == feature] + ) + + # Prediction files + available_pred_names = list(self._datasets['prediction_input'].keys()) + if prediction_names is None: + prediction_names = available_pred_names + for pred_name in prediction_names: + if pred_name not in available_pred_names: + raise ValueError( + f"Got invalid prediction name '{pred_name}', expected one " + f"of {available_pred_names}") + ancestors.extend( + [d['filename'] for d in + self._datasets['prediction_input'][pred_name]] + ) + ancestors.extend( + [d['filename'] for d in + self._datasets['prediction_input_error'].get(pred_name, [])] + ) + if prediction_reference: + ancestors.extend( + [d['filename'] for d in + self._datasets['prediction_reference'].get(pred_name, [])] + ) + + return ancestors + + def get_data_frame(self, data_type, impute_nans=False): + """Return data frame of specified type. + + Parameters + ---------- + data_type : str + Data type to be returned. Must be one of ``'all'``, ``'train'`` or + ``'test'``. + impute_nans : bool, optional (default: False) + Impute nans if desired. + + Returns + ------- + pandas.DataFrame + Desired data. + + Raises + ------ + TypeError + ``data_type`` is invalid or data does not exist (e.g. test data is + not set). + + """ + allowed_types = ('all', 'train', 'test') + if data_type not in allowed_types: + raise TypeError( + f"'{data_type}' is not an allowed type, specify one of " + f"'{allowed_types}'") + if data_type not in self.data: + raise TypeError(f"No '{data_type}' data available") + data_frame = self.data[data_type] + if impute_nans: + data_frame = self._impute_nans(data_frame) + return data_frame + + def get_x_array(self, data_type, impute_nans=False): + """Return x data of specific type. + + Parameters + ---------- + data_type : str + Data type to be returned. Must be one of ``'all'``, ``'train'`` or + ``'test'``. + impute_nans : bool, optional (default: False) + Impute nans if desired. + + Returns + ------- + numpy.ndarray + Desired data. + + Raises + ------ + TypeError + ``data_type`` is invalid or data does not exist (e.g. test data is + not set). + + """ + data_frame = self.get_data_frame(data_type, impute_nans=impute_nans) + return data_frame.x.values + + def get_y_array(self, data_type, impute_nans=False): + """Return y data of specific type. + + Parameters + ---------- + data_type : str + Data type to be returned. Must be one of ``'all'``, ``'train'`` or + ``'test'``. 
+        impute_nans : bool, optional (default: False)
+            Impute nans if desired.
+
+        Returns
+        -------
+        numpy.ndarray
+            Desired data.
+
+        Raises
+        ------
+        TypeError
+            ``data_type`` is invalid or data does not exist (e.g. test data
+            is not set).
+
+        """
+        data_frame = self.get_data_frame(data_type, impute_nans=impute_nans)
+        return data_frame.y.squeeze().values
+
+    def grid_search_cv(self, param_grid, **kwargs):
+        """Perform exhaustive parameter search using cross-validation.
+
+        Parameters
+        ----------
+        param_grid : dict or list of dict
+            Parameter names (keys) and ranges (values) for the search. Have
+            to be given for each step of the pipeline separated by two
+            underscores, i.e. ``s__p`` is the parameter ``p`` for step ``s``.
+        **kwargs : keyword arguments, optional
+            Additional options for
+            :class:`sklearn.model_selection.GridSearchCV`.
+
+        Raises
+        ------
+        ValueError
+            The fitted :class:`sklearn.model_selection.GridSearchCV` instance
+            supplies neither the attribute ``best_estimator_`` nor
+            ``best_params_``.
+
+        """
+        logger.info(
+            "Performing exhaustive grid search cross-validation with final "
+            "regressor %s and parameter grid %s on %i training points",
+            self._CLF_TYPE, param_grid, len(self.data['train'].index))
+
+        # Get keyword arguments
+        (cv_kwargs, fit_kwargs) = self._get_cv_estimator_kwargs(GridSearchCV,
+                                                                **kwargs)
+
+        # Create and fit GridSearchCV instance
+        clf = GridSearchCV(self._clf, param_grid, **cv_kwargs)
+        clf.fit(self.data['train'].x, self.data['train'].y, **fit_kwargs)
+
+        # Try to find best estimator
+        if hasattr(clf, 'best_estimator_'):
+            self._clf = clf.best_estimator_
+        elif hasattr(clf, 'best_params_'):
+            self.update_parameters(**clf.best_params_)
+            self._clf.fit(self.data['train'].x, self.data['train'].y,
+                          **fit_kwargs)
+        else:
+            raise ValueError(
+                "GridSearchCV not successful, cannot determine best "
+                "estimator (neither using 'best_estimator_' nor "
+                "'best_params_'), adapt keyword arguments accordingly (see "
+                "https://scikit-learn.org/stable/modules/generated/"
+                "sklearn.model_selection.GridSearchCV.html for more help)")
+        self._parameters = self._get_clf_parameters()
+        logger.info(
+            "Exhaustive grid search successful, found best parameter(s) %s",
+            clf.best_params_)
+        logger.debug("CV results:")
+        logger.debug(pformat(clf.cv_results_))
+        logger.info("Successfully fitted MLR model on %i training point(s)",
+                    len(self.data['train'].index))
+        logger.debug("Pipeline steps:")
+        logger.debug(pformat(list(self._clf.named_steps.keys())))
+        logger.debug("Parameters:")
+        logger.debug(pformat(self.parameters))
+
+        # LIME
+        self._load_lime_explainer()
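    # --- Editor's note: illustrative sketch, not part of this patch --------
    # Parameter names in ``param_grid`` follow the 's__p' convention used
    # throughout this class; for the 'final' step (a transformed-target
    # wrapper around the regressor), a hypothetical grid could look like:
    #
    #     param_grid = {
    #         'final__regressor__n_estimators': [50, 100, 200],
    #         'final__regressor__learning_rate': [0.01, 0.1],
    #     }
    #     model.grid_search_cv(param_grid, cv=5)
    # --- end editor's note --------------------------------------------------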
+ + """ + if not self._is_ready_for_plotting(): + return + n_features = self.features.size + if n_features > 1: + raise ValueError( + f"Plotting lineplot of MLR model using 'plot_1d_model' is not " + f"possible, MLR model {self._cfg['mlr_model_name']} contains " + f"more than one feature ({n_features:d} features: " + f"{self.features})") + feature = self.features[0] + logger.info("Plotting 1D MLR model (sampling %i points for single " + "feature '%s')", n_points, feature) + if filename is None: + filename = '1d_mlr_model' + (_, axes) = plt.subplots() + + # Get available datasets + data_to_plot = ['train'] + if 'test' in self.data: + data_to_plot.append('test') + + # Plot training and test data (if available) + for data_type in data_to_plot: + x_data = self.data[data_type].x[feature].values + y_data = self.get_y_array(data_type) + axes.scatter( + x_data, y_data, + **self._get_plot_kwargs(data_type, plot_type='scatter')) + + # Plot MLR model + x_lin = pd.DataFrame.from_dict( + {feature: np.linspace(self.data['all'].x[feature].values.min(), + self.data['all'].x[feature].values.max(), + n_points)} + ) + y_pred = self._clf.predict(x_lin) + x_lin_1d = x_lin.values[:, 0] + axes.plot(x_lin_1d, y_pred, color='k', linewidth=2, + label=self._cfg['mlr_model_name']) + + # Plot appearance + title = (f"Predicted {self.label} by MLR model " + f"{self._cfg['mlr_model_name']}") + axes.set_title(title) + axes.set_xlabel(self._get_plot_feature(feature)) + axes.set_ylabel(self._get_plot_label()) + axes.legend(loc='best') + + # Save plot + plot_path = os.path.join( + self._cfg['mlr_plot_dir'], + filename + '.' + self._cfg['output_file_type'], + ) + plt.savefig(plot_path, **self._cfg['savefig_kwargs']) + logger.info("Wrote %s", plot_path) + plt.close() + + # Save provenance + cube = mlr.get_1d_cube( + x_lin_1d, + y_pred, + x_kwargs={'var_name': feature, + 'long_name': feature, + 'units': self.features_units[feature]}, + y_kwargs={'var_name': self.label, + 'long_name': title, + 'units': self.label_units, + 'attributes': {'project': '', 'dataset': ''}}, + ) + self._write_plot_provenance( + cube, plot_path, ancestors=self.get_ancestors(prediction_names=[]), + caption=title + '.', plot_types=['line']) + + def plot_partial_dependences(self, filename=None): + """Plot partial dependences for every feature. + + Parameters + ---------- + filename : str, optional (default: 'partial_dependece_{feature}') + Name of the plot file. + + Raises + ------ + sklearn.exceptions.NotFittedError + MLR model is not fitted. + + """ + if not self._is_ready_for_plotting(): + return + logger.info("Plotting partial dependences") + if filename is None: + filename = 'partial_dependece_{feature}' + + # Plot for every feature + # Note: Ignore warnings about missing feature names here because they + # are not used. 
+ x_train = self.get_x_array('train', impute_nans=True) + verbosity = self._get_verbosity_parameters( + PartialDependenceDisplay.from_estimator + ) + for feature_name in self.features: + logger.debug("Plotting partial dependence of '%s'", feature_name) + with warnings.catch_warnings(): + warnings.filterwarnings( + 'ignore', + message=('X does not have valid feature names, but ' + 'SimpleImputer was fitted with feature names'), + category=UserWarning, + module='sklearn', + ) + display = PartialDependenceDisplay.from_estimator( + self._clf, + x_train, + features=[feature_name], + feature_names=self.features, + method='brute', + line_kw={'color': 'b'}, + random_state=self.random_state, + **verbosity, + ) + title = (f"Partial dependence of {self.label} on {feature_name} " + f"for MLR model {self._cfg['mlr_model_name']}") + plt.title(title) + plt.xlabel(self._get_plot_feature(feature_name)) + plt.ylabel(self._get_plot_label()) + + # Save plot + new_filename = (filename.format(feature=feature_name) + '.' + + self._cfg['output_file_type']) + plot_path = os.path.join(self._cfg['mlr_plot_dir'], new_filename) + plt.savefig(plot_path, **self._cfg['savefig_kwargs']) + logger.info("Wrote %s", plot_path) + plt.close() + + # Save provenance + cube = mlr.get_1d_cube( + display.lines_[0, 0].get_xdata(), + display.lines_[0, 0].get_ydata(), + x_kwargs={'var_name': feature_name, + 'long_name': feature_name, + 'units': self.features_units[feature_name]}, + y_kwargs={'var_name': self.label, + 'long_name': self.label, + 'units': self.label_units, + 'attributes': {'project': '', 'dataset': ''}}, + ) + self._write_plot_provenance( + cube, plot_path, + ancestors=self.get_ancestors(prediction_names=[]), + caption=title + '.', plot_types=['line']) + + def plot_prediction_errors(self, filename=None): + """Plot predicted vs. true values. + + Parameters + ---------- + filename : str, optional (default: 'prediction_errors') + Name of the plot file. + + Raises + ------ + sklearn.exceptions.NotFittedError + MLR model is not fitted. + + """ + if not self._is_ready_for_plotting(): + return + logger.info("Plotting prediction errors") + if filename is None: + filename = 'prediction_errors' + (_, axes) = plt.subplots() + + # Get available datasets + data_to_plot = ['train'] + if 'test' in self.data: + data_to_plot.append('test') + + # Create plot + y_pred_all = [] + y_true_all = [] + data_types = [] + for data_type in data_to_plot: + logger.debug("Plotting prediction error of '%s' data", data_type) + x_data = self.data[data_type].x + y_pred = self._clf.predict(x_data) + y_true = self.get_y_array(data_type) + axes.scatter( + y_pred, y_true, + **self._get_plot_kwargs(data_type, plot_type='scatter')) + + # Collect data + y_pred_all.append(y_pred) + y_true_all.append(y_true) + data_types.append(np.full(y_pred.shape, data_type)) + + # Plot appearance + lims = [ + np.min([axes.get_xlim(), axes.get_ylim()]), + np.max([axes.get_xlim(), axes.get_ylim()]), + ] + axes.plot(lims, lims, linestyle='--', color='k', alpha=0.75) + axes.set_aspect('equal') + axes.set_xlim(lims) + axes.set_ylim(lims) + title = (f"Prediction errors of {self.label} " + f"({self._cfg['mlr_model_name']})") + axes.set_title(title) + axes.set_xlabel(f'Predicted {self._get_plot_label()}') + axes.set_ylabel(f'True {self._get_plot_label()}') + axes.legend(loc='upper left') + + # Save plot + plot_path = os.path.join( + self._cfg['mlr_plot_dir'], + filename + '.' 
+ self._cfg['output_file_type'], + ) + plt.savefig(plot_path, **self._cfg['savefig_kwargs']) + logger.info("Wrote %s", plot_path) + plt.close() + + # Save provenance + cube = mlr.get_1d_cube( + np.concatenate(y_pred_all), + np.concatenate(y_true_all), + x_kwargs={'var_name': self.label, + 'long_name': f'Predicted {self.label}', + 'units': self.label_units}, + y_kwargs={'var_name': self.label, + 'long_name': f'True {self.label}', + 'units': self.label_units, + 'attributes': {'project': '', 'dataset': ''}}, + ) + cube.add_aux_coord( + self._get_data_type_coord(np.concatenate(data_types)), 0) + self._write_plot_provenance( + cube, plot_path, ancestors=self.get_ancestors(prediction_names=[]), + caption=title + '.', plot_types=['scatter']) + + def plot_residuals(self, filename=None): + """Plot residuals of training and test (if available) data. + + Parameters + ---------- + filename : str, optional (default: 'residuals') + Name of the plot file. + + Raises + ------ + sklearn.exceptions.NotFittedError + MLR model is not fitted. + + """ + if not self._is_ready_for_plotting(): + return + logger.info("Plotting residuals") + if filename is None: + filename = 'residuals' + (_, axes) = plt.subplots() + + # Get available datasets + data_to_plot = ['train'] + if 'test' in self.data: + data_to_plot.append('test') + + # Create plot + y_pred_all = [] + y_res_all = [] + data_types = [] + for data_type in data_to_plot: + logger.debug("Plotting residuals of '%s' data", data_type) + x_data = self.data[data_type].x + y_pred = self._clf.predict(x_data) + y_true = self.get_y_array(data_type) + y_res = self._get_residuals(y_true, y_pred) + axes.scatter( + y_pred, y_res, + **self._get_plot_kwargs(data_type, plot_type='scatter')) + + # Collect data + y_pred_all.append(y_pred) + y_res_all.append(y_res) + data_types.append(np.full(y_pred.shape, data_type)) + + # Plot appearance + axes.axhline(0.0, linestyle='--', color='k', alpha=0.75) + axes.set_aspect('equal') + title = (f"Residuals of {self.label} ({self._cfg['mlr_model_name']})") + axes.set_title(title) + axes.set_xlabel(f'Predicted {self._get_plot_label()}') + axes.set_ylabel(f'Residuals of {self._get_plot_label()}') + self._set_axis_lim_symmetric(axes, 'y') + axes.legend(loc='best') + + # Save plot + plot_path = os.path.join( + self._cfg['mlr_plot_dir'], + filename + '.' + self._cfg['output_file_type'], + ) + plt.savefig(plot_path, **self._cfg['savefig_kwargs']) + logger.info("Wrote %s", plot_path) + plt.close() + + # Save provenance + cube = mlr.get_1d_cube( + np.concatenate(y_pred_all), + np.concatenate(y_res_all), + x_kwargs={'var_name': self.label, + 'long_name': f'Predicted {self.label}', + 'units': self.label_units}, + y_kwargs={'var_name': self.label, + 'long_name': f'Residuals of {self.label}', + 'units': self.label_units, + 'attributes': {'project': '', 'dataset': ''}}, + ) + cube.add_aux_coord( + self._get_data_type_coord(np.concatenate(data_types)), 0) + self._write_plot_provenance( + cube, plot_path, ancestors=self.get_ancestors(prediction_names=[]), + caption=title + '.', plot_types=['scatter']) + + def plot_residuals_histogram(self, filename=None): + """Plot histogram of residuals of training and test data. + + Parameters + ---------- + filename : str, optional (default: 'residuals_histogram') + Name of the plot file. + + Raises + ------ + sklearn.exceptions.NotFittedError + MLR model is not fitted. 
+ + """ + if not self._is_ready_for_plotting(): + return + logger.info("Plotting residuals histogram") + if filename is None: + filename = 'residuals_histogram' + (_, axes) = plt.subplots() + + # Get available datasets + data_to_plot = ['train'] + if 'test' in self.data: + data_to_plot.append('test') + + # Create plot (centralize bins around the zero) + y_res_all = [] + freq_all = [] + data_types = [] + for data_type in data_to_plot: + logger.debug("Plotting residuals histogram of '%s' data", + data_type) + x_data = self.data[data_type].x + y_pred = self._clf.predict(x_data) + y_true = self.get_y_array(data_type) + y_res = self._get_residuals(y_true, y_pred) + bins = self._get_centralized_bins(y_res, n_bins=20) + hist = axes.hist(y_res, bins=bins, + **self._get_plot_kwargs(data_type)) + + # Collect data + y_res_all.append(np.convolve(hist[1], (1, 1), 'valid') / 2.0) + freq_all.append(hist[0]) + data_types.append(np.full(hist[0].shape, data_type)) + + # Plot appearance + axes.axvline(0.0, linestyle='--', color='k', alpha=0.75) + title = (f"Histogram for residuals of {self.label} " + f"({self._cfg['mlr_model_name']})") + axes.set_title(title) + axes.set_xlabel(f'Residuals of {self._get_plot_label()}') + axes.set_ylabel('Frequency') + self._set_axis_lim_symmetric(axes, 'x') + axes.legend(loc='best') + + # Save plot + plot_path = os.path.join( + self._cfg['mlr_plot_dir'], + filename + '.' + self._cfg['output_file_type'], + ) + plt.savefig(plot_path, **self._cfg['savefig_kwargs']) + logger.info("Wrote %s", plot_path) + plt.close() + + # Save provenance + cube = mlr.get_1d_cube( + np.concatenate(y_res_all), + np.concatenate(freq_all), + x_kwargs={'var_name': self.label, + 'long_name': f'Residuals of {self.label}', + 'units': self.label_units}, + y_kwargs={'var_name': 'frequency', + 'long_name': 'Frequency', + 'units': '1', + 'attributes': {'project': '', 'dataset': ''}}, + ) + cube.add_aux_coord( + self._get_data_type_coord(np.concatenate(data_types)), 0) + self._write_plot_provenance( + cube, plot_path, ancestors=self.get_ancestors(prediction_names=[]), + caption=title + '.', plot_types=['histogram']) + + def plot_residuals_distribution(self, filename=None): + """Plot distribution of residuals of training and test data (KDE). + + Parameters + ---------- + filename : str, optional (default: 'residuals_distribution') + Name of the plot file. + + Raises + ------ + sklearn.exceptions.NotFittedError + MLR model is not fitted. 
+ + """ + if not self._is_ready_for_plotting(): + return + logger.info("Plotting residuals distribution") + if filename is None: + filename = 'residuals_distribution' + + # Get available datasets + data_to_plot = ['train'] + if 'test' in self.data: + data_to_plot.append('test') + + # Create plot (centralize bins around the zero) + data_types = [] + for data_type in data_to_plot: + logger.debug("Plotting residuals distribution of '%s' data", + data_type) + x_data = self.data[data_type].x + y_pred = self._clf.predict(x_data) + y_true = self.get_y_array(data_type) + y_res = self._get_residuals(y_true, y_pred) + axes = sns.kdeplot(y_res, **self._get_plot_kwargs(data_type)) + + # Collect data + data_types.append(np.full(axes.lines[-1].get_xdata().shape, + data_type)) + + # Plot appearance + axes.axvline(0.0, linestyle='--', color='k', alpha=0.75) + title = (f"Probability distribution of residuals of {self.label} " + f"({self._cfg['mlr_model_name']})") + axes.set_title(title) + axes.set_xlabel(f'Residuals of {self._get_plot_label()}') + axes.set_ylabel('Probability density') + self._set_axis_lim_symmetric(axes, 'x') + axes.legend(loc='best') + + # Save plot + plot_path = os.path.join( + self._cfg['mlr_plot_dir'], + filename + '.' + self._cfg['output_file_type'], + ) + plt.savefig(plot_path, **self._cfg['savefig_kwargs']) + logger.info("Wrote %s", plot_path) + plt.close() + + # Save provenance + cube = mlr.get_1d_cube( + np.concatenate([line.get_xdata() for line in axes.lines[:-1]]), + np.concatenate([line.get_ydata() for line in axes.lines[:-1]]), + x_kwargs={'var_name': self.label, + 'long_name': f'Residuals of {self.label}', + 'units': self.label_units}, + y_kwargs={'var_name': 'probability_density', + 'long_name': 'Probability Density', + 'units': '1', + 'attributes': {'project': '', 'dataset': ''}}, + ) + cube.add_aux_coord( + self._get_data_type_coord(np.concatenate(data_types)), 0) + self._write_plot_provenance( + cube, plot_path, ancestors=self.get_ancestors(prediction_names=[]), + caption=title + '.', plot_types=['probability']) + + def plot_scatterplots(self, filename=None): + """Plot scatterplots label vs. feature for every feature. + + Parameters + ---------- + filename : str, optional (default: 'scatterplot_{feature}') + Name of the plot file. + + Raises + ------ + sklearn.exceptions.NotFittedError + MLR model is not fitted. + + """ + if not self._is_ready_for_plotting(): + return + logger.info("Plotting scatterplots") + if filename is None: + filename = 'scatterplot_{feature}' + + # Plot scatterplot for every feature + for feature in self.features: + logger.debug("Plotting scatterplot of '%s'", feature) + (_, axes) = plt.subplots() + + # Iterate over group attributes + for group_attr in self.group_attributes: + group_attr = self._group_attr_to_pandas_index_str(group_attr) + axes.plot(self.data['all'].x.loc[group_attr, feature], + self.data['all'].y.loc[group_attr, self.label], + '.', label=group_attr) + + # Plot appearance + axes.legend(loc='center left', ncol=2, bbox_to_anchor=[1.05, 0.5], + borderaxespad=0.0) + title = f"Target variable {self.label} vs. feature {feature}" + axes.set_title(title) + axes.set_xlabel(self._get_plot_feature(feature)) + axes.set_ylabel(self._get_plot_label()) + + # Save plot + plot_path = os.path.join( + self._cfg['mlr_plot_dir'], + filename.format(feature=feature) + '.' 
+                + self._cfg['output_file_type'])
+            plt.savefig(plot_path, **self._cfg['savefig_kwargs'])
+            logger.info("Wrote %s", plot_path)
+            plt.close()
+
+            # Save provenance
+            cube = mlr.get_1d_cube(
+                self.data['all'].x.loc[:, feature].values,
+                self.get_y_array('all'),
+                x_kwargs={'var_name': feature,
+                          'long_name': feature,
+                          'units': self.features_units[feature]},
+                y_kwargs={'var_name': self.label,
+                          'long_name': self.label,
+                          'units': self.label_units,
+                          'attributes': {'project': '', 'dataset': ''}},
+            )
+            ancestors = self.get_ancestors(features=[feature],
+                                           prediction_names=[])
+            self._write_plot_provenance(
+                cube, plot_path, ancestors=ancestors, caption=title + '.',
+                plot_types=['scatter'])
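    # --- Editor's note: illustrative sketch, not part of this patch --------
    # A typical end-to-end use of this class from a diagnostic (the model
    # type 'gbr_sklearn' is taken from the module docstring; all other
    # values are hypothetical):
    #
    #     model = MLRModel.create('gbr_sklearn', input_datasets,
    #                             test_size=0.25)
    #     model.fit()
    #     model.print_regression_metrics()
    #     model.predict(save_mlr_model_error='test',
    #                   save_propagated_errors=True)
    # --- end editor's note --------------------------------------------------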
+ + """ + self._check_fit_status('Prediction') + logger.info("Started prediction") + mlr.check_predict_kwargs(kwargs) + if kwargs: + logger.info( + "Using additional keyword argument(s) %s for predict() " + "function", kwargs) + + # Iterate over different predictions + for pred_name in self._datasets['prediction_input']: + logger.info("Predicting '%s'", self._get_name(pred_name)) + + # Prediction + (x_pred, x_err, y_ref, + x_cube) = self._extract_prediction_input(pred_name) + pred_dict = self._get_prediction_dict( + pred_name, x_pred, x_err, y_ref, + get_mlr_model_error=save_mlr_model_error, + get_lime_importance=save_lime_importance, + get_propagated_errors=save_propagated_errors, **kwargs) + + # Save data in class member + y_pred = pd.DataFrame(pred_dict[None], + columns=[self.label], + index=x_pred.index, + dtype=self._cfg['dtype']) + self._data['pred'][pred_name] = pd.concat([x_pred, y_pred], + axis=1, + keys=['x', 'y']) + + # Save prediction cubes + self._save_prediction_cubes(pred_dict, pred_name, x_cube) + + def print_correlation_matrices(self): + """Print correlation matrices for all datasets.""" + self._check_fit_status('Printing correlation matrices') + for data_type in ('all', 'train', 'test'): + if data_type not in self.data: + continue + logger.info("Correlation matrix for %s data:\n%s", data_type, + self.data[data_type][['x', 'y']].corr()) + + def print_regression_metrics(self, logo=False): + """Print all available regression metrics for training data. + + Parameters + ---------- + logo : bool, optional (default: False) + Print regression metrics using + :class:`sklearn.model_selection.LeaveOneGroupOut` cross-validation. + Only possible when `group_datasets_by_attributes` was given during + class initialization. + + """ + self._check_fit_status('Printing regression metrics') + regression_metrics = [ + 'explained_variance_score', + 'mean_absolute_error', + 'mean_squared_error', + 'r2_score', + ] + + # Metrics on train and test data + for data_type in ('all', 'train', 'test'): + self._print_metrics(regression_metrics, data_type) + logger.info("") + + # Metrics on CV data + if logo: + logger.info( + "Evaluating regression metrics using 'LeaveOneGroupOut' " + "cross-validation using group attributes %s on training data", + self._cfg['group_datasets_by_attributes']) + regression_metrics = { + 'explained_variance_score': 'explained_variance', + 'mean_absolute_error': 'neg_mean_absolute_error', + 'root_mean_squared_error': 'neg_root_mean_squared_error', + 'r2_score': 'r2', + } + x_data = self.data['train'].x + y_data = self.get_y_array('train') + sample_weights = self._get_sample_weights('train') + for (metric, scoring) in regression_metrics.items(): + value = cross_val_score_weighted( + self._clf, x_data, y_data, scoring=scoring, + n_jobs=self._cfg['n_jobs'], fit_params=self.fit_kwargs, + **self._get_verbosity_parameters(cross_val_score_weighted), + **self._get_logo_cv_kwargs()) + value = np.mean(value) + if 'neg_' in scoring: + value = -value + logger.info("%s: %s", metric, value) + if sample_weights is None: + return + for (metric, scoring) in regression_metrics.items(): + value = cross_val_score_weighted( + self._clf, x_data, y_data, scoring=scoring, + n_jobs=self._cfg['n_jobs'], fit_params=self.fit_kwargs, + sample_weights=sample_weights, + **self._get_verbosity_parameters(cross_val_score_weighted), + **self._get_logo_cv_kwargs()) + value = np.mean(value) + if 'neg_' in scoring: + value = -value + logger.info("Weighted %s: %s", metric, value) + + def reset_pipeline(self): + 
"""Reset regressor pipeline.""" + steps = [] + numerical_features_idx = [ + int(np.where(self.features == tag)[0][0]) + for tag in self.numerical_features + ] + + # Imputer + if self._cfg['imputation_strategy'] != 'remove': + imputer = SimpleImputer(strategy=self._cfg['imputation_strategy']) + steps.append(('imputer', imputer)) + + # Scaler for numerical features + if self._cfg['standardize_data']: + x_scaler = ColumnTransformer( + [('', StandardScaler(), numerical_features_idx)], + remainder='passthrough', + ) + steps.append(('x_scaler', x_scaler)) + + # PCA for numerical features + if self._cfg.get('pca'): + pca = ColumnTransformer( + [('', PCA(random_state=self.random_state), + numerical_features_idx)], + remainder='passthrough', + ) + steps.append(('pca', pca)) + + # Final regressor + final_parameters = self._load_final_parameters() + final_regressor = self._CLF_TYPE(**final_parameters) + + # Transformer for labels if desired (if not, add pd to np converter) + if self._cfg['standardize_data']: + y_scaler = StandardScaler() + else: + y_scaler = StandardScaler(with_mean=False, with_std=False) + transformed_target_regressor = AdvancedTransformedTargetRegressor( + transformer=y_scaler, regressor=final_regressor) + steps.append(('final', transformed_target_regressor)) + + # Final pipeline + if self._cfg['cache_intermediate_results']: + if self._cfg['n_jobs'] is None or self._cfg['n_jobs'] == 1: + memory = self._cfg['mlr_work_dir'] + else: + logger.debug( + "Caching intermediate results of Pipeline is not " + "supported for multiple processes (using at most %i " + "processes)", self._cfg['n_jobs']) + memory = None + else: + memory = None + self._clf = AdvancedPipeline(steps, memory=memory) + logger.info("Created pipeline with steps %s", + list(self._clf.named_steps.keys())) + + def rfecv(self, **kwargs): + """Perform recursive feature elimination using cross-validation. + + Note + ---- + This only works for final estimators that provide information about + feature importance either through a ``coef_`` attribute or through a + ``feature_importances_`` attribute. + + Parameters + ---------- + **kwargs : keyword arguments, optional + Additional options for :class:`sklearn.feature_selection.RFECV`. + + Raises + ------ + RuntimeError + Final estimator does not provide ``coef_`` or + ``feature_importances_`` attribute. 
+
+        """
+        logger.info(
+            "Performing recursive feature elimination using cross-validation "
+            "with final regressor %s on %i training points", self._CLF_TYPE,
+            len(self.data['train'].index))
+
+        # Get keyword arguments
+        (cv_kwargs, fit_kwargs) = self._get_cv_estimator_kwargs(AdvancedRFECV,
+                                                                **kwargs)
+        fit_kwargs = deepcopy(fit_kwargs)
+        keys_to_remove = []
+        for key in fit_kwargs:
+            if key.endswith('eval_set'):
+                keys_to_remove.append(key)
+        for key in keys_to_remove:
+            logger.warning(
+                "Fit parameter '%s' is not supported for rfecv()", key)
+            fit_kwargs.pop(key)
+
+        # Create and fit AdvancedRFECV instance
+        rfecv = AdvancedRFECV(self._clf, **cv_kwargs)
+        rfecv.fit(self.data['train'].x, self.get_y_array('train'),
+                  **fit_kwargs)
+
+        # Add feature selection step to pipeline
+        self._clf = rfecv.estimator_
+        transformer = get_rfecv_transformer(rfecv)
+        self._clf.steps.insert(0, ('feature_selection', transformer))
+
+        # Log results
+        new_features = self.features[rfecv.support_]
+        logger.info(
+            "Recursive feature elimination was successful, %i of the %i "
+            "features remain", new_features.size, self.features.size)
+        logger.info("Old features: %s", self.features)
+        logger.info("New features: %s", new_features)
+        logger.info("Successfully fitted MLR model on %i training point(s)",
+                    len(self.data['train'].index))
+        logger.debug("Pipeline steps:")
+        logger.debug(pformat(list(self._clf.named_steps.keys())))
+        logger.debug("Parameters:")
+        logger.debug(pformat(self.parameters))
+
+        # LIME
+        self._load_lime_explainer()
+
+    def test_normality_of_residuals(self):
+        """Perform Shapiro-Wilk test for normality of residuals.
+
+        Raises
+        ------
+        sklearn.exceptions.NotFittedError
+            MLR model is not fitted.
+
+        """
+        if not self._is_ready_for_plotting():
+            return
+
+        # Get available datasets
+        data_to_check = ['train']
+        if 'test' in self.data:
+            data_to_check.append('test')
+
+        # Perform Shapiro-Wilk test
+        for data_type in data_to_check:
+            x_data = self.data[data_type].x
+            y_pred = self._clf.predict(x_data)
+            y_true = self.get_y_array(data_type)
+            y_res = self._get_residuals(y_true, y_pred)
+            (w_value, p_value) = shapiro(y_res)
+            logger.info(
+                "Result of Shapiro-Wilk test for normality of residuals of "
+                "%s data: W = %.5f, p = %.5f", data_type, w_value, p_value)
+
+    def update_parameters(self, **params):
+        """Update parameters of the whole pipeline.
+
+        Note
+        ----
+        Parameter names have to be given for each step of the pipeline,
+        separated by two underscores, i.e. ``s__p`` is the parameter ``p`` for
+        step ``s``.
+
+        Parameters
+        ----------
+        **params : keyword arguments, optional
+            Parameters for the pipeline which should be updated.
+
+        Raises
+        ------
+        ValueError
+            Invalid parameter for pipeline given.
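+
+        Examples
+        --------
+        Hypothetical update of the number of estimators of a final regressor
+        wrapped in the ``final`` pipeline step (step and parameter names
+        depend on the actual pipeline)::
+
+            model.update_parameters(final__regressor__n_estimators=100)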
+ + """ + allowed_params = self._get_clf_parameters() + new_params = {} + for (key, val) in params.items(): + if key in allowed_params: + new_params[key] = val + else: + raise ValueError( + f"'{key}' is not a valid parameter for the pipeline") + self._clf.set_params(**new_params) + self._parameters = self._get_clf_parameters() + if new_params: + logger.info("Updated pipeline with parameters %s", new_params) + + def _calculate_sample_weights(self, cube, var_type, group_attr=None): + """Calculate sample weights if desired.""" + if not self._cfg['weighted_samples']: + return None + if var_type != 'feature': + return None + weights = mlr.get_all_weights(cube, **self._cfg['weighted_samples']) + weights = weights.astype(self._cfg['dtype'], casting='same_kind') + weights = pd.DataFrame( + {'sample_weight': weights.ravel()}, + index=self._get_multiindex(cube, group_attr=group_attr), + dtype=self._cfg['dtype'], + ) + msg = '' if group_attr is None else f" of '{group_attr}'" + logger.debug( + "Successfully calculated %i sample weights for training data%s " + "using %s", len(weights.index), msg, self._cfg['weighted_samples']) + return weights + + def _check_clf(self): + """Check if valid regressor type is given.""" + class_name = self.__class__.__name__ + if self._CLF_TYPE is None: + raise NotImplementedError( + f"No MLR model type specified, please use the factory " + f"function 'esmvaltool.diag_scripts.mlr.models.{class_name}." + f"create()' to initialize this class") + + def _check_cube_dimensions(self, cube, ref_cube, text=None): + """Check shape and coordinates of a given cube.""" + msg = '' if text is None else f' for {text}' + if self._cfg.get('accept_only_scalar_data'): + allowed_shapes = [(), (1, )] + if cube.shape not in allowed_shapes: + raise ValueError( + f"Expected only cubes with shapes {allowed_shapes} when " + f"option 'accept_only_scalar_data' is set to 'True', got " + f"{cube.shape}{msg}") + else: + if ref_cube is None: + return + if cube.shape != ref_cube.shape: + raise ValueError( + f"Expected cubes with shapes {ref_cube.shape}{msg}, got " + f"{cube.shape}. Consider regridding, pre-selecting data " + f"at class initialization (argument 'input_datasets') or " + f"the options 'broadcast_from' or 'group_datasets_by_" + f"attributes'") + cube_coords = cube.coords(dim_coords=True) + ref_coords = ref_cube.coords(dim_coords=True) + cube_coords_str = [ + f'{coord.name()}, shape {coord.shape}' for coord in cube_coords + ] + ref_coords_str = [ + f'{coord.name()}, shape {coord.shape}' for coord in ref_coords + ] + if cube_coords_str != ref_coords_str: + logger.warning( + "Cube coordinates differ, expected %s%s, got %s. 
Check " + "input cubes", ref_coords_str, msg, cube_coords_str) + return + for (idx, cube_coord) in enumerate(cube_coords): + ref_coord = ref_coords[idx] + if not np.allclose(cube_coord.points, ref_coord.points): + logger.warning( + "'%s' coordinate for different cubes does not " + "match, got %s%s, expected %s (values differ by " + "more than allowed tolerance, check input cubes)", + cube_coord.name(), cube_coord.points, msg, + ref_coord.points) + + def _check_dataset(self, datasets, var_type, tag, text=None): + """Check if datasets exist and are valid.""" + datasets = select_metadata(datasets, tag=tag, var_type=var_type) + msg = '' if text is None else text + if not datasets: + if var_type == 'prediction_input_error': + return None + if var_type == 'prediction_reference': + return None + if var_type == 'label': + raise ValueError(f"Label '{tag}'{msg} not found") + if not self._cfg.get('allow_missing_features'): + raise ValueError( + f"{var_type} '{tag}'{msg} not found, use 'allow_missing_" + f"features' to ignore this") + logger.info( + "Ignored missing %s '%s'%s since 'allow_missing_features' is " + "set to 'True'", var_type, tag, msg) + return None + if len(datasets) > 1: + raise ValueError( + f"{var_type} '{tag}'{msg} not unique, consider adapting the " + f"argument 'input_datasets' at class initialization to " + f"pre-select datasets or specify suitable attributes to group " + f"datasets with the option 'group_datasets_by_attributes'") + if var_type in ('label', 'prediction_reference'): + units = self.label_units + else: + units = self.features_units[tag] + if units != Unit(datasets[0]['units']): + raise ValueError( + f"Expected units '{units}' for {var_type} '{tag}'{msg}, got " + f"'{datasets[0]['units']}'") + return datasets[0] + + def _check_fit_status(self, text): + """Check if MLR model is fitted and raise exception otherwise.""" + x_dummy = pd.DataFrame( + np.ones((1, self.features.size), dtype=self._cfg['dtype']), + columns=self.features, + ) + try: + self._clf.predict(x_dummy) + except NotFittedError as exc: + raise NotFittedError( + f"{text} not possible, MLR model {self._CLF_TYPE} is not " + f"fitted yet, call fit(), grid_search_cv() or rfecv() " + f"first") from exc + + def _estimate_mlr_model_error(self, target_length, strategy): + """Estimate squared error of MLR model (using CV or test data).""" + logger.info( + "Estimating squared error of MLR model using strategy '%s'", + strategy) + + # Estimate MLR model error + if strategy == 'test': + if 'test' not in self.data: + raise ValueError( + f"'save_mlr_model_error' using strategy 'test' is not " + f"possible because no test data is available ('test_size' " + f"was set to '{self._cfg['test_size']}' during class " + f"initialization)") + y_pred = self._clf.predict(self.data['test'].x) + error = metrics.mean_squared_error( + self.get_y_array('test'), + y_pred, + sample_weight=self._get_sample_weights('test'), + ) + else: + if strategy == 'logo': + cv_kwargs = self._get_logo_cv_kwargs() + elif isinstance(strategy, int): + cv_kwargs = {'cv': strategy} + else: + raise ValueError( + f"Expected 'test', 'logo' or an integer as strategy for " + f"estimating MLR model error (argument " + f"'save_mlr_model_error'), got '{strategy}'") + x_data = self.data['train'].x + y_data = self.get_y_array('train') + error = cross_val_score_weighted( + self._clf, x_data, y_data, scoring='neg_mean_squared_error', + n_jobs=self._cfg['n_jobs'], fit_params=self.fit_kwargs, + sample_weights=self._get_sample_weights('train'), + 
**self._get_verbosity_parameters(cross_val_score_weighted), + **cv_kwargs) + error = -np.mean(error) + + # Reshape error + error_array = np.full(target_length, error, dtype=self._cfg['dtype']) + units = mlr.units_power(self.label_units, 2) + logger.info( + "Estimated squared MLR model error by %s %s using strategy '%s'", + error, units, strategy) + return error_array + + def _extract_features_and_labels(self): + """Extract feature and label data points from training data.""" + (x_data, _, + sample_weights) = self._extract_x_data(self._datasets['feature'], + 'feature') + y_data = self._extract_y_data(self._datasets['label'], 'label') + + # Check number of input points + if not x_data.index.equals(y_data.index): + raise ValueError( + f"Got differing point(s) for features and labels (" + f"{len(x_data.index):d} feature points and " + f"{len(y_data.index):d} label points):\n" + f"{x_data.index.difference(y_data.index)}") + logger.info("Found %i raw input data point(s) with data type '%s'", + len(y_data.index), self._cfg['dtype']) + + # Remove missing values in labels + (x_data, y_data, + sample_weights) = self._remove_missing_labels(x_data, y_data, + sample_weights) + + # Remove missing values in features (if desired) + (x_data, y_data, sample_weights) = self._remove_missing_features( + x_data, y_data, sample_weights) + + return (x_data, y_data, sample_weights) + + def _extract_prediction_input(self, prediction_name): + """Extract prediction input data points for ``prediction_name``.""" + (x_pred, x_cube, _) = self._extract_x_data( + self._datasets['prediction_input'][prediction_name], + 'prediction_input') + logger.info( + "Found %i raw prediction input data point(s) with data type '%s'", + len(x_pred.index), self._cfg['dtype']) + + # Prediction reference + if prediction_name not in self._datasets['prediction_reference']: + y_ref = None + logger.debug( + "No prediction reference for prediction '%s' available", + self._get_name(prediction_name)) + else: + y_ref = self._extract_y_data( + self._datasets['prediction_reference'][prediction_name], + 'prediction_reference') + if y_ref is not None: + if not x_pred.index.equals(y_ref.index): + raise ValueError( + f"Got differing point(s) for prediction input and " + f"prediction output ({len(x_pred.index):d} " + f"prediction input points and {len(y_ref.index):d} " + f"prediction output points):\n" + f"{x_pred.index.difference(y_ref.index)}") + logger.info( + "Found %i raw prediction output data point(s) with data " + "type '%s'", len(y_ref.index), self._cfg['dtype']) + + # Error + if prediction_name not in self._datasets['prediction_input_error']: + x_err = None + logger.debug( + "Propagating prediction input errors for prediction '%s' not " + "possible, no 'prediction_input_error' datasets given", + self._get_name(prediction_name)) + else: + (x_err, _, _) = self._extract_x_data( + self._datasets['prediction_input_error'][prediction_name], + 'prediction_input_error') + if not x_pred.index.equals(x_err.index): + raise ValueError( + f"Got differing point(s) for prediction input and " + f"prediction input error ({len(x_pred.index):d} " + f"prediction input points and {len(x_err.index):d} " + f"prediction input error points):\n" + f"{x_pred.index.difference(x_err.index)}") + logger.info( + "Found %i raw prediction input error data point(s) with data " + "type '%s'", len(x_err.index), self._cfg['dtype']) + + # Remove missing values if necessary + (x_pred, x_err, y_ref, + mask) = self._remove_missing_pred_input(x_pred, x_err, y_ref) + + # Create cube with 
appropriate mask for output
+        mask = mask.reshape(x_cube.shape)
+        cube_data = np.empty(mask.shape, dtype=self._cfg['dtype'])
+        x_cube.data = np.ma.array(cube_data, mask=mask)
+
+        return (x_pred, x_err, y_ref, x_cube)
+
+    def _extract_x_data(self, datasets, var_type):
+        """Extract required x data of type ``var_type`` from ``datasets``."""
+        allowed_types = ('feature', 'prediction_input',
+                         'prediction_input_error')
+        if var_type not in allowed_types:
+            raise ValueError(
+                f"Expected one of '{allowed_types}' for 'var_type', got "
+                f"'{var_type}'")
+        x_data_for_groups = []
+        x_cube = None
+        if self._cfg['weighted_samples'] and var_type == 'feature':
+            sample_weights_for_groups = []
+        else:
+            sample_weights_for_groups = None
+
+        # Iterate over datasets
+        datasets = select_metadata(datasets, var_type=var_type)
+        if var_type == 'feature':
+            groups = self.group_attributes
+        else:
+            groups = [None]
+        for group_attr in groups:
+            group_datasets = select_metadata(datasets,
+                                             group_attribute=group_attr)
+            if group_attr is not None:
+                logger.info("Loading '%s' data of '%s'", var_type, group_attr)
+            msg = '' if group_attr is None else f" for '{group_attr}'"
+            if not group_datasets:
+                raise ValueError(f"No '{var_type}' data{msg} found")
+            (group_data, x_cube,
+             weights) = self._get_x_data_for_group(group_datasets, var_type,
+                                                   group_attr)
+            x_data_for_groups.append(group_data)
+
+            # Append weights if desired
+            if sample_weights_for_groups is not None:
+                sample_weights_for_groups.append(weights)
+
+        # Adapt sample_weights if necessary
+        if sample_weights_for_groups is not None:
+            sample_weights = pd.concat(sample_weights_for_groups)
+            sample_weights.index = pd.MultiIndex.from_tuples(
+                sample_weights.index, names=self._get_multiindex_names())
+            logger.info(
+                "Successfully calculated sample weights for training data "
+                "using %s", self._cfg['weighted_samples'])
+            if (sample_weights.max().values[0] /
+                    sample_weights.min().values[0]) > 150.0:
+                logger.warning(
+                    "Sample weights differ by more than a factor of 150, got "
+                    "a minimum value of %e and a maximum value of %e. This "
+                    "might be caused by differing coordinates in the training "
+                    "cubes",
+                    sample_weights.min().values[0],
+                    sample_weights.max().values[0])
+        else:
+            sample_weights = None
+
+        # Convert index back to MultiIndex
+        x_data = pd.concat(x_data_for_groups)
+        x_data.index = pd.MultiIndex.from_tuples(
+            x_data.index, names=self._get_multiindex_names())
+
+        return (x_data, x_cube, sample_weights)
+
+    def _extract_y_data(self, datasets, var_type):
+        """Extract required y data of type ``var_type`` from ``datasets``."""
+        allowed_types = ('label', 'prediction_reference')
+        if var_type not in allowed_types:
+            raise ValueError(
+                f"Expected one of '{allowed_types}' for 'var_type', got "
+                f"'{var_type}'")
+        y_data_for_groups = []
+
+        # Iterate over datasets
+        datasets = select_metadata(datasets, var_type=var_type)
+        if var_type == 'label':
+            groups = self.group_attributes
+        else:
+            groups = [None]
+        for group_attr in groups:
+            if group_attr is not None:
+                logger.info("Loading '%s' data of '%s'", var_type, group_attr)
+            msg = '' if group_attr is None else f" for '{group_attr}'"
+            group_datasets = select_metadata(datasets,
+                                             group_attribute=group_attr)
+            dataset = self._check_dataset(group_datasets, var_type, self.label,
+                                          msg)
+            if dataset is None:
+                return None
+            cube = self._load_cube(dataset)
+            text = f"{var_type} '{self.label}'{msg}"
+            self._check_cube_dimensions(cube, None, text)
+            cube_data = pd.DataFrame(
+                self._get_cube_data(cube),
+                columns=[self.label],
+                index=self._get_multiindex(cube, group_attr=group_attr),
+                dtype=self._cfg['dtype'],
+            )
+            y_data_for_groups.append(cube_data)
+
+        # Convert index back to MultiIndex
+        y_data = pd.concat(y_data_for_groups)
+        y_data.index = pd.MultiIndex.from_tuples(
+            y_data.index, names=self._get_multiindex_names())
+
+        return y_data
+
+    def _get_broadcasted_cube(self, dataset, ref_cube, text=None):
+        """Get broadcasted cube."""
+        msg = '' if text is None else text
+        target_shape = ref_cube.shape
+        cube_to_broadcast = self._load_cube(dataset)
+        data_to_broadcast = np.ma.filled(cube_to_broadcast.data, np.nan)
+        logger.info("Broadcasting %s from %s to %s", msg,
+                    data_to_broadcast.shape, target_shape)
+        broadcasted_data = iris.util.broadcast_to_shape(
+            data_to_broadcast, target_shape, dataset['broadcast_from'])
+        new_cube = ref_cube.copy(np.ma.masked_invalid(broadcasted_data))
+        for idx in dataset['broadcast_from']:
+            new_coord = new_cube.coord(dimensions=idx)
+            new_coord.points = cube_to_broadcast.coord(new_coord).points
+        logger.debug("Added broadcasted %s", msg)
+        return new_cube
+
+    def _get_clf_parameters(self, deep=True):
+        """Get parameters of pipeline."""
+        return self._clf.get_params(deep=deep)
+
+    def _get_colors_for_features(self, color_coded=True):
+        """Get colors for bars of feature importance plot."""
+        features = self.features_after_preprocessing
+        if not color_coded:
+            colors = dict(zip(features, ['b'] * len(features)))
+        else:
+            if not np.array_equal(features, self.features):
+                raise ValueError(
+                    f"Extracting color-coded feature colors is not possible "
+                    f"since features changed after preprocessing, before: "
+                    f"{self.features}, after: {features}")
+            colors = {}
+            corrs = self.data['train'][['x', 'y']].corr()
+            for feature in features:
+                corr = corrs.loc[('y', self.label), ('x', feature)]
+                color = 'r' if corr >= 0.0 else 'b'
+                colors[feature] = color
+        return colors
+
+    def _get_cv_estimator_kwargs(self, cv_estimator, **kwargs):
+        """Get keyword arguments for CV estimator class."""
+        fit_kwargs = self.fit_kwargs
+        verbosity = 
self._get_verbosity_parameters(cv_estimator) + cv_kwargs = { + 'n_jobs': self._cfg['n_jobs'], + **verbosity, + } + cv_kwargs.update(kwargs) + logger.info("Using keyword argument(s) %s for class %s", cv_kwargs, + cv_estimator) + if isinstance(cv_kwargs.get('cv'), str): + if cv_kwargs['cv'].lower() == 'loo': + cv_kwargs['cv'] = LeaveOneOut() + if cv_kwargs['cv'].lower() == 'logo': + cv_kwargs['cv'] = self._get_logo_cv_kwargs()['cv'] + fit_kwargs['groups'] = self._get_logo_cv_kwargs()['groups'] + return (cv_kwargs, fit_kwargs) + + def _get_features(self): + """Extract all features from the ``prediction_input`` datasets.""" + logger.debug("Extracting features from 'prediction_input' datasets") + pred_name = list(self._datasets['prediction_input'].keys())[0] + pred_name_str = self._get_name(pred_name) + datasets = self._datasets['prediction_input'][pred_name] + (units, + types) = self._get_features_of_datasets(datasets, 'prediction_input', + pred_name) + + # Mark categorical variables + categorical = {feature: False for feature in types} + for tag in self._cfg.get('categorical_features', []): + if tag in categorical: + logger.debug("Treating '%s' as categorical feature", tag) + categorical[tag] = True + else: + raise ValueError( + f"Cannot treat '{tag}' as categorical variable, feature " + f"not found") + + # Check if features were found + if not units: + raise ValueError( + f"No features for 'prediction_input' data for prediction " + f"'{pred_name_str}' found") + + # Check for wrong options + if self._cfg.get('accept_only_scalar_data'): + if 'broadcasted' in types.values(): + raise TypeError( + "The use of 'broadcast_from' is not possible if " + "'accept_only_scalar_data' is given") + if 'coordinate' in types.values(): + raise TypeError( + "The use of 'coords_as_features' is not possible if " + "'accept_only_scalar_data' is given") + + # Convert to DataFrame and sort it + units = pd.DataFrame.from_dict(units, + orient='index', + columns=['units']) + types = pd.DataFrame.from_dict(types, + orient='index', + columns=['types']) + categorical = pd.DataFrame.from_dict(categorical, + orient='index', + columns=['categorical']) + features = pd.concat([units, types, categorical], axis=1).sort_index() + + # Return features + logger.info( + "Found %i feature(s) (defined in 'prediction_input' data for " + "prediction '%s')", len(features.index), pred_name_str) + for feature in features.index: + logger.debug("'%s' with units '%s' and type '%s'", feature, + features.units.loc[feature], + features.types.loc[feature]) + return features + + def _get_features_of_datasets(self, datasets, var_type, pred_name): + """Extract all features (with units and types) of given datasets.""" + pred_name_str = self._get_name(pred_name) + units = {} + types = {} + cube = None + ref_cube = None + for (tag, datasets_) in group_metadata(datasets, 'tag').items(): + dataset = datasets_[0] + cube = self._load_cube(dataset) + if 'broadcast_from' not in dataset: + ref_cube = cube + units[tag] = Unit(dataset['units']) + if 'broadcast_from' in dataset: + types[tag] = 'broadcasted' + else: + types[tag] = 'regular' + + # Check if reference cube was given + if ref_cube is None: + if cube is None: + raise ValueError( + f"Expected at least one '{var_type}' dataset for " + f" prediction '{pred_name_str}'") + raise ValueError( + f"Expected at least one '{var_type}' dataset for prediction " + f"'{pred_name_str}' without the option 'broadcast_from'") + + # Coordinate features + for coord_name in self._cfg.get('coords_as_features', []): + try: + 
coord = ref_cube.coord(coord_name) + except iris.exceptions.CoordinateNotFoundError as exc: + raise iris.exceptions.CoordinateNotFoundError( + f"Coordinate '{coord_name}' given in 'coords_as_features' " + f"not found in '{var_type}' data for prediction " + f"'{pred_name_str}'") from exc + units[coord_name] = coord.units + types[coord_name] = 'coordinate' + + return (units, types) + + def _get_group_attributes(self): + """Get all group attributes from ``label`` datasets.""" + logger.debug("Extracting group attributes from 'label' datasets") + grouped_datasets = group_metadata(self._datasets['label'], + 'group_attribute', + sort=True) + group_attributes = list(grouped_datasets.keys()) + if group_attributes == [None]: + logger.debug("No group attributes given") + else: + logger.info( + "Found %i group attribute(s) (defined in 'label' data)", + len(group_attributes)) + logger.debug(pformat(group_attributes)) + return np.array(group_attributes) + + def _get_label(self): + """Extract label from training data.""" + logger.debug("Extracting label from training datasets") + grouped_datasets = group_metadata(self._datasets['label'], 'tag') + labels = list(grouped_datasets.keys()) + if len(labels) > 1: + raise ValueError(f"Expected unique label tag, got {labels}") + units = Unit(self._datasets['label'][0]['units']) + logger.info( + "Found label '%s' with units '%s' (defined in 'label' " + "data)", labels[0], units) + label = pd.DataFrame.from_dict({labels[0]: units}, + orient='index', + columns=['units']) + return label + + def _get_lime_feature_importance(self, x_pred): + """Get most important feature given by LIME.""" + logger.info( + "Calculating local feature importance using LIME (this may take " + "a while...)") + x_pred = self._impute_nans(x_pred) + + # Most important feature for single input + def _most_important_feature(x_single_pred, explainer, predict_fn): + """Get most important feature for single input. + + Note + ---- + Ignore warnings about missing feature names here because they are + not used. 
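+
+            The return value is the list of absolute LIME coefficients
+            normalized to sum to one, e.g. (illustrative numbers)
+            ``local_exp[1] == [(0, 0.2), (1, -0.6), (2, 0.2)]`` yields
+            ``[0.2, 0.6, 0.2]``.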
+ + """ + with warnings.catch_warnings(): + warnings.filterwarnings( + 'ignore', + message=('X does not have valid feature names, but ' + 'SimpleImputer was fitted with feature names'), + category=UserWarning, + module='sklearn', + ) + explanation = explainer.explain_instance(x_single_pred, + predict_fn) + local_exp = explanation.local_exp[1] + sorted_exp = sorted(local_exp, key=lambda elem: elem[0]) + norm = sum(abs(elem[1]) for elem in sorted_exp) + return [abs(elem[1]) / norm for elem in sorted_exp] + + # Apply on whole input (using multiple processes) + parallel = Parallel(n_jobs=self._cfg['n_jobs']) + lime_feature_importance = parallel( + [ + delayed(_most_important_feature)( + x, + explainer=self._lime_explainer, + predict_fn=self._clf.predict, + ) for x in x_pred.values + ] + ) + lime_feature_importance = np.array(lime_feature_importance, + dtype=self._cfg['dtype']) + lime_feature_importance = np.moveaxis(lime_feature_importance, -1, 0) + lime_feature_importance = dict(zip(self.features, + lime_feature_importance)) + return lime_feature_importance + + def _get_logo_cv_kwargs(self): + """Get :class:`sklearn.model_selection.LeaveOneGroupOut` CV.""" + if not self._cfg['group_datasets_by_attributes']: + raise ValueError( + "Cannot create 'LeaveOneGroupOut' CV splitter, " + "'group_datasets_by_attributes' was not given during " + "class initialization") + kwargs = { + 'cv': LeaveOneGroupOut(), + 'groups': self.data['train'].y.index.get_level_values(0).values, + } + return kwargs + + def _get_mask(self, x_data, data_type): + """Get mask for missing features.""" + x_regular = x_data[self.features[self.features_types == 'regular']] + + # Get points where no regular feature is given + mask = x_regular.isnull().all(axis=1).values + logger.debug( + "Removing %i %s point(s) where all regular features are missing", + mask.sum(), data_type) + + # Get other missing points if desired + if self._cfg['imputation_strategy'] == 'remove': + mask = x_data.isnull().any(axis=1).values + logger.debug( + "Removing total %i %s point(s) where at least one feature is " + "missing (because imputation_strategy = 'remove')", mask.sum(), + data_type) + + return mask + + def _get_multiindex(self, ref_cube, group_attr=None): + """Get :class:`pandas.MultiIndex` for data.""" + group_attr = self._group_attr_to_pandas_index_str(group_attr) + index = pd.MultiIndex.from_product( + [[group_attr], np.arange(ref_cube.data.size)], + names=self._get_multiindex_names(), + ) + return index + + def _get_multiindex_names(self): + """Get names for :class:`pandas.MultiIndex` for data.""" + return ['-'.join(self._cfg['group_datasets_by_attributes']), 'index'] + + def _get_plot_feature(self, feature): + """Get :obj:`str` of selected ``feature`` and respective units.""" + units = self._get_plot_units(self.features_units[feature]) + return f'{feature} [{units}]' + + def _get_plot_label(self): + """Get :obj:`str` of label and respective units.""" + return f'{self.label} [{self._get_plot_units(self.label_units)}]' + + def _get_plot_units(self, units): + """Get plot units version of specified ``units``.""" + return self._cfg['plot_units'].get(str(units), str(units)) + + def _get_prediction_dict(self, pred_name, x_pred, x_err, y_ref, + get_mlr_model_error=None, + get_lime_importance=False, + get_propagated_errors=False, **kwargs): + """Get prediction output in a dictionary.""" + logger.info("Predicting %i point(s)", len(x_pred.index)) + y_preds = self._clf.predict(x_pred, **kwargs) + pred_dict = self._prediction_to_dict(y_preds, **kwargs) + 
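+        # The dictionary maps prediction types to arrays: ``None`` is the
+        # point estimate; the optional steps below may add 'var' or 'cov'
+        # (when requested via **kwargs), 'squared_mlr_model_error_estim',
+        # 'lime_importance___<feature>', 'squared_propagated_input_error'
+        # and 'residual'.  Each entry is later saved as an individual cube
+        # by _save_prediction_cubes().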
+ # Estimate error of MLR model itself + if get_mlr_model_error: + pred_dict['squared_mlr_model_error_estim'] = ( + self._estimate_mlr_model_error(len(x_pred.index), + get_mlr_model_error)) + + # LIME feature importance + if get_lime_importance: + lime_importance = self._get_lime_feature_importance(x_pred) + for (feature, importance) in lime_importance.items(): + pred_dict[f'lime_importance___{feature}'] = importance + + # Propagate prediction input errors + if get_propagated_errors: + if x_err is None: + raise ValueError( + f"'save_propagated_errors' is not possible because no " + f"'prediction_input_error' data for prediction " + f"'{self._get_name(pred_name)}' is available") + pred_dict['squared_propagated_input_error'] = ( + self._propagate_input_errors(x_pred, x_err)) + + # Calculate residuals relative to reference if possible + if y_ref is not None: + y_ref = y_ref.values + if y_ref.ndim == 2 and y_ref.shape[1] == 1: + y_ref = np.squeeze(y_ref, axis=1) + pred_dict['residual'] = self._get_residuals(y_ref, pred_dict[None]) + + # Return dictionary + for pred_type in pred_dict: + if pred_type is not None: + logger.debug("Found additional prediction type '%s'", + pred_type) + logger.info( + "Successfully created prediction array(s) with %i point(s)", + pred_dict[None].size) + return pred_dict + + def _get_prediction_dtype(self): + """Get ``dtype`` of the output of final regressor's ``predict()``.""" + x_data = self.data['train'].x.iloc[:1] + y_pred = self._clf.predict(x_data) + return y_pred.values.dtype + + def _get_prediction_properties(self): + """Get important properties of prediction input.""" + properties = {} + for attr in ('dataset', 'exp', 'project', 'start_year', 'end_year'): + attrs = list(group_metadata(self._datasets['label'], attr).keys()) + properties[attr] = attrs[0] + if len(attrs) > 1: + if attr == 'start_year': + properties[attr] = min(attrs) + elif attr == 'end_year': + properties[attr] = max(attrs) + else: + properties[attr] = '|'.join(sorted(attrs)) + logger.debug( + "Attribute '%s' of label data is not unique, got values " + "%s, using '%s' for prediction cubes", attr, attrs, + properties[attr]) + return properties + + def _get_reference_cube(self, datasets, var_type, text=None): + """Get reference cube for ``datasets``.""" + msg = '' if text is None else text + regular_features = self.features[self.features_types == 'regular'] + + for tag in regular_features: + dataset = self._check_dataset(datasets, var_type, tag, msg) + if dataset is not None: + ref_cube = self._load_cube(dataset) + logger.debug( + "For var_type '%s'%s, use reference cube with tag '%s'", + var_type, msg, tag) + logger.debug(ref_cube.summary(shorten=True)) + return ref_cube + raise ValueError(f"No {var_type} data{msg} without the option " + f"'broadcast_from' found") + + def _get_sample_weights(self, data_type): + """Get sample weights of desired data.""" + data_frame = self.data[data_type] + if 'sample_weight' not in data_frame: + return None + return data_frame.sample_weight.squeeze().values + + def _get_verbosity_parameters(self, function, boolean=False): + """Get verbosity parameters for class initialization.""" + verbosity_params = { + 'silent': { + 'debug': False, + 'info': False, + 'default': True, + }, + 'verbose': { + 'debug': 1, + 'info': 0, + 'default': 0, + }, + 'verbosity': { + 'debug': 2, + 'info': 1, + 'default': 0, + }, + } + parameters = {} + for (param, log_levels) in verbosity_params.items(): + all_params = ( + getfullargspec(function).args + + 
getfullargspec(function).kwonlyargs + ) + if param in all_params: + parameters[param] = log_levels.get(self._cfg['log_level'], + log_levels['default']) + if boolean: + parameters[param] = bool(parameters[param]) + logger.debug("Set verbosity parameter '%s' of %s to '%s'", + param, str(function), parameters[param]) + return parameters + + def _get_x_data_for_group(self, datasets, var_type, group_attr=None): + """Get x data for a group of datasets.""" + msg = '' if group_attr is None else f" for '{group_attr}'" + ref_cube = self._get_reference_cube(datasets, var_type, msg) + group_data = pd.DataFrame( + columns=self.features, + index=self._get_multiindex(ref_cube, group_attr=group_attr), + dtype=self._cfg['dtype'], + ) + sample_weights = self._calculate_sample_weights(ref_cube, + var_type, + group_attr=group_attr) + + # Iterate over all features + for tag in self.features: + if self.features_types[tag] != 'coordinate': + dataset = self._check_dataset(datasets, var_type, tag, msg) + + # No dataset found + if dataset is None: + if var_type == 'prediction_input_error': + logger.debug( + "Prediction input error of '%s'%s not available, " + "setting it to 0.0", tag, msg) + new_data = 0.0 + else: + new_data = np.nan + + # Found exactly one dataset + else: + text = f"{var_type} '{tag}'{msg}" + + # Broadcast if necessary + if 'broadcast_from' in dataset: + cube = self._get_broadcasted_cube( + dataset, ref_cube, text) + else: + cube = self._load_cube(dataset) + self._check_cube_dimensions(cube, ref_cube, text) + + # Do not accept errors for categorical features + if (var_type == 'prediction_input_error' + and tag in self.categorical_features): + raise ValueError( + f"Specifying prediction input error for " + f"categorical feature '{tag}'{msg} is not " + f"possible") + new_data = self._get_cube_data(cube) + + # Load coordinate feature data + else: + new_data = self._get_coordinate_data(ref_cube, var_type, tag, + msg) + + # Save data + new_data = np.array(new_data) + if new_data.size != ref_cube.data.size: + new_data = np.broadcast_to(new_data, (ref_cube.data.size,)) + group_data[tag] = new_data + + # Return data and reference cube + logger.debug("Found %i raw '%s' input data points%s", + len(group_data.index), var_type, msg) + return (group_data, ref_cube, sample_weights) + + def _group_by_attributes(self, datasets): + """Group datasets by specified attributes.""" + attributes = self._cfg['group_datasets_by_attributes'] + if not attributes: + if self._cfg.get('accept_only_scalar_data'): + attributes = ['dataset'] + logger.warning("Automatically set 'group_datasets_by_'" + "attributes' to ['dataset'] because 'accept_" + "only_scalar_data' is given") + else: + for dataset in datasets: + dataset['group_attribute'] = None + return datasets + for dataset in datasets: + dataset['group_attribute'] = mlr.create_alias(dataset, attributes) + logger.info("Grouped feature and label datasets by %s", attributes) + return datasets + + def _impute_nans(self, data_frame, copy=True): + """Impute all nans of a given :class:`pandas.DataFrame`.""" + if copy: + data_frame = data_frame.copy() + if 'feature_selection' in self._clf.named_steps: + support = self._clf.named_steps['feature_selection'].support + else: + support = None + if 'imputer' in self._clf.named_steps: + transform = self._clf.named_steps['imputer'].transform + if 'x' in data_frame.columns: + if support is not None: + data_frame.x.values[:, support] = transform( + data_frame.x.iloc[:, support]) + data_frame = data_frame.fillna(data_frame.mean()) + else: + 
data_frame.x.values[:] = transform(data_frame.x) + else: + if support is not None: + data_frame.values[:, support] = transform( + data_frame.iloc[:, support]) + data_frame = data_frame.fillna(data_frame.mean()) + else: + data_frame.values[:] = transform(data_frame) + return data_frame + + def _is_ready_for_plotting(self): + """Check if the class is ready for plotting.""" + self._check_fit_status('Plotting') + return True + + def _load_classes(self): + """Populate :attribute:`_classes` and check for errors.""" + self._classes['group_attributes'] = self._get_group_attributes() + self._classes['features'] = self._get_features() + self._classes['label'] = self._get_label() + + def _load_cube(self, dataset): + """Load iris cube, check data type and convert units if desired.""" + logger.debug("Loading %s", dataset['filename']) + cube = iris.load_cube(dataset['filename']) + + # Check dtype + if not np.issubdtype(cube.dtype, np.number): + raise TypeError( + f"Data type of cube loaded from '{dataset['filename']}' is " + f"'{cube.dtype}', at the moment only numeric data is " + f"supported") + + # Convert dtypes + cube.data = cube.core_data().astype(self._cfg['dtype'], + casting='same_kind') + for coord in cube.coords(): + try: + coord.points = coord.points.astype(self._cfg['dtype'], + casting='same_kind') + except TypeError: + logger.debug( + "Cannot convert dtype of coordinate array '%s' from '%s' " + "to '%s'", coord.name(), coord.points.dtype, + self._cfg['dtype']) + + # Convert and check units + if dataset.get('convert_units_to'): + self._convert_units_in_cube(cube, dataset['convert_units_to']) + if not cube.units == Unit(dataset['units']): + raise ValueError( + f"Units of cube '{dataset['filename']}' for " + f"{dataset['var_type']} '{dataset['tag']}' differ from units " + f"given in dataset list, got '{cube.units}' in cube and " + f"'{dataset['units']}' in dataset list") + return cube + + def _load_data(self): + """Load train/test data (features/labels).""" + (x_all, y_all, sample_weights) = self._extract_features_and_labels() + + # Normalize and add sample weights if necessary + objs = [x_all, y_all] + keys = ['x', 'y'] + if sample_weights is not None: + sample_weights /= sample_weights.mean() + objs.append(sample_weights) + keys.append('sample_weight') + + # Save complete data + self._data['all'] = pd.concat(objs, axis=1, keys=keys) + if len(y_all.index) < 2: + raise ValueError( + f"Need at least 2 data points for MLR training, got only " + f"{len(y_all.index)}") + logger.info("Loaded %i input data point(s)", len(y_all.index)) + + # Split train/test data if desired + test_size = self._cfg['test_size'] + if test_size: + (self._data['train'], self._data['test']) = train_test_split( + self._data['all'].copy(), + test_size=test_size, + random_state=self.random_state, + ) + self._data['train'] = self._data['train'].sort_index() + self._data['test'] = self._data['test'].sort_index() + for data_type in ('train', 'test'): + if len(self.data[data_type].index) < 2: + raise ValueError( + f"Need at least 2 datasets for '{data_type}' data, " + f"got {len(self.data[data_type].index)}") + logger.info( + "Using %i%% of the input data as test data (%i point(s))", + int(test_size * 100), len(self.data['test'].index)) + logger.info("%i point(s) remain(s) for training", + len(self.data['train'].index)) + else: + self._data['train'] = self.data['all'].copy() + logger.info("Using all %i input data point(s) for training", + len(y_all.index)) + + def _load_final_parameters(self): + """Load parameters for final 
regressor.""" + parameters = self._cfg.get('parameters_final_regressor', {}) + + # Update parameters + self._update_random_state_parameter(self._CLF_TYPE, parameters) + verbosity_params = self._get_verbosity_parameters(self._CLF_TYPE) + for (param, verbosity) in verbosity_params.items(): + parameters.setdefault(param, verbosity) + + logger.debug("Using parameter(s) for final regressor: %s", parameters) + return parameters + + def _load_input_datasets(self, input_datasets): + """Load input datasets.""" + input_datasets = deepcopy(input_datasets) + + # Catch invalid var_types + if not mlr.datasets_have_mlr_attributes( + input_datasets, log_level='error', mode='only_var_type'): + raise ValueError("Data with invalid 'var_type' given") + + # Training datasets + feature_datasets = select_metadata(input_datasets, var_type='feature') + label_datasets = select_metadata(input_datasets, var_type='label') + + # Prediction datasets + pred_in_datasets = select_metadata(input_datasets, + var_type='prediction_input') + pred_in_err_datasets = select_metadata( + input_datasets, var_type='prediction_input_error') + pred_ref_datasets = select_metadata(input_datasets, + var_type='prediction_reference') + + # Check datasets + msg = ("At least one '{}' dataset does not have necessary MLR " + "attributes") + datasets_to_check = { + 'feature': feature_datasets, + 'label': label_datasets, + 'prediction_input': pred_in_datasets, + 'prediction_input_error': pred_in_err_datasets, + 'prediction_reference': pred_ref_datasets, + } + for (label, datasets) in datasets_to_check.items(): + if not mlr.datasets_have_mlr_attributes(datasets, + log_level='error'): + raise ValueError(msg.format(label)) + + # Check if data was found + if not feature_datasets: + raise ValueError("No 'feature' data found") + if not label_datasets: + raise ValueError("No 'label' data found") + if not pred_in_datasets: + raise ValueError("No 'prediction_input' data found") + + # Convert units + self._convert_units_in_metadata(feature_datasets) + self._convert_units_in_metadata(label_datasets) + self._convert_units_in_metadata(pred_in_datasets) + self._convert_units_in_metadata(pred_in_err_datasets) + self._convert_units_in_metadata(pred_ref_datasets) + + # Save datasets + logger.info( + "Found %i 'feature' dataset(s), %i 'label' dataset(s), %i " + "'prediction_input' dataset(s), %i 'prediction_input_error' " + "dataset(s) and %i 'prediction_reference' datasets(s)", + len(feature_datasets), len(label_datasets), len(pred_in_datasets), + len(pred_in_err_datasets), len(pred_ref_datasets)) + labeled_datasets = { + 'Feature': feature_datasets, + 'Label': label_datasets, + 'Prediction input': pred_in_datasets, + 'Prediction input error': pred_in_err_datasets, + 'Prediction output': pred_ref_datasets, + } + for (msg, datasets) in labeled_datasets.items(): + logger.debug("%s datasets:", msg) + logger.debug(pformat([d['filename'] for d in datasets])) + self._datasets['feature'] = self._group_by_attributes(feature_datasets) + self._datasets['label'] = self._group_by_attributes(label_datasets) + self._datasets['prediction_input'] = self._group_prediction_datasets( + pred_in_datasets) + self._datasets['prediction_input_error'] = ( + self._group_prediction_datasets(pred_in_err_datasets)) + self._datasets['prediction_reference'] = ( + self._group_prediction_datasets(pred_ref_datasets)) + + def _load_lime_explainer(self): + """Load :class:`lime.lime_tabular.LimeTabularExplainer`.""" + x_train = self.get_x_array('train', impute_nans=True) + y_train = 
self.get_y_array('train', impute_nans=True) + verbosity = self._get_verbosity_parameters(LimeTabularExplainer, + boolean=True) + verbosity = {param: False for param in verbosity} + categorical_features_idx = [ + int(np.where(self.features == tag)[0][0]) + for tag in self.categorical_features + ] + self._lime_explainer = LimeTabularExplainer( + x_train, + mode='regression', + training_labels=y_train, + feature_names=self.features, + categorical_features=categorical_features_idx, + discretize_continuous=False, + sample_around_instance=True, + random_state=self.random_state, + **verbosity, + ) + logger.debug( + "Loaded %s with new training data", str(LimeTabularExplainer)) + + def _mask_prediction_array(self, y_pred, ref_cube): + """Apply mask of reference cube to prediction array.""" + mask = np.ma.getmaskarray(ref_cube.data).ravel() + if y_pred.ndim == 1 and y_pred.shape[0] != mask.shape[0]: + new_y_pred = np.empty(mask.shape[0], dtype=self._cfg['dtype']) + new_y_pred[mask] = np.nan + new_y_pred[~mask] = y_pred + else: + new_y_pred = y_pred + return np.ma.masked_invalid(new_y_pred) + + def _plot_feature_importance(self, feature_importance_dict, colors, + plot_path): + """Plot feature importance.""" + logger.info("Plotting feature importance") + (_, axes) = plt.subplots() + + # Sort data and get position of bars + features = np.array(list(feature_importance_dict.keys())) + feature_importances = np.array(list(feature_importance_dict.values())) + sorted_idx = np.argsort(feature_importances) + pos = np.arange(sorted_idx.shape[0]) + 0.5 + + # Write cube with feature importance for provenance tracking + ancestors = self.get_ancestors(prediction_names=[]) + cube = mlr.get_1d_cube( + features, + feature_importances, + x_kwargs={'var_name': 'feature', + 'long_name': 'Feature name', + 'units': 'no unit'}, + y_kwargs={'var_name': 'feature_importance', + 'long_name': 'Relative Feature Importance', + 'units': '1', + 'attributes': {'project': '', 'dataset': ''}}, + ) + + # Plot + for (idx, importance) in enumerate(feature_importances[sorted_idx]): + feature = features[sorted_idx][idx] + axes.barh(pos[idx], importance, align='center', + color=colors[feature]) + + # Plot appearance + axes.tick_params(axis='y', which='minor', left=False, right=False) + axes.tick_params(axis='y', which='major', left=True, right=False) + title = f"Global feature importance ({self._cfg['mlr_model_name']})" + axes.set_title(title) + axes.set_xlabel('Relative Importance') + axes.set_yticks(pos) + axes.set_yticklabels(features[sorted_idx]) + + # Save plot and provenance + plt.savefig(plot_path, **self._cfg['savefig_kwargs']) + logger.info("Wrote %s", plot_path) + self._write_plot_provenance(cube, plot_path, ancestors=ancestors, + caption=title + '.', plot_types=['bar']) + + # Save additional plot with logarithmic X axis + axes.set_xscale('log') + axes.xaxis.set_major_formatter(ScalarFormatter()) + ext = os.path.splitext(plot_path)[1] + plot_path_log = plot_path.replace(ext, f'_log{ext}') + plt.savefig(plot_path_log, **self._cfg['savefig_kwargs']) + logger.info("Wrote %s", plot_path_log) + self._write_plot_provenance(cube, plot_path_log, ancestors=ancestors, + caption=title + '.', plot_types=['bar']) + plt.close() + + def _prediction_to_dict(self, pred_out, **kwargs): + """Convert output of final regressor's ``predict()`` to :obj:`dict`.""" + if not isinstance(pred_out, (list, tuple)): + pred_out = [pred_out] + idx_to_name = {0: None} + if 'return_var' in kwargs: + idx_to_name[1] = 'var' + elif 'return_cov' in kwargs: + 
idx_to_name[1] = 'cov' + pred_dict = {} + for (idx, pred) in enumerate(pred_out): + pred = pred.astype(self._cfg['dtype'], casting='same_kind') + if pred.ndim == 2 and pred.shape[1] == 1: + logger.warning( + "Prediction output is 2D and length of second axis is 1, " + "squeezing second axis") + pred = np.squeeze(pred, axis=1) + pred_dict[idx_to_name.get(idx, idx)] = pred + return pred_dict + + def _pred_type_to_metadata(self, pred_type, cube): + """Get correct :mod:`iris.cube.CubeMetadata` of prediction cube.""" + standard_name = cube.standard_name + var_name = cube.var_name + long_name = cube.long_name + units = cube.units + attributes = cube.attributes + suffix = '' if pred_type is None else f'_{pred_type}' + error_types = { + 'var': ' (variance)', + 'cov': ' (covariance)', + 'squared_mlr_model_error_estim': (' (squared MLR model error ' + 'estimation using hold-out test ' + 'data set)'), + 'squared_propagated_input_error': (' (squared propagated error of ' + 'prediction input estimated by ' + 'LIME)'), + } + if pred_type is None: + attributes['var_type'] = 'prediction_output' + elif isinstance(pred_type, int): + var_name += f'_{pred_type:d}' + long_name += f' {pred_type:d}' + logger.warning("Got unknown prediction type with index %i", + pred_type) + attributes['var_type'] = 'prediction_output_misc' + elif pred_type in error_types: + var_name += suffix + long_name += error_types[pred_type] + units = mlr.units_power(cube.units, 2) + attributes['var_type'] = 'prediction_output_error' + attributes['squared'] = 1 + elif 'lime_importance___' in pred_type: + standard_name = None + feature = pred_type.replace('lime_importance___', '') + var_name = f'importance_of_feature_{feature}' + long_name = (f'Local importance of feature {feature} for ' + f'predicting {self.label} given by LIME') + units = Unit('1') + attributes['var_type'] = 'prediction_output_misc' + elif pred_type == 'residual': + var_name += suffix + long_name += ' (residual)' + attributes['residual'] = 'true minus predicted values' + attributes['var_type'] = 'prediction_residual' + else: + raise ValueError(f"Got unknown prediction type '{pred_type}'") + return iris.cube.CubeMetadata( + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + units=units, + attributes=attributes, + cell_methods=cube.cell_methods, + ) + + def _print_metrics(self, regression_metrics, data_type): + """Print regression metrics.""" + if data_type not in self.data: + return + logger.info("Evaluating regression metrics for %s data", data_type) + x_data = self.data[data_type].x + y_true = self.get_y_array(data_type) + y_pred = self._clf.predict(x_data) + sample_weights = self._get_sample_weights(data_type) + for metric in regression_metrics: + metric_function = getattr(metrics, metric) + value = metric_function(y_true, y_pred) + if 'squared' in metric: + value = np.sqrt(value) + metric = f'root_{metric}' + logger.info("%s: %s", metric, value) + if sample_weights is None: + return + for metric in regression_metrics: + metric_function = getattr(metrics, metric) + value = metric_function(y_true, y_pred, + sample_weight=sample_weights) + if 'squared' in metric: + value = np.sqrt(value) + metric = f'root_{metric}' + logger.info("Weighted %s: %s", metric, value) + + def _propagate_input_errors(self, x_pred, x_err): + """Propagate errors from prediction input.""" + logger.info( + "Propagating prediction input errors using LIME (this may take a " + "while...)") + if 'feature_selection' in self._clf.named_steps: + logger.warning( + "Propagating input 
errors might not work correctly when a " + "'feature_selection' step is present (usually because of " + "calling rfecv())") + x_pred = self._impute_nans(x_pred) + + # Propagated error for single input + def _propagated_error(x_single_pred, x_single_err, explainer, + predict_fn, features, categorical_features): + """Get propagated prediction input error for single input. + + Note + ---- + Ignore warnings about missing feature names here because they are + not used. + + """ + with warnings.catch_warnings(): + warnings.filterwarnings( + 'ignore', + message=('X does not have valid feature names, but ' + 'SimpleImputer was fitted with feature names'), + category=UserWarning, + module='sklearn', + ) + exp = explainer.explain_instance(x_single_pred, predict_fn) + x_single_err = np.nan_to_num(x_single_err) + x_err_scaled = x_single_err / explainer.scaler.scale_ + squared_error = 0.0 + for (idx, coef) in exp.local_exp[1]: + if features[idx] in categorical_features: + continue + squared_error += (x_err_scaled[idx] * coef)**2 + return squared_error + + # Apply on whole input (using multiple processes) + parallel = Parallel(n_jobs=self._cfg['n_jobs']) + errors = parallel( + [delayed(_propagated_error)( + x, x_e, explainer=self._lime_explainer, + predict_fn=self._clf.predict, + features=self.features, + categorical_features=self.categorical_features, + ) for (x, x_e) in zip(x_pred.values, x_err.values)] + ) + return np.array(errors, dtype=self._cfg['dtype']) + + def _remove_missing_features(self, x_data, y_data, sample_weights): + """Remove missing values in the features data (if desired).""" + mask = self._get_mask(x_data, 'training') + x_data = x_data[~mask] + y_data = y_data[~mask] + if sample_weights is not None: + sample_weights = sample_weights[~mask] + diff = mask.sum() + if diff: + msg = ('Removed %i training point(s) where features were ' + 'missing') + if self._cfg.get('accept_only_scalar_data'): + removed_groups = self.group_attributes[mask] + msg += f' ({removed_groups})' + self._classes['group_attributes'] = ( + self.group_attributes[~mask]) + logger.info(msg, diff) + return (x_data, y_data, sample_weights) + + def _remove_missing_pred_input(self, x_pred, x_err=None, y_ref=None): + """Remove missing values in the prediction input data.""" + mask = self._get_mask(x_pred, 'prediction input') + x_pred = x_pred[~mask] + if x_err is not None: + x_err = x_err[~mask] + if y_ref is not None: + y_ref = y_ref[~mask] + diff = mask.sum() + if diff: + logger.info( + "Removed %i prediction input point(s) where features were " + "missing", diff) + return (x_pred, x_err, y_ref, mask) + + def _save_prediction_cubes(self, pred_dict, pred_name, x_cube): + """Save (multi-dimensional) prediction output.""" + logger.debug("Creating output cubes") + for (pred_type, y_pred) in pred_dict.items(): + y_pred = self._mask_prediction_array(y_pred, x_cube) + if y_pred.size == np.prod(x_cube.shape, dtype=np.int64): + pred_cube = x_cube.copy(y_pred.reshape(x_cube.shape)) + else: + dim_coords = [] + for (dim_idx, dim_size) in enumerate(y_pred.shape): + dim_coords.append((iris.coords.DimCoord( + np.arange(dim_size, dtype=np.float64), + long_name=f'MLR prediction index {dim_idx}', + var_name=f'idx_{dim_idx}'), dim_idx)) + pred_cube = iris.cube.Cube(y_pred, + dim_coords_and_dims=dim_coords) + new_path = self._set_prediction_cube_attributes( + pred_cube, pred_type, pred_name=pred_name) + io.iris_save(pred_cube, new_path) + + # Save provenance + ancestors = self.get_ancestors( + prediction_names=[pred_name], + 
prediction_reference=pred_type == 'residual') + record = { + 'ancestors': ancestors, + 'authors': ['schlund_manuel'], + 'caption': (f"{pred_cube.long_name} of MLR model " + f"{self._cfg['mlr_model_name']} for prediction " + f"{pred_name}."), + 'references': ['schlund20jgr'], + } + with ProvenanceLogger(self._cfg) as provenance_logger: + provenance_logger.log(new_path, record) + + def _save_csv_file(self, data_type, filename, pred_name=None): + """Save CSV file.""" + if data_type not in self.data: + return + if data_type == 'pred': + csv_data = self.data[data_type][pred_name] + else: + csv_data = self.data[data_type] + + # Filename and path + if filename is None: + if data_type == 'pred': + filename = '{data_type}_{pred_name}.csv' + format_kwargs = { + 'data_type': data_type, + 'pred_name': self._get_name(pred_name), + } + else: + filename = '{data_type}.csv' + format_kwargs = {'data_type': data_type} + filename = filename.format(**format_kwargs) + path = os.path.join(self._cfg['mlr_work_dir'], filename) + + # Save file + csv_data.to_csv(path, na_rep='nan') + logger.info("Wrote %s", path) + + def _set_default_settings(self): + """Set default (non-``False``) keyword arguments.""" + self._cfg.setdefault('weighted_samples', {}) + self._cfg.setdefault('cache_intermediate_results', True) + self._cfg.setdefault('dtype', 'float64') + self._cfg.setdefault('fit_kwargs', {}) + self._cfg.setdefault('group_datasets_by_attributes', []) + self._cfg.setdefault('imputation_strategy', 'remove') + self._cfg.setdefault('log_level', 'info') + self._cfg.setdefault('mlr_model_name', f'{self._CLF_TYPE} model') + self._cfg.setdefault('n_jobs', 1) + self._cfg.setdefault('output_file_type', 'png') + self._cfg.setdefault('parameters', {}) + self._cfg.setdefault('plot_dir', + os.path.expanduser(os.path.join('~', 'plots'))) + self._cfg.setdefault('plot_units', {}) + self._cfg.setdefault('random_state', None) + self._cfg.setdefault('savefig_kwargs', { + 'bbox_inches': 'tight', + 'dpi': 300, + 'orientation': 'landscape', + }) + self._cfg.setdefault('standardize_data', True) + self._cfg.setdefault('sub_dir', '') + self._cfg.setdefault('test_size', 0.25) + self._cfg.setdefault('work_dir', + os.path.expanduser(os.path.join('~', 'work'))) + logger.info("Using imputation strategy '%s'", + self._cfg['imputation_strategy']) + if self._cfg['fit_kwargs']: + logger.info( + "Using additional keyword argument(s) %s for fit() function", + self._cfg['fit_kwargs']) + + def _set_prediction_cube_attributes(self, cube, pred_type, pred_name=None): + """Set the attributes of the prediction cube.""" + cube.cell_methods = None + cube.attributes = { + 'description': 'MLR model prediction', + 'mlr_model_name': self._cfg['mlr_model_name'], + 'mlr_model_type': self.mlr_model_type, + 'final_regressor': str(self._CLF_TYPE), + 'prediction_name': self._get_name(pred_name), + 'tag': self.label, + } + cube.attributes.update(self._get_prediction_properties()) + for (key, val) in self.parameters.items(): + cube.attributes[key] = str(val) + cube.attributes['mlr_parameters'] = list(self.parameters.keys()) + label_cube = self._load_cube(self._datasets['label'][0]) + for attr in ('standard_name', 'var_name', 'long_name', 'units'): + setattr(cube, attr, getattr(label_cube, attr)) + + # Modify cube metadata depending on prediction type + cube.metadata = self._pred_type_to_metadata(pred_type, cube) + + # Get new path + suffix = '' if pred_type is None else f'_{pred_type}' + pred_str = f'_for_prediction_{self._get_name(pred_name)}' + sub_str = ('' if 
self._cfg['sub_dir'] == '' else + f"_of_group_{self._cfg['sub_dir']}") + filename = (f'{self.mlr_model_type}_{self.label}_prediction{suffix}' + f'{pred_str}{sub_str}.nc') + new_path = os.path.join(self._cfg['mlr_work_dir'], filename) + cube.attributes['filename'] = new_path + return new_path + + def _update_fit_kwargs(self, fit_kwargs): + """Check and update fit kwargs.""" + new_fit_kwargs = {} + + # Sort out wrong fit kwargs + for (param_name, param_val) in fit_kwargs.items(): + step = param_name.split('__')[0] + if step in self._clf.named_steps: + new_fit_kwargs[param_name] = param_val + else: + raise ValueError( + f"Got invalid pipeline step '{step}' in fit parameter " + f"'{param_name}'") + + # Add sample weights if possible + allowed_fit_kwargs = ( + getfullargspec(self._CLF_TYPE.fit).args + + getfullargspec(self._CLF_TYPE.fit).kwonlyargs + ) + for kwarg in ('sample_weight', 'sample_weights'): + if kwarg not in allowed_fit_kwargs: + continue + long_kwarg = f'{self._clf.steps[-1][0]}__regressor__{kwarg}' + sample_weights = self._get_sample_weights('train') + new_fit_kwargs[long_kwarg] = sample_weights + if sample_weights is not None: + logger.debug( + "Updated keyword arguments of final regressor's fit() " + "function with '%s'", kwarg) + break + + return new_fit_kwargs + + def _update_random_state_parameter(self, function, parameters): + """Update ``random_state`` parameter if necessary.""" + all_params = ( + getfullargspec(function).args + + getfullargspec(function).kwonlyargs + ) + if 'random_state' in all_params: + if 'random_state' in parameters: + logger.warning( + "Parameter 'random_state=%s' is ignored for '%s', use the " + "'random_state' option to initialize the MLRModel class " + "instead", + parameters['random_state'], + self._CLF_TYPE, + ) + parameters['random_state'] = self.random_state + logger.debug( + "Updated 'random_state' parameter of '%s' to '%s'", + self._CLF_TYPE, + self.random_state, + ) + return parameters + + def _write_plot_provenance(self, cube, plot_path, **additional_info): + """Write provenance information for plots.""" + netcdf_path = mlr.get_new_path(self._cfg, plot_path) + io.iris_save(cube, netcdf_path) + record = { + 'authors': ['schlund_manuel'], + 'references': ['schlund20jgr'], + **additional_info, + } + with ProvenanceLogger(self._cfg) as provenance_logger: + provenance_logger.log(netcdf_path, record) + provenance_logger.log(plot_path, record) + + @staticmethod + def _convert_units_in_cube(cube, new_units, power=None, text=None): + """Convert units of cube if possible.""" + msg = '' if text is None else f' of {text}' + if isinstance(new_units, str): + new_units = Unit(new_units) + if power: + logger.debug("Raising target units of cube '%s' by power of %i", + cube.summary(shorten=True), power) + new_units = mlr.units_power(new_units, power) + logger.debug("Converting units%s from '%s' to '%s'", msg, cube.units, + new_units) + try: + cube.convert_units(new_units) + except ValueError as exc: + raise ValueError( + f"Cannot convert units{msg} from '{cube.units}' to " + f"'{new_units}'") from exc + + @staticmethod + def _convert_units_in_metadata(datasets): + """Convert units of datasets if desired.""" + for dataset in datasets: + if not dataset.get('convert_units_to'): + continue + units_from = Unit(dataset['units']) + units_to = Unit(dataset['convert_units_to']) + try: + units_from.convert(0.0, units_to) + except ValueError as exc: + raise ValueError( + f"Cannot convert units of {dataset['var_type']} " + f"'{dataset['tag']}' from '{units_from}' to " 
+ f"'{units_to}'") from exc + dataset['units'] = dataset['convert_units_to'] + + @staticmethod + def _get_centralized_bins(array, n_bins=None, ref=0.0): + """Get bins for array centralized around a reference value.""" + diff = max([ref - array.min(), array.max() - ref]) + if n_bins is None: + auto_bins = np.histogram_bin_edges(array) + if len(auto_bins) < 2: + raise ValueError( + f"Expected at least 2 bins, got {len(auto_bins):d}") + delta = auto_bins[1] - auto_bins[0] + n_bins = 2.0 * diff / delta + if not n_bins % 2: + n_bins += 1 + return np.linspace(ref - diff, ref + diff, n_bins + 1) + + @staticmethod + def _get_coordinate_data(ref_cube, var_type, tag, text=None): + """Get coordinate variable ``ref_cube`` which can be used as x data.""" + msg = '' if text is None else text + if var_type == 'prediction_input_error': + logger.debug( + "Prediction input error of coordinate feature '%s'%s is set " + "to 0.0", tag, msg) + return 0.0 + try: + coord = ref_cube.coord(tag) + except iris.exceptions.CoordinateNotFoundError as exc: + raise iris.exceptions.CoordinateNotFoundError( + f"Coordinate '{tag}' given in 'coords_as_features' not found " + f"in reference cube for '{var_type}'{msg}") from exc + coord_array = np.ma.filled(coord.points, np.nan) + coord_dims = ref_cube.coord_dims(coord) + if coord_dims == (): + logger.warning( + "Coordinate '%s' is scalar, including it as feature does not " + "add any information to the model (array is constant)", tag) + coord_array = np.broadcast_to(coord_array, ref_cube.shape) + else: + coord_array = iris.util.broadcast_to_shape(coord_array, + ref_cube.shape, + coord_dims) + logger.debug("Added %s coordinate '%s'%s", var_type, tag, msg) + return coord_array.ravel() + + @staticmethod + def _get_cube_data(cube): + """Get data from cube.""" + cube_data = np.ma.filled(cube.data, np.nan) + return cube_data.ravel() + + @staticmethod + def _get_data_type_coord(data_types): + """Get :class:`iris.coords.AuxCoord` ``data_type``.""" + aux_coord = iris.coords.AuxCoord(data_types, + var_name='data_type', + long_name='Data type', + units='no unit') + return aux_coord + + @staticmethod + def _get_name(string): + """Convert ``None`` to :obj:`str` if necessary.""" + return 'unnamed' if string is None else string + + @staticmethod + def _get_plot_kwargs(data_type, plot_type=None): + """Get plot kwargs for a data type.""" + plot_kwargs = { + 'all': { + 'color': 'r', + 'label': 'All data', + }, + 'train': { + 'color': 'b', + 'label': 'Train data', + }, + 'test': { + 'color': 'g', + 'label': 'Test data', + }, + } + allowed_data_types = list(plot_kwargs.keys()) + if data_type not in allowed_data_types: + raise NotImplementedError( + f"Plot kwargs for data type '{data_type}' not implemented " + f"yet, only {allowed_data_types} are supported yet") + kwargs = deepcopy(plot_kwargs[data_type]) + if plot_type == 'scatter': + kwargs.update({'alpha': 0.5, 'marker': 'o', 's': 6}) + return kwargs + + @staticmethod + def _get_residuals(y_true, y_pred): + """Calculate residuals (true minus predicted values).""" + logger.debug("Calculating residuals") + return y_true - y_pred + + @staticmethod + def _group_attr_to_pandas_index_str(group_attr): + """Convert group attribute to :obj:`str` used in pandas index.""" + if group_attr is None: + return 'none' + return group_attr + + @staticmethod + def _group_prediction_datasets(datasets): + """Group prediction datasets (use ``prediction_name`` key).""" + for dataset in datasets: + dataset['group_attribute'] = None + return group_metadata(datasets, 
'prediction_name') + + @staticmethod + def _remove_missing_labels(x_data, y_data, sample_weights): + """Remove missing values in the label data.""" + mask = y_data.isnull().values + x_data = x_data[~mask] + y_data = y_data[~mask] + if sample_weights is not None: + sample_weights = sample_weights[~mask] + diff = mask.sum() + if diff: + logger.info( + "Removed %i training point(s) where labels were missing", diff) + return (x_data, y_data, sample_weights) + + @staticmethod + def _set_axis_lim_symmetric(axes, axis): + """Make axis range of plot symmetric around 0.""" + if axis == 'x': + getter = getattr(axes, 'get_xlim') + setter = getattr(axes, 'set_xlim') + elif axis == 'y': + getter = getattr(axes, 'get_ylim') + setter = getattr(axes, 'set_ylim') + else: + raise ValueError(f"Expected 'x' or 'y' for axis, got '{axis}'") + maximum = np.max(np.abs(getter())) + setter([-maximum, maximum]) diff --git a/esmvaltool/diag_scripts/mlr/models/gbr_base.py b/esmvaltool/diag_scripts/mlr/models/gbr_base.py new file mode 100644 index 0000000000..cf0412fabd --- /dev/null +++ b/esmvaltool/diag_scripts/mlr/models/gbr_base.py @@ -0,0 +1,127 @@ +"""Base class for Gradient Boosting Regression model.""" + +import logging +import os + +import matplotlib.pyplot as plt +import numpy as np + +from esmvaltool.diag_scripts import mlr +from esmvaltool.diag_scripts.mlr.models import MLRModel + +logger = logging.getLogger(os.path.basename(__file__)) + + +class GBRModel(MLRModel): + """Base class for Gradient Boosting Regression models.""" + + _CLF_TYPE = None + + def plot_feature_importance(self, filename=None, color_coded=True): + """Plot feature importance. + + This function uses properties of the GBR model based on the number of + appearances of that feature in the regression trees and the + improvements made by the individual splits (see Friedman, 2001). + + Note + ---- + The features plotted here are not necessarily the real input features, + but the ones after preprocessing. + + Parameters + ---------- + filename : str, optional (default: 'feature_importance') + Name of the plot file. + color_coded : bool, optional (default: True) + If ``True``, mark positive (linear) correlations with red bars and + negative (linear) correlations with blue bars. If ``False``, all + bars are blue. + + """ + if not self._is_ready_for_plotting(): + return + + # Get plot path + if filename is None: + filename = 'feature_importance' + new_filename = filename + '.' 
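A minimal stand-alone sketch of the label-masking logic of `_remove_missing_labels` above, with toy inputs; features, labels and optional sample weights stay aligned:

```python
# Drop training samples whose label is missing (toy data, illustrative only).
import numpy as np
import pandas as pd

x_data = pd.DataFrame({'feat': [1.0, 2.0, 3.0, 4.0]})
y_data = pd.Series([0.1, np.nan, 0.3, np.nan], name='label')
sample_weights = np.array([1.0, 1.0, 2.0, 2.0])

mask = y_data.isnull().values
x_data = x_data[~mask]
y_data = y_data[~mask]
sample_weights = sample_weights[~mask]
print(f"Removed {mask.sum()} training point(s) where labels were missing")
```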
+ self._cfg['output_file_type'] + plot_path = os.path.join(self._cfg['mlr_plot_dir'], new_filename) + + # Get feature importance dictionary and colors for bars + feature_importance_dict = dict(zip(self.features_after_preprocessing, + self._clf.feature_importances_)) + colors = self._get_colors_for_features(color_coded=color_coded) + + # Plot + self._plot_feature_importance(feature_importance_dict, colors, + plot_path) + + def _plot_training_progress(self, + train_score, + test_score=None, + filename=None): + """Plot training progress during fitting.""" + if not self._is_ready_for_plotting(): + return + logger.info("Plotting training progress for GBR model") + if filename is None: + filename = 'training_progress' + (_, axes) = plt.subplots() + x_values = np.arange(len(train_score), dtype=np.float64) + 1.0 + x_values_all = [] + scores_all = [] + data_types = [] + + # Plot train score + axes.plot(x_values, + train_score, + color='b', + linestyle='-', + label='train data') + x_values_all.append(x_values) + scores_all.append(train_score) + data_types.append(np.full(x_values.shape, 'train')) + + # Plot test score if possible + if test_score is not None: + axes.plot(x_values, + test_score, + color='g', + linestyle='-', + label='test data') + x_values_all.append(x_values) + scores_all.append(test_score) + data_types.append(np.full(x_values.shape, 'test')) + + # Appearance + ylim = axes.get_ylim() + axes.set_ylim(0.0, ylim[1]) + title = f"Training progress ({self._cfg['mlr_model_name']})" + axes.set_title(title) + axes.set_xlabel('Boosting iterations') + axes.set_ylabel('Loss') + axes.legend(loc='upper right') + new_filename = filename + '.' + self._cfg['output_file_type'] + plot_path = os.path.join(self._cfg['mlr_plot_dir'], new_filename) + plt.savefig(plot_path, **self._cfg['savefig_kwargs']) + logger.info("Wrote %s", plot_path) + plt.close() + + # Save provenance + cube = mlr.get_1d_cube( + np.concatenate(x_values_all), + np.concatenate(scores_all), + x_kwargs={'var_name': 'iteration', + 'long_name': 'Boosting Iteration', + 'units': 'no unit'}, + y_kwargs={'var_name': 'rmse', + 'long_name': 'Normalized RMSE', + 'units': '1', + 'attributes': {'project': '', 'dataset': ''}}, + ) + cube.add_aux_coord( + self._get_data_type_coord(np.concatenate(data_types)), 0) + self._write_plot_provenance( + cube, plot_path, ancestors=self.get_ancestors(prediction_names=[]), + caption=title + '.', plot_types=['line']) diff --git a/esmvaltool/diag_scripts/mlr/models/gbr_sklearn.py b/esmvaltool/diag_scripts/mlr/models/gbr_sklearn.py new file mode 100644 index 0000000000..3921e12691 --- /dev/null +++ b/esmvaltool/diag_scripts/mlr/models/gbr_sklearn.py @@ -0,0 +1,50 @@ +"""Gradient Boosting Regression model (using :mod:`sklearn`). + +Use ``mlr_model_type: gbr_sklearn`` to use this MLR model in the recipe. + +""" + +import logging +import os + +import numpy as np +from sklearn.ensemble import GradientBoostingRegressor +from sklearn.metrics import mean_squared_error + +from esmvaltool.diag_scripts.mlr.models import MLRModel +from esmvaltool.diag_scripts.mlr.models.gbr_base import GBRModel + +logger = logging.getLogger(os.path.basename(__file__)) + + +@MLRModel.register_mlr_model('gbr_sklearn') +class SklearnGBRModel(GBRModel): + """Gradient Boosting Regression model (:mod:`sklearn` implementation).""" + + _CLF_TYPE = GradientBoostingRegressor + + def plot_training_progress(self, filename=None): + """Plot training progress for training and (if possible) test data. 
+ + Parameters + ---------- + filename : str, optional (default: 'training_progress') + Name of the plot file. + + """ + clf = self._clf.steps[-1][1].regressor_ + train_score = clf.train_score_ + test_score = None + if 'test' in self.data: + test_score = np.zeros((len(clf.train_score_), ), dtype=np.float64) + x_test = self._clf.transform_only(self.data['test'].x) + y_test = self._clf.transform_target_only(self.get_y_array('test')) + sample_weights = self._get_sample_weights('test') + for (idx, y_pred) in enumerate(clf.staged_predict(x_test)): + test_score[idx] = np.sqrt(mean_squared_error( + y_test, + y_pred, + sample_weight=sample_weights, + )) + self._plot_training_progress(train_score, test_score=test_score, + filename=filename) diff --git a/esmvaltool/diag_scripts/mlr/models/gbr_xgboost.py b/esmvaltool/diag_scripts/mlr/models/gbr_xgboost.py new file mode 100644 index 0000000000..502487c998 --- /dev/null +++ b/esmvaltool/diag_scripts/mlr/models/gbr_xgboost.py @@ -0,0 +1,83 @@ +"""Gradient Boosting Regression model (using :mod:`xgboost`). + +Use ``mlr_model_type: gbr_xgboost`` to use this MLR model in the recipe. + +""" + +import logging +import os + +from xgboost import XGBRegressor + +from esmvaltool.diag_scripts.mlr.models import MLRModel +from esmvaltool.diag_scripts.mlr.models.gbr_base import GBRModel + +logger = logging.getLogger(os.path.basename(__file__)) + + +@MLRModel.register_mlr_model('gbr_xgboost') +class XGBoostGBRModel(GBRModel): + """Gradient Boosting Regression model (:mod:`xgboost` implementation).""" + + _CLF_TYPE = XGBRegressor + + def plot_training_progress(self, filename=None): + """Plot training progress for training and (if possible) test data. + + Parameters + ---------- + filename : str, optional (default: 'training_progress') + Name of the plot file. + + """ + clf = self._clf.steps[-1][1].regressor_ + if not hasattr(clf, 'evals_result_'): + raise AttributeError( + "Plotting training progress for XGBRegressor model is not " + "possible, necessary attribute 'evals_result_' is missing. 
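The test-score curve computed in the sklearn variant of `plot_training_progress` can be reproduced with plain scikit-learn; this sketch uses toy data and omits the pipeline transformers used in the diff:

```python
# staged_predict() yields the prediction after each boosting iteration, from
# which a per-iteration RMSE is computed (the train curve is available
# directly as clf.train_score_).
import numpy as np
from sklearn.datasets import make_regression
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import train_test_split

(x_all, y_all) = make_regression(n_samples=200, n_features=4, random_state=0)
(x_train, x_test, y_train, y_test) = train_test_split(x_all, y_all,
                                                      random_state=0)
clf = GradientBoostingRegressor(n_estimators=50, random_state=0)
clf.fit(x_train, y_train)
test_score = np.zeros(len(clf.train_score_))
for (idx, y_pred) in enumerate(clf.staged_predict(x_test)):
    test_score[idx] = np.sqrt(mean_squared_error(y_test, y_pred))
print(test_score[-1])
```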
" + "This is usually cause by calling MLRModel.rfecv()") + evals_result = clf.evals_result() + train_score = evals_result['validation_0']['rmse'] + test_score = None + if 'test' in self.data: + test_score = evals_result['validation_1']['rmse'] + self._plot_training_progress(train_score, test_score=test_score, + filename=filename) + + def _update_fit_kwargs(self, fit_kwargs): + """Add transformed training and test data as fit kwargs.""" + fit_kwargs = super()._update_fit_kwargs(fit_kwargs) + + # Fit all transformers + x_train = self.data['train'].x + y_train = self.get_y_array('train') + self._clf.fit_transformers_only(x_train, y_train, **fit_kwargs) + self._clf.fit_target_transformer_only(y_train, **fit_kwargs) + + # Transform input data + x_train = self._clf.transform_only(x_train) + y_train = self._clf.transform_target_only(y_train) + eval_set = [(x_train, y_train)] + sample_weights = [self._get_sample_weights('train')] + if 'test' in self.data: + x_test = self._clf.transform_only(self.data['test'].x) + y_test = self._clf.transform_target_only(self.get_y_array('test')) + eval_set.append((x_test, y_test)) + sample_weights.append(self._get_sample_weights('test')) + if self._get_sample_weights('all') is None: + sample_weights = None + + # Update kwargs + fit_kwargs.update({ + f'{self._clf.steps[-1][0]}__regressor__eval_metric': + 'rmse', + f'{self._clf.steps[-1][0]}__regressor__eval_set': + eval_set, + f'{self._clf.steps[-1][0]}__regressor__sample_weight_eval_set': + sample_weights, + }) + logger.debug( + "Updated keyword arguments of final regressor's fit() function " + "with training and (if possible) test datasets for evaluation of " + "prediction errors") + return fit_kwargs diff --git a/esmvaltool/diag_scripts/mlr/models/gpr_sklearn.py b/esmvaltool/diag_scripts/mlr/models/gpr_sklearn.py new file mode 100644 index 0000000000..e7093c33c2 --- /dev/null +++ b/esmvaltool/diag_scripts/mlr/models/gpr_sklearn.py @@ -0,0 +1,44 @@ +"""Gaussian Process Regression model (using :mod:`sklearn`). + +Use ``mlr_model_type: gpr_sklearn`` to use this MLR model in the recipe. 
+ +""" + +# pylint: disable=arguments-differ + +import logging +import os + +from sklearn.gaussian_process import GaussianProcessRegressor + +from esmvaltool.diag_scripts.mlr.models import MLRModel + +logger = logging.getLogger(os.path.basename(__file__)) + + +class AdvancedGaussianProcessRegressor(GaussianProcessRegressor): + """Expand :class:`sklearn.gaussian_process.GaussianProcessRegressor`.""" + + def predict(self, x_data, return_var=False, return_cov=False): + """Expand :meth:`predict` to accept ``return_var``.""" + pred = super().predict(x_data, return_std=return_var, + return_cov=return_cov) + if return_var: + return (pred[0], pred[1]**2) + return pred + + +@MLRModel.register_mlr_model('gpr_sklearn') +class SklearnGPRModel(MLRModel): + """Gaussian Process Regression model (:mod:`sklearn` implementation).""" + + _CLF_TYPE = AdvancedGaussianProcessRegressor + + def print_kernel_info(self): + """Print information of the fitted kernel of the GPR model.""" + self._check_fit_status('Printing kernel') + kernel = self._clf.steps[-1][1].regressor_.kernel_ + logger.info("Fitted kernel: %s", kernel) + logger.info("All fitted log-hyperparameters:") + for (idx, hyper_param) in enumerate(kernel.hyperparameters): + logger.info("%s: %s", hyper_param, kernel.theta[idx]) diff --git a/esmvaltool/diag_scripts/mlr/models/huber.py b/esmvaltool/diag_scripts/mlr/models/huber.py new file mode 100644 index 0000000000..22e8b362be --- /dev/null +++ b/esmvaltool/diag_scripts/mlr/models/huber.py @@ -0,0 +1,22 @@ +"""Huber Regression model. + +Use ``mlr_model_type: huber`` to use this MLR model in the recipe. + +""" + +import logging +import os + +from sklearn.linear_model import HuberRegressor + +from esmvaltool.diag_scripts.mlr.models import MLRModel +from esmvaltool.diag_scripts.mlr.models.linear_base import LinearModel + +logger = logging.getLogger(os.path.basename(__file__)) + + +@MLRModel.register_mlr_model('huber') +class HuberRegressionModel(LinearModel): + """Huber Regression model.""" + + _CLF_TYPE = HuberRegressor diff --git a/esmvaltool/diag_scripts/mlr/models/krr.py b/esmvaltool/diag_scripts/mlr/models/krr.py new file mode 100644 index 0000000000..c17769839a --- /dev/null +++ b/esmvaltool/diag_scripts/mlr/models/krr.py @@ -0,0 +1,21 @@ +"""Kernel Ridge Regression model. + +Use ``mlr_model_type: krr`` to use this MLR model in the recipe. + +""" + +import logging +import os + +from sklearn.kernel_ridge import KernelRidge + +from esmvaltool.diag_scripts.mlr.models import MLRModel + +logger = logging.getLogger(os.path.basename(__file__)) + + +@MLRModel.register_mlr_model('krr') +class KRRModel(MLRModel): + """Kernel Ridge Regression model.""" + + _CLF_TYPE = KernelRidge diff --git a/esmvaltool/diag_scripts/mlr/models/lasso.py b/esmvaltool/diag_scripts/mlr/models/lasso.py new file mode 100644 index 0000000000..183f6588f9 --- /dev/null +++ b/esmvaltool/diag_scripts/mlr/models/lasso.py @@ -0,0 +1,22 @@ +"""Lasso Regression model. + +Use ``mlr_model_type: lasso`` to use this MLR model in the recipe. 
+ +""" + +import logging +import os + +from sklearn.linear_model import Lasso + +from esmvaltool.diag_scripts.mlr.models import MLRModel +from esmvaltool.diag_scripts.mlr.models.linear_base import LinearModel + +logger = logging.getLogger(os.path.basename(__file__)) + + +@MLRModel.register_mlr_model('lasso') +class LassoModel(LinearModel): + """Lasso Regression model.""" + + _CLF_TYPE = Lasso diff --git a/esmvaltool/diag_scripts/mlr/models/lasso_cv.py b/esmvaltool/diag_scripts/mlr/models/lasso_cv.py new file mode 100644 index 0000000000..63f0b78bf3 --- /dev/null +++ b/esmvaltool/diag_scripts/mlr/models/lasso_cv.py @@ -0,0 +1,28 @@ +"""Lasso Regression model with built-in CV. + +Use ``mlr_model_type: lasso_cv`` to use this MLR model in the recipe. + +""" + +import logging +import os + +from sklearn.linear_model import LassoCV + +from esmvaltool.diag_scripts.mlr.models import MLRModel +from esmvaltool.diag_scripts.mlr.models.linear_base import LinearModel + +logger = logging.getLogger(os.path.basename(__file__)) + + +@MLRModel.register_mlr_model('lasso_cv') +class LassoCVModel(LinearModel): + """Lasso Regression model with built-in CV.""" + + _CLF_TYPE = LassoCV + + def fit(self): + """Print final ``alpha`` after successful fitting.""" + super().fit() + logger.info("Optimal alpha of Lasso model: α = %.5f", + self._clf.steps[-1][1].regressor_.alpha_) diff --git a/esmvaltool/diag_scripts/mlr/models/lasso_lars_cv.py b/esmvaltool/diag_scripts/mlr/models/lasso_lars_cv.py new file mode 100644 index 0000000000..7e99da59c1 --- /dev/null +++ b/esmvaltool/diag_scripts/mlr/models/lasso_lars_cv.py @@ -0,0 +1,28 @@ +"""Lasso Regression model with built-in CV using LARS algorithm. + +Use ``mlr_model_type: lasso_lars_cv`` to use this MLR model in the recipe. + +""" + +import logging +import os + +from sklearn.linear_model import LassoLarsCV + +from esmvaltool.diag_scripts.mlr.models import MLRModel +from esmvaltool.diag_scripts.mlr.models.linear_base import LinearModel + +logger = logging.getLogger(os.path.basename(__file__)) + + +@MLRModel.register_mlr_model('lasso_lars_cv') +class LassoLarsCVModel(LinearModel): + """Lasso Regression model with built-in CV using LARS algorithm.""" + + _CLF_TYPE = LassoLarsCV + + def fit(self): + """Print final ``alpha`` after successful fitting.""" + super().fit() + logger.info("Optimal alpha of Lasso model: α = %.5f", + self._clf.steps[-1][1].regressor_.alpha_) diff --git a/esmvaltool/diag_scripts/mlr/models/linear.py b/esmvaltool/diag_scripts/mlr/models/linear.py new file mode 100644 index 0000000000..13958e77b7 --- /dev/null +++ b/esmvaltool/diag_scripts/mlr/models/linear.py @@ -0,0 +1,22 @@ +"""Linear Regression model. + +Use ``mlr_model_type: linear`` to use this MLR model in the recipe. 
+ +""" + +import logging +import os + +from sklearn.linear_model import LinearRegression + +from esmvaltool.diag_scripts.mlr.models import MLRModel +from esmvaltool.diag_scripts.mlr.models.linear_base import LinearModel + +logger = logging.getLogger(os.path.basename(__file__)) + + +@MLRModel.register_mlr_model('linear') +class LinearRegressionModel(LinearModel): + """Linear Regression model.""" + + _CLF_TYPE = LinearRegression diff --git a/esmvaltool/diag_scripts/mlr/models/linear_base.py b/esmvaltool/diag_scripts/mlr/models/linear_base.py new file mode 100644 index 0000000000..2aec1e85de --- /dev/null +++ b/esmvaltool/diag_scripts/mlr/models/linear_base.py @@ -0,0 +1,118 @@ +"""Base class for linear Machine Learning Regression models.""" + +import logging +import os + +import matplotlib.pyplot as plt +import numpy as np + +from esmvaltool.diag_scripts import mlr +from esmvaltool.diag_scripts.mlr.models import MLRModel + +logger = logging.getLogger(os.path.basename(__file__)) + + +class LinearModel(MLRModel): + """Base class for linear Machine Learning models.""" + + _CLF_TYPE = None + + def plot_coefs(self, filename=None): + """Plot linear coefficients of models. + + Note + ---- + The features plotted here are not necessarily the real input features, + but the ones after preprocessing. + + Parameters + ---------- + filename : str, optional (default: 'coefs') + Name of the plot file. + + """ + if not self._is_ready_for_plotting(): + return + logger.info("Plotting linear coefficients") + if filename is None: + filename = 'coefs' + (_, axes) = plt.subplots() + + # Plot + coefs = self._clf.coef_ + sorted_idx = np.argsort(coefs) + pos = np.arange(sorted_idx.shape[0]) + 0.5 + axes.barh(pos, coefs[sorted_idx], align='center') + + # Plot appearance + axes.tick_params(axis='y', which='minor', left=False, right=False) + axes.tick_params(axis='y', which='major', left=True, right=False) + y_tick_labels = self.features_after_preprocessing[sorted_idx] + title = f"Linear coefficients ({self._cfg['mlr_model_name']})" + axes.set_title(title) + axes.set_yticks(pos) + axes.set_yticklabels(y_tick_labels) + axes.set_xlim(-np.max(np.abs(axes.get_xlim())), + np.max(np.abs(axes.get_xlim()))) + axes.axvline(0.0, color='k') + + # Save plot + new_filename = filename + '.' + self._cfg['output_file_type'] + plot_path = os.path.join(self._cfg['mlr_plot_dir'], new_filename) + plt.savefig(plot_path, **self._cfg['savefig_kwargs']) + logger.info("Wrote %s", plot_path) + plt.close() + + # Save provenance + cube = mlr.get_1d_cube( + y_tick_labels, + coefs[sorted_idx], + x_kwargs={'var_name': 'feature', + 'long_name': 'Feature name', + 'units': 'no unit'}, + y_kwargs={'var_name': 'coef', + 'long_name': '(Normalized) Linear Coefficients', + 'units': '1', + 'attributes': {'project': '', 'dataset': ''}}, + ) + self._write_plot_provenance( + cube, plot_path, ancestors=self.get_ancestors(prediction_names=[]), + caption=title + '.', plot_types=['bar']) + + def plot_feature_importance(self, filename=None, color_coded=True): + """Plot feature importance given by linear coefficients. + + Note + ---- + The features plotted here are not necessarily the real input features, + but the ones after preprocessing. + + Parameters + ---------- + filename : str, optional (default: 'feature_importance') + Name of the plot file. + color_coded : bool, optional (default: True) + If ``True``, mark positive (linear) correlations with red bars and + negative (linear) correlations with blue bars. If ``False``, all + bars are blue. 
+ + """ + if not self._is_ready_for_plotting(): + return + + # Get plot path + if filename is None: + filename = 'feature_importance' + new_filename = filename + '.' + self._cfg['output_file_type'] + plot_path = os.path.join(self._cfg['mlr_plot_dir'], new_filename) + + # Get feature importance dictionary and colors for bars + coefs = self._clf.coef_ + feature_importances = np.abs(coefs) / np.sum(np.abs(coefs)) + feature_importance_dict = dict(zip(self.features_after_preprocessing, + feature_importances)) + colors = self._get_colors_for_features(color_coded=color_coded) + + # Plot + self._plot_feature_importance(feature_importance_dict, colors, + plot_path) diff --git a/esmvaltool/diag_scripts/mlr/models/rfr.py b/esmvaltool/diag_scripts/mlr/models/rfr.py new file mode 100644 index 0000000000..6e65bf857e --- /dev/null +++ b/esmvaltool/diag_scripts/mlr/models/rfr.py @@ -0,0 +1,21 @@ +"""Random Forest Regression model. + +Use ``mlr_model_type: rfr`` to use this MLR model in the recipe. + +""" + +import logging +import os + +from sklearn.ensemble import RandomForestRegressor + +from esmvaltool.diag_scripts.mlr.models import MLRModel + +logger = logging.getLogger(os.path.basename(__file__)) + + +@MLRModel.register_mlr_model('rfr') +class RFRModel(MLRModel): + """Random Forest Regression model.""" + + _CLF_TYPE = RandomForestRegressor diff --git a/esmvaltool/diag_scripts/mlr/models/ridge.py b/esmvaltool/diag_scripts/mlr/models/ridge.py new file mode 100644 index 0000000000..cf13272994 --- /dev/null +++ b/esmvaltool/diag_scripts/mlr/models/ridge.py @@ -0,0 +1,22 @@ +"""Ridge Regression model. + +Use ``mlr_model_type: ridge`` to use this MLR model in the recipe. + +""" + +import logging +import os + +from sklearn.linear_model import Ridge + +from esmvaltool.diag_scripts.mlr.models import MLRModel +from esmvaltool.diag_scripts.mlr.models.linear_base import LinearModel + +logger = logging.getLogger(os.path.basename(__file__)) + + +@MLRModel.register_mlr_model('ridge') +class RidgeModel(LinearModel): + """Ridge Regression model.""" + + _CLF_TYPE = Ridge diff --git a/esmvaltool/diag_scripts/mlr/models/ridge_cv.py b/esmvaltool/diag_scripts/mlr/models/ridge_cv.py new file mode 100644 index 0000000000..c5c53d750b --- /dev/null +++ b/esmvaltool/diag_scripts/mlr/models/ridge_cv.py @@ -0,0 +1,28 @@ +"""Ridge Regression model with built-in CV. + +Use ``mlr_model_type: ridge_cv`` to use this MLR model in the recipe. + +""" + +import logging +import os + +from sklearn.linear_model import RidgeCV + +from esmvaltool.diag_scripts.mlr.models import MLRModel +from esmvaltool.diag_scripts.mlr.models.linear_base import LinearModel + +logger = logging.getLogger(os.path.basename(__file__)) + + +@MLRModel.register_mlr_model('ridge_cv') +class RidgeCVModel(LinearModel): + """Ridge Regression model with built-in CV.""" + + _CLF_TYPE = RidgeCV + + def fit(self): + """Print final ``alpha`` after successful fitting.""" + super().fit() + logger.info("Optimal alpha of Ridge model: α = %.5f", + self._clf.steps[-1][1].regressor_.alpha_) diff --git a/esmvaltool/diag_scripts/mlr/models/svr.py b/esmvaltool/diag_scripts/mlr/models/svr.py new file mode 100644 index 0000000000..48ea41ad7c --- /dev/null +++ b/esmvaltool/diag_scripts/mlr/models/svr.py @@ -0,0 +1,21 @@ +"""Support Vector Regression model. + +Use ``mlr_model_type: svr`` to use this MLR model in the recipe. 
+ +""" + +import logging +import os + +from sklearn.svm import SVR + +from esmvaltool.diag_scripts.mlr.models import MLRModel + +logger = logging.getLogger(os.path.basename(__file__)) + + +@MLRModel.register_mlr_model('svr') +class SVRModel(MLRModel): + """Support Vector Regression model.""" + + _CLF_TYPE = SVR diff --git a/esmvaltool/diag_scripts/mlr/plot.py b/esmvaltool/diag_scripts/mlr/plot.py new file mode 100644 index 0000000000..264b6162a5 --- /dev/null +++ b/esmvaltool/diag_scripts/mlr/plot.py @@ -0,0 +1,852 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +"""Plotting scripts for MLR models input/output. + +Description +----------- +This diagnostic creates plots for MLR model input/output. + +Author +------ +Manuel Schlund (DLR, Germany) + +Project +------- +CRESCENDO + +Notes +----- +All configuration options starting with ``plot_`` specify keyword arguments for +a specific plot type. A certain plot type is only plotted if the corresponding +option is given in the recipe (if no additional keyword arguments are desired, +use ``{}``). + +Configuration options in recipe +------------------------------- +additional_plot_kwargs_xy_plots: dict, optional + Optional keyword arguments (values) for single datasets used in X-Y plots. + They keys may include a ``var_type`` or values of the attribute given by + ``group_by_attribute``. +alias: dict, optional + :obj:`str` to :obj:`str` mapping for nicer plot labels (e.g. + ``{'feature': 'Historical CMIP5 data'}``. +apply_common_mask: bool, optional (default: False) + Apply common mask to all datasets prior to plotting. Requires identical + shapes for all datasets. +group_attribute_as_default_alias: bool, optional (default: True) + If ``True``, use value of attribute given by ``group_by_attribute`` as + default alias if possible. If ``False``, use full group name (including + ``var_type``) as default alias. +group_by_attribute: str, optional (default: 'mlr_model_name') + By default, datasets are grouped using the ``var_type`` attribute. This + option can be used to specify a further attribute to group datasets. This + diagnostic expects a single dataset per group. +ignore: list of dict, optional + Ignore specific datasets by specifying multiple :obj:`dict` s of metadata. +legend_kwargs: dict, optional + Optional keyword arguments of :func:`matplotlib.pyplot.legend` (affects + only plots with legends). +map_plot_type: str, optional (default: 'pcolormesh') + Type of plot used for plotting maps. Must be one of ``'pcolormesh'`` or + ``'contourf'``. +pattern: str, optional + Pattern matched against ancestor file names. +plot_map: dict, optional + Specify additional keyword arguments for plotting global maps showing + datasets by ``plot_kwargs`` and plot appearance options by + ``pyplot_kwargs`` (processed as functions of :mod:`matplotlib.pyplot`). +plot_map_abs_biases: dict, optional + Specify additional keyword arguments for plotting global maps showing + absolute biases by ``plot_kwargs`` and plot appearance options by + ``pyplot_kwargs`` (processed as functions of :mod:`matplotlib.pyplot`). +plot_map_ratios: dict, optional + Specify additional keyword arguments for plotting global maps showing + ratios of datasets by ``plot_kwargs`` and plot appearance options by + ``pyplot_kwargs`` (processed as functions of :mod:`matplotlib.pyplot`). 
+plot_map_rel_biases: dict, optional + Specify additional keyword arguments for plotting global maps showing + relative biases of datasets by ``plot_kwargs`` and plot appearance options + by ``pyplot_kwargs`` (processed as functions of :mod:`matplotlib.pyplot`). +plot_xy: dict, optional + Specify additional keyword arguments for simple X-Y plots by + ``plot_kwargs`` and plot appearance options by ``pyplot_kwargs`` (processed + as functions of :mod:`matplotlib.pyplot`). By default, plots data against + dimensional coordinate (if available). Use ``x_coord`` (:obj:`str`) to use + another coordinate as X-axis. Use ``reg_line: True`` to additionally plot + a linear regression line. +plot_xy_with_errors: dict, optional + Specify additional keyword arguments for X-Y plots with error ranges by + ``plot_kwargs`` and plot appearance options by ``pyplot_kwargs`` (processed + as functions of :mod:`matplotlib.pyplot`). By default, plots data against + dimensional coordinate (if available). Use ``x_coord`` (:obj:`str`) to use + another coordinate as X-axis. +print_corr: bool, optional (default: False) + Print and save Pearson correlation coefficient between all datasets at the + end. Requires identical shapes for all datasets. +savefig_kwargs: dict, optional + Keyword arguments for :func:`matplotlib.pyplot.savefig`. +seaborn_settings: dict, optional + Options for :func:`seaborn.set_theme` (affects all plots). +years_in_title: bool, optional (default: False) + Print years in default title of plots. + +""" + +import itertools +import logging +import os +from copy import deepcopy +from pprint import pformat + +import iris +import matplotlib.pyplot as plt +import numpy as np +import pandas as pd +import seaborn as sns +from scipy.stats import linregress + +import esmvaltool.diag_scripts.shared.iris_helpers as ih +from esmvaltool.diag_scripts import mlr +from esmvaltool.diag_scripts.shared import ( + ProvenanceLogger, + get_diagnostic_filename, + get_plot_filename, + group_metadata, + io, + plot, + run_diagnostic, +) + +logger = logging.getLogger(os.path.basename(__file__)) + +ALL_CUBES = pd.DataFrame() +COLORS = sns.color_palette() +SEP = '___' + + +def _add_correlation_information(cfg, title, cube): + """Add data from cube to :class:`pandas.DataFrame` holding all data.""" + if not cfg['print_corr']: + return + if not ALL_CUBES.empty and len(ALL_CUBES.index) != cube.data.size: + raise ValueError( + "Expected datasets with identical shapes when 'print_corr' is set") + ALL_CUBES[title] = np.ma.filled(cube.data.ravel(), np.nan) + + +def _get_alias(cfg, name): + """Get alias for given ``name``.""" + aliases = cfg.get('aliases', {}) + if name in aliases: + return aliases[name] + if cfg['group_attribute_as_default_alias']: + return name.split(SEP)[-1] + return name + + +def _get_cube(var_type, group_by_attribute, attr, datasets): + """Get single cube for the given group of datasets.""" + key = _get_key(var_type, attr) + logger.info("Found the following datasets for '%s':\n%s", key, + pformat([d['filename'] for d in datasets])) + if 'error' in var_type: + logger.debug("Calculating cube for '%s' by squared error aggregation", + key) + ref_cube = iris.load_cube(datasets[0]['filename']) + cube = mlr.get_squared_error_cube(ref_cube, datasets) + mlr.square_root_metadata(cube) + cube.data = np.ma.sqrt(cube.data) + else: + if len(datasets) != 1: + raise ValueError(f"Expected exactly one dataset for '{key}', got " + f"{len(datasets):d}:\n" + f"{pformat([d['filename'] for d in datasets])}") + cube = 
iris.load_cube(datasets[0]['filename']) + dataset_names = sorted(list({d['dataset'] for d in datasets})) + end_years = list({d['end_year'] for d in datasets}) + filenames = sorted(list({d['filename'] for d in datasets})) + projects = sorted(list({d['project'] for d in datasets})) + start_years = list({d['start_year'] for d in datasets}) + cube.attributes.update({ + 'dataset': '|'.join(dataset_names), + 'end_year': max(end_years), + 'filename': '|'.join(filenames), + 'project': '|'.join(projects), + 'start_year': min(start_years), + 'tag': datasets[0]['tag'], + 'var_type': var_type, + }) + if attr is not None: + cube.attributes[group_by_attribute] = attr + if cube.coords('time', dim_coords=True): + ih.unify_time_coord(cube) + return cube + + +def _get_key(var_type, attr): + """Get dictionary key for specific dataset.""" + if attr is None: + return var_type + return f'{var_type}{SEP}{attr}' + + +def _get_map_plot_func(cfg): + """Get function used for plotting maps.""" + allowed_funcs = { + 'contourf': plot.global_contourf, + 'pcolormesh': plot.global_pcolormesh, + } + if cfg['map_plot_type'] not in allowed_funcs: + raise ValueError( + f"Expected one of {list(allowed_funcs.keys())} for " + f"'map_plot_type', got '{cfg['map_plot_type']}'") + return allowed_funcs[cfg['map_plot_type']] + + +def _get_title(cfg, alias_1, attrs_1, alias_2=None, attrs_2=None, + op_type='-'): + """Get title for plots.""" + if alias_2 is None: + title = alias_1 + if cfg['years_in_title']: + title += f" ({attrs_1['start_year']}-{attrs_1['end_year']})" + return title + if attrs_2 is None: + raise ValueError( + "'attrs_2' needs to be given when 'alias_2' is not None") + if op_type == 'rel_bias': + if not cfg['years_in_title']: + title = f"({alias_1} - {alias_2}) / {alias_2}" + return title + if (attrs_1['start_year'] == attrs_2['start_year'] + and attrs_1['end_year'] == attrs_2['end_year']): + title = (f"({alias_1} - {alias_2}) / {alias_2} " + f"({attrs_1['start_year']}-{attrs_1['end_year']})") + else: + title = (f"({alias_1} ({attrs_1['start_year']}-" + f"{attrs_1['end_year']}) - {alias_2} (" + f"{attrs_2['start_year']}-{attrs_2['end_year']})) / " + f"{alias_2} ({attrs_2['start_year']}-" + f"{attrs_2['end_year']})") + return title + if not cfg['years_in_title']: + title = f"{alias_1} {op_type} {alias_2}" + return title + if (attrs_1['start_year'] == attrs_2['start_year'] + and attrs_1['end_year'] == attrs_2['end_year']): + title = (f"{alias_1} {op_type} {alias_2} ({attrs_1['start_year']}-" + f"{attrs_1['end_year']})") + else: + title = (f"{alias_1} ({attrs_1['start_year']}-{attrs_1['end_year']}) " + f"{op_type} {alias_2} ({attrs_2['start_year']}-" + f"{attrs_2['end_year']})") + return title + + +def _mask_cube(cube): + """Mask cube to avoid divisions by zero.""" + cube = cube.copy() + val_range = np.ma.max(cube.data) - np.ma.min(cube.data) + threshold = val_range * 5e-2 + cube.data = np.ma.masked_inside(cube.data, -threshold, threshold) + return cube + + +def _write_map_provenance(cfg, cube, plot_path, title, *attrs): + """Write provenance information for map plots.""" + cube = cube.copy() + ancestors = [] + for attr in attrs: + ancestors.extend(attr['filename'].split('|')) + netcdf_path = mlr.get_new_path(cfg, plot_path) + io.iris_save(cube, netcdf_path) + record = { + 'ancestors': ancestors, + 'authors': ['schlund_manuel'], + 'caption': f"Geographical distribution of {cube.long_name} for " + f"{title}.", + 'plot_types': ['geo'], + 'references': ['schlund20jgr'], + } + with ProvenanceLogger(cfg) as provenance_logger: + 
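A minimal sketch of the `_mask_cube` thresholding above with a plain masked array: values within 5% of the data range around zero are masked so that the subsequent ratio and relative-bias maps avoid divisions by (near-)zero:

```python
# Mask values close to zero before using the array as a divisor.
import numpy as np

data = np.ma.array([-2.0, -0.01, 0.0, 0.05, 3.0])
val_range = np.ma.max(data) - np.ma.min(data)
threshold = val_range * 5e-2
masked = np.ma.masked_inside(data, -threshold, threshold)
print(masked)  # the three values near 0 are masked
```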
provenance_logger.log(netcdf_path, record) + provenance_logger.log(plot_path, record) + + +def _write_xy_error_provenance(cfg, cubes, plot_path, title, ancestors): + """Write provenance information for X-Y plots with error range.""" + cubes = cubes.copy() + if isinstance(cubes, iris.cube.Cube): + cubes = iris.cube.CubeList([cubes]) + netcdf_path = mlr.get_new_path(cfg, plot_path) + io.iris_save(cubes, netcdf_path) + long_name = ' and '.join([cube.long_name for cube in cubes]) + caption = f"Line plot with error bars of {long_name}" + if title: + caption += f" for {title}." + else: + caption += '.' + record = { + 'ancestors': ancestors, + 'authors': ['schlund_manuel'], + 'caption': caption, + 'plot_types': ['line', 'errorbar'], + 'references': ['schlund20jgr'], + } + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(netcdf_path, record) + provenance_logger.log(plot_path, record) + + +def _write_xy_provenance(cfg, cubes, plot_path, title, *attrs): + """Write provenance information for X-Y plots.""" + cubes = cubes.copy() + if isinstance(cubes, iris.cube.Cube): + cubes = iris.cube.CubeList([cubes]) + ancestors = [] + for attr in attrs: + ancestors.extend(attr['filename'].split('|')) + netcdf_path = mlr.get_new_path(cfg, plot_path) + io.iris_save(cubes, netcdf_path) + long_name = ' and '.join([cube.long_name for cube in cubes]) + caption = f"Line plot of {long_name}" + if title: + caption += f" for {title}." + else: + caption += '.' + record = { + 'ancestors': ancestors, + 'authors': ['schlund_manuel'], + 'caption': caption, + 'plot_types': ['line'], + 'references': ['schlund20jgr'], + } + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(netcdf_path, record) + provenance_logger.log(plot_path, record) + + +def _xy_plot(cube, x_coord=None, reg_line=False, **plot_kwargs): + """Create single X-Y plot.""" + plot_kwargs = deepcopy(plot_kwargs) + if reg_line: + if plot_kwargs.get('linestyle', '-') == '-': + plot_kwargs.setdefault('marker', 'o') + else: + plot_kwargs.setdefault('marker', 's') + plot_kwargs['linestyle'] = 'none' + plot_kwargs.setdefault('markersize', 3) + if x_coord is None: + iris.plot.plot(cube, **plot_kwargs) + if cube.coords(dim_coords=True): + coord = cube.coord(dim_coords=True) + x_data = coord.points + else: + coord = None + x_data = np.arange(cube.shape[0]) + else: + coord = cube.coord(x_coord) + iris.plot.plot(coord, cube, **plot_kwargs) + x_data = coord.points + if not reg_line: + return + plot_kwargs['linestyle'] = '-' + plot_kwargs['marker'] = None + plot_kwargs.pop('label', None) + y_data = cube.data + reg = linregress(x_data, y_data) + y_reg = reg.slope * x_data + reg.intercept + plt.plot(x_data, y_reg, **plot_kwargs) + + +def _xy_plot_with_errors(cfg, cube_dict, split_key, **plot_kwargs): + """Create single X-Y plot with error ranges.""" + ancestors = [] + plot_kwargs = deepcopy(plot_kwargs) + key = SEP.join(split_key) + error_key = split_key[0] + '_error' + if len(split_key) > 1: + error_key = SEP.join([error_key, *split_key[1:]]) + if error_key not in cube_dict: + raise ValueError( + f"Corresponding error '{error_key}' for '{key}' not available") + x_coord = cfg['plot_xy_with_errors'].get('x_coord') + + # Extract data + cube = cube_dict[key] + error_cube = cube_dict[error_key] + ancestors.extend(cube.attributes['filename'].split('|')) + ancestors.extend(error_cube.attributes['filename'].split('|')) + if cube.ndim != 1 or error_cube.ndim != 1: + raise ValueError( + f"Expected 1D cube for X-Y plots with error range, got " + 
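The optional regression line of `_xy_plot` above in isolation (toy data): the data are drawn as markers and a least-squares line from `scipy.stats.linregress` is drawn on top:

```python
# Scatter markers plus a linear regression line.
import matplotlib.pyplot as plt
import numpy as np
from scipy.stats import linregress

x_data = np.arange(10, dtype=float)
y_data = 2.0 * x_data + np.random.default_rng(0).normal(size=10)

plt.plot(x_data, y_data, linestyle='none', marker='o', markersize=3)
reg = linregress(x_data, y_data)
plt.plot(x_data, reg.slope * x_data + reg.intercept, linestyle='-')
plt.savefig('xy_reg.png', bbox_inches='tight')
```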
f"{cube.ndim:d}D and {error_cube.ndim:d} (error) cubes") + if x_coord is not None: + coord = cube.coord(x_coord) + x_data = coord.points + elif cube.coords(dim_coords=True): + coord = cube.coord(dim_coords=True) + x_data = coord.points + else: + coord = None + x_data = np.arange(cube.shape[0]) + if coord is not None: + if not error_cube.coord(coord): + raise iris.exceptions.CoordinateNotFoundError( + f"Coordinate '{coord.name()}' of '{key}' not found in " + f"corresponding error '{error_key}'") + + # Plot + alias = _get_alias(cfg, key) + plot_kwargs.setdefault('color', COLORS[0]) + plot_kwargs.setdefault('label', alias) + plt.plot(x_data, cube.data, **plot_kwargs) + plot_kwargs.pop('label') + plot_kwargs['alpha'] = 0.12 + plt.fill_between(x_data, cube.data - error_cube.data, + cube.data + error_cube.data, **plot_kwargs) + + return (cube, error_cube, coord, ancestors) + + +def get_cube_dict(cfg, group_by_attribute): + """Get dictionary of mean cubes (values) with ``var_type`` (keys).""" + logger.info("Grouping datasets by 'var_type' and '%s'", group_by_attribute) + input_data = get_input_datasets(cfg) + cube_dict = {} + masks = [] + for (var_type, datasets) in group_metadata(input_data, 'var_type').items(): + grouped_datasets = group_metadata(datasets, group_by_attribute) + for (attr, attr_datasets) in grouped_datasets.items(): + key = _get_key(var_type, attr) + cube = _get_cube(var_type, group_by_attribute, attr, attr_datasets) + logger.info("Found cube for '%s'", key) + cube_dict[key] = cube + masks.append(np.ma.getmaskarray(cube.data)) + if cfg.get('apply_common_mask'): + mask = masks[0] + for new_mask in masks[1:]: + if new_mask.shape != mask.shape: + raise ValueError( + "Expected datasets with identical shapes when " + "'apply_common_mask' is set") + mask |= new_mask + for cube in cube_dict.values(): + cube.data = np.ma.array(cube.data, mask=mask) + return cube_dict + + +def get_input_datasets(cfg): + """Get grouped datasets (by tag).""" + input_data = mlr.get_input_data(cfg, + pattern=cfg.get('pattern'), + ignore=cfg.get('ignore')) + tags = list(group_metadata(input_data, 'tag').keys()) + if len(tags) != 1: + raise ValueError( + f"Expected unique 'tag' for all input datasets, got {len(tags):d} " + f"different ones ({tags})") + return input_data + + +def get_plot_kwargs(cfg, option, key=None): + """Get keyword arguments for desired plot function and key.""" + plot_kwargs = cfg.get(option, {}).get('plot_kwargs', {}) + if key is None: + return plot_kwargs + if '_xy' in option: + additional_plot_kwargs = cfg.get('additional_plot_kwargs_xy_plots', {}) + if key in additional_plot_kwargs: + return {**plot_kwargs, **additional_plot_kwargs[key]} + subkey = key.split(SEP)[-1] + if subkey in additional_plot_kwargs: + return {**plot_kwargs, **additional_plot_kwargs[subkey]} + return deepcopy(plot_kwargs) + + +def get_savefig_kwargs(cfg): + """Get keyword arguments for :func:`matplotlib.pyplot.savefig`.""" + if 'savefig_kwargs' in cfg: + return cfg['savefig_kwargs'] + savefig_kwargs = { + 'bbox_inches': 'tight', + 'dpi': 300, + 'orientation': 'landscape', + } + return savefig_kwargs + + +def process_pyplot_kwargs(cfg, option): + """Process functions for :mod:`matplotlib.pyplot`.""" + for (key, val) in cfg.get(option, {}).get('pyplot_kwargs', {}).items(): + getattr(plt, key)(val) + + +def plot_map(cfg, cube_dict): + """Plot global maps showing datasets.""" + logger.info("Creating map plots") + for (key, cube) in cube_dict.items(): + logger.debug("Plotting '%s'", key) + attrs = cube.attributes + + # 
Plot + plot_kwargs = { + 'cbar_label': f"{attrs['tag']} / {cube.units}", + 'cmap': 'YlGn', + } + plot_kwargs.update(get_plot_kwargs(cfg, 'plot_map', key=key)) + _get_map_plot_func(cfg)(cube, **plot_kwargs) + + # Plot appearance + alias = _get_alias(cfg, key) + title = _get_title(cfg, alias, attrs) + plt.title(title) + process_pyplot_kwargs(cfg, 'plot_map') + + # Write minimum and maximum + logger.debug("Minimum of '%s': %.2f", title, cube.data.min()) + logger.debug("Maximum of '%s': %.2f", title, cube.data.max()) + + # Save plot + plot_path = get_plot_filename(f'map_{key}', cfg) + plt.savefig(plot_path, **get_savefig_kwargs(cfg)) + logger.info("Wrote %s", plot_path) + plt.close() + + # Provenance + _write_map_provenance(cfg, cube, plot_path, title, attrs) + + # Add to global DataFrame + _add_correlation_information(cfg, title, cube) + + +def plot_map_abs_biases(cfg, cube_dict): + """Plot global maps showing absolute biases of datasets.""" + logger.info("Creating absolute bias map plots") + for (key_1, key_2) in itertools.permutations(cube_dict, 2): + logger.debug("Plotting absolute bias '%s' - '%s'", key_1, key_2) + cube_1 = cube_dict[key_1] + cube_2 = cube_dict[key_2] + attrs_1 = cube_1.attributes + attrs_2 = cube_2.attributes + alias_1 = _get_alias(cfg, key_1) + alias_2 = _get_alias(cfg, key_2) + + # Plot + bias_cube = cube_1.copy() + bias_cube.data = cube_1.data - cube_2.data + plot_kwargs = { + 'cbar_label': f"Δ{attrs_1['tag']} / {bias_cube.units}", + 'cmap': 'bwr', + } + plot_kwargs.update( + get_plot_kwargs(cfg, 'plot_map_abs_biases')) + _get_map_plot_func(cfg)(bias_cube, **plot_kwargs) + + # Plot appearance + title = _get_title(cfg, alias_1, attrs_1, alias_2, attrs_2, + op_type='-') + plt.title(title) + process_pyplot_kwargs(cfg, 'plot_map_abs_biases') + + # Write minimum and maximum + logger.debug("Minimum of '%s': %.2f", title, bias_cube.data.min()) + logger.debug("Maximum of '%s': %.2f", title, bias_cube.data.max()) + + # Save plot + plot_path = get_plot_filename(f'map_abs_bias_{key_1}-{key_2}', cfg) + plt.savefig(plot_path, **get_savefig_kwargs(cfg)) + logger.info("Wrote %s", plot_path) + plt.close() + + # Provenance + _write_map_provenance(cfg, bias_cube, plot_path, title, attrs_1, + attrs_2) + + # Add to global DataFrame + _add_correlation_information(cfg, title, bias_cube) + + +def plot_map_ratios(cfg, cube_dict): + """Plot global maps showing ratios of datasets.""" + logger.info("Creating ratio map plots") + for (key_1, key_2) in itertools.permutations(cube_dict, 2): + logger.debug("Plotting ratio '%s' / '%s'", key_1, key_2) + cube_1 = cube_dict[key_1] + cube_2 = cube_dict[key_2] + attrs_1 = cube_1.attributes + attrs_2 = cube_2.attributes + alias_1 = _get_alias(cfg, key_1) + alias_2 = _get_alias(cfg, key_2) + + # Mask cube to avoid division by zero + cube_2 = _mask_cube(cube_2) + + # Plot + ratio_cube = cube_1.copy() + ratio_cube.data = cube_1.data / cube_2.data + plot_kwargs = { + 'cbar_label': f"{attrs_1['tag']} ratio / 1", + 'cmap': 'bwr', + } + plot_kwargs.update(get_plot_kwargs(cfg, 'plot_map_ratios')) + _get_map_plot_func(cfg)(ratio_cube, **plot_kwargs) + + # Plot appearance + title = _get_title(cfg, alias_1, attrs_1, alias_2, attrs_2, + op_type='/') + plt.title(title) + process_pyplot_kwargs(cfg, 'plot_map_ratios') + + # Write minimum and maximum + logger.debug("Minimum of '%s': %.2f", title, ratio_cube.data.min()) + logger.debug("Maximum of '%s': %.2f", title, ratio_cube.data.max()) + + # Save plot + plot_path = get_plot_filename(f'map_ratio_{key_1}-{key_2}', cfg) + 
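The bias and ratio map functions here all iterate over ordered dataset pairs, so both the A-B and the B-A map are produced; a short sketch of the pairing logic with illustrative keys:

```python
# itertools.permutations over a dict yields all ordered key pairs.
import itertools

cube_dict = {'prediction_output___MLR': None,
             'label___OBS': None,
             'feature___CMIP5': None}
for (key_1, key_2) in itertools.permutations(cube_dict, 2):
    print(f'{key_1} - {key_2}')  # 6 ordered pairs for 3 datasets
```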
plt.savefig(plot_path, **get_savefig_kwargs(cfg)) + logger.info("Wrote %s", plot_path) + plt.close() + + # Provenance + _write_map_provenance(cfg, ratio_cube, plot_path, title, attrs_1, + attrs_2) + + # Add to global DataFrame + _add_correlation_information(cfg, title, ratio_cube) + + +def plot_map_rel_biases(cfg, cube_dict): + """Plot global maps showing relative biases of datasets.""" + logger.info("Creating relative bias map plots") + for (key_1, key_2) in itertools.permutations(cube_dict, 2): + logger.debug("Plotting relative bias ('%s' - '%s') / '%s'", key_1, + key_2, key_2) + cube_1 = cube_dict[key_1] + cube_2 = cube_dict[key_2] + attrs_1 = cube_1.attributes + attrs_2 = cube_2.attributes + alias_1 = _get_alias(cfg, key_1) + alias_2 = _get_alias(cfg, key_2) + + # Mask cube to avoid division by zero + cube_2 = _mask_cube(cube_2) + + # Plot + bias_cube = cube_1.copy() + bias_cube.data = (cube_1.data - cube_2.data) / cube_2.data + plot_kwargs = { + 'cbar_label': f"relative change in {attrs_1['tag']} / 1", + 'cmap': 'bwr', + } + plot_kwargs.update(get_plot_kwargs(cfg, 'plot_map_rel_biases')) + _get_map_plot_func(cfg)(bias_cube, **plot_kwargs) + + # Plot appearance + title = _get_title(cfg, alias_1, attrs_1, alias_2, attrs_2, + op_type='rel_bias') + plt.title(title) + process_pyplot_kwargs(cfg, 'plot_map_rel_biases') + + # Write minimum and maximum + logger.debug("Minimum of '%s': %.2f", title, bias_cube.data.min()) + logger.debug("Maximum of '%s': %.2f", title, bias_cube.data.max()) + + # Save plot + plot_path = get_plot_filename(f'map_rel_bias_{key_1}-{key_2}', cfg) + plt.savefig(plot_path, **get_savefig_kwargs(cfg)) + logger.info("Wrote %s", plot_path) + plt.close() + + # Provenance + _write_map_provenance(cfg, bias_cube, plot_path, title, attrs_1, + attrs_2) + + # Add to global DataFrame + _add_correlation_information(cfg, title, bias_cube) + + +def plot_xy(cfg, cube_dict): + """Plot X-Y plots.""" + logger.info("Creating X-Y plots") + x_coord = cfg['plot_xy'].get('x_coord') + all_attrs = [] + + # Individual plots + for (key, cube) in cube_dict.items(): + logger.debug("Plotting '%s'", key) + if cube.ndim != 1: + raise ValueError( + f"Expected 1D cube for X-Y plots, got {cube.ndim:d}D cube") + alias = _get_alias(cfg, key) + plot_kwargs = get_plot_kwargs(cfg, 'plot_xy', key=key) + plot_kwargs.setdefault('label', alias) + _xy_plot(cube, x_coord=x_coord, + reg_line=cfg['plot_xy'].get('reg_line', False), **plot_kwargs) + attrs = cube.attributes + all_attrs.append(attrs) + title = _get_title(cfg, alias, attrs) + plt.title(title) + plt.ylabel(f'{cube.var_name} / {cube.units}') + if x_coord is not None: + coord = cube.coord(x_coord) + plt.xlabel(f'{coord.var_name} / {coord.units}') + elif cube.coords(dim_coords=True): + coord = cube.coord(dim_coords=True) + plt.xlabel(f'{coord.var_name} / {coord.units}') + process_pyplot_kwargs(cfg, 'plot_xy') + plt.legend(**cfg['legend_kwargs']) + + # Save plot + plot_path = get_plot_filename(f'xy_{key}', cfg) + savefig_kwargs = get_savefig_kwargs(cfg) + plt.savefig(plot_path, **savefig_kwargs) + logger.info("Wrote %s", plot_path) + plt.close() + + # Provenance + _write_xy_provenance(cfg, cube, plot_path, title, attrs) + + # Add to global DataFrame + _add_correlation_information(cfg, title, cube) + + # Merged plot + logger.debug("Plotting merged plot") + cubes = iris.cube.CubeList() + for (key, cube) in cube_dict.items(): + alias = _get_alias(cfg, key) + plot_kwargs = get_plot_kwargs(cfg, 'plot_xy', key=key) + plot_kwargs.setdefault('label', alias) + 
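A minimal sketch of the error-band drawing used by `_xy_plot_with_errors` above (toy data; the 0.12 alpha matches the value used in the diff): the mean is drawn as a line and the +/- error band is added with `fill_between()` at reduced opacity:

```python
# Line plot with a shaded +/- error band.
import matplotlib.pyplot as plt
import numpy as np

x_data = np.arange(10, dtype=float)
mean = np.sin(x_data)
error = np.full_like(mean, 0.3)

plt.plot(x_data, mean, color='C0', label='prediction')
plt.fill_between(x_data, mean - error, mean + error, color='C0', alpha=0.12)
plt.legend()
plt.savefig('xy_with_errors.png', bbox_inches='tight')
```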
_xy_plot(cube, x_coord=x_coord, + reg_line=cfg['plot_xy'].get('reg_line', False), **plot_kwargs) + cube = cube.copy() + ih.prepare_cube_for_merging(cube, key) + cubes.append(cube) + cubes = cubes.merge() + process_pyplot_kwargs(cfg, 'plot_xy') + plt.legend(**cfg.get('legend_kwargs')) + plot_path = get_plot_filename('merged_xy', cfg) + savefig_kwargs = get_savefig_kwargs(cfg) + plt.savefig(plot_path, **savefig_kwargs) + logger.info("Wrote %s", plot_path) + plt.close() + _write_xy_provenance(cfg, cubes, plot_path, None, *all_attrs) + + +def plot_xy_with_errors(cfg, cube_dict): + """Plot X-Y plots with error range.""" + logger.info("Creating X-Y plots with error ranges") + keys = {key: key.split(SEP) for key in cube_dict if 'error' not in key} + + # Individual plots + for (key, split_key) in keys.items(): + cubes = iris.cube.CubeList() + logger.debug("Plotting '%s'", key) + plot_kwargs = get_plot_kwargs(cfg, 'plot_xy_with_errors', key=key) + (cube, error_cube, coord, ancestors) = _xy_plot_with_errors( + cfg, cube_dict, split_key, **plot_kwargs) + + # Plot appearance + alias = _get_alias(cfg, key) + attrs = cube.attributes + title = _get_title(cfg, alias, attrs) + plt.title(title) + plt.ylabel(f'{cube.var_name} / {cube.units}') + if coord is not None: + plt.xlabel(f'{coord.var_name} / {coord.units}') + process_pyplot_kwargs(cfg, 'plot_xy_with_errors') + plt.legend(**cfg['legend_kwargs']) + + # Save plot + plot_path = get_plot_filename(f'xy_with_errors_{key}', cfg) + savefig_kwargs = get_savefig_kwargs(cfg) + plt.savefig(plot_path, **savefig_kwargs) + logger.info("Wrote %s", plot_path) + plt.close() + + # Add to global DataFrame + _add_correlation_information(cfg, title, cube) + + # Provenance + cube = cube.copy() + error_cube = error_cube.copy() + ih.prepare_cube_for_merging(cube, key) + ih.prepare_cube_for_merging(error_cube, f'{key}{SEP}error') + cubes.append(cube) + cubes.append(error_cube) + cubes = cubes.merge() + _write_xy_error_provenance(cfg, cubes, plot_path, title, ancestors) + + # Merged plot + all_ancestors = [] + cubes = iris.cube.CubeList() + logger.debug("Plotting merged plot") + plot_kwargs.pop('color', None) + for (idx, (key, split_key)) in enumerate(keys.items()): + plot_kwargs = get_plot_kwargs(cfg, 'plot_xy_with_errors', key=key) + plot_kwargs['color'] = COLORS[idx] + (cube, error_cube, _, ancestors) = _xy_plot_with_errors(cfg, cube_dict, + split_key, + **plot_kwargs) + all_ancestors.extend(ancestors) + cube = cube.copy() + error_cube = error_cube.copy() + ih.prepare_cube_for_merging(cube, key) + ih.prepare_cube_for_merging(error_cube, f'{key}{SEP}error') + cubes.append(cube) + cubes.append(error_cube) + cubes = cubes.merge() + process_pyplot_kwargs(cfg, 'plot_xy_with_errors') + plt.legend(**cfg['legend_kwargs']) + plot_path = get_plot_filename('merged_xy_with_errors', cfg) + savefig_kwargs = get_savefig_kwargs(cfg) + plt.savefig(plot_path, **savefig_kwargs) + logger.info("Wrote %s", plot_path) + plt.close() + _write_xy_error_provenance(cfg, cubes, plot_path, None, all_ancestors) + + +def main(cfg): + """Run the diagnostic.""" + sns.set_theme(**cfg.get('seaborn_settings', {})) + cfg = deepcopy(cfg) + cfg.setdefault('group_by_attribute', 'mlr_model_name') + cfg.setdefault('group_attribute_as_default_alias', True) + cfg.setdefault('legend_kwargs', {}) + cfg.setdefault('map_plot_type', 'pcolormesh') + cfg.setdefault('print_corr', False) + cfg.setdefault('years_in_title', False) + cube_dict = get_cube_dict(cfg, cfg['group_by_attribute']) + + # Plots + plot_types = [ + 
'plot_map', + 'plot_map_abs_biases', + 'plot_map_ratios', + 'plot_map_rel_biases', + 'plot_xy', + 'plot_xy_with_errors', + ] + for plot_type in plot_types: + if plot_type in cfg: + globals()[plot_type](cfg, cube_dict) + + # Print and save correlations between figures if desired + if cfg['print_corr']: + pandas_print_options = [ + 'display.max_rows', None, + 'display.max_colwidth', None, + ] + corr = ALL_CUBES.corr() + with pd.option_context(*pandas_print_options): + logger.info("Unweighted means:\n%s", ALL_CUBES.mean(axis=0)) + logger.info("Unweighted correlations:\n%s", corr) + corr_path = get_diagnostic_filename('corr', cfg).replace('.nc', '.csv') + corr.to_csv(corr_path) + logger.info("Wrote %s", corr_path) + + +# Run main function when this script is called +if __name__ == '__main__': + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/mlr/postprocess.py b/esmvaltool/diag_scripts/mlr/postprocess.py new file mode 100644 index 0000000000..0f7ab80f82 --- /dev/null +++ b/esmvaltool/diag_scripts/mlr/postprocess.py @@ -0,0 +1,775 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +"""Simple postprocessing of MLR model output. + +Description +----------- +This diagnostic performs postprocessing operations for MLR model output (mean +and error). + +Author +------ +Manuel Schlund (DLR, Germany) + +Project +------- +CRESCENDO + +Notes +----- +Prior to postprocessing, this diagnostic groups input datasets according to +``tag`` and ``prediction_name``. For each group, accepts datasets with three +different ``var_type`` s: + +* ``prediction_output``: **Exactly one** necessary, refers to + the mean prediction and serves as reference dataset (regarding shape). +* ``prediction_output_error``: Arbitrary number of error datasets. If not + given, error calculation is skipped. May be squared errors (marked by the + attribute ``squared``) or not. In addition, a single covariance dataset can + be specified (``short_name`` ending with ``_cov``). +* ``prediction_input``: Dataset used to estimate covariance structure of + the mean prediction (i.e. matrix of Pearson correlation coefficients) for + error estimation. At most one dataset allowed. Ignored when no + ``prediction_output_error`` is given. This is only possible when (1) the + shape of the ``prediction_input`` dataset is identical to the shape of the + ``prediction_output_error`` datasets, (2) the number of dimensions of the + ``prediction_input`` dataset is higher than the number of dimensions of the + ``prediction_output_error`` datasets and they have identical trailing + (rightmost) dimensions or (3) the number of dimensions of the + ``prediction_input`` dataset is higher than the number of dimensions of + ``prediction_output_error`` datasets and all dimensions of the + ``prediction_output_error`` datasets are mapped to a corresponding dimension + of the ``prediction_input`` using the ``cov_estimate_dim_map`` option (e.g. + when ``prediction_input`` has shape ``(10, 5, 100, 20)`` and + ``prediction_output_error`` has shape ``(5, 20)``, you can use + ``cov_estimate_dim_map: [1, 3]`` to map the dimensions of + ``prediction_output_error`` to dimension 1 and 3 of ``prediction_input``). + +All data with other ``var_type`` s is ignored (``feature``, ``label``, etc.). 
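The `cov_estimate_dim_map` example from the notes above, spelled out with plain numpy (illustrative only, not the diagnostic's actual implementation):

```python
# Align an error array of shape (5, 20) with dimensions 1 and 3 of a
# prediction input of shape (10, 5, 100, 20) via broadcasting.
import numpy as np

prediction_input = np.zeros((10, 5, 100, 20))
prediction_output_error = np.ones((5, 20))
cov_estimate_dim_map = [1, 3]

shape = [1] * prediction_input.ndim  # length-1 axes for unmapped dimensions
for (err_dim, in_dim) in enumerate(cov_estimate_dim_map):
    shape[in_dim] = prediction_output_error.shape[err_dim]
# shape is now [1, 5, 1, 20]; assumes the mapped dimensions are ascending
aligned_error = prediction_output_error.reshape(shape)
expanded = np.broadcast_to(aligned_error, prediction_input.shape)
print(expanded.shape)  # (10, 5, 100, 20)
```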
+
+Real error calculation (using covariance dataset given as
+``prediction_output_error``) and estimation (using ``prediction_input``
+dataset to estimate covariance structure) are only possible if the mean
+prediction cube is collapsed completely during postprocessing, i.e. all
+coordinates are listed for either ``mean`` or ``sum``.
+
+Configuration options in recipe
+-------------------------------
+add_var_from_cov: bool, optional (default: True)
+    Calculate variances from covariance matrix (diagonal elements) and add
+    those to (squared) error datasets. Set to ``False`` if variance is already
+    given separately in prediction output.
+area_weighted: bool, optional (default: True)
+    Calculate weighted averages/sums when collapsing over latitude and/or
+    longitude coordinates using grid cell areas (calculated using grid cell
+    bounds). Only possible for datasets on regular grids that contain
+    ``latitude`` and ``longitude`` coordinates.
+convert_units_to: str, optional
+    Convert units of the input data.
+cov_estimate_dim_map: list of int, optional
+    Map dimensions of ``prediction_output_error`` datasets to corresponding
+    dimensions of ``prediction_input`` used for estimating covariance. Only
+    relevant if both dataset types are given. See notes above for more
+    information.
+ignore: list of dict, optional
+    Ignore specific datasets by specifying multiple :obj:`dict` s of metadata.
+landsea_fraction_weighted: str, optional
+    When given, calculate weighted averages/sums when collapsing over latitude
+    and/or longitude coordinates using land/sea fraction (calculated using
+    Natural Earth masks). Only possible if the dataset contains ``latitude``
+    and ``longitude`` coordinates. Must be one of ``'land'``, ``'sea'``.
+mean: list of str, optional
+    Perform mean over the given coordinates.
+pattern: str, optional
+    Pattern matched against ancestor file names.
+sum: list of str, optional
+    Perform sum over the given coordinates.
+time_weighted: bool, optional (default: True)
+    Calculate weighted averages/sums for time (using time bounds).
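+
+A minimal sketch of how the options above arrive in this script (the values
+are hypothetical, not defaults)::
+
+    cfg = {
+        'mean': ['latitude', 'longitude'],
+        'sum': ['time'],
+        'convert_units_to': 'Gt yr-1',
+        'area_weighted': True,
+    }
+
+Note that a coordinate may be listed in either ``mean`` or ``sum``, never in
+both (``check_cfg`` below raises a ``ValueError`` otherwise).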
+ +""" + +import logging +import os +import warnings +from copy import deepcopy +from pprint import pformat + +import iris +import numpy as np +from cf_units import Unit + +from esmvaltool.diag_scripts import mlr +from esmvaltool.diag_scripts.shared import ( + ProvenanceLogger, + group_metadata, + io, + run_diagnostic, +) + +logger = logging.getLogger(os.path.basename(__file__)) + +OPS = { + 'mean': iris.analysis.MEAN, + 'sum': iris.analysis.SUM, +} + + +def _calculate_lower_error_bound(cfg, squared_error_cube, basepath): + """Calculate lower error bound.""" + logger.debug("Calculating lower error bound") + lower_bound = _collapse_regular_cube(cfg, squared_error_cube, power=2) + lower_bound.data = np.ma.sqrt(lower_bound.data) + mlr.square_root_metadata(lower_bound) + _convert_units(cfg, lower_bound) + lower_bound.attributes['error_type'] = 'lower_bound' + new_path = basepath.replace('.nc', '_lower_bound.nc') + io.iris_save(lower_bound, new_path) + logger.info("Lower bound of error: %s %s", lower_bound.data, + lower_bound.units) + ancestors = _get_ancestors(squared_error_cube) + _write_provenance(cfg, 'Lower bound of', lower_bound, new_path, ancestors) + + +def _calculate_real_error(cfg, ref_cube, cov_cube, basepath): + """Calculate real error using covariance.""" + logger.debug("Calculating real error using covariance") + real_error = _collapse_covariance_cube(cfg, cov_cube, ref_cube) + real_error.data = np.ma.sqrt(real_error.data) + real_error.var_name = cov_cube.var_name.replace('_cov', '_error') + real_error.long_name = cov_cube.long_name.replace('(covariance)', + '(error)') + real_error.units = real_error.units.root(2) + _convert_units(cfg, real_error) + real_error.attributes['error_type'] = 'real_error' + new_path = basepath.replace('.nc', '_real.nc') + io.iris_save(real_error, new_path) + logger.info("Real error (using covariance): %s %s", real_error.data, + real_error.units) + ancestors = _get_ancestors(cov_cube) + _write_provenance(cfg, 'Real', real_error, new_path, ancestors) + + +def _calculate_upper_error_bound(cfg, squared_error_cube, basepath): + """Calculate upper error bound.""" + logger.debug("Calculating upper error bound") + upper_bound = squared_error_cube.copy() + upper_bound.data = np.ma.sqrt(upper_bound.data) + mlr.square_root_metadata(upper_bound) + upper_bound = _collapse_regular_cube(cfg, upper_bound) + _convert_units(cfg, upper_bound) + upper_bound.attributes['error_type'] = 'upper_bound' + new_path = basepath.replace('.nc', '_upper_bound.nc') + io.iris_save(upper_bound, new_path) + logger.info("Upper bound of error: %s %s", upper_bound.data, + upper_bound.units) + ancestors = _get_ancestors(squared_error_cube) + _write_provenance(cfg, 'Upper bound of', upper_bound, new_path, ancestors) + + +def _convert_units(cfg, cube): + """Convert units if desired.""" + cfg_settings = cfg.get('convert_units_to') + if cfg_settings: + units_to = cfg_settings + logger.debug("Converting units from '%s' to '%s'", cube.units, + units_to) + try: + cube.convert_units(units_to) + except ValueError as exc: + raise ValueError( + f"Cannot convert units of cube {cube.summary(shorten=True)} " + f"from '{cube.units}' to '{units_to}'") from exc + + +def _collapse_covariance_cube(cfg, cov_cube, ref_cube): + """Collapse covariance cube with using desired operations.""" + (weights, units, coords) = _get_all_weights(cfg, ref_cube) + if len(coords) < ref_cube.ndim: + raise ValueError( + f"Calculating real error using covariance dataset " + f"('prediction_output_error') is only possible if all " + 
f"{ref_cube.ndim:d} dimensions of the cube are collapsed, got " + f"only {len(coords):d} ({coords})") + weights = weights.ravel() + weights = weights[~np.ma.getmaskarray(ref_cube.data).ravel()] + weights = np.outer(weights, weights) + cov_cube = cov_cube.collapsed(cov_cube.coords(dim_coords=True), + iris.analysis.SUM, + weights=weights) + cov_cube.units *= units**2 + return cov_cube + + +def _collapse_regular_cube(cfg, cube, power=1): + """Collapse cube with using desired operations.""" + (weights, units, coords) = _get_all_weights(cfg, cube, power=power) + cube = cube.collapsed(coords, iris.analysis.SUM, weights=weights) + cube.units *= units + return cube + + +def _corrcoef(array, rowvar=True, weights=None): + """Fast version of :func:`numpy.ma.corrcoef`.""" + if not rowvar: + array = array.T + if weights is not None: + weights = weights.T + mean = np.ma.average(array, axis=1, weights=weights).reshape(-1, 1) + if weights is None: + sqrt_weights = 1.0 + else: + sqrt_weights = np.ma.sqrt(weights) + demean = (array - mean) * sqrt_weights + res = np.ma.dot(demean, demean.T) + row_norms = np.ma.sqrt(np.ma.sum(demean**2, axis=1)) + res /= np.ma.outer(row_norms, row_norms) + return res + + +def _estim_cov_differing_shape(cfg, squared_error_cube, cov_est_cube, weights): + """Collapse estimated covariance. + + Estimate error by estimating covariance from dataset with at least one + dimension more than errors themselves. + + """ + logger.info( + "Estimating true error from covariance derived from " + "'prediction_input' dataset with shape %s for errors " + "('prediction_output_error') with shape %s", cov_est_cube.shape, + squared_error_cube.shape) + + # Load data + error = np.ma.sqrt(squared_error_cube.data) + error = np.ma.filled(error, 0.0) + cov_est = np.ma.filled(cov_est_cube.data, np.nan) + + # Reshape if necessary + if 'cov_estimate_dim_map' in cfg: + dim_map = tuple(cfg['cov_estimate_dim_map']) + cov_est = _reshape_covariance(cov_est, error, dim_map) + cov_est = np.ma.masked_invalid(cov_est) + if not _identical_trailing_dimensions(cov_est, error): + raise ValueError( + f"Expected identical trailing (rightmost) dimensions of " + f"'prediction_input' data used to estimate covariance structure " + f"and 'prediction_output_error' datasets, got {cov_est.shape} and " + f"{error.shape}") + + # Estimate covariance + error = error.ravel() + cov_est = cov_est.reshape(-1, *error.shape) + pearson_coeffs = _corrcoef(cov_est, rowvar=False) + covariance = pearson_coeffs * np.ma.outer(error, error) + + # Collapse covariance + weights = weights.ravel() + weights = np.outer(weights, weights) + error = np.ma.sqrt(np.ma.sum(covariance * weights)) + return error + + +def _estim_cov_identical_shape(squared_error_cube, cov_est_cube, weights): + """Collapse estimated covariance. + + Estimate error by approximating covariance from dataset with identical + shape as errors themselves (for a better estimate, the dataset used to + estimate covariance needs at least one dimension more than the errors). 
+ + """ + logger.info( + "Estimating true error from covariance derived from " + "'prediction_input' dataset with same shape as errors " + "('prediction_output_error')") + error = np.ma.sqrt(squared_error_cube.data) + error = np.ma.filled(error, 0.0) + cov_est = np.ma.array(cov_est_cube.data) + if cov_est.ndim > 2: + error = error.reshape(error.shape[0], -1) + cov_est = cov_est.reshape(cov_est.shape[0], -1) + weights = weights.reshape(weights.shape[0], -1) + + # Pearson coefficients (= normalized covariance) over both dimensions + pearson_dim0 = _corrcoef(cov_est, weights=weights) + pearson_dim1 = _corrcoef(cov_est, rowvar=False, weights=weights) + + # Covariances + cov_dim0 = (np.einsum('...i,...j->...ij', error, error) * + np.einsum('...i,...j->...ij', weights, weights) * pearson_dim1) + cov_dim1 = (np.einsum('i...,j...->...ij', error, error) * + np.einsum('i...,j...->...ij', weights, weights) * pearson_dim0) + + # Errors over dimensions + error_dim0 = np.ma.sqrt(np.ma.sum(cov_dim0, axis=(1, 2))) + error_dim1 = np.ma.sqrt(np.ma.sum(cov_dim1, axis=(1, 2))) + + # Collapse further (all weights are already included in first step) + cov_order_0 = pearson_dim0 * np.ma.outer(error_dim0, error_dim0) + cov_order_1 = pearson_dim1 * np.ma.outer(error_dim1, error_dim1) + error_order_0 = np.ma.sqrt(np.ma.sum(cov_order_0)) + error_order_1 = np.ma.sqrt(np.ma.sum(cov_order_1)) + logger.debug( + "Found real errors %e and %e after collapsing with different " + "orderings, using maximum", error_order_0, error_order_1) + + # Ordering of collapsing matters, maximum is used + return max([error_order_0, error_order_1]) + + +def _estimate_real_error(cfg, squared_error_cube, cov_est_dataset, basepath): + """Estimate real error using estimated covariance.""" + logger.debug( + "Estimating real error using estimated covariance from " + "'prediction_input' dataset %s", cov_est_dataset['filename']) + cov_est_cube = iris.load_cube(cov_est_dataset['filename']) + + # Check dimensions + if cov_est_cube.ndim < 2: + raise ValueError( + f"Expected at least 2D 'prediction_input' dataset for covariance " + f"structure estimation, got {cov_est_cube.ndim:d}D dataset") + if cov_est_cube.ndim < squared_error_cube.ndim: + raise ValueError( + f"Expected number of dimensions of 'prediction_input' dataset " + f"used for covariance structure estimation to be greater than or " + f"equal the number of dimensions of the errors datasets, got " + f"{cov_est_cube.ndim:d} and {squared_error_cube.ndim}") + + # Check if all dimensions are collapsed + (weights, units, coords) = _get_all_weights(cfg, squared_error_cube) + if len(coords) < squared_error_cube.ndim: + raise ValueError( + f"Estimating real error using 'prediction_input' dataset for " + f"covariance structure estimation is only possible if all " + f"{squared_error_cube.ndim:d} dimensions of the error cube are " + f"collapsed, got only {len(coords):d} ({coords})") + + # Estimate error + if cov_est_cube.shape == squared_error_cube.shape: + error = _estim_cov_identical_shape(squared_error_cube, cov_est_cube, + weights) + else: + error = _estim_cov_differing_shape(cfg, squared_error_cube, + cov_est_cube, weights) + + # Create cube (collapse using dummy operation) + with warnings.catch_warnings(): + warnings.filterwarnings( + 'ignore', + message="Collapsing spatial coordinate 'latitude' without " + "weighting", + category=UserWarning, + module='iris', + ) + real_error = squared_error_cube.collapsed(coords, iris.analysis.MEAN) + real_error.data = error + 
mlr.square_root_metadata(real_error)
+    real_error.units *= units
+    _convert_units(cfg, real_error)
+    real_error.attributes['error_type'] = 'estimated_real_error'
+
+    # Save cube
+    new_path = basepath.replace('.nc', '_estimated.nc')
+    io.iris_save(real_error, new_path)
+    logger.info("Estimated real error (using estimated covariance): %s %s",
+                real_error.data, real_error.units)
+
+    # Provenance
+    ancestors = [
+        *_get_ancestors(squared_error_cube),
+        cov_est_dataset['filename'],
+    ]
+    _write_provenance(cfg, 'Estimated', real_error, new_path, ancestors)
+
+
+def _get_all_weights(cfg, cube, power=1):
+    """Get all necessary weights (including norm for mean calculation)."""
+    cfg = deepcopy(cfg)
+    all_coords = []
+    weights = np.ones(cube.shape)
+    units = Unit('1')
+
+    # Iterate over operations
+    for operation in ('sum', 'mean'):
+        normalize = (operation == 'mean')
+        coords = cfg.get(operation, [])
+        if coords == 'all':
+            coords = [c.name() for c in cube.coords(dim_coords=True)]
+        all_coords.extend(coords)
+        horizontal_coords = _get_horizontal_coordinates(coords)
+
+        # Horizontal coordinates
+        if horizontal_coords:
+            (horizontal_weights, area_units) = _get_horizontal_weights(
+                cfg, cube, power=power)
+            weights *= horizontal_weights
+            if operation == 'sum':
+                units *= area_units
+            weights /= _get_normalization_factor(
+                horizontal_weights, horizontal_coords, cube,
+                normalize=normalize)**power
+            for coord in horizontal_coords:
+                coords.remove(coord)
+
+        # Time coordinate
+        if 'time' in coords:
+            (time_weights, time_units) = _get_time_weights(cfg, cube,
+                                                           power=power)
+            if operation == 'sum':
+                units *= time_units
+            if time_weights is not None:
+                weights *= time_weights
+                weights /= _get_normalization_factor(
+                    time_weights, ['time'], cube, normalize=normalize)**power
+            coords.remove('time')
+
+        # Remaining coordinates
+        weights /= _get_normalization_factor(
+            None, coords, cube, normalize=normalize)**power
+
+    # Apply mask of cube to weights
+    weights = np.ma.array(weights, mask=np.ma.getmaskarray(cube.data))
+    logger.debug("Found coordinates %s to collapse over", all_coords)
+    logger.debug("Found units '%s' for weights", units)
+    return (weights, units, all_coords)
+
+
+def _get_ancestors(cube):
+    """Extract ancestors from ``filename`` attribute of cube."""
+    ancestors = cube.attributes['filename'].split('|')
+    return ancestors
+
+
+def _get_covariance_dataset(error_datasets, ref_cube):
+    """Extract covariance dataset."""
+    explanation = ("i.e. dataset with short_name == '*_cov' among "
+                   "'prediction_output_error' datasets")
+    cov_datasets = []
+    other_datasets = []
+
+    # Get covariance dataset(s)
+    for dataset in error_datasets:
+        if '_cov' in dataset['short_name']:
+            cov_datasets.append(dataset)
+        else:
+            other_datasets.append(dataset)
+    if not cov_datasets:
+        logger.warning(
+            "No covariance dataset (%s) found, calculation of real error not "
+            "possible", explanation)
+        return (None, other_datasets)
+    if len(cov_datasets) > 1:
+        filenames = [d['filename'] for d in cov_datasets]
+        raise ValueError(
+            f"Expected at most one covariance dataset ({explanation}), got "
+            f"{len(cov_datasets):d}:\n{pformat(filenames)}")
+
+    # Check shape
+    cov_cube = iris.load_cube(cov_datasets[0]['filename'])
+    cov_cube.attributes['filename'] = cov_datasets[0]['filename']
+    ref_size = np.ma.array(ref_cube.data).compressed().shape[0]
+    if cov_cube.shape != (ref_size, ref_size):
+        raise ValueError(
+            f"Expected shape of covariance dataset to be "
+            f"{(ref_size, ref_size)}, got {cov_cube.shape} (after removal of "
+            f"all missing values)")
+    return (cov_cube, other_datasets)
+
+
+def _get_horizontal_weights(cfg, cube, power=1):
+    """Calculate weights for horizontal coordinates."""
+    weights = mlr.get_horizontal_weights(
+        cube,
+        area_weighted=cfg['area_weighted'],
+        landsea_fraction_weighted=cfg.get('landsea_fraction_weighted'),
+    )
+    weights = weights**power
+    if cfg['area_weighted']:
+        units = Unit('m2')**power
+    else:
+        units = Unit('1')**power
+    return (weights, units)
+
+
+def _get_horizontal_coordinates(coords):
+    """Extract horizontal coordinates from :obj:`list` of coordinates."""
+    horizontal_coords = []
+    if 'latitude' in coords:
+        horizontal_coords.append('latitude')
+    if 'longitude' in coords:
+        horizontal_coords.append('longitude')
+    return horizontal_coords
+
+
+def _get_normalization_factor(weights, coords, cube, normalize=False):
+    """Get normalization constant for calculation of means."""
+    if not normalize:
+        return 1.0
+    if not coords:
+        return 1.0
+    if weights is None:
+        weights = np.ones(cube.shape)
+    weights = np.ma.array(weights.copy(), mask=np.ma.getmaskarray(cube.data))
+    coord_dims = []
+    for coord in coords:
+        if cube.coord_dims(coord):
+            coord_dims.extend(cube.coord_dims(coord))
+    coord_dims = tuple(coord_dims)
+    if not coord_dims:
+        norm = np.ma.ravel(weights)
+    else:
+        norm = np.ma.ravel(np.ma.sum(weights, axis=coord_dims))
+    norm = norm[~np.ma.getmaskarray(norm)]
+    return norm[0]
+
+
+def _get_time_weights(cfg, cube, power=1):
+    """Calculate time weights."""
+    time_weights = None
+    time_units = mlr.get_absolute_time_units(cube.coord('time').units)
+    if cfg['time_weighted']:
+        time_weights = mlr.get_time_weights(cube)
+        time_weights = time_weights**power
+    return (time_weights, time_units**power)
+
+
+def _identical_trailing_dimensions(larger_array, smaller_array):
+    """Check if trailing dimensions of two arrays are identical."""
+    if larger_array.ndim < smaller_array.ndim:
+        raise ValueError(
+            f"Expected array with higher number of dimensions as first "
+            f"argument, got {larger_array.ndim:d}D array as first argument, "
+            f"{smaller_array.ndim:d}D array as second")
+    return larger_array.shape[-smaller_array.ndim:] == smaller_array.shape
+
+
+def _write_provenance(cfg, title, error_cube, netcdf_path, ancestors):
+    """Write provenance record."""
+    caption = f'{title} {error_cube.long_name}'
+    attrs = error_cube.attributes
+    if 'mlr_model_name' in attrs:
+        caption += f" of MLR model {attrs['mlr_model_name']}"
+    if
'prediction_name' in attrs: + caption += f" for prediction {attrs['prediction_name']}" + caption += '.' + record = { + 'ancestors': ancestors, + 'authors': ['schlund_manuel'], + 'caption': caption, + 'references': ['schlund20jgr'], + } + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(netcdf_path, record) + + +def check_cfg(cfg): + """Check options of configuration and catch errors.""" + for operation in ('sum', 'mean'): + if operation in cfg: + cfg[operation] = sorted(list(set(cfg[operation]))) + for coord in cfg.get('sum', []): + if coord in cfg.get('mean', []): + raise ValueError(f"Coordinate '{coord}' given in 'sum' and 'mean'") + + +def postprocess_errors(cfg, ref_cube, error_datasets, cov_estim_datasets): + """Postprocess errors.""" + logger.info( + "Postprocessing errors using mean prediction cube %s as reference", + ref_cube.summary(shorten=True)) + + # Extract covariance + (cov_cube, + error_datasets) = _get_covariance_dataset(error_datasets, ref_cube) + + # Extract squared errors + squared_error_cube = mlr.get_squared_error_cube(ref_cube, error_datasets) + + # Extract variance from covariance if desired + if cfg.get('add_var_from_cov', True) and cov_cube is not None: + var = np.ma.empty(ref_cube.shape, dtype=ref_cube.dtype) + mask = np.ma.getmaskarray(ref_cube.data) + var[mask] = np.ma.masked + var[~mask] = np.diagonal(cov_cube.data.copy()) + squared_error_cube.data += var + logger.debug( + "Added variance calculated from covariance to squared error " + "datasets") + if not error_datasets: + error_datasets = True + + # Extract basename for error cubes + basepath = mlr.get_new_path(cfg, ref_cube.attributes['filename']) + basepath = basepath.replace('.nc', '_error.nc') + + # Lower and upper error bounds + if error_datasets: + _calculate_lower_error_bound(cfg, squared_error_cube, basepath) + _calculate_upper_error_bound(cfg, squared_error_cube, basepath) + + # Estimated real error using estimated covariance + if cov_estim_datasets: + _estimate_real_error(cfg, squared_error_cube, + cov_estim_datasets[0], basepath) + + # Real error + if cov_cube is not None: + _calculate_real_error(cfg, ref_cube, cov_cube, basepath) + + +def postprocess_mean(cfg, cube, data): + """Postprocess mean prediction cube.""" + logger.info("Postprocessing mean prediction cube %s", + cube.summary(shorten=True)) + cube = _collapse_regular_cube(cfg, cube) + _convert_units(cfg, cube) + new_path = mlr.get_new_path(cfg, data['filename']) + io.iris_save(cube, new_path) + logger.info("Mean prediction: %s %s", cube.data, cube.units) + _write_provenance(cfg, "Postprocessed", cube, new_path, [data['filename']]) + + +def _reshape_covariance(cov_est, error, dim_map): + """Reshape covariance estimation input to match errors.""" + if len(dim_map) != error.ndim: + raise ValueError( + f"Dimension mapping for covariance estimation " + f"'cov_estimate_dim_map' needs to cover all dimensions of " + f"'prediction_output_error' cubes with shape {error.shape}, " + f"got {dim_map}") + if len(set(dim_map)) != len(dim_map): + raise ValueError( + f"Duplicate dimensions in 'cov_estimate_dim_map' are not " + f"allowed, got {dim_map}") + logger.debug( + "Reshaping 'prediction_input' with shape %s to contain dimensions " + "of 'prediction_output_error' %s as last dimensions using mapping " + "%s", cov_est.shape, error.shape, dim_map) + + # Get target shape + indices = list(range(cov_est.ndim)) + for dim in dim_map: + if dim not in indices: + raise ValueError( + f"Dimensional index {dim:d} in 'cov_estimate_dim_map' is 
out " + f"of range for {cov_est.ndim:d}D 'prediction_input' dataset " + f"used for covariance estimation") + indices.remove(dim) + indices.append(dim) + new_shape = tuple(np.array(cov_est.shape)[indices]) + + # Broadcast to new shape + dim_map_for_broadcasting = [] + for idx in range(cov_est.ndim): + dim_map_for_broadcasting.append(indices.index(idx)) + cov_est = iris.util.broadcast_to_shape(cov_est, new_shape, + dim_map_for_broadcasting) + logger.info( + "Reshaped 'prediction_input' for covariance estimation to %s", + cov_est.shape) + return cov_est + + +def split_datasets(datasets, tag, pred_name): + """Split datasets into mean and error.""" + grouped_data = group_metadata(datasets, 'var_type') + + # Mean/reference dataset + mean = grouped_data.get('prediction_output', []) + if len(mean) != 1: + filenames = [d['filename'] for d in mean] + raise ValueError( + f"Expected exactly one 'prediction_output' dataset for tag " + f"'{tag}' of prediction '{pred_name}', got {len(mean):d}:\n" + f"{pformat(filenames)}") + logger.info( + "Found mean prediction dataset ('prediction_output') for tag '%s' of " + "prediction '%s': %s (used as reference)", tag, pred_name, + mean[0]['filename']) + + # Errors + error = grouped_data.get('prediction_output_error', []) + if not error: + logger.warning( + "No 'prediction_output_error' datasets for tag '%s' of prediction " + "'%s' found, error calculation not possible (not searching for " + "'prediction_input' datasets for covariance estimation, either)", + tag, pred_name) + cov_estimation = [] + else: + logger.info( + "Found error datasets ('prediction_output_error') for tag '%s' of " + "prediction '%s':", tag, pred_name) + logger.info(pformat([d['filename'] for d in error])) + + # Estimation for covariance + cov_estimation = grouped_data.get('prediction_input', []) + if not cov_estimation: + logger.warning( + "No 'prediction_input' dataset for tag '%s' of prediction " + "'%s' found, real error estimation using estimated covariance " + "structure not possible", tag, pred_name) + elif len(cov_estimation) > 1: + filenames = [d['filename'] for d in cov_estimation] + raise ValueError( + f"Expected at most one 'prediction_input' dataset for tag " + f"'{tag}' of prediction '{pred_name}', got " + f"{len(cov_estimation):d}:\n{pformat(filenames)}") + else: + logger.info( + "Found 'prediction_input' dataset for covariance structure " + "estimation for tag '%s' of prediction '%s': %s", tag, + pred_name, cov_estimation[0]['filename']) + + return (mean[0], error, cov_estimation) + + +def main(cfg): + """Run the diagnostic.""" + warnings.filterwarnings( + 'ignore', + message='Using DEFAULT_SPHERICAL_EARTH_RADIUS', + category=UserWarning, + module='iris', + ) + input_data = mlr.get_input_data(cfg, + pattern=cfg.get('pattern'), + ignore=cfg.get('ignore')) + + # Check cfg + check_cfg(cfg) + cfg.setdefault('area_weighted', True) + cfg.setdefault('time_weighted', True) + + # Process data + for (tag, tag_datasets) in group_metadata(input_data, 'tag').items(): + logger.info("Processing tag '%s'", tag) + grouped_data = group_metadata(tag_datasets, 'prediction_name') + for (pred_name, datasets) in grouped_data.items(): + logger.info("Processing prediction '%s'", pred_name) + (dataset, error_datasets, + cov_estim_datastets) = split_datasets(datasets, tag, pred_name) + + # Extract cubes + logger.debug( + "Loaded mean prediction cube from '%s' (used as reference)", + dataset['filename']) + cube = iris.load_cube(dataset['filename']) + cube.attributes['filename'] = dataset['filename'] + if 
cube.ndim < 1: + raise ValueError( + f"Postprocessing scalar dataset '{dataset['filename']}' " + f"not supported yet") + + # Process mean prediction + postprocess_mean(cfg, cube, dataset) + + # Process errors + postprocess_errors(cfg, cube, error_datasets, cov_estim_datastets) + + +# Run main function when this script is called +if __name__ == '__main__': + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/mlr/preprocess.py b/esmvaltool/diag_scripts/mlr/preprocess.py new file mode 100644 index 0000000000..1e2908e825 --- /dev/null +++ b/esmvaltool/diag_scripts/mlr/preprocess.py @@ -0,0 +1,1181 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +"""Simple preprocessing of MLR model input. + +Description +----------- +This diagnostic performs preprocessing operations for datasets used as MLR +model input in a desired way. It can also be used to process output of MLR +models for plotting. + +Author +------ +Manuel Schlund (DLR, Germany) + +Project +------- +CRESCENDO + +Configuration options in recipe +------------------------------- +aggregate_by: dict, optional + Aggregate over given coordinates (dict values; given as :obj:`list` of + :obj:`str`) using a desired aggregator (dict key; given as :obj:`str`). + Allowed aggregators are ``'max'``, ``'mean'``, ``'median'``, ``'min'``, + ``'sum'``, ``'std'``, ``'var'``, and ``'trend'``. +apply_common_mask: bool, optional (default: False) + Apply common mask to all datasets. Requires identical shapes for all + datasets. +area_weighted: bool, optional (default: True) + Use weighted aggregation when collapsing over latitude and/or longitude + using ``collapse``. Weights are estimated using grid cell bounds. Only + possible for datasets on regular grids that contain ``latitude`` and + ``longitude`` coordinates. +argsort: dict, optional + Calculate :func:`numpy.ma.argsort` along given coordinate to get ranking. + The coordinate can be specified by the ``coord`` key. If ``descending`` is + set to ``True``, use descending order instead of ascending. +collapse: dict, optional + Collapse over given coordinates (dict values; given as :obj:`list` of + :obj:`str`) using a desired aggregator (dict key; given as :obj:`str`). + Allowed aggregators are ``'max'``, ``'mean'``, ``'median'``, ``'min'``, + ``'sum'``, ``'std'``, ``'var'``, and ``'trend'``. +convert_units_to: str, optional + Convert units of the input data. Can also be given as dataset option. +extract: dict, optional + Extract certain values (dict values, given as :obj:`int`, :obj:`float` or + iterable of them) for certain coordinates (dict keys, given as :obj:`str`). +extract_ignore_bounds: bool, optional (default: False) + If ``True``, ignore coordinate bounds when using ``extract`` or + ``extract_range``. If ``False``, consider coordinate bounds when using + ``extract`` or ``extract_range``. For time coordinates, bounds are always + ignored. +extract_range: dict, optional + Like ``extract``, but instead of specific values extract ranges (dict + values, given as iterable of exactly two :obj:`int` s or :obj:`float` s) + for certain coordinates (dict keys, given as :obj:`str`). +ignore: list of dict, optional + Ignore specific datasets by specifying multiple :obj:`dict` s of metadata. +landsea_fraction_weighted: str, optional + When given, use land/sea fraction for weighted aggregation when collapsing + over latitude and/or longitude using ``collapse``. Only possible if the + dataset contains ``latitude`` and ``longitude`` coordinates and for regular + grids. 
Must be one of ``'land'``, ``'sea'``.
+mask: dict of dict
+    Mask datasets. Keys have to be :mod:`numpy.ma` conversion operations (see
+    the :mod:`numpy.ma` documentation); the values are :obj:`dict` s with all
+    keyword arguments passed on to them.
+n_jobs: int (default: 1)
+    Maximum number of jobs spawned by this diagnostic script. Use ``-1`` to
+    use all processors (the value is passed on to :class:`joblib.Parallel`).
+normalize_by_mean: bool, optional (default: False)
+    Remove total mean of the dataset in the last step (resulting mean will be
+    0.0). Calculates weighted mean if ``area_weighted``, ``time_weighted`` or
+    ``landsea_fraction_weighted`` are set and the cube contains the
+    corresponding coordinates. Does not apply to error datasets.
+normalize_by_std: bool, optional (default: False)
+    Scale total standard deviation of the dataset in the last step (resulting
+    standard deviation will be 1.0).
+output_attributes: dict, optional
+    Write additional attributes to netcdf files, e.g. ``'tag'``.
+pattern: str, optional
+    Pattern matched against ancestor file names.
+ref_calculation: str, optional
+    Perform calculations involving reference dataset. Must be one of ``merge``
+    (simply merge two datasets by adding the data of the reference dataset as
+    :class:`iris.coords.AuxCoord` to the original dataset), ``add`` (add
+    reference dataset), ``divide`` (divide by reference dataset), ``multiply``
+    (multiply with reference dataset), ``subtract`` (subtract reference
+    dataset) or ``trend`` (use reference dataset as x axis for calculation of
+    linear trend along a specified axis, see ``ref_kwargs``).
+ref_kwargs: dict, optional
+    Keyword arguments for calculations involving reference datasets. Allowed
+    keyword arguments are:
+
+    * ``matched_by`` (:obj:`list` of :obj:`str`, default: ``[]``): Use a
+      given set of attributes to match datasets with their corresponding
+      reference datasets (specified by ``ref = True``).
+    * ``collapse_over`` (:obj:`str`, default: ``'time'``): Coordinate which
+      is collapsed. Only relevant when ``ref_calculation`` is set to
+      ``trend``.
+return_trend_stderr: bool, optional (default: True)
+    Return standard error of slope in case of trend calculations (as
+    ``var_type`` ``prediction_input_error``).
+scalar_operations: dict, optional
+    Operations involving scalars. Allowed keys are ``add``, ``divide``,
+    ``multiply`` or ``subtract``. The corresponding values (:obj:`float` or
+    :obj:`int`) are scalars that are used with the operations.
+time_weighted: bool, optional (default: True)
+    Use weighted aggregation when collapsing over time dimension using
+    ``collapse``. Weights are estimated using time bounds.
+unify_coords_to: dict, optional
+    If given, replace coordinates of all datasets with those of a reference
+    cube (if necessary and possible, broadcast beforehand). The reference
+    dataset is determined by keyword arguments given to this option (keyword
+    arguments must point to exactly one dataset).
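+
+A minimal sketch of a typical option set as this script receives it (the
+values are hypothetical)::
+
+    cfg = {
+        'collapse': {'mean': ['latitude', 'longitude']},
+        'aggregate_by': {'mean': 'month_number'},
+        'mask': {'masked_less': {'value': 0.0}},
+        'normalize_by_mean': True,
+    }
+
+Here ``masked_less`` must be a masking function of :mod:`numpy.ma`; its value
+is passed on as keyword arguments, i.e. ``np.ma.masked_less(data, value=0.0)``.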
+ +""" + +import datetime +import functools +import logging +import os +import warnings +from copy import deepcopy + +import dask.array as da +import iris +import numpy as np +from cf_units import Unit +from joblib import Parallel, delayed +from scipy import stats + +from esmvaltool.diag_scripts import mlr +from esmvaltool.diag_scripts.shared import ( + ProvenanceLogger, + get_diagnostic_filename, + io, + run_diagnostic, + select_metadata, +) + +logger = logging.getLogger(os.path.basename(__file__)) + +AGGREGATORS = { + 'max': iris.analysis.MAX, + 'mean': iris.analysis.MEAN, + 'median': iris.analysis.MEDIAN, + 'min': iris.analysis.MIN, + 'std': iris.analysis.STD_DEV, + 'sum': iris.analysis.SUM, + 'var': iris.analysis.VARIANCE, +} + + +def _add_categorized_time_coords(cube, coords, aggregator): + """Add categorized time coordinates to cube.""" + for coord_name in coords: + if cube.coords(coord_name): + continue + if hasattr(iris.coord_categorisation, f'add_{coord_name}'): + getattr(iris.coord_categorisation, f'add_{coord_name}')(cube, + 'time') + logger.debug("Added coordinate '%s' to cube", coord_name) + else: + raise ValueError( + f"Cannot aggregate over coordinate(s) '{coords}' using " + f"'{aggregator}': Categorized coordinate '{coord_name}' is " + f"not a coordinate of cube {cube.summary(shorten=True)} and " + f"cannot be added via iris.coord_categorisation") + + +def _apply_trend_aggregator(cfg, cube, data, coord_name): + """Apply aggregator ``trend`` to cube.""" + return_stderr = _return_stderr(cfg, data) + units = cube.units + + # Get corresponding dimensional coordinate + coord_dims = cube.coord_dims(coord_name) + if len(coord_dims) != 1: + raise ValueError( + f"Trend aggregation along coordinate '{coord_name}' requires 1D " + f"coordinate, got {len(coord_dims):d}D coordinate") + dim_coord = cube.coord(dim_coords=True, dimensions=coord_dims[0]) + + # Calculate trends in parallel + parallel = Parallel(n_jobs=cfg['n_jobs']) + coord_values = np.unique(cube.coord(coord_name).points) + cube_slices = [cube.extract(iris.Constraint(**{coord_name: val})) for + val in coord_values] + all_cubes = parallel( + [delayed(_calculate_slope_along_coord)(cube_slice, dim_coord.name(), + return_stderr=return_stderr) + for cube_slice in cube_slices] + ) + + # Merge output (Original units might get lost in pool) + cubes = [tup[0] for tup in all_cubes] + cube = iris.cube.CubeList(cubes).merge_cube() + cube.units = units + if return_stderr: + cube_stderr = iris.cube.CubeList( + [tup[1] for tup in all_cubes]).merge_cube() + cube_stderr.units = units + else: + cube_stderr = None + units = _get_coord_units(cube, dim_coord.name()) + (cube, data) = _set_trend_metadata(cfg, cube, cube_stderr, data, units) + data['trend'] = f'aggregated along coordinate {coord_name}' + return (cube, data) + + +def _calculate_slope_along_coord(cube, coord_name, return_stderr=True): + """Calculate slope of a cube along a given coordinate.""" + coord = cube.coord(coord_name) + coord_dims = cube.coord_dims(coord_name) + if len(coord_dims) != 1: + raise ValueError( + f"Trend calculation along coordinate '{coord_name}' requires " + f"1D coordinate, got {len(coord_dims):d}D coordinate") + + # Get slope and error if desired + x_data = coord.points + y_data = np.moveaxis(cube.data, coord_dims[0], -1) + calc_slope = np.vectorize(_get_slope, excluded=['x_arr'], + signature='(n),(n)->()') + slope = calc_slope(x_data, y_data) + if return_stderr: + calc_slope_stderr = np.vectorize(_get_slope_stderr, excluded=['x_arr'], + 
signature='(n),(n)->()') + slope_stderr = calc_slope_stderr(x_data, y_data) + else: + slope_stderr = None + + # Apply dummy aggregator for correct cell method and set data + aggregator = iris.analysis.Aggregator('trend', _remove_axis) + with warnings.catch_warnings(): + warnings.filterwarnings( + 'ignore', + message='Collapsing a non-contiguous coordinate', + category=UserWarning, + module='iris', + ) + cube = cube.collapsed(coord_name, aggregator) + cube.data = np.ma.masked_invalid(slope) + if slope_stderr is not None: + cube_stderr = cube.copy() + cube_stderr.data = np.ma.masked_invalid(slope_stderr) + else: + cube_stderr = None + return (cube, cube_stderr) + + +def _check_cubes(cube, ref_cube, ref_option): + """Check cube and reference cube.""" + if cube.shape != ref_cube.shape: + raise ValueError( + f"Expected identical shapes for data and reference data, got " + f"{cube.shape} and {ref_cube.shape}") + if ref_option == 'subtract' and cube.units != ref_cube.units: + logger.warning( + "Got different units for original dataset and the corresponding " + "reference dataset ('%s' and '%s') for operation '%s'", + cube.units, ref_cube.units, ref_option) + + +def _collapse_over(cfg, cube, data, coords, aggregator): + """Collapse over cube.""" + coords = deepcopy(coords) + iris_op = AGGREGATORS[aggregator] + if aggregator not in ('mean', 'sum'): + cube = cube.collapsed(coords, iris_op) + return (cube, data) + + # Latitude and/or longitude (weighted if desired) + horizontal_coords = _get_horizontal_coordinates(coords) + if horizontal_coords: + horizontal_weights = _get_horizontal_weights(cfg, cube) + cube = cube.collapsed(horizontal_coords, iris_op, + weights=horizontal_weights) + for coord in horizontal_coords: + coords.remove(coord) + if aggregator == 'sum' and cfg['area_weighted']: + cube.units *= Unit('m2') + data['units'] = str(cube.units) + + # Time (weighted if desired) + if 'time' in coords: + time_weights = _get_time_weights(cfg, cube) + time_units = mlr.get_absolute_time_units(cube.coord('time').units) + cube = cube.collapsed(['time'], iris_op, weights=time_weights) + coords.remove('time') + if aggregator == 'sum' and time_weights is not None: + cube.units *= time_units + data['units'] = str(cube.units) + + # Remaining operations + if coords: + cube = cube.collapsed(coords, iris_op) + + return (cube, data) + + +def _coord_constraint(cell, value, coord_name, ignore_bounds=False, + interpret_as_range=False): + """Callable that can be used to form a :class:`iris.Constraint`.""" + if coord_name == 'time' or ignore_bounds: + cell_object = cell.point + else: + cell_object = cell + if interpret_as_range: + return value[0] <= cell_object <= value[1] + try: + return cell_object in value + except TypeError: + return cell_object == value + + +def _fail_if_stderr(data, description): + """Raise exception of data is a standard error.""" + if 'stderr' in data: + raise ValueError( + f"{description} is not supported with standard errors yet") + + +def _get_all_weights(cfg, cube): + """Get all desired weights for a cube.""" + weights = mlr.get_all_weights( + cube, area_weighted=cfg['area_weighted'], + time_weighted=cfg['time_weighted'], + landsea_fraction_weighted=cfg.get('landsea_fraction_weighted')) + return weights + + +def _get_constrained_cube(cube, constraints): + """Merge multiple :class:`iris.Constraint` s and apply them to cube.""" + constraint = constraints[0] + for new_constraint in constraints[1:]: + constraint &= new_constraint + return cube.extract(constraint) + + +def 
_get_coord_units(cube, coord_name): + """Get units of cube's coordinate.""" + coord = cube.coord(coord_name) + if coord_name == 'time': + units = mlr.get_absolute_time_units(coord.units) + else: + units = coord.units + return units + + +def _get_error_datasets(input_data, **kwargs): + """Extract error datasets from input data.""" + input_data = select_metadata(input_data, **kwargs) + error_data = [] + for dataset in input_data: + if dataset.get('stderr', False): + error_data.append(dataset) + return error_data + + +def _get_horizontal_coordinates(coords): + """Extract horizontal coordinates from :obj:`list` of coordinates.""" + horizontal_coords = [] + if 'latitude' in coords: + horizontal_coords.append('latitude') + if 'longitude' in coords: + horizontal_coords.append('longitude') + return horizontal_coords + + +def _get_horizontal_weights(cfg, cube): + """Get weights for horizontal dimensions.""" + weights = mlr.get_horizontal_weights( + cube, + area_weighted=cfg['area_weighted'], + landsea_fraction_weighted=cfg.get('landsea_fraction_weighted')) + return weights + + +def _get_ref_calc(cfg, dataset, ref_datasets, ref_option): + """Perform calculations involving reference datasets for regular data.""" + ref_kwargs = cfg.get('ref_kwargs', {}) + ref_dataset = _get_ref_dataset(dataset, ref_datasets, **ref_kwargs) + cube = dataset['cube'] + ref_cube = ref_dataset['cube'] + _check_cubes(cube, ref_cube, ref_option) + dataset['original_cube'] = cube.copy() + dataset['ref_cube'] = ref_cube.copy() + if ref_option == 'merge': + aux_coord = cube_to_aux_coord(ref_cube) + cube.add_aux_coord(aux_coord, np.arange(cube.ndim)) + suffix = None + elif ref_option == 'add': + cube.data += ref_cube.data + suffix = 'plus ref' + elif ref_option == 'multiply': + cube.data *= ref_cube.data + cube.units *= ref_cube.units + dataset['units'] = str(cube.units) + suffix = 'multiplied by ref' + elif ref_option == 'divide': + cube.data /= ref_cube.data + cube.units /= ref_cube.units + dataset['units'] = str(cube.units) + suffix = 'divided by ref' + elif ref_option == 'subtract': + cube.data -= ref_cube.data + suffix = 'minus ref' + elif ref_option == 'trend': + (cube, cube_stderr) = _get_trend_relative_to_ref( + cfg, dataset, ref_cube, + collapse_over=ref_kwargs.get('collapse_over')) + (cube, dataset) = _set_trend_metadata(cfg, cube, cube_stderr, dataset, + ref_cube.units) + suffix = 'relative to ref' + else: + raise ValueError(f"Got invalid ref option '{ref_option}'") + if suffix is not None: + suffix_no_space = suffix.replace(' ', '_') + dataset['standard_name'] = None + dataset['short_name'] += f'_{suffix_no_space}' + dataset['long_name'] += f' ({suffix})' + exp = ('' if ref_dataset.get('exp') is None else + f", experiment {ref_dataset['exp']}") + dataset['reference_data'] = ( + f"{ref_dataset['short_name']} of {ref_dataset['dataset']} " + f"(project {ref_dataset['project']}{exp}) for years " + f"{ref_dataset['start_year']} to {ref_dataset['end_year']}") + dataset['cube'] = cube + return dataset + + +def _get_ref_calc_stderr(cfg, dataset, ref_datasets, regular_datasets, + ref_option): + """Perform calculations involving reference datasets for error data.""" + ref_kwargs = cfg.get('ref_kwargs', {}) + + # Extract reference dataset (error) + ref_dataset = _get_ref_dataset(dataset, ref_datasets, **ref_kwargs) + + # Extract regular dataset (corresponding mean to error) + excluded_keys = ['var_type', 'short_name', 'standard_name', 'long_name', + 'variable_group', 'diagnostic', 'filename', 'cube', + 'recipe_dataset_index', 
'stderr', 'alias', 'units']
+    kwargs = {key: dataset[key] for key in dataset if key not in excluded_keys}
+    reg_dataset = select_metadata(regular_datasets, **kwargs)
+    if len(reg_dataset) != 1:
+        raise ValueError(
+            f"Expected exactly one regular dataset for error dataset "
+            f"{dataset}, got {len(reg_dataset):d}")
+    reg_dataset = reg_dataset[0]
+
+    # Perform calculations
+    cube = dataset['cube']
+    ref_cube = ref_dataset['cube']
+    reg_cube = reg_dataset['cube']
+    _check_cubes(cube, ref_cube, ref_option)
+    if ref_option == 'merge':
+        aux_coord = cube_to_aux_coord(ref_cube)
+        cube.add_aux_coord(aux_coord, np.arange(cube.ndim))
+    elif ref_option == 'divide':
+        error = np.ma.abs(reg_cube.data) * np.ma.sqrt(
+            (cube.data / reg_dataset['original_cube'].data)**2 +
+            (ref_cube.data / reg_dataset['ref_cube'].data)**2)
+        cube.data = error
+    elif ref_option == 'subtract':
+        cube.data = np.ma.sqrt(cube.data**2 + ref_cube.data**2)
+    elif ref_option == 'trend':
+        raise ValueError(
+            "Calculations involving reference datasets with option 'trend' "
+            "are not supported for error datasets yet; errors are calculated "
+            "from the original dataset using the standard error of slopes")
+    else:
+        raise NotImplementedError(
+            f"Calculations involving reference datasets with option "
+            f"'{ref_option}' are not supported yet")
+    cube.units = reg_cube.units
+    dataset['standard_name'] = reg_dataset['standard_name']
+    dataset['short_name'] = reg_dataset['short_name']
+    dataset['long_name'] = reg_dataset['long_name']
+    dataset['units'] = reg_dataset['units']
+    dataset['reference_data'] = reg_dataset['reference_data']
+    dataset['cube'] = cube
+    return dataset
+
+
+def _get_ref_dataset(dataset, ref_datasets, **ref_kwargs):
+    """Extract reference dataset for a given dataset."""
+    metadata = ref_kwargs.get('matched_by', [])
+    kwargs = {m: dataset[m] for m in metadata if m in dataset}
+    ref_dataset = select_metadata(ref_datasets, **kwargs)
+    if len(ref_dataset) != 1:
+        raise ValueError(
+            f"Expected exactly one reference dataset (with attribute ref "
+            f"== True) for dataset {dataset}, got {len(ref_dataset):d}.
" + f"Consider extending list of metadata for option 'matched_by' in " + f"'ref_kwargs' (used {kwargs})") + ref_dataset = ref_dataset[0] + return ref_dataset + + +def _get_single_constraint(cube, coord_name, val, + ignore_bounds=False, interpret_as_range=False): + """Get single :class:`iris.Constraint`.""" + if coord_name == 'time': + time_units = cube.coord('time').units + val = time_units.num2date(val) + if interpret_as_range: + try: + len_range = len(val) + except TypeError: + raise TypeError( + f"Expected iterable for values of 'extract_range' for " + f"coordinate '{coord_name}', got '{val}'") + if len_range != 2: + raise ValueError( + f"Expected exactly two elements for range of '{coord_name}' " + f"in 'extract_range', got {len_range:d} ({val})") + logger.debug("Extracting range %s for coordinate '%s'", val, + coord_name) + coord_vals = functools.partial(_coord_constraint, value=val, + coord_name=coord_name, + ignore_bounds=ignore_bounds, + interpret_as_range=interpret_as_range) + return iris.Constraint(**{coord_name: coord_vals}) + + +def _get_slope(x_arr, y_arr): + """Get slope of linear regression of two (masked) arrays.""" + if np.ma.is_masked(y_arr): + x_arr = x_arr[~y_arr.mask] + y_arr = y_arr[~y_arr.mask] + if len(y_arr) < 2: + return np.nan + reg = stats.linregress(x_arr, y_arr) + return reg.slope + + +def _get_slope_stderr(x_arr, y_arr): + """Get standard error of linear slope of two (masked) arrays.""" + if np.ma.is_masked(y_arr): + x_arr = x_arr[~y_arr.mask] + y_arr = y_arr[~y_arr.mask] + if len(y_arr) < 2: + return np.nan + reg = stats.linregress(x_arr, y_arr) + return reg.stderr + + +def _get_time_weights(cfg, cube): + """Calculate time weights.""" + time_weights = None + if cfg['time_weighted']: + time_weights = mlr.get_time_weights(cube) + return time_weights + + +def _get_trend_relative_to_ref(cfg, data, ref_cube, collapse_over=None): + """Calculate linear trend relative to reference dataset.""" + if collapse_over is None: + collapse_over = 'time' + cube = data['cube'] + return_stderr = _return_stderr(cfg, data) + + # Get coordinate + coord_dims = cube.coord_dims(collapse_over) + if len(coord_dims) != 1: + raise ValueError( + f"Trend calculation involving reference dataset along coordinate " + f"'{collapse_over}' requires 1D coordinate, got " + f"{len(coord_dims):d}D coordinate") + if ref_cube.coord_dims(collapse_over) != coord_dims: + raise ValueError( + f"Trend calculation involving reference dataset along coordinate " + f"'{collapse_over}' requires that the coordinate covers identical " + f"dimensions for the dataset and reference dataset, got " + f"{coord_dims} and {ref_cube.coord_dims(collapse_over)}") + + # Get slope and error if desired + x_data = np.moveaxis(ref_cube.data, coord_dims[0], -1) + y_data = np.moveaxis(cube.data, coord_dims[0], -1) + calc_slope = np.vectorize(_get_slope, signature='(n),(n)->()') + slope = calc_slope(x_data, y_data) + if return_stderr: + calc_slope_stderr = np.vectorize(_get_slope_stderr, + signature='(n),(n)->()') + slope_stderr = calc_slope_stderr(x_data, y_data) + else: + slope_stderr = None + + # Apply dummy aggregator for correct cell method and set data + aggregator = iris.analysis.Aggregator('trend using ref', _remove_axis) + with warnings.catch_warnings(): + warnings.filterwarnings( + 'ignore', + message='Collapsing a non-contiguous coordinate', + category=UserWarning, + module='iris', + ) + cube = cube.collapsed(collapse_over, aggregator) + cube.data = np.ma.masked_invalid(slope) + if slope_stderr is not None: + cube_stderr 
= cube.copy() + cube_stderr.data = np.ma.masked_invalid(slope_stderr) + else: + cube_stderr = None + return (cube, cube_stderr) + + +def _remove_axis(data, axis=None): + """Remove given axis of arrays by the first index of a given axis.""" + return np.take(data, 0, axis=axis) + + +def _return_stderr(cfg, data): + """Check if standard error should be returned.""" + return (data.get('var_type') == 'prediction_input' and + cfg['return_trend_stderr']) + + +def _promote_aux_coord(cube, data, coord_name): + """Promote auxiliary coordinate to dimensional coordinate.""" + aux_coords = [coord.name() for coord in cube.coords(dim_coords=False)] + if coord_name in aux_coords: + try: + iris.util.promote_aux_coord_to_dim_coord(cube, coord_name) + except ValueError as exc: + logger.debug( + "Could not promote coordinate '%s' to dimensional " + "coordinate: %s", coord_name, str(exc)) + else: + if isinstance(data.get('stderr'), dict): + stderr_cube = data['stderr']['cube'] + iris.util.promote_aux_coord_to_dim_coord(stderr_cube, + coord_name) + + +def _set_trend_metadata(cfg, cube, cube_stderr, data, units): + """Set correct metadata for trend calculation.""" + cube.units /= units + data['standard_name'] = None + data['short_name'] += '_trend' + data['long_name'] += ' (trend)' + data['units'] = str(cube.units) + if cube_stderr is not None: + cube_stderr.units /= units + stderr_data = deepcopy(data) + stderr_data = cache_cube(cfg, cube_stderr, stderr_data) + data['stderr'] = stderr_data + return (cube, data) + + +def add_standard_errors(input_data): + """Add calculated standard errors to list of data.""" + new_input_data = [] + for data in input_data: + if isinstance(data.get('stderr'), dict): + stderr_data = data.pop('stderr') + stderr_data['stderr'] = True + stderr_data['standard_name'] = None + stderr_data['short_name'] += '_stderr' + stderr_data['long_name'] += ' (Standard Error)' + stderr_data['var_type'] += '_error' + new_input_data.append(stderr_data) + logger.info("Added standard error for %s", data['filename']) + new_input_data.append(data) + return new_input_data + + +def aggregate_by(cfg, cube, data): + """Aggregate cube over specified coordinate.""" + for (aggregator, coords) in cfg.get('aggregate_by', {}).items(): + if not isinstance(coords, list): + coords = [coords] + if aggregator not in AGGREGATORS: + raise ValueError( + f"Expected one of {list(AGGREGATORS.keys())} as aggregator " + f"for 'aggregate_by', got '{aggregator}'") + iris_op = AGGREGATORS[aggregator] + logger.debug("Aggregating over coordinate(s) %s by calculating %s", + coords, aggregator) + _add_categorized_time_coords(cube, coords, aggregator) + cube = cube.aggregated_by(coords, iris_op) + if len(coords) == 1: + _promote_aux_coord(cube, data, coords[0]) + return (cube, data) + + +def aggregate_by_trend(cfg, cube, data): + """Aggregate cube over specified coordinate using ``trend``.""" + if 'trend' not in cfg.get('aggregate_by', {}): + return (cube, data) + coords = cfg['aggregate_by']['trend'] + if not isinstance(coords, list): + coords = [coords] + logger.debug("Aggregating over coordinate(s) %s by calculating 'trend'", + coords) + if len(coords) != 1: + raise ValueError( + f"Aggregation using 'trend' is currently only supported with a " + f"single coordinate, got {coords}") + _add_categorized_time_coords(cube, coords, 'trend') + coord_name = coords[0] + (cube, data) = _apply_trend_aggregator(cfg, cube, data, coord_name) + _promote_aux_coord(cube, data, coord_name) + return (cube, data) + + +def apply_common_mask(cfg, 
input_data):
+    """Apply common mask to all datasets."""
+    if not cfg.get('apply_common_mask'):
+        return input_data
+    logger.info("Applying common mask to all cubes")
+    shapes = {data['cube'].shape for data in input_data}
+    if len(shapes) > 1:
+        raise ValueError(
+            f"Expected cubes with identical shapes when 'apply_common_mask' "
+            f"is set to 'True', got shapes {shapes}")
+    common_mask = da.full(list(shapes)[0], False)
+    for data in input_data:
+        common_mask |= da.ma.getmaskarray(data['cube'].core_data())
+    for data in input_data:
+        data['cube'].data = da.ma.masked_array(data['cube'].core_data(),
+                                               mask=common_mask)
+    return input_data
+
+
+def argsort(cfg, cube, data):
+    """Calculate :func:`numpy.ma.argsort` along given axis (= Ranking)."""
+    if not cfg.get('argsort'):
+        return (cube, data)
+    _fail_if_stderr(data, "'argsort'")
+    coord = cfg['argsort'].get('coord')
+    if not coord:
+        raise ValueError(
+            "When 'argsort' is given, a valid 'coord' needs to be specified "
+            "as key")
+    logger.debug("Calculating argsort along coordinate '%s' to get ranking",
+                 coord)
+    axis = cube.coord_dims(coord)[0]
+    original_mask = np.ma.getmaskarray(cube.data)
+    if cfg['argsort'].get('descending'):
+        ranking = np.ma.argsort(-cube.data, axis=axis, fill_value=-np.inf)
+        cube.attributes['order'] = 'descending'
+    else:
+        ranking = np.ma.argsort(cube.data, axis=axis, fill_value=np.inf)
+        cube.attributes['order'] = 'ascending'
+    cube.data = np.ma.array(ranking, mask=original_mask, dtype=cube.dtype)
+    cube.units = Unit('no unit')
+    data['standard_name'] = None
+    data['short_name'] += '_ranking'
+    data['long_name'] += ' (ranking)'
+    data['units'] = str(cube.units)
+    return (cube, data)
+
+
+def cache_cube(cfg, cube, data):
+    """Cache cube in :obj:`dict`."""
+    path = data['filename']
+    basename = os.path.splitext(os.path.basename(path))[0]
+    if cube.var_name is not None:
+        basename = basename.replace(cube.var_name, data['short_name'])
+    cube.var_name = data['short_name']
+    if 'var_type' in data:
+        for var_type in mlr.VAR_TYPES:
+            if basename.endswith(f'_{var_type}'):
+                basename = basename.replace(f'_{var_type}', '')
+        basename += f"_{data['var_type']}"
+    new_path = get_diagnostic_filename(basename, cfg)
+    data['filename'] = new_path
+    data['cube'] = cube
+    new_attrs = cfg.get('output_attributes', {})
+    data.update(new_attrs)
+    return data
+
+
+def collapse(cfg, cube, data):
+    """Collapse data over specified coordinates."""
+    for (aggregator, coords) in cfg.get('collapse', {}).items():
+        if not isinstance(coords, list):
+            coords = [coords]
+        if aggregator not in AGGREGATORS:
+            raise ValueError(
+                f"Expected one of {list(AGGREGATORS.keys())} as aggregator "
+                f"for 'collapse', got '{aggregator}'")
+        logger.debug("Collapsing coordinate(s) %s by calculating %s", coords,
+                     aggregator)
+        if coords == ['all']:
+            coords = [coord.name() for coord in cube.coords(dim_coords=True)]
+        (cube, data) = _collapse_over(cfg, cube, data, coords, aggregator)
+    return (cube, data)
+
+
+def collapse_with_trend(cfg, cube, data):
+    """Collapse data over specified coordinates using ``trend``."""
+    if 'trend' not in cfg.get('collapse', {}):
+        return (cube, data)
+    coords = cfg['collapse']['trend']
+    if not isinstance(coords, list):
+        coords = [coords]
+    logger.debug("Collapsing coordinate(s) %s by calculating 'trend'", coords)
+    if coords == ['all']:
+        coords = [coord.name() for coord in cube.coords(dim_coords=True)]
+    if len(coords) != 1:
+        raise ValueError(
+            f"Collapsing using 'trend' is currently only supported with a "
+            f"single
coordinate, got {coords}") + coord_name = coords[0] + if not cube.coords(coord_name): + raise iris.exceptions.CoordinateNotFoundError( + f"Cannot calculate trend along '{coord_name}', cube " + f"{cube.summary(shorten=True)} does not contain a coordinate " + f"with that name") + return_stderr = _return_stderr(cfg, data) + (cube, + cube_stderr) = _calculate_slope_along_coord( + cube, coord_name, return_stderr=return_stderr) + units = _get_coord_units(cube, coord_name) + (cube, data) = _set_trend_metadata(cfg, cube, cube_stderr, data, units) + data['trend'] = f'along coordinate {coord_name}' + return (cube, data) + + +def convert_units_to(cfg, cube, data): + """Convert units if desired.""" + cfg_settings = cfg.get('convert_units_to') + data_settings = data.get('convert_units_to') + if cfg_settings or data_settings: + units_to = cfg_settings + if data_settings: + units_to = data_settings + logger.debug("Converting units from '%s' to '%s'", cube.units, + units_to) + try: + cube.convert_units(units_to) + except ValueError: + raise ValueError( + f"Cannot convert units of cube {cube.summary(shorten=True)} " + f"from '{cube.units}' to '{units_to}'") + data['units'] = str(cube.units) + return (cube, data) + + +def cube_to_aux_coord(cube): + """Convert :class:`iris.cube.Cube` to :class:`iris.coords.AuxCoord`.""" + aux_coord = iris.coords.AuxCoord(cube.data, + var_name=cube.var_name, + standard_name=cube.standard_name, + long_name=cube.long_name, + units=cube.units) + return aux_coord + + +def extract(cfg, cube): + """Extract specific coordinate values.""" + if not cfg.get('extract'): + return cube + constraints = [] + for (coord_name, val) in cfg['extract'].items(): + constraint = _get_single_constraint( + cube, coord_name, val, ignore_bounds=cfg['extract_ignore_bounds']) + constraints.append(constraint) + new_cube = _get_constrained_cube(cube, constraints) + if new_cube is None: + raise ValueError( + f"Extracting {cfg['extract']} from cube " + f"{cube.summary(shorten=True)} yielded empty cube") + return new_cube + + +def extract_range(cfg, cube): + """Extract range of coordinate values.""" + if not cfg.get('extract_range'): + return cube + constraints = [] + for (coord_name, coord_range) in cfg['extract_range'].items(): + constraint = _get_single_constraint( + cube, coord_name, coord_range, + ignore_bounds=cfg['extract_ignore_bounds'], + interpret_as_range=True) + constraints.append(constraint) + new_cube = _get_constrained_cube(cube, constraints) + if new_cube is None: + raise ValueError( + f"Extracting range {cfg['extract_range']} from cube " + f"{cube.summary(shorten=True)} yielded empty cube") + return new_cube + + +def get_ref_cube(input_data, **kwargs): + """Extract reference dataset.""" + logger.info("Using keyword arguments %s to extract reference datasets for " + "unifying coordinates", kwargs) + datasets = select_metadata(input_data, **kwargs) + if len(datasets) != 1: + raise ValueError( + f"Expected exactly one reference dataset for unifying coords " + f"matching {kwargs}, got {len(datasets):d}") + ref_cube = iris.load_cube(datasets[0]['filename']) + return ref_cube + + +def load_cubes(input_data): + """Load cubes into :obj:`dict`.""" + for data in input_data: + path = data['filename'] + logger.info("Loading %s", path) + cube = iris.load_cube(path) + data['cube'] = cube + data['original_filename'] = path + return input_data + + +def mask(cfg, cube): + """Perform masking operations.""" + n_masked_values_old = np.count_nonzero(np.ma.getmaskarray(cube.data)) + for (masking_op, kwargs) in 
+        if not hasattr(np.ma, masking_op):
+            raise AttributeError(
+                f"Invalid masking operation, '{masking_op}' is not a "
+                f"function of module numpy.ma")
+        logger.debug("Applying mask operation '%s' using arguments %s",
+                     masking_op, kwargs)
+        masked_data = getattr(np.ma, masking_op)(cube.data, **kwargs)
+        cube = cube.copy(masked_data)
+    n_masked_values_new = np.count_nonzero(np.ma.getmaskarray(cube.data))
+    n_total = cube.data.size
+    diff = n_masked_values_new - n_masked_values_old
+    if diff:
+        logger.info(
+            "Additionally masked %i values by operations %s (before: %i "
+            "non-masked values, after: %i non-masked values)", diff,
+            cfg['mask'], n_total - n_masked_values_old,
+            n_total - n_masked_values_new)
+    return cube
+
+
+def normalize_by_mean(cfg, cube, data):
+    """Normalize final dataset by mean."""
+    if cfg.get('normalize_by_mean') and '_error' not in data['var_type']:
+        units = cube.units
+        logger.debug("Normalizing mean")
+        weights = _get_all_weights(cfg, cube)
+        mean = np.ma.average(cube.data, weights=weights)
+        cube.data -= mean
+        data['long_name'] += ' (mean normalized)'
+        data['normalize_by_mean'] = (
+            f"Mean normalized to 0.0 {units} by subtraction, original mean "
+            f"was {mean} {units}")
+        data['original_mean'] = mean
+    return (cube, data)
+
+
+def normalize_by_std(cfg, cube, data):
+    """Normalize final dataset by standard deviation."""
+    if not cfg.get('normalize_by_std'):
+        return (cube, data)
+    units = cube.units
+    logger.debug("Normalizing by standard_deviation")
+    std = np.ma.std(cube.data)
+    cube.data /= std
+    cube.units = '1'
+    data['long_name'] += ' (std normalized)'
+    data['units'] = str(cube.units)
+    data['normalize_by_std'] = (
+        f"Standard deviation scaled to 1.0 by division, original std was "
+        f"{std} {units}")
+    data['original_units'] = str(units)
+    data['original_std'] = std
+    return (cube, data)
+
+
+def ref_calculation(cfg, input_data):
+    """Perform all calculations involving reference datasets."""
+    if not cfg.get('ref_calculation'):
+        return input_data
+    ref_option = cfg['ref_calculation']
+    ref_options = ['merge', 'add', 'divide', 'multiply', 'subtract', 'trend']
+    if ref_option not in ref_options:
+        raise ValueError(
+            f"Expected one of {ref_options} for 'ref_calculation', got "
+            f"'{ref_option}'")
+    ref_kwargs = cfg.get('ref_kwargs', {})
+    metadata = ref_kwargs.get('matched_by', [])
+    logger.info("Performing calculation '%s' involving reference datasets",
+                ref_option)
+    logger.info("Retrieving reference dataset attributes %s to match "
+                "datasets", metadata)
+    ref_datasets = select_metadata(input_data, ref=True)
+    regular_datasets_errors = _get_error_datasets(input_data, ref=False)
+    regular_datasets = []
+    for dataset in select_metadata(input_data, ref=False):
+        if dataset not in regular_datasets_errors:
+            regular_datasets.append(dataset)
+    new_data = []
+    logger.info(
+        "Performing calculations involving reference datasets for %i "
+        "regular dataset(s)", len(regular_datasets))
+    for dataset in regular_datasets:
+        dataset = _get_ref_calc(cfg, dataset, ref_datasets, ref_option)
+        new_data.append(dataset)
+    logger.info(
+        "Performing calculations involving reference datasets for %i error "
+        "dataset(s)", len(regular_datasets_errors))
+    for dataset in regular_datasets_errors:
+        dataset = _get_ref_calc_stderr(cfg, dataset, ref_datasets, new_data,
+                                       ref_option)
+        new_data.append(dataset)
+    return new_data
+
+
+def scalar_operations(cfg, cube):
+    """Perform scalar operations."""
+    allowed_operations = ('add', 'divide', 'multiply', 'subtract')
+    for (operation, constant) in cfg.get('scalar_operations', {}).items():
+        if operation == 'add':
+            cube.data += constant
+            logger.debug("Added %f to data", constant)
+        elif operation == 'divide':
+            cube.data /= constant
+            logger.debug("Divided data by %f", constant)
+        elif operation == 'multiply':
+            cube.data *= constant
+            logger.debug("Multiplied data by %f", constant)
+        elif operation == 'subtract':
+            cube.data -= constant
+            logger.debug("Subtracted %f from data", constant)
+        else:
+            raise ValueError(
+                f"Expected one of {allowed_operations} for operation in "
+                f"'scalar_operations', got '{operation}'")
+    return cube
+
+
+def unify_coords_to(cube, ref_cube):
+    """Unify coordinates."""
+    if ref_cube is None:
+        return cube
+
+    # Broadcast if necessary/possible
+    if cube.shape != ref_cube.shape:
+        logger.info(
+            "Broadcasting %s to shape of reference cube %s",
+            cube.summary(shorten=True), ref_cube.shape)
+        old_cube = cube.copy()
+        broadcasted_data = np.broadcast_to(
+            np.ma.array(old_cube.data).filled(np.nan), ref_cube.shape)
+        cube = iris.cube.Cube(np.ma.masked_invalid(broadcasted_data))
+        cube.metadata = old_cube.metadata
+
+    # Set new coordinates
+    for coord in cube.coords():
+        cube.remove_coord(coord)
+    for dim_coord in ref_cube.coords(dim_coords=True):
+        coord_dims = ref_cube.coord_dims(dim_coord)
+        cube.add_dim_coord(dim_coord, coord_dims)
+    for aux_coord in ref_cube.coords(dim_coords=False):
+        coord_dims = ref_cube.coord_dims(aux_coord)
+        cube.add_aux_coord(aux_coord, coord_dims)
+    return cube
+
+
+def write_cube(cfg, cube, data):
+    """Write cube (check for MLR attributes and existing files first)."""
+    if not mlr.datasets_have_mlr_attributes([data], log_level='error'):
+        raise ValueError(
+            f"Cannot write cube {cube.summary(shorten=True)} using metadata "
+            f"{data}")
+
+    # Get new path
+    new_path = data['filename']
+    if os.path.exists(new_path):
+        now = datetime.datetime.utcnow().strftime("%Y%m%d_%H%M%S%f")
+        data['filename'] = new_path.replace('.nc', f'_{now}.nc')
+
+    # Provenance
+    ancestors = [data.pop('original_filename')]
+    opts = [opt for opt in cfg if opt in globals()]
+    caption = (f"{cube.long_name} for {mlr.get_alias(data)} preprocessed "
+               f"with operations {opts}.")
+    record = {
+        'ancestors': ancestors,
+        'authors': ['schlund_manuel'],
+        'caption': caption,
+        'references': ['schlund20jgr'],
+    }
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(data['filename'], record)
+
+    # Write file
+    io.metadata_to_netcdf(cube, data)
+
+
+def main(cfg):
+    """Run the diagnostic."""
+    cfg = deepcopy(cfg)
+    warnings.filterwarnings(
+        'ignore',
+        message='Using DEFAULT_SPHERICAL_EARTH_RADIUS',
+        category=UserWarning,
+        module='iris',
+    )
+    input_data = mlr.get_input_data(cfg,
+                                    pattern=cfg.get('pattern'),
+                                    ignore=cfg.get('ignore'),
+                                    check_mlr_attributes=False)
+
+    # Default options
+    cfg.setdefault('area_weighted', True)
+    cfg.setdefault('extract_ignore_bounds', False)
+    cfg.setdefault('n_jobs', 1)
+    cfg.setdefault('return_trend_stderr', True)
+    cfg.setdefault('time_weighted', True)
+    logger.info("Using at most %i processes", cfg['n_jobs'])
+
+    # Get reference dataset for unifying coordinates if necessary
+    if 'unify_coords_to' in cfg:
+        ref_cube = get_ref_cube(input_data, **cfg['unify_coords_to'])
+    else:
+        ref_cube = None
+
+    # Load cubes and apply common mask
+    input_data = load_cubes(input_data)
+    input_data = apply_common_mask(cfg, input_data)
+
+    # Operations that add additional datasets (standard errors)
+    for data in
input_data: + data.setdefault('ref', False) + if data['ref'] == 'True': + data['ref'] = True + if data['ref'] == 'False': + data['ref'] = False + cube = data['cube'] + cube = unify_coords_to(cube, ref_cube) + cube = mask(cfg, cube) + cube = scalar_operations(cfg, cube) + cube = extract_range(cfg, cube) + cube = extract(cfg, cube) + (cube, data) = aggregate_by_trend(cfg, cube, data) + (cube, data) = collapse_with_trend(cfg, cube, data) + data = cache_cube(cfg, cube, data) + input_data = add_standard_errors(input_data) + cfg.get('collapse', {}).pop('trend', None) + cfg.get('aggregate_by', {}).pop('trend', None) + + # Remaining operations + for data in input_data: + cube = data['cube'] + (cube, data) = aggregate_by(cfg, cube, data) + (cube, data) = collapse(cfg, cube, data) + (cube, data) = argsort(cfg, cube, data) + data = cache_cube(cfg, cube, data) + + # Calculations involving reference datasets + input_data = ref_calculation(cfg, input_data) + input_data = add_standard_errors(input_data) + + # Convert units + for data in input_data: + cube = data['cube'] + (cube, data) = convert_units_to(cfg, cube, data) + data = cache_cube(cfg, cube, data) + + # Save cubes + for data in input_data: + cube = data.pop('cube') + data.pop('original_cube', None) + data.pop('ref_cube', None) + data.pop('stderr', None) + + # Normalize and write cubes + (cube, data) = normalize_by_mean(cfg, cube, data) + (cube, data) = normalize_by_std(cfg, cube, data) + write_cube(cfg, cube, data) + + +# Run main function when this script is called +if __name__ == '__main__': + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/mlr/rescale_with_emergent_constraint.py b/esmvaltool/diag_scripts/mlr/rescale_with_emergent_constraint.py new file mode 100644 index 0000000000..32610744d7 --- /dev/null +++ b/esmvaltool/diag_scripts/mlr/rescale_with_emergent_constraint.py @@ -0,0 +1,517 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +"""Rescale label data using a single emergent constraint. + +Description +----------- +This diagnostic uses an emergent relationship between data marked as +``var_type=label`` (Y axis) and ``var_type=feature`` (X axis) together with an +observation of the X axis (``var_type=prediction_input`` and +``var_type=prediction_input_error``) to calculate factors that are necessary to +rescale each input point so that it matches the constraint. The rescaling is +applied to data marked as ``var_type=label_to_rescale``. All data needs the +attribute ``tag`` which needs to be identical for ``label``, +``prediction_input``, ``prediction_input_error`` and ``label_to_rescale``. Only +a single ``tag`` for ``feature`` is possible. + +Author +------ +Manuel Schlund (DLR, Germany) + +Project +------- +CRESCENDO + +Configuration options in recipe +------------------------------- +group_by_attributes: list of str, optional (default: ['dataset']) + List of attributes used to separate different input points. +ignore: list of dict, optional + Ignore specific datasets by specifying multiple :obj:`dict` s of metadata. +legend_kwargs: dict, optional + Optional keyword arguments of :func:`matplotlib.pyplot.legend` (affects + only plots with legends). +pattern: str, optional + Pattern matched against ancestor file names. +plot_emergent_relationship: dict, optional + If given, plot emergent relationship between X and Y data. Specify + additional keyword arguments by ``plot_kwargs`` and plot appearance options + by ``pyplot_kwargs`` (processed as functions of :mod:`matplotlib.pyplot`). 
+ Use ``{}`` to plot with default settings. +plot_kwargs_for_groups: dict, optional + Specify additional keyword arguments (values) for the different points + defined by ``group_by_attributes`` (keys) used in plots. +savefig_kwargs: dict, optional + Keyword arguments for :func:`matplotlib.pyplot.savefig`. +seaborn_settings: dict, optional + Options for :func:`seaborn.set_theme` (affects all plots). + +""" + +import logging +import os +from copy import deepcopy + +import iris +import matplotlib.pyplot as plt +import numpy as np +import pandas as pd +import seaborn as sns + +import esmvaltool.diag_scripts.emergent_constraints as ec +import esmvaltool.diag_scripts.shared.iris_helpers as ih +from esmvaltool.diag_scripts import mlr +from esmvaltool.diag_scripts.mlr.plot import get_savefig_kwargs +from esmvaltool.diag_scripts.mlr.preprocess import cube_to_aux_coord +from esmvaltool.diag_scripts.shared import ( + ProvenanceLogger, + get_diagnostic_filename, + get_plot_filename, + group_metadata, + io, + run_diagnostic, + select_metadata, +) + +logger = logging.getLogger(os.path.basename(__file__)) + + +GROUPS_SEP = '__' +UNITS_SEP = '___' + + +def _check_datasets(datasets, name): + """Check input data.""" + if not datasets: + raise ValueError(f"No data with var_type '{name}' given") + keys_to_check = ['tag', 'short_name', 'long_name', 'units'] + for key in keys_to_check: + vals = list(group_metadata(datasets, key).keys()) + if len(vals) != 1: + raise ValueError( + f"Expected data with unique '{key}' for var_type " + f"'{name}', got {vals}") + + +def _get_data(dataset): + """Get data from netcdf file.""" + cube = iris.load_cube(dataset['filename']) + if cube.data.size != 1: + raise ValueError( + f"Expected scalar data for dataset {dataset}, got data with shape " + f"{cube.shape}") + return cube.data.squeeze() + + +def _get_data_frame(datasets, group_by_attributes, var_type): + """Extract :class:`pandas.DataFrame` from :obj:`list` of datasets.""" + tag = f"{datasets[0]['tag']}{UNITS_SEP}{datasets[0]['units']}" + data_frame = pd.DataFrame(columns=[tag]) + for dataset in datasets: + new_group = _get_group(dataset, group_by_attributes) + if new_group in data_frame.index: + raise ValueError( + f"Got duplicate data for group '{new_group}' of var_type " + f"'{var_type}', consider extending list of attributes that " + f"is used for grouping (currently: {group_by_attributes})") + new_data = _get_data(dataset) + data_frame.loc[new_group] = new_data + data_frame = data_frame.sort_index() + data_frame.index.name = 'dataset' + return data_frame + + +def _get_datasets_for_ec(input_data): + """Check input data.""" + features = select_metadata(input_data, var_type='feature') + labels = select_metadata(input_data, var_type='label') + pred_input = select_metadata(input_data, var_type='prediction_input') + pred_input_err = select_metadata(input_data, + var_type='prediction_input_error') + data_to_check = { + 'feature': features, + 'label': labels, + 'prediction_input': pred_input, + 'prediction_input_error': pred_input_err, + } + for (name, data) in data_to_check.items(): + _check_datasets(data, name) + return (features, labels, pred_input, pred_input_err) + + +def _get_ec_ancestors(cfg): + """Get ancestor files for emergent constraint.""" + input_data = _get_input_data(cfg) + ancestors = [] + for var_type in ('feature', 'label', 'prediction_input', + 'prediction_input_error'): + datasets = select_metadata(input_data, var_type=var_type) + ancestors.extend([d['filename'] for d in datasets]) + return ancestors + + 
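+
+# Illustration (made-up values, not part of the diagnostic logic): with the
+# default group_by_attributes=['dataset'], _get_data_frame above returns
+# one-column frames whose column name joins tag and units via UNITS_SEP and
+# whose index holds the GROUPS_SEP-joined group names, e.g.:
+#
+#              ecs___K
+#     dataset
+#     MODEL-A      3.1
+#     MODEL-B      4.2
+
+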
+def _get_ec_cube(x_data, y_data): + """Get :class:`iris.cube.Cube` representing emergent relationship.""" + (feature, feature_units, label, label_units) = _get_tags(x_data, y_data) + x_cube = ec.pandas_object_to_cube(x_data, var_name=feature, + units=feature_units)[:, 0] + x_coord = cube_to_aux_coord(x_cube) + y_cube = ec.pandas_object_to_cube(y_data, var_name=label, + units=label_units)[:, 0] + y_cube.add_aux_coord(x_coord, 0) + y_cube.remove_coord('columns') + y_cube.attributes['dataset'] = '' + y_cube.attributes['project'] = '' + return y_cube + + +def _get_error_dataset(cfg, datasets): + """Get error dataset.""" + error_dataset = {} + for key in mlr.NECESSARY_KEYS: + vals = sorted(list({str(d[key]) for d in datasets})) + error_dataset[key] = '|'.join(vals) + start_years = list({d['start_year'] for d in datasets}) + end_years = list({d['end_year'] for d in datasets}) + error_dataset['start_year'] = min(start_years) + error_dataset['end_year'] = max(end_years) + error_dataset['standard_name'] = None + error_dataset['short_name'] += '_standard_error' + error_dataset['long_name'] += ' (Standard Error)' + error_dataset['var_type'] = 'prediction_output_error' + error_dataset['error'] = 'due to rescaling using emergent relationship' + error_dataset['filename'] = get_diagnostic_filename( + 'standard_error_of_emergent_constraint', cfg) + return error_dataset + + +def _get_group(dataset, group_by_attributes): + """Get name of group.""" + values = [] + for attr in group_by_attributes: + if attr not in dataset: + raise KeyError( + f"Attribute '{attr}' not available in dataset {dataset}") + values.append(dataset[attr]) + return GROUPS_SEP.join(values) + + +def _get_input_data(cfg): + """Get input data.""" + input_data = mlr.get_input_data(cfg, + pattern=cfg.get('pattern'), + ignore=cfg.get('ignore')) + return input_data + + +def _get_plot_kwargs(cfg, option, group=None): + """Get plot keyword arguments for a group.""" + plot_kwargs = cfg.get(option, {}).get('plot_kwargs', {}) + plot_kwargs = deepcopy(plot_kwargs) + if group is None: + return plot_kwargs + group_plot_kwargs = cfg.get('plot_kwargs_for_groups', {}).get(group, {}) + plot_kwargs.update(group_plot_kwargs) + plot_kwargs.setdefault('linestyle', '-') + if plot_kwargs['linestyle'] == '-': + plot_kwargs.setdefault('marker', 'o') + else: + plot_kwargs.setdefault('marker', 's') + return plot_kwargs + + +def _get_ref_cube(datasets): + """Get (unique) shape of datasets.""" + cubes = [iris.load_cube(d['filename']) for d in datasets] + shapes = list({cube.shape for cube in cubes}) + if len(shapes) != 1: + raise ValueError( + f"Expected unique shape for 'label_to_rescale' data, got {shapes}") + ref_cube = cubes[0] + ref_cube.attributes = {} + return ref_cube + + +def _get_mmm_cube(datasets): + """Extract data.""" + cubes = iris.cube.CubeList() + cube_labels = [] + ref_cube = iris.load_cube(datasets[0]['filename']) + for (idx, dataset) in enumerate(datasets): + path = dataset['filename'] + cube = iris.load_cube(path) + ih.prepare_cube_for_merging(cube, str(idx)) + cubes.append(cube) + cube_labels.append(str(idx)) + mmm_cube = cubes.merge_cube() + if len(cube_labels) > 1: + mmm_cube = mmm_cube.collapsed(['cube_label'], iris.analysis.MEAN) + for aux_coord in ref_cube.coords(dim_coords=False): + mmm_cube.add_aux_coord(aux_coord, ref_cube.coord_dims(aux_coord)) + mmm_cube.remove_coord('cube_label') + return mmm_cube + + +def _get_tags(x_data, y_data): + """Extract tags from X and Y data.""" + feature = x_data.columns[0].split(UNITS_SEP)[0] + 
feature_units = x_data.columns[0].split(UNITS_SEP)[1] + label = y_data.columns[0].split(UNITS_SEP)[0] + label_units = y_data.columns[0].split(UNITS_SEP)[1] + return (feature, feature_units, label, label_units) + + +def _process_pyplot_kwargs(cfg, option): + """Process functions for :mod:`matplotlib.pyplot`.""" + for (key, val) in cfg.get(option, {}).get('pyplot_kwargs', {}).items(): + getattr(plt, key)(val) + + +def get_constraint(x_data, y_data, x_ref, x_ref_err): + """Print constraint value for Y axis.""" + (feature, feature_units, label, label_units) = _get_tags(x_data, y_data) + x_data = x_data.values.squeeze() + y_data = y_data.values.squeeze() + x_ref = x_ref.values.squeeze() + x_ref_err = x_ref_err.values.squeeze() + (y_data_lin, y_pdf) = ec.target_pdf(x_data, y_data, x_ref, x_ref_err) + y_mean = np.sum(y_data_lin * y_pdf) / np.sum(y_pdf) + y_var = np.sum((y_data_lin - y_mean)**2 * y_pdf) / np.sum(y_pdf) + y_std = np.sqrt(y_var) + lines = ec.regression_line(x_data, y_data) + logger.info("Observational constraint on '%s': (%.3f ± %.3f) %s", feature, + x_ref, x_ref_err, feature_units) + logger.info("Constraint on target variable '%s': (%.3f ± %.3f) %s", label, + y_mean, y_std, label_units) + logger.info("R2 of emergent relationship: %.3f (p = %.4f)", + lines['rvalue']**2, lines['pvalue']) + return (y_mean, y_std) + + +def get_emergent_constraint_data(cfg): + """Get :class:`pandas.DataFrame` that contains the data.""" + input_data = _get_input_data(cfg) + (features, labels, pred_input, + pred_input_err) = _get_datasets_for_ec(input_data) + + # Extract data frames + x_data = _get_data_frame(features, cfg['group_by_attributes'], 'feature') + y_data = _get_data_frame(labels, cfg['group_by_attributes'], 'label') + x_ref = _get_data_frame(pred_input, cfg['group_by_attributes'], + 'prediction_input') + x_ref_err = _get_data_frame(pred_input_err, cfg['group_by_attributes'], + 'prediction_input_error') + + # Check data frames + if len(x_data.index) < 2: + raise ValueError( + f"Expected at least two input points for X data, got " + f"{len(x_data.index):d}") + if not x_data.index.equals(y_data.index): + raise ValueError( + f"Expected identical input points for X and Y data, got\nX: " + f"{x_data.index.values}\nY: {y_data.index.values}") + if len(x_ref.index) != 1: + raise ValueError( + f"Expected exactly one prediction input point for X data, got " + f"{len(x_ref.index):d}") + if not x_ref.index.equals(x_ref_err.index): + raise ValueError( + f"Expected identical input points for prediction input and its " + f"corresponding errors for X data, got {x_ref.index.values} and " + f"{x_ref_err.index.values}, respectively") + logger.info("Found X data:\n%s", x_data) + logger.info("Found Y data:\n%s", y_data) + logger.info("Found X reference data:\n%s", x_ref) + logger.info("Found X reference error data:\n%s", x_ref_err) + return (x_data, y_data, x_ref, x_ref_err) + + +def plot_emergent_relationship(cfg, x_data, y_data, x_ref, x_ref_err, y_mean): + """Plot emergent relationship.""" + (feature, feature_units, label, label_units) = _get_tags(x_data, y_data) + logger.info("Plotting emergent relationship between '%s' and '%s'", + label, feature) + (_, axes) = plt.subplots() + + # Plot data points + for group in x_data.index: + plot_kwargs = _get_plot_kwargs(cfg, 'plot_emergent_relationship', + group=group) + plot_kwargs['linestyle'] = 'none' + plot_kwargs['label'] = group + axes.plot(x_data.loc[group], y_data.loc[group], **plot_kwargs) + + # Plot regression lines + axes.set_xlim(auto=False) + 
axes.set_ylim(auto=False) + lines = ec.regression_line(x_data.values.squeeze(), + y_data.values.squeeze()) + lines['x'] = np.squeeze(lines['x']) + axes.plot(lines['x'], + lines['y'], + color='orange', + linestyle='-', + label='Linear regression') + axes.fill_between(lines['x'], + lines['y_minus_err'], + lines['y_plus_err'], + color='orange', + alpha=0.2) + + # Plot reference + x_ref = x_ref.values.squeeze() + x_ref_err = x_ref_err.values.squeeze() + axes.axvline(x_ref, + color='k', + linestyle=':', + label='Observational constraint') + axes.axvspan(x_ref - x_ref_err, x_ref + x_ref_err, color='k', alpha=0.1) + axes.axhline(y_mean, color='k', linestyle=':') + + # Plot appearance + axes.set_title(f"Emergent relationship between {label} and {feature}") + axes.set_xlabel(f"{feature} [{feature_units}]") + axes.set_ylabel(f"{label} [{label_units}]") + _process_pyplot_kwargs(cfg, 'plot_emergent_relationship') + plt.legend(**cfg['legend_kwargs']) + text = rf"$R^2$ = {lines['rvalue']**2:.2f}, p = {lines['pvalue']:.3f}" + if lines['rvalue'] > 0.0: + axes.text(0.6, 0.05, text, transform=axes.transAxes) + else: + axes.text(0.6, 0.95, text, transform=axes.transAxes) + + # Save plot + plot_path = get_plot_filename(f'{label}_vs_{feature}', cfg) + savefig_kwargs = get_savefig_kwargs(cfg) + plt.savefig(plot_path, **savefig_kwargs) + logger.info("Wrote %s", plot_path) + plt.close() + + # Provenance + cube = _get_ec_cube(x_data, y_data) + netcdf_path = get_diagnostic_filename(f'{label}_vs_{feature}', cfg) + io.iris_save(cube, netcdf_path) + record = { + 'ancestors': _get_ec_ancestors(cfg), + 'authors': ['schlund_manuel'], + 'caption': f"Emergent relationship between {label} and {feature}.", + 'plot_types': ['scatter'], + 'references': ['schlund20jgr'], + 'themes': ['EC'], + } + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(netcdf_path, record) + provenance_logger.log(plot_path, record) + + +def rescale_labels(cfg, y_data, y_mean, y_std): + """Rescale labels.""" + input_data = _get_input_data(cfg) + labels_to_rescale = select_metadata(input_data, + var_type='label_to_rescale') + _check_datasets(labels_to_rescale, 'label_to_rescale') + + # Get groups + groups = [] + for dataset in labels_to_rescale: + group = _get_group(dataset, cfg['group_by_attributes']) + groups.append(group) + dataset['group__for__rescaling'] = group + + groups.sort() + if set(groups) != set(y_data.index): + raise ValueError( + f"Expected identical groups for 'label' and 'label_to_rescale' " + f"data, got\n'label': {y_data.index.values}\n'label_to_rescale': " + f"{np.array(groups)}") + + # Rescale data + ref_cube = _get_ref_cube(labels_to_rescale) + for dataset in labels_to_rescale: + cube = iris.load_cube(dataset['filename']) + rescaling_factor = ( + y_mean / y_data.loc[dataset['group__for__rescaling']].values) + logger.info("Rescaling '%s' with factor %.2f", + dataset['group__for__rescaling'], rescaling_factor) + rescaled_cube = cube.copy(cube.data * rescaling_factor) + + # Adapt metadata + rescaled_dataset = deepcopy(dataset) + rescaled_dataset['var_type'] = 'label' + rescaled_dataset['rescaled'] = 'using emergent relationship' + if '_label' in dataset['filename']: + rescaled_dataset['filename'] = dataset['filename'].replace( + '_label_to_rescale', '_rescaled_label') + else: + rescaled_dataset['filename'] = dataset['filename'].replace( + '.nc', '_rescaled_label.nc') + + # Save data + rescaled_dataset['filename'] = mlr.get_new_path( + cfg, rescaled_dataset['filename']) + io.metadata_to_netcdf(rescaled_cube, 
rescaled_dataset) + + # Provenance + record = { + 'ancestors': [dataset['filename']] + _get_ec_ancestors(cfg), + 'authors': ['schlund_manuel'], + 'caption': f"Rescaled {rescaled_cube.long_name} for " + f"{mlr.get_alias(rescaled_dataset)} using emergent " + f"relationship.", + 'references': ['schlund20jgr'], + 'themes': ['EC'], + } + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(rescaled_dataset['filename'], record) + + # Rescale MMM to estimate error + logger.debug("Estimating error using global error %e", y_std) + mmm_cube = _get_mmm_cube(labels_to_rescale) + error_cube = ref_cube.copy(mmm_cube.data * y_std / y_data.mean().values) + error_dataset = _get_error_dataset(cfg, labels_to_rescale) + io.metadata_to_netcdf(error_cube, error_dataset) + + # Provenance + record = { + 'ancestors': ([d['filename'] for d in labels_to_rescale] + + _get_ec_ancestors(cfg)), + 'authors': ['schlund_manuel'], + 'caption': f"Rescaled {error_cube.long_name} using emergent " + f"relationship.", + 'references': ['schlund20jgr'], + 'themes': ['EC'], + } + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(error_dataset['filename'], record) + + +def main(cfg): + """Run the diagnostic.""" + sns.set_theme(**cfg.get('seaborn_settings', {})) + cfg = deepcopy(cfg) + cfg.setdefault('group_by_attributes', ['dataset']) + cfg.setdefault('legend_kwargs', {}) + logger.info("Using attributes %s to group input data", + cfg['group_by_attributes']) + + # Extract data + (x_data, y_data, x_ref, x_ref_err) = get_emergent_constraint_data(cfg) + + # Get constraint + (y_mean, y_std) = get_constraint(x_data, y_data, x_ref, x_ref_err) + + # Plots + if 'plot_emergent_relationship' in cfg: + plot_emergent_relationship(cfg, x_data, y_data, x_ref, x_ref_err, + y_mean) + + # Rescale labels + rescale_labels(cfg, y_data, y_mean, y_std) + + +# Run main function when this script is called +if __name__ == '__main__': + mlr.ignore_warnings() + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/monitor/__init__.py b/esmvaltool/diag_scripts/monitor/__init__.py new file mode 100644 index 0000000000..d741b2bbf7 --- /dev/null +++ b/esmvaltool/diag_scripts/monitor/__init__.py @@ -0,0 +1 @@ +"""Model monitoring recipe for ESMValTool.""" diff --git a/esmvaltool/diag_scripts/monitor/compute_eofs.py b/esmvaltool/diag_scripts/monitor/compute_eofs.py new file mode 100644 index 0000000000..a07ca835c0 --- /dev/null +++ b/esmvaltool/diag_scripts/monitor/compute_eofs.py @@ -0,0 +1,153 @@ +"""Diagnostic to compute and plot the first EOF of an arbitrary input. + +Description +----------- +This diagnostic can be used to compute and show Empirical Orthogonal Functions +(EOFs) and Principal Components (PCs) of arbitrary input. It creates a map plot +of the first EOF and the associated PC time series. + +Configuration options in recipe +------------------------------- +cartopy_data_dir: str, optional (default: None) + Path to cartopy data dir. Defaults to None. See + https://scitools.org.uk/cartopy/docs/latest/. +config_file: str, optional + Path to the monitor configuration file. Defaults to ``monitor_config.yml`` + in the same folder as the diagnostic script. More information on the + monitor configuration file can be found :ref:`here `. +plot_filename: str, optional + Filename pattern for the plots. + Defaults to ``{plot_type}_{real_name}_{dataset}_{mip}_{exp}_{ensemble}``. 
+    All tags (i.e., the entries in curly brackets, e.g., ``{dataset}``, are
+    replaced with the corresponding values).
+plot_folder: str, optional
+    Path to the folder to store figures. Defaults to
+    ``{plot_dir}/../../{dataset}/{exp}/{modeling_realm}/{real_name}``. All
+    tags (i.e., the entries in curly brackets, e.g., ``{dataset}``, are
+    replaced with the corresponding values). ``{plot_dir}`` is replaced with
+    the default ESMValTool plot directory (i.e.,
+    ``output_dir/plots/diagnostic_name/script_name/``, see
+    :ref:`esmvalcore:outputdata`).
+rasterize_maps: bool, optional (default: True)
+    If ``True``, use `rasterization
+    `_ for
+    map plots to produce smaller files. This is only relevant for vector
+    graphics (e.g., ``output_file_type=pdf,svg,ps``).
+
+.. hint::
+
+    Extra arguments given to the recipe are ignored, so it is safe to use
+    yaml anchors to share the configuration of common arguments with other
+    monitor diagnostic scripts.
+
+"""
+import logging
+from copy import deepcopy
+
+import iris
+import matplotlib.pyplot as plt
+from eofs.iris import Eof
+from mapgenerator.plotting.plotmap import PlotMap
+
+import esmvaltool.diag_scripts.shared
+import esmvaltool.diag_scripts.shared.names as n
+from esmvaltool.diag_scripts.monitor.monitor_base import MonitorBase
+from esmvaltool.diag_scripts.shared import group_metadata
+
+logger = logging.getLogger(__name__)
+
+
+class Eofs(MonitorBase):
+    """Diagnostic to compute EOFs and plot them.
+
+    It is also an example of how to derive from the monitor class to use
+    its plotting capabilities in diagnostics that cannot be done using only
+    the preprocessor.
+    """
+
+    def __init__(self, config):
+        """Initialize class member."""
+        super().__init__(config)
+
+        # Get default settings
+        self.cfg = deepcopy(self.cfg)
+        self.cfg.setdefault('rasterize_maps', True)
+
+    def compute(self):
+        """Compute the diagnostic."""
+        for module in ['matplotlib', 'fiona']:
+            module_logger = logging.getLogger(module)
+            module_logger.setLevel(logging.WARNING)
+
+        data = group_metadata(self.cfg['input_data'].values(), 'alias')
+        # Loop over datasets
+        for alias in data:
+            # Loop over variables
+            variables = group_metadata(data[alias], 'variable_group')
+            for var_name, var_info in variables.items():
+                logger.info('Plotting variable %s', var_name)
+                var_info = var_info[0]
+                # Load variable
+                cube = iris.load_cube(var_info['filename'])
+                # Initialise solver
+                solver = Eof(cube, weights='coslat')
+                # Get variable options as defined in monitor_config.yml
+                variable_options = self._get_variable_options(
+                    var_info['variable_group'], '')
+                # Initialise PlotMap class
+                plot_map = PlotMap(loglevel='INFO')
+                # Compute EOF
+                eof = solver.eofs(neofs=1)[0, ...]
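+                # Note: solver.eofs(neofs=1) returns a cube with a leading
+                # EOF dimension, so the [0, ...] indexing above keeps only
+                # the first (leading) EOF as a map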
+                # Set metadata
+                eof.long_name = var_info.get('eof_name', eof.long_name)
+                eof.standard_name = None
+                # Plot EOF map using plot_cube from PlotMap
+                plot_map.plot_cube(eof, save=False, **variable_options)
+                # Use rasterization if desired
+                # Note: plt.gca() is the colorbar here, use plt.gcf().axes
+                # to access the correct axes
+                if self.cfg['rasterize_maps']:
+                    self._set_rasterized(plt.gcf().axes[0])
+                # Get filename for the EOF plot
+                filename = self.get_plot_path('eof', var_info)
+                # Save figure
+                plt.savefig(filename,
+                            bbox_inches='tight',
+                            pad_inches=.2,
+                            dpi=plot_map.dpi)
+                plt.close(plt.gcf())
+                # Record provenance for EOF plot
+                caption = (f"{eof.long_name} of dataset "
+                           f"{var_info[n.DATASET]} "
+                           f"(project {var_info[n.PROJECT]}).")
+                self.record_plot_provenance(filename, var_info, 'eof',
+                                            caption=caption)
+
+                # Compute PC
+                pcomp = solver.pcs(npcs=1, pcscaling=1)[:, 0]
+                # Set metadata
+                pcomp.long_name = var_info.get('pc_name', pcomp.long_name)
+                pcomp.standard_name = None
+                # Get filename for the PC plot
+                filename = self.get_plot_path('pc', var_info, add_ext=False)
+                # Plot PC timeseries using plot_cube from MonitorBase
+                self.plot_cube(pcomp, filename)
+                # Record provenance for the PC plot
+                caption = (f"{pcomp.long_name} of dataset "
+                           f"{var_info[n.DATASET]} "
+                           f"(project {var_info[n.PROJECT]}).")
+                self.record_plot_provenance(
+                    self._add_file_extension(filename),
+                    var_info,
+                    'pc',
+                    caption=caption,
+                )
+
+
+def main():
+    """Run EOFs diagnostic."""
+    with esmvaltool.diag_scripts.shared.run_diagnostic() as config:
+        Eofs(config).compute()
+
+
+if __name__ == "__main__":
+    main()
diff --git a/esmvaltool/diag_scripts/monitor/monitor.py b/esmvaltool/diag_scripts/monitor/monitor.py
new file mode 100644
index 0000000000..dda5aa4f3d
--- /dev/null
+++ b/esmvaltool/diag_scripts/monitor/monitor.py
@@ -0,0 +1,577 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""Diagnostic to plot preprocessor output.
+
+Description
+-----------
+This diagnostic can be used to visualize arbitrary preprocessor output.
+
+Currently supported plot types (use the option ``plots`` to specify them; a
+minimal example follows the list):
+    - Climatology (plot type ``clim``): Plots climatology. Supported
+      coordinates: (`latitude`, `longitude`, `month_number`).
+    - Seasonal climatologies (plot type ``seasonclim``): It produces a
+      multi-panel (2x2) plot with the seasonal climatologies. Supported
+      coordinates: (`latitude`, `longitude`, `month_number`).
+    - Monthly climatologies (plot type ``monclim``): It produces a
+      multi-panel (3x4) plot with the monthly climatologies. Can be
+      customized to show only certain months and to rearrange the number
+      of columns and rows. Supported coordinates: (`latitude`, `longitude`,
+      `month_number`).
+    - Time series (plot type ``timeseries``): Generate time series plots.
+      It will always generate the full period time series, but if the
+      period is longer than 75 years, it will also generate two extra time
+      series for the first and last 50 years. It will produce multi-panel
+      plots for data with `shape_id` or `region` coordinates of length > 1.
+      Supported coordinates: `time`, `shape_id` (optional) and `region`
+      (optional).
+    - Annual cycle (plot type ``annual_cycle``): Generate an annual cycle
+      plot (a climatological time series from January to December). It will
+      produce multi-panel plots for data with `shape_id` or `region`
+      coordinates of length > 1. Supported coordinates: `time`, `shape_id`
+      (optional) and `region` (optional).
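+
+For example, a recipe could select some of these plot types as follows (a
+minimal sketch; the available options for each plot type are listed
+below)::
+
+    plots:
+      timeseries: {}
+      seasonclim:
+        maps: [global]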
+
+Configuration options in recipe
+-------------------------------
+cartopy_data_dir: str, optional (default: None)
+    Path to cartopy data dir. Defaults to None. See
+    https://scitools.org.uk/cartopy/docs/latest/.
+config_file: str, optional
+    Path to the monitor configuration file. Defaults to
+    ``monitor_config.yml`` in the same folder as the diagnostic script. More
+    information on the monitor configuration file can be found :ref:`here `.
+plots: dict, optional
+    Plot types plotted by this diagnostic (see list above). Dictionary keys
+    must be ``clim``, ``seasonclim``, ``monclim``, ``timeseries`` or
+    ``annual_cycle``. Dictionary values are dictionaries used as options for
+    the corresponding plot. The allowed options for the different plot types
+    are given below.
+plot_filename: str, optional
+    Filename pattern for the plots.
+    Defaults to ``{plot_type}_{real_name}_{dataset}_{mip}_{exp}_{ensemble}``.
+    All tags (i.e., the entries in curly brackets, e.g., ``{dataset}``, are
+    replaced with the corresponding values).
+plot_folder: str, optional
+    Path to the folder to store figures. Defaults to
+    ``{plot_dir}/../../{dataset}/{exp}/{modeling_realm}/{real_name}``. All
+    tags (i.e., the entries in curly brackets, e.g., ``{dataset}``, are
+    replaced with the corresponding values). ``{plot_dir}`` is replaced with
+    the default ESMValTool plot directory (i.e.,
+    ``output_dir/plots/diagnostic_name/script_name/``, see
+    :ref:`esmvalcore:outputdata`).
+rasterize_maps: bool, optional (default: True)
+    If ``True``, use `rasterization
+    `_ for
+    map plots to produce smaller files. This is only relevant for vector
+    graphics (e.g., ``output_file_type=pdf,svg,ps``).
+
+In the variable definitions, users can set the attribute ``plot_name`` to
+fix the variable name that will be used for the plot's title. If it is not
+set, ``mapgenerator`` will try to choose a sensible one from the name
+attributes (``long_name``, ``standard_name`` and ``var_name``).
+
+Configuration options for plot type ``clim``
+--------------------------------------------
+maps: list of str, optional (default: ['global'])
+    List of maps to plot, as defined in the monitor configuration file.
+
+Configuration options for plot type ``seasonclim``
+--------------------------------------------------
+maps: list of str, optional (default: ['global'])
+    List of maps to plot, as defined in the monitor configuration file.
+
+Configuration options for plot type ``monclim``
+-----------------------------------------------
+maps: list of str, optional (default: ['global'])
+    List of maps to plot, as defined in the monitor configuration file.
+months: list of int, optional
+    Select only specific months. Defaults to ``None`` (i.e. show all months).
+plot_size: tuple of int, optional (default: (5, 4))
+    Size of each individual figure.
+columns: int, optional (default: 3)
+    Number of columns in the plot.
+rows: int, optional (default: 4)
+    Number of rows in the plot.
+
+Configuration options for plot type ``timeseries``
+--------------------------------------------------
+None
+
+Configuration options for plot type ``annual_cycle``
+----------------------------------------------------
+None
+
+.. hint::
+
+    Extra arguments given to the recipe are ignored, so it is safe to use
+    yaml anchors to share the configuration of common arguments with other
+    monitor diagnostic scripts.
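+
+For illustration, with the default ``plot_folder`` pattern a dataset tag
+``EC-Earth3`` and experiment tag ``historical`` would place figures in an
+``.../EC-Earth3/historical/...`` subtree of the plot directory (made-up tag
+values).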
+ +""" + +import calendar +import logging +from copy import deepcopy + +import iris +import iris.coord_categorisation +import matplotlib.pyplot as plt +import numpy as np +from esmvalcore.preprocessor import climate_statistics +from iris.coords import AuxCoord +from mapgenerator.plotting.plotmap import PlotMap +from mapgenerator.plotting.timeseries import PlotSeries + +import esmvaltool.diag_scripts.shared +import esmvaltool.diag_scripts.shared.names as n +from esmvaltool.diag_scripts.monitor.monitor_base import MonitorBase +from esmvaltool.diag_scripts.shared import group_metadata + +logger = logging.getLogger(__name__) + + +class Monitor(MonitorBase): + """Diagnostic to plot preprocessor output.""" + + def __init__(self, config): + super().__init__(config) + self.plots = config.get('plots', {}) + self.has_errors = False + + # Get default settings + self.cfg = deepcopy(self.cfg) + self.cfg.setdefault('rasterize_maps', True) + + def compute(self): + """Plot preprocessed data.""" + for module in ['matplotlib', 'fiona']: + module_logger = logging.getLogger(module) + module_logger.setLevel(logging.WARNING) + data = group_metadata(self.cfg['input_data'].values(), 'alias') + for alias in data: + variables = group_metadata(data[alias], 'variable_group') + + for var_name, var_info in variables.items(): + logger.info('Plotting variable %s', var_name) + var_info = var_info[0] + cubes = iris.load(var_info['filename']) + if len(cubes) == 1: + cube = cubes[0] + else: + for cube in cubes: + if cube.var_name == var_name: + break + else: + raise ValueError( + f'Can not find cube {var_name} in {cubes}') + cube.var_name = self._real_name(var_name) + cube.attributes['plot_name'] = var_info.get('plot_name', '') + + self.timeseries(cube, var_info) + self.plot_annual_cycle(cube, var_info) + self.plot_monthly_climatology(cube, var_info) + self.plot_seasonal_climatology(cube, var_info) + self.plot_climatology(cube, var_info) + if self.has_errors: + raise Exception( + 'Errors detected. Please check log for more details') + + @staticmethod + def _add_month_name(cube): + if cube.coords('month_number'): + month_number = cube.coord('month_number') + points = np.empty(month_number.shape, dtype='|S12') + for i in range(1, 13): + points[month_number.points == i] = calendar.month_name[i] + cube.add_aux_coord( + AuxCoord(points=points, + var_name='month_name', + long_name='month_name'), + cube.coord_dims(month_number)) + points = np.empty(month_number.shape, dtype='|S3') + for i in range(1, 13): + points[month_number.points == i] = str( + calendar.month_name[i].upper()) + cube.add_aux_coord( + AuxCoord(points=points, var_name='month', long_name='month'), + cube.coord_dims(month_number)) + return + + def timeseries(self, cube, var_info): + """Plot timeseries according to configuration. + + The key 'timeseries' must be passed to the 'plots' option in the + configuration. + + Parameters + ---------- + cube: iris.cube.Cube + Data to plot. Must be 1D with time or 2D with an extra 'shape_id' + or 'region' coordinate. 
In that case, a multi-panel plot with one panel per region will be created.
+        var_info: dict
+            Variable's metadata from ESMValTool
+        """
+        if 'timeseries' not in self.plots:
+            return
+        if not cube.coords('year'):
+            iris.coord_categorisation.add_year(cube, 'time')
+        self.plot_timeseries(cube, var_info, suptitle='Full period')
+        if var_info[n.END_YEAR] - var_info[n.START_YEAR] > 75:
+            self.plot_timeseries(cube.extract(
+                iris.Constraint(
+                    year=lambda cell: cell <= (var_info[n.START_YEAR] + 50))),
+                var_info,
+                period='start',
+                suptitle='First 50 years')
+            self.plot_timeseries(cube.extract(
+                iris.Constraint(
+                    year=lambda cell: cell >= (var_info[n.END_YEAR] - 50))),
+                var_info,
+                period='end',
+                suptitle='Last 50 years')
+
+    def plot_annual_cycle(self, cube, var_info):
+        """Plot the annual cycle according to configuration.
+
+        The key 'annual_cycle' must be passed to the 'plots' option in the
+        configuration.
+
+        Parameters
+        ----------
+        cube: iris.cube.Cube
+            Data to plot. Must be 1D with time or 2D with an extra
+            'shape_id' or 'region' coordinate. In that case, a multi-panel
+            plot with one panel per region will be created.
+        var_info: dict
+            Variable's metadata from ESMValTool
+
+        Warning
+        -------
+        The monthly climatology is computed inside this function, so users
+        can plot both the time series and the annual cycle in one go.
+        """
+        if 'annual_cycle' not in self.plots:
+            return
+        cube = climate_statistics(cube, period='month')
+        self._add_month_name(cube)
+
+        plotter = PlotSeries()
+        plotter.outdir = self.get_plot_folder(var_info)
+        plotter.img_template = self.get_plot_path('annualcycle', var_info,
+                                                  add_ext=False)
+        plotter.filefmt = self.cfg['output_file_type']
+        region_coords = ('shape_id', 'region')
+        options = {
+            'xlabel': '',
+            'xlimits': None,
+            'suptitle': 'Annual cycle',
+        }
+        for region_coord in region_coords:
+            if cube.coords(region_coord):
+                plotter.multiplot_cube(cube, 'month', region_coord, **options)
+                return
+        plotter.plot_cube(cube, 'month', **options)
+        caption = (f"Annual cycle of {var_info[n.LONG_NAME]} of "
+                   f"dataset {var_info[n.DATASET]} (project "
+                   f"{var_info[n.PROJECT]}) from {var_info[n.START_YEAR]} to "
+                   f"{var_info[n.END_YEAR]}.")
+        self.record_plot_provenance(
+            self.get_plot_path('annualcycle', var_info),
+            var_info,
+            'Annual cycle',
+            caption=caption,
+        )
+
+    def plot_monthly_climatology(self, cube, var_info):
+        """Plot the monthly climatology as a multi-panel plot.
+
+        The key 'monclim' must be passed to the 'plots' option in the
+        configuration.
+
+        Parameters
+        ----------
+        cube: iris.cube.Cube
+            Data to plot.
Must be 3D with latitude, longitude and month_number + var_info: dict + Variable's metadata from ESMValTool + """ + if 'monclim' not in self.plots: + return + + plot_map = PlotMap() + maps = self.plots['monclim'].get('maps', ['global']) + months = self.plots['monclim'].get('months', None) + plot_size = self.plots['monclim'].get('plot_size', (5, 4)) + columns = self.plots['monclim'].get('columns', 3) + rows = self.plots['monclim'].get('rows', 4) + if months: + cube = cube.extract( + iris.Constraint(month_number=lambda cell: cell in months)) + self._add_month_name(cube) + for map_name in maps: + map_options = self._get_proj_options(map_name) + variable_options = self._get_variable_options( + var_info['variable_group'], map_name) + plt.figure(figsize=(plot_size[0] * columns, plot_size[1] * rows), + dpi=120) + + for cube_slice in cube.slices_over('month_number'): + if cube_slice.ndim != 2: + logger.error( + 'Climatologies can only be plotted for 2D vars. ' + 'Skipping...') + self.has_errors = True + return + self._plot_monthly_cube(plot_map, months, columns, rows, + map_options, variable_options, + cube_slice) + plt.suptitle( + 'Monthly climatology ' + f'({var_info[n.START_YEAR]}-{var_info[n.END_YEAR]})' + f'\n{cube.long_name} ({cube.units})', + fontsize=plot_map.fontsize + 4., + y=1.2 - rows * 0.07, + ) + plt.subplots_adjust( + top=0.85, + bottom=.05, + left=0, + right=1, + hspace=.20, + wspace=.15, + ) + filename = self.get_plot_path(f'monclim{map_name}', var_info) + plt.savefig( + filename, + bbox_inches='tight', + pad_inches=.2, + ) + plt.close(plt.gcf()) + caption = (f"Monthly climatology of {var_info[n.LONG_NAME]} of " + f"dataset {var_info[n.DATASET]} (project " + f"{var_info[n.PROJECT]}) from {var_info[n.START_YEAR]} " + f"to {var_info[n.END_YEAR]}.") + self.record_plot_provenance( + filename, + var_info, + 'Monthly climatology', + region=map_name, + caption=caption, + ) + cube.remove_coord('month') + cube.remove_coord('month_name') + + def _plot_monthly_cube(self, plot_map, months, columns, rows, map_options, + variable_options, cube_slice): + month = cube_slice.coord('month_number').points[0] + month_name = cube_slice.coord('month_name').points[0] + if months: + index = months.index(month) + 1 + else: + if month == 12: + index = 1 + else: + index = month + 1 + plot_map.plot_cube( + cube_slice, + save=False, + subplot=(rows, columns, index), + keep_aspect=True, + title=month_name.decode(), + **{ + **map_options, + **variable_options + }, + ) + if self.cfg['rasterize_maps']: + self._set_rasterized() + + def plot_seasonal_climatology(self, cube, var_info): + """Plot the seasonal climatology as a multipanel plot. + + The key 'seasonclim' must be passed to the 'plots' option in the + configuration. + + Parameters + ---------- + cube: iris.cube.Cube + Data to plot. 
Must be 3D with latitude, longitude and month_number + or season + var_info: dict + Variable's metadata from ESMValTool + + Warning + ------- + The seasonal climatology can be done inside the function so the users + can plot monthly, seasonal and yearly climatologies in one go + """ + if 'seasonclim' not in self.plots: + return + + season = { + 12: 'DJF', + 1: 'DJF', + 2: 'DJF', + 3: 'MAM', + 4: 'MAM', + 5: 'MAM', + 6: 'JJA', + 7: 'JJA', + 8: 'JJA', + 9: 'SON', + 10: 'SON', + 11: 'SON' + } + if cube.coords('month_number'): + points = [ + season[point] for point in cube.coord('month_number').points + ] + cube.add_aux_coord(iris.coords.AuxCoord(points, var_name='season'), + cube.coord_dims('month_number')) + cube = cube.aggregated_by('season', iris.analysis.MEAN) + + plot_map = PlotMap() + maps = self.plots['seasonclim'].get('maps', ['global']) + for map_name in maps: + map_options = self._get_proj_options(map_name) + variable_options = self._get_variable_options( + var_info['variable_group'], map_name) + index = 0 + for cube_slice in cube.slices_over('season'): + index += 1 + season = cube_slice.coord('season').points[0] + if cube_slice.ndim != 2: + logger.error( + 'Climatologies can only be plotted for 2D vars. ' + 'Skipping...') + self.has_errors = True + return + plot_map.plot_cube( + cube_slice, + save=False, + subplot=(2, 2, index), + keep_aspect=True, + title=season, + **{ + **map_options, + **variable_options, + }, + ) + if self.cfg['rasterize_maps']: + self._set_rasterized() + plt.tight_layout() + plt.suptitle( + 'Seasonal climatology ' + f'({var_info[n.START_YEAR]}-{var_info[n.END_YEAR]})\n' + f'{cube.long_name} ({cube.units})', + fontsize=plot_map.fontsize + 4, + ) + plt.subplots_adjust( + top=0.85, + bottom=.05, + left=0, + right=1, + hspace=.20, + wspace=.15, + ) + filename = self.get_plot_path(f'seasonclim{map_name}', var_info) + plt.savefig( + filename, + bbox_inches='tight', + pad_inches=.2, + ) + plt.close(plt.gcf()) + caption = (f"Seasonal climatology of {var_info[n.LONG_NAME]} of " + f"dataset {var_info[n.DATASET]} (project " + f"{var_info[n.PROJECT]}) from {var_info[n.START_YEAR]} " + f"to {var_info[n.END_YEAR]}.") + self.record_plot_provenance( + filename, + var_info, + 'Seasonal climatology', + region=map_name, + caption=caption, + ) + cube.remove_coord('season') + + def plot_climatology(self, cube, var_info): + """Plot the climatology as a multipanel plot. + + The key 'clim' must be passed to the 'plots' option in the + configuration. + + Parameters + ---------- + cube: iris.cube.Cube + Data to plot. Must be 3D with latitude, longitude and month_number + or season or 2D with latitude and longitude + var_info: dict + Variable's metadata from ESMValTool + + Warning + ------- + The climatology can be done inside the function from the monthly and + seasonal climatologies so the users can plot several of them in one go + """ + if 'clim' not in self.plots: + return + + if cube.coords('month_number'): + cube = cube.collapsed('month_number', iris.analysis.MEAN) + elif cube.coords('season'): + cube = cube.collapsed('season', iris.analysis.MEAN) + maps = self.plots['clim'].get('maps', ['global']) + plot_map = PlotMap(loglevel='INFO') + plot_map.outdir = self.get_plot_folder(var_info) + for map_name in maps: + map_options = self._get_proj_options(map_name) + + variable_options = self._get_variable_options( + var_info['variable_group'], map_name) + if cube.ndim != 2: + logger.error('Climatologies can only be plotted for 2D vars. 
' + 'Skipping...') + self.has_errors = True + return + + plot_map.plot_cube(cube, + save=False, + **{ + **map_options, + **variable_options + }) + + # Note: plt.gca() is the colorbar here, use plt.gcf().axes to + # access the correct axes + if self.cfg['rasterize_maps']: + self._set_rasterized(plt.gcf().axes[0]) + plt.suptitle( + f'Climatology ({var_info[n.START_YEAR]}' + f'-{var_info[n.END_YEAR]})', + y=map_options.get('suptitle_pos', 0.95), + fontsize=plot_map.fontsize + 4) + filename = self.get_plot_path(f'clim{map_name}', var_info) + plt.savefig(filename, + bbox_inches='tight', + pad_inches=.2, + dpi=plot_map.dpi) + plt.close(plt.gcf()) + caption = (f"Climatology of {var_info[n.LONG_NAME]} of dataset " + f"{var_info[n.DATASET]} (project " + f"{var_info[n.PROJECT]}) from {var_info[n.START_YEAR]} " + f"to {var_info[n.END_YEAR]}.") + self.record_plot_provenance( + filename, + var_info, + 'Climatology', + region=map_name, + caption=caption, + ) + + +def main(): + """Execute diagnostic.""" + with esmvaltool.diag_scripts.shared.run_diagnostic() as config: + Monitor(config).compute() + + +if __name__ == "__main__": + main() diff --git a/esmvaltool/diag_scripts/monitor/monitor_base.py b/esmvaltool/diag_scripts/monitor/monitor_base.py new file mode 100644 index 0000000000..21dc159619 --- /dev/null +++ b/esmvaltool/diag_scripts/monitor/monitor_base.py @@ -0,0 +1,346 @@ +"""Base class for monitoring diagnostics.""" + +import logging +import os +import re + +import cartopy +import matplotlib.pyplot as plt +import yaml +from iris.analysis import MEAN +from mapgenerator.plotting.timeseries import PlotSeries + +from esmvaltool.diag_scripts.shared import ProvenanceLogger, names + +logger = logging.getLogger(__name__) + + +def _replace_tags(paths, variable): + """Replace tags in the config-developer's file with actual values.""" + if isinstance(paths, str): + paths = set((paths.strip('/'), )) + else: + paths = set(path.strip('/') for path in paths) + tlist = set() + for path in paths: + tlist = tlist.union(re.findall(r'{([^}]*)}', path)) + if 'sub_experiment' in variable: + new_paths = [] + for path in paths: + new_paths.extend( + (re.sub(r'(\b{ensemble}\b)', r'{sub_experiment}-\1', path), + re.sub(r'({ensemble})', r'{sub_experiment}-\1', path))) + tlist.add('sub_experiment') + paths = new_paths + + for tag in tlist: + original_tag = tag + tag, _, _ = _get_caps_options(tag) + + if tag == 'latestversion': # handled separately later + continue + if tag in variable: + replacewith = variable[tag] + else: + raise ValueError(f"Dataset key '{tag}' must be specified for " + f"{variable}, check your recipe entry") + paths = _replace_tag(paths, original_tag, replacewith) + return paths + + +def _replace_tag(paths, tag, replacewith): + """Replace tag by replacewith in paths.""" + _, lower, upper = _get_caps_options(tag) + result = [] + if isinstance(replacewith, (list, tuple)): + for item in replacewith: + result.extend(_replace_tag(paths, tag, item)) + else: + text = _apply_caps(str(replacewith), lower, upper) + result.extend(p.replace('{' + tag + '}', text) for p in paths) + return list(set(result)) + + +def _get_caps_options(tag): + lower = False + upper = False + if tag.endswith('.lower'): + lower = True + tag = tag[0:-6] + elif tag.endswith('.upper'): + upper = True + tag = tag[0:-6] + return tag, lower, upper + + +def _apply_caps(original, lower, upper): + if lower: + return original.lower() + if upper: + return original.upper() + return original + + +class MonitorBase(): + """Base class for monitoring 
diagnostics.
+
+    It contains the common methods for path creation, provenance
+    recording, option parsing, and the creation of some common plots.
+
+    """
+
+    def __init__(self, config):
+        self.cfg = config
+        plot_folder = config.get(
+            'plot_folder',
+            '{plot_dir}/../../{dataset}/{exp}/{modeling_realm}/{real_name}',
+        )
+        plot_folder = plot_folder.replace('{plot_dir}',
+                                          self.cfg[names.PLOT_DIR])
+        self.plot_folder = os.path.abspath(
+            os.path.expandvars(os.path.expanduser(plot_folder))
+        )
+        self.plot_filename = config.get(
+            'plot_filename',
+            '{plot_type}_{real_name}_{dataset}_{mip}_{exp}_{ensemble}')
+        self.plots = config.get('plots', {})
+        default_config = os.path.join(os.path.dirname(__file__),
+                                      "monitor_config.yml")
+        cartopy_data_dir = config.get('cartopy_data_dir', None)
+        if cartopy_data_dir:
+            cartopy.config['data_dir'] = cartopy_data_dir
+        with open(config.get('config_file', default_config)) as config_file:
+            self.config = yaml.safe_load(config_file)
+
+    def _add_file_extension(self, filename):
+        """Add extension to plot filename."""
+        return f"{filename}.{self.cfg['output_file_type']}"
+
+    def _get_proj_options(self, map_name):
+        return self.config['maps'][map_name]
+
+    def _get_variable_options(self, variable_group, map_name):
+        options = self.config['variables'].get(
+            variable_group, self.config['variables']['default'])
+        if 'default' not in options:
+            variable_options = options
+        else:
+            variable_options = options['default']
+            if map_name in options:
+                variable_options = {**variable_options, **options[map_name]}
+
+        if 'bounds' in variable_options:
+            if not isinstance(variable_options['bounds'], str):
+                variable_options['bounds'] = [
+                    float(n) for n in variable_options['bounds']
+                ]
+        logger.debug(variable_options)
+        return variable_options
+
+    def plot_timeseries(self, cube, var_info, period='', **kwargs):
+        """Plot timeseries from a cube.
+
+        It also automatically smooths the series for long timeseries of
+        monthly data:
+        - For series between 10 and 70 years long, it also plots the
+          12-month rolling average alongside the raw series
+        - For series longer than 70 years, it plots the 12-month and
+          10-year rolling averages and not the raw series
+
+        """
+        if 'xlimits' not in kwargs:
+            kwargs['xlimits'] = 'auto'
+        length = cube.coord("year").points.max() - cube.coord(
+            "year").points.min()
+        filename = self.get_plot_path(f'timeseries{period}', var_info,
+                                      add_ext=False)
+        caption = ("{} of "
+                   f"{var_info[names.LONG_NAME]} of dataset "
+                   f"{var_info[names.DATASET]} (project "
+                   f"{var_info[names.PROJECT]}) from "
+                   f"{var_info[names.START_YEAR]} to "
+                   f"{var_info[names.END_YEAR]}.")
+        if length < 10 or length * 11 > cube.coord("year").shape[0]:
+            self.plot_cube(cube, filename, **kwargs)
+            self.record_plot_provenance(
+                self._add_file_extension(filename),
+                var_info,
+                'timeseries',
+                period=period,
+                caption=caption.format("Time series"),
+            )
+        elif length < 70:
+            self.plot_cube(cube, filename, **kwargs)
+            self.record_plot_provenance(
+                self._add_file_extension(filename),
+                var_info,
+                'timeseries',
+                period=period,
+                caption=caption.format("Time series"),
+            )
+
+            # Smoothed time series (12-month running mean)
+            plt.gca().set_prop_cycle(None)
+            self.plot_cube(cube.rolling_window('time', MEAN, 12),
+                           f"{filename}_smoothed_12_months",
+                           **kwargs)
+            self.record_plot_provenance(
+                self._add_file_extension(f"{filename}_smoothed_12_months"),
+                var_info,
+                'timeseries',
+                period=period,
+                caption=caption.format(
+                    "Smoothed (12-months running mean) time series"),
+            )
+        else:
+            # Smoothed time series (12-month running mean)
+            self.plot_cube(cube.rolling_window('time', MEAN, 12),
+                           f"{filename}_smoothed_12_months",
+                           **kwargs)
+            self.record_plot_provenance(
+                self._add_file_extension(f"{filename}_smoothed_12_months"),
+                var_info,
+                'timeseries',
+                period=period,
+                caption=caption.format(
+                    "Smoothed (12-months running mean) time series"),
+            )
+
+            # Smoothed time series (10-year running mean)
+            self.plot_cube(cube.rolling_window('time', MEAN, 120),
+                           f"{filename}_smoothed_10_years",
+                           **kwargs)
+            self.record_plot_provenance(
+                self._add_file_extension(f"{filename}_smoothed_10_years"),
+                var_info,
+                'timeseries',
+                period=period,
+                caption=caption.format(
+                    "Smoothed (10-years running mean) time series"),
+            )
+
+    def record_plot_provenance(self, filename, var_info, plot_type, **kwargs):
+        """Write provenance info for a given file."""
+        with ProvenanceLogger(self.cfg) as provenance_logger:
+            prov = self.get_provenance_record(
+                ancestor_files=[var_info['filename']],
+                plot_type=plot_type,
+                long_names=[var_info[names.LONG_NAME]],
+                **kwargs,
+            )
+            provenance_logger.log(filename, prov)
+
+    def plot_cube(self, cube, filename, linestyle='-', **kwargs):
+        """Plot a timeseries from a cube.
+
+        Supports multiplot layouts for cubes with extra dimensions
+        `shape_id` or `region`.
+ + """ + plotter = PlotSeries() + plotter.filefmt = self.cfg['output_file_type'] + plotter.img_template = filename + region_coords = ('shape_id', 'region') + + for region_coord in region_coords: + if cube.coords(region_coord): + if cube.coord(region_coord).shape[0] > 1: + plotter.multiplot_cube(cube, 'time', region_coord, + **kwargs) + return + plotter.plot_cube(cube, 'time', linestyle=linestyle, **kwargs) + + @staticmethod + def get_provenance_record(ancestor_files, **kwargs): + """Create provenance record for the diagnostic data and plots.""" + record = { + 'authors': [ + 'vegas-regidor_javier', + ], + 'references': [ + 'acknow_project', + ], + 'ancestors': ancestor_files, + **kwargs + } + return record + + def get_plot_path(self, plot_type, var_info, add_ext=True): + """Get plot full path from variable info. + + Parameters + ---------- + plot_type: str + Name of the plot + var_info: dict + Variable information from ESMValTool + add_ext: bool, optional (default: True) + Add filename extension from configuration file. + + """ + return os.path.join( + self.get_plot_folder(var_info), + self.get_plot_name(plot_type, var_info, add_ext=add_ext), + ) + + def get_plot_folder(self, var_info): + """Get plot storage folder from variable info. + + Parameters + ---------- + var_info: dict + Variable information from ESMValTool + + """ + info = { + 'real_name': self._real_name(var_info['variable_group']), + **var_info + } + folder = list(_replace_tags(self.plot_folder, info))[0] + if self.plot_folder.startswith('/'): + folder = '/' + folder + if not os.path.isdir(folder): + os.makedirs(folder, exist_ok=True) + return folder + + def get_plot_name(self, plot_type, var_info, add_ext=True): + """Get plot filename from variable info. + + Parameters + ---------- + plot_type: str + Name of the plot + var_info: dict + Variable information from ESMValTool + add_ext: bool, optional (default: True) + Add filename extension from configuration file. 
+ + """ + info = { + "plot_type": plot_type, + 'real_name': self._real_name(var_info['variable_group']), + **var_info + } + file_name = list(_replace_tags(self.plot_filename, info))[0] + if add_ext: + file_name = self._add_file_extension(file_name) + return file_name + + @staticmethod + def _set_rasterized(axes=None): + """Rasterize all artists and collection of axes if desired.""" + if axes is None: + axes = plt.gca() + if not isinstance(axes, list): + axes = [axes] + for single_axes in axes: + for artist in single_axes.artists: + artist.set_rasterized(True) + for collection in single_axes.collections: + collection.set_rasterized(True) + + @staticmethod + def _real_name(variable_group): + for subfix in ('Ymean', 'Ysum', 'mean', 'sum'): + if variable_group.endswith(subfix): + variable_group = variable_group.replace(subfix, '') + return variable_group diff --git a/esmvaltool/diag_scripts/monitor/monitor_config.yml b/esmvaltool/diag_scripts/monitor/monitor_config.yml new file mode 100644 index 0000000000..24aecf873f --- /dev/null +++ b/esmvaltool/diag_scripts/monitor/monitor_config.yml @@ -0,0 +1,114 @@ +--- +maps: + global: # Map name + projection: PlateCarree + projection_kwargs: + central_longitude: 285 + smooth: true + lon: [-120, -60, 0, 60, 120, 180] + lat: [-90, -60, -30, 0, 30, 60, 90] + colorbar_location: bottom + extent: null + suptitle_pos: 0.87 + arctic: + projection: NorthPolarStereo + projection_kwargs: + central_longitude: 270 + lon: [-180, -150, -120, -90, -60, 0, 30, 60, 90, 120, 150, 180] + lat: [50, 60, 70, 80, 90] + smooth: true + draw_labels: true + suptitle_pos: 1.06 + antarctic: + projection: SouthPolarStereo + projection_kwargs: + central_longitude: 270 + lon: [-180, -150, -120, -90, -60, 0, 30, 60, 90, 120, 150, 180] + lat: [-50, -60, -70, -80, -90] + draw_labels: true + smooth: true + suptitle_pos: 1.06 + north: + projection: NorthPolarStereo + projection_kwargs: + central_longitude: 270 + lon: [-180, -150, -120, -90, -60, 0, 30, 60, 90, 120, 150, 180] + lat: [40, 50, 60, 70, 80, 90] + smooth: true + suptitle_pos: 1.06 + south: + projection: SouthPolarStereo + projection_kwargs: + central_longitude: 270 + lon: [-180, -150, -120, -90, -60, 0, 30, 60, 90, 120, 150, 180] + lat: [-40, -50, -60, -70, -80, -90] + smooth: true + suptitle_pos: 1.06 + + +variables: + default: &default + colors: RdYlBu_r + N: 20 + bad: [0.9, 0.9, 0.9] + pr: + <<: *default + colors: gist_earth_r + bounds: 0-10.5,0.5 + extend: max + heatc0-300m: + <<: *default + extend: both + bounds: 3.e11-3.75e11,0.05e11 + sos: + default: + <<: *default + bounds: 25-41,1 + extend: both + arctic: + bounds: 25-40,1 + antarctic: + bounds: 30-40,0.5 + siconc: + <<: *default + colors: Blues_r + bounds: 0-100,10 + sivol: + <<: *default + bounds: 0-5,0.25 + extend: max + msftbarot: + <<: *default + bounds: [-2.5e11, -2.25e11, -2.0e11, -1.75e11, -1.5e11, -1.25e11, -1.0e11, + -0.75e11, -0.5e11, -0.25e11, 0.0, 0.25e11, 0.5e11, 0.75e11, + 1.0e11, 1.25e11, 1.5e11, 1.75e11, 2.0e11, 2.25e11, 2.5e11] + extend: both + od550aer: &opticaldepth + <<: *default + colors: Blues + bounds: 0-2,0.20 + extend: max + od550dust: *opticaldepth + od550so4: *opticaldepth + od550bc: *opticaldepth + od550oa: *opticaldepth + od550no3: *opticaldepth + od550ss: *opticaldepth + nao: &nao + <<: *default + extend: both + bounds: [-0.03, -0.025, -0.02, -0.015, -0.01, -0.005, 0., 0.005, 0.01, 0.015, 0.02, 0.025, 0.03] + projection: PlateCarree + smooth: true + lon: [-90, -60, -30, 0, 30] + lat: [20, 40, 60, 80] + colorbar_location: bottom + 
suptitle_pos: 0.87 + sam: + <<: *nao + lat: [-90, -80, -70, -60, -50] + projection: SouthPolarStereo + projection_kwargs: + central_longitude: 270 + smooth: true + lon: [-120, -60, 0, 60, 120, 180] diff --git a/esmvaltool/diag_scripts/monitor/multi_datasets.py b/esmvaltool/diag_scripts/monitor/multi_datasets.py new file mode 100644 index 0000000000..41f238a64e --- /dev/null +++ b/esmvaltool/diag_scripts/monitor/multi_datasets.py @@ -0,0 +1,2621 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +"""Monitoring diagnostic to show multiple datasets in one plot (incl. biases). + +Description +----------- +This diagnostic can be used to visualize multiple datasets in one plot. + +For some plot types, a reference dataset can be defined. For this, use the +facet ``reference_for_monitor_diags: true`` in the definition of the dataset in +the recipe. Note that at most one reference dataset per variable is supported. + +Currently supported plot types (use the option ``plots`` to specify them): + - Time series (plot type ``timeseries``): for each variable separately, all + datasets are plotted in one single figure. Input data needs to be 1D with + single dimension `time`. + - Annual cycle (plot type ``annual_cycle``): for each variable separately, + all datasets are plotted in one single figure. Input data needs to be 1D + with single dimension `month_number`. + - Maps (plot type ``map``): for each variable and dataset, an individual + map is plotted. If a reference dataset is defined, this dataset and a + bias plot are also included in the figure. Note that if a reference dataset + is defined, all input datasets need to be given on the same horizontal + grid (you can use the preprocessor :func:`esmvalcore.preprocessor.regrid` + for this). Input data needs to be 2D with dimensions `latitude`, + `longitude`. + - Zonal mean profiles (plot type ``zonal_mean_profile``): + for each variable and dataset, an individual profile is plotted. If a + reference dataset is defined, this dataset and a bias plot are also + included in the figure. Note that if a reference dataset is defined, all + input datasets need to be given on the same horizontal and vertical grid + (you can use the preprocessors :func:`esmvalcore.preprocessor.regrid` and + :func:`esmvalcore.preprocessor.extract_levels` for this). Input data + needs to be 2D with dimensions `latitude`, `altitude`/`air_pressure`. + + .. warning:: + + The plot_type ``profile`` for zonal mean profiles has been deprecated + in ESMValTool version 2.9.0 and is scheduled for removal in version + 2.11.0. Please use plot type ``zonal_mean_profile`` instead. This is + an exact replacement. + + - 1D profiles (plot type ``1d_profile``): for each variable separately, all + datasets are plotted in one single figure. Input data needs to be 1D with + single dimension `altitude`/`air_pressure`. + - Variable vs. latitude plot (plot type ``variable_vs_lat``): + for each variable separately, all datasets are plotted in one + single figure. Input data needs to be 1D with single + dimension `latitude`. + - Hovmoeller Z vs. time (plot type ``hovmoeller_z_vs_time``): for each + variable and dataset, an individual figure is plotted. If a reference + dataset is defined, this dataset and a bias plot are also included in the + figure. Note that if a reference dataset is defined, all input datasets + need to be given on the same temporal and vertical grid (you can use + the preprocessors :func:`esmvalcore.preprocessor.regrid_time` and + :func:`esmvalcore.preprocessor.extract_levels` for this).
Input data + needs to be 2D with dimensions `time`, `altitude`/`air_pressure`. + - Hovmoeller time vs. latitude or longitude (plot type + ``hovmoeller_time_vs_lat_or_lon``): for each variable and dataset, an + individual figure is plotted. If a reference dataset is defined, this + dataset and a bias plot are also included in the figure. Note that if a + reference dataset is defined, all input datasets need to be given on the + same temporal and horizontal grid (you can use the preprocessors + :func:`esmvalcore.preprocessor.regrid_time` and + :func:`esmvalcore.preprocessor.regrid` for this). Input data + needs to be 2D with dimensions `time`, `latitude`/`longitude`. + +Author +------ +Manuel Schlund (DLR, Germany) + +Configuration options in recipe +------------------------------- +facet_used_for_labels: str, optional (default: 'dataset') + Facet used to label different datasets in plot titles and legends. For + example, ``facet_used_for_labels: dataset`` will use dataset names in plot + titles and legends; ``facet_used_for_labels: exp`` will use experiments in + plot titles and legends. In addition, ``facet_used_for_labels`` is used to + select the correct ``plot_kwargs`` for the different datasets (see + configuration options for the different plot types below). +figure_kwargs: dict, optional + Optional keyword arguments for :func:`matplotlib.pyplot.figure`. By + default, uses ``constrained_layout: true``. +group_variables_by: str, optional (default: 'short_name') + Facet which is used to create variable groups. For each variable group, an + individual plot is created. +plots: dict, optional + Plot types plotted by this diagnostic (see list above). Dictionary keys + must be ``timeseries``, ``annual_cycle``, ``map``, ``zonal_mean_profile``, + ``1d_profile``, ``variable_vs_lat``, ``hovmoeller_z_vs_time``, + ``hovmoeller_time_vs_lat_or_lon``. Dictionary values are dictionaries used + as options for the corresponding plot. The allowed options for the + different plot types are given below. +plot_filename: str, optional + Filename pattern for the plots. + Defaults to ``{plot_type}_{real_name}_{dataset}_{mip}_{exp}_{ensemble}``. + All tags (i.e., the entries in curly brackets, e.g., ``{dataset}``) are + replaced with the corresponding facet values. +plot_folder: str, optional + Path to the folder to store figures. Defaults to + ``{plot_dir}/../../{dataset}/{exp}/{modeling_realm}/{real_name}``. All + tags (i.e., the entries in curly brackets, e.g., ``{dataset}``) are + replaced with the corresponding facet values. ``{plot_dir}`` is replaced + with the default ESMValTool plot directory (i.e., + ``output_dir/plots/diagnostic_name/script_name/``, see + :ref:`esmvalcore:outputdata`). +savefig_kwargs: dict, optional + Optional keyword arguments for :func:`matplotlib.pyplot.savefig`. By + default, uses ``bbox_inches: tight, dpi: 300, orientation: landscape``. +seaborn_settings: dict, optional + Options for :func:`seaborn.set_theme` (affects all plots). By default, uses + ``style: ticks``.
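+
+A minimal sketch of how these general options can be set in a recipe script
+entry (the script entry name ``plot_monitoring`` is arbitrary and all option
+values are purely illustrative, not recommended defaults); the plot-type
+specific ``plots`` options are described, with short sketches, in the
+sections below::
+
+  scripts:
+    plot_monitoring:
+      script: monitor/multi_datasets.py
+      facet_used_for_labels: dataset
+      group_variables_by: short_name
+      savefig_kwargs:
+        dpi: 300
+        bbox_inches: tight
+      seaborn_settings:
+        style: ticks
+      plots:
+        timeseries: {}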
Use ``gridline_kwargs: false`` to not show grid + lines. +legend_kwargs: dict, optional + Optional keyword arguments for :func:`matplotlib.pyplot.legend`. Use + ``legend_kwargs: false`` to not show legends. +plot_kwargs: dict, optional + Optional keyword arguments for :func:`iris.plot.plot`. Dictionary keys are + elements identified by ``facet_used_for_labels`` or ``default``, e.g., + ``CMIP6`` if ``facet_used_for_labels: project`` or ``historical`` if + ``facet_used_for_labels: exp``. Dictionary values are dictionaries used as + keyword arguments for :func:`iris.plot.plot`. String arguments can include + facets in curly brackets which will be derived from the corresponding + dataset, e.g., ``{project}``, ``{short_name}``, ``{exp}``. Examples: + ``default: {linestyle: '-', label: '{project}'}, CMIP6: {color: red, + linestyle: '--'}, OBS: {color: black}``. +pyplot_kwargs: dict, optional + Optional calls to functions of :mod:`matplotlib.pyplot`. Dictionary keys + are functions of :mod:`matplotlib.pyplot`. Dictionary values are used as + argument(s) for these functions (if values are dictionaries, these are + interpreted as keyword arguments; otherwise a single argument is assumed). + String arguments can include facets in curly brackets which will be derived + from the corresponding dataset, e.g., ``{project}``, ``{short_name}``, + ``{exp}``. Examples: ``title: 'Awesome Plot of {long_name}'``, ``xlabel: + '{short_name}'``, ``xlim: [0, 5]``. +time_format: str, optional (default: None) + :func:`~datetime.datetime.strftime` format string that is used to format + the time axis using :class:`matplotlib.dates.DateFormatter`. If ``None``, + use the default formatting imposed by the iris plotting function. + +Configuration options for plot type ``annual_cycle`` +---------------------------------------------------- +gridline_kwargs: dict, optional + Optional keyword arguments for grid lines. By default, ``color: lightgrey, + alpha: 0.5`` are used. Use ``gridline_kwargs: false`` to not show grid + lines. +legend_kwargs: dict, optional + Optional keyword arguments for :func:`matplotlib.pyplot.legend`. Use + ``legend_kwargs: false`` to not show legends. +plot_kwargs: dict, optional + Optional keyword arguments for :func:`iris.plot.plot`. Dictionary keys are + elements identified by ``facet_used_for_labels`` or ``default``, e.g., + ``CMIP6`` if ``facet_used_for_labels: project`` or ``historical`` if + ``facet_used_for_labels: exp``. Dictionary values are dictionaries used as + keyword arguments for :func:`iris.plot.plot`. String arguments can include + facets in curly brackets which will be derived from the corresponding + dataset, e.g., ``{project}``, ``{short_name}``, ``{exp}``. Examples: + ``default: {linestyle: '-', label: '{project}'}, CMIP6: {color: red, + linestyle: '--'}, OBS: {color: black}``. +pyplot_kwargs: dict, optional + Optional calls to functions of :mod:`matplotlib.pyplot`. Dictionary keys + are functions of :mod:`matplotlib.pyplot`. Dictionary values are used as + argument(s) for these functions (if values are dictionaries, these are + interpreted as keyword arguments; otherwise a single argument is assumed). + String arguments can include facets in curly brackets which will be derived + from the corresponding dataset, e.g., ``{project}``, ``{short_name}``, + ``{exp}``. Examples: ``title: 'Awesome Plot of {long_name}'``, ``xlabel: + '{short_name}'``, ``xlim: [0, 5]``. 
+ +Configuration options for plot type ``map`` +------------------------------------------- +cbar_label: str, optional (default: '{short_name} [{units}]') + Colorbar label. Can include facets in curly brackets which will be derived + from the corresponding dataset, e.g., ``{project}``, ``{short_name}``, + ``{exp}``. +cbar_label_bias: str, optional (default: 'Δ{short_name} [{units}]') + Colorbar label for plotting biases. Can include facets in curly brackets + which will be derived from the corresponding dataset, e.g., ``{project}``, + ``{short_name}``, ``{exp}``. This option has no effect if no reference + dataset is given. +cbar_kwargs: dict, optional + Optional keyword arguments for :func:`matplotlib.pyplot.colorbar`. By + default, uses ``orientation: horizontal, aspect: 30``. +cbar_kwargs_bias: dict, optional + Optional keyword arguments for :func:`matplotlib.pyplot.colorbar` for + plotting biases. These keyword arguments update (and potentially overwrite) + the ``cbar_kwargs`` for the bias plot. This option has no effect if no + reference dataset is given. +common_cbar: bool, optional (default: False) + Use a common colorbar for the top panels (i.e., plots of the dataset and + the corresponding reference dataset) when using a reference dataset. If + neither ``vmin`` and ``vmax`` nor ``levels`` is given in ``plot_kwargs``, + the colorbar bounds are inferred from the dataset in the top left panel, + which might lead to an inappropriate colorbar for the reference dataset + (top right panel). Thus, the use of the ``plot_kwargs`` ``vmin`` and + ``vmax`` or ``levels`` is highly recommended when using ``common_cbar: + true``. This option has no effect if no reference dataset is given. +fontsize: int, optional (default: 10) + Fontsize used for ticks, labels and titles. For the latter, the given + fontsize plus 2 is used. Does not affect suptitles. +gridline_kwargs: dict, optional + Optional keyword arguments for grid lines. By default, ``color: lightgrey, + alpha: 0.5`` are used. Use ``gridline_kwargs: false`` to not show grid + lines. +plot_func: str, optional (default: 'contourf') + Plot function used to plot the maps. Must be a function of :mod:`iris.plot` + that supports plotting of 2D cubes with coordinates latitude and longitude. +plot_kwargs: dict, optional + Optional keyword arguments for the plot function defined by ``plot_func``. + Dictionary keys are elements identified by ``facet_used_for_labels`` or + ``default``, e.g., ``CMIP6`` if ``facet_used_for_labels: project`` or + ``historical`` if ``facet_used_for_labels: exp``. Dictionary values are + dictionaries used as keyword arguments for the plot function defined by + ``plot_func``. String arguments can include facets in curly brackets which + will be derived from the corresponding dataset, e.g., ``{project}``, + ``{short_name}``, ``{exp}``. Examples: ``default: {levels: 2}, CMIP6: + {vmin: 200, vmax: 250}``. In addition to the normalization_ options + supported by the plot function, the option ``norm: centered`` can be + specified. In this case, the keywords ``vcenter`` and ``halfrange`` should + be used instead of ``vmin`` or ``vmax`` (see + :class:`~matplotlib.colors.CenteredNorm`). +plot_kwargs_bias: dict, optional + Optional keyword arguments for the plot function defined by ``plot_func`` + for plotting biases. These keyword arguments update (and potentially + overwrite) the ``plot_kwargs`` for the bias plot. This option has no effect + if no reference dataset is given. See option ``plot_kwargs`` for more + details. By default, uses ``cmap: bwr`` and ``norm: centered``. +projection: str, optional (default: 'Robinson') + Projection used for the map plot. Needs to be a valid projection class of + :mod:`cartopy.crs`. Keyword arguments can be specified using the option + ``projection_kwargs``. +projection_kwargs: dict, optional + Optional keyword arguments for the projection given by ``projection``. For + the default projection ``Robinson``, the default keyword arguments + ``central_longitude: 10`` are used. +pyplot_kwargs: dict, optional + Optional calls to functions of :mod:`matplotlib.pyplot`. Dictionary keys + are functions of :mod:`matplotlib.pyplot`. Dictionary values are used as + argument(s) for these functions (if values are dictionaries, these are + interpreted as keyword arguments; otherwise a single argument is assumed). + String arguments can include facets in curly brackets which will be derived + from the corresponding dataset, e.g., ``{project}``, ``{short_name}``, + ``{exp}``. Examples: ``title: 'Awesome Plot of {long_name}'``, ``xlabel: + '{short_name}'``, ``xlim: [0, 5]``. +rasterize: bool, optional (default: True) + If ``True``, use rasterization_ for map plots to produce smaller files. + This is only relevant for vector graphics (e.g., ``output_file_type: + pdf,svg,ps``). +show_stats: bool, optional (default: True) + Show basic statistics on the plots. +x_pos_stats_avg: float, optional (default: 0.0) + Text x-position of average (shown on the left) in Axes coordinates. Can be + adjusted to avoid overlap with the figure. Only relevant if ``show_stats: + true``. +x_pos_stats_bias: float, optional (default: 0.92) + Text x-position of bias statistics (shown on the right) in Axes + coordinates. Can be adjusted to avoid overlap with the figure. Only + relevant if ``show_stats: true``.
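+
+A sketch of the ``map`` options (the ``vmin``/``vmax`` values are
+placeholders, e.g. for a temperature field in K, and need to be adapted to
+the variable at hand; fixing them is recommended when ``common_cbar: true``
+is used, as noted above)::
+
+  plots:
+    map:
+      common_cbar: true
+      plot_kwargs:
+        default: {vmin: 240, vmax: 300}
+      projection: Robinson
+      projection_kwargs:
+        central_longitude: 10
+      show_stats: true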
+ +Configuration options for plot type ``zonal_mean_profile`` +---------------------------------------------------------- +cbar_label: str, optional (default: '{short_name} [{units}]') + Colorbar label. Can include facets in curly brackets which will be derived + from the corresponding dataset, e.g., ``{project}``, ``{short_name}``, + ``{exp}``. +cbar_label_bias: str, optional (default: 'Δ{short_name} [{units}]') + Colorbar label for plotting biases. Can include facets in curly brackets + which will be derived from the corresponding dataset, e.g., ``{project}``, + ``{short_name}``, ``{exp}``. This option has no effect if no reference + dataset is given. +cbar_kwargs: dict, optional + Optional keyword arguments for :func:`matplotlib.pyplot.colorbar`. By + default, uses ``orientation: vertical``. +cbar_kwargs_bias: dict, optional + Optional keyword arguments for :func:`matplotlib.pyplot.colorbar` for + plotting biases. These keyword arguments update (and potentially overwrite) + the ``cbar_kwargs`` for the bias plot. This option has no effect if no + reference dataset is given. +common_cbar: bool, optional (default: False) + Use a common colorbar for the top panels (i.e., plots of the dataset and + the corresponding reference dataset) when using a reference dataset. If + neither ``vmin`` and ``vmax`` nor ``levels`` is given in ``plot_kwargs``, + the colorbar bounds are inferred from the dataset in the top left panel, + which might lead to an inappropriate colorbar for the reference dataset + (top right panel). Thus, the use of the ``plot_kwargs`` ``vmin`` and + ``vmax`` or ``levels`` is highly recommended when using ``common_cbar: + true``. This option has no effect if no reference dataset is given.
+fontsize: int, optional (default: 10) + Fontsize used for ticks, labels and titles. For the latter, the given + fontsize plus 2 is used. Does not affect suptitles. +log_y: bool, optional (default: True) + Use logarithmic Y-axis. +plot_func: str, optional (default: 'contourf') + Plot function used to plot the profiles. Must be a function of + :mod:`iris.plot` that supports plotting of 2D cubes with coordinates + latitude and altitude/air_pressure. +plot_kwargs: dict, optional + Optional keyword arguments for the plot function defined by ``plot_func``. + Dictionary keys are elements identified by ``facet_used_for_labels`` or + ``default``, e.g., ``CMIP6`` if ``facet_used_for_labels: project`` or + ``historical`` if ``facet_used_for_labels: exp``. Dictionary values are + dictionaries used as keyword arguments for the plot function defined by + ``plot_func``. String arguments can include facets in curly brackets which + will be derived from the corresponding dataset, e.g., ``{project}``, + ``{short_name}``, ``{exp}``. Examples: ``default: {levels: 2}, CMIP6: + {vmin: 200, vmax: 250}``. In addition to the normalization_ options + supported by the plot function, the option ``norm: centered`` can be + specified. In this case, the keywords ``vcenter`` and ``halfrange`` should + be used instead of ``vmin`` or ``vmax`` (see + :class:`~matplotlib.colors.CenteredNorm`). +plot_kwargs_bias: dict, optional + Optional keyword arguments for the plot function defined by ``plot_func`` + for plotting biases. These keyword arguments update (and potentially + overwrite) the ``plot_kwargs`` for the bias plot. This option has no effect + if no reference dataset is given. See option ``plot_kwargs`` for more + details. By default, uses ``cmap: bwr`` and ``norm: centered``. +pyplot_kwargs: dict, optional + Optional calls to functions of :mod:`matplotlib.pyplot`. Dictionary keys + are functions of :mod:`matplotlib.pyplot`. Dictionary values are used as + argument(s) for these functions (if values are dictionaries, these are + interpreted as keyword arguments; otherwise a single argument is assumed). + String arguments can include facets in curly brackets which will be derived + from the corresponding dataset, e.g., ``{project}``, ``{short_name}``, + ``{exp}``. Examples: ``title: 'Awesome Plot of {long_name}'``, ``xlabel: + '{short_name}'``, ``xlim: [0, 5]``. +rasterize: bool, optional (default: True) + If ``True``, use rasterization_ for profile plots to produce smaller files. + This is only relevant for vector graphics (e.g., ``output_file_type: + pdf,svg,ps``). +show_stats: bool, optional (default: True) + Show basic statistics on the plots. +show_y_minor_ticklabels: bool, optional (default: False) + Show tick labels for the minor ticks on the Y axis. +x_pos_stats_avg: float, optional (default: 0.01) + Text x-position of average (shown on the left) in Axes coordinates. Can be + adjusted to avoid overlap with the figure. Only relevant if ``show_stats: + true``. +x_pos_stats_bias: float, optional (default: 0.7) + Text x-position of bias statistics (shown on the right) in Axes + coordinates. Can be adjusted to avoid overlap with the figure. Only + relevant if ``show_stats: true``.
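+
+A sketch of the ``zonal_mean_profile`` options (the number of contour levels
+is illustrative)::
+
+  plots:
+    zonal_mean_profile:
+      common_cbar: true
+      log_y: true
+      show_y_minor_ticklabels: true
+      plot_kwargs:
+        default: {levels: 25}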
+ +Configuration options for plot type ``1d_profile`` +-------------------------------------------------- +aspect_ratio: float, optional (default: 1.5) + Aspect ratio of the plot. The default value results in a slender upright + plot. +gridline_kwargs: dict, optional + Optional keyword arguments for grid lines. By default, ``color: lightgrey, + alpha: 0.5`` are used. Use ``gridline_kwargs: false`` to not show grid + lines. +legend_kwargs: dict, optional + Optional keyword arguments for :func:`matplotlib.pyplot.legend`. Use + ``legend_kwargs: false`` to not show legends. +log_x: bool, optional (default: False) + Use logarithmic X-axis. Note that for a logarithmic x-axis the tick marks + are set so that minor tick marks show up; setting individual tick marks via + ``pyplot_kwargs`` is not recommended in this case. +log_y: bool, optional (default: True) + Use logarithmic Y-axis. +plot_kwargs: dict, optional + Optional keyword arguments for :func:`iris.plot.plot`. Dictionary keys are + elements identified by ``facet_used_for_labels`` or ``default``, e.g., + ``CMIP6`` if ``facet_used_for_labels: project`` or ``historical`` if + ``facet_used_for_labels: exp``. Dictionary values are dictionaries used as + keyword arguments for :func:`iris.plot.plot`. String arguments can include + facets in curly brackets which will be derived from the corresponding + dataset, e.g., ``{project}``, ``{short_name}``, ``{exp}``. Examples: + ``default: {linestyle: '-', label: '{project}'}, CMIP6: {color: red, + linestyle: '--'}, OBS: {color: black}``. +pyplot_kwargs: dict, optional + Optional calls to functions of :mod:`matplotlib.pyplot`. Dictionary keys + are functions of :mod:`matplotlib.pyplot`. Dictionary values are used as + argument(s) for these functions (if values are dictionaries, these are + interpreted as keyword arguments; otherwise a single argument is assumed). + String arguments can include facets in curly brackets which will be derived + from the corresponding dataset, e.g., ``{project}``, ``{short_name}``, + ``{exp}``. Examples: ``title: 'Awesome Plot of {long_name}'``, ``xlabel: + '{short_name}'``, ``xlim: [0, 5]``. +show_y_minor_ticklabels: bool, optional (default: False) + Show tick labels for the minor ticks on the Y axis. + +Configuration options for plot type ``variable_vs_lat`` +------------------------------------------------------- +gridline_kwargs: dict, optional + Optional keyword arguments for grid lines. By default, ``color: lightgrey, + alpha: 0.5`` are used. Use ``gridline_kwargs: false`` to not show grid + lines. +legend_kwargs: dict, optional + Optional keyword arguments for :func:`matplotlib.pyplot.legend`. Use + ``legend_kwargs: false`` to not show legends. +plot_kwargs: dict, optional + Optional keyword arguments for :func:`iris.plot.plot`. Dictionary keys are + elements identified by ``facet_used_for_labels`` or ``default``, e.g., + ``CMIP6`` if ``facet_used_for_labels: project`` or ``historical`` if + ``facet_used_for_labels: exp``. Dictionary values are dictionaries used as + keyword arguments for :func:`iris.plot.plot`. String arguments can include + facets in curly brackets which will be derived from the corresponding + dataset, e.g., ``{project}``, ``{short_name}``, ``{exp}``. Examples: + ``default: {linestyle: '-', label: '{project}'}, CMIP6: {color: red, + linestyle: '--'}, OBS: {color: black}``. +pyplot_kwargs: dict, optional + Optional calls to functions of :mod:`matplotlib.pyplot`. Dictionary keys + are functions of :mod:`matplotlib.pyplot`. Dictionary values are used as + argument(s) for these functions (if values are dictionaries, these are + interpreted as keyword arguments; otherwise a single argument is assumed). + String arguments can include facets in curly brackets which will be derived + from the corresponding dataset, e.g., ``{project}``, ``{short_name}``, + ``{exp}``. Examples: ``title: 'Awesome Plot of {long_name}'``, ``xlabel: + '{short_name}'``, ``xlim: [0, 5]``.
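+
+A sketch of the ``1d_profile`` and ``variable_vs_lat`` options (the axis
+label is illustrative)::
+
+  plots:
+    1d_profile:
+      log_y: true
+      aspect_ratio: 1.5
+      pyplot_kwargs:
+        xlabel: '{short_name} [{units}]'
+    variable_vs_lat:
+      gridline_kwargs: false
+      plot_kwargs:
+        default: {label: '{dataset}'}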
+ +Configuration options for plot type ``hovmoeller_z_vs_time`` +------------------------------------------------------------ +cbar_label: str, optional (default: '{short_name} [{units}]') + Colorbar label. Can include facets in curly brackets which will be derived + from the corresponding dataset, e.g., ``{project}``, ``{short_name}``, + ``{exp}``. +cbar_label_bias: str, optional (default: 'Δ{short_name} [{units}]') + Colorbar label for plotting biases. Can include facets in curly brackets + which will be derived from the corresponding dataset, e.g., ``{project}``, + ``{short_name}``, ``{exp}``. This option has no effect if no reference + dataset is given. +cbar_kwargs: dict, optional + Optional keyword arguments for :func:`matplotlib.pyplot.colorbar`. By + default, uses ``orientation: vertical``. +cbar_kwargs_bias: dict, optional + Optional keyword arguments for :func:`matplotlib.pyplot.colorbar` for + plotting biases. These keyword arguments update (and potentially overwrite) + the ``cbar_kwargs`` for the bias plot. This option has no effect if no + reference dataset is given. +common_cbar: bool, optional (default: False) + Use a common colorbar for the top panels (i.e., plots of the dataset and + the corresponding reference dataset) when using a reference dataset. If + neither ``vmin`` and ``vmax`` nor ``levels`` is given in ``plot_kwargs``, + the colorbar bounds are inferred from the dataset in the top left panel, + which might lead to an inappropriate colorbar for the reference dataset + (top right panel). Thus, the use of the ``plot_kwargs`` ``vmin`` and + ``vmax`` or ``levels`` is highly recommended when using ``common_cbar: + true``. This option has no effect if no reference dataset is given. +fontsize: int, optional (default: 10) + Fontsize used for ticks, labels and titles. For the latter, the given + fontsize plus 2 is used. Does not affect suptitles. +log_y: bool, optional (default: True) + Use logarithmic Y-axis. +plot_func: str, optional (default: 'contourf') + Plot function used to plot the Hovmoeller plots. Must be a function of + :mod:`iris.plot` that supports plotting of 2D cubes with coordinates + time and altitude/air_pressure. +plot_kwargs: dict, optional + Optional keyword arguments for the plot function defined by ``plot_func``. + Dictionary keys are elements identified by ``facet_used_for_labels`` or + ``default``, e.g., ``CMIP6`` if ``facet_used_for_labels: project`` or + ``historical`` if ``facet_used_for_labels: exp``. Dictionary values are + dictionaries used as keyword arguments for the plot function defined by + ``plot_func``. String arguments can include facets in curly brackets which + will be derived from the corresponding dataset, e.g., ``{project}``, + ``{short_name}``, ``{exp}``. Examples: ``default: {levels: 2}, CMIP6: + {vmin: 200, vmax: 250}``. In addition to the normalization_ options + supported by the plot function, the option ``norm: centered`` can be + specified. In this case, the keywords ``vcenter`` and ``halfrange`` should + be used instead of ``vmin`` or ``vmax`` (see + :class:`~matplotlib.colors.CenteredNorm`). +plot_kwargs_bias: dict, optional + Optional keyword arguments for the plot function defined by ``plot_func`` + for plotting biases. These keyword arguments update (and potentially + overwrite) the ``plot_kwargs`` for the bias plot. This option has no effect + if no reference dataset is given. See option ``plot_kwargs`` for more + details. By default, uses ``cmap: bwr`` and ``norm: centered``. +pyplot_kwargs: dict, optional + Optional calls to functions of :mod:`matplotlib.pyplot`. Dictionary keys + are functions of :mod:`matplotlib.pyplot`. Dictionary values are used as + argument(s) for these functions (if values are dictionaries, these are + interpreted as keyword arguments; otherwise a single argument is assumed). + String arguments can include facets in curly brackets which will be derived + from the corresponding dataset, e.g., ``{project}``, ``{short_name}``, + ``{exp}``. Examples: ``title: 'Awesome Plot of {long_name}'``, ``xlabel: + '{short_name}'``, ``xlim: [0, 5]``. +rasterize: bool, optional (default: True) + If ``True``, use rasterization_ for Hovmoeller plots to produce smaller + files. This is only relevant for vector graphics (e.g., + ``output_file_type: pdf,svg,ps``). +show_stats: bool, optional (default: True) + Show basic statistics on the plots. +show_y_minor_ticklabels: bool, optional (default: False) + Show tick labels for the minor ticks on the Y axis. +x_pos_stats_avg: float, optional (default: 0.01) + Text x-position of average (shown on the left) in Axes coordinates. Can be + adjusted to avoid overlap with the figure. Only relevant if ``show_stats: + true``. +x_pos_stats_bias: float, optional (default: 0.7) + Text x-position of bias statistics (shown on the right) in Axes + coordinates. Can be adjusted to avoid overlap with the figure. Only + relevant if ``show_stats: true``. +time_format: str, optional (default: None) + :func:`~datetime.datetime.strftime` format string that is used to format + the time axis using :class:`matplotlib.dates.DateFormatter`. If ``None``, + use the default formatting imposed by the iris plotting function.
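+
+A sketch of the ``hovmoeller_z_vs_time`` options (the time format is
+illustrative)::
+
+  plots:
+    hovmoeller_z_vs_time:
+      log_y: true
+      show_y_minor_ticklabels: true
+      time_format: '%Y'
+      cbar_kwargs:
+        orientation: vertical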
+ +Configuration options for plot type ``hovmoeller_time_vs_lat_or_lon`` +--------------------------------------------------------------------- +cbar_label: str, optional (default: '{short_name} [{units}]') + Colorbar label. Can include facets in curly brackets which will be derived + from the corresponding dataset, e.g., ``{project}``, ``{short_name}``, + ``{exp}``. +cbar_label_bias: str, optional (default: 'Δ{short_name} [{units}]') + Colorbar label for plotting biases. Can include facets in curly brackets + which will be derived from the corresponding dataset, e.g., ``{project}``, + ``{short_name}``, ``{exp}``. This option has no effect if no reference + dataset is given. +cbar_kwargs: dict, optional + Optional keyword arguments for :func:`matplotlib.pyplot.colorbar`. By + default, uses ``orientation: vertical``. +cbar_kwargs_bias: dict, optional + Optional keyword arguments for :func:`matplotlib.pyplot.colorbar` for + plotting biases. These keyword arguments update (and potentially overwrite) + the ``cbar_kwargs`` for the bias plot. This option has no effect if no + reference dataset is given. +common_cbar: bool, optional (default: False) + Use a common colorbar for the top panels (i.e., plots of the dataset and + the corresponding reference dataset) when using a reference dataset. If + neither ``vmin`` and ``vmax`` nor ``levels`` is given in ``plot_kwargs``, + the colorbar bounds are inferred from the dataset in the top left panel, + which might lead to an inappropriate colorbar for the reference dataset + (top right panel). Thus, the use of the ``plot_kwargs`` ``vmin`` and + ``vmax`` or ``levels`` is highly recommended when using ``common_cbar: + true``. This option has no effect if no reference dataset is given.
+fontsize: int, optional (default: 10) + Fontsize used for ticks, labels and titles. For the latter, the given + fontsize plus 2 is used. Does not affect suptitles. +plot_func: str, optional (default: 'contourf') + Plot function used to plot the Hovmoeller plots. Must be a function of + :mod:`iris.plot` that supports plotting of 2D cubes with coordinates + time and latitude/longitude. +plot_kwargs: dict, optional + Optional keyword arguments for the plot function defined by ``plot_func``. + Dictionary keys are elements identified by ``facet_used_for_labels`` or + ``default``, e.g., ``CMIP6`` if ``facet_used_for_labels: project`` or + ``historical`` if ``facet_used_for_labels: exp``. Dictionary values are + dictionaries used as keyword arguments for the plot function defined by + ``plot_func``. String arguments can include facets in curly brackets which + will be derived from the corresponding dataset, e.g., ``{project}``, + ``{short_name}``, ``{exp}``. Examples: ``default: {levels: 2}, CMIP6: + {vmin: 200, vmax: 250}``. In addition to the normalization_ options + supported by the plot function, the option ``norm: centered`` can be + specified. In this case, the keywords ``vcenter`` and ``halfrange`` should + be used instead of ``vmin`` or ``vmax`` (see + :class:`~matplotlib.colors.CenteredNorm`). +plot_kwargs_bias: dict, optional + Optional keyword arguments for the plot function defined by ``plot_func`` + for plotting biases. These keyword arguments update (and potentially + overwrite) the ``plot_kwargs`` for the bias plot. This option has no effect + if no reference dataset is given. See option ``plot_kwargs`` for more + details. By default, uses ``cmap: bwr`` and ``norm: centered``. +pyplot_kwargs: dict, optional + Optional calls to functions of :mod:`matplotlib.pyplot`. Dictionary keys + are functions of :mod:`matplotlib.pyplot`. Dictionary values are used as + argument(s) for these functions (if values are dictionaries, these are + interpreted as keyword arguments; otherwise a single argument is assumed). + String arguments can include facets in curly brackets which will be derived + from the corresponding dataset, e.g., ``{project}``, ``{short_name}``, + ``{exp}``. Examples: ``title: 'Awesome Plot of {long_name}'``, ``xlabel: + '{short_name}'``, ``xlim: [0, 5]``. +rasterize: bool, optional (default: True) + If ``True``, use rasterization_ for Hovmoeller plots to produce smaller + files. This is only relevant for vector graphics (e.g., + ``output_file_type: pdf,svg,ps``). +show_y_minor_ticks: bool, optional (default: True) + Show minor ticks for time on the Y axis. +show_x_minor_ticks: bool, optional (default: True) + Show minor ticks for latitude or longitude on the X axis. +time_format: str, optional (default: None) + :func:`~datetime.datetime.strftime` format string that is used to format + the time axis using :class:`matplotlib.dates.DateFormatter`. If ``None``, + use the default formatting imposed by the iris plotting function. +time_on: str, optional (default: y-axis) + Optional switch to change the orientation of the plot so that time is on + the x-axis (``time_on: x-axis``). The default orientation is time on the + y-axis and latitude/longitude on the x-axis. + + +.. hint:: + + Extra arguments given to the recipe are ignored, so it is safe to use yaml + anchors to share the configuration of common arguments with other monitor + diagnostic scripts.
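+
+A compact end-to-end sketch of a diagnostic using this script (all dataset,
+variable, and preprocessor names are illustrative placeholders rather than an
+excerpt from a shipped recipe; the ``reference_for_monitor_diags`` facet is
+used as described above)::
+
+  diagnostics:
+    monitoring:
+      variables:
+        tas:
+          mip: Amon
+          preprocessor: regrid_to_common_grid
+          datasets:
+            - {dataset: MODEL_A, project: CMIP6, exp: historical,
+               ensemble: r1i1p1f1, grid: gn}
+            - {dataset: OBS_B, project: OBS,
+               reference_for_monitor_diags: true}
+      scripts:
+        plot:
+          script: monitor/multi_datasets.py
+          plots:
+            timeseries: {}
+            map:
+              common_cbar: true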
+ +.. _rasterization: https://matplotlib.org/stable/gallery/misc/ + rasterization_demo.html +.. _normalization: https://matplotlib.org/stable/users/explain/colors/ + colormapnorms.html + +""" +import logging +import warnings +from copy import deepcopy +from pathlib import Path +from pprint import pformat + +import cartopy.crs as ccrs +import dask.array as da +import iris +import matplotlib as mpl +import matplotlib.dates as mdates +import matplotlib.pyplot as plt +import numpy as np +import seaborn as sns +from iris.analysis.cartography import area_weights +from iris.coord_categorisation import add_year +from iris.coords import AuxCoord +from iris.exceptions import ConstraintMismatchError +from matplotlib.colors import CenteredNorm +from matplotlib.gridspec import GridSpec +from matplotlib.ticker import ( + AutoMinorLocator, + FormatStrFormatter, + LogLocator, + NullFormatter, +) +from sklearn.metrics import r2_score + +import esmvaltool.diag_scripts.shared.iris_helpers as ih +from esmvaltool.diag_scripts.monitor.monitor_base import MonitorBase +from esmvaltool.diag_scripts.shared import ( + ProvenanceLogger, + get_diagnostic_filename, + group_metadata, + io, + run_diagnostic, +) + +logger = logging.getLogger(Path(__file__).stem) + + +class MultiDatasets(MonitorBase): + """Diagnostic to plot multi-dataset plots.""" + + def __init__(self, config): + """Initialize class members.""" + super().__init__(config) + + # Get default settings + self.cfg = deepcopy(self.cfg) + self.cfg.setdefault('facet_used_for_labels', 'dataset') + self.cfg.setdefault('figure_kwargs', {'constrained_layout': True}) + self.cfg.setdefault('group_variables_by', 'short_name') + self.cfg.setdefault('savefig_kwargs', { + 'bbox_inches': 'tight', + 'dpi': 300, + 'orientation': 'landscape', + }) + self.cfg.setdefault('seaborn_settings', {'style': 'ticks'}) + logger.info("Using facet '%s' to group variables", + self.cfg['group_variables_by']) + logger.info("Using facet '%s' to create labels", + self.cfg['facet_used_for_labels']) + + # Load input data + self.input_data = self._load_and_preprocess_data() + self.grouped_input_data = group_metadata( + self.input_data, + self.cfg['group_variables_by'], + sort=self.cfg['facet_used_for_labels'], + ) + + if 'profile' in self.plots: + logger.warning("The plot_type ``profile`` for zonal mean profiles" + " has been deprecated in ESMValTool version 2.9.0" + " and is scheduled for removal in version 2.11.0." + " Please use plot type ``zonal_mean_profile``" + " instead. This is an exact replacement.") + if 'zonal_mean_profile' in self.plots: + raise ValueError( + "Both ``profile`` and ``zonal_mean_profile`` are used."
+ " Please use ``zonal_mean_profile`` only.") + self.plots['zonal_mean_profile'] = self.plots.pop('profile') + + # Check given plot types and set default settings for them + self.supported_plot_types = [ + 'timeseries', + 'annual_cycle', + 'map', + 'zonal_mean_profile', + '1d_profile', + 'variable_vs_lat', + 'hovmoeller_z_vs_time', + 'hovmoeller_time_vs_lat_or_lon', + ] + for (plot_type, plot_options) in self.plots.items(): + if plot_type not in self.supported_plot_types: + raise ValueError( + f"Got unexpected plot type '{plot_type}' for option " + f"'plots', expected one of {self.supported_plot_types}") + if plot_options is None: + self.plots[plot_type] = {} + + # Default options for the different plot types + if plot_type == 'timeseries': + self.plots[plot_type].setdefault('annual_mean_kwargs', {}) + self.plots[plot_type].setdefault('gridline_kwargs', {}) + self.plots[plot_type].setdefault('legend_kwargs', {}) + self.plots[plot_type].setdefault('plot_kwargs', {}) + self.plots[plot_type].setdefault('pyplot_kwargs', {}) + self.plots[plot_type].setdefault('time_format', None) + + elif plot_type == 'annual_cycle': + self.plots[plot_type].setdefault('gridline_kwargs', {}) + self.plots[plot_type].setdefault('legend_kwargs', {}) + self.plots[plot_type].setdefault('plot_kwargs', {}) + self.plots[plot_type].setdefault('pyplot_kwargs', {}) + + elif plot_type == 'map': + self.plots[plot_type].setdefault( + 'cbar_label', '{short_name} [{units}]') + self.plots[plot_type].setdefault( + 'cbar_label_bias', 'Δ{short_name} [{units}]') + self.plots[plot_type].setdefault( + 'cbar_kwargs', {'orientation': 'horizontal', 'aspect': 30} + ) + self.plots[plot_type].setdefault('cbar_kwargs_bias', {}) + self.plots[plot_type].setdefault('common_cbar', False) + self.plots[plot_type].setdefault('fontsize', 10) + self.plots[plot_type].setdefault('gridline_kwargs', {}) + self.plots[plot_type].setdefault('plot_func', 'contourf') + self.plots[plot_type].setdefault('plot_kwargs', {}) + self.plots[plot_type].setdefault('plot_kwargs_bias', {}) + self.plots[plot_type]['plot_kwargs_bias'].setdefault( + 'cmap', 'bwr' + ) + self.plots[plot_type]['plot_kwargs_bias'].setdefault( + 'norm', 'centered' + ) + if 'projection' not in self.plots[plot_type]: + self.plots[plot_type].setdefault('projection', 'Robinson') + self.plots[plot_type].setdefault( + 'projection_kwargs', {'central_longitude': 10} + ) + else: + self.plots[plot_type].setdefault('projection_kwargs', {}) + self.plots[plot_type].setdefault('pyplot_kwargs', {}) + self.plots[plot_type].setdefault('rasterize', True) + self.plots[plot_type].setdefault('show_stats', True) + self.plots[plot_type].setdefault('x_pos_stats_avg', 0.0) + self.plots[plot_type].setdefault('x_pos_stats_bias', 0.92) + + elif plot_type == 'zonal_mean_profile': + self.plots[plot_type].setdefault( + 'cbar_label', '{short_name} [{units}]') + self.plots[plot_type].setdefault( + 'cbar_label_bias', 'Δ{short_name} [{units}]') + self.plots[plot_type].setdefault( + 'cbar_kwargs', {'orientation': 'vertical'} + ) + self.plots[plot_type].setdefault('cbar_kwargs_bias', {}) + self.plots[plot_type].setdefault('common_cbar', False) + self.plots[plot_type].setdefault('fontsize', 10) + self.plots[plot_type].setdefault('log_y', True) + self.plots[plot_type].setdefault('plot_func', 'contourf') + self.plots[plot_type].setdefault('plot_kwargs', {}) + self.plots[plot_type].setdefault('plot_kwargs_bias', {}) + self.plots[plot_type]['plot_kwargs_bias'].setdefault( + 'cmap', 'bwr' + ) + 
self.plots[plot_type]['plot_kwargs_bias'].setdefault( + 'norm', 'centered' + ) + self.plots[plot_type].setdefault('pyplot_kwargs', {}) + self.plots[plot_type].setdefault('rasterize', True) + self.plots[plot_type].setdefault('show_stats', True) + self.plots[plot_type].setdefault( + 'show_y_minor_ticklabels', False + ) + self.plots[plot_type].setdefault('x_pos_stats_avg', 0.01) + self.plots[plot_type].setdefault('x_pos_stats_bias', 0.7) + + elif plot_type == '1d_profile': + self.plots[plot_type].setdefault('aspect_ratio', 1.5) + self.plots[plot_type].setdefault('gridline_kwargs', {}) + self.plots[plot_type].setdefault('legend_kwargs', {}) + self.plots[plot_type].setdefault('log_x', False) + self.plots[plot_type].setdefault('log_y', True) + self.plots[plot_type].setdefault('plot_kwargs', {}) + self.plots[plot_type].setdefault('pyplot_kwargs', {}) + self.plots[plot_type].setdefault( + 'show_y_minor_ticklabels', False + ) + elif plot_type == 'variable_vs_lat': + self.plots[plot_type].setdefault('gridline_kwargs', {}) + self.plots[plot_type].setdefault('legend_kwargs', {}) + self.plots[plot_type].setdefault('plot_kwargs', {}) + self.plots[plot_type].setdefault('pyplot_kwargs', {}) + + elif plot_type == 'hovmoeller_z_vs_time': + self.plots[plot_type].setdefault('cbar_label', + '{short_name} [{units}]') + self.plots[plot_type].setdefault('cbar_label_bias', + 'Δ{short_name} [{units}]') + self.plots[plot_type].setdefault('cbar_kwargs', + {'orientation': 'vertical'}) + self.plots[plot_type].setdefault('cbar_kwargs_bias', {}) + self.plots[plot_type].setdefault('common_cbar', False) + self.plots[plot_type].setdefault('fontsize', 10) + self.plots[plot_type].setdefault('log_y', True) + self.plots[plot_type].setdefault('plot_func', 'contourf') + self.plots[plot_type].setdefault('plot_kwargs', {}) + self.plots[plot_type].setdefault('plot_kwargs_bias', {}) + self.plots[plot_type]['plot_kwargs_bias'].setdefault( + 'cmap', 'bwr') + self.plots[plot_type]['plot_kwargs_bias'].setdefault( + 'norm', 'centered' + ) + self.plots[plot_type].setdefault('pyplot_kwargs', {}) + self.plots[plot_type].setdefault('rasterize', True) + self.plots[plot_type].setdefault('show_stats', True) + self.plots[plot_type].setdefault('show_y_minor_ticklabels', + False) + self.plots[plot_type].setdefault('time_format', None) + self.plots[plot_type].setdefault('x_pos_stats_avg', 0.01) + self.plots[plot_type].setdefault('x_pos_stats_bias', 0.7) + + elif plot_type == 'hovmoeller_time_vs_lat_or_lon': + self.plots[plot_type].setdefault( + 'cbar_label', '{short_name} [{units}]') + self.plots[plot_type].setdefault( + 'cbar_label_bias', 'Δ{short_name} [{units}]') + self.plots[plot_type].setdefault( + 'cbar_kwargs', {'orientation': 'vertical'} + ) + self.plots[plot_type].setdefault('cbar_kwargs_bias', {}) + self.plots[plot_type].setdefault('common_cbar', False) + self.plots[plot_type].setdefault('fontsize', 10) + self.plots[plot_type].setdefault('plot_func', 'contourf') + self.plots[plot_type].setdefault('plot_kwargs', {}) + self.plots[plot_type].setdefault('plot_kwargs_bias', {}) + self.plots[plot_type]['plot_kwargs_bias'].setdefault( + 'cmap', 'bwr' + ) + self.plots[plot_type]['plot_kwargs_bias'].setdefault( + 'norm', 'centered' + ) + self.plots[plot_type].setdefault('pyplot_kwargs', {}) + self.plots[plot_type].setdefault('rasterize', True) + self.plots[plot_type].setdefault( + 'show_y_minor_ticks', True + ) + self.plots[plot_type].setdefault( + 'show_x_minor_ticks', True + ) + self.plots[plot_type].setdefault('time_format', None) + 
self.plots[plot_type].setdefault('time_on', 'y-axis') + + # Check that facet_used_for_labels is present for every dataset + for dataset in self.input_data: + if self.cfg['facet_used_for_labels'] not in dataset: + raise ValueError( + f"facet_used_for_labels " + f"'{self.cfg['facet_used_for_labels']}' not present for " + f"the following dataset:\n{pformat(dataset)}") + + # Load seaborn settings + sns.set_theme(**self.cfg['seaborn_settings']) + + def _add_colorbar(self, plot_type, plot_left, plot_right, axes_left, + axes_right, dataset_left, dataset_right): + """Add colorbar(s) for plots.""" + fontsize = self.plots[plot_type]['fontsize'] + cbar_kwargs = self._get_cbar_kwargs(plot_type) + cbar_label_left = self._get_cbar_label(plot_type, dataset_left) + cbar_label_right = self._get_cbar_label(plot_type, dataset_right) + + # Create one common colorbar for the top panels + # Note: Increase aspect ratio for nicer looks + if self.plots[plot_type]['common_cbar']: + if 'aspect' in cbar_kwargs: + cbar_kwargs['aspect'] += 20.0 + cbar = plt.colorbar(plot_left, ax=[axes_left, axes_right], + **cbar_kwargs) + cbar.set_label(cbar_label_left, fontsize=fontsize) + cbar.ax.tick_params(labelsize=fontsize) + + # Create two separate colorbars for the top panels + else: + cbar_left = plt.colorbar(plot_left, ax=axes_left, **cbar_kwargs) + cbar_left.set_label(cbar_label_left, fontsize=fontsize) + cbar_left.ax.tick_params(labelsize=fontsize) + cbar_right = plt.colorbar(plot_right, ax=axes_right, **cbar_kwargs) + cbar_right.set_label(cbar_label_right, fontsize=fontsize) + cbar_right.ax.tick_params(labelsize=fontsize) + + def _add_stats(self, plot_type, axes, dim_coords, dataset, + ref_dataset=None): + """Add text to plot that describes basic statistics.""" + if not self.plots[plot_type]['show_stats']: + return + + # Extract cube(s) + cube = dataset['cube'] + if ref_dataset is None: + ref_cube = None + label = self._get_label(dataset) + else: + ref_cube = ref_dataset['cube'] + label = (f'{self._get_label(dataset)} vs. ' + f'{self._get_label(ref_dataset)}') + + # Different options for the different plot types + fontsize = 6.0 + y_pos = 0.95 + if all([ + 'x_pos_stats_avg' in self.plots[plot_type], + 'x_pos_stats_bias' in self.plots[plot_type], + ]): + x_pos_bias = self.plots[plot_type]['x_pos_stats_bias'] + x_pos = self.plots[plot_type]['x_pos_stats_avg'] + else: + raise NotImplementedError(f"plot_type '{plot_type}' not supported") + + # For zonal_mean_profile plots, add a scalar longitude coordinate + # (necessary for calculation of area weights). The exact values for the + # points/bounds of this coordinate do not matter since they don't + # change the weights.
+ if not cube.coords('longitude'): + lon_coord = AuxCoord( + 180.0, + bounds=[0.0, 360.0], + var_name='lon', + standard_name='longitude', + long_name='longitude', + units='degrees_east', + ) + cube.add_aux_coord(lon_coord, ()) + + # Mean + weights = area_weights(cube) + if ref_cube is None: + mean = cube.collapsed(dim_coords, iris.analysis.MEAN, + weights=weights) + logger.info( + "Area-weighted mean of %s for %s = %f%s", + dataset['short_name'], + label, + mean.data, + dataset['units'], + ) + else: + mean = (cube - ref_cube).collapsed(dim_coords, iris.analysis.MEAN, + weights=weights) + logger.info( + "Area-weighted bias of %s for %s = %f%s", + dataset['short_name'], + label, + mean.data, + dataset['units'], + ) + if np.abs(mean.data) >= 0.1: + mean_val = f"{mean.data:.2f} {cube.units}" + else: + mean_val = f"{mean.data:.2e} {cube.units}" + axes.text( + x_pos, y_pos, mean_val, fontsize=fontsize, transform=axes.transAxes + ) + if ref_cube is None: + return + + # Weighted RMSE + rmse = (cube - ref_cube).collapsed(dim_coords, iris.analysis.RMS, + weights=weights) + if np.abs(rmse.data) >= 0.1: + rmse_val = f"{rmse.data:.2f} {cube.units}" + else: + rmse_val = f"{rmse.data:.2e} {cube.units}" + axes.text(x_pos_bias, y_pos, f"RMSE={rmse_val}", + fontsize=fontsize, transform=axes.transAxes) + logger.info( + "Area-weighted RMSE of %s for %s = %f%s", + dataset['short_name'], + label, + rmse.data, + dataset['units'], + ) + + # Weighted R2 + mask = np.ma.getmaskarray(cube.data).ravel() + mask |= np.ma.getmaskarray(ref_cube.data).ravel() + cube_data = cube.data.ravel()[~mask] + ref_cube_data = ref_cube.data.ravel()[~mask] + weights = weights.ravel()[~mask] + r2_val = r2_score(cube_data, ref_cube_data, sample_weight=weights) + axes.text(x_pos_bias, y_pos - 0.1, rf"R$^2$={r2_val:.2f}", + fontsize=fontsize, transform=axes.transAxes) + logger.info( + "Area-weighted R2 of %s for %s = %f", + dataset['short_name'], + label, + r2_val, + ) + + def _get_custom_mpl_rc_params(self, plot_type): + """Get custom matplotlib rcParams.""" + fontsize = self.plots[plot_type]['fontsize'] + custom_rc_params = { + 'axes.titlesize': fontsize + 2.0, + 'axes.labelsize': fontsize, + 'xtick.labelsize': fontsize, + 'ytick.labelsize': fontsize, + } + return custom_rc_params + + def _get_label(self, dataset): + """Get label of dataset.""" + return dataset[self.cfg['facet_used_for_labels']] + + def _get_cbar_kwargs(self, plot_type, bias=False): + """Get colorbar kwargs.""" + cbar_kwargs = deepcopy(self.plots[plot_type]['cbar_kwargs']) + if bias: + cbar_kwargs.update(self.plots[plot_type]['cbar_kwargs_bias']) + return deepcopy(cbar_kwargs) + + def _get_cbar_label(self, plot_type, dataset, bias=False): + """Get colorbar label.""" + if bias: + cbar_label = self.plots[plot_type]['cbar_label_bias'] + descr = f"cbar_label_bias of {plot_type} '{cbar_label}'" + else: + cbar_label = self.plots[plot_type]['cbar_label'] + descr = f"cbar_label of {plot_type} '{cbar_label}'" + cbar_label = self._fill_facet_placeholders(cbar_label, dataset, descr) + return cbar_label + + def _get_gridline_kwargs(self, plot_type): + """Get gridline kwargs.""" + gridline_kwargs = self.plots[plot_type]['gridline_kwargs'] + return deepcopy(gridline_kwargs) + + def _get_map_projection(self): + """Get projection used for map plots.""" + plot_type = 'map' + projection = self.plots[plot_type]['projection'] + projection_kwargs = self.plots[plot_type]['projection_kwargs'] + + # Check if desired projection is valid + if not hasattr(ccrs, projection): + raise AttributeError( 
+ f"Got invalid projection '{projection}' for plotting " + f"{plot_type}, expected class of cartopy.crs") + + return getattr(ccrs, projection)(**projection_kwargs) + + def _get_plot_func(self, plot_type): + """Get plot function.""" + plot_func = self.plots[plot_type]['plot_func'] + if not hasattr(iris.plot, plot_func): + raise AttributeError( + f"Got invalid plot function '{plot_func}' for plotting " + f"{plot_type}, expected function of iris.plot") + logger.info("Creating %s plots using function '%s'", plot_type, + plot_func) + return getattr(iris.plot, plot_func) + + def _get_plot_kwargs(self, plot_type, dataset, bias=False): + """Get keyword arguments for plot functions.""" + all_plot_kwargs = self.plots[plot_type]['plot_kwargs'] + all_plot_kwargs = deepcopy(all_plot_kwargs) + + # First get default kwargs, then overwrite them with dataset-specific + # ones + plot_kwargs = all_plot_kwargs.get('default', {}) + label = self._get_label(dataset) + plot_kwargs.update(all_plot_kwargs.get(label, {})) + + # For bias plots, overwrite the kwargs with bias-specific option + if bias: + bias_kwargs = self.plots[plot_type]['plot_kwargs_bias'] + plot_kwargs.update(bias_kwargs) + + # Replace facets with dataset entries for string arguments + for (key, val) in plot_kwargs.items(): + if isinstance(val, str): + val = self._fill_facet_placeholders( + val, + dataset, + f"plot_kwargs of {plot_type} '{key}: {val}'", + ) + plot_kwargs[key] = val + + # Default settings for different plot types + if plot_type in ('timeseries', 'annual_cycle', '1d_profile', + 'variable_vs_lat'): + plot_kwargs.setdefault('label', label) + + if plot_kwargs.get('norm') == 'centered': + norm = CenteredNorm( + vcenter=plot_kwargs.pop('vcenter', 0.0), + halfrange=plot_kwargs.pop('halfrange', None), + ) + plot_kwargs['norm'] = norm + + return deepcopy(plot_kwargs) + + def _load_and_preprocess_data(self): + """Load and preprocess data.""" + input_data = list(self.cfg['input_data'].values()) + + for dataset in input_data: + filename = dataset['filename'] + logger.info("Loading %s", filename) + cubes = iris.load(filename) + if len(cubes) == 1: + cube = cubes[0] + else: + var_name = dataset['short_name'] + try: + cube = cubes.extract_cube(iris.NameConstraint( + var_name=var_name + )) + except ConstraintMismatchError as exc: + var_names = [c.var_name for c in cubes] + raise ValueError( + f"Cannot load data: multiple variables ({var_names}) " + f"are available in file {filename}, but not the " + f"requested '{var_name}'" + ) from exc + + # Fix time coordinate if present + if cube.coords('time', dim_coords=True): + ih.unify_time_coord(cube) + + # Fix Z-coordinate if present + if cube.coords('air_pressure', dim_coords=True): + z_coord = cube.coord('air_pressure', dim_coords=True) + z_coord.attributes['positive'] = 'down' + z_coord.convert_units('hPa') + elif cube.coords('altitude', dim_coords=True): + z_coord = cube.coord('altitude') + z_coord.attributes['positive'] = 'up' + + dataset['cube'] = cube + + return input_data + + def _plot_map_with_ref(self, plot_func, dataset, ref_dataset): + """Plot map plot for single dataset with a reference dataset.""" + plot_type = 'map' + logger.info("Plotting map with reference dataset '%s' for '%s'", + self._get_label(ref_dataset), self._get_label(dataset)) + + # Make sure that the data has the correct dimensions + cube = dataset['cube'] + ref_cube = ref_dataset['cube'] + dim_coords_dat = self._check_cube_dimensions(cube, plot_type) + dim_coords_ref = self._check_cube_dimensions(ref_cube, plot_type) + + # 
Create single figure with multiple axes
+        with mpl.rc_context(self._get_custom_mpl_rc_params(plot_type)):
+            fig = plt.figure(**self.cfg['figure_kwargs'])
+            gridspec = GridSpec(5, 4, figure=fig,
+                                height_ratios=[1.0, 1.0, 0.4, 1.0, 1.0])
+
+            # Options used for all subplots
+            projection = self._get_map_projection()
+            plot_kwargs = self._get_plot_kwargs(plot_type, dataset)
+            gridline_kwargs = self._get_gridline_kwargs(plot_type)
+            fontsize = self.plots[plot_type]['fontsize']
+
+            # Plot dataset (top left)
+            axes_data = fig.add_subplot(gridspec[0:2, 0:2],
+                                        projection=projection)
+            plot_kwargs['axes'] = axes_data
+            if plot_func is iris.plot.contourf:
+                # see https://github.com/SciTools/cartopy/issues/2457
+                # and https://github.com/SciTools/cartopy/issues/2468
+                plot_kwargs['transform_first'] = True
+                npx = da if cube.has_lazy_data() else np
+                cube_to_plot = cube.copy(
+                    npx.ma.filled(cube.core_data(), np.nan)
+                )
+            else:
+                cube_to_plot = cube
+            plot_data = plot_func(cube_to_plot, **plot_kwargs)
+            axes_data.coastlines()
+            if gridline_kwargs is not False:
+                axes_data.gridlines(**gridline_kwargs)
+            axes_data.set_title(self._get_label(dataset), pad=3.0)
+            self._add_stats(plot_type, axes_data, dim_coords_dat, dataset)
+            self._process_pyplot_kwargs(plot_type, dataset)
+
+            # Plot reference dataset (top right)
+            # Note: make sure to use the same vmin and vmax as the top left
+            # plot if a common colorbar is desired
+            axes_ref = fig.add_subplot(gridspec[0:2, 2:4],
+                                       projection=projection)
+            plot_kwargs['axes'] = axes_ref
+            if self.plots[plot_type]['common_cbar']:
+                plot_kwargs.setdefault('vmin', plot_data.get_clim()[0])
+                plot_kwargs.setdefault('vmax', plot_data.get_clim()[1])
+            if plot_func is iris.plot.contourf:
+                # see https://github.com/SciTools/cartopy/issues/2457
+                # and https://github.com/SciTools/cartopy/issues/2468
+                plot_kwargs['transform_first'] = True
+                npx = da if ref_cube.has_lazy_data() else np
+                ref_cube_to_plot = ref_cube.copy(
+                    npx.ma.filled(ref_cube.core_data(), np.nan)
+                )
+            else:
+                ref_cube_to_plot = ref_cube
+            plot_ref = plot_func(ref_cube_to_plot, **plot_kwargs)
+            axes_ref.coastlines()
+            if gridline_kwargs is not False:
+                axes_ref.gridlines(**gridline_kwargs)
+            axes_ref.set_title(self._get_label(ref_dataset), pad=3.0)
+            self._add_stats(plot_type, axes_ref, dim_coords_ref, ref_dataset)
+            self._process_pyplot_kwargs(plot_type, ref_dataset)
+
+            # Add colorbar(s)
+            self._add_colorbar(plot_type, plot_data, plot_ref, axes_data,
+                               axes_ref, dataset, ref_dataset)
+
+            # Plot bias (bottom center)
+            bias_cube = cube - ref_cube
+            axes_bias = fig.add_subplot(gridspec[3:5, 1:3],
+                                        projection=projection)
+            plot_kwargs_bias = self._get_plot_kwargs(plot_type, dataset,
+                                                     bias=True)
+            plot_kwargs_bias['axes'] = axes_bias
+            if plot_func is iris.plot.contourf:
+                # see https://github.com/SciTools/cartopy/issues/2457
+                # and https://github.com/SciTools/cartopy/issues/2468
+                plot_kwargs_bias['transform_first'] = True
+                npx = da if bias_cube.has_lazy_data() else np
+                bias_cube_to_plot = bias_cube.copy(
+                    npx.ma.filled(bias_cube.core_data(), np.nan)
+                )
+            else:
+                bias_cube_to_plot = bias_cube
+            plot_bias = plot_func(bias_cube_to_plot, **plot_kwargs_bias)
+            axes_bias.coastlines()
+            if gridline_kwargs is not False:
+                axes_bias.gridlines(**gridline_kwargs)
+            axes_bias.set_title(
+                f"{self._get_label(dataset)} - {self._get_label(ref_dataset)}",
+                pad=3.0,
+            )
+            cbar_kwargs_bias = self._get_cbar_kwargs(plot_type, bias=True)
+            cbar_bias = fig.colorbar(plot_bias, ax=axes_bias,
+                                     **cbar_kwargs_bias)
+
cbar_bias.set_label( + self._get_cbar_label(plot_type, dataset, bias=True), + fontsize=fontsize, + ) + cbar_bias.ax.tick_params(labelsize=fontsize) + self._add_stats(plot_type, axes_bias, dim_coords_dat, dataset, + ref_dataset) + + # Customize plot + fig.suptitle(dataset['long_name']) + self._process_pyplot_kwargs(plot_type, dataset) + + # Rasterization + if self.plots[plot_type]['rasterize']: + self._set_rasterized([axes_data, axes_ref, axes_bias]) + + # File paths + plot_path = self.get_plot_path(plot_type, dataset) + netcdf_path = ( + get_diagnostic_filename(Path(plot_path).stem + "_{pos}", self.cfg) + ) + netcdf_paths = { + netcdf_path.format(pos='top_left'): cube, + netcdf_path.format(pos='top_right'): ref_cube, + netcdf_path.format(pos='bottom'): bias_cube, + } + + return (plot_path, netcdf_paths) + + def _plot_map_without_ref(self, plot_func, dataset): + """Plot map plot for single dataset without a reference dataset.""" + plot_type = 'map' + logger.info("Plotting map without reference dataset for '%s'", + self._get_label(dataset)) + + # Make sure that the data has the correct dimensions + cube = dataset['cube'] + dim_coords_dat = self._check_cube_dimensions(cube, plot_type) + + # Create plot with desired settings + with mpl.rc_context(self._get_custom_mpl_rc_params(plot_type)): + fig = plt.figure(**self.cfg['figure_kwargs']) + axes = fig.add_subplot(projection=self._get_map_projection()) + plot_kwargs = self._get_plot_kwargs(plot_type, dataset) + plot_kwargs['axes'] = axes + if plot_func is iris.plot.contourf: + # see https://github.com/SciTools/cartopy/issues/2457 + # and https://github.com/SciTools/cartopy/issues/2468 + plot_kwargs['transform_first'] = True + npx = da if cube.has_lazy_data() else np + cube_to_plot = cube.copy( + npx.ma.filled(cube.core_data(), np.nan) + ) + else: + cube_to_plot = cube + plot_map = plot_func(cube_to_plot, **plot_kwargs) + axes.coastlines() + gridline_kwargs = self._get_gridline_kwargs(plot_type) + if gridline_kwargs is not False: + axes.gridlines(**gridline_kwargs) + + # Print statistics if desired + self._add_stats(plot_type, axes, dim_coords_dat, dataset) + + # Setup colorbar + fontsize = self.plots[plot_type]['fontsize'] + colorbar = fig.colorbar(plot_map, ax=axes, + **self._get_cbar_kwargs(plot_type)) + colorbar.set_label(self._get_cbar_label(plot_type, dataset), + fontsize=fontsize) + colorbar.ax.tick_params(labelsize=fontsize) + + # Customize plot + axes.set_title(self._get_label(dataset)) + fig.suptitle(dataset['long_name']) + self._process_pyplot_kwargs(plot_type, dataset) + + # Rasterization + if self.plots[plot_type]['rasterize']: + self._set_rasterized([axes]) + + # File paths + plot_path = self.get_plot_path(plot_type, dataset) + netcdf_path = get_diagnostic_filename(Path(plot_path).stem, self.cfg) + + return (plot_path, {netcdf_path: cube}) + + def _plot_zonal_mean_profile_with_ref(self, plot_func, dataset, + ref_dataset): + """Plot zonal mean profile for single dataset with reference.""" + plot_type = 'zonal_mean_profile' + logger.info("Plotting zonal mean profile with reference dataset" + " '%s' for '%s'", + self._get_label(ref_dataset), self._get_label(dataset)) + + # Make sure that the data has the correct dimensions + cube = dataset['cube'] + ref_cube = ref_dataset['cube'] + dim_coords_dat = self._check_cube_dimensions(cube, plot_type) + dim_coords_ref = self._check_cube_dimensions(ref_cube, plot_type) + + # Create single figure with multiple axes + with mpl.rc_context(self._get_custom_mpl_rc_params(plot_type)): + fig = 
plt.figure(**self.cfg['figure_kwargs'])
+            gridspec = GridSpec(5, 4, figure=fig,
+                                height_ratios=[1.0, 1.0, 0.4, 1.0, 1.0])
+
+            # Options used for all subplots
+            plot_kwargs = self._get_plot_kwargs(plot_type, dataset)
+            fontsize = self.plots[plot_type]['fontsize']
+
+            # Plot dataset (top left)
+            axes_data = fig.add_subplot(gridspec[0:2, 0:2])
+            plot_kwargs['axes'] = axes_data
+            plot_data = plot_func(cube, **plot_kwargs)
+            axes_data.set_title(self._get_label(dataset), pad=3.0)
+            z_coord = cube.coord(axis='Z')
+            axes_data.set_ylabel(f'{z_coord.long_name} [{z_coord.units}]')
+            if self.plots[plot_type]['log_y']:
+                axes_data.set_yscale('log')
+                axes_data.get_yaxis().set_major_formatter(
+                    FormatStrFormatter('%.1f'))
+            if self.plots[plot_type]['show_y_minor_ticklabels']:
+                axes_data.get_yaxis().set_minor_formatter(
+                    FormatStrFormatter('%.1f'))
+            else:
+                axes_data.get_yaxis().set_minor_formatter(NullFormatter())
+            self._add_stats(plot_type, axes_data, dim_coords_dat, dataset)
+            self._process_pyplot_kwargs(plot_type, dataset)
+
+            # Plot reference dataset (top right)
+            # Note: make sure to use the same vmin and vmax as the top left
+            # plot if a common colorbar is desired
+            axes_ref = fig.add_subplot(gridspec[0:2, 2:4], sharex=axes_data,
+                                       sharey=axes_data)
+            plot_kwargs['axes'] = axes_ref
+            if self.plots[plot_type]['common_cbar']:
+                plot_kwargs.setdefault('vmin', plot_data.get_clim()[0])
+                plot_kwargs.setdefault('vmax', plot_data.get_clim()[1])
+            plot_ref = plot_func(ref_cube, **plot_kwargs)
+            axes_ref.set_title(self._get_label(ref_dataset), pad=3.0)
+            plt.setp(axes_ref.get_yticklabels(), visible=False)
+            self._add_stats(plot_type, axes_ref, dim_coords_ref, ref_dataset)
+            self._process_pyplot_kwargs(plot_type, ref_dataset)
+
+            # Add colorbar(s)
+            self._add_colorbar(plot_type, plot_data, plot_ref, axes_data,
+                               axes_ref, dataset, ref_dataset)
+
+            # Plot bias (bottom center)
+            bias_cube = cube - ref_cube
+            axes_bias = fig.add_subplot(gridspec[3:5, 1:3], sharex=axes_data,
+                                        sharey=axes_data)
+            plot_kwargs_bias = self._get_plot_kwargs(plot_type, dataset,
+                                                     bias=True)
+            plot_kwargs_bias['axes'] = axes_bias
+            plot_bias = plot_func(bias_cube, **plot_kwargs_bias)
+            axes_bias.set_title(
+                f"{self._get_label(dataset)} - {self._get_label(ref_dataset)}",
+                pad=3.0,
+            )
+            axes_bias.set_xlabel('latitude [°N]')
+            axes_bias.set_ylabel(f'{z_coord.long_name} [{z_coord.units}]')
+            cbar_kwargs_bias = self._get_cbar_kwargs(plot_type, bias=True)
+            cbar_bias = fig.colorbar(plot_bias, ax=axes_bias,
+                                     **cbar_kwargs_bias)
+            cbar_bias.set_label(
+                self._get_cbar_label(plot_type, dataset, bias=True),
+                fontsize=fontsize,
+            )
+            cbar_bias.ax.tick_params(labelsize=fontsize)
+            self._add_stats(plot_type, axes_bias, dim_coords_dat, dataset,
+                            ref_dataset)
+
+            # Customize plot
+            fig.suptitle(dataset['long_name'])
+            self._process_pyplot_kwargs(plot_type, dataset)
+
+            # Rasterization
+            if self.plots[plot_type]['rasterize']:
+                self._set_rasterized([axes_data, axes_ref, axes_bias])
+
+        # File paths
+        plot_path = self.get_plot_path(plot_type, dataset)
+        netcdf_path = (
+            get_diagnostic_filename(Path(plot_path).stem + "_{pos}", self.cfg)
+        )
+        netcdf_paths = {
+            netcdf_path.format(pos='top_left'): cube,
+            netcdf_path.format(pos='top_right'): ref_cube,
+            netcdf_path.format(pos='bottom'): bias_cube,
+        }
+
+        return (plot_path, netcdf_paths)
+
+    def _plot_zonal_mean_profile_without_ref(self, plot_func, dataset):
+        """Plot zonal mean profile for single dataset without reference."""
+        plot_type = 'zonal_mean_profile'
+        logger.info("Plotting 
zonal mean profile without reference dataset" + " for '%s'", + self._get_label(dataset)) + + # Make sure that the data has the correct dimensions + cube = dataset['cube'] + dim_coords_dat = self._check_cube_dimensions(cube, plot_type) + + # Create plot with desired settings + with mpl.rc_context(self._get_custom_mpl_rc_params(plot_type)): + fig = plt.figure(**self.cfg['figure_kwargs']) + axes = fig.add_subplot() + plot_kwargs = self._get_plot_kwargs(plot_type, dataset) + plot_kwargs['axes'] = axes + plot_zonal_mean_profile = plot_func(cube, **plot_kwargs) + + # Print statistics if desired + self._add_stats(plot_type, axes, dim_coords_dat, dataset) + + # Setup colorbar + fontsize = self.plots[plot_type]['fontsize'] + colorbar = fig.colorbar(plot_zonal_mean_profile, ax=axes, + **self._get_cbar_kwargs(plot_type)) + colorbar.set_label(self._get_cbar_label(plot_type, dataset), + fontsize=fontsize) + colorbar.ax.tick_params(labelsize=fontsize) + + # Customize plot + axes.set_title(self._get_label(dataset)) + fig.suptitle(dataset['long_name']) + axes.set_xlabel('latitude [°N]') + z_coord = cube.coord(axis='Z') + axes.set_ylabel(f'{z_coord.long_name} [{z_coord.units}]') + if self.plots[plot_type]['log_y']: + axes.set_yscale('log') + axes.get_yaxis().set_major_formatter( + FormatStrFormatter('%.1f')) + if self.plots[plot_type]['show_y_minor_ticklabels']: + axes.get_yaxis().set_minor_formatter( + FormatStrFormatter('%.1f')) + else: + axes.get_yaxis().set_minor_formatter(NullFormatter()) + self._process_pyplot_kwargs(plot_type, dataset) + + # Rasterization + if self.plots[plot_type]['rasterize']: + self._set_rasterized([axes]) + + # File paths + plot_path = self.get_plot_path(plot_type, dataset) + netcdf_path = get_diagnostic_filename(Path(plot_path).stem, self.cfg) + + return (plot_path, {netcdf_path: cube}) + + def _plot_hovmoeller_z_vs_time_without_ref(self, plot_func, dataset): + """Plot Hovmoeller Z vs. time for single dataset without reference.""" + plot_type = 'hovmoeller_z_vs_time' + logger.info( + "Plotting Hovmoeller Z vs. 
time without reference dataset"
+            " for '%s'", self._get_label(dataset))
+
+        # Make sure that the data has the correct dimensions
+        cube = dataset['cube']
+        dim_coords_dat = self._check_cube_dimensions(cube, plot_type)
+
+        # Create plot with desired settings
+        with mpl.rc_context(self._get_custom_mpl_rc_params(plot_type)):
+            fig = plt.figure(**self.cfg['figure_kwargs'])
+            axes = fig.add_subplot()
+            plot_kwargs = self._get_plot_kwargs(plot_type, dataset)
+            plot_kwargs['axes'] = axes
+            plot_hovmoeller = plot_func(cube, **plot_kwargs)
+
+            # Print statistics if desired
+            self._add_stats(plot_type, axes, dim_coords_dat, dataset)
+
+            # Setup colorbar
+            fontsize = self.plots[plot_type]['fontsize']
+            colorbar = fig.colorbar(plot_hovmoeller,
+                                    ax=axes,
+                                    **self._get_cbar_kwargs(plot_type))
+            colorbar.set_label(self._get_cbar_label(plot_type, dataset),
+                               fontsize=fontsize)
+            colorbar.ax.tick_params(labelsize=fontsize)
+
+            # Customize plot
+            axes.set_title(self._get_label(dataset))
+            fig.suptitle(dataset['long_name'])
+            z_coord = cube.coord(axis='Z')
+            axes.set_ylabel(f'{z_coord.long_name} [{z_coord.units}]')
+            if self.plots[plot_type]['log_y']:
+                axes.set_yscale('log')
+                axes.get_yaxis().set_major_formatter(
+                    FormatStrFormatter('%.1f'))
+            if self.plots[plot_type]['show_y_minor_ticklabels']:
+                axes.get_yaxis().set_minor_formatter(
+                    FormatStrFormatter('%.1f'))
+            else:
+                axes.get_yaxis().set_minor_formatter(NullFormatter())
+            if self.plots[plot_type]['time_format'] is not None:
+                axes.get_xaxis().set_major_formatter(
+                    mdates.DateFormatter(self.plots[plot_type]['time_format']))
+            axes.set_xlabel('time')
+            self._process_pyplot_kwargs(plot_type, dataset)
+
+            # Rasterization
+            if self.plots[plot_type]['rasterize']:
+                self._set_rasterized([axes])
+
+        # File paths
+        plot_path = self.get_plot_path(plot_type, dataset)
+        netcdf_path = get_diagnostic_filename(Path(plot_path).stem, self.cfg)
+
+        return (plot_path, {netcdf_path: cube})
+
+    def _plot_hovmoeller_z_vs_time_with_ref(self, plot_func, dataset,
+                                            ref_dataset):
+        """Plot Hovmoeller Z vs. time for single dataset with reference."""
+        plot_type = 'hovmoeller_z_vs_time'
+        logger.info(
+            "Plotting Hovmoeller Z vs. time with reference dataset"
+            " '%s' for '%s'", self._get_label(ref_dataset),
+            self._get_label(dataset))
+
+        # Make sure that the data has the correct dimensions
+        cube = dataset['cube']
+        ref_cube = ref_dataset['cube']
+        dim_coords_dat = self._check_cube_dimensions(cube, plot_type)
+        dim_coords_ref = self._check_cube_dimensions(ref_cube, plot_type)
+
+        # Create single figure with multiple axes
+        with mpl.rc_context(self._get_custom_mpl_rc_params(plot_type)):
+            fig = plt.figure(**self.cfg['figure_kwargs'])
+            gridspec = GridSpec(5,
+                                4,
+                                figure=fig,
+                                height_ratios=[1.0, 1.0, 0.4, 1.0, 1.0])
+
+            # Options used for all subplots
+            plot_kwargs = self._get_plot_kwargs(plot_type, dataset)
+            fontsize = self.plots[plot_type]['fontsize']
+
+            # Plot dataset (top left)
+            axes_data = fig.add_subplot(gridspec[0:2, 0:2])
+            plot_kwargs['axes'] = axes_data
+            plot_data = plot_func(cube, **plot_kwargs)
+            axes_data.set_title(self._get_label(dataset), pad=3.0)
+            z_coord = cube.coord(axis='Z')
+            axes_data.set_ylabel(f'{z_coord.long_name} [{z_coord.units}]')
+            if self.plots[plot_type]['log_y']:
+                axes_data.set_yscale('log')
+                axes_data.get_yaxis().set_major_formatter(
+                    FormatStrFormatter('%.1f'))
+            if self.plots[plot_type]['show_y_minor_ticklabels']:
+                axes_data.get_yaxis().set_minor_formatter(
+                    FormatStrFormatter('%.1f'))
+            else:
+                axes_data.get_yaxis().set_minor_formatter(NullFormatter())
+            if self.plots[plot_type]['time_format'] is not None:
+                axes_data.get_xaxis().set_major_formatter(
+                    mdates.DateFormatter(self.plots[plot_type]['time_format']))
+            self._add_stats(plot_type, axes_data, dim_coords_dat, dataset)
+            self._process_pyplot_kwargs(plot_type, dataset)
+
+            # Plot reference dataset (top right)
+            # Note: make sure to use the same vmin and vmax as the top left
+            # plot if a common colorbar is desired
+            axes_ref = fig.add_subplot(gridspec[0:2, 2:4],
+                                       sharex=axes_data,
+                                       sharey=axes_data)
+            plot_kwargs['axes'] = axes_ref
+            if self.plots[plot_type]['common_cbar']:
+                plot_kwargs.setdefault('vmin', plot_data.get_clim()[0])
+                plot_kwargs.setdefault('vmax', plot_data.get_clim()[1])
+            plot_ref = plot_func(ref_cube, **plot_kwargs)
+            axes_ref.set_title(self._get_label(ref_dataset), pad=3.0)
+            plt.setp(axes_ref.get_yticklabels(), visible=False)
+            self._add_stats(plot_type, axes_ref, dim_coords_ref, ref_dataset)
+            self._process_pyplot_kwargs(plot_type, ref_dataset)
+
+            # Add colorbar(s)
+            self._add_colorbar(plot_type, plot_data, plot_ref, axes_data,
+                               axes_ref, dataset, ref_dataset)
+
+            # Plot bias (bottom center)
+            bias_cube = cube - ref_cube
+            axes_bias = fig.add_subplot(gridspec[3:5, 1:3],
+                                        sharex=axes_data,
+                                        sharey=axes_data)
+            plot_kwargs_bias = self._get_plot_kwargs(plot_type,
+                                                     dataset,
+                                                     bias=True)
+            plot_kwargs_bias['axes'] = axes_bias
+            plot_bias = plot_func(bias_cube, **plot_kwargs_bias)
+            axes_bias.set_title(
+                f"{self._get_label(dataset)} - {self._get_label(ref_dataset)}",
+                pad=3.0,
+            )
+            axes_bias.set_xlabel('time')
+            axes_bias.set_ylabel(f'{z_coord.long_name} [{z_coord.units}]')
+            cbar_kwargs_bias = self._get_cbar_kwargs(plot_type, bias=True)
+            cbar_bias = fig.colorbar(plot_bias,
+                                     ax=axes_bias,
+                                     **cbar_kwargs_bias)
+            cbar_bias.set_label(
+                self._get_cbar_label(plot_type, dataset, bias=True),
+                fontsize=fontsize,
+            )
+            cbar_bias.ax.tick_params(labelsize=fontsize)
+            self._add_stats(plot_type, axes_bias, dim_coords_dat, dataset,
+                            ref_dataset)
+
+            # Customize plot
+            fig.suptitle(dataset['long_name'])
+            self._process_pyplot_kwargs(plot_type, dataset)
+
+            # Rasterization
+            if self.plots[plot_type]['rasterize']:
+                self._set_rasterized([axes_data, axes_ref, axes_bias])
+
+        # File paths
+        plot_path = self.get_plot_path(plot_type, dataset)
+        netcdf_path = (get_diagnostic_filename(
+            Path(plot_path).stem + "_{pos}", self.cfg))
+        netcdf_paths = {
+            netcdf_path.format(pos='top_left'): cube,
+            netcdf_path.format(pos='top_right'): ref_cube,
+            netcdf_path.format(pos='bottom'): bias_cube,
+        }
+
+        return (plot_path, netcdf_paths)
+
+    def _plot_hovmoeller_time_vs_lat_or_lon_with_ref(self, plot_func, dataset,
+                                                     ref_dataset):
+        """Plot time vs. lat/lon Hovmoeller for dataset with reference."""
+        plot_type = 'hovmoeller_time_vs_lat_or_lon'
+        logger.info("Plotting Hovmoeller plots with reference dataset"
+                    " '%s' for '%s'",
+                    self._get_label(ref_dataset), self._get_label(dataset))
+
+        # Make sure that the data has the correct dimensions
+        cube = dataset['cube']
+        ref_cube = ref_dataset['cube']
+        dim_coords_dat = self._check_cube_dimensions(cube, plot_type)
+        self._check_cube_dimensions(ref_cube, plot_type)
+        if 'latitude' in dim_coords_dat:
+            non_time_label = 'latitude [°N]'
+        else:
+            non_time_label = 'longitude [°E]'
+
+        # Create single figure with multiple axes
+        with mpl.rc_context(self._get_custom_mpl_rc_params(plot_type)):
+            fig = plt.figure(**self.cfg['figure_kwargs'])
+            gridspec = GridSpec(5, 4, figure=fig,
+                                height_ratios=[1.0, 1.0, 0.4, 1.0, 1.0])
+
+            # Options used for all subplots
+            plot_kwargs = self._get_plot_kwargs(plot_type, dataset)
+            fontsize = self.plots[plot_type]['fontsize']
+
+            # Plot dataset (top left)
+            axes_data = fig.add_subplot(gridspec[0:2, 0:2])
+            plot_kwargs['axes'] = axes_data
+            if self.plots[plot_type]['time_on'] == 'x-axis':
+                plot_kwargs['coords'] = list(dim_coords_dat)
+                x_label = 'time'
+                y_label = non_time_label
+                time_axis = axes_data.get_xaxis()
+            else:
+                plot_kwargs['coords'] = list(reversed(dim_coords_dat))
+                x_label = non_time_label
+                y_label = 'time'
+                time_axis = axes_data.get_yaxis()
+            plot_data = plot_func(cube, **plot_kwargs)
+            axes_data.set_title(self._get_label(dataset), pad=3.0)
+            axes_data.set_ylabel(y_label)
+            if self.plots[plot_type]['time_format'] is not None:
+                time_axis.set_major_formatter(mdates.DateFormatter(
+                    self.plots[plot_type]['time_format']
+                ))
+            if self.plots[plot_type]['show_y_minor_ticks']:
+                axes_data.get_yaxis().set_minor_locator(AutoMinorLocator())
+            if self.plots[plot_type]['show_x_minor_ticks']:
+                axes_data.get_xaxis().set_minor_locator(AutoMinorLocator())
+            self._process_pyplot_kwargs(plot_type, dataset)
+
+            # Plot reference dataset (top right)
+            # Note: make sure to use the same vmin and vmax as the top left
+            # plot if a common colorbar is desired
+            axes_ref = fig.add_subplot(gridspec[0:2, 2:4], sharex=axes_data,
+                                       sharey=axes_data)
+            plot_kwargs['axes'] = axes_ref
+            if self.plots[plot_type]['common_cbar']:
+                plot_kwargs.setdefault('vmin', plot_data.get_clim()[0])
+                plot_kwargs.setdefault('vmax', plot_data.get_clim()[1])
+            plot_ref = plot_func(ref_cube, **plot_kwargs)
+            axes_ref.set_title(self._get_label(ref_dataset), pad=3.0)
+            plt.setp(axes_ref.get_yticklabels(), visible=False)
+            self._process_pyplot_kwargs(plot_type, ref_dataset)
+
+            # Add colorbar(s)
+            self._add_colorbar(plot_type, plot_data, plot_ref, axes_data,
+                               axes_ref, dataset, ref_dataset)
+
+            # Plot bias (bottom center)
+            bias_cube = cube - ref_cube
+            axes_bias = fig.add_subplot(gridspec[3:5, 1:3], sharex=axes_data,
+                                        sharey=axes_data)
+            plot_kwargs_bias = self._get_plot_kwargs(plot_type, dataset,
+                                                     bias=True)
+            plot_kwargs_bias['axes'] 
= axes_bias + plot_kwargs_bias['coords'] = plot_kwargs['coords'] + plot_bias = plot_func(bias_cube, **plot_kwargs_bias) + axes_bias.set_title( + f"{self._get_label(dataset)} - {self._get_label(ref_dataset)}", + pad=3.0, + ) + axes_bias.set_xlabel(x_label) + axes_bias.set_ylabel(y_label) + cbar_kwargs_bias = self._get_cbar_kwargs(plot_type, bias=True) + cbar_bias = fig.colorbar(plot_bias, ax=axes_bias, + **cbar_kwargs_bias) + cbar_bias.set_label( + self._get_cbar_label(plot_type, dataset, bias=True), + fontsize=fontsize, + ) + cbar_bias.ax.tick_params(labelsize=fontsize) + + # Customize plot + fig.suptitle(dataset['long_name']) + self._process_pyplot_kwargs(plot_type, dataset) + + # Rasterization + if self.plots[plot_type]['rasterize']: + self._set_rasterized([axes_data, axes_ref, axes_bias]) + + # File paths + plot_path = self.get_plot_path(plot_type, dataset) + netcdf_path = ( + get_diagnostic_filename(Path(plot_path).stem + "_{pos}", self.cfg) + ) + netcdf_paths = { + netcdf_path.format(pos='top_left'): cube, + netcdf_path.format(pos='top_right'): ref_cube, + netcdf_path.format(pos='bottom'): bias_cube, + } + + return (plot_path, netcdf_paths) + + def _plot_hovmoeller_time_vs_lat_or_lon_without_ref(self, plot_func, + dataset): + """Plot time vs zonal or meridional Hovmoeller without reference.""" + plot_type = 'hovmoeller_time_vs_lat_or_lon' + logger.info("Plotting Hovmoeller plots without reference dataset" + " for '%s'", self._get_label(dataset)) + + # Make sure that the data has the correct dimensions + cube = dataset['cube'] + dim_coords_dat = self._check_cube_dimensions(cube, plot_type) + if 'latitude' in dim_coords_dat: + non_time_label = 'latitude [°N]' + else: + non_time_label = 'longitude [°E]' + + # Create plot with desired settings + with mpl.rc_context(self._get_custom_mpl_rc_params(plot_type)): + fig = plt.figure(**self.cfg['figure_kwargs']) + axes = fig.add_subplot() + plot_kwargs = self._get_plot_kwargs(plot_type, dataset) + plot_kwargs['axes'] = axes + + # Put time on desired axis + if self.plots[plot_type]['time_on'] == 'x-axis': + plot_kwargs['coords'] = list(dim_coords_dat) + x_label = 'time' + y_label = non_time_label + time_axis = axes.get_xaxis() + else: + plot_kwargs['coords'] = list(reversed(dim_coords_dat)) + x_label = non_time_label + y_label = 'time' + time_axis = axes.get_yaxis() + plot_hovmoeller = plot_func(cube, **plot_kwargs) + + # Setup colorbar + fontsize = self.plots[plot_type]['fontsize'] + colorbar = fig.colorbar(plot_hovmoeller, ax=axes, + **self._get_cbar_kwargs(plot_type)) + colorbar.set_label(self._get_cbar_label(plot_type, dataset), + fontsize=fontsize) + colorbar.ax.tick_params(labelsize=fontsize) + + # Customize plot + axes.set_title(self._get_label(dataset)) + fig.suptitle(dataset['long_name']) + axes.set_xlabel(x_label) + axes.set_ylabel(y_label) + if self.plots[plot_type]['time_format'] is not None: + time_axis.set_major_formatter(mdates.DateFormatter( + self.plots[plot_type]['time_format'] + )) + if self.plots[plot_type]['show_y_minor_ticks']: + axes.get_yaxis().set_minor_locator(AutoMinorLocator()) + if self.plots[plot_type]['show_x_minor_ticks']: + axes.get_xaxis().set_minor_locator(AutoMinorLocator()) + self._process_pyplot_kwargs(plot_type, dataset) + + # Rasterization + if self.plots[plot_type]['rasterize']: + self._set_rasterized([axes]) + + # File paths + plot_path = self.get_plot_path(plot_type, dataset) + netcdf_path = get_diagnostic_filename(Path(plot_path).stem, self.cfg) + return (plot_path, {netcdf_path: cube}) + + def 
_process_pyplot_kwargs(self, plot_type, dataset):
+        """Process functions for :mod:`matplotlib.pyplot`."""
+        pyplot_kwargs = self.plots[plot_type]['pyplot_kwargs']
+        for (func, arg) in pyplot_kwargs.items():
+            if isinstance(arg, str):
+                arg = self._fill_facet_placeholders(
+                    arg,
+                    dataset,
+                    f"pyplot_kwargs of {plot_type} '{func}: {arg}'",
+                )
+            if arg is None:
+                getattr(plt, func)()
+            elif isinstance(arg, dict):
+                getattr(plt, func)(**arg)
+            else:
+                getattr(plt, func)(arg)
+
+    @staticmethod
+    def _check_cube_dimensions(cube, plot_type):
+        """Check that cube has correct dimensional variables."""
+        expected_dimensions_dict = {
+            'annual_cycle': (['month_number'],),
+            'map': (['latitude', 'longitude'],),
+            'zonal_mean_profile': (['latitude', 'air_pressure'],
+                                   ['latitude', 'altitude']),
+            'timeseries': (['time'],),
+            '1d_profile': (['air_pressure'],
+                           ['altitude']),
+            'variable_vs_lat': (['latitude'],),
+            'hovmoeller_z_vs_time': (['time', 'air_pressure'],
+                                     ['time', 'altitude']),
+            'hovmoeller_time_vs_lat_or_lon': (['time', 'latitude'],
+                                              ['time', 'longitude']),
+        }
+        if plot_type not in expected_dimensions_dict:
+            raise NotImplementedError(f"plot_type '{plot_type}' not supported")
+        expected_dimensions = expected_dimensions_dict[plot_type]
+        for dims in expected_dimensions:
+            cube_dims = [cube.coords(dim, dim_coords=True) for dim in dims]
+            if all(cube_dims) and cube.ndim == len(dims):
+                return dims
+        expected_dims_str = ' or '.join(
+            [str(dims) for dims in expected_dimensions]
+        )
+        raise ValueError(
+            f"Expected cube with exactly the dimensional coordinates "
+            f"{expected_dims_str}, got {cube.summary(shorten=True)}")
+
+    @staticmethod
+    def _fill_facet_placeholders(string, dataset, description):
+        """Fill facet placeholders."""
+        try:
+            string = string.format(**dataset)
+        except KeyError as exc:
+            raise ValueError(
+                f"Not all necessary facets in {description} available for "
+                f"dataset\n{pformat(dataset)}") from exc
+        return string
+
+    @staticmethod
+    def _get_multi_dataset_facets(datasets):
+        """Derive common facets for multiple datasets."""
+        all_keys = {key for dataset in datasets for key in dataset}
+        multi_dataset_facets = {}
+        for key in all_keys:
+            if all(d.get(key) == datasets[0].get(key) for d in datasets):
+                multi_dataset_facets[key] = datasets[0].get(key)
+            else:
+                multi_dataset_facets[key] = f'ambiguous_{key}'
+        return multi_dataset_facets
+
+    def _get_reference_dataset(self, datasets):
+        """Extract reference dataset."""
+        variable = datasets[0][self.cfg['group_variables_by']]
+        ref_datasets = [d for d in datasets if
+                        d.get('reference_for_monitor_diags', False)]
+        if len(ref_datasets) > 1:
+            raise ValueError(
+                f"Expected at most 1 reference dataset (with "
+                f"'reference_for_monitor_diags: true') for variable "
+                f"'{variable}', got {len(ref_datasets):d}")
+        if ref_datasets:
+            return ref_datasets[0]
+        return None
+
+    def create_timeseries_plot(self, datasets):
+        """Create time series plot."""
+        plot_type = 'timeseries'
+        if plot_type not in self.plots:
+            return
+
+        if not datasets:
+            raise ValueError(f"No input data to plot '{plot_type}' given")
+
+        logger.info("Plotting %s", plot_type)
+        fig = plt.figure(**self.cfg['figure_kwargs'])
+        axes = fig.add_subplot()
+
+        # Plot all datasets in one single figure
+        ancestors = []
+        cubes = {}
+        for dataset in datasets:
+            ancestors.append(dataset['filename'])
+            cube = dataset['cube']
+            cubes[self._get_label(dataset)] = cube
+            self._check_cube_dimensions(cube, plot_type)
+
+            # Plot original time series
+            plot_kwargs = 
self._get_plot_kwargs(plot_type, dataset) + plot_kwargs['axes'] = axes + iris.plot.plot(cube, **plot_kwargs) + + # Plot annual means if desired + annual_mean_kwargs = self.plots[plot_type]['annual_mean_kwargs'] + if annual_mean_kwargs is not False: + logger.debug("Plotting annual means") + if not cube.coords('year'): + add_year(cube, 'time') + annual_mean_cube = cube.aggregated_by('year', + iris.analysis.MEAN) + plot_kwargs.pop('label', None) + plot_kwargs.update(annual_mean_kwargs) + iris.plot.plot(annual_mean_cube, **plot_kwargs) + + # Default plot appearance + multi_dataset_facets = self._get_multi_dataset_facets(datasets) + axes.set_title(multi_dataset_facets['long_name']) + axes.set_xlabel('time') + # apply time formatting + if self.plots[plot_type]['time_format'] is not None: + axes.get_xaxis().set_major_formatter( + mdates.DateFormatter(self.plots[plot_type]['time_format'])) + axes.set_ylabel( + f"{multi_dataset_facets[self.cfg['group_variables_by']]} " + f"[{multi_dataset_facets['units']}]" + ) + gridline_kwargs = self._get_gridline_kwargs(plot_type) + if gridline_kwargs is not False: + axes.grid(**gridline_kwargs) + + # Legend + legend_kwargs = self.plots[plot_type]['legend_kwargs'] + if legend_kwargs is not False: + axes.legend(**legend_kwargs) + + # Customize plot appearance + self._process_pyplot_kwargs(plot_type, multi_dataset_facets) + + # Save plot + plot_path = self.get_plot_path(plot_type, multi_dataset_facets) + fig.savefig(plot_path, **self.cfg['savefig_kwargs']) + logger.info("Wrote %s", plot_path) + plt.close() + + # Save netCDF file + netcdf_path = get_diagnostic_filename(Path(plot_path).stem, self.cfg) + var_attrs = { + n: datasets[0][n] for n in ('short_name', 'long_name', 'units') + } + io.save_1d_data(cubes, netcdf_path, 'time', var_attrs) + + # Provenance tracking + caption = (f"Time series of {multi_dataset_facets['long_name']} for " + f"various datasets.") + provenance_record = { + 'ancestors': ancestors, + 'authors': ['schlund_manuel'], + 'caption': caption, + 'plot_types': ['line'], + 'long_names': [var_attrs['long_name']], + } + with ProvenanceLogger(self.cfg) as provenance_logger: + provenance_logger.log(plot_path, provenance_record) + provenance_logger.log(netcdf_path, provenance_record) + + def create_annual_cycle_plot(self, datasets): + """Create annual cycle plot.""" + plot_type = 'annual_cycle' + if plot_type not in self.plots: + return + + if not datasets: + raise ValueError(f"No input data to plot '{plot_type}' given") + + logger.info("Plotting %s", plot_type) + fig = plt.figure(**self.cfg['figure_kwargs']) + axes = fig.add_subplot() + + # Plot all datasets in one single figure + ancestors = [] + cubes = {} + for dataset in datasets: + ancestors.append(dataset['filename']) + cube = dataset['cube'] + cubes[self._get_label(dataset)] = cube + self._check_cube_dimensions(cube, plot_type) + + # Plot annual cycle + plot_kwargs = self._get_plot_kwargs(plot_type, dataset) + plot_kwargs['axes'] = axes + iris.plot.plot(cube, **plot_kwargs) + + # Default plot appearance + multi_dataset_facets = self._get_multi_dataset_facets(datasets) + axes.set_title(multi_dataset_facets['long_name']) + axes.set_xlabel('Month') + axes.set_ylabel( + f"{multi_dataset_facets[self.cfg['group_variables_by']]} " + f"[{multi_dataset_facets['units']}]" + ) + axes.set_xticks(range(1, 13), [str(m) for m in range(1, 13)]) + gridline_kwargs = self._get_gridline_kwargs(plot_type) + if gridline_kwargs is not False: + axes.grid(**gridline_kwargs) + + # Legend + legend_kwargs = 
self.plots[plot_type]['legend_kwargs'] + if legend_kwargs is not False: + axes.legend(**legend_kwargs) + + # Customize plot appearance + self._process_pyplot_kwargs(plot_type, multi_dataset_facets) + + # Save plot + plot_path = self.get_plot_path(plot_type, multi_dataset_facets) + fig.savefig(plot_path, **self.cfg['savefig_kwargs']) + logger.info("Wrote %s", plot_path) + plt.close() + + # Save netCDF file + netcdf_path = get_diagnostic_filename(Path(plot_path).stem, self.cfg) + var_attrs = { + n: datasets[0][n] for n in ('short_name', 'long_name', 'units') + } + io.save_1d_data(cubes, netcdf_path, 'month_number', var_attrs) + + # Provenance tracking + caption = (f"Annual cycle of {multi_dataset_facets['long_name']} for " + f"various datasets.") + provenance_record = { + 'ancestors': ancestors, + 'authors': ['schlund_manuel'], + 'caption': caption, + 'plot_types': ['seas'], + 'long_names': [var_attrs['long_name']], + } + with ProvenanceLogger(self.cfg) as provenance_logger: + provenance_logger.log(plot_path, provenance_record) + provenance_logger.log(netcdf_path, provenance_record) + + def create_map_plot(self, datasets): + """Create map plot.""" + plot_type = 'map' + if plot_type not in self.plots: + return + + if not datasets: + raise ValueError(f"No input data to plot '{plot_type}' given") + + # Get reference dataset if possible + ref_dataset = self._get_reference_dataset(datasets) + if ref_dataset is None: + logger.info("Plotting %s without reference dataset", plot_type) + else: + logger.info("Plotting %s with reference dataset '%s'", plot_type, + self._get_label(ref_dataset)) + + # Get plot function + plot_func = self._get_plot_func(plot_type) + + # Create a single plot for each dataset (incl. reference dataset if + # given) + for dataset in datasets: + if dataset == ref_dataset: + continue + ancestors = [dataset['filename']] + if ref_dataset is None: + (plot_path, netcdf_paths) = ( + self._plot_map_without_ref(plot_func, dataset) + ) + caption = ( + f"Map plot of {dataset['long_name']} of dataset " + f"{dataset['alias']}." + ) + else: + (plot_path, netcdf_paths) = ( + self._plot_map_with_ref(plot_func, dataset, ref_dataset) + ) + caption = ( + f"Map plot of {dataset['long_name']} of dataset " + f"{dataset['alias']} including bias relative to " + f"{ref_dataset['alias']}." 
+ ) + ancestors.append(ref_dataset['filename']) + + # If statistics are shown add a brief description to the caption + if self.plots[plot_type]['show_stats']: + caption += ( + " The number in the top left corner corresponds to the " + "spatial mean (weighted by grid cell areas).") + + # Save plot + plt.savefig(plot_path, **self.cfg['savefig_kwargs']) + logger.info("Wrote %s", plot_path) + plt.close() + + # Save netCDFs + for (netcdf_path, cube) in netcdf_paths.items(): + io.iris_save(cube, netcdf_path) + + # Provenance tracking + provenance_record = { + 'ancestors': ancestors, + 'authors': ['schlund_manuel'], + 'caption': caption, + 'plot_types': ['map'], + 'long_names': [dataset['long_name']], + } + with ProvenanceLogger(self.cfg) as provenance_logger: + provenance_logger.log(plot_path, provenance_record) + for netcdf_path in netcdf_paths: + provenance_logger.log(netcdf_path, provenance_record) + + def create_zonal_mean_profile_plot(self, datasets): + """Create zonal mean profile plot.""" + plot_type = 'zonal_mean_profile' + if plot_type not in self.plots: + return + + if not datasets: + raise ValueError(f"No input data to plot '{plot_type}' given") + + # Get reference dataset if possible + ref_dataset = self._get_reference_dataset(datasets) + if ref_dataset is None: + logger.info("Plotting %s without reference dataset", plot_type) + else: + logger.info("Plotting %s with reference dataset '%s'", plot_type, + self._get_label(ref_dataset)) + + # Get plot function + plot_func = self._get_plot_func(plot_type) + + # Create a single plot for each dataset (incl. reference dataset if + # given) + for dataset in datasets: + if dataset == ref_dataset: + continue + ancestors = [dataset['filename']] + if ref_dataset is None: + (plot_path, netcdf_paths) = ( + self._plot_zonal_mean_profile_without_ref(plot_func, + dataset) + ) + caption = ( + f"Zonal mean profile of {dataset['long_name']} of dataset " + f"{dataset['alias']}." + ) + else: + (plot_path, netcdf_paths) = ( + self._plot_zonal_mean_profile_with_ref(plot_func, dataset, + ref_dataset) + ) + caption = ( + f"Zonal mean profile of {dataset['long_name']} of dataset " + f"{dataset['alias']} including bias relative to " + f"{ref_dataset['alias']}." 
+ ) + ancestors.append(ref_dataset['filename']) + + # If statistics are shown add a brief description to the caption + if self.plots[plot_type]['show_stats']: + caption += ( + " The number in the top left corner corresponds to the " + "spatial mean (weighted by grid cell areas).") + + # Save plot + plt.savefig(plot_path, **self.cfg['savefig_kwargs']) + logger.info("Wrote %s", plot_path) + plt.close() + + # Save netCDFs + for (netcdf_path, cube) in netcdf_paths.items(): + io.iris_save(cube, netcdf_path) + + # Provenance tracking + provenance_record = { + 'ancestors': ancestors, + 'authors': ['schlund_manuel'], + 'caption': caption, + 'plot_types': ['vert'], + 'long_names': [dataset['long_name']], + } + with ProvenanceLogger(self.cfg) as provenance_logger: + provenance_logger.log(plot_path, provenance_record) + for netcdf_path in netcdf_paths: + provenance_logger.log(netcdf_path, provenance_record) + + def create_1d_profile_plot(self, datasets): + """Create 1D profile plot.""" + plot_type = '1d_profile' + if plot_type not in self.plots: + return + + if not datasets: + raise ValueError(f"No input data to plot '{plot_type}' given") + + logger.info("Plotting %s", plot_type) + fig = plt.figure(**self.cfg['figure_kwargs']) + axes = fig.add_subplot() + + multi_dataset_facets = self._get_multi_dataset_facets(datasets) + + # Plot all datasets in one single figure + ancestors = [] + cubes = {} + for dataset in datasets: + ancestors.append(dataset['filename']) + cube = dataset['cube'] + cubes[self._get_label(dataset)] = cube + self._check_cube_dimensions(cube, plot_type) + + # Plot 1D profile + plot_kwargs = self._get_plot_kwargs(plot_type, dataset) + plot_kwargs['axes'] = axes + + iris.plot.plot(cube, **plot_kwargs) + + # Default plot appearance + axes.set_title(multi_dataset_facets['long_name']) + axes.set_xlabel( + f"{multi_dataset_facets[self.cfg['group_variables_by']]} " + f"[{multi_dataset_facets['units']}]" + ) + z_coord = cube.coord(axis='Z') + axes.set_ylabel(f'{z_coord.long_name} [{z_coord.units}]') + + # apply logarithmic axes + if self.plots[plot_type]['log_y']: + axes.set_yscale('log') + axes.get_yaxis().set_major_formatter( + FormatStrFormatter('%.1f')) + if self.plots[plot_type]['show_y_minor_ticklabels']: + axes.get_yaxis().set_minor_formatter( + FormatStrFormatter('%.1f')) + else: + axes.get_yaxis().set_minor_formatter(NullFormatter()) + if self.plots[plot_type]['log_x']: + axes.set_xscale('log') + # major and minor ticks + x_major = LogLocator(base=10.0, numticks=12) + axes.get_xaxis().set_major_locator(x_major) + x_minor = LogLocator(base=10.0, + subs=np.arange(1.0, 10.0) * 0.1, + numticks=12) + + axes.get_xaxis().set_minor_locator(x_minor) + axes.get_xaxis().set_minor_formatter(NullFormatter()) + + # gridlines + gridline_kwargs = self._get_gridline_kwargs(plot_type) + if gridline_kwargs is not False: + axes.grid(**gridline_kwargs) + # nicer aspect ratio + aspect_ratio = self.plots[plot_type]['aspect_ratio'] + axes.set_box_aspect(aspect_ratio) + + # Legend + legend_kwargs = self.plots[plot_type]['legend_kwargs'] + if legend_kwargs is not False: + axes.legend(**legend_kwargs) + + # Customize plot appearance + self._process_pyplot_kwargs(plot_type, multi_dataset_facets) + + # Save plot + plot_path = self.get_plot_path(plot_type, multi_dataset_facets) + fig.savefig(plot_path, **self.cfg['savefig_kwargs']) + logger.info("Wrote %s", plot_path) + plt.close() + + # Save netCDF file + netcdf_path = get_diagnostic_filename(Path(plot_path).stem, self.cfg) + var_attrs = { + n: datasets[0][n] 
for n in ('short_name', 'long_name', 'units')
+        }
+        io.save_1d_data(cubes, netcdf_path, z_coord.standard_name, var_attrs)
+
+        # Provenance tracking
+        caption = ("Vertical one-dimensional profile of "
+                   f"{multi_dataset_facets['long_name']}"
+                   " for various datasets.")
+        provenance_record = {
+            'ancestors': ancestors,
+            'authors': ['schlund_manuel', 'winterstein_franziska'],
+            'caption': caption,
+            'plot_types': ['line'],
+            'long_names': [var_attrs['long_name']],
+        }
+        with ProvenanceLogger(self.cfg) as provenance_logger:
+            provenance_logger.log(plot_path, provenance_record)
+            provenance_logger.log(netcdf_path, provenance_record)
+
+    def create_variable_vs_lat_plot(self, datasets):
+        """Create variable vs. latitude plot."""
+        plot_type = 'variable_vs_lat'
+        if plot_type not in self.plots:
+            return
+        if not datasets:
+            raise ValueError(f"No input data to plot '{plot_type}' given")
+        logger.info("Plotting %s", plot_type)
+        fig = plt.figure(**self.cfg['figure_kwargs'])
+        axes = fig.add_subplot()
+
+        # Plot all datasets in one single figure
+        ancestors = []
+        cubes = {}
+        for dataset in datasets:
+            ancestors.append(dataset['filename'])
+            cube = dataset['cube']
+            cubes[self._get_label(dataset)] = cube
+            self._check_cube_dimensions(cube, plot_type)
+
+            # Plot data
+            plot_kwargs = self._get_plot_kwargs(plot_type, dataset)
+            plot_kwargs['axes'] = axes
+            iris.plot.plot(cube, **plot_kwargs)
+
+        # Default plot appearance
+        multi_dataset_facets = self._get_multi_dataset_facets(datasets)
+        axes.set_title(multi_dataset_facets['long_name'])
+        axes.set_xlabel('latitude [°N]')
+        axes.set_ylabel(
+            f"{multi_dataset_facets[self.cfg['group_variables_by']]} "
+            f"[{multi_dataset_facets['units']}]"
+        )
+        gridline_kwargs = self._get_gridline_kwargs(plot_type)
+        if gridline_kwargs is not False:
+            axes.grid(**gridline_kwargs)
+
+        # Legend
+        legend_kwargs = self.plots[plot_type]['legend_kwargs']
+        if legend_kwargs is not False:
+            axes.legend(**legend_kwargs)
+
+        # Customize plot appearance
+        self._process_pyplot_kwargs(plot_type, multi_dataset_facets)
+
+        # Save plot
+        plot_path = self.get_plot_path(plot_type, multi_dataset_facets)
+        fig.savefig(plot_path, **self.cfg['savefig_kwargs'])
+        logger.info("Wrote %s", plot_path)
+        plt.close()
+
+        # Save netCDF file
+        netcdf_path = get_diagnostic_filename(Path(plot_path).stem, self.cfg)
+        var_attrs = {
+            n: datasets[0][n] for n in ('short_name', 'long_name', 'units')
+        }
+        io.save_1d_data(cubes, netcdf_path, 'latitude', var_attrs)
+
+        # Provenance tracking
+        caption = (f"{multi_dataset_facets['long_name']} vs. latitude for "
+                   f"various datasets.")
+        provenance_record = {
+            'ancestors': ancestors,
+            'authors': ['sarauer_ellen'],
+            'caption': caption,
+            'plot_types': ['line'],
+            'long_names': [var_attrs['long_name']],
+        }
+        with ProvenanceLogger(self.cfg) as provenance_logger:
+            provenance_logger.log(plot_path, provenance_record)
+            provenance_logger.log(netcdf_path, provenance_record)
+
+    def create_hovmoeller_z_vs_time_plot(self, datasets):
+        """Create Hovmoeller Z vs. 
time plot.""" + plot_type = 'hovmoeller_z_vs_time' + if plot_type not in self.plots: + return + + if not datasets: + raise ValueError(f"No input data to plot '{plot_type}' given") + + # Get reference dataset if possible + ref_dataset = self._get_reference_dataset(datasets) + if ref_dataset is None: + logger.info("Plotting %s without reference dataset", plot_type) + else: + logger.info("Plotting %s with reference dataset '%s'", plot_type, + self._get_label(ref_dataset)) + + # Get plot function + plot_func = self._get_plot_func(plot_type) + + # Create a single plot for each dataset (incl. reference dataset if + # given) + for dataset in datasets: + if dataset == ref_dataset: + continue + ancestors = [dataset['filename']] + if ref_dataset is None: + (plot_path, + netcdf_paths) = (self._plot_hovmoeller_z_vs_time_without_ref( + plot_func, dataset)) + caption = ( + f"Hovmoeller Z vs. time plot of {dataset['long_name']} " + f"of dataset {dataset['alias']}." + ) + else: + (plot_path, + netcdf_paths) = (self._plot_hovmoeller_z_vs_time_with_ref( + plot_func, dataset, ref_dataset)) + caption = ( + f"Hovmoeller Z vs. time plot of {dataset['long_name']} " + f"of dataset {dataset['alias']} including bias relative " + f"to {ref_dataset['alias']}." + ) + ancestors.append(ref_dataset['filename']) + + # If statistics are shown add a brief description to the caption + if self.plots[plot_type]['show_stats']: + caption += ( + " The number in the top left corner corresponds to the " + "spatiotemporal mean.") + + # Save plot + plt.savefig(plot_path, **self.cfg['savefig_kwargs']) + logger.info("Wrote %s", plot_path) + plt.close() + + # Save netCDFs + for (netcdf_path, cube) in netcdf_paths.items(): + io.iris_save(cube, netcdf_path) + + # Provenance tracking + provenance_record = { + 'ancestors': ancestors, + 'authors': ['kuehbacher_birgit', 'heuer_helge'], + 'caption': caption, + 'plot_types': ['vert'], + 'long_names': [dataset['long_name']], + } + with ProvenanceLogger(self.cfg) as provenance_logger: + provenance_logger.log(plot_path, provenance_record) + for netcdf_path in netcdf_paths: + provenance_logger.log(netcdf_path, provenance_record) + + def create_hovmoeller_time_vs_lat_or_lon_plot(self, datasets): + """Create the Hovmoeller plot with time vs latitude or longitude.""" + plot_type = 'hovmoeller_time_vs_lat_or_lon' + if plot_type not in self.plots: + return + + if not datasets: + raise ValueError(f"No input data to plot '{plot_type}' given") + + # Get reference dataset if possible + ref_dataset = self._get_reference_dataset(datasets) + if ref_dataset is None: + logger.info("Plotting %s without reference dataset", plot_type) + else: + logger.info("Plotting %s with reference dataset '%s'", plot_type, + self._get_label(ref_dataset)) + + # Get plot function + plot_func = self._get_plot_func(plot_type) + + # Create a single plot for each dataset (incl. reference dataset if + # given) + for dataset in datasets: + if dataset == ref_dataset: + continue + ancestors = [dataset['filename']] + if ref_dataset is None: + (plot_path, netcdf_paths) = ( + self._plot_hovmoeller_time_vs_lat_or_lon_without_ref( + plot_func, + dataset) + ) + caption = ( + f"Hovmoeller plot of {dataset['long_name']} of dataset " + f"{dataset['alias']}." + ) + else: + (plot_path, netcdf_paths) = ( + self._plot_hovmoeller_time_vs_lat_or_lon_with_ref( + plot_func, dataset, ref_dataset) + ) + caption = ( + f"Hovmoeller plot of {dataset['long_name']} of dataset " + f"{dataset['alias']} including bias relative to " + f"{ref_dataset['alias']}." 
+ ) + ancestors.append(ref_dataset['filename']) + + # Save plot + plt.savefig(plot_path, **self.cfg['savefig_kwargs']) + logger.info("Wrote %s", plot_path) + plt.close() + + # Save netCDFs + for (netcdf_path, cube) in netcdf_paths.items(): + io.iris_save(cube, netcdf_path) + + # Provenance tracking + provenance_record = { + 'ancestors': ancestors, + 'authors': [ + 'schlund_manuel', + 'kraft_jeremy', + 'lindenlaub_lukas' + ], + 'caption': caption, + 'plot_types': ['zonal'], + 'long_names': [dataset['long_name']], + } + with ProvenanceLogger(self.cfg) as provenance_logger: + provenance_logger.log(plot_path, provenance_record) + for netcdf_path in netcdf_paths: + provenance_logger.log(netcdf_path, provenance_record) + + def compute(self): + """Plot preprocessed data.""" + for (var_key, datasets) in self.grouped_input_data.items(): + logger.info("Processing variable %s", var_key) + self.create_timeseries_plot(datasets) + self.create_annual_cycle_plot(datasets) + self.create_map_plot(datasets) + self.create_zonal_mean_profile_plot(datasets) + self.create_1d_profile_plot(datasets) + self.create_variable_vs_lat_plot(datasets) + self.create_hovmoeller_z_vs_time_plot(datasets) + self.create_hovmoeller_time_vs_lat_or_lon_plot(datasets) + + +def main(): + """Run diagnostic.""" + with run_diagnostic() as config: + with warnings.catch_warnings(): + warnings.filterwarnings( + 'ignore', + message="Using DEFAULT_SPHERICAL_EARTH_RADIUS", + category=UserWarning, + module='iris', + ) + MultiDatasets(config).compute() + + +if __name__ == '__main__': + main() diff --git a/esmvaltool/diag_scripts/mpqb/mpqb_cfg_xch4.yml b/esmvaltool/diag_scripts/mpqb/mpqb_cfg_xch4.yml new file mode 100644 index 0000000000..5cd6804c7b --- /dev/null +++ b/esmvaltool/diag_scripts/mpqb/mpqb_cfg_xch4.yml @@ -0,0 +1,39 @@ +# ESMValTool +# +# Configuration related to plotting of different ECVs +# for usage in the different MPQB diagnostics. +--- + +datasetnames: + CDS-XCH4: CDS-XCH4 + BCC-CSM2-MR: BCC-CSM2-MR + CNRM-ESM2-1: CNRM-ESM2-1 + UKESM1-0-LL: UKESM1-0-LL + BNU-ESM: BNU-ESM + FIO-ESM: FIO-ESM + MRI-ESM1: MRI-ESM1 + BCC-ESM1: BCC-ESM1 + MRI-ESM2-0: MRI-ESM2-0 + CESM2-WACCM: CESM2-WACCM + CESM2-WACCM-FV2: CESM2-WACCM-FV2 + GFDL-ESM4: GFDL-ESM4 + +datasetcolors: + CDS-XCH4: '#d62728' + BCC-CSM2-MR: '#a569bd' + CNRM-ESM2-1: '#2ca02c' + UKESM1-0-LL: '#1f77b4' + BNU-ESM: '#458ef6' + FIO-ESM: '#2ca02c' + MRI-ESM1: '#1f77b4' + BCC-ESM1: '#633974' + MRI-ESM2-0: '#aed6f1' + CESM2-WACCM: '#17a589' + CESM2-WACCM-FV2: '#76d7c4' + GFDL-ESM4: '#9c640c' + +# From: +# https://stackoverflow.com/questions/42086276/ +# Cycle continues like this: +# '#17becf' +# Hint: use colorbrewer for setting colors. 
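The name and color mappings above are consumed by `get_mpqb_cfg` in `mpqb_utils.py` (added further below), which falls back to the alias and to black when a key is missing. A minimal sketch of that lookup pattern; the helper name `lookup_dataset_color` is illustrative only, and the YAML file is assumed to sit next to the calling module:

import os

import yaml


def lookup_dataset_color(alias, cfg_file='mpqb_cfg_xch4.yml'):
    """Return the color configured for *alias*, or black if unknown."""
    # Assumption: the YAML config lives in the same directory as this module.
    path = os.path.join(os.path.dirname(__file__), cfg_file)
    with open(path, 'r', encoding='utf-8') as handle:
        mpqb_cfg = yaml.safe_load(handle)
    # Missing keys fall back to matplotlib's shorthand for black ('k').
    return mpqb_cfg.get('datasetcolors', {}).get(alias, 'k')


# Example: lookup_dataset_color('UKESM1-0-LL') would return '#1f77b4'
# with the mapping defined above.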
diff --git a/esmvaltool/diag_scripts/mpqb/mpqb_lineplot.py b/esmvaltool/diag_scripts/mpqb/mpqb_lineplot.py
new file mode 100644
index 0000000000..b08a2cd012
--- /dev/null
+++ b/esmvaltool/diag_scripts/mpqb/mpqb_lineplot.py
@@ -0,0 +1,129 @@
+#!/usr/bin/env python
+"""MPQB lineplot diagnostic."""
+import logging
+import os
+from pprint import pformat
+
+import iris
+import matplotlib.dates as mdates
+import matplotlib.pyplot as plt
+
+import esmvaltool.diag_scripts.shared.iris_helpers as ih
+from esmvaltool.diag_scripts.mpqb.mpqb_utils import get_mpqb_cfg
+from esmvaltool.diag_scripts.shared import group_metadata, run_diagnostic
+from esmvaltool.diag_scripts.shared._base import (
+    ProvenanceLogger,
+    get_plot_filename,
+)
+
+logger = logging.getLogger(os.path.basename(__file__))
+
+
+def get_provenance_record(caption):
+    """Create a provenance record describing the diagnostic data and plot."""
+    record = {
+        'caption': caption,
+        'statistics': ['mean'],
+        'domains': ['user'],
+        'plot_type': 'lineplot',
+        'authors': [
+            'mueller_benjamin',
+            'crezee_bas',
+            'hassler_birgit',
+        ],
+        'projects': ['cmug'],
+        'references': [
+            'acknow_project',
+        ],
+    }
+    return record
+
+
+def main(cfg):
+    """Create lineplot."""
+    ylims = [cfg.pop('y0', None), cfg.pop('y1', None)]
+
+    # Get a description of the preprocessed data that we will use as input.
+    input_data = cfg['input_data'].values()
+
+    grouped_input_data = group_metadata(input_data, 'alias', sort='alias')
+
+    logger.info(
+        "Grouped and sorted input data by alias:"
+        "\n%s", pformat(grouped_input_data))
+
+    # In order to get the right line colors for MPQB soil moisture
+    # here we put ERA-Interim-Land at the end of the dictionary if
+    # it is included.
+    if 'ERA-Interim-Land' in grouped_input_data.keys():
+        grouped_input_data.move_to_end('ERA-Interim-Land')
+
+    plt.clf()
+    fig, (ax1, lax) = plt.subplots(nrows=2,
+                                   gridspec_kw={"height_ratios": [10, 1]},
+                                   figsize=(10, 5))
+
+    plt.sca(ax1)
+    for dataset in grouped_input_data:
+        dataset_cfg = grouped_input_data[dataset][0]
+        alias = dataset_cfg['alias']
+
+        logger.info("Opening dataset: %s", dataset)
+        cube = iris.load_cube(dataset_cfg['filename'])
+        if cube.coords('time', dim_coords=True):
+            ih.unify_time_coord(cube)
+
+        # Set default if not defined.
+        label = get_mpqb_cfg('datasetname', alias)
+        color = get_mpqb_cfg('datasetcolor', alias)
+
+        # iris.quickplot.plot(cube, label=label, color=color,
+        #                     linestyle='dotted')
+        # iris.quickplot.plot(cube, label=label, color=color,
+        #                     linestyle='dashed')
+        iris.quickplot.plot(cube, label=label, color=color)
+    plt.xticks(rotation=90)
+    # Add the zero line when plotting anomalies
+    if 'ano' in dataset_cfg['preprocessor']:
+        plt.axhline(y=0, linestyle=':', color='k')
+    plt.tight_layout()
+    # Time axis formatting
+    years = mdates.YearLocator()  # every year
+    years_fmt = mdates.DateFormatter('%Y')
+    ax1 = plt.gca()
+    ax1.xaxis.set_major_locator(years)
+    ax1.xaxis.set_major_formatter(years_fmt)
+    ax1.grid(True, which='major', axis='x')
+    ax1.set_ylim(ylims)
+    ax1.set_ylabel(f"{cube.var_name.upper()} ({cube.units})")
+    ax1.set_title(f"Time series of monthly mean {cube.var_name.upper()}")
+
+    h1, l1 = ax1.get_legend_handles_labels()
+    leg = lax.legend(h1, l1, borderaxespad=0, ncol=4, loc='center')
+    for legobj in leg.legendHandles:
+        legobj.set_linewidth(2.0)
+    lax.axis("off")
+
+    baseplotname = (f"lineplot_{dataset_cfg['variable_group']}_"
+                    f"{dataset_cfg['start_year']}-{dataset_cfg['end_year']}")
+
+    filename = get_plot_filename(baseplotname, cfg)
+    logger.info("Saving as %s", filename)
+    fig.savefig(filename, bbox_inches='tight')
+
+    caption = (
+        f"Time series of the domain average of "
+        f"{dataset_cfg['variable_group']} between "
+        f"{dataset_cfg['start_year']} and {dataset_cfg['end_year']}")
+
+    provenance_record = get_provenance_record(caption)
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(filename, provenance_record)
+
+    plt.close(fig)
+    logger.info("Finished!")
+
+
+if __name__ == '__main__':
+    with run_diagnostic() as config:
+        main(config)
diff --git a/esmvaltool/diag_scripts/mpqb/mpqb_lineplot_anncyc.py b/esmvaltool/diag_scripts/mpqb/mpqb_lineplot_anncyc.py
new file mode 100644
index 0000000000..d087499d60
--- /dev/null
+++ b/esmvaltool/diag_scripts/mpqb/mpqb_lineplot_anncyc.py
@@ -0,0 +1,116 @@
+#!/usr/bin/env python
+"""MPQB annual cycle lineplot diagnostic."""
+import logging
+import os
+from pprint import pformat
+
+import iris
+import matplotlib.pyplot as plt
+
+from esmvaltool.diag_scripts.mpqb.mpqb_utils import get_mpqb_cfg
+from esmvaltool.diag_scripts.shared import group_metadata, run_diagnostic
+from esmvaltool.diag_scripts.shared._base import (
+    ProvenanceLogger,
+    get_plot_filename,
+)
+
+logger = logging.getLogger(os.path.basename(__file__))
+
+
+def get_provenance_record(caption):
+    """Create a provenance record describing the diagnostic data and plot."""
+    record = {
+        'caption': caption,
+        'statistics': ['clim'],
+        'domains': ['user'],
+        'plot_type': 'lineplot',
+        'authors': [
+            'mueller_benjamin',
+            'crezee_bas',
+            'hassler_birgit',
+        ],
+        'projects': ['cmug'],
+        'references': [
+            'acknow_project',
+        ],
+    }
+    return record
+
+
+def main(cfg):
+    """Create lineplot."""
+    ylims = [cfg.pop('y0', None), cfg.pop('y1', None)]
+
+    # Get a description of the preprocessed data that we will use as input.
+    input_data = cfg['input_data'].values()
+
+    grouped_input_data = group_metadata(input_data, 'alias', sort='alias')
+
+    logger.info(
+        "Grouped and sorted input data by alias:"
+        "\n%s", pformat(grouped_input_data))
+
+    # In order to get the right line colors for MPQB soil moisture
+    # here we put ERA-Interim-Land at the end of the dictionary if
+    # it is included.
+    if 'ERA-Interim-Land' in grouped_input_data.keys():
+        grouped_input_data.move_to_end('ERA-Interim-Land')
+
+    plt.clf()
+    # fig = plt.figure(figsize=(10, 4))
+    fig = plt.figure(figsize=(6, 4))
+    ax1 = fig.add_subplot()
+
+    for dataset in grouped_input_data:
+        dataset_cfg = grouped_input_data[dataset][0]
+        alias = dataset_cfg['alias']
+
+        logger.info("Opening dataset: %s", dataset)
+        cube = iris.load_cube(dataset_cfg['filename'])
+
+        # Set default if not defined.
+        label = get_mpqb_cfg('datasetname', alias)
+        color = get_mpqb_cfg('datasetcolor', alias)
+
+        # iris.quickplot.plot(cube, label=label, color=color,
+        #                     linestyle='dotted')
+        # iris.quickplot.plot(cube, label=label, color=color,
+        #                     linestyle='dashed')
+        iris.quickplot.plot(cube, label=label, color=color)
+    plt.legend()
+    plt.tight_layout()
+    # Time axis formatting
+    ax1 = plt.gca()
+    ax1.set_xticks([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12])
+    ax1.set_xticklabels(['JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN',
+                         'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC'])
+    ax1.set_xlabel('month')
+    ax1.grid(True, which='major', axis='x')
+
+    ax1.set_ylim(ylims)
+    ax1.set_ylabel(f"{cube.var_name.upper()} ({cube.units})")
+    ax1.set_title(f"{cube.var_name.upper()} annual cycle")
+
+    baseplotname = (f"lineplot_{dataset_cfg['variable_group']}_"
+                    f"{dataset_cfg['start_year']}-{dataset_cfg['end_year']}")
+
+    filename = get_plot_filename(baseplotname, cfg)
+    logger.info("Saving as %s", filename)
+    fig.savefig(filename, bbox_inches='tight')
+
+    # Provenance
+    caption = (
+        f"Domain average annual cycle of {dataset_cfg['variable_group']} "
+        f"between {dataset_cfg['start_year']} and {dataset_cfg['end_year']}")
+
+    provenance_record = get_provenance_record(caption)
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(filename, provenance_record)
+
+    plt.close(fig)
+    logger.info("Finished!")
+
+
+if __name__ == '__main__':
+    with run_diagnostic() as config:
+        main(config)
diff --git a/esmvaltool/diag_scripts/mpqb/mpqb_lineplot_growthrate.py b/esmvaltool/diag_scripts/mpqb/mpqb_lineplot_growthrate.py
new file mode 100644
index 0000000000..90662be72b
--- /dev/null
+++ b/esmvaltool/diag_scripts/mpqb/mpqb_lineplot_growthrate.py
@@ -0,0 +1,131 @@
+#!/usr/bin/env python
+"""MPQB growth rate lineplot diagnostic."""
+import logging
+import os
+from pprint import pformat
+
+import iris
+import matplotlib.dates as mdates
+import matplotlib.pyplot as plt
+import numpy as np
+
+import esmvaltool.diag_scripts.shared.iris_helpers as ih
+from esmvaltool.diag_scripts.mpqb.mpqb_utils import get_mpqb_cfg
+from esmvaltool.diag_scripts.shared import group_metadata, run_diagnostic
+from esmvaltool.diag_scripts.shared._base import (
+    ProvenanceLogger,
+    get_plot_filename,
+)
+
+logger = logging.getLogger(os.path.basename(__file__))
+
+
+def get_provenance_record(caption):
+    """Create a provenance record describing the diagnostic data and plot."""
+    record = {
+        'caption': caption,
+        'statistics': ['diff'],
+        'domains': ['user'],
+        'plot_type': 'lineplot',
+        'authors': [
+            'mueller_benjamin',
+            'crezee_bas',
+            'hassler_birgit',
+        ],
+        'projects': ['cmug'],
+        'references': [
+            'acknow_project',
+        ],
+    }
+    return record
+
+
+def main(cfg):
+    """Create lineplot."""
+    # Limits for the y-axis
+    ylims = [-5., 15.]
+
+    # Get a description of the preprocessed data that we will use as input.
+ input_data = cfg['input_data'].values() + + grouped_input_data = group_metadata(input_data, 'alias', sort='alias') + + logger.info( + "Example of how to group and sort input data by standard_name:" + "\n%s", pformat(grouped_input_data)) + + plt.clf() + fig, (ax1, lax) = plt.subplots(nrows=2, + gridspec_kw={"height_ratios": [10, 1]}, + figsize=(10, 5)) + + plt.sca(ax1) + for dataset in grouped_input_data: + dataset_cfg = grouped_input_data[dataset][0] + alias = dataset_cfg['alias'] + + logger.info("Opening dataset: %s", dataset) + cube = iris.load_cube(dataset_cfg['filename']) + if cube.coords('time', dim_coords=True): + ih.unify_time_coord(cube) + + # calculate the growth rates + diff_data = np.diff(cube.data) + cube = cube[1:] + cube.data = diff_data + + # Set default if not defined. + label = get_mpqb_cfg('datasetname', alias) + color = get_mpqb_cfg('datasetcolor', alias) + + # iris.quickplot.plot(cube, label=label, color=color, + # linestyle='dotted') + # iris.quickplot.plot(cube, label=label, color=color, + # linestyle='dashed') + iris.quickplot.plot(cube, label=label, color=color) + plt.xticks(rotation=90) + # Add the zero line + plt.axhline(y=0, linestyle=':', color='k') + plt.tight_layout() + + # Time axis formatting + years = mdates.YearLocator() # every year + years_fmt = mdates.DateFormatter('%Y') + ax1 = plt.gca() + ax1.xaxis.set_major_locator(years) + ax1.xaxis.set_major_formatter(years_fmt) + ax1.grid(True, which='major', axis='x') + + ax1.set_ylim(ylims) + ax1.set_ylabel(f"Change in {cube.var_name.upper()} ({cube.units})") + ax1.set_title(f"Time series of annual {cube.var_name.upper()} growth rate") + + h1, l1 = ax1.get_legend_handles_labels() + leg = lax.legend(h1, l1, borderaxespad=0, ncol=4, loc='center') + for legobj in leg.legendHandles: + legobj.set_linewidth(2.0) + lax.axis("off") + + baseplotname = (f"lineplot_{dataset_cfg['variable_group']}_" + f"{dataset_cfg['start_year']}-{dataset_cfg['end_year']}") + + filename = get_plot_filename(baseplotname, cfg) + logger.info("Saving as %s", filename) + fig.savefig(filename, bbox_inches='tight') + + caption = ( + f"Domain average annual growth rates of " + f"{dataset_cfg['variable_group']} " + f"between {dataset_cfg['start_year']} and {dataset_cfg['end_year']}") + + provenance_record = get_provenance_record(caption) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(filename, provenance_record) + + plt.close(fig) + logger.info("Finished!") + + +if __name__ == '__main__': + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/mpqb/mpqb_utils.py b/esmvaltool/diag_scripts/mpqb/mpqb_utils.py new file mode 100644 index 0000000000..7a7290fefa --- /dev/null +++ b/esmvaltool/diag_scripts/mpqb/mpqb_utils.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python +"""Python example diagnostic.""" +import os + +import yaml + + +# Get metrics dict, get dataset color(s) or get datasetnames +def get_mpqb_cfg(cfgtype, cfgkey): + """cfgtype: colormap, datasetcolor, datasetname""" + # First read cfg file + cfg_filename = os.path.join(os.path.split(__file__)[0], + 'mpqb_cfg_xch4.yml') + with open(cfg_filename, 'r', encoding="utf-8") as handle: + mpqb_cfg = yaml.safe_load(handle) + # Defaults to specified defaults in yml file + if cfgtype == 'colormap': + if cfgkey in mpqb_cfg['colormaps']: + return mpqb_cfg['colormaps'][cfgkey] + return mpqb_cfg['colormaps']['default'] + # Defaults to alias (provided as cfgkey) + if cfgtype == 'datasetname': + if cfgkey in mpqb_cfg['datasetnames']: + return 
mpqb_cfg['datasetnames'][cfgkey] + return cfgkey + # Defaults to black. + if cfgtype == 'datasetcolor': + if cfgkey in mpqb_cfg['datasetcolors']: + return mpqb_cfg['datasetcolors'][cfgkey] + return 'k' + + return None diff --git a/esmvaltool/diag_scripts/ocean/diagnostic_maps.py b/esmvaltool/diag_scripts/ocean/diagnostic_maps.py index 586afd8588..00896279c4 100644 --- a/esmvaltool/diag_scripts/ocean/diagnostic_maps.py +++ b/esmvaltool/diag_scripts/ocean/diagnostic_maps.py @@ -18,7 +18,8 @@ extract_levels: levels: [100., ] scheme: linear_extrap - time_average: + climate_statistics: + operator: mean Note that this recipe may not function on machines with no access to the @@ -39,18 +40,19 @@ Author: Lee de Mora (PML) ledm@pml.ac.uk """ +import itertools import logging import os import sys -from itertools import product -import matplotlib.pyplot as plt +import cartopy import iris import iris.quickplot as qplt -import cartopy +import matplotlib.pyplot as plt from esmvaltool.diag_scripts.ocean import diagnostic_tools as diagtools from esmvaltool.diag_scripts.shared import run_diagnostic +from esmvaltool.diag_scripts.shared import ProvenanceLogger # This part sends debug statements to stdout logger = logging.getLogger(os.path.basename(__file__)) @@ -113,20 +115,31 @@ def make_map_plots( path = diagtools.folder( cfg['plot_dir']) + os.path.basename(filename).replace( '.nc', '_map_' + str(layer_index) + image_extention) + caption = 'Multimodel map of ' + title else: path = diagtools.get_image_path( cfg, metadata, suffix='map_' + str(layer_index) + image_extention, ) + caption = 'Map of ' + title # Saving files: - if cfg['write_plots']: + logger.info('Saving plots to %s', path) + plt.savefig(path) + plt.close() - logger.info('Saving plots to %s', path) - plt.savefig(path) + provenance_record = diagtools.prepare_provenance_record( + cfg, + caption=caption, + statistics=['mean'], + domain=['global'], + plot_type=['map'], + ancestors=[filename], + ) - plt.close() + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(path, provenance_record) def make_map_contour( @@ -209,7 +222,7 @@ def make_map_contour( title = ' '.join([metadata['dataset'], metadata['long_name']]) depth_units = str(cube_layer.coords('depth')[0].units) if layer: - title = '{} ({} {})'.format(title, layer, depth_units) + title = f'{title} ({layer} {depth_units})' plt.title(title) # Determine image filename: @@ -226,12 +239,22 @@ def make_map_contour( ) # Saving files: - if cfg['write_plots']: - logger.info('Saving plots to %s', path) - plt.savefig(path) - + logger.info('Saving plots to %s', path) + plt.savefig(path) plt.close() + provenance_record = diagtools.prepare_provenance_record( + cfg, + caption=title, + statistics=['mean'], + domain=['global'], + plot_type=['map'], + ancestors=[filename], + ) + + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(path, provenance_record) + def multi_model_contours( cfg, @@ -268,7 +291,7 @@ def multi_model_contours( thresholds = diagtools.load_thresholds(cfg, metadata) # Make a plot for each layer and each threshold - for layer, threshold in product(layers, thresholds): + for layer, threshold in itertools.product(layers, thresholds): title = '' z_units = '' @@ -335,19 +358,18 @@ def multi_model_contours( plt.legend(loc='best') # Saving files: - if cfg['write_plots']: - path = diagtools.get_image_path( - cfg, - metadata[filename], - prefix='MultipleModels_', - suffix='_'.join(['_contour_map_', - str(threshold), - str(layer) + image_extention]), - 
metadata_id_list=[
-                'field', 'short_name', 'preprocessor', 'diagnostic',
-                'start_year', 'end_year'
-            ],
-        )
+        path = diagtools.get_image_path(
+            cfg,
+            metadata[filename],
+            prefix='MultipleModels_',
+            suffix='_'.join(['_contour_map_',
+                             str(threshold),
+                             str(layer) + image_extention]),
+            metadata_id_list=[
+                'field', 'short_name', 'preprocessor', 'diagnostic',
+                'start_year', 'end_year'
+            ],
+        )

         # Resize and add legend outside thew axes.
         plt.gcf().set_size_inches(9., 6.)
@@ -358,6 +380,18 @@
         plt.savefig(path)
         plt.close()

+        provenance_record = diagtools.prepare_provenance_record(
+            cfg,
+            caption=title,
+            statistics=['mean'],
+            domain=['global'],
+            plot_type=['map'],
+            ancestors=metadata.keys(),
+        )
+
+        with ProvenanceLogger(cfg) as provenance_logger:
+            provenance_logger.log(path, provenance_record)
+

 def main(cfg):
     """
diff --git a/esmvaltool/diag_scripts/ocean/diagnostic_maps_multimodel.py b/esmvaltool/diag_scripts/ocean/diagnostic_maps_multimodel.py
new file mode 100644
index 0000000000..400309e069
--- /dev/null
+++ b/esmvaltool/diag_scripts/ocean/diagnostic_maps_multimodel.py
@@ -0,0 +1,418 @@
+"""Model vs Observations maps Diagnostic.
+
+Diagnostic to produce comparison maps of model(s) and data (if provided).
+If observations are not provided, data maps for each model are drawn.
+
+The top row of the image shows the observational data; the following
+subplot(s) show the comparison for each model, in the order given in the
+recipe.
+
+Note that this diagnostic assumes that the preprocessors do the bulk of the
+hard work, and that the cube received by this diagnostic (via the settings.yml
+and metadata.yml files) has no time component, a small number of depth layers,
+and latitude and longitude coordinates.
+
+An appropriate preprocessor for a 2D + time field would be::
+
+  preprocessors:
+    prep_map:
+      time_average:
+      regrid:
+        target_grid: 1x1
+        scheme: linear
+
+This tool is part of the ocean diagnostic tools package in the ESMValTool.
+
+Author: lovato_tomas
+"""
+import logging
+import os
+from pprint import pformat
+
+import cartopy.crs as ccrs
+import iris
+from matplotlib import gridspec
+import matplotlib.pyplot as plt
+import matplotlib.ticker as mticker
+import numpy as np
+from mpl_toolkits.axes_grid1.inset_locator import inset_axes
+
+from esmvaltool.diag_scripts.ocean import diagnostic_tools as diagtools
+from esmvaltool.diag_scripts.shared import run_diagnostic
+from esmvaltool.diag_scripts.shared._base import ProvenanceLogger
+
+logger = logging.getLogger(os.path.basename(__file__))
+
+
+def get_provenance_record(cfg, attributes, obsname, ancestor_files):
+    """Create a provenance record describing the diagnostic data and plot."""
+    if obsname != '':
+        caption = (
+            "{long_name} bias for average between {start_year} and {end_year}".
+            format(**attributes) + " against " + obsname + " observations.")
+    else:
+        caption = (
+            "Average {long_name} between {start_year} and {end_year} ".format(
+                **attributes))
+
+    provenance_record = diagtools.prepare_provenance_record(
+        cfg,
+        caption=caption,
+        statistics=['mean'],
+        domain=['global'],
+        plot_type=['map'],
+        ancestors=ancestor_files,
+    )
+
+    return provenance_record
+
+
+def add_map_plot(fig, axs, plot_cube, cols):
+    """Add a map in the current pyplot subplot.
+ + Parameters + ---------- + fig: object + The matplotlib.pyplot Figure object + axs: object + The matplotlib.pyplot Axes object + plot_cube: dictionary + dictionary with data for plot defined in select_cubes + cols: integer + Number of columns in the multipanel plot + """ + contour_lev = 13 + nspace = np.linspace(plot_cube['range'][0], + plot_cube['range'][1], + contour_lev, + endpoint=True) + iris.plot.contourf(plot_cube['cube'], + nspace, + cmap=plt.cm.get_cmap(plot_cube['cmap']), + extend=plot_cube['extend']) + + axs.coastlines() + gls = axs.gridlines(draw_labels=False, color='black', alpha=0.4) + gls.ylocator = mticker.MaxNLocator(7) + axs.set_title(plot_cube['title'], fontweight="bold", fontsize='large') + + if plot_cube['hascbar']: + if cols == 0: + ratio = axs.get_xlim() + axs.get_ylim() + ratio = (ratio[3] - ratio[2]) / (ratio[1] - ratio[0]) + width = "200%" if ratio > 1 else "100%" + + bba = (0., -0.1, 1, 1) + axins = inset_axes( + axs, + width=width, + height="6%", + loc='lower center', + bbox_to_anchor=bba, + bbox_transform=axs.transAxes, + borderpad=0, + ) + else: + axins = fig.add_axes([0.25, 0.04, 0.5, 0.02]) + + cformat = '%.1f' + if abs(nspace[1] - nspace[0]) < 1: + cformat = int(np.ceil(-np.log10(abs(nspace[1] - nspace[0])))) + cformat = '%.' + str(cformat) + 'f' + elif max(nspace) > 100.: + cformat = '%.0f' + cbar = plt.colorbar(orientation='horizontal', + cax=axins, + format=cformat) + cbar.set_ticks(nspace[::2]) + + +def make_subplots(cubes, layout, obsname, fig, projection): + """ + Realize subplots using cubes input data. + + Parameters + ---------- + cubes: dict + dictionary with data for plot defined in select_cubes + layout : list + subplot rows x cols organization + obsname: string + Observation data name + fig: object + The matplotlib.pyplot Figure object + projection: string + Name of Cartopy projection + """ + proj = getattr(ccrs, projection)(central_longitude=0) + gsc = gridspec.GridSpec(layout[0], layout[1]) + row = 0 + col = 0 + for thename in cubes: + axs = plt.subplot(gsc[row, col], projection=proj) + add_map_plot(fig, axs, cubes[thename], col) + # next row & column indexes + row = row + 1 + if row == layout[0]: + row = 1 if obsname != '' else 0 + col = col + 1 + + # Adjust subplots size & position + plt.subplots_adjust(top=0.92, + bottom=0.08, + left=0.05, + right=0.95, + hspace=0.2, + wspace=0.15) + + # Vertically detach OBS plot and center + if obsname != '': + axs = fig.axes + box = axs[0].get_position() + shift = box.y0 * 0.05 + box.y0 = box.y0 + shift + box.y1 = box.y1 + shift + shift = box.x1 - box.x0 + box.x0 = 0.5 - shift * 0.5 + box.x1 = box.x0 + shift + axs[0].set_position(box) + + +def load_cubes(filenames, obs_filename, metadata): + """Organize data provided by recipe. + + Parameters + ---------- + filenames: dict + input files listed in the recipe + obs_filename: str + the preprocessed observations file. + metadata: dict + the input files dictionary + """ + # check if observations are provided + if obs_filename: + obsname = metadata[obs_filename]['dataset'] + filenames.remove(obs_filename) + filenames.insert(0, obs_filename) + else: + obsname = '' + logger.info('Observations not provided. 
Plot each model data.') + + # Load the data for each layer as a separate cube + layers = {} + cubes = {} + for thename in filenames: + logger.debug('loading: \t%s', thename) + cube = iris.load_cube(thename) + cube.coord('latitude').long_name = "Latitude" + cube.coord('longitude').long_name = "Longitude" + cube = diagtools.bgc_units(cube, metadata[thename]['short_name']) + model_name = metadata[thename]['dataset'] + cubes[model_name] = diagtools.make_cube_layer_dict(cube) + for layer in cubes[model_name]: + layers[layer] = True + + logger.debug('layers: %s', layers) + logger.debug('cubes: %s', ', '.join(cubes.keys())) + + return cubes, layers, obsname + + +def select_cubes(cubes, layer, obsname, metadata): + """Create a dictionary of input layer data & metadata to plot. + + Parameters + ---------- + cubes: list + Input data iris cubes + layer: list + Data level to be plotted + obsname: string + Observation data name + metadata: dict + the first input file dictionary + """ + plot_cubes = {} + list_cubes = [] + + for thename in cubes: + plot_cubes[thename] = { + 'cube': cubes[thename][layer], + 'title': thename, + 'cmap': 'viridis', + 'range': None, + 'extend': 'neither', + 'hascbar': False + } + if (obsname != '') & (thename != obsname): + plot_cubes[thename] = { + 'cube': cubes[thename][layer] - cubes[obsname][layer], + 'title': thename, + 'cmap': 'RdBu_r', + 'range': None, + 'extend': 'neither', + 'hascbar': False + } + if thename in [obsname, list(cubes.keys())[-1]]: + plot_cubes[thename]['hascbar'] = True + + if thename == list(cubes.keys())[0]: + cube = plot_cubes[thename]['cube'] + plot_cubes[thename][ + 'title'] = thename + ' (' + cube.var_name + ') [' + str( + cube.units) + ']' + list_cubes.append(plot_cubes[thename]['cube']) + + # get cubes data ranges + mrange = diagtools.get_cube_range(list_cubes) + if obsname != '': + mrange = diagtools.get_cube_range([list_cubes[0]]) + drange = diagtools.get_cube_range(list_cubes[1:]) + + # get user defined plot ranges + user_range = {'maps': None, 'diff': None} + if 'maps_range' in metadata: + user_range['maps'] = metadata['maps_range'] + if 'diff_range' in metadata: + user_range['diff'] = metadata['diff_range'] + + # define contour levels using ranges + for thename in cubes: + if user_range['maps']: + mrange = user_range['maps'] + plot_cubes[thename]['extend'] = 'both' + if (obsname != '') & (thename != obsname): + mrange = drange + if user_range['diff']: + mrange = user_range['diff'] + plot_cubes[thename]['extend'] = 'both' + if mrange[0] >= 0.: + plot_cubes[thename]['cmap'] = 'plasma' + plot_cubes[thename]['range'] = mrange + + return plot_cubes + + +def make_plots(cfg, metadata, obsname): + """Produce multiple panel comparison maps of model(s) and data (if + provided). + + If observations are not provided, plots of each model data are drawn. + Put on top row observational data (if available) and in following subplots + model difference (or data) organized in rows/cols using row/col layout. + + Parameters + ---------- + cfg: dict + the opened global config dictionary, passed by ESMValTool. + metadata: dict + the input files dictionary + obsname: str + the preprocessed observations file. 
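+
+    Notes
+    -----
+    When observations are provided they occupy an extra top row, so the
+    grid actually drawn is (rows + 1) x cols relative to layout_rowcol.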
+ """ + logger.debug('make_plots') + + filenames = list(metadata.keys()) + + # plot setting + layout = metadata[filenames[0]]['layout_rowcol'] + projection = 'Robinson' + if 'plot_ccrs' in metadata[filenames[0]]: + projection = metadata[filenames[0]]['plot_ccrs'] + + # load input data + [cubes, layers, obsname] = load_cubes(filenames, obsname, metadata) + + if obsname != '': + layout[0] = layout[0] + 1 + else: + logger.info('Observations not provided. Plot each model data.') + + if len(filenames) > (layout[0] * layout[1]): + raise ValueError( + 'Number of inputfiles is larger than layout scheme (rows x cols). ' + 'Revise layout_rowcol size in recipe.') + + # Make a plot for each layer + for layer in layers: + + fig = plt.figure() + fig.set_size_inches(layout[1] * 4., layout[0] * 2. + 2.) + + # select cubes to plot + plot_cubes = select_cubes(cubes, layer, obsname, + metadata[filenames[0]]) + + # create individual subplot + make_subplots(plot_cubes, layout, obsname, fig, projection) + + # Determine image filename + plot_file = metadata[filenames[0]]['short_name'] + layer_lab = str(np.int32(layer)) if layer != '' else '' + if obsname != '': + plot_file = [ + 'multimodel_vs', obsname, plot_file, layer_lab, 'maps' + ] + else: + plot_file = ['multimodel', plot_file, layer_lab, 'maps'] + plot_file = diagtools.folder(cfg['plot_dir']) + '_'.join( + plot_file) + diagtools.get_image_format(cfg) + + # Saving file: + logger.info('Saving plot to %s', plot_file) + plt.savefig(plot_file, dpi=200) + + # Provenance + provenance_record = get_provenance_record(cfg, + metadata[filenames[-1]], + obsname, filenames) + logger.info("Recording provenance of %s:\n%s", plot_file, + pformat(provenance_record)) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(plot_file, provenance_record) + + plt.close() + + +def main(cfg): + """Load the config file, and send it to the plot maker. + + Parameters + ---------- + cfg: dict + the opened global config dictionairy, passed by ESMValTool. 
+ """ + for index, metadata_filename in enumerate(cfg['input_files']): + logger.info( + 'metadata filename:\t%s, %s', + index, + metadata_filename, + ) + metadatas = diagtools.get_input_files(cfg, index=index) + + model_type = 'observational_dataset' + logger.debug( + 'model_type: %s, %s', + index, + model_type, + ) + logger.debug( + 'metadatas: %s, %s', + index, + metadatas, + ) + obs_filename = None + if model_type in cfg.keys(): + obs_filename = diagtools.match_model_to_key( + 'observational_dataset', cfg[model_type], metadatas) + if not os.path.isfile(obs_filename): + logger.info('OBS file not found %s', obs_filename) + + make_plots(cfg, metadatas, obs_filename) + + logger.info('Success') + + +if __name__ == '__main__': + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/ocean/diagnostic_maps_quad.py b/esmvaltool/diag_scripts/ocean/diagnostic_maps_quad.py index ce762f7e13..bfb50f66a4 100644 --- a/esmvaltool/diag_scripts/ocean/diagnostic_maps_quad.py +++ b/esmvaltool/diag_scripts/ocean/diagnostic_maps_quad.py @@ -27,7 +27,8 @@ extract_levels: levels: [100., ] scheme: linear_extrap - time_average: + climate_statistics: + operator: mean This diagnostic also requires the ``exper_model``, ``exper_model`` and ``observational_dataset`` keys in the recipe:: @@ -60,6 +61,7 @@ from esmvaltool.diag_scripts.ocean import diagnostic_tools as diagtools from esmvaltool.diag_scripts.shared import run_diagnostic +from esmvaltool.diag_scripts.shared import ProvenanceLogger # This part sends debug statements to stdout logger = logging.getLogger(os.path.basename(__file__)) @@ -182,12 +184,22 @@ def multi_model_maps( path = path.replace(' ', '') + image_extention # Saving files: - if cfg['write_plots']: - logger.info('Saving plots to %s', path) - plt.savefig(path) - + logger.info('Saving plots to %s', path) + plt.savefig(path) plt.close() + provenance_record = diagtools.prepare_provenance_record( + cfg, + caption=f'Quadmap models comparison against {obs}', + statistics=['mean', 'diff', ], + domain=['global'], + plot_type=['map'], + ancestors=list(input_files.keys()), + ) + + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(path, provenance_record) + def main(cfg): """ diff --git a/esmvaltool/diag_scripts/ocean/diagnostic_model_vs_obs.py b/esmvaltool/diag_scripts/ocean/diagnostic_model_vs_obs.py index 4eab99b76b..c75f32a59b 100644 --- a/esmvaltool/diag_scripts/ocean/diagnostic_model_vs_obs.py +++ b/esmvaltool/diag_scripts/ocean/diagnostic_model_vs_obs.py @@ -23,14 +23,15 @@ and metadata.yml files) has no time component, a small number of depth layers, and a latitude and longitude coordinates. 
-An approproate preprocessor for a 3D + time field would be:: +An appropriate preprocessor for a 3D + time field would be:: preprocessors: prep_map: extract_levels: levels: [100., ] scheme: linear_extrap - time_average: + climate_statistics: + operator: mean regrid: target_grid: 1x1 scheme: linear @@ -42,31 +43,33 @@ ledm@pml.ac.uk """ import logging +import math import os import sys -import math - -from matplotlib import pyplot -from matplotlib.colors import LogNorm -import matplotlib.pyplot as plt import iris import iris.quickplot as qplt +import matplotlib.pyplot as plt import numpy as np +from matplotlib.colors import LogNorm from scipy.stats import linregress from esmvaltool.diag_scripts.ocean import diagnostic_tools as diagtools -from esmvaltool.diag_scripts.shared import run_diagnostic +from esmvaltool.diag_scripts.shared import ProvenanceLogger, run_diagnostic # This part sends debug statements to stdout logger = logging.getLogger(os.path.basename(__file__)) logging.getLogger().addHandler(logging.StreamHandler(sys.stdout)) -def add_map_subplot(subplot, cube, nspace, title='', - cmap='', extend='neither', log=False): - """ - Add a map subplot to the current pyplot figure. +def add_map_subplot(subplot, + cube, + nspace, + title='', + cmap='', + extend='neither', + log=False): + """Add a map subplot to the current pyplot figure. Parameters ---------- @@ -89,25 +92,23 @@ def add_map_subplot(subplot, cube, nspace, title='', plt.subplot(subplot) logger.info('add_map_subplot: %s', subplot) if log: - qplot = qplt.contourf( - cube, - nspace, - linewidth=0, - cmap=plt.cm.get_cmap(cmap), - norm=LogNorm(), - zmin=nspace.min(), - zmax=nspace.max()) + qplot = qplt.contourf(cube, + nspace, + linewidth=0, + cmap=plt.cm.get_cmap(cmap), + norm=LogNorm(), + zmin=nspace.min(), + zmax=nspace.max()) qplot.colorbar.set_ticks([0.1, 1., 10.]) else: - qplot = iris.plot.contourf( - cube, - nspace, - linewidth=0, - cmap=plt.cm.get_cmap(cmap), - extend=extend, - zmin=nspace.min(), - zmax=nspace.max()) - cbar = pyplot.colorbar(orientation='horizontal') + qplot = iris.plot.contourf(cube, + nspace, + linewidth=0, + cmap=plt.cm.get_cmap(cmap), + extend=extend, + zmin=nspace.min(), + zmax=nspace.max()) + cbar = plt.colorbar(orientation='horizontal') cbar.set_ticks( [nspace.min(), (nspace.max() + nspace.min()) / 2., nspace.max()]) @@ -116,13 +117,8 @@ def add_map_subplot(subplot, cube, nspace, title='', plt.title(title) -def make_model_vs_obs_plots( - cfg, - metadata, - model_filename, - obs_filename): - """ - Make a figure showing four maps and the other shows a scatter plot. +def make_model_vs_obs_plots(cfg, metadata, model_filename, obs_filename): + """Make a figure showing four maps and the other shows a scatter plot. The four pane image is a latitude vs longitude figures showing: @@ -141,7 +137,6 @@ def make_model_vs_obs_plots( the preprocessed model file. obs_filename: str the preprocessed observations file. 
- """ filenames = {'model': model_filename, 'obs': obs_filename} logger.debug('make_model_vs_obs_plots filenames: %s', filenames) @@ -152,7 +147,7 @@ def make_model_vs_obs_plots( cubes = {} for model_type, input_file in filenames.items(): logger.debug('loading: \t%s, \t%s', model_type, input_file) - cube = iris.load_cube(input_file) + cube = iris.util.squeeze(iris.load_cube(input_file)) cube = diagtools.bgc_units(cube, metadata[input_file]['short_name']) cubes[model_type] = diagtools.make_cube_layer_dict(cube) for layer in cubes[model_type]: @@ -169,8 +164,8 @@ def make_model_vs_obs_plots( long_name = cubes['model'][list(layers.keys())[0]].long_name units = str(cubes['model'][list(layers.keys())[0]].units) - # Load image format extention - image_extention = diagtools.get_image_format(cfg) + # Load image format extension + image_extension = diagtools.get_image_format(cfg) # Make a plot for each layer for layer in layers: @@ -198,55 +193,73 @@ def make_model_vs_obs_plots( cube224.data = np.ma.clip(cube224.data, 0.1, 10.) n_points = 12 - linspace12 = np.linspace( - zrange12[0], zrange12[1], n_points, endpoint=True) - linspace3 = np.linspace( - zrange3[0], zrange3[1], n_points, endpoint=True) + linspace12 = np.linspace(zrange12[0], + zrange12[1], + n_points, + endpoint=True) + linspace3 = np.linspace(zrange3[0], + zrange3[1], + n_points, + endpoint=True) logspace4 = np.logspace(-1., 1., 12, endpoint=True) # Add the sub plots to the figure. - add_map_subplot( - 221, cube221, linspace12, cmap='viridis', title=model, - extend=extend) - add_map_subplot( - 222, cube222, linspace12, cmap='viridis', - title=' '.join([obs]), - extend=extend) - add_map_subplot( - 223, - cube223, - linspace3, - cmap='bwr', - title=' '.join([model, 'minus', obs]), - extend=extend) + add_map_subplot(221, + cube221, + linspace12, + cmap='viridis', + title=model, + extend=extend) + add_map_subplot(222, + cube222, + linspace12, + cmap='viridis', + title=' '.join([obs]), + extend=extend) + add_map_subplot(223, + cube223, + linspace3, + cmap='bwr', + title=' '.join([model, 'minus', obs]), + extend=extend) if np.min(zrange12) > 0.: - add_map_subplot( - 224, - cube224, - logspace4, - cmap='bwr', - title=' '.join([model, 'over', obs]), - log=True) - + add_map_subplot(224, + cube224, + logspace4, + cmap='bwr', + title=' '.join([model, 'over', obs]), + log=True) + + caption = f'{long_name} [{units}]' # Add overall title - fig.suptitle(long_name + ' [' + units + ']', fontsize=14) + fig.suptitle(caption, fontsize=14) # Determine image filename: fn_list = ['model_vs_obs', long_name, model, obs, str(layer), 'maps'] path = diagtools.folder(cfg['plot_dir']) + '_'.join(fn_list) - path = path.replace(' ', '') + image_extention + path = path.replace(' ', '') + image_extension # Saving files: - if cfg['write_plots']: - logger.info('Saving plots to %s', path) - plt.savefig(path, dpi=200) + logger.info('Saving plots to %s', path) + plt.savefig(path, dpi=200) plt.close() + provenance_record = diagtools.prepare_provenance_record( + cfg, + caption=caption, + statistics=['mean', 'clim', 'diff'], + domain=['global'], + plot_type=['map'], + ancestors=[model_filename, obs_filename], + ) + + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(path, provenance_record) + def rounds_sig(value, sig=3): - """ - Round a float to a specific number of sig. figs. & return it as a string. + """Round a float to sig significant digits & return it as a string. 
Parameters ---------- @@ -259,14 +272,13 @@ def rounds_sig(value, sig=3): ---------- str: The rounded output string. - """ if value == 0.: return str(0.) if value < 0.: value = abs(value) - return str( - -1. * round(value, sig - int(math.floor(math.log10(value))) - 1)) + return str(-1. * + round(value, sig - int(math.floor(math.log10(value))) - 1)) return str(round(value, sig - int(math.floor(math.log10(value))) - 1)) @@ -276,8 +288,7 @@ def add_linear_regression(plot_axes, showtext=True, add_diagonal=False, extent=None): - """ - Add a straight line fit to an axis. + """Add a straight line fit to an axis. Parameters ---------- @@ -297,21 +308,18 @@ def add_linear_regression(plot_axes, beta1, beta0, r_value, p_value, std_err = linregress(arr_x, arr_y) texts = [ r'$\^\beta_0$ = ' + rounds_sig(beta0), - r'$\^\beta_1$ = ' + rounds_sig(beta1), - r'R = ' + rounds_sig(r_value), - r'P = ' + rounds_sig(p_value), - r'N = ' + str(int(len(arr_x))) + r'$\^\beta_1$ = ' + rounds_sig(beta1), r'R = ' + rounds_sig(r_value), + r'P = ' + rounds_sig(p_value), r'N = ' + str(int(len(arr_x))) ] thetext = '\n'.join(texts) if showtext: - pyplot.text( - 0.04, - 0.96, - thetext, - horizontalalignment='left', - verticalalignment='top', - transform=plot_axes.transAxes) + plt.text(0.04, + 0.96, + thetext, + horizontalalignment='left', + verticalalignment='top', + transform=plot_axes.transAxes) if extent is None: x_values = np.arange(arr_x.min(), arr_x.max(), @@ -328,22 +336,17 @@ def add_linear_regression(plot_axes, x_values = np.ma.masked_where(mask, x_values) y_values = np.ma.masked_where(mask, y_values) - pyplot.plot(x_values, y_values, 'k') + plt.plot(x_values, y_values, 'k') if add_diagonal: - axis = pyplot.gca().axis() + axis = plt.gca().axis() step = (max(axis) - min(axis)) / 100. one_to_one = np.arange(min(axis), max(axis) + step, step) - pyplot.plot(one_to_one, one_to_one, 'k--') + plt.plot(one_to_one, one_to_one, 'k--') -def make_scatter( - cfg, - metadata, - model_filename, - obs_filename): - """ - Makes Scatter plots of model vs observational data. +def make_scatter(cfg, metadata, model_filename, obs_filename): + """Makes Scatter plots of model vs observational data. Make scatter plot showing the matched model and observational data with the model data as the x-axis coordinate and the observational data as the @@ -361,7 +364,6 @@ def make_scatter( obs_filename: str the preprocessed observations file. """ - filenames = {'model': model_filename, 'obs': obs_filename} logger.debug('make_model_vs_obs_plots: \t%s', filenames) # #### @@ -386,8 +388,8 @@ def make_scatter( long_name = cubes['model'][list(layers.keys())[0]].long_name - # Load image format extention - image_extention = diagtools.get_image_format(cfg) + # Load image format extension + image_extension = diagtools.get_image_format(cfg) # Make a plot for each layer for layer in layers: @@ -396,10 +398,10 @@ def make_scatter( fig.set_size_inches(7, 6) # Create the cubes - model_data = np.ma.array(cubes['model'][layer].data) - obs_data = np.ma.array(cubes['obs'][layer].data) + model_data = cubes['model'][layer].data.squeeze() + obs_data = cubes['obs'][layer].data.squeeze() - mask = model_data.mask + obs_data.mask + mask = np.ma.getmask(model_data) + np.ma.getmask(obs_data) model_data = np.ma.masked_where(mask, model_data).compressed() obs_data = np.ma.masked_where(mask, obs_data).compressed() @@ -414,7 +416,7 @@ def make_scatter( logger.info('Skip scatter for %s. 
Min is < 0', long_name) return - pyplot.hexbin( + plt.hexbin( model_data, obs_data, xscale=x_scale, @@ -422,25 +424,24 @@ def make_scatter( bins='log', # extent=np.log10(plotrange), gridsize=50, - cmap=pyplot.get_cmap(colours), + cmap=plt.get_cmap(colours), mincnt=0) - cbar = pyplot.colorbar() + cbar = plt.colorbar() cbar.set_label('log10(N)') - pyplot.gca().set_aspect("equal") - pyplot.axis(plotrange) + plt.gca().set_aspect("equal") + plt.axis(plotrange) - add_linear_regression( - pyplot.gca(), - model_data, - obs_data, - showtext=True, - add_diagonal=True, - extent=plotrange) + add_linear_regression(plt.gca(), + model_data, + obs_data, + showtext=True, + add_diagonal=True, + extent=plotrange) - pyplot.title(long_name) - pyplot.xlabel(model) - pyplot.ylabel(obs) + plt.title(long_name) + plt.xlabel(model) + plt.ylabel(obs) # Determine image filename: fn_list = [ @@ -448,25 +449,34 @@ def make_scatter( str(layer), 'scatter' ] path = diagtools.folder(cfg['plot_dir']) + '_'.join(fn_list) - path = path.replace(' ', '') + image_extention + path = path.replace(' ', '') + image_extension # Saving files: - if cfg['write_plots']: - logger.info('Saving plots to %s', path) - plt.savefig(path) + logger.info('Saving plots to %s', path) + plt.savefig(path) plt.close() + provenance_record = diagtools.prepare_provenance_record( + cfg, + caption=long_name, + statistics=['mean', 'clim', 'diff'], + domain=['global'], + plot_type=['scatter'], + ancestors=[model_filename, obs_filename], + ) + + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(path, provenance_record) + def main(cfg): - """ - Load the config file, and send it to the plot maker. + """Load the config file, and send it to the plot maker. Parameters ---------- cfg: dict the opened global config dictionairy, passed by ESMValTool. - """ for index, metadata_filename in enumerate(cfg['input_files']): logger.info( diff --git a/esmvaltool/diag_scripts/ocean/diagnostic_profiles.py b/esmvaltool/diag_scripts/ocean/diagnostic_profiles.py index 35d38b583d..82a9902cb3 100644 --- a/esmvaltool/diag_scripts/ocean/diagnostic_profiles.py +++ b/esmvaltool/diag_scripts/ocean/diagnostic_profiles.py @@ -22,9 +22,9 @@ lat2: 30. z_min: 0. z_max: 3000. 
- average_region: - coord1: longitude - coord2: latitude + area_statistics: + operator: mean + In order to add an observational dataset to the profile plot, the following arguments are needed in the diagnostic script:: @@ -51,11 +51,14 @@ import numpy as np import iris +import iris.coord_categorisation +import iris.exceptions import iris.quickplot as qplt import matplotlib.pyplot as plt from esmvaltool.diag_scripts.ocean import diagnostic_tools as diagtools from esmvaltool.diag_scripts.shared import run_diagnostic +from esmvaltool.diag_scripts.shared._base import ProvenanceLogger # This part sends debug statements to stdout logger = logging.getLogger(os.path.basename(__file__)) @@ -121,10 +124,15 @@ def make_profiles_plots( cube = iris.load_cube(filename) cube = diagtools.bgc_units(cube, metadata['short_name']) - raw_times = diagtools.cube_time_to_float(cube) + try: + raw_times = diagtools.cube_time_to_float(cube) + except iris.exceptions.CoordinateNotFoundError: + return # Make annual or Decadal means from: if np.max(raw_times) - np.min(raw_times) < 20: + if not cube.coords('year'): + iris.coord_categorisation.add_year(cube, 'time') cube = cube.aggregated_by('year', iris.analysis.MEAN) else: cube = diagtools.decadal_average(cube) @@ -188,12 +196,22 @@ def make_profiles_plots( ) # Saving files: - if cfg['write_plots']: - logger.info('Saving plots to %s', path) - plt.savefig(path) - + logger.info('Saving plots to %s', path) + plt.savefig(path) plt.close() + provenance_record = diagtools.prepare_provenance_record( + cfg, + caption=f'Profiles of {title}', + statistics=['mean'], + domain=['global'], + plot_type=['vert'], + ancestors=[filename, obs_filename], + ) + + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(path, provenance_record) + def main(cfg): """ @@ -216,17 +234,23 @@ def main(cfg): obs_key = 'observational_dataset' obs_filename = '' obs_metadata = {} + if obs_key in cfg: obs_filename = diagtools.match_model_to_key(obs_key, cfg[obs_key], metadatas) - obs_metadata = metadatas[obs_filename] - + if obs_filename: + obs_metadata = metadatas[obs_filename] + else: + obs_metadata = '' for filename in sorted(metadatas.keys()): if filename == obs_filename: continue + if metadatas[filename]['frequency'] == 'fx': + continue + logger.info('-----------------') logger.info( 'model filenames:\t%s', diff --git a/esmvaltool/diag_scripts/ocean/diagnostic_seaice.py b/esmvaltool/diag_scripts/ocean/diagnostic_seaice.py index 7b99926bce..3a04f0cb34 100644 --- a/esmvaltool/diag_scripts/ocean/diagnostic_seaice.py +++ b/esmvaltool/diag_scripts/ocean/diagnostic_seaice.py @@ -61,13 +61,14 @@ Author: Lee de Mora (PML) ledm@pml.ac.uk """ +import itertools import logging import os import sys -from itertools import product import cartopy import iris +import iris.coord_categorisation import iris.quickplot as qplt import matplotlib import matplotlib.pyplot as plt @@ -75,6 +76,7 @@ from esmvaltool.diag_scripts.ocean import diagnostic_tools as diagtools from esmvaltool.diag_scripts.shared import run_diagnostic +from esmvaltool.diag_scripts.shared._base import ProvenanceLogger # This part sends debug statements to stdout logger = logging.getLogger(os.path.basename(__file__)) @@ -177,6 +179,7 @@ def make_ts_plots( """ # Load cube and set up units cube = iris.load_cube(filename) + iris.coord_categorisation.add_year(cube, 'time') cube = diagtools.bgc_units(cube, metadata['short_name']) cube = agregate_by_season(cube) @@ -234,12 +237,22 @@ def make_ts_plots( ) # Saving files: - if 
cfg['write_plots']:
-        logger.info('Saving plots to %s', path)
-        plt.savefig(path)
-
+    logger.info('Saving plots to %s', path)
+    plt.savefig(path)
     plt.close()

+    provenance_record = diagtools.prepare_provenance_record(
+        cfg,
+        caption=f'Time series of {title}',
+        statistics=['mean'],
+        domain=['polar'],
+        plot_type=['times'],
+        ancestors=[filename],
+    )
+
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(path, provenance_record)
+

 def make_polar_map(
         cube,
@@ -287,7 +300,7 @@
     ax1 = plt.subplot(111, projection=cartopy.crs.SouthPolarStereo())
     ax1.set_extent([-180, 180, -90, -50], cartopy.crs.PlateCarree())

-    linrange = np.linspace(0., 100., 21.)
+    linrange = np.linspace(0, 100, 21)
     qplt.contourf(cube, linrange, cmap=cmap, linewidth=0, rasterized=True)
     plt.tight_layout()

@@ -411,6 +424,7 @@
     """
     # Load cube and set up units
     cube = iris.load_cube(filename)
+    iris.coord_categorisation.add_year(cube, 'time')
     cube = diagtools.bgc_units(cube, metadata['short_name'])
     cube = agregate_by_season(cube)

@@ -427,7 +441,7 @@
     # Making plots for each layer
     plot_types = ['Fractional cover', 'Ice Extent']
     plot_times = [0, -1]
-    for plot_type, plot_time in product(plot_types, plot_times):
+    for plot_type, plot_time in itertools.product(plot_types, plot_times):
         for layer_index, (layer, cube_layer) in enumerate(cubes.items()):
             layer = str(layer)

@@ -471,12 +485,22 @@
             )

             # Saving files:
-            if cfg['write_plots']:
-                logger.info('Saving plots to %s', path)
-                plt.savefig(path)
-
+            logger.info('Saving plots to %s', path)
+            plt.savefig(path)
             plt.close()

+            provenance_record = diagtools.prepare_provenance_record(
+                cfg,
+                caption=f'Map of {title}',
+                statistics=['mean'],
+                domain=['polar'],
+                plot_type=['map'],
+                ancestors=[filename],
+            )
+
+            with ProvenanceLogger(cfg) as provenance_logger:
+                provenance_logger.log(path, provenance_record)
+

 def agregate_by_season(cube):
     """
@@ -525,6 +549,7 @@
     """
     # Load cube and set up units
     cube = iris.load_cube(filename)
+    iris.coord_categorisation.add_year(cube, 'time')
     cube = diagtools.bgc_units(cube, metadata['short_name'])
     cube = agregate_by_season(cube)

@@ -649,11 +674,22 @@
     )

     # Saving files:
-    if cfg['write_plots']:
-        logger.info('Saving plots to %s', path)
-        plt.savefig(path)
+    logger.info('Saving plots to %s', path)
+    plt.savefig(path)
     plt.close()

+    provenance_record = diagtools.prepare_provenance_record(
+        cfg,
+        caption=f'Temporal extent of {title}',
+        statistics=['mean'],
+        domain=['polar'],
+        plot_type=['map'],
+        ancestors=[filename],
+    )
+
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(path, provenance_record)
+

 def main(cfg):
     """
diff --git a/esmvaltool/diag_scripts/ocean/diagnostic_timeseries.py b/esmvaltool/diag_scripts/ocean/diagnostic_timeseries.py
index 4817635a1c..e1377693cb 100644
--- a/esmvaltool/diag_scripts/ocean/diagnostic_timeseries.py
+++ b/esmvaltool/diag_scripts/ocean/diagnostic_timeseries.py
@@ -21,10 +21,9 @@

   preprocessors:
     prep_timeseries_1:# For Global Volume Averaged
-      average_volume:
-        coord1: longitude
-        coord2: latitude
-        coordz: depth
+      volume_statistics:
+        operator: mean
+

 An approproate preprocessor for a 3D+time field at the surface would be::

@@ -32,16 +31,16 @@
     extract_levels:
       levels: [0., ]
       scheme: linear_extrap
-    average_area:
-      coord1: longitude
-      coord2: latitude
+    area_statistics:
+      operator: mean
+

 An approproate preprocessor for a 2D+time field would be::
prep_timeseries_2: # For Global surface Averaged - average_area: - coord1: longitude - coord2: latitude + area_statistics: + operator: mean + This tool is part of the ocean diagnostic tools package in the ESMValTool. @@ -58,6 +57,7 @@ from esmvaltool.diag_scripts.ocean import diagnostic_tools as diagtools from esmvaltool.diag_scripts.shared import run_diagnostic +from esmvaltool.diag_scripts.shared import ProvenanceLogger # This part sends debug statements to stdout logger = logging.getLogger(os.path.basename(__file__)) @@ -133,8 +133,8 @@ def moving_average(cube, window): 'days', 'day', 'dy', 'months', 'month', 'mn', 'years', 'yrs', 'year', 'yr' ]: - raise ValueError("Moving average window units not recognised: " + - "{}".format(win_units)) + raise ValueError( + f'Moving average window units not recognised: {win_units}') times = cube.coord('time').units.num2date(cube.coord('time').points) @@ -245,6 +245,10 @@ def make_time_series_plots( 'start_year', 'end_year' ], ) + caption = ' '.join([ + 'Time series of', metadata["dataset"], metadata["long_name"], + 'with MultiModel value', + ]) else: path = diagtools.get_image_path( @@ -252,14 +256,26 @@ def make_time_series_plots( metadata, suffix='timeseries_' + str(layer_index) + image_extention, ) + caption = ' '.join([ + 'Time series of', metadata["dataset"], metadata["long_name"], + ]) - # Saving files: - if cfg['write_plots']: + # Saving files + logger.info('Saving plots to %s', path) + plt.savefig(path) + plt.close() - logger.info('Saving plots to %s', path) - plt.savefig(path) + provenance_record = diagtools.prepare_provenance_record( + cfg, + caption=caption, + statistics=['mean'], + domain=['global'], + plot_type=['times'], + ancestors=filename, + ) - plt.close() + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(path, provenance_record) def multi_model_time_series( @@ -287,13 +303,14 @@ def multi_model_time_series( model_cubes = {} layers = {} for filename in sorted(metadata): - cube = iris.load_cube(filename) - cube = diagtools.bgc_units(cube, metadata[filename]['short_name']) + if metadata[filename]['frequency'] != 'fx': + cube = iris.load_cube(filename) + cube = diagtools.bgc_units(cube, metadata[filename]['short_name']) - cubes = diagtools.make_cube_layer_dict(cube) - model_cubes[filename] = cubes - for layer in cubes: - layers[layer] = True + cubes = diagtools.make_cube_layer_dict(cube) + model_cubes[filename] = cubes + for layer in cubes: + layers[layer] = True # Load image format extention image_extention = diagtools.get_image_format(cfg) @@ -363,18 +380,17 @@ def multi_model_time_series( plt.ylabel(str(model_cubes[filename][layer].units)) # Saving files: - if cfg['write_plots']: - path = diagtools.get_image_path( - cfg, - metadata[filename], - prefix='MultipleModels_', - suffix='_'.join(['timeseries', - str(layer) + image_extention]), - metadata_id_list=[ - 'field', 'short_name', 'preprocessor', 'diagnostic', - 'start_year', 'end_year' - ], - ) + path = diagtools.get_image_path( + cfg, + metadata[filename], + prefix='MultipleModels_', + suffix='_'.join(['timeseries', + str(layer) + image_extention]), + metadata_id_list=[ + 'field', 'short_name', 'preprocessor', 'diagnostic', + 'start_year', 'end_year' + ], + ) # Resize and add legend outside thew axes. plt.gcf().set_size_inches(9., 6.) 
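A minimal usage sketch for the moving_average helper touched above, assuming the '<number> <units>' window format suggested by its units check and error message (the cube and window value here are illustrative):

    smoothed = moving_average(cube, '5 years')  # five-year running mean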
@@ -385,6 +401,18 @@ def multi_model_time_series( plt.savefig(path) plt.close() + provenance_record = diagtools.prepare_provenance_record( + cfg, + caption=f'Time series of {title}', + statistics=['mean'], + domain=['global'], + plot_type=['times'], + ancestors=list(metadata.keys()), + ) + + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(path, provenance_record) + def main(cfg): """ @@ -410,16 +438,16 @@ def main(cfg): ) for filename in sorted(metadatas): + if metadatas[filename]['frequency'] != 'fx': + logger.info('-----------------') + logger.info( + 'model filenames:\t%s', + filename, + ) - logger.info('-----------------') - logger.info( - 'model filenames:\t%s', - filename, - ) - - ###### - # Time series of individual model - make_time_series_plots(cfg, metadatas[filename], filename) + ###### + # Time series of individual model + make_time_series_plots(cfg, metadatas[filename], filename) logger.info('Success') diff --git a/esmvaltool/diag_scripts/ocean/diagnostic_tools.py b/esmvaltool/diag_scripts/ocean/diagnostic_tools.py index 03b2dbad7f..eabd60495d 100644 --- a/esmvaltool/diag_scripts/ocean/diagnostic_tools.py +++ b/esmvaltool/diag_scripts/ocean/diagnostic_tools.py @@ -13,6 +13,7 @@ import logging import os import sys +from pathlib import Path import iris import numpy as np @@ -40,7 +41,7 @@ def get_obs_projects(): Returns a list of strings of the various types of observational data. """ obs_projects = [ - 'obs4mips', + 'obs4MIPs', ] return obs_projects @@ -85,14 +86,14 @@ def get_input_files(cfg, index=''): Arguments --------- cfg: dict - the opened global config dictionairy, passed by ESMValTool. + the opened global config dictionary, passed by ESMValTool. index: int the index of the file in the cfg file. Returns --------- dict - A dictionairy of the input files and their linked details. + A dictionary of the input files and their linked details. """ if isinstance(index, int): metadata_file = cfg['input_files'][index] @@ -125,14 +126,17 @@ def bgc_units(cube, name): if name in ['tos', 'thetao']: new_units = 'celsius' - if name in ['no3', ]: + if name in ['sos', 'so']: + cube.units = '0.001' + + if name in ['no3', 'o2', 'po4', 'si', 'dissic', 'talk']: new_units = 'mmol m-3' if name in ['chl', ]: new_units = 'mg m-3' if name in ['intpp', ]: - new_units = 'mol m-2 d-1' + new_units = 'mol m-2 yr-1' if name in ['fgco2', ]: new_units = 'g m-2 d-1' @@ -163,23 +167,23 @@ def match_model_to_key( This function checks that the control_model, exper_model and observational_dataset dictionairies from the recipe are matched with the - input file dictionairy in the cfg metadata. + input file dictionary in the cfg metadata. Arguments --------- model_type: str The string model_type to match (only used in debugging). cfg_dict: dict - the config dictionairy item for this model type, parsed directly from + the config dictionary item for this model type, parsed directly from the diagnostics/ scripts, part of the recipe. input_files_dict: dict - The input file dictionairy, loaded directly from the get_input_files() + The input file dictionary, loaded directly from the get_input_files() function, in diagnostics_tools.py. Returns --------- dict - A dictionairy of the input files and their linked details. + A dictionary of the input files and their linked details. 
""" for input_file, intput_dict in input_files_dict.items(): intersect_keys = intput_dict.keys() & cfg_dict.keys() @@ -253,15 +257,23 @@ def guess_calendar_datetime(cube): """ time_coord = cube.coord('time') - if time_coord.units.calendar in ['360_day', ]: + if time_coord.units.calendar in [ + '360_day', + ]: datetime = cftime.Datetime360Day elif time_coord.units.calendar in ['365_day', 'noleap']: datetime = cftime.DatetimeNoLeap - elif time_coord.units.calendar in ['julian', ]: + elif time_coord.units.calendar in [ + 'julian', + ]: datetime = cftime.DatetimeJulian - elif time_coord.units.calendar in ['gregorian', ]: + elif time_coord.units.calendar in [ + 'gregorian', + ]: datetime = cftime.DatetimeGregorian - elif time_coord.units.calendar in ['proleptic_gregorian', ]: + elif time_coord.units.calendar in [ + 'proleptic_gregorian', + ]: datetime = cftime.DatetimeProlepticGregorian else: logger.warning('Calendar set to Gregorian, instead of %s', @@ -305,9 +317,9 @@ def load_thresholds(cfg, metadata): Parameters ---------- cfg: dict - the opened global config dictionairy, passed by ESMValTool. + the opened global config dictionary, passed by ESMValTool. metadata: dict - the metadata dictionairy + the metadata dictionary Returns ------- @@ -351,8 +363,8 @@ def get_colour_from_cmap(number, total, cmap='jet'): cmap = plt.get_cmap(cmap) if number > total: - raise ValueError('The cannot be larger than the total length ' - 'of the list ie: {} > {}'.format(number, total)) + raise ValueError(f'The cannot be larger than the total length ' + f' of the list ie: {number} > {total}') if total > 1: colour = cmap(float(number) / float(total - 1.)) @@ -423,17 +435,15 @@ def add_legend_outside_right(plot_details, ax1, column_width=0.1, loc='right'): plt.plot([], [], c=colour, lw=linewidth, ls=linestyle, label=label) if loc.lower() == 'right': - legd = ax1.legend( - loc='center left', - ncol=ncols, - prop={'size': 10}, - bbox_to_anchor=(1., 0.5)) + legd = ax1.legend(loc='center left', + ncol=ncols, + prop={'size': 10}, + bbox_to_anchor=(1., 0.5)) if loc.lower() == 'below': - legd = ax1.legend( - loc='upper center', - ncol=ncols, - prop={'size': 10}, - bbox_to_anchor=(0.5, -2. * column_width)) + legd = ax1.legend(loc='upper center', + ncol=ncols, + prop={'size': 10}, + bbox_to_anchor=(0.5, -2. * column_width)) legd.draw_frame(False) legd.get_frame().set_alpha(0.) @@ -453,7 +463,7 @@ def get_image_format(cfg, default='png'): Arguments --------- cfg: dict - the opened global config dictionairy, passed by ESMValTool. + the opened global config dictionary, passed by ESMValTool. Returns --------- @@ -493,14 +503,14 @@ def get_image_path( Produce a path to the final location of the image. The cfg is the opened global config, - metadata is the metadata dictionairy (for the individual dataset file) + metadata is the metadata dictionary (for the individual dataset file) Arguments --------- cfg: dict - the opened global config dictionairy, passed by ESMValTool. + the opened global config dictionary, passed by ESMValTool. metadata: dict - The metadata dictionairy for a specific model. + The metadata dictionary for a specific model. prefix: str A string to prepend to the image basename. suffix: str @@ -552,7 +562,7 @@ def get_image_path( def make_cube_layer_dict(cube): """ - Take a cube and return a dictionairy layer:cube + Take a cube and return a dictionary layer:cube Each item in the dict is a layer with a separate cube for each layer. 
ie: cubes[depth] = cube from specific layer
@@ -568,7 +578,7 @@
     Returns
     ---------
     dict
-        A dictionairy of layer name : layer cube.
+        A dictionary of layer name : layer cube.
     """
     #####
     # Check layering:
@@ -579,7 +589,7 @@
             layers.append(coord)

     cubes = {}
-    if layers == []:
+    if not layers:
         cubes[''] = cube
         return cubes

@@ -636,7 +646,10 @@
     for cube in cubes:
         mins.append(cube.data.min())
         maxs.append(cube.data.max())
-    return [np.min(mins), np.max(maxs), ]
+    return [
+        np.min(mins),
+        np.max(maxs),
+    ]


 def get_cube_range_diff(cubes):
@@ -681,4 +694,40 @@
         mins.append(arr.min())
         maxs.append(arr.max())
     logger.info('get_array_range: %s, %s', np.min(mins), np.max(maxs))
-    return [np.min(mins), np.max(maxs), ]
+    return [
+        np.min(mins),
+        np.max(maxs),
+    ]
+
+
+def prepare_provenance_record(cfg, **provenance_record):
+    """
+    Prepare information to feed the provenance record.
+
+    Arguments
+    ---------
+    cfg: dict
+        the opened global config dictionary, passed by ESMValTool.
+    provenance_record: dict
+        dictionary with the provenance details for a specific diagnostic.
+    """
+    recipe_path = Path(cfg['run_dir']).parents[1] / cfg['recipe']
+    with recipe_path.open() as recipe_file:
+        recipe = yaml.safe_load(recipe_file)
+
+    doc = recipe['documentation']
+    authors = doc.get('authors', [])
+    authors += [
+        maintainer for maintainer in doc.get('maintainer', [])
+        if maintainer not in authors
+    ]
+    provenance_record['authors'] = authors
+    for key in ['title', 'description', 'projects']:
+        val = doc[key]
+        if val:
+            provenance_record[key] = val
+    for key in ['realms', 'themes']:
+        val = cfg.get(key)
+        if val:
+            provenance_record[key] = val
+    return provenance_record
diff --git a/esmvaltool/diag_scripts/ocean/diagnostic_transects.py b/esmvaltool/diag_scripts/ocean/diagnostic_transects.py
index b217fcd6dc..5d74d1ebbd 100644
--- a/esmvaltool/diag_scripts/ocean/diagnostic_transects.py
+++ b/esmvaltool/diag_scripts/ocean/diagnostic_transects.py
@@ -14,8 +14,9 @@

   preprocessors:
     prep_transect:
-      time_average:
-      extract_slice: # Atlantic Meridional Transect
+      climate_statistics:
+        operator: mean
+      extract_transect: # Atlantic Meridional Transect
         latitude: [-50.,50.]
         longitude: 332.
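The prepare_provenance_record helper added to diagnostic_tools.py above is used with the same pattern throughout these ocean diagnostics; a minimal sketch of that pattern (the caption, path and filename values here are illustrative):

    provenance_record = diagtools.prepare_provenance_record(
        cfg,
        caption='Transect of temperature',
        statistics=['mean'],
        domain=['reg'],
        plot_type=['sect'],
        ancestors=[filename],
    )
    with ProvenanceLogger(cfg) as provenance_logger:
        provenance_logger.log(path, provenance_record)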
@@ -25,10 +26,10 @@ ledm@pml.ac.uk """ +import itertools import logging import os import sys -from itertools import product import iris import iris.quickplot as qplt @@ -37,6 +38,7 @@ from esmvaltool.diag_scripts.ocean import diagnostic_tools as diagtools from esmvaltool.diag_scripts.shared import run_diagnostic +from esmvaltool.diag_scripts.shared._base import ProvenanceLogger # This part sends debug statements to stdout logger = logging.getLogger(os.path.basename(__file__)) @@ -172,7 +174,7 @@ def make_cube_region_dict(cube): layers.append(coord) cubes = {} - if layers == []: + if not layers: cubes[''] = cube return cubes @@ -259,7 +261,7 @@ def make_transects_plots( qplt.contourf(cube, 15, linewidth=0, rasterized=True) if set_y_logscale: - plt.axes().set_yscale('log') + plt.gca().set_yscale('log') if region: region_title = region @@ -287,12 +289,22 @@ def make_transects_plots( ) # Saving files: - if cfg['write_plots']: - logger.info('Saving plots to %s', path) - plt.savefig(path) - + logger.info('Saving plots to %s', path) + plt.savefig(path) plt.close() + provenance_record = diagtools.prepare_provenance_record( + cfg, + caption=f'Transect of {title}', + statistics=['mean'], + domain=['reg'], + plot_type=['sect', 'zonal', ], + ancestors=[filename], + ) + + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(path, provenance_record) + def add_sea_floor(cube): """ @@ -369,9 +381,9 @@ def make_transect_contours( linestyles=linestyles, rasterized=True) - # Determine y log scale. + # Determine y log scale. Use gca to set scale if determine_set_y_logscale(cfg, metadata): - plt.axes().set_yscale('log') + plt.gca().set_yscale('log') add_sea_floor(cube) @@ -402,12 +414,22 @@ def make_transect_contours( ) # Saving files: - if cfg['write_plots']: - logger.info('Saving plots to %s', path) - plt.savefig(path) - + logger.info('Saving plots to %s', path) + plt.savefig(path) plt.close() + provenance_record = diagtools.prepare_provenance_record( + cfg, + caption=f'Transect of {title}', + statistics=['mean'], + domain=['reg'], + plot_type=['sect', 'zonal', ], + ancestors=[filename], + ) + + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(path, provenance_record) + def multi_model_contours( cfg, @@ -456,7 +478,7 @@ def multi_model_contours( image_extention = diagtools.get_image_format(cfg) # Make a plot for each layer and each threshold - for region, threshold in product(regions, thresholds): + for region, threshold in itertools.product(regions, thresholds): logger.info('plotting threshold: \t%s', threshold) title = '' plot_details = {} @@ -495,7 +517,7 @@ def multi_model_contours( } if set_y_logscale: - plt.axes().set_yscale('log') + plt.gca().set_yscale('log') title = metadatas[filename]['long_name'] units = str(model_cubes[filename][region].units) @@ -512,20 +534,19 @@ def multi_model_contours( plt.legend(loc='best') # Saving files: - if cfg['write_plots']: - path = diagtools.get_image_path( - cfg, - metadatas[filename], - prefix='MultipleModels', - suffix='_'.join([ - 'contour_tramsect', region, - str(threshold) + image_extention - ]), - metadata_id_list=[ - 'field', 'short_name', 'preprocessor', 'diagnostic', - 'start_year', 'end_year' - ], - ) + path = diagtools.get_image_path( + cfg, + metadatas[filename], + prefix='MultipleModels', + suffix='_'.join([ + 'contour_tramsect', region, + str(threshold) + image_extention + ]), + metadata_id_list=[ + 'field', 'short_name', 'preprocessor', 'diagnostic', + 'start_year', 'end_year' + ], + ) # Resize and add 
legend outside thew axes. plt.gcf().set_size_inches(9., 6.) @@ -536,6 +557,18 @@ def multi_model_contours( plt.savefig(path) plt.close() + provenance_record = diagtools.prepare_provenance_record( + cfg, + caption=f'Transect of {title}', + statistics=['mean'], + domain=['reg'], + plot_type=['sect', 'zonal', ], + ancestors=list(metadatas.keys()), + ) + + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(path, provenance_record) + def main(cfg): """ diff --git a/esmvaltool/diag_scripts/perfmetrics/collect.ncl b/esmvaltool/diag_scripts/perfmetrics/collect.ncl index 491b857f97..a2b14ab733 100644 --- a/esmvaltool/diag_scripts/perfmetrics/collect.ncl +++ b/esmvaltool/diag_scripts/perfmetrics/collect.ncl @@ -1,6 +1,6 @@ ; ############################################################################# ; WRAPPER SCRIPT FOR COLLECTING AND PLOTTING PRECALCULATED METRICS -; Author: Franziska Frank (DLR, Germany) +; Author: Franziska Winterstein (DLR, Germany) ; ESMVal project ; ############################################################################# ; Description @@ -19,6 +19,7 @@ ; cm_interval: min and max color of the color table ; cm_reverse: reverse color table ; sort: sort datasets in alphabetic order (excluding MMM) +; diag_order: sort diagnostics in the specified order ; title: plot title ; scale_font: scaling factor applied to the default font size ; disp_values: switch on/off the grading values on the plot @@ -28,15 +29,20 @@ ; Caveats ; ; Modification history -; 20180508-A_righ_ma: renamed and generalized -; 20151027-A_laue_ax: moved call to 'write_references' to the beginning -; of the code -; 20150325-A_laue_ax: modified reference tags used for acknowledgements -; (projects, observations, etc) -; 20150109-A_gott_kl: distinguish RMSD & BIAS in output file naming. -; 20140620-A_gott_kl: modified output file naming (variable outfile_plot) -; and added option for explicit plot title. -; 20140204-A_fran_fr: written. +; 20220609-bock_lisa: added gaps between project blocks and improved style +; 20211014-bock_lisa: added sorting by project +; 20190315-hassler_birgit: added SMPI case +; 20180508-righi_mattia: renamed and generalized +; 20151027-lauer_axel: moved call to 'write_references' to the beginning +; of the code +; 20150325-lauer_axel: modified reference tags used for acknowledgements +; (projects, observations, etc) +; 20150109-gottschaldt_klaus-dirk: distinguish RMSD & BIAS in output file +; naming. +; 20140620-gottschaldt_klaus-dirk: modified output file naming (variable +; plotpath) and added option for explicit +; plot title. +; 20140204-winterstein_franziska: written. 
; ; ############################################################################# @@ -57,14 +63,6 @@ begin file_type = "ps" end if - ; Write references ; FIX-ME to be replaced by new method - ; write_references(diag_script, "A_fran_fr", \ - ; (/"A_righ_ma", "A_eyri_ve", "A_gott_kl"/), \ - ; (/"D_righi15gmd", "D_gleckler08jgr"/), \ - ; (/"E_ncep", "E_erainterim", "E_airs", "E_ceresebaf", \ - ; "E_srb"/), \ - ; (/"P_embrace", "P_esmval"/)) - ; List of grading files input_files = diag_script_info@input_files + "/" + \ diag_script_info@metric + ".nc" @@ -72,143 +70,441 @@ begin ; Filter out non-existing files input_files := input_files(ind(isfilepresent(input_files))) + data_files = new(dimsizes(input_files), string) ; Loop over files in the list, read and append data do ii = 0, dimsizes(input_files) - 1 f = addfile(input_files(ii), "r") - curr_file = tostring(f->temp_list) - data_temp = ncdf_read(curr_file, "grade") + data_files(ii) = tostring(f->temp_list) - ; Change to 3 dimensional - if (dimsizes(dimsizes(data_temp)).eq.2) then + if (diag_script_info@metric.eq."SMPI") then + data_temp_smpi = ncdf_read(data_files(ii), "performance_index") - dim_temp = array_append_record(dimsizes(data_temp), 2, 0) - temp = new(dim_temp, typeof(data_temp)) - temp(:, :, 0) = data_temp - data_temp := temp - delete(temp) + if (.not.isdefined("data_all_smpi")) then - end if + dim_temp_smpi = dimsizes(data_temp_smpi) + n = dimsizes(dim_temp_smpi) + dim_data_smpi = new(n + 1, integer) + dim_data_smpi(0) = dimsizes(input_files) + dim_data_smpi(1:n) = dim_temp_smpi + vars = new(dimsizes(input_files), string) + data_all_smpi = new(dim_data_smpi, float) + delete(dim_data_smpi) + data_all_smpi(0, :, :) = data_temp_smpi + data_all_smpi!0 = "vars" + data_all_smpi!1 = "bootstrap_member" + data_all_smpi!2 = "models" + data_all_smpi&models = data_temp_smpi&models + data_all_smpi&bootstrap_member = data_temp_smpi&bootstrap_member - ; Create array for collecting all datasets - if (.not.isdefined("data_all")) then - dim_temp = dimsizes(data_temp) - dim_data = (/dimsizes(input_files), dim_temp(1), 2/) - data_all = new(dim_data, float) - data_all(0, :, :) = data_temp - data_all!1 = "models" - data_all&models = data_temp&models - end if + end if - ; Make sure dataset coordinate is consistent - consistent = False - if (dimsizes(data_temp&models).eq.dimsizes(data_all&models)) then - if (all(data_temp&models.eq.data_all&models)) then - consistent = True + ; Make sure dataset coordinate is consistent + consistent = False + if (dimsizes(data_temp_smpi&models).eq. \ + dimsizes(data_all_smpi&models)) then + if (all(data_temp_smpi&models.eq.data_all_smpi&models)) then + consistent = True + end if end if - end if - ; Append data - if (consistent) then - data_all(ii, :, :) = (/data_temp/) - copy_VarCoords(data_temp, data_all(ii:ii, :, :)) - else - do imod2 = 0, dimsizes(data_temp&models) - 1 - if (.not.any(data_temp&models(imod2).eq.data_all&models)) then - ; Append record for dataset(imod) - data_new = extend_var_at(data_all, 1, dimsizes(data_all&models)) - data_new(ii, dimsizes(data_all&models), :) = \ - (/data_temp(0, imod2, :)/) - data_new&models(dimsizes(data_all&models)) = \ - (/data_temp&models(imod2)/) - data_all := data_new - delete(data_new) - else - ; Loop over datasets of data - do imod = 0, dimsizes(data_all&models) - 1 - ; if no data dataset is similar to curreny entry, write data entry - if (data_all&models(imod).eq. 
data_temp&models(imod2)) then + ; Append data + if (consistent) then + data_all_smpi(ii, :, :) = (/data_temp_smpi/) + copy_VarCoords(data_temp_smpi, data_all_smpi(ii, :, :)) + else + ; Loop over datasets in new data entry + do imod_temp = 0, dimsizes(data_temp_smpi&models) - 1 + ; If current dataset is not already part of the model coordinate + if (.not.any(data_temp_smpi&models(imod_temp) .eq. \ + data_all_smpi&models)) then + ; Append record for dataset(imod) + data_new = extend_var_at(data_all_smpi, 2, \ + dimsizes(data_all_smpi&models)) + data_new(ii, :, dimsizes(data_all_smpi&models)) = \ + (/data_temp_smpi(:, imod_temp)/) + data_new&models(dimsizes(data_all_smpi&models)) = \ + (/data_temp_smpi&models(imod_temp)/) + delete(data_all_smpi) + data_all_smpi = data_new + delete(data_new) + else + ; Loop over datasets of data + do imod = 0, dimsizes(data_all_smpi&models) - 1 + ; if new dataset is identical to current entry, write data entry + if (data_all_smpi&models(imod).eq. \ + data_temp_smpi&models(imod_temp)) then + data_all_smpi(ii, :, imod) = (/data_temp_smpi(:, imod_temp)/) + copy_VarCoords(data_temp_smpi(:, imod_temp), \ + data_all_smpi(ii:ii, :, imod)) + end if + end do + end if + end do + end if + + vars(ii) = data_temp_smpi@invar + delete(data_temp_smpi) + + else ; metric not SMPI + + data_temp = ncdf_read(data_files(ii), "grade") + + ; Change to 3 dimensional + if (dimsizes(dimsizes(data_temp)).eq.2) then + dim_temp = array_append_record(dimsizes(data_temp), 2, 0) + temp = new(dim_temp, typeof(data_temp)) + temp(:, :, 0) = data_temp + data_temp := temp + delete(temp) + end if + + ; Create array for collecting all datasets + if (.not.isdefined("data_all")) then + dim_temp = dimsizes(data_temp) + dim_data = (/dimsizes(input_files), dim_temp(1), 2/) + data_all = new(dim_data, float) + data_all(0, :, :) = data_temp + data_all!1 = "models" + data_all&models = data_temp&models + projectnames = str_split(data_temp@projects, " ") + end if + + ; Make sure model coordinate is consistent + consistent = False + if (dimsizes(data_temp&models).eq.dimsizes(data_all&models)) then + if (all(data_temp&models.eq.data_all&models)) then + consistent = True + end if + end if + + ; Append data + if (consistent) then + data_all(ii, :, :) = (/data_temp/) + copy_VarCoords(data_temp, data_all(ii:ii, :, :)) + else + projects = str_split(data_temp@projects, " ") + do imod2 = 0, dimsizes(data_temp&models) - 1 + if (.not.any(data_temp&models(imod2).eq.data_all&models)) then + ; Append record for dataset(imod) + data_new = extend_var_at(data_all, 1, dimsizes(data_all&models)) + data_new(ii, dimsizes(data_all&models), :) = \ + (/data_temp(0, imod2, :)/) + data_new&models(dimsizes(data_all&models)) = \ + (/data_temp&models(imod2)/) + data_all := data_new + delete(data_new) + projectnames := array_append_record(projectnames, \ + projects(imod2), 0) + else + ; Loop over datasets of data + do imod = 0, dimsizes(data_all&models) - 1 + ; if no dataset is similar to current entry, write data entry + if (data_all&models(imod).eq.
data_temp&models(imod2)) then data_all(ii, imod, :) = (/data_temp(0, imod2, :)/) copy_VarCoords(data_temp(0:0, imod2, :), \ data_all(ii:ii, imod, :)) + end if + end do + end if + end do + delete(projects) + end if + delete(data_temp) + end if ; metric distinction end 1 + end do + + if (diag_script_info@metric.eq."SMPI") then + + data_all_smpi&vars = vars + delete(vars) + + ; Check for incomplete array, SMPI only for datasets with all vars + if any(ismissing(data_all_smpi)) then + do ivar = 0, dimsizes(data_all_smpi&vars) - 1 + do ii = 0, dimsizes(data_all_smpi&models) - 1 + if all(ismissing(data_all_smpi(ivar, :, ii))) then + if (isvar("miss_mods")) then + newmm = array_append_record(miss_mods, \ + data_all_smpi&models(ii), 0) + delete(miss_mods) + miss_mods = newmm + delete(newmm) + else + miss_mods = data_all_smpi&models(ii) end if - end do + end if + end do + if (isvar("miss_mods")) then + log_info("Variable " + data_all_smpi&vars(ivar) \ + + " is missing dataset(s) " \ + + str_join(miss_mods, ", ")) + delete(miss_mods) + missing_mods = True end if end do + if (isvar("missing_mods")) then + error_msg("f", DIAG_SCRIPT, "", "SMPI only supported for" \ + + " datasets containing all variables") + end if end if - delete(data_temp) - end do - ; Reduce size if all entries have only one reference - if (all(ismissing(data_all(:, :, 1)))) then - data_new = data_all(:, :, 0) - delete(data_all) - data_all = data_new - delete(data_new) - end if - delete(data_all@var) - delete(data_all@title) - delete(data_all@ncdf_dir) - if (isatt(data_all, "reference")) then - delete(data_all@reference) - end if + i2mean = dim_avg_n(data_all_smpi, 0) ; eq. 3 + dims = dimsizes(i2mean) + nboot = dims(0) - 1 + nmod = dims(1) + delete(dims) + + data_smpi = new((/dimsizes(data_all_smpi&models), 2/), float) + data_smpi!0 = "models" + data_smpi&models = data_all_smpi&models + data_smpi(:, 0) = (/i2mean(0, :)/) ; clim. mean + + i5 = toint(0.05 * nboot) + i95 = toint(0.95 * nboot) + + do imod = 0, nmod - 1 + data_sorted = i2mean(1:nboot, imod) + qsort(data_sorted) + data_smpi(imod, 1) = 0.5 * (data_sorted(i95) - data_sorted(i5)) + delete(data_sorted) + end do + + data_smpi!1 = "statistic" + data_smpi&statistic = (/"SMPI", "95_conf"/) + data_smpi@diag_script = DIAG_SCRIPT + data_smpi@var = "SMPI" + data_smpi@ensemble_name = data_all_smpi@ensemble_name + + ; Sort datasets in alphabetical order (exclude MMM and set to end) + pid = sort_alphabetically(data_smpi&models, \ + ind(data_smpi&models.eq."Mean-model".or.
\ + data_smpi&models.eq."Median-model"), "end") + tmp = data_smpi(pid, :) + tmp&models = data_smpi&models(pid) + delete(data_smpi) + data_smpi = tmp + delete(tmp) + delete(pid) + + ; Define output filenames + workpath = config_user_info@work_dir + "SMPI.nc" + plotpath = config_user_info@plot_dir + "SMPI" + + ; Write output + system("mkdir -p " + config_user_info@work_dir) + ncdf_outfile = ncdf_write(data_smpi, workpath) + + ; Attach plotting options + copy_VarAtts(diag_script_info, data_smpi) + if (isatt(diag_script_info, "title")) then + data_smpi@res_tiMainString = diag_script_info@title + end if + + else ; if metric not SMPI + + ; Reduce size if all entries have only one reference + if (all(ismissing(data_all(:, :, 1)))) then + data_new = data_all(:, :, 0) + delete(data_all) + data_all = data_new + delete(data_new) + end if + data_all@var = "grade" + if (isatt(data_all, "reference")) then + delete(data_all@reference) + end if - ; Sort datasets in alphabetical order, excluding multi-model mean/median - ; which are placed at the beginning - if (isatt(diag_script_info, "sort")) then - if (diag_script_info@sort) then - idx = ind(data_all&models.eq."MultiModelMean" .or. \ - data_all&models.eq."MultiModelMedian") - pid = sort_alphabetically(data_all&models, idx, "begin") + ; Sort datasets in alphabetical order, excluding multi-model mean/median + ; which are placed at the beginning + if (isatt(diag_script_info, "sort")) then + if (diag_script_info@sort) then + idx = ind(data_all&models.eq."MultiModelMean" .or. \ + data_all&models.eq."MultiModelMedian") + pid = sort_alphabetically(data_all&models, idx, "begin") + if (dimsizes(dimsizes(data_all)).eq.3) then + data_all := data_all(:, pid, :) + else + data_all := data_all(:, pid) + end if + delete(pid) + end if + elseif (isatt(diag_script_info, "project_order")) then + data_temp = data_all + n_proj = dimsizes(diag_script_info@project_order) + id1 = ind(projectnames.eq.diag_script_info@project_order(0)) + id1_mm = (/ind(data_all&models(id1) .eq. \ + diag_script_info@project_order(0) + "_mean"), \ + ind(data_all&models(id1) .eq. \ + diag_script_info@project_order(0) + "_median") /) + if(any(ismissing(id1_mm))) then + id1_mm = -1 + end if + pid1 = sort_alphabetically(data_all&models(id1), id1_mm, "begin") + n1 = dimsizes(id1) + breakpoints = n1 if (dimsizes(dimsizes(data_all)).eq.3) then - data_all := data_all(:, pid, :) + data_all(:, 0:(n1-1), :) = data_temp(:, id1(pid1), :) else - data_all := data_all(:, pid) + data_all(:, 0:(n1-1)) = data_temp(:, id1(pid1)) + end if + if (n_proj .gt. 1) then + id2 = ind(projectnames.eq.diag_script_info@project_order(1)) + id2_mm = (/ind(data_temp&models(id2) .eq. \ + diag_script_info@project_order(1) + "_mean"), \ + ind(data_temp&models(id2) .eq. \ + diag_script_info@project_order(1) + "_median") /) + if (any(ismissing(id2_mm))) then + id2_mm = -1 + end if + pid2 = sort_alphabetically(data_temp&models(id2), id2_mm, "begin") + n2 = dimsizes(id2) + breakpoints = n1 + if (dimsizes(dimsizes(data_all)).eq.3) then + data_all(:, n1:(n1+n2-1), :) = data_temp(:, id2(pid2), :) + else + data_all(:, n1:(n1+n2-1)) = data_temp(:, id2(pid2)) + end if + if (n_proj .gt. 2) then + id3 = ind(projectnames.eq.diag_script_info@project_order(2)) + id3_mm = (/ind(data_temp&models(id3) .eq. \ + diag_script_info@project_order(2) + "_mean"), \ + ind(data_temp&models(id3) .eq. 
\ + diag_script_info@project_order(2) + "_median") /) + if (any(ismissing(id3_mm))) then + id3_mm = -1 + end if + pid3 = sort_alphabetically(data_temp&models(id3), id3_mm, "begin") + n3 = dimsizes(id3) + breakpoints := array_append_record(breakpoints, n1+n2, 0) + if (dimsizes(dimsizes(data_all)).eq.3) then + data_all(:, (n1+n2):(n1+n2+n3-1), :) = data_temp(:, id3(pid3), :) + else + data_all(:, (n1+n2):(n1+n2+n3-1)) = data_temp(:, id3(pid3)) + end if + if (n_proj .gt. 3) then + error_msg("w", DIAG_SCRIPT, "", "number of projects greater " + \ + "than 3, not implemented") + end if + end if + data_all@breakpoints = breakpoints end if - delete(pid) end if - ; Attach plotting options - copy_VarAtts(diag_script_info, data_all) - if (isatt(diag_script_info, "title")) then - data_all@res_tiMainString = diag_script_info@title - end if - if (diag_script_info@metric.eq."taylor") then - data_all@res_varLabels = data_all&models - data_all@res_caseLabels = data_all&diagnostics - end if + ; Sort diagnostics in the order specified in the settings + if (isatt(diag_script_info, "diag_order")) then + l_ok = True + if (dimsizes(data_all&diagnostics).lt. \ + dimsizes(diag_script_info@diag_order)) then + error_msg("w", DIAG_SCRIPT, "", "specified order of diagnostics " + \ + "cannot be applied, number of diagnostics does not match") + l_ok = False + end if + pid = new(dimsizes(diag_script_info@diag_order), integer) + do ii = 0, dimsizes(diag_script_info@diag_order) - 1 + tmp = ind(data_all&diagnostics.eq.diag_script_info@diag_order(ii)) + if (any(ismissing(tmp)) .or. dimsizes(tmp).gt.1) then + error_msg("w", DIAG_SCRIPT, "", "specified order of diagnostics " + \ + "cannot be applied, invalid entry in diag_order") + break + end if + pid(ii) = tmp + delete(tmp) + end do + if (l_ok) then + if (dimsizes(dimsizes(data_all)).eq.3) then + data_all := data_all(pid, :, :) + else + data_all := data_all(pid, :) + end if + end if + end if - ; Create outfile directory - system("mkdir -p " + config_user_info@plot_dir) - if (dimsizes(data_all&diagnostics).gt.1) then - diags = data_all&diagnostics(0) + "_to_" + \ - data_all&diagnostics(dimsizes(data_all&diagnostics) - 1) - else - diags = str_concat(data_all&diagnostics) - end if + ; Define output filenames + if (dimsizes(data_all&diagnostics).gt.1) then + diags = data_all&diagnostics(0) + "_to_" + \ + data_all&diagnostics(dimsizes(data_all&diagnostics) - 1) + else + diags = str_concat(data_all&diagnostics) + end if + workpath = config_user_info@work_dir + diags + plotpath = config_user_info@plot_dir + diags + if (isatt(data_all, "metric")) then + workpath = workpath + "_" + data_all@metric + ".nc" + plotpath = plotpath + "_" + data_all@metric + end if + delete(diags) + + ; Write output + system("mkdir -p " + config_user_info@work_dir) + ncdf_outfile = ncdf_write(data_all, workpath) + + ; Attach plotting options + copy_VarAtts(diag_script_info, data_all) + if (isatt(diag_script_info, "title")) then + data_all@res_tiMainString = diag_script_info@title + end if + if (diag_script_info@metric.eq."taylor") then + data_all@res_varLabels = data_all&models + data_all@res_caseLabels = data_all&diagnostics + end if - ; Define outfile name - outfile_plot = config_user_info@plot_dir + diags - if (isatt(data_all, "metric")) then - outfile_plot = outfile_plot + "_" + data_all@metric end if ; Create workspace - wks = gsn_open_wks(file_type, outfile_plot) + system("mkdir -p " + config_user_info@plot_dir) + wks = gsn_open_wks(file_type, plotpath) if (diag_script_info@metric.eq."taylor")
then - wks@legendfile = outfile_plot + "_legend" + wks@legendfile = plotpath + "_legend" end if ; Call plotting function if (diag_script_info@metric.eq."taylor") then plot = taylor_plot(wks, data_all, "grade") + elseif (diag_script_info@metric.eq."SMPI") then + plot = circle_plot(wks, data_smpi, "performance_index", \ + data_smpi@ensemble_name) else plot = portrait_plot(wks, data_all, "grade") end if - log_info(" gv " + outfile_plot + "." + file_type) + ; Call provenance logger + if (diag_script_info@metric.eq."RMSD") then + caption = "RMSD performance metric" + statistics = (/"rmsd"/) + authors = (/"winterstein_franziska", "righi_mattia", "eyring_veronika"/) + plottype = "portrait" + references = (/"righi15gmd", "gleckler08jgr"/) + elseif (diag_script_info@metric.eq."BIAS") then + caption = "Bias performance metric" + statistics = (/"diff"/) + authors = (/"winterstein_franziska", "righi_mattia", "eyring_veronika"/) + plottype = "portrait" + references = (/"righi15gmd", "gleckler08jgr"/) + elseif (diag_script_info@metric.eq."taylor") then + caption = "Taylor diagram" + statistics = (/"rmsd", "corr"/) + authors = (/"winterstein_franziska", "righi_mattia", "eyring_veronika"/) + plottype = "taylor" + references = (/"righi15gmd", "gleckler08jgr"/) + elseif (diag_script_info@metric.eq."SMPI") then + caption = \ + "Performance index I2. Similar to Figure 1 of Reichler and Kim (2008)." + statistics = "smpi" + authors = (/"gier_bettina", "hassler_birgit"/) + plottype = "circle" + references = (/"rk2008bams"/) + else + error_msg("f", DIAG_SCRIPT, "", "cannot add provenance information " + \ + "for metric " + diag_script_info@metric) + end if + domains = (/"global"/) + + ; Call provenance logger + log_provenance(ncdf_outfile, plotpath + "." + file_type, caption, \ + statistics, domains, plottype, authors, references, \ + data_files) leave_msg(DIAG_SCRIPT, "") diff --git a/esmvaltool/diag_scripts/perfmetrics/cycle.ncl b/esmvaltool/diag_scripts/perfmetrics/cycle.ncl index 1aabe497b0..5d6f25e945 100644 --- a/esmvaltool/diag_scripts/perfmetrics/cycle.ncl +++ b/esmvaltool/diag_scripts/perfmetrics/cycle.ncl @@ -1,6 +1,6 @@ ; ############################################################################# ; PROCEDURE FOR THE ANNUAL CYCLE PLOT OF THE PERFORMANCE METRICS -; Authors: Mattia Righi (DLR, Germany) and Franziska Frank (DLR, Germany) +; Authors: Mattia Righi (DLR, Germany) and Franziska Winterstein (DLR, Germany) ; ESMVal project ; ############################################################################# @@ -36,23 +36,31 @@ begin copy_VarAtts(diag_script_info, var_all) var_all@ref_model = variable_info[0]@reference_dataset + ; Search for level + f = addfile(info_items[0]@filename, "r") + if (isfilevar(f, "plev")) then + if (dimsizes(f->plev).eq.1) then + level = toint(f->plev/100.) 
+ end if + end if + + ; Set path for saving processed data + system("mkdir -p " + config_user_info@work_dir) + if (isdefined("level")) then + vv = var0 + level + else + vv = var0 + end if + fname = str_join((/"perfmetrics", "cycle", vv, \ + diag_script_info@time_avg, diag_script_info@region/), "_") + workpath = config_user_info@work_dir + fname + ".nc" + plotpath = config_user_info@plot_dir + fname + ; Loop over datasets do imod = 0, nDatasets - 1 log_debug("Processing " + datasetnames(imod)) - ; Set path for saving processed data ; FIX-ME add preproc_id - fullpath = config_user_info@work_dir + "/" - system("mkdir -p " + fullpath) - fname = basename(systemfunc("basename " + info_items[imod]@filename)) - fname = fname + "_" + basename(DIAG_SCRIPT) + "_cycle" - fname = fname + "_" + diag_script_info@time_avg - if (isatt(diag_script_info, "level")) then - fname = fname + "_" + diag_script_info@level ; FIX-ME - end if - fname = fname + "_" + str_sub_str(diag_script_info@region, " ", "") - procpath = fullpath + fname + ".nc" - ; Determine start/end year start_year = info_items[imod]@start_year end_year = info_items[imod]@end_year @@ -83,22 +91,17 @@ begin end if delete(var_reg) - ; Write output + ; Store in global array var_all(imod, :, 0) = var_avg var_all(imod, :, 1) = var_std delete(var_avg) delete(var_std) - if (config_user_info@write_netcdf.eq."True") then - var_all@ncdf = procpath - ncdf_outfile = ncdf_write(var_all(imod, :, :), procpath) - end if end do - ; Plotting - if (.not.diag_script_info@draw_plots) then - return - end if + ; Write output + var_all@ncdf = workpath + ncdf_outfile = ncdf_write(var_all, workpath) ; Convert units for plotting (if required) if (isatt(diag_script_info, "plot_units")) then @@ -106,66 +109,33 @@ begin end if ; Annotation and file names - region_name = "" ; priority 3 - location = "" ; priority 3 - if (isatt(diag_script_info, "aux_info")) then - region_name = "_" + diag_script_info@aux_info ; priority 2 - location = " - " + diag_script_info@aux_info ; priority 2 - end if - if (isatt(diag_script_info, "location")) then - location = " - " + diag_script_info@location ; priority 1 - end if - if (isatt(diag_script_info, "region")) then - region_name = "_" + region@name ; priority 1 - location = " - " + diag_script_info@region - end if - if (isatt(var_all, "long_name")) then - varstring = var_all@long_name ; priority 3 - end if - if (isatt(var_all, "short_name")) then - varstring = var_all@short_name ; priority 2 - end if - if (isStrSubset(var0, "vmr").and.isStrSubset(var0, "_")) then - varstring = var0 ; priority 1 - end if - altitude = "" ; priority 2 - if (isatt(diag_script_info, "altitude")) then - altitude = " - " + diag_script_info@altitude ; priority 1 - end if - if (isatt(diag_script_info, "level")) then ; FIX-ME - if (diag_script_info@level.ne."all") then - altitude = " - " + diag_script_info@level + " hPa" - end if + title = var_all@long_name + caption = var0 + if (isdefined("level")) then + title = title + " " + level + " hPa" + caption = caption + level + delete(level) end if - outfile = config_user_info@plot_dir + var0 + "_cycle_" + \ - diag_script_info@time_avg + "_" + region_name + title = title + " - " + diag_script_info@region - wks = gsn_open_wks(file_type, outfile) - wks@legendfile = outfile + "_legend" - var_all@res_tiMainString = varstring - var_all@res_tiMainString = var_all@res_tiMainString + location + altitude + ; Draw plot + wks = gsn_open_wks(file_type, plotpath) + wks@legendfile = plotpath + "_legend" + var_all@res_tiMainString = title 
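The level handling above reads a single plev value (in Pa) from the first preprocessed file and appends it, converted to hPa, to the plot title and the work_dir filename. A rough Python/iris equivalent of that logic, for reference only (the coordinate name, the helper function and its arguments are illustrative assumptions, not part of this change):

import iris
from iris.exceptions import CoordinateNotFoundError

def title_with_level(filename, var0, title):
    """Append 'NNN hPa' when the preprocessed file has one pressure level."""
    cube = iris.load_cube(filename)
    try:
        plev = cube.coord('air_pressure').points  # in Pa for CMOR-style data
    except CoordinateNotFoundError:
        return var0, title
    if plev.size == 1:
        level = int(plev[0] / 100.)  # Pa -> hPa, mirrors toint(f->plev/100.)
        return var0 + str(level), title + ' ' + str(level) + ' hPa'
    return var0, title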
plot = cycle_plot(wks, var_all, var0, info_items) draw(plot) frame(wks) - log_info(" gv " + outfile + "." + file_type) - - ; collect meta-data and call ESMValMD function - nc_file = "nonexistent" - plot_file = outfile+"."+file_type - caption = "Cycle plot of variable: " + var0 - statistics = (/"mean", "rmsd"/) - domain = "global" - plottype = "seas" - diag_des = "Calculates and (optionally) plots annual/seasonal cycles, " \ - + "zonal means, lat-lon fields and time-lat-lon fields from " \ - + "input 2D/3D monthly data. The calculated fields can be " \ - + "plotted as difference w.r.t. a given reference dataset. It " \ - + "also calculates grading and taylor metrics." - authors = (/"righ_ma", "fran_fr", "eyri_ve", "gott_kl", "senf_da"/) - references = (/"righi15gmd", "gleckler08jgr"/) - infiles = (/"infile1", "infile2"/) - ; ESMValMD(nc_file, plot_file, caption, statistics, domain, plottype, \ - ; diag_des, authors, references, infiles) + ; Call provenance logger + log_provenance(ncdf_outfile, \ + plotpath + "." + file_type, \ + "Cycle plot of variable " + caption, \ + (/"mean", "stddev"/), \ + diag_script_info@region, \ + "seas", \ + (/"winterstein_franziska", "righi_mattia", \ + "eyring_veronika"/), \ + (/"righi15gmd", "gleckler08jgr"/), \ + metadata_att_as_array(info_items, "filename")) end diff --git a/esmvaltool/diag_scripts/perfmetrics/cycle_latlon.ncl b/esmvaltool/diag_scripts/perfmetrics/cycle_latlon.ncl index 4a5d32ed79..e54176b34f 100644 --- a/esmvaltool/diag_scripts/perfmetrics/cycle_latlon.ncl +++ b/esmvaltool/diag_scripts/perfmetrics/cycle_latlon.ncl @@ -1,9 +1,11 @@ ; ############################################################################# ; PROCEDURE FOR THE CYCLE-LATLON PLOT OF THE PERFORMANCE METRICS -; Authors: Mattia Righi (DLR, Germany) and Franziska Frank (DLR, Germany) +; Authors: Mattia Righi (DLR, Germany) and Franziska Winterstein (DLR, Germany) ; ESMVal project ; ############################################################################# +load "$diag_scripts/shared/plot/style.ncl" + procedure perfmetrics_ptype_script() begin @@ -17,24 +19,21 @@ begin modidx := array_append_record(ref_ind, modidx(ind(modidx.ne.ref_ind)), 0) end if + annots = project_style(info_items, diag_script_info, "annots") + ; Loop over datasets - do ii = 0, dimsizes(modidx) - 1 + do ii_modidx = 0, dimsizes(modidx) - 1 - imod = modidx(ii) + imod = modidx(ii_modidx) log_debug("Processing " + datasetnames(imod)) - ; Set path for saving processed data ; FIX-ME add preproc_id - fullpath = config_user_info@work_dir + "/" - system("mkdir -p " + fullpath) - fname = basename(systemfunc("basename " + info_items[imod]@filename)) - fname = fname + "_" + basename(DIAG_SCRIPT) + "_cycle_latlon" - fname = fname + "_" + diag_script_info@time_avg - if (isatt(diag_script_info, "level")) then - fname = fname + "_" + diag_script_info@level ; FIX-ME - end if - fname = fname + "_" + str_sub_str(diag_script_info@region, " ", "") - procpath = fullpath + fname + ".nc" + ; Set path for saving processed data + system("mkdir -p " + config_user_info@work_dir) + fname = \ + str_join((/"perfmetrics", "cycle_latlon", annots(imod), var0, \ + diag_script_info@time_avg, diag_script_info@region/), "_") + workpath = config_user_info@work_dir + fname + ".nc" ; Determine start/end year start_year = info_items[imod]@start_year @@ -42,7 +41,15 @@ begin ; Read data var = read_data(info_items[imod]) - dnames = getVarDimNames(var) + + if (var0 .eq. "tos" .and. var@units .eq. 
"K") then + var = convert_units(var, "degC") + end if + + ; Convert data to floats if necessary + if (typeof(var) .eq. "double") then + var := dble2flt(var) + end if ; Extract region var_reg = area_operations(var, region(0), region(1), \ @@ -57,7 +64,7 @@ begin ; Standard deviation calculation for this ptype is not yet implemented in ; diag_scripts/shared/statistics.ncl, but is anyway not required here - ; Write output + ; Store in global array var_all = new(array_append_record(dimsizes(var_avg), 2, 0), float) var_all!3 = "statistic" var_all&statistic = (/"mean", "stddev"/) @@ -66,14 +73,28 @@ begin var_all@diag_script = (/DIAG_SCRIPT/) copy_VarAtts(diag_script_info, var_all) var_all@ref_model = variable_info[0]@reference_dataset - if (config_user_info@write_netcdf.eq."True") then - var_all@ncdf = procpath - ncdf_outfile = ncdf_write(var_all, procpath) - end if + delete(var_avg) - ; Calculate grading - if (.not.diag_script_info@calc_grading) then - continue + grading_models = remove_index(datasetnames, ref_inds) + + ; Store all models in one array + if(ii_modidx .eq. 0) then + var_all_mod = new(array_append_record(dimsizes(var_all), \ + dimsizes(grading_models), 0), float) + var_all_mod!4 = "models" + var_all_mod&models = remove_index(datasetnames, ref_inds) + copy_VarAtts(diag_script_info, var_all_mod) + end if + if (l_altern) then + if (imod .ne. ref_ind .and. imod .ne. alt_ind) then + igrad = ind(datasetnames(imod) .eq. grading_models) + var_all_mod(:, :, :, :, igrad) = var_all + end if + else + if (imod.ne.ref_ind) then + igrad = ind(datasetnames(imod) .eq. grading_models) + var_all_mod(:, :, :, :, igrad) = var_all + end if end if ; Store reference dataset @@ -88,10 +109,32 @@ begin end if end if + ; Write output + var_all@ncdf = workpath + ncdf = ncdf_write(var_all, workpath) + + ; Call provenance logger + log_provenance(ncdf, \ + "n/a", \ + "n/a", \ + (/"mean", "stddev"/), \ + diag_script_info@region, \ + "other", \ + (/"winterstein_franziska", "righi_mattia", \ + "eyring_veronika"/), \ + (/"righi15gmd", "gleckler08jgr"/), \ + info_items[imod]@filename) + + ; Calculate grading + if (.not.diag_script_info@calc_grading) then + continue + end if + ; Loop over requested metrics do met = 0, nmetrics - 1 if (diag_script_info@metric(met).eq."taylor") then + locidx = ind(datasetnames(imod).eq.taylor&models) if (ismissing(locidx)) then continue @@ -102,23 +145,101 @@ begin taylor(0, locidx, 1) = \ calculate_metric(var_all(:, :, :, 0), var_ref(:, :, :, 0), \ "correlation") - else - locidx = ind(datasetnames(imod).eq.grading&models) + + elseif (diag_script_info@metric(met).eq."SMPI") then + + locidx = ind(datasetnames(imod).eq.smpi&models) if (ismissing(locidx)) then continue end if - grading(met, 0, locidx, 0) = \ + smpi(:, locidx) = \ + (/calculate_metric(var_all(:, :, :, 0), var_ref(:, :, :, 0), \ + "SMPI")/) + + else + + if (.not.any(datasetnames(imod).eq.grading&models)) then + continue + end if + grading(met, 0, igrad, 0) = \ calculate_metric(var_all(:, :, :, 0), var_ref(:, :, :, 0), \ diag_script_info@metric(met)) if (l_altern) then - grading(met, 0, locidx, 1) = \ + grading(met, 0, igrad, 1) = \ calculate_metric(var_all(:, :, :, 0), var_alt(:, :, :, 0), \ diag_script_info@metric(met)) end if + end if end do end do + if (diag_script_info@calc_grading .and. \ + any(diag_script_info@metric.eq."RMSD") .and. \ + all(grading_models.ne."MultiModelMean") .and. 
\ + all(grading_models.ne."MultiModelMedian")) then + + ; ************************************* + ; calculate means over ensemble members + ; ************************************* + + imet = ind(diag_script_info@metric.eq."RMSD") + + datasetnames := grading@models + projectnames := remove_index(projectnames, ref_inds) + + ndim := dimsizes(datasetnames) + + ; ****************************************************** + ; calculate multi-model mean and median for each project + ; ****************************************************** + + projects = get_unique_values(projectnames) + n_proj = dimsizes(projects) + + do iproj = 0, n_proj - 1 + + log_debug("Processing mean of " + projects(iproj)) + + ind_proj := ind(projectnames .eq. projects(iproj)) + + tmp := dim_avg_n_Wrap(var_all_mod(:, :, :, :, ind_proj), 4) + mean_ref = calculate_metric(tmp(:, :, :, 0), var_ref(:, :, :, 0), \ + diag_script_info@metric(imet)) + if (l_altern) then + mean_alt = calculate_metric(tmp(:, :, :, 0), var_alt(:, :, :, 0), \ + diag_script_info@metric(imet)) + end if + tmp = dim_median_n(var_all_mod(:, :, :, :, ind_proj), 4) + median_ref = calculate_metric(tmp(:, :, :, 0), var_ref(:, :, :, 0), \ + diag_script_info@metric(imet)) + if (l_altern) then + median_alt = calculate_metric(tmp(:, :, :, 0), var_alt(:, :, :, 0), \ + diag_script_info@metric(imet)) + end if + + dims = dimsizes(grading) + tmp := new((/dims(0), dims(1), dims(2) + 2, dims(3)/), float) + copy_VarAtts(grading, tmp) + tmp(imet, 0, :dims(2)-1, :) = grading(imet, 0, :, :) + tmp(imet, 0, dims(2), 0) = (/mean_ref/) + tmp(imet, 0, dims(2)+1, 0) = (/median_ref/) + if (l_altern) then + tmp(imet, 0, dims(2), 1) = (/mean_alt/) + tmp(imet, 0, dims(2)+1, 1) = (/median_alt/) + end if + + delete(grading) + grading = tmp + grading&models(dims(2)) = projects(iproj)+"_mean" + grading&models(dims(2)+1) = projects(iproj)+"_median" + grading@projects = grading@projects + " " + projects(iproj) + \ + " " + projects(iproj) + delete(tmp) + end do + + end if + end diff --git a/esmvaltool/diag_scripts/perfmetrics/cycle_zonal.ncl b/esmvaltool/diag_scripts/perfmetrics/cycle_zonal.ncl new file mode 100644 index 0000000000..9b86ebf0d5 --- /dev/null +++ b/esmvaltool/diag_scripts/perfmetrics/cycle_zonal.ncl @@ -0,0 +1,152 @@ +; ############################################################################# +; PROCEDURE FOR THE CYCLE-ZONAL PLOT OF THE PERFORMANCE METRICS +; Author: Bettina Gier (University of Bremen, Germany) +; ESMVal project +; ############################################################################# + +load "$diag_scripts/shared/plot/style.ncl" + +procedure perfmetrics_ptype_script() +begin + + ; Set dataset indexes, with reference and alternative datasets first + modidx = ispan(0, nDatasets - 1, 1) + if (l_altern) then + modidx := array_append_record( \ + (/ref_ind, alt_ind/), \ + modidx(ind(modidx.ne.ref_ind .and. 
modidx.ne.alt_ind)), 0) + else + modidx := array_append_record(ref_ind, modidx(ind(modidx.ne.ref_ind)), 0) + end if + + if (isatt(diag_script_info, "annots")) then + annots = metadata_att_as_array(info_items, diag_script_info@annots) + else + annots = project_style(info_items, diag_script_info, "annots") + end if + + ; Loop over datasets + do ii = 0, dimsizes(modidx) - 1 + + imod = modidx(ii) + + log_debug("Processing " + datasetnames(imod)) + + ; Set path for saving processed data + system("mkdir -p " + config_user_info@work_dir) + fname = \ + str_join((/"perfmetrics", "cycle_zonal", annots(imod), var0, \ + diag_script_info@time_avg, diag_script_info@region/), "_") + workpath = config_user_info@work_dir + fname + ".nc" + + ; Determine start/end year + start_year = info_items[imod]@start_year + end_year = info_items[imod]@end_year + + ; Read data + var = read_data(info_items[imod]) + + ; Calculate zonal mean + var_reg = dim_avg_Wrap(var) + delete(var) + + ; Calculate time average + var_avg = time_operations(var_reg, start_year, end_year, "average", \ + diag_script_info@time_avg, True) + delete(var_reg) + + ; Standard deviation calculation for this ptype is not yet implemented in + ; diag_scripts/shared/statistics.ncl, but is anyway not required here + + ; Store in global array + var_all = new(array_append_record(dimsizes(var_avg), 2, 0), float) + var_all!3 = "statistic" + var_all&statistic = (/"mean", "stddev"/) + var_all(:, :, :, 0) = var_avg + var_all@var = var0 + var_all@diag_script = (/DIAG_SCRIPT/) + copy_VarAtts(diag_script_info, var_all) + var_all@ref_model = variable_info[0]@reference_dataset + delete(var_avg) + + ; Store reference dataset + if (imod.eq.ref_ind) then + var_ref = var_all + end if + + ; Store alternative dataset + if (l_altern) then + if (imod.eq.alt_ind) then + var_alt = var_all + end if + end if + + ; Write output + var_all@ncdf = workpath + ncdf = ncdf_write(var_all, workpath) + + ; Call provenance logger + log_provenance(ncdf, \ + "n/a", \ + "n/a", \ + (/"mean", "rmsd"/), \ + diag_script_info@region, \ + "other", \ + (/"winterstein_franziska", "righi_mattia", \ + "eyring_veronika"/), \ + (/"righi15gmd", "gleckler08jgr"/), \ + info_items[imod]@filename) + + ; Calculate grading + if (.not.diag_script_info@calc_grading) then + continue + end if + + ; Loop over requested metrics + do met = 0, nmetrics - 1 + + if (diag_script_info@metric(met).eq."taylor") then + + locidx = ind(datasetnames(imod).eq.taylor&models) + if (ismissing(locidx)) then + continue + end if + taylor(0, locidx, 0) = \ + calculate_metric(var_all(:, :, :, 0), var_ref(:, :, :, 0), \ + "stddev_ratio") + taylor(0, locidx, 1) = \ + calculate_metric(var_all(:, :, :, 0), var_ref(:, :, :, 0), \ + "correlation") + + elseif (diag_script_info@metric(met).eq."SMPI") then + + locidx = ind(datasetnames(imod).eq.smpi&models) + if (ismissing(locidx)) then + continue + end if + smpi(:, locidx) = \ + (/calculate_metric(var_all(:, :, :, 0), var_ref(:, :, :, 0), \ + "SMPI")/) + + else + + locidx = ind(datasetnames(imod).eq.grading&models) + if (ismissing(locidx)) then + continue + end if + grading(met, 0, locidx, 0) = \ + calculate_metric(var_all(:, :, :, 0), var_ref(:, :, :, 0), \ + diag_script_info@metric(met)) + if (l_altern) then + grading(met, 0, locidx, 1) = \ + calculate_metric(var_all(:, :, :, 0), var_alt(:, :, :, 0), \ + diag_script_info@metric(met)) + end if + + end if + + end do + + end do + +end diff --git a/esmvaltool/diag_scripts/perfmetrics/latlon.ncl b/esmvaltool/diag_scripts/perfmetrics/latlon.ncl 
index de9b0ea1e5..10bbaa242f 100644 --- a/esmvaltool/diag_scripts/perfmetrics/latlon.ncl +++ b/esmvaltool/diag_scripts/perfmetrics/latlon.ncl @@ -1,9 +1,10 @@ ; ############################################################################# ; PROCEDURE FOR THE LATLON PLOT OF THE PERFORMANCE METRICS -; Authors: Mattia Righi (DLR, Germany) and Franziska Frank (DLR, Germany) +; Authors: Mattia Righi (DLR, Germany) and Franziska Winterstein (DLR, Germany) ; ESMVal project ; ############################################################################# +load "$diag_scripts/shared/plot/style.ncl" load "$diag_scripts/shared/plot/contour_maps.ncl" procedure perfmetrics_ptype_script() @@ -13,6 +14,8 @@ begin modidx = ispan(0, nDatasets - 1, 1) modidx := array_append_record(ref_ind, modidx(ind(modidx.ne.ref_ind)), 0) + annots = project_style(info_items, diag_script_info, "annots") + ; Loop over datasets do ii = 0, dimsizes(modidx) - 1 @@ -20,13 +23,26 @@ begin log_debug("Processing " + datasetnames(imod)) + ; Check if difference is possible + if (diag_script_info@plot_diff .and. imod.ne.ref_ind) then + l_diff = True + else + l_diff = False + end if + ; Set path for saving processed data - fullpath = config_user_info@work_dir + "/" - system("mkdir -p " + fullpath) - fname = basename(systemfunc("basename " + info_items[imod]@filename)) - fname = fname + "_" + basename(DIAG_SCRIPT) + "_latlon" - fname = fname + "_" + diag_script_info@time_avg - procpath = fullpath + fname + "_Global.nc" + system("mkdir -p " + config_user_info@work_dir) + fname_abs = \ + str_join((/"perfmetrics", "latlon", annots(imod), var0, \ + diag_script_info@time_avg, diag_script_info@region/), "_") + workpath_abs = config_user_info@work_dir + fname_abs + ".nc" + plotpath_abs = config_user_info@plot_dir + fname_abs + fname_diff = \ + str_join((/"perfmetrics", "latlon", \ + annots(imod) + "-" + annots(ref_ind), var0, \ + diag_script_info@time_avg, diag_script_info@region/), "_") + workpath_diff = config_user_info@work_dir + fname_diff + ".nc" + plotpath_diff = config_user_info@plot_dir + fname_diff ; Determine start/end year start_year = info_items[imod]@start_year @@ -43,7 +59,7 @@ begin diag_script_info@time_avg, "None") delete(var) - ; Write output + ; Store in global array var_all = new(array_append_record(dimsizes(var_avg), 2, 0), float) var_all!2 = "statistic" var_all&statistic = (/"mean", "stddev"/) @@ -53,34 +69,42 @@ begin var_all@diag_script = (/DIAG_SCRIPT/) copy_VarAtts(diag_script_info, var_all) var_all@ref_model = variable_info[0]@reference_dataset - if (config_user_info@write_netcdf.eq."True") then - var_all@ncdf = procpath - ncdf_outfile = ncdf_write(var_all, procpath) - end if + delete(var_avg) + delete(var_std) ; Store reference dataset if (imod.eq.ref_ind) then var_ref = var_all end if - ; Plotting - if (.not.diag_script_info@draw_plots) then - continue + ; Calculate difference + if (l_diff) then + var_diff = var_all(:, :, 0) + var_diff = var_all(:, :, 0) - var_ref(:, :, 0) + end if + + ; Write output + var_all@ncdf = workpath_abs + ncdf_abs = ncdf_write(var_all, workpath_abs) + if (l_diff) then + var_diff@ncdf = workpath_diff + ncdf_diff = ncdf_write(var_diff, workpath_diff) end if ; Convert units for plotting (if required) if (isatt(diag_script_info, "plot_units")) then var_all = convert_units(var_all, diag_script_info@plot_units) + if (l_diff) + var_diff = convert_units(var_diff, diag_script_info@plot_units) + end if end if ; Absolute plot - outfile = config_user_info@plot_dir + datasetnames(imod) + "_" + var0 + \ 
- "_latlon_" + diag_script_info@time_avg + "_Glob" plot_var = var_all(:, :, 0) plot_var@projection = diag_script_info@projection plot_var@res_cnLinesOn = False plot_var@res_cnLevelSelectionMode = "ExplicitLevels" - plot_var@res_tiMainString = datasetnames(imod) + plot_var@res_tiMainString = annots(imod) plot_var@res_cnLevels = diag_script_info@abs_levs plot_var@res_gsnLeftString = \ plot_var@long_name + " [" + format_units(plot_var@units) + "]" @@ -89,44 +113,34 @@ begin plot_var@res_gsnRightString = \ sprintf("%5.2f", gavg) + " " + format_units(plot_var@units) end if - wks = gsn_open_wks(file_type, outfile) + wks = gsn_open_wks(file_type, plotpath_abs) gsn_define_colormap(wks, diag_script_info@latlon_cmap) plot = contour_map_ce(wks, plot_var, var0) draw(plot) frame(wks) - log_info(" gv " + outfile + "." + file_type) delete(plot_var@res_cnLevels) delete(wks) delete(plot) - ; collect meta-data and call ESMValMD function - nc_file = "nonexistent" - plot_file = outfile+"."+file_type - caption = "Geographical distribution of variable: " + var0 - statistics := (/"mean", "rmsd"/) - domain = "global" - plottype = "geo" - diag_des = "Calculates and (optionally) plots annual/seasonal cycles, " \ - + "zonal means, lat-lon fields and time-lat-lon fields from " \ - + "input 2D/3D monthly data. The calculated fields can be " \ - + "plotted as difference w.r.t. a given reference dataset. " \ - + "It also calculates grading and taylor metrics." - authors = (/"righ_ma", "fran_fr", "eyri_ve", "gott_kl", "senf_da"/) - references = (/"righi15gmd", "gleckler08jgr"/) - infiles := (/"infile1", "infile2"/) - ; ESMValMD(nc_file, plot_file, caption, statistics, domain, plottype, \ - ; diag_des, authors, references, infiles) + ; Call provenance logger + log_provenance(ncdf_abs, \ + plotpath_abs + "." + file_type, \ + "Geographical distribution of variable " + var0, \ + (/"mean", "rmsd"/), \ + diag_script_info@region, \ + "geo", \ + (/"winterstein_franziska", "righi_mattia", \ + "eyring_veronika"/), \ + (/"righi15gmd", "gleckler08jgr"/), \ + info_items[imod]@filename) ; Difference plot to the reference - if (.not.diag_script_info@plot_diff .or. imod.eq.ref_ind) then + if (.not.l_diff) then continue end if - plot_var = var_all(:, :, 0) - var_ref(:, :, 0) - outfile = config_user_info@plot_dir + datasetnames(imod) + "-" + \ - datasetnames(ref_ind) + "_" + var0 + "_latlon_" + \ - diag_script_info@time_avg + "_Glob" - plot_var@res_tiMainString = \ - datasetnames(imod) + " - " + datasetnames(ref_ind) + plot_var = var_diff + plot_var@projection = diag_script_info@projection + plot_var@res_tiMainString = annots(imod) + " - " + annots(ref_ind) if (isatt(diag_script_info, "t_test")) then plot_var@res_gsnLeftString = plot_var@res_gsnLeftString + " - " + \ sprinti("%2i", toint(100 * diag_script_info@conf_level)) + "% c.l." 
@@ -135,69 +149,73 @@ begin plot_var@res_cnLevels = diag_script_info@diff_levs plot_var@res_cnMissingValFillColor = "gray70" plot_var@res_gsnSpreadColorEnd = -2 - plot_var@projection = diag_script_info@projection if (diag_script_info@show_global_avg) then ; append global average gavg = area_operations(plot_var, -90., 90., 0., 360., "average", True) plot_var@res_gsnRightString = \ sprintf("%5.2f", gavg) + " " + format_units(plot_var@units) end if - wks = gsn_open_wks(file_type, outfile) + wks = gsn_open_wks(file_type, plotpath_diff) gsn_merge_colormaps(wks, "temp_19lev", "gray70") plot = contour_map_ce(wks, plot_var, var0) delete(plot_var@res_cnLevels) ; Apply t-test (assuming different population variances) - if (.not.diag_script_info@t_test) then - continue - end if - x1 = var_all(:, :, 0) - x2 = var_ref(:, :, 0) - s1 = var_all(:, :, 1) ^ 2 - s2 = var_ref(:, :, 1) ^ 2 - n1 = info_items[imod]@end_year - info_items[imod]@start_year + 1 - n2 = info_items[ref_ind]@end_year - info_items[ref_ind]@start_year + 1 - prob = ttest(x1, s1, n1, x2, s2, n2, True, False) - mask_var = 1. - prob - copy_VarCoords(plot_var, mask_var) - delete(prob) - - ; Mask non-significant values - res2 = True - res2@cnLevelSelectionMode = "ExplicitLevels" - res2@cnLevels = (/0.0, diag_script_info@conf_level, 1.0/) - res2@cnInfoLabelOn = False - res2@cnLinesOn = False - res2@cnLineLabelsOn = False - res2@gsnDraw = False - res2@gsnFrame = False - plot2 = gsn_csm_contour(wks, mask_var, res2) - opt = True - if (diag_script_info@stippling) then - opt@gsnShadeFillType = "pattern" - opt@gsnShadeLow = 17 - else - opt@gsnShadeFillType = "color" - opt@gsnShadeLow = "gray70" + caption = "Difference in geographical distribution of variable " + var0 + if (diag_script_info@t_test) then + x1 = var_all(:, :, 0) + x2 = var_ref(:, :, 0) + s1 = var_all(:, :, 1) ^ 2 + s2 = var_ref(:, :, 1) ^ 2 + n1 = info_items[imod]@end_year - info_items[imod]@start_year + 1 + n2 = info_items[ref_ind]@end_year - info_items[ref_ind]@start_year + 1 + prob = ttest(x1, s1, n1, x2, s2, n2, True, False) + mask_var = 1. - prob + copy_VarCoords(plot_var, mask_var) + delete(prob) + + ; Mask non-significant values + res2 = True + res2@cnLevelSelectionMode = "ExplicitLevels" + res2@cnLevels = (/0.0, diag_script_info@conf_level, 1.0/) + res2@cnInfoLabelOn = False + res2@cnLinesOn = False + res2@cnLineLabelsOn = False + res2@gsnDraw = False + res2@gsnFrame = False + plot2 = gsn_csm_contour(wks, mask_var, res2) + opt = True + if (diag_script_info@stippling) then + opt@gsnShadeFillType = "pattern" + opt@gsnShadeLow = 17 + caption = caption + " (non-significant points are stippled)" + else + opt@gsnShadeFillType = "color" + opt@gsnShadeLow = "gray70" + caption = caption + " (non-significant points are masked out in gray)" + end if + plot2 = gsn_contour_shade(plot2, diag_script_info@conf_level, 999., opt) + overlay(plot, plot2) + draw(plot) + frame(wks) + delete(wks) + delete(plot) + delete(plot2) + delete(mask_var) + delete(plot_var) + end if - plot2 = gsn_contour_shade(plot2, diag_script_info@conf_level, 999., opt) - overlay(plot, plot2) - draw(plot) - frame(wks) - log_info(" gv " + outfile + "." 
+ file_type) - delete(wks) - delete(plot) - delete(plot2) - delete(mask_var) - delete(plot_var) - - ; collect meta-data and call ESMValMD function - nc_file = "nonexistent" - plot_file = outfile+"."+file_type - caption = "Difference in geographical distribution of variable: " + var0 - statistics := (/"mean", "rmsd", "diff"/) - infiles := (/"infile1", "infile2"/) - ; ESMValMD(nc_file, plot_file, caption, statistics, domain, plottype, \ - ; diag_des, authors, references, infiles) + + ; Call provenance logger + log_provenance(ncdf_diff, \ + plotpath_diff + "." + file_type, \ + caption, \ + (/"mean", "rmsd", "diff"/), \ + diag_script_info@region, \ + "zonal", \ + (/"winterstein_franziska", "righi_mattia", \ + "eyring_veronika"/), \ + (/"righi15gmd", "gleckler08jgr"/), \ + info_items[imod]@filename) end do diff --git a/esmvaltool/diag_scripts/perfmetrics/main.ncl b/esmvaltool/diag_scripts/perfmetrics/main.ncl index 1d7f2a6bf2..3789709b79 100644 --- a/esmvaltool/diag_scripts/perfmetrics/main.ncl +++ b/esmvaltool/diag_scripts/perfmetrics/main.ncl @@ -1,6 +1,6 @@ ; ############################################################################# ; MAIN SCRIPT FOR PERFORMANCE METRICS -; Authors: Mattia Righi (DLR, Germany) and Franziska Frank (DLR, Germany) +; Authors: Mattia Righi (DLR, Germany) and Franziska Winterstein (DLR, Germany) ; ESMVal project ; ############################################################################# ; @@ -29,7 +29,6 @@ ; (default: False) ; projection: map projection for lat-lon plots (default: ; CylindricalEquidistant) -; draw_plots: draw plots (default: True) ; plot_diff: draw difference plots (default: False) ; calc_grading: calculate grading (default: False) ; stippling: use stippling to mark stat. significant differences (default: @@ -44,6 +43,8 @@ ; zonal_ymin (for zonal): minimum pressure on the plots (default: 5. 
hPa) ; latlon_cmap (for latlon): color table (default: "amwg_blueyellowred") ; plot_units: plotting units (if different from standard CMOR units) +; add_tropopause: add an optional tropopause outline to the zonal plots +; res_*: any resource as applied to ncl gsn_csm_press_hgt plots ; ; Required variable_info attributes: ; reference_dataset: reference dataset to compare with (usually observations) ; ; Optional variable_info attributes: ; alternative_dataset: a second dataset to compare with ; ; Caveats ; ; Modification history -; 20180503-A_righ_ma: completely rewritten and modularized -; 20171215-A_righ_ma: merged with perfmetrics_grading and -; permetrics_taylor.ncl -; 20171124-A_righ_ma: completely revised to adapt it to the new backend -; (level selection, regridding and masking now done -; by the python preprocessor) -; 20161220-A_laue_ax: added option to set map projection for lat-lon plots -; (diag_script_info@projection) -; added option to choose how to plot t-test results: -; stippling or masking out in gray (lat-lon plots only) -; 20161019-A_laue_ax: changed plotting of t-test results: -; now stippling significant grid cells (old version: -; masking out non-significant values in gray) -; 20160628-A_righ_ma: moving ref_model specification from cfg- files to +; 20221209-winterstein_franziska: added optional tropopause and plotting +; 20220609-bock_lisa: added calculation of multi-model mean and median for +; each project +; 20211014-bock_lisa: added sorting by project +; 20200506-gier_bettina: implemented support for multiple occurrence of +; models with different experiments and ensembles +; 20190405-righi_mattia: added provenance logging +; 20190315-hassler_birgit: extended to smpi metric +; 20180503-righi_mattia: completely rewritten and modularized +; 20171215-righi_mattia: merged with perfmetrics_grading and +; perfmetrics_taylor.ncl +; 20171124-righi_mattia: completely revised to adapt it to the new backend +; (level selection, regridding and masking now done +; by the python preprocessor) +; 20161220-lauer_axel: added option to set map projection for lat-lon plots +; (diag_script_info@projection) +; added option to choose how to plot t-test results: +; stippling or masking out in gray (lat-lon plots only) +; 20161019-lauer_axel: changed plotting of t-test results: +; now stippling significant grid cells (old version: +; masking out non-significant values in gray) +; 20160628-righi_mattia: moving ref_model specification from cfg- files to ; recipe file -; 20160628-A_senf_da: added regridding for irregular grids (ESMF_regrid). -; 20151027-A_laue_ax: moved call to 'write_references' to the beginning -; of the code. -; 20151013-A_righ_ma: fixed t-test mask in lat-lon difference plots. -; 20150325-A_laue_ax: modified reference tags used for acknowledgements -; (projects, observations, etc.). -; 20150119-A-gott_kl: removed "grid", "region" from req_atts -; (for T2Ms vmrco). -; 20150113-A_gott_kl: reconciled generalised regridding with T1* & T0* -; 20140905-A_righ_ma: consistent regridding and missing values mask. -; 20140701-A_gott_kl: Adapted for T1M. -; 20140630-A_gott_kl: Adapted for T0Ms. -; 20131203-A_fran_fr: written. +; 20160628-senftleben_daniel: added regridding for irregular grids +; (ESMF_regrid). +; 20151027-lauer_axel: moved call to 'write_references' to the beginning +; of the code. +; 20151013-righi_mattia: fixed t-test mask in lat-lon difference plots. +; 20150325-lauer_axel: modified reference tags used for acknowledgements +; (projects, observations, etc.).
+; 20150119-gottschaldt_klaus-dirk: removed "grid", "region" from req_atts +; (for T2Ms vmrco). +; 20150113-gottschaldt_klaus-dirk: reconciled generalised regridding with +; T1* & T0* +; 20140905-righi_mattia: consistent regridding and missing values mask. +; 20140701-gottschaldt_klaus-dirk: Adapted for T1M. +; 20140630-gottschaldt_klaus-dirk: Adapted for T0Ms. +; 20131203-winterstein_franziska: written. ; ; ############################################################################# @@ -99,11 +110,23 @@ begin enter_msg(DIAG_SCRIPT, "") + vars = metadata_att_as_array(variable_info, "short_name") + ; Get variables and datasets var0 = variable_info[0]@short_name info_items = select_metadata_by_name(input_file_info, var0) nDatasets = ListCount(info_items) + if (dimsizes(vars) .gt. 1) then + var1 = variable_info[1]@short_name + info_items_1 = select_metadata_by_name(input_file_info, var1) + cnt = ListCount(info_items_1) + do ii = 0, cnt - 1 + ListAppend(info_items, info_items_1[ii]) + end do + nDatasets = ListCount(info_items) + end if + ; Check required diag_script_info attributes exit_if_missing_atts(diag_script_info, (/"plot_type", "time_avg", "region"/)) @@ -113,7 +136,8 @@ begin ; Store required attributes ptype = diag_script_info@plot_type - if (all(ptype.ne.(/"cycle", "zonal", "latlon", "cycle_latlon"/))) then + if (all(ptype.ne. \ + (/"cycle", "zonal", "latlon", "cycle_latlon", "cycle_zonal"/))) then error_msg("f", DIAG_SCRIPT, "", "plot_type " + ptype + " is not a " + \ "supported plot_type in this diagnostic") end if @@ -125,14 +149,13 @@ begin end if if ((ptype.eq."zonal" .or. ptype.eq."latlon") .and. \ - diag_script_info@region.ne."Global") then + diag_script_info@region.ne."global") then error_msg("f", DIAG_SCRIPT, "", "plot_type " + ptype + \ - " implemented only for region='Global'") + " implemented only for region 'global'") end if ; Set default values for non-required diag_script_info attributes set_default_att(diag_script_info, "projection", "CylindricalEquidistant") - set_default_att(diag_script_info, "draw_plots", True) set_default_att(diag_script_info, "plot_diff", False) set_default_att(diag_script_info, "calc_grading", False) set_default_att(diag_script_info, "stippling", False) @@ -141,6 +164,7 @@ begin set_default_att(diag_script_info, "zonal_ymin", 5.) set_default_att(diag_script_info, "zonal_cmap", "amwg_blueyellowred") set_default_att(diag_script_info, "latlon_cmap", "amwg_blueyellowred") + set_default_att(diag_script_info, "add_tropopause", False) ; Check consistency of diff plots settings if (diag_script_info@t_test .and. .not.diag_script_info@plot_diff) then @@ -164,6 +188,53 @@ begin ; Set dataset names datasetnames = metadata_att_as_array(info_items, "dataset") + ; Save projects + projectnames = metadata_att_as_array(info_items, "project") + + ; Extend model name by ensemble/experiment on multiple occurrence + if (.not. (dimsizes(datasetnames) .eq. \ + count_unique_values(datasetnames))) then + new_datasetnames = datasetnames + experiments = metadata_att_as_array(info_items, "exp") + ensembles = metadata_att_as_array(info_items, "ensemble") + + do imod = 0, dimsizes(datasetnames) - 1 + do jmod = 0, dimsizes(datasetnames) - 1 + if imod.eq.jmod then + continue + else + if (datasetnames(imod) .eq. datasetnames(jmod)) then + if (experiments(imod) .ne.
experiments(jmod)) then + new_datasetnames(imod) = new_datasetnames(imod) + " " + \ + experiments(imod) + break + end if + end if + end if + end do + end do + do imod = 0, dimsizes(datasetnames) - 1 + do jmod = 0, dimsizes(datasetnames) - 1 + if imod.eq.jmod then + continue + else + if (datasetnames(imod) .eq. datasetnames(jmod)) then + if (ensembles(imod) .ne. ensembles(jmod)) then + new_datasetnames(imod) = new_datasetnames(imod) + " " + \ + ensembles(imod) + break + end if + end if + end if + end do + end do + datasetnames = new_datasetnames + delete(new_datasetnames) + end if + + ; Save list of preproc files for provenance in collect.ncl + preproc_files = metadata_att_as_array(info_items, "filename") + ; Check for reference dataset definition if (variable_info[0]@reference_dataset.eq."None") then error_msg("f", DIAG_SCRIPT, "", "no reference dataset is specified") @@ -182,10 +253,8 @@ begin end if ; Create output plot directory - if (diag_script_info@draw_plots) then - plot_dir = config_user_info@plot_dir - system("mkdir -p " + plot_dir) - end if + plot_dir = config_user_info@plot_dir + system("mkdir -p " + plot_dir) ; Plot file type file_type = config_user_info@output_file_type @@ -196,33 +265,6 @@ begin ; Grading settings if (diag_script_info@calc_grading) then - ; Define variable name - region_name = "" ; priority 3 - location = "" ; priority 3 - if (isatt(diag_script_info, "aux_info")) then - region_name = "_" + diag_script_info@aux_info ; priority 2 - location = " - " + diag_script_info@aux_info ; priority 2 - end if - if (isatt(diag_script_info, "location")) then - location = " - " + diag_script_info@location ; priority 1 - end if - if (isatt(diag_script_info, "region")) then - region_name = "_" + region@name ; priority 1 - location = " - " + diag_script_info@region - end if - altitude = "" ; priority 2 - if (isatt(diag_script_info, "altitude")) then - altitude = " - " + diag_script_info@altitude ; priority 1 - end if - if (isatt(diag_script_info, "level")) then - altitude = " - " + diag_script_info@level + " hPa" ; priority 1 - end if - var0_grade = var0 - var0_grade = var0_grade + region_name - if (isatt(diag_script_info, "level")) then - var0_grade = var0_grade + "-" + diag_script_info@level ; FIX-ME - end if - ; Define grading arrays nmetrics = dimsizes(diag_script_info@metric) ncdf_dir = new(nmetrics, string) @@ -233,11 +275,13 @@ begin grading!1 = "diagnostics" ; dummy coord. to facilitate appending grading!2 = "models" grading!3 = "reference" - grading&diagnostics = var0_grade + grading&diagnostics = \ + variable_info[0]@diagnostic + "-" + diag_script_info@region grading&models = remove_index(datasetnames, ref_inds) if (isdim(grading, "reference")) then grading&reference = datasetnames(ref_inds) end if + grading@projects = str_join(remove_index(projectnames, ref_inds), " ") ; Special case Taylor if (any(diag_script_info@metric.eq."taylor")) then @@ -246,15 +290,27 @@ begin taylor!0 = "diagnostics" ; dummy coord. 
to facilitate appending taylor!1 = "models" taylor!2 = "statistic" - taylor&diagnostics = var0_grade + taylor&diagnostics = \ + variable_info[0]@diagnostic + "-" + diag_script_info@region taylor&statistic = (/"stddev_ratio", "correlation"/) taylor&models = remove_index(datasetnames, ref_ind) end if + ; Special case SMPI + if (any(diag_script_info@metric.eq."SMPI")) then + nModels = dimsizes(datasetnames) - 1 ; always 1 reference model + smpi = new((/diag_script_info@smpi_n_bootstrap + 1, nModels/), float) + smpi!0 = "bootstrap_member" + smpi!1 = "models" + smpi&bootstrap_member = ispan(0, diag_script_info@smpi_n_bootstrap, 1) + smpi&models = remove_index(datasetnames, ref_ind) + end if + ; Define grading filename do met = 0, nmetrics - 1 - ncdf_dir(met) = config_user_info@work_dir + "/" + \ - diag_script_info@metric(met) + "_" + var0_grade + ".nc" + ncdf_dir(met) = config_user_info@work_dir + \ + diag_script_info@metric(met) + "_" + \ + variable_info[0]@diagnostic + "-" + diag_script_info@region + ".nc" end do end if @@ -274,67 +330,152 @@ begin do met = 0, nmetrics - 1 - if (diag_script_info@metric(met).eq."taylor") then - metric = taylor - else + if (diag_script_info@metric(met).eq."RMSD") then + metric = grading(met, :, :, :) - end if + metric@title = diag_script_info@metric(met) + " metric" + metric@long_name = \ + "Grading table of metric " + diag_script_info@metric(met) + metric@var = "grade" - ; Apply normalization - if (diag_script_info@metric(met).ne."taylor") then + ; Normalization do iobs = 0, nobs - 1 metric(:, :, iobs) = \ normalize_metric(metric(:, :, iobs), \ diag_script_info@normalization(met)) end do - end if - ; Reduce dimensionality if no alternative dataset - if (.not.l_altern .and. diag_script_info@metric(met).ne."taylor") then - metric := metric(:, :, 0) - delete(metric@reference) + ; Reduce dimensionality if no alternative dataset + if (.not.l_altern) then + metric := metric(:, :, 0) + delete(metric@reference) + end if + + ; Provenance information + statistics = (/"rmsd"/) + authors = (/"winterstein_franziska", "righi_mattia", \ + "eyring_veronika"/) + references = (/"righi15gmd", "gleckler08jgr"/) + + elseif (diag_script_info@metric(met).eq."BIAS") then + + metric = grading(met, :, :, :) + metric@title = diag_script_info@metric(met) + " metric" + metric@long_name = \ + "Grading table of metric " + diag_script_info@metric(met) + metric@var = "grade" + + ; Normalization + do iobs = 0, nobs - 1 + metric(:, :, iobs) = \ + normalize_metric(metric(:, :, iobs), \ + diag_script_info@normalization(met)) + end do + + ; Reduce dimensionality if no alternative dataset + if (.not.l_altern) then + metric := metric(:, :, 0) + delete(metric@reference) + end if + + ; Provenance information + statistics = (/"diff"/) + authors = (/"winterstein_franziska", "righi_mattia", \ + "eyring_veronika"/) + references = (/"righi15gmd", "gleckler08jgr"/) + + elseif (diag_script_info@metric(met).eq."taylor") then + + metric = taylor + metric@title = diag_script_info@metric(met) + " metric" + metric@long_name = \ + "Grading table of metric " + diag_script_info@metric(met) + metric@var = "grade" + + ; Provenance information + statistics = (/"rmsd", "corr"/) + authors = (/"winterstein_franziska", "righi_mattia", \ + "eyring_veronika"/) + references = (/"righi15gmd", "gleckler08jgr"/) + + elseif (diag_script_info@metric(met).eq."SMPI") then + + metric = smpi + metric@title = "metrics" + metric@long_name = "1 variable's Performance Index for " + \ + "the Single Model Performance Index" + metric@var = 
"performance_index" + metric@invar = var0 + metric@ensemble_name = diag_script_info@normalization(met) + + ; Normalization + ens_idx = new(dimsizes(metric&models), integer) + atts = True + atts@project = diag_script_info@normalization(met) + info = select_metadata_by_atts(input_file_info, atts) + delete(atts) + + do ii = 0, dimsizes(ens_idx) - 1 + if (dimsizes(info).ne.0) then + ens_idx(ii) = ii + end if + end do + if (all(ismissing(ens_idx))) then + error_msg("f", DIAG_SCRIPT, "", "No datasets for the selected " + \ + "normalization (" + diag_script_info@normalization(met) + \ + ") found") + end if + ens_idx := ens_idx(ind(.not.ismissing(ens_idx))) + do iboot = 0, dimsizes(metric&bootstrap_member)-1 + metric(iboot, :) = metric(iboot, :) / avg(metric(iboot, ens_idx)) + end do + + ; Provenance information + statistics = "smpi" + authors = (/"gier_bettina", "hassler_birgit"/) + references = (/"rk2008bams"/) + + else + error_msg("f", DIAG_SCRIPT, "", "unrecognized metric " + \ + diag_script_info@metric(met)) end if - ; Attach attributes to the results - metric@title = diag_script_info@metric(met) + " metric" - metric@long_name = \ - "Grading table of metric " + diag_script_info@metric(met) + ; Common attributes metric@metric = diag_script_info@metric(met) metric@diag_script = (/DIAG_SCRIPT/) - metric@var = "grade" - metric@region = location - metric@num_climofiles = nDatasets - do imod = 0, nDatasets - 1 - num_climo = "climofile_" + imod - metric@$num_climo$ = info_items[imod]@filename + metric@region = region@name + metric@num_preproc_files = dimsizes(preproc_files) + do imod = 0, metric@num_preproc_files - 1 + num_preproc = "preproc_file_" + imod + metric@$num_preproc$ = preproc_files(imod) end do metric@ncdf_dir = ncdf_dir(met) - - ; Write NetCDF output ncdf_outfile = ncdf_write(metric, metric@ncdf_dir) + ; Call provenance logger + log_provenance(ncdf_outfile, "n/a", "n/a", statistics, \ + diag_script_info@region, "other", authors, references, \ + preproc_files) + delete([/statistics, authors, references/]) + ; Write results of temporary grading list temp_dir = config_user_info@work_dir + "/" + \ diag_script_info@metric(met) + ".nc" if (fileexists(temp_dir)) then - temp_file = addfile(temp_dir, "r") - temp_list = temp_file->temp_list - temp_list := tostring(temp_list) - temp_list := array_append_record(temp_list, ncdf_dir(met), 0) - temp_list := tochar(temp_list) system("rm -f " + temp_dir) - else - ncdf_char = tochar(ncdf_dir(met)) - temp_list = new((/1, dimsizes(ncdf_char)/), character) - temp_list(0, :) = ncdf_char end if + ncdf_char = tochar(ncdf_dir(met)) + temp_list = new((/1, dimsizes(ncdf_char)/), character) + temp_list(0, :) = ncdf_char + delete(ncdf_char) + ; Create new file and add list temp = addfile(temp_dir, "c") temp->temp_list = temp_list - delete([/metric, temp_dir, temp_list, ncdf_char/]) + delete([/metric, temp_dir, temp_list/]) end do diff --git a/esmvaltool/diag_scripts/perfmetrics/zonal.ncl b/esmvaltool/diag_scripts/perfmetrics/zonal.ncl index 66beb31a14..2f8b96f34c 100644 --- a/esmvaltool/diag_scripts/perfmetrics/zonal.ncl +++ b/esmvaltool/diag_scripts/perfmetrics/zonal.ncl @@ -1,9 +1,10 @@ ; ############################################################################# ; PROCEDURE FOR THE ZONAL PLOT OF THE PERFORMANCE METRICS -; Authors: Mattia Righi (DLR, Germany) and Franziska Frank (DLR, Germany) +; Authors: Mattia Righi (DLR, Germany) and Franziska Winterstein (DLR, Germany) ; ESMVal project ; 
#############################################################################
+load "$diag_scripts/shared/plot/style.ncl"
 load "$diag_scripts/shared/plot/zonalmean_profile.ncl"

 procedure perfmetrics_ptype_script()
 begin
@@ -13,19 +14,41 @@ begin
   modidx = ispan(0, nDatasets - 1, 1)
   modidx := array_append_record(ref_ind, modidx(ind(modidx.ne.ref_ind)), 0)

+  if (isatt(diag_script_info, "annots")) then
+    annots = metadata_att_as_array(info_items, diag_script_info@annots)
+  else
+    annots = project_style(info_items, diag_script_info, "annots")
+  end if
+
+  print(annots)
+
+  ; Loop over datasets
   do ii = 0, dimsizes(modidx) - 1

     imod = modidx(ii)

     log_debug("Processing " + datasetnames(imod))

+    ; Check if difference is possible
+    if (diag_script_info@plot_diff .and. imod.ne.ref_ind) then
+      l_diff = True
+    else
+      l_diff = False
+    end if
+
     ; Set path for saving processed data
-    fullpath = config_user_info@work_dir + "/"
-    system("mkdir -p " + fullpath)
-    fname = basename(systemfunc("basename " + info_items[imod]@filename))
-    fname = fname + "_" + basename(DIAG_SCRIPT) + "_zonal"
-    fname = fname + "_" + diag_script_info@time_avg
-    procpath = fullpath + fname + "_Global.nc"
+    system("mkdir -p " + config_user_info@work_dir)
+    fname_abs = \
+      str_join((/"perfmetrics", "zonal", annots(imod), var0, \
+                 diag_script_info@time_avg, diag_script_info@region/), "_")
+    workpath_abs = config_user_info@work_dir + fname_abs + ".nc"
+    plotpath_abs = config_user_info@plot_dir + fname_abs
+    fname_diff = \
+      str_join((/"perfmetrics", "zonal", \
+                 annots(imod) + "-" + annots(ref_ind), var0, \
+                 diag_script_info@time_avg, diag_script_info@region/), "_")
+    workpath_diff = config_user_info@work_dir + fname_diff + ".nc"
+    plotpath_diff = config_user_info@plot_dir + fname_diff

     ; Determine start/end year
     start_year = info_items[imod]@start_year
@@ -33,7 +56,6 @@ begin

     ; Read data
     var = read_data(info_items[imod])
-    dnames = getVarDimNames(var)

     ; Calculate zonal mean
     var_zon = dim_avg_Wrap(var)
@@ -47,7 +69,7 @@ begin
                           diag_script_info@time_avg, "None")
     delete(var_zon)

-    ; Write output
+    ; Store in global array
     var_all = new(array_append_record(dimsizes(var_avg), 2, 0), float)
     var_all!2 = "statistic"
     var_all&statistic = (/"mean", "stddev"/)
@@ -57,74 +79,72 @@ begin
     var_all@diag_script = (/DIAG_SCRIPT/)
     copy_VarAtts(diag_script_info, var_all)
     var_all@ref_model = variable_info[0]@reference_dataset
-    if (config_user_info@write_netcdf.eq."True") then
-      var_all@ncdf = procpath
-      ncdf_outfile = ncdf_write(var_all, procpath)
-    end if
+    delete(var_avg)
+    delete(var_std)

     ; Store reference dataset
     if (imod.eq.ref_ind) then
       var_ref = var_all
     end if

-    ; Plotting
-    if (.not.diag_script_info@draw_plots) then
-      continue
+    ; Calculate difference
+    if (l_diff) then
+      var_diff = var_all(:, :, 0)
+      var_diff = var_all(:, :, 0) - var_ref(:, :, 0)
+    end if
+
+    ; Write output
+    var_all@ncdf = workpath_abs
+    ncdf_abs = ncdf_write(var_all, workpath_abs)
+    if (l_diff) then
+      var_diff@ncdf = workpath_diff
+      ncdf_diff = ncdf_write(var_diff, workpath_diff)
     end if

     ; Convert units for plotting (if required)
     if (isatt(diag_script_info, "plot_units")) then
       var_all = convert_units(var_all, diag_script_info@plot_units)
+      if (l_diff) then
+        var_diff = convert_units(var_diff, diag_script_info@plot_units)
+      end if
     end if

     ; Absolute plot
-    outfile = config_user_info@plot_dir + datasetnames(imod) + "_" + var0 + \
-      "_zonal_" + diag_script_info@time_avg + "_Glob"
     plot_var = var_all(:, :, 0)
-    plot_var@res_tiMainString = datasetnames(imod)
+    plot_var@res_tiMainString =
annots(imod) plot_var@res_cnLevelSelectionMode = "ExplicitLevels" plot_var@res_cnLevels = diag_script_info@abs_levs plot_var@res_trYMinF = diag_script_info@zonal_ymin plot_var@res_gsnLeftString = \ plot_var@long_name + " [" + format_units(plot_var@units) + "]" - wks = gsn_open_wks(file_type, outfile) + copy_VarMeta(diag_script_info, plot_var) + wks = gsn_open_wks(file_type, plotpath_abs) gsn_define_colormap(wks, diag_script_info@zonal_cmap) plot = zonalmean_profile(wks, plot_var, var0) draw(plot) frame(wks) - log_info(" gv " + outfile + "." + file_type) delete(plot_var@res_cnLevels) delete(wks) delete(plot) - ; collect meta-data and call ESMValMD function - nc_file = "nonexistent" - plot_file = outfile+"."+file_type - caption = "Zonal plot of variable: " + var0 - statistics := (/"mean", "rmsd"/) - domain = "global" - plottype = "zonal" - diag_des = "Calculates and (optionally) plots annual/seasonal cycles, " \ - + "zonal means, lat-lon fields and time-lat-lon fields from " \ - + "input 2D/3D monthly data. The calculated fields can be " \ - + "plotted as difference w.r.t. a given reference dataset. " \ - + "It also calculates grading and taylor metrics." - authors = (/"righ_ma", "fran_fr", "eyri_ve", "gott_kl", "senf_da"/) - references = (/"righi15gmd", "gleckler08jgr"/) - infiles := (/"infile1", "infile2"/) - ; ESMValMD(nc_file, plot_file, caption, statistics, domain, plottype, \ - ; diag_des, authors, references, infiles) + ; Call provenance logger + log_provenance(ncdf_abs, \ + plotpath_abs + "." + file_type, \ + "Zonal mean of variable " + var0, \ + (/"mean", "stddev"/), \ + diag_script_info@region, \ + "zonal", \ + (/"winterstein_franziska", "righi_mattia", \ + "eyring_veronika"/), \ + (/"righi15gmd", "gleckler08jgr"/), \ + info_items[imod]@filename) ; Difference plot to the reference - if (.not.diag_script_info@plot_diff .or. imod.eq.ref_ind) then + if (.not.l_diff) then continue end if - plot_var = var_all(:, :, 0) - var_ref(:, :, 0) - outfile = config_user_info@plot_dir + datasetnames(imod) + "-" + \ - datasetnames(ref_ind) + "_" + var0 + "_zonal_" + \ - diag_script_info@time_avg + "_Glob" - plot_var@res_tiMainString = \ - datasetnames(imod) + " - " + datasetnames(ref_ind) + plot_var = var_diff + plot_var@res_tiMainString = annots(imod) + " - " + annots(ref_ind) if (isatt(diag_script_info, "t_test")) then plot_var@res_gsnLeftString = plot_var@res_gsnLeftString + " - " + \ sprinti("%2i", toint(100 * diag_script_info@conf_level)) + "% c.l." @@ -133,61 +153,67 @@ begin plot_var@res_cnLevels = diag_script_info@diff_levs plot_var@res_cnMissingValFillColor = "gray70" plot_var@res_gsnSpreadColorEnd = -2 - wks = gsn_open_wks(file_type, outfile) + wks = gsn_open_wks(file_type, plotpath_diff) gsn_merge_colormaps(wks, "temp_19lev", "gray70") plot = zonalmean_profile(wks, plot_var, var0) delete(plot_var@res_cnLevels) ; Apply t-test (assuming different population variances) - if (.not.diag_script_info@t_test) then - continue - end if - x1 = var_all(:, :, 0) - x2 = var_ref(:, :, 0) - s1 = var_all(:, :, 1) ^ 2 - s2 = var_ref(:, :, 1) ^ 2 - n1 = info_items[imod]@end_year - info_items[imod]@start_year + 1 - n2 = info_items[ref_ind]@end_year - info_items[ref_ind]@start_year + 1 - prob = ttest(x1, s1, n1, x2, s2, n2, True, False) - plot_var = 1. 
- prob - delete(prob) - - ; Mask non-significant values - plot_var@res_cnMissingValFillColor = -1 - plot_var@res_gsnDraw = False - plot_var@res_gsnFrame = False - plot_var@res_cnLevelSelectionMode = "ExplicitLevels" - plot_var@res_cnLevels := diag_script_info@conf_level - if (diag_script_info@stippling) then - plot_var@res_cnFillColors = (/"transparent", "black"/) - plot_var@res_cnFillPattern = 17 - else - plot_var@res_cnFillColors = (/"transparent", "gray70"/) + caption = "Difference in zonal mean of variable " + var0 + if (diag_script_info@t_test) then + + x1 = var_all(:, :, 0) + x2 = var_ref(:, :, 0) + s1 = var_all(:, :, 1) ^ 2 + s2 = var_ref(:, :, 1) ^ 2 + n1 = info_items[imod]@end_year - info_items[imod]@start_year + 1 + n2 = info_items[ref_ind]@end_year - info_items[ref_ind]@start_year + 1 + prob = ttest(x1, s1, n1, x2, s2, n2, True, False) + plot_var = 1. - prob + delete(prob) + + ; Mask non-significant values + plot_var@res_cnMissingValFillColor = -1 + plot_var@res_gsnDraw = False + plot_var@res_gsnFrame = False + plot_var@res_cnLevelSelectionMode = "ExplicitLevels" + plot_var@res_cnLevels := diag_script_info@conf_level + if (diag_script_info@stippling) then + plot_var@res_cnFillColors = (/"black", "transparent"/) + plot_var@res_cnFillPattern = 17 + caption = caption + " (non-significant points are stippled)" + else + plot_var@res_cnFillColors = (/"gray70", "transparent"/) + caption = caption + " (non-significant points are masked out in gray)" + end if + plot_var@res_cnInfoLabelOn = False + plot_var@res_cnLinesOn = False + plot_var@res_cnLineLabelsOn = False + plot_var@res_lbLabelBarOn = False + plot_var@res_gsnLeftString = "" + delete(plot_var@long_name) + plot2 = zonalmean_profile(wks, plot_var, var0) + overlay(plot, plot2) + draw(plot) + frame(wks) + delete(wks) + delete(plot) + delete(plot2) + delete(plot_var) + end if - plot_var@res_cnInfoLabelOn = False - plot_var@res_cnLinesOn = False - plot_var@res_cnLineLabelsOn = False - plot_var@res_lbLabelBarOn = False - plot_var@res_gsnLeftString = "" - delete(plot_var@long_name) - plot2 = zonalmean_profile(wks, plot_var, var0) - overlay(plot, plot2) - draw(plot) - frame(wks) - log_info(" gv " + outfile + "." + file_type) - delete(wks) - delete(plot) - delete(plot2) - delete(plot_var) - - ; collect meta-data and call ESMValMD function - nc_file = "nonexistent" - plot_file = outfile+"."+file_type - caption = "Difference in zonal plot of variable: " + var0 - statistics := (/"mean", "rmsd", "diff"/) - infiles := (/"infile1", "infile2"/) - ; ESMValMD(nc_file, plot_file, caption, statistics, domain, plottype, \ - ; diag_des, authors, references, infiles) + + ; Call provenance logger + log_provenance(ncdf_diff, \ + plotpath_diff + "." 
+                 file_type, \
+                 caption, \
+                 (/"mean", "stddev", "diff"/), \
+                 diag_script_info@region, \
+                 "zonal", \
+                 (/"winterstein_franziska", "righi_mattia", \
+                   "eyring_veronika"/), \
+                 (/"righi15gmd", "gleckler08jgr"/), \
+                 info_items[imod]@filename)

   end do

diff --git a/esmvaltool/diag_scripts/primavera/eady_growth_rate/eady_growth_rate.py b/esmvaltool/diag_scripts/primavera/eady_growth_rate/eady_growth_rate.py
new file mode 100644
index 0000000000..aa2a516c76
--- /dev/null
+++ b/esmvaltool/diag_scripts/primavera/eady_growth_rate/eady_growth_rate.py
@@ -0,0 +1,321 @@
+"""Diagnostic for PRIMAVERA Eady Growth Rate."""
+import logging
+import os
+import sys
+
+import cartopy.crs as ccrs
+import iris
+import iris.analysis
+import iris.cube
+import iris.quickplot as qplt
+import iris.util
+import matplotlib.pyplot as plt
+import numpy as np
+from dask import array as da
+from esmvalcore.preprocessor import (
+    annual_statistics,
+    extract_levels,
+    regrid,
+    seasonal_statistics,
+)
+
+from esmvaltool.diag_scripts.shared import (
+    ProvenanceLogger,
+    group_metadata,
+    names,
+    run_diagnostic,
+)
+
+logger = logging.getLogger(os.path.basename(__file__))
+
+
+class EadyGrowthRate:
+    """Class used to compute the Eady Growth Rate."""
+
+    def __init__(self, config):
+        """
+        Set diagnostic parameters and constants.
+
+        Parameters
+        ----------
+        config : dict
+            Dictionary containing configuration settings.
+        """
+        self.cfg = config
+        self.fill_value = 1e20
+        """Fill value."""
+        self.ref_p = 1000.0
+        """Reference pressure [hPa]."""
+        self.gravity = 9.80665
+        """Gravity [m/s2]."""
+        self.con = 0.3098
+        """Eady growth rate constant (dimensionless)."""
+        self.omega = 7.292e-5
+        """Rotation of the Earth [rad/s]."""
+        self.time_statistic = self.cfg['time_statistic']
+        """Time statistic to perform."""
+
+    def compute(self):
+        """Compute Eady Growth Rate and either its annual or seasonal mean."""
+        data = group_metadata(self.cfg['input_data'].values(), 'alias')
+        for alias in data:
+            var = group_metadata(data[alias], 'short_name')
+            temperature = iris.load_cube(var['ta'][0]['filename'])
+            plev = temperature.coord('air_pressure')
+
+            theta = self.potential_temperature(temperature, plev)
+
+            del temperature
+
+            geopotential = iris.load_cube(var['zg'][0]['filename'])
+
+            brunt = self.brunt_vaisala_frq(theta, geopotential)
+
+            lats = geopotential.coord('latitude')
+            fcor = self.coriolis(lats, geopotential.shape)
+
+            eastward_wind = iris.load_cube(var['ua'][0]['filename'])
+            if eastward_wind.shape != geopotential.shape:
+                eastward_wind = regrid(eastward_wind,
+                                       geopotential,
+                                       scheme='linear')
+
+            egr = self.eady_growth_rate(fcor, eastward_wind, geopotential,
+                                        brunt)
+
+            # convert from s-1 to day-1
+            cube_egr = eastward_wind.copy(egr * 86400)
+
+            cube_egr.standard_name = None
+            cube_egr.long_name = 'eady_growth_rate'
+            cube_egr.var_name = 'egr'
+            cube_egr.units = 'day-1'
+
+            if self.time_statistic == 'annual_mean':
+                cube_egr = annual_statistics(cube_egr)
+                cube_egr = cube_egr.collapsed('time', iris.analysis.MEAN)
+            elif self.time_statistic == 'seasonal_mean':
+                cube_egr = seasonal_statistics(cube_egr)
+                cube_egr = cube_egr.collapsed('time', iris.analysis.MEAN)
+                self.seasonal_plots(cube_egr, alias)
+            else:
+                logger.info(
+                    "Parameter time_statistic is not well set in the recipe. "
+                    "Must be 'annual_mean' or 'seasonal_mean'")
+                sys.exit()
+
+            self.save(cube_egr, alias, data)
+
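+    # Taken together, the methods below implement the maximum Eady growth
+    # rate in its usual form sigma = 0.3098 * |f| * |du/dz| / N (see e.g.
+    # Hoskins & Valdes, 1990): f is the Coriolis parameter, du/dz the
+    # vertical shear of the eastward wind and N the Brunt-Väisälä frequency.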
+    def potential_temperature(self, temperature, plev):
+        """Compute potential temperature.
+
+        Parameters
+        ----------
+        temperature: iris.cube.Cube
+            Cube of air temperature ta.
+        plev: iris.coords.Coord
+            Pressure level coordinates.
+
+        Returns
+        -------
+        theta: iris.cube.Cube
+            Cube of potential temperature theta.
+        """
+        reference_pressure = iris.coords.AuxCoord(
+            self.ref_p, long_name='reference_pressure', units='hPa')
+        reference_pressure.convert_units(plev.units)
+        # Poisson factor (p0/p)^kappa with kappa = R/cp = 2/7
+        pressure = (reference_pressure.points / plev.points)**(2 / 7)
+        theta = temperature * iris.util.broadcast_to_shape(
+            pressure, temperature.shape,
+            temperature.coord_dims('air_pressure'))
+        theta.long_name = 'potential_air_temperature'
+
+        return theta
+
+    @staticmethod
+    def vertical_integration(var_x, var_y):
+        """
+        Vertical integration.
+
+        Perform a non-cyclic centered finite difference of variable var_x
+        with respect to variable var_y along pressure levels, i.e. the
+        derivative d(var_x)/d(var_y).
+
+        Parameters
+        ----------
+        var_x: iris.cube.Cube
+            Cube of variable var_x.
+        var_y: iris.cube.Cube
+            Cube of variable var_y.
+
+        Returns
+        -------
+        dxdy: da.array
+            Derivative of var_x with respect to var_y along pressure levels.
+        """
+        plevs = var_x.shape[1]
+
+        dxdy_0 = (
+            (var_x[:, 1, :, :].lazy_data() - var_x[:, 0, :, :].lazy_data()) /
+            (var_y[:, 1, :, :].lazy_data() - var_y[:, 0, :, :].lazy_data()))
+
+        dxdy_centre = ((var_x[:, 2:plevs, :, :].lazy_data() -
+                        var_x[:, 0:plevs - 2, :, :].lazy_data()) /
+                       (var_y[:, 2:plevs, :, :].lazy_data() -
+                        var_y[:, 0:plevs - 2, :, :].lazy_data()))
+
+        dxdy_end = ((var_x[:, plevs - 1, :, :].lazy_data() -
+                     var_x[:, plevs - 2, :, :].lazy_data()) /
+                    (var_y[:, plevs - 1, :, :].lazy_data() -
+                     var_y[:, plevs - 2, :, :].lazy_data()))
+
+        bounds = [dxdy_end, dxdy_0]
+        stacked_bounds = da.stack(bounds, axis=1)
+        total = [dxdy_centre, stacked_bounds]
+
+        # Concatenate arrays where the last slice is dxdy_0
+        dxdy = da.concatenate(total, axis=1)
+
+        # Move dxdy_0 to the beginning of the array
+        dxdy = da.roll(dxdy, 1, axis=1)
+
+        return dxdy
+
+    def brunt_vaisala_frq(self, theta, geopotential):
+        """Compute Brunt-Väisälä frequency.
+
+        Parameters
+        ----------
+        theta: iris.cube.Cube
+            Cube of potential temperature.
+        geopotential: iris.cube.Cube
+            Cube of variable zg.
+
+        Returns
+        -------
+        brunt: da.array
+            Array containing Brunt-Väisälä frequency.
+        """
+        dthdz = self.vertical_integration(theta, geopotential)
+        dthdz = da.where(dthdz > 0, dthdz, 0)
+        buoy = (self.gravity / theta.lazy_data()) * dthdz
+        brunt = da.sqrt(buoy)
+        brunt = da.where(brunt != 0, brunt, self.fill_value)
+
+        return brunt
+
+    def coriolis(self, lats, ndim):
+        """Compute Coriolis force.
+
+        Parameters
+        ----------
+        lats: iris.coord.Coord
+            Latitude coordinate.
+        ndim: tuple
+            Shape to which the Coriolis parameter is broadcast.
+
+        Returns
+        -------
+        fcor: da.array
+            Array containing Coriolis force.
+        """
+        fcor = 2.0 * self.omega * np.sin(np.radians(lats.points))
+        fcor = fcor[np.newaxis, np.newaxis, :, np.newaxis]
+        fcor = da.broadcast_to(fcor, ndim)
+
+        return fcor
+
+    def eady_growth_rate(self, fcor, eastward_wind, geopotential, brunt):
+        """Compute Eady Growth Rate.
+
+        Parameters
+        ----------
+        fcor: da.array
+            Array containing Coriolis force.
+        eastward_wind: iris.cube.Cube
+            Cube containing variable ua.
+        geopotential: iris.cube.Cube
+            Cube containing variable zg.
+        brunt: da.array
+            Array containing Brunt-Väisälä frequency.
+
+        Returns
+        -------
+        egr: da.array
+            Array containing Eady Growth Rate.
+        """
+        dudz = self.vertical_integration(eastward_wind, geopotential)
+        egr = self.con * abs(fcor) * abs(dudz) / brunt
+
+        return egr
+
+    def seasonal_plots(self, egr, alias):
+        """
+        Plot seasonal Eady Growth rate values.
+ + Parameters + ---------- + egr: iris.cube.Cube + Cube containing variable egr. + alias: str + Alias of the dataset. + """ + try: + levels = self.cfg['plot_levels'] + except KeyError: + logger.info("Parameter plot_levels is not set in the recipe." + "Plotting all pressure levels instead.") + levels = egr.coord('air_pressure').points + for level in levels: + cube = extract_levels(egr, level, scheme='linear') + crs_latlon = ccrs.PlateCarree() + axes = plt.axes(projection=ccrs.PlateCarree()) + axes.coastlines(linewidth=1, color='black') + # North Atlantic + axes.set_extent((-90.0, 30.0, 20.0, 80.0), crs=crs_latlon) + axes.set_yticks(np.linspace(25, 75, 6)) + # Relevant range + qplt.contourf(cube, levels=np.arange(0, 1.1, 0.05)) + extension = self.cfg['output_file_type'] + diagnostic = self.cfg['script'] + plotname = '_'.join([alias, diagnostic, + str(int(level))]) + f'.{extension}' + plt.savefig(os.path.join(self.cfg[names.PLOT_DIR], plotname)) + plt.close() + + def save(self, egr, alias, data): + """Save results and write provenance.""" + script = self.cfg[names.SCRIPT] + info = data[alias][0] + keys = [ + str(info[key]) for key in ('project', 'dataset', 'exp', 'ensemble', + 'diagnostic', 'start_year', 'end_year') + if key in info + ] + output_name = '_'.join(keys) + '.nc' + output_file = os.path.join(self.cfg[names.WORK_DIR], output_name) + iris.save(egr, output_file) + + script_name = script.replace(" ", '_') + caption = (f"{script_name} between {info['start_year']} " + f"and {info['end_year']} according to {info['dataset']}") + ancestors = [] + for i in range(len(data[alias])): + ancestors.append(data[alias][i]['filename']) + record = { + 'caption': caption, + 'domains': ['global'], + 'authors': ['sanchez-gomez_emilia', 'moreno-chamarro_eduardo'], + 'references': ['morenochamarro2021', 'acknow_project'], + 'ancestors': ancestors + } + with ProvenanceLogger(self.cfg) as provenance_logger: + provenance_logger.log(output_file, record) + + +def main(): + """Run Eady Growth Rate diagnostic.""" + with run_diagnostic() as config: + EadyGrowthRate(config).compute() + + +if __name__ == "__main__": + main() diff --git a/esmvaltool/diag_scripts/psyplot_diag.py b/esmvaltool/diag_scripts/psyplot_diag.py new file mode 100644 index 0000000000..20016e21ef --- /dev/null +++ b/esmvaltool/diag_scripts/psyplot_diag.py @@ -0,0 +1,143 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +"""Create arbitrary Psyplot plots. + +Description +----------- +This diagnostic provides a high-level interface to Psyplot. + +Author +------ +Manuel Schlund (DLR, Germany) + +Notes +----- +For each input dataset, an individual plot is created. This diagnostic supports +arbitrary variables of arbitrary datasets. + +Configuration options in recipe +------------------------------- +psyplot_func: str + Function used to plot the data. Must be a function of + :mod:`psyplot.project.plot`. Run ``python -c "from psyplot.project import + plot; print(plot.show_plot_methods())"`` to get a list of all currently + supported plotting functions (make sure to run this command in your + ESMValTool environment). +psyplot_kwargs: dict, optional + Optional keyword arguments for the plotting function given by + ``psyplot_func``. String arguments can include facets in curly brackets + which will be derived from the corresponding dataset, e.g., ``clabel: + '{long_name} [{units}]'``, ``title: '{long_name} Climatology of {dataset} + ({start_year}-{end_year})'``. 
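+
+    A minimal recipe sketch combining ``psyplot_func`` and
+    ``psyplot_kwargs`` (illustrative values; the ``mapplot`` method comes
+    from the ``psy-maps`` plugin)::
+
+        script: psyplot_diag.py
+        psyplot_func: mapplot
+        psyplot_kwargs:
+          clabel: '{long_name} [{units}]'
+          cmap: Reds
+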
+savefig_kwargs: dict, optional + Optional keyword arguments for :func:`matplotlib.pyplot.savefig`. By + default, uses ``bbox_inches: tight, dpi: 300, orientation: landscape``. +seaborn_settings: dict, optional + Options for :func:`seaborn.set_theme` (affects all plots). + +""" +import logging +from contextlib import redirect_stdout +from copy import deepcopy +from io import StringIO +from pathlib import Path +from pprint import pformat + +import matplotlib.pyplot as plt +import psyplot.project as psy +import seaborn as sns + +from esmvaltool.diag_scripts.shared import ( + ProvenanceLogger, + get_plot_filename, + run_diagnostic, +) + +logger = logging.getLogger(Path(__file__).stem) + + +def _get_default_cfg(cfg): + """Get default options for configuration dictionary.""" + cfg = deepcopy(cfg) + cfg.setdefault('psyplot_kwargs', {}) + cfg.setdefault('savefig_kwargs', { + 'bbox_inches': 'tight', + 'dpi': 300, + 'orientation': 'landscape', + }) + cfg.setdefault('seaborn_settings', {}) + return cfg + + +def _get_plot_func(cfg): + """Get psyplot plot function.""" + if 'psyplot_func' not in cfg: + raise ValueError("Necessary option 'psyplot_func' missing") + if not hasattr(psy.plot, cfg['psyplot_func']): + with redirect_stdout(StringIO()) as str_in: + psy.plot.show_plot_methods() + all_plot_funcs = str_in.getvalue() + raise AttributeError( + f"Invalid psyplot_func '{cfg['psyplot_func']}' (must be a " + f"function of the module psyplot.project.plot). Currently " + f"supported:\n{all_plot_funcs}") + logger.info( + "Using plotting function psyplot.project.plot.%s", cfg['psyplot_func']) + return getattr(psy.plot, cfg['psyplot_func']) + + +def _get_psyplot_kwargs(cfg, dataset): + """Get keyword arguments for psyplot plotting function.""" + psyplot_kwargs = deepcopy(cfg['psyplot_kwargs']) + for (key, val) in psyplot_kwargs.items(): + if isinstance(val, str): + try: + val = val.format(**dataset) + except KeyError as exc: + raise ValueError( + f"Not all necessary facets psyplot_kwargs '{key}: {val}' " + f"available for dataset" f"\n{pformat(dataset)}") from exc + psyplot_kwargs[key] = val + return psyplot_kwargs + + +def main(cfg): + """Run diagnostic.""" + cfg = _get_default_cfg(cfg) + sns.set_theme(**cfg['seaborn_settings']) + plot_func = _get_plot_func(cfg) + + # Create individual plots for each dataset + input_data = list(cfg['input_data'].values()) + for dataset in input_data: + filename = dataset['filename'] + logger.info("Creating plot '%s' for %s", cfg['psyplot_func'], filename) + + # Create plot + psyplot_kwargs = _get_psyplot_kwargs(cfg, dataset) + plot_func(filename, **psyplot_kwargs) + + # Save plot + basename = Path(filename).stem + plot_path = get_plot_filename(basename, cfg) + plt.savefig(plot_path, **cfg['savefig_kwargs']) + logger.info("Wrote %s", plot_path) + plt.close() + + # Provenance tracking + caption = (f"Plot {cfg['psyplot_func']} of {dataset['long_name']} of " + f"dataset {dataset['dataset']} ({dataset['start_year']}-" + f"{dataset['end_year']}).") + provenance_record = { + 'ancestors': [filename], + 'authors': ['schlund_manuel'], + 'caption': caption, + } + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(plot_path, provenance_record) + + +if __name__ == '__main__': + + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/pv_capacityfactor/PV_CF.R b/esmvaltool/diag_scripts/pv_capacityfactor/PV_CF.R new file mode 100644 index 0000000000..3aceaed3d9 --- /dev/null +++ b/esmvaltool/diag_scripts/pv_capacityfactor/PV_CF.R @@ -0,0 
+1,25 @@
+# #############################################################################
+# diagnostic.R
+# Authors: Irene Cionni (ENEA, Italy)
+# #############################################################################
+# Description
+# This script calculates the capacity factor used in pv_capacity_factor.R
+#
+# Required
+#
+# Optional
+#
+# Caveats
+#
+# Modification history
+# 20210401-cionni_irene: written for v2.0
+# 20210621-weigel_katja: formatting updates
+# 20210714-weigel_katja: removed unused parts
+#
+# #############################################################################
+library(ggplot2)
+library(plyr)
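+# The conversion below reads as: a reference panel efficiency of 0.9,
+# linearly derated for near-surface air temperatures above 25 C
+# (273.15 + 25 K) and scaled by the shortwave irradiance relative to a
+# nominal 1000 W m-2 (a descriptive reading of the code, nothing added).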
+rsds2cf <- function(rsds, t2m) {
+  cf <- 0.9 * (1.0 - 0.00042 * (t2m - (273.15 + 25.))) * rsds / 1000
+  return(cf)
+}
\ No newline at end of file
diff --git a/esmvaltool/diag_scripts/pv_capacityfactor/pv_capacity_factor.R b/esmvaltool/diag_scripts/pv_capacityfactor/pv_capacity_factor.R
new file mode 100644
index 0000000000..93d63e0ac4
--- /dev/null
+++ b/esmvaltool/diag_scripts/pv_capacityfactor/pv_capacity_factor.R
@@ -0,0 +1,293 @@
+# #############################################################################
+# diagnostic.R
+# Authors: Irene Cionni (ENEA, Italy)
+# #############################################################################
+# Description
+# This script modifies the wind capacity factor diagnostic written for the
+# MAGIC project from BSC, see also esmvaltool/diag_scripts/magic_bsc/.
+#
+# Required
+# season: String to include shortcut for season in plot title
+#         and name (e.g. "djf"). It will be converted to upper case.
+#         This season should be the one set in the preprocessor, since it is
+#         only used as a string and does not affect the data in the
+#         diagnostic. In the default recipe this is solved through a node
+#         anchor.
+#
+# Optional
+# maxval_colorbar: Optional upper limit for the colorbar.
+#
+# Caveats
+#
+# Modification history
+# 20210401-cionni_irene: written for v2.0
+# 20210401-weigel_katja: changed to allow multiple models
+# 20210621-weigel_katja: formatting updates
+#
+# ############################################################################
+library(abind)
+library(climdex.pcic)
+library(ggplot2)
+library(multiApply) # nolint
+library(ncdf4)
+library(RColorBrewer) # nolint
+library(s2dverification)
+library(yaml)
+
+# Parsing input file paths and creating output dirs
+args <- commandArgs(trailingOnly = TRUE)
+params <- read_yaml(args[1])
+initial_options <- commandArgs(trailingOnly = FALSE)
+file_arg_name <- "--file="
+script_name <- sub(
+  file_arg_name, "",
+  initial_options[grep(file_arg_name, initial_options)]
+)
+script_dirname <- dirname(script_name)
+
+source(file.path(script_dirname, "PV_CF.R"))
+plot_dir <- params$plot_dir
+run_dir <- params$run_dir
+work_dir <- params$work_dir
+
+# setup provenance file and list
+provenance_file <- paste0(run_dir, "/", "diagnostic_provenance.yml")
+provenance <- list()
+
+## Create working dirs if they do not exist
+dir.create(plot_dir, recursive = TRUE)
+dir.create(run_dir, recursive = TRUE)
+dir.create(work_dir, recursive = TRUE)
+input_files_per_var <- yaml::read_yaml(params$input_files[1])
+var_names <- names(input_files_per_var)
+model_names <- lapply(input_files_per_var, function(x) x$dataset)
+model_names <- unname(model_names)
+var0 <- lapply(input_files_per_var, function(x) x$short_name)
+fullpath_filenames <- names(var0)
+input_files_per_var1 <- yaml::read_yaml(params$input_files[2])
+var_names1 <- names(input_files_per_var1)
+var1 <- lapply(input_files_per_var1, function(x) x$short_name)
+fullpath_filenames1 <- names(var1)
+var0 <- unname(var0)[1]
+var1 <- unname(var1)[1]
+start_years <- lapply(input_files_per_var, function(x) x$start_year)
+start_years <- unname(start_years)
+end_years <- lapply(input_files_per_var, function(x) x$end_year)
+end_years <- unname(end_years)
+seasons <- toupper(params$season)
+
+var0 <- unlist(var0)
+for (i in seq(1, length(model_names))) {
+  start_year <- c(unlist(start_years[i]))
+  end_year <- c(unlist(end_years[i]))
+  no_of_years <- length(seq(start_year, end_year, 1))
+  data_nc <- nc_open(fullpath_filenames[i])
+  data <- ncvar_get(data_nc, var0)
+  names(dim(data)) <- c("lon", "lat", "time")
+  lat <- ncvar_get(data_nc, "lat")
+  lon <- ncvar_get(data_nc, "lon")
+  units <- ncatt_get(data_nc, var0, "units")$value
+  calendar <- ncatt_get(data_nc, "time", "calendar")$value
+  long_names <- ncatt_get(data_nc, var0, "long_name")$value
+  time <- ncvar_get(data_nc, "time")
+  start_date <- as.POSIXct(substr(ncatt_get(
+    data_nc, "time",
+    "units"
+  )$value, 11, 29))
+  nc_close(data_nc)
+  time <- as.Date(time,
+    origin = substr(start_date, 1, 10),
+    calendar = calendar
+  )
+  time <- as.POSIXct(time, format = "%Y-%m-%d")
+  time_dim <- which(names(dim(data)) == "time")
+  time <- as.PCICt(time, cal = calendar)
+  if (calendar != "360_day" & calendar != "365_day") {
+    time <- as.character(time)
+    jdays <- as.numeric(strftime(time, format = "%j"))
+    pos <- which(substr(time, 6, 10) == "02-29")
+    if (length(pos) > 0) {
+      time <- time[-pos]
+      data <- apply(
+        data, c(seq(1, length(dim(data)), 1))[-time_dim],
+        function(x) {
+          x[-pos]
+        }
+      )
+      data <- aperm(data, c(2, 3, 1))
+      names(dim(data)) <- c("lon", "lat", "time")
+    }
+  }
+  dims <- dim(data)
+  dims <- append(dims[-time_dim], c(no_of_years, dims[time_dim] /
+    no_of_years), after = 2)
+  dim(data) <- dims
+  data <- aperm(data, c(3, 4, 2, 1))
+  names(dim(data)) <- c("year", "day", "lat", "lon")
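+  # data is now year x day x lat x lon: the time axis has been split into
+  # calendar years and days within each year (29 February dropped above),
+  # so the seasonal aggregation below can simply average over "day"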
+  ######## var1#########################################
+  var1 <- unlist(var1)
+  data_nc1 <- nc_open(fullpath_filenames1[i])
+  data1 <- ncvar_get(data_nc1, var1)
+
+  names(dim(data1)) <- c("lon", "lat", "time")
+  lat <- ncvar_get(data_nc1, "lat")
+  lon <- ncvar_get(data_nc1, "lon")
+  units <- ncatt_get(data_nc1, var1, "units")$value
+  calendar <- ncatt_get(data_nc1, "time", "calendar")$value
+  long_names <- ncatt_get(data_nc1, var1, "long_name")$value
+  time <- ncvar_get(data_nc1, "time")
+  start_date <- as.POSIXct(substr(ncatt_get(
+    data_nc1, "time",
+    "units"
+  )$value, 11, 29))
+  nc_close(data_nc1)
+  time <- as.Date(time,
+    origin = substr(start_date, 1, 10),
+    calendar = calendar
+  )
+  time <- as.POSIXct(time, format = "%Y-%m-%d")
+  time_dim <- which(names(dim(data1)) == "time")
+  time <- as.PCICt(time, cal = calendar)
+  if (calendar != "360_day" & calendar != "365_day") {
+    time <- as.character(time)
+    jdays <- as.numeric(strftime(time, format = "%j"))
+    pos <- which(substr(time, 6, 10) == "02-29")
+    if (length(pos) > 0) {
+      time <- time[-pos]
+      data1 <- apply(
+        data1, c(seq(1, length(dim(data1)), 1))[-time_dim],
+        function(x) {
+          x[-pos]
+        }
+      )
+      data1 <- aperm(data1, c(2, 3, 1))
+      names(dim(data1)) <- c("lon", "lat", "time")
+    }
+  }
+  dims1 <- dim(data1)
+  dims1 <- append(dims1[-time_dim], c(no_of_years, dims1[time_dim] /
+    no_of_years), after = 2)
+  dim(data1) <- dims1
+  data1 <- aperm(data1, c(3, 4, 2, 1))
+  names(dim(data1)) <- c("year", "day", "lat", "lon")
+
+  #####################################
+  # CF model
+  ####################################
+
+  seas_data <- Mean1Dim(data, 2)
+  data_cf1 <- rsds2cf(data1, data)
+  dim(data_cf1) <- dim(data)
+  #---------------------------
+  # Aggregate daily data to seasonal means
+  #---------------------------
+
+  seas_data_cf1 <- Mean1Dim(data_cf1, 2)
+  ##############################
+  # Make some plots
+  ##############################
+  #---------------------------
+  # Prepare data, labels and colorscales
+  #---------------------------
+  p <- colorRampPalette(brewer.pal(9, "YlOrRd"))
+  q <- colorRampPalette(rev(brewer.pal(11, "RdBu")))
+  years <- seq(start_year, end_year)
+  turb_types <- c("PVCF")
+
+  seas_data_cf_all <- seas_data_cf1
+
+  mean_data_cf_all <- Mean1Dim(seas_data_cf_all, 1)
+
+  anom_data_cf_all <- seas_data_cf_all - InsertDim( # nolint
+    Mean1Dim(seas_data_cf_all, 1), 1, dim(data)[1]
+  ) # nolint
+  pct_anom_data_cf_all <- (seas_data_cf_all / InsertDim( # nolint
+    Mean1Dim(seas_data_cf_all, 1), 1, dim(data)[1]
+  )) - 1 # nolint
+  #---------------------------
+  # Plot seasonal CF maps
+  #---------------------------
+  filepng <- paste0(
+    plot_dir, "/", "capacity_factor_", model_names[i], "_",
+    start_year, "-", end_year, "_", seasons, ".png"
+  )
+  title <- paste0(
+    seasons, " CF from ", model_names[i],
+    " (", start_year, "-", end_year, ")"
+  )
+
+  # Optional upper limit for the color bar set in recipe
+  if (length(params$maxval_colorbar) == 1) {
+    maxval_colorbar <- params$maxval_colorbar
+  } else {
+    maxval_colorbar <- max(seas_data_cf_all, na.rm = TRUE)
+  }
+
+  PlotEquiMap(Mean1Dim(seas_data_cf_all, 1), lon, lat,
+    filled.continents = F,
+    brks = seq(
+      from = 0, to = maxval_colorbar,
+      length.out = 11
+    ),
+    color_fun = clim.palette("yellowred"),
+    toptitle = title,
+    height = 6,
+    fileout = filepng
+  )
+  filencdf <- paste0(
+    work_dir, "/", "capacity_factor_", model_names[i], "_",
+    start_year, "-", end_year, ".nc"
+  )
+  dimlon <- ncdim_def(
+    name = "lon",
units = "degrees_east", + vals = as.vector(lon), longname = "longitude" + ) + dimlat <- ncdim_def( + name = "lat", units = "degrees_north", + vals = as.vector(lat), longname = "latitude" + ) + dimtime <- ncdim_def( + name = "season", units = "season", + vals = start_year:end_year, + longname = "season of the year: DJF, MAM, JJA, SON" + ) + dimcurve <- ncdim_def( + name = "curve", units = "name", vals = seq(1, 5, 1), + longname = "Power curves of considered turbines" + ) + names(dim(seas_data_cf_all)) <- c("time", "lat", "lon") + defdata <- ncvar_def( + name = "CapacityFactor", units = "%", + dim = list( + dimtime, + lat = dimlat, + lon = dimlon + ), + longname = paste( + "Capacity Factor of PV", + "based on rsds and tas" + ) + ) + file <- nc_create(filencdf, list(defdata)) + ncvar_put(file, defdata, seas_data_cf_all) + nc_close(file) + + # Set provenance for output files + xprov <- list( + ancestors = list( + fullpath_filenames[i] + ), + authors = list( + "cionni_irene" + ), + projects = list("crescendo"), + caption = title, + statistics = list("other"), + realms = list("atmos"), + themes = list("phys") + ) + provenance[[filepng]] <- xprov + provenance[[filencdf]] <- xprov +} + +# Write provenance to file +write_yaml(provenance, provenance_file) diff --git a/esmvaltool/diag_scripts/quantilebias/quantilebias.R b/esmvaltool/diag_scripts/quantilebias/quantilebias.R index a0b27a1e9f..ae90dacf20 100644 --- a/esmvaltool/diag_scripts/quantilebias/quantilebias.R +++ b/esmvaltool/diag_scripts/quantilebias/quantilebias.R @@ -18,41 +18,24 @@ # Caveats # # Modification history -# 20180926-A_arno_en: Refined for usage as recipe -# 20180518-A_arno_en: Written for v2.0 +# 20180926-arnone_enrico: Refined for usage as recipe +# 20180518-arnone_enrico: Written for v2.0 # # ############################################################################# library(tools) library(yaml) -cdo <- function(command, args = "", input = "", options = "", output = "", - stdout = "", noout = F) { - if (args != "") args <- paste0(",", args) - if (stdout != "") stdout <- paste0(" > '", stdout, "'") - if (input[1] != "") { - for (i in 1:length(input)) { - input[i] <- paste0("'", input[i], "'") - } - input <- paste(input, collapse = " ") - } - output0 <- output - if (output != "") { - output <- paste0("'", output, "'") - } else if ( !noout ) { - output <- tempfile() - output0 <- output - } - argstr <- paste0(options, " ", command, args, " ", input, " ", output, - " ", stdout) - print(paste("cdo", argstr)) - ret <- system2("cdo", args = argstr) - if (ret != 0) { - stop(paste("Failed (", ret, "): cdo", argstr)) - } - return(output0) -} +# get path to script and source subroutines (if needed) +args <- commandArgs(trailingOnly = FALSE) +spath <- paste0(dirname(unlist(strsplit( + grep("--file", args, + value = TRUE + ), "=" +))[2]), "/") +source(paste0(spath, "quantilebias_functions.R")) +source(paste0(spath, "../shared/external.R")) # nolint # read settings and metadata files args <- commandArgs(trailingOnly = TRUE) @@ -76,6 +59,7 @@ varname <- climolist$short_name # create working dirs if they do not exist dir.create(work_dir, recursive = T, showWarnings = F) +dir.create(plot_dir, recursive = T, showWarnings = F) setwd(work_dir) # setup provenance file and list @@ -84,7 +68,8 @@ provenance <- list() # extract metadata models_name <- unname(sapply(metadata, "[[", "dataset")) -reference_model <- unname(sapply(metadata, "[[", "reference_dataset"))[1] +reference_model <- + unname(sapply(metadata, "[[", "reference_dataset"))[1] 
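+# (the reference dataset above is assumed to be the same for every input
+# file, hence only the first entry is kept)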
models_start_year <- unname(sapply(metadata, "[[", "start_year")) models_end_year <- unname(sapply(metadata, "[[", "end_year")) models_experiment <- unname(sapply(metadata, "[[", "exp")) @@ -96,7 +81,7 @@ ref_data_file <- climofiles[ref_idx] ## Loop through input models for (model_idx in c(1:(length(models_name)))) { if (model_idx == ref_idx) { - next + next } # Setup parameters and path exp <- models_name[model_idx] @@ -106,25 +91,34 @@ for (model_idx in c(1:(length(models_name)))) { model_exp <- models_experiment[model_idx] model_ens <- models_ensemble[model_idx] - inregname <- paste0(exp, "_", model_exp, "_", model_ens, "_", - toString(year1), "-", toString(year2), "_", varname) - outfile <- paste0(work_dir, "/", inregname, "_", perc_lev, "qb.nc") - print(paste0(diag_base, ": pre-processing file: ", infile)) - - print(paste0(diag_base, ": ", perc_lev, " percent quantile")) + inregname <- paste0( + exp, + "_", + model_exp, + "_", + model_ens, + "_", + toString(year1), + "-", + toString(year2), + "_", + varname + ) + outfile <- + paste0(work_dir, "/", inregname, "_", perc_lev, "qb.nc") # Select variable of interest modf <- cdo("selvar", args = varname, input = infile) - # Remap reference onto model grid - selectf <- cdo("selvar", args = varname, input = ref_data_file) - reff <- cdo("remapcon", args = modf, input = selectf) + reff <- ref_data_file # Get (X)th percentile of reference dataset refminf <- cdo("timmin", input = reff) refmaxf <- cdo("timmax", input = reff) - ref_perc_pf <- cdo("timpctl", args = perc_lev, - input = c(reff, refminf, refmaxf)) + ref_perc_pf <- cdo("timpctl", + args = perc_lev, + input = c(reff, refminf, refmaxf) + ) # Select points with monthly precipitation greater than (75)th perc mask_reff <- cdo("ge", input = c(reff, ref_perc_pf)) @@ -140,31 +134,138 @@ for (model_idx in c(1:(length(models_name)))) { qb1f <- cdo("div", input = c(mod_sumf, ref_sumf)) tempfile <- tempfile() - temp1f <- cdo("chname", args = paste0(varname, ",qb"), input = qb1f) - temp2f <- cdo("setattribute", args = "qb@units=' '", input = temp1f) + temp1f <- + cdo("chname", args = paste0(varname, ",qb"), input = qb1f) + temp2f <- + cdo("setattribute", args = "qb@units=' '", input = temp1f) temp1f <- cdo("setattribute", - args = "qb@long_name='Precipitation quantile bias'", - input = temp2f, output = temp1f) - cdo("setattribute", args = "qb@standard_name='precipitation_quantile_bias'", - input = temp1f, output = outfile) + args = "qb@long_name='Precipitation quantile bias'", + input = temp2f, + output = temp1f + ) + cdo("setattribute", + args = "qb@standard_name='precipitation_quantile_bias'", + input = temp1f, + output = outfile + ) # Remove temporary files - unlink(c(modf, reff, ref_perc_pf, mask_reff, mask_modf, - ref_sumf, mod_sumf, qb1f, refminf, refmaxf, selectf, - mask_mod2f, mask_ref2f, temp1f, temp2f)) + unlink( + c( + modf, + ref_perc_pf, + mask_reff, + mask_modf, + ref_sumf, + mod_sumf, + qb1f, + refminf, + refmaxf, + mask_mod2f, + mask_ref2f, + temp1f, + temp2f + ) + ) + + + # Produce figure + field <- ncdf_opener(outfile, "qb", "lon", "lat", rotate = "full") + ics_ref <- ics + ipsilon_ref <- ipsilon + + tmp_figname <- sub(".nc", paste0(".", output_file_type), outfile) + figname <- sub(work_dir, plot_dir, tmp_figname) + + figure_size <- c(600, 400) + if (tolower(output_file_type) != "png") { + figure_size <- c(10, 6) + } + graphics_startup(figname, output_file_type, figure_size) + + tmp_levels <- c(0:20) * 0.1 + tmp_colors <- rev(rainbow(30)[1:20]) + + # contours + par( + cex.main = 
1.8,
+    cex.axis = 1.4,
+    cex.lab = 1.4,
+    mar = c(5, 5, 4, 8)
+  )
+  filled_contour3(
+    ics,
+    ipsilon,
+    field,
+    xlab = "Longitude",
+    ylab = "Latitude",
+    main = paste0(exp),
+    levels = tmp_levels,
+    col = tmp_colors,
+    axes = F,
+    asp = 1
+  )
+  # continents
+  map(
+    "world",
+    regions = ".",
+    interior = F,
+    exact = F,
+    boundary = T,
+    add = T,
+    col = "black",
+    lwd = 2
+  )
+  axis(1, col = "grey40", at = seq(-180, 180, 45))
+  axis(2, col = "grey40", at = seq(-90, 90, 30))
+
+  colorbar_scale <- c(-0.15, -0.08, 0.1, -0.1)
+  if (tolower(output_file_type) != "png") {
+    colorbar_scale <- c(-0.13, -0.06, 0.1, -0.1)
+  }
+  image_scale3(
+    volcano,
+    levels = tmp_levels,
+    new_fig_scale = colorbar_scale,
+    col = tmp_colors,
+    colorbar.label = paste0("QB", perc_lev),
+    cex.colorbar = 1.3,
+    cex.label = 1.4,
+    colorbar.width = 1,
+    line.label = 2.9,
+    line.colorbar = 1.0,
+    extend = F
+  )
+  graphics_close(figname)

   # Set provenance for this output file
-  caption <- paste0("Precipitation quantile bias ", perc_lev, "% for years ",
-                    year1, " to ", year2, " according to ", exp)
-  xbase <- list(ancestors = list(infile, ref_data_file),
-                authors = list("arno_en", "hard_jo"),
-                projects = list("c3s-magic"), references = list("mehran14jgr"),
-                caption = caption, statistics = list("perc"),
-                realms = list("atmos"), themes = list("phys"),
-                domains = list("global"), reference_dataset = ref_data_file)
+  caption <-
+    paste0(
+      "Precipitation quantile bias ",
+      perc_lev,
+      "% for years ",
+      year1,
+      " to ",
+      year2,
+      " according to ",
+      exp
+    )
+  xbase <- list(
+    ancestors = list(infile, ref_data_file),
+    authors = list("arnone_enrico", "vonhardenberg_jost"),
+    projects = list("c3s-magic"),
+    references = list("mehran14jgr"),
+    caption = caption,
+    statistics = list("perc"),
+    realms = list("atmos"),
+    themes = list("phys"),
+    domains = list("global"),
+    reference_dataset = ref_data_file
+  )

   # Store provenance in main provenance list
   provenance[[outfile]] <- xbase
+  provenance[[figname]] <- xbase
 }

 # Write provenance to file
diff --git a/esmvaltool/diag_scripts/quantilebias/quantilebias_functions.R b/esmvaltool/diag_scripts/quantilebias/quantilebias_functions.R
new file mode 100644
index 0000000000..cbd9e72acf
--- /dev/null
+++ b/esmvaltool/diag_scripts/quantilebias/quantilebias_functions.R
@@ -0,0 +1,618 @@
+# #############################################################################
+# quantilebias_functions.R
+#
+# Author: Enrico Arnone (ISAC-CNR, Italy)
+#
+# #############################################################################
+# Description
+# Originally developed as functions to be used in HyInt routines
+#
+# Modification history
+# 20170901-A_arno_en: adapted to HyInt and extended
+# 20170522-A_davi_pa: Creation for MiLES
+# #############################################################################
+
+# basis functions
+
+##########################################################
+#------------------------Packages------------------------#
+##########################################################
+
+# loading packages
+library("maps")
+library("ncdf4")
+library("PCICt")
+
+##########################################################
+#--------------Time Based functions----------------------#
+##########################################################
+
+# check number of days for each month
+
+number_days_month <- function(datas) {
+  # evaluate the number of days in a defined month of a year
+  datas <- as.Date(datas)
+  m <- format(datas, format = "%m")
+  while (format(datas, format = "%m") == m) {
+    datas <- datas + 1
+  }
+  return(as.integer(format(datas - 1, format = "%d")))
+}
+
+##########################################################
+#--------------NetCDF loading function-------------------#
+##########################################################
+
+# universal function to open a single-var 3D (x,y,time) ncdf file: it includes
+# rotation, y-axis flipping, time selection and CDO-based interpolation
+# to replace both ncdf.opener.time and ncdf.opener (deprecated and removed)
+# automatically rotate matrix to place greenwich at the center (flag "rotate")
+# and flip the latitudes in order to have them increasing
+# if required (flag "interp2grid") additional interpolation with CDO is used.
+# "grid" can be used to specify the target grid name
+# time selection based on package PCICt must be specified with both "tmonths"
+# and "tyears" flags. It returns a list including its own dimensions
+ncdf_opener_universal <- # nolint
+  function(namefile,
+           namevar = NULL,
+           namelon = NULL,
+           namelat = NULL,
+           tmonths = NULL,
+           tyears = NULL,
+           rotate = "full",
+           interp2grid = F,
+           grid = "r144x73",
+           remap_method = "remapscon2",
+           exportlonlat = TRUE,
+           verbose = F) {
+    # load package
+    require(ncdf4)
+
+    # verbose-only printing function
+    printv <- function(value) {
+      if (verbose) {
+        print(value)
+      }
+    }
+
+    # check if timeflag is activated or full file must be loaded
+    if (is.null(tyears) | is.null(tmonths)) {
+      timeflag <- FALSE
+      printv("No time and months specified, loading all the data")
+    } else {
+      timeflag <- TRUE
+      printv("tyears and tmonths are set!")
+      require(PCICt)
+    }
+
+    if (rotate == "full") {
+      rot <- T
+      move1 <- move2 <- 1 / 2
+    } # 180 degrees rotation of longitude
+    if (rotate == "half") {
+      rot <- T
+      move1 <- 1 / 4
+      move2 <- 3 / 4
+    } # 90 degree rotation (useful for TM90)
+    if (rotate == "no") {
+      rot <- F
+    } # keep as it is
+
+    # interpolation made with CDO: second order conservative remapping
+    if (interp2grid) {
+      print(paste("Remapping with CDO on", grid, "grid"))
+      if (is.null(namevar)) {
+        namefile <- cdo(remap_method,
+          args = paste0("'", grid, "'"),
+          input = namefile
+        )
+      } else {
+        selectf <- cdo("selvar", args = namevar, input = namefile)
+        gridf <- tempfile()
+        cdo("griddes", input = grid, stdout = gridf)
+        namefile <- cdo(remap_method, args = gridf, input = selectf)
+        unlink(c(selectf, gridf))
+      }
+    }
+
+    # define rotate function (faster than with apply)
+    rotation <- function(line) {
+      vettore <- line
+      dims <- length(dim(vettore))
+      # for longitudes
+      if (dims == 1) {
+        ll <- length(line)
+        line[(ll * move1):ll] <- vettore[1:(ll * move2 + 1)]
+        line[1:(ll * move1 - 1)] <- vettore[(ll * move2 + 2):ll] - 360
+      }
+      # for x,y data
+      if (dims == 2) {
+        ll <- length(line[, 1])
+        line[(ll * move1):ll, ] <- vettore[1:(ll * move2 + 1), ]
+        line[1:(ll * move1 - 1), ] <- vettore[(ll * move2 + 2):ll, ]
+      }
+      # for x,y,t data
+      if (dims == 3) {
+        ll <- length(line[, 1, 1])
+        line[(ll * move1):ll, , ] <- vettore[1:(ll * move2 + 1), , ]
+        line[1:(ll * move1 - 1), , ] <-
+          vettore[(ll * move2 + 2):ll, , ]
+      }
+      return(line)
+    }
+
+    # define flip function ('cos rev/apply is not working)
+    flipper <- function(field) {
+      dims <- length(dim(field))
+      if (dims == 2) {
+        ll <- length(field[1, ])
+        field <- field[, ll:1]
+      } # for x,y data
+      if (dims == 3) {
+        ll <- length(field[1, , 1])
+        field <- field[, ll:1, ]
+      } # for x,y,t data
+      return(field)
+    }
+
+    # opening file: getting variable (if namevar is given, that variable
+    # is extracted)
+
printv(paste("opening file:", namefile)) + a <- nc_open(namefile) + + # if no name provided load the only variable available + if (is.null(namevar)) { + namevar <- names(a$var) + if (length(namevar) > 1) { + print(namevar) + stop("More than one var in the files, please select it + with namevar=yourvar") + } + } + + # load axis: updated version, looking for dimension directly stored + # inside the variable + naxis <- + unlist(lapply(a$var[[namevar]]$dim, function(x) { + x["name"] + })) + for (axis in naxis) { + assign(axis, ncvar_get(a, axis)) + printv(paste(axis, ":", length(get(axis)), "records")) + } + + if (timeflag) { + printv("selecting years and months") + + # based on preprocessing of CDO time format: get calendar type and + # use PCICt package for irregular data + caldata <- ncatt_get(a, "time", "calendar")$value + timeline <- + as.PCICt(as.character(time), format = "%Y%m%d", cal = caldata) + + # break if the calendar has not been recognized + if (any(is.na(timeline))) { + stop("Calendar from NetCDF is unsupported or not present. Stopping!!!") + } + + # break if the data requested is not there + lastday_base <- paste0(max(tyears), "-", max(tmonths), "-28") + maxdays <- number_days_month(lastday_base) + if (caldata == "360_day") { + maxdays <- 30 + } + # uses number_days_month, which loops to get the month change + lastday <- as.PCICt(paste0( + max(tyears), "-", max(tmonths), "-", + maxdays + ), + cal = caldata, + format = "%Y-%m-%d" + ) + firstday <- + as.PCICt(paste0(min(tyears), "-", min(tmonths), "-01"), + cal = caldata, + format = "%Y-%m-%d" + ) + if (max(timeline) < lastday | min(timeline) > firstday) { + stop("You requested a time interval that is not present in the NetCDF") + } + } + + # time selection and variable loading + printv("loading full field...") + field <- ncvar_get(a, namevar) + + if (timeflag) { + # select data we need + select <- which(as.numeric(format(timeline, "%Y")) %in% tyears & + as.numeric(format(timeline, "%m")) %in% tmonths) + field <- field[, , select] + time <- timeline[select] + + printv(paste("This is a", caldata, "calendar")) + printv(paste( + length(time), "days selected from", time[1], + "to", time[length(time)] + )) + + printv(paste("Months that have been loaded are.. 
")) + printv(unique(format(time, "%Y-%m"))) + } + + # check for dimensions (presence or not of time dimension) + dimensions <- length(dim(field)) + + # if dimensions are multiple, get longitude, latitude + # if needed, rotate and flip the array + xlist <- c("lon", "Lon", "longitude", "Longitude") + ylist <- c("lat", "Lat", "latitude", "Latitude") + if (dimensions > 1) { + # assign ics and ipsilon + if (is.null(namelon)) { + if (any(xlist %in% naxis)) { + ics <- get(naxis[naxis %in% xlist], a$dim)$vals + } else { + print("WARNING: No lon found") + ics <- NA + } + } else { + ics <- ncvar_get(a, namelon) + } + if (is.null(namelat)) { + if (any(ylist %in% naxis)) { + ipsilon <- get(naxis[naxis %in% ylist], a$dim)$vals + } else { + print("WARNING: No lat found") + ipsilon <- NA + } + } else { + ipsilon <- ncvar_get(a, namelat) + } + + # longitute rotation around Greenwich + if (rot) { + printv("rotating...") + ics <- rotation(ics) + field <- rotation(field) + } + if (ipsilon[2] < ipsilon[1] & length(ipsilon) > 1) { + if (length(ics) > 1) { + print("flipping...") + ipsilon <- sort(ipsilon) + field <- flipper(field) + } + } + + # exporting variables to the main program + if (exportlonlat) { + assign("ics", ics, envir = .GlobalEnv) + assign("ipsilon", ipsilon, envir = .GlobalEnv) + } + assign(naxis[naxis %in% c(xlist, namelon)], ics) + assign(naxis[naxis %in% c(ylist, namelat)], ipsilon) + } + + if (dimensions > 3) { + stop("This file is more than 3D file") + } + + # close connection + nc_close(a) + + # remove interpolated file + if (interp2grid) { + unlink(namefile) + } + + # showing array properties + printv(paste(dim(field))) + if (timeflag) { + printv(paste("From", time[1], "to", time[length(time)])) + } + + # returning file list + return(mget(c("field", naxis))) + } + +# ncdf.opener is a simplified wrapper for ncdf.opener.universal which returns +# only the field, ignoring the list +ncdf_opener <- function(namefile, + namevar = NULL, + namelon = NULL, + namelat = NULL, + tmonths = NULL, + tyears = NULL, + rotate = "full", + interp2grid = F, + grid = "r144x73", + remap_method = "remapscon2", + exportlonlat = T) { + field <- + ncdf_opener_universal( + namefile, + namevar, + namelon, + namelat, + tmonths, + tyears, + rotate, + interp2grid, + grid, + remap_method, + exportlonlat = exportlonlat + ) + return(field$field) +} + +########################################################## +#--------------Plotting functions------------------------# +########################################################## + +graphics_startup <- function(figname, output_file_type, plot_size) { + # choose output format for figure - by JvH + if (tolower(output_file_type) == "png") { + png( + filename = figname, + width = plot_size[1], + height = plot_size[2] + ) + } else if (tolower(output_file_type) == "pdf") { + pdf( + file = figname, + width = plot_size[1], + height = plot_size[2], + onefile = T + ) + } else if ((tolower(output_file_type) == "eps") | + (tolower(output_file_type) == "epsi") | + (tolower(output_file_type) == "ps")) { + setEPS( + width = plot_size[1], + height = plot_size[2], + onefile = T, + paper = "special" + ) + postscript(figname) + } else if (tolower(output_file_type) == "x11") { + x11(width = plot_size[1], height = plot_size[2]) + } + return() +} + +graphics_close <- function(figname) { + print(figname) + dev.off() + return() +} + +# extensive filled.contour function +filled_contour3 <- # nolint + function(x = seq(0, 1, length.out = nrow(z)), + y = seq(0, 1, length.out = ncol(z)), + z, + xlim = 
range(x, finite = TRUE), + ylim = range(y, finite = TRUE), + zlim = range(z, finite = TRUE), + levels = pretty(zlim, nlevels), + nlevels = 20, + color.palette = cm.colors, + col = color.palette(length(levels) - 1), + extend = TRUE, + plot.title, + plot.axes, + key.title, + key.axes, + asp = NA, + xaxs = "i", + yaxs = "i", + las = 1, + axes = TRUE, + frame.plot = axes, + mar, + ...) { + # modification by Ian Taylor of the filled.contour function + # to remove the key and facilitate overplotting with contour() + # further modified by Carey McGilliard and Bridget Ferris + # to allow multiple plots on one page + # modification to allow plot outside boundaries + + if (missing(z)) { + if (!missing(x)) { + if (is.list(x)) { + z <- x$z + y <- x$y + x <- x$x + } + else { + z <- x + x <- seq.int(0, 1, length.out = nrow(z)) + } + } + else { + stop("no 'z' matrix specified") + } + } + else if (is.list(x)) { + y <- x$y + x <- x$x + } + if (any(diff(x) <= 0) || any(diff(y) <= 0)) { + stop("increasing 'x' and 'y' values expected") + } + + # trim extremes for nicer plots + if (extend) { + z[z < min(levels)] <- min(levels) + z[z > max(levels)] <- max(levels) + } + + plot.new() + plot.window(xlim, + ylim, + "", + xaxs = xaxs, + yaxs = yaxs, + asp = asp + ) + if (!is.matrix(z) || nrow(z) <= 1 || ncol(z) <= 1) { + stop("no proper 'z' matrix specified") + } + if (!is.double(z)) { + storage.mode(z) <- "double" + } + .filled.contour(as.double(x), as.double(y), z, as.double(levels), + col = col + ) + if (missing(plot.axes)) { + if (axes) { + title( + main = "", + xlab = "", + ylab = "" + ) + Axis(x, side = 1, ...) + Axis(y, side = 2, ...) + } + } + else { + plot.axes + } + if (frame.plot) { + box() + } + if (missing(plot.title)) { + title(...) + } else { + plot.title + } + invisible() + } + +image_scale3 <- function(z, + levels, + color.palette = heat.colors, + col = col, + colorbar.label = "image.scale", + extend = T, + line.label = 2, + line.colorbar = 0, + cex.label = 1, + cex.colorbar = 1, + colorbar.width = 1, + new_fig_scale = c(-0.07, -0.03, 0.1, -0.1), + ...) { + # save properties from main plotting region + old.par <- par(no.readonly = TRUE) + mfg.save <- par()$mfg + old.fig <- par()$fig + + # defining plotting region with proper scaling + xscal <- (old.fig[2] - old.fig[1]) + yscal <- (old.fig[4] - old.fig[3]) + lw <- colorbar.width + lp <- line.colorbar / 100 + new.fig <- c( + old.fig[2] + new_fig_scale[1] * xscal * lw - lp, + old.fig[2] + new_fig_scale[2] * xscal - lp, + old.fig[3] + new_fig_scale[3] * yscal, + old.fig[4] + new_fig_scale[4] * yscal + ) + + if (missing(levels)) { + levels <- seq(min(z), max(z), , 12) + } + # fixing color palette + if (missing(col)) { + col <- color.palette(length(levels) - 1) + } + # starting plot + par( + mar = c(1, 1, 1, 1), + fig = new.fig, + new = TRUE + ) + + # creating polygons for legend + poly <- vector(mode = "list", length(col)) + for (i in seq(poly)) { + poly[[i]] <- c(levels[i], levels[i + 1], levels[i + 1], levels[i]) + } + + xlim <- c(0, 1) + if (extend) { + longer <- 1.5 + dl <- diff(levels)[1] * longer + ylim <- c(min(levels) - dl, max(levels) + dl) + } else { + ylim <- range(levels) + } + plot( + 1, + 1, + t = "n", + ylim = ylim, + xlim = xlim, + axes = FALSE, + xlab = "", + ylab = "", + xaxs = "i", + yaxs = "i", + ... 
+ ) + for (i in seq(poly)) { + polygon(c(0, 0, 1, 1), poly[[i]], col = col[i], border = NA) + } + if (extend) { + polygon(c(0, 1, 1 / 2), + c(levels[1], levels[1], levels[1] - dl), + col = col[1], + border = NA + ) + polygon(c(0, 1, 1 / 2), + c( + levels[length(levels)], levels[length(levels)], + levels[length(levels)] + dl + ), + col = col[length(col)], + border = NA + ) + polygon( + c(0, 0, 1 / 2, 1, 1, 1 / 2), + c( + levels[1], levels[length(levels)], levels[length(levels)] + dl, + levels[length(levels)], levels[1], levels[1] - dl + ), + border = "black", + lwd = 2 + ) + ylim0 <- range(levels) + prettyspecial <- pretty(ylim0) + prettyspecial <- prettyspecial[prettyspecial <= max(ylim0) & + prettyspecial >= min(ylim0)] + axis( + 4, + las = 1, + cex.axis = cex.colorbar, + at = prettyspecial, + labels = prettyspecial, + ... + ) + } else { + box() + axis(4, las = 1, cex.axis = cex.colorbar, ...) + } + + # box, axis and leged + mtext(colorbar.label, + line = line.label, + side = 4, + cex = cex.label, + ... + ) + + # resetting properties for starting a new plot (mfrow style) + par(old.par) + par(mfg = mfg.save, new = FALSE) + invisible() +} diff --git a/esmvaltool/diag_scripts/radiation_budget/Demory_et_al_2014_obs_Energy_Budget.yml b/esmvaltool/diag_scripts/radiation_budget/Demory_et_al_2014_obs_Energy_Budget.yml new file mode 100644 index 0000000000..2f5ca1c89d --- /dev/null +++ b/esmvaltool/diag_scripts/radiation_budget/Demory_et_al_2014_obs_Energy_Budget.yml @@ -0,0 +1,20 @@ +- {name: radiation_net_toa, unit: W m-2, data: 0.9} +- {name: toa_incoming_shortwave_flux, unit: W m-2, data: 341.3} +- {name: toa_outgoing_shortwave_flux, unit: W m-2, data: 101.9} +- {name: toa_outgoing_shortwave_flux_assuming_clear_sky, unit: W m-2, data: .nan} +- {name: total_sw_cloud_forcing, unit: W m-2, data: .nan} +- {name: surface_downwelling_shortwave_flux_in_air, unit: W m-2, data: 184.0} +- {name: surface_net_downward_shortwave_flux, unit: W m-2, data: 161.0} +- {name: upward_sw_reflected_surface, unit: W m-2, data: 23.0} +- {name: sw_reflected_clouds, unit: W m-2, data: 79.0} +- {name: sw_absorbed_atm, unit: W m-2, data: 78.0} +- {name: toa_outgoing_longwave_flux, unit: W m-2, data: 239.0} +- {name: toa_outgoing_longwave_flux_assuming_clear_sky, unit: W m-2, data: .nan} +- {name: total_lw_cloud_forcing, unit: W m-2, data: .nan} +- {name: surface_downwelling_longwave_flux_in_air, unit: W m-2, data: 333.0} +- {name: surface_net_downward_longwave_flux, unit: W m-2, data: -63.0} +- {name: upward_lw_emitted_surface, unit: W m-2, data: 396.0} +- {name: net_surface_radiation, unit: W m-2, data: 98.0} +- {name: surface_upward_sensible_heat_flux, unit: W m-2, data: 17.0} +- {name: surface_upward_latent_heat_flux, unit: W m-2, data: 80.0} +- {name: radiation_adsorbed_surface, unit: W m-2, data: 1.0} diff --git a/esmvaltool/diag_scripts/radiation_budget/Stephens_et_al_2012_obs_Energy_Budget.yml b/esmvaltool/diag_scripts/radiation_budget/Stephens_et_al_2012_obs_Energy_Budget.yml new file mode 100644 index 0000000000..e18dec4e13 --- /dev/null +++ b/esmvaltool/diag_scripts/radiation_budget/Stephens_et_al_2012_obs_Energy_Budget.yml @@ -0,0 +1,20 @@ +- {name: radiation_net_toa, unit: W m-2, data: 0.6, error: 0.4} +- {name: toa_incoming_shortwave_flux, unit: W m-2, data: 340.2, error: 0.1} +- {name: toa_outgoing_shortwave_flux, unit: W m-2, data: 100.0, error: 2.0} +- {name: toa_outgoing_shortwave_flux_assuming_clear_sky, unit: W m-2, data: 50.2, error: 5.5} +- {name: total_sw_cloud_forcing, unit: W m-2, data: 47.5, 
error: 3.0} +- {name: surface_downwelling_shortwave_flux_in_air, unit: W m-2, data: 188.0, error: 6.7} +- {name: surface_net_downward_shortwave_flux, unit: W m-2, data: 165.0, error: 6.0} +- {name: upward_sw_reflected_surface, unit: W m-2, data: 23.0, error: 3.0} +- {name: sw_reflected_clouds, unit: W m-2, data: 74.7, error: 5.5} +- {name: sw_absorbed_atm, unit: W m-2, data: 80.0, error: 11.2} +- {name: toa_outgoing_longwave_flux, unit: W m-2, data: 239.7, error: 3.3} +- {name: toa_outgoing_longwave_flux_assuming_clear_sky, unit: W m-2, data: 266.4, error: 3.3} +- {name: total_lw_cloud_forcing, unit: W m-2, data: 26.7, error: 4.0} +- {name: surface_downwelling_longwave_flux_in_air, unit: W m-2, data: 345.6, error: 9.0} +- {name: surface_net_downward_longwave_flux, unit: W m-2, data: -52.4, error: 10.3} +- {name: upward_lw_emitted_surface, unit: W m-2, data: 398.0, error: 5.0} +- {name: net_surface_radiation, unit: W m-2, data: 112.6, error: 11.9} +- {name: surface_upward_sensible_heat_flux, unit: W m-2, data: 24.0, error: 7.0} +- {name: surface_upward_latent_heat_flux, unit: W m-2, data: 88.0, error: 10.0} +- {name: radiation_adsorbed_surface, unit: W m-2, data: 0.6, error: 17.1} diff --git a/esmvaltool/diag_scripts/radiation_budget/radiation_budget.py b/esmvaltool/diag_scripts/radiation_budget/radiation_budget.py new file mode 100644 index 0000000000..a5bd52b2b5 --- /dev/null +++ b/esmvaltool/diag_scripts/radiation_budget/radiation_budget.py @@ -0,0 +1,468 @@ +"""Plot the global radiation budget.""" + +# To run the doctests: +# % cd ESMValTool/esmvaltool/ +# % python -m doctest diag_scripts/radiation_budget/radiation_budget.py +import logging +import os + +import iris +import matplotlib.pyplot as plt +import numpy as np +import yaml +from iris import NameConstraint + +from esmvaltool.diag_scripts.shared import ( + group_metadata, + run_diagnostic, + save_figure, +) + +CWD = os.path.abspath(os.path.dirname(__file__)) +STEPHENS_FILENAME = "Stephens_et_al_2012_obs_Energy_Budget.yml" +DEMORY_FILENAME = "Demory_et_al_2014_obs_Energy_Budget.yml" + + +def derive_additional_variables(cubes): + """Return input ``cubes`` with the additional cubes. + + ``cubes`` must contain the variables specified in the recipe. + + The additional cubes derived from the cubes in ``cubes`` are as follows: + + * total_sw_cloud_forcing + * upward_sw_reflected_surface + * sw_reflected_clouds + * sw_absorbed_atm + * upward_lw_emitted_surface + * total_lw_cloud_forcing + * net_surface_radiation + * radiation_adsorbed_surface + * radiation_net_toa + + Parameters + ---------- + cubes : :class:`iris.cube.CubeList` + The cubes corresponding with the variables in the recipe. + + Returns + ------- + :class:`iris.cube.CubeList` + The input ``cubes`` with the additional cubes. 
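+
+    Notes
+    -----
+    For reference, the derivations implemented below are (using the CMOR
+    short names requested by the recipe)::
+
+        total_sw_cloud_forcing = rsut - rsutcs
+        total_lw_cloud_forcing = rlutcs - rlut
+        upward_sw_reflected_surface = rsds - rss
+        sw_reflected_clouds = rsut - upward_sw_reflected_surface
+        sw_absorbed_atm = rsdt - sw_reflected_clouds - rsds
+        upward_lw_emitted_surface = rlds - rls
+        net_surface_radiation = rss + rls
+        radiation_adsorbed_surface = rss + rls - hfss - hfls
+        radiation_net_toa = rsdt - rsut - rlut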
+ """ + rss = cubes.extract_cube(NameConstraint(var_name="rss")) + rsdt = cubes.extract_cube(NameConstraint(var_name="rsdt")) + rsut = cubes.extract_cube(NameConstraint(var_name="rsut")) + rsutcs = cubes.extract_cube(NameConstraint(var_name="rsutcs")) + rsds = cubes.extract_cube(NameConstraint(var_name="rsds")) + rls = cubes.extract_cube(NameConstraint(var_name="rls")) + rlut = cubes.extract_cube(NameConstraint(var_name="rlut")) + rlutcs = cubes.extract_cube(NameConstraint(var_name="rlutcs")) + rlds = cubes.extract_cube(NameConstraint(var_name="rlds")) + hfss = cubes.extract_cube(NameConstraint(var_name="hfss")) + hfls = cubes.extract_cube(NameConstraint(var_name="hfls")) + + # Derivations for the following two cloud_forcing variables are + # performed this way so that they match with the observational data + # (all positive), the convention used is to treat SW as positive + # downward and LW as positive upward. + total_sw_cloud_forcing = rsut - rsutcs + total_lw_cloud_forcing = rlutcs - rlut + upward_sw_reflected_surface = rsds - rss + sw_reflected_clouds = rsut - upward_sw_reflected_surface + sw_absorbed_atm = rsdt - sw_reflected_clouds - rsds + upward_lw_emitted_surface = rlds - rls + net_surface_radiation = rss + rls + radiation_adsorbed_surface = rss + rls - hfss - hfls + radiation_net_toa = rsdt - rsut - rlut + + total_sw_cloud_forcing.standard_name = "" + total_sw_cloud_forcing.long_name = "total_sw_cloud_forcing" + + upward_sw_reflected_surface.standard_name = "" + upward_sw_reflected_surface.long_name = "upward_sw_reflected_surface" + + sw_reflected_clouds.standard_name = "" + sw_reflected_clouds.long_name = "sw_reflected_clouds" + + sw_absorbed_atm.standard_name = "" + sw_absorbed_atm.long_name = "sw_absorbed_atm" + + upward_lw_emitted_surface.standard_name = "" + upward_lw_emitted_surface.long_name = "upward_lw_emitted_surface" + + total_lw_cloud_forcing.standard_name = "" + total_lw_cloud_forcing.long_name = "total_lw_cloud_forcing" + + net_surface_radiation.standard_name = "" + net_surface_radiation.long_name = "net_surface_radiation" + + radiation_adsorbed_surface.standard_name = "" + radiation_adsorbed_surface.long_name = "radiation_adsorbed_surface" + + radiation_net_toa.standard_name = "" + radiation_net_toa.long_name = "radiation_net_toa" + + additional_cubes = [ + total_sw_cloud_forcing, + upward_sw_reflected_surface, + sw_reflected_clouds, + sw_absorbed_atm, + upward_lw_emitted_surface, + total_lw_cloud_forcing, + net_surface_radiation, + radiation_adsorbed_surface, + radiation_net_toa, + ] + + cubes.extend(additional_cubes) + return cubes + + +def validate_variable_data(variable_data, name, unit): + """Return the variable from ``variable_data`` that has the same name and + units as provided by ``name`` and ``unit``. + + If ``name`` doesn't exist in ``variable_data``, the returned variable will + have a name and unit equal to ``name`` and ``unit`` and data equal to + 'NaN'. + + Parameters + ---------- + variable_data : list of dictionaries + The data to check where each dictionary corresponds + to a variable and the key of the dictionary is the + metadata attribute name. + name : string + The name of the variable to validate. + unit : string + The unit of the variable to validate. + + Raises + ------ + KeyError + If multiple ``name`` exist in ``variable_data``. + ValueError + If ``unit`` does not match the unit in ``variable_data``. + + Returns + ------- + dictionary + The validated variable. 
+ + Examples + -------- + >>> var1 = {"name": "sw_reflected_clouds", "unit": "W m-2", "data": 79.0} + >>> var2 = {"name": "toa_outgoing_longwave_flux", "unit": "W m-2", + ... "data": 239.0} + >>> variable_data = [var1, var2] + >>> name = "sw_reflected_clouds" + >>> unit = "W m-2" + >>> validated_variable = validate_variable_data(variable_data, name, unit) + >>> assert validated_variable == var1 + """ + items = [item for item in variable_data if item["name"] == name] + + if not items: + variable = {"name": name, "unit": unit, "data": np.nan} + + if len(items) == 1: + variable = items[0] + + if len(items) > 1: + raise KeyError(f"Multiple '{name}' exist in '{items}'.") + + if variable["unit"] != unit: + raise ValueError( + f"Unit {unit} does not match the unit {variable['unit']} " + f"in {variable} for {name}.") + + return variable + + +def order_data(cubes, obs_names, obs_unit): + """Return the data from the cubes in the order defined by ``obs_names``. + + The units from the cubes are checked against ``obs_units``. + + Parameters + ---------- + cubes : :class:`iris.cube.CubeList` + The cubes in a random order. + obs_names : list + The ordered names from the observation files. + obs_unit : string + The unit of the observation variables. + + Returns + ------- + list + The ordered data from the model cubes. + """ + variable_data = [] + for cube in cubes: + variable = {} + variable["name"] = cube.name() + variable["unit"] = cube.units + if np.ma.isMaskedArray(cube.data): + variable["data"] = cube.data.data + else: + variable["data"] = cube.data + variable_data.append(variable) + + ordered_model_data = [] + for obs_name in obs_names: + validated_variable = validate_variable_data(variable_data, obs_name, + obs_unit) + ordered_model_data.append(validated_variable["data"]) + + return ordered_model_data + + +def read_yaml_file(filepath): + """Return contents of a yaml file. + + Parameters + ---------- + filepath : string + The full path to the yaml file. + + Returns + ------- + list of dictionaries + The contents of the yaml file where each dictionary corresponds + to a line in the file and the key of the dictionary is the name + of the column. + """ + with open(filepath, "r") as stream: + contents = yaml.safe_load(stream) + return contents + + +def load_obs_data(): + """Return the names, units, data and error from the Stephens and Demory + observation files. + + The observation files should exist in the same directory as this + module. + + Returns + ------- + tuple of lists + The names, units, stephens data, stephens error and demory data + from the observation files. + """ + # Stephens data contains name, units, data, error. + stephens_filepath = os.path.join(CWD, STEPHENS_FILENAME) + stephens_contents = read_yaml_file(stephens_filepath) + + # Demory data contains name, units, data. + demory_filepath = os.path.join(CWD, DEMORY_FILENAME) + demory_contents = read_yaml_file(demory_filepath) + + # Arbitrarily use the order as defined in the Stephens filename. 
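+    # Each YAML entry is a mapping such as
+    # {name: toa_outgoing_shortwave_flux, unit: W m-2, data: 100.0,
+    #  error: 2.0} (the Demory file carries no "error" key), so iterating
+    # over the Stephens entries fixes one variable order for all datasets.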
+ names = [] + units = [] + stephens_data = [] + stephens_error = [] + demory_data = [] + + for line in stephens_contents: + name = line["name"] + unit = line["unit"] + names.append(name) + units.append(unit) + stephens_data.append(line["data"]) + stephens_error.append(line["error"]) + + demory_line = validate_variable_data(demory_contents, name, unit) + demory_data.append(demory_line["data"]) + + if len(set(units)) == 1: + unit = units[0] + else: + raise RuntimeError("Not all observations have the same unit.") + + return names, unit, stephens_data, stephens_error, demory_data + + +def plot_data( + model_dataset, + model_data, + model_period, + obs_names, + obs_unit, + stephens_data, + stephens_error, + demory_data, + ceres_dataset, + ceres_data, + ceres_period, +): + """Produce and save the radiation budget comparison plot. + + Parameters + ---------- + model_dataset : string + The name of the model. + model_data : list + Data values from the model for which this comparison plot is being + generated. + model_period : string + The start and end years of the model dataset. + obs_names : list + The names of variables included in the observation data. + obs_unit : list + The unit of variables included in the observation data. + stephens_data : list + Stephens observation data values. + stephens_error : list + Stephens observation data error values. + demory_data : list + Demory observation data values. + ceres_dataset : string + The name of the CERES observation data. + ceres_data : list + CERES observation data values. + ceres_period : string + The start and end years of the CERES observation data. + + Returns + ------- + :class:`matplotlib.figure.Figure` + The figure containing the plot. + """ + model_minus_stephens = np.array(model_data) - np.array(stephens_data) + model_minus_demory = np.array(model_data) - np.array(demory_data) + model_minus_ceres = np.array(model_data) - np.array(ceres_data) + + figure, axes = plt.subplots(figsize=(12, 8)) + title = f"Radiation budget for {model_dataset}" + y_label = f"Difference between model output and observations [{obs_unit}]" + y_lim = (-20, 20) + axes.set(title=title, ylabel=y_label, ylim=y_lim) + + num_x_ticks = len(obs_names) + x_ticks = np.arange(0, num_x_ticks * 2, 2) + + bar_width = 0.5 + opacity = 0.6 + axes.bar( + x_ticks + 0.2, + model_minus_stephens, + bar_width, + alpha=opacity, + color="cornflowerblue", + label=f"{model_dataset} ({model_period}) - Stephens et al. (2012)", + yerr=stephens_error, + ) + axes.bar( + x_ticks + 0.2 + bar_width, + model_minus_ceres, + bar_width, + alpha=opacity, + color="orange", + label=(f"{model_dataset} ({model_period}) - {ceres_dataset} " + f"({ceres_period})"), + ) + axes.bar( + x_ticks + 0.2 + bar_width * 2, + model_minus_demory, + bar_width, + alpha=opacity, + color="darkgrey", + label=f"{model_dataset} ({model_period}) - Demory et al. (2014)", + ) + axes.spines["bottom"].set_position(("data", 0)) + axes.spines["top"].set_position(("data", 0)) + + axes.set_xticks(x_ticks + bar_width + 0.5) + axes.set_xticklabels(obs_names, ha="center", rotation=90, fontsize=10) + + axes.legend(frameon=False, fontsize=10, loc="upper left") + + return figure + + +def get_provenance_record(filenames): + """Return a provenance record describing the plot. + + Parameters + ---------- + filenames : list of strings + The filenames containing the data used to create the plot. + + Returns + ------- + dictionary + The provenance record describing the plot. 
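+
+    Examples
+    --------
+    >>> get_provenance_record(["/path/to/tas_model.nc"])
+    {'ancestors': ['/path/to/tas_model.nc']}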
+ """ + record = { + 'ancestors': filenames, + } + return record + + +def main(config): + """Radiation budget comparison for models defined in the radiation_budget + recipe file. + + Parameters + ---------- + config : dict + The ESMValTool configuration. + """ + logger = logging.getLogger(__name__) + + input_data = config["input_data"] + datasets = group_metadata(input_data.values(), "dataset") + + ( + obs_names, + obs_unit, + stephens_data, + stephens_error, + demory_data, + ) = load_obs_data() + + ceres_dataset = "CERES-EBAF" + ceres_group = datasets.pop(ceres_dataset) + ceres_filenames = [item["filename"] for item in ceres_group] + raw_ceres_data = iris.load(ceres_filenames) + ceres_data = order_data(raw_ceres_data, obs_names, obs_unit) + ceres_period = (f"{ceres_group[0]['start_year']} - " + f"{ceres_group[0]['end_year']}") + + for model_dataset, group in datasets.items(): + # 'model_dataset' is the name of the model dataset. + # 'group' is a list of dictionaries containing metadata. + logger.info("Processing data for %s", model_dataset) + filenames = [item["filename"] for item in group] + unordered_model_data = iris.load(filenames) + all_model_data = derive_additional_variables(unordered_model_data) + model_data = order_data(all_model_data, obs_names, obs_unit) + model_period = f"{group[0]['start_year']} - {group[0]['end_year']}" + figure = plot_data( + model_dataset, + model_data, + model_period, + obs_names, + obs_unit, + stephens_data, + stephens_error, + demory_data, + ceres_dataset, + ceres_data, + ceres_period, + ) + provenance_record = get_provenance_record(filenames) + save_figure(model_dataset, + provenance_record, + config, + figure, + close=True) + + +if __name__ == "__main__": + with run_diagnostic() as CONFIG: + main(CONFIG) diff --git a/esmvaltool/diag_scripts/radiation_budget/seasonal_radiation_budget.py b/esmvaltool/diag_scripts/radiation_budget/seasonal_radiation_budget.py new file mode 100644 index 0000000000..8ddc9e5845 --- /dev/null +++ b/esmvaltool/diag_scripts/radiation_budget/seasonal_radiation_budget.py @@ -0,0 +1,91 @@ +"""Write the global climatological seasonal radiation budget to a text file.""" + +import csv +import logging +import os + +import iris + +from esmvaltool.diag_scripts.shared import group_metadata, run_diagnostic + +SEASONS = {0: "djf", 1: "mam", 2: "jja", 3: "son"} + + +def organise_seasonal_data(model_data): + """Return the seasonal data from the cubes. + + Parameters + ---------- + model_data : :class:`iris.cube.CubeList` + The cubes containing seasonal data. + + Returns + ------- + list of lists + The seasonal data in the form ``[[, value], ...]``. + """ + seasonal_data = [] + for cube in model_data: + long_name = cube.long_name + for season in cube.slices_over("season_number"): + season_name = SEASONS[season.coord("season_number").points[0]] + value = season.data + seasonal_data.append([f"{long_name} {season_name}", str(value)]) + average_value = cube.data.mean() + seasonal_data.append([f'{long_name} {"ann"}', str(average_value)]) + return seasonal_data + + +def write_seasonal_data_output(output_dir, model_dataset, seasonal_data): + """Write seasonal data to CSV file. + + The CSV file will have the name ``_metrics.csv`` and can be + used for the normalised metric assessment plot. + + Parameters + ---------- + output_dir : string + The full path to the directory in which the CSV file will be written. + model_dataset : string + The model name. + seasonal_data : list of lists + The seasonal data to write. 
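+
+    Notes
+    -----
+    Each ``seasonal_data`` entry as produced by
+    :func:`organise_seasonal_data`, e.g.
+    ``["toa_outgoing_longwave_flux djf", "239.7"]``, is written as one
+    two-column row of the CSV file.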
+ """ + file_name = f"{model_dataset}_metrics.csv" + file_path = os.path.join(output_dir, file_name) + + with open(file_path, "w", newline="") as csvfile: + csv_writer = csv.writer(csvfile) + for line in seasonal_data: + csv_writer.writerow(line) + + +def main(config): + """Seasonal radiation budget comparison for models defined in the + radiation_budget recipe file. + + Parameters + ---------- + config : dict + The ESMValTool configuration. + """ + logger = logging.getLogger(__name__) + + input_data = config["input_data"] + datasets = group_metadata(input_data.values(), "dataset") + + for model_dataset, group in datasets.items(): + # 'model_dataset' is the name of the model dataset. + # 'group' is a list of dictionaries containing metadata. + logger.info("Processing data for %s", model_dataset) + filenames = [item["filename"] for item in group] + model_data = iris.load(filenames) + seasonal_data = organise_seasonal_data(model_data) + + write_seasonal_data_output(config["work_dir"], model_dataset, + seasonal_data) + + +if __name__ == "__main__": + with run_diagnostic() as CONFIG: + main(CONFIG) diff --git a/esmvaltool/diag_scripts/rainfarm/rainfarm.jl b/esmvaltool/diag_scripts/rainfarm/rainfarm.jl new file mode 100644 index 0000000000..42ed02900d --- /dev/null +++ b/esmvaltool/diag_scripts/rainfarm/rainfarm.jl @@ -0,0 +1,153 @@ +# ############################################################################# +# rainfarm.jl +# Authors: J. von Hardenberg (ISAC-CNR, Italy) +# E. Arnone (ISAC-CNR, Italy) +# ############################################################################# +# Description +# ESMValTool diagnostic calling the RainFARM library written in Julia +# (by von Hardenberg, ISAC-CNR, Italy). +# RainFARM is a stochastic precipitation downscaling method, further adapted +# for climate downscaling. 
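+# The small-scale detail is generated stochastically by extrapolating
+# the spatial Fourier spectral slope of the input field (see the
+# references listed in the provenance record below).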
+# +# Required +# CDO +# Julia language: https://julialang.org +# RainFARM Julia library: https://github.com/jhardenberg/RainFARM.jl +# +# Optional +# +# Caveats +# +# Modification history +# 20190810-vonhardenberg_jost: rewritten in pure Julia, no R +# 20181210-vonhardenberg_jost: cleanup and using juliacall +# 20180508-arnone_enrico: Conversion to v2.0 +# 20170908-arnone_enrico: 1st github version +# +# ############################################################################ + +import YAML +using RainFARM +using Printf + +function provenance_record(infile) + xprov = Dict( "ancestors" => infile, + "authors" => ["vonhardenberg_jost", "arnone_enrico"], + "references" => ["donofrio14jh", "rebora06jhm", + "terzago18nhess"], + "projects" => ["c3s-magic"], + "caption" => "RainFARM precipitation downscaling", + "statistics" => ["other"], + "realms" => ["atmos"], + "themes" => ["phys"], + "domains" => ["reg"] + ) + return(xprov) +end + +let +diag_scripts_dir = dirname(@__DIR__) +include(joinpath(diag_scripts_dir, "shared/external.jl")) + +settings = YAML.load_file(ARGS[1]) + +metadata = YAML.load_file(settings["input_files"][1]) +climofiles = collect(keys(metadata)) +climolist = metadata[climofiles[1]] +varname = climolist["short_name"] +diag_base = climolist["diagnostic"] + +println(diag_base, ": starting routine") +println(diag_base, ": creating work and plot directories") +work_dir = settings["work_dir"] +run_dir = settings["run_dir"] +mkpath(work_dir) +mkpath(run_dir) +cd(run_dir) + +# setup provenance file and list +provenance_file = joinpath(run_dir, "diagnostic_provenance.yml") +provenance = Dict() + +# Reading parameters from the settings +nf = get(settings, "nf", 2) +slope = get(settings, "slope", 0.0) +kmin = get(settings, "kmin", 1) +nens = get(settings, "nens", 1) +weights_climo = get(settings, "weights_climo", "") +conserv_glob = get(settings, "conserv_glob", false) +conserv_smooth = get(settings, "conserv_smooth", true) +auxiliary_data_dir = get(settings, "auxiliary_data_dir", "") + +if weights_climo isa Bool # Compatibility with old standard + weights_climo = "" +end + +# Conservation options +if (conserv_glob) + println("Conserving global field") +elseif (conserv_smooth) + println("Smooth conservation") +else + println("Box conservation") +end + +for (infile, value) in metadata + (infilename, ) = splitext(basename(infile)) + outfilename = joinpath(work_dir, infilename * "_downscaled") + + println(diag_base, ": calling RainFARM for ", infilename) + + (pr, lon_mat, lat_mat) = read_netcdf2d(infile, varname) + + # Ensure grid is square and with even dims + nmin = min(size(pr)[1], size(pr)[2]) + nmin = floor(Int, nmin / 2) * 2 + pr = pr[1:nmin, 1:nmin, :] + if (ndims(lon_mat) == 1) + lon_mat = lon_mat[1:nmin] + lat_mat = lat_mat[1:nmin] + else + lon_mat = lon_mat[1:nmin, 1:nmin] + lat_mat = lat_mat[1:nmin, 1:nmin] + end + + (lon_f, lat_f) = lon_lat_fine(lon_mat, lat_mat, nf); + + # Automatic spectral slope + if (slope == 0.) + (fxp, ftp)=fft3d(pr) + slope =fitslopex(fxp, kmin=kmin) + println("Computed spatial spectral slope: ", slope) + else + println("Fixed spatial spectral slope: ", slope) + end + + if weights_climo != "" + if weights_climo[1] != '/' + weights_climo = joinpath(auxiliary_data_dir, weights_climo) + end + println("Using external climatology for weights: ", weights_climo) + fileweights = joinpath(work_dir, infilename * "_w.nc") + + ww = rfweights(weights_climo, infile, nf, + weightsfn = fileweights, varname = varname, + fsmooth = conserv_smooth) + else + ww = 1. 
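+        # no external climatology requested: keep a uniform weight
+        # (ww = 1.0), i.e. no orographic adjustment is applied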
+    end
+
+    for iens = 1:nens
+        println("Realization ", iens)
+        rd = rainfarm(pr, slope, nf, ww, fglob = conserv_glob,
+                      fsmooth = conserv_smooth, verbose = true)
+        fname = @sprintf("%s_%04d.nc", outfilename, iens)
+        write_netcdf2d(fname, rd, lon_f, lat_f, varname, infile)
+        xprov = provenance_record(infile)
+        provenance[fname] = xprov
+    end
+end
+
+# Write provenance file
+create_yaml(provenance, provenance_file)
+end
diff --git a/esmvaltool/diag_scripts/regional_downscaling/Figure9.38.ncl b/esmvaltool/diag_scripts/regional_downscaling/Figure9.38.ncl
new file mode 100644
index 0000000000..e8844107e2
--- /dev/null
+++ b/esmvaltool/diag_scripts/regional_downscaling/Figure9.38.ncl
@@ -0,0 +1,822 @@
+; #############################################################################
+; # ANNUAL CYCLE CONTOUR PLOTS OF REGIONAL MEANS                              #
+; #############################################################################
+; # Author: Irene Cionni (ENEA, Italy)                                        #
+; # CRESCENDO project
+; #############################################################################
+; #
+; # Description
+; #    * Creates annual cycle regional plots for individual models and the
+; #      multi-model mean over the selected project&experiment&mip
+; #      - Read regions
+; #      - select the regions
+; #      - calculate annual cycle for each model
+; #      - evaluate the MMM over selected project&experiment&mip
+; #      - plot the individual models annual cycle
+; #      - plot the MMMs annual cycle and standard deviations
+; #
+; # Optional diag_script_info attributes:
+; #
+; #    * styleset, default "CMIP5"
+; #    * fig938_region_label:
+; #      (/"WNA","ENA","CAM","TSA","SSA","EUM","NAF","CAF","SAF",
+; #        "NAS","CAS","EAS","SAS","SEA","AUS"/)
+; #      these regions are defined in function select_region
+; #      (./diag_scripts/regional_downscaling/regional_function.ncl)
+; #      according to a personal communication of Sin Chan Chou,
+; #      default "WNA"
+; #    * fig938_project_MMM: projects to average, default "CMIP5"
+; #    * fig938_experiment_MMM: experiments to average, default "historical"
+; #    * fig938_mip_MMM: mip to average, default "Amon"
+; #    * fig938_names_MMM: names in legend, default "CMIP5"
+; #    * fig938_colors_MMM: colors, default "red"
+; #    * fig938_YMin: minimum Y axis value
+; #    * fig938_YMax: maximum Y axis value
+; #    * fig938_diff: difference to reference data (true) or
+; #      absolute annual cycle (false), default: true
+; #
+; #    fig938_mip_MMM, fig938_experiment_MMM, fig938_project_MMM,
+; #    fig938_names_MMM, and fig938_colors_MMM
+; #    must have the same number of elements, e.g.
+; # fig938_project_MMM=(/"CMIP5", "CMIP3"/) +; # fig938_experiment_MMM=(/"historical", "historical"/) +; # fig938_mip_MMM=(/"Amon", "Amon"/) +; # fig938_names_MMM=(/"CMIP5","CMIP3"/) +; # fig938_colors_MMM:=(/"red","blue"/) +; # +; # +; # +; # Caveats: +; # +; # Comments: +; # Regions borders are polygons defined in function select_region_938 +; # in regional_function.ncl +; # +; # Modification history: +; # 20230127-weigel_katja: Update for absolute annual cycle including +; # new fig938_diff attribute +; # and bug fixes in index of data sets/models and +; # color for regions +; # 20221026-weigel_katja: header updated +; # 20220314-weigel_katja: header updated +; # original file anncycplot.ncl +; # # +; ############################################################### +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/scaling.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/regional_downscaling/regional_function.ncl" + + +begin + enter_msg(DIAG_SCRIPT, "") + var0 = variable_info[0]@short_name + units = variable_info[0]@units + info0 = select_metadata_by_name(input_file_info, var0) + dim_MOD1 = ListCount(info0) + names = metadata_att_as_array(info0, "dataset") + projects = metadata_att_as_array(info0, "project") + exps = metadata_att_as_array(info0, "exp") + ensembles = metadata_att_as_array(info0, "ensemble") + mips = metadata_att_as_array(info0, "mip") + y1 = metadata_att_as_array(info0, "start_year") + y2 = metadata_att_as_array(info0, "end_year") + long_name = variable_info[0]@long_name + if (isatt(variable_info[0], "reference_dataset")) then + refname = variable_info[0]@reference_dataset + do imod = 0, dimsizes(names) - 1 + if (names(imod) .eq. refname) then + iref = imod + end if + end do + else + error_msg("f", DIAG_SCRIPT, "", "no reference dataset " + \ + "(variable_info[0]@reference_dataset) needs to be defined.") + end if + if(isatt(variable_info, "units")) then + units = variable_info@units + else + units = "K" + end if + if(isatt(variable_info, "long_name")) then + LONG_NAME = variable_info@long_name + else + LONG_NAME = var0 + end if + + log_info("++++++++++++++++++++++++++++++++++++++++++") + log_info(DIAG_SCRIPT + " (var: " + var0 + ")") + log_info("++++++++++++++++++++++++++++++++++++++++++") + + ; Set default values for optional diag_script_info attributes + + set_default_att(diag_script_info, "fig938_region_label", "WNA") + set_default_att(diag_script_info, "styleset", "CMIP5") + set_default_att(diag_script_info, "fig938_MMM", "True") + set_default_att(diag_script_info, "fig938_project_MMM", "CMIP5") + set_default_att(diag_script_info, "fig938_experiment_MMM", "historical") + set_default_att(diag_script_info, "fig938_mip_MMM", "Amon") + set_default_att(diag_script_info, "fig938_names_MMM", \ + diag_script_info@fig938_project_MMM) + set_default_att(diag_script_info, "fig938_colors_MMM", "red") + set_default_att(diag_script_info, "refModel", "ERA-Interim") + set_default_att(diag_script_info, "fig938_diff", "True") + colors = project_style(info0, diag_script_info, "colors") + dashes = project_style(info0, diag_script_info, "dashes") + thicks = tofloat(project_style(info0, diag_script_info, "thicks")) + markers = project_style(info0, diag_script_info, "markers") + lin_mar_mod = new((/dimsizes(colors)/), "string") + lin_mar_mod = (/"Lines"/) + + if (isatt(diag_script_info, "refModel")) then + idx_ref = ind(names .eq. 
diag_script_info@refModel) + if (all(ismissing(idx_ref))) then + error_msg("f", diag_script, "", "refModel is not included in " + \ + "model list (namelist)") + end if + p_ref = "ref_" + else + idx_ref = -1 + p_ref = "" + end if + region_label = tostring(diag_script_info@fig938_region_label) + dim_reg = dimsizes(region_label) + latrange = new((/dimsizes(region_label), 2/), "double") + lonrange = new((/dimsizes(region_label), 2/), "double") + do ir = 0, dimsizes(region_label) - 1 + region = select_region1(region_label(ir)) + latrange(ir, :) = region(0:1) + lonrange(ir, :) = region(2:3) + end do + flag_mod = where(projects.ne."OBS" .and.projects.ne."OBS6"\ + .and. projects.ne."obs4mips", 1, 0) + index_mod = ind(flag_mod.gt.0) + index_obs = ind(flag_mod.eq.0) + dim_MOD = dimsizes(index_mod) + dim_OBS = 0 + + if (.not.all(ismissing(index_obs))) then + dim_OBS = dimsizes(index_obs) + list_obs_mod = names(index_obs) + end if + ; make sure path for (mandatory) netcdf output exists + + work_dir = config_user_info@work_dir + "/" + ; Create work dir + system("mkdir -p " + work_dir) + +end +begin + work_dir = config_user_info@work_dir + "/" + ; Create work dir + system("mkdir -p " + work_dir) + wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "Figure9_38_" + var0) + wks_r = get_wks("dummy_for_wks", DIAG_SCRIPT, "Figure9_38_regions_" + var0) + wks_l = get_wks("dummy_for_wks", DIAG_SCRIPT, "Figure9_38_legend_" + var0) + if (diag_script_info@fig938_MMM.eq."True") + wks_MMM = get_wks("dummy_for_wks", DIAG_SCRIPT, "Figure9_38_MMM_" + var0) + end if + ; ****************************************************************** + ; common resources for plots + pan = True + res = True + res@gsnDraw = False + res@gsnFrame = False + res@vpHeightF = 0.6 + res@vpWidthF = 0.8 + ; res@txFontHeightF = 0.018 + months = ispan(0, 12, 1) + + res@tiMainFontHeightF = 0.022 + res@tmXBMode = "Explicit" + res@tmXBValues = ispan(0, 12, 1) + res@tmXBLabels = (/"J", "F", "M", "A", "M", "J", "J", "A", "S", \ + "O", "N", "D", "J"/) + res@tmXBLabelFontHeightF = 0.035 + res@tmYLLabelFontHeightF = 0.035 + res@tiXAxisString = " " + res@tiYAxisString = " " + res@xyLineDashSegLenF = 0.5 +; ------------res1-------------------------------------------------- + + pan1 = True + res1 = True + res1@gsnDraw = False + res1@gsnFrame = False + ; res1@txFontHeightF = 0.02 + res1@tiMainFontHeightF = 0.02 + res1@tmXBMode = "Explicit" + res1@tmXBValues = ispan(0, 12, 1) + res1@tmXBLabels = (/"J", "F", "M", "A", "M", "J", "J", "A", "S", \ + "O", "N", "D", "J"/) + res1@vpHeightF = 0.6 + res1@vpWidthF = 0.8 + res1@tmXBLabelFontHeightF = 0.035 + res1@tmYLLabelFontHeightF = 0.035 + res1@tiXAxisString = " " + res1@tiYAxisString = " " + res1@xyLineDashSegLenF = 0.5 + ; ************************select MMM********************* + dim_MMM = 1 + if (diag_script_info@fig938_MMM.eq."True") + if (dimsizes(diag_script_info@fig938_project_MMM).eq.\ + dimsizes(diag_script_info@fig938_experiment_MMM).eq.\ + dimsizes(diag_script_info@fig938_mip_MMM)) then + dim_MMM = dimsizes(diag_script_info@fig938_project_MMM) + indic_s = new((/dimsizes(diag_script_info@fig938_project_MMM),\ + dimsizes(names)/), "integer") + indic_not_s = new((/dimsizes(diag_script_info@fig938_project_MMM),\ + dimsizes(names)/), "integer") + do ik = 0, dimsizes(diag_script_info@fig938_project_MMM) - 1 + ii_s = ind((projects.eq.diag_script_info@fig938_project_MMM(ik)).and.\ + (exps.eq.diag_script_info@fig938_experiment_MMM(ik)).and.\ + (mips.eq.diag_script_info@fig938_mip_MMM(ik))) + indic_s(ik, :dimsizes(ii_s) - 
1) = ii_s + delete(ii_s) + + ii_not_s = \ + ind((projects.ne.diag_script_info@fig938_project_MMM(ik))\ + .or.(exps.ne.diag_script_info@fig938_experiment_MMM(ik))\ + .or.(mips.ne.diag_script_info@fig938_mip_MMM(ik))) + indic_not_s(ik, :dimsizes(ii_not_s)-1) = ii_not_s + delete(ii_not_s) + end do + else + error_msg("f", diag_script, "", "diag_script_info@fig938_project_MMM" + \ + "diag_script_info@fig938_experiment_MMM and" + \ + " diag_script_info@fig938_experiment_MMM" + \ + " must have the same dimension") + end if + end if + if (dimsizes(diag_script_info@fig938_project_MMM).gt.1) then + indic_not_sela = get_unique_difference(indic_s, indic_not_s) + else + ndim = ndtooned(indic_not_s) + ise = ind(.not.ismissing(ndim)) + indic_not_sela = ndim(ise) + delete(ise) + delete(ndim) + end if + plot = new(dim_reg, "graphic") + plot_MMM = new(dim_reg, "graphic") + plot_STD = new((/dim_reg, dim_MMM/), "graphic") + + ; map = new(dimsizes(region_label), "graphic") + map_r = new(1, "graphic") + mres_r = True + mres_r@gsnDraw = False + mres_r@gsnFrame = False + mres_r@lbLabelBarOn = False + mres_r@cnLevelSelectionMode = "ManualLevels" + mres_r@cnMinLevelValF = 0. + mres_r@cnMaxLevelValF = 200.0 + mres_r@cnMaxLevelCount = 42.0 + mres_r@cnLevelSpacingF = max((/5.0, 200.0/tofloat(dim_reg + 2.0)/)) + mres_r@cnInfoLabelOn = False + mres_r@cnLineLabelsOn = False + mres_r@cnFillOn = True + mres_r@cnFillOpacityF = 0.5 + cmap_r = read_colormap_file("GMT_wysiwygcont") + mres_r@cnFillPalette = cmap_r(::-1, :) + mres_r@mpFillOn = True + txres = True + txres@txFontHeightF = 0.015 + txres@txFont = 22 + txres@txJust = "CenterLeft" + + random_setallseed(36484749, 9494848) + unf = random_uniform(0, 1, (/dim_reg + 2/)) + ip = dim_pqsort(unf, 1) + + do ir = 0, dim_reg - 1 + case1 = region_label(ir) + if(idx_ref.ne.-1) then + if (diag_script_info@fig938_MMM.eq."True") + if ((diag_script_info@fig938_diff.eq."True").and.(idx_ref.ne.-1)) then + indic_s = where(indic_s.eq.idx_ref, indic_s@_FillValue, indic_s) + indic_not_sel = where(indic_not_sela.eq.idx_ref,\ + indic_not_sela@_FillValue, indic_not_sela) + if(.not.(any(ismissing(indic_not_sel)))) then + indic_not_sel1 = indic_not_sel(ind(.not.ismissing(indic_not_sel))) + delete(indic_not_sel) + indic_not_sel = indic_not_sel1 + delete(indic_not_sel1) + end if + else + indic_not_sel = indic_not_sela + end if + end if + + if (diag_script_info@fig938_diff.eq."True") then + pan@txString = LONG_NAME + " bias vs. 
" + names(idx_ref) + else + pan@txString = LONG_NAME + end if + D = new((/dim_MOD1, 13/), "double") + models = new((/dim_MOD1/), "string") + C = new((/dim_MOD1 - 1, 13/), "double") + models1 = new((/dim_MOD1 - 1/), "string") + colors1 = new((/dim_MOD1 - 1/), typeof(colors)) + dashes1 = new((/dim_MOD1 - 1/), typeof(dashes)) + thicks1 = new((/dim_MOD1 - 1/), typeof(thicks)) + markers1 = new((/dim_MOD1 - 1/), typeof(markers)) + lin_mar_mod1 = new((/dim_MOD1 - 1/), "string") + ; (b) difference of each model from 'reference' + ; CASE 1 + A0 = read_data(info0[idx_ref]) + ; +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + ; Get infos for map plot + lsdata = A0(0, :, :) + cell_fill = (/select_region_938(region_label(ir), lsdata)/) + cell_fill!0 = "lat" + cell_fill!1 = "lon" + cell_fill&lat = lsdata&lat + cell_fill&lon = lsdata&lon + irc = (ip(ir + 1)) * 200.0/tofloat(dim_reg + 2) + + if (ir.eq.0) then + cell_fill2 = where(ismissing(cell_fill), -1., irc) + cell_fill2!0 = "lat" + cell_fill2!1 = "lon" + cell_fill2&lat = lsdata&lat + cell_fill2&lon = lsdata&lon + else + cell_fill2 = where(ismissing(cell_fill), cell_fill2, irc) + end if + ; ***************************************************************** + Atmp1 = A0 + Atmp = area_operations(Atmp1, latrange(ir, 0), latrange(ir, 1), \ + lonrange(ir, 0), lonrange(ir, 1), "average",\ + True) + delete(A0) + if (Atmp@units.eq."K") then + A = convert_units(Atmp, "degC") ; get Celsius + UNITS = "~S~o~N~C" + else + if (Atmp@units.eq."kg m-2 s-1") then + A = convert_units(Atmp, "mm/day") + UNITS = "mm/day" + else + A = (/Atmp/) + UNITS = units + end if + end if + ; end if + delete(Atmp) + delete(Atmp1) + + jmod = 0 + do imod = 0, dim_MOD1 - 1 + models(imod) = names(imod) + if (imod .ne. idx_ref) then + B0 = read_data(info0[imod]) + Btmp1 = B0 + Btmp = area_operations(Btmp1, latrange(ir, 0), latrange(ir, 1), \ + lonrange(ir, 0), lonrange(ir, 1), "average",\ + True) + delete(B0) + if (Btmp@units.eq."K") then + B = convert_units(Btmp, "degC") ; get Celsius + UNITS = "~S~o~N~C" + else + if (Btmp@units.eq."kg m-2 s-1") then + B = convert_units(Btmp, "mm/day") + UNITS = "mm/day" + else + B = (/Btmp/) + UNITS = units + end if + end if + delete(Btmp) + delete(Btmp1) + + ; --------------------------------------------------------- + ; difference plot of time(x) vs. 
latitude(y) + + if (diag_script_info@fig938_diff.eq."True") then + C(jmod, 0:11) = (/A - B/) ; get diff values + C(jmod, 12) = (/A(0) - B(0)/) + D(imod, 0:11) = (/A - B/) + D(imod, 12) = (/A(0) - B(0)/) + else + C(jmod, 0:11) = (/B/) + C(jmod, 12) = (/B(0)/) + D(imod, 0:11) = (/B/) + D(imod, 12) = (/B(0)/) + end if + + delete(B) + colors1(jmod) = colors(imod) + dashes1(jmod) = dashes(imod) + thicks1(jmod) = thicks(imod) + markers1(jmod) = markers(imod) + models1(jmod) = names(imod) + lin_mar_mod1(jmod) = "Lines" + jmod = jmod + 1 + end if + end do ; imod + else + if (diag_script_info@fig938_MMM.eq."True") + if ((diag_script_info@fig938_diff.eq."True").and.(idx_ref.ne.-1)) then + indic_s = where(indic_s.eq.idx_ref, indic_s@_FillValue, indic_s) + indic_not_sel = where(indic_not_sela.eq.idx_ref,\ + indic_not_sela@_FillValue, indic_not_sela) + if(.not.(any(ismissing(indic_not_sel)))) then + indic_not_sel1 = indic_not_sel(ind(.not.ismissing(indic_not_sel))) + delete(indic_not_sel) + indic_not_sel = indic_not_sel1 + delete(indic_not_sel1) + end if + else + indic_not_sel = indic_not_sela + end if + end if + lin_mar_mod1 = lin_mar_mod + C = new((/dim_MOD1, 13/), "double") + do imod = 0, dim_MOD1 - 1 + ; CASE 1 + pan@txString = LONG_NAME + A0 = read_data(info0[imod]) + Atmp1 = A0 + Atmp = area_operations(Atmp1, latrange(ir, 0), latrange(ir, 1), \ + lonrange(ir, 0), lonrange(ir, 1), "average",\ + True) + delete(A0) + if (Atmp@units.eq."K") then + A = convert_units(Atmp, "degC") ; get Celsius + UNITS = "~S~o~N~C" + else + if (Atmp@units.eq."kg m-2 s-1") then + A = convert_units(Atmp, "mm/day") + UNITS = "mm/day" + else + A = (/Atmp/) + UNITS = units + end if + end if + delete(Atmp) + delete(Atmp1) + C(imod, 0:11) = A + C(imod, 12) = A(0) + lin_mar_mod1(imod) = "Lines" + end do + + colors1 = colors + dashes1 = dashes + thicks1 = thicks + markers1 = markers + models1 = names + end if + + ; case contour plots of time(x) vs. 
latitude(y)
+
+      ; res@tiMainString = variable_info@long_name
+      if (diag_script_info@fig938_diff.eq."True") then
+        E = C
+      else
+        E = D
+        E(idx_ref, 0:11) = (/A/)
+        E(idx_ref, 12) = (/A(0)/)
+      end if
+      if (diag_script_info@fig938_diff.eq."True") then
+        Ehlp = reshape(E, (/13*(dim_MOD1 - 1)/))
+      else
+        Ehlp = reshape(E, (/13*dim_MOD1/))
+      end if
+      if (any(.not.ismissing(Ehlp))) then
+        min1 = min(Ehlp(ind(.not.ismissing(Ehlp))))
+        max1 = max(Ehlp(ind(.not.ismissing(Ehlp))))
+      else
+        min1 = 0.0
+        max1 = 1.0
+      end if
+      if (isatt(diag_script_info, "fig938_YMin")) then
+        res@trYMinF = diag_script_info@fig938_YMin
+      else
+        res@trYMinF = min1
+      end if
+      if (isatt(diag_script_info, "fig938_YMax")) then
+        res@trYMaxF = diag_script_info@fig938_YMax
+      else
+        res@trYMaxF = max1
+      end if
+      if (diag_script_info@fig938_diff.eq."True") then
+        res@xyLineColors = colors1          ; line colors
+        res@xyLineThicknesses = thicks1     ; line thicknesses
+        res@xyDashPatterns = dashes1        ; line patterns
+      else
+        res@xyLineColors = colors           ; line colors
+        res@xyLineThicknesses = thicks      ; line thicknesses
+        res@xyDashPatterns = dashes         ; line patterns
+      end if
+
+      res@gsnLeftString = case1
+      res@gsnLeftStringFontHeightF = 0.04
+      res@gsnRightString = UNITS
+      res@gsnRightStringFontHeightF = 0.04
+      res@tfPolyDrawOrder = "Draw"
+
+      plot(ir) = gsn_csm_xy(wks, months, E, res)
+
+      ; ******************single region output file**************************
+
+      nc_filename = work_dir + "fig938_" + var0 + ".nc"
+      E@var = var0 + "_" + case1
+      E@diag_script = "fig938"
+      E!0 = "line"
+      E!1 = "month"
+      month_n = (/"J", "F", "M", "A", "M", "J", "J", "A", "S", \
+                  "O", "N", "D", "J"/)
+
+      if (diag_script_info@fig938_diff.eq."True") then
+        E&line = models1
+      else
+        E&line = models
+      end if
+      E&month = month_n
+
+      if (ir .eq. 0) then
+        E@existing = "overwrite"
+      else
+        E@existing = "append"
+      end if
+
+      nc_outfile = ncdf_write(E, nc_filename)
+
+      ; ******************single region output file**************************
+
+      if (diag_script_info@fig938_MMM.eq."True")
+        if ((diag_script_info@fig938_diff.ne."True").and.(idx_ref .ne. -1)) then
+          if (.not.all(ismissing(indic_not_sel))) then
+            if (any(indic_not_sel.eq.idx_ref)) then
+              dim_extr = dimsizes(indic_not_sel)
+            else
+              dim_extr = dimsizes(indic_not_sel) + 1
+            end if
+          else
+            dim_extr = 1
+          end if
+        else
+          if (.not.all(ismissing(indic_not_sel))) then
+            dim_extr = dimsizes(indic_not_sel)
+          else
+            dim_extr = 0
+          end if
+        end if
+
+        MMM = new((/dimsizes(diag_script_info@fig938_project_MMM) + \
+                    dim_extr, 13/), typeof(C))
+        STD = new((/dimsizes(diag_script_info@fig938_project_MMM) + \
+                    dim_extr, 13/), typeof(C))
+        names_MMM = new((/dimsizes(diag_script_info@fig938_project_MMM) + \
+                          dim_extr/), "string")
+        colors_MMM = new((/dimsizes(diag_script_info@fig938_project_MMM) + \
+                           dim_extr/), typeof(colors))
+        thicks_MMM = new((/dimsizes(diag_script_info@fig938_project_MMM) + \
+                           dim_extr/), typeof(thicks))
+        dashes_MMM = new((/dimsizes(diag_script_info@fig938_project_MMM) + \
+                           dim_extr/), typeof(dashes))
+        lin_mar_mod_MMM = \
+          new((/dimsizes(diag_script_info@fig938_project_MMM) + \
+                dim_extr/), typeof(lin_mar_mod1))
+        colori_def = (/"red", "blue", "green", "magenta", "yellow"/)
+        do iin = 0, dimsizes(diag_script_info@fig938_project_MMM) - 1
+          aa = indic_s(iin, ind(.not.ismissing(indic_s(iin, :))))
+          if (idx_ref .ne.
-1) + D!0 = "case" + MMM(iin, :) = dim_avg_n_Wrap(D(aa, :), 0) + STD(iin, :) = dim_stddev_n_Wrap(D(aa, :), 0) + else + C!0 = "case" + MMM(iin, :) = dim_avg_n_Wrap(C(aa, :), 0) + STD(iin, :) = dim_stddev_n_Wrap(C(aa, :), 0) + end if + delete(aa) + if(isatt(diag_script_info, "fig938_colors_MMM")) then + colors_MMM(iin) = diag_script_info@fig938_colors_MMM(iin) + else + colors_MMM(iin) = colori_def(iin) + end if + thicks_MMM(iin) = 3. + dashes_MMM(iin) = 0 + lin_mar_mod_MMM(iin) = "Lines" + if(isatt(diag_script_info, "fig938_names_MMM")) then + names_MMM(iin) = diag_script_info@fig938_names_MMM(iin) + else + names_MMM(iin) = diag_script_info@fig938_project_MMM(iin) + " " + \ + diag_script_info@fig938_experiment_MMM(iin) + \ + " " + diag_script_info@fig938_mip_MMM(iin) + end if + end do + if(.not.all(ismissing(indic_not_sel))) then + if (idx_ref .ne. -1) + MMM(dimsizes(diag_script_info@fig938_project_MMM):, :) = \ + (/E(indic_not_sel(ind(.not.ismissing(indic_not_sel))), :)/) + colors_MMM(dimsizes(diag_script_info@fig938_project_MMM):) = \ + colors(indic_not_sel(ind(.not.ismissing(indic_not_sel)))) + thicks_MMM(dimsizes(diag_script_info@fig938_project_MMM):) = 3. + dashes_MMM(dimsizes(diag_script_info@fig938_project_MMM):) = \ + dashes(indic_not_sel(ind(.not.ismissing(indic_not_sel)))) + lin_mar_mod_MMM(dimsizes(diag_script_info@fig938_project_MMM):) = \ + lin_mar_mod(indic_not_sel(ind(.not.ismissing(indic_not_sel)))) + names_MMM(dimsizes(diag_script_info@fig938_project_MMM):) = \ + models(indic_not_sel(ind(.not.ismissing(indic_not_sel)))) + else + MMM(dimsizes(diag_script_info@fig938_project_MMM):, :) = \ + (/C(indic_not_sel, :)/) + colors_MMM(dimsizes(diag_script_info@fig938_project_MMM):) = \ + colors1(indic_not_sel) + thicks_MMM(dimsizes(diag_script_info@fig938_project_MMM):) = 3. 
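+          ; datasets not averaged into any MMM group are appended after
+          ; the multi-model means, keeping their own color/dash styles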
+ dashes_MMM(dimsizes(diag_script_info@fig938_project_MMM):) = \ + dashes1(indic_not_sel) + lin_mar_mod_MMM(dimsizes(diag_script_info@fig938_project_MMM):) = \ + lin_mar_mod1(indic_not_sel) + names_MMM(dimsizes(diag_script_info@fig938_project_MMM):) = \ + models1(indic_not_sel) + end if + end if + end if + if(isatt(diag_script_info, "fig938_YMin")) then + res1@trYMinF = diag_script_info@fig938_YMin + else + res1@trYMinF = min1 + end if + + if(isatt(diag_script_info, "fig938_YMax")) then + res1@trYMaxF = diag_script_info@fig938_YMax + else + res1@trYMaxF = max1 + end if + + res1@xyLineColors = colors_MMM ; line colors + res1@xyLineThicknesses = thicks_MMM ; line thicknesses + res1@xyDashPatterns = dashes_MMM ; line patterns + res1@gsnLeftString = case1 + res1@gsnRightString = UNITS + res1@gsnLeftStringFontHeightF = 0.04 + res1@gsnRightStringFontHeightF = 0.04 + res1@tfPolyDrawOrder = "Draw" + plot_MMM(ir) = gsn_csm_xy(wks_MMM, months, MMM, res1) + xp = new((/2 * 13/), typeof(MMM)) + yp = new((/2 * 13, dimsizes(diag_script_info@fig938_project_MMM)/), \ + typeof(MMM)) + do k = 0, 12 + yp(k, :) = MMM(:dimsizes(diag_script_info@fig938_project_MMM) - 1, k) + \ + STD(:dimsizes(diag_script_info@fig938_project_MMM) - 1, k) + xp(k) = months(k) + xp(2 * 13 - 1 - k) = months(k) + yp(2 * 13 - 1 - k, :) = \ + MMM(:dimsizes(diag_script_info@fig938_project_MMM) - 1, k) \ + - STD(:dimsizes(diag_script_info@fig938_project_MMM) - 1, k) + end do + do iin = 0, dimsizes(diag_script_info@fig938_project_MMM) - 1 + gsres = True + gsres@gsFillColor = colors_MMM(iin) + gsres@gsFillOpacityF = 0.3 + plot_STD(ir, iin) = gsn_add_polygon(wks_MMM, plot_MMM(ir), xp,\ + yp(:, iin), gsres) + end do + + ; ******************single region output file************************** + + nc_filename_MMM = work_dir + "fig938_" + var0 + "_MMM.nc" + MMM@var = var0 + "_" + case1 + MMM@diag_script = "fig938" + month_n = (/"J", "F", "M", "A", "M", "J", "J", "A", "S", \ + "O", "N", "D", "J"/) + MMM!0 = "line" + MMM!1 = "month" + MMM&line = names_MMM + MMM&month = month_n + + if (ir .eq. 0) then + MMM@existing = "overwrite" + else + MMM@existing = "append" + end if + + nc_outfile = ncdf_write(MMM, nc_filename_MMM) + + ; ******************single region output file************************** + + nc_filename_STD = work_dir + "fig938_" + var0 + "_STD.nc" + STD@var = var0 + "_" + case1 + STD@diag_script = "fig938" + month_n = (/"J", "F", "M", "A", "M", "J", "J", "A", "S", \ + "O", "N", "D", "J"/) + STD!0 = "line" + STD!1 = "month" + STD&line = names_MMM + STD&month = month_n + + if (ir .eq. 0) then + STD@existing = "overwrite" + else + STD@existing = "append" + end if + + nc_outfile = ncdf_write(STD, nc_filename_STD) + + ; ******************single region output file************************** + + delete(min1) + delete(max1) + delete(C) + delete(MMM) + delete(STD) + delete(xp) + delete(yp) + delete(indic_not_sel) + delete(A) + delete(E) + delete(Ehlp) + end do ; region + + ; Panel plots + pan = True + pan@gsnMaximize = True + pan@gsnFrame = False + pan@gsnPaperOrientation = "portrait" + n_p = dimsizes(plot) / 5 + if (mod(dimsizes(plot), 5) .ne. 
0) then
+    n_p = n_p + 1
+  end if
+  gsn_panel(wks, plot, (/n_p, 5/), pan)
+  frame(wks)
+
+  ; ***********add legend****************************
+  if (diag_script_info@fig938_diff.eq."True") then
+    add_legenda_page(wks_l, models1, colors1, dashes1, lin_mar_mod1, \
+                     thicks1, "0.9")
+  else
+    add_legenda_page(wks_l, models, colors, dashes, lin_mar_mod, \
+                     thicks, "0.9")
+  end if
+  frame(wks_l)
+
+  if (diag_script_info@fig938_MMM.eq."True")
+    pan1 = True
+    pan1@gsnMaximize = True
+    pan1@gsnFrame = False
+    pan1@gsnPaperOrientation = "portrait"
+    n_p = dimsizes(plot_MMM) / 5
+    if (mod(dimsizes(plot_MMM), 5) .ne. 0) then
+      n_p = n_p + 1
+    end if
+    gsn_panel(wks_MMM, plot_MMM, (/n_p, 5/), pan1)
+    add_legend(wks_MMM, names_MMM, colors_MMM, \
+               dashes_MMM, lin_mar_mod_MMM, thicks_MMM, "0.2")
+  end if
+  frame(wks_MMM)
+  delete(plot_MMM)
+  delete(res1)
+  delete(pan1)
+
+  map_r = gsn_csm_contour_map(wks_r, cell_fill2, mres_r)
+
+  do ir = 0, dim_reg - 1
+    txres@txFontColor = "black"
+    region_array_hlp = select_region1(region_label(ir))
+    dum = gsn_add_text(wks_r, map_r, region_label(ir), \
+                       region_array_hlp(2) + 5.0, \
+                       region_array_hlp(0) + 15.0, \
+                       txres)
+    delete(region_array_hlp)
+  end do
+  draw(map_r)
+  frame(wks_r)
+
+  ; -------------------------------------------------------------
+  ; Add provenance
+  ; -------------------------------------------------------------
+
+  caption = "Mean seasonal cycle for " + var0 + " over " + \
+            "land in different regions"
+
+  log_provenance(nc_filename, \
+                 wks@fullname, \
+                 caption, \
+                 (/"clim"/), \
+                 (/"reg"/), \
+                 "seas", \
+                 (/"cionni_irene"/), \
+                 (/"flato13ipcc"/), \
+                 metadata_att_as_array(info0, "filename"))
+
+  if (diag_script_info@fig938_MMM .eq. "True")
+
+    log_provenance(nc_filename_MMM, \
+                   wks_MMM@fullname, \
+                   caption, \
+                   (/"clim"/), \
+                   (/"reg"/), \
+                   "seas", \
+                   (/"cionni_irene"/), \
+                   (/"flato13ipcc"/), \
+                   metadata_att_as_array(info0, "filename"))
+
+  end if
+
+end
diff --git a/esmvaltool/diag_scripts/regional_downscaling/Figure9.39.ncl b/esmvaltool/diag_scripts/regional_downscaling/Figure9.39.ncl
new file mode 100644
index 0000000000..96ebeb27d7
--- /dev/null
+++ b/esmvaltool/diag_scripts/regional_downscaling/Figure9.39.ncl
@@ -0,0 +1,782 @@
+; #############################################################################
+; # SEASONAL OR ANNUAL BIAS                                                   #
+; #############################################################################
+; # Author: Irene Cionni (ENEA, Italy)                                        #
+; # CRESCENDO project
+; #############################################################################
+; #
+; # Description:
+; #    * Create seasonal or annual bias box plots at selected regions
+; #      - Read regions
+; #      - create a mask map using the region polygons
+; #      - for all the models calculate the bias relative to the
+; #        reference_dataset
+; #      - sort biases for each selected project&experiment&mip
+; #      - evaluate percentiles (5th, 25th, 50th, 75th, 95th)
+; #      - plot the first selected project&experiment&mip as box-and-whisker
+; #      - plot the other selected projects&experiment&mip as markers
+; #      - compare the root-mean-square error of the first
+; #        project&experiment&mip with the second (if it exists); regions
+; #        where the first error is larger than the second are labeled
+; #        in red.
+; #
+; # Required variable_info attributes (variable specific):
+; #
+; #    * reference_dataset: reference dataset name
+; #
+; # Optional diag_script_info attributes:
+; #
+; #    * styleset, default "CMIP5"
+; #    * fig939_season: seasons, e.g.
(/"DJF","JJA","ANN"/)
+; #    * fig939_region_label:
+; #      (/"ALA","CGI","WNA","CNA","ENA","CAM","AMZ","NEB","WSA","SSA","NEU",
+; #        "CEU","MED","SAH","WAF","EAF","SAF","NAS","WAS","CAS","TIB","EAS",
+; #        "SAS","SEA","NAU","SAU"/)
+; #      these regions are defined in Seneviratne et al., 2012: Appendix 3.A,
+; #      Notes and technical details on Chapter 3 figures.
+; #      In: Managing the Risks of Extreme Events and Disasters to Advance
+; #      Climate Change Adaptation (IPCC).
+; #    * fig939_MMM, default: True
+; #    * fig939_project_MMM: projects to average
+; #    * fig939_experiment_MMM: experiments to average
+; #    * fig939_mip_MMM: mip to average
+; #    * fig939_project_MMM, fig939_experiment_MMM and fig939_mip_MMM
+; #      must have the same number of elements, e.g.
+; #      fig939_project_MMM=(/"CMIP5", "CMIP3"/)
+; #      fig939_experiment_MMM=(/"historical", "historical"/)
+; #      fig939_mip_MMM=(/"Amon", "Amon"/)
+; #
+; #    * Names for the legend:
+; #      fig939_names_MMM: names in legend, e.g. (/"CMIP5","CMIP3"/)
+; #    * Vertical lines divide groups of regions:
+; #      fig939_vert_line_pos, e.g. (/6,10,13,17,24,26/)
+; #    * Labels of the vertical lines:
+; #      fig939_vert_line_label,
+; #      e.g. (/"North America","South America","Europe","Africa",
+; #             "Asia","Australia"/)
+; #    * fig939_mode: True = cumulative mode
+; #    * fig939_YMin: minimum Y axis value
+; #    * fig939_YMax: maximum Y axis value
+; #    * fig939_percentage: default: False
+; #
+; # Caveats:
+; #    This script requires NCL version 6.40
+; #
+; # Comments:
+; #    Region borders are polygons defined in function select_region_srex
+; #    in regional_function.ncl
+; #
+; # Modification history:
+; #    20230111-weigel_katja: added option for percentage difference
+; #    20230110-weigel_katja: fixed calendar, calendar starting point needs
+; #                           to include the time: "months since 1-1-1
+; #                           00:00:00", and converting month number to time
+; #                           axis with -1.0
+; #    20221026-weigel_katja: header updated
+; #    20221026-weigel_katja: fixed calendar, time_operations expects
+; #                           "gregorian" instead of "Gregorian"
+; #    20221026-weigel_katja: removed unused variable fig939_colors_MMM
+; #    20221026-weigel_katja: removed unused variable refModel
+; #                           (reference_dataset instead)
+; #    20220314-weigel_katja: header updated
+; ###############################################################
+load "$diag_scripts/../interface_scripts/interface.ncl"
+
+load "$diag_scripts/shared/statistics.ncl"
+load "$diag_scripts/shared/plot/aux_plotting.ncl"
+load "$diag_scripts/shared/plot/style.ncl"
+load "$diag_scripts/regional_downscaling/regional_function.ncl"
+
+
+begin
+  enter_msg(DIAG_SCRIPT, "")
+  var0 = variable_info[0]@short_name
+  UNITS = variable_info[0]@units
+  info0 = select_metadata_by_name(input_file_info, var0)
+  dim_MOD1 = ListCount(info0)
+  names = metadata_att_as_array(info0, "dataset")
+  projects = metadata_att_as_array(info0, "project")
+  exps = metadata_att_as_array(info0, "exp")
+  ensembles = metadata_att_as_array(info0, "ensemble")
+  mips = metadata_att_as_array(info0, "mip")
+  y1 = metadata_att_as_array(info0, "start_year")
+  y2 = metadata_att_as_array(info0, "end_year")
+  long_name = variable_info[0]@long_name
+  if (isatt(variable_info[0], "reference_dataset")) then
+    refname = variable_info[0]@reference_dataset
+    do imod = 0, dimsizes(names) - 1
+      if (names(imod) .eq.
refname) then + idx_ref = imod + end if + end do + else + error_msg("f", DIAG_SCRIPT, "", "no reference dataset " + \ + "(variable_info[0]@reference_dataset) needs to be defined.") + end if + if(isatt(variable_info, "long_name")) then + LONG_NAME = variable_info@long_name + else + LONG_NAME = var0 + end if + + log_info("++++++++++++++++++++++++++++++++++++++++++") + log_info(DIAG_SCRIPT + " (var: " + var0 + ")") + log_info("++++++++++++++++++++++++++++++++++++++++++") + + ; Set default values for optional diag_script_info attributes + + set_default_att(diag_script_info, "fig939_region_label", "ALAs") + set_default_att(diag_script_info, "styleset", "CMIP5") + set_default_att(diag_script_info, "fig939_MMM", "True") + set_default_att(diag_script_info, "fig939_project_MMM", "CMIP5") + set_default_att(diag_script_info, "fig939_experiment_MMM", "historical") + set_default_att(diag_script_info, "fig939_mip_MMM", "Amon") + ; set_default_att(diag_script_info, "fig939_colors_MMM", "red") + set_default_att(diag_script_info, "fig939_names_MMM", \ + diag_script_info@fig939_project_MMM) + set_default_att(diag_script_info, "fig939_season", "DJF") + set_default_att(diag_script_info, "fig939_percentage", "False") + + colors = project_style(info0, diag_script_info, "colors") + dashes = project_style(info0, diag_script_info, "dashes") + thicks = tofloat(project_style(info0, diag_script_info, "thicks")) + markers = project_style(info0, diag_script_info, "markers") + lin_mar_mod = new((/dimsizes(colors)/), "string") + lin_mar_mod = (/"Lines"/) + season = diag_script_info@fig939_season + mode = new(1, logical) + if(isatt(diag_script_info, "fig939_mode")) then + mode = True + else + mode = False + end if + region_label = tostring(diag_script_info@fig939_region_label) + dim_reg = dimsizes(region_label) + dim_seas = dimsizes(season) + + flag_mod = where(projects.ne."OBS" .and.projects.ne."OBS6"\ + .and. 
projects.ne."obs4mips", 1, 0) + index_mod = ind(flag_mod.gt.0) + index_obs = ind(flag_mod.eq.0) + dim_MOD = dimsizes(index_mod) + dim_OBS = 0 + + if (.not.all(ismissing(index_obs))) then + dim_OBS = dimsizes(index_obs) + list_obs_mod = names(index_obs) + end if + ; make sure path for (mandatory) netcdf output exists + + work_dir = config_user_info@work_dir + "/" + ; Create work dir + system("mkdir -p " + work_dir) + +end +begin + dim_MMM = 1 + + project_MMM = diag_script_info@fig939_project_MMM + exp_MMM = diag_script_info@fig939_experiment_MMM + mip_MMM = diag_script_info@fig939_mip_MMM + if (diag_script_info@fig939_MMM.eq."True") + if (dimsizes(project_MMM).eq.dimsizes(exp_MMM).eq.dimsizes(mip_MMM)) then + dim_MMM = dimsizes(project_MMM) + indic_s = new((/dimsizes(project_MMM),\ + dimsizes(names)/), "integer") + indic_not_s = new((/dimsizes(project_MMM),\ + dimsizes(names)/), "integer") + do ik = 0, dimsizes(diag_script_info@fig939_project_MMM) - 1 + ii_s = ind((projects.eq.project_MMM(ik))\ + .and.(exps.eq.exp_MMM(ik))\ + .and.(mips.eq.mip_MMM(ik))) + indic_s(ik, :dimsizes(ii_s) - 1) = ii_s + delete(ii_s) + ii_not_s = ind((projects.ne.project_MMM(ik)).or.(exps.ne.exp_MMM(ik))\ + .or.(mips.ne.mip_MMM(ik))) + indic_not_s(ik, :dimsizes(ii_not_s) - 1) = ii_not_s + delete(ii_not_s) + end do + else + error_msg("f", diag_script, "", "diag_script_info@fig939_project_MMM" + \ + "diag_script_info@fig939_experiment_MMM and" + \ + " diag_script_info@fig939_experiment_MMM" + \ + " must have the same dimension") + end if + end if + + if (dimsizes(project_MMM).gt.1) then + indic_not_sela = get_unique_difference(indic_s, indic_not_s) + else + ndim = ndtooned(indic_not_s) + ise = ind(.not.ismissing(ndim)) + indic_not_sela = ndim(ise) + delete(ise) + delete(ndim) + end if + MMM = new((/dim_seas, dim_reg, dimsizes(project_MMM) +\ + dimsizes(indic_not_sela), 6/), "double") + names_MMM = new((/dimsizes(project_MMM)/), "string") + MMM_rmse = new((/dim_seas, dim_reg, dimsizes(project_MMM)/), "double") + work_dir = config_user_info@work_dir + "/" + ; Create work dir + system("mkdir -p " + work_dir) + wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "Figure9_39_" + var0) + wks_r = get_wks("dummy_for_wks", DIAG_SCRIPT, "Figure9_39_regions_" + var0) + wks_l = get_wks("dummy_for_wks", DIAG_SCRIPT, "Figure9_39_legend_" + var0) + ; +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + Amask = read_data(info0[idx_ref]) + lsdata = Amask(0, :, :) + lsdata = where(ismissing(lsdata), 1., lsdata) + ; ***************************************************************** + map_r = new(1, "graphic") + plot = new(dim_seas, "graphic") + dum_ref = new(dim_seas, "graphic") + dum1 = new((/dim_seas, dimsizes(project_MMM)/),\ + "graphic") + dum2 = new((/dim_seas, dimsizes(project_MMM)/),\ + "graphic") + dum3 = new((/dim_seas, dimsizes(project_MMM)/),\ + "graphic") + dum4 = new((/dim_seas, dimsizes(project_MMM)/),\ + "graphic") + dum5 = new((/dim_seas, dimsizes(project_MMM)/),\ + "graphic") + dum_lines = new((/dim_seas, 30/), "graphic") + dum_txt = new((/dim_seas, 30/), "graphic") + mres_r = True + mres_r@gsnDraw = False + mres_r@gsnFrame = False + mres_r@lbLabelBarOn = False + mres_r@cnLevelSelectionMode = "ManualLevels" + mres_r@cnMinLevelValF = 0. 
+ mres_r@cnMaxLevelValF = 200.0 + mres_r@cnMaxLevelCount = 42.0 + mres_r@cnLevelSpacingF = max((/5.0, 200.0/tofloat(dim_reg + 2.0)/)) + mres_r@cnInfoLabelOn = False + mres_r@cnLineLabelsOn = False + mres_r@cnFillOn = True + mres_r@cnFillOpacityF = 0.5 + cmap_r = read_colormap_file("GMT_wysiwygcont") + mres_r@cnFillPalette = cmap_r(::-1, :) + mres_r@mpFillOn = True + gsres = True + gsres@gsFillOpacityF = 0.7 + txres = True + txres@txFontHeightF = 0.012 + txres@txFont = 22 + txres@txJust = "CenterLeft" + + random_setallseed(36484749, 9494848) + unf = random_uniform(0, 1, (/dim_reg + 2/)) + ip = dim_pqsort(unf, 1) + + do iseas = 0, dim_seas - 1 + do ir = 0, dim_reg - 1 + case1 = region_label(ir) + cell_fill = (/select_region_srex(region_label(ir), lsdata)/) + cell_fill!0 = "lat" + cell_fill!1 = "lon" + cell_fill&lat = lsdata&lat + cell_fill&lon = lsdata&lon + + if (iseas.eq.0) then + irc = (ip(ir + 1)) * 200.0/tofloat(dim_reg + 2.0) + + if (ir.eq.0) then + cell_fill2 = where(ismissing(cell_fill), -1., irc) + cell_fill2!0 = "lat" + cell_fill2!1 = "lon" + cell_fill2&lat = lsdata&lat + cell_fill2&lon = lsdata&lon + else + cell_fill2 = where(ismissing(cell_fill), cell_fill2, irc) + end if + end if + cell_fill = where(ismissing(cell_fill), -1., 1.) + if (diag_script_info@fig939_MMM.eq."True") + indic_s = where(indic_s.eq.idx_ref, indic_s@_FillValue, indic_s) + indic_not_sel = where(indic_not_sela.eq.idx_ref,\ + indic_not_sela@_FillValue, indic_not_sela) + if(.not.ismissing(all(indic_not_sel))) then + indic_not_sel1 = indic_not_sel(ind(.not.ismissing(indic_not_sel))) + delete(indic_not_sel) + indic_not_sel = indic_not_sel1 + delete(indic_not_sel1) + end if + end if + D = new((/dim_MOD+1/), "double") + C = new((/dim_MOD/), "double") + models1 = new((/dim_MOD/), "string") + colors1 = new((/dim_MOD/), typeof(colors)) + dashes1 = new((/dim_MOD/), typeof(dashes)) + thicks1 = new((/dim_MOD/), typeof(thicks)) + markers1 = new((/dim_MOD/), typeof(markers)) + lin_mar_mod1 = new((/dim_MOD/), "string") + ; (b) difference of each model from 'reference' + ; CASE 1 In this plot only case1 exsist!!!!! 
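+      ; The raw time coordinate of the input here is a plain month number;
+      ; the blocks below rebuild it as a calendar axis so that
+      ; time_operations and cd_calendar can extract seasons and weight by
+      ; days_in_month. Sketch of the conversion applied to A0 (and later B0):
+      ;   time1 = todouble(A0&time - 1.0)  ; month 1 -> 0 months since origin
+      ;   time1@units = "months since 1-1-1 00:00:00"
+      ;   time1@calendar = "gregorian"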
+ ; case1 = models@name(idx_ref) + " " + models@case_name(idx_ref) + A0 = read_data(info0[idx_ref]) + if (season(iseas).ne."ANN") then + if (mode) then + A0!0 = "time" + time1 = todouble(A0&time - 1.0) + time1!0 = "time" + ; ys=input_file_info[idx_ref]@start_year + time1@units = "months since 1-1-1 00:00:00" + time1@calendar = "gregorian" + delete(A0&time) + A0&time = time1 + delete(time1) + Atmp3 = time_operations(A0, -1, -1, "extract", season(iseas), True) + date3 = cd_calendar(Atmp3&time, 0) + year3 = date3(:, 0) + month3 = date3(:, 1) + weights3 = days_in_month(toint(year3), toint(month3)) + Atmp4 = Atmp3 + Atmp4 = (/Atmp3 * conform(Atmp3, weights3, 0)/) + Atmp1 = dim_sum_n_Wrap(Atmp4, 0) + delete(Atmp3) + delete(Atmp4) + delete(date3) + delete(year3) + delete(month3) + delete(weights3) + else + A0!0 = "time" + time1 = todouble(A0&time - 1.0) + time1!0 = "time" + ; ys=input_file_info[idx_ref]@start_yea + time1@units = "months since 1-1-1 00:00:00" + time1@calendar = "gregorian" + delete(A0&time) + A0&time = time1 + Atmp1 = time_operations(A0, -1, -1, "average", season(iseas), True) + end if + else + if (mode) then + A0!0 = "time" + time1 = todouble(A0&time - 1.0) + time1!0 = "time" + ; ys=input_file_info[idx_ref]@start_year + time1@units = "months since 1-1-1 00:00:00" + time1@calendar = "gregorian" + delete(A0&time) + A0&time = time1 + Atmp3 = A0 + date3 = cd_calendar(Atmp3&time, 0) + year3 = date3(:, 0) + month3 = date3(:, 1) + weights3 = days_in_month(toint(year3), toint(month3)) + Atmp4 = Atmp3 + Atmp4 = (/Atmp3 * conform(Atmp3, weights3, 0)/) + Atmp1 = dim_sum_n_Wrap(Atmp4, 0) + delete(Atmp3) + delete(Atmp4) + delete(date3) + delete(year3) + delete(month3) + delete(weights3) + else + A0!0 = "time" + time1 = todouble(A0&time - 1.0) + time1!0 = "time" + ; ys=input_file_info[idx_ref]@start_year + time1@units = "months since 1-1-1 00:00:00" + time1@calendar = "gregorian" + delete(A0&time) + A0&time = time1 + Atmp1 = time_operations(A0, -1, -1, "average", "annualclim", True) + end if + end if + ; ******************mask region********************* + Atmp1 = mask(Atmp1, cell_fill.eq.-1, False) + ; ************************************************** + Atmp = area_operations(Atmp1, -90., 90., 0., 360., "average", True) + delete(A0) + A = Atmp + delete(Atmp) + delete(Atmp1) + jmod = 0 + do imod = 0, dim_MOD - 1 + if (imod .ne. 
idx_ref) then + B0 = read_data(info0[imod]) + if (season(iseas).ne."ANN") then + if (mode) then + B0!0 = "time" + time1 = todouble(B0&time - 1.0) + time1!0 = "time" + ; ys = input_file_info[idx_ref]@start_year + time1@units = "months since 1-1-1 00:00:00" + time1@calendar = "gregorian" + delete(B0&time) + B0&time = time1 + Btmp3 = time_operations(B0, -1, -1, "extract",\ + season(iseas), True) + + date3 = cd_calendar(Btmp3&time, 0) + year3 = date3(:, 0) + month3 = date3(:, 1) + weights3 = days_in_month(toint(year3), toint(month3)) + Btmp4 = Btmp3 + Btmp4 = (/Btmp3 * conform(Btmp3, weights3, 0)/) + Btmp1 = dim_sum_n_Wrap(Btmp4, 0) + delete(Btmp3) + delete(Btmp4) + delete(date3) + delete(year3) + delete(month3) + delete(weights3) + else + B0!0 = "time" + time1 = todouble(B0&time - 1.0) + time1!0 = "time" + ; ys=input_file_info[idx_ref]@start_year + time1@units = "months since 1-1-1 00:00:00" + time1@calendar = "gregorian" + delete(B0&time) + B0&time = time1 + Btmp1 = time_operations(B0, -1, -1, "average",\ + season(iseas), True) + end if + else + if (mode) then + Btmp3 = B0 + Btmp3!0 = "time" + time1 = todouble(Btmp3&time - 1.0) + time1!0 = "time" + time1@units = "months since 1-1-1 00:00:00" + time1@calendar = "gregorian" + delete(Btmp3&time) + Btmp3&time = time1 + date3 = cd_calendar(Btmp3&time, 0) + year3 = date3(:, 0) + month3 = date3(:, 1) + weights3 = days_in_month(toint(year3), toint(month3)) + Btmp4 = Btmp3 + Btmp4 = (/Btmp3 * conform(Btmp3, weights3, 0)/) + Btmp1 = dim_sum_n_Wrap(Btmp4, 0) + delete(Btmp3) + delete(Btmp4) + delete(date3) + delete(year3) + delete(month3) + delete(weights3) + else + B0!0 = "time" + time1 = todouble(B0&time - 1.0) + time1!0 = "time" + ; ys = input_file_info[idx_ref]@start_year + time1@units = "months since 1-1-1 00:00:00" + time1@calendar = "gregorian" + delete(B0&time) + B0&time = time1 + Btmp1 = time_operations(B0, -1, -1, "average", "annualclim",\ + True) + end if + end if + ; ******************mask region********************** + Btmp1 = mask(Btmp1, cell_fill.eq.-1, False) + ; ************************************************** + Btmp = area_operations(Btmp1, -90., 90., 0., 360., "average", True) + delete(B0) + B = Btmp + delete(Btmp) + delete(Btmp1) + ; -------------------------------------------------------- + ; Bias + if (mode) then + print("cumulative mode") + C(jmod) = (/ 100 * (avg(B) - avg(A)) / avg(A)/) + D(imod) = (/100 * (avg(B) - avg(A)) / avg(A)/) + else + C(jmod) = (/avg(B) - avg(A)/) ; get diff values + D(imod) = (/avg(B) - avg(A)/) + end if + + if (UNITS.eq."K") then + UNITS = "~S~o~N~C" + else + if (mode) then + UNITS = "%" + else + UNITS = variable_info[0]@units + end if + end if + + delete(B) + colors1(jmod) = colors(imod) + dashes1(jmod) = dashes(imod) + thicks1(jmod) = thicks(imod) + markers1(jmod) = markers(imod) + models1(jmod) = names(imod) + lin_mar_mod1(jmod) = "Lines" + jmod = jmod + 1 + end if + end do ; imod + delete(A) + if (diag_script_info@fig939_MMM.eq."True") + colori_def = (/"red", "orange", "green", "blue", "purple"/) + scatters_def = (/0, 5, 16, 4, 7, 8, 12/) + do iin = 0, dimsizes(diag_script_info@fig939_project_MMM) - 1 + if (.not.all(ismissing(indic_s(iin, :)))) then + aa = indic_s(iin, ind(.not.ismissing(indic_s(iin, :)))) + end if + F = D(aa) + qsort(F) + dimt = dimsizes(F) + x5 = round(.05 * dimt, 3) - 1 + x25 = round(.25 * dimt, 3) - 1 + x50 = round(.50 * dimt, 3) - 1 + x75 = round(.75 * dimt, 3) - 1 + x95 = round(.95 * dimt, 3) - 1 + x5 = where(x5.lt.0, 0, x5) + x25 = where(x25.lt.0, 0, x25) + x50 = 
where(x50.lt.0, 0, x50) + x75 = where(x75.lt.0, 0, x75) + x95 = where(x95.lt.0, 0, x95) + MMM_rmse(iseas, ir, iin) = (sum(F ^ 2) / dimt) ^ 0.5 + MMM(iseas, ir, iin, :) = (/F(x5), F(x25), F(x50),\ + F(x75), F(x95), stddev(F)/) + delete(aa) + delete(F) + delete(dimt) + + ; if(isatt(diag_script_info, "fig939_markers_MMM")) then + ; markers_MMM(iin) = diag_script_info@fig939_markers_MMM(iin) + ; else + ; markers_MMM(iin) = scatters_def(iin) + ; end if + + ; markers_MMM(iin) = 0 + + if(isatt(diag_script_info, "fig939_names_MMM")) then + names_MMM(iin) = diag_script_info@fig939_names_MMM(iin) + else + names_MMM(iin) = diag_script_info@fig939_project_MMM(iin) + " " +\ + diag_script_info@fig939_experiment_MMM(iin) +\ + " " + diag_script_info@fig939_mip_MMM(iin) + end if + end do + if (.not.ismissing(all(indic_not_sela))) then + MMM(iseas, ir, dimsizes(diag_script_info@fig939_project_MMM):, 0) =\ + (/D(indic_not_sela)/) + end if + end if + delete(C) + delete(D) + end do + res = True ; plot mods desired + ; add * if first project root mean square error is larger than the second + region_label1 = region_label + colors_reg = new(dimsizes(region_label), "string") + do irg = 0, dimsizes(region_label) - 1 + if (dim_MMM.gt.1) then + if (MMM_rmse(iseas, irg, 0).gt.MMM_rmse(iseas, irg, 1)) then + region_label1(irg) = region_label(irg) + "*" + colors_reg(irg) = "red" + else + region_label1(irg) = region_label(irg) + colors_reg(irg) = "blue" + end if + else + colors_reg(irg) = "blue" + end if + end do + + ind_blue = ind(colors_reg.eq."blue") + ind_red = ind(colors_reg.eq."red") + res@ind_blue = ind_blue + res@ind_red = ind_red + res@tmXBLabels = region_label1 + res@tmXBLabelFontHeightF = 0.017 + res@tiMainString = "bias, " + long_name + \ + " ("+UNITS+"), " + season(iseas) + if(isatt(diag_script_info, "fig939_YMin")) then + res@trYMinF = diag_script_info@fig939_YMin + else + res@trYMinF = min(MMM) - min(MMM) / 5 + end if + + if (isatt(diag_script_info, "fig939_YMax")) then + res@trYMaxF = diag_script_info@fig939_YMax + else + res@trYMaxF = max(MMM) + max(MMM) / 5 + end if + res@tmXBLabelAngleF = 90. + res@vpHeightF = 0.5 + res@vpWidthF = 0.9 + ; res@gsnYRefLine=0.0 + ; res@gsnYRefLineColor="gray11" + ; res@gsnYRefLineDashPattern=1. + plot(iseas) = box_plot_cr(wks, ispan(1, dim_reg, 1),\ + MMM(iseas, :, 0, :), False, res, False) + if (isatt(diag_script_info, "fig939_vert_line_pos")) then + lines_x = diag_script_info@fig939_vert_line_pos + do ilin = 0, dimsizes(lines_x) - 1 + dres = True + dres@gsLineColor = "gray11" + dres@gsLineDashPattern = 2. + dum_lines(iseas, ilin) = gsn_add_polyline(wks, plot(iseas),\ + (/lines_x(ilin) + 0.5,\ + lines_x(ilin) + 0.5/),\ + (/min(MMM) - avg(MMM) / 10,\ + max(MMM) + \ + avg(MMM) / 10/),\ + dres) + end do + if (isatt(diag_script_info, "fig939_vert_line_label")) then + lab_x = diag_script_info@fig939_vert_line_label + + do itxt = 0, dimsizes(lab_x) - 1 + txres = True + txres@txFontHeightF = 0.02 + txres@txAngleF = 90. + txres@txJust = "BottomRight" + dum_txt(iseas, itxt) = gsn_add_text(wks, plot(iseas), lab_x(itxt),\ + lines_x(itxt) + 0.4,\ + min(MMM) - min(MMM) / 4,\ + txres) + end do + end if + end if + rres = True + rres@gsLineColor = "gray11" + rres@gsLineDashPattern = 1. 
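+    ; Dashed gray reference line at y = 0, i.e. zero bias with respect to
+    ; the reference dataset.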
+ dum_ref(iseas) = gsn_add_polyline(wks, plot(iseas), (/0., 100./),\ + (/0., 0./), rres) + do iin = 1, dimsizes(diag_script_info@fig939_project_MMM) - 1 + mres = True ; marker mods desired + mres@gsMarkerIndex = scatters_def(iin) ; polymarker style + mres@gsMarkerSizeF = 0.008 * 26 / dim_reg + mres@gsMarkerThicknessF = 2. + mres@gsMarkerColor = "purple" ; polymarker color + dum1(iseas, iin) = gsn_add_polymarker(wks, plot(iseas),\ + ispan(1, dim_reg, 1),\ + MMM(iseas, :, iin, 0), mres) + mres@gsMarkerColor = "blue" ; polymarker color + dum2(iseas, iin) = gsn_add_polymarker(wks, plot(iseas),\ + ispan(1, dim_reg, 1),\ + MMM(iseas, :, iin, 1),\ + mres) + mres@gsMarkerColor = "green" ; polymarker color + dum3(iseas, iin) = gsn_add_polymarker(wks, plot(iseas),\ + ispan(1, dim_reg, 1), \ + MMM(iseas, :, iin, 2), \ + mres) + mres@gsMarkerColor = "orange" ; polymarker color + dum4(iseas, iin) = gsn_add_polymarker(wks, plot(iseas),\ + ispan(1, dim_reg, 1),\ + MMM(iseas, :, iin, 3),\ + mres) + mres@gsMarkerColor = "red" ; polymarker color + dum5(iseas, iin) = gsn_add_polymarker(wks, plot(iseas), \ + ispan(1, dim_reg, 1),\ + MMM(iseas, :, iin, 4), mres) + end do + delete(ind_blue) + delete(ind_red) + delete(res@ind_blue) + delete(res@ind_red) + end do + + ; ******************single region output file************************** + + print(work_dir) + nc_filename_MMM = work_dir + "fig939_" + var0 + "_MMM.nc" + MMM@var = var0 + MMM@diag_script = "fig939" + MMM!0 = "season" + MMM!1 = "region" + MMM!2 = "line" + MMM!3 = "stat" + dsizes_x = dimsizes(MMM) + nline = dsizes_x(2) + MMM&line = new((/nline/), "string") ; (/"None", "None", "None"/) + MMM&season = season + MMM®ion = region_label + MMM&stat = (/"x5", "x25", "x50", "x75", "x95", "stddev"/) + nc_outfile = ncdf_write(MMM, nc_filename_MMM) + + pan = True + pan@gsnMaximize = True + pan@gsnFrame = False + pan@gsnPaperOrientation = "portrait" + n_p = dimsizes(plot) / 3 + if (mod(dimsizes(plot), 3) .ne. 0) then + n_p = n_p + 1 + end if + gsn_panel(wks, plot, (/3, n_p/), pan) + frame(wks) + res1 = True ; plot mods desired + res1@tmXTLabels = names_MMM + res1@tmXTValues = ispan(1, dimsizes(diag_script_info@fig939_project_MMM)\ + + 1, 1) + res1@vpHeightF = 0.5 + res1@vpWidthF = 0.5 + lg_y = new((/1, 6/), "float") + lg_y(0, 0:4) = (/5., 25., 50., 75., 95./) + plot_lg = box_plot_lg(wks_l, 1, lg_y, False, res1, False) + duml1 = new(dimsizes(project_MMM), "graphic") + duml2 = new(dimsizes(project_MMM), "graphic") + duml3 = new(dimsizes(project_MMM), "graphic") + duml4 = new(dimsizes(project_MMM), "graphic") + duml5 = new(dimsizes(project_MMM), "graphic") + do iin = 1, dimsizes(project_MMM) - 1 + mresl = True ; marker mods desired + mresl@gsMarkerIndex = scatters_def(iin) ; polymarker style + ; mresl@gsMarkerSizeF = 60./dim_reg ; polymarker size + mresl@gsMarkerSizeF = 0.025 * 26 / dim_reg + mresl@gsMarkerThicknessF = 3. 
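+    ; One legend marker per percentile, using the same color coding as the
+    ; overlaid markers in the plots: purple = 5th, blue = 25th,
+    ; green = 50th, orange = 75th, red = 95th percentile (see lg_y above).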
+    mresl@gsMarkerColor = "purple"  ; polymarker color
+    duml1(iin) = gsn_add_polymarker(wks_l, plot_lg, iin + 1, lg_y(0, 0), mresl)
+    mresl@gsMarkerColor = "blue"  ; polymarker color
+    duml2(iin) = gsn_add_polymarker(wks_l, plot_lg, iin + 1, lg_y(0, 1), mresl)
+    mresl@gsMarkerColor = "green"  ; polymarker color
+    duml3(iin) = gsn_add_polymarker(wks_l, plot_lg, iin + 1, lg_y(0, 2), mresl)
+    mresl@gsMarkerColor = "orange"  ; polymarker color
+    duml4(iin) = gsn_add_polymarker(wks_l, plot_lg, iin + 1, lg_y(0, 3), mresl)
+    mresl@gsMarkerColor = "red"  ; polymarker color
+    duml5(iin) = gsn_add_polymarker(wks_l, plot_lg, iin + 1, lg_y(0, 4), mresl)
+  end do
+  draw(plot_lg)
+  delete(pan)
+  delete(n_p)
+  frame(wks_l)
+  pan = True
+  pan@gsnMaximize = True
+  pan@gsnFrame = False
+  pan@gsnPaperOrientation = "portrait"
+  map_r = gsn_csm_contour_map(wks_r, cell_fill2, mres_r)
+
+  do ir = 0, dim_reg - 1
+    txres@txFontColor = "black"
+    cregion_array = (/select_region_srex_poly(region_label(ir))/)
+    dum = gsn_add_text(wks_r, map_r, region_label(ir),\
+                       cregion_array(0), cregion_array(1), txres)
+    delete(cregion_array)
+  end do
+  draw(map_r)
+  frame(wks_r)
+
+  ; -------------------------------------------------------------
+  ; Add provenance
+  ; -------------------------------------------------------------
+
+  caption = "Seasonal- and annual mean biases of " + var0 + " over " + \
+            "land in different regions"
+
+  log_provenance(nc_filename_MMM, \
+                 wks@fullname, \
+                 caption, \
+                 (/"diff", "mean", "perc"/), \
+                 (/"reg"/), \
+                 "box", \
+                 (/"cionni_irene"/), \
+                 (/"flato13ipcc", "seneviratne12ipcc"/), \
+                 metadata_att_as_array(info0, "filename"))
+
+end
diff --git a/esmvaltool/diag_scripts/regional_downscaling/Figure9.40.ncl b/esmvaltool/diag_scripts/regional_downscaling/Figure9.40.ncl
new file mode 100644
index 0000000000..744e3d7b66
--- /dev/null
+++ b/esmvaltool/diag_scripts/regional_downscaling/Figure9.40.ncl
@@ -0,0 +1,830 @@
+; #############################################################################
+; # SEASONAL OR ANNUAL BIAS                                                   #
+; #############################################################################
+; # Author: Irene Cionni (ENEA, Italy)                                        #
+; # CRESCENDO project
+; #############################################################################
+;
+; # Description:
+; #   * Create seasonal or annual bias box plots at selected regions
+; #     - Read regions
+; #     - create mask map using region polygons
+; #     - for all the models calculate the bias with respect to the
+; #       reference_dataset
+; #     - sort biases for each selected project&experiment&mip
+; #     - evaluate percentiles (5th 25th 50th 75th 95th)
+; #     - plot the first selected project&experiment&mip as box-and-whisker
+; #     - plot the other selected projects&experiment&mip as markers
+; #     - compare the root-mean-square error of the first
+; #       project&experiment&mip with that of the second, if present;
+; #       regions where the first error is larger are labeled in red.
+; #
+; # Required variable_info attributes (variable specific):
+; #
+; #   * reference_dataset: reference dataset name
+; #
+; # Optional diag_script_info attributes
+; #
+; #   * styleset, default "CMIP5"
+; #   * fig940_season : seasons i.e.
(/"DJF","JJA","ANN"/), default "DJF" +; # * fig940_region_label :(/(/"Arctic_land","Arctic_sea",\ +; # "Antarctic_land",\ +; # "Antarctic_sea","Caribbean",\ +; # "WesternIndianOcean",\ +; # "NorthernIndianOcean","NorthernTropicalPacific",\ +; # "EquatorialTropicalPacific",\ +; # "SouthernTropicalPacific",\ +; # "World_land","World_sea","World"/) +; # these region are defined in Seneviratne et al., 2012: Appendix 3.A +; # Notes and technical details on Chapter 3 figures. +; # In: Managing the Risks of Extreme Events and Disasters to Advance +; # Climate Change Adaptation(IPCC). . +; # default "Arctic_land" +; # * fig940_MMM, default True +; # * fig940_project_MMM: projects to average, deafault "CMIP5" +; # * fig940_experiment_MMM: experiments to average, default "historical" +; # * fig940_mip_MMM: mip to average +; # * Dimensions of fig940_project_MMM, fig940_experiment_MMM and +; # fig940_mip_MMM must be the same i.e. +; # fig940_project_MMM=(/"CMIP5", "CMIP3"/) +; # fig940_experiment_MMM=(/"historical", "historical"/) +; # fig940_mip_MMM=(/"Amon", "Amon"/), default "Amon" +; # * fig940_names_MMM:names in legend i.e. (/"CMIP5","CMIP3"/) +; # default fig940_project_MMM +; # * Vertical lines divide groups of region +; # fig940_vert_line_pos +; # i.e. (/6,10,13,17,24,26/) +; # * labels of vertical lines +; # fig940_vert_line_label +; # e.g.(/"North America", "South America", "Europe", "Africa", +; # "Asia", "Australia"/) +; # * fig940_mode : True= cumulative mode +; # * fig940_YMin : minimum Y Axis +; # * fig940_YMax : maximum Y Axis +; # +; # Caveats: +; # This script requires NCL version 6.40 +; # +; # Comments: +; # Regions borders are polygons defined in function select_region_srex +; # in regional_function.ncl +; # +; # Modification history: +; # 20230110-weigel_katja: fixed calendar, calendar starting point needs to +; # including time: "months since 1-1-1 00:00:00" +; # and converting month number to time Axis +; # with -1.0 +; # 20221026-weigel_katja: header updated +; # 20221026-weigel_katja: fixed calendar, time_operations expects +; # "gregorian" instead of "Gregorian" +; # 20221026-weigel_katja: removed unused variable fig940_colors_MMM +; # 20221026-weigel_katja: removed unused variable refModel +; # (reference_dataset instead) +; # 20221024-weigel_katja: Figure9_40_regions fixed +; # 20220314-weigel_katja: header updated +; # +; # # +; ############################################################### +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/regional_downscaling/regional_function.ncl" + + +begin + enter_msg(DIAG_SCRIPT, "") + var0 = variable_info[0]@short_name + UNITS = variable_info[0]@units + info0 = select_metadata_by_name(input_file_info, var0) + dim_MOD1 = ListCount(info0) + names = metadata_att_as_array(info0, "dataset") + projects = metadata_att_as_array(info0, "project") + exps = metadata_att_as_array(info0, "exp") + ensembles = metadata_att_as_array(info0, "ensemble") + mips = metadata_att_as_array(info0, "mip") + y1 = metadata_att_as_array(info0, "start_year") + y2 = metadata_att_as_array(info0, "end_year") + long_name = variable_info[0]@long_name + if (isatt(variable_info[0], "reference_dataset")) then + refname = variable_info[0]@reference_dataset + do imod = 0, dimsizes(names) - 1 + if (names(imod) .eq. 
refname) then + idx_ref = imod + end if + end do + else + error_msg("f", DIAG_SCRIPT, "", "no reference dataset " + \ + "(variable_info[0]@reference_dataset) needs to be defined.") + end if + if(isatt(variable_info, "long_name")) then + LONG_NAME = variable_info@long_name + else + LONG_NAME = var0 + end if + + log_info("++++++++++++++++++++++++++++++++++++++++++") + log_info(DIAG_SCRIPT + " (var: " + var0 + ")") + log_info("++++++++++++++++++++++++++++++++++++++++++") + + ; Set default values for optional diag_script_info attributes + + set_default_att(diag_script_info, "fig940_region_label", "Arctic_land") + set_default_att(diag_script_info, "styleset", "CMIP5") + set_default_att(diag_script_info, "fig940_MMM", "True") + set_default_att(diag_script_info, "fig940_project_MMM", "CMIP5") + set_default_att(diag_script_info, "fig940_experiment_MMM", "historical") + set_default_att(diag_script_info, "fig940_mip_MMM", "Amon") + set_default_att(diag_script_info, "fig940_names_MMM", \ + diag_script_info@fig940_project_MMM) + set_default_att(diag_script_info, "fig940_season", "DJF") + + colors = project_style(info0, diag_script_info, "colors") + dashes = project_style(info0, diag_script_info, "dashes") + thicks = tofloat(project_style(info0, diag_script_info, "thicks")) + markers = project_style(info0, diag_script_info, "markers") + lin_mar_mod = new((/dimsizes(colors)/), "string") + lin_mar_mod = (/"Lines"/) + season = diag_script_info@fig940_season + mode = new(1, logical) + if(isatt(diag_script_info, "fig940_mode")) then + mode = True + else + mode = False + end if + region_label = tostring(diag_script_info@fig940_region_label) + dim_reg = dimsizes(region_label) + dim_seas = dimsizes(season) + flag_mod = where(projects.ne."OBS" .and.projects.ne."OBS6"\ + .and. 
projects.ne."obs4mips", 1, 0) + index_mod = ind(flag_mod.gt.0) + index_obs = ind(flag_mod.eq.0) + dim_MOD = dimsizes(index_mod) + dim_OBS = 0 + + if (.not.all(ismissing(index_obs))) then + dim_OBS = dimsizes(index_obs) + list_obs_mod = names(index_obs) + end if + ; make sure path for (mandatory) netcdf output exists + + work_dir = config_user_info@work_dir + "/" + ; Create work dir + system("mkdir -p " + work_dir) + +end +begin + dim_MMM = 1 + + project_MMM = diag_script_info@fig940_project_MMM + exp_MMM = diag_script_info@fig940_experiment_MMM + mip_MMM = diag_script_info@fig940_mip_MMM + if (diag_script_info@fig940_MMM.eq."True") + if (dimsizes(project_MMM).eq.dimsizes(exp_MMM).eq.dimsizes(mip_MMM)) then + dim_MMM = dimsizes(project_MMM) + indic_s = new((/dimsizes(project_MMM),\ + dimsizes(names)/), "integer") + indic_not_s = new((/dimsizes(project_MMM),\ + dimsizes(names)/), "integer") + do ik = 0, dimsizes(diag_script_info@fig940_project_MMM) - 1 + ii_s = ind((projects.eq.project_MMM(ik))\ + .and.(exps.eq.exp_MMM(ik))\ + .and.(mips.eq.mip_MMM(ik))) + indic_s(ik, :dimsizes(ii_s) - 1) = ii_s + delete(ii_s) + ii_not_s = ind((projects.ne.project_MMM(ik)).or.(exps.ne.exp_MMM(ik))\ + .or.(mips.ne.mip_MMM(ik))) + indic_not_s(ik, :dimsizes(ii_not_s) - 1) = ii_not_s + delete(ii_not_s) + end do + else + error_msg("f", diag_script, "", "diag_script_info@fig940_project_MMM" + \ + "diag_script_info@fig940_experiment_MMM and" + \ + " diag_script_info@fig940_experiment_MMM" + \ + " must have the same dimension") + end if + end if + + if (dimsizes(project_MMM).gt.1) then + indic_not_sela = get_unique_difference(indic_s, indic_not_s) + else + ndim = ndtooned(indic_not_s) + ise = ind(.not.ismissing(ndim)) + indic_not_sela = ndim(ise) + delete(ise) + delete(ndim) + end if + MMM = new((/dim_seas, dim_reg, dimsizes(project_MMM) +\ + dimsizes(indic_not_sela), 6/), "double") + names_MMM = new((/dimsizes(project_MMM)/), "string") + MMM_rmse = new((/dim_seas, dim_reg, dimsizes(project_MMM)/), "double") + work_dir = config_user_info@work_dir + "/" + ; Create work dir + system("mkdir -p " + work_dir) + wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "Figure9_40_" + var0) + wks_r = get_wks("dummy_for_wks", DIAG_SCRIPT, "Figure9_40_regions_" + var0) + wks_l = get_wks("dummy_for_wks", DIAG_SCRIPT, "Figure9_40_legend_" + var0) + + ; +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + Amask = read_data(info0[idx_ref]) + lsdata = Amask(0, :, :) + a = addfile("$NCARG_ROOT/lib/ncarg/data/cdf/landsea.nc", "r") + lsdata1 = a->LSMASK + lsm = landsea_mask(lsdata1, lsdata&lat, lsdata&lon) + ; ***************************************************************** + + map_r = new(1, "graphic") + plot = new(dim_seas, "graphic") + dum_ref = new(dim_seas, "graphic") + dum1 = new((/dim_seas, dimsizes(project_MMM)/),\ + "graphic") + dum2 = new((/dim_seas, dimsizes(project_MMM)/),\ + "graphic") + dum3 = new((/dim_seas, dimsizes(project_MMM)/),\ + "graphic") + dum4 = new((/dim_seas, dimsizes(project_MMM)/),\ + "graphic") + dum5 = new((/dim_seas, dimsizes(project_MMM)/),\ + "graphic") + dum_lines = new((/dim_seas, 30/), "graphic") + dum_txt = new((/dim_seas, 30/), "graphic") + mres_r = True + mres_r@gsnDraw = False + mres_r@gsnFrame = False + mres_r@lbLabelBarOn = False + mres_r@cnLevelSelectionMode = "ManualLevels" + mres_r@cnMinLevelValF = 0. 
+ mres_r@cnMaxLevelValF = 200.0 + ; mres_r@cnLevelSpacingF = 0.1 * 200.0/tofloat(dim_reg) + mres_r@cnLevelSpacingF = max((/5.0, 200.0/tofloat(dim_reg + 2.0)/)) + mres_r@cnInfoLabelOn = False + mres_r@cnLineLabelsOn = False + mres_r@cnFillOn = True + mres_r@cnFillOpacityF = 0.5 + cmap_r = read_colormap_file("GMT_wysiwygcont") + mres_r@cnFillPalette = cmap_r(::-1, :) + mres_r@mpFillOn = True + gsres = True + gsres@gsFillOpacityF = 0.7 + txres = True + txres@txFontHeightF = 0.012 + txres@txFont = 22 + txres@txJust = "CenterLeft" + + random_setallseed(36484749, 9494848) + unf = random_uniform(0, 1, (/dim_reg + 2/)) + ip = dim_pqsort(unf, 1) + + do iseas = 0, dim_seas - 1 + do ir = 0, dim_reg - 1 + + case1 = region_label(ir) + region_lab = str_get_field(case1, 1, "_") + region_ch = str_get_field(case1, 2, "_") + cell_fill = (/select_region_srex(region_lab, lsdata)/) + cell_fill!0 = "lat" + cell_fill!1 = "lon" + cell_fill&lat = lsdata&lat + cell_fill&lon = lsdata&lon + if(region_ch.eq."land") then + cell_fill = mask(cell_fill, lsm.eq.0, False) ; 0 = Ocean + cell_fill = mask(cell_fill, lsm.eq.2, False) ; 2 = Lake + ; cell_fill = mask(cell_fill,lsdata.eq.4,False) ; 4 = Ice Shelf + end if + if(region_ch.eq."sea") then + cell_fill = mask(cell_fill, lsm.eq.1, False) ; 1 = Land + cell_fill = mask(cell_fill, lsm.eq.3, False) ; 3 = Small Island + end if + if (iseas.eq.0) then + irc = (ip(ir + 1)) * 200.0/tofloat(dim_reg) + if (ir.eq.0) then + cell_fill2 = where(ismissing(cell_fill), -1., irc) + cell_fill2!0 = "lat" + cell_fill2!1 = "lon" + cell_fill2&lat = lsdata&lat + cell_fill2&lon = lsdata&lon + else + plot_on = True + if (region_label(ir).eq."World") then + plot_on = False + end if + if (region_label(ir).eq."World_land") then + plot_on = False + end if + if (region_label(ir).eq."World_sea") then + plot_on = False + end if + if (plot_on) then + cell_fill2 = where(ismissing(cell_fill), cell_fill2, irc) + end if + end if + end if + cell_fill = where(ismissing(cell_fill), -1., 1.) 
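+      ; Region labels of the form "<region>_<surface>" were split above with
+      ; str_get_field, e.g. "Arctic_land" -> region_lab = "Arctic",
+      ; region_ch = "land"; the SREX polygon is then restricted with the
+      ; landsea.nc flags (0 = ocean, 1 = land, 2 = lake, 3 = small island).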
+ mres_r@tiMainString = region_label(ir) + if (iseas.eq.0) then + mres_r@tiMainString = region_label(ir) + ; map(ir) = gsn_csm_contour_map(wks_r, cell_fill, mres) + end if + if (diag_script_info@fig940_MMM.eq."True") + indic_s = where(indic_s.eq.idx_ref, indic_s@_FillValue, indic_s) + indic_not_sel = where(indic_not_sela.eq.idx_ref,\ + indic_not_sela@_FillValue, indic_not_sela) + if(.not.ismissing(all(indic_not_sel))) then + indic_not_sel1 = indic_not_sel(ind(.not.ismissing(indic_not_sel))) + delete(indic_not_sel) + indic_not_sel = indic_not_sel1 + delete(indic_not_sel1) + end if + end if + D = new((/dim_MOD+1/), "double") + C = new((/dim_MOD/), "double") + models1 = new((/dim_MOD/), "string") + colors1 = new((/dim_MOD/), typeof(colors)) + dashes1 = new((/dim_MOD/), typeof(dashes)) + thicks1 = new((/dim_MOD/), typeof(thicks)) + markers1 = new((/dim_MOD/), typeof(markers)) + lin_mar_mod1 = new((/dim_MOD/), "string") + + A0 = read_data(info0[idx_ref]) + if (season(iseas).ne."ANN") then + if (mode) then + A0!0 = "time" + time1 = todouble(A0&time - 1.0) + time1!0 = "time" + ; ys=input_file_info[idx_ref]@start_year + time1@units = "months since 1-1-1 00:00:00" + time1@calendar = "gregorian" + delete(A0&time) + A0&time = time1 + delete(time1) + Atmp3 = time_operations(A0, -1, -1, "extract", season(iseas), True) + date3 = cd_calendar(Atmp3&time, 0) + year3 = date3(:, 0) + month3 = date3(:, 1) + weights3 = days_in_month(toint(year3), toint(month3)) + Atmp4 = Atmp3 + Atmp4 = (/Atmp3 * conform(Atmp3, weights3, 0)/) + Atmp1 = dim_sum_n_Wrap(Atmp4, 0) + delete(Atmp3) + delete(Atmp4) + delete(date3) + delete(year3) + delete(month3) + delete(weights3) + else + A0!0 = "time" + time1 = todouble(A0&time - 1.0) + time1!0 = "time" + ; ys=input_file_info[idx_ref]@start_yea + time1@units = "months since 1-1-1 00:00:00" + time1@calendar = "gregorian" + delete(A0&time) + A0&time = time1 + Atmp1 = time_operations(A0, -1, -1, "average", season(iseas), True) + end if + else + if (mode) then + A0!0 = "time" + time1 = todouble(A0&time - 1.0) + time1!0 = "time" + ; ys=input_file_info[idx_ref]@start_year + time1@units = "months since 1-1-1 00:00:00" + time1@calendar = "gregorian" + delete(A0&time) + A0&time = time1 + Atmp3 = A0 + date3 = cd_calendar(Atmp3&time, 0) + year3 = date3(:, 0) + month3 = date3(:, 1) + weights3 = days_in_month(toint(year3), toint(month3)) + Atmp4 = Atmp3 + Atmp4 = (/Atmp3 * conform(Atmp3, weights3, 0)/) + Atmp1 = dim_sum_n_Wrap(Atmp4, 0) + delete(Atmp3) + delete(Atmp4) + delete(date3) + delete(year3) + delete(month3) + delete(weights3) + else + A0!0 = "time" + time1 = todouble(A0&time - 1.0) + time1!0 = "time" + ; ys=input_file_info[idx_ref]@start_year + time1@units = "months since 1-1-1 00:00:00" + time1@calendar = "gregorian" + delete(A0&time) + A0&time = time1 + Atmp1 = time_operations(A0, -1, -1, "average", "annualclim", True) + end if + end if + ; ******************mask region********************* + Atmp1 = mask(Atmp1, cell_fill.eq.-1, False) + ; ************************************************** + Atmp = area_operations(Atmp1, -90., 90., 0., 360., "average", True) + delete(A0) + A = Atmp + delete(Atmp) + delete(Atmp1) + jmod = 0 + do imod = 0, dim_MOD - 1 + if (imod .ne. 
idx_ref) then + B0 = read_data(info0[imod]) + if (season(iseas).ne."ANN") then + if (mode) then + B0!0 = "time" + time1 = todouble(B0&time - 1.0) + time1!0 = "time" + ; ys = input_file_info[idx_ref]@start_year + time1@units = "months since 1-1-1 00:00:00" + time1@calendar = "gregorian" + delete(B0&time) + B0&time = time1 + Btmp3 = time_operations(B0, -1, -1, "extract",\ + season(iseas), True) + date3 = cd_calendar(Btmp3&time, 0) + year3 = date3(:, 0) + month3 = date3(:, 1) + weights3 = days_in_month(toint(year3), toint(month3)) + Btmp4 = Btmp3 + Btmp4 = (/Btmp3 * conform(Btmp3, weights3, 0)/) + Btmp1 = dim_sum_n_Wrap(Btmp4, 0) + delete(Btmp3) + delete(Btmp4) + delete(date3) + delete(year3) + delete(month3) + delete(weights3) + else + B0!0 = "time" + time1 = todouble(B0&time - 1.0) + time1!0 = "time" + ; ys=input_file_info[idx_ref]@start_year + time1@units = "months since 1-1-1 00:00:00" + time1@calendar = "gregorian" + delete(B0&time) + B0&time = time1 + Btmp1 = time_operations(B0, -1, -1, "average",\ + season(iseas), True) + end if + else + if (mode) then + Btmp3 = B0 + Btmp3!0 = "time" + time1 = todouble(Btmp3&time - 1.0) + time1!0 = "time" + time1@units = "months since 1-1-1 00:00:00" + time1@calendar = "gregorian" + delete(Btmp3&time) + Btmp3&time = time1 + date3 = cd_calendar(Btmp3&time, 0) + year3 = date3(:, 0) + month3 = date3(:, 1) + weights3 = days_in_month(toint(year3), toint(month3)) + Btmp4 = Btmp3 + Btmp4 = (/Btmp3 * conform(Btmp3, weights3, 0)/) + Btmp1 = dim_sum_n_Wrap(Btmp4, 0) + delete(Btmp3) + delete(Btmp4) + delete(date3) + delete(year3) + delete(month3) + delete(weights3) + else + B0!0 = "time" + time1 = todouble(B0&time - 1.0) + time1!0 = "time" + ; ys = input_file_info[idx_ref]@start_year + time1@units = "months since 1-1-1 00:00:00" + time1@calendar = "gregorian" + delete(B0&time) + B0&time = time1 + Btmp1 = time_operations(B0, -1, -1, "average", "annualclim",\ + True) + end if + end if + ; ******************mask region********************** + Btmp1 = mask(Btmp1, cell_fill.eq.-1, False) + ; ************************************************** + Btmp = area_operations(Btmp1, -90., 90., 0., 360., "average", True) + delete(B0) + B = Btmp + delete(Btmp) + delete(Btmp1) + ; -------------------------------------------------------- + ; Bias + if (mode) then + print("cumulative mode") + C(jmod) = (/ 100 * (avg(B) - avg(A)) / avg(A)/) + D(imod) = (/100 * (avg(B) - avg(A)) / avg(A)/) + UNITS = "%" + else + C(jmod) = (/avg(B) - avg(A)/) ; get diff values + D(imod) = (/avg(B) - avg(A)/) + end if + + if (UNITS.eq."K") then + UNITS = "~S~o~N~C" + else + if (mode) then + UNITS = "%" + else + UNITS = variable_info[0]@units + end if + end if + + delete(B) + colors1(jmod) = colors(imod) + dashes1(jmod) = dashes(imod) + thicks1(jmod) = thicks(imod) + markers1(jmod) = markers(imod) + models1(jmod) = names(imod) + lin_mar_mod1(jmod) = "Lines" + jmod = jmod + 1 + end if + end do ; imod + delete(A) + if (diag_script_info@fig940_MMM.eq."True") + colori_def = (/"red", "orange", "green", "blue", "purple"/) + scatters_def = (/0, 5, 16, 4, 7, 8, 12/) + do iin = 0, dimsizes(diag_script_info@fig940_project_MMM) - 1 + if (.not.all(ismissing(indic_s(iin, :)))) then + aa = indic_s(iin, ind(.not.ismissing(indic_s(iin, :)))) + end if + F = D(aa) + qsort(F) + dimt = dimsizes(F) + x5 = round(.05 * dimt, 3) - 1 + x25 = round(.25 * dimt, 3) - 1 + x50 = round(.50 * dimt, 3) - 1 + x75 = round(.75 * dimt, 3) - 1 + x95 = round(.95 * dimt, 3) - 1 + x5 = where(x5.lt.0, 0, x5) + x25 = where(x25.lt.0, 0, x25) + x50 
= where(x50.lt.0, 0, x50) + x75 = where(x75.lt.0, 0, x75) + x95 = where(x95.lt.0, 0, x95) + MMM_rmse(iseas, ir, iin) = (sum(F ^ 2) / dimt) ^ 0.5 + MMM(iseas, ir, iin, :) = (/F(x5), F(x25), F(x50),\ + F(x75), F(x95), stddev(F)/) + delete(aa) + delete(F) + delete(dimt) + + if(isatt(diag_script_info, "fig940_names_MMM")) then + names_MMM(iin) = diag_script_info@fig940_names_MMM(iin) + else + names_MMM(iin) = diag_script_info@fig940_project_MMM(iin) + " " +\ + diag_script_info@fig940_experiment_MMM(iin) +\ + " " + diag_script_info@fig940_mip_MMM(iin) + end if + end do + if (.not.ismissing(all(indic_not_sela))) then + MMM(iseas, ir, dimsizes(diag_script_info@fig940_project_MMM):, 0) =\ + (/D(indic_not_sela)/) + end if + end if + delete(C) + delete(D) + end do + res = True ; plot mods desired + ; add * if first project root mean square error is larger than the second + region_label1 = region_label + colors_reg = new(dimsizes(region_label), "string") + do irg = 0, dimsizes(region_label) - 1 + if (dim_MMM.gt.1) then + if (MMM_rmse(iseas, irg, 0).gt.MMM_rmse(iseas, irg, 1)) then + region_label1(irg) = region_label(irg) + "*" + colors_reg(irg) = "red" + else + region_label1(irg) = region_label(irg) + colors_reg(irg) = "blue" + end if + else + colors_reg(irg) = "blue" + end if + end do + + ind_blue = ind(colors_reg.eq."blue") + ind_red = ind(colors_reg.eq."red") + res@ind_blue = ind_blue + res@ind_red = ind_red + res@tmXBLabels = region_label1 + res@tmXBLabelFontHeightF = 0.017 + res@tiMainString = "bias, " + long_name + \ + " ("+UNITS+"), " + season(iseas) + if(isatt(diag_script_info, "fig940_YMin")) then + res@trYMinF = diag_script_info@fig940_YMin + else + res@trYMinF = min(MMM) - min(MMM) / 5 + end if + + if (isatt(diag_script_info, "fig940_YMax")) then + res@trYMaxF = diag_script_info@fig940_YMax + else + res@trYMaxF = max(MMM) + max(MMM) / 5 + end if + res@tmXBLabelAngleF = 90. + res@vpHeightF = 0.5 + res@vpWidthF = 0.9 + plot(iseas) = box_plot_cr(wks, ispan(1, dim_reg, 1),\ + MMM(iseas, :, 0, :), False, res, False) + if (isatt(diag_script_info, "fig940_vert_line_pos")) then + lines_x = diag_script_info@fig940_vert_line_pos + do ilin = 0, dimsizes(lines_x) - 1 + dres = True + dres@gsLineColor = "gray11" + dres@gsLineDashPattern = 2. + dum_lines(iseas, ilin) = gsn_add_polyline(wks, plot(iseas),\ + (/lines_x(ilin) + 0.5,\ + lines_x(ilin) + 0.5/),\ + (/min(MMM) - avg(MMM) / 10,\ + max(MMM) + \ + avg(MMM) / 10/),\ + dres) + end do + if (isatt(diag_script_info, "fig940_vert_line_label")) then + lab_x = diag_script_info@fig940_vert_line_label + + do itxt = 0, dimsizes(lab_x) - 1 + txres = True + txres@txFontHeightF = 0.02 + txres@txAngleF = 90. + txres@txJust = "BottomRight" + dum_txt(iseas, itxt) = gsn_add_text(wks, plot(iseas), lab_x(itxt),\ + lines_x(itxt) + 0.4,\ + min(MMM) - min(MMM) / 4,\ + txres) + end do + end if + end if + rres = True + rres@gsLineColor = "gray11" + rres@gsLineDashPattern = 1. + dum_ref(iseas) = gsn_add_polyline(wks, plot(iseas), (/0., 100./),\ + (/0., 0./), rres) + do iin = 1, dimsizes(diag_script_info@fig940_project_MMM) - 1 + mres = True ; marker mods desired + mres@gsMarkerIndex = scatters_def(iin) ; polymarker style + mres@gsMarkerSizeF = 0.008 * 26 / dim_reg + mres@gsMarkerThicknessF = 2. 
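+    ; MMM(iseas, :, iin, 0:4) holds the 5th/25th/50th/75th/95th percentiles
+    ; (see the "stat" coordinate of the netcdf output below); each additional
+    ; project is overlaid as five colored markers per region, one marker per
+    ; percentile.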
+ mres@gsMarkerColor = "purple" ; polymarker color + dum1(iseas, iin) = gsn_add_polymarker(wks, plot(iseas),\ + ispan(1, dim_reg, 1),\ + MMM(iseas, :, iin, 0), mres) + mres@gsMarkerColor = "blue" ; polymarker color + dum2(iseas, iin) = gsn_add_polymarker(wks, plot(iseas),\ + ispan(1, dim_reg, 1),\ + MMM(iseas, :, iin, 1),\ + mres) + mres@gsMarkerColor = "green" ; polymarker color + dum3(iseas, iin) = gsn_add_polymarker(wks, plot(iseas),\ + ispan(1, dim_reg, 1), \ + MMM(iseas, :, iin, 2), \ + mres) + mres@gsMarkerColor = "orange" ; polymarker color + dum4(iseas, iin) = gsn_add_polymarker(wks, plot(iseas),\ + ispan(1, dim_reg, 1),\ + MMM(iseas, :, iin, 3),\ + mres) + mres@gsMarkerColor = "red" ; polymarker color + dum5(iseas, iin) = gsn_add_polymarker(wks, plot(iseas), \ + ispan(1, dim_reg, 1),\ + MMM(iseas, :, iin, 4), mres) + end do + delete(ind_blue) + delete(ind_red) + delete(res@ind_blue) + delete(res@ind_red) + end do + + ; ******************single region output file************************** + + nc_filename_MMM = work_dir + "fig940_" + var0 + "_MMM.nc" + MMM@var = var0 + MMM@diag_script = "fig940" + MMM!0 = "season" + MMM!1 = "region" + MMM!2 = "line" + MMM!3 = "stat" + dsizes_x = dimsizes(MMM) + nline = dsizes_x(2) + MMM&line = new((/nline/), "string") ; (/"None", "None", "None"/) + MMM&season = season + MMM®ion = region_label + MMM&stat = (/"x5", "x25", "x50", "x75", "x95", "stddev"/) + nc_outfile = ncdf_write(MMM, nc_filename_MMM) + + pan = True + pan@gsnMaximize = True + pan@gsnFrame = False + pan@gsnPaperOrientation = "portrait" + n_p = dimsizes(plot) / 3 + if (mod(dimsizes(plot), 3) .ne. 0) then + n_p = n_p + 1 + end if + gsn_panel(wks, plot, (/3, n_p/), pan) + frame(wks) + res1 = True ; plot mods desired + res1@tmXTLabels = names_MMM + res1@tmXTValues = ispan(1, dimsizes(diag_script_info@fig940_project_MMM)\ + + 1, 1) + res1@vpHeightF = 0.5 + res1@vpWidthF = 0.5 + lg_y = new((/1, 6/), "float") + lg_y(0, 0:4) = (/5., 25., 50., 75., 95./) + plot_lg = box_plot_lg(wks_l, 1, lg_y, False, res1, False) + duml1 = new(dimsizes(project_MMM), "graphic") + duml2 = new(dimsizes(project_MMM), "graphic") + duml3 = new(dimsizes(project_MMM), "graphic") + duml4 = new(dimsizes(project_MMM), "graphic") + duml5 = new(dimsizes(project_MMM), "graphic") + do iin = 1, dimsizes(project_MMM) - 1 + mresl = True ; marker mods desired + mresl@gsMarkerIndex = scatters_def(iin) ; polymarker style + ; mresl@gsMarkerSizeF = 60./dim_reg ; polymarker size + mresl@gsMarkerSizeF = 0.025 * 26 / dim_reg + mresl@gsMarkerThicknessF = 3. 
+ mresl@gsMarkerColor = "purple" ; polymarker color + duml1(iin) = gsn_add_polymarker(wks_l, plot_lg, iin + 1, lg_y(0, 0), mresl) + mresl@gsMarkerColor = "blue" ; polymarker color + duml2(iin) = gsn_add_polymarker(wks_l, plot_lg, iin + 1, lg_y(0, 1), mresl) + mresl@gsMarkerColor = "green" ; polymarker color + duml3(iin) = gsn_add_polymarker(wks_l, plot_lg, iin + 1, lg_y(0, 2), mresl) + mresl@gsMarkerColor = "orange" ; polymarker color + duml4(iin) = gsn_add_polymarker(wks_l, plot_lg, iin + 1, lg_y(0, 3), mresl) + mresl@gsMarkerColor = "red" ; polymarker color + duml5(iin) = gsn_add_polymarker(wks_l, plot_lg, iin + 1, lg_y(0, 4), mresl) + end do + draw(plot_lg) + delete(pan) + delete(n_p) + frame(wks_l) + pan = True + pan@gsnMaximize = True + pan@gsnFrame = False + pan@gsnPaperOrientation = "portrait" + map_r = gsn_csm_contour_map(wks_r, cell_fill2, mres_r) + + do ir = 0, dim_reg - 1 + txres@txFontColor = "black" + region = region_label(ir) + add_r = 0.0 + add_y = 0.0 + plot_on = True + if (region_label(ir).eq."Arctic_land") then + region = "Arctic" + add_r = 5.0 + end if + if (region_label(ir).eq."Arctic_sea") then + region = "Arctic" + add_r = 60.0 + end if + if (region_label(ir).eq."Antarctic_land") then + region = "Antarctic" + add_r = 110.0 + end if + if (region_label(ir).eq."Antarctic_sea") then + region = "Antarctic" + add_y = 15.0 + end if + if (region_label(ir).eq."NorthernTropicalPacific") then + add_r = 10.0 + end if + if (region_label(ir).eq."World") then + plot_on = False + end if + if (region_label(ir).eq."World_land") then + plot_on = False + end if + if (region_label(ir).eq."World_sea") then + plot_on = False + end if + if (plot_on) then + cregion_array = (/select_region_srex_poly(region)/) + dum = gsn_add_text(wks_r, map_r, region_label(ir),\ + cregion_array(0) + add_r, \ + cregion_array(1) + add_y, txres) + delete(cregion_array) + end if + end do + draw(map_r) + frame(wks_r) + ; ------------------------------------------------------------- + ; Add provenance + ; ------------------------------------------------------------- + + caption = "Seasonal- and annual mean biases of " + var0 + " over " + \ + "land in different regions" + + log_provenance(nc_filename_MMM, \ + wks@fullname, \ + caption, \ + (/"diff", "mean", "perc"/), \ + (/"reg"/), \ + "box", \ + (/"cionni_irene"/), \ + (/"flato13ipcc"/), \ + metadata_att_as_array(info0, "filename")) + +end diff --git a/esmvaltool/diag_scripts/regional_downscaling/Figure9.41.ncl b/esmvaltool/diag_scripts/regional_downscaling/Figure9.41.ncl new file mode 100644 index 0000000000..6e714ab8a4 --- /dev/null +++ b/esmvaltool/diag_scripts/regional_downscaling/Figure9.41.ncl @@ -0,0 +1,286 @@ +; ############################################################################# +; ############################################################################# +; # Author: Irene Cionni (ENEA, Italy) # +; # ESMVal project +; ############################################################################# +; # * Creates plot ranked monthly data of observations vs models. 
#
+; # 3 pages are created:
+; #   1st page: Centred (model data are recentred to the observational mean,
+; #             so that deviations from the diagonal reflect differences in
+; #             the shape of the distribution only)
+; #   2nd page: Uncentred (data not adjusted)
+; #   3rd page: Full models legend
+; #
+; # Required variable_info attributes (variable specific):
+; #   * reference_dataset: reference dataset name
+; #     (variable_info[0]@reference_dataset)
+; #
+; # Optional diag_script_info attributes (diagnostics specific):
+; #   * fig941_region_label: region label, default "MEDs"
+; #   * styleset, default "CMIP5"
+; #
+; # Caveats:                                                                  #
+; #                                                                           #
+; # Modification history:
+; #    20221026-weigel_katja: header updated
+; #    20220314-weigel_katja: header updated
+; #############################################################################
+; #############################################################################
+load "$diag_scripts/../interface_scripts/interface.ncl"
+load "$diag_scripts/shared/statistics.ncl"
+load "$diag_scripts/shared/plot/aux_plotting.ncl"
+load "$diag_scripts/shared/plot/style.ncl"
+load "$diag_scripts/regional_downscaling/regional_function.ncl"
+
+begin
+  enter_msg(DIAG_SCRIPT, "")
+  var0 = variable_info[0]@short_name
+  UNITS = variable_info[0]@units
+  info0 = select_metadata_by_name(input_file_info, var0)
+  dim_MOD1 = ListCount(info0)
+  names = metadata_att_as_array(info0, "dataset")
+  projects = metadata_att_as_array(info0, "project")
+  exps = metadata_att_as_array(info0, "exp")
+  ensembles = metadata_att_as_array(info0, "ensemble")
+  mips = metadata_att_as_array(info0, "mip")
+  y1 = metadata_att_as_array(info0, "start_year")
+  y2 = metadata_att_as_array(info0, "end_year")
+  long_name = variable_info[0]@long_name
+  if (isatt(variable_info[0], "reference_dataset")) then
+    refname = variable_info[0]@reference_dataset
+    do imod = 0, dimsizes(names) - 1
+      if (names(imod) .eq. refname) then
+        idx_ref = imod
+      end if
+    end do
+  else
+    error_msg("f", DIAG_SCRIPT, "", "a reference dataset " + \
+              "(variable_info[0]@reference_dataset) needs to be defined.")
+  end if
+  if(isatt(variable_info, "long_name")) then
+    LONG_NAME = variable_info@long_name
+  else
+    LONG_NAME = var0
+  end if
+
+  log_info("++++++++++++++++++++++++++++++++++++++++++")
+  log_info(DIAG_SCRIPT + " (var: " + var0 + ")")
+  log_info("++++++++++++++++++++++++++++++++++++++++++")
+
+  ; Set default values for optional diag_script_info attributes
+
+  set_default_att(diag_script_info, "fig941_region_label", "MEDs")
+  set_default_att(diag_script_info, "styleset", "CMIP5")
+  colors = project_style(info0, diag_script_info, "colors")
+  dashes = project_style(info0, diag_script_info, "dashes")
+  thicks = tofloat(project_style(info0, diag_script_info, "thicks"))
+  markers = project_style(info0, diag_script_info, "markers")
+  lin_mar_mod = new((/dimsizes(colors)/), "string")
+  lin_mar_mod = (/"Lines"/)
+  region_label = tostring(diag_script_info@fig941_region_label)
+  dim_reg = dimsizes(region_label)
+  flag_mod = where(projects.ne."OBS" .and.projects.ne."OBS6"\
+                   .and.
projects.ne."obs4mips", 1, 0) + index_mod = ind(flag_mod.gt.0) + index_obs = ind(flag_mod.eq.0) + dim_MOD = dimsizes(index_mod) + dim_OBS = 0 + + if (.not.all(ismissing(index_obs))) then + dim_OBS = dimsizes(index_obs) + list_obs_mod = names(index_obs) + end if + ; make sure path for (mandatory) netcdf output exists + + work_dir = config_user_info@work_dir + "/" + ; Create work dir + system("mkdir -p " + work_dir) + +end +begin + work_dir = config_user_info@work_dir + "/" + ; Create work dir + system("mkdir -p " + work_dir) + wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "Figure9_41_" + var0) + wks_r = get_wks("dummy_for_wks", DIAG_SCRIPT, "Figure9_41_raw_" + var0) + wks_l = get_wks("dummy_for_wks", DIAG_SCRIPT, "Figure9_41_legend_" + var0) + ; +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + Amask = read_data(info0[idx_ref]) + lsdata = Amask(0, :, :) + ; ***************************************************************** + plot = new(dimsizes(region_label), "graphic") + plot1 = new(dimsizes(region_label), "graphic") + dum_diag = new(dimsizes(region_label), "graphic") + dum_diag1 = new(dimsizes(region_label), "graphic") + do ir = 0, dim_reg - 1 + case1 = region_label(ir) + cell_fill = (/select_region_srex(region_label(ir), lsdata)/) + cell_fill!0 = "lat" + cell_fill!1 = "lon" + cell_fill&lat = lsdata&lat + cell_fill&lon = lsdata&lon + cell_fill = where(ismissing(cell_fill), -1., 1.) + models1 = new((/dim_MOD/), "string") + colors1 = new((/dim_MOD/), typeof(colors)) + dashes1 = new((/dim_MOD/), typeof(dashes)) + thicks1 = new((/dim_MOD/), typeof(thicks)) + markers1 = new((/dim_MOD/), typeof(markers)) + lin_mar_mod1 = new((/dim_MOD/), "string") + A0 = read_data(info0[idx_ref]) + time = A0&time + Atmp1 = A0 + ; ******************mask region********************* + Atmp1 = (/ mask(A0, cell_fill.eq.-1, False)/) + ; ************************************************** + + Atmp = area_operations(Atmp1, -90., 90., 0., 360., "average", True) + delete(A0) + A = Atmp + delete(Atmp) + qsort(A) ; rank the observation in ascending order + + ; ----------------------------------------------------------- + ; netcdf output file + + nc_filename = work_dir + "fig941_" + var0 + "_" + \ + region_label(ir) + "_obs.nc" + A@var = var0 + A@diag_script = "fig941" + nc_outfile = ncdf_write(A, nc_filename) + + D = new((/dim_MOD, dimsizes(time)/), "double") + C = new((/dim_MOD, dimsizes(time)/), "double") + + jmod = 0 + do imod = 0, dim_MOD - 1 + if (imod .ne. 
idx_ref) then + B0 = read_data(info0[imod]) + B0&time = time + ; ******************mask region********************** + Btmp1 = B0 + Btmp1 = (/mask(B0, cell_fill.eq.-1, False)/) + ; ************************************************** + Btmp = area_operations(Btmp1, -90., 90., 0., 360., "average", True) + delete(B0) + B = Btmp + qsort(B) + delete(Btmp) + delete(Btmp1) + ; -------------------------------------------------------- + ; Bias + C(jmod, :) = B + D(jmod, :) = B - avg(B) + avg(A) + delete(B) + colors1(jmod) = colors(imod) + dashes1(jmod) = dashes(imod) + thicks1(jmod) = thicks(imod)+1 + markers1(jmod) = markers(imod) + models1(jmod) = names(imod) + lin_mar_mod1(jmod) = "Lines" + jmod = jmod + 1 + end if + end do ; imod + + UNITS = variable_info[0]@units + if (UNITS.eq."K") then + UNITS = "~S~o~N~C" + C = C - 273.15 + D = D - 273.15 + A = A - 273.15 + end if + + ; ----------------------------------------------------------- + ; netcdf output file + nc_filename = work_dir + "fig941_" + var0 + "_" \ + + region_label(ir) + "_models.nc" + D!0 = "models" + D!1 = "time" + D&models = models1 + D&time = A&time + D@var = var0 + D@diag_script = "fig941" + D@existing = "overwrite" + nc_outfile = ncdf_write(D, nc_filename) + + ; ----------------------------------------------------------- + ; plots of time(x) vs. latitude(y) + min1 = min(C) + max1 = max(C) + res = True + res@gsnDraw = False ; don't draw + res@gsnFrame = False ; don't advance frame + ; res@tiMainString =variable_info@long_name + res@tiYAxisString = "RAW MODELS" + res@tiXAxisString = "OBS" + res@trYMinF = min1 + res@trYMaxF = max1 + res@trXMinF = min1 + res@trXMaxF = max1 + res@xyLineColors = colors1 ; line colors + res@xyLineThicknesses = thicks1 ; line thicknesses + res@xyDashPatterns = dashes1 ; line patterns + res@gsnLeftString = case1 + res@gsnRightString = UNITS + res@vpHeightF = 0.4 + res@vpWidthF = 0.8 + res@tiMainFontHeightF = 0.02 + res@txFontHeightF = 0.02 + res@tmXBLabelFontHeightF = 0.02 + res@tmYLLabelFontHeightF = 0.02 + ; res@tfPolyDrawOrder = "Draw" + plot(ir) = gsn_csm_xy(wks_r, A, C, res) + lnres = True + dum_diag(ir) = gsn_add_polyline(wks_r, plot(ir), (/min1, max1/), \ + (/min1, max1/), lnres) + res@tiYAxisString = "MODELS" + res@tiXAxisString = "OBS" + plot1(ir) = gsn_csm_xy(wks, A, D, res) + dum_diag1(ir) = gsn_add_polyline(wks, plot1(ir), (/min1, max1/), \ + (/min1, max1/), lnres) + delete(C) + delete(A) + delete(D) + end do + + ; Panel plots + pan = True + pan@gsnMaximize = True + pan@gsnFrame = False + pan@gsnPaperOrientation = "portrait" + n_p = dimsizes(plot) / 3 + if (mod(dimsizes(plot), 3) .ne. 0) then + n_p = n_p + 1 + end if + pan@txString = "Centered " + LONG_NAME + " bias vs. " + \ + names(idx_ref) + gsn_panel(wks, plot1, (/3, n_p/), pan) + + ; ***********add legend**************************** + frame(wks) + pan@txString = "Uncentered " + LONG_NAME + " bias vs. 
" + \ + names(idx_ref) + gsn_panel(wks_r, plot, (/3, n_p/), pan) + frame(wks_r) + add_legenda_page(wks_l, models1, colors1, dashes1, lin_mar_mod1,\ + thicks1, "0.99") + frame(wks_l) + + ; ------------------------------------------------------------- + ; Add provenance + ; ------------------------------------------------------------- + + caption = "Modelled versus observed monthly mean temperature" + + log_provenance(nc_filename, \ + wks@fullname, \ + caption, \ + (/"corr"/), \ + (/"reg"/), \ + "line", \ + (/"cionni_irene"/), \ + (/"flato13ipcc"/), \ + metadata_att_as_array(info0, "filename")) + +end diff --git a/esmvaltool/diag_scripts/regional_downscaling/regional_function.ncl b/esmvaltool/diag_scripts/regional_downscaling/regional_function.ncl new file mode 100644 index 0000000000..ff1fb4303c --- /dev/null +++ b/esmvaltool/diag_scripts/regional_downscaling/regional_function.ncl @@ -0,0 +1,1429 @@ +; ************************************************************************** +; This library contains functions in support of regional_downscaling +; the functions included are: +; * function: +; select_region1(region:string) +; * function: +; box_plot_cr(wks:graphic, x[* ]:numeric, y[* ][* ]:numeric, \ +; boxOpts:logical, plotres:logical, lineres:logical) +; * function: +; * box_plot_lg(wks:graphic, x[* ]:numeric, y[* ][* ]:numeric, \ +; boxOpts:logical, plotres:logical, lineres:logical) +; * function: +; select_region_938(region:string, lsdata:numeric) +; * function +; select_region_srex_poly(region:string) +; * function: +; select_region_srex(region:string, lsdata:numeric) +; * function: +; get_unique_intersection( a, b ) +; * function: +; get_unique_difference( a, b ) +; * procedure: +; add_legenda_page(wks, MODEL:string, colors:string, dashs:numeric,\ +; scatters:string, ticknesses:numeric, place:string) +; # Caveats: # +; # # +; # Modification history: +; 20220314-weigel_katja: get_unique_intersection and get_unique_difference +; (ESMValTool v1 functions to allow labelling of +; multiple projects) +; 2021????-weigel_katja: replaces select_region_939 and select_region_940 +; with select_region_srex, added select_region_srex_poly +; ************************************************************************** +undef("select_region1") +function select_region1(region:string) +; +; Arguments +; region: a string specifying the region to be selected. +; +; Return value +; An array with the region boundary as (latmin, latmax, lonmin, lonmax) +; with the name of the region as a string attribute @name. +; +; Description +; Translates a region specification into lat/lon boundaries and a region +; name as an attribute. +; +; Modification history +; +; 20141205 -A_gott_kl: adjusted names to Righi et al. (2015). +; 20140410 -A_fran_fr: extended to midlat, equatorial and polar regions. +; 20140129 -A_fran_fr: written. 
+;
+local funcname, scriptname, verbosity, region
+begin
+
+ funcname = "select_region1"
+ scriptname = "diag_scripts/lib/ncl/latlon.ncl"
+ verbosity = stringtointeger(getenv("ESMValTool_verbosity"))
+ ; enter_msg(scriptname, funcname, 10)
+
+ if (region.eq."Global") then
+ region_array = (/ -90., 90., 0., 360./)
+ region_array@name = "Glob"
+ return(region_array)
+ end if
+ if (region.eq."Tropics") then
+ region_array = (/ -20., 20., 0., 360./)
+ region_array@name = "Trop"
+ return(region_array)
+ end if
+ if (region.eq."NH extratropics") then
+ region_array = (/20., 90., 0., 360./)
+ region_array@name = "NHext"
+ return(region_array)
+ end if
+ if (region.eq."SH extratropics") then
+ region_array = (/ -90., -20., 0., 360./)
+ region_array@name = "SHext"
+ return(region_array)
+ end if
+ if (region.eq."NH equatorial") then
+ region_array = (/0., 30., 0., 360./)
+ region_array@name = "NHtrop"
+ return(region_array)
+ end if
+ if (region.eq."SH equatorial") then
+ region_array = (/ -30., 0., 0., 360./)
+ region_array@name = "SHtrop"
+ return(region_array)
+ end if
+ if (region.eq."Northern Hemisphere") then
+ region_array = (/20., 90., 0., 360./)
+ region_array@name = "NH"
+ return(region_array)
+ end if
+ if (region.eq."Southern Hemisphere") then
+ region_array = (/ -90., -20., 0., 360./)
+ region_array@name = "SH"
+ return(region_array)
+ end if
+ if (region.eq."NH midlatitudes") then
+ region_array = (/35., 60., 0., 360./)
+ region_array@name = "NHmidlat"
+ return(region_array)
+ end if
+ if (region.eq."SH midlatitudes") then
+ region_array = (/ -60., -35., 0., 360./)
+ region_array@name = "SHmidlat"
+ return(region_array)
+ end if
+ if (region.eq."Arctic") then
+ region_array = (/60., 90., 0., 360./)
+ region_array@name = "NHpolar"
+ return(region_array)
+ end if
+ if (region.eq."Antarctic") then
+ region_array = (/ -90., -60., 0., 360./)
+ region_array@name = "SHpolar"
+ return(region_array)
+ end if
+ if (region.eq."Equatorial") then
+ region_array = (/ -10., 10., 0., 360./)
+ region_array@name = "EQ"
+ return(region_array)
+ end if
+ ; Region latitudes and longitudes following Figure 9.38 Chapter 9 AR5
+ ; (as Communication from Chou Sin Chan)
+ ; 1 - ENA Eastern North America
+ ; Lon = 250 to 350
+ ; Lat = 30 to 67
+
+ if (region.eq."ENA") then
+ region_array = (/30., 67., 250., 350./)
+ region_array@name = "ENA"
+ return(region_array)
+ end if
+ ; 2 - WNA Western North America
+ ; Lon = 190 to 250
+ ; Lat = 30 to 67
+ if (region.eq."WNA") then
+ region_array = (/30., 67., 190., 250./)
+ region_array@name = "WNA"
+ return(region_array)
+ end if
+ ; 3 - CAM Central America
+ ; Lon = 240 to 300
+ ; Lat = 10 to 30
+ if (region.eq."CAM") then
+ region_array = (/10., 30., 240., 300./)
+ region_array@name = "CAM"
+ return(region_array)
+ end if
+ ; 4 - TSA Amazon (AMZ in AR5 Fig. 9.38)
+ ; Lon = 277 to 329
+ ; Lat = -20 to 10
+ if (region.eq."TSA") then
+ region_array = (/ -20., 10., 277., 329./)
+ region_array@name = "TSA"
+ return(region_array)
+ end if
+ ; 5 - SSA Southern South America
+ ; Lon = 277 to 329
+ ; Lat = -56 to -20
+ if (region.eq."SSA") then
+ region_array = (/ -56., -20., 277., 329./)
+ region_array@name = "SSA"
+ return(region_array)
+ end if
+ ; 6 - EUM Europe and Mediterranean
+ ; Lon = 350 to 40
+ ; Lat = 30 to 67
+ if (region.eq."EUM") then
+ region_array = (/30., 67., 350., 40./)
+ region_array@name = "EUM"
+ return(region_array)
+ end if
+ ; 7 - NAF Northern Africa
+ ; Lon = 342 to 65
+ ; Lat = 16 to 30
+ if (region.eq."NAF") then
+ region_array = (/16., 30., 342., 65./)
+ region_array@name = "NAF"
+ return(region_array)
+ end if
+ ; 8 - SAF South Africa
+ ; Lon = 10 to 51
+ ; Lat = -35 to -10
+ if (region.eq."SAF") then
+ region_array = (/ -35., -10., 10., 51./)
+ region_array@name = "SAF"
+ return(region_array)
+ end if
+ ; 9 - CAF central Africa
+ ; Lon = 342 to 60
+ ; Lat = -10 to 16
+ if (region.eq."CAF") then
+ region_array = (/ -10., 16., 342., 60./)
+ region_array@name = "CAF"
+ return(region_array)
+ end if
+ ; 10 - NAS Northern Asia
+ ; Lon = 40 to 167
+ ; Lat = 48 to 67
+ if (region.eq."NAS") then
+ region_array = (/48., 67., 40., 167./)
+ region_array@name = "NAS"
+ return(region_array)
+ end if
+ ; 11 - EAS Eastern Asia
+ ; Lon = 100 to 167
+ ; Lat = 20 to 48
+ if (region.eq."EAS") then
+ region_array = (/20., 48., 100., 167./)
+ region_array@name = "EAS"
+ return(region_array)
+ end if
+ ; 12 - CAS Central Asia
+ ; Lon = 40 to 100
+ ; Lat = 30 to 48
+ if (region.eq."CAS") then
+ region_array = (/30., 48., 40., 100./)
+ region_array@name = "CAS"
+ return(region_array)
+ end if
+ ; 13 - SEA Southeast Asia
+ ; Lon = 95 to 151
+ ; Lat = -11 to 20
+ if (region.eq."SEA") then
+ region_array = (/ -11., 20., 95., 151./)
+ region_array@name = "SEA"
+ return(region_array)
+ end if
+ ; 14 - SAS South Asia
+ ; Lon = 65 to 95
+ ; Lat = 5 to 30
+ if (region.eq."SAS") then
+ region_array = (/5., 30., 65., 95./)
+ region_array@name = "SAS"
+ return(region_array)
+ end if
+ ; 15 - AUS Oceania
+ ; Lon = 112 to 180
+ ; Lat = -48 to -11
+ if (region.eq."AUS") then
+ region_array = (/ -48., -11., 112., 180./)
+ region_array@name = "AUS"
+ return(region_array)
+ end if
+end
+; * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+; - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+
+undef("box_plot_cr")
+function box_plot_cr(wks:graphic, x[*]:numeric, y[*][*]:numeric, \
+ boxOpts:logical, plotres:logical, lineres:logical)
+
+begin
+ dimquery = dimsizes(y)
+ numbox = dimquery(0)
+ boxWidths = new((/numbox/), float)
+ if (numbox.ne.dimsizes(x)) then
+ print("boxplot: Fatal: X must be one-dimensional and both X and Y " + \
+ "must have the same rightmost dimension")
+ exit
+ end if
+ if (any(ismissing(x))) then
+ print("boxplot: Fatal: X array cannot contain missing data, exiting")
+ exit
+ end if
+; Developing x-axis
+ xAxis = new(numbox + 2, typeof(x))
+ xAxis(1:numbox) = x
+ if (numbox.ne.1) then
+ dx = x(1) - x(0)
+ xAxis(0) = x(0) - dx
+ xAxis(numbox + 1) = x(numbox - 1) + dx
+ else
+ dx = 1
+ xAxis(0) = x - dx
+ xAxis(2) = x + dx
+ end if
+ if (boxOpts) then
+ if (isatt(boxOpts, "boxWidth")) then
+ if (dimsizes(boxOpts@boxWidth).ne.1.and.dimsizes(boxOpts@boxWidth)\
+ .ne.numbox) then
+ print("boxplot: Number of input box widths must either equal 1 or " +\
+ "the number of boxes (" + numbox + "). Using first specified " +\
+ "box width only.")
+ boxWidths(:) = boxOpts@boxWidth(0)
+ else
+ boxWidths = boxOpts@boxWidth
+ end if
+ else
+ boxWidths(:) = dx * .4
+ end if
+ else
+ boxWidths(:) = dx * .4
+ end if
+ labarr = new(numbox + 2, "string") ; Prepare actual X-axis labels...
+ labarr(0) = ""
+ labarr(numbox + 1) = ""
+ labarr(1:numbox) = xAxis(1:numbox)
+
+ ; Whether to maximize plot in frame.
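+ ; NB: the shared gsn helper get_res_value returns the value of
+ ; gsnMaximize (default False here) and removes the attribute from
+ ; plotres, so attsetvalues below does not apply it a second time.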
+ maximize = get_res_value(plotres, "gsnMaximize", False)
+
+ if (plotres) then
+ ; print("Plot resources detected, accepting")
+ fsatts = getvaratts(plotres)
+ do ty = 0, dimsizes(fsatts) - 1
+ if (fsatts(ty).eq."tmXBLabels") then ; XBLabels
+ if (dimsizes(plotres@tmXBLabels).ne.numbox) then
+ print("boxplot: Fatal: Number of XB Labels does not match number " + \
+ "of boxes, exiting")
+ exit
+ else
+ labarr(1:numbox) = plotres@$fsatts(ty)$
+ end if
+ xblab = plotres@tmXBLabels
+ delete(plotres@tmXBLabels) ; Delete so tmXBLabels is not used
+ end if ; atts are assigned below...
+ end do
+ delete(fsatts)
+ end if
+ ind_blue = plotres@ind_blue
+ ind_red = plotres@ind_red
+ ind_blue = ind_blue + 1
+ ind_red = ind_red + 1
+ plot = create "plot" logLinPlotClass wks
+ "trYMinF" : min(y) - 2
+ "trYMaxF" : max(y) + 2
+ "trXMinF" : min(xAxis)
+ "trXMaxF" : max(xAxis)
+ "pmTickMarkDisplayMode" : "Always"
+ "tmXBMode" : "Explicit"
+ "tmXBValues" : xAxis(ind_blue)
+ "tmXBLabels" : labarr(ind_blue)
+ "tmXBLabelFontHeightF" : 0.014
+ "tmXBLabelAngleF" : 90.
+ "tmXBLabelJust" : "TopRight"
+ "tmXBLabelFontColor" : "blue"
+ "tmYROn" : False
+ "tmXTOn" : False
+ "tmYRBorderOn" : True
+ "tmXTBorderOn" : True
+ "pmTitleDisplayMode": "Always"
+ "tiMainOn" : True
+ "tiMainString" : ""
+ end create
+ if (.not.all(ismissing(plotres@ind_red))) then
+ plot1 = create "plot" logLinPlotClass wks
+ "trYMinF" : min(y) - 2
+ "trYMaxF" : max(y) + 2
+ "trXMinF" : min(xAxis)
+ "trXMaxF" : max(xAxis)
+ "pmTickMarkDisplayMode" : "Always"
+ "tmXBMode" : "Explicit"
+ "tmXBValues" : xAxis(ind_red)
+ "tmXBLabels" : labarr(ind_red)
+ "tmXBLabelFontHeightF" : 0.0105
+ "tmXBLabelAngleF" : 90.
+ "tmXBLabelFontColor" : "red"
+ "tmXBLabelJust" : "TopRight"
+ "tmYROn" : False
+ "tmXTOn" : False
+ "tmYRBorderOn" : True
+ "tmXTBorderOn" : True
+ "pmTitleDisplayMode": "Always" ; allow titles
+ "tiMainOn" : True
+ "tiMainString" : ""
+ end create
+ overlay(plot, plot1)
+ end if
+
+ if (plotres) then
+ attsetvalues(plot, plotres)
+ end if
+
+ polyres = True ; Set up defaults
+ polyres@gsLineColor = "black" ; color of lines
+ polyres@gsLineThicknessF = 1.5 ; thickness of lines
+ polyres@gsLineDashPattern = 0
+ if (lineres) then
+ fsatts = getvaratts(lineres)
+ do ty = 0, dimsizes(fsatts) - 1
+ polyres@$fsatts(ty)$ = lineres@$fsatts(ty)$
+ end do
+ end if
+
+ if (boxOpts) then
+ if (isatt(boxOpts, "boxColors")) then
+ boxcolor = boxOpts@boxColors
+ if (dimsizes(boxcolor).eq.1.or.dimsizes(boxcolor).ne.numbox) then
+ if (dimsizes(boxcolor).ne.numbox) then
+ print("boxplot: warning: Number of input colors must either " + \
+ "equal 1 or the number of boxes (" + numbox + "). "
+ \ + "Using first specified color only.") + end if + polyres@gsLineColor = boxcolor(0) + cflag = 1 + else + cflag = 2 + end if + else + cflag = 1 + end if + else + cflag = 1 + end if + + dum = new((/numbox, 9/), graphic) + do gg = 0, numbox - 1 + ff = xAxis(gg + 1) + if (cflag.eq.2) then + polyres@gsLineColor = boxcolor(gg) + end if + yy = (/y(gg, 4), y(gg, 4)/) + xx = (/(ff - (boxWidths(gg) / 8.)), (ff + (boxWidths(gg) / 8.))/) + if (.not.(any(ismissing(xx).or.ismissing(yy)))) then + dum(gg, 0) = gsn_add_polyline(wks, plot, xx, yy, polyres) + end if + + yy = (/y(gg, 3), y(gg, 4)/) + xx = (/ff, ff/) + polyres@gsLineDashPattern = 0 + if (.not.(any(ismissing(xx).or.ismissing(yy)))) then + dum(gg, 1) = gsn_add_polyline(wks, plot, xx, yy, polyres) + end if + polyres@gsLineDashPattern = 0 + + yy = (/y(gg, 3), y(gg, 3)/) + xx = (/(ff - (boxWidths(gg) / 2.)), (ff + (boxWidths(gg) / 2.))/) + if (.not.(any(ismissing(xx).or.ismissing(yy)))) then + dum(gg, 2) = gsn_add_polyline(wks, plot, xx, yy, polyres) + end if + + yy = (/y(gg, 1), y(gg, 3)/) + xx = (/(ff - (boxWidths(gg) / 2.)), (ff - (boxWidths(gg) / 2.))/) + if (.not.(any(ismissing(xx).or.ismissing(yy)))) then + dum(gg, 3) = gsn_add_polyline(wks, plot, xx, yy, polyres) + end if + + yy = (/y(gg, 2), y(gg, 2)/) + xx = (/(ff - (boxWidths(gg) / 2.)), (ff + (boxWidths(gg) / 2.))/) + if (.not.(any(ismissing(xx).or.ismissing(yy)))) then + dum(gg, 4) = gsn_add_polyline(wks, plot, xx, yy, polyres) + end if + + yy = (/y(gg, 1), y(gg, 3)/) + xx = (/(ff + (boxWidths(gg) / 2.)), (ff + (boxWidths(gg) / 2.))/) + if (.not.(any(ismissing(xx).or.ismissing(yy)))) then + dum(gg, 5) = gsn_add_polyline(wks, plot, xx, yy, polyres) + end if + + yy = (/y(gg, 1), y(gg, 1)/) + xx = (/(ff - (boxWidths(gg) / 2.)), (ff + (boxWidths(gg) / 2.))/) + if (.not.(any(ismissing(xx).or.ismissing(yy)))) then + dum(gg, 6) = gsn_add_polyline(wks, plot, xx, yy, polyres) + end if + + yy = (/y(gg, 0), y(gg, 1)/) + xx = (/ff, ff/) + polyres@gsLineDashPattern = 0 + if (.not.(any(ismissing(xx).or.ismissing(yy)))) then + dum(gg, 7) = gsn_add_polyline(wks, plot, xx, yy, polyres) + end if + polyres@gsLineDashPattern = 0 + + yy = (/y(gg, 0), y(gg, 0)/) + xx = (/(ff - (boxWidths(gg) / 8.)), (ff + (boxWidths(gg) / 8.))/) + if (.not.(any(ismissing(xx).or.ismissing(yy)))) then + dum(gg, 8) = gsn_add_polyline(wks, plot, xx, yy, polyres) + end if + end do + + dumname = unique_string("dum") + plot@$dumname$ = dum + if (isvar("xblab").eq."True") then + plotres@tmXBLabels = xblab ; reassign XBLabels for possible future plots + end if + if (maximize) then + mres = True + mres@gsnDraw = False + mres@gsnFrame = False + maximize_output(wks, mres) + end if + return(plot) +end +; ------------------------------------- +; Adam Phillips +; +undef("box_plot_lg") +function box_plot_lg(wks:graphic, x[*]:numeric, y[*][*]:numeric, \ + boxOpts:logical, plotres:logical, lineres:logical) +begin + dimquery = dimsizes(y) + numbox = dimquery(0) + boxWidths = new((/numbox/), float) + if (numbox.ne.dimsizes(x)) then + print("boxplot: Fatal: X must be one -dimensional and both X and Y " + \ + "must have the same rightmost dimension") + exit + end if + if (any(ismissing(x))) then + print("boxplot: Fatal: X array cannot contain missing data, exiting") + exit + end if + + ; Developing x -axis + xAxis = new(numbox + 2, typeof(x)) + xAxis(1:numbox) = x + if (numbox.ne.1) then + dx = x(1) - x(0) + xAxis(0) = x(0) - dx + xAxis(numbox + 1) = x(numbox - 1) + dx + else + dx = 1 + xAxis(0) = x - dx + xAxis(2) = x + dx + end if + + if 
(boxOpts) then + if (isatt(boxOpts, "boxWidth")) then + if (dimsizes(boxOpts@boxWidth).ne.1.and.(dimsizes(boxOpts@boxWidth).ne.\ + numbox)) then + print("boxplot: Number of input box widths must either equal 1 " + \ + "or the number of boxes (" + numbox + "). Using first " + \ + "specified box width only.") + boxWidths(:) = boxOpts@boxWidth(0) + else + boxWidths = boxOpts@boxWidth + end if + else + boxWidths(:) = dx * .4 + end if + else + boxWidths(:) = dx * .4 + end if + + labarr = new(numbox + 2, "string") + labarr(0) = "" + labarr(numbox + 1) = "" + labarr(1:numbox) = xAxis(1:numbox) + +; Whether to maximize plot in frame. + maximize = get_res_value(plotres, "gsnMaximize", False) + + if (plotres) then + print("Plot resources detected, accepting") + fsatts = getvaratts(plotres) + do ty = 0, dimsizes(fsatts) - 1 + if (fsatts(ty).eq."tmXTLabels") then + print(plotres@tmXTLabels) + if (dimsizes(plotres@tmXTLabels).ne.numbox) then + delete(labarr) + labarr = new(dimsizes(plotres@tmXTLabels) + 2, "string") + labarr(0) = "" + labarr(dimsizes(plotres@tmXTLabels) + 1) = "" + labarr(1:dimsizes(plotres@tmXTLabels)) = plotres@$fsatts(ty)$ + else + labarr(1:numbox) = plotres@$fsatts(ty)$ + end if + xblab = plotres@tmXTLabels + delete(plotres@tmXTLabels) + end if + end do + delete(fsatts) + end if + plot = create "plot" logLinPlotClass wks + "trYMinF" : - 25 + "trYMaxF" : 125 + "trXMinF" : 0.5 + "trXMaxF" : dimsizes(labarr) + 0.5 + "pmTickMarkDisplayMode" : "Always" + "tmXTMode" : "Explicit" + "tmXTValues" : ispan(0, dimsizes(labarr) - 2, 1) + "tmXTLabels" : labarr(1:) + "tmYLMode" : "Explicit" + "tmYLValues" : (/5, 25, 50, 75, 95/) + "tmYLLabels" : (/"5%", "25%", "50%", "75%", "95%"/) + "tmXTLabelFontColor" : "black" + "tmYLOn" : True + "tmXBOn" : False + "tmXTOn" : True + "tmXBLabelsOn": False + "tmXTLabelsOn": True + "tmXTMinorOn" :False + "tmXBMinorOn" :False + "tmYLMinorOn" :False + "tmXUseBottom": False + "tmYRBorderOn" : False + "tmXTBorderOn" : False + "tmYLBorderOn" : False + "tmXBBorderOn" : False + "tmXTMajorLineColor" : "Transparent" + "tmYLMajorLineColor" : "Transparent" + "tmYLLabelFontHeightF" : 0.02 + "tmXTLabelFontHeightF" : 0.02 + "pmTitleDisplayMode": "Always" + "tiMainOn" : True + "tiMainString" : "" + end create + + if (plotres) then + attsetvalues(plot, plotres) + end if + polyres = True + polyres@gsLineColor = "black" + polyres@gsLineThicknessF = 3. + polyres@gsLineDashPattern = 0 + if (lineres) then + fsatts = getvaratts(lineres) + do ty = 0, dimsizes(fsatts) - 1 + polyres@$fsatts(ty)$ = lineres@$fsatts(ty)$ + end do + end if + + if (boxOpts) then + if (isatt(boxOpts, "boxColors")) then + boxcolor = boxOpts@boxColors + if (dimsizes(boxcolor).eq.1.or.dimsizes(boxcolor).ne.numbox) then + if (dimsizes(boxcolor).ne.numbox) then + print("boxplot: warning: Number of input colors must either" + \ + "equal 1 or the number of boxes (" + numbox + ")." 
+ \ + "Using first specified color only.") + end if + polyres@gsLineColor = boxcolor(0) + cflag = 1 + else + cflag = 2 + end if + else + cflag = 1 + end if + else + cflag = 1 + end if + dum = new((/numbox, 9/), graphic) + do gg = 0, numbox - 1 + ff = xAxis(gg + 1) + if (cflag.eq.2) then + polyres@gsLineColor = boxcolor(gg) + end if + yy = (/y(gg, 4), y(gg, 4)/) + xx = (/(ff - (boxWidths(gg)/8.)), (ff + (boxWidths(gg)/8.))/) + if (.not.(any(ismissing(xx).or.ismissing(yy)))) then + dum(gg, 0) = gsn_add_polyline(wks, plot, xx, yy, polyres) + end if + + yy = (/y(gg, 3), y(gg, 4)/) + xx = (/ff, ff/) + polyres@gsLineDashPattern = 0 + if (.not.(any(ismissing(xx).or.ismissing(yy)))) then + dum(gg, 1) = gsn_add_polyline(wks, plot, xx, yy, polyres) + end if + polyres@gsLineDashPattern = 0 + + yy = (/y(gg, 3), y(gg, 3)/) + xx = (/(ff - (boxWidths(gg)/2.)), (ff + (boxWidths(gg)/2.))/) + if (.not.(any(ismissing(xx).or.ismissing(yy)))) then + dum(gg, 2) = gsn_add_polyline(wks, plot, xx, yy, polyres) + end if + + yy = (/y(gg, 1), y(gg, 3)/) + xx = (/(ff - (boxWidths(gg)/2.)), (ff - (boxWidths(gg)/2.))/) + if (.not.(any(ismissing(xx).or.ismissing(yy)))) then + dum(gg, 3) = gsn_add_polyline(wks, plot, xx, yy, polyres) + end if + + yy = (/y(gg, 2), y(gg, 2)/) + xx = (/(ff - (boxWidths(gg)/2.)), (ff + (boxWidths(gg)/2.))/) + if (.not.(any(ismissing(xx).or.ismissing(yy)))) then + dum(gg, 4) = gsn_add_polyline(wks, plot, xx, yy, polyres) + end if + + yy = (/y(gg, 1), y(gg, 3)/) + xx = (/(ff + (boxWidths(gg)/2.)), (ff + (boxWidths(gg)/2.))/) + if (.not.(any(ismissing(xx).or.ismissing(yy)))) then + dum(gg, 5) = gsn_add_polyline(wks, plot, xx, yy, polyres) + end if + + yy = (/y(gg, 1), y(gg, 1)/) + xx = (/(ff - (boxWidths(gg)/2.)), (ff + (boxWidths(gg)/2.))/) + if (.not.(any(ismissing(xx).or.ismissing(yy)))) then + dum(gg, 6) = gsn_add_polyline(wks, plot, xx, yy, polyres) + end if + + yy = (/y(gg, 0), y(gg, 1)/) + xx = (/ff, ff/) + polyres@gsLineDashPattern = 0 + if (.not.(any(ismissing(xx).or.ismissing(yy)))) then + dum(gg, 7) = gsn_add_polyline(wks, plot, xx, yy, polyres) + end if + polyres@gsLineDashPattern = 0 + + yy = (/y(gg, 0), y(gg, 0)/) + xx = (/(ff - (boxWidths(gg)/8.)), (ff + (boxWidths(gg)/8.))/) + if (.not.(any(ismissing(xx).or.ismissing(yy)))) then + dum(gg, 8) = gsn_add_polyline(wks, plot, xx, yy, polyres) + end if + end do + + dumname = unique_string("dum") + plot@$dumname$ = dum + if (isvar("xblab").eq."True") then + ; plotres@tmXBLabels = + plotres@tmXBLabels = xblab + end if + if (maximize) then + mres = True + mres@gsnDraw = False + mres@gsnFrame = False + maximize_output(wks, mres) + end if + return(plot) +end +; - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +; ##################################################################### +undef("select_region_938") +function select_region_938(region:string, lsdata:numeric) +; Arguments +; region: a string specifying the region to be selected. +; +; Return value +; An array with the region boundary as (lon1, lat1, lon2,\ +; lat2, lon3, lat3, lon4, lat4 ect) based on the borders +; given through select_region1 +; with the name of the region as a string attribute @name. 
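+; Example (hypothetical sketch; "lsdata" stands for any 2D field with
+; lat/lon coordinates, as passed in by the calling diagnostics):
+;   inout = select_region_938("SAF", lsdata)       ; 1. inside, missing outside
+;   masked = mask(lsdata, ismissing(inout), False) ; keep region points only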
+; +; Description +; Translates a region specification into lat/lon boundaries and a region +; name as an attribute for non SREX regions +; +; Modification history +begin + + funcname = "select_region_938" + scriptname = "diag_scripts/lib/ncl/latlon.ncl" + verbosity = stringtointeger(getenv("ESMValTool_verbosity")) + + region_array_hlp = select_region1(region) + region_array = (/(/region_array_hlp(2), region_array_hlp(0)/), \ + (/region_array_hlp(2), region_array_hlp(1)/), \ + (/region_array_hlp(3), region_array_hlp(1)/), \ + (/region_array_hlp(3), region_array_hlp(0)/)/) + + lat = lsdata&lat + lon = lsdata&lon + inout = new((/dimsizes(lsdata)/), "float") + in = 0 + inn = 0 + do ilat = 0, dimsizes(lat) - 1 + do ilon = 0, dimsizes(lon) - 1 + if (dimsizes(region_array(:, 0)).le.2) then + if ((lat(ilat).ge.region_array(0, 1)).and.(lat(ilat)\ + .lt.region_array(1, 1)).and.(lon(ilon).ge.region_array(0, 0))\ + .and.(lon(ilon).lt.region_array(1, 0))) then + inou = "True" + if (inou.eq."True") then + inout(ilat, ilon) = 1. + in = in + 1 + end if + delete(inou) + inn = inn + 1 + end if + else + inou = gc_inout(lat(ilat), lon(ilon), region_array(:, 1),\ + region_array(:, 0)) + + if(inou.eq."True") then + inout(ilat, ilon) = 1. + in = in + 1 + delete(inou) + end if + inn = inn + 1 + end if + end do + end do + + return(inout) +end +; °°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°° +; - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +; ##################################################################### +undef("select_region_srex_poly") +function select_region_srex_poly(region:string) +; +; Arguments +; region: a string specifying the region to be selected. +; +; Return value +; An array with the position for the string in the region boundary as +; (lon, lat) +; +; Description +; Position for the name of the region on a map. 
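+; Example (hypothetical sketch; "wks" and "plot" stand for an existing
+; workstation and map plot):
+;   pos = select_region_srex_poly("ALAs")  ; -> (/200., 68./) as (lon, lat)
+;   txres = True
+;   txid = gsn_add_text(wks, plot, "ALAs", pos(0), pos(1), txres)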
+; +; Modification history +begin + + funcname = "select_region_srex_poly" + scriptname = "diag_scripts/lib/ncl/latlon.ncl" + verbosity = stringtointeger(getenv("ESMValTool_verbosity")) + + if (region.eq."ALAs") then + region_array = (/200.000, 68.000/) + end if + if (region.eq."AMZs") then + region_array = (/285., 0.000/) + end if + if (region.eq."CAMs") then + region_array = (/265., 15./) + end if + if (region.eq."CASs") then + region_array = (/62.000, 40.000/) + end if + if (region.eq."CEUs") then + region_array = (/ 15.000, 52.000/) + end if + if (region.eq."CGIs") then + region_array = (/265.000, 65.000/) + end if + if (region.eq."CNAs") then + region_array = (/255.000, 40.000/) + end if + if (region.eq."EAFs") then + region_array = (/30.000, -5.0/) + end if + if (region.eq."EASs") then + region_array = (/110.000, 30.000/) + end if + if (region.eq."ENAs") then + region_array = (/280.000, 30.000/) + end if + if (region.eq."MEDs") then + region_array = (/355.000, 38.000/) + end if + if (region.eq."NASs") then + region_array = (/50.000, 72.000/) + end if + if (region.eq."NAUs") then + region_array = (/120.000, -20.000/) + end if + if (region.eq."NEBs") then + region_array = (/315.000, -6.000/) + end if + if (region.eq."NEUs") then + region_array = (/355.000, 65.000/) + end if + if (region.eq."SAFs") then + region_array = (/320.000, -20.000/) + end if + if (region.eq."SAHs") then + region_array = (/350.000, 22.000/) + end if + if (region.eq."SASs") then + region_array = (/70.000, 10.000/) + end if + if (region.eq."SAUs") then + region_array = (/120.000, -42.000/) + end if + if (region.eq."SSAs") then + region_array = (/295.0, -40.000/) + end if + if (region.eq."SEAs") then + region_array = (/105.000, -0.000/) + end if + if (region.eq."TIBs") then + region_array = (/85.000, 40.000/) + end if + if (region.eq."WAFs") then + region_array = (/350.000, 0.0/) + end if + if (region.eq."WASs") then + region_array = (/45.000, 25.000/) + end if + if (region.eq."WSAs") then + region_array = (/280.0, -30.0/) + end if + if (region.eq."WNAs") then + region_array = (/233.000, 35.0/) + end if + if (region.eq."Arctic") then + region_array = (/300.000, 75.000/) + end if + if (region.eq."Antarctic") then + region_array = (/300.000, -75.000/) + end if + if (region.eq."Caribbean") then + region_array = (/290.0, 15.0/) + end if + if (region.eq."WesternIndianOcean") then + region_array = (/60.000, -20.000/) + end if + if (region.eq."NorthernIndianOcean") then + region_array = (/70.000, 10.000/) + end if + if (region.eq."NorthernTropicalPacific") then + region_array = (/175.000, 10.000/) + end if + if (region.eq."EquatorialTropicalPacific") then + region_array = (/185.000, 0.000/) + end if + if (region.eq."SouthernTropicalPacific") then + region_array = (/195.000, -10.000/) + end if + if (region.eq."World") then + region_array = (/300.000, 0.000/) + end if + + return(region_array) + +end + +; * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * +undef("select_region_srex") +function select_region_srex(region:string, lsdata:numeric) +; Arguments +; region: a string specifying the region to be selected. +; +; Return value +; An array with the region boundary as (lon1, lat1, lon2,\ +; lat2, lon3, lat3, lon4, lat4 ect) +; with the name of the region as a string attribute @name. +; +; Description +; Translates a region specification into lat/lon boundaries and a region +; name as an attribute. 
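+; Example (hypothetical sketch, mirroring the call in the fig941
+; diagnostic above; "data" and "lsdata" are placeholders):
+;   cell_fill = select_region_srex("MEDs", lsdata) ; 1. inside, missing outside
+;   cell_fill = where(ismissing(cell_fill), -1., 1.)
+;   masked = mask(data, cell_fill.eq.-1, False)    ; mask out non-MEDs points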
+; +; Modification history +begin + + funcname = "select_region_srex" + scriptname = "diag_scripts/lib/ncl/latlon.ncl" + verbosity = stringtointeger(getenv("ESMValTool_verbosity")) + + if (region.eq."ALAs") then + region_array = (/(/255.000, 60.000/), \ + (/191.978, 60.000/), \ + (/191.978, 72.554/), \ + (/255.000, 72.554/)/) + end if + if (region.eq."AMZs") then + region_array = (/(/293.623, - 20.000/), \ + (/280.271, - 1.239/), \ + (/291.2, 11.439/), \ + (/310.00, 11.439/), \ + (/310.00, - 20.00/)/) + end if + if (region.eq."CAMs") then + region_array = (/(/291.2, 11.439/), \ + (/280.271, -1.239/), \ + (/241.677, 28.566/), \ + (/269.685, 28.566/)/) + end if + if (region.eq."CASs") then + region_array = (/(/60.000, 30.000/), \ + (/60.000, 50.000/), \ + (/75.000, 50.000/), \ + (/75.000, 30.000/)/) + end if + if (region.eq."CEUs") then + region_array = (/(/ -10.000, 45.000/), \ + (/ -10.000, 48.000/), \ + (/ 40.000, 61.320/), \ + (/ 40.000, 45.000/)/) + end if + if (region.eq."CGIs") then + region_array = (/(/350.000, 50.000/), \ + (/255.000, 50.000/), \ + (/255.000, 85.000/), \ + (/350.000, 85.000/)/) + end if + if (region.eq."CNAs") then + region_array = (/(/275.000, 50.000/), \ + (/275.000, 28.566/), \ + (/255.000, 28.566/), \ + (/255.000, 50.000/)/) + end if + if (region.eq."EAFs") then + region_array = (/(/25.000, -11.365/), \ + (/25.000, 15.000/), \ + (/51.990, 15.000/), \ + (/51.990, -11.365/)/) + end if + if (region.eq."EASs") then + region_array = (/(/100.000, 20.000/), \ + (/100.000, 50.000/), \ + (/145.000, 50.000/), \ + (/145.000, 20.000/)/) + end if + if (region.eq."ENAs") then + region_array = (/(/300.000, 25.000/), \ + (/275.000, 25.000/), \ + (/275.000, 50.000/), \ + (/300.000, 50.000/)/) + end if + if (region.eq."MEDs") then + region_array = (/(/350.000, 30.000/), \ + (/350.000, 45.000/), \ + (/40.000, 45.000/), \ + (/40.000, 30.000/)/) + end if + if (region.eq."NASs") then + region_array = (/(/40.000, 50.000/), \ + (/40.000, 70.000/), \ + (/180.000, 70.000/), \ + (/180.000, 50.000/)/) + end if + if (region.eq."NAUs") then + region_array = (/(/110.000, -30.000/), \ + (/110.000, -10.000/), \ + (/155.000, -10.000/), \ + (/155.000, -30.000/)/) + end if + if (region.eq."NEBs") then + region_array = (/(/326.000, -20.000/), \ + (/310.000, -20.000/), \ + (/310.000, 0.000/), \ + (/326.000, 0.000/)/) + end if + if (region.eq."NEUs") then + region_array = (/(/350.000, 48.000/), \ + (/350.000, 75.000/), \ + (/40.000, 75.000/), \ + (/40.000, 61.320/)/) + end if + if (region.eq."SAFs") then + region_array = (/(/350.000, -35.000/), \ + (/350.000, -11.365/), \ + (/308.01, -11.365/), \ + (/308.01, -35.000/)/) + end if + if (region.eq."SAHs") then + region_array = (/(/340.000, 15.000/), \ + (/340.000, 30.000/), \ + (/ 40.000, 30.000/), \ + (/ 40.000, 15.000/)/) + end if + if (region.eq."SASs") then + region_array = (/(/60.000, 5.000/), \ + (/60.000, 30.000/), \ + (/100.000, 30.000/), \ + (/100.000, 20.000/), \ + (/95.000, 20.000/), \ + (/95.000, 5.000/)/) + end if + if (region.eq."SAUs") then + region_array = (/(/110.000, -50.000/), \ + (/110.000, -30.000/), \ + (/180.000, -30.000/), \ + (/180.000, -50.000/)/) + end if + if (region.eq."SSAs") then + region_array = (/(/320.624, -20.000/), \ + (/320.624, -56.704/), \ + (/292.652, -56.704/), \ + (/287.859, -50.000/), \ + (/293.623, -20.000/)/) + end if + if (region.eq."SEAs") then + region_array = (/(/95.000, -10.000/), \ + (/95.000, 20.000/), \ + (/155.000, 20.000/), \ + (/155.000, -10.000/)/) + end if + if (region.eq."TIBs") then + 
region_array = (/(/75.000, 30.000/), \ + (/75.000, 50.000/), \ + (/100.000, 50.000/), \ + (/100.000, 30.000/)/) + end if + if (region.eq."WAFs") then + region_array = (/(/340.000, -11.365/), \ + (/340.000, 15.000/), \ + (/25.000, 15.000/), \ + (/25.000, -11.365/)/) + end if + if (region.eq."WASs") then + region_array = (/(/40.000, 15.000/), \ + (/40.000, 50.000/), \ + (/60.000, 50.000/), \ + (/60.000, 15.000/)/) + end if + if (region.eq."WSAs") then + region_array = (/(/280.271, -1.239/), \ + (/293.623, -20.000/), \ + (/287.859, -50.000/), \ + (/292.652, -56.704/), \ + (/277.978, -56.704/), \ + (/277.978, 0.530/)/) + end if + + if (region.eq."WNAs") then + region_array = (/(/255.000, 28.566/), \ + (/230.000, 28.566/), \ + (/230.000, 60.000/), \ + (/255.000, 60.000/)/) + end if + + if (region.eq."Arctic") then + region_array = (/(/0.000, 67.500/), \ + (/360.000, 90.000/)/) + + end if + if (region.eq."Antarctic") then + region_array = (/(/0.000, -90.000/), \ + (/360.000, -50.000/)/) + end if + if (region.eq."Caribbean") then + region_array = (/(/291.200, 11.400/), \ + (/274.200, 25.000/), \ + (/300.000, 25.000/), \ + (/300.000, 11.400/)/) + end if + if (region.eq."WesternIndianOcean") then + region_array = (/(/52.000, -25.000/), \ + (/75.000, 5.000/)/) + end if + if (region.eq."NorthernIndianOcean") then + region_array = (/(/60.000, 5.000/), \ + (/95.000, 30.000/)/) + end if + if (region.eq."NorthernTropicalPacific") then + region_array = (/(/155.000, 5.000/), \ + (/210.000, 25.000/)/) + end if + if (region.eq."EquatorialTropicalPacific") then + region_array = (/(/155.000, -5.000/), \ + (/230.000, 5.000/)/) + end if + if (region.eq."SouthernTropicalPacific") then + region_array = (/(/155.000, -25.000/), \ + (/230.000, -5.000/)/) + end if + if (region.eq."World") then + region_array = (/(/0.000, -90.000/), \ + (/360.000, 90.000/)/) + end if + + lat = lsdata&lat + lon = lsdata&lon + inout = new((/dimsizes(lsdata)/), "float") + in = 0 + inn = 0 + do ilat = 0, dimsizes(lat) - 1 + do ilon = 0, dimsizes(lon) - 1 + if (dimsizes(region_array(:, 0)).le.2) then + if ((lat(ilat).ge.region_array(0, 1)).and.(lat(ilat)\ + .lt.region_array(1, 1)).and.(lon(ilon).ge.region_array(0, 0))\ + .and.(lon(ilon).lt.region_array(1, 0))) then + inou = "True" + if (inou.eq."True") then + inout(ilat, ilon) = 1. + in = in + 1 + end if + delete(inou) + inn = inn + 1 + end if + else + inou = gc_inout(lat(ilat), lon(ilon), region_array(:, 1),\ + region_array(:, 0)) + + if(inou.eq."True") then + inout(ilat, ilon) = 1. 
+ in = in + 1 + delete(inou) + end if + inn = inn + 1 + end if + end do + end do + + return(inout) +end +; °°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°°° +undef("add_legenda_page") +procedure add_legenda_page(wks, MODEL:string, colors:string, dashs:numeric,\ + scatters:string, ticknesses:numeric, place:string) +local MODEL, dim_mod, colors, dashs, edge_x, edge_y, edge_res, tx1res, \ + dim_lb, xleg, xl, yleg, yl, xtxt, ytxt, place, dim_lines,\ + space_lines, lin_sp, scatters, ticknesses +begin +; * * * Create Legend * * * * * * * * * * * * * * * * * * * * * * * * + dim_mod = dimsizes(MODEL) + dim_lines = floattoint(dim_mod/3) + 1 + if ((mod(dim_mod, 3)).eq.0) then + dim_lines = floattoint(dim_mod/3) + 1 + else + dim_lines = floattoint(dim_mod/3) + 2 + end if + lin_sp = 0.025 + space_lines = (dim_lines + 1) * lin_sp + + if (place.eq."bottom") + top_line = 0.20 + end if + if (place.eq."middle") + top_line = 0.42 + end if + if (place.eq."top") + top_line = 0.99 + end if + if ((place.ne."bottom").and.(place.ne."middle").and.(place.ne."top")) then + top_line = tofloat(place) + end if + bot_line = top_line - space_lines + ; edge_x = (/ 0.12, 0.92, 0.92, 0.12, 0.12/) + + edge_x = (/ 0.22, 0.76, 0.76, 0.22, 0.22/) + edge_y = (/space_lines + bot_line, space_lines + bot_line,\ + bot_line, bot_line, space_lines + bot_line/) + edge_res = True ; Indicate you want to set some resources. + edge_res@gsLineColor = "white" + gsn_polyline_ndc(wks, edge_x, edge_y, edge_res) + +; * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + + dim_lb = dimsizes(MODEL) + + gs1res = True + tx1res = True + tx1res@txFontHeightF = lin_sp / 5 * 2 + + xleg = new((/3 * dim_lines, 4/), "float") + xl = new((/3, 4/), "float") + + do i = 0, 2 + xl(0, :) = fspan((edge_x(0) + 0.02), (edge_x(0) + 0.07), 4) + xl(1, :) = fspan(((edge_x(2) + edge_x(0))/2 - 0.09),\ + ((edge_x(2) + edge_x(0))/2 - 0.04), 4) + xl(2, :) = fspan((edge_x(2) - 0.20), (edge_x(2) - 0.15), 4) + end do + + nt = 0 + do j = 0, dim_lines - 1 + do i = 0, 2 + xleg(nt, :) = (/xl(i, :)/) + nt = nt + 1 + end do + end do + + yleg = new((/3 * dim_lines, 4/), "float") + yl = new(dim_lines, "float") + + do i = 0, dim_lines - 1 + yl(dim_lines - 1 - i) = edge_y(3) + (((edge_y(0) - \ + (edge_y(3)))/dim_lines) * \ + (i + 1)) - (lin_sp) + end do + + nt = 0 + do j = 0, dim_lines - 1 + do i = 0, 2 + yleg(nt, :) = (/yl(j)/) + nt = nt + 1 + end do + end do + + xtxt = new((/dim_lines * 3/), "float") + ytxt = new((/dim_lines * 3/), "float") + + nt = 0 + do j = 0, dim_lines - 1 + do i = 0, 2 + xtxt(nt) = xl(i, 3) + lin_sp/5. + nt = nt + 1 + end do + end do + nt = 0 + do i = 0, dim_lines - 1 + do j = 0, 2 + ytxt(nt) = (/yl(i)/) + nt = nt + 1 + end do + end do + + do i = 0, (dimsizes(MODEL) - 1) + gs1res@gsLineColor = colors(i) + gs1res@gsLineDashPattern = dashs(i) + gs1res@gsLineThicknessF = ticknesses(i) + tx1res@txFontColor = colors(i) + tx1res@txJust = "CenterLeft" ; change justification + if (scatters(i).eq."Markers") + gs1res@gsMarkerColor = colors(i) + gs1res@gsMarkerIndex = dashs(i) + gs1res@gsMarkerSizeF = 0.01 + gs1res@gsMarkerThicknessF = 1.5 + gsn_polymarker_ndc(wks, xleg(i, 2), yleg(i, 2), gs1res) + else + + gsn_polyline_ndc(wks, xleg(i, :), yleg(i, :), gs1res) + end if + + gsn_text_ndc(wks, MODEL(i), xtxt(i), ytxt(i), tx1res) + end do + +end + +; --------------------------------------------------------------------- +; The INTERSECTION of two sets is the set of elements which are in both sets. 
+; For example: let A = (1,2,3) and B = (3,4,5).
+; The INTERSECTION of A and B, written A intersection B = (3).
+; ---------------------------------------------------------------------
+undef("get_unique_intersection")
+function get_unique_intersection(a, b)
+local atype, btype, au, bu, nau, nbu, abi, k, n, m
+begin
+ atype = typeof(a)
+ btype = typeof(b)
+ if (atype.ne.btype) then
+ print("get_unique_intersection: arguments a and b must be the same type")
+ print(" typeof(a)="+atype)
+ print(" typeof(b)="+btype)
+ exit
+ end if
+ ; 'get_unique_values' sorts the elements
+ ; and returns a one-dimensional array
+ au = get_unique_values(a) ; avoid duplicate 'a' entries
+ bu = get_unique_values(b) ; avoid duplicate 'b' entries
+
+ nau = dimsizes(au)
+ nbu = dimsizes(bu)
+ ; reserve space
+ abi = new(min((/nau, nbu/)), typeof(a), default_fillvalue(atype))
+ k = -1
+
+ na = 0 ; MH suggested loop
+ nb = 0
+ do while(na.lt.nau.and.nb.lt.nbu)
+ if (bu(nb).gt.au(na)) then
+ na = na + 1
+ else
+ if (au(na).gt.bu(nb)) then
+ nb = nb + 1
+ else ; they are equal
+ k = k+1
+ abi(k) = au(na)
+ nb = nb + 1
+ na = na + 1
+ end if
+ end if
+ end do
+
+ if (k.eq.-1) then
+ abi := new(1, typeof(a), default_fillvalue(atype))
+ abi@tag = "get_unique_intersection; NULL set; no intersection pairs"
+ return(abi)
+ end if
+
+ abi@tag = "NCL: get_unique_intersection has created the result"
+ return(abi(0:k))
+end
+
+; ---------------------------------------------------------------------
+; The DIFFERENCE of two sets is the set of elements
+; which are unique to each set.
+; For example: let A = (1,2,3) and B = (3,4,5).
+; The DIFFERENCE of A and B, written A difference B = (1,2,4,5).
+; ---------------------------------------------------------------------
+undef("get_unique_difference")
+function get_unique_difference(a, b)
+local atype, btype, abi, nabi, au, bu, abd, AB, nAB, n
+begin
+ atype = typeof(a)
+ btype = typeof(b)
+ if (atype.ne.btype) then
+ print("get_unique_difference: arguments a and b must be the same type")
+ print(" typeof(a)=" + atype)
+ print(" typeof(b)=" + btype)
+ exit
+ end if
+ ; 'get_unique_values' sorts the elements
+ ; and returns a one-dimensional array
+ abi = get_unique_intersection(a, b) ; intersect (common) values
+ if (ismissing(abi(0))) then ; NO intersect
+ ; 'get_unique_values' sorts the elements
+ au = get_unique_values(a) ; unique 'a' [au]
+ bu = get_unique_values(b) ; unique 'b' [bu]
+ ; all unique values [abd]; difference
+ abd = array_append_record(au, bu, 0)
+ delete([/au, bu /])
+ ; must sort again because 'ab' may not be in order
+ if (atype.eq."string") then
+ sqsort(abd)
+ else
+ qsort(abd)
+ end if
+ abd@tag = "NCL: get_unique_difference; arrays are DISJOINT; " + \
+ "no intersection"
+ return(abd)
+ end if
+ nabi = dimsizes(abi)
+
+ au = get_unique_values(a) ; 'a' [u]nique entries
+ bu = get_unique_values(b) ; 'b' [u]nique entries
+ nau = dimsizes(au)
+ nbu = dimsizes(bu)
+ ABu = array_append_record(au, bu, 0) ; unique AB
+ delete([/au, bu /])
+ nABu = dimsizes(ABu)
+ ; reserve space
+ abd = new(nABu, typeof(a), default_fillvalue(atype))
+ k = -1
+ do n = 0, nABu - 1
+ if (.not.any(abi.eq.ABu(n))) then
+ k = k+1
+ abd(k) = ABu(n)
+ end if
+ end do
+ ; must sort again because 'ab' may not be in order
+ if (atype.eq."string") then
+ sqsort(abd(0:k))
+ else
+ qsort(abd(0:k))
+ end if
+ abd@tag = "NCL: get_unique_difference has created the result"
+ return(abd(0:k))
+end
+
+;
###################################################################### diff --git a/esmvaltool/diag_scripts/runoff_et/catchment_analysis.py b/esmvaltool/diag_scripts/runoff_et/catchment_analysis.py index 953b4ffb49..6340813e94 100644 --- a/esmvaltool/diag_scripts/runoff_et/catchment_analysis.py +++ b/esmvaltool/diag_scripts/runoff_et/catchment_analysis.py @@ -33,6 +33,7 @@ from itertools import cycle import iris +import iris.coord_categorisation import numpy as np import esmvaltool.diag_scripts.shared as diag @@ -474,7 +475,13 @@ def get_catchment_data(cfg): """ catchments = get_defaults() catchments['refname'] = 'default' - catchment_filepath = cfg.get('catchmentmask') + if not cfg.get('catchmentmask'): + raise ValueError('A catchment mask file needs to be specified in the ' + 'recipe (see recipe description for details)') + catchment_filepath = os.path.join(cfg['auxiliary_data_dir'], + cfg.get('catchmentmask')) + if not os.path.isfile(catchment_filepath): + raise IOError('Catchment file {} not found'.format(catchment_filepath)) catchments['cube'] = iris.load_cube(catchment_filepath) if catchments['cube'].coord('latitude').bounds is None: catchments['cube'].coord('latitude').guess_bounds() @@ -519,6 +526,7 @@ def get_sim_data(cfg, datapath, catchment_cube): for i, days in enumerate(daypermonth): new_cube.data[i] *= days # Aggregate over year --> unit mm per year + iris.coord_categorisation.add_year(new_cube, 'time') year_cube = new_cube.aggregated_by('year', iris.analysis.SUM) year_cube.units = "mm a-1" # Compute long term mean @@ -548,9 +556,9 @@ def get_catch_avg(catchments, sim_cube): avg = {} for river, rid in catchments['catchments'].items(): data_catch = np.ma.masked_where( - catchments['cube'].data.astype(np.int) != rid, sim_cube.data) + catchments['cube'].data.astype(np.int64) != rid, sim_cube.data) area_catch = np.ma.masked_where( - catchments['cube'].data.astype(np.int) != rid, + catchments['cube'].data.astype(np.int64) != rid, catchments['area'].data) avg[river] = (data_catch * (area_catch / area_catch.sum())).sum() return avg @@ -650,9 +658,8 @@ def main(cfg): for model, mcube in allcubes.items(): filepath = os.path.join(cfg[diag.names.WORK_DIR], '_'.join(['postproc', model]) + '.nc') - if cfg[diag.names.WRITE_NETCDF]: - iris.save(mcube, filepath) - logger.info("Writing %s", filepath) + iris.save(mcube, filepath) + logger.info("Writing %s", filepath) # Write plotdata as ascii files for user information write_plotdata(cfg, plotdata, my_catch) diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig2.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig2.ncl new file mode 100644 index 0000000000..db6426cc3a --- /dev/null +++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig2.ncl @@ -0,0 +1,197 @@ +; ############################################################################ +; russell18jgr_fig2.ncl +; Based on Figure 2 - Russell, J.L.,et al., 2018, J. Geophysical Research – +; Oceans, 123, 3120-3143. https://doi.org/10.1002/2017JC013461 (figure 2) +; +; Author: Pandde Amarjiit (University of Arizona, USA) +; Russell Joellen (University of Arizona, USA) +; Goodman Paul (University of Arizona, USA) +; +; ############################################################################ +; Description +; - Uses original grid (no regridding). +; - Uses ESMValTool land masking (preprocessor) to mask land data points. 
+; - Calculates longitudinally averaged tauu/tauuo values
+; - Plots averaged tauu values vs lat as xy line plot
+;
+; Note - The figure in the russell18jgr paper was made using tauuo, but
+; both tauu and tauuo can be used here; this can be changed in the recipe.
+;
+; Required variable_info_attributes:
+; - preprocessor
+; - mip
+; - project
+; - exp
+; - ensemble
+; - start_year
+; - end_year
+; - additional datasets
+;
+; Required preprocessor attributes:
+; preprocessor_time_land: (for tauu)
+; climate_statistics:
+; operator: mean
+; period: full
+; mask_landsea:
+; mask_out: "land"
+; preprocessor_time - for tauuo
+; climate_statistics:
+; operator: mean
+; period: full
+;
+; Required diag_script_info attributes (diagnostics specific)
+;
+; - styleset : CMIP5 - default
+; - ncdf : default
+;
+; Caveats
+; - uses the same color, dash and thickness for datasets of the same model
+; (e.g. GFDL-ESM2M tauu and tauuo datasets use the same line)
+; - not tested for any observational datasets.
+;
+; Modification history
+; 20190510 - A_russell_joellen, A_pandde_amarjiit - written and implemented
+; for ESMValToolv2
+; ############################################################################
+
+load "$diag_scripts/../interface_scripts/interface.ncl" ; load metadata
+load "$diag_scripts/shared/plot/style.ncl" ; load plot style functions
+load "$diag_scripts/shared/plot/aux_plotting.ncl"
+
+begin
+
+ enter_msg(DIAG_SCRIPT, "")
+ start_years_data = metadata_att_as_array(input_file_info, "start_year")
+ end_years_data = metadata_att_as_array(input_file_info, "end_year")
+ inputfile_paths = metadata_att_as_array(input_file_info, "filename")
+ var_name = metadata_att_as_array(input_file_info, "short_name")
+ nDatasets = ListCount(input_file_info)
+
+end
+
+begin
+
+ ; Set annotations
+ annots = project_style(input_file_info, diag_script_info, "annots")
+ colors = project_style(input_file_info, diag_script_info, "colors")
+ dashes = project_style(input_file_info, diag_script_info, "dashes")
+ thicks = project_style(input_file_info, diag_script_info, "thicks")
+ plotpath = config_user_info@plot_dir + "russell18jgr_fig2_" \
+ + sprinti("%0.4i", min(toint(start_years_data))) + "-" \
+ + sprinti("%0.4i", max(toint(end_years_data)))
+
+ system("mkdir -p " + config_user_info@work_dir)
+ system("mkdir -p " + config_user_info@plot_dir)
+
+ wks = gsn_open_wks(output_type(), plotpath)
+ plot = new(nDatasets, graphic)
+ res = True
+ res@tmXBLabelFontHeightF = 0.008
+ res@tmYLLabelFontHeightF = 0.008
+ res@gsnFrame = False
+ res@gsnDraw = False
+ res@trXMinF = -80.
+ res@trXMaxF = -30.
+ res@trYMaxF = 0.25
+ res@trYMinF = -0.1
+ res@vpHeightF = 0.6
+ res@vpWidthF = 0.6
+ res@tiMainString = " "
+ res@gsnYRefLine = 0
+ res@gsnYRefLineDashPattern = 2
+ res@gsnYRefLineColor = "grey"
+ res@vpYF = 0.85
+ res@vpXF = 0.08
+ res@gsnRightString = "Units - (Pa)"
+ res@gsnRightStringFontHeightF = 15.
+ res@gsnLeftString = "Russell et al -2018 - Figure 2"
+ res@gsnLeftStringFontHeightF = 17.
+ res@tmYLMinorPerMajor = 4
+ res@tmYLTickStartF = -0.1
+ res@tmYLTickSpacingF = 0.1
+ res@tmYLTickEndF = 0.25
+ res@tmYLMode = "Manual"
+ res@tmXBMinorPerMajor = 1
+ res@tmXBTickStartF = -80.
+ res@tmXBTickSpacingF = 5.
+ res@tmXBTickEndF = -30.
+ res@tmXBMode = "Manual"
+ res@tiYAxisFontHeightF = 0.013
+ res@tiXAxisFontHeightF = 0.013
+ res@tiYAxisOffsetXF = 0.0
+ res@tiYAxisString = "Surface eastward wind stress"
+ res@tiXAxisString = "Latitude"
+
+ do iii = 0, nDatasets - 1
+
+ infile_path = inputfile_paths(iii)
+ dataset = read_data(input_file_info[iii]) ; reading data
+ var_lon_avg = dim_avg_n_Wrap(dataset, 1) ; taking zonal average
+ var_lon_avg@var = var_name(iii)
+ var_lon_avg@diag_script = "russell18jgr-fig2.ncl"
+ res@xyDashPatterns = dashes(iii)
+ res@xyLineColors = colors(iii)
+ res@xyExplicitLegendLabels = annots(iii)
+ res@xyLineThicknessF = thicks(iii)
+
+ if(.not.(iscoord(dataset, "lat"))) then
+ ; extracting lat array from file for plotting
+ infile_iii = addfile(infile_path, "r")
+ area_lat = infile_iii->lat
+ var_lat = area_lat(:, 10)
+ delete(area_lat)
+ end if
+
+ if (iscoord(var_lon_avg, "lat")) then
+ plot(iii) = gsn_csm_xy(wks, var_lon_avg&lat, var_lon_avg, res)
+ else
+ plot(iii) = gsn_csm_xy(wks, var_lat, var_lon_avg, res)
+ delete(var_lat)
+ end if
+ nc_filename = config_user_info@work_dir + "russell18jgr_fig2_" \
+ + var_name(iii) + "_" + annots(iii) + "_" \
+ + (start_years_data(iii)) + "-" + (end_years_data(iii))+".nc"
+
+ ncdf_outfile = ncdf_write(var_lon_avg, nc_filename)
+ delete(var_lon_avg)
+ delete(dataset)
+
+ if (iii .ne. 0) then
+ overlay(plot(0), plot(iii))
+ end if
+
+ end do
+
+ draw(plot(0))
+
+ legend = create "Legend" legendClass wks
+ "vpXF" : 0.62 ; orientation on page
+ "vpYF" : 0.925
+ "vpWidthF" : 0.5 ; width
+ "vpHeightF" : 0.725 ; height
+ "lgPerimOn" : False ; no perimeter
+ "lgItemCount" : dimsizes(annots) ; how many
+ "lgLineLabelStrings" : annots ; labels
+ "lgLabelsOn" : False ; no default labels
+ "lgLineLabelFontHeightF" : 0.00825 ; font height
+ "lgDashIndexes" : dashes ; line patterns
+ "lgLineColors" : colors
+ "lgLeftMarginF" : -0.09
+ "lgMonoLineLabelFontColor" : True ; one label color
+ "lgLineDashSegLenF" : 0.11
+ end create
+ draw(legend)
+
+ frame(wks)
+
+ ; Call provenance logger
+ log_provenance(ncdf_outfile, \
+ plotpath + "." + output_type(), \
+ "Russell et al 2018 figure 2", \
+ "mean", \
+ "sh", \
+ "geo", \
+ "russell_joellen", \
+ "russell18jgr", \
+ infile_path)
+end
diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig3b-2.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig3b-2.ncl
new file mode 100644
index 0000000000..add52a2b7a
--- /dev/null
+++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig3b-2.ncl
@@ -0,0 +1,307 @@
+; #############################################################################
+; russell_figure3b-2.ncl (Polar Front)
+;
+; Russell, J.L., et al., 2018, J. Geophysical Research – Oceans, 123,\
+; 3120-3143. https://doi.org/10.1002/2017JC013461 (figure 3b - polar fronts)
+;
+; Author: Pandde Amarjiit (University of Arizona, USA)
+; Russell Joellen (University of Arizona, USA)
+; Goodman Paul (University of Arizona, USA)
+;
+; #############################################################################
+; Description
+; - Uses original grid (no regridding).
+; - Changes i-j and x-y coordinates to lat-lon coordinates
+; - Takes the time average of temperature
+; - Makes sure the temperature values are in Kelvin
+; - extracts the temperature from the surface to the depth closest to
+;   and less than 200m
+; - takes the minimum temperature of all levels at a grid point
+; - arranges the temperature array in ascending order of lon (var_new)
+; - creates a contour map of the ascending sorted temperature
+; (no actual plotting)
+; - extracts the position of the 2 degree Celsius isotherm from the above plot
+; - increases the lon range from 0 - 360 to -360 to 360
+; - plots the isoline after removing stray points from the isoline
+; - overlays all the plots on the first plot
+; - creates legend
+;
+; Required diag_script_info attributes
+;
+; - styleset : CMIP5 - default
+;
+;
+; Required preprocessor attributes:
+; - none (no preprocessing required)
+; in the recipe do not keep a preprocessor
+;
+; Caveats
+;
+; - IPSL models do not work as they have 360+ range in longitude.
+; - CNRM models give a plotting warning, but the plot looks fine.
+;
+;
+; Modification history
+;
+; 20190510 - russell_joellen, pandde_amarjiit - written and
+; implemented for ESMValTool v2.
+;
+; #############################################################################
+
+load "$diag_scripts/../interface_scripts/interface.ncl" ; load metadata
+load "$diag_scripts/shared/plot/style.ncl" ; load plot style functions
+load "$diag_scripts/shared/plot/aux_plotting.ncl"
+
+begin
+
+ enter_msg(DIAG_SCRIPT, "")
+ var0 = variable_info[0]@short_name
+ info_items = select_metadata_by_name(input_file_info, var0)
+ datasetnames = metadata_att_as_array(info_items, "dataset")
+ start_years_data = metadata_att_as_array(info_items, "start_year")
+ end_years_data = metadata_att_as_array(info_items, "end_year")
+ inputfile_paths = metadata_att_as_array(info_items, "filename")
+ nDatasets = ListCount(info_items)
+
+end
+
+begin
+ ; Set annotations
+ annots = project_style(input_file_info, diag_script_info, "annots")
+ colors = project_style(input_file_info, diag_script_info, "colors")
+ dashes = project_style(input_file_info, diag_script_info, "dashes")
+ thicks = project_style(input_file_info, diag_script_info, "thicks")
+
+ plotpath = config_user_info@plot_dir \
+ + "russell18jgr_fig3_Polar-Front-position_" \
+ + sprinti("%0.4i", min(toint(start_years_data))) + "-" \
+ + sprinti("%0.4i", max(toint(end_years_data)))
+
+ system("mkdir -p " + config_user_info@work_dir)
+ system("mkdir -p " + config_user_info@plot_dir)
+
+ wks = gsn_open_wks(output_type(), plotpath)
+ plot = new(nDatasets, graphic)
+ plot1 = new(nDatasets, graphic)
+
+ res = True ; resources for the xy (isoline) plot
+ res@gsnDraw = False
+ res@gsnFrame = False
+ res@trYMaxF = -40.
+ res@trYMinF = -70.
+ res@trXMaxF = 300.
+ res@trXMinF = -60.
+ res@vpXF = 0.1 ; orientation on page
+ res@vpYF = 0.875
+ res@vpWidthF = 0.625 ; width
+ res@vpHeightF = 0.815 ; height
+ res@gsnLeftString = "Russell et al -2018 - Figure 3 b"
+ res@tmXBMinorPerMajor = 1
+ res@tmXBMaxTicks = 15
+ res@tmXBTickStartF = -60.
+ res@tmXBTickSpacingF = 30.
+ res@tmXBTickEndF = 300.
+ res@tmYLTickStartF = -70.
+ res@tmYLTickSpacingF = 2.
+ res@tmYLTickEndF = -40.
+ res@tmYLMinorPerMajor = 1
+ res@tmYLMaxTicks = 20
+ res@tmYLMode = "Manual"
+ res@tmXBLabelFontHeightF = 0.0125
+ res@tmYLLabelFontHeightF = 0.0145
+ res@gsnRightStringFontHeightF = 15.
+ res@gsnLeftStringFontHeightF = 15.
+ res@tiMainString = " Polar Fronts" + + res1 = True + res1@gsnDraw = False + res1@gsnFrame = False + + do iii = 0, nDatasets - 1 ; looping over all the models + res@xyDashPatterns = dashes(iii) + res@xyLineColors = colors(iii) + res@xyExplicitLegendLabels = annots(iii) + res@xyLineThicknessF = thicks(iii) + + dataset_time = read_data(info_items[iii]) ; reading data + if (max(dataset_time) .gt. 273.0) then + ; making sure the temperature is in K + dataset_time@units = "K" + if (min(dataset_time) .eq. 0) then + dataset_time = where(dataset_time .eq.0, dataset_time@_FillValue,\ + dataset_time) + end if + else + dataset_time = dataset_time + 273.0 + dataset_time@units = "converted to K" + end if + dataset_lev = dim_avg_n_Wrap(dataset_time, 0) ; taking time average + delete(dataset_time) + ; extracting temperatures between surface and 200m depth + lev_data = dataset_lev&lev + lev_a = closest_val(200.0, lev_data) + + if (lev_data(lev_a) .lt. 200.0) then + dataset_1 = dataset_lev(0:lev_a, :, :) + else + dataset_1 = dataset_lev(0:lev_a - 1, :, :) + end if + ; dataset_1 = dataset_lev(lev_new) + delete(dataset_lev) + delete(lev_data) + + if (iscoord(dataset_1, "i")) then ; changes i-j coordinates to lat-lon + delete(dataset_1&i) + delete(dataset_1&j) + infile_path = inputfile_paths(iii) + infile_iii = addfile(infile_path, "r") + area_lon = infile_iii->lon + area_lat = infile_iii->lat + dataset_1!1 = "lat" + dataset_1!2 = "lon" + dataset_1&lat = tofloat(area_lat(:, 10)) + dataset_1&lon = tofloat(area_lon(10, :)) + delete(area_lat) + delete(area_lon) + end if + + if(.not.(iscoord(dataset_1, "lat"))) then + infile_path = inputfile_paths(iii) + infile_iii = addfile(infile_path, "r") + area_lon = infile_iii->lon + area_lat = infile_iii->lat + a1 = closest_val(-40.0, area_lat(:, 0)) + a2 = closest_val(-80.0, area_lat(:, 0)) + ; sort the temperature variable in ascending order of lon + ip0 = dim_pqsort(area_lon(0, :), 1) + dataset = dim_min_n_Wrap(dataset_1(:, a1:a2, ip0), 0) + ; take the min temperature at all grid points over depth + dataset@lon2d = area_lon(a1:a2, ip0) + ; adding lat lon arrays for plotting + dataset@lat2d = area_lat(a1:a2, ip0) + delete(area_lat) + delete(area_lon) + else + a1 = closest_val(-40.0, dataset_1&lat) + a2 = closest_val(-80.0, dataset_1&lat) + ; sort the temperature variable in ascending order of lon + ip0 = dim_pqsort(dataset_1&lon, 1) + ; take the min temperature at all grid points over depth + dataset = dim_min_n_Wrap(dataset_1(:, a1:a2, ip0), 0) + end if + + delete(ip0) + delete(dataset_1) + dataset@var = var0 + dataset@diag_script = "russell18jgr-fig3b-2.ncl" + plot1(iii) = gsn_csm_contour(wks, dataset, res1) + ; create the contour plot (no actual plotting) + delete(dataset) + + nc_filename = config_user_info@work_dir + "russell_figure2_" + var0 \ + + "_" + annots(iii) + "_" + (start_years_data(iii)) + "-" \ + + (end_years_data(iii)) + ".nc" + + isoline = get_isolines(plot1(iii), 275.15) + ; extract the position of 2 degree celsius isotherm + b = isoline@start_point(0) + e = b + isoline@n_points(0) - 1 + ; NCL picks a few isolated points on the isoline in some of the models + ; we are discarding them if the difference in lon is more than 20 degrees + + if (isoline@segment_count .gt.1) then + do segmod = 1, isoline@segment_count - 1 + start_point1 = isoline@start_point(segmod) + end_point1 = isoline@start_point(segmod-1) \ + + isoline@n_points(segmod-1) - 1 + if (abs(isoline(1, start_point1) - isoline(1, e)) .lt. 
+          e = e + isoline@n_points(segmod)
+        end if
+      end do
+    end if
+    last_index = e-b
+
+    y = new((3 * (e + 1 - b)), typeof(isoline))
+    x = new((3 * (e + 1 - b)), typeof(isoline))
+    x@units = "Degrees_east"
+    y@units = "Degrees_north"
+
+    ; making the isoline repeat 3 times to make the lon range span -360 to 360
+    if (isoline(1, 11) .gt. isoline(1, 1)) then
+      y(0 : last_index) = isoline(0, b:e)
+      y(last_index+1 : 2*last_index+1) = isoline(0, b:e)
+      y(2*last_index+2 : 3*last_index+2) = isoline(0, b:e)
+
+      x(0 : last_index) = isoline(1, b:e) - 360
+      x(last_index+1 : 2*last_index + 1) = isoline(1, b:e)
+      x(2*last_index+2 : 3*last_index + 2) = isoline(1, b:e) + 360
+
+    else
+      y(0 : last_index) = isoline(0, b:e:-1)
+      y(last_index+1 : 2*last_index + 1) = isoline(0, b:e:-1)
+      y(2*last_index+2 : 3*last_index + 2) = isoline(0, b:e:-1)
+
+      x(0 : last_index) = isoline(1, b:e:-1) - 360
+      x(last_index + 1 : 2*last_index + 1) = isoline(1, b:e:-1)
+      x(2*last_index + 2 : 3*last_index + 2) = isoline(1, b:e:-1) + 360
+
+    end if
+
+    plot(iii) = gsn_csm_xy(wks, x, y, res)
+
+    if (iii .ne. 0) then
+      overlay(plot(0), plot(iii))
+    end if
+
+    out_var = new((/2, dimsizes(x) /), float)
+    ; variable to be saved in the netCDF file
+    out_var(0, :) = x
+    out_var(1, :) = y
+    out_var!0 = "points"
+    out_var!1 = "xy"
+    out_var&points = (/0, 1/)
+    out_var&xy = ispan(0, (dimsizes(x) - 1), 1)
+    out_var@var = "position_of_polar_front"
+    out_var@diag_script = DIAG_SCRIPT
+    out_var@description = "out_var(0,:) are the lon positions of the " \
+                          + "isoline and out_var(1,:) are the lat " \
+                          + "positions of the isoline - (275.15K)"
+    ncdf_outfile = ncdf_write(out_var, nc_filename)
+    delete(isoline)
+    delete(x)
+    delete(y)
+    delete(b)
+    delete(e)
+    delete(last_index)
+    delete(out_var)
+  end do
+
+  draw(plot(0))
+  legend = create "Legend" legendClass wks
+    "vpXF"                     : 0.625   ; orientation on page
+    "vpYF"                     : 0.925
+    "vpWidthF"                 : 0.5     ; width
+    "vpHeightF"                : 0.725   ; height
+    "lgPerimOn"                : False   ; no perimeter
+    "lgItemCount"              : dimsizes(annots)  ; how many
+    "lgLineLabelStrings"       : annots  ; labels
+    "lgLabelsOn"               : False   ; no default labels
+    "lgLineLabelFontHeightF"   : 0.0085  ; font height
+    "lgDashIndexes"            : dashes  ; line patterns
+    "lgLineColors"             : colors
+    "lgMonoLineLabelFontColor" : True    ; one label color
+  end create
+  draw(legend)
+
+  frame(wks)
+
+  log_provenance(ncdf_outfile, \
+                 plotpath + "." + output_type(), \
+                 "Russell et al 2018 figure 3b part 2", \
+                 "mean", \
+                 "sh", \
+                 "geo", \
+                 "russell_joellen", \
+                 "russell18jgr", \
+                 inputfile_paths)
+
+end
diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig3b.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig3b.ncl
new file mode 100644
index 0000000000..41ee0c3194
--- /dev/null
+++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig3b.ncl
@@ -0,0 +1,297 @@
+; #############################################################################
+; russell18jgr-fig3b.ncl (Subantarctic Front)
+;
+; Russell, J.L., et al., 2018, J. Geophysical Research – Oceans, 123, 3120-3143
+; https://doi.org/10.1002/2017JC013461 (figure 3b - Subantarctic Front)
+;
+; Author: Pandde Amarjiit (University of Arizona, USA)
+;         Russell Joellen (University of Arizona, USA)
+;         Goodman Paul (University of Arizona, USA)
+;
+; #############################################################################
+; Description
+; - Uses original grid (no regridding).
+; - Changes i-j and x-y coordinates to lat-lon coordinates
+; - Takes the time average of temperature
+; - Makes sure the temperature values are in Kelvin
+; - extracts the temperature at the level closest to, and shallower
+;    than, 400m depth
+; - arranges the temperature array in ascending order of lon (dataset)
+; - creates a contour map of the ascending sorted temperature
+;    (no actual plotting of the contour map)
+; - extracts the position of the 4 degree Celsius isotherm from the above plot
+; - increases the lon range from 0 - 360 to -360 to 360
+; - plots the isoline after removing stray points from it
+; - overlays all the plots on the first plot
+; - creates legend
+;
+; Required diag_script_info attributes
+;
+; - styleset : CMIP5 - default
+; - ncdf : default
+;
+; Required preprocessor attributes (no_preprocessor)
+; - none (no preprocessing required)
+;   (do not assign a preprocessor in the variable section of the recipe)
+;
+; Caveats
+;
+; - IPSL models do not work as they have 360+ range in longitude.
+; - CNRM models give a plotting warning, but the plot looks fine.
+;
+;
+; Modification history
+;
+; 20190510 - russell_joellen, pandde_amarjiit - written and
+;            implemented for ESMValTool v2
+;
+; #############################################################################
+
+load "$diag_scripts/../interface_scripts/interface.ncl"  ; load metadata
+load "$diag_scripts/shared/plot/style.ncl"  ; load plot style functions
+load "$diag_scripts/shared/plot/aux_plotting.ncl"
+
+begin
+  enter_msg(DIAG_SCRIPT, "")
+  var0 = variable_info[0]@short_name
+  info_items = select_metadata_by_name(input_file_info, var0)
+  datasetnames = metadata_att_as_array(info_items, "dataset")
+  start_years_data = metadata_att_as_array(info_items, "start_year")
+  end_years_data = metadata_att_as_array(info_items, "end_year")
+  inputfile_paths = metadata_att_as_array(info_items, "filename")
+  nDatasets = ListCount(info_items)
+end
+
+begin
+
+  ; Set annotations
+  annots = project_style(input_file_info, diag_script_info, "annots")
+  colors = project_style(input_file_info, diag_script_info, "colors")
+  dashes = project_style(input_file_info, diag_script_info, "dashes")
+  thicks = project_style(input_file_info, diag_script_info, "thicks")
+
+  plotpath = config_user_info@plot_dir + "Russell18jgr_fig3_Subantarctic" \
+    + "-Fronts_" + sprinti("%0.4i", min(toint(start_years_data))) \
+    + "-" + sprinti("%0.4i", max(toint(end_years_data)))
+
+  system("mkdir -p " + config_user_info@work_dir)
+  system("mkdir -p " + config_user_info@plot_dir)
+
+  wks = gsn_open_wks(output_type(), plotpath)
+  plot = new(nDatasets, graphic)
+  plot1 = new(nDatasets, graphic)
+  res = True
+  res@gsnDraw = False
+  res@gsnFrame = False
+  res@trYMaxF = -40.
+  res@trYMinF = -70.
+  res@trXMaxF = 300.
+  res@trXMinF = -60.
+  res@vpXF = 0.1                  ; orientation on page
+  res@vpYF = 0.875
+  res@vpWidthF = 0.625            ; width
+  res@vpHeightF = 0.815           ; height
+  res@gsnLeftString = "Russell et al -2018 - Figure 3 b"
+  res@tmXBMinorPerMajor = 1
+  res@tmXBMaxTicks = 15
+  res@tmXBTickStartF = -60.
+  res@tmXBTickSpacingF = 30.
+  res@tmXBTickEndF = 300.
+  res@tmYLTickStartF = -70.
+  res@tmYLTickSpacingF = 2.
+  res@tmYLTickEndF = -40.
+  res@tmYLMinorPerMajor = 1
+  res@tmYLMaxTicks = 20
+  res@tmYLMode = "Manual"
+  res@tmXBLabelFontHeightF = 0.0125
+  res@tmYLLabelFontHeightF = 0.0145
+  res@gsnRightStringFontHeightF = 15.
+  res@gsnLeftStringFontHeightF = 15.
+  res@tiMainString = " Subantarctic Fronts"
+
+  res1 = True
+  res1@gsnDraw = False
+  res1@gsnFrame = False
+
+  do iii = 0, nDatasets - 1
+    dataset_time = read_data(info_items[iii])
+    if (max(dataset_time) .gt. 273.0) then
+      dataset_time@units = "K"
+      if (min(dataset_time) .eq. 0) then  ; some models use 0 as fill value
+        dataset_time = where(dataset_time .eq.0, dataset_time@_FillValue, \
+                             dataset_time)
+      end if
+    else
+      dataset_time = dataset_time + 273.0
+      dataset_time@units = "converted to K"
+    end if
+
+    dataset_lev = dim_avg_n_Wrap(dataset_time, 0)
+    delete(dataset_time)
+    lev_data = dataset_lev&lev
+    lev_a = closest_val(400.0, lev_data)
+
+    if (lev_data(lev_a) .lt. 400.0) then
+      lev_new = lev_data(lev_a)
+      dataset_1 = dataset_lev(lev_a, :, :)
+    else
+      lev_new = lev_data(lev_a - 1)
+      dataset_1 = dataset_lev(lev_a - 1, :, :)
+    end if
+    delete(dataset_lev)
+    delete(lev_data)
+    delete(lev_new)
+
+    res@xyDashPatterns = dashes(iii)
+    res@xyLineColors = colors(iii)
+    res@xyExplicitLegendLabels = annots(iii)
+    res@xyLineThicknessF = thicks(iii)
+
+    if (iscoord(dataset_1, "i")) then  ; changes i-j coordinates to lat-lon
+      delete(dataset_1&i)
+      delete(dataset_1&j)
+      infile_path = inputfile_paths(iii)
+      infile_iii = addfile(infile_path, "r")
+      area_lon = infile_iii->lon
+      area_lat = infile_iii->lat
+      dataset_1!0 = "lat"
+      dataset_1!1 = "lon"
+      dataset_1&lat = tofloat(area_lat(:, 10))
+      dataset_1&lon = tofloat(area_lon(10, :))
+      delete(area_lat)
+      delete(area_lon)
+    end if
+
+    if(.not.(iscoord(dataset_1, "lat"))) then
+      infile_path = inputfile_paths(iii)
+      infile_iii = addfile(infile_path, "r")
+      area_lon = infile_iii->lon
+      area_lat = infile_iii->lat
+      a1 = closest_val(-35.0, area_lat(:, 0))
+      a2 = closest_val(-80.0, area_lat(:, 0))
+      ; sort the temperature variable in ascending order of lon
+      ip0 = dim_pqsort(area_lon(0, :), 1)
+      dataset = dataset_1(a1:a2, ip0)
+      dataset@lon2d = area_lon(a1:a2, ip0)
+      dataset@lat2d = area_lat(a1:a2, ip0)
+      delete(area_lat)
+      delete(area_lon)
+    else
+      a1 = closest_val(-35.0, dataset_1&lat)
+      a2 = closest_val(-80.0, dataset_1&lat)
+      ; sort the temperature variable in ascending order of lon
+      ip0 = dim_pqsort(dataset_1&lon, 1)
+      dataset = dataset_1(a1:a2, ip0)
+    end if
+    delete(ip0)
+    delete(dataset_1)
+
+    plot1(iii) = gsn_csm_contour(wks, dataset, res1)  ; create the plot
+    delete(dataset)
+
+    nc_filename = config_user_info@work_dir + "russell18jgr_fig3b_" \
+                  + "subantarctic-front-position_" + annots(iii) + "_" \
+                  + (start_years_data(iii)) + "-" + (end_years_data(iii)) + ".nc"
+
+    isoline = get_isolines(plot1(iii), 277.15)
+    b = isoline@start_point(0)
+    e = b + isoline@n_points(0) - 1
+
+    ; NCL picks a few isolated points on the isoline in some of the models;
+    ; we discard them if the difference in lon is more than 20 degrees
+
+    if (isoline@segment_count .gt.1) then
+      do segmod = 1, isoline@segment_count - 1
+        start_point1 = isoline@start_point(segmod)
+        end_point1 = isoline@start_point(segmod-1) \
+                     + isoline@n_points(segmod-1) - 1
+
+        if (abs(isoline(1, start_point1) - isoline(1, e)) .lt. 20) then
+          e = e + isoline@n_points(segmod)
+        end if
+      end do
+    end if
+    last_index = e-b
+
+    y = new((3*(e + 1 - b)), typeof(isoline))
+    x = new((3*(e + 1 - b)), typeof(isoline))
+    x@units = "Degrees_east"
+    y@units = "Degrees_north"
+
+    ; making the isoline repeat 3 times to make the lon range span -360 to 360
+    if (isoline(1, 11) .gt. isoline(1, 1)) then
+      y(0 : last_index) = isoline(0, b:e)
+      y(last_index+1 : 2*last_index + 1) = isoline(0, b:e)
+      y(2*last_index+2 : 3*last_index + 2) = isoline(0, b:e)
+
+      x(0 : last_index) = isoline(1, b:e) - 360
+      x(last_index+1 : 2*last_index + 1) = isoline(1, b:e)
+      x(2*last_index+2 : 3*last_index + 2) = isoline(1, b:e) + 360
+
+    else
+      y(0 : last_index) = isoline(0, b:e:-1)
+      y(last_index+1 : 2*last_index + 1) = isoline(0, b:e:-1)
+      y(2*last_index+2 : 3*last_index + 2) = isoline(0, b:e:-1)
+
+      x(0 : last_index) = isoline(1, b:e:-1) - 360
+      x(last_index+1 : 2*last_index + 1) = isoline(1, b:e:-1)
+      x(2*last_index+2 : 3*last_index + 2) = isoline(1, b:e:-1) + 360
+
+    end if
+
+    plot(iii) = gsn_csm_xy(wks, x, y, res)
+
+    if (iii .ne. 0) then
+      overlay(plot(0), plot(iii))
+    end if
+
+    out_var = new((/2, dimsizes(x) /), float)
+    out_var(0, :) = x
+    out_var(1, :) = y
+    out_var!0 = "points"
+    out_var!1 = "xy"
+    out_var&points = (/0, 1/)
+    out_var&xy = ispan(0, (dimsizes(x) - 1), 1)
+    out_var@var = "position_of_subantarctic_front"
+    out_var@diag_script = DIAG_SCRIPT
+    out_var@description = "out_var(0,:) are the lon positions of the " \
+                          + "isoline and out_var(1,:) are the lat " \
+                          + "positions of the isoline - (277.15K)"
+    ncdf_outfile = ncdf_write(out_var, nc_filename)
+    delete(isoline)
+    delete(x)
+    delete(y)
+    delete(b)
+    delete(e)
+    delete(last_index)
+    delete(out_var)
+  end do
+  draw(plot(0))
+
+  legend = create "Legend" legendClass wks
+    "vpXF"                     : 0.625   ; orientation on page
+    "vpYF"                     : 0.925
+    "vpWidthF"                 : 0.5     ; width
+    "vpHeightF"                : 0.725   ; height
+    "lgPerimOn"                : False   ; no perimeter
+    "lgItemCount"              : dimsizes(annots)  ; how many
+    "lgLineLabelStrings"       : annots  ; labels
+    "lgLabelsOn"               : False   ; no default labels
+    "lgLineLabelFontHeightF"   : 0.0085  ; font height
+    "lgDashIndexes"            : dashes  ; line patterns
+    "lgLineColors"             : colors
+    "lgMonoLineLabelFontColor" : True    ; one label color
+  end create
+  draw(legend)
+
+  frame(wks)
+
+  log_provenance(ncdf_outfile, \
+                 plotpath + "." + output_type(), \
+                 "Russell et al 2018 figure 3b", \
+                 "mean", \
+                 "sh", \
+                 "geo", \
+                 "russell_joellen", \
+                 "russell18jgr", \
+                 inputfile_paths)
+
+end
diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig4.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig4.ncl
new file mode 100644
index 0000000000..b03b81959f
--- /dev/null
+++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig4.ncl
@@ -0,0 +1,393 @@
+; ############################################################################
+; russell18jgr_fig4.ncl
+;
+; Russell, J.L., et al., 2018, J. Geophysical Research - Oceans, 123,
+; 3120-3143. https://doi.org/10.1002/2017JC013461 (figure 4)
+;
+; Author: Pandde Amarjiit (University of Arizona, USA)
+;         Russell Joellen (University of Arizona, USA)
+;         Goodman Paul (University of Arizona, USA)
+;
+; ############################################################################
+; Description
+; - Uses original grid (no regridding).
+; - Changes i-j and x-y coordinates to lat-lon coordinates
+; - Uses the time averaged data from the lon closest to 69W (291E)
+; - Converts units if the unitCorrectionalFactor attribute is present
+; - Calculates total transport from the volcello file
+; - If the volcello file is not found, skips directly to plotting
+; - Volcello (m^3) is divided by the horizontal distance between two data
+;    points along the same latitude, which results in the east-west
+;    cross-section area of each grid cell
+; - total transport is the sum of the transport of each cell between
+;    75S and 49S
+; - Panels all the plots together
+;
+; Required diag_script_info attributes
+;
+; - styleset : CMIP5 - default
+; - ncdf : default
+;
+; Optional diag_script_info attributes
+;
+; - max_vert, max_hori: allowed dimensions of a panel page
+;    (number of plots per column/row)
+;
+; - unitCorrectionalFactor : 100 (to convert from m/s to cm/s)
+; - new_units : "cm/s"
+;
+; Required preprocessor attributes (configured to recreate figure 4)
+; - None (no preprocessing required)
+;   (do not assign a preprocessor in the variable section of the recipe)
+;
+;
+; Caveats
+;
+; - MIROC-ESM and BNU-ESM do not work as the depth variable is not called lev
+; - total transport is very small in the case of NorESM1-M and -ME, as the
+;    volcello values look incorrect (very small)
+; - MRI models have 0 at land points instead of missing values,
+;    so no grey region is produced on land.
+;
+; Modification history
+;
+; 20190610 - russell_joellen, pandde_amarjiit - written and implemented
+;            for ESMValTool v2
+;
+; #############################################################################
+
+load "$diag_scripts/../interface_scripts/interface.ncl"  ; load metadata
+load "$diag_scripts/shared/plot/style.ncl"  ; load plot style functions
+load "$diag_scripts/shared/plot/contour_maps.ncl"  ; load plot function
+load "$diag_scripts/shared/plot/aux_plotting.ncl"
+
+begin
+  enter_msg(DIAG_SCRIPT, "")
+
+  att = True
+  att@mip = "Omon"
+  info = select_metadata_by_atts(input_file_info, att)  ; variable
+  var0 = info[0]@short_name
+  info_items = select_metadata_by_name(input_file_info, var0)
+  inputfile_paths = metadata_att_as_array(info_items, "filename")
+  start_years_data = metadata_att_as_array(info_items, "start_year")
+  end_years_data = metadata_att_as_array(info_items, "end_year")
+  nDatasets = ListCount(info_items)
+  annots = project_style(input_file_info, diag_script_info, "annots")
+  uo_dataset_name = metadata_att_as_array(info_items, "dataset")
+
+  att@mip = "fx"
+  volinfo = select_metadata_by_atts(input_file_info, att)  ; area
+  voldatasets = metadata_att_as_array(volinfo, "dataset")
+  volfile_paths = metadata_att_as_array(volinfo, "filename")
+  delete(att)
+
+end
+
+begin
+
+  colors_new = (/ (/15.0, 69, 168/), (/36, 118, 205/), (/57, 162, 245/), \
+                  (/96, 190, 250/), (/131, 212, 253/), (/146, 230, 253/), \
+                  (/161, 241, 255/), (/188, 246, 255/), (/205, 226, 229/), \
+                  (/234, 231, 211/), (/251, 246, 190/), (/255, 232, 154/), \
+                  (/252, 224, 97/), (/254, 173, 26/), (/251, 136, 10/), \
+                  (/238, 91, 12/), (/209, 49, 7/), (/178, 0, 0/) /)
+  colors_new = colors_new / 256.0
+
+  plotpath = config_user_info@plot_dir + "Russell18jgr-fig4_" \
+    + sprinti("%0.4i", min(toint(start_years_data))) + "-" \
+    + sprinti("%0.4i", max(toint(end_years_data)))
+
+  system("mkdir -p " + config_user_info@work_dir)
+  system("mkdir -p " + config_user_info@plot_dir)
+
+  file_type = output_type()
+  wks = gsn_open_wks(file_type, plotpath)
+  wks@fullname = plotpath
+  plots = new(nDatasets, graphic)
+
+  ; panel parameters
+  nvert = 1  ; default
+  nhori = 1  ; default
+  if (isatt(diag_script_info, "max_vert")) then
+    nvert = toint(diag_script_info@max_vert)
+    ; Maximum allowed number of plots per page (vertical)
+  end if
+
+  if (isatt(diag_script_info, "max_hori")) then
+    nhori = toint(diag_script_info@max_hori)
+    ; Maximum allowed number of plots per page (horizontal)
+  end if
+
+  warn_ij = True
+
+  res = True                                ; plot mods desired
+  res@gsnDraw = False                       ; draw plot
+  res@gsnFrame = False                      ; advance frame
+  res@cnFillOn = True                       ; turn on color fill
+  res@cnLinesOn = True                      ; turn on contour lines
+  res@cnLineLabelsOn = True                 ; turn on contour labels
+  res@cnLineLabelFontHeightF = 0.006
+  res@cnMissingValFillColor = "grey44"
+  res@cnFillPalette = colors_new
+  res@cnLevelSelectionMode = "ManualLevels" ; set manual contour levels
+  res@cnMinLevelValF = -40.0                ; set min contour level
+  res@cnMaxLevelValF = 40.0                 ; set max contour level
+  res@cnLevelSpacingF = 5.0                 ; set contour spacing
+  res@lbLabelBarOn = True                   ; color bar
+  res@lbOrientation = "vertical"            ; vertical label bars
+  res@pmLabelBarOrthogonalPosF = -0.04      ; shift left-right
+  res@lbBoxMinorExtentF = 0.2               ; slim width
+  res@tiXAxisString = "Latitude"            ; title
+  res@tiYAxisString = "Depth (m)"
+  res@gsnCenterString = ""
+  res@gsnLeftStringFontHeightF = 13.
+  res@gsnRightStringFontHeightF = 13.
+  res@vpWidthF = 0.60                       ; resize
+  res@vpHeightF = 0.45
+  res@trXMinF = -74.
+  res@trXMaxF = -50.
+  res@tmXBTickSpacingF = 2.0                ; tick mark interval
+  res@tmXBMinorPerMajor = 1
+  res@tmXBTickStartF = -74.
+  res@tmXBTickEndF = -50.
+  res@tmXBMode = "Manual"                   ; adjust X axis
+  res@gsnXAxisIrregular2Linear = True       ; adjust Y axis
+  res@gsnYAxisIrregular2Linear = True
+  res@trYReverse = True                     ; reverse Y axis
+  res@tiMainFontHeightF = 0.022             ; fontsize of the main title
+  res@txFontHeightF = 0.02                  ; fontsize of the subtitles
+  res@tmXBLabelFontHeightF = 0.01           ; fontsize of tickmark labels
+  res@tmYLLabelFontHeightF = 0.01           ; fontsize of tickmark labels
+  res@tiXAxisFontHeightF = 0.015
+  res@tiYAxisFontHeightF = 0.017
+  res@lbBoxEndCapStyle = "TriangleBothEnds"
+  res@gsnContourZeroLineThicknessF = 2  ; doubles thickness of zero contour
+  res@gsnContourNegLineDashPattern = 1  ; dash pattern 1 for neg. contours
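+  ; Worked example of the transport calculation in the loop below
+  ; (illustrative numbers, not taken from any dataset): for a cell with
+  ; volcello = 5.0e11 m^3 and a zonal grid spacing of 1 degree,
+  ; drlon = 1 * 6.37e06 * 0.0174533 ~ 1.11e05 m, so the cross-section
+  ; seen by the zonal current is 5.0e11 / 1.11e05 ~ 4.5e06 m^2; with
+  ; uo = 0.1 m/s that cell carries 0.1 * 4.5e06 = 4.5e05 m^3/s, i.e.
+  ; 0.45 Sv after dividing by 10^6. The later division by cos(lat)
+  ; corrects the zonal spacing for the convergence of meridians.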
+
+  do iii = 0, nDatasets - 1
+
+    dataset_all_time = read_data(info_items[iii])
+    dataset = dim_avg_n_Wrap(dataset_all_time, 0)
+    delete(dataset_all_time)
+    dataset@var = var0
+    dataset@diag_script = DIAG_SCRIPT
+    dataset@model_name = annots(iii)
+
+    strUnits = " (" + dataset@units + ")"  ; to show units in title
+    if (isatt(diag_script_info, "new_units")) then
+      strUnits = " (" + diag_script_info@new_units + ") "
+    end if
+
+    dataset_compare = uo_dataset_name(iii) + "$"
+    fx_ind = str_match_ind_regex(voldatasets, dataset_compare)
+    if (ismissing(fx_ind)) then
+      fx_var = new(1, float)
+      fx_var = fx_var@_FillValue
+    else
+      fx_var = read_data(volinfo[fx_ind])
+    end if
+
+    if (iscoord(dataset, "i")) then  ; changes i-j coordinates to lat-lon
+
+      delete(dataset&i)
+      delete(dataset&j)
+      infile_path = inputfile_paths(iii)
+      infile_iii = addfile(infile_path, "r")
+      area_lon = infile_iii->lon
+      area_lat = infile_iii->lat
+      dataset!1 = "lat"
+      dataset!2 = "lon"
+      dataset&lat = tofloat(area_lat(:, 10))
+      dataset&lon = tofloat(area_lon(10, :))
+      delete(area_lat)
+      delete(area_lon)
+      warn_ij = True
+
+    end if
+
+    if(.not.(iscoord(dataset, "lat"))) then
+      infile_path = inputfile_paths(iii)
+      infile_iii = addfile(infile_path, "r")
+      area_lon = infile_iii->lon
+      area_lat = infile_iii->lat
+      var_lat = area_lat(:, 10)
+      var_lon = area_lon(10, :)
+
+      delete(area_lat)
+      delete(area_lon)
+
+      if(max(var_lon) .gt. 291) then  ; finds the closest lon grid point to 69W
+        a = closest_val(291.0, var_lon)
+      else
+        a = closest_val(-69.0, var_lon)
+      end if
+
+      exact_lon = var_lon(a)
+      var_final = dataset(:, :, a)  ; taking the lon value closest to 69W
+      b1 = closest_val(-76, var_lat)
+      b2 = closest_val(-48, var_lat)
+
+      if (.not. all((ismissing(fx_var)))) then
+
+        drlon = tofloat(abs(var_lon(a) - var_lon(a + 1))) * 6.37e06 * 0.0174533
+        ; drlon = horizontal distance between 2 data points along the
+        ; same latitude
+        volcello_3d = fx_var
+        volcello_3d = volcello_3d/drlon
+        if(abs(volcello_3d&lev(0)) .gt. abs(volcello_3d&lev(1))) then
+          volcello_2d = volcello_3d(::-1, :, a)
+          ; if volcello has decreasing lev
+        else
+          volcello_2d = volcello_3d(:, :, a)
+        end if
+        if (any(dimsizes(volcello_2d) .ne. dimsizes(var_final))) then
+          ; if the dimensions of volcello are not the same as uo
+          volcello_2d_new = linint1_n_Wrap(volcello_2d&rlat, volcello_2d, \
+                                           False, var_final&rlat, 0, 1)
+          delete(volcello_2d)
+          volcello_2d = volcello_2d_new
+          delete(volcello_2d_new)
+        end if
+
+        transportpercell = var_final * volcello_2d / (10 ^ 6)
+        ; to convert from m^3/s to sverdrups
+        copy_VarCoords(var_final, transportpercell)
+        transportperlat = dim_sum_n_Wrap(transportpercell, 0)
+        ; sum of transport on each lat
+        transportperlat = transportperlat / tofloat(cos(var_lat * 0.0174533))
+        ; to compensate for changing horizontal distance with lat
+        totaltransport = sum(transportperlat(b1:b2))
+        delete(drlon)
+        delete(volcello_2d)
+        delete(transportpercell)
+        delete(transportperlat)
+        delete(volcello_3d)
+
+      else
+        totaltransport = "missing volume file"
+        error_msg("w", "russell18jgr-fig4.ncl", " ", " volcello file for " \
+                  + annots(iii) + " not found in the recipe. If a volcello " \
+                  + "file is available, please copy the dataset name from " \
+                  + "the additional dataset section of uo to the additional " \
+                  + "dataset section of volcello. This is a warning, not " \
+                  + "an error; it just skips the transport calculations.")
+      end if
+      delete(var_lat)
+      delete(var_lon)
+
+    else
+      ; finds the closest lon grid point to 69W
+      if(max(dataset&lon) .gt. 291) then
+        a = closest_val(291.0, dataset&lon)
+      else
+        a = closest_val(-69.0, dataset&lon)
+      end if
+      var_final = dataset(:, :, a)
+      b1 = closest_val(-76, var_final&lat)
+      b2 = closest_val(-48, var_final&lat)
+      exact_lon = dataset&lon(a)  ; taking the lon value closest to 69W
+
+      if (.not. all(ismissing(fx_var))) then
+        dlon = tofloat(abs(dataset&lon(a) - dataset&lon(a+1))) * 6.37e06 \
+               * 0.0174533
+        ; dlon = horizontal distance between 2 data points along the
+        ; same latitude
+        volcello_3d = fx_var
+        volcello_3d = volcello_3d/dlon
+
+        if(abs(volcello_3d&lev(0)) .gt. abs(volcello_3d&lev(1))) then
+          volcello_2d = volcello_3d(::-1, :, a)
+          ; if volcello has decreasing lev
+        else
+          volcello_2d = volcello_3d(:, :, a)
+        end if
+        if (any(dimsizes(volcello_2d) .ne. dimsizes(var_final))) then
+          ; if the dimensions of volcello are not the same as uo
+          volcello_2d_new = linint1_n_Wrap(volcello_2d&lat, volcello_2d, \
+                                           False, var_final&lat, 0, 1)
+          delete(volcello_2d)
+          volcello_2d = volcello_2d_new
+          delete(volcello_2d_new)
+        end if
+        transportpercell = var_final * volcello_2d / (10 ^ 6)
+        ; to convert from m^3/s to sverdrups
+        transportperlat = dim_sum_n_Wrap(transportpercell, 0)
+        transportperlat = transportperlat \
+                          / tofloat(cos(dataset&lat * 0.0174533))
+        ; to compensate for changing horizontal distance with lat
+        totaltransport = sum(transportperlat(b1:b2))
+
+        delete(dlon)
+        delete(volcello_2d)
+        delete(transportpercell)
+        delete(transportperlat)
+        delete(volcello_3d)
+      else
+        totaltransport = "missing volume file"
+        error_msg("w", "russell18jgr-fig4.ncl", " ", " volcello file for " \
+                  + annots(iii) + " not found in the recipe. If a volcello " \
+                  + "file is available, please copy the dataset name from " \
+                  + "the additional dataset section of uo to the additional " \
+                  + "dataset section of volcello. This is a warning, not " \
+                  + "an error; it just skips the transport calculations.")
+      end if
+    end if
+    delete(fx_var)
+
+    if (exact_lon .gt. 200) then  ; editing the lon value for printing as W
+      exact_lon = 360.0 - exact_lon
+    else
+      exact_lon = exact_lon * -1
+    end if
+
+    if(typeof(totaltransport) .eq. "float") then
+      res@gsnLeftString = "Net transport : " \
+                          + sprintf("%4.1f", totaltransport) + "Sv"
+    else
+      res@gsnLeftString = " no volcello file "
+    end if
+
+    if (isatt(diag_script_info, "unitCorrectionalFactor")) then
+      ; to convert the units in datasets
+      var_final = var_final * diag_script_info@unitCorrectionalFactor
+    end if
+
+    res@gsnRightString = "Drake passage (" + sprintf("%4.2f", exact_lon) \
+                         + " W)"
+    res@tiMainString = "Section velocity of " + annots(iii) + strUnits
+
+    plots(iii) = gsn_csm_contour(wks, var_final(:, b1:b2), res)
+
+    nc_filename = config_user_info@work_dir + "russell18jgr_fig4_" + var0 \
+                  + "_" + annots(iii) + "_" + (start_years_data(iii)) + "-" \
+                  + (end_years_data(iii)) + ".nc"
+
+    ncdf_outfile = ncdf_write(dataset, nc_filename)
+    delete(dataset)
+    delete(var_final)
+    delete(exact_lon)
+    delete(totaltransport)
+
+  end do
+
+  ; Draw the panel
+  pres = True
+  pres@gsnPanelLabelBar = False
+  outfile = panelling(wks, plots, nvert, nhori, pres)
+  if (warn_ij) then
+    error_msg("w", "russell18jgr-fig4.ncl", " ", " All models having " \
+              + "i-j coordinates have been assigned lat-lon coordinates; " \
+              + "this is not an error and no action is needed by the user.")
+  end if
+
+  ; Call provenance logger
+  log_provenance(ncdf_outfile, \
+                 plotpath + "." + file_type, \
+                 "Russell et al 2018 figure 4", \
+                 "mean", \
+                 "sh", \
+                 "geo", \
+                 "russell_joellen", \
+                 "russell18jgr", \
+                 inputfile_paths)
+end
diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig5.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig5.ncl
new file mode 100644
index 0000000000..62a5a6be1a
--- /dev/null
+++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig5.ncl
@@ -0,0 +1,189 @@
+; #############################################################################
+; russell18jgr_fig5.ncl
+;
+; Russell, J.L., et al., 2018, J. Geophysical Research – Oceans, 123,
+; 3120-3143. https://doi.org/10.1002/2017JC013461 (figure 5)
+;
+; Author: Russell Joellen (University of Arizona, USA)
+;         Goodman Paul (University of Arizona, USA)
+;         Pandde Amarjiit (University of Arizona, USA)
+;         ESMVal project
+; #############################################################################
+; Description
+; - Uses original grid (no regridding).
+; - Creates a monthly climatology of sic data
+; - replaces grid cells with more than 15% sic
+;    with 1 for September & 2 for March
+; - Plots the March & September extent of 15%-or-higher sic
+;    as red and blue respectively
+; - Panels multiple plots: starts a new page after max_vert*max_hori
+;
+; Required preprocessor attributes (no_preprocessor)
+; - None (no preprocessing required)
+;   (do not assign a preprocessor in the variable section of the recipe)
+;
+; Required diag_script_info attributes (configured for russell figure 5)
+;    (Do not change)
+; - max_lat : plot ranges for SH
+;
+; Optional diag_script_info attributes (diagnostic specific)
+; - max_vert, max_hori: allowed dimensions of a panel page
+;    (number of plots per column/row)
+;
+; Caveats
+; - BNU-ESM appears to have a rotated grid
+;
+; Modification history
+; 20190510 - russell_joellen, pandde_amarjiit - written and
+;            implemented for ESMValTool v2
+;
+; #############################################################################
+
+load "$diag_scripts/../interface_scripts/interface.ncl"  ; load metadata
+load "$diag_scripts/shared/plot/style.ncl"  ; load plot style functions
+load "$diag_scripts/shared/plot/contour_maps.ncl"  ; load plot function
+load "$diag_scripts/shared/latlon.ncl"
+load "$diag_scripts/shared/plot/aux_plotting.ncl"
+
+begin
+
+  enter_msg(DIAG_SCRIPT, "")
+  var0 = variable_info[0]@short_name
+  var_atts = variable_info[0]
+  info_items = select_metadata_by_name(input_file_info, var0)
+  inputfile_paths = metadata_att_as_array(info_items, "filename")
+  start_years_data = metadata_att_as_array(info_items, "start_year")
+  end_years_data = metadata_att_as_array(info_items, "end_year")
+  nDatasets = ListCount(info_items)
+end
+
+begin
+
+  plotpath = config_user_info@plot_dir + "russell18jgr-fig5_" + var0 + "_" \
+    + sprinti("%0.4i", min(toint(start_years_data))) + "-" \
+    + sprinti("%0.4i", max(toint(end_years_data)))
+
+  system("mkdir -p " + config_user_info@work_dir)
+  system("mkdir -p " + config_user_info@plot_dir)
+
+  wks = gsn_open_wks(output_type(), plotpath)
+  wks@fullname = plotpath
+  plots = new(nDatasets, graphic)
+
+  ; Set annotations
+  annots = project_style(input_file_info, diag_script_info, "annots")
+  ; panel parameters
+  nvert = 1  ; default
+  nhori = 1  ; default
+  if (isatt(diag_script_info, "max_vert")) then
+    nvert = toint(diag_script_info@max_vert)
+    ; Maximum allowed number of plots per page (vertical)
+  end if
+
+  if (isatt(diag_script_info, "max_hori")) then
+    nhori = toint(diag_script_info@max_hori)
+    ; Maximum allowed number of plots per page (horizontal)
+  end if
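+  ; Note on the encoding used below: clmMonTLL reduces the (time, lat,
+  ; lon) series to a (12, lat, lon) monthly climatology; September grid
+  ; cells with sic above 15% are then set to 1 and March cells to 2, so
+  ; with the explicit contour levels (1.01, 2.001) the two-color palette
+  ; paints September ice blue4 and March ice red.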
+
+  cnLevels = (/ 1.01, 2.001/)
+  colorshere = (/ "blue4", "red"/)
+
+  res = True
+  res@lbLabelBarOn = False                ; individual bars
+  res@cnLevelSelectionMode = "ExplicitLevels"
+  res@cnLevels = cnLevels
+  res@cnFillPalette = colorshere
+  res@mpGridLatSpacingF = 10
+  res@mpFillDrawOrder = "PostDraw"        ; covers Antarctica with grey
+  res@mpLandFillColor = (/128, 128, 128/) / 256.0
+  res@cnLinesOn = False
+  res@cnLineLabelsOn = False
+  res@cnFillOn = True
+  res@gsnCenterString = "(Blue - September & Red - March)"
+  res@gsnDraw = False                     ; Draw and Frame is
+  res@gsnFrame = False                    ; called in panelling
+  res@gsnLeftString = "Southern Ocean Max Min Sea ice extent"
+  res@gsnPolar = "SH"
+  res@mpMaxLatF = diag_script_info@max_lat  ; Default is 0
+  res@gsnAddCyclic = True
+  res@gsnRightStringOrthogonalPosF = 0.14
+  res@gsnLeftStringOrthogonalPosF = 0.14
+  res@gsnRightStringFontHeightF = 13.
+  res@gsnLeftStringFontHeightF = 15.
+
+  do iii = 0, nDatasets - 1
+
+    dataset = read_data(info_items[iii])
+    sic12m = clmMonTLL(dataset)  ; create monthly climatology
+    delete(dataset)
+    if (max(sic12m) .lt. 5.0) then
+      sic12m = sic12m * 100.0
+      ; if the variable is not in percentage
+    end if
+
+    sic_march = sic12m(2, :, :)        ; climatological mean for March
+    sic_september = sic12m(8, :, :)    ; climatological mean for September
+    sic_september = where((sic_september .gt. 15), 1, sic12m@_FillValue)
+    ; masking all the grid points which have sic less than 15%
+    dataset = where((sic_march .gt. 15), 2, sic_september)
+    delete(sic_september)
+    copy_VarCoords(sic_march, dataset)
+    delete(sic_march)
+    infile_path = inputfile_paths(iii)
+    infile_iii = addfile(infile_path, "r")
+
+    if(.not.(iscoord(sic12m, "lat"))) then
+
+      area_lon = infile_iii->lon
+      area_lat = infile_iii->lat
+      dataset@lat2d = area_lat
+      dataset@lon2d = area_lon
+      delete(area_lat)
+      delete(area_lon)
+    else
+      ; restore the lat-lon coordinate arrays lost in the where() call
+      var_lon = infile_iii->lon
+      var_lat = infile_iii->lat
+      dataset!0 = "lat"
+      dataset!1 = "lon"
+      dataset&lat = var_lat
+      dataset&lon = var_lon
+      delete(var_lon)
+      delete(var_lat)
+    end if
+
+    delete(sic12m)
+    range = start_years_data(iii) + " - " + end_years_data(iii)
+    res@tiMainString = annots(iii)
+    res@gsnRightString = "annual mean " + range
+    dataset@var = var0
+    dataset@diag_script = "russell18jgr-fig5"
+
+    plots(iii) = gsn_csm_contour_map_polar(wks, dataset, res)
+
+    nc_filename = config_user_info@work_dir + "russell18jgr_fig5_" + var0 \
+                  + "_" + annots(iii) + "_" + (start_years_data(iii)) + "-" \
+                  + (end_years_data(iii)) + ".nc"
+
+    ncdf_outfile = ncdf_write(dataset, nc_filename)
+    delete(dataset)
+
+  end do
+
+  ; Draw the panel
+  pres = True
+  pres@gsnPanelLabelBar = False
+  outfile = panelling(wks, plots, nvert, nhori, pres)
+
+  log_provenance(ncdf_outfile, \
+                 plotpath + "." + output_type(), \
+                 "Russell et al 2018 figure 5 -polar", \
+                 "mean", \
+                 "sh", \
+                 "geo", \
+                 "russell_joellen", \
+                 "russell18jgr", \
+                 infile_path)
+
+
+end
diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig5g.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig5g.ncl
new file mode 100644
index 0000000000..0b48c1e7ed
--- /dev/null
+++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig5g.ncl
@@ -0,0 +1,266 @@
+; #############################################################################
+; russell18jgr-fig5g.ncl
+;
+; Russell, J.L., et al., 2018, J. Geophysical Research – Oceans, 123,
+; 3120-3143. https://doi.org/10.1002/2017JC013461 (figure 5g)
+;
+; Author: Pandde Amarjiit (University of Arizona, USA)
+;         Russell Joellen (University of Arizona, USA)
+;         Goodman Paul (University of Arizona, USA)
+;
+; #############################################################################
+; Description
+; - Uses original grid (no regridding).
+; - Calculates the monthly climatology values of sic
+; - if the var coordinates are lat-lon, the script calculates areacello
+;    itself; otherwise, it reads the areacello variable from the input file.
+; - Multiplies areacello and sic/100 to find the total area of each
+;    cell covered by ice
+; - Adds all cells below the equator to get the total sea ice area for
+;    each month
+; - Plots total area in 10^12 m^2 vs months
+; - overlays all the plots on the first one
+; - Draws the legend
+;
+;  Areacello is calculated for lat-lon models, as many of them provide
+;  sic on the atmosphere grid
+;
+; Required diag_script_info attributes (diagnostic specific)
+;    (Do not change)
+;
+;  styleset : CMIP5 - Default
+;
+; Optional diag_script_info attributes (diagnostic specific)
+;
+;
+; Caveats
+;
+; - no caveats known on May 10, 2019
+;
+; Modification history
+;
+; 20190510 - russell_joellen, pandde_amarjiit - written and
+;            implemented for ESMValTool v2.
+;
+; #############################################################################
+
+load "$diag_scripts/../interface_scripts/interface.ncl"  ; load metadata
+load "$diag_scripts/shared/plot/style.ncl"  ; load plot style functions
+load "$diag_scripts/shared/plot/contour_maps.ncl"  ; load plot function
+load "$diag_scripts/shared/latlon.ncl"
+load "$diag_scripts/shared/plot/aux_plotting.ncl"
+
+begin
+  enter_msg(DIAG_SCRIPT, "")
+  att = True
+  att@mip = "OImon"
+  info = select_metadata_by_atts(input_file_info, att)  ; variable
+  var0 = info[0]@short_name
+
+  info_items = select_metadata_by_name(input_file_info, var0)
+  sic_datasets = metadata_att_as_array(info_items, "dataset")
+  inputfile_paths = metadata_att_as_array(info_items, "filename")
+  start_years_data = metadata_att_as_array(info_items, "start_year")
+  end_years_data = metadata_att_as_array(info_items, "end_year")
+  nDatasets = ListCount(info_items)
+
+  att@mip = "fx"
+  volinfo = select_metadata_by_atts(input_file_info, att)  ; area
+  voldatasets = metadata_att_as_array(volinfo, "dataset")
+  volfile_paths = metadata_att_as_array(volinfo, "filename")
+  delete(att)
+  if (dimsizes(sic_datasets) .ne. dimsizes(voldatasets)) then
+    error_msg("f", DIAG_SCRIPT, " ", "areacello files for " + \
+              "russell18jgr-fig5g.ncl do not match with sic datasets. " + \
" + \ + "Please check to make sure all the sic files have a " + \ + "areacello file in the recipe.") + end if +end + +begin + + ; Set annotations + annots = project_style(input_file_info, diag_script_info, "annots") + colors = project_style(input_file_info, diag_script_info, "colors") + dashes = project_style(input_file_info, diag_script_info, "dashes") + thicks = project_style(input_file_info, diag_script_info, "thicks") + + plotpath = config_user_info@plot_dir + "russell18jgr-fig5g_" + var0 + "_" \ + + sprinti("%0.4i", min(toint(start_years_data))) + "-" \ + + sprinti("%0.4i", max(toint(end_years_data))) + + system("mkdir -p " + config_user_info@work_dir) + system("mkdir -p " + config_user_info@plot_dir) + + wks = gsn_open_wks(output_type(), plotpath) + wks@fullname = plotpath + plots = new(nDatasets, graphic) + + monthss = ispan(0, 11, 1) + areacello_lat = False ; counter for warning + + res = True + res@tmXBLabelFontHeightF = 0.008 + res@tmYLLabelFontHeightF = 0.008 + res@gsnFrame = False + res@gsnDraw = False + res@trYMaxF = 24 + res@trYMinF = 0. + res@tmXBMode = "Explicit" + res@tmXBValues = (/0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11/) + res@vpHeightF = 0.65 + res@vpWidthF = 0.65 + res@gsnYRefLine = 0 + res@gsnYRefLineDashPattern = 2 + res@gsnYRefLineColor = "grey" + res@xyMarkLineMode = "MarkLines" + res@xyMarkers = 4 + res@vpYF = 0.9 + res@vpXF = 0.08 + res@gsnRightStringFontHeightF = 15. + res@tiXAxisString = "months" + res@gsnLeftString = "Russell et al -2018 - Figure 5 g" + res@gsnLeftStringFontHeightF = 17. + res@tiXAxisFontHeightF = 0.0175 + res@tiYAxisFontHeightF = 0.0175 + res@tiYAxisOffsetXF = 0.01 + res@tiYAxisString = "Area under sea ice ( 10~S~12 ~N~ m~S~2~N~ )" + res@tmXBLabels = (/"Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", \ + "Aug", "Sep", "Oct", "Nov", "Dec"/) + + do iii = 0, nDatasets - 1 + dataset = read_data(info_items[iii]) ; reading data + sic12m = clmMonTLL(dataset) ; create monthly climatology + delete(dataset) + if (max(sic12m) .lt. 5.0) then + sic12m = sic12m * 100.0 + ; if variable is not in percentage + end if + dim_sic_12m = dimsizes(sic12m) + a1 = round((dim_sic_12m(1) /2), 3) + ; finding the index of grid point half of dimension sizes + delete(dim_sic_12m) + infile_path = inputfile_paths(iii) + infile_iii = addfile(infile_path, "r") + + if (iscoord(sic12m, "lat")) then + areacello_lat = True + var_lon = infile_iii->lon + var_lat = infile_iii->lat + radius_earth = 6.37e06 + deg2rad_convF = 0.0174533 + dlat = abs(var_lat(20) - var_lat(19)) + ; some models have closer lat points near poles. 
+      dlon = abs(var_lon(20) - var_lon(19))
+      dist_x_deg_earth = radius_earth * deg2rad_convF * dlat
+      clat = cos(var_lat*deg2rad_convF)
+
+      dx = dlon * radius_earth * deg2rad_convF * clat
+      ; dx = (lon1 - lon2) * radius of earth * cos(lat of this data point)
+      dy = dlat * radius_earth * deg2rad_convF
+      ; dy = (lat1 - lat2) * radius of earth (angles in radians)
+      dxdy = tofloat(dx*dy)
+      ; area of cell = dx * dy
+      areacello_2d = new(dimsizes(sic12m(0, :, :)), float)
+      areacello_2d = conform(areacello_2d, dxdy, 0)
+      delete(var_lon)
+      delete(dx)
+      delete(dy)
+      delete(dxdy)
+      delete(clat)
+      delete(dlon)
+      delete(dlat)
+      delete(var_lat)
+
+    else
+      dataset_compare = sic_datasets(iii) + "$"
+      fx_ind = str_match_ind_regex(voldatasets, dataset_compare)
+      fx_var = read_data(volinfo[fx_ind])
+      if (all(ismissing(fx_var))) then
+        error_msg("f", DIAG_SCRIPT, " ", "areacello file for " + \
+                  annots(iii) + " not found, please add the dataset " \
+                  + "name in the additional dataset section of areacello" \
+                  + " or remove the dataset from the sic section.")
+      end if
+
+      areacello_2d = fx_var
+      delete(fx_var)
+    end if
+
+    areacello_3d = new(dimsizes(sic12m), float)
+    ; making areacello the same dimensions as sic12m
+    areacello_3d = conform(areacello_3d, areacello_2d, (/1, 2/))
+    delete(infile_iii)
+    delete(areacello_2d)
+    copy_VarCoords(sic12m, areacello_3d)
+    sic12m = sic12m * areacello_3d
+    delete(areacello_3d)
+    sic_12m_lon_avg = dim_sum_Wrap(sic12m(:, 0:a1, :))
+    ; sum over lon for the southern hemisphere
+    delete(sic12m)
+    var_final = dim_sum_Wrap(sic_12m_lon_avg)  ; sum over lat
+    delete(sic_12m_lon_avg)
+    var_final = var_final / (10.0 ^ 14)
+    ; 10^14 = 10^12 for unit conversion &
+    ; 100 for converting sic from percentage to decimal
+
+    res@xyDashPatterns = dashes(iii)
+    res@xyLineColors = colors(iii)
+    res@xyMarkerColors = colors(iii)
+    res@xyExplicitLegendLabels = annots(iii)
+    res@xyLineThicknessF = thicks(iii)
+    var_final@var = var0
+    var_final@diag_script = DIAG_SCRIPT
+
+    plots(iii) = gsn_csm_xy(wks, monthss, var_final, res)
+
+    nc_filename = config_user_info@work_dir + "russell18jgr-fig5g_" + var0 \
+                  + "_" + annots(iii) + "_" + (start_years_data(iii)) + "-" \
+                  + (end_years_data(iii)) + ".nc"
+
+    ncdf_outfile = ncdf_write(var_final, nc_filename)
+    delete(var_final)
+
+    if (iii .ne. 0) then
+      overlay(plots(0), plots(iii))
+    end if
+
+  end do
+
+  draw(plots(0))
+
+  legend = create "Legend" legendClass wks
+    "vpXF"                     : 0.625   ; orientation on page
+    "vpYF"                     : 0.925
+    "vpWidthF"                 : 0.5     ; width
+    "vpHeightF"                : 0.725   ; height
+    "lgPerimOn"                : False   ; no perimeter
+    "lgItemCount"              : dimsizes(annots)  ; how many
+    "lgLineLabelStrings"       : annots  ; labels
+    "lgLabelsOn"               : False   ; no default labels
+    "lgLineLabelFontHeightF"   : 0.0085  ; font height
+    "lgDashIndexes"            : dashes  ; line patterns
+    "lgLineColors"             : colors
+    "lgMonoLineLabelFontColor" : True    ; one label color
+  end create
+  draw(legend)
+
+  if (areacello_lat) then  ; warning that areacello was calculated manually
+    error_msg("w", DIAG_SCRIPT, " ", "All the models having lat-lon " + \
+              "coordinates had a manual calculation of areacello; this is " + \
+              "not an error and no action is needed by the user.")
+  end if
+
+  frame(wks)
+
+  ; Call provenance logger
+  log_provenance(ncdf_outfile, \
+                 plotpath + "." + output_type(), \
+                 "Russell et al 2018 figure 5g", \
+                 "mean", \
+                 "sh", \
+                 "times", \
+                 "russell_joellen", \
+                 "russell18jgr", \
+                 infile_path)
+
+end
diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig6a.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig6a.ncl
new file mode 100644
index 0000000000..0f1b49c224
--- /dev/null
+++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig6a.ncl
@@ -0,0 +1,711 @@
+; #############################################################################
+;
+; russell18jgr_fig6a.ncl
+;
+; Based on Figure 6a - Russell, J.L., et al., 2018, J. Geophysical Research –
+; Oceans, 123, 3120-3143. https://doi.org/10.1002/2017JC013461 (figure 6a)
+;
+; Author: Pandde Amarjiit (University of Arizona, USA)
+;         Russell Joellen (University of Arizona, USA)
+;         Goodman Paul (University of Arizona, USA)
+;
+; #############################################################################
+; Description
+;
+; - Calculates the time average of thetao, so and vo
+; - Regrids temperature and salinity onto the vo grid
+; - Calculates the potential density of all cells relative to
+;    0, 2000, 4000 decibars (or 1977m and 3948m)
+; - extracts the volcello of the lat closest to 30S
+; - divides volcello by the latitudinal distance between two data points
+;    on the same lon; this gives the north-south cross-sectional area of
+;    each grid cell
+; - volume transport (m^3/s) in each cell is calculated by vo (m/s)
+;    * cross-sectional area (m^2)
+; - makes lxx masks that are 1 inside a specific density layer and
+;    missing elsewhere
+; - multiplies lxx with the volume transport per cell to get the volume
+;    transport of the cells in that layer (lvxx)
+; - the total sum of lvxx gives the net volume transported in that
+;    density layer
+; - plots the volume transport per layer as a bar chart
+;
+;  Density layers defined as per: (Talley, L.D., 2003. Shallow, intermediate
+;  and deep overturning components of the global heat budget.
+;  Journal of Physical Oceanography 33, 530–560)
+;
+; Required preprocessor attributes (no_preprocessor)
+; - None (no preprocessing required)
+;   (do not assign a preprocessor in the variable section of the recipe)
+;
+; Required diag_script_info attributes
+;
+;  styleset = "CMIP5" - default
+;  ncdf = "default"
+;
+; Optional diag_script_info attributes (diagnostic specific)
+;
+; Caveats
+;
+; - MIROC-ESM and BNU-ESM do not work as the depth variable is not called lev
+; - MRI models do not work as the data have 0 as fill value instead of 1e+20
+; - CCSM4 and CESM1-CAM5 don't work as the units for so is 1,
+;    which is not accepted by ESMValTool
+; - Transport is very small in the case of NorESM1-M and -ME, as the
+;    volcello values look incorrect (very small)
+;
+;
+; Modification history
+; 20190510 - russell_joellen, pandde_amarjiit - written and
+;            implemented for ESMValTool v2.
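+;
+;  Worked example of the layer logic (values illustrative): a grid cell
+;  with sigma_0 = 26.2 falls in the big layer l20 (26.100 <= sigma_0 <
+;  26.400) and in the sub-layer l22 (26.175 <= sigma_0 < 26.250), so its
+;  transport contributes to lv(2) and lv0(5) in the sums below.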
+; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" ; load metadata +load "$diag_scripts/shared/plot/style.ncl" ; load plot style functions +load "$diag_scripts/shared/plot/aux_plotting.ncl" + +begin + + enter_msg(DIAG_SCRIPT, "") + vo_items = select_metadata_by_name(input_file_info, "vo") + so_items = select_metadata_by_name(input_file_info, "so") + thetao_items = select_metadata_by_name(input_file_info, "thetao") + volcello_items = select_metadata_by_name(input_file_info, "volcello") + + vo_datasets = metadata_att_as_array(vo_items, "dataset") + start_years_data = metadata_att_as_array(vo_items, "start_year") + end_years_data = metadata_att_as_array(vo_items, "end_year") + vo_inputfile_paths = metadata_att_as_array(vo_items, "filename") + thetao_inputfile_paths = metadata_att_as_array(thetao_items, "filename") + so_inputfile_paths = metadata_att_as_array(so_items, "filename") + nDatasets = ListCount(vo_items) + nVolcello = ListCount(volcello_items) + nVariables = ListCount(variable_info) + dim_models = dimsizes(vo_datasets) + if (nVolcello .ne. nDatasets) then + error_msg("f", "russell18jgr-fig6a.ncl", " ", "volcello files " + \ + "for russell18jgr-fig6a.ncl do not match with vo datasets." + \ + " Please do not add additional variables in variable groups ") + end if +end + +begin + + plotpath = config_user_info@plot_dir + "Russell_figure-6a_" \ + + sprinti("%0.4i", min(toint(start_years_data))) + "-" \ + + sprinti("%0.4i", max(toint(end_years_data))) + + system("mkdir -p " + config_user_info@work_dir) + system("mkdir -p " + config_user_info@plot_dir) + + wks = gsn_open_wks(output_type(), plotpath) + plots = new(nDatasets, graphic) + plot_1 = new(nDatasets, graphic) + plot_talley = new(nDatasets, graphic) + + y_val = (/-0.5, 0.5, 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5, 10.5/) + y1_val = fspan(0.125, 10.875, 44) + yaxis_labels = (/-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11/) + talley_SO_Zonal_all_Levels = (/0.0, -7.34, -1.9, 9.71, 2.37, -5.86, -10.02, \ + -11.11, -2.84, 9.96, 16.49, 0.51 /) + + res = True ; resources for plotting + res@gsnXYBarChart = True + res@gsnDraw = False + res@gsnFrame = False + res@vpXF = 0.1 + res@vpYF = 0.75 + res@vpHeightF = 0.5 + res@vpWidthF = 0.4 + res@gsnMaximize = True + res@trYReverse = True + res@tmXBTickStartF = -20. + res@tmXBTickSpacingF = 4. + res@tmXBTickEndF = 20. + res@tmXBMode = "Manual" + res@tmXBMinorPerMajor = 1 + res@trXMinF = -20. + res@trXMaxF = 20. + res@trYMinF = -1. + res@trYMaxF = 11. 
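+  ; Note on the tmYLLabels set below: strings such as "26.10s~B~0" use
+  ; NCL text function codes (~B~ starts a subscript; ~F0~ ... ~F~ switch
+  ; fonts for the plain words), and with tmYLLabelFont = 5 the "s" is
+  ; presumably drawn from a Greek font table, so the labels read as
+  ; sigma_0, sigma_2 and sigma_4 density classes.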
+  res@gsnXRefLine = 0.0
+  res@gsnXYBarChartColors2 = ("red")
+  res@xyLineColors = (/"black", "black"/)
+  res@tmYLMode = "Explicit"
+  res@tmYLLabelsOn = True
+  res@tmYLValues = yaxis_labels
+  res@tmYLLabelFont = 5
+  res@tmYLLabelFontHeightF = 0.008
+  res@tmYLLabels = (/"~F0~Net~F~", "~F0~Surface~F~", "26.10s~B~0", \
+                     "26.40s~B~0", "26.90s~B~0", "27.10s~B~0", "27.40s~B~0", \
+                     "36.8s~B~2", "45.8s~B~4", "45.86s~B~4", "45.92s~B~4", \
+                     "46.0s~B~4", "~F0~Bottom~F~" /)
+  res@gsnRightStringFontHeightF = 0.009
+  res@gsnLeftStringFontHeightF = 0.009
+
+  do iii = 0, dim_models - 1
+
+    fx_var = read_data(volcello_items[iii])
+
+    if (all(ismissing(fx_var))) then
+      ; to give a fatal error if volcello is missing
+      fx_variable = "volcello"
+      error_msg("f", "russell18jgr-fig6a.ncl", " ", "volcello file for " \
+                + vo_datasets(iii) \
+                + " not found in the metadata file, please specify " \
+                + "'volcello' as supplementary variable in the recipe.")
+    end if
+
+    dataset_so_time = read_data(so_items[iii])
+    dataset_so = dim_avg_n_Wrap(dataset_so_time, 0)
+    delete(dataset_so_time)
+    dataset_thetao_time = read_data(thetao_items[iii])
+    dataset_thetao = dim_avg_n_Wrap(dataset_thetao_time, 0)
+    delete(dataset_thetao_time)
+
+    if (max(dataset_thetao) .gt. 250) then  ; if in Kelvin, convert to deg C
+      dataset_thetao = dataset_thetao - 273.15
+    end if
+
+    rho0 = new(2, double)
+    assignFillValue(dataset_thetao, rho0)
+
+    vo_file = addfile(vo_inputfile_paths(iii), "r")
+    thetao_file = addfile(thetao_inputfile_paths(iii), "r")
+    var_test_lat = vo_file->lat  ; extracting lat array of vo
+    var_test_lon = vo_file->lon  ; extracting lon array of vo
+
+    if((iscoord(dataset_so, "lat"))) then
+      a = closest_val(-30.0, var_test_lat)
+      ; getting the index for the lat closest to 30S
+      exact_lat = var_test_lat(a)  ; lat value closest to -30
+      ; interpolate thetao and so onto the vo grid
+      theta_inter = linint1_n_Wrap(dataset_thetao&lat, dataset_thetao, \
+                                   False, exact_lat, 0, 1)
+      theta_new = linint1_n_Wrap(dataset_thetao&lon, theta_inter, \
+                                 True, var_test_lon, 0, 2)
+      delete(theta_inter)
+      delete(dataset_thetao)
+      so_inter = linint1_n_Wrap(dataset_so&lat, dataset_so, False, \
+                                exact_lat, 0, 1)
+      so_new = linint1_n_Wrap(dataset_so&lon, so_inter, True, \
+                              var_test_lon, 0, 2)
+      delete(dataset_so)
+      delete(so_inter)
+      ; potential density calculation
+      rho0_30s = rho_mwjf(theta_new(:, 0, :), so_new(:, 0, :), 0.0)
+      rho2_30s = rho_mwjf(theta_new(:, 0, :), so_new(:, 0, :), 1977.0)
+      rho4_30s = rho_mwjf(theta_new(:, 0, :), so_new(:, 0, :), 3948.0)
+
+      delete(theta_new)
+      delete(so_new)
+
+    elseif (iscoord(dataset_so, "rlat")) then
+
+      theta_lat = thetao_file->lat  ; extracting lat array of thetao
+      theta_lon = thetao_file->lon  ; extracting lon array of thetao
+      lat1 = closest_val(-15.0, var_test_lat(:, 0))
+      ; index value of vo-lat closest to -15
+      lat2 = closest_val(-45.0, var_test_lat(:, 0))
+      ; index value of vo-lat closest to -45
+      a = closest_val(-30.0, var_test_lat(:, 0))
+      ; index value of vo-lat closest to -30
+      exact_lat = var_test_lat(a, 0)  ; lat value closest to -30
+      ; interpolate thetao and so onto the vo grid
+      theta_inter = linint1_n_Wrap(theta_lat(lat2:lat1, 1), \
+                                   dataset_thetao(:, lat2:lat1, :), False, \
+                                   exact_lat, 0, 1)
+      delete(dataset_thetao)
+
+      so_inter = linint1_n_Wrap(theta_lat(lat2:lat1, 1), \
+                                dataset_so(:, lat2:lat1, :), False, \
+                                exact_lat, 0, 1)
+      delete(dataset_so)
+
+      ; checking for monotonic nature of the lon arrays, as NCL
+      ; needs monotonically increasing lon for interpolations.
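+      ; (isMonotonic returns 1 for monotonically increasing, -1 for
+      ; decreasing and 0 for non-monotonic arrays, hence the .eq. 1
+      ; tests below; non-monotonic longitudes skip the interpolation.)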
+      if ((isMonotonic(theta_lon(a, :)) .eq. 1) .and. \
+          (isMonotonic(var_test_lon(a, :)) .eq. 1)) then
+        theta_new = linint1_n_Wrap(theta_lon(a, :), theta_inter, \
+                                   True, var_test_lon(a, :), 0, 2)
+      else
+        theta_new = theta_inter
+      end if
+      delete(theta_inter)
+
+      if ((isMonotonic(theta_lon(a, :)) .eq. 1) .and. \
+          (isMonotonic(var_test_lon(a, :)) .eq. 1)) then
+        so_new = linint1_n_Wrap(theta_lon(a, :), so_inter, True, \
+                                var_test_lon(a, :), 0, 2)
+      else
+        so_new = so_inter
+      end if
+      delete(theta_lat)
+      delete(theta_lon)
+      delete(so_inter)
+
+      ; potential density calculation
+      rho0_30s = rho_mwjf(theta_new(:, 0, :), so_new(:, 0, :), 0.)
+      rho2_30s = rho_mwjf(theta_new(:, 0, :), so_new(:, 0, :), 1977.0)
+      rho4_30s = rho_mwjf(theta_new(:, 0, :), so_new(:, 0, :), 3948.0)
+      delete(theta_new)
+      delete(so_new)
+    else
+      theta_lat = thetao_file->lat  ; extracting lat array of thetao
+      theta_lon = thetao_file->lon  ; extracting lon array of thetao
+
+      lat1 = closest_val(-15.0, var_test_lat(:, 0))
+      ; index value of vo-lat closest to -15
+      lat2 = closest_val(-45.0, var_test_lat(:, 0))
+      ; index value of vo-lat closest to -45
+      a = closest_val(-30.0, var_test_lat(:, 0))
+      ; index value of vo-lat closest to -30
+      exact_lat = var_test_lat(a, 0)  ; lat value closest to -30
+      ; interpolate thetao and so onto the vo grid
+
+      theta_inter = linint1_n_Wrap(theta_lat(lat2:lat1, 1), \
+                                   dataset_thetao(:, lat2:lat1, :), False, \
+                                   exact_lat, 0, 1)
+      delete(dataset_thetao)
+
+      so_inter = linint1_n_Wrap(theta_lat(lat2:lat1, 1), \
+                                dataset_so(:, lat2:lat1, :), False, \
+                                exact_lat, 0, 1)
+      delete(dataset_so)
+
+      ; checking for monotonic nature of the lon arrays, as NCL needs
+      ; monotonically increasing lon for interpolations.
+      if ((isMonotonic(theta_lon(a, :)) .eq. 1) .and. \
+          (isMonotonic(var_test_lon(a, :)) .eq. 1)) then
+        theta_new = linint1_n_Wrap(theta_lon(a, :), theta_inter, True, \
+                                   var_test_lon(a, :), 0, 2)
+      else
+        theta_new = theta_inter
+      end if
+
+      if ((isMonotonic(theta_lon(a, :)) .eq. 1) .and. \
+          (isMonotonic(var_test_lon(a, :)) .eq. 1)) then
+        so_new = linint1_n_Wrap(theta_lon(a, :), so_inter, True, \
+                                var_test_lon(a, :), 0, 2)
+      else
+        so_new = so_inter
+      end if
+
+      delete(theta_inter)
+      delete(so_inter)
+      delete(theta_lat)
+      delete(theta_lon)
+
+      ; potential density calculation
+      rho0_30s = rho_mwjf(theta_new(:, 0, :), so_new(:, 0, :), 0.)
+      rho2_30s = rho_mwjf(theta_new(:, 0, :), so_new(:, 0, :), 1977.0)
+      rho4_30s = rho_mwjf(theta_new(:, 0, :), so_new(:, 0, :), 3948.0)
+      delete(theta_new)
+      delete(so_new)
+
+    end if
+
+    dataset_vo_time = read_data(vo_items[iii])
+    dataset_vo = dim_avg_n_Wrap(dataset_vo_time, 0)
+    delete(dataset_vo_time)
+    delete(var_test_lon)
+    volcello = fx_var
+    delete(fx_var)
+
+    ; transport calculation
+    if(iscoord(dataset_vo, "lat")) then
+
+      var_tmp2 = dataset_vo(:, a, :)  ; y velocity of the lat closest to -30
+
+      dlat = tofloat(abs(dataset_vo&lat(a) - dataset_vo&lat(a + 1)) \
+                     * 6.37e06 * 0.0174533)
+      ; dlat is the north-south distance between 2 consecutive data latitudes
+      volumecello_3d = volcello/dlat
+      ; the north-south cross-sectional area is volcello / dlat
+      if(abs(volcello&lev(0)) .gt. abs(volcello&lev(1))) then
+        ; extracting the volume of the cells closest to 30S
+        volcello_2d = volumecello_3d(::-1, a, :)
+        ; reversing lev if it is in descending order.
+      else
+        volcello_2d = volumecello_3d(:, a, :)
+      end if
+      transportpercell = var_tmp2 * volcello_2d / (10 ^ 6)
+      ; unit conversion from m^3/s to sverdrups
+      copy_VarCoords(var_tmp2, transportpercell)
+
+    elseif (iscoord(dataset_vo, "rlat")) then
+
+      var_tmp2 = dataset_vo(:, a, :)  ; y transport of the lat closest to -30
+      drlat = tofloat(abs(var_test_lat(a, 0) - var_test_lat(a+1, 0)) \
+                      * 6.37e06 * 0.0174533)
+      ; drlat is the north-south distance between 2 consecutive data latitudes
+      volumecello_3d = volcello/drlat
+      ; the north-south cross-sectional area is volcello / drlat
+      if(abs(volcello&lev(0)) .gt. abs(volcello&lev(1))) then
+        ; extracting the volume of the cells closest to 30S
+        volcello_2d = volumecello_3d(::-1, a, :)
+        ; reversing lev if it is in descending order.
+      else
+        volcello_2d = volumecello_3d(:, a, :)
+      end if
+      transportpercell = var_tmp2 * volcello_2d / (10 ^ 6)
+      ; unit conversion from m^3/s to sverdrups
+      copy_VarCoords(var_tmp2, transportpercell)
+
+    else
+
+      var_tmp2 = dataset_vo(:, a, :)  ; y transport of the lat closest to 30S
+      dlat = tofloat(abs(var_test_lat(a, 0) - var_test_lat(a + 1, 0)) \
+                     * 6.37e06 * 0.0174533)
+      ; dlat is the north-south distance between 2 consecutive data latitudes
+      volumecello_3d = volcello / dlat
+      ; the north-south cross-sectional area is volcello / dlat
+
+      if(abs(volcello&lev(1)) .gt. abs(volcello&lev(2))) then
+        ; extracting the volume of the cells closest to 30S
+        volcello_2d = volumecello_3d(::-1, a, :)
+        ; reversing lev if it is in descending order.
+      else
+        volcello_2d = volumecello_3d(:, a, :)
+      end if
+      transportpercell = var_tmp2 * volcello_2d / (10 ^ 6)
+      ; unit conversion from m^3/s to sverdrups
+      copy_VarCoords(var_tmp2, transportpercell)
+
+    end if
+    delete(volcello)
+    delete(var_test_lat)
+    delete(dataset_vo)
+    delete(volcello_2d)
+    delete(volumecello_3d)
+    delete(var_tmp2)
+
+    rho0_30s = 1000. * (rho0_30s - 1.0)
+    rho2_30s = 1000. * (rho2_30s - 1.0)
+    rho4_30s = 1000. * (rho4_30s - 1.0)
+
+    ; making masks for density layers, based on Talley, L.D., 2003.
+    l10 = where((rho0_30s .lt. 26.10), 1, rho0@_FillValue)
+    l11 = where((rho0_30s .lt. 24.900), 1, rho0@_FillValue)
+    l12 = where((rho0_30s .ge. 24.900) .and. (rho0_30s .lt. 25.300), 1, \
+                rho0@_FillValue)
+    l13 = where((rho0_30s .ge. 25.300) .and. (rho0_30s .lt. 25.700), 1, \
+                rho0@_FillValue)
+    l14 = where((rho0_30s .ge. 25.700) .and. (rho0_30s .lt. 26.100), 1, \
+                rho0@_FillValue)
+
+    l20 = where((rho0_30s .ge. 26.100) .and. (rho0_30s .lt. 26.400), 1, \
+                rho0@_FillValue)
+    l21 = where((rho0_30s .ge. 26.100) .and. (rho0_30s .lt. 26.175), 1, \
+                rho0@_FillValue)
+    l22 = where((rho0_30s .ge. 26.175) .and. (rho0_30s .lt. 26.250), 1, \
+                rho0@_FillValue)
+    l23 = where((rho0_30s .ge. 26.250) .and. (rho0_30s .lt. 26.325), 1, \
+                rho0@_FillValue)
+    l24 = where((rho0_30s .ge. 26.325) .and. (rho0_30s .lt. 26.400), 1, \
+                rho0@_FillValue)
+
+    l30 = where((rho0_30s .ge. 26.400) .and. (rho0_30s .lt. 26.900), 1, \
+                rho0@_FillValue)
+    l31 = where((rho0_30s .ge. 26.400) .and. (rho0_30s .lt. 26.525), 1, \
+                rho0@_FillValue)
+    l32 = where((rho0_30s .ge. 26.525) .and. (rho0_30s .lt. 26.650), 1, \
+                rho0@_FillValue)
+    l33 = where((rho0_30s .ge. 26.650) .and. (rho0_30s .lt. 26.775), 1, \
+                rho0@_FillValue)
+    l34 = where((rho0_30s .ge. 26.775) .and. (rho0_30s .lt. 26.900), 1, \
+                rho0@_FillValue)
+
+    l40 = where((rho0_30s .ge. 26.900) .and. (rho0_30s .lt. 27.100), 1, \
+                rho0@_FillValue)
+    l41 = where((rho0_30s .ge. 26.900) .and. (rho0_30s .lt. 26.950), 1, \
+                rho0@_FillValue)
+    l42 = where((rho0_30s .ge. 26.950) .and.
(rho0_30s .lt. 27.000), 1, \ + rho0@_FillValue) + l43 = where((rho0_30s .ge. 27.000) .and. (rho0_30s .lt. 27.050), 1, \ + rho0@_FillValue) + l44 = where((rho0_30s .ge. 27.050) .and. (rho0_30s .lt. 27.100), 1, \ + rho0@_FillValue) + + l50 = where((rho0_30s .ge. 27.100) .and. (rho0_30s .lt. 27.400), 1, \ + rho0@_FillValue) + l51 = where((rho0_30s .ge. 27.100) .and. (rho0_30s .lt. 27.175), 1, \ + rho0@_FillValue) + l52 = where((rho0_30s .ge. 27.175) .and. (rho0_30s .lt. 27.250), 1, \ + rho0@_FillValue) + l53 = where((rho0_30s .ge. 27.250) .and. (rho0_30s .lt. 27.325), 1, \ + rho0@_FillValue) + l54 = where((rho0_30s .ge. 27.325) .and. (rho0_30s .lt. 27.400), 1, \ + rho0@_FillValue) + + l60 = where((rho0_30s .ge. 27.400) .and. (rho2_30s .lt. 36.800), 1, \ + rho0@_FillValue) + l61 = where((rho0_30s .ge. 27.400) .and. (rho0_30s .lt. 27.500), 1, \ + rho0@_FillValue) + l62 = where((rho0_30s .ge. 27.500) .and. (rho2_30s .lt. 36.700), 1, \ + rho0@_FillValue) + l63 = where((rho2_30s .ge. 36.700) .and. (rho2_30s .lt. 36.750), 1, \ + rho0@_FillValue) + l64 = where((rho2_30s .ge. 36.750) .and. (rho2_30s .lt. 36.800), 1, \ + rho0@_FillValue) + + l70 = where((rho2_30s .ge. 36.800) .and. (rho4_30s .lt. 45.800), 1, \ + rho0@_FillValue) + l71 = where((rho2_30s .ge. 36.800) .and. (rho2_30s .lt. 36.850), 1, \ + rho0@_FillValue) + l72 = where((rho2_30s .ge. 36.850) .and. (rho2_30s .lt. 36.900), 1, \ + rho0@_FillValue) + l73 = where((rho2_30s .ge. 36.900) .and. (rho2_30s .lt. 36.950), 1, \ + rho0@_FillValue) + l74 = where((rho2_30s .ge. 36.950) .and. (rho4_30s .lt. 45.800), 1, \ + rho0@_FillValue) + + l80 = where((rho4_30s .ge. 45.800) .and. (rho4_30s .lt. 45.860), 1, \ + rho0@_FillValue) + l81 = where((rho4_30s .ge. 45.800) .and. (rho4_30s .lt. 45.815), 1, \ + rho0@_FillValue) + l82 = where((rho4_30s .ge. 45.815) .and. (rho4_30s .lt. 45.830), 1, \ + rho0@_FillValue) + l83 = where((rho4_30s .ge. 45.830) .and. (rho4_30s .lt. 45.845), 1, \ + rho0@_FillValue) + l84 = where((rho4_30s .ge. 45.845) .and. (rho4_30s .lt. 45.860), 1, \ + rho0@_FillValue) + + l90 = where((rho4_30s .ge. 45.860) .and. (rho4_30s .lt. 45.920), 1, \ + rho0@_FillValue) + l91 = where((rho4_30s .ge. 45.860) .and. (rho4_30s .lt. 45.875), 1, \ + rho0@_FillValue) + l92 = where((rho4_30s .ge. 45.875) .and. (rho4_30s .lt. 45.890), 1, \ + rho0@_FillValue) + l93 = where((rho4_30s .ge. 45.890) .and. (rho4_30s .lt. 45.905), 1, \ + rho0@_FillValue) + l94 = where((rho4_30s .ge. 45.905) .and. (rho4_30s .lt. 45.920), 1, \ + rho0@_FillValue) + + l100 = where((rho4_30s .ge. 45.920) .and. (rho4_30s .lt. 46.000), 1, \ + rho0@_FillValue) + l101 = where((rho4_30s .ge. 45.920) .and. (rho4_30s .lt. 45.940), 1, \ + rho0@_FillValue) + l102 = where((rho4_30s .ge. 45.940) .and. (rho4_30s .lt. 45.960), 1, \ + rho0@_FillValue) + l103 = where((rho4_30s .ge. 45.960) .and. (rho4_30s .lt. 45.980), 1, \ + rho0@_FillValue) + l104 = where((rho4_30s .ge. 45.980) .and. (rho4_30s .lt. 46.000), 1, \ + rho0@_FillValue) + + l110 = where((rho4_30s .ge. 46.000), 1, rho0@_FillValue) + l111 = where((rho4_30s .ge. 46.000) .and. (rho4_30s .lt. 46.050), 1, \ + rho0@_FillValue) + l112 = where((rho4_30s .ge. 46.050) .and. (rho4_30s .lt. 46.100), 1, \ + rho0@_FillValue) + l113 = where((rho4_30s .ge. 46.100) .and. (rho4_30s .lt. 46.150), 1, \ + rho0@_FillValue) + l114 = where((rho4_30s .ge. 
46.150), 1, rho0@_FillValue) + + delete(rho0_30s) + delete(rho2_30s) + delete(rho4_30s) + + ; assignning filling values from rho0 to masked layers + assignFillValue(rho0, l10) + assignFillValue(rho0, l20) + assignFillValue(rho0, l30) + assignFillValue(rho0, l40) + assignFillValue(rho0, l50) + assignFillValue(rho0, l60) + assignFillValue(rho0, l70) + assignFillValue(rho0, l80) + assignFillValue(rho0, l90) + assignFillValue(rho0, l100) + assignFillValue(rho0, l110) + assignFillValue(rho0, l11) + assignFillValue(rho0, l21) + assignFillValue(rho0, l31) + assignFillValue(rho0, l41) + assignFillValue(rho0, l51) + assignFillValue(rho0, l61) + assignFillValue(rho0, l71) + assignFillValue(rho0, l81) + assignFillValue(rho0, l91) + assignFillValue(rho0, l101) + assignFillValue(rho0, l111) + assignFillValue(rho0, l12) + assignFillValue(rho0, l22) + assignFillValue(rho0, l32) + assignFillValue(rho0, l42) + assignFillValue(rho0, l52) + assignFillValue(rho0, l62) + assignFillValue(rho0, l72) + assignFillValue(rho0, l82) + assignFillValue(rho0, l92) + assignFillValue(rho0, l102) + assignFillValue(rho0, l112) + assignFillValue(rho0, l13) + assignFillValue(rho0, l23) + assignFillValue(rho0, l33) + assignFillValue(rho0, l43) + assignFillValue(rho0, l53) + assignFillValue(rho0, l63) + assignFillValue(rho0, l73) + assignFillValue(rho0, l83) + assignFillValue(rho0, l93) + assignFillValue(rho0, l103) + assignFillValue(rho0, l113) + assignFillValue(rho0, l14) + assignFillValue(rho0, l24) + assignFillValue(rho0, l34) + assignFillValue(rho0, l44) + assignFillValue(rho0, l54) + assignFillValue(rho0, l64) + assignFillValue(rho0, l74) + assignFillValue(rho0, l84) + assignFillValue(rho0, l94) + assignFillValue(rho0, l104) + assignFillValue(rho0, l114) + delete(rho0) + + lv = new((/12/), double) ; lv is array of big blue bars + assignFillValue(l10, lv) + lv(1) = sum(l10 * transportpercell) + lv(2) = sum(l20 * transportpercell) + lv(3) = sum(l30 * transportpercell) + lv(4) = sum(l40 * transportpercell) + lv(5) = sum(l50 * transportpercell) + lv(6) = sum(l60 * transportpercell) + lv(7) = sum(l70 * transportpercell) + lv(8) = sum(l80 * transportpercell) + lv(9) = sum(l90 * transportpercell) + lv(10) = sum(l100 * transportpercell) + lv(11) = sum(l110 * transportpercell) + lv(0) = 0 + lv(0) = sum(lv) + + lv0 = new((/44/), double) ; lv0 is array of small red bars + assignFillValue(l104, lv0) + lv0(0) = sum(l11 * transportpercell) + lv0(1) = sum(l12 * transportpercell) + lv0(2) = sum(l13 * transportpercell) + lv0(3) = sum(l14 * transportpercell) + lv0(4) = sum(l21 * transportpercell) + lv0(5) = sum(l22 * transportpercell) + lv0(6) = sum(l23 * transportpercell) + lv0(7) = sum(l24 * transportpercell) + lv0(8) = sum(l31 * transportpercell) + lv0(9) = sum(l32 * transportpercell) + lv0(10) = sum(l33 * transportpercell) + lv0(11) = sum(l34 * transportpercell) + lv0(12) = sum(l41 * transportpercell) + lv0(13) = sum(l42 * transportpercell) + lv0(14) = sum(l43 * transportpercell) + lv0(15) = sum(l44 * transportpercell) + lv0(16) = sum(l51 * transportpercell) + lv0(17) = sum(l52 * transportpercell) + lv0(18) = sum(l53 * transportpercell) + lv0(19) = sum(l54 * transportpercell) + lv0(20) = sum(l61 * transportpercell) + lv0(21) = sum(l62 * transportpercell) + lv0(22) = sum(l63 * transportpercell) + lv0(23) = sum(l64 * transportpercell) + lv0(24) = sum(l71 * transportpercell) + lv0(25) = sum(l72 * transportpercell) + lv0(26) = sum(l73 * transportpercell) + lv0(27) = sum(l74 * transportpercell) + lv0(28) = sum(l81 * transportpercell) + 
lv0(29) = sum(l82 * transportpercell) + lv0(30) = sum(l83 * transportpercell) + lv0(31) = sum(l84 * transportpercell) + lv0(32) = sum(l91 * transportpercell) + lv0(33) = sum(l92 * transportpercell) + lv0(34) = sum(l93 * transportpercell) + lv0(35) = sum(l94 * transportpercell) + lv0(36) = sum(l101 * transportpercell) + lv0(37) = sum(l102 * transportpercell) + lv0(38) = sum(l103 * transportpercell) + lv0(39) = sum(l104 * transportpercell) + lv0(40) = sum(l111 * transportpercell) + lv0(41) = sum(l112 * transportpercell) + lv0(42) = sum(l113 * transportpercell) + lv0(43) = sum(l114 * transportpercell) + if(any(ismissing(lv))) then ; if all cells are missing then print 0 + aa = ind(ismissing(lv)) + lv(aa) = 0 + delete(aa) + end if + delete([/ l10, l11, l12, l13, l14, l20, l21, l22, l23, l24, l30, l31 /]) + delete([/ l32, l33, l34, l40, l41, l42, l43, l44, l50, l51, l52, l53 /]) + delete([/ l54, l60, l61, l62, l63, l64, l70, l71, l72, l73, l74, l80 /]) + delete([/ l81, l82, l83, l84, l90, l91, l92, l93, l94, l100, l101 /]) + delete([/ l102, l103, l104, l110, l111, l112, l113, l114 /]) + delete(transportpercell) + exact_lat = exact_lat * -1 + strUnits = "" + + res@gsnLeftString = "(" + start_years_data(iii) + " - " + \ + end_years_data(iii) + ") at (" \ + + sprintf("%4.2f", exact_lat) + "S)" + res@gsnRightString = "Net Transport out of Southern ocean = " + \ + sprintf("%4.2f", lv(0)) + "Sv" + res@tiMainString = vo_datasets(iii) + res@xyLineColors = (/"black", "black"/) + res@gsnXYBarChartColors2 = ("red") + + plot_1(iii) = gsn_csm_xy(wks, lv0, y1_val, res) ; plotting of red bars + + res@gsnXYBarChartColors2 = ("blue4") + res@xyLineColors = (/"blue4", "blue4"/) + plots(iii) = gsn_csm_xy(wks, lv, y_val, res) ; plotting of blue bars + txres = True ; to print volume transport of blue bars + txres@gsnFrame = False + txres@txFontHeightF = 0.009 + gsn_text_ndc(wks, sprintf("%4.3f", lv(0)), 0.27, 0.845, txres) + gsn_text_ndc(wks, sprintf("%4.3f", lv(1)), 0.27, 0.78, txres) + gsn_text_ndc(wks, sprintf("%4.3f", lv(2)), 0.27, 0.715, txres) + gsn_text_ndc(wks, sprintf("%4.3f", lv(3)), 0.27, 0.655, txres) + gsn_text_ndc(wks, sprintf("%4.3f", lv(4)), 0.27, 0.58, txres) + gsn_text_ndc(wks, sprintf("%4.3f", lv(5)), 0.27, 0.51, txres) + gsn_text_ndc(wks, sprintf("%4.3f", lv(6)), 0.27, 0.435, txres) + gsn_text_ndc(wks, sprintf("%4.3f", lv(7)), 0.27, 0.375, txres) + gsn_text_ndc(wks, sprintf("%4.3f", lv(8)), 0.27, 0.31, txres) + gsn_text_ndc(wks, sprintf("%4.3f", lv(9)), 0.27, 0.2475, txres) + gsn_text_ndc(wks, sprintf("%4.3f", lv(10)), 0.27, 0.175, txres) + gsn_text_ndc(wks, sprintf("%4.3f", lv(11)), 0.27, 0.11, txres) + + res@gsnXYBarChartColors2 = ("White") + res@xyLineColors = (/"magenta2", "magenta2"/) + plot_talley(iii) = gsn_csm_xy(wks, talley_SO_Zonal_all_Levels, y_val, res) + ; to plot the magenta for talley values of transport + overlay(plots(iii), plot_1(iii)) + overlay(plot_talley(iii), plots(iii)) + draw(plot_talley(iii)) + frame(wks) + out_var = new((/56/), double) + out_var(0:11) = (/lv/) + out_var(12:55) = (/lv0/) + out_var!0 = "i" + out_var&i = ispan(0, 55, 1) + out_var@var = "transport_per_layer" + out_var@diag_script = "russell18jgr_fig6a.ncl" + out_var@description = "Transport in main layers(blue bars) in i(0-11)" + \ + " and transport in sub layers (red bars) in i(12-55) at " + \ + exact_lat + "S of model " + vo_datasets(iii) + delete(lv) + delete(lv0) + delete(exact_lat) + + nc_filename = config_user_info@work_dir + "russell18jgr-figure6a_" \ + + vo_datasets(iii) + "_" + 
(start_years_data(iii)) + "-" + \
+                  (end_years_data(iii)) + ".nc"
+    ncdf_outfile = ncdf_write(out_var, nc_filename)
+
+    log_provenance(ncdf_outfile, \
+                   plotpath + "." + output_type(), \
+                   "Russell et al 2018 figure 6 part a", \
+                   "mean", \
+                   "sh", \
+                   (/"bar", "vert"/), \
+                   "russell_joellen", \
+                   "russell18jgr", \
+                   (/vo_inputfile_paths(iii), thetao_inputfile_paths(iii), \
+                     so_inputfile_paths(iii)/))
+  end do
+
+end
diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig6b.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig6b.ncl
new file mode 100644
index 0000000000..71323f411d
--- /dev/null
+++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig6b.ncl
@@ -0,0 +1,691 @@
+; #############################################################################
+; russell18jgr-fig6b.ncl
+;
+; Russell, J.L., et al., 2018, J. Geophysical Research – Oceans, 123,
+; 3120-3143. https://doi.org/10.1002/2017JC013461 (figure 6 B)
+;
+; Author: Russell Joellen (University of Arizona, USA)
+;         Goodman Paul (University of Arizona, USA)
+;         Pandde Amarjiit (University of Arizona, USA)
+; #############################################################################
+; Description
+;
+;   - Calculates the time average of thetao, so and vo.
+;   - Regrids temperature and salinity onto the vo grid.
+;   - Calculates the potential density of all cells relative to 0, 2000 and
+;     4000 decibars (i.e. depths of roughly 1977 m and 3948 m for the
+;     latter two).
+;   - Extracts the volcello values at the lat closest to 30S.
+;   - Divides volcello by the latitudinal distance between two data points
+;     on the same lon; this gives the north-south cross-sectional area of
+;     each grid cell.
+;   - Heat transport in each cell is calculated as temperature (degC)
+;     * vo (m/s) * specific heat (kJ/(kg K)) * cross-sectional area (m^2)
+;     * density (kg/m^3).
+;   - Makes lxx mask variables that are 1 within a specific density layer
+;     and missing values elsewhere.
+;   - Multiplies lxx by the heat transport per cell to get the heat
+;     transport of the cells in that layer (lvxx).
+;   - The total sum of lvxx gives the net heat energy transported in that
+;     density layer.
+;   - Plots the heat transport per layer as a bar chart.
+;
+;   Density layers defined as per:
+;     Talley, L.D., 2003. Shallow, intermediate and deep overturning
+;     components of the global heat budget. Journal of Physical
+;     Oceanography 33, 530–560.
+;
+; Required Preprocessor attributes (no_preprocessor)
+;   - None (no preprocessing required);
+;     do not keep a preprocessor in the variable section of the recipe.
+;
+; Required diag_script_info attributes
+;
+;   styleset = "CMIP5" - default
+;
+; Optional diag_script_info attributes (diagnostic specific)
+;
+; Caveats
+;
+;   - MIROC-ESM and BNU-ESM do not work, as their depth variable is not
+;     called lev.
+;   - MRI_ESM1 does not work, as the data is offset by 80 degrees in
+;     longitude, which causes problems in the interpolation.
+;   - CCSM4 and CESM1-CAM5 don't work, as the units of so are "1", which
+;     is not accepted by ESMValTool.
+;   - Transport is very small for NorESM1-M and NorESM1-ME, as their
+;     volcello values look incorrect (very small).
+;
+;
+; Modification history
+;
+;   20190510 - russell_joellen, pandde_amarjiit - written and
+;              implemented for ESMValTool v2.
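+;
+; Note on units (an illustrative sanity check; the numbers below are
+; generic, not taken from the paper): vo (m/s) * cross-sectional
+; area (m^2) is a volume flux in m^3/s; multiplying by theta (degC) and by
+; density * specific heat (1035 kg/m^3 * 4.2 kJ/(kg K) = 4347 kJ/(m^3 K))
+; gives kW, and dividing by 10^12 expresses the result in PW, since
+; 1 PW = 10^15 W = 10^12 kW. For example, 1 Sv (10^6 m^3/s) of water
+; 10 degC warmer than its return flow carries about
+; 10^6 * 10 * 4347 kJ/s ~ 0.043 PW.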
+; +; ############################################################################# + + +load "$diag_scripts/../interface_scripts/interface.ncl" ; load metadata +load "$diag_scripts/shared/plot/style.ncl" ; load plot style functions +load "$diag_scripts/shared/plot/aux_plotting.ncl" + +begin + + enter_msg(DIAG_SCRIPT, "") + vo_items = select_metadata_by_name(input_file_info, "vo") + so_items = select_metadata_by_name(input_file_info, "so") + thetao_items = select_metadata_by_name(input_file_info, "thetao") + volcello_items = select_metadata_by_name(input_file_info, "volcello") + vo_datasets = metadata_att_as_array(vo_items, "dataset") + start_years_data = metadata_att_as_array(vo_items, "start_year") + end_years_data = metadata_att_as_array(vo_items, "end_year") + vo_inputfile_paths = metadata_att_as_array(vo_items, "filename") + thetao_inputfile_paths = metadata_att_as_array(thetao_items, "filename") + so_inputfile_paths = metadata_att_as_array(so_items, "filename") + nDatasets = ListCount(vo_items) + nVolcello = ListCount(volcello_items) + nVariables = ListCount(variable_info) + dim_models = dimsizes(vo_datasets) + if (nVolcello .ne. nDatasets) then + error_msg("f", "russell18jgr-fig6a.ncl", " ", "volcello files " + \ + "for russell18jgr-fig6a.ncl do not match with vo datasets. " + \ + "Please do not add additional variables in variable groups ") + end if +end + +begin + + plotpath = config_user_info@plot_dir + "russell18jgr-fig6b_" \ + + sprinti("%0.4i", min(toint(start_years_data))) + "-" \ + + sprinti("%0.4i", max(toint(end_years_data))) + + system("mkdir -p " + config_user_info@work_dir) + system("mkdir -p " + config_user_info@plot_dir) + + wks = gsn_open_wks(output_type(), plotpath) + plots = new(nDatasets, graphic) + plot_1 = new(nDatasets, graphic) + plot_talley = new(nDatasets, graphic) + + y_val = (/-0.5, 0.5, 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5, 10.5 /) + y1_val = fspan(0.125, 10.875, 44) + yaxis_labels = (/-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11/) + talley_SO_Zonal_all_Levels = (/-0.91, -0.89, -0.13, 0.43, 0.04, -0.16,\ + -0.14, -0.12, -0.03, 0.05, 0.05, 0.00 /) + + res = True + res@gsnXYBarChart = True + res@gsnDraw = False + res@gsnFrame = False + res@vpXF = 0.1 + res@vpYF = 0.75 + res@vpHeightF = 0.5 + res@vpWidthF = 0.4 + res@gsnMaximize = True + res@trYReverse = True + res@tmXBTickStartF = -1.4 + res@tmXBTickSpacingF = 0.2 + res@tmXBTickEndF = 1.4 + res@tmXBMode = "Manual" + res@tmXBMinorPerMajor = 1 + res@trXMinF = -1.4 + res@trXMaxF = 1.4 + res@trYMinF = -1. + res@trYMaxF = 11. 
+ res@gsnXRefLine = 0.0 + res@gsnXYBarChartColors2 = ("red") + res@xyLineColors = (/"black", "black"/) + res@tmYLMode = "Explicit" + res@tmYLLabelsOn = True + res@tmYLValues = yaxis_labels + res@tmYLLabelFont = 5 + res@tmYLLabelFontHeightF = 0.008 + res@tmXBLabelFontHeightF = 0.008 + res@tmYLLabels = (/"~F0~Net~F~", "~F0~Surface~F~", "26.10s~B~0", \ + "26.40s~B~0", "26.90s~B~0", "27.10s~B~0", \ + "27.40s~B~0", "36.8s~B~2", "45.8s~B~4", "45.86s~B~4",\ + "45.92s~B~4", "46.0s~B~4", "~F0~Bottom~F~" /) + res@gsnRightStringFontHeightF = 0.009 + res@gsnLeftStringFontHeightF = 0.009 + + lev_new = fspan(10, 200, 20) + + do iii = 0, dim_models - 1 + + fx_var = read_data(volcello_items[iii]) + + if (all(ismissing(fx_var))) then + fx_variable = "volcello" + error_msg("f", "russell_fig-7i.ncl", " ", "volcello file for " + \ + vo_datasets(iii) \ + + " not found in the metadata file, please specify " \ + + "'volcello' as supplementary variable in the recipe.") + end if + + dataset_so_time = read_data(so_items[iii]) + dataset_so = dim_avg_n_Wrap(dataset_so_time, 0) + delete(dataset_so_time) + dataset_thetao_time = read_data(thetao_items[iii]) + dataset_thetao = dim_avg_n_Wrap(dataset_thetao_time, 0) + delete(dataset_thetao_time) + + if (max(dataset_thetao) .gt. 250) then + dataset_thetao = dataset_thetao - 273.15 + end if + + rho0 = new(2, double) + assignFillValue(dataset_thetao, rho0) + + vo_file = addfile(vo_inputfile_paths(iii), "r") + thetao_file = addfile(thetao_inputfile_paths(iii), "r") + var_test_lat = vo_file->lat ; extracting lat array of vo + var_test_lon = vo_file->lon ; extracting lon array of vo + + if((iscoord(dataset_so, "lat"))) then + a = closest_val(-30.0, var_test_lat) + ; getting index for lat closest to 30S + exact_lat = var_test_lat(a) ; lat value closest to 30S + ; interpolate thetao and so onto vo grid + theta_inter = linint1_n_Wrap(dataset_thetao&lat, dataset_thetao, False, \ + exact_lat, 0, 1) + theta_new = linint1_n_Wrap(dataset_thetao&lon, theta_inter, True, \ + var_test_lon, 0, 2) + delete(theta_inter) + delete(dataset_thetao) + so_inter = linint1_n_Wrap(dataset_so&lat, dataset_so, False, exact_lat, \ + 0, 1) + so_new = linint1_n_Wrap(dataset_so&lon, so_inter, True, var_test_lon, \ + 0, 2) + delete(dataset_so) + delete(so_inter) + + ; potential density calculation + rho0_30s = rho_mwjf(theta_new(:, 0, :), so_new(:, 0, :), 0.0) + rho2_30s = rho_mwjf(theta_new(:, 0, :), so_new(:, 0, :), 1977.0) + rho4_30s = rho_mwjf(theta_new(:, 0, :), so_new(:, 0, :), 3948.0) + + delete(so_new) + + elseif (iscoord(dataset_so, "rlat")) then + + theta_lat = thetao_file->lat ; extracting lat array of thetao + theta_lon = thetao_file->lon ; extracting lon arrays of thetao + lat1 = closest_val(-15.0, var_test_lat(:, 0)) + ; index value of vo-lat closest to -10 + lat2 = closest_val(-45.0, var_test_lat(:, 0)) + ; index value of vo-lat closest to -50 + a = closest_val(-30.0, var_test_lat(:, 0)) + ; index value of vo-lat closest to -30 + exact_lat = var_test_lat(a, 0) ; lat value closest to 30S + ; interpolate thetao and so onto vo grid + theta_inter = linint1_n_Wrap(theta_lat(lat2:lat1, 1), \ + dataset_thetao(:, lat2:lat1, :), \ + False, exact_lat, 0, 1) + delete(dataset_thetao) + if ((isMonotonic(theta_lon(a, :)) .eq. 1) .and. \ + (isMonotonic(var_test_lon(a, :)) .eq. 
1)) then + theta_new = linint1_n_Wrap(theta_lon(a, :), theta_inter, True, \ + var_test_lon(a, :), 0, 2) + + else + theta_new = theta_inter + + end if + delete(theta_inter) + + so_inter = linint1_n_Wrap(theta_lat(lat2:lat1, 1), \ + dataset_so(:, lat2:lat1, :), False, \ + exact_lat, 0, 1) + delete(dataset_so) + + if ((isMonotonic(theta_lon(a, :)) .eq. 1) .and. \ + (isMonotonic(var_test_lon(a, :)) .eq. 1)) then + so_new = linint1_n_Wrap(theta_lon(a, :), so_inter, True, \ + var_test_lon(a, :), 0, 2) + else + so_new = so_inter + end if + delete(so_inter) + delete(theta_lat) + delete(theta_lon) + ; potential density calculation + rho0_30s = rho_mwjf(theta_new(:, 0, :), so_new(:, 0, :), 0.) + rho2_30s = rho_mwjf(theta_new(:, 0, :), so_new(:, 0, :), 1977.0) + rho4_30s = rho_mwjf(theta_new(:, 0, :), so_new(:, 0, :), 3948.0) + delete(so_new) + else + + theta_lat = thetao_file->lat ; extracting lat array of thetao + theta_lon = thetao_file->lon ; extracting lon array of thetao + + lat1 = closest_val(-15.0, var_test_lat(:, 0)) + ; index value of vo-lat closest to 10S + lat2 = closest_val(-45.0, var_test_lat(:, 0)) + ; index value of vo-lat closest to 50S + a = closest_val(-30.0, var_test_lat(:, 0)) + ; index value of vo-lat closest to 30S + exact_lat = var_test_lat(a, 0) ; lat value closest to 30S + ; interpolate thetao and so onto vo grid + + theta_new = linint1_n_Wrap(theta_lat(lat2:lat1, 1), \ + dataset_thetao(:, lat2:lat1, :), False, \ + exact_lat, 0, 1) + so_new = linint1_n_Wrap(theta_lat(lat2:lat1, 1), \ + dataset_so(:, lat2:lat1, :), False, \ + exact_lat, 0, 1) + + delete(dataset_thetao) + delete(dataset_so) + delete(theta_lat) + delete(theta_lon) + ; potential density calculation + rho0_30s = rho_mwjf(theta_new(:, 0, :), so_new(:, 0, :), 0.) + rho2_30s = rho_mwjf(theta_new(:, 0, :), so_new(:, 0, :), 1977.0) + rho4_30s = rho_mwjf(theta_new(:, 0, :), so_new(:, 0, :), 3948.0) + delete(so_new) + + end if + + dataset_vo_time = read_data(vo_items[iii]) + dataset_vo = dim_avg_n_Wrap(dataset_vo_time, 0) + delete(dataset_vo_time) + delete(var_test_lon) + + volcello = fx_var + delete(fx_var) + ; volcello interpolation and transport calculation + if(iscoord(dataset_vo, "lat")) then + + var_tmp2 = dataset_vo(:, a, :) ; y transport of lat closest to 30S + + dlat = tofloat(abs(dataset_vo&lat(a) - dataset_vo&lat(a+1)) \ + * 6.37e06 * 0.0174533) + ; dlat is the north-south distance between 2 consecutive data latitudes + volumecello_3d = volcello/dlat + ; north-south crossectional area is volcello / dlat + if(abs(volcello&lev(0)) .gt. abs(volcello&lev(1))) then + volcello_2d = volumecello_3d(::-1, a, :) + ; reversing lev if it is in descending order. + else + volcello_2d = volumecello_3d(:, a, :) + end if + transportpercell = var_tmp2 * volcello_2d * theta_new(:, 0, :) * \ + (1035.0 * 4.2) / (10 ^ 12) + ; 10^12 - unit conversion from W to PW, + ; 4.2KJ/(KG*K)- specific heat of water, 1035 kg/m^3 - density of water + + copy_VarCoords(var_tmp2, transportpercell) + + elseif (iscoord(dataset_vo, "rlat")) then + + var_tmp2 = dataset_vo(:, a, :) ; y transport of lat closest to -30 + drlat = tofloat(abs(var_test_lat(a, 0) - var_test_lat(a + 1, 0)) \ + * 6.37e06 * 0.0174533) + ; drlat is the north-south distance between 2 consecutive data latitudes + + volumecello_3d = volcello/drlat + ; north-south crossectional area is volcello / drlat + if(abs(volcello&lev(0)) .gt. abs(volcello&lev(1))) then + volcello_2d = volumecello_3d(::-1, a, :) + ; reversing lev if it is in descending order. 
+ else + volcello_2d = volumecello_3d(:, a, :) + end if + transportpercell = var_tmp2 * volcello_2d * theta_new(:, 0, :) \ + * (1035.0 * 4.2) / (10 ^ 12) + ; 10^12 - unit conversion from W to PW + ; 4.2KJ/(KG*K) - specific heat of water, 1035 kg/m^3 - density of water + copy_VarCoords(var_tmp2, transportpercell) + + else + + var_tmp2 = dataset_vo(:, a, :) ; y transport of lat closest to -30 + dlat = tofloat(abs(var_test_lat(a, 0) - var_test_lat(a + 1, 0)) \ + * 6.37e06 * 0.0174533) + ; dlat is the north-south distance between 2 consecutive data latitudes + volumecello_3d = volcello/dlat + ; north-south crossectional area is volcello / dlat + if(abs(volcello&lev(1)) .gt. abs(volcello&lev(2))) then + volcello_2d = volumecello_3d(::-1, a, :) + ; reversing lev if it is in descending order. + else + volcello_2d = volumecello_3d(:, a, :) + end if + transportpercell = var_tmp2 * volcello_2d * theta_new(:, 0, :) \ + * (1035.0 * 4.2) / (10 ^ 12) + ; 10^12 - unit conversion from W to PW + ; 4.2KJ/(KG*K)- specific heat of water, 1035 kg/m^3 - density of water + copy_VarCoords(var_tmp2, transportpercell) + + end if + delete(volcello) + delete(var_test_lat) + delete(dataset_vo) + delete(volcello_2d) + delete(volumecello_3d) + delete(var_tmp2) + delete(theta_new) + + rho0_30s = 1000. * (rho0_30s - 1.0) + rho2_30s = 1000. * (rho2_30s - 1.0) + rho4_30s = 1000. * (rho4_30s - 1.0) + + ; making masks for density levels based on : Talley, L.D., 2003. + l10 = where((rho0_30s .lt. 26.10), 1, rho0@_FillValue) + l11 = where((rho0_30s .lt. 24.900), 1, rho0@_FillValue) + l12 = where((rho0_30s .ge. 24.900) .and. (rho0_30s .lt. 25.300), 1, \ + rho0@_FillValue) + l13 = where((rho0_30s .ge. 25.300) .and. (rho0_30s .lt. 25.700), 1, \ + rho0@_FillValue) + l14 = where((rho0_30s .ge. 25.700) .and. (rho0_30s .lt. 26.100), 1, \ + rho0@_FillValue) + + l20 = where((rho0_30s .ge. 26.100) .and. (rho0_30s .lt. 26.400), 1, \ + rho0@_FillValue) + l21 = where((rho0_30s .ge. 26.100) .and. (rho0_30s .lt. 26.175), 1, \ + rho0@_FillValue) + l22 = where((rho0_30s .ge. 26.175) .and. (rho0_30s .lt. 26.250), 1, \ + rho0@_FillValue) + l23 = where((rho0_30s .ge. 26.250) .and. (rho0_30s .lt. 26.325), 1, \ + rho0@_FillValue) + l24 = where((rho0_30s .ge. 26.325) .and. (rho0_30s .lt. 26.400), 1, \ + rho0@_FillValue) + + l30 = where((rho0_30s .ge. 26.400) .and. (rho0_30s .lt. 26.900), 1, \ + rho0@_FillValue) + l31 = where((rho0_30s .ge. 26.400) .and. (rho0_30s .lt. 26.525), 1, \ + rho0@_FillValue) + l32 = where((rho0_30s .ge. 26.525) .and. (rho0_30s .lt. 26.650), 1, \ + rho0@_FillValue) + l33 = where((rho0_30s .ge. 26.650) .and. (rho0_30s .lt. 26.775), 1, \ + rho0@_FillValue) + l34 = where((rho0_30s .ge. 26.775) .and. (rho0_30s .lt. 26.900), 1, \ + rho0@_FillValue) + + l40 = where((rho0_30s .ge. 26.900) .and. (rho0_30s .lt. 27.100), 1, \ + rho0@_FillValue) + l41 = where((rho0_30s .ge. 26.900) .and. (rho0_30s .lt. 26.950), 1, \ + rho0@_FillValue) + l42 = where((rho0_30s .ge. 26.950) .and. (rho0_30s .lt. 27.000), 1, \ + rho0@_FillValue) + l43 = where((rho0_30s .ge. 27.000) .and. (rho0_30s .lt. 27.050), 1, \ + rho0@_FillValue) + l44 = where((rho0_30s .ge. 27.050) .and. (rho0_30s .lt. 27.100), 1, \ + rho0@_FillValue) + + l50 = where((rho0_30s .ge. 27.100) .and. (rho0_30s .lt. 27.400), 1, \ + rho0@_FillValue) + l51 = where((rho0_30s .ge. 27.100) .and. (rho0_30s .lt. 27.175), 1, \ + rho0@_FillValue) + l52 = where((rho0_30s .ge. 27.175) .and. (rho0_30s .lt. 27.250), 1, \ + rho0@_FillValue) + l53 = where((rho0_30s .ge. 27.250) .and. (rho0_30s .lt. 
27.325), 1, \ + rho0@_FillValue) + l54 = where((rho0_30s .ge. 27.325) .and. (rho0_30s .lt. 27.400), 1, \ + rho0@_FillValue) + + l60 = where((rho0_30s .ge. 27.400) .and. (rho2_30s .lt. 36.800), 1, \ + rho0@_FillValue) + l61 = where((rho0_30s .ge. 27.400) .and. (rho0_30s .lt. 27.500), 1, \ + rho0@_FillValue) + l62 = where((rho0_30s .ge. 27.500) .and. (rho2_30s .lt. 36.700), 1, \ + rho0@_FillValue) + l63 = where((rho2_30s .ge. 36.700) .and. (rho2_30s .lt. 36.750), 1, \ + rho0@_FillValue) + l64 = where((rho2_30s .ge. 36.750) .and. (rho2_30s .lt. 36.800), 1, \ + rho0@_FillValue) + + l70 = where((rho2_30s .ge. 36.800) .and. (rho4_30s .lt. 45.800), 1, \ + rho0@_FillValue) + l71 = where((rho2_30s .ge. 36.800) .and. (rho2_30s .lt. 36.850), 1, \ + rho0@_FillValue) + l72 = where((rho2_30s .ge. 36.850) .and. (rho2_30s .lt. 36.900), 1, \ + rho0@_FillValue) + l73 = where((rho2_30s .ge. 36.900) .and. (rho2_30s .lt. 36.950), 1, \ + rho0@_FillValue) + l74 = where((rho2_30s .ge. 36.950) .and. (rho4_30s .lt. 45.800), 1, \ + rho0@_FillValue) + + l80 = where((rho4_30s .ge. 45.800) .and. (rho4_30s .lt. 45.860), 1, \ + rho0@_FillValue) + l81 = where((rho4_30s .ge. 45.800) .and. (rho4_30s .lt. 45.815), 1, \ + rho0@_FillValue) + l82 = where((rho4_30s .ge. 45.815) .and. (rho4_30s .lt. 45.830), 1, \ + rho0@_FillValue) + l83 = where((rho4_30s .ge. 45.830) .and. (rho4_30s .lt. 45.845), 1, \ + rho0@_FillValue) + l84 = where((rho4_30s .ge. 45.845) .and. (rho4_30s .lt. 45.860), 1, \ + rho0@_FillValue) + + l90 = where((rho4_30s .ge. 45.860) .and. (rho4_30s .lt. 45.920), 1, \ + rho0@_FillValue) + l91 = where((rho4_30s .ge. 45.860) .and. (rho4_30s .lt. 45.875), 1, \ + rho0@_FillValue) + l92 = where((rho4_30s .ge. 45.875) .and. (rho4_30s .lt. 45.890), 1, \ + rho0@_FillValue) + l93 = where((rho4_30s .ge. 45.890) .and. (rho4_30s .lt. 45.905), 1, \ + rho0@_FillValue) + l94 = where((rho4_30s .ge. 45.905) .and. (rho4_30s .lt. 45.920), 1, \ + rho0@_FillValue) + + l100 = where((rho4_30s .ge. 45.920) .and. (rho4_30s .lt. 46.000), 1, \ + rho0@_FillValue) + l101 = where((rho4_30s .ge. 45.920) .and. (rho4_30s .lt. 45.940), 1, \ + rho0@_FillValue) + l102 = where((rho4_30s .ge. 45.940) .and. (rho4_30s .lt. 45.960), 1, \ + rho0@_FillValue) + l103 = where((rho4_30s .ge. 45.960) .and. (rho4_30s .lt. 45.980), 1, \ + rho0@_FillValue) + l104 = where((rho4_30s .ge. 45.980) .and. (rho4_30s .lt. 46.000), 1, \ + rho0@_FillValue) + + l110 = where((rho4_30s .ge. 46.000), 1, rho0@_FillValue) + l111 = where((rho4_30s .ge. 46.000) .and. (rho4_30s .lt. 46.050), 1, \ + rho0@_FillValue) + l112 = where((rho4_30s .ge. 46.050) .and. (rho4_30s .lt. 46.100), 1, \ + rho0@_FillValue) + l113 = where((rho4_30s .ge. 46.100) .and. (rho4_30s .lt. 46.150), 1, \ + rho0@_FillValue) + l114 = where((rho4_30s .ge. 
46.150), 1, rho0@_FillValue) + + delete(rho0_30s) + delete(rho2_30s) + delete(rho4_30s) + + ; assignning filling values from rho0 to masked layers + assignFillValue(rho0, l10) + assignFillValue(rho0, l20) + assignFillValue(rho0, l30) + assignFillValue(rho0, l40) + assignFillValue(rho0, l50) + assignFillValue(rho0, l60) + assignFillValue(rho0, l70) + assignFillValue(rho0, l80) + assignFillValue(rho0, l90) + assignFillValue(rho0, l100) + assignFillValue(rho0, l110) + assignFillValue(rho0, l11) + assignFillValue(rho0, l21) + assignFillValue(rho0, l31) + assignFillValue(rho0, l41) + assignFillValue(rho0, l51) + assignFillValue(rho0, l61) + assignFillValue(rho0, l71) + assignFillValue(rho0, l81) + assignFillValue(rho0, l91) + assignFillValue(rho0, l101) + assignFillValue(rho0, l111) + assignFillValue(rho0, l12) + assignFillValue(rho0, l22) + assignFillValue(rho0, l32) + assignFillValue(rho0, l42) + assignFillValue(rho0, l52) + assignFillValue(rho0, l62) + assignFillValue(rho0, l72) + assignFillValue(rho0, l82) + assignFillValue(rho0, l92) + assignFillValue(rho0, l102) + assignFillValue(rho0, l112) + assignFillValue(rho0, l13) + assignFillValue(rho0, l23) + assignFillValue(rho0, l33) + assignFillValue(rho0, l43) + assignFillValue(rho0, l53) + assignFillValue(rho0, l63) + assignFillValue(rho0, l73) + assignFillValue(rho0, l83) + assignFillValue(rho0, l93) + assignFillValue(rho0, l103) + assignFillValue(rho0, l113) + assignFillValue(rho0, l14) + assignFillValue(rho0, l24) + assignFillValue(rho0, l34) + assignFillValue(rho0, l44) + assignFillValue(rho0, l54) + assignFillValue(rho0, l64) + assignFillValue(rho0, l74) + assignFillValue(rho0, l84) + assignFillValue(rho0, l94) + assignFillValue(rho0, l104) + assignFillValue(rho0, l114) + delete(rho0) + + lv = new((/12/), double) ; lv is array of big blue bars + assignFillValue(l10, lv) + lv(1) = sum(l10 * transportpercell) + lv(2) = sum(l20 * transportpercell) + lv(3) = sum(l30 * transportpercell) + lv(4) = sum(l40 * transportpercell) + lv(5) = sum(l50 * transportpercell) + lv(6) = sum(l60 * transportpercell) + lv(7) = sum(l70 * transportpercell) + lv(8) = sum(l80 * transportpercell) + lv(9) = sum(l90 * transportpercell) + lv(10) = sum(l100 * transportpercell) + lv(11) = sum(l110 * transportpercell) + lv(0) = 0 + lv(0) = sum(lv) + lv0 = new((/44/), double) ; lv0 is array of small red bars + assignFillValue(l104, lv0) + lv0(0) = sum(l11 * transportpercell) + lv0(1) = sum(l12 * transportpercell) + lv0(2) = sum(l13 * transportpercell) + lv0(3) = sum(l14 * transportpercell) + lv0(4) = sum(l21 * transportpercell) + lv0(5) = sum(l22 * transportpercell) + lv0(6) = sum(l23 * transportpercell) + lv0(7) = sum(l24 * transportpercell) + lv0(8) = sum(l31 * transportpercell) + lv0(9) = sum(l32 * transportpercell) + lv0(10) = sum(l33 * transportpercell) + lv0(11) = sum(l34 * transportpercell) + lv0(12) = sum(l41 * transportpercell) + lv0(13) = sum(l42 * transportpercell) + lv0(14) = sum(l43 * transportpercell) + lv0(15) = sum(l44 * transportpercell) + lv0(16) = sum(l51 * transportpercell) + lv0(17) = sum(l52 * transportpercell) + lv0(18) = sum(l53 * transportpercell) + lv0(19) = sum(l54 * transportpercell) + lv0(20) = sum(l61 * transportpercell) + lv0(21) = sum(l62 * transportpercell) + lv0(22) = sum(l63 * transportpercell) + lv0(23) = sum(l64 * transportpercell) + lv0(24) = sum(l71 * transportpercell) + lv0(25) = sum(l72 * transportpercell) + lv0(26) = sum(l73 * transportpercell) + lv0(27) = sum(l74 * transportpercell) + lv0(28) = sum(l81 * transportpercell) + 
lv0(29) = sum(l82 * transportpercell) + lv0(30) = sum(l83 * transportpercell) + lv0(31) = sum(l84 * transportpercell) + lv0(32) = sum(l91 * transportpercell) + lv0(33) = sum(l92 * transportpercell) + lv0(34) = sum(l93 * transportpercell) + lv0(35) = sum(l94 * transportpercell) + lv0(36) = sum(l101 * transportpercell) + lv0(37) = sum(l102 * transportpercell) + lv0(38) = sum(l103 * transportpercell) + lv0(39) = sum(l104 * transportpercell) + lv0(40) = sum(l111 * transportpercell) + lv0(41) = sum(l112 * transportpercell) + lv0(42) = sum(l113 * transportpercell) + lv0(43) = sum(l114 * transportpercell) + if(any(ismissing(lv))) then ; if all cells are missing then print 0 + aa = ind(ismissing(lv)) + lv(aa) = 0 + delete(aa) + end if + delete([/ l10, l11, l12, l13, l14, l20, l21, l22, l23, l24, l30, l31 /]) + delete([/ l32, l33, l34, l40, l41, l42, l43, l44, l50, l51, l52, l53 /]) + delete([/ l54, l60, l61, l62, l63, l64, l70, l71, l72, l73, l74, l80 /]) + delete([/ l81, l82, l83, l84, l90, l91, l92, l93, l94, l100, l101 /]) + delete([/ l102, l103, l104, l110, l111, l112, l113, l114 /]) + delete(transportpercell) + exact_lat = exact_lat * -1 + strUnits = "" + + res@gsnLeftString = "(" + start_years_data(iii) + " - " + \ + end_years_data(iii) + ") at (" + sprintf("%4.2f", exact_lat) + "S)" + res@gsnRightString = "Net energy out of Southern ocean = " + \ + sprintf("%4.2f", lv(0)) + "PW" + res@tiMainString = vo_datasets(iii) + res@gsnXYBarChartColors2 = ("red") + res@xyLineColors = (/"black", "black"/) + + plot_1(iii) = gsn_csm_xy(wks, lv0, y1_val, res) ; plotting of red bars + res@gsnXYBarChartColors2 = ("blue4") + res@xyLineColors = (/"blue4", "blue4"/) + plots(iii) = gsn_csm_xy(wks, lv, y_val, res) ; plotting of blue bars + txres = True ; to print the volume transport in blue bars + txres@gsnFrame = False + txres@txFontHeightF = 0.009 + gsn_text_ndc(wks, sprintf("%4.3f", lv(0)), 0.27, 0.845, txres) + gsn_text_ndc(wks, sprintf("%4.3f", lv(1)), 0.27, 0.78, txres) + gsn_text_ndc(wks, sprintf("%4.3f", lv(2)), 0.27, 0.715, txres) + gsn_text_ndc(wks, sprintf("%4.3f", lv(3)), 0.27, 0.655, txres) + gsn_text_ndc(wks, sprintf("%4.3f", lv(4)), 0.27, 0.58, txres) + gsn_text_ndc(wks, sprintf("%4.3f", lv(5)), 0.27, 0.51, txres) + gsn_text_ndc(wks, sprintf("%4.3f", lv(6)), 0.27, 0.435, txres) + gsn_text_ndc(wks, sprintf("%4.3f", lv(7)), 0.27, 0.375, txres) + gsn_text_ndc(wks, sprintf("%4.3f", lv(8)), 0.27, 0.31, txres) + gsn_text_ndc(wks, sprintf("%4.3f", lv(9)), 0.27, 0.2475, txres) + gsn_text_ndc(wks, sprintf("%4.3f", lv(10)), 0.27, 0.175, txres) + gsn_text_ndc(wks, sprintf("%4.3f", lv(11)), 0.27, 0.11, txres) + res@gsnXYBarChartColors2 = ("White") + res@xyLineColors = (/"magenta2", "magenta2"/) + plot_talley(iii) = gsn_csm_xy(wks, talley_SO_Zonal_all_Levels, y_val, res) + overlay(plots(iii), plot_1(iii)) + overlay(plot_talley(iii), plots(iii)) + draw(plot_talley(iii)) + + frame(wks) + out_var = new((/56/), double) + out_var(0:11) = (/lv/) + out_var(12:55) = (/lv0/) + out_var!0 = "i" + out_var&i = ispan(1, 56, 1) + out_var@var = "energy_transport_per_layer" + out_var@diag_script = "russell_fig-6.ncl" + out_var@description = "Transport in main layers(blue bars) in i(0-11) " + \ + "and transport in sub layers(red bars) in i(12-55) at " + \ + exact_lat + "S of model " + vo_datasets(iii) + delete(lv) + delete(lv0) + delete(exact_lat) + + nc_filename = config_user_info@work_dir + "russell18jgr_fig6b_" \ + + vo_datasets(iii) + "_" + (start_years_data(iii)) + "-" + \ + (end_years_data(iii)) + ".nc" + ncdf_outfile = 
ncdf_write(out_var, nc_filename)
+
+    log_provenance(ncdf_outfile, \
+                   plotpath + "." + output_type(), \
+                   "Russell et al 2018 figure 6b", \
+                   "mean", \
+                   "sh", \
+                   (/"bar", "vert"/), \
+                   "russell_joellen", \
+                   "russell18jgr", \
+                   (/vo_inputfile_paths(iii), thetao_inputfile_paths(iii), \
+                     so_inputfile_paths(iii)/))
+  end do
+
+end
diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig7h.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig7h.ncl
new file mode 100644
index 0000000000..7700790e65
--- /dev/null
+++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig7h.ncl
@@ -0,0 +1,198 @@
+; #############################################################################
+; russell18jgr_fig7h.ncl
+;
+; Based on Figure 7h - Russell, J.L., et al., 2018, J. Geophysical Research –
+; Oceans, 123, 3120-3143. https://doi.org/10.1002/2017JC013461 (figure 7h)
+;
+; Author: Pandde Amarjiit (University of Arizona, USA)
+;         Russell Joellen (University of Arizona, USA)
+;         Goodman Paul (University of Arizona, USA)
+;
+; #############################################################################
+; Description
+;
+;   - Uses original grid (no regridding).
+;   - Masks the fgco2 data points on land using the ESMValTool preprocessor.
+;   - Calculates time- and longitude-averaged fgco2 values.
+;   - Plots the averaged fgco2 values vs latitude as an xy line plot.
+;
+; Required diag_script_info attributes (diagnostic specific)
+;
+;   - styleset : CMIP5 - default
+;
+; Required Preprocessor attributes (no_preprocessor)
+;   - None (no preprocessing required);
+;     in the recipe just add (time_average: false)
+;
+; Caveats:
+;
+;   1) Models on i-j grids are assigned lat-lon arrays assuming the grid
+;      has constant lat across j and constant lon across i.
+;   2) The CNRM-CM5 model doesn't work with the ncdf_write function, as it
+;      has x-y coordinate variables instead of lat-lon, i-j or rlat-rlon.
+;
+;
+; Modification history
+;
+;   20190510 - russell_joellen, pandde_amarjiit - written and
+;              implemented for ESMValTool v2.
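+;
+; Note on units (an illustrative worked example, not a value from the
+; paper): the factor 31536000000 applied below is
+; 1000 g/kg * 31,536,000 s/yr (a 365-day year), so e.g. an uptake of
+; 1e-8 kg C m-2 s-1 becomes about 315 gC m-2 yr-1; the sign is flipped,
+; presumably so that flux out of the ocean plots as positive.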
+; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" ; load metadata +load "$diag_scripts/shared/plot/style.ncl" ; load plot style functions +load "$diag_scripts/shared/plot/aux_plotting.ncl" + +begin + + enter_msg(DIAG_SCRIPT, "") + var0 = variable_info[0]@short_name + info_items = select_metadata_by_name(input_file_info, var0) + datasetnames = metadata_att_as_array(info_items, "dataset") + start_years_data = metadata_att_as_array(info_items, "start_year") + end_years_data = metadata_att_as_array(info_items, "end_year") + inputfile_paths = metadata_att_as_array(info_items, "filename") + nDatasets = ListCount(info_items) + +end + +begin + + ; Set annotations + annots = project_style(input_file_info, diag_script_info, "annots") + colors = project_style(input_file_info, diag_script_info, "colors") + dashes = project_style(input_file_info, diag_script_info, "dashes") + thicks = project_style(input_file_info, diag_script_info, "thicks") + + plotpath = config_user_info@plot_dir + "Russell_figure7h_" + var0 + "_" \ + + sprinti("%0.4i", min(toint(start_years_data))) + "-" \ + + sprinti("%0.4i", max(toint(end_years_data))) + + system("mkdir -p " + config_user_info@work_dir) + system("mkdir -p " + config_user_info@plot_dir) + + wks = gsn_open_wks(output_type(), plotpath) + plot = new(nDatasets, graphic) + + res = True + res@tmXBLabelFontHeightF = 0.009 + res@tmYLLabelFontHeightF = 0.009 + res@gsnFrame = False + res@gsnDraw = False + res@trXMinF = -80. + res@trXMaxF = -30. + res@trYMaxF = 40. + res@trYMinF = -60. + res@vpHeightF = 0.65 + res@vpWidthF = 0.65 + res@tiMainString = "Zonal-mean Flux" + res@gsnYRefLine = 0 + res@gsnYRefLineDashPattern = 2 + res@gsnYRefLineColor = "grey" + res@vpYF = 0.9 + res@vpXF = 0.08 + res@gsnRightString = "Units - gC/ (m~S~2~N~ * yr)" + res@gsnRightStringFontHeightF = 15. + res@gsnLeftString = "Russell et al -2018 - Figure 7 h" + res@gsnLeftStringFontHeightF = 17. + res@tmYLMinorPerMajor = 1 + res@tmYLTickStartF = -60. + res@tmYLTickSpacingF = 10. + res@tmYLTickEndF = 40. + res@tmYLMode = "Manual" + res@tmXBMinorPerMajor = 0 + res@tmXBTickStartF = -80. + res@tmXBTickSpacingF = 5. + res@tmXBTickEndF = -30. + res@tmXBMode = "Manual" + res@tiYAxisFontHeightF = 0.0175 + res@tiYAxisOffsetXF = 0.01 + res@tiXAxisString = "Latitude" + + do iii = 0, nDatasets - 1 + dataset = read_data(info_items[iii]) + infile_path = inputfile_paths(iii) + + if (iscoord(dataset, "i")) then + ; to convert i-j coordinate variable to lat -lon + delete(dataset&i) + delete(dataset&j) + infile_iii = addfile(infile_path, "r") + area_lon = infile_iii->lon + area_lat = infile_iii->lat + + dataset!0 = "lat" + dataset&lat = tofloat(area_lat(:, 0)) + dataset!1 = "lon" + dataset&lon = tofloat(area_lon(0, :)) + delete(area_lat) + delete(area_lon) + end if + dataset = dataset * -31536000000. 
+    ; unit conversion factor from kg/(m^2 * sec) to g/(m^2 * yr)
+    if (.not. (iscoord(dataset, "lat"))) then
+
+      infile_iii = addfile(infile_path, "r")
+      area_lon = infile_iii->lon
+      area_lat = infile_iii->lat
+      var_lat = area_lat(:, 10)
+      delete(area_lat)
+      delete(area_lon)
+    end if
+    var_lon_avg = dim_avg_n_Wrap(dataset, 1)  ; taking zonal average
+    var_lon_avg@var = var0
+    var_lon_avg@diag_script = "russell18jgr_fig7h.ncl"
+    res@xyDashPatterns = dashes(iii)
+    res@xyLineColors = colors(iii)
+    res@xyExplicitLegendLabels = annots(iii)
+
+    if (iscoord(var_lon_avg, "lat")) then
+      plot(iii) = gsn_csm_xy(wks, var_lon_avg&lat, var_lon_avg, res)
+    else
+      plot(iii) = gsn_csm_xy(wks, var_lat, var_lon_avg, res)
+      delete(var_lat)
+    end if
+    if (iii .ne. 0) then
+      overlay(plot(0), plot(iii))
+    end if
+
+    nc_filename = config_user_info@work_dir + "russell18jgr_fig-7h_" + var0 \
+                  + "_" + datasetnames(iii) + "_" + (start_years_data(iii)) + "-" + \
+                  (end_years_data(iii)) + ".nc"
+
+    ncdf_outfile = ncdf_write(var_lon_avg, nc_filename)
+    delete(var_lon_avg)
+    delete(dataset)
+
+  end do
+  draw(plot(0))
+
+  legend = create "Legend" legendClass wks
+    "vpXF" : 0.625                     ; orientation on page
+    "vpYF" : 0.925
+    "vpWidthF" : 0.5                   ; width
+    "vpHeightF" : 0.725                ; height
+    "lgPerimOn" : False                ; no perimeter
+    "lgItemCount" : dimsizes(annots)   ; how many
+    "lgLineLabelStrings" : annots      ; labels
+    "lgLabelsOn" : False               ; no default labels
+    "lgLineLabelFontHeightF" : 0.0085  ; font height
+    "lgDashIndexes" : dashes           ; line patterns
+    "lgLineColors" : colors
+    "lgMonoLineLabelFontColor" : True  ; one label color
+  end create
+  draw(legend)
+
+  frame(wks)
+
+  log_provenance(ncdf_outfile, \
+                 plotpath + "." + output_type(), \
+                 "Russell et al 2018 figure 7h", \
+                 "mean", \
+                 "sh", \
+                 "zonal", \
+                 "russell_joellen", \
+                 "russell18jgr", \
+                 infile_path)
+
+end
diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig7i.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig7i.ncl
new file mode 100644
index 0000000000..cf14857a7b
--- /dev/null
+++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig7i.ncl
@@ -0,0 +1,234 @@
+; #############################################################################
+; russell18jgr-fig7i.ncl
+;
+; Based on Figure 7i - Russell, J.L., et al., 2018, J. Geophysical Research –
+; Oceans, 123, 3120-3143. https://doi.org/10.1002/2017JC013461 (figure 7i)
+;
+; Author: Pandde Amarjiit (University of Arizona, USA)
+;         Russell Joellen (University of Arizona, USA)
+;         Goodman Paul (University of Arizona, USA)
+;         ESMVal project
+; #############################################################################
+; Description
+;
+;   - Uses original grid (no regridding).
+;   - Masks the fgco2 data points on land using the ESMValTool preprocessor.
+;   - If the variable's coordinates are lat-lon, the script calculates
+;     areacello itself; otherwise it reads the areacello variable from the
+;     input file.
+;   - Multiplies the time-averaged fgco2 by areacello to get the flux
+;     per cell.
+;   - flux per lat = sum of all cells across that lat.
+;   - integrated flux at a lat = cumulative sum of flux per lat from the
+;     pole up to that lat.
+;   - Plots the integrated fgco2 flux vs latitude as an xy line plot.
+;
+; Required diag_script_info attributes (diagnostic specific)
+;
+;   - styleset : CMIP5 - default
+;
+; Caveats:
+;
+; Modification history
+;
+;   20190510 - russell_joellen, pandde_amarjiit - written and
+;              implemented for ESMValTool v2.
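+;
+; Note on units (illustrative, not a value from the paper): the factor
+; 0.000031536 applied below is 31,536,000 s/yr divided by 1e12 kg/Pg,
+; i.e. 1 kg C/s sustained over a 365-day year amounts to
+; 3.1536e7 kg = 3.1536e-5 Pg; the sign flip, as in figure 7h, presumably
+; makes flux out of the ocean positive.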
+; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" ; load metadata +load "$diag_scripts/shared/plot/style.ncl" ; load plot style functions +load "$diag_scripts/shared/plot/aux_plotting.ncl" + +begin + + enter_msg(DIAG_SCRIPT, "") + fgco2_items = select_metadata_by_name(input_file_info, "fgco2") + datasetnames = metadata_att_as_array(fgco2_items, "dataset") + start_years_data = metadata_att_as_array(fgco2_items, "start_year") + end_years_data = metadata_att_as_array(fgco2_items, "end_year") + inputfile_paths = metadata_att_as_array(fgco2_items, "filename") + areacello_items = select_metadata_by_name(input_file_info, "areacello") + areadatasets = metadata_att_as_array(areacello_items, "dataset") + nDatasets = ListCount(fgco2_items) + +end + +begin + + ; Set annotations + annots = project_style(input_file_info, diag_script_info, "annots") + colors = project_style(input_file_info, diag_script_info, "colors") + dashes = project_style(input_file_info, diag_script_info, "dashes") + + var0 = variable_info[0]@short_name + + plotpath = config_user_info@plot_dir + "Russell_figure7i_" + var0 + "_" \ + + sprinti("%0.4i", min(toint(start_years_data))) + "-" \ + + sprinti("%0.4i", max(toint(end_years_data))) + + system("mkdir -p " + config_user_info@work_dir) + system("mkdir -p " + config_user_info@plot_dir) + + wks = gsn_open_wks(output_type(), plotpath) + plot = new(nDatasets, graphic) + + res = True + res@tmXBLabelFontHeightF = 0.008 + res@tmYLLabelFontHeightF = 0.008 + res@gsnFrame = False + res@gsnDraw = False + res@trXMinF = -80. + res@trXMaxF = -30. + res@trYMaxF = 1. + res@trYMinF = -1.2 + res@vpHeightF = 0.65 + res@vpWidthF = 0.65 + res@tiMainString = "Integrated Flux" + res@gsnYRefLine = 0 + res@gsnYRefLineDashPattern = 2 + res@gsnYRefLineColor = "grey" + res@vpYF = 0.9 + res@vpXF = 0.08 + res@gsnRightString = "Units - ( PgC/yr )" + res@gsnRightStringFontHeightF = 13. + res@gsnLeftStringFontHeightF = 13. + res@tmXBMinorPerMajor = 0 + res@tmXBTickStartF = -80. + res@tmXBTickSpacingF = 5. + res@tmXBTickEndF = -30. + res@tmXBMode = "Manual" + res@tmYLMinorPerMajor = 1 + res@tmYLTickStartF = -1.2 + res@tmYLTickSpacingF = 0.2 + res@tmYLTickEndF = 1.0 + res@tmYLMode = "Manual" + res@tiYAxisFontHeightF = 0.0175 + res@tiYAxisOffsetXF = 0.01 + + do iii = 0, nDatasets - 1 + dataset = read_data(fgco2_items[iii]) + infile_path = inputfile_paths(iii) + infile_iii = addfile(infile_path, "r") + + if (iscoord(dataset, "lat")) then + var_lat = infile_iii->lat + var_lon = infile_iii->lon + radius_earth = 6.37e06 + deg2rad_convF = 0.0174533 + dlat = abs(var_lat(20) - var_lat(19)) + ; some models have closer lat points near poles. 
+      ; hence taking the difference of the 20th and 19th lat points
+      dlon = abs(var_lon(20) - var_lon(19))
+      dist_x_deg_earth = radius_earth * deg2rad_convF * dlat
+      clat = cos(var_lat * deg2rad_convF)
+
+      dx = dlon * radius_earth * deg2rad_convF * clat
+      ; dx = radius of earth * cos(lat of this data point) *
+      ;      (lon1 - lon2 (in radians))
+      dy = dlat * radius_earth * deg2rad_convF
+      ; dy = radius of earth * (lat1 - lat2 (in radians))
+      dxdy = tofloat(dx * dy)  ; area of cell = dx * dy
+      areacello_2d = new(dimsizes(dataset), float)
+      areacello_2d = conform(areacello_2d, dxdy, 0)
+      delete(var_lon)
+      delete(dx)
+      delete(dy)
+      delete(dxdy)
+      delete(clat)
+      delete(dlon)
+      delete(dlat)
+      dataset = dataset * -0.000031536
+      ; unit conversion factor from kg/s to Pg/yr
+      carbon_flux = dataset * areacello_2d
+      ; flux per cell = (flux per area) * (area per cell)
+      carbon_flux_per_lat = dim_sum_n_Wrap(carbon_flux, 1)
+      ; flux per lat = sum of flux per cell on the lon dimension
+      int_carbon_flux = cumsum(carbon_flux_per_lat, 2)
+      ; integrated flux = cumulative sum of flux per lat
+      int_carbon_flux!0 = "lat"
+      int_carbon_flux&lat = var_lat
+      delete(var_lat)
+    else
+      dataset_compare = datasetnames(iii) + "$"
+      fx_ind = str_match_ind_regex(areadatasets, dataset_compare)
+      fx_var = read_data(areacello_items[fx_ind])
+
+      if (all(ismissing(fx_var))) then
+        fx_variable = "areacello"
+        error_msg("f", "russell_fig-7i.ncl", " ", "areacello file for " + \
+                  datasetnames(iii) + " not found in the metadata file, " + \
+                  "please specify 'areacello' as supplementary variable " + \
+                  "in the recipe.")
+      end if
+      areacello_2d = fx_var
+      delete(fx_var)
+      dataset = dataset * -0.000031536
+      ; unit conversion factor from kg/s to Pg/yr
+      carbon_flux = dataset * areacello_2d
+      ; flux per cell = (flux per area) * (area per cell)
+      carbon_flux_per_lat = dim_sum_n_Wrap(carbon_flux, 1)
+      ; flux per lat = sum of flux per cell on the lon dimension
+      int_carbon_flux = cumsum(carbon_flux_per_lat, 2)
+      ; integrated flux = cumulative sum of flux per lat
+      int_carbon_flux!0 = "lat"
+      area_lat = infile_iii->lat
+      int_carbon_flux&lat = tofloat(area_lat(:, 0))
+      delete(area_lat)
+
+    end if
+    delete(infile_iii)
+    delete(carbon_flux)
+    delete(carbon_flux_per_lat)
+    delete(areacello_2d)
+    delete(dataset)
+
+    int_carbon_flux@var = var0
+    int_carbon_flux@diag_script = "russell18jgr-fig7i.ncl"
+    res@xyDashPatterns = dashes(iii)
+    res@xyLineColors = colors(iii)
+    res@xyExplicitLegendLabels = annots(iii)
+    res@gsnLeftString = "Russell et al -2018 - Figure 7i "
+
+    plot(iii) = gsn_csm_xy(wks, int_carbon_flux&lat, int_carbon_flux, res)
+
+    nc_filename = config_user_info@work_dir + "russell_figure-7i_" + var0 + \
+                  "_" + annots(iii) + "_" + (start_years_data(iii)) + "-" + \
+                  (end_years_data(iii)) + ".nc"
+
+    ncdf_outfile = ncdf_write(int_carbon_flux, nc_filename)
+    delete(int_carbon_flux)
+    if (iii .ne. 0) then
+      overlay(plot(0), plot(iii))
+    end if
+
+  end do
+  draw(plot(0))
+
+  legend = create "Legend" legendClass wks
+    "vpXF" : 0.625                     ; orientation on page
+    "vpYF" : 0.925
+    "vpWidthF" : 0.5                   ; width
+    "vpHeightF" : 0.725                ; height
+    "lgPerimOn" : False                ; no perimeter
+    "lgItemCount" : dimsizes(annots)   ; how many
+    "lgLineLabelStrings" : annots      ; labels
+    "lgLabelsOn" : False               ; no default labels
+    "lgLineLabelFontHeightF" : 0.0085  ; font height
+    "lgDashIndexes" : dashes           ; line patterns
+    "lgLineColors" : colors
+    "lgMonoLineLabelFontColor" : True  ; one label color
+  end create
+  draw(legend)
+
+  frame(wks)
+
+  log_provenance(ncdf_outfile, \
+                 plotpath + "." + output_type(), \
+                 "Russell et al 2018 figure 7i", \
+                 "mean", \
+                 "sh", \
+                 "zonal", \
+                 "russell_joellen", \
+                 "russell18jgr", \
+                 infile_path)
+
+end
diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig9a.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig9a.ncl
new file mode 100644
index 0000000000..24ebe3f12e
--- /dev/null
+++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig9a.ncl
@@ -0,0 +1,332 @@
+; ############################################################################
+; russell18jgr_fig9a.ncl
+;
+; Russell, J.L., et al., 2018, J. Geophysical Research – Oceans, 123,
+; 3120-3143. https://doi.org/10.1002/2017JC013461 (figure 9a)
+;
+; Author: Russell Joellen (University of Arizona, USA)
+;         Goodman Paul (University of Arizona, USA)
+;         Pandde Amarjiit (University of Arizona, USA)
+;         ESMVal project
+; #############################################################################
+; Description
+;
+;   - Uses original grid (no regridding).
+;   (for the total heat flux part of the script)
+;   - Masks the hfds data points on land using the ESMValTool preprocessor.
+;   - If the variable's coordinates are lat-lon, the script calculates
+;     areacello itself; otherwise it reads the areacello variable from the
+;     input file.
+;   - Multiplies the time-averaged hfds by areacello to get the heat flux
+;     per cell.
+;   - flux per lat = sum of all cells across that lat.
+;   - Total heat flux in the Southern Ocean = sum of flux per lat from
+;     30S to 90S.
+;
+;   (for the Southern westerly band part of the script)
+;   - Uses ESMValTool land masking (preprocessor) to mask land data points.
+;   - Calculates longitude-averaged tauu/tauuo values.
+;   - Calculates the first lat north and south of 50S where tauu is zero.
+;   - Subtracts the southern lat value from the northern one to get the
+;     latitudinal width of the Southern Hemisphere Westerly Band.
+;   - Calculates the line of linear regression between lat width and
+;     total heat flux.
+;   - Plots the line of linear regression.
+;   - Plots each model's heat flux and lat width as markers.
+;
+; Required diag_script_info attributes (configured for russell figure 9a)
+;   None
+;
+; Required variable_info attributes:
+;   - preprocessor
+;   - mip
+;   - project
+;   - exp
+;   - ensemble
+;   - start_year
+;   - end_year
+;   - additional datasets
+;
+; Required preprocessor attributes:
+;   preprocessor_time - for tauuo and hfds
+;   - time_average
+;
+; Required diag_script_info attributes (diagnostic specific)
+;
+;   - styleset : CMIP5 - default
+;   - ncdf : default
+;
+; Caveats
+;   - Does not work for models without hfds; these models will be made
+;     compatible in the future.
+;
+; Modification history
+;
+;   20190510 - russell_joellen, pandde_amarjiit - written and
+;              implemented for ESMValTool v2.
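+;
+; Note on units (illustrative; the numbers are rough, not from the paper):
+; hfds (W m-2) times areacello (m^2) gives W per cell, and dividing by
+; 10^15 expresses the 30S-90S sum in PW; e.g. a mean flux of 10 W m-2
+; over roughly 1e14 m^2 of ocean south of 30S would be ~1e15 W = 1 PW.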
+; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" ; load metadata +load "$diag_scripts/shared/plot/style.ncl" ; load plot style functions +load "$diag_scripts/shared/plot/contour_maps.ncl" ; load plot function +load "$diag_scripts/shared/latlon.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" + +begin + + enter_msg(DIAG_SCRIPT, "") + nVariables = ListCount(variable_info) + hfds_items = select_metadata_by_name(input_file_info, "hfds") + + var_names = metadata_att_as_array(variable_info, "short_name") + var_ind = str_match_ind(var_names, "tauu") + tauu_var = var_names(var_ind) + tauu_items = select_metadata_by_name(input_file_info, tauu_var) + tauu_inputfile_paths = metadata_att_as_array(tauu_items, "filename") + hfds_inputfile_paths = metadata_att_as_array(hfds_items, "filename") + start_years_data = metadata_att_as_array(tauu_items, "start_year") + end_years_data = metadata_att_as_array(tauu_items, "end_year") + nDatasets = ListCount(tauu_items) + areacello_items = select_metadata_by_name(input_file_info, "areacello") + areadatasets = metadata_att_as_array(areacello_items, "dataset") + + if (nVariables .ne. 3) then + error_msg("f", "russell_fig9a.ncl", " ", "Number of variables for" + \ + " this diag script must be 3 (hfds, areacello & tauu/tauuo)" + \ + ". Please make sure one of tauu or tauuo is commented out") + end if + if (nDatasets .lt. 3) then + error_msg("f", "russell_fig9a.ncl", " ", "Minimum number of " + \ + "datasets required for this diag script must be 3. " + \ + "Please add more datasets.") + end if + datasetnames = metadata_att_as_array(tauu_items, "dataset") +end + +begin + + plotpath = config_user_info@plot_dir + "russell18jgr-fig9a_" \ + + sprinti("%0.4i", min(toint(start_years_data))) + "-" \ + + sprinti("%0.4i", max(toint(end_years_data))) + + system("mkdir -p " + config_user_info@work_dir) + system("mkdir -p " + config_user_info@plot_dir) + + wks = gsn_open_wks(output_type(), plotpath) + wks@fullname = plotpath + plots = new(nDatasets, graphic) + lat_width = new(nDatasets, double) + heat_flux_iii = new(nDatasets, double) + + data1 = new((/2, dimsizes(datasetnames)/), double) + aaa = new(dimsizes(datasetnames), double) + + ; Set annotations + annots = project_style(tauu_items, diag_script_info, "annots") + markers = project_style(tauu_items, diag_script_info, "markers") + colors = project_style(tauu_items, diag_script_info, "colors") + + res = True + res@gsnMaximize = False + res@gsnFrame = False + res@gsnDraw = True + res@tiYAxisString = "Southern ocean heat uptake (PW)" + res@tiMainString = " Russell et al 2018 - Figure 9a " + res@vpHeightF = 0.45 + res@vpWidthF = 0.65 + res@tmYLMinorPerMajor = 0 + res@tmXBMinorPerMajor = 0 + res@tmYLMode = "Automatic" + res@tmXBMode = "Manual" + res@tiYAxisFontHeightF = 0.0125 + res@tiXAxisFontHeightF = 0.0125 + res@tmXBLabelFontHeightF = 0.01 ; fontsize of tickmark labels (x-axis) + res@tmYLLabelFontHeightF = 0.01 ; fontsize of tickmark labels (x-axis) + res@tmYROn = False ; Turn off top tickmarks + res@tmXTOn = False ; Turn off top tickmarks + res@vpYF = 0.9 + res@vpXF = 0.1 + res@tmXMajorGrid = True ; Add vertical grid lines + res@tmXMajorGridLineColor = "grey" + res@tmYMajorGrid = True ; Add horizontal grid lines + res@tmYMajorGridLineColor = "grey" + res@tiXAxisString = "Latitudinal width of Southern Hemisphere Westerly Band" + + do iii = 0, nDatasets - 1 + + hfds_dataset = read_data(hfds_items[iii]) + infile_path = 
hfds_inputfile_paths(iii) + infile_iii = addfile(infile_path, "r") + + tauu_dataset = read_data(tauu_items[iii]) + tauu_lon = dim_avg_n_Wrap(tauu_dataset, 1) + delete(tauu_dataset) + + fx_var = read_data(areacello_items[iii]) + ; checking for areacello + if (all(ismissing(fx_var))) then + error_msg("f", "russell18jgr-fig9a.ncl", " ", "areacello file of " + \ + datasetnames(iii) + " not found in the recipe. If the " + \ + "areacello file is available, please copy the dataset name " + \ + "to the additional datasets section of areacello.") + end if + areacello_2d = fx_var + + if (iscoord(hfds_dataset, "lat")) then + var_lat = hfds_dataset&lat + else + area_lat = infile_iii->lat + var_lat = area_lat(:, 0) + delete(area_lat) + end if + ; unit conversion from W to PW + heat_flux = hfds_dataset * areacello_2d / (10.0 ^ 15) + ; flux per cell = (flux per area) * (area per cell) + heat_flux_per_lat = dim_sum_n_Wrap(heat_flux, 1) + ; flux per lat = sum of flux per cell on lon dimension + a = closest_val(-30.0, var_lat) + ; finds the closest lat grid point to 30S + heat_flux_iii(iii) = sum(heat_flux_per_lat(0:a)) + delete(var_lat) + delete(fx_var) + + if (iscoord(tauu_lon, "lat")) then ; extracting lat values + var_lat = tauu_lon&lat + else + infile_path = tauu_inputfile_paths(iii) + infile_iii = addfile(infile_path, "r") + area_lat = infile_iii->lat + var_lat = dim_avg_n_Wrap(area_lat, 1) + delete(area_lat) + end if + + a1 = closest_val(-50.0, var_lat) + a2 = closest_val(-75.0, var_lat) + counter_lat = True + do lat_ind = a1, (dimsizes(var_lat)/2) + ; loops over each lat from 50S equatorward till tauu becomes negative + if (counter_lat) then + if(tauu_lon(lat_ind).ge. 0 .and. tauu_lon(lat_ind+1).lt.0) then + lat1 = var_lat(lat_ind) + lat2 = var_lat(lat_ind+1) + final_lat = lat1 - ((lat1 - lat2) * tauu_lon(lat_ind)) \ + / (tauu_lon(lat_ind)-tauu_lon(lat_ind+1)) + counter_lat = False + end if + end if + end do + delete(lat1) + delete(lat2) + do lat_ind1 = a2, a1 + ; loops over each lat poleward of 50S to find where tauu turns positive + if ((tauu_lon(lat_ind1) .lt. 
0).and.(tauu_lon(lat_ind1+1).ge.0)) then + lat1 = var_lat(lat_ind1) + lat2 = var_lat(lat_ind1+1) + lower_lat = lat1 - ((lat1 - lat2) * tauu_lon(lat_ind1) \ + / (tauu_lon(lat_ind1) - tauu_lon(lat_ind1+1))) + end if + end do + delete(lat1) + delete(lat2) + delete(var_lat) + lat_width(iii) = final_lat - lower_lat + delete(lower_lat) + delete(final_lat) + delete(infile_iii) + delete(heat_flux) + delete(heat_flux_per_lat) + delete(areacello_2d) + delete(hfds_dataset) + delete(tauu_lon) + + end do + + res@trYMaxF = decimalPlaces(max(heat_flux_iii+0.1), 1, True) + res@trYMinF = decimalPlaces(min(heat_flux_iii-0.1), 1, True) + xval_start_point = (round(min(lat_width - 0.5), 3)) + xval_end_point = (round(max(lat_width + 0.5), 3)) + + res@tmYLMaxTicks = 10 + res@trXMaxF = xval_end_point + res@trXMinF = xval_start_point + res@tmXBTickStartF = xval_start_point + res@tmXBTickSpacingF = 1.0 + res@tmXBTickEndF = xval_end_point + res@xyMarkLineMode = "Lines" + res@xyDashPatterns = 1 ; solid line + res@xyLineThicknessF = 1 + + aaa = fspan(xval_start_point, xval_end_point, nDatasets) + ; array of x coordinates for line of best fit + rc = regline(lat_width, heat_flux_iii) + ; calculates the slope and y-intercept for line of best fit + linereg = rc@yintercept + (rc * aaa) + ; calculates the y coordinates of line of best fit + data1(0, :) = heat_flux_iii + data1(1, :) = lat_width + data1!0 = "i" + data1&i = (/1, 2/) + plot = gsn_csm_xy(wks, aaa, linereg, res) ; create plot + + mres = True + mres@gsMarkerSizeF = 0.0125 ; choose marker size + + do i = 0, dimsizes(lat_width) - 1 + ; overlaying the markers on the plot + mres@gsMarkerColor = colors(i) ; set marker colors + mres@gsMarkerIndex = markers(i) ; choose marker types + id = unique_string("mark") ; create unique id + ; add markers to plot + plot@$id$ = gsn_add_polymarker(wks, plot, lat_width(i), \ + heat_flux_iii(i), mres) + end do + + draw(plot) ; draw the plot + + legend = create "Legend" legendClass wks + "vpXF" : 0.675 ; orientation on page + "vpYF" : 0.925 + "vpWidthF" : 0.31 ; width + "vpHeightF" : 0.725 ; height + "lgPerimOn" : False ; no perimeter + "lgItemCount" : dimsizes(annots) ; how many + "lgLabelStrings" : annots ; labels + "lgLabelsOn" : True ; show labels + "lgLabelFontHeightF" : 0.001 ; font height + "lgItemType" : "markers" ; markers instead of lines + "lgMarkerColors" : colors + "lgMarkerIndexes" : markers ; marker styles + end create + draw(legend) + + frame(wks) + + do idd = 0, dimsizes(lat_width) - 1 + nc_filename = config_user_info@work_dir + "russell18jgr_fig9a_" \ + + annots(idd) + "_" + (start_years_data(idd)) + "-" \ + + (end_years_data(idd)) + ".nc" + outvar = data1(:, idd) + outvar@var = "heat-flux_lat-width" + outvar@diag_script = "russell18jgr_fig9a.ncl" + outvar@model_name = annots(idd) + outvar@regline_y_coord = linereg + outvar@regline_x_coord = aaa + + outvar@description = "Total heat flux and lat width of southern westerly" \ + + " band for dataset: " + annots(idd) + " for years " \ + + (start_years_data(idd)) + "-" + (end_years_data(idd)) \ + + " are in 0th and 1st dimension respectively. Line of best fit's " \ + + "coordinates are added as attributes " \ + + "regline_x_coord and regline_y_coord." + + ncdf_outfile = ncdf_write(outvar, nc_filename) + + log_provenance(ncdf_outfile, \ + plotpath + "." 
+ output_type(), \ + "Russell et al 2018 figure 9a", \ + "mean", \ + "sh", \ + "scatter", \ + "russell_joellen", \ + "russell18jgr", \ + (/tauu_inputfile_paths(idd), hfds_inputfile_paths(idd)/)) + + end do +end diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig9b.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig9b.ncl new file mode 100644 index 0000000000..112cd846f5 --- /dev/null +++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig9b.ncl @@ -0,0 +1,350 @@ +; ############################################################################# +; russell18jgr_fig9b.ncl +; +; Russell, J.L., et al., 2018, J. Geophysical Research – Oceans, 123, 3120-3143 +; https://doi.org/10.1002/2017JC013461 (figure 9b) +; +; Author: Russell Joellen (University of Arizona, USA) +; Goodman Paul (University of Arizona, USA) +; Pandde Amarjiit (University of Arizona, USA) +; ESMVal project +; ############################################################################# +; Description +; - Uses original grid (no regridding). +; (for Total carbon flux part of script) +; - Masks the fgco2 data points on the land using ESMValTool preprocessor. +; - if var coordinates are lat-lon then the script calculates the areacello +; otherwise, the script reads the areacello variable from input file. +; - Multiplies time averaged fgco2 with areacello to get flux per cell. +; - flux per lat = sum of all cells across a lat. +; - total carbon flux in Southern ocean = sum of flux per lat from 30S - 90S. +; +; (for Southern westerly band part of the script) +; - Uses ESMValTool land masking (preprocessor) to mask land data points. +; - Calculates longitudinal averaged tauu/tauuo values. +; - Calculates the first lat north and south of 50S where tauu is zero. +; - Subtracts the north lat value from south lat value to get +; Latitudinal width of Southern Hemisphere Westerly Band. +; - Calculates line of linear regression between lat width and +; total carbon flux. +; - Plots the line of linear regression. +; - Plots each model's Carbon flux and lat width as markers. +; +; Required diag_script_info attributes (configured for russell figure 9b) +; None +; +; Required variable_info_attributes: +; - preprocessor +; - mip +; - project +; - exp +; - ensemble +; - start_year +; - end_year +; - additional datasets +; +; Required preprocessor attributes: +; preprocessor_time - for tauuo and fgco2 +; climate_statistics: +; operator: mean +; period: full +; +; Required diag_script_info attributes (diagnostics specific) +; +; - styleset : CMIP5 - default +; - ncdf : default +; +; Caveats +; - the lat width calculation does not exactly match the one in the paper +; +; Modification history +; +; 20190510 - russell_joellen, pandde_amarjiit - written +; and implemented for ESMValTool v2. 
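+; +; Worked example of the kg/s -> Pg/yr conversion used below: fgco2 in +; kg m-2 s-1 times areacello in m2 gives kg/s per cell; multiplying by +; 3.1536e7 s/yr and dividing by 1e12 kg/Pg gives the constant factor +; 3.1536e7 / 1e12 = 0.000031536 that is applied to the data. 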
+; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" ; load metadata +load "$diag_scripts/shared/plot/style.ncl" ; load plot style functions +load "$diag_scripts/shared/plot/contour_maps.ncl" ; load plot function +load "$diag_scripts/shared/latlon.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" + +begin + + enter_msg(DIAG_SCRIPT, "") + fgco2_items = select_metadata_by_name(input_file_info, "fgco2") + + var_names = metadata_att_as_array(variable_info, "short_name") + var_ind = str_match_ind(var_names, "tauu") + tauu_var = var_names(var_ind) + ; to run either tauu or tauuo + tauu_items = select_metadata_by_name(input_file_info, tauu_var) + tauu_inputfile_paths = metadata_att_as_array(tauu_items, "filename") + + areacello_items = select_metadata_by_name(input_file_info, "areacello") + + fgco2_inputfile_paths = metadata_att_as_array(fgco2_items, "filename") + start_years_data = metadata_att_as_array(fgco2_items, "start_year") + end_years_data = metadata_att_as_array(fgco2_items, "end_year") + nDatasets = ListCount(tauu_items) + datasetnames = metadata_att_as_array(fgco2_items, "dataset") + +end + +begin + + plotpath = config_user_info@plot_dir + "russell18jgr-fig9b_" \ + + sprinti("%0.4i", min(toint(start_years_data))) + "-" \ + + sprinti("%0.4i", max(toint(end_years_data))) + + system("mkdir -p " + config_user_info@work_dir) + system("mkdir -p " + config_user_info@plot_dir) + + wks = gsn_open_wks(output_type(), plotpath) + wks@fullname = plotpath + plots = new(nDatasets, graphic) + lat_width = new(nDatasets, double) + carbon_flux_iii = new(nDatasets, double) + + data1 = new((/2, dimsizes(datasetnames)/), double) + aaa = new(dimsizes(datasetnames), double) + + ; Set annotations + annots = project_style(tauu_items, diag_script_info, "annots") + markers = project_style(tauu_items, diag_script_info, "markers") + colors = project_style(tauu_items, diag_script_info, "colors") + + res = True ; plot mods desired + res@gsnMaximize = False ; don't maximize plot in frame + res@gsnFrame = False + res@gsnDraw = True + res@tiYAxisString = "Southern ocean carbon uptake (Pg/yr)" + res@tiMainString = "Russell et al 2018 - Figure 9b " + res@vpHeightF = 0.45 + res@vpWidthF = 0.6 + res@tmYLMode = "Automatic" + res@tmYLMaxTicks = 8 + res@tmXBMode = "Manual" + res@tmXBMinorPerMajor = 0 + res@tiYAxisFontHeightF = 0.0125 + res@tiXAxisFontHeightF = 0.0125 + res@tmXBLabelFontHeightF = 0.01 + res@tmYLLabelFontHeightF = 0.01 + res@tmYROn = False ; Turn off right tickmarks + res@tmXTOn = False ; Turn off top tickmarks + res@vpYF = 0.9 + res@vpXF = 0.08 + res@tmXMajorGrid = True ; Add vertical grid lines + res@tmXMajorGridLineColor = "grey" + res@tmYMajorGrid = True ; Add horizontal grid lines + res@tmYMajorGridLineColor = "grey" + res@tiXAxisString = "Latitudinal width of Southern Hemisphere Westerly Band" + + do iii = 0, nDatasets - 1 + + fgco2_dataset = read_data(fgco2_items[iii]) + tauu_dataset = read_data(tauu_items[iii]) + infile_path = fgco2_inputfile_paths(iii) + infile_iii = addfile(infile_path, "r") + + if (iscoord(fgco2_dataset, "lat")) then + ; some models have fgco2 on different grid than areacello + var_lat = infile_iii->lat + var_lon = infile_iii->lon + radius_earth = 6.37e06 + deg2rad_convF = 0.0174533 + dlat = abs(var_lat(20) - var_lat(19)) + ; some models have closer lat points near poles. 
+ ; hence taking difference of 20th and 19th lat points + dlon = abs(var_lon(20) - var_lon(19)) + dist_x_deg_earth = radius_earth * deg2rad_convF * dlat + clat = cos(var_lat*deg2rad_convF) + dx = dlon * radius_earth * deg2rad_convF * clat + ; dx = radius of earth * cos(lat of this data point) + ; * (lon1 - lon2 (in radians)) + dy = dlat * radius_earth * deg2rad_convF + ; dy = radius of earth * (lat1 - lat2 (in radians)) + dxdy = tofloat(dx*dy) ; area of cell = dx * dy + areacello_2d = new(dimsizes(fgco2_dataset), float) + areacello_2d = conform(areacello_2d, dxdy, 0) + delete(var_lon) + delete(dx) + delete(dy) + delete(dxdy) + delete(clat) + delete(dlon) + delete(dlat) + fgco2_dataset = fgco2_dataset * 0.000031536 + ; unit conversion from kg/s to Pg/yr + carbon_flux = fgco2_dataset * areacello_2d + ; flux per cell = (flux per area) * (area per cell) + carbon_flux_per_lat = dim_sum_n_Wrap(carbon_flux, 1) + ; flux per lat = sum of flux per cell on lon dimension + a = closest_val(-30.0, var_lat) + carbon_flux_iii(iii) = sum(carbon_flux_per_lat(0:a)) + delete(var_lat) + + else + fx_var = read_data(areacello_items[iii]) + if (all(ismissing(fx_var))) then + error_msg("f", "russell18jgr-fig9b.ncl", " ", "areacello file of " + \ + datasetnames(iii) + " not found in the recipe. If the " + \ + "areacello file is available, please copy the dataset name " + \ + "to the additional datasets section of areacello.") + end if + areacello_2d = fx_var + area_lat = infile_iii->lat + fgco2_dataset = fgco2_dataset * 0.000031536 + ; unit conversion from kg/s to Pg/yr + carbon_flux = fgco2_dataset * areacello_2d + ; flux per cell = (flux per area) * (area per cell) + carbon_flux_per_lat = dim_sum_n_Wrap(carbon_flux, 1) + ; flux per lat = sum of flux per cell on lon dimension + a = closest_val(-30.0, area_lat(:, 0)) + carbon_flux_iii(iii) = sum(carbon_flux_per_lat(0:a)) + delete(area_lat) + delete(fx_var) + end if + + tauu_lon = dim_avg_n_Wrap(tauu_dataset, 1) + delete(tauu_dataset) + + if (iscoord(tauu_lon, "lat")) then ; extracting lat values + var_lat = tauu_lon&lat + else + infile_path = tauu_inputfile_paths(iii) + infile_iii = addfile(infile_path, "r") + area_lat = infile_iii->lat + var_lat = dim_avg_n_Wrap(area_lat, 1) + delete(area_lat) + end if + + a1 = closest_val(-50.0, var_lat) + a2 = closest_val(-75.0, var_lat) + counter_lat = True + do lat_ind = a1, (dimsizes(var_lat) / 2) + ; loops over each lat from 50S equatorward till tauu becomes negative + if (counter_lat) then + if(tauu_lon(lat_ind).ge. 0 .and. tauu_lon(lat_ind+1).lt.0) then + lat1 = var_lat(lat_ind) + lat2 = var_lat(lat_ind+1) + final_lat = lat1 - ((lat1 - lat2) * tauu_lon(lat_ind)) \ + / (tauu_lon(lat_ind)-tauu_lon(lat_ind+1)) + counter_lat = False + delete(lat1) + delete(lat2) + end if + end if + end do + do lat_ind1 = a2, a1 + ; loops over each lat poleward of 50S to find where tauu turns positive + if ((tauu_lon(lat_ind1) .lt. 
0).and.(tauu_lon(lat_ind1+1).ge.0)) then + lat1 = var_lat(lat_ind1) + lat2 = var_lat(lat_ind1+1) + lower_lat = lat1 - ((lat1 - lat2) * tauu_lon(lat_ind1) \ + / (tauu_lon(lat_ind1) - tauu_lon(lat_ind1+1))) + delete(lat1) + delete(lat2) + end if + end do + delete(var_lat) + lat_width(iii) = final_lat - lower_lat + delete(lower_lat) + delete(final_lat) + delete(infile_iii) + delete(carbon_flux) + delete(carbon_flux_per_lat) + delete(areacello_2d) + delete(fgco2_dataset) + delete(tauu_lon) + end do + + res@trYMaxF = decimalPlaces(max(carbon_flux_iii + 0.1), 1, True) + res@trYMinF = decimalPlaces(min(carbon_flux_iii - 0.1), 1, True) + xval_start_point = (round(min(2*lat_width), 3)/2.0) - 0.5 + xval_end_point = (round(max(2*lat_width), 3)/2.0) + 0.5 + res@trXMaxF = xval_end_point + res@trXMinF = xval_start_point + res@tmXBTickStartF = xval_start_point + res@tmXBTickSpacingF = 0.5 + res@tmXBTickEndF = xval_end_point + res@xyDashPatterns = 1 ; solid line + res@xyLineThicknessF = 1 ; line thickness + + aaa = fspan(xval_start_point, xval_end_point, nDatasets) + ; array of x coordinates for line of best fit + rc = regline(lat_width, carbon_flux_iii) + ; calculates the slope and y-intercept for line of best fit + linereg = rc@yintercept + (rc * aaa) + ; calculates the y coordinates of line of best fit + data1(0, :) = carbon_flux_iii + data1(1, :) = lat_width + data1!0 = "i" + data1&i = (/1, 2/) + + plot = gsn_csm_xy(wks, aaa, linereg, res) ; create plot + mres = True + mres@gsMarkerSizeF = 0.0125 ; choose marker size + + do i = 0, dimsizes(lat_width) - 1 + mres@gsMarkerColor = colors(i) ; set marker colors + mres@gsMarkerIndex = markers(i) ; choose marker types + id = unique_string("mark") ; create unique id + ; add marker to plot + plot@$id$ = gsn_add_polymarker(wks, plot, lat_width(i), \ + carbon_flux_iii(i), mres) + end do + + draw(plot) ; draw the plot + + legend = create "Legend" legendClass wks + "vpXF" : 0.675 ; orientation on page + "vpYF" : 0.925 + "vpWidthF" : 0.31 ; width + "vpHeightF" : 0.725 ; height + "lgPerimOn" : False ; no perimeter + "lgItemCount" : dimsizes(annots) ; how many + "lgLabelStrings" : annots ; labels + "lgLabelsOn" : True ; show labels + "lgLabelFontHeightF" : 0.001 ; font height + "lgItemType" : "markers" ; markers instead of lines + "lgMarkerColors" : colors + "lgMarkerIndexes" : markers ; marker styles + end create + draw(legend) + + frame(wks) + + do idd = 0, dimsizes(lat_width) - 1 + nc_filename = config_user_info@work_dir + "russell18jgr_fig9b_" \ + + annots(idd) + "_" + (start_years_data(idd)) + "-" \ + + (end_years_data(idd)) + ".nc" + outvar = data1(:, idd) + outvar@var = "carbon-flux_lat-width" + outvar@diag_script = "russell18jgr_fig9b.ncl" + outvar@model_name = annots(idd) + outvar@regline_y_coord = linereg + outvar@regline_x_coord = aaa + outvar@description = "Total carbon flux and lat width of southern " \ + + "westerly band for dataset: " + annots(idd) \ + + " for years " + (start_years_data(idd)) + "-" \ + + (end_years_data(idd)) + " are in 0th and 1st "\ + + "dimension respectively. Line of best fit's " \ + + "coordinates are added as attributes " \ + + "regline_x_coord and regline_y_coord." + + ncdf_outfile = ncdf_write(outvar, nc_filename) + + log_provenance(ncdf_outfile, \ + plotpath + "." 
+ output_type(), \ + "Russell et al 2018 figure 9b", \ + "mean", \ + "sh", \ + "scatter", \ + "russell_joellen", \ + "russell18jgr", \ + (/tauu_inputfile_paths(idd), fgco2_inputfile_paths(idd)/)) + + end do +end diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig9c.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig9c.ncl new file mode 100644 index 0000000000..017b70103a --- /dev/null +++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig9c.ncl @@ -0,0 +1,345 @@ +; ############################################################################# +; russell18jgr_fig9c.ncl +; +; Russell, J.L., et al., 2018, J. Geophysical Research – Oceans, 123, 3120-3143. +; https://doi.org/10.1002/2017JC013461 (figure 9c) +; +; Author: Russell Joellen (University of Arizona, USA) +; Goodman Paul (University of Arizona, USA) +; Pandde Amarjiit (University of Arizona, USA) +; ESMVal project +; ############################################################################# +; Description +; +; - Uses original grid (no regridding). +; (for total heat flux part of script) +; - Masks the hfds data points on the land using ESMValTool preprocessor. +; - if var coordinates are lat-lon then the script calculates the areacello +; otherwise, the script reads the areacello variable from input file. +; - Multiplies time averaged hfds with areacello to get heat flux per cell. +; - flux per lat = sum of all cells across a lat. +; - total heat flux in Southern ocean = sum of flux per lat from 30S - 90S. +; +; (for Total carbon flux part of script) +; - Masks the fgco2 data points on the land using ESMValTool preprocessor. +; - if var coordinates are lat-lon then the script calculates the areacello +; otherwise, the script reads the areacello variable from input file. +; - Multiplies time averaged fgco2 with areacello to get flux per cell. +; - flux per lat = sum of all cells across a lat. +; - total carbon flux in Southern ocean = sum of flux per lat from 30S - 90S. +; - Calculates line of linear regression between total carbon flux and +; total heat flux. +; - Plots the line of linear regression. +; - Plots each model's Carbon flux and heat flux as markers. +; +; Required diag_script_info attributes (configured for russell figure 9c) +; None +; +; Required variable_info_attributes: +; - preprocessor +; - mip +; - project +; - exp +; - ensemble +; - start_year +; - end_year +; - additional datasets +; +; Required preprocessor attributes: +; preprocessor_time - for hfds and fgco2 +; climate_statistics: +; operator: mean +; period: full +; +; Required diag_script_info attributes (diagnostics specific) +; +; - styleset : CMIP5 - default +; - ncdf : default +; +; Caveats +; - Does not work for models without hfds; these models will be made +; compatible in the future. +; +; Modification history +; +; 20190510 - russell_joellen, pandde_amarjiit - written +; and implemented for ESMValTool v2. 
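+; +; Worked example of the cell-area estimate used below for regular +; lat-lon grids (illustrative numbers): with radius_earth = 6.37e6 m +; and a 1 deg x 1 deg cell at 60S, +; dy = 1 * 0.0174533 * 6.37e6 = 111177 m +; dx = 1 * 0.0174533 * 6.37e6 * cos(60 deg) = 55588 m +; area = dx * dy = 6.18e9 m2 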
+; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" ; load metadata +load "$diag_scripts/shared/plot/style.ncl" ; load plot style functions +load "$diag_scripts/shared/plot/contour_maps.ncl" ; load plot function +load "$diag_scripts/shared/latlon.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" + +begin + + enter_msg(DIAG_SCRIPT, "") + nVariables = ListCount(variable_info) + hfds_items = select_metadata_by_name(input_file_info, "hfds") + hfds_inputfile_paths = metadata_att_as_array(hfds_items, "filename") + + areacello_items = select_metadata_by_name(input_file_info, "areacello") + areadatasets = metadata_att_as_array(areacello_items, "dataset") + + fgco2_items = select_metadata_by_name(input_file_info, "fgco2") + fgco2_inputfile_paths = metadata_att_as_array(fgco2_items, "filename") + start_years_data = metadata_att_as_array(fgco2_items, "start_year") + end_years_data = metadata_att_as_array(fgco2_items, "end_year") + + nDatasets = ListCount(fgco2_items) + nHFDSdatasets = ListCount(hfds_items) + + if (nVariables .ne. 3) then + error_msg("f", "russell_fig9c.ncl", " ", "Number of variables for" + \ + " this diag script must be 3 (hfds, fgco2 & areacello)." + \ + " Please add the missing variable to the recipe.") + end if + if (nDatasets .lt. 3) then + error_msg("f", "russell_fig9c.ncl", " ", "Minimum number of " + \ + "datasets required for this diag script must be 3. Please " + \ + "add more datasets.") + end if + datasetnames = metadata_att_as_array(fgco2_items, "dataset") + +end + +begin + + plotpath = config_user_info@plot_dir + "russell18jgr-fig9c_" \ + + sprinti("%0.4i", min(toint(start_years_data))) + "-" \ + + sprinti("%0.4i", max(toint(end_years_data))) + + system("mkdir -p " + config_user_info@work_dir) + system("mkdir -p " + config_user_info@plot_dir) + + wks = gsn_open_wks(output_type(), plotpath) + wks@fullname = plotpath + plots = new(nDatasets, graphic) + lat_width = new(nDatasets, double) + heat_flux_iii = new(nDatasets, double) + carbon_flux_iii = new(nDatasets, double) + + data1 = new((/2, dimsizes(datasetnames)/), double) + aaa = new(dimsizes(datasetnames), double) + + ; Set annotations + annots = project_style(fgco2_items, diag_script_info, "annots") + markers = project_style(fgco2_items, diag_script_info, "markers") + colors = project_style(fgco2_items, diag_script_info, "colors") + + res = True ; plot mods desired + res@gsnMaximize = False ; don't maximize plot in frame + res@gsnFrame = False + res@gsnDraw = True + res@tiXAxisString = "Southern ocean heat uptake (PW)" + res@tiYAxisString = "Southern ocean carbon uptake (Pg/yr)" + res@tiMainString = " Russell et al 2018 - Figure 9c " + res@vpHeightF = 0.45 + res@vpWidthF = 0.65 + res@tmYLMinorPerMajor = 0 + res@tmXBMinorPerMajor = 0 + res@tmYLMode = "Automatic" + res@tmXBMode = "Automatic" + res@tiYAxisFontHeightF = 0.0125 + res@tiXAxisFontHeightF = 0.0125 + res@tmXBLabelFontHeightF = 0.01 + res@tmYLLabelFontHeightF = 0.01 + res@tmYROn = False ; Turn off right tickmarks + res@tmXTOn = False ; Turn off top tickmarks + res@vpYF = 0.9 + res@vpXF = 0.1 + res@tmXMajorGrid = True ; Add vertical grid lines + res@tmXMajorGridLineColor = "grey" + res@tmYMajorGrid = True ; Add horizontal grid lines + res@tmYMajorGridLineColor = "grey" + + do iii = 0, nDatasets - 1 + + fgco2_dataset = read_data(fgco2_items[iii]) + infile_path = fgco2_inputfile_paths(iii) + infile_iii = addfile(infile_path, "r") + + if (iscoord(fgco2_dataset, 
"lat")) then + var_lat = infile_iii->lat + var_lon = infile_iii->lon + radius_earth = 6.37e06 + deg2rad_convF = 0.0174533 + dlat = abs(var_lat(20) - var_lat(19)) + ; some models have closer lat points near poles. + ; hence taking difference of 20th and 19th lat points + dlon = abs(var_lon(20) - var_lon(19)) + dist_x_deg_earth = radius_earth * deg2rad_convF * dlat + clat = cos(var_lat*deg2rad_convF) + dx = dlon * radius_earth * deg2rad_convF * clat + ; dx = radius of earth * cos(lat of this data point) + ; * (lon1 - lon2 (in radians)) + dy = dlat * radius_earth * deg2rad_convF + ; dy = radius of earth *(lat1 - lat2 (in radians)) + dxdy = tofloat(dx*dy) ; area of cell = dx * dy + areacello_2d = new(dimsizes(fgco2_dataset), float) + areacello_2d = conform(areacello_2d, dxdy, 0) + delete(var_lon) + delete(dx) + delete(dy) + delete(dxdy) + delete(clat) + delete(dlon) + delete(dlat) + fgco2_dataset = fgco2_dataset * 0.000031536 + ; unit conversion from kg/s to Pg/yr + carbon_flux = fgco2_dataset * areacello_2d + ; flux per cell = (flux per area) * (area per cell) + carbon_flux_per_lat = dim_sum_n_Wrap(carbon_flux, 1) + ; flux per lat = sum of flux per cell on lon dimension + a = closest_val(-30.0, var_lat) + carbon_flux_iii(iii) = sum(carbon_flux_per_lat(0:a)) + delete(var_lat) + else + fx_var = read_data(areacello_items[iii]) + if (all(ismissing(fx_var))) then + error_msg("f", "russell18jgr-fig9c.ncl", " ", "areacello file of " + \ + datasetnames(iii) + "not found in the recipe. If " + \ + " areacello file available, please copy the dataset name" + \ + " to additional dataset section of areacello.") + end if + areacello_2d = fx_var + area_lat = infile_iii->lat + fgco2_dataset = fgco2_dataset * 0.000031536 + ; unit conversion from kg/s to Pg/yr + carbon_flux = fgco2_dataset * areacello_2d + ; flux per cell = (flux per area) * (area per cell) + carbon_flux_per_lat = dim_sum_n_Wrap(carbon_flux, 1) + ; flux per lat = sum of flux per cell on lon dimension + a = closest_val(-30.0, area_lat(:, 0)) + carbon_flux_iii(iii) = sum(carbon_flux_per_lat(0:a)) + delete(area_lat) + delete(fx_var) + end if + delete(areacello_2d) + hfds_dataset = read_data(hfds_items[iii]) + infile_path = hfds_inputfile_paths(iii) + infile_iii = addfile(infile_path, "r") + + fx_var = read_data(areacello_items[iii]) + ; checking for areacello + if (all(ismissing(fx_var))) then + error_msg("f", "russell18jgr-fig9c.ncl", " ", "areacello file for " + \ + datasetnames(iii) + " not found in the metadata file, " + \ + + " not found in the metadata file, please specify " \ + + "'areacello' as supplementary variable in the recipe.") + end if + areacello_2d = fx_var + + if (iscoord(hfds_dataset, "lat")) then + var_lat = hfds_dataset&lat + else + area_lat = infile_iii->lat + var_lat = area_lat(:, 0) + delete(area_lat) + end if + ; unit conversion from W to PW + heat_flux = hfds_dataset * areacello_2d / (10.0 ^ 15) + ; flux per cell = (flux per area) * (area per cell) + heat_flux_per_lat = dim_sum_n_Wrap(heat_flux, 1) + ; flux per lat = sum of flux per cell on lon dimension + a = closest_val(-30.0, var_lat) ; finds the closest lat grid point to 30S + heat_flux_iii(iii) = sum(heat_flux_per_lat(0:a)) + delete(var_lat) + delete(fx_var) + delete(fgco2_dataset) + delete(carbon_flux) + delete(carbon_flux_per_lat) + delete(hfds_dataset) + delete(heat_flux) + delete(heat_flux_per_lat) + delete(areacello_2d) + end do + + xval_start_point = decimalPlaces(min(heat_flux_iii-0.2), 1, True) + xval_end_point = decimalPlaces(max(heat_flux_iii+0.2), 1, True) 
+ res@trXMaxF = xval_end_point + res@trXMinF = xval_start_point + res@trYMaxF = decimalPlaces(max(carbon_flux_iii+0.1), 1, True) + res@trYMinF = decimalPlaces(min(carbon_flux_iii-0.1), 1, True) + res@tmYLMaxTicks = 10 + res@tmXBMaxTicks = 10 + res@xyDashPatterns = 1 + res@xyLineThicknessF = 1 + + aaa = fspan(xval_start_point, xval_end_point, nDatasets) + ; array of x coordinates for line of best fit + rc = regline(heat_flux_iii, carbon_flux_iii) + ; calculates the slope and y-intercept for line of best fit + linereg = rc@yintercept + rc * aaa + ; calculates the y coordinates of line of best fit + data1(0, :) = heat_flux_iii + data1(1, :) = carbon_flux_iii + data1!0 = "i" + data1&i = (/1, 2/) + + plot = gsn_csm_xy(wks, aaa, linereg, res) ; create plot + + mres = True + mres@gsMarkerSizeF = 0.0125 ; choose marker size + + do i = 0, (nDatasets - 1) + mres@gsMarkerColor = colors(i) ; set marker colors + mres@gsMarkerIndex = markers(i) ; choose marker types + id = unique_string("mark") ; create unique id + ; add marker to plot + plot@$id$ = gsn_add_polymarker(wks, plot, heat_flux_iii(i), \ + carbon_flux_iii(i), mres) + end do + + draw(plot) ; draw the plot + + legend = create "Legend" legendClass wks + "vpXF" : 0.675 ; orientation on page + "vpYF" : 0.925 + "vpWidthF" : 0.31 ; width + "vpHeightF" : 0.725 ; height + "lgPerimOn" : False ; no perimeter + "lgItemCount" : dimsizes(annots) ; how many + "lgLabelStrings" : annots ; labels + "lgLabelsOn" : True ; show labels + "lgLabelFontHeightF" : 0.001 ; font height + "lgItemType" : "markers" ; markers instead of lines + "lgMarkerColors" : colors + "lgMarkerIndexes" : markers ; marker styles + end create + draw(legend) + + frame(wks) + + do idd = 0, dimsizes(lat_width) - 1 + nc_filename = config_user_info@work_dir + "russell18jgr_fig9c_" \ + + annots(idd) + "_" + (start_years_data(idd)) + "-" \ + + (end_years_data(idd)) + ".nc" + outvar = data1(:, idd) + outvar@var = "heat-flux_carbon-flux" + outvar@diag_script = "russell18jgr_fig9c.ncl" + outvar@model_name = annots(idd) + outvar@regline_y_coord = linereg + outvar@regline_x_coord = aaa + + outvar@description = "Total heat and carbon flux of southern westerly " \ + + "band for dataset: " + annots(idd) + " for years " \ + + (start_years_data(idd)) + "-" + (end_years_data(idd)) \ + + " are in 0th and 1st dimension respectively. Line of " \ + + "best fit's coordinates are added as attributes " \ + + "regline_x_coord and regline_y_coord." + ncdf_outfile = ncdf_write(outvar, nc_filename) + + log_provenance(ncdf_outfile, \ + plotpath + "." + output_type(), \ + "Russell et al 2018 figure 9c", \ + "mean", \ + "sh", \ + "scatter", \ + "russell_joellen", \ + "russell18jgr", \ + (/fgco2_inputfile_paths(idd), hfds_inputfile_paths(idd)/)) + + end do +end diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-polar.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-polar.ncl new file mode 100644 index 0000000000..106f161801 --- /dev/null +++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-polar.ncl @@ -0,0 +1,250 @@ +; ############################################################################ +; russell_polcon_landmasked.ncl (polar contour plot with land values masked) +; +; Russell, J.L., et al., 2018, J. 
Geophysical Research – Oceans, 123, 3120-3143 +; https://doi.org/10.1002/2017JC013461 (figures 1, 7a, & 8a) +; +; Author: Russell Joellen (University of Arizona, USA) +; Goodman Paul (University of Arizona, USA) +; Pandde Amarjiit (University of Arizona, USA) +; +; adapted from sea ice polcon made by Daniel Senftleben (DLR, Germany) +; +; ########################################################################### +; Description +; - Uses original grid (no regridding). +; - Panels multiple plots: starts a new page after max_vert*max_hori. +; - Uses the preprocessor land masked values of tauu, ph and fgco2. +; - The script takes a time average of the masked data. +; - Plots a polar contour plot. +; - Adapted from the SouthernOcean_polcon.ncl & sea ice polar contour plot. +; +; Required diag_script_info attributes (configured to recreate figure 1, 7 & 8) +; (Does not need changes) +; +; - max_lat : plot ranges for SH. +; - labelBar_end_type: determines the type of label bars. +; +; Optional diag_script_info attributes (configured to recreate figure 1, 7 & 8) +; (Does not need changes) +; +; - max_vert, max_hori: allowed dimensions of a panel page. +; (number of plots per column/row) +; - grid_min, grid_max, grid_step: min, max and step size for color scale. +; - colormap: NCL colour map name. +; - colors: array of rgb values used as colormap. +; +; Required Preprocessor attributes (configured to recreate figure 1, 7 & 8) +; preprocessor_time_land: (for tauu) +; climate_statistics: +; operator: mean +; period: full +; mask_landsea: +; mask_out: "land" +; +; Caveats +; - fgco2 of the CNRM-CM5 model doesn't work with the ncdf_write function as +; it has x-y coordinate variables instead of lat-lon, i-j or rlat-rlon +; +; +; Modification history +; +; 20190610 - russell_joellen, pandde_amarjiit - written and +; implemented for ESMValTool v2 +; +; ######################################################################### + +load "$diag_scripts/../interface_scripts/interface.ncl" ; load metadata +load "$diag_scripts/shared/plot/style.ncl" ; load plot style functions +load "$diag_scripts/shared/plot/contour_maps.ncl" ; load plot function +load "$diag_scripts/shared/latlon.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" + +begin + + enter_msg(DIAG_SCRIPT, "") + var0 = variable_info[0]@short_name + var_atts = variable_info[0] + ; info_items = select_metadata_by_name(input_file_info, var0) + var_name = metadata_att_as_array(input_file_info, "short_name") + + inputfile_paths = metadata_att_as_array(input_file_info, "filename") + start_years_data = metadata_att_as_array(input_file_info, "start_year") + end_years_data = metadata_att_as_array(input_file_info, "end_year") + nDatasets = ListCount(input_file_info) + +end + +begin + + plotpath = config_user_info@plot_dir + "Russell_polar-contour_" + var0 + \ + "_" + sprinti("%0.4i", min(toint(start_years_data))) + "-" \ + + sprinti("%0.4i", max(toint(end_years_data))) + + system("mkdir -p " + config_user_info@work_dir) + system("mkdir -p " + config_user_info@plot_dir) + + wks = gsn_open_wks(output_type(), plotpath) + wks@fullname = plotpath + plots = new(nDatasets, graphic) + + ; Set annotations + annots = project_style(input_file_info, diag_script_info, "annots") + ; Set contour parameters + grid_min = 0. ; default + grid_max = 1. 
; default + grid_step = 0.1 ; default + if (isatt(diag_script_info, "grid_min")) then + grid_min = tofloat(diag_script_info@grid_min) + end if + if (isatt(diag_script_info, "grid_max")) then + grid_max = tofloat(diag_script_info@grid_max) + end if + if (isatt(diag_script_info, "grid_step")) then + grid_step = tofloat(diag_script_info@grid_step) + end if + nsteps = round((grid_max - grid_min) / grid_step, 3) + 1 + cnLevels = fspan(grid_min, grid_max, nsteps) + ; panel parameters + nvert = 1 ; default + nhori = 1 ; default + if (isatt(diag_script_info, "max_vert")) then + nvert = toint(diag_script_info@max_vert) + ; Maximum allowed number of plots per page (vertical) + end if + + if (isatt(diag_script_info, "max_hori")) then + nhori = toint(diag_script_info@max_hori) + ; Maximum allowed number of plots per page (horizontal) + end if + + ; Define color table + if (isatt(diag_script_info, "colors")) then + colors = diag_script_info@colors + colors = colors / 256.0 + else + if (isatt(diag_script_info, "colormap")) then + colormap = diag_script_info@colormap + gsn_define_colormap(wks, colormap) + ColorStart = 2 + ColorEnd = -1 + if (colormap .eq. "BlWhRe") then + ColorEnd = 95 + ColorStart = 7 + end if + else + colormap = "nrl_sirkes_nowhite" + gsn_define_colormap(wks, colormap) + ColorStart = 10 + ColorEnd = 240 + end if + end if + + if (diag_script_info@labelBar_end_type .eq. "ExcludeOuterBoxes") then + type_of_outer_boxes = "ExcludeOuterBoxes" + outerBoxes_present = False + else + outerBoxes_present = True + type_of_outer_boxes = "TriangleBothEnds" + end if + + strUnits = " (" + var_atts@units + ")" ; to show units in title + if (isatt(diag_script_info, "new_units")) then + strUnits = " (" + diag_script_info@new_units + ") " + end if + + res = True + res@lbLabelBarOn = True ; individual bars + res@lbOrientation = "Vertical" + + if (outerBoxes_present) then + res@lbBoxEndCapStyle = type_of_outer_boxes + else + res@cnLabelBarEndStyle = type_of_outer_boxes + end if + + if (isatt(diag_script_info, "grid_color")) then + res@mpGridLineColor = diag_script_info@grid_color + else + res@mpGridLineColor = "green" + end if + + res@cnLevelSelectionMode = "ExplicitLevels" + res@cnLevels = cnLevels + res@cnFillOn = True ; turn on color fill + + if (isatt(diag_script_info, "colors")) then + res@cnFillPalette = colors + else + res@gsnSpreadColors = True ; use full colormap + res@gsnSpreadColorStart = ColorStart ; color to start + res@gsnSpreadColorEnd = ColorEnd ; color to end + end if + + res@mpGridLineDashPattern = 0 + res@cnLinesOn = True ; turn on color lines + res@cnLineLabelsOn = True ; turn on contour line labels + res@gsnAddCyclic = True ; to make the plot complete + res@mpFillOn = True + res@mpLandFillColor = (/128, 128, 128/) / 256.0 + res@gsnDraw = False + res@gsnFrame = False + res@gsnPolar = "SH" + res@mpMaxLatF = diag_script_info@max_lat ; Default is 0 + res@gsnContourZeroLineThicknessF = 2. 
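+ ; negative contours are dashed (pattern 2) below, so that sign changes + ; stand out against the thicker zero contour set above 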
+ res@gsnContourNegLineDashPattern = 2 + res@gsnRightStringOrthogonalPosF = 0.11 + res@mpFillDrawOrder = "PostDraw" ; to cover Antarctica with grey + + do iii = 0, nDatasets - 1 + dataset = read_data(input_file_info[iii]) ; reading data + dataset@var = var_name(iii) + dataset@diag_script = DIAG_SCRIPT + if (isatt(diag_script_info, "unitCorrectionalFactor")) then + ; to convert the units in datasets + correction_factor = diag_script_info@unitCorrectionalFactor + dataset = dataset * correction_factor + delete(correction_factor) + end if + + if(.not.(iscoord(dataset, "lat"))) then + + infile_path = inputfile_paths(iii) + infile_iii = addfile(infile_path, "r") + area_lon = infile_iii->lon + area_lat = infile_iii->lat + dataset@lat2d = area_lat + dataset@lon2d = area_lon + delete(area_lat) + delete(area_lon) + end if + + range = start_years_data(iii) + " - " + end_years_data(iii) + res@tiMainString = annots(iii) + res@gsnRightString = "annual mean " + range + res@gsnLeftString = var_name(iii) + strUnits + + plots(iii) = gsn_csm_contour_map_polar(wks, dataset, res) + nc_filename = config_user_info@work_dir + "russell18jgr_polar_" \ + + var_name(iii) + "_" + annots(iii) + "_" + (start_years_data(iii)) \ + + "-" + (end_years_data(iii)) + ".nc" + + ncdf_outfile = ncdf_write(dataset, nc_filename) + delete(dataset) + end do + + ; Draw the panel + pres = True + pres@gsnPanelLabelBar = False + outfile = panelling(wks, plots, nvert, nhori, pres) + + log_provenance(ncdf_outfile, \ + plotpath + "." + output_type(), \ + "Russell et al 2018 polar plot " + var0, \ + "mean", \ + "sh", \ + "geo", \ + "russell_joellen", \ + "russell18jgr", \ + inputfile_paths) +end diff --git a/esmvaltool/diag_scripts/sea_surface_salinity/compare_salinity.py b/esmvaltool/diag_scripts/sea_surface_salinity/compare_salinity.py new file mode 100644 index 0000000000..ec13a92b9d --- /dev/null +++ b/esmvaltool/diag_scripts/sea_surface_salinity/compare_salinity.py @@ -0,0 +1,311 @@ +import logging +import os +import string +from datetime import datetime +from functools import reduce + +import cf_units +import iris +import iris.quickplot as qplot +import matplotlib.pyplot as plt +import numpy as np +from esmvalcore.iris_helpers import date2num +from esmvalcore.preprocessor import climate_statistics, regrid_time +from iris.coord_categorisation import add_month_number, add_year +from matplotlib.legend import Legend +from matplotlib.legend_handler import HandlerBase +from matplotlib.text import Text + +import esmvaltool.diag_scripts.shared +from esmvaltool.diag_scripts.shared import group_metadata, names +from esmvaltool.diag_scripts.shared._base import ProvenanceLogger + +logger = logging.getLogger(__name__) + + +class CompareSalinity(object): + def __init__(self, config): + self.cfg = config + self.ticks = { + 'mean': [-0.5, 0.0, 0.5], + 'std_dev': [0.25, 0.5, 1, 2, 4] + } + self.lim = {'mean': [-1, 1], 'std_dev': [0.01, 10]} + self.operation = {'mean': 'bias', 'std_dev': 'std_ratio'} + + def compute(self): + data = group_metadata(self.cfg[names.INPUT_DATA].values(), + names.SHORT_NAME) + for short_name in data: + logger.info("Processing variable %s", short_name) + variables = group_metadata(data[short_name], names.ALIAS) + ref_alias = list(variables.values())[0][0]['reference_dataset'] + reference_dataset = variables.pop(ref_alias)[0] + reference = iris.load_cube(reference_dataset[names.FILENAME]) + reference_ancestor = reference_dataset[names.FILENAME] + logger.debug("Info reference dataset:") + logger.debug(reference) + for alias, 
dataset_info in variables.items(): + logger.info("Plotting dataset %s", alias) + dataset_info = dataset_info[0] + dataset = iris.load_cube(dataset_info[names.FILENAME]) + time_coord = dataset.coord('time') + if time_coord.units.calendar == 'proleptic_gregorian': + time_coord.units = cf_units.Unit( + time_coord.units.name, + calendar='gregorian', + ) + self._unify_time_coordinates([reference, dataset]) + logger.debug("Info dataset %s:", alias) + logger.debug(dataset) + ancestors = (dataset_info[names.FILENAME], reference_ancestor) + for region_slice in dataset.slices_over('shape_id'): + region = region_slice.coord('shape_id').points[0] + self.create_timeseries_plot(region, region_slice, + reference, ref_alias, + dataset_info, ancestors) + self.create_radar_plot(dataset_info, dataset, reference, + ref_alias, ancestors) + + def create_timeseries_plot(self, region, data, reference, reference_alias, + dataset_info, ancestors): + alias = dataset_info[names.ALIAS] + qplot.plot(data, label=alias) + qplot.plot(reference.extract(iris.Constraint(shape_id=region)), + label=reference_alias) + plt.legend() + plt.title(f"{dataset_info[names.LONG_NAME]} ({region})") + plt.tight_layout() + plot_path = os.path.join( + self.cfg[names.PLOT_DIR], + f"{dataset_info[names.SHORT_NAME]}_{region.replace(' ', '')}" + f"_{alias}.{self.cfg[names.OUTPUT_FILE_TYPE]}") + plt.savefig(plot_path) + plt.close() + caption = (f"{dataset_info[names.SHORT_NAME]} mean in {region} for " + f"{alias} and {reference_alias}") + self._create_prov_record(plot_path, caption, ancestors) + + def create_radar_plot(self, data_info, data, reference, reference_alias, + ancestors): + interval = self._get_overlap([data, reference]) + indices = self._slice_cube(data, interval[0], interval[1]) + data = data[indices[0]:indices[1] + 1] + indices = self._slice_cube(reference, interval[0], interval[1]) + reference = reference[indices[0]:indices[1] + 1] + + add_month_number(data, 'time') + add_year(data, 'time') + + add_month_number(reference, 'time') + add_year(reference, 'time') + + data_alias = data_info[names.ALIAS] + for operator in ['mean', 'std_dev']: + climat_ref = climate_statistics(reference, operator) + climat_data = climate_statistics(data, operator) + if operator == 'mean': + result_data = climat_ref.data - climat_data.data + else: + result_data = climat_ref.data / climat_data.data + + result = climat_ref.copy(result_data) + angles = np.linspace(0, 2 * np.pi, result.shape[0] + 1) + # Initialise the spider plot + ax = plt.subplot(111, polar=True) + for spine in ax.spines.values(): + spine.set_color('grey') + + # Draw one axis per variable and add labels + letters = [ + string.ascii_uppercase[i] for i in range(0, result.shape[0]) + ] + plt.xticks(angles[:-1], + letters, + color='grey', + size=8, + rotation=45) + + # Draw ylabels + ax.set_rlabel_position(0) + plt.yticks(self.ticks[operator], + list(map(str, self.ticks[operator])), + color="grey", + size=7) + plt.ylim(min(self.lim[operator]), max(self.lim[operator])) + + radar_data = np.append(result.data, result.data[0]) + more_angles = np.linspace(0, 2 * np.pi, result.shape[0] * 20 + 1) + interp_data = np.interp(more_angles, angles, radar_data) + + # Plot data + ax.plot(more_angles, interp_data, linewidth=1, linestyle='solid') + ax.fill(more_angles, interp_data, 'b', alpha=0.1) + ax.legend(letters, + result.coord('shape_id').points, + loc='upper center', + ncol=2, + frameon=False, + bbox_to_anchor=(0.5, -0.1), + borderaxespad=0.) 
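+            # The std_dev panel shows a ratio of standard deviations, so a + # symmetric log scale is used below; linthresh=0.1 keeps the + # interval [-0.1, 0.1] linear while larger ratios are log-scaled. 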
+ if operator == 'std_dev': + ax.set_yscale('symlog', linthresh=0.1) + operation = self.operation[operator] + plt.title( + f'{data_info[names.SHORT_NAME]} {operation}\n' + f'{data_alias} vs {reference_alias}', + pad=20) + plt.tight_layout() + plot_path = os.path.join( + self.cfg[names.PLOT_DIR], + f"{data_info[names.SHORT_NAME]}_{operation}" + f"_comparison_{data_alias}_" + f"{reference_alias}.{self.cfg[names.OUTPUT_FILE_TYPE]}") + plt.savefig(plot_path) + plt.close() + caption = ( + f"Absolute {operation} comparison in different regions for " + f"{data_alias} and {reference_alias}") + self._create_prov_record(plot_path, caption, ancestors) + + def _create_prov_record(self, filepath, caption, ancestors): + record = { + 'caption': caption, + 'domains': [ + 'global', + ], + 'authors': ['vegas-regidor_javier'], + 'references': ['acknow_author'], + 'ancestors': ancestors + } + with ProvenanceLogger(self.cfg) as provenance_logger: + provenance_logger.log(filepath, record) + + def _get_time_offset(self, time_unit): + """Return a datetime object equivalent to tunit.""" + # tunit e.g. 'day since 1950-01-01 00:00:00.0000000 UTC' + cfunit = cf_units.Unit(time_unit, calendar=cf_units.CALENDAR_STANDARD) + time_offset = cfunit.num2date(0) + return time_offset + + def _align_yearly_axes(self, cube): + """Align years. + + Perform a time-regridding operation to align time axes for yr + data. + """ + years = [cell.point.year for cell in cube.coord('time').cells()] + # be extra sure that the first point is not in the previous year + if 0 not in np.diff(years): + return regrid_time(cube, 'yr') + return cube + + def _datetime_to_int_days(self, cube): + """Return list of int(days) converted from cube datetime cells.""" + cube = self._align_yearly_axes(cube) + time_cells = [cell.point for cell in cube.coord('time').cells()] + + # extract date info + real_dates = [] + for date_obj in time_cells: + # real_date resets the actual data point day + # to the 1st of the month so that there are no + # wrong overlap indices + real_date = datetime(date_obj.year, date_obj.month, 1, 0, 0, 0) + real_dates.append(real_date) + + # get the number of days starting from the reference unit + time_unit = cube.coord('time').units.name + time_offset = self._get_time_offset(time_unit) + days = [(date_obj - time_offset).days for date_obj in real_dates] + + return days + + def _get_overlap(self, cubes): + """Get discrete time overlaps. + + This method gets the bounds of coord time from the cube and + assembles a continuous time axis with smallest unit 1; then it + finds the overlaps by doing a 1-dim intersect; takes the floor + of first date and ceil of last date. + """ + all_times = [] + for cube in cubes: + span = self._datetime_to_int_days(cube) + start, stop = span[0], span[-1] + all_times.append([start, stop]) + bounds = [range(b[0], b[-1] + 1) for b in all_times] + time_pts = reduce(np.intersect1d, bounds) + if len(time_pts) > 1: + time_bounds_list = [time_pts[0], time_pts[-1]] + return time_bounds_list + + def _slice_cube(self, cube, t_1, t_2): + """Efficient slicer. + + Simple cube data slicer on indices of common time-data elements. + """ + time_pts = [t for t in cube.coord('time').points] + converted_t = self._datetime_to_int_days(cube) + idxs = sorted([ + time_pts.index(ii) for ii, jj in zip(time_pts, converted_t) + if t_1 <= jj <= t_2 + ]) + return [idxs[0], idxs[-1]] + + @staticmethod + def _get_consistent_time_unit(cubes): + """Fix time units. + + Return cubes' time unit if consistent, standard calendar + otherwise. 
+ """ + t_units = [cube.coord('time').units for cube in cubes] + if len(set(t_units)) == 1: + return t_units[0] + return cf_units.Unit("days since 1850-01-01", calendar="standard") + + def _unify_time_coordinates(self, cubes): + """Make sure all cubes' share the same time coordinate.""" + t_unit = self._get_consistent_time_unit(cubes) + for cube in cubes: + # Extract date info from cube + coord = cube.coord('time') + years = [p.year for p in coord.units.num2date(coord.points)] + months = [p.month for p in coord.units.num2date(coord.points)] + dates = [ + datetime(year, month, 15, 0, 0, 0) + for year, month in zip(years, months) + ] + + # Update the cubes' time coordinate + cube.coord('time').points = date2num(dates, t_unit, coord.dtype) + cube.coord('time').units = t_unit + cube.coord('time').bounds = None + cube.coord('time').guess_bounds() + + +class TextHandler(HandlerBase): + def create_artists(self, legend, text, xdescent, ydescent, width, height, + fontsize, trans): + tx = Text(width / 2., + height / 2, + text, + fontsize=fontsize, + ha="center", + va="center", + fontweight="bold") + return [tx] + + +Legend.update_default_handler_map({str: TextHandler()}) + + +def main(): + with esmvaltool.diag_scripts.shared.run_diagnostic() as config: + CompareSalinity(config).compute() + + +if __name__ == "__main__": + main() diff --git a/esmvaltool/diag_scripts/seaborn_diag.py b/esmvaltool/diag_scripts/seaborn_diag.py new file mode 100644 index 0000000000..aa6ca1b504 --- /dev/null +++ b/esmvaltool/diag_scripts/seaborn_diag.py @@ -0,0 +1,502 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +"""Create arbitrary Seaborn plots. + +Description +----------- +This diagnostic provides a high-level interface to Seaborn. For this, the input +data is arranged into a single :class:`pandas.DataFrame`, which is then used as +input for the Seaborn function defined by the option `seaborn_func`. + +Caveats +------- +All datasets of a given variable must have the same units (e.g., it is not +allowed to use datasets with units `K` and datasets with units `°C` for the +variable `tas`). + +Author +------ +Manuel Schlund (DLR, Germany) + +Configuration options in recipe +------------------------------- +add_ancillary_variables: bool, optional (default: False) + Add :meth:`~iris.cube.Cube.ancillary_variables` to the main data frame. + Note that this will assume that ancillary variables are identical across + cubes within a group (see option `groupby_facet`). This equality is not + checked! +add_aux_coords: bool, optional (default: False) + Add :attr:`~iris.cube.Cube.aux_coords` to the main data frame. Note that + this will assume that auxiliary coordinates are identical across cubes + within a group (see option `groupby_facet`). This equality is not checked! +add_cell_measures: bool, optional (default: False) + Add :meth:`~iris.cube.Cube.cell_measures` to the main data frame. Note that + this will assume that cell measures are identical across cubes within a + group (see option `groupby_facet`). This equality is not checked! +data_frame_ops: dict, optional + Perform additional operations on the main data frame. Allowed operations + are :meth:`pandas.DataFrame.query` (dict key `query`) and + :meth:`pandas.DataFrame.eval` (dict key `eval`). Operations are defined by + strings (dict values). Examples: ``{'query': 'latitude > 80', 'eval': + 'longitude = longitude - 180.0'}``. +dropna_kwargs: dict, optional + Optional keyword arguments for :meth:`pandas.DataFrame.dropna` to drop + missing values in the input data. 
If not given, do not drop NaNs. Note: + NaNs are dropped after potential `data_frame_ops`. +facets_as_columns: list of str, optional + Facets that will be added as columns to the main data frame. Values for + these facets must be identical across all datasets within a group (see + option `groupby_facet`). +groupby_facet: str, optional (default: 'alias') + Facet which is used to group input datasets when creating the main data + frame. All datasets within a group are expected to have the same index + after calling :func:`iris.pandas.as_data_frame` on them. These datasets + within a group will then get merged (combined along axis 1, i.e., columns) + into a single data frame per group. Finally, the data frames for all groups + are concatenated (combined along axis 0, i.e., rows) into one main data + frame. `groupby_facet` is also added as a column to this main data frame. +legend_title: str, optional (default: None) + Title for legend. If ``None``, Seaborn will determine the legend title (if + possible). +plot_object_methods: dict, optional + Execute methods of the object returned by the plotting function + (`seaborn_func`). This object will either be a + :class:`matplotlib.axes.Axes` (e.g., :func:`~seaborn.scatterplot`, + :func:`~seaborn.lineplot`), a :class:`seaborn.FacetGrid` (e.g., + :func:`~seaborn.relplot`, :func:`~seaborn.displot`), a + :class:`seaborn.JointGrid` (e.g., :func:`~seaborn.jointplot`), or a + :class:`seaborn.PairGrid` (e.g., :func:`~seaborn.pairplot`). Dictionary + keys are method names, dictionary values function arguments (use a + :obj:`dict` to specify keyword arguments). Example (for + :func:`~seaborn.relplot`): ``{'set': {'xlabel': 'X [km]'}, 'set_titles': + 'Model {col_name}'}``. +reset_index: bool, optional (default: False) + Put coordinate information of datasets into columns instead of (multi-) + indices. This avoids the deletion of coordinate information if different + groups of datasets have different dimensions but increases the memory + footprint of this diagnostic. +savefig_kwargs: dict, optional + Optional keyword arguments for :func:`matplotlib.pyplot.savefig`. By + default, uses ``bbox_inches: tight, dpi: 300, orientation: landscape``. +seaborn_func: str + Function used to plot the data. Must be a function of Seaborn. An overview + of Seaborn's plotting functions is given `here + <https://seaborn.pydata.org/tutorial/function_overview.html>`__. +seaborn_kwargs: dict, optional + Optional keyword arguments for the plotting function given by + `seaborn_func`. Must not include an argument called `data`. Example: + ``{'x': 'variable_1', 'y': 'variable_2', 'hue': 'coord_1'}``. Note: + variables (here: `variable_1` and `variable_2`) are identified by their + `variable_group` in the recipe, i.e., the keys that specify variable groups + in `variables`. +seaborn_settings: dict, optional + Options for :func:`seaborn.set_theme` (affects all plots). +suptitle: str or None, optional (default: None) + Suptitle for the plot (see :func:`matplotlib.pyplot.suptitle`). If + ``None``, do not create a suptitle. If the plot shows only a single panel, + use `plot_object_methods` with ``{'set': {'title': 'TITLE'}}`` instead. 
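+ +Example +------- +A minimal, illustrative set of options (the variable group names +`variable_1` and `variable_2` are placeholders): setting +``seaborn_func: scatterplot`` together with ``seaborn_kwargs: {'x': +'variable_1', 'y': 'variable_2', 'hue': 'alias'}`` plots `variable_2` +against `variable_1`, colored by the default grouping facet `alias`. 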
+ +""" +from __future__ import annotations + +import logging +from copy import deepcopy +from pathlib import Path +from pprint import pformat + +import iris +import iris.pandas +import matplotlib.pyplot as plt +import pandas as pd +import seaborn as sns +from matplotlib.colors import LogNorm, Normalize + +from esmvaltool.diag_scripts.shared import ( + ProvenanceLogger, + get_plot_filename, + group_metadata, + run_diagnostic, +) + +logger = logging.getLogger(Path(__file__).stem) + +# Use the new behavior of :func:`iris.pandas.as_data_frame` +iris.FUTURE.pandas_ndim = True + +# Save units of different variables +# Note: units must be unique across datasets of the same variable +UNITS: dict[str, str] = {} + + +def _create_plot( + plot_func: callable, + data_frame: pd.DataFrame, + cfg: dict, +) -> None: + """Create plot.""" + logger.debug( + "Using main data frame as input for plotting:\n%s", data_frame + ) + + # Plot + plot_kwargs = cfg['seaborn_kwargs'] + plot_func_str = cfg['seaborn_func'] + if 'data' in plot_kwargs: + raise ValueError("'data' is an invalid argument for 'seaborn_kwargs'") + logger.info( + "Creating plot with\nseaborn.%s(\n data=main_data_frame,\n%s\n)", + plot_func_str, + _get_str_from_kwargs(plot_kwargs), + ) + plot_obj = plot_func(data=data_frame, **plot_kwargs) + + # Adjust plot appearance + if cfg['plot_object_methods']: + for (func_name, func_args) in cfg['plot_object_methods'].items(): + if isinstance(func_args, dict): + logger.debug( + "Running\n%s.%s(\n%s\n)", + type(plot_obj).__name__, + func_name, + _get_str_from_kwargs(func_args), + ) + getattr(plot_obj, func_name)(**func_args) + else: + logger.debug( + "Running %s.%s(%r)", + type(plot_obj).__name__, + func_name, + func_args, + ) + getattr(plot_obj, func_name)(func_args) + if cfg['suptitle'] is not None: + logger.debug("Setting `suptitle='%s'`", cfg['suptitle']) + plt.suptitle(cfg['suptitle'], y=1.05) + if cfg['legend_title'] is not None: + _set_legend_title(plot_obj, cfg['legend_title']) + + # Save plot + plot_path = get_plot_filename(f"seaborn_{plot_func_str}", cfg) + plt.savefig(plot_path, **cfg['savefig_kwargs']) + logger.info("Wrote %s", plot_path) + plt.close() + + # Provenance tracking + caption = f"Seaborn {cfg['seaborn_func']} for one or more dataset(s)" + ancestors = [d['filename'] for d in cfg['input_data'].values()] + provenance_record = { + 'ancestors': ancestors, + 'authors': ['schlund_manuel'], + 'caption': caption, + } + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(plot_path, provenance_record) + + +def _get_grouped_data(cfg: dict) -> dict: + """Get grouped input data.""" + groupby_facet = cfg['groupby_facet'] + input_data = list(cfg['input_data'].values()) + + # Check if necessary facets are present + for dataset in input_data: + if groupby_facet not in dataset: + raise ValueError( + f"groupby_facet '{groupby_facet}' is not available for " + f"dataset {dataset['filename']}" + ) + for facet in cfg['facets_as_columns']: + if facet not in dataset: + raise ValueError( + f"Facet '{facet}' used for option 'facets_as_columns' is " + f"not available for dataset {dataset['filename']}" + ) + + # Group data accordingly + grouped_data = group_metadata( + input_data, + groupby_facet, + sort='filename', + ) + + return grouped_data + + +def _get_dataframe(cfg: dict) -> pd.DataFrame: + """Get main :class:`pandas.DataFrame` used as input for plotting. + + Note + ---- + Data is stored in long form, see also :func:`iris.pandas.as_data_frame`. 
+ + """ + logger.info( + "Grouping datasets by '%s' to create main data frame (data frames " + "are merged [combined along axis 1, i.e., columns] within groups, " + "then concatenated [combined along axis 0, i.e., rows] across groups)", + cfg['groupby_facet'], + ) + if cfg['add_aux_coords']: + logger.info("Adding aux_coords as columns") + if cfg['add_cell_measures']: + logger.info("Adding cell_measures as columns") + if cfg['add_ancillary_variables']: + logger.info("Adding ancillary_variables as columns") + if cfg['facets_as_columns']: + logger.info("Adding facets as columns: %s", cfg['facets_as_columns']) + + grouped_data = _get_grouped_data(cfg) + + # Merge data frames within groups + df_dict = {} + for (group, datasets) in grouped_data.items(): + logger.info("Processing group '%s'", group) + df_group = _get_df_for_group(cfg, group, datasets) + df_dict[group] = df_group + + # Concatenate data frames across groups and use dtype 'category' for facet + # columns to reduce memory usage and decrease computation times + groupby_facet = cfg['groupby_facet'] + df_main = pd.concat(df_dict.values(), ignore_index=cfg['reset_index']) + df_main = df_main.astype({ + f: 'category' for f in cfg['facets_as_columns'] + [groupby_facet] + }) + + logger.info("Successfully retrieved main data frame from input data") + logger.debug("Got main data frame:\n%s", df_main) + return df_main + + +def _get_df_for_group( + cfg: dict, + group: str, + datasets: list[dict], +) -> pd.DataFrame: + """Extract :class:`pandas.DataFrame` for a single group of datasets. + + This merges (i.e., combines along axis 1 = columns) all data frames of + individual datasets of a group. + + """ + df_group = pd.DataFrame() + facets_as_columns: dict[str, str] = {} + for dataset in datasets: + filename = dataset['filename'] + logger.info("Reading %s", filename) + cube = iris.load_cube(filename) + + # Update units + variable_group = dataset['variable_group'] + units = dataset['units'] + if variable_group in UNITS and UNITS[variable_group] != units: + raise ValueError( + f"Got duplicate units for variable '{variable_group}': " + f"'{units}' and '{UNITS[variable_group]}'" + ) + UNITS.setdefault(variable_group, units) + + # Get data frame for individual dataset with proper name + df_dataset = iris.pandas.as_data_frame( + cube, + add_aux_coords=cfg['add_aux_coords'], + add_cell_measures=cfg['add_cell_measures'], + add_ancillary_variables=cfg['add_ancillary_variables'], + ) + df_dataset = df_dataset.rename( + {cube.name(): variable_group}, axis='columns' + ) + + # Merge + if df_group.empty: + df_group = df_dataset + facets_as_columns = { + f: dataset[f] for f in cfg['facets_as_columns'] + } + else: + # Make sure that dimensional coordinates match across cubes within + # a group + if not df_group.index.equals(df_dataset.index): + raise ValueError( + f"Dimensions of cube {filename} differ from other cubes " + f"of group '{group}'. Cubes of that group:\n" + f"{pformat([d['filename'] for d in datasets])}" + ) + + # Make sure that facet values used as columns match across datasets + # within a cube + for (facet, val) in facets_as_columns.items(): + if dataset[facet] != val: + raise ValueError( + f"Facet value for facet '{facet}' (used by option " + f"'facets_as_columns') of dataset {filename} differs " + f"from value of other datasets of group '{group}': " + f"expected '{val}', got '{dataset[facet]}'. 
Datasets " + f"of that group:\n" + f"{pformat([d['filename'] for d in datasets])}" + ) + df_group = pd.merge( + df_group, + df_dataset, + left_index=True, + right_index=True, + sort=False, + suffixes=[None, '_DUPLICATE'], + ) + + # Assume that aux_coords, cell_measures, and ancillary_variables + # (if requested) are equal across cubes within the group. Only add + # them when they first appear. + df_group = df_group.filter(regex='^(?!.*_DUPLICATE)') + + # Move dimensional coordinates from (multi-) index into columns if + # requested + if cfg['reset_index']: + df_group = df_group.reset_index() + + # Add additional information as column and save the data frame + for (facet, val) in facets_as_columns.items(): + df_group[facet] = val + if cfg['groupby_facet'] not in df_group.columns: + df_group[cfg['groupby_facet']] = group + + return df_group + + +def _get_default_cfg(cfg: dict) -> dict: + """Get default options for configuration dictionary.""" + cfg = deepcopy(cfg) + + cfg.setdefault('add_ancillary_variables', False) + cfg.setdefault('add_aux_coords', False) + cfg.setdefault('add_cell_measures', False) + cfg.setdefault('data_frame_ops', {}) + cfg.setdefault('dropna_kwargs', {}) + cfg.setdefault('facets_as_columns', []) + cfg.setdefault('groupby_facet', 'alias') + cfg.setdefault('legend_title', None) + cfg.setdefault('plot_object_methods', {}) + cfg.setdefault('reset_index', False) + cfg.setdefault('savefig_kwargs', { + 'bbox_inches': 'tight', + 'dpi': 300, + 'orientation': 'landscape', + }) + cfg.setdefault('seaborn_kwargs', {}) + cfg.setdefault('seaborn_settings', {}) + cfg.setdefault('suptitle', None) + + return cfg + + +def _get_str_from_kwargs(kwargs, separator='\n', prefix=' '): + """Get overview string for kwargs.""" + return separator.join(f"{prefix}{k}={v!r}," for (k, v) in kwargs.items()) + + +def _get_plot_func(cfg: dict) -> callable: + """Get seaborn plot function.""" + if 'seaborn_func' not in cfg: + raise ValueError("Necessary option 'seaborn_func' missing") + if not hasattr(sns, cfg['seaborn_func']): + raise AttributeError( + f"Invalid seaborn_func '{cfg['seaborn_func']}' (must be a " + f"function of the module seaborn; an overview of seaborn plotting " + f"functions is given here: https://seaborn.pydata.org/tutorial/" + f"function_overview.html)" + ) + logger.info("Using plotting function seaborn.%s", cfg['seaborn_func']) + return getattr(sns, cfg['seaborn_func']) + + +def _modify_dataframe(data_frame: pd.DataFrame, cfg: dict) -> pd.DataFrame: + """Modify data frame according to the option ``data_frame_ops``.""" + allowed_funcs = ('query', 'eval') + + # data_frame_ops + for (func, expr) in cfg['data_frame_ops'].items(): + if func not in allowed_funcs: + raise ValueError( + f"Got invalid operation '{func}' for option 'data_frame_ops', " + f"expected one of {allowed_funcs}" + ) + op_str = f"'{func}' with argument '{expr}'" + logger.info("Modifying main data frame through operation %s", op_str) + data_frame = getattr(data_frame, func)(expr) + logger.debug( + "Main data frame after operation %s:\n%s", op_str, data_frame + ) + + # dropna_kwargs + if cfg['dropna_kwargs']: + logger.debug( + "Running\ndata_frame.dropna(\n%s\n)", + _get_str_from_kwargs(cfg['dropna_kwargs']), + ) + data_frame = data_frame.dropna(**cfg['dropna_kwargs']) + logger.debug("Main data frame after dropna \n%s", data_frame) + return data_frame + + +def _set_legend_title(plot_obj, legend_title: str) -> None: + """Set legend title.""" + if hasattr(plot_obj, 'get_legend'): # Axes + legend = plot_obj.get_legend() + 
+    elif hasattr(plot_obj, 'legend'):  # FacetGrid, PairGrid
+        legend = plot_obj.legend
+    else:
+        raise ValueError(
+            f"Cannot set legend title, `{type(plot_obj).__name__}` does not "
+            f"support legends"
+        )
+    if legend is None:
+        raise ValueError(
+            "Cannot set legend title, plot does not contain a legend"
+        )
+    logger.debug("Setting `legend_title='%s'`", legend_title)
+    legend.set_title(legend_title)
+
+
+def _validate_config(cfg: dict) -> dict:
+    """Validate configuration dictionary."""
+    cfg = deepcopy(cfg)
+
+    # seaborn_kwargs: hue_norm
+    if 'hue_norm' in cfg['seaborn_kwargs']:
+        hue_norm = cfg['seaborn_kwargs']['hue_norm']
+        if isinstance(hue_norm, str):
+            vmin = cfg['seaborn_kwargs'].pop('vmin', None)
+            vmax = cfg['seaborn_kwargs'].pop('vmax', None)
+            if hue_norm == 'linear':
+                hue_norm = Normalize(vmin=vmin, vmax=vmax)
+            elif hue_norm == 'log':
+                hue_norm = LogNorm(vmin=vmin, vmax=vmax)
+            else:
+                raise ValueError(
+                    f"String value for `hue_norm` can only be `linear` or "
+                    f"`log`, got `{hue_norm}`"
+                )
+            cfg['seaborn_kwargs']['hue_norm'] = hue_norm
+        if isinstance(hue_norm, list):
+            cfg['seaborn_kwargs']['hue_norm'] = tuple(hue_norm)
+
+    return cfg
+
+
+def main(cfg: dict) -> None:
+    """Run diagnostic."""
+    cfg = _get_default_cfg(cfg)
+    cfg = _validate_config(cfg)
+
+    sns.set_theme(**cfg['seaborn_settings'])
+    plot_func = _get_plot_func(cfg)
+
+    df_main = _get_dataframe(cfg)
+    df_main = _modify_dataframe(df_main, cfg)
+
+    _create_plot(plot_func, df_main, cfg)
+
+
+if __name__ == '__main__':
+
+    with run_diagnostic() as config:
+        main(config)
diff --git a/esmvaltool/diag_scripts/seaice/seaice_aux.ncl b/esmvaltool/diag_scripts/seaice/seaice_aux.ncl
new file mode 100644
index 0000000000..1887052428
--- /dev/null
+++ b/esmvaltool/diag_scripts/seaice/seaice_aux.ncl
@@ -0,0 +1,264 @@
+; #############################################################################
+; # EXTERNAL SEA ICE FUNCTIONS
+; #############################################################################
+; Please consider using or extending existing routines before adding new ones.
+; Check the header of each routine for documentation.
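+;
+; Example (a hypothetical call of sea_ice_area below, assuming sic, area and
+; arealat have already been read in; month "9" selects September):
+;
+;   sie = sea_ice_area(sic, area, arealat, False, "Arctic", \
+;                      1979, 2005, "9", "extent", False)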
+; +; Contents: +; function calc_yod +; function sea_ice_area +; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/latlon.ncl" + +; ############################################################################# +undef("sea_ice_area") +function sea_ice_area(sic[*][*][*]:numeric, + area[*][*]: numeric, + arealat: numeric, + l_irreg[1]: logical, + region[1]:string, + start_year[1]:integer, + end_year[1]:integer, + month[1]:string, + operation[1]:string, + fill_pole_hole[1]:logical) + +; Arguments: +; sic : sea ice concentration variable +; area : area variable +; arealat : array with the latitude coordinate of the area +; l_irreg : logical for irregular grids +; region : "Arctic" or "Antarctic" +; start_year : first year averaged time period +; end_year : last year of averaged time period +; month : [1, 12] = select month (e.g., "3" = Mar, "9" = Sep) +; "A" = annual mean +; "all" = no average, take all input +; operation : "area" or "extent" +; fill_pole_hole : True = fill northern polar hole with sic = 1 +; +; Return value: +; 1-d array (time) containing sea ice area or sea ice extent +; +; Description: +; Calculates sea ice area or extent from sea ice concentration +; +; Caveats +; +; Modification history: +; 20180226-senftleben_daniel: written. +; +local funcname, scriptname, verbosity, l_irreg, aux_vars, area_temp, date, \ + month_ind, sic_month, check_month, smonths, month_string, latstep, max_lat, \ + lat1D, actual_lat, data1D, i1D, i2D, dim_i2D, npts, ring, SIA_tmp, SIA_tmp2 +begin + + funcname = "sea_ice_area" + scriptname = "diag_scripts/seaice/seaice_aux.ncl" + enter_msg(scriptname, funcname) + + ; Check region and units + if (region.ne."Arctic" .and. region.ne."Antarctic") then + error_msg("f", scriptname, funcname, \ + "region can be either 'Arctic' or 'Antarctic'") + end if + if (sic@units.eq."%") then + sic = sic / 100. + sic@units = "1" + end if + + ; Fill pole hole + if (fill_pole_hole .and. region.eq."Arctic") then + if (.not.l_irreg) then + lat1D = ndtooned(conform(sic(0, :, :), arealat, 0)) + else + lat1D = ndtooned(arealat) ; already conformal by definition + end if + latstep = 0.1 + max_lat = 90. + + do itime = 0, dimsizes(sic&time) - 1 ; hole depends on time + actual_lat = max_lat + sic1D = ndtooned(sic(itime, :, :)) + if (all(ismissing(sic1D))) then ; time step missing + continue + end if + do while (True) ; will break when hole is filled + idx1D = ind(lat1D.ge.actual_lat .and. lat1D.lt.actual_lat + latstep) + if (.not. all(ismissing(idx1D))) then + idx2D = ind_resolve(ind(lat1D.ge.actual_lat), dimsizes(sic(0, :, :))) + dim_idx2D = dimsizes(idx2D) + npts = dim_idx2D(0) + if (any(ismissing(sic1D(idx1D)))) then + do n = 0, npts - 1 + ring = sic(itime, idx2D(n, 0), idx2D(n, 1)) + sic(itime, idx2D(n, 0), idx2D(n, 1)) = \ + where(ismissing(ring), 1., ring) + delete(ring) + end do + else ; out of the hole! 
+            delete([/idx1D, idx2D, dim_idx2D, npts/])
+            break
+          end if
+          delete([/idx2D, dim_idx2D, npts/])
+        end if
+        delete([/idx1D/])
+        actual_lat = actual_lat - latstep
+      end do  ; while
+      delete(sic1D)
+    end do  ; itime
+  end if
+
+  ; Calculate time coordinate
+  date = cd_calendar(sic&time, 0)  ; Create calendar array
+  if (month.eq."A") then
+    sic_month = time_operations(sic, start_year, end_year, \
+                                "average", "yearly", True)
+    sic_month!0 = "time"
+    month_string = "Annual mean "
+  elseif (month.eq."all") then
+    sic_month = sic
+    month_string = ""
+  else  ; "month" must be a number between 1 and 12
+    check_month = tostring(ispan(1, 12, 1))
+    if (.not.any(check_month.eq.month)) then
+      error_msg("f", scriptname, funcname, "month must be between 1 and 12")
+    end if
+    smonths = (/"January", "February", "March", "April", "May", \
+                "June", "July", "August", "September", "October", \
+                "November", "December"/)
+    month_string = smonths(ind(check_month.eq.month)) + " "
+    month_ind = ind(date(:, 1).eq.toint(month))
+    sic_month = sic(month_ind, :, :)
+  end if
+
+  ; Calculate sea ice area or extent
+  if (operation.eq."extent") then
+    sic_month = where(sic_month.ge.0.15, 1., 0.)  ; make sic binary for SIE
+  end if
+
+  if (l_irreg) then
+    if (region.eq."Arctic") then
+      area_tmp = where(arealat.gt.0., area * 1.e-12, 0.)
+    else
+      area_tmp = where(arealat.lt.0., area * 1.e-12, 0.)
+    end if
+    SIA_tmp = conform(sic_month, area_tmp, (/1, 2/)) * sic_month
+    copy_VarMeta(sic_month, SIA_tmp)
+    SIA_tmp2 = dim_sum_Wrap(SIA_tmp)
+    SIA = dim_sum_Wrap(SIA_tmp2)
+  else
+    area_tmp = area / 1.e12  ; Million square km
+    SIA_tmp = conform(sic_month, area_tmp, (/1, 2/)) * sic_month
+    copy_VarMeta(sic_month, SIA_tmp)
+    if (region.eq."Arctic") then
+      SIA = area_operations(SIA_tmp, 0, 90, 0, 360, "sum", False)
+    else
+      SIA = area_operations(SIA_tmp, -90, 0, 0, 360, "sum", False)
+    end if
+  end if
+
+  SIA@units = "10~S~6~N~ km~S~2~N~"
+  if (operation.eq."extent") then
+    SIA@long_name = month_string + region + " Sea Ice Extent"
+    SIA@standard_name = "sea_ice_extent"
+  elseif (operation.eq."area") then
+    SIA@long_name = month_string + region + " Sea Ice Area"
+    SIA@standard_name = "sea_ice_area"
+  end if
+
+  return(SIA)
+
+end
+
+; #############################################################################
+undef("calc_yod")
+function calc_yod(SIE:float,
+                  threshold[1]:float,
+                  cons_years[1]:integer)
+;
+; Arguments:
+;   SIE: sea ice extent (one value per year), must be of type float
+;   threshold: threshold value of sea ice extent in million km^2 (usually 1.)
+;   cons_years: number of consecutive years in which SIE must be < threshold
+;
+; Return value:
+;   first year of cons_years years in which SIE < threshold, type of SIE
+;
+; Description:
+;   Calculates the year of near disappearance of (Arctic) sea ice (YOD). YOD
+;   is defined as the first year of five consecutive years in which September
+;   Arctic sea ice extent is below 1 million square kilometres. Returns a
+;   missing value if no YOD occurs in the given timeseries.
+;   This function allows the user to specify a different threshold and
+;   number of consecutive years.
+;
+; Caveats
+;   (September Arctic) sea ice extent is expected as input and must be
+;   calculated from sic beforehand. Input must be one-dimensional with
+;   coordinate 'time' or 'year'. If the function returns a missing value, it
+;   is of type float. Make sure that your script recognizes the output
+;   correctly as missing.
+;
+; Modification history:
+;   20180827-senftleben_daniel: written.
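+;
+; Example (hypothetical): for an annual series of September Arctic sea ice
+; extent, the standard YOD definition corresponds to
+;
+;   yod = calc_yod(SIE, 1., 5)
+;
+; i.e. the first year of the first run of 5 consecutive years in which the
+; 5-year running mean of SIE is below 1 million km^2.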
+; +local funcname, scriptname, verbosity, SIE_smoo, a, itime, iyod + +begin + + funcname = "calc_yod" + scriptname = "diag_scripts/seaice/seaice_aux.ncl" + enter_msg(scriptname, funcname) + + ; check input + if (SIE!0.ne."year") then + if (SIE!0.eq."time") then ; need 'year' + utc_date = cd_calendar(SIE&time, 0) + SIE!0 = "year" + SIE&year = utc_date(:, 0) + delete(utc_date) + else + error_msg("f", scriptname, "", \ + "Input array must have time coordinate" + \ + " 'time' or 'year'") + end if + end if + + ; calculate 5-year running mean of input data + SIE_smoo = runave(SIE, 5, 0) + copy_VarMeta(SIE, SIE_smoo) + copy_VarCoords(SIE, SIE_smoo) + + ; calculate YOD + a = 0 + do itime = 0, dimsizes(SIE_smoo&year) - 1 + if (ismissing(SIE_smoo(itime))) then + a = 0 + continue + end if + if (SIE_smoo(itime).lt.threshold) then + a = a + 1 + if (a.eq.cons_years) then + iyod = itime - cons_years + 1 + break + else + continue + end if + else + a = 0 + end if + end do + + ; return YOD + if (isdefined("iyod")) then + return_value = tofloat(SIE_smoo&year(iyod)) + else + return_value = SIE@_FillValue + end if + + return(return_value) +end diff --git a/esmvaltool/diag_scripts/seaice/seaice_ecs.ncl b/esmvaltool/diag_scripts/seaice/seaice_ecs.ncl new file mode 100644 index 0000000000..e7b84c4efd --- /dev/null +++ b/esmvaltool/diag_scripts/seaice/seaice_ecs.ncl @@ -0,0 +1,449 @@ +; ############################################################################# +; EMERGENT CONSTRAINTS FOR SEA ICE +; Author: Senftleben, Daniel (DLR, Germany) +; CRESCENDO project +; ############################################################################# +; Description +; - Produces a scatter plot following Massonnet et al., 2012: +; historical Sept sea ice extent (SSIE) trend vs hist SSIE long-term mean +; - Produces two scatter plots following IPCC AR5 Fig. 12.31 a), d): +; - historical SSIE mean vs 1st year of disappearance (YOD) RCP8.5 +; - historical SSIE trend vs YOD RCP8.5 +; +; Required diag_script_info attributes +; hist_exp: name of historical experiment (string) +; month: selected month (1, 2, ..., 12) or annual mean ("A") +; rcp_exp: name of RCP experiment (string) +; region: region to be analyzed ( "Arctic" or "Antarctic") +; +; Optional diag_script_info attributes +; fill_pole_hole: fill observational hole at North pole (default: False) +; styleset: color style (e.g. "CMIP5") +; +; Optional variable_info attributes (variable specific) +; reference_dataset: reference dataset +; +; Caveats +; - CMOR units expected: areacello in m2, sic in % +; - only 1 reference dataset supported +; +; Modification history +; 20211006-lauer_axel: removed write_plots +; 20191028-lauer_axel: rewritten for v2.0 +; 20161212-senftleben_daniel: implemented. 
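+;
+; Example recipe snippet (hypothetical attribute values):
+;   scripts:
+;     ecs:
+;       script: seaice/seaice_ecs.ncl
+;       hist_exp: historical
+;       rcp_exp: rcp85
+;       month: "9"
+;       region: "Arctic"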
+; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" +load "$diag_scripts/../interface_scripts/logging.ncl" + +load "$diag_scripts/seaice/seaice_aux.ncl" + +load "$diag_scripts/shared/plot/aux_plotting.ncl" +load "$diag_scripts/shared/plot/scatterplot.ncl" + +begin + + enter_msg(DIAG_SCRIPT, "") + + ; Get metadata items + att = True + att@mip = "OImon" + info = select_metadata_by_atts(input_file_info, att) ; variable + var0 = info[0]@short_name + datasets = metadata_att_as_array(info, "dataset") + ensembles = metadata_att_as_array(info, "ensemble") + tmpexps = metadata_att_as_array(info, "exp") + exps = where(.not.ismissing(tmpexps), tmpexps, "") + delete(tmpexps) + dim_MOD = dimsizes(datasets) + att@mip = "fx" + areainfo = select_metadata_by_atts(input_file_info, att) ; area + areadatasets = metadata_att_as_array(areainfo, "dataset") + delete(att) + + ; Check required diag_script_info attributes + exit_if_missing_atts(diag_script_info, (/"hist_exp", "rcp_exp", "month", \ + "region"/)) + + hist_exp = diag_script_info@hist_exp + rcp_exp = diag_script_info@rcp_exp + + ; Check for reference dataset + if (isatt(info[0], "reference_dataset")) then + refname = info[0]@reference_dataset + else + refname = "" + end if + + ref_ind = -1 ; set to invalid value + + ; if attribute is present, use it so correlations can be calculated + if (refname.ne."") then + ; set reference model + ref_ind = ind(datasets .eq. refname) + if (ismissing(ref_ind)) then + log_info("warning: reference dataset (" + refname + ") not found.") + ref_ind = -1 + end if + end if + + ; Check fill_pole_hole option + if (.not.isatt(diag_script_info, "fill_pole_hole")) then + diag_script_info@fill_pole_hole = False + else + if (diag_script_info@fill_pole_hole) then + if (diag_script_info@region.eq."Antarctic") then + diag_script_info@fill_pole_hole = False + error_msg("f", DIAG_SCRIPT, "", "No pole hole at Antarctic " + \ + "- setting diag_script_info@fill_pole_hole to False") + end if + end if + end if + + ; Create output plot directory + plot_dir = config_user_info@plot_dir + system("mkdir -p " + plot_dir) + + ; Create output directory (work) + system("mkdir -p " + config_user_info@work_dir) + + ; Plot file type + file_type = config_user_info@output_file_type + + ; Determine time range + start_years = metadata_att_as_array(info, "start_year") + end_years = metadata_att_as_array(info, "end_year") + + ; Get experiment indices + + ; historical experiments (with no reference dataset - if specified) + idcs_hist_tmp = ind(exps.eq.hist_exp) + itmp = ind(idcs_hist_tmp.ne.ref_ind) + idcs_hist = idcs_hist_tmp(itmp) + + delete([/itmp, idcs_hist_tmp/]) + idcs_rcp_tmp = ind(exps.eq.rcp_exp) + idcs_rcp = new(dimsizes(idcs_rcp_tmp), integer) + + if (dimsizes(idcs_hist).ne.dimsizes(idcs_rcp_tmp)) then + error_msg("fatal", DIAG_SCRIPT, "", "need same number of " \ + + hist_exp + " and " + rcp_exp + " simulations") + end if + + ; find indices of RCP experiments corresponding to historical experiments + ; (in case both lists of datasets are not in the same order) + + datasets_rcp = datasets(idcs_rcp_tmp) ; names of RCP datasets + datasets_hist = datasets(idcs_hist) ; names of historical datasets + do imod = 0, dimsizes(idcs_hist) - 1 + idcs_rcp(imod) = idcs_rcp_tmp(ind(datasets_rcp.eq.datasets_hist(imod))) + end do + + delete([/datasets_rcp, datasets_hist, idcs_rcp_tmp/]) + + if (any(ismissing(idcs_rcp))) then + error_msg("fatal", DIAG_SCRIPT, "", "list of " \ + + 
hist_exp + " simulations does not contain all datasets " \ + + "specified in list of " + rcp_exp + " simulations") + end if + + hist_ind = 0 ; counter for historical simulations + rcp_ind = 0 ; counter for RCP simulations + + ; Set up time arrays + time_hist = ispan(max(toint(start_years(idcs_hist))), \ + min(toint(end_years(idcs_hist))), 1) + time_rcp = ispan(max(toint(start_years(idcs_rcp))), \ + min(toint(end_years(idcs_rcp))), 1) + + ; Initialize arrays collecting data + SSIE_MEAN = new((/dimsizes(idcs_hist)/), float) ; long-term mean + SSIE_MEAN!0 = "datasets" + SSIE_MEAN&datasets = datasets(idcs_hist) + "_" + ensembles(idcs_hist) + SSIE_TRND = SSIE_MEAN ; trend of September Arctic sea ice extent + SSIE_TRND@units = "10~S~3~N~ km~S~2~N~ / 10 yr" + YOD = new((/dimsizes(idcs_rcp)/), float) ; year of disappearance + YOD!0 = "datasets" + YOD&datasets = datasets(idcs_rcp) + "_" + ensembles(idcs_rcp) + + ; Loop over datasets + do imod = 0, dim_MOD - 1 + + log_info("Processing dataset " + datasets(imod)) + + sic = read_data(info[imod]) + + ; Check grid type + l_irreg = True + if (sic!1.eq."lat" .and. sic!2.eq."lon") then + if (dimsizes(dimsizes(sic&lat)).eq.1 .and. \ + dimsizes(dimsizes(sic&lon)).eq.1) then + l_irreg = False + end if + end if + + ; Look for area variable + if (any(datasets(imod).eq.areadatasets)) then + + ; Read areacello + log_info("Using variable areacello for the area") + area = read_data(areainfo[imod]) + locinfo = areainfo[imod] + locinfo@short_name = "lat" ; read auxiliary latitude coordinate + arealat = read_data(locinfo) + delete(locinfo) + + ; Check grid consistency + if (.not.isconform(sic(0, :, :), area)) then + if (l_irreg) then + error_msg("f", DIAG_SCRIPT, "", "areacello not conformal") + else + delete(area) + delete(arealat) + log_info("areacello has inconsistent dimensions, " + \ + "calculating area for a regular grid instead") + area = map_area(sic&lat, sic&lon) + arealat = area&lat + end if + end if + + else + + if (.not.l_irreg) then + log_info("Calculating area for a regular grid") + area = map_area(sic&lat, sic&lon) + arealat = area&lat + else + error_msg("f", DIAG_SCRIPT, "", "cannot calculate area") + end if + + end if + + ; Calcluate sea ice extent + SSIE = sea_ice_area(sic, area, arealat, l_irreg, \ + diag_script_info@region, \ + start_years(imod), end_years(imod), \ + diag_script_info@month, "extent", \ + diag_script_info@fill_pole_hole) + + if ((exps(imod).eq.hist_exp) .or. (imod.eq.ref_ind)) then + ; long-term mean of sea ice extent [10^6 km^2] + mean = dim_avg_Wrap(SSIE) + ; trend of sea ice extent [10^3 km^2 decade^-1] + trend = regline(time_hist, SSIE * 1.e3) * 10. + if (imod.eq.ref_ind) then + ssie_mean_ref = mean + ssie_trnd_ref = trend + else + SSIE_MEAN(hist_ind) = mean + SSIE_TRND(hist_ind) = trend + hist_ind = hist_ind + 1 + end if + end if + + ; calculate first year of disappearance (YOD) + ; = 1st year when 5-yr smoothed SSIE drops below 10^6 km^2 + ; for more than 5 consecutive years + if (exps(imod).eq.rcp_exp) then + YOD(rcp_ind) = calc_yod(SSIE, 1., 5) + rcp_ind = rcp_ind + 1 + end if + + ; Clean up + delete([/sic, area, arealat, SSIE/]) + end do ; imod + + ; =========================================================================== + ; plotting section + ; =========================================================================== + + smonths = (/"January", "February", "March", "April", \ + "May", "June", "July", "August", \ + "September", "October", "November", "December"/) + + if (diag_script_info@month .eq. 
"A") then + i2month = "annual" + else + i2month = smonths(toint(diag_script_info@month) - 1) + end if + + ; ----------------------------------------------- + ; scatter plot historical MEAN vs historical TRND + ; ----------------------------------------------- + + outf = "SSIE-MEAN_vs_SSIE-TRND_" + var0 + "_extend_" \ + + diag_script_info@region + "_" + i2month + "_" + min(time_hist) \ + + "-" + max(time_hist) + + toplot = (/SSIE_MEAN, SSIE_TRND/) + toplot!0 = "datasets" ; Required by plot routine, becomes axes labels + toplot&datasets = (/i2month + " " + diag_script_info@region \ + + " SSIE averaged~C~over " \ + + min(time_hist) + "-" + max(time_hist) \ + + " [" + SSIE_MEAN@units + "]", \ + "Trend in " + i2month + " " + diag_script_info@region \ + + " SSIE~C~over " \ + + min(time_hist) + "-" + max(time_hist) \ + + " [" + SSIE_TRND@units + "]"/) + caption1 = toplot&datasets(0) + " vs " + toplot&datasets(1) + "." + wks_MEAN_TRND = get_wks(0, DIAG_SCRIPT, outf) + + min_mean = min((/min(SSIE_MEAN), ssie_mean_ref/)) + max_mean = max((/max(SSIE_MEAN), ssie_mean_ref/)) + min_trnd = min((/min(SSIE_TRND), ssie_trnd_ref/)) + max_trnd = max((/max(SSIE_TRND), ssie_trnd_ref/)) + + toplot@res_tiMainString = diag_script_info@region + " Sea Ice Extent (SSIE)" + toplot@res_gsnCenterString = "" + toplot@res_trXMinF = decimalPlaces((min_mean - 1), 0, False) + toplot@res_trXMaxF = decimalPlaces((max_mean + 1), 0, False) + toplot@res_trYMinF = decimalPlaces((min_trnd - 100), 0, False) + toplot@res_trYMaxF = decimalPlaces((max_trnd + 100), 0, False) + toplot@res_xyMarker = 16 + colors = project_style(info, diag_script_info, "colors") + toplot@res_xyMarkerColors = colors + plot_MEAN_TRND = scatterplot(wks_MEAN_TRND, toplot, var0, False, False, info) + + res = True + res@gsLineColor = (/0.75, 0.75, 0.75/) + res@gsLineThicknessF = 4.0 + res@tfPolyDrawOrder = "Draw" + + x = (/ssie_mean_ref, ssie_mean_ref/) + y = (/toplot@res_trYMinF, toplot@res_trYMaxF/) + ref0 = gsn_add_polyline(wks_MEAN_TRND, plot_MEAN_TRND, x, y, res) + + x = (/toplot@res_trXMinF, toplot@res_trXMaxF/) + y = (/ssie_trnd_ref, ssie_trnd_ref/) + ref1 = gsn_add_polyline(wks_MEAN_TRND, plot_MEAN_TRND, x, y, res) + + delete([/x, y/]) + + draw(plot_MEAN_TRND) + frame(wks_MEAN_TRND) + + ; --------------------------------------- + ; scatter plot historical MEAN vs RCP YOD + ; --------------------------------------- + + outf2 = "SSIE-MEAN_vs_YOD_" + var0 + "_extend_" \ + + diag_script_info@region + "_" + i2month + "_" + min(time_hist) \ + + "-" + max(time_rcp) + + wks_MEAN_YOD = get_wks(0, DIAG_SCRIPT, outf2) + + ; overwrite array --> retain most attributes + toplot = (/SSIE_MEAN, YOD/) + toplot&datasets = (/i2month + " " + diag_script_info@region \ + + " SSIE averaged~C~over " \ + + min(time_hist) + "-" + max(time_hist) \ + + " [" + SSIE_MEAN@units + "]", \ + "First year of near disappearance"/) + caption2 = toplot&datasets(0) + " vs " + toplot&datasets(1) + "." + toplot@res_gsnCenterString = "" + toplot@res_trYMinF = 2000. + toplot@res_trYMaxF = 2100. 
+ plot_MEAN_YOD = scatterplot(wks_MEAN_YOD, toplot, var0, False, False, info) + + ; reference datasets (observations) + x = (/ssie_mean_ref, ssie_mean_ref/) + y = (/toplot@res_trYMinF, toplot@res_trYMaxF/) + ref2 = gsn_add_polyline(wks_MEAN_YOD, plot_MEAN_YOD, x, y, res) + delete(x) + + draw(plot_MEAN_YOD) + frame(wks_MEAN_YOD) + + ; --------------------------------------- + ; scatter plot historical TRND vs RCP YOD + ; --------------------------------------- + + toplot = (/SSIE_TRND, YOD/) + toplot&datasets = (/"Trend in " + i2month + " " + diag_script_info@region \ + + " SSIE~C~over " \ + + min(time_hist) + "-" + max(time_hist) \ + + " [" + SSIE_TRND@units + "]", \ + "First year of near disappearance"/) + caption3 = toplot&datasets(0) + " vs " + toplot&datasets(1) + "." + + outf3 = "SSIE-TRND_vs_YOD_" + var0 + "_extend_" \ + + diag_script_info@region + "_" + i2month + "_" \ + + min(time_hist) + "-" + max(time_rcp) + + wks_TRND_YOD = get_wks(0, DIAG_SCRIPT, outf3) + toplot@res_trXMinF = decimalPlaces((min_trnd - 100), 0, False) + toplot@res_trXMaxF = decimalPlaces((max_trnd + 100), 0, False) + plot_TRND_YOD = scatterplot(wks_TRND_YOD, toplot, var0, False, False, info) + + ; reference datasets (observations) + x = (/ssie_trnd_ref, ssie_trnd_ref/) + ref3 = gsn_add_polyline(wks_TRND_YOD, plot_TRND_YOD, x, y, res) + delete([/x, y, res/]) + + draw(plot_TRND_YOD) + frame(wks_TRND_YOD) + + ; =========================================================================== + ; Provenance + + if (diag_script_info@region.eq."Arctic") then + domain = "nh" + else if (diag_script_info@region.eq."Antarctic") then + domain = "sh" + end if + end if + + infiles = metadata_att_as_array(info, "filename") + infiles_hist = infiles(idcs_hist) + infiles_rcp = infiles(idcs_rcp) + + ; -------------------------------------------------------------- + + SSIE_MEAN@var = "SSIE_mean" + SSIE_MEAN@diag_script = DIAG_SCRIPT + SSIE_TRND@var = "SSIE_trnd" + SSIE_TRND@diag_script = DIAG_SCRIPT + delete(SSIE_TRND&datasets@_FillValue) + outf = config_user_info@work_dir + outf + ".nc" + + nc_ext = ncdf_write(SSIE_MEAN, outf) + outf@existing = "append" + nc_ext = ncdf_write(SSIE_TRND, outf) + + log_provenance(nc_ext, wks_MEAN_TRND@fullname, caption1, \ + (/"mean", "trend"/), domain, "scatter", \ + (/"senftleben_daniel", "lauer_axel"/), \ + "massonnet12tc", infiles_hist) + + ; -------------------------------------------------------------- + + YOD@var = "yod" + YOD@diag_script = DIAG_SCRIPT + delete(YOD&datasets@_FillValue) + outf2 = config_user_info@work_dir + outf2 + ".nc" + + nc_ext2 = ncdf_write(SSIE_MEAN, outf2) + outf2@existing = "append" + nc_ext2 = ncdf_write(YOD, outf2) + + log_provenance(nc_ext2, wks_MEAN_YOD@fullname, caption2, \ + (/"mean", "other"/), domain, "scatter", \ + (/"senftleben_daniel", "lauer_axel"/), \ + "", infiles) + + ; -------------------------------------------------------------- + + outf3 = config_user_info@work_dir + outf3 + ".nc" + + nc_ext3 = ncdf_write(SSIE_TRND, outf3) + outf3@existing = "append" + nc_ext3 = ncdf_write(YOD, outf3) + + log_provenance(nc_ext3, wks_TRND_YOD@fullname, caption3, \ + (/"trend", "other"/), domain, "scatter", \ + (/"senftleben_daniel", "lauer_axel"/), \ + "", infiles) + + leave_msg(DIAG_SCRIPT, "") + +end diff --git a/esmvaltool/diag_scripts/seaice/seaice_trends.ncl b/esmvaltool/diag_scripts/seaice/seaice_trends.ncl new file mode 100644 index 0000000000..f654365606 --- /dev/null +++ b/esmvaltool/diag_scripts/seaice/seaice_trends.ncl @@ -0,0 +1,302 @@ +; 
#############################################################################
+; Histograms of sea ice trend distribution (CMIP5): Figure 9.24 of IPCC AR5
+; Author: Senftleben, Daniel (DLR, Germany)
+; ESMVal project
+; #############################################################################
+; Description
+;   Creates histograms of the distribution of sea ice extent trends across
+;   CMIP5 models, following Figure 9.24 of IPCC AR5.
+;
+; Required diag_script_info attributes (diagnostics specific)
+;   month: selected month (1, 2, ..., 12) or annual mean ("A")
+;   region: region to be analyzed ("Arctic" or "Antarctic")
+;
+; Optional diag_script_info attributes (diagnostic specific)
+;   fill_pole_hole: fill observational hole at North pole (default: False)
+;   ref_model: array of references plotted as vertical lines
+;
+; Required variable_info attributes (variable specific)
+;
+; Caveats
+;   CMOR units expected: areacello in m2, sic in %
+;
+; Modification history
+;   20211006-lauer_axel: removed write_plots
+;   20191025-lauer_axel: rewritten for v2.0
+;   20170619-senftleben_daniel: written.
+;
+; #############################################################################
+
+load "$diag_scripts/../interface_scripts/interface.ncl"
+load "$diag_scripts/../interface_scripts/logging.ncl"
+
+load "$diag_scripts/seaice/seaice_aux.ncl"
+
+load "$diag_scripts/shared/plot/aux_plotting.ncl"
+
+begin
+
+  enter_msg(DIAG_SCRIPT, "")
+
+  ; Get metadata items
+  att = True
+  att@mip = "OImon"
+  info = select_metadata_by_atts(input_file_info, att)  ; variable
+  var0 = info[0]@short_name
+  datasets = metadata_att_as_array(info, "dataset")
+  ensembles = metadata_att_as_array(info, "ensemble")
+  exps = metadata_att_as_array(info, "exp")
+  dim_MOD = dimsizes(datasets)
+  att@mip = "fx"
+  areainfo = select_metadata_by_atts(input_file_info, att)  ; area
+  areadatasets = metadata_att_as_array(areainfo, "dataset")
+  delete(att)
+
+  ; Check required diag_script_info attributes
+  exit_if_missing_atts(diag_script_info, (/"month", "region"/))
+
+  ; Check for reference dataset
+  if (isatt(info[0], "reference_dataset")) then
+    ref_models = info[0]@reference_dataset
+    rcp_tag = ""
+    dim_REF = dimsizes(ref_models)
+  else
+    ref_models = ""
+    rcp_tag = ", " + exps(0)
+    dim_REF = 0
+  end if
+
+  ; Check fill_pole_hole option
+  if (.not.isatt(diag_script_info, "fill_pole_hole")) then
+    diag_script_info@fill_pole_hole = False
+  else
+    if (diag_script_info@fill_pole_hole) then
+      if (diag_script_info@region.eq."Antarctic") then
+        diag_script_info@fill_pole_hole = False
+        error_msg("w", DIAG_SCRIPT, "", "No pole hole at Antarctic " + \
+                  "- setting diag_script_info@fill_pole_hole to False")
+      end if
+    end if
+  end if
+
+  ; Create output plot directory
+  plot_dir = config_user_info@plot_dir
+  system("mkdir -p " + plot_dir)
+
+  ; Create output directory (work)
+  system("mkdir -p " + config_user_info@work_dir)
+
+  ; Plot file type
+  file_type = config_user_info@output_file_type
+
+  ; Create arrays to collect SSIE mean and trend from each model
+  SSIE_trnd = new(dim_MOD - dim_REF, float)
+  SSIE_trnd@units = "million sq km / decade"
+  if (dim_REF .gt. 0) then
+    SSIE_trnd_ref = new(dim_REF, float)
+    SSIE_trnd_ref@units = "million sq km / decade"
+  end if
+
+  ; Determine time range
+  start_years = metadata_att_as_array(info, "start_year")
+  start_year = min(start_years)
+  end_years = metadata_att_as_array(info, "end_year")
+  end_year = max(end_years)
+  all_years = ispan(start_year, end_year, 1)
+
+  ; Strings for caption and plot title
+
+  year0 = tostring(start_year)
+  if (start_year .ne. max(start_years)) then
+    year0 = year0 + "..." + max(start_years)
+  end if
+
+  year1 = tostring(end_year)
+  if (end_year .ne. min(end_years)) then
+    year1 = min(end_years) + "..." + year1
+  end if
+
+  ; Set up counter(s)
+  ref_count = 0
+
+  ; Loop over datasets
+  do imod = 0, dim_MOD - 1
+
+    log_info("Processing dataset " + datasets(imod))
+
+    sic = read_data(info[imod])
+
+    ; Check grid type
+    l_irreg = True
+    if (sic!1.eq."lat" .and. sic!2.eq."lon") then
+      if (dimsizes(dimsizes(sic&lat)).eq.1 .and. \
+          dimsizes(dimsizes(sic&lon)).eq.1) then
+        l_irreg = False
+      end if
+    end if
+
+    ; Look for area variable
+    if (any(datasets(imod).eq.areadatasets)) then
+
+      ; Read areacello
+      log_info("Using variable areacello for the area")
+      area = read_data(areainfo[imod])
+      locinfo = areainfo[imod]
+      locinfo@short_name = "lat"  ; read auxiliary latitude coordinate
+      arealat = read_data(locinfo)
+      delete(locinfo)
+
+      ; Check grid consistency
+      if (.not.isconform(sic(0, :, :), area)) then
+        if (l_irreg) then
+          error_msg("f", DIAG_SCRIPT, "", "areacello not conformal")
+        else
+          delete(area)
+          delete(arealat)
+          log_info("areacello has inconsistent dimensions, " + \
+                   "calculating area for a regular grid instead")
+          area = map_area(sic&lat, sic&lon)
+          arealat = area&lat
+        end if
+      end if
+
+    else
+
+      if (.not.l_irreg) then
+        log_info("Calculating area for a regular grid")
+        area = map_area(sic&lat, sic&lon)
+        arealat = area&lat
+      else
+        error_msg("f", DIAG_SCRIPT, "", "cannot calculate area")
+      end if
+
+    end if
+
+    ; Calculate sea ice extent
+    SSIE = sea_ice_area(sic, area, arealat, l_irreg, \
+                        diag_script_info@region, \
+                        start_years(imod), end_years(imod), \
+                        diag_script_info@month, "extent", \
+                        diag_script_info@fill_pole_hole)
+
+    ; Calculate SSIE trend
+    trend_tmp = dtrend(SSIE, True)
+    if (dim_REF .gt. 0) .and. \
+       any(ref_models.eq.datasets(imod)) then
+      SSIE_trnd_ref(ref_count) = trend_tmp@slope * 10.  ; per decade
+      ref_count = ref_count + 1
+    else
+      SSIE_trnd(imod - ref_count) = trend_tmp@slope * 10.  ; per decade
+      log_debug("SSIE trend: " + SSIE_trnd(imod - ref_count))
+    end if
+
+    ; Clean up
+    delete([/sic, area, arealat, SSIE, trend_tmp/])
+
+  end do  ; imod
+
+  ; ===========================================================================
+  ; Plot histogram
+
+  smonths = (/"January", "February", "March", "April", \
+              "May", "June", "July", "August", \
+              "September", "October", "November", "December"/)
+
+  if (diag_script_info@month .eq. "A") then
+    i2month = "annual"
+  else
+    i2month = smonths(toint(diag_script_info@month) - 1)
+  end if
+
+  out_ext = "trend_" + var0 + "_extend_" \
+    + diag_script_info@region + "_" + i2month + "_histogram"
+
+  wks = get_wks("dummy_for_wks", DIAG_SCRIPT, out_ext)
+
+  res = True
+  res@tiMainString = i2month + " " + diag_script_info@region \
+    + " sea ice extent trends " + year0 + "-" + year1 + rcp_tag
+  res@tiXAxisString = "10~S~6~N~ km~S~2~N~ per decade"
+  res@tiYAxisString = "Number of models"
+  xAxis = fspan(-1.6, 0.2, 19)
+  res@gsnHistogramBinIntervals = xAxis
+  res@tmXBLabelAngleF = 265
+  res@gsnHistogramBarWidthPercent = 100.
+  res@gsFillColor = "dodgerblue3"
+  res@gsnFrame = False
+  res@gsnDraw = False
+  plot = gsn_histogram(wks, SSIE_trnd, res)
+
+  ; Add reference data as vertical lines
+  if (dim_REF .gt. 0) then
+    getvalues plot  ; Get plot parameters
+      "trYMinF" : ymin
+      "trYMaxF" : ymax
+    end getvalues
+    xmin = min(xAxis)
+    xmax = max(xAxis)
+    res_ref = True
+    res_ref@gsLineThicknessF = 6.
+    ref_colors = (/"firebrick1", "black", "green2", "gold"/)
+    do iref = 0, dim_REF - 1
+      res_ref@gsLineColor = ref_colors(iref)
+      ref_val = SSIE_trnd_ref(iref)
+      x = (ref_val - xmin) / (xmax - xmin)
+      if (iref.eq.0) then
+        ref_line0 = gsn_add_polyline(wks, plot, (/x, x/), \
+                                     (/ymin, ymax/), res_ref)
+      else if (iref.eq.1) then
+        ref_line1 = gsn_add_polyline(wks, plot, (/x, x/), \
+                                     (/ymin, ymax/), res_ref)
+      else if (iref.eq.2) then
+        ref_line2 = gsn_add_polyline(wks, plot, (/x, x/), \
+                                     (/ymin, ymax/), res_ref)
+      else if (iref.eq.3) then
+        ref_line3 = gsn_add_polyline(wks, plot, (/x, x/), \
+                                     (/ymin, ymax/), res_ref)
+      else
+        break  ; four ref lines should be enough ;)
+      end if
+      end if
+      end if
+      end if
+
+      delete([/ref_val, x/])
+    end do
+  end if
+
+  draw(plot)
+  frame(wks)
+
+  ; ===========================================================================
+  ; Provenance
+
+  if (diag_script_info@region.eq."Arctic") then
+    domain = "nh"
+  else if (diag_script_info@region.eq."Antarctic") then
+    domain = "sh"
+  end if
+  end if
+
+  infiles = metadata_att_as_array(info, "filename")
+
+  alltrends = new(dim_MOD, float)
+  alltrends(0:dim_MOD - dim_REF - 1) = SSIE_trnd
+  if (dim_REF .gt. 0) then
+    alltrends(dim_MOD - dim_REF:dim_MOD - 1) = SSIE_trnd_ref
+  end if
+  alltrends!0 = "dataset"
+  alltrends&dataset = datasets
+  alltrends@var = "SSIE_trnd"
+  alltrends@diag_script = DIAG_SCRIPT
+  caption = res@tiMainString
+
+  nc_ext = ncdf_write(alltrends, config_user_info@work_dir + out_ext + ".nc")
+
+  log_provenance(nc_ext, wks@fullname, caption, \
+                 (/"trend"/), domain, "histogram", \
+                 (/"senftleben_daniel", "lauer_axel"/), \
+                 "", infiles)
+
+  leave_msg(DIAG_SCRIPT, "")
+
+end
diff --git a/esmvaltool/diag_scripts/seaice/seaice_tsline.ncl b/esmvaltool/diag_scripts/seaice/seaice_tsline.ncl
new file mode 100644
index 0000000000..eaf0a727e1
--- /dev/null
+++ b/esmvaltool/diag_scripts/seaice/seaice_tsline.ncl
@@ -0,0 +1,388 @@
+; #############################################################################
+; LINE PLOTS OF SEA ICE AREA AND EXTENT vs TIME
+; Author: Daniel Senftleben (DLR, Germany)
+; ESMVal project
+; #############################################################################
+;
+; Description
+;   Creates line plot for sea ice area and extent versus time.
+;
+; Required diag_script_info attributes (diagnostics specific)
+;   month: selected month or annual mean.
+;   region: region to be plotted ("Arctic" or "Antarctic").
+;
+; Optional diag_script_info attributes (diagnostics specific)
+;   fill_pole_hole: fill observational hole at North pole (default: False)
+;   styleset: specify line and color style
+;   EMs_in_lg: create legend label for each individual ensemble member
+;     (default: True)
+;   xy_line_legend: False: do not draw a legend inside the plot
+;   legend_outside: True: draw legend in an extra plot
+;
+; Required variable_info attributes (variable specific)
+;
+; Optional variable_info attributes (variable specific)
+;
+; Caveats
+;   CMOR units expected: areacello in m2, sic in %
+;
+; Modification history
+;   20211006-lauer_axel: removed write_plots
+;   20190227-senftleben_daniel: added provenance.
+; 20190218-senftleben_daniel: ported to ESMValTool v2.0a1 +; 20180620-senftleben_daniel: ported to ESMValTool v2 +; 20160906-senftleben_daniel: added option to fill polar observation hole +; 20151027-lauer_axel: moved call to 'write_references' to the beginning +; 20150325-lauer_axel: modified reference tags used for acknowledgements +; 20140312-lauer_axel: adapted to new time_operations structure +; 20140129-senftleben_daniel: modularized to fit to new structure +; 20130419-gottschaldt_klaus-dirk: implemented into ./diag_scripts +; 20130405-gottschaldt_klaus-dirk: written +; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" +load "$diag_scripts/../interface_scripts/logging.ncl" + +load "$diag_scripts/seaice/seaice_aux.ncl" + +load "$diag_scripts/shared/latlon.ncl" +load "$diag_scripts/shared/set_operators.ncl" +load "$diag_scripts/shared/statistics.ncl" + +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" +load "$diag_scripts/shared/plot/xy_line.ncl" + + +begin + + enter_msg(DIAG_SCRIPT, "") + + ; Get metadata items + att = True + att@mip = "OImon" + info = select_metadata_by_atts(input_file_info, att) ; variable + var0 = info[0]@short_name + datasets = metadata_att_as_array(info, "dataset") + n_datasets = dimsizes(datasets) + att@mip = "fx" + areainfo = select_metadata_by_atts(input_file_info, att) ; area + areadatasets = metadata_att_as_array(areainfo, "dataset") + delete(att) + + ; Check required diag_script_info attributes + exit_if_missing_atts(diag_script_info, (/"region", "month"/)) + + ; Check region selection + if (all(diag_script_info@region.ne.(/"Arctic", "Antarctic"/))) then + error_msg("f", DIAG_SCRIPT, "", "no (valid) region selection") + end if + + ; Check fill_pole_hole option + if (.not.isatt(diag_script_info, "fill_pole_hole")) then + diag_script_info@fill_pole_hole = False + else + if (diag_script_info@fill_pole_hole) then + if (diag_script_info@region.eq."Antarctic") then + diag_script_info@fill_pole_hole = False + error_msg("w", DIAG_SCRIPT, "", "No pole hole at Antarctic " + \ + "- setting diag_script_info@fill_pole_hole to False") + end if + end if + end if + + ; Check multi model mean option + if (isatt(diag_script_info, "multi_model_mean")) then + multi_model_mean = diag_script_info@multi_model_mean + else + multi_model_mean = False + end if + + ; Determine time range + start_years = metadata_att_as_array(info, "start_year") + start_year = min(start_years) + end_years = metadata_att_as_array(info, "end_year") + end_year = max(end_years) + all_years = ispan(start_year, end_year, 1) + + ; Select attributes + avgstd = project_style(info, diag_script_info, "avgstd") + + ; Create output plot directory + plot_dir = config_user_info@plot_dir + system("mkdir -p " + plot_dir) + + ; Plot file type + file_type = config_user_info@output_file_type + + ; For annotation + smonths = (/"January", "February", "March", "April", \ + "May", "June", "July", "August", \ + "September", "October", "November", "December"/) + i2months = sprinti("%0.2i", ispan(1, 12, 1)) + + ; Output file name specifications (passed to get_wks for output file name) + system("mkdir -p " + config_user_info@work_dir) + if (diag_script_info@month .eq. 
"A") then + i2month = "annual" + else + i2month = smonths(toint(diag_script_info@month) - 1) + end if + out_ext = "extent_" + var0 + "_" \ + + diag_script_info@region + "_" + i2month + "_" \ + + sprinti("%0.4i", start_year) + "-" \ + + sprinti("%0.4i", end_year) + out_area = "area_" + var0 + "_" \ + + diag_script_info@region + "_" + i2month + "_" \ + + sprinti("%0.4i", start_year) + "-" \ + + sprinti("%0.4i", end_year) + + ; Array for collecting sea-ice extent for all datasets + val_ext = new((/n_datasets, end_year - start_year + 1/), float) + val_ext!1 = "time" + val_ext&time = int2dble(all_years) + + ; Array for collecting sea-ice area for all datasets + val_area = new((/n_datasets, end_year - start_year + 1/), float) + val_area!1 = "time" + val_area&time = int2dble(all_years) + + ; Prepare calculation of standard deviation of datasets + if (multi_model_mean) then + val_ext_stddev = new((/4, end_year - start_year + 1/), float) + val_area_stddev = new((/4, end_year - start_year + 1/), float) + else ; initialize anyway: both are needed as parameters for xy_line + val_ext_stddev = 0 + val_area_stddev = 0 + end if + + ; Loop over datasets + do imod = 0, n_datasets - 1 + + log_info("Processing dataset " + datasets(imod)) + + sic = read_data(info[imod]) + + ; Check grid type + l_irreg = True + if (sic!1.eq."lat" .and. sic!2.eq."lon") then + if (dimsizes(dimsizes(sic&lat)).eq.1 .and. \ + dimsizes(dimsizes(sic&lon)).eq.1) then + l_irreg = False + end if + end if + + ; Look for area variable + if (any(datasets(imod).eq.areadatasets)) then + + ; Read areacello + log_info("Using variable areacello for the area") + area = read_data(areainfo[imod]) + locinfo = areainfo[imod] + locinfo@short_name = "lat" ; read auxiliary latitude coordinate + arealat = read_data(locinfo) + delete(locinfo) + + ; Check grid consistency + if (.not.isconform(sic(0, :, :), area)) then + if (l_irreg) then + error_msg("f", DIAG_SCRIPT, "", "areacello not conformal") + else + delete(area) + delete(arealat) + log_info("areacello has inconsistent dimensions, " + \ + "calculating area for a regular grid instead") + area = map_area(sic&lat, sic&lon) + arealat = area&lat + end if + end if + + else + + if (.not.l_irreg) then + log_info("Calculating area for a regular grid") + area = map_area(sic&lat, sic&lon) + arealat = area&lat + else + error_msg("f", DIAG_SCRIPT, "", "cannot calculate area") + end if + + end if + + ; Calcluate sea ice area + sic_sum_ext = sea_ice_area(sic, area, arealat, l_irreg, \ + diag_script_info@region, \ + start_years(imod), end_years(imod), \ + diag_script_info@month, "extent", \ + diag_script_info@fill_pole_hole) + + ; Calculate sea ice extent + sic_sum_area = sea_ice_area(sic, area, arealat, l_irreg, \ + diag_script_info@region, \ + start_years(imod), end_years(imod), \ + diag_script_info@month, "area", \ + diag_script_info@fill_pole_hole) + + ; Put into global array + ia = toint(start_years(imod)) - start_year + ie = toint(end_years(imod)) - start_year + val_ext(imod, ia:ie) = tofloat(sic_sum_ext) + val_area(imod, ia:ie) = tofloat(sic_sum_area) + + ; Clean up + delete([/sic, area, arealat, sic_sum_ext, sic_sum_area, ia, ie/]) + + end do ; imod + + ; Calculate standard deviation of datasets + if (multi_model_mean) then + temp = ind(avgstd .eq. 
0) + do imonth = 0, dimsizes(val_ext&time) - 1 + ; For extent + val_ext_stddev(0, imonth) = \ + dim_avg_n_Wrap(val_ext(temp, imonth), (/0/)) + val_ext_stddev(1, imonth) = \ + dim_stddev_n_Wrap(val_ext(temp, imonth), (/0/)) + val_ext_stddev(2, imonth) = \ + val_ext_stddev(0, imonth) - val_ext_stddev(1, imonth) + val_ext_stddev(3, imonth) = \ + val_ext_stddev(0, imonth) + val_ext_stddev(1, imonth) + + ; For area + val_area_stddev(0, imonth) = \ + dim_avg_n_Wrap(val_area(temp, imonth), (/0/)) + val_area_stddev(1, imonth) = \ + dim_stddev_n_Wrap(val_area(temp, imonth), (/0/)) + val_area_stddev(2, imonth) = \ + val_area_stddev(0, imonth) - val_area_stddev(1, imonth) + val_area_stddev(3, imonth) = \ + val_area_stddev(0, imonth) + val_area_stddev(1, imonth) + end do + delete(temp) + end if + + ; Some attributes for extent plot + val_ext!0 = "model" + val_ext&model = metadata_att_as_array(info, "dataset") + val_ext!1 = "time" + val_ext&time = int2dble(all_years) + if (diag_script_info@month .eq. "A") then ; weighted annual mean + val_ext&time@units = "Annual" + else ; select a month + val_ext&time@units = smonths(stringtoint(diag_script_info@month) - 1) + end if + val_ext@units = "10~S~6~N~ km~S~2~N~" + val_ext@long_name = "Sea Ice Extent" + + ; Some attributes for area plot + val_area!0 = "model" + val_area&model = metadata_att_as_array(info, "dataset") + val_area!1 = "time" + val_area&time = int2dble(all_years) + if (diag_script_info@month .eq. "A") then ; weighted annual mean + val_area&time@units = "Annual" + else ; select a month + val_area&time@units = smonths(stringtoint(diag_script_info@month) - 1) + end if + val_area@units = "10~S~6~N~ km~S~2~N~" + val_area@long_name = "Sea Ice Area" + + ; Plotting + if (dimsizes(val_ext&time) .eq. 1) then + log_info(" Skipping tsline: " + \ + "plot needs at least two time values, has 1") + else + + ; Create plot variables + wks_ext = gsn_open_wks(file_type, plot_dir + out_ext) + wks_area = gsn_open_wks(file_type, plot_dir + out_area) + + ; Define plot resources + res = True + res@tmXBLabelAngleF = 45 + res@tmXBLabelJust = "CenterRight" + + ; Min and max values on x-axis, leave space on right for legend + res@trXMaxF = max(val_ext&time) + 0.25 * (max(val_ext&time) - \ + min(val_ext&time)) + res@trXMinF = min(val_ext&time) - 0.05 * (max(val_ext&time) - \ + min(val_ext&time)) + res@tmXBMode = "Automatic" + res@tmXBFormat = "f" + if (isatt(diag_script_info, "xy_line_legend")) then + if (.not.diag_script_info@xy_line_legend) then + res@trXMaxF = max(val_ext&time) + res@trXMinF = min(val_ext&time) + end if + end if + + ; Resources specific for extent plot + res_ext = True + copy_VarMeta(res, res_ext) + res_ext@tiMainString = val_ext&time@units + " " + \ + diag_script_info@region + " " + val_ext@long_name + res_ext@tiYAxisString = val_ext@units + + ; Resources specific for area plot + res_area = True + copy_VarMeta(res, res_area) + res_area@tiMainString = val_area&time@units + " " + \ + diag_script_info@region + " " + val_area@long_name + res_area@tiYAxisString = val_area@units + + ; Draw the plots (procedure in ./plot_scripts/ncl/xy_line.ncl) + xy_line(wks_ext, val_ext, val_ext&time, val_ext_stddev, res_ext, info) + if (isatt(diag_script_info, "legend_outside")) then + ; Legend can only once be drawn in an extra file (hlu error), + ; but is same for both area and extent anyway + if (diag_script_info@legend_outside) then + val_area@legend_outside = True + wks_area@legendfile = plot_dir + out_ext + "_legend" + end if + else + diag_script_info@legend_outside = 
False
+    end if
+    xy_line(wks_area, val_area, val_area&time, val_area_stddev, res_area, info)
+
+    ; Provenance
+    if (diag_script_info@region.eq."Arctic") then
+      domain = "nh"
+    elseif (diag_script_info@region.eq."Antarctic") then
+      domain = "sh"
+    end if
+    infiles = metadata_att_as_array(info, "filename")
+
+    val_ext@var = var0
+    val_ext@diag_script = DIAG_SCRIPT
+    caption_ext = "Timeseries of " + i2month + " " + \
+      diag_script_info@region + " sea ice extent in " + val_ext@units
+    nc_ext = \
+      ncdf_write(val_ext, config_user_info@work_dir + out_ext + ".nc")
+
+    fullname = plot_dir + out_ext + "." + file_type
+    log_provenance(nc_ext, fullname, caption_ext, \
+                   (/"mean", "stddev", "clim"/), domain, "times", \
+                   (/"senftleben_daniel", "gottschaldt_klaus-dirk"/), \
+                   "stroeve07grl", infiles)
+
+    val_area@var = var0
+    val_area@diag_script = DIAG_SCRIPT
+    caption_area = "Timeseries of " + i2month + " " + \
+      diag_script_info@region + " sea ice area in " + val_area@units
+    nc_area = \
+      ncdf_write(val_area, config_user_info@work_dir + out_area + ".nc")
+
+    fullname = plot_dir + out_area + "." + file_type
+    log_provenance(nc_area, fullname, caption_area, \
+                   (/"mean", "stddev", "clim"/), domain, "times", \
+                   (/"senftleben_daniel", "gottschaldt_klaus-dirk"/), \
+                   "stroeve07grl", infiles)
+
+  end if  ; More than 1 time step
+
+  leave_msg(DIAG_SCRIPT, "")
+
+end
diff --git a/esmvaltool/diag_scripts/seaice/seaice_yod.ncl b/esmvaltool/diag_scripts/seaice/seaice_yod.ncl
new file mode 100644
index 0000000000..e5e1c24ed4
--- /dev/null
+++ b/esmvaltool/diag_scripts/seaice/seaice_yod.ncl
@@ -0,0 +1,500 @@
+; #############################################################################
+; YOD - Calculate year of near disappearance of (Arctic) sea ice
+; Author: Senftleben, Daniel (DLR, Germany)
+; CRESCENDO project
+; #############################################################################
+; Description
+;   Calculates the year of near-disappearance of (Arctic) sea ice (YOD). YOD
+;   is defined as the first of five consecutive years in which the minimum
+;   Arctic sea ice extent drops below one million km2. The diagnostic creates
+;   a time series plot of (September Arctic) sea ice extent for each model
+;   given in the recipe and adds three multi-model statistics: the mean, the
+;   standard deviation and the YOD. It optionally accepts a list of
+;   pre-determined model weights and adds the three weighted multi-model
+;   statistics to the time series.
+;
+; Required diag_script_info attributes
+;   month: selected month (1, 2, ..., 12) or annual mean ("A")
+;   region: region to be analyzed ("Arctic" or "Antarctic")
+;
+; Optional diag_script_info attributes
+;   fill_pole_hole: fill observational hole at North pole (default: False)
+;   wgt_file: netCDF file containing pre-determined model weights
+;
+; Caveats
+;   - CMOR units expected: areacello in m2, sic in %
+;
+; Modification history
+;   20211006-lauer_axel: removed write_plots
+;   20191030-lauer_axel: rewritten for v2.0
+;   20190211-senftleben_daniel: implemented.
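+;
+; Example recipe snippet (hypothetical; values are placeholders):
+;   scripts:
+;     yod:
+;       script: seaice/seaice_yod.ncl
+;       month: "9"
+;       region: "Arctic"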
+; #############################################################################
+
+load "$diag_scripts/../interface_scripts/interface.ncl"
+load "$diag_scripts/../interface_scripts/logging.ncl"
+
+load "$diag_scripts/seaice/seaice_aux.ncl"
+
+load "$diag_scripts/shared/plot/aux_plotting.ncl"
+load "$diag_scripts/shared/plot/scatterplot.ncl"
+
+begin
+
+  enter_msg(DIAG_SCRIPT, "")
+
+  ; Get metadata items
+  att = True
+  att@mip = "OImon"
+  info = select_metadata_by_atts(input_file_info, att)  ; variable
+  var0 = info[0]@short_name
+  datasets = metadata_att_as_array(info, "dataset")
+  ensembles = metadata_att_as_array(info, "ensemble")
+  exps = metadata_att_as_array(info, "exp")
+  infiles = metadata_att_as_array(info, "filename")
+  dim_MOD = dimsizes(datasets)
+  att@mip = "fx"
+  areainfo = select_metadata_by_atts(input_file_info, att)  ; area
+  areadatasets = metadata_att_as_array(areainfo, "dataset")
+  areafiles = metadata_att_as_array(areainfo, "filename")
+  delete(att)
+
+  ; Check required diag_script_info attributes
+  exit_if_missing_atts(diag_script_info, (/"month", "region"/))
+
+  ; Check fill_pole_hole option
+  if (.not.isatt(diag_script_info, "fill_pole_hole")) then
+    diag_script_info@fill_pole_hole = False
+  else
+    if (diag_script_info@fill_pole_hole) then
+      if (diag_script_info@region.eq."Antarctic") then
+        diag_script_info@fill_pole_hole = False
+        error_msg("w", DIAG_SCRIPT, "", "No pole hole at Antarctic " + \
+                  "- setting diag_script_info@fill_pole_hole to False")
+      end if
+    end if
+  end if
+
+  ; Create output plot directory
+  plot_dir = config_user_info@plot_dir
+  system("mkdir -p " + plot_dir)
+
+  ; Create output directory (work)
+  system("mkdir -p " + config_user_info@work_dir)
+
+  ; Plot file type
+  file_type = config_user_info@output_file_type
+
+  ; Determine time range
+  start_years = metadata_att_as_array(info, "start_year")
+  end_years = metadata_att_as_array(info, "end_year")
+
+  ; Check diagnostic attributes
+  region = diag_script_info@region
+  if (isatt(diag_script_info, "wgt_file")) then
+    if (isfilepresent(diag_script_info@wgt_file)) then
+      wgt_flag = True  ; do both, weighted and unweighted
+      weights = ncdf_read(diag_script_info@wgt_file, "weight")  ; load weights
+    else
+      error_msg("f", DIAG_SCRIPT, "", "Weights file not found: " + \
+                diag_script_info@wgt_file)
+    end if
+  else
+    wgt_flag = False
+  end if
+
+  smonths = (/"January", "February", "March", "April", \
+              "May", "June", "July", "August", \
+              "September", "October", "November", "December"/)
+
+  if (diag_script_info@month .eq. "A") then
+    timestring = "annual"
+  else
+    timestring = smonths(toint(diag_script_info@month) - 1)
+  end if
+
+  all_rcps = exps(str_match_ind_ic(exps, "rcp"))
+  which_rcps = get_unique_values(all_rcps)
+
+  YOD = new((/dimsizes(which_rcps), 2, 2/), float)
+  YOD!0 = "rcp"
+  YOD!1 = "wgt"
+  YOD!2 = "val"
+  YOD&rcp = which_rcps
+  YOD&wgt = (/"unweighted", "weighted"/)
+  YOD&val = (/"avg", "std"/)
+
+  ; for writing provenance
+
+  if (diag_script_info@region.eq."Arctic") then
+    domain = "nh"
+  else if (diag_script_info@region.eq."Antarctic") then
+    domain = "sh"
+  end if
+  end if
+
+  do ircp = 0, dimsizes(which_rcps) - 1
+
+    ; initialize arrays
+    rcp_ind = ind(exps.eq.which_rcps(ircp))
+    rcp_models = datasets(rcp_ind)
+    YOD_tmp = new((/dimsizes(rcp_models)/), float)
+    YOD_tmp!0 = "dataset"
+    YOD_tmp&dataset = rcp_models
+    years = ispan(min(toint(start_years(rcp_ind))), \
+                  max(toint(end_years(rcp_ind))), 1)
+    SSIE_all = new((/dimsizes(YOD_tmp), dimsizes(years)/), float)
+    SSIE_all!0 = "dataset"
+    SSIE_all!1 = "year"
+    SSIE_all&dataset = rcp_models
+    SSIE_all&year = years
+
+    ; check if weights are consistent
+    if (wgt_flag) then
+      if (dimsizes(weights).ne.dimsizes(YOD_tmp&dataset)) then
+        error_msg("f", DIAG_SCRIPT, "", "weights do not fit to datasets " + \
+                  "in recipe for " + which_rcps(ircp))
+      else  ; make sure each weight fits to the correct dataset
+        sort_ind = new(dimsizes(YOD_tmp&dataset), integer)
+        do imod = 0, dimsizes(weights)-1
+          sort_ind(imod) = ind(weights&dataset.eq.YOD_tmp&dataset(imod))
+        end do
+        if (any(ismissing(sort_ind))) then
+          error_msg("f", DIAG_SCRIPT, "", "weights do not fit to " + \
+                    "datasets in recipe for " + which_rcps(ircp))
+        else
+          weights = weights(sort_ind)
+        end if
+      end if
+    end if
+
+    ; loop over datasets
+    do ii = 0, dimsizes(rcp_ind) - 1
+      imod = rcp_ind(ii)
+      log_info("Processing dataset " + datasets(imod))
+
+      sic = read_data(info[imod])
+
+      ; check grid type
+      l_irreg = True
+      if (sic!1.eq."lat" .and. sic!2.eq."lon") then
+        if (dimsizes(dimsizes(sic&lat)).eq.1 .and. \
+            dimsizes(dimsizes(sic&lon)).eq.1) then
+          l_irreg = False
+        end if
+      end if
+
+      ; look for area variable
+      if (any(datasets(imod).eq.areadatasets)) then
+
+        ; read areacello
+        log_info("Using variable areacello for the area")
+        area = read_data(areainfo[imod])
+        locinfo = areainfo[imod]
+        locinfo@short_name = "lat"  ; read auxiliary latitude coordinate
+        arealat = read_data(locinfo)
+        delete(locinfo)
+
+        ; check grid consistency
+        if (.not.isconform(sic(0, :, :), area)) then
+          if (l_irreg) then
+            error_msg("f", DIAG_SCRIPT, "", "areacello not conformal")
+          else
+            delete(area)
+            delete(arealat)
+            log_info("areacello has inconsistent dimensions, " + \
+                     "calculating area for a regular grid instead")
+            area = map_area(sic&lat, sic&lon)
+            arealat = area&lat
+          end if
+        end if
+      else
+        if (.not.l_irreg) then
+          log_info("Calculating area for a regular grid")
+          area = map_area(sic&lat, sic&lon)
+          arealat = area&lat
+        else
+          error_msg("f", DIAG_SCRIPT, "", "cannot calculate area")
+        end if
+      end if
+
+      ; calculate sea ice extent (SIE)
+      SIE = sea_ice_area(sic, area, arealat, l_irreg, \
+                         diag_script_info@region, \
+                         start_years(imod), end_years(imod), \
+                         diag_script_info@month, "extent", \
+                         diag_script_info@fill_pole_hole)
+
+      delete([/sic, area, arealat/])
+
+      SSIE = SIE
+      if (SSIE!0.eq."time") then  ; need 'year'
+        utc_date = cd_calendar(SSIE&time, 0)
+        SSIE!0 = "year"
+        SSIE&year = utc_date(:, 0)
+        delete(utc_date)
+      end if
+
+      ; calculate ensemble mean
+      if (imod.ne.max(rcp_ind)) then
+        if ((datasets(imod) + exps(imod)) .eq. \
+            (datasets(imod + 1) + exps(imod + 1))) then
+          if (.not.isvar("store")) then
+            store = new((/1, dimsizes(SSIE)/), typeof(SSIE))
+            store(0, :) = SSIE
+            continue
+          else
+            tmp = new((/1, dimsizes(SSIE)/), typeof(SSIE))
+            tmp(0, :) = SSIE
+            store := array_append_record(store, tmp, 0)
+            continue
+          end if
+        else
+          if (isvar("store")) then
+            SSIE_em = dim_avg_n_Wrap(store, 0)
+            delete(store)
+          else
+            SSIE_em = SSIE
+          end if
+        end if
+      else
+        if (isvar("store")) then
+          SSIE_em = dim_avg_n_Wrap(store, 0)
+        else
+          SSIE_em = SSIE
+        end if
+      end if
+
+      ; calculate first year of disappearance (YOD)
+      ; = 1st year when 5-yr smoothed SSIE drops below 10^6 km^2
+      ;   for more than 5 consecutive years
+      mod_ind = ind(datasets(imod).eq.YOD_tmp&dataset)
+      YOD_tmp(mod_ind) = calc_yod(SSIE_em, 1., 5)
+      if (YOD_tmp(mod_ind).lt.min(tofloat(start_years(rcp_ind))) .or. \
+          YOD_tmp(mod_ind).gt.max(tofloat(end_years(rcp_ind)))) then
+        ; make sure missing values are recognized
+        YOD_tmp(mod_ind) = YOD_tmp@_FillValue
+      end if
+
+      ; store
+      SSIE_all(mod_ind, :) = (/SSIE_em/)
+
+      ; clean up
+      delete([/SIE, SSIE, SSIE_em, mod_ind/])
+    end do  ; imod
+
+    ; calculate YOD from multi-model mean (MMM)
+    MMM_uwt = dim_avg_n_Wrap(SSIE_all, 0)
+    STD_uwt = dim_stddev_n_Wrap(SSIE_all, 0)
+    STD_wgt = STD_uwt  ; initialize
+    MMM_wgt = MMM_uwt
+    if (wgt_flag) then
+      MMM_wgt = dim_avg_wgt_n_Wrap(SSIE_all, weights, 1, 0)
+      do iyear = 0, dimsizes(SSIE_all&year) - 1
+        STD_wgt(iyear) = dim_stddev_wgt_Wrap(SSIE_all(:, iyear), weights, 1)
+      end do
+    else
+      STD_wgt = STD_wgt@_FillValue
+      MMM_wgt = MMM_wgt@_FillValue
+    end if
+
+    YOD_MMM_uwt = calc_yod(MMM_uwt, 1., 5)
+    YOD_MMM_uwt_2 = calc_yod(MMM_uwt, 2., 1)
+
+    if (wgt_flag) then
+      YOD_MMM_wgt = calc_yod(MMM_wgt, 1., 5)
+      YOD_MMM_wgt_2 = calc_yod(MMM_wgt, 2., 1)
+    else
+      YOD_MMM_wgt = 0.0
+      YOD_MMM_wgt_2 = 0.0
+    end if
+
+    ; =========================================================================
+    ; plotting section
+    ; =========================================================================
+
+    ; --------------------------------------------------------------------
+    ; timeseries of models (ensemble means), MMM (wgt/uwt), stdd (wgt/uwt)
+    ; --------------------------------------------------------------------
+    outfile_ts = "timeseries_" + which_rcps(ircp)
+    wks_ts = get_wks(0, DIAG_SCRIPT, outfile_ts)
+    res_ts = True
+    res_ts@gsnDraw = False
+    res_ts@gsnFrame = False
+    res_ts@gsnMaximize = True
+    res_ts@vpHeightF = 0.4
+    res_ts@vpWidthF = 0.8
+    res_ts@xyDashPattern = 1
+    res_ts@trXMinF = min(SSIE_all&year)
+    res_ts@trXMaxF = max(SSIE_all&year)
+    res_std_uwt = True
+    copy_VarMeta(res_ts, res_std_uwt)
+    res_std_wgt = True
+    copy_VarMeta(res_ts, res_std_wgt)
+    res_mmm = True
+    copy_VarMeta(res_ts, res_mmm)
+    res_ts@tiMainString = which_rcps(ircp)
+    plot_ts = gsn_csm_xy(wks_ts, SSIE_all&year, SSIE_all, res_ts)
+    res_std_uwt@gsnXYFillColors = (/0., 0., 0., .07/)  ; transparent gray
+    res_std_uwt@xyLineColor = "red"
+    res_std_uwt@xyDashPattern = 0
+    res_std_uwt@xyLineThicknessF = 2.
+    plot_std_uwt = gsn_csm_xy(wks_ts, SSIE_all&year, (/MMM_uwt + STD_uwt, \
+                              MMM_uwt - STD_uwt/), res_std_uwt)
+    overlay(plot_ts, plot_std_uwt)
+    if (wgt_flag) then
+      res_std_wgt@gsnXYFillColors = (/0., 0., 0., .07/)  ; transparent gray
+      res_std_wgt@xyLineColor = "blue"
+      res_std_wgt@xyDashPattern = 0
+      res_std_wgt@xyLineThicknessF = 2.
+      plot_std_wgt = gsn_csm_xy(wks_ts, SSIE_all&year, (/MMM_wgt + STD_wgt, \
+                                MMM_wgt - STD_wgt/), res_std_wgt)
+      overlay(plot_ts, plot_std_wgt)
+    end if
+    res_mmm@xyLineColors = (/"red", "blue"/)
+    res_mmm@xyLineThicknessF = 4.
+    res_mmm@xyDashPattern = 0
+    if (wgt_flag) then
+      plot_mmm = gsn_csm_xy(wks_ts, SSIE_all&year, \
+                            (/MMM_uwt, MMM_wgt/), res_mmm)
+    else
+      plot_mmm = gsn_csm_xy(wks_ts, SSIE_all&year, MMM_uwt, res_mmm)
+    end if
+    overlay(plot_ts, plot_mmm)
+    res_yod = True
+    res_yod@gsLineThicknessF = 3.
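+    ; The vertical polylines added below mark the multi-model YOD
+    ; estimates (red: unweighted, blue: weighted, if available); the
+    ; horizontal black line marks the 1 million km2 threshold used by
+    ; calc_yod.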
+    res_yod@gsLineColor = "red"
+    res_yod@gsLineDashPattern = 0
+    yod_line_uwt = gsn_add_polyline(wks_ts, plot_ts, (/YOD_MMM_uwt, \
+                                    YOD_MMM_uwt/), (/0., 12./), res_yod)
+    if (wgt_flag) then
+      res_yod@gsLineColor = "blue"
+      res_yod@gsLineDashPattern = 0
+      yod_line_wgt = gsn_add_polyline(wks_ts, plot_ts, (/YOD_MMM_wgt, \
+                                      YOD_MMM_wgt/), (/0., 12./), res_yod)
+    end if
+    res_yod@xyLineThicknessF = 0.1
+    res_yod@gsLineColor = "black"  ; 1 million km2 reference line
+    res_yod@gsLineDashPattern = 0
+    yod_ref_line = gsn_add_polyline(wks_ts, plot_ts, (/min(SSIE_all&year), \
+                                    max(SSIE_all&year)/), (/1., 1./), res_yod)
+    draw(plot_ts)
+    frame(wks_ts)
+
+    ; store
+    YOD(ircp, 0, 0) = avg(YOD_tmp)
+    YOD(ircp, 0, 1) = stddev(YOD_tmp)
+    if (wgt_flag) then
+      YOD(ircp, 1, 0) = dim_avg_wgt(YOD_tmp, weights, 1)
+      YOD(ircp, 1, 1) = dim_stddev_wgt_Wrap(YOD_tmp, weights, 1)
+    end if
+
+    ; =========================================================================
+    ; Provenance
+    ; =========================================================================
+
+    SSIE_all@var = "SSIE"
+    SSIE_all@diag_script = DIAG_SCRIPT
+    outf = config_user_info@work_dir + outfile_ts + ".nc"
+
+    MMM_uwt@var = "SSIE_MMM_uwt"
+    MMM_uwt@diag_script = DIAG_SCRIPT
+    STD_uwt@var = "SSIE_MMM_STD_uwt"
+    STD_uwt@diag_script = DIAG_SCRIPT
+
+    nc_ext = ncdf_write(SSIE_all, outf)
+    outf@existing = "append"
+    nc_ext = ncdf_write(MMM_uwt, outf)
+    nc_ext = ncdf_write(STD_uwt, outf)
+
+    if (wgt_flag) then
+      MMM_wgt@var = "SSIE_MMM_wgt"
+      MMM_wgt@diag_script = DIAG_SCRIPT
+      STD_wgt@var = "SSIE_MMM_STD_wgt"
+      STD_wgt@diag_script = DIAG_SCRIPT
+
+      nc_ext = ncdf_write(MMM_wgt, outf)
+      nc_ext = ncdf_write(STD_wgt, outf)
+    end if
+
+    caption = "Time series of " + timestring + " " + diag_script_info@region \
+              + " sea ice extent for individual models and multi-model mean," \
+              + " and multi-model standard deviation of year of disappearance" \
+              + " for scenario " + which_rcps(ircp) + "."
+
+    log_provenance(nc_ext, wks_ts@fullname, caption, \
+                   "other", domain, "times", \
+                   (/"senftleben_daniel", "lauer_axel"/), \
+                   "", infiles(rcp_ind))
+
+    ; clean up
+    delete([/rcp_models, SSIE_all, YOD_tmp, MMM_uwt, MMM_wgt, STD_uwt, \
+             STD_wgt, YOD_MMM_uwt, YOD_MMM_wgt, res_yod, outf, nc_ext, wks_ts/])
+  end do  ; ircp
+
+  ; ---
+  ; YOD
+  ; ---
+
+  outfile = "YOD"
+  wks = get_wks(0, DIAG_SCRIPT, outfile)
+  if (wgt_flag) then
+    factor = 2
+  else
+    factor = 1
+  end if
+  ds = dimsizes(which_rcps) * factor
+  if (ds .gt. 1) then
+    x = fspan(1, ds, dimsizes(which_rcps) * factor)
+  else
+    x = 1
+  end if
+  if (factor .gt. 1) then
+    x(::2) = x(::2) + 0.5
+  end if
+  y = new((/dimsizes(YOD&rcp) * factor, 5/), float)
+  XBLabels = new((/dimsizes(YOD&rcp) * factor/), string)
+  do ircp = 0, dimsizes(YOD&rcp) - 1
+    ; unweighted
+    y(ircp * factor, 0) = YOD(ircp, 0, 0) - YOD(ircp, 0, 1)
+    y(ircp * factor, 1:3) = YOD(ircp, 0, 0)
+    y(ircp * factor, 4) = YOD(ircp, 0, 0) + YOD(ircp, 0, 1)
+    XBLabels(ircp * factor) = YOD&rcp(ircp) + "_uwt"
+    if (wgt_flag) then
+      ; weighted
+      y(ircp * factor + 1, 0) = YOD(ircp, 1, 0) - YOD(ircp, 1, 1)
+      y(ircp * factor + 1, 1:3) = YOD(ircp, 1, 0)
+      y(ircp * factor + 1, 4) = YOD(ircp, 1, 0) + YOD(ircp, 1, 1)
+      XBLabels(ircp * factor + 1) = YOD&rcp(ircp) + "_wgt"
+    end if
+  end do
+  res = True
+  res@tmXBLabels = XBLabels
+  res@tmXBLabelAngleF = 90
+  res@tiYAxisString = "Year of Disappearance"
+  plot = boxplot(wks, x, y, False, res, False)
+  draw(plot)
+  frame(wks)
+
+  ; ===========================================================================
+  ; Provenance
+  ; ===========================================================================
+
+  YOD@var = "yod"
+  YOD@diag_script = DIAG_SCRIPT
+
+  outf = config_user_info@work_dir + outfile + ".nc"
+
+  nc_ext2 = ncdf_write(YOD, outf)
+
+  caption = "Year of disappearance of " + timestring + " " \
+            + diag_script_info@region + " sea ice for scenarios " \
+            + tostring(which_rcps) + "."
+
+  log_provenance(nc_ext2, wks@fullname, caption, \
+                 "other", domain, "bar", \
+                 (/"senftleben_daniel", "lauer_axel"/), \
+                 "", infiles)
+
+  leave_msg(DIAG_SCRIPT, "")
+
+end
diff --git a/esmvaltool/diag_scripts/seaice_drift/seaice_drift.py b/esmvaltool/diag_scripts/seaice_drift/seaice_drift.py
new file mode 100644
index 0000000000..6791254109
--- /dev/null
+++ b/esmvaltool/diag_scripts/seaice_drift/seaice_drift.py
@@ -0,0 +1,621 @@
+"""Sea ice drift diagnostic."""
+import csv
+import logging
+import math
+import os
+import warnings
+
+import iris
+import iris.analysis
+import iris.analysis.cartography
+import iris.coords
+import iris.cube
+import numpy as np
+import shapely
+from iris.aux_factory import AuxCoordFactory
+from iris.util import broadcast_to_shape
+from matplotlib import pyplot as plt
+from pyproj import Transformer
+from scipy import stats
+from shapely.geometry import Point, Polygon
+
+import esmvaltool.diag_scripts.shared
+import esmvaltool.diag_scripts.shared.names as n
+from esmvaltool.diag_scripts.shared._base import ProvenanceLogger
+
+logger = logging.getLogger(os.path.basename(__file__))
+
+MONTHS_PER_YEAR = 12
+
+warnings.filterwarnings("ignore", category=DeprecationWarning)
+
+
+class SeaIceDrift():
+    """Class to compute SeaIce Drift metric."""
+    def __init__(self, cfg):
+        self.cfg = cfg
+        self.datasets = esmvaltool.diag_scripts.shared.Datasets(self.cfg)
+        self.variables = esmvaltool.diag_scripts.shared.Variables(self.cfg)
+
+        self.references = {}
+        self.siconc = {}
+        self.sivol = {}
+        self.sispeed = {}
+        self.region_mask = {}
+
+        self.slope_drift_sic = {}
+        self.intercept_drift_siconc = {}
+        self.slope_ratio_drift_siconc = {}
+        self.error_drift_siconc = {}
+
+        self.slope_drift_sivol = {}
+        self.intercept_drift_sivol = {}
+        self.slope_ratio_drift_sivol = {}
+        self.error_drift_sivol = {}
+
+    def compute(self):
+        """Compute metric."""
+        logger.info('Loading sea ice concentration')
+        siconc_original = {}
+        siconc_files = self.datasets.get_path_list(
+            standard_name='sea_ice_area_fraction')
+        for filename in siconc_files:
+            reference_dataset = self._get_reference_dataset(
+                'sic', self.datasets.get_info('reference_dataset', filename))
+            alias = self._get_alias(filename, reference_dataset)
+            siconc = iris.load_cube(filename, 'sea_ice_area_fraction')
+            siconc.convert_units('1.0')
+            siconc_original[alias] = siconc
+
+            self.siconc[alias] = self._compute_mean(
+                siconc, self._get_mask(siconc, filename))
+
+        logger.info('Loading sea ice thickness')
+        sithick_files = self.datasets.get_path_list(
+            standard_name='sea_ice_thickness')
+        for filename in sithick_files:
+            reference_dataset = self._get_reference_dataset(
+                'sithick', self.datasets.get_info('reference_dataset',
+                                                  filename))
+            alias = self._get_alias(filename, reference_dataset)
+            sithick = iris.load_cube(filename, 'sea_ice_thickness')
+            self.sivol[alias] = self._compute_mean(
+                sithick, self._get_mask(sithick, filename))
+
+        logger.info('Load sea ice velocities')
+        sispeed_files = self.datasets.get_path_list(
+            standard_name='sea_ice_speed')
+        obs_file = self.cfg.get('sispeed_obs', '')
+        for filename in sispeed_files:
+            reference_dataset = self._get_reference_dataset(
+                'sispeed', self.datasets.get_info('reference_dataset',
+                                                  filename))
+            alias = self._get_alias(filename, reference_dataset)
+            if obs_file and alias == 'reference':
+                obs_data = np.load(obs_file)
+                obs_data = obs_data.reshape((12, 35), order='F')
+                logger.debug(obs_data)
+                sispeed = iris.cube.Cube(obs_data,
+                                         'sea_ice_speed',
+                                         units='km day-1')
+                sispeed.add_dim_coord(
+                    iris.coords.DimCoord(range(1, 13),
+                                         var_name='month_number'), 0)
+                sispeed.add_dim_coord(
+                    iris.coords.DimCoord(range(1979, 1979 + 35),
+                                         var_name='year'), 1)
+                sispeed = sispeed.extract(
+                    iris.Constraint(year=lambda c: 1979 <= c <= 2005))
+                sispeed = sispeed.collapsed('year', iris.analysis.MEAN)
+                logger.debug(sispeed)
+                self.sispeed[alias] = sispeed
+            else:
+                sispeed = iris.load_cube(filename, 'sea_ice_speed')
+                sispeed.convert_units('km day-1')
+                self.sispeed[alias] = self._compute_mean(
+                    sispeed, self._get_mask(sispeed, filename))
+
+        self._compute_metrics()
+        self._results()
+        self._save()
+        self._plot_results()
+
+    def _get_reference_dataset(self, var, reference_dataset):
+        for filename in self.datasets:
+            dataset = self.datasets.get_info(n.DATASET, filename)
+            if dataset == reference_dataset:
+                self.references[var] = self.datasets.get_info(
+                    n.ALIAS, filename)
+                return filename
+        raise ValueError(f'Reference dataset {reference_dataset} not found')
+
+    def _get_mask(self, data, filename):
+        if 'latitude_treshold' in self.cfg:
+            lat_threshold = self.cfg['latitude_treshold']
+            mask = data.coord('latitude').points > lat_threshold
+            mask = mask.astype(np.int8)
+        else:
+            polygon = self.cfg['polygon']
+            factory = InsidePolygonFactory(
+                polygon,
+                data.coord('latitude'),
+                data.coord('longitude'),
+            )
+            data.add_aux_factory(factory)
+            mask = data.coord('Inside polygon').points
+            mask = mask.astype(np.int8)
+            coord = data.coord('Inside polygon')
+            dim_coords = data.coord_dims(coord)
+            data.remove_aux_factory(factory)
+            data.add_aux_coord(coord, dim_coords)
+            data.remove_coord('Inside polygon')
+
+        dataset_info = self.datasets.get_dataset_info(filename)
+        var_info = esmvaltool.diag_scripts.shared.group_metadata(
+            self.cfg['input_data'].values(), 'alias')[dataset_info[n.ALIAS]]
+        var_info = esmvaltool.diag_scripts.shared.group_metadata(
+            var_info, 'short_name')
+        if 'areacello' in var_info:
+            area_file = var_info['areacello'][0]['filename']
+            area_cello = iris.load_cube(area_file)
+        else:
+            area_cello = iris.analysis.cartography.area_weights(data)
+
+        return area_cello.data * mask
+
+    def _compute_metrics(self):
+        for dataset in self.siconc:
+            logger.info('Compute diagnostics for %s', dataset)
+            logger.info('Metrics drift-concentration')
+            logger.debug('Siconc: %s', self.siconc[dataset].data)
+            logger.debug('Sispeed: %s', self.sispeed[dataset].data)
+            logger.info('Slope ratio (no unit)')
+            slope, intercept, _, _ = self._get_slope_ratio(
+                self.siconc[dataset], self.sispeed[dataset])
+            self.slope_drift_sic[dataset] = slope
+            self.intercept_drift_siconc[dataset] = intercept
+
+            logger.info('Metrics drift-thickness')
+            logger.debug('sivol: %s', self.sivol[dataset].data)
+            logger.debug('Sispeed: %s', self.sispeed[dataset].data)
+            logger.info('Slope ratio (no unit)')
+            slope, intercept, _, _ = self._get_slope_ratio(
+                self.sivol[dataset], self.sispeed[dataset])
+            self.slope_drift_sivol[dataset] = slope
+            self.intercept_drift_sivol[dataset] = intercept
+
+        for dataset in self.siconc:
+            if dataset == 'reference':
+                continue
+            logger.info('Compute metrics for %s', dataset)
+            logger.info('Compute mean errors (%)')
+            self.error_drift_siconc[dataset] = self._compute_error(
+                self.siconc[dataset], self.siconc['reference'],
+                self.sispeed[dataset], self.sispeed['reference'])
+            self.error_drift_sivol[dataset] = self._compute_error(
+                self.sivol[dataset], self.sivol['reference'],
+                self.sispeed[dataset], self.sispeed['reference'])
+
+            logger.info('Compute relative slope ratios')
+            self.slope_ratio_drift_siconc[dataset] = \
+                self.slope_drift_sic[dataset] / \
+                self.slope_drift_sic['reference']
+            self.slope_ratio_drift_sivol[dataset] = \
+                self.slope_drift_sivol[dataset] / \
+                self.slope_drift_sivol['reference']
+
+    def _get_alias(self, filename, reference_dataset):
+        filename = self._get_alias_name(filename)
+        reference_dataset = self._get_alias_name(reference_dataset)
+        if filename == reference_dataset:
+            return 'reference'
+        return filename
+
+    def _get_alias_name(self, filename):
+        info = self.datasets.get_dataset_info(filename)
+        return info[n.ALIAS]
+
+    @staticmethod
+    def _compute_mean(data, weights):
+        mapping = set(
+            data.coord_dims('latitude') + data.coord_dims('longitude'))
+        with warnings.catch_warnings():
+            warnings.simplefilter("ignore")
+            return data.collapsed(
+                ('latitude', 'longitude'),
+                iris.analysis.MEAN,
+                weights=broadcast_to_shape(weights, data.shape, mapping))
+
+    @staticmethod
+    def _compute_error(var, var_obs, drift, drift_obs):
+        var = var.data
+        var_obs = var_obs.data
+        drift = drift.data
+        drift_obs = drift_obs.data
+
+        return 100. * np.nanmean(
+            np.sqrt(
+                SeaIceDrift._var_error(var, var_obs) +
+                SeaIceDrift._var_error(drift, drift_obs)))
+
+    @staticmethod
+    def _var_error(var, obs):
+        var_error = np.absolute(var - obs)
+        var_mean = np.nanmean(obs)
+        var_error_normal = var_error / var_mean
+        return var_error_normal**2
+
+    @staticmethod
+    def _get_slope_ratio(siconc, drift):
+        slope, intercept = np.polyfit(siconc.data, drift.data, 1)
+        std_dev, sig = SeaIceDrift._sd_slope(slope, intercept, siconc.data,
+                                             drift.data)
+        return slope, intercept, std_dev, sig
+
+    @staticmethod
+    def _sd_slope(slope, intercept, sivar, drift):
+        # Parameters
+        alpha = 0.05  # significance level
+        nfreedom = MONTHS_PER_YEAR - 2  # number of degrees of freedom
+        t_crit = stats.t.ppf(1 - alpha / 2, nfreedom)  # critical Student's t
+
+        # Compute standard deviation of slope
+        lreg = slope * sivar + intercept  # linear regression
+        s_yx = np.sum((drift - lreg)**2) / (MONTHS_PER_YEAR - 2)
+        ss_xx = np.sum((sivar - np.mean(sivar))**2)
+        sd_slope = np.sqrt(s_yx / ss_xx)  # Standard deviation of slope
+
+        # Significance
+        t_student = slope / sd_slope
+        sig_slope = 0
+        if np.abs(t_student) > t_crit:
+            sig_slope = 1
+
+        return sd_slope, sig_slope
+
+    def _results(self):
+        logger.info('Results')
+        for model in self.siconc:
+            self._print_results(model)
+
+    def _print_results(self, model):
+        if model == 'reference':
+            return
+        logger.info('Dataset %s', model)
+        if 'latitude_treshold' in self.cfg:
+            logger.info('Metrics computed over domain north of %s',
+                        self.cfg['latitude_treshold'])
+        else:
+            logger.info('Metrics computed inside %s region',
+                        self.cfg.get('polygon_name', 'SCICEX'))
+        logger.info('Slope ratio Drift-Concentration = {0:.3}'
+                    ''.format(self.slope_ratio_drift_siconc[model]))
+        logger.info('Mean error Drift-Concentration (%) = {0:.4}'
+                    ''.format(self.error_drift_siconc[model]))
+        logger.info('Slope ratio Drift-Thickness = {0:.3}'.format(
+            self.slope_ratio_drift_sivol.get(model, math.nan)))
+        logger.info('Mean error Drift-Thickness (%) = {0:.4}'
+                    ''.format(self.error_drift_sivol.get(model, math.nan)))
+
+    def _save(self):
+        logger.info('Save variables')
+        for dataset in self.siconc:
+            self._save_slope(dataset)
+
+    def _save_slope(self, dataset):
+        base_path = os.path.join(self.cfg[n.WORK_DIR], dataset)
+        if not os.path.isdir(base_path):
+            os.makedirs(base_path)
+
+        siconc_path = os.path.join(base_path, 'metric_drift_siconc.csv')
+        sivol_path = os.path.join(base_path, 'metric_drift_sivol.csv')
+
+        with open(siconc_path, 'w') as csvfile:
+            csv_writer = csv.writer(csvfile)
+            csv_writer.writerow(('slope', 'intercept', 'slope_ratio', 'error'))
+            csv_writer.writerow(
+                (self.slope_drift_sic[dataset],
+                 self.intercept_drift_siconc[dataset],
+                 self.slope_ratio_drift_siconc.get(dataset, None),
+                 self.error_drift_siconc.get(dataset, None)))
+
+        sic_data, ancestors_sic = self._get_data_and_ancestors(dataset, 'sic')
+        _, ancestors_sispeed = self._get_data_and_ancestors(dataset, 'sispeed')
+        sithick_data, ancestors_sithick = self._get_data_and_ancestors(
+            dataset, 'sithick')
+        caption = (
+            f"Drift - siconc metric between {sic_data[n.START_YEAR]} and "
+            f"{sic_data[n.END_YEAR]} according to {dataset}")
+        self._create_prov_record(siconc_path, caption,
+                                 ancestors_sic + ancestors_sispeed)
+
+        with open(sivol_path, 'w') as csvfile:
+            csv_writer = csv.writer(csvfile)
+            csv_writer.writerow(('slope', 'intercept', 'slope_ratio', 'error'))
+            csv_writer.writerow(
+                (self.slope_drift_sivol[dataset],
+                 self.intercept_drift_sivol[dataset],
+                 self.slope_ratio_drift_sivol.get(dataset, None),
+                 self.error_drift_sivol.get(dataset, None)))
+        caption = (
+            f"Drift - sithick metric between {sithick_data[n.START_YEAR]} and "
+            f"{sithick_data[n.END_YEAR]} according to {dataset}")
+        self._create_prov_record(sivol_path, caption,
+                                 ancestors_sithick + ancestors_sispeed)
+
+    def _plot_results(self):
+        logger.info('Plotting results')
+        for model in self.siconc:
+            if model == 'reference':
+                continue
+            logger.info('Results for %s', model)
+            self._plot_domain(model)
+
+    def _plot_domain(self, dataset):
+        fig, axes = plt.subplots(1, 2, figsize=(18, 6))
+        plt.suptitle('Seasonal cycle {0}'.format(dataset), fontsize=18)
+        self._plot_drift_siconc(axes[0], dataset)
+        self._plot_drift_sivol(axes[1], dataset)
+        base_path = os.path.join(self.cfg[n.PLOT_DIR], dataset)
+        if not os.path.isdir(base_path):
+            os.makedirs(base_path)
+        plot_path = os.path.join(
+            base_path,
+            'drift-strength.{0}'.format(self.cfg[n.OUTPUT_FILE_TYPE]))
+        fig.savefig(plot_path)
+
+        sic_data, ancestors_sic = self._get_data_and_ancestors(dataset, 'sic')
+        _, ancestors_sispeed = self._get_data_and_ancestors(dataset, 'sispeed')
+        _, ancestors_sithick = self._get_data_and_ancestors(dataset, 'sithick')
+
+        _, ancestors_sic_ref = self._get_data_and_ancestors('reference', 'sic')
+        _, ancestors_sispeed_ref = self._get_data_and_ancestors(
+            'reference', 'sispeed')
+        _, ancestors_sithick_ref = self._get_data_and_ancestors(
+            'reference', 'sithick')
+
+        caption = ("Drift - sithick and drift - siconc plot between "
+                   f"{sic_data[n.START_YEAR]} and "
+                   f"{sic_data[n.END_YEAR]} for {dataset} and reference")
+        self._create_prov_record(
+            plot_path, caption,
+            ancestors_sic + ancestors_sispeed + ancestors_sithick +
+            ancestors_sic_ref + ancestors_sispeed_ref + ancestors_sithick_ref)
+
+    def _plot_drift_sivol(self, axes, dataset):
+        drift = self.sispeed[dataset].data
+        sivol = self.sivol[dataset].data
+
+        slope_sivol = self.slope_drift_sivol[dataset]
+        intercept_sivol = self.intercept_drift_sivol[dataset]
+
+        slope_sivol_obs = self.slope_drift_sivol['reference']
+        intercept_sivol_obs = self.intercept_drift_sivol['reference']
+
+        # slope_ratio_sivol = self.slope_ratio_drift_sivol[dataset]
+        # error_sivol = self.error_drift_sivol[dataset]
+
+        drift_obs = self.sispeed['reference'].data
+        sivol_obs = self.sivol['reference'].data
+
+        axes.plot([sivol[-1], sivol[0]], [drift[-1], drift[0]],
+                  'r-',
+                  linewidth=2)
+        axes.plot(sivol, drift, 'ro-', label='model', linewidth=2)
+        axes.plot(sivol,
+                  slope_sivol * sivol + intercept_sivol,
+                  'r:',
+                  linewidth=2)
+
+        axes.plot([sivol_obs[-1], sivol_obs[0]], [drift_obs[-1], drift_obs[0]],
+                  'b-',
+                  linewidth=2)
+        axes.plot(
+            sivol_obs,
+            drift_obs,
+            'bo-',
+            label=r'reference',
+            # str(np.round(slope_ratio_sivol, 1)) +
+            # # r'; $\epsilon_h$=' +
+            # # str(np.round(error_sivol, 1)) +
+            # r')',
+            linewidth=2)
+        axes.plot(sivol_obs,
+                  slope_sivol_obs * sivol_obs + intercept_sivol_obs,
+                  'b:',
+                  linewidth=2)
+
+        axes.set_xlabel('Sea ice thickness (m)', fontsize=18)
+        axes.set_ylabel('Sea ice drift speed (km d$^{-1}$)', fontsize=18)
+        axes.tick_params(axis='both', labelsize=14)
+        high_sivol, low_sivol = self._get_plot_limits(sivol, sivol_obs, 0.2)
+        high_drift, low_drift = self._get_plot_limits(drift, drift_obs)
+        axes.axis([low_sivol, high_sivol, low_drift, high_drift])
+        axes.legend(loc='lower left', shadow=True, frameon=False, fontsize=12)
+        self._annotate_points(axes, sivol, drift)
+        self._annotate_points(axes, sivol_obs, drift_obs)
+        axes.grid()
+
+    def _plot_drift_siconc(self, axes, dataset):
+        drift = self.sispeed[dataset].data
+        siconc = self.siconc[dataset].data
+
+        slope_siconc = self.slope_drift_sic[dataset]
+        # slope_ratio_siconc = self.slope_ratio_drift_sivol[dataset]
+        intercept_siconc = self.intercept_drift_siconc[dataset]
+        # error_siconc = self.error_drift_siconc[dataset]
+
+        slope_siconc_obs = self.slope_drift_sic['reference']
+        intercept_siconc_obs = self.intercept_drift_siconc['reference']
+
+        drift_obs = self.sispeed['reference'].data
+        siconc_obs = self.siconc['reference'].data
+
+        axes.plot(siconc, drift, 'ro', label='model')
+        axes.plot(siconc,
+                  slope_siconc * siconc + intercept_siconc,
+                  'r:',
+                  linewidth=2)
+
+        axes.plot(siconc_obs, drift_obs, 'bo', label='reference')
+        axes.plot(siconc_obs,
+                  slope_siconc_obs * siconc_obs + intercept_siconc_obs,
+                  'b:',
+                  linewidth=2)
+
+        axes.set_xlabel('Sea ice concentration', fontsize=18)
+        axes.set_ylabel('Sea ice drift speed (km d$^{-1}$)', fontsize=18)
+        axes.tick_params(axis='both', labelsize=14)
+        high_drift, low_drift = self._get_plot_limits(drift, drift_obs)
+        _, low_siconc = SeaIceDrift._get_plot_limits(siconc, siconc_obs, 0.1)
+        axes.axis([low_siconc, 1.01, low_drift, high_drift])
+        axes.legend(loc='lower left', shadow=True, frameon=False, fontsize=12)
+        SeaIceDrift._annotate_points(axes, siconc, drift)
+        SeaIceDrift._annotate_points(axes, siconc_obs, drift_obs)
+        axes.grid()
+
+    @staticmethod
+    def _annotate_points(axes, xvalues, yvalues):
+        for i, j, k in zip(xvalues, yvalues, range(1, 12 + 1)):
+            axes.annotate(k,
+                          xy=(i, j),
+                          xytext=(10, 5),
+                          ha='right',
+                          textcoords='offset points')
+
+    @staticmethod
+    def _get_plot_limits(sivol, sivol_obs, step=0.55):
+        low = min(min(sivol), min(sivol_obs)) - 0.5 * step
+        low = step * math.floor(low / step)
+        low = max(low, 0)
+        high = max(max(sivol), max(sivol_obs)) + 0.5 * step
+        high = step * math.ceil(high / step)
+        return high, low
+
+    def _create_prov_record(self, filepath, caption, ancestors):
+        record = {
+            'caption': caption,
+            'domains': ['nhpolar'],
+            'authors': ['docquier_david'],
+            'references': ['docquier2017cryo'],
+            'ancestors': ancestors
+        }
+        with ProvenanceLogger(self.cfg) as provenance_logger:
+            provenance_logger.log(filepath, record)
+
+    def _get_data_and_ancestors(self, dataset, var):
+        if dataset == 'reference':
+            dataset = self.references[var]
+
+        data = esmvaltool.diag_scripts.shared.group_metadata(
+            self.cfg['input_data'].values(), 'alias')[dataset]
+        data = esmvaltool.diag_scripts.shared.group_metadata(
+            data, 'short_name')
+        return (data[var][0],
+                [data[var][0]['filename'], data['areacello'][0]['filename']])
+
+
+class InsidePolygonFactory(AuxCoordFactory):
+    """Defines a coordinate."""
+    def __init__(self, polygon=None, lat=None, lon=None):
+        """
+        Args:
+
+        * polygon: List
+            List of (lon, lat) tuples defining the polygon
+        * lat: Coord
+            The coordinate providing the latitudes.
+        * lon: Coord
+            The coordinate providing the longitudes.
+        """
+        super(InsidePolygonFactory, self).__init__()
+        self.lat = lat
+        self.lon = lon
+        self.standard_name = None
+        self.long_name = 'Inside polygon'
+        self.var_name = 'inpoly'
+        self.units = '1.0'
+        self.attributes = {}
+
+        polygon.append(polygon[0])
+        self.transformer = Transformer.from_crs("WGS84",
+                                                "North_Pole_Stereographic",
+                                                always_xy=True)
+
+        transformed = []
+        for lon_val, lat_val in polygon:
+            transformed.append(self.transformer.transform(lon_val, lat_val))
+        self.polygon = Polygon(transformed)
+
+    @property
+    def dependencies(self):
+        """Return a dict mapping from constructor names to coordinates."""
+        return {'lat': self.lat, 'lon': self.lon}
+
+    def _derive(self, lat, lon):
+        def in_polygon(lat, lon):
+            """Check if point is inside polygon."""
+            if lon > 180:
+                lon -= 360
+            point = self.transformer.transform(lon, lat)
+            try:
+                contained = self.polygon.contains(Point(point[0], point[1]))
+            except shapely.errors.TopologicalError:
+                return np.nan
+            if contained:
+                return 1.
+            return np.nan
+
+        vectorized = np.vectorize(in_polygon)
+        return vectorized(lat, lon)
+
+    def make_coord(self, coord_dims_func):
+        """Return a new :class:`iris.coords.AuxCoord`.
+
+        Args:
+
+        * coord_dims_func:
+            A callable which can return the list of dimensions relevant
+            to a given coordinate.
+            See :meth:`iris.cube.Cube.coord_dims()`.
+        """
+        # Which dimensions are relevant?
+        derived_dims = self.derived_dims(coord_dims_func)
+        dependency_dims = self._dependency_dims(coord_dims_func)
+
+        # Build the points array.
+        nd_points_by_key = self._remap(dependency_dims, derived_dims)
+        points = self._derive(
+            nd_points_by_key['lat'],
+            nd_points_by_key['lon'],
+        )
+
+        in_polygon = iris.coords.AuxCoord(points,
+                                          standard_name=self.standard_name,
+                                          long_name=self.long_name,
+                                          var_name=self.var_name,
+                                          units=self.units,
+                                          bounds=None,
+                                          attributes=self.attributes,
+                                          coord_system=self.coord_system)
+        return in_polygon
+
+    def update(self, old_coord, new_coord=None):
+        """Notify factory about the removal/replacement of a coordinate.
+
+        Args:
+
+        * old_coord:
+            The coordinate to be removed/replaced.
+        * new_coord:
+            If None, any dependency using old_coord is removed, otherwise
+            any dependency using old_coord is updated to use new_coord.
+        """
+        if self.lat is old_coord:
+            self.lat = new_coord
+        elif self.lon is old_coord:
+            self.lon = new_coord
+
+
+if __name__ == '__main__':
+    with esmvaltool.diag_scripts.shared.run_diagnostic() as config:
+        SeaIceDrift(config).compute()
diff --git a/esmvaltool/diag_scripts/seaice_feedback/negative_seaice_feedback.py b/esmvaltool/diag_scripts/seaice_feedback/negative_seaice_feedback.py
new file mode 100644
index 0000000000..515dbf46b4
--- /dev/null
+++ b/esmvaltool/diag_scripts/seaice_feedback/negative_seaice_feedback.py
@@ -0,0 +1,451 @@
+"""
+Diagnostic to evaluate the negative ice growth-ice thickness feedback
+
+The code presented here is derived from
+TECLIM's GitHub code developed by F. Massonnet:
+http://www.climate.be:3000/TECLIM/ClimateData.git
+branch develop-fmasson
+"""
+
+import os
+import logging
+import math
+import warnings
+import numpy as np
+
+import scipy.stats
+import iris
+import matplotlib.pyplot as plt
+
+import esmvaltool.diag_scripts.shared
+from esmvaltool.diag_scripts.shared import group_metadata
+import esmvaltool.diag_scripts.shared.names as n
+from esmvaltool.diag_scripts.shared._base import ProvenanceLogger
+
+logger = logging.getLogger(os.path.basename(__file__))
+
+
+class NegativeSeaIceFeedback:
+    """
+    Diagnostic to evaluate the negative ice growth-ice thickness feedback
+
+    Parameters
+    ----------
+    conf : dict
+        Diagnostic execution info
+    """
+
+    def __init__(self, conf):
+        self.cfg = conf
+        self.datasets = esmvaltool.diag_scripts.shared.Datasets(self.cfg)
+        self.variables = esmvaltool.diag_scripts.shared.Variables(self.cfg)
+
+    def compute(self):
+        """
+        Compute diagnostic
+        """
+        negative_feedback = list()
+        p_value = list()
+        datasets = list()
+        grouped_input_data = group_metadata(
+            self.cfg['input_data'].values(), 'alias', sort='alias')
+        for alias, dataset in grouped_input_data.items():
+            try:
+                feedback, p_val = self._compute_dataset(alias, dataset)
+            except Exception as ex:
+                logger.error('Failed to compute for %s', alias)
+                logger.exception(ex)
+            else:
+                negative_feedback.append(feedback)
+                p_value.append(p_val)
+                datasets.append(alias)
+
+        self._plot_comparison(negative_feedback, datasets)
+        self._plot_comparison(p_value, datasets, p_values=True)
+
+    def _compute_dataset(self, alias, dataset):
+        var_info = group_metadata(dataset, 'short_name')
+        logger.info('Computing %s', alias)
+        area_cello = iris.load_cube(
+            var_info['areacello'][0]['filename']
+        )
+        cellarea = area_cello.data
+        sit = iris.load_cube(var_info['sit'][0]['filename'])
+        mask = np.asarray(
+            sit.coord('latitude').points > 80.0,
+            dtype=np.int8
+        )
+        try:
+            mask = np.broadcast_to(mask, cellarea.shape)
+        except ValueError:
+            try:
+                mask = np.broadcast_to(np.expand_dims(mask, -1),
+                                       cellarea.shape)
+            except ValueError:
+                mask = np.broadcast_to(np.expand_dims(mask, 0),
+                                       cellarea.shape)
+        volume = self.compute_volume(sit, cellarea, mask=mask)
+        del cellarea, sit
+
+        neg_feedback, stats, _ = self.negative_seaice_feedback(
+            var_info['sit'][0], volume, period=12, order=2
+        )
+        del volume
+        logger.info("Negative feedback: %10.4f", neg_feedback)
+        logger.info("P-Value: %10.4f", stats[1])
+        return (neg_feedback, stats[1])
+
+    @staticmethod
+    def compute_volume(avg_thick, cellarea, mask=1):
+        """
+        Compute sea ice volume
+
+        Parameters
+        ----------
+        avg_thick : iris.Cube
+            Sea ice or snow volume per unit cell area, in meters
+        cellarea : numpy.array
+            Grid cell area (sq. meters)
+        mask : int, optional
+            mask (1 on ocean, 0 on continent) (the default is 1)
+
+        Raises
+        ------
+        ValueError
+            avg_thick has neither 2 nor 3 dimensions, or mask is not
+            between 0 and 1
+
+        Returns
+        -------
+        numpy.array
+            Sea ice or snow volume in the region defined by the mask
+        """
+        if np.max(mask) != 1.0 or np.min(mask) < 0.0:
+            raise ValueError("Mask not between 0 and 1")
+        with warnings.catch_warnings():
+            warnings.filterwarnings("ignore")
+            max_thick = avg_thick.collapsed(
+                avg_thick.coords(), iris.analysis.MAX
+            )
+        if float(max_thick.data) > 20.0:
+            logger.warning("Large sea ice thickness: Max = %f",
+                           max_thick.data)
+
+        if avg_thick.coords('time'):
+            vol = []
+            for thick_slice in avg_thick.slices_over('time'):
+                vol.append(
+                    np.sum(
+                        thick_slice.data * cellarea.data * mask.data
+                    ) / 1e12
+                )
+            vol = np.asarray(vol)
+        elif len(avg_thick.shape) == 2:
+            vol = np.sum(avg_thick * cellarea * mask) / 1e12
+        else:
+            raise ValueError("avg_thick has neither 2 nor 3 dimensions")
+        return vol
+
+    @staticmethod
+    def detrend(data, order=1, period=None):
+        """
+        Detrend signal
+
+        Parameters
+        ----------
+        data : numpy.array
+            Data to detrend. Assumed to be sampled at evenly spaced times
+        order : int, optional
+            Order of the polynomial for detrending (the default is 1)
+        period : int, optional
+            possible existing periodicity of the signal, coming e.g.
+            from external forcing, expressed in units time steps.
+            That is, data[i] and data[i + period] correspond
+            to two realizations of the process at times where the
+            forcing might be similar. Common examples include
+            the seasonal cycle forcing, or the diurnal forcing.
+
+            If "period" is not None, the detrending is performed
+            separately for each time step (e.g., all 1st of January,
+            all 2nd of January, ..., all 31st of December in case
+            of annual cycle).
+
+            If "period" is None, the detrending is performed on
+            the given time series.
+            The default is None
+
+        Raises
+        ------
+        ValueError
+            If the input data is not one-dimensional
+
+        Returns
+        -------
+        numpy.array
+            the signal detrended using a least-square polynomial
+            regression of order "order"
+        """
+
+        if len(data.shape) != 1:
+            raise ValueError("Non-conform input data")
+
+        # Remove possible nans from the data. All the regression (ie polyfit)
+        # parameters will be estimated based on the no-nan data but the
+        # residuals will be computed from the original data in order
+        # to keep the same size and to restitute NaNs where they appeared
+
+        data_nonan = data[~np.isnan(data)]
+
+        # If the signal has no periodicity, we just make a linear regression
+        if period is None:
+            time_nonan = np.arange(len(data_nonan))
+            time = np.arange(len(data))
+            polynom = np.polyfit(time_nonan, data_nonan, order)
+            residuals = data - np.sum([polynom[i] * time ** (order - i)
+                                       for i in range(order + 1)], axis=0)
+
+        # If the signal contains a periodical component, we do the regression
+        # time step per time step
+        else:
+            residuals = np.empty(len(data))
+
+            # For each time step of the period, detrend
+            # Note that another common option is to first remove a seasonal
+            # cycle and then detrend the anomalies. However this assumes that
+            # a cycle can be estimated, which in presence of a trend is tricky
+            # because the trend component interferes with the mean. I have
+            # tried that and it gives ugly step-wise anomalies. Detrending day
+            # per day seems the most natural way to do, at least as long as we
+            # assume that the raw signal at some time is the result of a
+            # seasonal cycle depending on the position of the time step in the
+            # period, plus a common trend, plus some noise.
+            for i in np.arange(period):
+                raw = data[np.arange(i, len(data), period)]
+                raw_nonan = raw[~np.isnan(raw)]
+                time = np.arange(len(raw))
+                time_nonan = np.arange(len(raw_nonan))
+                polynom = np.polyfit(time_nonan, raw_nonan, order)
+                residuals[np.arange(i, len(data), period)] = \
+                    raw - np.sum([polynom[i] * time ** (order - i)
+                                  for i in range(order + 1)], axis=0)
+        return residuals
+
+    def negative_seaice_feedback(self, dataset_info, volume, period, order=1):
+        """
+        Function to estimate the negative ice-thickness ice growth feedback
+        and its significance.
+
+        Parameters
+        ----------
+        dataset_info : dict
+            Dataset metadata (used for the IFE scatter plot)
+        volume : numpy.array
+            Time series of sea ice volume
+        period : int
+            period of the signal (period expressed in time steps:
+            12 for monthly, 365 for daily...)
+        order : int, optional
+            order of the polynomial detrending (>=0) (the default is 1)
+
+        Raises
+        ------
+        ValueError
+            If volume is not 1D or timeseries length is not a multiple of
+            the period
+
+        Returns
+        -------
+        nv: float
+            Feedback parameter expressed as the regression
+            of dV on V_min
+
+        corr: tuple(float, float, float)
+            Correlation between those two, the p-value under the null
+            hypothesis of no correlation between dV and V_min and standard
+            deviation
+
+        volume: tuple(float, float)
+            [V_min, dV]: detrended time series of annual minimum of sea ice
+            volume, detrended series of wintertime volume production
+        """
+
+        if len(volume.shape) != 1:
+            raise ValueError("Volume is not 1-D")
+
+        if volume.size % period != 0:
+            raise ValueError(
+                "Length of volume series is not multiple of period"
+            )
+
+        # 1. Locate the minima for each year
+        imin = [t + np.nanargmin(volume.data[t:t + period])
+                for t in range(0, volume.size, period)]
+
+        # 2. Locate the maxima for each year
+        imax = [t + np.nanargmax(volume.data[t:t + period])
+                for t in np.arange(0, volume.size, period)]
+
+        # 3. Detrend series. A one-year shift is introduced to make sure we
+        #    compute volume production *after* the summer minimum
+        vol_min = self.detrend(volume[imin[:-1]], order=order)
+        dvol = self.detrend(volume[imax[1:]] - volume[imin[:-1]], order=order)
+
+        # 4. Compute diagnostics
+        # If all Vmins are zero or all dVs are zero, return NaN
+        # (pathological case)
+        if np.max(vol_min) == 0.0 or np.max(dvol) == 0.0:
+            fit = np.nan
+            fit_complete = [np.nan, np.nan]
+            corr = np.nan
+            pval = np.nan
+            std = np.nan
+        else:
+            corr = np.corrcoef(vol_min, dvol)[0, 1]
+            # The t-statistic.
+            tstat = corr / np.sqrt((1 - corr ** 2) / (len(vol_min) - 2))
+            # Under the null hypothesis of no correlation,
+            # tstat follows a student's law with N - 2 dof.
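+            # With r = corr and N = len(vol_min), t = r * sqrt((N - 2) /
+            # (1 - r**2)); illustrative numbers: r = -0.5 and N = 26 give
+            # t ~= -2.83 and a one-sided p-value of about 0.005 at
+            # N - 2 = 24 degrees of freedom.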
+            pval = 1.0 - scipy.stats.t.cdf(np.abs(tstat), len(vol_min) - 2)
+
+            if pval > 0.05:
+                logger.warning(
+                    "Check the scatterplot of dV versus V_min, it is most "
+                    "likely suspicious, and the feedback factor likely "
+                    "meaningless: p-value: %f", pval
+                )
+
+            try:
+                fit_complete, cov = np.polyfit(vol_min, dvol, 1, cov=True)
+                logger.info(fit_complete)
+                fit = fit_complete[0]  # Fit parameter
+                std = np.sqrt(cov[0, 0])  # Standard deviation on it
+            except ValueError:
+                logger.error("(negative_seaice_feedback) PROBLEM, "
+                             "series badly conditioned: "
+                             "Input volume: %s Vmin: %s dv: %s",
+                             volume, vol_min, dvol)
+                raise
+
+        self._plot_ife(dataset_info, vol_min, dvol, fit_complete)
+
+        return [fit, [corr, pval, std], [vol_min, dvol]]
+
+    def _plot_ife(self, dataset_info, vol_min, dvol, fit_complete):
+        path = os.path.join(
+            self.cfg[n.PLOT_DIR],
+            f'ife_{dataset_info[n.ALIAS]}.{self.cfg[n.OUTPUT_FILE_TYPE]}'
+        )
+        plot_options = self.cfg.get('plot', {})
+        fig = plt.figure()
+        plt.scatter(
+            vol_min,
+            dvol,
+            plot_options.get('point_size', 8),
+            color=plot_options.get('point_color', 'black'),
+        )
+        minx, maxx = plt.xlim()
+        xvals = np.linspace(minx, maxx)
+        plt.plot(xvals, xvals * fit_complete[0] + fit_complete[1])
+        axes = plt.gca()
+        axes.set_title(
+            f'Evaluation of the IFE \n{dataset_info[n.ALIAS]} '
+            f'({dataset_info[n.START_YEAR]}-{dataset_info[n.END_YEAR]})'
+        )
+        axes.set_ylabel('Wintertime volume range \n(anomalies) [10³ km³]')
+        axes.set_xlabel('Volume at minimum\n(anomalies) [10³ km³]')
+        plt.grid(True, 'both', 'both')
+        plt.tight_layout()
+        fig.savefig(path)
+        plt.close(fig)
+        self._create_prov_record(
+            path, f'Evaluation of IFE for {dataset_info[n.ALIAS]}',
+            [info['filename'] for info in group_metadata(
+                self.cfg['input_data'].values(), n.ALIAS
+            )[dataset_info[n.ALIAS]]]
+        )
+
+    def _plot_comparison(self, data, datasets, p_values=False):
+        if p_values:
+            filename = 'feedback_p_values'
+        else:
+            filename = 'feedback'
+
+        path = os.path.join(
+            self.cfg[n.PLOT_DIR],
+            f'{filename}.{self.cfg[n.OUTPUT_FILE_TYPE]}'
+        )
+
+        plot_options = self.cfg.get('plot', {})
+        fig = plt.figure()
+        index = np.arange(len(data))
+        plt.scatter(
+            index,
+            data,
+            plot_options.get('point_size', 8),
+            color=plot_options.get('point_color', 'black'),
+        )
+        if p_values:
+            plt.hlines(0.05, -1, index[-1] + 1, colors='red')
+        axes = plt.gca()
+        logger.debug(data)
+        max_limit = math.ceil(max(data))
+        if max_limit < 0:
+            max_limit = 0
+        min_limit = math.floor(min(data))
+        separation = max_limit - min_limit
+
+        if plot_options.get('show_values', False):
+            def _get_y_position(value):
+                if value > min_limit + separation * 0.75:
+                    return value - separation * 0.05
+                return value + separation * 0.10
+
+            for i, value in enumerate(data):
+                axes.annotate(
+                    f'{value:.2f}',
+                    xy=(index[i], value),
+                    xycoords='data',
+                    textcoords='data',
+                    xytext=(index[i], _get_y_position(value)),
+                    rotation=90,
+                )
+
+        # axes and labels
+        axes.set_ylim(min_limit, max_limit)
+        if p_values:
+            axes.set_ylabel('P-value')
+            plt.ylim(0, max(0.25, max(data)))
+        else:
+            axes.set_ylabel('IFE')
+        axes.set_title('IFE comparison')
+        _, xtick_names = plt.xticks(index, datasets)
+        plt.xlim(index[0] - 0.5, index[-1] + 0.5)
+        plt.setp(xtick_names, rotation=90, fontsize=10)
+        plt.grid(True, 'both', 'y')
+        plt.tight_layout()
+        fig.savefig(path)
+        plt.close(fig)
+        self._create_prov_record(
+            path, f'IFE {filename} comparison for all datasets',
+            group_metadata(self.cfg['input_data'].values(), n.ALIAS)
+        )
+
+    def _create_prov_record(self, filepath, caption, ancestors):
+        record = {
+            'caption': caption,
+            'domains': ['nhpolar'],
+            'ancestors': ancestors
+        }
+        with ProvenanceLogger(self.cfg) as provenance_logger:
+            provenance_logger.log(filepath, record)
+
+
+def main():
+    """Run diagnostic"""
+    with esmvaltool.diag_scripts.shared.run_diagnostic() as config:
+        NegativeSeaIceFeedback(config).compute()
+
+
+if __name__ == '__main__':
+    main()
diff --git a/esmvaltool/diag_scripts/shapeselect/diag_shapeselect.py b/esmvaltool/diag_scripts/shapeselect/diag_shapeselect.py
index f648265e08..94ab3b14ec 100644
--- a/esmvaltool/diag_scripts/shapeselect/diag_shapeselect.py
+++ b/esmvaltool/diag_scripts/shapeselect/diag_shapeselect.py
@@ -11,8 +11,11 @@
 from shapely.geometry import MultiPoint, shape
 from shapely.ops import nearest_points
 
-from esmvaltool.diag_scripts.shared import (run_diagnostic, ProvenanceLogger,
-                                            get_diagnostic_filename)
+from esmvaltool.diag_scripts.shared import (
+    ProvenanceLogger,
+    get_diagnostic_filename,
+    run_diagnostic,
+)
 
 logger = logging.getLogger(os.path.basename(__file__))
 
@@ -23,7 +26,7 @@ def get_provenance_record(cfg, basename, caption, extension, ancestor_files):
         'caption': caption,
         'statistics': ['other'],
         'domains': ['global'],
-        'authors': ['berg_pe'],
+        'authors': ['berg_peter'],
         'references': ['acknow_project'],
         'ancestors': ancestor_files,
     }
@@ -41,23 +44,29 @@ def main(cfg):
                      attributes['standard_name'], attributes['dataset'])
         logger.debug("Loading %s", filename)
         cube = iris.load_cube(filename)
+
         ncts, nclon, nclat = shapeselect(cfg, cube)
         name = os.path.splitext(os.path.basename(filename))[0] + '_polygon'
         if cfg['write_xlsx']:
             xname = name + '_table'
             writexls(cfg, filename, ncts, nclon, nclat)
             caption = 'Selected gridpoints within shapefile.'
-            get_provenance_record(
-                cfg, xname, caption, 'xlsx', ancestor_files=[filename])
-        if cfg['write_netcdf']:
-            path = os.path.join(
-                cfg['work_dir'],
-                name + '.nc',
-            )
-            write_netcdf(path, ncts, nclon, nclat, cube, cfg)
-            caption = 'Selected gridpoints within shapefile.'
-            get_provenance_record(
-                cfg, name, caption, 'nc', ancestor_files=[filename])
+            get_provenance_record(cfg,
+                                  xname,
+                                  caption,
+                                  'xlsx',
+                                  ancestor_files=[filename])
+        path = os.path.join(
+            cfg['work_dir'],
+            name + '.nc',
+        )
+        write_netcdf(path, ncts, nclon, nclat, cube, cfg)
+        caption = 'Selected gridpoints within shapefile.'
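+        # write_netcdf() stores one time series per shapefile polygon
+        # (dimensions: time x polygon); the provenance record below links
+        # the netCDF output back to the input file it was derived from.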
+        get_provenance_record(cfg,
+                              name,
+                              caption,
+                              'nc',
+                              ancestor_files=[filename])
 
 
 def write_keyvalue_toxlsx(worksheet, row, key, value):
@@ -93,17 +102,16 @@ def writexls(cfg, filename, ncts, nclon1, nclat1):
     workbook = xlsxwriter.Workbook(
         os.path.join(
             cfg['work_dir'],
-            os.path.splitext(os.path.basename(filename))[0] + '_polygon_table'
-            + '.xlsx'))
+            os.path.splitext(os.path.basename(filename))[0] +
+            '_polygon_table' + '.xlsx'))
     worksheet = workbook.add_worksheet('Data')
     worksheet.write(0, 0, 'Date')
     worksheet.write(0, 1, 'Lon/Lat')
     worksheet.write_column(2, 0, wtime)
     for row in range(ncts.shape[1]):
         worksheet.write(
-            1, row + 1,
-            str("%#.3f" % round(float(nclon1[row]), 3)) + '_' + str(
-                "%#.3f" % round(float(nclat1[row]), 3)))
+            1, row + 1, f"{round(float(nclon1[row]), 3):.3f}_"
+            f"{round(float(nclat1[row]), 3):.3f}")
         worksheet.write_column(2, row + 1,
                                np.around(np.squeeze(ncts[:, row]), decimals=8))
         worksheet.set_column(0, row + 1, 20)
@@ -111,7 +119,7 @@ def writexls(cfg, filename, ncts, nclon1, nclat1):
     worksheet.set_column(0, 0, 20)
     for row, attr in enumerate(ncfile.ncattrs()):
         worksheet.write(row, 0, attr)
-        worksheet.write(row, 1, getattr(ncfile, attr))
+        worksheet.write(row, 1, str(getattr(ncfile, attr)))
     worksheet = workbook.add_worksheet('ESMValTool')
     worksheet.set_column(0, 0, 20)
     row = 0
@@ -135,7 +143,8 @@ def shapeselect(cfg, cube):
             coordpoints[i] = (coordpoints[i][0] - 360., coordpoints[i][1])
     else:
         raise ValueError("Support for 2-d coords not implemented!")
-    points = MultiPoint(coordpoints)
+    multipoint = MultiPoint(coordpoints)
+    points = list(multipoint.geoms)
     with fiona.open(shppath) as shp:
         gpx = []
         gpy = []
@@ -149,15 +158,16 @@ def shapeselect(cfg, cube):
             if wgtmet == 'mean_inside':
                 gpx, gpy = mean_inside(gpx, gpy, points, multi, cube)
                 if not gpx:
-                    gpx, gpy = representative(gpx, gpy, points, multi, cube)
+                    gpx, gpy = representative(gpx, gpy, multipoint, multi,
+                                              cube)
             elif wgtmet == 'representative':
-                gpx, gpy = representative(gpx, gpy, points, multi, cube)
+                gpx, gpy = representative(gpx, gpy, multipoint, multi, cube)
             if len(gpx) == 1:
                 ncts[:, ishp] = np.reshape(cube.data[:, gpy, gpx],
                                            (cube.data.shape[0], ))
             else:
                 ncts[:, ishp] = np.mean(cube.data[:, gpy, gpx], axis=1)
-            gxx, gyy = representative([], [], points, multi, cube)
+            gxx, gyy = representative([], [], multipoint, multi, cube)
             nclon[ishp] = cube.coord('longitude').points[gxx]
             nclat[ishp] = cube.coord('latitude').points[gyy]
     return ncts, nclon, nclat
@@ -179,10 +189,10 @@ def mean_inside(gpx, gpy, points, multi, cube):
     return gpx, gpy
 
 
-def representative(gpx, gpy, points, multi, cube):
+def representative(gpx, gpy, multipoint, multi, cube):
     """Find representative point in shape."""
     reprpoint = multi.representative_point()
-    nearest = nearest_points(reprpoint, points)
+    nearest = nearest_points(reprpoint, multipoint)
     npx = nearest[1].coords[0][0]
     npy = nearest[1].coords[0][1]
     if npx < 0:
@@ -235,19 +245,24 @@ def write_netcdf(path, var, plon, plat, cube, cfg):
     polys.setncattr_string('standard_name', 'polygon')
     polys.setncattr_string('long_name', 'polygon')
     polys.setncattr_string('shapefile', cfg['shapefile'])
-    lon = ncout.createVariable(
-        cube.coord('longitude').var_name, 'f8', 'polygon', zlib=True)
+    lon = ncout.createVariable(cube.coord('longitude').var_name,
+                               'f8',
+                               'polygon',
+                               zlib=True)
     lon.setncattr_string('standard_name',
                          cube.coord('longitude').standard_name)
    lon.setncattr_string('long_name', cube.coord('longitude').long_name)
     lon.setncattr_string('units',
cube.coord('longitude').units.origin) - lat = ncout.createVariable( - cube.coord('latitude').var_name, 'f8', 'polygon', zlib=True) + lat = ncout.createVariable(cube.coord('latitude').var_name, + 'f8', + 'polygon', + zlib=True) lat.setncattr_string('standard_name', cube.coord('latitude').standard_name) lat.setncattr_string('long_name', cube.coord('latitude').long_name) lat.setncattr_string('units', cube.coord('latitude').units.origin) - data = ncout.createVariable( - cube.var_name, 'f4', ('time', 'polygon'), zlib=True) + data = ncout.createVariable(cube.var_name, + 'f4', ('time', 'polygon'), + zlib=True) data.setncattr_string('standard_name', cube.standard_name) data.setncattr_string('long_name', cube.long_name) data.setncattr_string('units', cube.units.origin) diff --git a/esmvaltool/diag_scripts/shared/__init__.py b/esmvaltool/diag_scripts/shared/__init__.py index b0f0876449..a422acc0e3 100644 --- a/esmvaltool/diag_scripts/shared/__init__.py +++ b/esmvaltool/diag_scripts/shared/__init__.py @@ -1,18 +1,31 @@ """Code that is shared between multiple diagnostic scripts.""" from . import io, iris_helpers, names, plot -from ._base import (ProvenanceLogger, extract_variables, get_cfg, - get_diagnostic_filename, get_plot_filename, group_metadata, - run_diagnostic, select_metadata, sorted_group_metadata, - sorted_metadata, variables_available) +from ._base import ( + ProvenanceLogger, + extract_variables, + get_cfg, + get_diagnostic_filename, + get_plot_filename, + group_metadata, + run_diagnostic, + save_data, + save_figure, + select_metadata, + sorted_group_metadata, + sorted_metadata, + variables_available, +) from ._diag import Datasets, Variable, Variables from ._validation import apply_supermeans, get_control_exper_obs __all__ = [ # Main entry point for diagnostics 'run_diagnostic', - # Define output filenames - 'get_diagnostic_filename', + # Define and write output files + 'save_figure', + 'save_data', 'get_plot_filename', + 'get_diagnostic_filename', # Log provenance 'ProvenanceLogger', # Select and sort input metadata diff --git a/esmvaltool/diag_scripts/shared/_base.py b/esmvaltool/diag_scripts/shared/_base.py index 55668748d0..1789909130 100644 --- a/esmvaltool/diag_scripts/shared/_base.py +++ b/esmvaltool/diag_scripts/shared/_base.py @@ -7,12 +7,17 @@ import shutil import sys import time -from collections import OrderedDict +from pathlib import Path +import distributed +import iris +import matplotlib.pyplot as plt import yaml logger = logging.getLogger(__name__) +iris.FUTURE.save_split_attrs = True + def get_plot_filename(basename, cfg): """Get a valid path for saving a diagnostic plot. @@ -28,11 +33,10 @@ def get_plot_filename(basename, cfg): ------- str: A valid path for saving a diagnostic plot. - """ return os.path.join( cfg['plot_dir'], - basename + '.' + cfg['output_file_type'], + f"{basename}.{cfg['output_file_type']}", ) @@ -52,15 +56,89 @@ def get_diagnostic_filename(basename, cfg, extension='nc'): ------- str: A valid path for saving a diagnostic data file. - """ return os.path.join( cfg['work_dir'], - basename + '.' + extension, + f"{basename}.{extension}", ) -class ProvenanceLogger(object): +def save_figure(basename, provenance, cfg, figure=None, close=True, **kwargs): + """Save a figure to file. + + Parameters + ---------- + basename: str + The basename of the file. + provenance: dict + The provenance record for the figure. + cfg: dict + Dictionary with diagnostic configuration. + figure: matplotlib.figure.Figure + Figure to save. 
+ close: bool + Close the figure after saving. + **kwargs: + Keyword arguments to pass to :obj:`matplotlib.figure.Figure.savefig`. + + See Also + -------- + ProvenanceLogger: For an example provenance record that can be used + with this function. + """ + if cfg.get('output_file_type') is None: + extensions = ('png', 'pdf') + elif isinstance(cfg['output_file_type'], str): + extensions = (cfg['output_file_type'], ) + else: + extensions = cfg['output_file_type'] + + for ext in extensions: + filename = Path(cfg['plot_dir']) / ext / f"{basename}.{ext}" + filename.parent.mkdir(exist_ok=True) + logger.info("Plotting analysis results to %s", filename) + fig = plt if figure is None else figure + fig.savefig(filename, **kwargs) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(filename, provenance) + + if close: + plt.close(figure) + + +def save_data(basename, provenance, cfg, cube, **kwargs): + """Save the data used to create a plot to file. + + Parameters + ---------- + basename: str + The basename of the file. + provenance: dict + The provenance record for the data. + cfg: dict + Dictionary with diagnostic configuration. + cube: iris.cube.Cube + Data cube to save. + **kwargs: + Extra keyword arguments to pass to :obj:`iris.save`. + + See Also + -------- + ProvenanceLogger: For an example provenance record that can be used + with this function. + """ + if 'target' in kwargs: + raise ValueError( + "Please use the `basename` argument to specify the output file") + + filename = get_diagnostic_filename(basename, cfg) + logger.info("Saving analysis results to %s", filename) + iris.save(cube, target=filename, **kwargs) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(filename, provenance) + + +class ProvenanceLogger: """Open the provenance logger. Parameters @@ -75,15 +153,14 @@ class ProvenanceLogger(object): record = { 'caption': "This is a nice plot.", 'statistics': ['mean'], - 'domain': 'global', - 'plot_type': 'zonal', - 'plot_file': '/path/to/result.png', + 'domain': ['global'], + 'plot_type': ['zonal'], 'authors': [ 'first_author', 'second_author', ], 'references': [ - 'acknow_project', + 'author20journal', ], 'ancestors': [ '/path/to/input_file_1.nc', @@ -94,7 +171,6 @@ class ProvenanceLogger(object): with ProvenanceLogger(cfg) as provenance_logger: provenance_logger.log(output_file, record) - """ def __init__(self, cfg): @@ -119,18 +195,22 @@ def log(self, filename, record): Dictionary with the provenance information to be logged. Typical keys are: - - plot_type - - plot_file - - caption - ancestors - authors + - caption + - domain + - plot_type - references + - statistics Note ---- - See also esmvaltool/config-references.yml + See the provenance `documentation`_ for more information. - """ + .. _documentation: https://docs.esmvaltool.org/en/latest/community/diagnostic.html#recording-provenance + """ # noqa + if isinstance(filename, Path): + filename = str(filename) if filename in self.table: raise KeyError( "Provenance record for {} already exists.".format(filename)) @@ -170,14 +250,12 @@ def select_metadata(metadata, **attributes): ------- :obj:`list` of :obj:`dict` A list of matching metadata. 
- """ selection = [] for attribs in metadata: - if all( - a in attribs and ( - attribs[a] == attributes[a] or attributes[a] == '*') - for a in attributes): + if all(a in attribs and ( + attribs[a] == attributes[a] or attributes[a] == '*') + for a in attributes): selection.append(attribs) return selection @@ -197,9 +275,7 @@ def group_metadata(metadata, attribute, sort=None): Returns ------- :obj:`dict` of :obj:`list` of :obj:`dict` - A dictionary containing the requested groups. If sorting is requested, - an `OrderedDict` will be returned. - + A dictionary containing the requested groups. """ groups = {} for attributes in metadata: @@ -230,7 +306,6 @@ def sorted_metadata(metadata, sort): ------- :obj:`list` of :obj:`dict` The sorted list of variable metadata. - """ if isinstance(sort, str): sort = [sort] @@ -257,18 +332,17 @@ def sorted_group_metadata(metadata_groups, sort): Returns ------- - :obj:`OrderedDict` of :obj:`list` of :obj:`dict` + :obj:`dict` of :obj:`list` of :obj:`dict` A dictionary containing the requested groups. - """ if sort is True: sort = [] def normalized_group_key(key): - """Define a key to sort the OrderedDict by.""" + """Define a key to sort by.""" return '' if key is None else str(key).lower() - groups = OrderedDict() + groups = {} for key in sorted(metadata_groups, key=normalized_group_key): groups[key] = sorted_metadata(metadata_groups[key], sort) @@ -294,7 +368,6 @@ def extract_variables(cfg, as_iris=False): dict Variable information in :obj:`dict`s (values) for each `short_name` (key). - """ keys_to_extract = [ 'short_name', @@ -312,11 +385,14 @@ def extract_variables(cfg, as_iris=False): variables[short_name] = {} info = variables[short_name] for key in keys_to_extract: - info[key] = data[key] + if key in data: + info[key] = data[key] # Replace short_name by var_name if desired if as_iris: info['var_name'] = info.pop('short_name') + if info['standard_name'] == '': + info['standard_name'] = None return variables @@ -335,7 +411,6 @@ def variables_available(cfg, short_names): ------- bool `True` if all variables available, `False` if not. - """ input_data = cfg['input_data'].values() available_short_names = list(group_metadata(input_data, 'short_name')) @@ -396,7 +471,6 @@ def main(cfg): The `cfg` dict passed to `main` contains the script configuration that can be used with the other functions in this module. - """ # Implemented as context manager so we can support clean up actions later parser = argparse.ArgumentParser(description="Diagnostic script") @@ -404,15 +478,24 @@ def main(cfg): parser.add_argument( '-f', '--force', - help=("Force emptying the output directories" + help=("Force emptying the output directories " "(useful when re-running the script)"), action='store_true', ) parser.add_argument( '-i', '--ignore-existing', - help=("Force running the script, even if output files exists." 
- "(useful when re-running the script, use at your own risk)"), + help=("Force running the script, even if output files exist " + "(useful when re-running the script, use at your own risk)."), + action='store_true', + ) + parser.add_argument( + '-n', + '--no-distributed', + help=("Do not use the Dask distributed 'scheduler_address' from the " + "configuration file " + "(useful when re-running the script and the scheduler is no " + "longer available)."), action='store_true', ) parser.add_argument( @@ -441,38 +524,63 @@ def main(cfg): logger.info("Starting diagnostic script %s with configuration:\n%s", cfg['script'], yaml.safe_dump(cfg)) - # Create output directories - output_directories = [] - if cfg['write_netcdf']: - output_directories.append(cfg['work_dir']) - if cfg['write_plots']: - output_directories.append(cfg['plot_dir']) - - existing = [p for p in output_directories if os.path.exists(p)] + # Clean run_dir and output directories from previous runs + default_files = { + 'diagnostic_provenance.yml', + 'log.txt', + 'profile.bin', + 'resource_usage.txt', + 'settings.yml', + } + + output_directories = (cfg['work_dir'], cfg['plot_dir']) + old_content = [ + p for p in output_directories + if Path(p).exists() and any(Path(p).iterdir()) + ] + old_content.extend(p for p in glob.glob(f"{cfg['run_dir']}{os.sep}*") + if not os.path.basename(p) in default_files) - if existing: + if old_content: if args.force: - for output_directory in existing: - logger.info("Removing %s", output_directory) - shutil.rmtree(output_directory) + for content in old_content: + logger.info("Removing %s", content) + if os.path.isfile(content): + os.remove(content) + else: + shutil.rmtree(content) elif not args.ignore_existing: - logger.error( - "Script will abort to prevent accidentally overwriting your " - "data in these directories:\n%s\n" - "Use -f or --force to force emptying the output directories " - "or use -i or --ignore-existing to ignore existing output " - "directories.", '\n'.join(existing)) + raise FileExistsError( + "Script will abort to prevent accidentally overwriting " + "your data in the following output files or directories:" + "\n%s\n Use -f or --force to force emptying the output " + "directories or use -i or --ignore-existing to ignore " + "existing output directories." % '\n'.join(old_content)) + # Create output directories for output_directory in output_directories: - logger.info("Creating %s", output_directory) - if args.ignore_existing and os.path.exists(output_directory): - continue - os.makedirs(output_directory) + if not os.path.isdir(output_directory): + logger.info("Creating %s", output_directory) + os.makedirs(output_directory) provenance_file = os.path.join(cfg['run_dir'], 'diagnostic_provenance.yml') if os.path.exists(provenance_file): + logger.info("Removing %s from previous run.", provenance_file) os.remove(provenance_file) - yield cfg + if not args.no_distributed and 'scheduler_address' in cfg: + try: + client = distributed.Client(cfg['scheduler_address']) + except OSError as exc: + raise OSError( + "Unable to connect to the Dask distributed scheduler at " + f"{cfg['scheduler_address']}. 
If the scheduler is no longer "
+                "available, try re-running the diagnostic script with the "
+                "--no-distributed flag.") from exc
+    else:
+        client = contextlib.nullcontext()
+
+    with client:
+        yield cfg
 
     logger.info("End of diagnostic script run.")
diff --git a/esmvaltool/diag_scripts/shared/_diag.py b/esmvaltool/diag_scripts/shared/_diag.py
index 01e6cfbfa9..c8797fa763 100644
--- a/esmvaltool/diag_scripts/shared/_diag.py
+++ b/esmvaltool/diag_scripts/shared/_diag.py
@@ -8,16 +8,14 @@
 datasets = e.Datasets(cfg)
 variables = e.Variables(cfg)
 
-Notes
------
-An example diagnostic using these classes is given in
-`diag_scripts/examples/diagnostic_object_oriented.py`.
-
 """
 
 import collections
 import logging
+import warnings
+
+from esmvaltool import ESMValToolDeprecationWarning
 
 from . import names as n
 
@@ -27,17 +25,49 @@
 # Global variables
 DEFAULT_INFO = 'not_specified'
 
+DEPRECATION_MSG = ("The class {class_name} has been deprecated in version 2.2 "
+                   "and is not maintained anymore. Please consider using "
+                   "alternative functions such as 'select_metadata', "
+                   "'sorted_metadata', 'group_metadata' or "
+                   "'sorted_group_metadata' that are provided by the module "
+                   "esmvaltool.diag_scripts.shared.")
+
 # Variable class containing all relevant information
-Variable = collections.namedtuple('Variable', [n.SHORT_NAME,
-                                               n.STANDARD_NAME,
-                                               n.LONG_NAME,
-                                               n.UNITS])
+BaseVariable = collections.namedtuple('Variable', [n.SHORT_NAME,
+                                                   n.STANDARD_NAME,
+                                                   n.LONG_NAME,
+                                                   n.UNITS])
+
+
+class Variable(BaseVariable):
+    """Variable class containing all relevant information.
+
+    Note
+    ----
+    This class has been deprecated in version 2.2 and will be removed two
+    minor releases later in version 2.4.
+
+    """
 
-
-class Variables(object):
+    def __new__(cls, short_name, standard_name, long_name, units):
+        """Deprecate this class."""
+        warnings.warn(
+            DEPRECATION_MSG.format(class_name=str(cls)),
+            ESMValToolDeprecationWarning)
+        self = super().__new__(cls, short_name, standard_name, long_name,
+                               units)
+        return self
+
+
+class Variables:
     """Class to easily access a recipe's variables in a diagnostic.
 
+    Note
+    ----
+    This class has been deprecated in version 2.2 and will be removed two
+    minor releases later in version 2.4.
+
     Examples
     --------
     Get all variables of a recipe configuration `cfg`::
@@ -67,12 +97,15 @@ def __init__(self, cfg=None, **names):
         Parameters
         ----------
         cfg : dict, optional
-            Configuation dictionary of the recipe.
+            Configuration dictionary of the recipe.
         **names : dict or Variable, optional
            Keyword arguments of the form `short_name=Variable_object` where
            `Variable_object` can be given as :obj:`dict` or :class:`Variable`.
 
         """
+        warnings.warn(
+            DEPRECATION_MSG.format(class_name=str(self.__class__)),
+            ESMValToolDeprecationWarning)
         self._dict = {}
 
         # Add variables from cfg file
@@ -100,7 +133,7 @@ def __init__(self, cfg=None, **names):
                     "import of variables does not work for chained "
                     "scripts (using 'ancestors' key)")
 
-        # Add costum variables
+        # Add custom variables
         self.add_vars(**names)
         if not self._dict:
             logger.warning("No variables found!")
@@ -128,7 +161,7 @@ def _add_to_dict(self, name, attr):
         self._dict[name] = attr
 
     def add_vars(self, **names):
-        """Add costum variables to the class.
+        """Add custom variables to the class.
 
         Parameters
         ----------
@@ -322,9 +355,14 @@ def vars_available(self, *args):
         return True
 
 
-class Datasets(object):
+class Datasets:
    """Class to easily access a recipe's datasets in a diagnostic script.
+ Note + ---- + This class has been deprecated in version 2.2 and will be removed two minor + releases later in version 2.4. + Examples -------- Get all variables of a recipe configuration `cfg`:: @@ -355,7 +393,7 @@ def __init__(self, cfg): Parameters ---------- cfg : dict, optional - Configuation dictionary of the recipe. + Configuration dictionary of the recipe. Raises ------ @@ -363,6 +401,9 @@ def __init__(self, cfg): If recipe configuration dictionary is not valid. """ + warnings.warn( + DEPRECATION_MSG.format(class_name=str(self.__class__)), + ESMValToolDeprecationWarning) self._iter_counter = 0 self._paths = [] self._data = {} @@ -694,6 +735,8 @@ def get_info_list(self, key, **dataset_info): Parameters ---------- + key: str + Desired dictionary key. **dataset_info: optional Keyword arguments describing the dataset, e.g. `dataset=CanESM2`, `exp=piControl` or `short_name=tas`. diff --git a/esmvaltool/diag_scripts/shared/_supermeans.py b/esmvaltool/diag_scripts/shared/_supermeans.py index f09c82d26a..7099ba4725 100644 --- a/esmvaltool/diag_scripts/shared/_supermeans.py +++ b/esmvaltool/diag_scripts/shared/_supermeans.py @@ -9,7 +9,7 @@ """ import os.path -import six + import cf_units import iris import iris.coord_categorisation @@ -20,14 +20,10 @@ class NoBoundsError(ValueError): """Return error and pass.""" - pass - class InvalidPeriod(ValueError): """Return error and pass.""" - pass - def get_supermean(name, season, data_dir, obs_flag=None): """Calculated supermeans from retrieved data, which are pickled Iris cubes. @@ -62,7 +58,7 @@ def get_supermean(name, season, data_dir, obs_flag=None): if cube.name() == 'unknown': cube.rename(str(cube.attributes['STASH'])) - cube = cubes.extract_strict(name_constraint) + cube = cubes.extract_cube(name_constraint) if season in ['djf', 'mam', 'jja', 'son']: supermeans_cube = periodic_mean(cube, period='season') @@ -255,7 +251,7 @@ def _add_categorised_coord(cube, units of the category value, typically 'no_unit' or '1'. 
""" # Interpret coord, if given as a name - if isinstance(from_coord, six.string_types): + if isinstance(from_coord, str): from_coord = cube.coord(from_coord) if cube.coords(name): @@ -301,7 +297,7 @@ def time_average_by(cube, periods='time'): idx_obj = [None] * cube.data.ndim idx_obj[cube.coord_dims('time')[0]] = slice( None) # [None, slice(None), None] == [np.newaxis, :, np.newaxis] - cube.data *= durations_cube.data[idx_obj] + cube.data *= durations_cube.data[tuple(idx_obj)] if periods == ['time']: # duration weighted averaging cube = cube.collapsed(periods, iris.analysis.SUM) @@ -315,7 +311,7 @@ def time_average_by(cube, periods='time'): if durations_cube.data.shape == (): cube.data /= durations_cube.data else: - cube.data /= durations_cube.data[idx_obj] + cube.data /= durations_cube.data[tuple(idx_obj)] # correct cell methods cube.cell_methods = orig_cell_methods diff --git a/esmvaltool/diag_scripts/shared/_validation.py b/esmvaltool/diag_scripts/shared/_validation.py index 3fd4fcc2b3..c922f91e65 100644 --- a/esmvaltool/diag_scripts/shared/_validation.py +++ b/esmvaltool/diag_scripts/shared/_validation.py @@ -1,30 +1,36 @@ -"""Load functions needed by diags with CONTROL and EXPERIMENT""" -import os +"""Load functions needed by diags with CONTROL and EXPERIMENT.""" import logging +import os + import iris -from esmvaltool.preprocessor import time_average +from esmvalcore.preprocessor import climate_statistics + from esmvaltool.diag_scripts.shared import select_metadata logger = logging.getLogger(os.path.basename(__file__)) -def get_control_exper_obs(short_name, input_data, cfg, cmip_type): +def get_control_exper_obs(short_name, input_data, cfg, cmip_type=None): """ - Get control, exper and obs datasets + Get control, exper and obs datasets. This function is used when running recipes that need a clear distinction between a control dataset, an experiment - dataset and have optional obs (OBS, obs4mips etc) datasets; + dataset and have optional obs (OBS, obs4MIPs etc) datasets; such recipes include recipe_validation, and all the autoassess ones; short_name: variable short name input_data: dict containing the input data info cfg: config file as used in this module + cmip_type: optional, CMIP project type (CMIP5 or CMIP6) """ - # select data per short name and CMIP type - dataset_selection = select_metadata( - input_data, short_name=short_name, project=cmip_type) + # select data per short name and optional CMIP type + if not cmip_type: + dataset_selection = select_metadata(input_data, short_name=short_name) + else: + dataset_selection = select_metadata(input_data, short_name=short_name, + project=cmip_type) # get the obs datasets if specified in recipe if 'observational_datasets' in cfg: @@ -36,29 +42,47 @@ def get_control_exper_obs(short_name, input_data, cfg, cmip_type): else: obs_selection = [] - # determine CONTROL and EXPERIMENT datasets - for model in dataset_selection: - if model['dataset'] == cfg['control_model']: - logger.info("Control dataset %s", model['dataset']) - control = model - elif model['dataset'] == cfg['exper_model']: - logger.info("Experiment dataset %s", model['dataset']) - experiment = model - + # print out OBS's if obs_selection: logger.info("Observations dataset(s) %s", [obs['dataset'] for obs in obs_selection]) + # make sure the chosen datasets for control and exper are available + alias_selection = [] + for model in dataset_selection: + try: + dataset_name = model['alias'].split("_")[1] + except IndexError: + dataset_name = model['alias'] + 
        alias_selection.append(dataset_name)
+
+    if cfg['control_model'] not in alias_selection:
+        raise ValueError(f"Control dataset {cfg['control_model']} "
+                         "not in datasets")
+
+    if cfg['exper_model'] not in alias_selection:
+        raise ValueError(f"Experiment dataset {cfg['exper_model']} "
+                         "not in datasets")
+
+    # pick control and experiment dataset
+    for model in dataset_selection:
+        if cfg['control_model'] in model['alias'].split("_"):
+            logger.info("Control dataset %s", model['alias'])
+            control = model
+        elif cfg['exper_model'] in model['alias'].split("_"):
+            logger.info("Experiment dataset %s", model['alias'])
+            experiment = model
+
     return control, experiment, obs_selection
 
 
 # apply supermeans: handy function that loads CONTROL, EXPERIMENT
-# and OBS (if any) files and applies time_average() to mean the cubes
+# and OBS (if any) files and applies climate_statistics() to mean the cubes
 def apply_supermeans(ctrl, exper, obs_list):
     """
-    Apply supermeans on data components ie MEAN on time
+    Apply supermeans on data components, i.e. MEAN on time.
 
-    This function is an extension of time_average() meant to ease the
+    This function is an extension of climate_statistics() meant to ease the
     time-meaning procedure when dealing with CONTROL, EXPERIMENT
     and OBS (if any) datasets.
     ctrl: dictionary of CONTROL dataset
@@ -71,14 +95,14 @@ def apply_supermeans(ctrl, exper, obs_list):
     exper_file = exper['filename']
     ctrl_cube = iris.load_cube(ctrl_file)
     exper_cube = iris.load_cube(exper_file)
-    ctrl_cube = time_average(ctrl_cube)
-    exper_cube = time_average(exper_cube)
+    ctrl_cube = climate_statistics(ctrl_cube)
+    exper_cube = climate_statistics(exper_cube)
     if obs_list:
         obs_cube_list = []
         for obs in obs_list:
             obs_file = obs['filename']
             obs_cube = iris.load_cube(obs_file)
-            obs_cube = time_average(obs_cube)
+            obs_cube = climate_statistics(obs_cube)
             obs_cube_list.append(obs_cube)
     else:
         obs_cube_list = None
diff --git a/esmvaltool/diag_scripts/shared/dataset_selection.ncl b/esmvaltool/diag_scripts/shared/dataset_selection.ncl
new file mode 100644
index 0000000000..0d29ae46e8
--- /dev/null
+++ b/esmvaltool/diag_scripts/shared/dataset_selection.ncl
@@ -0,0 +1,168 @@
+; #############################################################################
+; GENERAL ROUTINES FOR SELECTING DATASETS
+; #############################################################################
+; Please consider using or extending existing routines before adding new ones.
+; Check the header of each routine for documentation.
+;
+; Contents:
+;    function get_obs
+;    function get_mod
+; #############################################################################
+
+
+; #############################################################################
+undef("get_obs")
+function get_obs(names[*]:string, projects[*]:string, exclude[*]:string)
+;
+; Arguments
+;    names: names of datasets
+;    projects: project class of datasets
+;    exclude: optional list of observational datasets to explicitly exclude
+;
+; Return value
+;    An integer array containing all indices of observational datasets
+;    (project classes OBS, OBS6, obs4mips).
+;    ERA5 data are handled separately because they can be of project
+;    OBS6 or native6. ERA5 (if present) is also returned as an
+;    observational dataset.
+;
+; Description
+;    Checks the project class of all datasets and returns all indices
+;    of datasets that belong to one of the observational classes.
+;
+; Caveats
+;    ERA5 is the only "native6" observational dataset that can be found
+;    at the moment.
+;    An extension to include other native6 observations is needed once
+;    more such datasets are available.
+;
+; References
+;
+; Modification history
+;    20211018-lauer_axel: added option to exclude observational datasets
+;    20210415-lauer_axel: written.
+;
+local funcname, scriptname, i, j, idx, idxobs, idxexcl
+begin
+
+  funcname = "get_obs"
+  scriptname = "diag_scripts/shared/dataset_selection.ncl"
+  enter_msg(scriptname, funcname)
+
+  ; find indices of all OBS and obs4mips datasets
+  ; (treat ERA5 as special case)
+
+  idx = new(dimsizes(projects), integer)
+
+  do i = 0, dimsizes(projects) - 1
+    if (isStrSubset(str_lower(projects(i)), "obs"))
+      idx(i) = 1
+    else
+      idx(i) = 0
+    end if
+
+    ; ERA5 might be native6 and thus not yet included in the list
+    ; of observations
+    if (isStrSubset(str_lower(names(i)), "era5") .and. \
+        isStrSubset(str_lower(projects(i)), "native")) then
+      idx(i) = 1
+    end if
+
+    ; check for observational datasets to exclude (optional)
+
+    do j = 0, dimsizes(exclude) - 1
+      if (names(i) .eq. exclude(j)) then
+        idx(i) = 2
+      end if
+    end do
+  end do
+
+  idxobs = ind(idx .eq. 1)
+
+  if (all(ismissing(idxobs))) then
+    idxobs = -1
+  else
+    log_info("The following observational datasets are used for calculating" \
+             + " multiobs products: " + str_join(names(idxobs), ", "))
+    idxexcl = ind(idx .eq. 2)
+    if (.not.all(ismissing(idxexcl))) then
+      log_info("Observational datasets excluded by user: " \
+               + str_join(names(idxexcl), ", "))
+    end if
+  end if
+
+  leave_msg(scriptname, funcname)
+  return(idxobs)
+
+end
+
+; #############################################################################
+undef("get_mod")
+function get_mod(names[*]:string, projects[*]:string)
+;
+; Arguments
+;    names: names of datasets
+;    projects: project class of datasets
+;
+; Return value
+;    An integer array containing all indices of model datasets
+;    excluding MultiModelMean / MultiModelMedian (if present).
+;
+; Description
+;    Checks the project class of all datasets and returns all indices
+;    of datasets that do not belong to one of the observational classes and
+;    that are not "MultiModelMean" or "MultiModelMedian".
+;
+; Caveats
+;    The dataset names MultiModelMean and MultiModelMedian are hardcoded and
+;    need to be adjusted if the corresponding preprocessor creating these
+;    datasets should change.
+;
+; References
+;
+; Modification history
+;    20210415-lauer_axel: written.
+;
+local funcname, scriptname, i, idx, idxmod, idxobs, mm_ind1, mm_ind2
+begin
+
+  funcname = "get_mod"
+  scriptname = "diag_scripts/shared/dataset_selection.ncl"
+  enter_msg(scriptname, funcname)
+
+  idx = new(dimsizes(projects), integer)
+  idx = 1
+
+  ; flag all observations (if present)
+
+  idxobs = get_obs(names, projects, "")
+
+  if (idxobs(0) .ne. -1) then
+    idx(idxobs) = 0
+  end if
+
+  ; flag MultiModelMean and MultiModelMedian (if present)
+
+  mm_ind1 = ind(names .eq. "MultiModelMean")
+  mm_ind2 = ind(names .eq. "MultiModelMedian")
+
+  if (.not. ismissing(mm_ind1)) then
+    idx(mm_ind1) = 0
+  end if
+
+  if (.not. ismissing(mm_ind2)) then
+    idx(mm_ind2) = 0
+  end if
+
+  idxmod = ind(idx .eq. 1)
+
+  if (all(ismissing(idxmod))) then
+    idxmod = -1
+  else
+    log_info("The following model datasets have been found (excluding " \
+             + "MultiModelMean/MultiModelMedian): " \
+             + str_join(names(idxmod), ", "))
+  end if
+
+  leave_msg(scriptname, funcname)
+  return(idxmod)
+
+end
diff --git a/esmvaltool/diag_scripts/shared/ensemble.ncl b/esmvaltool/diag_scripts/shared/ensemble.ncl
index a7ed176459..32fdc2384b 100644
--- a/esmvaltool/diag_scripts/shared/ensemble.ncl
+++ b/esmvaltool/diag_scripts/shared/ensemble.ncl
@@ -34,7 +34,7 @@ function get_start_year(dataset_index[1]:numeric)
 ; References
 ;
 ; Modification history
-;    20140128-A_senf_da: written.
+;    20140128-senftleben_daniel: written.
 ;
 local funcname, scriptname, dataset_index
 begin
@@ -82,7 +82,7 @@ function get_end_year(dataset_index:numeric)
 ; References
 ;
 ; Modification history
-;    20140128-A_senf_da: written.
+;    20140128-senftleben_daniel: written.
 ;
 local funcname, scriptname, dataset_index
 begin
diff --git a/esmvaltool/diag_scripts/shared/external.R b/esmvaltool/diag_scripts/shared/external.R
new file mode 100644
index 0000000000..88fdd52d11
--- /dev/null
+++ b/esmvaltool/diag_scripts/shared/external.R
@@ -0,0 +1,49 @@
+# Wrappers to call external commands using system2
+# Currently implemented: cdo, nco
+
+cdo <-
+  function(command,
+           args = "",
+           input = "",
+           options = "",
+           output = "",
+           stdout = "",
+           noout = F) {
+    if (args != "") {
+      args <- paste0(",", args)
+    }
+    if (stdout != "") {
+      stdout <- paste0(" > '", stdout, "'")
+      noout <- T
+    }
+    if (input[1] != "") {
+      for (i in seq_along(input)) {
+        input[i] <- paste0("'", input[i], "'")
+      }
+      input <- paste(input, collapse = " ")
+    }
+    output0 <- output
+    if (output != "") {
+      output <- paste0("'", output, "'")
+    } else if (!noout) {
+      output <- tempfile()
+      output0 <- output
+    }
+    argstr <- paste0(
+      options, " ", command, args, " ", input, " ", output,
+      " ", stdout
+    )
+    print(paste("cdo", argstr))
+    ret <- system2("cdo", args = argstr)
+    if (ret != 0) {
+      stop(paste("Failed (", ret, "): cdo", argstr))
+    }
+    return(output0)
+  }
+
+nco <- function(cmd, argstr) {
+  ret <- system2(cmd, args = argstr)
+  if (ret != 0) {
+    stop(paste("Failed (", ret, "): ", cmd, " ", argstr))
+  }
+}
diff --git a/esmvaltool/diag_scripts/shared/external.jl b/esmvaltool/diag_scripts/shared/external.jl
new file mode 100644
index 0000000000..e32feb6c70
--- /dev/null
+++ b/esmvaltool/diag_scripts/shared/external.jl
@@ -0,0 +1,233 @@
+using PyPlot, PyCall
+using NetCDF
+
+"""
+    create_yaml(dict, filename)
+
+Write a dictionary to a YAML file.
+"""
+function create_yaml(dict::Dict{Any,Any}, filename::AbstractString)
+    os = open(filename, "w")
+    for (key, value) in dict
+        println(os, "? ", key)
+        indent = ": "
+        print_yaml(os, value, indent)
+    end
+    close(os)
+end
+
+function print_yaml(os::IOStream, obj::Dict{String,Any}, indent::String)
+    for (key, value) in obj
+        print(os, indent, key, ": ")
+        print_yaml(os, value, "  ")
+        indent = "  "
+    end
+end
+
+function print_yaml(os::IOStream, obj::String, indent::String)
+    println(os, obj)
+end
+
+function print_yaml(os::IOStream, obj::Array{String}, indent::String)
+    println(os)
+    for i = 1:length(obj)
+        println(os, indent, "- ", obj[i])
+    end
+end
+
+"""
+    plotmap(fname, var; ...)
+
+Easy plotting of gridded datasets on a global map.
+Plots variable `var` from a netcdf file `fname`.
+Optional arguments are available to control the details of the plot.
+
+# Arguments
+- 'fname::String': netcdf filename containing the data to plot
+- 'var::String': name of the variable to plot
+
+Optional:
+- 'lon::String': name of the lon variable ("lon")
+- 'lat::String': name of the lat variable ("lat")
+- 'lonb::String': name of the lon bounds variable ("lon_bnds")
+- 'latb::String': name of the lat bounds variable ("lat_bnds")
+- 'title': title string ("")
+- 'cstep': divisions of the colorbar axis ([])
+- 'cmap': colormap ("RdBu_r")
+- 'proj': projection. One of ["platecarree", "robinson", "mollweide"]. Defaults to "platecarree".
+- 'cpad': padding (shift) of the colorbar (0.08)
+- 'sub': matplotlib subplot option (e.g. "221" for the first panel of 2x2 subplots) ("111")
+- 'clabel': label of the colorbar (defaults to the units string read from the netcdf file)
+- 'cdir': direction of the colorbar. One of ["horizontal", "vertical"]. ("horizontal")
+- 'cscale': scaling of the colorbar (0.65)
+- 'cfs': colorbar ticks font size (12)
+- 'lfs': colorbar label font size (12)
+- 'tfs': title font size (14)
+- 'tpad': padding (shift) of the title string
+- 'tweight': weight of title font. One of ["normal", "bold", "heavy", "light", "ultrabold", "ultralight"]. ("normal")
+- 'grid': grid spacing (defaults to [60,30]). Set to empty [] to remove gridlines.
+- 'region::NTuple{4,Int64}': region to plot in format (lon1, lon2, lat1, lat2). Defaults to global.
+- 'style': one of ["pcolormesh", "contourf"]. Defaults to "pcolormesh".
+- 'levels': contour plot levels. Can be an array or a number of levels (auto)
+- 'extend': plot levels outside `levels` range. One of ["neither", "both", "min", "max"]. Default: "neither".
+
+Author: Jost von Hardenberg, 2019
+"""
+function plotmap(fname::String, var::String; lon="lon", lat="lat",
+                 lonb="lon_bnds", latb="lat_bnds", title="", cstep=[],
+                 cmap="RdBu_r", proj="", cpad=0.08, tpad=24, sub=111,
+                 clabel="NONE", cdir="horizontal", cscale=0.65, tfs=14,
+                 cfs=12, lfs=12, tweight="normal", grid=[60,30], region=(),
+                 style="pcolormesh", levels=0, extend="neither")
+
+# pcolormesh needs cell boundaries
+if style=="pcolormesh"
+    try
+        lonb=ncread(fname, lonb);
+        lonv=vcat(lonb[1,:],lonb[2,end])
+    catch
+        lonv=ncread(fname, lon);
+    end
+    try
+        latb=ncread(fname, latb);
+        latv=vcat(latb[1,:],latb[2,end])
+    catch
+        latv=ncread(fname, lat);
+    end
+else
+    lonv=ncread(fname, lon);
+    latv=ncread(fname, lat);
+end
+
+data=ncread(fname, var);
+units=ncgetatt(fname, var, "units")
+if clabel=="NONE" clabel=units end
+
+plotmap(lonv, latv, data; title=title, cstep=cstep, cmap=cmap, proj=proj,
+        cpad=cpad, tpad=tpad, sub=sub, clabel=clabel, cdir=cdir,
+        cscale=cscale, tfs=tfs, cfs=cfs, lfs=lfs, tweight=tweight, grid=grid,
+        region=region, style=style, levels=levels, extend=extend)
+
+end
+
+"""
+    plotmap(lon, lat, data; ...)
+
+Easy plotting of gridded datasets on a global map.
+Plots data in 2D array `data` with longitudes `lon` and latitudes `lat`.
+Optional arguments are available to control the details of the plot.
+
+# Arguments
+- 'data::Array{Float32,2}': data to plot. If a 3D array is passed, only the first frame is plotted: `data[:,:,1]`
+- 'lon::Array{Float64,1}': longitudes
+- 'lat::Array{Float64,1}': latitudes
+
+Optional:
+- 'title': title string ("")
+- 'cstep': divisions of the colorbar axis ([])
+- 'cmap': colormap ("RdBu_r")
+- 'proj': projection. One of ["platecarree", "robinson", "mollweide"]. Defaults to "platecarree".
+- 'cpad': padding (shift) of the colorbar (0.08)
+- 'sub': matplotlib subplot option (e.g. "221" for the first panel of 2x2 subplots) ("111")
"221" for the first panel of 2x2 subplots) ("111") +- 'clabel': label of the colorbar (defaults to the units string read from the netcdf file) +- 'cdir': direction of the colorbar. One of ["horizontal", "vertical"]. ("horizontal") +- 'cscale': scaling of the colorbar (0.65) +- 'cfs': colorbar ticks font size (12) +- 'lfs': colorbar label font size (12) +- 'tfs': title font size (14) +- 'tpad': padding (shift) of the title string +- 'tweight': weight of title font. One of [ 'normal" "bold" "heavy" "light" "ultrabold" "ultralight"]. ("normal") +- 'grid': grid spacing (defaults to [60,30]). set to empty [] to remove gridlines. +- 'region::NTuple{4,Int64}': region to plot in format (lon1, lon2, lat1, lat2). Defaults to global. +- 'style': one of ["pcolormesh" "contourf"]. Defaults to "pcolormesh". +- 'levels': contour plot levels. Can be an array or a number of levels (auto) +- 'extend': plot levels outside `levels` range. One of ["neither", "both", "min", "max"]. Default: "neither". + +Author: Jost von Hardenberg, 2019 +""" +function plotmap(lon, lat, data; title="", cstep=[], cmap="RdBu_r", proj="", + cpad=0.08, tpad=24, sub=111, clabel="", cdir="horizontal", + cscale=0.65, tfs=14, cfs=12, lfs=12, tweight="normal", + grid=[60,30], region=(), style="pcolormesh", levels=0, + extend="neither") + +dd = size(data) + +if length(dd)==3 data=data[:,:,1] end +if style=="pcolormesh" + if length(lon) in dd + #println("pcolormesh needs cell boundaries, reconstructing lon") + lonb=zeros(2,length(lon)) + lonb[1,2:end]=0.5*(lon[2:end]+lon[1:(end-1)]) + lonb[1,1]=lon[1]-(lon[2]-lon[1])*0.5 + lonb[2,end]=lon[end]+(lon[end]-lon[end-1])*0.5 + lon=vcat(lonb[1,:],lonb[2,end]) + end + if length(lat) in dd + #println("pcolormesh needs cell boundaries, reconstructing lat") + latb=zeros(2,length(lat)) + latb[1,2:end]=0.5*(lat[2:end]+lat[1:(end-1)]) + latb[1,1]=lat[1]-(lat[2]-lat[1])*0.5 + latb[2,end]=lat[end]+(lat[end]-lat[end-1])*0.5 + if latb[1,1]>89; latb[1,1]=90 ; end + if latb[1,1]<-89; latb[1,1]=-90 ; end + if latb[2,end]>89; latb[2,end]=90 ; end + if latb[2,end]<-89; latb[2,end]=-90 ; end + lat=vcat(latb[1,:],latb[2,end]) + end + if length(lon)==(dd[1]+1) data=data' end +else + if length(lon)==dd[1] data=data' end +end + +ccrs = pyimport("cartopy.crs") +cutil = pyimport("cartopy.util") + +if proj=="robinson" + proj=ccrs.Robinson() + dlabels=false +elseif proj == "mollweide" + proj=ccrs.Mollweide() + dlabels=false +else + proj=ccrs.PlateCarree() + dlabels=true +end + +ax = subplot(sub, projection=proj) +if length(region)>0 ax.set_extent(region, crs=ccrs.PlateCarree()) end +ax.coastlines() +xlocvec=vcat(-vcat(grid[1]:grid[1]:180)[end:-1:1], vcat(0:grid[1]:180)) +ylocvec=vcat(-vcat(grid[2]:grid[2]:90)[end:-1:1], vcat(0:grid[2]:90)) + +if dlabels + ax.gridlines(linewidth=1, color="gray", alpha=0.5, linestyle="--", + draw_labels=true, xlocs=xlocvec, ylocs=ylocvec) +else + ax.gridlines(linewidth=1, color="gray", alpha=0.5, linestyle="--", + xlocs=xlocvec, ylocs=ylocvec) +end + +if style=="contourf" + data_cyc, lon_cyc = cutil.add_cyclic_point(data, coord=lon) + if levels==0 + contourf(lon_cyc, lat, data_cyc, transform=ccrs.PlateCarree(), + cmap=cmap, extend=extend) + else + contourf(lon_cyc, lat, data_cyc, transform=ccrs.PlateCarree(), + cmap=cmap, levels=levels, extend=extend) + end +else + pcolormesh(lon, lat, data, transform=ccrs.PlateCarree(), cmap=cmap) +end + +if length(cstep)>0 clim(cstep[1],cstep[end]); end +if length(title)>1 PyPlot.title(title, pad=tpad, fontsize=tfs, weight=tweight) end 
+cbar=colorbar(orientation=cdir, extend="both", pad=cpad, label=clabel, + shrink=cscale) +cbar.set_label(label=clabel,size=lfs) +cbar.ax.tick_params(labelsize=cfs) +if length(cstep)>0 cbar.set_ticks(cstep) end +tight_layout() + +end + diff --git a/esmvaltool/diag_scripts/shared/io.py b/esmvaltool/diag_scripts/shared/io.py index 84cd96dbc0..f3e709bd48 100644 --- a/esmvaltool/diag_scripts/shared/io.py +++ b/esmvaltool/diag_scripts/shared/io.py @@ -2,6 +2,7 @@ import fnmatch import logging import os +from pprint import pformat import iris import numpy as np @@ -21,20 +22,24 @@ 'short_name', ] +iris.FUTURE.save_split_attrs = True + def _has_necessary_attributes(metadata, only_var_attrs=False, log_level='debug'): """Check if dataset metadata has necessary attributes.""" - keys_to_check = VAR_KEYS if only_var_attrs else NECESSARY_KEYS + output = True + keys_to_check = (VAR_KEYS + + ['short_name'] if only_var_attrs else NECESSARY_KEYS) for dataset in metadata: for key in keys_to_check: if key not in dataset: - getattr(logger, log_level)("Dataset '%s' does not have " - "necessary attribute '%s'", dataset, - key) - return False - return True + getattr(logger, log_level)( + "Dataset '%s' does not have necessary attribute '%s'", + dataset, key) + output = False + return output def get_all_ancestor_files(cfg, pattern=None): @@ -63,7 +68,7 @@ def get_all_ancestor_files(cfg, pattern=None): files = fnmatch.filter(files, pattern) files = [os.path.join(root, f) for f in files] ancestor_files.extend(files) - return ancestor_files + return sorted(ancestor_files) def get_ancestor_file(cfg, pattern): @@ -78,20 +83,20 @@ def get_ancestor_file(cfg, pattern): Returns ------- - str or None - Full path to the file or `None` if file not found. + str + Full path to the file. + + Raises + ------ + ValueError + No or more than one file found. """ files = get_all_ancestor_files(cfg, pattern=pattern) - if not files: - logger.warning( - "No file with requested name %s found in ancestor " - "directories", pattern) - return None if len(files) != 1: - logger.warning( - "Multiple files with requested pattern %s found (%s), returning " - "first appearance", pattern, files) + raise ValueError( + f"Expected to find exactly one ancestor file for pattern " + f"'{pattern}', got {len(files):d}:\n{pformat(files)}") return files[0] @@ -112,6 +117,11 @@ def netcdf_to_metadata(cfg, pattern=None, root=None): list of dict List of dataset metadata. + Raises + ------ + ValueError + Necessary attributes are missing. 
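+
+    Example
+    -------
+    Read the metadata of all netcdf files produced by ancestor diagnostics
+    (a minimal sketch; the pattern and variable name are illustrative)::
+
+        metadata = netcdf_to_metadata(cfg, pattern='*tas*.nc')
+        filenames = [info['filename'] for info in metadata]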
+ """ if root is None: all_files = get_all_ancestor_files(cfg, pattern) @@ -123,6 +133,7 @@ def netcdf_to_metadata(cfg, pattern=None, root=None): files = [os.path.join(base, f) for f in files] all_files.extend(files) all_files = fnmatch.filter(all_files, '*.nc') + all_files = sorted(all_files) # Iterate over netcdf files metadata = [] @@ -130,21 +141,21 @@ def netcdf_to_metadata(cfg, pattern=None, root=None): cube = iris.load_cube(path) dataset_info = dict(cube.attributes) for var_key in VAR_KEYS: - dataset_info[var_key] = getattr(cube, var_key) + dataset_info[var_key] = str(getattr(cube, var_key)) dataset_info['short_name'] = cube.var_name + dataset_info['standard_name'] = cube.standard_name dataset_info['filename'] = path + metadata.append(dataset_info) - # Check if necessary keys are available - if _has_necessary_attributes([dataset_info], log_level='warning'): - metadata.append(dataset_info) - else: - logger.warning("Skipping '%s'", path) + # Check if necessary keys are available + if not _has_necessary_attributes(metadata, log_level='error'): + raise ValueError("Necessary attributes are missing for metadata") return metadata def metadata_to_netcdf(cube, metadata): - """Convert list of metadata to netcdf files. + """Convert single metadata dictionary to netcdf file. Parameters ---------- @@ -153,97 +164,124 @@ def metadata_to_netcdf(cube, metadata): metadata : dict Metadata for the cube. + Raises + ------ + ValueError + Saving of cube not possible because of invalid metadata. + """ metadata = dict(metadata) - if not _has_necessary_attributes([metadata], 'error'): - logger.error("Cannot save cube %s", cube) - return + if not _has_necessary_attributes([metadata], log_level='error'): + raise ValueError(f"Cannot save cube {cube.summary(shorten=True)}") for var_key in VAR_KEYS: setattr(cube, var_key, metadata.pop(var_key)) cube.var_name = metadata.pop('short_name') + cube.standard_name = None if 'standard_name' in metadata: standard_name = metadata.pop('standard_name') try: cube.standard_name = standard_name except ValueError: - logger.debug("Got invalid standard_name '%s'", standard_name) + logger.warning( + "Got invalid standard_name '%s', setting it to 'None'", + standard_name) + cube.attributes['invalid_standard_name'] = standard_name for (attr, val) in metadata.items(): if isinstance(val, bool): metadata[attr] = str(val) cube.attributes.update(metadata) - save_iris_cube(cube, metadata['filename']) + iris_save(cube, metadata['filename']) + + +def iris_save(source, path): + """Save :mod:`iris` objects with correct attributes. + + Parameters + ---------- + source : iris.cube.Cube or iterable of iris.cube.Cube + Cube(s) to be saved. + path : str + Path to the new file. + + """ + if isinstance(source, iris.cube.Cube): + source.attributes['filename'] = path + else: + for cube in source: + cube.attributes['filename'] = path + iris.save(source, path) + logger.info("Wrote %s", path) def save_1d_data(cubes, path, coord_name, var_attrs, attributes=None): - """Save scalar data for multiple datasets. + """Save 1D data for multiple datasets. - Create 1D cube with the auxiliary dimension `dataset` and save scalar data - for every appearing dataset. + Create 2D cube with the dimensionsal coordinate ``coord_name`` and the + auxiliary coordinate ``dataset`` and save 1D data for every dataset given. + The cube is filled with missing values where no data exists for a dataset + at a certain point. + + Note + ---- + Does not check metadata of the ``cubes``, i.e. different names or units + will be ignored. 
 
     Parameters
     ----------
     cubes : dict of iris.cube.Cube
-        1D `iris.cube.Cube`s (values) and corresponding datasets (keys).
+        1D :class:`iris.cube.Cube` objects (values) and corresponding
+        dataset names (keys).
     path : str
         Path to the new file.
     coord_name : str
         Name of the coordinate.
     var_attrs : dict
-        Attributes for the variable (`short_name`, `long_name`, or `units`).
+        Attributes for the variable (``short_name``, ``long_name``, and
+        ``units``).
     attributes : dict, optional
         Additional attributes for the cube.
 
+    Raises
+    ------
+    ValueError
+        Empty list of cubes given or necessary variable attributes are
+        missing.
+
     """
     var_attrs = dict(var_attrs)
     if not cubes:
-        logger.error("No cubes given")
-        return
+        raise ValueError("Cannot save 1D data, no cubes given")
     if not _has_necessary_attributes(
             [var_attrs], only_var_attrs=True, log_level='error'):
-        logger.error("Cannot write file '%s'", path)
-        return
-    datasets = []
-    data = []
-    cubes = unify_1d_cubes(cubes, coord_name)
-    for (dataset, cube) in cubes.items():
-        datasets.append(dataset)
-        data.append(cube.data)
+        raise ValueError(
+            f"Cannot save 1D data to {path} because necessary variable "
+            f"attributes are missing")
+    datasets = list(cubes.keys())
+    cube_list = iris.cube.CubeList(list(cubes.values()))
+    cube_list = unify_1d_cubes(cube_list, coord_name)
+    data = [c.data for c in cube_list]
     dataset_coord = iris.coords.AuxCoord(datasets, long_name='dataset')
-    coord = cubes[list(cubes.keys())[0]].coord(coord_name)
+    coord = cube_list[0].coord(coord_name)
     if attributes is None:
         attributes = {}
     var_attrs['var_name'] = var_attrs.pop('short_name')
 
     # Create new cube
-    cube = iris.cube.Cube(
-        np.array(data),
-        aux_coords_and_dims=[(dataset_coord, 0), (coord, 1)],
-        attributes=attributes,
-        **var_attrs)
-    cube.attributes['filename'] = path
-    save_iris_cube(cube, path)
-
-
-def save_iris_cube(cube, path):
-    """Save `iris.cube.Cube`.
-
-    Parameters
-    ----------
-    cube : iris.cube.Cube
-        Cube to be saved.
-    path : str
-        Path to the new file.
-
-    """
-    iris.save(cube, path)
-    logger.info("Wrote %s", path)
+    cube = iris.cube.Cube(np.ma.array(data),
+                          aux_coords_and_dims=[(dataset_coord, 0),
+                                               (coord, 1)],
+                          attributes=attributes,
+                          **var_attrs)
+    iris_save(cube, path)
 
 
 def save_scalar_data(data, path, var_attrs, aux_coord=None, attributes=None):
     """Save scalar data for multiple datasets.
 
-    Create 1D cube with the auxiliary dimension `dataset` and save scalar data
-    for every appearing dataset.
+    Create 1D cube with the auxiliary dimension ``dataset`` and save scalar
+    data for every dataset given.
+
+    Note
+    ----
+    Missing values can be given as :obj:`numpy.nan`.
 
     Parameters
     ----------
@@ -252,18 +290,27 @@
     data : dict
         Scalar data (values) and corresponding datasets (keys).
     path : str
         Path to the new file.
     var_attrs : dict
-        Attributes for the variable (`short_name`, `long_name`, or `units`).
+        Attributes for the variable (``short_name``, ``long_name``, and
+        ``units``).
     aux_coord : iris.coords.AuxCoord, optional
         Optional auxiliary coordinate.
     attributes : dict, optional
         Additional attributes for the cube.
 
+    Raises
+    ------
+    ValueError
+        No data given or necessary variable attributes are missing.
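+
+    Example
+    -------
+    Save one scalar value per dataset (a minimal sketch; dataset names and
+    values are illustrative, missing values are given as ``np.nan``)::
+
+        data = {'CanESM2': 280.3, 'MIROC6': np.nan}
+        var_attrs = {
+            'short_name': 'tas',
+            'long_name': 'Near-Surface Air Temperature',
+            'units': 'K',
+        }
+        save_scalar_data(data, path, var_attrs)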
+ """ var_attrs = dict(var_attrs) + if not data: + raise ValueError("Cannot save scalar data, no data given") if not _has_necessary_attributes( [var_attrs], only_var_attrs=True, log_level='error'): - logger.error("Cannot write file '%s'", path) - return + raise ValueError( + f"Cannot save scalar data to {path} because necessary variable " + f"attributes are missing") dataset_coord = iris.coords.AuxCoord(list(data), long_name='dataset') if attributes is None: attributes = {} @@ -271,10 +318,8 @@ def save_scalar_data(data, path, var_attrs, aux_coord=None, attributes=None): coords = [(dataset_coord, 0)] if aux_coord is not None: coords.append((aux_coord, 0)) - cube = iris.cube.Cube( - list(data.values()), - aux_coords_and_dims=coords, - attributes=attributes, - **var_attrs) - cube.attributes['filename'] = path - save_iris_cube(cube, path) + cube = iris.cube.Cube(np.ma.masked_invalid(list(data.values())), + aux_coords_and_dims=coords, + attributes=attributes, + **var_attrs) + iris_save(cube, path) diff --git a/esmvaltool/diag_scripts/shared/iris_helpers.py b/esmvaltool/diag_scripts/shared/iris_helpers.py index a009e3ddf9..1f38014999 100644 --- a/esmvaltool/diag_scripts/shared/iris_helpers.py +++ b/esmvaltool/diag_scripts/shared/iris_helpers.py @@ -4,6 +4,8 @@ import iris import numpy as np +from cf_units import Unit +from iris.exceptions import CoordinateNotFoundError from ._base import group_metadata @@ -12,38 +14,42 @@ def _transform_coord_to_ref(cubes, ref_coord): """Transform coordinates of cubes to reference.""" - ref_coord = iris.coords.DimCoord.from_coord(ref_coord) + try: + # Convert AuxCoord to DimCoord if necessary and possible + ref_coord = iris.coords.DimCoord.from_coord(ref_coord) + except ValueError: + pass + if not np.array_equal(np.unique(ref_coord.points), np.sort( + ref_coord.points)): + raise ValueError( + f"Expected unique coordinate '{ref_coord.name()}', got " + f"{ref_coord}") coord_name = ref_coord.name() - cubes_iterable = enumerate(cubes) - new_cubes = list(range(len(cubes))) - if isinstance(cubes, dict): - cubes_iterable = cubes.items() - new_cubes = {} - for (key, cube) in cubes_iterable: + new_cubes = iris.cube.CubeList() + for cube in cubes: coord = cube.coord(coord_name) if not np.all(np.isin(coord.points, ref_coord.points)): raise ValueError( - "Coordinate '{}' of cube\n{}\nis not subset of reference " - "coordinate (longest coordinate in iterable of cubes)".format( - coord_name, cube)) + f"Coordinate {coord} of cube\n{cube}\nis not subset of " + f"reference coordinate {ref_coord}") new_data = np.full(ref_coord.shape, np.nan) indices = np.where(np.in1d(ref_coord.points, coord.points)) new_data[indices] = np.ma.filled(cube.data, np.nan) - new_cube = iris.cube.Cube( - np.ma.masked_invalid(new_data), - dim_coords_and_dims=[(ref_coord, 0)]) + new_cube = iris.cube.Cube(np.ma.masked_invalid(new_data)) + if isinstance(ref_coord, iris.coords.DimCoord): + new_cube.add_dim_coord(ref_coord, 0) + else: + new_cube.add_aux_coord(ref_coord, 0) for aux_coord in cube.coords(dim_coords=False): if aux_coord.shape in ((), (1, )): new_cube.add_aux_coord(aux_coord, []) new_cube.metadata = cube.metadata - new_cubes[key] = new_cube + new_cubes.append(new_cube) check_coordinate(new_cubes, coord_name) - logger.debug("Successfully unified 1D coordinates '%s' to %s", coord_name, + logger.debug("Successfully unified coordinate '%s' to %s", coord_name, ref_coord) logger.debug("of cubes") logger.debug(pformat(cubes)) - if isinstance(cubes, iris.cube.CubeList): - return 
iris.cube.CubeList(new_cubes)
     return new_cubes
 
@@ -52,7 +58,7 @@ def check_coordinate(cubes, coord_name):
 
     Parameters
     ----------
-    cubes : iterable or dict of iris.cube.Cube
+    cubes : iris.cube.CubeList
         Cubes to be compared.
     coord_name : str
         Name of the coordinate.
@@ -64,33 +70,33 @@
 
     Raises
     ------
+    iris.exceptions.CoordinateNotFoundError
+        Coordinate ``coord_name`` is not a coordinate of one of the cubes.
     ValueError
         Given coordinate differs for the input cubes.
 
     """
     coord = None
-    if isinstance(cubes, dict):
-        cubes = list(cubes.values())
     for cube in cubes:
         try:
             new_coord = cube.coord(coord_name)
-        except iris.exceptions.CoordinateNotFoundError:
-            raise ValueError("'{}' is not a coordinate of cube {}".format(
-                coord_name, cube))
+        except CoordinateNotFoundError:
+            raise CoordinateNotFoundError(
+                f"'{coord_name}' is not a coordinate of cube\n{cube}")
         if coord is None:
             coord = new_coord
         else:
            if new_coord != coord:
-                raise ValueError("Expected cubes with identical coordinates "
-                                 "'{}', got {} and {}".format(
-                                     coord_name, new_coord, coord))
+                raise ValueError(
+                    f"Expected cubes with identical coordinates "
+                    f"'{coord_name}', got {new_coord} and {coord}")
     logger.debug("Successfully checked coordinate '%s' of cubes", coord_name)
     logger.debug(pformat(cubes))
     return coord.points
 
 
 def convert_to_iris(dict_):
-    """Change all appearances of `short_name` to `var_name`.
+    """Change all appearances of ``short_name`` to ``var_name``.
 
     Parameters
     ----------
@@ -102,37 +108,75 @@
     dict
         Converted dictionary.
 
+    Raises
+    ------
+    KeyError
+        :obj:`dict` contains keys ``'short_name'`` **and** ``'var_name'``.
+
     """
     dict_ = dict(dict_)
     if 'short_name' in dict_:
+        if 'var_name' in dict_:
+            raise KeyError(
+                f"Cannot replace 'short_name' by 'var_name', dictionary "
+                f"already contains 'var_name' (short_name = "
+                f"'{dict_['short_name']}', var_name = '{dict_['var_name']}')")
         dict_['var_name'] = dict_.pop('short_name')
     return dict_
 
 
-def iris_project_constraint(projects, cfg, negate=False):
-    """Create `iris.Constraint` to select specific projects from data.
+def get_mean_cube(datasets):
+    """Get mean cube of a list of datasets.
+
+    Parameters
+    ----------
+    datasets : list of dict
+        List of datasets (given as metadata :obj:`dict`).
+
+    Returns
+    -------
+    iris.cube.Cube
+        Mean cube.
+
+    """
+    cubes = iris.cube.CubeList()
+    for dataset in datasets:
+        path = dataset['filename']
+        cube = iris.load_cube(path)
+        prepare_cube_for_merging(cube, path)
+        cubes.append(cube)
+    mean_cube = cubes.merge_cube()
+    if len(cubes) > 1:
+        mean_cube = mean_cube.collapsed(['cube_label'], iris.analysis.MEAN)
+    mean_cube.remove_coord('cube_label')
+    return mean_cube
+
+
+def iris_project_constraint(projects, input_data, negate=False):
+    """Create :class:`iris.Constraint` to select specific projects from data.
 
     Parameters
     ----------
     projects : list of str
         Projects to be selected.
-    cfg : dict
-        Diagnostic script configuration.
+    input_data : list of dict
+        List of dataset metadata used to extract all relevant datasets
+        belonging to given ``projects``.
     negate : bool, optional (default: False)
-        Negate constraint (`False`: select all elements that fit `projects`,
-        `True`: select all elements that do NOT fit `projects`).
+        Negate constraint (``False``: select all elements that fit
+        ``projects``, ``True``: select all elements that do **not** fit
+        ``projects``).
 
     Returns
     -------
     iris.Constraint
-        constraint for coordinate `dataset`.
+        Constraint for coordinate ``dataset``.
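+
+    Example
+    -------
+    Remove all OBS datasets from a cube with a ``dataset`` coordinate (a
+    minimal sketch; the project name is illustrative)::
+
+        constraint = iris_project_constraint(['OBS'], input_data,
+                                             negate=True)
+        model_cube = cube.extract(constraint)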
""" datasets = [] - grouped_data = group_metadata(cfg['input_data'].values(), 'project') + grouped_data = group_metadata(input_data, 'project') for project in projects: - for data in grouped_data.get(project, {}): - datasets.append(data['dataset']) + datasets.extend([d['dataset'] for d in grouped_data.get(project, [])]) def project_constraint(cell): """Constraint function.""" @@ -143,70 +187,105 @@ def project_constraint(cell): return iris.Constraint(dataset=project_constraint) -def match_dataset_coordinates(cubes): +def intersect_dataset_coordinates(cubes): """Compare dataset coordinates of cubes and match them if necessary. + Use intersection of coordinate 'dataset' of all given cubes and remove + elements which are not given in all cubes. + Parameters ---------- - cubes : iterable or dict of iris.cube.Cube + cubes : iris.cube.CubeList Cubes to be compared. Returns ------- - iterable or dict of iris.cube.Cube + iris.cube.CubeList Transformed cubes. + Raises + ------ + iris.exceptions.CoordinateNotFoundError + Coordinate ``dataset`` is not a coordinate of one of the cubes. + ValueError + At least one of the cubes contains a ``dataset`` coordinate with + duplicate elements or the cubes do not share common elements. + """ common_elements = None - cubes_iterable = cubes - if isinstance(cubes, dict): - cubes_iterable = cubes.values() # Get common elements for cube in cubes: + try: + coord_points = cube.coord('dataset').points + except CoordinateNotFoundError: + raise CoordinateNotFoundError( + f"'dataset' is not a coordinate of cube\n{cube}") + if len(set(coord_points)) != len(coord_points): + raise ValueError( + f"Coordinate 'dataset' of cube\n{cube}\n contains duplicate " + f"elements") if common_elements is None: - common_elements = set(cube.coord('dataset').points) + common_elements = set(coord_points) else: - common_elements = common_elements.intersection( - set(cube.coord('dataset').points)) + common_elements = common_elements.intersection(set(coord_points)) common_elements = list(common_elements) # Save new cubes - cubes_iterable = enumerate(cubes) - new_cubes = list(range(len(cubes))) - if isinstance(cubes, dict): - cubes_iterable = cubes.items() - new_cubes = {} - for (key, cube) in cubes_iterable: + new_cubes = iris.cube.CubeList() + for cube in cubes: cube = cube.extract(iris.Constraint(dataset=common_elements)) + if cube is None: + raise ValueError(f"Cubes {cubes} do not share common elements") sorted_idx = np.argsort(cube.coord('dataset').points) - new_cubes[key] = cube[sorted_idx] + new_cubes.append(cube[sorted_idx]) check_coordinate(new_cubes, 'dataset') logger.debug("Successfully matched 'dataset' coordinate to %s", sorted(common_elements)) logger.debug("of cubes") logger.debug(pformat(cubes)) - if isinstance(cubes, iris.cube.CubeList): - return iris.cube.CubeList(new_cubes) return new_cubes +def prepare_cube_for_merging(cube, cube_label): + """Prepare single :class:`iris.cube.Cube` in order to merge it later. + + Parameters + ---------- + cube : iris.cube.Cube + Cube to be pre-processed. + cube_label : str + Label for the new scalar coordinate ``cube_label``. 
+ + """ + cube.attributes = {} + cube.cell_methods = () + for coord in cube.coords(dim_coords=True): + coord.attributes = {} + for coord in cube.coords(dim_coords=False): + cube.remove_coord(coord) + cube_label_coord = iris.coords.AuxCoord(cube_label, + var_name='cube_label', + long_name='cube_label') + cube.add_aux_coord(cube_label_coord, []) + + def unify_1d_cubes(cubes, coord_name): - """Unify 1D cubes by transforming them to identical coordinate. + """Unify 1D cubes by transforming them to identical coordinates. - Use longest coordinate as reference and transform other cubes to it by - adding missing values. + Use union of all coordinates as reference and transform other cubes to it + by adding missing values. Parameters ---------- - cubes : iterable or dict of iris.cube.Cube + cubes : iris.cube.CubeList Cubes to be processed. coord_name : str Name of the coordinate. Returns ------- - iterable or dict of iris.cube.Cube + iris.cube.CubeList Transformed cubes. Raises @@ -217,26 +296,74 @@ def unify_1d_cubes(cubes, coord_name): """ ref_coord = None - cubes_iterable = cubes - if isinstance(cubes, dict): - cubes_iterable = cubes.values() # Get reference coordinate - for cube in cubes_iterable: + for cube in cubes: if cube.ndim != 1: - raise ValueError("Dimension of cube {} is not 1".format(cube)) + raise ValueError(f"Dimension of cube\n{cube}\nis not 1") try: new_coord = cube.coord(coord_name) - except iris.exceptions.CoordinateNotFoundError: - raise ValueError("'{}' is not a coordinate of cube {}".format( - coord_name, cube)) + except CoordinateNotFoundError: + raise CoordinateNotFoundError( + f"'{coord_name}' is not a coordinate of cube\n{cube}") + if not np.array_equal(np.unique(new_coord.points), + np.sort(new_coord.points)): + raise ValueError( + f"Coordinate '{coord_name}' of cube\n{cube}\n is not unique, " + f"unifying not possible") if ref_coord is None: ref_coord = new_coord else: - if ref_coord.shape[0] < new_coord.shape[0]: - ref_coord = new_coord + new_points = np.union1d(ref_coord.points, new_coord.points) + ref_coord = ref_coord.copy(new_points) if coord_name == 'time': - iris.util.unify_time_units(cubes_iterable) + iris.util.unify_time_units(cubes) # Transform all cubes return _transform_coord_to_ref(cubes, ref_coord) + + +def unify_time_coord(cube, target_units='days since 1850-01-01 00:00:00'): + """Unify time coordinate of cube in-place. + + Parameters + ---------- + cube: iris.cube.Cube + Cube whose time coordinate is transformed in-place. + target_units: str or cf_units.Unit, optional + Target time units. + + Raises + ------ + iris.exceptions.CoordinateNotFoundError + Cube does not contain coordinate ``time``. 
+ + """ + if not cube.coords('time'): + raise CoordinateNotFoundError( + f"Coordinate 'time' not found in cube " + f"{cube.summary(shorten=True)}") + + # Convert points and (if possible) bounds to new units + target_units = Unit(target_units) # works if target_units already is Unit + time_coord = cube.coord('time') + new_points = target_units.date2num( + time_coord.units.num2date(time_coord.points)) + if time_coord.bounds is None: + new_bounds = None + else: + new_bounds = target_units.date2num( + time_coord.units.num2date(time_coord.bounds)) + + # Create new coordinate and add it to the cube + new_time_coord = iris.coords.DimCoord( + new_points, + bounds=new_bounds, + var_name='time', + standard_name='time', + long_name='time', + units=target_units, + ) + coord_dims = cube.coord_dims('time') + cube.remove_coord('time') + cube.add_dim_coord(new_time_coord, coord_dims) diff --git a/esmvaltool/diag_scripts/shared/latlon.ncl b/esmvaltool/diag_scripts/shared/latlon.ncl index fe9be7684c..429e6cee60 100644 --- a/esmvaltool/diag_scripts/shared/latlon.ncl +++ b/esmvaltool/diag_scripts/shared/latlon.ncl @@ -49,7 +49,7 @@ function roi(latlon_dat[4]:float, ; References ; ; Modification history -; 20140129-A_gott_kl: written. +; 20140129-gottschaldt_klaus-dirk: written. ; local funcname, scriptname, latlon_cases, coord_checks, datmin, \ datmax, roimin, roimax, latlon_dat, latlon_roi, latmin, latmax, lonmin, \ @@ -190,8 +190,8 @@ function extract_area(index[1]:integer, ; ; References ; -; Modification history: -; 2013????-A_gott_kl: written. +; Modification history +; 2013????-gottschaldt_klaus-dirk: written. ; local funcname, scriptname, cn, cnt, datasetfile, fm, val begin @@ -232,8 +232,8 @@ function gridcell_area(deltax[1]: numeric, ; Description ; Calculates the area of a grid cell on the sphere. ; -; Modification history: -; 20121211-A_righ_ma: written. +; Modification history +; 20121211-righi_mattia: written. ; local funcname, scriptname, deg2rad begin @@ -273,8 +273,8 @@ function map_area(lat[*]:numeric, ; Assumes a constant resolution in longitude. ; ; Modification history -; 20140819-A_righ_ma: modified to support non-global input. -; 20121211-A_righ_ma: written. +; 20140819-righi_mattia: modified to support non-global input. +; 20121211-righi_mattia: written. ; local funcname, scriptname, loclat, loclon, nlat, lati, xresol begin @@ -374,7 +374,7 @@ function area_operations(field:numeric, ; average/sum is possible in this case. ; ; Modification history -; 20140116-A_righ_ma: written. +; 20140116-righi_mattia: written. ; local funcname, scriptname, rank, dimnames, wgt, conwgt, f_latmin, f_latmax, \ f_lonmin, f_lonmax, delta, idx_latmin, idx_latmax, latidx, idx_lonmin, \ @@ -586,9 +586,10 @@ function select_region(region:string) ; name as an attribute. ; ; Modification history -; 20141205-A_gott_kl: adjusted names to Righi et al. (2015). -; 20140410-A_fran_fr: extended to midlat, equatorial and polar regions. -; 20140129-A_fran_fr: written. +; 20190405-righi_mattia: change region names to match provenance. +; 20141205-gottschaldt_klaus-dirk: adjusted names to Righi et al. (2015). +; 20140410-winterstein_franziska: extended to midlat, equatorial and polar. +; 20140129-winterstein_franziska: written. 
; local funcname, scriptname, region begin @@ -597,93 +598,93 @@ begin scriptname = "diag_scripts/shared/latlon.ncl" enter_msg(scriptname, funcname) - if (region.eq."Global") then + if (region.eq."global") then region_array = (/-90., 90., 0., 360./) - region_array@name = "Glob" + region_array@name = "Global" leave_msg(scriptname, funcname) return(region_array) end if - if (region.eq."Tropics") then + if (region.eq."trop") then region_array = (/-20., 20., 0., 360./) - region_array@name = "Trop" + region_array@name = "Tropics" leave_msg(scriptname, funcname) return(region_array) end if - if (region.eq."NH extratropics") then + if (region.eq."nhext") then region_array = (/20., 90., 0., 360./) - region_array@name = "NHext" + region_array@name = "Northern extratropics" leave_msg(scriptname, funcname) return(region_array) end if - if (region.eq."SH extratropics") then + if (region.eq."shext") then region_array = (/-90., -20., 0., 360./) - region_array@name = "SHext" + region_array@name = "Southern extratropics" leave_msg(scriptname, funcname) return(region_array) end if - if (region.eq."NH equatorial") then + if (region.eq."nhtrop") then region_array = (/0., 30., 0., 360./) - region_array@name = "NHtrop" + region_array@name = "Northern tropics" leave_msg(scriptname, funcname) return(region_array) end if - if (region.eq."SH equatorial") then + if (region.eq."shtrop") then region_array = (/-30., 0., 0., 360./) - region_array@name = "SHtrop" + region_array@name = "Southern tropics" leave_msg(scriptname, funcname) return(region_array) end if - if (region.eq."Northern Hemisphere") then + if (region.eq."nh") then region_array = (/20., 90., 0., 360./) - region_array@name = "NH" + region_array@name = "Northern hemisphere" leave_msg(scriptname, funcname) return(region_array) end if - if (region.eq."Southern Hemisphere") then + if (region.eq."sh") then region_array = (/-90., -20., 0., 360./) - region_array@name = "SH" + region_array@name = "Southern hemisphere" leave_msg(scriptname, funcname) return(region_array) end if - if (region.eq."NH midlatitudes") then + if (region.eq."nhmidlat") then region_array = (/35., 60., 0., 360./) - region_array@name = "NHmidlat" + region_array@name = "Northern midlatitudes" leave_msg(scriptname, funcname) return(region_array) end if - if (region.eq."SH midlatitudes") then + if (region.eq."shmidlat") then region_array = (/-60., -35., 0., 360./) - region_array@name = "SHmidlat" + region_array@name = "Southern midlatitudes" leave_msg(scriptname, funcname) return(region_array) end if - if (region.eq."Arctic") then + if (region.eq."nhpolar") then region_array = (/60., 90., 0., 360./) - region_array@name = "NHpolar" + region_array@name = "Northern polar" leave_msg(scriptname, funcname) return(region_array) end if - if (region.eq."Antarctic") then + if (region.eq."shpolar") then region_array = (/-90., -60., 0., 360./) - region_array@name = "SHpolar" + region_array@name = "Southern polar" leave_msg(scriptname, funcname) return(region_array) end if - if (region.eq."Equatorial") then + if (region.eq."eq") then region_array = (/-10., 10., 0., 360./) - region_array@name = "EQ" + region_array@name = "Equatorial" leave_msg(scriptname, funcname) return(region_array) end if @@ -713,8 +714,8 @@ function make_latlon2D(var[1]:string, ; Get 2d lat & lon coordinates for variables (see e.g. 
; http://www.ncl.ucar.edu/Applications/Scripts/ice_3.ncl) ; -; Modification history: -; 20141023-A_vanu_be: written based on code in SeaIce_polcon_diff +; Modification history +; 20141023-vanulft_bert: written based on code in SeaIce_polcon_diff ; local funcname, scriptname, area_lat, area_lon, area_temp, out_field, rank begin @@ -813,8 +814,8 @@ function cdo_remapdis(var[1]:string, ; Caveats ; Climate Data Operators (CDO) package must be installed on the system ; -; Modification history: -; 20170120-A_senf_da: written. +; Modification history +; 20170120-senftleben_daniel: written. ; local regfile_in_tmp, regfile_out_tmp, lat, lon, gAtt, field_out, quote begin diff --git a/esmvaltool/diag_scripts/shared/mder.ncl b/esmvaltool/diag_scripts/shared/mder.ncl new file mode 100644 index 0000000000..7ae466f598 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/mder.ncl @@ -0,0 +1,971 @@ +; ############################################################################# +; shared/mder.ncl +; ############################################################################# +; AUXILIARY FUNCTIONS FOR MULTIPLE DIAGNOSTIC ENSEMBLE REGRESSION +; ############################################################################# +; Please consider using or extending existing routines before adding new +; ones. Check the header of each routine for documentation. +; +; Contents: +; function get_obs_list: get list of OBS datasets. +; function diag_file_name: filename of calculated diagnostic. +; function integrate_time_units: temporally integrates units. +; function mder_output_desired: check if MDER output is desired. +; function reg_multlin_stats_rap: customized version of NCL's +; reg_multlin_stats. +; function regress_stepwise: does stepwise regression on diagnostics. +; +; ############################################################################# + + +; ############################################################################# +undef("get_obs_list") +function get_obs_list(input_list: list) +; +; Arguments: +; input_list: List of input metadata +; +; Return value: +; list: All OBS and OBS6 datasets. +; +; Description: +; Returns all OBS and OBS6 datasets found in the input metadata. +; +; Modification history: +; 20191120-schlund_manuel: written. +; +local atts, obs, obs6 + +begin + + funcname = "get_obs_list" + scriptname = "diag_scripts/shared/mder.ncl" + enter_msg(scriptname, funcname) + + atts := True + atts@project = "OBS" + obs = select_metadata_by_atts(input_list, atts) + atts@project = "OBS6" + obs6 = select_metadata_by_atts(input_list, atts) + do iobs = 0, ListCount(obs6) - 1 + ListAppend(obs, obs6[iobs]) + end do + + leave_msg(scriptname, funcname) + return(obs) + +end + + +; ############################################################################# +undef("diag_file_name") +function diag_file_name(filename[1]: string, \ + opt[1]: logical) +; +; Arguments: +; filename: Name of the current file. +; opt: options with the optional attributes +; opt@wdiag (diagnostic name). +; opt@period_avg (year-, month- or season name). +; opt@level (specify plev range). +; opt@region (specify lat and/or lon range). +; +; Return value: +; file_name: returns the file name of the calculated climatological +; diagnostic. +; +; Description: +; Provides a file name for the calculation results of diagnostics. +; +; Modification history: +; 20180717-schlund_manuel: ported to v2.0. +; 20140723-wenzel_sabrina: written.
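+; +; Example (hypothetical attribute values, illustrating the naming scheme; +; the exact output depends on basename()): +; opt = True +; opt@wdiag = "ECS" +; outname = diag_file_name("tas_Amon_MODEL.nc", opt) +; -> "tas_Amon_MODEL_ECS.nc"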
+; +local outname + +begin + + funcname = "diag_file_name" + scriptname = "diag_scripts/shared/mder.ncl" + enter_msg(scriptname, funcname) + + ; File name + outname = basename(systemfunc("basename " + filename)) + + ; Add extra info to the file + if (isatt(opt, "wdiag")) then + outname = outname + "_" + opt@wdiag + end if + if (isatt(opt, "period_avg")) then + if (dimsizes(opt@period_avg) .eq. 2) then + outname = outname + "_" + str_sub_str(opt@period_avg(0) + \ + opt@period_avg(1), " ", "") + else + outname = outname + "_" + opt@period_avg + end if + end if + if (isatt(opt, "level")) then + if (dimsizes(opt@level) .eq. 2) then + outname = outname + "_" + str_sub_str(opt@level(0) + \ + opt@level(1), " ", "") + else + outname = outname + "_" + str_sub_str(opt@level, " ", "") + end if + end if + if (isatt(opt, "region")) then + if (dimsizes(opt@region) .eq. 4) then + outname = outname + "_" + str_sub_str(opt@region(0) + opt@region(1) + \ + opt@region(2) + opt@region(3), \ + " ", "") + else if (dimsizes(opt@region) .eq. 2) then + outname = outname + "_" + str_sub_str(opt@region(0) + opt@region(1), \ + " ", "") + else + outname = outname + "_" + str_sub_str(opt@region, " ", "") + end if + end if + end if + outname = outname + ".nc" + + ; Return file name + leave_msg(scriptname, funcname) + return(outname) + +end + + +; ############################################################################# +undef("integrate_time_units") +function integrate_time_units(units[1] : string, \ + opt[1] : integer) +; +; Arguments: +; units: string containing the units to be integrated. +; opt: integer determining how to handle multiple time units. +; opt < 0: remove the last time units. +; opt = 0: throw error if there are multiple time units. +; opt = n > 1: remove n-th time units. +; +; Return value: +; New units. +; +; Description: +; Temporally integrates units, i.e. removes reciprocal time units. +; +; Caveat: +; Time units with exponents other than -1 cannot be processed, e.g. +; "m s-2" or "m/s2". +; +; Modification history: +; 20180927-schlund_manuel: written. +; +local allowed_time_units, allowed_prefixes, allowed_suffixes, iu, ip, \ + matches, dim_time, all_matches, all_lengths + +begin + + funcname = "integrate_time_units" + scriptname = "diag_scripts/shared/mder.ncl" + enter_msg(scriptname, funcname) + + ; Allowed time units + allowed_time_units = (/"s", "min", "h", "hr", "d", "day", "mon", "y", "yr", \ + "dec"/) + allowed_prefixes = (/"/"/) + allowed_suffixes = (/"-1", "^-1"/) + + ; Find units by prefix + do iu = 0, dimsizes(allowed_time_units) - 1 + do ip = 0, dimsizes(allowed_prefixes) - 1 + time = allowed_prefixes(ip) + allowed_time_units(iu) + matches := str_index_of_substr(units, time, 0) + if (.not. all(ismissing(matches))) then + dim_time := new(dimsizes(matches), integer) + dim_time = strlen(time) + if (.not. isvar("all_matches")) then + all_matches = matches + all_lengths = dim_time + else + all_matches := array_append_record(all_matches, matches, 0) + all_lengths := array_append_record(all_lengths, dim_time, 0) + end if + end if + end do + end do + + ; Find units by suffix + do iu = 0, dimsizes(allowed_time_units) - 1 + do is = 0, dimsizes(allowed_suffixes) - 1 + time = allowed_time_units(iu) + allowed_suffixes(is) + matches := str_index_of_substr(units, time, 0) + if (.not. all(ismissing(matches))) then + dim_time := new(dimsizes(matches), integer) + dim_time = strlen(time) + if (.not.
isvar("all_matches")) then + all_matches = matches + all_lengths = dim_time + else + all_matches := array_append_record(all_matches, matches, 0) + all_lengths := array_append_record(all_lengths, dim_time, 0) + end if + end if + end do + end do + + ; Process different options + if (isvar("all_matches")) then + if (opt .lt. 0) then + max_ind = max(all_matches) + else if (opt .eq. 0) then + max_ind = all_matches(0) + if (dimsizes(all_matches) .gt. 1) then + if (.not. all(all_matches .eq. max_ind)) then + error_msg("f", scriptname, funcname, "temporal unit integration " + \ + "failed for units " + units + ", ambiguous time " + \ + "units given. Use different 'opt' parameter to enable " + \ + "this conversion.") + end if + end if + else + unique_matches := get_unique_values(all_matches) + qsort(unique_matches) + if (opt .gt. dimsizes(unique_matches)) then + error_msg("w", scriptname, funcname, "cannot remove time units " + \ + "at position " + opt + ", removing last appearance") + max_ind = max(all_matches) + else + max_ind = unique_matches(opt - 1) + end if + end if + end if + + ; Remove time units + max_len = max(where(all_matches .eq. max_ind, all_lengths, 0)) + if (max_len .eq. strlen(units)) then + new_units = "1" + else + new_units = stringtochar(units) + new_units := chartostring(new_units(:max_ind - 1)) + \ + chartostring(new_units(max_ind + max_len:)) + end if + else + error_msg("w", scriptname, funcname, "no reciprocal time units " + \ + "found in " + units) + end if + + ; Return file name + leave_msg(scriptname, funcname) + return(str_squeeze(new_units)) + +end + + +; ############################################################################# +undef("mder_output_desired") +function mder_output_desired() +; +; Return value: +; True if mder output is desired, False if not. +; +; Description: +; Checks if MDER output of diagnostic is desired. +; +; Modification history: +; 20180906-schlund_manuel: written. +; +local out + +begin + + funcname = "mder_output_desired" + scriptname = "diag_scripts/shared/mder.ncl" + enter_msg(scriptname, funcname) + + ; Check diag_script_info@wdiag + if (isatt(diag_script_info, "wdiag") .and. \ + .not. any(diag_script_info@wdiag .eq. "")) then + if (.not. isatt(diag_script_info, "ref_dataset")) then + error_msg("w", scriptname, funcname, "Skipping MDER output: " + \ + "attribute 'ref_dataset' not given in recipe") + out = False + else + log_info("Writing MDER output with the following diagnostics: " + \ + diag_script_info@wdiag) + out = True + out@work_dir = config_user_info@work_dir + system("mkdir -p " + out@work_dir) + end if + else + log_info("Skipping MDER output: 'wdiag' is not given in recipe") + out = False + end if + + ; Return boolean + leave_msg(scriptname, funcname) + return(out) + +end + + +; ############################################################################# +undef("reg_multlin_stats_rap") +function reg_multlin_stats_rap(Y[*]: numeric, \ + XP:numeric, \ + opt) +; +; Arguments: +; Y: dependent variable (size NY). Missing values (_FillValue) are not +; allowed. +; XP: one [*] or more [*][*] independent variables. Missing values +; (_FillValue) are not allowed. The size of the leftmost dimension of +; XP must be the same size as Y[*]. The rightmost dimension for [*][*] +; contains the independent variables. +; opt: options [type logical]. +; opt = False: no options. +; opt = True. +; opr@print_data = True: print input Y and XP in table form. +; opr@print_anova= True: print ANOVA information. 
+; +; Return value: +; See reg_multlin_stats. +; +; Description: +; This function is based on the NCL-function "reg_multlin_stats". The +; original can be found in: +; "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl". +; See: +; Wilks, D.S. (2006): Statistical Methods in the Atmospheric Sciences +; Academic Press (2nd Edition). +; Davis, J.C. (2002): Statistics and Data Analysis in Geology, Wiley +; (3rd Edition), 462-470. +; Very readable discussion: +; http://reliawiki.org/index.php/Multiple_Linear_Regression_Analysis. +; +; Caveats: +; In this version the following attributes are not available: +; b@stderr = stderr. +; b@tval = tval. +; b@pval = pval. +; +; Modification history: +; 20180808-schlund_manuel: ported to v2.0. +; 2013????-wenzel_sabrina: written. +; +local dimXP, N, NP, NY, M, T, Y, X, b, B, rankXP, Yavg, Yvar, Ystd, Xavg, \ + Xvar, Xstd, Yest, n, m, Yres, SST, SSE, SSR, MST, MSE, MSR, SE, Frat, \ + r2, r2a, r, fuv, XXt, XXTi, varcovx, df, stderr, tval, pval, opt_wm, \ + fmt_XXt, fmt_XXti, fmt_covx, opt_pd, fmt_data + +begin + + funcname = "reg_multlin_stats_rap" + scriptname = "diag_scripts/shared/mder.ncl" + enter_msg(scriptname, funcname) + + ; Arrays + dimXP = dimsizes(XP) + NX = dimXP(0) ; number of rows ('observations') + N = dimsizes(Y) + rankXP = dimsizes(dimXP) + + ; Check input + if (N .ne. NX) then + error_msg("f", scriptname, funcname, "number of rows of Y and XP must " + \ + "match, NY = " + N + ", NXP = " + NX) + end if + if (rankXP.gt.2) then + error_msg("f", scriptname, funcname, "rank of XP > 2: rankXP = " + \ + rankXP) + end if + ; if (isatt(Y, "_FillValue") .and. any(ismissing(Y))) then + ; error_msg("f", scriptname, funcname, "Y has missing values, not allowed") + ; end if + ; if (isatt(XP, "_FillValue") .and. any(ismissing(XP))) then + ; error_msg("f", scriptname, funcname, "XP has missing values, " + \ + ; "not allowed") + ; end if + + ; Get number of predictor variables + if (rankXP .eq. 2) then + NP = dimXP(1) + else + NP = 1 + end if + + ; Print data + if (opt .and. isatt(opt, "print_data") .and. opt@print_data) then + log_info("") + opt_pd = True + opt_pd@title = "----- reg_multlin_stats: Y, XP -----" + data = new((/N, NP + 1/), typeof(Y)) + data(:, 0) = (/Y/) + + if (NP.eq.1) then + data(:, 1) = (/XP/) + else + data(:, 1:) = (/XP/) + end if + + fmt_data = "f11.2" + if (isatt(opt, "fmt_data")) then + fmt_data = opt@fmt_data ; e.g.: "f8.3", "e13.5", etc. + end if + write_matrix(data, (NP + 1) + fmt_data, opt_pd) + log_info("") + delete(data) + end if + + ; Create the necessary arrays. X is a "design matrix": + ; http://en.wikipedia.org/wiki/Design_matrix + ; Most multiple regression models include a constant term. This ensures that + ; the model will be "unbiased", i.e. the mean of the residuals will be + ; exactly zero. + + ; Extra column for design matrix (required ordering for reg_multlin) + M = NP + 1 + X = new((/M, N/), typeof(XP), getFillValue(XP)) + X(0, :) = 1.0 + + if (NP .eq.
1) then + X(1, :) = XP ; One predictor + else + do m = 0, NP - 1 + X(m + 1, :) = XP(:, m) ; m refers to predictor variables + end do + end if + + ; Calculate the partial regression coefficients: b -> beta_hat + ; b = (X'X)^-1 X'y (theoretically) + ; http://www.ncl.ucar.edu/Document/Functions/Built-in/reg_multlin.shtml + + ; Partial regression coef (unstandardized...depends on units) + b = reg_multlin(Y, X, False) + delete(b@constant) + + ; Assorted calculations + Yavg = avg(Y) + Yvar = variance(Y) ; Unbiased estimate of the variance + Ystd = sqrt(Yvar) + + Xavg = new(NP, typeof(b), "No_FillValue") + Xvar = new(NP, typeof(b), "No_FillValue") + Xstd = new(NP, typeof(b), "No_FillValue") + + ; Standardized regression coef.: describes what the partial regression coef. + ; would equal if all variables had the same standard deviation. + bstd = new(M, typeof(b), "No_FillValue") + + do n = 0, NP - 1 + Xavg(n) = avg(X(n + 1, :)) + Xvar(n) = variance(X(n + 1, :)) + Xstd(n) = sqrt(Xvar(n)) + bstd(n + 1) = b(n + 1) * Xstd(n) / Ystd ; don't do b(0) + end do + + ; Calculate model estimates: Yest = b(0) + b(1)*X1 + b(2)*X2 + ... + Yest = new(N, typeof(Y), getFillValue(Y)) + do n = 0, N - 1 + Yest(n) = b(0) + sum(b(1:) * X(1:, n)) ; array syntax + end do + + ; ANOVA table info. (Eg, Wilks pg 185 & pg 197) + ; Sums of squares, degrees of freedom, mean squares, F statistic + ; Other statistics are also calculated. + ; http://reliawiki.org/index.php/ + ; ANOVA_Calculations_in_Multiple_Linear_Regression + + ; Mean squares are obtained by dividing the sum of squares by their + ; associated degrees of freedom. + dof = N - NP - 1 + Yres = Yest - Y ; residuals (array operation) + + ; SS ==> Sum of Squares + SST = sum((Y - Yavg) ^ 2) ; total SS [dof = N - 1] + SSR = sum((Yest - Yavg) ^ 2) ; regression SS [dof = NP] + SSE = sum(Yres ^ 2) ; residual SS [dof = N - NP - 1] + + MST = SST / (N - 1) ; Mean SST [dof = N - 1] + MSR = SSR / NP ; Mean SSR [dof = NP] + MSE = SSE / dof ; Mean SSE [dof = (N - NP - 1)] + + RSE = sqrt(MSE) ; residual standard error + + ; F-Test is an overall (global) test of the model's fit. If at least one + ; variable has a significant coefficient, then the model fit should be + ; significant. The F-statistic might be interpreted as the variation + ; explained by the regression relative to the variation not explained by the + ; regression. + ; R dof=(M,N-NP-1) + Frat = MSR / MSE + + ; r2 = coefficient of determination: the square of the Pearson + ; correlation coefficient between the observed and modeled (predicted) + ; data values of the dependent variable. It is another measure of + ; 'goodness of fit', biased high, particularly for small N. + r2 = SSR / SST + + r2a = r2 - (1 - r2) * NP / tofloat(dof) ; adjusted r2... better for small N + r = sqrt(r2) ; multiple (overall) correlation + fuv = 1.0 - r2 ; fraction of variance unexplained + + ; Determine (a) standard error of coefficients; (b) t-values; (c) p-values + + XXt = X#transpose(X) ; (M,M); opposite of text books + XXti = inverse_matrix(XXt) + varcovx = MSE*XXti ; variance-covariance matrix + ; of estimated regression coefficients + + ; Debug log + if (config_user_info@log_level .eq. "debug") then + log_debug("") + opt_wm = True + opt_wm@title = "--------- XXt ---------" + fmt_XXt = "f11.2" + if (isatt(opt, "fmt_XXt")) then + fmt_XXt = opt@fmt_XXt ; "f8.2", "f12.4", "e13.5", ...
+ end if + write_matrix(XXt, M + fmt_XXt, opt_wm) + opt_wm@title = "--------- XXti ---------" + fmt_XXti = "f11.2" + if (isatt(opt, "fmt_XXti")) then + fmt_XXti = opt@fmt_XXti + end if + write_matrix(XXti, M + fmt_XXti, opt_wm) + opt_wm@title = "--------- varcovx ---------" + fmt_covx = "f11.2" + if (isatt(opt, "fmt_covx")) then + fmt_covx = opt@fmt_covx + end if + write_matrix(varcovx, M + fmt_covx, opt_wm) + log_debug("") + end if + + stderr = new(M, typeof(Y)) + tval = new(M, typeof(Y)) + pval = new(M, typeof(Y)) + df = N - NP + + ; do m = 0, M - 1 + ; stderr(m) = sqrt(varcovx(m, m)) + ; tval(m) = b(m) / stderr(m) + ; pval(m) = betainc(df / (df + tval(m) ^ 2), df / 2.0, 0.5) + ; end do + + ; Print ANOVA information + if (opt .and. isatt(opt, "print_anova") .and. opt@print_anova) then + log_info("") + log_info("--------- ANOVA information ---------") + log_info("") + log_info("SST = " + SST + ", SSR = " + SSR + ", SSE = " + SSE) + log_info("MST = " + MST + ", MSR = " + MSR + ", MSE = " + MSE + \ + ", RSE = " + RSE) + log_info("F-statistic = " + Frat + ", dof = (" + NP + "," + \ + (N - NP - 1) + ")") + log_info("--------- ---------") + log_info("r2 = " + r2 + ", r = " + r + ", r2a = " + r2a + ", fuv = " + fuv) + log_info("--------- ---------") + log_info("stderr, tval, pval: " + stderr + ", " + tval + ", " + pval) + log_info("") + end if + + ; Associate assorted information as attributes of 'b' + b@long_name = "multiple regression coefficients" + b@model = "Yest = b(0) + b(1)*X1 + b(2)*X2 + ... + b(M)*XM" + + b@N = N ; # of 'observations' + b@NP = NP ; # of predictors + b@M = M ; design matrix size + b@bstd = bstd ; standardized coefficients + + b@SST = SST ; [1] + b@SSE = SSE + b@SSR = SSR + + b@MST = MST + b@MSE = MSE + b@MSE_dof = dof + b@MSR = MSR + + b@RSE = RSE ; [1] + b@RSE_dof = N - M - 1 ; [1] + + b@F = Frat ; [1] + b@F_dof = (/NP, dof/) ; [2] + + df1 = b@F_dof(0) + df2 = b@F_dof(1) + b@F_pval = ftest(MSR, df1 + 1, MSE, df2 + 1, 0) * 0.5 ; [1] + + b@r2 = r2 ; [1] + b@r = r ; [1] + b@r2a = r2a ; [1] + + b@fuv = fuv + + b@Yest = Yest ; [NY] + b@Yavg = Yavg ; [1] + b@Ystd = Ystd ; [1] + + b@Xavg = Xavg ; [1] + b@Xstd = Xstd ; [1] + + ; b@stderr = stderr ; [M] + ; b@tval = tval ; [M] + ; b@pval = pval ; [M] + + leave_msg(scriptname, funcname) + return(b) + +end + + +; ############################################################################# +undef("regress_stepwise") +function regress_stepwise(xarr_f[*][*]: numeric, \ + yarr_f[*]: numeric) +; +; Arguments: +; xarr_f: array containing all diagnostics for each model. +; yarr_f: array with the variable to be regressed. +; +; Return value: +; coef: coefficients of the regression (beta vector). +; coef@const: constant of regression (beta_0). +; coef@idx_used_diags: array with indices of diagnostics used in the +; regression model. +; coef@n_terms: number of terms in the regression model (= number of +; used diagnostics). +; coef@YFIT: array with y values from linear regression. +; coef@MCORRELATION: correlation coefficient of multicorrelation. +; coef@correlation: correlation coefficients of individual +; correlations. +; +; Description: +; Performs a multiple diagnostic ensemble regression (MDER) following +; Karpechko et al., 2013. +; +; Caveats: +; Needs attributes diag_script_info@wregr, diag_script_info@wexit and +; optionally diag_script_info@p_value. +; +; Modification history: +; 20180807-schlund_manuel: ported to v2.0. +; 201303??-wenzel_sabrina: written.
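+; +; Example (a minimal sketch; xarr and yarr as described above, with the +; required recipe attributes set): +; coef = regress_stepwise(xarr, yarr) +; yfit = coef@YFIT +; used = coef@idx_used_diags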
+; +local p_value, im, wregr, wexit, dim_diag, dim_dat, f_value_b, ssr0, sst, \ + used_diags, iiter, igd, jgd, nii, cvf, xarr, yarr + +begin + + funcname = "regress_stepwise" + scriptname = "diag_scripts/shared/mder.ncl" + enter_msg(scriptname, funcname) + + ; Required attributes + req_atts = (/"wregr", "wexit"/) + exit_if_missing_atts(diag_script_info, req_atts) + wregr = diag_script_info@wregr + wexit = diag_script_info@wexit + + ; Optional attributes + if (isatt(diag_script_info, "p_value")) then + p_value = diag_script_info@p_value + else + p_value = 0.05 + end if + + ; Convert float arrays to double arrays + xarr = todouble(xarr_f) + yarr = todouble(yarr_f) + copy_VarMeta(xarr_f, xarr) + copy_VarMeta(yarr_f, yarr) + + ; Define some arrays + dim_diag = dimsizes(xarr(:, 0)) ; Number of diagnostics + dim_dat = dimsizes(xarr(0, :)) ; Number of datasets (i.e. models) + diag_names = xarr&diagnostics ; Diagnostic names + + used_diags = -1 ; Array for regression variables + ssr0 = 0.0d + sst = sum((yarr - avg(yarr)) ^ 2) + opt = False ; Options for multi linear regression + + ; Look-up table for critical value (= 1-p percentiles for a F-distribution + ; with 1 degree of freedom in the nominator and varying (0-99 = index of the + ; array) degrees of freedom in the denominator) + if (p_value .eq. 0.1) then + cvf = (/39.8635, 8.52632, 5.53832, 4.54477, 4.06042, 3.77595, 3.58943, \ + 3.45792, 3.36030, 3.28502, 3.22520, 3.17655, 3.13621, 3.10221, \ + 3.07319, 3.04811, 3.02623, 3.00698, 2.98990, 2.97466, 2.96096, \ + 2.94858, 2.93736, 2.92712, 2.91774, 2.90913, 2.90119, 2.89385, \ + 2.88703, 2.88069, 2.87478, 2.86926, 2.86409, 2.85922, 2.85465, \ + 2.85035, 2.84628, 2.84244, 2.83880, 2.83535, 2.83208, 2.82897, \ + 2.82600, 2.82317, 2.82047, 2.81790, 2.81544, 2.81308, 2.81082, \ + 2.80866, 2.80658, 2.80458, 2.80266, 2.80082, 2.79904, 2.79733, \ + 2.79568, 2.79409, 2.79255, 2.79107, 2.78963, 2.78825, 2.78691, \ + 2.78560, 2.78434, 2.78312, 2.78194, 2.78080, 2.77968, 2.77860, \ + 2.77755, 2.77654, 2.77555, 2.77458, 2.77364, 2.77272, 2.77184, \ + 2.77098, 2.77013, 2.76931, 2.76851, 2.76773, 2.76696, 2.76622, \ + 2.76550, 2.76479, 2.76410, 2.76342, 2.76276, 2.76211, 2.76149, \ + 2.76087, 2.76026, 2.75968, 2.75909, 2.75853, 2.75797, 2.75743, \ + 2.75689, 2.75637/) + else if (p_value .eq. 0.05) then + cvf = (/161.448, 18.5128, 10.1280, 7.70865, 6.60789, 5.98738, 5.59145, \ + 5.31766, 5.11735, 4.96460, 4.84434, 4.74723, 4.66719, 4.60011, \ + 4.54308, 4.49400, 4.45132, 4.41388, 4.38075, 4.35124, 4.32479, \ + 4.30095, 4.27935, 4.25968, 4.24170, 4.22520, 4.21001, 4.19597, \ + 4.18296, 4.17088, 4.15962, 4.14910, 4.13926, 4.13002, 4.12134, \ + 4.11316, 4.10546, 4.09817, 4.09128, 4.08475, 4.07855, 4.07265, \ + 4.06705, 4.06171, 4.05661, 4.05175, 4.04710, 4.04265, 4.03839, \ + 4.03431, 4.03039, 4.02663, 4.02302, 4.01954, 4.01620, 4.01298, \ + 4.00987, 4.00687, 4.00398, 4.00120, 3.99849, 3.99588, 3.99337, \ + 3.99092, 3.98856, 3.98627, 3.98405, 3.98190, 3.97981, 3.97778, \ + 3.97581, 3.97390, 3.97203, 3.97023, 3.96847, 3.96676, 3.96509, \ + 3.96348, 3.96189, 3.96035, 3.95885, 3.95739, 3.95597, 3.95457, \ + 3.95320, 3.95189, 3.95059, 3.94932, 3.94809, 3.94687, 3.94570, \ + 3.94454, 3.94341, 3.94230, 3.94123, 3.94017, 3.93913, 3.93811, \ + 3.93711, 3.93615/) + else + error_msg("f", scriptname, funcname, "p_value " + p_value + " not " + \ + "supported yet, function is hardcoded for p=0.05 and p=0.1") + end if + end if + + ; Start stepwise regression + iiter = 1 + do while (wregr .or. wexit) + ; .or.(f_prob .ge. 
f_value .and. f_prob .lt. f_value_b) + log_debug("stepwise regression: iteration " + iiter) + + ; 1. Diagnostics to be tried in forward selection + n_good_diag = dim_diag + 1 - dimsizes(used_diags) + if (n_good_diag .le. 0) then + log_debug("No diagnostics left to test: stopping iteration") + break + end if + + ; Array of untested "good" diagnostics + good_diags = new(n_good_diag, integer) + + ; Loop over good diagnostics + jgd = 0 + do igd = 0, dim_diag - 1 + nii = num(used_diags .eq. igd) + + ; Only include the diagnostic if it is not used already + if (nii .ne. 1) then + good_diags(jgd) = igd + jgd = jgd + 1 + end if + end do + log_debug("The following diagnostics are tried at forward step " + \ + iiter + ": " + diag_names(good_diags)) + + ; 2. Trying the diagnostics in forward selection + ssr1 = new(n_good_diag, double) + sse1 = new(n_good_diag, double) + F_val = new(n_good_diag, double) + yfit := new((/n_good_diag, dim_dat/), double) + + ; Iterate over good diagnostics + do igd = 0, n_good_diag - 1 + if (dimsizes(used_diags) .eq. 1) then + test_diag = xarr(igd, :) + rc = regline_stats(test_diag(:), yarr(:)) + else + test_diag = new((/dimsizes(used_diags), dim_dat/), double) + if (dimsizes(used_diags) .eq. 2) then + test_diag(:dimsizes(used_diags) - 2, :) = \ + xarr(used_diags(1:):used_diags(1:), :) + else + test_diag(:dimsizes(used_diags) - 2, :) = \ + xarr(used_diags(1:), :) + end if + test_diag(dimsizes(used_diags) - 1, :) = xarr(good_diags(igd), :) + rc = reg_multlin_stats(yarr, transpose(test_diag), opt) + MCORRELATION = rc@r + end if + sse1(igd) = rc@SSE + ssr1(igd) = rc@SSR + F_val(igd) = rc@F - ssr0 + yfit(igd, 0:dimsizes(rc@Yest) - 1) = rc@Yest + delete([/test_diag, rc/]) + end do + log_debug("ssr1 - ssr0 = " + (ssr1 - ssr0)) + + ; 3. Defining which diagnostic is the best + im = maxind(ssr1 - ssr0) + log_debug("The following diagnostic has the largest increase in SSR: " + \ + diag_names(good_diags(im)) + ": " + (ssr1(im) - ssr0)) + + ; 4. Testing whether it should be included + dfe = (dim_dat - 1 - dimsizes(used_diags)) + f_value = (ssr1(im) - ssr0) / (sse1(im) / dfe) + f_prob = cvf(dfe - 1) + delete(F_val) + + ; F-test + if (f_value .le. f_prob) then + log_debug("F-test shows that F-value is smaller than theoretical " + \ + "F-value: " + f_value + ", " + f_prob + ", " + ssr1(im) + \ + ", " + ssr0 + ", " + sse1(im) + ", " + dfe) + log_debug("The diagnostic " + diag_names(good_diags(im)) + \ + " is not included into the regression model, skipping " + \ + "backward elimination and stopping iteration") + + ; If the diagnostic is not included then backward elimination step is + ; not needed. + wexit = False + break ; continue + end if + if (f_value .gt. f_prob) then + log_debug("F-test shows that F-value is larger than theoretical " + \ + "F-value: " + f_value + ", " + f_prob + ", " + ssr1(im) + \ + ", " + ssr0 + ", " + sse1(im) + ", " + dfe) + log_debug("The diagnostic " + diag_names(good_diags(im)) + \ + " is included into the regression model.") + + ; Include tested diagnostic (good_diags) to used diagnostics (used_diags) + tmp = new((dimsizes(used_diags) + dimsizes(good_diags(im))), integer) + tmp(0:dimsizes(used_diags) - 1) = used_diags + tmp(dimsizes(used_diags):dimsizes(tmp) - 1) = good_diags(im) + used_diags := tmp + delete(tmp) + ssr0 = ssr1(im) + sse0 = sse1(im) + end if + + ; Only one diagnostic used: break after more than two iterations + if (dimsizes(used_diags) .eq. 2) then + delete([/good_diags, ssr1, sse1/]) + if (iiter .ge. 
2) then + log_debug("Only one diagnostic used and more than 2 iterations: " + \ + "stopping iteration") + break + else + log_debug("Only one diagnostic used, skipping backward elimination") + wregr = False + iiter = iiter + 1 + continue + end if + end if + + ; 5. Trying the diagnostics in backward elimination + wregr = False + log_debug("The following diagnostics are tried at backward step " + \ + iiter + ": " + diag_names(used_diags(1:))) + ssr1 := new(dimsizes(used_diags) - 1, double) + F_val = new(dimsizes(used_diags) - 1, double) + yfit := new((/dimsizes(used_diags) - 1, dim_dat/), double) + + ; Iterate over used diagnostics + do igd = 1, dimsizes(used_diags) - 1 + ngood = ispan(1, dimsizes(used_diags) - 1, 1) + good = ind(ngood .ne. igd) + + if (.not. all(ismissing(good))) then + good = good + 1 + test_diag = xarr(used_diags(good), :) + if (dimsizes(ngood) .eq. 1) then + rc = regline_stats(test_diag, yarr) + else + rc = reg_multlin_stats_rap(yarr, transpose(test_diag), opt) + end if + sse2 = rc@SSE + ssr1(igd - 1) = rc@SSR + F_val(igd - 1) = ssr0 - rc@F + yfit(igd - 1, 0:dimsizes(rc@Yest) - 1) = rc@Yest + delete([/rc, test_diag/]) + else + ssr1(igd - 1) = 0 + F_val(igd - 1) = (ssr0 - ssr1(im)) / (sse0 / dfe) + end if + delete([/good, ngood/]) + end do + log_debug("ssr1 = " + ssr1) + + ; 6. Defining which diagnostic is the worst + im := minind(ssr0 - ssr1) + log_debug("The following diagnostic has the smallest decrease in SSR: " + \ + diag_names(used_diags(im+1)) + ": " + (ssr0 - ssr1(im))) + + ; 7. Testing whether it should be excluded + dfe = (dim_dat - dimsizes(used_diags)) + f_value_b = (ssr0 - ssr1(im)) / (sse0 / dfe) + f_prob = cvf(dfe - 1) + delete(F_val) + + ; F-test + if (f_value_b .le. f_prob) then + log_debug("F-test shows that F-value is smaller than theoretical " + \ + "F-value: " + f_value_b + ", " + f_prob + ", " + ssr0 + \ + ", " + ssr1(im) + ", " + sse0 + ", " + dfe) + log_debug("The diagnostic " + diag_names(used_diags(im + 1)) + \ + " is excluded from the regression model.") + good = ind(used_diags .ne. used_diags(im + 1)) + tmp = used_diags(good) + used_diags := tmp + delete([/tmp, good/]) + ssr0 = ssr1(im) + end if + if (f_value_b .gt. f_prob) then + log_debug("F-test shows that F-value is larger than theoretical " + \ + "F-value: " + f_value_b + ", " + f_prob + ", " + ssr0 + \ + ", " + ssr1(im) + ", " + sse0 + ", " + dfe) + log_debug("The diagnostic " + diag_names(used_diags(im + 1)) + \ + " is left in the regression model.") + end if + delete([/good_diags, ssr1, sse1/]) + + ; if iiter equals 2 then stop + ; if (iiter .eq. dim_diag - 2) then + ; ... + ; end if + + ; if the regression model includes two terms then stop + if (dimsizes(used_diags) .eq. 3) then + log_debug("Regression model includes two terms, stopping regression") + break + end if + iiter = iiter + 1 + end do ; Stepwise regression + + ; Final model + if (.not.
wregr) then + log_debug("regress_stepwise successfully returned a regression model") + idx_used_diags = used_diags(1:) + n_terms = dimsizes(used_diags) - 1 + finald = xarr(used_diags(1:), :) + rc = reg_multlin_stats(yarr, transpose(finald), opt) + MCORRELATION = rc@r2 + correlation = rc@pval + coef = rc(1:) + YFIT = rc@Yest + const = rc(0) + else + error_msg("w", scriptname, funcname, "regress_stepwise returned an " + \ + "empty regression model") + const = avg(yarr) + idx_used_diags := 0 + YFIT = new(dim_dat, double) + YFIT = const + rc = reg_multlin_stats(yarr, transpose(yfit), opt) + MCORRELATION = rc@r + correlation = escorc(yfit, yarr) + coef = 0.0 + n_terms = 0 + coef@Yest = new(dim_dat, double) + coef@Yest(:) = const + end if + + ; Return elements + coef@const = const + coef@idx_used_diags = idx_used_diags + coef@n_terms = n_terms + coef@YFIT = YFIT + coef@MCORRELATION = MCORRELATION + coef@correlation = correlation + + ; Return data + leave_msg(scriptname, funcname) + return(coef) + +end diff --git a/esmvaltool/diag_scripts/shared/names.py b/esmvaltool/diag_scripts/shared/names.py index 25dd656585..49da19ebbe 100644 --- a/esmvaltool/diag_scripts/shared/names.py +++ b/esmvaltool/diag_scripts/shared/names.py @@ -14,8 +14,13 @@ TIME = 'time' YEAR = 'year' +ACTIVITY = 'activity' +ALIAS = 'alias' DATASET = 'dataset' EXP = 'exp' +ENSEMBLE = 'ensemble' +START_YEAR = 'start_year' +END_YEAR = 'end_year' LONG_NAME = 'long_name' OBS = 'OBS' PROJECT = 'project' @@ -23,14 +28,16 @@ STANDARD_NAME = 'standard_name' UNITS = 'units' VAR_NAME = 'var_name' INPUT_DATA = 'input_data' INPUT_FILES = 'input_files' +FILENAME = 'filename' +FX_FILES = 'fx_files' METADATA_YAML_FILE = 'metadata.yml' OUTPUT_FILE_TYPE = 'output_file_type' PLOT_DIR = 'plot_dir' SCRIPT = 'script' VERSION = 'version' WORK_DIR = 'work_dir' -WRITE_NETCDF = 'write_netcdf' -WRITE_PLOTS = 'write_plots' diff --git a/esmvaltool/diag_scripts/shared/plot/GO_panels.ncl b/esmvaltool/diag_scripts/shared/plot/GO_panels.ncl deleted file mode 100644 index 42326cdafd..0000000000 --- a/esmvaltool/diag_scripts/shared/plot/GO_panels.ncl +++ /dev/null @@ -1,2114 +0,0 @@ -; ############################################################################# -; GENERAL ROUTINES FOR GO PLOTTING -; ############################################################################# -; Please consider using or extending existing routines before adding new ones. -; Check the header of each routine for documentation.
-; -; Contents: -; function get_title_suffix -; procedure remove_attrs -; procedure plot_two_by_one -; procedure plot_three_by_one_diff -; procedure two_by_one -; procedure three_by_one_diff -; procedure plot_three_by_one_vector -; procedure three_by_one_vector -; procedure plot_multipanel -; procedure multipanel -; procedure plot_multipanel_vector -; procedure multipanel_vector -; procedure seasonal_plot -; procedure xy_plot_wrapper -; procedure ts_line_wrapper -; -; ############################################################################# - -load "$diag_scripts/../interface_scripts/auxiliary.ncl" -load "$diag_scripts/../interface_scripts/logging.ncl" - -; ############################################################################# - -begin - loadscript(exact_panel_position_file) -end - -; ############################################################################# - -load "$diag_scripts/aux/SAMonsoon/SAMonsoon.ncl" -load "$diag_scripts/aux/SAMonsoon/SAMonsoon_panel_positions.ncl" -load "$diag_scripts/aux/SAMonsoon/exact_panel_positions_generic.ncl" - -load "$diag_scripts/shared/plot/style.ncl" -load "$diag_scripts/shared/plot/xy_line.ncl" - -; ############################################################################# - -undef("get_title_suffix") -function get_title_suffix(plot_settings[1]:logical) -local main_title_suffix -begin - - if (isatt(plot_settings, "main_title_suffix")) then - main_title_suffix = plot_settings@main_title_suffix - else - main_title_suffix = "" - end if - return(main_title_suffix) - -end - -; ############################################################################# - -undef("remove_attrs") -procedure remove_attrs(p_settings[1]:logical, - attributes[*]:string) -local idx, settings -begin - do idx = 0, dimsizes(attributes) - 1 - if (isatt(p_settings, attributes(idx))) then - delete(p_settings@$attributes(idx)$) - end if - end do -end - -; ############################################################################# - -undef("plot_two_by_one") -procedure plot_two_by_one(wks[1]:graphic, - res[1]:logical, - di[1]:logical, - plot_settings[1]:logical, - valid_statistics[*]:string, - storage_name1[1]:string, - storage_name2[1]:string, - debuginfo[1]:logical, - storage_vault[1]:logical, - idx_mod[1]:integer) -begin - res = panel_three_by_one(res, 0) ; Update resource for first plot - res@cnLevelSelectionMode = "ExplicitLevels" - if (isatt(res, "cnLevels")) then - delete(res@cnLevels) - end if - - dummy_array = (/1e+20/) - dummy_array@_FillValue = 1e+20 - - remove_attrs(res, (/"cnLevels"/)) - cn_levels_string = \ - str_sub_str("cn_levels_" + storage_name1 + "_basic", "-", "_") - res@cnLevels = di@$cn_levels_string$ - - remove_attrs(plot_settings, (/"idx"/)) - plot_settings@idx = plot_settings@idx_ref - - ; ------------------------------------- - ; Plot model entry (first plot) - ; ------------------------------------- - main_title_prefix = "" ; input_file_info@dataset(idx_mod) - main_title_suffix = "" - res = panel_two_by_one(res, 0) ; Update resource for second plot - - storage_record = \ - str_join((/storage_name1, sprinti("%i", idx_mod)/), str_vault_sep) - upper = retrieve_from_vault(storage_vault, storage_record) - plot_settings@type = "mean" - - remove_attrs(plot_settings, (/"idx", "gsn_right_string"/)) - plot_settings@idx = idx_mod - plot_settings@gsn_right_string = upper@gsn_right_string - - statistics = True - statistics = \ - compute_stat((/"yrs", "mean", "bob", "eio", "mean-corr"/), \ - valid_statistics, upper, dummy_array) - - 
plot_settings@skip_default_title = False - plot1 = single_contour(wks, - upper, - main_title_prefix, - main_title_suffix, - plot_settings, - debuginfo, - statistics, - res) - delete(statistics) - - ; --------------------------------------- - ; Plot model entry (second plot) - ; --------------------------------------- - main_title_prefix = "" - main_title_suffix = "" - res = panel_two_by_one(res, 1) ; Update resource for third plot - delete(res@cnLevels) - cn_levels_string = \ - str_sub_str("cn_levels_" + storage_name2 + "_basic", "-", "_") - res@cnLevels = di@$cn_levels_string$ - - storage_record = \ - str_join((/storage_name2, sprinti("%i", idx_mod)/), str_vault_sep) - lower = retrieve_from_vault(storage_vault, storage_record) - plot_settings@type = "mean" - - if (isatt(plot_settings, "idx")) then - delete(plot_settings@idx) - end if - plot_settings@idx = idx_mod - - statistics = True - statistics = \ - compute_stat((/"yrs", "mean", "bob", "eio", "mean-corr"/), \ - valid_statistics, lower, dummy_array) - plot_settings@gsn_right_string = lower@gsn_right_string - plot_settings@skip_default_title = True - plot2 = single_contour(wks,\ - lower,\ - main_title_prefix,\ - main_title_suffix,\ - plot_settings,\ - debuginfo,\ - statistics,\ - res) - delete(statistics) - - txres = True - if (any(idx_mod .eq. plot_settings@idx_ref)) then - inset_top_text(wks, plot1, "Reference", txres) - inset_top_text(wks, plot2, "Reference", txres) - end if - lbtitle = plot_settings@lb_units - two_by_one_labelbar(wks, plot2, lbtitle) - - if (debuginfo) then - txres@txFuncCode = "~" - debugbox = write_info(debuginfo) - am_infobox_id = place_debuginfo(wks, debugbox, txres, plot1) - am_infobox_id = place_debuginfo(wks, debugbox, txres, plot2) - drawNDCGrid(wks) - end if - draw(plot1) - draw(plot2) - if (debuginfo) then - place_description(wks, debuginfo@description, debuginfo@description_ycoord) - end if - -end - -; ############################################################################# - -undef("plot_three_by_one_diff") -procedure plot_three_by_one_diff(wks[1]:graphic, - res[1]:logical, - di[1]:logical, - plot_settings[1]:logical, - valid_statistics[*]:string, - storage_name[1]:string, - debuginfo[1]:logical, - storage_vault[1]:logical, - ref[*][*]:numeric, - idx_mod[1]:integer) -begin - - res = panel_three_by_one(res, 0) ; Update resource for first plot - res@cnLevelSelectionMode = "ExplicitLevels" - if (isatt(res, "cnLevels")) then - delete(res@cnLevels) - end if - - dummy_array = (/1e+20/) - dummy_array@_FillValue = 1e+20 - - ; ------------------------------------- - ; Plot reference plot (top plot) - ; ------------------------------------- - if (isatt(res, "cnLevels")) then - delete(res@cnLevels) - end if - cn_levels_string = \ - str_sub_str("cn_levels_" + storage_name + "_basic", "-", "_") - res@cnLevels = di@$cn_levels_string$ - - main_title_prefix = "(1) " + di@season + "-" + storage_name + " of " - main_title_suffix = get_title_suffix(plot_settings) - plot_settings@type = "ref" - - if (isatt(plot_settings, "idx")) then - delete(plot_settings@idx) - end if - plot_settings@idx = plot_settings@idx_ref - - statistics = True - statistics = \ - compute_stat((/"yrs", "mean", "bob", "eio"/), \ - valid_statistics, ref, dummy_array) - plot_ref = single_contour(wks, \ - ref, \ - main_title_prefix, \ - main_title_suffix, \ - plot_settings, \ - debuginfo, \ - statistics, \ - res) - delete(statistics) - txres = True - inset_top_text(wks, plot_ref, "Reference", txres) - - three_by_one_labelbar(wks, plot_ref, 
plot_settings) - - ; ------------------------------------- - ; Plot model entry (middle plot) - ; ------------------------------------- - main_title_prefix = "(2) " + di@season + "-" + storage_name + " of " - res = panel_three_by_one(res, 1) ; Update resource for second plot - - storage_record = \ - str_join((/storage_name, sprinti("%i", idx_mod)/), str_vault_sep) - curr = retrieve_from_vault(storage_vault, storage_record) - plot_settings@type = "mean" - - if (isatt(plot_settings, "idx")) then - delete(plot_settings@idx) - end if - plot_settings@idx = idx_mod - - statistics = True - statistics = \ - compute_stat((/"yrs", "mean", "bob", "eio", "mean-corr"/), \ - valid_statistics, curr, dummy_array) - statistics = compute_stat((/"corr"/), valid_statistics, curr, ref) - plot = single_contour(wks, \ - curr, \ - main_title_prefix, \ - main_title_suffix, \ - plot_settings, \ - debuginfo, \ - statistics, \ - res) - delete(statistics) - - ; --------------------------------------- - ; Plot reference model diff (bottom plot) - ; --------------------------------------- - main_title = "(2) - (1)" - main_title_suffix = "" - res = panel_three_by_one(res, 2) ; Update resource for third plot - delete(res@cnLevels) - cn_levels_string = \ - str_sub_str("cn_levels_" + storage_name + "_diff_basic", "-", "_") - res@cnLevels = di@$cn_levels_string$ - res@cnFillPalette = "BlWhRe" - - ; Plot mean of differences of first and second data set, first - ; interpolate to the reference data set grid (conservative interpolation) - diff_model_ref = get_dataset_minus_ref(curr, ref) - plot_settings@type = "diff" - plot_settings@skip_default_title = True - statistics = True - statistics = compute_stat((/"mean"/), valid_statistics, curr, dummy_array) - statistics = compute_stat((/"rmse"/), valid_statistics, curr, ref) - plot_diff = single_contour(wks, \ - diff_model_ref, \ - main_title, \ - main_title_suffix, \ - plot_settings, \ - debuginfo, \ - statistics, \ - res) - delete(statistics) - delete(diff_model_ref) - plot_settings@skip_default_title = False - - ; remove the light_centered colour palette for climatological plots (OA) - delete(res@cnFillPalette) - if (debuginfo) then - txres@txFuncCode = "~" - debugbox = write_info(debuginfo) - am_infobox_id = place_debuginfo(wks, debugbox, txres, plot_ref) - am_infobox_id = place_debuginfo(wks, debugbox, txres, plot) - am_infobox_id = place_debuginfo(wks, debugbox, txres, plot_diff) - drawNDCGrid(wks) - end if - draw(plot_ref) - draw(plot_diff) - draw(plot) - if (debuginfo) then - place_description(wks, debuginfo@description, debuginfo@description_ycoord) - end if - -end - -; ############################################################################# - -undef("two_by_one") -procedure two_by_one(storage_vault[1]:logical, - di[1]:logical, - plot_settings[1]:logical, - storage_name1[1]:string, - storage_name2[1]:string, - debuginfo[1]:logical, - valid_statistics[*]:string, - res[1]:logical) -; Description: -; For each 2D-model field stored in the 'storage_vault', plot two separate -; figures. The various logicals carry switches for the plot routine behavior. 
-local aux_title_info, cn_levels_string, curr, diag_description, \ - diag_script_base, dim_MOD, dim_VAR, idx_mod, dummy_array, field_type0, \ - lbtitle, main_title, main_title_prefix, output_dir, output_filename, \ - output_file_path, plot, plot_diff, plot_ref, ref, res, storage_record, \ - textres, var0, wks, diff_model_ref, txres -begin - - dim_MOD = dimsizes(input_file_info@dataset) - dim_VAR = dimsizes(variables) - var0 = variables(0) - field_type0 = field_types(0) - - ; Handle the case when using two variables - if (dimsizes(variables) .gt. 1) then - var1 = variables(1) - field_type1 = field_types(1) - else - var1 = "" - field_type1 = "" - end if - - ; 'output_file_type' if fetched from ncl.interface - if (.not. isdefined("output_file_type")) then - output_file_type = "ps" - end if - - ; Output dir - ; 'plot_dir' if fetched from ncl.interface - diag_script_base = basename(plot_settings@diag_script) - output_dir = config_user_info@plot_dir - - ; -------------------------------- - ; Static resources for these plots - ; -------------------------------- - res@cnFillOn = True - res@cnLinesOn = False - res@cnMissingValFillColor = "Background" - res@cnLineLabelsOn = False - res@gsnFrame = False - res@gsnDraw = False - res@gsnAddCyclic = False - do idx_mod = 0, dim_MOD - 1 - - ; ---------------------------------------- - ; Define output workstation - ; ---------------------------------------- - aux_title_info = di@season + "-" + storage_name1 + "-" + storage_name2 - output_filename = \ - interface_get_figure_filename(diag_script_base, var0 + var1, \ - field_type0 + field_type1, \ - aux_title_info, idx_mod) - output_file_path = output_dir + output_filename - wks = gsn_open_wks(output_file_type, output_file_path) - plot_two_by_one(wks, \ - res, \ - di, \ - plot_settings, \ - valid_statistics, \ - storage_name1, \ - storage_name2, \ - False, \ - storage_vault, \ - idx_mod) - frame(wks) ; Workstation output - delete(wks) - - ; Debug version - if (debuginfo) then - aux_title_info = \ - di@season + "-" + storage_name1 + "-" + storage_name2 + "-debug" - output_filename = \ - interface_get_figure_filename(diag_script_base, var0 + var1, \ - field_type0 + field_type1, \ - aux_title_info, idx_mod) - output_file_path = output_dir + output_filename - wks_debug = gsn_open_wks(output_file_type, output_file_path) - plot_two_by_one(wks_debug, \ - res, \ - di, \ - plot_settings, \ - valid_statistics, \ - storage_name1, \ - storage_name2, \ - debuginfo, \ - storage_vault, \ - idx_mod) - frame(wks_debug) ; Workstation output - delete(wks_debug) - end if - - end do ; idx_mod = 0, dim_MOD - 1 - -end - -; ############################################################################# - -undef("three_by_one_diff") -procedure three_by_one_diff(storage_vault[1]:logical, - di[1]:logical, - plot_settings[1]:logical, - storage_name[1]:string, - debuginfo[1]:logical, - valid_statistics[*]:string, - res[1]:logical) -; Description: -; For each 2D-model field stored in the 'storage_vault', plot a separate -; figure with a reference plot at the top, the model in the middle, and a -; diff at the bottom. The various logicals carry switches for the plot -; routine behavior. 
-local aux_title_info, cn_levels_string, curr, diag_description, \ - diag_script_base, dim_MOD, dim_VAR, idx_mod, dummy_array, field_type0, \ - lbtitle, main_title, main_title_prefix, output_dir, output_filename, \ - output_file_path, plot, plot_diff, plot_ref, ref, res, storage_record, \ - textres, var0, wks, diff_model_ref, txres -begin - - dim_MOD = dimsizes(input_file_info@dataset) - dim_VAR = dimsizes(variables) - var0 = variables(0) - field_type0 = field_types(0) - - ; Handle the case when using two variables - if (dimsizes(variables) .gt. 1) then - var1 = variables(1) - field_type1 = field_types(1) - else - var1 = "" - field_type1 = "" - end if - - ; 'output_file_type' if fetched from ncl.interface - if (.not. isdefined("output_file_type")) then - output_file_type = "ps" - end if - - ; Output dir - ; 'plot_dir' if fetched from ncl.interface - diag_script_base = basename(plot_settings@diag_script) - output_dir = config_user_info@plot_dir - - ; -------------------------------- - ; Static resources for these plots - ; -------------------------------- - res@cnFillOn = True - res@cnLinesOn = False - res@cnMissingValFillColor = "Background" - res@cnLineLabelsOn = False - res@gsnFrame = False - res@gsnDraw = False - res@gsnAddCyclic = False - - storage_record = \ - str_join((/storage_name, sprinti("%i", plot_settings@idx_ref(0))/), \ - str_vault_sep) - ref = retrieve_from_vault(storage_vault, storage_record) - - do idx_mod = 0, dim_MOD - 1 - if (any(idx_mod .eq. plot_settings@idx_ref)) then - continue - end if - - ; ---------------------------------------- - ; Define output workstation - ; ---------------------------------------- - aux_title_info = di@season + "-" + storage_name - output_filename = \ - interface_get_figure_filename(diag_script_base, var0 + var1, \ - field_type0 + field_type1, \ - aux_title_info, idx_mod) - output_file_path = output_dir + output_filename - wks = gsn_open_wks(output_file_type, output_file_path) - plot_three_by_one_diff(wks, \ - res, \ - di, \ - plot_settings, \ - valid_statistics, \ - storage_name, \ - False, \ - storage_vault, \ - ref, \ - idx_mod) - frame(wks) ; Workstation output - delete(wks) - - ; Debug version - if (debuginfo) then - aux_title_info = di@season + "-" + storage_name + "-debug" - output_filename = \ - interface_get_figure_filename(diag_script_base, var0 + var1, \ - field_type0 + field_type1, \ - aux_title_info, idx_mod) - output_file_path = output_dir + output_filename - wks_debug = gsn_open_wks(output_file_type, output_file_path) - plot_three_by_one_diff(wks_debug, \ - res, \ - di, \ - plot_settings, \ - valid_statistics, \ - storage_name, \ - debuginfo, \ - storage_vault, \ - ref, \ - idx_mod) - frame(wks_debug) ; Workstation output - delete(wks_debug) - end if - - end do ; idx_mod = 0, dim_MOD - 1 - -end - -; ############################################################################# - -undef("plot_three_by_one_vector") -procedure plot_three_by_one_vector(wks[1]:graphic, - res[1]:logical, - di[1]:logical, - plot_settings[1]:logical, - valid_statistics[*]:string, - storage_name[1]:string, - debuginfo[1]:logical, - storage_vault[1]:logical, - ua_ref[*][*]:numeric, - va_ref[*][*]:numeric, - speed_ref[*][*]:numeric, - idx_mod[1]:integer) -begin - res = panel_three_by_one(res, 0) ; Update resource for first plot - res@cnLevelSelectionMode = "ExplicitLevels" - if (isatt(res, "cnLevels")) then - delete(res@cnLevels) - end if - - ; ------------------------------ - ; Plot reference plot (top plot) - ; ------------------------------ - 
cn_levels_string = \ - str_sub_str("cn_levels_" + storage_name + "_basic", "-", "_") - res@cnLevels = di@$cn_levels_string$ - - main_title_prefix = "(1) " + di@season + "-" + storage_name + " of " - main_title_suffix = "" - plot_settings@type = "ref" - - if (isatt(plot_settings, "idx")) then - delete(plot_settings@idx) - end if - plot_settings@idx = plot_settings@idx_ref - - dummy_array = (/1e+20/) - dummy_array@_FillValue = 1e+20 - - statistics = True - statistics = \ - compute_stat((/"yrs", "mean"/), valid_statistics, speed_ref, dummy_array) - plot_ref = single_contour(wks, \ - speed_ref, \ - main_title_prefix, \ - main_title_suffix, \ - plot_settings, \ - debuginfo, \ - statistics, \ - res) - delete(statistics) - txres = True - inset_top_text(wks, plot_ref, "Reference", txres) - - max_speed_ref = max(speed_ref) - if (storage_name .ne. "stddev") then - plot_ref_v = single_vector(wks, \ - max_speed_ref, \ - speed_ref, \ - ua_ref, \ - va_ref, \ - main_title_prefix, \ - main_title_suffix, \ - plot_settings, \ - debuginfo, \ - res) - overlay(plot_ref, plot_ref_v) - end if - three_by_one_labelbar(wks, plot_ref, plot_settings@lb_units) - - ; ------------------------------------- - ; Plot model entry plot (middle plot) - ; ------------------------------------- - main_title_prefix = "(2) " + di@season + "-" + storage_name + " of " - res = panel_three_by_one(res, 1) ; Update resource for second plot - - storage_record = \ - str_join((/storage_name, "ua", sprinti("%i", idx_mod)/), str_vault_sep) - ua = retrieve_from_vault(storage_vault, storage_record) - - storage_record = \ - str_join((/storage_name, "va", sprinti("%i", idx_mod)/), str_vault_sep) - va = retrieve_from_vault(storage_vault, storage_record) - - storage_record = \ - str_join((/storage_name, sprinti("%i", idx_mod)/), str_vault_sep) - speed = retrieve_from_vault(storage_vault, storage_record) - plot_settings@type = "mean" - - if (isatt(plot_settings, "idx")) then - delete(plot_settings@idx) - end if - plot_settings@idx = idx_mod - - ; Interpolate to reference grid for pattern correlation calculation - diff_model_ref = get_dataset_minus_ref(speed, speed_ref) - - statistics = True - statistics = \ - compute_stat((/"yrs", "mean"/), valid_statistics, speed, dummy_array) - statistics = compute_stat((/"corr"/), valid_statistics, speed, speed_ref) - plot = single_contour(wks, \ - speed, \ - main_title_prefix, \ - main_title_suffix, \ - plot_settings, \ - debuginfo, \ - statistics, \ - res) - delete(statistics) - if (storage_name .ne. 
"stddev") then - plot_v = single_vector(wks, \ - max_speed_ref, \ - speed, \ - ua, \ - va, \ - main_title_prefix, \ - main_title_suffix, \ - plot_settings, \ - debuginfo, \ - res) - overlay(plot, plot_v) - end if - - ; -------------------------------------------- - ; Plot model reference diff plot (bottom plot) - ; -------------------------------------------- - ; Plot mean of differnces of first and second data set, first - ; interpolate to the reference data set grid (bilinear interpolation) - - diff_model_ref = get_dataset_minus_ref(speed, speed_ref) - diff_model_ua_ref = get_dataset_minus_ref(ua, ua_ref) - diff_model_va_ref = get_dataset_minus_ref(va, va_ref) - - main_title = "(1) - (2)" - res = panel_three_by_one(res, 2) ; Update resource for third plot - delete(res@cnLevels) - cn_levels_string = \ - str_sub_str("cn_levels_" + storage_name + "_diff_basic", "-", "_") - res@cnLevels = di@$cn_levels_string$ - - plot_settings@type = "diff" - plot_settings@skip_default_title = True - - statistics = True - statistics = \ - compute_stat((/"mean"/), valid_statistics, diff_model_ref, dummy_array) - statistics = \ - compute_stat((/"rmse", "corr"/), valid_statistics, speed, speed_ref) - plot_diff = single_contour(wks, \ - diff_model_ref, \ - main_title, \ - main_title_suffix, \ - plot_settings, \ - debuginfo, \ - statistics, \ - res) - delete(statistics) - if (storage_name .ne. "stddev") then - plot_diff_v = single_vector(wks, \ - max_speed_ref, \ - diff_model_ref, \ - diff_model_ua_ref, \ - diff_model_va_ref, \ - main_title, \ - main_title_suffix, \ - plot_settings, \ - debuginfo, \ - res) - overlay(plot_diff, plot_diff_v) - end if - plot_settings@skip_default_title = False - - if (debuginfo) then - debugbox = write_info(debuginfo) - am_infobox_id = place_debuginfo(wks, debugbox, txres, plot_ref) - am_infobox_id = place_debuginfo(wks, debugbox, txres, plot) - am_infobox_id = place_debuginfo(wks, debugbox, txres, plot_diff) - drawNDCGrid(wks) - end if - draw(plot_ref) - draw(plot_diff) - draw(plot) - if (debuginfo) then - place_description(wks, debuginfo@description, debuginfo@description_ycoord) - end if -end - -; ############################################################################# - -undef("three_by_one_vector") -procedure three_by_one_vector(storage_vault[1]:logical, - di[1]:logical, - plot_settings[1]:logical, - storage_name[1]:string, - debuginfo[1]:logical, - valid_statistics[*]:string, - res[1]:logical) -; Description: -; For each set of three 2D-model fields (vector_x, vector_y and abs) stored -; in the 'storage_vault', plot a separate figure with a reference plot at -; the top (abs + vector f, the model in the middle, and a diff at the -; bottom. The various logicals carry switches for the plot routine behavior. -local aux_title_info, cn_levels_string, curr, diag_description, \ - diag_script_base, dim_MOD, dim_VAR, idx_mod, field_type0, lbtitle, \ - main_title, main_title_prefix, output_dir, output_filename, \ - output_file_path, plot, plot_diff, plot_ref, ref, res, storage_record, \ - textres, var0, wks, wks_debug, txres -begin - dim_MOD = dimsizes(input_file_info@dataset) - dim_VAR = dimsizes(variables) - var0 = variables(0) - var1 = variables(1) - field_type0 = field_types(0) - field_type1 = field_types(1) - - ; 'output_file_type' if fetched from ncl.interface - if (.not. 
isdefined("output_file_type")) then - output_file_type = "ps" - end if - - ; Output dir - ; 'plot_dir' if fetched from ncl.interface - diag_script_base = basename(plot_settings@diag_script) - output_dir = config_user_info@plot_dir - - ; -------------------------------- - ; Static resources for these plots - ; -------------------------------- - res@mpFillOn = False - res@cnFillOn = True - res@cnLinesOn = False - res@cnMissingValFillColor = "Background" - res@cnLineLabelsOn = False - res@gsnFrame = False - res@gsnDraw = False - res@gsnAddCyclic = False - - txres = True - txres@txFuncCode = "~" - - ; Fetch reference plot - storage_record = \ - str_join((/storage_name, "ua", \ - sprinti("%i", plot_settings@idx_ref(0))/), str_vault_sep) - ua_ref = retrieve_from_vault(storage_vault, storage_record) - - storage_record = \ - str_join((/storage_name, "va", \ - sprinti("%i", plot_settings@idx_ref(0))/), str_vault_sep) - va_ref = retrieve_from_vault(storage_vault, storage_record) - - storage_record = \ - str_join((/storage_name, \ - sprinti("%i", plot_settings@idx_ref(0))/), str_vault_sep) - speed_ref = retrieve_from_vault(storage_vault, storage_record) - - do idx_mod = 0, dim_MOD - 1 - if (idx_mod .eq. plot_settings@idx_ref) then - continue - end if - - ; ------------------------- - ; Define output workstation - ; ------------------------- - aux_title_info = di@season + "-" + storage_name - output_filename = \ - interface_get_figure_filename(diag_script_base, var0 + var1, \ - field_type0 + field_type1, \ - aux_title_info, idx_mod) - output_file_path = output_dir + output_filename - wks = gsn_open_wks(output_file_type, output_file_path) - plot_three_by_one_vector(wks, \ - res, \ - di, \ - plot_settings, \ - valid_statistics, \ - storage_name, \ - False, \ - storage_vault, \ - ua_ref, \ - va_ref, \ - speed_ref, \ - idx_mod) - frame(wks) - delete(wks) - - ; Debug version - if (debuginfo) then - aux_title_info = di@season + "-" + storage_name + "-debug" - output_filename = \ - interface_get_figure_filename(diag_script_base, var0 + var1, \ - field_type0 + field_type1, \ - aux_title_info, idx_mod) - output_file_path = output_dir + output_filename - wks_debug = gsn_open_wks(output_file_type, output_file_path) - plot_three_by_one_vector(wks_debug, \ - res, \ - di, \ - plot_settings, \ - valid_statistics, \ - storage_name, \ - debuginfo, \ - storage_vault, \ - ua_ref, \ - va_ref, \ - speed_ref, \ - idx_mod) - frame(wks_debug) - delete(wks_debug) - end if - end do ; idx_mod = start_idx, dim_MOD - 1 - -end - -; ############################################################################# - -undef("plot_multipanel") -procedure plot_multipanel(cols[*]:integer, - rows[*]:float, - curr_idx[1]:integer, - curr_page[1]:integer, - res[1]:logical, - storage_name[1]:string, - storage_vault[1]:logical, - wks[1]:graphic, - di[1]:logical, - plot_settings[1]:logical, - valid_statistics[*]:string, - debuginfo[1]:logical, - figures_per_page[*]:integer, - model_panel_placement[*]:integer, - figure_panel_placement[*]:integer, - plot_array[*]:graphic, - type_specifier[1]:string, - no_figures_on_this_page[1]:integer) -begin - - ; Update position, labelbar and title - curr_figure_pos = figure_panel_placement(curr_idx) - res = \ - panel_n_by_cols(res, curr_figure_pos, rows, cols, \ - figures_per_page(curr_page)) - - if (isatt(res, "cnLevels")) then - delete(res@cnLevels) - end if - cn_levels_string = \ - str_sub_str("cn_levels_" + storage_name + "_basic", "-", "_") - res@cnLevels = di@$cn_levels_string$ - - main_title_prefix = "" - 
main_title_suffix = "" - - ; Fetch reference plot - storage_record = \ - str_join((/storage_name, sprinti("%i", plot_settings@idx_ref(0))/), \ - str_vault_sep) - ref = retrieve_from_vault(storage_vault, storage_record) - - ; Reference plot - if (isatt(plot_settings, "idx")) then - delete(plot_settings@idx) - end if - plot_settings@idx = plot_settings@idx_ref - - dummy_array = (/1e+20/) - dummy_array@_FillValue = 1e+20 - - statistics = True - statistics = \ - compute_stat((/"yrs", "mean", "bob", "eio", "mean-corr"/), \ - valid_statistics, ref, dummy_array) - plot_ref = single_contour(wks, \ - ref, \ - main_title_prefix, \ - main_title_suffix, \ - plot_settings, \ - debuginfo, \ - statistics, \ - res) - delete(statistics) - txres = True - txres@txFuncCode = "~" - if (debuginfo) then - debugbox = write_info(debuginfo) - am_infobox_id = place_debuginfo(wks, debugbox, txres, plot_ref) - end if - - idx_fig = figure_panel_placement(curr_idx) - plot_array(idx_fig) = plot_ref - - ; Skip past the reference plot - curr_idx = curr_idx + 1 - - lbres = True - - if (plot_settings@type .eq. "diff") then - inset_top_text(wks, plot_ref, "REF", txres) - inset_labelbar(wks, plot_ref, res, "REF", lbres) - main_title_suffix = " - REF" - else - inset_top_text(wks, plot_ref, "Reference", txres) - main_title_suffix = "" - end if - - delete(res@cnLevels) - cn_levels_string = str_sub_str("cn_levels_" + storage_name + \ - type_specifier + "_basic", "-", "_") - res@cnLevels = di@$cn_levels_string$ - ; ----------------------------- - ; Create the non-reference plots - ; ----------------------------- - do curr_fig = 1, figures_per_page(curr_page) - 1 - - main_title_prefix = "" - idx_mod = model_panel_placement(curr_idx) - idx_fig = figure_panel_placement(curr_idx) - - ; Skip reference models - if any((idx_mod .eq. plot_settings@idx_ref)) then - continue - end if - - ; Update placement and labelbar colors - res = panel_n_by_cols(res, figure_panel_placement(curr_idx), rows, \ - cols, figures_per_page(curr_page)) - - storage_record = \ - str_join((/storage_name, sprinti("%i", idx_mod)/), str_vault_sep) - curr = retrieve_from_vault(storage_vault, storage_record) - - statistics = True - statistics = compute_stat((/"yrs"/), valid_statistics, curr, dummy_array) - if (plot_settings@type .eq. 
"diff") then - statistics = compute_stat((/"rmse"/), valid_statistics, curr, ref) - diff_model_ref = get_dataset_minus_ref(curr, ref) - delete(curr) - curr = diff_model_ref - delete(diff_model_ref) - else - statistics = compute_stat((/"corr"/), valid_statistics, curr, ref) - end if - statistics = \ - compute_stat((/"mean", "bob", "eio", "mean-corr"/), \ - valid_statistics, curr, dummy_array) - - if (isatt(plot_settings, "idx")) then - delete(plot_settings@idx) - end if - plot_settings@idx = idx_mod - plot = single_contour(wks, \ - curr, \ - main_title_prefix, \ - main_title_suffix, \ - plot_settings, \ - debuginfo, \ - statistics, \ - res) - - if (debuginfo) then - debugbox = write_info(debuginfo) - am_infobox_id = place_debuginfo(wks, debugbox, txres, plot) - end if - - plot_array(idx_fig) = plot - - ; Update index to point to next field - curr_idx = curr_idx + 1 - delete(curr) - delete(statistics) - - end do ; curr_fig=1, figures_per_page(curr_page) - 1 - - plottype_lbres = False - - ; --------------------------------------------------------- - ; Create an blank plot for shared labelbar placement (mean) - ; --------------------------------------------------------- - blank_plot = add_blank_plot_title(wks, \ - di@season + "-" + plot_settings@type + \ - plot_settings@part_of_header, \ - rows, \ - cols) - ; Create shared labelbar - n_by_cols_labelbar(wks, \ - blank_plot, \ - plot_array(no_figures_on_this_page - 1), \ - rows, \ - cols, \ - plot_settings@lb_units, \ - plottype_lbres) - - ; -------------------- - ; Draw mean value plot - ; -------------------- - if (debuginfo) then - drawNDCGrid(wks) - end if - draw(plot_array) - draw(blank_plot) - if (debuginfo) then - place_description(wks, debuginfo@description, debuginfo@description_ycoord) - end if -end - -; ############################################################################# - -undef("multipanel") -procedure multipanel(storage_vault[1]:logical, - di[1]:logical, - plot_settings[1]:logical, - storage_name[1]:string, - debuginfo[1]:logical, - valid_statistics[*]:string, - res[1]:logical) -; Description: -; Place the 2D-model fields stored in the 'storage_vault' on a set of paneled -; figures. First entry is always the reference data set. The various logicals -; carry switches for the plot routine behavior. -local aux_title_info, blank_plot, cn_levels_string, cols, curr, curr_fig, \ - curr_figure_pos, curr_idx, curr_idx_debug, curr_page, diag_script_base, \ - dim_MOD, dim_VAR, dummy_array, field_type0, figure_panel_placement, \ - figures_per_page, idx_fig, idx_mod, lbres, main_title_prefix, \ - model_panel_placement, no_figures_on_this_page, output_dir, \ - output_filename, output_file_path, page_no, plot, plot_array, \ - plot_mean_diff, plot_ref, plot_stddev, plot_stddev_diff, plottype_lbres, \ - ref, res, rows, storage_record, total_no_of_pages, txres, type_specifier, \ - var0, wks -begin - ; No of models may differ from dimsizes(dim_MOD) if variables - ; from different models are combined (e.g., 'ts' from HadISST - ; and 'pr' from TRMM) - ref_no = 0 - stored_ref_name = "model" - dim_MOD = no_unique_vault_entries(storage_vault, stored_ref_name, ref_no) - - dim_VAR = dimsizes(variables) - - var0 = variables(0) - field_type0 = field_types(0) - if (dimsizes(variables) .gt. 1) then - var1 = variables(1) - field_type1 = field_types(1) - else - var1 = "" - field_type1 = "" - end if - - ; 'output_file_type' if fetched from ncl.interface - if (.not. 
isdefined("output_file_type")) then - output_file_type = "ps" - end if - - ; Output dir - ; 'plot_dir' if fetched from ncl.interface - diag_script_base = basename(plot_settings@diag_script) - output_dir = config_user_info@plot_dir - - ; -------------------------------- - ; Static resources for these plots - ; -------------------------------- - res@cnFillOn = True - res@cnLinesOn = False - res@cnLevelSelectionMode = "ExplicitLevels" - res@cnMissingValFillColor = "Background" - res@cnLineLabelsOn = False - res@gsnFrame = False - res@gsnDraw = False - res@lbLabelBarOn = False - res@gsnAddCyclic = False - - ; -------------------------------------- - ; Compute the layout of paneled figures - ; -------------------------------------- - figures_per_page = \ - get_figures_per_page(dim_MOD, max_figures_pp, min_figures_pp) - - ; Which model goes where across all pages - model_panel_placement = new((/sum(figures_per_page)/), integer) - - ; Which model goes where on each page? - figure_panel_placement = new((/sum(figures_per_page)/), integer) - place_models_on_pages(input_file_info, \ - plot_settings@idx_ref, \ - figures_per_page, \ - model_panel_placement, \ - figure_panel_placement) - - ; Output dir - ; 'plot_dir' is fetched from ncl.interface - diag_script_base = basename(diag_script) - output_dir = config_user_info@plot_dir - - if (plot_settings@type .eq. "diff") then - type_specifier = "_diff" - else - type_specifier = "" - end if - - ; --------------------------- - ; Loop over all output pages - ; --------------------------- - curr_idx = 0 - curr_idx_debug = 0 - total_no_of_pages = dimsizes(figures_per_page) - - do curr_page = 0, total_no_of_pages - 1 - - ; -------------------------- - ; Plot arrays for gsn_panels - ; -------------------------- - plot_array = new((/max_figures_pp/), graphic) - - no_figures_on_this_page = figures_per_page(curr_page) - - ; Create a string to add to the figure output filename for mulitple pages - if (total_no_of_pages .gt. 
1) then - page_no = "-page" + sprinti("%i", curr_page) - else - page_no = "" - end if - - ; ----------------------------------- - ; Define output workstation for plots - ; ----------------------------------- - idx_mod = -1 ; No specific model defined - aux_title_info = di@season + "-" + storage_name + type_specifier + page_no - output_filename = \ - interface_get_figure_filename(diag_script_base, var0 + var1, \ - field_type0 + field_type1, \ - aux_title_info, idx_mod) - output_file_path = output_dir + output_filename - wks = gsn_open_wks(output_file_type, output_file_path) - - cols = multipanel_get_no_cols(no_figures_on_this_page, max_cols) - rows = multipanel_get_no_rows(no_figures_on_this_page, max_cols) - - plot_multipanel(cols, \ - rows, \ - curr_idx, \ - curr_page, \ - res, \ - storage_name, \ - storage_vault, \ - wks, \ - di, \ - plot_settings, \ - valid_statistics, \ - False, \ ; No debuginfo - figures_per_page, \ - model_panel_placement, \ - figure_panel_placement, \ - plot_array, \ - type_specifier, \ - no_figures_on_this_page) - frame(wks) - if (debuginfo) then - aux_title_info = di@season + "-" + storage_name + type_specifier + \ - page_no + "-debug" - output_filename = \ - interface_get_figure_filename(diag_script_base, var0 + var1, \ - field_type0 + field_type1, \ - aux_title_info, idx_mod) - output_file_path = output_dir + output_filename - wks_debug = gsn_open_wks(output_file_type, output_file_path) - - plot_multipanel(cols, \ - rows, \ - curr_idx_debug, \ - curr_page, \ - res, \ - storage_name, \ - storage_vault, \ - wks_debug, \ - di, \ - plot_settings, \ - valid_statistics, \ - debuginfo, \ - figures_per_page, \ - model_panel_placement, \ - figure_panel_placement, \ - plot_array, \ - type_specifier, \ - no_figures_on_this_page) - frame(wks_debug) - end if - end do ; curr_fig = 1, figures_per_page(curr_page) - 1 - -end - -; ############################################################################# - -undef("plot_multipanel_vector") -procedure plot_multipanel_vector(cols[*]:integer, - rows[*]:float, - curr_idx[1]:integer, - curr_page[1]:integer, - res[1]:logical, - storage_name[1]:string, - storage_vault[1]:logical, - wks[1]:graphic, - di[1]:logical, - plot_settings[1]:logical, - valid_statistics[*]:string, - debuginfo[1]:logical, - figures_per_page[*]:integer, - model_panel_placement[*]:integer, - figure_panel_placement[*]:integer, - plot_array[*]:graphic, - type_specifier[1]:string, - no_figures_on_this_page[1]:integer) -begin - - ; Update position, labelbar and title - curr_figure_pos = figure_panel_placement(curr_idx) - res = panel_n_by_cols(res, curr_figure_pos, rows, cols, \ - figures_per_page(curr_page)) - - if (isatt(res, "cnLevels")) then - delete(res@cnLevels) - end if - cn_levels_string = \ - str_sub_str("cn_levels_" + storage_name + "_basic", "-", "_") - res@cnLevels = di@$cn_levels_string$ - - main_title_prefix = "" - main_title_suffix = "" - - ; Fetch reference plot - storage_record = \ - str_join((/"mean", "ua", \ - sprinti("%i", plot_settings@idx_ref(0))/), str_vault_sep) - ua_ref = retrieve_from_vault(storage_vault, storage_record) - - storage_record = \ - str_join((/"mean", "va", \ - sprinti("%i", plot_settings@idx_ref(0))/), str_vault_sep) - va_ref = retrieve_from_vault(storage_vault, storage_record) - - storage_record = \ - str_join((/storage_name, \ - sprinti("%i", plot_settings@idx_ref(0))/), str_vault_sep) - speed_ref = retrieve_from_vault(storage_vault, storage_record) - max_speed_ref = max(speed_ref) - - if (isatt(plot_settings, "idx")) then - 
delete(plot_settings@idx) - end if - plot_settings@idx = plot_settings@idx_ref - - dummy_array = (/1e+20/) - dummy_array@_FillValue = 1e+20 - - statistics = True - statistics = \ - compute_stat((/"yrs", "mean"/), valid_statistics, speed_ref, dummy_array) - plot_ref = single_contour(wks, \ - speed_ref, \ - main_title_prefix, \ - main_title_suffix, \ - plot_settings, \ - debuginfo, \ - statistics, \ - res) - delete(statistics) - if (storage_name .ne. "stddev") then - plot_ref_v = single_vector(wks, \ - max_speed_ref, \ - speed_ref, \ - ua_ref, \ - va_ref, \ - main_title_prefix, \ - main_title_suffix, \ - plot_settings, \ - debuginfo, \ - res) - overlay(plot_ref, plot_ref_v) - end if - - txres = True - txres@txFuncCode = "~" - if (debuginfo) then - debugbox = write_info(debuginfo) - am_infobox_id = place_debuginfo(wks, debugbox, txres, plot_ref) - end if - - delete(res@cnLevels) - cn_levels_string = \ - str_sub_str("cn_levels_" + storage_name + type_specifier + \ - "_basic", "-", "_") - res@cnLevels = di@$cn_levels_string$ - - idx_fig = figure_panel_placement(curr_idx) - plot_array(idx_fig) = plot_ref - - ; Skip past the reference plot - curr_idx = curr_idx + 1 - - lbres = True - txres = True - txres@txFuncCode = "~" - - if (plot_settings@type .eq. "diff") then - inset_top_text(wks, plot_ref, "REF", txres) - inset_labelbar(wks, plot_ref, res, "REF", lbres) - main_title_suffix = " - REF" - else - inset_top_text(wks, plot_ref, "Reference", txres) - main_title_suffix = "" - end if - - ; ------------------------------ - ; Create the non-reference plots - ; ------------------------------ - do curr_fig = 1, figures_per_page(curr_page) - 1 - - main_title_prefix = "" - idx_mod = model_panel_placement(curr_idx) - idx_fig = figure_panel_placement(curr_idx) - - ; Update placement and labelbar colors - res = panel_n_by_cols(res, figure_panel_placement(curr_idx), rows, cols, \ - figures_per_page(curr_page)) - - storage_record = \ - str_join((/"mean", "ua", sprinti("%i", idx_mod)/), str_vault_sep) - ua = retrieve_from_vault(storage_vault, storage_record) - - storage_record = \ - str_join((/"mean", "va", sprinti("%i", idx_mod)/), str_vault_sep) - va = retrieve_from_vault(storage_vault, storage_record) - - storage_record = \ - str_join((/storage_name, sprinti("%i", idx_mod)/), str_vault_sep) - speed = retrieve_from_vault(storage_vault, storage_record) - - statistics = True - statistics = compute_stat((/"yrs"/), valid_statistics, speed, dummy_array) - if (plot_settings@type .eq. 
"diff") then - - ; Plot mean of differences of first and second data set, first - ; interpolate to the reference data set grid (bilinear interpolation) - statistics = compute_stat((/"rmse"/), valid_statistics, speed, speed_ref) - - ; ua/va/speed field interpolation - diff_model_ref = get_dataset_minus_ref(speed, speed_ref) - diff_model_ua_ref = get_dataset_minus_ref(ua, ua_ref) - diff_model_va_ref = get_dataset_minus_ref(va, va_ref) - - delete(speed) - speed = diff_model_ref - - delete(ua) - ua = diff_model_ua_ref - - delete(va) - va = diff_model_va_ref - - delete(diff_model_ref) - delete(diff_model_ua_ref) - delete(diff_model_va_ref) - end if - - if (isatt(plot_settings, "idx")) then - delete(plot_settings@idx) - end if - plot_settings@idx = idx_mod - - statistics = compute_stat((/"mean"/), valid_statistics, speed, dummy_array) - statistics = compute_stat((/"corr"/), valid_statistics, speed, speed_ref) - - plot = single_contour(wks, \ - speed, \ - main_title_prefix, \ - main_title_suffix, \ - plot_settings, \ - debuginfo, \ - statistics, \ - res) - delete(statistics) - if (storage_name .ne. "stddev") then - plot_v = single_vector(wks, \ - max_speed_ref, \ - speed, \ - ua, \ - va, \ - main_title_prefix, \ - main_title_suffix, \ - plot_settings, \ - debuginfo, \ - res) - overlay(plot, plot_v) - end if - - if (debuginfo) then - debugbox = write_info(debuginfo) - am_infobox_id = place_debuginfo(wks, debugbox, txres, plot) - end if - - plot_array(idx_fig) = plot - - ; Update index to point to next field - curr_idx = curr_idx + 1 - delete(ua) - delete(va) - delete(speed) - - end do ; curr_fig=1, figures_per_page(curr_page) - 1 - - plottype_lbres = False - - ; --------------------------------------------------------- - ; Create an blank plot for shared labelbar placement (mean) - ; --------------------------------------------------------- - blank_plot = \ - add_blank_plot_title(wks, di@season + "-" + plot_settings@type + \ - plot_settings@part_of_header, rows, cols) - - ; Create shared labelbar - n_by_cols_labelbar(wks, \ - blank_plot, \ - plot_array(no_figures_on_this_page - 1), \ - rows, \ - cols, \ - plot_settings@lb_units, \ - plottype_lbres) - - ; -------------------- - ; Draw mean value plot - ; -------------------- - if (debuginfo) then - drawNDCGrid(wks) - end if - draw(plot_array) - draw(blank_plot) - if (debuginfo) then - place_description(wks, debuginfo@description, debuginfo@description_ycoord) - end if -end - -; ############################################################################# - -undef("multipanel_vector") -procedure multipanel_vector(storage_vault[1]:logical, - di[1]:logical, - plot_settings[1]:logical, - storage_name[1]:string, - debuginfo[1]:logical, - valid_statistics[*]:string, - res[1]:logical) -; Description: -; Place the sets of three 2D-model fields (vector_x, vector_y and abs) -; stored in the 'storage_vault' on a set of paneled figures. -; First entry is always the reference data set. The various logicals -; carry switches for the plot routine behavior. 
-local aux_title_info, blank_plot, cn_levels_string, cols, curr_fig, \ - curr_figure_pos, curr_idx, curr_page, diag_script_base, dim_MOD, dim_VAR, \ - dummy_array, field_type0, field_type1, figure_panel_placement, \ - figures_per_page, idx_fig, idx_mod, lbres, main_title_prefix, \ - model_panel_placement, no_figures_on_this_page, output_dir, \ - output_filename, output_file_path, page_no, plot, plot_array, plot_ref, \ - plottype_lbres, res, rows, speed, speed_ref, storage_record, \ - total_no_of_pages, txres, type_specifier, ua, ua_ref, va, var0, var1, \ - va_ref, wks, plot_ref_v -begin - dim_MOD = dimsizes(input_file_info@dataset) - dim_VAR = dimsizes(variables) - var0 = variables(0) - field_type0 = field_types(0) - if (dimsizes(variables) .gt. 1) then - var1 = variables(1) - field_type1 = field_types(1) - else - var1 = "" - field_type1 = "" - end if - - ; 'output_file_type' if fetched from ncl.interface - if (.not. isdefined("output_file_type")) then - output_file_type = "ps" - end if - - ; Output dir - ; 'plot_dir' if fetched from ncl.interface - diag_script_base = basename(plot_settings@diag_script) - output_dir = config_user_info@plot_dir - - ; -------------------------------- - ; Static resources for these plots - ; -------------------------------- - res@mpFillOn = False - res@cnFillOn = True - res@cnLinesOn = False - res@cnLevelSelectionMode = "ExplicitLevels" - res@cnMissingValFillColor = "Background" - res@cnLineLabelsOn = False - res@gsnFrame = False - res@gsnDraw = False - res@lbLabelBarOn = False - res@gsnAddCyclic = False - - ; -------------------------------------- - ; Compute the layout of paneled figures - ; -------------------------------------- - figures_per_page = get_figures_per_page(dim_MOD, \ - max_figures_pp, \ - min_figures_pp) - - ; Which model goes where across all pages - model_panel_placement = new((/sum(figures_per_page)/), integer) - - ; Which model goes where on each page? - figure_panel_placement = new((/sum(figures_per_page)/), integer) - place_models_on_pages(input_file_info, \ - plot_settings@idx_ref, \ - figures_per_page, \ - model_panel_placement, \ - figure_panel_placement) - - ; Output dir - ; 'plot_dir' is fetched from ncl.interface - diag_script_base = basename(diag_script) - output_dir = config_user_info@plot_dir - - if (plot_settings@type .eq. "diff") then - type_specifier = "_diff" - else - type_specifier = "" - end if - - ; --------------------------- - ; Loop over all output pages - ; --------------------------- - curr_idx = 0 - curr_idx_debug = 0 - total_no_of_pages = dimsizes(figures_per_page) - - do curr_page = 0, total_no_of_pages - 1 - - ; -------------------------- - ; Plot arrays for gsn_panels - ; -------------------------- - plot_array = new((/max_figures_pp/), graphic) - - no_figures_on_this_page = figures_per_page(curr_page) - - ; Create a string to add to the figure output - ; filename for mulitple pages - if (total_no_of_pages .gt. 
1) then - page_no = "-page" + sprinti("%i", curr_page) - else - page_no = "" - end if - - ; ----------------------------------- - ; Define output workstation for plots - ; ----------------------------------- - idx_mod = -1 ; No specific model defined - aux_title_info = di@season + "-" + storage_name + type_specifier + page_no - output_filename = \ - interface_get_figure_filename(diag_script_base, var0 + var1, \ - field_type0 + field_type1, \ - aux_title_info, idx_mod) - - output_file_path = output_dir + output_filename - wks = gsn_open_wks(output_file_type, output_file_path) - - cols = multipanel_get_no_cols(no_figures_on_this_page, max_cols) - rows = multipanel_get_no_rows(no_figures_on_this_page, max_cols) - - plot_multipanel_vector(cols, \ - rows, \ - curr_idx, \ - curr_page, \ - res, \ - storage_name, \ - storage_vault, \ - wks, \ - di, \ - plot_settings, \ - valid_statistics, \ - False, \ ; No debuginfo - figures_per_page, \ - model_panel_placement, \ - figure_panel_placement, \ - plot_array, \ - type_specifier, \ - no_figures_on_this_page) - frame(wks) - - if (debuginfo) then - aux_title_info = \ - di@season + "-" + storage_name + type_specifier + page_no + "-debug" - output_filename = \ - interface_get_figure_filename(diag_script_base, var0 + var1, \ - field_type0 + field_type1, \ - aux_title_info, idx_mod) - output_file_path = output_dir + output_filename - wks_debug = gsn_open_wks(output_file_type, output_file_path) - - plot_multipanel_vector(cols, \ - rows, \ - curr_idx_debug, \ - curr_page, \ - res, \ - storage_name, \ - storage_vault, \ - wks_debug, \ - di, \ - plot_settings, \ - valid_statistics, \ - debuginfo, \ - figures_per_page, \ - model_panel_placement, \ - figure_panel_placement, \ - plot_array, \ - type_specifier, \ - no_figures_on_this_page) - frame(wks_debug) - end if - end do - -end - -; ############################################################################# - -undef("seasonal_plot") -procedure seasonal_plot(storage_vault[1]:logical, - di[1]:logical, - plot_settings[1]:logical, - storage_name[1]:string, - debuginfo[1]:logical) -; Description: -; Produces a bar plot for seasonal climatology or interannual -; variability. The various logicals carry switches for the -; plot routine behavior. -local am_infobox_id, am_labelbar_id, amres, debugbox, diag_script_base, \ - dim_VAR, field_type0, idx_mod, labelbar, labels, lbres, output_dir, \ - output_filename, output_file_path, plot_array, precip, \ - precip_seasonal_maxY, precip_seasonal_minY, precip_seasonal_spreadY, res, \ - storage_record, txres, var0, wks, xaxis, var1, field_type1 -begin - dim_MOD = dimsizes(input_file_info@dataset) - dim_VAR = dimsizes(variables) - var0 = variables(0) - field_type0 = field_types(0) - if (dim_VAR .gt. 1) then ; Assume two variables? - var1 = variables(1) - field_type1 = field_types(1) - else - var1 = "" - field_type1 = "" - end if - - storage_record = storage_name - precip = retrieve_from_vault(storage_vault, storage_record) - units = precip@units - - precip_seasonal_maxY = max(precip) - precip_seasonal_minY = min(precip) - precip_seasonal_spreadY = precip_seasonal_maxY - precip_seasonal_minY - - ; Plot arrays for gsn_plots - plot_array = new((/dim_MOD/), graphic) - - ; ------------------------------- - ; General resources for all plot - ; ------------------------------- - res = True - res@gsnDraw = False - res@gsnFrame = False - res@gsnXYBarChart = True - res@gsnXYBarChartBarWidth = 0.15 ; change bar widths - res@gsnYRefLine = 0. 
; reference line - - ; Specific plot resource for plot iii) - if (plot_settings@type .eq. "iav") then - res@tmXBFormat = "4f" - xaxis = di@years - else - ; Set months on x-axis for plots i) and ii) - res@tmXBMode = "Explicit" ; Define your own tick mark labels. - res@tmXBMinorOn = False ; No minor tick marks. - res@tmXBValues = ispan(0, 11, 1) ; Location to put tick mark labels - res@tmXBLabels = (/"Jan", "Feb", "Mar", "Apr", "May", "Jun", \ - "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"/) - res@tmXBLabelFont = 25 ; Change font of labels. - res@tmXBLabelFontHeightF = 0.015 ; Change font height of labels. - - ; Add some margins - res@trXMinF = -1 - res@trXMaxF = 12 - - xaxis = fspan(0, 11, 12) - end if - - labels = new(dim_MOD, string) - - ; Output dir - ; 'plot_dir' if fetched from ncl.interface - diag_script_base = basename(diag_script) - output_dir = config_user_info@plot_dir - - ; ---------------------------------------------- - ; Define output workstation for seasonal anomaly - ; ---------------------------------------------- - idx_mod = -1 ; No specific model defined - aux_filename_info = plot_settings@aux_filename_info - output_filename = interface_get_figure_filename(diag_script_base, \ - var0 + var1, \ - field_type0 + field_type1, \ - aux_filename_info, \ - idx_mod) - output_file_path = output_dir + output_filename - wks = gsn_open_wks(output_file_type, output_file_path) - - ; --------- - ; Plot loop - ; --------- - do idx_mod = 0, dim_MOD - 1 - labels(idx_mod) = input_file_info@dataset(idx_mod) - res@gsnXYBarChartColors = di@colors_seasonal(idx_mod) - - ; Shift plot to the right and draw/redraw - res@vpXF = 0.2 + idx_mod * 0.005 - res@trYMinF = precip_seasonal_minY - precip_seasonal_spreadY * 0.05 - res@trYMaxF = precip_seasonal_maxY + precip_seasonal_spreadY * 0.05 - - ; Set strings for titles and axis for plot i) and ii) - res@tiMainString = set_string_once(plot_settings@title_string, idx_mod) - res@tiYAxisString = set_string_once(plot_settings@yaxis_string, idx_mod) - res@gsnLeftString = \ - set_string_once(plot_settings@left_plot_subheader, idx_mod) - res@gsnRightString = set_string_once(units, idx_mod) - - if (isatt(plot_settings, "xaxis_string")) then - res@tiXAxisString = set_string_once(plot_settings@xaxis_string, idx_mod) - end if - - plot_array(idx_mod) = gsn_csm_xy(wks, xaxis, precip(idx_mod, :), res) - - ; Remove borders such that next plot only draws the actual "bars" - res = remove_figure_borders(res) - end do ; idx_mod = 0, dim_MOD - 1 - - ; Label bar resources - lbres = True ; labelbar only resources - lbres@vpWidthF = 0.30 ; labelbar width - lbres@vpHeightF = 0.024 * dim_MOD ; labelbar height - lbres@lbBoxMajorExtentF = 0.36 ; puts space between color boxes - lbres@lbFillColors = di@colors_seasonal - lbres@lbMonoFillPattern = True ; Solid fill pattern - lbres@lbLabelJust = "CenterLeft" ; left justify labels - lbres@lbPerimOn = True - lbres@lbPerimFill = 0 - lbres@lbPerimFillColor = "white" - lbres@lbPerimColor = "black" - labelbar = gsn_create_labelbar(wks, dim_MOD, labels, lbres) - - ; Place annotations - amres = True - amres@amZone = 3 - amres@amSide = "Right" - amres@amParallelPosF = 0.88 - amres@amOrthogonalPosF = -0.25 - am_labelbar_id = gsn_add_annotation(plot_array(dim_MOD - 1), labelbar, amres) - - ; If requested, add a text info box and/or debug info box to each plot - txres = True - txres@txFuncCode = "~" - - if (debuginfo) then - debuginfo@years = di@years_string - debugbox = write_info(debuginfo) - am_infobox_id = \ - place_debuginfo(wks, debugbox, txres, 
plot_array(dim_MOD - 1)) - drawNDCGrid(wks) - end if - - draw(wks) - if (debuginfo) then - place_description(wks, debuginfo@description, debuginfo@description_ycoord) - end if - frame(wks) -end - -; ############################################################################# - -undef("xy_plot_wrapper") -procedure xy_plot_wrapper(storage_vault[1]:logical, - di[1]:logical, - plot_settings[1]:logical, - storage_name[1]:string, - debuginfo[1]:logical) -local annots, annots_mmm, avgstd, avgstd_mmm, colors, colors_mmm, dashes, \ - dashes_mmm, diag_script_base, dim_VAR, field_type0, field_type1, i, \ - idx_mod, imon, output_dir, output_filename, output_file_path, res, \ - storage_record, temp, thicks, thicks_mmm, values, values_stddev, \ - values_stddev, var0, var1, wks, wks_debug -begin - dim_MOD = dimsizes(input_file_info@dataset) - dim_VAR = dimsizes(variables) - var0 = variables(0) - field_type0 = field_types(0) - if (dim_VAR .gt. 1) then ; Assume two variables? - var1 = variables(1) - field_type1 = field_types(1) - else - var1 = "" - field_type1 = "" - end if - - storage_record = storage_name - values = retrieve_from_vault(storage_vault, storage_record) - units = values@units - - ; 'plot_dir' if fetched from ncl.interface - diag_script_base = basename(diag_script) - output_dir = config_user_info@plot_dir - ; ---------------------------------------------- - ; Define output workstation for xy plot - ; ---------------------------------------------- - idx_mod = -1 ; No specific model defined - aux_filename_info = plot_settings@aux_filename_info - output_filename = interface_get_figure_filename(diag_script_base, \ - var0 + var1, \ - field_type0 + field_type1, \ - aux_filename_info, \ - idx_mod) - output_file_path = output_dir + output_filename - wks = gsn_open_wks(output_file_type, output_file_path) - - aux_filename_info = plot_settings@aux_filename_info + "-debug" - output_filename = interface_get_figure_filename(diag_script_base, \ - var0 + var1, \ - field_type0 + field_type1, \ - aux_filename_info, \ - idx_mod) - output_file_path = output_dir + output_filename - if (debuginfo) then - wks_debug = gsn_open_wks(output_file_type, output_file_path) - end if - - values&month@units = "Multi-year monthly mean" - - ; Select colors and other plotting attributes - ; See ./diag_scripts/shared/plot/style.ncl - colors = project_style(diag_script_info, "colors") - dashes = project_style(diag_script_info, "dashes") - thicks = project_style(diag_script_info, "thicks") - annots = project_style(diag_script_info, "annots") - avgstd = project_style(diag_script_info, "avgstd") - ; Select colors and other plotting attributes for multi-model mean - if (diag_script_info@multi_model_mean .eq. 
"y") then - ; project_style evaluates metadata of variable "models" - temp = models - - ; -> keep original "models" in "temp" and restore later - copy_VarMeta(models, temp) - delete(models) - - ; Use "models" to pass on attribute names - models = getvaratts(temp) - do i = 0, dimsizes(models) - 1 - ; Define all original attributes again, but empty - input_file_info@$models(i)$ = "" - end do - input_file_info@dataset = "model_mean" - - ; See ./diag_scripts/shared/plot/style.ncl - colors_mmm = project_style(diag_script_info, "colors") - dashes_mmm = project_style(diag_script_info, "dashes") - thicks_mmm = project_style(diag_script_info, "thicks") - annots_mmm = project_style(diag_script_info, "annots") - avgstd_mmm = project_style(diag_script_info, "avgstd") - delete(models) - models = temp ; Restore original "models" - copy_VarMeta(temp, models) - delete(temp) - end if - - ; Calculate standard deviation of models - if (di@multi_model_mean .ne. "y") then - ; define anyway, because fields are needed as parameters for xy_line - values_stddev = 0 - else - ; Mean, stddev, -1 * stddev, +1 * stddev - values_stddev = new((/4, 12/), float) - ; See ./diag_scripts/shared/plot/style.ncl for which data shall be - ; included in the statistics - temp = ind(avgstd .eq. 0) - do imon = 0, 11 - values_stddev(0, imon) = dim_avg_n_Wrap(values(temp, imon), (/0/)) - values_stddev(1, imon) = dim_stddev_n_Wrap(values(temp, imon), (/0/)) - values_stddev(2, imon) = values_stddev(0, imon) - values_stddev(1, imon) - values_stddev(3, imon) = values_stddev(0, imon) + values_stddev(1, imon) - end do - delete(temp) - end if - - ; Min and max values on x-axis - res = True - res@tmXTOn = False - res@tmYROn = True - if (di@supporting_gridlines .eq. "y") then - res@tmXMajorGrid = True - res@gsnYRefLine = 0 - end if - res@trXMinF = min(values&month) - 0.05 * \ - (max(values&month) - min(values&month)) - res@trXMaxF = max(values&month) + 0.25 * \ - (max(values&month) - min(values&month)) - - res@tmXBMode = "Explicit" - res@tmXBValues = (/0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11/) - res@tmXBLabels = (/"J", "F", "M", "A", "M", "J", \ - "J", "A", "S", "O", "N", "D"/) - - res@tiMainString = plot_settings@title_string - res@tiYAxisString = plot_settings@yaxis_string - res@gsnRightString = values@units - res@gsnRightStringFontHeightF = 16 - res@gsnLeftString = plot_settings@left_plot_subheader - res@gsnLeftStringFontHeightF = 16 - - xy_line(wks, values, values&month, values_stddev, res, False) - - if (debuginfo) then - xy_line(wks_debug, values, values&month, values_stddev, res, debuginfo) - end if -end - -; ############################################################################# - -undef("ts_line_wrapper") -procedure ts_line_wrapper(storage_vault[1]:logical, - di[1]:logical, - plot_settings[1]:logical, - storage_name[1]:string, - debuginfo[1]:logical) -local annots, avgstd, diag_script_base, dim_VAR, field_type0, field_type1, \ - idx_mod, imonth, output_dir, output_filename, output_file_path, res, \ - storage_record, temp, val_area_stddev, values, values_stddev, var0, var1, \ - wks, wks_debug -begin - dim_MOD = dimsizes(input_file_info@dataset) - dim_VAR = dimsizes(variables) - var0 = variables(0) - field_type0 = field_types(0) - if (dim_VAR .gt. 1) then ; Assume two variables? 
- var1 = variables(1) - field_type1 = field_types(1) - else - var1 = "" - field_type1 = "" - end if - - storage_record = storage_name - values = retrieve_from_vault(storage_vault, storage_record) - units = values@units - - ; 'plot_dir' if fetched from ncl.interface - diag_script_base = basename(diag_script) - output_dir = config_user_info@plot_dir - ; ---------------------------------------------- - ; Define output workstation for xy plot - ; ---------------------------------------------- - idx_mod = -1 ; No specific model defined - aux_filename_info = plot_settings@aux_filename_info - output_filename = interface_get_figure_filename(diag_script_base, \ - var0 + var1, \ - field_type0 + field_type1, \ - aux_filename_info, \ - idx_mod) - output_file_path = output_dir + output_filename - wks = gsn_open_wks(output_file_type, output_file_path) - - aux_filename_info = plot_settings@aux_filename_info + "-debug" - output_filename = interface_get_figure_filename(diag_script_base, \ - var0 + var1, \ - field_type0 + field_type1, \ - aux_filename_info, \ - idx_mod) - output_file_path = output_dir + output_filename - if (debuginfo) then - wks_debug = gsn_open_wks(output_file_type, output_file_path) - end if - - ; Determine time range - start_year = min(stringtoint(input_file_info@start_year)) - end_year = max(stringtoint(input_file_info@end_year)) - - ; Select attributes - annots = project_style(diag_script_info, "annots") - avgstd = project_style(diag_script_info, "avgstd") - - ; Prepare calculation of standard deviation of models - if (diag_script_info@multi_model_mean .eq. "y") then - values_stddev = new((/4, end_year - start_year + 1/), float) - val_area_stddev = new((/4, end_year - start_year + 1/), float) - else ; initialize anyway, because both are needed as parameters in xy_line - values_stddev = 0 - val_area_stddev = 0 - end if - - ; Calculate standard deviation of models - if (diag_script_info@multi_model_mean .eq. "y") then - ; See style_$project$.ncl for which data shall - ; be included in the statistics - temp = ind(avgstd .eq. 0) - do imonth = 0, dimsizes(values&years) - 1 - ; For extent - values_stddev(0, imonth) = dim_avg_n_Wrap(values(temp, imonth), (/0/)) - values_stddev(1, imonth) = dim_stddev_n_Wrap(values(temp, imonth), (/0/)) - values_stddev(2, imonth) = \ - values_stddev(0, imonth) - values_stddev(1, imonth) - values_stddev(3, imonth) = \ - values_stddev(0, imonth) + values_stddev(1, imonth) - end do - delete(temp) - end if - - res = True - res@tmXTOn = False - res@tmYROn = True - if (di@supporting_gridlines .eq. 
"y") then - res@tmXMajorGrid = True - res@gsnYRefLine = 0 - end if - res@tmXBFormat = "4f" - res@tiMainString = plot_settings@title_string - res@tiYAxisString = plot_settings@yaxis_string - res@tiXAxisString = plot_settings@xaxis_string - res@gsnRightString = values@units - res@gsnRightStringFontHeightF = 16 - res@gsnLeftString = plot_settings@left_plot_subheader - res@gsnLeftStringFontHeightF = 16 - res@trXMinF = min(di@years) - 0.05 * (max(di@years) - min(di@years)) - res@trXMaxF = max(di@years) + 0.25 * (max(di@years) - min(di@years)) - - xy_line(wks, values, values&years, values_stddev, res, False) - - if (debuginfo) then - xy_line(wks_debug, values, di@years, values_stddev, res, debuginfo) - end if - -end diff --git a/esmvaltool/diag_scripts/shared/plot/__init__.py b/esmvaltool/diag_scripts/shared/plot/__init__.py index c6ab23768b..6e4021f538 100644 --- a/esmvaltool/diag_scripts/shared/plot/__init__.py +++ b/esmvaltool/diag_scripts/shared/plot/__init__.py @@ -1,16 +1,20 @@ """Module that provides common plot functions.""" from ._plot import ( - get_path_to_mpl_style, get_dataset_style, - quickplot, + get_path_to_mpl_style, + global_contourf, + global_pcolormesh, multi_dataset_scatterplot, + quickplot, scatterplot, ) __all__ = [ 'get_path_to_mpl_style', 'get_dataset_style', + 'global_contourf', + 'global_pcolormesh', 'quickplot', 'multi_dataset_scatterplot', 'scatterplot', diff --git a/esmvaltool/diag_scripts/shared/plot/_plot.py b/esmvaltool/diag_scripts/shared/plot/_plot.py index 0fb8366579..092479a999 100644 --- a/esmvaltool/diag_scripts/shared/plot/_plot.py +++ b/esmvaltool/diag_scripts/shared/plot/_plot.py @@ -1,9 +1,14 @@ """Common plot functions.""" import logging import os +from copy import deepcopy +import cartopy.crs as ccrs +import dask.array as da import iris.quickplot +import matplotlib.colors as colors import matplotlib.pyplot as plt +import numpy as np import yaml logger = logging.getLogger(__name__) @@ -74,6 +79,7 @@ def get_dataset_style(dataset, style_file=None): """Retrieve the style information for the given dataset.""" if style_file is None: style_file = 'cmip5.yml' + logger.debug("Using default style file {style_file}") if not style_file.endswith('.yml'): style_file += '.yml' base_dir = os.path.dirname(os.path.realpath(__file__)) @@ -85,21 +91,20 @@ def get_dataset_style(dataset, style_file=None): with open(filepath, 'r') as infile: style = yaml.safe_load(infile) else: - raise IOError("Invalid input: could not open style file " - "'{}'".format(filepath)) + raise FileNotFoundError(f"Cannot open style file {filepath}") logger.debug("Using style file %s for dataset %s", filepath, dataset) # Check if file has entry for unknown dataset default_dataset = 'default' options = ['color', 'dash', 'thick', 'mark', 'avgstd', 'facecolor'] if default_dataset not in style: - raise IOError("Style file '{}' does not contain default information " - "for unknown datasets".format(filepath)) + raise ValueError(f"Style file {filepath} does not contain section " + f"[{default_dataset}] (used for unknown datasets)") for option in options: if option not in style[default_dataset]: - raise IOError("Style file '{}' does not contain '{}' default " - "information for unknown " - "datasets".format(filepath, option)) + raise ValueError( + f"Style file {filepath} does not contain default information " + f"for '{option}' (under section [{default_dataset}])") # Check if dataset is available if not style.get(dataset): @@ -111,24 +116,234 @@ def get_dataset_style(dataset, style_file=None): # Get 
compulsory information
     for option in options:
         if option not in style[dataset]:
+            default_option = style[default_dataset][option]
             logger.warning(
-                "No style information '%s' found for dataset "
-                "'%s', using default value for unknown datasets", option,
-                dataset)
-            style[dataset].update({option: style[default_dataset][option]})
+                "No style information '%s' found for dataset '%s', using "
+                "default value '%s' for unknown datasets", option, dataset,
+                default_option)
+            style[dataset][option] = default_option

     return style[dataset]


-def quickplot(cube, filename, plot_type, **kwargs):
+def _check_cube(cube):
+    """Check if cube is 2D and contains latitude and longitude."""
+    if cube.ndim != 2:
+        raise ValueError(
+            f"Expected 2D cube, got {cube.ndim:d}D cube: "
+            f"{cube.summary(shorten=True)}")
+    required_coords = ['latitude', 'longitude']
+    for coord_name in required_coords:
+        if not cube.coords(coord_name, dim_coords=True):
+            raise iris.exceptions.CoordinateNotFoundError(
+                f"Cube {cube.summary(shorten=True)} does not contain "
+                f"necessary dimensional coordinate '{coord_name}' for "
+                f"plotting a global map")
+
+
+def _truncate_colormap(cmap_name, minval=0.0, maxval=1.0, n_colors=100):
+    """Truncate a colormap to the range [minval, maxval]."""
+    cmap = plt.get_cmap(cmap_name)
+    new_cmap = colors.LinearSegmentedColormap.from_list(
+        f'trunc({cmap_name},{minval:.2f},{maxval:.2f})',
+        cmap(np.linspace(minval, maxval, n_colors)))
+    return new_cmap
+
+
+def _get_centered_cmap(cmap_name, vmin, vmax, center, n_colors=100):
+    """Get a colormap whose midpoint falls on ``center``."""
+    if not vmin < center < vmax:
+        raise ValueError(
+            f"Expected monotonic increase vmin < center < vmax, got vmin = "
+            f"{vmin}, vmax = {vmax}, center = {center}")
+    if center - vmin > vmax - center:
+        minval = 0.0
+        maxval = 0.5 + (vmax - center) / (center - vmin) / 2.0
+    else:
+        minval = 0.5 - (center - vmin) / (vmax - center) / 2.0
+        maxval = 1.0
+    return _truncate_colormap(cmap_name, minval, maxval, n_colors)
+
+
+def global_contourf(cube,
+                    cbar_center=None,
+                    cbar_label=None,
+                    cbar_range=None,
+                    cbar_ticks=None,
+                    **kwargs):
+    """Plot global filled contour plot.
+
+    Note
+    ----
+    This is only possible if the cube is 2D with dimensional coordinates
+    `latitude` and `longitude`.
+
+    Parameters
+    ----------
+    cube : iris.cube.Cube
+        Cube to plot.
+    cbar_center : float, optional
+        Central value for the colormap, useful for diverging colormaps. Can
+        only be used if ``cbar_range`` is given.
+    cbar_label : str, optional
+        Label for the colorbar.
+    cbar_range : list of float, optional
+        Range of the colorbar (first and second list element) and number of
+        distinct colors (third element). See :func:`numpy.linspace`.
+    cbar_ticks : list, optional
+        Ticks for the colorbar.
+    **kwargs
+        Keyword arguments for :func:`iris.plot.contourf`.
+
+    Returns
+    -------
+    matplotlib.contour.QuadContourSet
+        Plot object.
+
+    Raises
+    ------
+    iris.exceptions.CoordinateNotFoundError
+        Input :class:`iris.cube.Cube` does not contain the necessary
+        dimensional coordinates ``'latitude'`` and ``'longitude'``.
+    ValueError
+        Input :class:`iris.cube.Cube` is not 2D.
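+
+    Example
+    -------
+    A minimal usage sketch (file name and colorbar range are
+    illustrative)::
+
+        import iris
+        import matplotlib.pyplot as plt
+
+        from esmvaltool.diag_scripts.shared.plot import global_contourf
+
+        cube = iris.load_cube('tas_climatology.nc')  # 2D (lat, lon) cube
+        global_contourf(cube, cbar_label='K', cbar_range=[240.0, 310.0, 15])
+        plt.savefig('tas_map.png')  # the function itself does not save
+        plt.close()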
+ + """ + kwargs = deepcopy(kwargs) + logger.debug("Plotting global filled contour plot for cube %s", + cube.summary(shorten=True)) + _check_cube(cube) + + # Adapt colormap if necessary + if cbar_center is not None: + if cbar_range is None: + raise ValueError( + "'cbar_center' can only be used if 'cbar_range' is given") + cmap = kwargs.get('cmap', plt.get_cmap()) + n_colors = cbar_range[2] if len(cbar_range) > 2 else 100 + cmap = _get_centered_cmap(cmap, cbar_range[0], cbar_range[1], + cbar_center, n_colors) + kwargs['cmap'] = cmap + + # Create plot + if cbar_range is not None: + levels = np.linspace(*cbar_range) + kwargs['levels'] = levels + axes = plt.axes(projection=ccrs.Robinson(central_longitude=10)) + plt.sca(axes) + + # see https://github.com/SciTools/cartopy/issues/2457 + # and https://github.com/SciTools/cartopy/issues/2468 + kwargs['transform_first'] = True + npx = da if cube.has_lazy_data() else np + map_plot = iris.plot.contourf( + cube.copy(npx.ma.filled(cube.core_data(), np.nan)), + **kwargs, + ) + + # Appearance + axes.gridlines(color='lightgrey', alpha=0.5) + axes.coastlines() + axes.set_global() + colorbar = plt.colorbar(orientation='horizontal', aspect=30) + if cbar_ticks is not None: + colorbar.set_ticks(cbar_ticks) + colorbar.set_ticklabels([str(tick) for tick in cbar_ticks]) + elif cbar_range is not None: + ticks = np.linspace(*cbar_range[:2], + 10, + endpoint=False, + dtype=type(cbar_range[0])) + colorbar.set_ticks(ticks) + colorbar.set_ticklabels([str(tick) for tick in ticks]) + if cbar_label is not None: + colorbar.set_label(cbar_label) + return map_plot + + +def global_pcolormesh(cube, + cbar_center=None, + cbar_label=None, + cbar_ticks=None, + **kwargs): + """Plot global color mesh. + + Note + ---- + This is only possible if the cube is 2D with dimensional coordinates + `latitude` and `longitude`. + + Parameters + ---------- + cube : iris.cube.Cube + Cube to plot. + cbar_center : float, optional + Central value for the colormap, useful for diverging colormaps. Can + only be used if ``vmin`` and ``vmax`` are given. + cbar_label : str, optional + Label for the colorbar. + cbar_ticks : list, optional + Ticks for the colorbar. + **kwargs + Keyword argument for :func:`iris.plot.pcolormesh()`. + + Returns + ------- + matplotlib.contour.QuadContourSet + Plot object. + + Raises + ------ + iris.exceptions.CoordinateNotFoundError + Input :class:`iris.cube.Cube` does not contain the necessary + dimensional coordinates ``'latitude'`` and ``'longitude'``. + ValueError + Input :class:`iris.cube.Cube` is not 2D. 
+ + """ + kwargs = deepcopy(kwargs) + logger.debug("Plotting global filled contour plot for cube %s", + cube.summary(shorten=True)) + _check_cube(cube) + + # Adapt colormap if necessary + if cbar_center is not None: + if not ('vmin' in kwargs and 'vmax' in kwargs): + raise ValueError( + "'cbar_center' can only be used if 'vmin' and 'vmax' are " + "given") + cmap = kwargs.get('cmap', plt.get_cmap()) + cmap = _get_centered_cmap(cmap, kwargs['vmin'], kwargs['vmax'], + cbar_center) + kwargs['cmap'] = cmap + + # Create plot + axes = plt.axes(projection=ccrs.Robinson(central_longitude=10)) + plt.sca(axes) + map_plot = iris.plot.pcolormesh(cube, **kwargs) + + # Appearance + axes.gridlines(color='lightgrey', alpha=0.5) + axes.coastlines() + axes.set_global() + colorbar = plt.colorbar(orientation='horizontal', aspect=30) + if cbar_ticks is not None: + colorbar.set_ticks(cbar_ticks) + colorbar.set_ticklabels([str(tick) for tick in cbar_ticks]) + if cbar_label is not None: + colorbar.set_label(cbar_label) + return map_plot + + +def quickplot(cube, plot_type, filename=None, **kwargs): """Plot a cube using one of the iris.quickplot functions.""" logger.debug("Creating '%s' plot %s", plot_type, filename) plot_function = getattr(iris.quickplot, plot_type) fig = plt.figure() plot_function(cube, **kwargs) - # plt.gca().coastlines() - fig.savefig(filename) - plt.close(fig) + if filename: + fig.savefig(filename) + return fig def multi_dataset_scatterplot(x_data, y_data, datasets, filepath, **kwargs): @@ -171,7 +386,6 @@ def multi_dataset_scatterplot(x_data, y_data, datasets, filepath, **kwargs): ValueError `x_data`, `y_data`, `datasets` or `plot_kwargs` do not have the same size. - """ # Allowed kwargs allowed_kwargs = [ @@ -203,20 +417,21 @@ def multi_dataset_scatterplot(x_data, y_data, datasets, filepath, **kwargs): style['facecolor'] # Plot - axes.plot( - x_data[idx], - y_data[idx], - markeredgecolor=style['color'], - markerfacecolor=facecolor, - marker=style['mark'], - **(kwargs.get('plot_kwargs', empty_dict)[idx])) - - # Costumize plot + axes.plot(x_data[idx], + y_data[idx], + markeredgecolor=style['color'], + markerfacecolor=facecolor, + marker=style['mark'], + **(kwargs.get('plot_kwargs', empty_dict)[idx])) + + # Customize plot legend = _process_axes_functions(axes, kwargs.get('axes_functions')) # Save plot - fig.savefig( - filepath, additional_artists=[legend], **kwargs.get('save_kwargs', {})) + savefig_kwargs = dict(kwargs.get('save_kwargs', {})) + if legend is not None: + savefig_kwargs['bbox_extra_artists'] = [legend] + fig.savefig(filepath, **savefig_kwargs) logger.info("Wrote %s", filepath) plt.close() @@ -256,7 +471,6 @@ def scatterplot(x_data, y_data, filepath, **kwargs): given) `plot_kwargs` is not array-like. ValueError `x_data`, `y_data` or `plot_kwargs` do not have the same size. 
- """ # Allowed kwargs allowed_kwargs = [ @@ -270,8 +484,8 @@ def scatterplot(x_data, y_data, filepath, **kwargs): raise TypeError("{} is not a valid keyword argument".format(kwarg)) # Check parameters - _check_size_of_parameters(x_data, y_data, kwargs.get( - 'plot_kwargs', x_data)) + _check_size_of_parameters(x_data, y_data, + kwargs.get('plot_kwargs', x_data)) empty_dict = [{} for _ in x_data] # Create matplotlib instances @@ -290,11 +504,12 @@ def scatterplot(x_data, y_data, filepath, **kwargs): axes.plot(x_vals, y_data[idx], **(kwargs.get('plot_kwargs', empty_dict)[idx])) - # Costumize plot + # Customize plot legend = _process_axes_functions(axes, kwargs.get('axes_functions')) # Save plot - fig.savefig( - filepath, additional_artists=[legend], **kwargs.get('save_kwargs', {})) + fig.savefig(filepath, + bbox_extra_artists=[legend], + **kwargs.get('save_kwargs', {})) logger.info("Wrote %s", filepath) plt.close() diff --git a/esmvaltool/diag_scripts/shared/plot/aux_plotting.ncl b/esmvaltool/diag_scripts/shared/plot/aux_plotting.ncl index dd665ccd95..d4b2358c49 100644 --- a/esmvaltool/diag_scripts/shared/plot/aux_plotting.ncl +++ b/esmvaltool/diag_scripts/shared/plot/aux_plotting.ncl @@ -6,8 +6,8 @@ ; ; Contents: ; -; procedure create_legend_lines ; function output_type +; procedure create_legend_lines ; procedure copy_VarAtt_sel ; function panelling ; function get_plot_dir @@ -18,12 +18,60 @@ ; procedure add_errorbar ; procedure horizontal_whiskers ; procedure add_prediction_error +; function month_sel +; function lat_names +; procedure add_line +; procedure add_scatt +; procedure add_legend +; function calcRegCoeffs +; function genZonalMeans +; function calcMeanAnnCycleMonthly +; function calcMeanAnnCycleAnnual +; procedure rmMeanAnnCycle +; function apfiltersmooth +; procedure smoothAnomalies +; function clmMon2clmDayn ; ; ############################################################################# load "$diag_scripts/../interface_scripts/auxiliary.ncl" load "$diag_scripts/../interface_scripts/logging.ncl" +; ############################################################################# +undef("output_type") +function output_type() +; +; Arguments +; +; Return value +; A string with the output file type +; +; Description +; Provides a default, if file type is not explicitly specified +; +; Caveats +; +; Modification history +; 20131028-gottschaldt_klaus-dirk: written. +; +local funcname, scriptname, file_type +begin + + funcname = "output_type" + scriptname = "diag_scripts/shared/plot/aux_plotting.ncl" + enter_msg(scriptname, funcname) + + file_type = config_user_info@output_file_type + if (ismissing(file_type)) then + file_type = "ps" + end if + file_type = str_lower(file_type) + + leave_msg(scriptname, funcname) + return(file_type) + +end + ; ############################################################################# undef("create_legend_lines") procedure create_legend_lines(labels:string, \ @@ -47,12 +95,12 @@ procedure create_legend_lines(labels:string, \ ; ; Caveats ; -; Modification history: -; 20150511-A_laue_ax: added safe lower limits for panelling plot -; 20150508-A_righ_ma: added lines/markers option. -; 20150120-A_gott_kl: remove pre-existing file type suffix -; 20140305-A_righ_ma: modified to plot always as epsi format. -; 20140219-A_fran_fr: written. +; Modification history +; 20150511-lauer_axel: added safe lower limits for panelling plot +; 20150508-righi_mattia: added lines/markers option. 
+; 20150120-gottschaldt_klaus-dirk: remove pre-existing file type suffix +; 20140305-righi_mattia: modified to plot always as epsi format. +; 20140219-winterstein_franziska: written. ; local funcname, scriptname, region, temp, outfile, n_suff, wks_legend begin @@ -61,15 +109,9 @@ begin scriptname = "diag_scripts/shared/plot/aux_plotting.ncl" enter_msg(scriptname, funcname) - ; Flag if RGB or RGBA is used for colors - if (dimsizes(dimsizes(styles@colors)).ne.1) then - RGB = True - else - RGB = False - end if - ; Open workstation - wks_legend = gsn_open_wks("epsi", outfile) + file_type = output_type() + wks_legend = gsn_open_wks(file_type, outfile) ; General resources res = True @@ -126,13 +168,8 @@ begin ii = 0 do while (ii.lt.dim_LAB) ; Set color - if (RGB) then - resL@gsLineColor = styles@colors(ii, :) - resL@gsMarkerColor = styles@colors(ii, :) - else - resL@gsLineColor = styles@colors(ii) - resL@gsMarkerColor = styles@colors(ii) - end if + resL@gsLineColor = styles@colors(ii) + resL@gsMarkerColor = styles@colors(ii) ; Set dash pattern if (isatt(styles, "dashes")) then resL@gsLineDashPattern = styles@dashes(ii) @@ -177,41 +214,6 @@ begin end -; ############################################################################# -undef("output_type") -function output_type() -; -; Arguments -; -; Return value -; A string with the output file type -; -; Description -; Provides a default, if file type is not explicitly specified -; -; Caveats -; -; Modification history -; 20131028-A_gott_kl: written. -; -local funcname, scriptname, file_type -begin - - funcname = "output_type" - scriptname = "diag_scripts/shared/plot/aux_plotting.ncl" - enter_msg(scriptname, funcname) - - file_type = config_user_info@output_file_type - if (ismissing(file_type)) then - file_type = "ps" - end if - file_type = str_lower(file_type) - - leave_msg(scriptname, funcname) - return(file_type) - -end - ; ############################################################################# undef("copy_VarAtt_sel") procedure copy_VarAtt_sel(var1, var2, sel: string) @@ -223,9 +225,6 @@ procedure copy_VarAtt_sel(var1, var2, sel: string) ; sel: string (or list of strings) that specify the BEGINNING letters ; of the attributes to copy ; -; Return value -; var2 gets additional attributes -; ; Description ; Intended to copy selected plot ressources for use in a different ; plot routine that may not allow all ressources from var1. @@ -233,8 +232,8 @@ procedure copy_VarAtt_sel(var1, var2, sel: string) ; ; Caveats: ; -; Modification history: -; 20141227-A_gott_kl written. +; Modification history +; 20141227-gottschaldt_klaus-dirk written. ; local funcname, scriptname, atts, i1, i2, n begin @@ -294,7 +293,7 @@ function panelling(wks, ; Might not be fit to be used with non-NCL routines. ; ; Modification history -; 20131112-A_gott_kl: written. +; 20131112-gottschaldt_klaus-dirk: written. ; local funcname, scriptname, plots, info, wks, outfile begin @@ -339,12 +338,12 @@ begin end if ia = 0 ie = nvert * nhori - 1 - ie = min((/dimsizes(plots) - 1, ie/)) ; a_laue_ax: added for cloud diag + ie = min((/dimsizes(plots) - 1, ie/)) do ipage = 0, npages - 1 gsn_panel(wks, plots(ia:ie), (/nvert, nhori/), pres) ia = ie + 1 ie = ia + (nvert * nhori) - 1 - ie = min((/dimsizes(plots) - 1, ie/)) ; a_laue_ax + ie = min((/dimsizes(plots) - 1, ie/)) end do else pres@gsnPaperOrientation = "auto" @@ -376,7 +375,7 @@ function get_plot_dir() ; Caveats ; ; Modification history -; 20131104-A_gott_kl: written. +; 20131104-gottschaldt_klaus-dirk: written. 
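;
; Illustrative sketch (assumes config_user_info is populated by the
; framework): typical combined use of the helpers in this file is
;
;   file_type = output_type()                       ; falls back to "ps"
;   wks = gsn_open_wks(file_type, get_plot_dir() + "my_figure")
;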
; local funcname, scriptname, plot_dir begin @@ -400,7 +399,7 @@ end ; ############################################################################# undef("get_outfile_name") -function get_outfile_name(add_specs[1] : string) +function get_outfile_name(add_specs[1]:string) ; ; Arguments ; add_specs: string containing specific elements to be added to the file @@ -415,8 +414,8 @@ function get_outfile_name(add_specs[1] : string) ; Fetches string with additional elements specified within diag script ; ; Modification history -; 20131204-A_senf_da: generalized naming. -; 20131104-A_gott_kl: written. +; 20131204-senftleben_daniel: generalized naming. +; 20131104-gottschaldt_klaus-dirk: written. ; local funcname, scriptname, outfile, file_type, plot_dir, diag_script_base, \ add_specs, output_dir @@ -465,7 +464,7 @@ function get_wks(wks_in, ; Caveats ; ; Modification history -; 20131113-A_gott_kl: written. +; 20131113-gottschaldt_klaus-dirk: written. ; local funcname, scriptname, wks_in, diag_script, add_specs, file_type begin @@ -493,11 +492,11 @@ end ; ############################################################################# undef("add_markers") -procedure add_markers(wks[1] : graphic, - plot[1] : graphic, - res_in[1] : logical, - xpos_in : numeric, - ypos_in : numeric) +procedure add_markers(wks[1]:graphic, + plot[1]:graphic, + res_in[1]:logical, + xpos_in:numeric, + ypos_in:numeric) ; ; Arguments ; wks: valid workstation, e.g. created by get_wks @@ -514,8 +513,9 @@ procedure add_markers(wks[1] : graphic, ; ; Caveats: ; -; Modification history: -; 20140224-A_gott_kl: written for use with profile_plev.ncl in Emmons.ncl +; Modification history +; 20140224-gottschaldt_klaus-dirk: written for use with profile_plev.ncl +; in Emmons.ncl ; local funcname, scriptname, res, str, atts, iatt, color begin @@ -562,11 +562,11 @@ end ; ############################################################################# undef("add_num_markers") -procedure add_num_markers(wks[1] : graphic, - plot[1] : graphic, - res_in[1] : logical, - xpos_in : numeric, - ypos_in : numeric) +procedure add_num_markers(wks[1]:graphic, + plot[1]:graphic, + res_in[1]:logical, + xpos_in:numeric, + ypos_in:numeric) ; ; Arguments ; wks: valid workstation, e.g. created by get_wks @@ -585,8 +585,8 @@ procedure add_num_markers(wks[1] : graphic, ; ; Caveats: ; -; Modification history: -; 20150914-A_wenz_sa: written. +; Modification history +; 20150914-wenzel_sabrina: written. ; local funcname, scriptname, xpos, ypos, res, str, atts, iatt, color begin @@ -647,11 +647,11 @@ end ; ############################################################################# undef("add_errorbar") -procedure add_errorbar(wks[1] : graphic, - plot[1] : graphic, - res_in[1] : logical, - xpos_in : numeric, - ypos_in : numeric) +procedure add_errorbar(wks[1]:graphic, + plot[1]:graphic, + res_in[1]:logical, + xpos_in:numeric, + ypos_in:numeric) ; ; Arguments ; wks: valid workstation, e.g. created by get_wks @@ -670,8 +670,8 @@ procedure add_errorbar(wks[1] : graphic, ; ; Caveats: ; -; Modification history: -; 20150914-A_wenz_sa: written. +; Modification history +; 20150914-wenzel_sabrina: written. 
; local funcname, scriptname, xpos, ypos, res, str, atts, iatt, color begin @@ -780,11 +780,11 @@ end ; ############################################################################# undef("horizontal_whiskers") -procedure horizontal_whiskers(wks[1] : graphic, - plot[1] : graphic, - res_in[1] : logical, - xmin_in : numeric, - xmax_in : numeric, +procedure horizontal_whiskers(wks[1]:graphic, + plot[1]:graphic, + res_in[1]:logical, + xmin_in:numeric, + xmax_in:numeric, ypos_in: numeric) ; ; Arguments @@ -812,7 +812,7 @@ procedure horizontal_whiskers(wks[1] : graphic, ; www.ncl.ucar.edu/Document/Graphics/Interfaces/gsn_add_polyline.shtml ; ; Modification history -; 20140224-A_gott_kl: written. +; 20140224-gottschaldt_klaus-dirk: written. ; local funcname, scriptname, n, i, j, xmin, xmax, ypos, imiss, ivalid, result, \ x, y, res, str @@ -891,11 +891,11 @@ end ; ############################################################################# undef("add_prediction_error") -procedure add_prediction_error(wks[1] : graphic, - plot[1] : graphic, - res_in[1] : logical, - xpos_in : numeric, - ypos_in : numeric) +procedure add_prediction_error(wks[1]:graphic, + plot[1]:graphic, + res_in[1]:logical, + xpos_in:numeric, + ypos_in:numeric) ; ; Arguments ; wks: valid workstation, e.g. created by get_wks @@ -914,8 +914,8 @@ procedure add_prediction_error(wks[1] : graphic, ; ; Caveats: ; -; Modification history: -; 20150914-A_wenz_sa: written. +; Modification history +; 20150914-wenzel_sabrina: written. ; local funcname, scriptname, xpos, ypos, res, str, atts, iatt, color begin @@ -952,3 +952,945 @@ begin leave_msg(scriptname, funcname) end + +; ############################################################################# +undef("month_sel") +function month_sel(month_names:string) +; +; Arguments +; +; +; Return value +; +; +; Description +; +; +; Caveats +; +; Modification history +; 20190605-righi_mattia: ported from v1. +; +local month_names, month_n, i +begin + + if (.not.any(ismissing(ind(month_names.eq."ANN")))) then + month_n = new(12, "integer") + month_n = (/1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12/) + else + month_n = new(dimsizes(month_names), "integer") + do i = 0, dimsizes(month_names)-1 + if (.not.ismissing(ind(month_names(i).eq."JAN"))) then + month_n(i) = 1 + end if + if (.not.ismissing(ind(month_names(i).eq."FEB"))) then + month_n(i) = 2 + end if + if (.not.ismissing(ind(month_names(i).eq."MAR"))) then + month_n(i) = 3 + end if + if (.not.ismissing(ind(month_names(i).eq."APR"))) then + month_n(i) = 4 + end if + if (.not.ismissing(ind(month_names(i).eq."MAY"))) then + month_n(i) = 5 + end if + if (.not.ismissing(ind(month_names(i).eq."JUN"))) then + month_n(i) = 6 + end if + if (.not.ismissing(ind(month_names(i).eq."JUL"))) then + month_n(i) = 7 + end if + if (.not.ismissing(ind(month_names(i).eq."AUG"))) then + month_n(i) = 8 + end if + if (.not.ismissing(ind(month_names(i).eq."SEP"))) then + month_n(i) = 9 + end if + if (.not.ismissing(ind(month_names(i).eq."OCT"))) then + month_n(i) = 10 + end if + if (.not.ismissing(ind(month_names(i).eq."NOV"))) then + month_n(i) = 11 + end if + if (.not.ismissing(ind(month_names(i).eq."DEC"))) then + month_n(i) = 12 + end if + end do + end if + + return(month_n) + +end + +; ############################################################################# +undef("lat_names") +function lat_names(lat_val:numeric) +; +; Arguments +; +; +; Return value +; +; +; Description +; +; +; Caveats +; +; Modification history +; 20190605-righi_mattia: ported from v1. 
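+;
+; Usage (illustrative sketch): formats latitudes as hemisphere-tagged
+; labels, e.g.
+;
+;   names = lat_names((/-30., 0., 45./))
+;   ; -> "30S", "Equator", "45N" (numeric formatting follows NCL's
+;   ;    default numeric-to-string coercion)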
+; +local lat_name, lat_val, i +begin + + lat_name = new((/dimsizes(lat_val)/), "string") + do i = 0, dimsizes(lat_val) - 1 + if (lat_val(i).gt.0.) then + lat_name(i) = lat_val(i) + "N" + end if + if (lat_val(i).eq.0.) then + lat_name(i) = "Equator" + end if + if (lat_val(i).lt.0.) then + lat_name(i) = -lat_val(i) + "S" + end if + end do + + return (lat_name) + +end + +; ############################################################################# +undef("add_line") +procedure add_line(wks, + plot, + x, + yval, + line_color, + line_dash) +; +; Arguments +; +; +; Description +; +; +; Caveats +; +; Modification history +; 20190605-righi_mattia: ported from v1. +; +local plres, str +begin + + ; gsn_add_polyline crashes if the arrays x and/or yval contain only missing + if (all(ismissing(x)).or.all(ismissing(yval))) then + return + end if + + plres = True + plres@gsLineColor = line_color + plres@gsLineDashPattern = line_dash + plres@gsLineThicknessF = 2.5 + str = unique_string("polyline") + plot@$str$ = gsn_add_polyline(wks, plot, x, yval, plres) + +end + +; ############################################################################# +undef("add_scatt") +procedure add_scatt(wks, + plot, + x, + yval, + mark_color, + mark_ind) +; +; Arguments +; +; +; Description +; +; +; Caveats +; +; Modification history +; 20190605-righi_mattia: ported from v1. +; +local plres, str +begin + + plres = True + plres@gsMarkerColor = mark_color + plres@gsMarkerIndex = mark_ind + str = unique_string("polymarker") + plot@$str$ = gsn_add_polymarker(wks, plot, x, yval, plres) + +end + +; ############################################################################# +undef("add_legend") +procedure add_legend(wks, + model:string, + colors:string, + dashs:numeric, + scatters:string, + ticknesses:numeric, + place:string) +; +; Arguments +; +; +; Return value +; +; +; Description +; +; +; Caveats +; +; Modification history +; 20190605-righi_mattia: ported from v1. 
+; +local model, dim_mod, colors, dashs, edge_x, edge_y, edge_res, tx1res, \ + dim_lb, xleg, xl, yleg, yl, xtxt, ytxt, place, dim_lines, space_lines, \ + lin_sp, scatters, ticknesses +begin + + dim_mod = dimsizes(model) + dim_lines = floattoint(dim_mod / 3)+1 + if ((mod(dim_mod, 3)).eq.0) then + dim_lines = floattoint(dim_mod /3) + 1 + else + dim_lines = floattoint(dim_mod / 3) + 2 + end if + lin_sp = 0.025 + space_lines = (dim_lines + 1) * lin_sp + + if (place.eq."bottom") + top_line = 0.20 + end if + if (place.eq."middle") + top_line = 0.42 + end if + if (place.eq."top") + top_line = 0.99 + end if + if ((place.ne."bottom").and.(place.ne."middle").and.(place.ne."top")) then + top_line = tofloat(place) + end if + bot_line = top_line-space_lines + edge_x = (/ 0.12, 0.92, 0.92, 0.12, 0.12/) + edge_y = (/space_lines+bot_line, space_lines+bot_line, bot_line, \ + bot_line, space_lines+bot_line/) + edge_res = True + edge_res@gsLineColor = "white" + gsn_polyline_ndc(wks, edge_x, edge_y, edge_res) + + dim_lb = dimsizes(model) + + gs1res = True + tx1res = True + tx1res@txFontHeightF = lin_sp / 5 * 2 + + xleg = new((/3 * dim_lines, 4/), "float") + xl = new((/3, 4/), "float") + + do i = 0, 2 + xl(0, :) = fspan((edge_x(0) + 0.02), (edge_x(0) + 0.07), 4) + xl(1, :) = fspan(((edge_x(2) + edge_x(0)) / 2 - 0.09), \ + ((edge_x(2) + edge_x(0)) / 2 - 0.04), 4) + xl(2, :) = fspan((edge_x(2) - 0.20), (edge_x(2) - 0.15), 4) + end do + + nt = 0 + do j = 0, dim_lines - 1 + do i = 0, 2 + xleg(nt, :) = (/xl(i, :)/) + nt = nt + 1 + end do + end do + + yleg = new((/3 * dim_lines, 4/), "float") + yl = new(dim_lines, "float") + + do i = 0, dim_lines - 1 + yl(dim_lines - 1 - i) = edge_y(3) + \ + (((edge_y(0) - (edge_y(3))) / dim_lines) * (i + 1)) - (lin_sp) + end do + + nt = 0 + do j = 0, dim_lines - 1 + do i = 0, 2 + yleg(nt, :) = (/yl(j)/) + nt = nt + 1 + end do + end do + + xtxt = new((/dim_lines * 3/), "float") + ytxt = new((/dim_lines * 3/), "float") + + nt = 0 + do j = 0, dim_lines - 1 + do i = 0, 2 + xtxt(nt) = xl(i, 3) + lin_sp / 5. + nt = nt + 1 + end do + end do + nt = 0 + do i = 0, dim_lines - 1 + do j = 0, 2 + ytxt(nt) = (/yl(i)/) + nt = nt + 1 + end do + end do + + do i = 0, (dimsizes(model) - 1) + gs1res@gsLineColor = colors(i) + gs1res@gsLineDashPattern = dashs(i) + gs1res@gsLineThicknessF = ticknesses(i) + tx1res@txFontColor = colors(i) + tx1res@txJust = "CenterLeft" + + if (scatters(i).eq."Markers") + gs1res@gsMarkerColor = colors(i) + gs1res@gsMarkerIndex = dashs(i) + gs1res@gsMarkerSizeF = 0.01 + gs1res@gsMarkerThicknessF = 1.5 + gsn_polymarker_ndc(wks, xleg(i, 2), yleg(i, 2), gs1res) + else + gsn_polyline_ndc(wks, xleg(i, :), yleg(i, :), gs1res) + end if + gsn_text_ndc(wks, model(i), xtxt(i), ytxt(i), tx1res) + end do + +end + +; ############################################################################# +undef("calcRegCoeffs") +function calcRegCoeffs(y:numeric) +; +; Arguments +; +; +; Return value +; +; +; Description +; +; +; Caveats +; +; Modification history +; 20190605-righi_mattia: ported from v1. 
+; +local y, d_y, d_t, ma, x, funcx, pi, t, a +begin + d_y = dimsizes(y) + d_t = d_y(0) + + ma = 14 + x = fspan(1, d_t, d_t) + a = new((/ma/), typeof(y)) + + funcx = new((/ma, d_t/), "float") + pi = 4.0 * atan(1.0) + t = 2 * pi * (x - 0.5) / 12 + + funcx(0, :) = 1 + funcx(1, :) = sin(t) + funcx(2, :) = cos(t) + funcx(3, :) = sin(2 * t) + funcx(4, :) = cos(2 * t) + funcx(5, :) = sin(3 * t) + funcx(6, :) = cos(3 * t) + funcx(7, :) = sin(4 * t) + funcx(8, :) = cos(4 * t) + funcx(9, :) = x + funcx(10, :) = sin(t) * x + funcx(11, :) = cos(t) * x + funcx(12, :) = sin(2 * t) * x + funcx(13, :) = cos(2 * t) * x + + a(:) = reg_multlin(y(:), funcx, False) + a@_FillValue = 1e20 + delete_VarAtts(a, "constant") + + a@long_name = "Coefficients A and B" + a!0 = "number of coefficients" + + return(a) + +end + +; ########################################################################### +undef("genZonalMeans") +function genZonalMeans(x:numeric, + lat:numeric, + startLat:numeric, + endLat:numeric) +; +; Arguments +; +; +; Return value +; +; +; Description +; +; +; Caveats +; +; Modification history +; 20190605-righi_mattia: ported from v1. +; +local dimx, ntim, nlat, nzone, z, t, i, j, cnt, idx_st, idx_ed, xZonMean +begin + dimx = dimsizes(x) + ntim = dimx(0) + nlat = dimx(1) + nlon = dimx(2) + nzone = dimsizes(startLat) + + nmos = 12 + modCheck("genZonalMeans", ntim, nmos) + + xZonMean = new((/ntim, nzone/), typeof(x)) + + pi = 4. * atan(1.0) + rad = (pi / 180.) + + ; loop through lat_band + do t = 0, (ntim-1) + do z = 0, (nzone-1) + idx_st = 0 + do while (lat(idx_st).lt.startLat(z)) + idx_st = idx_st + 1 + end do + idx_ed = nlat - 1 + do while (lat(idx_ed).gt.endLat(z)) + idx_ed = idx_ed - 1 + end do + + zoneTotalX = 0.0 + ZoneTotalArea = 0.0 + + do i = idx_st, idx_ed + if (i.eq.idx_st) then + Lat1 = startLat(z) + else + Lat1 = (lat(i) + lat(i - 1))/2 + end if + if (i.eq.idx_ed) then + Lat2 = endLat(z) + else + Lat2 = (lat(i) + lat(i + 1))/2 + end if + lat_slice = x(t, i, :) + idx_lon = ind(.not.ismissing(lat_slice)) + if (.not.all(ismissing(idx_lon))) then + CellArea = abs(sin(Lat1 * rad) - sin(Lat2 * rad)) + zoneTotalX = zoneTotalX + sum(lat_slice) * CellArea + ZoneTotalArea = ZoneTotalArea + dimsizes(idx_lon) * CellArea + end if + delete(idx_lon) + end do + + if (ZoneTotalArea.ne.0) then + xZonMean(t, z) = zoneTotalX / ZoneTotalArea + end if + end do ; loop(z) + end do ; loop(t) + + ; Create an informational attribute: + xZonMean@info = "zonal mean: CCMOzoneModelData.ncl" + + return (xZonMean) + +end + +; ############################################################################# +undef("calcMeanAnnCycleMonthly") +function calcMeanAnnCycleMonthly(RegCoeffAs:numeric) +; +; Arguments +; +; +; Return value +; +; +; Description +; +; +; Caveats +; +; Modification history +; 20190605-righi_mattia: ported from v1. +; +local dimx, z, month, t, xMeanAnnCycleMon +begin + dimx = dimsizes(RegCoeffAs) + + pi = 4. 
* atan(1.0) + + month = fspan(1, 12, 12) + t = 2 * pi * (month - 0.5) / 12.0 + + xMeanAnnCycleMon = new((/12/), "float") + xMeanAnnCycleMon(:) = RegCoeffAs(0) + \ + RegCoeffAs(1) * sin(t) + RegCoeffAs(2) * cos(t) + \ + RegCoeffAs(3) * sin(2 * t) + RegCoeffAs(4) * cos(2 * t) + \ + RegCoeffAs(5) * sin(3 * t) + RegCoeffAs(6) * cos(3 * t) + \ + RegCoeffAs(7) * sin(4 * t) + RegCoeffAs(8) * cos(4 * t) + + ; Create an informational attribute: + xMeanAnnCycleMon@info = "mean annual cycle monthly: CCMOzoneModelData.ncl" + + return(xMeanAnnCycleMon) + +end + +; ############################################################################# +undef("calcMeanAnnCycleAnnual") +function calcMeanAnnCycleAnnual(RegCoeffAs:numeric) +; +; Arguments +; +; +; Return value +; +; +; Description +; +; +; Caveats +; +; Modification history +; 20190605-righi_mattia: ported from v1. +; +local dimx, day, t, xMeanAnnCycleDay +begin + + dimx = dimsizes(RegCoeffAs) + + pi = 4. * atan(1.0) + + day = fspan(1, 366, 366) + t = 2 * pi * (day - 0.5) / 366.0 + + xMeanAnnCycleDay = new((/366/), "float") + + xMeanAnnCycleDay(:) = RegCoeffAs(0) + \ + RegCoeffAs(1) * sin(t) + RegCoeffAs(2) * cos(t) + \ + RegCoeffAs(3) * sin(2 * t) + RegCoeffAs(4) * cos(2 * t) + \ + RegCoeffAs(5) * sin(3 * t) + RegCoeffAs(6) * cos(3 * t) + \ + RegCoeffAs(7) * sin(4 * t) + RegCoeffAs(8) * cos(4 * t) + + ; Create an informational attribute: + xMeanAnnCycleDay@info = "mean annual cycle daily: CCMOzoneModelData.ncl" + + return (xMeanAnnCycleDay) + +end + +; ############################################################################# +undef("rmMeanAnnCycle") +procedure rmMeanAnnCycle(x:numeric, + xMeanAnnCycle:numeric, + RegCoeffA1:numeric, + xMonAnom:numeric, + xAnnAnom:numeric) +; +; Arguments +; +; +; Return value +; +; +; Description +; +; +; Caveats +; +; Modification history +; 20190605-righi_mattia: ported from v1. +; +local dimx, ntim, t, i, idx_st, idx_ed, nyr, xdata +begin + dimx = dimsizes(x) + ntim = dimx(0) + + nmos = 12 + modCheck("rmMeanAnnCycle", ntim, nmos) ; error check + + nyr = ntim/nmos + + ; loop through lat_band + do t = 0, (ntim - 1) + mon = mod(t, 12) + xMonAnom(t) = (x(t) - xMeanAnnCycle(mon)) * 100.0 / RegCoeffA1 + end do ; loop(t) + + do t = 0, (nyr - 1) + idx_st = t * 12 + idx_ed = (t + 1) * 12 - 1 + xdata = xMonAnom(idx_st:idx_ed) + if(.not.any(ismissing(xdata))) then + xAnnAnom(t) = avg(xdata) + end if + end do ; loop(t) + + ; Create an informational attribute: + xMonAnom@info = "monthly anomalies: CCMOzoneModelData.ncl" + xAnnAnom@info = "annual anomalies: CCMOzoneModelData.ncl" + +end + +; ############################################################################# +undef("apfiltersmooth") +function apfiltersmooth(xMonAnom:numeric, + filter:numeric, + iterations:integer) +; +; Arguments +; +; +; Return value +; +; +; Description +; +; +; Caveats +; +; Modification history +; 20190605-righi_mattia: ported from v1. +; +local dimx, dimf, ntim, nwin, z, t, i, j, hfw, idx_ed, avgwidth, \ + totalwgt, total, num, minval, finished, data, hold, xSmthMonAnom +begin + dimx = dimsizes(xMonAnom) + ntim = dimx(0) + + nmos = 12 + modCheck("apfiltersmooth", ntim, nmos) ; error check + + dimf = dimsizes(filter) + nwin = dimf(0) + hfw = nwin / 2 + + data = new(nwin, typeof(xMonAnom)) + hold = new(ntim, typeof(xMonAnom)) + xSmthMonAnom = new((/ntim/), typeof(xMonAnom)) + + do i = 1, iterations + + ; Transfer the data to the storage array. 
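+    ; (the first pass smooths the raw anomalies; each further pass
+    ; re-smooths the previous result, so the filter is applied
+    ; "iterations" times)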
+    if (i.eq.1) then
+      hold(:) = xMonAnom(:)
+    else
+      hold(:) = xSmthMonAnom(:)
+    end if
+
+    ; Fill the data array for the first time.
+    do j = 1, hfw
+      data(j - 1) = -999.99
+    end do
+    do j = hfw, (nwin - 1)
+      data(j) = hold(j - hfw)
+    end do
+    idx_ed = nwin - hfw
+
+    ; Process the data array.
+    do t = 0, (ntim - 1)
+      if (data(hfw).lt.-999.0) then
+        xSmthMonAnom(t) = -999.99
+      else
+        avgwidth = 0
+        totalwgt = filter(hfw)
+        total = filter(hfw) * data(hfw)
+        num = 1
+        finished = False
+        do while (.not.finished)
+          avgwidth = avgwidth + 1
+          if ((data(hfw - avgwidth).gt.-999.0).and. \
+              (data(hfw + avgwidth).gt.-999.0)) then
+            totalwgt = totalwgt + filter(hfw - avgwidth) + \
+              filter(hfw + avgwidth)
+            total = total + (filter(hfw - avgwidth) * data(hfw - avgwidth)) + \
+              (filter(hfw + avgwidth) * data(hfw + avgwidth))
+            num = num + 2
+          else
+            finished = True
+          end if
+          if (avgwidth.eq.hfw) then
+            finished = True
+          end if
+        end do
+        if (i.eq.iterations) then
+          minval = hfw
+        else
+          minval = 0
+        end if
+        if (num.gt.minval) then
+          xSmthMonAnom(t) = total / totalwgt
+        else
+          xSmthMonAnom(t) = -999.99
+        end if
+      end if
+
+      do j = 1, (nwin - 1)
+        data(j - 1) = data(j)
+      end do
+      idx_ed = idx_ed + 1
+      if (idx_ed.gt.ntim) then
+        data(nwin - 1) = -999.99
+      else
+        data(nwin - 1) = hold(idx_ed - 1)
+      end if
+    end do ; loop(t)
+  end do ; loop(i)
+
+  ; Create an informational attribute:
+  xSmthMonAnom@info = "smoothed monthly anomalies: CCMOzoneModelData.ncl"
+  xSmthMonAnom@_FillValue = -999.99
+
+  return (xSmthMonAnom)
+
+end
+
+; #############################################################################
+undef("smoothAnomalies")
+procedure smoothAnomalies(xAnnAnom:numeric,
+                          iterations:integer)
+;
+; Arguments
+;
+;
+; Return value
+;
+;
+; Description
+;
+;
+; Caveats
+;
+; Modification history
+; 20190605-righi_mattia: ported from v1.
+;
+local dimx, dimf, ntim, nset, nwin, s, t, i, j, hfw, idx_ed, avgwidth, \
+  totalwgt, total, num, minval, finished, smthdata, hold, buffer, filter, \
+  method
+begin
+
+  filter = (/1., 2., 1./)
+  method = "EdgeP"
+
+  dimx = dimsizes(xAnnAnom)
+  ntim = dimx(0)
+
+  dimf = dimsizes(filter)
+  nwin = dimf(0)
+  hfw = nwin/2
+
+  buffer = new(nwin, typeof(xAnnAnom))
+  smthdata = new(ntim, typeof(xAnnAnom))
+  hold = new(ntim, typeof(xAnnAnom))
+
+  do i = 1, iterations
+    ; Transfer the data to the storage array.
+    if (i.eq.1) then
+      hold = (/xAnnAnom/)
+    else
+      hold = (/smthdata/)
+    end if
+
+    ; Fill the data array for the first time.
+    do j = 1, hfw
+      buffer(j - 1) = -999.99
+    end do
+    do j = hfw, (nwin - 1)
+      buffer(j) = (/hold(j - hfw)/)
+    end do
+    idx_ed = nwin - hfw
+
+    ; Process the data array.
+    do t = 0, (ntim - 1)
+      if (buffer(hfw).lt.-999.0) then
+        smthdata(t) = -999.99
+      else
+        avgwidth = 0
+        totalwgt = filter(hfw)
+        total = filter(hfw) * buffer(hfw)
+        num = 1
+        finished = False
+        do while (.not.finished)
+          avgwidth = avgwidth + 1
+          if ((buffer(hfw - avgwidth).gt.-999.0) .and.
\ + (buffer(hfw + avgwidth).gt.-999.0)) then + totalwgt = \ + totalwgt + filter(hfw - avgwidth) + filter(hfw + avgwidth) + total = \ + total + (filter(hfw - avgwidth) * buffer(hfw - avgwidth)) + \ + (filter(hfw + avgwidth) * buffer(hfw + avgwidth)) + num = num + 2 + else + finished = True + end if + if (avgwidth.eq.hfw) then + finished = True + end if + end do + + if (method.eq."EdgeP") then + if (num.eq.1) then + if (buffer(hfw + 1).lt.-999.0) then + smthdata(t) = (buffer(hfw) + buffer(hfw - 1)) / 2.0 + else + smthdata(t) = (buffer(hfw) + buffer(hfw + 1)) / 2.0 + end if + else + smthdata(t) = total / totalwgt + end if + else + if (i.eq.iterations) then + minval = hfw + else + minval = 0 + end if + if (num.gt.minval) then + smthdata(t) = total / totalwgt + else + smthdata(t) = -999.99 + end if + end if ; if(method) + end if + do j = 1, (nwin - 1) + buffer(j-1) = buffer(j) + end do + idx_ed = idx_ed+1 + if (idx_ed.gt.ntim) then + buffer(nwin-1) = -999.99 + else + buffer(nwin-1) = hold(idx_ed-1) + end if + end do ; loop(t) + end do ; loop(i) + xAnnAnom(:) = (/smthdata(:)/) + + ; Create an informational attribute: + xAnnAnom@info = "smoothed annual anomalies: CCMOzoneModelData.ncl" + xAnnAnom@_FillValue = -999.99 + +end + + +; ############################################################################# +undef("clmMon2clmDayn") +function clmMon2clmDayn(x:numeric, + retOrder:integer, + opt:integer) +; +; Arguments +; +; +; Return value +; +; +; Description +; Take a monthly climatology and make a daily climatology +; +; +; Caveats +; +; Modification history +; 20190605-righi_mattia: ported from v1. +; +local dNames, dimx, rank, X, midMon, day +begin + if (.not.(retOrder.eq.0 .or. retOrder.eq.1)) then + print("clmMon2clmDay: retOrder must be 0 or 1, retOrder=" + retOrder) + exit + end if + + dNames = getvardims(x) + if (any(ismissing(dNames))) then + print("clmMon2clmDay: named dimensions required:" + dNames) + exit + end if + + dimx = dimsizes(x) + if (dimx(0).ne.12) then + print("clmMon2clmDay: leftmost dimension must be size=12: SIZE=" + dimx(0)) + exit + end if + + rank = dimsizes(dimx) + if (rank.gt.4) then + print("clmMon2clmDay: currently max of 4 dimension supported: rank=" + \ + rank) + exit + end if + + ; transfer to work array, if necessary, reorder array + if (rank.eq.1) then + X = x + end if + if (rank.eq.2) then + X = x($dNames(1)$|:, $dNames(0)$|:) + end if + if (rank.eq.3) + X = x($dNames(1)$|:, $dNames(2)$|:, $dNames(0)$|:) + end if + if (rank.eq.4) + X = x($dNames(1)$|:, $dNames(2)$|:, $dNames(3)$|:, $dNames(0)$|:) + end if + + ; mid day of each month + if (isatt(opt, "midmon")) then + if (dimsizes(opt@midMon).eq.12) then + midMon = opt@midMon + else + print("clmMon2clmDay: midMon required to be size 12: size=" + \ + dimsizes(opt@midMon)) + exit + end if + else + midMon = (/15.5, 45, 74.5, 105, 135.5, 166, 196.5, \ + 227.5, 258, 288.5, 319, 349.5/) + end if + midMon@long_name = "middle of month" + + day = ispan(0, 364, 1) ; use 0 => 364 for interpolation + day!0 = "day" + + Z = linint1_Wrap(midMon, X, True, day, 0) + Z@info = "NCL: clmMon2clmDay" + + day = ispan(1, 365, 1) ; use 1 => 365 for coord variable + day@long_name = "day of year: no leap" + day@units = "1=Jan 1, 32=Feb 1, ..., 365-Dec 31" + Z!(rank-1) = "day" + Z&day = day + + if (retOrder.eq.1) then + return(Z) + end if + + if (retOrder.eq.0) then + if (rank.eq.1) then + return(Z) + end if + if (rank.eq.2) then + return(Z(day|:, $dNames(1)$|:)) + end if + if (rank.eq.3) then + return(Z(day|:, $dNames(1)$|:, 
$dNames(2)$|:)) + end if + if (rank.eq.4) then + return(Z(day|:, $dNames(1)$|:, $dNames(2)$|:, $dNames(3)$|:)) + end if + end if + +end diff --git a/esmvaltool/diag_scripts/shared/plot/carbon_plots.ncl b/esmvaltool/diag_scripts/shared/plot/carbon_plots.ncl new file mode 100644 index 0000000000..61f06dcb9c --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/carbon_plots.ncl @@ -0,0 +1,102 @@ +; ####################################################################### +; carbon_plots.ncl +; ####################################################################### +; Plots: +; * single_barchart : one value per model +; +; Written by Sabrina Wenzel (sabrina.zechlau@dlr.de) +; ####################################################################### +load "$diag_scripts/../interface_scripts/auxiliary.ncl" +load "$diag_scripts/../interface_scripts/logging.ncl" + +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" +; ####################################################################### + +undef("barchart") +function barchart(wks_in[1], \ + source, \ + varname, \ + inlist[1]) +; Arguments: +; * wks : workstation for were to draw the plot +; * data : array for plotting on Y-axis +; * varname : variable to be plotted +; * inlist : input_file_info + +local plot, idx_obs, idx_mod, sres, dim_MOD, colors, data +begin + + ; Check for valid data + funcname = "barchart" + scriptname = "diag_scripts/shared/plot/carbon_plots.ncl" + enter_msg(scriptname, funcname) + + ; Get data, either directly or via netCDF + if (typeof(source).eq."string") then + data = ncdf_read(source, varname) + else + data = source + end if + + ; Style options + colors = project_style(inlist, diag_script_info, "colors") + + ; Retrieve basic metadata from data + defaults = (/"default", "dummy", "dummy_for_var", "Default", "Dummy"/) + if (any(varname.eq.defaults)) then + var = att2var(data, "var") + else + var = varname + end if + + ; Check if a valid wks has been provided, otherwise invoke default + wks = get_wks(wks_in, DIAG_SCRIPT, var) + gsn_define_colormap(wks, array_append_record((/"white", "black"/), \ + colors, 0)) + + ; Define names for x-ticks and dimsizes + dim_MOD = dimsizes(data(:, 0)) + + XStg = data&datasets(0) + xValue = ispan(1, dim_MOD, 1) + + ; Resources + res = True + res@gsnDraw = False + res@gsnFrame = False + res@gsnMaximize = True + res@gsnXYBarChart = True + res@gsnXYBarChartBarWidth = 0.30 + + res@trXMinF = 1-0.5 + res@trXMaxF = dim_MOD+0.5 + + res@vpWidthF = 0.5 + res@vpHeightF = 0.4 + + res@tmXBMode = "Explicit" ; explicit labels + res@tmXBValues = xValue + res@tmXBLabels = data&datasets + res@tmXBLabelFontHeightF = 0.025 + res@tmXBLabelAngleF = 30 + res@tmXBLabelJust = "TopRight" + res@tmXTLabelFontHeightF = 0.020 + res@tmYLLabelFontHeightF = 0.025 + + res@tiMainFontHeightF = 0.025 + res@tiXAxisFontHeightF = 0.025 + res@tiMainString = var + res@tiYAxisString = data&yaxis + res@gsnYRefLine = 0. 
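+  ; colormap indices 0 and 1 hold "white"/"black" (defined above via
+  ; gsn_define_colormap), so the per-model bar colors start at index 2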
+ res@gsnXYBarChartColors2 = ispan(2, dimsizes(colors) + 1, 1) + + ; Override defaults with "res_" attributes of "data" + res_new = att2var(data, "res_") + copy_VarMeta(res_new, res) + + plot = gsn_csm_xy(wks, xValue, data(:, 0), res) + + return(plot) + +end diff --git a/esmvaltool/diag_scripts/shared/plot/contour_maps.ncl b/esmvaltool/diag_scripts/shared/plot/contour_maps.ncl index 8aa56f0cca..5f305130ae 100644 --- a/esmvaltool/diag_scripts/shared/plot/contour_maps.ncl +++ b/esmvaltool/diag_scripts/shared/plot/contour_maps.ncl @@ -52,7 +52,7 @@ function contour_map(wks_in[1], ; Input via netCDF not yet implemented ; ; Modification history -; 20131104-A_gott_kl: written. +; 20131104-gottschaldt_klaus-dirk: written. ; local funcname, data, defaults, diag_script, var, wks, res, res_new begin @@ -163,8 +163,8 @@ function contour_map_polar(wks_in[1], ; Input via netCDF not yet implemented ; ; Modification history -; 20140623-A_senf_da: now takes res as attributes of source. -; 20131218-A_senf_da: written. +; 20140623-senftleben_daniel: now takes res as attributes of source. +; 20131218-senftleben_daniel: written. ; local funcname, data, defaults, var, diag_script, wks, res, res_in, res1 begin @@ -300,9 +300,9 @@ function contour_map_ce(wks_in[1], ; Input via netCDF not yet implemented ; ; Modification history -; 20161220-A_laue_ax: added option to specificy plotting projection -; (via attribute data@projection) -; 20140228-A_righ_ma: written. +; 20161220-lauer_axel: added option to specificy plotting projection +; (via attribute data@projection) +; 20140228-righi_mattia: written. ; local data, var, wks, res begin @@ -398,7 +398,7 @@ procedure add_markers_to_map(wks_in[1], plot[1]: graphic, lat[*]: numeric, \ ; Caveats ; ; Modification history: -; 20140214-A_righ_ma: written. +; 20140214-righi_mattia: written. ; local funcname, class_name, views, cn_levels, lb_colors, cmap, colors, \ loc_res, ndata, mfill, mbord, ii, idx, str diff --git a/esmvaltool/diag_scripts/shared/plot/contourplot.ncl b/esmvaltool/diag_scripts/shared/plot/contourplot.ncl index df2c8eaf7d..93ffa40982 100644 --- a/esmvaltool/diag_scripts/shared/plot/contourplot.ncl +++ b/esmvaltool/diag_scripts/shared/plot/contourplot.ncl @@ -44,7 +44,7 @@ function contourplot(wks_in[1], ; Input via netCDF not yet implemented. ; ; Modification history -; 20131217-A_vanu_be: written, largely copied from countour_map.ncl +; 20131217-vanulft_bert: written, largely copied from countour_map.ncl. ; local data, defaults, diag_script, funcname, res, res_new, scriptname, \ source, varname, wks, wks_in diff --git a/esmvaltool/diag_scripts/shared/plot/legends.ncl b/esmvaltool/diag_scripts/shared/plot/legends.ncl index ff5057b76f..3dd979bf54 100644 --- a/esmvaltool/diag_scripts/shared/plot/legends.ncl +++ b/esmvaltool/diag_scripts/shared/plot/legends.ncl @@ -35,15 +35,16 @@ function legend_lines(wks_in[1], ; source@nrow: number of rows (optional) ; source@ncol: number of columns (optional) ; -; Returns value -; wks : Workstation with legend +; Return value +; wks: workstation with legend ; ; Description ; Creates an extra plot with a legend, specified by labels and line styles. 
; It will be saved in the outfile directory and returned as a workstation
;
-; Modification history:
-; 20140326 A-gott_kl: written based on code by A-fran_fr
+; Modification history
+; 20140326-gottschaldt_klaus-dirk: written based on code by
+; winterstein_franziska
;
local data, defaults, var, diag_script, colors, annots, dashes, thicks
begin
@@ -200,12 +201,12 @@ function legend_markers(wks_in[1],
; Return value
; wks: workstation with legend
;
-; Description:
+; Description
; Creates an extra plot with a legend, specified by labels and line styles.
; It will be saved in the outfile directory and returned as a workstation
;
-; Modification history:
-; 20140326 A-wenz_sa: written based on legend_lines.
+; Modification history
+; 20140326-wenzel_sabrina: written based on legend_lines.
;
local data, defaults, var, diag_script, colors, annots, dashes, thicks
begin
diff --git a/esmvaltool/diag_scripts/shared/plot/mder.ncl b/esmvaltool/diag_scripts/shared/plot/mder.ncl
new file mode 100644
index 0000000000..9960e0ea7f
--- /dev/null
+++ b/esmvaltool/diag_scripts/shared/plot/mder.ncl
@@ -0,0 +1,990 @@
+; #############################################################################
+; shared/plot/mder.ncl
+; #############################################################################
+; GENERAL PLOTTING ROUTINES FOR MULTIPLE DIAGNOSTIC ENSEMBLE REGRESSION
+; #############################################################################
+; Check the header of each routine for documentation.
+;
+; Contents:
+;   function error_bar_plot: plots an error bar for every single value.
+;   function time_raster_plot: plots correlation coefficients in a raster.
+;   function weight_lin_plot: plots a time series together with a weighted
+;     model ensemble and observations.
+;   function mder_scatter_plot: creates a scatterplot with regression error
+;     and xy-stddev of each model value.
+;   function squared_error_plot: plots the squared error for various model
+;     ensembles.
+;   function diag_scatter: plots a scatterplot for each diagnostic of
+;     historical simulation vs projection.
+;
+; #############################################################################
+
+load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/shea_util.ncl"
+
+load "$diag_scripts/shared/plot/style.ncl"
+load "$diag_scripts/shared/plot/scatterplot.ncl"
+
+load "$diag_scripts/shared/mder.ncl"
+
+
+; #############################################################################
+undef("error_bar_plot")
+function error_bar_plot(Ydata[*] : numeric, \
+                        Ystd[*][*] : numeric, \
+                        XBname[*] : string, \
+                        color[*] : string, \
+                        MainStg[1] : string, \
+                        YStg[1] : string, \
+                        wks[1], \
+                        opt[1] : string)
+;
+; Arguments:
+;   Ydata: array for plotting on Y-axis.
+;   Ystd: array with + and - 95% confidence intervals of Ydata.
+;   XBname: array with X-axis label names.
+;   color: array indicating positive or negative values.
+;   MainStg: header string for plot.
+;   YStg: Y-axis string.
+;   wks: workstation where to draw the plot.
+;   opt: type of operation:
+;     "draw": draws the plot directly and returns a logical.
+;     "panel": returns the plot value for a panel plot.
+;
+; Modification history:
+;   20180726-schlund_manuel: ported to v2.0.
+;   201402??-wenzel_sabrina: written.
+;   201411??-wenzel_sabrina: added color code for positive and negative
+;     values.
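+;
+; Usage (illustrative sketch; all variable names are placeholders):
+;
+;   plot = error_bar_plot(vals, conf_bounds, names, bar_colors, \
+;                         "MDER result", "RMSE", wks, "draw")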
+; +local plot, ti, error_bar, error_barend, DIM_VAR +begin + + funcname = "error_bar_plot" + scriptname = "diag_scripts/shared/plot/mder.ncl" + enter_msg(scriptname, funcname) + + ; Check for valid data + if (all(ismissing(Ydata(:)))) then + log_info("No data available, skipping") + return(0) + end if + + ; Define names for x-ticks and dimsizes + DIM_VAR = dimsizes(Ydata(:)) + error_bar = new(DIM_VAR, graphic) + error_barend = new(DIM_VAR, graphic) + marker = new(DIM_VAR, graphic) + + res = True + res@gsnDraw = False ; Do not draw plot + res@gsnFrame = False ; Do not advance frame + res@gsnMaximize = True + + res@tiXAxisFontHeightF = 0.020 + res@tiYAxisFontHeightF = 0.020 + res@tiMainFontHeightF = 0.025 + res@tiMainString = MainStg + res@tiXAxisString = YStg + + res@vpHeightF = 0.60 ; change aspect ratio of plot + res@vpWidthF = 0.30 + + res@xyMarker = 16 + res@xyMarkerSizeF = 0.0012 + res@xyMarkLineMode = "Markers" + res@xyMonoMarkerColor = False + res@xyMonoMarker = True + res@xyMarkerColors = color(0) ; black + + ti = ispan(0, DIM_VAR - 1, 1) + res@trYMinF = -0.5 + res@trYMaxF = DIM_VAR-1+0.5 + res@trYReverse = True ; reverse y-axis + res@tmYLMode = "Explicit" + res@tmYLValues = ispan(0, DIM_VAR - 1, 1) + res@tmYLLabels = XBname + res@trXMinF = 0.0 + res@trXMaxF = 1.0 + + ; Create plot + plot = gsn_csm_xy(wks, \ + (/Ydata(0), Ydata(0)/), \ + (/ti(0), ti(0)/), \ + res) + + polyres = True + polyres@gsMarkerIndex = 16 + polyres@gsMarkerSizeF = 0.012 + pollyres = True + pollyres@gsLineThicknessF = 2 + + do t = 0, DIM_VAR - 1 + polyres@gsMarkerColor = color(t) + marker(t) = gsn_add_polymarker(wks, plot, Ydata(t), t, polyres) + error_bar(t) = gsn_add_polyline(wks, plot, (/t, t/), \ + (/Ystd(0, t), Ystd(1, t)/), polyres) + error_barend(t) = gsn_add_polyline(wks, plot, \ + (/Ystd(0, t), Ystd(0, t), Ystd(0, t), \ + Ystd(1, t), Ystd(1, t), Ystd(1, t)/), \ + (/t - 0.25, t + 0.25, t, t, t - 0.25, \ + t + 0.25/), pollyres) + end do + + leave_msg(scriptname, funcname) + + if (opt .eq. "draw") then + draw(plot) + frame(wks) + return(0) + end if + + if (opt .eq. "panel") then + return(plot) + end if + +end + + +; ############################################################################# +undef("time_raster_plot") +function time_raster_plot(Ydata[*][*] : numeric, \ + XBname[*] : string, \ + YLname[*] : string, \ + MainStg[1] : string, \ + wks[1], \ + opt[1] : string) +; +; Arguments: +; Ydata: array for plotting. +; XBname: array with X-axis label names. +; YBname: array with Y-axis label names. +; MainStg: header string for plot. +; wks: workstation for were to draw the plot. +; opt: type of operations: +; "draw": draws the plot directly and returnes logical. +; "panel": returnes plotvalue for pannel plot. +; +; Modification history +; 20180726-schlund_manuel: ported to v2.0. +; 201504??-wenzel_sabrina: written. 
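+;
+; Usage (illustrative sketch; "corr" is a placeholder [diag x time]
+; array; requires diag_script_info@p_step to be set):
+;
+;   plot = time_raster_plot(corr, year_labels, diag_names, \
+;                           "Correlation", wks, "panel")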
+; +local plot, ti, error_bar, error_barend, DIM_VAR, pxi +begin + + funcname = "time_raster_plot" + scriptname = "diag_scripts/shared/plot/mder.ncl" + enter_msg(scriptname, funcname) + + ; Check required attributes of diag_script_info + req_atts = (/"p_step"/) + exit_if_missing_atts(diag_script_info, req_atts) + p_step = diag_script_info@p_step + + ; Check for valid data + if (all(ismissing(Ydata(:, :)))) then + log_info("No data available, skipping") + return(0) + end if + + ; Define names for x-ticks and dimsizes + dim_X = dimsizes(Ydata(0, :)) + dim_Y = dimsizes(Ydata(:, 0)) + + ; Color map + gsn_define_colormap(wks, "temp_19lev") + + res = True + res@gsnDraw = False ; Do not draw plot + res@gsnFrame = False ; Do not advance frame + res@gsnMaximize = True + res@gsnSpreadColors = True + + res@cnFillOn = True ; Turn on contour fill + res@cnFillMode = "RasterFill" ; Turn on raster fill + res@cnLinesOn = False ; Turn off contour lines + res@tiMainFontHeightF = 0.025 + res@tiMainString = MainStg + + res@cnLevelSelectionMode = "ManualLevels" + res@cnMinLevelValF = -0.8 + res@cnMaxLevelValF = 0.8 + res@cnLevelSpacingF = 0.01 + res@lbTitleFontHeightF = 0.020 + + res@lbLabelBarOn = True + res@lbOrientation = "vertical" ; vertical label bar + res@lbLabelStride = 10 + res@lbBoxLinesOn = False + + res@trXMinF = -0.5 + res@trXMaxF = dim_X - 0.5 + res@tmXBMode = "Explicit" + res@tmXBValues = ispan(0, dim_X - 1, 1) + res@tmXBLabels = XBname + res@tmXBLabelAngleF = 35 + res@tmXBLabelJust = "TopRight" + res@tmXBLabelFontHeightF = 0.020 + res@tmXBMinorValues = ispan(0, dim_X - 1, 1) - 0.5 + res@tmXMinorGrid = True ; implement y grid + res@tmXMinorGridThicknessF = 2.0 ; 2.0 is default + res@sfXArray = res@tmXBValues + + res@trYMinF = -0.5 + res@trYMaxF = dim_Y - 0.5 + res@trYReverse = True ; reverse y-axis + res@tmYLMode = "Explicit" + res@tmYLValues = ispan(0, dim_Y - 1, 1) + res@tmYLLabels = YLname + res@tmYLLabelFontHeightF = 0.022 + res@tmYLMinorValues = ispan(0, dim_Y - 1, 1) - 0.5 + res@tmYMinorGrid = True ; implement y grid + res@tmYMinorGridThicknessF = 2.0 ; 2.0 is default + res@sfYArray = res@tmYLValues + + ; Create plot + plot = gsn_csm_contour(wks, Ydata, res) + + ; Plot crosses + climo_dir = getenv("ESMValTool_climo_dir") + diag_file = config_user_info@work_dir + variables(0) + "_" + \ + input_file_info@exp(dimsizes(input_file_info@exp) - 1) + "_" + \ + (p_step + 1) + "ystep_diagnostics.txt" + if (isfilepresent(diag_file)) then + diag_terms = asciiread(diag_file, -1, "string") + pdx = dimsizes(diag_terms) / dimsizes(res@sfXArray) + respoly = True + respoly@gsMarkerColor = "black" + respoly@gsMarkerIndex = 5 + respoly@gsMarkerThicknessF = 2 + respoly@gsMarkerSizeF = 30 + pxi = new((/dimsizes(res@sfXArray), dimsizes(res@sfYArray)/), graphic) + do xi = 0, dimsizes(res@sfXArray) - 1 + idx = diag_terms(xi * pdx: xi * pdx + pdx - 1) + do yi = 0, dimsizes(res@sfYArray) - 1 + do di = 0, dimsizes(idx) - 1 + if (.not. ismissing(idx(di))) then + if (YLname(yi) .eq. idx(di)) then + pxi(xi, yi) = gsn_add_polymarker(wks, \ + plot, \ + res@tmXBValues(xi), \ + res@tmYLValues(yi), \ + respoly) + end if + end if + end do + end do + delete(idx) + end do + end if + + leave_msg(scriptname, funcname) + + if (opt .eq. "draw") then + draw(plot) + frame(wks) + return(0) + end if + + if (opt .eq. 
"panel") then + return(plot) + end if + +end + + +; ############################################################################# +undef("weight_lin_plot") +function weight_lin_plot(Ydata[*][*] : numeric, \ + Ywterror[*][*] : numeric, \ + Ywerror[*][*] : numeric, \ + Yobs[*][*] : numeric, \ + MainStg[1] : string, \ + YStg[1] : string, \ + wks[1], \ + opt[1] : string, \ + items: list) +; +; Arguments: +; Ydata: array with Y data for each model. +; Ywterror: error of MDER results. +; Ywerror: error of time-dependent MDER results. +; Yobs: array with Y data for observations. +; MainStg: plot header string. +; YStg: Y-axis string. +; wks: workstation for were to draw the plot. +; opt: type of operations: +; "draw": draws the plot directly and returnes logical. +; "panel": returnes plotvalue for pannel plot. +; items: list of input_file_info metadata +; +; Modification history +; 20180726-schlund_manuel: ported to v2.0. +; 201410??-wenzel_sabrina: written. +; +local plot_array, plot, ploto, lbid, annoid, wtmmm, ummm, colors, dashes, \ + thicks, plot_num, res, bres, tline, zline, wmmm, atts, obs, dim_obs, \ + lg_labels, lg_colors, item_type +begin + + funcname = "weight_lin_plot" + scriptname = "diag_scripts/shared/plot/mder.ncl" + enter_msg(scriptname, funcname) + + ; Check required attributes of diag_script_info + req_atts = (/"calc_type"/) + exit_if_missing_atts(diag_script_info, req_atts) + calc_type = diag_script_info@calc_type + + ; Check for valid data + if (all(ismissing(Ydata(:, :)))) then + log_info("No data available, skipping") + return(0) + end if + + ; Check for plotting of whole MDER time series + plot_mder_time_series = False + if (isatt(diag_script_info, "plot_mder_time_series")) then + plot_mder_time_series = diag_script_info@plot_mder_time_series + end if + + ; Define line colors (first color is used for the observations) + dashes = project_style(items, diag_script_info, "dashes") + obs = get_obs_list(items) + dim_obs = ListCount(obs) + + if (any(calc_type .eq. (/"pos", "int"/))) then + lg_labels = new((/3 + dim_obs/), string) + lg_colors = new((/3 + dim_obs/), string) + item_type = new((/3 + dim_obs/), string) + + OBSdashes = (/4, 2, 0, 9/) + lg_labels(dim_obs + 1) = "CMIP5 models" + lg_labels(dim_obs + 2) = Ydata&dataset(1) + lg_colors(dim_obs + 1) = "grey50" ; "blue2";"slateblue4";colors + lg_colors(dim_obs + 2) = "blue2" ; "slateblue4" + item_type(dim_obs + 1) = "Lines" + item_type(dim_obs + 2) = "Markers" + else if (calc_type .eq. 
"trend") then + lg_labels = new((/2 + dim_obs/), string) + lg_colors = new((/2 + dim_obs/), string) + item_type = new((/2 + dim_obs/), string) + + OBSdashes = (/4, 2, 9/) + lg_labels(dim_obs + 1) = Ydata&dataset(1) + lg_colors(dim_obs + 1) = "blue2" ; "slateblue4" + item_type(dim_obs + 1) = "Markers" + else + error_msg("f", DIAG_SCRIPT, "", "calc_type " + calc_type + \ + " not supported") + end if + end if + + ; Define array with legend names + lg_labels(:dim_obs - 1) = Yobs&name + lg_labels(dim_obs) = Ydata&dataset(0) + + ; Define array with colors + lg_colors(:dim_obs - 1) = "orange" + lg_colors(dim_obs) = "red" + + ; Define item types + item_type(:dim_obs) = "Lines" + + error_bar = new(dimsizes(Ydata&time), graphic) + error_barend = new(dimsizes(Ydata&time), graphic) + + ; Resources + res = True + res@gsnDraw = False ; Do not draw plot + res@gsnFrame = False ; Do not advance frame + res@gsnMaximize = True + + res@txFontHeightF = 0.025 + res@tiXAxisString = "Year" + res@tiXAxisFontHeightF = 0.025 + res@tiYAxisString = YStg + res@tiYAxisFontHeightF = 0.025 + res@tiMainFontHeightF = 0.025 + res@tiMainString = MainStg + + res@vpHeightF = 0.4 ; change aspect ratio of plot + res@vpWidthF = 0.6 + + res@trXMinF = min(Ydata&time) + res@trXMaxF = max(Ydata&time) - 1 + res@trYMinF = min(Ydata(4:, :)) - 0.02 * abs(avg(Ydata(4:, :))) + res@trYMaxF = max(Ydata(4:, :)) + 0.02 * abs(avg(Ydata(4:, :))) + + ; Plot error bar of time-dependent MDER results + if (plot_mder_time_series .and. (calc_type .eq. "pos")) then + res@xyLineColor = "skyblue1" + res@gsnXYAboveFillColors = "skyblue1" + else + res@xyLineColor = "white" + res@gsnXYAboveFillColors = "white" + end if + plot_array = gsn_csm_xy(wks, Ydata&time, Ywerror(:, :), res) + + ; Plot time-dependent MDER results + if (plot_mder_time_series .and. (calc_type .eq. "pos")) then + mder = True + mder@gsLineDashPattern = 4 + mder@xyMonoLineColor = False + mder@gsLineColor = "slateblue4" + mder@gsLineThicknessF = 3 + wmmm = gsn_add_polyline(wks, plot_array, Ydata&time, Ydata(2, :), mder) + end if + + ; Plot models + lres = True + lres@xyMonoDashPattern = False + lres@gsLineColor = "grey50" + lres@gsLineThicknessF = 2 + plotm = new((/dimsizes(Ydata(3:, 0))/), graphic) + do idat = 3, dimsizes(Ydata(:, 0)) - 1 + lres@gsLineDashPattern = dashes(idat) + plotm(idat - 3) = gsn_add_polyline(wks, plot_array, Ydata&time, \ + Ydata(idat, :), lres) + end do + + ; Plot observations + mres = True + mres@gsLineColor = "orange" + mres@gsLineThicknessF = 3 + ploto = new((/dimsizes(Yobs(:, 0))/), graphic) + do iobs = 0, dimsizes(Yobs(:, 0)) - 1 + mres@gsLineDashPattern = OBSdashes(iobs) + ploto(iobs) = gsn_add_polyline(wks, plot_array, Yobs&time, \ + Yobs(iobs, :), mres) + end do + + ; Plot uMMM + mres@gsLineColor = "red" + mres@gsLineThicknessF = 3 + mres@gsLineDashPattern = 2 + ummm = gsn_add_polyline(wks, plot_array, Ydata&time, Ydata(0, :), mres) + + ; Plot error bars of MDER results + erres = True + erres@gsLineColor = "blue2" ; "slateblue4" + erres@gsLineThicknessF = 3 + do tt = 0, dimsizes(Ydata&time) - 1 + if (.not. 
ismissing(Ywterror(0, tt))) then + error_bar(tt) = gsn_add_polyline(wks, plot_array, \ + (/Ydata&time(tt), Ydata&time(tt)/), \ + (/Ywterror(0, tt), \ + Ywterror(1, tt)/), \ + erres) + error_barend(tt) = gsn_add_polyline(wks, plot_array, \ + (/Ydata&time(tt) - 2.0, \ + Ydata&time(tt) + 2.0, \ + Ydata&time(tt), \ + Ydata&time(tt), \ + Ydata&time(tt) - 2.0, \ + Ydata&time(tt) + 2.0/), \ + (/Ywterror(0, tt), \ + Ywterror(0, tt), \ + Ywterror(0, tt), \ + Ywterror(1, tt), \ + Ywterror(1, tt), \ + Ywterror(1, tt)/), \ + erres) + end if + end do + + ; Plot means of MDER results + mder = True + mder@gsMarkerColor = "blue2" ; "slateblue4" + mder@gsMarkerIndex = 9 + mder@gsMarkerThicknessF = 3 + mder@gsMarkerSizeF = 0.01 + wtmmm = gsn_add_polymarker(wks, plot_array, Ydata&time, Ydata(1, :), mder) + + ; Plot line at Y = 0 + lres = True + lres@gsLineColor = "black" + lres@gsLineThicknessF = 1 + zline = gsn_add_polyline(wks, plot_array, (/1900, 2100/), (/0, 0/), lres) + + leave_msg(scriptname, funcname) + + if (opt.eq."draw") then + lgres = True + lgres@lgPerimOn = False + ; lgres@lgMonoitem_type = False + ; lgres@lgitem_types = item_type + lgres@vpWidthF = 0.30 + lgres@vpHeightF = 0.025 * dimsizes(OBSdashes) + lgres@lgLabelFontHeightF = 0.20 + lgres@lgLineColors = lg_colors + lgres@lgDashIndexes = OBSdashes + lgres@lgLineThicknessF = 2 + OBSdashes(:dimsizes(OBSdashes) - 2) = -1 + lgres@lgMarkerIndexes = OBSdashes + lgres@lgMarkerSizeF = 0.009 + lgres@lgMarkerThicknessF = 3 + lbid = gsn_create_legend(wks, dimsizes(OBSdashes), lg_labels, lgres) + + amres = True + amres@amParallelPosF = 0.25 + amres@amOrthogonalPosF = -0.37 + annoid = gsn_add_annotation(plot_array, lbid, amres) + + str1 = unique_string("annoid") + plot_array@$str1$ = annoid + + draw(plot_array) + frame(wks) + return(0) + end if + + if (opt .eq. "panel") then + return(plot_array) + end if + +end + + +; ############################################################################# +undef("mder_scatter_plot") +function mder_scatter_plot(Xdata[*] : numeric, \ + Xerr[*] : numeric, \ + Ydata[*] : numeric, \ + Yerr[*] : numeric, \ + Yfit[*] : numeric, \ + Yfit_err[*][*] : numeric, \ + MainStg[1] : string, \ + XStg[1] : string, \ + YStg[1] : string, \ + wks[1], \ + opt[1] : string, \ + items: list) +; +; Arguments: +; Xdata: array with X data for each model. +; Xerr: array with error values for X. +; Ydata: array with Y data for each model. +; Yfit[*]: array with regression values. +; Yfit_err: array with error regression values. +; Xobs: OBS value. +; Xobs_std: OBS error value. +; MainStg: plot header string. +; XStg: X-axis string. +; YStg: Y-axis string. +; wks: workstation for were to draw the plot. +; opt: type of operations: +; "draw": draws the plot directly and returnes logical. +; "panel": returnes plotvalue for pannel plot. 
+; items: list of input_file_info metadata +; +local plot_array, plot, colors, dashes, thicks, res, bres +begin + + funcname = "mder_scatter_plot" + scriptname = "diag_scripts/shared/plot/mder.ncl" + enter_msg(scriptname, funcname) + + ; Check for valid data + if (all(ismissing(Xdata))) then + log_info("No data available, skipping") + return(0) + end if + + ; Define line colors (first color is used for the observations) + colors = project_style(items, diag_script_info, "colors") + dashes = project_style(items, diag_script_info, "dashes") + thicks = project_style(items, diag_script_info, "thicks") + lg_labels = unique_labels_min(items, (/"dataset", "exp", "ensemble"/)) + + plot = new((/dimsizes(Xdata)/), graphic) + plot_m = new((/dimsizes(Xdata)/), graphic) + Xerror = new((/dimsizes(Xdata)/), graphic) + Xerror_end = new((/dimsizes(Xdata)/), graphic) + Yerror = new((/dimsizes(Xdata)/), graphic) + Yerror_end = new((/dimsizes(Xdata)/), graphic) + + ; Basic resources + res = True + res@gsnDraw = False ; Do not draw plot + res@gsnFrame = False ; Do not advance frame + res@gsnMaximize = True + + res@txFontHeightF = 0.025 + res@tiXAxisString = XStg + res@tiXAxisFontHeightF = 0.022 + res@tiYAxisString = YStg + res@tiYAxisFontHeightF = 0.025 + res@tiMainFontHeightF = 0.025 + res@tiMainString = MainStg + + res@vpHeightF = 0.6 ; change aspect ratio of plot + res@vpWidthF = 0.6 + + res@trYMinF = min(Ydata) - 0.2 * (abs(min(Ydata)) / 8) + res@trYMaxF = max(Ydata) + 0.2 * (abs(max(Ydata)) / 8) + res@trXMinF = max((/res@trYMinF, min(Yfit)/)) + res@trXMaxF = min((/res@trYMaxF, max(Yfit)/)) + + ; Plot regression errors (filled area between curves) + res@xyLineColor = "grey85" + res@gsnXYFillColors = "grey85" + plot_array = gsn_csm_xy(wks, Yfit, Yfit_err, res) + + ; Plot observations + fres = True + fres@gsLineColor = "orange" + fres@gsFillColor = "orange" + fres@gsLineDashPattern = 0 + plot_obs = gsn_add_polygon(wks, plot_array, \ + (/Xdata(1) + Xerr(1), Xdata(1) - Xerr(1), \ + Xdata(1) - Xerr(1), Xdata(1) + Xerr(1)/), \ + (/100.0, 100.0, -100.0, -100.0/), \ + fres) + + ; Plot regression line + fres@gsLineColor = "slateblue4" + fres@gsLineThicknessF = 3 + fres@gsLineDashPattern = 0 + plot_yfit = gsn_add_polyline(wks, plot_array, Yfit, Yfit, fres) + + ; Plot observation fit (MDER result) + fres@gsLineDashPattern = 1 + plot_mder = gsn_add_polyline(wks, plot_array, \ + (/-100.0, Xdata(1), Xdata(1)/), \ + (/Ydata(1), Ydata(1), -100.0/), \ + fres) + ; Plot uMMM line + fres@gsLineColor = "red" + fres@gsLineDashPattern = 3 + plot_ummm = gsn_add_polyline(wks, plot_array, (/100.0, -100.0/), \ + (/Ydata(0), Ydata(0)/), fres) + + ; Plot models and errors + bres = True + bres@gsLineColor = "black" + bres@gsLineThicknessF = 1.5 + bres@gsMarkerThicknessF = 1.5 + bres@gsMarkerSizeF = 0.01 + bres@gsMarkerColor = "black" + bres@gsMarkerIndex = 1 + Xbend = 0.01 * (res@trYMaxF - res@trYMinF) + Ybend = 0.01 * (res@trXMaxF - res@trXMinF) + txres = True + txres@txFontColor = "black" + txres@txFontHeightF = 0.016 + txres@txJust = "TopLeft" + do idat = 1, dimsizes(Xdata)-1 + ; bres@gsMarkerColor = colors(idat) + ; bres@gsMarkerIndex = thicks(idat) + if (.not. ismissing(Yerr(idat)) .and. .not. 
ismissing(Xerr(idat))) then + + ; Model error bars + Xerror(idat) = gsn_add_polyline(wks, plot_array, \ + (/Xdata(idat) + Xerr(idat), \ + Xdata(idat) - Xerr(idat)/), \ + (/Ydata(idat), Ydata(idat)/), bres) + Xerror_end(idat) = gsn_add_polyline(wks, plot_array, \ + (/Xdata(idat) + Xerr(idat), \ + Xdata(idat) + Xerr(idat), \ + Xdata(idat) + Xerr(idat), \ + Xdata(idat) - Xerr(idat), \ + Xdata(idat) - Xerr(idat), \ + Xdata(idat) - Xerr(idat)/), \ + (/Ydata(idat) - Xbend, \ + Ydata(idat) + Xbend, \ + Ydata(idat), Ydata(idat), \ + Ydata(idat) - Xbend, \ + Ydata(idat) + Xbend/), bres) + Yerror(idat) = gsn_add_polyline(wks, plot_array, \ + (/Xdata(idat), Xdata(idat)/), \ + (/Ydata(idat) + Yerr(idat), \ + Ydata(idat) - Yerr(idat)/), bres) + Yerror_end(idat) = gsn_add_polyline(wks, plot_array, \ + (/Xdata(idat) - Ybend, \ + Xdata(idat) + Ybend, \ + Xdata(idat), Xdata(idat), \ + Xdata(idat) - Ybend, \ + Xdata(idat) + Ybend/), \ + (/Ydata(idat) + Yerr(idat), \ + Ydata(idat) + Yerr(idat), \ + Ydata(idat) + Yerr(idat), \ + Ydata(idat) - Yerr(idat), \ + Ydata(idat) - Yerr(idat), \ + Ydata(idat) - Yerr(idat)/), bres) + end if + + ; Model means + plot_m(idat) = gsn_add_polymarker(wks, plot_array, Xdata(idat), \ + Ydata(idat), bres) + + ; Model numbers + if (isatt(Xdata, "model_numbers")) then + plot(idat) = gsn_add_text(wks, plot_array, \ + sprinti("%-0.2hi", \ + Xdata@model_numbers(idat)), \ + Xdata(idat), Ydata(idat), txres) + else + plot(idat) = gsn_add_text(wks, plot_array, sprinti("%-0.2hi", idat), \ + Xdata(idat), Ydata(idat), txres) + end if + end do + + leave_msg(scriptname, funcname) + + if (opt .eq. "draw") then + lgres = True + lgres@lgPerimOn = False + ; lgres@lgitem_type = "Markers" + lgres@lgMonoMarkerColor = False + lgres@lgMonoMarkerIndex = False + lgres@vpWidthF = 0.18 + lgres@lgLabelFontHeightF = 0.15 + lgres@lgMarkerSizeF = 0.01 + + amres = True + amres@amOrthogonalPosF = 0. + amres@amParallelPosF = 0.67 + lgres@vpHeightF = 0.6 + + lgres@lgMarkerColors = colors(2:) + lgres@lgMarkerIndexes = thicks(2:) + lbid = gsn_create_legend(wks, dimsizes(Xdata) - 2, lg_labels(2:), lgres) + ; annoid1 = gsn_add_annotation(plot_array, lbid, amres) + + draw(plot_array) + frame(wks) + return(0) + end if + + if (opt .eq. "panel") then + return(plot_array) + end if + +end + + +; ############################################################################# +undef("squared_error_plot") +function squared_error_plot(Ydata[*][*] : numeric, \ + Ymean[*] : numeric, \ + Ysqrd[*][*] : numeric, \ + MainStg[1] : string, \ + XStg[*] : string, \ + YStg[1] : string, \ + wks[1], \ + opt[1] : string) +; +; Arguments: +; Ydata: array, where the rightmost dimension contains the box plot +; reference pt. data: +; y(n, 0) = bottom_value, +; y(n, 1) = bottom_value_of_box, +; y(n, 2) = mid-value_of_box, +; y(n, 3) = top_value_of_box, +; y(n, 4) = top_value. +; Ymean: array containing the multi-model-average of the squared errors. +; Ysqrd: array containing the models' squared errors. +; MainStg: plot header strings. +; XStg: X-axis string. +; YStg: Y-axis string. +; wks: workstation for were to draw the plot. +; opt: type of operations: +; "draw": draws the plot directly and returnes logical. +; "panel": returnes plotvalue for pannel plot. 
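+;
+; Usage (illustrative sketch; the five reference values per row are
+; placeholders, e.g. percentiles of the squared-error distribution):
+;
+;   Ydata(n, :) = (/bottom_value, box_bottom, box_mid, box_top, top_value/)
+;   plot = squared_error_plot(Ydata, Ymean, Ysqrd, "Title", xlabels, \
+;                             "Squared error", wks, "panel")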
+; +local plot, res, llres, mres, sres +begin + + funcname = "squared_error_plot" + scriptname = "diag_scripts/shared/plot/mder.ncl" + enter_msg(scriptname, funcname) + + ; Check for valid data + if (all(ismissing(Ydata))) then + log_info("No data available, skipping") + return(0) + end if + + ; Define names for x-ticks and dimsizes + DIM_VAR = dimsizes(Ymean) + Xdata = ispan(0, DIM_VAR - 1, 1) + dumAVG = new(DIM_VAR, graphic) + dumSQRT = new((/DIM_VAR, dimsizes(Ysqrd(0, :))/), graphic) + + res = True + res@gsnMaximize = True + + res@tiYAxisFontHeightF = 0.020 + res@tiMainFontHeightF = 0.025 + res@tiMainString = MainStg + res@tiYAxisString = YStg + + res@vpHeightF = 0.4 ; change aspect ratio of plot + res@vpWidthF = 0.6 + + res@trXMinF = -0.5 + res@trXMaxF = DIM_VAR - 1.0 + 0.5 + res@tmXBLabelFontHeightF = 0.018 + res@tmXBMajorOutwardLengthF = 0.015 + res@tmXBMode = "Explicit" + res@tmXBLabels = XStg + if (dimsizes(XStg) .gt. 4) then + res@tmXBLabelJust = "BottomRight" + res@tmXBLabelAngleF = 28 + end if + res@tmXTBorderOn = True + res@tmYRBorderOn = True + res@trYMinF = -0.2 + res@trYMaxF = max(Ysqrd) + 0.05 * (max(Ysqrd)) + + ; Plot boxes + llres = True + llres@gsLineThicknessF = 3 ; line thickness + opti = True + opti@boxWidth = 0.8 ; Width of box (x units) + boxColors = new(dimsizes(Xdata), string) + boxColors(0::2) = (/"red"/) + boxColors(1::2) = (/"slateblue4"/) + opti@boxColors = boxColors ; Color of box(es) + plot = boxplot(wks, Xdata, Ydata, opti, res, llres) + + ; Plot models (weighted and unweighted) + mres = True ; marker mods desired + mres@gsMarkerIndex = 5 ; polymarker style + mres@gsMarkerThicknessF = 2 + mres@gsMarkerColor = "black" ; polymarker color + + sres = True ; marker mods desired + sres@gsMarkerIndex = 16 ; polymarker style + sres@gsMarkerSizeF = 0.004 + sres@gsMarkerColor = "gray55" ; polymarker color + + do ivar = 0, DIM_VAR - 1 + do ip = 0, dimsizes(Ysqrd(ivar, :)) - 1 + + ; Plot models + dumSQRT(ivar, ip) = gsn_add_polymarker(wks, plot, Xdata(ivar), \ + Ysqrd(ivar, ip), sres) + end do + + ; Plot multi-model means + dumAVG(ivar) = gsn_add_polymarker(wks, plot, Xdata(ivar), Ymean(ivar), \ + mres) + end do + + leave_msg(scriptname, funcname) + + if (opt .eq. "draw") then + draw(plot) + frame(wks) + return(0) + end if + + if (opt .eq. "panel") then + return(plot) + end if + +end + + +; ############################################################################# +undef("diag_scatter") +function diag_scatter(res_in[1] : logical, \ + diag_all : numeric, \ + future_diag : numeric, \ + target_var : string, \ + items: list) +; +; Description: +; Scatterplot for each diagnostic of historical simulation vs projection. +; +; Return value: +; plotnames: array of strings containing plotnames. +; +; Arguments: +; res_in: resources, must be given as attributes with "res_" prefix. +; diag_all: [dim_diags x dim_datasets], coordinates must contain names of +; diagnostics and datasets, respectively. +; future_diag: [dim_datasets], target diagnostic of projections, can have +; attribute "future_exp" for plot title. +; target_var: name of target variable, needs attribute 'units'. +; items: list of input_file_info metadata +; +; Modification history: +; 20191121-schlund_manuel: added provenance tracking. +; 20180806-schlund_manuel: ported to v2.0. +; 201806??-senftleben_daniel: written. 
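+;
+; Usage (illustrative sketch; "diag_array" [diags x datasets] and
+; "future_vals" [datasets] are placeholders; resources are passed as
+; attributes with a "res_" prefix and the target variable needs units):
+;
+;   res = True
+;   res@res_gsnMaximize = True
+;   tvar = "tas"
+;   tvar@units = "K"
+;   paths = diag_scatter(res, diag_array, future_vals, tvar, \
+;                        input_file_info)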
+; +local dim_diag, dim_model, diags, datasets, x_var, wks, plot +begin + + funcname = "diag_scatter" + scriptname = "diag_scripts/shared/plot/mder.ncl" + enter_msg(scriptname, funcname) + + all_diagnostics = diag_all + all_diagnostics!0 = "diags" + all_diagnostics!1 = "datasets" + diags = all_diagnostics&diags + datasets = all_diagnostics&datasets + plotnames = new(dimsizes(diags), string) + + ; Name of future projection + if (isatt(future_diag, "future_exp")) then + future_exp = " (" + future_diag@future_exp + ")" + else + future_exp = "" + end if + + do id = 0, dimsizes(diags) - 1 + wks = get_wks(0, DIAG_SCRIPT, target_var + "_" + diags(id)) + plotnames(id) = wks@fullname + x_var = (/all_diagnostics(id, :), future_diag/) + x_var!1 = "datasets" + x_var&datasets = datasets + copy_VarAtts(res_in, x_var) ; pass resources + x_var@res_tiMainString = "Correlation between " + diags(id) + \ + " and " + target_var + future_exp + x_var@res_tiXAxisString = diags(id) + x_var@res_tiYAxisString = target_var + " [" + target_var@units + "]" + x_var@res_trXMinF = min(x_var(0, :)) + x_var@res_trXMaxF = max(x_var(0, :)) + x_var@res_trYMinF = min(x_var(1, :)) + x_var@res_trYMaxF = max(x_var(1, :)) + plot = scatterplot(wks, x_var, target_var, False, False, items) + draw(plot) + frame(wks) + + ; Write ncdf file + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) + new_path = work_dir + target_var + "_" + diags(id) + ".nc" + x_var!0 = "variable" + x_var&variable = (/diags(id), target_var/) + x_var@var = "value" + x_var@diag_script = DIAG_SCRIPT + x_var@ncdf = new_path + outfile = ncdf_write(x_var, new_path) + + ; Provenance tracking + if (isStrSubset(diags(id), "_t")) then + statistics := (/"trend"/) + else if(isStrSubset(diags(id), "_c")) then + statistics := (/"mean", "clim"/) + else + statistics := (/"mean"/) + end if + end if + authors = (/"schlund_manuel"/) + domain = diag_script_info@domain + plot_type = "scatter" + references = (/"wenzel16jclim"/) + caption = "Scatterplot between " + target_var + " and " + diags(id) + "." + ancestors = metadata_att_as_array(items, "filename") + log_provenance(outfile, plotnames(id), caption, statistics, domain, \ + plot_type, authors, references, ancestors) + end do + + leave_msg(scriptname, funcname) + return(plotnames) + +end diff --git a/esmvaltool/diag_scripts/shared/plot/misc_function.ncl b/esmvaltool/diag_scripts/shared/plot/misc_function.ncl deleted file mode 100755 index a12c240fa6..0000000000 --- a/esmvaltool/diag_scripts/shared/plot/misc_function.ncl +++ /dev/null @@ -1,775 +0,0 @@ -; ############################################################################# -; MISCELLANEOUS FUNCTIONS PORTED FROM THE CCMvalTool -; ############################################################################# -; Please consider using of extending existing routines before adding new ones. -; Check the header of each routine for documentation. 
-; -; Contents: -; function month_sel -; function lat_names -; procedure add_line -; procedure add_scatt -; procedure add_legenda -; -; ############################################################################# - -; ############################################################################# -undef("month_sel") -function month_sel(month_names:string) -local month_names, month_n -begin - - if (.not.any(ismissing(ind(month_names.eq."ANN")))) then - month_n = new(12, "integer") - month_n = (/1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12/) - else - month_n = new(dimsizes(month_names), "integer") - do i = 0, dimsizes(month_names)-1 - if (.not.ismissing(ind(month_names(i).eq."JAN"))) then - month_n(i) = 1 - end if - if (.not.ismissing(ind(month_names(i).eq."FEB"))) then - month_n(i) = 2 - end if - if (.not.ismissing(ind(month_names(i).eq."MAR"))) then - month_n(i) = 3 - end if - if (.not.ismissing(ind(month_names(i).eq."APR"))) then - month_n(i) = 4 - end if - if (.not.ismissing(ind(month_names(i).eq."MAY"))) then - month_n(i) = 5 - end if - if (.not.ismissing(ind(month_names(i).eq."JUN"))) then - month_n(i) = 6 - end if - if (.not.ismissing(ind(month_names(i).eq."JUL"))) then - month_n(i) = 7 - end if - if (.not.ismissing(ind(month_names(i).eq."AUG"))) then - month_n(i) = 8 - end if - if (.not.ismissing(ind(month_names(i).eq."SEP"))) then - month_n(i) = 9 - end if - if (.not.ismissing(ind(month_names(i).eq."OCT"))) then - month_n(i) = 10 - end if - if (.not.ismissing(ind(month_names(i).eq."NOV"))) then - month_n(i) = 11 - end if - if (.not.ismissing(ind(month_names(i).eq."DEC"))) then - month_n(i) = 12 - end if - end do - end if - - return(month_n) - -end - -; ############################################################################# - -undef("lat_names") -function lat_names(lat_val:numeric) -local lat_name, lat_val -begin - - lat_name = new((/dimsizes(lat_val)/), "string") - do i = 0, dimsizes(lat_val) - 1 - if (lat_val(i).gt.0.) then - lat_name(i) = lat_val(i) + "N" - end if - if (lat_val(i).eq.0.) then - lat_name(i) = "Equator" - end if - if (lat_val(i).lt.0.) 
then - lat_name(i) = -lat_val(i) + "S" - end if - end do - - return (lat_name) - -end - -; ############################################################################# - -undef("add_line") -procedure add_line(wks, plot, x, yval, line_color, line_dash) -local plres, str, y, plot, x, yval, line_color, line_dash -begin - - ; gsn_add_polyline crashes if the arrays x and/or yval contain - ; only missing values - if (all(ismissing(x)).or.all(ismissing(yval))) then - return - end if - - plres = True - plres@gsLineColor = line_color - plres@gsLineDashPattern = line_dash - plres@gsLineThicknessF = 2.5 - str = unique_string("polyline") - plot@$str$ = gsn_add_polyline(wks, plot, x, yval, plres) - -end - -; ############################################################################# - -undef("add_scatt") -procedure add_scatt(wks, plot, x, yval, mark_color, mark_ind) -local plres, str, y, plot, x, yval, line_color, line_dash -begin - - plres = True - plres@gsMarkerColor = mark_color - plres@gsMarkerIndex = mark_ind - str = unique_string("polymarker") - plot@$str$ = gsn_add_polymarker(wks, plot, x, yval, plres) - -end - -; ############################################################################# -undef("add_legenda") -procedure add_legenda(wks, - MODEL:string, - colors:string, - dashs:numeric, - scatters:string, - ticknesses:numeric, - place:string) -local MODEL, dim_mod, colors, dashs, edge_x, edge_y, edge_res, tx1res, \ - dim_lb, xleg, xl, yleg, yl, xtxt, ytxt, place, dim_lines, space_lines, \ - lin_sp, scatters, ticknesses -begin - - dim_mod = dimsizes(MODEL) - dim_lines = floattoint(dim_mod / 3)+1 - if ((mod(dim_mod, 3)).eq.0) then - dim_lines = floattoint(dim_mod /3) + 1 - else - dim_lines = floattoint(dim_mod / 3) + 2 - end if - lin_sp = 0.025 - space_lines = (dim_lines + 1) * lin_sp - - if (place.eq."bottom") - top_line = 0.20 - end if - if (place.eq."middle") - top_line = 0.42 - end if - if (place.eq."top") - top_line = 0.99 - end if - if ((place.ne."bottom").and.(place.ne."middle").and.(place.ne."top")) then - top_line = tofloat(place) - end if - bot_line = top_line-space_lines - edge_x = (/ 0.12, 0.92, 0.92, 0.12, 0.12/) - edge_y = (/space_lines+bot_line, space_lines+bot_line, bot_line, \ - bot_line, space_lines+bot_line/) - edge_res = True - edge_res@gsLineColor = "white" - gsn_polyline_ndc(wks, edge_x, edge_y, edge_res) - - dim_lb = dimsizes(MODEL) - - gs1res = True - tx1res = True - tx1res@txFontHeightF = lin_sp / 5 * 2 - - xleg = new((/3 * dim_lines, 4/), "float") - xl = new((/3, 4/), "float") - - do i = 0, 2 - xl(0, :) = fspan((edge_x(0) + 0.02), (edge_x(0) + 0.07), 4) - xl(1, :) = fspan(((edge_x(2) + edge_x(0)) / 2 - 0.09), \ - ((edge_x(2) + edge_x(0)) / 2 - 0.04), 4) - xl(2, :) = fspan((edge_x(2) - 0.20), (edge_x(2) - 0.15), 4) - end do - - nt = 0 - do j = 0, dim_lines - 1 - do i = 0, 2 - xleg(nt, :) = (/xl(i, :)/) - nt = nt + 1 - end do - end do - - yleg = new((/3 * dim_lines, 4/), "float") - yl = new(dim_lines, "float") - - do i = 0, dim_lines - 1 - yl(dim_lines - 1 - i) = edge_y(3) + \ - (((edge_y(0) - (edge_y(3))) / dim_lines) * (i + 1)) - (lin_sp) - end do - - nt = 0 - do j = 0, dim_lines - 1 - do i = 0, 2 - yleg(nt, :) = (/yl(j)/) - nt = nt + 1 - end do - end do - - xtxt = new((/dim_lines * 3/), "float") - ytxt = new((/dim_lines * 3/), "float") - - nt = 0 - do j = 0, dim_lines - 1 - do i = 0, 2 - xtxt(nt) = xl(i, 3) + lin_sp / 5. 
- nt = nt + 1 - end do - end do - nt = 0 - do i = 0, dim_lines - 1 - do j = 0, 2 - ytxt(nt) = (/yl(i)/) - nt = nt + 1 - end do - end do - - do i = 0, (dimsizes(MODEL) - 1) - gs1res@gsLineColor = colors(i) - gs1res@gsLineDashPattern = dashs(i) - gs1res@gsLineThicknessF = ticknesses(i) - tx1res@txFontColor = colors(i) - tx1res@txJust = "CenterLeft" - - if (scatters(i).eq."Markers") - gs1res@gsMarkerColor = colors(i) - gs1res@gsMarkerIndex = dashs(i) - gs1res@gsMarkerSizeF = 0.01 - gs1res@gsMarkerThicknessF = 1.5 - gsn_polymarker_ndc(wks, xleg(i, 2), yleg(i, 2), gs1res) - else - gsn_polyline_ndc(wks, xleg(i, :), yleg(i, :), gs1res) - end if - gsn_text_ndc(wks, MODEL(i), xtxt(i), ytxt(i), tx1res) - end do - -end - -; ############################################################################# - -undef("calcRegCoeffs") -function calcRegCoeffs(y:numeric) -local y, d_y, d_t, ma, x, funcx, pi, t, a -begin - d_y = dimsizes(y) - d_t = d_y(0) - - ma = 14 - x = fspan(1, d_t, d_t) - a = new((/ma/), typeof(y)) - - funcx = new((/ma, d_t/), "float") - pi = 4.0 * atan(1.0) - t = 2 * pi * (x - 0.5) / 12 - - funcx(0, :) = 1 - funcx(1, :) = sin(t) - funcx(2, :) = cos(t) - funcx(3, :) = sin(2 * t) - funcx(4, :) = cos(2 * t) - funcx(5, :) = sin(3 * t) - funcx(6, :) = cos(3 * t) - funcx(7, :) = sin(4 * t) - funcx(8, :) = cos(4 * t) - funcx(9, :) = x - funcx(10, :) = sin(t) * x - funcx(11, :) = cos(t) * x - funcx(12, :) = sin(2 * t) * x - funcx(13, :) = cos(2 * t) * x - - a(:) = reg_multlin(y(:), funcx, False) - a@_FillValue = 1e20 - delete_VarAtts(a, "constant") - - a@long_name = "Coefficients A and B" - a!0 = "number of coefficients" - - return(a) - -end - -; ########################################################################### - -undef("genZonalMeans") -function genZonalMeans(x:numeric, - lat:numeric, - startLat:numeric, - endLat:numeric) -local dimx, ntim, nlat, nzone, z, t, i, j, cnt, idx_st, idx_ed, xZonMean -begin - dimx = dimsizes(x) - ntim = dimx(0) - nlat = dimx(1) - nlon = dimx(2) - nzone = dimsizes(startLat) - - nmos = 12 - modCheck("genZonalMeans", ntim, nmos) - - xZonMean = new((/ntim, nzone/), typeof(x)) - - pi = 4. * atan(1.0) - rad = (pi / 180.) - - ; loop through lat_band - do t = 0, (ntim-1) - do z = 0, (nzone-1) - idx_st = 0 - do while (lat(idx_st).lt.startLat(z)) - idx_st = idx_st + 1 - end do - idx_ed = nlat - 1 - do while (lat(idx_ed).gt.endLat(z)) - idx_ed = idx_ed - 1 - end do - - zoneTotalX = 0.0 - ZoneTotalArea = 0.0 - - do i = idx_st, idx_ed - if (i.eq.idx_st) then - Lat1 = startLat(z) - else - Lat1 = (lat(i) + lat(i - 1))/2 - end if - if (i.eq.idx_ed) then - Lat2 = endLat(z) - else - Lat2 = (lat(i) + lat(i + 1))/2 - end if - lat_slice = x(t, i, :) - idx_lon = ind(.not.ismissing(lat_slice)) - if (.not.all(ismissing(idx_lon))) then - CellArea = abs(sin(Lat1 * rad) - sin(Lat2 * rad)) - zoneTotalX = zoneTotalX + sum(lat_slice) * CellArea - ZoneTotalArea = ZoneTotalArea + dimsizes(idx_lon) * CellArea - end if - delete(idx_lon) - end do - - if (ZoneTotalArea.ne.0) then - xZonMean(t, z) = zoneTotalX / ZoneTotalArea - end if - end do ; loop(z) - end do ; loop(t) - - ; Create an informational attribute: - xZonMean@info = "zonal mean: CCMOzoneModelData.ncl" - - return (xZonMean) - -end - -; ############################################################################# - -undef("calcMeanAnnCycleMonthly") -function calcMeanAnnCycleMonthly(RegCoeffAs:numeric) -local dimx, z, month, t, xMeanAnnCycleMon -begin - dimx = dimsizes(RegCoeffAs) - - pi = 4. 
* atan(1.0) - - month = fspan(1, 12, 12) - t = 2 * pi * (month - 0.5) / 12.0 - - xMeanAnnCycleMon = new((/12/), "float") - xMeanAnnCycleMon(:) = RegCoeffAs(0) + \ - RegCoeffAs(1) * sin(t) + RegCoeffAs(2) * cos(t) + \ - RegCoeffAs(3) * sin(2 * t) + RegCoeffAs(4) * cos(2 * t) + \ - RegCoeffAs(5) * sin(3 * t) + RegCoeffAs(6) * cos(3 * t) + \ - RegCoeffAs(7) * sin(4 * t) + RegCoeffAs(8) * cos(4 * t) - - ; Create an informational attribute: - xMeanAnnCycleMon@info = "mean annual cycle monthly: CCMOzoneModelData.ncl" - - return(xMeanAnnCycleMon) - -end - -; ############################################################################# - -undef("calcMeanAnnCycleAnnual") -function calcMeanAnnCycleAnnual(RegCoeffAs:numeric) -local dimx, day, t, xMeanAnnCycleDay -begin - - dimx = dimsizes(RegCoeffAs) - - pi = 4. * atan(1.0) - - day = fspan(1, 366, 366) - t = 2 * pi * (day - 0.5) / 366.0 - - xMeanAnnCycleDay = new((/366/), "float") - - xMeanAnnCycleDay(:) = RegCoeffAs(0) + \ - RegCoeffAs(1) * sin(t) + RegCoeffAs(2) * cos(t) + \ - RegCoeffAs(3) * sin(2 * t) + RegCoeffAs(4) * cos(2 * t) + \ - RegCoeffAs(5) * sin(3 * t) + RegCoeffAs(6) * cos(3 * t) + \ - RegCoeffAs(7) * sin(4 * t) + RegCoeffAs(8) * cos(4 * t) - - ; Create an informational attribute: - xMeanAnnCycleDay@info = "mean annual cycle daily: CCMOzoneModelData.ncl" - - return (xMeanAnnCycleDay) - -end - -; ############################################################################# - -undef("rmMeanAnnCycle") -procedure rmMeanAnnCycle(x:numeric, - xMeanAnnCycle:numeric, - RegCoeffA1:numeric, - xMonAnom:numeric, - xAnnAnom:numeric) -local dimx, ntim, t, i, idx_st, idx_ed, nyr, xdata -begin - dimx = dimsizes(x) - ntim = dimx(0) - - nmos = 12 - modCheck("rmMeanAnnCycle", ntim, nmos) ; error check - - nyr = ntim/nmos - - ; loop through lat_band - do t = 0, (ntim - 1) - mon = mod(t, 12) - xMonAnom(t) = (x(t) - xMeanAnnCycle(mon)) * 100.0 / RegCoeffA1 - end do ; loop(t) - - do t = 0, (nyr - 1) - idx_st = t * 12 - idx_ed = (t + 1) * 12 - 1 - xdata = xMonAnom(idx_st:idx_ed) - if(.not.any(ismissing(xdata))) then - xAnnAnom(t) = avg(xdata) - end if - end do ; loop(t) - - ; Create an informational attribute: - xMonAnom@info = "monthly anomalies: CCMOzoneModelData.ncl" - xAnnAnom@info = "annual anomalies: CCMOzoneModelData.ncl" - -end - -; ############################################################################# - -undef("apfiltersmooth") -function apfiltersmooth(xMonAnom:numeric, - filter:numeric, - iterations:integer) -local dimx, dimf, ntim, nwin, z, t, i, j, hfw, idx_ed, avgwidth, \ - totalwgt, total, num, minval, finished, data, hold, xSmthMonAnom -begin - dimx = dimsizes(xMonAnom) - ntim = dimx(0) - - nmos = 12 - modCheck("apfiltersmooth", ntim, nmos) ; error check - - dimf = dimsizes(filter) - nwin = dimf(0) - hfw = nwin / 2 - - data = new(nwin, typeof(xMonAnom)) - hold = new(ntim, typeof(xMonAnom)) - xSmthMonAnom = new((/ntim/), typeof(xMonAnom)) - - do i = 1, iterations - - ; Transfer the data to the storage array. - if (i.eq.1) then - hold(:) = xMonAnom(:, z) - else - hold(:) = xSmthMonAnom(:, z) - end if - - ; Fill the data array for the first time. - do j = 1, hfw - data(j - 1) = -999.99 - end do - do j = hfw, (nwin - 1) - data(j) = hold(j - hfw) - end do - idx_ed = nwin-hfw - - ; Process the data array. 
- do t = 0, (ntim - 1) - if (data(hfw).lt.-999.0) then - xSmthMonAnom(t, z) = -999.99 - else - avgwidth = 0 - totalwgt = filter(hfw) - total = filter(hfw) * data(hfw) - num = 1 - finished = False - do while (.not.finished) - avgwidth = avgwidth + 1 - if ((data(hfw - avgwidth).gt.-999.0).and. \ - (data(hfw + avgwidth).gt.-999.0)) then - totalwgt = totalwgt + filter(hfw - avgwidth) + \ - filter(hfw + avgwidth) - total = total + (filter(hfw - avgwidth) * data(hfw - avgwidth)) + \ - (filter(hfw + avgwidth) * data(hfw + avgwidth)) - num = num+2 - else - finished = True - end if - if (avgwidth.eq.hfw) then - finished = True - end if - end do - if (i.eq.iterations) then - minval = hfw - else - minval = 0 - end if - if (num.gt.minval) then - xSmthMonAnom(t) = total / totalwgt - else - xSmthMonAnom(t) = -999.99 - end if - end if - - do j = 1, (nwin - 1) - data(j - 1) = data(j) - end do - idx_ed = idx_ed + 1 - if (idx_ed.gt.ntim) then - data(nwin - 1) = -999.99 - else - data(nwin - 1) = hold(idx_ed - 1) - end if - end do ; loop(t) - end do ; loop(i) - - ; Create an informational attribute: - xSmthMonAnom@info = "smoothed monthly anomalies: CCMOzoneModelData.ncl" - xSmthMonAnom@_FillValue = -999.99 - - return (xSmthMonAnom) - -end - -; ############################################################################# - -undef("smoothAnomalies") -procedure smoothAnomalies(xAnnAnom:numeric, - iterations:integer) -local dimx, dimf, ntim, nset, nwin, s, t, i, j, hfw, idx_ed, avgwidth, \ - totalwgt, total, num, minval, finished, smthdata, hold, buffer, filter, \ - method -begin - - filter = (/1., 2., 1./) - method = "EdgeP" - - dimx = dimsizes(xAnnAnom) - ntim = dimx(0) - - dimf = dimsizes(filter) - nwin = dimf(0) - hfw = nwin/2 - - buffer = new(nwin, typeof(xAnnAnom)) - smthdata = new(ntim, typeof(xAnnAnom)) - hold = new(ntim, typeof(xAnnAnom)) - - do i = 1, iterations - ; Transfer the data to the storage array. - if (i.eq.1) then - hold = (/xAnnAnom/) - else - hold = (/smthdata/) - end if - - ; Fill the data array for the first time. - do j = 1, hfw - buffer(j - 1) = -999.99 - end do - do j = hfw, (nwin - 1) - buffer(j) = (/hold(j - hfw)/) - end do - idx_ed = nwin - hfw - - ; Process the data array. - do t = 0, (ntim - 1) - if (buffer(hfw).lt.-999.0) then - smthdata(t) = -999.99 - else - avgwidth = 0 - totalwgt = filter(hfw) - total = filter(hfw) * buffer(hfw) - num = 1 - finished = False - do while (.not.finished) - avgwidth = avgwidth + 1 - if ((buffer(hfw - avgwidth).gt.-999.0) .and. 
\ - (buffer(hfw + avgwidth).gt.-999.0)) then - totalwgt = \ - totalwgt + filter(hfw - avgwidth) + filter(hfw + avgwidth) - total = \ - total + (filter(hfw - avgwidth) * buffer(hfw - avgwidth)) + \ - (filter(hfw + avgwidth) * buffer(hfw + avgwidth)) - num = num + 2 - else - finished = True - end if - if (avgwidth.eq.hfw) then - finished = True - end if - end do - - if (method.eq."EdgeP") then - if (num.eq.1) then - if (buffer(hfw + 1).lt.-999.0) then - smthdata(t) = (buffer(hfw) + buffer(hfw - 1)) / 2.0 - else - smthdata(t) = (buffer(hfw) + buffer(hfw + 1)) / 2.0 - end if - else - smthdata(t) = total / totalwgt - end if - else - if (i.eq.iterations) then - minval = hfw - else - minval = 0 - end if - if (num.gt.minval) then - smthdata(t) = total / totalwgt - else - smthdata(t) = -999.99 - end if - end if ; if(method) - end if - do j = 1, (nwin - 1) - buffer(j-1) = buffer(j) - end do - idx_ed = idx_ed+1 - if (idx_ed.gt.ntim) then - buffer(nwin-1) = -999.99 - else - buffer(nwin-1) = hold(idx_ed-1) - end if - end do ; loop(t) - end do ; loop(i) - xAnnAnom(:) = (/smthdata(:)/) - - ; Create an informational attribute: - xAnnAnom@info = "smoothed annual anomalies: CCMOzoneModelData.ncl" - xAnnAnom@_FillValue = -999.99 - -end - - -; ############################################################################# -; -; D. Shea -; Take a monthly climatology and make a daily climatology -; Current for gregorian / standard year. -; -; Supported: leftmost dimension must be 12 -; x(12), x(12, N), x(12, N1, N2), x(12, N1, N2, N3) -; x must have named dimensions on entry -; opt - not used set to zero [0] -; -undef("clmMon2clmDayn") -function clmMon2clmDayn(x:numeric, retOrder:integer, opt:integer) -local dNames, dimx, rank, X, midMon, day -begin - if (.not.(retOrder.eq.0 .or. 
retOrder.eq.1)) then - print("clmMon2clmDay: retOrder must be 0 or 1, retOrder=" + retOrder) - exit - end if - - dNames = getvardims(x) - if (any(ismissing(dNames))) then - print("clmMon2clmDay: named dimensions required:" + dNames) - exit - end if - - dimx = dimsizes(x) - if (dimx(0).ne.12) then - print("clmMon2clmDay: leftmost dimension must be size=12: SIZE=" + dimx(0)) - exit - end if - - rank = dimsizes(dimx) - if (rank.gt.4) then - print("clmMon2clmDay: currently max of 4 dimension supported: rank=" + \ - rank) - exit - end if - - ; transfer to work array, if necessary, reorder array - if (rank.eq.1) then - X = x - end if - if (rank.eq.2) then - X = x($dNames(1)$|:, $dNames(0)$|:) - end if - if (rank.eq.3) - X = x($dNames(1)$|:, $dNames(2)$|:, $dNames(0)$|:) - end if - if (rank.eq.4) - X = x($dNames(1)$|:, $dNames(2)$|:, $dNames(3)$|:, $dNames(0)$|:) - end if - - ; mid day of each month - if (isatt(opt, "midmon")) then - if (dimsizes(opt@midMon).eq.12) then - midMon = opt@midMon - else - print("clmMon2clmDay: midMon required to be size 12: size=" + \ - dimsizes(opt@midMon)) - exit - end if - else - midMon = (/15.5, 45, 74.5, 105, 135.5, 166, 196.5, \ - 227.5, 258, 288.5, 319, 349.5/) - end if - midMon@long_name = "middle of month" - - day = ispan(0, 364, 1) ; use 0 => 364 for interpolation - day!0 = "day" - - Z = linint1_Wrap(midMon, X, True, day, 0) - Z@info = "NCL: clmMon2clmDay" - - day = ispan(1, 365, 1) ; use 1 => 365 for coord variable - day@long_name = "day of year: no leap" - day@units = "1=Jan 1, 32=Feb 1, ..., 365-Dec 31" - Z!(rank-1) = "day" - Z&day = day - - if (retOrder.eq.1) then - return(Z) - end if - - if (retOrder.eq.0) then - if (rank.eq.1) then - return(Z) - end if - if (rank.eq.2) then - return(Z(day|:, $dNames(1)$|:)) - end if - if (rank.eq.3) then - return(Z(day|:, $dNames(1)$|:, $dNames(2)$|:)) - end if - if (rank.eq.4) then - return(Z(day|:, $dNames(1)$|:, $dNames(2)$|:, $dNames(3)$|:)) - end if - end if - -end diff --git a/esmvaltool/diag_scripts/shared/plot/mjo_level1.ncl b/esmvaltool/diag_scripts/shared/plot/mjo_level1.ncl deleted file mode 100644 index 4224f8aaff..0000000000 --- a/esmvaltool/diag_scripts/shared/plot/mjo_level1.ncl +++ /dev/null @@ -1,323 +0,0 @@ -; ############################################################################# -; GENERAL ROUTINES FOR STATISTICS -; ############################################################################# -; Please consider using or extending existing routines before adding new -; ones. -; Check the header of each routine for documentation. -; -; Contents: -; function pr_u850_mean_plot -; function mjo_xcor_lag_plot -; function mjo_pr_ua_vari_plot -; function mjo_unvari_eof_plot -; -; ############################################################################# - -; Temporary file updated by Python on the fly - - -load "$diag_scripts/../interface_scripts/auxiliary.ncl" -load "$diag_scripts/../interface_scripts/data_handling.ncl" -load "$diag_scripts/../interface_scripts/logging.ncl" - -load "$diag_scripts/shared/set_operators.ncl" -load "$diag_scripts/shared/statistics.ncl" - -load "$diag_scripts/aux/SAMonsoon/SAMonsoon.ncl" - -undef("pr_u850_mean_plot") -function pr_u850_mean_plot(wks[1]:graphic,\ - mean_pr[*][*] : numeric,\ - mean_ua[*][*] : numeric,\ - di[1] : logical,\ - opt[1] : logical) -; Description: -; Place the 2D-model fields stored in the 'storage_vault' on a set of paneled -; figures. First entry is always the reference data set. 
The various logcials -; carry switches for the plot routine behaviour. -local res1, res2, plot -begin - ; common resources for global contour plots - res1 = True - res1@gsnDraw = False ; don't draw - res1@gsnFrame = False ; don't advance frame - res1@gsnStringFontHeightF = 0.0125 ; make larger than default - - res1@lbLabelBarOn = False - res1@lbLabelFontHeightF = 0.01 ; make labels larger - res1@pmLabelBarOrthogonalPosF = -0.025 ; move closer to plot - res1@lbLabelAutoStride = True ; optimal label stride - res1@gsnSpreadColors = True ; use full range of colors - res1@mpCenterLonF = 180. ; center plot at 180 - - res1@cnLevelSelectionMode = "ExplicitLevels" ; set explicit contour levels - res1@cnLevels = ispan(2, 13, 1) - - res1@gsnAddCyclic = False - - res1@gsnStringFontHeightF = 0.02 - res1@tmYLLabelFontHeightF = 0.02 - res1@tmXBLabelFontHeightF = 0.02 - - res1@mpMinLonF = di@lonrange_basic(0) ; select a subregion - res1@mpMaxLonF = di@lonrange_basic(1) - res1@mpMinLatF = di@latrange_basic(0) - res1@mpMaxLatF = di@latrange_basic(1) - - res1@mpLandFillColor = "background" ; color of land - res1@cnFillDrawOrder = "Predraw" - - if (isatt(opt, "gsnLeftString")) then - res1@gsnLeftString = opt@gsnLeftString - end if - - if (isatt(opt, "gsnCenterString")) then - res1@gsnCenterString = opt@gsnCenterString - end if - - if (isatt(opt, "gsnRightString")) then - res1@gsnRightString = opt@gsnRightString - end if - - ; ************************************************** - ; resource list for second (contour only) data array - ; ************************************************** - res2 = True - res2@gsnContourZeroLineThicknessF = 2. ; doubles thickness of zero contour - - ; Sets negative contours to dash pattern 1 - res2@gsnContourNegLineDashPattern = 2 - res2@cnLineColor = "Black" ; color of second contours - res2@cnLineThicknessF = 1. - res2@cnLevelSpacingF = 3. 
; SST contour spacing - res2@cnInfoLabelOn = False ; do not want - res2@cnLineLabelsOn = True - res2@cnLabelMasking = True - res2@tiMainString = "" - - res2@gsnAddCyclic = False - plot = gsn_csm_contour_map_overlay(wks, mean_pr, mean_ua, res1, res2) - - return(plot) - -end - -; ############################################################################# - -undef("mjo_xcor_lag_plot") -function mjo_xcor_lag_plot(wks[1]:graphic, - ccr_a[*][*]:numeric, - ccr_b[*][*]:numeric, - opt[1]:logical) -local res1, res2, CCR1, CCR2, plot -begin - res1 = True ; color precip - res1@gsnDraw = False - res1@gsnFrame = False - res1@gsnMaximize = True - res1@gsnPaperOrientation = "portrait" - - res1@cnFillOn = True ; turn on color - res1@cnLinesOn = False - res1@cnLevelSelectionMode = "ManualLevels" ; set manual contour levels - res1@cnMinLevelValF = -1.0 ; set min contour level - res1@cnMaxLevelValF = 1.0 ; set max contour level - res1@cnLevelSpacingF = 0.1 ; set contour spacing - - res1@cnLabelBarEndLabelsOn = True - res1@cnLabelBarEndStyle = "ExcludeOuterBoxes" - res1@cnInfoLabelOn = False - - res1@lbLabelBarOn = False ; turn off individual cb's - res1@vpWidthF = 0.6 ; change aspect ratio of plot - res1@vpHeightF = 0.4 - - res1@tiYAxisString = "lag (days)" - if (isatt(opt, "tiMainString")) then - res1@tiMainString = opt@tiMainString - end if - - if (isatt(opt, "gsnLeftString")) then - res1@gsnLeftString = opt@gsnLeftString - end if - - if (isatt(opt, "gsnCenterString")) then - res1@gsnCenterString = opt@gsnCenterString - end if - - if (isatt(opt, "gsnRightString")) then - res1@gsnRightString = opt@gsnRightString - end if - - ; *********************************** - ; resource list for second data array - ; *********************************** - res2 = True ; U - res2@gsnDraw = False - res2@gsnFrame = False - res2@cnLevelSelectionMode = "ManualLevels" ; set manual contour levels - res2@cnMinLevelValF = -1.0 - res2@cnMaxLevelValF = 1.0 - res2@cnLevelSpacingF = 0.1 - res2@cnLineLabelsOn = True - res2@gsnContourZeroLineThicknessF = 0. ; Eliminate 0 line - res2@gsnContourNegLineDashPattern = 1 ; negative contours dash pattern - res2@cnInfoLabelOn = False - - CCR1 = ccr_a ; possible smooth and delete of attribute - CCR2 = ccr_b - - if (opt .and. isatt(opt, "smth9") .and. abs(opt@smth9).eq.0.25) then - CCR1 = smth9(CCR1, 0.50, opt@smth9, False) - CCR2 = smth9(CCR2, 0.50, opt@smth9, False) - end if - delete(CCR1@long_name) - plot = gsn_csm_contour(wks, CCR1, res1) ; contour the variable - - delete(CCR2@long_name) - plt2 = gsn_csm_contour(wks, CCR2, res2) ; contour the variable - - overlay(plot, plt2) - - delete(CCR1) ; size may change - delete(CCR2) - - return(plot) -end - -; ############################################################################# - -undef("mjo_pr_ua_vari_plot") -function mjo_pr_ua_vari_plot(wks[1]:graphic, - ccr_a[*][*]:numeric, - ccr_b[*][*]:numeric, - opt[1]:logical, - di[1]:logical) -local res1, res2, plot -begin - - ; ****************************************** - ; resource list for first (color) data array - ; ****************************************** - res1 = True - res1@gsnDraw = False ; don't draw - res1@gsnFrame = False ; don't advance frame - - res1@lbLabelBarOn = False - - res1@lbLabelFontHeightF = 0.01 ; make labels larger - res1@pmLabelBarOrthogonalPosF = -0.025 ; move closer to plot - res1@lbLabelAutoStride = True ; optimal label stride - res1@gsnSpreadColors = True ; use full range of colors - res1@mpCenterLonF = 180. 
; center plot at 180 - - res1@gsnStringFontHeightF = 0.02 - res1@tmYLLabelFontHeightF = 0.02 - res1@tmXBLabelFontHeightF = 0.02 - - res1@mpMinLonF = di@lonrange_seasonal(0) ; select a subregion - res1@mpMaxLonF = di@lonrange_seasonal(1) - res1@mpMinLatF = di@latrange_seasonal(0) - res1@mpMaxLatF = di@latrange_seasonal(1) - - res1@cnLevelSelectionMode = "ExplicitLevels" ; set explicit contour levels - res1@cnLevels = di@cnLevels_pr_vari - - res1@gsnAddCyclic = False - - res1@mpLandFillColor = "background" ; color of land - res1@cnFillDrawOrder = "Predraw" - - if (isatt(opt, "gsnLeftString")) then - res1@gsnLeftString = opt@gsnLeftString - end if - - if (isatt(opt, "gsnCenterString")) then - res1@gsnCenterString = opt@gsnCenterString - end if - - if (isatt(opt, "gsnRightString")) then - res1@gsnRightString = opt@gsnRightString - end if - - ; ************************************************** - ; resource list for second (contour only) data array - ; ************************************************** - res2 = True - res2@gsnContourZeroLineThicknessF = 2. ; doubles thickness of zero contour - - ; Sets negative contours to dash pattern 1 - res2@gsnContourNegLineDashPattern = 2 - res2@cnLineColor = "Black" ; color of second contours - res2@cnLineThicknessF = 1. - - res2@gsnAddCyclic = False - - res2@cnLevelSpacingF = 3. ; SST contour spacing - res2@cnInfoLabelOn = False ; do not want - res2@cnLineLabelsOn = True - res2@cnLabelMasking = True - res2@tiMainString = "" - - plot = gsn_csm_contour_map_overlay(wks, ccr_a, ccr_b, res1, res2) - return(plot) - -end - -; ############################################################################# - -undef("mjo_unvari_eof_plot") -function mjo_unvari_eof_plot(wks[1]:graphic, - eof[*][*]:numeric, - opt[1]:logical, - di[1]:logical) -local res, lot -begin - - ; EOF patterns - res = True - res@gsnDraw = False ; don't draw yet - res@gsnFrame = False ; don't advance frame yet - res@gsnSpreadColors = True ; spread out color table - res@gsnStringFontHeightF = 0.015 ; make larger than default - - res@mpFillOn = False ; turn off map fill - res@mpMinLatF = di@latrange_eof(0) ; zoom in on map - res@mpMaxLatF = di@latrange_eof(1) - res@mpMinLonF = di@lonrange_eof(0) ; select a subregion - res@mpMaxLonF = di@lonrange_eof(1) - - res@mpCenterLonF = 180. 
- - res@gsnAddCyclic = False - - res@cnFillOn = True ; turn on color fill - res@cnLinesOn = False ; True is default - res@cnLineLabelsOn = False ; True is default - res@lbLabelBarOn = False ; turn off individual lb's - - res@lbLabelFontHeightF = 0.01 ; make labels larger - - res@cnLevelSelectionMode = "ExplicitLevels" ; set explicit contour levels - res@cnLevels = di@cnLevels - - res@gsnStringFontHeightF = 0.018 - res@tmYLLabelFontHeightF = 0.018 - res@tmXBLabelFontHeightF = 0.018 - - if (isatt(opt, "gsnLeftString")) then - res@gsnLeftString = opt@gsnLeftString - end if - - if (isatt(opt, "gsnCenterString")) then - res@gsnCenterString = opt@gsnCenterString - end if - - if (isatt(opt, "gsnRightString")) then - res@gsnRightString = opt@gsnRightString - end if - - plot = gsn_csm_contour_map_ce(wks, eof(:, :), res) - return(plot) - -end diff --git a/esmvaltool/diag_scripts/shared/plot/mjo_level2.ncl b/esmvaltool/diag_scripts/shared/plot/mjo_level2.ncl deleted file mode 100644 index 5cc163f5c1..0000000000 --- a/esmvaltool/diag_scripts/shared/plot/mjo_level2.ncl +++ /dev/null @@ -1,731 +0,0 @@ - -load "$diag_scripts/../interface_scripts/auxiliary.ncl" -load "$diag_scripts/../interface_scripts/data_handling.ncl" -load "$diag_scripts/../interface_scripts/logging.ncl" - -load "$diag_scripts/shared/set_operators.ncl" -load "$diag_scripts/shared/statistics.ncl" - -load "$diag_scripts/aux/SAMonsoon/SAMonsoon.ncl" - -; ############################################################################# -undef("mjo_wave_freq_plot") -function mjo_wave_freq_plot(wks[1]:graphic, - wf[*][*]:numeric, - dummy[*][*]:graphic, - idx_mod[1]:numeric, - opt[1]:logical, - di[1]:logical) -; Description: -; Place the 2D-model fields stored in the 'storage_vault' on a set of paneled -; figures. First entry is always the reference data set. The various logcials -; carry switches for the plot routine behaviour. -local res, NW, fMin, fMax, day1, fline1, day2, fline2, gsres, tres -begin - - ; Set plot setting - res = True ; plot mods desired - res@gsnFrame = False - res@gsnDraw = False - res@cnFillOn = True ; turn on color - res@gsnSpreadColors = True ; use full range of colormap - res@lbLabelAutoStride = True - res@cnLinesOn = False ; no contour lines - res@cnLineLabelsOn = False - res@cnLevelFlags = False - res@cnInfoLabelOn = False - res@cnLevelSelectionMode = "ExplicitLevels" - res@cnLevels = (/0.01, 0.015, 0.02, 0.025, 0.03, 0.035, 0.04, 0.05/) - res@lbLabelBarOn = False - res@gsnLeftString = "" - res@gsnRightString = "" - res@tiXAxisString = "" - - gsres = True - gsres@gsLineDashPattern = 11 - - tres = True - tres@txFontHeightF = 0.0175 - - NW = 6 - fMin = -0.05 - fMax = 0.05 - day1 = di@min_days - fline1 = 1. / day1 - day2 = di@max_days - fline2 = 1. 
/ day2 - - if(isatt(opt, "gsnLeftString")) then - res@gsnLeftString = opt@gsnLeftString - end if - if(isatt(opt, "gsnCenterString")) then - res@gsnCenterString = opt@gsnCenterString - end if - if(isatt(opt, "gsnRightString")) then - res@gsnRightString = opt@gsnRightString - end if - - plot = gsn_csm_contour(wks, wf({0:NW}, {fMin:fMax}), res) - - dummy(idx_mod, 0) = gsn_add_polyline(wks,\ - plot,\ - (/fline1, fline1/),\ - (/ 0., NW/),\ - gsres) - - dummy(idx_mod, 1) = gsn_add_text(wks,\ - plot,\ - (day1 + "d"),\ - fline1 + 0.005,\ - 0.93 * NW,\ - tres) - - dummy(idx_mod, 2) = gsn_add_polyline(wks,\ - plot,\ - (/fline2, fline2/),\ - (/ 0., NW/),\ - gsres) - - dummy(idx_mod, 3) = gsn_add_text(wks,\ - plot,\ - (day2 + "d"),\ - fline2 + 0.005,\ - 0.93 * NW,\ - tres) - return(plot) - -end - -; ############################################################################# - -undef("addHorVertLinesCross_extended") -function addHorVertLinesCross_extended(wks[1]:graphic, - plot[1]:graphic, - nw[1], - dumy[*][*]:graphic, - idx_mod[1]:numeric) -; freq [y] axis: Add horizontal lines that explicitly -; print time in days. This assumes the units -; of the freq axis are "cpd" [cycles per day] -local gsres, txres, xx, dely, m, nwl, nwr -begin - - gsres = True - gsres@gsLineDashPattern = 1 - - nwl = -nw + 3.5 ; left - nwr = nw ; right - dumy(idx_mod, 0) = \ - gsn_add_polyline(wks, plot, (/0, 0/), (/0.0, 0.5 /), gsres) - dumy(idx_mod, 1) = \ - gsn_add_polyline(wks, plot, (/nwl, nwr/), (/1. / 80, 1. / 80/), gsres) - dumy(idx_mod, 2) = \ - gsn_add_polyline(wks, plot, (/nwl, nwr/), (/1. / 20, 1. / 20/), gsres) - dumy(idx_mod, 3) = \ - gsn_add_polyline(wks, plot, (/nwl, nwr/), (/1. / 10, 1. / 10/), gsres) - dumy(idx_mod, 4) = \ - gsn_add_polyline(wks, plot, (/nwl, nwr/), (/1. / 5, 1. / 5/), gsres) - dumy(idx_mod, 5) = \ - gsn_add_polyline(wks, plot, (/nwl, nwr/), (/1. / 3, 1. / 3/), gsres) - - txres = True - txres@txJust = "CenterLeft" - txres@txFontHeightF = 0.013 - - xx = -nw + 0.3 - dely = 0.000 ; yy - dumy(idx_mod, 6) = \ - gsn_add_text(wks, plot, "3 days", xx, (1. / 3 + dely), txres) - dumy(idx_mod, 7) = \ - gsn_add_text(wks, plot, "5 days", xx, (1. / 5 + dely), txres) - dumy(idx_mod, 8) = \ - gsn_add_text(wks, plot, "10 days", xx, (1. / 10 + dely), txres) - dumy(idx_mod, 9) = \ - gsn_add_text(wks, plot, "20 days", xx, (1. / 20 + dely), txres) - dumy(idx_mod, 10) = \ - gsn_add_text(wks, plot, "80 days", xx, (1. / 80 + dely), txres) - - return(plot) - -end - -; ############################################################################# - -undef("mjo_cross_spectra_plot") -function mjo_cross_spectra_plot(wks[1]:graphic, - STC[*][*][*]:numeric, - spectraType[1]:string, - dumdcs[*][*]:graphic, - dums[*][*]:graphic, - idx_mod[1]:numeric, - opt[1]:logical) -local res -begin - - res = True ; plot mods desired - res@gsnDraw = False - res@gsnFrame = False - - res@cnFillOn = True ; turn on color - res@cnFillMode = "RasterFill" ; match WMO Clivar - - res@gsnSpreadColors = True ; use full range of colormap - if (opt .and. isatt(opt, "gsnSpreadColorStart")) then - res@gsnSpreadColorStart = opt@gsnSpreadColorStart - end if - - if (opt .and. 
isatt(opt, "gsnSpreadColorEnd")) then - res@gsnSpreadColorEnd = opt@gsnSpreadColorEnd - end if - - res@cnLinesOn = False - res@cnLineLabelsOn = False - res@cnLevelSelectionMode = "ManualLevels" - res@cnMinLevelValF = 0.05 - res@cnMaxLevelValF = 0.65 ; correlation^2 = 0.8 - res@cnLevelSpacingF = 0.05 - res@cnInfoLabelOn = False - res@lbLabelBarOn = False ; no individual label bars - - if(.not. opt .or. .not. isatt(opt, "pltPhase") .or. opt@pltPhase) then - plotPhase = True - res@vcRefMagnitudeF = 1.0 ; define vector ref mag - res@vcRefLengthF = 0.01 ; define length of vec ref - res@vcRefAnnoOrthogonalPosF = -1.0 ; move ref vector - res@vcRefAnnoArrowLineColor = "black" ; change ref vector color - res@vcMinDistanceF = 0.0075 ; thin out vectors - res@vcMapDirection = False - res@vcRefAnnoOn = False ; do not draw - res@gsnScalarContour = True ; contours desired - else - plotPhase = False - end if - - ; res@gsnLeftString = "Coh^2: Symmetric" - if(isatt(opt, "gsnLeftString")) then - res@gsnLeftString = opt@gsnLeftString - end if - - res@gsnRightString = "10% = " \ - + sprintf("%3.2f", STC@prob_coh2(2))\ - + " 5% = "\ - + sprintf("%3.2f", STC@prob_coh2(4)) - - if (opt .and. isatt(opt, "pltZonalWaveNumber")) then - nWavePlt = opt@pltZonalWaveNumber - else - nWavePlt = 15 ; default - end if - - ; --------------------------------------------------------------- - ; dispersion: curves - ; --------------------------------------------------------------- - rlat = 0.0 - Ahe = (/50., 25., 12./) - nWaveType = 6 - nPlanetaryWave = 50 - nEquivDepth = dimsizes(Ahe) - Apzwn = new((/nWaveType, nEquivDepth, nPlanetaryWave/), "double", 1e20) - Afreq = Apzwn - genDispersionCurves(nWaveType, \ - nEquivDepth, \ - nPlanetaryWave, \ - rlat, \ - Ahe, \ - Afreq, \ - Apzwn) - - ; -------------------------------------------------------------- - ; dispersion curve and text plot resources - ; -------------------------------------------------------------- - dcres = True - dcres@gsLineThicknessF = 2.0 - dcres@gsLineDashPattern = 0 - - txres = True - txres@txPerimOn = True - txres@txFontHeightF = 0.013 - txres@txBackgroundFillColor = "Background" - - ; --------------------------------------------------------------- - ; plot symmetric data - ; --------------------------------------------------------------- - if (spectraType .eq. "Symmetric") then - n = 8 - c2s = STC(n, :, {-nWavePlt:nWavePlt}) - c2s@_FillValue = 1e20 - c2s(0, :) = c2s@_FillValue - c2s = where(c2s .lt. 0.05, c2s@_FillValue, c2s) ; mask - - n = 12 - phs1 = STC(n, :, {-nWavePlt:nWavePlt}) - phs1@long_name = "symmetric phase-1" - phs1@_FillValue = c2s@_FillValue - phs1(0, :) = phs1@_FillValue - phs1 = where(c2s .lt. 0.05, phs1@_FillValue, phs1) ; mask - - n = 14 - phs2 = STC(n, :, {-nWavePlt:nWavePlt}) - phs2@long_name = "symmetric phase-2" - phs2@_FillValue = c2s@_FillValue - phs2(0, :) = phs2@_FillValue - phs2 = where(c2s .lt. 0.05, phs2@_FillValue, phs2) ; mask - - if(opt .and. isatt(opt, "pltProb")) then - np = ind(c2s@prob_coh2 .eq. opt@pltProb) - if(.not. ismissing(np)) then - c2s = where(c2s .lt. STC@prob_coh2(np), c2s@_FillValue, c2s) - phs1 = where(ismissing(c2s), phs1@_FillValue, phs1) - phs2 = where(ismissing(c2s), phs2@_FillValue, phs2) - end if - end if - - if(opt .and. isatt(opt, "coh2Cutoff")) then - c2s = where(c2s .lt. opt@coh2Cutoff, c2s@_FillValue, c2s) - end if - - if(opt .and. isatt(opt, "phaseCutoff")) then - phs1 = where(c2s .lt. opt@phaseCutoff, phs1@_FillValue, phs1) - phs2 = where(c2s .lt. 
opt@phaseCutoff, phs2@_FillValue, phs2) - end if - - if(opt .and. isatt(opt, "elimIsoVals") .and. .not.opt@elimIsoVals) then - print("mjo_cross_plot: no values eliminated") - else - mjo_elimIsolatedValues(c2s, phs1, phs2, 1) - mjo_elimIsolatedValues(c2s, phs1, phs2, 2) - end if - - if (plotPhase) then - scl_one = sqrt(1. / (phs1 ^ 2 + phs2 ^ 2)) - phs1 = scl_one*phs1 - phs2 = scl_one*phs2 - plot = gsn_csm_vector_scalar(wks, phs1, phs2, c2s, res) - else - plot = gsn_csm_contour(wks, c2s, res) - end if - - plot = addHorVertLinesCross_extended(wks, plot, nWavePlt, dums, idx_mod) - - dumdcs(idx_mod, 0) = \ - gsn_add_polyline(wks, plot, Apzwn(3, 0, :), Afreq(3, 0, :), dcres) - dumdcs(idx_mod, 1) = \ - gsn_add_polyline(wks, plot, Apzwn(3, 1, :), Afreq(3, 1, :), dcres) - dumdcs(idx_mod, 2) = \ - gsn_add_polyline(wks, plot, Apzwn(3, 2, :), Afreq(3, 2, :), dcres) - dumdcs(idx_mod, 3) = \ - gsn_add_polyline(wks, plot, Apzwn(4, 0, :), Afreq(4, 0, :), dcres) - dumdcs(idx_mod, 4) = \ - gsn_add_polyline(wks, plot, Apzwn(4, 1, :), Afreq(4, 1, :), dcres) - dumdcs(idx_mod, 5) = \ - gsn_add_polyline(wks, plot, Apzwn(4, 2, :), Afreq(4, 2, :), dcres) - dumdcs(idx_mod, 6) = \ - gsn_add_polyline(wks, plot, Apzwn(5, 0, :), Afreq(5, 0, :), dcres) - dumdcs(idx_mod, 7) = \ - gsn_add_polyline(wks, plot, Apzwn(5, 1, :), Afreq(5, 1, :), dcres) - dumdcs(idx_mod, 8) = \ - gsn_add_polyline(wks, plot, Apzwn(5, 2, :), Afreq(5, 2, :), dcres) - - dumdcs(idx_mod, 9) = \ - gsn_add_text(wks, plot, "Kelvin", 11.5, .40, txres) - dumdcs(idx_mod, 10) = \ - gsn_add_text(wks, plot, "n = 1 ER", -10.7, .07, txres) - dumdcs(idx_mod, 11) = \ - gsn_add_text(wks, plot, "n = 1 IG", -3.0, .45, txres) - dumdcs(idx_mod, 12) = \ - gsn_add_text(wks, plot, "h = 50", -14.0, .78, txres) - dumdcs(idx_mod, 13) = \ - gsn_add_text(wks, plot, "h = 25", -14.0, .60, txres) - dumdcs(idx_mod, 14) = \ - gsn_add_text(wks, plot, "h = 12", -14.0, .46, txres) - - ; --------------------------------------------------------------- - ; plot asymmetric data - ; --------------------------------------------------------------- - else if(spectraType .eq. "Asymmetric") then - n = 9 - if(isatt(opt, "gsnLeftString")) then - res@gsnLeftString = opt@gsnLeftString - end if - - res@gsnRightString = "10% = " \ - + sprintf("%3.2f", STC@prob_coh2(2)) \ - + " " \ - + " 5% = " \ - + sprintf("%3.2f", STC@prob_coh2(4)) - c2a = STC(n, :, {-nWavePlt:nWavePlt}) - c2a@_FillValue = 1e20 - c2a(0, :) = c2a@_FillValue - c2a = where(c2a .lt. 0.05, c2a@_FillValue, c2a) ; mask - - n = 13 - pha1 = STC(n, :, {-nWavePlt:nWavePlt}) - pha1@long_name = "asymmetric phase-1" - pha1@_FillValue = c2a@_FillValue - pha1(0, :) = pha1@_FillValue - - n = 15 - pha2 = STC(n, :, {-nWavePlt:nWavePlt}) - pha2@long_name = "asymmetric phase-2" - pha2@_FillValue = c2a@_FillValue - pha2(0, :) = pha2@_FillValue - pha2 = where(c2a .lt. 0.05, pha2@_FillValue, pha2) ; mask - - if (opt .and. isatt(opt, "pltProb")) then - np = ind(c2a@prob_coh2 .eq. opt@pltProb) - if (.not. ismissing(np)) then - c2a = where(c2a .lt. STC@prob_coh2(np), c2a@_FillValue, c2s) - pha1 = where(ismissing(c2a), pha1@_FillValue, pha1) - pha2 = where(ismissing(c2a), pha2@_FillValue, pha2) - end if - end if - - if (opt .and. isatt(opt, "coh2Cutoff")) then - c2a = where(c2a .lt. opt@coh2Cutoff, c2s@_FillValue, c2s) - end if - if (opt .and. isatt(opt, "phaseCutoff")) then - pha1 = where(c2a .lt. opt@phaseCutoff, pha1@_FillValue, pha1) - pha2 = where(c2a .lt. opt@phaseCutoff, pha2@_FillValue, pha2) - end if - - if (opt .and. isatt(opt, "elimIsoVals") .and. 
.not.opt@elimIsoVals) then - mjo_elimIsolatedValues(c2a, pha1, pha2, 2) - mjo_elimIsolatedValues(c2a, pha1, pha2, 1) - end if - - if (plotPhase) then - scl_one = sqrt(1. / (pha1 ^ 2 + pha2 ^ 2)) - pha1 = scl_one * pha1 - pha2 = scl_one * pha2 - plot = gsn_csm_vector_scalar(wks, pha1, pha2, c2a, res) - else - plot = gsn_csm_contour(wks, c2a, res) - end if - - plot = addHorVertLinesCross_extended(wks, plot, nWavePlt, dums, idx_mod) - - dumdcs(idx_mod, 0) = \ - gsn_add_polyline(wks, plot, Apzwn(0, 0, :), Afreq(0, 0, :), dcres) - dumdcs(idx_mod, 1) = \ - gsn_add_polyline(wks, plot, Apzwn(0, 1, :), Afreq(0, 1, :), dcres) - dumdcs(idx_mod, 2) = \ - gsn_add_polyline(wks, plot, Apzwn(0, 2, :), Afreq(0, 2, :), dcres) - dumdcs(idx_mod, 3) = \ - gsn_add_polyline(wks, plot, Apzwn(1, 0, :), Afreq(1, 0, :), dcres) - dumdcs(idx_mod, 4) = \ - gsn_add_polyline(wks, plot, Apzwn(1, 1, :), Afreq(1, 1, :), dcres) - dumdcs(idx_mod, 5) = \ - gsn_add_polyline(wks, plot, Apzwn(1, 2, :), Afreq(1, 2, :), dcres) - dumdcs(idx_mod, 6) = \ - gsn_add_polyline(wks, plot, Apzwn(2, 0, :), Afreq(2, 0, :), dcres) - dumdcs(idx_mod, 7) = \ - gsn_add_polyline(wks, plot, Apzwn(2, 1, :), Afreq(2, 1, :), dcres) - dumdcs(idx_mod, 8) = \ - gsn_add_polyline(wks, plot, Apzwn(2, 2, :), Afreq(2, 2, :), dcres) - - dumdcs(idx_mod, 10) = gsn_add_text(wks, plot, "MRG", -10.0, .15, txres) - dumdcs(idx_mod, 11) = gsn_add_text(wks, plot, "n = 2 IG", -3.0, .58, txres) - dumdcs(idx_mod, 12) = gsn_add_text(wks, plot, "n = 0 EIG", 6.5, .40, txres) - dumdcs(idx_mod, 13) = gsn_add_text(wks, plot, "h = 50", -10.0, .78, txres) - dumdcs(idx_mod, 14) = gsn_add_text(wks, plot, "h = 25", -10.0, .63, txres) - dumdcs(idx_mod, 15) = gsn_add_text(wks, plot, "h = 12", -10.0, .51, txres) - - end if - end if - - resP = True - resP@gsnMaximize = True - resP@gsnPanelLabelBar = True - resP@lbLabelStride = 2 ; every other one - resP@cnLabelBarEndLabelsOn = True - resP@cnLabelBarEndStyle = "ExcludeOuterBoxes" - - resP@cnLevelSelectionMode = res@cnLevelSelectionMode - resP@cnMinLevelValF = res@cnMinLevelValF - resP@cnMaxLevelValF = res@cnMaxLevelValF ; correlation^2 = 0.8 - resP@cnLevelSpacingF = res@cnLevelSpacingF - - return(plot) - -end - -; ############################################################################# - -undef("mjo_ceof_plot") -function mjo_ceof_plot(wks[1]:graphic, - ceof[*][*]:numeric, - pcv_eof_olr[1]:numeric, - pcv_eof_u850[1]:numeric, - pcv_eof_u200[1]:numeric, - opt[1]:logical) -local res, rts, var0, lon, plot -begin - - ; ************************************************ - ; Multivariate EOF plots - ; ************************************************ - rts = True - rts@gsnDraw = False ; don't draw yet - rts@gsnFrame = False ; don't advance frame yet - rts@gsnScale = True ; force text scaling - - rts@vpHeightF = 0.40 ; Changes the aspect ratio - rts@vpWidthF = 0.85 - rts@vpXF = 0.10 ; change start locations - rts@vpYF = 0.75 ; the plot - rts@xyLineThicknesses = (/2, 2, 2/) - rts@xyLineColors = (/"black", "red", "green"/) - rts@xyDashPatterns = (/0.0, 0.0, 0.0/) ; line patterns - - rts@trYMaxF = diag_script_info@trYMaxF_ceof - rts@trYMinF = diag_script_info@trYMinF_ceof - - rts@gsnYRefLine = 0. ; reference line - - rts@pmLegendDisplayMode = "Always" ; turn on legend - rts@pmLegendSide = "Top" ; Change location of - rts@pmLegendParallelPosF = 0.86 ; move units right - rts@pmLegendOrthogonalPosF = -0.50 ; move units down - rts@pmLegendWidthF = 0.15 ; Change width and - rts@pmLegendHeightF = 0.15 ; height of legend. 
- rts@lgLabelFontHeightF = 0.0175 - - var0 = variables(0) - - if (var0 .eq. "pr-mmday") then - rts@xyExplicitLegendLabels = \ - (/"Pre: " + sprintf("%4.1f", pcv_eof_olr) + "%",\ - "U850: " + sprintf("%4.1f", pcv_eof_u850) + "%",\ - "U200: " + sprintf("%4.1f", pcv_eof_u200) + "%" /) - end if - - if (var0 .eq. "rlut") then - rts@xyExplicitLegendLabels = \ - (/"OLR: " + sprintf("%4.1f", pcv_eof_olr) + "%",\ - "U850: " + sprintf("%4.1f", pcv_eof_u850) + "%",\ - "U200: " + sprintf("%4.1f", pcv_eof_u200) + "%"/) - end if - - if (isatt(opt, "gsnLeftString")) then - rts@gsnLeftString = opt@gsnLeftString - end if - - if (isatt(opt, "gsnCenterString")) then - rts@gsnCenterString = opt@gsnCenterString - end if - - if (isatt(opt, "gsnRightString")) then - rts@gsnRightString = opt@gsnRightString - end if - - lon = ceof&lon - plot = gsn_csm_xy(wks, lon, ceof(:, :), rts) - - return(plot) - -end - -; ############################################################################# - -undef("mjo_life_cycle_plot") -procedure mjo_life_cycle_plot(x[*][*][*]:numeric,\ - u[*][*][*]:numeric,\ - v[*][*][*]:numeric,\ - idx_mod[1]:numeric,\ - di[1]:logical) -local amres, ang, angBnd, aux_title_info, diag_script_base, f, field_type0, \ - iday, imon, mjo_indx, nDays, netcdf_dir, nn, nPhase, nt, output_dir, \ - output_filename, input_netcdf, pc1, pc2, phaBnd, phase, pLabel, plot, \ - pltSubTitle, r2d, res, resP, time, txid, txres, uAvg, var0, vAvg, wks, \ - xAvg, ymdhms -begin - - ; *********************************************************** - ; Open PC components file created in 'mjo_14.ncl' - ; *********************************************************** - var0 = variables(0) - field_type0 = field_types(0) - - netcdf_dir = plot_dir + "netcdf/" - aux_title_info = "MJO_PC_INDEX" - input_netcdf = \ - interface_get_figure_filename("mjo_life_cycle_plot", var0, field_type0, \ - aux_title_info, idx_mod) - f = addfile(netcdf_dir + input_netcdf + ".nc", "r") - - ; *********************************************************** - ; Find the indices corresponding to the start/end times - ; on the MJO_PC_INDEX.nc file - ; *********************************************************** - ; TIME = f->time ; days since ... - ; YMD = cd_calendar(TIME, -2) ; entire(time, 6) - ; - ; iStrt = ind(YMD .eq. twStrt) ; index start - ; iLast = ind(YMD .eq. twLast) ; index last - ; delete(TIME) - ; delete(YMD) - ; *********************************************************** - ; Read the data for the desired period - ; *********************************************************** - pc1 = f->PC1(:) - pc2 = f->PC2(:) - mjo_indx = f->MJO_INDEX(:) - - time = pc1&time - ymdhms = cd_calendar(time, 0) - - imon = floattoint(ymdhms(:, 1)) ; convenience - iday = floattoint(ymdhms(:, 2)) ; subscripts must be integer - - ; *********************************************************** - ; Place each array into an appropriate array - ; *********************************************************** - phase = (/202.5, 247.5, 292.5, 337.5, \ ; phi: Matt Wheeler's f77 code - 22.5, 67.5, 112.5, 157.5/) - nPhase = dimsizes(phase) - phaBnd = 180.0 / nPhase - angBnd = new((/2, nPhase/), "float") - angBnd(0, :) = phase - phaBnd - angBnd(1, :) = phase + phaBnd - - r2d = 180. / (4. * atan(1.0)) - ang = atan2(pc2, pc1) * r2d ; phase space - nn = ind(ang .lt. 
0) - ang(nn) = ang(nn) + 360 ; make 0 to 360 - - nDays = new(nPhase, "integer") - pLabel = "P" + ispan(1, nPhase, 1) + ": " - - res = True - res@gsnDraw = False ; don't draw yet - res@gsnFrame = False ; don't advance frame yet - res@gsnSpreadColors = True ; spread out color table - - res@mpFillOn = False ; turn off map fill - res@mpMinLatF = di@latrange_life(0) ; zoom in on map - res@mpMaxLatF = di@latrange_life(1) - res@mpCenterLonF = 210. - res@cnFillOn = True ; turn on color fill - res@cnLinesOn = False ; True is default - res@cnLineLabelsOn = False ; True is default - res@lbLabelBarOn = False ; turn off individual lb's - res@gsnScalarContour = True ; contour 3rd array - res@gsnMajorLatSpacing = 15 - res@gsnMajorLonSpacing = 60 - res@tmXBLabelFontHeightF = 0.01 - res@tmYLLabelFontHeightF = 0.01 - - res@cnLevelSelectionMode = "ManualLevels" - - if (var0 .eq. "rlut") then - res@cnMinLevelValF = -40 ; -98 mnmxint(0) - res@cnMaxLevelValF = 40 ; 80 mnmxint(1) - res@cnLevelSpacingF = 5 ; 20 mnmxint(2) - end if - - if (var0 .eq. "pr-mmday") then - res@cnMinLevelValF = -8 ; -100 mnmxint(0) - res@cnMaxLevelValF = 8 ; 80 mnmxint(1) - res@cnLevelSpacingF = 1 ; 20 mnmxint(2) - end if - - res@vcMinDistanceF = 0.01 ; thin the vector density - res@vcRefMagnitudeF = 2.0 ; define vector ref mag - res@vcRefLengthF = 0.025 ; define length of vec ref - res@vcRefAnnoOrthogonalPosF = -1.0 ; move ref vector - res@vcRefAnnoArrowLineColor = "black" ; change ref vector color - res@vcRefAnnoArrowUseVecColor = False ; don't use vec color for ref - - ; panel plot only resources - resP = True ; modify the panel plot - resP@gsnMaximize = True ; large format - resP@gsnPanelLabelBar = True ; add common colorbar - resP@lbLabelAutoStride = True ; auto stride on labels - resP@lbLabelFontHeightF = 0.01 - resP@gsnPanelBottom = 0.05 ; add some space at bottom - resP@pmLabelBarWidthF = 0.8 ; label bar width - resP@pmLabelBarHeightF = 0.05 - resP@gsnPanelFigureStringsFontHeightF = 0.0125 ; bit larger than default - - amres = True - amres@amOrthogonalPosF = 0.75 - amres@amJust = "CenterCenter" - - ; ******************************************* - ; Loop over each phase - ; ******************************************* - res@gsnLeftString = "" - res@gsnRightString = "" - - ; Output dir - ; 'plot_dir' is fetched from ncl.interface - diag_script_base = basename(diag_script) - output_dir = config_user_info@plot_dir - - pltSubTitle = "Anomalous: OLR, U850, V850" - - do nSeason = 1, 2 - aux_title_info = input_file_info@dataset(idx_mod) + "_" \ - + di@season_life_cycle(nSeason - 1) - output_filename = \ - interface_get_figure_filename(diag_script_base, var0, field_type0,\ - aux_title_info, -1) - wks = gsn_open_wks(output_file_type, output_dir + output_filename) - gsn_define_colormap(wks, "ViBlGrWhYeOrRe") - plot = new(nPhase, graphic) ; create graphic array - - txres = True - txres@txFontHeightF = 0.01 - txid = gsn_create_text(wks, pltSubTitle, txres) - - if (nSeason .eq. 1) then - resP@txString = input_file_info@dataset(idx_mod) + ": "\ - + input_file_info@start_year(idx_mod) + "-"\ - + input_file_info@end_year(idx_mod)\ - + ": May to Oct" - else - resP@txString = input_file_info@dataset(idx_mod) + ": "\ - + input_file_info@start_year(idx_mod) + "-"\ - + input_file_info@end_year(idx_mod)\ - + ": Nov to Apr" - end if - do n = 0, nPhase - 1 - ; All times where the following criteria are met - if (nSeason .eq. 1) then - nt = ind(mjo_indx .gt. 1.0 \ - .and.(imon .ge. 5 .and. imon .le. 10)\ - .and. ang .ge. angBnd(0, n)\ - .and. ang .lt. 
angBnd(1, n)) - else - nt = ind(mjo_indx .gt. 1.0\ - .and.(imon .ge. 11 .or. imon .le. 4)\ - .and. ang .ge. angBnd(0, n) \ - .and. ang .lt. angBnd(1, n)) - end if - if (.not. all(ismissing(nt))) then - xAvg = dim_avg_n_Wrap(x(nt, :, :), 0) - uAvg = dim_avg_n_Wrap(u(nt, :, :), 0) - vAvg = dim_avg_n_Wrap(v(nt, :, :), 0) - - nDays(n) = dimsizes(nt) - - res@tmXBLabelsOn = False ; do not draw lon labels - res@tmXBOn = False ; lon tickmarks - if (n .eq. (nPhase - 1)) then - res@tmXBLabelsOn = True ; draw lon labels - res@tmXBOn = True ; tickmarks - end if - - plot(n) = gsn_csm_vector_scalar_map_ce(wks, uAvg, vAvg, xAvg, res) - end if - delete(nt) ; will change next iteration - end do - - resP@gsnPanelFigureStrings = pLabel + nDays - gsn_panel(wks, plot, (/nPhase, 1/), resP) ; now draw as one plot - - delete(wks) - delete(plot) - end do - -end diff --git a/esmvaltool/diag_scripts/shared/plot/monsoon_domain_panels.ncl b/esmvaltool/diag_scripts/shared/plot/monsoon_domain_panels.ncl deleted file mode 100644 index 501e78c953..0000000000 --- a/esmvaltool/diag_scripts/shared/plot/monsoon_domain_panels.ncl +++ /dev/null @@ -1,454 +0,0 @@ -; ############################################################################# -; GENERAL ROUTINES FOR MONSOON PANELLING -; ############################################################################# -; Please consider using or extending existing routines before adding new ones. -; Check the header of each routine for documentation. -; -; Contents: -; plot_precip_domain -; precip_domain -; -; ############################################################################# - -load "$diag_scripts/../interface_scripts/auxiliary.ncl" -load "$diag_scripts/../interface_scripts/data_handling.ncl" -load "$diag_scripts/../interface_scripts/logging.ncl" - -load "$diag_scripts/aux/SAMonsoon/SAMonsoon.ncl" -exact_panel_position_file = \ - "./diag_scripts/aux/SAMonsoon/exact_panel_positions_precip_world.ncl" -load "$diag_scripts/aux/SAMonsoon/SAMonsoon_panel_positions.ncl" -load "$diag_scripts/aux/SAMonsoon/exact_panel_positions_generic.ncl" -load "$diag_scripts/shared/plot/style.ncl" - -begin - loadscript(exact_panel_position_file) -end - -; ############################################################################# -undef("plot_precip_domain") -procedure plot_precip_domain(cols[*]:integer, - rows[*]:float, - curr_idx[1]:integer, - curr_page[1]:integer, - res[1]:logical, - storage_name[1]:string, - storage_vault[1]:logical, - wks[1]:graphic, - di[1]:logical, - plot_settings[1]:logical, - valid_statistics[*]:string, - debuginfo[1]:logical, - figures_per_page[*]:integer, - model_panel_placement[*]:integer, - figure_panel_placement[*]:integer, - plot_array[*]:graphic, - type_specifier[1]:string, - no_figures_on_this_page[1]:integer) -; -; Arguments -; cols: number of columns for this panel plot -; rows: number of rows for this panel plot -; curr_idx: current index -; curr_page: current page (may be more than one) -; res: valid ncl resources to apply to plot -; storage_name: name of first processed variable in the vault -; storage_vault: handle/pointer to vault with all models -; wks: workstation -; di: logical with diagnostic script info -; plot_settings: logical with general plot_settings -; valid_statistics: valid statistics (to be computed if defined) -; debuginfo: info to put onto plot in debug mode -; figures_per_page: array with number of figures on each page -; model_panel_placement: where to place respective model -; figure_panel_placement: where to place respective figure 
on the page -; plot_array: plot handles/pointers -; type_specifier: kind of plot, 'mean' or 'stddev' -; no_figures_on_this_page: no of figures on this page -; -; Return value -; -; Description -; Multipanel plot, plots all models on the current page. Top left entry -; is always the reference model. Used for the global domain plots only. -; -; Caveats -; -; References -; -; Modification history -; 20150702-A_eval_ma: written. -; -local am_infobox_id, blank_plot, cn_levels_string, curr, curr_idx, debugbox, \ - diff_model_ref, dummy_array, idx_fig, idx_mod, lb_prefix, lbres, \ - main_title_prefix, main_title_suffix, plot, plot_ref, plottype_lbres, ref, \ - statistics, storage_record, txres -begin - - funcname = "plot_precip_domain" - scriptname = "plot_script/ncl/monsoon_domain_panels.ncl" - enter_msg(scriptname, funcname) - - if (isatt(res, "cnLevels")) then - delete(res@cnLevels) - end if - cn_levels_string = \ - str_sub_str("cn_levels_" + storage_name + "_mean", "-", "_") - res@cnLevels = di@$cn_levels_string$ - - if (isatt(plot_settings, "default_palette")) then - if (isatt(res, "cnFillPalette")) then - delete(res@cnFillPalette) - end if - res@cnFillPalette = plot_settings@default_palette - end if - - main_title_prefix = "" - main_title_suffix = "" - - storage_record = \ - str_join((/storage_name, \ - sprinti("%i", plot_settings@idx_ref)/), str_vault_sep) - ref = retrieve_from_vault(storage_vault, storage_record) - - ; Reference plot - plot_settings@idx = plot_settings@idx_ref - - ; Dummy array required for some plot routines - dummy_array = (/(/1e20, 1e20/), (/1e20, 1e20/)/) - dummy_array@_FillValue = 1e20 - - statistics = True - statistics = compute_stat((/"yrs"/), valid_statistics, ref, dummy_array) - - res = panel_n_by_cols(res, figure_panel_placement(curr_idx), rows, cols, \ - figures_per_page(curr_page)) - plot_ref = single_contour(wks, \ - ref, \ - main_title_prefix, \ - main_title_suffix, \ - plot_settings, \ - debuginfo, \ - statistics, \ - res) - delete(statistics) - txres = True - txres@txFuncCode = "~" - if (debuginfo) then - debugbox = write_info(debuginfo) - am_infobox_id = place_debuginfo(wks, debugbox, txres, plot_ref) - end if - - idx_fig = figure_panel_placement(curr_idx) - plot_array(idx_fig) = plot_ref - - ; Skip past the reference plot - curr_idx = curr_idx + 1 - - lbres = True - - if (plot_settings@type .eq. "diff") then - inset_top_text(wks, plot_ref, "REF", txres) - if (storage_name .eq. "intensity") then - inset_labelbar(wks, plot_ref, res, "REF", lbres) - end if - end if - - delete(res@cnLevels) - cn_levels_string = \ - str_sub_str("cn_levels_" + storage_name + type_specifier, "-", "_") - res@cnLevels = di@$cn_levels_string$ - - if (isatt(plot_settings, "diff_palette")) then - if (isatt(res, "cnFillPalette")) then - delete(res@cnFillPalette) - end if - res@cnFillPalette = plot_settings@diff_palette - end if - - ; ----------------------------- - ; Create the non-reference plots - ; ----------------------------- - do curr_fig = 1, figures_per_page(curr_page) - 1 - - main_title_prefix = "" - idx_mod = model_panel_placement(curr_idx) - idx_fig = figure_panel_placement(curr_idx) - - ; Update placement and labelbar colors - res = panel_n_by_cols(res, figure_panel_placement(curr_idx), rows, cols, \ - figures_per_page(curr_page)) - - storage_record = \ - str_join((/storage_name, sprinti("%i", idx_mod)/), str_vault_sep) - curr = retrieve_from_vault(storage_vault, storage_record) - if (plot_settings@type .eq. 
"diff") then - ; Plot mean of differences of first and second data set, first - ; interpolate to the reference data set grid (bilinear interpolation) - diff_model_ref = get_dataset_minus_ref(curr, ref) - delete(curr) - curr = diff_model_ref - delete(diff_model_ref) - main_title_suffix = " - REF" - else - main_title_suffix = "" - end if - - statistics = True - statistics = compute_stat((/"yrs"/), valid_statistics, curr, dummy_array) - - plot_settings@idx = idx_mod - plot = single_contour(wks, \ - curr, \ - main_title_prefix, \ - main_title_suffix, \ - plot_settings, \ - debuginfo, \ - statistics, \ - res) - delete(statistics) - if (debuginfo) then - debugbox = write_info(debuginfo) - am_infobox_id = place_debuginfo(wks, debugbox, txres, plot) - end if - - plot_array(idx_fig) = plot - delete(curr) - - ; Update index to point to next field - curr_idx = curr_idx + 1 - end do ; curr_fig=1, figures_per_page(curr_page) - 1 - - plottype_lbres = True - - ; Copy over any attributes with this prefix - lb_prefix = "lbres_" - plottype_lbres = filter_attrs(plot_settings, plottype_lbres, lb_prefix) - - ; Create an blank plot for shared labelbar placement - blank_plot = \ - add_blank_plot_title(wks, "Monsoon " + plot_settings@part_of_header, \ - rows, cols) - - ; Create shared labelbar - n_by_cols_labelbar(wks, \ - blank_plot, \ - plot_array(no_figures_on_this_page - 1), \ - rows, \ - cols, \ - plot_settings@lb_units,\ - plottype_lbres) - - if (debuginfo) then - drawNDCGrid(wks) - end if - draw(plot_array) - draw(blank_plot) - if (debuginfo) then - place_description(wks, debuginfo@description, debuginfo@description_ycoord) - end if - - leave_msg(scriptname, funcname) - -end - -; ############################################################################# -undef("precip_domain") -procedure precip_domain(storage_vault[1]:logical, - di[1]:logical, - plot_settings[1]:logical, - storage_name[1]:string, - debuginfo[1]:logical, - valid_statistics[*]:string, - res[1]:logical) -; -; Arguments -; storage_vault: handle/pointer to vault with all models -; di: logical with diagnostic script info -; plot_settings: logical with general plot_settings -; storage_name: name of first processed variable in the vault -; debuginfo: info to put onto plot in debug mode -; valid_statistics: valid statistics (to be computed if defined) -; res: valid ncl resources to apply to plot -; -; Return value -; -; Description -; Determines how to place a number of contour plots in a grid across -; multiple pages. Loop over pages and call plot_multipanel(...) for -; each page to plot entries. Used for the domain plots only. -; -; Caveats -; -; References -; -; Modification history -; 20150702-A_eval_ma: written. 
-; -local aux_filename_info, blank_plot, cn, cols, curr, curr_fig, \ - curr_figure_pos, curr_idx, curr_page, diag_script_base, dim_MOD, dim_VAR, \ - field_type0, figure_panel_placement, figures_per_page, fill_colors_string, \ - idx_fig, idx_mod, idx_ref, lbres, main_title_prefix, max_cols, \ - model_panel_placement, no_figures_on_this_page, output_dir, \ - output_filename, output_file_path, page_no, plot, plot_array, plot_ref, \ - plottype_lbres, ref, res, rows, storage_record, total_no_of_pages, txres, \ - type_specifier, var0, wks -begin - - funcname = "precip_domain" - scriptname = "plot_script/ncl/monsoon_domain_panels.ncl" - enter_msg(scriptname, funcname) - - dim_MOD = dimsizes(input_file_info@dataset) - dim_VAR = dimsizes(variables) - var0 = variables(0) - field_type0 = field_types(0) - - ; 'output_file_type' if fetched from ncl.interface - if (.not. isdefined("output_file_type")) then - output_file_type = "ps" - end if - - ; Output dir - ; 'plot_dir' is fetched from ncl.interface - diag_script_base = basename(diag_script) - output_dir = config_user_info@plot_dir - res@tiMainFontHeightF = 0.011 - - ; ------------------------------ - ; Static resources for this plot - ; ------------------------------ - res@mpFillOn = False - res@cnFillOn = True - res@cnLinesOn = False - res@cnLevelSelectionMode = "ExplicitLevels" - res@cnMissingValFillColor = "Background" - res@cnLineLabelsOn = False - res@gsnFrame = False - res@gsnDraw = False - res@lbLabelBarOn = False - res@gsnAddCyclic = True - - ; -------------------------------------- - ; Compute the layout of paneled figures - ; -------------------------------------- - figures_per_page = get_figures_per_page(dim_MOD, \ - max_figures_pp, \ - min_figures_pp) - - ; Which model goes where across all pages - model_panel_placement = new((/sum(figures_per_page)/), integer) - - ; Which model goes where on each page? - figure_panel_placement = new((/sum(figures_per_page)/), integer) - place_models_on_pages(input_file_info, \ - plot_settings@idx_ref, \ - figures_per_page, \ - model_panel_placement, \ - figure_panel_placement) - - type_specifier = "_" + plot_settings@type - - ; --------------------------- - ; Loop over all output pages - ; --------------------------- - curr_idx = 0 - curr_idx_debug = 0 - total_no_of_pages = dimsizes(figures_per_page) - - do curr_page = 0, total_no_of_pages - 1 - ; -------------------------- - ; Plot arrays for gsn_panels - ; -------------------------- - plot_array = new((/max_figures_pp/), graphic) - - no_figures_on_this_page = figures_per_page(curr_page) - - ; Create a string to add to the figure output\ - ; filename for mulitple pages - if (total_no_of_pages .gt. 
1) then - page_no = "-page" + sprinti("%i", curr_page) - else - page_no = "" - end if - - ; ---------------------------------------- - ; Define output workstation for mean plots - ; ---------------------------------------- - idx_mod = -1 ; No specific model defined - aux_filename_info = di@summer_season + "-" + storage_name + \ - type_specifier + page_no - output_filename = interface_get_figure_filename(diag_script_base, \ - var0, \ - field_type0, \ - aux_filename_info, \ - idx_mod) - output_file_path = output_dir + output_filename - wks = gsn_open_wks(output_file_type, output_file_path) - - max_cols = 2 - cols = multipanel_get_no_cols(no_figures_on_this_page, max_cols) - rows = multipanel_get_no_rows(no_figures_on_this_page, max_cols) - - ; Update position, labelbar and title - curr_figure_pos = figure_panel_placement(curr_idx) - res = panel_n_by_cols(res, curr_figure_pos, rows, cols, \ - figures_per_page(curr_page)) - - plot_precip_domain(cols, \ - rows, \ - curr_idx, \ - curr_page, \ - res, \ - storage_name, \ - storage_vault, \ - wks, \ - di, \ - plot_settings, \ - valid_statistics, \ - False, \ ; No debuginfo - figures_per_page, \ - model_panel_placement, \ - figure_panel_placement, \ - plot_array, \ - type_specifier, \ - no_figures_on_this_page) - frame(wks) - - if (debuginfo) then - aux_filename_info = di@summer_season + "-" + storage_name + \ - type_specifier + page_no + "-debug" - output_filename = interface_get_figure_filename(diag_script_base, \ - var0, \ - field_type0, \ - aux_filename_info, \ - idx_mod) - output_file_path = output_dir + output_filename - wks_debug = gsn_open_wks(output_file_type, output_file_path) - - plot_precip_domain(cols, \ - rows, \ - curr_idx_debug, \ - curr_page, \ - res, \ - storage_name, \ - storage_vault, \ - wks_debug, \ - di, \ - plot_settings, \ - valid_statistics, \ - debuginfo, \ - figures_per_page, \ - model_panel_placement, \ - figure_panel_placement, \ - plot_array, \ - type_specifier, \ - no_figures_on_this_page) - frame(wks_debug) - end if - - end do ; curr_page = 0, total_no_of_pages - 1 - - leave_msg(scriptname, funcname) - -end diff --git a/esmvaltool/diag_scripts/shared/plot/monsoon_panels.ncl b/esmvaltool/diag_scripts/shared/plot/monsoon_panels.ncl deleted file mode 100644 index bf65740463..0000000000 --- a/esmvaltool/diag_scripts/shared/plot/monsoon_panels.ncl +++ /dev/null @@ -1,3001 +0,0 @@ -; ############################################################################# -; GENERAL ROUTINES FOR MONSOON PANELLING -; ############################################################################# -; Please consider using or extending existing routines before adding new ones. -; Check the header of each routine for documentation. 
-; -; Contents: -; function get_title_suffix -; procedure remove_attrs -; procedure plot_two_by_one -; procedure plot_three_by_one_diff -; procedure two_by_one -; procedure three_by_one_diff -; procedure plot_three_by_one_vector -; procedure three_by_one_vector -; procedure plot_multipanel -; procedure multipanel -; procedure plot_multipanel_vector -; procedure multipanel_vector -; procedure seasonal_plot -; procedure xy_plot_wrapper -; procedure ts_line_wrapper -; procedure xy_line_overlap -; -; ############################################################################# - -load "$diag_scripts/../interface_scripts/auxiliary.ncl" -load "$diag_scripts/../interface_scripts/data_handling.ncl" -load "$diag_scripts/../interface_scripts/logging.ncl" - -begin - loadscript(exact_panel_position_file) -end - -load "$diag_scripts/aux/SAMonsoon/SAMonsoon.ncl" -load "$diag_scripts/aux/SAMonsoon/SAMonsoon_panel_positions.ncl" -load "$diag_scripts/aux/SAMonsoon/exact_panel_positions_generic.ncl" - -load "$diag_scripts/shared/plot/style.ncl" -load "$diag_scripts/shared/plot/xy_line.ncl" - -; ############################################################################# -undef("get_title_suffix") -function get_title_suffix(plot_settings[1]:logical) -; -; Arguments -; plot_settings: Logical with plot settings as attributes -; -; Return value -; main_title_suffix: Main title suffix -; -; Description -; Checks and returns the main title suffix attribute from -; plot_settings, empty string if missing -; -; Caveats -; -; References -; -; Modification history -; 20150702-A_eval_ma: written. -; -local main_title_suffix -begin - - funcname = "get_title_suffix" - scriptname = "diag_scripts/shared/plot/monsoon_panels.ncl" - enter_msg(scriptname, funcname) - - if (isatt(plot_settings, "main_title_suffix")) then - main_title_suffix = plot_settings@main_title_suffix - else - main_title_suffix = "" - end if - - leave_msg(scriptname, funcname) - return(main_title_suffix) - -end - -; ############################################################################# -undef("remove_attrs") -procedure remove_attrs(p_settings[1]:logical, - attributes[*]:string) -; -; Arguments -; p_settings: Logical with plot settings as attributes -; attributes: array with attribute names -; -; Return value -; -; Description -; Removes supplied attributes if they are defined -; -; Caveats -; -; References -; -; Modification history -; 20150702-A_eval_ma: written. 
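remove_attrs() has a direct Python analogue; a minimal sketch, assuming a plain dict stands in for NCL's attribute container:

```python
def remove_attrs(settings: dict, attributes: list) -> None:
    """Delete the listed attributes, silently skipping any that are absent."""
    for name in attributes:
        settings.pop(name, None)  # pop with a default never raises KeyError
```

Guarding the deletion like this is what lets the plot routines below call the helper repeatedly without checking isatt() first.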
-; -local idx, settings -begin - - funcname = "remove_attrs" - scriptname = "diag_scripts/shared/plot/monsoon_panels.ncl" - enter_msg(scriptname, funcname) - - do idx = 0, dimsizes(attributes) - 1 - if (isatt(p_settings, attributes(idx))) then - delete(p_settings@$attributes(idx)$) - end if - end do - - leave_msg(scriptname, funcname) - -end - -; ############################################################################# -undef("plot_two_by_one") -procedure plot_two_by_one(wks[1]:graphic, - res[1]:logical, - di[1]:logical, - plot_settings[1]:logical, - valid_statistics[*]:string, - storage_name1[1]:string, - storage_name2[1]:string, - debuginfo[1]:logical, - storage_vault[1]:logical, - idx_mod[1]:integer) -; -; Arguments -; wks: workstation -; res: resources for plot -; di: logical with diagnostic script info -; plot_settings: logical with general plot_settings -; valid_statistics: valid statistics (to be computed if defined) -; storage_name1: name of first processed variable in the vault -; storage_name2: name of second processed variable in the vault -; debuginfo: info to put onto plot in debug mode -; storage_vault: handle/pointer to vault with all models -; idx_mod: index of current model -; -; Return value -; -; Description -; Plots two contour plots on top of each other -; -; Caveats -; -; References -; -; Modification history -; 20150702-A_eval_ma: written. -; -local am_infobox_id, cn_levels_string, debugbox, dummy_array, lbtitle, lower, \ - main_title_prefix, main_title_suffix, plot1, plot2, statistics, \ - storage_record, txres, upper -begin - - funcname = "plot_two_by_one" - scriptname = "diag_scripts/shared/plot/monsoon_panels.ncl" - enter_msg(scriptname, funcname) - - res = panel_three_by_one(res, 0) ; Update resource for first plot - res@cnLevelSelectionMode = "ExplicitLevels" - if (isatt(res, "cnLevels")) then - delete(res@cnLevels) - end if - - dummy_array = (/1e+20/) - dummy_array@_FillValue = 1e+20 - - remove_attrs(res, (/"cnLevels"/)) - cn_levels_string = \ - str_sub_str("cn_levels_" + storage_name1 + "_basic", "-", "_") - res@cnLevels = di@$cn_levels_string$ - - remove_attrs(plot_settings, (/"idx"/)) - plot_settings@idx = plot_settings@idx_ref - - ; ----------------------------- - ; Plot model entry (first plot) - ; ----------------------------- - main_title_prefix = "" - main_title_suffix = plot_settings@part_of_header - res = panel_two_by_one(res, 0) ; Update resource for first plot - - storage_record = \ - str_join((/storage_name1, sprinti("%i", idx_mod)/), str_vault_sep) - upper = retrieve_from_vault(storage_vault, storage_record) - plot_settings@type = "mean" - - remove_attrs(plot_settings, (/"idx", "gsn_right_string"/)) - plot_settings@idx = idx_mod - plot_settings@gsn_right_string = upper@gsn_right_string - - statistics = True - statistics = \ - compute_stat((/"yrs", "refA", "lonB", "mean", "bob", "eio", "sahel", \ - "sa34", "en34"/), valid_statistics, upper, dummy_array) - - plot_settings@skip_default_title = False - plot1 = single_contour(wks, upper, main_title_prefix, main_title_suffix, \ - plot_settings, debuginfo, statistics, res) - delete(statistics) - - ; --------------------------------------- - ; Plot model entry (second plot) - ; --------------------------------------- - main_title_prefix = "" - main_title_suffix = "" - res = panel_two_by_one(res, 1) ; Update resource for second plot - delete(res@cnLevels) - cn_levels_string = \ - str_sub_str("cn_levels_" + storage_name2 + "_basic", "-", "_") - res@cnLevels = di@$cn_levels_string$ - - storage_record = 
\ - str_join((/storage_name2, sprinti("%i", idx_mod)/), str_vault_sep) - lower = retrieve_from_vault(storage_vault, storage_record) - plot_settings@type = "mean" - - if (isatt(plot_settings, "idx")) then - delete(plot_settings@idx) - end if - plot_settings@idx = idx_mod - - statistics = True - statistics = compute_stat((/"yrs", "refA", "mean", "bob", "eio", "sahel", \ - "sa34", "en34"/), valid_statistics, lower, \ - dummy_array) - plot_settings@gsn_right_string = lower@gsn_right_string - plot_settings@skip_default_title = True - plot2 = single_contour(wks, lower, main_title_prefix, main_title_suffix, \ - plot_settings, debuginfo, statistics, res) - delete(statistics) - - txres = True - if (any(idx_mod .eq. plot_settings@idx_ref)) then - inset_top_text(wks, plot1, "Reference", txres) - inset_top_text(wks, plot2, "Reference", txres) - end if - two_by_one_labelbar(wks, plot2, plot_settings@lb_title) - - if (debuginfo) then - txres@txFuncCode = "~" - debugbox = write_info(debuginfo) - am_infobox_id = place_debuginfo(wks, debugbox, txres, plot1) - am_infobox_id = place_debuginfo(wks, debugbox, txres, plot2) - drawNDCGrid(wks) - end if - draw(plot1) - draw(plot2) - if (debuginfo) then - place_description(wks, debuginfo@description, debuginfo@description_ycoord) - end if - - leave_msg(scriptname, funcname) - -end - -; ############################################################################# -undef("plot_three_by_one_diff") -procedure plot_three_by_one_diff(wks[1]:graphic, - res[1]:logical, - di[1]:logical, - plot_settings[1]:logical, - valid_statistics[*]:string, - storage_name[1]:string, - debuginfo[1]:logical, - storage_vault[1]:logical, - ref[*][*]:numeric, - idx_mod[1]:integer) -; -; Arguments -; wks: workstation -; res: resources for plot -; di: logical with diagnostic script info -; plot_settings: logical with general plot_settings -; valid_statistics: valid statistics (to be computed if defined) -; storage_name: name of first processed variable in the vault -; debuginfo: info to put onto plot in debug mode -; storage_vault: handle/pointer to vault with all models/variables -; idx_mod: index of current model -; -; Return value -; -; Description -; Plots three contour plots, top: reference, middle model, -; bottom their difference -; -; Caveats -; -; References -; -; Modification history -; 20150702-A_eval_ma: written. 
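The layout the header above describes (reference on top, model in the middle, their difference at the bottom, with the model regridded to the reference grid before subtracting) can be sketched in a few lines of Python; xarray's interp_like() stands in here for the regridding done inside get_dataset_minus_ref(), and all names are illustrative:

```python
import matplotlib.pyplot as plt
import xarray as xr

def three_panel_diff(model: xr.DataArray, ref: xr.DataArray) -> plt.Figure:
    """Reference on top, model in the middle, model-minus-reference below."""
    diff = model.interp_like(ref) - ref  # put the model on the ref grid first
    fig, axes = plt.subplots(3, 1, figsize=(6, 9))
    for ax, (field, label) in zip(axes, [(ref, "(1) reference"),
                                         (model, "(2) model"),
                                         (diff, "(2) - (1)")]):
        field.plot(ax=ax)
        ax.set_title(label)
    fig.tight_layout()
    return fig
```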
-; -local am_infobox_id, cn_levels_string, curr, debugbox, diff_model_ref, \ - dummy_array, header_prefix, main_title, main_title_suffix, plot, plot_diff, \ - plot_ref, res, statistics, storage_record, title_name, txres -begin - - funcname = "plot_three_by_one_diff" - scriptname = "diag_scripts/shared/plot/monsoon_panels.ncl" - enter_msg(scriptname, funcname) - - res = panel_three_by_one(res, 0) ; Update resource for first plot - res@cnLevelSelectionMode = "ExplicitLevels" - if (isatt(res, "cnLevels")) then - delete(res@cnLevels) - end if - - dummy_array = (/1e+20/) - dummy_array@_FillValue = 1e+20 - - ; ------------------------------------- - ; Plot reference plot (top plot) - ; ------------------------------------- - if (isatt(res, "cnLevels")) then - delete(res@cnLevels) - end if - cn_levels_string = \ - str_sub_str("cn_levels_" + storage_name + "_basic", "-", "_") - res@cnLevels = di@$cn_levels_string$ - - ; Title string might be a substring of the variable name - ; used for storing the data in the vault - title_name = keep_only_n_cols(plot_settings, storage_name) - header_prefix = \ - empty_string_if_missing_attribute(plot_settings, "prefix_to_header") - - main_title_prefix = \ - "(1) " + header_prefix + di@season + "-" + title_name + " of " - main_title_suffix = get_title_suffix(plot_settings) - plot_settings@type = "ref" - - if (isatt(plot_settings, "idx")) then - delete(plot_settings@idx) - end if - plot_settings@idx = plot_settings@idx_ref - - ; Possibly switch from default colormap for non-diff plot - if (isatt(plot_settings, "default_palette")) then - if (isatt(res, "cnFillPalette")) then - delete(res@cnFillPalette) - end if - res@cnFillPalette = plot_settings@default_palette - end if - - statistics = True - statistics = compute_stat((/"yrs", "mean", "bob", "eio", "sahel", "sa34", \ - "en34"/), valid_statistics, ref, dummy_array) - plot_ref = single_contour(wks, ref, main_title_prefix, main_title_suffix, \ - plot_settings, debuginfo, statistics, res) - delete(statistics) - txres = True - inset_top_text(wks, plot_ref, "Reference", txres) - - three_by_one_labelbar(wks, plot_ref, plot_settings) - - ; ------------------------------------- - ; Plot model entry (middle plot) - ; ------------------------------------- - header_prefix = \ - empty_string_if_missing_attribute(plot_settings, "prefix_to_header") - - title_name = keep_only_n_cols(plot_settings, storage_name) - main_title_prefix = \ - "(2) " + header_prefix + di@season + "-" + title_name + " of " - res = panel_three_by_one(res, 1) ; Update resource for second plot - - storage_record = \ - str_join((/storage_name, sprinti("%i", idx_mod)/), str_vault_sep) - curr = retrieve_from_vault(storage_vault, storage_record) - plot_settings@type = "mean" - - if (isatt(plot_settings, "idx")) then - delete(plot_settings@idx) - end if - plot_settings@idx = idx_mod - - statistics = True - statistics = compute_stat((/"yrs", "mean", "bob", "eio", "sahel", "sa34", \ - "en34"/), valid_statistics, curr, dummy_array) - statistics = compute_stat((/"corr"/), valid_statistics, curr, ref) - plot = single_contour(wks, curr, main_title_prefix, main_title_suffix, \ - plot_settings, debuginfo, statistics, res) - delete(statistics) - - ; --------------------------------------- - ; Plot reference model diff (bottom plot) - ; --------------------------------------- - main_title = "(2) - (1)" - main_title_suffix = "" - res = panel_three_by_one(res, 2) ; Update resource for third plot - delete(res@cnLevels) - cn_levels_string = \ - str_sub_str("cn_levels_" + 
storage_name + "_diff_basic", "-", "_") - res@cnLevels = di@$cn_levels_string$ - - ; Plot mean of differences of first and second data set, first - ; interpolate to the reference data set grid (conservative interpolation) - diff_model_ref = get_dataset_minus_ref(curr, ref) - - plot_settings@type = "diff" - plot_settings@skip_default_title = True - - ; Possibly switch from default colormap for diff plot - if (isatt(plot_settings, "diff_palette")) then - if (isatt(res, "cnFillPalette")) then - delete(res@cnFillPalette) - end if - res@cnFillPalette = plot_settings@diff_palette - end if - - statistics = True - statistics = \ - compute_stat((/"mean"/), valid_statistics, diff_model_ref, dummy_array) - statistics = compute_stat((/"rmse"/), valid_statistics, curr, ref) - plot_diff = single_contour(wks, diff_model_ref, main_title, \ - main_title_suffix, plot_settings, debuginfo, \ - statistics, res) - delete(statistics) - delete(diff_model_ref) - plot_settings@skip_default_title = False - - if (debuginfo) then - txres@txFuncCode = "~" - debugbox = write_info(debuginfo) - am_infobox_id = place_debuginfo(wks, debugbox, txres, plot_ref) - am_infobox_id = place_debuginfo(wks, debugbox, txres, plot) - am_infobox_id = place_debuginfo(wks, debugbox, txres, plot_diff) - drawNDCGrid(wks) - end if - draw(plot_ref) - draw(plot_diff) - draw(plot) - if (debuginfo) then - place_description(wks, debuginfo@description, debuginfo@description_ycoord) - end if - - leave_msg(scriptname, funcname) - -end - -; ############################################################################# -undef("two_by_one") -procedure two_by_one(storage_vault[1]:logical, - di[1]:logical, - plot_settings[1]:logical, - storage_name1[1]:string, - storage_name2[1]:string, - debuginfo[1]:logical, - valid_statistics[*]:string, - res[1]:logical) -; -; Arguments -; storage_vault: handle/pointer to vault with all models -; di: logical with diagnostic script info -; plot_settings: logical with general plot_settings -; res: resources for plot -; storage_name1: name of first processed variable in the vault -; storage_name2: name of second processed variable in the vault -; debuginfo: info to put onto plot in debug mode -; valid_statistics: valid statistics (to be computed if defined) -; res: valid ncl resources to apply to plot -; -; Return value -; -; Description -; Loops models in storage_vault and calls plot_two_by_one(..) -; for each model -; -; Caveats -; -; References -; -; Modification history -; 20150702-A_eval_ma: written. -; -local aux_title_info, cn_levels_string, curr, diag_description, \ - diag_script_base, dim_MOD, dim_VAR, idx_mod, dummy_array, field_type0, \ - lbtitle, main_title, main_title_prefix, output_dir, output_filename, \ - output_file_path, plot, plot_diff, plot_ref, ref, res, storage_record, \ - textres, var0, wks, diff_model_ref, txres -begin - - funcname = "two_by_one" - scriptname = "diag_scripts/shared/plot/monsoon_panels.ncl" - enter_msg(scriptname, funcname) - - dim_MOD = dimsizes(input_file_info@dataset) - dim_VAR = dimsizes(variables) - var0 = variables(0) - field_type0 = field_types(0) - ; Handle the case when using two variables - if (dimsizes(variables) .gt. 1) then - var1 = variables(1) - field_type1 = field_types(1) - else - var1 = "" - field_type1 = "" - end if - - ; 'output_file_type' if fetched from ncl.interface - if (.not. 
isdefined("output_file_type")) then - output_file_type = "ps" - end if - - ; Output dir - ; 'plot_dir' if fetched from ncl.interface - diag_script_base = basename(plot_settings@diag_script) - output_dir = config_user_info@plot_dir - - ; -------------------------------- - ; Static resources for these plots - ; -------------------------------- - res@cnFillOn = True - res@cnLinesOn = False - res@cnMissingValFillColor = "Background" - res@cnLineLabelsOn = False - res@gsnFrame = False - res@gsnDraw = False - res@gsnAddCyclic = False - - do idx_mod = 0, dim_MOD - 1 - - ; ---------------------------------------- - ; Define output workstation - ; ---------------------------------------- - if (isatt(di, "filter_name")) then - aux_title_info = di@season + "-" + di@filter_name + "-" + \ - storage_name1 + "-" + storage_name2 - else - aux_title_info = di@season + "-" + storage_name1 + "-" + storage_name2 - end if - output_filename = \ - interface_get_figure_filename(diag_script_base, var0 + var1, \ - field_type0 + field_type1, \ - aux_title_info, idx_mod) - output_file_path = output_dir + output_filename - wks = gsn_open_wks(output_file_type, output_file_path) - plot_two_by_one(wks, \ - res, \ - di, \ - plot_settings, \ - valid_statistics, \ - storage_name1, \ - storage_name2, \ - False, \ - storage_vault, \ - idx_mod) - frame(wks) ; Workstation output - delete(wks) - - ; Debug version - if (debuginfo) then - if (isatt(di, "filter_name")) then - aux_title_info = di@season + "-" + di@filter_name + "-" + \ - storage_name1 + "-" + storage_name2 + "-debug" - else - aux_title_info = di@season + "-" + storage_name1 + "-" + \ - storage_name2 + "-debug" - end if - output_filename = \ - interface_get_figure_filename(diag_script_base, var0 + var1, \ - field_type0 + field_type1, \ - aux_title_info, idx_mod) - output_file_path = output_dir + output_filename - wks_debug = gsn_open_wks(output_file_type, output_file_path) - plot_two_by_one(wks_debug, \ - res, \ - di, \ - plot_settings, \ - valid_statistics, \ - storage_name1, \ - storage_name2, \ - debuginfo, \ - storage_vault, \ - idx_mod) - frame(wks_debug) ; Workstation output - delete(wks_debug) - end if - - end do ; idx_mod = 0, dim_MOD - 1 - - leave_msg(scriptname, funcname) - -end - -; ############################################################################# -undef("three_by_one_diff") -procedure three_by_one_diff(storage_vault[1]:logical, - di[1]:logical, - plot_settings[1]:logical, - storage_name[1]:string, - debuginfo[1]:logical, - valid_statistics[*]:string, - res[1]:logical) -; -; Arguments -; storage_vault: handle/pointer to vault with all models -; di: logical with diagnostic script info -; plot_settings: logical with general plot_settings -; res: resources for plot -; storage_name: name of first processed variable in the vault -; debuginfo: info to put onto plot in debug mode -; valid_statistics: valid statistics (to be computed if defined) -; res: valid ncl resources to apply to plot -; -; Return value -; -; Description -; Loops models in storage_vault and calls plot_three_by_one_diff(..) -; for each model -; -; Caveats -; -; References -; -; Modification history -; 20150702-A_eval_ma: written. 
-; -local aux_title_info, cn_levels_string, curr, diag_description, \ - diag_script_base, dim_MOD, dim_VAR, idx_mod, dummy_array, field_type0, \ - lbtitle, main_title, main_title_prefix, output_dir, output_filename, \ - output_file_path, plot, plot_diff, plot_ref, ref, res, storage_record, \ - textres, var0, wks, diff_model_ref, txres -begin - - funcname = "three_by_one_diff" - scriptname = "diag_scripts/shared/plot/monsoon_panels.ncl" - enter_msg(scriptname, funcname) - - dim_MOD = dimsizes(input_file_info@dataset) - dim_VAR = dimsizes(variables) - var0 = variables(0) - field_type0 = field_types(0) - - ; Handle the case when using two variables - if (dimsizes(variables) .gt. 1) then - var1 = variables(1) - field_type1 = field_types(1) - else - var1 = "" - field_type1 = "" - end if - - ; 'output_file_type' is fetched from ncl.interface - if (.not. isdefined("output_file_type")) then - output_file_type = "ps" - end if - - ; Output dir - ; 'plot_dir' is fetched from ncl.interface - diag_script_base = basename(plot_settings@diag_script) - output_dir = config_user_info@plot_dir - - ; -------------------------------- - ; Static resources for these plots - ; -------------------------------- - res@cnFillOn = True - res@cnLinesOn = False - res@cnMissingValFillColor = "Background" - res@cnLineLabelsOn = False - res@gsnFrame = False - res@gsnDraw = False - res@gsnAddCyclic = False - - storage_record = \ - str_join((/storage_name, \ - sprinti("%i", plot_settings@idx_ref(0))/), str_vault_sep) - ref = retrieve_from_vault(storage_vault, storage_record) - - do idx_mod = 0, dim_MOD - 1 - if (any(idx_mod .eq. plot_settings@idx_ref)) then - continue - end if - - ; ---------------------------------------- - ; Define output workstation - ; ---------------------------------------- - if (isatt(di, "filter_name")) then - aux_title_info = di@season + "-" + di@filter_name + "-" + storage_name - else - aux_title_info = di@season + "-" + storage_name - end if - output_filename = \ - interface_get_figure_filename(diag_script_base, var0 + var1, \ - field_type0 + field_type1, \ - aux_title_info, idx_mod) - output_file_path = output_dir + output_filename - wks = gsn_open_wks(output_file_type, output_file_path) - plot_three_by_one_diff(wks, \ - res, \ - di, \ - plot_settings, \ - valid_statistics, \ - storage_name, \ - False, \ - storage_vault, \ - ref, \ - idx_mod) - frame(wks) ; Workstation output - delete(wks) - - ; Debug version - if (debuginfo) then - if (isatt(di, "filter_name")) then - aux_title_info = di@season + "-" + di@filter_name + "-" + \ - storage_name + "-debug" - else - aux_title_info = di@season + "-" + storage_name + "-debug" - end if - output_filename = \ - interface_get_figure_filename(diag_script_base, var0 + var1, \ - field_type0 + field_type1, \ - aux_title_info, idx_mod) - output_file_path = output_dir + output_filename - wks_debug = gsn_open_wks(output_file_type, output_file_path) - plot_three_by_one_diff(wks_debug, \ - res, \ - di, \ - plot_settings, \ - valid_statistics, \ - storage_name, \ - debuginfo, \ - storage_vault, \ - ref, \ - idx_mod) - frame(wks_debug) ; Workstation output - delete(wks_debug) - end if - - end do ; idx_mod = 0, dim_MOD - 1 - - leave_msg(scriptname, funcname) - -end - -; ############################################################################# -undef("plot_three_by_one_vector") -procedure plot_three_by_one_vector(wks[1]:graphic, - res[1]:logical, - di[1]:logical, - plot_settings[1]:logical, - 
valid_statistics[*]:string, - storage_names[1]:logical, - debuginfo[1]:logical, - storage_vault[1]:logical, - ua_ref[*][*]:numeric, - va_ref[*][*]:numeric, - speed_ref[*][*]:numeric, - idx_mod[1]:integer) -; -; Arguments -; wks: workstation -; res: resources for plot -; di: logical with diagnostic script info -; plot_settings: logical with general plot_settings -; valid_statistics: valid statistics (to be computed if defined) -; storage_names: logical with the names of the vector components in the vault -; debuginfo: info to put onto plot in debug mode -; storage_vault: handle/pointer to vault with all models/variables -; ua_ref: ua wind reference model/obs -; va_ref: va wind reference model/obs -; speed_ref: wind speed reference for model/obs -; idx_mod: index of current model -; -; Return value -; -; Description -; Plots three contour plots, top: reference, middle: model, -; bottom: their difference. On top of the contour the vector -; field is plotted -; -; Caveats -; -; References -; -; Modification history -; 20150702-A_eval_ma: written. -; - -local am_infobox_id, cn_levels_string, debugbox, diff_model_ref, \ - diff_model_ua_ref, diff_model_va_ref, dummy_array, main_title, \ - main_title_prefix, main_title_suffix, max_speed_ref, plot, plot_diff, \ - plot_diff_v, plot_ref, plot_ref_v, plot_v, speed, statistics, \ - storage_record, txres, ua, va -begin - - funcname = "plot_three_by_one_vector" - scriptname = "diag_scripts/shared/plot/monsoon_panels.ncl" - enter_msg(scriptname, funcname) - - res = panel_three_by_one(res, 0) ; Update resource for first plot - res@cnLevelSelectionMode = "ExplicitLevels" - if (isatt(res, "cnLevels")) then - delete(res@cnLevels) - end if - - ; ------------------------------ - ; Plot reference plot (top plot) - ; ------------------------------ - cn_levels_string = \ - str_sub_str("cn_levels_" + storage_names@base_name + "_basic", "-", "_") - res@cnLevels = di@$cn_levels_string$ - - main_title_prefix = \ - "(1) " + di@season + "-" + storage_names@base_name + " of " - main_title_suffix = "" - plot_settings@type = "ref" - - if (isatt(plot_settings, "idx")) then - delete(plot_settings@idx) - end if - plot_settings@idx = plot_settings@idx_ref - - dummy_array = (/1e+20/) - dummy_array@_FillValue = 1e+20 - - statistics = True - statistics = \ - compute_stat((/"yrs", "mean"/), valid_statistics, speed_ref, dummy_array) - plot_ref = single_contour(wks, \ - speed_ref, \ - main_title_prefix, \ - main_title_suffix, \ - plot_settings, \ - debuginfo, \ - statistics, \ - res) - delete(statistics) - txres = True - inset_top_text(wks, plot_ref, "Reference", txres) - - if (isatt(di, "max_speed_ref")) then - max_speed_ref = di@max_speed_ref - else - max_speed_ref = max(speed_ref) - end if - if (storage_names@base_name .ne. 
"stddev") then - plot_ref_v = single_vector(wks, \ - max_speed_ref, \ - speed_ref, \ - ua_ref, \ - va_ref, \ - main_title_prefix, \ - main_title_suffix, \ - plot_settings, \ - debuginfo, \ - res) - overlay(plot_ref, plot_ref_v) - end if - three_by_one_labelbar(wks, plot_ref, plot_settings) - - ; ------------------------------------- - ; Plot model entry plot (middle plot) - ; ------------------------------------- - main_title_prefix = \ - "(2) " + di@season + "-" + storage_names@base_name + " of " - res = panel_three_by_one(res, 1) ; Update resource for second plot - - storage_record = str_join((/storage_names@storage_x_component, \ - sprinti("%i", idx_mod)/), str_vault_sep) - ua = retrieve_from_vault(storage_vault, storage_record) - - storage_record = str_join((/storage_names@storage_y_component, \ - sprinti("%i", idx_mod)/), str_vault_sep) - va = retrieve_from_vault(storage_vault, storage_record) - - storage_record = str_join((/storage_names@storage_xy_component, \ - sprinti("%i", idx_mod)/), str_vault_sep) - speed = retrieve_from_vault(storage_vault, storage_record) - plot_settings@type = "mean" - - if (isatt(plot_settings, "idx")) then - delete(plot_settings@idx) - end if - plot_settings@idx = idx_mod - - ; Interpolate to reference grid for pattern correlation calculation - diff_model_ref = get_dataset_minus_ref(speed, speed_ref) - - statistics = True - statistics = \ - compute_stat((/"yrs", "mean"/), valid_statistics, speed, dummy_array) - statistics = compute_stat((/"corr"/), valid_statistics, speed, speed_ref) - plot = single_contour(wks, \ - speed, \ - main_title_prefix, \ - main_title_suffix, \ - plot_settings, \ - debuginfo, \ - statistics, \ - res) - delete(statistics) - if (storage_names@base_name .ne. "stddev") then - plot_v = single_vector(wks, \ - max_speed_ref, \ - speed, \ - ua, \ - va, \ - main_title_prefix, \ - main_title_suffix, \ - plot_settings, \ - debuginfo, \ - res) - overlay(plot, plot_v) - end if - - ; -------------------------------------------- - ; Plot model reference diff plot (bottom plot) - ; -------------------------------------------- - ; Plot mean of differnces of first and second data set, first - ; interpolate to the reference data set grid (bilinear interpolation) - - diff_model_ref = get_dataset_minus_ref(speed, speed_ref) - diff_model_ua_ref = get_dataset_minus_ref(ua, ua_ref) - diff_model_va_ref = get_dataset_minus_ref(va, va_ref) - - main_title = "(2) - (1)" - res = panel_three_by_one(res, 2) ; Update resource for third plot - delete(res@cnLevels) - cn_levels_string = str_sub_str("cn_levels_" + storage_names@base_name + \ - "_diff_basic", "-", "_") - res@cnLevels = di@$cn_levels_string$ - - plot_settings@type = "diff" - - statistics = True - statistics = \ - compute_stat((/"mean"/), valid_statistics, diff_model_ref, dummy_array) - statistics = compute_stat((/"rmse"/), valid_statistics, speed, speed_ref) - plot_diff = single_contour(wks, \ - diff_model_ref, \ - main_title, \ - main_title_suffix, \ - plot_settings, \ - debuginfo, \ - statistics, \ - res) - delete(statistics) - if (isatt(di, "max_speed_ref_diff")) then - max_speed_ref_diff = di@max_speed_ref_diff - else - max_speed_ref_diff = max(diff_model_ref) - end if - if (storage_names@base_name .ne. 
"stddev") then - plot_diff_v = single_vector(wks, \ - max_speed_ref, \ - diff_model_ref, \ - diff_model_ua_ref, \ - diff_model_va_ref, \ - main_title, \ - main_title_suffix, \ - plot_settings, \ - debuginfo, \ - res) - overlay(plot_diff, plot_diff_v) - end if - - if (debuginfo) then - debugbox = write_info(debuginfo) - am_infobox_id = place_debuginfo(wks, debugbox, txres, plot_ref) - am_infobox_id = place_debuginfo(wks, debugbox, txres, plot) - am_infobox_id = place_debuginfo(wks, debugbox, txres, plot_diff) - drawNDCGrid(wks) - end if - draw(plot_ref) - draw(plot_diff) - draw(plot) - if (debuginfo) then - place_description(wks, debuginfo@description, debuginfo@description_ycoord) - end if - - leave_msg(scriptname, funcname) - -end - -; ############################################################################# -undef("three_by_one_vector") -procedure three_by_one_vector(storage_vault[1]:logical, - di[1]:logical, - plot_settings[1]:logical, - storage_names[1]:logical, - debuginfo[1]:logical, - valid_statistics[*]:string, - res[1]:logical) -; -; Arguments -; storage_vault: handle/pointer to vault with all models -; di: logical with diagnostic script info -; plot_settings: logical with general plot_settings -; storage_name: name of first processed variable in the vault -; debuginfo: info to put onto plot in debug mode -; valid_statistics: valid statistics (to be computed if defined) -; res: valid ncl resources to apply to plot -; -; Return value -; -; Description -; Loops models in storage_vault and calls plot_three_by_one_vector(..) -; for each model producing a reference plot at the top (abs + vector) -; ditto for the model in the middle, and a diff at the bottom -; -; Caveats -; -; References -; -; Modification history -; 20150702-A_eval_ma: written. -; -local aux_title_info, cn_levels_string, curr, diag_description, \ - diag_script_base, dim_MOD, dim_VAR, idx_mod, field_type0, lbtitle, \ - main_title, main_title_prefix, output_dir, output_filename, \ - output_file_path, plot, plot_diff, plot_ref, ref, res, storage_record, \ - textres, var0, wks, wks_debug, txres -begin - - funcname = "three_by_one_vector" - scriptname = "diag_scripts/shared/plot/monsoon_panels.ncl" - enter_msg(scriptname, funcname) - - dim_MOD = dimsizes(input_file_info@dataset) - dim_VAR = dimsizes(variables) - var0 = variables(0) - var1 = variables(1) - field_type0 = field_types(0) - field_type1 = field_types(1) - - ; 'output_file_type' if fetched from ncl.interface - if (.not. 
isdefined("output_file_type")) then - output_file_type = "ps" - end if - - ; Output dir - ; 'plot_dir' if fetched from ncl.interface - diag_script_base = basename(plot_settings@diag_script) - output_dir = config_user_info@plot_dir - - ; -------------------------------- - ; Static resources for these plots - ; -------------------------------- - res@mpFillOn = False - res@cnFillOn = True - res@cnLinesOn = False - res@cnMissingValFillColor = "Background" - res@cnLineLabelsOn = False - res@gsnFrame = False - res@gsnDraw = False - res@gsnAddCyclic = False - txres = True - txres@txFuncCode = "~" - - ; Fetch reference plot - storage_record = \ - str_join((/storage_names@storage_x_component, \ - sprinti("%i", plot_settings@idx_ref(0))/), str_vault_sep) - ua_ref = retrieve_from_vault(storage_vault, storage_record) - - storage_record = \ - str_join((/storage_names@storage_y_component, \ - sprinti("%i", plot_settings@idx_ref(0))/), str_vault_sep) - va_ref = retrieve_from_vault(storage_vault, storage_record) - - storage_record = \ - str_join((/storage_names@storage_xy_component, \ - sprinti("%i", plot_settings@idx_ref(0))/), str_vault_sep) - speed_ref = retrieve_from_vault(storage_vault, storage_record) - - do idx_mod = 0, dim_MOD - 1 - if (idx_mod .eq. plot_settings@idx_ref) then - continue - end if - - ; ------------------------- - ; Define output workstation - ; ------------------------- - aux_title_info = di@season + "-" + storage_names@base_name - output_filename = \ - interface_get_figure_filename(diag_script_base, var0 + var1, \ - field_type0 + field_type1, \ - aux_title_info, idx_mod) - output_file_path = output_dir + output_filename - wks = gsn_open_wks(output_file_type, output_file_path) - plot_three_by_one_vector(wks, \ - res, \ - di, \ - plot_settings, \ - valid_statistics, \ - storage_names, \ - False, \ - storage_vault, \ - ua_ref, \ - va_ref, \ - speed_ref, \ - idx_mod) - frame(wks) - delete(wks) - - ; Debug version - if (debuginfo) then - aux_title_info = di@season + "-" + storage_names@base_name + "-debug" - output_filename = \ - interface_get_figure_filename(diag_script_base, var0 + var1, \ - field_type0 + field_type1, \ - aux_title_info, idx_mod) - output_file_path = output_dir + output_filename - wks_debug = gsn_open_wks(output_file_type, output_file_path) - plot_three_by_one_vector(wks_debug, \ - res, \ - di, \ - plot_settings, \ - valid_statistics, \ - storage_names, \ - debuginfo, \ - storage_vault, \ - ua_ref, \ - va_ref, \ - speed_ref, \ - idx_mod) - frame(wks_debug) - delete(wks_debug) - end if - end do ; idx_mod = start_idx, dim_MOD - 1 - - leave_msg(scriptname, funcname) - -end - -; ############################################################################# -undef("plot_multipanel") -procedure plot_multipanel(cols[*]:integer, - rows[*]:float, - curr_idx[1]:integer, - curr_page[1]:integer, - res[1]:logical, - storage_name[1]:string, - storage_vault[1]:logical, - wks[1]:graphic, - di[1]:logical, - plot_settings[1]:logical, - valid_statistics[*]:string, - debuginfo[1]:logical, - figures_per_page[*]:integer, - model_panel_placement[*]:integer, - figure_panel_placement[*]:integer, - plot_array[*]:graphic, - type_specifier[1]:string, - no_figures_on_this_page[1]:integer) -; -; Arguments -; cols: number of columns for this panel plot -; rows: number of rows for this panel plot -; curr_idx: current index -; curr_page: current page (may be more than one) -; res: valid ncl resources to apply to plot -; storage_name: name of first processed variable in the vault -; storage_vault: 
handle/pointer to vault with all models -; wks: workstation -; di: logical with diagnostic script info -; plot_settings: logical with general plot_settings -; valid_statistics: valid statistics (to be computed if defined) -; debuginfo: info to put onto plot in debug mode -; figures_per_page: array with number of figures on each page -; model_panel_placement: where to place respective model -; figure_panel_placement: where to place respective figure on the page -; plot_array: plot handles/pointers -; type_specifier: kind of plot, 'mean' or 'stddev' -; no_figures_on_this_page: no of figures on this page -; -; Return value -; -; Description -; Multipanel plot, plots all models on the current page. Top left entry -; is always the reference model. -; -; Caveats -; -; References -; -; Modification history -; 20150702-A_eval_ma: written. -; -local am_infobox_id, blank_plot, cn_levels_string, curr, curr_figure_pos, \ - curr_idx, debugbox, diff_model_ref, dummy_array, header_prefix, idx_fig, \ - idx_mod, lbres, main_title_prefix, main_title_suffix, plot, plot_ref, \ - plottype_lbres, ref, res, statistics, storage_record, txres -begin - - funcname = "plot_multipanel" - scriptname = "diag_scripts/shared/plot/monsoon_panels.ncl" - enter_msg(scriptname, funcname) - - ; Update position, labelbar and title - curr_figure_pos = figure_panel_placement(curr_idx) - res = panel_n_by_cols(res, curr_figure_pos, rows, cols, \ - figures_per_page(curr_page)) - - if (isatt(res, "cnLevels")) then - delete(res@cnLevels) - end if - cn_levels_string = \ - str_sub_str("cn_levels_" + storage_name + "_basic", "-", "_") - res@cnLevels = di@$cn_levels_string$ - - main_title_prefix = "" - main_title_suffix = "" - - ; Fetch reference plot - storage_record = \ - str_join((/storage_name, \ - sprinti("%i", plot_settings@idx_ref(0))/), str_vault_sep) - ref = retrieve_from_vault(storage_vault, storage_record) - - ; Reference plot - if (isatt(plot_settings, "idx")) then - delete(plot_settings@idx) - end if - plot_settings@idx = plot_settings@idx_ref - - dummy_array = (/1e+20/) - dummy_array@_FillValue = 1e+20 - - ; Possibly switch from default colormap for non-diff plot - if (isatt(plot_settings, "default_palette")) then - if (isatt(res, "cnFillPalette")) then - delete(res@cnFillPalette) - end if - res@cnFillPalette = plot_settings@default_palette - end if - - statistics = True - statistics = \ - compute_stat((/"yrs", "refA", "lonB", "mean", "bob", "eio", \ - "sahel", "sa34", "en34"/), valid_statistics, ref, dummy_array) - plot_ref = single_contour(wks, \ - ref, \ - main_title_prefix, \ - main_title_suffix, \ - plot_settings, \ - debuginfo, \ - statistics, \ - res) - delete(statistics) - txres = True - txres@txFuncCode = "~" - if (debuginfo) then - debugbox = write_info(debuginfo) - am_infobox_id = place_debuginfo(wks, debugbox, txres, plot_ref) - end if - - idx_fig = figure_panel_placement(curr_idx) - plot_array(idx_fig) = plot_ref - - ; Skip past the reference plot - curr_idx = curr_idx + 1 - lbres = True - - if (plot_settings@type .eq. 
"diff") then - inset_top_text(wks, plot_ref, "REF", txres) - inset_labelbar(wks, plot_ref, res, "REF", lbres) - main_title_suffix = " - REF" - - ; Possibly switch from default colormap for diff plot - if (isatt(plot_settings, "diff_palette")) then - if (isatt(res, "cnFillPalette")) then - delete(res@cnFillPalette) - end if - res@cnFillPalette = plot_settings@diff_palette - end if - else - inset_top_text(wks, plot_ref, "Reference", txres) - main_title_suffix = "" - end if - - delete(res@cnLevels) - cn_levels_string = \ - str_sub_str("cn_levels_" + storage_name + type_specifier + \ - "_basic", "-", "_") - res@cnLevels = di@$cn_levels_string$ - ; ----------------------------- - ; Create the non-reference plots - ; ----------------------------- - do curr_fig = 1, figures_per_page(curr_page) - 1 - - main_title_prefix = "" - idx_mod = model_panel_placement(curr_idx) - idx_fig = figure_panel_placement(curr_idx) - - ; Skip reference models - if any((idx_mod .eq. plot_settings@idx_ref)) then - continue - end if - - ; Update placement and labelbar colors - res = panel_n_by_cols(res, \ - figure_panel_placement(curr_idx), \ - rows, \ - cols, \ - figures_per_page(curr_page)) - - storage_record = \ - str_join((/storage_name, sprinti("%i", idx_mod)/), str_vault_sep) - curr = retrieve_from_vault(storage_vault, storage_record) - - statistics = True - statistics = compute_stat((/"yrs"/), valid_statistics, curr, dummy_array) - if (plot_settings@type .eq. "diff") then - statistics = compute_stat((/"rmse"/), valid_statistics, curr, ref) - diff_model_ref = get_dataset_minus_ref(curr, ref) - delete(curr) - curr = diff_model_ref - delete(diff_model_ref) - else - statistics = compute_stat((/"corr"/), valid_statistics, curr, ref) - end if - statistics = \ - compute_stat((/"yrs", "mean", "bob", "eio", "sahel", "sa34", \ - "en34"/), valid_statistics, curr, dummy_array) - - if (isatt(plot_settings, "idx")) then - delete(plot_settings@idx) - end if - plot_settings@idx = idx_mod - plot = single_contour(wks, \ - curr, \ - main_title_prefix, \ - main_title_suffix, \ - plot_settings, \ - debuginfo, \ - statistics, \ - res) - - if (debuginfo) then - debugbox = write_info(debuginfo) - am_infobox_id = place_debuginfo(wks, debugbox, txres, plot) - end if - - plot_array(idx_fig) = plot - - ; Update index to point to next field - curr_idx = curr_idx + 1 - delete(curr) - delete(statistics) - - end do ; curr_fig=1, figures_per_page(curr_page) - 1 - - plottype_lbres = False - - ; --------------------------------------------------------- - ; Create an blank plot for shared labelbar placement (mean) - ; --------------------------------------------------------- - header_prefix = \ - empty_string_if_missing_attribute(plot_settings, "prefix_to_header") - title_name = keep_only_n_cols(plot_settings, storage_name) - blank_plot = \ - add_blank_plot_title(wks, header_prefix + di@season + "-" + title_name + \ - plot_settings@part_of_header, rows, cols) - ; Create shared labelbar - n_by_cols_labelbar(wks, blank_plot, \ - plot_array(no_figures_on_this_page - 1), rows, cols, \ - plot_settings@lb_units, plottype_lbres) - - ; -------------------- - ; Draw mean value plot - ; -------------------- - if (debuginfo) then - drawNDCGrid(wks) - end if - draw(plot_array) - draw(blank_plot) - if (debuginfo) then - place_description(wks, debuginfo@description, debuginfo@description_ycoord) - end if - - leave_msg(scriptname, funcname) - -end - -; ############################################################################# -undef("multipanel") -procedure 
multipanel(storage_vault[1]:logical, - di[1]:logical, - plot_settings[1]:logical, - storage_name[1]:string, - debuginfo[1]:logical, - valid_statistics[*]:string, - res[1]:logical) -; -; Arguments -; storage_vault: handle/pointer to vault with all models -; di: logical with diagnostic script info -; plot_settings: logical with general plot_settings -; storage_name: name of first processed variable in the vault -; debuginfo: info to put onto plot in debug mode -; valid_statistics: valid statistics (to be computed if defined) -; res: valid ncl resources to apply to plot -; -; Return value -; -; Description -; Determines how to place a number of contour plots in a grid across -; multiple pages. Loop over pages and call plot_multipanel(...) for -; each page to plot entries. -; -; Caveats -; -; References -; -; Modification history -; 20150702-A_eval_ma: written. -; -local aux_title_info, blank_plot, cn_levels_string, cols, curr, curr_fig, \ - curr_figure_pos, curr_idx, curr_idx_debug, curr_page, diag_script_base, \ - dim_MOD, dim_VAR, dummy_array, field_type0, figure_panel_placement, \ - figures_per_page, idx_fig, idx_mod, lbres, main_title_prefix, \ - model_panel_placement, no_figures_on_this_page, output_dir, \ - output_filename, output_file_path, page_no, plot, plot_array, \ - plot_mean_diff, plot_ref, plot_stddev, plot_stddev_diff, plottype_lbres, \ - ref, res, rows, storage_record, total_no_of_pages, txres, type_specifier, \ - var0, wks -begin - - funcname = "multipanel" - scriptname = "diag_scripts/shared/plot/monsoon_panels.ncl" - enter_msg(scriptname, funcname) - - ; No of models may differ from dimsizes(dim_MOD) if variables - ; from different models are combined (e.g., 'ts' from HadISST - ; and 'pr' from TRMM) - ref_no = 0 - stored_ref_name = "model" - dim_MOD = no_unique_vault_entries(storage_vault, stored_ref_name, ref_no) - - dim_VAR = dimsizes(variables) - - var0 = variables(0) - field_type0 = field_types(0) - if (dimsizes(variables) .gt. 1) then - var1 = variables(1) - field_type1 = field_types(1) - else - var1 = "" - field_type1 = "" - end if - - ; 'output_file_type' if fetched from ncl.interface - if (.not. isdefined("output_file_type")) then - output_file_type = "ps" - end if - - ; Output dir - ; 'plot_dir' if fetched from ncl.interface - diag_script_base = basename(plot_settings@diag_script) - output_dir = config_user_info@plot_dir - - ; -------------------------------- - ; Static resources for these plots - ; -------------------------------- - res@cnFillOn = True - res@cnLinesOn = False - res@cnLevelSelectionMode = "ExplicitLevels" - res@cnMissingValFillColor = "Background" - res@cnLineLabelsOn = False - res@gsnFrame = False - res@gsnDraw = False - res@lbLabelBarOn = False - res@gsnAddCyclic = False - - ; -------------------------------------- - ; Compute the layout of paneled figures - ; -------------------------------------- - figures_per_page = \ - get_figures_per_page(dim_MOD, max_figures_pp, min_figures_pp) - - ; Which model goes where across all pages - model_panel_placement = new((/sum(figures_per_page)/), integer) - - ; Which model goes where on each page? - figure_panel_placement = new((/sum(figures_per_page)/), integer) - place_models_on_pages(input_file_info, plot_settings@idx_ref, \ - figures_per_page, model_panel_placement, \ - figure_panel_placement) - - ; Output dir - ; 'plot_dir' is fetched from ncl.interface - diag_script_base = basename(diag_script) - output_dir = config_user_info@plot_dir - - if (plot_settings@type .eq. 
"diff") then - type_specifier = "_diff" - else - type_specifier = "" - end if - - ; --------------------------- - ; Loop over all output pages - ; --------------------------- - curr_idx = 0 - curr_idx_debug = 0 - total_no_of_pages = dimsizes(figures_per_page) - - do curr_page = 0, total_no_of_pages - 1 - - ; -------------------------- - ; Plot arrays for gsn_panels - ; -------------------------- - plot_array = new((/max_figures_pp/), graphic) - - no_figures_on_this_page = figures_per_page(curr_page) - - ; Create a string to add to the figure output filename for mulitple pages - if (total_no_of_pages .gt. 1) then - page_no = "-page" + sprinti("%i", curr_page) - else - page_no = "" - end if - - ; ----------------------------------- - ; Define output workstation for plots - ; ----------------------------------- - idx_mod = -1 ; No specific model defined - if (isatt(di, "filter_name")) then - aux_title_info = di@season + "-" + di@filter_name + "-" + \ - storage_name + type_specifier + page_no - else - aux_title_info = \ - di@season + "-" + storage_name + type_specifier + page_no - end if - output_filename = \ - interface_get_figure_filename(diag_script_base, var0 + var1, \ - field_type0 + field_type1, \ - aux_title_info, idx_mod) - output_file_path = output_dir + output_filename - wks = gsn_open_wks(output_file_type, output_file_path) - - cols = multipanel_get_no_cols(no_figures_on_this_page, max_cols) - rows = multipanel_get_no_rows(no_figures_on_this_page, max_cols) - - plot_multipanel(cols, \ - rows, \ - curr_idx, \ - curr_page, \ - res, \ - storage_name, \ - storage_vault, \ - wks, \ - di, \ - plot_settings, \ - valid_statistics, \ - False, \ ; No debuginfo - figures_per_page, \ - model_panel_placement, \ - figure_panel_placement, \ - plot_array, \ - type_specifier, \ - no_figures_on_this_page) - frame(wks) - - if (debuginfo) then - if (isatt(di, "filter_name")) then - aux_title_info = di@season + "-" + di@filter_name + "-" + \ - storage_name + type_specifier + page_no + "-debug" - else - aux_title_info = \ - di@season + "-" + storage_name + type_specifier + page_no + "-debug" - end if - output_filename = \ - interface_get_figure_filename(diag_script_base, var0 + var1, \ - field_type0 + field_type1, \ - aux_title_info, idx_mod) - output_file_path = output_dir + output_filename - wks_debug = gsn_open_wks(output_file_type, output_file_path) - - plot_multipanel(cols, \ - rows, \ - curr_idx_debug, \ - curr_page, \ - res, \ - storage_name, \ - storage_vault, \ - wks_debug, \ - di, \ - plot_settings, \ - valid_statistics, \ - debuginfo, \ - figures_per_page, \ - model_panel_placement, \ - figure_panel_placement, \ - plot_array, \ - type_specifier, \ - no_figures_on_this_page) - frame(wks_debug) - end if - end do ; curr_fig = 1, figures_per_page(curr_page) - 1 - - leave_msg(scriptname, funcname) - -end - -; ############################################################################# -undef("plot_multipanel_vector") -procedure plot_multipanel_vector(cols[*]:integer, - rows[*]:float, - curr_idx[1]:integer, - curr_page[1]:integer, - res[1]:logical, - storage_names[1]:logical, - storage_vault[1]:logical, - wks[1]:graphic, - di[1]:logical, - plot_settings[1]:logical, - valid_statistics[*]:string, - debuginfo[1]:logical, - figures_per_page[*]:integer, - model_panel_placement[*]:integer, - figure_panel_placement[*]:integer, - plot_array[*]:graphic, - type_specifier[1]:string, - no_figures_on_this_page[1]:integer) -; -; Arguments -; cols: number of columns for this panel plot -; rows: number of rows for 
this panel plot -; curr_idx: current index -; curr_page: current page (may be more than one) -; res: valid ncl resources to apply to plot -; storage_name: name of first processed variable in the vault -; storage_vault: handle/pointer to vault with all models -; wks: workstation -; di: logical with diagnostic script info -; plot_settings: logical with general plot_settings -; valid_statistics: valid statistics (to be computed if defined) -; debuginfo: info to put onto plot in debug mode -; figures_per_page: array with number of figures on each page -; model_panel_placement: where to place respective model -; figure_panel_placement: where to place respective figure on the page -; plot_array: plot handles/pointers -; type_specifier: kind of plot, 'mean' or 'stddev' -; no_figures_on_this_page: no of figures on this page -; -; Return value -; -; Description -; Multipanel plot for contour with vector overlay, plots all models -; on the current page. Top left entry is always the reference model. -; -; Caveats -; -; References -; -; Modification history -; 20150702-A_eval_ma: written. -; -local am_infobox_id, blank_plot, cn_levels_string, curr_figure_pos, curr_idx, \ - debugbox, diff_model_ref, diff_model_ua_ref, diff_model_va_ref, \ - dummy_array, header_prefix, idx_fig, idx_mod, lbres, main_title_prefix, \ - main_title_suffix, max_speed_ref, plot, plot_ref, plot_ref_v, plot_v, \ - plottype_lbres, res, speed, speed_ref, statistics, storage_record, txres, \ - ua, ua_ref, va, va_ref -begin - - funcname = "plot_multipanel_vector" - scriptname = "diag_scripts/shared/plot/monsoon_panels.ncl" - enter_msg(scriptname, funcname) - - ; Update position, labelbar and title - curr_figure_pos = figure_panel_placement(curr_idx) - res = panel_n_by_cols(res, curr_figure_pos, rows, cols, \ - figures_per_page(curr_page)) - - if (isatt(res, "cnLevels")) then - delete(res@cnLevels) - end if - cn_levels_string = \ - str_sub_str("cn_levels_" + storage_names@base_name + "_basic", "-", "_") - res@cnLevels = di@$cn_levels_string$ - - main_title_prefix = "" - main_title_suffix = "" - - ; Fetch reference plot - storage_record = \ - str_join((/storage_names@storage_x_component, \ - sprinti("%i", plot_settings@idx_ref(0))/), str_vault_sep) - ua_ref = retrieve_from_vault(storage_vault, storage_record) - - storage_record = \ - str_join((/storage_names@storage_y_component, \ - sprinti("%i", plot_settings@idx_ref(0))/), str_vault_sep) - va_ref = retrieve_from_vault(storage_vault, storage_record) - - storage_record = \ - str_join((/storage_names@storage_xy_component, \ - sprinti("%i", plot_settings@idx_ref(0))/), str_vault_sep) - speed_ref = retrieve_from_vault(storage_vault, storage_record) - if (isatt(di, "max_speed_ref")) then - max_speed_ref = di@max_speed_ref - else - max_speed_ref = max(speed_ref) - end if - - if (isatt(plot_settings, "idx")) then - delete(plot_settings@idx) - end if - plot_settings@idx = plot_settings@idx_ref - - dummy_array = (/1e+20/) - dummy_array@_FillValue = 1e+20 - - statistics = True - statistics = \ - compute_stat((/"yrs", "mean"/), valid_statistics, speed_ref, dummy_array) - plot_ref = single_contour(wks, \ - speed_ref, \ - main_title_prefix, \ - main_title_suffix, \ - plot_settings, \ - debuginfo, \ - statistics, \ - res) - - delete(statistics) - if (storage_names@base_name .ne. 
"stddev") then - plot_ref_v = single_vector(wks, \ - max_speed_ref, \ - speed_ref, \ - ua_ref, \ - va_ref, \ - main_title_prefix, \ - main_title_suffix, \ - plot_settings, \ - debuginfo, \ - res) - overlay(plot_ref, plot_ref_v) - end if - - txres = True - txres@txFuncCode = "~" - if (debuginfo) then - debugbox = write_info(debuginfo) - am_infobox_id = place_debuginfo(wks, debugbox, txres, plot_ref) - end if - - delete(res@cnLevels) - cn_levels_string = \ - str_sub_str("cn_levels_" + storage_names@base_name + type_specifier + \ - "_basic", "-", "_") - res@cnLevels = di@$cn_levels_string$ - - idx_fig = figure_panel_placement(curr_idx) - plot_array(idx_fig) = plot_ref - - ; Skip past the reference plot - curr_idx = curr_idx + 1 - - lbres = True - txres = True - txres@txFuncCode = "~" - - if (plot_settings@type .eq. "diff") then - inset_top_text(wks, plot_ref, "REF", txres) - inset_labelbar(wks, plot_ref, res, "REF", lbres) - main_title_suffix = " - REF" - else - inset_top_text(wks, plot_ref, "Reference", txres) - main_title_suffix = "" - end if - - ; ------------------------------ - ; Create the non-reference plots - ; ------------------------------ - do curr_fig = 1, figures_per_page(curr_page) - 1 - - main_title_prefix = "" - idx_mod = model_panel_placement(curr_idx) - idx_fig = figure_panel_placement(curr_idx) - - ; Update placement and labelbar colors - res = panel_n_by_cols(res, figure_panel_placement(curr_idx), rows, cols, \ - figures_per_page(curr_page)) - - storage_record = str_join((/storage_names@storage_x_component, \ - sprinti("%i", idx_mod)/), str_vault_sep) - ua = retrieve_from_vault(storage_vault, storage_record) - - storage_record = str_join((/storage_names@storage_y_component, \ - sprinti("%i", idx_mod)/), str_vault_sep) - va = retrieve_from_vault(storage_vault, storage_record) - - storage_record = str_join((/storage_names@storage_xy_component, \ - sprinti("%i", idx_mod)/), str_vault_sep) - speed = retrieve_from_vault(storage_vault, storage_record) - - statistics = True - statistics = compute_stat((/"yrs"/), valid_statistics, speed, dummy_array) - if (plot_settings@type .eq. "diff") then - - ; Plot mean of differences of first and second data set, first - ; interpolate to the reference data set grid (bilinear interpolation) - statistics = compute_stat((/"rmse"/), valid_statistics, speed, speed_ref) - - ; ua/va/speed field interpolation - diff_model_ref = get_dataset_minus_ref(speed, speed_ref) - diff_model_ua_ref = get_dataset_minus_ref(ua, ua_ref) - diff_model_va_ref = get_dataset_minus_ref(va, va_ref) - delete(speed) - speed = diff_model_ref - delete(ua) - ua = diff_model_ua_ref - delete(va) - va = diff_model_va_ref - - if (isatt(di, "max_speed_ref_diff")) then - max_speed_ref = di@max_speed_ref_diff - else - max_speed_ref = max(diff_model_ref) - end if - - delete(diff_model_ref) - delete(diff_model_ua_ref) - delete(diff_model_va_ref) - - else - statistics = compute_stat((/"corr"/), valid_statistics, speed, speed_ref) - end if - statistics = compute_stat((/"mean"/), valid_statistics, speed, dummy_array) - - if (isatt(plot_settings, "idx")) then - delete(plot_settings@idx) - end if - plot_settings@idx = idx_mod - - plot = single_contour(wks, \ - speed, \ - main_title_prefix, \ - main_title_suffix, \ - plot_settings, \ - debuginfo, \ - statistics, \ - res) - delete(statistics) - if (storage_names@base_name .ne. 
"stddev") then - plot_v = single_vector(wks, \ - max_speed_ref, \ - speed, \ - ua, \ - va, \ - main_title_prefix, \ - main_title_suffix, \ - plot_settings, \ - debuginfo, \ - res) - overlay(plot, plot_v) - end if - - if (debuginfo) then - debugbox = write_info(debuginfo) - am_infobox_id = place_debuginfo(wks, debugbox, txres, plot) - end if - - plot_array(idx_fig) = plot - - ; Update index to point to next field - curr_idx = curr_idx + 1 - delete(ua) - delete(va) - delete(speed) - - end do ; curr_fig=1, figures_per_page(curr_page) - 1 - - plottype_lbres = False - - ; --------------------------------------------------------- - ; Create an blank plot for shared labelbar placement (mean) - ; --------------------------------------------------------- - header_prefix = \ - empty_string_if_missing_attribute(plot_settings, "prefix_to_header") - blank_plot = add_blank_plot_title(wks, header_prefix + di@season + "-" + \ - plot_settings@type + \ - plot_settings@part_of_header, rows, cols) - ; Create shared labelbar - n_by_cols_labelbar(wks, \ - blank_plot, \ - plot_array(no_figures_on_this_page - 1), \ - rows, \ - cols, \ - plot_settings@lb_units, \ - plottype_lbres) - - ; -------------------- - ; Draw mean value plot - ; -------------------- - if (debuginfo) then - drawNDCGrid(wks) - end if - draw(plot_array) - draw(blank_plot) - if (debuginfo) then - place_description(wks, debuginfo@description, debuginfo@description_ycoord) - end if - - leave_msg(scriptname, funcname) - -end - -; ############################################################################# -undef("multipanel_vector") -procedure multipanel_vector(storage_vault[1]:logical, - di[1]:logical, - plot_settings[1]:logical, - storage_names[1]:logical, - debuginfo[1]:logical, - valid_statistics[*]:string, - res[1]:logical) -; -; Arguments -; storage_vault: handle/pointer to vault with all models -; di: logical with diagnostic script info -; plot_settings: logical with general plot_settings -; storage_name: name of first processed variable in the vault -; debuginfo: info to put onto plot in debug mode -; valid_statistics: valid statistics (to be computed if defined) -; res: valid ncl resources to apply to plot -; -; Return value -; -; Description -; Determines how to place a number of contour plots in a grid across -; multiple pages. Loop over pages and call plot_multipanel(...) for -; each page to plot entries. -; -; Caveats -; -; References -; -; Modification history -; 20150702-A_eval_ma: written. -; -local aux_title_info, blank_plot, cn_levels_string, cols, curr_fig, \ - curr_figure_pos, curr_idx, curr_page, diag_script_base, dim_MOD, dim_VAR, \ - dummy_array, field_type0, field_type1, figure_panel_placement, \ - figures_per_page, idx_fig, idx_mod, lbres, main_title_prefix, \ - model_panel_placement, no_figures_on_this_page, output_dir, \ - output_filename, output_file_path, page_no, plot, plot_array, plot_ref, \ - plottype_lbres, res, rows, speed, speed_ref, storage_record, \ - total_no_of_pages, txres, type_specifier, ua, ua_ref, va, var0, var1, \ - va_ref, wks, plot_ref_v -begin - - funcname = "multipanel_vector" - scriptname = "diag_scripts/shared/plot/monsoon_panels.ncl" - enter_msg(scriptname, funcname) - - dim_MOD = dimsizes(input_file_info@dataset) - dim_VAR = dimsizes(variables) - var0 = variables(0) - field_type0 = field_types(0) - if (dimsizes(variables) .gt. 
1) then - var1 = variables(1) - field_type1 = field_types(1) - else - var1 = "" - field_type1 = "" - end if - - ; 'output_file_type' if fetched from ncl.interface - if (.not. isdefined("output_file_type")) then - output_file_type = "ps" - end if - - ; Output dir - ; 'plot_dir' if fetched from ncl.interface - diag_script_base = basename(plot_settings@diag_script) - output_dir = config_user_info@plot_dir - - ; -------------------------------- - ; Static resources for these plots - ; -------------------------------- - res@mpFillOn = False - res@cnFillOn = True - res@cnLinesOn = False - res@cnLevelSelectionMode = "ExplicitLevels" - res@cnMissingValFillColor = "Background" - res@cnLineLabelsOn = False - res@gsnFrame = False - res@gsnDraw = False - res@lbLabelBarOn = False - res@gsnAddCyclic = False - - ; -------------------------------------- - ; Compute the layout of paneled figures - ; -------------------------------------- - figures_per_page = \ - get_figures_per_page(dim_MOD, max_figures_pp, min_figures_pp) - - ; Which model goes where across all pages - model_panel_placement = new((/sum(figures_per_page)/), integer) - - ; Which model goes where on each page? - figure_panel_placement = new((/sum(figures_per_page)/), integer) - place_models_on_pages(input_file_info, \ - plot_settings@idx_ref, \ - figures_per_page, \ - model_panel_placement, \ - figure_panel_placement) - - ; Output dir - ; 'plot_dir' is fetched from ncl.interface - diag_script_base = basename(diag_script) - output_dir = config_user_info@plot_dir - - if (plot_settings@type .eq. "diff") then - type_specifier = "_diff" - else - type_specifier = "" - end if - - ; --------------------------- - ; Loop over all output pages - ; --------------------------- - curr_idx = 0 - curr_idx_debug = 0 - total_no_of_pages = dimsizes(figures_per_page) - - do curr_page = 0, total_no_of_pages - 1 - - ; -------------------------- - ; Plot arrays for gsn_panels - ; -------------------------- - plot_array = new((/max_figures_pp/), graphic) - - no_figures_on_this_page = figures_per_page(curr_page) - - ; Create a string to add to the figure output\ - ; filename for mulitple pages - if (total_no_of_pages .gt. 
1) then - page_no = "-page" + sprinti("%i", curr_page) - else - page_no = "" - end if - - ; ----------------------------------- - ; Define output workstation for plots - ; ----------------------------------- - idx_mod = -1 ; No specific model defined - if (isatt(di, "filter_name")) then - aux_title_info = di@season + "-" + di@filter_name + "-" + \ - storage_names@base_name + type_specifier + page_no - else - aux_title_info = di@season + "-" + storage_names@base_name + \ - type_specifier + page_no - end if - output_filename = \ - interface_get_figure_filename(diag_script_base, var0 + var1, \ - field_type0 + field_type1, \ - aux_title_info, idx_mod) - output_file_path = output_dir + output_filename - wks = gsn_open_wks(output_file_type, output_file_path) - - cols = multipanel_get_no_cols(no_figures_on_this_page, max_cols) - rows = multipanel_get_no_rows(no_figures_on_this_page, max_cols) - - plot_multipanel_vector(cols, \ - rows, \ - curr_idx, \ - curr_page, \ - res, \ - storage_names, \ - storage_vault, \ - wks, \ - di, \ - plot_settings, \ - valid_statistics, \ - False, \ ; No debuginfo - figures_per_page, \ - model_panel_placement, \ - figure_panel_placement, \ - plot_array, \ - type_specifier, \ - no_figures_on_this_page) - frame(wks) - if (debuginfo) then - if (isatt(di, "filter_name")) then - aux_title_info = di@season + "-" + di@filter_name + "-" + \ - storage_names@base_name + type_specifier + page_no + "-debug" - else - aux_title_info = di@season + "-" + storage_names@base_name + \ - type_specifier + page_no + "-debug" - end if - output_filename = \ - interface_get_figure_filename(diag_script_base, var0 + var1, \ - field_type0 + field_type1, \ - aux_title_info, idx_mod) - output_file_path = output_dir + output_filename - wks_debug = gsn_open_wks(output_file_type, output_file_path) - - plot_multipanel_vector(cols, \ - rows, \ - curr_idx_debug, \ - curr_page, \ - res, \ - storage_names, \ - storage_vault, \ - wks_debug, \ - di, \ - plot_settings, \ - valid_statistics, \ - debuginfo, \ - figures_per_page, \ - model_panel_placement, \ - figure_panel_placement, \ - plot_array, \ - type_specifier, \ - no_figures_on_this_page) - frame(wks_debug) - end if - end do - - leave_msg(scriptname, funcname) - -end - -; ############################################################################# -undef("seasonal_plot") -procedure seasonal_plot(storage_vault[1]:logical, - di[1]:logical, - plot_settings[1]:logical, - storage_name[1]:string, - debuginfo[1]:logical) -; -; Arguments -; storage_vault: handle/pointer to vault with all models -; di: logical with diagnostic script info -; plot_settings: logical with general plot_settings -; storage_name: name of first processed variable in the vault -; debuginfo: info to put onto plot in debug mode -; -; Return value -; -; Description -; Produces a bar plot for seasonal climatology or interannual -; variability. The various logcials carry switches for the -; -; Caveats -; -; References -; -; Modification history -; 20150703-A_eval_ma: written. 
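Editorial aside: the bar plots described above are produced with NCL's gsnXYBarChart resources. A minimal, self-contained sketch of that pattern, with invented data and an illustrative output name (not part of the diagnostic):

load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl"
load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl"

begin
  wks = gsn_open_wks("png", "bar_sketch")  ; hypothetical output name
  res = True
  res@gsnXYBarChart = True                 ; draw bars instead of a line
  res@gsnXYBarChartBarWidth = 0.15         ; same bar width as used below
  res@gsnYRefLine = 0.                     ; bars grow up or down from y = 0
  xaxis = fspan(0, 11, 12)                 ; one bar per month
  values = random_uniform(0., 5., 12)      ; dummy monthly climatology
  plot = gsn_csm_xy(wks, xaxis, values, res)
end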
-; -local am_infobox_id, am_labelbar_id, amres, debugbox, diag_script_base, \ - dim_VAR, field_type0, idx_mod, labelbar, labels, lbres, output_dir, \ - output_filename, output_file_path, plot_array, precip, \ - precip_seasonal_maxY, precip_seasonal_minY, precip_seasonal_spreadY, res, \ - storage_record, txres, var0, wks, xaxis, var1, field_type1 -begin - - funcname = "seasonal_plot" - scriptname = "diag_scripts/shared/plot/monsoon_panels.ncl" - enter_msg(scriptname, funcname) - - dim_MOD = dimsizes(input_file_info@dataset) - dim_VAR = dimsizes(variables) - var0 = variables(0) - field_type0 = field_types(0) - if (dim_VAR .gt. 1) then ; Assume two variables? - var1 = variables(1) - field_type1 = field_types(1) - else - var1 = "" - field_type1 = "" - end if - - storage_record = storage_name - precip = retrieve_from_vault(storage_vault, storage_record) - units = precip@units - - precip_seasonal_maxY = max(precip) - precip_seasonal_minY = min(precip) - precip_seasonal_spreadY = precip_seasonal_maxY - precip_seasonal_minY - - ; Plot arrays for gsn_plots - plot_array = new((/dim_MOD/), graphic) - - ; ------------------------------ - ; General resources for all plot - ; ------------------------------ - res = True - res@gsnDraw = False - res@gsnFrame = False - res@gsnXYBarChart = True - res@gsnXYBarChartBarWidth = 0.15 ; change bar widths - res@gsnYRefLine = 0. ; reference line - - ; Specific plot resource for plot iii) - if (plot_settings@type .eq. "iav") then - res@tmXBFormat = "4f" - xaxis = di@years - else - ; Set months on x-axis for plots i) and ii) - res@tmXBMode = "Explicit" ; Define your own tick mark labels. - res@tmXBMinorOn = False ; No minor tick marks. - res@tmXBValues = ispan(0, 11, 1) ; Location to put tick mark labels - res@tmXBLabels = (/"Jan", "Feb", "Mar", "Apr", "May", "Jun", \ - "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"/) - res@tmXBLabelFont = 25 ; Change font of labels. - res@tmXBLabelFontHeightF = 0.015 ; Change font height of labels. 
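; Editorial note: with tmXBMode = "Explicit", tick marks are drawn only at
; the positions listed in tmXBValues, and each is labelled with the matching
; entry of tmXBLabels; the two arrays must have the same length (12 here,
; one per month).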
- - ; Add some margins - res@trXMinF = -1 - res@trXMaxF = 12 - - xaxis = fspan(0, 11, 12) - end if - - labels = new(dim_MOD, string) - - ; Output dir - ; 'plot_dir' if fetched from ncl.interface - diag_script_base = basename(diag_script) - output_dir = config_user_info@plot_dir - - ; ---------------------------------------------- - ; Define output workstation for seasonal anomaly - ; ---------------------------------------------- - idx_mod = -1 ; No specific model defined - aux_filename_info = plot_settings@aux_filename_info - output_filename = \ - interface_get_figure_filename(diag_script_base, var0 + var1, \ - field_type0 + field_type1, \ - aux_filename_info, idx_mod) - output_file_path = output_dir + output_filename - wks = gsn_open_wks(output_file_type, output_file_path) - - ; --------- - ; Plot loop - ; --------- - do idx_mod = 0, dim_MOD - 1 - labels(idx_mod) = input_file_info@dataset(idx_mod) - res@gsnXYBarChartColors = di@colors_seasonal(idx_mod) - - ; Shift plot to the right and draw/redraw - res@vpXF = 0.2 + idx_mod * 0.005 - res@trYMinF = precip_seasonal_minY - precip_seasonal_spreadY * 0.05 - res@trYMaxF = precip_seasonal_maxY + precip_seasonal_spreadY * 0.05 - - ; Set strings for titles and axis for plot i) and ii) - res@tiMainString = set_string_once(plot_settings@title_string, idx_mod) - res@tiYAxisString = set_string_once(plot_settings@yaxis_string, idx_mod) - res@gsnLeftString = \ - set_string_once(plot_settings@left_plot_subheader, idx_mod) - res@gsnRightString = set_string_once(units, idx_mod) - - if (isatt(plot_settings, "xaxis_string")) then - res@tiXAxisString = set_string_once(plot_settings@xaxis_string, idx_mod) - end if - - plot_array(idx_mod) = gsn_csm_xy(wks, xaxis, precip(idx_mod, :), res) - - ; Remove borders such that next plot only draws the actual "bars" - res = remove_figure_borders(res) - end do ; idx_mod = 0, dim_MOD - 1 - - ; Label bar resources - lbres = True ; labelbar only resources - lbres@vpWidthF = 0.30 ; labelbar width - lbres@vpHeightF = 0.024 * dim_MOD ; labelbar height - lbres@lbBoxMajorExtentF = 0.36 ; puts space between color boxes - lbres@lbFillColors = di@colors_seasonal - lbres@lbMonoFillPattern = True ; Solid fill pattern - lbres@lbLabelJust = "CenterLeft" ; left justify labels - lbres@lbPerimOn = True - lbres@lbPerimFill = 0 - lbres@lbPerimFillColor = "white" - lbres@lbPerimColor = "black" - labelbar = gsn_create_labelbar(wks, dim_MOD, labels, lbres) - - ; Place annotations - amres = True - amres@amZone = 3 - amres@amSide = "Right" - amres@amParallelPosF = 0.88 - amres@amOrthogonalPosF = -0.25 - am_labelbar_id = gsn_add_annotation(plot_array(dim_MOD - 1), labelbar, amres) - - ; If requested, add a text info box and/or debug info box to each plot - txres = True - txres@txFuncCode = "~" - if (debuginfo) then - debuginfo@years = di@years_string - debugbox = write_info(debuginfo) - am_infobox_id = place_debuginfo(wks, debugbox, txres, \ - plot_array(dim_MOD - 1)) - drawNDCGrid(wks) - end if - - draw(wks) - if (debuginfo) then - place_description(wks, debuginfo@description, debuginfo@description_ycoord) - end if - frame(wks) - - leave_msg(scriptname, funcname) - -end - -; ############################################################################# -undef("xy_plot_wrapper") -procedure xy_plot_wrapper(storage_vault[1]:logical, - di[1]:logical, - plot_settings[1]:logical, - storage_name[1]:string, - debuginfo[1]:logical) -; -; Arguments -; storage_vault: handle/pointer to vault with all models -; di: logical with diagnostic script info -; 
plot_settings: logical with general plot_settings -; storage_name: name of first processed variable in the vault -; debuginfo: info to put onto plot in debug mode -; -; Return value -; -; Description -; Wrapper function for the plot script 'xy_line(...)' -; -; Caveats -; -; References -; -; Modification history -; 20150703-A_eval_ma: written. -; -local annots, annots_mmm, avgstd, avgstd_mmm, colors, colors_mmm, \ - dashes, dashes_mmm, diag_script_base, dim_VAR, field_type0, field_type1, i, \ - idx_mod, imon, output_dir, output_filename, output_file_path, res, \ - storage_record, temp, thicks, thicks_mmm, values, values_stddev, \ - values_stddev, var0, var1, wks, wks_debug, idx_th -begin - - funcname = "xy_plot_wrapper" - scriptname = "diag_scripts/shared/plot/monsoon_panels.ncl" - enter_msg(scriptname, funcname) - - dim_MOD = dimsizes(input_file_info@dataset) - dim_VAR = dimsizes(variables) - var0 = variables(0) - field_type0 = field_types(0) - if (dim_VAR .gt. 1) then ; Assume two variables? - var1 = variables(1) - field_type1 = field_types(1) - else - var1 = "" - field_type1 = "" - end if - - storage_record = storage_name - values = retrieve_from_vault(storage_vault, storage_record) - units = values@units - - ; 'plot_dir' if fetched from ncl.interface - diag_script_base = basename(diag_script) - output_dir = config_user_info@plot_dir - ; ---------------------------------------------- - ; Define output workstation for xy plot - ; ---------------------------------------------- - idx_mod = -1 ; No specific model defined - aux_filename_info = plot_settings@aux_filename_info - output_filename = \ - interface_get_figure_filename(diag_script_base, var0 + var1, \ - field_type0 + field_type1, \ - aux_filename_info, idx_mod) - output_file_path = output_dir + output_filename - wks = gsn_open_wks(output_file_type, output_file_path) - - aux_filename_info = plot_settings@aux_filename_info + "-debug" - output_filename = \ - interface_get_figure_filename(diag_script_base, var0 + var1, \ - field_type0 + field_type1, \ - aux_filename_info, idx_mod) - output_file_path = output_dir + output_filename - if (debuginfo) then - wks_debug = gsn_open_wks(output_file_type, output_file_path) - end if - - values&month@units = "Multi-year monthly mean" - - ; Select colors and other plotting attributes - ; See ./diag_scripts/shared/plot/style.ncl - colors = project_style(diag_script_info, "colors") - dashes = project_style(diag_script_info, "dashes") - thicks = project_style(diag_script_info, "thicks") - annots = project_style(diag_script_info, "annots") - avgstd = project_style(diag_script_info, "avgstd") - - ; Select colors and other plotting attributes for multi-model mean - if (diag_script_info@multi_model_mean .eq. 
"y") then - ; project_style evaluates metadata of variable "models" - temp = models - - ; -> keep original "models" in "temp" and restore later - copy_VarMeta(models, temp) - delete(models) - - ; Use "models" to pass on attribute names - models = getvaratts(temp) - do i = 0, dimsizes(models) - 1 - ; Define all original attributes again, but empty - models@$models(i)$ = "" - end do - input_file_info@dataset = "model_mean" - - ; See ./diag_scripts/shared/plot/style.ncl - colors_mmm = project_style(diag_script_info, "colors") - dashes_mmm = project_style(diag_script_info, "dashes") - thicks_mmm = project_style(diag_script_info, "thicks") - annots_mmm = project_style(diag_script_info, "annots") - avgstd_mmm = project_style(diag_script_info, "avgstd") - delete(models) - models = temp ; Restore original "models" - copy_VarMeta(temp, models) - delete(temp) - end if - - ; Calculate standard deviation of models - if (di@multi_model_mean .ne. "y") then - ; define anyway, because fields are needed as parameters for xy_line - values_stddev = 0 - else - ; Mean, stddev, -1 * stddev, +1 * stddev - values_stddev = new((/4, 12/), float) - ; See ./diag_scripts/shared/plot/style.ncl for which data shall be - ; included in the statistics - temp = ind(avgstd .eq. 0) - do imon = 0, 11 - values_stddev(0, imon) = dim_avg_n_Wrap(values(temp, imon), (/0/)) - values_stddev(1, imon) = dim_stddev_n_Wrap(values(temp, imon), (/0/)) - values_stddev(2, imon) = values_stddev(0, imon) - values_stddev(1, imon) - values_stddev(3, imon) = values_stddev(0, imon) + values_stddev(1, imon) - end do - delete(temp) - end if - - ; Name of x-axis coordinate - x_axis_coord = "month" ; default - if (isatt(plot_settings, "x_axis_coord")) then - x_axis_coord = plot_settings@x_axis_coord - end if - - ; Min and max values on x-axis - res = True - res@tmXTOn = False - res@tmYROn = True - if (di@supporting_gridlines .eq. "y") then - res@tmXMajorGrid = True - res@gsnYRefLine = 0 - end if - res@trXMinF = min(values&$x_axis_coord$) - 0.05 * \ - (max(values&$x_axis_coord$) - min(values&$x_axis_coord$)) - res@trXMaxF = max(values&$x_axis_coord$) + 0.25 * \ - (max(values&$x_axis_coord$) - min(values&$x_axis_coord$)) - - res@tmXBMode = "Explicit" - res@tmXBValues = (/0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11/) - res@tmXBLabels = (/"J", "F", "M", "A", "M", "J", \ - "J", "A", "S", "O", "N", "D"/) - res@tiMainString = plot_settings@title_string - res@tiYAxisString = plot_settings@yaxis_string - res@gsnRightString = values@units - res@gsnRightStringFontHeightF = 16 - res@gsnLeftString = plot_settings@left_plot_subheader - res@gsnLeftStringFontHeightF = 16 - - ; Replace any CMIP5-style line thickness - if (diag_script_info@styleset .eq. 
"CMIP5") then - res@xyLineThicknesses = thicks - res@xyLineThicknesses(:) = 3 - end if - - xy_line(wks, values, values&month, values_stddev, res, False) - - if (debuginfo) then - xy_line(wks_debug, values, values&month, values_stddev, res, debuginfo) - end if - - leave_msg(scriptname, funcname) - -end - -; ############################################################################# -undef("ts_line_wrapper") -procedure ts_line_wrapper(storage_vault[1]:logical, - di[1]:logical, - plot_settings[1]:logical, - storage_name[1]:string, - debuginfo[1]:logical) -; -; Arguments -; storage_vault: handle/pointer to vault with all models -; di: logical with diagnostic script info -; plot_settings: logical with general plot_settings -; storage_name: name of first processed variable in the vault -; debuginfo: info to put onto plot in debug mode -; -; Return value -; -; Description -; Wrapper script for the plot script 'xy_line(..)' with a -; time series on the x-axis. -; -; Caveats -; -; References -; -; Modification history -; 20150703-A_eval_ma: written. -; -local annots, avgstd, diag_script_base, dim_VAR, field_type0, field_type1, \ - idx_mod, imonth, output_dir, output_filename, output_file_path, res, \ - storage_record, temp, val_area_stddev, values, values_stddev, var0, var1, \ - wks, wks_debug -begin - - funcname = "ts_line_wrapper" - scriptname = "diag_scripts/shared/plot/monsoon_panels.ncl" - enter_msg(scriptname, funcname) - - dim_MOD = dimsizes(input_file_info@dataset) - dim_VAR = dimsizes(variables) - var0 = variables(0) - field_type0 = field_types(0) - if (dim_VAR .gt. 1) then ; Assume two variables? - var1 = variables(1) - field_type1 = field_types(1) - else - var1 = "" - field_type1 = "" - end if - - storage_record = storage_name - values = retrieve_from_vault(storage_vault, storage_record) - units = values@units - - ; 'plot_dir' if fetched from ncl.interface - diag_script_base = basename(diag_script) - output_dir = config_user_info@plot_dir - - ; ---------------------------------------------- - ; Define output workstation for xy plot - ; ---------------------------------------------- - idx_mod = -1 ; No specific model defined - aux_filename_info = plot_settings@aux_filename_info - output_filename = \ - interface_get_figure_filename(diag_script_base, var0 + var1, \ - field_type0 + field_type1, \ - aux_filename_info, idx_mod) - output_file_path = output_dir + output_filename - wks = gsn_open_wks(output_file_type, output_file_path) - - aux_filename_info = plot_settings@aux_filename_info + "-debug" - output_filename = \ - interface_get_figure_filename(diag_script_base, var0 + var1, \ - field_type0 + field_type1, \ - aux_filename_info, idx_mod) - output_file_path = output_dir + output_filename - if (debuginfo) then - wks_debug = gsn_open_wks(output_file_type, output_file_path) - end if - - ; Determine time range - start_year = min(stringtoint(input_file_info@start_year)) - end_year = max(stringtoint(input_file_info@end_year)) - - ; Select attributes - annots = project_style(diag_script_info, "annots") - avgstd = project_style(diag_script_info, "avgstd") - - ; Prepare calculation of standard deviation of models - if (diag_script_info@multi_model_mean .eq. 
"y") then - values_stddev = new((/4, end_year - start_year + 1/), float) - val_area_stddev = new((/4, end_year - start_year + 1/), float) - else ; initialize anyway, because both are needed as parameters in xy_line - values_stddev = 0 - val_area_stddev = 0 - end if - - ; Calculate standard deviation of models - if (diag_script_info@multi_model_mean .eq. "y") then - ; See style_$project$.ncl for which data shall - ; be included in the statistics - temp = ind(avgstd .eq. 0) - do imonth = 0, dimsizes(values&years) - 1 - ; For extent - values_stddev(0, imonth) = \ - dim_avg_n_Wrap(values(temp, imonth), (/0/)) - values_stddev(1, imonth) = \ - dim_stddev_n_Wrap(values(temp, imonth), (/0/)) - values_stddev(2, imonth) = \ - values_stddev(0, imonth) - values_stddev(1, imonth) - values_stddev(3, imonth) = \ - values_stddev(0, imonth) + values_stddev(1, imonth) - end do - delete(temp) - end if - - res = True - res@tmXTOn = False - res@tmYROn = True - if (di@supporting_gridlines .eq. "y") then - res@tmXMajorGrid = True - res@gsnYRefLine = 0 - end if - res@tmXBFormat = "4f" - res@tiMainString = plot_settings@title_string - res@tiYAxisString = plot_settings@yaxis_string - res@tiXAxisString = plot_settings@xaxis_string - res@gsnRightString = values@units - res@gsnRightStringFontHeightF = 16 - res@gsnLeftString = plot_settings@left_plot_subheader - res@gsnLeftStringFontHeightF = 16 - res@trXMinF = min(di@years) - 0.05 * (max(di@years) - min(di@years)) - res@trXMaxF = max(di@years) + 0.25 * (max(di@years) - min(di@years)) - - ; See ./diag_scripts/shared/plot/style.ncl - colors = project_style(diag_script_info, "colors") - dashes = project_style(diag_script_info, "dashes") - thicks = project_style(diag_script_info, "thicks") - - ; Replace any CMIP5-style line thickness - if (diag_script_info@styleset .eq. "CMIP5") then - res@xyLineThicknesses = thicks - res@xyLineThicknesses(:) = 3 - end if - - xy_line(wks, values, values&years, values_stddev, res, False) - - if (debuginfo) then - xy_line(wks_debug, values, di@years, values_stddev, res, debuginfo) - end if - - leave_msg(scriptname, funcname) - -end - -; ############################################################################# -undef("xy_line_overlap") -procedure xy_line_overlap(storage_vault[1]:logical, - di[1]:logical, - plot_settings[1]:logical, - storage_name[1]:string, - debuginfo[1]:logical) -local annots, avgstd, diag_script_base, dim_VAR, field_type0, field_type1, \ - idx_mod, imonth, output_dir, output_filename, output_file_path, res, \ - storage_record, temp, val_area_stddev, values, values_stddev, var0, var1, \ - wks, wks_debug, res2, coord -begin - - funcname = "xy_line_overlap" - scriptname = "diag_scripts/shared/plot/monsoon_panels.ncl" - enter_msg(scriptname, funcname) - - dim_MOD = dimsizes(input_file_info@name) - dim_VAR = dimsizes(variables) - var0 = variables(0) - field_type0 = field_types(0) - if (dim_VAR .gt. 1) then ; Assume two variables? 
- var1 = variables(1) - field_type1 = field_types(1) - else - var1 = "" - field_type1 = "" - end if - - ; 'plot_dir' if fetched from ncl.interface - diag_script_base = basename(diag_script) - output_dir = config_user_info@plot_dir - - ; ---------------------------------------------- - ; Define output workstation for xy plot - ; ---------------------------------------------- - idx_mod = -1 ; No specific model defined - aux_filename_info = plot_settings@aux_filename_info - output_filename = \ - interface_get_figure_filename(diag_script_base, var0 + var1, \ - field_type0 + field_type1, \ - aux_filename_info, idx_mod) - output_file_path = output_dir + output_filename - wks = gsn_open_wks(output_file_type, output_file_path) - - aux_filename_info = plot_settings@aux_filename_info + "-debug" - output_filename = \ - interface_get_figure_filename(diag_script_base, var0 + var1, \ - field_type0 + field_type1, \ - aux_filename_info, idx_mod) - output_file_path = output_dir + output_filename - if (debuginfo) then - wks_debug = gsn_open_wks(output_file_type, output_file_path) - end if - - plot = new((/dim_MOD/), graphic) - - ; Determine time range - start_year = min(stringtoint(input_file_info@start_year)) - end_year = max(stringtoint(input_file_info@end_year)) - - ; Select attributes - annots = project_style(diag_script_info, "annots") - avgstd = project_style(diag_script_info, "avgstd") - - res = True - res@tmXTOn = False - res@tmYROn = True - - ; Select colors and other plotting attributes - ; (see ./diag_scripts/shared/plot/style.ncl) - colors = project_style(di, "colors") - dashes = project_style(di, "dashes") - thicks = project_style(di, "thicks") - annots = project_style(di, "annots") - avgstd = project_style(di, "avgstd") - do idx_th = 0, dimsizes(thicks) - 1 - if (thicks(idx_th) .lt. 5) then - thicks(idx_th) = 5 - end if - end do - if (diag_script_info@multi_model_mean .eq. "y") then - ; Project_style evaluates metadata of variable "models" - temp = models - - ; -> keep original "models" in "temp" and restore later - copy_VarMeta(models, temp) - delete(models) - - ; Use "models" to pass on attribute names - models = getvaratts(temp) ; use "models" to pass on attribute names - do i = 0, dimsizes(models) - 1 - ; Define all original attributes again, but empty - input_file_info@$models(i)$ = "" - end do - input_file_info@dataset = "model_mean" - - ; See ./diag_scripts/shared/plot/style.ncl - colors_mmm = project_style(diag_script_info, "colors") - dashes_mmm = project_style(diag_script_info, "dashes") - thicks_mmm = project_style(diag_script_info, "thicks") - annots_mmm = project_style(diag_script_info, "annots") - avgstd_mmm = project_style(diag_script_info, "avgstd") - delete(models) - models = temp ; restore original "models" - copy_VarMeta(temp, models) - delete(temp) - end if - - res@gsnFrame = False ; don't advance frame. 
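Editorial aside: the gsnDraw/gsnFrame = False pair above defers rendering so that several curves can be combined with overlay(...) before anything is drawn. A minimal sketch of that pattern (dummy data; names are illustrative):

begin
  wks = gsn_open_wks("png", "overlay_sketch")  ; hypothetical workstation
  res = True
  res@gsnDraw = False    ; build the plot objects ...
  res@gsnFrame = False   ; ... but do not render or advance the frame yet
  x = fspan(0., 10., 51)
  base = gsn_csm_xy(wks, x, sin(x), res)
  second = gsn_csm_xy(wks, x, cos(x), res)
  overlay(base, second)  ; attach the second curve to the base plot
  draw(base)             ; render the base plot plus everything overlaid
  frame(wks)             ; advance the frame once, at the end
end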
- res@xyMonoDashPattern = False - res@gsnDraw = False ; don't draw yet - res@gsnFrame = False ; don't advance frame yet - res@tiMainFontHeightF = .025 ; font height - res@gsnMaximize = True - res@tmYRBorderOn = True - res@tmYROn = True - - res@tmYLMajorOutwardLengthF = 0 ; draw tickmarks inward - res@tmYLMinorOutwardLengthF = 0 ; draw minor ticsk inward - - if (isatt(di, "minmax")) then - ymax = di@minmax(1) - ymin = di@minmax(0) - res@trYMaxF = ymax + 0.03 * (ymax - ymin) - res@trYMinF = ymin - 0.03 * (ymax - ymin) - else - if (.not.isatt(res, "trYMaxF")) then - ymax = di@minmax(1) - ymin = di@minmax(0) - res@trYMaxF = ymax + 0.03 * (ymax - ymin) - res@trYMinF = ymin - 0.03 * (ymax - ymin) - end if - end if - - if (di@x_gridlines .eq. "y") then - res@tmXMajorGrid = True - end if - if (di@y_gridlines .eq. "y") then - res@tmYMajorGrid = True - end if - res@tiMainString = plot_settings@title_string - res@tiYAxisString = plot_settings@yaxis_string - res@tiXAxisString = plot_settings@xaxis_string - res@gsnRightStringFontHeightF = 16 - res@gsnLeftString = plot_settings@left_plot_subheader - - is_cyclic = False - - ; Fetch first entry to check if we need hours or months on X-axis - storage_variable_name = \ - str_join((/storage_name, sprinti("%i", 0)/), str_vault_sep) - values = retrieve_from_vault(storage_vault, storage_variable_name) - - ; So far only two options (hours and latitudes), thus handled expclitily - ; here rather than in a config file.... - if (iscoord(values, "hour")) then - interp = fspan(0, 24, 9) - coord = "hour" - res@trXMinF = 0. - res@trXMaxF = 24. - else - interp = ispan(di@latrange(0), di@latrange(1), 1) - coord = "lat" - end if - interp_size = dimsizes(interp) - interp_mod = new((/interp_size, dim_MOD/), float) - delete(values) - - do idx_mod = 0, dim_MOD - 1 - storage_variable_name = \ - str_join((/storage_name, sprinti("%i", idx_mod)/), str_vault_sep) - values = retrieve_from_vault(storage_vault, storage_variable_name) - interp_mod(:, idx_mod) = \ - linint1(values&$coord$, values, is_cyclic, interp, 0) - res@gsnRightString = values@units - res@xyLineColors = colors(idx_mod) - res@xyLineThicknesses = thicks(idx_mod) - res@xyDashPatterns = dashes(idx_mod) - res@tmYLLabelDeltaF = -0.5 - res@tiMainString = set_string_once(plot_settings@title_string, idx_mod) - res@tiYAxisString = set_string_once(plot_settings@yaxis_string, idx_mod) - res@gsnLeftString = \ - set_string_once(plot_settings@left_plot_subheader, idx_mod) - res@gsnRightString = set_string_once(values@units, idx_mod) - - if (idx_mod .eq. 0) then - plot(idx_mod) = gsn_csm_xy(wks, values&$coord$, values, res) - getvalues plot(0) - "vpXF" : vpx - "vpYF" : vpy - "vpWidthF" : vpw - "vpHeightF" : vph - end getvalues - setvalues plot(0) - "vpWidthF" : vpw - 0.15 - end setvalues - res@vpWidthF = vpw - 0.15 - else - plot(idx_mod) = gsn_csm_xy(wks, values&$coord$, values, res) - overlay(plot(0), plot(idx_mod)) - end if - delete(values) - end do - - ; Add multi model mean and stddev - if (diag_script_info@multi_model_mean .eq. "y") then - is_model = ind(avgstd .eq. 
0) - values_stddev = new((/4, interp_size/), float) - - ; See ./diag_scripts/shared/plot/style.ncl for which data shall be - ; included in the statistics - do idx_interp = 0, interp_size - 1 - values_stddev(0, idx_interp) = \ - dim_avg_n_Wrap(interp_mod(idx_interp, is_model), (/0/)) - values_stddev(1, idx_interp) = \ - dim_stddev_n_Wrap(interp_mod(idx_interp, is_model), (/0/)) - values_stddev(2, idx_interp) = \ - values_stddev(0, idx_interp) - values_stddev(1, idx_interp) - values_stddev(3, idx_interp) = \ - values_stddev(0, idx_interp) + values_stddev(1, idx_interp) - end do - - ; Stddev - res_stddev = True - copy_VarMeta(res, res_stddev) - res_stddev@gsnXYFillColors = "LightGrey" - delete(res_stddev@xyLineColors) - - ; We don't want the line, so make it transparent. - res_stddev@xyLineColor = -1 - shading_plot = gsn_csm_xy(wks, interp, values_stddev(2:3, :), res_stddev) - overlay(plot(0), shading_plot) - ; mmm - res2 = res - delete([/res2@xyLineThicknesses, res2@xyLineColors, res2@xyDashPatterns/]) - res2@xyLineThicknesses = thicks_mmm - res2@xyLineColors = colors_mmm - res2@xyDashPatterns = dashes_mmm - res2@gsnRightString = "" - mmm = gsn_csm_xy(wks, interp, values_stddev(0, :), res2) - overlay(plot(0), mmm) - end if - - if (debuginfo) then - xy_line(wks_debug, values, di@years, values_stddev, res, debuginfo) - end if - - ; Resources for a customized legend. - lgres = True - lgres@lgMonoLineThickness = False - lgres@lgLabelFontHeightF = .08 ; legend label font thickness - lgres@vpWidthF = 0.15 ; width of legend (NDC) - lgres@vpHeightF = vph ; height of legend (NDC) - lgres@lgPerimOn = True - lgres@lgPerimColor = 1 - lgres@lgMonoDashIndex = False - lgres@lgBoxBackground = 0 - lgres@lgPerimFill = 0 - lgres@lgPerimFillColor = 0 - lgres@lgBoxMajorExtentF = 1 - - if (diag_script_info@multi_model_mean .eq. "y") then - lgres@lgLineColors = array_append_record(colors, colors_mmm, 0) - lgres@lgDashIndexes = array_append_record(dashes, dashes_mmm, 0) - lgres@lgLineThicknesses = array_append_record(thicks, thicks_mmm, 0) - labels = array_append_record(annots, annots_mmm, 0) - else - lgres@lgLineColors = colors - lgres@lgDashIndexes = dashes - lgres@lgLineThicknesses = thicks - labels = annots - end if - nitems = dimsizes(labels) ; number of legend items - - ; Create legend - lbid = gsn_create_legend(wks, nitems, labels, lgres) - - ; Add legend at the correct position - ; Point (0, 0) is the dead center of the plot. Point (0, .5) is center, - ; flush bottom. Point (0.5, 0.5) is flush bottom, flush right. 
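Editorial aside: the custom legend below is built with gsn_create_legend and pinned to a plot with gsn_add_annotation. The mechanics in isolation, as a hedged sketch (wks and base_plot are assumed to exist; labels and colors are invented):

  lgres = True
  lgres@vpWidthF = 0.15                    ; legend width (NDC)
  lgres@vpHeightF = 0.10                   ; legend height (NDC)
  lgres@lgLineColors = (/"red", "blue"/)   ; one entry per labelled line
  lgres@lgDashIndexes = (/0, 1/)
  legend = gsn_create_legend(wks, 2, (/"model A", "model B"/), lgres)

  amres = True
  amres@amJust = "TopRight"       ; corner of the legend used as anchor
  amres@amParallelPosF = 0.5      ; flush with the right edge of the plot
  amres@amOrthogonalPosF = -0.5   ; flush with the top edge of the plot
  anno = gsn_add_annotation(base_plot, legend, amres)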
- amres = True - amres@amJust = "TopRight" ; reference corner of box - amres@amParallelPosF = 0.5 ; Move legend to +right, -left - amres@amOrthogonalPosF = -0.5 ; +down, -up - - ; Get plot size - getvalues plot(0) - "vpXF" : vpx - "vpYF" : vpy - "vpWidthF" : vpw - "vpHeightF" : vph - end getvalues - - bres = True - bres@gsnDraw = False - bres@gsnFrame = False - bres@tmXBOn = False - bres@tmYLOn = False - bres@tmXTOn = False - bres@tmYROn = False - bres@tmXBBorderOn = False - bres@tmXTBorderOn = False - bres@tmYLBorderOn = False - bres@tmYRBorderOn = False - - ; Create a blank plot with the same size as plot, attach legend - bres@vpXF = vpx - bres@vpYF = vpy - bres@vpWidthF = vpw + 0.15 - bres@vpHeightF = vph - blank_plot = gsn_csm_blank_plot(wks, bres) - ; Add legend to plot - annoid = gsn_add_annotation(blank_plot, lbid, amres) - - ; Create another blank plot to make sure plot border thickness is even - bres@tmXBBorderOn = True - bres@tmXTBorderOn = True - bres@tmYLBorderOn = True - bres@tmYRBorderOn = True - bres@tmBorderThicknessF = 3 - - blank_plot2 = gsn_csm_blank_plot(wks, bres) - - ; Draw first plot with the actual values (+ grid lines if - ; tmXMajorGrid/gsnYRefLine are set) - draw(plot(0)) - - ; Draw second plot with legend on top of previous plot. This - ; is, as far as I know, the only way to draw the legend on top - ; of the grid lines - draw(blank_plot) - - ; Redraw plot borders since the legend may (partially) cover some - ; of the borders drawn in the first 'plot' - draw(blank_plot2) - if (debuginfo) then - txres = True - txres@txFuncCode = "~" - debugbox = write_info(debuginfo) - am_infobox_id = place_debuginfo(wks, debugbox, txres, blank_plot2) - drawNDCGrid(wks) - place_description(wks, debuginfo@description, debuginfo@description_ycoord) - end if - - draw(plot(0)) - frame(wks) - - leave_msg(scriptname, funcname) - -end diff --git a/esmvaltool/diag_scripts/shared/plot/portrait_plot.ncl b/esmvaltool/diag_scripts/shared/plot/portrait_plot.ncl index 8cef2bc72c..8001fdc5df 100644 --- a/esmvaltool/diag_scripts/shared/plot/portrait_plot.ncl +++ b/esmvaltool/diag_scripts/shared/plot/portrait_plot.ncl @@ -5,6 +5,8 @@ ; Check the header of each routine for documentation. ; ; function portrait_plot +; function circle_plot +; ; ############################################################################# load "$diag_scripts/../interface_scripts/auxiliary.ncl" @@ -26,7 +28,7 @@ function portrait_plot(wks_in[1], ; source: data to be plotted or a NetCDF filename with data. ; varname: variable name in the file. ; -; Source prototype: +; Source prototype ; source = (ndiag, nmod) ; source!0 = diagnostics ; source!1 = models @@ -37,510 +39,536 @@ function portrait_plot(wks_in[1], ; source!0 = diagnostics ; source!1 = models ; -; Return value: +; Return value ; A graphic variable. ; ; Caveats ; -; Modification history: -; 20151027-A_righ_ma: added option for displaying rankings. -; 20140605-A_righ_ma: modified with flexible plot shapes. -; 20140204-A_fran_fr: extended. -; 20140114-A_righ_ma: written. +; Modification history +; 20220609-bock_lisa: added gaps between project blocks and improved style +; 20211014-bock_lisa: added sorting by project +; 20151027-righi_mattia: added option for displaying rankings. +; 20140605-righi_mattia: modified with flexible plot shapes. +; 20140204-winterstein_franziska: extended. +; 20140114-righi_mattia: written. 
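Editorial aside: a hedged sketch of a typical portrait_plot call, consistent with the dimensionality and attribute handling in the function body below; the array contents and names are invented, and get_wks is assumed to fall back to a default workstation when no valid one is passed in:

  grades = random_uniform(-1., 1., (/2, 3/))  ; (ndiag, nmod) metric values
  grades!0 = "diagnostics"
  grades!1 = "models"
  grades@var = "grade"                    ; read via att2var in the body
  grades@diag_script = "portrait_sketch"  ; used to name the default wks
  grades@label_bounds = (/-1., 1./)       ; optional: fix the color scale
  pp = portrait_plot(0, grades, "default")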
 ;
 local funcname, scriptname, wks, wks_in, data, source, res, atts, \
   base, varname
 begin

-    funcname = "portrait_plot"
-    scriptname = "diag_scripts/shared/plot/portrait_plot.ncl"
-    enter_msg(scriptname, funcname)
-
-    ; Get data, either directly or via netCDF file
-    if(typeof(source) .eq. "string") then
-        data = ncdf_read(source, varname)
+  funcname = "portrait_plot"
+  scriptname = "diag_scripts/shared/plot/portrait_plot.ncl"
+  enter_msg(scriptname, funcname)
+
+  ; Get data, either directly or via netCDF file
+  if (typeof(source) .eq. "string") then
+    data = ncdf_read(source, varname)
+  else
+    data = source
+    copy_VarMeta(source, data)
+  end if
+
+  ; Use -999 as fill value (easier to display as text)
+  data@_FillValue = -999.
+
+  ; Retrieve basic metadata from data
+  defaults = (/"default", "dummy", "dummy_for_var", "Default", "Dummy"/)
+  if (any(varname .eq. defaults)) then
+    grade = att2var(data, "var")
+    DIAG_SCRIPT = att2var(data, "diag_script")
+  else
+    grade = varname
+  end if
+
+  ; Check if a valid wks has been provided, otherwise invoke default
+  wks = get_wks(wks_in, DIAG_SCRIPT, grade)
+
+  ; Check source dimensionality
+  grade_dim = dimsizes(data)
+  ndiag = grade_dim(0)
+  nmod = grade_dim(1)
+
+  if (dimsizes(grade_dim).eq.3) then
+    if (grade_dim(2).le.2) then
+      nref = grade_dim(2)
     else
-        data = source
-        copy_VarMeta(source, data)
+      log_info("Maximum 2 references supported")
+      log_info("Only the first 2 will be plotted")
+      nref = 2
+      var = data(:, :, 0:1)
+      delete(data)
+      data = var
+      delete(var)
     end if
-
-    ; Use -999 as fill value (easier to display as text)
-    data@_FillValue = -999.
-
-    ; Retrieve basic metadata from data
-    defaults = (/"default", "dummy", "dummy_for_var", "Default", "Dummy"/)
-    if (any(varname .eq. defaults)) then
-        grade = att2var(data, "var")
-        DIAG_SCRIPT = att2var(data, "diag_script")
+  elseif (dimsizes(grade_dim).eq.2) then
+    nref = 1
+    var = new((/ndiag, nmod, nref/), typeof(data))
+    var(:, :, 0) = data
+    delete(data)
+    data = var
+    delete(var)
+  else
+    error_msg("f", scriptname, funcname, \
+              "input variable must be either 2D or 3D")
+  end if
+
+  ; Set levels
+  if (isatt(data, "label_bounds")) then
+
+    ; User provided
+    chars = \
+      tochar(tostring(abs(data@label_bounds(1) - \
+                          round(data@label_bounds(1), 1)) * 100))
+    five = tochar("5")
+    zero = tochar("0")
+    if (chars(1).eq.five(0)).or.(chars(1).eq.zero(0)) then
+      dec_pl = 100.
     else
-        grade = varname
+      dec_pl = 10.
end if + upperbound = tofloat(round(data@label_bounds(1) * dec_pl, 1)) / dec_pl + lowerbound = tofloat(round(data@label_bounds(0) * dec_pl, 1)) / dec_pl - ; Check if a valid wks has been provided, otherwise invoke default - wks = get_wks(wks_in, DIAG_SCRIPT, grade) + else - ; Check source dimensionality - grade_dim = dimsizes(data) - ndiag = grade_dim(0) - nmod = grade_dim(1) - - if (dimsizes(grade_dim).eq.3) then - if (grade_dim(2).le.2) then - nref = grade_dim(2) - else - log_info("Maximum 2 references supported") - log_info("Only the first 2 will be plotted") - nref = 2 - var = data(:, :, 0:1) - delete(data) - data = var - delete(var) - end if - else if (dimsizes(grade_dim).eq.2) then - nref = 1 - var = new((/ndiag, nmod, nref/), typeof(data)) - var(:, :, 0) = data - delete(data) - data = var - delete(var) - else - error_msg("f", scriptname, funcname, \ - "input variable must be eithed 2D or 3D") - end if + ; Flexible in regard to the range of the data values + bound = max(abs(data(:, :, 0))) + if (all(ismissing(data))) then + delete(bound) + bound = 0.1 + error_msg("w", scriptname, funcname, "all data values are missing!") end if - - ; Set levels - if (isatt(data, "label_bounds")) then - - ; User provided - chars = tochar(tostring( \ - abs(data@label_bounds(1) - round(data@label_bounds(1), 1)) * 100)) - five = tochar("5") - zero = tochar("0") - if (chars(1).eq.five(0)).or.(chars(1).eq.zero(0)) then - dec_pl = 100. - else - dec_pl = 10. - end if - upperbound = tofloat(round(data@label_bounds(1) * dec_pl, 1)) / dec_pl - lowerbound = tofloat(round(data@label_bounds(0) * dec_pl, 1)) / dec_pl - + upperbound = tofloat(round(bound * 10, 1)) / 10. + upperbound = where(upperbound.lt.0.1, 0.1, upperbound) + if (min(data).lt.0) then + lowerbound = -1. * upperbound else - - ; Flexible in regard to the range of the data values - bound = max(abs(data(:, :, 0))) - if all(ismissing(data)) then - delete(bound) - bound = 0.1 - error_msg("w", scriptname, funcname, "all data " + \ - "values are missing!") - end if - upperbound = tofloat(round(bound * 10, 1)) / 10. - upperbound = where(upperbound.lt.0.1, 0.1, upperbound) - if (min(data).lt.0) then - lowerbound = -1. * upperbound - else - lowerbound = 0 - end if - + lowerbound = 0 end if - ; Label scale - if (isatt(data, "label_scale")) then - scale = data@label_scale - else - if (((upperbound - lowerbound) / 0.5).le.6) then - scale = 0.1 - else - scale = 0.5 - end if - end if + end if - ; Set levels - if (isatt(data, "label_lo")) then - label_lo = data@label_lo - else - label_lo = True - end if - if (isatt(data, "label_hi")) then - label_hi = data@label_hi + ; Label scale + if (isatt(data, "label_scale")) then + scale = data@label_scale + else + if (((upperbound - lowerbound) / 0.5).le.6) then + scale = 0.1 else - label_hi = True + scale = 0.5 end if - nlevels = round((upperbound - lowerbound + scale) / scale, 3) - levels = new(nlevels, "float") - levels(0) = lowerbound - do ii = 1, nlevels - 1 - levels(ii) = levels(ii - 1) + scale + end if + + ; Set levels + if (isatt(data, "label_lo")) then + label_lo = data@label_lo + else + label_lo = True + end if + if (isatt(data, "label_hi")) then + label_hi = data@label_hi + else + label_hi = True + end if + nlevels = round((upperbound - lowerbound + scale) / scale, 3) + levels = new(nlevels, "float") + levels(0) = lowerbound + do ii = 1, nlevels - 1 + levels(ii) = levels(ii - 1) + scale + end do + levels = round(100. * levels, 0) / 100. 
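Editorial aside, a worked instance of the level construction above with invented bounds: for label_bounds = (/0., 1./), the default scale test gives (upperbound - lowerbound) / 0.5 = 2, which is <= 6, so scale = 0.1; then nlevels = round((1.0 - 0.0 + 0.1) / 0.1, 3) = 11 and levels = (/0.0, 0.1, ..., 1.0/), i.e. one color bin per 0.1 step before any out-of-range triangles are added via label_lo/label_hi.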
+
+  ; Determine breakpoints for x-values
+  vals = new(nmod, float)
+  vals = 1
+  if isatt(data, "breakpoints") then
+    nbreak = dimsizes(data@breakpoints)
+    do ibreak = 0, nbreak - 1
+      vals(data@breakpoints(ibreak)) = 3.
     end do
+  else
+    nbreak = 0
+  end if
+  x_space = cumsum(vals, 2) - 1
+  delete(vals)
+
+  ; Display metrics value on the plot
+  flag_values = False
+  if (isatt(data, "disp_values")) then
+    if (data@disp_values) then
+      flag_values = True
+      text_v = new((/ndiag, nmod, nref/), graphic)
     end if
+  end if
+
+  ; Display model rankings on the plot
+  flag_ranks = False
+  if (isatt(data, "disp_rankings")) then
+    if (data@disp_rankings) then
+      flag_ranks = True
+      text_r = new((/ndiag, nmod, nref/), graphic)
+      ranks = new((/ndiag, nmod, nref/), integer)
     end if
+  end if
+
+  ; Set a colormap
+  if (isatt(data, "colormap")) then
+    gsn_define_colormap(wks, data@colormap)
+  else
+    gsn_define_colormap(wks, "ncl_default")
+    data@colormap = "ncl_default"
+  end if
+  tmp = read_colormap_file(data@colormap)
+  if (isatt(data, "cm_interval")) then
+    cm_start = data@cm_interval(0)
+    cm_end = data@cm_interval(1)
+  else
+    cm_start = 2
+    cm_size = dimsizes(tmp)
+    cm_end = cm_size(0) - 1
+  end if
+  cmap = tmp(cm_start:cm_end, :)
+  ncmap = dimsizes(cmap(:, 0))
+  delete(tmp)
+
+  ; Sample colormap and extract colors
+  if (label_lo.and.label_hi) then  ; 2 more colors for the triangles
+    pos = new((/nlevels + 1/), float)
+  end if
+  if (label_lo.xor.label_hi) then  ; 1 more color for upper/lower triangle
+    pos = new((/nlevels/), float)
+  end if
+  if (.not.label_lo.and. .not.label_hi) then  ; no more colors
+    pos = new((/nlevels - 1/), float)
+  end if
+  pos(0) = cm_start
+  npos = dimsizes(pos)
+  do ii = 1, npos - 1
+    pos(ii) = pos(ii - 1) + (ncmap - 1) / tofloat(npos - 1)
+  end do
+  delete([/cmap, ncmap/])
+
+  ; Reverse order if required
+  if (isatt(data, "cm_reverse")) then
+    if (data@cm_reverse) then
+      pos = pos(::-1)
     end if
+  end if
+
+  ; Default resources
+  res = True
+  res@gsnDraw = False
+  res@gsnFrame = False
+  res@tmBorderThicknessF = False
+  res@tmXBOn = False
+  res@tmXTOn = False
+  res@tmYLOn = False
+  res@tmYROn = False
+  res@trXMinF = 0.
+  res@trXMaxF = 1.0 + 2.*nbreak/nmod
+  res@trYMinF = 0.
+  res@trYMaxF = 1.0
+
+  ; Set plot aspect depending on ndiag and nmod: longest dimension on y-axis
+  xbsize = 1. / nmod
+  ybsize = 1. / ndiag
+  if (ndiag.ge.nmod) then
+    res@vpWidthF = 0.6 * nmod / tofloat(ndiag)
+    res@vpHeightF = 0.6
+  else
+    res@vpWidthF = 0.6
+    if (isatt(diag_script_info, "scale_aspect")) then
+      ; code for ESA CCI paper Lauer et al.
(2016) + scale_aspect = diag_script_info@scale_aspect + res@vpHeightF = 0.6 * ndiag / tofloat(nmod) * scale_aspect else - cm_start = 2 - cm_size = dimsizes(tmp) - cm_end = cm_size(0) - 1 + ; standard code + res@vpHeightF = 0.6 * ndiag / tofloat(nmod) + if (2 * ndiag.le.nmod) then + res@vpHeightF = 2 * res@vpHeightF + end if end if - cmap = tmp(cm_start:cm_end, :) - ncmap = dimsizes(cmap(:, 0)) - delete(tmp) - - ; Sample colormap and extract colors - if (label_lo.and.label_hi) then ; 2 more colors for the triangles - pos = new((/nlevels + 1/), float) + end if + res@vpYF = 0.2 + res@vpHeightF + + ; Title + res@tiMainFontHeightF = 0.01 + if (isatt(data, "metric")) then + if (isatt(data, "caption")) then + res@tiMainOffsetYF = 0.01 + res@tiMainString = data@caption + else + res@tiMainString = data@metric end if - if (label_lo.xor.label_hi) then ; 1 more color for upper/lower triangle - pos = new((/nlevels/), float) + end if + + ; Override defaults with "res_" attributes of "data" + res_new = att2var(data, "res_") + copy_VarMeta(res_new, res) + + ; Draw a blank plot, polygons will be added below + plot = gsn_blank_plot(wks, res) + + ; Text resources + resTxt = True + resTxt@txFontHeightF = 0.010 + resTxt@txJust = "CenterCenter" + resTxt@txFont = "times-roman" + + ; Polygons resources + resPol = True + resPol@gsLineThicknessF = 0.5 + resPol@gsEdgesOn = True + resPol@gsLineColor = "black" + poly = new((/ndiag, nmod, nref/), graphic) + + ; Calculate rankings + if (flag_ranks) then + if (.not.isatt(data, "rank_order")) then + error_msg("f", scriptname, funcname, \ + "rank_order attribute must be specified if " + \ + "ranking display is desired") end if - if (.not.label_lo.and. .not.label_hi) ; no more colors - pos = new((/nlevels - 1/), float) + if (all(data@rank_order.ne.(/-1, 1/))) then + error_msg("f", scriptname, funcname, \ + "rank_order attribute can be either 1 (increasing) " + \ + "or -1 (decreasing)") end if - pos(0) = cm_start - npos = dimsizes(pos) - do ii = 1, npos - 1 - pos(ii) = pos(ii - 1) + (ncmap - 1) / tofloat(npos - 1) + do idiag = 0, ndiag - 1 + do iref = 0, nref - 1 + pp = dim_pqsort(data(idiag, :, iref), data@rank_order) + rk = 1 + do imod = 0, nmod - 1 + if (.not.ismissing(data(idiag, pp(imod), iref))) then + ranks(idiag, pp(imod), iref) = rk + rk = rk + 1 + end if + end do + delete(rk) + delete(pp) + end do end do - delete([/cmap, ncmap/]) - - ; Reverse order if required - if (isatt(data, "cm_reverse")) then - if (data@cm_reverse) then - pos = pos(::-1) - end if + end if + + ; Draw polygons + do idiag = 0, ndiag - 1 + + ; Set triangle or square + ; - draw a square if only 1 reference model is available or if the + ; alternative model contains only missing values + ; - draw a triangle if an alternative reference is available and + ; contains valid values + l_square = True + if (nref.gt.1) then + if (.not.all(ismissing(data(idiag, :, 1)))) then + l_square = False + end if end if - ; Default resources - res = True - res@gsnDraw = False - res@gsnFrame = False - res@tmBorderThicknessF = False - res@tmXBOn = False - res@tmXTOn = False - res@tmYLOn = False - res@tmYROn = False - res@trXMinF = 0. - res@trXMaxF = 1.0 - res@trYMinF = 0. - res@trYMaxF = 1.0 - - ; Set plot aspect depending on ndiag and nmod: longst dimension on y-axis - xbsize = 1. / nmod - ybsize = 1. 
/ ndiag - if (ndiag.ge.nmod) then - res@vpWidthF = 0.6 * nmod / tofloat(ndiag) - res@vpHeightF = 0.6 - else - res@vpWidthF = 0.6 -; A_laue_ax+ - if (isatt(diag_script_info, "scale_aspect")) then - ; code for ESA CCI paper Lauer et al. (2016) - scale_aspect = diag_script_info@scale_aspect - res@vpHeightF = 0.6 * ndiag / tofloat(nmod) * scale_aspect - else - ; standard code -; A_laue_ax- - res@vpHeightF = 0.6 * ndiag / tofloat(nmod) - if (2 * ndiag.le.nmod) then - res@vpHeightF = 2 * res@vpHeightF - end if -; A_laue_ax+ - end if -; A_laue_ax- - end if - res@vpYF = 0.2 + res@vpHeightF + do imod = 0, nmod - 1 - ; Title - res@tiMainFontHeightF = 0.020 - if (isatt(data, "metric")) then - res@tiMainString = data@metric - end if - if (isatt(data, "region")) then - if (isatt(res, "tiMainString")) then - res@tiMainString = res@tiMainString + data@region - else - res@tiMainString = data@region + ; Find corresponding color for primary reference + curr = data(idiag, imod, 0) + if (ismissing(curr)) then + resPol@gsFillColor = 0 + else + lidx = max(ind(curr.ge.levels)) + if (label_lo) then + lidx = lidx + 1 + lidx = where(ismissing(lidx), 0, lidx) end if - end if - - ; Override defaults with "res_" attributes of "data" - res_new = att2var(data, "res_") - copy_VarMeta(res_new, res) - - ; Draw a blank plot, polygons will be added below - plot = gsn_blank_plot(wks, res) - - ; Text resources - resTxt = True - resTxt@txFontHeightF = 0.010 - resTxt@txJust = "CenterCenter" - - ; Polygons resources - resPol = True - resPol@gsLineThicknessF = 0.5 - resPol@gsEdgesOn = True - resPol@gsLineColor = "black" - poly = new((/ndiag, nmod, nref/), graphic) + if (.not.label_hi .and. lidx.gt.npos - 1) then + error_msg("f", scriptname, funcname, \ + "metric value (" + curr + ") above the " + \ + "label upper bound (" + levels(nlevels - 1) + \ + "): change label_bounds or set " + \ + "label_hi = True") + end if + if (ismissing(lidx)) then + error_msg("f", scriptname, funcname, \ + "metric value (" + curr + ") below the " + \ + "label lower bound (" + levels(0) + \ + "): change label_bounds or set " + \ + "label_lo = True") + end if + resPol@gsFillColor = round(pos(lidx), 3) + end if + + if (l_square) then + xx = (/x_space(imod) * xbsize, (x_space(imod) + 1) * xbsize, \ + (x_space(imod) + 1) * xbsize, x_space(imod) * xbsize, \ + x_space(imod) * xbsize/) + yy = (/idiag * ybsize, idiag * ybsize, (idiag + 1) * ybsize, \ + (idiag + 1) * ybsize, idiag * ybsize/) + poly(idiag, imod, 0) = gsn_add_polygon(wks, plot, xx, yy, resPol) + if (flag_values) then + xt = (xx(0) + xx(1)) / 2. + yt = (yy(1) + yy(2)) / 2 + text_v(idiag, imod, 0) = \ + gsn_add_text(wks, plot, sprintf("%4.2f", data(idiag, imod, 0)), \ + xt, yt, resTxt) + delete(xt) + delete(yt) + end if + if (flag_ranks.and..not.ismissing(data(idiag, imod, 0))) then + xt = (xx(0) + xx(1)) / 2. + yt = (yy(1) + yy(2)) / 2. + text_r(idiag, imod, 0) = \ + gsn_add_text(wks, plot, "#" + ranks(idiag, imod, 0), \ + xt, yt, resTxt) + delete(xt) + delete(yt) + end if + delete(xx) + delete(yy) + + else + + ; Primary reference => lower triangle + xx = (/x_space(imod) * xbsize, (x_space(imod) + 1) * xbsize, \ + (x_space(imod) + 1) * xbsize, x_space(imod) * xbsize/) + yy = (/idiag * ybsize, idiag * ybsize, (idiag + 1) * ybsize, \ + idiag * ybsize/) + poly(idiag, imod, 0) = gsn_add_polygon(wks, plot, xx, yy, resPol) + if (flag_values) then + xt = sum(xx(0:2)) / 3. + yt = sum(yy(0:2)) / 3. 
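+          ; (xt, yt) is the triangle's centroid, i.e. the mean of
+          ; its three vertices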
+ text_v(idiag, imod, 0) = \ + gsn_add_text(wks, plot, sprintf("%4.2f", data(idiag, imod, 0)), \ + xt, yt, resTxt) + delete(xt) + delete(yt) + end if + if (flag_ranks.and..not.ismissing(data(idiag, imod, 0))) then + xt = sum(xx(0:2)) / 3. + yt = sum(yy(0:2)) / 3. + text_r(idiag, imod, 0) = \ + gsn_add_text(wks, plot, "#" + ranks(idiag, imod, 0), \ + xt, yt, resTxt) + delete(xt) + delete(yt) + end if + delete(xx) + delete(yy) - ; Calculate rankings - if (flag_ranks) then - if (.not.isatt(data, "rank_order")) then + ; Find corresponding color for alternative reference + curr = data(idiag, imod, 1) + if (ismissing(curr)) then + resPol@gsFillColor = 0 + else + lidx = max(ind(curr.ge.levels)) + if (label_lo) then + lidx = lidx + 1 + lidx = where(ismissing(lidx), 0, lidx) + end if + if (.not.label_hi .and. lidx.gt.npos - 1) then error_msg("f", scriptname, funcname, \ - "rank_order attribute must be specified if " + \ - "ranking display is desired") - end if - if (all(data@rank_order.ne.(/-1, 1/))) then + "metric value (" + curr + ") above the " + \ + "label upper bound (" + \ + levels(nlevels - 1) + \ + "): change label_bounds or set " + \ + "label_hi = True") + end if + if (ismissing(lidx)) then error_msg("f", scriptname, funcname, \ - "rank_order attribute can be either 1 (increasing) " + \ - "or -1 (decreasing)") + "metric value (" + curr + ") below the " + \ + "label lower bound (" + levels(0) + \ + "): change label_bounds or set " + \ + "label_lo = True") + end if + resPol@gsFillColor = round(pos(lidx), 3) end if - do idiag = 0, ndiag - 1 - do iref = 0, nref - 1 - pp = dim_pqsort(data(idiag, :, iref), data@rank_order) - rk = 1 - do imod = 0, nmod - 1 - if (.not.ismissing(data(idiag, pp(imod), iref))) then - ranks(idiag, pp(imod), iref) = rk - rk = rk + 1 - end if - end do - delete(rk) - delete(pp) - end do - end do - end if - ; Draw polygons - do idiag = 0, ndiag - 1 - - ; Set triangle or square - ; - draw a square if only 1 reference model is available or if the - ; alternative model contains only missing values - ; - draw a triangle if an alternative reference is available and - ; contains valid values - l_square = True - if (nref.gt.1) then - if (.not.all(ismissing(data(idiag, :, 1)))) then - l_square = False - end if + ; Alternative reference => upper triangle + xx = (/x_space(imod) * xbsize, (x_space(imod) + 1) * xbsize, \ + x_space(imod) * xbsize, x_space(imod) * xbsize/) + yy = (/idiag * ybsize, (idiag + 1) * ybsize, \ + (idiag + 1) * ybsize, idiag * ybsize/) + poly(idiag, imod, 1) = gsn_add_polygon(wks, plot, xx, yy, resPol) + if (flag_values) then + xt = sum(xx(0:2)) / 3. + yt = sum(yy(0:2)) / 3. + text_v(idiag, imod, 1) = \ + gsn_add_text(wks, plot, sprintf("%4.2f", data(idiag, imod, 1)), \ + xt, yt, resTxt) + delete(xt) + delete(yt) end if + if (flag_ranks.and..not.ismissing(data(idiag, imod, 1))) then + xt = sum(xx(0:2)) / 3. + yt = sum(yy(0:2)) / 3. + text_r(idiag, imod, 1) = \ + gsn_add_text(wks, plot, "#" + ranks(idiag, imod, 1), \ + xt, yt, resTxt) + delete(xt) + delete(yt) + end if + delete(xx) + delete(yy) - do imod = 0, nmod - 1 - - ; Find corresponding color for primary reference - curr = data(idiag, imod, 0) - if (ismissing(curr)) then - resPol@gsFillColor = 0 - else - lidx = max(ind(curr.ge.levels)) - if (label_lo) then - lidx = lidx + 1 - lidx = where(ismissing(lidx), 0, lidx) - end if - if (.not.label_hi .and. 
lidx.gt.npos - 1) then - error_msg("f", scriptname, funcname, \ - "metric value (" + curr + ") above the " + \ - "label upper bound (" + levels(nlevels - 1) + \ - "): change label_bounds or set " + \ - "label_hi = True") - end if - if (ismissing(lidx)) then - error_msg("f", scriptname, funcname, \ - "metric value (" + curr + ") below the " + \ - "label lower bound (" + levels(0) + \ - "): change label_bounds or set " + \ - "label_lo = True") - end if - resPol@gsFillColor = round(pos(lidx), 3) - end if - - if (l_square) then - xx = (/imod * xbsize, (imod + 1) * xbsize, \ - (imod + 1) * xbsize, imod * xbsize, imod * xbsize/) - yy = (/idiag * ybsize, idiag * ybsize, (idiag + 1) * ybsize, \ - (idiag + 1) * ybsize, idiag * ybsize/) - poly(idiag, imod, 0) = \ - gsn_add_polygon(wks, plot, xx, yy, resPol) - if (flag_values) then - xt = (xx(0) + xx(1)) / 2. - yt = (yy(1) + yy(2)) / 2 - text_v(idiag, imod, 0) = \ - gsn_add_text(wks, plot, \ - sprintf("%4.2f", data(idiag, imod, 0)), \ - xt, yt, resTxt) - delete(xt) - delete(yt) - end if - if (flag_ranks.and..not.ismissing(data(idiag, imod, 0))) then - xt = (xx(0) + xx(1)) / 2. - yt = (yy(1) + yy(2)) / 2. - text_r(idiag, imod, 0) = \ - gsn_add_text(wks, plot, "#" + ranks(idiag, imod, 0), \ - xt, yt, resTxt) - delete(xt) - delete(yt) - end if - delete(xx) - delete(yy) - - else - - ; Primary reference => lower triangle - xx = (/imod * xbsize, (imod + 1) * xbsize, \ - (imod + 1) * xbsize, imod * xbsize/) - yy = (/idiag * ybsize, idiag * ybsize, (idiag + 1) * ybsize, \ - idiag * ybsize/) - poly(idiag, imod, 0) = \ - gsn_add_polygon(wks, plot, xx, yy, resPol) - if (flag_values) then - xt = sum(xx(0:2)) / 3. - yt = sum(yy(0:2)) / 3. - text_v(idiag, imod, 0) = \ - gsn_add_text(wks, plot, \ - sprintf("%4.2f", data(idiag, imod, 0)), \ - xt, yt, resTxt) - delete(xt) - delete(yt) - end if - if (flag_ranks.and..not.ismissing(data(idiag, imod, 0))) then - xt = sum(xx(0:2)) / 3. - yt = sum(yy(0:2)) / 3. - text_r(idiag, imod, 0) = \ - gsn_add_text(wks, plot, "#" + ranks(idiag, imod, 0), \ - xt, yt, resTxt) - delete(xt) - delete(yt) - end if - delete(xx) - delete(yy) - - ; Find corresponding color for alternative reference - curr = data(idiag, imod, 1) - if (ismissing(curr)) then - resPol@gsFillColor = 0 - else - lidx = max(ind(curr.ge.levels)) - if (label_lo) then - lidx = lidx + 1 - lidx = where(ismissing(lidx), 0, lidx) - end if - if (.not.label_hi .and. lidx.gt.npos - 1) then - error_msg("f", scriptname, funcname, \ - "metric value (" + curr + ") above the " + \ - "label upper bound (" + \ - levels(nlevels - 1) + \ - "): change label_bounds or set " + \ - "label_hi = True") - end if - if (ismissing(lidx)) then - error_msg("f", scriptname, funcname, \ - "metric value (" + curr + ") below the " + \ - "label lower bound (" + levels(0) + \ - "): change label_bounds or set " + \ - "label_lo = True") - end if - resPol@gsFillColor = round(pos(lidx), 3) - end if - - ; Alternative reference => upper triangle - xx = (/imod * xbsize, (imod + 1) * xbsize, \ - imod * xbsize, imod * xbsize/) - yy = (/idiag * ybsize, (idiag + 1) * ybsize, \ - (idiag + 1) * ybsize, idiag * ybsize/) - poly(idiag, imod, 1) = \ - gsn_add_polygon(wks, plot, xx, yy, resPol) - if (flag_values) then - xt = sum(xx(0:2)) / 3. - yt = sum(yy(0:2)) / 3. - text_v(idiag, imod, 1) = \ - gsn_add_text(wks, plot, \ - sprintf("%4.2f", data(idiag, imod, 1)), \ - xt, yt, resTxt) - delete(xt) - delete(yt) - end if - if (flag_ranks.and..not.ismissing(data(idiag, imod, 1))) then - xt = sum(xx(0:2)) / 3. 
-          yt = sum(yy(0:2)) / 3.
-          text_r(idiag, imod, 1) = \
-            gsn_add_text(wks, plot, "#" + ranks(idiag, imod, 1), \
-                         xt, yt, resTxt)
-          delete(xt)
-          delete(yt)
-        end if
-        delete(xx)
-        delete(yy)
-
-        end if
-        delete(resPol@gsFillColor)
-      end do
+      end if
+      delete(resPol@gsFillColor)
    end do
-
-    res@poly = poly
-    if (isdefined("text_v")) then
-      res@text_v = text_v
-    end if
-    if (isdefined("text_r")) then
-      res@text_v = text_r
-    end if
-
-    ; Add legend text for the diagnostics
-    if(isatt(diag_script_info, "scale_font")) then
-      scale_font = diag_script_info@scale_font
+  end do
+
+  res@poly = poly
+  if (isdefined("text_v")) then
+    res@text_v = text_v
+  end if
+  if (isdefined("text_r")) then
+    res@text_r = text_r
+  end if
+
+  ; Add legend text for the diagnostics
+  if (isatt(diag_script_info, "scale_font")) then
+    scale_font = diag_script_info@scale_font
+  else
+    scale_font = 1.
+  end if
+  resTxt@txFontHeightF = min((/0.012 * scale_font, \
+                               0.57 * scale_font / max((/ndiag, nmod/))/))
+  resTxt@txJust = "CenterRight"
+  step = res@vpHeightF / ndiag
+  xpos = 0.195
+  ypos = 0.2 + step / 2.
+  do idiag = 0, ndiag - 1
+    if (isatt(data, "diag_label")) then
+      gsn_text_ndc(wks, data@diag_label(idiag), xpos, ypos, resTxt)
    else
-      scale_font = 1.
+      gsn_text_ndc(wks, data&diagnostics(idiag), xpos, ypos, resTxt)
    end if
-    resTxt@txFontHeightF = min((/0.012 * scale_font, \
-                                 0.57 * scale_font / max((/ndiag, nmod/))/))
-    resTxt@txJust = "CenterRight"
-    step = res@vpHeightF / ndiag
-    xpos = 0.195
-    ypos = 0.2 + step / 2.
-    do idiag = 0, ndiag - 1
-      gsn_text_ndc(wks, data&diagnostics(idiag), xpos, ypos, resTxt)
-      ypos = ypos + step
-    end do
-
-    ; Add text for the models
-    resTxt@txAngleF = 90.
-    step = res@vpWidthF / nmod
-    resTxt@txJust = "CenterRight"
-    xpos = 0.2 + step / 2.
-    ypos = 0.19
-    do imod = 0, nmod - 1
-      gsn_text_ndc(wks, data&models(imod), xpos, ypos, resTxt)
-      xpos = xpos + step
+    ypos = ypos + step
+  end do
+
+  ; Add text for the models
+  resTxt@txAngleF = 45.
+  resTxt@txFontHeightF = min((/0.12 * scale_font, \
+                               0.37 * scale_font / max((/ndiag, nmod/))/))
+  step = res@vpWidthF / nmod
+  resTxt@txJust = "CenterRight"
+  xposs = 0.2 + (x_space+.5)*(res@vpWidthF/(nmod+2.*nbreak))
+  yposs = 0.19
+  do imod = 0, nmod - 1
+    gsn_text_ndc(wks, data&models(imod), xposs(imod), yposs, resTxt)
+  end do
+
+  ; Add subtitles
+  if (isatt(diag_script_info, "project_order")) then
+    titles = diag_script_info@project_order
+    resTxt@txAngleF = 0.
+    resTxt@txFontHeightF = 0.008
+    resTxt@txJust = "CenterLeft"
+    ypos = ypos + 0.005
+    gsn_text_ndc(wks, titles(0), xposs(0), ypos, resTxt)
+    do ititle = 1, dimsizes(titles) - 1
+      gsn_text_ndc(wks, titles(ititle), xposs(data@breakpoints(ititle-1)+1), \
+                   ypos, resTxt)
    end do
+  end if

-    ; Draw a labelbar
+  ; Draw a labelbar
+  if (.not.isatt(data, "no_labelbar") .or. \
+      (isatt(data, "no_labelbar") .and. data@no_labelbar .eq. False)) then
    resTxt@txFontHeightF = 0.015 * scale_font
    resTxt@txAngleF = 0.
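+    ; level labels are placed horizontally, just right of each labelbar box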
    resTxt@txJust = "CenterLeft"
@@ -552,54 +580,278 @@ begin
   il = 0
   do ii = 0, npos - 1
-      resPol@gsFillColor = pos(ii)
-      if (ii.eq.0) then
-        if (label_lo) then
-          gsn_polygon_ndc(wks, (/xpm, xp1, xp2, xpm/),\
-                          (/ypos, ypos + height, \
-                          ypos + height, ypos/), resPol)
-        else
-          gsn_polygon_ndc(wks, (/xp1, xp1, xp2, xp2, xp1/),\
-                          (/ypos, ypos + height, \
-                          ypos + height, ypos, ypos/), resPol)
-          gsn_text_ndc(wks, sprintf("%4.2f", levels(il)), \
-                       xp2 + 0.01, ypos, resTxt)
-          il = il + 1
-        end if
-      else if (ii.eq.(npos - 1)) then
-        if (label_hi) then
-          gsn_polygon_ndc(wks, (/xp1, xpm, xp2, xp1/), \
-                          (/ypos, ypos + height, ypos, ypos/), resPol)
-          gsn_text_ndc(wks, sprintf("%4.2f", levels(il)), \
-                       xp2 + 0.01, ypos, resTxt)
-          il = il + 1
-        else
-          gsn_polygon_ndc(wks, (/xp1, xp1, xp2, xp2, xp1/),\
-                          (/ypos, ypos + height, \
-                          ypos + height, ypos, ypos/), resPol)
-          gsn_text_ndc(wks, sprintf("%4.2f", levels(il)), \
-                       xp2 + 0.01, ypos, resTxt)
-          il = il + 1
-          gsn_text_ndc(wks, sprintf("%4.2f", levels(il)), \
-                       xp2 + 0.01, ypos + height, resTxt)
-          il = il + 1
-        end if
+      resPol@gsFillColor = pos(ii)
+      if (ii.eq.0) then
+        if (label_lo) then
+          gsn_polygon_ndc(wks, (/xpm, xp1, xp2, xpm/),\
+                          (/ypos, ypos + height, \
+                          ypos + height, ypos/), resPol)
        else
-        gsn_polygon_ndc(wks, (/xp1, xp1, xp2, xp2, xp1/), \
-                        (/ypos, ypos + height, \
-                        ypos + height, ypos, ypos/), \
-                        resPol)
-        gsn_text_ndc(wks, sprintf("%4.2f", levels(il)), \
-                     xp2 + 0.01, ypos, resTxt)
-        il = il + 1
+          gsn_polygon_ndc(wks, (/xp1, xp1, xp2, xp2, xp1/),\
+                          (/ypos, ypos + height, \
+                          ypos + height, ypos, ypos/), resPol)
+          gsn_text_ndc(wks, sprintf("%4.2f", levels(il)), \
+                       xp2 + 0.01, ypos, resTxt)
+          il = il + 1
        end if
+      elseif (ii.eq.(npos - 1)) then
+        if (label_hi) then
+          gsn_polygon_ndc(wks, (/xp1, xpm, xp2, xp1/), \
+                          (/ypos, ypos + height, ypos, ypos/), resPol)
+          gsn_text_ndc(wks, sprintf("%4.2f", levels(il)), \
+                       xp2 + 0.01, ypos, resTxt)
+          il = il + 1
+        else
+          gsn_polygon_ndc(wks, (/xp1, xp1, xp2, xp2, xp1/),\
+                          (/ypos, ypos + height, \
+                          ypos + height, ypos, ypos/), resPol)
+          gsn_text_ndc(wks, sprintf("%4.2f", levels(il)), \
+                       xp2 + 0.01, ypos, resTxt)
+          il = il + 1
+          gsn_text_ndc(wks, sprintf("%4.2f", levels(il)), \
+                       xp2 + 0.01, ypos + height, resTxt)
+          il = il + 1
        end if
-      end if
-      ypos = ypos + height
+      else
+        gsn_polygon_ndc(wks, (/xp1, xp1, xp2, xp2, xp1/), \
+                        (/ypos, ypos + height, ypos + height, ypos, ypos/), \
+                        resPol)
+        gsn_text_ndc(wks, sprintf("%4.2f", levels(il)), \
+                     xp2 + 0.01, ypos, resTxt)
+        il = il + 1
+      end if
+      ypos = ypos + height
   end do

-  draw(plot)
-  frame(wks)
+  end if
+
+  draw(plot)
+  frame(wks)
+
+  leave_msg(scriptname, funcname)
+  return(plot)
+
+end
+
+; #############################################################################
+
+undef("circle_plot")
+function circle_plot(wks_in[1],
+                     source,
+                     varname[1]: string,
+                     ensname[1]: string)
+;
+; Arguments
+;   wks_in: workstation (graphic object or default will be used).
+;   source: data to be plotted or a NetCDF filename with data.
+;   varname: variable name in the file.
+;   ensname: ensemble/project name, drawn as a label on the plot.
+;
+; Source prototype
+;   source = (nmod, 2)
+;   source!0 = models
+;   source!1 = statistic
+;   source(:, 0) = SMPI
+;   source(:, 1) = 95% conf radius
+;
+; Return value
+;   A graphic variable.
+;
+; Caveats
+;
+; Modification history
+;   20170424-gier_bettina: written.
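+;
+; Usage example
+;   A minimal sketch (not part of the original interface notes; the
+;   names nmod, model_names and wks are illustrative):
+;
+;     smpi = new((/nmod, 2/), float)
+;     smpi!0 = "models"
+;     smpi!1 = "statistic"
+;     smpi&models = model_names
+;     smpi@var = "smpi"
+;     smpi@diag_script = DIAG_SCRIPT
+;     plot = circle_plot(wks, smpi, "default", "CMIP5")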
+; +local funcname, scriptname, verbosity, wks, wks_in, data, source, res, atts, \ + base, varname +begin - leave_msg(scriptname, funcname) - return(plot) + funcname = "circle_plot" + scriptname = "plot_scripts/ncl/portrait_plot.ncl" + enter_msg(scriptname, funcname) + + ; Get data, either directly or via netCDF file + if (typeof(source) .eq. "string") then + data = ncdf_read(source, varname) + else + data = source + copy_VarMeta(source, data) + end if + + ; Use -999 as fill value (easier to display as text) + data@_FillValue = -999. + + ; Retrieve basic metadata from data + defaults = (/"default", "dummy", "dummy_for_var", "Default", "Dummy"/) + if (any(varname.eq.defaults)) then + performance_index = att2var(data, "var") + DIAG_SCRIPT = att2var(data, "diag_script") + else + performance_index = varname + end if + + ; Check if a valid wks has been provided, otherwise invoke default + wks = get_wks(wks_in, DIAG_SCRIPT, performance_index) + + ; Make list of markers for different projects + symbols_1 = array_append_record(inttochar(ispan(97, 122, 1)), \ + inttochar(ispan(65, 90, 1)), 0) + + ; Greek alphabet + symbols_2 = new(dimsizes(symbols_1), string) + do ii = 0, dimsizes(symbols_1) - 1 + symbols_2(ii) = "~F5~" + symbols_1(ii) + "~F~" + end do + + n_projects = 1 + x_min = min((/0.38, min(data(:, 0) - data(:, 1))-0.05/)) + x_max = max(data(:, 0) + data(:, 1)) + 0.25 + y_max = n_projects + 2 + y_min = n_projects - 2 + + ; Default resources + res = True + res@xyXStyle = "Log" + res@gsnDraw = False + res@gsnFrame = False + res@tmBorderThicknessF = False + res@tmXBBorderOn = False + res@tmXTBorderOn = False + res@tmYLBorderOn = False + res@tmYRBorderOn = False + res@tmXBOn = False + res@tmXTOn = False + res@tmYLOn = False + res@tmYROn = False + res@trXMinF = x_min + res@trXMaxF = x_max + res@trYMinF = y_min + res@trYMaxF = y_max + res@vpXF = 0.1 + res@vpWidthF = 0.9 + res@vpYF = 1.0 + res@vpHeightF = 0.4 + res@tiXAxisString = "I~S~2" + res@tiXAxisOffsetYF = 0.1 + + res_lines = True + res_lines@gsLineDashPattern = 0. + res_lines@gsLineThicknessF = 2. 
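+  ; "PreDraw" paints these attached lines before (beneath) the plot body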
+  res_lines@tfPolyDrawOrder = "PreDraw"
+
+  res_circles = True
+  res_circles@gsEdgesOn = True
+  res_circles@gsEdgeThicknessF = 1.5
+
+  res_text = True
+  res_text@txFontHeightF = 0.02
+
+  res_marker = True
+  res_marker@gsMarkerSizeF = 0.02
+  res_marker@gsMarkerIndex = 1
+  res_marker@gsMarkerColor = "black"
+  res_marker@tfPolyDrawOrder = "Draw"
+  res_marker@txFontHeightF = 0.015
+  res_marker@txFontThicknessF = 2
+
+  plot = gsn_csm_xy(wks, (/x_min, x_max/), (/1., 1./), res)
+
+  ; Draw baseline
+  plot@$unique_string("dum")$ = \
+    gsn_add_polyline(wks, plot, (/x_min, x_max/), (/1., 1./), res_lines)
+
+  ; Draw ticks on baseline
+  x_ticks = 0.1 * ispan(toint(x_min * 10), toint(x_max * 10), 1)
+  x_big_ticks = 0.1 * ispan(0, toint(x_max * 10), 5)
+  x_numbers = ispan(toint(x_min) + 1, toint(x_max), 1)
+
+  do ticks = 0, dimsizes(x_ticks) - 1
+    plot@$unique_string("dum")$ = \
+      gsn_add_polyline(wks, plot, (/x_ticks(ticks), x_ticks(ticks)/), \
+                       (/0.95, 1.05/), res_lines)
+  end do
+
+  do ticks = 0, dimsizes(x_big_ticks) - 1
+    plot@$unique_string("dum")$ = \
+      gsn_add_polyline(wks, plot, \
+                       (/x_big_ticks(ticks), x_big_ticks(ticks)/), \
+                       (/0.85, 1.15/), res_lines)
+  end do
+
+  do ticks = 0, dimsizes(x_numbers) - 1
+    plot@$unique_string("dum")$ = \
+      gsn_add_text(wks, plot, tostring(x_numbers(ticks)), \
+                   x_numbers(ticks), 0.2, res_text)
+  end do
+
+  ; Add models as circles; transform x-coords into NDC space
+  ; so the circles stay round despite the logarithmic x-axis
+  n = 50  ; number of points used to draw each circle
+  do imod = 0, dimsizes(data&models) - 1
+    x_in = (/data(imod, 0), data(imod, 0) + data(imod, 1), data(imod, 0)/)
+    y_in = (/1., 1., 1.26/)
+    x_ndc = new(3, float)
+    y_ndc = new(3, float)
+    datatondc(plot, x_in, y_in, x_ndc, y_ndc)
+    mod_r = x_ndc(1) - x_ndc(0)
+    x_1 = fspan(-mod_r, mod_r, n)
+    y_1 = sqrt(mod_r ^ 2 - x_1 ^ 2)
+    x = array_append_record(x_1, x_1(::-1), 0)
+    y = array_append_record(y_1, -y_1(::-1), 0)
+    x = x + x_ndc(0)
+    y = y + y_ndc(0)
+    if (data&models(imod).eq."MultiModelMean") then
+      res_circles@gsFillColor = "black"
+      res_circles@gsFillOpacityF = 0.8
+    else
+      res_circles@gsFillColor = "orange"
+      res_circles@gsFillOpacityF = 0.3
+    end if
+    gsn_polygon_ndc(wks, x, y, res_circles)
+    gsn_polymarker_ndc(wks, x_ndc(0), y_ndc(0), res_marker)
+    delete([/mod_r, x_1, y_1, x, y, x_ndc, y_ndc, x_in, y_in/])
+  end do
+
+  ; Plot average I^2 and Project name (Multi Model Mean at the end)
+  x_in = (/1., 1./)  ; arbitrary value
+  y_in = (/1., 1./)
+  x_ndc = new(2, float)
+  y_ndc = new(2, float)
+  datatondc(plot, x_in, y_in, x_ndc, y_ndc)
+  res_text@txJust = "CenterLeft"
+  gsn_text_ndc(wks, ensname, 0.01, y_ndc(0), res_text)
+  delete([/x_in, y_in, x_ndc, y_ndc/])
+
+  ; Draw the text symbols after all circles, in case they fall inside a circle
+  x_in = data(:, 0)
+  y_in = new(dimsizes(data(:, 0)), float)
+  y_in(:) = max(data(:, 1)) + 1.5
+  x_ndc = new(dimsizes(data(:, 0)), float)
+  y_ndc = new(dimsizes(data(:, 0)), float)
+  datatondc(plot, x_in, y_in, x_ndc, y_ndc)
+
+  do imod = 0, dimsizes(data&models) - 1
+    gsn_text_ndc(wks, symbols_2(imod), x_ndc(imod), y_ndc(imod), res_marker)
+  end do
+  delete([/x_in, y_in, x_ndc, y_ndc/])
+
+  ; Draw legend
+  txres = True
+  txres@txFontHeightF = 0.015
+  txres@txJust = "CenterLeft"
+  ypos = fspan(0.55, 0.1, 20)
+  half = toint(dimsizes(data&models) / 2)
+  do imod = 0, half
+    gsn_text_ndc(wks, symbols_2(imod) + ": " + \
+                 data&models(imod), 0.01, ypos(imod), txres)
+  end do
+  do imod = half + 1, dimsizes(data&models) - 1
+    gsn_text_ndc(wks, symbols_2(imod) + ": " + \
+                 data&models(imod), 0.3, ypos(imod
- half - 1), txres) + end do + + draw(plot) + frame(wks) + + leave_msg(scriptname, funcname) + return(plot) end diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/categorical_fig5.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/categorical_fig5.rgb new file mode 100644 index 0000000000..7bc87ea620 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/categorical_fig5.rgb @@ -0,0 +1,8 @@ +255 255 255 +0 0 0 +139 26 26 +139 105 20 +39 64 139 +0 255 255 +205 150 205 +130 130 130 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/cmip_line.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/cmip_line.rgb new file mode 100644 index 0000000000..d54cc3c672 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/cmip_line.rgb @@ -0,0 +1,6 @@ +ncolors=3 + +#R G B +37 81 204 +204 35 35 +30 150 132 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/cmip_shading.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/cmip_shading.rgb new file mode 100644 index 0000000000..ee8360e363 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/cmip_shading.rgb @@ -0,0 +1,6 @@ +ncolors=3 + +#R G B +153 204 255 +255 153 153 +153 255 204 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_blue_multiple_hue_non-diverging_03.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_blue_multiple_hue_non-diverging_03.rgb new file mode 100644 index 0000000000..a1e0a1b219 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_blue_multiple_hue_non-diverging_03.rgb @@ -0,0 +1,6 @@ +ncolors=3 + +#R G B +237 248 177 +127 205 187 +44 127 184 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_blue_multiple_hue_non-diverging_04.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_blue_multiple_hue_non-diverging_04.rgb new file mode 100644 index 0000000000..e2d3d4dd22 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_blue_multiple_hue_non-diverging_04.rgb @@ -0,0 +1,7 @@ +ncolors=4 + +#R G B +255 255 204 +161 218 180 +65 182 196 +34 94 168 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_blue_multiple_hue_non-diverging_05.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_blue_multiple_hue_non-diverging_05.rgb new file mode 100644 index 0000000000..868f792eb5 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_blue_multiple_hue_non-diverging_05.rgb @@ -0,0 +1,8 @@ +ncolors=5 + +#R G B +255 255 204 +161 218 180 +65 182 196 +44 127 184 +37 52 148 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_blue_non-diverging_03.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_blue_non-diverging_03.rgb new file mode 100644 index 0000000000..1c67e7a9ab --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_blue_non-diverging_03.rgb @@ -0,0 +1,6 @@ +ncolors=3 + +#R G B +222 253 247 +158 202 225 +49 130 189 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_blue_non-diverging_04.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_blue_non-diverging_04.rgb new file mode 100644 index 0000000000..3e316e6ae4 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_blue_non-diverging_04.rgb @@ -0,0 +1,7 @@ +ncolors=4 + +#R G B +239 243 255 +189 215 231 +107 174 214 +33 113 181 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_blue_non-diverging_05.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_blue_non-diverging_05.rgb new file mode 100644 index 0000000000..9eda812237 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_blue_non-diverging_05.rgb @@ -0,0 +1,8 @@ 
+ncolors=5 + +#R G B +239 243 255 +189 215 231 +107 174 214 +49 130 189 +8 81 156 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_cmip_line.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_cmip_line.rgb new file mode 100644 index 0000000000..b3221f2e80 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_cmip_line.rgb @@ -0,0 +1,6 @@ +ncolors=3 + +#R G B +30 150 132 +37 81 204 +204 35 35 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_cmip_shading.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_cmip_shading.rgb new file mode 100644 index 0000000000..34a2b21b6d --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_cmip_shading.rgb @@ -0,0 +1,6 @@ +ncolors=3 + +#R G B +153 255 204 +153 204 255 +255 153 153 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_damip_line.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_damip_line.rgb new file mode 100644 index 0000000000..9198e1e4a7 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_damip_line.rgb @@ -0,0 +1,7 @@ +ncolors=4 + +#R G B +196 121 0 +0 79 0 +128 128 128 +84 146 205 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_damip_shading.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_damip_shading.rgb new file mode 100644 index 0000000000..62d30f98ad --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_damip_shading.rgb @@ -0,0 +1,7 @@ +ncolors=4 + +#R G B +223 194 125 +169 208 142 +128 128 128 +146 197 222 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_green_multiple_hue_non-diverging_03.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_green_multiple_hue_non-diverging_03.rgb new file mode 100644 index 0000000000..57e429b8e0 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_green_multiple_hue_non-diverging_03.rgb @@ -0,0 +1,6 @@ +ncolors=3 + +#R G B +229 245 224 +161 217 155 +49 163 84 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_green_multiple_hue_non-diverging_04.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_green_multiple_hue_non-diverging_04.rgb new file mode 100644 index 0000000000..b39169004e --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_green_multiple_hue_non-diverging_04.rgb @@ -0,0 +1,7 @@ +ncolors=4 + +#R G B +237 248 233 +186 228 179 +116 196 118 +35 139 69 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_green_multiple_hue_non-diverging_05.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_green_multiple_hue_non-diverging_05.rgb new file mode 100644 index 0000000000..7d6fb308b5 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_green_multiple_hue_non-diverging_05.rgb @@ -0,0 +1,8 @@ +ncolors=5 + +#R G B +237 248 233 +186 228 179 +116 196 118 +49 163 84 +0 109 44 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_green_non-diverging_03.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_green_non-diverging_03.rgb new file mode 100644 index 0000000000..57e429b8e0 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_green_non-diverging_03.rgb @@ -0,0 +1,6 @@ +ncolors=3 + +#R G B +229 245 224 +161 217 155 +49 163 84 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_green_non-diverging_04.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_green_non-diverging_04.rgb new file mode 100644 index 0000000000..b39169004e --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_green_non-diverging_04.rgb @@ -0,0 +1,7 @@ 
+ncolors=4 + +#R G B +237 248 233 +186 228 179 +116 196 118 +35 139 69 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_green_non-diverging_05.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_green_non-diverging_05.rgb new file mode 100644 index 0000000000..7d6fb308b5 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_green_non-diverging_05.rgb @@ -0,0 +1,8 @@ +ncolors=5 + +#R G B +237 248 233 +186 228 179 +116 196 118 +49 163 84 +0 109 44 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_line_01.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_line_01.rgb new file mode 100644 index 0000000000..39eda0da54 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_line_01.rgb @@ -0,0 +1,4 @@ +ncolors=1 + +#R G B +0 0 0 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_line_02.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_line_02.rgb new file mode 100644 index 0000000000..4ab63d21e7 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_line_02.rgb @@ -0,0 +1,5 @@ +ncolors=2 + +#R G B +0 0 0 +84 146 205 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_line_03.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_line_03.rgb new file mode 100644 index 0000000000..3bed0139fe --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_line_03.rgb @@ -0,0 +1,7 @@ +ncolors=3 + +#R G B +0 0 0 +84 146 205 +196 121 0 + diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_line_04.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_line_04.rgb new file mode 100644 index 0000000000..0008a53aa8 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_line_04.rgb @@ -0,0 +1,8 @@ +ncolors=4 + +#R G B +0 0 0 +84 146 205 +196 121 0 +128 128 128 + diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_line_05.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_line_05.rgb new file mode 100644 index 0000000000..f5196b1058 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_line_05.rgb @@ -0,0 +1,8 @@ +ncolors=5 + +#R G B +0 0 0 +84 146 205 +196 121 0 +128 128 128 +0 52 102 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_line_06.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_line_06.rgb new file mode 100644 index 0000000000..920e22ee31 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_line_06.rgb @@ -0,0 +1,9 @@ +ncolors=6 + +#R G B +0 0 0 +84 146 205 +196 121 0 +128 128 128 +0 52 102 +0 79 0 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_line_shading.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_line_shading.rgb new file mode 100644 index 0000000000..54d44b6ef7 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_line_shading.rgb @@ -0,0 +1,9 @@ +ncolors=6 + +#R G B +128 128 128 +146 197 222 +223 194 125 +191 191 191 +67 147 195 +169 208 142 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_misc_div.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_misc_div.rgb new file mode 100644 index 0000000000..2d0f01716c --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_misc_div.rgb @@ -0,0 +1,259 @@ +ncolors=256 + +#R G B +8 29 88 +9 30 91 +11 31 95 +13 33 99 +15 34 103 +17 36 106 +18 37 110 +20 39 114 +22 40 118 +24 42 121 +26 43 125 +28 44 129 +29 46 133 +31 47 137 +33 49 140 +35 50 144 +36 52 148 +36 54 149 +36 57 150 +36 60 151 +36 62 153 +36 65 154 +35 68 155 +35 70 156 +35 73 158 +35 76 159 +35 78 160 +34 81 161 +34 83 163 +34 
86 164 +34 89 165 +34 91 166 +33 94 168 +33 97 169 +33 101 171 +32 104 172 +32 107 174 +32 110 175 +32 113 177 +31 117 178 +31 120 180 +31 123 181 +30 126 183 +30 129 184 +30 133 186 +29 136 187 +29 139 189 +29 142 190 +29 145 192 +31 147 192 +34 150 192 +36 152 192 +38 154 193 +40 157 193 +43 159 193 +45 161 193 +47 164 194 +50 166 194 +52 168 194 +54 171 194 +56 173 195 +59 175 195 +61 178 195 +63 180 195 +66 182 195 +70 183 195 +74 185 194 +78 186 194 +82 188 193 +85 189 192 +89 191 192 +93 192 191 +97 194 191 +101 195 190 +105 197 190 +109 198 189 +113 199 188 +117 201 188 +121 202 187 +124 204 187 +129 205 186 +133 207 186 +138 209 185 +142 211 185 +147 212 185 +151 214 184 +156 216 184 +160 218 183 +165 219 183 +169 221 182 +174 223 182 +178 225 181 +183 226 181 +187 228 181 +192 230 180 +197 232 180 +200 233 179 +202 234 179 +205 235 179 +207 236 179 +209 237 179 +212 238 178 +214 239 178 +217 240 178 +219 241 178 +221 242 178 +224 242 178 +226 243 177 +229 244 177 +231 245 177 +233 246 177 +236 247 177 +237 248 178 +238 248 181 +240 249 183 +241 249 186 +242 250 188 +243 250 191 +244 250 193 +245 251 196 +246 251 198 +247 252 201 +249 252 203 +250 253 206 +251 253 208 +252 254 211 +253 254 213 +254 254 216 +255 254 203 +255 253 200 +255 252 197 +255 251 195 +255 250 192 +255 249 189 +255 247 186 +255 246 183 +255 245 181 +255 244 178 +255 243 175 +255 242 172 +255 241 170 +255 240 167 +255 238 164 +255 237 161 +254 236 159 +254 235 156 +254 234 153 +254 232 151 +254 231 148 +254 230 145 +254 229 143 +254 227 140 +254 226 137 +254 225 135 +254 223 132 +254 222 130 +254 221 127 +254 220 124 +254 218 122 +254 217 119 +254 215 116 +254 213 114 +254 211 111 +254 208 108 +254 206 106 +254 203 103 +254 201 101 +254 198 98 +254 196 95 +254 193 93 +254 191 90 +254 188 87 +254 186 85 +254 184 82 +254 181 79 +254 179 77 +253 176 75 +253 174 74 +253 172 73 +253 169 72 +253 167 71 +253 165 70 +253 162 69 +253 160 68 +253 158 67 +253 155 66 +253 153 65 +253 151 64 +253 148 63 +253 146 62 +253 144 61 +253 141 60 +252 138 59 +252 134 58 +252 130 57 +252 126 55 +252 122 54 +252 118 53 +252 114 52 +252 110 51 +252 106 50 +252 102 49 +252 98 47 +252 94 46 +252 90 45 +252 87 44 +252 83 43 +252 79 42 +250 75 41 +249 72 40 +247 69 39 +246 65 38 +244 62 37 +243 59 36 +241 56 36 +239 52 35 +238 49 34 +236 46 33 +235 42 32 +233 39 31 +232 36 30 +230 33 29 +228 29 29 +227 26 28 +225 24 28 +222 23 29 +220 21 29 +217 19 30 +215 18 31 +213 16 31 +210 14 32 +208 13 32 +205 11 33 +203 9 34 +201 8 34 +198 6 35 +196 5 36 +194 3 36 +191 1 37 +189 0 37 +185 0 38 +181 0 38 +177 0 38 +174 0 38 +170 0 38 +166 0 38 +162 0 38 +158 0 38 +154 0 38 +151 0 38 +147 0 38 +143 0 38 +139 0 38 +135 0 38 +131 0 38 +128 0 38 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_misc_seq_1.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_misc_seq_1.rgb new file mode 100644 index 0000000000..4c85c3d037 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_misc_seq_1.rgb @@ -0,0 +1,259 @@ +ncolors=256 + +#R G B +25 51 178 +26 51 178 +26 52 177 +27 53 177 +27 53 177 +28 54 176 +28 55 176 +28 56 176 +29 56 175 +29 57 175 +29 58 175 +30 58 174 +30 59 174 +31 60 174 +31 60 173 +31 61 173 +32 62 172 +32 62 172 +32 63 172 +33 64 171 +33 65 171 +33 65 171 +34 66 170 +34 67 170 +34 67 170 +35 68 169 +35 69 169 +35 69 169 +36 70 168 +36 71 168 +36 71 168 +37 72 167 +37 72 167 +37 73 167 +38 74 166 +38 74 166 +38 75 166 +39 76 165 +39 76 165 +39 77 165 +40 78 164 +40 78 164 +40 79 164 +40 80 163 +41 80 163 +41 81 
163 +41 82 162 +42 82 162 +42 83 162 +42 84 161 +43 84 161 +43 85 161 +43 86 160 +44 86 160 +44 87 160 +44 88 159 +45 88 159 +45 89 159 +45 90 158 +46 90 158 +46 91 158 +46 91 157 +47 92 157 +47 93 157 +48 93 156 +48 94 156 +48 95 156 +49 95 155 +49 96 155 +49 97 154 +50 97 154 +50 98 154 +51 98 153 +51 99 153 +52 100 152 +52 100 152 +52 101 152 +53 101 151 +53 102 151 +54 103 150 +54 103 150 +55 104 149 +55 104 149 +56 105 148 +56 106 148 +57 106 147 +57 107 147 +58 107 146 +58 108 146 +59 109 145 +59 109 145 +60 110 144 +60 110 144 +61 111 143 +61 111 143 +62 112 142 +63 112 141 +63 113 141 +64 114 140 +64 114 140 +65 115 139 +65 115 139 +66 116 138 +66 117 138 +67 117 137 +68 118 137 +68 118 136 +69 119 136 +69 120 135 +70 120 135 +71 121 134 +71 121 134 +72 122 133 +72 123 133 +73 123 132 +74 124 132 +74 125 131 +75 125 131 +76 126 130 +77 127 130 +77 128 129 +78 128 129 +79 129 129 +79 130 128 +80 131 128 +81 131 127 +82 132 127 +83 133 127 +83 134 126 +84 135 126 +85 135 126 +86 136 125 +87 137 125 +88 138 125 +88 139 125 +89 140 124 +90 141 124 +91 141 124 +92 142 123 +93 143 123 +94 144 123 +95 145 123 +96 146 122 +96 147 122 +97 148 122 +98 149 122 +99 150 121 +100 151 121 +101 152 121 +102 153 121 +103 153 121 +104 154 120 +105 155 120 +106 156 120 +107 157 120 +108 158 119 +109 159 119 +110 160 119 +111 161 119 +112 162 118 +113 163 118 +114 164 118 +115 165 118 +115 166 117 +116 167 117 +117 168 117 +118 169 117 +119 170 116 +120 171 116 +121 172 116 +122 173 116 +123 174 115 +124 175 115 +125 176 115 +126 177 115 +127 178 114 +128 179 114 +130 180 114 +131 181 114 +132 182 113 +133 183 113 +134 184 113 +135 185 113 +136 186 112 +137 187 112 +138 189 112 +139 190 112 +140 191 111 +141 192 111 +142 193 111 +143 194 111 +144 195 110 +145 196 110 +146 197 110 +147 198 109 +148 199 109 +150 200 109 +151 201 109 +152 202 108 +153 204 108 +154 205 108 +155 206 108 +156 207 107 +158 208 107 +159 209 107 +160 210 107 +161 211 106 +163 212 106 +164 213 106 +165 215 106 +167 216 105 +168 217 105 +170 218 105 +171 219 105 +173 220 104 +174 221 104 +176 222 104 +177 223 104 +179 224 104 +181 225 103 +183 226 103 +184 227 103 +186 228 103 +188 229 103 +190 230 103 +192 231 103 +194 232 102 +196 233 102 +198 234 102 +200 235 102 +202 236 102 +204 237 102 +206 237 102 +208 238 102 +210 239 102 +212 240 102 +214 241 102 +217 241 102 +219 242 102 +221 243 102 +223 244 102 +225 245 102 +227 245 102 +229 246 102 +231 247 102 +233 247 102 +236 248 102 +238 249 102 +240 250 102 +242 250 102 +244 251 102 +246 252 102 +248 252 102 +250 253 102 +252 254 102 +255 254 102 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_precipitation_05.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_precipitation_05.rgb new file mode 100644 index 0000000000..90c77a31c1 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_precipitation_05.rgb @@ -0,0 +1,8 @@ +ncolors=5 + +#R G B +166 67 26 +223 194 125 +245 245 245 +128 205 193 +1 133 113 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_precipitation_06.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_precipitation_06.rgb new file mode 100644 index 0000000000..244242dce4 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_precipitation_06.rgb @@ -0,0 +1,9 @@ +ncolors=6 + +#R G B +140 81 10 +216 179 101 +246 232 195 +199 234 229 +90 180 172 +1 102 94 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_precipitation_07.rgb 
b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_precipitation_07.rgb new file mode 100644 index 0000000000..fffc8c501f --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_precipitation_07.rgb @@ -0,0 +1,10 @@ +ncolors=7 + +#R G B +140 81 10 +216 179 101 +246 232 195 +245 245 245 +199 234 229 +90 180 172 +1 102 94 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_precipitation_08.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_precipitation_08.rgb new file mode 100644 index 0000000000..34b6382e48 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_precipitation_08.rgb @@ -0,0 +1,11 @@ +ncolors=8 + +#R G B +140 81 10 +191 129 45 +223 194 125 +246 232 195 +199 234 229 +128 205 193 +53 151 143 +1 102 94 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_precipitation_09.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_precipitation_09.rgb new file mode 100644 index 0000000000..181c96bbf5 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_precipitation_09.rgb @@ -0,0 +1,12 @@ +ncolors=9 + +#R G B +140 81 10 +191 129 45 +223 194 125 +246 232 195 +245 245 245 +199 234 229 +128 205 193 +53 151 143 +1 102 94 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_precipitation_10.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_precipitation_10.rgb new file mode 100644 index 0000000000..e7c70395f6 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_precipitation_10.rgb @@ -0,0 +1,13 @@ +ncolors=10 + +#R G B +84 48 5 +140 81 10 +191 129 45 +223 194 125 +246 232 195 +199 234 229 +128 205 193 +53 151 143 +1 102 94 +0 60 48 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_precipitation_11.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_precipitation_11.rgb new file mode 100644 index 0000000000..6e4c8f8872 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_precipitation_11.rgb @@ -0,0 +1,14 @@ +ncolors=11 + +#R G B +84 48 5 +140 81 10 +191 129 45 +223 194 125 +246 232 195 +245 245 245 +199 234 229 +128 205 193 +53 151 143 +1 102 94 +0 60 48 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_precipitation_div.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_precipitation_div.rgb new file mode 100644 index 0000000000..2b8ce8bd34 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_precipitation_div.rgb @@ -0,0 +1,259 @@ +ncolors=256 + +#R G B +84 48 5 +86 49 5 +88 51 6 +90 52 7 +92 54 8 +94 55 8 +96 57 9 +98 59 10 +100 60 11 +102 62 12 +104 63 12 +107 65 13 +109 67 14 +111 68 15 +113 70 15 +115 71 16 +117 73 17 +119 74 18 +121 76 19 +123 78 19 +125 79 20 +128 81 21 +130 82 22 +132 84 23 +134 86 23 +136 87 24 +138 89 25 +140 90 26 +142 92 26 +144 94 27 +146 95 28 +149 97 29 +151 98 30 +153 100 30 +155 101 31 +157 103 32 +159 105 33 +161 106 34 +163 108 34 +165 109 35 +167 111 36 +170 113 37 +172 114 37 +174 116 38 +176 117 39 +178 119 40 +180 121 41 +182 122 41 +184 124 42 +186 125 43 +188 127 44 +191 129 44 +191 130 47 +192 132 50 +193 133 53 +194 135 55 +194 136 58 +195 138 60 +196 139 63 +197 141 66 +197 143 69 +198 144 71 +199 146 74 +200 147 77 +200 149 79 +201 150 82 +202 152 84 +203 154 87 +203 155 90 +204 157 93 +205 158 95 +206 160 98 +206 161 101 +207 163 103 +208 165 106 +209 166 109 +209 168 111 +210 169 114 +211 171 117 +212 172 119 +212 174 122 +213 176 125 +214 177 127 +215 179 130 +216 180 133 +216 182 135 +217 183 138 +218 185 140 +219 187 143 +219 188 146 +220 190 148 +221 191 151 +222 193 154 +222 
194 157 +223 196 159 +224 198 162 +225 199 165 +225 201 167 +226 202 170 +227 204 173 +228 205 175 +228 207 178 +229 209 180 +230 210 183 +231 212 186 +231 213 189 +232 215 191 +233 216 194 +234 218 197 +234 219 199 +235 221 202 +236 223 205 +237 224 207 +238 226 210 +238 227 212 +239 229 215 +240 230 218 +241 232 221 +241 234 223 +242 235 226 +243 237 229 +244 238 231 +244 240 234 +245 241 236 +246 243 239 +247 245 242 +247 246 244 +248 248 247 +247 248 248 +245 247 246 +242 245 245 +240 244 244 +237 243 242 +234 241 241 +232 240 239 +229 239 238 +227 238 237 +224 236 235 +222 235 234 +219 234 233 +216 232 231 +214 231 230 +211 230 228 +209 229 227 +206 227 226 +204 226 224 +201 225 223 +199 224 221 +196 222 220 +193 221 219 +191 220 217 +188 218 216 +186 217 215 +183 216 213 +181 215 212 +178 213 210 +175 212 209 +173 211 208 +170 209 206 +168 208 205 +165 207 203 +163 206 202 +160 204 201 +158 203 199 +155 202 198 +152 200 197 +150 199 195 +147 198 194 +145 197 192 +142 195 191 +140 194 190 +137 193 188 +134 191 187 +132 190 185 +129 189 184 +127 188 183 +124 186 181 +122 185 180 +119 184 179 +117 183 177 +114 181 176 +111 180 174 +109 179 173 +106 177 172 +104 176 170 +101 175 169 +99 174 167 +96 172 166 +93 171 165 +91 170 163 +88 168 162 +86 167 161 +83 166 159 +81 165 158 +78 163 156 +76 162 155 +73 161 154 +70 159 152 +68 158 151 +65 157 149 +63 156 148 +60 154 147 +58 153 145 +55 152 144 +53 151 143 +51 149 141 +50 147 139 +49 145 137 +48 143 135 +47 142 133 +46 140 131 +45 138 129 +44 136 128 +43 134 126 +42 133 124 +41 131 122 +40 129 120 +39 127 118 +38 126 116 +37 124 115 +36 122 113 +35 120 111 +34 118 109 +33 117 107 +32 115 105 +31 113 103 +30 111 102 +29 109 100 +28 108 98 +27 106 96 +25 104 94 +24 102 92 +23 101 90 +22 99 88 +21 97 87 +20 95 85 +19 93 83 +18 92 81 +17 90 79 +16 88 77 +15 86 75 +14 84 74 +13 83 72 +12 81 70 +11 79 68 +10 77 66 +9 76 64 +8 74 62 +7 72 61 +6 70 59 +5 68 57 +4 67 55 +3 65 53 +2 63 51 +1 61 49 +0 60 48 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_precipitation_seq.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_precipitation_seq.rgb new file mode 100644 index 0000000000..bd225af51b --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_precipitation_seq.rgb @@ -0,0 +1,259 @@ +ncolors=256 + +#R G B +255 255 229 +253 254 228 +252 253 227 +251 252 227 +249 252 226 +248 251 226 +247 250 225 +245 250 225 +244 249 224 +243 248 223 +241 248 223 +240 247 222 +239 246 222 +237 246 221 +236 245 221 +235 244 220 +233 244 220 +232 243 219 +231 242 218 +229 242 218 +228 241 217 +227 240 217 +225 240 216 +224 239 216 +223 238 215 +221 238 214 +220 237 214 +219 236 213 +218 235 213 +216 235 212 +215 234 212 +214 233 211 +212 233 211 +211 232 210 +210 231 209 +208 231 209 +207 230 208 +206 229 208 +204 229 207 +203 228 207 +202 227 206 +200 227 205 +199 226 205 +198 225 204 +196 225 204 +195 224 203 +194 223 203 +192 223 202 +191 222 202 +190 221 201 +188 221 200 +187 220 200 +186 219 199 +185 218 199 +183 218 198 +182 217 198 +181 216 197 +179 216 196 +178 215 196 +177 214 195 +175 214 195 +174 213 194 +173 212 194 +171 212 193 +170 211 193 +169 210 192 +167 210 191 +166 209 191 +165 208 190 +163 208 190 +162 207 189 +161 206 189 +159 206 188 +158 205 187 +157 204 187 +155 204 186 +154 203 186 +153 202 185 +152 201 185 +150 201 184 +149 200 184 +148 199 183 +146 199 182 +145 198 182 +144 197 181 +142 197 181 +141 196 180 +140 195 180 +138 195 179 +137 194 178 +136 193 178 +134 193 177 +133 192 177 +132 191 176 +130 191 
176 +129 190 175 +128 189 175 +126 189 174 +125 188 173 +124 187 173 +122 187 172 +121 186 172 +120 185 171 +119 184 171 +117 184 170 +116 183 169 +115 182 169 +113 182 168 +112 181 168 +111 180 167 +109 180 167 +108 179 166 +107 178 166 +105 178 165 +104 177 164 +103 176 164 +101 176 163 +100 175 163 +99 174 162 +97 174 162 +96 173 161 +95 172 160 +93 172 160 +92 171 159 +91 170 159 +89 170 158 +88 169 158 +87 168 157 +86 167 157 +84 167 156 +83 166 155 +82 165 155 +80 165 154 +79 164 154 +78 163 153 +76 163 153 +75 162 152 +74 161 151 +72 161 151 +71 160 150 +70 159 150 +68 159 149 +67 158 149 +66 157 148 +64 157 148 +63 156 147 +62 155 146 +60 155 146 +59 154 145 +58 153 145 +56 153 144 +55 152 144 +54 151 143 +53 151 143 +52 150 142 +51 149 141 +51 148 140 +50 147 139 +50 146 138 +49 145 137 +49 144 136 +48 143 135 +48 142 134 +47 142 133 +47 141 132 +46 140 131 +46 139 130 +45 138 129 +45 137 129 +44 136 128 +44 135 127 +43 134 126 +43 134 125 +42 133 124 +42 132 123 +41 131 122 +41 130 121 +40 129 120 +40 128 119 +39 127 118 +38 126 117 +38 126 116 +37 125 115 +37 124 115 +36 123 114 +36 122 113 +35 121 112 +35 120 111 +34 119 110 +34 118 109 +33 117 108 +33 117 107 +32 116 106 +32 115 105 +31 114 104 +31 113 103 +30 112 102 +30 111 102 +29 110 101 +29 109 100 +28 109 99 +28 108 98 +27 107 97 +27 106 96 +26 105 95 +25 104 94 +25 103 93 +24 102 92 +24 101 91 +23 101 90 +23 100 89 +22 99 88 +22 98 88 +21 97 87 +21 96 86 +20 95 85 +20 94 84 +19 93 83 +19 93 82 +18 92 81 +18 91 80 +17 90 79 +17 89 78 +16 88 77 +16 87 76 +15 86 75 +15 85 75 +14 84 74 +14 84 73 +13 83 72 +12 82 71 +12 81 70 +11 80 69 +11 79 68 +10 78 67 +10 77 66 +9 76 65 +9 76 64 +8 75 63 +8 74 62 +7 73 61 +7 72 61 +6 71 60 +6 70 59 +5 69 58 +5 68 57 +4 68 56 +4 67 55 +3 66 54 +3 65 53 +2 64 52 +2 63 51 +1 62 50 +1 61 49 +0 60 48 +0 60 48 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_purple_multiple_hue_non-diverging_03.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_purple_multiple_hue_non-diverging_03.rgb new file mode 100644 index 0000000000..3f57515271 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_purple_multiple_hue_non-diverging_03.rgb @@ -0,0 +1,6 @@ +ncolors=3 + +#R G B +224 236 244 +158 188 218 +136 86 167 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_purple_multiple_hue_non-diverging_04.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_purple_multiple_hue_non-diverging_04.rgb new file mode 100644 index 0000000000..6a461a0006 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_purple_multiple_hue_non-diverging_04.rgb @@ -0,0 +1,7 @@ +ncolors=4 + +#R G B +237 248 251 +179 205 227 +140 150 198 +136 65 157 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_purple_multiple_hue_non-diverging_05.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_purple_multiple_hue_non-diverging_05.rgb new file mode 100644 index 0000000000..6d557776a3 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_purple_multiple_hue_non-diverging_05.rgb @@ -0,0 +1,8 @@ +ncolors=5 + +#R G B +237 248 251 +179 205 227 +140 150 198 +136 86 167 +129 15 124 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_purple_non-diverging_03.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_purple_non-diverging_03.rgb new file mode 100644 index 0000000000..29886e5a74 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_purple_non-diverging_03.rgb @@ -0,0 +1,6 @@ +ncolors=3 + +#R G B +239 237 245 +188 189 220 +117 107 
177 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_purple_non-diverging_04.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_purple_non-diverging_04.rgb new file mode 100644 index 0000000000..1a985069e4 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_purple_non-diverging_04.rgb @@ -0,0 +1,7 @@ +ncolors=4 + +#R G B +242 240 247 +203 201 226 +158 154 200 +106 81 163 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_purple_non-diverging_05.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_purple_non-diverging_05.rgb new file mode 100644 index 0000000000..949e9ed9be --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_purple_non-diverging_05.rgb @@ -0,0 +1,8 @@ +ncolors=5 + +#R G B +242 240 247 +203 201 226 +158 154 200 +117 107 177 +84 39 143 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_rcp_line.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_rcp_line.rgb new file mode 100644 index 0000000000..fdf209dd44 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_rcp_line.rgb @@ -0,0 +1,7 @@ +ncolors=4 + +#R G B +153 0 2 +196 121 0 +84 146 205 +0 52 102 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_rcp_shading.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_rcp_shading.rgb new file mode 100644 index 0000000000..64d046797e --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_rcp_shading.rgb @@ -0,0 +1,7 @@ +ncolors=4 + +#R G B +252 209 197 +204 174 113 +146 197 222 +67 147 195 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_red_multiple_hue_non-diverging_03.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_red_multiple_hue_non-diverging_03.rgb new file mode 100644 index 0000000000..69e6f6f419 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_red_multiple_hue_non-diverging_03.rgb @@ -0,0 +1,6 @@ +ncolors=3 + +#R G B +254 237 160 +254 178 76 +240 59 32 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_red_multiple_hue_non-diverging_04.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_red_multiple_hue_non-diverging_04.rgb new file mode 100644 index 0000000000..dde89be21d --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_red_multiple_hue_non-diverging_04.rgb @@ -0,0 +1,7 @@ +ncolors=4 + +#R G B +255 255 178 +254 204 92 +253 141 60 +227 26 28 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_red_multiple_hue_non-diverging_05.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_red_multiple_hue_non-diverging_05.rgb new file mode 100644 index 0000000000..709c77877a --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_red_multiple_hue_non-diverging_05.rgb @@ -0,0 +1,8 @@ +ncolors=5 + +#R G B +255 255 178 +254 204 92 +253 141 60 +240 59 32 +189 0 38 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_red_non-diverging_03.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_red_non-diverging_03.rgb new file mode 100644 index 0000000000..b95002a551 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_red_non-diverging_03.rgb @@ -0,0 +1,6 @@ +ncolors=3 + +#R G B +254 224 210 +252 146 116 +222 45 38 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_red_non-diverging_04.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_red_non-diverging_04.rgb new file mode 100644 index 0000000000..432dce8cf0 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_red_non-diverging_04.rgb @@ -0,0 +1,7 @@ +ncolors=4 + +#R 
G B +254 229 217 +252 174 145 +251 106 74 +203 24 29 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_red_non-diverging_05.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_red_non-diverging_05.rgb new file mode 100644 index 0000000000..4405e2bf1c --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_red_non-diverging_05.rgb @@ -0,0 +1,8 @@ +ncolors=5 + +#R G B +254 229 217 +252 174 145 +251 106 74 +222 45 38 +165 15 21 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_05.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_05.rgb new file mode 100644 index 0000000000..26b59492df --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_05.rgb @@ -0,0 +1,8 @@ +ncolors=5 + +#R G B +202 0 32 +244 165 130 +247 247 247 +146 197 222 +5 113 176 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_06.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_06.rgb new file mode 100644 index 0000000000..7b5506c767 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_06.rgb @@ -0,0 +1,9 @@ +ncolors=6 + +#R G B +178 24 43 +239 138 98 +253 178 199 +209 229 240 +103 169 207 +33 102 172 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_07.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_07.rgb new file mode 100644 index 0000000000..3d55ff3526 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_07.rgb @@ -0,0 +1,11 @@ +ncolors=7 + +#R G B +178 24 43 +239 138 98 +253 219 199 +247 247 247 +209 229 240 +103 169 207 +33 102 172 + diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_08.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_08.rgb new file mode 100644 index 0000000000..4dd152962b --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_08.rgb @@ -0,0 +1,12 @@ +ncolors=8 + +#R G B +178 24 43 +214 96 77 +244 165 130 +253 178 199 +209 229 240 +146 197 222 +67 147 195 +33 102 172 + diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_09.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_09.rgb new file mode 100644 index 0000000000..9606a0152d --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_09.rgb @@ -0,0 +1,13 @@ +ncolors=9 + +#R G B +178 24 43 +214 96 77 +244 165 130 +253 178 199 +247 247 247 +209 229 240 +146 197 222 +67 147 195 +33 102 172 + diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_10.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_10.rgb new file mode 100644 index 0000000000..c5c034ad29 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_10.rgb @@ -0,0 +1,13 @@ +ncolors=10 + +#R G B +103 0 31 +178 24 43 +214 96 77 +244 165 130 +253 219 199 +209 229 240 +146 197 222 +67 147 195 +33 102 172 +5 48 97 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_11.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_11.rgb new file mode 100644 index 0000000000..b8e9810478 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_11.rgb @@ -0,0 +1,15 @@ +ncolors = 11 + +# R G B +103 0 31 +178 24 43 +214 96 77 +244 165 130 +253 219 199 +247 247 247 +209 229 240 +146 197 222 +67 147 195 +33 102 172 +5 48 97 + diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_div.rgb 
b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_div.rgb new file mode 100644 index 0000000000..01eeafaf70 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_div.rgb @@ -0,0 +1,259 @@ +ncolors=256 + +#R G B +5 48 97 +6 49 98 +7 51 100 +8 53 102 +9 55 104 +11 57 106 +12 59 108 +13 61 110 +14 63 112 +15 65 114 +17 67 116 +18 69 118 +19 71 120 +20 73 121 +22 75 123 +23 77 125 +24 79 127 +25 81 129 +26 82 131 +28 84 133 +29 86 135 +30 88 137 +31 90 139 +32 92 141 +34 94 143 +35 96 145 +36 98 146 +37 100 148 +39 102 150 +40 104 152 +41 106 154 +42 108 156 +43 110 158 +45 112 160 +46 113 162 +47 115 164 +48 117 166 +49 119 168 +51 121 170 +52 123 171 +53 125 173 +54 127 175 +56 129 177 +57 131 179 +58 133 181 +59 135 183 +60 137 185 +62 139 187 +63 141 189 +64 143 191 +65 145 193 +67 147 195 +69 148 195 +71 149 196 +74 150 197 +76 152 197 +78 153 198 +81 155 199 +83 156 199 +86 157 200 +88 159 201 +90 160 202 +93 161 202 +95 163 203 +97 164 204 +100 165 204 +102 166 205 +105 168 206 +107 169 207 +109 171 207 +112 172 208 +114 173 209 +116 175 209 +119 176 210 +121 177 211 +124 179 211 +126 180 212 +128 181 213 +131 183 214 +133 184 214 +135 185 215 +138 187 216 +140 188 216 +143 189 217 +145 191 218 +147 192 219 +150 193 219 +152 195 220 +155 196 221 +157 197 221 +159 198 222 +162 200 223 +164 201 223 +166 203 224 +169 204 225 +171 205 226 +174 207 226 +176 208 227 +178 209 228 +181 211 228 +183 212 229 +185 213 230 +188 214 230 +190 216 231 +193 217 232 +195 219 233 +197 220 233 +200 221 234 +202 223 235 +204 224 235 +207 225 236 +209 227 237 +212 228 238 +214 229 238 +216 230 239 +219 232 240 +221 233 240 +224 235 241 +226 236 242 +228 237 243 +231 239 243 +233 240 244 +235 241 245 +238 243 245 +240 244 246 +243 245 247 +245 246 247 +247 248 248 +248 248 247 +248 246 245 +247 243 243 +247 242 241 +246 240 238 +246 238 236 +246 235 234 +245 234 232 +245 232 229 +244 230 227 +244 227 225 +243 226 223 +243 224 220 +242 222 218 +242 220 216 +241 218 214 +241 216 211 +240 214 209 +240 211 207 +240 210 205 +239 208 202 +239 206 200 +238 203 198 +238 202 196 +237 200 193 +237 198 191 +236 195 189 +236 194 187 +235 192 184 +235 190 182 +235 187 180 +234 186 178 +234 184 175 +233 181 173 +233 179 171 +232 178 169 +232 176 166 +231 174 164 +231 172 162 +230 170 160 +230 168 157 +230 166 155 +229 163 153 +229 162 151 +228 160 148 +228 158 146 +227 156 144 +227 154 142 +226 152 139 +226 149 137 +225 147 135 +225 146 133 +224 144 130 +224 142 128 +224 140 126 +223 138 124 +223 135 121 +222 134 119 +222 132 117 +221 130 115 +221 128 112 +220 125 110 +220 124 108 +219 121 106 +219 120 103 +219 118 101 +218 115 99 +218 113 97 +217 112 94 +217 110 92 +216 108 90 +216 105 88 +215 104 85 +215 102 83 +214 100 81 +214 97 79 +214 96 76 +211 94 76 +209 92 75 +207 90 74 +205 88 73 +203 86 72 +200 84 71 +198 82 70 +196 80 69 +194 79 68 +192 77 67 +190 75 67 +187 73 66 +185 71 65 +183 69 64 +181 67 63 +179 65 62 +177 64 61 +174 62 60 +172 60 59 +170 58 58 +168 56 58 +166 54 57 +163 52 56 +161 50 55 +159 48 54 +157 47 53 +155 45 52 +153 43 51 +150 41 50 +148 39 49 +146 37 49 +144 35 48 +142 33 47 +140 32 46 +137 30 45 +135 28 44 +133 26 43 +131 24 42 +129 22 41 +126 20 40 +124 18 40 +122 16 39 +120 15 38 +118 13 37 +116 11 36 +113 9 35 +111 7 34 +109 5 33 +107 3 32 +105 1 31 +103 0 31 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_seq.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_seq.rgb new file mode 100644 index 
0000000000..bc39646a87 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_temperature_seq.rgb @@ -0,0 +1,259 @@ +ncolors=256 + +#R G B +254 254 203 +254 254 202 +254 253 200 +254 253 198 +254 252 196 +254 251 195 +254 251 193 +254 250 191 +254 249 189 +254 249 187 +253 248 186 +253 247 184 +253 247 182 +253 246 180 +253 245 178 +253 245 177 +253 244 175 +253 243 173 +252 243 171 +252 242 170 +252 241 168 +252 241 166 +252 240 164 +252 239 162 +252 239 160 +251 238 159 +251 237 157 +251 236 155 +251 235 153 +251 235 151 +250 234 150 +250 233 148 +250 232 146 +250 231 144 +250 230 142 +249 230 140 +249 229 138 +249 228 137 +249 227 135 +249 226 133 +248 225 131 +248 224 129 +248 223 127 +248 221 126 +247 220 124 +247 219 122 +247 218 120 +247 217 119 +246 216 117 +246 214 115 +246 213 114 +245 212 112 +245 211 110 +245 209 109 +244 208 107 +244 207 106 +244 205 105 +243 204 103 +243 202 102 +243 201 101 +243 200 99 +242 198 98 +242 197 97 +242 196 96 +241 194 95 +241 193 94 +241 191 93 +240 190 92 +240 189 92 +240 187 91 +239 186 90 +239 185 90 +239 183 89 +239 182 88 +238 181 88 +238 180 87 +238 178 87 +237 177 87 +237 176 86 +237 175 86 +237 173 85 +236 172 85 +236 171 85 +236 170 85 +236 168 84 +235 167 84 +235 166 84 +235 165 84 +235 164 83 +234 162 83 +234 161 83 +234 160 83 +234 159 83 +233 158 83 +233 157 82 +233 155 82 +233 154 82 +232 153 82 +232 152 82 +232 151 82 +232 150 82 +231 148 82 +231 147 82 +231 146 81 +230 145 81 +230 144 81 +230 143 81 +230 141 81 +229 140 81 +229 139 81 +229 138 81 +228 137 81 +228 135 81 +228 134 80 +228 133 80 +227 132 80 +227 131 80 +226 129 80 +226 128 80 +226 127 80 +225 126 80 +225 125 80 +224 123 79 +224 122 79 +223 121 79 +223 120 79 +222 118 79 +222 117 79 +221 116 79 +221 114 79 +220 113 78 +219 112 78 +219 111 78 +218 109 78 +217 108 78 +216 107 78 +215 105 77 +214 104 77 +213 103 77 +212 101 77 +211 100 77 +210 99 76 +209 98 76 +208 96 76 +207 95 76 +205 94 75 +204 92 75 +203 91 75 +201 90 75 +200 89 74 +198 88 74 +197 87 74 +195 85 73 +194 84 73 +192 83 73 +190 82 72 +189 81 72 +187 80 72 +185 79 71 +184 79 71 +182 78 70 +180 77 70 +178 76 70 +177 75 69 +175 74 69 +173 74 68 +171 73 68 +170 72 67 +168 72 67 +166 71 66 +164 70 66 +162 70 65 +161 69 65 +159 69 64 +157 68 63 +155 67 63 +153 67 62 +152 66 62 +150 66 61 +148 65 60 +146 65 60 +144 64 59 +143 64 58 +141 63 58 +139 63 57 +137 62 56 +135 62 56 +134 61 55 +132 61 54 +130 60 53 +128 59 53 +127 59 52 +125 58 51 +123 58 50 +121 57 50 +119 57 49 +118 56 48 +116 56 47 +114 55 46 +112 55 46 +111 54 45 +109 54 44 +107 53 43 +106 53 42 +104 52 42 +102 52 41 +100 51 40 +99 50 39 +97 50 38 +95 49 38 +94 49 37 +92 48 36 +90 48 35 +89 47 34 +87 47 33 +85 46 33 +84 46 32 +82 45 31 +80 45 30 +79 44 30 +77 43 29 +75 43 28 +74 42 27 +72 42 27 +71 41 26 +69 41 25 +67 40 24 +66 40 24 +64 39 23 +63 39 22 +61 38 22 +59 38 21 +58 37 20 +56 37 20 +55 36 19 +53 36 18 +52 35 18 +50 35 17 +49 34 16 +47 34 15 +46 33 15 +44 33 14 +43 32 13 +41 31 12 +40 31 11 +39 30 10 +37 30 9 +36 29 8 +34 29 7 +33 28 6 +32 28 5 +30 27 4 +29 27 3 +28 26 2 +26 26 1 +25 25 0 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_wind_div.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_wind_div.rgb new file mode 100644 index 0000000000..ee83bf24ac --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-ar6_wind_div.rgb @@ -0,0 +1,260 @@ +ncolors=256 + +#R G B +43 25 76 +43 27 77 +43 28 79 +43 29 80 +43 31 82 +43 32 83 +43 34 84 +43 35 86 +43 37 87 +43 38 89 +43 40 90 +43 41 92 +42 42 93 
+42 44 94 +42 45 96 +42 47 97 +42 48 99 +42 50 100 +42 51 102 +42 53 103 +42 54 105 +41 55 106 +41 57 108 +41 58 109 +41 60 111 +41 61 112 +41 63 114 +41 64 115 +41 66 117 +41 68 118 +41 69 120 +42 71 121 +42 72 123 +42 74 124 +43 75 126 +43 77 127 +43 79 129 +44 80 130 +45 82 131 +46 84 133 +46 85 134 +47 87 136 +48 89 137 +49 90 139 +51 92 140 +52 94 141 +53 95 143 +55 97 144 +56 99 146 +58 100 147 +59 102 148 +61 104 149 +63 105 151 +64 107 152 +66 109 153 +68 111 155 +70 112 156 +72 114 157 +74 116 158 +76 117 159 +78 119 161 +80 121 162 +82 122 163 +84 124 164 +86 126 165 +88 127 167 +90 129 168 +92 131 169 +94 132 170 +97 134 171 +99 136 172 +101 137 174 +103 139 175 +105 141 176 +107 142 177 +109 144 178 +112 146 179 +114 147 181 +116 149 182 +118 151 183 +120 153 184 +123 154 185 +125 156 186 +127 158 188 +129 159 189 +131 161 190 +134 163 191 +136 164 192 +138 166 193 +140 168 195 +143 170 196 +145 171 197 +147 173 198 +149 175 199 +152 176 201 +154 178 202 +156 180 203 +158 182 204 +161 183 205 +163 185 206 +165 187 208 +167 188 209 +170 190 210 +172 192 211 +174 194 212 +177 195 214 +179 197 215 +181 199 216 +183 201 217 +186 202 218 +188 204 219 +190 206 220 +192 207 222 +195 209 223 +197 211 224 +199 213 225 +201 214 225 +203 216 226 +205 217 227 +207 219 228 +209 221 228 +210 222 229 +212 223 229 +214 225 230 +215 226 230 +216 227 230 +217 228 229 +218 229 229 +218 230 228 +218 230 228 +218 231 227 +218 231 226 +218 231 224 +217 231 223 +216 231 221 +215 230 220 +214 230 218 +213 229 216 +211 228 215 +210 228 213 +208 227 211 +206 226 209 +204 225 207 +202 224 205 +201 223 203 +199 222 201 +197 221 199 +195 219 197 +193 218 195 +191 217 193 +189 216 191 +187 215 189 +185 214 187 +183 212 185 +181 211 183 +179 210 181 +177 209 179 +174 208 177 +172 207 175 +170 205 173 +168 204 171 +166 203 170 +164 202 168 +162 201 166 +160 199 164 +158 198 162 +156 197 160 +154 196 158 +152 195 156 +150 193 154 +148 192 152 +146 191 150 +144 190 148 +142 189 146 +140 188 144 +138 186 142 +136 185 140 +134 184 138 +132 183 137 +130 182 135 +128 181 133 +126 179 131 +124 178 129 +122 177 127 +120 176 125 +118 175 123 +116 173 121 +114 172 119 +112 171 117 +111 170 115 +109 169 113 +107 167 111 +105 166 109 +103 165 107 +101 164 105 +99 162 103 +97 161 101 +95 160 99 +93 158 97 +91 157 95 +89 155 93 +88 154 91 +86 153 89 +84 151 87 +82 150 84 +81 148 82 +79 146 80 +78 145 78 +76 143 76 +75 141 73 +73 140 71 +72 138 69 +71 136 67 +70 135 65 +69 133 62 +68 131 60 +68 129 58 +67 128 56 +66 126 54 +66 124 52 +65 123 51 +65 121 49 +65 119 47 +64 118 45 +64 116 44 +64 115 42 +64 113 40 +64 112 39 +64 110 37 +64 109 36 +64 107 34 +64 106 33 +64 105 32 +64 103 30 +64 102 29 +64 101 28 +64 99 26 +65 98 25 +65 97 24 +65 95 22 +65 94 21 +65 93 20 +65 92 18 +65 90 17 +65 89 16 +65 88 15 +65 87 13 +66 86 12 +66 84 11 +66 83 9 +66 82 8 +66 81 7 +66 80 5 +66 78 4 +66 77 3 +66 76 2 + diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-tas-seasdelta.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-tas-seasdelta.rgb new file mode 100644 index 0000000000..3b63387b9a --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-tas-seasdelta.rgb @@ -0,0 +1,31 @@ +############################################### +# Similar to the colors used in IPCC Fig. 
9.3 # +############################################### + +# number of colors in table + +ncolors = 18 + +# first color = GMT background color +# last color = GMT foreground color + +# r g b + +82 68 164 +52 77 169 +53 81 171 +68 99 184 +74 121 198 +83 155 218 +88 196 242 +129 206 241 +174 228 242 +245 245 125 +249 221 39 +250 174 27 +246 126 29 +242 84 35 +239 40 42 +232 28 36 +219 39 51 +248 150 166 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_color_ssp_tseries.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_color_ssp_tseries.rgb new file mode 100644 index 0000000000..9a291a3436 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_color_ssp_tseries.rgb @@ -0,0 +1,12 @@ +255 255 255 +0 0 0 +190 190 190 +30 150 132 +29 51 84 +234 221 61 +242 17 17 +132 11 34 +255 221 61 +154 109 201 +99 189 229 +232 136 49 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_prec_div_14.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_prec_div_14.rgb new file mode 100644 index 0000000000..2b53b86548 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_prec_div_14.rgb @@ -0,0 +1,18 @@ +ncolors = 14 + +# r g b + +84 48 5 +125 79 20 +166 110 35 +196 141 65 +211 172 118 +226 202 170 +241 233 222 +223 236 235 +173 211 208 +123 186 181 +73 161 153 +40 129 121 +20 95 84 +0 60 48 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_prec_seq_14.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_prec_seq_14.rgb new file mode 100644 index 0000000000..f22870cd04 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_prec_seq_14.rgb @@ -0,0 +1,18 @@ +ncolors = 14 + +# r g b + +255 255 229 +229 241 217 +203 228 206 +177 215 195 +151 201 184 +125 188 173 +99 175 162 +73 161 151 +50 147 139 +40 130 121 +30 112 102 +20 94 84 +10 77 66 +0 60 48 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_prec_seq_7.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_prec_seq_7.rgb new file mode 100644 index 0000000000..b6b395bf31 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_prec_seq_7.rgb @@ -0,0 +1,11 @@ +ncolors= 7 + +#r g b + +255 255 229 +198 226 205 +142 197 181 +86 168 157 +44 135 127 +22 97 87 +0 60 48 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_temp_div_10.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_temp_div_10.rgb new file mode 100644 index 0000000000..713b94328d --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_temp_div_10.rgb @@ -0,0 +1,13 @@ +ncolors= 10 + +#r g b +5 48 97 +39 102 151 +80 154 199 +147 192 219 +215 230 238 +242 220 217 +229 163 153 +216 107 89 +164 53 56 +103 0 31 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_temp_div_18.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_temp_div_18.rgb new file mode 100644 index 0000000000..10cb584f19 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_temp_div_18.rgb @@ -0,0 +1,13 @@ +ncolors = 10 + +# r g b +231 239 243 +245 234 232 +238 203 198 +231 174 164 +224 144 130 +218 113 97 +200 84 71 +168 56 58 +135 28 44 +103 0 31 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_temp_seq_14.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_temp_seq_14.rgb new file mode 100644 index 0000000000..fabf3bda8e --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_temp_seq_14.rgb @@ -0,0 +1,18 @@ +ncolors = 14 + +# r g b + +254 254 203 +252 242 168 +249 225 132 +243 201 101 +237 175 86 +232 152 82 +226 129 80 +214 104 77 +187 80 72 +152 67 62 +117 56 48 +84 46 32 +53 35 18 +25 25 0 diff --git 
a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_temp_seq_9.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_temp_seq_9.rgb
new file mode 100644
index 0000000000..9e5fa8009d
--- /dev/null
+++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_temp_seq_9.rgb
@@ -0,0 +1,12 @@
+ncolors= 9
+
+# r g b
+254 254 203
+250 232 146
+241 195 95
+233 155 82
+222 116 79
+183 78 71
+126 59 52
+72 42 26
+25 25 0
diff --git a/esmvaltool/diag_scripts/shared/plot/scatterplot.ncl b/esmvaltool/diag_scripts/shared/plot/scatterplot.ncl
index efb74ce7d3..296b60d895 100644
--- a/esmvaltool/diag_scripts/shared/plot/scatterplot.ncl
+++ b/esmvaltool/diag_scripts/shared/plot/scatterplot.ncl
@@ -52,7 +52,7 @@ function scatterplot(wks_in[1],
 ; the calling diag_script (option: diag_script_info@scatter_log).
 ;
 ; Modification history
-; 20140228-A_righ_ma: written.
+; 20140228-righi_mattia: written.
 ;
 local funcname, scriptname, scattermode, loc_res, resL, linec, lineh, linel, \
   resT, nstats, st, text, step, xpos, ypos, ii, tmp, ratio, fac2p, str,
@@ -289,7 +289,7 @@ function scatterplot3D(wks_in[1],
 ; Caveats
 ;
 ; Modification history
-; 20151103-A_righ_ma: written.
+; 20151103-righi_mattia: written.
 ;
 local funcname, scriptname, data, xpts, ypts, zpts, defaults, var, wks, res, \
   res_new, colors, ncolors, resM, markers, resT, labels, imod, idx, xp, yp, \
@@ -455,7 +455,7 @@ function scatterplot_markers(wks_in[1],
 ; source(0, :) = x-axis values
 ; source(1, :) = y-axis values
 ; source!0 = statistic
-; source!1 = model
+; source!1 = dataset
 ;
 ; Return value
 ; A graphic object.
@@ -467,7 +467,7 @@ function scatterplot_markers(wks_in[1],
 ; Caveats
 ;
 ; Modification history
-; 20151105_righ_ma: written.
+; 20151105-righi_mattia: written.
 ;
 local funcname, scriptname, data, xpts, ypts, defaults, var, wks, annots, \
   colors, markers, res, res_new, lg_outfile, styles, lgres
diff --git a/esmvaltool/diag_scripts/shared/plot/style.ncl b/esmvaltool/diag_scripts/shared/plot/style.ncl
index e9bf934567..83d69b84f3 100644
--- a/esmvaltool/diag_scripts/shared/plot/style.ncl
+++ b/esmvaltool/diag_scripts/shared/plot/style.ncl
@@ -12,6 +12,9 @@
 ; function format_units
 ; function set_log_ticks
 ; function sort_alphabetically
+; function unique_labels_min
+; function unique_labels_all
+; function get_unique_items
 ;
 ; #############################################################################
@@ -39,7 +42,7 @@ function project_style(items: list,
 ; observations and reanalyses).
 ;
 ; Return value
-; An array of the same size of items list, with the stlye
+; An array of the same size as the items list, with the style
 ; information for the given flag. The type depends on the flag.
 ;
 ; Description
@@ -56,11 +59,11 @@
 ; References
 ;
 ; Modification history
-; 20150512-A_righ_ma: modified to read style info from external style
-; files, instead of using hard-coded values in the
-; code. Functionalities of the project_styleset and
-; project_style_ functions porteed here.
-; 20130419-A_gott_kl: written.
+; 20150512-righi_mattia: modified to read style info from external style
+; files, instead of using hard-coded values in the
+; code. Functionalities of the project_styleset and
+; project_style_ functions ported here.
+; 20130419-gottschaldt_klaus-dirk: written.
 ;
 local funcname, scriptname, result, info, flag, styleset, stylefile, tmp, sty
 begin
@@ -392,7 +395,7 @@ function gsnColorRange(lower:numeric,
 ; http://www.ncl.ucar.edu/Applications/Scripts/contoursym_4.ncl
 ;
 ; Modification history
-; 20130422-A_gott_kl: written.
+; 20130422-gottschaldt_klaus-dirk: written.
 ;
 local funcname, scriptname, nboxes_left, nboxes_right, ncolors_left, \
   ncolors_right, output, color_start
@@ -463,7 +466,7 @@ function format_units(str[1]: string)
 ; References
 ;
 ; Modification history
-; 20140320-A_righ_ma: written.
+; 20140320-righi_mattia: written.
 ;
 local funcname, scriptname, loc_str
 begin
@@ -524,7 +527,7 @@ function set_log_ticks(fmin[1]:numeric,
 ; References
 ;
 ; Modification history
-; 20141003-A_righ_ma: written.
+; 20141003-righi_mattia: written.
 ;
 local funcname, scriptname, omin, omax, rmin, rmax, om, tmp, arr
 begin
@@ -614,7 +617,7 @@ function sort_alphabetically(orig_names[*], idx_exclude, dest_exclude)
 ; References
 ;
 ; Modification history
-; 20151028-A_righ:ma: written.
+; 20151028-righi_mattia: written.
 ;
 begin
@@ -667,3 +670,185 @@ begin
   return(perm_idx)
 end
+
+; #############################################################################
+undef("unique_labels_min")
+function unique_labels_min(items: list, \
+                           prio: string)
+;
+; Arguments
+; items: list of input_file_info metadata
+; prio: string vector with attribute names of elements of items, ordered by
+; priority for annotation (starting with highest).
+;
+; Return value
+; A vector (string) with one element for each item element; each label
+; contains as few attribute strings as possible.
+;
+; Description
+; Labels each item with its highest-priority attribute and appends
+; further attributes from prio only where needed to make the labels unique.
+;
+; Caveats
+;
+; References
+;
+; Modification history
+; 20181217-schlund_manuel: adapted to new NCL interface of v2.0.
+; 20130422-gottschaldt_klaus-dirk: written.
+;
+local funcname, scriptname, prio, result, unique, iprio, index
+begin
+
+  funcname = "unique_labels_min"
+  scriptname = "diag_scripts/shared/plot/style.ncl"
+  enter_msg(scriptname, funcname)
+
+  ; Start with highest priority
+  result = metadata_att_as_array(items, prio(0))
+  unique = get_unique_values(result)
+
+  ; Iterate over priorities
+  iprio = 1
+  do while (dimsizes(unique) .ne. dimsizes(result))
+
+    ; Not enough prios given
+    if (iprio .eq. dimsizes(prio)) then
+      error_msg("w", scriptname, funcname, "Add more attributes to prio " + \
+                "to make labels unique! Continuing with non-unique labels")
+      break
+
+    ; Add further prio
+    else
+      do i = 0, dimsizes(unique) - 1
+        index := ind(result .eq. unique(i))
+        new_prio = metadata_att_as_array(items, prio(iprio))
+
+        ; More than one occurrence
+        if (dimsizes(index) .gt. 1) then
+          result(index) = result(index) + "_" + new_prio(index)
+        end if
+      end do
+
+      ; Reset
+      unique := get_unique_values(result)
+      iprio = iprio + 1
+    end if
+  end do
+
+  leave_msg(scriptname, funcname)
+  return(result)
+
+end
+
+; #############################################################################
+undef("unique_labels_all")
+function unique_labels_all(items: list, \
+                           prio: string)
+;
+; Arguments
+; items: list of input_file_info metadata
+; prio: string vector with attribute names of elements of items, ordered by
+; priority for annotation (starting with highest).
+;
+; Return value
+; A vector (string) with one element for each item element; all labels
+; contain the same (least possible) number of attribute strings.
+;
+; Description
+; Like unique_labels_min, but appends each additional attribute to every
+; label, so that all labels are built from the same attributes.
+;
+; Caveats
+;
+; References
+;
+; Modification history
+; 20181217-schlund_manuel: adapted to new NCL interface of v2.0.
+; 20130422-gottschaldt_klaus-dirk: written.
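+;
+; Usage sketch (hypothetical values; the attribute names "dataset" and
+; "ensemble" and the resulting labels are illustrative only, not taken
+; from an actual recipe). For three items whose dataset attributes are
+; (/"CESM2", "CESM2", "MIROC6"/) and whose ensemble attributes are
+; (/"r1i1p1f1", "r2i1p1f1", "r1i1p1f1"/):
+;   unique_labels_min(items, (/"dataset", "ensemble"/))
+;     -> (/"CESM2_r1i1p1f1", "CESM2_r2i1p1f1", "MIROC6"/)
+;   unique_labels_all(items, (/"dataset", "ensemble"/))
+;     -> (/"CESM2_r1i1p1f1", "CESM2_r2i1p1f1", "MIROC6_r1i1p1f1"/)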
+;
+local funcname, scriptname, prio, result, unique, iprio, index
+begin
+
+  funcname = "unique_labels_all"
+  scriptname = "diag_scripts/shared/plot/style.ncl"
+  enter_msg(scriptname, funcname)
+
+  ; Start with highest priority
+  result = metadata_att_as_array(items, prio(0))
+  unique = get_unique_values(result)
+
+  ; Iterate over priorities
+  iprio = 1
+  do while (dimsizes(unique) .ne. dimsizes(result))
+
+    ; Not enough prios given
+    if (iprio .eq. dimsizes(prio)) then
+      error_msg("w", scriptname, funcname, "Add more attributes to prio " + \
+                "to make labels unique! Continuing with non-unique labels")
+      break
+
+    ; Add further prio
+    else
+      do i = 0, dimsizes(unique) - 1
+        index := ind(result .eq. unique(i))
+        new_prio = metadata_att_as_array(items, prio(iprio))
+        result(index) = result(index) + "_" + new_prio(index)
+      end do
+
+      ; Reset
+      unique := get_unique_values(result)
+      iprio = iprio + 1
+    end if
+  end do
+
+  leave_msg(scriptname, funcname)
+  return(result)
+
+end
+
+; #############################################################################
+undef("get_unique_items")
+function get_unique_items(items: list, \
+                          prio: string)
+;
+; Arguments
+; items: list of input_file_info metadata
+; prio: string vector with attribute names of elements of items, ordered by
+; priority for annotation (starting with highest).
+;
+; Description
+; Return all unique list elements with the given prio.
+;
+; Caveats
+;
+; References
+;
+; Modification history
+; 20181218-schlund_manuel: written.
+;
+local items, prio, funcname, scriptname, unique_labels, ilist, unique_list, \
+  atts, info
+begin
+
+  funcname = "get_unique_items"
+  scriptname = "diag_scripts/shared/plot/style.ncl"
+  enter_msg(scriptname, funcname)
+
+  ; Get unique labels
+  unique_labels = unique_labels_min(items, prio)
+  do ilist = 0, ListCount(items) - 1
+    items[ilist]@label = unique_labels(ilist)
+  end do
+
+  ; Select unique labels (always return 1st element)
+  unique_list = NewList("lifo")
+  unique_labels := get_unique_values(unique_labels)
+  do ilab = 0, dimsizes(unique_labels) - 1
+    atts := True
+    atts@label = unique_labels(ilab)
+    info = select_metadata_by_atts(items, atts)
+    ListAppend(unique_list, info[0])
+  end do
+
+  leave_msg(scriptname, funcname)
+  return(unique_list)
+
+end
diff --git a/esmvaltool/diag_scripts/shared/plot/styles/cmip3.style b/esmvaltool/diag_scripts/shared/plot/styles/cmip3.style
new file mode 100644
index 0000000000..490e89cf93
--- /dev/null
+++ b/esmvaltool/diag_scripts/shared/plot/styles/cmip3.style
@@ -0,0 +1,83 @@
+###############################################################################
+# CMIP3 STYLES
+###############################################################################
+# This file defines the plot attributes for the CMIP3 models.
+#
+# MODEL NAME: must be the same as given in the main recipe.
+# COLOR: can be either an NCL named color, a RGB code (size 3) on a 0-255 scale +# or a RGBA code (size 4) with the last element indicating the opacity +# on a 0-1 scale (0 = transparent, 1 = full), see +# http://www.ncl.ucar.edu/Applications/Scripts/rgb.txt +# https://www.ncl.ucar.edu/Document/glossary.shtml#RGB +# https://www.ncl.ucar.edu/Document/glossary.shtml#RGBA +# https://www.ncl.ucar.edu/Document/Graphics/create_color_table.shtml +# DASH: the dash pattern index lines, see +# https://www.ncl.ucar.edu/Document/Graphics/Images/dashpatterns.png +# THICK: the line thickness (NCL default is 1) +# MARK: marker index for markers, see +# https://www.ncl.ucar.edu/Document/Graphics/Images/markers.png +# AVG-STD: 0 = takes part in the calculation of mean and stddev +# 1 = does not take part in the calculation of mean and stddev +# (usually 0 for models, 1 for observations/reanalysis) +# +# Mind the formatting: columns are separated by the | symbol , colors given +# as RGB/RGBA must be comma-separated. +############################################################################### +MODEL NAME # COLOR # DASH # THICK # MARK # AVG-STD +############################################################################### +bccr_bcm2_0 | 148, 25, 30 | 0 | 2 | 4 | 0 +cccma_cgcm3_1 | 255, 255, 0 | 0 | 2 | 4 | 0 +cccma_cgcm3_1_t63 | 255, 255, 0 | 1 | 2 | 4 | 0 +csiro_mk3_0 | 30, 76, 36 | 0 | 2 | 4 | 0 +gfdl_cm2_0 | 35, 54, 109 | 0 | 2 | 4 | 0 +gfdl_cm2_1 | 35, 54, 109 | 1 | 2 | 4 | 0 +giss_aom | 119, 29, 123 | 0 | 2 | 4 | 0 +giss_model_e_h | 119, 29, 123 | 1 | 2 | 4 | 0 +giss_model_e_r | 119, 29, 123 | 2 | 2 | 4 | 0 +iap_fgoals1_0_g | 145, 214, 126 | 0 | 2 | 4 | 0 +ingv_echam4 | 255, 204, 0 | 0 | 2 | 4 | 0 +inmcm3_0 | 161, 37, 44 | 0 | 2 | 4 | 0 +ipsl_cm4 | 91, 83, 174 | 0 | 2 | 4 | 0 +ipsl_cm4A-LR | 91, 83, 174 | 1 | 2 | 4 | 0 +miroc3_2_hires | 184, 95, 182 | 0 | 2 | 4 | 0 +miroc3_2_medres | 184, 95, 182 | 1 | 2 | 4 | 0 +mpi_echam5 | 93, 161, 162 | 0 | 2 | 4 | 0 +mpi_echam5-HR | 93, 161, 162 | 1 | 2 | 4 | 0 +mri_cgcm2_3_2a | 173, 255, 47 | 0 | 2 | 4 | 0 +ncar_ccsm3_0 | 174, 170, 170 | 0 | 2 | 4 | 0 +ncar_pcm1 | 174, 170, 170 | 1 | 2 | 4 | 0 +ukmo_hadcm3 | 122, 139, 38 | 0 | 2 | 4 | 0 +ukmo_hadgem1 | 122, 139, 38 | 1 | 2 | 4 | 0 +MultiModelMean | 255, 0, 0 | 0 | 4 | 12 | 1 +MultiModelMedian | 0, 0, 255 | 0 | 3 | 12 | 1 +NSIDC | 0, 0, 0 | 0 | 3 | 16 | 1 +HadISST | 0, 0, 0 | 1 | 3 | 16 | 1 +MOHC-HadISST | 0, 0, 0 | 1 | 3 | 16 | 1 +AMSRE | 0, 0, 0 | 2 | 3 | 16 | 1 +ERAINT | 0, 0, 0 | 3 | 3 | 16 | 1 +NCEP | 0, 0, 0 | 4 | 3 | 16 | 1 +TRMM-3B43-v7-0.25deg | 0, 0, 0 | 5 | 3 | 16 | 1 +TRMM-L3 | 0, 0, 0 | 5 | 3 | 16 | 1 +CERES-SYN1deg | 0, 0, 0 | 0 | 1 | 16 | 0 +CMAP | 0, 0, 0 | 6 | 3 | 16 | 1 +GPCP-1DD-V12 | 0, 0, 0 | 7 | 3 | 16 | 1 +MERRA | 0, 0, 0 | 8 | 3 | 16 | 1 +ESACCI-AEROSOL | 0, 0, 0 | 0 | 3 | 16 | 1 +ESACCI-CLOUD | 255, 0, 0 | 0 | 3 | 0 | 0 +MODIS | 0, 255, 0 | 0 | 3 | 16 | 1 +NIWA | 0, 0, 0 | 1 | 3 | 8 | 1 +BDBP | 0, 0, 0 | 1 | 3 | 4 | 1 +IGAG/SPARC | 248, 154, 28 | 0 | 3 | 16 | 0 +PATMOS | 0, 0, 255 | 0 | 3 | 0 | 0 +CLARA-A2 | 0, 255, 255 | 0 | 3 | 0 | 0 +ERA-Interim | 255, 200, 0 | 0 | 3 | 0 | 0 +ESACCI-SSMI | 0, 0, 0 | 0 | 4 | 0 | 0 +ESACCI-AMSR | 0, 0, 0 | 1 | 4 | 0 | 0 +NSIDC-NT | 0, 0, 0 | 2 | 3 | 0 | 0 +NSIDC-BT | 0, 0, 0 | 12 | 3 | 0 | 0 +HadCRUT4 | 0, 0, 0 | 0 | 4 | 0 | 0 +BerkeleyEarth | 0, 0, 0 | 1 | 3 | 0 | 0 +GISTEMP | 0, 0, 0 | 2 | 3 | 0 | 0 +CowtanWay | 0, 0, 0 | 3 | 3 | 0 | 0 +GHCN | 0, 0, 0 | 0 | 3 | 0 | 0 +default | 0, 0, 0 | 0 | 1 | 16 | 0 diff --git 
a/esmvaltool/diag_scripts/shared/plot/styles/cmip356.style b/esmvaltool/diag_scripts/shared/plot/styles/cmip356.style
new file mode 100644
index 0000000000..e64e3d71e3
--- /dev/null
+++ b/esmvaltool/diag_scripts/shared/plot/styles/cmip356.style
@@ -0,0 +1,205 @@
+###############################################################################
+# CMIP3+5+6 STYLES
+###############################################################################
+# This file defines the plot attributes for the CMIP3, CMIP5 and CMIP6 models.
+#
+# MODEL NAME: must be the same as given in the main recipe.
+# COLOR: can be either an NCL named color, a RGB code (size 3) on a 0-255 scale
+# or a RGBA code (size 4) with the last element indicating the opacity
+# on a 0-1 scale (0 = transparent, 1 = full), see
+# http://www.ncl.ucar.edu/Applications/Scripts/rgb.txt
+# https://www.ncl.ucar.edu/Document/glossary.shtml#RGB
+# https://www.ncl.ucar.edu/Document/glossary.shtml#RGBA
+# https://www.ncl.ucar.edu/Document/Graphics/create_color_table.shtml
+# DASH: the dash pattern index lines, see
+# https://www.ncl.ucar.edu/Document/Graphics/Images/dashpatterns.png
+# THICK: the line thickness (NCL default is 1)
+# MARK: marker index for markers, see
+# https://www.ncl.ucar.edu/Document/Graphics/Images/markers.png
+# AVG-STD: 0 = takes part in the calculation of mean and stddev
+# 1 = does not take part in the calculation of mean and stddev
+# (usually 0 for models, 1 for observations/reanalysis)
+#
+# Mind the formatting: columns are separated by the | symbol , colors given
+# as RGB/RGBA must be comma-separated.
+###############################################################################
+MODEL NAME # COLOR # DASH # THICK # MARK # AVG-STD
+###############################################################################
+bccr_bcm2_0 | 148, 25, 30 | 0 | 2 | 4 | 0
+cccma_cgcm3_1 | 255, 255, 0 | 0 | 2 | 4 | 0
+cccma_cgcm3_1_t63 | 255, 255, 0 | 1 | 2 | 4 | 0
+csiro_mk3_0 | 30, 76, 36 | 0 | 2 | 4 | 0
+gfdl_cm2_0 | 35, 54, 109 | 0 | 2 | 4 | 0
+gfdl_cm2_1 | 35, 54, 109 | 1 | 2 | 4 | 0
+giss_aom | 119, 29, 123 | 0 | 2 | 4 | 0
+giss_model_e_h | 119, 29, 123 | 1 | 2 | 4 | 0
+giss_model_e_r | 119, 29, 123 | 2 | 2 | 4 | 0
+iap_fgoals1_0_g | 145, 214, 126 | 0 | 2 | 4 | 0
+ingv_echam4 | 255, 204, 0 | 0 | 2 | 4 | 0
+inmcm3_0 | 161, 37, 44 | 0 | 2 | 4 | 0
+ipsl_cm4 | 91, 83, 174 | 0 | 2 | 4 | 0
+ipsl_cm4A-LR | 91, 83, 174 | 1 | 2 | 4 | 0
+miroc3_2_hires | 184, 95, 182 | 0 | 2 | 4 | 0
+miroc3_2_medres | 184, 95, 182 | 1 | 2 | 4 | 0
+mpi_echam5 | 93, 161, 162 | 0 | 2 | 4 | 0
+mpi_echam5-HR | 93, 161, 162 | 1 | 2 | 4 | 0
+mri_cgcm2_3_2a | 173, 255, 47 | 0 | 2 | 4 | 0
+ncar_ccsm3_0 | 174, 170, 170 | 0 | 2 | 4 | 0
+ncar_pcm1 | 174, 170, 170 | 1 | 2 | 4 | 0
+ukmo_hadcm3 | 122, 139, 38 | 0 | 2 | 4 | 0
+ukmo_hadgem1 | 122, 139, 38 | 1 | 2 | 4 | 0
+ACCESS1-0 | 91, 142, 210 | 0 | 1 | 4 | 0
+ACCESS1-3 | 91, 142, 210 | 1 | 1 | 6 | 0
+bcc-csm1-1 | 148, 25, 30 | 0 | 1 | 4 | 0
+bcc-csm1-1-m | 148, 25, 30 | 1 | 1 | 6 | 0
+BNU-ESM | 196, 121, 0 | 0 | 1 | 4 | 0
+CanCM4 | 30, 76, 36 | 0 | 1 | 4 | 0
+CanESM2 | 30, 76, 36 | 1 | 1 | 6 | 0
+CNRM-AM-PRE6 | 30, 76, 36 | 1 | 1 | 6 | 0
+CCSM4 | 67, 178, 216 | 0 | 1 | 4 | 0
+CESM1-BGC | 67, 178, 216 | 1 | 1 | 6 | 0
+CESM1-CAM5 | 67, 178, 216 | 2 | 1 | 7 | 0
+CESM1-CAM5-1-FV2 | 67, 178, 216 | 2 | 1 | 7 | 0
+CESM1-FASTCHEM | 67, 178, 216 | 3 | 1 | 5 | 0
+CESM1-WACCM | 67, 178, 216 | 4 | 1 | 2 | 0
+CMCC-CESM | 232, 32, 35 | 0 | 1 | 4 | 0
+CMCC-CM | 232, 32, 35 | 1 | 1 | 6 | 0
+CMCC-CMS | 232, 32, 35 | 2 | 1 | 7 | 0
+CNRM-CM5 | 145,
214, 126 | 0 | 1 | 4 | 0 +CSIRO-Mk3-6-0 | 241, 75, 32 | 0 | 1 | 4 | 0 +EC-EARTH | 124, 99, 184 | 0 | 1 | 4 | 0 +FGOALS-g2 | 248, 154, 28 | 0 | 1 | 4 | 0 +FIO-ESM | 77, 187, 55 | 0 | 1 | 4 | 0 +GFDL-CM2p1 | 35, 54, 109 | 0 | 1 | 4 | 0 +GFDL-CM3 | 35, 54, 109 | 1 | 1 | 6 | 0 +GFDL-ESM2G | 35, 54, 109 | 2 | 1 | 7 | 0 +MPIESM-1-1 | 35, 54, 109 | 2 | 1 | 7 | 0 +GFDL-ESM2M | 35, 54, 109 | 3 | 1 | 5 | 0 +GISS-E2-H | 119, 29, 123 | 0 | 1 | 4 | 0 +GISS-E2-H-CC | 119, 29, 123 | 1 | 1 | 6 | 0 +GISS-E2-R | 119, 29, 123 | 2 | 1 | 7 | 0 +GISS-E2-R-CC | 119, 29, 123 | 3 | 1 | 5 | 0 +HadCM3 | 122, 139, 38 | 0 | 1 | 4 | 0 +HadGEM2-AO | 122, 139, 38 | 1 | 1 | 6 | 0 +HadGEM2-A | 122, 139, 38 | 1 | 1 | 6 | 0 +HadGEM2-CC | 122, 139, 38 | 2 | 1 | 7 | 0 +HadGEM2-ES | 122, 139, 38 | 3 | 1 | 5 | 0 +inmcm4 | 161, 37, 44 | 0 | 1 | 4 | 0 +INMCM4 | 161, 37, 44 | 0 | 1 | 4 | 0 +IPSL-CM5A-LR | 91, 83, 174 | 0 | 1 | 4 | 0 +IPSL-CM5A-MR | 91, 83, 174 | 1 | 1 | 6 | 0 +IPSL-CM5B-LR | 91, 83, 174 | 2 | 1 | 7 | 0 +MIROC4h | 184, 95, 182 | 0 | 1 | 4 | 0 +MIROC5 | 184, 95, 182 | 1 | 1 | 6 | 0 +MIROC-ESM | 184, 95, 182 | 2 | 1 | 7 | 0 +MIROC-ESM-CHEM | 184, 95, 182 | 3 | 1 | 5 | 0 +MPI-ESM-LR | 93, 161, 162 | 0 | 1 | 4 | 0 +MPI-ESM-MR | 93, 161, 162 | 1 | 1 | 6 | 0 +MPI-ESM-P | 93, 161, 162 | 2 | 1 | 7 | 0 +MRI-CGCM3 | 173, 255, 47 | 0 | 1 | 4 | 0 +MRI-ESM1 | 173, 255, 47 | 1 | 1 | 7 | 0 +NorESM1-M | 241, 58, 167 | 0 | 1 | 4 | 0 +NorESM1-ME | 241, 58, 167 | 1 | 1 | 7 | 0 +ACCESS-CM2 | 0, 176, 80 | 0 | 2 | 4 | 0 +ACCESS-ESM1-5 | 0, 176, 80 | 1 | 2 | 4 | 0 +AWI-CM-1-1-MR | 153, 0, 255 | 0 | 2 | 4 | 0 +AWI-ESM-1-1-LR | 153, 0, 255 | 1 | 2 | 4 | 0 +BCC-CSM2-MR | 148, 25, 30 | 0 | 2 | 4 | 0 +BCC-ESM1 | 148, 25, 30 | 1 | 2 | 6 | 0 +CAMS-CSM1-0 | 255, 255, 0 | 0 | 2 | 4 | 0 +CanESM5 | 30, 76, 36 | 0 | 2 | 4 | 0 +CanESM5-CanOE | 30, 76, 36 | 1 | 2 | 4 | 0 +CAS-ESM2-0 | 255, 102, 0 | 0 | 2 | 4 | 0 +CESM2 | 67, 178, 216 | 0 | 2 | 4 | 0 +CESM2-FV2 | 67, 178, 216 | 1 | 2 | 4 | 0 +CESM2-WACCM | 67, 178, 216 | 2 | 2 | 6 | 0 +CESM2-WACCM-FV2 | 67, 178, 216 | 3 | 2 | 6 | 0 +CIESM | 68, 113, 212 | 0 | 2 | 4 | 0 +CMCC-CM2-HR4 | 255, 153, 51 | 0 | 2 | 4 | 0 +CMCC-CM2-SR5 | 255, 153, 51 | 1 | 2 | 4 | 0 +CMCC-ESM2 | 255, 153, 51 | 2 | 2 | 4 | 0 +CNRM-CM6-1 | 145, 214, 126 | 0 | 2 | 4 | 0 +CNRM-CM6-1-HR | 145, 214, 126 | 1 | 2 | 4 | 0 +CNRM-ESM2-1 | 145, 214, 126 | 2 | 2 | 4 | 0 +E3SM-1-0 | 255, 204, 0 | 0 | 2 | 4 | 0 +E3SM-1-1 | 255, 204, 0 | 1 | 2 | 4 | 0 +E3SM-1-1-ECA | 255, 204, 0 | 2 | 2 | 4 | 0 +EC-Earth3 | 124, 99, 184 | 0 | 2 | 4 | 0 +EC-Earth3-AerChem | 124, 99, 184 | 1 | 2 | 4 | 0 +EC-Earth3-CC | 124, 99, 184 | 2 | 2 | 4 | 0 +EC-Earth3-LR | 124, 99, 184 | 3 | 2 | 4 | 0 +EC-Earth3-Veg | 124, 99, 184 | 4 | 2 | 6 | 0 +EC-Earth3-Veg-LR | 124, 99, 184 | 5 | 2 | 6 | 0 +FGOALS-f3-L | 248, 154, 28 | 0 | 2 | 4 | 0 +FGOALS-g3 | 248, 154, 28 | 1 | 2 | 4 | 0 +FIO-ESM-2-0 | 77, 187, 55 | 0 | 2 | 4 | 0 +GFDL-CM4 | 35, 54, 109 | 0 | 2 | 4 | 0 +GFDL-ESM4 | 35, 54, 109 | 1 | 2 | 6 | 0 +GISS-E2-1-G | 119, 29, 123 | 0 | 2 | 4 | 0 +GISS-E2-1-G-CC | 119, 29, 123 | 1 | 2 | 4 | 0 +GISS-E2-1-H | 119, 29, 123 | 2 | 2 | 6 | 0 +GISS-E2-2-G | 119, 29, 123 | 3 | 2 | 4 | 0 +HadGEM3-GC31-LL | 122, 139, 38 | 0 | 2 | 4 | 0 +HadGEM3-GC31-MM | 122, 139, 38 | 1 | 2 | 4 | 0 +IITM-ESM | 0, 51, 204 | 0 | 2 | 4 | 0 +INM-CM4-8 | 161, 37, 44 | 0 | 2 | 4 | 0 +INM-CM5-0 | 161, 37, 44 | 1 | 2 | 6 | 0 +IPSL-CM5A2-INCA | 91, 83, 174 | 0 | 2 | 4 | 0 +IPSL-CM6A-LR | 91, 83, 174 | 1 | 2 | 4 | 0 +KACE-1-0-G | 231, 23, 157 | 0 | 2 | 6 | 0 +KIOST-ESM | 119, 29, 123 | 0 | 2 | 6 | 0 +MCM-UA-1-0 
| 197, 90, 17 | 1 | 2 | 4 | 0 +MIROC6 | 184, 95, 182 | 0 | 2 | 4 | 0 +MIROC-ES2L | 184, 95, 182 | 1 | 2 | 4 | 0 +MPI-ESM1-2-HR | 93, 161, 162 | 0 | 2 | 4 | 0 +MPI-ESM1-2-LR | 93, 161, 162 | 1 | 2 | 4 | 0 +MPI-ESM-1-2-HAM | 93, 161, 162 | 2 | 2 | 4 | 0 +MRI-ESM2-0 | 173, 255, 47 | 0 | 2 | 4 | 0 +NESM3 | 174, 170, 170 | 0 | 2 | 4 | 0 +NorCPM1 | 241, 58, 167 | 0 | 2 | 4 | 0 +NorESM1-F | 241, 58, 167 | 1 | 2 | 4 | 0 +NorESM2-LM | 241, 58, 167 | 2 | 2 | 4 | 0 +NorESM2-MM | 241, 58, 167 | 3 | 2 | 4 | 0 +SAM0-UNICON | 11, 247, 200 | 0 | 2 | 4 | 0 +TaiESM1 | 174, 170, 170 | 0 | 2 | 4 | 0 +UKESM1-0-LL | 164, 60, 112 | 0 | 2 | 4 | 0 +MultiModelMean | 255, 0, 0 | 0 | 4 | 12 | 1 +MultiModelMedian | 0, 0, 255 | 0 | 3 | 12 | 1 +multi-model mean | 255, 0, 0 | 0 | 3 | 12 | 1 +AMSRE | 0, 0, 0 | 2 | 3 | 16 | 1 +BDBP | 0, 0, 0 | 1 | 3 | 4 | 1 +BerkeleyEarth | 0, 0, 0 | 1 | 4 | 0 | 0 +CDS-XCO2 | 0, 0, 0 | 0 | 3 | 16 | 1 +CERES-SYN1deg | 0, 0, 0 | 0 | 1 | 16 | 0 +CLARA-A2 | 0, 255, 255 | 0 | 3 | 0 | 0 +CMAP | 0, 0, 0 | 6 | 3 | 16 | 1 +CowtanWay | 0, 0, 0 | 5 | 4 | 0 | 0 +ERAINT | 0, 0, 0 | 3 | 3 | 16 | 1 +ERA-Interim | 255, 200, 0 | 0 | 3 | 0 | 0 +ESACCI-AEROSOL | 0, 0, 0 | 0 | 3 | 16 | 1 +ESACCI-AMSR | 0, 0, 0 | 1 | 4 | 0 | 0 +ESACCI-CLOUD | 255, 0, 0 | 0 | 3 | 0 | 0 +ESACCI-SSMI | 0, 0, 0 | 0 | 4 | 0 | 0 +GHCN | 0, 0, 0 | 0 | 3 | 0 | 0 +GISTEMP | 0, 0, 0 | 4 | 4 | 0 | 0 +GPCP-1DD-V12 | 0, 0, 0 | 7 | 3 | 16 | 1 +HadCRUT4 | 0, 0, 0 | 4 | 4 | 0 | 0 +HadCRUT5 | 0, 0, 0 | 0 | 4 | 0 | 0 +HadISST | 0, 0, 0 | 1 | 3 | 16 | 1 +IGAG/SPARC | 248, 154, 28 | 0 | 3 | 16 | 0 +Kadow | 0, 0, 0 | 3 | 4 | 0 | 0 +MERRA | 0, 0, 0 | 8 | 3 | 16 | 1 +MODIS | 0, 255, 0 | 0 | 3 | 16 | 1 +MOHC-HadISST | 0, 0, 0 | 1 | 3 | 16 | 1 +NCEP | 0, 0, 0 | 4 | 3 | 16 | 1 +NIWA | 0, 0, 0 | 1 | 3 | 8 | 1 +NIWA-BS | 0, 0, 0 | 1 | 3 | 8 | 1 +NOAAGlobalTemp-Interim| 0, 0, 0 | 2 | 4 | 0 | 0 +NSIDC | 0, 0, 0 | 0 | 3 | 16 | 1 +NSIDC-BT | 0, 0, 0 | 12 | 3 | 0 | 0 +NSIDC-NT | 0, 0, 0 | 2 | 3 | 0 | 0 +PATMOS | 0, 0, 255 | 0 | 3 | 0 | 0 +PATMOS-x | 0, 0, 255 | 0 | 3 | 0 | 0 +TRMM-3B43-v7-0.25deg | 0, 0, 0 | 5 | 3 | 16 | 1 +TRMM-L3 | 0, 0, 0 | 5 | 3 | 16 | 1 +default | 0, 0, 0 | 0 | 1 | 16 | 0 diff --git a/esmvaltool/diag_scripts/shared/plot/styles/cmip5.style b/esmvaltool/diag_scripts/shared/plot/styles/cmip5.style index 836a1a5b2d..69affe5f39 100644 --- a/esmvaltool/diag_scripts/shared/plot/styles/cmip5.style +++ b/esmvaltool/diag_scripts/shared/plot/styles/cmip5.style @@ -80,6 +80,7 @@ NorESM1-M | 241, 58, 167 | 0 | 1 | 4 | 0 NorESM1-ME | 241, 58, 167 | 1 | 1 | 7 | 0 MultiModelMean | 255, 0, 0 | 0 | 3 | 12 | 1 MultiModelMedian | 0, 0, 255 | 0 | 3 | 12 | 1 +multi-model mean | 255, 0, 0 | 0 | 3 | 12 | 1 NSIDC | 0, 0, 0 | 0 | 3 | 16 | 1 HadISST | 0, 0, 0 | 1 | 3 | 16 | 1 MOHC-HadISST | 0, 0, 0 | 1 | 3 | 16 | 1 @@ -106,4 +107,5 @@ ESACCI-AMSR | 0, 0, 0 | 1 | 4 | 0 | 0 NSIDC-NT | 0, 0, 0 | 2 | 3 | 0 | 0 NSIDC-BT | 0, 0, 0 | 12 | 3 | 0 | 0 HadCRUT4 | 0, 0, 0 | 0 | 3 | 0 | 0 +CDS-XCO2 | 0, 0, 0 | 0 | 3 | 16 | 1 default | 0, 0, 0 | 0 | 1 | 16 | 0 diff --git a/esmvaltool/diag_scripts/shared/plot/styles/cmip6.style b/esmvaltool/diag_scripts/shared/plot/styles/cmip6.style index f525fd9376..47cffdc2c8 100644 --- a/esmvaltool/diag_scripts/shared/plot/styles/cmip6.style +++ b/esmvaltool/diag_scripts/shared/plot/styles/cmip6.style @@ -25,11 +25,85 @@ ############################################################################### MODEL NAME # COLOR # DASH # THICK # MARK # AVG-STD ############################################################################### -CNRM-CM6-1 | 
30, 76, 36 | 1 | 1 | 6 | 0 -GISS-E2-1-G | 119, 29, 123 | 0 | 1 | 4 | 0 +ACCESS-ESM1-5 | 91, 142, 210 | 0 | 1 | 4 | 0 +AWI-CM-1-1-MR | 153, 0, 255 | 0 | 1 | 4 | 0 +BCC-CSM2-MR | 148, 25, 30 | 0 | 1 | 4 | 0 +BCC-ESM1 | 148, 25, 30 | 1 | 1 | 6 | 0 +CAMS-CSM1-0 | 255, 255, 0 | 0 | 1 | 4 | 0 +CanESM5 | 30, 76, 36 | 0 | 1 | 4 | 0 +CanESM5-CanOE | 130, 76, 36 | 0 | 1 | 4 | 0 +CESM2 | 67, 178, 216 | 0 | 1 | 4 | 0 +CESM2-WACCM | 67, 178, 216 | 1 | 1 | 6 | 0 +CNRM-CM6-1 | 145, 214, 126 | 0 | 1 | 4 | 0 +CNRM-CM6-1-HR | 145, 214, 126 | 1 | 1 | 4 | 0 +CNRM-ESM2-1 | 145, 214, 126 | 2 | 1 | 4 | 0 +E3SM-1-0 | 255, 204, 0 | 0 | 1 | 4 | 0 +E3SM-1-1 | 255, 204, 0 | 1 | 1 | 4 | 0 +EC-Earth3 | 124, 99, 184 | 0 | 1 | 4 | 0 +EC-Earth3-LR | 124, 99, 184 | 1 | 1 | 4 | 0 +EC-Earth3-Veg | 124, 99, 184 | 2 | 1 | 6 | 0 +FGOALS-f3-L | 248, 154, 28 | 0 | 1 | 4 | 0 +FGOALS-g3 | 248, 154, 28 | 1 | 1 | 4 | 0 +FIO-ESM-2-0 | 77, 187, 55 | 0 | 1 | 4 | 0 GFDL-AM4 | 35, 54, 109 | 0 | 1 | 4 | 0 +GFDL-CM4 | 35, 54, 109 | 0 | 1 | 4 | 0 +GFDL-ESM4 | 35, 54, 109 | 1 | 1 | 6 | 0 +GISS-E2-1-G | 119, 29, 123 | 0 | 1 | 4 | 0 +GISS-E2-1-G-CC | 119, 29, 123 | 1 | 1 | 4 | 0 +GISS-E2-1-H | 119, 29, 123 | 2 | 1 | 6 | 0 +HadGEM3-GC31-LL | 122, 139, 38 | 0 | 1 | 4 | 0 +HadGEM3-GC31-MM | 122, 139, 38 | 1 | 1 | 4 | 0 +IITM-ESM | 0, 51, 204 | 0 | 1 | 4 | 0 +INM-CM4-8 | 161, 37, 44 | 0 | 1 | 4 | 0 +INM-CM5-0 | 161, 37, 44 | 1 | 1 | 6 | 0 IPSL-CM6A-LR | 91, 83, 174 | 0 | 1 | 4 | 0 IPSL-CM6A-MR | 91, 83, 174 | 1 | 1 | 6 | 0 IPSL-CM6B-LR | 91, 83, 174 | 2 | 1 | 7 | 0 -MIROC6 | 184, 95, 182 | 1 | 1 | 6 | 0 +MCM-UA-1-0 | 197, 90, 17 | 1 | 1 | 4 | 0 +MIROC6 | 184, 95, 182 | 0 | 1 | 4 | 0 +MIROC-ES2L | 184, 95, 182 | 1 | 1 | 4 | 0 +MPI-ESM1-2-HR | 93, 161, 162 | 0 | 1 | 4 | 0 +MPI-ESM1-2-LR | 93, 161, 162 | 1 | 1 | 4 | 0 +MPI-ESM-1-2-HAM | 93, 161, 162 | 2 | 1 | 4 | 0 +MRI-ESM2-0 | 173, 255, 47 | 0 | 1 | 4 | 0 +NESM3 | 174, 170, 170 | 0 | 1 | 4 | 0 +NorCPM1 | 241, 58, 167 | 0 | 1 | 4 | 0 +NorESM1-F | 241, 58, 167 | 1 | 1 | 4 | 0 +NorESM2-LM | 241, 58, 167 | 2 | 1 | 4 | 0 +SAM0-UNICON | 11, 247, 200 | 0 | 1 | 4 | 0 +UKESM1-0-LL | 164, 60, 112 | 0 | 1 | 4 | 0 +MultiModelMean | 255, 0, 0 | 0 | 3 | 12 | 1 +multi-model mean | 255, 0, 0 | 0 | 3 | 12 | 1 +MultiModelMedian | 0, 0, 255 | 0 | 3 | 12 | 1 +CDS-XCO2 | 0, 0, 0 | 0 | 3 | 16 | 1 +NSIDC | 0, 0, 0 | 0 | 3 | 16 | 1 +HadISST | 0, 0, 0 | 1 | 3 | 16 | 1 +MOHC-HadISST | 0, 0, 0 | 1 | 3 | 16 | 1 +AMSRE | 0, 0, 0 | 2 | 3 | 16 | 1 +ERAINT | 0, 0, 0 | 3 | 3 | 16 | 1 +NCEP | 0, 0, 0 | 4 | 3 | 16 | 1 +TRMM-3B43-v7-0.25deg | 0, 0, 0 | 5 | 3 | 16 | 1 +TRMM-L3 | 0, 0, 0 | 5 | 3 | 16 | 1 +CERES-SYN1deg | 0, 0, 0 | 0 | 1 | 16 | 0 +CMAP | 0, 0, 0 | 6 | 3 | 16 | 1 +GPCP-1DD-V12 | 0, 0, 0 | 7 | 3 | 16 | 1 +MERRA | 0, 0, 0 | 8 | 3 | 16 | 1 +ESACCI-AEROSOL | 0, 0, 0 | 0 | 3 | 16 | 1 +ESACCI-CLOUD | 255, 0, 0 | 0 | 3 | 0 | 0 +MODIS | 0, 255, 0 | 0 | 3 | 16 | 1 +NIWA | 0, 0, 0 | 1 | 3 | 8 | 1 +BDBP | 0, 0, 0 | 1 | 3 | 4 | 1 +IGAG/SPARC | 248, 154, 28 | 0 | 3 | 16 | 0 +PATMOS | 0, 0, 255 | 0 | 3 | 0 | 0 +CLARA-A2 | 0, 255, 255 | 0 | 3 | 0 | 0 +ERA-Interim | 255, 200, 0 | 0 | 3 | 0 | 0 +ESACCI-SSMI | 0, 0, 0 | 0 | 4 | 0 | 0 +ESACCI-AMSR | 0, 0, 0 | 1 | 4 | 0 | 0 +NSIDC-NT | 0, 0, 0 | 2 | 3 | 0 | 0 +NSIDC-BT | 0, 0, 0 | 12 | 3 | 0 | 0 +HadCRUT4 | 0, 0, 0 | 0 | 3 | 0 | 0 +BerkeleyEarth | 0, 0, 0 | 1 | 3 | 0 | 0 +GISTEMP | 0, 0, 0 | 2 | 3 | 0 | 0 +CowtanWay | 0, 0, 0 | 3 | 3 | 0 | 0 +GHCN | 0, 0, 0 | 0 | 3 | 0 | 0 default | 0, 0, 0 | 0 | 1 | 16 | 0 diff --git a/esmvaltool/diag_scripts/shared/plot/styles/cmip6_ipcc.style 
b/esmvaltool/diag_scripts/shared/plot/styles/cmip6_ipcc.style new file mode 100644 index 0000000000..9006cbb3be --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/styles/cmip6_ipcc.style @@ -0,0 +1,128 @@ +############################################################################### +# CMIP6 STYLES - PRELIMINARY +############################################################################### +# This file defines the plot attributes for the CMIP6 models. +# +# MODEL NAME: must be the same as given in the main recipe. +# COLOR: can be either an NCL named color, a RGB code (size 3) on a 0-255 scale +# or a RGBA code (size 4) with the last element indicating the opacity +# on a 0-1 scale (0 = transparent, 1 = full), see +# http://www.ncl.ucar.edu/Applications/Scripts/rgb.txt +# https://www.ncl.ucar.edu/Document/glossary.shtml#RGB +# https://www.ncl.ucar.edu/Document/glossary.shtml#RGBA +# https://www.ncl.ucar.edu/Document/Graphics/create_color_table.shtml +# DASH: the dash pattern index lines, see +# https://www.ncl.ucar.edu/Document/Graphics/Images/dashpatterns.png +# THICK: the line thickness (NCL default is 1) +# MARK: marker index for markers, see +# https://www.ncl.ucar.edu/Document/Graphics/Images/markers.png +# AVG-STD: 0 = takes part in the calculation of mean and stddev +# 1 = does not take part in the calculation of mean and stddev +# (usually 0 for models, 1 for observations/reanalysis) +# +# Mind the formatting: columns are separated by the | symbol , colors given +# as RGB/RGBA must be comma-separated. +############################################################################### +MODEL NAME # COLOR # DASH # THICK # MARK # AVG-STD +############################################################################### +ACCESS-CM2 | 0, 176, 80 | 0 | 2 | 4 | 0 +ACCESS-ESM1-5 | 0, 176, 80 | 1 | 2 | 4 | 0 +AWI-CM-1-1-MR | 153, 0, 255 | 0 | 2 | 4 | 0 +AWI-ESM-1-1-LR | 153, 0, 255 | 1 | 2 | 4 | 0 +BCC-CSM2-MR | 148, 25, 30 | 0 | 2 | 4 | 0 +BCC-ESM1 | 148, 25, 30 | 1 | 2 | 6 | 0 +CAMS-CSM1-0 | 255, 255, 0 | 0 | 2 | 4 | 0 +CanESM5 | 30, 76, 36 | 0 | 2 | 4 | 0 +CanESM5-CanOE | 30, 76, 36 | 1 | 2 | 4 | 0 +CAS-ESM2-0 | 255, 102, 0 | 0 | 2 | 4 | 0 +CESM2 | 67, 178, 216 | 0 | 2 | 4 | 0 +CESM2-FV2 | 67, 178, 216 | 1 | 2 | 4 | 0 +CESM2-WACCM | 67, 178, 216 | 2 | 2 | 6 | 0 +CESM2-WACCM-FV2 | 67, 178, 216 | 3 | 2 | 6 | 0 +CIESM | 68, 113, 212 | 0 | 2 | 4 | 0 +CMCC-CM2-HR4 | 255, 153, 51 | 0 | 2 | 4 | 0 +CMCC-CM2-SR5 | 255, 153, 51 | 1 | 2 | 4 | 0 +CMCC-ESM2 | 255, 153, 51 | 2 | 2 | 4 | 0 +CNRM-CM6-1 | 145, 214, 126 | 0 | 2 | 4 | 0 +CNRM-CM6-1-HR | 145, 214, 126 | 1 | 2 | 4 | 0 +CNRM-ESM2-1 | 145, 214, 126 | 2 | 2 | 4 | 0 +E3SM-1-0 | 255, 204, 0 | 0 | 2 | 4 | 0 +E3SM-1-1 | 255, 204, 0 | 1 | 2 | 4 | 0 +E3SM-1-1-ECA | 255, 204, 0 | 2 | 2 | 4 | 0 +EC-Earth3 | 124, 99, 184 | 0 | 2 | 4 | 0 +EC-Earth3-AerChem | 124, 99, 184 | 1 | 2 | 4 | 0 +EC-Earth3-CC | 124, 99, 184 | 2 | 2 | 4 | 0 +EC-Earth3-LR | 124, 99, 184 | 3 | 2 | 4 | 0 +EC-Earth3-Veg | 124, 99, 184 | 4 | 2 | 6 | 0 +EC-Earth3-Veg-LR | 124, 99, 184 | 5 | 2 | 6 | 0 +FGOALS-f3-L | 248, 154, 28 | 0 | 2 | 4 | 0 +FGOALS-g3 | 248, 154, 28 | 1 | 2 | 4 | 0 +FIO-ESM-2-0 | 77, 187, 55 | 0 | 2 | 4 | 0 +GFDL-CM4 | 35, 54, 109 | 0 | 2 | 4 | 0 +GFDL-ESM4 | 35, 54, 109 | 1 | 2 | 6 | 0 +GISS-E2-1-G | 119, 29, 123 | 0 | 2 | 4 | 0 +GISS-E2-1-G-CC | 119, 29, 123 | 1 | 2 | 4 | 0 +GISS-E2-1-H | 119, 29, 123 | 2 | 2 | 6 | 0 +GISS-E2-2-G | 119, 29, 123 | 3 | 2 | 4 | 0 +HadGEM3-GC31-LL | 122, 139, 38 | 0 | 2 | 4 | 0 +HadGEM3-GC31-MM | 122, 139, 38 | 1 | 2 | 4 | 0 
+IITM-ESM | 0, 51, 204 | 0 | 2 | 4 | 0
+INM-CM4-8 | 161, 37, 44 | 0 | 2 | 4 | 0
+INM-CM5-0 | 161, 37, 44 | 1 | 2 | 6 | 0
+IPSL-CM5A2-INCA | 91, 83, 174 | 0 | 2 | 4 | 0
+IPSL-CM6A-LR | 91, 83, 174 | 1 | 2 | 4 | 0
+KACE-1-0-G | 231, 23, 157 | 0 | 2 | 6 | 0
+KIOST-ESM | 119, 29, 123 | 0 | 2 | 6 | 0
+MCM-UA-1-0 | 197, 90, 17 | 1 | 2 | 4 | 0
+MIROC6 | 184, 95, 182 | 0 | 2 | 4 | 0
+MIROC-ES2L | 184, 95, 182 | 1 | 2 | 4 | 0
+MPI-ESM1-2-HR | 93, 161, 162 | 0 | 2 | 4 | 0
+MPI-ESM1-2-LR | 93, 161, 162 | 1 | 2 | 4 | 0
+MPI-ESM-1-2-HAM | 93, 161, 162 | 2 | 2 | 4 | 0
+MRI-ESM2-0 | 173, 255, 47 | 0 | 2 | 4 | 0
+NESM3 | 174, 170, 170 | 0 | 2 | 4 | 0
+NorCPM1 | 241, 58, 167 | 0 | 2 | 4 | 0
+NorESM1-F | 241, 58, 167 | 1 | 2 | 4 | 0
+NorESM2-LM | 241, 58, 167 | 2 | 2 | 4 | 0
+NorESM2-MM | 241, 58, 167 | 3 | 2 | 4 | 0
+SAM0-UNICON | 11, 247, 200 | 0 | 2 | 4 | 0
+TaiESM1 | 174, 170, 170 | 0 | 2 | 4 | 0
+UKESM1-0-LL | 164, 60, 112 | 0 | 2 | 4 | 0
+MultiModelMean | 255, 0, 0 | 0 | 4 | 12 | 1
+MultiModelMedian | 0, 0, 255 | 0 | 3 | 12 | 1
+multi-model mean | 255, 0, 0 | 0 | 3 | 12 | 1
+CDS-XCO2 | 0, 0, 0 | 0 | 3 | 16 | 1
+NSIDC | 0, 0, 0 | 0 | 3 | 16 | 1
+HadISST | 0, 0, 0 | 0 | 3 | 16 | 1
+MOHC-HadISST | 0, 0, 0 | 1 | 3 | 16 | 1
+AMSRE | 0, 0, 0 | 2 | 3 | 16 | 1
+ERAINT | 0, 0, 0 | 3 | 3 | 16 | 1
+NCEP | 0, 0, 0 | 4 | 3 | 16 | 1
+TRMM-3B43-v7-0.25deg | 0, 0, 0 | 5 | 3 | 16 | 1
+TRMM-L3 | 0, 0, 0 | 5 | 3 | 16 | 1
+CERES-SYN1deg | 0, 0, 0 | 0 | 1 | 16 | 0
+CMAP | 0, 0, 0 | 6 | 3 | 16 | 1
+GPCP-1DD-V12 | 0, 0, 0 | 7 | 3 | 16 | 1
+MERRA | 0, 0, 0 | 8 | 3 | 16 | 1
+ESACCI-AEROSOL | 0, 0, 0 | 0 | 3 | 16 | 1
+ESACCI-CLOUD | 255, 0, 0 | 0 | 3 | 0 | 0
+MODIS | 0, 255, 0 | 0 | 3 | 16 | 1
+NIWA | 0, 0, 0 | 1 | 3 | 8 | 1
+BDBP | 0, 0, 0 | 1 | 3 | 4 | 1
+IGAG/SPARC | 248, 154, 28 | 0 | 3 | 16 | 0
+PATMOS | 0, 0, 255 | 0 | 3 | 0 | 0
+CLARA-A2 | 0, 255, 255 | 0 | 3 | 0 | 0
+ERA-Interim | 255, 200, 0 | 0 | 3 | 0 | 0
+ESACCI-SSMI | 0, 0, 0 | 0 | 4 | 0 | 0
+ESACCI-AMSR | 0, 0, 0 | 1 | 4 | 0 | 0
+NSIDC-NT | 0, 0, 0 | 2 | 3 | 0 | 0
+NSIDC-BT | 0, 0, 0 | 12 | 3 | 0 | 0
+HadCRUT4 | 0, 0, 0 | 4 | 4 | 0 | 0
+HadCRUT5 | 0, 0, 0 | 0 | 4 | 0 | 0
+BerkeleyEarth | 0, 0, 0 | 1 | 4 | 0 | 0
+NOAAGlobalTemp-Interim| 0, 0, 0 | 2 | 4 | 0 | 0
+Kadow | 0, 0, 0 | 3 | 4 | 0 | 0
+GISTEMP | 0, 0, 0 | 4 | 4 | 0 | 0
+CowtanWay | 0, 0, 0 | 5 | 4 | 0 | 0
+GHCN | 0, 0, 0 | 0 | 3 | 0 | 0
+default | 0, 0, 0 | 0 | 1 | 16 | 0
diff --git a/esmvaltool/diag_scripts/shared/plot/styles/lauer21.style b/esmvaltool/diag_scripts/shared/plot/styles/lauer21.style
new file mode 100644
index 0000000000..3cba1e2d02
--- /dev/null
+++ b/esmvaltool/diag_scripts/shared/plot/styles/lauer21.style
@@ -0,0 +1,44 @@
+###############################################################################
+# CMIP5+6 STYLES
+###############################################################################
+# This file defines the plot attributes for the CMIP5 and CMIP6 models.
+# Mostly based on IPCC-AR5 Chapter 9 figures.
+#
+# MODEL NAME: must be the same as given in the main recipe.
+# COLOR: can be either an NCL named color, a RGB code (size 3) on a 0-255 scale +# or a RGBA code (size 4) with the last element indicating the opacity +# on a 0-1 scale (0 = transparent, 1 = full), see +# http://www.ncl.ucar.edu/Applications/Scripts/rgb.txt +# https://www.ncl.ucar.edu/Document/glossary.shtml#RGB +# https://www.ncl.ucar.edu/Document/glossary.shtml#RGBA +# https://www.ncl.ucar.edu/Document/Graphics/create_color_table.shtml +# DASH: the dash pattern index lines, see +# https://www.ncl.ucar.edu/Document/Graphics/Images/dashpatterns.png +# THICK: the line thickness (NCL default is 1) +# MARK: marker index for markers, see +# https://www.ncl.ucar.edu/Document/Graphics/Images/markers.png +# AVG-STD: 0 = takes part in the calculation of mean and stddev +# 1 = does not take part in the calculation of mean and stddev +# (usually 0 for models, 1 for observations/reanalysis) +# +# Mind the formatting: columns are separated by the | symbol , colors given +# as RGB/RGBA must be comma-separated. +########################################################################### +MODEL NAME # COLOR # DASH # THICK # MARK # AVG-STD +########################################################################### +MultiModelAverage | 0, 0, 0 | 0 | 3 | 12 | 1 +MultiModelMean | 255, 0, 0 | 0 | 3 | 12 | 1 +MultiModelMedian | 255, 127, 63 | 0 | 3 | 12 | 1 +CERES-EBAF | 100, 200, 200 | 0 | 3 | 4 | 0 +CLARA-AVHRR | 150, 200, 250 | 0 | 3 | 4 | 0 +CLOUDSAT-L2 | 50, 200, 100 | 0 | 3 | 4 | 0 +ERA5 | 255, 200, 0 | 0 | 3 | 4 | 0 +ERA-Interim | 255, 150, 0 | 0 | 3 | 4 | 0 +ESACCI-CLOUD | 0, 50, 200 | 0 | 3 | 4 | 0 +ESACCI-WATERVAPOUR | 0, 50, 200 | 0 | 3 | 4 | 0 +ISCCP-FH | 250, 0, 50 | 0 | 3 | 4 | 0 +MAC-LWP | 150, 200, 50 | 0 | 3 | 4 | 0 +MODIS | 150, 0, 150 | 0 | 3 | 4 | 0 +SSMI-MERIS | 0, 250, 0 | 0 | 3 | 4 | 0 +PATMOS-x | 0, 150, 0 | 0 | 3 | 4 | 0 +default | 200, 200, 200 | 0 | 1 | 16 | 0 diff --git a/esmvaltool/diag_scripts/shared/plot/styles_python/cmip5.yml b/esmvaltool/diag_scripts/shared/plot/styles_python/cmip5.yml index 7a68ebbfc9..919657f72d 100644 --- a/esmvaltool/diag_scripts/shared/plot/styles_python/cmip5.yml +++ b/esmvaltool/diag_scripts/shared/plot/styles_python/cmip5.yml @@ -183,7 +183,7 @@ CanESM2: color: '#1e4c24' dash: -- facecolor: none - mark: s + mark: o thick: 1 EC-EARTH: avgstd: 0 @@ -253,28 +253,28 @@ GFDL-CM2p1: color: '#23366d' dash: '-' facecolor: none - mark: o + mark: x thick: 1 GFDL-CM3: avgstd: 0 color: '#23366d' dash: -- facecolor: none - mark: s + mark: o thick: 1 GFDL-ESM2G: avgstd: 0 color: '#23366d' dash: ':' facecolor: none - mark: ^ + mark: s thick: 1 GFDL-ESM2M: avgstd: 0 color: '#23366d' dash: -. facecolor: none - mark: x + mark: ^ thick: 1 GISS-E2-H: avgstd: 0 @@ -288,14 +288,14 @@ GISS-E2-H-CC: color: '#771d7b' dash: -- facecolor: none - mark: s + mark: ^ thick: 1 GISS-E2-R: avgstd: 0 color: '#771d7b' dash: ':' facecolor: none - mark: ^ + mark: s thick: 1 GISS-E2-R-CC: avgstd: 0 @@ -344,7 +344,7 @@ HadGEM2-ES: color: '#7a8b26' dash: -. facecolor: none - mark: x + mark: o thick: 1 HadISST: avgstd: 1 @@ -379,14 +379,14 @@ IPSL-CM5A-MR: color: '#5b53ae' dash: -- facecolor: none - mark: s + mark: ^ thick: 1 IPSL-CM5B-LR: avgstd: 0 color: '#5b53ae' dash: ':' facecolor: none - mark: ^ + mark: s thick: 1 MERRA: avgstd: 1 @@ -400,28 +400,28 @@ MIROC-ESM: color: '#b85fb6' dash: ':' facecolor: none - mark: ^ + mark: s thick: 1 MIROC-ESM-CHEM: avgstd: 0 color: '#b85fb6' dash: -. 
facecolor: none - mark: x + mark: ^ thick: 1 MIROC4h: avgstd: 0 color: '#b85fb6' dash: '-' facecolor: none - mark: o + mark: x thick: 1 MIROC5: avgstd: 0 color: '#b85fb6' dash: -- facecolor: none - mark: s + mark: o thick: 1 MODIS: avgstd: 1 @@ -449,14 +449,14 @@ MPI-ESM-MR: color: '#5da1a2' dash: -- facecolor: none - mark: s + mark: ^ thick: 1 MPI-ESM-P: avgstd: 0 color: '#5da1a2' dash: ':' facecolor: none - mark: ^ + mark: s thick: 1 MPIESM-1-1: avgstd: 0 @@ -483,15 +483,15 @@ MultiModelMean: avgstd: 1 color: '#ff0000' dash: '-' - facecolor: none - mark: '*' + facecolor: '#ff0000' + mark: 'o' thick: 3 MultiModelMedian: avgstd: 1 color: '#0000ff' dash: '-' - facecolor: none - mark: '*' + facecolor: '#0000ff' + mark: 'o' thick: 3 NCEP: avgstd: 1 @@ -586,7 +586,7 @@ default: thick: 1 inmcm4: avgstd: 0 - color: '#a1252c' + color: '#a1452c' dash: '-' facecolor: none mark: o diff --git a/esmvaltool/diag_scripts/shared/plot/styles_python/cmip6.yml b/esmvaltool/diag_scripts/shared/plot/styles_python/cmip6.yml index dbea820ba6..d832724fd9 100644 --- a/esmvaltool/diag_scripts/shared/plot/styles_python/cmip6.yml +++ b/esmvaltool/diag_scripts/shared/plot/styles_python/cmip6.yml @@ -31,33 +31,159 @@ # thick: line thickness ############################################################################### -CNRM-CM6-1: +AWI-CM-1-1-MR: + avgstd: 0 + color: '#7f7f7f' + dash: '-' + facecolor: none + mark: ^ + thick: 1 +BCC-CSM2-MR: avgstd: 0 - color: '#1e4c24' + color: '#c00000' + dash: '-' + facecolor: none + mark: o + thick: 1 +BCC-ESM1: + avgstd: 0 + color: '#c00000' + dash: '-' + facecolor: none + mark: s + thick: 1 +CAMS-CSM1-0: + avgstd: 0 + color: '#f79646' dash: -- facecolor: none mark: s thick: 1 -GFDL-AM4: +CanESM5: avgstd: 0 - color: '#23366d' + color: '#00b050' + dash: -- + facecolor: none + mark: s + thick: 1 +CNRM-CM6-1: + avgstd: 0 + color: '#9bbb59' dash: '-' facecolor: none mark: o thick: 1 +CNRM-CM6-1-HR: + avgstd: 0 + color: '#9bbb59' + dash: '-.' + facecolor: none + mark: ^ + thick: 1 +CNRM-ESM2-1: + avgstd: 0 + color: '#9bbb59' + dash: -- + facecolor: none + mark: s + thick: 1 +CESM2: + avgstd: 0 + color: '#00b0f0' + dash: ':' + facecolor: none + mark: ^ + thick: 1 +CESM2-WACCM: + avgstd: 0 + color: '#00b0f0' + dash: -. 
+ facecolor: none + mark: s + thick: 1 +E3SM-1-0: + avgstd: 0 + color: '#f79646' + dash: ':' + facecolor: none + mark: o + thick: 1 +EC-Earth3-Veg: + avgstd: 0 + color: '#7f7f7f' + dash: ':' + facecolor: none + mark: s + thick: 1 +FGOALS-f3-L: + avgstd: 0 + color: '#948a54' + dash: ':' + facecolor: none + mark: o + thick: 1 +GFDL-CM4: + avgstd: 0 + color: '#295d91' + dash: -- + facecolor: none + mark: s + thick: 1 +GFDL-ESM4: + avgstd: 0 + color: '#295d91' + dash: -- + facecolor: none + mark: o + thick: 1 GISS-E2-1-G: avgstd: 0 - color: '#771d7b' + color: '#b3a2c7' + dash: ':' + facecolor: none + mark: ^ + thick: 1 +GISS-E2-1-H: + avgstd: 0 + color: '#b3a2c7' dash: '-' facecolor: none mark: o thick: 1 +GISS-E2-2-G: + avgstd: 0 + color: '#b3a2c7' + dash: -- + facecolor: none + mark: s + thick: 1 +HadGEM3-GC31-LL: + avgstd: 0 + color: '#4f6228' + dash: ':' + facecolor: none + mark: ^ + thick: 1 +INM-CM5-0: + avgstd: 0 + color: '#984807' + dash: '-' + facecolor: none + mark: ^ + thick: 1 +INM-CM4-8: + avgstd: 0 + color: '#984807' + dash: -- + facecolor: none + mark: s + thick: 1 IPSL-CM6A-LR: avgstd: 0 - color: '#5b53ae' + color: '#002060' dash: '-' facecolor: none - mark: o + mark: ^ thick: 1 IPSL-CM6A-MR: avgstd: 0 @@ -73,13 +199,97 @@ IPSL-CM6B-LR: facecolor: none mark: ^ thick: 1 +MCM-UA-1-0: + avgstd: 0 + color: '#ffc000' + dash: -- + facecolor: none + mark: s + thick: 1 MIROC6: avgstd: 0 - color: '#b85fb6' + color: '#7031a0' + dash: -- + facecolor: none + mark: s + thick: 1 +MIROC-ES2L: + avgstd: 0 + color: '#7031a0' dash: -- facecolor: none + mark: o + thick: 1 +MPI-ESM1-2-HR: + avgstd: 0 + color: '#00b0f0' + dash: -- + facecolor: none + mark: o + thick: 1 +MPI-ESM1-2-LR: + avgstd: 0 + color: '#00b0f0' + dash: '-' + facecolor: none mark: s thick: 1 +MRI-ESM2-0: + avgstd: 0 + color: '#c3d69b' + dash: -- + facecolor: none + mark: ^ + thick: 1 +NESM3: + avgstd: 0 + color: '#00843c' + dash: -- + facecolor: none + mark: ^ + thick: 1 +NorCPM1: + avgstd: 0 + color: '#d99694' + dash: '-' + facecolor: none + mark: ^ + thick: 1 +NorESM2-LM: + avgstd: 0 + color: '#d99694' + dash: -- + facecolor: none + mark: o + thick: 1 +SAM0-UNICON: + avgstd: 0 + color: '#ffc000' + dash: '-' + facecolor: none + mark: ^ + thick: 1 +UKESM1-0-LL: + avgstd: 0 + color: '#4f6228' + dash: '-' + facecolor: none + mark: o + thick: 1 +MultiModelMean: + avgstd: 1 + color: '#ff0000' + dash: '-' + facecolor: '#ff0000' + mark: 'o' + thick: 3 +MultiModelMedian: + avgstd: 1 + color: '#0000ff' + dash: '-' + facecolor: '#0000ff' + mark: 'o' + thick: 3 default: avgstd: 0 color: '#000000' diff --git a/esmvaltool/diag_scripts/shared/plot/taylor_diagram_less_hardcoded.ncl b/esmvaltool/diag_scripts/shared/plot/taylor_diagram_less_hardcoded.ncl deleted file mode 100644 index 94aab7ecc9..0000000000 --- a/esmvaltool/diag_scripts/shared/plot/taylor_diagram_less_hardcoded.ncl +++ /dev/null @@ -1,677 +0,0 @@ -; This is a donated script. It has no guarnteed support from the NCL project. -; See further comments below. 
For more info, and any improvements or bugs, -; please contact Jatin Kala (Jatin.Kala.JK *AT* gmail.com) -; ============================================================================= -function taylor_diagram(wks:graphic, - legendwks:graphic, - RATIO[*][*]:numeric, - CC[*][*]:numeric, - rOpts:logical) - -local dimR, nCase, nVar, X, Y, nc, angle, xyMin, xyOne, ceil_max_val, xyMax, \ - xyMax_Panel, FontHeightF, rxy, tempo_values, tempo_values1, temp_string, \ - temp1_string, temp_string_y, temp_string_y1, npts, xx, yy, sLabels, \ - cLabels, rad, angC, taylor, rsrRes, dum0, dum1, dum2, nStnRad, rr, radC, \ - xC, yC, txRes, dum4, plRes, tmEnd, radTM, xTM, yTM, dum5, dum6, i, mTM, \ - angmTM, radmTM, dum7, angRL, rlRes, dum8, xRL, yRL, respl, dx, ncon, npts, \ - ang, dum9, dist_n, Markers, Colors, gsRes, ptRes, markerTxYOffset, dum10, \ - dum11, n, lgres, nModel, lbid, lbid2, amres, annoid1, nVar, \ - varLabelsFontHeightF, txres, delta_y, ys, dum12 -begin - - dimR = dimsizes(RATIO) - nCase = dimR(0) ; # of cases [models] - nVar = dimR(1) ; # of variables - - ; x/y coordinates for plotting - X = new((/nCase, nVar/), typeof(RATIO)) - Y = new((/nCase, nVar/), typeof(RATIO)) - - do nc = 0, nCase - 1 - angle = acos(CC(nc, :)) - X(nc, :) = RATIO(nc, :) * cos(angle) - Y(nc, :) = RATIO(nc, :) * sin(angle) - end do - - ; fix to nearest x.5 - xyMin = 0. - xyOne = 1.00 - rmax = ceil(max(RATIO)) - if (rmax.lt.1.5) then - rmax = 1.5 - end if - if (rmax.gt.3.0) then - rmax = 3.0 - end if - xyMax = rmax + 0.1 - xyMax_Panel = xyMax + 0.10 - - if (rOpts .and. isatt(rOpts, "txFontHeightF")) then - FontHeightF = rOpts@txFontHeightF - else - FontHeightF = 0.0175 - end if - -; ---------------------------------------------------------------- -; Part 1: -; base plot: Based upon request of Mark Stevens -; basic x-y and draw the 1.0 observed and the outer curve at 1.65 -; ---------------------------------------------------------------- - - rxy = True - rxy@gsnDraw = False - rxy@gsnFrame = False - rxy@vpHeightF = 0.65 - rxy@vpWidthF = 0.65 - if (rOpts .and. isatt(rOpts, "plotSize")) then - rxy@vpHeightF = rOpts@plotSize - rxy@vpWidthF = rOpts@plotSize - rxy@vpXF = 0.125 - rxy@vpYF = 0.825 - end if - rxy@tmYLBorderOn = False - rxy@tmXBBorderOn = False - - rxy@tiYAxisString = "Standardized Deviations (Normalized)" - rxy@tiYAxisFontHeightF = FontHeightF - - rxy@tmXBMode = "Explicit" - delta = 0.5 - if (rmax.le.1.5) then - delta = 0.25 - end if - tempo_values = \ - decimalPlaces(fspan(0.0, rmax, toint(rmax / delta) + 1), 2, True) - tempo_values1 = tempo_values(ind(tempo_values .lt. xyMax)) - delete(tempo_values) - rxy@tmXBValues = tempo_values1 - delete(tempo_values1) - temp_string = tostring(sprintf("%4.2f", rxy@tmXBValues)) - temp_string(0) = " " - temp1_string = where(temp_string .eq. "1.00", "REF", temp_string) - rxy@tmXBLabels = temp1_string - if (rOpts .and. isatt(rOpts, "OneX")) then - rxy@tmXBLabels = temp_string - end if - delete(temp_string) - delete(temp1_string) - - rxy@tmXBMajorLengthF = 0.015 - rxy@tmXBLabelFontHeightF = FontHeightF - rxy@tmXBMinorOn = False - rxy@trXMaxF = xyMax_Panel - rxy@tmYLMode = "Manual" - rxy@tmYLMinorOn = False - rxy@tmYLMajorLengthF = rxy@tmXBMajorLengthF - rxy@tmYLLabelFontHeightF = FontHeightF - rxy@tmYLMode = "Explicit" - rxy@tmYLValues = rxy@tmXBValues - temp_string_y = rxy@tmXBLabels - temp_string_y(0) = "0.00" - temp_string_y1 = where(temp_string_y .eq. 
"REF", "1.00", temp_string_y) - delete(temp_string_y) - rxy@tmYLLabels = temp_string_y1 - delete(temp_string_y1) - rxy@trYMaxF = xyMax_Panel - - rxy@tmYRBorderOn = False - rxy@tmYROn = False - rxy@tmXTBorderOn = False - rxy@tmXTOn = False - rxy@xyDashPatterns = (/0 /) - rxy@xyLineThicknesses = (/2./) - rxy@gsnFrame = False - npts = 100 ; arbitrary - xx = fspan(xyMin, xyMax, npts) - yy = sqrt(xyMax ^ 2 - xx ^ 2) ; outer correlation line (xyMax) - sLabels = (/"0.0", "0.1", "0.2", "0.3", "0.4", "0.5", "0.6", \ - "0.7", "0.8", "0.9", "0.95", "0.99", "1.0" /) - cLabels = stringtofloat(sLabels) - rad = 4. * atan(1.0) / 180. - angC = acos(cLabels) / rad ; angles: correlation labels - if (rOpts .and. isatt(rOpts, "tiMainString")) then - rxy@tiMainString = rOpts@tiMainString - if (isatt(rOpts, "tiMainFontHeightF")) then - rxy@tiMainFontHeightF = rOpts@tiMainFontHeightF - else - rxy@tiMainFontHeightF = 0.0225 - end if - end if - - taylor = gsn_xy(wks, xx, yy, rxy) - -; -------------------------------------------------------------- -; draw observational uncertainties given as normalized RMS error -; around the observational reference point at (1, 0) -; -------------------------------------------------------------- - - if (rOpts .and. isatt(rOpts, "rmsobs")) then - npts = 100 ; arbitrary - rmsobj = new(2, graphic) - - respl0 = True - respl0@gsLineThicknessF = 4.0 - respl0@gsLineDashPattern = 0 - respl0@gsLineColor = (/0.50, 0.95, 0.47/) - respl0@gsFillColor = (/0.85, 1.00, 0.80/) - rr = rOpts@rmsobs - - ; uncertainty circle covers whole plotting area - if (rr.ge.xyMax) then - ang = fspan(180, 360, npts) * rad - xx = 0.995 * xyMax * cos(ang) - yy = fabs(0.995 * xyMax * sin(ang)) - rmsobj(0) = gsn_add_polygon(wks, taylor, xx, yy, respl0) - rmsobj(1) = gsn_add_polygon(wks, taylor, xx, yy, respl0) - else ; uncertainty circle covers only part of the plotting area - dist = rr + 1.0 - ; uncertainty circle has to be cropped - ; need to find intersection between uncertainty circule and outer arc -; if (dist .gt. max(rxy@tmXBValues)) then - if (dist .gt. 
xyMax) then - xtmp = (xyMax ^ 2 - rr ^ 2 + 1.0) / 2.0 - ytmp = sqrt(xyMax ^ 2 - xtmp ^ 2) - xtmp = xtmp - 1.0 - if (xtmp.eq.0.0) then - phitmp = 3.1415927 - else if (xtmp.gt.0.0) then - phitmp = 2.0 * 3.1415927 - atan(ytmp / xtmp) - else - phitmp = 3.1415927 + atan(ytmp / xtmp) - end if - end if - ang = fspan(3.1415927, phitmp, npts) - xx1 = 1.0 + rr * cos(ang) - yy1 = fabs(rr * sin(ang)) - - xtmp = xx1(npts - 1) - ytmp = yy1(npts - 1) - if (xtmp.eq.0.0) then - phitmp = 3.1415927 - else if (xtmp.gt.0.0) then - phitmp = 2.0 * 3.1415927 - atan(ytmp / xtmp) - else - phitmp = 3.1415927 + atan(ytmp / xtmp) - end if - end if - - ang2 = fspan(phitmp, 0.0, npts) - - xx2 = 0.995 * xyMax * cos(ang2) - yy2 = 0.995 * fabs(xyMax * sin(ang2)) - - if (isvar("xx")) then - delete(xx) - end if - if (isvar("yy")) then - delete(yy) - end if - xx = array_append_record(xx1, xx2, 0) - yy = array_append_record(yy1, yy2, 0) - rmsobj(0) = gsn_add_polygon(wks, taylor, xx, yy, respl0) - rmsobj(1) = gsn_add_polyline(wks, taylor, xx1, yy1, respl0) - delete(xx1) - delete(xx2) - delete(yy1) - delete(yy2) - delete(ang2) - else ; uncertainty circle does not need any adjustments - ang = fspan(180, 360, npts) * rad - xx = 1.0 + rr * cos(ang) - yy = fabs(rr * sin(ang)) - rmsobj(0) = gsn_add_polygon(wks, taylor, xx, yy, respl0) - rmsobj(1) = gsn_add_polyline(wks, taylor, xx, yy, respl0) - end if ; if uncertainty circle has to be cropped - - delete(ang) - delete(xx) - delete(yy) - - end if ; if uncertainty circle covers whole plotting area - end if ; if an uncertainty estimate is available - - rsrRes = True - rsrRes@gsLineThicknessF = rxy@xyLineThicknesses(0) - rsrRes@gsLineDashPattern = 0 - dum0 = gsn_add_polyline(wks, taylor, (/0., 0. /), (/0., xyMax/), rsrRes) - dum1 = gsn_add_polyline(wks, taylor, (/0., xyMax/), (/0., 0. /), rsrRes) - - xx = fspan(xyMin, xyOne, npts) - yy = sqrt(xyOne - xx ^ 2) - rsrRes@gsLineDashPattern = 1 - rsrRes@gsLineThicknessF = rxy@xyLineThicknesses(0) - dum2 = gsn_add_polyline(wks, taylor, xx, yy, rsrRes) - delete(xx) - delete(yy) - - if (rOpts .and. isatt(rOpts, "stnRad")) then - rsrRes@gsLineThicknessF = 1 - nStnRad = dimsizes(rOpts@stnRad) - - dum3 = new(nStnRad, graphic) - do n = 0, nStnRad - 1 - rr = rOpts@stnRad(n) - if (rr.gt.rmax) then - continue - end if - xx = fspan(xyMin, rr, npts) - yy = sqrt(rr ^ 2 - xx ^ 2) - dum3(n) = gsn_add_polyline(wks, taylor, xx, yy, rsrRes) - end do - taylor@$unique_string("dum")$ = dum3 - - delete(xx) - delete(yy) - end if - - getvalues taylor - "tmYLLabelFont" : tmYLLabelFont - "tmYLLabelFontHeightF" : tmYLLabelFontHeightF - end getvalues - -; ---------------------------------------------------------------- -; Part 2: -; Correlation labels -; ---------------------------------------------------------------- - radC = 1.02 * xyMax - xC = radC * cos(angC * rad) - yC = radC * sin(angC * rad) - - txRes = True - txRes@txFontHeightF = FontHeightF - txRes@tmYLLabelFont = tmYLLabelFont - txRes@txAngleF = -45. - if (.not.isatt(rOpts, "drawCorLabel") .or. rOpts@drawCorLabel) then - xdum = 1.15 * xyMax * cos(45.0 * rad) - ydum = 1.15 * xyMax * sin(45.0 * rad) - dum4 = gsn_add_text(wks, taylor, "Correlation", xdum, ydum, txRes) - taylor@$unique_string("dum")$ = dum4 - end if - txRes@txAngleF = 0.0 - txRes@txFontHeightF = FontHeightF * 0.50 - - plRes = True - plRes@gsLineThicknessF = 2. 
- txRes@txFontHeightF = FontHeightF - - tmEnd = 0.975 - radTM = xyMax * tmEnd - xTM = new(2, "float") - yTM = new(2, "float") - - dum5 = new(dimsizes(sLabels), graphic) - dum6 = dum5 - - do i = 0, dimsizes(sLabels) - 1 - txRes@txAngleF = angC(i) - if (angC(i).le.45.0) then - txRes@txJust = "CenterLeft" - else - txRes@txJust = "BottomCenter" - end if - dum5(i) = \ - gsn_add_text(wks, taylor, sLabels(i), xC(i), yC(i), txRes) - xTM(0) = xyMax * cos(angC(i) * rad) - yTM(0) = xyMax * sin(angC(i) * rad) - xTM(1) = radTM * cos(angC(i) * rad) - yTM(1) = radTM * sin(angC(i) * rad) - dum6(i) = gsn_add_polyline(wks, taylor, xTM, yTM, plRes) - end do - mTM = (/0.05, 0.15, 0.25, 0.35, 0.45, 0.55, 0.65, \ - 0.75, 0.85, 0.91, 0.92, 0.93, 0.94, 0.96, 0.97, 0.98/) - angmTM = acos(mTM) / rad - radmTM = xyMax * (1. - (1. - tmEnd) * 0.5) - - dum7 = new(dimsizes(mTM), graphic) - - do i = 0, dimsizes(mTM) - 1 - xTM(0) = xyMax * cos(angmTM(i) * rad) - yTM(0) = xyMax * sin(angmTM(i) * rad) - xTM(1) = radmTM * cos(angmTM(i) * rad) - yTM(1) = radmTM * sin(angmTM(i) * rad) - dum7(i) = gsn_add_polyline(wks, taylor, xTM, yTM, plRes) - end do - - if (rOpts .and. isatt(rOpts, "ccRays")) then - angRL = acos(rOpts@ccRays) / rad - rlRes = True - rlRes@gsLineDashPattern = 2 - rlRes@gsLineThicknessF = 1 - if (isatt(rOpts, "ccRays_color")) then - rlRes@gsLineColor = rOpts@ccRays_color - end if - - dum8 = new(dimsizes(angRL), graphic) - do i = 0, dimsizes(angRL) - 1 - xRL = xyMax * cos(angRL(i) * rad) - yRL = xyMax * sin(angRL(i) * rad) - dum8(i) = gsn_add_polyline(wks, taylor, (/0, xRL/), (/0, yRL/), rlRes) - end do - taylor@$unique_string("dum")$ = dum8 - end if - -; ---------------------------------------------------------------- -; Part 3: -; Concentric about 1.0 on XB axis -; ---------------------------------------------------------------- - if (rOpts .and. isatt(rOpts, "centerDiffRMS") .and. rOpts@centerDiffRMS) then - respl = True - respl@gsLineThicknessF = 2.0 - respl@gsLineColor = "Black" - if (isatt(rOpts, "centerDiffRMS_color")) then - respl@gsLineColor = rOpts@centerDiffRMS_color - end if - - dx = delta - ncon = 4 - npts = 100 ; arbitrary - ang = fspan(180, 360, npts) * rad - dum9 = new(ncon, graphic) - - do n = 1, ncon - rr = n * dx ; radius from 1.0 [OBS] abscissa - xx = 1. + rr * cos(ang) - yy = fabs(rr * sin(ang)) - dist_n = dx * n + 1.0 - if (dist_n .gt. max(rxy@tmXBValues)) then - xtmp = (xyMax ^ 2 - rr ^ 2 + 1.0) / 2.0 - ytmp = sqrt(xyMax ^ 2 - xtmp ^ 2) - xtmp = xtmp - 1.0 - if (xtmp.eq.0.0) then - phitmp = 3.1415927 - else if (xtmp.gt.0.0) then - phitmp = 2.0 * 3.1415927 - atan(ytmp / xtmp) - else - phitmp = 3.1415927 + atan(ytmp / xtmp) - end if - end if - ang2 = fspan(3.1415927, phitmp, npts) - xx2 = 1.0 + rr * cos(ang2) - yy2 = fabs(rr * sin(ang2)) - dum9(n - 1) = gsn_add_polyline(wks, taylor, xx2, yy2, respl) - delete(ang2) - delete(xx2) - delete(yy2) - else - dum9(n - 1) = gsn_add_polyline(wks, taylor, xx, yy, respl) - end if - delete(dist_n) - - end do - delete(ang) - delete(xx) - delete(yy) - taylor@$unique_string("dum")$ = dum9 - - end if - - ; draw filled black circle at (1, 0) to mark the reference point - if (rOpts .and. 
isatt(rOpts, "gsMarkerSizeF")) then - gsMarkerSizeF = rOpts@gsMarkerSizeF - else - gsMarkerSizeF = 0.0085 - end if - gsRes = True - gsRes@gsMarkerIndex = 16 - gsRes@gsMarkerColor = "Black" - gsRes@gsMarkerSizeF = gsMarkerSizeF - getvalues taylor - "vpXF" : vpx - "vpYF" : vpy - "vpWidthF" : vpw - "vpHeightF" : vph - end getvalues - dotx = vpx + 1.0 / xyMax_Panel * vpw - doty = vpy - vph - gsn_polymarker_ndc(wks, dotx, doty, gsRes) - dotobj = new(1, graphic) - dotobj = gsn_add_polymarker(wks, taylor, 1.0, 0.0, gsRes) - - -; --------------------------------------------------------------- -; Part 4: -; generic resources that will be applied to all users data points -; of course, these can be changed -; http://www.ncl.ucar.edu/Document/Graphics/Resources/gs.shtml -; --------------------------------------------------------------- - if (rOpts .and. isatt(rOpts, "Markers")) then - Markers = rOpts@Markers - else - Markers = (/4, 6, 8, 0, 9, 12, 7, 2, 11, 16/) - end if - - if (rOpts .and. isatt(rOpts, "Colors")) then - Colors = rOpts@Colors - else - Colors = (/"red", "blue", "green", "cyan", "orange", \ - "torquoise", "brown", "yellow", "purple", "black"/) - end if - - if (rOpts .and. isatt(rOpts, "gsMarkerThicknessF")) then - gsMarkerThicknessF = rOpts@gsMarkerThicknessF - else - gsMarkerThicknessF = 2.0 - end if - - if (rOpts .and. isatt(rOpts, "gsMarkerSizeF")) then - gsMarkerSizeF = rOpts@gsMarkerSizeF - else - gsMarkerSizeF = 0.0085 - end if - - gsRes = True - gsRes@gsMarkerThicknessF = gsMarkerThicknessF - gsRes@gsMarkerSizeF = gsMarkerSizeF - - ptRes = True - ptRes@txJust = "BottomCenter" - ptRes@txFontThicknessF = 1.2 - ptRes@txFontHeightF = 0.0125 - if (rOpts .and. isatt(rOpts, "txFontHeightF")) then - ptRes@txFontHeightF = rOpts@txFontHeightF - end if - - markerTxYOffset = 0.0175 - if (rOpts .and. isatt(rOpts, "markerTxYOffset")) then - markerTxYOffset = rOpts@markerTxYOffset - end if - - dum10 = new((nCase*nVar), graphic) - dum11 = dum10 - - do n = 0, nCase - 1 - gsRes@gsMarkerIndex = Markers(n) - gsRes@gsMarkerColor = Colors(n) - ptRes@txFontColor = gsRes@gsMarkerColor - do i = 0, nVar - 1 - dum10(n * nVar + i) = \ - gsn_add_polymarker(wks, taylor, X(n, i), Y(n, i), gsRes) - printdl = True - if (rOpts .and. isatt(rOpts, "printDataLabels")) then - if (.not.rOpts@printDataLabels) then - printdl = False - end if - end if - if (printdl) then - dum11(n * nVar + i) = \ - gsn_add_text(wks, taylor, (i + 1), X(n, i), \ - Y(n, i) + markerTxYOffset, ptRes) - end if - end do - end do - -; --------------------------------------------------------------- -; Part 5: ; add case legend and variable labels -; --------------------------------------------------------------- - - extrafile = False - - if (rOpts .and. isatt(rOpts, "legendExtraFile")) then - if (rOpts@legendExtraFile) then - extrafile = True - end if - end if - - if (rOpts .and. 
isatt(rOpts, "caseLabels")) then - - if (isatt(rOpts, "caseLabelsFontHeightF")) then - caseLabelsFontHeightF = rOpts@caseLabelsFontHeightF - else - caseLabelsFontHeightF = 0.05 - end if - - lgres = True - lgres@lgMarkerColors = Colors - lgres@lgMarkerIndexes = Markers - lgres@lgMarkerSizeF = gsMarkerSizeF - lgres@lgItemType = "Markers" - lgres@lgLabelFontHeightF = caseLabelsFontHeightF - - nModel = dimsizes(rOpts@caseLabels) - if (isatt(rOpts, "legendWidth")) then - lgres@vpWidthF = rOpts@legendWidth - else - lgres@vpWidthF = 0.15 - end if - - if (nModel.gt.20) then - lgres@vpWidthF = lgres@vpWidthF * 0.5 - lgres@lgMarkerSizeF = lgres@lgMarkerSizeF * 0.75 - end if - - lgres@lgPerimOn = False - if (isatt(rOpts, "legendBox")) then - if (rOpts@legendBox) then - lgres@lgPerimOn = True - lgres@lgRightMarginF = 0.1 - end if - end if - - if (nModel.le.20) then - if (isatt(rOpts, "reverseOrder")) then - if (rOpts@reverseOrder) then - lgres@lgItemOrder = ispan(nModel - 1, 0, 1) - end if - end if - if (isatt(rOpts, "legendHeight")) then - lgres@vpHeightF = rOpts@legendHeight - else - lgres@vpHeightF = 0.030 * nCase - end if - lbid = gsn_create_legend(legendwks, nModel, rOpts@caseLabels, lgres) - amres = True - if (isatt(rOpts, "legendXpos")) then - amres@amParallelPosF = rOpts@legendXpos - amres@amJust = "TopLeft" - else - amres@amParallelPosF = 0.35 - end if - if (isatt(rOpts, "legendYpos")) then - amres@amOrthogonalPosF = rOpts@legendYpos - amres@amJust = "TopLeft" - else - amres@amOrthogonalPosF = -0.35 - end if - if (extrafile) then - gsn_legend_ndc(legendwks, nModel, rOpts@caseLabels(0:nModel - 1), \ - 0.78, 0.75, lgres) - else - annoid1 = gsn_add_annotation(taylor, lbid, amres) - end if - else - if (isatt(rOpts, "modelsperrow")) then - modelsperrow = rOpts@modelsperrow - else - modelsperrow = (nModel + 1) / 2 - end if - do lr = 0, nModel / modelsperrow - ix0 = lr * modelsperrow - if (ix0.ge.nCase) then - break - end if - ix1 = min((/nCase - 1, ix0 + modelsperrow - 1/)) - dumdelta = ix1 - ix0 + 1 - - if (isatt(rOpts, "reverseOrder")) then - if (rOpts@reverseOrder) then - if (isatt(lgres, "lgItemOrder")) then - delete(lgres@lgItemOrder) - end if - lgres@lgItemOrder = ispan(dumdelta - 1, 0, 1) - end if - end if - - lgres@vpHeightF = 0.026 * dumdelta - - delete(lgres@lgMarkerColors) - delete(lgres@lgMarkerIndexes) - lgres@lgMarkerColors = Colors(ix0:ix1) - lgres@lgMarkerIndexes = Markers(ix0:ix1) - lgres@lgPerimOn = False - - if (extrafile) then - legend_x = 0.05 + 0.11 * lr - else - legend_x = 0.78 + 0.11 * lr - end if - - gsn_legend_ndc(legendwks, dumdelta, rOpts@caseLabels(ix0:ix1), \ - legend_x, 0.75, lgres) - end do - end if - end if - - if (rOpts .and. isatt(rOpts, "varLabels")) then - nVar = dimsizes(rOpts@varLabels) - - if (isatt(rOpts, "varLabelsFontHeightF")) then - varLabelsFontHeightF = rOpts@varLabelsFontHeightF - else - varLabelsFontHeightF = 0.013 - end if - - txres = True - txres@txFontHeightF = varLabelsFontHeightF - txres@txJust = "CenterLeft" - - delta_y = 0.06 - if (rOpts .and. 
isatt(rOpts, "varLabelsYloc")) then - ys = rOpts@varLabelsYloc - else - ys = max((/nVar * delta_y, 0.30/)) - end if - - do i = 1, nVar - if (i.eq.1) then - dum12 = new(nVar, graphic) - end if - - dum12(i - 1) = \ - gsn_add_text(wks, taylor, i + " - " + rOpts@varLabels(i - 1), \ - 0.125, ys, txres) - ys = ys - delta_y - end do - - taylor@$unique_string("dum")$ = dum12 - end if - - taylor@$unique_string("dum")$ = dum0 - taylor@$unique_string("dum")$ = dum1 - taylor@$unique_string("dum")$ = dum2 - taylor@$unique_string("dum")$ = dum5 - taylor@$unique_string("dum")$ = dum6 - taylor@$unique_string("dum")$ = dum7 - taylor@$unique_string("dum")$ = dum10 - taylor@$unique_string("dum")$ = dum11 - - if (.not.isatt(rOpts, "taylorDraw") .or. \ - (isatt(rOpts, "taylorDraw") .and. rOpts@taylorDraw)) then - draw(taylor) - end if - if (.not.isatt(rOpts, "taylorFrame") .or. \ - (isatt(rOpts, "taylorFrame") .and. rOpts@taylorFrame)) then - frame(wks) - end if - - return(taylor) -end diff --git a/esmvaltool/diag_scripts/shared/plot/taylor_plot.ncl b/esmvaltool/diag_scripts/shared/plot/taylor_plot.ncl index c01851840d..b50af96114 100644 --- a/esmvaltool/diag_scripts/shared/plot/taylor_plot.ncl +++ b/esmvaltool/diag_scripts/shared/plot/taylor_plot.ncl @@ -7,6 +7,7 @@ ; Contents: ; ; function taylor_plot +; function taylor_diagram ; ; ############################################################################# @@ -17,9 +18,7 @@ load "$diag_scripts/shared/set_operators.ncl" load "$diag_scripts/shared/plot/aux_plotting.ncl" - ; ############################################################################# - undef("taylor_plot") function taylor_plot(wks_in[1], source, @@ -50,8 +49,8 @@ function taylor_plot(wks_in[1], ; plot frame: one for the variables (markers) and one for the ; models (numbers). ; -; Modification history: -; 20150505-A_righ_ma: written based on the original NCL code. +; Modification history +; 20150505-righi_mattia: written based on the original NCL code. ; local funcname, scriptname begin @@ -444,3 +443,769 @@ begin return(taylor) end + +; ############################################################################# +function taylor_diagram(wks:graphic, + legendwks:graphic, + RATIO[*][*]:numeric, + CC[*][*]:numeric, + rOpts:logical) +; +; Arguments +; wks: workstation. +; legendwks: workstation for the legend. +; RATIO: ratio of the standard deviations. +; CC: correlation. +; rOpts: graphical resources. +; +; Return value: +; A graphic variable. +; +; Caveats +; +; Modification history +; 20190605-righi_mattia: ported from v1. 
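+;
+; Example usage (a minimal, illustrative sketch only; "wks", "RATIO" and
+; "CC" are assumed to be provided by the calling diagnostic, with RATIO
+; and CC dimensioned (nCase, nVar)):
+;
+;   rOpts = True
+;   rOpts@caseLabels = (/"Model A", "Model B"/)  ; one label per case
+;   rOpts@varLabels = (/"Variable 1"/)  ; one label per variable
+;   plot = taylor_diagram(wks, wks, RATIO, CC, rOpts)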
+;
+local dimR, nCase, nVar, X, Y, nc, angle, xyMin, xyOne, ceil_max_val, xyMax, \
+  xyMax_Panel, FontHeightF, rxy, tempo_values, tempo_values1, temp_string, \
+  temp1_string, temp_string_y, temp_string_y1, npts, xx, yy, sLabels, \
+  cLabels, rad, angC, taylor, rsrRes, dum0, dum1, dum2, nStnRad, rr, radC, \
+  xC, yC, txRes, dum4, plRes, tmEnd, radTM, xTM, yTM, dum5, dum6, i, mTM, \
+  angmTM, radmTM, dum7, angRL, rlRes, dum8, xRL, yRL, respl, dx, ncon, npts, \
+  ang, dum9, dist_n, Markers, Colors, gsRes, ptRes, markerTxYOffset, dum10, \
+  dum11, n, lgres, nModel, lbid, lbid2, amres, annoid1, nVar, \
+  varLabelsFontHeightF, txres, delta_y, ys, dum12
+begin
+
+  dimR = dimsizes(RATIO)
+  nCase = dimR(0)  ; # of cases [models]
+  nVar = dimR(1)  ; # of variables
+
+  ; x/y coordinates for plotting
+  X = new((/nCase, nVar/), typeof(RATIO))
+  Y = new((/nCase, nVar/), typeof(RATIO))
+
+  do nc = 0, nCase - 1
+    angle = acos(CC(nc, :))
+    X(nc, :) = RATIO(nc, :) * cos(angle)
+    Y(nc, :) = RATIO(nc, :) * sin(angle)
+  end do
+
+  ; fix to nearest x.5
+  xyMin = 0.
+  xyOne = 1.00
+  rmax = ceil(max(RATIO))
+  if (rmax.lt.1.5) then
+    rmax = 1.5
+  end if
+  if (rmax.gt.3.5) then
+    rmax = 3.5
+  end if
+  xyMax = rmax + 0.1
+  xyMax_Panel = xyMax + 0.10
+
+  if (rOpts .and. isatt(rOpts, "txFontHeightF")) then
+    FontHeightF = rOpts@txFontHeightF
+  else
+    FontHeightF = 0.0175
+  end if
+
+  ; ----------------------------------------------------------------
+  ; Part 1:
+  ; base plot: Based upon request of Mark Stevens
+  ; basic x-y and draw the 1.0 observed arc and the outer curve at xyMax
+  ; ----------------------------------------------------------------
+
+  rxy = True
+  rxy@gsnDraw = False
+  rxy@gsnFrame = False
+  rxy@vpHeightF = 0.65
+  rxy@vpWidthF = 0.65
+  if (rOpts .and. isatt(rOpts, "plotSize")) then
+    rxy@vpHeightF = rOpts@plotSize
+    rxy@vpWidthF = rOpts@plotSize
+    rxy@vpXF = 0.125
+    rxy@vpYF = 0.825
+  end if
+  rxy@tmYLBorderOn = False
+  rxy@tmXBBorderOn = False
+
+  rxy@tiYAxisString = "Standardized Deviations (Normalized)"
+  rxy@tiYAxisFontHeightF = FontHeightF
+
+  rxy@tmXBMode = "Explicit"
+  delta = 0.5
+  if (rmax.le.1.5) then
+    delta = 0.25
+  end if
+  tempo_values = \
+    decimalPlaces(fspan(0.0, rmax, toint(rmax / delta) + 1), 2, True)
+  tempo_values1 = tempo_values(ind(tempo_values .lt. xyMax))
+  delete(tempo_values)
+  rxy@tmXBValues = tempo_values1
+  delete(tempo_values1)
+  temp_string = tostring(sprintf("%4.2f", rxy@tmXBValues))
+  temp_string(0) = " "
+  temp1_string = where(temp_string .eq. "1.00", "REF", temp_string)
+  rxy@tmXBLabels = temp1_string
+  if (rOpts .and. isatt(rOpts, "OneX")) then
+    rxy@tmXBLabels = temp_string
+  end if
+  delete(temp_string)
+  delete(temp1_string)
+
+  rxy@tmXBMajorLengthF = 0.015
+  rxy@tmXBLabelFontHeightF = FontHeightF
+  rxy@tmXBMinorOn = False
+  rxy@trXMaxF = xyMax_Panel
+  rxy@tmYLMode = "Manual"
+  rxy@tmYLMinorOn = False
+  rxy@tmYLMajorLengthF = rxy@tmXBMajorLengthF
+  rxy@tmYLLabelFontHeightF = FontHeightF
+  rxy@tmYLMode = "Explicit"
+  rxy@tmYLValues = rxy@tmXBValues
+  temp_string_y = rxy@tmXBLabels
+  temp_string_y(0) = "0.00"
+  temp_string_y1 = where(temp_string_y .eq. "REF", "1.00", temp_string_y)
+  delete(temp_string_y)
+  rxy@tmYLLabels = temp_string_y1
+  delete(temp_string_y1)
+  rxy@trYMaxF = xyMax_Panel
+
+  rxy@tmYRBorderOn = False
+  rxy@tmYROn = False
+  rxy@tmXTBorderOn = False
+  rxy@tmXTOn = False
+  rxy@xyDashPatterns = (/0 /)
+  rxy@xyLineThicknesses = (/2./)
+  rxy@gsnFrame = False
+  npts = 100  ; arbitrary
+  xx = fspan(xyMin, xyMax, npts)
+  yy = sqrt(xyMax ^ 2 - xx ^ 2)  ; outer correlation line (xyMax)
+  sLabels = (/"0.0", "0.1", "0.2", "0.3", "0.4", "0.5", "0.6", \
+              "0.7", "0.8", "0.9", "0.95", "0.99", "1.0" /)
+  cLabels = stringtofloat(sLabels)
+  rad = 4. * atan(1.0) / 180.
+  angC = acos(cLabels) / rad  ; angles: correlation labels
+  if (rOpts .and. isatt(rOpts, "tiMainString")) then
+    rxy@tiMainString = rOpts@tiMainString
+    if (isatt(rOpts, "tiMainFontHeightF")) then
+      rxy@tiMainFontHeightF = rOpts@tiMainFontHeightF
+    else
+      rxy@tiMainFontHeightF = 0.0225
+    end if
+    if (isatt(rOpts, "tiMainOffsetYF")) then
+      rxy@tiMainOffsetYF = rOpts@tiMainOffsetYF
+    end if
+  end if
+
+  taylor = gsn_xy(wks, xx, yy, rxy)
+
+  ; --------------------------------------------------------------
+  ; draw observational uncertainties given as normalized RMS error
+  ; around the observational reference point at (1, 0)
+  ; --------------------------------------------------------------
+
+  if (rOpts .and. isatt(rOpts, "rmsobs")) then
+    npts = 100  ; arbitrary
+    rmsobj = new(2, graphic)
+
+    respl0 = True
+    respl0@gsLineThicknessF = 4.0
+    respl0@gsLineDashPattern = 0
+    respl0@gsLineColor = (/0.50, 0.95, 0.47/)
+    respl0@gsFillColor = (/0.85, 1.00, 0.80/)
+    rr = rOpts@rmsobs
+
+    ; uncertainty circle covers whole plotting area
+    if (rr.ge.xyMax) then
+      ang = fspan(180, 360, npts) * rad
+      xx = 0.995 * xyMax * cos(ang)
+      yy = fabs(0.995 * xyMax * sin(ang))
+      rmsobj(0) = gsn_add_polygon(wks, taylor, xx, yy, respl0)
+      rmsobj(1) = gsn_add_polygon(wks, taylor, xx, yy, respl0)
+    else  ; uncertainty circle covers only part of the plotting area
+      dist = rr + 1.0
+      ; uncertainty circle has to be cropped
+      ; need to find intersection between uncertainty circle and outer arc
+      if (dist .gt.
xyMax) then + xtmp = (xyMax ^ 2 - rr ^ 2 + 1.0) / 2.0 + ytmp = sqrt(xyMax ^ 2 - xtmp ^ 2) + xtmp = xtmp - 1.0 + if (xtmp.eq.0.0) then + phitmp = 3.1415927 + else if (xtmp.gt.0.0) then + phitmp = 2.0 * 3.1415927 - atan(ytmp / xtmp) + else + phitmp = 3.1415927 + atan(ytmp / xtmp) + end if + end if + ang = fspan(3.1415927, phitmp, npts) + xx1 = 1.0 + rr * cos(ang) + yy1 = fabs(rr * sin(ang)) + + xtmp = xx1(npts - 1) + ytmp = yy1(npts - 1) + if (xtmp.eq.0.0) then + phitmp = 3.1415927 + else if (xtmp.gt.0.0) then + phitmp = 2.0 * 3.1415927 - atan(ytmp / xtmp) + else + phitmp = 3.1415927 + atan(ytmp / xtmp) + end if + end if + + ang2 = fspan(phitmp, 0.0, npts) + + xx2 = 0.995 * xyMax * cos(ang2) + yy2 = 0.995 * fabs(xyMax * sin(ang2)) + + if (isvar("xx")) then + delete(xx) + end if + if (isvar("yy")) then + delete(yy) + end if + xx = array_append_record(xx1, xx2, 0) + yy = array_append_record(yy1, yy2, 0) + rmsobj(0) = gsn_add_polygon(wks, taylor, xx, yy, respl0) + rmsobj(1) = gsn_add_polyline(wks, taylor, xx1, yy1, respl0) + delete(xx1) + delete(xx2) + delete(yy1) + delete(yy2) + delete(ang2) + else ; uncertainty circle does not need any adjustments + ang = fspan(180, 360, npts) * rad + xx = 1.0 + rr * cos(ang) + yy = fabs(rr * sin(ang)) + rmsobj(0) = gsn_add_polygon(wks, taylor, xx, yy, respl0) + rmsobj(1) = gsn_add_polyline(wks, taylor, xx, yy, respl0) + end if ; if uncertainty circle has to be cropped + + delete(ang) + delete(xx) + delete(yy) + + end if ; if uncertainty circle covers whole plotting area + end if ; if an uncertainty estimate is available + + rsrRes = True + rsrRes@gsLineThicknessF = rxy@xyLineThicknesses(0) + rsrRes@gsLineDashPattern = 0 + dum0 = gsn_add_polyline(wks, taylor, (/0., 0. /), (/0., xyMax/), rsrRes) + dum1 = gsn_add_polyline(wks, taylor, (/0., xyMax/), (/0., 0. /), rsrRes) + + xx = fspan(xyMin, xyOne, npts) + yy = sqrt(xyOne - xx ^ 2) + rsrRes@gsLineDashPattern = 1 + rsrRes@gsLineThicknessF = rxy@xyLineThicknesses(0) + dum2 = gsn_add_polyline(wks, taylor, xx, yy, rsrRes) + delete(xx) + delete(yy) + + if (rOpts .and. isatt(rOpts, "stnRad")) then + rsrRes@gsLineThicknessF = 1 + nStnRad = dimsizes(rOpts@stnRad) + + dum3 = new(nStnRad, graphic) + do n = 0, nStnRad - 1 + rr = rOpts@stnRad(n) + if (rr.gt.rmax) then + continue + end if + xx = fspan(xyMin, rr, npts) + yy = sqrt(rr ^ 2 - xx ^ 2) + dum3(n) = gsn_add_polyline(wks, taylor, xx, yy, rsrRes) + end do + taylor@$unique_string("dum")$ = dum3 + + delete(xx) + delete(yy) + end if + + getvalues taylor + "tmYLLabelFont" : tmYLLabelFont + "tmYLLabelFontHeightF" : tmYLLabelFontHeightF + end getvalues + + ; ---------------------------------------------------------------- + ; Part 2: + ; Correlation labels + ; ---------------------------------------------------------------- + radC = 1.02 * xyMax + xC = radC * cos(angC * rad) + yC = radC * sin(angC * rad) + + txRes = True + txRes@txFontHeightF = FontHeightF + txRes@tmYLLabelFont = tmYLLabelFont + txRes@txAngleF = -45. + if (.not.isatt(rOpts, "drawCorLabel") .or. rOpts@drawCorLabel) then + xdum = 1.15 * xyMax * cos(45.0 * rad) + ydum = 1.15 * xyMax * sin(45.0 * rad) + dum4 = gsn_add_text(wks, taylor, "Correlation", xdum, ydum, txRes) + taylor@$unique_string("dum")$ = dum4 + end if + txRes@txAngleF = 0.0 + txRes@txFontHeightF = FontHeightF * 0.50 + + plRes = True + plRes@gsLineThicknessF = 2. 
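+
+  ; draw the correlation labels and tick marks along the outer arc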
+ txRes@txFontHeightF = FontHeightF + + tmEnd = 0.975 + radTM = xyMax * tmEnd + xTM = new(2, "float") + yTM = new(2, "float") + + dum5 = new(dimsizes(sLabels), graphic) + dum6 = dum5 + + do i = 0, dimsizes(sLabels) - 1 + txRes@txAngleF = angC(i) + if (angC(i).le.45.0) then + txRes@txJust = "CenterLeft" + else + txRes@txJust = "BottomCenter" + end if + dum5(i) = \ + gsn_add_text(wks, taylor, sLabels(i), xC(i), yC(i), txRes) + xTM(0) = xyMax * cos(angC(i) * rad) + yTM(0) = xyMax * sin(angC(i) * rad) + xTM(1) = radTM * cos(angC(i) * rad) + yTM(1) = radTM * sin(angC(i) * rad) + dum6(i) = gsn_add_polyline(wks, taylor, xTM, yTM, plRes) + end do + mTM = (/0.05, 0.15, 0.25, 0.35, 0.45, 0.55, 0.65, \ + 0.75, 0.85, 0.91, 0.92, 0.93, 0.94, 0.96, 0.97, 0.98/) + angmTM = acos(mTM) / rad + radmTM = xyMax * (1. - (1. - tmEnd) * 0.5) + + dum7 = new(dimsizes(mTM), graphic) + + do i = 0, dimsizes(mTM) - 1 + xTM(0) = xyMax * cos(angmTM(i) * rad) + yTM(0) = xyMax * sin(angmTM(i) * rad) + xTM(1) = radmTM * cos(angmTM(i) * rad) + yTM(1) = radmTM * sin(angmTM(i) * rad) + dum7(i) = gsn_add_polyline(wks, taylor, xTM, yTM, plRes) + end do + + if (rOpts .and. isatt(rOpts, "ccRays")) then + angRL = acos(rOpts@ccRays) / rad + rlRes = True + rlRes@gsLineDashPattern = 2 + rlRes@gsLineThicknessF = 1 + if (isatt(rOpts, "ccRays_color")) then + rlRes@gsLineColor = rOpts@ccRays_color + end if + + dum8 = new(dimsizes(angRL), graphic) + do i = 0, dimsizes(angRL) - 1 + xRL = xyMax * cos(angRL(i) * rad) + yRL = xyMax * sin(angRL(i) * rad) + dum8(i) = gsn_add_polyline(wks, taylor, (/0, xRL/), (/0, yRL/), rlRes) + end do + taylor@$unique_string("dum")$ = dum8 + end if + + ; ---------------------------------------------------------------- + ; Part 3: + ; Concentric about 1.0 on XB axis + ; ---------------------------------------------------------------- + if (rOpts .and. isatt(rOpts, "centerDiffRMS") .and. rOpts@centerDiffRMS) then + respl = True + respl@gsLineThicknessF = 2.0 + respl@gsLineColor = "Black" + if (isatt(rOpts, "centerDiffRMS_color")) then + respl@gsLineColor = rOpts@centerDiffRMS_color + end if + + respl2 = True + respl2@txFontHeightF = 0.015 + respl2@txFontColor = respl@gsLineColor + respl2@txBackgroundFillColor = "White" + + dx = delta + ncon = toint(rmax * 2) + npts = 100 ; arbitrary + n10 = toint(npts * 0.1) + n90 = toint(npts * 0.9) + ang = fspan(180, 360, npts) * rad + dum9 = new(ncon, graphic) + dum9a = new(ncon, graphic) + + pi = 3.1415927 + + do n = 1, ncon + rr = n * dx ; radius from 1.0 [OBS] abscissa + xx = 1. + rr * cos(ang) + yy = fabs(rr * sin(ang)) + dist_n = dx * n + 1.0 + if (dist_n .gt. max(rxy@tmXBValues)) then + xtmp = (xyMax ^ 2 - rr ^ 2 + 1.0) / 2.0 + ytmp = sqrt(xyMax ^ 2 - xtmp ^ 2) + xtmp = xtmp - 1.0 + if (xtmp.eq.0.0) then + phitmp = 3.1415927 + else if (xtmp.gt.0.0) then + phitmp = 2.0 * pi - atan(ytmp / xtmp) + else + phitmp = pi + atan(ytmp / xtmp) + end if + end if + + ang2 = fspan(pi, phitmp, npts) + xx2 = 1.0 + rr * cos(ang2) + yy2 = fabs(rr * sin(ang2)) + + if (phitmp .lt. pi) then + tmpang = ang2(n10) + pi / 2.0 + else + tmpang = ang2(n90) + end if + + dum9(n - 1) = gsn_add_polyline(wks, taylor, xx2, yy2, respl) + xl = xx2(n90) + yl = yy2(n90) + respl2@txAngleF = 630.0 - tmpang / pi * 180.0 + if (respl2@txAngleF .gt. 
360.0) then
+          respl2@txAngleF = respl2@txAngleF - 360.0
+        end if
+        delete(ang2)
+        delete(xx2)
+        delete(yy2)
+      else
+        dum9(n - 1) = gsn_add_polyline(wks, taylor, xx, yy, respl)
+        xl = xx(n90)
+        yl = yy(n90)
+        respl2@txAngleF = 630.0 - ang(n90) / pi * 180.0
+        if (respl2@txAngleF .gt. 360.0) then
+          respl2@txAngleF = respl2@txAngleF - 360.0
+        end if
+      end if
+      delete(dist_n)
+
+      dum9a(n - 1) = gsn_add_text(wks, taylor, sprintf("%3.1f", n * 0.5), \
+                                  xl, yl, respl2)
+
+    end do
+    delete(ang)
+    delete(xx)
+    delete(yy)
+    taylor@$unique_string("dum")$ = dum9
+
+  end if
+
+  ; draw filled black circle at (1, 0) to mark the reference point
+  if (rOpts .and. isatt(rOpts, "gsMarkerSizeF")) then
+    gsMarkerSizeF = rOpts@gsMarkerSizeF
+  else
+    gsMarkerSizeF = 0.0085
+  end if
+  gsRes = True
+  gsRes@gsMarkerIndex = 16
+  gsRes@gsMarkerColor = "Black"
+  gsRes@gsMarkerSizeF = gsMarkerSizeF
+  getvalues taylor
+    "vpXF" : vpx
+    "vpYF" : vpy
+    "vpWidthF" : vpw
+    "vpHeightF" : vph
+  end getvalues
+  dotx = vpx + 1.0 / xyMax_Panel * vpw
+  doty = vpy - vph
+  gsn_polymarker_ndc(wks, dotx, doty, gsRes)
+  dotobj = new(1, graphic)
+  dotobj = gsn_add_polymarker(wks, taylor, 1.0, 0.0, gsRes)
+
+  ; ---------------------------------------------------------------
+  ; Part 4:
+  ; generic resources that will be applied to all user data points
+  ; of course, these can be changed
+  ; http://www.ncl.ucar.edu/Document/Graphics/Resources/gs.shtml
+  ; ---------------------------------------------------------------
+  if (rOpts .and. isatt(rOpts, "Markers")) then
+    Markers = rOpts@Markers
+  else
+    Markers = (/4, 6, 8, 0, 9, 12, 7, 2, 11, 16/)
+  end if
+
+  if (rOpts .and. isatt(rOpts, "Colors")) then
+    Colors = rOpts@Colors
+  else
+    Colors = (/"red", "blue", "green", "cyan", "orange", \
+               "turquoise", "brown", "yellow", "purple", "black"/)
+  end if
+
+  if (rOpts .and. isatt(rOpts, "gsMarkerThicknessF")) then
+    gsMarkerThicknessF = rOpts@gsMarkerThicknessF
+  else
+    gsMarkerThicknessF = 2.0
+  end if
+
+  if (rOpts .and. isatt(rOpts, "gsMarkerSizeF")) then
+    gsMarkerSizeF = rOpts@gsMarkerSizeF
+  else
+    gsMarkerSizeF = 0.0085
+  end if
+
+  gsRes = True
+  gsRes@gsMarkerThicknessF = gsMarkerThicknessF
+  gsRes@gsMarkerSizeF = gsMarkerSizeF
+
+  ptRes = True
+  ptRes@txJust = "BottomCenter"
+  ptRes@txFontThicknessF = 1.2
+  ptRes@txFontHeightF = 0.0125
+  if (rOpts .and. isatt(rOpts, "txFontHeightF")) then
+    ptRes@txFontHeightF = rOpts@txFontHeightF
+  end if
+
+  markerTxYOffset = 0.0175
+  if (rOpts .and. isatt(rOpts, "markerTxYOffset")) then
+    markerTxYOffset = rOpts@markerTxYOffset
+  end if
+
+  dum10 = new((nCase*nVar), graphic)
+  dum11 = dum10
+
+  do n = 0, nCase - 1
+    gsRes@gsMarkerIndex = Markers(n)
+    gsRes@gsMarkerColor = Colors(n)
+    ptRes@txFontColor = gsRes@gsMarkerColor
+    do i = 0, nVar - 1
+      dum10(n * nVar + i) = \
+        gsn_add_polymarker(wks, taylor, X(n, i), Y(n, i), gsRes)
+      printdl = True
+      if (rOpts .and. isatt(rOpts, "printDataLabels")) then
+        if (.not.rOpts@printDataLabels) then
+          printdl = False
+        end if
+      end if
+      if (printdl) then
+        dum11(n * nVar + i) = \
+          gsn_add_text(wks, taylor, (i + 1), X(n, i), \
+                       Y(n, i) + markerTxYOffset, ptRes)
+      end if
+    end do
+  end do
+
+  ; ---------------------------------------------------------------
+  ; Part 5: add case legend and variable labels
+  ; ---------------------------------------------------------------
+  extrafile = False
+
+  if (rOpts .and. isatt(rOpts, "legendExtraFile")) then
+    if (rOpts@legendExtraFile) then
+      extrafile = True
+    end if
+  end if
+
+  if (rOpts .and.
isatt(rOpts, "caseLabels")) then + + if (isatt(rOpts, "caseLabelsFontHeightF")) then + caseLabelsFontHeightF = rOpts@caseLabelsFontHeightF + else + caseLabelsFontHeightF = 0.05 + end if + + lgres = True + lgres@lgMarkerSizeF = gsMarkerSizeF + lgres@lgLabelFontHeightF = caseLabelsFontHeightF + lgres@lgItemType = "Markers" + + if (isatt(rOpts, "legend_filter")) then + iii = ispan(0, dimsizes(rOpts@projects) - 1, 1) + iii = 1 + do i = 0, dimsizes(rOpts@legend_filter) - 1 + do ii = 0, dimsizes(rOpts@projects) - 1 + if (str_lower(rOpts@projects(ii)) .eq. \ + str_lower(rOpts@legend_filter(i))) then + iii(ii) = -1 + end if + end do + end do + testind = ind(iii .eq. -1) ; indices of datasets to be filtered out + if (.not. all(ismissing(testind))) then + iselect = ind(iii .ne. -1) ; datasets *not* to be filtered out + lgres@lgMarkerColors = Colors(iselect) + lgres@lgMarkerIndexes = Markers(iselect) + Labels = rOpts@caseLabels(iselect) + do i = 0, dimsizes(rOpts@legend_filter) - 1 + auxind = ind(str_lower(rOpts@projects) .eq. \ + str_lower(rOpts@legend_filter(i))) + if (.not. all(ismissing(auxind))) then + color0 := Colors(auxind(0)) + marker0 := Markers(auxind(0)) + label0 = rOpts@legend_filter(i) + lgres@lgMarkerColors := array_append_record(color0, \ + lgres@lgMarkerColors, \ + 0) + lgres@lgMarkerIndexes := \ + array_append_record(marker0, lgres@lgMarkerIndexes, 0) + Labels := array_append_record(label0, Labels, 0) + end if + delete(auxind) + end do + else + lgres@lgMarkerColors = Colors + lgres@lgMarkerIndexes = Markers + end if + else + lgres@lgMarkerColors = Colors + lgres@lgMarkerIndexes = Markers + Labels = rOpts@caseLabels + end if + + nModel = dimsizes(Labels) + if (isatt(rOpts, "legendWidth")) then + lgres@vpWidthF = rOpts@legendWidth + else + lgres@vpWidthF = 0.15 + end if + + if (nModel.gt.20) then + lgres@vpWidthF = lgres@vpWidthF * 0.5 + lgres@lgMarkerSizeF = lgres@lgMarkerSizeF * 0.75 + end if + + lgres@lgPerimOn = False + if (isatt(rOpts, "legendBox")) then + if (rOpts@legendBox) then + lgres@lgPerimOn = True + lgres@lgRightMarginF = 0.1 + end if + end if + + if (nModel.le.20) then + if (isatt(rOpts, "reverseOrder")) then + if (rOpts@reverseOrder) then + lgres@lgItemOrder = ispan(nModel - 1, 0, 1) + end if + end if + if (isatt(rOpts, "legendHeight")) then + lgres@vpHeightF = rOpts@legendHeight + else + lgres@vpHeightF = 0.030 * nModel + end if + lbid = gsn_create_legend(legendwks, nModel, Labels, lgres) + amres = True + if (isatt(rOpts, "legendXpos")) then + amres@amParallelPosF = rOpts@legendXpos + amres@amJust = "TopLeft" + else + amres@amParallelPosF = 0.35 + end if + if (isatt(rOpts, "legendYpos")) then + amres@amOrthogonalPosF = rOpts@legendYpos + amres@amJust = "TopLeft" + else + amres@amOrthogonalPosF = -0.35 + end if + if (extrafile) then + gsn_legend_ndc(legendwks, nModel, Labels(0:nModel - 1), \ + 0.78, 0.75, lgres) + else + annoid1 = gsn_add_annotation(taylor, lbid, amres) + end if + else + if (isatt(rOpts, "modelsperrow")) then + modelsperrow = rOpts@modelsperrow + else + modelsperrow = (nModel + 1) / 2 + end if + + do lr = 0, nModel / modelsperrow + ix0 = lr * modelsperrow + if (ix0.ge.nCase) then + break + end if + ix1 = min((/nCase - 1, ix0 + modelsperrow - 1/)) + dumdelta = ix1 - ix0 + 1 + + if (isatt(rOpts, "reverseOrder")) then + if (rOpts@reverseOrder) then + if (isatt(lgres, "lgItemOrder")) then + delete(lgres@lgItemOrder) + end if + lgres@lgItemOrder = ispan(dumdelta - 1, 0, 1) + end if + end if + + lgres@vpHeightF = 0.026 * dumdelta + + 
delete(lgres@lgMarkerColors) + delete(lgres@lgMarkerIndexes) + lgres@lgMarkerColors = Colors(ix0:ix1) + lgres@lgMarkerIndexes = Markers(ix0:ix1) + lgres@lgPerimOn = False + + if (extrafile) then + legend_x = 0.05 + 0.11 * lr + else + legend_x = 0.78 + 0.11 * lr + end if + + gsn_legend_ndc(legendwks, dumdelta, Labels(ix0:ix1), \ + legend_x, 0.75, lgres) + end do + end if + end if + + if (rOpts .and. isatt(rOpts, "varLabels")) then + nVar = dimsizes(rOpts@varLabels) + + if (isatt(rOpts, "varLabelsFontHeightF")) then + varLabelsFontHeightF = rOpts@varLabelsFontHeightF + else + varLabelsFontHeightF = 0.013 + end if + + txres = True + txres@txFontHeightF = varLabelsFontHeightF + txres@txJust = "CenterLeft" + + delta_y = 0.06 + if (rOpts .and. isatt(rOpts, "varLabelsYloc")) then + ys = rOpts@varLabelsYloc + else + ys = max((/nVar * delta_y, 0.30/)) + end if + + do i = 1, nVar + if (i.eq.1) then + dum12 = new(nVar, graphic) + end if + + dum12(i - 1) = \ + gsn_add_text(wks, taylor, i + " - " + rOpts@varLabels(i - 1), \ + 0.125, ys, txres) + ys = ys - delta_y + end do + + taylor@$unique_string("dum")$ = dum12 + end if + + taylor@$unique_string("dum")$ = dum0 + taylor@$unique_string("dum")$ = dum1 + taylor@$unique_string("dum")$ = dum2 + taylor@$unique_string("dum")$ = dum5 + taylor@$unique_string("dum")$ = dum6 + taylor@$unique_string("dum")$ = dum7 + taylor@$unique_string("dum")$ = dum10 + taylor@$unique_string("dum")$ = dum11 + + if (.not.isatt(rOpts, "taylorDraw") .or. \ + (isatt(rOpts, "taylorDraw") .and. rOpts@taylorDraw)) then + draw(taylor) + end if + if (.not.isatt(rOpts, "taylorFrame") .or. \ + (isatt(rOpts, "taylorFrame") .and. rOpts@taylorFrame)) then + frame(wks) + end if + + return(taylor) + +end diff --git a/esmvaltool/diag_scripts/shared/plot/vector_scalar_map_polar.ncl b/esmvaltool/diag_scripts/shared/plot/vector_scalar_map_polar.ncl deleted file mode 100644 index c3635568c6..0000000000 --- a/esmvaltool/diag_scripts/shared/plot/vector_scalar_map_polar.ncl +++ /dev/null @@ -1,135 +0,0 @@ -; ############################################################################# -; CONTOURS AND VECTORS ON A POLARSTEREOGRAPHIC MAP # -; ############################################################################# -; Please consider using or extending existing routines before adding new ones. -; Check the header of each routine for documentation. -; -; function vector_scalar_map_polar: wrapper for -; gsn_csm_vector_scalar_map_polar -; -; ############################################################################# - -load "$diag_scripts/../interface_scripts/auxiliary.ncl" -load "$diag_scripts/../interface_scripts/logging.ncl" - -load "$diag_scripts/shared/plot/aux_plotting.ncl" - -; ############################################################################# -undef("vector_scalar_map_polar") -function vector_scalar_map_polar(wks_in[1], - sourceu, - sourcev, - sourcec, - res : logical, - varnameu[1] : string, - varnamev[1] : string, - varnamec[1] : string) -; -; Arguments -; wks : workstation, must be passed - no default used. -; sourceu : u vector data to be plotted. -; sourcev : v vector data to be plotted. -; sourcec : contour data to be plotted. -; res : diag_script-specific resources passed from diag_script. -; varnameu: u vector variable name in the file. -; varnamev: v vector variable name in the file. -; varnamec: contour vector variable name in the file. -; -; Source prototype -; source[*,*] -; source!0 = lat -; source!1 = lon -; -; Return value -; A graphic variable. 
-; -; Description -; Wrapper for gsn_csm_vector_scalar_map_polar (http://www.ncl.ucar.edu/ -; Document/Graphics/Interfaces/gsn_csm_vector_scalar_map_polar.shtml) -; Combines local resources and accepts diag_script-specific resources -; Creates plot, according to wks & res -; -; Caveats -; Selection of defaults for res almost arbitrary -; Please check results of all scripts that use this routine if modifying -; the defaults! -; -; Modification history -; 20141016-A_vanu_be: adapted from contour_map_polar.ncl. -; -local areaplot, datac, datau, datav, funcname, scriptname, sourcec, sourceu, \ - sourcev, res, loc_res, varnamec, varnameu, varnamev, wks_in -begin - - funcname = "vector_scalar_map_polar" - scriptname = "diag_scripts/shared/plot/vector_scalar_map_polar.ncl" - enter_msg(scriptname, funcname) - - ; Get data, either directly or via netCDF file - if (typeof(sourceu) .eq. "string") then - datau = ncdf_read(sourceu, varnameu) - else - datau = sourceu - copy_VarMeta(sourceu, datau) - end if - if (typeof(sourcev) .eq. "string") then - datav = ncdf_read(sourcev, varnamev) - else - datav = sourcev - copy_VarMeta(sourcev, datav) - end if - if (typeof(sourcec) .eq. "string") then - datac = ncdf_read(sourcec, varnamec) - else - datac = sourcec - copy_VarMeta(sourcec, datac) - end if - - ; additional plot resources to be added to res from diag_script - loc_res = True - loc_res = res - loc_res@gsnDraw = False - loc_res@gsnFrame = False ; Don't advance frame. - loc_res@gsnLeftStringOrthogonalPosF = 0.1 ; shift the string up a bit - loc_res@cnFillOn = True ; Turn on color - loc_res@lbLabelBarOn = False ; Turn on later in panel - loc_res@vcVectorDrawOrder = "PostDraw" ; draw vectors last - - ; specify the hemisphere - if (diag_script_info@region .eq. "Antarctic") then - loc_res@gsnPolar = "SH" - if (isatt(diag_script_info, "max_lat")) then - loc_res@mpMaxLatF = diag_script_info@max_lat - else - loc_res@mpMaxLatF = 0.0 - end if - else if (diag_script_info@region .eq. "Arctic") - loc_res@gsnPolarNH = True - if (isatt(diag_script_info, "min_lat")) then - loc_res@mpMinLatF = diag_script_info@min_lat - else - loc_res@mpMinLatF = 0.0 - end if - else - error_msg("f", "vector_scalar_map_polar.ncl", funcname, \ - "no (valid) region selection: " + \ - diag_script_info@region) - end if - end if - - ; Draw plot - areaplot = \ - gsn_csm_vector_scalar_map_polar(wks_in, datau, datav, datac, loc_res) - - ; outfile name - if (isatt(wks_in, "fullname")) then - outfile = wks_in@fullname - else - outfile = wks_in@name - end if - log_info(" Wrote " + outfile) - - leave_msg(scriptname, funcname) - return(areaplot) - -end diff --git a/esmvaltool/diag_scripts/shared/plot/xy_line.ncl b/esmvaltool/diag_scripts/shared/plot/xy_line.ncl index 31e159195c..e6dee6be4c 100644 --- a/esmvaltool/diag_scripts/shared/plot/xy_line.ncl +++ b/esmvaltool/diag_scripts/shared/plot/xy_line.ncl @@ -11,9 +11,11 @@ ; function aerosol_sizedist ; procedure xy_line ; procedure xy_line_anom +; procedure xy_line_collect ; function timeseries_station ; function cycle_plot ; function errorbar_plot +; function evolution_base_plot ; ; ############################################################################# @@ -59,7 +61,7 @@ function profile_plev(wks_in[1], ; modifying the defaults! ; ; Modification history -; 20140214-A_gott_kl: written. +; 20140214-gottschaldt_klaus-dirk: written. 
; local funcname, scriptname, diag_script, data, var, font, font_size_fac, \ i_ref, FuncCode, i_N, i_mean, i_stddev, i_min, i_max, i_5, i_25, \ @@ -519,10 +521,10 @@ function aerosol_profile(wks_in[1], ; Caveats ; ; Modification history -; 20161013_A_righ_ma: moved size distributions to a separate routine. -; 20140917-A_righ_ma: renamed to aerosol_profile and extended for plotting -; size distributions. -; 20140705-A_righ_ma: written. +; 20161013-righi_mattia: moved size distributions to a separate routine. +; 20140917-righi_mattia: renamed to aerosol_profile and extended for +; plotting size distributions. +; 20140705-righi_mattia: written. ; local funcname, scriptname, tmp, colors, annots, res, res_new begin @@ -1026,7 +1028,7 @@ function aerosol_sizedist(wks_in[1], ; Caveats ; ; Modification history -; 20161013-A_righ_ma: written based on aerosol_profile. +; 20161013-righi_mattia: written based on aerosol_profile. ; local funcname, scriptname, tmp, colors, annots, res, res_new begin @@ -1490,19 +1492,19 @@ procedure xy_line(wks[1], ; Defines default ressources, which are overridden by argument res. ; Creates an xy-plot, according to wks & res. ; Adds multi model mean and standard deviation if -; diag_script_info@multi_model_mean is set to "y". +; diag_script_info@multi_model_mean is set to True. ; ; Caveats ; ; Modification history -; 20150511_A_senf_da: modified legend -; 20140109-A_senf_da: written. +; 20150511-senftleben_daniel: modified legend. +; 20140109-senftleben_daniel: written. ; local funcname, scriptname, res, res_in, res_stddev, source, source_x, \ source_stddev, wks, wks_in, colors, colors_mm, dashes, dashes_mm, thicks, \ thicks_mm, annots, annots_mm, avgstd, avgstd_mm, temp, plot, \ shading_plot, mm, lgres, nitems, lbid, amres, annoid, labels, lg_outfile, \ - psres, vpx, vph, vpy, vpw, bpres, tmborder + psres, vpx, vph, vpy, vpw, bpres, tmborder, items_mmm, mmm_var begin funcname = "xy_line" @@ -1517,36 +1519,18 @@ begin annots = project_style(items, diag_script_info, "annots") avgstd = project_style(items, diag_script_info, "avgstd") - ; FIX-ME: no longer needed, as multi-model is now a dataset (?) ; Select colors and other plotting attributes for multi-model mean - ; if (diag_script_info@multi_model_mean .eq. 
"y") then - ; ; Project_style evaluates metadata of variable "input_file_info" - ; temp = input_file_info - - ; ; -> keep original "input_file_info" in "temp" and restore later - ; copy_VarMeta(input_file_info, temp) - ; delete(input_file_info) - - ; ; Use "input_file_info" to pass on attribute names - ; input_file_info = getvaratts(temp) - ; do i = 0, dimsizes(input_file_info) - 1 - ; ; Define all original attributes again, but empty - ; input_file_info@$input_file_info(i)$ = "" - ; end do - ; input_file_info@dataset = "model_mean" - - ; ; See ./diag_scripts/shared/plot/style.ncl - ; ; FIX-ME: project_style function has changed - ; ; colors_mmm = project_style(diag_script_info, "colors") - ; ; dashes_mmm = project_style(diag_script_info, "dashes") - ; ; thicks_mmm = project_style(diag_script_info, "thicks") - ; ; annots_mmm = project_style(diag_script_info, "annots") - ; ; avgstd_mmm = project_style(diag_script_info, "avgstd") - ; delete(input_file_info) - ; input_file_info = temp ; restore original "input_file_info" - ; copy_VarMeta(temp, input_file_info) - ; delete(temp) - ; end if + if (diag_script_info@multi_model_mean) then + items_mmm = NewList("fifo") + mmm_var = True + mmm_var@dataset = "MultiModelMean" + ListPush(items_mmm, mmm_var) + colors_mmm = project_style(items_mmm, diag_script_info, "colors") + dashes_mmm = project_style(items_mmm, diag_script_info, "dashes") + thicks_mmm = project_style(items_mmm, diag_script_info, "thicks") + annots_mmm = project_style(items_mmm, diag_script_info, "annots") + avgstd_mmm = project_style(items_mmm, diag_script_info, "avgstd") + end if ; Set/copy resources res = True @@ -1567,60 +1551,41 @@ begin ; Plot plot = gsn_csm_xy(wks, source_x, source, res) - ; ; Add multi model mean and stddev - ; if (diag_script_info@multi_model_mean .eq. "y") then - ; ; Stddev - ; res_stddev = True - ; copy_VarMeta(res, res_stddev) - ; res_stddev@gsnXYFillColors = "LightGrey" - ; delete(res_stddev@xyLineColors) - - ; ; We don't want the line, so make it transparent. - ; res_stddev@xyLineColor = -1 - ; shading_plot = gsn_csm_xy(wks, source_x, source_stddev(2:3, :), \ - ; res_stddev) - ; overlay(plot, shading_plot) - ; ; MMM - ; delete([/res@xyLineThicknesses, res@xyLineColors, res@xyDashPatterns/]) - ; res@xyLineThicknesses = thicks_mmm - ; res@xyLineColors = colors_mmm - ; res@xyDashPatterns = dashes_mmm - ; mmm = gsn_csm_xy(wks, source_x, source_stddev(0, :), res) - ; overlay(plot, mmm) - ; end if - - ; Resources for a customized legend. - lgres = True - lgres@lgMonoLineThickness = False - lgres@lgLabelFontHeightF = .08 ; legend label font thickness - lgres@vpWidthF = 0.15 ; width of legend (NDC) - lgres@vpHeightF = 0.4 ; height of legend (NDC) - lgres@lgPerimOn = True - lgres@lgPerimColor = 0 - lgres@lgMonoDashIndex = False - lgres@lgBoxBackground = 0 - lgres@lgPerimFill = 0 - lgres@lgPerimFillColor = 0 - if (.not.isatt(diag_script_info, "EMs_in_lg")) then - diag_script_info@EMs_in_lg = True ; Set default - end if - if (.not.diag_script_info@EMs_in_lg) then - idcs_modelnames = annots - colors := colors(idcs_modelnames) - dashes := dashes(idcs_modelnames) - thicks := thicks(idcs_modelnames) - annots := input_file_info@dataset(idcs_modelnames) - end if - ; if (diag_script_info@multi_model_mean .eq. 
"y") then - ; lgres@lgLineColors = array_append_record(colors, colors_mmm, 0) - ; lgres@lgDashIndexes = array_append_record(dashes, dashes_mmm, 0) - ; lgres@lgLineThicknesses = \ - ; array_append_record(thicks, thicks_mmm, 0) + 0.5 - ; labels = array_append_record(annots, annots_mmm, 0) - ; else + ; Add multi model mean and stddev + if (diag_script_info@multi_model_mean) then + ; Stddev + res_stddev = True + copy_VarMeta(res, res_stddev) + res_stddev@gsnXYFillColors = "LightGrey" + delete(res_stddev@xyLineColors) + res_stddev@xyLineColor = -1 ; Make lines transparent + shading_plot = gsn_csm_xy(wks, source_x, source_stddev(2:3, :), \ + res_stddev) + overlay(plot, shading_plot) + ; MMM + delete([/res@xyLineThicknesses, res@xyLineColors, res@xyDashPatterns/]) + res@xyLineThicknesses = thicks_mmm + res@xyLineColors = colors_mmm + res@xyDashPatterns = dashes_mmm + mmm = gsn_csm_xy(wks, source_x, source_stddev(0, :), res) + overlay(plot, mmm) + end if + + ; *********************************************** + ; legend resources + ; *********************************************** + + lgres = True + lgres@lgItemType = "Lines" ; show lines only (default) + lgres@lgLabelFontHeightF = .05 ; set the legend label font thickness + lgres@vpWidthF = 0.15 ; width of legend (NDC) + lgres@vpHeightF = 0.5 ; height of legend (NDC) + lgres@lgPerimColor = "gray" ; draw the box perimeter in orange + lgres@lgPerimThicknessF = 1.0 ; thicken the box perimeter + lgres@lgLineColors = colors lgres@lgDashIndexes = dashes - lgres@lgLineThicknesses = thicks + 0.5 + lgres@lgLineThicknesses = thicks + 1 labels = annots nitems = dimsizes(labels) lgres@lgItemOrder = ispan(nitems - 1, 0, 1) @@ -1628,97 +1593,153 @@ begin ; Create legend lbid = gsn_create_legend(wks, nitems, labels, lgres) - ; Add legend at the correct position - ; Point (0, 0) is the dead center of the plot. Point (0, .5) is center, - ; flush bottom. Point (0.5, 0.5) is flush bottom, flush right. - amres = True - amres@amJust = "TopRight" ; reference corner of box - amres@amParallelPosF = 0.5 ; Move legend to +right, -left - amres@amOrthogonalPosF = -0.5 ; +down, -up - - ; Draw and frame - psres = True - psres@gsnDraw = False - psres@gsnFrame = False - maximize_output(wks, psres) - - ; Get plot size - getvalues plot - "vpXF" : vpx - "vpYF" : vpy - "vpWidthF" : vpw - "vpHeightF" : vph - end getvalues - bres = True - bres@gsnDraw = False - bres@gsnFrame = False - bres@tmXBOn = False - bres@tmYLOn = False - bres@tmXTOn = False - bres@tmYROn = False - bres@tmXBBorderOn = False - bres@tmXTBorderOn = False - bres@tmYLBorderOn = False - bres@tmYRBorderOn = False - - ; Create a blank plot with the same size as plot, attach legend - bres@vpXF = vpx - bres@vpYF = vpy - bres@vpWidthF = vpw - bres@vpHeightF = vph - blank_plot = gsn_csm_blank_plot(wks, bres) - - ; Add legend to plot - if (isatt(diag_script_info, "xy_line_legend")) then - if (diag_script_info@xy_line_legend .ne. 
False) then - annoid = gsn_add_annotation(blank_plot, lbid, amres) - end if - else - annoid = gsn_add_annotation(blank_plot, lbid, amres) - end if - - ; Put legend into an extra file - if (isatt(source, "legend_outside")) then - if (source@legend_outside) then - lg_outfile = wks@legendfile - styles = True - styles@colors = colors - styles@dashes = dashes - styles@thicks = thicks - create_legend_lines(annots, styles, lg_outfile, "lines") - end if - end if - - ; Create another blank plot to make sure plot border thickness is even - bres@tmXBBorderOn = True - bres@tmXTBorderOn = True - bres@tmYLBorderOn = True - bres@tmYRBorderOn = True - bres@tmBorderThicknessF = 3 - blank_plot2 = gsn_csm_blank_plot(wks, bres) - - ; Draw first plot with the actual values (+ grid lines if - ; tmXMajorGrid/gsnYRefLine are set) - draw(plot) - - ; Draw second plot with legend on top of previous plot. This - ; is, as far as I know, the only way to draw the legend on top - ; of the grid lines - draw(blank_plot) + amres = True + amres@amParallelPosF = 0.81 + amres@amOrthogonalPosF = 0.0 + annoid1 = gsn_add_annotation(plot, lbid, amres) - ; Redraw plot borders since the legend may (partially) cover some - ; of the borders drawn in the first 'plot' - draw(blank_plot2) + resP = True + resP@gsnMaximize = True + resP@gsnPaperOrientation = "portrait" + resP@gsnPaperMargin = 0.8 - frame(wks) + gsn_panel(wks, plot, (/1, 1/), resP) - ; outfile name - if (isatt(wks, "fullname")) then - outfile = wks@fullname - else - outfile = wks@name - end if - log_info(" Wrote " + outfile) +; ; Resources for a customized legend. +; lgres = True +; lgres@lgMonoLineThickness = False +; lgres@lgLabelFontHeightF = .08 ; legend label font thickness +; lgres@vpWidthF = 0.15 ; width of legend (NDC) +; lgres@vpHeightF = 0.4 ; height of legend (NDC) +; lgres@lgPerimOn = True +; lgres@lgPerimColor = 0 +; lgres@lgMonoDashIndex = False +; lgres@lgBoxBackground = 0 +; lgres@lgPerimFill = 0 +; lgres@lgPerimFillColor = 0 +; if (.not.isatt(diag_script_info, "EMs_in_lg")) then +; diag_script_info@EMs_in_lg = True ; Set default +; end if +; if (.not.diag_script_info@EMs_in_lg) then +; datasets = metadata_att_as_array(items, "dataset") +; idcs_modelnames = UNIQ(datasets) +; colors := colors(idcs_modelnames) +; dashes := dashes(idcs_modelnames) +; thicks := thicks(idcs_modelnames) +; annots := datasets(idcs_modelnames) +; end if +; if (diag_script_info@multi_model_mean) then +; lgres@lgLineColors = array_append_record(colors, colors_mmm, 0) +; lgres@lgDashIndexes = array_append_record(dashes, dashes_mmm, 0) +; lgres@lgLineThicknesses = \ +; array_append_record(thicks, thicks_mmm, 0) + 0.5 +; labels = array_append_record(annots, annots_mmm, 0) +; else +; lgres@lgLineColors = colors +; lgres@lgDashIndexes = dashes +; lgres@lgLineThicknesses = thicks + 0.5 +; labels = annots +; end if +; nitems = dimsizes(labels) +; lgres@lgItemOrder = ispan(nitems - 1, 0, 1) +; +; ; Create legend +; lbid = gsn_create_legend(wks, nitems, labels, lgres) +; +; ; Add legend at the correct position +; ; Point (0, 0) is the dead center of the plot. Point (0, .5) is center, +; ; flush bottom. Point (0.5, 0.5) is flush bottom, flush right. 
+; amres = True +; amres@amJust = "TopRight" ; reference corner of box +; ;amres@amParallelPosF = 0.5 ; Move legend to +right, -left +; ;amres@amOrthogonalPosF = -0.5 ; +down, -up +; amres@amParallelPosF = 0.81 +; amres@amOrthogonalPosF = 0.0 +; +; +; ; Draw and frame +; psres = True +; psres@gsnDraw = False +; psres@gsnFrame = False +; maximize_output(wks, psres) +; +; ; Get plot size +; getvalues plot +; "vpXF" : vpx +; "vpYF" : vpy +; "vpWidthF" : vpw +; "vpHeightF" : vph +; end getvalues +; bres = True +; bres@gsnDraw = False +; bres@gsnFrame = False +; bres@tmXBOn = False +; bres@tmYLOn = False +; bres@tmXTOn = False +; bres@tmYROn = False +; bres@tmXBBorderOn = False +; bres@tmXTBorderOn = False +; bres@tmYLBorderOn = False +; bres@tmYRBorderOn = False +; +; ; Create a blank plot with the same size as plot, attach legend +; bres@vpXF = vpx +; bres@vpYF = vpy +; bres@vpWidthF = vpw +; bres@vpHeightF = vph +; blank_plot = gsn_csm_blank_plot(wks, bres) +; +; ; Add legend to plot +; if (isatt(diag_script_info, "xy_line_legend")) then +; if (diag_script_info@xy_line_legend .ne. False) then +; annoid = gsn_add_annotation(blank_plot, lbid, amres) +; end if +; else +; annoid = gsn_add_annotation(blank_plot, lbid, amres) +; end if +; +; ; Put legend into an extra file +; if (isatt(source, "legend_outside")) then +; if (source@legend_outside) then +; lg_outfile = wks@legendfile +; styles = True +; styles@colors = colors +; styles@dashes = dashes +; styles@thicks = thicks +; create_legend_lines(annots, styles, lg_outfile, "lines") +; end if +; end if +; +; ; Create another blank plot to make sure plot border thickness is even +; bres@tmXBBorderOn = True +; bres@tmXTBorderOn = True +; bres@tmYLBorderOn = True +; bres@tmYRBorderOn = True +; bres@tmBorderThicknessF = 3 +; blank_plot2 = gsn_csm_blank_plot(wks, bres) +; +; ; Draw first plot with the actual values (+ grid lines if +; ; tmXMajorGrid/gsnYRefLine are set) +; draw(plot) +; +; ; Draw second plot with legend on top of previous plot. This +; ; is, as far as I know, the only way to draw the legend on top +; ; of the grid lines +; draw(blank_plot) +; +; ; Redraw plot borders since the legend may (partially) cover some +; ; of the borders drawn in the first 'plot' +; draw(blank_plot2) +; +; frame(wks) +; +; ; outfile name +; if (isatt(wks, "fullname")) then +; outfile = wks@fullname +; else +; outfile = wks@name +; end if +; log_info(" Wrote " + outfile) leave_msg(scriptname, funcname) end @@ -1754,12 +1775,12 @@ procedure xy_line_anom(wks[1], ; Defines default ressources, which are overridden by argument res. ; Creates an xy-plot, according to wks & res. ; Adds multi model mean and standard deviation if -; diag_script_info@multi_model_mean is set to "y". +; diag_script_info@multi_model_mean is set to True. ; ; Caveats ; ; Modification history -; 20160822_A_bock_li: written +; 20160822-bock_lisa: written ; local funcname, scriptname, verbosity, res, res_in, res_stddev, source, \ @@ -1771,7 +1792,7 @@ local funcname, scriptname, verbosity, res, res_in, res_stddev, source, \ begin funcname = "xy_line_anom" - scriptname = "plot_scripts/ncl/xy_line.ncl" + scriptname = "diag_scripts/shared/plot/xy_line.ncl" enter_msg(scriptname, funcname) ; Select colors and other plotting attributes @@ -1782,29 +1803,18 @@ begin annots = project_style(items, diag_script_info, "annots") avgstd = project_style(items, diag_script_info, "avgstd") - ; individual case for HadCRUT4 observations - ; FIX-ME: mean value comes from climatology file (absolute.nc). 
- ; There are no missing values as in the anomaly data. - datasetnames = metadata_att_as_array(items, "dataset") - if (any(datasetnames.eq."HadCRUT4-clim")) then - ind_wo_clim = ind(datasetnames .ne. "HadCRUT4-clim") - tmp1 = colors(ind_wo_clim) - delete(colors) - colors = tmp1 - tmp2 = dashes(ind_wo_clim) - delete(dashes) - dashes = tmp2 - tmp3 = thicks(ind_wo_clim) - delete(thicks) - thicks = tmp3 - tmp4 = annots(ind_wo_clim) - delete(annots) - annots = tmp4 - tmp5 = avgstd(ind_wo_clim) - delete(avgstd) - avgstd = tmp5 - delete([/tmp1, tmp2, tmp3, tmp4, tmp5/]) - end if + do i = 0, dimsizes(annots)-1 + tmp := str_split(annots(i), "_") + annots(i) = tmp(0) + end do + labels = get_unique_values(annots) + ind_end = ind(labels .eq. source@ref .or. \ + labels .eq. "MultiModelMean") + labels_old = labels + labels := labels(sort_alphabetically(labels, ind_end, "end")) + label_ind = get1Dindex(annots, labels) + labels_lb := labels_old(sort_alphabetically(labels_old, ind_end, "begin")) + label_ind_lb = get1Dindex(annots, labels_lb) ; ************************************************ ; plotting parameters @@ -1844,9 +1854,9 @@ begin res0@tmXBMinorLengthF = 0.003 res0@tmYLMinorLengthF = 0.003 - res0@xyDashPatterns = dashes - res0@xyLineThicknesses = thicks - res0@xyLineColors = colors + res0@xyDashPatterns = dashes(label_ind) + res0@xyLineThicknesses = thicks(label_ind) + res0@xyLineColors = colors(label_ind) copy_VarMeta(res0_in, res0) ; copy passed resources @@ -1875,53 +1885,117 @@ begin res@trYMinF = min(source) - 0.05 * (max(source) - min(source)) res@trYMaxF = max(source) + 0.05 * (max(source) - min(source)) res@tiYAxisOn = True - res@tiXAxisString = "Year" + res@tiXAxisString = "" res@gsnStringFontHeightF = 0.016 - res@xyDashPatterns = dashes - res@xyLineThicknesses = thicks ; make 2nd lines thicker - res@xyLineColors = colors ; change line color + res@xyDashPatterns = dashes(label_ind) + res@xyLineThicknesses = thicks(label_ind) ; make 2nd lines thicker + res@xyLineColors = colors(label_ind) ; change line color copy_VarMeta(res_in, res) ; copy passed resources - ; *************************************** - ; panel first two plots - ; *************************************** - - plot(0) = gsn_csm_xy(wks, source_x, source, res) ; create plot - - ; Add right panel with mean values in anomaly plot - - var = fspan(0., 2., 3) - mean = new((/dimsizes(source_mean), 3/), double) - mean(:, 0) = source_mean(:) - mean(:, 1) = source_mean(:) - mean(:, 2) = source_mean(:) - - plot2 = gsn_csm_xy(wks, var, mean, res0) ; create plot + plot(0) = gsn_csm_xy(wks, source_x, source(label_ind, :), res) ; create plot ; --------------------------------------------------------------------- - ; Procedure to attach a box to the given plot, given the lower left - ; corner, width, color, and opacity. 
+ ; grey shading of reference period ; --------------------------------------------------------------------- gsres = True - gsres@gsFillColor = "yellow" + gsres@gsFillColor = "Grey70" gsres@gsFillOpacityF = 0.1 xbox = (/ref_start, ref_end, ref_end, ref_start, ref_start/) ybox = (/res@trYMinF, res@trYMinF, res@trYMaxF, res@trYMaxF, res@trYMinF/) newplot00 = gsn_add_polygon(wks, plot(0), xbox, ybox, gsres) - ; Draw some lines to create a legend + txtres = True + txtres@txFont = "helvetica-bold" + txtres@txFontColor = "Grey40" + txtres@txFontHeightF = 0.013 + ref_txt = gsn_add_text(wks, plot, "reference period", \ + 0.5*(ref_start + ref_end), res@trYMaxF - 0.15, txtres) + + ; --------------------------------------------------------------------- + ; add lines and names for volcanic eruptions + ; --------------------------------------------------------------------- + if (diag_script_info@volcanoes .eq. True) then + vres = True + vres@gsLineColor = "grey20" + vres@gsLineThicknessF = 1. + vres@gsLineDashPattern = 2. + txres = True + txres@txAngleF = 90 + txres@txFont = "helvetica-bold" + txres@txFontColor = "grey20" + txres@txFontHeightF = 0.013 + + yy = (/res@trYMinF, res@trYMaxF/) + + vol_name1 = gsn_add_text(wks, plot, "Krakatoa", 1885, res@trYMaxF - 0.6, \ + txres) + xx = (/1883.0, 1883.0/) + vol1 = gsn_add_polyline(wks, plot(0), xx, yy, vres) + vol_name2 = gsn_add_text(wks, plot, "Santa Maria", 1904, \ + res@trYMaxF - 0.4, txres) + xx = (/1902.0, 1902.0/) + vol2 = gsn_add_polyline(wks, plot(0), xx, yy, vres) + vol_name3 = gsn_add_text(wks, plot, "Agung", 1965, res@trYMaxF - 0.4, \ + txres) + xx = (/1963.0, 1963.0/) + vol3 = gsn_add_polyline(wks, plot(0), xx, yy, vres) + vol_name4 = gsn_add_text(wks, plot, "El Chichon", 1984, \ + res@trYMaxF - 0.4, txres) + xx = (/1982.0, 1982.0/) + vol4 = gsn_add_polyline(wks, plot(0), xx, yy, vres) + vol_name5 = gsn_add_text(wks, plot, "Pinatubo", 1993, res@trYMaxF - 0.4, \ + txres) + xx = (/1991.0, 1991.0/) + vol5 = gsn_add_polyline(wks, plot(0), xx, yy, vres) + end if + + ; --------------------------------------------------------------------- + ; add line at 0.0 + ; --------------------------------------------------------------------- res_lines = True ; polyline mods desired res_lines@tfPolyDrawOrder = "Predraw" - res_lines@gsLineColor = "grey" ; line color + res_lines@gsLineColor = "grey20" ; line color res_lines@gsLineThicknessF = 1. ; line thicker res_lines@gsLineDashPattern = 1. 
; dash pattern xx = (/res@trXMinF, res@trXMaxF/) yy = (/0.0, 0.0/) dum0 = gsn_add_polyline(wks, plot(0), xx, yy, res_lines) - newplot = gsn_attach_plots(plot(0), plot2, res, res0) + + ; --------------------------------------------------------------------- + ; Add right panel with mean values in anomaly plot + ; --------------------------------------------------------------------- + if (diag_script_info@ref_value) then + var = fspan(0., 2., 3) + mean = new((/dimsizes(source_mean), 3/), double) + mean(:, 0) = source_mean(:) + mean(:, 1) = source_mean(:) + mean(:, 2) = source_mean(:) + + plot2 = gsn_csm_xy(wks, var, mean(label_ind, :), res0) ; create plot + + getvalues plot2 + "trXMinF" : xmin + "trYMinF" : ymin + "trXMaxF" : xmax + "trYMaxF" : ymax + end getvalues + + xbox1 = (/xmin, xmax, xmax, xmin, xmin/) + ybox1 = (/ymin, ymin, ymax, ymax, ymin/) + + gnres = True + gnres@gsFillColor = "Gray70" + gnres@gsFillOpacityF = 0.1 + gnres@tfPolyDrawOrder = "PreDraw" + + plot@$unique_string("box")$ = gsn_add_polygon(wks, plot2, xbox1, \ + ybox1, gnres) + + newplot = gsn_attach_plots(plot(0), plot2, res, res0) + end if ; *********************************************** ; legend resources @@ -1929,37 +2003,286 @@ begin lgres = True lgres@lgItemType = "Lines" ; show lines only (default) - lgres@lgLabelFontHeightF = .05 ; set the legend label font thickness - lgres@vpWidthF = 0.15 ; width of legend (NDC) - lgres@vpHeightF = 0.5 ; height of legend (NDC) + lgres@lgLabelFontHeightF = .04 ; set the legend label font thickness + lgres@vpWidthF = 0.3 ; width of legend (NDC) + lgres@vpHeightF = 1. ; height of legend (NDC) lgres@lgPerimColor = "gray" ; draw the box perimeter in orange lgres@lgPerimThicknessF = 1.0 ; thicken the box perimeter - if (.not.isatt(diag_script_info, "EMs_in_lg")) then - diag_script_info@EMs_in_lg = True ; Set default - end if - if (.not.diag_script_info@EMs_in_lg) then - idcs_modelnames = annots - colors := colors(idcs_modelnames) - dashes := dashes(idcs_modelnames) - thicks := thicks(idcs_modelnames) - annots := models@name(idcs_modelnames) - end if - lgres@lgLineColors = colors - lgres@lgDashIndexes = dashes - lgres@lgLineThicknesses = thicks + 0.5 - labels = annots + lgres@lgLineColors = colors(label_ind_lb) + lgres@lgDashIndexes = dashes(label_ind_lb) + lgres@lgLineThicknessF = 3 nitems = dimsizes(labels) lgres@lgItemOrder = ispan(nitems - 1, 0, 1) ; Create legend - lbid = gsn_create_legend(wks, nitems, labels, lgres) + lbid = gsn_create_legend(wks, nitems, labels_lb, lgres) amres = True - amres@amParallelPosF = 0.81 - amres@amOrthogonalPosF = 0.0 + amres@amParallelPosF = 0.9 + amres@amOrthogonalPosF = 0.5 annoid1 = gsn_add_annotation(plot(0), lbid, amres) + resP = True + resP@gsnMaximize = True + + gsn_panel(wks, plot, (/1, 1/), resP) + + leave_msg(scriptname, funcname) + +end + +; ############################################################################# +undef("xy_line_collect") +procedure xy_line_collect(wks[1], + source_mean, + source, + source_stderr, + source_x, + source_stat, + ref_start, + ref_end, + res_in: logical, + res0_in: logical, + items: list) +; +; Arguments: +; wks: workstation, must be passed - no default used yet! +; source_mean: source_mean +; source: data to be plotted (no netCDF input possible yet) +; source_stderr: standard error of reference +; source_x: x-axis of array to be plotted (e.g. source&time, ... ) +; source_stat: statistics of multi_model_mean, e.g. 
5 and 95% quantile
+;              or stddev
+;  ref_start: start year of the reference dataset
+;  ref_end: end year of the reference dataset
+;  res_in: diag_script-specific resources passed from diag_script
+;  res0_in: additional resources passed from diag_script
+;  items: list of input_file_info metadata
+;
+; Source prototype
+;
+; Description
+;  Defines default resources, which are overridden by argument res.
+;  Creates an xy-plot, according to wks & res.
+;  Adds multi model mean and standard deviation if
+;  diag_script_info@multi_model_mean is set to True.
+;
+; Caveats
+;
+; Modification history
+;  20160822-bock_lisa: written.
+;
+
+local funcname, scriptname, verbosity, res, res_in, res_stat, source, \
+  source_x, source_stat, wks, wks_in, colors, colors_mm, dashes, \
+  dashes_mm, thicks, thicks_mm, annots, annots_mm, avgstd, avgstd_mm, temp, \
+  plot, shading_plot, mm, lgres, nitems, lbid, amres, annoid, labels, \
+  psres, vpx, vph, vpy, vpw, bpres, tmborder
+
+begin
+
+  funcname = "xy_line_collect"
+  scriptname = "diag_scripts/shared/plot/xy_line.ncl"
+  enter_msg(scriptname, funcname)
+
+  ; Select colors and other plotting attributes
+  ; (see ./diag_scripts/shared/plot/style.ncl)
+  colors = project_style(items, diag_script_info, "colors")
+  dashes = project_style(items, diag_script_info, "dashes")
+  thicks = project_style(items, diag_script_info, "thicks")
+  annots = project_style(items, diag_script_info, "annots")
+  avgstd = project_style(items, diag_script_info, "avgstd")
+
+  ; ************************************************
+  ; plotting parameters
+  ; ************************************************
+
+  plot = new(1, graphic)
+
+  res = True
+  res@gsnDraw = False
+  res@gsnFrame = False
+
+  res@vpXF = 0.05
+  res@vpYF = 0.7
+  res@vpHeightF = 0.4
+  res@vpWidthF = 0.7
+  res@pmLegendDisplayMode = "Never"
+  res@tmYRLabelFontHeightF = 0.016
+  res@tmYLLabelFontHeightF = 0.016
+  res@tiXAxisFontHeightF = 0.016
+  res@tiYAxisFontHeightF = 0.016
+  res@tmXBMajorOutwardLengthF = 0.006
+  res@tmYLMajorOutwardLengthF = 0.006
+  res@tmXBMinorOutwardLengthF = 0.003
+  res@tmYLMinorOutwardLengthF = 0.003
+  res@tmXBMajorLengthF = 0.006
+  res@tmYLMajorLengthF = 0.006
+  res@tmXBMinorLengthF = 0.003
+  res@tmYLMinorLengthF = 0.003
+
+  if (isatt(diag_script_info, "ref")) then
+    min_tmp = (/min(source_mean), min(source), min(source_stderr), \
+                min(source_stat)/)
+    max_tmp = (/max(source_mean), max(source), max(source_stderr), \
+                max(source_stat)/)
+  else
+    min_tmp = (/min(source_mean), min(source_stat)/)
+    max_tmp = (/max(source_mean), max(source_stat)/)
+  end if
+  res@trYMinF = min(min_tmp) - 0.05 * (max(max_tmp) - min(min_tmp))
+  res@trYMaxF = max(max_tmp) + 0.05 * (max(max_tmp) - min(min_tmp))
+  res@tiYAxisOn = True
+  res@tiXAxisString = ""
+  res@gsnStringFontHeightF = 0.016
+
+  copy_VarMeta(res_in, res)  ; copy passed resources
+
+  ; ---------------------------------------------------------------------
+  ; add multi model mean of different experiments
+  ; ---------------------------------------------------------------------
+  ; number of different experiments
+  tmp = dimsizes(source_mean)
+  nexp = tmp(0)
+  delete(tmp)
+
+  cmap = read_colormap_file("$diag_scripts/shared/plot/rgb/cmip_line.rgb")
+  res@xyDashPatterns = (/0, 0, 0, 0, 0, 0/)
+  res@xyLineThicknesses = (/5, 5, 5, 5, 5, 5/)
+  res@xyLineColors = cmap(:, :)
+
+  if (nexp .gt. 6) then
+    error_msg("w", scriptname, funcname, "Color palette not defined for " \
+              + nexp + " experiments")
+  end if
+
+  res@pmLegendDisplayMode = "Always"
+  res@xyExplicitLegendLabels = source_mean&experiment
+  res@lgBoxMinorExtentF = 0.2  ; Shorten the legend lines
+
+  plot(0) = gsn_csm_xy(wks, source_x, source_mean, res)  ; create plot
+
+  res@pmLegendDisplayMode = "Never"
+
+  ; ---------------------------------------------------------------------
+  ; Add multi model statistics (5% and 95% quantile)
+  ; ---------------------------------------------------------------------
+  if (isatt(diag_script_info, "stat_shading")) then
+    if (diag_script_info@stat_shading .ne. False) then
+      res_stat = True
+      res_stat@gsnXYFillOpacities = 0.2
+      cmap = read_colormap_file("$diag_scripts/shared/plot/rgb/" + \
+                                "cmip_shading.rgb")
+      copy_VarMeta(res, res_stat)
+      res_stat@gsnXYFillColors = cmap(0, :)
+      delete(res_stat@xyLineColors)
+      res_stat@xyLineColor = cmap(0, :)
+      delete(res_stat@xyLineThicknesses)
+      res_stat@xyLineThicknesses = (/1, 1/)
+      shading_plot = gsn_csm_xy(wks, source_x, source_stat(0:1, 0, :), \
+                                res_stat)
+      overlay(plot(0), shading_plot)
+      if(nexp .ge. 2) then
+        res_stat@gsnXYFillColors = cmap(1, :)
+        res_stat@xyLineColor = cmap(1, :)
+        shading_plot = gsn_csm_xy(wks, source_x, source_stat(0:1, 1, :), \
+                                  res_stat)
+        overlay(plot(0), shading_plot)
+      end if
+      if(nexp .ge. 3) then
+        res_stat@gsnXYFillColors = cmap(2, :)
+        res_stat@xyLineColor = cmap(2, :)
+        shading_plot = gsn_csm_xy(wks, source_x, source_stat(0:1, 2, :), \
+                                  res_stat)
+        overlay(plot(0), shading_plot)
+      end if
+      if(nexp .ge. 4) then
+        res_stat@gsnXYFillColors = cmap(3, :)
+        res_stat@xyLineColor = cmap(3, :)
+        shading_plot = gsn_csm_xy(wks, source_x, source_stat(0:1, 3, :), \
+                                  res_stat)
+        overlay(plot(0), shading_plot)
+      end if
+      if(nexp .gt. 4) then
+        error_msg("w", scriptname, funcname, "Color palette not defined for " \
+                  + nexp + " experiments")
+      end if
+    end if
+  end if
+
+  ; ---------------------------------------------------------------------
+  ; add reference datasets
+  ; ---------------------------------------------------------------------
+  if (isatt(diag_script_info, "ref")) then
+    delete(res@xyDashPatterns)
+    delete(res@xyLineThicknesses)
+    delete(res@xyLineColors)
+    res@xyDashPatterns = dashes
+    res@xyLineThicknesses = (/5/)
+    res@xyLineColors = colors  ; change line color
+
+    ref_p = gsn_csm_xy(wks, source_x, source, res)  ; create plot
+    overlay(plot(0), ref_p)
+  end if
+
+  ; ---------------------------------------------------------------------
+  ; add standard error of reference datasets
+  ; ---------------------------------------------------------------------
+  if (isatt(diag_script_info, "ref_stderr")) then
+    if (diag_script_info@ref_stderr .ne. False) then
+      res_stderr = True
+      res_stderr@gsnXYFillOpacities = 0.2
+      copy_VarMeta(res, res_stderr)
+      res_stderr@gsnXYFillColors = "grey"
+      delete(res_stderr@xyLineColors)
+      delete(res_stderr@xyLineThicknesses)
+      ; Draw the line in grey, so it blends into the shading.
+      res_stderr@xyLineColor = "grey"
+      res_stderr@xyLineThicknesses = (/1, 1/)
+      shading_plot = gsn_csm_xy(wks, source_x, source_stderr(:, :), \
+                                res_stderr)
+      overlay(plot(0), shading_plot)
+    end if
+  end if
+
+  if (diag_script_info@ts_anomaly .eq. "anom" .and. \
+      diag_script_info@ref_shading) then
+
+    ; -------------------------------------------------------------------
+    ; grey shading of reference period
+    ; -------------------------------------------------------------------
+    gsres = True
+    gsres@gsFillColor = "Grey70"
+    gsres@gsFillOpacityF = 0.1
+    xbox = (/ref_start, ref_end, ref_end, ref_start, ref_start/)
+    ybox = (/res@trYMinF, res@trYMinF, res@trYMaxF, res@trYMaxF, res@trYMinF/)
+    newplot00 = gsn_add_polygon(wks, plot(0), xbox, ybox, gsres)
+
+    txtres = True
+    txtres@txFont = "helvetica-bold"
+    txtres@txFontColor = "Grey40"
+    txtres@txFontHeightF = 0.013
+    ref_txt = gsn_add_text(wks, plot, "reference period", \
+                           0.5*(ref_start + ref_end), \
+                           res@trYMinF + 0.05 * (res@trYMaxF - res@trYMinF), \
+                           txtres)
+
+  end if
+
+  ; ---------------------------------------------------------------------
+  ; add line at 0.0
+  ; ---------------------------------------------------------------------
+  res_lines = True                    ; polyline mods desired
+  res_lines@tfPolyDrawOrder = "Predraw"
+  res_lines@gsLineColor = "grey"      ; line color
+  res_lines@gsLineThicknessF = 1.     ; line thicker
+  res_lines@gsLineDashPattern = 1.    ; dash pattern
+
+  xx = (/res@trXMinF, res@trXMaxF/)
+  yy = (/0.0, 0.0/)
+  dum0 = gsn_add_polyline(wks, plot(0), xx, yy, res_lines)
+
   resP = True
   resP@gsnMaximize = True
   resP@gsnPaperOrientation = "portrait"
@@ -2001,7 +2324,7 @@ function timeseries_station(wks_in[1],
 ;    Selection of defaults for res almost arbitrary.
 ;
 ; Modification history:
-;    20140325_righ_ma: written.
+;    20140325-righi_mattia: written.
 ;
 local funcname, scriptname
 begin
@@ -2162,7 +2485,7 @@ function cycle_plot(wks_in[1],
 ; Caveats
 ;
 ; Modification history
-;    20131206-A_fran_fr: written.
+;    20131206-winterstein_franziska: written.
 ;
 local funcname, scriptname, wks_out, wks_in, data, source, res, atts, base, \
   varname
@@ -2367,12 +2690,12 @@ function errorbar_plot(wks_in[1],
 ;    source(0, :) = mean
 ;    source(1, :) = standard deviation
 ;    source!0 = statistic
-;    source!1 = model
+;    source!1 = dataset
 ;
 ; Caveats
 ;
 ; Modification history
-;    20151105-A_righ_ma: written.
+;    20151105-righi_mattia: written.
 ;
 local funcname, scriptname, data, defaults, var, wks, res, ren_new, resL, \
   error_bar
@@ -2397,6 +2720,7 @@ begin
     diag_script = att2var(data, "diag_script")
   else
     var = varname
+    diag_script = DIAG_SCRIPT
   end if
 
   ; Check if a valid wks has been provided, otherwise invoke default
@@ -2420,14 +2744,14 @@ begin
   res@tiMainFontHeightF = 0.025
   res@tmXBMode = "Explicit"
   res@trXMinF = -1
-  res@trXMaxF = dimsizes(data&model)
-  res@tmXBValues = ispan(0, dimsizes(data&model) - 1, 1)
+  res@trXMaxF = dimsizes(data&dataset)
+  res@tmXBValues = ispan(0, dimsizes(data&dataset) - 1, 1)
   res@trYMinF = min(data(0, :) - data(1, :) / 2.)
-  res@trYMinF = where(res@trXMinF.lt.0, 1.1 * res@trYMinF, 0.9 * res@trYMinF)
+  res@trYMinF = where(res@trYMinF.lt.0, 1.1 * res@trYMinF, 0.9 * res@trYMinF)
   res@trYMaxF = max(data(0, :) + data(1, :) / 2.)
-  res@trYMaxF = where(res@trXMaxF.gt.0, 1.1 * res@trYMaxF, 0.9 * res@trYMaxF)
+  res@trYMaxF = where(res@trYMaxF.gt.0, 1.1 * res@trYMaxF, 0.9 * res@trYMaxF)
   res@tiMainString = varname + " - " + diag_script_info@region
-  res@tmXBLabels = data&model
+  res@tmXBLabels = data&dataset
   res@tmXBLabelAngleF = 45.
   res@tmXBLabelJust = "CenterRight"
@@ -2441,8 +2765,8 @@ begin
 
   ; Draw errorbars
   resL = True
   resL@gsLineThicknessF = 1.
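; [Editor's note] The loop below draws each error bar as a single vertical
; polyline spanning mean - stddev/2 to mean + stddev/2 at the dataset's
; integer x position. A minimal sketch of the same idiom, with 'mean_val'
; and 'std_val' as hypothetical placeholders and resL as defined above:
;
;   bar = gsn_add_polyline(wks, plot, (/imod, imod/), \
;                          (/mean_val - std_val / 2., \
;                            mean_val + std_val / 2./), resL)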
-  error_bar = new(dimsizes(data&model), graphic)
-  do imod = 0, dimsizes(data&model) - 1
+  error_bar = new(dimsizes(data&dataset), graphic)
+  do imod = 0, dimsizes(data&dataset) - 1
     error_bar(imod) = \
       gsn_add_polyline(wks, plot, (/imod, imod/), \
                        (/data(0, imod) + data(1, imod) / 2, \
@@ -2455,3 +2779,288 @@ begin
 
   return(plot)
 end
+
+; #############################################################################
+undef("evolution_base_plot")
+function evolution_base_plot(wks_in[1],
+                             source,
+                             varname[1] : string,
+                             anomaly)
+;
+; Arguments
+;  wks_in: workstations (graphic object or default will be used).
+;  source: data to be plotted or a NetCDF filename with data.
+;  varname: variable name in the file.
+;  anomaly: anomaly plot or not - used in axis labels.
+;
+; Source prototype:
+;  source(6, *)
+;  source!0 = reference dataset.
+;  source!1 = multi model mean.
+;  source!2 = multi model min.
+;  source!3 = multi model max.
+;  source!4 = years.
+;  source!5 = multi model std.
+;  source@legend_outside = draw a legend within the plot or in a
+;                          separate file.
+;  source@dim_Mod = number of models.
+;
+; Return value:
+;  A graphic variable.
+;
+; Description:
+;  Draw an evolution plot from yearly means, including multi-model mean
+;  and uncertainty.
+;
+; Modification history:
+;  20180822-schlund_manuel: ported to v2.0.
+;  20170308-gier_bettina: written.
+;
+local funcname, scriptname, verbosity, wks_out, wks_in, data, source, res, \
+  atts, base, varname, multi_model, refmean
+
+begin
+
+  funcname = "evolution_base_plot"
+  scriptname = "diag_scripts/shared/plot/xy_line.ncl"
+  enter_msg(scriptname, funcname)
+
+  ; Get data, either directly or via netCDF file
+  if(typeof(source) .eq. "string") then
+    data = ncdf_read(source, varname)
+  else
+    data = source
+    copy_VarMeta(source, data)
+  end if
+
+  ; Retrieve basic metadata from data
+  defaults = (/"default", "dummy", "dummy_for_var", "Default", "Dummy"/)
+  if (any(varname .eq.
defaults)) then + var = att2var(data, "var") + diag_script = att2var(data, "diag_script") + else + var = varname + diag_script = DIAG_SCRIPT + end if + if (.not.isatt(diag_script_info, "styleset")) then + diag_script_info@styleset = "DEFAULT" + end if + + ; Check if a valid wks has been provided, otherwise invoke default + wks_out = get_wks(wks_in, diag_script, var) + + ; Calculation for confidence polygons + n_val = 17 + new_arr2 = new((/n_val, 2*dimsizes(data(4, :))/), float) + new_arr2!0 = "intervals" + new_arr2&intervals = fspan(0.9, 0.1, n_val) + new_arr2!1 = "dtime" + new_arr2&dtime = array_append_record(data(4, :), data(4, ::-1), 0) + do n = 0, n_val - 1 + do time_i = 0, (dimsizes(data(4, :)) - 1) + t_var = cdft_t(0.5 - new_arr2&intervals(n) / 2, (data@dim_Mod - 1)) + + ; Offset from mean, use symmetry of t-distribution + y_off = t_var * data(5, time_i) / sqrt(data@dim_Mod - 1) + new_arr2(n, time_i) = data(1, time_i) - y_off + new_arr2(n, dimsizes(new_arr2&dtime) - 1 - time_i) = \ + data(1, time_i) + y_off + end do + end do + + ; Define default plot resources + res = True + res@gsnDraw = False + res@gsnFrame = False + res@gsnMaximize = True + res@gsnPaperOrientation = "landscape" + res@xyLineThicknesses = (/2, 2/) + res@xyLineColors = (/"black", "white"/) + res@xyDashPatterns = (/0.0, 0.0/) ; make all lines solid + ; res@xyMonoDashPattern = False + ; res@xyDashPatterns = dashes + + coord_names = getvardims(data) + ; x_axis = data&$coord_names(1)$ + x_axis_num = data(4, :) + ; x_axis_num = ispan(1, dimsizes(data&$coord_names(1)$), 1) + + res@trXMinF = min(x_axis_num) + res@trXMaxF = max(x_axis_num) + ymax = max((/max(data(3, :)), max(data(0, :))/)) + ymin = min((/min(data(2, :)), min(data(0, :))/)) + res@trYMinF = ymin - 0.05 * (ymax - ymin) + res@trYMaxF = ymax + 0.15 * (ymax - ymin) + res@tiMainFontHeightF = 0.025 + res@vpHeightF = 0.4 + res@vpWidthF = 0.8 + res@tmXBMode = "Explicit" + res@tmXTOn = False + res@tiXAxisString = "Time [years]" ; x-axis should always be time + + ; Axis title + if (anomaly) then + res@tiYAxisString = varname + " Anomaly [" + data@units + "]" + else + res@tiYAxisString = varname + " [" + data@units + "]" + end if + + ; After overlaying the plots, add titles and legend manually + delete(res@xyLineColors) + res@gsnXYFillColors = "gray60" + res@xyLineColor = -1 ; We don't want the line, so make it transparent. + + ; Set contour color + res_contour = True + if isatt(data, "contour_color") then + contour_color = data@contour_color + else + var_lower = str_lower(var) + contour_color = 0 ; default: red + if (.not. ismissing(str_index_of_substr(var_lower, "fgco2", -1))) then + contour_color = 200 + end if + if (.not. ismissing(str_index_of_substr(var_lower, "nbp", -1))) then + contour_color = 100 + end if + if (.not. ismissing(str_index_of_substr(var_lower, "tas", -1))) then + contour_color = 0 + end if + if (.not. ismissing(str_index_of_substr(var_lower, "pr", -1))) then + contour_color = 230 + end if + if (.not. 
ismissing(str_index_of_substr(var_lower, "tos", -1))) then + contour_color = 40 + end if + end if + + ; cmap = read_colormap_file("MPL_Reds") + ; opt = True + ; opt@NumColorsInTable = n_val + 2 + ; cmap = span_named_colors((/"gray", contour_color/), opt) + ; interval_colors = cmap(2:, :) + + hsv_colors = new((/n_val, 3/), "float") + hsv_colors(:, 0) = contour_color ; hue + hsv_colors(:, 1) = fspan(0, 1, n_val) ; saturation + hsv_colors(:, 2) = 0.9 ; value + interval_colors = hsvrgb(hsv_colors) + + ; Override defaults with "res_" attributes of "data" + res_new = att2var(data, "res_") + copy_VarMeta(res_new, res) + + ; Create filled xy plot + plot = gsn_csm_xy(wks_out, x_axis_num, data(2:3, :), res) + plot@outfile = wks_out@name + + ; Create lines for Volcanic eruptions + res_lines = True ; polyline mods desired + res_lines@gsLineDashPattern = 0.0 ; solid line + res_lines@gsLineThicknessF = 5.0 ; line thicker + res_lines@gsLineColor = "gray40" ; line color + res_text = True ; text mods desired + res_text@txFontHeightF = 0.01 ; change text size + res_text@txJust = "CenterLeft" ; text justification + if (isatt(diag_script_info, "evolution_plot_volcanoes") .and. \ + diag_script_info@evolution_plot_volcanoes) then + ymax = max((/max(data(3, :)), max(data(0, :))/)) + ymin = min((/min(data(2, :)), min(data(0, :))/)) + ytop = ymax + 0.1*(ymax - ymin) + yy = (/(ymin - 0.05 * (ymax - ymin)), (ymax + 0.15*(ymax-ymin))/) + + ; Agung + xx = (/1963.0, 1963.0/) + plot@$unique_string("dum")$ = gsn_add_polyline(wks_out, plot, xx, yy, \ + res_lines) + plot@$unique_string("dum")$ = gsn_add_text(wks_out, plot, "Agung", \ + (xx(1) + 0.5), (ytop), \ + res_text) + + ; El Chichon + xx = (/1982.0, 1982.0/) + plot@$unique_string("dum")$ = gsn_add_polyline(wks_out, plot, xx, yy, \ + res_lines) + plot@$unique_string("dum")$ = gsn_add_text(wks_out, plot, \ + "El Chichon", \ + (xx(1)), (ytop), res_text) + + ; Pinatubo + xx = (/1991.0, 1991.0/) + plot@$unique_string("dum")$ = gsn_add_polyline(wks_out, plot, xx, yy, \ + res_lines) + plot@$unique_string("dum")$ = gsn_add_text(wks_out, plot, "Pinatubo", \ + (xx(1)), (ytop), res_text) + + ; El Nino + res_lines@gsLineColor = "orange" + xx = (/1998.0, 1998.0/) + plot@$unique_string("dum")$ = gsn_add_polyline(wks_out, plot, xx, yy, \ + res_lines) + plot@$unique_string("dum")$ = gsn_add_text(wks_out, plot, "El Nino", \ + (xx(1) + 0.5), (ytop), \ + res_text) + end if + + ; Add legend manually + res_lines@gsLineDashPattern = 0.0 ; solid line + res_lines@gsLineThicknessF = 5.0 ; line thicker + res_lines@gsLineColor = "gray50" ; line color + xx = (/(min(x_axis_num) + 0.01 * (max(x_axis_num) - min(x_axis_num))), \ + (min(x_axis_num) + 0.05 * (max(x_axis_num) - min(x_axis_num)))/) + yy = (/(ymax + 0.07 * (ymax - ymin)), (ymax + 0.07 * (ymax - ymin))/) + + res_lines@gsLineColor = "black" + plot@$unique_string("dum")$ = gsn_add_polyline(wks_out, plot, xx, yy, \ + res_lines) + plot@$unique_string("dum")$ = gsn_add_text(wks_out, plot, data@ref_name, \ + (xx(1) + 0.3 * (xx(1) - xx(0))), \ + yy(0), res_text) + + yy = (/ymax, ymax/) + delete(res_lines@gsLineColor) + res_lines@gsLineColor = interval_colors(n_val-1, :) + plot@$unique_string("dum")$ = gsn_add_polyline(wks_out, plot, xx, yy, \ + res_lines) + plot@$unique_string("dum")$ = gsn_add_text(wks_out, plot, data@project, \ + (xx(1) + 0.3 * (xx(1) - xx(0))), \ + yy(0), res_text) + + ; Add confidence contours as polygons + do n = 0, n_val - 1 + res_contour@gsFillColor = interval_colors(n, :) + plot@$unique_string("dum")$ = 
gsn_add_polygon(wks_out, plot, \ + new_arr2&dtime, \ + new_arr2(n, :), \ + res_contour) + end do + + lbres = True + lbres@vpWidthF = 0.25 ; width + lbres@vpHeightF = 0.05 ; height + lbres@lbPerimOn = False ; Turn off perimeter + lbres@lbOrientation = "Horizontal" ; Default is vertical + ; lbres@lbLabelAlignment = "ExternalEdges" ; Default is "BoxCenters" + lbres@cnLabelBarEndStyle = "IncludeMinMaxLabels" ; turn on end labels + lbres@lbFillColors = interval_colors(::-1, :) ; colors for boxes + lbres@lbMonoFillPattern = True ; fill them all solid + lbres@lbLabelFontHeightF = 0.008 ; label font height + lbres@lbTitleString = "Confidence (%)" + lbres@lbTitlePosition = "Bottom" + ; lbres@lbTitleDirection = "Across" + lbres@lbBoxLinesOn = False + lbres@lbLabelStride = 2 + + labels = tostring_with_format(fspan(10, 90, n_val), "%2.0f") + + ; Override defaults with "res_" attributes of "data" + ; lbres_new = att2var(data, "lbres_") + ; copy_VarMeta(lbres_new, lbres) + + gsn_labelbar_ndc(wks_out, n_val, labels, 0.3, 0.74, lbres) + ; drawNDCGrid(wks_out) + + leave_msg(scriptname, funcname) + return(plot) + +end diff --git a/esmvaltool/diag_scripts/shared/plot/zonalmean_profile.ncl b/esmvaltool/diag_scripts/shared/plot/zonalmean_profile.ncl index 273f0c6c17..ecfafb5755 100644 --- a/esmvaltool/diag_scripts/shared/plot/zonalmean_profile.ncl +++ b/esmvaltool/diag_scripts/shared/plot/zonalmean_profile.ncl @@ -36,7 +36,7 @@ function zonalmean_profile(wks_in[1], ; Caveats ; ; Modification history -; 20131210-A_fran_fr: written. +; 20131210-winterstein_franziska: written. ; local funcname, scriptname, wks, wks_in, data, source, res, atts, base, varname begin @@ -105,6 +105,22 @@ begin plot = gsn_csm_pres_hgt(wks, data, res) plot@outfile = wks@name + ; Add the outline of the climatological tropopause + if (isatt(data, "add_tropopause")) then + if data@add_tropopause then + xyres = True + xyres@gsLineColor = "brown" + xyres@gsLineDashPattern = 1 + xyres@gsLineThicknessF = 3. + lat = data&lat + tp := (300. - 215. * (cos(lat / 180. * get_pi("f"))) ^ 2) + str = unique_string("tropopause") + plot@$str$ = gsn_add_polyline(wks, plot, lat, tp, xyres) + delete(tp) + delete(lat) + end if + end if + leave_msg(scriptname, funcname) return(plot) diff --git a/esmvaltool/diag_scripts/shared/regridding.ncl b/esmvaltool/diag_scripts/shared/regridding.ncl index 811598ab10..5c3429f6d7 100644 --- a/esmvaltool/diag_scripts/shared/regridding.ncl +++ b/esmvaltool/diag_scripts/shared/regridding.ncl @@ -526,14 +526,14 @@ function regrid_3D_to_rectilinear_grid(data_in:numeric, ; References ; ; Modification history -; 20151026_A_righ_ma: added warning for unavailable lat/lon vertices -; in input. -; 20151023_A_righ_ma: moved to regridding.ncl and renamed -; regrid_3D_data_to_global_rectilinear_grid --> -; regrid_3D_to_rectilinear_grid. -; 20150703_A_wenz_sa: moved to anav13jclim_func.ncl and adapted to -; ESMValTool structure. -; 201505??_A_anav_al: written. +; 20151026-righi_mattia: added warning for unavailable lat/lon vertices +; in input. +; 20151023-righi_mattia: moved to regridding.ncl and renamed +; regrid_3D_data_to_global_rectilinear_grid --> +; regrid_3D_to_rectilinear_grid. +; 20150703-wenzel_sabrina: moved to anav13jclim_func.ncl and adapted to +; ESMValTool structure. +; 201505??-anav_alessandro: written. 
; local funcname, scriptname, INTERP_METHOD, srcGridName, dstGridName, dimx, \ ntime, nlat, nlon, temp, Opt, sfile, var diff --git a/esmvaltool/diag_scripts/shared/scaling.ncl b/esmvaltool/diag_scripts/shared/scaling.ncl index 4800e9e02b..a2ef8f185d 100644 --- a/esmvaltool/diag_scripts/shared/scaling.ncl +++ b/esmvaltool/diag_scripts/shared/scaling.ncl @@ -37,8 +37,8 @@ function convert_units(var:numeric, ; ; References ; -; Modification history: -; 20150216-A_righ_ma: written. +; Modification history +; 20150216-righi_mattia: written. ; local funcname, scriptname begin @@ -58,6 +58,12 @@ begin units_from = var@units end if + if (units_from.eq.units_to) then + error_msg("w", scriptname, funcname, "No conversion needed from " + \ + units_from + " to " + units_to) + return(out) + end if + if (units_from.eq."1") then if (units_to.eq."g/kg") then out = out * 1000. @@ -65,12 +71,14 @@ begin leave_msg(scriptname, funcname) return(out) end if + if (units_to.eq."ppmv") then - out = out * 1.e-6 + out = out * 1.e6 out@units = units_to leave_msg(scriptname, funcname) return(out) end if + if (units_to.eq."ppm9") then out = out * 1.e-9 out@units = units_to @@ -79,6 +87,15 @@ begin end if end if + if (units_from.eq."K") then + if (any(units_to.eq.(/"degC"/))) then + out = out - 273.15 + out@units = units_to + leave_msg(scriptname, funcname) + return(out) + end if + end if + if (units_from.eq."1e-6") then if (any(units_to.eq.(/"ppm", "ppmv", "umol/mol"/))) then out@units = units_to @@ -107,7 +124,7 @@ begin end if end if - if (units_from.eq."mole mole-1") then + if (any(units_from.eq.(/"mole mole-1", "mol mol-1"/))) then if (any(units_to.eq.(/"ppm", "ppmv", "umol/mol"/))) then out = out * 1.e6 out@units = units_to @@ -135,6 +152,30 @@ begin leave_msg(scriptname, funcname) return(out) end if + if (any(units_to.eq.(/"g kg-1", "g/kg"/))) then + out = out * 1.e3 + out@units = units_to + leave_msg(scriptname, funcname) + return(out) + end if + end if + + if (units_from.eq."kg s-1") then + if (any(units_to.eq.(/"GtC"/))) then + out = out * 3600. * 24. / 1.e12 + out@units = units_to + leave_msg(scriptname, funcname) + return(out) + end if + end if + + if (units_from.eq."kg.s-1") then + if (any(units_to.eq.(/"PgC y-1", "GtC y-1"/))) then + out = out * 3600. * 24. * 365 / 1.e12 + out@units = units_to + leave_msg(scriptname, funcname) + return(out) + end if end if if (units_from.eq."kg m-2 s-1") then @@ -144,12 +185,18 @@ begin leave_msg(scriptname, funcname) return(out) end if - if (any(units_to.eq.(/"mm d-1", "mm/day"/))) then + if (any(units_to.eq.(/"mm d-1", "mm day-1", "mm/day"/))) then out = out * 24 * 3600 out@units = units_to leave_msg(scriptname, funcname) return(out) end if + if (any(units_to.eq.(/"mm y-1", "mm yr-1", "mm/yr"/))) then + out = out * 24 * 3600 * 365 + out@units = units_to + leave_msg(scriptname, funcname) + return(out) + end if end if if (units_from.eq."kg m-2") then @@ -251,5 +298,4 @@ begin error_msg("f", scriptname, funcname, "conversion from " + units_from + \ " to " + units_to + " not defined") - end diff --git a/esmvaltool/diag_scripts/shared/set_operators.ncl b/esmvaltool/diag_scripts/shared/set_operators.ncl index 7688f464cb..66d2708b47 100644 --- a/esmvaltool/diag_scripts/shared/set_operators.ncl +++ b/esmvaltool/diag_scripts/shared/set_operators.ncl @@ -35,7 +35,7 @@ function UNIQ(a) ; Reference ; ; Modification history -; 20130419-A_gott_kl: written. +; 20130419-gottschaldt_klaus-dirk: written. 
 ;
 local funcname, scriptname, a, i, u, doubles, idx
 begin
diff --git a/esmvaltool/diag_scripts/shared/statistics.ncl b/esmvaltool/diag_scripts/shared/statistics.ncl
index de08355f91..d8c67238f6 100644
--- a/esmvaltool/diag_scripts/shared/statistics.ncl
+++ b/esmvaltool/diag_scripts/shared/statistics.ncl
@@ -18,6 +18,8 @@
 ; function normalize_metric
 ; function distrib_stats
 ; function lognormal_dist
+; function filter121
+; function get_average
 ;
 ; #############################################################################
 
@@ -55,9 +57,9 @@ function dim_stddev_wgt_Wrap(field[*]:numeric,
 ;    en.wikipedia.org/wiki/Weighted_arithmetic_mean#Weighted_sample_variance
 ;
 ; Modification history
-;    20150511-A_laue_ax: modified routine "calculate_metric":
-;                        added "no weigth" (nowgt) option
-;    20141215-A_righ_ma: written.
+;    20150511-lauer_axel: modified routine "calculate_metric":
+;                         added "no weight" (nowgt) option
+;    20141215-righi_mattia: written.
 ;
 local funcname, scriptname, wavg, wgt, v1, v2, d2, arg
 begin
@@ -122,8 +124,6 @@ function time_operations(field:numeric,
 ;              "seasonalclim": seasonal climatology for the standard seasons
 ;                              DJF, MAM, JJA, SON.
 ;              "monthlyclim": monthly climatology jan-dec.
-;                             For monthly input data only! Apply mymm first,
 if necessary.
-;              "mymm": multi year monthly mean
 ;              "yearly": time average over every year in [y1:y2].
 ;              [month strings]: climatology of selected (consecutive) months
 ;                               (e.g., "MAM", "SONDJ").
@@ -142,8 +142,7 @@
 ;
 ; Caveats
 ;    The weighted standard deviation is not yet implmented for all cases
-;    The weighted standard deviation is calculated using the unbiased
-;    estimator, c
+;    The weighted standard deviation is calculated using the unbiased estimator
 ;    This should take into account missing values and exclude the w_i for
 ;    which the field contains only missing values. This feature is not
 ;    implemented yet.
@@ -151,9 +150,12 @@
 ; References
 ;
 ; Modification history
-;    20140703-A_gott_kl: added option "mymm".
-;    20140312-A_righ_ma: extended with standard deviation.
-;    20140109-A_righ_ma: written.
+;    20201214-lauer_axel: bugfix time weights
+;    20190503-righi_mattia: removed obsolete option "mymm" (used only in
+;                           reformat_obs, now outdated).
+;    20140703-gottschaldt_klaus-dirk: added option "mymm".
+;    20140312-righi_mattia: extended with standard deviation.
+;    20140109-righi_mattia: written.
 ;
 local funcname, scriptname, monthstr, date, year, month, idx1, idx2, loc_y1, \
   loc_y2, rank, subfield, weights, idx, idx_win, idx_spr, idx_sum, idx_aut, \
@@ -196,6 +198,19 @@ begin
   date := cd_calendar(field&time, 0)
   year := date(:, 0)
   month := date(:, 1)
+  cal = field&time@calendar
+
+  ; check that calendar is supported by 'days_in_month'
+
+  valid_cal = (/"standard", "gregorian", "julian", "360_day", "360", \
+                "365_day", "365", "366_day", "366", "noleap", "no_leap", \
+                "allleap", "all_leap", "none"/)
+
+  if (ismissing(ind(valid_cal .eq. cal))) then
+    log_info("Warning: unsupported calendar in function " + funcname + \
+             " (" + scriptname + "): " + cal + ". Using 'standard' instead.")
+    cal = "standard"
+  end if
 
   ; Determine indexes for the requested time range
   if (y1.eq.-1) then
@@ -243,7 +258,9 @@ begin
 
   ; Define weights as days-per-month
   if (l_wgt) then
-    weights = days_in_month(toint(year), toint(month))
+    iyear = toint(year)
+    iyear@calendar = cal
+    weights = days_in_month(iyear, toint(month))
   else
     weights = tofloat(subfield&time)
     weights = 1.
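; [Editor's note] A hedged sketch of the weighting logic added above:
; monthly values are weighted by days per month, after checking the time
; coordinate's calendar attribute against the list supported by
; days_in_month and falling back to "standard" otherwise. Here 'field' is
; a hypothetical monthly time series with a CF time coordinate:
;
;   date = cd_calendar(field&time, 0)
;   iyear = toint(date(:, 0))
;   iyear@calendar = "standard"  ; or the checked field&time@calendar
;   weights = days_in_month(iyear, toint(date(:, 1)))
;   wavg = dim_avg_wgt_Wrap(field, weights, 1)  ; day-per-month weighted mean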
@@ -470,147 +487,6 @@ begin return(out) end if - ; Multi year monthly mean - ; Output 12 months for each year that occurs in variable "year". - ; Months without an input value are set to missing. - if (opt.eq."mymm") then - - ; Concatenate year and month of input field - ym_in = 100 * toint(year) + toint(month) - - ; Init target field - years = ispan(toint(min(year)), toint(max(year)), 1) - nyear = dimsizes(years) - ym = new(nyear * 12, integer) - do yy = 0, nyear - 1 - do mm = 0, 11 ; concatenate as well, to avoid nested loop - ym((yy * 12) + mm) = 100 * years(yy) + mm + 1 - end do - end do - dims = dimsizes(subfield) - if(rank.eq.4) then - dims(0, :, :, :) = dimsizes(ym) - else if(rank.eq.3) then - dims(0, :, :) = dimsizes(ym) - else if(rank.eq.2) then - dims(0, :) = dimsizes(ym) - else if(rank.eq.1) then - dims(0) = dimsizes(ym) - else - error_msg("f", scriptname, funcname, "rank = " + tostring(rank) + \ - " not implemented for mymm in time_operations") - end if - end if - end if - end if - out = new(dims, typeof(subfield)) - copy_VarCoords_l1(subfield, out) ; auxiliary.ncl - timec = create_timec(min(years), max(years)) - out!0 = "time" - out&time = timec - - ; Determine FillValue - if(isatt(subfield, "_FillValue")) then - FillValue = subfield@_FillValue - else - FillValue = default_fillvalue(typeof(subfield)) - end if - out@_FillValue = FillValue - - ; Fill target field - if(rank.eq.4) then - do i = 0, dimsizes(ym) - 1 - index = ind(ym_in.eq.ym(i)) - if(.not.all(ismissing(index))) then - out(i, :, :, :) = \ - dim_avg_wgt_n_Wrap(subfield(index, :, :, :), weights(index), 1, 0) - if (oper.eq."stddev") then - p1 = subfield(index, :, :, :) - p2 = conform(p1, out(i, :, :, :), (/1, 2, 3/)) ^ 2 - d2 = (p1 - p2) ^ 2 - arg = dim_sum_wgt_n_Wrap(d2, weights(index), 1, 0) - v1 = sum(weights(index)) - v2 = sum(weights(index) ^ 2) - out(i) = sqrt(v1 / (v1 ^ 2 - v2) * arg) - delete([/p1, p2, d2, arg, v1, v2/]) - end if - else - out(i, :, :, :) = FillValue - end if - delete(index) - end do - else if(rank.eq.3) then - do i = 0, dimsizes(ym) - 1 - index = ind(ym_in.eq.ym(i)) - if(.not.all(ismissing(index))) then - out(i, :, :) = \ - dim_avg_wgt_n_Wrap(subfield(index, :, :), weights(index), 1, 0) - if (oper.eq."stddev") then - p1 = subfield(index, :, :) - p2 = conform(p1, out(i, :, :), (/1, 2/)) ^ 2 - d2 = (p1 - p2) ^ 2 - arg = dim_sum_wgt_n_Wrap(d2, weights(index), 1, 0) - v1 = sum(weights(index)) - v2 = sum(weights(index) ^ 2) - out(i) = sqrt(v1 / (v1 ^ 2 - v2) * arg) - delete([/p1, p2, d2, arg, v1, v2/]) - end if - else - out(i, :, :) = FillValue - end if - delete(index) - end do - else if(rank.eq.2) then - do i = 0, dimsizes(ym) - 1 - index = ind(ym_in.eq.ym(i)) - if(.not.all(ismissing(index))) then - out(i, :) = \ - dim_avg_wgt_n_Wrap(subfield(index, :), weights(index), 1, 0) - if (oper.eq."stddev") then - p1 = subfield(index, :) - p2 = conform(p1, out(i, :), 1) ^ 2 - d2 = (p1 - p2) ^ 2 - arg = dim_sum_wgt_n_Wrap(d2, weights(index), 1, 0) - v1 = sum(weights(index)) - v2 = sum(weights(index) ^ 2) - out(i) = sqrt(v1 / (v1 ^ 2 - v2) * arg) - delete([/p1, p2, d2, arg, v1, v2/]) - end if - else - out(i, :) = FillValue - end if - delete(index) - end do - else if(rank.eq.1) then - do i = 0, dimsizes(ym) - 1 - index = ind(ym_in.eq.ym(i)) - if(.not.all(ismissing(index))) then - out(i) = dim_avg_wgt_Wrap(subfield(index), weights(index), 1) - if (oper.eq."stddev") then - d2 = (subfield(index) - out(i)) ^ 2 - arg = dim_sum_wgt_Wrap(d2, weights(index), 1) - v1 = sum(weights(index)) - v2 = sum(weights(index) ^ 2) 
-          out(i) = sqrt(v1 / (v1 ^ 2 - v2) * arg)
-          delete([/d2, arg, v1, v2/])
-        end if
-      else
-        out(i) = FillValue
-      end if
-      delete(index)
-    end do
-    else
-      error_msg("f", scriptname, funcname, "rank = " + tostring(rank) + \
-                " not implemented for mymm in time_operations")
-    end if
-    end if
-    end if
-    end if
-
-    leave_msg(scriptname, funcname)
-    return(out)
-  end if
-
   ; Months string (at least 2 consecutive months): define indexes
   if (.not.ismissing(str_match_ind_ic(monthstr, opt)).and. \
       strlen(opt).ge.2.and.strlen(opt).le.12) then
@@ -1012,7 +888,7 @@ function coswgt_areaave(field:numeric)
 ; References
 ;
 ; Modification history
-;    20131209-A_eval_ma: written.
+;    20131209-evaldsson_martin: written.
 ;
 local funcname, scriptname, lat, wgt_lat, lon, lon_size, wgt_lon, ave
 begin
@@ -1056,8 +932,8 @@ function coswgt_arearmse(field1:numeric,
 ;
 ; References
 ;
-; Modification history:
-;    20131209-A_eval_ma: written.
+; Modification history
+;    20131209-evaldsson_martin: written.
 ;
 local funcname, scriptname, lat, wgt_lat, lon, lon_size, wgt_lon, rmse, \
   local_field1, local_field2
@@ -1112,8 +988,8 @@ function coswgt_pattern_cor(field1:numeric,
 ;
 ; References
 ;
-; Modification history:
-;    20140115-A_eval_ma: written.
+; Modification history
+;    20140115-evaldsson_martin: written.
 ;
 local funcname, scriptname, lat, wgt_lat, lon, lon_size, wgt_lon, \
   pattern_correlation, local_field1, local_field2
@@ -1190,8 +1066,8 @@ function interannual_variability(field: numeric,
 ; Reference
 ;
 ; Modification history
-;    20181022-A_lore_ru: added option dtr for possible detrending of data v2
-;    20140314-A_righ_ma: written.
+;    20181022-lorenz_ruth: added option dtr for possible detrending of data v2
+;    20140314-righi_mattia: written.
 ;
 local funcname, scriptname, monthstr, rank, field_avg, field_djf, field_mam, \
   field_jja, field_son, field_avg_djf, field_avg_mam, field_avg_jja, \
@@ -1402,9 +1278,10 @@ function calculate_metric(var:numeric,
 ;    dimensionality.
 ;
 ; Modification history
-;    20140313-A_righ_ma: implemented weights calculation within the function,
-;                        depending on dimensionality.
-;    20140120-A_fran_fr: written.
+;    20190312-bock_lisa: added calculation of RMSDxy
+;    20140313-righi_mattia: implemented weights calculation within the
+;                           function, depending on dimensionality.
+;    20140120-winterstein_franziska: written.
 ;
 local funcname, scriptname, dims_var, dims_ref, ii, dim_names, mdays, sdays, \
   weights, var1d, ref1d, wgt1d, avg_var, avg_ref, p1, p2, p3, var3d, wgt3d, \
@@ -1470,15 +1347,37 @@ begin
       time_weights = sdays
     end if
 
-  if (dim_names(1).eq."lat" .and. dim_names(2).eq."lon") then
-    area_weights = map_area(var&lat, var&lon)
+  ; Annual-mean time-series
+  if (dim_names(0).eq."year") then
+    time_weights = new(dims_var(0), float)
+    time_weights = 1.
   end if
 
-  if (isdefined("time_weights").and.isdefined("area_weights")) then
-    weights = new(dimsizes(var), float)
-    do ii = 0, dimsizes(time_weights) - 1
-      weights(ii, :, :) = time_weights(ii) * area_weights
-    end do
+  if (metric .eq. "RMSDxy") then
+    weights = time_weights
+  else
+    if (dim_names(1).eq."lat" .and. dim_names(2).eq."lon") then
+      area_weights = map_area(var&lat, var&lon)
+    end if
+
+    if (dim_names(1).eq."plev" .and. dim_names(2).eq."lat") then
+      areas = map_area(ref&lat, (/1.0, 2.0/))
+      nlev = dimsizes(ref&plev)
+      ptop = ref&plev(nlev - 1) - \
+        0.5 * (ref&plev(nlev - 2) - ref&plev(nlev - 1))
+      delta_p = dpres_plevel(ref&plev, 101325., ptop, 0)
+      area_weights = new((/nlev, dimsizes(ref&lat)/), float)
+      wdims = dimsizes(area_weights)
+      area_weights = conform_dims(wdims, delta_p, 0) * \
+        conform_dims(wdims, areas(:, 0), 1)
+    end if
+
+    if (isdefined("time_weights").and.isdefined("area_weights")) then
+      weights = new(dimsizes(var), float)
+      do ii = 0, dimsizes(time_weights) - 1
+        weights(ii, :, :) = time_weights(ii) * area_weights
+      end do
+    end if
+  end if
   end if
@@ -1498,9 +1397,12 @@ begin
     wgt1d = 1.0
   end if
 
-  ; Calculate weighted averages
-  avg_var = dim_avg_wgt_Wrap(var1d, wgt1d, 1)
-  avg_ref = dim_avg_wgt_Wrap(ref1d, wgt1d, 1)
+  if (isStrSubset(metric, "stddev_ratio") .or. \
+      isStrSubset(metric, "correlation")) then
+    ; Calculate weighted averages
+    avg_var = dim_avg_wgt_Wrap(var1d, wgt1d, 1)
+    avg_ref = dim_avg_wgt_Wrap(ref1d, wgt1d, 1)
+  end if
 
   ; RMSD
   if (metric.eq."RMSD") then
@@ -1509,6 +1411,13 @@ begin
     return(out)
   end if
 
+  ; RMSDxy
+  if (metric.eq."RMSDxy") then
+    out = sqrt(dim_avg_wgt_n_Wrap((var - ref) ^ 2, weights, 1, 0))
+    leave_msg(scriptname, funcname)
+    return(out)
+  end if
+
   ; BIAS
   if (metric.eq."BIAS") then
     out = dim_avg_wgt_Wrap((var1d - ref1d), wgt1d, 1)
@@ -1534,6 +1443,61 @@ begin
     return(out)
   end if
 
+  ; Single Model Performance Index
+  if (metric.eq."SMPI") then
+    nyears = dimsizes(var&year)
+    out = new(diag_script_info@smpi_n_bootstrap + 1, float)
+    do ibootstrap = 0, diag_script_info@smpi_n_bootstrap
+      if (ibootstrap.eq.0) then
+        bootvect = ispan(0, nyears - 1, 1)
+      else
+        icnt = 0
+        do while (icnt .le. 10)
+          bootvect = generate_sample_indices(nyears, 1)
+          icnt = icnt + 1
+          if (.not.all(bootvect(:).eq.bootvect(0))) then
+            break
+          end if
+        end do
+        if (all(bootvect(:).eq.bootvect(0))) then
+          error_msg("f", scriptname, funcname, \
+                    "Number of years too small for bootstrapping. Abort.")
+        end if
+      end if
+      obs = ref(bootvect, :, :)
+      mod1D = ndtooned(dim_avg_n(var, 0))
+      ref1D = ndtooned(dim_avg_n(obs, 0))
+      sig1D = ndtooned(dim_stddev_n_Wrap(obs, 0))
+      sig1D@_FillValue = default_fillvalue(typeof(sig1D))
+      sig1D = where(sig1D.eq.0, sig1D@_FillValue, sig1D)
+
+      delete(weights)
+      delete(wgt1d)
+      if (isdim(obs, "lon").and.isdim(obs, "lat")) then
+        weights = map_area(obs&lat, obs&lon)
+      elseif (isdim(obs, "plev").and.isdim(obs, "lat")) then
+        areas = map_area(obs&lat, (/1.0, 2.0/))
+        nlev = dimsizes(obs&plev)
+        ptop = \
+          obs&plev(nlev - 1) - 0.5 * (obs&plev(nlev - 2) - obs&plev(nlev - 1))
+        delta_p = dpres_plevel(obs&plev, 101325., ptop, 0)
+        weights = new((/dimsizes(obs&plev), dimsizes(obs&lat)/), float)
+        wdims = dimsizes(weights)
+        weights = \
+          conform_dims(wdims, delta_p, 0) * conform_dims(wdims, areas(:, 0), 1)
+      else
+        error_msg("f", diag_script, "", "Unknown dimensions in variable obs.")
+      end if
+
+      wgt1d = ndtooned(weights)
+      out(ibootstrap) = \
+        dim_avg_wgt_Wrap((mod1D - ref1D) ^ 2 / sig1D ^ 2, wgt1d, 1)
+
+    end do
+    leave_msg(scriptname, funcname)
+    return(out)
+  end if
+
   error_msg("f", scriptname, funcname, "metric " + metric + " not available")
 
 end
@@ -1566,8 +1530,8 @@ function normalize_metric(var:numeric,
 ; Reference
 ;
 ; Modification history
-;    20140609-A_righ_ma: absolute value added to "mean" normalization.
-;    20140120-A_fran_fr: written.
+;    20140609-righi_mattia: absolute value added to "mean" normalization.
+;    20140120-winterstein_franziska: written.
 ;
 local funcname, scriptname, val_var, norm, stdv, p1
 begin
@@ -1644,7 +1608,7 @@ function distrib_stats(var[*]:numeric,
 ; Reference
 ;
 ; Modification history
-;    20140526-A_righ_ma: written.
+;    20140526-righi_mattia: written.
 ;
 local funcname, scriptname, lvar, nsize, idx
 begin
@@ -1750,7 +1714,7 @@ function lognormal_dist(nn:numeric,
 ;    New York, US, 1998.
 ;
 ; Modification history
-;    20130528-A_righ_ma: written.
+;    20130528-righi_mattia: written.
 ;
 local funcname, scriptname, pi, sqrt2pi, dd, ee
 begin
@@ -1798,3 +1762,129 @@ begin
 
   return(fout)
 end
+
+; #############################################################################
+undef("filter121")
+function filter121(var:numeric, \
+                   iter:numeric)
+;
+; Arguments
+;  var: a 1-D array.
+;  iter: number of iterations.
+;
+; Return value
+;  Array of size and type of var.
+;
+; Description
+;  Smooths a time series by iteratively applying a 1-2-1 filter.
+;
+; Modification history
+;  20180807-schlund_manuel: ported to v2.0
+;  20140721-wenzel_sabrina: written.
+;
+local yy_sm, yy_sm0, n_max
+begin
+
+  funcname = "filter121"
+  scriptname = "diag_scripts/shared/statistics.ncl"
+  enter_msg(scriptname, funcname)
+
+  rank = dimsizes(dimsizes(var))
+  n_max = dimsizes(var)
+
+  if (rank .eq. 1) then
+    yy_sm = var
+    yy_sm0 = yy_sm
+
+    ; Iteration
+    do ia = 0, iter
+      yy_sm0 = yy_sm
+      do it = 1, n_max - 2
+        yy_sm(it) = (yy_sm0(it - 1) + 2.0 * yy_sm0(it) + yy_sm0(it + 1)) / 4.0
+      end do
+    end do
+    out = yy_sm
+    delete([/yy_sm, yy_sm0/])
+  else
+    error_msg("f", scriptname, funcname, "smoothing is currently not " + \
+              "implemented for more than 1 dimension")
+  end if
+
+  leave_msg(scriptname, funcname)
+  return(out)
+end
+
+; #############################################################################
+undef("get_average")
+function get_average(items: list)
+
+;
+; Arguments
+;  items: list of input_file_info metadata
+;
+; Description
+;  Calculates average over M different datasets given by items. All given
+;  datasets must be of the same shape (X1, X2, X3, ..., Xn). This function
+;  creates an intermediate array of shape (M, X1, X2, X3, ..., Xn) with
+;  'dataset' as first dimension. Then, the unweighted mean over this first
+;  dimension is returned.
+;
+; Caveats
+;
+; References
+;
+; Modification history
+;  20181217-schlund_manuel: written
+;
+local funcname, scriptname, dim_dat, dim_array, rank, data_array, data
+begin
+
+  funcname = "get_average"
+  scriptname = "diag_scripts/shared/statistics.ncl"
+  enter_msg(scriptname, funcname)
+
+  ; Check input list
+  dim_dat = ListCount(items)
+  if (dim_dat .lt. 1) then
+    error_msg("f", scriptname, funcname, "expected at least one dataset, " + \
+              "got empty list")
+  end if
+
+  ; Collect data
+  do idat = 0, dim_dat - 1
+    data = read_data(items[idat])
+
+    ; Create array at first iteration
+    if (.not. isvar("data_array")) then
+      dim_array = dimsizes(data)
+      rank = dimsizes(dim_array)
+      data_array = new(array_append_record(dim_dat, dim_array, 0), \
+                       typeof(data))
+    end if
+    if (rank .eq. 4) then
+      copy_VarCoords(data, data_array(0, :, :, :, :))
+      data_array(idat, :, :, :, :) = (/data/)
+    elseif (rank .eq. 3) then
+      copy_VarCoords(data, data_array(0, :, :, :))
+      data_array(idat, :, :, :) = (/data/)
+    elseif (rank .eq. 2) then
+      copy_VarCoords(data, data_array(0, :, :))
+      data_array(idat, :, :) = (/data/)
+    elseif (rank .eq. 1) then
+      copy_VarCoords(data, data_array(0, :))
+      data_array(idat, :) = (/data/)
+    else
+      error_msg("f", scriptname, funcname, "unsupported rank " + rank)
+    end if
+  end do
+
+  ; Average
+  data_array!0 = "dataset"
+  data_array := dim_avg_n_Wrap(data_array, 0)
+  copy_VarMeta(data, data_array)
+  log_info("Averaged over " + dim_dat + " dataset(s)")
+
+  leave_msg(scriptname, funcname)
+  return(data_array)
+
+end
diff --git a/esmvaltool/diag_scripts/tebaldi21esd/calc_IAV_hatching.ncl b/esmvaltool/diag_scripts/tebaldi21esd/calc_IAV_hatching.ncl
new file mode 100644
index 0000000000..f8f0d83511
--- /dev/null
+++ b/esmvaltool/diag_scripts/tebaldi21esd/calc_IAV_hatching.ncl
@@ -0,0 +1,414 @@
+; #############################################################################
+; INTERANNUAL VARIABILITY MULTI-MODEL MEAN FOR STIPPLING SIGNIFICANCE IPCC CH12
+; Author: Ruth Lorenz (ETH Zurich, Switzerland)
+; CRESCENDO project
+; #############################################################################
+;
+; Description
+;   Calculate Interannual variability from piControl runs for plotting
+;   significance with stippling and hatching, save for each model
+;   (iavmode: "each") or multi-model mean only.
+;   Can either be calculated over full time period of piControl run
+;   (no periodlength given)
+;   or first averaged annually/seasonally over periodlength and then standard
+;   deviation calculated based on averaged periods.
+;
+; Required diag_script_info attributes (diagnostics specific)
+;   time_avg: time period to calculate IAV, e.g. annualclim, seasonalclim
+;
+; Optional diag_script_info attributes (diagnostic specific)
+;   iavmode: calculate multi-model mean of IAV over all models or calculate
+;            and save IAV for each individual model? (mmm, each)
+;   periodlength: length of periods to calculate IAV across, depends on
+;                 period lengths used in
+;                 ch12_calc_map_diff_mmm_stippandhatch.ncl
+;                 if not given whole time period calculated at once
+;
+; Caveats
+;   Needs lots of memory for 3D ocean variables
+;
+; Modification history
+;   20161219-lorenz_ruth: remove seasonal cycle before std if seasonal
+;   20161024-lorenz_ruth: adapted to ESMValTool
+;   20130501-sedlacek_jan: written for IPCC AR5 as get_natvar.ncl.
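+;
+; Example recipe settings (editor's sketch; attribute names as documented
+; above, values purely hypothetical):
+;   time_avg: annualclim
+;   iavmode: each
+;   periodlength: 20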
+;
+; #############################################################################
+
+load "$diag_scripts/../interface_scripts/interface.ncl"
+load "$diag_scripts/shared/statistics.ncl"
+
+begin
+  ; ##############################################################
+  ; # Fetch general parameters, set in namelist_collins13ipcc.xml#
+  ; # passed via environment variables by python code            #
+  ; ##############################################################
+  enter_msg(DIAG_SCRIPT, "")
+
+  ; 'datasets', 'variables' are fetched from the above 'interface.ncl' file
+  var0 = variable_info[0]@short_name
+  info_items = select_metadata_by_name(input_file_info, var0)
+  dim_MOD = ListCount(info_items)
+  dim_VAR = ListCount(variable_info)
+
+  ; Check required diag_script_info attributes
+  req_atts = (/"time_avg"/)
+  exit_if_missing_atts(diag_script_info, req_atts)
+end
+
+begin
+  ; Output netcdf directory
+  work_dir = config_user_info@work_dir
+  system("mkdir -p " + work_dir)
+
+  plot_dir = config_user_info@plot_dir
+  system("mkdir -p " + plot_dir)
+end
+
+begin
+  ; #############################################
+  ; # Get parameters from ./variable_defs/*.ncl #
+  ; # passed via the 'info' attribute           #
+  ; #############################################
+  if (isvar("MyParam")) then
+    delete(MyParam)
+  end if
+  if (isatt(variable_info[0], "long_name")) then
+    MyParam = variable_info[0]@long_name
+    log_debug(" MyParam = " + MyParam)
+  else
+    error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \
+              "'long_name'")
+  end if
+end
+
+begin
+  ; ###########################################
+  ; # Get data and average time               #
+  ; ###########################################
+  ; get data from first dataset
+  imod = 0  ; NCL array indices start from zero
+  log_debug("processing " + info_items[imod]@dataset + "_" \
+            + info_items[imod]@exp + "_" \
+            + info_items[imod]@ensemble)
+
+  ; See ./interface_scripts/data_handling.ncl
+  A0 = read_data(info_items[imod])
+
+  ; Check dimensions
+  dims = getvardims(A0)
+  ndim = dimsizes(dims)
+
+  if (ndim .lt. 3) then
+    error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + " dimensions, " + \
+              "need 3 or 4")
+  end if
+  idx = ind(dims .eq. "lat" .or. dims .eq. "rlat" .or. dims .eq. "j")
+  if (ismissing(idx)) then
+    error_msg("f", DIAG_SCRIPT, "", "no lat dimension")
+  end if
+  lat_name = dims(idx)
+
+  idx = ind(dims .eq. "lon" .or. dims .eq. "rlon" .or. dims .eq. "i")
+  if (ismissing(idx)) then
+    error_msg("f", DIAG_SCRIPT, "", "no lon dimension")
+  end if
+  lon_name = dims(idx)
+
+  if (ndim .gt. 3) then
+    idx = ind(dims .eq. "lev" .or. dims .eq. "plev")
+    if (ismissing(idx)) then
+      error_msg("f", DIAG_SCRIPT, "", "no level dimension")
+    end if
+    lev_name = dims(idx)
+  end if
+
+  index = ispan(0, dim_MOD - 1, 1)
+  if (diag_script_info@time_avg .eq. "seasonalclim") then
+    dim_seas = 4
+    diag_script_info@seasons = (/0, 1, 2, 3/)
+  else
+    dim_seas = 1
+    diag_script_info@seasons = (/0/)
+  end if
+
+  Fill = default_fillvalue(typeof(A0))
+  if (ndim .eq. 3) then
+    data1 = new((/dim_MOD, dim_seas, dimsizes(A0&lat), \
+                  dimsizes(A0&lon)/), typeof(A0), Fill)
+    if ((.not. isatt(diag_script_info, "iavmode")) .or. \
+        (diag_script_info@iavmode .ne. "each")) then
+      IAV_mmm = new((/dim_seas, dimsizes(A0&lat), \
+                      dimsizes(A0&lon)/), typeof(A0), Fill)
+    end if
+  elseif (ndim .eq. 4) then
+    data1 = new((/dim_MOD, dim_seas, dimsizes(A0&$lev_name$), \
+                  dimsizes(A0&lat), dimsizes(A0&lon)/), \
+                typeof(A0), Fill)
+    if ((.not. isatt(diag_script_info, "iavmode")) .or. \
+        (diag_script_info@iavmode .ne. "each")) then
+      IAV_mmm = new((/dim_seas, dimsizes(A0&$lev_name$), \
+                      dimsizes(A0&lat)/), \
+                    typeof(A0), Fill)
+    end if
+  end if
+
+  do imod = 0, dim_MOD - 1
+    if (imod .ne. 0) then
+      delete(A0)
+      A0 = read_data(info_items[imod])
+      dims = getvardims(A0)
+      idx = ind(dims .eq. "lat" .or. dims .eq. "rlat" .or. dims .eq. "j")
+      if (ismissing(idx)) then
+        error_msg("f", DIAG_SCRIPT, "", "no lat dimension")
+      end if
+      lat_name = dims(idx)
+
+      idx = ind(dims .eq. "lon" .or. dims .eq. "rlon" .or. dims .eq. "i")
+      if (ismissing(idx)) then
+        error_msg("f", DIAG_SCRIPT, "", "no lon dimension")
+      end if
+      lon_name = dims(idx)
+
+      if (ndim .eq. 4) then
+        idx = ind(dims .eq. "lev" .or. dims .eq. "plev")
+        if (ismissing(idx)) then
+          error_msg("f", DIAG_SCRIPT, "", "no level dimension")
+        end if
+        lev_name = dims(idx)
+      end if
+    end if
+
+    ; Calculate IAV (function in ./diag_scripts/shared/statistics.ncl)
+    ; cut the first 100 yr
+    time = A0&time
+    start_yr = cd_calendar(time(0), 0)
+    ind_end = dimsizes(time) - 1
+    end_yr = cd_calendar(time(ind_end), 0)
+    if ((end_yr(0, 0) - start_yr(0, 0) + 1) .lt. 500) then
+      log_info("Warning: Control run is less than 500 years.")
+    end if
+    new_start_yr = toint((start_yr(0, 0) + 100))
+    end_yr_int = toint(end_yr(0, 0))
+    if (isatt(diag_script_info, "periodlength")) then
+      length_of_period = toint(diag_script_info@periodlength)
+      nr_periods = toint(floor((dimsizes(time) - 12 * 100) / \
+                                (length_of_period * 12.)))
+      if (nr_periods .lt. 1) then
+        error_msg("w", DIAG_SCRIPT, "", "time range too short, " + \
+                  "less than 1 period covered, continue with " + \
+                  "next model")
+        continue
+      end if
+      yr_possible = (dimsizes(time) - 12 * 100) / (length_of_period * 12.)
+      rest = yr_possible - nr_periods
+      start_yrs = ispan(new_start_yr, end_yr_int, length_of_period)
+      if (ndim .eq. 3) then
+        data_tmp = new((/nr_periods, dim_seas, dimsizes(A0&$lat_name$), \
+                         dimsizes(A0&$lon_name$)/), typeof(A0), Fill)
+      elseif (ndim .eq. 4) then
+        data_tmp = new((/nr_periods, dim_seas, dimsizes(A0&$lev_name$), \
+                         dimsizes(A0&$lat_name$), dimsizes(A0&$lon_name$)/), \
+                       typeof(A0), Fill)
+      end if
+
+      do per = 0, nr_periods - 1
+        if ((rest .gt. 0) .and. (per .eq. nr_periods - 1)) then
+          new_end_yr = end_yr_int
+        else
+          new_end_yr = toint(start_yrs(per) + length_of_period - 1.0)
+        end if
+        ; calculate seasonal/annual averages over periods
+        if ((dim_seas .eq. 1) .and. \
+            (diag_script_info@time_avg .eq. "annualclim")) then
+          if (ndim .eq. 3) then
+            data_tmp(per, 0, :, :) = \
+              time_operations(A0, start_yrs(per), new_end_yr, "average", \
+                              diag_script_info@time_avg, True)
+          elseif (ndim .eq. 4) then
+            data_tmp(per, 0, :, :, :) = \
+              time_operations(A0, start_yrs(per), new_end_yr, "average", \
+                              diag_script_info@time_avg, True)
+          end if
+        else
+          if (ndim .eq. 3) then
+            data_tmp(per, :, :, :) = \
+              time_operations(A0, start_yrs(per), new_end_yr, "average", \
+                              diag_script_info@time_avg, True)
+          elseif (ndim .eq. 4) then
+            data_tmp(per, :, :, :, :) = \
+              time_operations(A0, start_yrs(per), new_end_yr, "average", \
+                              diag_script_info@time_avg, True)
+          end if
+        end if
+      end do
+      data_dtr = dtrend_quadratic_msg_n(data_tmp, False, False, 0)
+      if (typeof(data_dtr) .ne. typeof(data1)) then
+        if ((typeof(data_dtr) .eq. "double") .and. \
+            (typeof(data1) .eq. "float")) then
+          tmp = data_dtr
+          delete(data_dtr)
+          data_dtr = doubletofloat(tmp)
+        elseif ((typeof(data_dtr) .eq. "float") .and. \
+                (typeof(data1) .eq. "double")) then
+          tmp = data_dtr
+          delete(data_dtr)
+          data_dtr = floattodouble(tmp)
+        else
+          error_msg("f", DIAG_SCRIPT, "", "Type conversion issue, " + \
+                    "data_dtr has a different type than data1 which is " + \
+                    "neither float or double.")
+        end if
+      end if
+      if (ndim .eq. 3) then
+        data1(imod, :, :, :) = dim_stddev_n(data_dtr, 0)
+      elseif (ndim .eq. 4) then
+        data1(imod, :, :, :, :) = dim_stddev_n(data_dtr, 0)
+      end if
+      delete(start_yrs)
+      delete(data_tmp)
+      delete(data_dtr)
+    else
+      data1_tmp = interannual_variability(A0, new_start_yr, end_yr_int, \
+                                          diag_script_info@time_avg, \
+                                          "quadratic")
+      if (ndim .eq. 3) then
+        data1(imod, 0, :, :) = data1_tmp
+      elseif (ndim .eq. 4) then
+        data1(imod, :, :, :, :) = data1_tmp
+      end if
+    end if
+    delete(start_yr)
+    delete(end_yr)
+    delete(time)
+
+    if ((isatt(diag_script_info, "iavmode")) .and. \
+        (diag_script_info@iavmode .eq. "each")) then
+      outfile = "IAV_" + info_items[imod]@exp + "_" + \
+        info_items[imod]@dataset + "_" + info_items[imod]@ensemble + "_" + \
+        var0 + "_" + diag_script_info@time_avg + ".nc"
+      file_exist = isfilepresent(outfile)
+      if (file_exist .and. diag_script_info@overwrite .eq. False) then
+        continue
+      end if
+      if (imod .eq. 0) then
+        if (ndim .eq. 3) then
+          data1!0 = "models"
+          data1!1 = "season"
+          data1!2 = "lat"
+          data1!3 = "lon"
+          data1&lon = A0&lon
+        elseif (ndim .eq. 4) then
+          data1!0 = "models"
+          data1!1 = "season"
+          data1!2 = lev_name
+          data1&$lev_name$ = A0&$lev_name$
+          data1!3 = "lat"
+          data1!4 = "lon"
+          data1&lon = A0&lon
+        end if
+        if (diag_script_info@time_avg .eq. "seasonalclim") then
+          data1&season = (/ispan(0, dim_seas - 1, 1)/)
+        else
+          data1&season = (/0/)
+        end if
+        data1&lat = A0&lat
+        data1@diag_script = (/DIAG_SCRIPT/)
+        data1@var = "iav"
+        if (isatt(variable_info[0], "long_name")) then
+          data1@var_long_name = "iav of " + variable_info[0]@long_name
+        end if
+        if (isatt(variable_info[0], "units")) then
+          data1@units = variable_info[0]@units
+        end if
+      end if
+      ; ###########################################
+      ; # Output to netCDF                        #
+      ; ###########################################
+      if (dim_seas .eq. 1) then
+        if (ndim .eq. 3) then
+          write_data = data1(imod, 0, :, :)
+        elseif (ndim .eq. 4) then
+          write_data = data1(imod, 0, :, :, :)
+        end if
+      else
+        if (ndim .eq. 3) then
+          write_data = data1(imod, :, :, :)
+        elseif (ndim .eq. 4) then
+          write_data = data1(imod, :, :, :, :)
+        end if
+      end if
+      ; Function in ~/interface_scripts/auxiliary.ncl
+      ncdf_outfile = ncdf_write(write_data, work_dir + outfile)
+    end if
+  end do  ; imod
+
+  if ((.not. isatt(diag_script_info, "iavmode")) .or. \
+      (diag_script_info@iavmode .ne. "each")) then
+    ; ###########################################
+    ; # Calculate multi-model mean of IAV       #
+    ; ###########################################
+    do s = 0, dim_seas - 1
+      if (ndim .eq. 3) then
+        IAV_mmm(s, :, :) = \
+          rm_single_dims(dim_avg_n(data1(:, s, :, :), 0) * sqrt(2.))
+      elseif (ndim .eq. 4) then
+        ; Calculate the zonal average
+        tmp_zon = dim_avg_n_Wrap(data1, 4)  ; data1(models, seas, lev, lat, lon)
+        ; Calculate multi-model mean
+        IAV_mmm(s, :, :) = \
+          rm_single_dims(dim_avg_n(tmp_zon(:, s, :, :), 0) * sqrt(2.))
+      end if
+    end do
+
+    if (ndim .eq. 3) then
+      IAV_mmm!0 = "season"
+      IAV_mmm!1 = "lat"
+      IAV_mmm!2 = "lon"
+      IAV_mmm&lon = A0&lon
+    elseif (ndim .eq.
4) then
+      IAV_mmm!0 = "season"
+      IAV_mmm!1 = lev_name
+      IAV_mmm&$lev_name$ = A0&$lev_name$
+      IAV_mmm!2 = "lat"
+    end if
+    if (isatt(diag_script_info, "seasons")) then
+      IAV_mmm&season = (/ispan(0, dim_seas - 1, 1)/)
+    else
+      IAV_mmm&season = (/0/)
+    end if
+    IAV_mmm&lat = A0&lat
+
+    ; ###########################################
+    ; # Output to netCDF                        #
+    ; ###########################################
+    outfile = "IAV_mmm_piControl_" + \
+      var0 + "_" + diag_script_info@time_avg + ".nc"
+
+    IAV_mmm@diag_script = (/DIAG_SCRIPT/)
+    IAV_mmm@var = "iav"
+    if (isatt(variable_info[0], "long_name")) then
+      IAV_mmm@var_long_name = "iav of " + variable_info[0]@long_name
+    end if
+    if (isatt(variable_info[0], "units")) then
+      IAV_mmm@units = variable_info[0]@units
+    end if
+    IAV_mmm@comment = metadata_att_as_array(info_items, "dataset")
+    ; Function in ~/interface_scripts/auxiliary.ncl
+    ncdf_outfile = ncdf_write(IAV_mmm, work_dir + outfile)
+  end if
+
+  ; collect meta-data
+  nc_file = ncdf_outfile
+  caption = "Inter-annual variability based on piControl runs."
+  statistics = ("var")
+  domains = ("global")
+  plot_types = ("other")
+  authors = (/"lorenz_ruth"/)
+  references = (/"collins13ipcc"/)
+  infiles = metadata_att_as_array(info_items, "filename")
+  log_provenance(nc_file, "n/a", caption, statistics, domains, \
+                 plot_types, authors, references, infiles)
+
+  leave_msg(DIAG_SCRIPT, "")
+
+end
diff --git a/esmvaltool/diag_scripts/tebaldi21esd/calc_cmip6_and_cmip5_pattern_diff_scaleT.ncl b/esmvaltool/diag_scripts/tebaldi21esd/calc_cmip6_and_cmip5_pattern_diff_scaleT.ncl
new file mode 100644
index 0000000000..95efa06703
--- /dev/null
+++ b/esmvaltool/diag_scripts/tebaldi21esd/calc_cmip6_and_cmip5_pattern_diff_scaleT.ncl
@@ -0,0 +1,421 @@
+; #############################################################################
+; Computes the pattern difference between CMIP6 and CMIP5 based on the
+; multi-model change between historical and all included scenarios,
+; scaled by global T change.
+; Derived and adapted from
+; diag_scripts/ipcc_ar5/ch12_calc_map_diff_scaleT_mmm_stipp.ncl
+; Author: Debeire Kevin (DLR, Germany)
+; #############################################################################
+;
+; Description
+;    Pattern scaling. Computes the pattern difference between the CMIP6
+;    multi-model mean change and the CMIP5 multi-model mean change.
+;
+; Required diag_script_info attributes (diagnostics specific)
+;    scenarios_cmip6: list with CMIP6 scenarios to be included in the
+;                     figure, e.g. (/"ssp245"/)
+;    scenarios_cmip5: list with CMIP5 scenarios to be included in the
+;                     figure, e.g. (/"rcp45"/)
+;    periods: list with start years of periods to be included,
+;             e.g.
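Before the code of this script, a hedged sketch of the core pattern-scaling step shared by all the scaleT diagnostics in this patch: each model's climatological change is divided by that model's global-mean tas change, and the scaled fields are then averaged across models. All names and array sizes below are invented for illustration.

begin
  nmod = 3
  nlat = 4
  nlon = 8
  proj = random_normal(2.0, 0.5, (/nmod, nlat, nlon/))   ; scenario climatology
  base = random_normal(0.0, 0.5, (/nmod, nlat, nlon/))   ; historical climatology
  tproj = (/3.1, 2.7, 3.5/)                              ; global-mean tas, scenario
  tbase = (/0.2, 0.1, 0.3/)                              ; global-mean tas, historical
  ; change per degree of global warming, per model
  scaled = (proj - base) / conform(proj, tproj - tbase, 0)
  mmm = dim_avg_n(scaled, 0)                             ; multi-model mean pattern
  printVarSummary(mmm)
end

This script computes such a multi-model mean separately for the CMIP6 and CMIP5 scenario sets and then takes the difference of the two patterns.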
(/"2046","2081"/) +; time_avg: list with seasons or annual to be included +; e.g (/"annualclim"/) +; +; Optional diag_script_info attributes (diagnostic specific) +; percent: 0 or 1, difference in percent = 1, default = 0 +; units: unit string in ncl formatting for legend title +; +; Modification history +; 20220803-A_debe_kevin: written for ESMValTool +; +; ############################################################################# +; A temporary file written by the invoking Python script +; Passes on a number of variables from Python to NCL +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/ensemble.ncl" +load "$diag_scripts/shared/scaling.ncl" + +load "$diag_scripts/shared/plot/style.ncl" + +begin + enter_msg(DIAG_SCRIPT, "") + + ; 'datasets', 'variables' are fetched from the above 'interface.ncl' file + var0 = variable_info[0]@short_name + info_items = select_metadata_by_name(input_file_info, var0) + dim_MOD = ListCount(info_items) + dim_VAR = ListCount(variable_info) + + if (dim_VAR .eq. 2) then + var0 = variable_info[0]@short_name + var1 = variable_info[1]@short_name + info_items2 = select_metadata_by_name(input_file_info, var1) + else + var0 = variable_info[0]@short_name + var1 = var0 + info_items2 = info_items + end if + + if (var1 .ne. "tas") then + error_msg("f", DIAG_SCRIPT, "", "first variable must be tas " + \ + "to scale by global mean tas change") + end if + + ; Save some dataset attributes as arrays for index-based selection below + dataset_names = metadata_att_as_array(info_items, "dataset") + dataset_exps = metadata_att_as_array(info_items, "exp") + dataset_ensembles = metadata_att_as_array(info_items, "ensemble") + dataset_startyears = metadata_att_as_array(info_items, "start_year") + + if (isatt(diag_script_info, "seasons")) then + dim_seas = dimsizes(diag_script_info@seasons) + else + dim_seas = 1 + end if + + ; Check required diag_script_info attributes + req_atts = (/"scenarios_cmip5", "scenarios_cmip6", "periods", "time_avg"/) + exit_if_missing_atts(diag_script_info, req_atts) + +end + +begin + ; Output netcdf directory + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) +end + +begin + if(isvar("MyParam")) then + delete(MyParam) + end if + if(isatt(variable_info[0], "long_name")) then + MyParam = variable_info[0]@long_name + log_info(" MyParam = " + MyParam) + else + error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \ + "'long_name'") + end if + + ; ########################################### + ; # Get data and average time # + ; ########################################### + ; get data from first model + imod = 0 ; NCL array indices start from zero + log_info("processing " + info_items[imod]@dataset + "_" \ + + info_items[imod]@exp + "_" \ + + info_items[imod]@ensemble) + + ; See ./interface_scripts/data_handling.ncl + A0 = read_data(info_items[imod]) + Fill = default_fillvalue(typeof(A0)) ; 1e20 + + ; Check dimensions + dims = getvardims(A0) + if (dimsizes(dims) .lt. 2) then + error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + " dimensions, " + \ + "need 2 or 3") + end if + idx = ind(dims .eq. "lat") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lat dimension") + end if + nlat = dimsizes(A0&lat) + idx = ind(dims .eq. 
"lon") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lon dimension") + end if + nlon = dimsizes(A0&lon) + + temp_avg = new((/dim_MOD/), typeof(A0), Fill) + temp_avg!0 = "models" + data1 = new((/dim_MOD, dim_seas, nlat, nlon/), \ + typeof(A0), Fill) + ; Average over time (function in ./diag_scripts/lib/ncl/statistics.ncl) + if ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .ne. 4)) then + tmp_seas = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + if (dim_seas .eq. 1) then + data1(imod, 0, :, :) = tmp_seas(diag_script_info@seasons, :, :) + else + data1(imod, :, :, :) = tmp_seas(diag_script_info@seasons, :, :) + end if + elseif ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .eq. 4)) then + data1(imod, :, :, :) = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + else + data1(imod, 0, :, :) = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + end if + rad = (4.0 * atan(1.0) / 180.0) + do imod = 0, dim_MOD - 1 + A0_temp = read_data(info_items2[imod]) + temp_tavg = dim_avg_n_Wrap(A0_temp, 0) + latw = cos(data1&lat * rad) + temp_avg(imod) = wgt_areaave_Wrap(temp_tavg, latw, 1.0, 0) + delete(temp_tavg) + delete(latw) + if (imod .ne. 0) then + A0 = read_data(info_items[imod]) + ; Average over time, + ; function in ./diag_scripts/lib/ncl/statistics.ncl) + if ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .ne. 4)) then + tmp_seas = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + if (dim_seas .eq. 1) then + data1(imod, 0, :, :) = tmp_seas(diag_script_info@seasons, :, :) + else + data1(imod, :, :, :) = tmp_seas(diag_script_info@seasons, :, :) + end if + delete(tmp_seas) + elseif ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .eq. 4)) then + data1(imod, :, :, :) = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + else + data1(imod, 0, :, :) = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + end if + end if + delete(A0_temp) + delete(A0) + end do + delete(imod) + + ; ######################################################### + ; # Calculate change from hist to periods for CMIP6 models# + ; ######################################################### + if (isatt(diag_script_info, "reference_run")) then + reference_run_name = diag_script_info@reference_run + else + log_info("No reference run name given, set to default 'historical'") + reference_run_name = "historical" + end if + + ; loop over rcps and periods + dim_scen = dimsizes(diag_script_info@scenarios_cmip6) + dim_per = dimsizes(diag_script_info@periods) + + to_plot = new((/dim_per * dim_seas, nlat, nlon/), typeof(data1), Fill) + if (isatt(diag_script_info, "percent")) then + percent = diag_script_info@percent + else + percent = 0 + end if + i = 0 + do seas = 0, dim_seas - 1 ; loop over seasons + do per = 0, dim_per - 1 ; loop over periods + do rcp = 0, dim_scen - 1 ; loop over rcps + idx_rcp = ind((dataset_exps .eq. \ + diag_script_info@scenarios_cmip6(rcp)) \ + .and. (dataset_startyears .eq. \ + diag_script_info@periods(per))) + proj = data1(idx_rcp, seas, :, :) + temp_proj = temp_avg(idx_rcp) + ; Cut low values + proj = where(abs(proj) .gt. 1.e-14, proj, proj@_FillValue) + ; find historical runs from same models as in rcp + do jj = 0, dimsizes(idx_rcp) - 1 + tmp_idx = ind(dataset_names .eq. dataset_names(idx_rcp(jj)) .and. \ + dataset_exps .eq. reference_run_name .and. 
\ + dataset_ensembles .eq. dataset_ensembles(idx_rcp(jj))) + if (isdefined("idx_hist")) then + idx_hist := array_append_record(idx_hist, tmp_idx, 0) + else + idx_hist = tmp_idx + end if + delete(tmp_idx) + end do + + base = data1(idx_hist, seas, :, :) + temp_base = temp_avg(idx_hist) + ; Cut low values + base = where(abs(base) .gt. 1.e-14, base, base@_FillValue) + ; scale each model by global T change + log_debug("Scale each model by global T change") + dim_mod = dimsizes(idx_rcp) + if (dim_mod .lt. 2) then + error_msg("f", DIAG_SCRIPT, "", \ + "Only one model found, multi-model needs at least two.") + end if + var_diff_scal = new((/dim_mod, nlat, nlon/), typeof(data1), Fill) + do imod = 0, dim_mod - 1 + if (percent .eq. 1) then + var_diff_scal(imod, :, :) = \ + ((100 * (proj(imod, :, :) - base(imod, :, :))) / \ + where(base(imod, :, :) .ne. 0., \ + base(imod, :, :), base@_FillValue)) / \ + (temp_proj(imod) - temp_base(imod)) + else + var_diff_scal(imod, :, :) = (proj(imod, :, :) - \ + base(imod, :, :)) / \ + (temp_proj(imod) - temp_base(imod)) + end if + end do + delete([/base, proj, temp_proj, temp_base/]) + if rcp .eq. 0 then + var_diff_scal_all_mod = var_diff_scal + else + tmp_scal_all_mod = var_diff_scal_all_mod + delete(var_diff_scal_all_mod) + var_diff_scal_all_mod = \ + array_append_record(tmp_scal_all_mod, var_diff_scal, 0) + delete(tmp_scal_all_mod) + end if + delete([/idx_hist, idx_rcp, var_diff_scal/]) + end do ; rcp + var_diff_scal_all_mod!0 = "models" + var_diff_scal_all_mod!1 = "lat" + var_diff_scal_all_mod!2 = "lon" + var_diff_scal_all_mod&lat = data1&lat + var_diff_scal_all_mod&lon = data1&lon + ; average over rcps + log_debug("Average over models and rcps") + to_plot(i, :, :) = dim_avg_n_Wrap(var_diff_scal_all_mod, 0) + i = i + 1 + delete([/var_diff_scal_all_mod/]) + end do ; per + end do ; seas + to_plot!0 = "panel" + to_plot&panel = diag_script_info@label + to_plot!1 = "lat" + to_plot&lat = data1&lat + to_plot!2 = "lon" + to_plot&lon = data1&lon + to_plot@units = variable_info[0]@units + if (percent .eq. 1) then + to_plot@units = "%" + end if + + ; ######################################################### + ; # Calculate change from hist to periods for CMIP5 models# + ; ######################################################### + ; loop over rcps and periods + dim_scen_cmip5 = dimsizes(diag_script_info@scenarios_cmip5) + dim_per = dimsizes(diag_script_info@periods) + + to_plot_cmip5 = new((/dim_per * dim_seas, nlat, nlon/), typeof(data1), Fill) + if (isatt(diag_script_info, "percent")) then + percent = diag_script_info@percent + else + percent = 0 + end if + i = 0 + do seas = 0, dim_seas - 1 ; loop over seasons + do per = 0, dim_per - 1 ; loop over periods + do rcp = 0, dim_scen_cmip5 - 1 ; loop over rcps + idx_rcp = ind((dataset_exps .eq. \ + diag_script_info@scenarios_cmip5(rcp)) \ + .and. (dataset_startyears .eq. \ + diag_script_info@periods(per))) + proj = data1(idx_rcp, seas, :, :) + temp_proj = temp_avg(idx_rcp) + ; Cut low values + proj = where(abs(proj) .gt. 1.e-14, proj, proj@_FillValue) + ; find historical runs from same models as in rcp + do jj = 0, dimsizes(idx_rcp) - 1 + tmp_idx = ind(dataset_names .eq. dataset_names(idx_rcp(jj)) .and. \ + dataset_exps .eq. reference_run_name .and. \ + dataset_ensembles .eq. 
dataset_ensembles(idx_rcp(jj))) + if (isdefined("idx_hist")) then + idx_hist := array_append_record(idx_hist, tmp_idx, 0) + else + idx_hist = tmp_idx + end if + delete(tmp_idx) + end do + + base = data1(idx_hist, seas, :, :) + temp_base = temp_avg(idx_hist) + ; Cut low values + base = where(abs(base) .gt. 1.e-14, base, base@_FillValue) + ; scale each model by global T change + log_debug("Scale each model by global T change") + dim_mod = dimsizes(idx_rcp) + if (dim_mod .lt. 2) then + error_msg("f", DIAG_SCRIPT, "", \ + "Only one model found, multi-model needs at least two.") + end if + var_diff_scal = new((/dim_mod, nlat, nlon/), typeof(data1), Fill) + do imod = 0, dim_mod - 1 + if (percent .eq. 1) then + var_diff_scal(imod, :, :) = \ + ((100 * (proj(imod, :, :) - base(imod, :, :))) / \ + where(base(imod, :, :) .ne. 0., \ + base(imod, :, :), base@_FillValue)) / \ + (temp_proj(imod) - temp_base(imod)) + else + var_diff_scal(imod, :, :) = (proj(imod, :, :) - \ + base(imod, :, :)) / \ + (temp_proj(imod) - temp_base(imod)) + end if + end do + delete([/base, proj, temp_proj, temp_base/]) + if rcp .eq. 0 then + var_diff_scal_all_mod = var_diff_scal + else + tmp_scal_all_mod = var_diff_scal_all_mod + delete(var_diff_scal_all_mod) + var_diff_scal_all_mod = \ + array_append_record(tmp_scal_all_mod, var_diff_scal, 0) + delete(tmp_scal_all_mod) + end if + delete([/idx_hist, idx_rcp, var_diff_scal/]) + end do ; rcp + var_diff_scal_all_mod!0 = "models" + var_diff_scal_all_mod!1 = "lat" + var_diff_scal_all_mod!2 = "lon" + var_diff_scal_all_mod&lat = data1&lat + var_diff_scal_all_mod&lon = data1&lon + ; average over rcps + log_debug("Average over models and rcps") + to_plot_cmip5(i, :, :) = dim_avg_n_Wrap(var_diff_scal_all_mod, 0) + to_plot(i, :, :) = to_plot(i, :, :) - to_plot_cmip5(i, :, :) + i = i + 1 + delete([/var_diff_scal_all_mod/]) + end do ; per + end do ; seas + ; ########################################### + ; # Other Metadata: diag_script, var # + ; ########################################### + ; Add to to_plot, as attributes without prefix + if (isatt(to_plot, "diag_script")) then ; Add to existing entries + tmp = to_plot@diag_script + delete(to_plot@diag_script) + to_plot@diag_script = array_append_record(tmp, (/DIAG_SCRIPT/), 0) + delete(tmp) + else ; Add as new attribute + to_plot@diag_script = (/DIAG_SCRIPT/) + end if + to_plot@var = var0 ; Overwrite existing entry + if (isatt(variable_info, "long_name")) then + to_plot@var_long_name = variable_info@long_name + end if + + ; Check units and adjust for plotting if necessary + ; difference in K is the same as difference in degC, only change label + if (to_plot@units.eq."K") then + to_plot@units = "degC" + end if + if (isatt(diag_script_info, "plot_units")) then + to_plot = convert_units(to_plot, diag_script_info@plot_units) + end if + tmp_unit = to_plot@units + to_plot@units = tmp_unit + " per degC" + + ; ########################################### + ; # Output to netCDF # + ; ########################################### + to_plot@ncdf = variable_info[0]@diagnostic + ".nc" + ncdf_file = work_dir + "/" + to_plot@ncdf + ncdf_file@existing = "overwrite" + ; Function in ~/interface_scripts/auxiliary.ncl + ncdf_outfile = ncdf_write(to_plot, ncdf_file) + + leave_msg(DIAG_SCRIPT, "") +end diff --git a/esmvaltool/diag_scripts/tebaldi21esd/calc_pattern_comparison.ncl b/esmvaltool/diag_scripts/tebaldi21esd/calc_pattern_comparison.ncl new file mode 100644 index 0000000000..e5613584e4 --- /dev/null +++ 
b/esmvaltool/diag_scripts/tebaldi21esd/calc_pattern_comparison.ncl
@@ -0,0 +1,316 @@
+; #############################################################################
+; Computes the difference between two patterns
+; Derived and adapted from
+; diag_scripts/ipcc_ar5/ch12_calc_map_diff_scaleT_mmm_stipp.ncl
+; Author: Debeire Kevin (DLR, Germany)
+; #############################################################################
+;
+; Description
+;    Computes the difference between the patterns of multi-model mean change
+;    of two different scenarios (e.g. SSP4-6.0 and SSP4-3.4)
+;
+; Required diag_script_info attributes (diagnostics specific)
+;    scenarios: list with the two scenarios to be included in the figure.
+;               The difference is computed as the last scenario in the list
+;               minus the first: for example, to compute the pattern
+;               difference between SSP4-6.0 and SSP4-3.4 (ssp460 - ssp434),
+;               scenario ssp460 must be the last element of the list.
+;    periods: list with start years of periods to be included,
+;             e.g. (/"2046","2081"/)
+;    time_avg: list with seasons or annual to be included,
+;              e.g. (/"annualclim"/)
+;    label: label of periods (e.g. 2081-2100 relative to 1995-2014)
+;
+; Optional diag_script_info attributes (diagnostic specific)
+;    seasons: list with indices of the seasons to be included if time_avg is
+;             "seasonalclim" (required for seasonalclim),
+;             DJF:0, MAM:1, JJA:2, SON:3
+;    percent: 0 or 1, difference in percent = 1, default = 0
+;    units: unit string in ncl formatting for legend title
+;
+; Modification history
+;    20220803-A_debe_kevin: written for ESMValTool
+;
+; #############################################################################
+; A temporary file written by the invoking Python script
+; Passes on a number of variables from Python to NCL
+load "$diag_scripts/../interface_scripts/interface.ncl"
+
+load "$diag_scripts/shared/latlon.ncl"
+load "$diag_scripts/shared/statistics.ncl"
+load "$diag_scripts/shared/ensemble.ncl"
+load "$diag_scripts/shared/scaling.ncl"
+
+load "$diag_scripts/shared/plot/style.ncl"
+
+begin
+  enter_msg(DIAG_SCRIPT, "")
+
+  ; 'datasets', 'variables' are fetched from the above 'interface.ncl' file
+  var0 = variable_info[0]@short_name
+  info_items = select_metadata_by_name(input_file_info, var0)
+  dim_MOD = ListCount(info_items)
+  dim_VAR = ListCount(variable_info)
+
+  if (dim_VAR .eq.
2) then + var0 = variable_info[0]@short_name + var1 = variable_info[1]@short_name + info_items2 = select_metadata_by_name(input_file_info, var1) + else + var0 = variable_info[0]@short_name + var1 = var0 + info_items2 = info_items + end if + ; Save some dataset attributes as arrays for index-based selection below + dataset_names = metadata_att_as_array(info_items, "dataset") + dataset_exps = metadata_att_as_array(info_items, "exp") + dataset_ensembles = metadata_att_as_array(info_items, "ensemble") + dataset_startyears = metadata_att_as_array(info_items, "start_year") + + if (isatt(diag_script_info, "seasons")) then + dim_seas = dimsizes(diag_script_info@seasons) + else + dim_seas = 1 + end if + + ; Check required diag_script_info attributes + req_atts = (/"scenarios", "periods", "time_avg"/) + exit_if_missing_atts(diag_script_info, req_atts) + +end + +begin + ; Output netcdf directory + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) +end + +begin + ; ############################################# + ; # Get parameters from ./variable_defs/*.ncl # + ; # passed via the 'info' attribute # + ; ############################################# + if(isvar("MyParam")) then + delete(MyParam) + end if + if(isatt(variable_info[0], "long_name")) then + MyParam = variable_info[0]@long_name + log_info(" MyParam = " + MyParam) + else + error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \ + "'long_name'") + end if + + ; ########################################### + ; # Get data and average time # + ; ########################################### + ; get data from first model + imod = 0 ; NCL array indices start from zero + log_info("processing " + info_items[imod]@dataset + "_" \ + + info_items[imod]@exp + "_" \ + + info_items[imod]@ensemble) + + ; See ./interface_scripts/data_handling.ncl + A0 = read_data(info_items[imod]) + Fill = default_fillvalue(typeof(A0)) ; 1e20 + + ; Check dimensions + dims = getvardims(A0) + if (dimsizes(dims) .lt. 2) then + error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + " dimensions, " + \ + "need 2 or 3") + end if + idx = ind(dims .eq. "lat") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lat dimension") + end if + nlat = dimsizes(A0&lat) + idx = ind(dims .eq. "lon") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lon dimension") + end if + nlon = dimsizes(A0&lon) + + temp_avg = new((/dim_MOD/), typeof(A0), Fill) + temp_avg!0 = "models" + data1 = new((/dim_MOD, dim_seas, nlat, nlon/), \ + typeof(A0), Fill) + ; Average over time (function in ./diag_scripts/lib/ncl/statistics.ncl) + if ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .ne. 4)) then + tmp_seas = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + if (dim_seas .eq. 1) then + data1(imod, 0, :, :) = tmp_seas(diag_script_info@seasons, :, :) + else + data1(imod, :, :, :) = tmp_seas(diag_script_info@seasons, :, :) + end if + elseif ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .eq. 4)) then + data1(imod, :, :, :) = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + else + data1(imod, 0, :, :) = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + end if + rad = (4.0 * atan(1.0) / 180.0) + do imod = 0, dim_MOD - 1 + A0_temp = read_data(info_items2[imod]) + temp_tavg = dim_avg_n_Wrap(A0_temp, 0) + latw = cos(data1&lat * rad) + temp_avg(imod) = wgt_areaave_Wrap(temp_tavg, latw, 1.0, 0) + delete(temp_tavg) + delete(latw) + if (imod .ne. 
0) then + A0 = read_data(info_items[imod]) + ; Average over time, + ; function in ./diag_scripts/lib/ncl/statistics.ncl) + if ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .ne. 4)) then + tmp_seas = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + if (dim_seas .eq. 1) then + data1(imod, 0, :, :) = tmp_seas(diag_script_info@seasons, :, :) + else + data1(imod, :, :, :) = tmp_seas(diag_script_info@seasons, :, :) + end if + delete(tmp_seas) + elseif ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .eq. 4)) then + data1(imod, :, :, :) = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + else + data1(imod, 0, :, :) = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + end if + end if + delete(A0_temp) + delete(A0) + end do + delete(imod) + + ; ############################################## + ; # Calculate change from hist to periods # + ; ############################################## + if (isatt(diag_script_info, "reference_run")) then + reference_run_name = diag_script_info@reference_run + else + log_info("No reference run name given, set to default 'historical'") + reference_run_name = "historical" + end if + + ; loop over ssps and periods + dim_scen = dimsizes(diag_script_info@scenarios) + dim_per = dimsizes(diag_script_info@periods) + + to_plot = new((/dim_per * dim_seas, nlat, nlon/), typeof(data1), Fill) + if (isatt(diag_script_info, "percent")) then + percent = diag_script_info@percent + else + percent = 0 + end if + perc95 = new((/dim_per * dim_seas, nlat, nlon/), typeof(data1), Fill) + i = 0 + do seas = 0, dim_seas - 1 ; loop over seasons + do per = 0, dim_per - 1 ; loop over periods + do ssp = 0, dim_scen - 1 ; loop over ssps + idx_ssp = ind((dataset_exps .eq. diag_script_info@scenarios(ssp)) \ + .and. (dataset_startyears .eq. \ + diag_script_info@periods(per))) + proj = data1(idx_ssp, seas, :, :) + temp_proj = temp_avg(idx_ssp) + ; Cut low values + proj = where(abs(proj) .gt. 1.e-14, proj, proj@_FillValue) + ; find historical runs from same models as in ssp + do jj = 0, dimsizes(idx_ssp) - 1 + tmp_idx = ind(dataset_names .eq. dataset_names(idx_ssp(jj)) .and. \ + dataset_exps .eq. reference_run_name .and. \ + dataset_ensembles .eq. dataset_ensembles(idx_ssp(jj))) + if (isdefined("idx_hist")) then + idx_hist := array_append_record(idx_hist, tmp_idx, 0) + else + idx_hist = tmp_idx + end if + delete(tmp_idx) + end do + + base = data1(idx_hist, seas, :, :) + temp_base = temp_avg(idx_hist) + ; Cut low values + base = where(abs(base) .gt. 1.e-14, base, base@_FillValue) + dim_mod = dimsizes(idx_ssp) + if (dim_mod .lt. 2) then + error_msg("f", DIAG_SCRIPT, "", \ + "Only one model found, multi-model needs at least two.") + end if + var_diff_scal = new((/dim_mod, nlat, nlon/), typeof(data1), Fill) + do imod = 0, dim_mod - 1 + if (percent .eq. 1) then + var_diff_scal(imod, :, :) = \ + ((100 * (proj(imod, :, :) - base(imod, :, :))) / \ + where(base(imod, :, :) .ne. 0., \ + base(imod, :, :), base@_FillValue)) + else + var_diff_scal(imod, :, :) = (proj(imod, :, :) - \ + base(imod, :, :)) + end if + end do + delete([/base, proj, temp_proj, temp_base/]) + if ssp .eq. 
0 then + var_diff_scal_1 = dim_avg_n_Wrap(var_diff_scal, 0) + else + var_diff_scal_2 = dim_avg_n_Wrap(var_diff_scal, 0) + end if + delete([/idx_hist, idx_ssp, var_diff_scal/]) + end do + to_plot(i, :, :) = var_diff_scal_2 - var_diff_scal_1 + i = i + 1 + end do ; per + end do ; seas + to_plot!0 = "panel" + to_plot&panel = diag_script_info@label + to_plot!1 = "lat" + to_plot&lat = data1&lat + to_plot!2 = "lon" + to_plot&lon = data1&lon + to_plot@units = variable_info[0]@units + if (percent .eq. 1) then + to_plot@units = "%" + end if + + ; ########################################### + ; # Other Metadata: diag_script, var # + ; ########################################### + ; Add to to_plot, as attributes without prefix + if (isatt(to_plot, "diag_script")) then ; Add to existing entries + tmp = to_plot@diag_script + delete(to_plot@diag_script) + to_plot@diag_script = array_append_record(tmp, (/DIAG_SCRIPT/), 0) + delete(tmp) + else ; Add as new attribute + to_plot@diag_script = (/DIAG_SCRIPT/) + end if + to_plot@var = var0 ; Overwrite existing entry + if (isatt(variable_info, "long_name")) then + to_plot@var_long_name = variable_info@long_name + end if + + ; Check units and adjust for plotting if necessary + ; difference in K is the same as difference in degC, only change label + if (to_plot@units.eq."K") then + to_plot@units = "degC" + end if + if (isatt(diag_script_info, "plot_units")) then + to_plot = convert_units(to_plot, diag_script_info@plot_units) + end if + + ; ########################################### + ; # Output to netCDF # + ; ########################################### + to_plot@ncdf = variable_info[0]@diagnostic + ".nc" + ncdf_file = work_dir + "/" + to_plot@ncdf + ncdf_file@existing = "overwrite" + ; Function in ~/interface_scripts/auxiliary.ncl + ncdf_outfile = ncdf_write(to_plot, ncdf_file) + + leave_msg(DIAG_SCRIPT, "") +end diff --git a/esmvaltool/diag_scripts/tebaldi21esd/calc_pattern_diff_scaleT.ncl b/esmvaltool/diag_scripts/tebaldi21esd/calc_pattern_diff_scaleT.ncl new file mode 100644 index 0000000000..95583cce68 --- /dev/null +++ b/esmvaltool/diag_scripts/tebaldi21esd/calc_pattern_diff_scaleT.ncl @@ -0,0 +1,334 @@ +; ############################################################################# +; Computes the pattern maps of multi-model change between historical and +; scenarios scaled by global T change +; Derived and adapted from +; diag_scripts/ipcc_ar5/ch12_calc_map_diff_scaleT_mmm_stipp.ncl +; Author: Debeire Kevin (DLR, Germany) +; ############################################################################# +; +; Description +; Pattern scaling. Annual mean or seasonal change scaled by global T +; change per multi-model mean with significance in different +; periods (e.g. 2081-2100 with respect to 1995-2014) +; +; Required diag_script_info attributes (diagnostics specific) +; scenarios: list with scenarios to be included in the +; figure, e.g (/"ssp245","ssp585"/) +; periods: list with start years of periods to be included +; e.g. (/"2046","2081"/) +; time_avg: list with seasons or annual to be included +; e.g (/"annualclim"/) +; label: label of periods (ex. 
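The scenario and period subsetting used throughout these scripts relies on NCL's ind over the metadata arrays (dataset_exps, dataset_startyears, and friends) saved near the top of each script. A toy sketch with invented metadata values:

begin
  exps = (/"historical", "ssp434", "ssp460", "ssp460"/)
  starts = (/1995, 2081, 2081, 2046/)
  ; indices of all ssp460 runs whose period starts in 2081
  idx = ind((exps .eq. "ssp460") .and. (starts .eq. 2081))
  print(idx)  ; -> 2
end

The historical counterpart of each selected run is found the same way, additionally matching the dataset and ensemble names, which is what the idx_hist loops above do.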
2081-2100 relative to 1995-2014) +; +; Optional diag_script_info attributes (diagnostic specific) +; seasons: list with seasons index to be included if +; averagetime is "seasonalclim" (required for +; seasonalclim), DJF:0, MAM:1, JJA:2, SON:3 +; percent: 0 or 1, difference in percent = 1, default = 0 +; units: unit string in ncl formatting for legend title +; +; Modification history +; 20220803-A_debe_kevin: written for ESMValTool +; +; ############################################################################# +; A temporary file written by the invoking Python script +; Passes on a number of variables from Python to NCL +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/ensemble.ncl" +load "$diag_scripts/shared/scaling.ncl" + +load "$diag_scripts/shared/plot/style.ncl" + +begin + enter_msg(DIAG_SCRIPT, "") + + ; 'datasets', 'variables' are fetched from the above 'interface.ncl' file + var0 = variable_info[0]@short_name + info_items = select_metadata_by_name(input_file_info, var0) + dim_MOD = ListCount(info_items) + dim_VAR = ListCount(variable_info) + + if (dim_VAR .eq. 2) then + var0 = variable_info[0]@short_name + var1 = variable_info[1]@short_name + info_items2 = select_metadata_by_name(input_file_info, var1) + else + var0 = variable_info[0]@short_name + var1 = var0 + info_items2 = info_items + end if + + if (var1 .ne. "tas") then + error_msg("f", DIAG_SCRIPT, "", "first variable must be tas " + \ + "to scale by global mean tas change") + end if + + ; Save some dataset attributes as arrays for index-based selection below + dataset_names = metadata_att_as_array(info_items, "dataset") + dataset_exps = metadata_att_as_array(info_items, "exp") + dataset_ensembles = metadata_att_as_array(info_items, "ensemble") + dataset_startyears = metadata_att_as_array(info_items, "start_year") + + if (isatt(diag_script_info, "seasons")) then + dim_seas = dimsizes(diag_script_info@seasons) + else + dim_seas = 1 + end if + + ; Check required diag_script_info attributes + req_atts = (/"scenarios", "periods", "time_avg"/) + exit_if_missing_atts(diag_script_info, req_atts) + +end + +begin + ; Output netcdf directory + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) +end + +begin + if(isvar("MyParam")) then + delete(MyParam) + end if + if(isatt(variable_info[0], "long_name")) then + MyParam = variable_info[0]@long_name + log_info(" MyParam = " + MyParam) + else + error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \ + "'long_name'") + end if + + ; ########################################### + ; # Get data and average time # + ; ########################################### + ; get data from first model + imod = 0 ; NCL array indices start from zero + log_info("processing " + info_items[imod]@dataset + "_" \ + + info_items[imod]@exp + "_" \ + + info_items[imod]@ensemble) + + ; See ./interface_scripts/data_handling.ncl + A0 = read_data(info_items[imod]) + Fill = default_fillvalue(typeof(A0)) ; 1e20 + + ; Check dimensions + dims = getvardims(A0) + if (dimsizes(dims) .lt. 2) then + error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + " dimensions, " + \ + "need 2 or 3") + end if + idx = ind(dims .eq. "lat") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lat dimension") + end if + nlat = dimsizes(A0&lat) + idx = ind(dims .eq. 
"lon") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lon dimension") + end if + nlon = dimsizes(A0&lon) + + temp_avg = new((/dim_MOD/), typeof(A0), Fill) + temp_avg!0 = "models" + data1 = new((/dim_MOD, dim_seas, nlat, nlon/), \ + typeof(A0), Fill) + ; Average over time (function in ./diag_scripts/lib/ncl/statistics.ncl) + if ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .ne. 4)) then + tmp_seas = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + if (dim_seas .eq. 1) then + data1(imod, 0, :, :) = tmp_seas(diag_script_info@seasons, :, :) + else + data1(imod, :, :, :) = tmp_seas(diag_script_info@seasons, :, :) + end if + elseif ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .eq. 4)) then + data1(imod, :, :, :) = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + else + data1(imod, 0, :, :) = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + end if + rad = (4.0 * atan(1.0) / 180.0) + do imod = 0, dim_MOD - 1 + A0_temp = read_data(info_items2[imod]) + temp_tavg = dim_avg_n_Wrap(A0_temp, 0) + latw = cos(data1&lat * rad) + temp_avg(imod) = wgt_areaave_Wrap(temp_tavg, latw, 1.0, 0) + delete(temp_tavg) + delete(latw) + if (imod .ne. 0) then + A0 = read_data(info_items[imod]) + ; Average over time, + ; function in ./diag_scripts/lib/ncl/statistics.ncl) + if ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .ne. 4)) then + tmp_seas = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + if (dim_seas .eq. 1) then + data1(imod, 0, :, :) = tmp_seas(diag_script_info@seasons, :, :) + else + data1(imod, :, :, :) = tmp_seas(diag_script_info@seasons, :, :) + end if + delete(tmp_seas) + elseif ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .eq. 4)) then + data1(imod, :, :, :) = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + else + data1(imod, 0, :, :) = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + end if + end if + delete(A0_temp) + delete(A0) + end do + delete(imod) + + ; ############################################## + ; # Calculate change from hist to periods # + ; ############################################## + if (isatt(diag_script_info, "reference_run")) then + reference_run_name = diag_script_info@reference_run + else + log_info("No reference run name given, set to default 'historical'") + reference_run_name = "historical" + end if + + ; loop over ssps and periods + dim_scen = dimsizes(diag_script_info@scenarios) + dim_per = dimsizes(diag_script_info@periods) + + to_plot = new((/dim_per * dim_seas, nlat, nlon/), typeof(data1), Fill) + if (isatt(diag_script_info, "percent")) then + percent = diag_script_info@percent + else + percent = 0 + end if + i = 0 + do seas = 0, dim_seas - 1 ; loop over seasons + do per = 0, dim_per - 1 ; loop over periods + do ssp = 0, dim_scen - 1 ; loop over ssps + idx_ssp = ind((dataset_exps .eq. diag_script_info@scenarios(ssp)) \ + .and. (dataset_startyears .eq. \ + diag_script_info@periods(per))) + proj = data1(idx_ssp, seas, :, :) + temp_proj = temp_avg(idx_ssp) + ; Cut low values + proj = where(abs(proj) .gt. 1.e-14, proj, proj@_FillValue) + ; find historical runs from same models as in ssp + do jj = 0, dimsizes(idx_ssp) - 1 + tmp_idx = ind(dataset_names .eq. dataset_names(idx_ssp(jj)) .and. \ + dataset_exps .eq. reference_run_name .and. \ + dataset_ensembles .eq. 
dataset_ensembles(idx_ssp(jj))) + if (isdefined("idx_hist")) then + idx_hist := array_append_record(idx_hist, tmp_idx, 0) + else + idx_hist = tmp_idx + end if + delete(tmp_idx) + end do + + base = data1(idx_hist, seas, :, :) + temp_base = temp_avg(idx_hist) + ; Cut low values + base = where(abs(base) .gt. 1.e-14, base, base@_FillValue) + ; scale each model by global T change + log_debug("Scale each model by global T change") + dim_mod = dimsizes(idx_ssp) + if (dim_mod .lt. 2) then + error_msg("f", DIAG_SCRIPT, "", \ + "Only one model found, multi-model needs at least two.") + end if + var_diff_scal = new((/dim_mod, nlat, nlon/), typeof(data1), Fill) + do imod = 0, dim_mod - 1 + if (percent .eq. 1) then + var_diff_scal(imod, :, :) = \ + ((100 * (proj(imod, :, :) - base(imod, :, :))) / \ + where(base(imod, :, :) .ne. 0., \ + base(imod, :, :), base@_FillValue)) / \ + (temp_proj(imod) - temp_base(imod)) + else + var_diff_scal(imod, :, :) = (proj(imod, :, :) - \ + base(imod, :, :)) / \ + (temp_proj(imod) - temp_base(imod)) + end if + end do + delete([/base, proj, temp_proj, temp_base/]) + if ssp .eq. 0 then + var_diff_scal_all_mod = var_diff_scal + else + tmp_scal_all_mod = var_diff_scal_all_mod + delete(var_diff_scal_all_mod) + var_diff_scal_all_mod = \ + array_append_record(tmp_scal_all_mod, var_diff_scal, 0) + delete(tmp_scal_all_mod) + end if + delete([/idx_hist, idx_ssp, var_diff_scal/]) + end do ; ssp + var_diff_scal_all_mod!0 = "models" + var_diff_scal_all_mod!1 = "lat" + var_diff_scal_all_mod!2 = "lon" + var_diff_scal_all_mod&lat = data1&lat + var_diff_scal_all_mod&lon = data1&lon + ; average over ssps + log_debug("Average over models and ssps") + to_plot(i, :, :) = dim_avg_n_Wrap(var_diff_scal_all_mod, 0) + i = i + 1 + delete([/var_diff_scal_all_mod/]) + end do ; per + end do ; seas + to_plot!0 = "panel" + to_plot&panel = diag_script_info@label + to_plot!1 = "lat" + to_plot&lat = data1&lat + to_plot!2 = "lon" + to_plot&lon = data1&lon + to_plot@units = variable_info[0]@units + if (percent .eq. 
1) then + to_plot@units = "%" + end if + + ; ########################################### + ; # Other Metadata: diag_script, var # + ; ########################################### + ; Add to to_plot, as attributes without prefix + if (isatt(to_plot, "diag_script")) then ; Add to existing entries + tmp = to_plot@diag_script + delete(to_plot@diag_script) + to_plot@diag_script = array_append_record(tmp, (/DIAG_SCRIPT/), 0) + delete(tmp) + else ; Add as new attribute + to_plot@diag_script = (/DIAG_SCRIPT/) + end if + to_plot@var = var0 ; Overwrite existing entry + if (isatt(variable_info, "long_name")) then + to_plot@var_long_name = variable_info@long_name + end if + + ; Check units and adjust for plotting if necessary + ; difference in K is the same as difference in degC, only change label + if (to_plot@units.eq."K") then + to_plot@units = "degC" + end if + if (isatt(diag_script_info, "plot_units")) then + to_plot = convert_units(to_plot, diag_script_info@plot_units) + end if + tmp_unit = to_plot@units + to_plot@units = tmp_unit + " per degC" + + ; ########################################### + ; # Output to netCDF # + ; ########################################### + to_plot@ncdf = variable_info[0]@diagnostic + ".nc" + ncdf_file = work_dir + "/" + to_plot@ncdf + ncdf_file@existing = "overwrite" + ; Function in ~/interface_scripts/auxiliary.ncl + ncdf_outfile = ncdf_write(to_plot, ncdf_file) + + leave_msg(DIAG_SCRIPT, "") +end diff --git a/esmvaltool/diag_scripts/tebaldi21esd/calc_pattern_intermodel_stddev_scaleT.ncl b/esmvaltool/diag_scripts/tebaldi21esd/calc_pattern_intermodel_stddev_scaleT.ncl new file mode 100644 index 0000000000..e6f9e1f1b2 --- /dev/null +++ b/esmvaltool/diag_scripts/tebaldi21esd/calc_pattern_intermodel_stddev_scaleT.ncl @@ -0,0 +1,365 @@ +; ############################################################################# +; Inter-model Stddev map of considered variable scaled by global T change map +; Derived and adapted from +; diag_scripts/ipcc_ar5/ch12_calc_map_diff_scaleT_mmm_stipp.ncl +; Author: Debeire Kevin (DLR, Germany) +; ############################################################################# +; +; Description +; Pattern scaling. Inter-model Stddev (Weighted Stddev across models +; after averaging across scenarios for each model) of considered variable +; change scaled by global T change in different periods +; (e.g. 2081-2100 with respect to 1995-2014) +; +; Required diag_script_info attributes (diagnostics specific) +; scenarios: list with scenarios to be included in the +; figure, e.g (/"ssp245","ssp585"/) +; periods: list with start years of periods to be included +; e.g. (/"2046","2081"/) +; time_avg: list with seasons or annual to be included +; e.g (/"annualclim"/) +; label: label of periods (ex. 
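Where percent = 1, these scripts express the change relative to the baseline climatology and guard against division by zero by substituting _FillValue, so zero baselines propagate as missing values. A minimal sketch with invented numbers:

begin
  base = (/1.0, 0.0, 2.0/)
  base@_FillValue = default_fillvalue("float")
  proj = (/1.5, 0.5, 1.0/)
  ; percent change; zero baselines propagate as missing values
  pct = 100. * (proj - base) / where(base .ne. 0., base, base@_FillValue)
  print(pct)  ; -> 50, missing, -50
end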
2081-2100 relative to 1995-2014) +; +; Optional diag_script_info attributes (diagnostic specific) +; seasons: list with seasons index to be included if +; averagetime is "seasonalclim" (required for +; seasonalclim), DJF:0, MAM:1, JJA:2, SON:3 +; percent: 0 or 1, difference in percent = 1, default = 0 +; units: unit string in ncl formatting for legend title +; +; Modification history +; 20220803-A_debe_kevin: written for ESMValTool +; +; ############################################################################# +; A temporary file written by the invoking Python script +; Passes on a number of variables from Python to NCL +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/ensemble.ncl" +load "$diag_scripts/shared/scaling.ncl" + +load "$diag_scripts/shared/plot/style.ncl" + +begin + enter_msg(DIAG_SCRIPT, "") + + ; 'datasets', 'variables' are fetched from the above 'interface.ncl' file + var0 = variable_info[0]@short_name + info_items = select_metadata_by_name(input_file_info, var0) + dim_MOD = ListCount(info_items) + dim_VAR = ListCount(variable_info) + + if (dim_VAR .eq. 2) then + var0 = variable_info[0]@short_name + var1 = variable_info[1]@short_name + info_items2 = select_metadata_by_name(input_file_info, var1) + else + var0 = variable_info[0]@short_name + var1 = var0 + info_items2 = info_items + end if + + if (var1 .ne. "tas") then + error_msg("f", DIAG_SCRIPT, "", "first variable must be tas " + \ + "to scale by global mean tas change") + end if + + ; Save some dataset attributes as arrays for index-based selection below + dataset_names = metadata_att_as_array(info_items, "dataset") + dataset_exps = metadata_att_as_array(info_items, "exp") + dataset_ensembles = metadata_att_as_array(info_items, "ensemble") + dataset_startyears = metadata_att_as_array(info_items, "start_year") + + if (isatt(diag_script_info, "seasons")) then + dim_seas = dimsizes(diag_script_info@seasons) + else + dim_seas = 1 + end if + + ; Check required diag_script_info attributes + req_atts = (/"scenarios", "periods", "time_avg"/) + exit_if_missing_atts(diag_script_info, req_atts) + +end + +begin + ; Output netcdf directory + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) +end + +begin + if(isvar("MyParam")) then + delete(MyParam) + end if + if(isatt(variable_info[0], "long_name")) then + MyParam = variable_info[0]@long_name + log_info(" MyParam = " + MyParam) + else + error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \ + "'long_name'") + end if + + ; ########################################### + ; # Get data and average time # + ; ########################################### + ; get data from first model + imod = 0 ; NCL array indices start from zero + log_info("processing " + info_items[imod]@dataset + "_" \ + + info_items[imod]@exp + "_" \ + + info_items[imod]@ensemble) + + ; See ./interface_scripts/data_handling.ncl + A0 = read_data(info_items[imod]) + Fill = default_fillvalue(typeof(A0)) ; 1e20 + + ; Check dimensions + dims = getvardims(A0) + if (dimsizes(dims) .lt. 2) then + error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + " dimensions, " + \ + "need 2 or 3") + end if + idx = ind(dims .eq. "lat") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lat dimension") + end if + nlat = dimsizes(A0&lat) + idx = ind(dims .eq. 
"lon") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lon dimension") + end if + nlon = dimsizes(A0&lon) + + temp_avg = new((/dim_MOD/), typeof(A0), Fill) + temp_avg!0 = "models" + data1 = new((/dim_MOD, dim_seas, nlat, nlon/), \ + typeof(A0), Fill) + ; Average over time (function in ./diag_scripts/lib/ncl/statistics.ncl) + if ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .ne. 4)) then + tmp_seas = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + if (dim_seas .eq. 1) then + data1(imod, 0, :, :) = tmp_seas(diag_script_info@seasons, :, :) + else + data1(imod, :, :, :) = tmp_seas(diag_script_info@seasons, :, :) + end if + elseif ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .eq. 4)) then + data1(imod, :, :, :) = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + else + data1(imod, 0, :, :) = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + end if + rad = (4.0 * atan(1.0) / 180.0) + do imod = 0, dim_MOD - 1 + A0_temp = read_data(info_items2[imod]) + temp_tavg = dim_avg_n_Wrap(A0_temp, 0) + latw = cos(data1&lat * rad) + temp_avg(imod) = wgt_areaave_Wrap(temp_tavg, latw, 1.0, 0) + delete(temp_tavg) + delete(latw) + if (imod .ne. 0) then + A0 = read_data(info_items[imod]) + ; Average over time, + ; function in ./diag_scripts/lib/ncl/statistics.ncl) + if ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .ne. 4)) then + tmp_seas = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + if (dim_seas .eq. 1) then + data1(imod, 0, :, :) = tmp_seas(diag_script_info@seasons, :, :) + else + data1(imod, :, :, :) = tmp_seas(diag_script_info@seasons, :, :) + end if + delete(tmp_seas) + elseif ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .eq. 4)) then + data1(imod, :, :, :) = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + else + data1(imod, 0, :, :) = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + end if + end if + delete(A0_temp) + delete(A0) + end do + delete(imod) + + ; ############################################## + ; # Calculate change from hist to periods # + ; ############################################## + if (isatt(diag_script_info, "reference_run")) then + reference_run_name = diag_script_info@reference_run + else + log_info("No reference run name given, set to default 'historical'") + reference_run_name = "historical" + end if + + ; loop over ssps and periods + dim_scen = dimsizes(diag_script_info@scenarios) + dim_per = dimsizes(diag_script_info@periods) + + to_plot = new((/dim_per * dim_seas, nlat, nlon/), typeof(data1), Fill) + if (isatt(diag_script_info, "percent")) then + percent = diag_script_info@percent + else + percent = 0 + end if + i = 0 + do seas = 0, dim_seas - 1 ; loop over seasons + do per = 0, dim_per - 1 ; loop over periods + var_diff_scal_avg_mod = new((/1, nlat, nlon/), typeof(data1), Fill) + do mod_id = 0, dim_MOD-1 + if (dataset_exps(mod_id) .eq. reference_run_name) then + + else + idx_mod = ind((dataset_names .eq. dataset_names(mod_id)) \ + .and. (dataset_startyears .eq. \ + diag_script_info@periods(per)) \ + .and. (dataset_exps .ne. reference_run_name)) + curr_ind = ind(idx_mod .eq. mod_id) + proj = data1(idx_mod, seas, :, :) + temp_proj = temp_avg(idx_mod) + ; Cut low values + proj = where(abs(proj) .gt. 
1.e-14, proj, proj@_FillValue) + ; find historical runs from same models as in current mod + do jj = 0, dimsizes(idx_mod) - 1 + tmp_idx = ind((dataset_names .eq. dataset_names(idx_mod(jj))) \ + .and. (dataset_exps .eq. reference_run_name) \ + .and. (dataset_ensembles .eq. \ + dataset_ensembles(idx_mod(jj)))) + if (isdefined("idx_hist")) then + idx_hist := array_append_record(idx_hist, tmp_idx, 0) + else + idx_hist = tmp_idx + end if + delete(tmp_idx) + end do + + base = data1(idx_hist, seas, :, :) + temp_base = temp_avg(idx_hist) + ; Cut low values + base = where(abs(base) .gt. 1.e-14, base, base@_FillValue) + ; scale each model by global T change + log_debug("Scale each model by global T change") + dim_mod = dimsizes(idx_mod) + + if (dim_mod .lt. 2) then + var_diff_scal = new((/dim_mod, nlat, nlon/), typeof(data1), Fill) + if (percent .eq. 1) then + var_diff_scal(0, :, :) = ((100 * (proj(:, :) - base(:, :))) / \ + where(base(:, :) .ne. 0., \ + base(:, :), base@_FillValue)) / \ + (temp_proj(0) - temp_base(0)) + else + var_diff_scal(0, :, :) = (proj(:, :) - base(:, :)) / \ + (temp_proj(0) - temp_base(0)) + end if + delete([/base, proj, temp_proj, temp_base/]) + tmp = var_diff_scal_avg_mod + delete(var_diff_scal_avg_mod) + var_diff_scal_avg_mod = array_append_record(tmp, var_diff_scal, \ + 0) + delete(tmp) + delete([/idx_hist, idx_mod, var_diff_scal/]) + else + var_diff_scal = new((/dim_mod, nlat, nlon/), typeof(data1), \ + Fill) + do imod = 0, dim_mod - 1 + if (percent .eq. 1) then + var_diff_scal(imod, :, :) = ((100 * (proj(imod, :, :) - \ + base(imod, :, :))) / \ + where(base(imod, :, :) .ne. \ + 0., base(imod, :, :), \ + base@_FillValue)) / \ + (temp_proj(imod) - \ + temp_base(imod)) + else + var_diff_scal(imod, :, :) = (proj(imod, :, :) - \ + base(imod, :, :)) / \ + (temp_proj(imod) - \ + temp_base(imod)) + end if + end do + delete([/base, proj, temp_proj, temp_base/]) + tmp = var_diff_scal_avg_mod + delete(var_diff_scal_avg_mod) + avg_same_mod = new((/1, nlat, nlon/), typeof(data1), Fill) + avg_same_mod(0, :, :) = dim_avg_n_Wrap(var_diff_scal, 0) + var_diff_scal_avg_mod = array_append_record(tmp, \ + avg_same_mod, 0) + delete(avg_same_mod) + delete(tmp) + delete([/idx_hist, idx_mod, var_diff_scal/]) + end if + end if + end do + temp = var_diff_scal_avg_mod + delete(var_diff_scal_avg_mod) + var_diff_scal_avg_mod = temp(1:, :, :) + var_diff_scal_avg_mod!0 = "models" + var_diff_scal_avg_mod!1 = "lat" + var_diff_scal_avg_mod!2 = "lon" + var_diff_scal_avg_mod&lat = data1&lat + var_diff_scal_avg_mod&lon = data1&lon + ; average over ssps + log_debug("Average over models and ssps") + to_plot(i, :, :) = dim_stddev_n_Wrap(var_diff_scal_avg_mod, 0) + i = i + 1 + delete([/var_diff_scal_avg_mod/]) + end do ; per + end do ; seas + to_plot!0 = "panel" + to_plot&panel = diag_script_info@label + to_plot!1 = "lat" + to_plot&lat = data1&lat + to_plot!2 = "lon" + to_plot&lon = data1&lon + to_plot@units = variable_info[0]@units + if (percent .eq. 
+  to_plot!0 = "panel"
+  to_plot&panel = diag_script_info@label
+  to_plot!1 = "lat"
+  to_plot&lat = data1&lat
+  to_plot!2 = "lon"
+  to_plot&lon = data1&lon
+  to_plot@units = variable_info[0]@units
+  if (percent .eq. 1) then
+    to_plot@units = "%"
+  end if
+
+  ; ###########################################
+  ; # Other Metadata: diag_script, var        #
+  ; ###########################################
+  ; Add to to_plot, as attributes without prefix
+  if (isatt(to_plot, "diag_script")) then  ; Add to existing entries
+    tmp = to_plot@diag_script
+    delete(to_plot@diag_script)
+    to_plot@diag_script = array_append_record(tmp, (/DIAG_SCRIPT/), 0)
+    delete(tmp)
+  else  ; Add as new attribute
+    to_plot@diag_script = (/DIAG_SCRIPT/)
+  end if
+  to_plot@var = var0  ; Overwrite existing entry
+  if (isatt(variable_info[0], "long_name")) then
+    to_plot@var_long_name = variable_info[0]@long_name
+  end if
+
+  ; Check units and adjust for plotting if necessary
+  ; difference in K is the same as difference in degC, only change label
+  if (to_plot@units.eq."K") then
+    to_plot@units = "degC"
+  end if
+  if (isatt(diag_script_info, "plot_units")) then
+    to_plot = convert_units(to_plot, diag_script_info@plot_units)
+  end if
+  tmp_unit = to_plot@units
+  to_plot@units = tmp_unit + " per degC"
+
+  ; ###########################################
+  ; # Output to netCDF                        #
+  ; ###########################################
+  to_plot@ncdf = variable_info[0]@diagnostic + ".nc"
+  ncdf_file = work_dir + "/" + to_plot@ncdf
+  ncdf_file@existing = "overwrite"
+  ; Function in ~/interface_scripts/auxiliary.ncl
+  ncdf_outfile = ncdf_write(to_plot, ncdf_file)
+
+  leave_msg(DIAG_SCRIPT, "")
+end
diff --git a/esmvaltool/diag_scripts/tebaldi21esd/calc_pattern_interscenario_stddev_scaleT.ncl b/esmvaltool/diag_scripts/tebaldi21esd/calc_pattern_interscenario_stddev_scaleT.ncl
new file mode 100644
index 0000000000..d5b9defdff
--- /dev/null
+++ b/esmvaltool/diag_scripts/tebaldi21esd/calc_pattern_interscenario_stddev_scaleT.ncl
@@ -0,0 +1,344 @@
+; #############################################################################
+; Interscenario Stddev of the considered variable scaled by global T change map
+; Derived and adapted from
+; diag_scripts/ipcc_ar5/ch12_calc_map_diff_scaleT_mmm_stipp.ncl
+; Author: Debeire Kevin (DLR, Germany)
+; #############################################################################
+;
+;  Description
+;     Pattern scaling. Inter-scenario standard deviation (the standard
+;     deviation across scenarios of the scenario-mean change) scaled by
+;     global T change in different periods
+;     (e.g. 2081-2100 and 2181-2200 with respect to 1986-2005)
+;
+;  Required diag_script_info attributes (diagnostics specific)
+;     scenarios: list with scenarios to be included in the
+;                figure, e.g. (/"ssp245","ssp585"/)
+;     periods: list with start years of periods to be included
+;              e.g. (/"2046","2081"/)
+;     time_avg: list with seasons or annual to be included
+;               e.g. (/"annualclim"/)
+;     label: label of periods (e.g. 
2081-2100 relative to 1995-2014) +; +; Optional diag_script_info attributes (diagnostic specific) +; seasons: list with seasons index to be included if +; averagetime is "seasonalclim" (required for +; seasonalclim), DJF:0, MAM:1, JJA:2, SON:3 +; percent: 0 or 1, difference in percent = 1, default = 0 +; units: unit string in ncl formatting for legend title +; +; Modification history +; 20220803-A_debe_kevin: written for ESMValTool +; +; ############################################################################# +; A temporary file written by the invoking Python script +; Passes on a number of variables from Python to NCL +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/ensemble.ncl" +load "$diag_scripts/shared/scaling.ncl" + +load "$diag_scripts/shared/plot/style.ncl" + +begin + enter_msg(DIAG_SCRIPT, "") + + ; 'datasets', 'variables' are fetched from the above 'interface.ncl' file + var0 = variable_info[0]@short_name + info_items = select_metadata_by_name(input_file_info, var0) + dim_MOD = ListCount(info_items) + dim_VAR = ListCount(variable_info) + + if (dim_VAR .eq. 2) then + var0 = variable_info[0]@short_name + var1 = variable_info[1]@short_name + info_items2 = select_metadata_by_name(input_file_info, var1) + else + var0 = variable_info[0]@short_name + var1 = var0 + info_items2 = info_items + end if + + if (var1 .ne. "tas") then + error_msg("f", DIAG_SCRIPT, "", "first variable must be tas " + \ + "to scale by global mean tas change") + end if + + ; Save some dataset attributes as arrays for index-based selection below + dataset_names = metadata_att_as_array(info_items, "dataset") + dataset_exps = metadata_att_as_array(info_items, "exp") + dataset_ensembles = metadata_att_as_array(info_items, "ensemble") + dataset_startyears = metadata_att_as_array(info_items, "start_year") + + if (isatt(diag_script_info, "seasons")) then + dim_seas = dimsizes(diag_script_info@seasons) + else + dim_seas = 1 + end if + + ; Check required diag_script_info attributes + req_atts = (/"scenarios", "periods", "time_avg"/) + exit_if_missing_atts(diag_script_info, req_atts) + +end + +begin + ; Output netcdf directory + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) +end + +begin + if(isvar("MyParam")) then + delete(MyParam) + end if + if(isatt(variable_info[0], "long_name")) then + MyParam = variable_info[0]@long_name + log_info(" MyParam = " + MyParam) + else + error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \ + "'long_name'") + end if + + ; ########################################### + ; # Get data and average time # + ; ########################################### + ; get data from first model + imod = 0 ; NCL array indices start from zero + log_info("processing " + info_items[imod]@dataset + "_" \ + + info_items[imod]@exp + "_" \ + + info_items[imod]@ensemble) + + ; See ./interface_scripts/data_handling.ncl + A0 = read_data(info_items[imod]) + Fill = default_fillvalue(typeof(A0)) ; 1e20 + + ; Check dimensions + dims = getvardims(A0) + if (dimsizes(dims) .lt. 2) then + error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + " dimensions, " + \ + "need 2 or 3") + end if + idx = ind(dims .eq. "lat") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lat dimension") + end if + nlat = dimsizes(A0&lat) + idx = ind(dims .eq. 
"lon") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lon dimension") + end if + nlon = dimsizes(A0&lon) + + temp_avg = new((/dim_MOD/), typeof(A0), Fill) + temp_avg!0 = "models" + data1 = new((/dim_MOD, dim_seas, nlat, nlon/), \ + typeof(A0), Fill) + ; Average over time (function in ./diag_scripts/lib/ncl/statistics.ncl) + if ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .ne. 4)) then + tmp_seas = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + if (dim_seas .eq. 1) then + data1(imod, 0, :, :) = tmp_seas(diag_script_info@seasons, :, :) + else + data1(imod, :, :, :) = tmp_seas(diag_script_info@seasons, :, :) + end if + elseif ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .eq. 4)) then + data1(imod, :, :, :) = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + else + data1(imod, 0, :, :) = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + end if + rad = (4.0 * atan(1.0) / 180.0) + do imod = 0, dim_MOD - 1 + A0_temp = read_data(info_items2[imod]) + temp_tavg = dim_avg_n_Wrap(A0_temp, 0) + latw = cos(data1&lat * rad) + temp_avg(imod) = wgt_areaave_Wrap(temp_tavg, latw, 1.0, 0) + delete(temp_tavg) + delete(latw) + if (imod .ne. 0) then + A0 = read_data(info_items[imod]) + ; Average over time, + ; function in ./diag_scripts/lib/ncl/statistics.ncl) + if ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .ne. 4)) then + tmp_seas = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + if (dim_seas .eq. 1) then + data1(imod, 0, :, :) = tmp_seas(diag_script_info@seasons, :, :) + else + data1(imod, :, :, :) = tmp_seas(diag_script_info@seasons, :, :) + end if + delete(tmp_seas) + elseif ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .eq. 4)) then + data1(imod, :, :, :) = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + else + data1(imod, 0, :, :) = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + end if + end if + delete(A0_temp) + delete(A0) + end do + delete(imod) + + ; ############################################## + ; # Calculate change from hist to periods # + ; ############################################## + if (isatt(diag_script_info, "reference_run")) then + reference_run_name = diag_script_info@reference_run + else + log_info("No reference run name given, set to default 'historical'") + reference_run_name = "historical" + end if + + ; loop over ssps and periods + dim_scen = dimsizes(diag_script_info@scenarios) + dim_per = dimsizes(diag_script_info@periods) + + to_plot = new((/dim_per * dim_seas, nlat, nlon/), typeof(data1), Fill) + if (isatt(diag_script_info, "percent")) then + percent = diag_script_info@percent + else + percent = 0 + end if + i = 0 + do seas = 0, dim_seas - 1 ; loop over seasons + do per = 0, dim_per - 1 ; loop over periods + var_diff_scal_avg_scen = new((/1, nlat, nlon/), typeof(data1), Fill) + do mod_id = 0, dim_MOD-1 + if (dataset_exps(mod_id) .eq. reference_run_name) then + + else + idx_mod = ind((dataset_exps .eq. dataset_exps(mod_id)) \ + .and. (dataset_startyears .eq. \ + diag_script_info@periods(per)) \ + .and. (dataset_exps .ne. reference_run_name)) + curr_ind = ind(idx_mod .eq. mod_id) + proj = data1(idx_mod, seas, :, :) + temp_proj = temp_avg(idx_mod) + ; Cut low values + proj = where(abs(proj) .gt. 
1.e-14, proj, proj@_FillValue)
+          ; find historical runs from same models as in current mod
+          do jj = 0, dimsizes(idx_mod) - 1
+            tmp_idx = ind(dataset_names .eq. \
+                          dataset_names(idx_mod(jj)) .and. \
+                          dataset_exps .eq. reference_run_name .and. \
+                          dataset_ensembles .eq. \
+                          dataset_ensembles(idx_mod(jj)))
+            if (isdefined("idx_hist")) then
+              idx_hist := array_append_record(idx_hist, tmp_idx, 0)
+            else
+              idx_hist = tmp_idx
+            end if
+            delete(tmp_idx)
+          end do
+          base = data1(idx_hist, seas, :, :)
+          temp_base = temp_avg(idx_hist)
+          ; Cut low values
+          base = where(abs(base) .gt. 1.e-14, base, base@_FillValue)
+          ; scale each model by global T change
+          log_debug("Scale each model by global T change")
+          dim_mod = dimsizes(idx_mod)
+
+          if (dim_mod .lt. 2) then
+            error_msg("f", DIAG_SCRIPT, "", \
+                      "Only one model found, " + \
+                      "multi-model needs at least two.")
+          end if
+          var_diff_scal = new((/dim_mod, nlat, nlon/), typeof(data1), Fill)
+          do imod = 0, dim_mod - 1
+            if (percent .eq. 1) then
+              var_diff_scal(imod, :, :) = \
+                ((100 * (proj(imod, :, :) - base(imod, :, :))) / \
+                 where(base(imod, :, :) .ne. 0., \
+                       base(imod, :, :), base@_FillValue)) / \
+                (temp_proj(imod) - temp_base(imod))
+            else
+              var_diff_scal(imod, :, :) = (proj(imod, :, :) - \
+                                           base(imod, :, :)) / \
+                                          (temp_proj(imod) - temp_base(imod))
+            end if
+          end do
+          delete([/base, proj, temp_proj, temp_base/])
+          tmp = var_diff_scal_avg_scen
+          delete(var_diff_scal_avg_scen)
+          avg_same_scen = new((/1, nlat, nlon/), typeof(data1), Fill)
+          avg_same_scen(0, :, :) = dim_avg_n_Wrap(var_diff_scal, 0)
+          var_diff_scal_avg_scen = array_append_record(tmp, avg_same_scen, 0)
+          delete(avg_same_scen)
+          delete(tmp)
+          delete([/idx_hist, idx_mod, var_diff_scal/])
+        end if
+      end do
+      temp = var_diff_scal_avg_scen
+      delete(var_diff_scal_avg_scen)
+      var_diff_scal_avg_scen = temp(1:, :, :)
+      var_diff_scal_avg_scen!0 = "models"
+      var_diff_scal_avg_scen!1 = "lat"
+      var_diff_scal_avg_scen!2 = "lon"
+      var_diff_scal_avg_scen&lat = data1&lat
+      var_diff_scal_avg_scen&lon = data1&lon
+      ; standard deviation across scenarios
+      log_debug("Compute standard deviation across scenarios")
+      to_plot(i, :, :) = dim_stddev_n_Wrap(var_diff_scal_avg_scen, 0)
+      i = i + 1
+      delete([/var_diff_scal_avg_scen/])
+    end do  ; per
+  end do  ; seas
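+  ; Illustration (hypothetical numbers): within each scenario the scaled
+  ; patterns (proj - base) / (temp_proj - temp_base) are first averaged
+  ; over the scenario's models; to_plot then holds the standard deviation
+  ; across these per-scenario means. E.g. per-scenario means of 1.2 and
+  ; 1.8 K per degC at a grid cell give an inter-scenario stddev of about
+  ; 0.42 K per degC.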
+  to_plot!0 = "panel"
+  to_plot&panel = diag_script_info@label
+  to_plot!1 = "lat"
+  to_plot&lat = data1&lat
+  to_plot!2 = "lon"
+  to_plot&lon = data1&lon
+  to_plot@units = variable_info[0]@units
+  if (percent .eq. 1) then
+    to_plot@units = "%"
+  end if
+
+  ; ###########################################
+  ; # Other Metadata: diag_script, var        #
+  ; ###########################################
+  ; Add to to_plot, as attributes without prefix
+  if (isatt(to_plot, "diag_script")) then  ; Add to existing entries
+    tmp = to_plot@diag_script
+    delete(to_plot@diag_script)
+    to_plot@diag_script = array_append_record(tmp, (/DIAG_SCRIPT/), 0)
+    delete(tmp)
+  else  ; Add as new attribute
+    to_plot@diag_script = (/DIAG_SCRIPT/)
+  end if
+  to_plot@var = var0  ; Overwrite existing entry
+  if (isatt(variable_info[0], "long_name")) then
+    to_plot@var_long_name = variable_info[0]@long_name
+  end if
+
+  ; Check units and adjust for plotting if necessary
+  ; difference in K is the same as difference in degC, only change label
+  if (to_plot@units.eq."K") then
+    to_plot@units = "degC"
+  end if
+  if (isatt(diag_script_info, "plot_units")) then
+    to_plot = convert_units(to_plot, diag_script_info@plot_units)
+  end if
+  tmp_unit = to_plot@units
+  to_plot@units = tmp_unit + " per degC"
+
+  ; ###########################################
+  ; # Output to netCDF                        #
+  ; ###########################################
+  to_plot@ncdf = variable_info[0]@diagnostic + ".nc"
+  ncdf_file = work_dir + "/" + to_plot@ncdf
+  ncdf_file@existing = "overwrite"
+  ; Function in ~/interface_scripts/auxiliary.ncl
+  ncdf_outfile = ncdf_write(to_plot, ncdf_file)
+
+  leave_msg(DIAG_SCRIPT, "")
+end
diff --git a/esmvaltool/diag_scripts/tebaldi21esd/calc_pattern_stddev_scaleT.ncl b/esmvaltool/diag_scripts/tebaldi21esd/calc_pattern_stddev_scaleT.ncl
new file mode 100644
index 0000000000..67eb83a5aa
--- /dev/null
+++ b/esmvaltool/diag_scripts/tebaldi21esd/calc_pattern_stddev_scaleT.ncl
@@ -0,0 +1,333 @@
+; #############################################################################
+; Multi-model Stddev of considered variable scaled by global T change map
+; Derived and adapted from
+; diag_scripts/ipcc_ar5/ch12_calc_map_diff_scaleT_mmm_stipp.ncl
+; Author: Debeire Kevin (DLR, Germany)
+; #############################################################################
+;
+;  Description
+;     Pattern scaling. Standard deviation across models of the considered
+;     variable's change scaled by global T change in different periods
+;     (e.g. 2081-2100 with respect to 1995-2014)
+;
+;  Required diag_script_info attributes (diagnostics specific)
+;     scenarios: list with scenarios to be included in the
+;                figure, e.g. (/"ssp245","ssp585"/)
+;     periods: list with start years of periods to be included
+;              e.g. (/"2046","2081"/)
+;     time_avg: list with seasons or annual to be included
+;               e.g. (/"annualclim"/)
+;     label: label of periods (e.g. 
2081-2100 relative to 1995-2014) +; +; Optional diag_script_info attributes (diagnostic specific) +; seasons: list with seasons index to be included if +; averagetime is "seasonalclim" (required for +; seasonalclim), DJF:0, MAM:1, JJA:2, SON:3 +; percent: 0 or 1, difference in percent = 1, default = 0 +; units: unit string in ncl formatting for legend title +; +; Modification history +; 20220803-A_debe_kevin: written for ESMValTool +; +; ############################################################################# +; A temporary file written by the invoking Python script +; Passes on a number of variables from Python to NCL +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/ensemble.ncl" +load "$diag_scripts/shared/scaling.ncl" + +load "$diag_scripts/shared/plot/style.ncl" + +begin + enter_msg(DIAG_SCRIPT, "") + + ; 'datasets', 'variables' are fetched from the above 'interface.ncl' file + var0 = variable_info[0]@short_name + info_items = select_metadata_by_name(input_file_info, var0) + dim_MOD = ListCount(info_items) + dim_VAR = ListCount(variable_info) + + if (dim_VAR .eq. 2) then + var0 = variable_info[0]@short_name + var1 = variable_info[1]@short_name + info_items2 = select_metadata_by_name(input_file_info, var1) + else + var0 = variable_info[0]@short_name + var1 = var0 + info_items2 = info_items + end if + + if (var1 .ne. "tas") then + error_msg("f", DIAG_SCRIPT, "", "first variable must be tas " + \ + "to scale by global mean tas change") + end if + + ; Save some dataset attributes as arrays for index-based selection below + dataset_names = metadata_att_as_array(info_items, "dataset") + dataset_exps = metadata_att_as_array(info_items, "exp") + dataset_ensembles = metadata_att_as_array(info_items, "ensemble") + dataset_startyears = metadata_att_as_array(info_items, "start_year") + + if (isatt(diag_script_info, "seasons")) then + dim_seas = dimsizes(diag_script_info@seasons) + else + dim_seas = 1 + end if + + ; Check required diag_script_info attributes + req_atts = (/"scenarios", "periods", "time_avg"/) + exit_if_missing_atts(diag_script_info, req_atts) + +end + +begin + ; Output netcdf directory + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) +end + +begin + if(isvar("MyParam")) then + delete(MyParam) + end if + if(isatt(variable_info[0], "long_name")) then + MyParam = variable_info[0]@long_name + log_info(" MyParam = " + MyParam) + else + error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \ + "'long_name'") + end if + + ; ########################################### + ; # Get data and average time # + ; ########################################### + ; get data from first model + imod = 0 ; NCL array indices start from zero + log_info("processing " + info_items[imod]@dataset + "_" \ + + info_items[imod]@exp + "_" \ + + info_items[imod]@ensemble) + + ; See ./interface_scripts/data_handling.ncl + A0 = read_data(info_items[imod]) + Fill = default_fillvalue(typeof(A0)) ; 1e20 + + ; Check dimensions + dims = getvardims(A0) + if (dimsizes(dims) .lt. 2) then + error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + " dimensions, " + \ + "need 2 or 3") + end if + idx = ind(dims .eq. "lat") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lat dimension") + end if + nlat = dimsizes(A0&lat) + idx = ind(dims .eq. 
"lon") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lon dimension") + end if + nlon = dimsizes(A0&lon) + + temp_avg = new((/dim_MOD/), typeof(A0), Fill) + temp_avg!0 = "models" + data1 = new((/dim_MOD, dim_seas, nlat, nlon/), \ + typeof(A0), Fill) + ; Average over time (function in ./diag_scripts/lib/ncl/statistics.ncl) + if ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .ne. 4)) then + tmp_seas = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + if (dim_seas .eq. 1) then + data1(imod, 0, :, :) = tmp_seas(diag_script_info@seasons, :, :) + else + data1(imod, :, :, :) = tmp_seas(diag_script_info@seasons, :, :) + end if + elseif ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .eq. 4)) then + data1(imod, :, :, :) = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + else + data1(imod, 0, :, :) = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + end if + rad = (4.0 * atan(1.0) / 180.0) + do imod = 0, dim_MOD - 1 + A0_temp = read_data(info_items2[imod]) + temp_tavg = dim_avg_n_Wrap(A0_temp, 0) + latw = cos(data1&lat * rad) + temp_avg(imod) = wgt_areaave_Wrap(temp_tavg, latw, 1.0, 0) + delete(temp_tavg) + delete(latw) + if (imod .ne. 0) then + A0 = read_data(info_items[imod]) + ; Average over time, + ; function in ./diag_scripts/lib/ncl/statistics.ncl) + if ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .ne. 4)) then + tmp_seas = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + if (dim_seas .eq. 1) then + data1(imod, 0, :, :) = tmp_seas(diag_script_info@seasons, :, :) + else + data1(imod, :, :, :) = tmp_seas(diag_script_info@seasons, :, :) + end if + delete(tmp_seas) + elseif ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .eq. 4)) then + data1(imod, :, :, :) = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + else + data1(imod, 0, :, :) = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + end if + end if + delete(A0_temp) + delete(A0) + end do + delete(imod) + + ; ############################################## + ; # Calculate change from hist to periods # + ; ############################################## + if (isatt(diag_script_info, "reference_run")) then + reference_run_name = diag_script_info@reference_run + else + log_info("No reference run name given, set to default 'historical'") + reference_run_name = "historical" + end if + + ; loop over ssps and periods + dim_scen = dimsizes(diag_script_info@scenarios) + dim_per = dimsizes(diag_script_info@periods) + + to_plot = new((/dim_per * dim_seas, nlat, nlon/), typeof(data1), Fill) + if (isatt(diag_script_info, "percent")) then + percent = diag_script_info@percent + else + percent = 0 + end if + i = 0 + do seas = 0, dim_seas - 1 ; loop over seasons + do per = 0, dim_per - 1 ; loop over periods + do ssp = 0, dim_scen - 1 ; loop over ssps + idx_ssp = ind((dataset_exps .eq. diag_script_info@scenarios(ssp)) \ + .and. (dataset_startyears .eq. \ + diag_script_info@periods(per))) + proj = data1(idx_ssp, seas, :, :) + temp_proj = temp_avg(idx_ssp) + ; Cut low values + proj = where(abs(proj) .gt. 1.e-14, proj, proj@_FillValue) + ; find historical runs from same models as in ssp + do jj = 0, dimsizes(idx_ssp) - 1 + tmp_idx = ind(dataset_names .eq. dataset_names(idx_ssp(jj)) .and. \ + dataset_exps .eq. reference_run_name .and. \ + dataset_ensembles .eq. 
dataset_ensembles(idx_ssp(jj)))
+          if (isdefined("idx_hist")) then
+            idx_hist := array_append_record(idx_hist, tmp_idx, 0)
+          else
+            idx_hist = tmp_idx
+          end if
+          delete(tmp_idx)
+        end do
+
+        base = data1(idx_hist, seas, :, :)
+        temp_base = temp_avg(idx_hist)
+        ; Cut low values
+        base = where(abs(base) .gt. 1.e-14, base, base@_FillValue)
+        ; scale each model by global T change
+        log_debug("Scale each model by global T change")
+        dim_mod = dimsizes(idx_ssp)
+        if (dim_mod .lt. 2) then
+          error_msg("f", DIAG_SCRIPT, "", \
+                    "Only one model found, multi-model needs at least two.")
+        end if
+        var_diff_scal = new((/dim_mod, nlat, nlon/), typeof(data1), Fill)
+        do imod = 0, dim_mod - 1
+          if (percent .eq. 1) then
+            var_diff_scal(imod, :, :) = \
+              ((100 * (proj(imod, :, :) - base(imod, :, :))) / \
+               where(base(imod, :, :) .ne. 0., \
+                     base(imod, :, :), base@_FillValue)) / \
+              (temp_proj(imod) - temp_base(imod))
+          else
+            var_diff_scal(imod, :, :) = (proj(imod, :, :) - \
+                                         base(imod, :, :)) / \
+                                        (temp_proj(imod) - temp_base(imod))
+          end if
+        end do
+        delete([/base, proj, temp_proj, temp_base/])
+        if (ssp .eq. 0) then
+          var_diff_scal_all_mod = var_diff_scal
+        else
+          tmp_scal_all_mod = var_diff_scal_all_mod
+          delete(var_diff_scal_all_mod)
+          var_diff_scal_all_mod = \
+            array_append_record(tmp_scal_all_mod, var_diff_scal, 0)
+          delete(tmp_scal_all_mod)
+        end if
+        delete([/idx_hist, idx_ssp, var_diff_scal/])
+      end do  ; ssp
+      var_diff_scal_all_mod!0 = "models"
+      var_diff_scal_all_mod!1 = "lat"
+      var_diff_scal_all_mod!2 = "lon"
+      var_diff_scal_all_mod&lat = data1&lat
+      var_diff_scal_all_mod&lon = data1&lon
+      ; standard deviation across all models pooled over ssps
+      log_debug("Compute standard deviation across models pooled over ssps")
+      to_plot(i, :, :) = dim_stddev_n_Wrap(var_diff_scal_all_mod, 0)
+      i = i + 1
+      delete([/var_diff_scal_all_mod/])
+    end do  ; per
+  end do  ; seas
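+  ; Note: unlike the inter-scenario variant of this diagnostic, the scaled
+  ; patterns of all models from all requested ssps are pooled in
+  ; var_diff_scal_all_mod, so each panel of to_plot is one standard
+  ; deviation across the pooled set of model patterns.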
+  to_plot!0 = "panel"
+  to_plot&panel = diag_script_info@label
+  to_plot!1 = "lat"
+  to_plot&lat = data1&lat
+  to_plot!2 = "lon"
+  to_plot&lon = data1&lon
+  to_plot@units = variable_info[0]@units
+  if (percent .eq. 1) then
+    to_plot@units = "%"
+  end if
+
+  ; ###########################################
+  ; # Other Metadata: diag_script, var        #
+  ; ###########################################
+  ; Add to to_plot, as attributes without prefix
+  if (isatt(to_plot, "diag_script")) then  ; Add to existing entries
+    tmp = to_plot@diag_script
+    delete(to_plot@diag_script)
+    to_plot@diag_script = array_append_record(tmp, (/DIAG_SCRIPT/), 0)
+    delete(tmp)
+  else  ; Add as new attribute
+    to_plot@diag_script = (/DIAG_SCRIPT/)
+  end if
+  to_plot@var = var0  ; Overwrite existing entry
+  if (isatt(variable_info[0], "long_name")) then
+    to_plot@var_long_name = variable_info[0]@long_name
+  end if
+
+  ; Check units and adjust for plotting if necessary
+  ; difference in K is the same as difference in degC, only change label
+  if (to_plot@units.eq."K") then
+    to_plot@units = "degC"
+  end if
+  if (isatt(diag_script_info, "plot_units")) then
+    to_plot = convert_units(to_plot, diag_script_info@plot_units)
+  end if
+  tmp_unit = to_plot@units
+  to_plot@units = tmp_unit + " per degC"
+
+  ; ###########################################
+  ; # Output to netCDF                        #
+  ; ###########################################
+  to_plot@ncdf = variable_info[0]@diagnostic + ".nc"
+  ncdf_file = work_dir + "/" + to_plot@ncdf
+  ncdf_file@existing = "overwrite"
+  ; Function in ~/interface_scripts/auxiliary.ncl
+  ncdf_outfile = ncdf_write(to_plot, ncdf_file)
+
+  leave_msg(DIAG_SCRIPT, "")
+end
diff --git a/esmvaltool/diag_scripts/tebaldi21esd/calc_pattern_stippling_hatching.ncl b/esmvaltool/diag_scripts/tebaldi21esd/calc_pattern_stippling_hatching.ncl
new file mode 100644
index 0000000000..0d649f795c
--- /dev/null
+++ b/esmvaltool/diag_scripts/tebaldi21esd/calc_pattern_stippling_hatching.ncl
@@ -0,0 +1,484 @@
+; #############################################################################
+; Multi-model mean change map with significance
+; Author: Kevin Debeire (DLR, Germany)
+; #############################################################################
+;
+;  Description
+;     Computes the map of multi-model mean change with stippling for
+;     significant regions and hatching for non-significant regions. A region
+;     is significant where the multi-model mean change is greater than two
+;     standard deviations of the internal variability and where at least 90%
+;     of the models agree on the sign of change. It is not significant where
+;     the multi-model mean change is less than one standard deviation of
+;     internal variability.
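+;     In short, for each grid cell:
+;       stippling (significant):  |mmm change| > 2 * stddev(internal
+;         variability) and at least 90% of the models agree on the sign
+;       hatching (not significant): |mmm change| < 1 * stddev(internal
+;         variability)
+;     The internal-variability estimate is read from files precomputed by
+;     calc_IAV_hatching.ncl (see the 'ancestors' attribute below).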
+;
+;  Required diag_script_info attributes (diagnostics specific)
+;     ancestors: variable and diagnostics that calculated interannual
+;                variability for stippling and hatching
+;     scenarios: list with scenarios to be included in the
+;                figure, e.g. (/"ssp245","ssp585"/)
+;     periods: list with start years of periods to be included
+;              e.g. (/"2046","2081"/)
+;     time_avg: list with seasons or annual to be included
+;               e.g. (/"annualclim"/)
+;     sig: plot stippling for significance? (True, False)
+;     not_sig: plot hatching for uncertainty? (True, False)
+;     iavmode: calculate IAV from multi-model mean or save individual models
+;              ("each": save individual models, "mmm": multi-model mean,
+;              default), needs to be consistent with calc_IAV_hatching.ncl
+;     label: list with labels to use in legend depending on scenarios
+;
+;  Optional diag_script_info attributes (diagnostic specific)
+;     seasons: list with seasons index to be included if
+;              averagetime is "seasonalclim" (required for
+;              seasonalclim), DJF:0, MAM:1, JJA:2, SON:3
+;     percent: 0 or 1, difference in percent = 1, default = 0
+;
+;  Modification history
+;     20220803-A_debe_kevin: written for ESMValTool
+;
+; #############################################################################
+; A temporary file written by the invoking Python script
+; Passes on a number of variables from Python to NCL
+load "$diag_scripts/../interface_scripts/interface.ncl"
+
+load "$diag_scripts/shared/statistics.ncl"
+load "$diag_scripts/shared/ensemble.ncl"
+load "$diag_scripts/shared/scaling.ncl"
+
+load "$diag_scripts/shared/plot/style.ncl"
+
+begin
+  enter_msg(DIAG_SCRIPT, "")
+
+  ; 'datasets', 'variables' are fetched from the above 'interface.ncl' file
+  var0 = variable_info[0]@short_name
+  info_items = select_metadata_by_name(input_file_info, var0)
+  dim_MOD = ListCount(info_items)
+  dim_VAR = ListCount(variable_info)
+
+  ; Save some dataset attributes as arrays for index-based selection below
+  dataset_names = metadata_att_as_array(info_items, "dataset")
+  dataset_exps = metadata_att_as_array(info_items, "exp")
+  dataset_ensembles = metadata_att_as_array(info_items, "ensemble")
+  dataset_startyears = metadata_att_as_array(info_items, "start_year")
+
+  ; Check required diag_script_info attributes
+  req_atts = (/"scenarios", "periods", "time_avg", "label"/)
+  exit_if_missing_atts(diag_script_info, req_atts)
+
+end
+
+begin
+
+  ; Output netcdf directory
+  work_dir = config_user_info@work_dir
+  system("mkdir -p " + work_dir)
+
+  input_dir = diag_script_info@input_files(1)
+
+end
+
+begin
+  if(isvar("MyParam")) then
+    delete(MyParam)
+  end if
+  if(isatt(variable_info[0], "long_name")) then
+    MyParam = variable_info[0]@long_name
+    log_info(" MyParam = " + MyParam)
+  else
+    error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \
+              "'long_name'")
+  end if
+
+  ; ###########################################
+  ; # Get data and average time               #
+  ; ###########################################
+
+  ; Get data from first dataset
+  imod = 0  ; NCL array indices start from zero
+  log_debug("processing " + info_items[imod]@dataset + "_" \
+            + info_items[imod]@exp + "_" \
+            + info_items[imod]@ensemble)
+
+  ; See ./interface_scripts/data_handling.ncl
+  A0 = read_data(info_items[imod])
+
+  ; Check dimensions
+  dims = getvardims(A0)
+  if (dimsizes(dims) .lt. 2) then
+    error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + " dimensions, " + \
+              "need 2 or 3")
+  end if
+  idx = ind(dims .eq. "lat" .or. dims .eq. "rlat" .or. dims .eq. "j")
+  if (ismissing(idx)) then
+    error_msg("f", DIAG_SCRIPT, "", "no lat dimension")
+  end if
+  lat_name = dims(idx)
+  idx = ind(dims .eq. "lon" .or. dims .eq. "rlon" .or. dims .eq. 
"i") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lon dimension") + end if + lon_name = dims(idx) + + if (isatt(diag_script_info, "seasons")) then + dim_seas = dimsizes(diag_script_info@seasons) + else + dim_seas = 1 + end if + + ; Check if diag_script_info@label has correct size, otherwise error message + dim_periods = dimsizes(diag_script_info@periods) + dim_scenarios = dimsizes(diag_script_info@scenarios) + dim_label = dim_seas * dim_scenarios * dim_periods + if (dim_label .ne. dimsizes(diag_script_info@label)) then + error_msg("f", DIAG_SCRIPT, "", "wrong number of labels, needs to be " + \ + "seasons * scenarios * periods") + end if + + annots = project_style(info_items, diag_script_info, "annots") + Fill = default_fillvalue(typeof(A0)) + if (isatt(diag_script_info, "iavmode")) then + if (diag_script_info@iavmode .eq. "each") then + iav = new((/dim_MOD, dim_seas, dimsizes(A0&lat), \ + dimsizes(A0&lon)/), typeof(A0), Fill) + end if + end if + data2 = new((/dim_MOD, dim_seas, dimsizes(A0&lat), \ + dimsizes(A0&lon)/), typeof(A0), Fill) + + do imod = 0, dim_MOD - 1 + log_info(info_items[imod]@dataset) + ; Average over time + if (imod .eq. 0) then + if ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .ne. 4)) then + tmp_seas = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + data1 = tmp_seas(diag_script_info@seasons, :, :) + delete(tmp_seas) + else + data1 = time_operations(A0, -1, -1, "average", \ + diag_script_info@time_avg, True) + end if + else + if isvar((/"A1"/)) then + delete(A1) + end if + A1 = read_data(info_items[imod]) + dims = getvardims(A1) + idx = ind(dims .eq. "lat" .or. dims .eq. "rlat" .or. dims .eq. "j") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lat dimension") + end if + lat_name = dims(idx) + idx = ind(dims .eq. "lon" .or. dims .eq. "rlon" .or. dims .eq. "i") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lon dimension") + end if + lon_name = dims(idx) + ; Average over time + if ((diag_script_info@time_avg .eq. "seasonalclim") .and. \ + (dim_seas .ne. 4)) then + tmp_seas = time_operations(A1, -1, -1, "average", \ + diag_script_info@time_avg, True) + data1 = tmp_seas(diag_script_info@seasons, :, :) + delete(tmp_seas) + else + data1 = time_operations(A1, -1, -1, "average", \ + diag_script_info@time_avg, True) + end if + end if + if (dim_seas .eq. 1) then + data2(imod, 0, :, :) = data1 + else + data2(imod, :, :, :) = data1 + end if + + if (isatt(diag_script_info, "iavmode")) then + if (diag_script_info@iavmode .eq. "each") then + ; ######################################################### + ; # Read natural variability for stippling for each model # + ; ######################################################### + log_info("Read natural variability for stippling for each model") + file_path = systemfunc("ls " + input_dir + "/" + "IAV_piControl_" + \ + info_items[imod]@dataset + "_*_" + \ + var0 + "_" + diag_script_info@time_avg + ".nc") + iav_file = addfile(file_path(0), "r") + if ((dim_seas .eq. 1) .and. \ + (diag_script_info@time_avg .eq. "annualclim")) then + iav(imod, 0, :, :) = iav_file->iav + elseif ((dim_seas .eq. 1) .and. \ + (diag_script_info@time_avg .eq. "seasonalclim")) then + tmp_iav = iav_file->iav + iav(imod, 0, :, :) = tmp_iav(diag_script_info@seasons, :, :) + else + iav(imod, :, :, :) = iav_file->iav + end if + end if + end if + end do + + if (isatt(diag_script_info, "iavmode")) then + if (diag_script_info@iavmode .eq. 
"each") then + ; Calculate multi-model mean of iav + log_info("Calculate multi-model mean of natural variability") + natvar_mmm = rm_single_dims(dim_avg_n(iav, 0) * sqrt(2.)) + elseif (diag_script_info@iavmode .eq. "mmm") then + log_info("Read precalculated natural variability for multi-model") + natvar_file = addfile(input_dir + "/" + "IAV_mmm_piControl_" + \ + var0 + "_" + \ + diag_script_info@time_avg + ".nc", "r") + natvar_mmm = natvar_file->iav + if ((dim_seas .ne. 4) .and. \ + (diag_script_info@time_avg .ne. "annualclim")) then + tmp = natvar_mmm + delete(natvar_mmm) + natvar_mmm = rm_single_dims(tmp(diag_script_info@seasons, :, :)) + elseif (diag_script_info@time_avg .eq. "annualclim") then + tmp = natvar_mmm + delete(natvar_mmm) + natvar_mmm = rm_single_dims(tmp) + end if + else + error_msg("f", DIAG_SCRIPT, "", "Error: This iavmode is not know") + end if + else + ; Read already calculated mmm iav as default + log_info("Read precalculated natural variability for multi-model") + natvar_file = addfile(input_dir + "/" + "IAV_mmm_piControl_" + \ + var0 + "_" + \ + diag_script_info@time_avg + ".nc", "r") + natvar_mmm = natvar_file->iav + if ((dim_seas .ne. 4) .and. \ + (diag_script_info@time_avg .ne. "annualclim")) then + tmp = natvar_mmm + delete(natvar_mmm) + natvar_mmm = rm_single_dims(tmp(diag_script_info@seasons, :, :)) + elseif (diag_script_info@time_avg .eq. "annualclim") then + tmp = natvar_mmm + delete(natvar_mmm) + natvar_mmm = rm_single_dims(tmp) + end if + end if + data2!0 = "models" + data2!1 = "season" + data2!2 = "lat" + data2!3 = "lon" + data2&models = annots + if (isatt(diag_script_info, "seasons")) then + data2&season = (/ispan(0, dim_seas - 1, 1)/) + else + data2&season = (/0/) + end if + + ; ############################################## + ; # Calculate change from hist to periods # + ; ############################################## + if (isatt(diag_script_info, "reference_run")) then + reference_run_name = diag_script_info@reference_run + else + log_info("No reference run name given, set to default 'historical'") + reference_run_name = "historical" + end if + + ; Loop over ssps and periods + dim_scen = dimsizes(diag_script_info@scenarios) + dim_per = dimsizes(diag_script_info@periods) + + to_plot = new((/dim_scen * dim_per * dim_seas, dimsizes(A0&lat), \ + dimsizes(A0&lon)/), typeof(data1), Fill) + if (isatt(diag_script_info, "percent")) then + percent = diag_script_info@percent + ref = new((/dim_scen * dim_per * dim_seas, dimsizes(A0&lat), \ + dimsizes(A0&lon)/), typeof(data1), Fill) + else + percent = 0 + end if + to_plot_signif = new((/dim_scen * dim_per * dim_seas, dimsizes(A0&lat), \ + dimsizes(A0&lon)/), typeof(data1), Fill) + to_plot_not_signif = new((/dim_scen * dim_per * dim_seas, dimsizes(A0&lat), \ + dimsizes(A0&lon)/), typeof(data1), Fill) + model_number = new((/dim_scen * dim_per * dim_seas/), integer) + i = 0 + do seas = 0, dim_seas - 1 ; loop over seasons + do ssp = 0, dim_scen - 1 ; loop over ssps + do per = 0, dim_per - 1 ; loop over periods + idx_ssp = ind(dataset_exps .eq. diag_script_info@scenarios(ssp) .and. \ + dataset_startyears .eq. diag_script_info@periods(per)) + proj = data2(idx_ssp, seas, :, :) + + ; Cut low values + proj = where(abs(proj) .gt. 1.e-14, proj, proj@_FillValue) + proj_avg = dim_avg_n_Wrap(proj, 0) + + ; Find historical runs from same datasets as in ssp + do jj = 0, dimsizes(idx_ssp) - 1 + tmp_idx = ind(dataset_names .eq. dataset_names(idx_ssp(jj)) .and. \ + dataset_exps .eq. reference_run_name .and. 
\ + dataset_ensembles .eq. dataset_ensembles(idx_ssp(jj))) + if (isdefined("idx_hist")) then + idx_hist := array_append_record(idx_hist, tmp_idx, 0) + else + idx_hist = tmp_idx + end if + delete(tmp_idx) + end do + + base = data2(idx_hist, seas, :, :) + + ; Cut low values + base = where(abs(base) .gt. 1.e-14, base, base@_FillValue) + base_avg = dim_avg_n_Wrap(base, 0) + + var_diff = proj - base + delete([/base, proj/]) + to_plot(i, :, :) = proj_avg - base_avg + if (isatt(diag_script_info, "percent")) then + ref(i, :, :) = base_avg + end if + + ; Determine sigma and sign for significance + var_pos_signif = dim_num_n(where(var_diff .gt. 0., 1., \ + to_plot@_FillValue) .eq. 1., 0) + var_neg_signif = dim_num_n(where(var_diff .lt. 0., 1., \ + to_plot@_FillValue) .eq. 1., 0) + var_pos_signif_tmp = \ + var_pos_signif / where((var_pos_signif + var_neg_signif) \ + .ne. 0., var_pos_signif + \ + var_neg_signif, to_plot@_FillValue) + var_neg_signif_tmp = \ + var_neg_signif / where((var_pos_signif + var_neg_signif) \ + .ne. 0., var_pos_signif + \ + var_neg_signif, to_plot@_FillValue) + signif_and_sign = where(var_pos_signif_tmp .ge. 0.9 .or. \ + var_neg_signif_tmp .ge. 0.9, 1., 0.) + signif_and_sign = where(ismissing(signif_and_sign), 0., \ + signif_and_sign) + if (dim_seas .ne. 1) then + sigma_and_sign = where(abs(to_plot(i, :, :)) .gt. \ + abs(2. * natvar_mmm(seas, :, :)), 1., 0.) + to_plot_not_signif(i, :, :) = where(abs(to_plot(i, :, :)) .lt. \ + abs(natvar_mmm(seas, :, :)), \ + 1., 0.) + else + sigma_and_sign = where(abs(to_plot(i, :, :)) .gt. \ + abs(2. * natvar_mmm), 1., 0.) + to_plot_not_signif(i, :, :) = where(abs(to_plot(i, :, :)) .lt. \ + abs(natvar_mmm), \ + 1., 0.) + end if + to_plot_signif(i, :, :) = where(signif_and_sign .eq. 1, \ + sigma_and_sign, 0.) + model_number(i) = dimsizes(idx_ssp) + i = i + 1 + delete([/idx_hist, idx_ssp, var_diff/]) + end do ; per + end do ; ssp + end do ; seas + + to_plot!0 = "panel" + to_plot&panel = diag_script_info@label + to_plot!1 = "lat" + to_plot&lat = A0&lat + to_plot!2 = "lon" + to_plot&lon = A0&lon + copy_VarMeta(to_plot, to_plot_signif) + copy_VarMeta(to_plot, to_plot_not_signif) + if (isatt(diag_script_info, "percent")) then + percent = diag_script_info@percent + if (percent .eq. 1) then + to_plot = 100 * to_plot / where(ref .ne. 0., ref, ref@_FillValue) + to_plot@units = "%" + delete(ref) + end if + end if + + ; ########################################### + ; # Other Metadata: diag_script, var # + ; ########################################### + + ; Add to to_plot, as attributes without prefix + if (isatt(to_plot, "diag_script")) then ; add to existing entries + temp = to_plot@diag_script + delete(to_plot@diag_script) + to_plot@diag_script = array_append_record(temp, (/DIAG_SCRIPT/), 0) + delete(temp) + else ; Add as new attribute + to_plot@diag_script = (/DIAG_SCRIPT/) + end if + to_plot@var = var0 ; Overwrite existing entry + if (isatt(variable_info[0], "long_name")) then + to_plot@var_long_name = variable_info[0]@long_name + end if + + ; Check units and adjust for plotting if necessary + if (percent .eq. 
0) then + to_plot@units = variable_info[0]@units + end if + ; difference in K is the same as difference in degC, only change label + if (to_plot@units.eq."K") then + to_plot@units = "degC" + end if + if (isatt(diag_script_info, "plot_units")) then + to_plot = convert_units(to_plot, diag_script_info@plot_units) + end if + + if(isatt(to_plot_signif, "diag_script")) then ; add to existing entries + temp = to_plot_signif@diag_script + delete(to_plot_signif@diag_script) + to_plot_signif@diag_script = array_append_record(temp, (/DIAG_SCRIPT/), 0) + delete(temp) + else ; Add as new attribute + to_plot_signif@diag_script = (/DIAG_SCRIPT/) + end if + + to_plot_signif@var = "signif" + to_plot_signif@var_long_name = "significance: mean change larger than " \ + + "2*natvar and 90% of models have the " \ + + "same sign" + to_plot_signif@units = 1 + + if(isatt(to_plot_not_signif, "diag_script")) then + temp = to_plot_not_signif@diag_script + delete(to_plot_not_signif@diag_script) + to_plot_not_signif@diag_script = array_append_record(temp, \ + (/DIAG_SCRIPT/), 0) + delete(temp) + else ; Add as new attribute + to_plot_not_signif@diag_script = (/DIAG_SCRIPT/) + end if + to_plot_not_signif@var = "not_signif" + to_plot_not_signif@var_long_name = "not significant: mean change " \ + + "smaller than 1*natvar" + to_plot_not_signif@units = 1 + + model_number!0 = "panel" + model_number&panel = diag_script_info@label + if(isatt(model_number, "diag_script")) then ; add to existing entries + temp = model_number@diag_script + delete(model_number@diag_script) + model_number@diag_script = array_append_record(temp, (/DIAG_SCRIPT/), 0) + delete(temp) + else ; Add as new attribute + model_number@diag_script = (/DIAG_SCRIPT/) + end if + model_number@var = "model_nr" + model_number@var_long_name = "number of models in mmm" + model_number@units = 1 + + ; ########################################### + ; # Optional output to netCDF # + ; ########################################### + ; Optional output (controlled by diag_script_info) + to_plot@ncdf = variable_info[0]@diagnostic + ".nc" + ncdf_file = work_dir + "/" + to_plot@ncdf + ncdf_file@existing = "overwrite" + ncdf_outfile = ncdf_write(to_plot, ncdf_file) + ncdf_file@existing = "append" + ncdf_outfile = ncdf_write(to_plot_signif, ncdf_file) + ncdf_outfile = ncdf_write(to_plot_not_signif, ncdf_file) + ncdf_outfile = ncdf_write(model_number, ncdf_file) + + leave_msg(DIAG_SCRIPT, "") +end diff --git a/esmvaltool/diag_scripts/tebaldi21esd/calc_table_changes.ncl b/esmvaltool/diag_scripts/tebaldi21esd/calc_table_changes.ncl new file mode 100644 index 0000000000..0064e2da01 --- /dev/null +++ b/esmvaltool/diag_scripts/tebaldi21esd/calc_table_changes.ncl @@ -0,0 +1,215 @@ +; ############################################################################# +; Computes the entries for the table of changes given scenario and period +; Author: Debeire Kevin (DLR, Germany) +; ############################################################################# +; Description +; Computes the changes (mean and spreads) for the specified scenarios and +; time periods relative to the historical baseline. +; +; Required diag_script_info attributes (diagnostics specific) +; scenarios: list with scenarios included in the table +; syears: list with start years of time periods to include in the table +; eyears: list with end years of the time periods to include in the table +; begin_ref_year: start year of historical baseline period (e.g. 1995) +; end_ref_year: end year of historical baseline period (e.g. 
2014) +; spread: multiplier of standard deviation to calculate spread with +; (e.g. 1.64) +; label: list of scenario names included in the table +; +; Modification history +; 20220803-A_debe_kevin: written for ESMValTool +; +; ############################################################################# +; A temporary file written by the invoking Python script +; Passes on a number of variables from Python to NCL +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/latlon.ncl" + +load "$diag_scripts/shared/statistics.ncl" + +load "$diag_scripts/shared/plot/aux_plotting.ncl" +load "$diag_scripts/shared/plot/style.ncl" + +begin + enter_msg(DIAG_SCRIPT, "") + + ; 'datasets', 'variables' are fetched from the above 'interface.ncl' file + var0 = variable_info[0]@short_name + info_items = select_metadata_by_name(input_file_info, var0) + dim_MOD = ListCount(info_items) + dim_VAR = ListCount(variable_info) + dataset_names = metadata_att_as_array(info_items, "dataset") + dataset_exps = metadata_att_as_array(info_items, "exp") + dataset_ensembles = metadata_att_as_array(info_items, "ensemble") + n_MOD = count_unique_values(dataset_names) + MOD_list = get_unique_values(dataset_names) + labels = new((/n_MOD/), string) + ; Check required diag_script_info attributes + exit_if_missing_atts(diag_script_info, (/"scenarios", "syears", "eyears", \ + "begin_ref_year", "end_ref_year"/)) + syears = diag_script_info@syears + eyears = diag_script_info@eyears +end + +begin + ; Output netcdf directory + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) +end + +begin + if(isvar("MyParam")) then + delete(MyParam) + end if + if(isatt(variable_info[0], "long_name")) then + MyParam = variable_info[0]@long_name + log_debug(" MyParam = " + MyParam) + else + error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \ + "'long_name'") + end if + if (isatt(diag_script_info, "reference_run")) then + reference_run_name = diag_script_info@reference_run + else + log_info("No reference run name given, set to default 'historical'") + reference_run_name = "historical" + end if + attsh = True + attsh@exp = reference_run_name + item_hist = select_metadata_by_atts(info_items, attsh) + dim_MOD_hist = ListCount(item_hist) + n_PER = dimsizes(syears) + period_list = (/tostring(syears(0)) + "-" + tostring(eyears(0)), \ + tostring(syears(1)) + "-" + tostring(eyears(1))/) + scenario_list = diag_script_info@scenarios + n_SCEN = dimsizes(diag_script_info@scenarios) + spread = diag_script_info@spread + mean_table = new((/n_PER, n_SCEN/), float) + five_perc_table = new((/n_PER, n_SCEN/), float) + ninetyfive_perc_table = new((/n_PER, n_SCEN/), float) + number_models_table = new((/n_PER, n_SCEN/), float) + do issp = 0, n_SCEN-1 + idx_scen = ind((dataset_exps .eq. diag_script_info@scenarios(issp))) + n_MOD = dimsizes(idx_scen) + do iper = 0, n_PER-1 + changes_for_all_models = new((/n_MOD/), float) + do imod = 0, n_MOD - 1 + ; LOAD HISTORICAL EXP and GET historical baseline + idx_histo = ind((dataset_exps .eq. reference_run_name) .and. \ + (dataset_names .eq. dataset_names(idx_scen(imod))) \ + .and. (dataset_ensembles .eq. \ + dataset_ensembles(idx_scen(imod)))) + Ahisto = read_data(info_items[idx_histo(0)]) + datahisto = time_operations(Ahisto, -1, -1, "average", "yearly", True) + delete(Ahisto) + datahisto_glob = coswgt_areaave(datahisto) + ind_start = ind(datahisto&year .eq. diag_script_info@begin_ref_year) + ind_end = ind(datahisto&year .eq. 
diag_script_info@end_ref_year)
+        histo_ref = dim_avg(datahisto_glob(ind_start : ind_end))
+        delete([/datahisto, datahisto_glob/])
+
+        A0 = read_data(info_items[idx_scen(imod)])
+        dims = getvardims(A0)
+        if (dimsizes(dims) .lt. 2) then
+          error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + " dimensions, " + \
+                    "need 2 or 3")
+        end if
+        idx = ind(dims .eq. "lat")
+        if (ismissing(idx)) then
+          error_msg("f", DIAG_SCRIPT, "", "no lat dimension")
+        end if
+        idx = ind(dims .eq. "lon")
+        if (ismissing(idx)) then
+          error_msg("f", DIAG_SCRIPT, "", "no lon dimension")
+        end if
+        ; Average over time (function in ./diag_scripts/shared/statistics.ncl)
+        data0 = time_operations(A0, -1, -1, "average", "yearly", True)
+        dim_file = dimsizes(data0)  ; file size of scenario file
+        ; Average over globe/region
+        ; (function in ./diag_scripts/shared/latlon.ncl)
+        if (isatt(diag_script_info, "ts_minlat")) then
+          data0_glob = area_operations(data0, diag_script_info@ts_minlat, \
+                                       diag_script_info@ts_maxlat, \
+                                       diag_script_info@ts_minlon, \
+                                       diag_script_info@ts_maxlon, "average", \
+                                       True)
+        else
+          data0_glob = coswgt_areaave(data0)
+        end if
+        ind_start_per = ind(data0_glob&year .eq. syears(iper))
+        ind_end_per = ind(data0_glob&year .eq. min((/eyears(iper), \
+                          data0_glob&year(dimsizes(data0_glob&year)-1)/)))
+        scen_period_val = dim_avg(data0_glob(ind_start_per : ind_end_per))
+        if (var0 .eq. "pr") then
+          changes_current_mod = 100 * (scen_period_val - histo_ref) / \
+            histo_ref
+        else
+          changes_current_mod = scen_period_val - histo_ref
+        end if
+        changes_for_all_models(imod) = changes_current_mod
+        delete(idx)
+        delete(A0)
+        delete(data0)
+        delete(data0_glob)
+        delete(histo_ref)
+        delete(idx_histo)
+      end do
+      mean_table(iper, issp) = avg(changes_for_all_models)
+      five_perc_table(iper, issp) = \
+        avg(changes_for_all_models) - spread * stddev(changes_for_all_models)
+      ninetyfive_perc_table(iper, issp) = \
+        avg(changes_for_all_models) + spread * stddev(changes_for_all_models)
+      number_models_table(iper, issp) = n_MOD
+      delete(changes_for_all_models)
+    end do
+    delete(idx_scen)
+  end do
+  to_plot = new((/4, n_PER, n_SCEN/), float)
+  to_plot(0, :, :) = mean_table
+  to_plot(1, :, :) = five_perc_table
+  to_plot(2, :, :) = ninetyfive_perc_table
+  to_plot(3, :, :) = tofloat(number_models_table)
+  to_plot!0 = "panel"
+  to_plot&panel = (/"mean", "five_perc", "ninetyfive_perc", "num_model"/)
+  to_plot!1 = "periods"
+  to_plot&periods = period_list
+  to_plot!2 = "ssp"
+  to_plot&ssp = scenario_list
+  to_plot@units = variable_info[0]@units
+  ; ###########################################
+  ; # Other Metadata: diag_script, var        #
+  ; ###########################################
+  ; Add to to_plot, as attributes without prefix
+  if (isatt(to_plot, "diag_script")) then  ; Add to existing entries
+    tmp = to_plot@diag_script
+    delete(to_plot@diag_script)
+    to_plot@diag_script = array_append_record(tmp, (/DIAG_SCRIPT/), 0)
+    delete(tmp)
+  else  ; Add as new attribute
+    to_plot@diag_script = (/DIAG_SCRIPT/)
+  end if
+  to_plot@var = var0  ; Overwrite existing entry
+  if (isatt(variable_info[0], "long_name")) then
+    to_plot@var_long_name = variable_info[0]@long_name
+  end if
+  ; Check units and adjust
+  if (to_plot@units.eq."K") then
+    to_plot@units = "degC"
+  end if
+  if (var0 .eq. "pr") then
+    to_plot@units = "%"
+  end if
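+  ; Illustration (hypothetical numbers): each table cell holds the model
+  ; mean of the change relative to the reference period, bracketed by
+  ; mean -/+ spread * stddev across models. With spread = 1.64, a mean
+  ; change of 2.0 degC and a model stddev of 0.5 degC yield the range
+  ; 1.18 to 2.82 degC, roughly a 5-95% interval if the changes were
+  ; normally distributed; for pr the change is expressed in percent.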
"pr" then + to_plot@units = "%" + end if + if (isatt(diag_script_info, "plot_units")) then + to_plot = convert_units(to_plot, diag_script_info@plot_units) + end if + ; ########################################### + ; # Output to netCDF # + ; ########################################### + to_plot@ncdf = variable_info[0]@diagnostic + ".nc" + ncdf_file = work_dir + "/" + to_plot@ncdf + ncdf_file@existing = "overwrite" + ; Function in ~/interface_scripts/auxiliary.ncl + ncdf_outfile = ncdf_write(to_plot, ncdf_file) + leave_msg(DIAG_SCRIPT, "") +end diff --git a/esmvaltool/diag_scripts/tebaldi21esd/calc_table_warming_level.ncl b/esmvaltool/diag_scripts/tebaldi21esd/calc_table_warming_level.ncl new file mode 100644 index 0000000000..942a725275 --- /dev/null +++ b/esmvaltool/diag_scripts/tebaldi21esd/calc_table_warming_level.ncl @@ -0,0 +1,218 @@ +; ############################################################################# +; Produces entries for the table of warming level crossings +; Author: Debeire Kevin (DLR, Germany) +; ############################################################################# +; +; Description +; Computes the warming level crossing year (mean, five percent and +; ninety-five percent quantiles of crossing years) for specified scenarios +; and warming levels. +; +; Required diag_script_info attributes (diagnostics specific) +; scenarios: list with scenarios to be included +; warming_levels: list of warming levels to include in the table +; syears: list with start years of time periods (historical then SSPs) +; eyears: list with end years of the time periods (historical then SSPs) +; begin_ref_year: start year of historical baseline period (e.g. 1995) +; end_ref_year: end year of historical baseline period (e.g. 2014) +; offset: offset between current historical baseline and 1850-1900 period +; label: list of scenario names included in the table +; +; Modification history +; 20220803-A_debe_kevin: written for ESMValTool +; +; ############################################################################# +; A temporary file written by the invoking Python script +; Passes on a number of variables from Python to NCL +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/latlon.ncl" + +load "$diag_scripts/shared/statistics.ncl" + +load "$diag_scripts/shared/plot/aux_plotting.ncl" +load "$diag_scripts/shared/plot/style.ncl" + +begin + enter_msg(DIAG_SCRIPT, "") + + ; 'datasets', 'variables' are fetched from the above 'interface.ncl' file + var0 = variable_info[0]@short_name + info_items = select_metadata_by_name(input_file_info, var0) + dim_MOD = ListCount(info_items) + dim_VAR = ListCount(variable_info) + dataset_names = metadata_att_as_array(info_items, "dataset") + dataset_exps = metadata_att_as_array(info_items, "exp") + dataset_ensembles = metadata_att_as_array(info_items, "ensemble") + n_MOD = count_unique_values(dataset_names) + MOD_list = get_unique_values(dataset_names) + labels = new((/n_MOD/), string) + ; Check required diag_script_info attributes + exit_if_missing_atts(diag_script_info, (/"scenarios", "syears", "eyears"/)) + syears = diag_script_info@syears + eyears = diag_script_info@eyears + offset = diag_script_info@offset +end + +begin + ; Output netcdf directory + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) +end + +begin + if(isvar("MyParam")) then + delete(MyParam) + end if + if(isatt(variable_info[0], "long_name")) then + MyParam = variable_info[0]@long_name + log_debug(" MyParam = " + MyParam) + else + 
+    error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \
+              "'long_name'")
+  end if
+  if (isatt(diag_script_info, "reference_run")) then
+    reference_run_name = diag_script_info@reference_run
+  else
+    log_info("No reference run name given, set to default 'historical'")
+    reference_run_name = "historical"
+  end if
+  attsh = True
+  attsh@exp = reference_run_name
+  item_hist = select_metadata_by_atts(info_items, attsh)
+  dim_MOD_hist = ListCount(item_hist)
+  warming_levels = diag_script_info@warming_levels
+  scenario_list = diag_script_info@scenarios
+  n_SCEN = dimsizes(diag_script_info@scenarios)
+  n_WAR = dimsizes(diag_script_info@warming_levels)
+  t_len = toint(eyears(1)) - toint(syears(1)) + 1
+  mean_table = new((/n_WAR, n_SCEN/), integer)
+  five_perc_table = new((/n_WAR, n_SCEN/), integer)
+  ninetyfive_perc_table = new((/n_WAR, n_SCEN/), integer)
+  number_models_crossing_table = new((/n_WAR, n_SCEN/), integer)
+  number_models_table = new((/n_WAR, n_SCEN/), integer)
+  do issp = 0, n_SCEN-1
+    idx_scen = ind((dataset_exps .eq. diag_script_info@scenarios(issp)))
+    n_MOD = dimsizes(idx_scen)
+    do iwar = 0, n_WAR-1
+      years_for_all_models = new((/n_MOD/), integer)
+      all_models_smoothed_ts = new((/n_MOD, t_len/), float)
+      do imod = 0, n_MOD - 1
+        ; LOAD HISTORICAL EXP and GET historical baseline
+        log_debug("processing " + dataset_names(idx_scen(imod)))
+        idx_histo = ind((dataset_exps .eq. reference_run_name) .and. \
+                        (dataset_names .eq. dataset_names(idx_scen(imod))))
+        Ahisto = read_data(info_items[idx_histo(0)])
+        datahisto = time_operations(Ahisto, -1, -1, "average", "yearly", True)
+        delete(Ahisto)
+        datahisto_glob = coswgt_areaave(datahisto)
+        ind_start = ind(datahisto&year .eq. \
+                        max((/diag_script_info@begin_ref_year, \
+                            datahisto&year(0)/)))
+        ind_end = ind(datahisto&year .eq. diag_script_info@end_ref_year)
+        histo_ref = dim_avg(datahisto_glob(ind_start : ind_end))
+        delete([/datahisto, datahisto_glob/])
+
+        A0 = read_data(info_items[idx_scen(imod)])
+        dims = getvardims(A0)
+        if (dimsizes(dims) .lt. 2) then
+          error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + " dimensions, " + \
+                    "need 2 or 3")
+        end if
+        idx = ind(dims .eq. "lat")
+        if (ismissing(idx)) then
+          error_msg("f", DIAG_SCRIPT, "", "no lat dimension")
+        end if
+        idx = ind(dims .eq. "lon")
+        if (ismissing(idx)) then
+          error_msg("f", DIAG_SCRIPT, "", "no lon dimension")
+        end if
+
+        ; Average over time (function in ./diag_scripts/shared/statistics.ncl)
+        data0 = time_operations(A0, -1, -1, "average", "yearly", True)
+
+        dim_file = dimsizes(data0)  ; file size of scenario file
+        ; Average over globe/region
+        ; (function in ./diag_scripts/shared/latlon.ncl)
+        if (isatt(diag_script_info, "ts_minlat")) then
+          data0_glob = area_operations(data0, diag_script_info@ts_minlat, \
+                                       diag_script_info@ts_maxlat, \
+                                       diag_script_info@ts_minlon, \
+                                       diag_script_info@ts_maxlon, "average", \
+                                       True)
+        else
+          data0_glob = coswgt_areaave(data0)
+        end if
+        running_avg = runave_n(data0_glob, 11, 0, 0) - histo_ref + offset
+        t_len_mod = dimsizes(running_avg)
+        all_models_smoothed_ts(imod, 0:t_len_mod-1) = running_avg
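+        ; The crossing year is derived from this smoothed series: it is the
+        ; first year of the scenario period whose 11-yr running-mean anomaly
+        ; (shifted to 1850-1900 via 'offset') reaches the warming level.
+        ; Illustration (hypothetical numbers): if the smoothed anomaly first
+        ; reaches 2.0 degC at index 37 and syears(1) = 2015, the crossing
+        ; year is 37 + 2015 = 2052; a model that never reaches the level
+        ; yields a missing value.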
warming_levels(iwar)) + year_warming_level = toint(min(idx_warming_level) + \ + diag_script_info@syears(1)) + years_for_all_models(imod) = year_warming_level + delete(idx_warming_level) + delete(A0) + delete(data0) + delete(data0_glob) + delete(running_avg) + delete(histo_ref) + delete(idx_histo) + end do + mean_ts_scenario = dim_avg_n(all_models_smoothed_ts, 0) + std_ts_scenario = dim_stddev_n(all_models_smoothed_ts, 0) + five_perc_ts_scenario = mean_ts_scenario - 1.645 * std_ts_scenario + nin_fiv_perc_ts_scenario = mean_ts_scenario + 1.645 * std_ts_scenario + year_crossing_mean = \ + toint(min(ind(mean_ts_scenario .ge. warming_levels(iwar)))) + \ + diag_script_info@syears(1) + year_crossing_five_perc = \ + toint(min(ind(five_perc_ts_scenario .ge. warming_levels(iwar)))) + \ + diag_script_info@syears(1) + year_crossing_ninety_five_perc = \ + toint(min(ind(nin_fiv_perc_ts_scenario .ge. warming_levels(iwar)))) + \ + diag_script_info@syears(1) + mean_table(iwar, issp) = year_crossing_mean + five_perc_table(iwar, issp) = year_crossing_five_perc + ninetyfive_perc_table(iwar, issp) = year_crossing_ninety_five_perc + number_models_crossing_table(iwar, issp) = num(.not.ismissing( \ + years_for_all_models)) + number_models_table(iwar, issp) = n_MOD + delete(all_models_smoothed_ts) + delete(years_for_all_models) + end do + delete(idx_scen) + end do + to_plot = new((/5, n_WAR, n_SCEN/), integer) + to_plot(0, :, :) = mean_table + to_plot(1, :, :) = five_perc_table + to_plot(2, :, :) = ninetyfive_perc_table + to_plot(3, :, :) = number_models_crossing_table + to_plot(4, :, :) = number_models_table + to_plot!0 = "panel" + to_plot&panel = (/"mean", "five_perc", "ninetyfive_perc", \ + "num_model_crossing", "num_model"/) + to_plot!1 = "warming" + to_plot&warming = warming_levels + to_plot!2 = "ssp" + to_plot&ssp = scenario_list + to_plot@units = "None" + + ; Add to to_plot, as attributes without prefix + to_plot@diag_script = (/DIAG_SCRIPT/) + to_plot@var = var0 + + ; Check units and adjust for plotting if necessary + ; difference in K is the same as difference in degC, only change label + + ; ########################################### + ; # Output to netCDF # + ; ########################################### + to_plot@ncdf = variable_info[0]@diagnostic + ".nc" + ncdf_file = work_dir + "/" + to_plot@ncdf + ncdf_file@existing = "overwrite" + ; Function in ~/interface_scripts/auxiliary.ncl + ncdf_outfile = ncdf_write(to_plot, ncdf_file) + + leave_msg(DIAG_SCRIPT, "") +end diff --git a/esmvaltool/diag_scripts/tebaldi21esd/calc_timeseries_across_realization_stddev_runave.ncl b/esmvaltool/diag_scripts/tebaldi21esd/calc_timeseries_across_realization_stddev_runave.ncl new file mode 100644 index 0000000000..5cf278a046 --- /dev/null +++ b/esmvaltool/diag_scripts/tebaldi21esd/calc_timeseries_across_realization_stddev_runave.ncl @@ -0,0 +1,338 @@ +; ############################################################################# +; Compute across realization Std. dev. for large ensemble experiment(SSP3-7.0) +; Author: Kevin Debeire (DLR, Germany) +; ############################################################################# +; +; Description +; Computes time series of ensemble spreads (inter-member stddev). +; One dataset is used for resampling subsets of 10 members. +; +; Required diag_script_info attributes (diagnostics specific) +; scenarios: list with scenarios to be included in the +; figure, e.g (/"ssp370"/) +; syears: list with start years in time periods (e.g. 
start of historical +; period and SSPs) +; eyears: list with end years in time periods (end year of historical +; period and SSPs) +; begin_ref_year: start year of reference period (e.g. 1995) +; end_ref_year: end year of reference period (e.g. 2014) +; sampled_model: name of dataset on which to sample (ex. CanESM5) +; n_samples: number of samples of size 10 to draw among all the +; ensembles of sampled_model +; runave_window: size window used for the centered running average +; +; Modification history +; 20220803-A_debe_kevin: written for ESMValTool +; +; ############################################################################# +; A temporary file written by the invoking Python script +; Passes on a number of variables from Python to NCL +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/latlon.ncl" + +load "$diag_scripts/shared/statistics.ncl" + +load "$diag_scripts/shared/plot/aux_plotting.ncl" +load "$diag_scripts/shared/plot/style.ncl" + +begin + enter_msg(DIAG_SCRIPT, "") + + ; 'datasets', 'variables' are fetched from the above 'interface.ncl' file + var0 = variable_info[0]@short_name + info_items = select_metadata_by_name(input_file_info, var0) + dim_MOD = ListCount(info_items) + dim_VAR = ListCount(variable_info) + dataset_names = metadata_att_as_array(info_items, "dataset") + dataset_exps = metadata_att_as_array(info_items, "exp") + dataset_ensembles = metadata_att_as_array(info_items, "ensemble") + n_MOD = count_unique_values(dataset_names) + MOD_list = get_unique_values(dataset_names) + ; Check required diag_script_info attributes + exit_if_missing_atts(diag_script_info, (/"scenarios", "syears", "eyears"/)) + syears = diag_script_info@syears + eyears = diag_script_info@eyears +end + +begin + ; Output netcdf directory + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) +end + +begin + if(isvar("MyParam")) then + delete(MyParam) + end if + if(isatt(variable_info[0], "long_name")) then + MyParam = variable_info[0]@long_name + log_debug(" MyParam = " + MyParam) + else + error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \ + "'long_name'") + end if + if (isatt(diag_script_info, "reference_run")) then + reference_run_name = diag_script_info@reference_run + else + log_info("No reference run name given, set to default 'historical'") + reference_run_name = "historical" + end if + + t_len = toint(eyears(1))-toint(syears(0))+1 + + if (isatt(diag_script_info, "n_samples")) .and. \ + (isatt(diag_script_info, "sampled_model")) then + n_samples = toint(diag_script_info@n_samples) + to_plot = new((/n_MOD+n_samples, t_len/), float) + to_plot!0 = "metric" + to_plot&metric = new(n_MOD+n_samples, string, "std") + else + to_plot = new((/n_MOD, t_len/), float) + to_plot!0 = "metric" + to_plot&metric = new(n_MOD, string, "std") + end if + ; Get runave_window + if (isatt(diag_script_info, "runave_window")) then + runave_window = diag_script_info@runave_window + else + runave_window = 1 + end if + to_plot!1 = "year" + to_plot&year = ispan(toint(syears(0)), toint(eyears(1)), 1) + + do imod = 0, n_MOD - 1 + current_mod = MOD_list(imod) + log_debug("processing " + info_items[imod]@dataset) + ; find ssp runs from same model + idx_scen = ind((dataset_names .eq. current_mod) .and. \ + (dataset_exps .eq. 
diag_script_info@scenarios(0))) + dim_ENS = dimsizes(idx_scen) + n_ENS = 10 + collect_n_ENS = new((/n_ENS, t_len/), double) + ; average each of the first n_ENS members over time and region + do iens = 0, n_ENS - 1 ; loop over ensembles + ; LOAD AND AVERAGE SSP data + A0 = read_data(info_items[idx_scen(iens)]) + ; Check dimensions + dims = getvardims(A0) + if (dimsizes(dims) .lt. 2) then + error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + " dimensions, " + \ + "need 2 or 3") + end if + idx = ind(dims .eq. "lat") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lat dimension") + end if + idx = ind(dims .eq. "lon") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lon dimension") + end if + ; Average over time (function in ./diag_scripts/shared/statistics.ncl) + data0 = time_operations(A0, -1, -1, "average", "yearly", True) + dim_file = dimsizes(data0) ; file size of scenario file + ; Average over globe/region + ; (function in ./diag_scripts/shared/latlon.ncl) + if (isatt(diag_script_info, "ts_minlat")) then + data0_glob = area_operations(data0, diag_script_info@ts_minlat, \ + diag_script_info@ts_maxlat, \ + diag_script_info@ts_minlon, \ + diag_script_info@ts_maxlon, "average", \ + True) + else + data0_glob = coswgt_areaave(data0) + end if + + ; LOAD AND AVERAGE HISTORICAL data + idx_hist = ind((dataset_names .eq. current_mod) .and. \ + (dataset_exps .eq. reference_run_name) .and. \ + (dataset_ensembles .eq. \ + dataset_ensembles(idx_scen(iens)))) + A1 = read_data(info_items[idx_hist]) + dims1 = getvardims(A1) + if (dimsizes(dims1) .lt. 2) then + error_msg("f", DIAG_SCRIPT, "", dimsizes(dims1) + " dimensions, " + \ + "need 2 or 3") + end if + idx1 = ind(dims1 .eq. "lat") + if (ismissing(idx1)) then + error_msg("f", DIAG_SCRIPT, "", "no lat dimension") + end if + idx1 = ind(dims1 .eq. "lon") + if (ismissing(idx1)) then + error_msg("f", DIAG_SCRIPT, "", "no lon dimension") + end if + ; Average over time (function in ./diag_scripts/shared/statistics.ncl) + data1 = time_operations(A1, -1, -1, "average", "yearly", True) + dim_file1 = dimsizes(data1) ; file size of historical file + ; Average over globe/region + ; (function in ./diag_scripts/shared/latlon.ncl) + if (isatt(diag_script_info, "ts_minlat")) then + data1_glob = area_operations(data1, diag_script_info@ts_minlat, \ + diag_script_info@ts_maxlat, \ + diag_script_info@ts_minlon, \ + diag_script_info@ts_maxlon, "average", \ + True) + else + data1_glob = coswgt_areaave(data1) + end if + ensemble = dataset_ensembles(idx_scen(iens)) + model = current_mod + data_glob = array_append_record(data1_glob, data0_glob, 0) + asciiwrite(work_dir + "/" + var0 + "_timeseries_" + model + "_" + \ + ensemble + ".txt", data_glob) + ; store for all ens (pr: convert kg m-2 s-1 to mm day-1) + if var0 .eq. "pr" then + collect_n_ENS(iens, :) = runave_n(data_glob, runave_window, 0, 0) \ + * 86400.0 + else + collect_n_ENS(iens, :) = runave_n(data_glob, runave_window, 0, 0) + end if + delete(data_glob) + delete(A0) + delete(A1) + delete(data0) + delete(data0_glob) + delete(data1) + delete(data1_glob) + end do + to_plot(imod, :) = tofloat(dim_stddev_n(collect_n_ENS, 0)) + delete(idx_scen) + delete(collect_n_ENS) + end do + ; SAMPLING across ensemble of SAMPLED MODEL + if (isatt(diag_script_info, "sampled_model") .and. \ + isatt(diag_script_info, "n_samples")) then + sampled_model = diag_script_info@sampled_model + n_samples = diag_script_info@n_samples + idx_samp_mod = ind((dataset_names .eq. sampled_model) .and. \ + (dataset_exps .eq. 
diag_script_info@scenarios(0))) + + dim_ENS = dimsizes(idx_samp_mod) + n_ENS = 10 + random_setallseed(1234567890, 236484749) + do i_samp = 0, n_samples - 1 + print("Drawing sample #" + i_samp) + collect_n_ENS = new((/n_ENS, t_len/), double) + ; randomly sample across ensemble for sampled_model + ; (random permutation of member indices; keep the first 10, + ; i.e. a sample without replacement) + rand_idx_50 = generate_sample_indices(dim_ENS, 0) + rand_idx = idx_samp_mod(rand_idx_50(0:9)) + print("current sample indices " + rand_idx) + do iens = 0, n_ENS - 1 + A0 = read_data(info_items[rand_idx(iens)]) + ; Check dimensions + dims = getvardims(A0) + if (dimsizes(dims) .lt. 2) then + error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + " dimensions, " + \ + "need 2 or 3") + end if + idx = ind(dims .eq. "lat") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lat dimension") + end if + idx = ind(dims .eq. "lon") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lon dimension") + end if + ; Average over time (function in ./diag_scripts/shared/statistics.ncl) + data0 = time_operations(A0, -1, -1, "average", "yearly", True) + dim_file = dimsizes(data0) ; file size of scenario file + ; Average over globe/region + ; (function in ./diag_scripts/shared/latlon.ncl) + if (isatt(diag_script_info, "ts_minlat")) then + data0_glob = area_operations(data0, diag_script_info@ts_minlat, \ + diag_script_info@ts_maxlat, \ + diag_script_info@ts_minlon, \ + diag_script_info@ts_maxlon, "average", \ + True) + else + data0_glob = coswgt_areaave(data0) + end if + + ; LOAD AND AVERAGE HISTORICAL data + idx_hist = ind((dataset_names .eq. sampled_model) .and. \ + (dataset_exps .eq. reference_run_name) .and. \ + (dataset_ensembles .eq. \ + dataset_ensembles(rand_idx(iens)))) + A1 = read_data(info_items[idx_hist]) + dims1 = getvardims(A1) + if (dimsizes(dims1) .lt. 2) then + error_msg("f", DIAG_SCRIPT, "", dimsizes(dims1) + " dimensions, " + \ + "need 2 or 3") + end if + idx1 = ind(dims1 .eq. "lat") + if (ismissing(idx1)) then + error_msg("f", DIAG_SCRIPT, "", "no lat dimension") + end if + idx1 = ind(dims1 .eq. "lon") + if (ismissing(idx1)) then + error_msg("f", DIAG_SCRIPT, "", "no lon dimension") + end if + ; Average over time (function in ./diag_scripts/shared/statistics.ncl) + data1 = time_operations(A1, -1, -1, "average", "yearly", True) + dim_file1 = dimsizes(data1) + ; Average over globe/region + ; (function in ./diag_scripts/shared/latlon.ncl) + if (isatt(diag_script_info, "ts_minlat")) then + data1_glob = area_operations(data1, diag_script_info@ts_minlat, \ + diag_script_info@ts_maxlat, \ + diag_script_info@ts_minlon, \ + diag_script_info@ts_maxlon, "average", \ + True) + else + data1_glob = coswgt_areaave(data1) + end if + ensemble = dataset_ensembles(rand_idx(iens)) + model = sampled_model + data_glob = array_append_record(data1_glob, data0_glob, 0) + + ; store for all ens (pr: convert kg m-2 s-1 to mm day-1) + if var0 .eq. 
"pr" then + collect_n_ENS(iens, :) = runave_n(data_glob, runave_window, 0, 0) \ + * 86400.0 + else + collect_n_ENS(iens, :) = runave_n(data_glob, runave_window, 0, 0) + end if + delete(data_glob) + delete(A0) + delete(A1) + delete(data0) + delete(data0_glob) + delete(data1) + delete(data1_glob) + end do + to_plot(n_MOD+i_samp, :) = tofloat(dim_stddev_n(collect_n_ENS, 0)) + delete(collect_n_ENS) + end do + end if + + ; ########################################### + ; # Other Metadata: diag_script, var # + ; ########################################### + ; Add to to_plot, as attributes without prefix + if(isatt(to_plot, "diag_script")) then ; Add to existing entries + temp = to_plot@diag_script + delete(to_plot@diag_script) + to_plot@diag_script = array_append_record(temp, (/DIAG_SCRIPT/), 0) + delete(temp) + else ; Add as new attribute + to_plot@diag_script = (/DIAG_SCRIPT/) + end if + to_plot@var = var0 ; Overwrite existing entry + if(isatt(variable_info[0], "long_name")) then + to_plot@var_long_name = variable_info[0]@long_name + end if + if(isatt(variable_info[0], "units")) then + to_plot@units = variable_info[0]@units + end if + ; ########################################### + ; # Output to netCDF # + ; ########################################### + to_plot@ncdf = variable_info[0]@diagnostic + ".nc" + ncdf_file = work_dir + "/" + to_plot@ncdf + ncdf_file@existing = "overwrite" + ; Function in ~/interface_scripts/auxiliary.ncl + ncdf_outfile = ncdf_write(to_plot, ncdf_file) + + leave_msg(DIAG_SCRIPT, "") +end diff --git a/esmvaltool/diag_scripts/tebaldi21esd/calc_timeseries_mean_spread_runave.ncl b/esmvaltool/diag_scripts/tebaldi21esd/calc_timeseries_mean_spread_runave.ncl new file mode 100644 index 0000000000..ceda4512fb --- /dev/null +++ b/esmvaltool/diag_scripts/tebaldi21esd/calc_timeseries_mean_spread_runave.ncl @@ -0,0 +1,732 @@ +; ############################################################################# +; Computes global running-average timeseries (mean and spread) for historical +; and up to 5 scenarios +; Derived and adapted from +; diag_scripts/ipcc_ar5/ch12_tsline_mean_spread.ncl +; Author: Debeire Kevin (DLR, Germany) +; ############################################################################# +; +; Description +; computes multi-model time series of change against historical baseline +; for specified scenarios with spread. A running average with specified +; window is performed. +; +; Required diag_script_info attributes (diagnostics specific) +; scenarios: list with scenarios to be included +; syears: list with start years of time periods (historical then SSPs) +; eyears: list with end years of the time periods (historical then SSPs) +; begin_ref_year: start year of historical baseline period (e.g. 1995) +; end_ref_year: end year of historical baseline period (e.g. 2014) +; label: list of scenario names included in the legend +; +; Optional diag_script_info attributes (diagnostic specific) +; runave_window_size: size of te running average window (default 11) +; spread: how many standard dev. 
to calculate the spread with (default 1) +; percent: determines if difference is in percent (0, 1, default = 0) +; model_nr: whether to save number of models used for each scenario +; +; Modification history +; 20220803-A_debe_kevin: written for ESMValTool +; +; ############################################################################# +; A temporary file written by the invoking Python script +; Passes on a number of variables from Python to NCL +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/latlon.ncl" + +load "$diag_scripts/shared/statistics.ncl" + +load "$diag_scripts/shared/plot/aux_plotting.ncl" +load "$diag_scripts/shared/plot/style.ncl" + +begin + enter_msg(DIAG_SCRIPT, "") + + ; 'datasets', 'variables' are fetched from the above 'interface.ncl' file + var0 = variable_info[0]@short_name + info_items = select_metadata_by_name(input_file_info, var0) + dim_MOD = ListCount(info_items) + dim_VAR = ListCount(variable_info) + + ; Check required diag_script_info attributes + exit_if_missing_atts(diag_script_info, (/"scenarios", "syears", "eyears", \ + "begin_ref_year", "end_ref_year"/)) +end + +begin + ; Output netcdf directory + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) +end + +begin + if(isvar("MyParam")) then + delete(MyParam) + end if + if(isatt(variable_info[0], "long_name")) then + MyParam = variable_info[0]@long_name + log_debug(" MyParam = " + MyParam) + else + error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \ + "'long_name'") + end if + ; ########################################### + ; # Get data and average annual and globally# + ; ########################################### + ; How many historical model runs? + if (isatt(diag_script_info, "reference_run")) then + reference_run_name = diag_script_info@reference_run + else + log_info("No reference run name given, set to default 'historical'") + reference_run_name = "historical" + end if + + attsh = True + attsh@exp = reference_run_name + item_hist = select_metadata_by_atts(info_items, attsh) + dim_MOD_hist = ListCount(item_hist) + + ; get data from first dataset for historical period + imod = 0 ; NCL array indices start from zero + idx_hist = item_hist[0]@recipe_dataset_index + log_debug("processing " + info_items[idx_hist]@dataset + "_" \ + + info_items[idx_hist]@exp + "_" \ + + info_items[idx_hist]@ensemble) + + ; See ./interface_scripts/data_handling.ncl + A0 = read_data(info_items[idx_hist]) + + ; Check dimensions + dims = getvardims(A0) + if (dimsizes(dims) .lt. 2) then + error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + " dimensions, " + \ + "need 2 or 3") + end if + idx = ind(dims .eq. "lat") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lat dimension") + end if + idx = ind(dims .eq. 
"lon") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lon dimension") + end if + + ; Average over time (function in ./diag_scripts/shared/statistics.ncl) + data1 = time_operations(A0, -1, -1, "average", "yearly", True) + + dim_file = dimsizes(data1) ; file size of historical file + ; Average over globe/region + ; (function in ./diag_scripts/shared/latlon.ncl) + if (isatt(diag_script_info, "ts_minlat")) then + data1_glob = area_operations(data1, diag_script_info@ts_minlat, \ + diag_script_info@ts_maxlat, \ + diag_script_info@ts_minlon, \ + diag_script_info@ts_maxlon, "average", \ + True) + else + data1_glob = coswgt_areaave(data1) + end if + + ref_data2 = new((/dim_MOD_hist/), float, data1_glob@_FillValue) + data2_rel = new((/dim_MOD_hist, dim_file(0)/), float, \ + data1_glob@_FillValue) + ; How many scenarios? + dim_scen = dimsizes(diag_script_info@scenarios) + ; start and end years of timeperiods + syears = diag_script_info@syears + eyears = diag_script_info@eyears + nperiods = dimsizes(syears) + nyears = toint(eyears(nperiods - 1)) - toint(syears(0)) + 1 + nyearsp1 = toint(eyears(nperiods - 1)) - toint(eyears(0)) + + ; How many ssp2.6 model runs? + atts1 = True + atts1@exp = diag_script_info@scenarios(0) + item_ssp26 = select_metadata_by_atts(info_items, atts1) + dim_MOD_26 = ListCount(item_ssp26) + ; indicate array for scenarios + data26_rel = new((/dim_MOD_26, nyearsp1/), float, \ + data1_glob@_FillValue) + i26 = 0 + + if (dim_scen .gt. 1) then + atts2 = True + atts2@exp = diag_script_info@scenarios(1) + item_ssp45 = select_metadata_by_atts(info_items, atts2) + dim_MOD_45 = ListCount(item_ssp45) + data45_rel = new((/dim_MOD_45, nyearsp1/), float, \ + data1_glob@_FillValue) + i45 = 0 + end if + if (dim_scen .gt. 2) then + atts3 = True + atts3@exp = diag_script_info@scenarios(2) + item_ssp60 = select_metadata_by_atts(info_items, atts3) + dim_MOD_60 = ListCount(item_ssp60) + data60_rel = new((/dim_MOD_60, nyearsp1/), float, \ + data1_glob@_FillValue) + i60 = 0 + end if + if (dim_scen .gt. 3) then + atts4 = True + atts4@exp = diag_script_info@scenarios(3) + item_ssp85 = select_metadata_by_atts(info_items, atts4) + dim_MOD_85 = ListCount(item_ssp85) + data85_rel = new((/dim_MOD_85, nyearsp1/), float, \ + data1_glob@_FillValue) + i85 = 0 + end if + if (dim_scen .gt. 4) then + atts5 = True + atts5@exp = diag_script_info@scenarios(4) + item_ssp100 = select_metadata_by_atts(info_items, atts5) + dim_MOD_100 = ListCount(item_ssp100) + data100_rel = new((/dim_MOD_100, nyearsp1/), float, \ + data1_glob@_FillValue) + i100 = 0 + end if + if (dim_scen .gt. 5) then + error_msg("f", DIAG_SCRIPT, "", "Too many different scenarios, " + \ + "code not set up for more than 5.") + end if + + ; set runave_window parameter + if (isatt(diag_script_info, "runave_window")) then + runave_window = diag_script_info@runave_window + half_window = toint(floor(runave_window/2.)) + else + runave_window = 11 + half_window = toint(floor(runave_window/2.)) + end if + + ; set percent parameter + if (isatt(diag_script_info, "percent")) then + percent = diag_script_info@percent + else + percent = 0 + end if + + do imod = 0, dim_MOD_hist - 1 + idx_hist = item_hist[imod]@recipe_dataset_index + need_runave_data2rel = True + if (imod .eq. 
0) then + data2 = data1 + delete(A0) + else + A0 = read_data(info_items[idx_hist]) + data2 = time_operations(A0, -1, -1, "average", "yearly", True) + delete(A0) + end if + ; Average over globe (function in + ; ./diag_scripts/shared/statistics.ncl) + data2_glob = coswgt_areaave(data2) + ind_start = ind(data2&year .eq. diag_script_info@begin_ref_year) + ind_end = ind(data2&year .eq. diag_script_info@end_ref_year) + ref_data2(imod) = dim_avg(data2_glob(ind_start : ind_end)) + dim_data2g = dimsizes(data2_glob) + if (dim_data2g .ne. dim_file(0)) then + tdiff = dim_file(0) - dim_data2g + data2_rel(imod, 0 : tdiff - 1) = data2_glob@_FillValue + data2_rel(imod, tdiff :) = data2_glob - ref_data2(imod) + else + data2_rel(imod, :) = data2_glob - ref_data2(imod) + end if + if percent .eq. 1 then + data2_rel(imod, :) = 100*(data2_rel(imod, :)/ref_data2(imod)) + end if + ; clean up + delete([/data2, data2_glob/]) + + ; find all other runs from this model and calculate relative ts + atts = True + atts@dataset = info_items[idx_hist]@dataset + atts@exp = diag_script_info@scenarios(0) + atts@ensemble = info_items[idx_hist]@ensemble + item_26 = select_metadata_by_atts(info_items, atts) + if (ListCount(item_26) .ne. 0) then + A1 = read_data(item_26[0]) + data3 = time_operations(A1, -1, -1, "average", "yearly", True) + data3_glob = coswgt_areaave(data3) + dim_data3g = dimsizes(data3_glob) + if (dim_data3g .ne. nyearsp1) then + if (dim_data3g .gt. nyearsp1) then + error_msg("w", DIAG_SCRIPT, "", "Length of dataset " + \ + info_items[idx_hist]@dataset + " in recipe is " + \ + "longer than period given in recipe (" + nyearsp1 + \ + " years)") + else + data26_rel(i26, 0 : dim_data3g - 1) = data3_glob - ref_data2(imod) + data26_rel(i26, dim_data3g :) = data3_glob@_FillValue + end if + else + data26_rel(i26, :) = data3_glob - ref_data2(imod) + end if + if percent .eq. 1 then + data26_rel(i26, :) = 100*(data26_rel(i26, :)/ref_data2(imod)) + end if + + app_arr26 = array_append_record(data2_rel(imod, dim_data2g-half_window:\ + dim_data2g-1), \ + data26_rel(i26, :), 0) + tmp_arr26 = runave_n(app_arr26, runave_window, 0, 0) + dim_tmp26 = dimsizes(tmp_arr26) + data26_rel(i26, :) = tmp_arr26(half_window:dim_tmp26 - 1) + delete(app_arr26) + delete(tmp_arr26) + + i26 = i26 + 1 + ; clean up + delete([/A1, data3, data3_glob/]) + else + log_info("No corresponding model for historial run " + \ + info_items[idx_hist]@dataset + " found in scenario " + \ + diag_script_info@scenarios(0) + \ + ", cannot calculate relative change.") + end if + delete(item_26) + + if (dim_scen .gt. 1) then + atts@exp = diag_script_info@scenarios(1) + item_45 = select_metadata_by_atts(info_items, atts) + + if (ListCount(item_45) .ne. 0) then + A1 = read_data(item_45[0]) + data3 = time_operations(A1, -1, -1, "average", "yearly", True) + data3_glob = coswgt_areaave(data3) + dim_data3g = dimsizes(data3_glob) + if (dim_data3g .ne. nyearsp1) then + if (dim_data3g .gt. nyearsp1) then + error_msg("w", diag_script, "", "Length of dataset " + \ + info_items[idx_hist]@dataset + " in recipe is " + \ + "longer than period given in recipe (" + nyearsp1 + \ + " years)") + else + data45_rel(i45, 0:dim_data3g - 1) = data3_glob - ref_data2(imod) + data45_rel(i45, dim_data3g:) = data3_glob@_FillValue + end if + else + data45_rel(i45, :) = data3_glob - ref_data2(imod) + end if + if percent .eq. 
1 then + data45_rel(i45, :) = 100 * (data45_rel(i45, :) / ref_data2(imod)) + end if + + app_arr45 = array_append_record(data2_rel(imod, dim_data2g-half_window\ + :dim_data2g-1), \ + data45_rel(i45, :), 0) + tmp_arr45 = runave_n(app_arr45, runave_window, 0, 0) + dim_tmp45 = dimsizes(tmp_arr45) + data45_rel(i45, :) = tmp_arr45(half_window:dim_tmp45 - 1) + delete(app_arr45) + delete(tmp_arr45) + + i45 = i45 + 1 + ; clean up + delete([/A1, data3, data3_glob/]) + else + log_info("No corresponding model ensemble for historial run " + \ + info_items[idx_hist]@dataset + " found in scenario " + \ + diag_script_info@scenarios(1) + \ + ", cannot calculate relative change.") + end if + delete(item_45) + end if + + if (dim_scen .gt. 2) then + atts@exp = diag_script_info@scenarios(2) + item_60 = select_metadata_by_atts(info_items, atts) + + if (ListCount(item_60) .ne. 0) then + A1 = read_data(item_60[0]) + data3 = time_operations(A1, -1, -1, "average", "yearly", True) + data3_glob = coswgt_areaave(data3) + dim_data3g = dimsizes(data3_glob) + if (dim_data3g .ne. nyearsp1) then + if (dim_data3g .gt. nyearsp1) then + error_msg("w", diag_script, "", "Length of dataset " + \ + info_items[idx_hist]@dataset + " in recipe is " + \ + "longer than period given in recipe (" + nyearsp1 + \ + " years)") + else + data60_rel(i60, 0 : dim_data3g - 1) = data3_glob - ref_data2(imod) + data60_rel(i60, dim_data3g:) = data3_glob@_FillValue + end if + else + data60_rel(i60, :) = data3_glob - ref_data2(imod) + end if + if percent .eq. 1 then + data60_rel(i60, :) = 100 * (data60_rel(i60, :) / ref_data2(imod)) + end if + + if (dim_scen .eq. 3) then + app_arr60 = array_append_record(data2_rel(imod, dim_data2g - \ + half_window:\ + dim_data2g - 1), \ + data60_rel(i60, :), 0) + tmp_arr60 = runave_n(app_arr60, runave_window, 0, 0) + app_arr2 = array_append_record(data2_rel(imod, :), \ + data60_rel(i60, 0:4), 0) + tmp_arr2 = runave_n(app_arr2, runave_window, 0, 0) + dim_tmp60 = dimsizes(tmp_arr60) + dim_tmp2 = dimsizes(tmp_arr2) + data60_rel(i60, :) = tmp_arr60(half_window:dim_tmp60 - 1) + data2_rel(imod, :) = tmp_arr2(0:dim_tmp2 - 6) + need_runave_data2rel = False + delete(app_arr60) + delete(tmp_arr60) + delete(app_arr2) + delete(tmp_arr2) + else + app_arr60 = array_append_record(data2_rel(imod, dim_data2g - \ + half_window:dim_data2g-1), \ + data60_rel(i60, :), 0) + tmp_arr60 = runave_n(app_arr60, runave_window, 0, 0) + dim_tmp60 = dimsizes(tmp_arr60) + data60_rel(i60, :) = tmp_arr60(half_window:dim_tmp60 - 1) + delete(app_arr60) + delete(tmp_arr60) + end if + i60 = i60 + 1 + ; clean up + delete([/A1, data3, data3_glob/]) + else + log_info("No corresponding model ensemble for historial run " + \ + info_items[idx_hist]@dataset + " found in scenario " + \ + diag_script_info@scenarios(2) + \ + ", cannot calculate relative change.") + end if + delete(item_60) + end if + + if (dim_scen .gt. 3) then + atts@exp = diag_script_info@scenarios(3) + item_85 = select_metadata_by_atts(info_items, atts) + + if (ListCount(item_85) .ne. 0) then + A1 = read_data(item_85[0]) + data3 = time_operations(A1, -1, -1, "average", "yearly", True) + data3_glob = coswgt_areaave(data3) + dim_data3g = dimsizes(data3_glob) + if (dim_data3g .ne. nyearsp1) then + if (dim_data3g .gt. 
nyearsp1) then + error_msg("w", diag_script, "", "Length of dataset " + \ + info_items[idx_hist]@dataset + " in recipe is " + \ + "longer than period given in recipe (" + nyearsp1 + \ + " years)") + else + data85_rel(i85, 0:dim_data3g - 1) = data3_glob - ref_data2(imod) + data85_rel(i85, dim_data3g:) = data3_glob@_FillValue + end if + else + data85_rel(i85, :) = data3_glob - ref_data2(imod) + end if + if percent .eq. 1 then + data85_rel(i85, :) = 100 * (data85_rel(i85, :) / ref_data2(imod)) + end if + + app_arr85 = array_append_record(data2_rel(imod, dim_data2g-half_window\ + :dim_data2g-1), \ + data85_rel(i85, :), 0) + tmp_arr85 = runave_n(app_arr85, runave_window, 0, 0) + dim_tmp85 = dimsizes(tmp_arr85) + data85_rel(i85, :) = tmp_arr85(half_window:dim_tmp85 - 1) + delete(app_arr85) + delete(tmp_arr85) + + i85 = i85 + 1 + ; clean up + delete(A1) + delete(data3) + delete(data3_glob) + else + log_info("No corresponding model ensemble for historial run " + \ + info_items[idx_hist]@dataset + " found in scenario " + \ + diag_script_info@scenarios(3) + \ + ", cannot calculate relative change, set to missing.") + end if + delete(item_85) + end if + if (dim_scen .gt. 4) then + atts@exp = diag_script_info@scenarios(4) + item_100 = select_metadata_by_atts(info_items, atts) + + if (ListCount(item_100) .ne. 0) then + A1 = read_data(item_100[0]) + data3 = time_operations(A1, -1, -1, "average", "yearly", True) + data3_glob = coswgt_areaave(data3) + dim_data3g = dimsizes(data3_glob) + if (dim_data3g .ne. nyearsp1) then + if (dim_data3g .gt. nyearsp1) then + error_msg("w", diag_script, "", "Length of dataset " + \ + info_items[idx_hist]@dataset + " in recipe is " + \ + "longer than period given in recipe (" + nyearsp1 + \ + " years)") + else + data100_rel(i100, 0:dim_data3g - 1) = data3_glob - ref_data2(imod) + data100_rel(i100, dim_data3g:) = data3_glob@_FillValue + end if + else + data100_rel(i100, :) = data3_glob - ref_data2(imod) + end if + if percent .eq. 
1 then + data100_rel(i100, :) = 100 * (data100_rel(i100, :) / ref_data2(imod)) + end if + + app_arr100 = array_append_record(data2_rel(imod, dim_data2g - \ + half_window:dim_data2g - 1), \ + data100_rel(i100, :), 0) + tmp_arr100 = runave_n(app_arr100, runave_window, 0, 0) + app_arr2 = array_append_record(data2_rel(imod, :), \ + data100_rel(i100, 0:4), 0) + tmp_arr2 = runave_n(app_arr2, runave_window, 0, 0) + dim_tmp100 = dimsizes(tmp_arr100) + dim_tmp2 = dimsizes(tmp_arr2) + data100_rel(i100, :) = tmp_arr100(half_window:dim_tmp100 - 1) + data2_rel(imod, :) = tmp_arr2(0:dim_tmp2-6) + need_runave_data2rel = False + delete(app_arr100) + delete(tmp_arr100) + delete(app_arr2) + delete(tmp_arr2) + i100 = i100 + 1 + ; clean up + delete(A1) + delete(data3) + delete(data3_glob) + else + log_info("No corresponding model ensemble for historical run " + \ + info_items[idx_hist]@dataset + " found in scenario " + \ + diag_script_info@scenarios(4) + \ + ", cannot calculate relative change, set to missing.") + end if + delete(item_100) + end if + if need_runave_data2rel then + data2_rel(imod, :) = runave_n(data2_rel(imod, :), runave_window, 0, 0) + end if + end do ; imod + + ; average and standard deviation over all models + datahist_avg = dim_avg_n_Wrap(data2_rel, 0) + datahist_std = dim_stddev_n_Wrap(data2_rel, 0) + + ; put all data from 1850-2100 into mean_val and std_val + mean_val = new((/dim_scen + 1, nyears/), typeof(data2_rel), \ + data2_rel@_FillValue) + std_val = new((/dim_scen + 1, nyears/), typeof(data2_rel), \ + data2_rel@_FillValue) + p0 = toint(eyears(0)) - toint(syears(0)) + p1 = toint(eyears(0)) - toint(syears(0)) + 1 + mean_val(0, 0 : p0) = datahist_avg + std_val(0, 0 : p0) = datahist_std + + data26_avg = dim_avg_n_Wrap(data26_rel, 0) + data26_std = dim_stddev_n_Wrap(data26_rel, 0) + mean_val(1, p1 :) = data26_avg + std_val(1, p1 :) = data26_std + if (dim_scen .gt. 1) then + data45_avg = dim_avg_n_Wrap(data45_rel, 0) + data45_std = dim_stddev_n_Wrap(data45_rel, 0) + mean_val(2, p1 :) = data45_avg + std_val(2, p1 :) = data45_std + end if + if (dim_scen .gt. 2) then + data60_avg = dim_avg_n_Wrap(data60_rel, 0) + data60_std = dim_stddev_n_Wrap(data60_rel, 0) + mean_val(3, p1 :) = data60_avg + std_val(3, p1 :) = data60_std + end if + if (dim_scen .gt. 3) then + data85_avg = dim_avg_n_Wrap(data85_rel, 0) + data85_std = dim_stddev_n_Wrap(data85_rel, 0) + mean_val(4, p1 :) = data85_avg + std_val(4, p1 :) = data85_std + end if + if (dim_scen .gt. 4) then + data100_avg = dim_avg_n_Wrap(data100_rel, 0) + data100_std = dim_stddev_n_Wrap(data100_rel, 0) + mean_val(5, p1 :) = data100_avg + std_val(5, p1 :) = data100_std + end if + + ; check number of models for timeperiods in scenarios + ; if there are less than 5 models set to missing + runs = new((/1 + dim_scen, nperiods/), integer) + atts_runs = True + atts_runs@exp = reference_run_name + item_runs = select_metadata_by_atts(info_items, atts_runs) + runs(0, 0) = ListCount(item_runs) + do scen = 0, dim_scen - 1 + do p = 1, nperiods - 1 + atts_runs@exp = diag_script_info@scenarios(scen) + item_runs = select_metadata_by_atts(info_items, atts_runs) + nrp1 = NewList("fifo") + nrp2 = NewList("fifo") + do r = 0, ListCount(item_runs) - 1 + if (item_runs[r]@end_year .gt. toint(diag_script_info@eyears(p))) then + ListAppend(nrp1, item_runs[r]) + elseif (item_runs[r]@end_year .eq. 
\ + toint(diag_script_info@eyears(p))) then + ListAppend(nrp2, item_runs[r]) + end if + end do + tmp = ListCount(nrp1) + ; check if only found missing, in that case set tmp to zero + if tmp .eq. 0 then + tmp = 0 + end if + tmp2 = ListCount(nrp2) + ; check if only found missing, in that case set tmp2 to zero + if tmp2 .eq. 0 then + tmp2 = 0 + end if + runs(scen + 1, p) = tmp2 + tmp + delete([/tmp, tmp2, nrp1, nrp2/]) + if (runs(scen + 1, p) .le. 4) .and. (p .ne. 0) then + p2 = toint(eyears(p - 1)) - toint(syears(0)) + p3 = toint(eyears(p)) - toint(syears(0)) + mean_val(scen + 1, p2 : p3) = mean_val@_FillValue + std_val(scen + 1, p2 : p3) = std_val@_FillValue + log_info("Scenario " + diag_script_info@scenarios(scen) + \ + " in period ending " + diag_script_info@eyears(p) + \ + " has less than 5 models, set to missing.") + end if + end do + end do + dims_data = dimsizes(mean_val) + ; FIX MODEL_NR + dim_1 = dimsizes(data26_rel) + if (dim_scen .gt. 1) then + dim_2 = dimsizes(data45_rel) + runs(2, 1) = dim_2(0) + end if + if (dim_scen .gt. 2) then + dim_3 = dimsizes(data60_rel) + runs(3, 1) = dim_3(0) + end if + if (dim_scen .gt. 3) then + dim_4 = dimsizes(data85_rel) + runs(4, 1) = dim_4(0) + end if + if (dim_scen .gt. 4) then + dim_5 = dimsizes(data100_rel) + runs(5, 1) = dim_5(0) + end if + runs(1, 1) = dim_1(0) + + to_plot = new((/3 * dims_data(0), dims_data(1)/), float) + + if (isatt(diag_script_info, "spread")) then + spread = diag_script_info@spread + else + spread = 1.0 + end if + do j = 0, dims_data(0) - 1 + ; if variable sw or lw TOA flux: change direction + if (var0 .eq. "rlut" .or. var0 .eq. "rsut") then + to_plot(j * 3, :) = - mean_val(j, :) + else + to_plot(j * 3, :) = mean_val(j, :) + end if + to_plot((j * 3) + 1, :) = to_plot(j * 3, :) + spread * std_val(j, :) + to_plot((j * 3) + 2, :) = to_plot(j * 3, :) - spread * std_val(j, :) + end do + copy_VarMeta(mean_val, to_plot) + to_plot!0 = "metric" + str_spread = sprintf("%5.2f", spread) + if (dim_scen .eq. 1) then + to_plot&metric = (/"mean", "+" + str_spread + "std", "-" + \ + str_spread + "std", "mean", "+" + \ + str_spread + "std", "-" + str_spread + \ + "std"/) + elseif (dim_scen .eq. 2) then + to_plot&metric = (/"mean", "+" + str_spread + "std", "-" + \ + str_spread + "std", "mean", "+" + \ + str_spread + "std", "-" + str_spread + \ + "std", "mean", "+" + str_spread + "std", "-" \ + + str_spread + "std"/) + elseif (dim_scen .eq. 3) then + to_plot&metric = (/"mean", "+" + str_spread + "std", "-" + \ + str_spread + "std", "mean", "+" + \ + str_spread + "std", "-" + str_spread + \ + "std", "mean", "+" + str_spread + "std", "-" + \ + str_spread + "std", "mean", "+" + \ + str_spread + "std", "-" + str_spread + \ + "std"/) + elseif (dim_scen .eq. 4) then + to_plot&metric = (/"mean", "+" + str_spread + "std", "-" + \ + str_spread + "std", "mean", "+" + \ + str_spread + "std", "-" + str_spread + \ + "std", "mean", "+" + str_spread + "std", "-" + \ + str_spread + "std", "mean", "+" + \ + str_spread + "std", "-" + str_spread + \ + "std", "mean", "+" + str_spread + "std", "-" + \ + str_spread + "std"/) + elseif (dim_scen .eq. 
5) then + to_plot&metric = (/"mean", "+" + str_spread + "std", "-" + \ + str_spread + "std", "mean", "+" + \ + str_spread + "std", "-" + str_spread + \ + "std", "mean", "+" + str_spread + "std", "-" + \ + str_spread + "std", "mean", "+" + \ + str_spread + "std", "-" + str_spread + \ + "std", "mean", "+" + str_spread + "std", "-" + \ + str_spread + "std", "mean", "+" + str_spread + \ + "std", "-" + str_spread + "std"/) + end if + to_plot!1 = "year" + to_plot&year = ispan(toint(syears(0)), toint(eyears(nperiods - 1)), 1) + + ; ########################################### + ; # Other Metadata: diag_script, var # + ; ########################################### + ; Add to to_plot, as attributes without prefix + if(isatt(to_plot, "diag_script")) then ; Add to existing entries + temp = to_plot@diag_script + delete(to_plot@diag_script) + to_plot@diag_script = array_append_record(temp, (/DIAG_SCRIPT/), 0) + delete(temp) + else ; Add as new attribute + to_plot@diag_script = (/DIAG_SCRIPT/) + end if + to_plot@var = var0 ; Overwrite existing entry + if(isatt(variable_info[0], "long_name")) then + to_plot@var_long_name = variable_info[0]@long_name + end if + if(isatt(variable_info[0], "units")) then + to_plot@units = variable_info[0]@units + end if + if (percent .eq. 1) then + to_plot@units = "%" + end if + runs!0 = "scenario" + runs&scenario = diag_script_info@label + runs!1 = "period" + runs&period = diag_script_info@syears + if(isatt(runs, "diag_script")) then ; Add to existing entries + temp = runs@diag_script + delete(runs@diag_script) + runs@diag_script = array_append_record(temp, (/DIAG_SCRIPT/), 0) + delete(temp) + else ; Add as new attribute + runs@diag_script = (/DIAG_SCRIPT/) + end if + runs@var = "nr" + runs@var_long_name = "number of model runs per scenario and period" + runs@units = 1 + ; ########################################### + ; # Output to netCDF # + ; ########################################### + to_plot@ncdf = variable_info[0]@diagnostic + ".nc" + ncdf_file = work_dir + "/" + to_plot@ncdf + ncdf_file@existing = "overwrite" + ; Function in ~/interface_scripts/auxiliary.ncl + ncdf_outfile = ncdf_write(to_plot, ncdf_file) + if(isatt(diag_script_info, "model_nr") .and. \ + diag_script_info@model_nr .eq. True) then + ncdf_file2 = work_dir + "/nr_runs_" + variable_info[0]@diagnostic + ".nc" + ncdf_outfile2 = ncdf_write(runs, ncdf_file2) + end if + + leave_msg(DIAG_SCRIPT, "") +end diff --git a/esmvaltool/diag_scripts/tebaldi21esd/calc_timeseries_mean_spread_ssp4.ncl b/esmvaltool/diag_scripts/tebaldi21esd/calc_timeseries_mean_spread_ssp4.ncl new file mode 100644 index 0000000000..74baee5d88 --- /dev/null +++ b/esmvaltool/diag_scripts/tebaldi21esd/calc_timeseries_mean_spread_ssp4.ncl @@ -0,0 +1,459 @@ +; ############################################################################# +; Computes timeseries of two SSP4 scenarios between 2015-2100 +; Derived and adapted from +; diag_scripts/ipcc_ar5/ch12_tsline_mean_spread.ncl +; Author: Debeire Kevin (DLR, Germany) +; ############################################################################# +; +; Description +; Computes multi-model time series of change against historical baseline +; for ssp434 and ssp460 with spread. A running average with specified +; window is performed. 
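+; +; A minimal commented sketch of that transformation (the toy series, the +; 11-year window and the 1995-2014 baseline below are illustrative +; assumptions; nothing in this sketch is executed by the diagnostic): +; ts = fspan(14.0, 16.5, 251) ; toy global-mean series for 1850-2100 +; ts!0 = "year" +; ts&year = ispan(1850, 2100, 1) +; i0 = ind(ts&year .eq. 1995) ; baseline start +; i1 = ind(ts&year .eq. 2014) ; baseline end +; anom = ts - dim_avg(ts(i0:i1)) ; change vs. historical baseline mean +; anom_rm = runave_n(anom, 11, 0, 0) ; centered running average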
+; +; Required diag_script_info attributes (diagnostics specific) +; scenarios: list with scenarios to be included: ssp434, ssp460 +; syears: list with start years of time periods (historical then SSPs) +; eyears: list with end years of the time periods (historical then SSPs) +; begin_ref_year: start year of historical baseline period (e.g. 1995) +; end_ref_year: end year of historical baseline period (e.g. 2014) +; label: list of scenario names included in the legend +; +; Optional diag_script_info attributes (diagnostic specific) +; runave_window_size: size of the running average window (default 11) +; spread: how many standard dev. to calculate the spread with (default 1) +; percent: determines if difference is in percent (0, 1, default = 0) +; model_nr: whether to save number of models used for each scenario +; +; Modification history +; 20220803-A_debe_kevin: written for ESMValTool +; +; ############################################################################# +; A temporary file written by the invoking Python script +; Passes on a number of variables from Python to NCL +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/latlon.ncl" + +load "$diag_scripts/shared/statistics.ncl" + +load "$diag_scripts/shared/plot/aux_plotting.ncl" +load "$diag_scripts/shared/plot/style.ncl" + +begin + enter_msg(DIAG_SCRIPT, "") + + ; 'datasets', 'variables' are fetched from the above 'interface.ncl' file + var0 = variable_info[0]@short_name + info_items = select_metadata_by_name(input_file_info, var0) + dim_MOD = ListCount(info_items) + dim_VAR = ListCount(variable_info) + + ; Check required diag_script_info attributes + exit_if_missing_atts(diag_script_info, (/"scenarios", "syears", "eyears", \ + "begin_ref_year", "end_ref_year"/)) +end + +begin + ; Output netcdf directory + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) +end + +begin + if(isvar("MyParam")) then + delete(MyParam) + end if + if(isatt(variable_info[0], "long_name")) then + MyParam = variable_info[0]@long_name + log_debug(" MyParam = " + MyParam) + else + error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \ + "'long_name'") + end if + ; ########################################### + ; # Get data and average annual and globally# + ; ########################################### + ; How many historical model runs? + if (isatt(diag_script_info, "reference_run")) then + reference_run_name = diag_script_info@reference_run + else + log_info("No reference run name given, set to default 'historical'") + reference_run_name = "historical" + end if + attsh = True + attsh@exp = reference_run_name + item_hist = select_metadata_by_atts(info_items, attsh) + dim_MOD_hist = ListCount(item_hist) + + ; get data from first dataset for historical period + imod = 0 ; NCL array indices start from zero + idx_hist = item_hist[0]@recipe_dataset_index + log_debug("processing " + info_items[idx_hist]@dataset + "_" \ + + info_items[idx_hist]@exp + "_" \ + + info_items[idx_hist]@ensemble) + + ; See ./interface_scripts/data_handling.ncl + A0 = read_data(info_items[idx_hist]) + + ; Check dimensions + dims = getvardims(A0) + if (dimsizes(dims) .lt. 2) then + error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + " dimensions, " + \ + "need 2 or 3") + end if + idx = ind(dims .eq. "lat") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lat dimension") + end if + idx = ind(dims .eq. 
"lon") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lon dimension") + end if + + ; Average over time (function in ./diag_scripts/shared/statistics.ncl) + data1 = time_operations(A0, -1, -1, "average", "yearly", True) + + dim_file = dimsizes(data1) ; file size of historical file + ; Average over globe/region + ; (function in ./diag_scripts/shared/latlon.ncl) + if (isatt(diag_script_info, "ts_minlat")) then + data1_glob = area_operations(data1, diag_script_info@ts_minlat, \ + diag_script_info@ts_maxlat, \ + diag_script_info@ts_minlon, \ + diag_script_info@ts_maxlon, "average", \ + True) + else + data1_glob = coswgt_areaave(data1) + end if + + ref_data2 = new((/dim_MOD_hist/), float, data1_glob@_FillValue) + data2_rel = new((/dim_MOD_hist, dim_file(0)/), float, \ + data1_glob@_FillValue) + + ; How many scenarios? + dim_scen = dimsizes(diag_script_info@scenarios) + ; start and end years of timeperiods + syears = diag_script_info@syears + eyears = diag_script_info@eyears + nperiods = dimsizes(syears) + nyears = toint(eyears(nperiods - 1)) - toint(syears(0)) + 1 + nyearsp1 = toint(eyears(nperiods - 1)) - toint(eyears(0)) + + atts1 = True + atts1@exp = diag_script_info@scenarios(0) + item_ssp434 = select_metadata_by_atts(info_items, atts1) + dim_MOD_434 = ListCount(item_ssp434) + ; indicate array for scenarios + data434_rel = new((/dim_MOD_434, nyearsp1/), float, \ + data1_glob@_FillValue) + i434 = 0 + + if (dim_scen .gt. 1) then + atts2 = True + atts2@exp = diag_script_info@scenarios(1) + item_ssp460 = select_metadata_by_atts(info_items, atts2) + dim_MOD_460 = ListCount(item_ssp460) + data460_rel = new((/dim_MOD_460, nyearsp1/), float, \ + data1_glob@_FillValue) + i460 = 0 + end if + + ; set runave_window parameter + if (isatt(diag_script_info, "runave_window")) then + runave_window = diag_script_info@runave_window + half_window = toint(floor(runave_window/2.)) + else + runave_window = 11 + half_window = toint(floor(runave_window/2.)) + end if + + ; set percent parameter + if (isatt(diag_script_info, "percent")) then + percent = diag_script_info@percent + else + percent = 0 + end if + + do imod = 0, dim_MOD_hist - 1 + idx_hist = item_hist[imod]@recipe_dataset_index + if (imod .eq. 0) then + data2 = data1 + delete(A0) + else + A0 = read_data(info_items[idx_hist]) + data2 = time_operations(A0, -1, -1, "average", "yearly", True) + delete(A0) + end if + ; Average over globe (function in + ; ./diag_scripts/shared/statistics.ncl) + data2_glob = coswgt_areaave(data2) + ind_start = ind(data2&year .eq. diag_script_info@begin_ref_year) + ind_end = ind(data2&year .eq. diag_script_info@end_ref_year) + ref_data2(imod) = dim_avg(data2_glob(ind_start : ind_end)) + dim_data2g = dimsizes(data2_glob) + if (dim_data2g .ne. dim_file(0)) then + tdiff = dim_file(0) - dim_data2g + data2_rel(imod, 0 : tdiff - 1) = data2_glob@_FillValue + data2_rel(imod, tdiff :) = data2_glob - ref_data2(imod) + else + data2_rel(imod, :) = data2_glob - ref_data2(imod) + end if + if percent .eq. 1 then + data2_rel(imod, :) = 100*(data2_rel(imod, :)/ref_data2(imod)) + end if + ; clean up + delete([/data2, data2_glob/]) + ; find all other runs from this model and calculate relative ts + atts = True + atts@dataset = info_items[idx_hist]@dataset + atts@exp = diag_script_info@scenarios(0) + atts@ensemble = info_items[idx_hist]@ensemble + item_434 = select_metadata_by_atts(info_items, atts) + if (ListCount(item_434) .ne. 
0) then + A1 = read_data(item_434[0]) + data3 = time_operations(A1, -1, -1, "average", "yearly", True) + data3_glob = coswgt_areaave(data3) + dim_data3g = dimsizes(data3_glob) + if (dim_data3g .ne. nyearsp1) then + if (dim_data3g .gt. nyearsp1) then + error_msg("w", DIAG_SCRIPT, "", "Length of dataset " + \ + info_items[idx_hist]@dataset + " in recipe is " + \ + "longer than period given in recipe (" + nyearsp1 + \ + " years)") + else + data434_rel(i434, 0 : dim_data3g - 1) = data3_glob - ref_data2(imod) + data434_rel(i434, dim_data3g :) = data3_glob@_FillValue + end if + else + data434_rel(i434, :) = data3_glob - ref_data2(imod) + end if + if percent .eq. 1 then + data434_rel(i434, :) = 100*(data434_rel(i434, :)/ref_data2(imod)) + end if + + i434 = i434 + 1 + ; clean up + delete([/A1, data3, data3_glob/]) + else + log_info("No corresponding model for historial run " + \ + info_items[idx_hist]@dataset + " found in scenario " + \ + diag_script_info@scenarios(0) + \ + ", cannot calculate relative change.") + end if + delete(item_434) + + if (dim_scen .gt. 1) then + atts@exp = diag_script_info@scenarios(1) + item_460 = select_metadata_by_atts(info_items, atts) + + if (ListCount(item_460) .ne. 0) then + A1 = read_data(item_460[0]) + data3 = time_operations(A1, -1, -1, "average", "yearly", True) + data3_glob = coswgt_areaave(data3) + dim_data3g = dimsizes(data3_glob) + if (dim_data3g .ne. nyearsp1) then + if (dim_data3g .gt. nyearsp1) then + error_msg("w", diag_script, "", "Length of dataset " + \ + info_items[idx_hist]@dataset + " in recipe is " + \ + "longer than period given in recipe (" + nyearsp1 + \ + " years)") + else + data460_rel(i460, 0:dim_data3g - 1) = data3_glob - ref_data2(imod) + data460_rel(i460, dim_data3g:) = data3_glob@_FillValue + end if + else + data460_rel(i460, :) = data3_glob - ref_data2(imod) + end if + if percent .eq. 
1 then + data460_rel(i460, :) = 100*(data460_rel(i460, :)/ref_data2(imod)) + end if + + app_arr460 = \ + array_append_record(data2_rel(imod, dim_data2g - half_window:\ + dim_data2g - 1), data460_rel(i460, :), 0) + tmp_arr460 = runave_n(app_arr460, runave_window, 0, 0) + + app_arr434 = \ + array_append_record(data2_rel(imod, dim_data2g - half_window:\ + dim_data2g-1), data434_rel(i434-1, :), 0) + tmp_arr434 = runave_n(app_arr434, runave_window, 0, 0) + + data434_rel(i434-1, :) = tmp_arr434(half_window:dim_data3g+4) + data460_rel(i460, :) = tmp_arr460(half_window:dim_data3g+4) + delete(tmp_arr434) + delete(tmp_arr460) + delete(app_arr434) + delete(app_arr460) + i460 = i460 + 1 + ; clean up + delete([/A1, data3, data3_glob/]) + else + log_info("No corresponding model ensemble for historial run " + \ + info_items[idx_hist]@dataset + " found in scenario " + \ + diag_script_info@scenarios(1) + \ + ", cannot calculate relative change.") + end if + delete(item_460) + end if + + end do ; imod + + ; average and standard deviation over all models + datahist_avg = dim_avg_n_Wrap(data2_rel, 0) + datahist_std = dim_stddev_n_Wrap(data2_rel, 0) + + ; put all data from 1850-2100 into mean_val and std_val + mean_val = new((/dim_scen + 1, nyears/), typeof(data2_rel), \ + data2_rel@_FillValue) + std_val = new((/dim_scen + 1, nyears/), typeof(data2_rel), \ + data2_rel@_FillValue) + p0 = toint(eyears(0)) - toint(syears(0)) + p1 = toint(eyears(0)) - toint(syears(0)) + 1 + mean_val(0, 0 : p0) = datahist_avg + std_val(0, 0 : p0) = datahist_std + + data434_avg = dim_avg_n_Wrap(data434_rel, 0) + data434_std = dim_stddev_n_Wrap(data434_rel, 0) + mean_val(1, p1 :) = data434_avg + std_val(1, p1 :) = data434_std + if (dim_scen .gt. 1) then + data460_avg = dim_avg_n_Wrap(data460_rel, 0) + data460_std = dim_stddev_n_Wrap(data460_rel, 0) + mean_val(2, p1 :) = data460_avg + std_val(2, p1 :) = data460_std + end if + + ; check number of models for timeperiods in scenarios + ; if there are less than 5 models set to missing + runs = new((/1 + dim_scen, nperiods/), integer) + atts_runs = True + atts_runs@exp = reference_run_name + item_runs = select_metadata_by_atts(info_items, atts_runs) + runs(0, 0) = ListCount(item_runs) + do scen = 0, dim_scen - 1 + do p = 1, nperiods - 1 + atts_runs@exp = diag_script_info@scenarios(scen) + item_runs = select_metadata_by_atts(info_items, atts_runs) + nrp1 = NewList("fifo") + nrp2 = NewList("fifo") + do r = 0, ListCount(item_runs) - 1 + if (item_runs[r]@end_year .gt. toint(diag_script_info@eyears(p))) then + ListAppend(nrp1, item_runs[r]) + elseif (item_runs[r]@end_year .eq. \ + toint(diag_script_info@eyears(p))) then + ListAppend(nrp2, item_runs[r]) + end if + end do + tmp = ListCount(nrp1) + ; check if only found missing, in that case set tmp to zero + if tmp .eq. 0 then + tmp = 0 + end if + tmp2 = ListCount(nrp2) + ; check if only found missing, in that case set tmp2 to zero + if tmp2 .eq. 0 then + tmp2 = 0 + end if + runs(scen + 1, p) = tmp2 + tmp + delete([/tmp, tmp2, nrp1, nrp2/]) + if (runs(scen + 1, p) .le. 4) .and. (p .ne. 0) then + p2 = toint(eyears(p - 1)) - toint(syears(0)) + p3 = toint(eyears(p)) - toint(syears(0)) + mean_val(scen + 1, p2 : p3) = mean_val@_FillValue + std_val(scen + 1, p2 : p3) = std_val@_FillValue + log_info("Scenario " + diag_script_info@scenarios(scen) + \ + " in period ending " + diag_script_info@eyears(p) + \ + " has less than 5 models, set to missing.") + end if + end do + end do + ; FIX MODEL_NR + dim_1 = dimsizes(data434_rel) + if (dim_scen .gt. 
1) then + dim_2 = dimsizes(data460_rel) + runs(2, 1) = dim_2(0) + end if + runs(1, 1) = dim_1(0) + + dims_data = dimsizes(mean_val) + to_plot = new((/3 * dims_data(0), dims_data(1)/), float) + + if (isatt(diag_script_info, "spread")) then + spread = diag_script_info@spread + else + spread = 1.0 + end if + do j = 0, dims_data(0) - 1 + ; if variable sw or lw TOA flux: change direction + if (var0 .eq. "rlut" .or. var0 .eq. "rsut") then + to_plot(j * 3, :) = - mean_val(j, :) + else + to_plot(j * 3, :) = mean_val(j, :) + end if + to_plot((j * 3) + 1, :) = to_plot(j * 3, :) + spread * std_val(j, :) + to_plot((j * 3) + 2, :) = to_plot(j * 3, :) - spread * std_val(j, :) + end do + copy_VarMeta(mean_val, to_plot) + to_plot!0 = "metric" + str_spread = sprintf("%5.2f", spread) + to_plot&metric = (/"mean", "+" + str_spread + "std", "-" + \ + str_spread + "std", "mean", "+" + \ + str_spread + "std", "-" + str_spread + \ + "std", "mean", "+" + str_spread + "std", "-" \ + + str_spread + "std"/) + to_plot!1 = "year" + to_plot&year = ispan(toint(syears(0)), toint(eyears(nperiods - 1)), 1) + + ; ########################################### + ; # Other Metadata: diag_script, var # + ; ########################################### + ; Add to to_plot, as attributes without prefix + if(isatt(to_plot, "diag_script")) then ; Add to existing entries + temp = to_plot@diag_script + delete(to_plot@diag_script) + to_plot@diag_script = array_append_record(temp, (/DIAG_SCRIPT/), 0) + delete(temp) + else ; Add as new attribute + to_plot@diag_script = (/DIAG_SCRIPT/) + end if + to_plot@var = var0 ; Overwrite existing entry + if(isatt(variable_info[0], "long_name")) then + to_plot@var_long_name = variable_info[0]@long_name + end if + if(isatt(variable_info[0], "units")) then + to_plot@units = variable_info[0]@units + end if + if percent .eq. 1 then + to_plot@units = "%" + end if + runs!0 = "scenario" + runs&scenario = diag_script_info@label + runs!1 = "period" + runs&period = diag_script_info@syears + if(isatt(runs, "diag_script")) then ; Add to existing entries + temp = runs@diag_script + delete(runs@diag_script) + runs@diag_script = array_append_record(temp, (/DIAG_SCRIPT/), 0) + delete(temp) + else ; Add as new attribute + runs@diag_script = (/DIAG_SCRIPT/) + end if + runs@var = "nr" + runs@var_long_name = "number of model runs per scenario and period" + runs@units = 1 + ; ########################################### + ; # Output to netCDF # + ; ########################################### + to_plot@ncdf = variable_info[0]@diagnostic + ".nc" + ncdf_file = work_dir + "/" + to_plot@ncdf + ncdf_file@existing = "overwrite" + ; Function in ~/interface_scripts/auxiliary.ncl + ncdf_outfile = ncdf_write(to_plot, ncdf_file) + if(isatt(diag_script_info, "model_nr") .and. \ + diag_script_info@model_nr .eq. 
True) then + ncdf_file2 = work_dir + "/nr_runs_" + variable_info[0]@diagnostic + ".nc" + ncdf_outfile2 = ncdf_write(runs, ncdf_file2) + end if + + leave_msg(DIAG_SCRIPT, "") +end diff --git a/esmvaltool/diag_scripts/tebaldi21esd/calc_timeseries_mean_spread_ssp5.ncl b/esmvaltool/diag_scripts/tebaldi21esd/calc_timeseries_mean_spread_ssp5.ncl new file mode 100644 index 0000000000..0ec3479508 --- /dev/null +++ b/esmvaltool/diag_scripts/tebaldi21esd/calc_timeseries_mean_spread_ssp5.ncl @@ -0,0 +1,461 @@ +; ############################################################################# +; Computes timeseries of two SSP5 scenarios between 2015-2100 +; Derived and adapted from +; diag_scripts/ipcc_ar5/ch12_tsline_mean_spread.ncl +; Author: Debeire Kevin (DLR, Germany) +; ############################################################################# +; +; Description +; Computes multi-model time series of change against historical baseline +; for ssp534-over and ssp585 with spread. A running average with specified +; window is performed. +; +; Required diag_script_info attributes (diagnostics specific) +; scenarios: list with scenarios to be included: ssp534-over, ssp585 +; syears: list with start years of time periods (historical then SSPs) +; eyears: list with end years of the time periods (historical then SSPs) +; begin_ref_year: start year of historical baseline period (e.g. 1995) +; end_ref_year: end year of historical baseline period (e.g. 2014) +; label: list of scenario names included in the legend +; +; Optional diag_script_info attributes (diagnostic specific) +; runave_window_size: size of the running average window (default 11) +; spread: how many standard dev. to calculate the spread with (default 1) +; percent: determines if difference is in percent (0, 1, default = 0) +; model_nr: whether to save number of models used for each scenario +; +; Modification history +; 20220803-A_debe_kevin: written for ESMValTool +; +; ############################################################################# +; A temporary file written by the invoking Python script +; Passes on a number of variables from Python to NCL +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/latlon.ncl" + +load "$diag_scripts/shared/statistics.ncl" + +load "$diag_scripts/shared/plot/aux_plotting.ncl" +load "$diag_scripts/shared/plot/style.ncl" + +begin + enter_msg(DIAG_SCRIPT, "") + + ; 'datasets', 'variables' are fetched from the above 'interface.ncl' file + var0 = variable_info[0]@short_name + info_items = select_metadata_by_name(input_file_info, var0) + dim_MOD = ListCount(info_items) + dim_VAR = ListCount(variable_info) + + ; Check required diag_script_info attributes + exit_if_missing_atts(diag_script_info, (/"scenarios", "syears", "eyears", \ + "begin_ref_year", "end_ref_year"/)) +end + +begin + ; Output netcdf directory + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) +end + +begin + if(isvar("MyParam")) then + delete(MyParam) + end if + if(isatt(variable_info[0], "long_name")) then + MyParam = variable_info[0]@long_name + log_debug(" MyParam = " + MyParam) + else + error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \ + "'long_name'") + end if + ; ########################################### + ; # Get data and average annual and globally# + ; ########################################### + ; How many historical model runs? 
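+ ; Commented sketch of the metadata filtering used throughout this script + ; (the "ssp585" value is an illustrative assumption): + ; atts_demo = True + ; atts_demo@exp = "ssp585" + ; item_demo = select_metadata_by_atts(info_items, atts_demo) + ; log_info("ssp585 runs found: " + ListCount(item_demo))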
+ if (isatt(diag_script_info, "reference_run")) then + reference_run_name = diag_script_info@reference_run + else + log_info("No reference run name given, set to default 'historical'") + reference_run_name = "historical" + end if + attsh = True + attsh@exp = reference_run_name + item_hist = select_metadata_by_atts(info_items, attsh) + dim_MOD_hist = ListCount(item_hist) + + ; get data from first dataset for historical period + imod = 0 ; NCL array indices start from zero + idx_hist = item_hist[0]@recipe_dataset_index + log_debug("processing " + info_items[idx_hist]@dataset + "_" \ + + info_items[idx_hist]@exp + "_" \ + + info_items[idx_hist]@ensemble) + + ; See ./interface_scripts/data_handling.ncl + A0 = read_data(info_items[idx_hist]) + + ; Check dimensions + dims = getvardims(A0) + if (dimsizes(dims) .lt. 2) then + error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + " dimensions, " + \ + "need 2 or 3") + end if + idx = ind(dims .eq. "lat") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lat dimension") + end if + idx = ind(dims .eq. "lon") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lon dimension") + end if + + ; Average over time (function in ./diag_scripts/shared/statistics.ncl) + data1 = time_operations(A0, -1, -1, "average", "yearly", True) + + dim_file = dimsizes(data1) ; file size of historical file + ; Average over globe/region + ; (function in ./diag_scripts/shared/latlon.ncl) + if (isatt(diag_script_info, "ts_minlat")) then + data1_glob = area_operations(data1, diag_script_info@ts_minlat, \ + diag_script_info@ts_maxlat, \ + diag_script_info@ts_minlon, \ + diag_script_info@ts_maxlon, "average", \ + True) + else + data1_glob = coswgt_areaave(data1) + end if + + ref_data2 = new((/dim_MOD_hist/), float, data1_glob@_FillValue) + data2_rel = new((/dim_MOD_hist, dim_file(0)/), float, \ + data1_glob@_FillValue) + + ; How many scenarios? + dim_scen = dimsizes(diag_script_info@scenarios) + ; start and end years of time periods + syears = diag_script_info@syears + eyears = diag_script_info@eyears + nperiods = dimsizes(syears) + nyears = toint(eyears(nperiods - 1)) - toint(syears(0)) + 1 + nyearsp1 = toint(eyears(nperiods - 2)) - toint(syears(nperiods - 2)) + 1 + nyearsp2 = toint(eyears(nperiods - 1)) - toint(syears(nperiods - 1)) + 1 + + atts1 = True + atts1@exp = diag_script_info@scenarios(0) + item_ssp534 = select_metadata_by_atts(info_items, atts1) + dim_MOD_534 = ListCount(item_ssp534) + ; allocate array for scenarios + data534_rel = new((/dim_MOD_534, nyearsp1/), float, \ + data1_glob@_FillValue) + i534 = 0 + + if (dim_scen .gt. 1) then + atts2 = True + atts2@exp = diag_script_info@scenarios(1) + item_ssp585 = select_metadata_by_atts(info_items, atts2) + dim_MOD_585 = ListCount(item_ssp585) + data585_rel = new((/dim_MOD_585, nyearsp2/), float, \ + data1_glob@_FillValue) + i585 = 0 + end if + + ; set runave_window parameter + if (isatt(diag_script_info, "runave_window")) then + runave_window = diag_script_info@runave_window + else + runave_window = 11 + end if + half_window = toint(floor(runave_window/2.)) + + ; set percent parameter + if (isatt(diag_script_info, "percent")) then + percent = diag_script_info@percent + else + percent = 0 + end if + + do imod = 0, dim_MOD_hist - 1 + idx_hist = item_hist[imod]@recipe_dataset_index + if (imod .eq.
0) then + data2 = data1 + delete(A0) + else + A0 = read_data(info_items[idx_hist]) + data2 = time_operations(A0, -1, -1, "average", "yearly", True) + delete(A0) + end if + ; Average over globe (function in + ; ./diag_scripts/shared/statistics.ncl) + data2_glob = coswgt_areaave(data2) + ind_start = ind(data2&year .eq. diag_script_info@begin_ref_year) + ind_end = ind(data2&year .eq. diag_script_info@end_ref_year) + ref_data2(imod) = dim_avg(data2_glob(ind_start : ind_end)) + dim_data2g = dimsizes(data2_glob) + if (dim_data2g .ne. dim_file(0)) then + tdiff = dim_file(0) - dim_data2g + data2_rel(imod, 0 : tdiff - 1) = data2_glob@_FillValue + data2_rel(imod, tdiff :) = data2_glob - ref_data2(imod) + else + data2_rel(imod, :) = data2_glob - ref_data2(imod) + end if + if (percent .eq. 1) then + data2_rel(imod, :) = 100 * (data2_rel(imod, :) / ref_data2(imod)) + end if + ; clean up + delete([/data2, data2_glob/]) + ; find all other runs from this model and calculate relative ts + atts = True + atts@dataset = info_items[idx_hist]@dataset + atts@exp = diag_script_info@scenarios(0) + atts@ensemble = info_items[idx_hist]@ensemble + item_534 = select_metadata_by_atts(info_items, atts) + if (ListCount(item_534) .ne. 0) then + A1 = read_data(item_534[0]) + data3 = time_operations(A1, -1, -1, "average", "yearly", True) + data3_glob = coswgt_areaave(data3) + dim_data3g = dimsizes(data3_glob) + if (dim_data3g .ne. nyearsp1) then + if (dim_data3g .gt. nyearsp1) then + error_msg("w", DIAG_SCRIPT, "", "Length of dataset " + \ + info_items[idx_hist]@dataset + " in recipe is " + \ + "longer than period given in recipe (" + nyearsp1 + \ + " years)") + else + data534_rel(i534, 0 : dim_data3g - 1) = (data3_glob - \ + ref_data2(imod)) + data534_rel(i534, dim_data3g :) = data3_glob@_FillValue + end if + else + data534_rel(i534, :) = (data3_glob - ref_data2(imod)) + end if + if (percent .eq. 1) then + data534_rel(i534, :) = 100*(data534_rel(i534, :)/ref_data2(imod)) + end if + + i534 = i534 + 1 + ; clean up + delete([/A1, data3, data3_glob/]) + else + log_info("No corresponding model for historical run " + \ + info_items[idx_hist]@dataset + " found in scenario " + \ + diag_script_info@scenarios(0) + \ + ", cannot calculate relative change.") + end if + delete(item_534) + + if (dim_scen .gt. 1) then + atts@exp = diag_script_info@scenarios(1) + item_585 = select_metadata_by_atts(info_items, atts) + + if (ListCount(item_585) .ne. 0) then + A1 = read_data(item_585[0]) + data3 = time_operations(A1, -1, -1, "average", "yearly", True) + data3_glob = coswgt_areaave(data3) + dim_data3g = dimsizes(data3_glob) + if (dim_data3g .ne. nyearsp2) then + if (dim_data3g .gt. nyearsp2) then + error_msg("w", DIAG_SCRIPT, "", "Length of dataset " + \ + info_items[idx_hist]@dataset + " in recipe is " + \ + "longer than period given in recipe (" + nyearsp2 + \ + " years)") + else + data585_rel(i585, 0:dim_data3g - 1) = (data3_glob - \ + ref_data2(imod)) + data585_rel(i585, dim_data3g:) = data3_glob@_FillValue + end if + else + data585_rel(i585, :) = (data3_glob - ref_data2(imod)) + end if + if (percent .eq.
1) then + data585_rel(i585, :) = 100*(data585_rel(i585, :)/ref_data2(imod)) + end if + ; perform running average and extend ssp534 with ssp585 before 2040 + app_arr585 = array_append_record(data2_rel(imod, dim_data2g - \ + half_window:dim_data2g-1), \ + data585_rel(i585, :), 0) + tmp_arr585 = runave_n(app_arr585, runave_window, 0, 0) + id_junction = nyearsp2-nyearsp1 + app_arr534 = array_append_record(data585_rel(i585, id_junction - \ + half_window:\ + id_junction-1), \ + data534_rel(i534-1, :), 0) + tmp_arr534 = runave_n(app_arr534, runave_window, 0, 0) + dim_tmp534 = dimsizes(tmp_arr534) + data534_rel(i534-1, :) = tmp_arr534(half_window:dim_tmp534-1) + data585_rel(i585, :) = tmp_arr585(half_window:dim_data3g+4) + delete(tmp_arr534) + delete(tmp_arr585) + delete(app_arr534) + delete(app_arr585) + i585 = i585 + 1 + ; clean up + delete([/A1, data3, data3_glob/]) + else + log_info("No corresponding model ensemble for historical run " + \ + info_items[idx_hist]@dataset + " found in scenario " + \ + diag_script_info@scenarios(1) + \ + ", cannot calculate relative change.") + end if + delete(item_585) + end if + + end do ; imod + + ; average and standard deviation over all models + datahist_avg = dim_avg_n_Wrap(data2_rel, 0) + datahist_std = dim_stddev_n_Wrap(data2_rel, 0) + ; put all data from 1850-2100 into mean_val and std_val + mean_val = new((/dim_scen+1, nyearsp2/), typeof(data534_rel), \ + data534_rel@_FillValue) + std_val = new((/dim_scen+1, nyearsp2/), typeof(data534_rel), \ + data534_rel@_FillValue) + p0 = toint(eyears(0)) - toint(syears(0)) + p1 = toint(syears(1)) - toint(syears(2)) + p2 = toint(syears(2)) - toint(syears(2)) + mean_val(0, 0 : p0) = mean_val@_FillValue + std_val(0, 0 : p0) = std_val@_FillValue + + data534_avg = dim_avg_n_Wrap(data534_rel, 0) + data534_std = dim_stddev_n_Wrap(data534_rel, 0) + mean_val(1, p1:) = data534_avg + std_val(1, p1:) = data534_std + if (dim_scen .gt. 1) then + data585_avg = dim_avg_n_Wrap(data585_rel, 0) + data585_std = dim_stddev_n_Wrap(data585_rel, 0) + mean_val(2, p2 :) = data585_avg + std_val(2, p2 :) = data585_std + end if + + ; check number of models for time periods in scenarios + ; if there are less than 5 models set to missing + runs = new((/1 + dim_scen, nperiods/), integer) + atts_runs = True + atts_runs@exp = reference_run_name + item_runs = select_metadata_by_atts(info_items, atts_runs) + runs(0, 0) = ListCount(item_runs) + do scen = 0, dim_scen - 1 + do p = 1, nperiods - 1 + atts_runs@exp = diag_script_info@scenarios(scen) + item_runs = select_metadata_by_atts(info_items, atts_runs) + nrp1 = NewList("fifo") + nrp2 = NewList("fifo") + do r = 0, ListCount(item_runs) - 1 + if (item_runs[r]@end_year .gt. toint(diag_script_info@eyears(p))) then + ListAppend(nrp1, item_runs[r]) + else if (item_runs[r]@end_year .eq. \ + toint(diag_script_info@eyears(p))) then + ListAppend(nrp2, item_runs[r]) + end if + end if + end do + tmp = ListCount(nrp1) + tmp2 = ListCount(nrp2) + runs(scen + 1, p) = tmp2 + tmp + delete([/tmp, tmp2, nrp1, nrp2/]) + if (runs(scen + 1, p) .le. 4) .and. (p .ne.
0) then + p2 = toint(eyears(p - 1)) - toint(syears(0)) + p3 = toint(eyears(p)) - toint(syears(0)) + mean_val(scen + 1, p2 : p3) = mean_val@_FillValue + std_val(scen + 1, p2 : p3) = std_val@_FillValue + log_info("Scenario " + diag_script_info@scenarios(scen) + \ + " in period ending " + diag_script_info@eyears(p) + \ + " has less than 5 models, set to missing.") + end if + end do + end do + ; FIX MODEL_NR + dim_1 = dimsizes(data534_rel) + dim_2 = dimsizes(data585_rel) + runs(1, 1) = dim_1(0) + runs(2, 1) = dim_2(0) + + dims_data = dimsizes(mean_val) + to_plot = new((/3 * dims_data(0), dims_data(1)/), float) + if (isatt(diag_script_info, "spread")) then + spread = diag_script_info@spread + else + spread = 1.0 + end if + do j = 0, dims_data(0) - 1 + ; if variable sw or lw TOA flux: change direction + if (var0 .eq. "rlut" .or. var0 .eq. "rsut") then + to_plot(j * 3, :) = - mean_val(j, :) + else + to_plot(j * 3, :) = mean_val(j, :) + end if + to_plot((j * 3) + 1, :) = to_plot(j * 3, :) + spread * std_val(j, :) + to_plot((j * 3) + 2, :) = to_plot(j * 3, :) - spread * std_val(j, :) + end do + copy_VarMeta(mean_val, to_plot) + to_plot!0 = "metric" + str_spread = sprintf("%5.2f", spread) + to_plot&metric = (/"mean", "+" + str_spread + "std", "-" + \ + str_spread + "std", "mean", "+" + \ + str_spread + "std", "-" + str_spread + \ + "std", "mean", "+" + str_spread + "std", "-" + \ + str_spread + "std"/) + to_plot!1 = "year" + to_plot&year = ispan(toint(syears(2)), toint(eyears(nperiods - 1)), 1) + + ; ########################################### + ; # Other Metadata: diag_script, var # + ; Add to to_plot, as attributes without prefix + if(isatt(to_plot, "diag_script")) then ; Add to existing entries + temp = to_plot@diag_script + delete(to_plot@diag_script) + to_plot@diag_script = array_append_record(temp, (/DIAG_SCRIPT/), 0) + delete(temp) + else ; Add as new attribute + to_plot@diag_script = (/DIAG_SCRIPT/) + end if + to_plot@var = var0 ; Overwrite existing entry + if (isatt(variable_info[0], "long_name")) then + to_plot@var_long_name = variable_info[0]@long_name + end if + if(isatt(variable_info[0], "units")) then + to_plot@units = variable_info[0]@units + end if + if (percent .eq. 1) then + to_plot@units = "%" + end if + + runs!0 = "scenario" + runs&scenario = diag_script_info@label + runs!1 = "period" + runs&period = diag_script_info@syears + if(isatt(runs, "diag_script")) then ; Add to existing entries + temp = runs@diag_script + delete(runs@diag_script) + runs@diag_script = array_append_record(temp, (/DIAG_SCRIPT/), 0) + delete(temp) + else ; Add as new attribute + runs@diag_script = (/DIAG_SCRIPT/) + end if + runs@var = "nr" + runs@var_long_name = "number of model runs per scenario and period" + runs@units = 1 + ; ########################################### + ; # Output to netCDF # + ; ########################################### + to_plot@ncdf = variable_info[0]@diagnostic + ".nc" + ncdf_file = work_dir + "/" + to_plot@ncdf + ncdf_file@existing = "overwrite" + ; Function in ~/interface_scripts/auxiliary.ncl + ncdf_outfile = ncdf_write(to_plot, ncdf_file) + if(isatt(diag_script_info, "model_nr") .and. \ + diag_script_info@model_nr .eq. 
True) then + ncdf_file2 = work_dir + "/nr_runs_" + variable_info[0]@diagnostic + ".nc" + ncdf_outfile2 = ncdf_write(runs, ncdf_file2) + end if + + leave_msg(DIAG_SCRIPT, "") +end diff --git a/esmvaltool/diag_scripts/tebaldi21esd/plot_pattern.ncl b/esmvaltool/diag_scripts/tebaldi21esd/plot_pattern.ncl new file mode 100644 index 0000000000..cd5aeb93c6 --- /dev/null +++ b/esmvaltool/diag_scripts/tebaldi21esd/plot_pattern.ncl @@ -0,0 +1,387 @@ +; ############################################################################# +; Plots the multi-model mean change map with significance and +; hatching if specified +; Derived and adapted from +; diag_scripts/ipcc_ar5/ch12_plot_map_diff_mmm_stipp.ncl +; Author: Debeire Kevin (DLR, Germany) +; ############################################################################# +; +; Description +; Plots the multi-model mean change map, optionally with significance +; stippling and uncertainty hatching +; +; Required diag_script_info attributes (diagnostics specific) +; scenarios: list with scenarios to be included in the +; figure, e.g. (/"ssp245", "ssp585"/) +; periods: list with start years of periods to be included, +; e.g. (/"2046", "2081"/) +; +; Optional diag_script_info attributes (diagnostic specific) +; sig: plot stippling for significance? (True, False) +; not_sig: plot hatching for uncertainty? (True, False) +; label: label to add in the legend +; colormap: alternative colormap, path to rgb file or ncl name +; model_nr: read the number of model runs per period and scenario from +; netcdf and print it in the plot (True, False, default = False) +; units: units written next to colorbar, e.g. (~F35~J~F~C) +; +; Modification history +; 20220803-A_debe_kevin: written for ESMValTool +; +; ############################################################################# +; A temporary file written by the invoking Python script +; Passes on a number of variables from Python to NCL +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/contour_maps.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" + +begin + enter_msg(DIAG_SCRIPT, "") + + ; 'datasets', 'variables' are fetched from the above 'interface.ncl' file + var0 = variable_info[0]@short_name + info_items = select_metadata_by_name(input_file_info, var0) + dim_MOD = ListCount(info_items) + dim_VAR = ListCount(variable_info) + + ; check if diagnostic is change scaled by global temperature; + ; in this case, even if two variables are present, only one is plotted + tf = isStrSubset(variable_info[0]@diagnostic, "scaleT") + if (tf) then + dim_VAR = 1 + end if + + ; Check required diag_script_info attributes + req_atts = (/"scenarios", "periods"/) + exit_if_missing_atts(diag_script_info, req_atts) +end + +begin + ; Output netcdf directory + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) + + netcdf_dir = diag_script_info@input_files(1) + + dim_scen = dimsizes(diag_script_info@scenarios) + dim_per = dimsizes(diag_script_info@periods) + if (isatt(diag_script_info, "seasons")) then + dim_seas = dimsizes(diag_script_info@seasons) + else + dim_seas = 1 + end if +end + +begin + if(isvar("MyParam")) then + delete(MyParam) + end if + if(isatt(variable_info[0], "long_name")) then + MyParam = variable_info[0]@long_name + log_info(" MyParam = " + MyParam) + else + error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \ + "'long_name'") + end if + + ; ########################################### + ; # Get data # + ; ########################################### + if (dim_VAR .eq.
1) then + datapath = netcdf_dir + "/" + variable_info[0]@diagnostic + ".nc" + else + datapath = netcdf_dir + "/" + var0 + "_" + variable_info[0]@diagnostic + \ + ".nc" + end if + tmp_plot = ncdf_read(datapath, var0) + + ; Check dimensions + dims = getvardims(tmp_plot) + if (dimsizes(dims) .lt. 2) then + error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + " dimensions, " + \ + "need 2 or 3") + end if + idx = ind(dims .eq. "lat") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lat dimension") + end if + nlat = dimsizes(tmp_plot&$dims(idx)$) + idx = ind(dims .eq. "lon") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lon dimension") + end if + nlon = dimsizes(tmp_plot&$dims(idx)$) + + ; predefine data arrays + to_plot = new((/dim_VAR * dim_scen * dim_per * dim_seas, nlat, nlon/), \ + float) + if (isatt(diag_script_info, "sig") .and. \ + diag_script_info@sig .eq. True) then + to_plot_signif = new((/dim_VAR * dim_scen * dim_per * dim_seas, nlat, \ + nlon/), float) + end if + if (isatt(diag_script_info, "not_sig") .and. \ + diag_script_info@not_sig .eq. True) then + to_plot_not_signif = new((/dim_VAR * dim_scen * dim_per * dim_seas, \ + nlat, nlon/), float) + end if + if (isatt(diag_script_info, "model_nr") .and. \ + diag_script_info@model_nr .eq. True) then + model_number = new((/dim_VAR * dim_scen * dim_per * dim_seas/), float) + end if + if (tf) then + pan_var = dim_per * dim_seas + else + pan_var = dim_scen * dim_per * dim_seas + end if + + do v = 0, dim_VAR - 1 + if (v .eq. 0) then + ind1 = 0 + ind2 = pan_var - 1 + else + datapath = netcdf_dir + "/" + variable_info[v]@short_name + "_" + \ + variable_info[v]@diagnostic + ".nc" + tmp_plot = ncdf_read(datapath, variable_info[v]@short_name) + ind1 = v * pan_var + ind2 = ind1 + pan_var - 1 + end if + to_plot(ind1 : ind2, :, :) = tmp_plot + + if (isatt(diag_script_info, "sig") .and. \ + diag_script_info@sig .eq. True) then + ; read already calculated significance + to_plot_signif(ind1 : ind2, :, :) = ncdf_read(datapath, "signif") + end if + if (isatt(diag_script_info, "not_sig") .and. \ + diag_script_info@not_sig .eq. True) then + ; read already calculated non-significance + to_plot_not_signif(ind1 : ind2, :, :) = ncdf_read(datapath, "not_signif") + end if + if (isatt(diag_script_info, "model_nr") .and. \ + diag_script_info@model_nr .eq. True) then + ; read already calculated number of models included in plot + model_number(ind1 : ind2) = ncdf_read(datapath, "model_nr") + end if + + end do ; loop over variables + ; ###################################################### + ; # Separate output to a graphics file, Panelling # + ; ###################################################### + to_plot@res_gsnDraw = False ; Do not draw yet + to_plot@res_gsnFrame = False ; Don't advance frame. + to_plot@res_cnFillOn = True ; Color plot desired + to_plot@res_cnLineLabelsOn = False ; No contour line labels + to_plot@res_cnLinesOn = False ; No contour lines + if (isatt(diag_script_info, "colormap")) then + col = read_colormap_file(diag_script_info@colormap) + else + col = "BlueYellowRed" + end if + to_plot@res_cnFillPalette = col + if (isatt(diag_script_info, "span")) then + to_plot@res_cnSpanFillPalette = diag_script_info@span + else + to_plot@res_cnSpanFillPalette = True ; use full colormap + end if + to_plot@res_lbLabelBarOn = False + + if (isatt(diag_script_info, "sig") .and. \ + diag_script_info@sig .eq.
True) then + ; resources for significance stippling (dots) + ress = True + ress@gsnDraw = False + ress@gsnFrame = False + ress@gsnAddCyclic = True + ress@cnLinesOn = False + ress@cnLineLabelsOn = False + ress@cnLevelSelectionMode = "ExplicitLevels" + ress@cnLevels = (/.3, 1.3/) + ress@cnInfoLabelOn = False + ress@tiMainOn = False + ress@cnFillOn = True + ress@cnFillMode = "AreaFill" + ress@cnMonoFillPattern = False + ress@cnFillPatterns = (/-1, 17, 17/) + ress@cnMonoFillColor = True + ress@lbLabelBarOn = False + ress@gsnLeftString = "" + ress@gsnRightString = "" + ress@cnFillDotSizeF = 0.001 + ress@cnFillScaleF = .6 ; add extra density + end if + if (isatt(diag_script_info, "not_sig") .and. \ + diag_script_info@not_sig .eq. True) then + ; resources for uncertainty hatching + resb = True + resb@gsnDraw = False + resb@gsnFrame = False + resb@gsnAddCyclic = True + resb@cnLinesOn = False + resb@cnLineLabelsOn = False + resb@cnLevelSelectionMode = "ExplicitLevels" + resb@cnLevels = (/.3, 1.3/) + resb@cnInfoLabelOn = False + resb@tiMainOn = False + resb@cnFillOn = True + resb@cnFillMode = "AreaFill" + resb@cnRasterSmoothingOn = False + resb@cnMonoFillPattern = False + resb@cnFillPatterns = (/-1, 10, 10/) + resb@cnMonoFillColor = True + resb@lbLabelBarOn = False + resb@gsnLeftString = "" + resb@gsnRightString = "" + resb@cnFillScaleF = 0.25 + end if + if (isatt(diag_script_info, "seasons")) then + dim_seas = dimsizes(diag_script_info@seasons) + else + dim_seas = 1 + end if + if ((isatt(diag_script_info, "label")) .and. \ + (dimsizes(diag_script_info@label) .ne. \ + (dim_scen * dim_per * dim_seas))) then + dim_scen = 1 + end if + + nmap = dim_VAR * dim_scen * dim_per * dim_seas + + ; function in aux_plotting.ncl + if (isatt(diag_script_info, "pltname")) then + wks = get_wks(0, variable_info[0]@diagnostic, \ + diag_script_info@pltname + "_" + \ + diag_script_info@time_avg) + else + wks = get_wks(0, variable_info[0]@diagnostic, \ + var0 + "_" + diag_script_info@time_avg) + end if + if (isatt(diag_script_info, "max_vert")) then + ; Maximum allowed number of plots per page (vertical) + nvert = toint(diag_script_info@max_vert) + else + nvert = dim_scen + end if + if (isatt(diag_script_info, "max_hori")) then + ; Maximum allowed number of plots per page (horizontal) + nhori = toint(diag_script_info@max_hori) + else + nhori = dim_per + end if + if ((tf) .and. (nhori * nvert .lt. nmap)) then + nhori = nmap + end if + maps = new(nmap, graphic) ; collect individual maps in a graphic array + do i = 0, nmap - 1 ; create one map per panel + if(isatt(diag_script_info, "label")) then + to_plot@res_tiMainString = diag_script_info@label(i) + end if + to_plot@res_gsnRightString = "" + to_plot@res_gsnLeftString = "" + if(isatt(diag_script_info, "projection")) then + to_plot@res_mpProjection = diag_script_info@projection + else + to_plot@res_mpProjection = "Robinson" + end if + ; Set explicit contour levels + if (isatt(diag_script_info, "diff_levs")) then + to_plot@res_cnLevelSelectionMode = "ExplicitLevels" + to_plot@res_cnLevels = diag_script_info@diff_levs + end if + + to_plot@res_mpPerimOn = False + + maps(i) = contour_map(wks, to_plot(i, :, :), var0) + + if (isatt(diag_script_info, "sig") .and. \ + diag_script_info@sig .eq. True) then + ; plot dots + if all(to_plot_signif(i, :, :) .eq. 1) then + to_plot_signif(i, 0, 0) = 0 + end if + if (max(to_plot_signif(i, :, :)) .gt.
\ + min(to_plot_signif(i, :, :))) then + plot1 = gsn_csm_contour(wks, to_plot_signif(i, :, :), ress) + overlay(maps(i), plot1) + end if + end if + if (isatt(diag_script_info, "not_sig") .and. \ + diag_script_info@not_sig .eq. True) then + ; plot hatch + if all(to_plot_not_signif(i, :, :) .eq. 1) then + to_plot_not_signif(i, 0, 0) = 0 + end if + if (max(to_plot_not_signif(i, :, :)) .gt. \ + min(to_plot_not_signif(i, :, :))) then + plot2 = gsn_csm_contour(wks, to_plot_not_signif(i, :, :), resb) + overlay(maps(i), plot2) + end if + end if + end do + + txres = True + txres@txFontHeightF = 0.02 + amres = True + amres@amParallelPosF = 0.48 ; This is the right edge of the plot. + amres@amOrthogonalPosF = -0.48 ; This is the top edge of the plot. + amres@amJust = "TopRight" + if (isatt(diag_script_info, "model_nr") .and. \ + diag_script_info@model_nr .eq. True) then + do i = 0, dimsizes(maps) - 1 + txid1 = gsn_create_text(wks, "" + model_number(i), txres) + annoid1 = gsn_add_annotation(maps(i), txid1, amres) + end do + end if + pres = True ; needed to override panelling defaults + pres@gsnPanelLabelBar = True ; add one common label bar for all panels + + a4_height = 29.7 ; in centimeters + a4_width = 23.0 ; wider than ISO A4 (21.0 cm) + cm_per_inch = 2.54 + + pres@gsnPaperWidth = a4_width / cm_per_inch + pres@gsnPaperHeight = a4_height / cm_per_inch + pres@gsnPaperOrientation = "portrait" + pres@gsnPanelTop = 0.96 + if (isatt(diag_script_info, "units")) then + pres@lbTitleString = diag_script_info@units + else + unit_string = format_units(to_plot@units) + pres@lbTitleString = "(" + unit_string + ")" + end if + pres@lbTitleFontHeightF = 0.017 + pres@lbTitleDirection = "across" + pres@lbTitlePosition = "Right" + pres@lbTitleJust = "CenterLeft" + pres@lbLabelFontHeightF = 0.014 + pres@lbLabelJust = "CenterCenter" + pres@lbLabelAutoStride = True + + pres@txFontHeightF = 0.02 + caption = "" + if (isatt(diag_script_info, "title")) then + pres@txString = diag_script_info@title + caption = diag_script_info@title + end if + pres@pmLabelBarParallelPosF = 0.06 + pres@pmLabelBarWidthF = .6 + + outfile = panelling(wks, maps, nvert, nhori, pres) + + log_info("Wrote " + outfile) + + ; collect meta-data + nc_file = datapath + plot_file = outfile + statistics = ("mean") + domains = ("global") + plot_types = ("geo") + authors = (/"debeire_kevin"/) + references = (/"tebaldi21esd"/) + infiles = metadata_att_as_array(info_items, "filename") + log_provenance(nc_file, plot_file, caption, statistics, domains, \ + plot_types, authors, references, infiles) + + leave_msg(DIAG_SCRIPT, "") +end diff --git a/esmvaltool/diag_scripts/tebaldi21esd/plot_table_changes.ncl b/esmvaltool/diag_scripts/tebaldi21esd/plot_table_changes.ncl new file mode 100644 index 0000000000..60b1e731be --- /dev/null +++ b/esmvaltool/diag_scripts/tebaldi21esd/plot_table_changes.ncl @@ -0,0 +1,175 @@ +; ############################################################################# +; Plot the table of changes between historical and specified scenarios +; across different periods +; Author: Debeire Kevin (DLR, Germany) +; ############################################################################# +; +; Description +; Plots a table of the multi-model mean and spread for specified scenarios +; and periods.
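+; For illustration, a hypothetical recipe could set: +; scenarios: (/"ssp126", "ssp245", "ssp585"/) +; syears: (/"1850", "2021", "2041"/) +; eyears: (/"2014", "2040", "2060"/) +; (the values above are examples only, not defaults).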
+; +; Required diag_script_info attributes (diagnostics specific) +; ancestors: variable and diagnostics used to calculate the values to plot +; scenarios: list of scenarios included in the figure +; syears: list of start years of historical and SSPs scenarios +; eyears: list of end years of historical and SSPs scenarios +; label: list of labels of the scenarios +; begin_ref_year: start year of reference period +; end_ref_year: end year of reference period +; +; Optional diag_script_info attributes (diagnostic specific) +; title: specify title of the table +; +; Modification history +; 20220803-A_debe_kevin: written for ESMValTool +; +; ############################################################################# +; A temporary file written by the invoking Python script +; Passes on a number of variables from Python to NCL +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/latlon.ncl" + +load "$diag_scripts/shared/statistics.ncl" + +load "$diag_scripts/shared/plot/aux_plotting.ncl" +load "$diag_scripts/shared/plot/style.ncl" + +begin + enter_msg(DIAG_SCRIPT, "") + + ; 'datasets', 'variables' are fetched from the above 'interface.ncl' file + var0 = variable_info[0]@short_name + info_items = select_metadata_by_name(input_file_info, var0) + dim_MOD = ListCount(info_items) + dim_VAR = ListCount(variable_info) + dataset_names = metadata_att_as_array(info_items, "dataset") + dataset_exps = metadata_att_as_array(info_items, "exp") + dataset_ensembles = metadata_att_as_array(info_items, "ensemble") + n_MOD = count_unique_values(dataset_names) + MOD_list = get_unique_values(dataset_names) + ; Check required diag_script_info attributes + exit_if_missing_atts(diag_script_info, (/"scenarios", "syears", \ + "eyears", "label"/)) + labels = diag_script_info@label + syears = diag_script_info@syears + eyears = diag_script_info@eyears +end + +begin + ; Output netcdf directory + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) +end +begin + ; Output plot directory + plot_dir = config_user_info@plot_dir + system("mkdir -p " + plot_dir) +end + +begin + if(isvar("MyParam")) then + delete(MyParam) + end if + if(isatt(variable_info[0], "long_name")) then + MyParam = variable_info[0]@long_name + log_debug(" MyParam = " + MyParam) + else + error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \ + "'long_name'") + end if + ncdf_dir = diag_script_info@input_files(1) + load_file = ncdf_dir + "/" + variable_info[0]@diagnostic + ".nc" + to_plot = ncdf_read(load_file, var0) + mean_ = sprintf("%3.2f", decimalPlaces(to_plot(0, :, :), 2, True)) + five_perc_ = sprintf("%3.2f", decimalPlaces(to_plot(1, :, :), 2, True)) + ninetyfive_perc_ = sprintf("%3.2f", decimalPlaces(to_plot(2, :, :), 2, True)) + num_models = tostring(toint(to_plot(3, :, :))) + period_list = to_plot&periods + n_PER = dimsizes(period_list) + n_SCEN = dimsizes(diag_script_info@scenarios) + ; ########################################### + ; *********** PLOTTING ************ + ; Create plot variables + outfile = "table_changes_" + var0 + wks = get_wks(0, DIAG_SCRIPT, outfile) + res = True + res2 = True + res3 = True + res4 = True + res@gsLineThicknessF = 2. + res2@gsLineThicknessF = 2. + res3@gsLineThicknessF = 2.
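+ ; gsn_table draws a block of cells from a rows-by-columns spec (ncr) and + ; the NDC corner coordinates (x, y) of the block; a minimal illustrative + ; call with hypothetical values, + ; gsn_table(wks, (/2, 3/), (/0.1, 0.9/), (/0.4, 0.6/), text, res) + ; would draw a 2x3 grid spanning x = 0.1-0.9 and y = 0.4-0.6 of the page.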
+ ncr = (/n_PER, n_SCEN/) + ncr2 = (/n_PER+1, 1/) + ncr3 = (/1, n_SCEN/) + mean_@_FillValue = "NA" + five_perc_@_FillValue = "NA" + ninetyfive_perc_@_FillValue = "NA" + text = new((/n_PER+1, n_SCEN/), string) + text2 = new((/n_PER+1, 1/), string) + text3 = labels + do i = 0, n_PER-1 + text2(i+1, 0) = period_list(i) + do j = 0, n_SCEN - 1 + text(i, j) = mean_(i, j) + "~C~["+five_perc_(i, j) + ", " + \ + ninetyfive_perc_(i, j) + "]" + \ + "~C~("+num_models(i, j) + ")" + end do + end do + res2@txJust = "CenterCenter" + res@txJust = "CenterCenter" + res3@txJust = "CenterCenter" + + res@txFontHeightF = 0.013 + res2@txFontHeightF = 0.02 + res3@txFontHeightF = 0.02 + + res2@gsFillColor = "gray" + res3@gsFillColor = "gray" + + x = (/0.30, 0.95/) + y = (/0.45, 0.65/) + x2 = (/0.05, 0.30/) + y2 = (/0.45, 0.75/) + x3 = (/0.30, 0.95/) + y3 = (/0.65, 0.75/) + ; Title + x4 = (/0.05, 0.95/) + y4 = (/0.77, 0.79/) + ncr4 = (/1, 1/) + res4@gsLineColor = "transparent" + res4@txFontHeightF = 0.02 + if (isatt(diag_script_info, "title")) then + text4 = diag_script_info@title + else + text4 = "Table of changes for " + var0 + end if + + gsn_table(wks, ncr, x, y, text, res) + gsn_table(wks, ncr2, x2, y2, text2, res2) + gsn_table(wks, ncr3, x3, y3, text3, res3) + gsn_table(wks, ncr4, x4, y4, text4, res4) + draw(wks) + frame(wks) + log_info(" Wrote " + wks@fullname) + + ; Collect meta-data + netcdf_dir = diag_script_info@input_files(1) + datapath = netcdf_dir + "/" + variable_info[0]@diagnostic + ".nc" + nc_file = datapath + caption = "Table of global annual mean " + \ + variable_info[0]@long_name + " anomalies (relative to " + \ + diag_script_info@begin_ref_year + "-" + diag_script_info@end_ref_year + \ + ") from concentration-driven experiments. " + \ + "Projections are shown for each SSP." + statistics = ("mean") + domains = ("global") + plot_types = ("portrait") + authors = (/"debeire_kevin"/) + references = (/"tebaldi21esd"/) + infiles = metadata_att_as_array(info_items, "filename") + log_provenance(nc_file, wks@fullname, caption, statistics, domains, \ + plot_types, authors, references, infiles) + leave_msg(DIAG_SCRIPT, "") +end diff --git a/esmvaltool/diag_scripts/tebaldi21esd/plot_table_warming_level.ncl b/esmvaltool/diag_scripts/tebaldi21esd/plot_table_warming_level.ncl new file mode 100644 index 0000000000..4bd691b6ea --- /dev/null +++ b/esmvaltool/diag_scripts/tebaldi21esd/plot_table_warming_level.ncl @@ -0,0 +1,173 @@ +; ############################################################################# +; Plot the table of warming level crossings, as in Table 1 of Tebaldi et al.,
+; with mean and 5-95 percent confidence intervals +; Author: Debeire Kevin (DLR, Germany) +; ############################################################################# +; +; Description +; Plot a table of warming level crossing years for specified scenarios +; (columns) and warming levels (rows). +; +; Required diag_script_info attributes (diagnostics specific) +; scenarios: list of scenarios included in the figure +; warming_levels: list of warming levels +; syears: list of start years of historical and SSPs scenarios +; eyears: list of end years of historical and SSPs scenarios +; begin_ref_year: start year of reference period +; end_ref_year: end year of reference period +; label: list of labels of the scenarios +; offset: offset between reference baseline and 1850-1900 +; +; Modification history +; 20220803-A_debe_kevin: written for ESMValTool +; +; ############################################################################# +; A temporary file written by the invoking Python script +; Passes on a number of variables from Python to NCL +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/latlon.ncl" + +load "$diag_scripts/shared/statistics.ncl" + +load "$diag_scripts/shared/plot/aux_plotting.ncl" +load "$diag_scripts/shared/plot/style.ncl" + +begin + enter_msg(DIAG_SCRIPT, "") + + ; 'datasets', 'variables' are fetched from the above 'interface.ncl' file + var0 = variable_info[0]@short_name + info_items = select_metadata_by_name(input_file_info, var0) + dim_MOD = ListCount(info_items) + dim_VAR = ListCount(variable_info) + dataset_names = metadata_att_as_array(info_items, "dataset") + dataset_exps = metadata_att_as_array(info_items, "exp") + dataset_ensembles = metadata_att_as_array(info_items, "ensemble") + n_MOD = count_unique_values(dataset_names) + MOD_list = get_unique_values(dataset_names) + labels = diag_script_info@label + ; Check required diag_script_info attributes + exit_if_missing_atts(diag_script_info, (/"scenarios", "syears", \ + "eyears", "warming_levels"/)) + syears = diag_script_info@syears + eyears = diag_script_info@eyears +end + +begin + ; Output netcdf directory + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) +end +begin +; Output plot directory + plot_dir = config_user_info@plot_dir + system("mkdir -p " + plot_dir) +end + +begin + if(isvar("MyParam")) then + delete(MyParam) + end if + if(isatt(variable_info[0], "long_name")) then + MyParam = variable_info[0]@long_name + log_debug(" MyParam = " + MyParam) + else + error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \ + "'long_name'") + end if + ncdf_dir = diag_script_info@input_files(1) + load_file = ncdf_dir + "/" + variable_info[0]@diagnostic + ".nc" + to_plot = ncdf_read(load_file, var0) + mean_ = tostring(to_plot(0, :, :)) + five_perc_ = tostring(to_plot(1, :, :)) + ninetyfive_perc_ = tostring(to_plot(2, :, :)) + num_crossing_models = tostring(to_plot(3, :, :)) + num_models = tostring(to_plot(4, :, :)) + warming_levels = diag_script_info@warming_levels + n_WAR = dimsizes(warming_levels) + n_SCEN = dimsizes(diag_script_info@scenarios) + ; ########################################### + ; *********** PLOTTING ************ + ; Create plot variables + outfile = "warming_level_crossings" + wks = get_wks(0, DIAG_SCRIPT, outfile) + res = True + res2 = True + res3 = True + res4 = True + res@gsLineThicknessF = 2. + res2@gsLineThicknessF = 2. + res3@gsLineThicknessF = 2.
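+ ; Each cell built below stacks three lines with the NCL "~C~" (line + ; break) function code: mean crossing year, percentile interval and + ; number of crossing models out of the total, e.g. with hypothetical + ; values: "2034~C~[2028, 2043]~C~(25/30)"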
+ ncr = (/n_WAR, n_SCEN/) + ncr2 = (/n_WAR+1, 1/) + ncr3 = (/1, n_SCEN/) + mean_@_FillValue = "NA" + five_perc_@_FillValue = "NA" + ninetyfive_perc_@_FillValue = "NA" + text = new((/n_WAR+1, n_SCEN/), string) + text2 = new((/n_WAR+1, 1/), string) + text3 = labels + do i = 0, dimsizes(warming_levels)-1 + text2(i+1, 0) = sprintf("%2.1f", warming_levels(i)) + \ + "~S~o~N~C warming level" + do j = 0, n_SCEN - 1 + text(i, j) = mean_(i, j) + "~C~["+ninetyfive_perc_(i, j) + \ + ", " + five_perc_(i, j) + "]" + \ + "~C~(" + num_crossing_models(i, j) + "/" + \ + num_models(i, j) + ")" + end do + end do + res2@txJust = "CenterCenter" + res@txJust = "CenterCenter" + res3@txJust = "CenterCenter" + + res@txFontHeightF = 0.013 + res2@txFontHeightF = 0.02 + res3@txFontHeightF = 0.02 + + res2@gsFillColor = "gray" + res3@gsFillColor = "gray" + + x = (/0.30, 0.95/) + y = (/0.25, 0.75-0.5/6/) + x2 = (/0.05, 0.30/) + y2 = (/0.25, 0.75/) + x3 = (/0.30, 0.95/) + y3 = (/0.75-0.5/6, 0.75/) + ; Title + x4 = (/0.05, 0.95/) + y4 = (/0.77, 0.79/) + ncr4 = (/1, 1/) + res4@gsLineColor = "transparent" + res4@txFontHeightF = 0.02 + if (isatt(diag_script_info, "title")) then + text4 = diag_script_info@title + else + text4 = "Table of warming level crossings" + end if + + gsn_table(wks, ncr, x, y, text, res) + gsn_table(wks, ncr2, x2, y2, text2, res2) + gsn_table(wks, ncr3, x3, y3, text3, res3) + gsn_table(wks, ncr4, x4, y4, text4, res4) + draw(wks) + frame(wks) + log_info(" Wrote " + wks@fullname) + + ; Collect meta-data + netcdf_dir = diag_script_info@input_files(1) + datapath = netcdf_dir + "/" + variable_info[0]@diagnostic + ".nc" + nc_file = datapath + caption = text4 + statistics = ("mean") + domains = ("global") + plot_types = ("portrait") + authors = (/"debeire_kevin"/) + references = (/"tebaldi21esd"/) + infiles = metadata_att_as_array(info_items, "filename") + log_provenance(nc_file, wks@fullname, caption, statistics, domains, \ + plot_types, authors, references, infiles) + leave_msg(DIAG_SCRIPT, "") + +end diff --git a/esmvaltool/diag_scripts/tebaldi21esd/plot_timeseries_across_realization_stddev_runave.ncl b/esmvaltool/diag_scripts/tebaldi21esd/plot_timeseries_across_realization_stddev_runave.ncl new file mode 100644 index 0000000000..c562115c2c --- /dev/null +++ b/esmvaltool/diag_scripts/tebaldi21esd/plot_timeseries_across_realization_stddev_runave.ncl @@ -0,0 +1,239 @@ +; ############################################################################# +; Plot timeseries of across-realization Std. dev. 
for large ensemble experiment +; Author: Kevin Debeire (DLR, Germany) +; ############################################################################# +; +; Description +; Plot time series of inter-member standard deviation (e.g. SSP3-7.0) +; +; Required diag_script_info attributes (diagnostics specific) +; ancestors: variable and diagnostics that calculated the time series to plot +; scenarios: list of scenarios included in the figure +; syears: list of start years of historical and SSPs scenarios +; eyears: list of end years of historical and SSPs scenarios +; begin_ref_year: start year of reference period +; end_ref_year: end year of reference period +; label: list of labels of the scenarios +; sampled_model: name of the dataset whose ensemble members are sampled +; n_samples: number of random samples of size 10 to draw from the +; ensemble members of sampled_model only +; +; Optional diag_script_info attributes (diagnostic specific) +; trend: whether the trend is calculated and displayed +; runave_window: only used if trend is true, size of the window used for +; the centered running average +; title: specify plot title +; yaxis: specify y-axis title +; ymin: minimum value on y-axis, default calculated from data +; ymax: maximum value on y-axis +; colormap: alternative colormap, path to rgb file or ncl name +; +; Modification history +; 20220803-A_debe_kevin: written for ESMValTool +; +; ############################################################################# +; A temporary file written by the invoking Python script +; Passes on a number of variables from Python to NCL +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" +begin + enter_msg(DIAG_SCRIPT, "") + + ; 'models', 'variables' are fetched from the above 'interface.ncl' file + var0 = variable_info[0]@short_name + info_items = select_metadata_by_name(input_file_info, var0) + dim_MOD = ListCount(info_items) + dim_VAR = ListCount(variable_info) + dataset_names = metadata_att_as_array(info_items, "dataset") + MOD_list = get_unique_values(dataset_names) + n_MOD = dimsizes(MOD_list) + ; Check required diag_script_info attributes + req_atts = (/"syears", "eyears"/) + exit_if_missing_atts(diag_script_info, req_atts) + +end + + +begin + ; Output netcdf directory + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) + + netcdf_dir = diag_script_info@input_files(1) + + ; Output plot directory + plot_dir = config_user_info@plot_dir + system("mkdir -p " + plot_dir) +end + +begin + if(isvar("MyParam")) then + delete(MyParam) + end if + if(isatt(variable_info[0], "long_name")) then + MyParam = variable_info[0]@long_name + log_debug(" MyParam = " + MyParam) + else + error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \ + "'long_name'") + end if + if(isatt(diag_script_info, "runave_window")) then + runave_window = diag_script_info@runave_window + else + runave_window = 1 + end if + ; ########################################### + ; # Get data # + ; ########################################### + datapath = netcdf_dir + "/" + variable_info[0]@diagnostic + ".nc" + to_plot = ncdf_read(datapath, var0) + ; start and end years of time periods + syears = diag_script_info@syears + eyears = diag_script_info@eyears + nperiods = dimsizes(syears) + + dims_data_array = dimsizes(to_plot) + dims_data = dims_data_array(0) + dims_time = dims_data_array(1) + ; ########################################### + ; *********** PLOTTING ************ + ; Create plot variables +
outfile = var0 + "_ts_line_" + syears(0) + "_" + eyears(nperiods - 1) + wks_type = 0 + wks_type@wkPaperSize = "A4" + wks = get_wks(wks_type, DIAG_SCRIPT, outfile) + if(isatt(diag_script_info, "colormap")) then + colormap = RGBtoCmap(diag_script_info@colormap) + else + colormap = "StepSeq25" + end if + gsn_define_colormap(wks, colormap) + + colors_main = (/ 2, 3, 4, 5, 6, 7/) + + res = True + res@gsnMaximize = True + res@gsnPaperOrientation = "portrait" + res@gsnDraw = False + res@gsnFrame = False + res@txFont = 25 + res@vpHeightF = 0.5 ; change aspect ratio of plot + res@vpWidthF = 0.80 + res@vpXF = 0.16 ; start plot at x ndc coord + + caption = "Time series of annual ensembles across-realization" + \ + " standard deviation" + if (isatt(diag_script_info, "title")) then + res@tiMainString = diag_script_info@title + caption = diag_script_info@title + end if + if(isatt(diag_script_info, "yaxis")) then + res@tiYAxisString = diag_script_info@yaxis + end if + res@tiXAxisString = "Year" + + res@xyDashPatterns = 0 + res@trXMinF = toint(syears(0)) + res@trXMaxF = toint(eyears(nperiods - 1)) + if(isatt(diag_script_info, "ymin")) then + res@trYMinF = diag_script_info@ymin + end if + if(isatt(diag_script_info, "ymax")) then + res@trYMaxF = diag_script_info@ymax + end if + + res@tmXBMode = "Explicit" + res@tmXBValues = ispan(toint(syears(0)), toint(eyears(nperiods - 1)), 20) + res@tmXBLabels = ispan(toint(syears(0)), toint(eyears(nperiods - 1)), 20) + res@tmXBMinorValues = ispan(toint(syears(0)), \ + toint(eyears(nperiods - 1)), 10) + + res@xyLineColor = colors_main(0) + res@xyLineThicknesses = 2.7 + res@xyLineOpacityF = 0.8 + ; create labels + labels = new(n_MOD+1, string) + ; create plot + plot = gsn_csm_xy(wks, to_plot&year, to_plot(0, :), res) + labels(0) = MOD_list(0) + delete(res@tiMainString) + + ; display samplings on plot + if(isatt(diag_script_info, "sampled_model")) then + labels(n_MOD) = diag_script_info@sampled_model+" random" + do j = n_MOD, dims_data-1 + res@xyLineThicknesses = 1.8 + res@xyLineColor = colors_main(5) + res@xyLineOpacityF = 0.2 + plot1 = gsn_csm_xy(wks, to_plot&year, to_plot(j, :), res) + overlay(plot, plot1) + end do + res@xyLineOpacityF = 1.0 + end if + + do j = 0, n_MOD-1 + labels(j) = MOD_list(j) + res@xyLineThicknesses = 2.7 + res@xyLineOpacityF = 0.8 + res@xyLineColor = colors_main(j) + plot1 = gsn_csm_xy(wks, to_plot&year, to_plot(j, :), res) + overlay(plot, plot1) + end do + + ; display trend if asked + half_runave_window = round(todouble(runave_window)/2., 3)-1 + if (isatt(diag_script_info, "trend") .and. 
diag_script_info@trend) then + polyres = True + polyres@gsLineThicknessF = res@xyLineThicknesses + dum = new(n_MOD, graphic) + do n = 0, n_MOD-1 + rc = regline(to_plot&year, to_plot(n, :)) + ic = rc@yintercept + y_start = rc*toint(syears(0)) + ic + y_end = rc * toint(eyears(1)) + ic + polyres@gsLineColor = colors_main(n) + dum(n) = gsn_add_polyline(wks, plot, (/toint(syears(0)), \ + toint(eyears(1))/), (/y_start, y_end/), \ + polyres) + end do + end if + + ; delete(res@xyLineColor) + ; Attach a legend + lgres = True + lgres@lgLineColors = colors_main + lgres@lgItemType = "Lines" ; show lines only (default) + lgres@lgLabelFontHeightF = .08 ; legend label font thickness + lgres@vpWidthF = 0.2 ; width of legend (NDC) + lgres@vpHeightF = 0.2 ; height of legend (NDC) + lgres@lgMonoDashIndex = True + lgres@lgDashIndex = 0 + lgres@lgLineThicknessF = res@xyLineThicknesses + lgres@lgPerimOn = False + lgres@lgItemOrder = (/5, 4, 3, 2, 1, 0/) + legend = gsn_create_legend(wks, n_MOD+1, labels, lgres) + amres = True + amres@amJust = "TopRight" + amres@amParallelPosF = 0.46 + amres@amOrthogonalPosF = -0.52 + annoid = gsn_add_annotation(plot, legend, amres) ; add legend to plot + draw(wks) + frame(wks) + + log_info(" Wrote " + wks@fullname) + + ; collect meta-data + nc_file = datapath + statistics = ("stddev") + domains = ("global") + plot_types = ("times") + authors = (/"debeire_kevin"/) + references = (/"tebaldi21esd"/) + infiles = metadata_att_as_array(info_items, "filename") + log_provenance(nc_file, wks@fullname, caption, statistics, domains, \ + plot_types, authors, references, infiles) + + leave_msg(DIAG_SCRIPT, "") +end diff --git a/esmvaltool/diag_scripts/tebaldi21esd/plot_timeseries_mean_spread.ncl b/esmvaltool/diag_scripts/tebaldi21esd/plot_timeseries_mean_spread.ncl new file mode 100644 index 0000000000..780de8a235 --- /dev/null +++ b/esmvaltool/diag_scripts/tebaldi21esd/plot_timeseries_mean_spread.ncl @@ -0,0 +1,281 @@ +; ############################################################################# +; Plot global timeseries for historical and 5 scenarios with only left axis +; Derived and adapted from +; diag_scripts/ipcc_ar5/ch12_plot_ts_line_mean_spread.ncl +; Author: Debeire Kevin (DLR, Germany) +; ############################################################################# +; +; Description +; Plot time series (mean and spread) for 5 scenarios. 
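+; The input netcdf produced by the ancestor diagnostic stores, for the +; historical period and each scenario, three consecutive entries along the +; "metric" dimension: mean, mean + spread * std and mean - spread * std. +; The mean is drawn as a line and the band between the two spread curves +; is shaded.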
+; +; Required diag_script_info attributes (diagnostics specific) +; ancestors: variable and diagnostics that calculated the field to be plotted +; scenarios: list of scenarios included in the figure +; syears: list of start years of historical and SSPs scenarios +; eyears: list of end years of historical and SSPs scenarios +; begin_ref_year: start year of reference period +; end_ref_year: end year of reference period +; label: list of labels of the scenarios +; +; Optional diag_script_info attributes (diagnostic specific) +; title: specify plot title +; yaxis: specify y-axis title +; ymin: minimum value on y-axis, default calculated from data +; ymax: maximum value on y-axis +; colormap: alternative colormap, path to rgb file or ncl name +; model_nr: save number of model runs per period and scenario +; styleset: color style +; spread: how many standard deviations to calculate the spread with, +; default is 1, IPCC tas uses 1.64 +; +; Modification history +; 20220803-A_debe_kevin: written for ESMValTool +; +; ############################################################################# +; A temporary file written by the invoking Python script +; Passes on a number of variables from Python to NCL +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" +begin + enter_msg(DIAG_SCRIPT, "") + + ; 'models', 'variables' are fetched from the above 'interface.ncl' file + var0 = variable_info[0]@short_name + info_items = select_metadata_by_name(input_file_info, var0) + dim_MOD = ListCount(info_items) + dim_VAR = ListCount(variable_info) + + ; Check required diag_script_info attributes + req_atts = (/"syears", "eyears"/) + exit_if_missing_atts(diag_script_info, req_atts) + +end + + +begin + ; Output netcdf directory + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) + + netcdf_dir = diag_script_info@input_files(1) + + ; Output plot directory + plot_dir = config_user_info@plot_dir + system("mkdir -p " + plot_dir) +end + +begin + ; ############################################# + ; # Get parameters from ./variable_defs/*.ncl # + ; # passed via the 'info' attribute # + ; ############################################# + if(isvar("MyParam")) then + delete(MyParam) + end if + if(isatt(variable_info[0], "long_name")) then + MyParam = variable_info[0]@long_name + log_debug(" MyParam = " + MyParam) + else + error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \ + "'long_name'") + end if + + ; ########################################### + ; # Get data # + ; ########################################### + datapath = netcdf_dir + "/" + variable_info[0]@diagnostic + ".nc" + to_plot = ncdf_read(datapath, var0) + if(isatt(diag_script_info, "model_nr") .and. \ + diag_script_info@model_nr .eq.
True) then + datapath2 = netcdf_dir + "/nr_runs_" + variable_info[0]@diagnostic + ".nc" + runs = ncdf_read(datapath2, "nr") + end if + ; start and end years of time periods + syears = diag_script_info@syears + eyears = diag_script_info@eyears + nperiods = dimsizes(syears) + + dims_data = (dimsizes(to_plot&metric) - 1) / 3 + + ; ########################################### + ; *********** PLOTTING ************ + ; Create plot variables + outfile = var0 + "_ts_line_" + syears(0) + "_" + eyears(nperiods - 1) + wks = get_wks(0, DIAG_SCRIPT, outfile) + if(isatt(diag_script_info, "colormap")) then + colormap = RGBtoCmap(diag_script_info@colormap) + else + colormap = "StepSeq25" + end if + gsn_define_colormap(wks, colormap) + + colors_main = (/1, 3, 4, 5, 6, 7/) + colors_spread = (/2, 3, 4, 5, 6, 7/) + + res = True + + res@gsnDraw = False + res@gsnFrame = False + res@txFont = 25 + res@vpHeightF = 0.5 ; change aspect ratio of plot + res@vpWidthF = 0.70 + res@vpXF = 0.15 ; start plot at x ndc coord + + caption = "" + if (isatt(diag_script_info, "title")) then + caption = diag_script_info@title + res@tiMainString = diag_script_info@title + res@tiMainFontHeightF = 0.04 + end if + if(isatt(diag_script_info, "yaxis")) then + res@tiYAxisString = diag_script_info@yaxis + end if + res@tiXAxisString = "Year" + + res@xyDashPatterns = 0 + res@trXMinF = toint(syears(0)) + res@trXMaxF = toint(eyears(nperiods - 1)) + if(isatt(diag_script_info, "ymin")) then + res@trYMinF = diag_script_info@ymin + end if + if(isatt(diag_script_info, "ymax")) then + res@trYMaxF = diag_script_info@ymax + end if + res@tmXBMode = "Explicit" + res@tmXBValues = ispan(toint(syears(0)), toint(eyears(nperiods - 1)), 50) + res@tmXBLabels = ispan(toint(syears(0)), toint(eyears(nperiods - 1)), 50) + res@tmXBMinorValues = ispan(toint(syears(0)), \ + toint(eyears(nperiods - 1)), 10) + + res@xyLineThicknesses = 3.5 + res@xyLineColor = colors_main(0) + res@tmYROn = True + res@tmYRBorderOn = False + ; create plot + plot = gsn_csm_xy(wks, to_plot&year, to_plot(0, :), res) + + res@gsnXYFillColors = colors_spread(0) + res@xyLineColor = -1 + spread_plot = gsn_csm_xy(wks, to_plot&year, to_plot(1 : 2, :), res) + overlay(plot, spread_plot) + + delete(res@gsnXYFillColors) + delete(res@tiMainString) + do j = 1, dims_data + if(all(ismissing(to_plot(j * 3, :)))) then + error_msg("w", DIAG_SCRIPT, "", "all missing values for " + \ + "dimension " + j * 3 + " in to_plot, need at least 5" + \ + " models, go to next") + continue + end if + res@xyLineColor = colors_main(j) + plot1 = gsn_csm_xy(wks, to_plot&year, to_plot(j * 3, :), res) + overlay(plot, plot1) + + res@gsnXYFillColors = colors_spread(j) + res@gsnXYFillOpacities = (/0.1, 0.1, 0.1/) + res@xyLineColor = -1 + spread_plot = gsn_csm_xy(wks, to_plot&year, \ + to_plot((j * 3) + 1 : (j * 3) + 2, :), res) + overlay(plot, spread_plot) + delete(res@gsnXYFillColors) + delete(res@gsnXYFillOpacities) + end do + + polyres = True + polyres@gsLineThicknessF = 1.5 + ; add polylines + dum = new(nperiods, graphic) + do n = 0, nperiods - 1 + dum(n) = gsn_add_polyline(wks, plot, (/toint(eyears(n)), \ + toint(eyears(n))/), (/-20, 20/), polyres) + end do + delete(res@xyLineColor) + ; Attach a legend + lgres = True + lgres@lgLineColors = colors_main ; (/1, 3, 4, 5, 6, 7/) + lgres@lgItemType = "Lines" ; show lines only (default) + lgres@lgLabelFontHeightF = .08 ; legend label font height + lgres@vpWidthF = 0.2 ; width of legend (NDC) + lgres@vpHeightF = 0.2 ; height of legend (NDC) + lgres@lgMonoDashIndex = True + lgres@lgDashIndex = 0 + lgres@lgLineThicknessF = 5 + lgres@lgPerimOn = False + lgres@lgItemOrder = (/5, 4, 3, 2, 1, 0/) + if (isatt(diag_script_info, "label")) then + labels = " " + diag_script_info@label + legend = gsn_create_legend(wks, 6, labels, lgres) + amres = True + amres@amJust = "TopLeft" ; Use top left corner of box + ; for determining its location. + amres@amParallelPosF = -0.46 ; Move legend to right + if (var0 .ne. "snw") then + amres@amParallelPosF = -0.45 ; Move legend to right + amres@amOrthogonalPosF = -0.45 ; Move legend up. + else + amres@amParallelPosF = -0.47 ; Move legend to right + amres@amOrthogonalPosF = 0.12 ; Move legend down. + end if + annoid = gsn_add_annotation(plot, legend, amres) ; add legend to plot + end if + ; Display number of models used in the legend + if(isatt(diag_script_info, "model_nr")) then + getvalues plot + "tmYLLabelFontHeightF" : fheight + "trYMinF" : trYMinF + "trYMaxF" : trYMaxF + end getvalues + text = new(1 + (dims_data - 1) * nperiods, graphic) + txres = True + txres@txFontHeightF = fheight * 0.8 + label = tostring(runs(0, 0)) + xloc = 1938 + yloc = trYMaxF - (trYMaxF+trYMinF)/4.5 + ydiff = (trYMaxF - trYMinF) / 15.9 + if yloc .le.
0 then + ydiff = - ydiff + end if + text(0) = gsn_add_text(wks, plot, label, xloc, yloc, txres) + i = 1 + do nscen = 1, dims_data + do nper = 1, nperiods - 1 + txres@txFontColor = colors_main(nscen) + label = tostring(runs(nscen, nper)) + text(i) = gsn_add_text(wks, plot, label, xloc, \ + yloc - nscen * ydiff, txres) + i = i + 1 + end do + end do + end if + draw(wks) + frame(wks) + + log_info(" Wrote " + wks@fullname) + + ; collect meta-data + nc_file = datapath + + if (isatt(diag_script_info, "spread")) then + spread_str = diag_script_info@spread + else + spread_str = "1.0" + end if + statistics = ("mean") + domains = ("global") + plot_types = ("times") + authors = (/"debeire_kevin"/) + references = (/"tebaldi21esd"/) + infiles = metadata_att_as_array(info_items, "filename") + log_provenance(nc_file, wks@fullname, caption, statistics, domains, \ + plot_types, authors, references, infiles) + + leave_msg(DIAG_SCRIPT, "") +end diff --git a/esmvaltool/diag_scripts/tebaldi21esd/plot_timeseries_mean_spread_3scenarios.ncl b/esmvaltool/diag_scripts/tebaldi21esd/plot_timeseries_mean_spread_3scenarios.ncl new file mode 100644 index 0000000000..6311d01782 --- /dev/null +++ b/esmvaltool/diag_scripts/tebaldi21esd/plot_timeseries_mean_spread_3scenarios.ncl @@ -0,0 +1,277 @@ +; ############################################################################# +; Plot time series for historical and 3 scenarios +; Derived and adapted from +; diag_scripts/ipcc_ar5/ch12_plot_ts_line_mean_spread.ncl +; Author: Debeire Kevin (DLR, Germany) +; ############################################################################# +; +; Description +; Plot time series (mean and spread) for 3 scenarios and historical period +; +; Required diag_script_info attributes (diagnostics specific) +; ancestors: variable and diagnostics that calculated the time series to plot +; scenarios: list of scenarios included in the figure +; syears: list of start years of historical and SSPs scenarios +; eyears: list of end years of historical and SSPs scenarios +; begin_ref_year: start year of reference period +; end_ref_year: end year of reference period +; label: list of labels of the scenarios +; +; Optional diag_script_info attributes (diagnostic specific) +; title: specify plot title +; yaxis: specify y-axis title +; ymin: minimum value on y-axis, default calculated from data +; ymax: maximum value on y-axis +; colormap: alternative colormap, path to rgb file or ncl name +; model_nr: save number of model runs per period and scenario +; styleset: color style +; spread: how many standard deviations to calculate the spread with, +; default is 1, IPCC tas uses 1.64 +; +; Modification history +; 20220803-A_debe_kevin: written for ESMValTool +; +; ############################################################################# +; A temporary file written by the invoking Python script +; Passes on a number of variables from Python to NCL +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" +begin + enter_msg(DIAG_SCRIPT, "") + + ; 'models', 'variables' are fetched from the above 'interface.ncl' file + var0 = variable_info[0]@short_name + info_items = select_metadata_by_name(input_file_info, var0) + dim_MOD = ListCount(info_items) + dim_VAR = ListCount(variable_info) + + ; Check required diag_script_info attributes + req_atts = (/"syears", "eyears"/) + exit_if_missing_atts(diag_script_info, req_atts) + +end + + +begin + ; Output netcdf directory + work_dir =
config_user_info@work_dir + system("mkdir -p " + work_dir) + + netcdf_dir = diag_script_info@input_files(1) + + ; Output netcdf directory + plot_dir = config_user_info@plot_dir + system("mkdir -p " + plot_dir) +end + +begin + ; ############################################# + ; # Get parameters from ./variable_defs/*.ncl # + ; # passed via the 'info' attribute # + ; ############################################# + if(isvar("MyParam")) then + delete(MyParam) + end if + if(isatt(variable_info[0], "long_name")) then + MyParam = variable_info[0]@long_name + log_debug(" MyParam = " + MyParam) + else + error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \ + "'long_name'") + end if + + ; ########################################### + ; # Get data # + ; ########################################### + datapath = netcdf_dir + "/" + variable_info[0]@diagnostic + ".nc" + to_plot = ncdf_read(datapath, var0) + if(isatt(diag_script_info, "model_nr") .and. \ + diag_script_info@model_nr .eq. True) then + datapath2 = netcdf_dir + "/nr_runs_" + variable_info[0]@diagnostic + ".nc" + runs = ncdf_read(datapath2, "nr") + end if + ; start and end years of timeperiods + syears = diag_script_info@syears + eyears = diag_script_info@eyears + nperiods = dimsizes(syears) + + dims_data = (dimsizes(to_plot&metric) - 1) / 3 + + ; ########################################### + ; *********** PLOTTING ************ + ; Create plot variables + outfile = var0 + "_ts_line_" + syears(0) + "_" + eyears(nperiods - 1) + wks = get_wks(0, DIAG_SCRIPT, outfile) + if(isatt(diag_script_info, "colormap")) then + colormap = RGBtoCmap(diag_script_info@colormap) + else + colormap = "StepSeq25" + end if + gsn_define_colormap(wks, colormap) + + colors_main = (/1, 4, 5, 7/) + colors_spread = (/2, 4, 5, 7/) + + res = True + + res@gsnDraw = False + res@gsnFrame = False + res@txFont = 22 + res@txFontHeightF = 1.8 + res@vpHeightF = 0.5 ; change aspect ratio of plot + res@vpWidthF = 0.8 + res@vpXF = 0.15 ; start plot at x ndc coord + caption = "" + if (isatt(diag_script_info, "title")) then + res@tiMainString = diag_script_info@title + caption = diag_script_info@title + res@tiMainFontHeightF = 0.04 + end if + if(isatt(diag_script_info, "yaxis")) then + res@tiYAxisString = diag_script_info@yaxis + end if + res@tiXAxisString = "Year" + + res@xyDashPatterns = 0 + res@trXMinF = toint(syears(0)) + res@trXMaxF = toint(eyears(nperiods - 1)) + if(isatt(diag_script_info, "ymin")) then + res@trYMinF = diag_script_info@ymin + end if + if(isatt(diag_script_info, "ymax")) then + res@trYMaxF = diag_script_info@ymax + end if + + res@tmXBMode = "Explicit" + res@tmXBValues = ispan(toint(syears(0)), toint(eyears(nperiods - 1)), 50) + res@tmXBLabels = ispan(toint(syears(0)), toint(eyears(nperiods - 1)), 50) + res@tmXBMinorValues = ispan(toint(syears(0)), \ + toint(eyears(nperiods - 1)), 10) + + res@xyLineThicknesses = 3.5 + res@xyLineColor = colors_main(0) + + ; create plot + plot = gsn_csm_xy(wks, to_plot&year, to_plot(0, :), res) + + res@gsnXYFillColors = colors_spread(0) + res@xyLineColor = -1 + spread_plot = gsn_csm_xy(wks, to_plot&year, to_plot(1 : 2, :), res) + overlay(plot, spread_plot) + + delete(res@gsnXYFillColors) + + do j = 1, dims_data + if(all(ismissing(to_plot(j * 3, :)))) then + error_msg("w", DIAG_SCRIPT, "", "all missing values for " + \ + "dimension " + j * 3 + " in to_plot, need at least 5" + \ + " models, go to next") + continue + end if + res@xyLineColor = colors_main(j) + plot1 = gsn_csm_xy(wks, to_plot&year, to_plot(j * 3, :), res) + 
overlay(plot, plot1) + + res@gsnXYFillColors = colors_spread(j) + res@gsnXYFillOpacities = (/0.2, 0.2, 0.2/) + res@xyLineColor = -1 + spread_plot = gsn_csm_xy(wks, to_plot&year, \ + to_plot((j * 3) + 1 : (j * 3) + 2, :), res) + overlay(plot, spread_plot) + delete(res@gsnXYFillColors) + delete(res@gsnXYFillOpacities) + end do + + polyres = True + polyres@gsLineThicknessF = 1.5 + ; add polylines + dum = new(nperiods, graphic) + do n = 0, nperiods - 1 + dum(n) = gsn_add_polyline(wks, plot, (/toint(eyears(n)), \ + toint(eyears(n))/), (/-20, 20/), polyres) + end do + delete(res@xyLineColor) + + ; Attach a legend + lgres = True + lgres@lgLineColors = colors_main ; (/1, 3, 4, 5/) + lgres@lgItemType = "Lines" ; show lines only (default) + lgres@lgLabelFontHeightF = .08 ; legend label font thickness + lgres@vpWidthF = 0.2 ; width of legend (NDC) + lgres@vpHeightF = 0.2 ; height of legend (NDC) + lgres@lgMonoDashIndex = True + lgres@lgDashIndex = 0 + lgres@lgLineThicknessF = 5 + lgres@lgPerimOn = False + lgres@lgItemOrder = (/3, 2, 1, 0/) + if (isatt(diag_script_info, "label")) then + labels = " " + diag_script_info@label + legend = gsn_create_legend(wks, 4, labels, lgres) + amres = True + amres@amJust = "TopLeft" ; Use bottom right corner of box + ; for determining its location. + amres@amParallelPosF = -0.46 ; Move legend to right + if (var0 .ne. "snw") then + amres@amParallelPosF = -0.45 ; Move legend to right + amres@amOrthogonalPosF = -0.45 ; Move legend up. + else + amres@amParallelPosF = -0.47 ; Move legend to right + amres@amOrthogonalPosF = 0.12 ; Move legend down. + end if + annoid = gsn_add_annotation(plot, legend, amres) ; add legend to plot + end if + ; Display the number of models used in the legend + if(isatt(diag_script_info, "model_nr")) then + getvalues plot + "tmYLLabelFontHeightF" : fheight + "trYMinF" : trYMinF + "trYMaxF" : trYMaxF + end getvalues + text = new(1 + (dims_data - 1) * nperiods, graphic) + txres = True + txres@txFontHeightF = fheight * 0.8 + label = tostring(runs(0, 0)) + xloc = 1938 + yloc = trYMaxF - (trYMaxF-trYMinF)/9 + ydiff = (trYMaxF - trYMinF) / 10.9 + if yloc .le. 
0 then
+      ydiff = - ydiff
+    end if
+    text(0) = gsn_add_text(wks, plot, label, xloc, yloc, txres)
+    i = 1
+    do nscen = 1, dims_data
+      do nper = 1, nperiods - 1
+        txres@txFontColor = colors_main(nscen)
+        label = tostring(runs(nscen, nper))
+        text(i) = gsn_add_text(wks, plot, label, xloc, \
+                               yloc - nscen * ydiff, txres)
+        i = i + 1
+      end do
+    end do
+  end if
+  draw(wks)
+  frame(wks)
+
+  log_info(" Wrote " + wks@fullname)
+
+  ; collect meta-data
+  nc_file = datapath
+
+  if (isatt(diag_script_info, "spread")) then
+    spread_str = diag_script_info@spread
+  else
+    spread_str = "1.0"
+  end if
+  statistics = ("mean")
+  domains = ("global")
+  plot_types = ("times")
+  authors = (/"debeire_kevin"/)
+  references = (/"tebaldi21esd"/)
+  infiles = metadata_att_as_array(info_items, "filename")
+  log_provenance(nc_file, wks@fullname, caption, statistics, domains, \
+                 plot_types, authors, references, infiles)
+
+  leave_msg(DIAG_SCRIPT, "")
+end
diff --git a/esmvaltool/diag_scripts/tebaldi21esd/plot_timeseries_mean_spread_constrained_projections.ncl b/esmvaltool/diag_scripts/tebaldi21esd/plot_timeseries_mean_spread_constrained_projections.ncl
new file mode 100644
index 0000000000..0254d53e03
--- /dev/null
+++ b/esmvaltool/diag_scripts/tebaldi21esd/plot_timeseries_mean_spread_constrained_projections.ncl
@@ -0,0 +1,350 @@
+; #############################################################################
+; Plot global timeseries for historical and SSP with constrained projections
+; in 2100
+; Derived and adapted from
+; diag_scripts/ipcc_ar5/ch12_plot_ts_line_mean_spread.ncl
+; Author: Kevin Debeire (DLR, Germany)
+; #############################################################################
+;
+; Description
+;   Plot global timeseries of TAS and bar plots of constrained projections
+;   from specified constrained values
+;
+; Required diag_script_info attributes (diagnostics specific)
+;   ancestors: variable and diagnostics that calculated timeseries to plot
+;   scenarios: list of scenarios included in the figure
+;   syears: list of start years of historical and SSPs scenarios
+;   eyears: list of end years of historical and SSPs scenarios
+;   begin_ref_year: start year of reference period
+;   end_ref_year: end year of reference period
+;   label: list of labels of the scenarios
+;   baseline_offset: offset between reference period (baseline) and 1850-1900
+;   lower_constrained_projections: list of lower bounds of the constrained
+;                                  projections for the scenarios included in
+;                                  the same order as the scenarios
+;   upper_constrained_projections: list of upper bounds of the constrained
+;                                  projections for the scenarios included in
+;                                  the same order as the scenarios
+;   mean_constrained_projections: list of means of the constrained projection
+;                                 of the scenarios included in the same order
+;                                 as the scenarios
+;
+; Optional diag_script_info attributes (diagnostic specific)
+;   title: specify plot title
+;   yaxis: specify y-axis title
+;   ymin: minimum value on y-axis, default calculated from data
+;   ymax: maximum value on y-axis
+;   colormap: alternative colormap, path to rgb file or ncl name
+;   model_nr: save number of model runs per period and scenario
+;   styleset: color style
+;   spread: how many standard deviations to calculate the spread with,
+;           default is 1, ipcc tas is 1.64
+;
+; Modification history
+;   20220803-A_debe_kevin: written for ESMValTool
+;
+; #############################################################################
+; A temporary file written by the invoking Python script
+; Passes on a number of variables from 
Python to NCL +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" +begin + enter_msg(DIAG_SCRIPT, "") + + ; 'models', 'variables' are fetched from the above 'interface.ncl' file + var0 = variable_info[0]@short_name + info_items = select_metadata_by_name(input_file_info, var0) + dim_MOD = ListCount(info_items) + dim_VAR = ListCount(variable_info) + + ; Check required diag_script_info attributes + req_atts = (/"syears", "eyears"/) + exit_if_missing_atts(diag_script_info, req_atts) + +end + + +begin + ; Output netcdf directory + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) + + netcdf_dir = diag_script_info@input_files(1) + + ; Output netcdf directory + plot_dir = config_user_info@plot_dir + system("mkdir -p " + plot_dir) +end +begin + if(isvar("MyParam")) then + delete(MyParam) + end if + if(isatt(variable_info[0], "long_name")) then + MyParam = variable_info[0]@long_name + log_debug(" MyParam = " + MyParam) + else + error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \ + "'long_name'") + end if + + ; ########################################### + ; # Get data # + ; ########################################### + datapath = netcdf_dir + "/" + variable_info[0]@diagnostic + ".nc" + to_plot = ncdf_read(datapath, var0) + if(isatt(diag_script_info, "model_nr") .and. \ + diag_script_info@model_nr .eq. True) then + datapath2 = netcdf_dir + "/nr_runs_" + variable_info[0]@diagnostic + ".nc" + runs = ncdf_read(datapath2, "nr") + end if + ; start and end years of timeperiods + syears = diag_script_info@syears + eyears = diag_script_info@eyears + nperiods = dimsizes(syears) + + dims_data = (dimsizes(to_plot&metric) - 1) / 3 + + ; Constrained projections lower bound, mean and upper bound + constrained_projections_2100 = new((/dims_data, 4/), float) + + lower_bound = diag_script_info@lower_constrained_projections + upper_bound = diag_script_info@upper_constrained_projections + mean_bound = diag_script_info@mean_constrained_projections + + do nscen = 0, dims_data - 1 + constrained_projections_2100(nscen, :) = (/2100, lower_bound(nscen), \ + mean_bound(nscen), \ + upper_bound(nscen) /) + end do + + ; ########################################### + ; *********** PLOTTING ************ + ; Create plot variables + outfile = var0 + "_ts_line_" + syears(0) + "_" + eyears(nperiods - 1) + wks = get_wks(0, DIAG_SCRIPT, outfile) + if(isatt(diag_script_info, "colormap")) then + colormap = RGBtoCmap(diag_script_info@colormap) + else + colormap = "StepSeq25" + end if + gsn_define_colormap(wks, colormap) + + colors_main = (/1, 4, 5, 7, 8/) + colors_spread = (/2, 4, 5, 7, 8/) + + res = True + res@gsnDraw = False + res@gsnFrame = False + res@txFont = 22 + res@txFontHeightF = 1.8 + res@vpHeightF = 0.5 ; change aspect ratio of plot + res@vpWidthF = 0.7 + res@vpXF = 0.15 ; start plot at x ndc coord + caption = "" + if (isatt(diag_script_info, "title")) then + res@tiMainString = diag_script_info@title + caption = diag_script_info@title + res@tiMainFontHeightF = 0.04 + end if + if(isatt(diag_script_info, "yaxis")) then + res@tiYAxisString = diag_script_info@yaxis + end if + res@tiXAxisString = "Year" + + res@xyDashPatterns = 0 + res@trXMinF = toint(syears(0)) + res@trXMaxF = toint(eyears(nperiods - 1)) + if(isatt(diag_script_info, "ymin")) then + res@trYMinF = diag_script_info@ymin + end if + if(isatt(diag_script_info, "ymax")) then + res@trYMaxF = diag_script_info@ymax + end if + + res@tmXBMode = 
"Explicit" + res@tmXBValues = ispan(toint(syears(0)), toint(eyears(nperiods - 1)), 50) + res@tmXBLabels = ispan(toint(syears(0)), toint(eyears(nperiods - 1)), 50) + res@tmXBMinorValues = ispan(toint(syears(0)), \ + toint(eyears(nperiods - 1)), 10) + res@xyLineThicknesses = 3.5 + res@xyLineColor = colors_main(0) + res@tmYROn = False + res@tmYRBorderOn = False + ; create second axis + diff_avg = diag_script_info@baseline_offset + res2 = True + res2@xyLineThicknesses = 0.001 + res2@xyLineColor = -1 + res2@tiYAxisString = "Relative to 1850-1900 (~S~o~N~ C)" + if(isatt(diag_script_info, "ymin")) then + res2@trYMinF = diff_avg + res@trYMinF + end if + if(isatt(diag_script_info, "ymax")) then + res2@trYMaxF = res@trYMaxF+diff_avg + end if + + ; create plot + plot = gsn_csm_xy(wks, to_plot&year, to_plot(0, :), res) + + res@gsnXYFillColors = colors_spread(0) + res@xyLineColor = -1 + spread_plot = gsn_csm_xy(wks, to_plot&year, to_plot(1 : 2, :), res) + overlay(plot, spread_plot) + + delete(res@gsnXYFillColors) + delete(res@tiMainString) + do j = 1, dims_data + if(all(ismissing(to_plot(j * 3, :)))) then + error_msg("w", DIAG_SCRIPT, "", "all missing values for " + \ + "dimension " + j * 3 + " in to_plot, need at least 5" + \ + " models, go to next") + continue + end if + res@xyLineOpacities = 1 + res@xyLineColor = colors_main(j) + plot1 = gsn_csm_xy(wks, to_plot&year, to_plot(j * 3, :), res) + overlay(plot, plot1) + + res@gsnXYFillColors = colors_spread(j) + res@gsnXYFillOpacities = (/0.1, 0.1, 0.1/) + res@xyLineColor = -1 + if (j .eq. 1) then + spread_plot = gsn_csm_xy2(wks, to_plot&year, to_plot((j * 3) + 1 \ + : (j * 3) + 2, :), to_plot((j * 3) + 1 \ + : (j * 3) + 2, :) * 0, res, res2) + else + spread_plot = gsn_csm_xy(wks, to_plot&year, \ + to_plot((j * 3) + 1 : (j * 3) + 2, :), res) + end if + overlay(plot, spread_plot) + delete(res@gsnXYFillColors) + delete(res@gsnXYFillOpacities) + end do + ; plot a plyline at the start of scenario + polyres = True + polyres@gsLineThicknessF = 1.5 + ; add polylines + dum = new(nperiods, graphic) + do n = 0, nperiods - 1 + gsn_polyline(wks, plot, (/toint(eyears(n)), \ + toint(eyears(n))/), (/-20, 20/), polyres) + end do + delete(res@xyLineColor) + delete(dum) + ; plot a polyline indicating constrained projection + polyres2 = True + polyres2@gsLineThicknessF = 7 + polyres2@gsLineOpacityF = 0.9 + polyres2@gsLineColor = colors_main(3) + dumm = new(dims_data*4, graphic) + + do idx = 0, dims_data - 1 + polyres2@gsLineColor = colors_main(idx+1) + dumm(idx*4) = \ + gsn_add_polyline(wks, plot, (/toint(eyears(nperiods - 1)-10), \ + toint(eyears(nperiods - 1))-10/), \ + (/constrained_projections_2100(idx, 1), \ + constrained_projections_2100(idx, 3)/), \ + polyres2) + dumm(idx*4+1) = \ + gsn_add_polyline(wks, plot, (/toint(eyears(nperiods - 1)), \ + toint(eyears(nperiods - 1)-19)/), \ + (/constrained_projections_2100(idx, 2), \ + constrained_projections_2100(idx, 2)/), \ + polyres2) + dumm(idx*4+2) = \ + gsn_add_polyline(wks, plot, (/toint(eyears(nperiods - 1)), \ + toint(eyears(nperiods - 1)-19)/), \ + (/constrained_projections_2100(idx, 3), \ + constrained_projections_2100(idx, 3)/), \ + polyres2) + dumm(idx*4+3) = \ + gsn_add_polyline(wks, plot, (/toint(eyears(nperiods - 1)), \ + toint(eyears(nperiods - 1)-19)/), \ + (/constrained_projections_2100(idx, 1), \ + constrained_projections_2100(idx, 1)/), \ + polyres2) + end do + + ; Attach a legend + lgres = True + lgres@lgLineColors = colors_main ; (/1, 3, 4, 5/) + lgres@lgItemType = "Lines" ; show lines only (default) + 
lgres@lgLabelFontHeightF = .08 ; legend label font thickness + lgres@vpWidthF = 0.2 ; width of legend (NDC) + lgres@vpHeightF = 0.2 ; height of legend (NDC) + lgres@lgMonoDashIndex = True + lgres@lgDashIndex = 0 + lgres@lgLineThicknessF = 5 + lgres@lgPerimOn = False + lgres@lgItemOrder = (/3, 2, 1, 0/) + if (isatt(diag_script_info, "label")) then + labels = " " + diag_script_info@label + legend = gsn_create_legend(wks, 4, labels, lgres) + amres = True + amres@amJust = "TopLeft" ; Use bottom right corner of box + ; for determining its location. + amres@amParallelPosF = -0.46 ; Move legend to right + if (var0 .ne. "snw") then + amres@amParallelPosF = -0.45 ; Move legend to right + amres@amOrthogonalPosF = -0.45 ; Move legend up. + else + amres@amParallelPosF = -0.47 ; Move legend to right + amres@amOrthogonalPosF = 0.12 ; Move legend down. + end if + annoid = gsn_add_annotation(plot, legend, amres) ; add legend to plot + end if + if(isatt(diag_script_info, "model_nr")) then + getvalues plot + "tmYLLabelFontHeightF" : fheight + "trYMinF" : trYMinF + "trYMaxF" : trYMaxF + end getvalues + text = new(1 + (dims_data - 1) * nperiods, graphic) + txres = True + txres@txFontHeightF = fheight * 0.8 + label = tostring(runs(0, 0)) + xloc = 1938 + yloc = trYMaxF - (trYMaxF-trYMinF)/9 + ydiff = (trYMaxF - trYMinF) / 10.9 + if yloc .le. 0 then + ydiff = - ydiff + end if + text(0) = gsn_add_text(wks, plot, label, xloc, yloc, txres) + i = 1 + do nscen = 1, dims_data + do nper = 1, nperiods - 1 + txres@txFontColor = colors_main(nscen) + label = tostring(runs(nscen, nper)) + text(i) = gsn_add_text(wks, plot, label, xloc, \ + yloc - nscen * ydiff, txres) + i = i + 1 + end do + end do + end if + draw(wks) + frame(wks) + + log_info(" Wrote " + wks@fullname) + + ; collect meta-data + nc_file = datapath + + if (isatt(diag_script_info, "spread")) then + spread_str = diag_script_info@spread + else + spread_str = "1.0" + end if + statistics = ("mean") + domains = ("global") + plot_types = ("times") + authors = (/"debeire_kevin"/) + references = (/"tebaldi21esd"/) + infiles = metadata_att_as_array(info_items, "filename") + log_provenance(nc_file, wks@fullname, caption, statistics, domains, \ + plot_types, authors, references, infiles) + + leave_msg(DIAG_SCRIPT, "") +end diff --git a/esmvaltool/diag_scripts/tebaldi21esd/plot_timeseries_mean_spread_rightaxis_5scen.ncl b/esmvaltool/diag_scripts/tebaldi21esd/plot_timeseries_mean_spread_rightaxis_5scen.ncl new file mode 100644 index 0000000000..72990171bc --- /dev/null +++ b/esmvaltool/diag_scripts/tebaldi21esd/plot_timeseries_mean_spread_rightaxis_5scen.ncl @@ -0,0 +1,295 @@ +; ############################################################################# +; Plot global timeseries for historical and 5 scenarios between 1850-2100 +; with left and right axis for two different reference periods +; Derived and adapted from +; diag_scripts/ipcc_ar5/ch12_plot_ts_line_mean_spread.ncl +; Author: Debeire Kevin (DLR, Germany) +; ############################################################################# +; +; Description +; Plot time series (mean and spread) for 5 scenarios and historical period +; with left and right axis with an offset +; +; Required diag_script_info attributes (diagnostics specific) +; ancestors: variable and diagnostics that calculated timeseries to plot +; scenarios: list of scenarios included in the figure +; syears: list of start years of historical and SSPs scenarios +; eyears: list of end years of historical and SSPs scenarios +; begin_ref_year: start year 
of reference period
+;   end_ref_year: end year of reference period
+;   label: list of labels of the scenarios
+;   rightaxis_offset: offset of the right axis relative to the left axis
+;
+; Optional diag_script_info attributes (diagnostic specific)
+;   title: specify plot title
+;   yaxis: specify y-axis title
+;   ymin: minimum value on y-axis, default calculated from data
+;   ymax: maximum value on y-axis
+;   colormap: alternative colormap, path to rgb file or ncl name
+;   model_nr: save number of model runs per period and scenario
+;   styleset: color style
+;   spread: how many standard deviations to calculate the spread with,
+;           default is 1, ipcc tas is 1.64
+;
+; Modification history
+;   20220803-A_debe_kevin: written for ESMValTool
+;
+; #############################################################################
+; A temporary file written by the invoking Python script
+; Passes on a number of variables from Python to NCL
+load "$diag_scripts/../interface_scripts/interface.ncl"
+
+load "$diag_scripts/shared/plot/style.ncl"
+load "$diag_scripts/shared/plot/aux_plotting.ncl"
+begin
+  enter_msg(DIAG_SCRIPT, "")
+
+  ; 'models', 'variables' are fetched from the above 'interface.ncl' file
+  var0 = variable_info[0]@short_name
+  info_items = select_metadata_by_name(input_file_info, var0)
+  dim_MOD = ListCount(info_items)
+  dim_VAR = ListCount(variable_info)
+
+  ; Check required diag_script_info attributes
+  req_atts = (/"syears", "eyears"/)
+  exit_if_missing_atts(diag_script_info, req_atts)
+
+end
+
+
+begin
+  ; Output netcdf directory
+  work_dir = config_user_info@work_dir
+  system("mkdir -p " + work_dir)
+
+  netcdf_dir = diag_script_info@input_files(1)
+
+  ; Output plot directory
+  plot_dir = config_user_info@plot_dir
+  system("mkdir -p " + plot_dir)
+end
+
+begin
+  if(isvar("MyParam")) then
+    delete(MyParam)
+  end if
+  if(isatt(variable_info[0], "long_name")) then
+    MyParam = variable_info[0]@long_name
+    log_debug(" MyParam = " + MyParam)
+  else
+    error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \
+              "'long_name'")
+  end if
+
+  ; ###########################################
+  ; # Get data                                #
+  ; ###########################################
+  datapath = netcdf_dir + "/" + variable_info[0]@diagnostic + ".nc"
+  to_plot = ncdf_read(datapath, var0)
+  if(isatt(diag_script_info, "model_nr") .and. \
+     diag_script_info@model_nr .eq. 
True) then + datapath2 = netcdf_dir + "/nr_runs_" + variable_info[0]@diagnostic + ".nc" + runs = ncdf_read(datapath2, "nr") + end if + ; start and end years of timeperiods + syears = diag_script_info@syears + eyears = diag_script_info@eyears + nperiods = dimsizes(syears) + + dims_data = (dimsizes(to_plot&metric) - 1) / 3 + + ; ########################################### + ; *********** PLOTTING ************ + ; Create plot variables + outfile = var0 + "_ts_line_" + syears(0) + "_" + eyears(nperiods - 1) + wks = get_wks(0, DIAG_SCRIPT, outfile) + if(isatt(diag_script_info, "colormap")) then + colormap = RGBtoCmap(diag_script_info@colormap) + else + colormap = "StepSeq25" + end if + gsn_define_colormap(wks, colormap) + + colors_main = (/1, 3, 4, 5, 6, 7/) + colors_spread = (/2, 3, 4, 5, 6, 7/) + + res = True + res@gsnDraw = False + res@gsnFrame = False + res@txFont = 25 + res@vpHeightF = 0.5 ; change aspect ratio of plot + res@vpWidthF = 0.70 + res@vpXF = 0.15 ; start plot at x ndc coord + + caption = "" + if (isatt(diag_script_info, "title")) then + caption = diag_script_info@title + res@tiMainString = diag_script_info@title + res@tiMainFontHeightF = 0.04 + end if + if(isatt(diag_script_info, "yaxis")) then + res@tiYAxisString = diag_script_info@yaxis + end if + res@tiXAxisString = "Year" + + res@xyDashPatterns = 0 + res@trXMinF = toint(syears(0)) + res@trXMaxF = toint(eyears(nperiods - 1)) + if(isatt(diag_script_info, "ymin")) then + res@trYMinF = diag_script_info@ymin + end if + if(isatt(diag_script_info, "ymax")) then + res@trYMaxF = diag_script_info@ymax + end if + + res@tmXBMode = "Explicit" + res@tmXBValues = ispan(toint(syears(0)), toint(eyears(nperiods - 1)), 50) + res@tmXBLabels = ispan(toint(syears(0)), toint(eyears(nperiods - 1)), 50) + res@tmXBMinorValues = ispan(toint(syears(0)), \ + toint(eyears(nperiods - 1)), 10) + + res@xyLineThicknesses = 3.5 + res@xyLineColor = colors_main(0) + res@tmYROn = False + res@tmYRBorderOn = False + ; create second axis + diff_avg = diag_script_info@rightaxis_offset + res2 = True + res2@xyLineThicknesses = 0.001 + res2@xyLineColor = -1 + + res2@tiYAxisString = "Relative to 1850-1900 (~S~o~N~ C)" + if(isatt(diag_script_info, "ymin")) then + res2@trYMinF = diff_avg+res@trYMinF + end if + if(isatt(diag_script_info, "ymax")) then + res2@trYMaxF = res@trYMaxF+diff_avg + end if + + ; create plot + plot = gsn_csm_xy(wks, to_plot&year, to_plot(0, :), res) + + res@gsnXYFillColors = colors_spread(0) + res@xyLineColor = -1 + spread_plot = gsn_csm_xy(wks, to_plot&year, to_plot(1 : 2, :), res) + overlay(plot, spread_plot) + + delete(res@gsnXYFillColors) + delete(res@tiMainString) + do j = 1, dims_data + if(all(ismissing(to_plot(j * 3, :)))) then + error_msg("w", DIAG_SCRIPT, "", "all missing values for " + \ + "dimension " + j * 3 + " in to_plot, need at least 5" + \ + " models, go to next") + continue + end if + res@xyLineColor = colors_main(j) + plot1 = gsn_csm_xy(wks, to_plot&year, to_plot(j * 3, :), res) + overlay(plot, plot1) + + res@gsnXYFillColors = colors_spread(j) + res@gsnXYFillOpacities = (/0.1, 0.1, 0.1/) + res@xyLineColor = -1 + if (j .eq. 
1) then + spread_plot = gsn_csm_xy2(wks, to_plot&year, to_plot((j * 3) + 1 \ + : (j * 3) + 2, :), to_plot((j * 3) + 1 \ + : (j * 3) + 2, :)*0, res, res2) + else + spread_plot = gsn_csm_xy(wks, to_plot&year, \ + to_plot((j * 3) + 1 : (j * 3) + 2, :), res) + end if + overlay(plot, spread_plot) + delete(res@gsnXYFillColors) + delete(res@gsnXYFillOpacities) + end do + + polyres = True + polyres@gsLineThicknessF = 1.5 + ; add polylines + dum = new(nperiods, graphic) + do n = 0, nperiods - 1 + dum(n) = gsn_add_polyline(wks, plot, (/toint(eyears(n)), \ + toint(eyears(n))/), (/-20, 20/), polyres) + end do + delete(res@xyLineColor) + ; Attach a legend + lgres = True + lgres@lgLineColors = colors_main ; (/1, 3, 4, 5, 6, 7/) + lgres@lgItemType = "Lines" ; show lines only (default) + lgres@lgLabelFontHeightF = .08 ; legend label font thickness + lgres@vpWidthF = 0.2 ; width of legend (NDC) + lgres@vpHeightF = 0.2 ; height of legend (NDC) + lgres@lgMonoDashIndex = True + lgres@lgDashIndex = 0 + lgres@lgLineThicknessF = 5 + lgres@lgPerimOn = False + lgres@lgItemOrder = (/5, 4, 3, 2, 1, 0/) + if (isatt(diag_script_info, "label")) then + labels = " " + diag_script_info@label + legend = gsn_create_legend(wks, 6, labels, lgres) + amres = True + amres@amJust = "TopLeft" ; Use bottom right corner of box + ; for determining its location. + amres@amParallelPosF = -0.46 ; Move legend to right + if (var0 .ne. "snw") then + amres@amParallelPosF = -0.45 ; Move legend to right + amres@amOrthogonalPosF = -0.45 ; Move legend up. + else + amres@amParallelPosF = -0.47 ; Move legend to right + amres@amOrthogonalPosF = 0.12 ; Move legend down. + end if + annoid = gsn_add_annotation(plot, legend, amres) ; add legend to plot + end if + if(isatt(diag_script_info, "model_nr")) then + getvalues plot + "tmYLLabelFontHeightF" : fheight + "trYMinF" : trYMinF + "trYMaxF" : trYMaxF + end getvalues + text = new(1 + (dims_data - 1) * nperiods, graphic) + txres = True + txres@txFontHeightF = fheight * 0.8 + label = tostring(runs(0, 0)) + xloc = 1938 + yloc = trYMaxF - (trYMaxF+trYMinF) / 6.85 + ydiff = (trYMaxF - trYMinF) / 15.7 + if yloc .le. 
0 then
+      ydiff = - ydiff
+    end if
+    text(0) = gsn_add_text(wks, plot, label, xloc, yloc, txres)
+    i = 1
+    do nscen = 1, dims_data
+      do nper = 1, nperiods - 1
+        txres@txFontColor = colors_main(nscen)
+        label = tostring(runs(nscen, nper))
+        ; xloc = (toint(eyears(nper)) - toint(syears(nper))) / 2
+        text(i) = gsn_add_text(wks, plot, label, xloc, \
+                               yloc - nscen * ydiff, txres)
+        i = i + 1
+      end do
+    end do
+  end if
+  draw(wks)
+  frame(wks)
+
+  log_info(" Wrote " + wks@fullname)
+
+  ; collect meta-data
+  nc_file = datapath
+
+  if (isatt(diag_script_info, "spread")) then
+    spread_str = diag_script_info@spread
+  else
+    spread_str = "1.0"
+  end if
+  statistics = ("mean")
+  domains = ("global")
+  plot_types = ("times")
+  authors = (/"debeire_kevin"/)
+  references = (/"tebaldi21esd"/)
+  infiles = metadata_att_as_array(info_items, "filename")
+  log_provenance(nc_file, wks@fullname, caption, statistics, domains, \
+                 plot_types, authors, references, infiles)
+
+  leave_msg(DIAG_SCRIPT, "")
+end
diff --git a/esmvaltool/diag_scripts/tebaldi21esd/plot_timeseries_mean_spread_ssp4.ncl b/esmvaltool/diag_scripts/tebaldi21esd/plot_timeseries_mean_spread_ssp4.ncl
new file mode 100644
index 0000000000..be50e29bb2
--- /dev/null
+++ b/esmvaltool/diag_scripts/tebaldi21esd/plot_timeseries_mean_spread_ssp4.ncl
@@ -0,0 +1,284 @@
+; #############################################################################
+; Plot timeseries for SSP4-3.4 and SSP4-6.0 scenarios
+; Derived and adapted from
+; diag_scripts/ipcc_ar5/ch12_plot_ts_line_mean_spread.ncl
+; Author: Debeire Kevin (DLR, Germany)
+; #############################################################################
+;
+; Description
+;   Plot time series (mean and spread) for two ssp4 scenarios.
+;
+; Required diag_script_info attributes (diagnostics specific)
+;   ancestors: variable and diagnostics that calculated field to be plotted
+;   scenarios: list of scenarios included in the figure
+;   syears: list of start years of historical and SSPs scenarios
+;   eyears: list of end years of historical and SSPs scenarios
+;   begin_ref_year: start year of reference period
+;   end_ref_year: end year of reference period
+;   label: list of labels of the scenarios
+;
+; Optional diag_script_info attributes (diagnostic specific)
+;   title: specify plot title
+;   yaxis: specify y-axis title
+;   ymin: minimum value on y-axis, default calculated from data
+;   ymax: maximum value on y-axis
+;   colormap: alternative colormap, path to rgb file or ncl name
+;   model_nr: save number of model runs per period and scenario
+;   styleset: color style
+;   spread: how many standard deviations to calculate the spread with,
+;           default is 1, ipcc tas is 1.64
+;
+; Modification history
+;   20220803-A_debe_kevin: written for ESMValTool
+;
+; #############################################################################
+; A temporary file written by the invoking Python script
+; Passes on a number of variables from Python to NCL
+load "$diag_scripts/../interface_scripts/interface.ncl"
+
+load "$diag_scripts/shared/plot/style.ncl"
+load "$diag_scripts/shared/plot/aux_plotting.ncl"
+begin
+  enter_msg(DIAG_SCRIPT, "")
+
+  ; 'models', 'variables' are fetched from the above 'interface.ncl' file
+  var0 = variable_info[0]@short_name
+  info_items = select_metadata_by_name(input_file_info, var0)
+  dim_MOD = ListCount(info_items)
+  dim_VAR = ListCount(variable_info)
+
+  ; Check required diag_script_info attributes
+  req_atts = (/"syears", "eyears"/)
+  exit_if_missing_atts(diag_script_info, req_atts)
+
+end
+
+
+begin
+  ; 
Output netcdf directory + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) + + netcdf_dir = diag_script_info@input_files(1) + + ; Output netcdf directory + plot_dir = config_user_info@plot_dir + system("mkdir -p " + plot_dir) +end + +begin + ; ############################################# + ; # Get parameters from ./variable_defs/*.ncl # + ; # passed via the 'info' attribute # + ; ############################################# + if(isvar("MyParam")) then + delete(MyParam) + end if + if(isatt(variable_info[0], "long_name")) then + MyParam = variable_info[0]@long_name + log_debug(" MyParam = " + MyParam) + else + error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \ + "'long_name'") + end if + + ; ########################################### + ; # Get data # + ; ########################################### + datapath = netcdf_dir + "/" + variable_info[0]@diagnostic + ".nc" + to_plot = ncdf_read(datapath, var0) + if(isatt(diag_script_info, "model_nr") .and. \ + diag_script_info@model_nr .eq. True) then + datapath2 = netcdf_dir + "/nr_runs_" + variable_info[0]@diagnostic + ".nc" + runs = ncdf_read(datapath2, "nr") + end if + ; start and end years of timeperiods + syears = diag_script_info@syears + eyears = diag_script_info@eyears + nperiods = dimsizes(syears) + + dims_data = (dimsizes(to_plot&metric) - 1) / 3 + + ; ########################################### + ; *********** PLOTTING ************ + ; Create plot variables + outfile = var0 + "_ts_line_" + syears(0) + "_" + eyears(nperiods - 1) + wks = get_wks(0, DIAG_SCRIPT, outfile) + if(isatt(diag_script_info, "colormap")) then + colormap = RGBtoCmap(diag_script_info@colormap) + else + colormap = "StepSeq25" + end if + gsn_define_colormap(wks, colormap) + + colors_main = (/1, 10, 11/) + colors_spread = (/2, 10, 11/) + + res = True + + res@gsnDraw = False + res@gsnFrame = False + res@txFont = 25 + res@vpHeightF = 0.5 ; change aspect ratio of plot + res@vpWidthF = 0.8 + res@vpXF = 0.15 ; start plot at x ndc coord + + caption = "" + if (isatt(diag_script_info, "title")) then + caption = diag_script_info@title + res@tiMainString = diag_script_info@title + res@tiMainFontHeightF = 0.04 + end if + if(isatt(diag_script_info, "yaxis")) then + res@tiYAxisString = diag_script_info@yaxis + end if + res@tiXAxisString = "Year" + + res@xyDashPatterns = 0 + res@trXMinF = toint(syears(1)) + res@trXMaxF = toint(eyears(nperiods - 1)) + if(isatt(diag_script_info, "ymin")) then + res@trYMinF = diag_script_info@ymin + end if + if(isatt(diag_script_info, "ymax")) then + res@trYMaxF = diag_script_info@ymax + end if + + res@tmXBMode = "Explicit" + res@tmXBValues = ispan(toint(2020), toint(eyears(nperiods - 1)), 20) + res@tmXBLabels = ispan(toint(2020), toint(eyears(nperiods - 1)), 20) + res@tmXBMinorValues = ispan(toint(syears(0)), \ + toint(eyears(nperiods - 1)), 10) + + res@xyLineThicknesses = 3.5 + res@xyLineColor = colors_main(0) + + ; create plot + plot = gsn_csm_xy(wks, to_plot&year, to_plot(0, :), res) + + res@gsnXYFillColors = colors_spread(0) + res@xyLineColor = -1 + spread_plot = gsn_csm_xy(wks, to_plot&year, to_plot(1 : 2, :), res) + overlay(plot, spread_plot) + + delete(res@gsnXYFillColors) + + do j = 1, dims_data + if(all(ismissing(to_plot(j * 3, :)))) then + error_msg("w", DIAG_SCRIPT, "", "all missing values for " + \ + "dimension " + j * 3 + " in to_plot, need at least 5" + \ + " models, go to next") + continue + end if + res@xyLineColor = colors_main(j) + plot1 = gsn_csm_xy(wks, to_plot&year, to_plot(j * 3, :), res) + 
overlay(plot, plot1)
+
+    res@gsnXYFillColors = colors_spread(j)
+    res@gsnXYFillOpacities = (/0.2, 0.2, 0.2/)
+    res@xyLineColor = -1
+    spread_plot = gsn_csm_xy(wks, to_plot&year, \
+                             to_plot((j * 3) + 1 : (j * 3) + 2, :), res)
+    overlay(plot, spread_plot)
+    delete(res@gsnXYFillColors)
+    delete(res@gsnXYFillOpacities)
+  end do
+
+  polyres = True
+  polyres@gsLineThicknessF = 1.5
+  ; add polylines
+  dum = new(nperiods, graphic)
+  do n = 0, nperiods - 1
+    dum(n) = gsn_add_polyline(wks, plot, (/toint(eyears(n)), \
+             toint(eyears(n))/), (/-20, 20/), polyres)
+  end do
+  delete(res@xyLineColor)
+
+  ; Attach a legend
+  lgres = True
+  lgres@lgLineColors = colors_main  ; (/1, 3, 4, 5, 6, 7/)
+  lgres@lgItemType = "Lines"        ; show lines only (default)
+  lgres@lgLabelFontHeightF = .08    ; legend label font thickness
+  lgres@vpWidthF = 0.2              ; width of legend (NDC)
+  lgres@vpHeightF = 0.2             ; height of legend (NDC)
+  lgres@lgMonoDashIndex = True
+  lgres@lgDashIndex = 0
+  lgres@lgLineThicknessF = 5
+  lgres@lgPerimOn = False
+  lgres@lgItemOrder = (/2, 1, 0/)
+  if (isatt(diag_script_info, "label")) then
+    labels = " " + diag_script_info@label
+    legend = gsn_create_legend(wks, 3, labels, lgres)
+    amres = True
+    amres@amJust = "TopLeft"    ; Use top left corner of box
+                                ; for determining its location.
+    amres@amParallelPosF = -0.46    ; Move legend to right
+    if (var0 .ne. "snw") then
+      amres@amParallelPosF = -0.45    ; Move legend to right
+      amres@amOrthogonalPosF = -0.45  ; Move legend up.
+    else
+      amres@amParallelPosF = -0.47    ; Move legend to right
+      amres@amOrthogonalPosF = 0.12   ; Move legend down.
+    end if
+    annoid = gsn_add_annotation(plot, legend, amres)  ; add legend to plot
+  end if
+
+  ; Display the number of models used in the legend
+  if(isatt(diag_script_info, "model_nr")) then
+    getvalues plot
+      "tmYLLabelFontHeightF" : fheight
+      "trYMinF" : trYMinF
+      "trYMaxF" : trYMaxF
+    end getvalues
+    text = new(1 + (dims_data - 1) * nperiods, graphic)
+    txres = True
+    txres@txFontHeightF = fheight * 0.8
+    label = tostring(runs(0, 0))
+    xloc = 2045
+    yloc = trYMaxF - (trYMaxF+trYMinF)/4.9
+    if var0 .eq. "pr" then
+      yloc = trYMaxF - (trYMaxF-trYMinF)/8
+    end if
+    ydiff = (trYMaxF - trYMinF) / 8.00
+    if yloc .le. 
0 then
+      ydiff = - ydiff
+    end if
+    text(0) = gsn_add_text(wks, plot, label, xloc, yloc, txres)
+    i = 1
+    do nscen = 1, dims_data
+      do nper = 1, nperiods - 1
+        txres@txFontColor = colors_main(nscen)
+        label = tostring(runs(nscen, nper))
+        ; xloc = (toint(eyears(nper)) - toint(syears(nper))) / 2
+        text(i) = gsn_add_text(wks, plot, label, xloc, \
+                               yloc - nscen * ydiff, txres)
+        i = i + 1
+      end do
+    end do
+  end if
+  draw(wks)
+  frame(wks)
+
+  log_info(" Wrote " + wks@fullname)
+
+  ; collect meta-data
+  nc_file = datapath
+
+  if (isatt(diag_script_info, "spread")) then
+    spread_str = diag_script_info@spread
+  else
+    spread_str = "1.0"
+  end if
+  statistics = ("mean")
+  domains = ("global")
+  plot_types = ("times")
+  authors = (/"debeire_kevin"/)
+  references = (/"tebaldi21esd"/)
+  infiles = metadata_att_as_array(info_items, "filename")
+  log_provenance(nc_file, wks@fullname, caption, statistics, domains, \
+                 plot_types, authors, references, infiles)
+
+  leave_msg(DIAG_SCRIPT, "")
+end
diff --git a/esmvaltool/diag_scripts/tebaldi21esd/plot_timeseries_mean_spread_ssp5.ncl b/esmvaltool/diag_scripts/tebaldi21esd/plot_timeseries_mean_spread_ssp5.ncl
new file mode 100644
index 0000000000..9570b09ae4
--- /dev/null
+++ b/esmvaltool/diag_scripts/tebaldi21esd/plot_timeseries_mean_spread_ssp5.ncl
@@ -0,0 +1,269 @@
+; #############################################################################
+; Plot timeseries for SSP5-3.4-OS and SSP5-8.5 scenarios
+; Derived and adapted from
+; diag_scripts/ipcc_ar5/ch12_plot_ts_line_mean_spread.ncl
+; Author: Debeire Kevin (DLR, Germany)
+; #############################################################################
+;
+; Description
+;   Plot time series (mean and spread) for two ssp5 scenarios.
+;
+; Required diag_script_info attributes (diagnostics specific)
+;   ancestors: variable and diagnostics that calculate the timeseries to plot
+;   scenarios: list of scenarios included in the figure
+;   syears: list of start years of historical and SSPs scenarios
+;   eyears: list of end years of historical and SSPs scenarios
+;   begin_ref_year: start year of reference period
+;   end_ref_year: end year of reference period
+;   label: list of labels of the scenarios
+;
+; Optional diag_script_info attributes (diagnostic specific)
+;   title: specify plot title
+;   yaxis: specify y-axis title
+;   ymin: minimum value on y-axis, default calculated from data
+;   ymax: maximum value on y-axis
+;   colormap: alternative colormap, path to rgb file or ncl name
+;   model_nr: save number of model runs per period and scenario
+;   styleset: color style
+;   spread: how many standard deviations to calculate the spread with,
+;           default is 1, ipcc tas is 1.64
+;
+; Modification history
+;   20220803-A_debe_kevin: written for ESMValTool
+;
+; #############################################################################
+; A temporary file written by the invoking Python script
+; Passes on a number of variables from Python to NCL
+load "$diag_scripts/../interface_scripts/interface.ncl"
+
+load "$diag_scripts/shared/plot/style.ncl"
+load "$diag_scripts/shared/plot/aux_plotting.ncl"
+begin
+  enter_msg(DIAG_SCRIPT, "")
+
+  ; 'models', 'variables' are fetched from the above 'interface.ncl' file
+  var0 = variable_info[0]@short_name
+  info_items = select_metadata_by_name(input_file_info, var0)
+  dim_MOD = ListCount(info_items)
+  dim_VAR = ListCount(variable_info)
+
+  ; Check required diag_script_info attributes
+  req_atts = (/"syears", "eyears"/)
+  exit_if_missing_atts(diag_script_info, req_atts)
+
+end
+
+
+begin
+  ; 
Output netcdf directory + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) + + netcdf_dir = diag_script_info@input_files(1) + + ; Output netcdf directory + plot_dir = config_user_info@plot_dir + system("mkdir -p " + plot_dir) +end + +begin + if(isvar("MyParam")) then + delete(MyParam) + end if + if(isatt(variable_info[0], "long_name")) then + MyParam = variable_info[0]@long_name + log_debug(" MyParam = " + MyParam) + else + error_msg("f", DIAG_SCRIPT, "", "missing variable attribute " + \ + "'long_name'") + end if + + ; ########################################### + ; # Get data # + ; ########################################### + datapath = netcdf_dir + "/" + variable_info[0]@diagnostic + ".nc" + to_plot = ncdf_read(datapath, var0) + if(isatt(diag_script_info, "model_nr") .and. \ + diag_script_info@model_nr .eq. True) then + datapath2 = netcdf_dir + "/nr_runs_" + variable_info[0]@diagnostic + ".nc" + runs = ncdf_read(datapath2, "nr") + end if + ; start and end years of timeperiods + syears = diag_script_info@syears + eyears = diag_script_info@eyears + nperiods = dimsizes(syears) + dim_plot = dimsizes(to_plot) + dims_data = (dim_plot(0)) / 3 + ; ########################################### + ; *********** PLOTTING ************ + ; Create plot variables + outfile = var0 + "_ts_line_" + syears(0) + "_" + eyears(nperiods - 1) + wks = get_wks(0, DIAG_SCRIPT, outfile) + if(isatt(diag_script_info, "colormap")) then + colormap = RGBtoCmap(diag_script_info@colormap) + else + colormap = "StepSeq25" + end if + gsn_define_colormap(wks, colormap) + + colors_main = (/1, 9, 7/) + colors_spread = (/2, 9, 7/) + + res = True + + res@gsnDraw = False + res@gsnFrame = False + res@txFont = 22 + res@txFontHeightF = 1.8 + res@vpHeightF = 0.5 ; change aspect ratio of plot + res@vpWidthF = 0.8 + res@vpXF = 0.15 ; start plot at x ndc coord + + caption = "" + if (isatt(diag_script_info, "title")) then + caption = diag_script_info@title + res@tiMainString = diag_script_info@title + res@tiMainFontHeightF = 0.04 + end if + if(isatt(diag_script_info, "yaxis")) then + res@tiYAxisString = diag_script_info@yaxis + end if + res@tiXAxisString = "Year" + + res@xyDashPatterns = 0 + res@trXMinF = toint(syears(nperiods - 1)) + res@trXMaxF = toint(eyears(nperiods - 1)) + if(isatt(diag_script_info, "ymin")) then + res@trYMinF = diag_script_info@ymin + end if + if(isatt(diag_script_info, "ymax")) then + res@trYMaxF = diag_script_info@ymax + end if + + res@tmXBMode = "Explicit" + res@tmXBValues = ispan(toint(2020), toint(eyears(nperiods - 1)), 20) + res@tmXBLabels = ispan(toint(2020), toint(eyears(nperiods - 1)), 20) + res@tmXBMinorValues = ispan(toint(2020), \ + toint(eyears(nperiods - 1)), 10) + + res@xyLineThicknesses = 3.5 + res@xyLineColor = colors_main(0) + + ; create plot + plot = gsn_csm_xy(wks, to_plot&year, to_plot(0, :), res) + res@gsnXYFillColors = colors_spread(0) + res@xyLineColor = -1 + + delete(res@gsnXYFillColors) + + do i = 1, 2 + if(all(ismissing(to_plot(i * 3, :)))) then + error_msg("w", DIAG_SCRIPT, "", "all missing values for " + \ + "dimension " + i * 3 + " in to_plot, need at least 5" + \ + " models, go to next") + continue + end if + res@xyLineColor = colors_main(i) + plot1 = gsn_csm_xy(wks, to_plot&year, to_plot(i * 3, :), res) + overlay(plot, plot1) + res@gsnXYFillColors = colors_spread(i) + res@gsnXYFillOpacities = (/0.2, 0.2, 0.2/) + res@xyLineColor = -1 + spread_plot = gsn_csm_xy(wks, to_plot&year, \ + to_plot((i * 3) + 1 : (i * 3) + 2, :), res) + overlay(plot, spread_plot) + 
delete(res@gsnXYFillColors)
+    delete(res@gsnXYFillOpacities)
+  end do
+
+  polyres = True
+  polyres@gsLineThicknessF = 1.5
+  ; add polylines
+  dum = new(nperiods, graphic)
+  do n = 0, nperiods - 1
+    dum(n) = gsn_add_polyline(wks, plot, (/toint(eyears(n)), \
+             toint(eyears(n))/), (/-20, 20/), polyres)
+  end do
+  delete(res@xyLineColor)
+
+  ; Attach a legend
+  lgres = True
+  lgres@lgLineColors = colors_main  ; (/9, 8/)
+  lgres@lgItemType = "Lines"        ; show lines only (default)
+  lgres@lgLabelFontHeightF = .08    ; legend label font thickness
+  lgres@vpWidthF = 0.2              ; width of legend (NDC)
+  lgres@vpHeightF = 0.1             ; height of legend (NDC)
+  lgres@lgMonoDashIndex = True
+  lgres@lgDashIndex = 0
+  lgres@lgLineThicknessF = 5
+  lgres@lgPerimOn = False
+  lgres@lgItemOrder = (/2, 1, 0/)
+  if (isatt(diag_script_info, "label")) then
+    labels = " " + diag_script_info@label
+    legend = gsn_create_legend(wks, 3, labels, lgres)
+    amres = True
+    amres@amJust = "TopLeft"    ; Use top left corner of box
+                                ; for determining its location.
+    amres@amParallelPosF = -0.46    ; Move legend to right
+    if (var0 .ne. "snw") then
+      amres@amParallelPosF = -0.45    ; Move legend to right
+      amres@amOrthogonalPosF = -0.45  ; Move legend up.
+    else
+      amres@amParallelPosF = -0.47    ; Move legend to right
+      amres@amOrthogonalPosF = 0.12   ; Move legend down.
+    end if
+    annoid = gsn_add_annotation(plot, legend, amres)  ; add legend to plot
+  end if
+  if(isatt(diag_script_info, "model_nr")) then
+    getvalues plot
+      "tmYLLabelFontHeightF" : fheight
+      "trYMinF" : trYMinF
+      "trYMaxF" : trYMaxF
+    end getvalues
+    text = new(3, graphic)
+    txres = True
+    txres@txFontHeightF = fheight * 0.8
+    label = tostring(runs(0, 0))
+    xloc = 2045
+    yloc = trYMaxF - (trYMaxF-trYMinF)/11.8
+    ydiff = (trYMaxF - trYMinF) / 15.8
+    if yloc .le. 0 then
+      ydiff = - ydiff
+    end if
+    txres@txFontColor = colors_main(0)
+    label = tostring(runs(0, 0))
+    text(0) = gsn_add_text(wks, plot, label, xloc, \
+                           yloc - 0 * ydiff, txres)
+    label = tostring(runs(2, 1))
+    txres@txFontColor = colors_main(1)
+    text(1) = gsn_add_text(wks, plot, label, xloc, yloc - 1 * ydiff, txres)
+    label = tostring(runs(2, 2))
+    txres@txFontColor = colors_main(2)
+    text(2) = gsn_add_text(wks, plot, label, xloc, yloc - 2 * ydiff, txres)
+  end if
+
+  draw(wks)
+  frame(wks)
+
+  log_info(" Wrote " + wks@fullname)
+
+  ; collect meta-data
+  nc_file = datapath
+
+  if (isatt(diag_script_info, "spread")) then
+    spread_str = diag_script_info@spread
+  else
+    spread_str = "1.0"
+  end if
+  statistics = ("mean")
+  domains = ("global")
+  plot_types = ("times")
+  authors = (/"debeire_kevin"/)
+  references = (/"tebaldi21esd"/)
+  infiles = metadata_att_as_array(info_items, "filename")
+  log_provenance(nc_file, wks@fullname, caption, statistics, domains, \
+                 plot_types, authors, references, infiles)
+
+  leave_msg(DIAG_SCRIPT, "")
+end
diff --git a/esmvaltool/diag_scripts/thermodyn_diagtool/__init__.py b/esmvaltool/diag_scripts/thermodyn_diagtool/__init__.py
new file mode 100644
index 0000000000..72f011f220
--- /dev/null
+++ b/esmvaltool/diag_scripts/thermodyn_diagtool/__init__.py
@@ -0,0 +1 @@
+"""Initialize the ESMValTool thermodyn_diagtool package."""
diff --git a/esmvaltool/diag_scripts/thermodyn_diagtool/computations.py b/esmvaltool/diag_scripts/thermodyn_diagtool/computations.py
new file mode 100644
index 0000000000..84b29b6599
--- /dev/null
+++ b/esmvaltool/diag_scripts/thermodyn_diagtool/computations.py
@@ -0,0 +1,932 @@
+"""INTERNAL COMPUTATIONS.
+ +Module containing all the core computations. + +This module contains all the basic computations needed by the thermodynamics +diagnostic tool. + +The functions that are here contained are: +- baroceff: function for the baroclinic efficiency; +- budgets: function for the energy budgets (TOA, atmospheric, surface); +- direntr: function for the material entropy production (direct method); +- entr: function for the computation of entropy as energy/temperature; +- evapentr: function for the evaporation related material entropy production; +- indentr: function for material entropy production (indirect method); +- kinentr: function for the kin. en. diss. related material entropy production; +- landoc_budg: function for budget computations over land and oceans; +- mask_precip: function for masking rainfall and snowfall regions; +- masktonull: function for masking nan values to null; +- meltentr: function for the entropy production from ground snow melting; +- potentr: function for the entropy production from pot. en. of the droplet; +- rainentr: function for the entropy production from rainfall precipitation; +- removeif: function for conditional file deleting; +- sensentr: function for the entropy production from sensible heat fluxes; +- snowentr: function for the entropy production from snowfall precipitation; +- wmbudg: function for water mass and latent energy budgets; +- write_eb: function for writing global mean energy budgets to file; + +@author: valerio.lembo@uni-hamburg.de, Valerio Lembo, Hamburg University, 2019. +""" + +import os +from shutil import move + +import numpy as np +from cdo import Cdo +from netCDF4 import Dataset + +import esmvaltool.diag_scripts.shared as e +from esmvaltool.diag_scripts.thermodyn_diagtool import mkthe + +L_C = 2501000 # latent heat of condensation +LC_SUB = 2835000 # latent heat of sublimation +L_S = 334000 # latent heat of solidification +GRAV = 9.81 # gravity acceleration + + +def baroceff(model, wdir, aux_file, toab_file, te_file): + """Compute the baroclinic efficiency of the atmosphere. + + The function computes the baroclinic efficiency of the atmosphere, i.e. + the efficiency of the meridional heat transports from the low latitudes, + where there is a net energy gain, towards the high latitudes, where there + is a net energy loss (after Lucarini et al., 2011). + + Arguments: + ---------- + - model: the model name; + - wdir: the working directory where the outputs are stored; + - aux_file: the name of a dummy aux. file to be used for computations; + - toab_file: a file containing the annual mean TOA energy budgets + (time,lon,lat); + - te_file: a file containing the annual mean emission temperature + (time,lon,lat); + + Returns + ------- + The annual mean baroclinic efficiency (after Lucarini et al. 2011). + + @author: Valerio Lembo, Hamburg University, 2018. 
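+
+    Note (a sketch of what the cdo chain below computes): the efficiency is
+    eta = (1/t_loss - 1/t_gain) / (0.5 * (1/t_gain + 1/t_loss)),
+    where t_gain and t_loss are the flux-weighted mean emission temperatures
+    of the energy gain and loss regions (the tegainm and telossm fields).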
+ """ + cdo = Cdo() + removeif(aux_file) + gain_file = wdir + '/{}_maskGain.nc'.format(model) + cdo.gtc('0', input=toab_file, output=gain_file) + loss_file = wdir + '/{}_maskLoss.nc'.format(model) + cdo.ltc('0', input=toab_file, output=loss_file) + toabgain_file = wdir + '/{}_toabGain.nc'.format(model) + cdo.setrtomiss('-1000,0', + input='-mul {} {}'.format(toab_file, gain_file), + output=toabgain_file) + toabloss_file = wdir + '/{}_toabLoss.nc'.format(model) + cdo.setrtomiss('0,1000', + input='-mul {} {}'.format(toab_file, loss_file), + output=toabloss_file) + tegain_file = wdir + '/{}_teGain.nc'.format(model) + cdo.setrtomiss('-1000,0', + input='-mul {} {}'.format(te_file, gain_file), + output=tegain_file) + teloss_file = wdir + '/{}_teLoss.nc'.format(model) + cdo.setrtomiss('-1000,0', + input='-mul {} {}'.format(te_file, loss_file), + output=teloss_file) + tegainm_file = wdir + '/{}_teGainm.nc'.format(model) + cdo.div(input='-fldmean {0} -fldmean -div {0} {1} '.format( + toabgain_file, tegain_file), + output=tegainm_file) + telossm_file = wdir + '/{}_teLossm.nc'.format(model) + cdo.div(input='-fldmean {0} -fldmean -div {0} {1} '.format( + toabloss_file, teloss_file), + output=telossm_file) + aux_baroceff_file = (wdir + '/{}_aux_barocEff.nc'.format(model)) + cdo.sub(input='-reci {} -reci {}'.format(telossm_file, tegainm_file), + output=aux_baroceff_file) + baroceff_file = wdir + '/{}_barocEff.nc'.format(model) + cdo.div(input='{} -mulc,0.5 -add -reci {} -reci {}'.format( + aux_baroceff_file, tegainm_file, telossm_file), + output=baroceff_file) + with Dataset(baroceff_file) as f_l: + baroc = f_l.variables['toab'][0, 0, 0] + remove_files = [ + gain_file, loss_file, toabgain_file, toabloss_file, tegain_file, + teloss_file, tegainm_file, telossm_file, aux_baroceff_file + ] + for filen in remove_files: + os.remove(filen) + return baroc + + +def budgets(model, wdir, aux_file, input_data): + """Compute radiative budgets from radiative and heat fluxes. + + The function computes TOA and surface energy budgets from radiative and + heat fluxes, then writes the annual mean to the log info file and write + the (lat,lon) annual mean fields to a NetCDF file, as well as the time + series of the annual mean globally averaged fields. + + toab = rsdt - rsut - rlut + surb = rsds + rlds - rsus - rlus - hfls - hfss + atmb = toab - atmb + + Arguments: + ---------- + - model: the model name; + - wdir: the working directory where the outputs are stored; + - aux_file: the name of a dummy aux. file to be used for computations; + - filelist: a list of file names containing the input fields; + + Returns + ------- + The list of input files, the global mean budget time series, a file + containing the budget fields, a file containing the annual mean TOA budget + value; + + @author: Valerio Lembo, Hamburg University, 2018. 
+ """ + cdo = Cdo() + hfls_file = e.select_metadata(input_data, short_name='hfls', + dataset=model)[0]['filename'] + hfss_file = e.select_metadata(input_data, short_name='hfss', + dataset=model)[0]['filename'] + rlds_file = e.select_metadata(input_data, short_name='rlds', + dataset=model)[0]['filename'] + rlus_file = e.select_metadata(input_data, short_name='rlus', + dataset=model)[0]['filename'] + rlut_file = e.select_metadata(input_data, short_name='rlut', + dataset=model)[0]['filename'] + rsds_file = e.select_metadata(input_data, short_name='rsds', + dataset=model)[0]['filename'] + rsus_file = e.select_metadata(input_data, short_name='rsus', + dataset=model)[0]['filename'] + rsdt_file = e.select_metadata(input_data, short_name='rsdt', + dataset=model)[0]['filename'] + rsut_file = e.select_metadata(input_data, short_name='rsut', + dataset=model)[0]['filename'] + toab_file = wdir + '/{}_toab.nc'.format(model) + toab_gmean_file = wdir + '/{}_toab_gmean.nc'.format(model) + surb_file = wdir + '/{}_surb.nc'.format(model) + aux_surb_file = wdir + '/{}_aux_surb.nc'.format(model) + surb_gmean_file = wdir + '/{}_surb_gmean.nc'.format(model) + atmb_file = wdir + '/{}_atmb.nc'.format(model) + atmb_gmean_file = wdir + '/{}_atmb_gmean.nc'.format(model) + removeif(aux_file) + cdo.sub(input="-sub {} {} {}".format(rsdt_file, rsut_file, rlut_file), + output=aux_file) + toab_gmean = write_eb('rsdt', 'toab', aux_file, toab_file, toab_gmean_file) + toab_ymm_file = wdir + '/{}_toab_ymm.nc'.format(model) + cdo.yearmonmean(input=toab_file, output=toab_ymm_file) + # Surface energy budget + removeif(aux_file) + cdo.add(input=" {} {}".format(rsds_file, rlds_file), output=aux_surb_file) + cdo.sub(input="-sub -sub -sub {} {} {} {} {}".format( + aux_surb_file, rsus_file, rlus_file, hfls_file, hfss_file), + output=aux_file) + surb_gmean = write_eb('rsds', 'surb', aux_file, surb_file, surb_gmean_file) + # Atmospheric energy budget + removeif(aux_file) + cdo.sub(input="{} {}".format(toab_file, surb_file), output=aux_file) + atmb_gmean = write_eb('toab', 'atmb', aux_file, atmb_file, atmb_gmean_file) + eb_gmean = [toab_gmean, atmb_gmean, surb_gmean] + eb_file = [toab_file, atmb_file, surb_file] + # Delete files + filenames = [ + aux_surb_file, toab_gmean_file, atmb_gmean_file, surb_gmean_file + ] + for filen in filenames: + os.remove(filen) + input_list = [ + hfls_file, hfss_file, rlds_file, rlus_file, rlut_file, rsds_file, + rsdt_file, rsus_file, rsut_file + ] + return input_list, eb_gmean, eb_file, toab_ymm_file + + +def direntr(logger, model, wdir, input_data, aux_file, te_file, lect, flags): + """Compute the material entropy production with the direct method. + + The function computes the material entropy production with the direct + method, explicitly retrieving the components related to evaporation, + rainfall and snowfall precipitation, snow melting at the ground, potential + energy of the droplet, sensible heat fluxes and kinetic energy dissipation + (from Lorenz Energy Cycle, LEC). The outputs are stored as NC files in + terms of global mean time series, and in terms of annual mean + (time,lat,lon) fields. + + Arguments: + ---------- + logger: the log file where the global mean values are printed out; + model: the model name; + wdir: the working directory where the outputs are stored; + filelist: the list containing all the input files; + aux_file: the name of a dummy aux. 
file to be used for computations; + lect: the annual mean value of the LEC strength; + flags: a list of flags containing information on whether the water mass + and energy budgets are computed, if the material entropy production + has to be computed, if using the indirect, the direct method, or + both methods; + + Returns + ------- + The annual mean entropy production with the direct method, the degree of + irreversibility, the list of input files for the computation. + + @author: Valerio Lembo, Hamburg University, 2018. + """ + lec = flags[1] + aux_files = mkthe.init_mkthe_direntr(model, wdir, input_data, te_file, + flags) + htop_file = aux_files[1] + prr_file = aux_files[2] + tabl_file = aux_files[3] + tasvert_file = aux_files[4] + tcloud_file = aux_files[5] + tcolumn_file = aux_files[6] + tlcl_file = aux_files[7] + hfls_file = e.select_metadata(input_data, short_name='hfls', + dataset=model)[0]['filename'] + hfss_file = e.select_metadata(input_data, short_name='hfss', + dataset=model)[0]['filename'] + prsn_file = e.select_metadata(input_data, short_name='prsn', + dataset=model)[0]['filename'] + ts_file = e.select_metadata(input_data, short_name='ts', + dataset=model)[0]['filename'] + logger.info('Computation of the material entropy ' + 'production with the direct method\n') + logger.info('1. Sensible heat fluxes\n') + infile_list = [hfss_file, tabl_file, ts_file] + ssens, sensentr_file = sensentr(model, wdir, infile_list, aux_file) + logger.info( + 'Material entropy production associated with ' + 'sens. heat fluxes: %s\n', ssens) + logger.info('2. Hydrological cycle\n') + logger.info('2.1 Evaporation fluxes\n') + infile_list = [hfls_file, ts_file] + sevap, evapentr_file = evapentr(model, wdir, infile_list, aux_file) + logger.info( + 'Material entropy production associated with ' + 'evaporation fluxes: %s\n', sevap) + infile_mask = [prr_file, prsn_file, tlcl_file] + prrmask_file, prsnmask_file = mask_precip(model, wdir, infile_mask) + logger.info('2.2 Rainfall precipitation\n') + infile_rain = [prrmask_file, tcloud_file] + srain, rainentr_file = rainentr(model, wdir, infile_rain, aux_file) + logger.info( + 'Material entropy production associated with ' + 'rainfall: %s\n', srain) + logger.info('2.3 Snowfall precipitation\n') + infile_snow = [prsnmask_file, tcloud_file] + ssnow, latsnow_file, snowentr_file = snowentr(model, wdir, infile_snow, + aux_file) + logger.info( + 'Material entropy production associated with ' + 'snowfall: %s\n', ssnow) + logger.info('2.4 Melting of snow at the surface \n') + smelt, meltentr_file = meltentr(model, wdir, latsnow_file, aux_file) + logger.info( + 'Material entropy production associated with snow ' + 'melting: %s\n', smelt) + logger.info('2.5 Potential energy of the droplet\n') + infile_pot = [htop_file, prrmask_file, prsnmask_file, tcolumn_file] + spot, potentr_file = potentr(model, wdir, infile_pot, aux_file) + logger.info( + 'Material entropy production associated with ' + 'potential energy of the droplet: %s\n', spot) + os.remove(prrmask_file) + os.remove(prsnmask_file) + logger.info('3. 
Kinetic energy dissipation\n') + skin = kinentr(logger, aux_file, tasvert_file, lect, lec) + matentr = (float(ssens) - float(sevap) + float(srain) + float(ssnow) + + float(spot) + float(skin) - float(smelt)) + logger.info('Material entropy production with ' + 'the direct method: %s\n', matentr) + irrevers = ((matentr - float(skin)) / float(skin)) + for filen in aux_files: + os.remove(filen) + entr_list = [ + sensentr_file, evapentr_file, rainentr_file, snowentr_file, + meltentr_file, potentr_file + ] + return matentr, irrevers, entr_list + + +def entr(filelist, nin, nout, entr_file, entr_mean_file): + """Obtain the entropy dividing some energy by some working temperature. + + This function ingests an energy and a related temperature, then writes + (time,lat,lon) entropy fluxes and entropy flux annual mean values to NC + files. + + Arguments: + ---------- + filelist: a list of file containing the name of the energy file, of the + temperature file and of an auxiliary file needed for computation; + nin: the variable name of the input energy fields; + nout: the variable name to attribute to the entropy flux in the NC file; + entr_file: the name of the file containing the 3D entropy fluxes; + entr_mean_file: the name of the file containing the global annual mean + entropy value; + + Returns + ------- + The annual global mean value of entropy. + + @author: Valerio Lembo, Hamburg University, 2018. + """ + cdo = Cdo() + en_file = filelist[0] + tem_file = filelist[1] + aux_file = filelist[2] + removeif(aux_file) + cdo.timmean(input='-yearmonmean -monmean -div {} {}'.format( + en_file, tem_file), + options='-b F32', + output=aux_file) + entr_gmean = write_eb(nin, nout, aux_file, entr_file, entr_mean_file) + return entr_gmean + + +def evapentr(model, wdir, infile, aux_file): + """Compute entropy production related to evaporation fluxes. + + The function computes the material entropy production related to + evaporation fluxes, as part of the material entropy production + obtained with the direct method (after Lucarini et al., 2011). + + Arguments: + ---------- + - model: the model name; + - wdir: the working directory where the outputs are stored; + - infile: a list of file containing hfls and ts, respectively + (with dimensions (time,lat,lon); + - aux_file: the name of a dummy aux. file to be used for computations; + + Returns + ------- + The global annual mean entropy production related to evaporation, the + file containing it. + + @author: Valerio Lembo, Hamburg University, 2018. + """ + evapentr_file = wdir + '/{}_evap_entr.nc'.format(model) + evapentr_mean_file = wdir + '/{}_evapEntropy_gmean.nc'.format(model) + flist = [infile[0], infile[1], aux_file] + evapentr_gmean = entr(flist, 'hfls', 'sevap', evapentr_file, + evapentr_mean_file) + evapentr_gmean = masktonull(evapentr_gmean) + os.remove(evapentr_mean_file) + return evapentr_gmean, evapentr_file + + +def indentr(model, wdir, infile, input_data, aux_file, toab_gmean): + """Compute the material entropy production with the indirect method. + + The function computes the material entropy production with the indirect + method, isolating a vertical and a horizontal component + (after Lucarini et al., 2011). The outputs are stored in terms of global + mean time series, and in terms of (lat,lon) fields for each year to a NC + file. 
+
+    Arguments:
+    ----------
+    model: the model name;
+    wdir: the working directory where the outputs are stored;
+    infile: a list of files containing the emission temperature (te) and
+        the TOA energy budget (toab), respectively;
+    input_data: a list of metadata dictionaries describing the input
+        fields (rlds, rlus, rsds, rsus and ts);
+    aux_file: the name of a dummy aux. file to be used for computations;
+    toab_gmean: the climatological annual mean TOA energy budget;
+
+    Returns
+    -------
+    The annual mean vertical and horizontal components of the entropy
+    production with the indirect method, the file containing them.
+
+    @author: Valerio Lembo, Hamburg University, 2018.
+    """
+    cdo = Cdo()
+    rlds_file = e.select_metadata(input_data, short_name='rlds',
+                                  dataset=model)[0]['filename']
+    rlus_file = e.select_metadata(input_data, short_name='rlus',
+                                  dataset=model)[0]['filename']
+    rsds_file = e.select_metadata(input_data, short_name='rsds',
+                                  dataset=model)[0]['filename']
+    rsus_file = e.select_metadata(input_data, short_name='rsus',
+                                  dataset=model)[0]['filename']
+    ts_file = e.select_metadata(input_data, short_name='ts',
+                                dataset=model)[0]['filename']
+    horzentropy_file = wdir + '/{}_horizEntropy.nc'.format(model)
+    vertenergy_file = wdir + '/{}_verticalEnergy.nc'.format(model)
+    vertentropy_file = wdir + '/{}_verticalEntropy.nc'.format(model)
+    vertentropy_mean_file = wdir + '/{}_vertEntropy_gmean.nc'.format(model)
+    horzentropy_mean_file = wdir + '/{}_horizEntropy_gmean.nc'.format(model)
+    removeif(aux_file)
+    cdo.yearmonmean(input='-mulc,-1 -div -subc,{}  {}  {}'.format(
+        np.nanmean(toab_gmean), infile[1], infile[0]),
+                    output=aux_file)
+    horzentr_mean = write_eb('toab', 'shor', aux_file, horzentropy_file,
+                             horzentropy_mean_file)
+    cdo.yearmonmean(input=' -add {} -sub {} -add {} {}'.format(
+        rlds_file, rsds_file, rlus_file, rsus_file),
+                    output=vertenergy_file)
+    cdo.mul(input='{} -sub -yearmonmean -reci {} -yearmonmean -reci {}'.format(
+        vertenergy_file, infile[0], ts_file),
+            output=aux_file)
+    vertentr_mean = write_eb('rlds', 'sver', aux_file, vertentropy_file,
+                             vertentropy_mean_file)
+    remove_files = [
+        horzentropy_mean_file, vertenergy_file, vertentropy_mean_file
+    ]
+    for filen in remove_files:
+        os.remove(filen)
+    return horzentr_mean, vertentr_mean, horzentropy_file, vertentropy_file
+
+
+def kinentr(logger, aux_file, tasvert_file, lect, lec):
+    """Compute the material entropy production from kin. energy dissipation.
+
+    The function computes the material entropy production associated with the
+    kinetic energy dissipation, through the intensity of the LEC.
+
+    Arguments:
+    ----------
+    logger: the log file where the global mean values are printed out;
+    aux_file: the name of a dummy aux. file to be used for computations;
+    tasvert_file: a file containing the vertically integrated boundary layer
+        temperature;
+    lect: an array containing the annual mean LEC intensity;
+    lec: a flag marking whether the LEC has been previously computed or not;
+
+    Returns
+    -------
+    The global annual mean entropy production related to kinetic energy
+    dissipation.
+
+    @author: Valerio Lembo, Hamburg University, 2018.
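+
+    In short (a sketch of what the code below does): S_kin = W / T_bl,
+    where W is the annual mean LEC intensity (lect) and T_bl the
+    vertically averaged boundary layer temperature read from tasvert_file.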
+    """
+    cdo = Cdo()
+    removeif(aux_file)
+    if lec == 'True':
+        cdo.yearmonmean(input=tasvert_file, output=aux_file)
+        with Dataset(aux_file) as f_l:
+            tabl_mean = f_l.variables['ts'][:, 0, 0]
+        minentr_mean = np.nanmean(lect / tabl_mean)
+        logger.info(
+            'Material entropy production associated with '
+            'kinetic energy dissipation: %s\n', minentr_mean)
+        minentr_mean = masktonull(minentr_mean)
+    else:
+        minentr_mean = 0.010
+        logger.info('I cannot compute the material entropy '
+                    'production without the LEC...\n')
+        logger.info('I will assign a given value for the material '
+                    'entropy production attributed to LEC '
+                    '(0.01 W/m2*K)\n')
+    return minentr_mean
+
+
+def landoc_budg(model, wdir, infile, mask, name):
+    """Compute budgets separately on land and oceans.
+
+    Arguments:
+    ----------
+    model: the model name;
+    wdir: the working directory where the outputs are stored;
+    infile: the file containing the original budget field as (time,lat,lon);
+    mask: the file containing the land-sea mask;
+    name: the variable name as in the input file;
+
+    Returns
+    -------
+    The mean budgets over land and over oceans.
+
+    @author: Valerio Lembo, Hamburg University, 2018.
+    """
+    cdo = Cdo()
+    ocean_file = wdir + '/{}_{}_ocean.nc'.format(model, name)
+    oc_gmean_file = wdir + '/{}_{}_oc_gmean.nc'.format(model, name)
+    land_file = wdir + '/{}_{}_land.nc'.format(model, name)
+    la_gmean_file = wdir + '/{}_{}_la_gmean.nc'.format(model, name)
+    aux_file = wdir + '/aux.nc'
+    removeif(aux_file)
+    cdo.mul(input='{} -eqc,0 {}'.format(infile, mask), output=ocean_file)
+    cdo.timmean(input='-fldmean {}'.format(ocean_file), output=oc_gmean_file)
+    with Dataset(oc_gmean_file) as f_l:
+        oc_gmean = f_l.variables[name][0, 0, 0]
+    cdo.sub(input='{} {}'.format(infile, ocean_file), output=land_file)
+    cdo.setctomiss('0', input=ocean_file, output=aux_file)
+    move(aux_file, ocean_file)
+    cdo.setctomiss('0', input=land_file, output=aux_file)
+    move(aux_file, land_file)
+    cdo.timmean(input='-fldmean {}'.format(land_file), output=la_gmean_file)
+    with Dataset(la_gmean_file) as f_l:
+        la_gmean = f_l.variables[name][0, 0, 0]
+    remove_files = [ocean_file, oc_gmean_file, land_file, la_gmean_file]
+    for filen in remove_files:
+        os.remove(filen)
+    return oc_gmean, la_gmean
+
+
+def mask_precip(model, wdir, infile):
+    """Mask precipitation according to the phase of the droplet.
+
+    This function masks the rainfall and snowfall precipitation fields
+    according to the temperature of the cloud at droplet formation. This
+    makes it possible to isolate some intermediate phase changes of the
+    droplet life cycle in the atmosphere.
+
+    Arguments:
+    ----------
+    model: the model name;
+    wdir: the working directory where the outputs are stored;
+    infile: a list of input files, containing rainfall precipitation (prr),
+        snowfall precipitation (prsn) and the temperature at the droplet
+        formation level (tlcl), respectively (dimensions (time,lat,lon));
+
+    Returns
+    -------
+    The files containing masked rainfall and snowfall precipitation fields,
+    respectively.
+
+    @author: Valerio Lembo, Hamburg University, 2018.
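+
+    Example (a minimal usage sketch; file names are hypothetical)::
+
+        prr_masked, prsn_masked = mask_precip(
+            'MODEL', '/tmp/wdir', ['prr.nc', 'prsn.nc', 'tlcl.nc'])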
+ """ + cdo = Cdo() + prr_file = infile[0] + prsn_file = infile[1] + tlcl_file = infile[2] + # Prepare masks for snowfall and rainfall + maskrain_file = wdir + '/{}_maskprecr.nc'.format(model) + cdo.gtc('1.0E-7', input=prr_file, options=' -b F32', output=maskrain_file) + masksnow_file = wdir + '/{}_maskprecs.nc'.format(model) + cdo.gtc('1.0E-7', input=prsn_file, options=' -b F32', output=masksnow_file) + prrmask_file = wdir + '/{}_prr_masked.nc'.format(model) + cdo.mul(input='{} {}'.format(maskrain_file, prr_file), + options='-b F32', + output=prrmask_file) + prsnmask_file = wdir + '/{}_prsn_masked.nc'.format(model) + cdo.mul(input='{} {}'.format(masksnow_file, prsn_file), + options='-b F32', + output=prsnmask_file) + # Temperatures of the rainfall and snowfall clouds + tliq_file = wdir + '/{}_tliq.nc'.format(model) + cdo.setrtomiss('-1000,0', + input='-mul {} {}'.format(tlcl_file, maskrain_file), + options='-b F32', + output=tliq_file) + tsol_file = wdir + '/{}_tsol.nc'.format(model) + cdo.setrtomiss('-1000,0', + input='-mul {} {}'.format(tlcl_file, masksnow_file), + options='-b F32', + output=tsol_file) + tdegl_file = wdir + '/{}_tliqdeg.nc'.format(model) + cdo.subc('273.15', input=tliq_file, options='-b F32', output=tdegl_file) + tdegs_file = wdir + '/{}_tsoldeg.nc'.format(model) + cdo.subc('273.15', input=tsol_file, options='-b F32', output=tdegs_file) + # Mask for ice cloud and temperature for phase changes from ice to rain + maskice_file = wdir + '/{}_maskice.nc'.format(model) + cdo.ltc('0.0', input=tdegl_file, options='-b F32', output=maskice_file) + ticer_file = wdir + '/{}_t_icerain_file'.format(model) + cdo.setrtomiss('-1000,0', + input='-mul {} {}'.format(tliq_file, maskice_file), + options='-b F32', + output=ticer_file) + prrice_file = wdir + '/{}_prr_ice_file.nc'.format(model) + cdo.mul(input='{} {}'.format(maskice_file, prr_file), + options='-b F32', + output=prrice_file) + # Mask for vapor cloud and temperature for phase changes from vapor to snow + maskvap_file = wdir + '/{}_maskvap.nc'.format(model) + cdo.gtc('0.0', input=tdegs_file, options='-b F32', output=maskvap_file) + tvaps_file = wdir + '/{}_t_vapsnow.nc'.format(model) + cdo.setrtomiss('-1000,0', + input='-mul {} {}'.format(tsol_file, maskvap_file), + options='-b F32', + output=tvaps_file) + prsnvap_file = wdir + '/{}_prsn_vap.nc'.format(model) + cdo.mul(input='{} {}'.format(maskvap_file, prsn_file), + options='-b F32', + output=prsnvap_file) + remove_files = [ + maskrain_file, masksnow_file, tliq_file, tsol_file, tdegl_file, + tdegs_file, maskice_file, ticer_file, prrice_file, maskvap_file, + tvaps_file, prsnvap_file + ] + for filen in remove_files: + os.remove(filen) + return prrmask_file, prsnmask_file + + +def masktonull(value): + """Replace missing values with zeros.""" + try: + value = float(value) + except Warning: + value = 0 + return value + + +def meltentr(model, wdir, latsnow_file, aux_file): + """Compute entropy production related to snow melting at the ground. + + The function computes the material entropy production related to snow + melting at the ground, as part of the material entropy production + obtained with the direct method (after Lucarini et al., 2011). + + Arguments: + ---------- + - model: the model name; + - wdir: the working directory where the outputs are stored; + - infile: the latent energy associated with snowfall precipitation; + - aux_file: the name of a dummy aux. 
file to be used for computations;
+
+    Returns
+    -------
+    The global annual mean entropy production related to snow melting, the
+    file containing it.
+
+    @author: Valerio Lembo, Hamburg University, 2018.
+    """
+    cdo = Cdo()
+    removeif(aux_file)
+    latmelt_file = (wdir + '/{}_latentEnergy_snowmelt.nc'.format(model))
+    meltentr_file = (wdir + '/{}_snowmelt_entr.nc'.format(model))
+    meltentr_mean_file = wdir + '/{}_snowmeltEntropy_gmean.nc'.format(model)
+    cdo.mulc(str(L_S),
+             input='-divc,{} {}'.format(str(LC_SUB), latsnow_file),
+             options='-b F32',
+             output=latmelt_file)
+    cdo.timmean(
+        input='-yearmonmean -monmean -setmisstoc,0 -divc,273.15 {}'.format(
+            latmelt_file),
+        options='-b F32',
+        output=aux_file)
+    cdo.chname('prsn,smelt',
+               input=aux_file,
+               options='-b F32',
+               output=meltentr_file)
+    cdo.fldmean(input=meltentr_file,
+                options='-b F32',
+                output=meltentr_mean_file)
+    with Dataset(meltentr_mean_file) as f_l:
+        meltentr_gmean = f_l.variables['smelt'][0, 0, 0]
+    meltentr_gmean = masktonull(meltentr_gmean)
+    remove_files = [latmelt_file, meltentr_mean_file]
+    for filen in remove_files:
+        os.remove(filen)
+    os.remove(latsnow_file)
+    return meltentr_gmean, meltentr_file
+
+
+def potentr(model, wdir, infile, aux_file):
+    """Compute entropy production related to potential energy of the droplet.
+
+    The function computes the material entropy production related to the
+    potential energy of the snowfall or rainfall droplet. This term must be
+    part of a material entropy production budget, even though it does not
+    normally take part in the energy exchanges of a model.
+
+    Arguments:
+    ----------
+    model: the model name;
+    wdir: the working directory where the outputs are stored;
+    infile: a list of files containing the height of the boundary layer top
+        (htop), the masked rainfall precipitation (prrmask), the masked
+        snowfall precipitation (prsnmask), the temperature of the vertical
+        column between the cloud top and the ground (tcolumn);
+    aux_file: the name of a dummy aux. file to be used for computations;
+
+    Returns
+    -------
+    The global annual mean entropy production related to potential energy of
+    the droplet, the file containing it.
+
+    @author: Valerio Lembo, Hamburg University, 2018.
+    """
+    cdo = Cdo()
+    removeif(aux_file)
+    htop_file = infile[0]
+    prrmask_file = infile[1]
+    prsnmask_file = infile[2]
+    tcolumn_file = infile[3]
+    poten_file = wdir + '/{}_potEnergy_drop.nc'.format(model)
+    potentr_file = wdir + '/{}_pot_drop_entr.nc'.format(model)
+    potentr_mean_file = wdir + '/{}_potEnergy_drop_gmean.nc'.format(model)
+    cdo.mulc(GRAV,
+             input='-mul {} -add {} {}'.format(htop_file, prrmask_file,
+                                               prsnmask_file),
+             options='-b F32',
+             output=poten_file)
+    flist = [poten_file, tcolumn_file, aux_file]
+    potentr_gmean = entr(flist, 'htop', 'spotp', potentr_file,
+                         potentr_mean_file)
+    potentr_gmean = masktonull(potentr_gmean)
+    remove_files = [poten_file, potentr_mean_file]
+    for filen in remove_files:
+        os.remove(filen)
+    return potentr_gmean, potentr_file
+
+
+def rainentr(model, wdir, infile, aux_file):
+    """Compute entropy production related to rainfall precipitation.
+
+    The function computes the material entropy production related to rainfall
+    precipitation, as part of the material entropy production obtained with
+    the direct method (after Lucarini et al., 2011).
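+
+    In short, the latent heat of condensation carried by rainfall is
+    divided by the temperature of the cloud, S_rain = L_c * P_rain /
+    T_cloud (a sketch of the computation performed below via CDO).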
+ + Arguments: + ---------- + model: the model name; + wdir: the working directory where the outputs are stored; + infile: a list of file containing the masked rainfall precipitation + (prrmask) and the temperature of the cloud (tcloud); + aux_file: the name of a dummy aux. file to be used for computations; + + Returns + ------- + The global annual mean entropy production related to rainfall, the + file containing it. + + @author: Valerio Lembo, Hamburg University, 2018. + """ + cdo = Cdo() + prrmask_file = infile[0] + removeif(aux_file) + latrain_file = wdir + '/{}_latentEnergy_rain.nc'.format(model) + rainentr_file = wdir + '/{}_rain_entr.nc'.format(model) + rainentr_mean_file = wdir + '/{}_rainEntropy_gmean.nc'.format(model) + cdo.mulc(str(L_C), + input='-setmisstoc,0 {}'.format(prrmask_file), + options='-b F32', + output=latrain_file) + flist = [latrain_file, infile[1], aux_file] + rainentr_gmean = entr(flist, 'prr', 'srain', rainentr_file, + rainentr_mean_file) + rainentr_gmean = masktonull(rainentr_gmean) + remove_files = [latrain_file, rainentr_mean_file] + for filen in remove_files: + os.remove(filen) + return rainentr_gmean, rainentr_file + + +def removeif(filename): + """Remove filename if it exists.""" + try: + os.remove(filename) + except OSError: + pass + + +def sensentr(model, wdir, infile, aux_file): + """Compute entropy production related to sensible heat fluxes. + + The function computes the material entropy production related to sensible + heat fluxes, as part of the material entropy production obtained with the + direct method (after Lucarini et al., 2011). + + Arguments: + ---------- + model: the model name; + wdir: the working directory where the outputs are stored; + infile: a list of file containing hfss, the temperature at the boundary + layer top (tabl), ts, respectively (with dimensions (time,lat,lon); + aux_file: the name of a dummy aux. file to be used for computations; + + Returns + ------- + The global annual mean entropy production related to sensible heat fluxes, + the file containing it. + + @author: Valerio Lembo, Hamburg University, 2018. + """ + cdo = Cdo() + difftemp_file = wdir + '/{}_difftemp_bl.nc'.format(model) + sensentr_file = (wdir + '/{}_sens_entr.nc'.format(model)) + sensentr_mean_file = wdir + '/{}_sensEntropy_gmean.nc'.format(model) + cdo.reci(input='-sub -reci {} -reci {}'.format(infile[1], infile[2]), + options='-b F32', + output=difftemp_file) + flist = [infile[0], difftemp_file, aux_file] + sensentr_gmean = entr(flist, 'hfss', 'ssens', sensentr_file, + sensentr_mean_file) + sensentr_gmean = masktonull(sensentr_gmean) + remove_files = [difftemp_file, sensentr_mean_file] + for filen in remove_files: + os.remove(filen) + return sensentr_gmean, sensentr_file + + +def snowentr(model, wdir, infile, aux_file): + """Compute entropy production related to snowfall precipitation. + + The function computes the material entropy production related to snowfall + precipitation, as part of the material entropy production obtained with the + direct method (after Lucarini et al., 2011). + + Arguments: + ---------- + model: the model name; + wdir: the working directory where the outputs are stored; + infile: a list of file containing the masked snowfall precipitation + (prsnmask) and the temperature of the cloud (tcloud); + aux_file: the name of a dummy aux. file to be used for computations; + + Returns + ------- + The global annual mean entropy production related to snowfall, the + file containing it. + + @author: Valerio Lembo, Hamburg University, 2018. 
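+
+    In short, the latent heat of sublimation carried by snowfall is divided
+    by the temperature of the cloud, S_snow = L_s * P_snow / T_cloud (a
+    sketch of the computation performed below via CDO).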
+ """ + cdo = Cdo() + prsnmask_file = infile[0] + removeif(aux_file) + latsnow_file = wdir + '/{}_latentEnergy_snow.nc'.format(model) + snowentr_file = wdir + '/{}_snow_entr.nc'.format(model) + snowentr_mean_file = wdir + '/{}_snowEntropy_gmean.nc'.format(model) + cdo.mulc(str(LC_SUB), + input='-setmisstoc,0 {}'.format(prsnmask_file), + options='-b F32', + output=latsnow_file) + flist = [latsnow_file, infile[1], aux_file] + snowentr_gmean = entr(flist, 'prsn', 'ssnow', snowentr_file, + snowentr_mean_file) + snowentr_gmean = masktonull(snowentr_gmean) + os.remove(snowentr_mean_file) + return snowentr_gmean, latsnow_file, snowentr_file + + +def wmbudg(model, wdir, aux_file, input_data, auxlist): + """Compute the water mass and latent energy budgets. + + This function computes the annual mean water mass and latent energy budgets + from the evaporation and rainfall/snowfall precipitation fluxes and prints + them to a NetCDF file. + The globally averaged annual mean budgets are also provided and saved to + a NetCDF file. + + Arguments: + ---------- + model: the model name; + wdir: the working directory where the outputs are stored; + aux_file: the name of a dummy aux. file to be used for computations; + input_data: a dictionary of file names containing the input fields; + auxlist: a list of auxiliary files; + + Returns + ------- + A list containing global mean water mass and latent energy values, a list + of files containing them. + + @author: Valerio Lembo, Hamburg University, 2018. + """ + cdo = Cdo() + hfls_file = e.select_metadata(input_data, short_name='hfls', + dataset=model)[0]['filename'] + pr_file = e.select_metadata(input_data, short_name='pr', + dataset=model)[0]['filename'] + prsn_file = e.select_metadata(input_data, short_name='prsn', + dataset=model)[0]['filename'] + wmbudg_file = wdir + '/{}_wmb.nc'.format(model) + wm_gmean_file = wdir + '/{}_wmb_gmean.nc'.format(model) + latene_file = wdir + '/{}_latent.nc'.format(model) + latene_gmean_file = wdir + '/{}_latent_gmean.nc'.format(model) + removeif(aux_file) + cdo.sub(input="{} {}".format(auxlist[0], pr_file), output=aux_file) + wmass_gmean = write_eb('hfls', 'wmb', aux_file, wmbudg_file, wm_gmean_file) + removeif(aux_file) + cdo.sub(input="{} -add -mulc,{} {} -mulc,{} {}".format( + hfls_file, str(LC_SUB), prsn_file, str(L_C), auxlist[1]), + output=aux_file) + latent_gmean = write_eb('hfls', 'latent', aux_file, latene_file, + latene_gmean_file) + varlist = [wmass_gmean, latent_gmean] + fileout = [wmbudg_file, latene_file] + remove_files = [wm_gmean_file, latene_gmean_file] + for filen in remove_files: + os.remove(filen) + return varlist, fileout + + +def write_eb(namein, nameout, aux_file, d3_file, gmean_file): + """Change variable name in the NetCDF file and compute averages. + + Arguments: + ---------- + namein: initial name of the variable; + nameout: final name of the variable; + aux_file: the name of an auxiliary file; + d3_file: the file containing (time,lat,lon) fields; + gmean_file: the name of a file where to put the annual and globally + averaged fields; + + Returns + ------- + A global annual mean value of the budget. + + @author: Valerio Lembo, Hamburg University, 2018. 
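+
+    Example (a minimal sketch; file names are hypothetical)::
+
+        toab_gmean = write_eb('rsdt', 'toab', 'aux.nc', 'toab.nc',
+                              'toab_gmean.nc')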
+    """
+    cdo = Cdo()
+    ch_name = '{},{}'.format(namein, nameout)
+    cdo.chname(ch_name, input=aux_file, options='-b F32', output=d3_file)
+    cdo.fldmean(input='-yearmonmean {}'.format(d3_file), output=gmean_file)
+    with Dataset(gmean_file) as f_l:
+        constant = f_l.variables[nameout][:]
+    return constant
diff --git a/esmvaltool/diag_scripts/thermodyn_diagtool/fluxogram.py b/esmvaltool/diag_scripts/thermodyn_diagtool/fluxogram.py
new file mode 100644
index 0000000000..2325d1ea7d
--- /dev/null
+++ b/esmvaltool/diag_scripts/thermodyn_diagtool/fluxogram.py
@@ -0,0 +1,447 @@
+"""FLUX DIAGRAM PRODUCTION.
+
+Created on Tue Jun 19 16:41:47 2018.
+
+@author: Valerio2
+
+Copyright 2018 Florian Ulrich Jehn
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+from matplotlib import pyplot as plt
+
+
+class Fluxogram():
+    """The diagram flux module.
+
+    A class to draw and maintain all fluxes and storages from a model or
+    some similar kind of thing to be drawn as a sequence of storages
+    and fluxes.
+    """
+
+    def __init__(self, max_flux, max_storage, grid_size=20):
+        """Initialize a fluxogram.
+
+        The arguments are:
+        - max_flux: maximum flux of all fluxes; needed for scaling
+        - max_storage: maximum storage of all storages; needed for scaling
+        - grid_size: grid size for drawing the fluxogram, determines how big
+          everything is. Fluxes and storages are scaled accordingly
+        - storages: all the storages the fluxogram has (usually empty to
+          begin with)
+        - fluxes: all the fluxes the fluxogram has (usually empty to begin
+          with). 
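+
+        Example (a minimal sketch; "labels" stands for the 21 annotation
+        strings expected by draw)::
+
+            flux = Fluxogram(1000, 1000)
+            flux.add_storage("AZ", 600, 0, 0)
+            flux.add_storage("KZ", 600, 0, 1.5)
+            flux.add_flux("A2KZ", flux.storages[1], flux.storages[0], 100)
+            flux.draw("diagram.png", labels)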
+ """ + self.storages = [] + self.fluxes = [] + self.max_flux = max_flux + self.max_storage = max_storage + self.grid_size = grid_size + + def add_storage(self, name, amount, order, offset): + """Add a storage to the storages of the fluxogram.""" + self.storages.append( + Storage(name, self.grid_size, len(self.storages), amount, order, + offset)) + + def add_flux(self, name, from_storage, to_storage, amount): + """Add a flux to the fluxes of the fluxogram.""" + self.fluxes.append( + Flux(name, self.grid_size, from_storage, to_storage, amount)) + + def update_all_storages(self, amounts): + """Update the amount of all storages.""" + for storage, amount in zip(self.storages, amounts): + storage.update_storage(amount) + + def update_all_fluxes(self, amounts): + """Update the amount of all fluxes.""" + for flux, amount in zip(self.fluxes, amounts): + flux.update_flux(amount) + + def update_everything(self, amounts_storages, amounts_fluxes): + """Update all fluxes and storages.""" + self.update_all_fluxes(amounts_fluxes) + self.update_all_storages(amounts_storages) + + def draw(self, filen, listv): + """Draw all fluxes and storages.""" + fig = plt.figure() + frame1 = plt.axes() + fig.set_size_inches(18.5, 10.5) + # find the smallest/largest offset_ so the fluxogram can be drawn big + # enough + largest_offset = 0 + smallest_offset = 0 + largest_order = 0 + for storage in self.storages: + if storage.offset > largest_offset: + largest_offset = storage.offset + if storage.offset < smallest_offset: + smallest_offset = storage.offset + if storage.order > largest_order: + largest_order = storage.order + # set y and x limits + y_max = 0 + y_min = (largest_order + 1) * 2 * self.grid_size * -1 + x_max = (largest_offset + 2) * 2 * self.grid_size + x_min = (smallest_offset - 1) * 2 * self.grid_size + plt.axis([x_min, x_max, y_min, y_max]) + frame1.axes.get_xaxis().set_visible(False) + frame1.axes.get_yaxis().set_visible(False) + # draw all fluxes + dict_r = { + 'AZ+': listv[0], + 'ASE+': listv[2], + 'ATE+': listv[4], + 'A2KS': listv[6], + 'A2KT': listv[7], + 'KTE-': listv[8], + 'KSE-': listv[10], + 'KZ-': listv[12] + } + dict_oth = { + 'l': listv[14], + 'dn': listv[15], + 'rdn': listv[16], + 'ldn': listv[17], + 'up': listv[18], + 'lup': listv[19], + 'rup': listv[20] + } + switcher = { + 'l': self.leftarr_txt, + 'dn': self.dnarr_txt, + 'rdn': self.rdnarr_txt, + 'ldn': self.ldnarr_txt, + 'up': self.uparr_txt, + 'lup': self.luparr_txt, + 'rup': self.ruparr_txt + } + for flux in self.fluxes: + idb = flux.name + # scale the amount + scaled_amount_flux = self.scaler(flux.amount, self.max_flux) + # width multiplied because if not, the arrows are so tiny + arrow = plt.Arrow(flux.x_start, + flux.y_start, + flux.d_x, + flux.d_y, + width=scaled_amount_flux * 1.7, + alpha=0.8) + if flux.dire == 'r': + for key in dict_r: + value = dict_r[key] + if idb == key: + plt.text(flux.x_start + 0.25 * self.grid_size, + flux.y_start + 0.05 * self.grid_size, + value, + size=self.grid_size * 0.7) + else: + for key in dict_oth: + value = dict_oth[key] + if flux.dire == key: + switcher[flux.dire](value, flux, plt) + plt.gca().add_patch(arrow) + # draw all storages + for storage in self.storages: + # scale the amount + scaled_amount_stor = self.scaler(storage.amount, self.max_storage) + if scaled_amount_stor == 0: + scaled_amount_stor = 0.0001 + # change_x and y, so the storages are centered to the middle + # of their position and not to upper left + x_p = ( + storage.x_p + + (1 - storage.amount / self.max_storage) * 1.3 * 
self.grid_size)
+            y_p = (
+                storage.y_p -
+                (1 - storage.amount / self.max_storage) * 1.3 * self.grid_size)
+            rectangle = plt.Rectangle((x_p, y_p),
+                                      scaled_amount_stor,
+                                      -scaled_amount_stor,
+                                      alpha=0.4)
+            # label all storages
+            plt.text(storage.x_p + 0.6 * self.grid_size,
+                     storage.y_p - 0.65 * self.grid_size,
+                     storage.name,
+                     fontsize=0.7 * self.grid_size)
+            dict_s = {
+                'AZ': listv[1],
+                'ASE': listv[3],
+                'ATE': listv[5],
+                'KTE': listv[9],
+                'KSE': listv[11],
+                'KZ': listv[13]
+            }
+            for key in dict_s:
+                value = dict_s[key]
+                if storage.name == key:
+                    plt.text(storage.x_p + 0.6 * self.grid_size,
+                             storage.y_p - 0.85 * self.grid_size,
+                             value,
+                             fontsize=0.7 * self.grid_size)
+            # draw the storage rectangle
+            plt.gca().add_patch(rectangle)
+        plt.savefig(filen)
+        plt.close(fig)
+
+    def dnarr_txt(self, value, flux, pltt):
+        """Write text on arrow pointing down."""
+        x_start = flux.x_start
+        y_start = flux.y_start
+        pltt.text(x_start - 0.2 * self.grid_size,
+                  y_start - 0.45 * self.grid_size,
+                  value,
+                  size=self.grid_size * 0.7,
+                  rotation=-90)
+
+    def leftarr_txt(self, value, flux, pltt):
+        """Write text on arrow pointing left."""
+        x_start = flux.x_start
+        y_start = flux.y_start
+        pltt.text(x_start - 1.35 * self.grid_size,
+                  y_start + 0.05 * self.grid_size,
+                  value,
+                  size=self.grid_size * 0.7)
+
+    def ldnarr_txt(self, value, flux, pltt):
+        """Write text on arrow pointing down-left."""
+        x_start = flux.x_start
+        y_start = flux.y_start
+        pltt.text(x_start - 0.35 * self.grid_size,
+                  y_start - 0.25 * self.grid_size,
+                  value,
+                  size=self.grid_size * 0.5,
+                  rotation=-110)
+
+    def luparr_txt(self, value, flux, pltt):
+        """Write text on arrow pointing up-left."""
+        x_start = flux.x_start
+        y_start = flux.y_start
+        pltt.text(x_start - 0.35 * self.grid_size,
+                  y_start + 0.45 * self.grid_size,
+                  value,
+                  size=self.grid_size * 0.5,
+                  rotation=110)
+
+    def rdnarr_txt(self, value, flux, pltt):
+        """Write text on arrow pointing down-right."""
+        x_start = flux.x_start
+        y_start = flux.y_start
+        pltt.text(x_start + 0.05 * self.grid_size,
+                  y_start - 0.25 * self.grid_size,
+                  value,
+                  size=self.grid_size * 0.5,
+                  rotation=-75)
+
+    def ruparr_txt(self, value, flux, pltt):
+        """Write text on arrow pointing up-right."""
+        x_start = flux.x_start
+        y_start = flux.y_start
+        pltt.text(x_start - 0.1 * self.grid_size,
+                  y_start + 0.45 * self.grid_size,
+                  value,
+                  size=self.grid_size * 0.5,
+                  rotation=75)
+
+    def uparr_txt(self, value, flux, pltt):
+        """Write text on arrow pointing up."""
+        x_start = flux.x_start
+        y_start = flux.y_start
+        pltt.text(x_start + 0.05 * self.grid_size,
+                  y_start + 0.75 * self.grid_size,
+                  value,
+                  size=self.grid_size * 0.7,
+                  rotation=90)
+
+    def scaler(self, value_in, base_max):
+        """Scale the values in the blocks of the diagram.
+
+        Scale the fluxes and storages so they don't overstep their
+        graphical bounds. Must be called with:
+        - value_in: the value that needs rescaling
+        - base_max: the upper limit of the original dataset
+          ~ 100 for fluxes, ~250 for stores (in my model). 
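+
+        In other words, a linear rescaling onto [0, grid_size] (what the
+        code below implements):
+        scaled = (limit_max - limit_min) * (value_in - base_min) /
+        (base_max - base_min) + limit_min.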
+ """ + # baseMin: the lower limit of the original dataset (usually zero) + base_min = 0 + # limitMin: the lower limit of the rescaled dataset (usually zero) + limit_min = 0 + # limitMax: the upper limit of the rescaled dataset (in our case grid) + limit_max = self.grid_size + # prevents wrong use of scaler + if value_in > base_max: + raise ValueError("Input value larger than base max") + return (((limit_max - limit_min) * (value_in - base_min) / + (base_max - base_min)) + limit_min) + + +class Flux: + """Contain a flux of a fluxogram.""" + + def __init__(self, name, grid_size, from_storage, to_storage, amount=0): + """Initialize a flux. + + Arguments are: + - name: name of the flux + - grid_size: grid size of the diagram + - from_storage: storage the flux is originating from + - to_storage: storage the flux is going into + - amount: how much stuff fluxes. + """ + self.name = name + self.from_storage = from_storage + self.to_storage = to_storage + self.amount = amount + self.grid_size = grid_size + (self.x_start, self.y_start, self.x_end, self.y_end, self.d_x, + self.d_y, self.dire) = (self.calc_start_end_dx_dy()) + + def update_flux(self, amount): + """Update the amount of the flux.""" + self.amount = amount + + def calc_start_end_dx_dy(self): + """Scale the arrows. + + Calculate the starting and ending point of an arrow depending on the + order and offset of the starting and ending storages. This helps + determine the direction of the arrow + returns the start and end xy coordinates of the arrow as tuples. + """ + # arrow pointing to left up + if (self.from_storage.offset > self.to_storage.offset + and self.from_storage.order > self.to_storage.order): + x_start = self.from_storage.x_p + 0.85 * self.grid_size + y_start = self.from_storage.y_p - self.grid_size * 0.5 + x_end = self.to_storage.x_p + self.grid_size * 0.65 + y_end = self.to_storage.y_p - 0.7 * self.grid_size + d_x = abs(x_start - x_end) * (-1) + d_y = abs(y_start - y_end) + dire = 'lup' + # arrow pointing up + elif (self.from_storage.offset == self.to_storage.offset + and self.from_storage.order > self.to_storage.order): + x_start = self.from_storage.x_p + 0.85 * self.grid_size + y_start = self.from_storage.y_p - 0.5 * self.grid_size + x_end = self.to_storage.x_p + 0.85 * self.grid_size + y_end = self.to_storage.y_p - 0.25 * self.grid_size + d_x = abs(x_start - x_end) + d_y = abs(y_start - y_end) + dire = 'up' + # arrow pointing right up + elif (self.from_storage.offset < self.to_storage.offset + and self.from_storage.order > self.to_storage.order): + x_start = (self.from_storage.x_p + self.grid_size) + y_start = self.from_storage.y_p - 0.5 * self.grid_size + x_end = self.to_storage.x_p + 0.05 * self.grid_size + y_end = self.to_storage.y_p - 0.75 * self.grid_size + d_x = abs(x_start - x_end) + d_y = abs(y_start - y_end) + dire = 'rup' + # arrow pointing right + elif (self.from_storage.offset < self.to_storage.offset + and self.from_storage.order == self.to_storage.order): + x_start = (self.from_storage.x_p + self.grid_size) + y_start = self.from_storage.y_p - 0.8 * self.grid_size + x_end = self.to_storage.x_p + 1.25 * self.grid_size + y_end = self.to_storage.y_p - 0.8 * self.grid_size + d_x = abs(x_start - x_end) + d_y = abs(y_start - y_end) + dire = 'r' + # arrow pointing right down + elif (self.from_storage.offset < self.to_storage.offset + and self.from_storage.order < self.to_storage.order): + x_start = (self.from_storage.x_p + 0.85 * self.grid_size) + y_start = self.from_storage.y_p - 1.12 * self.grid_size + x_end = 
self.to_storage.x_p + 0.85 * self.grid_size
+            y_end = self.to_storage.y_p - 0.9 * self.grid_size
+            d_x = abs(x_start - x_end)
+            d_y = abs(y_start - y_end) * (-1)
+            dire = 'rdn'
+        # arrow pointing down
+        elif (self.from_storage.offset == self.to_storage.offset
+              and self.from_storage.order < self.to_storage.order):
+            x_start = self.from_storage.x_p + 0.8 * self.grid_size
+            y_start = (self.from_storage.y_p - 1.12 * self.grid_size)
+            x_end = self.to_storage.x_p + 0.8 * self.grid_size
+            y_end = self.to_storage.y_p - 1.4 * self.grid_size
+            d_x = abs(x_start - x_end)
+            d_y = abs(y_start - y_end) * (-1)
+            dire = 'dn'
+        # arrow pointing left down
+        elif (self.from_storage.offset > self.to_storage.offset
+              and self.from_storage.order < self.to_storage.order):
+            x_start = self.from_storage.x_p + 0.75 * self.grid_size
+            y_start = (self.from_storage.y_p - 1.1 * self.grid_size)
+            x_end = self.to_storage.x_p + 0.6 * self.grid_size
+            y_end = self.to_storage.y_p - 0.9 * self.grid_size
+            d_x = abs(x_start - x_end) * (-1)
+            d_y = abs(y_start - y_end) * (-1)
+            dire = 'ldn'
+        # arrow pointing left
+        elif (self.from_storage.offset > self.to_storage.offset
+              and self.from_storage.order == self.to_storage.order):
+            x_start = self.from_storage.x_p + 0.5 * self.grid_size
+            y_start = self.from_storage.y_p - 0.75 * self.grid_size
+            x_end = self.to_storage.x_p + 0.25 * self.grid_size
+            y_end = self.to_storage.y_p - 0.75 * self.grid_size
+            d_x = abs(x_start - x_end) * (-1)
+            d_y = abs(y_start - y_end)
+            dire = 'l'
+        # multiply by 0.75 so there is a gap between storages and arrows
+        d_x = d_x * 0.75
+        d_y = d_y * 0.75
+        return x_start, y_start, x_end, y_end, d_x, d_y, dire
+
+
+class Storage:
+    """Contain a storage of a fluxogram."""
+
+    def __init__(self, name, grid_size, number, amount=0, order=0, offset=0):
+        """Initialize a storage.
+
+        Arguments are:
+        - name: name of the storage
+        - number: consecutive number
+        - grid_size: grid size of the diagram
+        - amount: how much stuff is in it
+        - order: how far down it is in the hierarchy (starts with 0)
+        - offset: how much the storage is offset to the left/right
+          in relationship to the center.
+        """
+        self.name = name
+        self.amount = amount
+        self.number = number
+        self.order = order
+        self.offset = offset
+        self.grid_size = grid_size
+        self.x_p, self.y_p = self.calculate_xy()
+
+    def update_storage(self, amount):
+        """Update the amount of the storage."""
+        self.amount = amount
+
+    def calculate_xy(self):
+        """Provide coordinates of the blocks in the diagram.
+
+        Calculate the xy coordinates of the starting point from where
+        the rectangle is drawn. The additional multiplication by two is
+        to produce the gaps in the diagram.
+        """
+        x_p = self.offset * self.grid_size * 2
+        # multiply by -1 to draw the diagram from top to bottom
+        y_p = self.order * self.grid_size * 2 * -1
+        return x_p, y_p
diff --git a/esmvaltool/diag_scripts/thermodyn_diagtool/fourier_coefficients.py b/esmvaltool/diag_scripts/thermodyn_diagtool/fourier_coefficients.py
new file mode 100644
index 0000000000..de1a8d75a9
--- /dev/null
+++ b/esmvaltool/diag_scripts/thermodyn_diagtool/fourier_coefficients.py
@@ -0,0 +1,297 @@
+"""Module for the computation of Fourier coefficients from a lonlat grid.
+
+Computation of the Fourier coefficients from lonlat grids
+on pressure levels at every timestep.
+
+The spectral truncation is determined by the number of longitudinal
+gridsteps. The outputs are given as (time,level,wave,lat) where wave stands
+for the zonal wavenumber. 
In the context of the thermodynamic diagnostic tool, +this is used for the computation of the Lorenz Energy Cycle. + +@author: valerio.lembo@uni-hamburg.de, Valerio Lembo, Hamburg University, 2018. +""" + +import numpy as np +from netCDF4 import Dataset + +GP_RES = np.array([16, 32, 48, 64, 96, 128, 256, 384, 512, 1024, 2048, 4096]) +FC_RES = np.array([5, 10, 15, 21, 31, 43, 85, 127, 171, 341, 683, 1365]) +G_0 = 9.81 # Gravity acceleration +GAM = 0.0065 # Standard atmosphere lapse rate +GAS_CON = 287.0 # Gas constant +P_0 = 10000 # Reference tropospheric pressure + + +def fourier_coeff(tadiagfile, outfile, ta_input, tas_input): + """Compute Fourier coefficients in lon direction. + + Arguments: + --------- + tadiagfile: the name of a file to store modified t fields; + outfile: the name of a file to store the Fourier coefficients; + ta_input: the name of a file containing t,u,v,w fields; + tas_input: the name of a file containing t2m field. + """ + with Dataset(ta_input) as dataset: + lon = dataset.variables['lon'][:] + lat = dataset.variables['lat'][:] + lev = dataset.variables['plev'][:] + time = dataset.variables['time'][:] + t_a = dataset.variables['ta'][:, :, :, :] + u_a = dataset.variables['ua'][:, :, :, :] + v_a = dataset.variables['va'][:, :, :, :] + wap = dataset.variables['wap'][:, :, :, :] + nlon = len(lon) + nlat = len(lat) + nlev = len(lev) + ntime = len(time) + i = np.min(np.where(2 * nlat <= GP_RES)) + trunc = FC_RES[i] + 1 + wave2 = np.linspace(0, trunc - 1, trunc) + with Dataset(tas_input) as dataset: + tas = dataset.variables['tas'][:, :, :] + tas = tas[:, ::-1, :] + ta1_fx = np.array(t_a) + deltat = np.zeros([ntime, nlev, nlat, nlon]) + p_s = np.full([ntime, nlat, nlon], P_0) + for i in np.arange(nlev - 1, 0, -1): + h_1 = np.ma.masked_where(ta1_fx[:, i, :, :] != 0, ta1_fx[:, i, :, :]) + if np.any(h_1.mask > 0): + deltat[:, i - 1, :, :] = np.where(ta1_fx[:, i - 1, :, :] != 0, + deltat[:, i - 1, :, :], + (ta1_fx[:, i, :, :] - tas)) + deltat[:, i - 1, :, :] = ((1 * np.array(h_1.mask)) * + np.array(deltat[:, i - 1, :, :])) + d_p = -((P_0 * G_0 / + (GAM * GAS_CON)) * deltat[:, i - 1, :, :] / tas) + p_s = np.where(ta1_fx[:, i - 1, :, :] != 0, p_s, lev[i - 1] + d_p) + for k in np.arange(0, nlev - i - 1, 1): + h_3 = np.ma.masked_where(ta1_fx[:, i + k, :, :] != 0, + ta1_fx[:, i + k, :, :]) + if np.any(h_3.mask > 0): + deltat[:, i - 1, :, :] = np.where( + ta1_fx[:, i + k, :, :] != 0, deltat[:, i - 1, :, :], + (ta1_fx[:, i + k + 1, :, :] - tas)) + d_p = -((P_0 * G_0 / + (GAM * GAS_CON)) * deltat[:, i - 1, :, :] / tas) + p_s = np.where(ta1_fx[:, i + k, :, :] != 0, p_s, + lev[i + k] + d_p) + ta2_fx = np.array(t_a) + mask = np.zeros([nlev, ntime, nlat, nlon]) + dat = np.zeros([nlev, ntime, nlat, nlon]) + tafr_bar = np.zeros([nlev, ntime, nlat, nlon]) + deltap = np.zeros([ntime, nlev, nlat, nlon]) + for i in np.arange(nlev): + deltap[:, i, :, :] = p_s - lev[i] + h_2 = np.ma.masked_where(ta2_fx[:, i, :, :] == 0, ta2_fx[:, i, :, :]) + mask[i, :, :, :] = np.array(h_2.mask) + tafr_bar[i, :, :, :] = (1 * np.array(mask[i, :, :, :]) * + (tas - GAM * GAS_CON / + (G_0 * p_s) * deltap[:, i, :, :] * tas)) + dat[i, :, :, :] = (ta2_fx[:, i, :, :] * + (1 - 1 * np.array(mask[i, :, :, :]))) + t_a[:, i, :, :] = dat[i, :, :, :] + tafr_bar[i, :, :, :] + pr_output_diag(t_a, ta_input, tadiagfile, 'ta') + tafft_p = np.fft.fft(t_a, axis=3)[:, :, :, :int(trunc / 2)] / (nlon) + uafft_p = np.fft.fft(u_a, axis=3)[:, :, :, :int(trunc / 2)] / (nlon) + vafft_p = np.fft.fft(v_a, axis=3)[:, :, :, :int(trunc / 2)] / 
(nlon)
+    wapfft_p = np.fft.fft(wap, axis=3)[:, :, :, :int(trunc / 2)] / (nlon)
+    tafft = np.zeros([ntime, nlev, nlat, trunc])
+    uafft = np.zeros([ntime, nlev, nlat, trunc])
+    vafft = np.zeros([ntime, nlev, nlat, trunc])
+    wapfft = np.zeros([ntime, nlev, nlat, trunc])
+    tafft[:, :, :, 0::2] = np.real(tafft_p)
+    tafft[:, :, :, 1::2] = np.imag(tafft_p)
+    uafft[:, :, :, 0::2] = np.real(uafft_p)
+    uafft[:, :, :, 1::2] = np.imag(uafft_p)
+    vafft[:, :, :, 0::2] = np.real(vafft_p)
+    vafft[:, :, :, 1::2] = np.imag(vafft_p)
+    wapfft[:, :, :, 0::2] = np.real(wapfft_p)
+    wapfft[:, :, :, 1::2] = np.imag(wapfft_p)
+    dict_v = {'ta': tafft, 'ua': uafft, 'va': vafft, 'wap': wapfft}
+    file_desc = 'Fourier coefficients'
+    pr_output(dict_v, ta_input, outfile, file_desc, wave2)
+
+
+def pr_output(dict_v, nc_f, fileo, file_desc, wave2):
+    """Print outputs to NetCDF.
+
+    Save fields to NetCDF, retrieving information from an existing
+    NetCDF file. Metadata are transferred from the existing file to the
+    new one.
+
+    Arguments:
+    ---------
+    dict_v: a dictionary mapping the names of the variables to be saved to
+        the fields to be stored, each with shape (time,level,lat,wave);
+    nc_f: the existing dataset, from where the metadata are retrieved.
+        Coordinates time, level and lat have to be the same dimension as
+        the fields to be saved to the new file;
+    fileo: the name of the output file;
+    file_desc: the description attribute of the output file;
+    wave2: an array containing the zonal wavenumbers;
+
+    @author: Chris Slocum (2014), modified by Valerio Lembo (2018).
+    """
+    # Writing NetCDF files
+    with Dataset(fileo, 'w', format='NETCDF4') as var_nc_fid:
+        var_nc_fid.description = file_desc
+        with Dataset(nc_f, 'r') as nc_fid:
+            extr_time(nc_fid, var_nc_fid)
+            extr_lat(nc_fid, var_nc_fid, 'lat')
+            extr_plev(nc_fid, var_nc_fid)
+        # Write the wave dimension
+        var_nc_fid.createDimension('wave', len(wave2))
+        var_nc_fid.createVariable('wave', nc_fid.variables['plev'].dtype,
+                                  ('wave', ))
+        var_nc_fid.variables['wave'][:] = wave2
+        for key in dict_v:
+            value = dict_v[key]
+            var1_nc_var = var_nc_fid.createVariable(
+                key, 'f8', ('time', 'plev', 'lat', 'wave'))
+            varatts(var1_nc_var, key)
+            var_nc_fid.variables[key][:, :, :, :] = value
+
+
+def pr_output_diag(var1, nc_f, fileo, name1):
+    """Print processed ta field to NetCDF file.
+
+    Save fields to NetCDF, retrieving information from an existing
+    NetCDF file. Metadata are transferred from the existing file to the
+    new one.
+
+    Arguments:
+    ---------
+    var1: the field to be stored, with shape (time,level,lat,lon);
+    nc_f: the existing dataset, from where the metadata are retrieved.
+        Coordinates time,level, lat and lon have to be the same dimension as
+        the fields to be saved to the new files;
+    fileo: the name of the output file;
+    name1: the name of the variable to be saved;
+
+    @author: Chris Slocum (2014), modified by Valerio Lembo (2018).
+    """
+    with Dataset(fileo, 'w', format='NETCDF4') as var_nc_fid:
+        var_nc_fid.description = "Fourier coefficients"
+        with Dataset(nc_f, 'r') as nc_fid:
+            # Extract data from NetCDF file and write them to the new file
+            extr_time(nc_fid, var_nc_fid)
+            extr_lat(nc_fid, var_nc_fid, 'lat')
+            extr_lon(nc_fid, var_nc_fid)
+            extr_plev(nc_fid, var_nc_fid)
+        var1_nc_var = var_nc_fid.createVariable(name1, 'f8',
+                                                ('time', 'plev', 'lat', 'lon'))
+        varatts(var1_nc_var, name1)
+        var_nc_fid.variables[name1][:, :, :, :] = var1
+
+
+def extr_lat(nc_fid, var_nc_fid, latn):
+    """Extract lat coord. from NC files and save them to a new NC file. 
+
+    Arguments:
+    ---------
+    nc_fid: the existing dataset, from where the metadata are
+        retrieved. The lat dimension is retrieved;
+    var_nc_fid: the id of the new NC dataset previously created;
+    latn: the name of the latitude dimension;
+    """
+    lats = nc_fid.variables['lat'][:]
+    var_nc_fid.createDimension(latn, len(lats))
+    var_nc_dim = var_nc_fid.createVariable(latn, nc_fid.variables['lat'].dtype,
+                                           (latn, ))
+    for ncattr in nc_fid.variables['lat'].ncattrs():
+        var_nc_dim.setncattr(ncattr, nc_fid.variables['lat'].getncattr(ncattr))
+    var_nc_fid.variables[latn][:] = lats
+
+
+def extr_lon(nc_fid, var_nc_fid):
+    """Extract lon coord. from NC files and save them to a new NC file.
+
+    Arguments:
+    ---------
+    nc_fid: the existing dataset, from where the metadata are
+        retrieved. The lon dimension is retrieved;
+    var_nc_fid: the id of the new NC dataset previously created;
+    """
+    lons = nc_fid.variables['lon'][:]
+    var_nc_fid.createDimension('lon', len(lons))
+    var_nc_dim = var_nc_fid.createVariable('lon',
+                                           nc_fid.variables['lon'].dtype,
+                                           ('lon', ))
+    for ncattr in nc_fid.variables['lon'].ncattrs():
+        var_nc_dim.setncattr(ncattr, nc_fid.variables['lon'].getncattr(ncattr))
+    var_nc_fid.variables['lon'][:] = lons
+
+
+def extr_plev(nc_fid, var_nc_fid):
+    """Extract plev coord. from NC files and save them to a new NC file.
+
+    Arguments:
+    ---------
+    nc_fid: the existing dataset, from where the metadata are
+        retrieved. The plev dimension is retrieved;
+    var_nc_fid: the id of the new NC dataset previously created;
+    """
+    plev = nc_fid.variables['plev'][:]
+    var_nc_fid.createDimension('plev', len(plev))
+    var_nc_dim = var_nc_fid.createVariable('plev',
+                                           nc_fid.variables['plev'].dtype,
+                                           ('plev', ))
+    for ncattr in nc_fid.variables['plev'].ncattrs():
+        var_nc_dim.setncattr(ncattr,
+                             nc_fid.variables['plev'].getncattr(ncattr))
+    var_nc_fid.variables['plev'][:] = plev
+
+
+def extr_time(nc_fid, var_nc_fid):
+    """Extract time coord. from NC files and save them to a new NC file.
+
+    Arguments:
+    ---------
+    nc_fid: the existing dataset, from where the metadata are
+        retrieved. The time dimension is retrieved;
+    var_nc_fid: the id of the new NC dataset previously created;
+    """
+    time = nc_fid.variables['time'][:]
+    var_nc_fid.createDimension('time', len(time))
+    var_nc_dim = var_nc_fid.createVariable('time',
+                                           nc_fid.variables['time'].dtype,
+                                           ('time', ))
+    for ncattr in nc_fid.variables['time'].ncattrs():
+        var_nc_dim.setncattr(ncattr,
+                             nc_fid.variables['time'].getncattr(ncattr))
+    var_nc_fid.variables['time'][:] = time
+
+
+def varatts(w_nc_var, varname):
+    """Add attributes to the variables, depending on their name.
+
+    Arguments:
+    ---------
+    w_nc_var: a variable object;
+    varname: the name of the variable, among ta, ua, va and wap. 
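+
+    Example (a minimal sketch; ``ncfile`` is an open netCDF4 Dataset)::
+
+        var = ncfile.createVariable('ta', 'f8',
+                                    ('time', 'plev', 'lat', 'lon'))
+        varatts(var, 'ta')  # sets long_name 'Air temperature', units 'K'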
+    """
+    if varname == 'ta':
+        w_nc_var.setncatts({
+            'long_name': "Air temperature",
+            'units': "K",
+            'level_desc': 'pressure levels'
+        })
+    elif varname == 'ua':
+        w_nc_var.setncatts({
+            'long_name': "Eastward wind",
+            'units': "m s-1",
+            'level_desc': 'pressure levels'
+        })
+    elif varname == 'va':
+        w_nc_var.setncatts({
+            'long_name': "Northward wind",
+            'units': "m s-1",
+            'level_desc': 'pressure levels'
+        })
+    elif varname == 'wap':
+        w_nc_var.setncatts({
+            'long_name': 'Lagrangian tendency of air pressure',
+            'units': "Pa s-1",
+            'level_desc': 'pressure levels'
+        })
diff --git a/esmvaltool/diag_scripts/thermodyn_diagtool/lorenz_cycle.py b/esmvaltool/diag_scripts/thermodyn_diagtool/lorenz_cycle.py
new file mode 100644
index 0000000000..aebd145965
--- /dev/null
+++ b/esmvaltool/diag_scripts/thermodyn_diagtool/lorenz_cycle.py
@@ -0,0 +1,1163 @@
+"""PROGRAM FOR LEC COMPUTATION.
+
+Program for computing and plotting the Lorenz Energy Cycle.
+
+The module contains the following functions:
+    - lorenz: it is the main program, calling functions that compute the
+      reservoirs and conversion terms, storing them separately in
+      NetCDF files and providing a flux diagram and a table outputs,
+      the latter separately for the two hemispheres;
+    - averages: a script computing time, global and zonal averages;
+    - averages_comp: a script computing global mean of the output fields;
+    - bsslzr: it contains the coefficients for the conversion from regular
+      lonlat grid to Gaussian grid;
+    - diagram: it is the interface between the main program and a
+      class "Fluxogram", producing the flux diagram;
+    - gauaw: it uses the coefficients provided in bsslzr for the lonlat to
+      Gaussian grid conversion;
+    - globall_cg: it computes the global and hemispheric means at each
+      timestep;
+    - init: initializes the table and ingests input fields;
+    - makek: computes the KE reservoirs;
+    - makea: computes the APE reservoirs;
+    - mka2k: computes the APE->KE conversion terms;
+    - mkaeaz: computes the zonal APE - eddy APE conversion terms;
+    - mkkekz: computes the zonal KE - eddy KE conversion terms;
+    - mkatas: computes the stationary eddy - transient eddy APE conversions;
+    - mkktks: computes the stationary eddy - transient eddy KE conversions;
+    - output: compute vertical integrals and print NC output;
+    - preprocess_lec: a script handling the input files, separating the real
+      from imaginary part of the Fourier coefficients,
+      reordering the latitudinal dimension (from N to S),
+      interpolating on a reference sigma coordinate;
+    - pr_output: prints a single component of the LEC computations to a
+      single Nc file;
+    - removeif: removes a file if it exists;
+    - stabil: calculates the stability parameter;
+    - table: prints the global and hemispheric mean values of
+      the reservoirs;
+    - table_conv: prints the global and hemispheric mean values of the
+      conversion terms;
+    - varatts: prints the attributes of a variable in a Nc file;
+    - weights: computes the weights for vertical integrations and meridional
+      averages;
+    - write_to_tab: a script for writing global and hemispheric means to table;
+
+References.
+    Ulbrich P. and P. Speth (1991) The global energy cycle of stationary
+    and transient atmospheric waves: Results from ECMWF analyses, Met.
+    Atmos. Phys., 45, 125-138.
+
+@author: valerio.lembo@uni-hamburg.de, Valerio Lembo, Hamburg University, 2018. 
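+
+Example (a minimal sketch; paths and names are hypothetical):
+    lec_strength = lorenz('/tmp/out', 'MODEL', '2000', 'fourier_coeffs.nc',
+                          'lec_diagram.png', 'lec_table.txt')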
+""" + +import math +import os +import sys + +import numpy as np +from cdo import Cdo +from netCDF4 import Dataset + +import esmvaltool.diag_scripts.shared as e +from esmvaltool.diag_scripts.thermodyn_diagtool import (fluxogram, + fourier_coefficients) + +G = 9.81 +R = 287.00 +CP = 1003.5 +AA = 6.371E6 +PS = 101100.0 +NW_1 = 3 +NW_2 = 9 +NW_3 = 21 + + +def lorenz(outpath, model, year, filenc, plotfile, logfile): + """Manage input and output fields and calling functions. + + Receive fields t,u,v,w as input fields in Fourier + coefficients (time,level,wave,lon) and compute the LEC. + + Arguments: + ---------- + outpath: ath where output fields are stored (as NetCDF fields); + model: name of the model that is analysed; + year: year that is considered; + filenc: name of the file containing the input fields; + plotfile: name of the file that will contain the flux diagram; + logfile: name of the file containing the table as a .txt file. + """ + ta_c, ua_c, va_c, wap_c, dims, lev, lat = init(logfile, filenc) + nlev = int(dims[0]) + ntime = int(dims[1]) + nlat = int(dims[2]) + ntp = int(dims[3]) + d_s, y_l, g_w = weights(lev, nlev, lat) + # Compute time mean + ta_tmn = np.nanmean(ta_c, axis=1) + ta_ztmn, ta_gmn = averages(ta_tmn, g_w) + ua_tmn = np.nanmean(ua_c, axis=1) + va_tmn = np.nanmean(va_c, axis=1) + wap_tmn = np.nanmean(wap_c, axis=1) + _, wap_gmn = averages(wap_tmn, g_w) + # Compute stability parameter + gam_ztmn = np.zeros([nlev, nlat]) + for l_l in range(nlat): + gam_ztmn[:, l_l] = stabil(ta_ztmn[:, l_l], lev, nlev) + gam_tmn = stabil(ta_gmn, lev, nlev) + e_k = np.zeros([nlev, ntime, nlat, ntp - 1]) + ape = np.zeros([nlev, ntime, nlat, ntp - 1]) + a2k = np.zeros([nlev, ntime, nlat, ntp - 1]) + ae2az = np.zeros([nlev, ntime, nlat, ntp - 1]) + ke2kz = np.zeros([nlev, ntime, nlat, ntp - 1]) + at2as = np.zeros([nlev, ntime, nlat, ntp - 1]) + kt2ks = np.zeros([nlev, ntime, nlat, ntp - 1]) + for t_t in range(ntime): + ta_tan = ta_c[:, t_t, :, :] - ta_tmn + ua_tan = ua_c[:, t_t, :, :] - ua_tmn + va_tan = va_c[:, t_t, :, :] - va_tmn + wap_tan = wap_c[:, t_t, :, :] - wap_tmn + # Compute zonal means + _, ta_tgan = averages(ta_tan, g_w) + _, wap_tgan = averages(wap_tan, g_w) + # Compute kinetic energy + e_k[:, t_t, :, :] = makek(ua_tan, va_tan) + # Compute available potential energy + ape[:, t_t, :, :] = makea(ta_tan, ta_tgan, gam_tmn) + # Compute conversion between kin.en. and pot.en. + a2k[:, t_t, :, :] = mka2k(wap_tan, ta_tan, wap_tgan, ta_tgan, lev) + # Compute conversion between zonal and eddy APE + ae2az[:, t_t, :, :] = mkaeaz(va_tan, wap_tan, ta_tan, ta_tmn, ta_gmn, + lev, y_l, gam_tmn, nlat, nlev) + # Compute conversion between zonal and eddy KE + ke2kz[:, t_t, :, :] = mkkekz(ua_tan, va_tan, wap_tan, ua_tmn, va_tmn, + lev, y_l, nlat, ntp, nlev) + # Compute conversion between stationary and transient eddy APE + at2as[:, t_t, :, :] = mkatas(ua_tan, va_tan, wap_tan, ta_tan, ta_ztmn, + gam_ztmn, lev, y_l, nlat, ntp, nlev) + # Compute conversion between stationary and transient eddy KE + kt2ks[:, t_t, :, :] = mkktks(ua_tan, va_tan, ua_tmn, va_tmn, y_l, nlat, + ntp, nlev) + ek_tgmn = averages_comp(e_k, g_w, d_s, dims) + table(ek_tgmn, ntp, 'TOT. KIN. EN. ', logfile, flag=0) + ape_tgmn = averages_comp(ape, g_w, d_s, dims) + table(ape_tgmn, ntp, 'TOT. POT. EN. 
', logfile, flag=0) + a2k_tgmn = averages_comp(a2k, g_w, d_s, dims) + table(a2k_tgmn, ntp, 'KE -> APE (trans) ', logfile, flag=1) + ae2az_tgmn = averages_comp(ae2az, g_w, d_s, dims) + table(ae2az_tgmn, ntp, 'AZ <-> AE (trans) ', logfile, flag=1) + ke2kz_tgmn = averages_comp(ke2kz, g_w, d_s, dims) + table(ke2kz_tgmn, ntp, 'KZ <-> KE (trans) ', logfile, flag=1) + at2as_tgmn = averages_comp(at2as, g_w, d_s, dims) + table(at2as_tgmn, ntp, 'ASE <-> ATE ', logfile, flag=1) + kt2ks_tgmn = averages_comp(kt2ks, g_w, d_s, dims) + table(kt2ks_tgmn, ntp, 'KSE <-> KTE ', logfile, flag=1) + ek_st = makek(ua_tmn, va_tmn) + ek_stgmn = globall_cg(ek_st, g_w, d_s, dims) + table(ek_stgmn, ntp, 'STAT. KIN. EN. ', logfile, flag=0) + ape_st = makea(ta_tmn, ta_gmn, gam_tmn) + ape_stgmn = globall_cg(ape_st, g_w, d_s, dims) + table(ape_stgmn, ntp, 'STAT. POT. EN. ', logfile, flag=0) + a2k_st = mka2k(wap_tmn, ta_tmn, wap_gmn, ta_gmn, lev) + a2k_stgmn = globall_cg(a2k_st, g_w, d_s, dims) + table(a2k_stgmn, ntp, 'KE -> APE (stat)', logfile, flag=1) + ae2az_st = mkaeaz(va_tmn, wap_tmn, ta_tmn, ta_tmn, ta_gmn, lev, y_l, + gam_tmn, nlat, nlev) + ae2az_stgmn = globall_cg(ae2az_st, g_w, d_s, dims) + table(ae2az_stgmn, ntp, 'AZ <-> AE (stat)', logfile, flag=1) + ke2kz_st = mkkekz(ua_tmn, va_tmn, wap_tmn, ua_tmn, va_tmn, lev, y_l, nlat, + ntp, nlev) + ke2kz_stgmn = globall_cg(ke2kz_st, g_w, d_s, dims) + # table(ke2kz_stgmn, ntp, 'KZ <-> KE (stat)', logfile, flag=1) + list_diag = [ + ape_tgmn, ape_stgmn, ek_tgmn, ek_stgmn, ae2az_tgmn, ae2az_stgmn, + a2k_tgmn, a2k_stgmn, at2as_tgmn, kt2ks_tgmn, ke2kz_tgmn, ke2kz_stgmn + ] + lec_strength = diagram(plotfile, list_diag, dims) + nc_f = outpath + '/ek_tmap_{}_{}.nc'.format(model, year) + output(e_k, d_s, filenc, 'ek', nc_f) + nc_f = outpath + '/ape_tmap_{}_{}.nc'.format(model, year) + output(ape, d_s, filenc, 'ape', nc_f) + nc_f = outpath + '/a2k_tmap_{}_{}.nc'.format(model, year) + output(a2k, d_s, filenc, 'a2k', nc_f) + nc_f = outpath + '/ae2az_tmap_{}_{}.nc'.format(model, year) + output(ae2az, d_s, filenc, 'ae2az', nc_f) + nc_f = outpath + '/ke2kz_tmap_{}_{}.nc'.format(model, year) + output(ke2kz, d_s, filenc, 'ke2kz', nc_f) + return lec_strength + + +def averages(x_c, g_w): + """Compute time, zonal and global mean averages of initial fields. + + Arguments: + ---------- + x_c: the input field as (lev, lat, wave); + g_w: the Gaussian weights for meridional averaging; + """ + xc_ztmn = np.squeeze(np.real(x_c[:, :, 0])) + xc_gmn = np.nansum(xc_ztmn * g_w[np.newaxis, :], axis=1) / np.nansum(g_w) + return xc_ztmn, xc_gmn + + +def averages_comp(fld, g_w, d_s, dims): + """Compute the global mean averages of reservoirs and conversion terms. + + Arguments: + ---------- + fld: the component of the LEC (time, lev, lat, wave); + g_w: the Gaussian weights for meridional averaging; + d_s: the Delta sigma of the sigma levels; + dims: a list containing the dimensions length0; + """ + fld_tmn = np.nanmean(fld, axis=1) + fld_tgmn = globall_cg(fld_tmn, g_w, d_s, dims) + return fld_tgmn + + +def bsslzr(kdim): + """Obtain parameters for the Gaussian coefficients. 
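+
+    The returned values are the first kdim zeros of the Bessel function
+    J0 (the first 50 are tabulated; further ones are approximated by
+    repeatedly adding pi), used as initial guesses for the Gauss-Legendre
+    roots computed in gauaw.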
+ + @author: Valerio Lembo + """ + ndim = 50 + p_i = math.pi + zbes = [ + 2.4048255577, 5.5200781103, 8.6537279129, 11.7915344391, 14.9309177086, + 18.0710639679, 21.2116366299, 24.3524715308, 27.4934791320, + 30.6346064684, 33.7758202136, 36.9170983537, 40.0584257646, + 43.1997917132, 46.3411883717, 49.4826098974, 52.6240518411, + 55.7655107550, 58.9069839261, 62.0484691902, 65.1899648002, + 68.3314693299, 71.4729816036, 74.6145006437, 77.7560256304, + 80.8975558711, 84.0390907769, 87.1806298436, 90.3221726372, + 93.4637187819, 96.6052679510, 99.7468198587, 102.8883742542, + 106.0299309165, 109.1714896498, 112.3130502805, 115.4546126537, + 118.5961766309, 121.7377420880, 124.8793089132, 128.0208770059, + 131.1624462752, 134.3040166383, 137.4455880203, 140.5871603528, + 143.7287335737, 146.8703076258, 150.0118824570, 153.1534580192, + 156.2950342685 + ] + pbes = np.zeros(kdim) + idim = min([kdim, ndim]) + pbes[0:idim] = zbes[0:idim] + for j in range(idim, kdim - 1, 1): + pbes[j] = pbes[j - 1] + p_i + return pbes + + +def diagram(filen, listf, dims): + """Diagram interface script. + + Call the class fluxogram, serving as + interface between the main script and the class for flux + diagrams design. + + Arguments: + ---------- + filen: the filename of the diagram flux; + listf: a list containing the fluxes and storages; + dims: the dimensions of the variables; + """ + ntp = int(dims[3]) + apet = listf[0] + apes = listf[1] + ekt = listf[2] + eks = listf[3] + ae2azt = listf[4] + ae2azs = listf[5] + a2kt = listf[6] + a2ks = listf[7] + at2as = listf[8] + kt2ks = listf[9] + ke2kzt = listf[10] + ke2kzs = listf[11] + apz = '{:.2f}'.format(apet[0, 0] + apes[0, 0]) + az2kz = '{:.2f}'.format(-1e5 * (a2kt[0, 0])) + az2at = '{:.2f}'.format(-1e5 * np.nansum(ae2azt[0, 1:ntp - 1])) + aps = '{:.2f}'.format(np.nansum(apes[0, 1:ntp - 1])) + as2ks = '{:.2f}'.format(1e5 * np.nansum(a2ks[0, 1:ntp - 1])) + apt = '{:.2f}'.format(np.nansum(apet[0, 1:ntp - 1])) + at2kt = '{:.2f}'.format(1e5 * np.nansum(a2kt[0, 1:ntp - 1])) + az2as = '{:.2f}'.format(-1e5 * np.nansum(ae2azs[0, 1:ntp - 1])) + as2at = '{:.2f}'.format(1e5 * np.nansum(at2as[0, 1:ntp - 1])) + azin = '{:.2f}'.format((float(az2at) + float(az2as) - float(az2kz))) + asein = '{:.2f}'.format((float(as2ks) + float(as2at) - float(az2as))) + atein = '{:.2f}'.format(float(at2kt) - float(az2at) - float(as2at)) + k_z = '{:.2f}'.format(ekt[0, 0] + eks[0, 0]) + kte = '{:.2f}'.format(np.nansum(ekt[0, 1:ntp - 1])) + kse = '{:.2f}'.format(np.nansum(eks[0, 1:ntp - 1])) + kt2kz = '{:.2f}'.format(1e5 * np.nansum(ke2kzt[0, 1:ntp - 1])) + ks2kt = '{:.2f}'.format(-1e5 * np.nansum(kt2ks[0, 1:ntp - 1])) + ks2kz = '{:.2f}'.format(1e5 * np.nansum(ke2kzs[0, 1:ntp - 1])) + kteout = '{:.2f}'.format(float(at2kt) - float(ks2kt) - float(kt2kz)) + kseout = '{:.2f}'.format(float(ks2kt) + float(as2ks) - float(ks2kz)) + kzout = '{:.2f}'.format(float(kt2kz) + float(ks2kz) - float(az2kz)) + list_lorenz = [ + azin, apz, asein, aps, atein, apt, as2ks, at2kt, kteout, kte, kseout, + kse, kzout, k_z, az2kz, az2at, az2as, as2at, kt2kz, ks2kt, ks2kz + ] + flux = fluxogram.Fluxogram(1000, 1000) + flux.add_storage("AZ", 600, 0, 0) + flux.add_storage("ASE", 600, 0.75, 0.25) + flux.add_storage("ATE", 600, 1.5, 0) + flux.add_storage("KTE", 600, 1.5, 1.5) + flux.add_storage("KSE", 600, 0.75, 1.25) + flux.add_storage("KZ", 600, 0, 1.5) + flux.add_storage("AZ+", 0, 0, -1) + flux.add_storage("ASE+", 0, 0.75, -1) + flux.add_storage("ATE+", 0, 1.5, -1) + flux.add_storage("KTE-", 0, 1.5, 2.5) + 
flux.add_storage("KSE-", 0, 0.75, 2.5) + flux.add_storage("KZ-", 0, 0, 2.5) + flux.add_flux("A2KZ", flux.storages[5], flux.storages[0], 100) + flux.add_flux("AE2AZ", flux.storages[0], flux.storages[2], 150) + flux.add_flux("AE2AS", flux.storages[0], flux.storages[1], 60) + flux.add_flux("AE2AT", flux.storages[1], flux.storages[2], 60) + flux.add_flux("A2KS", flux.storages[1], flux.storages[4], 60) + flux.add_flux("A2KT", flux.storages[2], flux.storages[3], 100) + flux.add_flux("KE2KS", flux.storages[3], flux.storages[4], 60) + flux.add_flux("KS2KZ", flux.storages[4], flux.storages[5], 60) + flux.add_flux("KE2KZ", flux.storages[3], flux.storages[5], 150) + flux.add_flux("AZ+", flux.storages[6], flux.storages[0], 60) + flux.add_flux("ASE+", flux.storages[7], flux.storages[1], 60) + flux.add_flux("ATE+", flux.storages[8], flux.storages[2], 60) + flux.add_flux("KTE-", flux.storages[3], flux.storages[9], 60) + flux.add_flux("KSE-", flux.storages[4], flux.storages[10], 60) + flux.add_flux("KZ-", flux.storages[5], flux.storages[11], 60) + flux.draw(filen, list_lorenz) + lec = float(kteout) + float(kseout) + float(kzout) + return lec + + +def gauaw(n_y): + """Compute the Gaussian coefficients for the Gaussian grid conversion. + + Arguments: + ---------- + n_y: the latitude dimension; + """ + c_c = (1 - (2 / math.pi)**2) / 4 + eps = 0.00000000000001 + k_k = n_y / 2 + p_a = np.zeros(n_y) + p_a[0:k_k] = bsslzr(k_k) + p_w = np.zeros(n_y) + for i_l in range(k_k): + x_z = np.cos(p_a[i_l] / math.sqrt((n_y + 0.5)**2 + c_c)) + iterr = 0. + zsp = 1.0 + while (abs(zsp) > eps and iterr <= 10): + pkm1 = x_z + pkm2 = 1.0 + for n_n in range(2, n_y, 1): + p_k = ((n_n * 2 - 1.0) * x_z * pkm1 - (n_n - 1.0) * pkm2) / n_n + pkm2 = pkm1 + pkm1 = p_k + pkm1 = pkm2 + pkmrk = (n_y * (pkm1 - x_z * p_k)) / (1.0 - x_z**2) + zsp = p_k / pkmrk + x_z = x_z - zsp + iterr = iterr + 1 + if iterr > 15: + sys.exit("*** no convergence in gauaw ***") + p_a[i_l] = x_z + p_w[i_l] = (2.0 * (1.0 - x_z**2)) / ((n_y**2) * (pkm1**2)) + p_a[n_y - 1 - i_l] = -p_a[i_l] + p_w[n_y - 1 - i_l] = p_w[i_l] + psi = p_a + pgw = p_w + return psi, pgw + + +def globall_cg(d3v, g_w, d_s, dims): + """Compute the global and hemispheric averages. + + Arguments: + ---------- + d3v: the 3D dataset to be averaged; + g_w: the gaussian weights; + d_s: the vertical levels; + dims: a list containing the sizes of the dimensions; + """ + nlev = int(dims[0]) + nlat = int(dims[2]) + ntp = int(dims[3]) + gmn = np.zeros([3, ntp - 1]) + aux1 = np.zeros([nlev, int(nlat / 2), ntp - 1]) + aux2 = np.zeros([nlev, int(nlat / 2), ntp - 1]) + aux1v = np.zeros([nlev, ntp - 1]) + aux2v = np.zeros([nlev, ntp - 1]) + nhem = int(nlat / 2) + fac = 1 / G * PS / 1e5 + for l_l in range(nlev): + for i_h in range(nhem): + aux1[l_l, i_h, :] = fac * np.real(d3v[l_l, i_h, :]) * g_w[i_h] + aux2[l_l, i_h, :] = (fac * np.real(d3v[l_l, i_h + nhem - 1, :]) * + g_w[i_h + nhem - 1]) + aux1v[l_l, :] = (np.nansum(aux1[l_l, :, :], axis=0) / + np.nansum(g_w[0:nhem]) * d_s[l_l]) + aux2v[l_l, :] = (np.nansum(aux2[l_l, :, :], axis=0) / + np.nansum(g_w[0:nhem]) * d_s[l_l]) + gmn[1, :] = (np.nansum(aux1v, axis=0) / np.nansum(d_s)) + gmn[2, :] = (np.nansum(aux2v, axis=0) / np.nansum(d_s)) + gmn[0, :] = 0.5 * (gmn[1, :] + gmn[2, :]) + return gmn + + +def init(logfile, filep): + """Ingest input fields as complex fields and initialise tables. + + Receive fields t,u,v,w as input fields in Fourier + coefficients (time,level,wave,lon), with real as even and imaginary parts + as odd. 
Convert them to complex fields for Python.
+
+    Arguments:
+    ----------
+    filep: name of the file containing the input fields;
+    logfile: name of the file containing the table as a .txt file.
+    """
+    with open(logfile, 'w') as log:
+        log.write('########################################################\n')
+        log.write('#                                                      #\n')
+        log.write('#               LORENZ     ENERGY    CYCLE             #\n')
+        log.write('#                                                      #\n')
+        log.write('########################################################\n')
+    with Dataset(filep) as dataset0:
+        t_a = dataset0.variables['ta'][:, :, :, :]
+        u_a = dataset0.variables['ua'][:, :, :, :]
+        v_a = dataset0.variables['va'][:, :, :, :]
+        wap = dataset0.variables['wap'][:, :, :, :]
+        lev = dataset0.variables['plev'][:]
+        time = dataset0.variables['time'][:]
+        lat = dataset0.variables['lat'][:]
+    nfc = np.shape(t_a)[3]
+    nlev = len(lev)
+    ntime = len(time)
+    nlat = len(lat)
+    ntp = nfc // 2 + 1
+    dims = [nlev, ntime, nlat, ntp]
+    if max(lev) < 1000:
+        lev = lev * 100
+        wap = wap * 100
+    t_a = np.transpose(t_a, (1, 0, 2, 3))
+    ta_r = t_a[:, :, :, 0::2]
+    ta_i = t_a[:, :, :, 1::2]
+    u_a = np.transpose(u_a, (1, 0, 2, 3))
+    ua_r = u_a[:, :, :, 0::2]
+    ua_i = u_a[:, :, :, 1::2]
+    v_a = np.transpose(v_a, (1, 0, 2, 3))
+    va_r = v_a[:, :, :, 0::2]
+    va_i = v_a[:, :, :, 1::2]
+    wap = np.transpose(wap, (1, 0, 2, 3))
+    wap_r = wap[:, :, :, 0::2]
+    wap_i = wap[:, :, :, 1::2]
+    ta_c = ta_r + 1j * ta_i
+    ua_c = ua_r + 1j * ua_i
+    va_c = va_r + 1j * va_i
+    wap_c = wap_r + 1j * wap_i
+    with open(logfile, 'a+') as log:
+        log.write(' \n')
+        log.write(' \n')
+        log.write('INPUT DATA:\n')
+        log.write('-----------\n')
+        log.write(' \n')
+        log.write('SPECTRAL RESOLUTION : {}\n'.format(nfc))
+        log.write('NUMBER OF LATITUDES : {}\n'.format(nlat))
+        log.write('NUMBER OF LEVELS : {}\n'.format(nlev))
+        log.write('LEVELS : {} Pa\n'.format(lev))
+        log.write(' \n')
+        log.write('WAVES:\n')
+        log.write(' \n')
+        log.write('(1) : 1 - {}\n'.format(NW_1))
+        log.write('(2) : {} - {}\n'.format(NW_1, NW_2))
+        log.write('(3) : {} - {}\n'.format(NW_2, NW_3))
+        log.write(' \n')
+        log.write('GLOBAL DIAGNOSTIC: \n')
+        log.write(' \n')
+        log.write('                     I GLOBAL I NORTH I SOUTH I\n')
+        log.write('------------------------------------------------------\n')
+    return ta_c, ua_c, va_c, wap_c, dims, lev, lat
+
+
+def makek(u_t, v_t):
+    """Compute the kinetic energy reservoirs from u and v.
+
+    Arguments:
+    ----------
+    u_t: a 3D zonal velocity field;
+    v_t: a 3D meridional velocity field;
+    """
+    ck1 = u_t * np.conj(u_t)
+    ck2 = v_t * np.conj(v_t)
+    e_k = np.real(ck1 + ck2)
+    e_k[:, :, 0] = 0.5 * np.real(u_t[:, :, 0] * u_t[:, :, 0] +
+                                 v_t[:, :, 0] * v_t[:, :, 0])
+    return e_k
+
+
+def makea(t_t, t_g, gam):
+    """Compute the available potential energy reservoirs from t.
+
+    Arguments:
+    ----------
+    t_t: a 3D temperature field;
+    t_g: a temperature vertical profile;
+    gam: a vertical profile of the stability parameter;
+    """
+    ape = gam[:, np.newaxis, np.newaxis] * np.real(t_t * np.conj(t_t))
+    ape[:, :, 0] = (gam[:, np.newaxis] * 0.5 * np.real(
+        (t_t[:, :, 0] - t_g[:, np.newaxis]) *
+        (t_t[:, :, 0] - t_g[:, np.newaxis])))
+    return ape
+
+
+def mka2k(wap, t_t, w_g, t_g, p_l):
+    """Compute the APE to KE energy conversion from t and w.
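+
+    For each wavenumber the conversion is proportional to the covariance of
+    temperature and vertical velocity, roughly a2k = -(R / p) * 2 * Re(T *
+    conj(w)); the zonal mean (wavenumber 0) component is instead computed
+    from the departures of T and w from their global mean profiles.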
+
+    Arguments:
+    ----------
+    wap: a 3D vertical velocity field;
+    t_t: a 3D temperature field;
+    w_g: a vertical velocity vertical profile;
+    t_g: a temperature vertical profile;
+    p_l: the pressure levels;
+    """
+    a2k = -np.real(R / p_l[:, np.newaxis, np.newaxis] *
+                   (t_t * np.conj(wap) + np.conj(t_t) * wap))
+    a2k[:, :, 0] = -np.real(R / p_l[:, np.newaxis] *
+                            (t_t[:, :, 0] - t_g[:, np.newaxis]) *
+                            (wap[:, :, 0] - w_g[:, np.newaxis]))
+    return a2k
+
+
+def mkaeaz(v_t, wap, t_t, ttt, ttg, p_l, lat, gam, nlat, nlev):
+    """Compute the zonal mean - eddy APE conversions from t and v.
+
+    Arguments:
+    ----------
+    v_t: a 3D meridional velocity field;
+    wap: a 3D vertical velocity field;
+    t_t: a 3D temperature field;
+    ttt: a climatological mean 3D temperature field;
+    ttg: a climatological mean temperature vertical profile;
+    p_l: the pressure levels;
+    lat: the latitudinal dimension;
+    gam: a vertical profile of the stability parameter;
+    nlat: the number of latitudes;
+    nlev: the number of levels;
+    """
+    dtdp = np.zeros([nlev, nlat])
+    dtdy = np.zeros([nlev, nlat])
+    ttt = np.real(ttt)
+    for l_l in np.arange(nlev):
+        if l_l == 0:
+            t_1 = ttt[l_l, :, 0] - ttg[l_l]
+            t_2 = ttt[l_l + 1, :, 0] - ttg[l_l + 1]
+            dtdp[l_l, :] = (t_2 - t_1) / (p_l[l_l + 1] - p_l[l_l])
+        elif l_l == nlev - 1:
+            t_1 = ttt[l_l - 1, :, 0] - ttg[l_l - 1]
+            t_2 = ttt[l_l, :, 0] - ttg[l_l]
+            dtdp[l_l, :] = (t_2 - t_1) / (p_l[l_l] - p_l[l_l - 1])
+        else:
+            t_1 = ttt[l_l, :, 0] - ttg[l_l]
+            t_2 = ttt[l_l + 1, :, 0] - ttg[l_l + 1]
+            dtdp1 = (t_2 - t_1) / (p_l[l_l + 1] - p_l[l_l])
+            t_2 = t_1
+            t_1 = ttt[l_l - 1, :, 0] - ttg[l_l - 1]
+            dtdp2 = (t_2 - t_1) / (p_l[l_l] - p_l[l_l - 1])
+            dtdp[l_l, :] = ((dtdp1 * (p_l[l_l] - p_l[l_l - 1]) + dtdp2 *
+                             (p_l[l_l + 1] - p_l[l_l])) /
+                            (p_l[l_l + 1] - p_l[l_l - 1]))
+        dtdp[l_l, :] = dtdp[l_l, :] - (R / (CP * p_l[l_l]) *
+                                       (ttt[l_l, :, 0] - ttg[l_l]))
+    for i_l in np.arange(nlat):
+        if i_l == 0:
+            t_1 = ttt[:, i_l, 0]
+            t_2 = ttt[:, i_l + 1, 0]
+            dtdy[:, i_l] = (t_2 - t_1) / (lat[i_l + 1] - lat[i_l])
+        elif i_l == nlat - 1:
+            t_1 = ttt[:, i_l - 1, 0]
+            t_2 = ttt[:, i_l, 0]
+            dtdy[:, i_l] = (t_2 - t_1) / (lat[i_l] - lat[i_l - 1])
+        else:
+            t_1 = ttt[:, i_l - 1, 0]
+            t_2 = ttt[:, i_l + 1, 0]
+            dtdy[:, i_l] = (t_2 - t_1) / (lat[i_l + 1] - lat[i_l - 1])
+    dtdy = dtdy / AA
+    c_1 = np.real(v_t * np.conj(t_t) + t_t * np.conj(v_t))
+    c_2 = np.real(wap * np.conj(t_t) + t_t * np.conj(wap))
+    ae2az = (gam[:, np.newaxis, np.newaxis] *
+             (dtdy[:, :, np.newaxis] * c_1 + dtdp[:, :, np.newaxis] * c_2))
+    ae2az[:, :, 0] = 0.
+    return ae2az
+
+
+def mkkekz(u_t, v_t, wap, utt, vtt, p_l, lat, nlat, ntp, nlev):
+    """Compute the zonal mean - eddy KE conversions from u and v.
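+
+    Vertical and meridional derivatives of the mean flow are taken with
+    one-sided differences at the first and last level/latitude and
+    pressure-weighted centred differences in the interior, as sketched in
+    the loops below.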
+ + Arguments: + ---------- + u_t: a 3D zonal velocity field; + v_t: a 3D meridional velocity field; + wap: a 3D vertical velocity field; + utt: a climatological mean 3D zonal velocity field; + vtt: a climatological mean 3D meridional velocity field; + p_l: the pressure levels; + lat: the latitude dimension; + nlat: the number of latitudes; + ntp: the number of wavenumbers; + nlev: the number of vertical levels; + """ + dudp = np.zeros([nlev, nlat]) + dvdp = np.zeros([nlev, nlat]) + dudy = np.zeros([nlev, nlat]) + dvdy = np.zeros([nlev, nlat]) + for l_l in np.arange(nlev): + if l_l == 0: + dudp[l_l, :] = ((np.real(utt[l_l + 1, :, 0] - utt[l_l, :, 0])) / + (p_l[l_l + 1] - p_l[l_l])) + dvdp[l_l, :] = ((np.real(vtt[l_l + 1, :, 0] - vtt[l_l, :, 0])) / + (p_l[l_l + 1] - p_l[l_l])) + elif l_l == nlev - 1: + dudp[l_l, :] = ((np.real(utt[l_l, :, 0] - utt[l_l - 1, :, 0])) / + (p_l[l_l] - p_l[l_l - 1])) + dvdp[l_l, :] = ((np.real(vtt[l_l, :, 0] - vtt[l_l - 1, :, 0])) / + (p_l[l_l] - p_l[l_l - 1])) + else: + dudp1 = ((np.real(utt[l_l + 1, :, 0] - utt[l_l, :, 0])) / + (p_l[l_l + 1] - p_l[l_l])) + dvdp1 = ((np.real(vtt[l_l + 1, :, 0] - vtt[l_l, :, 0])) / + (p_l[l_l + 1] - p_l[l_l])) + dudp2 = ((np.real(utt[l_l, :, 0] - utt[l_l - 1, :, 0])) / + (p_l[l_l] - p_l[l_l - 1])) + dvdp2 = ((np.real(vtt[l_l, :, 0] - vtt[l_l - 1, :, 0])) / + (p_l[l_l] - p_l[l_l - 1])) + dudp[l_l, :] = ((dudp1 * (p_l[l_l] - p_l[l_l - 1]) + dudp2 * + (p_l[l_l + 1] - p_l[l_l])) / + (p_l[l_l + 1] - p_l[l_l - 1])) + dvdp[l_l, :] = ((dvdp1 * (p_l[l_l] - p_l[l_l - 1]) + dvdp2 * + (p_l[l_l + 1] - p_l[l_l])) / + (p_l[l_l + 1] - p_l[l_l - 1])) + for i_l in np.arange(nlat): + if i_l == 0: + dudy[:, i_l] = ((np.real(utt[:, i_l + 1, 0] - utt[:, i_l, 0])) / + (lat[i_l + 1] - lat[i_l])) + dvdy[:, i_l] = ((np.real(vtt[:, i_l + 1, 0] - vtt[:, i_l, 0])) / + (lat[i_l + 1] - lat[i_l])) + elif i_l == nlat - 1: + dudy[:, i_l] = ((np.real(utt[:, i_l, 0] - utt[:, i_l - 1, 0])) / + (lat[i_l] - lat[i_l - 1])) + dvdy[:, i_l] = ((np.real(vtt[:, i_l, 0] - vtt[:, i_l - 1, 0])) / + (lat[i_l] - lat[i_l - 1])) + else: + dudy[:, + i_l] = ((np.real(utt[:, i_l + 1, 0] - utt[:, i_l - 1, 0])) / + (lat[i_l + 1] - lat[i_l - 1])) + dvdy[:, + i_l] = ((np.real(vtt[:, i_l + 1, 0] - vtt[:, i_l - 1, 0])) / + (lat[i_l + 1] - lat[i_l - 1])) + dudy = dudy / AA + dvdy = dvdy / AA + c_1 = np.zeros([nlev, nlat, ntp - 1]) + c_2 = np.zeros([nlev, nlat, ntp - 1]) + c_3 = np.zeros([nlev, nlat, ntp - 1]) + c_4 = np.zeros([nlev, nlat, ntp - 1]) + c_5 = np.zeros([nlev, nlat, ntp - 1]) + c_6 = np.zeros([nlev, nlat, ntp - 1]) + u_u = np.real(u_t * np.conj(u_t) + u_t * np.conj(u_t)) + u_v = np.real(u_t * np.conj(v_t) + v_t * np.conj(u_t)) + v_v = np.real(v_t * np.conj(v_t) + v_t * np.conj(v_t)) + u_w = np.real(u_t * np.conj(wap) + wap * np.conj(u_t)) + v_w = np.real(v_t * np.conj(wap) + wap * np.conj(v_t)) + for i_l in np.arange(nlat): + c_1[:, i_l, :] = dudy[:, i_l][:, np.newaxis] * u_v[:, i_l, :] + c_2[:, i_l, :] = dvdy[:, i_l][:, np.newaxis] * v_v[:, i_l, :] + c_5[:, i_l, :] = (np.tan(lat[i_l]) / AA * + np.real(utt[:, i_l, 0])[:, np.newaxis] * + (u_v[:, i_l, :])) + c_6[:, i_l, :] = -(np.tan(lat[i_l]) / AA * + np.real(vtt[:, i_l, 0])[:, np.newaxis] * + (u_u[:, i_l, :])) + for l_l in np.arange(nlev): + c_3[l_l, :, :] = dudp[l_l, :][:, np.newaxis] * u_w[l_l, :, :] + c_4[l_l, :, :] = dvdp[l_l, :][:, np.newaxis] * v_w[l_l, :, :] + ke2kz = (c_1 + c_2 + c_3 + c_4 + c_5 + c_6) + ke2kz[:, :, 0] = 0. 
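+    # The wavenumber-0 component is zeroed above: the zonal-mean flow does
+    # not convert energy with itself, so only eddies (wavenumber >= 1)
+    # contribute to the zonal mean - eddy KE conversion.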
+    return ke2kz
+
+
+def mkatas(u_t, v_t, wap, t_t, ttt, g_w, p_l, lat, nlat, ntp, nlev):
+    """Compute the stat.-trans. eddy APE conversions from u, v, wap and t.
+
+    Arguments:
+    ----------
+    u_t: a 3D zonal velocity field;
+    v_t: a 3D meridional velocity field;
+    wap: a 3D vertical velocity field;
+    t_t: a 3D temperature field;
+    ttt: a climatological zonal mean temperature field (lev, lat);
+    g_w: the zonal mean stability parameter, a (lev, lat) field
+         (gam_ztmn in the caller);
+    p_l: the pressure levels;
+    lat: the latitude dimension;
+    nlat: the number of latitudes;
+    ntp: the number of wavenumbers;
+    nlev: the number of vertical levels;
+    """
+    t_r = np.fft.ifft(t_t, axis=2)
+    u_r = np.fft.ifft(u_t, axis=2)
+    v_r = np.fft.ifft(v_t, axis=2)
+    w_r = np.fft.ifft(wap, axis=2)
+    tur = t_r * u_r
+    tvr = t_r * v_r
+    twr = t_r * w_r
+    t_u = np.fft.fft(tur, axis=2)
+    t_v = np.fft.fft(tvr, axis=2)
+    t_w = np.fft.fft(twr, axis=2)
+    c_1 = (t_u * np.conj(ttt[:, :, np.newaxis]) -
+           ttt[:, :, np.newaxis] * np.conj(t_u))
+    c_6 = (t_w * np.conj(ttt[:, :, np.newaxis]) -
+           ttt[:, :, np.newaxis] * np.conj(t_w))
+    c_2 = np.zeros([nlev, nlat, ntp - 1])
+    c_3 = np.zeros([nlev, nlat, ntp - 1])
+    c_5 = np.zeros([nlev, nlat, ntp - 1])
+    for i_l in range(nlat):
+        if i_l == 0:
+            c_2[:, i_l, :] = np.real(
+                t_v[:, i_l, :] / (AA * (lat[i_l + 1] - lat[i_l])) *
+                np.conj(ttt[:, i_l + 1, np.newaxis] - ttt[:, i_l, np.newaxis]))
+            c_3[:, i_l, :] = np.real(
+                np.conj(t_v[:, i_l, :]) / (AA * (lat[i_l + 1] - lat[i_l])) *
+                (ttt[:, i_l + 1, np.newaxis] - ttt[:, i_l, np.newaxis]))
+        elif i_l == nlat - 1:
+            c_2[:, i_l, :] = np.real(
+                t_v[:, i_l, :] / (AA * (lat[i_l] - lat[i_l - 1])) *
+                np.conj(ttt[:, i_l, np.newaxis] - ttt[:, i_l - 1, np.newaxis]))
+            c_3[:, i_l, :] = np.real(
+                np.conj(t_v[:, i_l, :]) / (AA * (lat[i_l] - lat[i_l - 1])) *
+                (ttt[:, i_l, np.newaxis] - ttt[:, i_l - 1, np.newaxis]))
+        else:
+            c_2[:, i_l, :] = np.real(t_v[:, i_l, :] /
+                                     (AA * (lat[i_l + 1] - lat[i_l - 1])) *
+                                     np.conj(ttt[:, i_l + 1, np.newaxis] -
+                                             ttt[:, i_l - 1, np.newaxis]))
+            c_3[:, i_l, :] = np.real(
+                np.conj(t_v[:, i_l, :]) / (AA *
+                                           (lat[i_l + 1] - lat[i_l - 1])) *
+                (ttt[:, i_l + 1, np.newaxis] - ttt[:, i_l - 1, np.newaxis]))
+    for l_l in range(nlev):
+        if l_l == 0:
+            c_5[l_l, :, :] = (
+                (ttt[l_l + 1, :, np.newaxis] - ttt[l_l, :, np.newaxis]) /
+                (p_l[l_l + 1] - p_l[l_l]))
+        elif l_l == nlev - 1:
+            c_5[l_l, :, :] = (
+                (ttt[l_l, :, np.newaxis] - ttt[l_l - 1, :, np.newaxis]) /
+                (p_l[l_l] - p_l[l_l - 1]))
+        else:
+            c51 = ((ttt[l_l + 1, :, np.newaxis] - ttt[l_l, :, np.newaxis]) /
+                   (p_l[l_l + 1] - p_l[l_l]))
+            c52 = ((ttt[l_l, :, np.newaxis] - ttt[l_l - 1, :, np.newaxis]) /
+                   (p_l[l_l] - p_l[l_l - 1]))
+            c_5[l_l, :, :] = ((c51 * (p_l[l_l] - p_l[l_l - 1]) + c52 *
+                               (p_l[l_l + 1] - p_l[l_l])) /
+                              (p_l[l_l + 1] - p_l[l_l - 1]))
+    k_k = np.arange(0, ntp - 1)
+    at2as = (((k_k - 1)[np.newaxis, np.newaxis, :] * np.imag(c_1) /
+              (AA * np.cos(lat[np.newaxis, :, np.newaxis])) +
+              np.real(t_w * np.conj(c_5) + np.conj(t_w) * c_5) +
+              np.real(c_2 + c_3) + R /
+              (CP * p_l[:, np.newaxis, np.newaxis]) * np.real(c_6)) *
+             g_w[:, :, np.newaxis])
+    at2as[:, :, 0] = 0.
+    return at2as
+
+
+def mkktks(u_t, v_t, utt, vtt, lat, nlat, ntp, nlev):
+    """Compute the stat.-trans. eddy KE conversions from u and v.
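+
+    Quadratic eddy terms (u*u, u*v, v*v) are evaluated in grid-point space
+    via an inverse FFT and transformed back to wavenumber space, which
+    avoids spelling out the spectral convolution explicitly.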
+ + Arguments: + ---------- + u_t: a 3D zonal velocity field; + v_t: a 3D meridional velocity field; + utt: a climatological mean 3D zonal velocity field; + vtt: a climatological mean 3D meridional velocity field; + lat: the latitude dimension; + nlat: the number of latitudes; + ntp: the number of wavenumbers; + nlev: the number of vertical levels; + """ + dut = np.zeros([nlev, nlat, ntp - 1]) + dvt = np.zeros([nlev, nlat, ntp - 1]) + dlat = np.zeros([nlat]) + utt = np.real(utt) + vtt = np.real(vtt) + u_r = np.fft.irfft(u_t, axis=2) + v_r = np.fft.irfft(v_t, axis=2) + uur = u_r * u_r + uvr = u_r * v_r + vvr = v_r * v_r + u_u = np.fft.rfft(uur, axis=2) + v_v = np.fft.rfft(vvr, axis=2) + u_v = np.fft.rfft(uvr, axis=2) + c_1 = u_u * np.conj(u_t) - u_t * np.conj(u_u) + # c_3 = u_v * np.conj(u_t) + u_t * np.conj(u_v) + c_5 = u_u * np.conj(v_t) + v_t * np.conj(u_u) + c_6 = u_v * np.conj(v_t) - v_t * np.conj(u_v) + for i_l in range(nlat): + if i_l == 0: + dut[:, i_l, :] = (utt[:, i_l + 1, :] - utt[:, i_l, :]) + dvt[:, i_l, :] = (vtt[:, i_l + 1, :] - vtt[:, i_l, :]) + dlat[i_l] = (lat[i_l + 1] - lat[i_l]) + elif i_l == nlat - 1: + dut[:, i_l, :] = (utt[:, i_l, :] - utt[:, i_l - 1, :]) + dvt[:, i_l, :] = (vtt[:, i_l, :] - vtt[:, i_l - 1, :]) + dlat[i_l] = (lat[i_l] - lat[i_l - 1]) + else: + dut[:, i_l, :] = (utt[:, i_l + 1, :] - utt[:, i_l - 1, :]) + dvt[:, i_l, :] = (vtt[:, i_l + 1, :] - vtt[:, i_l - 1, :]) + dlat[i_l] = (lat[i_l + 1] - lat[i_l - 1]) + c21 = np.conj(u_u) * dut / dlat[np.newaxis, :, np.newaxis] + c22 = u_u * np.conj(dut) / dlat[np.newaxis, :, np.newaxis] + c41 = np.conj(v_v) * dvt / dlat[np.newaxis, :, np.newaxis] + c42 = v_v * np.conj(dvt) / dlat[np.newaxis, :, np.newaxis] + k_k = np.arange(0, ntp - 1) + kt2ks = (np.real(c21 + c22 + c41 + c42) / AA + + np.tan(lat)[np.newaxis, :, np.newaxis] * np.real(c_1 - c_5) / AA + + np.imag(c_1 + c_6) * (k_k - 1)[np.newaxis, np.newaxis, :] / + (AA * np.cos(lat)[np.newaxis, :, np.newaxis])) + kt2ks[:, :, 0] = 0 + return kt2ks + + +def output(fld, d_s, filenc, name, nc_f): + """Compute vertical integrals and print (time,lat,ntp) to NC output. + + Arguments: + ---------- + fld: the annual mean fields (lev, lat, wave); + d_s: Delta sigma; + filenc: the input file containing the Fourier coefficients of t,u,v,w; + name: the variable name; + nc_f: the name of the output file (with path) + """ + fld_tmn = np.nanmean(fld, axis=1) + fld_aux = fld_tmn * d_s[:, np.newaxis, np.newaxis] + fld_vmn = np.nansum(fld_aux, axis=0) / np.nansum(d_s) + removeif(nc_f) + pr_output(fld_vmn, name, filenc, nc_f) + + +def pr_output(varo, varname, filep, nc_f): + """Print outputs to NetCDF. + + Save fields to NetCDF, retrieving information from an existing + NetCDF file. Metadata are transferred from the existing file to the + new one. + + Arguments: + ---------- + varo: the field to be stored; + varname: the name of the variables to be saved; + filep: the existing dataset, containing the metadata; + nc_f: the name of the output file; + + @author: Chris Slocum (2014), modified by Valerio Lembo (2018). 
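+
+    Note: the output 'wave' dimension is half the length of the input one,
+    since the input stores real and imaginary parts interleaved along the
+    'wave' axis (hence ntp = len(wave) / 2 below).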
+    """
+    fourc = fourier_coefficients
+    with Dataset(nc_f, 'w', format='NETCDF4') as w_nc_fid:
+        w_nc_fid.description = "Outputs of LEC program"
+        with Dataset(filep, 'r') as nc_fid:
+            # Extract data from NetCDF file
+            wave = nc_fid.variables['wave'][:]
+            ntp = int(len(wave) / 2)
+            # Writing NetCDF files
+            fourc.extr_lat(nc_fid, w_nc_fid, 'lat')
+            w_nc_fid.createDimension('wave', ntp)
+            w_nc_dim = w_nc_fid.createVariable('wave',
+                                               nc_fid.variables['wave'].dtype,
+                                               ('wave', ))
+            for ncattr in nc_fid.variables['wave'].ncattrs():
+                w_nc_dim.setncattr(ncattr,
+                                   nc_fid.variables['wave'].getncattr(ncattr))
+        w_nc_fid.variables['wave'][:] = wave[0:ntp]
+        w_nc_var = w_nc_fid.createVariable(varname, 'f8', ('lat', 'wave'))
+        varatts(w_nc_var, varname, 1, 0)
+        w_nc_fid.variables[varname][:] = varo
+
+
+def preproc_lec(model, wdir, pdir, input_data):
+    """Preprocess fields for LEC computations and call the lorenz program.
+
+    This function computes the interpolation of ta, ua, va, wap daily fields
+    to fill gaps using near-surface data, then computes the Fourier
+    coefficients and performs the LEC computations. For every year,
+    (lev,lat,wave) fields, global and hemispheric time series of each
+    conversion and reservoir term of the LEC are provided.
+
+    Arguments:
+    ----------
+    model: the model name;
+    wdir: the working directory where the outputs are stored;
+    pdir: a new directory is created as a sub-directory of the plot directory
+          to store tables of conversion/reservoir terms and the flux diagram
+          for each year;
+    input_data: the input fields from which the file names are retrieved;
+    """
+    cdo = Cdo()
+    fourc = fourier_coefficients
+    ta_file = e.select_metadata(input_data, short_name='ta',
+                                dataset=model)[0]['filename']
+    tas_file = e.select_metadata(input_data, short_name='tas',
+                                 dataset=model)[0]['filename']
+    ua_files = e.select_metadata(input_data, short_name='ua', dataset=model)
+    if len(ua_files) > 1:
+        ua_files = e.select_metadata(ua_files, variable_group='ua_1')
+    ua_file = ua_files[0]['filename']
+    uas_file = e.select_metadata(input_data, short_name='uas',
+                                 dataset=model)[0]['filename']
+    va_files = e.select_metadata(input_data, short_name='va', dataset=model)
+    if len(va_files) > 1:
+        va_files = e.select_metadata(va_files, variable_group='va_1')
+    va_file = va_files[0]['filename']
+    vas_files = e.select_metadata(input_data, short_name='vas', dataset=model)
+    if len(vas_files) > 1:
+        vas_files = e.select_metadata(vas_files, variable_group='vas_1')
+    vas_file = vas_files[0]['filename']
+    wap_file = e.select_metadata(input_data, short_name='wap',
+                                 dataset=model)[0]['filename']
+    ldir = os.path.join(pdir, 'LEC_results')
+    os.makedirs(ldir)
+    maskorog = wdir + '/orog.nc'
+    ua_file_mask = wdir + '/ua_fill.nc'
+    va_file_mask = wdir + '/va_fill.nc'
+    energy3_file = wdir + '/energy_short.nc'
+    cdo.setmisstoc('0',
+                   input='-setmisstoc,1 -sub {0} {0}'.format(ua_file),
+                   options='-b F32',
+                   output=maskorog)
+    cdo.add(input=('-setmisstoc,0 -selvar,ua {} '
+                   '-setmisstoc,0 -mul -selvar,uas {} -selvar,ua {}').format(
+                       ua_file, uas_file, maskorog),
+            options='-b F32',
+            output=ua_file_mask)
+    cdo.add(input=('-setmisstoc,0 -selvar,va {} '
+                   '-setmisstoc,0 -mul -selvar,vas {} -selvar,ua {}').format(
+                       va_file, vas_file, maskorog),
+            options='-b F32',
+            output=va_file_mask)
+    cdo.setmisstoc('0',
+                   input=('-invertlat -sellevel,10000/90000 '
+                          '-merge {} {} {} {}').format(ta_file, ua_file_mask,
+                                                       va_file_mask, wap_file),
+                   options='-b F32',
+                   output=energy3_file)
+    yrs = 
cdo.showyear(input=energy3_file)
+    yrs = str(yrs)
+    yrs2 = yrs.split()
+    y_i = 0
+    lect = np.zeros(len(yrs2))
+    for y_r in yrs2:
+        y_rl = [y_n for y_n in y_r]
+        y_ro = ''
+        for e_l in y_rl:
+            e_l = str(e_l)
+            if e_l.isdigit() is True:
+                y_ro += e_l
+        # print(filter(str.isdigit, str(y_r)))
+        enfile_yr = wdir + '/inputen.nc'
+        tasfile_yr = wdir + '/tas_yr.nc'
+        tadiag_file = wdir + '/ta_filled.nc'
+        ncfile = wdir + '/fourier_coeff.nc'
+        cdo.selyear(y_ro,
+                    input=energy3_file,
+                    options='-b F32',
+                    output=enfile_yr)
+        cdo.selyear(y_ro, input=tas_file, options='-b F32', output=tasfile_yr)
+        fourc.fourier_coeff(tadiag_file, ncfile, enfile_yr, tasfile_yr)
+        diagfile = (ldir + '/{}_{}_lec_diagram.png'.format(model, y_ro))
+        logfile = (ldir + '/{}_{}_lec_table.txt'.format(model, y_ro))
+        lect[y_i] = lorenz(wdir, model, y_ro, ncfile, diagfile, logfile)
+        y_i = y_i + 1
+        os.remove(enfile_yr)
+        os.remove(tasfile_yr)
+        os.remove(tadiag_file)
+        os.remove(ncfile)
+    os.remove(maskorog)
+    os.remove(ua_file_mask)
+    os.remove(va_file_mask)
+    os.remove(energy3_file)
+    return lect
+
+
+def removeif(filename):
+    """Remove filename if it exists."""
+    try:
+        os.remove(filename)
+    except OSError:
+        pass
+
+
+def stabil(ta_gmn, p_l, nlev):
+    """Compute the stability parameter from temp. and pressure levels.
+
+    Arguments:
+    ----------
+    ta_gmn: a temperature vertical profile;
+    p_l: the vertical levels;
+    nlev: the number of vertical levels;
+    """
+    cpdr = CP / R
+    t_g = ta_gmn
+    g_s = np.zeros(nlev)
+    for i_l in range(nlev):
+        if i_l == 0:
+            dtdp = (t_g[i_l + 1] - t_g[i_l]) / (p_l[i_l + 1] - p_l[i_l])
+        elif i_l == nlev - 1:
+            dtdp = (t_g[i_l] - t_g[i_l - 1]) / (p_l[i_l] - p_l[i_l - 1])
+        else:
+            dtdp1 = (t_g[i_l + 1] - t_g[i_l]) / (p_l[i_l + 1] - p_l[i_l])
+            dtdp2 = (t_g[i_l] - t_g[i_l - 1]) / (p_l[i_l] - p_l[i_l - 1])
+            dtdp = ((dtdp1 * (p_l[i_l] - p_l[i_l - 1]) + dtdp2 *
+                     (p_l[i_l + 1] - p_l[i_l])) /
+                    (p_l[i_l + 1] - p_l[i_l - 1]))
+        g_s[i_l] = CP / (t_g[i_l] - p_l[i_l] * dtdp * cpdr)
+    return g_s
+
+
+def table(varin, ntp, name, logfile, flag):
+    """Write global and hem. storage terms to a .txt table.
+
+    Arguments:
+    ----------
+    varin: the variable to be printed out;
+    ntp: the number of wavenumbers;
+    name: the name of the variable to be printed out;
+    logfile: the filename of the .txt where the variable is printed out;
+    flag: a flag (0 or 1) for applying the 1e5 scaling to conversion terms;
+    """
+    if flag == 1:
+        fac = 1e5
+        varin = fac * varin
+    varzon = varin[:, 0]
+    vared = np.nansum(varin[:, 1:ntp - 1], axis=1)
+    vared1 = np.nansum(varin[:, 1:NW_1 - 1], axis=1)
+    vared2 = np.nansum(varin[:, NW_1:NW_2 - 1], axis=1)
+    vared3 = np.nansum(varin[:, NW_2:NW_3 - 1], axis=1)
+    vared_tog = [vared, vared1, vared2, vared3]
+    write_to_tab(logfile, name, vared_tog, varzon)
+
+
+def varatts(w_nc_var, varname, tres, vres):
+    """Add attributes to the variables, depending on name and time res.
+
+    Arguments:
+    ---------
+    w_nc_var: a variable object;
+    varname: the name of the variable, among ta, ua, va and wap;
+    tres: the time resolution (daily or annual);
+    vres: the vertical resolution (pressure levels or vert. integr.).
+
+    @author: Chris Slocum (2014), modified by Valerio Lembo (2018).
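+
+    A usage sketch (with a hypothetical variable object var, as returned by
+    createVariable): varatts(var, 'ek', 1, 0) attaches the kinetic energy
+    attributes with annual-mean statistics on pressure levels.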
+    """
+    if tres == 0:
+        tatt = "Daily\nM"
+    elif tres == 1:
+        tatt = "Annual mean\nM"
+    if vres == 0:
+        vatt = "Pressure levels\n"
+    elif vres == 1:
+        vatt = "Vertically integrated\n"
+    if varname == 'a':
+        w_nc_var.setncatts({
+            'long_name': "Available Potential Energy",
+            'units': "W m-2",
+            'level_desc': vatt,
+            'var_desc': "Available potential energy reservoir",
+            'statistic': tatt
+        })
+    elif varname == 'ek':
+        w_nc_var.setncatts({
+            'long_name': "Kinetic Energy",
+            'units': "W m-2",
+            'level_desc': vatt,
+            'var_desc': "Kinetic energy reservoir",
+            'statistic': tatt
+        })
+    elif varname == 'a2k':
+        w_nc_var.setncatts({
+            'long_name': "Conversion between APE and KE",
+            'units': "W m-2",
+            'level_desc': vatt,
+            'var_desc': "APE <-> KE",
+            'statistic': tatt
+        })
+    elif varname == 'k':
+        w_nc_var.setncatts({
+            'long_name': "Kinetic Energy",
+            'units': "W m-2",
+            'level_desc': vatt,
+            'var_desc': "Kinetic energy reservoir",
+            'statistic': tatt
+        })
+
+
+def weights(lev, nlev, lat):
+    """Compute weights for vertical integration and meridional averages.
+
+    Arguments:
+    ---------
+    lev: the pressure levels;
+    nlev: the number of pressure levels;
+    lat: the latitudes in degrees;
+    """
+    # Compute sigma level and dsigma
+    sig = lev / PS
+    d_s = np.zeros(nlev)
+    for j_l in range(1, nlev - 1, 1):
+        d_s[j_l] = 0.5 * abs(sig[j_l + 1] - sig[j_l - 1])
+    d_s[0] = sig[0] + 0.5 * abs(sig[1] - sig[0])
+    d_s[nlev -
+        1] = 1 - sig[nlev - 1] + 0.5 * abs(sig[nlev - 1] - sig[nlev - 2])
+    # Compute Gaussian weights
+    y_l = np.zeros(lat.shape)
+    np.deg2rad(lat, out=y_l)
+    g_w = np.cos(y_l)
+    return d_s, y_l, g_w
+
+
+def write_to_tab(logfile, name, vared, varzon):
+    """Specify the formats for table entries.
+
+    Arguments:
+    ----------
+    logfile: the logfile where the entries must be written;
+    name: the name of the variable;
+    vared: a list of arrays containing the overall eddy components, the LW,
+           the SW and the KW components;
+    varzon: an array containing the zonal mean component;
+    """
+    vartot = varzon + vared[0]
+    with open(logfile, 'a+') as log:
+        log.write(' {} TOTAL    {: 4.3f}  {: 4.3f}  {: 4.3f}\n'.format(
+            name, vartot[0], vartot[1], vartot[2]))
+        log.write('--------------------------------------\n')
+        log.write(' {} ZONAL    {: 4.3f}  {: 4.3f}  {: 4.3f}\n'.format(
+            name, varzon[0], varzon[1], varzon[2]))
+        log.write('--------------------------------------\n')
+        log.write(' {} EDDY     {: 4.3f}  {: 4.3f}  {: 4.3f}\n'.format(
+            name, vared[0][0], vared[0][1], vared[0][2]))
+        log.write('--------------------------------------\n')
+        log.write(' {} EDDY(LW) {: 4.3f}  {: 4.3f}  {: 4.3f}\n'.format(
+            name, vared[1][0], vared[1][1], vared[1][2]))
+        log.write('--------------------------------------\n')
+        log.write(' {} EDDY(SW) {: 4.3f}  {: 4.3f}  {: 4.3f}\n'.format(
+            name, vared[2][0], vared[2][1], vared[2][2]))
+        log.write('--------------------------------------\n')
+        log.write(' {} EDDY(KW) {: 4.3f}  {: 4.3f}  {: 4.3f}\n'.format(
+            name, vared[3][0], vared[3][1], vared[3][2]))
+        log.write('--------------------------------------\n')
diff --git a/esmvaltool/diag_scripts/thermodyn_diagtool/mkthe.py b/esmvaltool/diag_scripts/thermodyn_diagtool/mkthe.py
new file mode 100644
index 0000000000..cd01f138f2
--- /dev/null
+++ b/esmvaltool/diag_scripts/thermodyn_diagtool/mkthe.py
@@ -0,0 +1,516 @@
+"""AUXILIARY FIELDS RETRIEVAL.
+
+Module for computation of the auxiliary variables needed by the tool.
+ +It contains the following functions: +- init_mkthe_te: compute emission temperature from OLR; +- init_mkthe_wat: initialise wfluxes; +- init_mkthe_lec: compute monthly mean near-surface zonal and meridional + velocities when daily means are provided; +- init_mkthe_direntr: compute auxiliary files needed for material entropy + production retrieval with the direct method; +- input_fields: obtain input fields for mkthe_main; +- mkthe_main: obtain equivalent potential temperatures, temperatures + representative of the sensible and latent heat exchanges in + the lower layers of the troposphere, boundary layer height and + lifting condensation level temperature. +- mon_from_day: obtain monthly means from daily means; +- wfluxes: obtain evaporation and precipitation from precipitation and latent + heat fluxes; +- write_output: write auxiliary fields to NetCDF file; + +@author: Valerio Lembo, valerio.lembo@uni-hamburg.de, Universitat Hamburg, 2018 +""" +import os +from shutil import move + +import numpy as np +from cdo import Cdo +from netCDF4 import Dataset + +import esmvaltool.diag_scripts.shared as e +from esmvaltool.diag_scripts.thermodyn_diagtool import fourier_coefficients + +ALV = 2.5008e6 # Latent heat of vaporization +G_0 = 9.81 # Gravity acceleration +P_0 = 100000. # reference pressure +RV = 461.51 # Gas constant for water vapour +T_MELT = 273.15 # freezing temp. +AKAP = 0.286 # Kappa (Poisson constant R/Cp) +GAS_CON = 287.0 # Gas constant +RA_1 = 610.78 # Parameter for Magnus-Teten-Formula +H_S = 300. # stable boundary layer height (m) +H_U = 1000. # unstable boundary layer height (m) +RIC_RS = 0.39 # Critical Richardson number for stable layer +RIC_RU = 0.28 # Critical Richardson number for unstable layer +L_C = 2501000 # latent heat of condensation +SIGMAINV = 17636684.3034 # inverse of the Stefan-Boltzmann constant + + +def init_mkthe_te(model, wdir, input_data): + """Compute auxiliary fields or perform time averaging of existing fields. + + Arguments: + --------- + model: the model name; + wdir: the working directory where the outputs are stored; + filelist: a list of file names containing the input fields; + + Returns + ------- + A file containing annual mean emission temperature fields, the time mean + globally averaged emission temperature, the file containing emission + temperature fields. + """ + cdo = Cdo() + rlut_file = e.select_metadata(input_data, short_name='rlut', + dataset=model)[0]['filename'] + # Compute monthly mean fields from 2D surface daily fields + # emission temperature + te_file = wdir + '/{}_te.nc'.format(model) + cdo.sqrt(input="-sqrt -mulc,{} {}".format(SIGMAINV, rlut_file), + output=te_file) + te_ymm_file = wdir + '/{}_te_ymm.nc'.format(model) + cdo.yearmonmean(input=te_file, output=te_ymm_file) + te_gmean_file = wdir + '/{}_te_gmean.nc'.format(model) + cdo.timmean(input='-fldmean {}'.format(te_ymm_file), output=te_gmean_file) + with Dataset(te_gmean_file) as f_l: + te_gmean_constant = f_l.variables['rlut'][0, 0, 0] + return te_ymm_file, te_gmean_constant, te_file + + +def init_mkthe_wat(model, wdir, input_data, flags): + """Compute auxiliary fields or perform time averaging of existing fields. 
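+
+    For the water mass budget, evaporation is obtained from the latent heat
+    flux as evspsbl = hfls / L_C, and rainfall as the difference pr - prsn
+    (see wfluxes below).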
+
+    Arguments:
+    ---------
+    model: the model name;
+    wdir: the working directory where the outputs are stored;
+    input_data: a list of file names containing the input fields;
+    flags: (wat: a flag for the water mass budget module (y or n),
+            entr: a flag for the material entropy production (y or n);
+            met: a flag for the material entropy production method
+            (1: indirect, 2: direct, 3: both));
+
+    Returns
+    -------
+    A list of input fields.
+    """
+    wat = flags[0]
+    if wat == 'True':
+        evspsbl_file, prr_file = wfluxes(model, wdir, input_data)
+        aux_files = [evspsbl_file, prr_file]
+        return aux_files
+
+
+def init_mkthe_lec(model, wdir, input_data):
+    """Compute auxiliary fields or perform time averaging of existing fields.
+
+    Arguments:
+    ---------
+    model: the model name;
+    wdir: the working directory where the outputs are stored;
+    input_data: a dictionary of file names containing the input fields;
+
+    Returns
+    -------
+    The files containing monthly mean near-surface velocities in the zonal
+    and meridional directions.
+    """
+    uas_file = e.select_metadata(input_data, short_name='uas',
+                                 dataset=model)[0]['filename']
+    vas_file = e.select_metadata(input_data, short_name='vas',
+                                 dataset=model)[0]['filename']
+    uasmn_file = mon_from_day(wdir, model, 'uas', uas_file)
+    vasmn_file = mon_from_day(wdir, model, 'vas', vas_file)
+    return uasmn_file, vasmn_file
+
+
+def init_mkthe_direntr(model, wdir, input_data, te_file, flags):
+    """Compute the MEP with the direct method.
+
+    Arguments:
+    ---------
+    model: the model name;
+    wdir: the working directory where the outputs are stored;
+    input_data: a dictionary of file names containing the input fields;
+    te_file: a file containing the emission temperature computed from OLR;
+    flags: (wat: a flag for the water mass budget module (y or n),
+            entr: a flag for the material entropy production (y or n);
+            met: a flag for the material entropy production method
+            (1: indirect, 2: direct, 3: both));
+
+    Returns
+    -------
+    A list of files containing the components of the MEP with the direct
+    method.
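+    The list order is: evspsbl, htop, prr, tabl, tasvert, tcloud, tcolumn,
+    tlcl; an empty list is returned if the direct method is not requested.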
+    """
+    cdo = Cdo()
+    met = flags[3]
+    if met in {'2', '3'}:
+        evspsbl_file, prr_file = wfluxes(model, wdir, input_data)
+        hfss_file = e.select_metadata(input_data,
+                                      short_name='hfss',
+                                      dataset=model)[0]['filename']
+        hus_file = e.select_metadata(input_data,
+                                     short_name='hus',
+                                     dataset=model)[0]['filename']
+        ps_file = e.select_metadata(input_data, short_name='ps',
+                                    dataset=model)[0]['filename']
+        ts_file = e.select_metadata(input_data, short_name='ts',
+                                    dataset=model)[0]['filename']
+        uas_file = e.select_metadata(input_data,
+                                     short_name='uas',
+                                     dataset=model)[0]['filename']
+        uas_tres = e.select_metadata(input_data,
+                                     short_name='uas',
+                                     dataset=model)[0]['mip']
+        vas_file = e.select_metadata(input_data,
+                                     short_name='vas',
+                                     dataset=model)[0]['filename']
+        vas_tres = e.select_metadata(input_data,
+                                     short_name='vas',
+                                     dataset=model)[0]['mip']
+        if uas_tres == 'day':
+            uas_file = mon_from_day(wdir, model, 'uas', uas_file)
+        if vas_tres == 'day':
+            vas_file = mon_from_day(wdir, model, 'vas', vas_file)
+        mk_list = [
+            ts_file, hus_file, ps_file, uas_file, vas_file, hfss_file, te_file
+        ]
+        htop_file, tabl_file, tlcl_file = mkthe_main(wdir, mk_list, model)
+        # Working temperatures for the hydrological cycle
+        tcloud_file = (wdir + '/{}_tcloud.nc'.format(model))
+        removeif(tcloud_file)
+        cdo.mulc('0.5',
+                 input='-add {} {}'.format(tlcl_file, te_file),
+                 options='-b F32',
+                 output=tcloud_file)
+        tcolumn_file = (wdir + '/{}_t_vertav_pot.nc'.format(model))
+        removeif(tcolumn_file)
+        cdo.mulc('0.5',
+                 input='-add {} {}'.format(ts_file, tcloud_file),
+                 options='-b F32',
+                 output=tcolumn_file)
+        # Working temperatures for the kin. en. diss. (updated)
+        tasvert_file = (wdir + '/{}_tboundlay.nc'.format(model))
+        removeif(tasvert_file)
+        cdo.fldmean(input='-mulc,0.5 -add {} {}'.format(ts_file, tabl_file),
+                    options='-b F32',
+                    output=tasvert_file)
+        aux_files = [
+            evspsbl_file, htop_file, prr_file, tabl_file, tasvert_file,
+            tcloud_file, tcolumn_file, tlcl_file
+        ]
+    else:
+        aux_files = []
+    return aux_files
+
+
+def input_fields(wdir, file_list):
+    """Manipulate input fields and read datasets.
+
+    Arguments:
+    ---------
+    wdir: the working directory path;
+    file_list: the list of files containing ts, hus,
+               ps, uas, vas, hfss, te;
+
+    Returns
+    -------
+    The hfss, huss, ps, te, ts and near-surface wind speed fields.
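+
+    The near-surface specific humidity huss is derived from the lowest valid
+    levels of the 3D hus field, using the surface pressure to decide which
+    levels lie above the ground (below-ground values are masked beforehand).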
+    """
+    cdo = Cdo()
+    ts_miss_file = wdir + '/ts.nc'
+    removeif(ts_miss_file)
+    cdo.setctomiss('0', input=file_list[0], output=ts_miss_file)
+    hus_miss_file = wdir + '/hus.nc'
+    removeif(hus_miss_file)
+    cdo.setctomiss('0', input=file_list[1], output=hus_miss_file)
+    ps_miss_file = wdir + '/ps.nc'
+    removeif(ps_miss_file)
+    cdo.setctomiss('0', input=file_list[2], output=ps_miss_file)
+    vv_missfile = wdir + '/V.nc'
+    removeif(vv_missfile)
+    vv_file = wdir + '/V_miss.nc'
+    removeif(vv_file)
+    cdo.sqrt(input='-add -sqr {} -sqr {}'.format(file_list[3], file_list[4]),
+             options='-b F32',
+             output=vv_file)
+    cdo.setctomiss('0', input=vv_file, output=vv_missfile)
+    os.remove(vv_file)
+    hfss_miss_file = wdir + '/hfss.nc'
+    removeif(hfss_miss_file)
+    cdo.setctomiss('0', input=file_list[5], output=hfss_miss_file)
+    te_miss_file = wdir + '/te.nc'
+    removeif(te_miss_file)
+    cdo.setctomiss('0', input=file_list[6], output=te_miss_file)
+    with Dataset(ts_miss_file) as dataset:
+        t_s = dataset.variables['ts'][:, :, :]
+    with Dataset(hus_miss_file) as dataset:
+        hus = dataset.variables['hus'][:, :, :, :]
+        lev = dataset.variables['plev'][:]
+    with Dataset(ps_miss_file) as dataset:
+        p_s = dataset.variables['ps'][:, :, :]
+    with Dataset(vv_missfile) as dataset:
+        vv_hor = dataset.variables['uas'][:, :, :]
+    with Dataset(hfss_miss_file) as dataset:
+        hfss = dataset.variables['hfss'][:, :, :]
+    with Dataset(te_miss_file) as dataset:
+        t_e = dataset.variables['rlut'][:, :, :]
+    huss = hus[:, 0, :, :]
+    huss = np.where(lev[0] >= p_s, huss, 0.)
+    nlev = len(lev)
+    for l_l in range(nlev):
+        aux = hus[:, l_l, :, :]
+        aux = np.where((p_s >= lev[l_l]), aux, 0.)
+        huss = huss + aux
+    remove_files = [
+        ts_miss_file, hus_miss_file, ps_miss_file, vv_missfile,
+        hfss_miss_file, te_miss_file
+    ]
+    for filen in remove_files:
+        os.remove(filen)
+    return hfss, huss, p_s, t_e, t_s, vv_hor
+
+
+def mkthe_main(wdir, file_list, modelname):
+    """Compute the auxiliary variables for the Thermodynamic diagnostic tool.
+
+    It computes equivalent potential temperatures and temperatures
+    representative of the sensible and latent heat exchanges in the lower
+    layers of the troposphere. Estimates of the boundary layer height and
+    lifting condensation level are also provided.
+
+    Arguments:
+    ---------
+    wdir: the working directory path;
+    file_list: the list of files containing ts, hus, ps, uas, vas, hfss, te;
+    modelname: the name of the model from which the fields are;
+
+    Returns
+    -------
+    The fields containing the boundary layer top height, the boundary layer
+    mean temperature, and the temperature at the lifting condensation level
+    (LCL).
+    """
+    hfss, huss, p_s, t_e, t_s, vv_hor = input_fields(wdir, file_list)
+    ricr = RIC_RU
+    h_bl = H_U
+    ricr = np.where(hfss >= 0.75, ricr, RIC_RS)
+    h_bl = np.where(hfss >= 0.75, h_bl, H_S)
+    ev_p = huss * p_s / (huss + GAS_CON / RV)  # Water vapour pressure
+    td_inv = (1 / T_MELT) - (RV / ALV) * np.log(ev_p / RA_1)  # Dewpoint t.
+    t_d = 1 / td_inv
+    hlcl = 125. * (t_s - t_d)  # Empirical formula for LCL height
+    # Negative heights are replaced by the height of the stable
+    # boundary layer (lower constraint to the height of the cloud layer)
+    hlcl = np.where(hlcl >= 0., hlcl, h_bl)
+    cp_d = GAS_CON / AKAP
+    ztlcl = t_s - (G_0 / cp_d) * hlcl
+    # Compute the pseudo-adiabatic lapse rate to obtain the height of cloud
+    # top knowing emission temperature.
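+    # A sketch of the expression used below: the lapse rate is approximated
+    # as gw_pa = (g/cp) * (1 + (Lv*q/(Rd*T)) /
+    #                         (1 + 0.622*Lv**2*q/(cp*Rd*T**2))),
+    # a variant of the saturated pseudo-adiabatic lapse rate; the cloud-top
+    # height then follows as htop = (ztlcl - t_e) / gw_pa + hlcl.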
+    gw_pa = (G_0 / cp_d) * (1 + ((ALV * huss) / (GAS_CON * ztlcl)) /
+                            (1 + ((ALV**2 * huss * 0.622) /
+                                  (cp_d * GAS_CON * ztlcl**2))))
+    htop = -(t_e - ztlcl) / gw_pa + hlcl
+    # Use potential temperature and critical Richardson number to compute
+    # temperature and height of the boundary layer top
+    ths = t_s * (P_0 / p_s)**AKAP
+    thz = ths + 0.03 * ricr * (vv_hor)**2 / h_bl
+    p_z = p_s * np.exp((-G_0 * h_bl) / (GAS_CON * t_s))  # Barometric eq.
+    t_z = thz * (P_0 / p_z)**(-AKAP)
+    outlist = [ztlcl, t_z, htop]
+    htop_file, tabl_file, tlcl_file = write_output(wdir, modelname, file_list,
+                                                   outlist)
+    return htop_file, tabl_file, tlcl_file
+
+
+def mon_from_day(wdir, model, name, filein):
+    """Compute monthly means from daily means.
+
+    Arguments:
+    ---------
+    wdir: the working directory path;
+    model: the model name;
+    name: the name of the field to be averaged;
+    filein: the input file containing the field to be averaged;
+
+    Returns
+    -------
+    The name of the file containing the monthly averaged field.
+    """
+    cdo = Cdo()
+    fileaux = wdir + '/aux.nc'
+    cdo.selvar(name, input=filein, output=fileaux)
+    move(fileaux, filein)
+    fileout = wdir + '/{}_{}_mm.nc'.format(model, name)
+    cdo.selvar(name,
+               input='-monmean {}'.format(filein),
+               options='-b F32',
+               output=fileout)
+    return fileout
+
+
+def removeif(filename):
+    """Remove filename if it exists."""
+    try:
+        os.remove(filename)
+    except OSError:
+        pass
+
+
+def wfluxes(model, wdir, input_data):
+    """Compute evaporation and rainfall mass fluxes.
+
+    Arguments:
+    ---------
+    model: the model name;
+    wdir: the working directory where the outputs are stored;
+    input_data: a dictionary of file names containing the input fields;
+
+    Returns
+    -------
+    The names of the files containing the evaporation and rainfall
+    precipitation fluxes.
+    """
+    cdo = Cdo()
+    hfls_file = e.select_metadata(input_data, short_name='hfls',
+                                  dataset=model)[0]['filename']
+    pr_file = e.select_metadata(input_data, short_name='pr',
+                                dataset=model)[0]['filename']
+    prsn_file = e.select_metadata(input_data, short_name='prsn',
+                                  dataset=model)[0]['filename']
+    aux_file = wdir + '/aux.nc'
+    evspsbl_file = (wdir + '/{}_evspsbl.nc'.format(model))
+    cdo.divc(str(L_C), input="{}".format(hfls_file), output=evspsbl_file)
+    prr_file = wdir + '/{}_prr.nc'.format(model)
+    cdo.sub(input="{} {}".format(pr_file, prsn_file), output=aux_file)
+    cdo.chname('pr,prr', input=aux_file, output=prr_file)
+    return evspsbl_file, prr_file
+
+
+def write_output(wdir, model, file_list, varlist):
+    """Write auxiliary variables to new NC files, write new attributes.
+
+    Arguments:
+    ---------
+    wdir: the work directory where the outputs are stored;
+    model: the name of the model;
+    file_list: the list containing the input fields;
+    varlist: a list containing the variables to be written to NC files, i.e.
+             tlcl (the temperature at the LCL), t_z (the temperature at the
+             boundary layer top), htop (the height of the boundary layer
+             top); their dimensions are (time, lat, lon);
+
+    Returns
+    -------
+    The names of the files containing fields of boundary layer top height,
+    boundary layer mean temperature, and temperature at the lifting
+    condensation level (LCL).
+    """
+    cdo = Cdo()
+    fourc = fourier_coefficients
+    ztlcl = varlist[0]
+    t_z = varlist[1]
+    htop = varlist[2]
+    tlcl_temp = wdir + '/tlcl.nc'
+    removeif(tlcl_temp)
+    with Dataset(tlcl_temp, 'w', format='NETCDF4') as w_nc_fid:
+        w_nc_fid.description = (
+            "Monthly mean LCL temperature from {} model. 
".format(model), + "Calculated by Thermodynamics model diagnostics ", + "in ESMValTool. Author Valerio Lembo, ", + "Meteorologisches Institut, Universitaet ", "Hamburg.") + with Dataset(file_list[0]) as dataset: + fourc.extr_time(dataset, w_nc_fid) + fourc.extr_lat(dataset, w_nc_fid, 'lat') + fourc.extr_lon(dataset, w_nc_fid) + w_nc_var = w_nc_fid.createVariable('tlcl', 'f8', + ('time', 'lat', 'lon')) + w_nc_var.setncatts({ + 'long_name': + "LCL Temperature", + 'units': + "K", + 'level_desc': + "surface", + 'var_desc': + ("LCL temperature from LCL ", "height (Magnus formulas and dry ", + "adiabatic lapse ratio)"), + 'statistic': + 'monthly mean' + }) + w_nc_fid.variables['tlcl'][:] = ztlcl + tabl_temp = wdir + '/tabl.nc' + removeif(tabl_temp) + with Dataset(tabl_temp, 'w', format='NETCDF4') as w_nc_fid: + w_nc_fid.description = ( + "Monthly mean BL top temperature for {} model. ".format(model), + "Calculated by Thermodynamics model diagnostics ", + "in ESMValTool. Author Valerio ", + "Lembo, Meteorologisches Institut, ", "Universitaet Hamburg.") + with Dataset(file_list[0]) as dataset_tabl: + fourc.extr_time(dataset_tabl, w_nc_fid) + fourc.extr_lat(dataset_tabl, w_nc_fid, 'lat') + fourc.extr_lon(dataset_tabl, w_nc_fid) + w_nc_var = w_nc_fid.createVariable('tabl', 'f8', + ('time', 'lat', 'lon')) + w_nc_var.setncatts({ + 'long_name': + "Temperature at BL top", + 'units': + "K", + 'level_desc': + "surface", + 'var_desc': + ("Temperature at the Boundary Layer ", + "top, from boundary layer thickness and ", "barometric equation"), + 'statistic': + 'monthly mean' + }) + w_nc_fid.variables['tabl'][:] = t_z + htop_temp = wdir + '/htop.nc' + removeif(htop_temp) + with Dataset(htop_temp, 'w', format='NETCDF4') as w_nc_fid: + w_nc_fid.description = ( + "Monthly mean height of the BL top for {} model. ".format(model), + "Calculated by Thermodynamics model diagnostics ", + "in ESMValTool. Author Valerio ", + "Lembo, Meteorologisches Institut, ", "Universitaet Hamburg.") + with Dataset(file_list[0]) as dataset_htop: + fourc.extr_time(dataset_htop, w_nc_fid) + fourc.extr_lat(dataset_htop, w_nc_fid, 'lat') + fourc.extr_lon(dataset_htop, w_nc_fid) + w_nc_var = w_nc_fid.createVariable('htop', 'f8', + ('time', 'lat', 'lon')) + w_nc_var.setncatts({ + 'long_name': + "Height at BL top", + 'units': + "m", + 'level_desc': + "surface", + 'var_desc': + ("Height at the Boundary Layer top, ", + "from boundary layer thickness and ", "barometric equation"), + 'statistic': + 'monthly mean' + }) + w_nc_fid.variables['htop'][:] = htop + tlcl_file = wdir + '/{}_tlcl.nc'.format(model) + cdo.setrtomiss('400,1e36', input=tlcl_temp, output=tlcl_file) + tabl_file = wdir + '/{}_tabl.nc'.format(model) + cdo.setrtomiss('400,1e36', input=tabl_temp, output=tabl_file) + htop_file = wdir + '/{}_htop.nc'.format(model) + cdo.setrtomiss('12000,1e36', input=htop_temp, output=htop_file) + return htop_file, tabl_file, tlcl_file diff --git a/esmvaltool/diag_scripts/thermodyn_diagtool/plot_script.py b/esmvaltool/diag_scripts/thermodyn_diagtool/plot_script.py new file mode 100644 index 0000000000..ee5da73278 --- /dev/null +++ b/esmvaltool/diag_scripts/thermodyn_diagtool/plot_script.py @@ -0,0 +1,1080 @@ +"""FUNCTIONS FOR PLOTS. + +Plotting module for Thermodyn_diagtool. 
+
+The module provides plots for a single model of:
+- climatological mean maps of TOA, atmospheric and surface energy budgets;
+- annual mean time series of TOA, atmospheric and surface energy budgets anom.;
+- climatological mean maps of latent energy and water mass budgets;
+- annual mean time series of latent energy and water mass budget anom.;
+- meridional section of meridional enthalpy transports;
+- meridional section of meridional water mass transports;
+- scatter plots of atmospheric vs. oceanic peak magnitudes in the two hem.;
+- climatological mean maps of every component of the entropy budget.
+
+@author: valerio.lembo@uni-hamburg.de, Valerio Lembo, Hamburg University, 2018.
+"""
+import math
+import os
+from shutil import move
+
+import cartopy.crs as ccrs
+import matplotlib.pyplot as plt
+import numpy as np
+from cdo import Cdo
+from matplotlib import rcParams
+from netCDF4 import Dataset
+from scipy import interpolate, stats
+
+from esmvaltool.diag_scripts.shared import ProvenanceLogger
+from esmvaltool.diag_scripts.thermodyn_diagtool import (
+    fourier_coefficients,
+    provenance_meta,
+)
+
+
+def balances(cfg, wdir, plotpath, filena, name, model):
+    """Plot everything related to energy and water mass budgets.
+
+    This method provides climatological annual mean maps of TOA, atmospheric
+    and surface energy budgets, time series of annual mean anomalies in the
+    two hemispheres and meridional sections of meridional enthalpy
+    transports. Scatter plots of oceanic vs. atmospheric meridional
+    enthalpy transports are also provided.
+
+    Arguments:
+    ---------
+    cfg: the ESMValTool configuration, used for provenance logging;
+    wdir: the working directory;
+    plotpath: the path where the plot has to be saved;
+    filena: the files containing input fields;
+    name: the name of the variable associated with the input field;
+    model: the name of the model to be analysed;
+
+    @author: Valerio Lembo, Hamburg University, 2018.
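+
+    Note: with three input fields (nsub == 3) the TOA/atmospheric/surface
+    energy branch is plotted; with two (nsub == 2) the water mass and latent
+    heat branch is used instead, as implemented below.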
+ """ + cdo = Cdo() + provlog = ProvenanceLogger(cfg) + nsub = len(filena) + pdir = plotpath + plotentname = pdir + '/{}_heat_transp.png'.format(model) + plotwmbname = pdir + '/{}_wmb_transp.png'.format(model) + plotlatname = pdir + '/{}_latent_transp.png'.format(model) + + # timesery = np.zeros([nsub, 2]) + dims, ndims, tmean, zmean, timeser = global_averages(nsub, filena, name) + transp_mean = np.zeros([nsub, ndims[1]]) + lat_maxm = np.zeros([nsub, 2, len(dims[3])]) + tr_maxm = np.zeros([nsub, 2, len(dims[3])]) + lim = [55, 55, 25] + for i_f in np.arange(nsub): + transp = transport(zmean[i_f, :, :], timeser[i_f, :, 0], dims[1]) + transp_mean[i_f, :], list_peak = transports_preproc( + dims[1], ndims[3], lim[i_f], transp) + lat_maxm[i_f, :, :] = list_peak[0] + tr_maxm[i_f, :, :] = list_peak[1] + if nsub == 3: + ext_name = [ + 'TOA Energy Budget', 'Atmospheric Energy Budget', + 'Surface Energy Budget' + ] + transpty = (-6.75E15, 6.75E15) + coords = [dims[0], dims[1]] + plot_climap_eb(model, pdir, coords, tmean, ext_name) + fig = plt.figure() + strings = ['Meridional heat transports', 'Latitude [deg]', '[W]'] + lats = dims[1] + for i in np.arange(nsub): + filename = filena[i] + '.nc' + if name[i] == 'toab': + nameout = 'total' + elif name[i] == 'atmb': + nameout = 'atmos' + elif name[i] == 'surb': + nameout = 'ocean' + nc_f = wdir + '/{}_transp_mean_{}.nc'.format(nameout, model) + removeif(nc_f) + lat_model = 'lat_{}'.format(model) + pr_output(transp_mean[i, :], filename, nc_f, nameout, lat_model) + name_model = '{}_{}'.format(nameout, model) + cdo.chname('{},{}'.format(nameout, name_model), + input=nc_f, + output='aux.nc') + move('aux.nc', nc_f) + cdo.chname('lat,{}'.format(lat_model), input=nc_f, output='aux.nc') + move('aux.nc', nc_f) + attr = ['{} meridional enthalpy transports'.format(nameout), model] + provrec = provenance_meta.get_prov_transp(attr, filename) + provlog.log(nc_f, provrec) + # provlog.log(plotentname, provrec) + plot_1m_transp(lats, transp_mean[i, :], transpty, strings) + plt.grid() + plt.savefig(plotentname) + plt.close(fig) + plot_1m_scatter(model, pdir, lat_maxm, tr_maxm) + elif nsub == 2: + ext_name = ['Water mass budget', 'Latent heat budget'] + transpwy = (-2E9, 2E9) + transply = (-6E15, 6E15) + coords = [dims[0], dims[1]] + plot_climap_wm(model, pdir, coords, tmean, ext_name, name) + nc_f = wdir + '/{}_transp_mean_{}.nc'.format('wmb', model) + removeif(nc_f) + filena[0] = filena[0].split('.nc', 1)[0] + filename = filena[0] + '.nc' + pr_output(transp_mean[0, :], filename, nc_f, 'wmb', 'lat') + attr = ['water mass transport', model] + provrec = provenance_meta.get_prov_transp(attr, filename) + provlog.log(nc_f, provrec) + # provlog.log(plotwmbname, provrec) + nc_f = wdir + '/{}_transp_mean_{}.nc'.format('latent', model) + removeif(nc_f) + filena[1] = filena[1].split('.nc', 1)[0] + filename = filena[1] + '.nc' + pr_output(transp_mean[1, :], filename, nc_f, 'latent', 'lat') + attr = ['latent energy transport', model] + provrec = provenance_meta.get_prov_transp(attr, filename) + provlog.log(nc_f, provrec) + # provlog.log(plotlatname, provrec) + strings = ['Water mass transports', 'Latitude [deg]', '[kg*s-1]'] + fig = plt.figure() + plot_1m_transp(dims[1], transp_mean[0, :], transpwy, strings) + plt.grid() + plt.savefig(plotwmbname) + plt.close(fig) + strings = ['Latent heat transports', 'Latitude [deg]', '[W]'] + fig = plt.figure() + plot_1m_transp(dims[1], transp_mean[1, :], transply, strings) + plt.grid() + plt.savefig(plotlatname) + plt.close(fig) + for i_f in 
np.arange(nsub):
+        fig = plt.figure()
+        axi = plt.subplot(111)
+        axi.plot(dims[3], timeser[i_f, :, 0], 'k', label='Global')
+        axi.plot(dims[3], timeser[i_f, :, 1], 'r', label='SH')
+        axi.plot(dims[3], timeser[i_f, :, 2], 'b', label='NH')
+        plt.title('Annual mean {}'.format(ext_name[i_f]))
+        plt.xlabel('Years')
+        if ext_name[i_f] == 'Water mass budget':
+            plt.ylabel('[Kg m-2 s-1]')
+        else:
+            plt.ylabel('[W/m2]')
+        axi.legend(loc='upper center',
+                   bbox_to_anchor=(0.5, -0.13),
+                   shadow=True,
+                   ncol=3)
+        plt.tight_layout()
+        plt.grid()
+        plt.savefig(pdir + '/{}_{}_timeser.png'.format(model, name[i_f]))
+        plt.close(fig)
+
+
+def entropy(plotpath, filename, name, ext_name, model):
+    """Plot everything related to annual mean maps of mat. entr. prod.
+
+    Arguments:
+    ---------
+    plotpath: the path where the plot has to be saved;
+    filename: the file containing input fields;
+    name: the name of the variable associated with the input field;
+    ext_name: the long name of the input field;
+    model: the name of the model to be analysed;
+
+    @author: Valerio Lembo, Hamburg University, 2018.
+    """
+    pdir = plotpath
+    if ext_name == 'Vertical entropy production':
+        rangec = [-0.01, 0.1]
+        c_m = 'YlOrBr'
+    elif ext_name == 'Horizontal entropy production':
+        rangec = [-0.5, 0.5]
+        c_m = 'bwr'
+    elif ext_name == 'Sensible Heat entropy production':
+        rangec = [-0.01, 0.01]
+        c_m = 'YlOrBr'
+    elif ext_name == 'Evaporation entropy production':
+        rangec = [0, 1]
+        c_m = 'YlOrBr'
+    elif ext_name == 'Rainfall entropy production':
+        rangec = [0, 1]
+        c_m = 'YlOrBr'
+    elif ext_name == 'Snowfall entropy production':
+        rangec = [0, 0.25]
+        c_m = 'YlOrBr'
+    elif ext_name == 'Snowmelt entropy production':
+        rangec = [0, 0.05]
+        c_m = 'YlOrBr'
+    elif ext_name == 'Potential energy entropy production':
+        rangec = [0, 0.1]
+        c_m = 'YlOrBr'
+    with Dataset(filename) as dataset:
+        var = dataset.variables[name][:, :, :]
+        lats = dataset.variables['lat'][:]
+        lons = dataset.variables['lon'][:]
+    tmean = np.nanmean(var, axis=0)
+    fig = plt.figure()
+    axi = plt.axes(projection=ccrs.PlateCarree())
+    coords = [lons, lats]
+    title = '{} (mW m-2 K-1)'.format(ext_name)
+    plot_climap(axi, coords, tmean, title, rangec, c_m)
+    plt.savefig(pdir + '/{}_{}_climap.png'.format(model, name))
+    plt.close(fig)
+
+
+def global_averages(nsub, filena, name):
+    """Compute zonal mean, global mean, time mean averages.
+
+    Arguments:
+    ---------
+    nsub: the number of variables for which averages must be computed;
+    filena: the name of the file containing the variable (without extension);
+    name: the names of the variables;
+
+    Returns
+    -------
+    Dimensions, number of dimensions, a global mean map, a Hovmoller plot,
+    and a time series of annual global mean values.
+
+    @author: Valerio Lembo, Hamburg University, 2018.
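+
+    The monthly fields are reshaped to (years, 12, lat, lon) and averaged
+    over the month axis to obtain annual means before the zonal, global and
+    time averages are computed.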
+ """ + sep = '.nc' + filena[0] = filena[0].split(sep, 1)[0] + filename = filena[0] + sep + with Dataset(filename) as dataset: + lats = dataset.variables['lat'][:] + lons = dataset.variables['lon'][:] + time = dataset.variables['time'][:] + nlats = len(lats) + nlons = len(lons) + ntime = len(time) + yr_0 = int(len(time) / 12) + timey = np.linspace(0, yr_0 - 1, num=yr_0) + dims = [lons, lats, time, timey] + ndims = [nlons, nlats, ntime, yr_0] + var = np.zeros([nsub, ntime, nlats, nlons]) + for i in np.arange(nsub): + filena[i] = filena[i].split(sep, 1)[0] + filename = filena[i] + '.nc' + with Dataset(filename) as dataset: + dataset = Dataset(filename) + var[i, :, :, :] = dataset.variables[name[i]][:, :, :] + var_r = np.reshape(var, + (nsub, int(np.shape(var)[1] / 12), 12, nlats, nlons)) + vary = np.nanmean(var_r, axis=2) + zmean = np.nanmean(vary, axis=3) + tmean = np.nanmean(vary, axis=1) + timeser = np.zeros([nsub, yr_0, 3]) + for i_f in np.arange(nsub): + zmean_w = latwgt(lats, zmean[i_f, :, :]) + gmean = np.nansum(zmean_w, axis=1) + shmean = hemean(0, lats, zmean[i_f, :, :]) + nhmean = hemean(1, lats, zmean[i_f, :, :]) + timeser[i_f, :, :] = np.column_stack((gmean, shmean, nhmean)) + return dims, ndims, tmean, zmean, timeser + + +def hemean(hem, lat, inp): + """Compute hemispheric averages. + + Arguments: + --------- + hem: a parameter for the choice of the hemisphere (1 stands for SH); + lat: latitude (in degrees); + inp: input field; + + Returns + ------- + A time series of annual hemispheric mean values. + + @author: Valerio Lembo, Hamburg University, 2018. + """ + j_end = np.shape(inp)[1] + zmn = latwgt(lat, inp) + hmean = [] + if hem == 1: + if j_end % 2 == 0: + hmean = 2 * np.nansum(zmn[:, int(j_end / 2):j_end], axis=1) + else: + hmean = 2 * np.nansum(zmn[:, int((j_end + 1) / 2):j_end], axis=1) + else: + if j_end % 2 == 0: + hmean = 2 * np.nansum(zmn[:, 1:int(j_end / 2)], axis=1) + else: + hmean = 2 * np.nansum(zmn[:, 1:int((j_end - 1) / 2)], axis=1) + return hmean + + +def init_plotentr(model, pdir, flist): + """Define options for plotting maps of entropy production components. + + Arguments: + --------- + model: the name of the model; + path: the path to the plots directory; + flist: a list of files containing the components of the entropy production + with the direct method; + + @author: Valerio Lembo, Hamburg University, 2018. + """ + entropy(pdir, flist[0], 'ssens', 'Sensible Heat entropy production', model) + entropy(pdir, flist[1], 'sevap', 'Evaporation entropy production', model) + entropy(pdir, flist[2], 'srain', 'Rainfall entropy production', model) + entropy(pdir, flist[3], 'ssnow', 'Snowfall entropy production', model) + entropy(pdir, flist[4], 'smelt', 'Snowmelt entropy production', model) + entropy(pdir, flist[5], 'spotp', 'Potential energy entropy production', + model) + + +def latwgt(lat, t_r): + """Compute weighted average over latitudes. + + Arguments: + --------- + lat: latitude (in degrees); + tr: the field to be averaged (time,lat); + + Returns + ------- + A latitudinal mean weighted by the cosine of latitudes. + + @author: Valerio Lembo, Hamburg University, 2018. 
+ """ + p_i = math.pi + conv = 2 * p_i / 360 + dlat = np.zeros(len(lat)) + for i in range(len(lat) - 1): + dlat[i] = abs(lat[i + 1] - lat[i]) + dlat[len(lat) - 1] = dlat[len(lat) - 2] + latr = conv * lat + dlatr = conv * dlat + tr2 = np.zeros((np.shape(t_r)[0], np.shape(t_r)[1])) + for j in range(len(lat)): + tr2[:, j] = t_r[:, j] * np.cos(latr[j]) * dlatr[j] / 2 + return tr2 + + +def lec_plot(model, pdir, lect): + """Define options for plotting maps of entropy production components. + + Arguments: + --------- + model: the name of the model; + path: the path to the plots directory; + lect: a time series of annual mean LEC intensities; + + @author: Valerio Lembo, Hamburg University, 2019. + """ + fig = plt.figure() + axi = plt.subplot(111) + time = np.linspace(0, len(lect), len(lect)) + axi.plot(time, lect, 'k') + plt.title('Annual mean LEC intensity for {}'.format(model)) + plt.xlabel('Years') + plt.ylabel('[W/m2]') + plt.tight_layout() + plt.grid() + plt.savefig(pdir + '/{}_lec_timeser.png'.format(model)) + plt.close(fig) + + +def plot_climap_eb(model, pdir, coords, tmean, ext_name): + """Plot climatological mean maps of TOA, atmospheric, oceanic energy budg. + + Arguments: + --------- + model: the name of the model; + pdir: a plots directory; + coords: the lon and lat coordinates; + tmean: the climatological mean (3,lat,lon) maps of the three budgets; + ext_name: the extended name of the budget, to be used for the title; + + @author: Valerio Lembo, Hamburg University, 2018. + """ + rangect = [-100, 100] + fig = plt.figure(figsize=(12, 22)) + axi = plt.subplot(311, projection=ccrs.PlateCarree()) + title = '{} (W/m2)'.format(ext_name[0]) + plot_climap(axi, coords, tmean[0, :, :], title, rangect, 'bwr') + axi = plt.subplot(312, projection=ccrs.PlateCarree()) + title = '{} (W/m2)'.format(ext_name[1]) + plot_climap(axi, coords, tmean[1, :, :], title, rangect, 'bwr') + axi = plt.subplot(313, projection=ccrs.PlateCarree()) + title = '{} (W/m2)'.format(ext_name[2]) + plot_climap(axi, coords, tmean[2, :, :], title, rangect, 'bwr') + plt.savefig(pdir + '/{}_energy_climap.png'.format(model)) + plt.close(fig) + + +def plot_climap_wm(model, pdir, coords, tmean, ext_name, name): + """Plot climatological mean maps of water mass and latent energy budgets. + + Arguments: + --------- + model: the name of the model; + pdir: a plots directory; + coords: the lon and lat coordinates; + tmean: the climatological mean (3,lat,lon) maps of the three budgets; + ext_name: the extended name of the budget, to be used for the title; + name: the variable name, used for the file name of the figure; + + @author: Valerio Lembo, Hamburg University, 2018. + """ + rangecw = [-1E-4, 1E-4] + rangecl = [-150, 150] + fig = plt.figure() + axi = plt.subplot(111, projection=ccrs.PlateCarree()) + title = '{} (Km m-2 s-1)'.format(ext_name[0]) + plot_climap(axi, coords, tmean[0, :, :], title, rangecw, 'bwr') + plt.savefig(pdir + '/{}_{}_climap.png'.format(model, name[0])) + plt.close(fig) + fig = plt.figure() + axi = plt.subplot(111, projection=ccrs.PlateCarree()) + title = '{} (W/m2)'.format(ext_name[1]) + plot_climap(axi, coords, tmean[1, :, :], title, rangecl, 'bwr') + plt.savefig(pdir + '/{}_{}_climap.png'.format(model, name[1])) + plt.close(fig) + + +def plot_climap(axi, coords, fld, title, rrange, c_m): + """Plot very colourful maps. 
+ + Arguments: + --------- + axi: an axis identifier; + coords: the lon and lat coordinates; + fld: the field to be plotted; + title: the title to appear on the figure; + rrange: the range for the color bar; + c_m: a color map identifier; + + @author: Valerio Lembo, Hamburg University, 2018. + """ + axi.coastlines() + lons = np.linspace(0, 360, len(coords[0])) - (coords[0][1] - coords[0][0]) + plt.contourf(lons, coords[1], fld, 60, transform=ccrs.PlateCarree()) + plt.pcolor(lons, + coords[1], + fld, + vmin=rrange[0], + vmax=rrange[1], + cmap=c_m, + antialiaseds='True') + plt.colorbar() + plt.title(title, fontsize=14) + + +def plot_ellipse(semimaj, semimin, phi, x_cent, y_cent, a_x): + """Plot ellipses in Python in a simple way. + + This method plots ellipses with matplotlib. + + Arguments: + --------- + semimaj: the length of the major axis; + semimin: the length of the minor axis; + phi: the tilting of the semimaj axis; + x_cent, y_cent: the coordinates of the ellipse centre; + a_x: an object containing the axis properties; + + @author: Valerio Lembo, Hamburg University, 2018. + """ + theta = np.linspace(0, 2 * np.pi, 100) + r_r = 1 / np.sqrt((np.cos(theta))**2 + (np.sin(theta))**2) + x_x = r_r * np.cos(theta) + y_x = r_r * np.sin(theta) + data = np.array([x_x, y_x]) + s_ax = np.array([[semimaj, 0], [0, semimin]]) + r_angle = np.array([[np.cos(phi), -np.sin(phi)], + [np.sin(phi), np.cos(phi)]]) + t_t = np.dot(r_angle, s_ax) + data = np.dot(t_t, data) + data[0] += x_cent + data[1] += y_cent + a_x.plot(data[0], data[1], color='b', linestyle='-') + + +def plot_1m_scatter(model, pdir, lat_maxm, tr_maxm): + """Plot the scatter plots of atmospheric vs. oceanic peaks and locations. + + The function produces scatter plots for the atmospheric vs. oceanic peak + magnitudes in the NH (a) and SH (b), atmospheric vs. ocean peak locations + in the NH (c) and SH (d). + + Arguments: + --------- + model: the name of the model; + pdir: a plots directory; + lat_maxm: the positions of the peaks; + tr_maxm: the magnitudes of the peaks; + + @author: Valerio Lembo, Hamburg University, 2018. + """ + fig = plt.figure() + fig.set_size_inches(12, 12) + axi = plt.subplot(221) + axi.set_figsize = (50, 50) + plt.scatter(tr_maxm[1, 0, :], tr_maxm[2, 0, :], c=(0, 0, 0), alpha=1) + plt.title('(a) Atm. vs ocean magnitude - SH', fontsize=13, y=1.02) + plt.xlabel('Atmos. trans. [W]', fontsize=11) + plt.ylabel('Oceanic trans. [W]', fontsize=11) + plt.grid() + axi = plt.subplot(222) + axi.set_figsize = (50, 50) + plt.scatter(tr_maxm[1, 1, :], tr_maxm[2, 1, :], c=(0, 0, 0), alpha=1) + plt.title('(b) Atm. vs ocean magnitude - NH', fontsize=13, y=1.02) + plt.xlabel('Atmos. trans. [W]', fontsize=11) + plt.ylabel('Oceanic trans. [W]', fontsize=11) + plt.grid() + axi = plt.subplot(223) + axi.set_figsize = (50, 50) + plt.scatter(lat_maxm[1, 0, :], lat_maxm[2, 0, :], c=(0, 0, 0), alpha=1) + plt.title('(c) Atm. vs ocean location - SH', fontsize=13, y=1.02) + plt.xlabel('Atmos. trans. position [degrees of latitude]', fontsize=11) + plt.ylabel('Oceanic trans. position [degrees of latitude]', fontsize=11) + plt.grid() + axi = plt.subplot(224) + axi.set_figsize = (50, 50) + plt.scatter(lat_maxm[1, 1, :], lat_maxm[2, 1, :], c=(0, 0, 0), alpha=1) + plt.title('(d) Atm. vs ocean location - NH', fontsize=13, y=1.02) + plt.xlabel('Atmos. trans. position [degrees of latitude]', fontsize=11) + plt.ylabel('Oceanic trans. 
position [degrees of latitude]', fontsize=11)
+    plt.grid()
+    plt.savefig(pdir + '/{}_scatpeak.png'.format(model))
+    plt.close(fig)
+
+
+def plot_1m_transp(lats, yval, ylim, strings):
+    """Plot a meridional section of enthalpy transport for one model.
+
+    This function plots total, atmospheric and oceanic meridional enthalpy
+    transports on the same panel.
+
+    Arguments:
+    ---------
+    lats: the latitudinal dimension as a 1D array;
+    yval: the meridional enthalpy transport of one component as a 1D array
+          (lat); successive calls draw the total, atmospheric and oceanic
+          transports on the same panel;
+    ylim: a range for the y-axis;
+    strings: a list of strings containing the title of the figure, the names
+             of the x and y axes;
+
+    @author: Valerio Lembo, Hamburg University, 2018.
+    """
+    plt.subplot(111)
+    plt.plot(lats, yval)
+    plt.title(strings[0], fontsize=10)
+    plt.xlabel(strings[1], fontsize=10)
+    plt.ylabel(strings[2])
+    plt.tight_layout()
+    plt.ylim(ylim)
+    plt.xlim(-90, 90)
+
+
+def plot_mm_ebscatter(pdir, eb_list):
+    """Plot multi-model scatter plots of EB mean values vs. their variability.
+
+    The function produces a plot containing 4 scatter plots:
+    - (a) TOA mean energy budget vs. its interannual variability;
+    - (b) Atmospheric mean energy budget vs. its interannual variability;
+    - (c) Surface mean energy budget vs. its interannual variability;
+    - (d) Atmospheric vs. surface energy budget with whiskers encompassing the
+      1sigma uncertainty range;
+
+    Arguments:
+    ---------
+    pdir: a plots directory;
+    eb_list: a list containing the TOA, atmospheric and surface energy budgets
+             as a 2D array (model, 2), with the first column being the mean
+             value and the second column being the inter-annual standard
+             deviation;
+
+    @author: Valerio Lembo, Hamburg University, 2018.
+    """
+    toab_all = eb_list[0]
+    atmb_all = eb_list[1]
+    surb_all = eb_list[2]
+    fig = plt.figure()
+    fig.set_size_inches(12, 22)
+    axi = plt.subplot(221)
+    plt.ylim(bottom=0)
+    title = '(a) TOA energy budget'
+    xlabel = 'R_t [W m-2]'
+    ylabel = 'Sigma (R_t) [W m-2]'
+    varlist = [toab_all[:, 0], toab_all[:, 1]]
+    plot_mm_scatter(axi, varlist, title, xlabel, ylabel)
+    axi = plt.subplot(222)
+    plt.ylim(bottom=0)
+    title = '(b) Atmospheric energy budget'
+    xlabel = 'F_a [W m-2]'
+    ylabel = 'Sigma (F_a) [W m-2]'
+    varlist = [atmb_all[:, 0], atmb_all[:, 1]]
+    plot_mm_scatter(axi, varlist, title, xlabel, ylabel)
+    axi = plt.subplot(223)
+    plt.ylim(bottom=0)
+    title = '(c) Surface energy budget'
+    xlabel = 'F_s [W m-2]'
+    ylabel = 'Sigma (F_s) [W m-2]'
+    varlist = [surb_all[:, 0], surb_all[:, 1]]
+    plot_mm_scatter(axi, varlist, title, xlabel, ylabel)
+    axi = plt.subplot(224)
+    axi.set_figsize = (50, 50)
+    plt.errorbar(x=atmb_all[:, 0],
+                 y=surb_all[:, 0],
+                 xerr=atmb_all[:, 1],
+                 yerr=surb_all[:, 1],
+                 fmt='none',
+                 ecolor=(0, 0, 0))
+    title = '(d) Atmospheric vs. Surface budget'
+    xlabel = 'F_a [W m-2]'
+    ylabel = 'F_s [W m-2]'
+    varlist = [atmb_all[:, 0], surb_all[:, 0]]
+    plot_mm_scatter(axi, varlist, title, xlabel, ylabel)
+    plt.savefig(pdir + '/scatters_variability.png')
+    plt.close(fig)
+
+
+def plot_mm_scatter(axi, varlist, title, xlabel, ylabel):
+    """Plot a multi-model scatter plot.
+
+    The function produces a scatter plot of a multi-model ensemble, with an
+    ellipse encompassing the 1sigma uncertainty around the multi-model mean.
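+    The ellipse semi-axes are the standard deviations of the two variables
+    (the larger one as the major axis), tilted by the slope of a linear
+    regression of y on x.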
+
+    Arguments:
+    ---------
+    axi: an axis identifier;
+    varlist: a list containing the array for the x and y values (they have to
+             be the same length);
+    title: a string containing the title of the plot;
+    xlabel: a string containing the x-axis label;
+    ylabel: a string containing the y-axis label;
+
+    @author: Valerio Lembo, Hamburg University, 2018.
+    """
+    xval = varlist[0]
+    yval = varlist[1]
+    modnum = len(xval)
+    axi.set_figsize = (50, 50)
+    plt.scatter(xval, yval, c=(0, 0, 0), alpha=1)
+    plt.scatter(np.nanmean(xval), np.nanmean(yval), c='red')
+    s_l, _, _, _, _ = stats.linregress(xval, yval)
+    semimaj = np.max([np.nanstd(xval), np.nanstd(yval)])
+    semimin = np.min([np.nanstd(xval), np.nanstd(yval)])
+    plot_ellipse(semimaj,
+                 semimin,
+                 phi=np.arctan(s_l),
+                 x_cent=np.nanmean(xval),
+                 y_cent=np.nanmean(yval),
+                 a_x=axi)
+    plt.title(title, fontsize=12)
+    rcParams['axes.titlepad'] = 1
+    rcParams['axes.labelpad'] = 1
+    plt.xlabel(xlabel, fontsize=14)
+    plt.ylabel(ylabel, fontsize=14)
+    d_x = 0.01 * (max(xval) - min(xval))
+    d_y = 0.01 * (max(yval) - min(yval))
+    for i_m in np.arange(modnum):
+        axi.annotate(str(i_m + 1), (xval[i_m], yval[i_m]),
+                     xytext=(xval[i_m] + d_x, yval[i_m] + d_y),
+                     fontsize=12)
+    axi.tick_params(axis='both', which='major', labelsize=12)
+    axi.margins(0.002)
+    plt.axis('auto')
+    plt.grid()
+
+
+def plot_mm_scatter_spec(axi, varlist, title, xlabel, ylabel):
+    """Plot a multi-model scatter plot ("special version").
+
+    The function produces a scatter plot of a multi-model ensemble, adding
+    dashed diagonal lines along which the sum of the x and y values is
+    constant, and an ellipse encompassing the 1sigma uncertainty around the
+    multi-model mean.
+
+    Arguments:
+    ---------
+    axi: an axis identifier;
+    varlist: a list containing the array for the x and y values (they have to
+             be the same length);
+    title: a string containing the title of the plot;
+    xlabel: a string containing the x-axis label;
+    ylabel: a string containing the y-axis label;
+
+    @author: Valerio Lembo, Hamburg University, 2018.
+    """
+    xval = varlist[0]
+    yval = varlist[1]
+    xrang = abs(max(xval) - min(xval))
+    yrang = abs(max(yval) - min(yval))
+    x_x = np.linspace(min(xval) - 0.1 * xrang, max(xval) + 0.1 * xrang, 10)
+    y_y = np.linspace(min(yval) - 0.1 * yrang, max(yval) + 0.1 * yrang, 10)
+    x_m, y_m = np.meshgrid(x_x, y_y)
+    z_m = x_m + y_m
+    c_p = plt.contour(x_m,
+                      y_m,
+                      z_m,
+                      colors='black',
+                      linestyles='dashed',
+                      linewidths=1.)
+    plt.clabel(c_p, inline=True, inline_spacing=-4, fontsize=8)
+    plot_mm_scatter(axi, varlist, title, xlabel, ylabel)
+
+
+def plot_mm_summaryscat(pdir, summary_varlist):
+    """Plot multi-model scatter plots of some key quantities.
+
+    The function produces a plot containing 6 scatter plots:
+    - (a) TOA vs. atmospheric energy budget;
+    - (b) Baroclinic efficiency vs. Intensity of LEC;
+    - (c) Vertical vs. horizontal component;
+    - (d) Indirect vs. direct method;
+    - (e) Indirect vs. emission temperature;
+    - (f) Baroclinic efficiency vs. emission temperature;
+
+    Arguments:
+    ---------
+    pdir: a plots directory;
+    summary_varlist: a list containing the quantities to be plotted as a 1D
+                     (model) array, or a 2D array (model, 2), with the first
+                     column being the mean value and the second column being
+                     the inter-annual standard deviation;
+
+    @author: Valerio Lembo, Hamburg University, 2018.
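+
+    Note: in panel (d) the indirect estimate is the sum of the horizontal
+    and vertical components shown in panel (c).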
+ """ + atmb_all = summary_varlist[0] + baroceff_all = summary_varlist[1] + horzentr_all = summary_varlist[2] + lec_all = summary_varlist[3] + matentr_all = summary_varlist[4] + te_all = summary_varlist[5] + toab_all = summary_varlist[6] + vertentr_all = summary_varlist[7] + indentr_all = horzentr_all[:, 0] + vertentr_all[:, 0] + fig = plt.figure() + fig.set_size_inches(14, 22) + axi = plt.subplot(321) + title = '(a) TOA vs. atmospheric energy budget' + xlabel = 'R_t [W m-2]' + ylabel = 'F_a [W m-2]' + varlist = [toab_all[:, 0], atmb_all[:, 0]] + plot_mm_scatter(axi, varlist, title, xlabel, ylabel) + axi = plt.subplot(322) + title = '(b) Baroclinic efficiency vs. Intensity of LEC' + xlabel = r'$\eta$' + ylabel = 'W [W/m2]' + varlist = [baroceff_all, lec_all[:, 0]] + plot_mm_scatter(axi, varlist, title, xlabel, ylabel) + axi = plt.subplot(323) + title = '(c) Vertical vs. horizontal component' + xlabel = r'S$_{hor}$ [W m-2 K-1]' + ylabel = r'S$_{ver}$ [W m-2 K-1]' + varlist = [horzentr_all[:, 0], vertentr_all[:, 0]] + plot_mm_scatter_spec(axi, varlist, title, xlabel, ylabel) + axi = plt.subplot(324) + title = '(d) Indirect vs. direct method' + xlabel = r'S$_{ind}$ [W m-2 K-1]' + ylabel = r'S$_{dir}$ [W m-2 K-1]' + varlist = [indentr_all, matentr_all[:, 0]] + plot_mm_scatter(axi, varlist, title, xlabel, ylabel) + axi.set(xlim=(min(indentr_all) - 0.003, max(indentr_all) + 0.003), + ylim=(min(matentr_all[:, 0]) - 0.003, + max(matentr_all[:, 0]) + 0.003)) + axi = plt.subplot(325) + title = '(e) Indirect vs. emission temperature' + xlabel = r'T$_E$ [K]' + ylabel = r'S$_{mat}$ [W m-2 K-1]' + varlist = [te_all, indentr_all] + plot_mm_scatter(axi, varlist, title, xlabel, ylabel) + axi = plt.subplot(326) + title = '(f) Baroclinic efficiency vs. emission temperature' + xlabel = r'T$_E$ [K]' + ylabel = r'$\eta$' + varlist = [te_all, baroceff_all] + plot_mm_scatter(axi, varlist, title, xlabel, ylabel) + oname = pdir + '/scatters_summary.png' + plt.savefig(oname) + plt.subplots_adjust(hspace=.3) + + +def plot_mm_transp(model_names, wdir, pdir): + """Plot multi-model meridional enthalpy transports. + + The function plots in three panels the total, atmospheric and oceanic + enthalpy transports, respectively. + + Arguments: + --------- + model_names: a list of model names contained in the ensemble; + wdir: a working directory; + pdir: a plots directory; + + @author: Valerio Lembo, Hamburg University, 2018. + """ + fig = plt.figure() + fig.set_size_inches(12, 22) + axi = plt.subplot(311) + yrange = [-6.75E15, 6.75E15] + plot_mm_transp_panel(model_names, wdir, axi, 'total', yrange) + axi = plt.subplot(312) + plot_mm_transp_panel(model_names, wdir, axi, 'atmos', yrange) + axi = plt.subplot(313) + yrange = [-3E15, 3E15] + plot_mm_transp_panel(model_names, wdir, axi, 'ocean', yrange) + oname = pdir + '/meridional_transp.png' + plt.savefig(oname) + plt.close(fig) + + +def plot_mm_transp_panel(model_names, wdir, axi, domn, yrange): + """Plot a meridional section of enthalpy transport from a model ensemble. + + Arguments: + --------- + model_names: a list of model names contained in the ensemble; + wdir: a working directory; + axis: the axis of the pllot; + domn: the domain (total, atmospheric or oceanic); + yrange: a range for the y-axis; + + @author: Valerio Lembo, Hamburg University, 2018. 
+ """ + axi.set_figsize = (50, 50) + for model in model_names: + tot_transp_file = (wdir + '/{}_transp_mean_{}.nc'.format(domn, model)) + name = '{}_{}'.format(domn, model) + with Dataset(tot_transp_file) as dataset: + toat = dataset.variables[name][:] + lats = dataset.variables['lat_{}'.format(model)][:] + plt.plot(np.array(lats), np.array(toat), color='black', linewidth=1.) + plt.title('(a) {} heat transports'.format(domn), fontsize=18) + plt.xlabel('Latitude [deg]', fontsize=14) + plt.ylabel('[W]', fontsize=14) + plt.tight_layout() + plt.ylim(yrange) + plt.xlim(-90, 90) + axi.tick_params(axis='both', which='major', labelsize=12) + plt.grid() + + +def pr_output(varout, filep, nc_f, nameout, latn): + """Print processed ta field to NetCDF file. + + Save fields to NetCDF, retrieving information from an existing + NetCDF file. Metadata are transferred from the existing file to the + new one. + + Arguments: + --------- + varout: the field to be stored, with shape (time,level,lat,lon); + filep: the existing dataset, from where the metadata are retrieved. + Coordinates time,level, lat and lon have to be the same dimension + as the fields to be saved to the new files; + nc_f: the name of the output file; + nameout: the name of the variable to be saved; + latn: the name of the latitude dimension; + + @author: Chris Slocum (2014), modified by Valerio Lembo (2018). + """ + fourc = fourier_coefficients + nc_fid = Dataset(filep, 'r') + w_nc_fid = Dataset(nc_f, 'w', format='NETCDF4') + w_nc_fid.description = ("Total, atmospheric and oceanic annual ", + "mean meridional heat transports") + fourc.extr_lat(nc_fid, w_nc_fid, latn) + w_nc_var = w_nc_fid.createVariable(nameout, 'f8', (latn)) + varatts(w_nc_var, nameout) + w_nc_fid.variables[nameout][:] = varout + w_nc_fid.close() + nc_fid.close() + + +def removeif(filename): + """Remove filename if it exists.""" + try: + os.remove(filename) + except OSError: + pass + + +def transport(zmean, gmean, lat): + """Integrate the energy/water mass budgets to obtain meridional transp. + + Arguments: + --------- + zmean: zonal mean input fields; + gmean: the global mean of the input fields; + lat: a latitudinal array (in degrees of latitude); + + Returns + ------- + A zonal mean meridional heat transport. + + @author: Valerio Lembo, Hamburg University, 2018. + """ + p_i = math.pi + dlat = np.zeros(len(lat)) + for i in range(len(lat) - 1): + dlat[i] = abs(lat[i + 1] - lat[i]) + dlat[len(lat) - 1] = dlat[len(lat) - 2] + zmn_ub = np.zeros((np.shape(zmean)[0], np.shape(zmean)[1])) + for index, value in enumerate(gmean): + for j_l in range(np.shape(zmean)[1]): + zmn_ub[index, j_l] = zmean[index, j_l] - value + zmn_ub[np.isnan(zmn_ub)] = 0 + cumb = np.zeros((np.shape(zmean)[0], np.shape(zmean)[1])) + transp = np.zeros((np.shape(zmean)[0], np.shape(zmean)[1])) + for j_l in range(len(lat) - 1): + cumb[:, j_l] = (-2 * np.nansum( + latwgt(lat[j_l:len(lat)], zmn_ub[:, j_l:len(lat)]), axis=1)) + r_earth = 6.371 * 10**6 + transp = 2 * p_i * cumb * r_earth * r_earth + return [zmn_ub, transp] + + +def transp_max(lat, transp, lim): + """Obtain transport peak magnitude and location from interpolation. + + Arguments: + --------- + lat: a latitudinal array; + transp: the meridional transport a 1D array (lat); + lim: limits to constrain the peak search in (necessary for ocean transp.) + + Returns + ------- + The magnitude and location of the meridional heat transports in the + atmosphere, oceans and in total, for every year. + + @author: Valerio Lembo, Hamburg University, 2018. 
+ """ + deriv = np.gradient(transp) + x_c = zerocross1d(lat, deriv) + y_i = np.zeros(2) + xc_cut = np.zeros(2) + j_p = 0 + for value in x_c: + if abs(value) <= lim: + xc_cut[j_p] = value + y_i[j_p] = interpolate.interp1d(lat, transp, kind='cubic')(value) + j_p = j_p + 1 + if j_p == 2: + break + return [xc_cut, y_i] + + +def transports_preproc(lats, yrs, lim, transp): + """Compute the peaks magnitude and locations of a meridional transport. + + This function computes the peaks magnitudes and locations recursively at + each time through the function transp_max and stores them in a list. + + Arguments: + --------- + lats: a latitudinal array; + yrs: the number of years through which iterating; + lim: the range (-lim,lim) in which the function transp_max has to search + for the peaks; + transp: the array containing the transport; + + Returns + ------- + The zonal mean meridional heat transport averaged over time, a list + containing the magnitude and location of the peaks at every timestep. + + @author: Valerio Lembo, Hamburg University, 2018. + """ + transpp = transp[1] + transp_mean = np.nanmean(transpp, axis=0) + yr_ext = [] + lat_maxm = np.zeros([2, yrs]) + tr_maxm = np.zeros([2, yrs]) + lat_max = list() + tr_max = list() + for t_t in np.arange(int(yrs)): + yr_ext = transp_max(lats, transpp[t_t, :], lim) + lat_max.append(yr_ext[0]) + tr_max.append(yr_ext[1]) + for t_t in np.arange(int(yrs)): + lat_maxm[:, t_t] = lat_max[t_t] + tr_maxm[:, t_t] = tr_max[t_t] + list_peak = [lat_maxm, tr_maxm] + return transp_mean, list_peak + + +def varatts(w_nc_var, varname): + """Add attributes to the variables, depending on name and time res. + + Arguments: + --------- + w_nc_var: a variable object; + varname: the name of the variable, among total, atmos, ocean, wmb, latent; + + @author: Valerio Lembo, Hamburg University, 2018. + """ + if varname == 'total': + w_nc_var.setncatts({ + 'long_name': "Total merid. heat transport", + 'units': "W", + 'level_desc': 'TOA' + }) + elif varname == 'atmos': + w_nc_var.setncatts({ + 'long_name': "Atmos. merid. heat transport", + 'units': "W", + 'level_desc': 'Vertically integrated' + }) + elif varname == 'ocean': + w_nc_var.setncatts({ + 'long_name': "Ocean. merid. heat transport", + 'units': "W", + 'level_desc': 'sfc' + }) + elif varname == 'wmb': + w_nc_var.setncatts({ + 'long_name': "Merid. water mass transport", + 'units': "Kg*s-1", + 'level_desc': 'sfc' + }) + elif varname == 'latent': + w_nc_var.setncatts({ + 'long_name': "Merid. latent heat transport", + 'units': "W", + 'level_desc': 'sfc' + }) + + +def zerocross1d(x_x, y_y): + """Find the zero crossing points in 1d data. + + Find the zero crossing events in a discrete data set. Linear interpolation + is used to determine the actual locations of the zero crossing between + two data points showing a change in sign. Data point which are zero + are counted in as zero crossings if a sign change occurs across them. + Note that the first and last data point will not be considered whether + or not they are zero. + + Arguments: + --------- + x_x, y_y : arrays. Ordinate and abscissa data values. + + Returns + ------- + The location at which the y_y crosses zero. + + Credits: + The PyA group (https://github.com/sczesla/PyAstronomy). + Modified by Valerio Lembo (valerio.lembo@uni-hamburg.de). + + License: + Copyright (c) 2011, PyA group. 
+ """ + indi = np.where(y_y[1:] * y_y[0:-1] < 0.0)[0] + d_x = x_x[indi + 1] - x_x[indi] + d_y = y_y[indi + 1] - y_y[indi] + z_c = -y_y[indi] * (d_x / d_y) + x_x[indi] + z_i = np.where(y_y == 0.0)[0] + z_i = z_i[np.where((z_i > 0) & (z_i < x_x.size - 1))] + z_i = z_i[np.where(y_y[z_i - 1] * y_y[z_i + 1] < 0.0)] + zzindi = np.concatenate((indi, z_i)) + z_z = np.concatenate((z_c, x_x[z_i])) + sind = np.argsort(z_z) + z_z, zzindi = z_z[sind], zzindi[sind] + return z_z diff --git a/esmvaltool/diag_scripts/thermodyn_diagtool/provenance_meta.py b/esmvaltool/diag_scripts/thermodyn_diagtool/provenance_meta.py new file mode 100644 index 0000000000..b8cb6175a1 --- /dev/null +++ b/esmvaltool/diag_scripts/thermodyn_diagtool/provenance_meta.py @@ -0,0 +1,167 @@ +"""ATTRIBUTES FOR PROVENANCE TAGGING. + +Module containing functions to create the metadata for the output files. + +The module contains the following functions: +- get_prov_map: create a record of metadata for 2D outputs; +- get_prov_transp: create a record of metadata for 1D outputs (e.g. transports) +- meta_direntr: write metadata to a file containing one of the components of + the material entropy production with the direct method; +- meta_indentr: write metadata to a file containing one of the components of + the material entropy production with the indirect method; + +@author: Valerio Lembo, University of Hamburg, 2019. +""" + +import esmvaltool.diag_scripts.shared as e +from esmvaltool.diag_scripts.shared import ProvenanceLogger + + +def get_prov_map(attr, ancestor_files): + """Create a provenance record for the 2D diagnostic outputs.""" + caption = ( + "Thermodynamic Diagnostic Tool - Monthly mean {} (lat, lon) fields" + "for model {}.".format(attr[0], attr[1])) + + record = { + 'caption': caption, + 'statistics': ['mean'], + 'domains': ['global'], + 'plot_type': ['geo'], + 'authors': ['lembo_valerio'], + 'references': ['lembo16climdyn', 'lembo19gmdd', 'lucarini14revgeop'], + 'ancestors': ancestor_files, + } + return record + + +def get_prov_transp(attr, ancestor_file): + """Create a provenance record for the 1d meridional transports.""" + caption = ("Thermodynamic Diagnostic Tool - Annual mean zonally averaged" + " meridional {} transports" + " for model {}.".format(attr[0], attr[1])) + + record = { + 'caption': caption, + 'statistics': ['mean'], + 'domains': ['global'], + 'plot_type': ['sect'], + 'authors': ['lembo_valerio'], + 'references': ['lembo16climdyn', 'lembo19gmdd', 'lucarini14revgeop'], + 'ancestors': ancestor_file, + } + return record + + +def meta_direntr(cfg, model, input_data, flist): + """Write metadata to components of the direct entropy prod maps. + + Arguments:r + - model: the name of the model; + - inlist: the list of the input filenames; + - flist: the list of the entropy filenames; + + @author: Valerio Lembo, University of Hamburg, 2019. 
+ """ + with ProvenanceLogger(cfg) as provlog: + hfls_file = e.select_metadata(input_data, + short_name='hfls', + dataset=model)[0]['filename'] + hfss_file = e.select_metadata(input_data, + short_name='hfss', + dataset=model)[0]['filename'] + hus_file = e.select_metadata(input_data, + short_name='hus', + dataset=model)[0]['filename'] + pr_file = e.select_metadata(input_data, short_name='pr', + dataset=model)[0]['filename'] + prsn_file = e.select_metadata(input_data, + short_name='prsn', + dataset=model)[0]['filename'] + ps_file = e.select_metadata(input_data, short_name='ps', + dataset=model)[0]['filename'] + rlut_file = e.select_metadata(input_data, + short_name='rlut', + dataset=model)[0]['filename'] + ts_file = e.select_metadata(input_data, short_name='ts', + dataset=model)[0]['filename'] + uas_file = e.select_metadata(input_data, + short_name='uas', + dataset=model)[0]['filename'] + vas_file = e.select_metadata(input_data, + short_name='vas', + dataset=model)[0]['filename'] + attr = ['sensible heat entropy production', model] + ancestor = [ + hfss_file, hus_file, ps_file, rlut_file, uas_file, vas_file, + ts_file + ] + record = get_prov_map(attr, ancestor) + provlog.log(flist[0], record) + attr = ['evaporation entropy production', model] + ancestor = [hfls_file, ts_file] + record = get_prov_map(attr, ancestor) + provlog.log(flist[1], record) + attr = ['rainfall precipitation entropy production', model] + ancestor = [hus_file, pr_file, prsn_file, ps_file, ts_file] + record = get_prov_map(attr, ancestor) + provlog.log(flist[2], record) + attr = ['snowfall precipitation entropy production', model] + ancestor = [hus_file, prsn_file, ps_file, ts_file] + record = get_prov_map(attr, ancestor) + provlog.log(flist[3], record) + attr = ['snow melt entropy production', model] + ancestor = [prsn_file, ts_file] + record = get_prov_map(attr, ancestor) + provlog.log(flist[4], record) + attr = ['potential energy entropy production', model] + ancestor = [hus_file, pr_file, prsn_file, ps_file, rlut_file, ts_file] + record = get_prov_map(attr, ancestor) + provlog.log(flist[5], record) + + +def meta_indentr(cfg, model, input_data, flist): + """Write metadata to components of the indirect entropy prod maps. + + Arguments: + --------- + - model: the name of the model; + - inlist: the list of the input filenames; + - flist: the list of the entropy filenames; + + @author: Valerio Lembo, University of Hamburg, 2019. 
+ """ + with ProvenanceLogger(cfg) as provlog: + rlds_file = e.select_metadata(input_data, + short_name='rlds', + dataset=model)[0]['filename'] + rlus_file = e.select_metadata(input_data, + short_name='rlus', + dataset=model)[0]['filename'] + rlut_file = e.select_metadata(input_data, + short_name='rlut', + dataset=model)[0]['filename'] + rsds_file = e.select_metadata(input_data, + short_name='rsds', + dataset=model)[0]['filename'] + rsdt_file = e.select_metadata(input_data, + short_name='rsdt', + dataset=model)[0]['filename'] + rsus_file = e.select_metadata(input_data, + short_name='rsus', + dataset=model)[0]['filename'] + rsut_file = e.select_metadata(input_data, + short_name='rsut', + dataset=model)[0]['filename'] + ts_file = e.select_metadata(input_data, short_name='ts', + dataset=model)[0]['filename'] + attr = ['horizontal entropy production', model] + ancestor = [rlut_file, rsdt_file, rsut_file] + record = get_prov_map(attr, ancestor) + provlog.log(flist[0], record) + attr = ['vertical entropy production', model] + ancestor = [ + rlds_file, rlus_file, rlut_file, rsds_file, rsus_file, ts_file + ] + record = get_prov_map(attr, ancestor) + provlog.log(flist[1], record) diff --git a/esmvaltool/diag_scripts/thermodyn_diagtool/thermodyn_diagnostics.py b/esmvaltool/diag_scripts/thermodyn_diagtool/thermodyn_diagnostics.py new file mode 100644 index 0000000000..2d1d03817b --- /dev/null +++ b/esmvaltool/diag_scripts/thermodyn_diagtool/thermodyn_diagnostics.py @@ -0,0 +1,580 @@ +r"""MAIN PROGRAM. + +TheDiaTo - The diagnostic tool for climate system thermodynamics. + +Author +Valerio Lembo +(Meteorological Institute, Hamburg University - valerio.lembo@uni-hamburg.de) + +Contributors +Frank Lunkeit +(Meteorological Insitute, Hamburg University - f.lunkeit@uni-hamburg.de) +Nikolay Koldunov +(MARUM/AWI, nikolay.koldunov@awi.de, Germany) + +Project +CRC - TRR 181 "Energy transfers in Atmosphere and Ocean" + +############################################################################# + +SOFTWARE DESCRIPTION + +The tool consists of three modules; one for the +computation of energy budgets and transports, one for the water mass budgets +(and related meridional transports), one for the Lorenz Energy Cycle (LEC), +one for the material entropy production. + +The first module is run by default, the others are optional. If the lsm option +is set to true, the module 1 and the module 2 will be run with additional +separate results over land and oceans. The land-sea mask is provided by the +ESMValTool preprocessor. + +- MODULE 1 (default) +Earth's energy budgets from radiative and heat fluxes at Top-of-Atmosphere, +at the surface and in the atmosphere (as a residual). +Meridional transports, magnitude and location of the peaks in each +hemisphere (only for heat transports) are also computed. +The baroclinic efficiency is computed from TOA energy budgets, emission +temperature (in turn retrieved from OLR) and near-surface temperature. + +- MODULE 2 (optional) +Water mass and latent energy budgets and meridional transports are computed +from latent heat fluxes, snowfall and rainfall precipitation fluxes. Magnitude +and location of the peaks in each hemisphere (only for heat transports) are +also computed, as for module 1. + +- MODULE 3 (optional) +The Lorenz Energy Cycle (LEC) is computed in spectral components from near- +surface temperatures, temperatures and the three components of velocities +over pressure levels. 
+The storage and conversion terms are directly computed; the sources and
+sinks are retrieved as residuals.
+Components are grouped into a zonal mean, stationary and transient eddy
+part.
+
+- MODULE 4 (optional)
+The material entropy production is computed using the indirect method, the
+direct method or both (following Lucarini et al., 2014).
+For the indirect method a vertical and a horizontal component are provided.
+For the direct method, all components are combined, related to the
+hydrological cycle (attributable to evaporation, rainfall and snowfall
+precipitation, phase changes and potential energy of the droplet), to the
+sensible heat fluxes and to kinetic energy dissipation. For the latter the
+LEC computation is required, given that the strength of the LEC can be
+considered as equal to the kinetic energy dissipated to heating. If the option
+for module 3 is set to false, a reference value for the material entropy
+production related to the kinetic energy dissipation is provided.
+
+PREREQUISITES
+
+The program shares the same prerequisites with the overall ESMValTool
+architecture
+(see https://docs.esmvaltool.org/en/latest/quickstart/installation.html)
+
+USAGE
+
+1: Obtain the datasets: the program accepts the following variables as
+   input for the computations:
+   Monthly mean resolution or higher (default usage):
+   - TOA shortwave radiation downwards;
+   - TOA shortwave radiation upwards;
+   - TOA longwave radiation upwards (OLR);
+   - Surface shortwave radiation downwards;
+   - Surface shortwave radiation upwards;
+   - Surface longwave radiation downwards;
+   - Surface longwave radiation upwards;
+   - Surface turbulent latent heat fluxes;
+   - Surface turbulent sensible heat fluxes;
+   Monthly mean resolution or higher (for 'wat' set to 'true' and/or for
+   'entr' set to 'true' with 'met' set to '2' or '3'):
+   - Precipitation flux;
+   - Snowfall flux;
+   Monthly mean resolution or higher (for 'entr' set to 'true' and 'met' set
+   to '2' or '3'):
+   - Surface air pressure;
+   - Surface temperature;
+   - Near-surface (or 10m) zonal velocity;
+   - Near-surface (or 10m) meridional velocity;
+   - Specific humidity (on pressure levels);
+   Daily mean resolution or higher (for 'lec' set to 'true'):
+   - Near-surface temperature;
+   - Near-surface (or 10m) zonal velocity;
+   - Near-surface (or 10m) meridional velocity;
+   - Air temperature (on pressure levels);
+   - Zonal velocity (on pressure levels);
+   - Meridional velocity (on pressure levels);
+   - Vertical velocity (on pressure levels);
+   Fixed dataset (for 'lec' set to 'true'):
+   - Land-sea mask (binary or percentage);
+   Data on a regular lon-lat grid are accepted, with a CMOR-compliant
+   coordinate system. The ESMValTool preprocessor will take care of
+   converting known grids and recognized datasets to CMOR standards. For
+   a list of known formats, see
+   https://docs.esmvaltool.org/en/latest/input.html#observations
+
+2: A configuration template is available in the ESMValTool release. Set your
+   own paths to local directories here. Input datasets are read in MODELPATH,
+   MODELPATH2, OBSPATH or OBSPATH2; output datasets are stored in WORKPATH,
+   plots in PLOTPATH (refer to the manual for ESMValTool).
+
+3: Go to the recipe file in ~/recipes/recipe_thermodyn_diagtool.yml.
+   Set the namelist with the datasets that you need, following the ESMValTool
+   naming convention. Here you can also set the length of the dataset you want
+   to subset.
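+   For instance, the diagnostic section of the recipe might contain the
+   snippet below (illustrative values; the option names are the ones read
+   by this script, and they are described right after):
+
+       scripts:
+         thermodynamics:
+           script: thermodyn_diagtool/thermodyn_diagnostics.py
+           lsm: true
+           wat: true
+           lec: false
+           entr: true
+           met: 3
+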
+   In the 'scripts' section, set the options with the modules that you want
+   the program to use:
+   - wat: if set to true, the program will compute the water mass and
+     latent energy budget;
+   - lec: if set to true, the program will compute the Lorenz Energy Cycle
+     (LEC) averaged on each year;
+   - entr: if set to true, the program will compute the material entropy
+     production (MEP);
+   - met: if set to 1, the program will compute the MEP with the indirect
+     method, if set to 2 with the direct method, if set to 3, both
+     methods will be computed and compared with each other;
+   In the 'variables' subsection of the 'diagnostics' section, you have to
+   comment out the fields that are not needed, depending on the options set
+   in the 'scripts' section. Energy budget and transport computations are
+   performed by default, and the required fields have to be provided in any
+   case.
+
+4: Run the tool by typing:
+   esmvaltool run esmvaltool/recipes/recipe_thermodyn_diagtool.yml
+
+OUTPUT
+
+The output directory contains the following NetCDF files:
+  - (output directory):
+      atmos_transp_mean_<model>.nc
+      latent_transp_mean_<model>.nc
+      ocean_transp_mean_<model>.nc
+      total_transp_mean_<model>.nc
+      wmb_transp_mean_<model>.nc
+
+    contain annual mean meridional sections of heat transports in the
+    atmosphere, oceans, and as a total; latent energy transports and water
+    mass transports;
+
+  - (output directory)/<model>:
+      <model>_atmb.nc
+      (<model>_latent.nc; if wat is set to true)
+      <model>_surb.nc
+      <model>_toab.nc
+      (<model>_wmb.nc; if wat is set to true)
+
+    contain annual mean 2D fields of energy budget, latent heat and water
+    mass budgets;
+
+      <model>_barocEff.nc
+
+    contains the evolution of annual mean baroclinic efficiency
+    (Lucarini et al., 2011).
+
+    (if entr is set to true):
+      <model>_evap_entr.nc (if met is set to 2 or 3)
+      <model>_horizEntropy.nc (if met is set to 1 or 3)
+      <model>_pot_drop_entr.nc (if met is set to 2 or 3)
+      <model>_rain_entr.nc (if met is set to 2 or 3)
+      <model>_sens_entr.nc (if met is set to 2 or 3)
+      <model>_snow_entr.nc (if met is set to 2 or 3)
+      <model>_snowmelt_entr.nc (if met is set to 2 or 3)
+      <model>_verticalEntropy.nc (if met is set to 1 or 3)
+    contain the evolution of annual mean components of the material entropy
+    production.
+
+  - (plots directory):
+      meridional_transp.png: contains the model inter-comparison of the
+      zonally averaged meridional heat transports (total, atmospheric and
+      oceanic);
+      scatters_summary.png: contains the scatter plots of
+      model intercomparisons of various metrics retrieved in the program;
+      scatters_variability.png: contains scatter plots of model
+      intercomparisons between TOA, atmospheric and surface global mean
+      energy budgets and their inter-annual variability;
+
+  - (plots directory)/<model>:
+      <model>_atmb_timeser.png: the atmospheric budget annual mean
+      global and hemispheric time series;
+      <model>_energy_climap.png: the TOA, atmospheric and surface
+      climatological mean fields;
+      <model>_heat_transp.png: the meridional sections of total,
+      atmospheric and oceanic meridional heat transports (implied from energy
+      budgets);
+      <model>_latent_climap.png: the climatological mean latent heat
+      field;
+      <model>_latent_timeser.png: the latent heat annual mean global and
+      hemispheric evolutions;
+      <model>_latent_transp.png: the meridional section of annual mean
+      meridional latent heat transport;
+      <model>_lec_timeser.png: the time series of annual mean LEC
+      intensity;
+      <model>_scatpeak.png: the scatter plots of atmospheric vs.
oceanic
+      peak magnitude in both hemispheres;
+      <model>_sevap_climap.png: the annual mean field of material
+      entropy production due to evaporation;
+      <model>_smelt_climap.png: the annual mean field of material
+      entropy production due to snow melting;
+      <model>_spotp_climap.png: the annual mean field of material
+      entropy production due to potential energy of the droplet;
+      <model>_srain_climap.png: the annual mean field of material
+      entropy production due to rainfall precipitation;
+      <model>_ssens_climap.png: the annual mean field of material
+      entropy production due to sensible heat fluxes;
+      <model>_ssnow_climap.png: the annual mean field of material
+      entropy production due to snowfall precipitation;
+      <model>_surb_timeser.png: the surface budget annual mean
+      global and hemispheric time series;
+      <model>_sver_climap.png: the annual mean field of vertical
+      material entropy production through the indirect method;
+      <model>_toab_timeser.png: the TOA budget annual mean
+      global and hemispheric time series;
+      <model>_wmb_climap.png: the climatological mean water mass budget
+      field;
+      <model>_wmb_timeser.png: the water mass annual mean global and
+      hemispheric evolutions;
+      <model>_wmb_transp.png: the meridional section of annual mean
+      meridional water mass transport;
+
+  - (plots directory)/<model>/LEC_results:
+      <model>_<year>_lec_diagram.png: the flux diagram for the annual
+      mean LEC cycle in a specific year;
+      <model>_<year>_lec_table.txt: the table containing the storage and
+      conversion terms for the annual mean LEC cycle in a specific year;
+
+The file log.txt in the '$WORK_PATH/recipe_thermodyn_diagtool_date_hour/run'
+sub-directory contains the values for the metrics and all useful information
+for immediate model intercomparison.
+
+
+#############################################################################
+
+20170803-lembo_valerio: modified header with description and caveats
+20170629-koldunov_nikolay: atmospheric budgets diagnostics written
+20180524-lembo_valerio: first complete working thermodynamics diagnostics
+20190325-lembo_valerio: complete updated version for ESMValTool v2.0b
+20191030-lembo_valerio: updated ingestion of input fields and several minor
+    fixings
+20191113-lembo_valerio: updated documentation
+20210211-lembo_valerio: updated documentation
+#############################################################################
+"""
+
+# New packages for version 2.0 of ESMValTool
+import logging
+import os
+import warnings
+
+import numpy as np
+
+import esmvaltool.diag_scripts.shared as e
+from esmvaltool.diag_scripts.shared import ProvenanceLogger
+from esmvaltool.diag_scripts.thermodyn_diagtool import (computations,
+                                                        lorenz_cycle, mkthe,
+                                                        plot_script,
+                                                        provenance_meta)
+
+warnings.filterwarnings("ignore", message="numpy.dtype size changed")
+logger = logging.getLogger(os.path.basename(__file__))
+
+
+def main(cfg):
+    """Execute the program.
+
+    Arguments:
+    ---------
+    cfg: metadata containing directory paths, preprocessed input dataset
+    filenames and user-defined options, passed by ESMValTool preprocessor.
+
+    @author: Valerio Lembo, Hamburg University, 2018.
+ """ + provlog = ProvenanceLogger(cfg) + lorenz = lorenz_cycle + comp = computations + logger.info('Entering the diagnostic tool') + # Load paths + wdir_up = cfg['work_dir'] + pdir_up = cfg['plot_dir'] + input_data = cfg['input_data'].values() + logger.info('Work directory: %s \n', wdir_up) + logger.info('Plot directory: %s \n', pdir_up) + plotsmod = plot_script + data = e.Datasets(cfg) + logger.debug(data) + models = data.get_info_list('dataset') + model_names = list(set(models)) + model_names.sort() + logger.info(model_names) + varnames = data.get_info_list('short_name') + curr_vars = list(set(varnames)) + logger.debug(curr_vars) + # load user-defined options + lsm = str(cfg['lsm']) + wat = str(cfg['wat']) + lec = str(cfg['lec']) + entr = str(cfg['entr']) + met = str(cfg['met']) + flags = [wat, lec, entr, met] + # Initialize multi-model arrays + modnum = len(model_names) + te_all = np.zeros(modnum) + toab_all = np.zeros([modnum, 2]) + toab_oc_all = np.zeros(modnum) + toab_la_all = np.zeros(modnum) + atmb_all = np.zeros([modnum, 2]) + atmb_oc_all = np.zeros(modnum) + atmb_la_all = np.zeros(modnum) + surb_all = np.zeros([modnum, 2]) + surb_oc_all = np.zeros(modnum) + surb_la_all = np.zeros(modnum) + wmb_all = np.zeros([modnum, 2]) + wmb_oc_all = np.zeros(modnum) + wmb_la_all = np.zeros(modnum) + latent_all = np.zeros([modnum, 2]) + latent_oc_all = np.zeros(modnum) + latent_la_all = np.zeros(modnum) + baroc_eff_all = np.zeros(modnum) + lec_all = np.zeros([modnum, 2]) + horzentr_all = np.zeros([modnum, 2]) + vertentr_all = np.zeros([modnum, 2]) + matentr_all = np.zeros([modnum, 2]) + irrevers_all = np.zeros(modnum) + diffentr_all = np.zeros([modnum, 2]) + logger.info("Entering main loop\n") + i_m = 0 + for model in model_names: + # Load paths to individual models output and plotting directories + wdir = os.path.join(wdir_up, model) + pdir = os.path.join(pdir_up, model) + os.makedirs(wdir) + os.makedirs(pdir) + aux_file = wdir + '/aux.nc' + te_ymm_file, te_gmean_constant, te_file = mkthe.init_mkthe_te( + model, wdir, input_data) + te_all[i_m] = te_gmean_constant + logger.info('Computing energy budgets\n') + in_list, eb_gmean, eb_file, toab_ymm_file = comp.budgets( + model, wdir, aux_file, input_data) + prov_rec = provenance_meta.get_prov_map( + ['TOA energy budgets', model], + [in_list[4], in_list[6], in_list[7]]) + provlog.log(eb_file[0], prov_rec) + prov_rec = provenance_meta.get_prov_map( + ['atmospheric energy budgets', model], [ + in_list[0], in_list[1], in_list[2], in_list[3], in_list[4], + in_list[5], in_list[6], in_list[7], in_list[8] + ]) + provlog.log(eb_file[1], prov_rec) + prov_rec = provenance_meta.get_prov_map( + ['surface energy budgets', model], [ + in_list[0], in_list[1], in_list[2], in_list[3], in_list[5], + in_list[7] + ]) + provlog.log(eb_file[2], prov_rec) + toab_all[i_m, 0] = np.nanmean(eb_gmean[0]) + toab_all[i_m, 1] = np.nanstd(eb_gmean[0]) + atmb_all[i_m, 0] = np.nanmean(eb_gmean[1]) + atmb_all[i_m, 1] = np.nanstd(eb_gmean[1]) + surb_all[i_m, 0] = np.nanmean(eb_gmean[2]) + surb_all[i_m, 1] = np.nanstd(eb_gmean[2]) + logger.info('Global mean emission temperature: %s\n', + te_gmean_constant) + logger.info('TOA energy budget: %s\n', toab_all[i_m, 0]) + logger.info('Atmospheric energy budget: %s\n', atmb_all[i_m, 0]) + logger.info('Surface energy budget: %s\n', surb_all[i_m, 0]) + logger.info('Done\n') + baroc_eff_all[i_m] = comp.baroceff(model, wdir, aux_file, + toab_ymm_file, te_ymm_file) + logger.info('Baroclinic efficiency (Lucarini et al., 2011): %s\n', + 
baroc_eff_all[i_m]) + logger.info('Running the plotting module for the budgets\n') + plotsmod.balances(cfg, wdir_up, pdir, + [eb_file[0], eb_file[1], eb_file[2]], + ['toab', 'atmb', 'surb'], model) + logger.info('Done\n') + # Water mass budget + if wat == 'True': + (wm_file, wmb_all[i_m, 0], wmb_all[i_m, 1], latent_all[i_m, 0], + latent_all[i_m, 1]) = compute_water_mass_budget( + cfg, wdir_up, pdir, model, wdir, input_data, flags, aux_file) + if lsm == 'True': + sftlf_files = e.select_metadata(input_data, short_name='sftlf', + dataset=model) + if model in ['HadGEM3-GC31-LL', 'CNRM-ESM2-1']: + sftlf_files = e.select_metadata(sftlf_files, + variable_group='sftlf_piC') + else: + sftlf_files = e.select_metadata(sftlf_files, + variable_group='sftlf_other') + sftlf_fx = sftlf_files[0]['filename'] + logger.info('Computing energy budgets over land and oceans\n') + toab_oc_all[i_m], toab_la_all[i_m] = compute_land_ocean( + model, wdir, eb_file[0], sftlf_fx, 'toab') + atmb_oc_all[i_m], atmb_la_all[i_m] = compute_land_ocean( + model, wdir, eb_file[1], sftlf_fx, 'atmb') + surb_oc_all[i_m], surb_la_all[i_m] = compute_land_ocean( + model, wdir, eb_file[2], sftlf_fx, 'surb') + if wat == 'True': + logger.info('Computing water mass and latent energy' + ' budgets over land and oceans\n') + wmb_oc_all[i_m], wmb_la_all[i_m] = compute_land_ocean( + model, wdir, wm_file[0], sftlf_fx, 'wmb') + latent_oc_all[i_m], latent_la_all[i_m] = compute_land_ocean( + model, wdir, wm_file[1], sftlf_fx, 'latent') + logger.info('Done\n') + if lec == 'True': + logger.info('Computation of the Lorenz Energy ' + 'Cycle (year by year)\n') + _, _ = mkthe.init_mkthe_lec(model, wdir, input_data) + lect = lorenz.preproc_lec(model, wdir, pdir, input_data) + plotsmod.lec_plot(model, pdir, lect) + lec_all[i_m, 0] = np.nanmean(lect) + lec_all[i_m, 1] = np.nanstd(lect) + logger.info( + 'Intensity of the annual mean Lorenz Energy ' + 'Cycle: %s\n', lec_all[i_m, 0]) + logger.info('Done\n') + else: + lect = np.repeat(2.0, len(eb_gmean[0])) + lec_all[i_m, 0] = 2.0 + lec_all[i_m, 1] = 0.2 + if entr == 'True': + if met in {'1', '3'}: + logger.info('Computation of the material entropy production ' + 'with the indirect method\n') + indentr_list = [te_file, eb_file[0]] + horz_mn, vert_mn, horzentr_file, vertentr_file = comp.indentr( + model, wdir, indentr_list, input_data, aux_file, + eb_gmean[0]) + listind = [horzentr_file, vertentr_file] + provenance_meta.meta_indentr(cfg, model, input_data, listind) + horzentr_all[i_m, 0] = np.nanmean(horz_mn) + horzentr_all[i_m, 1] = np.nanstd(horz_mn) + vertentr_all[i_m, 0] = np.nanmean(vert_mn) + vertentr_all[i_m, 1] = np.nanstd(vert_mn) + logger.info( + 'Horizontal component of the material entropy ' + 'production: %s\n', horzentr_all[i_m, 0]) + logger.info( + 'Vertical component of the material entropy ' + 'production: %s\n', vertentr_all[i_m, 0]) + logger.info('Done\n') + logger.info('Running the plotting module for the material ' + 'entropy production (indirect method)\n') + plotsmod.entropy(pdir, vertentr_file, 'sver', + 'Vertical entropy production', model) + logger.info('Done\n') + if met in {'2', '3'}: + matentr, irrevers, entr_list = comp.direntr( + logger, model, wdir, input_data, aux_file, te_file, lect, + flags) + provenance_meta.meta_direntr(cfg, model, input_data, entr_list) + matentr_all[i_m, 0] = matentr + if met in {'3'}: + diffentr = (float(np.nanmean(vert_mn)) + + float(np.nanmean(horz_mn)) - matentr) + logger.info('Difference between the two ' + 'methods: %s\n', diffentr) + 
diffentr_all[i_m, 0] = diffentr
+                logger.info('Degree of irreversibility of the '
+                            'system: %s\n', irrevers)
+                irrevers_all[i_m] = irrevers
+                logger.info('Running the plotting module for the material '
+                            'entropy production (direct method)\n')
+                plotsmod.init_plotentr(model, pdir, entr_list)
+                logger.info('Done\n')
+        os.remove(te_file)
+        os.remove(te_ymm_file)
+        logger.info('Done for model: %s \n', model)
+        i_m = i_m + 1
+    logger.info('I will now start multi-model plots')
+    logger.info('Meridional heat transports\n')
+    plotsmod.plot_mm_transp(model_names, wdir_up, pdir_up)
+    logger.info('Scatter plots')
+    summary_varlist = [
+        atmb_all, baroc_eff_all, horzentr_all, lec_all, matentr_all, te_all,
+        toab_all, vertentr_all
+    ]
+    plotsmod.plot_mm_summaryscat(pdir_up, summary_varlist)
+    logger.info('Scatter plots for inter-annual variability of'
+                ' some quantities')
+    eb_list = [toab_all, atmb_all, surb_all]
+    plotsmod.plot_mm_ebscatter(pdir_up, eb_list)
+    logger.info("The diagnostic has finished. Now closing...\n")
+
+
+def compute_water_mass_budget(cfg, wdir_up, pdir, model, wdir, input_data,
+                              flags, aux_file):
+    """Initialise computations of the water mass and latent heat budget.
+
+    This function calls the functions for the retrieval of the water mass
+    and latent energy budgets.
+
+    Arguments:
+    ---------
+    cfg: the configuration dictionary, containing metadata to handle input
+         files;
+    wdir_up: the work directory;
+    pdir: the directory for the plots;
+    model: the name of the model;
+    wdir: the work directory of the specific model;
+    input_data: the metadata of the variables found in the input directory;
+    flags: a list with user options;
+    aux_file: the name of an auxiliary file;
+
+    Returns
+    -------
+    Time mean and standard deviations of the water mass and latent
+    heat budgets.
+
+    @author: Valerio Lembo, Hamburg University, 2018.
+    """
+    logger.info('Computing water mass and latent energy budgets\n')
+    aux_list = mkthe.init_mkthe_wat(model, wdir, input_data, flags)
+    wm_gmean, wm_file = computations.wmbudg(model, wdir, aux_file, input_data,
+                                            aux_list)
+    wm_time_mean = np.nanmean(wm_gmean[0])
+    wm_time_std = np.nanstd(wm_gmean[0])
+    logger.info('Water mass budget: %s\n', wm_time_mean)
+    latent_time_mean = np.nanmean(wm_gmean[1])
+    latent_time_std = np.nanstd(wm_gmean[1])
+    logger.info('Latent energy budget: %s\n', latent_time_mean)
+    logger.info('Done\n')
+    logger.info('Plotting the water mass and latent energy budgets\n')
+    plot_script.balances(cfg, wdir_up, pdir, [wm_file[0], wm_file[1]],
+                         ['wmb', 'latent'], model)
+    logger.info('Done\n')
+    for filen in aux_list:
+        os.remove(filen)
+    return (wm_file, wm_time_mean, wm_time_std, latent_time_mean,
+            latent_time_std)
+
+
+def compute_land_ocean(model, wdir, filein, sftlf_fx, name):
+    """Initialise computations of the budgets over land and ocean.
+
+    This function calls the function for the average of budgets over land and
+    ocean.
+
+    Arguments:
+    ---------
+    model: the name of the model;
+    wdir: the work directory of the specific model;
+    filein: a file containing the budget to be averaged over land and ocean;
+    sftlf_fx: a file containing the model-specific land-sea mask;
+    name: the name of the budget to be averaged;
+
+    Returns
+    -------
+    Time means of the budgets over land and ocean.
+
+    @author: Valerio Lembo, Hamburg University, 2018.
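+
+    Note: the same routine serves the energy (toab, atmb, surb), the water
+    mass (wmb) and the latent heat (latent) budgets; only 'name' changes.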
+ """ + ocean_mean, land_mean = computations.landoc_budg(model, wdir, filein, + sftlf_fx, name) + logger.info('%s budget over oceans: %s\n', name, ocean_mean) + logger.info('%s budget over land: %s\n', name, land_mean) + return (ocean_mean, land_mean) + + +if __name__ == '__main__': + with e.run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/validation.py b/esmvaltool/diag_scripts/validation.py index aad9f53742..52c8d1a561 100644 --- a/esmvaltool/diag_scripts/validation.py +++ b/esmvaltool/diag_scripts/validation.py @@ -1,11 +1,8 @@ -""" -Validation Diagnostic - -This diagnostic uses two datasets (control and experiment), -applies operations on their data, and plots one against the other. -It can optionally use a number of OBS, OBS4MIPS datasets. +"""Validation Diagnostic. -This diagnostic uses CMIP5 data; to switch to CMIP6 change _CMIP_TYPE +This diagnostic uses two datasets (control and experiment), applies +operations on their data, and plots one against the other. It can +optionally use a number of OBS, OBS4MIPS datasets. """ import logging @@ -13,37 +10,89 @@ import iris import iris.analysis.maths as imath +import iris.coord_categorisation import iris.quickplot as qplt import matplotlib.pyplot as plt import numpy as np +from esmvalcore.preprocessor import extract_region, extract_season -from esmvaltool.diag_scripts.shared import (apply_supermeans, - get_control_exper_obs, - group_metadata, run_diagnostic) -from esmvaltool.preprocessor import extract_region, extract_season +from esmvaltool.diag_scripts.shared import ( + apply_supermeans, + get_control_exper_obs, + group_metadata, + run_diagnostic, +) +from esmvaltool.diag_scripts.shared._base import ProvenanceLogger logger = logging.getLogger(os.path.basename(__file__)) -_CMIP_TYPE = 'CMIP5' - - -def plot_contour(cube, plt_title, file_name): - """Plot a contour with iris.quickplot (qplot)""" +def _get_provenance_record(cfg, plot_file, caption, loc): + """Create a provenance record describing the diagnostic data and plot.""" + ancestor_files = [] + for dataset in cfg['input_data'].values(): + if (dataset['alias'] in plot_file and + dataset['short_name'] in plot_file): + ancestor_files.append(dataset['filename']) + record = { + 'caption': caption, + 'statistics': ['mean'], + 'domains': ['global'], + 'plot_types': ['map', 'metrics'], + 'authors': [ + 'predoi_valeriu', + ], + 'references': [], + 'plot_file': plot_file, + 'ancestors': ancestor_files, + } + + p_cfg = {} + p_cfg['run_dir'] = loc + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(plot_file, record) + + +def plot_contour(cube, cfg, plt_title, file_name): + """Plot a contour with iris.quickplot (qplot).""" if len(cube.shape) == 2: - qplt.contourf(cube, cmap='RdYlBu_r', bbox_inches='tight') + qplt.contourf(cube, cmap='RdYlBu_r') else: - qplt.contourf(cube[0], cmap='RdYlBu_r', bbox_inches='tight') + qplt.contourf(cube[0], cmap='RdYlBu_r') plt.title(plt_title) plt.gca().coastlines() plt.tight_layout() plt.savefig(file_name) plt.close() - - -def plot_latlon_cubes(cube_1, cube_2, cfg, data_names, obs_name=None): - """ - Plot lat-lon vars for control, experiment, and obs + _get_provenance_record(cfg, file_name, + plt_title, loc=os.path.basename(file_name)) + + +def save_plotted_cubes(cube, cfg, plot_name): + """Save cubes that were plotted to disk.""" + if "save_cubes" in cfg: + if cfg["save_cubes"]: + save_name = plot_name.replace("png", "nc") + save_path = os.path.join(cfg['work_dir'], save_name) + iris.save(cube, save_path) + 
parsed_file_name = plot_name.replace(".png", "").replace("_", " ") + prov_name = "Output file: " + parsed_file_name + # files are overwritten once; provenance fails second time + try: + _get_provenance_record(cfg, save_path, + prov_name, + loc=os.path.dirname(save_path)) + except KeyError: + pass + + +def plot_latlon_cubes(cube_1, + cube_2, + cfg, + data_names, + obs_name=None, + season=None): + """Plot lat-lon vars for control, experiment, and obs. Also plot Difference plots (control-exper, control-obs) cube_1: first cube (dataset: dat1) @@ -51,62 +100,132 @@ def plot_latlon_cubes(cube_1, cube_2, cfg, data_names, obs_name=None): cfg: configuration dictionary data_names: var + '_' + dat1 + '_vs_' + dat2 """ - plot_name = cfg['analysis_type'] + '_' + data_names + '.png' - plot_title = cfg['analysis_type'] + ': ' + data_names + if not season: + plot_name = "_".join([cfg['analysis_type'], data_names]) + '.png' + plot_title = "alltime " + cfg['analysis_type'] + ': ' + data_names + plot_file_path = os.path.join(cfg['plot_dir'], "alltime", + 'Difference_' + plot_name) + else: + plot_name = "_".join([cfg['analysis_type'], data_names, season]) + \ + '.png' + plot_title = season + " " + cfg['analysis_type'] + ': ' + data_names + plot_file_path = os.path.join(cfg['plot_dir'], season, + 'Difference_' + plot_name) cubes = [cube_1, cube_2] # plot difference: cube_1 - cube_2; use numpy.ma.abs() diffed_cube = imath.subtract(cube_1, cube_2) - plot_contour(diffed_cube, 'Difference ' + plot_title, - os.path.join(cfg['plot_dir'], 'Difference_' + plot_name)) + + plot_contour(diffed_cube, cfg, 'Difference ' + plot_title, plot_file_path) + save_plotted_cubes(diffed_cube, cfg, 'Difference_' + plot_name) # plot each cube var = data_names.split('_')[0] if not obs_name: - cube_names = [data_names.split('_')[1], data_names.split('_')[3]] + cube_names = [ + data_names.replace(f'{var}_', '').split('_vs_')[i] for i in + range(2) + ] for cube, cube_name in zip(cubes, cube_names): - plot_contour( - cube, cube_name + ' ' + cfg['analysis_type'] + ' ' + var, - os.path.join(cfg['plot_dir'], cube_name + '_' + var + '.png')) + if not season: + plot_file_path = os.path.join( + cfg['plot_dir'], "alltime", + "_".join([cube_name, var]) + ".png") + plot_contour(cube, cfg, + " ".join([cube_name, cfg['analysis_type'], + var]), plot_file_path) + else: + plot_file_path = os.path.join( + cfg['plot_dir'], season, + "_".join([cube_name, var, season]) + ".png") + plot_contour( + cube, cfg, + " ".join([season, cube_name, cfg['analysis_type'], + var]), plot_file_path) + save_plotted_cubes(cube, cfg, os.path.basename(plot_file_path)) else: # obs is always cube_2 - plot_contour( - cube_2, obs_name + ' ' + cfg['analysis_type'] + ' ' + var, - os.path.join(cfg['plot_dir'], obs_name + '_' + var + '.png')) + if not season: + plot_file_path = os.path.join(cfg['plot_dir'], "alltime", + "_".join([obs_name, var]) + ".png") + plot_contour(cube_2, cfg, + " ".join([obs_name, cfg['analysis_type'], + var]), plot_file_path) + else: + plot_file_path = os.path.join( + cfg['plot_dir'], season, + "_".join([obs_name, var, season]) + ".png") + plot_contour( + cube_2, cfg, " ".join([season, obs_name, cfg['analysis_type'], + var]), plot_file_path) + save_plotted_cubes(cube_2, cfg, os.path.basename(plot_file_path)) def plot_zonal_cubes(cube_1, cube_2, cfg, plot_data): - """Plot cubes data vs latitude or longitude when zonal meaning""" + """Plot cube data vs latitude or longitude after zonal or meridional averaging.""" + # xcoordinate: latitude or longitude (str) data_names,
xcoordinate, period = plot_data var = data_names.split('_')[0] - cube_names = [data_names.split('_')[1], data_names.split('_')[3]] + cube_names = data_names.replace(var + '_', '').split('_vs_') lat_points = cube_1.coord(xcoordinate).points plt.plot(lat_points, cube_1.data, label=cube_names[0]) plt.plot(lat_points, cube_2.data, label=cube_names[1]) + plt.title(f'Annual Climatology of {var}' if period == 'alltime' + else f'{period} of {var}') if xcoordinate == 'latitude': - plt.title(period + ' Zonal Mean for ' + var + ' ' + data_names) + axis = plt.gca() + axis.set_xticks([-60, -30, 0, 30, 60], + labels=['60\N{DEGREE SIGN} S', + '30\N{DEGREE SIGN} S', + '0\N{DEGREE SIGN}', + '30\N{DEGREE SIGN} N', + '60\N{DEGREE SIGN} N']) elif xcoordinate == 'longitude': - plt.title(period + ' Meridional Mean for ' + var + ' ' + data_names) + axis = plt.gca() + axis.set_xticks([0, 60, 120, 180, 240, 300, 360], + labels=['0\N{DEGREE SIGN} E', + '60\N{DEGREE SIGN} E', + '120\N{DEGREE SIGN} E', + '180\N{DEGREE SIGN} E', + '240\N{DEGREE SIGN} E', + '300\N{DEGREE SIGN} E', + '0\N{DEGREE SIGN} E']) plt.xlabel(xcoordinate + ' (deg)') - plt.ylabel(var) + plt.ylabel(f'{var} [{str(cube_1.units)}]') plt.tight_layout() plt.grid() plt.legend() + png_name = f'{xcoordinate}_{period}_{data_names}.png' if xcoordinate == 'latitude': - png_name = 'Zonal_Mean_' + xcoordinate + '_' + data_names + '.png' + png_name = 'Zonal_Mean_' + png_name elif xcoordinate == 'longitude': - png_name = 'Merid_Mean_' + xcoordinate + '_' + data_names + '.png' - plt.savefig(os.path.join(cfg['plot_dir'], period, png_name)) + png_name = 'Merid_Mean_' + png_name + plot_file_path = os.path.join(cfg['plot_dir'], period, png_name) + plt.savefig(plot_file_path) + save_plotted_cubes( + cube_1, cfg, + "_".join([cube_names[0], + os.path.basename(plot_file_path)])) + save_plotted_cubes( + cube_2, cfg, + "_".join([cube_names[1], + os.path.basename(plot_file_path)])) plt.close() + caption = period + ' Zonal/Meridional Mean for ' + var + ' ' + data_names + _get_provenance_record(cfg, plot_file_path, + caption, loc=os.path.join(cfg['plot_dir'], period)) def apply_seasons(data_set_dict): - """Extract seaons and apply a time mean per season""" + """Extract seasons and apply a time mean per season.""" data_file = data_set_dict['filename'] logger.info("Loading %s for seasonal extraction", data_file) data_cube = iris.load_cube(data_file) seasons = ['DJF', 'MAM', 'JJA', 'SON'] + iris.coord_categorisation.add_season(data_cube, 'time', 'clim_season', + seasons) + iris.coord_categorisation.add_season_year(data_cube, 'time', 'season_year', + seasons) season_cubes = [extract_season(data_cube, season) for season in seasons] season_meaned_cubes = [ season_cube.collapsed('time', iris.analysis.MEAN) @@ -117,7 +236,7 @@ def apply_seasons(data_set_dict): def coordinate_collapse(data_set, cfg): - """Perform coordinate-specific collapse and (if) area slicing and mask""" + """Perform coordinate-specific collapse and, if requested, area slicing and masking.""" # see what analysis needs performing analysis_type = cfg['analysis_type'] @@ -136,14 +255,14 @@ def coordinate_collapse(data_set, cfg): mask_cube = iris.load_cube(mask_file) if 'mask_threshold' in cfg: thr = cfg['mask_threshold'] - data_set.data = np.ma.masked_array( - data_set.data, mask=(mask_cube.data > thr)) + data_set.data = np.ma.masked_array(data_set.data, + mask=mask_cube.data > thr) else: logger.warning('Could not find masking threshold') logger.warning('Please specify it if needed') logger.warning('Masking on 0-values = True
(masked value)') - data_set.data = np.ma.masked_array( - data_set.data, mask=(mask_cube.data == 0)) + data_set.data = np.ma.masked_array(data_set.data, + mask=mask_cube.data == 0) # if zonal mean on LON if analysis_type == 'zonal_mean': @@ -161,7 +280,7 @@ def coordinate_collapse(data_set, cfg): def do_preamble(cfg): - """Execute some preamble functionality""" + """Execute some preamble functionality.""" # prepare output dirs time_chunks = ['alltime', 'DJF', 'MAM', 'JJA', 'SON'] time_plot_dirs = [ @@ -173,14 +292,15 @@ def do_preamble(cfg): # get data input_data = cfg['input_data'].values() - grouped_input_data = group_metadata( - input_data, 'short_name', sort='dataset') + grouped_input_data = group_metadata(input_data, + 'short_name', + sort='dataset') return input_data, grouped_input_data def plot_ctrl_exper(ctrl, exper, cfg, plot_key): - """Call plotting functions and make plots depending on case""" + """Call plotting functions and make plots depending on case.""" if cfg['analysis_type'] == 'lat_lon': plot_latlon_cubes(ctrl, exper, cfg, plot_key) elif cfg['analysis_type'] == 'zonal_mean': @@ -192,20 +312,31 @@ def plot_ctrl_exper(ctrl, exper, cfg, plot_key): def plot_ctrl_exper_seasons(ctrl_seasons, exper_seasons, cfg, plot_key): - """Call plotting functions and make plots with seasons""" + """Call plotting functions and make plots with seasons.""" seasons = ['DJF', 'MAM', 'JJA', 'SON'] - if cfg['analysis_type'] == 'zonal_mean': - for c_i, e_i, s_n in zip(ctrl_seasons, exper_seasons, seasons): - plot_info = [plot_key, 'latitude', s_n] - plot_zonal_cubes(c_i, e_i, cfg, plot_info) + if cfg['analysis_type'] == 'lat_lon': + for control_season, experiment_season, season in zip( + ctrl_seasons, exper_seasons, seasons): + plot_latlon_cubes(control_season, + experiment_season, + cfg, + plot_key, + obs_name=None, + season=season) + elif cfg['analysis_type'] == 'zonal_mean': + for control_season, experiment_season, season in zip( + ctrl_seasons, exper_seasons, seasons): + plot_info = [plot_key, 'latitude', season] + plot_zonal_cubes(control_season, experiment_season, cfg, plot_info) elif cfg['analysis_type'] == 'meridional_mean': - for c_i, e_i, s_n in zip(ctrl_seasons, exper_seasons, seasons): - plot_info = [plot_key, 'longitude', s_n] - plot_zonal_cubes(c_i, e_i, cfg, plot_info) + for control_season, experiment_season, season in zip( + ctrl_seasons, exper_seasons, seasons): + plot_info = [plot_key, 'longitude', season] + plot_zonal_cubes(control_season, experiment_season, cfg, plot_info) def main(cfg): - """Execute validation analysis and plotting""" + """Execute validation analysis and plotting.""" logger.setLevel(cfg['log_level'].upper()) input_data, grouped_input_data = do_preamble(cfg) @@ -214,11 +345,12 @@ def main(cfg): logger.info("Processing variable %s", short_name) # get the control, experiment and obs dicts - ctrl, exper, obs = get_control_exper_obs(short_name, input_data, - cfg, _CMIP_TYPE) + cmip_type = cfg["cmip_era"] if "cmip_era" in cfg else None + ctrl, exper, obs = get_control_exper_obs(short_name, input_data, cfg, + cmip_type=cmip_type) # set a plot key holding info on var and data set names - plot_key = short_name + '_' + ctrl['dataset'] \ - + '_vs_' + exper['dataset'] + plot_key = f"{short_name}_{ctrl['alias']}_vs_{exper['alias']}" + control_dataset_name = ctrl['alias'] # get seasons if needed then apply analysis if cfg['seasonal_analysis']: @@ -231,6 +363,16 @@ def main(cfg): coordinate_collapse(exps, cfg) for exps in exper_seasons ] plot_ctrl_exper_seasons(ctrl_seasons, 
exper_seasons, cfg, plot_key) + if obs: + for iobs in obs: + obs_seasons = apply_seasons(iobs) + obs_seasons = [ + coordinate_collapse(obss, cfg) for obss in obs_seasons + ] + plot_key_obs = (f"{short_name}_{ctrl['alias']}" + + f"_vs_{iobs['alias']}") + plot_ctrl_exper_seasons(ctrl_seasons, obs_seasons, cfg, + plot_key_obs) # apply the supermeans (MEAN on time), collapse a coord and plot ctrl, exper, obs_list = apply_supermeans(ctrl, exper, obs) @@ -242,11 +384,14 @@ def main(cfg): if obs_list: for obs_i, obsfile in zip(obs_list, obs): obs_analyzed = coordinate_collapse(obs_i, cfg) - obs_name = obsfile['dataset'] - plot_key = short_name + '_CONTROL_vs_' + obs_name + obs_name = obsfile['alias'] + plot_key = f"{short_name}_{control_dataset_name}_vs_{obs_name}" if cfg['analysis_type'] == 'lat_lon': - plot_latlon_cubes( - ctrl, obs_analyzed, cfg, plot_key, obs_name=obs_name) + plot_latlon_cubes(ctrl, + obs_analyzed, + cfg, + plot_key, + obs_name=obs_name) if __name__ == '__main__': diff --git a/esmvaltool/diag_scripts/weighting/calculate_difference_variable_group.py b/esmvaltool/diag_scripts/weighting/calculate_difference_variable_group.py new file mode 100644 index 0000000000..d34a6d1b28 --- /dev/null +++ b/esmvaltool/diag_scripts/weighting/calculate_difference_variable_group.py @@ -0,0 +1,91 @@ +"""Calculating the difference between two preprocessed files.""" + +import logging +import os +import xarray as xr + +from esmvaltool.diag_scripts.shared import ( + get_diagnostic_filename, + run_diagnostic, +) + +from esmvaltool.diag_scripts.weighting.climwip.io_functions import ( + read_metadata, + read_model_data, + read_observation_data, + log_provenance, +) + +logger = logging.getLogger(os.path.basename(__file__)) + + +def calculate_diff(data_clim: list, data_glob: list, + observations=False) -> tuple: + """ + Read data and calculate differences. + + Return differences and ancestor files. + """ + + errmsg = '{}_{} not found but needed for anomaly calculation!' 
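+    # For illustration (hypothetical short_name 'tas'), the rendered message
+    # would read: "tas_CLIM not found but needed for anomaly calculation!"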
+ if not data_clim: + # take the short_name from the group that is present + raise ValueError(errmsg.format(data_glob[0]['short_name'], 'CLIM')) + if not data_glob: + raise ValueError(errmsg.format(data_clim[0]['short_name'], 'GLOBAL')) + + if observations: + data_clim_read, data_files_clim = read_observation_data(data_clim) + data_glob_read, data_files_glob = read_observation_data(data_glob) + else: + data_clim_read, data_files_clim = read_model_data(data_clim) + data_glob_read, data_files_glob = read_model_data(data_glob) + data_files_clim.extend(data_files_glob) + + diff = data_clim_read - data_glob_read + + diff.attrs['short_name'] = data_clim[0]['short_name'] + diff.attrs['units'] = data_clim[0]['units'] + + return diff, data_files_clim + + +def _save_data(data: 'xr.DataArray', name: str, cfg: dict, + ancestors: list): + """Save data to netcdf for further use.""" + varn_new = f'{data.short_name}_ANOM' + filename_data = get_diagnostic_filename( + '%s%s' % (name, varn_new), cfg, extension='nc') + data.to_dataset(name=varn_new).to_netcdf(filename_data) + + caption = '%s%s' % (name, varn_new) + log_provenance(caption, filename_data, cfg, ancestors) + + +def main(cfg): + """Compute the difference between the two given variable groups.""" + models, observations = read_metadata(cfg) + variable_groups = list(models) + varnames = list() + for variable_group in variable_groups: + varname = variable_group.split("_")[0] + varnames.append(varname) + short_names = set(varnames) + + for short_name in short_names: + datasets_mod_clim = models[short_name + '_CLIM'] + datasets_mod_global = models[short_name + '_GLOBAL'] + diff, data_files_models = calculate_diff( + datasets_mod_clim, datasets_mod_global) + _save_data(diff, 'MODELS_', cfg, ancestors=data_files_models) + + obs_clim = observations[short_name + '_CLIM'] + obs_glob = observations[short_name + '_GLOBAL'] + diff_obs, data_files_obs = calculate_diff( + obs_clim, obs_glob, observations=True) + _save_data(diff_obs, 'OBS_', cfg, ancestors=data_files_obs) + + +if __name__ == '__main__': + + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/weighting/climwip/calibrate_sigmas.py b/esmvaltool/diag_scripts/weighting/climwip/calibrate_sigmas.py new file mode 100644 index 0000000000..1d85e1f95a --- /dev/null +++ b/esmvaltool/diag_scripts/weighting/climwip/calibrate_sigmas.py @@ -0,0 +1,383 @@ +"""A collection of functions to calibrate the shape parameters (sigmas).""" +import logging +import os +from typing import Union + +import matplotlib.pyplot as plt +import numpy as np +import xarray as xr +from scipy.optimize import brute + +from esmvaltool.diag_scripts.shared import ( + get_diagnostic_filename, + get_plot_filename, +) +from esmvaltool.diag_scripts.weighting.climwip.core_functions import ( + area_weighted_mean, + calculate_model_distances, + calculate_weights, + combine_ensemble_members, + compute_overall_mean, + weighted_quantile, +) +from esmvaltool.diag_scripts.weighting.climwip.io_functions import ( + read_metadata, + read_model_data, + read_model_data_ancestor, +) + +logger = logging.getLogger(os.path.basename(__file__)) + +SIGMA_RANGE = (.1, 2) # allow this to be set by the recipe later +PERCENTILES = [.1, .9] # allow this to be set by the recipe later + +confidence_test_values = {'baseline': {}} + + +def calculate_percentiles(target: 'xr.DataArray', + weights_matrix: 'xr.DataArray', + percentiles: list, + weighted: bool = True) -> ('xr.DataArray', float): + """Calculate (equally) weighted percentiles based on each perfect model.
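+
+    In this leave-one-out ('perfect model') test, each model in turn is
+    treated as pseudo-observations: the remaining models are weighted with
+    respect to it and the weighted distribution is then evaluated against it.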
+ + Parameters + ---------- + target : array_like, shape (N,) + Array of model values that will be evaluated in order to estimate the + optimal sigma value. For each perfect model, the perfect model (and + potentially additional models) are excluded from the target, the + rest is weighted. The perfect model is then used to evaluate the + weighted distribution (see also weights_matrix). + weights_matrix : array_like, shape (N, N) + For each perfect model in the perfect_model_ensemble dimension + the weights_matrix contains the respective model weights in the + model_ensemble dimension based on this perfect model. + + Special feature: nan values in the model_ensemble dimension will lead + to the model being excluded from the weights calculation. This + is always the case for the perfect model itself (diagonal of the + matrix) but might also be the case for other models. This is + particularly important for a correct calculation of the independence + weighting. + percentiles : array_like, shape (2,) + Lower and upper percentile to use in the confidence test. Has to + satisfy 0 <= percentiles <= 1 and percentiles[0] < percentiles[1] + weighted : bool, optional + If weighted is set to False (default: True) all values in the weights + matrix will be set to 1, except for nan values which will be preserved. + This can be used to calculate the unweighted baseline for each perfect + model, consistent with the weighted case. + + Returns + ------- + percentile_spread : array_like, shape (N,) + Full range spanned by the two percentiles for each perfect model. + inside_ratio : float + Ratio of perfect models inside their respective percentile_spread. + """ + percentiles = xr.DataArray(percentiles, dims='percentile') + + # need to rename for the inside_count comparison + target_perfect = target.rename( + {'model_ensemble': 'perfect_model_ensemble'}) + + if not weighted: # replace with equal weight but keep nans + weights_matrix = 0 * weights_matrix + 1 + + percentiles_data = xr.apply_ufunc( + weighted_quantile, + target, + percentiles, + weights_matrix, + input_core_dims=[['model_ensemble'], ['percentile'], + ['model_ensemble']], + output_core_dims=[['percentile']], + vectorize=True, + ) + + inside_count = np.logical_and( + target_perfect >= percentiles_data.isel(percentile=0), + target_perfect <= percentiles_data.isel(percentile=1)).values + inside_ratio = inside_count.sum() / len(inside_count) + + percentiles_spread = (percentiles_data.isel(percentile=1) - + percentiles_data.isel(percentile=0)) + + return percentiles_spread, inside_ratio + + +def compute_cost_function(target: 'xr.DataArray', + weights_matrix: 'xr.DataArray', + performance_sigma: float) -> float: + """Optimize the performance sigma for confidence. + + Parameters + ---------- + target : array_like, shape (N,) + See calculate_percentiles for more information. + weights_matrix : array_like, shape (N, N) + See calculate_percentiles for more information. + performance_sigma : float + The performance sigma value used to calculate the weights. + + Returns + ------- + cost_function_value : float + Value of the cost function based on the given performance sigma value. + The cost function is discontinuous, distinguishing two cases: + f = 99 + abs(difference) if the weighting is overconfident, and + f = performance_sigma otherwise. + + WARNING + ------- + It is highly recommended to visually inspect the graphical output of this + process to ensure the optimisation worked as intended.
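+
+    For illustration (hypothetical numbers): with percentiles (0.1, 0.9)
+    the reference inside-ratio is 0.8. An inside_ratio of 0.75 is
+    overconfident and yields f = 99 + 0.05 = 99.05, while an inside_ratio
+    of 0.85 yields f = performance_sigma.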
+ + + Additional information + ---------------------- + After evaluating all possible sigma values the sigma which leads to the + smallest cost function will be selected. Two different cases need to be + distinguished: + * the cost function is > 99 for all cases: + All sigma values lead to overconfident weighting. The script will + plot a visualization of the test and will then raise an error. The user + can decide to select a sigma value manually (e.g., if the test only + failed by a narrow margin) and set it in the recipe. + * the cost function is < 99 for at least one sigma: + For this case the cost function increases linearly with sigma, + therefore the smallest possible sigma where this is true will be + selected on the fly and used in the computation of weights. + """ + percentiles = PERCENTILES + inside_ratio_reference = percentiles[1] - percentiles[0] + + # calculate the equally weighted case once as baseline + if len(confidence_test_values['baseline']) == 0: + percentiles_spread, inside_ratio = calculate_percentiles( + target, weights_matrix, percentiles, weighted=False) + confidence_test_values['baseline']['percentile_spread'] = ( + percentiles_spread) + confidence_test_values['baseline']['inside_ratio'] = inside_ratio + + percentiles_spread, inside_ratio = calculate_percentiles( + target, weights_matrix, percentiles) + confidence_test_values[performance_sigma] = { + 'percentile_spread': percentiles_spread, + 'inside_ratio': inside_ratio + } + + difference = inside_ratio - inside_ratio_reference + + if difference < 0: # overconfident + return 99 - difference + return performance_sigma + + +def evaluate_target(performance_sigma: list, + overall_performance: 'xr.DataArray', + target: 'xr.DataArray', + overall_independence: 'xr.DataArray', + independence_sigma: float) -> float: + """Evaluate the weighting in the target period. + + Parameters + ---------- + performance_sigma : list of one float + Performance weighting shape parameter, determines how strong the + weighting for performance is (smaller values correspond to stronger + weighting). + overall_performance : array_like, shape (N, N) + Contains the generalised distance for each model in the model_ensemble + dimension for each perfect model in the perfect_model_ensemble + dimension. + target : array_like, shape (N,) + See calculate_percentiles for more information. + overall_independence : array_like, shape (N, N) + Matrix containing model-model distances for independence. + independence_sigma : float + Independence weighting shape parameter. + + Returns + ------- + cost_function_value : float + See compute_cost_function for more information.
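+
+    Note
+    ----
+    scipy.optimize.brute passes the candidate sigma as a length-one
+    sequence (hence the performance_sigma[0] unpacking at the top of the
+    function body).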
+ """ + performance_sigma = performance_sigma[0] + + # exclude perfect model in each row by setting it to nan + idx_diag = np.diag_indices(overall_performance['model_ensemble'].size) + overall_performance.values[idx_diag] = np.nan + + weights_matrix = calculate_weights(overall_performance, + overall_independence, performance_sigma, + independence_sigma) + + cost_function_value = compute_cost_function(target, weights_matrix, + performance_sigma) + return cost_function_value + + +def visualize_save_calibration(performance_sigma, cfg, success): + """Visualize a summary of the calibration.""" + percentiles = PERCENTILES + inside_ratio_reference = percentiles[1] - percentiles[0] + + baseline = confidence_test_values.pop('baseline') + sigmas = sorted(confidence_test_values) + inside_ratios = [ + confidence_test_values[sigma]['inside_ratio'] for sigma in sigmas + ] + confidence = xr.Dataset( + data_vars={ + 'inside_ratio_reference': ((), inside_ratio_reference, { + 'units': '1' + }), + 'inside_ratio': ('sigma', inside_ratios, { + 'units': '1' + }), + 'sigma': ('sigma', sigmas, { + 'units': '1' + }), + }) + + figure, axes = plt.subplots(figsize=(12, 8)) + axes.plot(sigmas, + inside_ratios, + color='k', + lw=2, + label='Perfect models inside the {:.0%} range'.format( + inside_ratio_reference), + zorder=99) + axes.axhline(inside_ratio_reference, + color='k', + ls='--', + label='Reference: {:.0%}'.format(inside_ratio_reference)) + axes.axhline(baseline['inside_ratio'], + color='k', + ls=':', + label='Unweighted baseline: {:.0%}'.format( + baseline['inside_ratio'])) + + if success: + axes.axvline(performance_sigma, + color='k', + ls='-.', + label='Selected performance sigma: {:.2f}'.format( + performance_sigma)) + axes.set_title('Performance sigma calibration') + else: + axes.axvline( + performance_sigma, + color='gray', + ls='-.', + label='Best performance sigma: {:.2f} (set manually to use)'. 
+ format(performance_sigma)) + axes.set_title('Performance sigma calibration (FAILED)') + + # optional: sharpness + sharpness = xr.concat([ + confidence_test_values[sigma]['percentile_spread'].expand_dims( + {'sigma': [sigma]}) for sigma in sigmas], dim='sigma') + sharpness /= baseline['percentile_spread'] + axes.plot(sigmas, + sharpness.mean('perfect_model_ensemble'), + color='lightgray', + label='Spread relative to unweighted (mean & 80% range)') + axes.fill_between(sigmas, + *sharpness.quantile((.1, .9), 'perfect_model_ensemble'), + facecolor='lightgray', + edgecolor='none', + alpha=.3) + axes.axhline(1, color='lightgray', ls='--') + + sharpness.attrs['units'] = '1' + confidence['sharpness'] = sharpness + + axes.set_xlim(SIGMA_RANGE) + axes.set_ylim(0, 1.3) + axes.set_xlabel('sigma (1)') + axes.set_ylabel('Ratio (1)') + + axes.legend() + + filename_plot = get_plot_filename('performance_sigma_calibration', cfg) + figure.savefig(filename_plot, dpi=300, bbox_inches='tight') + plt.close(figure) + + filename_data = get_diagnostic_filename('performance_sigma_calibration', + cfg, + extension='nc') + confidence.to_netcdf(filename_data) + + +def calibrate_performance_sigma(performance_contributions: list, + overall_independence: Union['xr.DataArray', + None], + independence_sigma: Union[float, None], + cfg: dict) -> float: + """Calibrate the performance sigma using a perfect model approach.""" + settings = cfg['calibrate_performance_sigma'] + models, _ = read_metadata(cfg) + + performances_matrix = {} + for variable_group in performance_contributions: + + logger.info('Reading model data for %s', variable_group) + if variable_group.endswith("_ANOM"): + model_data, model_data_files = read_model_data_ancestor( + cfg, variable_group) + else: + datasets_model = models[variable_group] + model_data, _ = read_model_data(datasets_model) + + logger.info('Calculating performance for %s', variable_group) + performance_matrix = calculate_model_distances( + model_data, 'perfect_model_ensemble') + logger.debug(performance_matrix.values) + performances_matrix[variable_group] = performance_matrix + + performance_matrix = xr.Dataset(performances_matrix) + overall_performance = compute_overall_mean(performance_matrix, + performance_contributions) + + target = models[settings['target']] + target_data, _ = read_model_data(target) + + if settings.get('target_ref') is not None: + target_ref = models[settings['target_ref']] + target_ref_data, _ = read_model_data(target_ref) + target_data -= target_ref_data + + target_data = area_weighted_mean(target_data) + + if cfg['combine_ensemble_members']: + overall_independence, _ = combine_ensemble_members( + overall_independence, + ['model_ensemble', 'model_ensemble_reference']) + overall_performance, _ = combine_ensemble_members( + overall_performance, ['model_ensemble', 'perfect_model_ensemble']) + target_data, _ = combine_ensemble_members(target_data) + + performance_sigma, fval, _, _ = brute( + evaluate_target, + ranges=(SIGMA_RANGE, ), + Ns=100, + finish=None, + args=(overall_performance, target_data, overall_independence, + independence_sigma), + full_output=True, + ) + + success = fval < 99 + visualize_save_calibration(performance_sigma, cfg, success=success) + + if success: + logmsg = f'Found optimal performance sigma value: {performance_sigma}' + logger.info(logmsg) + return performance_sigma + + errmsg = ('No confident sigma could be found! 
Bad choice of predictors or ' + 'target or too small sigma range?') + raise ValueError(errmsg) diff --git a/esmvaltool/diag_scripts/weighting/climwip/core_functions.py b/esmvaltool/diag_scripts/weighting/climwip/core_functions.py new file mode 100644 index 0000000000..5c1348e4b6 --- /dev/null +++ b/esmvaltool/diag_scripts/weighting/climwip/core_functions.py @@ -0,0 +1,301 @@ +"""A collection of core functions.""" +import logging +import os +from collections import defaultdict +from typing import Union + +import numpy as np +import xarray as xr +from scipy.spatial.distance import pdist, squareform + +logger = logging.getLogger(os.path.basename(__file__)) + + +def area_weighted_mean(data_array: 'xr.DataArray') -> 'xr.DataArray': + """Calculate area mean weighted by the latitude. + + Returns a data array consisting of N values, where N == number of + ensemble members. + """ + weights_lat = np.cos(np.radians(data_array.lat)) + means = data_array.weighted(weights_lat).mean(dim=['lat', 'lon']) + + return means + + +def distance_matrix(values: 'np.ndarray', + weights: 'np.ndarray' = None) -> 'np.ndarray': + """Calculate the pairwise distance between model members. + + Takes a dataset with ensemble member/lon/lat. Flattens lon/lat + into a single dimension. Calculates the distance between every + ensemble member. + + If weights are passed, they should have the same shape as values. + + Returns 2D NxN array, where N == number of ensemble members. + """ + n_members = values.shape[0] + + values = values.reshape(n_members, -1) + + # pdist does not work with NaN + not_nan = np.where(np.all(np.isfinite(values), axis=0))[0] + values = values[:, not_nan] + + if weights is not None: + # Reshape weights to match values array + weights = weights.reshape(n_members, -1) + weights = weights[:, not_nan] + weights = weights[0] # Weights are equal along first dim + + d_matrix = squareform(pdist(values, metric='euclidean', w=weights)) + + return d_matrix + + +def calculate_model_distances( + data_array: 'xr.DataArray', + dimension: str = 'model_ensemble_reference') -> 'xr.DataArray': + """Calculate pair-wise distances between all values in data_array. + + Distances are calculated as the area-weighted Euclidean distance + between each pair of models in data_array. Returned is a square matrix + where the number of elements along each edge equals the number + of ensemble members. + + Parameters + ---------- + data_array : array_like, shape (N,...) + Array of (2 dimensional) model fields. + dimension : string + Name of the newly created reference dimension (default: + 'model_ensemble_reference'). Must not be equal to the existing + model dimension ('model_ensemble')! + + Returns + ------- + distances : array_like, shape (N, N) + Symmetric matrix of pairwise model distances. + """ + assert dimension != 'model_ensemble', f'{dimension} != "model_ensemble"' + weights = np.cos(np.radians(data_array.lat)) + weights, _ = xr.broadcast(weights, data_array) + + diff = xr.apply_ufunc( + distance_matrix, + data_array, + weights, + input_core_dims=[['model_ensemble', 'lat', 'lon'], + ['model_ensemble', 'lat', 'lon']], + output_core_dims=[[dimension, 'model_ensemble']], + ) + + diff.name = f'd{data_array.name}' + diff.attrs['variable_group'] = data_array.name + diff.attrs["units"] = data_array.units + diff[dimension] = diff.model_ensemble.values + + return diff + + +def compute_overall_mean(dataset: 'xr.Dataset', + weights: dict) -> 'xr.DataArray': + """Normalize all variables in a dataset and return their weighted mean.
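+
+    Each variable group is first normalized by its median
+    (``dataset / dataset.median()`` in the code below), so that distances
+    with different units can be combined into one dimensionless metric.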
+ + Relative weights for each variable group are passed via the recipe. + """ + normalized = dataset / dataset.median() + + weights_selected = xr.DataArray( + [weights[variable_group] for variable_group in dataset], + coords={'variable_group': list(dataset)}, + dims='variable_group') + overall_mean = normalized.to_array( + dim='variable_group').weighted(weights_selected).mean('variable_group') + overall_mean.name = 'overall_mean' + overall_mean.attrs['variable_group'] = 'overall_mean' + overall_mean.attrs['units'] = '1' + return overall_mean + + +def combine_ensemble_members( + dataset: Union['xr.DataArray', None], + dimensions: Union[str, list] = 'model_ensemble', +) -> (Union['xr.DataArray', None], dict): + """Combine ensemble members of the same model. + + Parameters + ---------- + dataset : None or data_array, shape (N,) or (N, N) + A vector containing model-observations distances or a matrix containing + model-model distances. + dimensions : string or list of up to two strings + Specifies the dimensions along which ensemble members are combined. + + Returns + ------- + dataset : None or data_array, shape (M,), (M, L) with M, L <= N + data_array where ensemble members along the given dimensions are + combined by averaging. + groups : dict of form {string: list} + Dictionary mapping the combined model names (keys) to the original + ensemble member names (values). + """ + if isinstance(dimensions, str): + dimensions = [dimensions] + assert len( + dimensions) <= 2, 'dimensions can contain a maximum of two strings' + + if dataset is None: + return None, {} + + groups = defaultdict(list) + models = [] + for name in dataset['model_ensemble'].values: + model = name.split('_')[0] + groups[model].append(name) + models.append(model) + + for dimension in dimensions: + if dimension in dataset.dims: + model = xr.DataArray(models, dims=dimension) + dataset = dataset.groupby(model).mean(keep_attrs=True).rename( + {'group': dimension}) + + if len(dimensions) == 2: + # need to set the diagonal elements back to zero after averaging + dataset.values[np.diag_indices(dataset['model_ensemble'].size)] = 0 + + return dataset, groups + + +def calculate_weights_data( + performance: Union['np.array', None], + independence: Union['np.array', None], + performance_sigma: Union[float, None], + independence_sigma: Union[float, None]) -> 'np.array': + """Calculate normalized weights for each of the N models. + + Parameters + ---------- + performance : array_like, shape (N,) or None + Array specifying the model performance. None is mutually exclusive + with independence being None. Single values in performance can be + nan, then they will be excluded from the independence calculation as + well (used for the perfect model test). + independence : array_like, shape (N, N) or None + Array specifying the model independence. None is mutually exclusive + with performance being None. + performance_sigma : float or None + Sigma value defining the form of the weighting function + for the performance. Can be None only if performance is also None. + independence_sigma : float or None + Sigma value defining the form of the weighting function + for the independence. Can be None only if independence is also None.
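+
+    In formula form (a sketch of the computation below): for model i,
+
+        w_i = exp(-(d_i / performance_sigma)^2)
+              / sum_j exp(-(s_ij / independence_sigma)^2)
+
+    where d_i is the performance distance of model i, s_ij the independence
+    distance between models i and j, and the j == i term in the denominator
+    equals exp(0) = 1. The weights are finally normalized to sum to one.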
+ + Returns + ------- + weights : ndarray, shape (N,) + """ + numerator = 1 + not_nan = None + denominator = 1 + + if performance is not None: + numerator = np.exp(-((performance / performance_sigma)**2)) + # nans in the performance vector indicate models to be excluded + not_nan = np.isfinite(performance) + if independence is not None: + if not_nan is None: + not_nan = np.isfinite(independence[0]) + # don't consider nan models for independence of other models! + exp = np.exp(-((independence[:, not_nan] / independence_sigma)**2)) + # Note diagonal = exp(0) = 1, thus this is equal to 1 + sum(i!=j) + denominator = exp.sum(axis=1) + + weights = numerator / denominator + weights /= weights.sum(where=not_nan) + return weights + + +def calculate_weights( + performance: Union['xr.DataArray', None], + independence: Union['xr.DataArray', None], + performance_sigma: Union[float, None], + independence_sigma: Union[float, None]) -> 'xr.DataArray': + """Xarray wrapper for calculate_weights_data.""" + performance_core_dims = [] if performance is None else ['model_ensemble'] + independence_core_dims = [] if independence is None else [ + 'model_ensemble', 'model_ensemble_reference' + ] + + weights = xr.apply_ufunc( + calculate_weights_data, + performance, + independence, + performance_sigma, + independence_sigma, + input_core_dims=[ + performance_core_dims, independence_core_dims, [], [] + ], + output_core_dims=[['model_ensemble']], + vectorize=True, + ) + + weights.name = 'weight' + weights.attrs['variable_group'] = 'weight' # used in barplot + weights.attrs['units'] = '1' + + return weights + + +def weighted_quantile(values: list, + quantiles: list, + weights: list = None) -> 'np.array': + """Calculate weighted quantiles. + + Analogous to np.quantile, but supports weights. + + Based on: https://stackoverflow.com/a/29677616/6012085 + + Parameters + ---------- + values: array_like + List of input values. + quantiles: array_like + List of quantiles between 0.0 and 1.0. + weights: array_like + List with same length as `values` containing the weights. + + Returns + ------- + np.array + Numpy array with computed quantiles. 
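+
+    A minimal sketch of the intended behaviour (hypothetical values; the
+    last entry carries half of the total weight, so the weighted median is
+    pulled towards it):
+
+    >>> weighted_quantile([1., 2., 3., 4.], [0.5], weights=[1., 1., 1., 3.])
+    array([3.])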
+ """ + values = np.array(values) + quantiles = np.array(quantiles) + if weights is None: + weights = np.ones(len(values)) + weights = np.array(weights) + + # remove nans + not_nan = np.where((np.isfinite(values) & np.isfinite(weights)))[0] + values = values[not_nan] + weights = weights[not_nan] + + if not np.all((quantiles >= 0) & (quantiles <= 1)): + raise ValueError('Quantiles should be between 0.0 and 1.0') + + idx = np.argsort(values) + values = values[idx] + weights = weights[idx] + + weighted_quantiles = np.cumsum(weights) - 0.5 * weights + + # Cast weighted quantiles to 0-1 To be consistent with np.quantile + min_val = weighted_quantiles.min() + max_val = weighted_quantiles.max() + weighted_quantiles = (weighted_quantiles - min_val) / max_val + + return np.interp(quantiles, weighted_quantiles, values) diff --git a/esmvaltool/diag_scripts/weighting/climwip/io_functions.py b/esmvaltool/diag_scripts/weighting/climwip/io_functions.py new file mode 100644 index 0000000000..c3fa337730 --- /dev/null +++ b/esmvaltool/diag_scripts/weighting/climwip/io_functions.py @@ -0,0 +1,171 @@ +"""A collection of input-output functions.""" +import logging +import os +from collections import defaultdict +from datetime import datetime + +import natsort +import numpy as np +import xarray as xr + +from esmvaltool.diag_scripts.shared import ProvenanceLogger, group_metadata, io + +logger = logging.getLogger(os.path.basename(__file__)) + + +def get_provenance_record(caption: str, ancestors: list): + """Create a provenance record describing the diagnostic data and plots.""" + record = { + 'caption': + caption, + 'domains': ['reg'], + 'authors': [ + 'kalverla_peter', + 'smeets_stef', + 'brunner_lukas', + 'camphuijsen_jaro', + ], + 'references': [ + 'brunner2019', + 'lorenz2018', + 'knutti2017', + ], + 'ancestors': + ancestors, + } + return record + + +def log_provenance(caption: str, filename: str, cfg: dict, ancestors: list): + """Log provenance info.""" + provenance_record = get_provenance_record(caption, ancestors=ancestors) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(filename, provenance_record) + + logger.info('Output stored as %s', filename) + + +def read_metadata(cfg: dict) -> tuple: + """Read the metadata from the config file. + + Returns a two dicts, one for the model data and one for the + observational data. They are split based on the value of the + 'obs_data' variable in the recipe. The dictionaries are sorted by + the variable. + """ + obs_ids = cfg['obs_data'] + if isinstance(obs_ids, str): + obs_ids = [obs_ids] + + input_data = cfg['input_data'].values() + projects = group_metadata(input_data, attribute='project') + + observations = defaultdict(list) + models = defaultdict(list) + + for project, metadata in projects.items(): + + for item in metadata: + variable_group = item['variable_group'] + + if project in obs_ids: + observations[variable_group].append(item) + else: + models[variable_group].append(item) + + return models, observations + + +def make_standard_calendar(xrds: 'xr.Dataset'): + """Make sure time coordinate uses the default calendar. + + Workaround for incompatible calendars 'standard' and 'no-leap'. + Assumes yearly data. 
+ """ + try: + years = xrds.time.dt.year.values + xrds['time'] = [datetime(year, 7, 1) for year in years] + except TypeError: + # Time dimension is 0-d array + pass + except AttributeError: + # Time dimension does not exist + pass + + +def read_input_data(metadata: list, + dim: str = 'data_ensemble', + identifier_fmt: str = '{dataset}') -> tuple: + """Load data from metadata. + + Read the input data from the list of given data sets. `metadata` is + a list of metadata containing the filenames to load. Only returns + the given `variable`. The datasets are stacked along the `dim` + dimension. Returns an xarray.DataArray. + """ + data_arrays = [] + identifiers = [] + input_files = [] + for info in metadata: + filename = info['filename'] + short_name = info['short_name'] + variable_group = info['variable_group'] + + xrds = xr.open_dataset(filename) + make_standard_calendar(xrds) + xrda = xrds[short_name] + xrda = xrda.rename(variable_group) + data_arrays.append(xrda) + + identifier = identifier_fmt.format(**info) + identifiers.append(identifier) + input_files.append(filename) + + diagnostic = xr.concat(data_arrays, dim=dim) + diagnostic[dim] = identifiers + + # Clean up unnecessary coordinate info + redundant_dims = np.setdiff1d(diagnostic.coords, diagnostic.dims) + diagnostic = diagnostic.drop(redundant_dims) + + # Use natural sorting order + sorting = natsort.natsorted(identifiers, alg=natsort.IC) + diagnostic = diagnostic.sel(indexers={dim: sorting}) + + return diagnostic, input_files + + +def read_model_data(datasets: list) -> tuple: + """Load model data from list of metadata.""" + return read_input_data(datasets, + dim='model_ensemble', + identifier_fmt='{dataset}_{ensemble}_{exp}') + + +def read_model_data_ancestor(cfg, variable_group) -> tuple: + """Load model data from ancestor folder.""" + filepath = io.get_ancestor_file(cfg, 'MODELS_' + variable_group + '.nc') + ancestor_ds = xr.open_dataset(filepath) + + anc_da = ancestor_ds[variable_group].load() + anc_da = anc_da.rename(variable_group) + + return anc_da, filepath + + +def read_observation_data(datasets: list) -> tuple: + """Load observation data from list of metadata.""" + return read_input_data(datasets, + dim='obs_ensemble', + identifier_fmt='{dataset}') + + +def read_observation_data_ancestor(cfg, variable_group) -> tuple: + """Load model data from ancestor folder.""" + filepath = io.get_ancestor_file(cfg, 'OBS_' + variable_group + '.nc') + ancestor_ds = xr.open_dataset(filepath) + + anc_da = ancestor_ds[variable_group].load() + anc_da = anc_da.rename(variable_group) + + return anc_da, filepath diff --git a/esmvaltool/diag_scripts/weighting/climwip/main.py b/esmvaltool/diag_scripts/weighting/climwip/main.py new file mode 100644 index 0000000000..f46db62090 --- /dev/null +++ b/esmvaltool/diag_scripts/weighting/climwip/main.py @@ -0,0 +1,334 @@ +"""Implementation of the climwip weighting scheme. + +Lukas Brunner et al. 
section 2.4 +https://iopscience.iop.org/article/10.1088/1748-9326/ab492f +""" +import logging +import os + +import matplotlib.pyplot as plt +import numpy as np +import seaborn as sns +import xarray as xr + +from esmvaltool.diag_scripts.shared import ( + get_diagnostic_filename, + get_plot_filename, + run_diagnostic, +) +from esmvaltool.diag_scripts.weighting.climwip.calibrate_sigmas import ( + calibrate_performance_sigma, ) +from esmvaltool.diag_scripts.weighting.climwip.core_functions import ( + area_weighted_mean, + calculate_model_distances, + calculate_weights, + combine_ensemble_members, + compute_overall_mean, +) +from esmvaltool.diag_scripts.weighting.climwip.io_functions import ( + log_provenance, + read_metadata, + read_model_data, + read_model_data_ancestor, + read_observation_data, + read_observation_data_ancestor, +) + + +logger = logging.getLogger(os.path.basename(__file__)) + + +def aggregate_obs_data(data_array: 'xr.DataArray', + operator: str = 'median') -> 'xr.DataArray': + """Reduce data array along ensemble dimension. + + Squeeze the ensemble dimension by applying `operator` along the + `obs_ensemble` dimension. Returns the reduced xarray.DataArray. + """ + if operator == 'median': + output = data_array.median(dim='obs_ensemble') + else: + raise ValueError(f'No such operator `{operator}`') + + return output + + +def visualize_and_save_independence(independence: 'xr.DataArray', cfg: dict, + ancestors: list): + """Visualize independence.""" + variable = independence.variable_group + labels = list(independence.model_ensemble.values) + + figure, axes = plt.subplots(figsize=(15, 15), + subplot_kw={'aspect': 'equal'}) + chart = sns.heatmap( + independence, + linewidths=1, + cmap="YlGn", + xticklabels=labels, + yticklabels=labels, + cbar_kws={'label': f'Euclidean distance ({independence.units})'}, + ax=axes, + ) + chart.set_title(f'Distance matrix for {variable}') + + filename_plot = get_plot_filename(f'independence_{variable}', cfg) + figure.savefig(filename_plot, dpi=300, bbox_inches='tight') + plt.close(figure) + + filename_data = get_diagnostic_filename(f'independence_{variable}', + cfg, + extension='nc') + independence.to_netcdf(filename_data) + + caption = f'Euclidean distance matrix for variable {variable}' + log_provenance(caption, filename_plot, cfg, ancestors) + log_provenance(caption, filename_data, cfg, ancestors) + + +def calculate_performance(model_data: 'xr.DataArray', + obs_data: 'xr.DataArray') -> 'xr.DataArray': + """Calculate performance. + + Calculate the area-weighted root mean square difference between each + member of the given model ensemble and the observation data. The + observation data must have the ensemble dimension squeezed or reduced. + Returns an xarray.DataArray containing the same number of values as + members of `model_data`.
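+
+    In formula form (a sketch of the code below), for each model i:
+
+        d_i = sqrt( mean_xy[ (model_i - obs)^2 ] )
+
+    where mean_xy is the cos(latitude)-weighted spatial mean.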
+ """ + diff = model_data - obs_data + + performance = area_weighted_mean(diff**2)**0.5 + + performance.name = f'd{model_data.name}' + performance.attrs['variable_group'] = model_data.name + performance.attrs["units"] = model_data.units + + return performance + + +def barplot(metric: 'xr.DataArray', label: str, filename: str): + """Visualize metric as barplot.""" + name = metric.name + variable_group = metric.variable_group + units = metric.units + + metric_df = metric.to_dataframe().reset_index() + + ylabel = f'{label} {variable_group} ({units})' + + figure, axes = plt.subplots(figsize=(15, 10)) + chart = sns.barplot(x='model_ensemble', + y=name, + data=metric_df, + ax=axes, + color="blue") + chart.set_xticklabels(chart.get_xticklabels(), + rotation=45, + horizontalalignment='right') + if variable_group == 'weight': + chart.set_title('Performance weights') + else: + chart.set_title(f'{label} for {variable_group}') + chart.set_ylabel(ylabel) + chart.set_xlabel('') + + figure.savefig(filename, dpi=300, bbox_inches='tight') + plt.close(figure) + + +def visualize_and_save_performance(performance: 'xr.DataArray', cfg: dict, + ancestors: list): + """Visualize performance.""" + label = 'RMS error' + + variable_group = performance.variable_group + filename_plot = get_plot_filename(f'performance_{variable_group}', cfg) + + barplot(performance, label, filename_plot) + + filename_data = get_diagnostic_filename(f'performance_{variable_group}', + cfg, + extension='nc') + performance.to_netcdf(filename_data) + + caption = f'Performance metric {label} for variable group {variable_group}' + log_provenance(caption, filename_plot, cfg, ancestors) + log_provenance(caption, filename_data, cfg, ancestors) + + +def split_ensemble_members(dataset: 'xr.DataArray', + groups: dict) -> 'xr.DataArray': + """Split combined ensemble members of the same model.""" + model_ensemble = [] + nr_members = [] + for model in dataset['model_ensemble'].values: + model_ensemble += groups[model] + nr_members.append(len(groups[model])) + + data_scaled = dataset.values / nr_members + data_expanded = np.repeat(data_scaled, nr_members) + + return xr.DataArray(data_expanded, + coords={'model_ensemble': model_ensemble}, + dims='model_ensemble', + name=dataset.name, + attrs=dataset.attrs) + + +def visualize_and_save_weights(weights: 'xr.DataArray', cfg: dict, + ancestors: list): + """Visualize weights.""" + label = 'Weights' + + filename_plot = get_plot_filename('weights', cfg) + + barplot(weights, label, filename_plot) + + filename_data = get_diagnostic_filename('weights', cfg, extension='nc') + weights.to_netcdf(filename_data) + + caption = 'Weights' + log_provenance(caption, filename_plot, cfg, ancestors) + log_provenance(caption, filename_data, cfg, ancestors) + + +def parse_contributions_sigma(metric: str, cfg: dict) -> dict: + """Return contributions > 0 and sigma for a given metric.""" + if cfg.get(f'{metric}_contributions') is None: # not set or set to None + contributions = {} + else: + contributions = { + key: value + for key, value in cfg[f'{metric}_contributions'].items() + if value > 0 + } + sigma = cfg.get(f'{metric}_sigma') + return contributions, sigma + + +def main(cfg): + """Perform climwip weighting method.""" + models, observations = read_metadata(cfg) + + independence_contributions, independence_sigma = parse_contributions_sigma( + 'independence', cfg) + performance_contributions, performance_sigma = parse_contributions_sigma( + 'performance', cfg) + + if not (independence_contributions or performance_contributions): 
+ errmsg = ' '.join([ + 'Either the independence_contributions or the', + 'performance_contributions field needs to be set and contain at', + 'least one variable group with weight > 0, otherwise no weights', + 'can be calculated!' + ]) + raise IOError(errmsg) + + model_ancestors = [] + obs_ancestors = [] + + performances = {} + independences = {} + + for variable_group in independence_contributions: + + logger.info('Reading model data for %s', variable_group) + if variable_group.endswith("_ANOM"): + model_data, model_data_files = read_model_data_ancestor( + cfg, variable_group) + else: + datasets_model = models[variable_group] + model_data, model_data_files = read_model_data(datasets_model) + + logger.info('Calculating independence for %s', variable_group) + independence = calculate_model_distances(model_data) + visualize_and_save_independence(independence, cfg, model_data_files) + logger.debug(independence.values) + independences[variable_group] = independence + + model_ancestors.extend(model_data_files) + + for variable_group in performance_contributions: + + logger.info('Reading model data for %s', variable_group) + if variable_group.endswith("_ANOM"): + model_data, model_data_files = read_model_data_ancestor( + cfg, variable_group) + else: + datasets_model = models[variable_group] + model_data, model_data_files = read_model_data(datasets_model) + + logger.info('Reading observation data for %s', variable_group) + datasets_obs = observations[variable_group] + if variable_group.endswith("_ANOM"): + obs_data, obs_data_files = read_observation_data_ancestor( + cfg, variable_group) + else: + obs_data, obs_data_files = read_observation_data(datasets_obs) + obs_data = aggregate_obs_data(obs_data, operator='median') + + logger.info('Calculating performance for %s', variable_group) + performance = calculate_performance(model_data, obs_data) + visualize_and_save_performance(performance, cfg, + model_data_files + obs_data_files) + logger.debug(performance.values) + performances[variable_group] = performance + obs_ancestors.extend(obs_data_files) + + model_ancestors.extend(model_data_files) + + model_ancestors = list(set(model_ancestors)) # only keep unique items + + if independence_contributions: + logger.info('Computing overall mean independence') + independence = xr.Dataset(independences) + overall_independence = compute_overall_mean( + independence, independence_contributions) + visualize_and_save_independence(overall_independence, cfg, + model_ancestors) + if independence_sigma is None: + raise NotImplementedError('`independence_sigma` must be set if ' + '`independence_contributions` is set') + else: + overall_independence = None + + if performance_contributions: + logger.info('Computing overall mean performance') + performance = xr.Dataset(performances) + overall_performance = compute_overall_mean(performance, + performance_contributions) + visualize_and_save_performance(overall_performance, cfg, + model_ancestors + obs_ancestors) + if performance_sigma is None: + performance_sigma = calibrate_performance_sigma( + performance_contributions, overall_independence, + independence_sigma, cfg) + else: + overall_performance = None + + if cfg['combine_ensemble_members']: + overall_independence, groups_independence = combine_ensemble_members( + overall_independence, + ['model_ensemble', 'model_ensemble_reference']) + overall_performance, groups_performance = combine_ensemble_members( + overall_performance) + # one of them could be empty if metric is not calculated + groups = {**groups_independence,
**groups_performance} + + logger.info('Calculating weights') + weights = calculate_weights(overall_performance, overall_independence, + performance_sigma, independence_sigma) + + if cfg['combine_ensemble_members']: + weights = split_ensemble_members(weights, groups) + + visualize_and_save_weights(weights, + cfg, + ancestors=model_ancestors + obs_ancestors) + logger.debug(weights.values) + + +if __name__ == '__main__': + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/weighting/plot_utilities.py b/esmvaltool/diag_scripts/weighting/plot_utilities.py new file mode 100644 index 0000000000..a071a3c7f0 --- /dev/null +++ b/esmvaltool/diag_scripts/weighting/plot_utilities.py @@ -0,0 +1,60 @@ +"""A collection of utility functions for dealing with weights.""" +from collections import defaultdict + +import xarray as xr + +from esmvaltool.diag_scripts.weighting.climwip.core_functions import ( + weighted_quantile, ) + + +def read_weights(filename: str) -> 'xr.DataArray': + """Read a `.nc` file into a weights DataArray.""" + weights_ds = xr.open_dataset(filename) + return weights_ds['weight'] + + +def read_metadata(cfg: dict, groupby: str = 'variable_group') -> dict: + """Read the metadata from the config file.""" + datasets = defaultdict(list) + + metadata = cfg['input_data'].values() + + for item in metadata: + variable = item[groupby] + + datasets[variable].append(item) + + return datasets + + +def calculate_percentiles(data: 'xr.DataArray', + percentiles: list, + weights: 'xr.DataArray' = None) -> 'xr.DataArray': + """Calculate (weighted) percentiles. + + Calculate the (weighted) percentiles for the given data. + + `percentiles` is an array of values between 0 and 100. + + The `model_ensemble` dimension in weights has to contain at + least the same elements as in data. + If `weights` is not specified, the non-weighted percentiles are calculated. + + Returns a DataArray with 'percentiles' as the dimension. + """ + if weights is not None: + weights = weights.sel(model_ensemble=data.model_ensemble) + + output = xr.apply_ufunc(weighted_quantile, + data, + input_core_dims=[['model_ensemble']], + output_core_dims=[['percentiles']], + kwargs={ + 'weights': weights, + 'quantiles': percentiles / 100 + }, + vectorize=True) + + output['percentiles'] = percentiles + + return output diff --git a/esmvaltool/diag_scripts/weighting/weighted_temperature_graph.py b/esmvaltool/diag_scripts/weighting/weighted_temperature_graph.py new file mode 100644 index 0000000000..f5f1086e1e --- /dev/null +++ b/esmvaltool/diag_scripts/weighting/weighted_temperature_graph.py @@ -0,0 +1,181 @@ +"""Implementation of the climwip weighting scheme. + +Lukas Brunner et al.
section 2.4 +https://iopscience.iop.org/article/10.1088/1748-9326/ab492f +""" +import logging +import os +from pathlib import Path + +import matplotlib.pyplot as plt +import numpy as np +import xarray as xr + +from esmvaltool.diag_scripts.shared import ( + get_diagnostic_filename, + get_plot_filename, + run_diagnostic, +) +from esmvaltool.diag_scripts.weighting.climwip.io_functions import ( + log_provenance, + read_model_data, +) +from esmvaltool.diag_scripts.weighting.plot_utilities import ( + calculate_percentiles, + read_metadata, + read_weights, +) + +logger = logging.getLogger(os.path.basename(__file__)) + + +def visualize_and_save_temperatures(temperature: 'xr.DataArray', + central_estimate: 'xr.DataArray', + central_estimate_weighted: 'xr.DataArray', + uncertainty_range: 'xr.DataArray', + uncertainty_range_weighted: 'xr.DataArray', + cfg: dict, ancestors: list): + """Visualize weighted temperature.""" + figure, axes = plt.subplots(dpi=300) + + def plot_shaded(xrange, upper, lower, color, **kwargs): + axes.fill_between( + xrange.data, + upper.data, + lower.data, + facecolor=color, + edgecolor='none', + alpha=0.3, + zorder=100, + **kwargs, + ) + + def plot_line(xrange, central, **kwargs): + axes.plot( + xrange, + central, + zorder=1000, + **kwargs, + ) + + color_non_weighted = 'red' + color_weighted = 'green' + color_data = 'gray' + central_string = cfg['settings'].get('central_estimate', 50) + range_string = '{}-{}perc'.format(cfg['settings'].get('lower_bound', 25), + cfg['settings'].get('upper_bound', 75)) + if not isinstance(central_string, str): + central_string = f'{central_string}perc' + + plot_line(central_estimate.time, + central_estimate, + color=color_non_weighted, + label='Non-weighted {}'.format(central_string)) + plot_shaded(uncertainty_range.time, + uncertainty_range[:, 0], + uncertainty_range[:, 1], + color=color_non_weighted, + label=f'Non-weighted {range_string} range') + + plot_line(central_estimate_weighted.time, + central_estimate_weighted, + color=color_weighted, + label='Weighted {}'.format(central_string)) + plot_shaded( + uncertainty_range_weighted.time, + uncertainty_range_weighted[:, 0], + uncertainty_range_weighted[:, 1], + color=color_weighted, + label='Weighted {} range'.format(range_string), + ) + + for temp in temperature.data: + axes.plot(temperature.time, + temp, + color=color_data, + lw=0.5, + alpha=0.5, + zorder=1, + label='Ensemble members') + + # Fix duplicate labels + handles, labels = plt.gca().get_legend_handles_labels() + by_label = dict(zip(labels, handles)) # dict removes dupes + axes.legend(by_label.values(), by_label.keys()) + + start_year = cfg['settings']['start_year'] + end_year = cfg['settings']['end_year'] + caption = f'Temperature anomaly relative to {start_year}-{end_year}' + plt.title(caption) + plt.xlabel('Year') + plt.ylabel(r'Temperature anomaly $\degree$C') + + filename_plot = get_plot_filename('temperature_anomaly_graph', cfg) + figure.savefig(filename_plot, dpi=300, bbox_inches='tight') + plt.close(figure) + + filename_data = get_diagnostic_filename('temperature_anomalies', + cfg, + extension='nc') + temperature.to_netcdf(filename_data) + + log_provenance(caption, filename_plot, cfg, ancestors) + log_provenance(caption, filename_data, cfg, ancestors) + + +def main(cfg): + """Plot weighted temperature graph.""" + input_files = cfg['input_files'] + filename = cfg['weights'] + weights_path = Path(input_files[0]) / filename + weights = read_weights(weights_path) + + models = read_metadata(cfg, 'short_name')['tas'] + model_data, 
model_data_files = read_model_data(models) + + settings = cfg['settings'] + central_estimate_var = settings.get('central_estimate', 50) + if isinstance(central_estimate_var, (int, float)): + central_estimate = calculate_percentiles( + model_data, + np.array([central_estimate_var]), + ) + central_estimate_weighted = calculate_percentiles( + model_data, + np.array([central_estimate_var]), + weights=weights, + ) + elif central_estimate_var == 'mean': + central_estimate = model_data.mean('model_ensemble') + central_estimate_weighted = model_data.weighted(weights).mean( + 'model_ensemble') + + percentiles = np.array( + [settings.get('lower_bound', 25), + settings.get('upper_bound', 75)]) + + uncertainty_range = calculate_percentiles( + model_data, + percentiles, + ) + + uncertainty_range_weighted = calculate_percentiles( + model_data, + percentiles, + weights=weights, + ) + + visualize_and_save_temperatures( + model_data, + central_estimate, + central_estimate_weighted, + uncertainty_range, + uncertainty_range_weighted, + cfg, + model_data_files, + ) + + +if __name__ == '__main__': + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/weighting/weighted_temperature_map.py b/esmvaltool/diag_scripts/weighting/weighted_temperature_map.py new file mode 100644 index 0000000000..667a382d94 --- /dev/null +++ b/esmvaltool/diag_scripts/weighting/weighted_temperature_map.py @@ -0,0 +1,214 @@ +"""Implementation of a mapplot for the climwip weighting scheme. + +Lukas Brunner et al. section 2.4 +https://iopscience.iop.org/article/10.1088/1748-9326/ab492f +""" +import logging +import os +from pathlib import Path + +import cartopy.crs as ccrs +import matplotlib.pyplot as plt +import numpy as np +from cartopy.mpl.ticker import LatitudeFormatter, LongitudeFormatter + +from esmvaltool.diag_scripts.shared import ( + get_diagnostic_filename, + get_plot_filename, + run_diagnostic, +) +from esmvaltool.diag_scripts.weighting.climwip.io_functions import ( + log_provenance, + read_model_data, +) +from esmvaltool.diag_scripts.weighting.plot_utilities import ( + calculate_percentiles, + read_metadata, + read_weights, +) + +logger = logging.getLogger(os.path.basename(__file__)) + + +def set_antimeridian(dataarray, to: str): + """Flip the antimeridian (i.e. longitude discontinuity) between Europe + (i.e., [0, 360)) and the Pacific (i.e., [-180, 180)). 
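+    For example (worked from the formulas below): with to='pacific' a
+    longitude of 350 is mapped to -10, while with to='europe' a longitude
+    of -10 is mapped back to 350.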
+ + Parameters + ---------- + - dataarray : xarray.DataArray + - to : string, {'pacific', 'europe'} + * 'europe': Longitude will be in [0, 360) + * 'pacific': Longitude will be in [-180, 180) + + Returns + ------- + dataarray : xarray.DataArray + """ + lon = dataarray['lon'] + + if to.lower() == 'europe': + dataarray = dataarray.assign_coords(lon=lon % 360) + elif to.lower() == 'pacific': + dataarray = dataarray.assign_coords(lon=((lon + 180) % 360) - 180) + else: + errmsg = "to has to be one of ['europe', 'pacific'] not {}".format(to) + raise ValueError(errmsg) + + idx = np.argmin(dataarray['lon'].values) + dataarray = dataarray.roll(lon=-idx, roll_coords=True) + dataarray['lon'].attrs = lon.attrs + return dataarray + + +def mapplot(dataarray, cfg, title_pattern, filename_part, ancestors, + **colormesh_args): + """Visualize weighted temperature.""" + metadata = read_metadata(cfg) + metadata_future = metadata['tas_CLIM_future'] + start_year = metadata_future[0]['start_year'] + end_year = metadata_future[0]['end_year'] + + period = f'{start_year}-{end_year}' + if 'tas_CLIM_reference' in metadata: + metadata_reference = metadata['tas_CLIM_reference'] + start_year_ref = metadata_reference[0]['start_year'] + end_year_ref = metadata_reference[0]['end_year'] + period = f'change: {period} minus {start_year_ref}-{end_year_ref}' + metric = cfg['model_aggregation'] + if isinstance(metric, int): + metric = f'{metric}perc' + proj = ccrs.PlateCarree(central_longitude=0) + figure, axes = plt.subplots(subplot_kw={'projection': proj}) + + dataarray = set_antimeridian(dataarray, cfg.get('antimeridian', 'pacific')) + dataarray = dataarray.dropna('lon', how='all').dropna('lat', how='all') + + dataarray.plot.pcolormesh( + ax=axes, + transform=ccrs.PlateCarree(), + levels=9, + robust=True, + extend='both', + **colormesh_args + # colorbar size often does not fit nicely + # https://stackoverflow.com/questions/18195758/set-matplotlib-colorbar-size-to-match-graph + # cbar_kwargs={'fraction': .021} + ) + + lons = dataarray.lon.values + lats = dataarray.lat.values + longitude_formatter = LongitudeFormatter() + latitude_formatter = LatitudeFormatter() + default_xticks = np.arange(np.floor(lons.min()), np.ceil(lons.max()), 10) + default_yticks = np.arange(np.floor(lats.min()), np.ceil(lats.max()), 10) + + axes.coastlines() + axes.set_xticks(cfg.get('xticks', default_xticks), crs=proj) + axes.set_yticks(cfg.get('yticks', default_yticks), crs=proj) + axes.xaxis.set_ticks_position('both') + axes.yaxis.set_ticks_position('both') + axes.xaxis.set_major_formatter(longitude_formatter) + axes.yaxis.set_major_formatter(latitude_formatter) + axes.set_xlabel('') + axes.set_ylabel('') + + title = title_pattern.format(metric=metric, period=period) + axes.set_title(title) + + filename_plot = get_plot_filename(filename_part, cfg) + figure.savefig(filename_plot, dpi=300, bbox_inches='tight') + plt.close(figure) + + filename_data = get_diagnostic_filename(filename_part, cfg, extension='nc') + dataarray.to_netcdf(filename_data) + + log_provenance(title, filename_plot, cfg, ancestors) + log_provenance(title, filename_data, cfg, ancestors) + + +def visualize_and_save_temperature(temperature, cfg: dict, ancestors: list): + """Wrap mapplot: absolute temperature.""" + title_pattern = ('Weighted {metric} temperature\n' + r'{period} ($\degree$C)') + filename_part = 'temperature_change_weighted_map' + mapplot(temperature, + cfg, + title_pattern, + filename_part, + ancestors, + cmap='Reds') + + +def 
visualize_and_save_difference(temperature_difference, cfg: dict,
+                                  ancestors: list):
+    """Wrap mapplot: temperature difference between weighted and unweighted."""
+    title_pattern = '\n'.join([
+        'Weighted minus unweighted {metric} temperature',
+        r'{period} ($\degree$C)',
+    ])
+    filename_part = 'temperature_change_difference_map'
+    mapplot(temperature_difference,
+            cfg,
+            title_pattern,
+            filename_part,
+            ancestors,
+            center=0)
+
+
+def model_aggregation(dataset, metric, weights=None):
+    """Call mean or percentile calculation."""
+    if isinstance(metric, int):
+        return calculate_percentiles(dataset, np.array([metric]),
+                                     weights).squeeze('percentiles', drop=True)
+    if metric.lower() == 'mean':
+        if weights is not None:
+            dataset = dataset.weighted(weights)
+        return dataset.mean('model_ensemble')
+
+    if metric.lower() == 'median':
+        return calculate_percentiles(dataset, np.array([50]),
+                                     weights).squeeze('percentiles', drop=True)
+
+    errmsg = f'model_aggregation {metric} is not implemented!'
+    raise NotImplementedError(errmsg)
+
+
+def main(cfg):
+    """Plot weighted temperature map."""
+    input_files = cfg['input_files']
+    filename = cfg['weights']
+    weights_path = Path(input_files[0]) / filename
+    weights = read_weights(weights_path)
+
+    metadata = read_metadata(cfg)
+    models = metadata['tas_CLIM_future']
+    model_data, model_data_files = read_model_data(models)
+
+    # If a historical period is given, calculate the change
+    if 'tas_CLIM_reference' in metadata:
+        models_hist = metadata['tas_CLIM_reference']
+        model_data_hist, model_data_files_hist = read_model_data(models_hist)
+        model_data_files += model_data_files_hist
+        model_data = model_data - model_data_hist
+
+    metric = cfg.get('model_aggregation', 'mean')
+    unweighted_mean = model_aggregation(model_data, metric)
+    weighted_mean = model_aggregation(model_data, metric, weights)
+
+    visualize_and_save_temperature(
+        weighted_mean,
+        cfg,
+        model_data_files,
+    )
+
+    visualize_and_save_difference(
+        weighted_mean - unweighted_mean,
+        cfg,
+        model_data_files,
+    )
+
+
+if __name__ == '__main__':
+    with run_diagnostic() as config:
+        main(config)
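A minimal usage sketch of calculate_percentiles as wrapped by model_aggregation above, assuming an environment in which this branch's esmvaltool package is importable; the data, weights, and names are synthetic and purely illustrative:

import numpy as np
import xarray as xr

from esmvaltool.diag_scripts.weighting.plot_utilities import (
    calculate_percentiles, )

# Five synthetic ensemble members with ten time steps each.
data = xr.DataArray(
    np.random.default_rng(0).random((5, 10)),
    dims=['model_ensemble', 'time'],
    coords={'model_ensemble': [f'model_{i}' for i in range(5)]},
)

# One performance weight per ensemble member.
weights = xr.DataArray(
    [0.1, 0.4, 0.2, 0.2, 0.1],
    dims=['model_ensemble'],
    coords={'model_ensemble': data.model_ensemble},
)

# Weighted median over the ensemble, one value per time step; the
# length-one 'percentiles' dimension is squeezed away, mirroring
# model_aggregation above.
median = calculate_percentiles(
    data,
    np.array([50]),
    weights=weights,
).squeeze('percentiles', drop=True)

assert median.dims == ('time',)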
diff --git a/esmvaltool/diag_scripts/xco2_analysis/carbon_plots.ncl b/esmvaltool/diag_scripts/xco2_analysis/carbon_plots.ncl
new file mode 100644
index 0000000000..e4d64c6a51
--- /dev/null
+++ b/esmvaltool/diag_scripts/xco2_analysis/carbon_plots.ncl
@@ -0,0 +1,297 @@
+; #############################################################################
+; PLOT SCRIPTS FOR THE XCO2 ANALYSIS RECIPE (gier20bg)
+; #############################################################################
+; Please consider using or extending existing routines before adding new ones.
+; Check the header of each routine for documentation.
+;
+; Contents:
+;
+;    function panel_plots
+;
+; #############################################################################
+
+load "$diag_scripts/shared/set_operators.ncl"
+load "$diag_scripts/shared/plot/aux_plotting.ncl"
+load "$diag_scripts/shared/plot/style.ncl"
+
+; #############################################################################
+
+undef("panel_plots")
+function panel_plots(wks_in[1],
+                     source1,
+                     source2,
+                     varname1[1]: string,
+                     varname2[1]: string,
+                     obs_pres,
+                     items: list)
+;
+; Arguments:
+;     wks_in: workstation (graphic object, or default will be used)
+;     source1: data to be plotted on x-axis or a NetCDF filename
+;     source2: data to be plotted on y-axis or a NetCDF filename
+;     varname1, varname2: variable names, needed for netCDF files with
+;                         multiple variables
+;     obs_pres: True/False: is obs present (and first index)
+;     items: list of input_file_info items
+;
+; Return value
+;     A graphic variable.
+;
+; Description
+;     Creates panels of x-y plots, vertically arranging the different models.
+;
+; Caveats
+;
+; Modification history
+;     20200227-gier_bettina: ported to v2
+;     20180328-gier_bettina: written
+;
+local funcname, scriptname
+begin
+
+  funcname = "panel_plots"
+  scriptname = "diag_scripts/xco2_analysis/carbon_plots.ncl"
+  enter_msg(scriptname, funcname)
+
+  start_year = source1&time(0)
+  end_year = source1&time(dimsizes(source1&time)-1)
+  nyear = end_year - start_year + 1
+
+  if isatt(source1, "trange") then
+    trange = source1@trange
+  else if iscoord(source1, "time") then
+    trange = tostring(start_year) + " - " + tostring(end_year)
+  else
+    trange = ""
+  end if
+  end if
+  if isatt(source1, "region") then
+    region = ", " + source1@region + ", "
+  else
+    region = ", "
+  end if
+
+  if isatt(source1, "long_name") then
+    varname1_f = source1@long_name
+  else
+    varname1_f = varname1
+  end if
+
+  if isatt(source2, "long_name") then
+    varname2_f = source2@long_name
+  else
+    varname2_f = varname2
+  end if
+
+  var_yaxis = False
+  if isatt(source1, "var_yaxis") then
+    var_yaxis = source1@var_yaxis
+  elseif ((isatt(source2, "var_yaxis")) .and. (.not. var_yaxis)) then
+    var_yaxis = source2@var_yaxis
+  end if
+
+  yaxisstr = varname2_f + " [" + source2@units + "]"
+
+  ; Check if a valid wks has been provided, otherwise invoke default
+  wks = get_wks(wks_in, DIAG_SCRIPT, varname1)
+
+  ; Set up plot resources
+  res = True
+  res@tiYAxisString = ""
+  res@tiYAxisFontHeightF = 0.04
+  res@tmYLLabelFontHeightF = 0.03
+  res@tiXAxisString = varname1_f + " [" + source1@units + "]"
+  res@tiXAxisFontHeightF = 0.04
+  res@tmXBLabelFontHeightF = 0.03
+  res@pmTickMarkDisplayMode = "Conditional"
+  res@tiMainFontHeightF = 0.02
+  res@tiMainString = ""
+
+  colors = project_style(items, diag_script_info, "colors")
+  if obs_pres then
+    colors := array_append_record("black", colors(1:), 0)
+  else
+    colors := colors(1:)
+  end if
+  ; Add multi-model mean as it's not in diag_script_info
+  colors := array_append_record(colors, "red", 0)
+
+  ; Calculate axis limits from the data range
+  min_X = min(source1)
+  max_X = max(source1)
+  res@gsnMaximize = True
+  res@gsnDraw = False
+  res@gsnFrame = False
+  res@trXMinF = min_X - 0.05*(max_X - min_X)
+  res@trXMaxF = max_X + 0.05*(max_X - min_X)
+  res@trYMinF = min(source2) - 0.5 * (max(source2) - min(source2))
+  res@trYMaxF = max(source2) + 0.5 * (max(source2) - min(source2))
+  res@vpHeightF = 0.2
+  res@vpWidthF = 0.8
+  res@tmLabelAutoStride = True
+
+  resM = True
+  resM@gsMarkerSizeF = 0.01
+  resM@gsMarkerThicknessF = 4.
+
+  regression = new((/dimsizes(source2&model), 2/), float)
+  cor = new((/dimsizes(source2&model), 2/), float)
+
+  lres = True
+  lres@gsLineThicknessF = 3.
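+
+  ; The text resources below are reused in the model loop: regline_stats
+  ; fits source2 against source1 for each model, and the slope with its
+  ; standard error, plus the Pearson correlation and p-value, are annotated
+  ; in each panel.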
+ txres = True + txres@txFontHeightF = 0.02 + + modnames = source1&model + if obs_pres then + modnames(0) = "OBS" + end if + + plots = new(dimsizes(source1&model), graphic) + + ; Override defaults with "res_" attributes of "data" + res_new = att2var(source1, "res_") + copy_VarMeta(res_new, res) + + ; Determine scale for variable y-axis + if var_yaxis then + my_y_scale = max(abs(dim_max_n(source2, 1) - dim_min_n(source2, 1))) + end if + + ; Make each separate plot + do imod = 0, dimsizes(source1&model)-1 + + if imod.eq.toint(dimsizes(source1&model)/2) then + res@tiYAxisString = yaxisstr + else + res@tiYAxisString = " " + end if + + ; Variable y-Axis if wanted + if var_yaxis then + res@trYMinF = min(source2(imod, :)) - 0.3 * my_y_scale + res@trYMaxF = min(source2(imod, :)) + 1.5 * my_y_scale + end if + + ; Add Regression lines + + x_fit = fspan(res@trXMinF - 1, res@trXMaxF + 1, 100) + + rc = regline_stats(source1(imod, :), source2(imod, :)) + regression(imod, 0) = (/rc/) + regression(imod, 1) = (/rc@stderr(1)/) + cor(imod, 0) = rc@r ; pearson-correlation coefficient + cor(imod, 1) = rc@pval(1) ; p-value + + y_fit = x_fit*rc + rc@yintercept + + lres@gsLineColor = colors(imod) + res@xyLineColor = colors(imod) + txres@txFontColor = colors(imod) + + plots(imod) = gsn_csm_xy(wks, x_fit, y_fit, res) + + if isatt(varname2, "mean") .and. varname2@mean then + v2_mean = rc@yave + v2_mean = decimalPlaces(v2_mean, 2, True) + txadd = ", mean = " + sprintf("%1.2f", v2_mean) + delete(v2_mean) + else + txadd = "" + end if + + txres@txJust = "centerLeft" + plots@$unique_string("mod")$ = \ + gsn_add_text(wks, plots(imod), modnames(imod) + ": " \ + + sprintf("%1.3f", regression(imod, 0)) \ + + " ~F18~S~F~ " \ + + sprintf("%1.3f", regression(imod, 1)) + txadd, \ + res@trXMinF + 0.02*(res@trXMaxF - res@trXMinF), \ + res@trYMaxF - 0.13*(res@trYMaxF - res@trYMinF), txres) + txres@txJust = "centerRight" + plots@$unique_string("cor")$ = \ + gsn_add_text(wks, plots(imod), "r: " \ + + sprintf("%1.2f", cor(imod, 0)) + ", p-value: " \ + + sprintf("%1.2f", cor(imod, 1)), \ + res@trXMaxF - 0.02*(res@trXMaxF - res@trXMinF),\ + res@trYMinF + 0.13*(res@trYMaxF - res@trYMinF), txres) + + do iyr = 0, nyear-1 + if nyear.lt.15 then + resM@gsMarkerIndex = iyr + 2 + stride = 1 + else + if nyear.lt.50 then + stride = 5 + elseif nyear.lt.100 then + stride = 10 + else + stride = 20 + end if + resM@gsMarkerIndex = 2 + toint(iyr/stride) + end if + resM@gsMarkerColor = colors(imod) + plots@$unique_string("markers")$ = \ + gsn_add_polymarker(wks, plots(imod), source1(imod, iyr), \ + source2(imod, iyr), resM) + end do + + delete(x_fit) + delete(y_fit) + + end do + + ; Add Marker Legend + lgres = True + lgres@lgMonoMarkerIndex = False + lgres@vpWidthF = 0.3 + lgres@vpHeightF = 0.5 + lgres@lgPerimOn = False + lgres@lgMarkerSizeF = 0.01 + lgres@lgMonoMarkerColor = True + lgres@lgMarkerColor = "black" + lgres@lgAutoManage = False + lgres@lgLabelFontHeightF = 0.06 + lgres@lgItemType = "Markers" + + if stride.eq.1 then + nlabels = nyear + labels = tostring(ispan(start_year, end_year, 1)) + lgres@lgMarkerIndexes = ispan(2, nyear+1, 1) + lgres@lgItemOrder = ispan(nyear-1, 0, 1) + else + nlabels = toint((nyear)/stride) + 1 + labels = new(nlabels, string) + labels(nlabels-1) = tostring(start_year + (nlabels-1)*stride) \ + + " - " + tostring(end_year) + do lb = 0, nlabels - 2 + labels(lb) = tostring(start_year + lb*stride) + " - " + \ + tostring(start_year + (lb+1)*stride) + end do + lgres@lgMarkerIndexes = ispan(2, 2+nlabels, 1) + lgres@lgItemOrder 
= ispan(nlabels-1, 0, 1) + end if + + legend = gsn_create_legend(wks, nlabels, labels, lgres) + + anres = True + anres@amJust = "TopLeft" + ; Parallel -> x-direction + anres@amParallelPosF = 0.45 + anres@amOrthogonalPosF = -0.5 + annoid1 = gsn_add_annotation(plots(0), legend, anres) + + res1 = True + res2 = True + res1@gsnMaximize = True + res2@gsnAttachPlotsXAxis = True + + newplot = gsn_attach_plots(plots(0), plots(1:), res1, res2) + + draw(plots(0)) + + frame(wks) + + leave_msg(scriptname, funcname) + + return(plots(0)) +end diff --git a/esmvaltool/diag_scripts/xco2_analysis/delta_T.ncl b/esmvaltool/diag_scripts/xco2_analysis/delta_T.ncl new file mode 100644 index 0000000000..f83f56cb31 --- /dev/null +++ b/esmvaltool/diag_scripts/xco2_analysis/delta_T.ncl @@ -0,0 +1,350 @@ +; ############################################################################# +; xco2_analysis/delta_T.ncl +; ############################################################################# +; Description +; Computes IAV of growth rate and plots against growing season temperature. +; +; Required diag_script_info attributes: +; region: region to average over +; masking: the kind of masking to apply prior to region average +; var_order: First main variable, then temperature variable to compare +; +; Optional diag_script_info attributes: +; styleset: styleset for color cording panels +; output_file_type: output file type for plots +; var_plotname: String formatting how variable should be named in plots +; defaults to short_name if not assigned +; +; Caveats +; +; Modification history +; 20201116-gier_bettina: Added provenance, clean up +; 20200227-gier_bettina: Adapted to version 2 +; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/latlon.ncl" +load "$diag_scripts/shared/scaling.ncl" +load "$diag_scripts/shared/set_operators.ncl" +load "$diag_scripts/shared/statistics.ncl" + +load "$diag_scripts/shared/plot/scatterplot.ncl" +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/xy_line.ncl" + +load "$diag_scripts/xco2_analysis/stat.ncl" +load "$diag_scripts/xco2_analysis/carbon_plots.ncl" + +begin + + enter_msg(DIAG_SCRIPT, "") + AUTHORS = (/"gier_bettina"/) + REFERENCES = (/"gier20bg"/) + + ; Variable + var0 = diag_script_info@var_order(0) + var1 = diag_script_info@var_order(1) + var2 = diag_script_info@var_order(2) + + ; Input data + INFO0 = select_metadata_by_name(input_file_info, var0) + INFO1 = select_metadata_by_name(input_file_info, var1) + INFO2 = select_metadata_by_name(input_file_info, var2) + DATASETS = metadata_att_as_array(INFO0, "dataset") + ; Rename MultiModelMean to multi-model mean for publication + DATASETS = where(DATASETS.eq."MultiModelMean", "multi-model mean", DATASETS) + DATASETS2 = metadata_att_as_array(INFO1, "dataset") + DATASETS2 = where(DATASETS2.eq."MultiModelMean", "multi-model mean", \ + DATASETS2) + ALL_FILES = metadata_att_as_array(INFO0, "filename") + ALL_FILES := array_append_record(ALL_FILES, \ + metadata_att_as_array(INFO1, "filename"), 0) + experiments = metadata_att_as_array(INFO0, "exp") + + log_info("++++++++++++++++++++++++++++++++++++++++++") + log_info(DIAG_SCRIPT + " (var: " + var0 + " and " + var1 + ")") + log_info("++++++++++++++++++++++++++++++++++++++++++") + +end + +begin + ; Maximum amount of missing values per year + min_nmonth = 7 + + ; Prepare region + lat_min = diag_script_info@region(0) + lat_max = 
diag_script_info@region(1) + + if lat_min.eq.(-90) .and. lat_max.eq.90 then + region = "global" + else if lat_min.eq.(30) .and. lat_max.eq.60 then + region = "nhmidlat" + else if lat_min.eq.(-60) .and. lat_max.eq.(-30) then + region = "shmidlat" + else if lat_min.eq.(-30) .and. lat_max.eq.(30) then + region = "trop" + else if lat_min.eq.(0) .and. lat_max.eq.(90) then + region = "nh" + else if lat_min.eq.(-90) .and. lat_max.eq.0 then + region = "sh" + else + region = "lat_" + tostring(lat_min) + "_" + tostring(lat_max) + end if + end if + end if + end if + end if + end if + DOMAIN = (/region/) + + ; Plot file type + if (isatt(diag_script_info, "output_file_type")) then + file_type = diag_script_info@output_file_type + elseif (isatt(config_user_info, "output_file_type")) then + file_type = config_user_info@output_file_type + else + file_type = "png" + end if + + ; Output plot directory + plot_dir = config_user_info@plot_dir + system("mkdir -p " + plot_dir) + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) + + ; Determine start + end year + start_years = metadata_att_as_array(INFO0, "start_year") + start_year = min(start_years) + end_years = metadata_att_as_array(INFO0, "end_year") + end_year = max(end_years) + nyear = end_year - start_year + 1 + ntime = nyear*12 + time = new(ntime, float) + do yy = start_year, end_year + do mm = 0, 11 + time(12 * (yy - start_year) + mm) = yy + (mm + 0.5)/12. + end do + end do + + if (isatt(INFO0[0], "reference_dataset")) then + ref_ind = ind(DATASETS.eq.INFO0[0]@reference_dataset) + else + ref_ind = -999 + end if + + mod_inds = ind(DATASETS.ne.INFO0[0]@reference_dataset) + + ; Need array that maps obs to index 0, the other models to following indices + mapping_array1 = new(dimsizes(DATASETS), integer) + if ref_ind.ne.-999 then + mapping_array1(ref_ind) = 0 + end if + mapping_array1(mod_inds) = ispan(1, dimsizes(DATASETS)-1, 1) + + ; Need array that maps obs to index 0, the other models to following indizes + mapping_array2 = new(dimsizes(DATASETS2) + 1, integer) + mapping_array2(dimsizes(DATASETS2)) = 0 + mapping_array2(:dimsizes(DATASETS2)-1) = ispan(1, dimsizes(DATASETS2), 1) + + ; set up needed arrays + growth_series = new((/dimsizes(DATASETS), nyear/), float) + growth_series!0 = "model" + growth_series!1 = "time" + growth_series&model = DATASETS(mapping_array1) + growth_series&time = ispan(start_year, end_year, 1) + + delta_t = new((/dimsizes(DATASETS2)+1, nyear/), float) + delta_t!0 = "model" + delta_t!1 = "time" + delta_t&model = array_append_record("OBS", DATASETS2, 0) + delta_t&time = growth_series&time + + ; Temp Anomalies with reference to 2000 - 2010 + if start_year.lt.2003 then + int_yr = 2003 + else + int_yr = start_year + end if + if end_year.gt.2010 then + fin_yr = 2010 + else + fin_yr = end_year + end if + if (fin_yr - int_yr).lt.3 then + fin_yr = end_year + int_yr = start_year + end if + +end + +begin + ; Read Sat data + obs_data = read_data(INFO0[ref_ind]) + obs_avg = area_operations(obs_data, lat_min, lat_max, 0, 360, \ + "average", True) + growth_series(0, :) = (/calc_gr(obs_avg, "yearly", min_nmonth)/) + var0_units = obs_data@units + + ; Prepare tas data + MODIS_path = diag_script_info@auxiliary_data_dir \ + + "/Land_Cover_Class_1degree.nc4" + MODIS_dum = addfile(MODIS_path, "r") + MOD_map = byte2flt(MODIS_dum->land_cover_class) + MOD_map = where(MOD_map.eq.0 .or. MOD_map.eq.15 .or. MOD_map.eq.16,\ + MOD_map@_FillValue, 1.) 
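+  ; Assumption: IGBP-style land-cover classes, i.e. 0 = water, 15 = snow/ice,
+  ; 16 = barren or sparsely vegetated; these are set to missing and all other
+  ; classes to 1, so MOD_map can act as a multiplicative vegetation mask.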
+ delete(MODIS_dum) + ; This map is lon -180, 180, we want 0, 360 + MOD_map = lonFlip(MOD_map) + + ; --------------------------------------------- + ; --------------------- First prepare XCO2 data + do imod = 0, dimsizes(DATASETS) - 1 + if imod.eq.ref_ind then + continue + end if + + tmp = read_data(INFO0[imod]) + var0_mod = tofloat(tmp) + copy_VarMeta(tmp, var0_mod) + delete(tmp) + + var0_reg = area_hi2lores_Wrap(var0_mod&lon, var0_mod&lat, var0_mod, True, \ + 1, obs_data&lon, obs_data&lat, False) + var0_reg = where(ismissing(obs_data), obs_data, var0_reg) + var0_avg = area_operations(var0_reg, lat_min, lat_max, 0, 360, \ + "average", True) + growth_series(mapping_array1(imod), :) = (/calc_gr(var0_avg, "yearly", \ + min_nmonth)/) + delete([/var0_mod, var0_avg, var0_reg/]) + end do + + ; ---------------------- Secondly tas data + do imod = 0, dimsizes(DATASETS2) + if imod.eq.(dimsizes(DATASETS2)) then + tmp = read_data(INFO2[0]) + var1_units = tmp@units + else + tmp = read_data(INFO1[imod]) + end if + + var1_mod = tofloat(tmp) + copy_VarMeta(tmp, var1_mod) + delete(tmp) + + time_coord = cd_calendar(var1_mod&time, 0) + temp_anom = var1_mod + + do imnth = 1, 12 + ref_inds = ind((time_coord(:, 1).eq.imnth) .and. \ + (time_coord(:, 0).lt.fin_yr) .and. \ + (time_coord(:, 0).ge.int_yr)) + mean = dim_avg_n_Wrap(var1_mod(ref_inds, :, :), 0) + ind_mnth = ind(time_coord(:, 1).eq.imnth) + mnth_anom = tofloat(var1_mod(ind_mnth, :, :)) + mnth_anom = mnth_anom - conform(mnth_anom, mean, (/1, 2/)) + temp_anom(ind_mnth, :, :) = (/mnth_anom/) + delete(ref_inds) + delete(ind_mnth) + delete(mnth_anom) + delete(mean) + end do + + delete(time_coord) + + ; Apply MODIS mask to get rid of ocean, snow/ice and + ; sparsely vegetated regions + temp_hreg = linint2_Wrap(MOD_map&longitude, MOD_map&latitude, MOD_map, \ + True, var1_mod&lon, var1_mod&lat, 0) + temp_var = temp_anom*conform(temp_anom, temp_hreg, (/1, 2/)) + copy_VarMeta(var1_mod, temp_var) + delete(temp_hreg) + + temp_area = area_operations(temp_var, lat_min, lat_max, 0, \ + 360, "average", False) + + ; First determine which part of the year to average over + if lat_min.ge.0 then + ; Schneising14 growing season: SH Dec-May, NH April-sept + mnth_string = "AMJJAS" + elseif lat_max.le.0 then + mnth_string = "DJFMAM" + else + mnth_string = "annualclim" + end if + + del_T = new(nyear, "float") + do ii = 0, nyear - 1 + del_T(ii) = time_operations(temp_area, start_year + ii, \ + start_year + ii, "average", \ + mnth_string, False) + end do + delta_t(mapping_array2(imod), :) = (/del_T/) + + delete(del_T) + delete(temp_var) + delete(temp_anom) + delete(var1_mod) + + end do + + ; -------------------------------------- PLOT ------------------------------- + obs_pres = True + delta_t@units = var1_units + delta_t@var = var1 + delta_t@diag_script = DIAG_SCRIPT + delta_t@long_name = "Growing Season Temperature" + growth_series@units = var0_units + " yr-1" + growth_series@var = "gr" + growth_series@diag_script = DIAG_SCRIPT + growth_series@long_name = "Growth Rate" + delta_t@region = region + + filename_dtgr_detr = var0 + "_" + var1 + "_" + experiments(0) \ + + "_" + region + "_" + (start_year) + "-" + (end_year) \ + + "_dt_GR_detr_panels" + outfile_dtgr_detr = plot_dir + "/" + filename_dtgr_detr + outfile_netcdf_dtgr_detr = work_dir + "/" + filename_dtgr_detr + ".nc" + + wks = gsn_open_wks(file_type, outfile_dtgr_detr) + delta_t_detr = new(dimsizes(delta_t), float) + copy_VarMeta(delta_t, delta_t_detr) + growth_series_detr = new(dimsizes(growth_series), float) + 
copy_VarMeta(growth_series, growth_series_detr)
+  do imod = 0, dimsizes(growth_series&model) - 1
+    delta_t_detr(imod, :) = var_detrend(delta_t&time, delta_t(imod, :))
+    growth_series_detr(imod, :) = var_detrend(growth_series&time, \
+                                              growth_series(imod, :))
+  end do
+  delta_t_detr@long_name = "IAV of growing season temperature"
+  growth_series_detr@long_name = "IAV of growth rate"
+  var_detr_GR = "Detrended GR"
+  plt_dtgr_panels = panel_plots(wks, delta_t_detr, growth_series_detr, \
+                                "Detrended ~F8~D~F~ T", var_detr_GR, \
+                                obs_pres, INFO0)
+  delete(wks)
+  delete(plt_dtgr_panels)
+
+  outfile_netcdf_dtgr_detr@existing = "overwrite"
+  ncdf_outfile = ncdf_write(growth_series_detr, outfile_netcdf_dtgr_detr)
+  outfile_netcdf_dtgr_detr@existing = "append"
+  ncdf_outfile = ncdf_write(delta_t_detr, outfile_netcdf_dtgr_detr)
+
+  log_provenance(ncdf_outfile, \
+                 outfile_dtgr_detr + "." + file_type, \
+                 "Sensitivity of interannual variability of " \
+                 + var0 + " growth rate in the " + region + ", " + \
+                 start_year + "-" + end_year + " to the interannual " \
+                 + "variability of growing season temperature. " \
+                 + "Similar to Gier et al. 2020, Fig C1.", \
+                 (/"mean", "stddev"/), \
+                 DOMAIN, \
+                 "scatter", \
+                 AUTHORS, \
+                 REFERENCES, \
+                 ALL_FILES)
+
+  leave_msg(DIAG_SCRIPT, "")
+
+end
diff --git a/esmvaltool/diag_scripts/xco2_analysis/global_maps.ncl b/esmvaltool/diag_scripts/xco2_analysis/global_maps.ncl
new file mode 100644
index 0000000000..d1748425d4
--- /dev/null
+++ b/esmvaltool/diag_scripts/xco2_analysis/global_maps.ncl
@@ -0,0 +1,602 @@
+; #############################################################################
+; xco2_analysis/global_maps.ncl
+; #############################################################################
+; Description
+;     Plotting global maps for XCO2 data and Seasonal Cycle Amplitude
+;
+; Required diag_script_info attributes:
+;     contour_max_level: maximum seasonal cycle value displayed for
+;                        contour plot
+;
+; Optional diag_script_info attributes:
+;     output_file_type: output file type for plots.
Default: png +; +; Caveats +; +; Modification history +; 20201118-gier_bettina: Added provenance, clean up +; 20200226-gier_bettina: Adapted to version 2 +; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/latlon.ncl" +load "$diag_scripts/shared/scaling.ncl" +load "$diag_scripts/shared/set_operators.ncl" +load "$diag_scripts/shared/statistics.ncl" + +load "$diag_scripts/shared/plot/scatterplot.ncl" +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/xy_line.ncl" + +load "$diag_scripts/xco2_analysis/stat.ncl" + +begin + + enter_msg(DIAG_SCRIPT, "") + AUTHORS = (/"gier_bettina"/) + REFERENCES = (/"gier20bg"/) + DOMAIN = (/"global"/) + + ; Variable + var0 = variable_info[0]@short_name + + ; Input data + INFO0 = select_metadata_by_name(input_file_info, var0) + DATASETS = metadata_att_as_array(INFO0, "dataset") + DATASETS := array_append_record(DATASETS, "multi-model mean", 0) + experiments = metadata_att_as_array(INFO0, "exp") + dim_MOD = dimsizes(DATASETS) + ALL_FILES = metadata_att_as_array(INFO0, "filename") + + log_info("++++++++++++++++++++++++++++++++++++++++++") + log_info(DIAG_SCRIPT + " (var: " + var0 + ")") + log_info("++++++++++++++++++++++++++++++++++++++++++") + +end + +begin + ; Maximum amount of missing values per year + min_nmonth = 7 + + ; Plot file type + if (isatt(diag_script_info, "output_file_type")) then + file_type = diag_script_info@output_file_type + elseif (isatt(config_user_info, "output_file_type")) then + file_type = config_user_info@output_file_type + else + file_type = "png" + end if + + ; Output plot directory + plot_dir = config_user_info@plot_dir + system("mkdir -p " + plot_dir) + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) + +end + +begin + ; Prepare arrays + + ; Make array to collect timeseries data + start_years = metadata_att_as_array(INFO0, "start_year") + start_year = min(start_years) + end_years = metadata_att_as_array(INFO0, "end_year") + end_year = max(end_years) + nyear = (end_year - start_year + 1) + time = new(nyear*12, float) + do yy = start_year, end_year + do mm = 0, 11 + time(12 * (yy - start_year) + mm) = yy + (mm + 0.5)/12. + end do + end do + +end + +begin + ; First read observations for observational masking + if (isatt(INFO0[0], "reference_dataset")) then + ref_ind = ind(DATASETS.eq.INFO0[0]@reference_dataset) + else + ref_ind = -999 + end if + + mod_inds = ind(DATASETS.ne.INFO0[0]@reference_dataset) + mod_names = DATASETS(mod_inds) + + ; Need array that maps obs to index 0, the other models to following indices + mapping_array = new(dim_MOD, integer) + if ref_ind.ne.-999 then + mapping_array(ref_ind) = 0 + end if + mapping_array(mod_inds) = ispan(1, dim_MOD - 1, 1) + + ; For MMM calculation + if ref_ind.eq."-999" then + subtract_mmm = 1 + else + subtract_mmm = 2 + end if + + if ref_ind.ne."-999" then + obs_data = read_data(INFO0[ref_ind]) + end if + + do imod = 0, dim_MOD - 1 + if imod.eq.ref_ind then + continue + end if + + if imod.eq.(dim_MOD-1) then + ; Compute MMM + mod_var = mmm_array / (dimsizes(DATASETS) - subtract_mmm) + copy_VarMeta(mmm_array, mod_var) + + else + tmp = read_data(INFO0[imod]) + mod_var = tofloat(tmp) + copy_VarMeta(tmp, mod_var) + delete(tmp) + var0_units = mod_var@units + + if .not. 
isdefined("mmm_array") then + mmm_array = area_hi2lores_Wrap(mod_var&lon, mod_var&lat, mod_var, \ + True, 1, obs_data&lon, obs_data&lat, \ + False) + else + var_reg = area_hi2lores(mod_var&lon, mod_var&lat, mod_var, \ + True, 1, mmm_array&lon, mmm_array&lat, False) + mmm_array = mmm_array + (/var_reg/) + end if + end if ; End MMM check and computation + + mod_nomv = area_hi2lores_Wrap(mod_var&lon, mod_var&lat, mod_var, True, \ + 1, obs_data&lon, obs_data&lat, False) + mod_mv = where(ismissing(obs_data), obs_data, mod_nomv) + + ; Calculate growth, seasonal amplitude on a grid level + ; Generate new arrays to hold values + nyr = end_year - start_year + 1 + mod_nomv_growth = new((/nyr, dimsizes(mod_nomv&lat), \ + dimsizes(mod_nomv&lon)/), float) + mod_nomv_growth!0 = "year" + mod_nomv_growth&year = ispan(start_year, \ + end_year, 1) + mod_nomv_growth!1 = "lat" + mod_nomv_growth&lat = mod_nomv&lat + mod_nomv_growth!2 = "lon" + mod_nomv_growth&lon = mod_nomv&lon + mod_nomv_growth@diag_script = DIAG_SCRIPT + mod_nomv_seasamp = mod_nomv_growth + mod_mv_growth = mod_nomv_growth + mod_mv_seasamp = mod_nomv_growth + + mod_nomv_growth@var = var0 + "_growth_rate_no_missing_values" + mod_nomv_seasamp@var = var0 + "_seasonal_amplitude_no_missing_values" + mod_mv_growth@var = var0 + "_growth_rate_with_missing_values" + mod_mv_seasamp@var = var0 + "_seasonal_amplitude_with_missing_values" + + ; Only calculate obs version once + if .not. isdefined("obs_growth") then + obs_growth = mod_nomv_growth + obs_seasamp = mod_nomv_growth + obs_growth@var = var0 + "_growth_rate_obs" + obs_seasamp@var = var0 + "_seasonal_amplitude_obs" + + ; Gridded GR and SCA + obs_monthly = new(dimsizes(obs_data), float) + copy_VarAtts(obs_data, obs_monthly) + + do itim = 6, dimsizes(obs_data&time)-7 + obs_monthly(itim, :, :) = obs_data(itim + 6, :, :) \ + - obs_data(itim - 6, :, :) + end do + + do itim = 0, nyear - 1 + ; GR + obs_cum_mv = new(dimsizes(obs_monthly(0, :, :)), float) + do ilat = 0, dimsizes(obs_monthly(0, :, 0)) - 1 + do ilon = 0, dimsizes(obs_monthly(0, 0, :)) - 1 + obs_cum_mv(ilat, ilon) = num(ismissing(\ + obs_monthly(itim*12:itim*12+11, ilat, ilon))) + end do + end do + obs_growth(itim, :, :) = \ + where(obs_cum_mv.le.min_nmonth, \ + dim_avg_n(obs_monthly(itim*12:itim*12+11, :, :), 0), \ + obs_growth(itim, :, :)) + + ; SCA + obs_detrended = new(dimsizes(obs_monthly), float) + obs_cum_mv_sca = new(dimsizes(obs_monthly(0, :, :)), float) + + do ilat = 0, dimsizes(obs_monthly(0, :, 0)) - 1 + do ilon = 0, dimsizes(obs_monthly(0, 0, :)) - 1 + gr_yr = where(ismissing(obs_monthly(itim*12:itim*12+11, ilat, \ + ilon)), \ + obs_growth(itim, ilat, ilon), \ + obs_monthly(itim*12:itim*12+11, ilat, ilon)) + obs_detrended(itim*12:itim*12+11, ilat, ilon) = \ + obs_data(itim*12:itim*12+11, ilat, ilon) - \ + dim_cumsum_n(gr_yr/12., 1, 0) + delete(gr_yr) + + obs_cum_mv_sca(ilat, ilon) = num(ismissing(\ + obs_detrended(itim*12:itim*12+11, ilat, ilon))) + end do + end do + obs_seasamp(itim, :, :) = \ + where(obs_cum_mv_sca.le.min_nmonth, \ + dim_max_n(obs_detrended(itim*12:itim*12+11, :, :), 0) - \ + dim_min_n(obs_detrended(itim*12:itim*12+11, :, :), 0), \ + obs_seasamp(itim, :, :)) + end do + + obs_map_lon = dim_avg_n(obs_seasamp, 2) + + coslat = cos(obs_seasamp&lat/180*3.14159) + obs_map_latavg = dim_avg_wgt_n(obs_map_lon, coslat, 1, 1) + + end if + + ; Gridded GR and SCA + mod_mv_monthly = new(dimsizes(mod_mv), float) + copy_VarAtts(mod_mv, mod_mv_monthly) + mod_nomv_monthly = new(dimsizes(mod_nomv), float) + copy_VarAtts(mod_nomv, 
mod_nomv_monthly) + do itim = 6, dimsizes(obs_data&time) - 7 + mod_mv_monthly(itim, :, :) = mod_mv(itim+6, :, :) - mod_mv(itim-6, :, :) + mod_nomv_monthly(itim, :, :) = \ + mod_nomv(itim+6, :, :) - mod_nomv(itim-6, :, :) + end do + + do itim = 0, nyear - 1 + ; GR + mod_cum_mv = new(dimsizes(mod_mv(0, :, :)), float) + mod_cum_nomv = new(dimsizes(mod_nomv(0, :, :)), float) + do ilat = 0, dimsizes(obs_monthly(0, :, 0)) - 1 + do ilon = 0, dimsizes(obs_monthly(0, 0, :)) - 1 + mod_cum_mv(ilat, ilon) = num(ismissing(\ + mod_mv_monthly(itim*12:itim*12+11, ilat, ilon))) + mod_cum_nomv(ilat, ilon) = num(ismissing(\ + mod_nomv_monthly(itim*12:itim*12+11, ilat, ilon))) + end do + end do + + mod_mv_growth(itim, :, :) = \ + where(mod_cum_mv.le.min_nmonth, \ + dim_avg_n(mod_mv_monthly(itim*12:itim*12+11, :, :), 0), \ + mod_mv_growth(itim, :, :)) + mod_nomv_growth(itim, :, :) = \ + where(mod_cum_nomv.le.min_nmonth, \ + dim_avg_n(mod_nomv_monthly(itim*12:itim*12+11, :, :), 0), \ + mod_nomv_growth(itim, :, :)) + + ; SCA + mod_mv_detrended = new(dimsizes(mod_mv), float) + mod_nomv_detrended = new(dimsizes(mod_nomv), float) + mod_cum_mv_sca = new(dimsizes(mod_mv_monthly(0, :, :)), float) + mod_cum_nomv_sca = new(dimsizes(mod_nomv_monthly(0, :, :)), float) + + do ilat = 0, dimsizes(obs_monthly(0, :, 0)) - 1 + do ilon = 0, dimsizes(obs_monthly(0, 0, :)) - 1 + + gr_yr = where(ismissing(mod_mv_monthly(itim*12:itim*12+11, \ + ilat, ilon)), \ + mod_mv_growth(itim, ilat, ilon), \ + mod_mv_monthly(itim*12:itim*12+11, ilat, ilon)) + mod_mv_detrended(itim*12:itim*12+11, ilat, ilon) = \ + mod_mv(itim*12:itim*12+11, ilat, ilon) \ + - dim_cumsum_n(gr_yr/12., 1, 0) + delete(gr_yr) + + gr_yr = where(ismissing(mod_nomv_monthly(itim*12:itim*12+11, \ + ilat, ilon)), \ + mod_nomv_growth(itim, ilat, ilon),\ + mod_nomv_monthly(itim*12:itim*12+11, ilat, ilon)) + mod_nomv_detrended(itim*12:itim*12+11, ilat, ilon) = \ + mod_nomv(itim*12:itim*12+11, ilat, ilon) \ + - dim_cumsum_n(gr_yr/12., 1, 0) + delete(gr_yr) + + mod_cum_mv_sca(ilat, ilon) = num(ismissing(\ + mod_mv_detrended(itim*12:itim*12+11, ilat, ilon))) + mod_cum_nomv_sca(ilat, ilon) = num(ismissing(\ + mod_nomv_detrended(itim*12:itim*12+11, ilat, ilon))) + end do + end do + mod_mv_seasamp(itim, :, :) = \ + where(mod_cum_mv_sca.le.min_nmonth, \ + dim_max_n(mod_mv_detrended(itim*12:itim*12+11, :, :), 0) \ + - dim_min_n(mod_mv_detrended(itim*12:itim*12+11, :, :), 0), \ + mod_mv_seasamp(itim, :, :)) + mod_nomv_seasamp(itim, :, :) = \ + where(mod_cum_nomv_sca.le.min_nmonth, \ + dim_max_n(mod_nomv_detrended(itim*12:itim*12+11, :, :), 0) \ + - dim_min_n(mod_nomv_detrended(itim*12:itim*12+11, :, :), 0), \ + mod_nomv_seasamp(itim, :, :)) + end do + + ; ##################################################################### + ; ##################### PLOTTING #################################### + ; ##################################################################### + ; Set up the different colormaps to be used + ; Non-divergent + non_div_cm = read_colormap_file("MPL_viridis") + non_div_cm = non_div_cm(::-1, :) + ; Divergent + div_cm = read_colormap_file("ncl_default") + + ; Plot seasonal cycle amplitude maps with and without obs sampling + if DATASETS(imod) .eq. 
"multi-model mean" then + fname_dataset = "multi-model_mean" + else + fname_dataset = DATASETS(imod) + end if + filename = experiments(0) + "_" + fname_dataset + "_xco2_mean_seas_cyc" \ + + "_" + start_year + "-" + end_year + outfile = plot_dir + "/" + filename + outfile_netcdf = work_dir + "/" + filename + ".nc" + wks = gsn_open_wks(file_type, outfile) + res = True + res@cnFillOn = True + res@cnLinesOn = False + res@gsnRightString = "" + res@lbLabelBarOn = True + res@gsnDraw = False + res@gsnFrame = False + res@gsnStringFontHeightF = 0.02 + res@tmXBLabelFontHeightF = 0.02 + res@tmYLLabelFontHeightF = 0.02 + res@cnConstFLabelOn = False + res@cnLineLabelsOn = False + res@tmLabelAutoStride = True + res@lbTitleOn = True + res@lbTitlePosition = "Bottom" + res@lbLabelFontHeightF = 0.02 + res@lbTitleFontHeightF = 0.025 + res@lbBottomMarginF = -0.2 + res@lbTopMarginF = 0.4 + res@lbTitleOffsetF = 0.2 + res@lbLabelBarOn = False + + seasamp_plots = new(4, graphic) + + res@gsnLeftString = "OBS to sampled " + DATASETS(imod) + " difference" + res@lbTitleString = "Seasonal cycle amplitude difference [" \ + + var0_units + "]" + seasamp_obsdiff = dim_avg_n_Wrap(mod_mv_seasamp, 0) + seasamp_obsdiff = dim_avg_n_Wrap(obs_seasamp, 0) \ + - dim_avg_n_Wrap(mod_mv_seasamp, 0) + seasamp_obsdiff@var = "obs_SCA_diff" + seasamp_diff = dim_avg_n_Wrap(mod_mv_seasamp, 0) \ + - dim_avg_n_Wrap(mod_nomv_seasamp, 0) + seasamp_diff@var = "SCA_diff_missing_values" + seasamp_diff!0 = "lat" + seasamp_diff&lat = mod_mv_growth&lat + seasamp_diff!1 = "lon" + seasamp_diff&lon = mod_mv_growth&lon + maxlev_obs = decimalPlaces(max(abs(seasamp_obsdiff)), 0, False) + maxlev_diff = decimalPlaces(max(abs(seasamp_diff)), 0, False) + maxlev = max((/maxlev_obs, maxlev_diff/)) + res@cnLevelSelectionMode = "ManualLevels" + res@cnMinLevelValF = -maxlev + res@cnMaxLevelValF = maxlev + res@cnLevelSpacingF = 1. + mean_plot = avg(seasamp_obsdiff) + res@gsnRightString = sprintf("%1.2f", mean_plot) + " " \ + + var0_units + res@cnFillPalette = div_cm + plot_seasamp_diff = gsn_csm_contour_map(wks, seasamp_obsdiff, res) + + res@gsnLeftString = "Unsampled to sampled " + DATASETS(imod) \ + + " difference" + res@lbTitleString = "Seasonal cycle amplitude difference [" \ + + var0_units + "]" + mean_plot = avg(seasamp_diff) + res@gsnRightString = sprintf("%1.2f", mean_plot) + " " \ + + var0_units + + seasamp_plots(3) = gsn_csm_contour_map(wks, seasamp_diff, res) + + ; to have a common label bar,plots should be set to the same interval + ; b/c the label bar is drawn from the interval of the first plot. + res@cnLevelSelectionMode = "ManualLevels" + res@cnMinLevelValF = 0. + res@cnMaxLevelValF = 12. + ; toint(max((/max(mod_mv_seasamp), \ + ; max(mod_nomv_seasamp), max(obs_seasamp)/))) + 1 + res@cnLevelSpacingF = 1. 
+ + res@gsnLeftString = "Sampled " + DATASETS(imod) + delete(res@cnFillPalette) + res@cnFillPalette = non_div_cm ; set color map + + mean_plot = avg(mod_mv_seasamp) + res@gsnRightString = sprintf("%1.2f", mean_plot) + " " \ + + var0_units + seasamp_plots(0) = \ + gsn_csm_contour_map(wks, dim_avg_n_Wrap(mod_mv_seasamp, 0), res) + + mean_plot = avg(mod_nomv_seasamp) + res@gsnRightString = sprintf("%1.2f", mean_plot) + " " \ + + var0_units + res@gsnLeftString = "Unsampled " + DATASETS(imod) + seasamp_plots(1) = \ + gsn_csm_contour_map(wks, dim_avg_n_Wrap(mod_nomv_seasamp, 0), res) + + res@gsnLeftString = "OBS" + + mean_plot = avg(obs_seasamp) + res@gsnRightString = sprintf("%1.2f", mean_plot) + " " \ + + var0_units + seasamp_plots(2) = \ + gsn_csm_contour_map(wks, dim_avg_n_Wrap(obs_seasamp, 0), res) + + pres = True + pres@gsnMaximize = True + pres@gsnFrame = False + pres@gsnPanelRowSpec = True + pres@gsnPanelCenter = False + pres@gsnPanelLabelBar = True + pres@gsnPanelBottom = 0.4 + pres@lbTitleOn = True + pres@lbTitlePosition = "Bottom" + pres@lbTitleString = "Mean seasonal cycle amplitude [" \ + + var0_units + "]" + pres@lbLabelFontHeightF = 0.012 + pres@lbTitleFontHeightF = 0.015 + pres@lbBottomMarginF = -0.2 + pres@lbTopMarginF = 0.3 + pres@lbTitleOffsetF = 0.2 + pres@gsnPanelYWhiteSpacePercent = 2. + + gsn_panel(wks, seasamp_plots(:2), (/2, 1/), pres) + + pres@gsnPanelTop = 0.4 + pres@gsnPanelBottom = 0.01 + + gsn_panel(wks, (/plot_seasamp_diff, seasamp_plots(3)/), (/2/), pres) + frame(wks) + delete(pres) + + ; Write data to netcdf + outfile_netcdf@existing = "overwrite" + ncdf_outfile = ncdf_write(obs_seasamp, outfile_netcdf) + outfile_netcdf@existing = "append" + ncdf_outfile = ncdf_write(mod_nomv_seasamp, outfile_netcdf) + ncdf_outfile = ncdf_write(mod_mv_seasamp, outfile_netcdf) + ncdf_outfile = ncdf_write(seasamp_diff, outfile_netcdf) + ncdf_outfile = ncdf_write(seasamp_obsdiff, outfile_netcdf) + + ; Provenance + log_provenance(ncdf_outfile, \ + outfile + "." + file_type, \ + "Maps of mean Seasonal Cycle Amplitude of " + var0 + " " \ + + DATASETS(imod) + " for " + start_year + "-" \ + + end_year + ". Top: SCA with observational sampling (left)" \ + + ", without sampling (right). Middle: SCA of observations." \ + + "Bottom: Difference between observations and models " \ + + "(left) and sampled and unsampled model (right)." \ + + " Similar to Gier et al. 2020, Figure 6.", \ + (/"mean", "diff"/), \ + DOMAIN, \ + "geo", \ + AUTHORS, \ + REFERENCES, \ + ALL_FILES) + + ; ------------------------- Collect for panel plots + seasamp_avg = dim_avg_n_Wrap(mod_nomv_seasamp, 0) + if .not. isdefined("sca_p_array") then + sca_p_dims = array_append_record(dimsizes(mod_inds), \ + dimsizes(seasamp_avg), 0) + dim = 0 + sca_p_array = new(sca_p_dims, float) + sca_p_array!0 = "models" + sca_p_array&models = DATASETS(mod_inds) + sca_p_array!1 = "lat" + sca_p_array&lat = seasamp_avg&lat + sca_p_array!2 = "lon" + sca_p_array&lon = seasamp_avg&lon + sca_p_array@var = "mean_seasonal_cycle_amplitude" + sca_p_array@diag_script = DIAG_SCRIPT + delete(sca_p_dims) + end if + sca_p_array(dim, :, :) = (/seasamp_avg/) + dim = dim + 1 + delete(seasamp_avg) + + delete(mod_var) + delete(mod_mv) + delete(mod_nomv) + delete(res@cnFillPalette) + end do ; Loop over models + + ; ---------------- Make panel plot + + do imod = 0, dimsizes(mod_inds) - 1 + + if .not. 
isdefined("sca_panel_plots") then + filename_scap = var0 + "_" + experiments(0) \ + + "_SCApanels_" \ + + start_year + "-" + end_year + outfile_scap = plot_dir + "/" + filename_scap + outfile_netcdf_scap = work_dir + "/" + filename_scap + ".nc" + wks_scap = gsn_open_wks(file_type, outfile_scap) + sca_panel_plots = new(dimsizes(mod_inds), graphic) + res_sca_panel_plots = True + res_sca_panel_plots@cnFillOn = True + res_sca_panel_plots@cnLinesOn = False + res_sca_panel_plots@lbLabelBarOn = False + res_sca_panel_plots@gsnDraw = False + res_sca_panel_plots@gsnFrame = False + res_sca_panel_plots@cnLevelSelectionMode = "ManualLevels" + res_sca_panel_plots@cnMinLevelValF = 0. + res_sca_panel_plots@cnMaxLevelValF = diag_script_info@contour_max_level + res_sca_panel_plots@cnLevelSpacingF = 1. + res_sca_panel_plots@gsnStringFontHeightF = 0.03 + res_sca_panel_plots@tmXBLabelFontHeightF = 0.025 + res_sca_panel_plots@tmYLLabelFontHeightF = 0.025 + res_sca_panel_plots@cnConstFLabelOn = False + res_sca_panel_plots@cnLineLabelsOn = False + res_sca_panel_plots@tmLabelAutoStride = True + res_sca_panel_plots@gsnZonalMean = True + res_sca_panel_plots@gsnZonalMeanXMinF = 0 + res_sca_panel_plots@gsnZonalMeanXMaxF = \ + diag_script_info@contour_max_level + res_sca_panel_plots@amOrthogonalPosF = .2 + res_sca_panel_plots@cnFillPalette = non_div_cm ; set color map + res_sca_panel_plots@gsnRightStringOrthogonalPosF = -0.005 + res_sca_panel_plots@gsnLeftStringOrthogonalPosF = -0.005 + end if + res_sca_panel_plots@gsnLeftString = mod_names(imod) + mean_sca_mod = avg(sca_p_array(imod, :, :)) + res_sca_panel_plots@gsnRightString = \ + sprintf("%1.2f", mean_sca_mod) + " " + var0_units + sca_panel_plots(imod) = \ + gsn_csm_contour_map(wks_scap, sca_p_array(imod, :, :), \ + res_sca_panel_plots) + end do + + pres = True + pres@gsnMaximize = True + pres@gsnFrame = False + pres@gsnPanelCenter = False + pres@gsnPanelLabelBar = True + pres@gsnPanelBottom = 0.1 + pres@lbTitleOn = True + pres@lbTitlePosition = "Bottom" + pres@lbTitleString = \ + "Mean seasonal cycle amplitude [" + var0_units + "]" + pres@lbLabelFontHeightF = 0.012 + pres@lbTitleFontHeightF = 0.015 + pres@lbBottomMarginF = -1 + pres@lbTopMarginF = 0.8 + pres@lbTitleOffsetF = 0.2 + + ncol = toint(floor(sqrt(dimsizes(mod_inds)))) + nrow = toint(ceil((dimsizes(mod_inds)/tofloat(ncol)))) + gsn_panel(wks_scap, sca_panel_plots, (/nrow, ncol/), pres) + frame(wks_scap) + delete(res_sca_panel_plots) + delete(pres) + + ncdf_outfile = ncdf_write(sca_p_array, outfile_netcdf_scap) + + ; Provenance + log_provenance(ncdf_outfile, \ + outfile_scap + "." + file_type, \ + "Maps of mean Seasonal Cycle Amplitude of " \ + + var0 + " for " + start_year + "-" \ + + end_year + ". Similar to Gier et al 2020, Fig 5.", \ + (/"mean", "detrend"/), \ + DOMAIN, \ + "geo", \ + AUTHORS, \ + REFERENCES, \ + ALL_FILES) + + leave_msg(DIAG_SCRIPT, "") +end diff --git a/esmvaltool/diag_scripts/xco2_analysis/main.ncl b/esmvaltool/diag_scripts/xco2_analysis/main.ncl new file mode 100644 index 0000000000..0739e52edb --- /dev/null +++ b/esmvaltool/diag_scripts/xco2_analysis/main.ncl @@ -0,0 +1,667 @@ +; ############################################################################# +; xco2_analysis/main.ncl +; ############################################################################# +; Description +; Plotting timeseries and histograms for growth rate and seasonal cycle. +; +; Required diag_script_info attributes: +; styleset: styleset to use for plotting colors, linestyles... 
+; region: latitude range for averaging +; masking: different masking options are available to use on dataset: +; "none" - no masking +; "obs" - observational masking +; ensemble_mean: if true calculates multi-model mean only accounting for +; the ensemble member named in "ensemble_refs" +; +; Optional diag_script_info attributes: +; output_file_type: output file type for plots. Default: png +; ensemble_refs: list of model-ensemble pairs to denote which ensemble +; member to use for calculating multi-model mean. required if +; ensemble_mean = true +; var_plotname: String formatting how variable should be named in plots +; defaults to short_name if not assigned +; +; Caveats +; +; Modification history +; 20201116-gier_bettina: Added provenance, clean up +; 20200226-gier_bettina: Adapted to version 2 +; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/latlon.ncl" +load "$diag_scripts/shared/scaling.ncl" +load "$diag_scripts/shared/set_operators.ncl" +load "$diag_scripts/shared/statistics.ncl" + +load "$diag_scripts/shared/plot/scatterplot.ncl" +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/xy_line.ncl" + +load "$diag_scripts/xco2_analysis/stat.ncl" + +begin + + enter_msg(DIAG_SCRIPT, "") + AUTHORS = (/"gier_bettina"/) + REFERENCES = (/"gier20bg"/) + + ; Variable + var0 = variable_info[0]@short_name + + ; Input data + INFO0 = select_metadata_by_name(input_file_info, var0) + DATASETS = metadata_att_as_array(INFO0, "dataset") + DATASETS := array_append_record(DATASETS, "multi-model mean", 0) + experiments = metadata_att_as_array(INFO0, "exp") + ensembles = metadata_att_as_array(INFO0, "ensemble") + dim_MOD = dimsizes(DATASETS) + ALL_FILES = metadata_att_as_array(INFO0, "filename") + + log_info("++++++++++++++++++++++++++++++++++++++++++") + log_info(DIAG_SCRIPT + " (var: " + var0 + ")") + log_info("++++++++++++++++++++++++++++++++++++++++++") + +end +begin + ; Maximum amount of missing values per year + min_nmonth = 7 + + ; Prepare region + lat_min = diag_script_info@region(0) + lat_max = diag_script_info@region(1) + + if lat_min.eq.(-90) .and. lat_max.eq.90 then + region = "global" + else if lat_min.eq.(30) .and. lat_max.eq.60 then + region = "nhmidlat" + else if lat_min.eq.(-60) .and. lat_max.eq.(-30) then + region = "shmidlat" + else if lat_min.eq.(-30) .and. lat_max.eq.(30) then + region = "trop" + else if lat_min.eq.(0) .and. lat_max.eq.(90) then + region = "nh" + else if lat_min.eq.(-90) .and. 
lat_max.eq.0 then + region = "sh" + else + region = "lat_" + tostring(lat_min) + "_" + tostring(lat_max) + end if + end if + end if + end if + end if + end if + DOMAIN = (/region/) + + ; Plot file type + if (isatt(diag_script_info, "output_file_type")) then + file_type = diag_script_info@output_file_type + elseif (isatt(config_user_info, "output_file_type")) then + file_type = config_user_info@output_file_type + else + file_type = "png" + end if + + ; Output directories + plot_dir = config_user_info@plot_dir + system("mkdir -p " + plot_dir) + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) + + ; Determine start + end year + start_years = metadata_att_as_array(INFO0, "start_year") + start_year = min(start_years) + end_years = metadata_att_as_array(INFO0, "end_year") + end_year = max(end_years) + nyear = end_year - start_year + 1 + ntime = nyear*12 + time = new(ntime, float) + do yy = start_year, end_year + do mm = 0, 11 + time(12 * (yy - start_year) + mm) = yy + (mm + 0.5)/12. + end do + end do + + if (isatt(INFO0[0], "reference_dataset")) then + ref_ind = ind(DATASETS.eq.INFO0[0]@reference_dataset) + else + ref_ind = -999 + end if + + mod_inds = ind(DATASETS.ne.INFO0[0]@reference_dataset) + + ; Need array that maps obs to index 0, the other models to following indices + mapping_array = new(dimsizes(DATASETS), integer) + if ref_ind.ne.-999 then + mapping_array(ref_ind) = 0 + end if + mapping_array(mod_inds) = ispan(1, dimsizes(DATASETS)-1, 1) + + ; For MMM calculation + if ref_ind.eq."-999" then + subtract_mmm = 1 + else + subtract_mmm = 2 + end if + + ; Check if ensembles members should be handled as ensemble or separately + calc_ensemble_mean = diag_script_info@ensemble_mean + if calc_ensemble_mean then + ensemble_refs = diag_script_info@ensemble_refs + end if + ; find unique models + unique_models = get_unique_values(DATASETS) + dim_unique_MOD = dimsizes(unique_models) + + model_arr = new((/dim_MOD, ntime/), float) + model_arr!0 = "model" + model_arr!1 = "time" + model_arr&model = array_append_record("OBS", DATASETS(mod_inds), 0) + model_arr&time = time + model_arr@var = var0 + model_arr@diag_script = (/DIAG_SCRIPT/) + + gr_arr = model_arr + gr_arr@var = "gr" + sca_arr = model_arr + sca_arr@var = "sca" + + growth_series = new((/dim_MOD, nyear/), float) + growth_series!0 = "model" + growth_series!1 = "time" + growth_series&model = array_append_record("OBS", DATASETS(mod_inds), 0) + growth_series&time = ispan(start_year, end_year, 1) + + amp_series = growth_series + amp_series@long_name = "Seasonal Cycle Amplitude" + amp_series@var_yaxis = True + + opt_mask = "_" + str_join(diag_script_info@masking, "_") + + ; Formatted varname for plots + if (isatt(diag_script_info, "var_plotname")) then + var0_plotname = diag_script_info@var_plotname + else + var0_plotname = var0 + end if + +end +begin + ; First read obs to have it ready for masking + obs_data = read_data(INFO0[ref_ind]) + + model_arr@units = obs_data@units + gr_arr@units = obs_data@units + " yr-1" + growth_series@units = gr_arr@units + amp_series@units = obs_data@units + + do imod = 0, dim_MOD - 1 + if DATASETS(imod).eq."multi-model mean" then + if calc_ensemble_mean then + var0_mod = mmm_array / tofloat(dim_unique_MOD - subtract_mmm) + else + var0_mod = mmm_array / tofloat(dimsizes(DATASETS) - subtract_mmm) + end if + copy_VarMeta(mmm_array, var0_mod) + else + ; Check if it's an ensemble member + if calc_ensemble_mean then + if imod.eq.ref_ind then + mmm_member = False + elseif .not. 
calc_ensemble_mean then
+          mmm_member = True
+        elseif all(ensemble_refs(:, 0).ne.DATASETS(imod)) then
+          mmm_member = True
+        else
+          ensemble_ref_ind = ind(ensemble_refs(:, 0).eq.DATASETS(imod))
+          if ensemble_refs(ensemble_ref_ind, 1) .eq. ensembles(imod) then
+            mmm_member = True
+          else
+            mmm_member = False
+          end if
+        end if
+      else
+        mmm_member = True
+      end if
+      if (imod.ne.ref_ind) then
+        tmp = read_data(INFO0[imod])
+        var0_mod = tofloat(tmp)
+        copy_VarMeta(tmp, var0_mod)
+        delete(tmp)
+
+        if mmm_member then
+          if .not. isdefined("mmm_array") then
+            mmm_array = area_hi2lores_Wrap(var0_mod&lon, var0_mod&lat, \
+                                           var0_mod, True, 1, obs_data&lon, \
+                                           obs_data&lat, False)
+          else
+            var_reg = area_hi2lores(var0_mod&lon, var0_mod&lat, var0_mod, \
+                                    True, 1, mmm_array&lon, mmm_array&lat, \
+                                    False)
+            mmm_array = mmm_array + (/var_reg/)
+            delete(var_reg)
+          end if
+        end if
+      end if
+    end if
+
+    if imod.eq.ref_ind then
+      var0_reg = obs_data
+    else
+      if any(diag_script_info@masking .eq. "obs") then
+        var0_reg = area_hi2lores_Wrap(var0_mod&lon, var0_mod&lat, var0_mod, \
+                                      True, 1, obs_data&lon, obs_data&lat, \
+                                      False)
+        var0_reg = where(ismissing(obs_data), obs_data, var0_reg)
+        delete(var0_mod)
+      else
+        var0_reg = var0_mod
+        delete(var0_mod)
+      end if
+    end if
+
+    var0_avg = area_operations(var0_reg, lat_min, lat_max, 0, 360, \
+                               "average", True)
+    model_arr(mapping_array(imod), :) = (/var0_avg/)
+    gr_arr(mapping_array(imod), :) = (/calc_gr(var0_avg, "monthly", \
+                                               min_nmonth)/)
+    mnth_filled = calc_gr(var0_avg, "monthlyfilled", min_nmonth)
+    sca_arr(mapping_array(imod), :) = (/(var0_avg - \
+                                         dim_cumsum(mnth_filled/12., 1)) - \
+                                        avg(var0_avg - \
+                                            dim_cumsum(mnth_filled/12., 1))/)
+    growth_series(mapping_array(imod), :) = (/calc_gr(var0_avg, "yearly", \
+                                                      min_nmonth)/)
+    amp_series(mapping_array(imod), :) = (/calc_sca(var0_avg, min_nmonth)/)
+    delete([/var0_avg, var0_reg, mnth_filled/])
+  end do  ; End loop over models
+
+  ; ---------------------------------------------------------------------------
+  ; -------------------------- PLOTTING ---------------------------------------
+  ; ---------------------------------------------------------------------------
+
+  ; ---------------------------------------------------------------------------
+  ; ---------------------- TIMESERIES PANELS ----------------------------------
+  ; ---------------------------------------------------------------------------
+
+  filename_ts_panels = var0 + "_" + experiments(0) + "_" + region \
+    + "_time_series_panels_" + start_year + "-" + end_year + opt_mask
+
+  outfile_netcdf_ts_panels = work_dir + "/" + filename_ts_panels + ".nc"
+  model_arr@ncdf = outfile_netcdf_ts_panels
+  gr_arr@ncdf = outfile_netcdf_ts_panels
+  sca_arr@ncdf = outfile_netcdf_ts_panels
+
+  outfile_netcdf_ts_panels@existing = "overwrite"
+  ncdf_outfile = ncdf_write(model_arr, outfile_netcdf_ts_panels)
+  outfile_netcdf_ts_panels@existing = "append"
+  ncdf_outfile = ncdf_write(gr_arr, outfile_netcdf_ts_panels)
+  ncdf_outfile = ncdf_write(sca_arr, outfile_netcdf_ts_panels)
+
+  outfile_ts_panels = plot_dir + "/" + filename_ts_panels
+  wks = gsn_open_wks(file_type, outfile_ts_panels)
+
+  colors = project_style(INFO0, diag_script_info, "colors")
+  colors := array_append_record(colors, "red", 0)
+  ; Thicker lines for OBS (first) and MMM (last)
+  thicks = new(dim_MOD, float)
+  thicks(0) = 6.
+  thicks(1:dim_MOD-2) = 3.
+  thicks(dim_MOD-1) = 6.
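+  ; Ensemble members of the same model share its colour; the loop further
+  ; below distinguishes them by dash patterns derived from the realization
+  ; number in the ensemble id (assuming rNiNpN-style ids such as r1i1p1).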
+ dashes = project_style(INFO0, diag_script_info, "dashes") + dashes := array_append_record(dashes, 0, 0) + + labels_ini = model_arr&model + if dim_unique_MOD .ne. dim_MOD then + inds = ispan(0, dim_MOD - 1, 1) + do imod = 0, dim_MOD - 1 + other_sets = DATASETS(ind(inds.ne.imod)) + if any(other_sets.eq.DATASETS(imod)) then + labels_ini(imod) = labels_ini(imod) + " " + ensembles(imod) + get_r = str_split(ensembles(imod), "ri") + dashes(imod) = toint(get_r(0)) - 1 + end if + delete(other_sets) + end do + end if + + ; Plot series + res = True + res@tiYAxisFontHeightF = 0.02 + res@tmYLLabelFontHeightF = 0.02 + res@tiYAxisString = var0_plotname + " [" + model_arr@units + "]" + res@tiXAxisFontHeightF = 0.02 + res@tmXBLabelFontHeightF = 0.02 + res@pmTickMarkDisplayMode = "Conditional" + res@tiMainFontHeightF = 0.02 + res@tiMainString = "" + res@gsnMaximize = True + res@gsnDraw = False + res@gsnFrame = False + res@trXMinF = start_year + res@trXMaxF = end_year + 1 + res@vpHeightF = 0.3 + res@vpWidthF = 0.8 + res@tmLabelAutoStride = True + res@xyLineColors = colors ; one color per dataset + res@xyLineThicknesses = thicks ; thicker lines for OBS and MMM + res@xyMonoDashPattern = False + res@xyDashPatterns = dashes + res@tmXBFormat = "f" + + plots = new(3, graphic) + + res@trYMinF = min(model_arr) - 0.05 * (max(model_arr) - min(model_arr)) + res@trYMaxF = max(model_arr) + 0.05 * (max(model_arr) - min(model_arr)) + + plots(0) = gsn_csm_xy(wks, model_arr&time, model_arr, res) + ; Redraw obs to make it more visible on top + respoly = True + respoly@gsLineThicknessF = thicks(0) + plots@poly0 = gsn_add_polyline(wks, plots(0), model_arr&time, \ + model_arr(0, :), respoly) + + res@tiYAxisString = "Growth Rate [" + model_arr@units + " yr~S~-1~N~]" + res@trYMinF = min(gr_arr) - 0.05 * (max(gr_arr) - min(gr_arr)) + res@trYMaxF = max(gr_arr) + 0.05 * (max(gr_arr) - min(gr_arr)) + + plots(1) = gsn_csm_xy(wks, gr_arr&time, gr_arr, res) + plots@poly1 = gsn_add_polyline(wks, plots(1), gr_arr&time, \ + gr_arr(0, :), respoly) + + res@trYMinF = min(sca_arr) - 0.05 * (max(sca_arr) - min(sca_arr)) + res@trYMaxF = max(sca_arr) + 0.05 * (max(sca_arr) - min(sca_arr)) + res@tiYAxisString = "Seasonal Cycle [" + model_arr@units + "]" + plots(2) = gsn_csm_xy(wks, sca_arr&time, sca_arr, res) + plots@poly2 = gsn_add_polyline(wks, plots(2), sca_arr&time, \ + sca_arr(0, :), respoly) + + ; Calc annual mean GR correlations + gr_corrs = new(dim_MOD-1, float) + gr_corrs!0 = "models" + gr_corrs&models = labels_ini(1:) + + ; Calc SCA correlations + sca_corrs = gr_corrs + + do imod = 1, dim_MOD - 1 + gr_corrs(imod-1) = escorc(growth_series(0, :), growth_series(imod, :)) + corr = escorc(sca_arr(0, :), sca_arr(imod, :)) + sca_corrs(imod-1) = corr + end do + print("Annual Mean Growth Rate correlation to observations") + print(gr_corrs&models + ": " + gr_corrs) + + print("SCA Correlation to observations") + print(sca_corrs&models + ": " + sca_corrs) + + lgres = True + lgres@vpWidthF = 0.25 + lgres@vpHeightF = 0.5 + lgres@lgLineColors = colors(::-1) + lgres@lgMonoDashIndex = False + lgres@lgDashIndexes = dashes(::-1) + lgres@lgMonoLineThickness = False + lgres@lgLineThicknesses = thicks(::-1) + lgres@lgPerimOn = False + lgres@lgItemType = "Lines" + + lgres@lgAutoManage = False + lgres@lgLabelFontHeightF = 0.06 + + labels = labels_ini(::-1) + nlabels = dim_MOD + legend = gsn_create_legend(wks, nlabels, labels, lgres) + + anres = True + anres@amJust = "TopLeft" + ; Parallel -> x-direction + 
anres@amParallelPosF = 0.53 + anres@amOrthogonalPosF = -0.5 + annoid1 = gsn_add_annotation(plots(0), legend, anres) + + res1 = True + res2 = True + res1@gsnMaximize = True + res2@gsnAttachPlotsXAxis = True + + newplot = gsn_attach_plots(plots(0), plots(1:), res1, res2) + + draw(plots(0)) + + frame(wks) + delete(res) + + ; Provenance + log_provenance(ncdf_outfile, \ + outfile_ts_panels + "." + file_type, \ + "Timeseries, growth rate and seasonal cycle of " \ + + var0 + " for " + region + ", " + start_year + "-" \ + + end_year + ". Using masking: " + opt_mask \ + + ". Similar to Gier et al. 2020, Fig. 3.", \ + (/"mean", "diff", "detrend"/), \ + DOMAIN, \ + "times", \ + AUTHORS, \ + REFERENCES, \ + ALL_FILES) + + ; --------------------------------------------------------------------------- + ; ---------------------- BARPLOTS ------------------------------------------- + ; --------------------------------------------------------------------------- + + ; Set colors so first = OBS is dark grey and last = MMM is red + bp_colors = new(dimsizes(growth_series&model), string) + bp_colors = "skyblue" + bp_colors(0) = "grey40" + bp_colors(dimsizes(growth_series&model) - 1) = "red" + + ; ---- Growth Rate + filename_grbp = var0 + "_" + experiments(0) \ + + "_" + region + "_" + (start_year) + "-" + (end_year) \ + + "_barplot_grRates" + opt_mask + outfile_grbp = plot_dir + "/" + filename_grbp + outfile_netcdf_grbp = work_dir + "/" + filename_grbp + ".nc" + + wks = gsn_open_wks(file_type, outfile_grbp) + + gr_Rates = new((/dimsizes(growth_series&model), 2/), float) + gr_Rates(:, 0) = dim_avg_n(growth_series, 1) + gr_Rates(:, 1) = dim_stddev_n(growth_series, 1) + gr_Rates@units = model_arr@units + " yr~S~-1~N~" + gr_Rates!0 = "models" + gr_Rates&models = growth_series&model + gr_Rates!1 = "statistics" + gr_Rates&statistics = (/"mean", "stddev"/) + gr_Rates@var = "gr" + gr_Rates@ncdf = outfile_netcdf_grbp + gr_Rates@diag_script = (/DIAG_SCRIPT/) + ncdf_outfile = ncdf_write(gr_Rates, outfile_netcdf_grbp) + + ; Sort by size + sort_by = gr_Rates(:, 0) + qsort(sort_by) + perm_idx = new(dimsizes(sort_by), integer) + do idim = 0, dimsizes(sort_by) - 1 + perm_idx(idim) = ind(sort_by(idim).eq.gr_Rates(:, 0)) + end do + gr_Rates_sort = gr_Rates(perm_idx, :) + + res = True + res@gsnDraw = False + res@gsnFrame = False + res@gsnMaximize = True + res@gsnXYBarChart = True + res@tmXBMode = "Explicit" + res@tmXBValues = ispan(0, dimsizes(growth_series&model) - 1, 1) + res@tmXBLabels = labels_ini(perm_idx) + res@tmXBLabelAngleF = 60. + res@tmXBLabelJust = "CenterRight" + res@tmXBLabelFontHeightF = 0.014 + res@gsnYRefLine = 0. + res@trYMinF = 0. + res@trYMaxF = max(gr_Rates(:, 0) + gr_Rates(:, 1)) + 0.15 + res@trXMinF = -1. + res@trXMaxF = dimsizes(growth_series&model) + res@tiYAxisString = "Average growth rate [" + gr_Rates@units + "]" + res@tiYAxisFontHeightF = 0.02 + res@gsnXYBarChartBarWidth = 0.75 + res@gsnXYBarChartColors = bp_colors(perm_idx) + + x = ispan(0, dimsizes(growth_series&model)-1, 1) + + plot_grbp = gsn_csm_xy(wks, x, gr_Rates_sort(:, 0), res) + + ; Add errorbars + + err_wdth = 0.33 * res@gsnXYBarChartBarWidth + + lres = True + lres@gsLineThicknessF = 2. 
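+ + ; Each errorbar below is drawn as three polylines: a vertical bar spanning + ; mean +/- one standard deviation, plus horizontal caps of width 2 * err_wdth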
+ + do imod = 0, dimsizes(growth_series&model) - 1 + plot_grbp@$unique_string("errbar")$ = \ + gsn_add_polyline(wks, plot_grbp, (/imod, imod/), \ + (/gr_Rates_sort(imod, 0) + gr_Rates_sort(imod, 1), \ + gr_Rates_sort(imod, 0) - gr_Rates_sort(imod, 1)/), \ + lres) + plot_grbp@$unique_string("errbar")$ = \ + gsn_add_polyline(wks, plot_grbp, (/imod + err_wdth, imod - err_wdth/), \ + (/gr_Rates_sort(imod, 0) + gr_Rates_sort(imod, 1), \ + gr_Rates_sort(imod, 0) + gr_Rates_sort(imod, 1)/), \ + lres) + plot_grbp@$unique_string("errbar")$ = \ + gsn_add_polyline(wks, plot_grbp, (/imod + err_wdth, imod - err_wdth/), \ + (/gr_Rates_sort(imod, 0) - gr_Rates_sort(imod, 1), \ + gr_Rates_sort(imod, 0) - gr_Rates_sort(imod, 1)/), \ + lres) + end do + + draw(plot_grbp) + frame(wks) + + delete(res) + delete(lres) + + ; Provenance + log_provenance(ncdf_outfile, \ + outfile_grbp + "." + file_type, \ + "Growth Rate histogram of " \ + + var0 + " for " + region + ", " + start_year + "-" \ + + end_year + ". Using masking: " + opt_mask \ + + ". Similar to Gier et al. 2020, Fig. 4.", \ + (/"mean", "stddev"/), \ + DOMAIN, \ + "histogram", \ + AUTHORS, \ + REFERENCES, \ + ALL_FILES) + + ; --------------------------------------------------------------------------- + ; ---- Seasonal Cycle Amplitude --------------------------------------------- + filename_sabp = var0 + "_" + experiments(0) + "_" + region \ + + "_" + (start_year) + "-" + (end_year) + "_barplot_SA" + opt_mask + outfile_sabp = plot_dir + "/" + filename_sabp + outfile_netcdf_sabp = work_dir + "/" + filename_sabp + ".nc" + + wks = gsn_open_wks(file_type, outfile_sabp) + + sa_Rates = new((/dimsizes(amp_series&model), 2/), float) + sa_Rates(:, 0) = dim_avg_n(amp_series, 1) + sa_Rates(:, 1) = dim_stddev_n(amp_series, 1) + sa_Rates!0 = "models" + sa_Rates&models = amp_series&model + sa_Rates!1 = "statistics" + sa_Rates&statistics = (/"mean", "stddev"/) + sa_Rates@units = model_arr@units + sa_Rates@var = "sca" + sa_Rates@ncdf = outfile_netcdf_sabp + sa_Rates@diag_script = (/DIAG_SCRIPT/) + ncdf_outfile = ncdf_write(sa_Rates, outfile_netcdf_sabp) + + ; Sort by size + sort_by = sa_Rates(:, 0) + qsort(sort_by) + perm_idx = new(dimsizes(sort_by), integer) + do idim = 0, dimsizes(sort_by) - 1 + perm_idx(idim) = ind(sort_by(idim).eq.sa_Rates(:, 0)) + end do + sa_Rates_sort = sa_Rates(perm_idx, :) + + res = True + res@gsnDraw = False + res@gsnFrame = False + res@gsnMaximize = True + res@gsnXYBarChart = True + res@tmXBMode = "Explicit" + res@tmXBValues = ispan(0, dimsizes(amp_series&model) - 1, 1) + res@tmXBLabels = labels_ini(perm_idx) + res@tmXBLabelAngleF = 60. + res@tmXBLabelJust = "CenterRight" + res@tmXBLabelFontHeightF = 0.014 + res@gsnYRefLine = 0. + res@trYMinF = 0. + res@trYMaxF = max(sa_Rates(:, 0) + sa_Rates(:, 1)) + 0.15 + res@trXMinF = -1. + res@trXMaxF = dimsizes(amp_series&model) + res@tiYAxisString = "Seasonal Cycle Amplitude [" + sa_Rates@units + "]" + res@tiYAxisFontHeightF = 0.02 + res@gsnXYBarChartBarWidth = 0.75 + res@gsnXYBarChartColors = bp_colors(perm_idx) + + x = ispan(0, dimsizes(amp_series&model)-1, 1) + + plot_sabp = gsn_csm_xy(wks, x, sa_Rates_sort(:, 0), res) + + ; Add errorbars + + err_wdth = 0.33 * res@gsnXYBarChartBarWidth + + lres = True + lres@gsLineThicknessF = 2. 
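+ + ; Same three-polyline errorbars as for the growth-rate barplot above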
+ + do imod = 0, dimsizes(amp_series&model) - 1 + plot_sabp@$unique_string("errbar")$ = \ + gsn_add_polyline(wks, plot_sabp, (/imod, imod/), \ + (/sa_Rates_sort(imod, 0) + sa_Rates_sort(imod, 1), \ + sa_Rates_sort(imod, 0) - sa_Rates_sort(imod, 1)/), \ + lres) + plot_sabp@$unique_string("errbar")$ = \ + gsn_add_polyline(wks, plot_sabp, (/imod + err_wdth, imod - err_wdth/), \ + (/sa_Rates_sort(imod, 0) + sa_Rates_sort(imod, 1), \ + sa_Rates_sort(imod, 0) + sa_Rates_sort(imod, 1)/), \ + lres) + plot_sabp@$unique_string("errbar")$ = \ + gsn_add_polyline(wks, plot_sabp, (/imod + err_wdth, imod - err_wdth/), \ + (/sa_Rates_sort(imod, 0) - sa_Rates_sort(imod, 1), \ + sa_Rates_sort(imod, 0) - sa_Rates_sort(imod, 1)/), \ + lres) + end do + + draw(plot_sabp) + frame(wks) + + delete(plot_sabp) + delete(res) + delete(lres) + delete(err_wdth) + + ; Provenance + log_provenance(ncdf_outfile, \ + outfile_sabp + "." + file_type, \ + "Seasonal Cycle Amplitude histogram of " \ + + var0 + " for " + region + ", " + start_year + "-" \ + + end_year + ". Using masking: " + opt_mask, \ + (/"mean", "stddev"/), \ + DOMAIN, \ + "histogram", \ + AUTHORS, \ + REFERENCES, \ + ALL_FILES) + +end diff --git a/esmvaltool/diag_scripts/xco2_analysis/panel_plots.ncl b/esmvaltool/diag_scripts/xco2_analysis/panel_plots.ncl new file mode 100644 index 0000000000..08fe30ad0f --- /dev/null +++ b/esmvaltool/diag_scripts/xco2_analysis/panel_plots.ncl @@ -0,0 +1,406 @@ +; ############################################################################# +; xco2_analysis/panel_plots.ncl +; ############################################################################# +; Description +; Plotting panel plots showing seasonal cycle amplitude against input +; variable +; +; Required diag_script_info attributes: +; styleset: styleset to use for plotting colors, linestyles... +; region: latitude range for averaging +; masking: different masking options to apply to the dataset: +; "none" - no masking +; "obs" - observational masking +; C3S satellite dataset specific: +; "sciamachy" - masking according to period for SCIAMACHY only +; "gosat" - masking according to period for GOSAT only +; "land" - only consider land values +; obs_in_panel: True if observations should be included in plot +; area_avg: Type of area averaging: "full-area" normal area-average +; "lat-first" calculate zonal means first, +; then average these +; plot_var2_mean: If True, adds mean of seasonal cycle to panel as string. +; +; Optional diag_script_info attributes: +; output_file_type: output file type for plots. Default: png +; var_plotname: String formatting how variable should be named in plots +; defaults to short_name if not assigned +; +; Caveats +; +; Modification history +; 20201119-gier_bettina: Added provenance and cleaned up. 
+; 20200226-gier_bettina: Adapted to version 2 +; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/latlon.ncl" +load "$diag_scripts/shared/scaling.ncl" +load "$diag_scripts/shared/set_operators.ncl" +load "$diag_scripts/shared/statistics.ncl" + +load "$diag_scripts/shared/plot/scatterplot.ncl" +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/xy_line.ncl" + +load "$diag_scripts/xco2_analysis/stat.ncl" +load "$diag_scripts/xco2_analysis/carbon_plots.ncl" + +begin + + enter_msg(DIAG_SCRIPT, "") + AUTHORS = (/"gier_bettina"/) + REFERENCES = (/"gier20bg"/) + + ; Variable + var0 = variable_info[0]@short_name + + ; Input data + INFO0 = select_metadata_by_name(input_file_info, var0) + DATASETS = metadata_att_as_array(INFO0, "dataset") + DATASETS := array_append_record(DATASETS, "multi-model mean", 0) + experiments = metadata_att_as_array(INFO0, "exp") + dim_MOD = dimsizes(DATASETS) + ALL_FILES = metadata_att_as_array(INFO0, "filename") + + log_info("++++++++++++++++++++++++++++++++++++++++++") + log_info(DIAG_SCRIPT + " (var: " + var0 + ")") + log_info("++++++++++++++++++++++++++++++++++++++++++") + +end +begin + ; Maximum number of missing values per year + min_nmonth = 7 + + ; Prepare region + lat_min = diag_script_info@region(0) + lat_max = diag_script_info@region(1) + + if lat_min.eq.(-90) .and. lat_max.eq.90 then + region = "global" + else if lat_min.eq.(30) .and. lat_max.eq.60 then + region = "nhmidlat" + else if lat_min.eq.(-60) .and. lat_max.eq.(-30) then + region = "shmidlat" + else if lat_min.eq.(-30) .and. lat_max.eq.(30) then + region = "trop" + else if lat_min.eq.(0) .and. lat_max.eq.(90) then + region = "nh" + else if lat_min.eq.(-90) .and. lat_max.eq.0 then + region = "sh" + else + region = "lat_" + tostring(lat_min) + "_" + tostring(lat_max) + end if + end if + end if + end if + end if + end if + DOMAIN = (/region/) + + ; Plot file type + if (isatt(diag_script_info, "output_file_type")) then + file_type = diag_script_info@output_file_type + elseif (isatt(config_user_info, "output_file_type")) then + file_type = config_user_info@output_file_type + else + file_type = "png" + end if + + ; Output directories + plot_dir = config_user_info@plot_dir + system("mkdir -p " + plot_dir) + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) + + ; Determine start + end year + start_years = metadata_att_as_array(INFO0, "start_year") + start_year = min(start_years) + end_years = metadata_att_as_array(INFO0, "end_year") + end_year = max(end_years) + nyear = end_year - start_year + 1 + ntime = nyear*12 + time = new(ntime, float) + do yy = start_year, end_year + do mm = 0, 11 + time(12 * (yy - start_year) + mm) = yy + (mm + 0.5)/12. + end do + end do + + if (isatt(INFO0[0], "reference_dataset")) then + ref_ind = ind(DATASETS.eq.INFO0[0]@reference_dataset) + else + ref_ind = -999 + end if + + mod_inds = ind(DATASETS.ne.INFO0[0]@reference_dataset) + + ; Need array that maps obs to index 0, the other models to following indices + mapping_array = new(dimsizes(DATASETS), integer) + if ref_ind.ne.-999 then + mapping_array(ref_ind) = 0 + end if + mapping_array(mod_inds) = ispan(1, dimsizes(DATASETS)-1, 1) + + ; For MMM calculation + if ref_ind.eq.-999 then + subtract_mmm = 1 + else + subtract_mmm = 2 + end if + + if diag_script_info@masking(0) .ne. 
"none" then + opt_mask = "_" + str_join(diag_script_info@masking, "_") + else + opt_mask = "" + end if + + ; Formatted varname for plots + if (isatt(diag_script_info, "var_plotname")) then + var0_plotname = diag_script_info@var_plotname + else + var0_plotname = var0 + end if + + ; Prepare arrays + amp_series = new((/dim_MOD, nyear/), float) + amp_series!0 = "model" + amp_series&model = DATASETS + amp_series!1 = "time" + amp_series&time = ispan(start_year, end_year, 1) + amp_series@var = "sca" + amp_series@long_name = "Seasonal Cycle Amplitude" + amp_series@var_yaxis = True + amp_series@region = region + amp_series@diag_script = DIAG_SCRIPT + + growth_series = amp_series + growth_series@long_name = "Growth Rate" + growth_series@var = "gr" + + var0_yr = amp_series + var0_yr@long_name = var0_plotname + var0_yr@var = var0 + + varname2 = "SCA" + varname2@mean = diag_script_info@plot_var2_mean + + varname_gr = "GR" + varname_gr@mean = diag_script_info@plot_var2_mean +end + +begin + ; First read obs to have it ready for masking + obs_data = read_data(INFO0[ref_ind]) + time_mnth = cd_calendar(obs_data&time, 0) + + ; Prepare sat masks for usage if needed + if any(diag_script_info@masking .eq. "sciamachy") then + ; Sciamachy Masks! 2003-2008 + scia_ind = max(ind(time_mnth(:, 0).eq.2008)) + scia_l_mask = obs_data(0, :, :) + scia_l_mask = 0. + + do i_tim = 0, dimsizes(obs_data&time(:scia_ind)) - 1 + mnth_mv = where(ismissing(obs_data(i_tim, :, :)), 0, 1) + scia_l_mask = scia_l_mask + mnth_mv + end do + scia_l_mask = scia_l_mask / tofloat(dimsizes(obs_data&time(:scia_ind))) + scia_l_mask = where(scia_l_mask.ge.0.5, 1, scia_l_mask@_FillValue) + end if + + if any(diag_script_info@masking .eq. "gosat") then + gosat_ind = min(ind(time_mnth(:, 0).eq.2013)) + gosat_l_mask = obs_data(0, :, :) + gosat_l_mask = 0. + + do i_tim = gosat_ind, dimsizes(obs_data&time) - 1 + mnth_mv = where(ismissing(obs_data(i_tim, :, :)), 0, 1) + gosat_l_mask = gosat_l_mask + mnth_mv + end do + gosat_l_mask = gosat_l_mask / tofloat(dimsizes(obs_data&time(gosat_ind:))) + gosat_l_mask = where(gosat_l_mask.ge.0.5, 1, gosat_l_mask@_FillValue) + end if + + ; Prepare Land-Sea Mask + f = addfile("$NCARG_ROOT/lib/ncarg/data/cdf/landsea.nc", "r") + a = f->LSMASK + sftlf = byte2flt(landsea_mask(a, obs_data&lat, obs_data&lon)) + sftlf = where(sftlf.gt.1., 1., sftlf) + sftlf = where(sftlf.eq.0, sftlf@_FillValue, sftlf) + + ; Read data + do imod = 0, dim_MOD - 1 + ; Load data + if DATASETS(imod).eq."multi-model mean" then + var0_mod = mmm_array / tofloat(dimsizes(DATASETS) - subtract_mmm) + copy_VarMeta(mmm_array, var0_mod) + amp_series@units = var0_mod@units + growth_series@units = amp_series@units + " yr-1" + var0_yr@units = amp_series@units + else + if imod.ne.ref_ind then + tmp = read_data(INFO0[imod]) + var0_mod = tofloat(tmp) + copy_VarMeta(tmp, var0_mod) + delete(tmp) + + if .not. isdefined("mmm_array") then + mmm_array = area_hi2lores_Wrap(var0_mod&lon, var0_mod&lat, \ + var0_mod, True, 1, obs_data&lon, \ + obs_data&lat, False) + else + var_reg = area_hi2lores(var0_mod&lon, var0_mod&lat, var0_mod, \ + True, 1, mmm_array&lon, mmm_array&lat, False) + mmm_array = mmm_array + (/var_reg/) + delete(var_reg) + end if + end if + end if + + ; Go through all masks and apply + if imod .ne. ref_ind then + var0_reg = area_hi2lores_Wrap(var0_mod&lon, var0_mod&lat, var0_mod, \ + True, 1, obs_data&lon, obs_data&lat, False) + delete(var0_mod) + if any(diag_script_info@masking .eq. 
"obs") then + var0_reg = where(ismissing(obs_data), obs_data, var0_reg) + end if + if any(diag_script_info@masking .eq. "sciamachy") then + var0_reg = var0_reg * conform(var0_reg, scia_l_mask, (/1, 2/)) + end if + if any(diag_script_info@masking .eq. "gosat") then + var0_reg = var0_reg * conform(var0_reg, gosat_l_mask, (/1, 2/)) + end if + else + var0_reg = obs_data + end if + + if any(diag_script_info@masking .eq. "land") then + var0_reg = var0_reg * conform(var0_reg, sftlf, (/1, 2/)) + end if + + ; Different averages + if diag_script_info@area_avg .eq. "lat-first" then + var0_lonavg = dim_avg_n_Wrap(var0_reg, 2) + var0_sca_lonavg = new((/nyear, dimsizes(var0_lonavg&lat)/), float) + gr_lonavg_temp = var0_sca_lonavg + delete(var0_reg) + + do ilat = 0, dimsizes(var0_lonavg&lat) - 1 + gr = calc_gr(var0_lonavg(:, ilat), "monthlyfilled", min_nmonth) + gr_lonavg_temp(:, ilat) = calc_gr(var0_lonavg(:, ilat), "yearly", \ + min_nmonth) + do itim = 0, nyear - 1 + slope = dim_cumsum(gr(itim*12:itim*12+11)/12., 1) + detrended = var0_lonavg(itim*12:itim*12+11, ilat) - slope + mvnum = num(ismissing(detrended)) + if mvnum .le. min_nmonth then + var0_sca_lonavg(itim, ilat) = max(detrended) - min(detrended) + end if + delete(detrended) + delete(slope) + end do + + end do + + coords = ind_nearest_coord((/lat_min, lat_max/), var0_lonavg&lat, 0) + cos_lats = tofloat(cos(var0_lonavg&lat(coords(0):coords(1))/180*3.14159)) + var0_lonavg_xco2 = dim_avg_wgt_n(var0_lonavg(:, coords(0): coords(1)), \ + cos_lats, 1, 1) + copy_VarMeta(var0_lonavg, var0_lonavg_xco2) + + var0_yr(mapping_array(imod), :) = \ + (/time_operations(var0_lonavg_xco2, -1, -1, "average", "yearly", \ + True)/) + amp_series(mapping_array(imod), :) = \ + dim_avg_wgt(var0_sca_lonavg(:, coords(0): coords(1)), cos_lats, 1) + growth_series(mapping_array(imod), :) = \ + dim_avg_wgt(gr_lonavg_temp(:, coords(0): coords(1)), cos_lats, 1) + delete(gr_lonavg_temp) + else + var0_avg = area_operations(var0_reg, lat_min, lat_max, 0, 360, \ + "average", True) + var0_yr(mapping_array(imod), :) = \ + (/time_operations(var0_avg, -1, -1, "average", "yearly", True)/) + amp_series(mapping_array(imod), :) = (/calc_sca(var0_avg, min_nmonth)/) + growth_series(mapping_array(imod), :) = \ + (/calc_gr(var0_avg, "yearly", min_nmonth)/) + end if + + end do + + ; --------------------------------------------------------------------------- + ; ---------------------- PANEL PLOTS----------------------------------------- + ; --------------------------------------------------------------------------- + obs_pres = diag_script_info@obs_in_panel + if .not. obs_pres then + var0_yr := var0_yr(1:, :) + amp_series := amp_series(1:, :) + growth_series := growth_series(1:, :) + end if + filename_scaplot = var0 + "_" + experiments(0) \ + + "_" + region + "_" + (start_year) + "-" + (end_year) \ + + "_" + var0 + "_SCA_panels" + opt_mask + outfile_scaplot = plot_dir + "/" + filename_scaplot + outfile_scaplot_netcdf = work_dir + "/" + filename_scaplot + ".nc" + wks = gsn_open_wks(file_type, outfile_scaplot) + plt_xco2_SCA_panels = panel_plots(wks, var0_yr, amp_series, var0, \ + varname2, obs_pres, INFO0) + delete(wks) + delete(plt_xco2_SCA_panels) + + ; Write nc file + outfile_scaplot_netcdf@existing = "overwrite" + ncdf_outfile = ncdf_write(var0_yr, outfile_scaplot_netcdf) + outfile_scaplot_netcdf@existing = "append" + ncdf_outfile = ncdf_write(amp_series, outfile_scaplot_netcdf) + + ; Provenance + log_provenance(ncdf_outfile, \ + outfile_scaplot + "." 
+ file_type, \ + "Trend of Seasonal Cycle Amplitude with " \ + + var0 + " for " + region + ", " + start_year + "-" \ + + end_year + ". Using masking: " + opt_mask \ + + ". Similar to Gier et al 2020, Fig. 7.", \ + (/"mean", "corr"/), \ + DOMAIN, \ + "scatter", \ + AUTHORS, \ + REFERENCES, \ + ALL_FILES) + + filename_grplot = var0 + "_" + experiments(0) + "_" + region + "_" \ + + (start_year) + "-" + (end_year) + "_" + var0 + "_GR_panels" \ + + opt_mask + outfile_grplot = plot_dir + "/" + filename_grplot + outfile_grplot_netcdf = work_dir + "/" + filename_grplot + ".nc" + wks = gsn_open_wks(file_type, outfile_grplot) + plt_t_gr_panels = panel_plots(wks, var0_yr, growth_series, var0, \ + varname_gr, obs_pres, INFO0) + delete(wks) + delete(plt_t_gr_panels) + + ; Write nc file + outfile_grplot_netcdf@existing = "overwrite" + ncdf_outfile = ncdf_write(var0_yr, outfile_grplot_netcdf) + outfile_grplot_netcdf@existing = "append" + ncdf_outfile = ncdf_write(growth_series, outfile_grplot_netcdf) + + ; Provenance + log_provenance(ncdf_outfile, \ + outfile_grplot + "." + file_type, \ + "Trend of Growth Rate with " \ + + var0 + " for " + region + ", " + start_year + "-" \ + + end_year + ". Using masking: " + opt_mask \ + + ". Similar to Gier et al 2020, Fig. 7.", \ + (/"mean", "corr"/), \ + DOMAIN, \ + "scatter", \ + AUTHORS, \ + REFERENCES, \ + ALL_FILES) + +end diff --git a/esmvaltool/diag_scripts/xco2_analysis/sat_masks.ncl b/esmvaltool/diag_scripts/xco2_analysis/sat_masks.ncl new file mode 100644 index 0000000000..2bed5c74a6 --- /dev/null +++ b/esmvaltool/diag_scripts/xco2_analysis/sat_masks.ncl @@ -0,0 +1,393 @@ +; ############################################################################# +; xco2_analysis/sat_masks.ncl +; ############################################################################# +; Description +; Plotting maps for missing values in satellite data. +; +; Required diag_script_info attributes: +; +; Optional diag_script_info attributes: +; output_file_type: output file type for plots. Default: png +; var_plotname: String formatting how variable should be named in plots +; defaults to short_name if not assigned +; c3s_plots: Missing value plots separated by time periods of the C3S +; satellites +; +; Caveats +; +; Modification history +; 20201119-gier_bettina: Added provenance and cleaned up code. 
+; 20200226-gier_bettina: Adapted to version 2 +; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/latlon.ncl" +load "$diag_scripts/shared/scaling.ncl" +load "$diag_scripts/shared/set_operators.ncl" +load "$diag_scripts/shared/statistics.ncl" + +load "$diag_scripts/shared/plot/scatterplot.ncl" +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/xy_line.ncl" + +load "$diag_scripts/xco2_analysis/stat.ncl" + +begin + + enter_msg(DIAG_SCRIPT, "") + AUTHORS = (/"gier_bettina"/) + REFERENCES = (/"gier20bg"/) + DOMAIN = (/"global"/) + + ; Variable + var0 = variable_info[0]@short_name + + ; Input data + INFO0 = select_metadata_by_name(input_file_info, var0) + DATASETS = metadata_att_as_array(INFO0, "dataset") + DATASETS := array_append_record(DATASETS, "multi-model mean", 0) + experiments = metadata_att_as_array(INFO0, "exp") + dim_MOD = dimsizes(DATASETS) + ALL_FILES = metadata_att_as_array(INFO0, "filename") + + log_info("++++++++++++++++++++++++++++++++++++++++++") + log_info(DIAG_SCRIPT + " (var: " + var0 + ")") + log_info("++++++++++++++++++++++++++++++++++++++++++") +end + +begin + + ; Plot file type + if (isatt(diag_script_info, "output_file_type")) then + file_type = diag_script_info@output_file_type + elseif (isatt(config_user_info, "output_file_type")) then + file_type = config_user_info@output_file_type + else + file_type = "png" + end if + + ; Output directories + plot_dir = config_user_info@plot_dir + system("mkdir -p " + plot_dir) + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) + + ; Formatted varname for plots + if (isatt(diag_script_info, "var_plotname")) then + var0_plotname = diag_script_info@var_plotname + else + var0_plotname = var0 + end if + + ; Check if plots separated by C3S satellite periods are requested + if (isatt(diag_script_info, "c3s_plots")) then + c3s_mask = True + else + c3s_mask = False + end if + +end + +begin + ; Prepare arrays + + ; Make array to collect timeseries data + start_years = metadata_att_as_array(INFO0, "start_year") + start_year = min(start_years) + end_years = metadata_att_as_array(INFO0, "end_year") + end_year = max(end_years) + nyear = (end_year - start_year + 1) + time = new(nyear*12, float) + do yy = start_year, end_year + do mm = 0, 11 + time(12 * (yy - start_year) + mm) = yy + (mm + 0.5)/12. + end do + end do + +end + +begin + ; First read observations for observational masking + if (isatt(INFO0[0], "reference_dataset")) then + ref_ind = ind(DATASETS.eq.INFO0[0]@reference_dataset) + else + error_msg("w", DIAG_SCRIPT, "", \ + "this diagnostic requires a reference dataset") + end if + + obs_data = read_data(INFO0[ref_ind]) + + ; --------------------------------------------- + ; Determine missing value masks + ; ----------------- OVERALL MASKS + time_mnth = cd_calendar(obs_data&time, 0) + + global_mask = obs_data(0, :, :) + global_mask = 0. 
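+ + ; global_mask and monthly_mask count missing months per grid cell; after + ; the accumulation loop below they are converted to fractional data + ; coverage (1 = month always observed)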
+ + monthly_mask = conform_dims((/12, dimsizes(global_mask(:, 0)), \ + dimsizes(global_mask(0, :))/), global_mask, \ + (/1, 2/)) + monthly_mask!0 = "month" + monthly_mask&month = (/"January", "February", "March", "April", "May", \ + "June", "July", "August", "September", "October",\ + "November", "December"/) + monthly_mask!1 = "lat" + monthly_mask&lat = global_mask&lat + monthly_mask!2 = "lon" + monthly_mask&lon = global_mask&lon + monthly_mask@var = "data_coverage_percent" + monthly_mask@diag_script = DIAG_SCRIPT + + copy_VarAtts(obs_data, monthly_mask) + + do i_tim = 0, dimsizes(obs_data&time) - 1 + imon = toint(time_mnth(i_tim, 1)) + mnth_mv = where(ismissing(obs_data(i_tim, :, :)), 1, 0) + global_mask = global_mask + mnth_mv + monthly_mask(imon-1, :, :) = monthly_mask(imon-1, :, :) + mnth_mv + end do + global_mask = 1 - (global_mask / tofloat(dimsizes(obs_data&time))) + monthly_mask = 1 - (monthly_mask / (dimsizes(obs_data&time)/12.)) + + ; -------------------------------------------------------------- + ; ------ PER SATELLITE MASKS -> C3S special masks + ; -------------------------------------------------------------- + + if c3s_mask then + + ; Calculate spatial mask approximations + + ; Sciamachy Masks! 2003-2008 + scia_ind = max(ind(time_mnth(:, 0).eq.2008)) + scia_l_mask = global_mask + scia_l_mask = 0. + + do i_tim = 0, dimsizes(obs_data&time(:scia_ind)) - 1 + mnth_mv = where(ismissing(obs_data(i_tim, :, :)), 0, 1) + scia_l_mask = scia_l_mask + mnth_mv + end do + scia_l_mask = scia_l_mask / tofloat(dimsizes(obs_data&time(:scia_ind))) + + ; Gosat Masks! 2013 - .. (overlap 2009 to 2012) + gosat_ind = min(ind(time_mnth(:, 0).eq.2013)) + gosat_l_mask = global_mask + gosat_l_mask = 0. + + do i_tim = gosat_ind, dimsizes(obs_data&time) - 1 + mnth_mv = where(ismissing(obs_data(i_tim, :, :)), 0, 1) + gosat_l_mask = gosat_l_mask + mnth_mv + end do + gosat_l_mask = gosat_l_mask / tofloat(dimsizes(obs_data&time(gosat_ind:))) + + ; SCIA + GOSAT joint mask + sgj_i_ind = min(ind(time_mnth(:, 0).eq.2009)) + sgj_f_ind = max(ind(time_mnth(:, 0).eq.2012)) + sgj_l_mask = global_mask + sgj_l_mask = 0. 
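+ + ; Fraction of non-missing months per grid cell over the 2009-2012 overlap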
+ + do i_tim = sgj_i_ind, sgj_f_ind + mnth_mv = where(ismissing(obs_data(i_tim, :, :)), 0, 1) + sgj_l_mask = sgj_l_mask + mnth_mv + end do + sgj_l_mask = sgj_l_mask / \ + tofloat(dimsizes(obs_data&time(sgj_i_ind:sgj_f_ind))) + + ; Add attributes for netcdf files + scia_l_mask@var = "data_coverage_percent_scia" + scia_l_mask@diag_script = DIAG_SCRIPT + gosat_l_mask@var = "data_coverage_percent_gosat" + gosat_l_mask@diag_script = DIAG_SCRIPT + sgj_l_mask@var = "data_coverage_percent_overlap" + sgj_l_mask@diag_script = DIAG_SCRIPT + + end if ; end C3S mask special + + ; ------------------------ PLOTS -------------------------------------------- + ; -------------- Missing value Plots ---------------------------------------- + mv_cmap = read_colormap_file("WhiteGreen") + + ; Plot missing values per month + filename = var0 + "_missing_values_monthly_" + start_year + "-" + end_year + outfile = plot_dir + "/" + filename + outfile_netcdf = work_dir + "/" + filename + ".nc" + ncdf_outfile = ncdf_write(monthly_mask, outfile_netcdf) + + wks = gsn_open_wks(file_type, outfile) + mres = True + mres@cnFillOn = True + mres@cnLinesOn = False + mres@gsnDraw = False + mres@gsnFrame = False + mres@lbLabelBarOn = False + mres@gsnRightString = "" + mres@gsnStringFontHeightF = 0.03 + mres@tmXBLabelFontHeightF = 0.025 + mres@tmYLLabelFontHeightF = 0.025 + mres@tmLabelAutoStride = True + mres@cnConstFLabelOn = False + mres@cnFillPalette = mv_cmap ; set color map + + mon_plots = new(12, graphic) + + do i = 0, 11 + mres@gsnLeftString = monthly_mask&month(i) + mon_plots(i) = gsn_csm_contour_map(wks, monthly_mask(i, :, :), mres) + end do + + pres = True + pres@gsnMaximize = True ; Maximize in frame + pres@gsnFrame = False ; Don't advance frame + pres@gsnPanelLabelBar = True + pres@lbTitleOn = True + pres@lbTitlePosition = "Bottom" + pres@lbTitleString = var0_plotname + " Data Coverage" + pres@lbLabelFontHeightF = 0.012 + pres@lbTitleFontHeightF = 0.015 + pres@lbBottomMarginF = -1 + pres@lbTopMarginF = 0.7 + pres@lbTitleOffsetF = 0.2 + pres@gsnPanelYWhiteSpacePercent = 2. + + gsn_panel(wks, mon_plots, (/4, 3/), pres) + + delete(pres) + delete(mres) + frame(wks) + + ; Provenance + log_provenance(ncdf_outfile, \ + outfile + "." + file_type, \ + var0 + " fractional data coverage " + start_year + "-" \ + + end_year + ". 
Similar to Gier et al 2020, Fig 1.", \ + (/"mean"/), \ + DOMAIN, \ + "histogram", \ + AUTHORS, \ + REFERENCES, \ + ALL_FILES) + + ; ------------------------------------------------------------------------- + ; --------------------Panel Plot of spatial masks + + if c3s_mask then + ; Plot spatial mask scia + filename_c3s = var0 + "_spatial_mask_panels" + outfile_c3s = plot_dir + "/" + filename_c3s + outfile_netcdf_c3s = work_dir + "/" + filename_c3s + ".nc" + + ; Write data to netcdf + outfile_netcdf_c3s@existing = "overwrite" + ncdf_outfile = ncdf_write(scia_l_mask, outfile_netcdf_c3s) + outfile_netcdf_c3s@existing = "append" + ncdf_outfile = ncdf_write(gosat_l_mask, outfile_netcdf_c3s) + ncdf_outfile = ncdf_write(sgj_l_mask, outfile_netcdf_c3s) + + wks = gsn_open_wks(file_type, outfile_c3s) + res = True + res@lbLabelBarOn = False + res@cnFillOn = True + res@cnLinesOn = False + res@gsnDraw = False + res@gsnFrame = False + res@gsnLeftString = "SCIAMACHY" + res@gsnRightString = "2003 - 2008" + res@tiMainString = "" + res@gsnStringFontHeightF = 0.02 + res@tmXBLabelFontHeightF = 0.02 + res@tmYLLabelFontHeightF = 0.02 + res@cnConstFLabelOn = False + res@cnLineLabelsOn = False + res@tmLabelAutoStride = True + mv_cmap = read_colormap_file("WhiteGreen") + res@cnFillPalette = mv_cmap + plot_scia = gsn_csm_contour_map(wks, scia_l_mask, res) + ; Add polylines for 30 + 60N + lnres = True + lnres@gsLineThicknessF = 2.0 + lnid30 = gsn_add_polyline(wks, plot_scia, (/0, 120, 240, 360/), \ + (/30, 30, 30, 30/), lnres) + lnid60 = gsn_add_polyline(wks, plot_scia, (/0, 120, 240, 360/), \ + (/60, 60, 60, 60/), lnres) + + ; Plot for stippling + + res2 = True ; res2 probability plots + res2@lbLabelBarOn = False + res2@gsnDraw = False ; Do not draw plot + res2@gsnFrame = False ; Do not advance frame + res2@cnInfoLabelOn = False + res2@cnLinesOn = False ; do not draw contour lines + res2@cnLineLabelsOn = False ; do not draw contour labels + res2@cnConstFLabelOn = False + res2@cnFillColor = "black" + + res2@gsnLeftString = "" + res2@gsnRightString = "" + + plot_scias = gsn_csm_contour(wks, scia_l_mask, res2) + opt = True + opt@gsnShadeFillType = "pattern" + opt@gsnShadeHigh = 12 + opt@gsnShadeFillScaleF = 0.6 + ; shade all areas with less than 50% missing values + plot_scias = gsn_contour_shade(plot_scias, 0.00, 0.50, opt) + overlay(plot_scia, plot_scias) + + res@gsnLeftString = "SCIAMACHY + GOSAT" + res@gsnRightString = "2009 - 2012" + plot_sgj = gsn_csm_contour_map(wks, sgj_l_mask, res) + lnid30sgj = gsn_add_polyline(wks, plot_sgj, (/0, 120, 240, 360/), \ + (/30, 30, 30, 30/), lnres) + lnid60sgj = gsn_add_polyline(wks, plot_sgj, (/0, 120, 240, 360/), \ + (/60, 60, 60, 60/), lnres) + plot_sgjs = gsn_csm_contour(wks, sgj_l_mask, res2) + plot_sgjs = gsn_contour_shade(plot_sgjs, 0.00, 0.50, opt) + overlay(plot_sgj, plot_sgjs) + + res@gsnLeftString = "GOSAT" + res@gsnRightString = "2013 - " + end_year + plot_gosat = gsn_csm_contour_map(wks, gosat_l_mask, res) + lnid30gosat = gsn_add_polyline(wks, plot_gosat, (/0, 120, 240, 360/), \ + (/30, 30, 30, 30/), lnres) + lnid60gosat = gsn_add_polyline(wks, plot_gosat, (/0, 120, 240, 360/), \ + (/60, 60, 60, 60/), lnres) + plot_gosats = gsn_csm_contour(wks, gosat_l_mask, res2) + plot_gosats = gsn_contour_shade(plot_gosats, 0.00, 0.50, opt) + overlay(plot_gosat, plot_gosats) + + resP = True + resP@gsnFrame = False + resP@gsnPanelLabelBar = True + resP@lbTitleOn = True + resP@lbTitlePosition = "Bottom" + resP@lbTitleString = "XCO~B~2~N~ Data Coverage" + resP@lbTitleFontHeightF = 
0.015 + resP@lbLabelFontHeightF = 0.012 + resP@lbBottomMarginF = -0.2 + resP@lbTopMarginF = 0.3 + resP@lbTitleOffsetF = 0.2 + resP@gsnPanelBottom = 0.05 + resP@gsnPanelYWhiteSpacePercent = 2. + gsn_panel(wks, (/plot_scia, plot_sgj, plot_gosat/), (/3, 1/), resP) + frame(wks) + delete(outfile) + delete(wks) + delete(res) + + log_provenance(ncdf_outfile, \ + outfile_c3s + "." + file_type, \ + var0 + " fractional data coverage for different obs " \ + + "periods. Similar to Gier et al 2020, Fig 8.", \ + (/"mean"/), \ + DOMAIN, \ + "histogram", \ + AUTHORS, \ + REFERENCES, \ + ALL_FILES) + + end if ; end special c3s mask plot + + leave_msg(DIAG_SCRIPT, "") +end diff --git a/esmvaltool/diag_scripts/xco2_analysis/stat.ncl b/esmvaltool/diag_scripts/xco2_analysis/stat.ncl new file mode 100644 index 0000000000..1eea55bb74 --- /dev/null +++ b/esmvaltool/diag_scripts/xco2_analysis/stat.ncl @@ -0,0 +1,142 @@ +; ############################################################################# +; ROUTINES FOR CALCULATING GROWTH RATE AND SEASONAL CYCLE AMPLITUDE +; ############################################################################# +; Please consider using or extending existing routines before adding new ones. +; Check the header of each routine for documentation. +; +; Contents: +; function calc_gr +; function calc_sca +; function var_detrend +; +; ############################################################################# +undef("calc_gr") +function calc_gr(data[*]: numeric, \ + opt[1]:string, \ + min_nmonth: integer) +; +; Description: +; Calculates growth rate of a 1D array using the Buchwitz et al 2018 method, +; with a maximum of min_nmonth permitted missing values per year. +; +; Return value: +; 1D array of growth rates: monthly values for "monthly" and +; "monthlyfilled", yearly averages for "yearly" +; +; Arguments: +; data: requires time coordinate +; opt: "monthly" - monthly GR +; "yearly" - yearly avg GR +; "monthlyfilled" - monthly, missing values filled with yearly +; average +; min_nmonth: maximum number of missing values per year +; +; +; Modification history: +; 20180813-gier_bettina: written +; +local result, ntime, grmon, gryr, data1D, gryrf, mvnum +begin + data1D = data + + ; Calc 1D case + ntime = dimsizes(data1D) + grmon = new(ntime, float) + ; Centered annual difference: GR at month i is data(i+6) - data(i-6) + grmon(6:ntime-7) = data1D(12:) - data1D(:ntime-13) + + if opt.ne."monthly" then + gryr = new(ntime/12, float) + + do itim = 0, ntime/12 - 1 + mvnum = num(ismissing(grmon(itim*12:itim*12+11))) + if mvnum .le. min_nmonth then + gryr(itim) = avg(grmon(itim*12:itim*12+11)) + end if + end do + + if opt.eq."yearly" then + result = gryr + else if opt.eq."monthlyfilled" then + gryrf = new(ntime, float) + do itim = 0, ntime/12 - 1 + gryrf(itim*12:itim*12+11) = \ + where(ismissing(grmon(itim*12:itim*12+11)), \ + gryr(itim), \ + grmon(itim*12:itim*12+11)) + end do + result = gryrf + end if + end if + + else + result = grmon + end if + + return result +end + +; ############################################################################# +undef("calc_sca") +function calc_sca(data[*]: numeric, \ + min_nmonth: integer) +; +; Description: +; Calculates seasonal cycle amplitude by max - min of a 1D array, with a +; maximum of min_nmonth permitted missing values per year. 
+; +; Return value: +; 1D array of yearly seasonal cycle amplitudes +; +; Arguments: +; data: 1D array with data timeseries +; min_nmonth: maximum number of missing values per year +; +; +; Modification history: +; 20180813-gier_bettina: written +; +local sca, detrended, gryrf, ntime, mvnum, data1D +begin + ; Get GR to detrend + data1D = data + ntime = dimsizes(data) + sca = new(ntime/12, float) + gryrf = calc_gr(data, "monthlyfilled", min_nmonth) + do itim = 0, ntime/12 - 1 + detrended = data(itim*12:itim*12+11) \ + - dim_cumsum(gryrf(itim*12:itim*12+11)/12., 1) + mvnum = num(ismissing(detrended)) + if mvnum .le. min_nmonth then + sca(itim) = max(detrended) - min(detrended) + end if + end do + + return sca +end + +; ############################################################################# +undef("var_detrend") +function var_detrend(x[*]: numeric, \ + y[*]: numeric) +; +; Description: +; Detrends a variable y with respect to x by linear regression +; +; Return value: +; Detrended y, with metadata copied from y +; +; Arguments: +; x: var to detrend against (usually time) +; y: var to detrend +; +; +; Modification history: +; 20181207-gier_bettina: written +; +local rc, rc_line, y_detr +begin + rc = regline(x, y) + rc_line = rc * x + rc@yintercept + y_detr = y - rc_line + copy_VarMeta(y, y_detr) + return y_detr +end diff --git a/esmvaltool/diag_scripts/xco2_analysis/station_comparison.ncl b/esmvaltool/diag_scripts/xco2_analysis/station_comparison.ncl new file mode 100644 index 0000000000..23155e36fa --- /dev/null +++ b/esmvaltool/diag_scripts/xco2_analysis/station_comparison.ncl @@ -0,0 +1,517 @@ +; ############################################################################# +; xco2_analysis/station_comparison.ncl +; ############################################################################# +; Description +; Sampling station CO2 data like XCO2 datasets and comparing the two +; +; Required diag_script_info attributes: +; var_order: in this case xco2, co2, co2s - column averaged with obs dataset +; first, then 2D variable, followed by surface stations +; +; Optional diag_script_info attributes: +; output_file_type: output file type for plots. Default: png +; var_plotnames: String formatting how variables should be named in plots +; defaults to short_name if not assigned +; overwrite_altitudes: Overrides the altitude values attached to the +; station data. Useful if the sampling altitude +; changes and the timeseries spans a range with +; different sample altitudes. Caveat: if used, +; altitude values must be given for all stations. +; output_map: boolean, True if stations should be displayed on a map. As +; this requires finetuning, currently only implemented for the +; station set (ASK, CGO, HUN, LEF, WIS) following the paper. +; Change code if others are desired. +; +; Caveats +; Currently assumes same time for all input variables (mmm computation) +; +; Modification history +; 20201210-gier_bettina: Added provenance and cleanup +; 20200220-gier_bettina: Adapted to version 2 +; 20180809-gier_bettina: written. 
+; +; ############################################################################# + +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/latlon.ncl" +load "$diag_scripts/shared/scaling.ncl" +load "$diag_scripts/shared/set_operators.ncl" +load "$diag_scripts/shared/statistics.ncl" + +load "$diag_scripts/shared/plot/scatterplot.ncl" +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/xy_line.ncl" + +load "$diag_scripts/xco2_analysis/stat.ncl" + +begin + + enter_msg(DIAG_SCRIPT, "") + AUTHORS = (/"gier_bettina"/) + REFERENCES = (/"gier20bg"/) + + ; Variable + var0 = diag_script_info@var_order(0) + var1 = diag_script_info@var_order(1) + var2 = diag_script_info@var_order(2) + dim_VAR = ListCount(variable_info) + + ; Input data + INFO0 = select_metadata_by_name(input_file_info, var0) + INFO1 = select_metadata_by_name(input_file_info, var1) + INFO2 = select_metadata_by_name(input_file_info, var2) + DATASETS = metadata_att_as_array(INFO0, "dataset") + DATASETS1 = metadata_att_as_array(INFO1, "dataset") + ; ESRL station name is in version + DATASETS2 = metadata_att_as_array(INFO2, "version") + IN_FILES0 = metadata_att_as_array(INFO0, "filename") + MOD_FILES = array_append_record(IN_FILES0, \ + metadata_att_as_array(INFO1, "filename"), 0) + STAT_FILES = metadata_att_as_array(INFO2, "filename") + + log_info("++++++++++++++++++++++++++++++++++++++++++") + log_info(DIAG_SCRIPT + " (var: " + var0 + " and " + var1 + ")") + log_info("++++++++++++++++++++++++++++++++++++++++++") + +end + +begin + ; Plot file type + if (isatt(diag_script_info, "output_file_type")) then + file_type = diag_script_info@output_file_type + elseif (isatt(config_user_info, "output_file_type")) then + file_type = config_user_info@output_file_type + else + file_type = "png" + end if + + ; Formatted varname for plots + if (isatt(diag_script_info, "var_plotnames")) then + var_plotnames = diag_script_info@var_plotnames + else + var_plotnames = (/var0, var1/) + end if + + ; Output directories + plot_dir = config_user_info@plot_dir + system("mkdir -p " + plot_dir) + work_dir = config_user_info@work_dir + system("mkdir -p " + work_dir) + + ; Determine start + end year + start_years = metadata_att_as_array(INFO0, "start_year") + start_year = min(start_years) + end_years = metadata_att_as_array(INFO0, "end_year") + end_year = max(end_years) + nyear = end_year - start_year + 1 + ntime = nyear*12 + time = new(ntime, float) + do yy = start_year, end_year + do mm = 0, 11 + time(12 * (yy - start_year) + mm) = yy + (mm + 0.5)/12. 
+ end do + end do + +end + +begin + ; Read Sat data + if (isatt(INFO0[0], "reference_dataset")) then + ref_ind = ind(DATASETS.eq.INFO0[0]@reference_dataset) + else + error_msg("w", DIAG_SCRIPT, "", "this diagnostic requires reference " + \ + "datasets.") + end if + sat_data = read_data(INFO0[ref_ind]) + var_unit = sat_data@units + + ; Read multi-model means + xco2_mmm = ind(DATASETS.eq."MultiModelMean") + mod_xco2 = read_data(INFO0[xco2_mmm]) + + co2_mmm = ind(DATASETS1.eq."MultiModelMean") + mod_co2 = read_data(INFO1[co2_mmm]) + + ; Make array to hold station data + station_array = new((/dimsizes(DATASETS2), ntime/), float) + station_array!0 = "station" + station_array&station = DATASETS2 + station_array@lat = new(dimsizes(DATASETS2), float) + station_array@lon = new(dimsizes(DATASETS2), float) + station_array@alt = new(dimsizes(DATASETS2), float) + station_array!1 = "time" + station_array&time = time + station_array@var = var2 + station_array@units = var_unit + station_array@diag_script = DIAG_SCRIPT + + ; Make arrays to hold masked station data and corresponding sat measurement + station_array_masked = station_array + ; Initialise to all missing: filled below only where obs exist + station_array_masked = station_array_masked@_FillValue + station_array_masked@var = station_array@var + "_masked" + sat_array = station_array + co2_array = station_array + xco2_array = station_array + sat_array@var = var0 + "_model" + offsets = new(dimsizes(DATASETS2), float) + + ; Reading in station data + do istat = 0, dimsizes(DATASETS2) - 1 + stat_data = read_data(INFO2[istat]) + station_array(istat, :) = (/stat_data/) + station_array@lat(istat) = tofloat(stat_data&lat(0)) + station_array@lon(istat) = tofloat(stat_data&lon(0)) + if (isatt(diag_script_info, "overwrite_altitudes")) then + station_array@alt(istat) = \ + tofloat(diag_script_info@overwrite_altitudes(istat)) + else + station_array@alt(istat) = tofloat(stat_data@altitude) + end if + + ; Sample Station data according to Sat Obs + add_lat = 2.5 + add_lon = 2.5 + latmin = station_array@lat(istat) - add_lat + latmax = station_array@lat(istat) + add_lat + lonmin = station_array@lon(istat) - add_lon + lonmax = station_array@lon(istat) + add_lon + if lonmin.lt.0 then + lonmin = 360.0 - abs(lonmin) + end if + if lonmax.gt.360 then + lonmax = lonmax - 360.0 + end if + obs_area = area_operations(sat_data, latmin, latmax, \ + lonmin, lonmax, "average", True) + sat_array(istat, :) = (/obs_area/) + xco2_area = area_operations(mod_xco2, latmin, latmax, \ + lonmin, lonmax, "average", True) + + ; Calculate model offset through regression + sat_rc = regline(obs_area&time, obs_area) + mod_rc = regline(xco2_area&time, xco2_area) + offset_mod = (obs_area&time(0) + \ + (obs_area&time(dimsizes(obs_area&time)-1) \ + - obs_area&time(0))/2) * sat_rc + sat_rc@yintercept \ + - ((xco2_area&time(0) + (xco2_area&time(dimsizes(xco2_area&time) - 1)\ + - xco2_area&time(0))/2)*mod_rc + mod_rc@yintercept) + offsets(istat) = tofloat(offset_mod) + xco2_array(istat, :) = (/xco2_area/) + tofloat(offset_mod) + + atmos = stdatmus_z2tdp(station_array@alt(istat)) + p_level = atmos(2)*100 + if p_level.gt.100000 then + p_level = 100000 + end if + + mod_co2h_data = mod_co2(:, {p_level}, :, :) + co2_area = area_operations(mod_co2h_data, latmin, latmax, \ + lonmin, lonmax, "average", True) + co2_array(istat, :) = (/co2_area/) + tofloat(offset_mod) + station_array_masked(istat, :) = \ + where(ismissing(obs_area), \ + station_array_masked(istat, :), \ + station_array(istat, :)) + delete(obs_area) + end do ; End reading station data + + stobs_ts = 
new((/dimsizes(DATASETS2), 4, dimsizes(station_array&time)/), \ + float) + stobs_ts!0 = "station" + stobs_ts&station = station_array&station + stobs_ts!1 = "dataset" + stobs_ts&dataset = (/"Station " + var_plotnames(1), \ + "Satellite " + var_plotnames(0), \ + "Multi-model Mean " + var_plotnames(1), \ + "Multi-model Mean " + var_plotnames(0)/) + stobs_ts!2 = "time" + stobs_ts&time = station_array&time + + ; Loop over stations and plot CO2 timeseries + do istat = 0, dimsizes(DATASETS2) - 1 + + filename_stations_masked = var0 + "_surface_flasks_" + \ + station_array&station(istat) + "_" + (start_year) + "-" + \ + (end_year) + "_masked" + outfile_stations_masked = plot_dir + "/" + filename_stations_masked + outfile_stations_masked_netcdf = work_dir + "/" \ + + filename_stations_masked + ".nc" + + outfile_stations_masked_netcdf@existing = "overwrite" + ncdf_outfile = ncdf_write(station_array(istat, :), \ + outfile_stations_masked_netcdf) + outfile_stations_masked_netcdf@existing = "append" + ncdf_outfile = ncdf_write(station_array_masked(istat, :), \ + outfile_stations_masked_netcdf) + + wks = gsn_open_wks(file_type, outfile_stations_masked) + res = True + res@gsnDraw = False + res@gsnFrame = False + res@tiYAxisString = var_plotnames(1) + " [" + var_unit + "]" + res@xyLineThicknessF = 2.0 ; default is 1 + res@xyMonoDashPattern = True ; force all solid lines + res@pmLegendOrthogonalPosF = -1.05 + res@pmLegendParallelPosF = 0.25 + res@pmLegendDisplayMode = "Always" + res@pmLegendWidthF = 0.05 + res@pmLegendHeightF = 0.2 + res@lgLabelFontHeightF = 0.015 + res@xyDashPattern = 0 + res@tmXBFormat = "f" ; Don't put unnecessary 0's + res@xyExplicitLegendLabels = station_array&station(istat) + " (" + \ + station_array@lat(istat) + ", " + station_array@lon(istat) + ")" + res@xyLineColors = (/"blue", "black", "red", "orange"/) + res@tiMainString = "Time series masked " + station_array&station(istat) \ + + " (" + station_array@lat(istat) + ", " + station_array@lon(istat) + ")" + res@pmLegendDisplayMode = "Never" + statm_plot = gsn_csm_xy(wks, station_array_masked&time, \ + station_array_masked(istat, :), res) + res@xyDashPattern = 2 + res@pmLegendDisplayMode = "Never" + stat_plot = gsn_csm_xy(wks, station_array&time, \ + station_array(istat, :), res) + NhlAddOverlay(statm_plot, stat_plot, statm_plot) + draw(statm_plot) + frame(wks) + delete(stat_plot) + delete(statm_plot) + delete(res@xyExplicitLegendLabels) + + ; Provenance + log_provenance(ncdf_outfile, \ + outfile_stations_masked + "." 
+ file_type, \ + "Time series of station " + station_array&station(istat) \ + + ", dotted denoting areas masked like observations.", \ + (/""/), \ + "reg", \ + "times", \ + AUTHORS, \ + REFERENCES, \ + (/INFO2[istat]@filename, INFO0[ref_ind]@filename/)) + + filename_stations_obs = var0 + "_surface_flasks_" + \ + station_array&station(istat) + "_" + (start_year) + "-" + \ + (end_year) + "_maskedvsobs" + outfile_stations_obs_netcdf = work_dir + "/" \ + + filename_stations_obs + ".nc" + + outfile_stations_obs = plot_dir + "/" + filename_stations_obs + wks = gsn_open_wks(file_type, outfile_stations_obs) + res@tiMainString = "Time series " + station_array&station(istat) + \ + " (" + station_array@lat(istat) + ", " + station_array@lon(istat) + \ + ") Mod Offset: " + round(offsets(istat), 3) + res@xyDashPattern = 0 + res@pmLegendDisplayMode = "Always" + stobs = new((/4, dimsizes(station_array&time)/), float) + stobs!0 = "dataset" + stobs&dataset = (/station_array&station(istat), \ + "Satellite " + var_plotnames(0), \ + "Multi-model Mean " + var_plotnames(1), \ + "Multi-model Mean " + var_plotnames(0)/) + stobs!1 = "time" + stobs&time = station_array&time + stobs(0, :) = (/station_array(istat, :)/) + stobs(1, :) = (/sat_array(istat, :)/) + stobs(2, :) = (/co2_array(istat, :)/) + stobs(3, :) = (/xco2_array(istat, :)/) + stobs@var = var2 + stobs@diag_script = DIAG_SCRIPT + stobs@units = var_unit + stobs_ts(istat, :, :) = (/stobs/) + ncdf_outfile = ncdf_write(stobs, outfile_stations_obs_netcdf) + + res@xyExplicitLegendLabels = stobs&dataset + stat_plot = gsn_csm_xy(wks, stobs&time, stobs, res) + draw(stat_plot) + frame(wks) + delete(res@xyExplicitLegendLabels) + + ; Provenance + log_provenance(ncdf_outfile, \ + outfile_stations_obs + "." + file_type, \ + "Time series of station " + station_array&station(istat) \ + + ", for " + var0 + " and " + var1 + ".", \ + (/"mean"/), \ + "reg", \ + "times", \ + AUTHORS, \ + REFERENCES, \ + array_append_record(MOD_FILES, INFO2[istat]@filename, 0)) + + end do + delete(res) + + if ((isatt(diag_script_info, "output_map")) \ + .and. (diag_script_info@output_map)) then + ; Make map plot and put timeseries on it + + ; Empty dummy map plot: + + filename_map = var0 + "_surface_flasks_" + (start_year) \ + + "-" + (end_year) + "_map" + outfile_map_netcdf = work_dir + "/" + filename_map + ".nc" + outfile_map = plot_dir + "/" + filename_map + stobs_ts@var = var2 + stobs_ts@diag_script = DIAG_SCRIPT + stobs_ts@units = var_unit + ncdf_outfile = ncdf_write(stobs_ts, outfile_map_netcdf) + + wks = gsn_open_wks(file_type, outfile_map) + + res = True ; plot mods desired + res@gsnDraw = False ; don't draw yet + res@gsnFrame = False ; don't advance frame + + resm = res + resm@mpFillOn = True + resm@mpLandFillColor = "grey92" + plot = gsn_csm_map_ce(wks, resm) + + ; Make Station timeseries plots + sta_ts_plots = new(dimsizes(DATASETS2), graphic) + + do istat = 0, dimsizes(DATASETS2) - 1 + res@tiMainString = station_array&station(istat) + " (" + \ + decimalPlaces(station_array@lat(istat), 1, True) + \ + ", " + decimalPlaces(station_array@lon(istat), 1, True) + \ + ") Offset: " + decimalPlaces(offsets(istat), 1, True) + res@vpWidthF = 0.155 ; Make plots quite a bit smaller. 
+ res@vpHeightF = 0.15 + + res@trXMinF = start_year + res@trXMaxF = end_year + 1 + res@trYMinF = min(stobs_ts(istat, :, :)) - 2 + res@trYMaxF = max(stobs_ts(istat, :, :)) + 2 + res@xyDashPattern = 0 + res@xyLineColors = (/"blue", "black", "red", "orange"/) + res@tmXBFormat = "f" ; Don't put unnecessary 0's + res@pmLegendDisplayMode = "Never" + + res@tiYAxisString = var_plotnames(1) + " [" + var_unit + "]" + res@xyLineThicknessF = 2.0 ; default is 1 + res@xyMonoDashPattern = True ; force all solid lines + res@tiMainFontHeightF = 0.01 + res@tiYAxisFontHeightF = 0.01 + res@tmXBLabelFontHeightF = 0.007 + res@tmYLLabelFontHeightF = 0.007 + res@tmYLLabelDeltaF = -0.5 + res@tmXBLabelDeltaF = -0.5 + res@tiDeltaF = 0.9 + + sta_ts_plots(istat) = gsn_csm_xy(wks, stobs_ts&time, \ + stobs_ts(istat, :, :), res) + + mres = True + mres@gsMarkerIndex = 5 + mres@gsMarkerSizeF = 0.01 + mres@gsMarkerColor = "black" + mres@gsMarkerThicknessF = 5. + plot@$unique_string("marker")$ = \ + gsn_add_polymarker(wks, plot, station_array@lon(istat), \ + station_array@lat(istat), mres) + end do + + txres = True + txres@txFontHeightF = 0.01 + + ; Manual text placement! If using different stations, please change. + plot@$unique_string("test")$ = \ + gsn_add_text(wks, plot, stobs_ts&station(0) + " (" \ + + station_array@alt(0) \ + + " m)", station_array@lon(0) + 20, \ + station_array@lat(0) - 7, txres) + + plot@$unique_string("test")$ = \ + gsn_add_text(wks, plot, stobs_ts&station(1) + " (" \ + + station_array@alt(1) \ + + " m)", station_array@lon(1) + 10, \ + station_array@lat(1) - 7, txres) + + plot@$unique_string("test")$ = \ + gsn_add_text(wks, plot, stobs_ts&station(2) + " (" \ + + station_array@alt(2) \ + + " m)", station_array@lon(2) + 5, \ + station_array@lat(2) + 7, txres) + + plot@$unique_string("test")$ = \ + gsn_add_text(wks, plot, stobs_ts&station(3) + " (" \ + + station_array@alt(3) \ + + " m)", station_array@lon(3) - 20, \ + station_array@lat(3) - 7, txres) + + plot@$unique_string("test")$ = \ + gsn_add_text(wks, plot, stobs_ts&station(4) + " (" \ + + station_array@alt(4) \ + + " m)", station_array@lon(4), \ + station_array@lat(4) - 7, txres) + + amres = True + amres@amJust = "BottomLeft" + + ; ASK + amres@amOrthogonalPosF = 0.43 ; 0.5 is the bottom edge of the plot. + amres@amParallelPosF = -0.10 ; -0.5 is the left edge of the plot. + anno1 = gsn_add_annotation(plot, sta_ts_plots(0), amres) + + ; CGO + amres@amOrthogonalPosF = 0.45 + amres@amParallelPosF = 0.18 + anno2 = gsn_add_annotation(plot, sta_ts_plots(1), amres) + + ; HUN + amres@amOrthogonalPosF = -0.06 + amres@amParallelPosF = -0.19 + anno3 = gsn_add_annotation(plot, sta_ts_plots(2), amres) + + ; LEF + amres@amOrthogonalPosF = 0.30 + amres@amParallelPosF = -0.445 + anno4 = gsn_add_annotation(plot, sta_ts_plots(3), amres) + + ; WIS + amres@amOrthogonalPosF = -0.05 + amres@amParallelPosF = 0.21 + anno5 = gsn_add_annotation(plot, sta_ts_plots(4), amres) + + ; Make shared legend + + gres = True + gres@YPosPercent = 97. + gres@XPosPercent = 1. + gres@LineLabelWhiteSpacePercent = 1. + + lineres = True + lineres@lgLineColors = (/"blue", "black", "red", "orange"/) + lineres@lgLineThicknesses = 2.5 + lineres@LineLengthPercent = 4. + + textres = True + textres@lgLabelFontHeights = 0.01 + textres@lgLabels = stobs_ts&dataset ; legend labels (required) + + plot = simple_legend(wks, plot, gres, lineres, textres) + draw(plot) + frame(wks) + + ; Provenance + log_provenance(ncdf_outfile, \ + outfile_map + "." 
+ file_type, \ + "Time series for satellite, multi-model mean and station " \ + + var0 + " and " + var1 \ + + ". Similar to Gier et al, 2020 Fig. 2", \ + (/"mean"/), \ + "global", \ + "times", \ + AUTHORS, \ + REFERENCES, \ + array_append_record(MOD_FILES, STAT_FILES, 0)) + end if + + leave_msg(DIAG_SCRIPT, "") +end diff --git a/esmvaltool/diag_scripts/zmnam/zmnam.py b/esmvaltool/diag_scripts/zmnam/zmnam.py index 3fdc5bc14f..c0450c117a 100644 --- a/esmvaltool/diag_scripts/zmnam/zmnam.py +++ b/esmvaltool/diag_scripts/zmnam/zmnam.py @@ -1,5 +1,5 @@ """ -Zonal-mean Northern Annular Mode main routine. +Zonal mean annular mode main routine. Author: Federico Serva (ISAC-CNR & ISMAR-CNR, Italy) Copernicus C3S 34a lot 2 (MAGIC) @@ -9,31 +9,39 @@ based on EOF/PC analysis of the geopotential height field. Modification history -20180512-A_serv_fe: Added output netCDFs, more use of preprocessor. -20180510-A_serv_fe: Routines written. +202107-serva_federico: Update to include hemisphere selection. +20180512-serva_federico: Added output netCDFs, more use of preprocessor. +20180510-serva_federico: Routines written. """ -import os import logging +import os -from esmvaltool.diag_scripts.shared import run_diagnostic, ProvenanceLogger - +from esmvaltool.diag_scripts.shared import ProvenanceLogger, run_diagnostic # Import zmnam diagnostic routines -from zmnam_calc import zmnam_calc -from zmnam_plot import zmnam_plot -from zmnam_preproc import zmnam_preproc +from esmvaltool.diag_scripts.zmnam.zmnam_calc import zmnam_calc +from esmvaltool.diag_scripts.zmnam.zmnam_plot import zmnam_plot +from esmvaltool.diag_scripts.zmnam.zmnam_preproc import ( + zmnam_preproc, + zmnam_preproc_clean, +) logger = logging.getLogger(__name__) -def get_provenance_record(vatt, ancestor_files): +def get_provenance_record(caption, vatt, ancestor_files): """Create a provenance record describing the diagnostic data and plot.""" - caption = ("Compute Zonal-mean Northern Annular Modes between " - "{start_year} and {end_year} ".format(**vatt)) + caption = (caption + " for " + "{start_year}-{end_year}".format(**vatt)) record = { 'caption': caption, - 'authors': ['serv_fe', 'hard_jo', 'arno_en', 'cagn_ch'], + 'authors': [ + 'serva_federico', + 'vonhardenberg_jost', + 'arnone_enrico', + 'cagnazzo_chiara', + ], 'projects': ['c3s-magic'], 'references': ['baldwin09qjrms'], 'plot_types': ['polar', 'zonal'], @@ -46,7 +54,7 @@ def get_provenance_record(vatt, ancestor_files): def main(cfg): """ - Run the zonal-mean NAM diagnostic. + Run the zonal mean annular mode diagnostic. 
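The rewritten `main` below walks each dataset through preprocessing, EOF/PC computation and plotting once per requested hemisphere, then picks a provenance caption from substrings of the output filenames. That if/elif caption chain could equally be table-driven; a minimal sketch, with hypothetical helper names of my own rather than code from this changeset:

```python
# Hypothetical table-driven variant of the caption chain in main() below.
CAPTIONS = {
    'pc_da': 'Daily principal component timeseries',
    'pc_mo': 'Monthly principal component timeseries',
    'mo_ts': 'Monthly principal component timeseries',
    'mo_reg': 'Monthly regression map',
    'da_pdf': 'Daily probability distribution function',
    'eofs': 'Empirical orthogonal functions',
}


def caption_for(filename):
    """Return the first matching caption, mirroring the elif order."""
    for key, caption in CAPTIONS.items():
        if key in filename:
            return caption
    return 'Unspecified zmnam diagnostic'


assert caption_for('MPI-ESM_1000Pa_mo_reg_NAM.png') == 'Monthly regression map'
```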
    Calling in order:
    - preprocessing
@@ -59,46 +67,60 @@ def main(cfg):
     plot_dir = cfg['plot_dir']
     out_dir = cfg['work_dir']
-    write_plots = cfg['write_plots']
+    run_dir = cfg['run_dir']
     fig_fmt = cfg['output_file_type']
-    filenames_cat = []
-    fileprops_cat = []
-
-    # Loop over input cfg
-    for key, value in input_files.items():
-
-        # Collect file names
-        filenames_cat.append(key)
-
-        # Collect relevant information for outputs naming
-        fileprops_cat.append([
-            value['project'], value['dataset'], value['exp'],
-            value['ensemble'],
-            str(value['start_year']) + '-' + str(value['end_year'])
-        ])
+    hemispheres = cfg['hemisphere']

     # Go to work_dir for running
     os.chdir(out_dir)

-    # Process list of input files
-    for indfile, ifile in enumerate(filenames_cat):
-
-        ifile_props = fileprops_cat[indfile]
-
-        # Call diagnostics functions
-        (file_da_an_zm, file_mo_an) = zmnam_preproc(ifile)
-        outfiles = zmnam_calc(file_da_an_zm, out_dir + '/', ifile_props)
-        plot_files = zmnam_plot(file_mo_an, out_dir + '/', plot_dir +
-                                '/', ifile_props, fig_fmt, write_plots)
-        provenance_record = get_provenance_record(
-            list(input_files.values())[0], ancestor_files=ifile)
-        if write_plots:
-            # plot_file cannot be an array, so only the first plot is provided
-            provenance_record['plot_file'] = plot_files[0]
-        for file in outfiles:
-            with ProvenanceLogger(cfg) as provenance_logger:
-                provenance_logger.log(file, provenance_record)
+    # Loop over input cfg
+    for ifile, props in input_files.items():
+
+        # Collect relevant information for outputs naming
+        ifile_props = [
+            props['project'], props['dataset'], props['exp'],
+            props['ensemble'],
+            str(props['start_year']) + '-' + str(props['end_year'])
+        ]
+
+        for hemisphere in hemispheres:
+
+            # Call diagnostics functions
+            logger.info("Preprocessing %s (%s)", ifile, hemisphere)
+            (file_da_an_zm, file_mo_an) = zmnam_preproc(ifile, hemisphere,
+                                                        tempdir=run_dir)
+            logger.info("Computing PCs and EOFs")
+            outfiles = zmnam_calc(file_da_an_zm, out_dir + '/', ifile_props)
+
+            logger.info("Plotting")
+            plot_files = zmnam_plot(file_mo_an, out_dir + '/',
+                                    plot_dir + '/', ifile_props,
+                                    fig_fmt, hemisphere)
+
+            for file in outfiles + plot_files:
+
+                if 'pc_da' in file:
+                    caption = 'Daily principal component timeseries'
+                elif 'pc_mo' in file or 'mo_ts' in file:
+                    caption = 'Monthly principal component timeseries'
+                elif 'mo_reg' in file:
+                    caption = 'Monthly regression map'
+                elif 'da_pdf' in file:
+                    caption = 'Daily probability distribution function'
+                elif 'eofs' in file:
+                    caption = 'Empirical orthogonal functions'
+                else:  # please add a description for any new diagnostic
+                    caption = 'Unspecified zmnam diagnostic'
+
+                prov_record = get_provenance_record(caption, props,
+                                                    ancestor_files=[ifile])
+                with ProvenanceLogger(cfg) as provenance_logger:
+                    provenance_logger.log(file, prov_record)
+
+            # Clean up temporary files for the current model to avoid clutter
+            zmnam_preproc_clean(tempdir=run_dir)

 # Run the diagnostics
diff --git a/esmvaltool/diag_scripts/zmnam/zmnam_calc.py b/esmvaltool/diag_scripts/zmnam/zmnam_calc.py
index c1cb452b5f..b7be242523 100644
--- a/esmvaltool/diag_scripts/zmnam/zmnam_calc.py
+++ b/esmvaltool/diag_scripts/zmnam/zmnam_calc.py
@@ -5,8 +5,8 @@
 Copernicus C3S 34a lot 2 (MAGIC)
 """

+import netCDF4
 import numpy as np
-import netCDF4 as nc4
 from scipy import signal

@@ -40,35 +40,34 @@ def zmnam_calc(da_fname, outdir, src_props):

     # Open daily data
-    in_file = nc4.Dataset(da_fname, "r")
-    time_dim = in_file.variables['time'][:]
-    time_nam = in_file.variables['time'].long_name
-    time_uni = in_file.variables['time'].units
-    time_cal = 
in_file.variables['time'].calendar - time = np.array(time_dim[:], dtype='d') - # startdate = nc4.num2date(time[0], time_uni, time_cal) - date = nc4.num2date(time, in_file.variables['time'].units, - in_file.variables['time'].calendar) - - lev = np.array(in_file.variables['plev'][:], dtype='d') - lev_nam = in_file.variables['plev'].long_name - lev_uni = in_file.variables['plev'].units - lev_pos = in_file.variables['plev'].positive - lev_axi = in_file.variables['plev'].axis - - lat = np.array(in_file.variables['lat'][:], dtype='d') - # lat_nam = in_file.variables['lat'].long_name - lat_uni = in_file.variables['lat'].units - lat_axi = in_file.variables['lat'].axis - - lon = np.array(in_file.variables['lon'][:], dtype='d') - # lon_nam = in_file.variables['lon'].long_name - lon_uni = in_file.variables['lon'].units - lon_axi = in_file.variables['lon'].axis - - zg_da = np.squeeze(np.array(in_file.variables['zg'][:], dtype='d')) - - in_file.close() + with netCDF4.Dataset(da_fname, "r") as in_file: + time_dim = in_file.variables['time'][:] + time_lnam = getattr(in_file.variables['time'], 'long_name', '') + time_snam = getattr(in_file.variables['time'], 'standard_name', '') + time_uni = in_file.variables['time'].units + time_cal = in_file.variables['time'].calendar + time = np.array(time_dim[:], dtype='d') + date = netCDF4.num2date(time, in_file.variables['time'].units, + in_file.variables['time'].calendar) + + lev = np.array(in_file.variables['plev'][:], dtype='d') + lev_lnam = getattr(in_file.variables['plev'], 'long_name', '') + lev_snam = getattr(in_file.variables['plev'], 'standard_name', '') + lev_uni = in_file.variables['plev'].units + lev_pos = in_file.variables['plev'].positive + lev_axi = in_file.variables['plev'].axis + + lat = np.array(in_file.variables['lat'][:], dtype='d') + # lat_nam = in_file.variables['lat'].long_name + lat_uni = in_file.variables['lat'].units + lat_axi = in_file.variables['lat'].axis + + lon = np.array(in_file.variables['lon'][:], dtype='d') + # lon_nam = in_file.variables['lon'].long_name + lon_uni = in_file.variables['lon'].units + lon_axi = in_file.variables['lon'].axis + + zg_da = np.squeeze(np.array(in_file.variables['zg'][:], dtype='d')) n_tim = len(time_dim) print('end infile close') @@ -104,9 +103,8 @@ def zmnam_calc(da_fname, outdir, src_props): mid_mon.append(idate) # Save last day of the month - if ((idate == len(date) - 1) or - (date[idate].month == mon and - date[idate + 1].month != mon)): + if idate == len(date) - 1 or (date[idate].month == mon and + date[idate + 1].month != mon): end_mon.append(idate) idate += 1 @@ -118,18 +116,16 @@ def zmnam_calc(da_fname, outdir, src_props): # Latitude weighting if lat_weighting is True: for j_lat in np.arange(len(lat)): - zg_da_lp[:, i_lev, j_lat] *= np.sqrt( - abs(np.cos(lat[j_lat] * deg_to_r))) + zg_da_lp[:, i_lev, + j_lat] *= np.sqrt(abs(np.cos(lat[j_lat] * deg_to_r))) - zg_da_lp_an = zg_da_lp[:, i_lev, :] - np.mean( - zg_da_lp[:, i_lev, :], axis=0) + zg_da_lp_an = zg_da_lp[:, i_lev, :] - np.mean(zg_da_lp[:, i_lev, :], + axis=0) cov = np.dot(zg_da_lp_an.T, zg_da_lp_an) / (n_tim - 1) # Compute eigenvectors and eigenvalues eigenval, eigenvec = np.linalg.eig(cov) - print(eigenvec) - sum_eigenval = np.sum(eigenval) eigenval_norm = eigenval[:] / sum_eigenval @@ -152,12 +148,19 @@ def zmnam_calc(da_fname, outdir, src_props): lead_pc = (pc[:, max_eigenval] - lead_pc_mean) / lead_pc_std lead_eof = eigenvec[:, max_eigenval] + # Constrain meridional EOF structure max_lat = max(range(len(lat)), key=lambda x: lat[x]) 
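`zmnam_calc` builds a lat-by-lat covariance matrix from the zonal-mean anomalies and takes its leading eigenvector as the annular-mode EOF. Because the covariance matrix is symmetric, `numpy.linalg.eigh` is a natural alternative to the `np.linalg.eig` call used above: it returns real, ascending-sorted eigenvalues with no complex round-off to discard. A self-contained sketch on synthetic data, assuming nothing from this changeset:

```python
import numpy as np

rng = np.random.default_rng(42)
anom = rng.standard_normal((500, 36))    # (time, lat) zonal-mean anomalies
cov = anom.T @ anom / (anom.shape[0] - 1)

eigval, eigvec = np.linalg.eigh(cov)     # symmetric solver, ascending order
lead_eof = eigvec[:, -1]                 # leading EOF
frac_var = eigval[-1] / eigval.sum()     # explained-variance fraction

lead_pc = anom @ lead_eof                # project anomalies onto the EOF
lead_pc = (lead_pc - lead_pc.mean()) / lead_pc.std(ddof=1)
```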
min_lat = min(range(len(lat)), key=lambda x: lat[x]) - if lead_eof[max_lat] > lead_eof[min_lat]: + if np.min(lat) > 0. and (lead_eof[max_lat] > lead_eof[min_lat]): lead_pc *= -1 lead_eof *= -1 + index_name = 'NAM' + + if np.min(lat) < 0. and (lead_eof[min_lat] > lead_eof[max_lat]): + lead_pc *= -1 + lead_eof *= -1 + index_name = 'SAM' lead_pc_mo = np.zeros(len(date[mid_mon]), dtype='d') time_mo = np.zeros(len(date[mid_mon]), dtype='d') @@ -165,8 +168,8 @@ def zmnam_calc(da_fname, outdir, src_props): # print(lead_pc) for k_mo in range(len(date[mid_mon])): - lead_pc_mo[k_mo] = np.mean( - lead_pc[sta_mon[k_mo]:end_mon[k_mo] + 1]) + lead_pc_mo[k_mo] = np.mean(lead_pc[sta_mon[k_mo]:end_mon[k_mo] + + 1]) time_mo[k_mo] = time[mid_mon[k_mo]] # Store PC/EOF for this level (no time dependent) @@ -178,131 +181,146 @@ def zmnam_calc(da_fname, outdir, src_props): # Save output files # (1) daily PCs - fname = outdir + '_'.join(src_props) + '_pc_da.nc' + fname = outdir + '_'.join(src_props) + '_pc_da_' + index_name + '.nc' outfiles.append(fname) - file_out = nc4.Dataset(fname, mode='w', format='NETCDF3_CLASSIC') - file_out.title = 'Zonal mean annular mode (1)' - file_out.contact = 'F. Serva (federico.serva@artov.isac.cnr.it); \ - C. Cagnazzo (chiara.cagnazzo@cnr.it)' - - file_out.createDimension('time', None) - file_out.createDimension('plev', np.size(lev)) - file_out.createDimension('lat', np.size(lat)) - file_out.createDimension('lon', np.size(lon)) - - time_var = file_out.createVariable('time', 'd', ('time', )) - time_var.setncattr('long_name', time_nam) - time_var.setncattr('units', time_uni) - time_var.setncattr('calendar', time_cal) - time_var[:] = time_dim[:] - - lev_var = file_out.createVariable('plev', 'd', ('plev', )) - lev_var.setncattr('long_name', lev_nam) - lev_var.setncattr('units', lev_uni) - lev_var.setncattr('positive', lev_pos) - lev_var.setncattr('axis', lev_axi) - lev_var[:] = lev[:] - - pcs_da_var = file_out.createVariable('PC_da', 'd', ( - 'time', - 'plev', - )) - pcs_da_var.setncattr('long_name', 'Daily zonal mean annular mode PC') - pcs_da_var.setncattr( - 'comment', - 'Reference: Baldwin and Thompson (2009), doi:10.1002/qj.479') - pcs_da_var[:] = pcs_da[:, :] - - file_out.close() + with netCDF4.Dataset(fname, mode='w') as file_out: + file_out.title = 'Zonal mean annular mode (1)' + file_out.contact = 'F. Serva (federico.serva@artov.ismar.cnr.it); \ + C. 
Cagnazzo (chiara.cagnazzo@cnr.it)' + + file_out.createDimension('time', None) + file_out.createDimension('plev', np.size(lev)) + file_out.createDimension('lat', np.size(lat)) + file_out.createDimension('lon', np.size(lon)) + + time_var = file_out.createVariable('time', 'd', ('time', )) + if time_lnam: + time_var.setncattr('long_name', time_lnam) + if time_snam: + time_var.setncattr('standard_name', time_snam) + time_var.setncattr('units', time_uni) + time_var.setncattr('calendar', time_cal) + time_var[:] = time_dim[:] + + lev_var = file_out.createVariable('plev', 'd', ('plev', )) + if lev_lnam: + lev_var.setncattr('long_name', lev_lnam) + if lev_snam: + lev_var.setncattr('standard_name', lev_snam) + lev_var.setncattr('units', lev_uni) + lev_var.setncattr('positive', lev_pos) + lev_var.setncattr('axis', lev_axi) + lev_var[:] = lev[:] + + pcs_da_var = file_out.createVariable('PC_da', 'd', ( + 'time', + 'plev', + )) + pcs_da_var.setncattr('long_name', 'Daily zonal mean annular mode PC') + pcs_da_var.setncattr('index_type', index_name) + pcs_da_var.setncattr( + 'comment', + 'Reference: Baldwin and Thompson (2009), doi:10.1002/qj.479') + pcs_da_var[:] = pcs_da[:, :] # (2) monthly PCs - fname = outdir + '_'.join(src_props) + '_pc_mo.nc' - file_out = nc4.Dataset(fname, mode='w', format='NETCDF3_CLASSIC') + fname = outdir + '_'.join(src_props) + '_pc_mo_' + index_name + '.nc' outfiles.append(fname) - file_out.title = 'Zonal mean annular mode (2)' - file_out.contact = 'F. Serva (federico.serva@artov.isac.cnr.it); \ - C. Cagnazzo (chiara.cagnazzo@cnr.it)' - - file_out.createDimension('time', None) - file_out.createDimension('plev', np.size(lev)) - - time_var = file_out.createVariable('time', 'd', ('time', )) - time_var.setncattr('long_name', time_nam) - time_var.setncattr('units', time_uni) - time_var.setncattr('calendar', time_cal) - time_var[:] = time_mo - - lev_var = file_out.createVariable('plev', 'd', ('plev', )) - lev_var.setncattr('long_name', lev_nam) - lev_var.setncattr('units', lev_uni) - lev_var.setncattr('positive', lev_pos) - lev_var.setncattr('axis', lev_axi) - lev_var[:] = lev[:] - - pcs_mo_var = file_out.createVariable('PC_mo', 'd', ( - 'time', - 'plev', - )) - pcs_mo_var.setncattr('long_name', 'Monthly zonal mean annular mode PC') - pcs_mo_var.setncattr( - 'comment', - 'Reference: Baldwin and Thompson (2009), doi:10.1002/qj.479') - pcs_mo_var[:] = pcs_mo[:, :] - - file_out.close() - - # (3) EOFs and explained varianceo - fname = outdir + '_'.join(src_props) + '_eofs.nc' - file_out = nc4.Dataset(fname, mode='w', format='NETCDF3_CLASSIC') + with netCDF4.Dataset(fname, mode='w') as file_out: + file_out.title = 'Zonal mean annular mode (2)' + file_out.contact = 'F. Serva (federico.serva@artov.ismar.cnr.it); \ + C. 
Cagnazzo (chiara.cagnazzo@cnr.it)' + + file_out.createDimension('time', None) + file_out.createDimension('plev', np.size(lev)) + + time_var = file_out.createVariable('time', 'd', ('time', )) + if time_lnam: + time_var.setncattr('long_name', time_lnam) + if time_snam: + time_var.setncattr('standard_name', time_snam) + time_var.setncattr('units', time_uni) + time_var.setncattr('calendar', time_cal) + time_var[:] = time_mo + + lev_var = file_out.createVariable('plev', 'd', ('plev', )) + if lev_lnam: + lev_var.setncattr('long_name', lev_lnam) + if lev_snam: + lev_var.setncattr('standard_name', lev_snam) + lev_var.setncattr('units', lev_uni) + lev_var.setncattr('positive', lev_pos) + lev_var.setncattr('axis', lev_axi) + lev_var[:] = lev[:] + + pcs_mo_var = file_out.createVariable('PC_mo', 'd', ( + 'time', + 'plev', + )) + pcs_mo_var.setncattr('long_name', 'Monthly zonal mean annular mode PC') + pcs_mo_var.setncattr('index_type', index_name) + pcs_mo_var.setncattr( + 'comment', + 'Reference: Baldwin and Thompson (2009), doi:10.1002/qj.479') + pcs_mo_var[:] = pcs_mo[:, :] + + # (3) EOFs and explained variances + fname = outdir + '_'.join(src_props) + '_eofs_' + index_name + '.nc' outfiles.append(fname) - - file_out.title = 'Zonal mean annular mode (3)' - file_out.contact = 'F. Serva (federico.serva@artov.isac.cnr.it); \ - C. Cagnazzo (chiara.cagnazzo@cnr.it)' - - file_out.createDimension('time', None) - file_out.createDimension('plev', np.size(lev)) - file_out.createDimension('lat', np.size(lat)) - file_out.createDimension('lon', np.size(lon)) - - time_var = file_out.createVariable('time', 'd', ('time', )) - time_var.setncattr('long_name', time_nam) - time_var.setncattr('units', time_uni) - time_var.setncattr('calendar', time_cal) - time_var[:] = 0 - # - lev_var = file_out.createVariable('plev', 'd', ('plev', )) - lev_var.setncattr('long_name', lev_nam) - lev_var.setncattr('units', lev_uni) - lev_var.setncattr('positive', lev_pos) - lev_var.setncattr('axis', lev_axi) - lev_var[:] = lev[:] - # - lat_var = file_out.createVariable('lat', 'd', ('lat', )) - lat_var.setncattr('units', lat_uni) - lev_var.setncattr('axis', lat_axi) - lat_var[:] = lat[:] - # - lon_var = file_out.createVariable('lon', 'd', ('lon', )) - lon_var.setncattr('units', lon_uni) - lon_var.setncattr('axis', lon_axi) - lon_var[:] = lon[:] - # - eofs_var = file_out.createVariable('EOF', 'd', ('plev', 'lat')) - eofs_var.setncattr('long_name', 'Zonal mean annular mode EOF') - eofs_var.setncattr( - 'comment', - 'Reference: Baldwin and Thompson (2009), doi:10.1002/qj.479') - eofs_var[:] = eofs[:, :] - # - eigs_var = file_out.createVariable('eigenvalues', 'd', ('plev')) - eigs_var.setncattr('long_name', - 'Zonal mean annular mode EOF explained variance') - eigs_var.setncattr( - 'comment', - 'Reference: Baldwin and Thompson (2009), doi:10.1002/qj.479') - eigs_var[:] = eigs[:] - # - file_out.close() + with netCDF4.Dataset(fname, mode='w') as file_out: + file_out.title = 'Zonal mean annular mode (3)' + file_out.contact = 'F. Serva (federico.serva@artov.ismar.cnr.it); \ + C. 
Cagnazzo (chiara.cagnazzo@cnr.it)' + + file_out.createDimension('time', None) + file_out.createDimension('plev', np.size(lev)) + file_out.createDimension('lat', np.size(lat)) + file_out.createDimension('lon', np.size(lon)) + + time_var = file_out.createVariable('time', 'd', ('time', )) + if time_lnam: + time_var.setncattr('long_name', time_lnam) + if time_snam: + time_var.setncattr('standard_name', time_snam) + time_var.setncattr('units', time_uni) + time_var.setncattr('calendar', time_cal) + time_var[:] = 0 + # + lev_var = file_out.createVariable('plev', 'd', ('plev', )) + if lev_lnam: + lev_var.setncattr('long_name', lev_lnam) + if lev_snam: + lev_var.setncattr('standard_name', lev_snam) + lev_var.setncattr('units', lev_uni) + lev_var.setncattr('positive', lev_pos) + lev_var.setncattr('axis', lev_axi) + lev_var[:] = lev[:] + # + lat_var = file_out.createVariable('lat', 'd', ('lat', )) + lat_var.setncattr('units', lat_uni) + lev_var.setncattr('axis', lat_axi) + lat_var[:] = lat[:] + # + lon_var = file_out.createVariable('lon', 'd', ('lon', )) + lon_var.setncattr('units', lon_uni) + lon_var.setncattr('axis', lon_axi) + lon_var[:] = lon[:] + # + eofs_var = file_out.createVariable('EOF', 'd', ('plev', 'lat')) + eofs_var.setncattr('long_name', 'Zonal mean annular mode EOF') + eofs_var.setncattr('index_type', index_name) + eofs_var.setncattr( + 'comment', + 'Reference: Baldwin and Thompson (2009), doi:10.1002/qj.479') + eofs_var[:] = eofs[:, :] + # + eigs_var = file_out.createVariable('eigenvalues', 'd', ('plev')) + eigs_var.setncattr('long_name', + 'Zonal mean annular mode EOF explained variance') + eigs_var.setncattr('index_type', index_name) + eigs_var.setncattr( + 'comment', + 'Reference: Baldwin and Thompson (2009), doi:10.1002/qj.479') + eigs_var[:] = eigs[:] return outfiles diff --git a/esmvaltool/diag_scripts/zmnam/zmnam_plot.py b/esmvaltool/diag_scripts/zmnam/zmnam_plot.py index 1af7928951..330a465ae2 100644 --- a/esmvaltool/diag_scripts/zmnam/zmnam_plot.py +++ b/esmvaltool/diag_scripts/zmnam/zmnam_plot.py @@ -6,88 +6,85 @@ """ -import numpy as np +import cartopy.crs as ccrs import matplotlib as mpl import matplotlib.pyplot as plt -import netCDF4 as nc4 -import cartopy.crs as ccrs +import netCDF4 +import numpy as np from cartopy.util import add_cyclic_point -def zmnam_plot(file_gh_mo, datafolder, figfolder, src_props, - fig_fmt, write_plots): +def zmnam_plot(file_gh_mo, datafolder, figfolder, src_props, fig_fmt, + hemisphere): """Plotting of timeseries and maps for zmnam diagnostics.""" + + if hemisphere == 'NH': + index_name = 'NAM' + if hemisphere == 'SH': + index_name = 'SAM' + plot_files = [] # Open daily and monthly PCs - file_name = '_'.join(src_props) + '_pc_da.nc' + file_name = '_'.join(src_props) + '_pc_da_' + index_name + '.nc' # print(datafolder + file_name) - in_file = nc4.Dataset(datafolder + file_name, "r") - # - # time_da = in_file.variables['time'][:] - # time_da_uni = in_file.variables['time'].units - # time_da_cal = in_file.variables['time'].calendar - # - lev = np.array(in_file.variables['plev'][:], dtype='d') - # lev_units = in_file.variables['plev'].units - # - pc_da = np.array(in_file.variables['PC_da'][:], dtype='d') - in_file.close() - - file_name = '_'.join(src_props) + '_pc_mo.nc' + with netCDF4.Dataset(datafolder + file_name, "r") as in_file: + lev = np.array(in_file.variables['plev'][:], dtype='d') + pc_da = np.array(in_file.variables['PC_da'][:], dtype='d') + + file_name = '_'.join(src_props) + '_pc_mo_' + index_name + '.nc' # print(datafolder + file_name) 
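One robustness note on the hemisphere handling at the top of `zmnam_plot`: two independent `if` tests set `index_name`, so an unexpected `hemisphere` value would leave the name unbound and only fail later with a `NameError`. A dictionary lookup fails immediately and explicitly instead; a possible tightening, not part of this changeset:

```python
INDEX_BY_HEMISPHERE = {'NH': 'NAM', 'SH': 'SAM'}


def index_for(hemisphere):
    """Translate the recipe's hemisphere setting into an index name."""
    try:
        return INDEX_BY_HEMISPHERE[hemisphere]
    except KeyError:
        raise ValueError(
            "hemisphere must be 'NH' or 'SH', got %r" % (hemisphere, ))
```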
- in_file = nc4.Dataset(datafolder + file_name, "r") - # - time_mo = np.array(in_file.variables['time'][:], dtype='d') - time_mo_uni = in_file.variables['time'].units - time_mo_cal = in_file.variables['time'].calendar - # - pc_mo = np.array(in_file.variables['PC_mo'][:], dtype='d') - in_file.close() + with netCDF4.Dataset(datafolder + file_name, "r") as in_file: + time_mo = np.array(in_file.variables['time'][:], dtype='d') + time_mo_uni = in_file.variables['time'].units + time_mo_cal = in_file.variables['time'].calendar + + pc_mo = np.array(in_file.variables['PC_mo'][:], dtype='d') # Open monthly gh field file_name = file_gh_mo # print(datafolder + file_name) - in_file = nc4.Dataset(file_name, "r") - dims = list(in_file.dimensions.keys())[::-1] # py3 - print('mo full dims', dims) - - # Double check on lat/lon names, possibly redundant - if 'latitude' in dims: - latn = 'latitude' - if 'lat' in dims: - latn = 'lat' - if 'longitude' in dims: - lonn = 'longitude' - if 'lon' in dims: - lonn = 'lon' - lat = np.array(in_file.variables[latn][:]) - lon = np.array(in_file.variables[lonn][:]) - - zg_mo = np.array(in_file.variables['zg'][:]) - - # Record attributes for output netCDFs - time_nam = in_file.variables['time'].long_name - time_uni = in_file.variables['time'].units - time_cal = in_file.variables['time'].calendar - - lev_nam = in_file.variables['plev'].long_name - lev_uni = in_file.variables['plev'].units - lev_pos = in_file.variables['plev'].positive - lev_axi = in_file.variables['plev'].axis - - lat_uni = in_file.variables[latn].units - lat_axi = in_file.variables[latn].axis - - lon_uni = in_file.variables[lonn].units - lon_axi = in_file.variables[lonn].axis - - in_file.close() + with netCDF4.Dataset(file_name, "r") as in_file: + dims = list(in_file.dimensions.keys())[::-1] # py3 + print('mo full dims', dims) + + # Double check on lat/lon names, possibly redundant + if 'latitude' in dims: + latn = 'latitude' + if 'lat' in dims: + latn = 'lat' + if 'longitude' in dims: + lonn = 'longitude' + if 'lon' in dims: + lonn = 'lon' + lat = np.array(in_file.variables[latn][:]) + lon = np.array(in_file.variables[lonn][:]) + + zg_mo = np.array(in_file.variables['zg'][:]) + + # Record attributes for output netCDFs + time_lnam = getattr(in_file.variables['time'], 'long_name', '') + time_snam = getattr(in_file.variables['time'], 'standard_name', '') + time_uni = in_file.variables['time'].units + time_cal = in_file.variables['time'].calendar + lev_lnam = getattr(in_file.variables['plev'], 'long_name', '') + lev_snam = getattr(in_file.variables['plev'], 'standard_name', '') + lev_uni = in_file.variables['plev'].units + lev_pos = in_file.variables['plev'].positive + lev_axi = in_file.variables['plev'].axis + + lat_uni = in_file.variables[latn].units + lat_axi = in_file.variables[latn].axis + + lon_uni = in_file.variables[lonn].units + lon_axi = in_file.variables[lonn].axis # Save dates for timeseries date_list = [] for i_date in np.arange(len(time_mo)): - yydate = nc4.num2date(time_mo, time_mo_uni, time_mo_cal)[i_date].year - mmdate = nc4.num2date(time_mo, time_mo_uni, time_mo_cal)[i_date].month + yydate = netCDF4.num2date(time_mo, time_mo_uni, + time_mo_cal)[i_date].year + mmdate = netCDF4.num2date(time_mo, time_mo_uni, + time_mo_cal)[i_date].month date_list.append(str(yydate) + '-' + str(mmdate)) # Prepare array for outputting regression maps (lev/lat/lon) @@ -102,16 +99,16 @@ def zmnam_plot(file_gh_mo, datafolder, figfolder, src_props, # Make only a few ticks plt.xticks(time_mo[0:len(time_mo) + 1:60], 
date_list[0:len(time_mo) + 1:60]) - plt.title(str(int(lev[i_lev])) + ' Pa ' + - src_props[1] + ' ' + src_props[2]) + plt.title( + str(int(lev[i_lev])) + ' Pa ' + src_props[1] + ' ' + src_props[2]) plt.xlabel('Time') - plt.ylabel('Zonal mean NAM') + plt.ylabel('Zonal mean ' + index_name) - if write_plots: - fname = (figfolder + '_'.join(src_props) + '_' + - str(int(lev[i_lev])) + 'Pa_mo_ts.' + fig_fmt) - plt.savefig(fname, format=fig_fmt) - plot_files.append(fname) + fname = (figfolder + '_'.join(src_props) + '_' + + str(int(lev[i_lev])) + 'Pa_mo_ts_' + index_name + + '.' + fig_fmt) + plt.savefig(fname, format=fig_fmt) + plot_files.append(fname) plt.figure() @@ -121,27 +118,32 @@ def zmnam_plot(file_gh_mo, datafolder, figfolder, src_props, max_var = 5 n_bars = 50 - _, bins, _ = plt.hist(pc_da[:, i_lev], n_bars, density=True, - range=(min_var, max_var), facecolor='b', + _, bins, _ = plt.hist(pc_da[:, i_lev], + n_bars, + density=True, + range=(min_var, max_var), + facecolor='b', alpha=0.75) # Reference normal Gaussian - plt.plot(bins, 1. / (np.sqrt(2 * np.pi)) * - np.exp(- bins**2 / 2.), - linewidth=2, color='k', linestyle='--') + plt.plot(bins, + 1. / (np.sqrt(2 * np.pi)) * np.exp(-bins**2 / 2.), + linewidth=2, + color='k', + linestyle='--') plt.xlim(min_var, max_var) - plt.title('Daily PDF ' + str(int(lev[i_lev])) + - ' Pa ' + src_props[1] + ' ' + src_props[2]) - plt.xlabel('Zonal mean NAM') + plt.title('Daily PDF ' + str(int(lev[i_lev])) + ' Pa ' + + src_props[1] + ' ' + src_props[2]) + plt.xlabel('Zonal mean ' + index_name) plt.ylabel('Normalized probability') plt.tight_layout() - if write_plots: - fname = (figfolder + '_'.join(src_props) + '_' + - str(int(lev[i_lev])) + 'Pa_da_pdf.' + fig_fmt) - plt.savefig(fname, format=fig_fmt) - plot_files.append(fname) + fname = (figfolder + '_'.join(src_props) + '_' + + str(int(lev[i_lev])) + 'Pa_da_pdf_' + index_name + + '.' + fig_fmt) + plt.savefig(fname, format=fig_fmt) + plot_files.append(fname) plt.close('all') @@ -153,10 +155,10 @@ def zmnam_plot(file_gh_mo, datafolder, figfolder, src_props, for k_lon in np.arange(len(lon)): # Following BT09, the maps are Z_m^l*PC_m^l/|PC_m^l|^2 - slope[j_lat, k_lon] = np.dot(zg_mo[:, i_lev, j_lat, k_lon], - (pc_mo[:, i_lev]) / - np.dot(pc_mo[:, i_lev], - pc_mo[:, i_lev])) + slope[j_lat, + k_lon] = np.dot(zg_mo[:, i_lev, j_lat, + k_lon], (pc_mo[:, i_lev]) / + np.dot(pc_mo[:, i_lev], pc_mo[:, i_lev])) # Plots of regression maps plt.figure() @@ -164,8 +166,13 @@ def zmnam_plot(file_gh_mo, datafolder, figfolder, src_props, # Fixed contour levels. May be improved somehow. 
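For the regression maps, the double loop above evaluates the Baldwin and Thompson (2009) expression, slope = Z·PC / |PC|², one grid point at a time. The same map comes out of a single vectorized contraction, which scales much better on fine grids; a sketch with synthetic shapes, nothing here taken from this changeset:

```python
import numpy as np

ntime, nlat, nlon = 240, 72, 144
zg = np.random.rand(ntime, nlat, nlon)   # monthly gh anomalies at one level
pc = np.random.rand(ntime)               # monthly PC at the same level

# slope[j, k] = dot(zg[:, j, k], pc) / dot(pc, pc), all points at once
slope = np.tensordot(pc, zg, axes=(0, 0)) / np.dot(pc, pc)
assert slope.shape == (nlat, nlon)
```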
        regr_levs = -1000 + np.arange(201) * 10

-        # Create the projections
-        ortho = ccrs.Orthographic(central_longitude=0, central_latitude=90)
+        # Create the projections, selecting hemisphere based on latitudes
+        if min(lat) > 0.:  # NH
+            ortho = ccrs.Orthographic(central_longitude=0,
+                                      central_latitude=90)
+        if min(lat) < 0.:  # SH
+            ortho = ccrs.Orthographic(central_longitude=0,
+                                      central_latitude=-90)
         ccrs.Geodetic()

         # Create the geoaxes for an orthographic projection
@@ -176,45 +183,76 @@

         # lons, lats = np.meshgrid(lonw, lat)
-        plt.contourf(lonw, lat, slopew,
+        plt.contourf(lonw,
+                     lat,
+                     slopew,
                      colors=('#cccccc', '#ffffff'),
                      levels=[-10000, 0, 10000],
                      transform=ccrs.PlateCarree())

         # Switch temporarily to solid negative lines
         mpl.rcParams['contour.negative_linestyle'] = 'solid'
-        plt.contour(lonw, lat, slopew, levels=regr_levs,
-                    colors='k', transform=ccrs.PlateCarree(),
-                    zorder=5)
+        plt.contour(lonw,
+                    lat,
+                    slopew,
+                    levels=regr_levs,
+                    colors='k',
+                    transform=ccrs.PlateCarree(),
+                    zorder=1)

         # Invisible contours, only for labels.
-        # Workaround for cartopy issue, as of Dec 18
-        inv_map = plt.contour(lonw, lat, slopew, levels=regr_levs,
-                              colors='k', transform=ccrs.PlateCarree(),
-                              zorder=10)
+        # Change zorder for cartopy/matplotlib label issue, as of June 2019
+        inv_map = plt.contour(lonw,
+                              lat,
+                              slopew,
+                              levels=regr_levs,
+                              colors='k',
+                              transform=ccrs.PlateCarree(),
+                              zorder=15)
         mpl.rcParams['contour.negative_linestyle'] = 'dashed'

         for cmap in inv_map.collections:
             cmap.set_visible(False)

-        plt.clabel(inv_map, fontsize=8, fmt='%1.0f', zorder=15)
+        # Add contour labels over white boxes
+        kwargs = {'fontsize': 8, 'fmt': '%1.0f'}
+        if tuple(int(v) for v in mpl.__version__.split('.')[:2]) >= (3, 3):
+            kwargs['zorder'] = 30  # new in matplotlib version 3.3
+        plt.clabel(inv_map, **kwargs)
+        # work around https://github.com/SciTools/cartopy/issues/1554
+        # in cartopy 0.18
+        clabs = inv_map.labelTextsList
+        bbox_dict = dict(boxstyle='square,pad=0',
+                         edgecolor='none',
+                         fc='white', zorder=25)
+        for txt in clabs:
+            txt.set_bbox(bbox_dict)

         axis.coastlines()
         axis.set_global()

-        plt.text(0.20, 0.80, str(int(lev[i_lev])) + ' Pa',
-                 fontsize=12, transform=plt.gcf().transFigure)
-        plt.text(0.75, 0.80, src_props[1],
-                 fontsize=12, transform=plt.gcf().transFigure)
-        plt.text(0.75, 0.75, src_props[2],
-                 fontsize=12, transform=plt.gcf().transFigure)
-
-        if write_plots:
-            fname = (figfolder + '_'.join(src_props) + '_' +
-                     str(int(lev[i_lev])) + 'Pa_mo_reg.' + fig_fmt)
-            plt.savefig(fname, format=fig_fmt)
-            plot_files.append(fname)
+        plt.text(0.20,
+                 0.80,
+                 str(int(lev[i_lev])) + ' Pa',
+                 fontsize=12,
+                 transform=plt.gcf().transFigure)
+        plt.text(0.75,
+                 0.80,
+                 src_props[1],
+                 fontsize=12,
+                 transform=plt.gcf().transFigure)
+        plt.text(0.75,
+                 0.75,
+                 src_props[2],
+                 fontsize=12,
+                 transform=plt.gcf().transFigure)
+
+        fname = (figfolder + '_'.join(src_props) + '_' +
+                 str(int(lev[i_lev])) + 'Pa_mo_reg_' + index_name +
+                 '.' + fig_fmt)
+        plt.savefig(fname, format=fig_fmt)
+        plot_files.append(fname)

         plt.close('all')

@@ -222,49 +260,54 @@
         regr_arr[i_lev, :, :] = slope

     # Save 3D regression results in output netCDF
-    file_out = nc4.Dataset(datafolder + '_'.join(src_props) + '_regr_map.nc',
-                           mode='w', format='NETCDF3_CLASSIC')
-
-    file_out.title = 'Zonal mean annular mode (4)'
-    file_out.contact = 'F. Serva (federico.serva@artov.isac.cnr.it); \
-    C. 
Cagnazzo (chiara.cagnazzo@cnr.it)' - - # - file_out.createDimension('time', None) - file_out.createDimension('plev', np.size(lev)) - file_out.createDimension('lat', np.size(lat)) - file_out.createDimension('lon', np.size(lon)) - # - time_var = file_out.createVariable('time', 'd', ('time', )) - time_var.setncattr('long_name', time_nam) - time_var.setncattr('units', time_uni) - time_var.setncattr('calendar', time_cal) - time_var[:] = 0 # singleton - # - lev_var = file_out.createVariable('plev', 'd', ('plev', )) - lev_var.setncattr('long_name', lev_nam) - lev_var.setncattr('units', lev_uni) - lev_var.setncattr('positive', lev_pos) - lev_var.setncattr('axis', lev_axi) - lev_var[:] = lev[:] - # - lat_var = file_out.createVariable('lat', 'd', ('lat', )) - lat_var.setncattr('units', lat_uni) - lev_var.setncattr('axis', lat_axi) - lat_var[:] = lat[:] - # - lon_var = file_out.createVariable('lon', 'd', ('lon', )) - lon_var.setncattr('units', lon_uni) - lon_var.setncattr('axis', lon_axi) - lon_var[:] = lon[:] - # - regr_var = file_out.createVariable('regr', 'f', ('plev', 'lat', 'lon')) - regr_var.setncattr('long_name', 'Zonal mean annular mode regression map') - regr_var.setncattr( - 'comment', - 'Reference: Baldwin and Thompson ' + '(2009), doi:10.1002/qj.479') - regr_var[:] = regr_arr[:, :, :] - # - file_out.close() + with netCDF4.Dataset(datafolder + '_'.join(src_props) + '_regr_map_' + + index_name + '.nc', mode='w') as file_out: + file_out.title = 'Zonal mean annular mode (4)' + file_out.contact = 'F. Serva (federico.serva@artov.ismar.cnr.it); \ + C. Cagnazzo (chiara.cagnazzo@cnr.it)' + + # + file_out.createDimension('time', None) + file_out.createDimension('plev', np.size(lev)) + file_out.createDimension('lat', np.size(lat)) + file_out.createDimension('lon', np.size(lon)) + # + time_var = file_out.createVariable('time', 'd', ('time', )) + if time_lnam: + time_var.setncattr('long_name', time_lnam) + if time_snam: + time_var.setncattr('standard_name', time_snam) + time_var.setncattr('units', time_uni) + time_var.setncattr('calendar', time_cal) + time_var[:] = 0 # singleton + # + lev_var = file_out.createVariable('plev', 'd', ('plev', )) + if lev_lnam: + lev_var.setncattr('long_name', lev_lnam) + if lev_snam: + lev_var.setncattr('standard_name', lev_snam) + lev_var.setncattr('units', lev_uni) + lev_var.setncattr('positive', lev_pos) + lev_var.setncattr('axis', lev_axi) + lev_var[:] = lev[:] + # + lat_var = file_out.createVariable('lat', 'd', ('lat', )) + lat_var.setncattr('units', lat_uni) + lev_var.setncattr('axis', lat_axi) + lat_var[:] = lat[:] + # + lon_var = file_out.createVariable('lon', 'd', ('lon', )) + lon_var.setncattr('units', lon_uni) + lon_var.setncattr('axis', lon_axi) + lon_var[:] = lon[:] + # + regr_var = file_out.createVariable('regr', 'f', ('plev', 'lat', 'lon')) + regr_var.setncattr('long_name', + 'Zonal mean annular mode regression map') + regr_var.setncattr('index_type', index_name) + regr_var.setncattr( + 'comment', + 'Reference: Baldwin and Thompson ' + '(2009), doi:10.1002/qj.479') + regr_var[:] = regr_arr[:, :, :] return plot_files diff --git a/esmvaltool/diag_scripts/zmnam/zmnam_preproc.py b/esmvaltool/diag_scripts/zmnam/zmnam_preproc.py index 08e5fa9252..5f82282dc7 100644 --- a/esmvaltool/diag_scripts/zmnam/zmnam_preproc.py +++ b/esmvaltool/diag_scripts/zmnam/zmnam_preproc.py @@ -8,19 +8,56 @@ import cdo as cd -def zmnam_preproc(ifile): +def zmnam_preproc(ifile, hemisphere, tempdir=None): """Preprocessing of the input dataset files.""" - cdo = cd.Cdo() + + # Avoid using 
/tmp for larger files + if tempdir is not None: + cdo = cd.Cdo(tempdir=tempdir) + else: + cdo = cd.Cdo() + + # Select hemisphere depending on recipe parameter + if hemisphere == 'NH': + print('Northern hemisphere index') + rfile = cdo.sellonlatbox('0,360,20,90', input=ifile) + if hemisphere == 'SH': + print('Southern hemisphere index') + rfile = cdo.sellonlatbox('0,360,-90,-20', input=ifile) + # Delete leap day, if any. - full_da_nl = cdo.delete('month=2,day=29', input=ifile) + full_da_nl = cdo.delete('month=2,day=29', input=rfile) + + # Fill missing values with bilinear method. + full_da_nl_nn = cdo.fillmiss(input=full_da_nl) + + # Detrend with memory-efficient method. + full_da_nl_a, full_da_nl_b = cdo.trend(input=full_da_nl_nn) + full_da_nl_detr = cdo.subtrend(input=full_da_nl_nn + ' ' + full_da_nl_a + + ' ' + full_da_nl_b) # Compute anomalies from the daily/monthly means. - gh_da_dm = cdo.ydaymean(input=full_da_nl) - gh_da_an = cdo.sub(input=full_da_nl + ' ' + gh_da_dm) + gh_da_dm = cdo.ydaymean(input=full_da_nl_detr) + gh_da_an = cdo.sub(input=full_da_nl_detr + ' ' + gh_da_dm) gh_da_an_zm = cdo.zonmean(input=gh_da_an) - gh_mo = cdo.monmean(input=full_da_nl) + # Compute monthly mean anomalies. + gh_mo = cdo.monmean(input=full_da_nl_detr) gh_mo_mm = cdo.ymonmean(input=gh_mo) gh_mo_an = cdo.sub(input=gh_mo + ' ' + gh_mo_mm) return (gh_da_an_zm, gh_mo_an) + + +def zmnam_preproc_clean(tempdir=None): + """ Clean up temporary files after processing """ + + if tempdir is not None: + cdo = cd.Cdo(tempdir=tempdir) + else: + cdo = cd.Cdo() + + # Delete files like cdoPy* in tmp directory + cdo.cleanTempDir() + + return None diff --git a/esmvaltool/install/Julia/Project.toml b/esmvaltool/install/Julia/Project.toml new file mode 100644 index 0000000000..fb8d18f537 --- /dev/null +++ b/esmvaltool/install/Julia/Project.toml @@ -0,0 +1,11 @@ +[deps] +ArgParse = "c7e460c6-2fb9-53a9-8c5b-16f535851c63" +Compat = "34da2185-b29b-5c13-b0c7-acf172513d20" +DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0" +JSON = "682c06a0-de6a-54ab-a142-c8b1cf79cde6" +NetCDF = "30363a11-5582-574a-97bb-aa9a979735b9" +PyCall = "438e738f-606a-5dbb-bf0a-cddfbfd45ab0" +PyPlot = "d330b81b-6aea-500a-939a-2ce795aea3ee" +RainFARM = "e9a4e08f-a0a3-5224-a821-6d0231c12d6b" +Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" +YAML = "ddb6d928-2868-570f-bddf-ab3f9cf99eb6" diff --git a/esmvaltool/install/Julia/julia_requirements.txt b/esmvaltool/install/Julia/julia_requirements.txt deleted file mode 100644 index b22292cc69..0000000000 --- a/esmvaltool/install/Julia/julia_requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -DataFrames -YAML diff --git a/esmvaltool/install/Julia/setup.jl b/esmvaltool/install/Julia/setup.jl index ea4a7f1c8f..a6e3d25c6d 100644 --- a/esmvaltool/install/Julia/setup.jl +++ b/esmvaltool/install/Julia/setup.jl @@ -1,29 +1,13 @@ #!/usr/bin/env julia - -scriptDir=@__DIR__ +@info "Installing Julia dependencies" if VERSION >= v"0.7.0-DEV.2005" using Pkg end -@info "Installing the packages from" scriptDir * "/julia_requirements.txt" -pkgName=in -open(scriptDir * "/julia_requirements.txt") do f - for i in enumerate(eachline(f)) - - pkgId=i[1] - pkgName=i[2] - @info "Installing" pkgName - Pkg.add(pkgName) - @info "Testing: ", pkgName - # load the package this needs to be called at top-level - Expr(:toplevel, :(module ($pkgName) end)) - - end -end +Pkg.activate(@__DIR__) +Pkg.instantiate() -# Show the package list @info "Installed Julia packages:" -Pkg.installed() Pkg.status() diff --git 
a/esmvaltool/install/R/r_requirements.txt b/esmvaltool/install/R/r_requirements.txt deleted file mode 100644 index 549e7c89c5..0000000000 --- a/esmvaltool/install/R/r_requirements.txt +++ /dev/null @@ -1,20 +0,0 @@ -abind -akima -climdex.pcic -ClimProjDiags -dotCall64 -ggplot2 -gridExtra -lintr -logging -mapproj -maps -multiApply -ncdf4 -PCICt -plyr -RColorBrewer -Rcpp -s2dverification -SPEI -yaml diff --git a/esmvaltool/install/R/setup.R b/esmvaltool/install/R/setup.R deleted file mode 100644 index 515b564723..0000000000 --- a/esmvaltool/install/R/setup.R +++ /dev/null @@ -1,78 +0,0 @@ -log <- function(..., level="INFO") { - cat(format(Sys.time(), "%Y-%m-%d %X"), level, ":", ..., "\n") -} - -#check for present library paths -RLIBPATH <- .libPaths() - -#check if we can write in the present R libaries paths -if (any(file.access(RLIBPATH, 2) == 0)) { - #if possible, use the standard one for following instalation - RLIBLOC <- RLIBPATH[which(file.access(RLIBPATH, 2) == 0)[1]] -} else { - #if not possible, create a local library in the home directory - RLIBLOC <- Sys.getenv("R_LIBS_USER") - dir.create(path = Sys.getenv("R_LIBS_USER"), showWarnings = FALSE, - recursive = TRUE) -} - -log("Installing packages to --> ", RLIBLOC) - -# define the R mirror to download packages -pkg_mirror <- "https://cloud.r-project.org" -log("Using mirror: ", pkg_mirror) - -# get the script path -initial_options <- commandArgs(trailingOnly = FALSE) -file_arg_name <- "--file=" -script_name <- sub(file_arg_name, "", - initial_options[grep(file_arg_name, initial_options)]) -script_dirname <- dirname(script_name) - -# read the dependencies -dependencies <- scan( - paste(script_dirname, "r_requirements.txt", sep = "/"), - what = "character" -) -# TODO: find a solution for script directory -inst_packages <- installed.packages() -package_list <- dependencies[!(dependencies %in% inst_packages[, "Package"])] - -if (length(package_list) == 0) { - log("All packages are already installed!") -} else { - log("Number of packages to be installed: ", length(package_list)) -} - -Ncpus <- parallel::detectCores() -if (is.na(Ncpus)) { - Ncpus <- 1 -} - -log("Installing packages:", package_list) -if ( length(package_list) != 0 ) { - install.packages( - package_list, - repos = pkg_mirror, - Ncpus = Ncpus, - dependencies = c("Depends", "Imports", "LinkingTo") - ) -} - -failed <- list() -for (package_name in dependencies) { - success <- library( - package_name, - character.only = TRUE, - logical.return = TRUE - ) - if ( ! success ) { - failed <- c(failed, package_name) - } -} -if ( length(failed) != 0 ) { - log("Failed to install packages:", paste(failed, collapse = ", ")) - quit(status = 1, save = "no") -} - -log("Successfully installed all packages") diff --git a/esmvaltool/install/__init__.py b/esmvaltool/install/__init__.py new file mode 100644 index 0000000000..0d68911ef2 --- /dev/null +++ b/esmvaltool/install/__init__.py @@ -0,0 +1,36 @@ +"""Install Julia dependencies.""" +import subprocess +import sys +from pathlib import Path + + +class Install: + """Install extra dependencies. + + Diagnostics written in Julia need extra dependencies. Use this + command to install them. + + Note that Julia must be pre-installed before running this command. 
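+
+    A hypothetical direct call, for illustration only; the supported
+    entry point is the ``esmvaltool install Julia`` subcommand::
+
+        from esmvaltool.install import Install
+
+        Install().Julia()
+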
+ """ + + @staticmethod + def _run(cmd, script): + root = Path(__file__).parent + try: + subprocess.check_output( + [cmd, str(root / script)], + stderr=subprocess.STDOUT, + universal_newlines=True, + ) + except subprocess.CalledProcessError as exc: + print(exc.stdout) + print("installation failed") + sys.exit(1) + else: + print("Installation successful") + + def Julia(self): # noqa: N802 + """Install dependencies needed to run Julia diagnostics.""" + print("installing Julia packages, please wait...") + script = Path("Julia") / "setup.jl" + self._run("julia", script) diff --git a/esmvaltool/interface_scripts/auxiliary.ncl b/esmvaltool/interface_scripts/auxiliary.ncl index a5d92cf739..3ba09318ce 100644 --- a/esmvaltool/interface_scripts/auxiliary.ncl +++ b/esmvaltool/interface_scripts/auxiliary.ncl @@ -5,244 +5,241 @@ ; Check the header of each routine for documentation. ; ; Contents: -; procedure copy_VarCoords_l1 -; procedure check_min_max_datasets -; function tstep +; function bname +; function basename +; function att2var +; function att2var_default ; function get_ncdf_name ; function get_ncdf_dir ; function ncdf_read +; function ncdf_define ; function ncdf_write ; function ncdf_att -; function ncdf_define -; function att2var_default -; function att2var -; function bname -; function basename -; function extract_years -; function extend_var_at ; function copy_CoordNames_n -; function empty_str -; function write_info +; function extend_var_at ; function remove_index ; procedure set_default_att -; function filter_attrs -; procedure write_ignore_warnings -; function get_ref_dataset_idx +; function empty_str ; ; ############################################################################# -undef("copy_VarCoords_l1") -procedure copy_VarCoords_l1(var_from, - var_to) +undef("bname") +function bname(a[*]:string) ; ; Arguments -; var_from: An array of any dimensionality. -; Must have named dimensions and coordinate variables. -; var_to: An array of any dimensionality. +; a: (array of) file name strings to analyse. +; +; Return value +; An array of strings, without paths and suffixes. ; ; Description -; This procedure copies all named dimensions and coordinate variables -; from one variable to another except for the leftmost dimension. +; Strips paths and suffixes from file names. ; ; Caveats ; ; References ; ; Modification history -; 20140703-A_gott_kl: Written for use with mymm in time_operations -; (diag_scripts/shared/statistics.ncl). +; 20131106-gottschaldt_klaus-dirk: written. 
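`bname`, relocated above, shells out to `basename` and then keeps everything before the first dot. For orientation, the same behaviour in Python is a one-liner; an illustrative helper, not part of this changeset:

```python
from pathlib import Path


def bname(paths):
    """Strip the directory and everything after the first dot."""
    return [Path(p).name.split('.')[0] for p in paths]


assert bname(['/data/tas_ABC.v1.nc', 'notes.txt']) == ['tas_ABC', 'notes']
```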
; -local funcname, scriptname, dims_from, dims, rank_from, rank, dimdiff, idimdiff +local funcname, scriptname, a, i, b, rm_prefix, rm_suffix begin - funcname = "copy_VarCoords_l1" + funcname = "bname" scriptname = "interface_scripts/auxiliary.ncl" enter_msg(scriptname, funcname) - dims_from = getvardims(var_from) - rank_from = dimsizes(dims_from) - dims = getvardims(var_to) - rank = dimsizes(dims) - - if(rank.gt.1) then - - ; Check input - if(rank_from.ne.rank) then ; check rank - error_msg("f", scriptname, funcname, \ - "var_from and var_to must have same rank") - else ; check dimensions - dimdiff = dimsizes(var_from) - dimsizes(var_to) - idimdiff = ind(dimdiff.ne.0) - if(.not.all(ismissing(idimdiff))) then - if(sum(idimdiff).ne.0) then ; more or other than leftmost dim - error_msg("f", scriptname, funcname, \ - "var_from and var_to must have same " + \ - "dimension sizes, except the leftmost") - end if - end if - end if + b = a + do i = 0, dimsizes(a) - 1 + rm_prefix = systemfunc("basename " + a(i)) + rm_suffix = str_split(rm_prefix, ".") + b(i) = rm_suffix(0) + delete(rm_suffix) + end do - ; Loop over dimensions - do i = 1, rank - 1 - var_to!i = dims_from(i) - var_to&$dims_from(i)$ = var_from&$dims_from(i)$ - end do + leave_msg(scriptname, funcname) + return(b) - else +end + +; ############################################################################# +undef("basename") +function basename(name[1]:string) +; +; Arguments +; name: input string. +; +; Return value +; A string. +; +; Description +; Removes the suffix from the string 'name' and returns the string. +; +; Caveats +; +; References +; +; Modification history +; 20130419-gottschaldt_klaus-dirk: written. +; +local funcname, scriptname, suffix +begin - log_debug("No coordinates to process in copy_VarCoords_l1") + funcname = "basename" + scriptname = "interface_scripts/auxiliary.ncl" + enter_msg(scriptname, funcname) - end if + suffix = get_file_suffix(name, 0) + bsn = str_sub_str(name, suffix, "") leave_msg(scriptname, funcname) + return(bsn) end ; ############################################################################# -undef("check_min_max_datasets") -procedure check_min_max_datasets(no_datasets[1]:integer, - min_no_datasets[1]:integer, - max_no_datasets[1]:integer, - diag_script[1]) +undef("att2var") +function att2var(data, + meta[1]:string) ; ; Arguments -; no_datasets: number of datasets. -; min_no_datasets: minimum no of datasets required for this diag script. -; max_no_datasets: maximum no of datasets required for this diagg script. -; diag_script: name of diagnostic script. +; data: variable of any type with plot specific metadata. +; meta: string specifying the requested attribute, "var", "res_" and +; "diag_file" get special treatment. +; +; Return value +; Requested attribute of data is returned as variable: +; var (string): name string, with "var_" attached as attributes +; (without prefix). +; diag_file (string): name bases of registred scripts, joined into one +; string. +; res (logical): attributes needed for plotting. +; other attributes (any type): as requested by meta. ; ; Description -; Checks if sufficient number of datasets are present for the given -; diagnostic, tries to abort with a intelligible message. +; Puts attributes of data into individual variables. ; ; Caveats ; ; References ; ; Modification history +; 20131107-gottschaldt_klaus-dirk: written. 
; -local funcname, scriptname +local funcname, scriptname, data, meta, result, temp, atts, att, iatt begin - funcname = "check_min_max_datasets" + funcname = "att2var" scriptname = "interface_scripts/auxiliary.ncl" enter_msg(scriptname, funcname) - if (no_datasets .lt. min_no_datasets) then - error_msg("f", scriptname, funcname,\ - "Too few datasets (=" + no_datasets + ") for " + diag_script \ - + ". Mininum is " + min_no_datasets) - end if + ; Attribute "var" + if (meta .eq. "var") + if (isatt(data, meta)) then + result = data@$meta$ + atts = getvaratts(data) + do iatt = 0, dimsizes(atts) - 1 + att = atts(iatt) + if (isStrSubset(att, "var_")) then + ; Remove prefix "var_" + temp = str_get_cols(att, 4, strlen(att)) + ; Assign contents to variable "var" + result@$temp$ = data@$att$ + end if + end do + else + error_msg("f", scriptname, funcname, get_script_name() + \ + " requires variable attribute " + meta) + end if + + ; Attribute "diag_script" + elseif (meta .eq. "diag_script") + if (isatt(data, meta)) then + base = bname(data@$meta$) + ; Join names of all contributing scripts + result = str_join(base, "__") + else + error_msg("f", scriptname, funcname, get_script_name() + \ + " requires variable attribute " + meta) + end if + + ; Attribute "res_" + ; DEBUG info: different attribute types are not anticipated + ; -> check this if it crashes + elseif (meta .eq. "res_") + result = True + atts = getvaratts(data) + do iatt = 0, dimsizes(atts) - 1 + att = atts(iatt) + if (isStrSubset(att, "res_")) then + ; Remove prefix "res_" + temp = str_get_cols(att, 4, strlen(att)) + ; Assign contents to variable "res" + result@$temp$ = data@$att$ + end if + end do - if (no_datasets .gt. max_no_datasets) then - error_msg("f", scriptname, funcname,\ - "Too many datasets (=" + no_datasets + ") for " + diag_script \ - + ". Maximum is " + max_no_datasets) + ; Optional/generic attributes + else + if (isatt(data, meta)) then + result = data@$meta$ + else + error_msg("w", scriptname, funcname, "attribute " + meta + \ + " not found") + result = default_fillvalue("string") + end if end if leave_msg(scriptname, funcname) + return(result) end ; ############################################################################# -undef("tstep") -function tstep(utc:numeric) +undef("att2var_default") +function att2var_default(data, + meta[1]:string, + default, + option) ; ; Arguments -; utc: UTC time as returned by cd_calendar -; (dimsizes(time) x 6, with y,m,d,h,m,s). +; data: variable of any type with plot specific metadata. +; meta: string specifying the requested attribute +; "var", "res_" and "diag_file" get special treatment. +; default: variable of any type. +; option: not used yet. ; ; Return value -; "yea"=yearly, "sea"=seasonal, "mon"=monthly, "d"=daily, "h", "m", "s". +; Requested attribute of data is returned as variable. +; See function att2var for details on 'magic' strings. +; Those get special treatment and may trigger a crash, if missing. +; Default is returned if (non magic) data@meta is missing. ; ; Description -; Determines time step size in given UTC time axis. +; Puts attributes of data into individual variables, +; and returns default if the requested attribute is missing. ; ; Caveats ; ; References ; ; Modification history -; 20140211-A_gott_kl: written. +; 20140403-gottschaldt_klaus-dirk: written. 
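`att2var`, shown above, fans the attributes of a data variable out into standalone variables, re-attaching any `var_`-prefixed attributes with the prefix stripped (and likewise for `res_`). The core pattern, expressed as a Python dict comprehension for readers following along outside NCL (illustrative only, not from this changeset):

```python
def strip_prefixed(attrs, prefix):
    """Select attributes that carry `prefix` and drop the prefix."""
    return {key[len(prefix):]: value
            for key, value in attrs.items() if key.startswith(prefix)}


attrs = {'var': 'tas', 'var_units': 'K', 'var_long_name': 'temperature'}
assert strip_prefixed(attrs, 'var_') == {'units': 'K',
                                         'long_name': 'temperature'}
```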
; -local funcname, scriptname, t, dt, nt, y, m, d, h, n, s, option, units +local funcname, scriptname, data, meta, result, default, option begin - funcname = "tstep" + funcname = "att2var_default" scriptname = "interface_scripts/auxiliary.ncl" enter_msg(scriptname, funcname) - ; extract vectors - y = toint(utc(:, 0)) - m = toint(utc(:, 1)) - d = toint(utc(:, 2)) - h = toint(utc(:, 3)) - n = toint(utc(:, 4)) ; minutes - s = toint(utc(:, 5)) - - ; convert to seconds after y(0)-01-01 00:00:00 - units = "seconds after " + tostring(y(0)) + "-01-01 00:00:00" - option = 0 - option@calendar = "standard" - t = cd_inv_calendar(y, m, d, h, n, s, units, option) - - ; create vector of tstep lengths - nt = dimsizes(t) - dt = tolong(t(1:nt - 1)) - tolong(t(0:nt - 2)) - - ; check mean dt - mdt = avg(dt) - if (mdt.eq.1) then - result = "s" - else if (mdt.eq.60) then - result = "m" - else if (mdt.eq.3600) then - result = "h" - else if (mdt.eq.86400) then - result = "d" - else if (mdt.ge.2419200 .and. mdt.le.2678400) then - result = "mon" ; 28 ... 31 days - else if (mdt.ge.7776000 .and. mdt.le.7948800) then - result = "sea" ; 90 ... 92 days - else if (mdt.ge.31536000 .and. mdt.le.31622400) then - result = "yea" ; 365 ... 366 days - else - error_msg("f", scriptname, funcname, "could not determine " + \ - "step length for mdt = " + mdt) - end if - end if - end if - end if - end if - end if - end if - - ; check if all dt match the result - check_flag = False - if (result.eq."s" .or. result.eq."m" .or. \ - result.eq."h" .or. result.eq."d") then - if (max(dt) - min(dt) .ne. 0) then - check_flag = True + result = att2var(data, meta) + if(dimsizes(result).eq.1) then + if(result.eq.default_fillvalue("string")) then + log_debug("Using default: " + meta + " = " + tostring(default)) + result = default + copy_VarMeta(default, result) end if - else if (result.eq."mon") then - if (max(dt) - min(dt) .gt. 259200) then - check_flag = True - end if ; 31-28 = 3 days - else if (result.eq."sea") then - if (max(dt) - min(dt) .gt. 172800) then - check_flag = True - end if ; 92-90 = 2 days - else if (result.eq."yea") then - if (max(dt) - min(dt) .gt. 86400) then - check_flag = True - end if ; 366-365 = 1 days - end if - end if - end if - end if - if (check_flag) then - error_msg("f", scriptname, funcname, "range of dt is not " + \ - "consistent with " + result) end if leave_msg(scriptname, funcname) @@ -267,7 +264,7 @@ function get_ncdf_name(var[1]:string) ; References ; ; Modification history -; 20131108-A_gott_kl: written. +; 20131108-gottschaldt_klaus-dirk: written. ; local funcname, scriptname, work_dir, diag_script, output_dir, ncdf_dir begin @@ -311,7 +308,7 @@ function get_ncdf_dir(work_dir[1]:string, ; References ; ; Modification history -; 20131108-A_gott_kl: written. +; 20131108-gottschaldt_klaus-dirk: written. ; local funcname, scriptname, work_dir, subfolder, ncdf_dir begin @@ -366,7 +363,7 @@ function ncdf_read(infile, varname) ; References ; ; Modification history -; 20131111-A_gott_kl: written. +; 20131111-gottschaldt_klaus-dirk: written. ; local funcname, scriptname, data, infile, ncdf, varname, i, ivar, nvar, \ vdims, var, atts, temp, defaults @@ -403,21 +400,18 @@ begin end if if (varname .eq. "dummy" .and. dimsizes(var) .eq. 1) then data = ncdf->$var$ - else if (varname .eq. "dummy" .and. dimsizes(var) .ne. 1) then + elseif (varname .eq. "dummy" .and. dimsizes(var) .ne. 1) then error_msg("f", scriptname, funcname, "please select variable " + \ "from " + infile) - else if (varname .ne. "dummy" .and. 
any(varname .eq. vNames)) then + elseif (varname .ne. "dummy" .and. any(varname .eq. vNames)) then i = ind(vNames .eq. varname) data = ncdf->$vNames(i)$ - else if (varname .ne. "dummy" .and. .not. any(varname .eq. vNames)) then + elseif (varname .ne. "dummy" .and. .not. any(varname .eq. vNames)) then error_msg("f", scriptname, funcname, "variable " + varname + \ " not in " + infile) else error_msg("f", scriptname, funcname, "case not covered") end if - end if - end if - end if ; Convert attributes to type logical, if they meet above conditions atts = getvaratts(data) @@ -450,185 +444,55 @@ begin end + ; ############################################################################# -undef("ncdf_write") -function ncdf_write(data, - outfile_in) +undef("ncdf_att") +function ncdf_att(infile[1], + att[1]:string) ; ; Arguments -; data: data with metadata (attributes) to be written to a netCDF file. -; outfile_in: full path & name of the netCDF file to be written. It may -; may contain an attribute "existing" with the values -; "overwrite" (default) / "append" / "rename" -; default is used for "default", "dummy" or non-string values. +; infile: full path & name of the netCDF file to be read OR a file +; variable already created by addfile(...). +; att: attribute to be read. +; ; Return value -; A string with the outfile. +; Contents of attribute att (could be any type supported by netCDF). ; ; Description -; Creates or modifies outfile, adding a single variable. All attributes of -; "data" are added to the variable in the netCDF file. It may rename an -; existing file to keep it. -; Two options for netCDF output, one disabled (see "Option"), change if -; needed. -; If data has coordinates which are named with strings, these names are -; converted to characeter arrays and the coordinates are appended as two -; dimensional variables. +; Checks existence of att in infile, then retrieves contents. ; ; Caveats -; Appending not tested. -; netCDF does not support data type "logical" and converts True (logical) -; to 1 (integer), False (logical) to 0 (integer). -; A workaround could be to convert logical data to string before writing to -; netCDF, and adjust reading routine accordingly. -; Currently ncdf_read interprets variable attributes of type "integer" and -; dimsizes = 1 & value 0 or 1 as logicals. +; str_infile does not resolve the file name for a file variable. ; ; References -; http://www.ncl.ucar.edu/Document/Manuals/Ref_Manual/ -; NclFormatSupport.shtml#NetCDF -; http://www.ncl.ucar.edu/Applications/o-netcdf.shtml -; http://www.ncl.ucar.edu/Support/talk_archives/2009/0367.html ; ; Modification history -; 20131107-A_gott_kl: written. +; 20140131-A_gott_kl: written. ; -local funcname, scriptname, data, outfile, outfile_in, ncdf, varname +local funcname, scriptname, f, file_atts, i, result begin - funcname = "ncdf_write" + funcname = "ncdf_att" scriptname = "interface_scripts/auxiliary.ncl" enter_msg(scriptname, funcname) - ; Retrieve special attributes - varname = att2var(data, "var") - - ; Determine outfile - defaulting = (/"default", "dummy"/) - if (typeof(outfile_in) .ne. "string" .or. \ - .not.ismissing(ind(defaulting .eq. outfile_in))) then - outfile = get_ncdf_name(varname) + if (typeof(infile).eq."string") then + f = addfile(infile, "r") + str_infile = infile + else if (typeof(infile).eq."file") then + f = infile + str_infile = "file associated with file variable argument." 
else - outfile = outfile_in + error_msg("f", scriptname, funcname, "wrong type of infile argument") end if - - ; Determine mode for dealing with existing files - if (isatt(outfile, "existing")) then - existing = outfile@existing + end if + file_atts = getvaratts(f) ; retrieve global attributes + if (any(file_atts.eq.att)) then + i = ind(file_atts.eq.att) + result = f@$file_atts(i)$ else - existing = "overwrite" ; Default - end if - - ; Open outfile (see NCL documentation for "addfile") - if (fileexists(outfile)) then ; Outfile already exists - if (existing .eq. "rename") then - sysdate = systemfunc("date +%Y%m%d%H%M%S") - system("mv " + outfile + " " + outfile + "." + sysdate) - ncdf = addfile(outfile, "c") - log_debug(" Keeping existing " + outfile + " with suffix ." + sysdate) - else if (existing .eq. "append") then - ncdf = addfile(outfile, "w") - dimNames = getvardims(data) - dim_dim = dimsizes(dimNames) - ; define dimensions in case not defined yet - dimSzs = new(dimsizes(dimNames), long) - dimUnlim = new(dimsizes(dimNames), logical) - dimUnlim(:) = False - do i = 0, dim_dim - 1 - dimSzs(i) = dimsizes(data&$dimNames(i)$) - end do - filedimdef(ncdf, dimNames, dimSzs, dimUnlim) - filevardef(ncdf, varname, typeof(data), dimNames) - ; Copy attributes associated with each variable to the file - do i = 0, dim_dim - 1 - ; No attribute assignment to variable if no attributes are available - if (.not.(all(ismissing(getvaratts(data&$dimNames(i)$))))) then - filevarattdef(ncdf, dimNames(i), data&$dimNames(i)$) - end if - end do - filevarattdef(ncdf, varname, data) - else - system("rm -f " + outfile) - ncdf = addfile(outfile, "c") - ncdf = ncdf_define(ncdf, data) - end if - end if - else ; Outfile does not exist yet - ncdf = addfile(outfile, "c") - ncdf = ncdf_define(ncdf, data) - end if - - ; Write to netCDF - ; Option 1: q&d -; ncdf->$varname$ = data - ; Option 2: write with explicit file definition - dimNames = getvardims(data) - do i = 0, dimsizes(dimNames) - 1 - if (typeof(data&$dimNames(i)$).eq."string") then - ; Convert string array to two dimensional character array - char_array = stringtochar(data&$dimNames(i)$) - ncdf->$dimNames(i)$ = (/char_array/) - delete(char_array) - else - ncdf->$dimNames(i)$ = (/data&$dimNames(i)$/) - end if - end do - ncdf->$varname$ = (/data/) - - log_info("Wrote variable " + varname + " to " + outfile) - - leave_msg(scriptname, funcname) - return(outfile) - -end - -; ############################################################################# -undef("ncdf_att") -function ncdf_att(infile[1], - att[1]:string) -; -; Arguments -; infile: full path & name of the netCDF file to be read OR a file -; variable already created by addfile(...). -; att: attribute to be read. -; -; Return value -; Contents of attribute att (could be any type supported by netCDF). -; -; Description -; Checks existence of att in infile, then retrieves contents. -; -; Caveats -; str_infile does not resolve the file name for a file variable. -; -; References -; -; Modification history -; 20140131-A_gott_kl: written. -; -local funcname, scriptname, f, file_atts, i, result -begin - - funcname = "ncdf_att" - scriptname = "interface_scripts/auxiliary.ncl" - enter_msg(scriptname, funcname) - - if (typeof(infile).eq."string") then - f = addfile(infile, "r") - str_infile = infile - else if (typeof(infile).eq."file") then - f = infile - str_infile = "file associated with file variable argument." 
- else
- error_msg("f", scriptname, funcname, "wrong type of infile argument")
end if
- end if
- file_atts = getvaratts(f) ; retrieve global attributes
- if (any(file_atts.eq.att)) then
- i = ind(file_atts.eq.att)
- result = f@$file_atts(i)$
- else
- error_msg("w", scriptname, funcname, att + " not found in " + str_infile)
- result = "missing"
+ error_msg("w", scriptname, funcname, att + " not found in " + str_infile)
+ result = "missing"
end if
leave_msg(scriptname, funcname)
@@ -656,10 +520,10 @@ function ncdf_define(ncdf, data)
; http://www.ncl.ucar.edu/Applications/method_2.shtml
;
; Modification history
-; 20131112-A_gott_kl: written.
+; 20131112-gottschaldt_klaus-dirk: written.
;
local funcname, scriptname, data, diag_script, var, fAtt, dimNames, dimSzs, \
- dimUnlim, atts
+ dimUnlim, atts, i, ii, jj
begin
funcname = "ncdf_define"
@@ -752,278 +616,254 @@ begin
end
; #############################################################################
-undef("att2var_default")
-function att2var_default(data,
- meta[1]:string,
- default,
- option)
+undef("ncdf_write")
+function ncdf_write(data,
+ outfile_in)
;
; Arguments
-; data: variable of any type with plot specific metadata.
-; meta: string specifying the requested attribute
- "var", "res_" and "diag_file" get special treatment.
-; default: variable of any type.
-; option: not used yet.
-;
+; data: data with metadata (attributes) to be written to a netCDF file.
+; outfile_in: full path & name of the netCDF file to be written. It may
+; contain an attribute "existing" with the values
+; "overwrite" (default) / "append" / "rename"
+; default is used for "default", "dummy" or non-string values.
; Return value
-; Requested attribute of data is returned as variable.
-; See function att2var for details on 'magic' strings.
-; Those get special treatment and may trigger a crash, if missing.
-; Default is returned if (non magic) data@meta is missing
+; A string with the outfile.
;
; Description
-; Puts attributes of data into individual variables,
-; and returns default if the requested attribute is missing
+; Creates or modifies outfile, adding a single variable. All attributes of
+; "data" are added to the variable in the netCDF file. It may rename an
+; existing file to keep it.
+; Two options for netCDF output, one disabled (see "Option"), change if
+; needed.
+; If data has coordinates which are named with strings, these names are
+; converted to character arrays and the coordinates are appended as two
+; dimensional variables.
;
; Caveats
+; Appending not tested.
+; netCDF does not support data type "logical" and converts True (logical)
+; to 1 (integer), False (logical) to 0 (integer).
+; A workaround could be to convert logical data to string before writing to
+; netCDF, and adjust reading routine accordingly.
+; Currently ncdf_read interprets variable attributes of type "integer" and
+; dimsizes = 1 & value 0 or 1 as logicals.
;
; References
+; http://www.ncl.ucar.edu/Document/Manuals/Ref_Manual/
+; NclFormatSupport.shtml#NetCDF
+; http://www.ncl.ucar.edu/Applications/o-netcdf.shtml
+; http://www.ncl.ucar.edu/Support/talk_archives/2009/0367.html
;
; Modification history
-; 20140403-A_GoKl: written.
+; 20131107-gottschaldt_klaus-dirk: written.
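A note for readers following the caveat just above: the "no logical type" limitation is a netCDF property, not an NCL one, and is easy to demonstrate outside the tool. A minimal Python sketch (file and attribute names are invented for illustration):

```python
# netCDF has no logical type, so a boolean attribute round-trips as an
# integer; applying the ncdf_read convention (a size-1 integer attribute
# valued 0 or 1) recovers a boolean on the way back in.
import netCDF4

with netCDF4.Dataset("tmp.nc", "w") as nc:
    nc.setncattr("is_masked", 1)  # True is stored as the integer 1

with netCDF4.Dataset("tmp.nc", "r") as nc:
    value = nc.getncattr("is_masked")
    if value in (0, 1):  # ncdf_read-style reinterpretation
        value = bool(value)
```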
;
-local funcname, scriptname, data, meta, result, default, option
+local funcname, scriptname, data, outfile, outfile_in, ncdf, varname, i, idim
begin
- funcname = "att2var_default"
+ funcname = "ncdf_write"
scriptname = "interface_scripts/auxiliary.ncl"
enter_msg(scriptname, funcname)
- result = att2var(data, meta)
- if(dimsizes(result).eq.1) then
- if(result.eq.default_fillvalue("string")) then
- log_debug("Using default: " + meta + " = " + tostring(default))
- result = default
- copy_VarMeta(default, result)
- end if
- end if
-
- leave_msg(scriptname, funcname)
- return(result)
-
-end
+ ; Retrieve special attributes
+ varname = att2var(data, "var")
-; #############################################################################
-undef("att2var")
-function att2var(data,
- meta[1]:string)
-;
-; Arguments
-; data: variable of any type with plot specific metadata.
-; meta: string specifying the requested attribute, "var", "res_" and
-; "diag_file" get special treatment.
-;
-; Return value
-; Requested attribute of data is returned as variable:
-; var (string): name string, with "var_" attached as attributes
-; (without prefix).
-; diag_file (string): name bases of registred scripts, joined into one
-; string.
-; res (logical): attributes needed for plotting.
-; other attributes (any type): as requested by meta.
-;
-; Description
-; Puts attributes of data into individual variables.
-;
-; Caveats
-;
-; References
-;
-; Modification history
-; 20131107-A_gott_kl: written.
-;
-local funcname, scriptname, data, meta, result, temp, atts, att, iatt
-begin
+ ; Determine outfile
+ defaulting = (/"default", "dummy"/)
+ if (typeof(outfile_in) .ne. "string" .or. \
+ .not.ismissing(ind(defaulting .eq. outfile_in))) then
+ outfile = get_ncdf_name(varname)
+ else
+ outfile = outfile_in
+ end if
- funcname = "att2var"
- scriptname = "interface_scripts/auxiliary.ncl"
- enter_msg(scriptname, funcname)
+ ; Determine mode for dealing with existing files
+ if (isatt(outfile, "existing")) then
+ existing = outfile@existing
+ else
+ existing = "overwrite" ; Default
+ end if
- ; Attribute "var"
- if (meta .eq. "var")
- if (isatt(data, meta)) then
- result = data@$meta$
- atts = getvaratts(data)
- do iatt = 0, dimsizes(atts) - 1
- att = atts(iatt)
- if (isStrSubset(att, "var_")) then
- ; Remove prefix "var_"
- temp = str_get_cols(att, 4, strlen(att))
- ; Assign contents to variable "var"
- result@$temp$ = data@$att$
+ ; Open outfile (see NCL documentation for "addfile")
+ if (fileexists(outfile)) then ; Outfile already exists
+ if (existing .eq. "rename") then
+ sysdate = systemfunc("date +%Y%m%d%H%M%S")
+ system("mv " + outfile + " " + outfile + "." + sysdate)
+ ncdf = addfile(outfile, "c")
+ log_debug(" Keeping existing " + outfile + " with suffix ." + sysdate)
+ elseif (existing .eq. "append") then
+ ncdf = addfile(outfile, "w")
+ dimNames = getvardims(data)
+ dim_dim = dimsizes(dimNames)
+ ; define dimensions in case not defined
+ do idim = 0, dim_dim - 1
+ if (all(dimNames(idim) .ne. getvardims(ncdf))) then
+ if (.not. isdefined("dims_new")) then
+ dims_new = dimNames(idim)
+ else
+ dims_new := array_append_record(dims_new, dimNames(idim), 0)
+ end if
end if
end do
- else
- error_msg("f", scriptname, funcname, get_script_name() + \
- " requires variable attribute " + meta)
- end if
-
- ; Attribute "diag_script"
- else if (meta .eq. "diag_script")
- if (isatt(data, meta)) then
- base = bname(data@$meta$)
- ; Join names of all contributing scripts
- result = str_join(base, "__")
- else
- error_msg("f", scriptname, funcname, get_script_name() + \
- " requires variable attribute " + meta)
- end if
-
- ; Attribute "res_"
- ; DEBUG info: different attribute types are not anticipated
- ; -> check this if it crashes
- else if (meta .eq. "res_")
- result = True
- atts = getvaratts(data)
- do iatt = 0, dimsizes(atts) - 1
- att = atts(iatt)
- if (isStrSubset(att, "res_")) then
- ; Remove prefix "res_"
- temp = str_get_cols(att, 4, strlen(att))
- ; Assign contents to variable "res"
- result@$temp$ = data@$att$
+ if (isdefined("dims_new")) then
+ dimSzs = new(dimsizes(dims_new), long)
+ dimUnlim = new(dimsizes(dims_new), logical)
+ dimUnlim(:) = False
+ do i = 0, dimsizes(dims_new) - 1
+ dimSzs(i) = dimsizes(data&$dims_new(i)$)
+ end do
+ filedimdef(ncdf, dims_new, dimSzs, dimUnlim)
end if
- end do
-
- ; Optional/generic attributes
- else
- if (isatt(data, meta)) then
- result = data@$meta$
+ filevardef(ncdf, varname, typeof(data), dimNames)
+ ; Copy attributes associated with each variable to the file
+ do i = 0, dim_dim - 1
+ ; No attribute assignment to variable if no attributes are available
+ ; Also skip existing dimensions
+ if ((.not.(all(ismissing(getvaratts(data&$dimNames(i)$))))) .and. \
+ (all(dimNames(i) .ne. getvardims(ncdf)))) then
+ filevarattdef(ncdf, dimNames(i), data&$dimNames(i)$)
+ end if
+ end do
+ filevarattdef(ncdf, varname, data)
else
- error_msg("w", scriptname, funcname, "attribute " + meta + \
- " not found")
- result = default_fillvalue("string")
+ system("rm -f " + outfile)
+ ncdf = addfile(outfile, "c")
+ ncdf = ncdf_define(ncdf, data)
end if
+ else ; Outfile does not exist yet
+ ncdf = addfile(outfile, "c")
+ ncdf = ncdf_define(ncdf, data)
end if
- end if
- end if
-
- leave_msg(scriptname, funcname)
- return(result)
-
-end
-
-; #############################################################################
-undef("bname")
-function bname(a:string)
-;
-; Arguments
-; a: (array of) file name strings to analyse.
-;
-; Return value
-; An array of strings, without paths and suffixes.
-;
-; Description
-; Strips paths and suffixes from file names.
-;
-; Caveats
-;
-; References
-;
-; Modification history
-; 20131106-A_gott_kl: written.
-; -local funcname, scriptname, a, i, b, rm_prefix, rm_suffix -begin - - funcname = "bname" - scriptname = "interface_scripts/auxiliary.ncl" - enter_msg(scriptname, funcname) - b = a - do i = 0, dimsizes(a) - 1 - rm_prefix = systemfunc("basename " + a(i)) - rm_suffix = str_split(rm_prefix, ".") - b(i) = rm_suffix(0) - delete(rm_suffix) + ; Write to netCDF + ; Option 1: q&d +; ncdf->$varname$ = data + ; Option 2: write with explicit file definition + dimNames = getvardims(data) + do i = 0, dimsizes(dimNames) - 1 + if (typeof(data&$dimNames(i)$).eq."string") then + ; Convert string array to two dimensional character array + char_array = stringtochar(data&$dimNames(i)$) + ncdf->$dimNames(i)$ = (/char_array/) + delete(char_array) + else + ncdf->$dimNames(i)$ = (/data&$dimNames(i)$/) + end if end do + ncdf->$varname$ = (/data/) + + log_info("Wrote variable " + varname + " to " + outfile) leave_msg(scriptname, funcname) - return(b) + return(outfile) end ; ############################################################################# -undef("basename") -function basename(name[1]:string) +undef("ncdf_att") +function ncdf_att(infile[1], + att[1]:string) ; ; Arguments -; name: input string. +; infile: full path & name of the netCDF file to be read OR a file +; variable already created by addfile(...). +; att: attribute to be read. ; ; Return value -; A string. +; Contents of attribute att (could be any type supported by netCDF). ; ; Description -; Removes the suffix from the string 'name' and returns the string. +; Checks existence of att in infile, then retrieves contents. ; ; Caveats +; str_infile does not resolve the file name for a file variable. ; ; References ; ; Modification history -; 20130419-A_gott_kl: written. +; 20140131-gottschaldt_klaus-dirk: written. ; -local funcname, scriptname, suffix +local funcname, scriptname, f, file_atts, i, result begin - funcname = "basename" + funcname = "ncdf_att" scriptname = "interface_scripts/auxiliary.ncl" enter_msg(scriptname, funcname) - suffix = get_file_suffix(name, 0) - bsn = str_sub_str(name, suffix, "") + if (typeof(infile).eq."string") then + f = addfile(infile, "r") + str_infile = infile + elseif (typeof(infile).eq."file") then + f = infile + str_infile = "file associated with file variable argument." + else + error_msg("f", scriptname, funcname, "wrong type of infile argument") + end if + file_atts = getvaratts(f) ; retrieve global attributes + if (any(file_atts.eq.att)) then + i = ind(file_atts.eq.att) + result = f@$file_atts(i)$ + else + error_msg("w", scriptname, funcname, att + " not found in " + str_infile) + result = "missing" + end if leave_msg(scriptname, funcname) - return(bsn) + return(result) end ; ############################################################################# -undef("extract_years") -function extract_years(data, - startYr, - endYr) +undef("copy_CoordNames_n") +function copy_CoordNames_n(var_from, + var_to, + n) ; ; Arguments +; var_from: an array of any dimensionality. Must have named dimensions and +; coordinate variables. +; var_to: an array of any dimensionality. +; n: index of coordinate which is not to be copied. ; ; Return value +; An array with the same dimensionality of var_from. ; ; Description +;: Copies all named dimensions and coordinate variables except the n-th one. ; ; Caveats ; ; References ; ; Modification history +; 20130419-gottschaldt_klaus-dirk: written. 
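The relocated ncdf_att above reduces to: open the file (or reuse an already opened file variable), look the name up among the global attributes, and warn rather than fail when it is absent. A rough Python analogue using netCDF4, not part of the tool itself:

```python
import warnings
import netCDF4

def ncdf_att(infile, att):
    """Return global attribute `att` of `infile`, or "missing"."""
    is_open = isinstance(infile, netCDF4.Dataset)
    nc = infile if is_open else netCDF4.Dataset(infile, "r")
    try:
        if att in nc.ncattrs():
            return nc.getncattr(att)
        warnings.warn(f"{att} not found in {infile}")
        return "missing"
    finally:
        if not is_open:
            nc.close()  # only close files we opened ourselves
```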
; -local funcname, scriptname, data_new, tim, t_utc, t_utc_yr, idx_t +local funcname, scriptname, var_from, var_to, n, rank, ii begin - funcname = "extract_years" + funcname = "copy_CoordNames_n" scriptname = "interface_scripts/auxiliary.ncl" enter_msg(scriptname, funcname) - ds = dimsizes(data) - rank = dimsizes(ds) - - tim = data&time - - t_utc = cd_calendar(tim, 0) - t_utc_yr = floattoint(t_utc(:, 0)) - idx_t = ind((t_utc_yr .ge. startYr) .and. (t_utc_yr .le. endYr)) + rank = dimsizes(dimsizes(var_from)) - if (rank .eq. 2) then - data_new = data(idx_t, :) - end if - if (rank .eq. 3) then - data_new = data(idx_t, :, :) - end if - if (rank .eq. 4) then - data_new = data(idx_t, :, :, :) + if (rank.le.n) then + error_msg("f", scriptname, funcname, "index is not in the " + \ + "range of the variables") + else + names = getvardims(var_from) + do ii = 0, rank - 1 + if (.not.ismissing(names(ii))) then + var_to!ii = names(ii) + if (ii.ne.n) then + var_to&$names(ii)$ = var_from&$names(ii)$ + end if + end if + end do end if leave_msg(scriptname, funcname) - return(data_new) + return(var_to) end @@ -1049,7 +889,7 @@ function extend_var_at(var, ; References ; ; Modification history -; 20130419-A_gott_kl: written. +; 20130419-gottschaldt_klaus-dirk: written. ; local funcname, scriptname, var, coord, index, var_new, var_dim, \ var_dim_plus, rank @@ -1063,7 +903,7 @@ begin var_dim_plus = var_dim var_dim_plus(coord) = var_dim(coord) + 1 - ; Define new variables with one more entry for diagnostics + ; Define new variables with one more entry for diagnostics rank = dimsizes(var_dim) var_temp = new((/var_dim_plus/), typeof(var)) @@ -1078,7 +918,7 @@ begin if (index.lt.var_dim(0)) then var_new(index + 1:) = var(index:) end if - else if (rank.eq.2) then + elseif (rank.eq.2) then if (coord.eq.0) then if (index - 1.ge.0) then var_new(:index - 1, :) = var(:index - 1, :) @@ -1094,7 +934,7 @@ begin var_new(:, index + 1:) = var(:, index:) end if end if - else if (rank.eq.3) then + elseif (rank.eq.3) then if (coord.eq.0) then if (index - 1.ge.0) then var_new(:index - 1, :, :) = var(:index - 1, :, :) @@ -1102,7 +942,7 @@ begin if (index.lt.var_dim(0)) then var_new(index + 1:, :, :) = var(index:, :, :) end if - else if (coord.eq.1) then + elseif (coord.eq.1) then if (index - 1.ge.0) then var_new(:, :index - 1, :) = var(:, :index - 1, :) end if @@ -1117,155 +957,16 @@ begin var_new(:, :, index + 1:) = var(:, :, index:) end if end if - end if else error_msg("f", scriptname, funcname, "implemented only for " + \ "variables with dimension less or equal 3!") end if - end if - end if leave_msg(scriptname, funcname) return(var_new) end -; ############################################################################# -undef("copy_CoordNames_n") -function copy_CoordNames_n(var_from, - var_to, - n) -; -; Arguments -; var_from: an array of any dimensionality. Must have named dimensions and -; coordinate variables. -; var_to: an array of any dimensionality. -; n: index of coordinate which is not to be copied. -; -; Return value -; An array with the same dimensionality of var_from. -; -; Description -;: Copies all named dimensions and corrdinate variables except the n-th one. -; -; Caveats -; -; References -; -; Modification history -; 20130419-A_gott_kl: written. 
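Restating the copy_CoordNames_n contract (documented above, body at the top of this hunk) in Python terms: every dimension name is copied, and every coordinate except the n-th. A sketch with (name, values) pairs standing in for NCL's named dimensions; this is illustrative, not the tool's data model:

```python
def copy_coord_names_n(dims_from, n):
    """dims_from: list of (name, coord_values) pairs, one per dimension."""
    rank = len(dims_from)
    if rank <= n:
        raise IndexError("index is not in the range of the variable")
    # Names are always carried over; coordinate values only when i != n
    return [(name, values if i != n else None)
            for i, (name, values) in enumerate(dims_from)]
```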
-; -local funcname, scriptname, var_from, var_to, n, rank -begin - - funcname = "copy_CoordNames_n" - scriptname = "interface_scripts/auxiliary.ncl" - enter_msg(scriptname, funcname) - - rank = dimsizes(dimsizes(var_from)) - - if (rank.le.n) then - error_msg("f", scriptname, funcname, "index is not in the " + \ - "range of the variables") - else - names = getvardims(var_from) - do ii = 0, rank - 1 - if (.not.ismissing(names(ii))) then - var_to!ii = names(ii) - if (ii.ne.n) then - var_to&$names(ii)$ = var_from&$names(ii)$ - end if - end if - end do - end if - - leave_msg(scriptname, funcname) - return(var_to) - -end - -; ############################################################################# -undef("empty_str") -function empty_str(nn[1]:integer) -; -; Arguments -; nn: length of the returned string. -; -; Return value -; An empty string of length nn. -; -; Description -; Returns an empty string of length nn (useful for text formatting). -; -; Caveats -; -; References -; -; Modification history -; 20140228-A_righ_ma: written. -; -local funcname, scriptname, ii -begin - - funcname = "empty_str" - scriptname = "interface_scripts/auxiliary.ncl" - enter_msg(scriptname, funcname) - - str = " " - do ii = 1, nn - 1 - str = str + " " - end do - - leave_msg(scriptname, funcname) - return(str) - -end - -; ############################################################################# -undef("write_info") -function write_info(display[1]:logical) -; -; Arguments -; display: logical with attributes listing the items to put in the infobox -; -; Return value -; A string to write to plot. -; -; Description -; Read the attributes in displayinfo and returns them in a formatted -; string. -; -; Caveats -; -; References -; -; Modification history -; -local funcname, scriptname, infobox, info_id -begin - - funcname = "write_info" - scriptname = "interface_scripts/auxiliary.ncl" - enter_msg(scriptname, funcname) - - infobox = "" - infobox = infobox + "~F22~DEBUG INFO:~F21~ ~C~" - display_varatts = getvaratts(display) - exceptions = (/"description", "description_ycoord"/) - do info_id = 0, dimsizes(display_varatts) - 1 - info_tag = display_varatts(info_id) - if (any(info_tag .eq. exceptions)) then - continue - end if - ; ~C~ = new line - infobox = infobox + info_tag + ": " + display@$info_tag$ + "~C~" - end do - - leave_msg(scriptname, funcname) - return(infobox) - -end - ; ############################################################################# undef("remove_index") function remove_index(array, @@ -1284,7 +985,7 @@ function remove_index(array, ; References ; ; Modification history -; 20160704-A_righ_ma: written. +; 20160704-righi_mattia: written. ; local funcname, scriptname, dsize, rank, iarr begin @@ -1326,7 +1027,6 @@ begin end - ; ############################################################################# undef("set_default_att") procedure set_default_att(info:logical, @@ -1345,7 +1045,7 @@ procedure set_default_att(info:logical, ; References ; ; Modification history -; 20171214-A_righ_ma: written. +; 20171214-righi_mattia: written. 
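Judging by its name and its use in the diagnostics, set_default_att assigns a default to a diag_script_info attribute only when that attribute is absent; Python's dict.setdefault captures the same idea (keys and values below are invented):

```python
def set_default_att(info: dict, att: str, default) -> None:
    """Set info[att] to `default` only if the attribute is not present."""
    info.setdefault(att, default)

cfg = {"styleset": "CMIP5"}
set_default_att(cfg, "time_avg", "annualclim")
# cfg == {"styleset": "CMIP5", "time_avg": "annualclim"}
```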
; local funcname, scriptname begin @@ -1363,120 +1063,38 @@ begin end ; ############################################################################# -undef("filter_attrs") -function filter_attrs(source[1]:logical,\ - target[1]:logical,\ - lb_prefix[1]:string) -; -; Arguments -; -; Description -; Copy over certain attributes if they have a specified prefix, -; the prefix is removed on the target attribute -; -; Caveats -; -; References -; -; Modification history -; 20150630-A_eval_ma: written. -; -local funcname, scriptname, source_attrs, att_idx, prefix_removed -begin - - funcname = "filter_attrs" - scriptname = "interface_scripts/auxiliary.ncl" - enter_msg(scriptname, funcname) - - source_attrs = getvaratts(source) - do att_idx = 0, dimsizes(source_attrs) - 1 - if (.not. ismissing(str_match(source_attrs(att_idx), lb_prefix))) then - prefix_removed = str_get_cols(source_attrs(att_idx), \ - strlen(lb_prefix), -1) - target@$prefix_removed$ = source@$source_attrs(att_idx)$ - end if - end do - - leave_msg(scriptname, funcname) - return(target) - -end - -; ############################################################################# -undef("write_ignore_warnings") ; FIX-ME: to be removed -procedure write_ignore_warnings(warnings[*]:string, - warn_file_str[1]:string) +undef("empty_str") +function empty_str(nn[1]:integer) ; ; Arguments +; nn: length of the returned string. ; -; Description -; Write a file with warnings that will be disregarded by the Pyhton -; launcher. -; -; Caveats -; -; References -; -; Modification history -; 20150630-A_eval_ma: written. -; -local funcname, scriptname, idx_warn -begin - - funcname = "write_ignore_warnings" - scriptname = "interface_scripts/auxiliary.ncl" - enter_msg(scriptname, funcname) - - if (fileexists(warn_file_str)) then - system("rm -f " + warn_file_str) - end if - do idx_warn = 0, dimsizes(warnings) - 1 - system("echo " + warnings(idx_warn) + " >> " + warn_file_str) - end do - - leave_msg(scriptname, funcname) - -end - -; ############################################################################# -undef("get_ref_dataset_idx") ; FIX-ME to be removed -function get_ref_dataset_idx(datasets[1]:logical,\ - dataset_ref_name:string) -; -; Arguments -; datasets: list of all datasets -; dataset_ref_name: reference dataset name, possibly two, separated by comma +; Return value +; An empty string of length nn. ; ; Description -; Fetches the reference dataset index in input_file_info@dataset +; Returns an empty string of length nn (useful for text formatting). ; ; Caveats ; ; References ; ; Modification history -; 20171208-A_righ_ma: rewritten for the new backend +; 20140228-righi_mattia: written. 
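The relocated empty_str builds its blank string with a loop; for comparison, the Python equivalent is a one-liner:

```python
def empty_str(nn: int) -> str:
    """An empty (blank) string of length nn, useful for text formatting."""
    return " " * nn
```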
; -local funcname, scriptname, nrefs, idx_ref +local funcname, scriptname, ii begin - funcname = "get_ref_dataset_idx" + funcname = "empty_str" scriptname = "interface_scripts/auxiliary.ncl" enter_msg(scriptname, funcname) - nrefs = dimsizes(dataset_ref_name) - - idx_ref = new(nrefs, integer) - do ii = 0, nrefs - 1 - idx_ref(ii) = \ - ind(datasets@dataset.eq.str_squeeze(dataset_ref_name(ii))) - if (ismissing(idx_ref(ii))) then - error_msg("f", scriptname, funcname, "reference dataset " + \ - dataset_ref_name(ii) + " not found in the datasets list") - end if + str = " " + do ii = 1, nn - 1 + str = str + " " end do leave_msg(scriptname, funcname) - return(idx_ref) + return(str) end diff --git a/esmvaltool/interface_scripts/data_handling.ncl b/esmvaltool/interface_scripts/data_handling.ncl index 6fd66f219b..ec03a119ea 100644 --- a/esmvaltool/interface_scripts/data_handling.ncl +++ b/esmvaltool/interface_scripts/data_handling.ncl @@ -6,7 +6,6 @@ ; ; Contents: ; function read_data -; function read_fx_data ; ; ############################################################################# @@ -31,8 +30,11 @@ function read_data(info_item[1]:logical) ; References ; ; Modification history -; 20180511_A-righ_ma: adapted to the revised list-based metadata structure. -; 20180212_A-righ_ma: written. +; 20191209-schlund_manuel: attributes from file are now added to the array +; returned by this function. +; 20180511-righi_mattia: adapted to the revised list-based metadata +; structure. +; 20180212-righi_mattia: written. ; local funcname, scriptname, infile, fin begin @@ -60,109 +62,23 @@ begin ; Check that variable is in the file if (.not. isfilevar(fin, info_item@short_name)) then - error_msg("f", scriptname, funcname, "variable " + variable + \ + error_msg("f", scriptname, funcname, "variable " + info_item@short_name + \ " not found in file " + infile) end if ; Read data data = fin->$info_item@short_name$ - leave_msg(scriptname, funcname) - return(data) - -end - -; ############################################################################# -undef("read_fx_data") -function read_fx_data(info_item[1]:logical, - fx_variable[1]:string) -; -; Arguments -; index: index to current input file as defined in the _info.ncl -; file in the preproc directory of the current variable -; fx_variable: name of the fx variable to read -; -; Return value -; A list of data array(s) with all metadata or a missing value if the -; fx variable is not found. -; -; Description -; Retrieve the fx variable data and metadata based on the input_file_info -; logical defined in the _info.ncl file automatically generated -; by the preprocessor and stored in the preproc// subdirectory. -; Unlike read_data, this function does not exit on error if the input file -; is not found. This is to provide more flexibility to the calling -; diagnostic. -; -; Caveats -; If auxiliary coordinates are associated to the fx-variables (typically for -; areacello), these are also read and pushed to the return list. -; -; References -; -; Modification history -; 20180511_A-righ_ma: adapted to the revised list-based metadata structure. -; 20180526_A-righ_ma: written. 
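The revised read_data, whose attribute-copying tail appears just below, now also transfers file-level attributes onto the returned array without clobbering attributes the variable already carries. In rough Python/xarray terms (a sketch under that reading, not the tool's actual code):

```python
import xarray as xr

def read_data(infile, short_name):
    ds = xr.open_dataset(infile)
    if short_name not in ds:
        raise KeyError(f"variable {short_name} not found in file {infile}")
    data = ds[short_name]
    for att, value in ds.attrs.items():    # file-level (global) attributes
        data.attrs.setdefault(att, value)  # never overwrite existing ones
    return data
```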
-; -local funcname, scriptname, infile, fin -begin - - funcname = "read_fx_data" - scriptname = "interface_scripts/data_handling.ncl" - enter_msg(scriptname, funcname) - - ; Check attribute availability - if (.not.isatt(info_item, fx_variable)) then - error_msg("f", scriptname, funcname, "input path for the fx_variable " + \ - fx_variable + " not found in the metadata file, please add " + \ - "'fx_files: [" + fx_variable + "]' to the variable " + \ - "dictionary in the recipe") - end if - infile = info_item@$fx_variable$ - - ; Return missing if variable not available - if (ismissing(infile)) then - error_msg("w", scriptname, funcname, "fx variable " + fx_variable + \ - " not found for dataset " + info_item@dataset) - out = new(1, float) - out = default_fillvalue(typeof(out)) - return(out) - end if - - ; Open file - log_info("Reading in file = " + infile) - fin = addfile(infile, "r") - - ; Check that variable is in the file - if (.not. isfilevar(fin, fx_variable)) then - error_msg("f", scriptname, funcname, "variable " + fx_variable + \ - " not found in " + infile) - end if - - ; Initialize list - outlist = NewList("lifo") - - ; Check if auxiliary coordinates are defined - atts = getfilevaratts(fin, fx_variable) - if (any("coordinates".eq.atts)) then - ; Read auxiliary coordinates in reverse order (for correct order in list) - coords = fin->$fx_variable$@coordinates - ncoords = str_fields_count(coords, " ") - do ii = 1, ncoords - curr_coord = str_squeeze(str_get_field(coords, ncoords + 1 - ii, " ")) - if (.not. isfilevar(fin, curr_coord)) then - error_msg("w", scriptname, funcname, "auxiliary coordinate " + \ - curr_coord + " not found in " + infile + ", skipping") - else - ListPush(outlist, fin->$curr_coord$) - end if - end do - end if - - ; Push variable to the list - ListPush(outlist, fin->$fx_variable$) + ; Copy attributes from fin (do not overwrite existing ones) + fin_atts = getvaratts(fin) + do idx_att = 0, dimsizes(fin_atts) - 1 + att = fin_atts(idx_att) + if (.not. isatt(data, att)) then + data@$att$ = fin@$att$ + end if + end do leave_msg(scriptname, funcname) - return(outlist) + return(data) end diff --git a/esmvaltool/interface_scripts/interface.ncl b/esmvaltool/interface_scripts/interface.ncl index e044d49a1b..b6fcda29f7 100644 --- a/esmvaltool/interface_scripts/interface.ncl +++ b/esmvaltool/interface_scripts/interface.ncl @@ -87,7 +87,9 @@ function select_metadata_by_atts(info:list, ; References ; ; Modification history -; 20181106_A-righ_ma: written. +; 20190507-lauer_axel: missing attributes are now allowed; a warning +; message is issued for each missing attribute. +; 20181106-righi_mattia: written. ; local funcname, scriptname, ninfo, natts, l_flag, ii, jj, ca begin @@ -104,11 +106,13 @@ begin l_flag = True do jj = 0, natts - 1 ; loop over requested attributes if (.not.isatt(info[ii], atts(jj))) then - error_msg("f", scriptname, funcname, \ + error_msg("w", scriptname, funcname, \ "missing " + atts(jj) + " attribute") - end if - if (info[ii]@$atts(jj)$.ne.att@$atts(jj)$) then l_flag = False + else + if (info[ii]@$atts(jj)$.ne.att@$atts(jj)$) then + l_flag = False + end if end if end do if (l_flag) then @@ -144,7 +148,7 @@ function select_metadata_by_name(info:list, ; References ; ; Modification history -; 20181105_A-righ_ma: written. +; 20181105-righi_mattia: written. 
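The select_metadata_by_atts change above downgrades a missing attribute from a fatal error to a warning, and the affected entry is simply excluded from the selection. A dict-based Python sketch of the relaxed matching (metadata entries modelled as plain dicts):

```python
import warnings

def select_metadata_by_atts(info, atts):
    """Keep the entries whose attributes all match; warn on missing ones."""
    selection = []
    for item in info:
        keep = True
        for name, value in atts.items():
            if name not in item:
                warnings.warn(f"missing {name} attribute")
                keep = False
            elif item[name] != value:
                keep = False
        if keep:
            selection.append(item)
    return selection
```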
; local funcname, scriptname begin @@ -180,15 +184,15 @@ function metadata_att_as_array(inlist:list, ; References ; ; Modification history -; 20181119-A_laue_ax: missing attributes are now allowed; error message is -; only issued if all attributes are missing -; 20180711-A_righ_ma: written +; 20181119-lauer_axel: missing attributes are now allowed; error message is +; only issued if all attributes are missing. +; 20180711-righi_mattia: written. ; local funcname, scriptname begin funcname = "metadata_att_as_array" - scriptname = "interface_scripts/auxiliary.ncl" + scriptname = "interface_scripts/interface.ncl" enter_msg(scriptname, funcname) do ii = 0, ListCount(inlist) - 1 @@ -205,6 +209,11 @@ begin end if end do + if (.not.(isdefined("out"))) then + error_msg("f", scriptname, funcname, "attribute " + att + " not " + \ + "found in list element(s)") + end if + if (all(ismissing(out))) then error_msg("f", scriptname, funcname, "attribute " + att + " not " + \ "found in list element(s)") diff --git a/esmvaltool/interface_scripts/logging.ncl b/esmvaltool/interface_scripts/logging.ncl index 1a221bc8e9..35c3167341 100644 --- a/esmvaltool/interface_scripts/logging.ncl +++ b/esmvaltool/interface_scripts/logging.ncl @@ -35,7 +35,7 @@ procedure log_info(output_string[*]:string) ; References ; ; Modification history -; 20180202-A_righ_ma: written to replace info_output. +; 20180202-righi_mattia: written to replace info_output. ; local nn, ii begin @@ -61,16 +61,16 @@ procedure log_debug(output_string[*]:string) ; output_string: the text to be output as message on screen ; ; Description -; Write a debug message to the log file (only if log_level = debug in -; config-user.yml). If the input is an array, each element will be -; written on different lines. +; Write a debug message to the log file (only if log_level = debug in the +; configuration). If the input is an array, each element will be written on +; different lines. ; ; Caveats ; ; References ; ; Modification history -; 20180202-A_righ_ma: written to replace info_output. +; 20180202-righi_mattia: written to replace info_output. ; local nn, ii begin @@ -105,8 +105,8 @@ procedure enter_msg(script[1]:string, ; References ; ; Modification history -; 20180202_A_righ_ma: removed required verbosity. -; 20150319-A_righ_ma: written. +; 20180202-righi_mattia: removed required verbosity. +; 20150319-righi_mattia: written. ; begin @@ -135,8 +135,8 @@ procedure leave_msg(script[1]:string, ; References ; ; Modification history -; 20180202_A_righ_ma: removed required verbosity. -; 20150319-A_righ_ma: written. +; 20180202-righi_mattia: removed required verbosity. +; 20150319-righi_mattia: written. ; begin @@ -169,8 +169,8 @@ procedure error_msg(type[1]:string, ; References ; ; Modification history -; 20150924-A_laue_ax: added type "error" (treated as "fatal") -; 20140929-A_righ_ma: written. +; 20150924-lauer_axel: added type "error" (treated as "fatal"). +; 20140929-righi_mattia: written. ; local nn, ii, msg_level, msg_origin begin @@ -215,7 +215,7 @@ procedure tool_stop() ; Arguments ; ; Description -; Forces the tool execution to stop at any point of an ncl script. +; Force the tool execution to stop at any point of an ncl script. ; For dubugging purposes only. ; ; Caveats @@ -225,7 +225,7 @@ procedure tool_stop() ; References ; ; Modification history -; 20150521-A_righ_ma: written. +; 20150521-righi_mattia: written. ; begin @@ -255,7 +255,7 @@ procedure exit_if_missing_atts(in[1], \ ; References ; ; Modification history -; 20141002-A_righ_ma: written. 
+; 20141002-righi_mattia: written. ; local funcname, scriptname begin @@ -265,7 +265,7 @@ begin enter_msg(scriptname, funcname) if (.not.all(isatt(in, reqatts))) then - error_msg("f", diag_script, "", "missing required diag_script_info " + \ + error_msg("f", DIAG_SCRIPT, "", "missing required diag_script_info " + \ "attribute: " + reqatts(ind(.not.isatt(in, reqatts)))) end if @@ -273,99 +273,128 @@ begin end - ; ############################################################################# undef("log_provenance") procedure log_provenance(nc_file:string, outfile:string, caption:string, statistics[*]:string, - domain:string, - plottype:string, + domains[*]:string, + plot_types[*]:string, diag_authors[*]:string, diag_refs[*]:string, infiles[*]:string) ; ; Arguments -; nc_file: netcdf file with data related to the plot -; outfile: file name of the figure (including path) -; caption: figure caption -; statistics: list of metrics/statistics calculated -; domain: of the data displayed -; plottype: plot type of the figure -; diag_authors: authors related to the diagnostic -; diag_refs: references related to the diagnotic +; nc_file: netcdf file with data related to the plot +; outfile: file name of the figure (including path) +; caption: figure caption +; statistics: list of metrics/statistics calculated +; domains: of the data displayed +; plot_types: plot type of the figure +; diag_authors: authors related to the diagnostic +; diag_refs: references related to the diagnostic +; infiles: input files (from preproc) used to generate the plot ; ; Description -; Creates a yaml file with meta data from the diagnostic -; script and save it in the plot directory +; Create a yaml file with meta data from the diagnostic script and save it +; in the run directory ; ; Caveats ; ; References ; -local funcname, scriptname, verbosity +; Modification history +; 20190415-righi_mattia: turn domains into a list. +; 20190415-righi_mattia: extended to avoid overwriting previous call. +; 20190225-bock_lisa: written. +; +local funcname, scriptname, yaml_file, outstring, existing begin funcname = "log_provenance" scriptname = "interface_scripts/logging.ncl" enter_msg(scriptname, funcname) - str0 = "? " + nc_file - n = dimsizes(infiles) - str1 = ": ancestors: [" - str1 = str1 + infiles(0) - if(n.ge.1) then - do i = 1, n-1 - str1 = str1 + ", " + infiles(i) - end do - end if - str1 = str1 + "]" - n = dimsizes(diag_authors) - str2 = " authors: [" - str2 = str2 + diag_authors(0) - if(n.ge.1) then - do i = 1, n-1 - str2 = str2 + ", " + diag_authors(i) - end do - end if - str2 = str2 + "]" - str3 = " caption: '" + caption + "'" - str4 = " domain: " + domain - str5 = " plot_file: " + outfile - str6 = " plot_type: " + plottype - n = dimsizes(diag_refs) - str7 = " references: [" - str7 = str7 + diag_refs(0) - if(n.ge.1) then - do i = 1, n-1 - str7 = str7 + ", " + diag_refs(i) - end do - end if - str7 = str7 + "]" - n = dimsizes(statistics) - str8 = " statistics: [" - str8 = str8 + statistics(0) - if(n.ge.1) then - do i = 1, n-1 - str8 = str8 + ", " + statistics(i) + ; Define output + outstring = new(8, string) + + ; Set yaml file path + yaml_file = config_user_info@run_dir + "diagnostic_provenance.yml" + + ; Save entries for NetCDF file + outstring(0) = "? 
" + nc_file + outstring(1) = ": ancestors: [" + str_join(infiles, ", ") + "]" + outstring(2) = " authors: [" + str_join(diag_authors, ", ") + "]" + outstring(3) = " caption: '" + caption + "'" + outstring(4) = " domains: [" + str_join(domains, ", ") + "]" + outstring(5) = " plot_types: [" + str_join(plot_types, ", ") + "]" + outstring(6) = " references: [" + str_join(diag_refs, ", ") + "]" + outstring(7) = " statistics: [" + str_join(statistics, ", ") + "]" + + ; Save entries for outfile if not "n/a" + if (outfile .ne. "n/a") then + suffix = get_file_suffix(outfile, 0) + + if (ismissing(suffix)) then + error_msg("f", scriptname, funcname, "got invalid value for outfile " + \ + "(path to figure): '" + outfile + "'; expected path to a " + \ + "file or 'n/a'") + end if + + ; For PNGs, additionally check for existence of files like + ; "plot_file.000001.png", "plot_file.000002.png", etc. and save + ; provenance record for each of them + if ((suffix .eq. ".png") .and. (.not. fileexists(outfile))) then + do file_idx = 1, 999999 + potential_outfile = suffix@fBase + "." + sprinti("%0.6i", file_idx) + \ + suffix + if (fileexists(potential_outfile)) then + if (.not. isvar("all_outfiles")) then + all_outfiles = potential_outfile + else + all_outfiles := array_append_record(all_outfiles, \ + potential_outfile, 0) + end if + else + break + end if + end do + end if + + ; For all other cases, use outfile + if (.not. isvar("all_outfiles")) then + all_outfiles = outfile + end if + + ; Save provenance record of all files + original_entry = outstring + do outfile_idx = 0, dimsizes(all_outfiles) - 1 + file_to_add := all_outfiles(outfile_idx) + if (.not. fileexists(file_to_add)) then + error_msg("f", scriptname, funcname, "outfile (path to figure) '" + \ + file_to_add + "' does not exist (for PNGs, this " + \ + "function also searches for 'FILE.000001.png', " + \ + "'FILE.000002.png', etc.); if no plot file is available " + \ + "use 'n/a'") + end if + new_entry = (/original_entry/) + new_entry(0) = "? 
" + file_to_add + outstring := array_append_record(outstring, new_entry, 0) end do end if - str8 = str8 + "]" - str := array_append_record(str7, str8, 0) - str := array_append_record(str6, str, 0) - str := array_append_record(str5, str, 0) - str := array_append_record(str4, str, 0) - str := array_append_record(str3, str, 0) - str := array_append_record(str2, str, 0) - str := array_append_record(str1, str, 0) - str := array_append_record(str0, str, 0) + ; Save existing information to avoid overwriting + if (fileexists(yaml_file)) then + existing = asciiread(yaml_file, -1, "string") + outstring := array_append_record(existing, outstring, 0) + delete(existing) + end if - yaml_file = config_user_info@run_dir + "diagnostic_provenance.yml" - asciiwrite(yaml_file, str) + ; Save provenance information + asciiwrite(yaml_file, outstring) - log_info(" write meta data to " + yaml_file) + log_info("Provenance information saved in " + yaml_file) leave_msg(scriptname, funcname) diff --git a/esmvaltool/preprocessor/__init__.py b/esmvaltool/preprocessor/__init__.py deleted file mode 100644 index 2f7d53f6f8..0000000000 --- a/esmvaltool/preprocessor/__init__.py +++ /dev/null @@ -1,416 +0,0 @@ -"""Preprocessor module.""" -import copy -import inspect -import logging - -import six -from iris.cube import Cube, CubeList - -from .._provenance import TrackedFile -from .._task import BaseTask -from ._area import (average_region, extract_named_regions, extract_region, - zonal_means) -from ._derive import derive -from ._download import download -from ._io import (_get_debug_filename, cleanup, concatenate, load, save, - write_metadata) -from ._mask import (mask_above_threshold, mask_below_threshold, - mask_fillvalues, mask_inside_range, mask_landsea, - mask_landseaice, mask_outside_range) -from ._multimodel import multi_model_statistics -from ._reformat import (cmor_check_data, cmor_check_metadata, fix_data, - fix_file, fix_metadata) -from ._regrid import extract_levels, regrid -from ._time import (annual_mean, extract_month, extract_season, extract_time, - regrid_time, seasonal_mean, time_average) -from ._volume import (average_volume, depth_integration, extract_trajectory, - extract_transect, extract_volume) - -logger = logging.getLogger(__name__) - -__all__ = [ - 'download', - # File reformatting/CMORization - 'fix_file', - # Load cubes from file - 'load', - # Derive variable - 'derive', - # Metadata reformatting/CMORization - 'fix_metadata', - # Concatenate all cubes in one - 'concatenate', - 'cmor_check_metadata', - # Time extraction - 'extract_time', - 'extract_season', - 'extract_month', - # Data reformatting/CMORization - 'fix_data', - # Level extraction - 'extract_levels', - # Mask landsea (fx or Natural Earth) - 'mask_landsea', - # Mask landseaice, sftgif only - 'mask_landseaice', - # Regridding - 'regrid', - # Masking missing values - 'mask_fillvalues', - 'mask_above_threshold', - 'mask_below_threshold', - 'mask_inside_range', - 'mask_outside_range', - # Region selection - 'extract_region', - 'extract_volume', - 'extract_trajectory', - 'extract_transect', - # 'average_zone': average_zone, - # 'cross_section': cross_section, - 'multi_model_statistics', - # Grid-point operations - 'extract_named_regions', - 'depth_integration', - 'average_region', - 'average_volume', - # Time operations - # 'annual_cycle': annual_cycle, - # 'diurnal_cycle': diurnal_cycle, - 'zonal_means', - 'annual_mean', - 'seasonal_mean', - 'time_average', - 'regrid_time', - 'cmor_check_data', - # Save to file - 'save', - 'cleanup', -] - 
-DEFAULT_ORDER = tuple(__all__) - -# The order of intial and final steps cannot be configured -INITIAL_STEPS = DEFAULT_ORDER[:DEFAULT_ORDER.index('fix_data') + 1] -FINAL_STEPS = DEFAULT_ORDER[DEFAULT_ORDER.index('cmor_check_data'):] - -MULTI_MODEL_FUNCTIONS = { - 'multi_model_statistics', - 'mask_fillvalues', -} - - -def _get_itype(step): - """Get the input type of a preprocessor function.""" - function = globals()[step] - itype = inspect.getargspec(function).args[0] - return itype - - -def check_preprocessor_settings(settings): - """Check preprocessor settings.""" - # The inspect functions getargspec and getcallargs are deprecated - # in Python 3, but their replacements are not available in Python 2. - # TODO: Use the new Python 3 inspect API - for step in settings: - if step not in DEFAULT_ORDER: - raise ValueError( - "Unknown preprocessor function '{}', choose from: {}".format( - step, ', '.join(DEFAULT_ORDER))) - - function = function = globals()[step] - argspec = inspect.getargspec(function) - args = argspec.args[1:] - # Check for invalid arguments - invalid_args = set(settings[step]) - set(args) - if invalid_args: - raise ValueError( - "Invalid argument(s): {} encountered for preprocessor " - "function {}. \nValid arguments are: [{}]".format( - ', '.join(invalid_args), step, ', '.join(args))) - - # Check for missing arguments - defaults = argspec.defaults - end = None if defaults is None else -len(defaults) - missing_args = set(args[:end]) - set(settings[step]) - if missing_args: - raise ValueError( - "Missing required argument(s) {} for preprocessor " - "function {}".format(missing_args, step)) - # Final sanity check in case the above fails to catch a mistake - try: - inspect.getcallargs(function, None, **settings[step]) - except TypeError: - logger.error( - "Wrong preprocessor function arguments in " - "function '%s'", step) - raise - - -def _check_multi_model_settings(products): - """Check that multi dataset settings are identical for all products.""" - multi_model_steps = (step for step in MULTI_MODEL_FUNCTIONS - if any(step in p.settings for p in products)) - for step in multi_model_steps: - reference = None - for product in products: - settings = product.settings.get(step) - if settings is None: - continue - elif reference is None: - reference = product - elif reference.settings[step] != settings: - raise ValueError( - "Unable to combine differing multi-dataset settings for " - "{} and {}, {} and {}".format( - reference.filename, product.filename, - reference.settings[step], settings)) - - -def _get_multi_model_settings(products, step): - """Select settings for multi model step""" - _check_multi_model_settings(products) - settings = {} - exclude = set() - for product in products: - if step in product.settings: - settings = product.settings[step] - else: - exclude.add(product) - return settings, exclude - - -def _run_preproc_function(function, items, kwargs): - """Run preprocessor function.""" - msg = "{}({}, {})".format(function.__name__, items, kwargs) - logger.debug("Running %s", msg) - try: - return function(items, **kwargs) - except Exception: - logger.error("Failed to run %s", msg) - raise - - -def preprocess(items, step, **settings): - """Run preprocessor""" - logger.debug("Running preprocessor step %s", step) - function = globals()[step] - itype = _get_itype(step) - - result = [] - if itype.endswith('s'): - result.append(_run_preproc_function(function, items, settings)) - else: - for item in items: - result.append(_run_preproc_function(function, item, settings)) - - 
items = [] - for item in result: - if isinstance(item, - (PreprocessorFile, Cube, six.string_types)): - items.append(item) - else: - items.extend(item) - - return items - - -def get_step_blocks(steps, order): - """Group steps into execution blocks.""" - blocks = [] - prev_step_type = None - for step in order[order.index('load') + 1:order.index('save')]: - if step in steps: - step_type = step in MULTI_MODEL_FUNCTIONS - if step_type is not prev_step_type: - block = [] - blocks.append(block) - prev_step_type = step_type - block.append(step) - return blocks - - -class PreprocessorFile(TrackedFile): - """Preprocessor output file.""" - - def __init__(self, attributes, settings, ancestors=None): - super(PreprocessorFile, self).__init__(attributes['filename'], - attributes, ancestors) - - self.settings = copy.deepcopy(settings) - if 'save' not in self.settings: - self.settings['save'] = {} - self.settings['save']['filename'] = self.filename - - self.files = [a.filename for a in ancestors or ()] - - self._cubes = None - self._prepared = False - - def check(self): - """Check preprocessor settings.""" - check_preprocessor_settings(self.settings) - - def apply(self, step, debug=False): - """Apply preprocessor step to product.""" - if step not in self.settings: - raise ValueError( - "PreprocessorFile {} has no settings for step {}".format( - self, step)) - self.cubes = preprocess(self.cubes, step, **self.settings[step]) - if debug: - logger.debug("Result %s", self.cubes) - filename = _get_debug_filename(self.filename, step) - save(self.cubes, filename) - - def prepare(self): - """Apply preliminary file operations on product.""" - if not self._prepared: - for step in DEFAULT_ORDER[:DEFAULT_ORDER.index('load')]: - if step in self.settings: - self.files = preprocess(self.files, step, - **self.settings[step]) - self._prepared = True - - @property - def cubes(self): - """Cubes.""" - if self.is_closed: - self.prepare() - self._cubes = preprocess(self.files, 'load', - **self.settings.get('load', {})) - return self._cubes - - @cubes.setter - def cubes(self, value): - self._cubes = value - - def save(self): - """Save cubes to disk.""" - if self._cubes is not None: - self.files = preprocess(self._cubes, 'save', - **self.settings['save']) - self.files = preprocess(self.files, 'cleanup', - **self.settings.get('cleanup', {})) - - def close(self): - """Close the file.""" - self.save() - self._cubes = None - - @property - def is_closed(self): - """Check if the file is closed.""" - return self._cubes is None - - def _initialize_entity(self): - """Initialize the entity representing the file.""" - super(PreprocessorFile, self)._initialize_entity() - settings = { - 'preprocessor:' + k: str(v) - for k, v in self.settings.items() - } - self.entity.add_attributes(settings) - - -# TODO: use a custom ProductSet that raises an exception if you try to -# add the same Product twice - - -def _apply_multimodel(products, step, debug): - """Apply multi model step to products.""" - settings, exclude = _get_multi_model_settings(products, step) - - logger.debug("Applying %s to\n%s", step, '\n'.join( - str(p) for p in products - exclude)) - result = preprocess(products - exclude, step, **settings) - products = set(result) | exclude - - if debug: - for product in products: - logger.debug("Result %s", product.filename) - if not product.is_closed: - for cube in product.cubes: - logger.debug("with cube %s", cube) - - return products - - -class PreprocessingTask(BaseTask): - """Task for running the preprocessor""" - - def __init__( - self, - 
products, - ancestors=None, - name='', - order=DEFAULT_ORDER, - debug=None, - write_ncl_interface=False, - ): - """Initialize""" - super(PreprocessingTask, self).__init__(ancestors=ancestors, name=name) - _check_multi_model_settings(products) - self.products = set(products) - self.order = list(order) - self.debug = debug - self.write_ncl_interface = write_ncl_interface - - def _intialize_product_provenance(self): - """Initialize product provenance.""" - for product in self.products: - product.initialize_provenance(self.activity) - - # Hacky way to initialize the multi model products as well. - step = 'multi_model_statistics' - input_products = [p for p in self.products if step in p.settings] - if input_products: - statistic_products = input_products[0].settings[step].get( - 'output_products', {}).values() - for product in statistic_products: - product.initialize_provenance(self.activity) - - def _run(self, _): - """Run the preprocessor.""" - self._intialize_product_provenance() - - steps = { - step - for product in self.products for step in product.settings - } - blocks = get_step_blocks(steps, self.order) - for block in blocks: - logger.debug("Running block %s", block) - if block[0] in MULTI_MODEL_FUNCTIONS: - for step in block: - self.products = _apply_multimodel(self.products, step, - self.debug) - else: - for product in self.products: - logger.debug("Applying single-model steps to %s", product) - for step in block: - if step in product.settings: - product.apply(step, self.debug) - if block == blocks[-1]: - product.close() - - for product in self.products: - product.close() - metadata_files = write_metadata(self.products, - self.write_ncl_interface) - return metadata_files - - def __str__(self): - """Get human readable description.""" - order = [ - step for step in self.order - if any(step in product.settings for product in self.products) - ] - products = '\n\n'.join(str(p) for p in self.products) - txt = "{}:\norder: {}\n{}\n{}".format( - self.__class__.__name__, - order, - products, - super(PreprocessingTask, self).str(), - ) - return txt diff --git a/esmvaltool/preprocessor/_area.py b/esmvaltool/preprocessor/_area.py deleted file mode 100644 index 9c7eb3589b..0000000000 --- a/esmvaltool/preprocessor/_area.py +++ /dev/null @@ -1,294 +0,0 @@ -""" -Area operations on data cubes. - -Allows for selecting data subsets using certain latitude and longitude bounds; -selecting geographical regions; constructing area averages; etc. -""" -import logging - -import iris -import numpy as np - - -logger = logging.getLogger(__name__) - - -# guess bounds tool -def _guess_bounds(cube, coords): - """Guess bounds of a cube, or not.""" - # check for bounds just in case - for coord in coords: - if not cube.coord(coord).has_bounds(): - cube.coord(coord).guess_bounds() - return cube - - -# slice cube over a restricted area (box) -def extract_region(cube, start_longitude, end_longitude, start_latitude, - end_latitude): - """ - Extract a region from a cube. - - Function that subsets a cube on a box (start_longitude, end_longitude, - start_latitude, end_latitude) - This function is a restriction of masked_cube_lonlat(). - - Arguments - --------- - cube: iris.cube.Cube - input cube. - - start_longitude: float - Western boundary longitude. - - end_longitude: float - Eastern boundary longitude. - - start_latitude: float - Southern Boundary latitude. - - end_latitude: float - Northern Boundary Latitude. - - Returns - ------- - iris.cube.Cube - smaller cube. - """ - # Converts Negative longitudes to 0 -> 360. 
standard - start_longitude = float(start_longitude) - end_longitude = float(end_longitude) - start_latitude = float(start_latitude) - end_latitude = float(end_latitude) - - if cube.coord('latitude').ndim == 1: - region_subset = cube.intersection( - longitude=(start_longitude, end_longitude), - latitude=(start_latitude, end_latitude)) - region_subset = region_subset.intersection(longitude=(0., 360.)) - return region_subset - # irregular grids - lats = cube.coord('latitude').points - lons = cube.coord('longitude').points - mask = np.ma.array(cube.data).mask - mask += np.ma.masked_where(lats < start_latitude, lats).mask - mask += np.ma.masked_where(lats > end_latitude, lats).mask - mask += np.ma.masked_where(lons > start_longitude, lons).mask - mask += np.ma.masked_where(lons > end_longitude, lons).mask - cube.data = np.ma.masked_where(mask, cube.data) - return cube - - -def get_iris_analysis_operation(operator): - """ - Determine the iris analysis operator from a string. - - Arguments - --------- - operator: string - A named operator. - Returns - ------- - function: A function from iris.analysis - """ - operators = ['mean', 'median', 'std_dev', 'variance', 'min', 'max'] - operator = operator.lower() - if operator not in operators: - raise ValueError("operator {} not recognised. " - "Accepted values are: {}." - "".format(operator, ', '.join(operators))) - operation = getattr(iris.analysis, operator.upper()) - return operation - - -def zonal_means(cube, coordinate, mean_type): - """ - Get zonal means. - - Function that returns zonal means along a coordinate `coordinate`; - the type of mean is controlled by mean_type variable (string):: - - 'mean' -> MEAN - 'median' -> MEDIAN - 'std_dev' -> STD_DEV - 'variance' -> VARIANCE - 'min' -> MIN - 'max' -> MAX - - Arguments - --------- - cube: iris.cube.Cube - input cube. - - coordinate: str - name of coordinate to make mean - - mean_type: str - Type of analysis to use, from iris.analysis. - - Returns - ------- - iris.cube.Cube - Returns a cube - """ - operation = get_iris_analysis_operation(mean_type) - return cube.collapsed(coordinate, operation) - - -def tile_grid_areas(cube, fx_files): - """ - Tile the grid area data to match the dataset cube. - - Arguments - --------- - cube: iris.cube.Cube - input cube. - fx_files: dictionary - dictionary of field:filename for the fx_files - - Returns - ------- - iris.cube.Cube - Freshly tiled grid areas cube. - """ - grid_areas = np.empty(0) - if fx_files: - for key, fx_file in fx_files.items(): - if fx_file is None: - continue - logger.info('Attempting to load %s from file: %s', key, fx_file) - fx_cube = iris.load_cube(fx_file) - - grid_areas = fx_cube.data - cube_shape = cube.data.shape - if cube.data.ndim == 4 and grid_areas.ndim == 2: - grid_areas = np.tile(grid_areas, - [cube_shape[0], cube_shape[1], 1, 1]) - elif cube.data.ndim == 4 and grid_areas.ndim == 3: - grid_areas = np.tile(grid_areas, - [cube_shape[0], 1, 1, 1]) - elif cube.data.ndim == 3 and grid_areas.ndim == 2: - grid_areas = np.tile(grid_areas, - [cube_shape[0], 1, 1]) - else: - raise ValueError('Grid and dataset number of dimensions not ' - 'recognised: {} and {}.' - ''.format(cube.data.ndim, grid_areas.ndim)) - return grid_areas - - -# get the area average -def average_region(cube, coord1, coord2, operator='mean', fx_files=None): - """ - Determine the area average. - - The average in the horizontal direction requires the coord1 and coord2 - arguments. These strings are usually 'longitude' and 'latitude' but - may depends on the cube. 
- - While this function is named `average_region`, it can be used to apply - several different operations in the horizontal plane: mean, standard - deviation, median, variance, minimum and maximum. These options are - specified using the `operator` argument, which accepts the following - values: - - +------------+--------------------------------------------------+ - | `mean` | Area weighted mean. | - +------------+--------------------------------------------------+ - | `median` | Median (not area weighted) | - +------------+--------------------------------------------------+ - | `std_dev` | Standard Deviation (not area weighted) | - +------------+--------------------------------------------------+ - | `variance` | Variance (not area weighted) | - +------------+--------------------------------------------------+ - | `min` | Minimum value | - +------------+--------------------------------------------------+ - | `max` | Maximum value | - +------------+--------------------------------------------------+ - - - Arguments - --------- - cube: iris.cube.Cube - input cube. - coord1: str - Name of the first coordinate dimension - coord2: str - Name of the second coordinate dimension - operator: str - Name of the operation to apply (default: mean) - fx_files: dictionary - dictionary of field:filename for the fx_files - - Returns - ------- - iris.cube.Cube - collapsed cube. - """ - grid_areas = tile_grid_areas(cube, fx_files) - - if not fx_files and cube.coord('latitude').points.ndim == 2: - logger.error('average_region ERROR: fx_file needed to calculate grid ' - 'cell area for irregular grids.') - raise iris.exceptions.CoordinateMultiDimError(cube.coord('latitude')) - - if not grid_areas.any(): - cube = _guess_bounds(cube, [coord1, coord2]) - grid_areas = iris.analysis.cartography.area_weights(cube) - logger.info('Calculated grid area shape: %s', grid_areas.shape) - - if cube.data.shape != grid_areas.shape: - raise ValueError('Cube shape ({}) doesn\'t match grid area shape ' - '({})'.format(cube.data.shape, grid_areas.shape)) - - operation = get_iris_analysis_operation(operator) - - # TODO: implement weighted stdev, median, and var when available in iris. - # See iris issue: https://github.com/SciTools/iris/issues/3208 - - if operator in ['mean', ]: - return cube.collapsed([coord1, coord2], - operation, - weights=grid_areas) - - # Many iris analysis functions do not accept a weights argument. - return cube.collapsed([coord1, coord2], operation) - - -def extract_named_regions(cube, regions): - """ - Extract a specific named region. - - The region coordinate exists in certain CMIP datasets. - This preprocessor allows specific named regions to be extracted. - - Arguments - --------- - cube: iris.cube.Cube - input cube. - - regions: str, list - A region or list of regions to extract. - - Returns - ------- - iris.cube.Cube - cube with the requested regions only. - """ - # Make sure regions is a list of strings - if isinstance(regions, str): - regions = [regions, ] - - if not isinstance(regions, (list, tuple, set)): - raise ValueError('Regions "{}" is not an acceptable format.'
- ''.format(regions)) - - available_regions = set(cube.coord('region').points) - invalid_regions = set(regions) - available_regions - if invalid_regions: - raise ValueError('Region(s) "{}" not in cube region(s): ' - '{}'.format(invalid_regions, available_regions)) - - constraints = iris.Constraint(region=lambda r: r in regions) - cube = cube.extract(constraint=constraints) - return cube diff --git a/esmvaltool/preprocessor/_derive/__init__.py b/esmvaltool/preprocessor/_derive/__init__.py deleted file mode 100644 index 8268a36a70..0000000000 --- a/esmvaltool/preprocessor/_derive/__init__.py +++ /dev/null @@ -1,122 +0,0 @@ -"""Automatically derive variables.""" - -import importlib -import logging -from copy import deepcopy -from pathlib import Path - -import iris - -logger = logging.getLogger(__name__) - - -def _get_all_derived_variables(): - """Get all possible derived variables. - - Returns - ------- - dict - All derived variables with `short_name` (keys) and the associated - python classes (values). - - """ - derivers = {} - for path in Path(__file__).parent.glob('[a-z]*.py'): - short_name = path.stem - module = importlib.import_module( - f'esmvaltool.preprocessor._derive.{short_name}') - derivers[short_name] = getattr(module, 'DerivedVariable') - return derivers - - -ALL_DERIVED_VARIABLES = _get_all_derived_variables() - -__all__ = list(ALL_DERIVED_VARIABLES) - - -def get_required(short_name): - """Return all required variables for derivation. - - Get all information (at least `short_name`) required for derivation and - optionally a list of needed fx files. - - Parameters - ---------- - short_name : str - `short_name` of the variable to derive. - - Returns - ------- - list - List of dictionaries (including at least the key `short_name`) - and occasionally mip or fx_files. - - """ - DerivedVariable = ALL_DERIVED_VARIABLES[short_name] - variables = deepcopy(DerivedVariable().required) - return variables - - -def derive(cubes, - short_name, - long_name, - units, - standard_name=None, - fx_files=None): - """Derive variable. - - Parameters - ---------- - cubes: iris.cube.CubeList - Includes all the needed variables for derivation defined in - :func:`get_required`. - short_name: str - short_name - long_name: str - long_name - units: str - units - standard_name: str, optional - standard_name - fx_files: dict, optional - If required, dictionary containing fx files with `short_name` - (keys) and path (values) of the fx variable. - - Returns - ------- - iris.cube.Cube - The new derived variable. 
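As a hedged sketch (the cube names and metadata values are illustrative only), a caller might derive `lwcre` from its two required input variables like this::

    cubes = iris.cube.CubeList([rlut_cube, rlutcs_cube])
    lwcre_cube = derive(cubes, short_name='lwcre',
                        long_name='longwave cloud radiative effect',
                        units='W m-2')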
- - """ - if short_name == cubes[0].var_name: - return cubes[0] - - cubes = iris.cube.CubeList(cubes) - # Preprare input cubes and add fx files if necessary - if fx_files: - for (fx_var, fx_path) in fx_files.items(): - if fx_path is not None: - fx_cube = iris.load_cube( - fx_path, - constraint=iris.Constraint( - cube_func=lambda c, var=fx_var: c.var_name == var)) - cubes.append(fx_cube) - else: - logger.debug( - "Requested fx variable '%s' for derivation of " - "'%s' not found", fx_var, short_name) - - # Derive variable - DerivedVariable = ALL_DERIVED_VARIABLES[short_name] - cube = DerivedVariable().calculate(cubes) - - # Set standard attributes - cube.var_name = short_name - cube.standard_name = standard_name if standard_name else None - cube.long_name = long_name - cube.units = units - for temp in cubes: - if 'source_file' in temp.attributes: - cube.attributes['source_file'] = temp.attributes['source_file'] - - return cube diff --git a/esmvaltool/preprocessor/_derive/_baseclass.py b/esmvaltool/preprocessor/_derive/_baseclass.py deleted file mode 100644 index 58844d9763..0000000000 --- a/esmvaltool/preprocessor/_derive/_baseclass.py +++ /dev/null @@ -1,40 +0,0 @@ -"""Contains the base class for derived variables.""" -from abc import abstractmethod - - -class DerivedVariableBase: - """Base class for derived variables.""" - - @property - @staticmethod - @abstractmethod - def required(): - """List of required variables for derivation.""" - - @staticmethod - @abstractmethod - def calculate(cubes): - """Compute desired derived variable. - - This method needs to be overridden in the child class belonging to the - desired variable to derive. - - Parameters - ---------- - cubes : iris.cube.CubeList - Includes all the needed variables (incl. fx variables) for - derivation defined in the static class variable - `_required_variables`. - - Returns - ------- - iris.cube.Cube - New derived variable. - - Raises - ------ - NotImplementedError - If the desired variable derivation is not implemented, i.e. if this - method is called from this base class and not a child class. - - """ diff --git a/esmvaltool/preprocessor/_derive/_shared.py b/esmvaltool/preprocessor/_derive/_shared.py deleted file mode 100644 index 8d7fdade4b..0000000000 --- a/esmvaltool/preprocessor/_derive/_shared.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Auxiliary derivation functions used for multiple variables.""" - -import iris - - -def cloud_area_fraction(cubes, tau_constraint, plev_constraint): - """Calculate cloud area fraction for different parameters.""" - clisccp_cube = cubes.extract_strict( - iris.Constraint(name='isccp_cloud_area_fraction')) - new_cube = clisccp_cube - new_cube = new_cube.extract(tau_constraint & plev_constraint) - coord_names = [ - coord.standard_name for coord in new_cube.coords() - if len(coord.points) > 1 - ] - if 'atmosphere_optical_thickness_due_to_cloud' in coord_names: - new_cube = new_cube.collapsed( - 'atmosphere_optical_thickness_due_to_cloud', iris.analysis.SUM) - if 'air_pressure' in coord_names: - new_cube = new_cube.collapsed('air_pressure', iris.analysis.SUM) - - return new_cube diff --git a/esmvaltool/preprocessor/_derive/alb.py b/esmvaltool/preprocessor/_derive/alb.py deleted file mode 100644 index 5cea242c5d..0000000000 --- a/esmvaltool/preprocessor/_derive/alb.py +++ /dev/null @@ -1,36 +0,0 @@ -"""Derivation of variable `alb`. 
- -authors: - - crez_ba - -""" - -from iris import Constraint - -from ._baseclass import DerivedVariableBase - - -class DerivedVariable(DerivedVariableBase): - """Derivation of variable `alb`.""" - - # Required variables - required = [ - { - 'short_name': 'rsds' - }, - { - 'short_name': 'rsus' - }, - ] - - @staticmethod - def calculate(cubes): - """Compute surface albedo.""" - rsds_cube = cubes.extract_strict( - Constraint(name='surface_downwelling_shortwave_flux_in_air')) - rsus_cube = cubes.extract_strict( - Constraint(name='surface_upwelling_shortwave_flux_in_air')) - - alb_cube = rsus_cube / rsds_cube - - return alb_cube diff --git a/esmvaltool/preprocessor/_derive/amoc.py b/esmvaltool/preprocessor/_derive/amoc.py deleted file mode 100644 index 5095f74dde..0000000000 --- a/esmvaltool/preprocessor/_derive/amoc.py +++ /dev/null @@ -1,54 +0,0 @@ -"""Derivation of variable `amoc`.""" -import iris -import numpy as np - -from ._baseclass import DerivedVariableBase - - -class DerivedVariable(DerivedVariableBase): - """Derivation of variable `amoc`.""" - - # Required variables - required = [{'short_name': 'msftmyz', 'mip': 'Omon'}] - - @staticmethod - def calculate(cubes): - """Compute Atlantic meridional overturning circulation. - - Arguments - --------- - cubes: iris.cube.CubeList - input cube list. - - Returns - ------- - iris.cube.Cube - Output AMOC cube. - """ - # 0. Load the msftmyz cube. - cube = cubes.extract_strict( - iris.Constraint( - name='ocean_meridional_overturning_mass_streamfunction')) - - # 1: find the relevant region - atlantic_region = 'atlantic_arctic_ocean' - atl_constraint = iris.Constraint(region=atlantic_region) - cube = cube.extract(constraint=atl_constraint) - - # 2: Remove the shallowest 500m to avoid wind driven mixed layer. - depth_constraint = iris.Constraint(depth=lambda d: d >= 500.) - cube = cube.extract(constraint=depth_constraint) - - # 3: Find the latitude closest to 26N - rapid_location = 26.5 - lats = cube.coord('latitude').points - rapid_index = np.argmin(np.abs(lats - rapid_location)) - rapid_constraint = iris.Constraint(latitude=lats[rapid_index]) - cube = cube.extract(constraint=rapid_constraint) - - # 4: find the maximum in the water column along the time axis. - cube = cube.collapsed( - ['depth', 'region'], - iris.analysis.MAX, - ) - return cube diff --git a/esmvaltool/preprocessor/_derive/clhmtisccp.py b/esmvaltool/preprocessor/_derive/clhmtisccp.py deleted file mode 100644 index 8eb9dd2358..0000000000 --- a/esmvaltool/preprocessor/_derive/clhmtisccp.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Derivation of variable `clhmtisccp`.""" - -from iris import Constraint - -from ._baseclass import DerivedVariableBase -from ._shared import cloud_area_fraction - - -class DerivedVariable(DerivedVariableBase): - """Derivation of variable `clhmtisccp`.""" - - # Required variables - required = [{'short_name': 'clisccp'}] - - @staticmethod - def calculate(cubes): - """Compute ISCCP high level medium-thickness cloud area fraction.""" - tau = Constraint( - atmosphere_optical_thickness_due_to_cloud=lambda t: 3.6 < t <= 23.) - plev = Constraint(air_pressure=lambda p: p <= 44000.)
- - return cloud_area_fraction(cubes, tau, plev) diff --git a/esmvaltool/preprocessor/_derive/clhtkisccp.py b/esmvaltool/preprocessor/_derive/clhtkisccp.py deleted file mode 100644 index 3f73c8ff13..0000000000 --- a/esmvaltool/preprocessor/_derive/clhtkisccp.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Derivation of variable `clhtkisccp`.""" - -from iris import Constraint - -from ._baseclass import DerivedVariableBase -from ._shared import cloud_area_fraction - - -class DerivedVariable(DerivedVariableBase): - """Derivation of variable `clhtkisccp`.""" - - # Required variables - required = [{'short_name': 'clisccp'}] - - @staticmethod - def calculate(cubes): - """Compute ISCCP high level thick cloud area fraction.""" - tau = Constraint( - atmosphere_optical_thickness_due_to_cloud=lambda t: t > 23.) - plev = Constraint(air_pressure=lambda p: p <= 44000.) - - return cloud_area_fraction(cubes, tau, plev) diff --git a/esmvaltool/preprocessor/_derive/cllmtisccp.py b/esmvaltool/preprocessor/_derive/cllmtisccp.py deleted file mode 100644 index e6564142b5..0000000000 --- a/esmvaltool/preprocessor/_derive/cllmtisccp.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Derivation of variable `cllmtisccp`.""" - -from iris import Constraint - -from ._baseclass import DerivedVariableBase -from ._shared import cloud_area_fraction - - -class DerivedVariable(DerivedVariableBase): - """Derivation of variable `cllmtisccp`.""" - - # Required variables - required = [{'short_name': 'clisccp'}] - - @staticmethod - def calculate(cubes): - """Compute ISCCP low level medium-thickness cloud area fraction.""" - tau = Constraint( - atmosphere_optical_thickness_due_to_cloud=lambda t: 3.6 < t <= 23.) - plev = Constraint(air_pressure=lambda p: p > 68000.) - - return cloud_area_fraction(cubes, tau, plev) diff --git a/esmvaltool/preprocessor/_derive/clltkisccp.py b/esmvaltool/preprocessor/_derive/clltkisccp.py deleted file mode 100644 index 79da0ee280..0000000000 --- a/esmvaltool/preprocessor/_derive/clltkisccp.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Derivation of variable `clltkisccp`.""" - -from iris import Constraint - -from ._baseclass import DerivedVariableBase -from ._shared import cloud_area_fraction - - -class DerivedVariable(DerivedVariableBase): - """Derivation of variable `clltkisccp`.""" - - # Required variables - required = [{'short_name': 'clisccp'}] - - @staticmethod - def calculate(cubes): - """Compute ISCCP low level thick cloud area fraction.""" - tau = Constraint( - atmosphere_optical_thickness_due_to_cloud=lambda t: t > 23.) - plev = Constraint(air_pressure=lambda p: p > 68000.) - - return cloud_area_fraction(cubes, tau, plev) diff --git a/esmvaltool/preprocessor/_derive/clmmtisccp.py b/esmvaltool/preprocessor/_derive/clmmtisccp.py deleted file mode 100644 index fc70859c4d..0000000000 --- a/esmvaltool/preprocessor/_derive/clmmtisccp.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Derivation of variable `clmmtisccp`.""" - -from iris import Constraint - -from ._baseclass import DerivedVariableBase -from ._shared import cloud_area_fraction - - -class DerivedVariable(DerivedVariableBase): - """Derivation of variable `clmmtisccp`.""" - - # Required variables - required = [{'short_name': 'clisccp'}] - - @staticmethod - def calculate(cubes): - """Compute ISCCP middle level medium-thickness cloud area fraction.""" - tau = Constraint( - atmosphere_optical_thickness_due_to_cloud=lambda t: 3.6 < t <= 23.) - plev = Constraint(air_pressure=lambda p: 44000. < p <= 68000.) 
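# The cl*isccp derivers in this package all follow the same binning
# (summarised here from the constraints used above and in the sibling
# modules): optical thickness 3.6 < tau <= 23 selects medium-thickness
# cloud and tau > 23 selects thick cloud, while air pressure
# p <= 44000 Pa marks high level, 44000 < p <= 68000 Pa middle level,
# and p > 68000 Pa low level cloud.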
- - return cloud_area_fraction(cubes, tau, plev) diff --git a/esmvaltool/preprocessor/_derive/clmtkisccp.py b/esmvaltool/preprocessor/_derive/clmtkisccp.py deleted file mode 100644 index eb6ef501a7..0000000000 --- a/esmvaltool/preprocessor/_derive/clmtkisccp.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Derivation of variable `clmtkisccp`.""" - -from iris import Constraint - -from ._baseclass import DerivedVariableBase -from ._shared import cloud_area_fraction - - -class DerivedVariable(DerivedVariableBase): - """Derivation of variable `clmtkisccp`.""" - - # Required variables - required = [{'short_name': 'clisccp'}] - - @staticmethod - def calculate(cubes): - """Compute ISCCP middle level thick cloud area fraction.""" - tau = Constraint( - atmosphere_optical_thickness_due_to_cloud=lambda t: t > 23.) - plev = Constraint(air_pressure=lambda p: 44000. < p <= 68000.) - - return cloud_area_fraction(cubes, tau, plev) diff --git a/esmvaltool/preprocessor/_derive/gtfgco2.py b/esmvaltool/preprocessor/_derive/gtfgco2.py deleted file mode 100644 index f8295c8c6e..0000000000 --- a/esmvaltool/preprocessor/_derive/gtfgco2.py +++ /dev/null @@ -1,81 +0,0 @@ -"""Derivation of variable `gtfgco2`.""" -import iris -import numpy as np - -from ._baseclass import DerivedVariableBase - - -def calculate_total_flux(fgco2_cube, cube_area): - """ - Calculate the total flux over the unmasked cube cells. - - Requires a cube with two spatial dimensions (no depth coordinate). - - Parameters - ---------- - fgco2_cube: iris.cube.Cube - Data cube - cube_area: iris.cube.Cube - Cell area cube - - Returns - ------- - numpy.array: - A numpy array containing the total flux of CO2. - - """ - data = [] - times = fgco2_cube.coord('time') - - fgco2_cube.data = np.ma.array(fgco2_cube.data) - for time_itr in np.arange(len(times.points)): - - total_flux = fgco2_cube[time_itr].data * cube_area.data - - total_flux = np.ma.masked_where(fgco2_cube[time_itr].data.mask, - total_flux) - data.append(total_flux.sum()) - - # Convert the per-time-step totals to a numpy array - data = np.array(data) - return data - - -class DerivedVariable(DerivedVariableBase): - """Derivation of variable `gtfgco2`.""" - - # Required variables - required = [ - { - 'short_name': 'fgco2', - 'mip': 'Omon', - 'fx_files': [ - 'areacello', - ], - }, - ] - - @staticmethod - def calculate(cubes): - """Compute the global total flux of CO2.""" - fgco2_cube = cubes.extract_strict( - iris.Constraint(name='surface_downward_mass_flux_of_carbon_dioxide' - '_expressed_as_carbon')) - - try: - cube_area = cubes.extract_strict(iris.Constraint(name='cell_area')) - except iris.exceptions.ConstraintMismatchError: - pass - - total_flux = calculate_total_flux(fgco2_cube, cube_area) - - # Dummy result cube - result = fgco2_cube.collapsed( - ['latitude', 'longitude'], - iris.analysis.MEAN, - ) - result.units = fgco2_cube.units * cube_area.units - - result.data = total_flux - return result diff --git a/esmvaltool/preprocessor/_derive/lwcre.py b/esmvaltool/preprocessor/_derive/lwcre.py deleted file mode 100644 index 3526562fec..0000000000 --- a/esmvaltool/preprocessor/_derive/lwcre.py +++ /dev/null @@ -1,32 +0,0 @@ -"""Derivation of variable `lwcre`.""" - -from iris import Constraint - -from ._baseclass import DerivedVariableBase - - -class DerivedVariable(DerivedVariableBase): - """Derivation of variable `lwcre`.""" - - # Required variables - required = [ - { - 'short_name': 'rlut' - }, - { - 'short_name': 'rlutcs' - }, - ] - - @staticmethod - def calculate(cubes): - """Compute longwave cloud radiative effect.""" -
rlut_cube = cubes.extract_strict( - Constraint(name='toa_outgoing_longwave_flux')) - rlutcs_cube = cubes.extract_strict( - Constraint(name='toa_outgoing_longwave_flux_assuming_clear_sky')) - - lwcre_cube = rlutcs_cube - rlut_cube - lwcre_cube.units = rlut_cube.units - - return lwcre_cube diff --git a/esmvaltool/preprocessor/_derive/lwp.py b/esmvaltool/preprocessor/_derive/lwp.py deleted file mode 100644 index 6b795c5665..0000000000 --- a/esmvaltool/preprocessor/_derive/lwp.py +++ /dev/null @@ -1,72 +0,0 @@ -"""Derivation of variable `lwp`.""" - -import logging - -from iris import Constraint - -from ._baseclass import DerivedVariableBase - -logger = logging.getLogger(__name__) - - -class DerivedVariable(DerivedVariableBase): - """Derivation of variable `lwp`.""" - - # Required variables - required = [ - { - 'short_name': 'clwvi' - }, - { - 'short_name': 'clivi' - }, - ] - - @staticmethod - def calculate(cubes): - """Compute liquid water path. - - Note - ---- - Some datasets output the variable `clwvi` which only contains `lwp`. In - these cases, the input `clwvi` cube is just returned. - - """ - clwvi_cube = cubes.extract_strict( - Constraint(name='atmosphere_cloud_condensed_water_content')) - clivi_cube = cubes.extract_strict( - Constraint(name='atmosphere_cloud_ice_content')) - - dataset = clwvi_cube.attributes.get('model_id') - project = clwvi_cube.attributes.get('project_id') - # Should we check that the model_id/project_id are the same on both - # cubes? - - bad_datasets = [ - 'CESM1-CAM5-1-FV2', - 'CESM1-CAM5', - 'CMCC-CESM', - 'CMCC-CM', - 'CMCC-CMS', - 'IPSL-CM5A-MR', - 'IPSL-CM5A-LR', - 'IPSL-CM5B-LR', - 'CCSM4', - 'IPSL-CM5A-MR', - 'MIROC-ESM', - 'MIROC-ESM-CHEM', - 'MIROC-ESM', - 'CSIRO-Mk3-6-0', - 'MPI-ESM-MR', - 'MPI-ESM-LR', - 'MPI-ESM-P', - ] - if (project in ["CMIP5", "CMIP5_ETHZ"] and dataset in bad_datasets): - logger.info( - "Assuming that variable clwvi from %s dataset %s " - "contains only liquid water", project, dataset) - lwp_cube = clwvi_cube - else: - lwp_cube = clwvi_cube - clivi_cube - - return lwp_cube diff --git a/esmvaltool/preprocessor/_derive/nbp_grid.py b/esmvaltool/preprocessor/_derive/nbp_grid.py deleted file mode 100644 index 5a07fcaed7..0000000000 --- a/esmvaltool/preprocessor/_derive/nbp_grid.py +++ /dev/null @@ -1,39 +0,0 @@ -"""Derivation of variable `nbp_grid`.""" - -import iris -from iris import Constraint - -from ._baseclass import DerivedVariableBase - - -class DerivedVariable(DerivedVariableBase): - """Derivation of variable `nbp_grid`.""" - - # Required variables - required = [{ - 'short_name': 'nbp', - 'fx_files': ['sftlf'], - }] - - @staticmethod - def calculate(cubes): - """Compute net biome production relative to grid cell area. - - Note - ---- - By default, `nbp` is defined relative to land area. For easy spatial - integration, the original quantity is multiplied by the land area - fraction (`sftlf`), so that the resuting derived variable is defined - relative to the grid cell area. This correction is only relevant for - coastal regions. 
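Written out, the correction below is simply::

    nbp_grid = nbp * sftlf / 100.0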
- """ - nbp_cube = cubes.extract_strict( - Constraint(name='surface_net_downward_mass_flux_of_carbon_dioxide_' - 'expressed_as_carbon_due_to_all_land_processes')) - try: - sftlf_cube = cubes.extract_strict( - Constraint(name='land_area_fraction')) - nbp_cube.data *= sftlf_cube.data / 100.0 - except iris.exceptions.ConstraintMismatchError: - pass - return nbp_cube diff --git a/esmvaltool/preprocessor/_derive/netcre.py b/esmvaltool/preprocessor/_derive/netcre.py deleted file mode 100644 index 7ee9a496a8..0000000000 --- a/esmvaltool/preprocessor/_derive/netcre.py +++ /dev/null @@ -1,44 +0,0 @@ -"""Derivation of variable `netcre`.""" - -from ._baseclass import DerivedVariableBase -from .lwcre import DerivedVariable as Lwcre -from .swcre import DerivedVariable as Swcre - - -class DerivedVariable(DerivedVariableBase): - """Derivation of variable `netcre`.""" - - # Required variables - required = [ - { - 'short_name': 'rlut' - }, - { - 'short_name': 'rlutcs' - }, - { - 'short_name': 'rsut' - }, - { - 'short_name': 'rsutcs' - }, - ] - - @staticmethod - def calculate(cubes): - """Compute net cloud radiative effect. - - Note - ---- - Calculate net cloud radiative effect as sum of longwave and shortwave - cloud radiative effects. - """ - lwcre_var = Lwcre() - swcre_var = Swcre() - lwcre_cube = lwcre_var.calculate(cubes) - swcre_cube = swcre_var.calculate(cubes) - - netcre_cube = lwcre_cube + swcre_cube - netcre_cube.units = lwcre_cube.units - - return netcre_cube diff --git a/esmvaltool/preprocessor/_derive/rlns.py b/esmvaltool/preprocessor/_derive/rlns.py deleted file mode 100644 index 2a80e6386a..0000000000 --- a/esmvaltool/preprocessor/_derive/rlns.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Derivation of variable `rlns`.""" - -from iris import Constraint - -from ._baseclass import DerivedVariableBase - - -class DerivedVariable(DerivedVariableBase): - """Derivation of variable `rlns`.""" - - # Required variables - required = [ - { - 'short_name': 'rlds' - }, - { - 'short_name': 'rlus' - }, - ] - - @staticmethod - def calculate(cubes): - """Compute surface net downward longwave radiation.""" - rlds_cube = cubes.extract_strict( - Constraint(name='surface_downwelling_longwave_flux_in_air')) - rlus_cube = cubes.extract_strict( - Constraint(name='surface_upwelling_longwave_flux_in_air')) - - rlns_cube = rlds_cube - rlus_cube - - return rlns_cube diff --git a/esmvaltool/preprocessor/_derive/rsns.py b/esmvaltool/preprocessor/_derive/rsns.py deleted file mode 100644 index 31a6bad867..0000000000 --- a/esmvaltool/preprocessor/_derive/rsns.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Derivation of variable `rsns`.""" - -from iris import Constraint - -from ._baseclass import DerivedVariableBase - - -class DerivedVariable(DerivedVariableBase): - """Derivation of variable `rsns`.""" - - # Required variables - required = [ - { - 'short_name': 'rsds' - }, - { - 'short_name': 'rsus' - }, - ] - - @staticmethod - def calculate(cubes): - """Compute surface net downward shortwave radiation.""" - rsds_cube = cubes.extract_strict( - Constraint(name='surface_downwelling_shortwave_flux_in_air')) - rsus_cube = cubes.extract_strict( - Constraint(name='surface_upwelling_shortwave_flux_in_air')) - - rsns_cube = rsds_cube - rsus_cube - - return rsns_cube diff --git a/esmvaltool/preprocessor/_derive/rsnt.py b/esmvaltool/preprocessor/_derive/rsnt.py deleted file mode 100644 index 1a71035e3b..0000000000 --- a/esmvaltool/preprocessor/_derive/rsnt.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Derivation of variable `rsnt`.""" - -from iris import 
Constraint - -from ._baseclass import DerivedVariableBase - - -class DerivedVariable(DerivedVariableBase): - """Derivation of variable `rsnt`.""" - - # Required variables - required = [ - { - 'short_name': 'rsdt' - }, - { - 'short_name': 'rsut' - }, - ] - - @staticmethod - def calculate(cubes): - """Compute TOA net downward shortwave radiation.""" - rsdt_cube = cubes.extract_strict( - Constraint(name='toa_incoming_shortwave_flux')) - rsut_cube = cubes.extract_strict( - Constraint(name='toa_outgoing_shortwave_flux')) - - rsnt_cube = rsdt_cube - rsut_cube - - return rsnt_cube diff --git a/esmvaltool/preprocessor/_derive/rtnt.py b/esmvaltool/preprocessor/_derive/rtnt.py deleted file mode 100644 index 590b296e58..0000000000 --- a/esmvaltool/preprocessor/_derive/rtnt.py +++ /dev/null @@ -1,36 +0,0 @@ -"""Derivation of variable `rtnt`.""" - -from iris import Constraint - -from ._baseclass import DerivedVariableBase - - -class DerivedVariable(DerivedVariableBase): - """Derivation of variable `rtnt`.""" - - # Required variables - required = [ - { - 'short_name': 'rsdt' - }, - { - 'short_name': 'rsut' - }, - { - 'short_name': 'rlut' - }, - ] - - @staticmethod - def calculate(cubes): - """Compute TOA net downward total radiation.""" - rsdt_cube = cubes.extract_strict( - Constraint(name='toa_incoming_shortwave_flux')) - rsut_cube = cubes.extract_strict( - Constraint(name='toa_outgoing_shortwave_flux')) - rlut_cube = cubes.extract_strict( - Constraint(name='toa_outgoing_longwave_flux')) - - rtnt_cube = rsdt_cube - rsut_cube - rlut_cube - - return rtnt_cube diff --git a/esmvaltool/preprocessor/_derive/sm.py b/esmvaltool/preprocessor/_derive/sm.py deleted file mode 100644 index be69f13803..0000000000 --- a/esmvaltool/preprocessor/_derive/sm.py +++ /dev/null @@ -1,37 +0,0 @@ -"""Derivation of variable `sm`.""" - -import cf_units -import numpy as np -from iris import Constraint - -from ._baseclass import DerivedVariableBase - - -class DerivedVariable(DerivedVariableBase): - """Derivation of variable `sm`.""" - - # Required variables - required = [{'short_name': 'mrsos'}] - - @staticmethod - def calculate(cubes): - """Compute soil moisture. - - Note - ---- - Convert moisture content of soil layer (kg/m2) into volumetric soil - moisture (m3/m3), assuming a density of water of 998.2 kg/m3 (at a - temperature of 20 deg C).
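Written out with units, the conversion below is::

    sm [m3 m-3] = mrsos [kg m-2] / (layer_thickness [m] * 998.2 [kg m-3])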
- - """ - mrsos_cube = cubes.extract_strict( - Constraint(name='moisture_content_of_soil_layer')) - - depth = mrsos_cube.coord('depth').bounds - layer_thickness = depth[..., 1] - depth[..., 0] - - sm_cube = mrsos_cube / layer_thickness / 998.2 - sm_cube.units = cf_units.Unit('m3 m^-3') - sm_cube.data = np.ma.array(sm_cube.data, dtype=np.dtype('float32')) - - return sm_cube diff --git a/esmvaltool/preprocessor/_derive/swcre.py b/esmvaltool/preprocessor/_derive/swcre.py deleted file mode 100644 index ae89777547..0000000000 --- a/esmvaltool/preprocessor/_derive/swcre.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Derivation of variable `swcre`.""" - -from iris import Constraint - -from ._baseclass import DerivedVariableBase - - -class DerivedVariable(DerivedVariableBase): - """Derivation of variable `swcre`.""" - - # Required variables - required = [ - { - 'short_name': 'rsut' - }, - { - 'short_name': 'rsutcs' - }, - ] - - @staticmethod - def calculate(cubes): - """Compute shortwave cloud radiative effect.""" - rsut_cube = cubes.extract_strict( - Constraint(name='toa_outgoing_shortwave_flux')) - rsutcs_cube = cubes.extract_strict( - Constraint(name='toa_outgoing_shortwave_flux_assuming_clear_sky')) - - swcre_cube = rsutcs_cube - rsut_cube - - return swcre_cube diff --git a/esmvaltool/preprocessor/_derive/toz.py b/esmvaltool/preprocessor/_derive/toz.py deleted file mode 100644 index b3d101389a..0000000000 --- a/esmvaltool/preprocessor/_derive/toz.py +++ /dev/null @@ -1,192 +0,0 @@ -"""Derivation of variable `toz`.""" - -import cf_units -import iris -import numba -import numpy as np -from scipy import constants - -from ._baseclass import DerivedVariableBase - -# Constants -AVOGADRO_CONST = constants.value('Avogadro constant') -AVOGADRO_CONST_UNIT = constants.unit('Avogadro constant') -STANDARD_GRAVITY = 9.81 -STANDARD_GRAVITY_UNIT = cf_units.Unit('m s^-2') -MW_AIR = 29 -MW_AIR_UNIT = cf_units.Unit('g mol^-1') -MW_O3 = 48 -MW_O3_UNIT = cf_units.Unit('g mol^-1') -DOBSON_UNIT = cf_units.Unit('2.69e20 m^-2') - - -class DerivedVariable(DerivedVariableBase): - """Derivation of variable `toz`.""" - - # Required variables - required = [ - { - 'short_name': 'tro3' - }, - { - 'short_name': 'ps' - }, - ] - - @staticmethod - def calculate(cubes): - """Compute total column ozone. - - Note - ---- - The surface pressure is used as a lower integration bound. A fixed - upper integration bound of 0 Pa is used. - - """ - tro3_cube = cubes.extract_strict( - iris.Constraint(name='mole_fraction_of_ozone_in_air')) - ps_cube = cubes.extract_strict( - iris.Constraint(name='surface_air_pressure')) - - p_layer_widths = _pressure_level_widths( - tro3_cube, ps_cube, top_limit=0.0) - toz_cube = ( - tro3_cube * p_layer_widths / STANDARD_GRAVITY * MW_O3 / MW_AIR) - toz_cube = toz_cube.collapsed('air_pressure', iris.analysis.SUM) - toz_cube.units = (tro3_cube.units * p_layer_widths.units / - STANDARD_GRAVITY_UNIT * MW_O3_UNIT / MW_AIR_UNIT) - - # Convert from kg m^-2 to Dobson unit (2.69e20 m^-2 ) - toz_cube = toz_cube / MW_O3 * AVOGADRO_CONST - toz_cube.units = toz_cube.units / MW_O3_UNIT * AVOGADRO_CONST_UNIT - toz_cube.convert_units(DOBSON_UNIT) - toz_cube.data = np.ma.array(toz_cube.data, dtype=np.dtype('float32')) - - return toz_cube - - -# Helper functions -def _pressure_level_widths(tro3_cube, ps_cube, top_limit=0.0): - """Create a cube with pressure level widths. - - This is done by taking a 2D surface pressure field as lower bound. 
- - Parameters - ---------- - tro3_cube : iris.cube.Cube - `Cube` containing `mole_fraction_of_ozone_in_air`. - ps_cube : iris.cube.Cube - `Cube` containing `surface_air_pressure`. - top_limit : double - Pressure in Pa. - - Returns - ------- - iris.cube.Cube - `Cube` of same shape as `tro3_cube` containing pressure level widths. - - """ - pressure_array = _create_pressure_array(tro3_cube, ps_cube, top_limit) - - data = _apply_pressure_level_widths(pressure_array) - p_level_widths_cube = tro3_cube.copy(data=data) - p_level_widths_cube.rename('pressure level widths') - p_level_widths_cube.units = ps_cube.units - - return p_level_widths_cube - - -def _create_pressure_array(tro3_cube, ps_cube, top_limit): - """Create an array filled with the `air_pressure` coord values. - - The array is created from the `tro3_cube` with the same dimensions - as `tro3_cube`. This array is then sandwiched with a 2D array - containing the surface pressure and a 2D array containing the top - pressure limit. - """ - # Create 4D array filled with pressure level values - p_levels = tro3_cube.coord('air_pressure').points - p_4d_array = iris.util.broadcast_to_shape(p_levels, tro3_cube.shape, [1]) - - # Create 4d array filled with surface pressure values - shape = tro3_cube.shape - ps_4d_array = iris.util.broadcast_to_shape(ps_cube.data, shape, [0, 2, 3]) - - # Set pressure levels below the surface pressure to NaN - pressure_4d = np.where((ps_4d_array - p_4d_array) < 0, np.NaN, p_4d_array) - - # Make top_limit last pressure level - top_limit_array = np.ones(ps_cube.shape) * top_limit - data = top_limit_array[:, np.newaxis, :, :] - pressure_4d = np.concatenate((pressure_4d, data), axis=1) - - # Make surface pressure the first pressure level - data = ps_cube.data[:, np.newaxis, :, :] - pressure_4d = np.concatenate((data, pressure_4d), axis=1) - - return pressure_4d - - -def _apply_pressure_level_widths(array, air_pressure_axis=1): - """Compute pressure level widths. - - For a 1D array with pressure level columns, return a 1D array with - pressure level widths. - """ - return np.apply_along_axis(_p_level_widths, air_pressure_axis, array) - - -@numba.jit() # ~10x faster -def _p_level_widths(array): - """Create pressure level widths. - - The array with pressure levels is assumed to be monotonic and the - values are decreasing. - - The first element is the lower boundary (surface pressure), the last - value is the upper boundary. Thicknesses are only calculated for the - values between these boundaries, the returned array, therefore, - contains two elements less. 
- - >>> _p_level_widths(np.array([1020, 1000, 700, 500, 5])) - array([170., 250., 595.]) - - >>> _p_level_widths(np.array([990, np.NaN, 700, 500, 5])) - array([ 0., 390., 595.]) - """ - surface_pressure = array[0] - top_limit = array[-1] - array = array[1:-1] - - p_level_widths = np.ones(array.shape) * np.NAN - - last_pressure_level = len(array) - 1 - for i, val in enumerate(array): - # numba would otherwise initialize it to 0 and - # hide bugs that would occur in raw Python - bounds_width = np.NAN - if np.isnan(val): - bounds_width = 0 - else: - # Distance to lower bound - if i == 0 or np.isnan(array[i - 1]): - # First pressure level with value - dist_to_lower_bound = surface_pressure - val - else: - dist_to_lower_bound = 0.5 * (array[i - 1] - val) - - # Distance to upper bound - if i == last_pressure_level: # last pressure level - dist_to_upper_bound = val - top_limit - else: - dist_to_upper_bound = 0.5 * (val - array[i + 1]) - - # Check monotonicity - all distances must be >= 0 - if dist_to_lower_bound < 0.0 or dist_to_upper_bound < 0.0: - raise ValueError("Pressure level value increased with " - "height.") - - bounds_width = dist_to_lower_bound + dist_to_upper_bound - - p_level_widths[i] = bounds_width - return p_level_widths diff --git a/esmvaltool/preprocessor/_download.py b/esmvaltool/preprocessor/_download.py deleted file mode 100644 index f89bc2ddcc..0000000000 --- a/esmvaltool/preprocessor/_download.py +++ /dev/null @@ -1,80 +0,0 @@ -"""Functions for downloading climate data files""" -import logging -import os -import subprocess - -from .._data_finder import get_start_end_year, select_files - -logger = logging.getLogger(__name__) - - -def synda_search(variable): - """Search files using synda.""" - query = { - 'model': variable.get('dataset'), - 'project': variable.get('project'), - 'cmor_table': variable.get('mip'), - 'ensemble': variable.get('ensemble'), - 'experiment': variable.get('exp'), - 'variable': variable.get('short_name'), - } - - query = {facet: value for facet, value in query.items() if value} - - query = ("{}='{}'".format(facet, value) for facet, value in query.items()) - - cmd = ['synda', 'search', '--file'] - cmd.extend(query) - cmd = ' '.join(cmd) - logger.debug("Running: %s", cmd) - result = subprocess.check_output(cmd, shell=True, universal_newlines=True) - logger.debug('Result:\n%s', result.strip()) - - files = (l.split()[-1] for l in result.split('\n') if l.startswith('new')) - files = select_files(files, variable['start_year'], variable['end_year']) - - # filter partially overlapping files - intervals = {get_start_end_year(name): name for name in files} - files = [] - for (start, end), filename in intervals.items(): - for _start, _end in intervals: - if start == _start and end == _end: - continue - if start >= _start and end <= _end: - break - else: - files.append(filename) - - logger.debug("Selected files:\n%s", '\n'.join(files)) - - return files - - -def synda_download(synda_name, dest_folder): - """Download file using synda.""" - filename = '.'.join(synda_name.split('.')[-2:]) - local_file = os.path.join(dest_folder, filename) - - if not os.path.exists(local_file): - cmd = [ - 'synda', 'get', '--dest_folder={}'.format(dest_folder), - '--verify_checksum', synda_name - ] - cmd = ' '.join(cmd) - logger.debug("Running: %s", cmd) - subprocess.check_call(cmd, shell=True) - - return local_file - - -def download(files, dest_folder): - """Download files that are not available locally""" - if not os.path.exists(dest_folder): - os.makedirs(dest_folder) - - 
local_files = [] - for name in files: - local_file = synda_download(synda_name=name, dest_folder=dest_folder) - local_files.append(local_file) - - return local_files diff --git a/esmvaltool/preprocessor/_io.py b/esmvaltool/preprocessor/_io.py deleted file mode 100644 index 71e7144058..0000000000 --- a/esmvaltool/preprocessor/_io.py +++ /dev/null @@ -1,267 +0,0 @@ -"""Functions for loading and saving cubes.""" -import copy -import logging -import os -import shutil -from collections import OrderedDict -from itertools import groupby - -import numpy as np -import iris -import iris.exceptions -import yaml - -from .._config import use_legacy_iris -from .._task import write_ncl_settings - -logger = logging.getLogger(__name__) - -GLOBAL_FILL_VALUE = 1e+20 - -DATASET_KEYS = { - 'mip', -} -VARIABLE_KEYS = { - 'reference_dataset', - 'alternative_dataset', -} - - -def _get_attr_from_field_coord(ncfield, coord_name, attr): - if coord_name is not None: - attrs = ncfield.cf_group[coord_name].cf_attrs() - attr_val = [value for (key, value) in attrs if key == attr] - if attr_val: - return attr_val[0] - return None - - -def concatenate_callback(raw_cube, field, _): - """Use this callback to fix anything Iris tries to break.""" - # Remove attributes that cause issues with merging and concatenation - for attr in ['creation_date', 'tracking_id', 'history']: - if attr in raw_cube.attributes: - del raw_cube.attributes[attr] - for coord in raw_cube.coords(): - # Iris chooses to change longitude and latitude units to degrees - # regardless of value in file, so reinstating file value - if coord.standard_name in ['longitude', 'latitude']: - units = _get_attr_from_field_coord(field, coord.var_name, 'units') - if units is not None: - coord.units = units - - -def load(file, callback=None): - """Load iris cubes from files.""" - logger.debug("Loading:\n%s", file) - raw_cubes = iris.load_raw(file, callback=callback) - if not raw_cubes: - raise Exception('Can not load cubes from {0}'.format(file)) - for cube in raw_cubes: - cube.attributes['source_file'] = file - return raw_cubes - - -def _fix_cube_attributes(cubes): - """Unify attributes of different cubes to allow concatenation.""" - attributes = {} - for cube in cubes: - for (attr, val) in cube.attributes.items(): - if attr not in attributes: - attributes[attr] = val - else: - if not np.array_equal(val, attributes[attr]): - attributes[attr] = '{};{}'.format( - str(attributes[attr]), str(val)) - for cube in cubes: - cube.attributes = attributes - - -def concatenate(cubes): - """Concatenate all cubes after fixing metadata.""" - _fix_cube_attributes(cubes) - try: - cube = iris.cube.CubeList(cubes).concatenate_cube() - return cube - except iris.exceptions.ConcatenateError as ex: - logger.error('Can not concatenate cubes: %s', ex) - logger.error('Cubes:') - for cube in cubes: - logger.error(cube) - raise ex - - -def save(cubes, filename, optimize_access='', compress=False, **kwargs): - """ - Save iris cubes to file. - - Parameters - ---------- - cubes: iterable of iris.cube.Cube - Data cubes to be saved - - filename: str - Name of target file - - optimize_access: str - Set internal NetCDF chunking to favour a reading scheme - - Values can be map or timeseries, which improve performance when - reading the file one map or time series at a time. - Users can also provide a coordinate or a list of coordinates. 
In that - case the better performance will be avhieved by loading all the values - in that coordinate at a time - - compress: bool, optional - Use NetCDF internal compression. - - Returns - ------- - str - filename - - """ - # Rename some arguments - kwargs['target'] = filename - kwargs['zlib'] = compress - - dirname = os.path.dirname(filename) - if not os.path.exists(dirname): - os.makedirs(dirname) - - if (os.path.exists(filename) - and all(cube.has_lazy_data() for cube in cubes)): - logger.debug( - "Not saving cubes %s to %s to avoid data loss. " - "The cube is probably unchanged.", cubes, filename) - return filename - - logger.debug("Saving cubes %s to %s", cubes, filename) - if optimize_access: - cube = cubes[0] - if optimize_access == 'map': - dims = set( - cube.coord_dims('latitude') + cube.coord_dims('longitude')) - elif optimize_access == 'timeseries': - dims = set(cube.coord_dims('time')) - else: - dims = tuple() - for coord_dims in (cube.coord_dims(dimension) - for dimension in optimize_access.split(' ')): - dims += coord_dims - dims = set(dims) - - kwargs['chunksizes'] = tuple( - length if index in dims else 1 - for index, length in enumerate(cube.shape)) - - if not use_legacy_iris(): - kwargs['fill_value'] = GLOBAL_FILL_VALUE - - iris.save(cubes, **kwargs) - - return filename - - -def _get_debug_filename(filename, step): - """Get a filename for debugging the preprocessor.""" - dirname = os.path.splitext(filename)[0] - if os.path.exists(dirname) and os.listdir(dirname): - num = int(sorted(os.listdir(dirname)).pop()[:2]) + 1 - else: - num = 0 - filename = os.path.join(dirname, '{:02}_{}.nc'.format(num, step)) - return filename - - -def cleanup(files, remove=None): - """Clean up after running the preprocessor.""" - if remove is None: - remove = [] - - for path in remove: - if os.path.isdir(path): - shutil.rmtree(path) - elif os.path.isfile(path): - os.remove(path) - - return files - - -def _ordered_safe_dump(data, stream): - """Write data containing OrderedDicts to yaml file.""" - - class _OrderedDumper(yaml.SafeDumper): - pass - - def _dict_representer(dumper, data): - return dumper.represent_mapping( - yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, data.items()) - - _OrderedDumper.add_representer(OrderedDict, _dict_representer) - return yaml.dump(data, stream, _OrderedDumper) - - -def write_metadata(products, write_ncl=False): - """Write product metadata to file.""" - output_files = [] - for output_dir, prods in groupby(products, - lambda p: os.path.dirname(p.filename)): - sorted_products = sorted( - prods, - key=lambda p: ( - p.attributes.get('recipe_dataset_index', 1e6), - p.attributes.get('dataset', ''), - ), - ) - metadata = OrderedDict() - for product in sorted_products: - if isinstance(product.attributes.get('exp'), (list, tuple)): - product.attributes = dict(product.attributes) - product.attributes['exp'] = '-'.join(product.attributes['exp']) - metadata[product.filename] = product.attributes - - output_filename = os.path.join(output_dir, 'metadata.yml') - output_files.append(output_filename) - with open(output_filename, 'w') as file: - _ordered_safe_dump(metadata, file) - if write_ncl: - output_files.append(_write_ncl_metadata(output_dir, metadata)) - - return output_files - - -def _write_ncl_metadata(output_dir, metadata): - """Write NCL metadata files to output_dir.""" - variables = [copy.deepcopy(v) for v in metadata.values()] - - for variable in variables: - fx_files = variable.pop('fx_files', {}) - for fx_type in fx_files: - variable[fx_type] = fx_files[fx_type] - 
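# Illustrative sketch of the split performed below (values hypothetical):
# given variables [{'short_name': 'tas', 'dataset': 'A'},
#                  {'short_name': 'tas', 'dataset': 'B'}],
# the shared 'short_name' ends up in variable_info, while the differing
# 'dataset' values go into one dataset_info entry per variable.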
- info = {'input_file_info': variables} - - # Split input_file_info into dataset and variable properties - # dataset keys and keys with non-identical values will be stored - # in dataset_info, the rest in variable_info - variable_info = {} - info['variable_info'] = [variable_info] - info['dataset_info'] = [] - for variable in variables: - dataset_info = {} - info['dataset_info'].append(dataset_info) - for key in variable: - dataset_specific = any( - variable[key] != var.get(key, object()) for var in variables) - if ((dataset_specific or key in DATASET_KEYS) - and key not in VARIABLE_KEYS): - dataset_info[key] = variable[key] - else: - variable_info[key] = variable[key] - - filename = os.path.join(output_dir, - variable_info['short_name'] + '_info.ncl') - write_ncl_settings(info, filename) - - return filename diff --git a/esmvaltool/preprocessor/_mapping.py b/esmvaltool/preprocessor/_mapping.py deleted file mode 100644 index 17a47e7481..0000000000 --- a/esmvaltool/preprocessor/_mapping.py +++ /dev/null @@ -1,238 +0,0 @@ -# -*- coding: utf-8 -*- -"""Provides mapping of a cube.""" - -import collections - -import iris -import numpy as np -import six - - -def _is_single_item(testee): - """ - Check if testee is a single item. - - Return whether this is a single item, rather than an iterable. - We count string types as 'single', also. - """ - return (isinstance(testee, six.string_types) - or not isinstance(testee, collections.Iterable)) - - -def _as_list_of_coords(cube, names_or_coords): - """Convert a name, coord, or list of names/coords to a list of coords.""" - # If not iterable, convert to list of a single item - if _is_single_item(names_or_coords): - names_or_coords = [names_or_coords] - coords = [] - for name_or_coord in names_or_coords: - if isinstance(name_or_coord, (iris.coords.Coord, six.string_types)): - coords.append(cube.coord(name_or_coord)) - else: - # Don't know how to handle this type - msg = ("Don't know how to handle coordinate of type %s. " - "Ensure all coordinates are of type six.string_types " - "or iris.coords.Coord.") % (type(name_or_coord), ) - raise TypeError(msg) - return coords - - -def ref_to_dims_index_as_coordinate(cube, ref): - """Get dims for coord ref.""" - coord = _as_list_of_coords(cube, ref)[0] - dims = cube.coord_dims(coord) - if not dims: - msg = ('Requested an iterator over a coordinate ({}) ' - 'which does not describe a dimension.') - msg = msg.format(coord.name()) - raise ValueError(msg) - return dims - - -def ref_to_dims_index_as_index(cube, ref): - """Get dim for index ref.""" - try: - dim = int(ref) - except (ValueError, TypeError): - raise ValueError('{} Incompatible type {} for ' - 'slicing'.format(ref, type(ref))) - if dim < 0 or dim > cube.ndim: - msg = ('Requested an iterator over a dimension ({}) ' - 'which does not exist.'.format(dim)) - raise ValueError(msg) - dims = [dim] - return dims - - -def ref_to_dims_index(cube, ref_to_slice): - """ - Map a list of :class:`iris.coords.DimCoord` to a tuple of indices. - - This method finds the indices of the dimensions in a cube that collectively - correspond to the given list of :class:`iris.coords.DimCoord`. - - Parameters - ---------- - cube: :class:`iris.cube.Cube` - The cube to examine. - ref_to_slice: iterable of or single :class:`iris.coords.DimCoord` - Specification of the dimensions in terms of coordinates. - - Returns - ------- - tuple: - A tuple of indices corresponding to the given dimensions. 
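For a hypothetical cube whose leading dimension is described by the 'time' coordinate, both a coordinate name and a plain index map to the same result::

    ref_to_dims_index(cube, 'time')       # -> [0]
    ref_to_dims_index(cube, ['time', 0])  # -> [0] (duplicates are dropped)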
- """ - # Required to handle a mix between types - if _is_single_item(ref_to_slice): - ref_to_slice = [ref_to_slice] - dim_to_slice = [] - dim_to_slice_set = set() - for ref in ref_to_slice: - try: - dims = ref_to_dims_index_as_coordinate(cube, ref) - except TypeError: - dims = ref_to_dims_index_as_index(cube, ref) - for dim in dims: - if dim not in dim_to_slice_set: - dim_to_slice.append(dim) - dim_to_slice_set.add(dim) - return dim_to_slice - - -def get_associated_coords(cube, dimensions): - """ - Return all coords containing any of the given dimensions. - - Return all coords, dimensional and auxiliary, that contain any of - the given dimensions. - """ - dims = [] - dim_set = set() - for dim in dimensions: - if dim not in dim_set: - dims.append(dim) - dim_set.add(dim) - dim_coords = [] - for i in dims: - coords = cube.coords(contains_dimension=i, dim_coords=True) - if coords: - dim_coords.append(coords[0]) - aux_coords = [] - for i in dims: - coords = cube.coords(contains_dimension=i, dim_coords=False) - if coords: - aux_coords.append(coords[0]) - return dim_coords, aux_coords - - -def get_empty_data(shape, dtype=np.float32): - """ - Create an empty data object of the given shape. - - Creates an emtpy data object of the given shape, potentially of the lazy - kind from biggus or dask, depending on the used iris version. - """ - data = np.empty(shape, dtype=dtype) - mask = np.empty(shape, dtype=bool) - return np.ma.masked_array(data, mask) - - -def get_slice_spec(cube, ref_to_slice): - """ - Turn a slice reference into a specification for the slice. - - Turns a slice reference into a specification comprised of the shape as well - as the relevant dimensional and auxiliary coordinates. - """ - slice_dims = ref_to_dims_index(cube, ref_to_slice) - slice_shape = tuple(cube.shape[d] for d in slice_dims) - dim_coords, aux_coords = get_associated_coords(cube, slice_dims) - return slice_shape, dim_coords, aux_coords - - -def index_iterator(dims_to_slice, shape): - """ - Return iterator for subsets of multidimensional objects. - - An iterator over a multidimensional object, giving both source and - destination indices. - """ - dst_slices = (slice(None, None),) * len(dims_to_slice) - dims = [1 if n in dims_to_slice else i for n, i in enumerate(shape)] - for index_tuple in np.ndindex(*dims): - src_ind = tuple( - slice(None, None) if n in dims_to_slice else i - for n, i in enumerate(index_tuple)) - dst_ind = tuple(i for n, i in enumerate(index_tuple) - if n not in dims_to_slice) + dst_slices - yield src_ind, dst_ind - - -def get_slice_coords(cube): - """Return ordered set of unique coordinates.""" - slice_coords = [] - slice_set = set() - for i in range(cube.ndim): - coords = cube.coords(contains_dimension=i) - for coord in coords: - if coord not in slice_set: - slice_coords.append(coord) - slice_set.add(coord) - return slice_coords - - -def map_slices(src, func, src_rep, dst_rep): - """ - Map slices of a cube, replacing them with different slices. - - This method is similar to the standard cube collapsed and aggregated_by - methods, however, where they completely remove the mapped dimensions, this - method allows for their replacement with other dimensions. - The new dimensions are specified with a destination representant and will - be the last dimensions of the resulting cube, even if the removed - dimensions are can be any of the source cubes dimensions. - - Parameters - ---------- - src: :class:`iris.cube.Cube` - Source cube to be mapped. 
- func: callable - Callable that takes a single cube and returns a single numpy array. - src_rep: :class:`iris.cube.Cube` - Source representant that specifies the dimensions to be removed from - the source cube. - dst_rep: :class:`iris.cube.Cube` - Destination representant that specifies the shape of the new - dimensions. - - Returns - ------- - :class:`iris.cube.Cube`: - New cube that has the shape of the source cube with the removed - dimensions replaced with the destination dimensions. - All coordinates that span any of the removed dimensions are removed; - :class:`iris.coords.DimCoord` for the new dimensions are taken from - `dst_rep`. - """ - ref_to_slice = get_slice_coords(src_rep) - src_slice_dims = ref_to_dims_index(src, ref_to_slice) - src_keep_dims = list(set(range(src.ndim)) - set(src_slice_dims)) - src_keep_spec = get_slice_spec(src, src_keep_dims) - res_shape = src_keep_spec[0] + dst_rep.shape - dim_coords = src_keep_spec[1] + dst_rep.coords(dim_coords=True) - dim_coords_and_dims = [(c, i) for i, c in enumerate(dim_coords)] - dst = iris.cube.Cube( - data=get_empty_data(res_shape, dtype=src.dtype), - standard_name=src.standard_name, - long_name=src.long_name, - var_name=src.var_name, - units=src.units, - attributes=src.attributes, - cell_methods=src.cell_methods, - dim_coords_and_dims=dim_coords_and_dims, - ) - for src_ind, dst_ind in index_iterator(src_slice_dims, src.shape): - res = func(src[src_ind]) - dst.data[dst_ind] = res - return dst diff --git a/esmvaltool/preprocessor/_mask.py b/esmvaltool/preprocessor/_mask.py deleted file mode 100644 index 72adcef6fc..0000000000 --- a/esmvaltool/preprocessor/_mask.py +++ /dev/null @@ -1,480 +0,0 @@ -""" -_mask.py - -module that performs missing values masking -and geographical area eslection -""" - -import logging -import os - -import cartopy.io.shapereader as shpreader -import iris -import numpy as np -import shapely.vectorized as shp_vect -from iris.analysis import Aggregator -from iris.util import rolling_window - -logger = logging.getLogger(__name__) - - -def _check_dims(cube, mask_cube): - """Check for same dims for mask and data""" - x_dim = cube.coord('longitude').points.ndim - y_dim = cube.coord('latitude').points.ndim - mx_dim = mask_cube.coord('longitude').points.ndim - my_dim = mask_cube.coord('latitude').points.ndim - len_x = len(cube.coord('longitude').points) - len_y = len(cube.coord('latitude').points) - len_mx = len(mask_cube.coord('longitude').points) - len_my = len(mask_cube.coord('latitude').points) - if (x_dim == mx_dim and y_dim == my_dim and len_x == len_mx - and len_y == len_my): - logger.debug('Data cube and fx mask have same dims') - return True - - logger.debug( - 'Data cube and fx mask differ in dims: ' - 'cube: ((%i, %i), grid=(%i, %i)), mask: ((%i, %i), grid=(%i, %i))', - x_dim, y_dim, len_x, len_y, mx_dim, my_dim, len_mx, len_my) - return False - - -def _get_fx_mask(fx_data, fx_option, mask_type): - """Build a 50 percent land or sea mask""" - inmask = np.zeros_like(fx_data, bool) - if mask_type == 'sftlf': - if fx_option == 'land': - # Mask land out - inmask[fx_data > 50.] = True - elif fx_option == 'sea': - # Mask sea out - inmask[fx_data <= 50.] = True - elif mask_type == 'sftof': - if fx_option == 'land': - # Mask land out - inmask[fx_data < 50.] = True - elif fx_option == 'sea': - # Mask sea out - inmask[fx_data >= 50.] = True - elif mask_type == 'sftgif': - if fx_option == 'ice': - # Mask ice out - inmask[fx_data > 50.] 
= True - elif fx_option == 'landsea': - # Mask landsea out - inmask[fx_data <= 50.] = True - - return inmask - - -def _apply_fx_mask(fx_mask, var_data): - """Apply the fx mask.""" - # Broadcast mask - var_mask = np.zeros_like(var_data, bool) - var_mask = np.broadcast_to(fx_mask, var_mask.shape).copy() - - # Apply mask across - if np.ma.is_masked(var_data): - var_mask |= var_data.mask - - # Build the new masked data - var_data = np.ma.array(var_data, mask=var_mask, fill_value=1e+20) - - return var_data - - -def mask_landsea(cube, fx_files, mask_out): - """ - Mask out either land or sea. - - Function that masks out either land mass or seas (oceans, seas and - lakes). - - It uses dedicated fx files (sftlf or sftof) or, in their absence, it - applies a Natural Earth mask (land or ocean contours). Note that the - Natural Earth masks have different resolutions: 10m for land, and 50m - for seas; these are more than enough for ESMValTool's purposes. - - Parameters - ---------- - - * cube (iris.cube.Cube instance): - data cube to be masked. - - * fx_files (list): - list holding the full paths to fx files. - - * mask_out (string): - either "land" to mask out land mass or "sea" to mask out seas. - - Returns - ------- - masked iris cube - - """ - # Dict to store the Natural Earth masks - cwd = os.path.dirname(__file__) - # ne_10m_land is fast; ne_10m_ocean is very slow - shapefiles = { - 'land': os.path.join(cwd, 'ne_masks/ne_10m_land.shp'), - 'sea': os.path.join(cwd, 'ne_masks/ne_50m_ocean.shp') - } - - if fx_files: - fx_cubes = {} - for fx_file in fx_files: - fx_root = os.path.basename(fx_file).split('_')[0] - fx_cubes[fx_root] = iris.load_cube(fx_file) - - # preserve importance order: try sftlf first, then sftof - if ('sftlf' in fx_cubes.keys() - and _check_dims(cube, fx_cubes['sftlf'])): - landsea_mask = _get_fx_mask(fx_cubes['sftlf'].data, mask_out, - 'sftlf') - cube.data = _apply_fx_mask(landsea_mask, cube.data) - logger.debug("Applying land-sea mask: sftlf") - elif ('sftof' in fx_cubes.keys() - and _check_dims(cube, fx_cubes['sftof'])): - landsea_mask = _get_fx_mask(fx_cubes['sftof'].data, mask_out, - 'sftof') - cube.data = _apply_fx_mask(landsea_mask, cube.data) - logger.debug("Applying land-sea mask: sftof") - else: - if cube.coord('longitude').points.ndim < 2: - cube = _mask_with_shp(cube, shapefiles[mask_out]) - logger.debug( - "Applying land-sea mask from Natural Earth" - " shapefile: \n%s", shapefiles[mask_out]) - else: - logger.error("Use of shapefiles with irregular grids not " - "yet implemented, land-sea mask not applied") - else: - if cube.coord('longitude').points.ndim < 2: - cube = _mask_with_shp(cube, shapefiles[mask_out]) - logger.debug( - "Applying land-sea mask from Natural Earth" - " shapefile: \n%s", shapefiles[mask_out]) - else: - logger.error("Use of shapefiles with irregular grids not " - "yet implemented, land-sea mask not applied") - - return cube - - -def mask_landseaice(cube, fx_files, mask_out): - """ - Mask out either landsea (combined) or ice. - - Function that masks out either landsea (land and seas) or ice (Antarctica - and Greenland and some wee glaciers). It uses dedicated fx files (sftgif). - - Parameters - ---------- - - * cube (iris.cube.Cube instance): - data cube to be masked. - - * fx_files (list): - list holding the full paths to fx files. - - * mask_out (string): - either "landsea" to mask out landsea or "ice" to mask out ice.
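A hedged usage sketch (the fx file path is hypothetical)::

    cube = mask_landseaice(cube, ['/path/to/sftgif_fx.nc'], 'ice')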
- - Returns - ------- - masked iris cube - - """ - # sftgif is the only one so far - if fx_files: - for fx_file in fx_files: - fx_cube = iris.load_cube(fx_file) - - if _check_dims(cube, fx_cube): - landice_mask = _get_fx_mask(fx_cube.data, mask_out, 'sftgif') - cube.data = _apply_fx_mask(landice_mask, cube.data) - logger.debug("Applying landsea-ice mask: sftgif") - else: - logger.warning("Landsea-ice mask could not be found") - - return cube - - -def _get_geometry_from_shp(shapefilename): - """Get the mask geometry out from a shapefile""" - reader = shpreader.Reader(shapefilename) - # Index 0 grabs the lowest resolution mask (no zoom) - main_geom = [contour for contour in reader.geometries()][0] - return main_geom - - -def _mask_with_shp(cube, shapefilename): - """Apply a Natural Earth land/sea mask""" - # Create the region - region = _get_geometry_from_shp(shapefilename) - - # Create a mask for the data - mask = np.zeros(cube.shape, dtype=bool) - - # Create a set of x,y points from the cube - # 1D regular grids - if cube.coord('longitude').points.ndim < 2: - x_p, y_p = np.meshgrid( - cube.coord(axis='X').points, - cube.coord(axis='Y').points) - # 2D irregular grids; spit an error for now - else: - logger.error('No fx-files found (sftlf or sftof)!\n \ - 2D grids are suboptimally masked with\n \ - Natural Earth masks. Exiting.') - - # Wrap around longitude coordinate to match data - x_p_180 = np.where(x_p >= 180., x_p - 360., x_p) - # the NE mask has no points at x = -180 and y = +/-90 - # so we will fool it and apply the mask at (-179, -89, 89) instead - x_p_180 = np.where(x_p_180 == -180., x_p_180 + 1., x_p_180) - y_p_0 = np.where(y_p == -90., y_p + 1., y_p) - y_p_90 = np.where(y_p_0 == 90., y_p_0 - 1., y_p_0) - - # Build mask with vectorization - if len(cube.data.shape) == 3: - mask[:] = shp_vect.contains(region, x_p_180, y_p_90) - elif len(cube.data.shape) == 4: - mask[:, :] = shp_vect.contains(region, x_p_180, y_p_90) - - # Then apply the mask - if isinstance(cube.data, np.ma.MaskedArray): - cube.data.mask |= mask - else: - cube.data = np.ma.masked_array(cube.data, mask) - - return cube - - -# Define a function to perform the custom statistical operation. -# Note: in order to meet the requirements of iris.analysis.Aggregator, it must -# do the calculation over an arbitrary (given) data axis. -def count_spells(data, threshold, axis, spell_length): - """ - Count data occurrences - - Function to calculate the number of points in a sequence where the value - has exceeded a threshold value for at least a certain number of timepoints. - - Generalised to operate on multiple time sequences arranged on a specific - axis of a multidimensional array. - - Args: - - * data (array): - raw data to be compared with value threshold. - - * threshold (float): - threshold point for 'significant' datapoints. - - * axis (int): - number of the array dimension mapping the time sequences. - (Can also be negative, e.g. '-1' means last dimension) - - * spell_length (int): - number of consecutive times at which value > threshold to "count". - - """ - if axis < 0: - # just cope with negative axis numbers - axis += data.ndim - # Threshold the data to find the 'significant' points. - data_hits = data > threshold - # Make an array with data values "windowed" along the time axis. - ############################################################### - # WARNING: default step is = window size i.e.
no overlapping - # if you want overlapping windows set the step to be m*spell_length - # where m is a float - ############################################################### - hit_windows = rolling_window( - data_hits, window=spell_length, step=spell_length, axis=axis) - # Find the windows "full of True-s" (along the added 'window axis'). - full_windows = np.all(hit_windows, axis=axis + 1) - # Count points fulfilling the condition (along the time axis). - spell_point_counts = np.sum(full_windows, axis=axis, dtype=int) - return spell_point_counts - - -def window_counts(mycube, value_threshold, window_size, pctile): - """ - Find data counts in a time window - - Function that returns a flat array containing - the number of data points within a time window `window_size' - per grid point that satisfy a condition - value > value_threshold. - It also returns statistical measures for the flat array - window_counts[0] = array - window_counts[1] = mean(array) - window_counts[2] = std(array) - window_counts[3] = percentile(array, pctile) - """ - # Make an aggregator from the user function. - spell_count = Aggregator( - 'spell_count', count_spells, units_func=lambda units: 1) - - # Calculate the statistic. - counts_windowed_cube = mycube.collapsed( - 'time', - spell_count, - threshold=value_threshold, - spell_length=window_size) - - # if one wants to print the whole array - # np.set_printoptions(threshold=np.nan) - r_p = counts_windowed_cube.data.flatten() - meanr = np.mean(r_p) - stdr = np.std(r_p) - prcr = np.percentile(r_p, pctile) - return r_p, meanr, stdr, prcr - - -def mask_cube_counts(mycube, value_threshold, counts_threshold, window_size): - """Build the counts mask""" - # Make an aggregator from the user function. - spell_count = Aggregator( - 'spell_count', count_spells, units_func=lambda units: 1) - - # Calculate the statistic. - counts_windowed_cube = mycube.collapsed( - 'time', - spell_count, - threshold=value_threshold, - spell_length=window_size) - - mask = counts_windowed_cube.data >= counts_threshold - mask.astype(np.int) - # preserving the original cube metadata - dummyar = np.ones(mycube.data.shape, dtype=mycube.data.dtype) - newmask = dummyar * mask - newmask[newmask == 0] = 1e+20 # np.nan - masked_cube = mycube.copy() - # masked_cube.data = masked_cube.data * newmask - masked_cube.data = newmask - return counts_windowed_cube, newmask, masked_cube - - -def mask_above_threshold(cube, threshold): - """ - Mask above a specific threshold value. - - Takes a value 'threshold' and masks off anything that is above - it in the cube data. Values equal to the threshold are not masked. - """ - cube.data = np.ma.masked_where(cube.data > threshold, cube.data) - return cube - - -def mask_below_threshold(cube, threshold): - """ - Mask below a specific threshold value. - - Takes a value 'threshold' and masks off anything that is below - it in the cube data. Values equal to the threshold are not masked. - """ - cube.data = np.ma.masked_where(cube.data < threshold, cube.data) - return cube - - -def mask_inside_range(cube, minimum, maximum): - """ - Mask inside a specific threshold range. - - Takes a MINIMUM and a MAXIMUM value for the range, and masks off anything - that's between the two in the cube data. - """ - cube.data = np.ma.masked_inside(cube.data, minimum, maximum) - return cube - - -def mask_outside_range(cube, minimum, maximum): - """ - Mask outside a specific threshold range.
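[Editor's note: a toy illustration of count_spells as defined above. With threshold=1 and spell_length=2, the series below is cut into the non-overlapping windows [0, 2], [3, 4], [0, 5]; only [3, 4] lies entirely above the threshold, so the count is 1.]

    import numpy as np
    series = np.array([0., 2., 3., 4., 0., 5.])
    count_spells(series, threshold=1., axis=0, spell_length=2)  # -> 1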
- - Takes a MINIMUM and a MAXIMUM value for the range, and masks off anything - that's outside the two in the cube data. - """ - cube.data = np.ma.masked_outside(cube.data, minimum, maximum) - return cube - - -def mask_fillvalues(products, - threshold_fraction, - min_value=-1.e10, - time_window=1): - """Compute and apply a multi-dataset fillvalues mask""" - combined_mask = None - - logger.debug("Creating fillvalues mask") - used = set() - for product in products: - for cube in product.cubes: - cube.data = np.ma.fix_invalid(cube.data, copy=False) - mask = _get_fillvalues_mask(cube, threshold_fraction, min_value, - time_window) - if combined_mask is None: - combined_mask = np.zeros_like(mask) - # Select only valid (not all masked) pressure levels - n_dims = len(mask.shape) - if n_dims == 2: - valid = ~np.all(mask) - if valid: - combined_mask |= mask - used.add(product) - elif n_dims == 3: - valid = ~np.all(mask, axis=(1, 2)) - combined_mask[valid] |= mask[valid] - if np.any(valid): - used.add(product) - else: - raise NotImplementedError( - "Unable to handle {} dimensional data".format(n_dims)) - - if np.any(combined_mask): - logger.debug("Applying fillvalues mask") - used = {p.copy_provenance() for p in used} - for product in products: - for cube in product.cubes: - cube.data.mask |= combined_mask - for other in used: - if other.filename != product.filename: - product.wasderivedfrom(other) - - return products - - -def _get_fillvalues_mask(cube, threshold_fraction, min_value, time_window): - - # basic checks - if threshold_fraction < 0 or threshold_fraction > 1.0: - raise ValueError( - "Fraction of missing values {} should be between 0 and 1.0".format( - threshold_fraction)) - nr_time_points = len(cube.coord('time').points) - if time_window > nr_time_points: - logger.warning("Time window (in time units) larger " - "than total time span") - - max_counts_per_time_window = nr_time_points / time_window - # round to lower integer - counts_threshold = int(max_counts_per_time_window * threshold_fraction) - - # Make an aggregator - spell_count = Aggregator( - 'spell_count', count_spells, units_func=lambda units: 1) - - # Calculate the statistic. - counts_windowed_cube = cube.collapsed( - 'time', spell_count, threshold=min_value, spell_length=time_window) - - # Create mask - mask = counts_windowed_cube.data < counts_threshold - if np.ma.isMaskedArray(mask): - mask = mask.data | mask.mask - - return mask diff --git a/esmvaltool/preprocessor/_multimodel.py b/esmvaltool/preprocessor/_multimodel.py deleted file mode 100644 index b644b1810e..0000000000 --- a/esmvaltool/preprocessor/_multimodel.py +++ /dev/null @@ -1,347 +0,0 @@ -"""multimodel statistics. - -Functions for multi-model operations -supports a multitude of multimodel statistics -computations; the only requirement is that the -ingested cubes have (TIME-LAT-LON) or (TIME-PLEV-LAT-LON) -dimensions, and consistent units. - -It operates on different (time) spans: -- full: computes stats on full dataset time; -- overlap: computes common time overlap between datasets; - -""" - -import logging -from datetime import datetime -from functools import reduce - -import cf_units -import iris -import numpy as np - -from .._config import use_legacy_iris - -logger = logging.getLogger(__name__) - - -def _get_time_offset(time_unit): - """Return a datetime object equivalent to tunit.""" - # tunit e.g.
'day since 1950-01-01 00:00:00.0000000 UTC' - cfunit = cf_units.Unit(time_unit, calendar=cf_units.CALENDAR_STANDARD) - time_offset = cfunit.num2date(0) - return time_offset - - -def _plev_fix(dataset, pl_idx): - """Extract valid plev data. - - this function takes care of situations - in which certain plevs are completely - masked due to unavailable interpolation - boundaries. - """ - if np.ma.is_masked(dataset): - # keep only the valid plevs - if not np.all(dataset.mask[pl_idx]): - statj = np.ma.array(dataset[pl_idx], mask=dataset.mask[pl_idx]) - else: - logger.debug('All vals in plev are masked, ignoring.') - statj = None - else: - mask = np.zeros_like(dataset[pl_idx], bool) - statj = np.ma.array(dataset[pl_idx], mask=mask) - - return statj - - -def _compute_statistic(datas, statistic_name): - """Compute multimodel statistic.""" - datas = np.ma.array(datas) - statistic = datas[0] - - if statistic_name == 'median': - statistic_function = np.ma.median - elif statistic_name == 'mean': - statistic_function = np.ma.mean - else: - raise NotImplementedError - - # no plevs - if len(datas[0].shape) < 3: - # get all NOT fully masked data - u_data - # datas is per time point - # so we can safely NOT compute stats for single points - if datas.ndim == 1: - u_datas = [data for data in datas] - else: - u_datas = [data for data in datas if not np.all(data.mask)] - if len(u_datas) > 1: - statistic = statistic_function(datas, axis=0) - else: - statistic.mask = True - return statistic - - # plevs - for j in range(statistic.shape[0]): - plev_check = [] - for cdata in datas: - fixed_data = _plev_fix(cdata, j) - if fixed_data is not None: - plev_check.append(fixed_data) - - # check for nr datasets - if len(plev_check) > 1: - plev_check = np.ma.array(plev_check) - statistic[j] = statistic_function(plev_check, axis=0) - else: - statistic.mask[j] = True - - return statistic - - -def _put_in_cube(template_cube, cube_data, statistic, t_axis): - """Quick cube building and saving.""" - if t_axis is None: - times = template_cube.coord('time') - else: - times = iris.coords.DimCoord( - t_axis, - standard_name='time', - units=template_cube.coord('time').units) - lats = template_cube.coord('latitude') - lons = template_cube.coord('longitude') - - # no plevs - if len(template_cube.shape) == 3: - cspec = [(times, 0), (lats, 1), (lons, 2)] - # plevs - elif len(template_cube.shape) == 4: - plev = template_cube.coord('air_pressure') - cspec = [(times, 0), (plev, 1), (lats, 2), (lons, 3)] - elif len(template_cube.shape) == 1: - cspec = [ - (times, 0), - ] - elif len(template_cube.shape) == 2: - # If you're going to hardwire air_pressure into this, - # might as well have depth here too. 
- plev = template_cube.coord('depth') - cspec = [ - (times, 0), - (plev, 1), - ] - - # correct dspec if necessary - fixed_dspec = np.ma.fix_invalid(cube_data, copy=False, fill_value=1e+20) - # put in cube - stats_cube = iris.cube.Cube( - fixed_dspec, dim_coords_and_dims=cspec, long_name=statistic) - coord_names = [coord.name() for coord in template_cube.coords()] - if 'air_pressure' in coord_names: - if len(template_cube.shape) == 3: - stats_cube.add_aux_coord(template_cube.coord('air_pressure')) - - stats_cube.var_name = template_cube.var_name - stats_cube.long_name = template_cube.long_name - stats_cube.standard_name = template_cube.standard_name - stats_cube.units = template_cube.units - return stats_cube - - -def _datetime_to_int_days(cube):  - """Return list of int(days) converted from cube datetime cells.""" - if use_legacy_iris(): - time_cells = [ - cube.coord('time').units.num2date(cell.point) - for cell in cube.coord('time').cells() - ] - else: - time_cells = [cell.point for cell in cube.coord('time').cells()] - - time_unit = cube.coord('time').units.name - time_offset = _get_time_offset(time_unit) - - # extract date info - real_dates = [] - for date_obj in time_cells: - # real_date resets the actual data point day - # to the 1st of the month so that there are no - # wrong overlap indices - # NOTE: this workaround is good only - # for monthly data - real_date = datetime(date_obj.year, date_obj.month, 1, 0, 0, 0) - real_dates.append(real_date) - - days = [(date_obj - time_offset).days for date_obj in real_dates] - return days - - -def _get_overlap(cubes): - """ - Get discrete time overlaps. - - This method gets the bounds of coord time - from the cube and assembles a continuous time - axis with smallest unit 1; then it finds the - overlaps by doing a 1-dim intersect; - takes the floor of first date and - ceil of last date. - """ - all_times = [] - for cube in cubes: - span = _datetime_to_int_days(cube) - start, stop = span[0], span[-1] - all_times.append([start, stop]) - bounds = [range(b[0], b[-1] + 1) for b in all_times] - time_pts = reduce(np.intersect1d, bounds) - if len(time_pts) > 1: - time_bounds_list = [time_pts[0], time_pts[-1]] - return time_bounds_list - - -def _slice_cube(cube, t_1, t_2): - """ - Efficient slicer. - - Simple cube data slicer on indices - of common time-data elements.
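[Editor's note: the overlap logic above reduces each cube's time axis to integer days snapped to the first of the month, so the common period is a plain 1-d intersection. A rough sketch of the idea:]

    import numpy as np
    from functools import reduce
    spans = [[0, 120], [31, 151]]             # per-cube [start, stop] in days
    bounds = [range(b[0], b[-1] + 1) for b in spans]
    overlap = reduce(np.intersect1d, bounds)  # days common to all cubes
    interval = [overlap[0], overlap[-1]]      # -> [31, 120], as in _get_overlap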
- """ - time_pts = [t for t in cube.coord('time').points] - converted_t = _datetime_to_int_days(cube) - idxs = sorted([ - time_pts.index(ii) for ii, jj in zip(time_pts, converted_t) - if t_1 <= jj <= t_2 - ]) - return [idxs[0], idxs[-1]] - - -def _monthly_t(cubes): - """Rearrange time points for monthly data.""" - # get original cubes tpoints - days = {day for cube in cubes for day in _datetime_to_int_days(cube)} - return sorted(days) - - -def _full_time_slice(cubes, ndat, indices, ndatarr, t_idx): - """Construct a contiguous collection over time.""" - for idx_cube, cube in enumerate(cubes): - # reset mask - ndat.mask = True - ndat[indices[idx_cube]] = cube.data - if np.ma.is_masked(cube.data): - ndat.mask[indices[idx_cube]] = cube.data.mask - else: - ndat.mask[indices[idx_cube]] = False - ndatarr[idx_cube] = ndat[t_idx] - - # return time slice - return ndatarr - - -def _assemble_overlap_data(cubes, interval, statistic): - """Get statistical data in iris cubes for OVERLAP.""" - start, stop = interval - sl_1, sl_2 = _slice_cube(cubes[0], start, stop) - stats_dats = np.ma.zeros(cubes[0].data[sl_1:sl_2 + 1].shape) - - # keep this outside the following loop - # this speeds up the code by a factor of 15 - indices = [_slice_cube(cube, start, stop) for cube in cubes] - - for i in range(stats_dats.shape[0]): - time_data = [ - cube.data[indx[0]:indx[1] + 1][i] - for cube, indx in zip(cubes, indices) - ] - stats_dats[i] = _compute_statistic(time_data, statistic) - stats_cube = _put_in_cube( - cubes[0][sl_1:sl_2 + 1], stats_dats, statistic, t_axis=None) - return stats_cube - - -def _assemble_full_data(cubes, statistic): - """Get statistical data in iris cubes for FULL.""" - # all times, new MONTHLY data time axis - time_axis = [float(fl) for fl in _monthly_t(cubes)] - - # new big time-slice array shape - new_shape = [len(time_axis)] + list(cubes[0].shape[1:]) - - # assemble an array to hold all time data - # for all cubes; shape is (ncubes,(plev), lat, lon) - new_arr = np.ma.empty([len(cubes)] + list(new_shape[1:])) - - # data array for stats computation - stats_dats = np.ma.zeros(new_shape) - - # assemble indices list to chop new_arr on - indices_list = [] - - # empty data array to hold time slices - empty_arr = np.ma.empty(new_shape) - - # loop through cubes and populate empty_arr with points - for cube in cubes: - time_redone = _datetime_to_int_days(cube) - oidx = [time_axis.index(s) for s in time_redone] - indices_list.append(oidx) - for i in range(new_shape[0]): - # hold time slices only - new_datas_array = _full_time_slice(cubes, empty_arr, indices_list, - new_arr, i) - # list to hold time slices - time_data = [] - for j in range(len(cubes)): - time_data.append(new_datas_array[j]) - stats_dats[i] = _compute_statistic(time_data, statistic) - stats_cube = _put_in_cube(cubes[0], stats_dats, statistic, time_axis) - return stats_cube - - -def multi_model_statistics(products, span, output_products, statistics): - """Compute multi-model mean and median.""" - logger.debug('Multimodel statistics: computing: %s', statistics) - if len(products) < 2: - logger.info("Single dataset in list: will not compute statistics.") - return products - - cubes = [cube for product in products for cube in product.cubes] - # check if we have any time overlap - interval = _get_overlap(cubes) - if interval is None: - logger.info("Time overlap between cubes is none or a single point." 
- "check datasets: will not compute statistics.") - return products - - if span == 'overlap': - logger.debug("Using common time overlap between " - "datasets to compute statistics.") - elif span == 'full': - logger.debug("Using full time spans to compute statistics.") - else: - raise ValueError( - "Unexpected value for span {}, choose from 'overlap', 'full'" - .format(span)) - - statistic_products = set() - for statistic in statistics: - # Compute statistic - if span == 'overlap': - statistic_cube = _assemble_overlap_data(cubes, interval, statistic) - elif span == 'full': - statistic_cube = _assemble_full_data(cubes, statistic) - statistic_cube.data = np.ma.array( - statistic_cube.data, dtype=np.dtype('float32')) - - # Add to output product and log provenance - statistic_product = output_products[statistic] - statistic_product.cubes = [statistic_cube] - for product in products: - statistic_product.wasderivedfrom(product) - logger.info("Generated %s", statistic_product) - statistic_products.add(statistic_product) - - products |= statistic_products - - return products diff --git a/esmvaltool/preprocessor/_reformat.py b/esmvaltool/preprocessor/_reformat.py deleted file mode 100644 index 0507b35c00..0000000000 --- a/esmvaltool/preprocessor/_reformat.py +++ /dev/null @@ -1,3 +0,0 @@ -"""Simple interface to reformat and CMORize functions.""" -from ..cmor.check import cmor_check, cmor_check_data, cmor_check_metadata -from ..cmor.fix import fix_data, fix_file, fix_metadata diff --git a/esmvaltool/preprocessor/_regrid.py b/esmvaltool/preprocessor/_regrid.py deleted file mode 100644 index f0b4908b7b..0000000000 --- a/esmvaltool/preprocessor/_regrid.py +++ /dev/null @@ -1,510 +0,0 @@ -"""Horizontal and vertical regridding module.""" - -import os -import re -from copy import deepcopy - -import iris -import numpy as np -import six -import stratify -from iris.analysis import AreaWeighted, Linear, Nearest, UnstructuredNearest - -from ._io import concatenate_callback, load -from ._regrid_esmpy import ESMF_REGRID_METHODS -from ._regrid_esmpy import regrid as esmpy_regrid -from ..cmor.fix import fix_file, fix_metadata -from ..cmor.table import CMOR_TABLES - -# Regular expression to parse a "MxN" cell-specification. -_CELL_SPEC = re.compile( - r'''\A - \s*(?P\d+(\.\d+)?)\s* - x - \s*(?P\d+(\.\d+)?)\s* - \Z - ''', re.IGNORECASE | re.VERBOSE) - -# Default fill-value. -_MDI = 1e+20 - -# Stock cube - global grid extents (degrees). -_LAT_MIN = -90.0 -_LAT_MAX = 90.0 -_LAT_RANGE = _LAT_MAX - _LAT_MIN -_LON_MIN = 0.0 -_LON_MAX = 360.0 -_LON_RANGE = _LON_MAX - _LON_MIN - -# A cached stock of standard horizontal target grids. -_CACHE = dict() - -# Supported horizontal regridding schemes. -HORIZONTAL_SCHEMES = { - 'linear': Linear(extrapolation_mode='mask'), - 'linear_extrapolate': Linear(extrapolation_mode='extrapolate'), - 'nearest': Nearest(extrapolation_mode='mask'), - 'area_weighted': AreaWeighted(), - 'unstructured_nearest': UnstructuredNearest(), -} - -# Supported vertical interpolation schemes. -VERTICAL_SCHEMES = ('linear', 'nearest', - 'linear_horizontal_extrapolate_vertical', - 'nearest_horizontal_extrapolate_vertical') - - -def parse_cell_spec(spec): - """Parse an MxN cell specification string.""" - cell_match = _CELL_SPEC.match(spec) - if cell_match is None: - emsg = 'Invalid MxN cell specification for grid, got {!r}.' 
- raise ValueError(emsg.format(spec)) - - cell_group = cell_match.groupdict() - dlon = float(cell_group['dlon']) - dlat = float(cell_group['dlat']) - - if (np.trunc(_LON_RANGE / dlon) * dlon) != _LON_RANGE: - emsg = ('Invalid longitude delta in MxN cell specification ' - 'for grid, got {!r}.') - raise ValueError(emsg.format(dlon)) - - if (np.trunc(_LAT_RANGE / dlat) * dlat) != _LAT_RANGE: - emsg = ('Invalid latitude delta in MxN cell specification ' - 'for grid, got {!r}.') - raise ValueError(emsg.format(dlat)) - - return dlon, dlat - - -def _stock_cube(spec, lat_offset=True, lon_offset=True): - """ - Create a stock cube. - - Create a global cube with M degree-east by N degree-north regular grid - cells. - - The longitude range is from 0 to 360 degrees. The latitude range is from - -90 to 90 degrees. Each cell grid point is calculated as the mid-point of - the associated MxN cell. - - Parameters - ---------- - spec : str - Specifies the 'MxN' degree cell-specification for the global grid. - lat_offset : bool - Offset the grid centers of the latitude coordinate w.r.t. the - pole by half a grid step. This argument is ignored if `target_grid` - is a cube or file. - lon_offset : bool - Offset the grid centers of the longitude coordinate w.r.t. Greenwich - meridian by half a grid step. - This argument is ignored if `target_grid` is a cube or file. - - Returns - ------- - A :class:`~iris.cube.Cube`. - - """ - dlon, dlat = parse_cell_spec(spec) - mid_dlon, mid_dlat = dlon / 2, dlat / 2 - - # Construct the latitude coordinate, with bounds. - if lat_offset: - latdata = np.linspace(_LAT_MIN + mid_dlat, _LAT_MAX - mid_dlat, - _LAT_RANGE / dlat) - else: - latdata = np.linspace(_LAT_MIN, _LAT_MAX, _LAT_RANGE / dlat + 1) - - # Construct the longitude coordinate, with bounds. - if lon_offset: - londata = np.linspace(_LON_MIN + mid_dlon, _LON_MAX - mid_dlon, - _LON_RANGE / dlon) - else: - londata = np.linspace(_LON_MIN, _LON_MAX - dlon, _LON_RANGE / dlon) - - lats = iris.coords.DimCoord( - latdata, - standard_name='latitude', - units='degrees_north', - var_name='lat') - lats.guess_bounds() - - lons = iris.coords.DimCoord( - londata, - standard_name='longitude', - units='degrees_east', - var_name='lon') - lons.guess_bounds() - - # Construct the resultant stock cube, with dummy data. - shape = (latdata.size, londata.size) - dummy = np.empty(shape, dtype=np.dtype('int8')) - coords_spec = [(lats, 0), (lons, 1)] - cube = iris.cube.Cube(dummy, dim_coords_and_dims=coords_spec) - - return cube - - -def _attempt_irregular_regridding(cube, scheme): - """Check if irregular regridding with ESMF should be used.""" - if scheme in ESMF_REGRID_METHODS: - try: - lat_dim = cube.coord('latitude').ndim - lon_dim = cube.coord('longitude').ndim - if lat_dim == lon_dim == 2: - return True - except iris.exceptions.CoordinateNotFoundError: - pass - return False - - -def regrid(cube, target_grid, scheme, lat_offset=True, lon_offset=True): - """ - Perform horizontal regridding. - - Parameters - ---------- - cube : cube - The source cube to be regridded. - target_grid : cube or str - The cube that specifies the target or reference grid for the regridding - operation. Alternatively, a string cell specification may be provided, - of the form 'MxN', which specifies the extent of the cell, longitude by - latitude (degrees) for a global, regular target grid. - scheme : str - The regridding scheme to perform, choose from - 'linear', - 'linear_extrapolate', - 'nearest', - 'area_weighted', - 'unstructured_nearest'.
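[Editor's note: an illustrative call of the helpers above; '2.5x2.5' requests a regular global grid of 2.5-degree cells.]

    dlon, dlat = parse_cell_spec('2.5x2.5')  # -> (2.5, 2.5)
    grid = _stock_cube('2.5x2.5')            # global cube with shape (72, 144)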
- lat_offset : bool - Offset the grid centers of the latitude coordinate w.r.t. the - pole by half a grid step. This argument is ignored if `target_grid` - is a cube or file. - lon_offset : bool - Offset the grid centers of the longitude coordinate w.r.t. Greenwich - meridian by half a grid step. - This argument is ignored if `target_grid` is a cube or file. - - Returns - ------- - cube - - See Also - -------- - extract_levels : Perform vertical regridding. - - """ - if HORIZONTAL_SCHEMES.get(scheme.lower()) is None: - emsg = 'Unknown regridding scheme, got {!r}.' - raise ValueError(emsg.format(scheme)) - - if isinstance(target_grid, six.string_types): - if os.path.isfile(target_grid): - target_grid = iris.load_cube(target_grid) - else: - # Generate a target grid from the provided cell-specification, - # and cache the resulting stock cube for later use. - target_grid = _CACHE.setdefault( - target_grid, - _stock_cube(target_grid, lat_offset, lon_offset), - ) - # Align the target grid coordinate system to the source - # coordinate system. - src_cs = cube.coord_system() - xcoord = target_grid.coord(axis='x', dim_coords=True) - ycoord = target_grid.coord(axis='y', dim_coords=True) - xcoord.coord_system = src_cs - ycoord.coord_system = src_cs - - if not isinstance(target_grid, iris.cube.Cube): - raise ValueError('Expecting a cube, got {}.'.format(target_grid)) - - # Unstructured regridding requires x2 2d spatial coordinates, - # so ensure to purge any 1d native spatial dimension coordinates - # for the regridder. - if scheme == 'unstructured_nearest': - for axis in ['x', 'y']: - coords = cube.coords(axis=axis, dim_coords=True) - if coords: - [coord] = coords - cube.remove_coord(coord) - - # Perform the horizontal regridding. - if _attempt_irregular_regridding(cube, scheme): - cube = esmpy_regrid(cube, target_grid, scheme) - else: - cube = cube.regrid(target_grid, HORIZONTAL_SCHEMES[scheme]) - - return cube - - -def _create_cube(src_cube, data, levels): - """ - Generate a new cube with the interpolated data. - - The resultant cube is seeded with `src_cube` metadata and coordinates, - excluding any source coordinates that span the associated vertical - dimension. The `levels` of interpolation are used along with the - associated source cube vertical coordinate metadata to add a new - vertical coordinate to the resultant cube. - - Parameters - ---------- - src_cube : cube - The source cube that was vertically interpolated. - data : array - The payload resulting from interpolating the source cube - over the specified levels. - levels : array - The vertical levels of interpolation. - - Returns - ------- - cube - - .. note:: - - If there is only one level of interpolation, the resultant cube - will be collapsed over the associated vertical dimension, and a - scalar vertical coordinate will be added. - - """ - # Get the source cube vertical coordinate and associated dimension. - src_levels = src_cube.coord(axis='z', dim_coords=True) - z_dim, = src_cube.coord_dims(src_levels) - - if data.shape[z_dim] != levels.size: - emsg = ('Mismatch between data and levels for data dimension {!r}, ' - 'got data shape {!r} with levels shape {!r}.') - raise ValueError(emsg.format(z_dim, data.shape, levels.shape)) - - # Construct the resultant cube with the interpolated data - # and the source cube metadata. 
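[Editor's note: a minimal usage sketch of the regrid function deleted above; the input file name is hypothetical.]

    import iris
    cube = iris.load_cube('tas.nc')  # hypothetical source data
    regridded = regrid(cube, target_grid='1x1', scheme='linear')
    # target_grid may equally be another cube, or a path to a file defining the grid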
- kwargs = deepcopy(src_cube.metadata)._asdict() - result = iris.cube.Cube(data, **kwargs) - - # Add the appropriate coordinates to the cube, excluding - # any coordinates that span the z-dimension of interpolation. - for coord in src_cube.dim_coords: - [dim] = src_cube.coord_dims(coord) - if dim != z_dim: - result.add_dim_coord(coord.copy(), dim) - - for coord in src_cube.aux_coords: - dims = src_cube.coord_dims(coord) - if z_dim not in dims: - result.add_aux_coord(coord.copy(), dims) - - for coord in src_cube.derived_coords: - dims = src_cube.coord_dims(coord) - if z_dim not in dims: - result.add_aux_coord(coord.copy(), dims) - - # Construct the new vertical coordinate for the interpolated - # z-dimension, using the associated source coordinate metadata. - kwargs = deepcopy(src_levels._as_defn())._asdict() - - try: - coord = iris.coords.DimCoord(levels, **kwargs) - result.add_dim_coord(coord, z_dim) - except ValueError: - coord = iris.coords.AuxCoord(levels, **kwargs) - result.add_aux_coord(coord, z_dim) - - # Collapse the z-dimension for the scalar case. - if levels.size == 1: - slicer = [slice(None)] * result.ndim - slicer[z_dim] = 0 - result = result[tuple(slicer)] - - return result - - -def _vertical_interpolate(cube, levels, interpolation, extrapolation): - """Perform vertical interpolation.""" - # Determine the source levels and axis for vertical interpolation. - src_levels = cube.coord(axis='z', dim_coords=True) - z_axis, = cube.coord_dims(src_levels) - - # Broadcast the 1d source cube vertical coordinate to fully - # describe the spatial extent that will be interpolated. - broadcast_shape = cube.shape[z_axis:] - reshape = [1] * len(broadcast_shape) - reshape[0] = cube.shape[z_axis] - src_levels_reshaped = src_levels.points.reshape(reshape) - src_levels_broadcast = np.broadcast_to(src_levels_reshaped, - broadcast_shape) - - # force mask onto data as nan's - if np.ma.is_masked(cube.data): - cube.data[cube.data.mask] = np.nan - - # Now perform the actual vertical interpolation. - new_data = stratify.interpolate( - levels, - src_levels_broadcast, - cube.data, - axis=z_axis, - interpolation=interpolation, - extrapolation=extrapolation) - - # Calculate the mask based on any NaN values in the interpolated data. - mask = np.isnan(new_data) - - if np.any(mask): - # Ensure that the data is masked appropriately. - new_data = np.ma.array(new_data, mask=mask, fill_value=_MDI) - - # Construct the resulting cube with the interpolated data. - return _create_cube(cube, new_data, levels.astype(float)) - - -def extract_levels(cube, levels, scheme): - """ - Perform vertical interpolation. - - Parameters - ---------- - cube : cube - The source cube to be vertically interpolated. - levels : array - One or more target levels for the vertical interpolation. Assumed - to be in the same S.I. units of the source cube vertical dimension - coordinate. - scheme : str - The vertical interpolation scheme to use. Choose from - 'linear', - 'nearest', - 'nearest_horizontal_extrapolate_vertical', - 'linear_horizontal_extrapolate_vertical'. - - Returns - ------- - cube - - See Also - -------- - regrid : Perform horizontal regridding. - - """ - if scheme not in VERTICAL_SCHEMES: - emsg = 'Unknown vertical interpolation scheme, got {!r}. ' - emsg += 'Possible schemes: {!r}' - raise ValueError(emsg.format(scheme, VERTICAL_SCHEMES)) - - # This allows us to put level 0. to load the ocean surface.
- extrap_scheme = 'nan' - if scheme == 'nearest_horizontal_extrapolate_vertical': - scheme = 'nearest' - extrap_scheme = 'nearest' - - if scheme == 'linear_horizontal_extrapolate_vertical': - scheme = 'linear' - extrap_scheme = 'nearest' - - # Ensure we have a non-scalar array of levels. - levels = np.array(levels, ndmin=1) - - # Get the source cube vertical coordinate, if available. - src_levels = cube.coord(axis='z', dim_coords=True) - - if (src_levels.shape == levels.shape - and np.allclose(src_levels.points, levels)): - # No vertical extraction/interpolation is needed if the source - # and target levels are "similar" enough. - result = cube - elif set(levels).issubset(set(src_levels.points)): - # If all target levels exist in the source cube, simply extract them. - name = src_levels.name() - coord_values = {name: lambda cell: cell.point in set(levels)} - constraint = iris.Constraint(coord_values=coord_values) - result = cube.extract(constraint) - # Ensure the constraint did not fail. - if not result: - emsg = 'Failed to extract levels {!r} from cube {!r}.' - raise ValueError(emsg.format(list(levels), name)) - else: - # As a last resort, perform vertical interpolation. - result = _vertical_interpolate(cube, levels, scheme, extrap_scheme) - - return result - - -def get_cmor_levels(cmor_table, coordinate): - """Get level definition from a CMOR coordinate. - - Parameters - ---------- - cmor_table: str - CMOR table name - coordinate: str - CMOR coordinate name - - Returns - ------- - list[int] - - Raises - ------ - ValueError: - If the CMOR table is not defined, the coordinate does not specify any - levels or the string is badly formatted. - - """ - if cmor_table not in CMOR_TABLES: - raise ValueError( - "Level definition cmor_table '{}' not available".format( - cmor_table)) - - if coordinate not in CMOR_TABLES[cmor_table].coords: - raise ValueError('Coordinate {} not available for {}'.format( - coordinate, cmor_table)) - - cmor = CMOR_TABLES[cmor_table].coords[coordinate] - - if cmor.requested: - return [float(level) for level in cmor.requested] - if cmor.value: - return [float(cmor.value)] - - raise ValueError( - 'Coordinate {} in {} does not have requested values'.format( - coordinate, cmor_table)) - - -def get_reference_levels(filename, - project, - dataset, - short_name, - fix_dir): - """Get level definition from a CMOR coordinate. - - Parameters - ---------- - filename: str - Path to the reference file - - Returns - ------- - list[float] - - Raises - ------ - ValueError: - If the dataset is not defined, the coordinate does not specify any - levels or the string is badly formatted.
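[Editor's note: an illustrative combination of get_cmor_levels and extract_levels as deleted above, assuming the CMIP6 coordinate tables and the plev19 coordinate are available.]

    levels = get_cmor_levels('CMIP6', 'plev19')  # pressure levels in Pa
    cube_on_plevs = extract_levels(cube, levels, scheme='linear')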
- - """ - filename = fix_file(filename, short_name, project, dataset, fix_dir) - cubes = load(filename, callback=concatenate_callback) - cubes = fix_metadata(cubes, short_name, project, dataset) - cube = cubes[0] - try: - coord = cube.coord(axis='Z') - except iris.exceptions.CoordinateNotFoundError: - raise ValueError('z-coord not available in {}'.format(filename)) - return coord.points.tolist() diff --git a/esmvaltool/preprocessor/_regrid_esmpy.py b/esmvaltool/preprocessor/_regrid_esmpy.py deleted file mode 100755 index 4ac6192451..0000000000 --- a/esmvaltool/preprocessor/_regrid_esmpy.py +++ /dev/null @@ -1,324 +0,0 @@ -# -*- coding: utf-8 -*- -"""Provides regridding for irregular grids.""" - -import ESMF -import iris -import numpy as np - -from ._mapping import get_empty_data, map_slices, ref_to_dims_index - - -ESMF_MANAGER = ESMF.Manager(debug=False) - -ESMF_LON, ESMF_LAT = 0, 1 - -ESMF_REGRID_METHODS = { - 'linear': ESMF.RegridMethod.BILINEAR, - 'area_weighted': ESMF.RegridMethod.CONSERVE, - 'nearest': ESMF.RegridMethod.NEAREST_STOD, -} - -MASK_REGRIDDING_MASK_VALUE = { - ESMF.RegridMethod.BILINEAR: np.array([1]), - ESMF.RegridMethod.CONSERVE: np.array([1]), - ESMF.RegridMethod.NEAREST_STOD: np.array([]), -} - -# ESMF_REGRID_METHODS = { -# 'bilinear': ESMF.RegridMethod.BILINEAR, -# 'patch': ESMF.RegridMethod.PATCH, -# 'conserve': ESMF.RegridMethod.CONSERVE, -# 'nearest_stod': ESMF.RegridMethod.NEAREST_STOD, -# 'nearest_dtos': ESMF.RegridMethod.NEAREST_DTOS, -# } - - -def cf_2d_bounds_to_esmpy_corners(bounds, circular): - """Convert cf style 2d bounds to normal (esmpy style) corners.""" - no_lat_points, no_lon_points = bounds.shape[:2] - no_lat_bounds = no_lat_points + 1 - if circular: - no_lon_bounds = no_lon_points - else: - no_lon_bounds = no_lon_points + 1 - esmpy_corners = np.empty((no_lon_bounds, no_lat_bounds)) - esmpy_corners[:no_lon_points, :no_lat_points] = bounds[:, :, 0].T - esmpy_corners[:no_lon_points, no_lat_points:] = bounds[-1:, :, 3].T - esmpy_corners[no_lon_points:, :no_lat_points] = bounds[:, -1:, 1].T - esmpy_corners[no_lon_points:, no_lat_points:] = bounds[-1:, -1:, 2].T - return esmpy_corners - - -def coords_iris_to_esmpy(lat, lon, circular): - """Build ESMF compatible coordinate information from iris coords.""" - dim = lat.ndim - if lon.ndim != dim: - msg = 'Different dimensions in latitude({}) and longitude({}) coords.' - raise ValueError(msg.format(lat.ndim, lon.ndim)) - if dim == 1: - for coord in [lat, lon]: - if not coord.has_bounds(): - coord.guess_bounds() - esmpy_lat, esmpy_lon = np.meshgrid(lat.points, lon.points) - lat_corners = np.concatenate([lat.bounds[:, 0], lat.bounds[-1:, 1]]) - if circular: - lon_corners = lon.bounds[:, 0] - else: - lon_corners = np.concatenate([lon.bounds[:, 0], - lon.bounds[-1:, 1]]) - esmpy_lat_corners, esmpy_lon_corners = np.meshgrid(lat_corners, - lon_corners) - elif dim == 2: - esmpy_lat, esmpy_lon = lat.points.T.copy(), lon.points.T.copy() - esmpy_lat_corners = cf_2d_bounds_to_esmpy_corners(lat.bounds, circular) - esmpy_lon_corners = cf_2d_bounds_to_esmpy_corners(lon.bounds, circular) - else: - raise NotImplementedError('Coord dimension is {}. Expected 1 or 2.' 
- ''.format(dim)) - return esmpy_lat, esmpy_lon, esmpy_lat_corners, esmpy_lon_corners - - -def get_grid(esmpy_lat, esmpy_lon, - esmpy_lat_corners, esmpy_lon_corners, circular): - """Build ESMF grid from given coordinate information.""" - if circular: - num_peri_dims = 1 - else: - num_peri_dims = 0 - grid = ESMF.Grid(np.array(esmpy_lat.shape), - num_peri_dims=num_peri_dims, - staggerloc=[ESMF.StaggerLoc.CENTER]) - grid.get_coords(ESMF_LON)[...] = esmpy_lon - grid.get_coords(ESMF_LAT)[...] = esmpy_lat - grid.add_coords([ESMF.StaggerLoc.CORNER]) - grid_lon_corners = grid.get_coords(ESMF_LON, - staggerloc=ESMF.StaggerLoc.CORNER) - grid_lat_corners = grid.get_coords(ESMF_LAT, - staggerloc=ESMF.StaggerLoc.CORNER) - grid_lon_corners[...] = esmpy_lon_corners - grid_lat_corners[...] = esmpy_lat_corners - grid.add_item(ESMF.GridItem.MASK, ESMF.StaggerLoc.CENTER) - return grid - - -def is_lon_circular(lon): - """Determine if longitudes are circular.""" - if isinstance(lon, iris.coords.DimCoord): - circular = lon.circular - elif isinstance(lon, iris.coords.AuxCoord): - if lon.ndim == 1: - seam = lon.bounds[-1, 1] - lon.bounds[0, 0] - elif lon.ndim == 2: - seam = (lon.bounds[1:-1, -1, (1, 2)] - - lon.bounds[1:-1, 0, (0, 3)]) - else: - raise NotImplementedError('AuxCoord longitude is higher ' - 'dimensional than 2d. Giving up.') - circular = np.alltrue(abs(seam) % 360. < 1.e-3) - else: - raise ValueError('longitude is neither DimCoord nor AuxCoord. ' - 'Giving up.') - return circular - - -def cube_to_empty_field(cube): - """Build an empty ESMF field from a cube.""" - lat = cube.coord('latitude') - lon = cube.coord('longitude') - circular = is_lon_circular(lon) - esmpy_coords = coords_iris_to_esmpy(lat, lon, circular) - grid = get_grid(*esmpy_coords, circular=circular) - field = ESMF.Field(grid, - name=cube.long_name, - staggerloc=ESMF.StaggerLoc.CENTER) - return field - - -def get_representant(cube, ref_to_slice): - """Get a representative slice from a cube.""" - slice_dims = ref_to_dims_index(cube, ref_to_slice) - rep_ind = [0] * cube.ndim - for dim in slice_dims: - rep_ind[dim] = slice(None, None) - rep_ind = tuple(rep_ind) - return cube[rep_ind] - - -def build_regridder_2d(src_rep, dst_rep, regrid_method, mask_threshold): - """Build regridder for 2d regridding.""" - dst_field = cube_to_empty_field(dst_rep) - src_field = cube_to_empty_field(src_rep) - regridding_arguments = { - 'srcfield': src_field, - 'dstfield': dst_field, - 'regrid_method': regrid_method, - 'unmapped_action': ESMF.UnmappedAction.IGNORE, - 'ignore_degenerate': True, - } - if np.ma.is_masked(src_rep.data): - src_field.data[...] = ~src_rep.data.mask.T - src_mask = src_field.grid.get_item(ESMF.GridItem.MASK, - ESMF.StaggerLoc.CENTER) - src_mask[...] = src_rep.data.mask.T - center_mask = dst_field.grid.get_item(ESMF.GridItem.MASK, - ESMF.StaggerLoc.CENTER) - center_mask[...] = 0 - mask_regridder = ESMF.Regrid( - src_mask_values=MASK_REGRIDDING_MASK_VALUE[regrid_method], - dst_mask_values=np.array([]), - **regridding_arguments) - regr_field = mask_regridder(src_field, dst_field) - dst_mask = regr_field.data[...].T < mask_threshold - center_mask[...] = dst_mask.T - else: - dst_mask = False - field_regridder = ESMF.Regrid(src_mask_values=np.array([1]), - dst_mask_values=np.array([1]), - **regridding_arguments) - - def regridder(src): - """Regrid 2d for irregular grids.""" - res = get_empty_data(dst_rep.shape, src.dtype) - data = src.data - if np.ma.is_masked(data): - data = data.data - src_field.data[...]
= data.T - regr_field = field_regridder(src_field, dst_field) - res.data[...] = regr_field.data[...].T - res.mask[...] = dst_mask - return res - - return regridder - - -def build_regridder_3d(src_rep, dst_rep, regrid_method, mask_threshold): - # pylint: disable=too-many-locals - # The necessary refactoring will be done for the full 3d regridding. - """Build regridder for 2.5d regridding.""" - esmf_regridders = [] - no_levels = src_rep.shape[0] - for level in range(no_levels): - esmf_regridders.append( - build_regridder_2d(src_rep[level], dst_rep[level], - regrid_method, mask_threshold) - ) - - def regridder(src): - """Regrid 2.5d for irregular grids.""" - res = get_empty_data(dst_rep.shape, src.dtype) - for i, esmf_regridder in enumerate(esmf_regridders): - res[i, ...] = esmf_regridder(src[i]) - return res - - return regridder - - -def build_regridder(src_rep, dst_rep, method, mask_threshold=.99): - """Build regridders from representants.""" - regrid_method = ESMF_REGRID_METHODS[method] - if src_rep.ndim == 2: - regridder = build_regridder_2d(src_rep, dst_rep, - regrid_method, mask_threshold) - elif src_rep.ndim == 3: - regridder = build_regridder_3d(src_rep, dst_rep, - regrid_method, mask_threshold) - return regridder - - -def get_grid_representant(cube, horizontal_only=False): - """Extract the spatial grid from a cube.""" - horizontal_slice = ['latitude', 'longitude'] - ref_to_slice = horizontal_slice - if not horizontal_only: - try: - cube_z_coord = cube.coord(axis='Z') - n_zdims = len(cube.coord_dims(cube_z_coord)) - if n_zdims == 0: - # scalar z coordinate, go on with 2d regridding - pass - elif n_zdims == 1: - ref_to_slice = [cube_z_coord] + horizontal_slice - else: - raise ValueError("Cube has multidimensional Z coordinate.") - except iris.exceptions.CoordinateNotFoundError: - # no z coordinate, go on with 2d regridding - pass - return get_representant(cube, ref_to_slice) - - -def get_grid_representants(src, dst): - """ - Construct cubes representing the source and destination grid. - - This method constructs two new cubes that represent the grids, - i.e. the spatial dimensions of the given cubes. - - Parameters - ---------- - src: :class:`iris.cube.Cube` - Cube to be regridded. Typically a time series of 2d or 3d slices. - dst: :class:`iris.cube.Cube` - Cube defining the destination grid. Usually just a 2d or 3d cube. - - Returns - ------- - tuple of :class:`iris.cube.Cube`: - A tuple containing two cubes, representing the source grid and the - destination grid, respectively. - """ - src_rep = get_grid_representant(src) - dst_horiz_rep = get_grid_representant(dst, horizontal_only=True) - if src_rep.ndim == 3: - dst_shape = (src_rep.shape[0],) - dim_coords = [src_rep.coord(dimensions=[0], dim_coords=True)] - else: - dst_shape = tuple() - dim_coords = [] - dst_shape += dst_horiz_rep.shape - dim_coords += dst_horiz_rep.coords(dim_coords=True) - dim_coords_and_dims = [(c, i) for i, c in enumerate(dim_coords)] - dst_rep = iris.cube.Cube( - data=get_empty_data(dst_shape, src.dtype), - standard_name=src.standard_name, - long_name=src.long_name, - var_name=src.var_name, - units=src.units, - attributes=src.attributes, - cell_methods=src.cell_methods, - dim_coords_and_dims=dim_coords_and_dims, - ) - return src_rep, dst_rep - - -def regrid(src, dst, method='linear'): - """ - Regrid src to the grid defined by dst. - - Regrid the data in src onto the grid defined by dst. - - Parameters - ---------- - src: :class:`iris.cube.Cube` - Source data.
Must have latitude and longitude coords. - These can be 1d or 2d and should have bounds. - dst: :class:`iris.cube.Cube` - Defines the target grid. - method: - Selects the regridding method. - Can be 'linear', 'area_weighted', - or 'nearest'. See ESMPy_. - - Returns - ------- - :class:`iris.cube.Cube`: - The regridded cube. - - - .. _ESMPy: http://www.earthsystemmodeling.org/ - esmf_releases/non_public/ESMF_7_0_0/esmpy_doc/html/ - RegridMethod.html#ESMF.api.constants.RegridMethod - """ - src_rep, dst_rep = get_grid_representants(src, dst) - regridder = build_regridder(src_rep, dst_rep, method) - res = map_slices(src, regridder, src_rep, dst_rep) - return res diff --git a/esmvaltool/preprocessor/_time.py b/esmvaltool/preprocessor/_time.py deleted file mode 100644 index 45976e7f26..0000000000 --- a/esmvaltool/preprocessor/_time.py +++ /dev/null @@ -1,299 +0,0 @@ -"""Time operations on cubes. - -Allows for selecting data subsets using certain time bounds; -constructing seasonal and area averages. -""" -import datetime -import logging - -import cf_units -import iris -import iris.coord_categorisation -import numpy as np - -from .._config import use_legacy_iris - -logger = logging.getLogger(__name__) - - -def extract_time(cube, start_year, start_month, start_day, end_year, end_month, - end_day): - """Extract a time range from a cube. - - Parameters - ---------- - cube: iris.cube.Cube - input cube. - start_year: int - start year - start_month: int - start month - start_day: int - start day - end_year: int - end year - end_month: int - end month - end_day: int - end day - - Returns - ------- - iris.cube.Cube - Sliced cube. - - """ - time_units = cube.coord('time').units - if time_units.calendar == '360_day': - if start_day > 30: - start_day = 30 - if end_day > 30: - end_day = 30 - start_date = datetime.datetime( - int(start_year), int(start_month), int(start_day)) - end_date = datetime.datetime(int(end_year), int(end_month), int(end_day)) - - t_1 = time_units.date2num(start_date) - t_2 = time_units.date2num(end_date) - if use_legacy_iris(): - constraint = iris.Constraint(time=lambda t: (t_1 < t.point < t_2)) - else: - constraint = iris.Constraint( - time=lambda t: (t_1 < time_units.date2num(t.point) < t_2)) - - cube_slice = cube.extract(constraint) - if cube_slice is None: - start_cube = str(cube.coord('time').points[0]) - end_cube = str(cube.coord('time').points[-1]) - raise ValueError( - f"Time slice {start_date} to {end_date} is outside cube " - f"time bounds {start_cube} to {end_cube}.") - - # Issue when time dimension was removed when only one point was selected. - if cube_slice.ndim != cube.ndim: - time_1 = cube.coord('time') - time_2 = cube_slice.coord('time') - if time_1 == time_2: - logger.debug('No change needed to time.') - return cube - - return cube_slice - - -def extract_season(cube, season): - """ - Slice cube to get only the data belonging to a specific season. - - Parameters - ---------- - cube: iris.cube.Cube - Original data - season: str - Season to extract. Available: DJF, MAM, JJA, SON - """ - if not cube.coords('clim_season'): - iris.coord_categorisation.add_season(cube, 'time', name='clim_season') - if not cube.coords('season_year'): - iris.coord_categorisation.add_season_year( - cube, 'time', name='season_year') - return cube.extract(iris.Constraint(clim_season=season.lower())) - - -def extract_month(cube, month): - """ - Slice cube to get only the data belonging to a specific month.
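[Editor's note: a minimal sketch of the time subsetting helpers, extract_time and extract_season above and extract_month just below.]

    sliced = extract_time(cube, 2000, 1, 1, 2010, 1, 1)  # 2000-01-01 to 2010-01-01
    winter = extract_season(sliced, 'DJF')
    january = extract_month(sliced, 1)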
- - Parameters - ---------- - cube: iris.cube.Cube - Original data - month: int - Month to extract as a number from 1 to 12 - """ - if month not in range(1, 13): - raise ValueError('Please provide a month number between 1 and 12.') - return cube.extract(iris.Constraint(month_number=month)) - - -def get_time_weights(cube): - """ - Compute the weighting of the time axis. - - Parameters - ---------- - cube: iris.cube.Cube - input cube. - - Returns - ------- - numpy.array - Array of time weights for averaging. - """ - time = cube.coord('time') - time_thickness = time.bounds[..., 1] - time.bounds[..., 0] - - # The weights need to match the dimensionality of the cube. - slices = [None for i in cube.shape] - coord_dim = cube.coord_dims('time')[0] - slices[coord_dim] = slice(None) - time_thickness = np.abs(time_thickness[tuple(slices)]) - ones = np.ones_like(cube.data) - time_weights = time_thickness * ones - return time_weights - - -def time_average(cube): - """ - Compute time average. - - Get the time average over the entire cube. The average is weighted by the - bounds of the time coordinate. - - Parameters - ---------- - cube: iris.cube.Cube - input cube. - - Returns - ------- - iris.cube.Cube - time averaged cube. - """ - time_weights = get_time_weights(cube) - - return cube.collapsed('time', iris.analysis.MEAN, weights=time_weights) - - -# get the seasonal mean -def seasonal_mean(cube): - """ - Compute seasonal means with MEAN. - - Chunks time in 3-month periods and computes means over them; - - Arguments - --------- - cube: iris.cube.Cube - input cube. - - Returns - ------- - iris.cube.Cube - Seasonal mean cube - """ - if not cube.coords('clim_season'): - iris.coord_categorisation.add_season(cube, 'time', name='clim_season') - if not cube.coords('season_year'): - iris.coord_categorisation.add_season_year( - cube, 'time', name='season_year') - cube = cube.aggregated_by(['clim_season', 'season_year'], - iris.analysis.MEAN) - - # CMOR Units are days so we are safe to operate on days - # Ranging on [90, 92] days makes this calendar-independent - def spans_three_months(time): - """Check for three months""" - return 90 <= (time.bound[1] - time.bound[0]).days <= 92 - - three_months_bound = iris.Constraint(time=spans_three_months) - return cube.extract(three_months_bound) - - -def regrid_time(cube, frequency): - """ - Align time axis for cubes so they can be subtracted. - - Operations on time units, calendars, time points and auxiliary - coordinates so that any cube from cubes can be subtracted from any - other cube from cubes. Currently this function supports only monthly - (frequency=mon) and daily (frequency=day) data time frequencies. 
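[Editor's note: the averaging helpers above in a minimal sketch; time_average weights each time step by the width of its time bounds.]

    climatology = time_average(cube)  # collapse the full time axis
    seasons = seasonal_mean(cube)     # one value per (clim_season, season_year)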
- - Arguments - --------- - cube: iris.cube.Cube - frequency: str - data frequency: mon or day - - Returns - ------- - iris.cube.Cube instance - """ - # fix calendars - cube.coord('time').units = cf_units.Unit( - cube.coord('time').units.origin, - calendar='gregorian' - ) - - # standardize time points - time_c = [cell.point for cell in cube.coord('time').cells()] - if frequency == 'mon': - cube.coord('time').cells = [ - datetime.datetime(t.year, t.month, - 15, 0, 0, 0) for t in time_c - ] - elif frequency == 'day': - cube.coord('time').cells = [ - datetime.datetime(t.year, t.month, - t.day, 0, 0, 0) for t in time_c - ] - # TODO add correct handling of hourly data - # this is a bit more complicated since it can be 3h, 6h etc - cube.coord('time').points = [ - cube.coord('time').units.date2num(cl) - for cl in cube.coord('time').cells - ] - - # uniformize bounds - cube.coord('time').bounds = None - cube.coord('time').guess_bounds() - - # remove aux coords that will differ - reset_aux = ['day_of_month', 'day_of_year'] - for auxcoord in cube.aux_coords: - if auxcoord.long_name in reset_aux: - cube.remove_coord(auxcoord) - - # re-add the converted aux coords - iris.coord_categorisation.add_day_of_month(cube, cube.coord('time'), - name='day_of_month') - iris.coord_categorisation.add_day_of_year(cube, cube.coord('time'), - name='day_of_year') - - return cube - - -def annual_mean(cube, decadal=False): - """ - Compute annual or decadal means. - - Note that this function does not weight the annual or decadal mean if - uneven time periods are present. I.e., all data inside the year/decade - are treated equally. - - Parameters - ---------- - cube: iris.cube.Cube - input cube. - decadal: bool - Decadal average (:obj:`True`) or annual average (:obj:`False`) - Returns - ------- - iris.cube.Cube - Annual mean cube - """ - def get_decade(coord, value): - """Callback function to get decades from cube.""" - date = coord.units.num2date(value) - return date.year - date.year % 10 - - # time_weights = get_time_weights(cube) - - # TODO: Add weighting in time dimension. See iris issue 3290 - # https://github.com/SciTools/iris/issues/3290 - - if decadal: - iris.coord_categorisation.add_categorised_coord(cube, 'decade', - 'time', get_decade) - return cube.aggregated_by('decade', iris.analysis.MEAN) - - return cube.aggregated_by('year', iris.analysis.MEAN) diff --git a/esmvaltool/preprocessor/_volume.py b/esmvaltool/preprocessor/_volume.py deleted file mode 100644 index 366450e37d..0000000000 --- a/esmvaltool/preprocessor/_volume.py +++ /dev/null @@ -1,456 +0,0 @@ -""" -Volume and z coordinate operations on data cubes. - -Allows for selecting data subsets using certain volume bounds; -selecting depth or height regions; constructing volumetric averages. -""" -from copy import deepcopy - -import logging - -import iris -import numpy as np - -logger = logging.getLogger(__name__) - - -def extract_volume(cube, z_min, z_max): - """ - Subset a cube based on a range of values in the z-coordinate. - - Function that subsets a cube on a box (z_min, z_max) - This function is a restriction of masked_cube_lonlat(); - Note that this requires the requested z-coordinate range to be the - same sign as the iris cube. I.e., if the cube has z-coordinate as - negative, then z_min and z_max need to be negative numbers. - - Arguments - --------- - cube: iris.cube.Cube - input cube. - - z_min: float - minimum depth to extract. - - z_max: float - maximum depth to extract. - - Returns - ------- - iris.cube.Cube - extracted cube.
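[Editor's note: illustrative calls of annual_mean as defined above and of extract_volume, whose body follows below; ocean_cube stands for a hypothetical cube with a depth coordinate in metres.]

    annual = annual_mean(cube)                 # one value per year
    decadal = annual_mean(cube, decadal=True)  # one value per decade
    top_100m = extract_volume(ocean_cube, 0., 100.)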
- """ - if z_min > z_max: - # minimum is below maximum, so switch them around - zmax = float(z_min) - zmin = float(z_max) - else: - zmax = float(z_max) - zmin = float(z_min) - - z_constraint = iris.Constraint( - coord_values={ - cube.coord(axis='Z'): lambda cell: zmin < cell.point < zmax}) - - return cube.extract(z_constraint) - - -def _create_cube_time(src_cube, data, times): - """ - Generate a new cube with the volume averaged data. - - The resultant cube is seeded with `src_cube` metadata and coordinates, - excluding any source coordinates that span the associated vertical - dimension. The `times` of interpolation are used along with the - associated source cube time coordinate metadata to add a new - time coordinate to the resultant cube. - - Based on the _create_cube method from _regrid.py. - - Parameters - ---------- - src_cube : cube - The source cube that was vertically interpolated. - data : array - The payload resulting from interpolating the source cube - over the specified times. - times : array - The array of times. - - Returns - ------- - cube - - .. note:: - - If there is only one level of interpolation, the resultant cube - will be collapsed over the associated vertical dimension, and a - scalar vertical coordinate will be added. - - """ - # Get the source cube vertical coordinate and associated dimension. - src_times = src_cube.coord('time') - t_dim, = src_cube.coord_dims(src_times) - - if data.shape[t_dim] != len(times): - emsg = ('Mismatch between data and times for data dimension {!r}, ' - 'got data shape {!r} with times shape {!r}.') - raise ValueError(emsg.format(t_dim, data.shape, times.shape)) - - # Construct the resultant cube with the interpolated data - # and the source cube metadata. - kwargs = deepcopy(src_cube.metadata)._asdict() - result = iris.cube.Cube(data, **kwargs) - - # Add the appropriate coordinates to the cube, excluding - # any coordinates that span the z-dimension of interpolation. - for coord in src_cube.dim_coords: - [dim] = src_cube.coord_dims(coord) - if dim != t_dim: - result.add_dim_coord(coord.copy(), dim) - - for coord in src_cube.aux_coords: - dims = src_cube.coord_dims(coord) - if t_dim not in dims: - result.add_aux_coord(coord.copy(), dims) - - for coord in src_cube.derived_coords: - dims = src_cube.coord_dims(coord) - if t_dim not in dims: - result.add_aux_coord(coord.copy(), dims) - - # Construct the new vertical coordinate for the interpolated - # z-dimension, using the associated source coordinate metadata. - kwargs = deepcopy(src_times._as_defn())._asdict() - - try: - coord = iris.coords.DimCoord(times, **kwargs) - result.add_dim_coord(coord, t_dim) - except ValueError: - coord = iris.coords.AuxCoord(times, **kwargs) - result.add_aux_coord(coord, t_dim) - - return result - - -def calculate_volume(cube): - """ - Calculate volume from a cube. - - This function is used when the volume netcdf fx_files can't be found. - """ - # #### - # Load depth field and figure out which dim is which. 
-
-
-def _create_cube_time(src_cube, data, times):
-    """
-    Generate a new cube carrying a time series of volume-averaged data.
-
-    The resultant cube is seeded with `src_cube` metadata and coordinates,
-    excluding any source coordinates that span the time dimension. The
-    `times` are used along with the source cube time coordinate metadata
-    to add a new time coordinate to the resultant cube.
-
-    Based on the _create_cube method from _regrid.py.
-
-    Parameters
-    ----------
-    src_cube : cube
-        The source cube from which the data was derived.
-    data : array
-        The payload to carry in the new cube, one value per time.
-    times : array
-        The array of time points.
-
-    Returns
-    -------
-    cube
-
-    .. note::
-
-        If there is only one time point, the resultant cube will be
-        collapsed over the time dimension, and a scalar time coordinate
-        will be added.
-
-    """
-    # Get the source cube time coordinate and associated dimension.
-    src_times = src_cube.coord('time')
-    t_dim, = src_cube.coord_dims(src_times)
-
-    if data.shape[t_dim] != len(times):
-        emsg = ('Mismatch between data and times for data dimension {!r}, '
-                'got data shape {!r} with times shape {!r}.')
-        raise ValueError(emsg.format(t_dim, data.shape, times.shape))
-
-    # Construct the resultant cube with the new data
-    # and the source cube metadata.
-    kwargs = deepcopy(src_cube.metadata)._asdict()
-    result = iris.cube.Cube(data, **kwargs)
-
-    # Add the appropriate coordinates to the cube, excluding
-    # any coordinates that span the time dimension.
-    for coord in src_cube.dim_coords:
-        [dim] = src_cube.coord_dims(coord)
-        if dim != t_dim:
-            result.add_dim_coord(coord.copy(), dim)
-
-    for coord in src_cube.aux_coords:
-        dims = src_cube.coord_dims(coord)
-        if t_dim not in dims:
-            result.add_aux_coord(coord.copy(), dims)
-
-    for coord in src_cube.derived_coords:
-        dims = src_cube.coord_dims(coord)
-        if t_dim not in dims:
-            result.add_aux_coord(coord.copy(), dims)
-
-    # Construct the new time coordinate using the associated source
-    # coordinate metadata.
-    kwargs = deepcopy(src_times._as_defn())._asdict()
-
-    try:
-        coord = iris.coords.DimCoord(times, **kwargs)
-        result.add_dim_coord(coord, t_dim)
-    except ValueError:
-        coord = iris.coords.AuxCoord(times, **kwargs)
-        result.add_aux_coord(coord, t_dim)
-
-    return result
-
-
-def calculate_volume(cube):
-    """
-    Calculate volume from a cube.
-
-    This function is used when the volume netcdf fx_files can't be found.
-    """
-    # ####
-    # Load depth field and figure out which dim is which.
-    depth = cube.coord(axis='z')
-    z_dim = cube.coord_dims(cube.coord(axis='z'))[0]
-
-    # ####
-    # Load z direction thickness
-    thickness = depth.bounds[..., 1] - depth.bounds[..., 0]
-
-    # ####
-    # Calculate grid volume:
-    area = iris.analysis.cartography.area_weights(cube)
-    if thickness.ndim == 1 and z_dim == 1:
-        grid_volume = area * thickness[None, :, None, None]
-    elif thickness.ndim == 4 and z_dim == 1:
-        grid_volume = area * thickness[:, :]
-    else:
-        raise ValueError('Unsupported thickness shape {} for z dimension '
-                         '{}'.format(thickness.shape, z_dim))
-
-    return grid_volume
-
-
-def average_volume(
-        cube,
-        coord1,
-        coord2,
-        fx_files=None):
-    """
-    Calculate the average volume.
-
-    The volume average is weighted according to the cell volume. Cell volume
-    is calculated from iris's cartography tool multiplied by the cell
-    thickness.
-
-    Arguments
-    ---------
-    cube: iris.cube.Cube
-        input cube.
-
-    coord1: str
-        name of first coordinate
-
-    coord2: str
-        name of second coordinate
-
-    fx_files: dictionary
-        dictionary of field:filename for the fx_files
-
-    Returns
-    -------
-    iris.cube.Cube
-        collapsed cube.
-    """
-    # TODO: Test sigma coordinates.
-
-    # ####
-    # Load z coordinate field and figure out which dim is which.
-    t_dim = cube.coord_dims('time')[0]
-    cube_shape = cube.data.shape
-
-    grid_volume_found = False
-    grid_volume = None
-    if fx_files:
-        for key, fx_file in fx_files.items():
-            if fx_file is None:
-                continue
-            logger.info('Attempting to load %s from file: %s', key, fx_file)
-            fx_cube = iris.load_cube(fx_file)
-
-            grid_volume = fx_cube.data
-            grid_volume_found = True
-
-    if not grid_volume_found:
-        grid_volume = calculate_volume(cube)
-
-    # Check whether the dimensions are right.
-    if cube.data.ndim == 4 and grid_volume.ndim == 3:
-        grid_volume = np.tile(grid_volume,
-                              [cube_shape[0], 1, 1, 1])
-
-    if cube.data.shape != grid_volume.shape:
-        raise ValueError("Cube shape ({}) doesn't match grid volume shape "
-                         '({})'.format(cube.data.shape, grid_volume.shape))
-
-    # #####
-    # Calculate global volume weighted average
-    result = []
-    # #####
-    # Iterate over the time dimension.
-    for time_itr in range(cube.shape[t_dim]):
-        # ####
-        # create empty output arrays
-        column = []
-        depth_volume = []
-
-        # ####
-        # Iterate over the z-coordinate dimension.
-        for z_itr in range(cube.shape[1]):
-            # ####
-            # Calculate weighted mean for this time and layer
-            total = cube[time_itr, z_itr].collapsed(
-                [cube.coord(axis='z'), coord1, coord2],
-                iris.analysis.MEAN,
-                weights=grid_volume[time_itr, z_itr]).data
-            column.append(total)
-
-            try:
-                layer_vol = np.ma.masked_where(
-                    cube[time_itr, z_itr].data.mask,
-                    grid_volume[time_itr, z_itr]).sum()
-
-            except AttributeError:
-                # ####
-                # No mask in the cube data.
-                layer_vol = grid_volume[time_itr, z_itr].sum()
-            depth_volume.append(layer_vol)
-        # ####
-        # Calculate the weighted mean over the water volume
-        result.append(np.average(column, weights=depth_volume))
-
-    # ####
-    # Send time series and dummy cube to cube creating tool.
-    times = np.array(cube.coord('time').points.astype(float))
-    result = np.array(result)
-
-    # #####
-    # Create a small dummy output array for the output cube
-    src_cube = cube[:2, :2].collapsed([cube.coord(axis='z'),
-                                       coord1, coord2],
-                                      iris.analysis.MEAN,
-                                      weights=grid_volume[:2, :2], )
-
-    return _create_cube_time(src_cube, result, times)
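The cell-volume weighting above reduces to a simple broadcast product; a plain NumPy sketch of the arithmetic (shapes are illustrative: time, depth, lat, lon):

    import numpy as np

    area = np.ones((2, 3, 4, 5))  # m^2 per cell, as from area_weights(cube)
    z_bounds = np.array([[0., 10.], [10., 30.], [30., 70.]])  # layer bounds (m)
    thickness = z_bounds[:, 1] - z_bounds[:, 0]  # (3,) layer thicknesses
    grid_volume = area * thickness[None, :, None, None]  # m^3 per cell
    assert grid_volume.shape == (2, 3, 4, 5)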
-
-
-def depth_integration(cube):
-    """
-    Determine the total sum over the vertical component.
-
-    Requires a 3D cube. The z-coordinate integration is calculated by
-    taking the sum in the z direction of the cell contents multiplied by
-    the cell thickness.
-
-    Arguments
-    ---------
-    cube: iris.cube.Cube
-        input cube.
-
-    Returns
-    -------
-    iris.cube.Cube
-        collapsed cube.
-    """
-    # ####
-    depth = cube.coord(axis='z')
-    thickness = depth.bounds[..., 1] - depth.bounds[..., 0]
-
-    if depth.ndim == 1:
-        slices = [None for i in cube.shape]
-        coord_dim = cube.coord_dims(cube.coord(axis='z'))[0]
-        slices[coord_dim] = slice(None)
-        thickness = np.abs(thickness[tuple(slices)])
-
-    ones = np.ones_like(cube.data)
-
-    weights = thickness * ones
-
-    result = cube.collapsed(cube.coord(axis='z'), iris.analysis.SUM,
-                            weights=weights)
-
-    result.rename('Depth_integrated_' + str(cube.name()))
-    # result.units = Unit('m') * result.units  # This doesn't work:
-    # TODO: Change units on cube to reflect 2D concentration (not 3D)
-    # Waiting for news from iris community.
-    return result
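What depth_integration computes, in plain NumPy (shapes and units are illustrative):

    import numpy as np

    data = np.random.rand(3, 4, 5)         # (depth, lat, lon), e.g. mol m-3
    thickness = np.array([10., 20., 40.])  # layer thicknesses in m
    column_total = (data * thickness[:, None, None]).sum(axis=0)
    # column_total has shape (4, 5): the per-column content, e.g. mol m-2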
-
-
-def extract_transect(cube, latitude=None, longitude=None):
-    """
-    Extract data along a line of constant latitude or longitude.
-
-    Both arguments, latitude and longitude, are treated identically.
-    Either argument can be a single float, or a pair of floats, or can be
-    left empty.
-    The single float indicates the latitude or longitude along which the
-    transect should be extracted.
-    A pair of floats indicates the range over which the transect should be
-    extracted along the secondary axis.
-
-    For instance `extract_transect(cube, longitude=-28)` will produce a
-    transect along 28 West.
-
-    Also, `extract_transect(cube, longitude=-28, latitude=[-50, 50])` will
-    produce a transect along 28 West between 50 South and 50 North.
-
-    This function is not yet implemented for irregular arrays - instead
-    try the extract_trajectory function, but note that it is currently
-    very slow. Alternatively, use the regrid preprocessor to regrid onto
-    a regular grid and then extract the transect.
-
-    Arguments
-    ---------
-    cube: iris.cube.Cube
-        input cube.
-
-    latitude: None, float or [float, float], optional
-        transect latitude or range.
-
-    longitude: None, float or [float, float], optional
-        transect longitude or range.
-
-    Returns
-    -------
-    iris.cube.Cube
-        collapsed cube.
-    """
-    # ###
-    coord_dim2 = False
-    second_coord_range = False
-    lats = cube.coord('latitude')
-    lons = cube.coord('longitude')
-
-    if lats.ndim == 2:
-        raise ValueError(
-            'extract_transect: Not implemented for irregular arrays!'
-            + '\nTry regridding the data first.')
-
-    if isinstance(latitude, float) and isinstance(longitude, float):
-        raise ValueError(
-            "extract_transect: Can't slice along lat and lon at the same time"
-        )
-
-    if isinstance(latitude, list) and isinstance(longitude, list):
-        raise ValueError(
-            "extract_transect: Can't reduce lat and lon at the same time"
-        )
-
-    for dim_name, dim_cut, coord in zip(['latitude', 'longitude'],
-                                        [latitude, longitude], [lats, lons]):
-        # ####
-        # Look for the first coordinate.
-        if isinstance(dim_cut, float):
-            coord_index = coord.nearest_neighbour_index(dim_cut)
-            coord_dim = cube.coord_dims(dim_name)[0]
-
-        # ####
-        # Look for the second coordinate.
-        if isinstance(dim_cut, list):
-            coord_dim2 = cube.coord_dims(dim_name)[0]
-            second_coord_range = [coord.nearest_neighbour_index(dim_cut[0]),
-                                  coord.nearest_neighbour_index(dim_cut[1])]
-    # ####
-    # Extracting the line of constant longitude/latitude
-    slices = [slice(None) for i in cube.shape]
-    slices[coord_dim] = coord_index
-
-    if second_coord_range:
-        slices[coord_dim2] = slice(second_coord_range[0],
-                                   second_coord_range[1])
-    return cube[tuple(slices)]
-
-
-def extract_trajectory(cube, latitudes, longitudes, number_points=2):
-    """
-    Extract data along a trajectory.
-
-    latitudes and longitudes are the pairs of coordinates for two points.
-    number_points is the number of points between the two points.
-
-    This version uses the expensive interpolate method, but it may be
-    necessary for irregular grids.
-
-    If only two latitude and longitude coordinates are given,
-    extract_trajectory will produce a cube that interpolates along a line
-    between those two points, and will add `number_points` points between
-    the two corners.
-
-    If more than two points are provided, then
-    extract_trajectory will produce a cube which has interpolated the data
-    of the cube to those points, and `number_points` is not needed.
-
-    Arguments
-    ---------
-    cube: iris.cube.Cube
-        input cube.
-
-    latitudes: list of floats
-        list of latitude coordinates.
-
-    longitudes: list of floats
-        list of longitude coordinates.
-
-    number_points: int
-        number of points to interpolate (optional).
-
-    Returns
-    -------
-    iris.cube.Cube
-        collapsed cube.
-    """
-    from iris.analysis.trajectory import interpolate
-
-    if len(latitudes) != len(longitudes):
-        raise ValueError(
-            'Longitude & Latitude coordinates have different lengths'
-        )
-
-    if len(latitudes) == len(longitudes) == 2:
-        minlat, maxlat = np.min(latitudes), np.max(latitudes)
-        minlon, maxlon = np.min(longitudes), np.max(longitudes)
-
-        latitudes = np.linspace(minlat, maxlat, num=number_points)
-        longitudes = np.linspace(minlon, maxlon, num=number_points)
-
-    points = [('latitude', latitudes), ('longitude', longitudes)]
-    interpolated_cube = interpolate(cube, points)  # Very slow!
-    return interpolated_cube
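Usage sketches for the two extraction helpers (the cube is hypothetical):

    transect = extract_transect(cube, longitude=-28.)   # slice along 28 West
    band = extract_transect(cube, longitude=-28., latitude=[-50., 50.])
    track = extract_trajectory(cube, latitudes=[0., 60.],
                               longitudes=[-30., 10.], number_points=40)

Note that the float/list dispatch above means single latitude/longitude values must be passed as floats (e.g. -28.), not ints.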
diff --git a/esmvaltool/preprocessor/ne_masks/ne_10m_land.README.html b/esmvaltool/preprocessor/ne_masks/ne_10m_land.README.html
deleted file mode 100644
index 30acb5f815..0000000000
--- a/esmvaltool/preprocessor/ne_masks/ne_10m_land.README.html
+++ /dev/null
@@ -1,403 +0,0 @@
[deleted: Natural Earth 1:10m "Land" README (HTML page). Recoverable content: land polygons including major islands, derived from the 10 m coastline and broken into smaller contiguous pieces to speed up processing; known issues: coastline accuracy is suspect for northern Russia and southern Chile, and some rank 5 land should be reclassified as rank 6; version history 1.0.0 through 4.0.0, with the master changelog on GitHub.]
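These ne_masks shapefiles back the land/sea masking in the preprocessor. For reference, a hedged sketch of how such a shapefile can be read for point-in-polygon tests (the path is illustrative and cartopy/shapely are assumed to be available; this is not the preprocessor's actual masking code):

    import cartopy.io.shapereader as shpreader
    from shapely.geometry import Point
    from shapely.ops import unary_union

    # Merge all land polygons into a single geometry.
    reader = shpreader.Reader('esmvaltool/preprocessor/ne_masks/ne_10m_land.shp')
    land = unary_union(list(reader.geometries()))

    def is_land(lon, lat):
        """True if the (lon, lat) point falls on land."""
        return land.contains(Point(lon, lat))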
diff --git a/esmvaltool/preprocessor/ne_masks/ne_10m_land.VERSION.txt b/esmvaltool/preprocessor/ne_masks/ne_10m_land.VERSION.txt
deleted file mode 100644
index 5dbaad6dd1..0000000000
--- a/esmvaltool/preprocessor/ne_masks/ne_10m_land.VERSION.txt
+++ /dev/null
@@ -1 +0,0 @@
-4.1.0
diff --git a/esmvaltool/preprocessor/ne_masks/ne_10m_land.cpg b/esmvaltool/preprocessor/ne_masks/ne_10m_land.cpg
deleted file mode 100644
index 3ad133c048..0000000000
--- a/esmvaltool/preprocessor/ne_masks/ne_10m_land.cpg
+++ /dev/null
@@ -1 +0,0 @@
-UTF-8
\ No newline at end of file
diff --git a/esmvaltool/preprocessor/ne_masks/ne_10m_land.dbf b/esmvaltool/preprocessor/ne_masks/ne_10m_land.dbf
deleted file mode 100644
index fca59dc8fc..0000000000
Binary files a/esmvaltool/preprocessor/ne_masks/ne_10m_land.dbf and /dev/null differ
diff --git a/esmvaltool/preprocessor/ne_masks/ne_10m_land.prj b/esmvaltool/preprocessor/ne_masks/ne_10m_land.prj
deleted file mode 100644
index f45cbadf00..0000000000
--- a/esmvaltool/preprocessor/ne_masks/ne_10m_land.prj
+++ /dev/null
@@ -1 +0,0 @@
-GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]]
\ No newline at end of file
diff --git a/esmvaltool/preprocessor/ne_masks/ne_10m_land.shp b/esmvaltool/preprocessor/ne_masks/ne_10m_land.shp
deleted file mode 100644
index 44f9d4e5a6..0000000000
Binary files a/esmvaltool/preprocessor/ne_masks/ne_10m_land.shp and /dev/null differ
diff --git a/esmvaltool/preprocessor/ne_masks/ne_10m_land.shx b/esmvaltool/preprocessor/ne_masks/ne_10m_land.shx
deleted file mode 100644
index fcc1badd3d..0000000000
Binary files a/esmvaltool/preprocessor/ne_masks/ne_10m_land.shx and /dev/null differ
diff --git a/esmvaltool/preprocessor/ne_masks/ne_10m_ocean.README.html b/esmvaltool/preprocessor/ne_masks/ne_10m_ocean.README.html
deleted file mode 100644
index 880d47d731..0000000000
--- a/esmvaltool/preprocessor/ne_masks/ne_10m_ocean.README.html
+++ /dev/null
@@ -1,402 +0,0 @@
[deleted: Natural Earth 1:10m "Ocean" README (HTML page). Recoverable content: ocean polygon split into contiguous pieces, derived from the 10 m coastline and identical to Bathymetry 0 (sea level); known issue: the Caspian's Garabogaz Aylagy and other "lagoons" might better belong in the lake theme; this theme and its inverse cousin, Land, are used to clip raster imagery; version history 1.0.0 through 4.0.0.]
diff --git a/esmvaltool/preprocessor/ne_masks/ne_10m_ocean.VERSION.txt b/esmvaltool/preprocessor/ne_masks/ne_10m_ocean.VERSION.txt
deleted file mode 100644
index 5dbaad6dd1..0000000000
--- a/esmvaltool/preprocessor/ne_masks/ne_10m_ocean.VERSION.txt
+++ /dev/null
@@ -1 +0,0 @@
-4.1.0
diff --git a/esmvaltool/preprocessor/ne_masks/ne_10m_ocean.cpg b/esmvaltool/preprocessor/ne_masks/ne_10m_ocean.cpg
deleted file mode 100644
index 3ad133c048..0000000000
--- a/esmvaltool/preprocessor/ne_masks/ne_10m_ocean.cpg
+++ /dev/null
@@ -1 +0,0 @@
-UTF-8
\ No newline at end of file
diff --git a/esmvaltool/preprocessor/ne_masks/ne_10m_ocean.dbf b/esmvaltool/preprocessor/ne_masks/ne_10m_ocean.dbf
deleted file mode 100644
index 6437fc75c7..0000000000
Binary files a/esmvaltool/preprocessor/ne_masks/ne_10m_ocean.dbf and /dev/null differ
diff --git a/esmvaltool/preprocessor/ne_masks/ne_10m_ocean.prj b/esmvaltool/preprocessor/ne_masks/ne_10m_ocean.prj
deleted file mode 100644
index f45cbadf00..0000000000
--- a/esmvaltool/preprocessor/ne_masks/ne_10m_ocean.prj
+++ /dev/null
@@ -1 +0,0 @@
-GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]]
\ No newline at end of file
diff --git a/esmvaltool/preprocessor/ne_masks/ne_10m_ocean.shp b/esmvaltool/preprocessor/ne_masks/ne_10m_ocean.shp
deleted file mode 100644
index 28b746bcd1..0000000000
Binary files a/esmvaltool/preprocessor/ne_masks/ne_10m_ocean.shp and /dev/null differ
diff --git a/esmvaltool/preprocessor/ne_masks/ne_10m_ocean.shx b/esmvaltool/preprocessor/ne_masks/ne_10m_ocean.shx
deleted file mode 100644
index 83663ce64a..0000000000
Binary files a/esmvaltool/preprocessor/ne_masks/ne_10m_ocean.shx and /dev/null differ
diff --git a/esmvaltool/preprocessor/ne_masks/ne_50m_ocean.README.html b/esmvaltool/preprocessor/ne_masks/ne_50m_ocean.README.html
deleted file mode 100644
index 9f9b1de533..0000000000
--- a/esmvaltool/preprocessor/ne_masks/ne_50m_ocean.README.html
+++ /dev/null
@@ -1,395 +0,0 @@
[deleted: Natural Earth 1:50m "Ocean" README (HTML page), with essentially the same content as the 1:10m ocean README above; version history 1.0.0 through 4.0.0.]
diff --git a/esmvaltool/preprocessor/ne_masks/ne_50m_ocean.VERSION.txt b/esmvaltool/preprocessor/ne_masks/ne_50m_ocean.VERSION.txt
deleted file mode 100644
index 5dbaad6dd1..0000000000
--- a/esmvaltool/preprocessor/ne_masks/ne_50m_ocean.VERSION.txt
+++ /dev/null
@@ -1 +0,0 @@
-4.1.0
diff --git a/esmvaltool/preprocessor/ne_masks/ne_50m_ocean.cpg b/esmvaltool/preprocessor/ne_masks/ne_50m_ocean.cpg
deleted file mode 100644
index 3ad133c048..0000000000
--- a/esmvaltool/preprocessor/ne_masks/ne_50m_ocean.cpg
+++ /dev/null
@@ -1 +0,0 @@
-UTF-8
\ No newline at end of file
diff --git a/esmvaltool/preprocessor/ne_masks/ne_50m_ocean.dbf b/esmvaltool/preprocessor/ne_masks/ne_50m_ocean.dbf
deleted file mode 100644
index 3c93d9d7f7..0000000000
Binary files a/esmvaltool/preprocessor/ne_masks/ne_50m_ocean.dbf and /dev/null differ
diff --git a/esmvaltool/preprocessor/ne_masks/ne_50m_ocean.prj b/esmvaltool/preprocessor/ne_masks/ne_50m_ocean.prj
deleted file mode 100644
index a30c00a55d..0000000000
--- a/esmvaltool/preprocessor/ne_masks/ne_50m_ocean.prj
+++ /dev/null
@@ -1 +0,0 @@
-GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]
\ No newline at end of file
diff --git a/esmvaltool/preprocessor/ne_masks/ne_50m_ocean.shp b/esmvaltool/preprocessor/ne_masks/ne_50m_ocean.shp
deleted file mode 100644
index 964c3dfa9d..0000000000
Binary files a/esmvaltool/preprocessor/ne_masks/ne_50m_ocean.shp and /dev/null differ
diff --git a/esmvaltool/preprocessor/ne_masks/ne_50m_ocean.shx b/esmvaltool/preprocessor/ne_masks/ne_50m_ocean.shx
deleted file mode 100644
index 32f350f8f5..0000000000
Binary files a/esmvaltool/preprocessor/ne_masks/ne_50m_ocean.shx and /dev/null differ
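The recipe schema deleted just below was used to validate recipes with Yamale, per the schema's own header comment. A minimal validation sketch, assuming the yamale package and an illustrative recipe path:

    import yamale

    # Paths are illustrative; the schema file is the one removed below.
    schema = yamale.make_schema('esmvaltool/recipe_schema.yml')
    data = yamale.make_data('esmvaltool/recipes/recipe_example.yml')
    yamale.validate(schema, data)  # raises an error on schema violations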
diff --git a/esmvaltool/recipe_schema.yml b/esmvaltool/recipe_schema.yml
deleted file mode 100644
index a58df53ee1..0000000000
--- a/esmvaltool/recipe_schema.yml
+++ /dev/null
@@ -1,56 +0,0 @@
-# Generic recipe Yamale schema definition.
-# See https://github.com/23andMe/Yamale for help.
-
----
-# Recipe schema
-documentation: include('documentation')
-datasets: list(include('dataset'), required=False)
-preprocessors: map(map(), required=False)
-diagnostics: map(include('diagnostic'), required=False)
-
----
-# Recipe item definitions
-documentation:
-  description: str()
-  authors: list(str(), min=1)
-  projects: list(str(), required=False)
-  references: list(str(), required=False)
-
-dataset:
-  dataset: str()
-  project: str(required=False)
-  start_year: int(required=False, min=0000, max=5000)
-  end_year: int(required=False, min=0000, max=5000)
-  ensemble: str(required=False)
-  exp: any(str(), list(str()), required=False)
-  mip: str(required=False)
-  realm: str(required=False)
-  shift: str(required=False)
-  tier: int(min=1, max=3, required=False)
-  type: str(required=False)
-
-variable:
-  project: str(required=False)
-  start_year: int(required=False, min=0000, max=4000)
-  end_year: int(required=False, min=0000, max=4000)
-  ensemble: str(required=False)
-  exp: any(str(), list(str()), required=False)
-  mip: str(required=False)
-  preprocessor: str(required=False)
-  reference_dataset: str(required=False)
-  alternative_dataset: str(required=False)
-  fx_files: list(required=False)
-  additional_datasets: list(include('dataset'), required=False)
-
-# TODO: add preprocessor item
-
-diagnostic:
-  scripts: any(null(), map(include('script')))
-  additional_datasets: list(include('dataset'), required=False)
-  description: str(required=False)
-  themes: list(str(), required=False)
-  realms: list(str(), required=False)
-  variables: map(include('variable'), required=False)
-
-script:
-  script: str()
diff --git a/esmvaltool/recipes/bock20jgr/recipe_bock20jgr_fig_1-4.yml b/esmvaltool/recipes/bock20jgr/recipe_bock20jgr_fig_1-4.yml
new file mode 100644
index 0000000000..07475e5d74
--- /dev/null
+++ b/esmvaltool/recipes/bock20jgr/recipe_bock20jgr_fig_1-4.yml
@@ -0,0 +1,485 @@
+# ESMValTool
+# recipe_bock20jgr_fig_1-4.yml
+---
+documentation:
+
+  title: Quantifying progress across different CMIP phases
+
+  description: |
+    Producing Fig. 1, 2, 3, 4 from Bock et al., 2020, JGR
+    Processing of CMIP3 models currently works only in serial mode, due to
+    an issue in the input data still under investigation.
+ + authors: + - bock_lisa + + maintainer: + - lauer_axel + + references: + - bock20jgr + + projects: + - ipcc_ar6 + - crescendo + + +preprocessors: + + clim: + regrid: + target_grid: 2x2 + scheme: linear + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [reference_dataset] + + clim_ref: + regrid: + target_grid: reference_dataset + scheme: linear + multi_model_statistics: + span: full + statistics: [mean] + exclude: [reference_dataset] + + +CMIP6_tas: &cmip6_tas + - {dataset: ACCESS-CM2, grid: gn, institute: CSIRO-ARCCSS} + - {dataset: ACCESS-ESM1-5, grid: gn, institute: CSIRO} + - {dataset: AWI-CM-1-1-MR, grid: gn} + - {dataset: AWI-ESM-1-1-LR, grid: gn} + - {dataset: BCC-CSM2-MR, grid: gn} + - {dataset: BCC-ESM1, grid: gn} + - {dataset: CAMS-CSM1-0, grid: gn} + - {dataset: CanESM5, grid: gn} + - {dataset: CanESM5-CanOE, grid: gn, ensemble: r1i1p2f1} + - {dataset: CESM2, grid: gn} + - {dataset: CESM2-FV2, grid: gn, institute: NCAR} + - {dataset: CESM2-WACCM, grid: gn, institute: NCAR} + - {dataset: CESM2-WACCM-FV2, grid: gn, institute: NCAR} + - {dataset: CIESM} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2} + - {dataset: E3SM-1-0} + - {dataset: E3SM-1-1, institute: E3SM-Project} + - {dataset: EC-Earth3-Veg} + - {dataset: FGOALS-f3-L} + - {dataset: FGOALS-g3, grid: gn} + - {dataset: FIO-ESM-2-0, grid: gn} + - {dataset: GFDL-ESM4, grid: gr1} + - {dataset: GISS-E2-1-G, grid: gn} + - {dataset: GISS-E2-1-H, grid: gn} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn} + - {dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f3, grid: gn} + - {dataset: INM-CM4-8, grid: gr1} + - {dataset: INM-CM5-0, grid: gr1} + - {dataset: IPSL-CM6A-LR} + - {dataset: KACE-1-0-G} + - {dataset: MCM-UA-1-0, grid: gn} + - {dataset: MIROC6, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn} + - {dataset: MPI-ESM-1-2-HAM, grid: gn} + - {dataset: MPI-ESM1-2-HR, grid: gn} + - {dataset: MPI-ESM1-2-LR, grid: gn} + - {dataset: MRI-ESM2-0, grid: gn} + - {dataset: NESM3, grid: gn} + - {dataset: NorESM2-LM, grid: gn, institute: NCC} + - {dataset: NorESM2-MM, grid: gn, institute: NCC} + - {dataset: SAM0-UNICON, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + +CMIP6_HighResMIP_low: &cmip6_highresmip_low + - {dataset: CMCC-CM2-HR4, exp: hist-1950, grid: gn} + - {dataset: CNRM-CM6-1, exp: hist-1950, ensemble: r1i1p1f2} + - {dataset: ECMWF-IFS-LR, exp: hist-1950, ensemble: r1i1p1f1} + - {dataset: HadGEM3-GC31-LL, exp: hist-1950, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-HR, exp: hist-1950, ensemble: r1i1p1f1, grid: gn} + +CMIP6_HighResMIP_high: &cmip6_highresmip_high + - {dataset: CMCC-CM2-VHR4, exp: hist-1950, grid: gn} + - {dataset: CNRM-CM6-1-HR, exp: hist-1950, ensemble: r1i1p1f2} + - {dataset: ECMWF-IFS-HR, exp: hist-1950, ensemble: r1i1p1f1} + - {dataset: HadGEM3-GC31-HM, exp: hist-1950, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-XR, exp: hist-1950, ensemble: r1i1p1f1, grid: gn} + + +CMIP5_tas: &cmip5_tas + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5-2} + - {dataset: CNRM-CM5} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: 
FGOALS-g2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1, start_year: 1861} + - {dataset: GFDL-CM3, start_year: 1860} + - {dataset: GFDL-ESM2G, start_year: 1861} + - {dataset: GFDL-ESM2M, start_year: 1861} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-H} + - {dataset: GISS-E2-R-CC} + - {dataset: GISS-E2-R} + - {dataset: HadCM3, start_year: 1860} + - {dataset: HadGEM2-AO, start_year: 1860} + - {dataset: HadGEM2-CC, start_year: 1860} + - {dataset: HadGEM2-ES, start_year: 1860} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1, start_year: 1851} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + + +CMIP3_tas: &cmip3_tas + - {dataset: bccr_bcm2_0} + - {dataset: cccma_cgcm3_1} + - {dataset: cccma_cgcm3_1_t63} + - {dataset: csiro_mk3_0, start_year: 1871} + - {dataset: gfdl_cm2_0, start_year: 1861} + - {dataset: gfdl_cm2_1, start_year: 1861} + - {dataset: giss_aom} + - {dataset: giss_model_e_h, start_year: 1880} + - {dataset: giss_model_e_r, start_year: 1880} + - {dataset: iap_fgoals1_0_g} + - {dataset: ingv_echam4, start_year: 1870} + - {dataset: inmcm3_0, start_year: 1871} + - {dataset: ipsl_cm4, start_year: 1860} + - {dataset: miroc3_2_medres} + - {dataset: mpi_echam5, start_year: 1860} + - {dataset: mri_cgcm2_3_2a, start_year: 1851} + - {dataset: ncar_ccsm3_0, start_year: 1870} + - {dataset: ncar_pcm1, start_year: 1890} + - {dataset: ukmo_hadcm3, start_year: 1860} + - {dataset: ukmo_hadgem1, start_year: 1860} + + +diagnostics: + + ###################################################### + # Fig. 
1 + ###################################################### + + fig_1_cmip6: &diag_fig1 + title: CMIP6 surface temperature anomalies + description: CMIP6 timeseries of near-surface temperature anomalies + variables: + tas: &var_tas_tsline + preprocessor: clim_ref + reference_dataset: HadCRUT4 + mip: Amon + project: CMIP6 + exp: historical + ensemble: r1i1p1f1 + grid: gr + start_year: 1850 + end_year: 2014 + additional_datasets: *cmip6_tas + additional_datasets: + - {dataset: HadCRUT4, project: OBS, type: ground, version: 1, tier: 2, + end_year: 2017} + scripts: + tsline_anom: &tsline_settings + script: bock20jgr/tsline.ncl + time_avg: "yearly" + ts_anomaly: "anom" + ref_start: 1850 + ref_end: 1900 + ref_mask: true + plot_units: "degC" + y_min: -0.5 + y_max: 1.6 + volcanoes: true + write_stat: true + styleset: CMIP6 + + fig_1_cmip5: + title: CMIP5 surface temperature anomalies + description: CMIP5 timeseries of near-surface temperature anomalies + variables: + tas: + <<: *var_tas_tsline + project: CMIP5 + ensemble: r1i1p1 + start_year: 1850 + end_year: 2004 + additional_datasets: *cmip5_tas + additional_datasets: + - {dataset: HadCRUT4, project: OBS, type: ground, version: 1, tier: 2, + end_year: 2017} + scripts: + tsline_anom: + <<: *tsline_settings + styleset: CMIP5 + + fig_1_cmip3: + title: CMIP3 surface temperature anomalies + description: CMIP3 timeseries of near-surface temperature anomalies + variables: + tas: + <<: *var_tas_tsline + project: CMIP3 + mip: A1 + modeling_realm: atm + exp: 20c3m + frequency: mo + ensemble: run1 + start_year: 1850 + end_year: 1999 + additional_datasets: *cmip3_tas + additional_datasets: + - {dataset: HadCRUT4, project: OBS, type: ground, version: 1, tier: 2, + end_year: 2017, mip: Amon} + scripts: + tsline_anom: + <<: *tsline_settings + styleset: CMIP3 + + + ###################################################### + # Fig. 2 + ###################################################### + + fig_2: + title: Modelled and observed surface temperature anomalies + description: Collect all timeseries of near-surface temperature anomalies + variables: + tas: + <<: *var_tas_tsline + tasUnc1: + short_name: tasConf5 + mip: Amon + tasUnc2: + short_name: tasConf95 + mip: Amon + additional_datasets: + - {dataset: HadCRUT4, project: OBS, type: ground, version: 1, + start_year: 1850, end_year: 2017, tier: 2} + scripts: + collect: + <<: *tsline_settings + script: bock20jgr/tsline_collect.ncl + ancestors: [tas, tasUnc1, tasUnc2, 'fig_1_*/tsline_anom*'] + start_year: 1850 + end_year: 2017 + y_max: 1.2 + ref: ["HadCRUT4"] + order: ["CMIP6_historical", "CMIP5_historical", "CMIP3_20c3m"] + stat_shading: true + ref_shading: false + ref_stderr: true + + + ###################################################### + # Fig. 
3 + ###################################################### + + fig_3_cmip6: + title: CMIP6 MMM surface temperature + description: CMIP6 multi-model mean and bias of near-surface temperature + variables: + tas: &var_cmip6_bias + preprocessor: clim + reference_dataset: ERA5 + mip: Amon + project: CMIP6 + exp: historical + ensemble: r1i1p1f1 + grid: gr + start_year: 1995 + end_year: 2014 + additional_datasets: *cmip6_tas + additional_datasets: + - {dataset: ERA5, project: OBS6, type: reanaly, version: v1, tier: 3} + scripts: + model_bias: &model_bias_settings + script: bock20jgr/model_bias.ncl + projection: Robinson + timemean: annualclim + + + fig_3_cmip5: + title: CMIP5 MMM surface temperature + description: CMIP5 multi-model mean and bias of near-surface temperature + variables: + tas: + <<: *var_cmip6_bias + project: CMIP5 + ensemble: r1i1p1 + start_year: 1985 + end_year: 2004 + additional_datasets: *cmip5_tas + additional_datasets: + - {dataset: ERA5, project: OBS6, type: reanaly, version: v1, tier: 3} + scripts: + model_bias: + <<: *model_bias_settings + + + fig_3_cmip3: + title: CMIP3 MMM surface temperature + description: CMIP3 multi-model mean and bias of near-surface temperature + variables: + tas: + <<: *var_cmip6_bias + mip: A1 + project: CMIP3 + modeling_realm: atm + exp: 20c3m + frequency: mo + ensemble: run1 + start_year: 1980 + end_year: 1999 + additional_datasets: *cmip3_tas + additional_datasets: + - {dataset: ERA5, project: OBS6, mip: Amon, type: reanaly, version: v1, + tier: 3} + scripts: + model_bias: + <<: *model_bias_settings + + + fig_3_highresmip_low: + title: CMIP6 low-res MMM (HighResMIP) surface temperature + description: CMIP6 multi-model mean and bias of near-surface temperature + variables: + tas: + <<: *var_cmip6_bias + additional_datasets: *cmip6_highresmip_low + additional_datasets: + - {dataset: ERA5, project: OBS6, type: reanaly, version: v1, tier: 3} + scripts: + model_bias: + <<: *model_bias_settings + + + fig_3_highresmip_high: + title: CMIP6 high-res MMM (HighResMIP) surface temperature + description: CMIP6 multi-model mean and bias of near-surface temperature + variables: + tas: + <<: *var_cmip6_bias + additional_datasets: *cmip6_highresmip_high + additional_datasets: + - {dataset: ERA5, project: OBS6, type: reanaly, version: v1, tier: 3} + scripts: + model_bias: + <<: *model_bias_settings + + + ###################################################### + # Fig. 
4
+  ######################################################
+
+  fig_4_cmip6:
+    title: CMIP6 MMM precipitation
+    description: CMIP6 multi-model mean and bias of precipitation
+    variables:
+      pr:
+        <<: *var_cmip6_bias
+        reference_dataset: GPCP-V2.3
+        additional_datasets: *cmip6_tas
+    additional_datasets:
+      - {dataset: GPCP-V2.3, project: obs4MIPs, tier: 1}
+    scripts:
+      model_bias:
+        <<: *model_bias_settings
+
+
+  fig_4_cmip5:
+    title: CMIP5 MMM precipitation
+    description: CMIP5 multi-model mean and bias of precipitation
+    variables:
+      pr:
+        <<: *var_cmip6_bias
+        reference_dataset: GPCP-V2.3
+        project: CMIP5
+        ensemble: r1i1p1
+        start_year: 1985
+        end_year: 2004
+        additional_datasets: *cmip5_tas
+    additional_datasets:
+      - {dataset: GPCP-V2.3, project: obs4MIPs, tier: 1}
+    scripts:
+      model_bias:
+        <<: *model_bias_settings
+
+
+  fig_4_cmip3:
+    title: CMIP3 MMM precipitation
+    description: CMIP3 multi-model mean and bias of precipitation
+    variables:
+      pr:
+        <<: *var_cmip6_bias
+        reference_dataset: GPCP-V2.3
+        mip: A1
+        project: CMIP3
+        modeling_realm: atm
+        exp: 20c3m
+        frequency: mo
+        ensemble: run1
+        start_year: 1980
+        end_year: 1999
+        additional_datasets: *cmip3_tas
+    additional_datasets:
+      - dataset: GPCP-V2.3
+        project: obs4MIPs
+        tier: 1
+        mip: Amon
+        frequency: mon
+    scripts:
+      model_bias:
+        <<: *model_bias_settings
+
+
+  fig_4_highresmip_low:
+    title: CMIP6 low-res MMM (HighResMIP) precipitation
+    description: CMIP6 multi-model mean and bias of precipitation
+    variables:
+      pr:
+        <<: *var_cmip6_bias
+        reference_dataset: GPCP-V2.3
+        additional_datasets: *cmip6_highresmip_low
+    additional_datasets:
+      - {dataset: GPCP-V2.3, project: obs4MIPs, tier: 1}
+    scripts:
+      model_bias:
+        <<: *model_bias_settings
+
+
+  fig_4_highresmip_high:
+    title: CMIP6 high-res MMM (HighResMIP) precipitation
+    description: CMIP6 multi-model mean and bias of precipitation
+    variables:
+      pr:
+        <<: *var_cmip6_bias
+        reference_dataset: GPCP-V2.3
+        additional_datasets: *cmip6_highresmip_high
+    additional_datasets:
+      - {dataset: GPCP-V2.3, project: obs4MIPs, tier: 1}
+    scripts:
+      model_bias:
+        <<: *model_bias_settings
diff --git a/esmvaltool/recipes/bock20jgr/recipe_bock20jgr_fig_6-7.yml b/esmvaltool/recipes/bock20jgr/recipe_bock20jgr_fig_6-7.yml
new file mode 100644
index 0000000000..93e4284ec5
--- /dev/null
+++ b/esmvaltool/recipes/bock20jgr/recipe_bock20jgr_fig_6-7.yml
@@ -0,0 +1,1202 @@
+# ESMValTool
+# recipe_bock20jgr_fig_6-7.yml
+---
+documentation:
+
+  title: Quantifying progress across different CMIP phases
+
+  description: |
+    Producing Fig. 6 and 7 from Bock et al., 2020, JGR
+    Processing of CMIP3 models currently works only in serial mode, due to
+    an issue in the input data still under investigation.
+ + authors: + - bock_lisa + + maintainer: + - lauer_axel + + references: + - bock20jgr + + projects: + - ipcc_ar6 + - crescendo + + +preprocessors: + pp850: + extract_levels: + levels: 85000 + scheme: linear + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + + pp500: + extract_levels: + levels: 50000 + scheme: linear + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + + pp400: + extract_levels: + levels: 40000 + scheme: linear + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + + pp200: + extract_levels: + levels: 19900 + scheme: linear + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + + ppNOLEV1: + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + + ppNOLEV1x1: + regrid: + target_grid: 1x1 + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + + regrid_4_5: + regrid: + target_grid: 4x5 + scheme: linear + + +datasets: + # CMIP3 + - &cmip3 {dataset: cccma_cgcm3_1, institute: CCCMA, project: CMIP3, mip: A1, + modeling_realm: atm, exp: 20c3m, frequency: mo, ensemble: run1} + - {<<: *cmip3, dataset: cccma_cgcm3_1_t63, institute: CCCMA} + - {<<: *cmip3, dataset: csiro_mk3_0, institute: CSIRO} + - {<<: *cmip3, dataset: giss_model_e_h, institute: NASA} + - {<<: *cmip3, dataset: giss_model_e_r, institute: NASA} + - {<<: *cmip3, dataset: iap_fgoals1_0_g, institute: LASG} + - {<<: *cmip3, dataset: inmcm3_0, institute: INM} + - {<<: *cmip3, dataset: ipsl_cm4, institute: IPSL} + - {<<: *cmip3, dataset: miroc3_2_hires, institute: NIES} + - {<<: *cmip3, dataset: miroc3_2_medres, institute: NIES} + - {<<: *cmip3, dataset: mpi_echam5, institute: MPIM} + - {<<: *cmip3, dataset: ukmo_hadcm3, institute: UKMO} + # CMIP5 + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CNRM-CM5} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + # CMIP6 + - {dataset: ACCESS-CM2, institute: CSIRO-ARCCSS, grid: gn, ensemble: r1i1p1f1, + project: CMIP6} + - {dataset: ACCESS-ESM1-5, institute: CSIRO, grid: gn, ensemble: r1i1p1f1, + project: CMIP6} + - {dataset: AWI-ESM-1-1-LR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CanESM5, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CESM2-FV2, institute: NCAR, ensemble: r1i1p1f1, grid: gn, + project: CMIP6} + - {dataset: CESM2-WACCM, institute: NCAR, ensemble: r1i1p1f1, grid: gn, + project: CMIP6} + - 
{dataset: CESM2-WACCM-FV2, institute: NCAR, ensemble: r1i1p1f1, grid: gn, + project: CMIP6} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr, project: CMIP6} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr, project: CMIP6} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr, project: CMIP6} + - {dataset: E3SM-1-0, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: E3SM-1-1, institute: E3SM-Project, ensemble: r1i1p1f1, grid: gr, + project: CMIP6} + - {dataset: E3SM-1-1-ECA, institute: E3SM-Project, ensemble: r1i1p1f1, + grid: gr, project: CMIP6} + - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1, project: CMIP6} + - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1, project: CMIP6} + - {dataset: GISS-E2-1-G, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn, project: CMIP6} + - {dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f3, grid: gn, project: CMIP6} + - {dataset: INM-CM4-8, grid: gr1, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: INM-CM5-0, grid: gr1, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: IPSL-CM6A-LR, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn, project: CMIP6} + - {dataset: MPI-ESM-1-2-HAM, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: MPI-ESM1-2-HR, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: NorCPM1, grid: gn, institute: NCC, ensemble: r1i1p1f1, + project: CMIP6} + - {dataset: NorESM2-LM, grid: gn, institute: NCC, ensemble: r1i1p1f1, + project: CMIP6} + - {dataset: NorESM2-MM, grid: gn, institute: NCC, ensemble: r1i1p1f1, + project: CMIP6} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn, project: CMIP6} + + +diagnostics: + + ###################################################### + # Fig. 
6
+  ######################################################
+
+  clt:
+    title: Calculation of performance metrics (total cloud cover)
+    description: Total cloud cover
+    themes:
+      - clouds
+    realms:
+      - atmos
+    variables:
+      clt:
+        preprocessor: ppNOLEV1
+        reference_dataset: ESACCI-CLOUD
+        alternative_dataset: PATMOS-x
+        mip: Amon
+        project: CMIP5
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 1980
+        end_year: 1999
+        additional_datasets:
+          - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR}
+          - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO}
+
+          - {dataset: CMCC-CM}
+          - {dataset: CMCC-CMS}
+          - {dataset: EC-EARTH, ensemble: r6i1p1}
+          - {dataset: FGOALS-g2}
+          - {dataset: GISS-E2-H, ensemble: r1i1p2}
+          - {dataset: HadCM3}
+
+          - {dataset: AWI-CM-1-1-MR, grid: gn, ensemble: r1i1p1f1, project: CMIP6}
+          - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6}
+          - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn, project: CMIP6}
+          - {dataset: CIESM, grid: gr, ensemble: r1i1p1f1, project: CMIP6}
+
+          - {dataset: ESACCI-CLOUD, project: OBS, type: sat,
+             version: AVHRR-fv3.0, tier: 2, start_year: 1982}
+          - {dataset: PATMOS-x, project: OBS, type: sat, version: NOAA, tier: 2,
+             start_year: 1982}
+    scripts:
+      grading: &grading_settings
+        script: perfmetrics/main.ncl
+        # Plot type ('cycle', 'zonal', 'latlon', 'cycle_latlon', 'cycle_zonal')
+        plot_type: cycle_latlon
+        # Time average ('opt' argument of time_operations.ncl)
+        time_avg: monthlyclim
+        # Region ('global', 'trop', 'nhext', 'shext')
+        region: global
+        # Plot standard deviation ('all', 'none', 'ref_model' or dataset name)
+        plot_stddev: ref_model
+        # Plot legend in a separate file
+        legend_outside: true
+        # Plot style
+        styleset: CMIP5
+        # Calculate grading
+        calc_grading: true
+        # Metric ('RMSD', 'BIAS', 'taylor')
+        metric: [RMSD]
+        # Normalization ('mean', 'median', 'centered_median', 'none')
+        normalization: [centered_median]
+
+
+  tas:
+    title: Calculation of performance metrics (near-surface air temperature)
+    description: Near-surface air temperature
+    themes:
+      - phys
+    realms:
+      - atmos
+    variables:
+      tas:
+        preprocessor: ppNOLEV1
+        reference_dataset: ERA-Interim
+        alternative_dataset: NCEP-NCAR-R1
+        mip: Amon
+        project: CMIP5
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 1980
+        end_year: 1999
+        additional_datasets:
+          - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR}
+          - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO}
+
+          - {dataset: CanCM4}
+          - {dataset: CESM1-FASTCHEM}
+          - {dataset: CESM1-WACCM}
+          - {dataset: CMCC-CESM}
+          - {dataset: CMCC-CM}
+          - {dataset: CMCC-CMS}
+          - {dataset: CNRM-CM5-2}
+          - {dataset: EC-EARTH, ensemble: r6i1p1}
+          - {dataset: FGOALS-g2}
+          - {dataset: GFDL-CM2p1}
+          - {dataset: GISS-E2-H, ensemble: r1i1p2}
+          - {dataset: HadCM3}
+          - {dataset: MPI-ESM-P}
+          - {dataset: MRI-ESM1}
+
+          - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6}
+          - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn, project: CMIP6}
+          - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6}
+          - {dataset: MCM-UA-1-0, grid: gn, ensemble: r1i1p1f1, project: CMIP6}
+
+          - {dataset: ERA-Interim, project: OBS6, type: reanaly,
+             version: 1, tier: 3}
+          - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, version: 1, tier: 2}
+    scripts:
+      grading:
+        <<: *grading_settings
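The grading computed by perfmetrics/main.ncl is, in essence, an area-weighted RMSD of each model's climatology against the reference dataset; a back-of-envelope NumPy sketch (field and weight arrays are illustrative, and the authoritative implementation is the NCL source):

    import numpy as np

    def weighted_rmsd(model, ref, weights):
        """Area-weighted root-mean-square difference of two fields."""
        return np.sqrt(np.sum(weights * (model - ref) ** 2) / np.sum(weights))

The 'centered_median' normalization then rescales each variable's grades relative to their median across datasets, so grades for different variables become comparable (one plausible reading; see the NCL implementation for the exact formula).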
+
+
+  ts:
+    title: Calculation of performance metrics (sea-surface (skin) temperature)
+    description: Sea-surface (skin) temperature
+    themes:
+      - phys
+    realms:
+      - atmos
+      - ocean
+    variables:
+      ts:
+        preprocessor: ppNOLEV1x1
+        reference_dataset: ESACCI-SST
+        alternative_dataset: HadISST
+        mip: Amon
+        project: CMIP5
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 1980
+        end_year: 1999
+        additional_datasets:
+          - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR}
+          - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO}
+
+          - {dataset: CanCM4}
+          - {dataset: CMCC-CM}
+          - {dataset: CMCC-CMS}
+          - {dataset: FGOALS-g2}
+          - {dataset: GFDL-CM2p1}
+          - {dataset: GISS-E2-H, ensemble: r1i1p2}
+          - {dataset: HadCM3}
+
+          - {dataset: AWI-CM-1-1-MR, grid: gn, ensemble: r1i1p1f1, project: CMIP6}
+          - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6}
+          - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn, project: CMIP6}
+          - {dataset: CIESM, grid: gr, ensemble: r1i1p1f1, project: CMIP6}
+          - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6}
+          - {dataset: MCM-UA-1-0, grid: gn, ensemble: r1i1p1f1, project: CMIP6}
+
+          - {dataset: ESACCI-SST, project: OBS, type: sat,
+             version: L4-GHRSST-SSTdepth-OSTIA-GLOB, tier: 2, start_year: 1992}
+          - {dataset: HadISST, project: OBS, type: reanaly, version: 1, tier: 2}
+    scripts:
+      grading:
+        <<: *grading_settings
+
+
+  pr:
+    title: Calculation of performance metrics (precipitation)
+    description: Precipitation
+    themes:
+      - phys
+    realms:
+      - atmos
+    variables:
+      pr:
+        preprocessor: ppNOLEV1
+        reference_dataset: GPCP-V2.2
+        alternative_dataset: GHCN
+        mip: Amon
+        project: CMIP5
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 1980
+        end_year: 1999
+        additional_datasets:
+          - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR}
+          - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO}
+
+          - {dataset: CanCM4}
+          - {dataset: CESM1-FASTCHEM}
+          - {dataset: CESM1-WACCM}
+          - {dataset: CMCC-CESM}
+          - {dataset: CMCC-CM}
+          - {dataset: CMCC-CMS}
+          - {dataset: CNRM-CM5-2}
+          - {dataset: EC-EARTH, ensemble: r6i1p1}
+          - {dataset: FGOALS-g2}
+          - {dataset: GFDL-CM2p1}
+          - {dataset: GISS-E2-H, ensemble: r1i1p2}
+          - {dataset: HadCM3}
+          - {dataset: MPI-ESM-P}
+          - {dataset: MRI-ESM1}
+
+          - {dataset: AWI-CM-1-1-MR, grid: gn, ensemble: r1i1p1f1, project: CMIP6}
+          - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6}
+          - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn, project: CMIP6}
+          - {dataset: CIESM, grid: gr, ensemble: r1i1p1f1, project: CMIP6}
+          - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6}
+          - {dataset: MCM-UA-1-0, grid: gn, ensemble: r1i1p1f1, project: CMIP6}
+
+          - {dataset: GPCP-V2.2, project: obs4MIPs, level: L3, tier: 1}
+          - {dataset: GHCN, project: OBS, type: ground, version: 1, tier: 2}
+    scripts:
+      grading:
+        <<: *grading_settings
+
+
+  zg500:
+    title: Calculation of performance metrics (geopotential height)
+    description: Geopotential height 500 hPa global
+    themes:
+      - phys
+    realms:
+      - atmos
+    variables:
+      zg:
+        preprocessor: pp500
+        reference_dataset: ERA-Interim
+        alternative_dataset: NCEP-NCAR-R1
+        mip: Amon
+        project: CMIP5
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 1980
+        end_year: 1999
+        additional_datasets:
+          - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR}
+
+          - {dataset: CanCM4}
+          - {dataset: CESM1-FASTCHEM}
+          - {dataset: CESM1-WACCM}
+          - {dataset: CMCC-CESM}
+          - {dataset: CMCC-CM}
+          - {dataset: CMCC-CMS}
+          - {dataset: CNRM-CM5-2}
+          - {dataset: FGOALS-g2}
+          - {dataset: GFDL-CM2p1}
+          - {dataset: GISS-E2-H, ensemble: r1i1p2}
+          - {dataset: HadCM3}
+          - {dataset: MPI-ESM-P}
+
+          - {dataset: AWI-CM-1-1-MR, grid: gn, ensemble: r1i1p1f1, project: CMIP6}
+          - {dataset: BCC-ESM1, ensemble: r1i1p1f1,
grid: gn, project: CMIP6} + - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn, project: CMIP6} + - {dataset: CIESM, grid: gr, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: MCM-UA-1-0, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, version: 1, tier: 2} + scripts: + grading: + <<: *grading_settings + + + psl: + title: Calculation of performance metrics (sea level pressure) + description: Pressure at Sea Level. + themes: + - phys + realms: + - atmos + variables: + psl: + preprocessor: ppNOLEV1 + reference_dataset: JRA-55 + alternative_dataset: ERA-Interim + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 1999 + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO} + + - {dataset: CanCM4} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CMS} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: HadCM3} + - {dataset: MPI-ESM-P} + + - {dataset: AWI-CM-1-1-MR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn, project: CMIP6} + - {dataset: CIESM, grid: gr, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: MCM-UA-1-0, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + + - {dataset: JRA-55, project: ana4mips, type: reanalysis, tier: 1} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + scripts: + grading: + <<: *grading_settings + + + lwcre: + title: Calculation of performance metrics (longwave cloud radiative effect) + description: Longwave cloud radiative effect + themes: + - clouds + realms: + - atmos + variables: + lwcre: + preprocessor: ppNOLEV1 + reference_dataset: CERES-EBAF + mip: Amon + derive: true + force_derivation: false + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 1999 + additional_datasets: + - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO} + + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: FGOALS-g2} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: HadCM3} + + - {dataset: AWI-CM-1-1-MR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: CanESM5-CanOE, ensemble: r2i1p2f1, grid: gn, project: CMIP6} + - {dataset: CIESM, grid: gr, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1, start_year: 2001, end_year: 2015} + scripts: + grading: + <<: *grading_settings + + + swcre: + title: Calculation of performance metrics (shortwave cloud radiative effect) + description: Shortwave cloud radiative effect + themes: + - clouds + realms: + - atmos + variables: + swcre: + preprocessor: ppNOLEV1 + reference_dataset: CERES-EBAF + mip: Amon + derive: true + force_derivation: false + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 1999 + additional_datasets: + - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO} + + - {dataset: CMCC-CM} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: HadCM3} + + - {dataset: 
AWI-CM-1-1-MR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn, project: CMIP6} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1, start_year: 2001, end_year: 2015} + scripts: + grading: + <<: *grading_settings + + + rlut: + title: Calculation of performance metrics (longwave radiation) + description: All-sky longwave radiation + themes: + - phys + realms: + - atmos + variables: + rlut: + preprocessor: ppNOLEV1 + reference_dataset: CERES-EBAF + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 1999 + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO} + + - {dataset: CanCM4} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: HadCM3} + + - {dataset: AWI-CM-1-1-MR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: CanESM5-CanOE, ensemble: r2i1p2f1, grid: gn, project: CMIP6} + - {dataset: CIESM, grid: gr, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: MCM-UA-1-0, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1, start_year: 2001, end_year: 2015} + scripts: + grading: + <<: *grading_settings + + + hus400: + title: Calculation of performance metrics (specific humidity) + description: Specific humidity at 400 hPa global. + themes: + - phys + realms: + - atmos + variables: + hus: + preprocessor: pp400 + reference_dataset: AIRS-2-1 + alternative_dataset: ERA-Interim + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 1999 + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM2p1} + - {dataset: MRI-ESM1} + + - {dataset: AWI-CM-1-1-MR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CanESM5-CanOE, ensemble: r2i1p2f1, grid: gn, project: CMIP6} + - {dataset: CIESM, grid: gr, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: MCM-UA-1-0, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + + - {dataset: AIRS-2-1, project: obs4MIPs, level: L3, tier: 1, start_year: 2003, end_year: 2010} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + scripts: + grading: + <<: *grading_settings + + + rsut: + title: Calculation of performance metrics (shortwave radiation) + description: All-sky shortwave radiation + themes: + - phys + realms: + - atmos + variables: + rsut: + preprocessor: ppNOLEV1 + reference_dataset: CERES-EBAF + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 1999 + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO} + + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: HadCM3} + + - {dataset: AWI-CM-1-1-MR, grid: gn, ensemble: 
r1i1p1f1, project: CMIP6} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn, project: CMIP6} + - {dataset: CIESM, grid: gr, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1, start_year: 2001, end_year: 2015} + scripts: + grading: + <<: *grading_settings + + + ua200: + title: Calculation of performance metrics (eastward wind) + description: Eastward wind at 200 hPa global + themes: + - atmDyn + realms: + - atmos + variables: + ua: + preprocessor: pp200 + reference_dataset: ERA-Interim + alternative_dataset: NCEP-NCAR-R1 + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 1999 + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + + - {dataset: CanCM4} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5-2} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: HadCM3} + - {dataset: MPI-ESM-P} + - {dataset: MRI-ESM1} + + - {dataset: AWI-CM-1-1-MR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn, project: CMIP6} + - {dataset: CIESM, grid: gr, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: MCM-UA-1-0, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, version: 1, tier: 2} + scripts: + grading: + <<: *grading_settings + + + va850: + title: Calculation of performance metrics (northward wind) + description: Northward wind at 850 hPa global. + themes: + - atmDyn + realms: + - atmos + variables: + va: + preprocessor: pp850 + reference_dataset: ERA-Interim + alternative_dataset: NCEP-NCAR-R1 + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 1999 + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + + - {dataset: CanCM4} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5-2} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: HadCM3} + - {dataset: MPI-ESM-P} + - {dataset: MRI-ESM1} + + - {dataset: AWI-CM-1-1-MR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn, project: CMIP6} + - {dataset: CIESM, grid: gr, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: MCM-UA-1-0, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, version: 1, tier: 2} + scripts: + grading: + <<: *grading_settings + + + ua850: + title: Calculation of performance metrics (eastward wind) + description: Eastward wind at 850 hPa global. 
+ themes: + - atmDyn + realms: + - atmos + variables: + ua: + preprocessor: pp850 + reference_dataset: ERA-Interim + alternative_dataset: NCEP-NCAR-R1 + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 1999 + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + + - {dataset: CanCM4} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5-2} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: HadCM3} + - {dataset: MPI-ESM-P} + - {dataset: MRI-ESM1} + + - {dataset: AWI-CM-1-1-MR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn, project: CMIP6} + - {dataset: CIESM, grid: gr, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: MCM-UA-1-0, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, version: 1, tier: 2} + scripts: + grading: + <<: *grading_settings + + + va200: + title: Calculation of performance metrics (northward wind) + description: Northward wind at 200 hPa global. + themes: + - atmDyn + realms: + - atmos + variables: + va: + preprocessor: pp200 + reference_dataset: ERA-Interim + alternative_dataset: NCEP-NCAR-R1 + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 1999 + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + + - {dataset: CanCM4} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5-2} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: HadCM3} + - {dataset: MPI-ESM-P} + - {dataset: MRI-ESM1} + + - {dataset: AWI-CM-1-1-MR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn, project: CMIP6} + - {dataset: CIESM, grid: gr, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: MCM-UA-1-0, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, version: 1, tier: 2} + scripts: + grading: + <<: *grading_settings + + + ta850: + title: Calculation of performance metrics (air temperature) + description: Air temperature at 850 hPa global. 
+ themes: + - phys + realms: + - atmos + variables: + ta: + preprocessor: pp850 + reference_dataset: ERA-Interim + alternative_dataset: NCEP-NCAR-R1 + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 1999 + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + + - {dataset: CanCM4} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5-2} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: HadCM3} + - {dataset: MPI-ESM-P} + - {dataset: MRI-ESM1} + + - {dataset: AWI-CM-1-1-MR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn, project: CMIP6} + - {dataset: CIESM, grid: gr, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: MCM-UA-1-0, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, version: 1, tier: 2} + scripts: + grading: + <<: *grading_settings + + + ta200: + title: Calculation of performance metrics (air temperature) + description: Air temperature at 200 hPa global. + themes: + - phys + realms: + - atmos + variables: + ta: + preprocessor: pp200 + reference_dataset: ERA-Interim + alternative_dataset: NCEP-NCAR-R1 + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 1999 + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + + - {dataset: CanCM4} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5-2} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: HadCM3} + - {dataset: MPI-ESM-P} + - {dataset: MRI-ESM1} + + - {dataset: AWI-CM-1-1-MR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn, project: CMIP6} + - {dataset: CIESM, grid: gr, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: MCM-UA-1-0, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, version: 1, tier: 2} + scripts: + grading: + <<: *grading_settings + + + collect: + title: Collection of performance metrics + description: Wrapper to collect and plot previously calculated metrics + scripts: + fig_6_RMSD: + script: perfmetrics/collect.ncl + ancestors: ['*/grading*'] + metric: RMSD + label_bounds: [-0.5, 0.5] + label_scale: 0.1 + disp_values: false + cm_interval: [2, 241] + # Sort datasets in alphabetical order (excluding MMM) + project_order: ['CMIP3', 'CMIP5', 'CMIP6'] + # Sort diagnostics in a specific order (name = 'diagnostic'-'region') + diag_order: ['ta850-global', 'ta200-global', 'ua850-global', + 'ua200-global', 'va850-global', 'va200-global', + 'zg500-global', 'psl-global', 'hus400-global', + 'tas-global', 'ts-global', 'pr-global', 'clt-global', +
'rlut-global', 'rsut-global', 'lwcre-global', + 'swcre-global'] + + + ###################################################### + # Fig. 7 + ###################################################### + + tas_cor: &corr_diag + title: Pattern correlation (near-surface air temperature) + description: Calculate pattern correlation value + variables: + tas: &var_settings + preprocessor: regrid_4_5 + reference_dataset: ERA-Interim + alternative_dataset: NCEP-NCAR-R1 + project: CMIP5 + exp: historical + ensemble: r1i1p1 + mip: Amon + start_year: 1980 + end_year: 1999 + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO} + + - {dataset: CanCM4} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5-2} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: HadCM3} + - {dataset: MPI-ESM-P} + - {dataset: MRI-ESM1} + + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn, project: CMIP6} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: MCM-UA-1-0, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, + tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, version: 1, tier: 2} + scripts: + pattern_cor: &fig_pattern_cor + script: bock20jgr/corr_pattern.ncl + + pr_cor: + <<: *corr_diag + title: Pattern correlation (precipitation) + variables: + pr: + <<: *var_settings + reference_dataset: GPCP-V2.2 + alternative_dataset: GHCN + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO} + + - {dataset: CanCM4} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5-2} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: HadCM3} + - {dataset: MPI-ESM-P} + - {dataset: MRI-ESM1} + + - {dataset: AWI-CM-1-1-MR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn, project: CMIP6} + - {dataset: CIESM, grid: gr, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: MCM-UA-1-0, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + + - {dataset: GPCP-V2.2, project: obs4MIPs, level: L3, tier: 1} + - {dataset: GHCN, project: OBS, type: ground, version: 1, tier: 2} + + psl_cor: + <<: *corr_diag + title: Pattern correlation (sea level pressure) + variables: + psl: + <<: *var_settings + reference_dataset: JRA-55 + alternative_dataset: ERA-Interim + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO} + + - {dataset: CanCM4} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CMS} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: HadCM3} + - {dataset: MPI-ESM-P} + + - {dataset: AWI-CM-1-1-MR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: BCC-ESM1, ensemble: 
r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn, project: CMIP6} + - {dataset: CIESM, grid: gr, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: MCM-UA-1-0, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + + - {dataset: JRA-55, project: ana4mips, type: reanalysis, tier: 1} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, + tier: 3} + + rlut_cor: + <<: *corr_diag + title: Pattern correlation (longwave radiation) + variables: + rlut: + <<: *var_settings + reference_dataset: CERES-EBAF + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO} + + - {dataset: CanCM4} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: HadCM3} + + - {dataset: AWI-CM-1-1-MR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: CanESM5-CanOE, ensemble: r2i1p2f1, grid: gn, project: CMIP6} + - {dataset: CIESM, grid: gr, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: MCM-UA-1-0, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1, start_year: 2001, end_year: 2015} + + swcre_cor: + <<: *corr_diag + title: Pattern correlation (shortwave cloud radiative effect) + variables: + swcre: + <<: *var_settings + reference_dataset: CERES-EBAF + derive: true + force_derivation: false + additional_datasets: + - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO} + + - {dataset: CMCC-CM} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: HadCM3} + + - {dataset: AWI-CM-1-1-MR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn, project: CMIP6} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1, start_year: 2001, end_year: 2015} + + fig_7: + title: Collection of pattern correlations + description: Wrapper to collect and plot previously calculated correlations + scripts: + cor_collect: + script: bock20jgr/corr_pattern_collect.ncl + ancestors: ['*/pattern_cor'] + diag_order: ['tas_cor', 'pr_cor', 'rlut_cor', 'swcre_cor', 'psl_cor'] diff --git a/esmvaltool/recipes/bock20jgr/recipe_bock20jgr_fig_8-10.yml b/esmvaltool/recipes/bock20jgr/recipe_bock20jgr_fig_8-10.yml new file mode 100644 index 0000000000..bc277209d7 --- /dev/null +++ b/esmvaltool/recipes/bock20jgr/recipe_bock20jgr_fig_8-10.yml @@ -0,0 +1,595 @@ +# ESMValTool +# recipe_bock20jgr_fig_8-10.yml +--- +documentation: + + title: Quantifying progress across different CMIP phases + + description: | + Producing Fig. 8, 9, 10 from Bock et al., 2020, JGR + Processing of CMIP3 models currently works only in serial mode, due to + an issue in the input data still under investigation. 
+ + authors: + - bock_lisa + - schlund_manuel + + maintainer: + - lauer_axel + + references: + - bock20jgr + + projects: + - ipcc_ar6 + - crescendo + + +preprocessors: + + clim: + regrid: + target_grid: 2x2 + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [reference_dataset] + + regrid: &regrid + regrid: + target_grid: 5x5 + scheme: linear + + global_mean: + area_statistics: + operator: mean + + +CMIP6_tas: &cmip6_all + - {dataset: ACCESS-CM2, grid: gn, institute: CSIRO-ARCCSS} + - {dataset: ACCESS-ESM1-5, grid: gn, institute: CSIRO} + - {dataset: AWI-CM-1-1-MR, grid: gn} + - {dataset: AWI-ESM-1-1-LR, grid: gn} + - {dataset: BCC-CSM2-MR, grid: gn} + - {dataset: BCC-ESM1, grid: gn} + - {dataset: CAMS-CSM1-0, grid: gn} + - {dataset: CanESM5, grid: gn} + - {dataset: CanESM5-CanOE, grid: gn, ensemble: r1i1p2f1} + - {dataset: CESM2, grid: gn} + - {dataset: CESM2-FV2, grid: gn, institute: NCAR} + - {dataset: CESM2-WACCM, grid: gn, institute: NCAR} + - {dataset: CESM2-WACCM-FV2, grid: gn, institute: NCAR} + - {dataset: CIESM} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2} + - {dataset: E3SM-1-0} + - {dataset: E3SM-1-1, institute: E3SM-Project} + - {dataset: FGOALS-f3-L} + - {dataset: FGOALS-g3, grid: gn} + - {dataset: GFDL-ESM4, grid: gr1} + - {dataset: GISS-E2-1-G, grid: gn} + - {dataset: GISS-E2-1-H, grid: gn} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn} + - {dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f3, grid: gn} + - {dataset: INM-CM4-8, grid: gr1} + - {dataset: INM-CM5-0, grid: gr1} + - {dataset: IPSL-CM6A-LR} + - {dataset: KACE-1-0-G} + - {dataset: MIROC6, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn} + - {dataset: MPI-ESM-1-2-HAM, grid: gn} + - {dataset: MPI-ESM1-2-HR, grid: gn} + - {dataset: MPI-ESM1-2-LR, grid: gn} + - {dataset: MRI-ESM2-0, grid: gn} + - {dataset: NESM3, grid: gn} + - {dataset: NorESM2-LM, grid: gn, institute: NCC} + - {dataset: NorESM2-MM, grid: gn, institute: NCC} + - {dataset: SAM0-UNICON, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + + +CMIP5_all: &cmip5_all + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CNRM-CM5-2} + - {dataset: CNRM-CM5} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-H} + - {dataset: GISS-E2-R-CC} + - {dataset: GISS-E2-R} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + + +CMIP3_all: &cmip3_all + - {dataset: cccma_cgcm3_1} + - {dataset: cccma_cgcm3_1_t63} + - {dataset: csiro_mk3_0} + - {dataset: gfdl_cm2_0} + - {dataset: gfdl_cm2_1} + - {dataset: giss_model_e_h} + - {dataset:
giss_model_e_r} + - {dataset: iap_fgoals1_0_g} + - {dataset: inmcm3_0} + - {dataset: ipsl_cm4} + - {dataset: miroc3_2_hires} + - {dataset: miroc3_2_medres} + - {dataset: mpi_echam5} + - {dataset: mri_cgcm2_3_2a} + - {dataset: ncar_ccsm3_0} + - {dataset: ncar_pcm1} + - {dataset: ukmo_hadcm3} + + +CMIP5_rtnt: &cmip5_rtnt + # Models with missing data (on all ESGF nodes) + # EC-EARTH (no rsut) + # CNRM-CM5-2 (only 140 years available) + # IPSL-CM5A-MR (only 140 years available) + - {dataset: ACCESS1-0, exp: piControl, start_year: 300, end_year: 449} + - {dataset: ACCESS1-0, exp: abrupt4xCO2, start_year: 300, end_year: 449} + - {dataset: ACCESS1-3, exp: piControl, start_year: 250, end_year: 399} + - {dataset: ACCESS1-3, exp: abrupt4xCO2, start_year: 250, end_year: 399} + - {dataset: bcc-csm1-1, exp: piControl, start_year: 160, end_year: 309} + - {dataset: bcc-csm1-1, exp: abrupt4xCO2, start_year: 160, end_year: 309} + - {dataset: bcc-csm1-1-m, exp: piControl, start_year: 240, end_year: 389} + - {dataset: bcc-csm1-1-m, exp: abrupt4xCO2, start_year: 240, end_year: 389} + - {dataset: BNU-ESM, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: BNU-ESM, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: CanESM2, exp: piControl, start_year: 2321, end_year: 2470} + - {dataset: CanESM2, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + # Wrong start year for piControl? (branch_time = 2.) + - {dataset: CCSM4, exp: piControl, start_year: 250, end_year: 399} + - {dataset: CCSM4, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM5, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM5, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: CSIRO-Mk3-6-0, exp: piControl, start_year: 104, end_year: 253} + - {dataset: CSIRO-Mk3-6-0, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: FGOALS-g2, exp: piControl, start_year: 490, end_year: 639} + - {dataset: FGOALS-g2, exp: abrupt4xCO2, start_year: 490, end_year: 639} + - {dataset: GFDL-CM3, exp: piControl, start_year: 1, end_year: 150} + - {dataset: GFDL-CM3, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: GFDL-ESM2G, exp: piControl, start_year: 1, end_year: 150} + - {dataset: GFDL-ESM2G, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: GFDL-ESM2M, exp: piControl, start_year: 1, end_year: 150} + - {dataset: GFDL-ESM2M, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: GISS-E2-H, exp: piControl, start_year: 2660, end_year: 2809} + - {dataset: GISS-E2-H, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: GISS-E2-R, exp: piControl, start_year: 4200, end_year: 4349} + - {dataset: GISS-E2-R, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + # Experiments start at 1859-12-01 + - {dataset: HadGEM2-ES, exp: piControl, start_year: 1860, end_year: 2009} + - {dataset: HadGEM2-ES, exp: abrupt4xCO2, start_year: 1860, end_year: 2009} + - {dataset: inmcm4, exp: piControl, start_year: 2090, end_year: 2239} + - {dataset: inmcm4, exp: abrupt4xCO2, start_year: 2090, end_year: 2239} + - {dataset: IPSL-CM5B-LR, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: IPSL-CM5B-LR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: MIROC5, exp: piControl, start_year: 2100, end_year: 2249} + - {dataset: MIROC5, exp: abrupt4xCO2, start_year: 2100, end_year: 2249} + - {dataset: MIROC-ESM, exp: piControl, start_year: 1880, end_year: 2029} + - {dataset: MIROC-ESM, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - 
{dataset: MPI-ESM-LR, exp: piControl, start_year: 1880, end_year: 2029} + - {dataset: MPI-ESM-LR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM-MR, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM-MR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM-P, exp: piControl, start_year: 1866, end_year: 2015} + - {dataset: MPI-ESM-P, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: MRI-CGCM3, exp: piControl, start_year: 1891, end_year: 2040} + - {dataset: MRI-CGCM3, exp: abrupt4xCO2, start_year: 1851, end_year: 2000} + - {dataset: NorESM1-M, exp: piControl, start_year: 700, end_year: 849} + - {dataset: NorESM1-M, exp: abrupt4xCO2, start_year: 1, end_year: 150} + +CMIP6_rtnt: &cmip6_rtnt + - {dataset: ACCESS-CM2, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 950, end_year: 1099, institute: CSIRO-ARCCSS} + - {dataset: ACCESS-CM2, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 950, end_year: 1099, institute: CSIRO-ARCCSS} + - {dataset: ACCESS-ESM1-5, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 101, end_year: 250} + - {dataset: ACCESS-ESM1-5, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 101, end_year: 250} + #- {dataset: AWI-CM-1-1-MR, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 2650, end_year: 2799} + #- {dataset: AWI-CM-1-1-MR, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: BCC-CSM2-MR, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: BCC-CSM2-MR, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: BCC-ESM1, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: BCC-ESM1, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: CAMS-CSM1-0, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 3030, end_year: 3179} + - {dataset: CAMS-CSM1-0, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 3030, end_year: 3179} + - {dataset: CanESM5, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 5201, end_year: 5350} + - {dataset: CanESM5, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: CESM2, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150} + - {dataset: CESM2, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150} + - {dataset: CESM2-FV2, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 321, end_year: 470, institute: NCAR} + - {dataset: CESM2-FV2, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150, institute: NCAR} + - {dataset: CESM2-WACCM, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150, institute: NCAR} + - {dataset: CESM2-WACCM, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150, institute: NCAR} + - {dataset: CESM2-WACCM-FV2, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 301, end_year: 450, institute: NCAR} + - {dataset: CESM2-WACCM-FV2, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150, institute: NCAR} + - {dataset: CMCC-CM2-SR5, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: CMCC-CM2-SR5, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM6-1, exp: piControl, ensemble: r1i1p1f2, 
grid: gr, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM6-1, exp: abrupt-4xCO2, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM6-1-HR, exp: piControl, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM6-1-HR, exp: abrupt-4xCO2, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: CNRM-ESM2-1, exp: piControl, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: CNRM-ESM2-1, exp: abrupt-4xCO2, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: E3SM-1-0, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 101, end_year: 250} + - {dataset: E3SM-1-0, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1, end_year: 150} + - {dataset: EC-Earth3-Veg, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: EC-Earth3-Veg, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} + # parent_time_units messed up + - {dataset: FGOALS-f3-L, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 600, end_year: 749} + - {dataset: FGOALS-f3-L, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: FGOALS-g3, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 463, end_year: 612} + - {dataset: FGOALS-g3, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 463, end_year: 612} + - {dataset: GISS-E2-1-G, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 4150, end_year: 4299} + - {dataset: GISS-E2-1-G, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: GISS-E2-1-H, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 3180, end_year: 3329} + - {dataset: GISS-E2-1-H, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: HadGEM3-GC31-LL, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: HadGEM3-GC31-LL, exp: abrupt-4xCO2, ensemble: r1i1p1f3, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: HadGEM3-GC31-MM, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: HadGEM3-GC31-MM, exp: abrupt-4xCO2, ensemble: r1i1p1f3, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: INM-CM5-0, exp: piControl, ensemble: r1i1p1f1, grid: gr1, start_year: 2099, end_year: 2248} + - {dataset: INM-CM5-0, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr1, start_year: 1850, end_year: 1999} + - {dataset: INM-CM4-8, exp: piControl, ensemble: r1i1p1f1, grid: gr1, start_year: 1947, end_year: 2096} + - {dataset: INM-CM4-8, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr1, start_year: 1850, end_year: 1999} + - {dataset: IPSL-CM6A-LR, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 1870, end_year: 2019} + - {dataset: IPSL-CM6A-LR, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: KACE-1-0-G, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 2300, end_year: 2449} + - {dataset: KACE-1-0-G, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: MIROC6, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 3200, end_year: 3349} + - {dataset: MIROC6, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 3200, end_year: 3349} + - {dataset: MIROC-ES2L, exp: piControl, ensemble: r1i1p1f2, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: 
MIROC-ES2L, exp: abrupt-4xCO2, ensemble: r1i1p1f2, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM-1-2-HAM, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1950, end_year: 2099} + - {dataset: MPI-ESM-1-2-HAM, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM1-2-HR, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM1-2-HR, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM1-2-LR, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM1-2-LR, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MRI-ESM2-0, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MRI-ESM2-0, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + # parent_time_units not correct + - {dataset: NESM3, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 550, end_year: 699} + - {dataset: NESM3, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: NorCPM1, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 251, end_year: 400, institute: NCC} + - {dataset: NorCPM1, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150, institute: NCC} + - {dataset: NorESM2-MM, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1201, end_year: 1350} + - {dataset: NorESM2-MM, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150} + - {dataset: SAM0-UNICON, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 274, end_year: 423} + - {dataset: SAM0-UNICON, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + # Wrong start year for piControl (must be 1201) + - {dataset: TaiESM1, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 201, end_year: 350} + - {dataset: TaiESM1, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150} + +CMIP6_rtmt: &cmip6_rtmt + # branch_time_in_child weird + - {dataset: MCM-UA-1-0, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150} + - {dataset: MCM-UA-1-0, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150} + + +diagnostics: + + ###################################################### + # Fig. 8 + ###################################################### + + fig_8_ecs_cmip3: + title: CMIP3 Effective Climate Sensitivity (ECS) + description: Calculate ECS for all available CMIP3 models. + scripts: + ecs: + script: climate_metrics/ecs.py + calculate_mmm: false + read_external_file: external_sources/ipcc_ar4.yml + output_attributes: + project: CMIP3 + + fig_8_ecs_cmip5: + title: CMIP5 Effective Climate Sensitivity (ECS) + description: Calculate ECS for all available CMIP5 models. + variables: + tas_rtnt: &ecs_settings_cmip5 + short_name: tas + preprocessor: global_mean + project: CMIP5 + ensemble: r1i1p1 + mip: Amon + additional_datasets: *cmip5_rtnt + rtnt: + <<: *ecs_settings_cmip5 + short_name: rtnt + derive: true + additional_datasets: *cmip5_rtnt + scripts: + ecs: + script: climate_metrics/ecs.py + calculate_mmm: false + + fig_8_ecs_cmip6: + title: CMIP6 Effective Climate Sensitivity (ECS) + description: Calculate ECS for all available CMIP6 models. 
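+ # Note: rtnt is a derived variable (derive: true below), whereas rtmt is + # read directly; MCM-UA-1-0 is therefore handled via the separate + # CMIP6_rtmt group defined above.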
+ variables: + tas_rtnt: &ecs_settings_cmip6 + short_name: tas + preprocessor: global_mean + project: CMIP6 + mip: Amon + additional_datasets: *cmip6_rtnt + tas_rtmt: + <<: *ecs_settings_cmip6 + additional_datasets: *cmip6_rtmt + rtnt: + <<: *ecs_settings_cmip6 + short_name: rtnt + derive: true + additional_datasets: *cmip6_rtnt + rtmt: + <<: *ecs_settings_cmip6 + short_name: rtmt + additional_datasets: *cmip6_rtmt + scripts: + ecs: + script: climate_metrics/ecs.py + calculate_mmm: false + + fig_8_ecs_barplot: + title: Effective Climate Sensitivity (ECS) + description: Create barplot of ECS for CMIP3, CMIP5 and CMIP6. + scripts: + barplot: + script: climate_metrics/create_barplot.py + ancestors: [ + 'fig_8_ecs_cmip3/ecs', + 'fig_8_ecs_cmip5/ecs', + 'fig_8_ecs_cmip6/ecs', + ] + add_mean: true + label_attribute: project + order: ['CMIP3', 'CMIP5', 'CMIP6'] + patterns: ['ecs.nc'] + sort_descending: true + subplots_kwargs: + figsize: [15, 4] + value_labels: true + y_range: [0.0, 6.0] + + + ###################################################### + # Fig. 9 + ###################################################### + + fig_9_cmip3: &fig_9_settings + title: CMIP3 climatology shortwave CRE + description: differences of multi-model mean and reference dataset + themes: + - clouds + realms: + - atmos + variables: + swcre: &var_swcre + preprocessor: clim + reference_dataset: CERES-EBAF + project: CMIP3 + exp: 20c3m + ensemble: run1 + mip: A1 + modeling_realm: atm + frequency: mo + start_year: 1980 + end_year: 1999 + derive: true + additional_datasets: *cmip3_all + additional_datasets: + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, start_year: 2001, end_year: 2010, tier: 1} + scripts: + clim: + script: clouds/clouds_ipcc.ncl + projection: Robinson + timemean: annualclim + explicit_cn_levels: [-40., -35., -30., -25., -20., -15., -10., -5., 0., + 5., 10., 15., 20., 25., 30., 35., 40.] + + fig_9_cmip5: + <<: *fig_9_settings + title: CMIP5 climatology shortwave CRE + variables: + swcre: + <<: *var_swcre + project: CMIP5 + exp: historical + ensemble: r1i1p1 + mip: Amon + modeling_realm: atmos + frequency: mon + additional_datasets: *cmip5_all + + fig_9_cmip6: + <<: *fig_9_settings + title: CMIP6 climatology shortwave CRE + variables: + swcre: + <<: *var_swcre + project: CMIP6 + exp: historical + ensemble: r1i1p1f1 + mip: Amon + modeling_realm: atmos + frequency: mon + grid: gr + additional_datasets: *cmip6_all + + + ###################################################### + # Fig. 10 + ###################################################### + + fig_10_cmip5: + title: CMIP5 cloud feedback + description: Calculate grid-level CRE feedback parameters for CMIP5 models.
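+ # tas is reduced to a global mean, while the derived CRE variables + # (netcre, lwcre, swcre) are kept on the common 5x5 grid from the + # 'regrid' preprocessor for grid-level feedback parameters.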
+ variables: + tas_global_rtnt: &fig_10_settings_cmip5 + short_name: tas + preprocessor: global_mean + project: CMIP5 + ensemble: r1i1p1 + mip: Amon + additional_datasets: *cmip5_rtnt + netcre_grid_level: + <<: *fig_10_settings_cmip5 + short_name: netcre + preprocessor: regrid + derive: true + lwcre_grid_level: + <<: *fig_10_settings_cmip5 + short_name: lwcre + preprocessor: regrid + derive: true + swcre_grid_level: + <<: *fig_10_settings_cmip5 + short_name: swcre + preprocessor: regrid + derive: true + scripts: + feedback_parameters: &script_feedback_parameters + script: climate_metrics/feedback_parameters.py + calculate_mmm: true + only_consider_mmm: true + output_attributes: # Dummy attributes necessary for mlr/plot.py + tag: cre_feedback_parameter + var_type: prediction_output + project: CMIP5 + start_year: 1 + end_year: 150 + + fig_10_cmip6: + title: CMIP6 cloud feedback + description: Calculate grid-level CRE feedback parameters for CMIP6 models. + variables: + tas_global_rtnt: &fig_10_settings_cmip6 + short_name: tas + preprocessor: global_mean + project: CMIP6 + mip: Amon + additional_datasets: *cmip6_rtnt + netcre_grid_level: + <<: *fig_10_settings_cmip6 + short_name: netcre + preprocessor: regrid + derive: true + lwcre_grid_level: + <<: *fig_10_settings_cmip6 + short_name: lwcre + preprocessor: regrid + derive: true + swcre_grid_level: + <<: *fig_10_settings_cmip6 + short_name: swcre + preprocessor: regrid + derive: true + scripts: + feedback_parameters: + <<: *script_feedback_parameters + output_attributes: # Dummy attributes necessary for mlr/plot.py + tag: cre_feedback_parameter + var_type: prediction_output + project: CMIP6 + start_year: 1 + end_year: 150 + + fig_10_netcre: + title: Net cloud radiative effect + description: Plot figure 10 (netcre). + scripts: + plot: &script_plot_fig_10 + script: mlr/plot.py + ancestors: ['fig_10_*/feedback_parameters'] + group_by_attribute: project + pattern: lambda_netcre_vs_latitude-longitude_MultiModelMean_all_150_years.nc + plot_map: + plot_kwargs: + cbar_label: '$\lambda_{netcre}$ [W m$^{-2}$ K$^{-1}$]' + cmap: bwr + vmin: -7.5 + vmax: 7.5 + plot_map_abs_biases: + plot_kwargs: + cbar_label: '$\Delta\lambda_{netcre}$ [W m$^{-2}$ K$^{-1}$]' + cmap: bwr + vmin: -2.5 + vmax: 2.5 + years_in_title: false + + fig_10_lwcre: + title: Longwave cloud radiative effect + description: Plot figure 10 (lwcre). + scripts: + plot: + <<: *script_plot_fig_10 + pattern: lambda_lwcre_vs_latitude-longitude_MultiModelMean_all_150_years.nc + plot_map: + plot_kwargs: + cbar_label: '$\lambda_{lwcre}$ [W m$^{-2}$ K$^{-1}$]' + cmap: bwr + vmin: -7.5 + vmax: 7.5 + plot_map_abs_biases: + plot_kwargs: + cbar_label: '$\Delta\lambda_{lwcre}$ [W m$^{-2}$ K$^{-1}$]' + cmap: bwr + vmin: -2.5 + vmax: 2.5 + + fig_10_swcre: + title: Shortwave cloud radiative effect + description: Plot figure 10 (swcre).
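+ # Reuses the plot settings anchored as script_plot_fig_10 in fig_10_netcre; + # only the input file pattern and the colorbar labels differ.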
+ scripts: + plot: + <<: *script_plot_fig_10 + pattern: lambda_swcre_vs_latitude-longitude_MultiModelMean_all_150_years.nc + plot_map: + plot_kwargs: + cbar_label: '$\lambda_{swcre}$ [W m$^{-2}$ K$^{-1}$]' + cmap: bwr + vmin: -7.5 + vmax: 7.5 + plot_map_abs_biases: + plot_kwargs: + cbar_label: '$\Delta\lambda_{swcre}$ [W m$^{-2}$ K$^{-1}$]' + cmap: bwr + vmin: -2.5 + vmax: 2.5 diff --git a/esmvaltool/recipes/recipe_clouds_bias.yml b/esmvaltool/recipes/clouds/recipe_clouds_bias.yml similarity index 93% rename from esmvaltool/recipes/recipe_clouds_bias.yml rename to esmvaltool/recipes/clouds/recipe_clouds_bias.yml index d490286f16..d424397b22 100644 --- a/esmvaltool/recipes/recipe_clouds_bias.yml +++ b/esmvaltool/recipes/clouds/recipe_clouds_bias.yml @@ -2,15 +2,17 @@ # recipe_clouds_bias.yml --- documentation: + title: Annual mean climatologies (MMM) + description: | Diagnostics of clouds and hydrological cycle multi-model mean, mean bias, absolute bias, relative bias. authors: - - laue_ax + - lauer_axel maintainer: - - laue_ax + - lauer_axel references: - flato13ipcc @@ -129,6 +131,7 @@ diagnostics: # ********************************************************************** clouds_bias_tas: + title: Near-surface temperature climatology (MMM) description: IPCC AR5 Ch. 9, Fig. 9.2 (near-surface temperature) themes: - clouds @@ -140,7 +143,7 @@ diagnostics: reference_dataset: ERA-Interim mip: Amon additional_datasets: - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, start_year: 1986, end_year: 2005, tier: 3} scripts: clim: &clim_settings @@ -159,6 +162,7 @@ diagnostics: # ********************************************************************** clouds_bias_pr: + title: Precipitation climatology (MMM) description: IPCC AR5 Ch. 9, Fig. 9.4 (precipitation) themes: - clouds @@ -167,11 +171,10 @@ diagnostics: variables: pr: preprocessor: clim - reference_dataset: GPCP-SG + reference_dataset: GPCP-V2.2 mip: Amon additional_datasets: - - {dataset: GPCP-SG, project: obs4mips, level: L3, version: v2.2, - start_year: 1986, end_year: 2005, tier: 1} + - {dataset: GPCP-V2.2, project: obs4MIPs, level: L3, start_year: 1986, end_year: 2005, tier: 1} scripts: clim: <<: *clim_settings @@ -184,6 +187,7 @@ diagnostics: # ********************************************************************** clouds_bias_clt: + title: Total cloud cover climatology (MMM) description: multi-model mean bias of annual mean compared with a reference dataset (observations). 
themes: @@ -193,11 +197,10 @@ diagnostics: variables: clt: preprocessor: clim - reference_dataset: MODIS + reference_dataset: MODIS-1-0 mip: Amon additional_datasets: - - {dataset: MODIS, project: obs4mips, level: L3, version: C5, - start_year: 2001, end_year: 2010, tier: 1} + - {dataset: MODIS-1-0, project: obs4MIPs, level: L3, start_year: 2001, end_year: 2010, tier: 1} scripts: clim: <<: *clim_settings diff --git a/esmvaltool/recipes/recipe_clouds_ipcc.yml b/esmvaltool/recipes/clouds/recipe_clouds_ipcc.yml similarity index 93% rename from esmvaltool/recipes/recipe_clouds_ipcc.yml rename to esmvaltool/recipes/clouds/recipe_clouds_ipcc.yml index c74e58049f..2fb14b4795 100644 --- a/esmvaltool/recipes/recipe_clouds_ipcc.yml +++ b/esmvaltool/recipes/clouds/recipe_clouds_ipcc.yml @@ -2,15 +2,17 @@ # recipe_clouds_ipcc.yml --- documentation: + title: Climatologies of cloud radiative effects + description: | Diagnostics of clouds and hydrological cycle multi-model mean bias, zonal means; similar to IPCC AR5 fig. 9.5. authors: - - laue_ax + - lauer_axel maintainer: - - laue_ax + - lauer_axel references: - flato13ipcc @@ -130,6 +132,7 @@ diagnostics: # ********************************************************************** clouds_ipcc_swcre: + title: Climatology shortwave CRE description: differences of multi-model mean and reference dataset themes: - clouds @@ -142,8 +145,7 @@ mip: Amon derive: true additional_datasets: - - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, - start_year: 2001, end_year: 2010, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, start_year: 2001, end_year: 2010, tier: 1} scripts: clim: &clim_settings script: clouds/clouds_ipcc.ncl @@ -152,6 +154,7 @@ timemean: annualclim clouds_ipcc_lwcre: + title: Climatology longwave CRE description: differences of multi-model mean and reference dataset themes: - clouds @@ -164,13 +167,13 @@ mip: Amon derive: true additional_datasets: - - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, - start_year: 2001, end_year: 2010, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, start_year: 2001, end_year: 2010, tier: 1} scripts: clim: <<: *clim_settings clouds_ipcc_netcre: + title: Climatology net CRE description: differences of multi-model mean and reference dataset themes: - clouds @@ -183,8 +186,7 @@ mip: Amon derive: true additional_datasets: - - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, - start_year: 2001, end_year: 2010, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, start_year: 2001, end_year: 2010, tier: 1} scripts: clim: <<: *clim_settings diff --git a/esmvaltool/recipes/recipe_lauer13jclim.yml b/esmvaltool/recipes/clouds/recipe_lauer13jclim.yml similarity index 83% rename from esmvaltool/recipes/recipe_lauer13jclim.yml rename to esmvaltool/recipes/clouds/recipe_lauer13jclim.yml index c4ecf8a901..9acbcfcbd1 100644 --- a/esmvaltool/recipes/recipe_lauer13jclim.yml +++ b/esmvaltool/recipes/clouds/recipe_lauer13jclim.yml @@ -2,15 +2,17 @@ # recipe_lauer13jclim.yml --- documentation: + title: Cloud diagnostics + description: | Diagnostics of clouds and hydrological cycle reproducing selected figures from Lauer and Hamilton (2013).
authors: - - laue_ax + - lauer_axel maintainer: - - laue_ax + - lauer_axel references: - lauer13jclim @@ -127,6 +129,7 @@ diagnostics: # ========================================================================== clouds_fig1_lwp: + title: Climatology (liquid water path) description: climatological annual means themes: - clouds @@ -139,6 +142,13 @@ mip: Amon derive: true additional_datasets: + # The original recipe uses the UWisc dataset, which is no longer + # maintained by the authors and has been superseded + # by MAC-LWP (Elsaesser et al., 2017) + # https://doi.org/10.1175/JCLI-D-16-0902.1 + # We recommend using MAC-LWP + #- {dataset: MAC-LWP, project: OBS, type: sat, version: v1, + # start_year: 1988, end_year: 2007, tier: 3} - {dataset: UWisc, project: OBS, type: sat, version: v2, start_year: 1988, end_year: 2007, tier: 3} scripts: @@ -156,6 +166,7 @@ showdiff: false clouds_fig1_clt: + title: Climatology (total cloud cover) description: climatological annual means themes: - clouds @@ -164,16 +175,16 @@ variables: clt: preprocessor: clim - reference_dataset: MODIS + reference_dataset: MODIS-1-0 mip: Amon additional_datasets: - - {dataset: MODIS, project: obs4mips, level: L3, version: C5, - start_year: 2001, end_year: 2010, tier: 1} + - {dataset: MODIS-1-0, project: obs4MIPs, level: L3, start_year: 2001, end_year: 2010, tier: 1} scripts: clim: <<: *clim_settings clouds_fig1_pr: + title: Climatology (precipitation) description: climatological annual means themes: - clouds @@ -182,16 +193,16 @@ variables: pr: preprocessor: clim - reference_dataset: GPCP-SG + reference_dataset: GPCP-V2.2 mip: Amon additional_datasets: - - {dataset: GPCP-SG, project: obs4mips, level: L3, version: v2.2, - start_year: 1986, end_year: 2005, tier: 1} + - {dataset: GPCP-V2.2, project: obs4MIPs, level: L3, start_year: 1986, end_year: 2005, tier: 1} scripts: clim: <<: *clim_settings clouds_fig1_swcre: + title: Climatology (shortwave CRE) description: climatological annual means themes: - clouds @@ -205,13 +216,13 @@ mip: Amon derive: true additional_datasets: - - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, - start_year: 2001, end_year: 2010, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, start_year: 2001, end_year: 2010, tier: 1} scripts: clim: <<: *clim_settings clouds_fig1_lwcre: + title: Climatology (longwave CRE) description: climatological annual means themes: - clouds @@ -225,8 +236,7 @@ mip: Amon derive: true additional_datasets: - - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, - start_year: 2001, end_year: 2010, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, start_year: 2001, end_year: 2010, tier: 1} scripts: clim: <<: *clim_settings @@ -237,6 +247,7 @@ # ========================================================================== clouds_fig3_clt: + title: Taylor diagram (total cloud cover) description: climatological annual means themes: - clouds @@ -245,11 +256,10 @@ variables: clt: preprocessor: clim - reference_dataset: MODIS + reference_dataset: MODIS-1-0 mip: Amon additional_datasets: - - {dataset: MODIS, project: obs4mips, level: L3, version: C5, - start_year: 2001, end_year: 2010, tier: 1} + - {dataset: MODIS-1-0, project: obs4MIPs, level: L3, start_year: 2001, end_year: 2010, tier: 1} scripts: clim: &taylor_settings script: clouds/clouds_taylor.ncl @@ -292,6 +302,7 @@ valid_fraction: 0.5
clouds_fig3_lwp: + title: Taylor diagram (liquid water path) description: climatological annual means themes: - clouds @@ -305,13 +316,13 @@ diagnostics: mip: Amon derive: true additional_datasets: - - {dataset: UWisc, project: OBS, type: sat, version: v2, - start_year: 1988, end_year: 2007, tier: 3} + - {dataset: UWisc, project: OBS, type: sat, version: v2, start_year: 1988, end_year: 2007, tier: 3} scripts: clim: <<: *taylor_settings clouds_fig3_swcre: + title: Taylor diagram (shortwave CRE) description: climatological annual means themes: - clouds @@ -325,13 +336,13 @@ diagnostics: mip: Amon derive: true additional_datasets: - - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, - start_year: 2001, end_year: 2010, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, start_year: 2001, end_year: 2010, tier: 1} scripts: clim: <<: *taylor_settings clouds_fig3_lwcre: + title: Taylor diagram (longwave CRE) description: climatological annual means themes: - clouds @@ -345,13 +356,13 @@ diagnostics: mip: Amon derive: true additional_datasets: - - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, - start_year: 2001, end_year: 2010, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, start_year: 2001, end_year: 2010, tier: 1} scripts: clim: <<: *taylor_settings clouds_fig3_pr: + title: Taylor diagram (precipitation) description: climatological annual means themes: - clouds @@ -360,11 +371,10 @@ diagnostics: variables: pr: preprocessor: clim - reference_dataset: GPCP-SG + reference_dataset: GPCP-V2.2 mip: Amon additional_datasets: - - {dataset: GPCP-SG, project: obs4mips, level: L3, version: v2.2, - start_year: 1986, end_year: 2005, tier: 1} + - {dataset: GPCP-V2.2, project: obs4MIPs, level: L3, start_year: 1986, end_year: 2005, tier: 1} scripts: clim: <<: *taylor_settings @@ -377,6 +387,7 @@ diagnostics: # ========================================================================== clouds_fig8_lwp: + title: Interannual variability (liquid water path) description: interannual variability themes: - clouds @@ -389,8 +400,7 @@ diagnostics: mip: Amon derive: true additional_datasets: - - {dataset: UWisc, project: OBS, type: sat, version: v2, - start_year: 1988, end_year: 2007, tier: 3} + - {dataset: UWisc, project: OBS, type: sat, version: v2, start_year: 1988, end_year: 2007, tier: 3} scripts: clim: &intera_settings script: clouds/clouds_interannual.ncl @@ -413,6 +423,7 @@ diagnostics: timemean: annualclim clouds_fig8_clt: + title: Interannual variability (total cloud cover) description: interannual variability themes: - clouds @@ -421,16 +432,16 @@ diagnostics: variables: clt: preprocessor: clim - reference_dataset: MODIS + reference_dataset: MODIS-1-0 mip: Amon additional_datasets: - - {dataset: MODIS, project: obs4mips, level: L3, version: C5, - start_year: 2001, end_year: 2010, tier: 1} + - {dataset: MODIS-1-0, project: obs4MIPs, level: L3, start_year: 2001, end_year: 2010, tier: 1} scripts: clim: <<: *intera_settings clouds_fig8_swcre: + title: Interannual variability (shortwave CRE) description: interannual variability themes: - clouds @@ -444,13 +455,13 @@ diagnostics: mip: Amon derive: true additional_datasets: - - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, - start_year: 2001, end_year: 2010, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, start_year: 2001, end_year: 2010, tier: 1} scripts: clim: <<: *intera_settings clouds_fig8_lwcre: + title: Interannual variability (longwave CRE) 
description: interannual variability themes: - clouds @@ -464,13 +475,13 @@ mip: Amon derive: true additional_datasets: - - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, - start_year: 2001, end_year: 2010, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, start_year: 2001, end_year: 2010, tier: 1} scripts: clim: <<: *intera_settings clouds_fig8_pr: + title: Interannual variability (precipitation) description: interannual variability themes: - clouds @@ -479,11 +490,10 @@ variables: pr: preprocessor: clim - reference_dataset: GPCP-SG + reference_dataset: GPCP-V2.2 mip: Amon additional_datasets: - - {dataset: GPCP-SG, project: obs4mips, level: L3, version: v2.2, - start_year: 1986, end_year: 2005, tier: 1} + - {dataset: GPCP-V2.2, project: obs4MIPs, level: L3, start_year: 1986, end_year: 2005, tier: 1} scripts: clim: <<: *intera_settings diff --git a/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig1_clim.yml b/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig1_clim.yml new file mode 100644 index 0000000000..0ecd79e458 --- /dev/null +++ b/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig1_clim.yml @@ -0,0 +1,428 @@ +# recipe_lauer22jclim_fig1_clim.yml +--- +documentation: + title: Cloud diagnostics v2 (climatologies) + + description: | + Multi-year annual means of cloud properties. + + authors: + - lauer_axel + + maintainer: + - lauer_axel + + references: + - lauer22jclim + + projects: + - cmug + + +preprocessors: + clim: + regrid: + target_grid: 2x2 + scheme: linear + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [CERES-EBAF, CLARA-AVHRR, CLOUDSAT-L2, ERA5, ERA-Interim, + ESACCI-CLOUD, ESACCI-WATERVAPOUR, HadISST, ISCCP-FH, MAC-LWP, + MODIS, PATMOS-x, reference_dataset, SSMI-MERIS] +# annual_statistics: +# operator: mean + + +CMIP5: &cmip5_models + - {dataset: ACCESS1-0, ensemble: r1i1p1} + - {dataset: ACCESS1-3, ensemble: r1i1p1} + - {dataset: bcc-csm1-1, ensemble: r1i1p1} + - {dataset: bcc-csm1-1-m, ensemble: r1i1p1} + - {dataset: BNU-ESM, ensemble: r1i1p1} + - {dataset: CanESM2, ensemble: r1i1p1} + - {dataset: CCSM4, ensemble: r1i1p1} + - {dataset: CESM1-BGC, ensemble: r1i1p1} + - {dataset: CESM1-CAM5, ensemble: r1i1p1} + - {dataset: CESM1-FASTCHEM, ensemble: r1i1p1} + - {dataset: CESM1-WACCM, ensemble: r1i1p1} + - {dataset: CMCC-CESM, ensemble: r1i1p1} + - {dataset: CMCC-CM, ensemble: r1i1p1} + # missing data - {dataset: CMCC-CMS, ensemble: r1i1p1} + - {dataset: CSIRO-Mk3-6-0, ensemble: r1i1p1} + - {dataset: FGOALS-g2, ensemble: r1i1p1} + - {dataset: FIO-ESM, ensemble: r1i1p1} + - {dataset: GFDL-CM3, ensemble: r1i1p1} + - {dataset: GFDL-ESM2G, ensemble: r1i1p1} + - {dataset: GFDL-ESM2M, ensemble: r1i1p1} + - {dataset: GISS-E2-H-CC, ensemble: r1i1p1} + - {dataset: GISS-E2-H, ensemble: r1i1p1} + - {dataset: GISS-E2-R-CC, ensemble: r1i1p1} + - {dataset: GISS-E2-R, ensemble: r1i1p1} + - {dataset: HadGEM2-CC, ensemble: r1i1p1} + - {dataset: HadGEM2-ES, ensemble: r1i1p1} + - {dataset: inmcm4, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-LR, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-MR, ensemble: r1i1p1} + - {dataset: IPSL-CM5B-LR, ensemble: r1i1p1} + - {dataset: MIROC4h, ensemble: r1i1p1} + - {dataset: MIROC5, ensemble: r1i1p1} + - {dataset: MIROC-ESM-CHEM, ensemble: r1i1p1} + - {dataset: MIROC-ESM, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, ensemble: r1i1p1} + - {dataset: MPI-ESM-MR, ensemble: r1i1p1} + - {dataset: MPI-ESM-P, ensemble: r1i1p1} + - {dataset: MRI-CGCM3, ensemble: r1i1p1} + - {dataset:
MRI-ESM1, ensemble: r1i1p1} + - {dataset: NorESM1-ME, ensemble: r1i1p1} + - {dataset: NorESM1-M, ensemble: r1i1p1} + +CMIP6: &cmip6_models + - {dataset: AWI-ESM-1-1-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn} + # time_coord_problem_rlut - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-FV2, institute: NCAR, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-WACCM, institute: NCAR, ensemble: r1i1p1f1, grid: gn} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr} + # no_prw - {dataset: FGOALS-f3-L, ensemble: r2i1p1f1, grid: gr} + - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn} + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1} + - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1} + - {dataset: GISS-E2-1-G, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn} + - {dataset: IPSL-CM6A-LR, ensemble: r3i1p1f1, grid: gr} + - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn} + - {dataset: MPI-ESM-1-2-HAM, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-HR, institute: MPI-M, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + + +diagnostics: + + # ========================================================================== + # Geographical distribution of multi-year annual/seasonal means (panel plot) + # ========================================================================== + + clouds_clim_cmip5: + description: climatological annual means + themes: + - clouds + realms: + - atmos + variables: + clt: + preprocessor: clim + mip: Amon + project: CMIP5 + exp: historical + start_year: 1986 + end_year: 2005 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: CLARA-AVHRR, project: OBS, type: sat, version: V002_01, + tier: 3, start_year: 1982, end_year: 2018} + - {dataset: PATMOS-x, project: OBS, type: sat, version: NOAA, + tier: 2, start_year: 1982, end_year: 2016} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, + tier: 3, start_year: 2003, end_year: 2018} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, + tier: 3} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: '1', + tier: 3} + clivi: + preprocessor: clim + mip: Amon + project: CMIP5 + exp: historical + start_year: 1986 + end_year: 2005 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: CLOUDSAT-L2, project: OBS, type: sat, + version: P1-R05-gridbox-average-noprecip, + start_year: 2006, end_year: 2017, tier: 3} + - {dataset: CLARA-AVHRR, 
project: OBS, type: sat, version: V002_01, + tier: 3, start_year: 1982, end_year: 2018} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, + tier: 3, start_year: 2003, end_year: 2018} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, + tier: 3} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: '1', + tier: 3} + lwp: + preprocessor: clim + mip: Amon + project: CMIP5 + exp: historical + start_year: 1986 + end_year: 2005 + derive: true + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: CLARA-AVHRR, project: OBS, type: sat, + version: V002_01, tier: 3, + start_year: 1982, end_year: 2018} + - {dataset: CLOUDSAT-L2, project: OBS, type: sat, + version: P1-R05-gridbox-average-noprecip, + start_year: 2006, end_year: 2017, tier: 3} + - {dataset: MAC-LWP, project: OBS, type: sat, version: v1, + tier: 3, start_year: 1988, end_year: 2016} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, + tier: 3, start_year: 2003, end_year: 2018} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, + tier: 3} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: '1', + tier: 3} + swcre: + preprocessor: clim + mip: Amon + project: CMIP5 + exp: historical + start_year: 1986 + end_year: 2005 + derive: true + reference_dataset: CERES-EBAF + additional_datasets: + - {dataset: CERES-EBAF, project: OBS, type: sat, version: Ed4.1, + tier: 2, start_year: 2001, end_year: 2021} + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, + tier: 2, start_year: 1984, end_year: 2016} + lwcre: + preprocessor: clim + mip: Amon + project: CMIP5 + exp: historical + start_year: 1986 + end_year: 2005 + derive: true + reference_dataset: CERES-EBAF + additional_datasets: + - {dataset: CERES-EBAF, project: OBS, type: sat, version: Ed4.1, + tier: 2, start_year: 2001, end_year: 2021} + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, + tier: 2, start_year: 1984, end_year: 2016} + prw: + preprocessor: clim + mip: Amon + project: CMIP5 + exp: historical + start_year: 1986 + end_year: 2005 + reference_dataset: ESACCI-WATERVAPOUR + additional_datasets: + - {dataset: ESACCI-WATERVAPOUR, project: OBS6, type: sat, + version: CDR2-L3-COMBI-05deg-fv3.1, tier: 3, start_year: 2003, + end_year: 2017} + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, tier: 2, + start_year: 1984, end_year: 2016} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: '1', + tier: 3} + additional_datasets: *cmip5_models + scripts: + avgclt: &settings5 + script: clouds/clouds.ncl + projection: Robinson + timemean: annualclim + showdiff: true + multiobs_uncertainty: true + multiobs_exclude: ["ERA-Interim", "ERA5"] + filename_add: cmip5 + var: clt + avgclivi: + <<: *settings5 + var: clivi + avglwp: + <<: *settings5 + var: lwp + avgswcre: + <<: *settings5 + var: swcre + avglwcre: + <<: *settings5 + var: lwcre + avgprw: + <<: *settings5 + var: prw + + clouds_clim_cmip6: + description: climatological annual means + themes: + - clouds + realms: + - atmos + variables: + clt: + preprocessor: clim + mip: Amon + project: CMIP6 + exp: historical + start_year: 1995 + 
end_year: 2014 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: CLARA-AVHRR, project: OBS, type: sat, + version: V002_01, tier: 3, start_year: 1982, end_year: 2018} + - {dataset: PATMOS-x, project: OBS, type: sat, version: NOAA, + tier: 2, start_year: 1982, end_year: 2016} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, + tier: 3, start_year: 2003, end_year: 2018} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, + tier: 3} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: '1', + tier: 3} + clivi: + preprocessor: clim + mip: Amon + project: CMIP6 + exp: historical + start_year: 1995 + end_year: 2014 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: CLOUDSAT-L2, project: OBS, type: sat, + version: P1-R05-gridbox-average-noprecip, + start_year: 2006, end_year: 2017, tier: 3} + - {dataset: CLARA-AVHRR, project: OBS, type: sat, version: V002_01, + tier: 3, start_year: 1982, end_year: 2018} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, + tier: 3, start_year: 2003, end_year: 2018} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, + tier: 3} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: '1', + tier: 3} + lwp: + preprocessor: clim + mip: Amon + project: CMIP6 + exp: historical + start_year: 1995 + end_year: 2014 + derive: true + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: CLARA-AVHRR, project: OBS, type: sat, + version: V002_01, tier: 3, start_year: 1982, end_year: 2018} + - {dataset: CLOUDSAT-L2, project: OBS, type: sat, + version: P1-R05-gridbox-average-noprecip, + start_year: 2006, end_year: 2017, tier: 3} + - {dataset: MAC-LWP, project: OBS, type: sat, version: v1, tier: 3, + start_year: 1988, end_year: 2016} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, + tier: 3, start_year: 2003, end_year: 2018} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, + tier: 3} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: '1', + tier: 3} + swcre: + preprocessor: clim + mip: Amon + project: CMIP6 + exp: historical + start_year: 1995 + end_year: 2014 + derive: true + reference_dataset: CERES-EBAF + additional_datasets: + - {dataset: CERES-EBAF, project: OBS, type: sat, version: Ed4.1, + tier: 2, start_year: 2001, end_year: 2021} + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, tier: 2, + start_year: 1984, end_year: 2016} + lwcre: + preprocessor: clim + mip: Amon + project: CMIP6 + exp: historical + start_year: 2001 + end_year: 2010 + derive: true + reference_dataset: CERES-EBAF + additional_datasets: + - {dataset: CERES-EBAF, project: OBS, type: sat, version: Ed4.1, + tier: 2, start_year: 2001, end_year: 2021} + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, tier: 2, + start_year: 1984, end_year: 2016} + prw: + preprocessor: clim + mip: Amon + 
project: CMIP6 + exp: historical + start_year: 1995 + end_year: 2014 + reference_dataset: ESACCI-WATERVAPOUR + additional_datasets: + - {dataset: ESACCI-WATERVAPOUR, project: OBS6, type: sat, + version: CDR2-L3-COMBI-05deg-fv3.1, tier: 3, + start_year: 2003, end_year: 2017} + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, tier: 2, + start_year: 1984, end_year: 2016} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: '1', + tier: 3} + additional_datasets: *cmip6_models + scripts: + avgclt: &settings6 + script: clouds/clouds.ncl + projection: Robinson + timemean: annualclim + showdiff: true + multiobs_uncertainty: true + multiobs_exclude: ["ERA-Interim", "ERA5"] + filename_add: cmip6 + var: clt + avgclivi: + <<: *settings6 + var: clivi + avglwp: + <<: *settings6 + var: lwp + avgswcre: + <<: *settings6 + var: swcre + avglwcre: + <<: *settings6 + var: lwcre + avgprw: + <<: *settings6 + var: prw diff --git a/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig1_clim_amip.yml b/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig1_clim_amip.yml new file mode 100644 index 0000000000..8ae73c9da5 --- /dev/null +++ b/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig1_clim_amip.yml @@ -0,0 +1,443 @@ +# recipe_lauer22jclim_fig1_clim_amip.yml +--- +documentation: + title: Cloud diagnostics v2 (climatologies) + + description: | + Multi-year annual means of cloud properties (AMIP models). + + authors: + - lauer_axel + + maintainer: + - lauer_axel + + references: + - lauer22jclim + + projects: + - cmug + + +preprocessors: + clim: + regrid: + target_grid: 2x2 + scheme: linear + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [CERES-EBAF, CLARA-AVHRR, CLOUDSAT-L2, ERA5, ERA-Interim, + ESACCI-CLOUD, ESACCI-WATERVAPOUR, HadISST, ISCCP-FH, MAC-LWP, + MODIS, PATMOS-x, reference_dataset, SSMI-MERIS] +# annual_statistics: +# operator: mean + + +CMIP5: &cmip5_models + - {dataset: ACCESS1-0, ensemble: r1i1p1} + - {dataset: ACCESS1-3, ensemble: r1i1p1} + - {dataset: bcc-csm1-1, ensemble: r1i1p1} + - {dataset: bcc-csm1-1-m, ensemble: r1i1p1} + - {dataset: BNU-ESM, ensemble: r1i1p1} + # - {dataset: CanESM2, ensemble: r1i1p1} no amip + - {dataset: CanAM4, ensemble: r1i1p1} + - {dataset: CCSM4, ensemble: r1i1p1} + # - {dataset: CESM1-BGC, ensemble: r1i1p1} no amip + - {dataset: CESM1-CAM5, ensemble: r1i1p1} + # - {dataset: CESM1-FASTCHEM, ensemble: r1i1p1} no amip + # - {dataset: CESM1-WACCM, ensemble: r1i1p1} no amip + # - {dataset: CMCC-CESM, ensemble: r1i1p1} no amip + - {dataset: CMCC-CM, ensemble: r1i1p1} + # missing data - {dataset: CMCC-CMS, ensemble: r1i1p1} no amip + - {dataset: CNRM-CM5, ensemble: r1i1p1} + - {dataset: CSIRO-Mk3-6-0, ensemble: r1i1p1} + - {dataset: FGOALS-g2, ensemble: r1i1p1} + - {dataset: FGOALS-s2, ensemble: r1i1p1} + # - {dataset: FIO-ESM, ensemble: r1i1p1} no amip + - {dataset: GFDL-CM3, ensemble: r1i1p1} + # - {dataset: GFDL-ESM2G, ensemble: r1i1p1} no amip + # - {dataset: GFDL-ESM2M, ensemble: r1i1p1} no amip + - {dataset: GFDL-HIRAM-C180, ensemble: r1i1p1} + - {dataset: GFDL-HIRAM-C360, ensemble: r1i1p1} + # - {dataset: GISS-E2-H-CC, ensemble: r1i1p1} no amip + # - {dataset: GISS-E2-H, ensemble: r1i1p1} no amip + # - {dataset: GISS-E2-R-CC, ensemble: r1i1p1} no amip + - {dataset: GISS-E2-R, ensemble: r1i1p1} + - {dataset: HadGEM2-A, ensemble: r1i1p1} + # - {dataset: HadGEM2-CC, ensemble: r1i1p1} no amip + # - {dataset: HadGEM2-ES, ensemble: r1i1p1} no amip + - {dataset: inmcm4, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-LR, ensemble: r1i1p1} 
+ - {dataset: IPSL-CM5A-MR, ensemble: r1i1p1} + - {dataset: IPSL-CM5B-LR, ensemble: r1i1p1} + # - {dataset: MIROC4h, ensemble: r1i1p1} no amip + - {dataset: MIROC5, ensemble: r1i1p1} + # - {dataset: MIROC-ESM-CHEM, ensemble: r1i1p1} no amip + - {dataset: MIROC-ESM, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, ensemble: r1i1p1} + - {dataset: MPI-ESM-MR, ensemble: r1i1p1} + # - {dataset: MPI-ESM-P, ensemble: r1i1p1} no amip + - {dataset: MRI-CGCM3, ensemble: r1i1p1} + # - {dataset: MRI-ESM1, ensemble: r1i1p1} no amip + # - {dataset: NorESM1-ME, ensemble: r1i1p1} no amip + - {dataset: NorESM1-M, ensemble: r1i1p1} + +CMIP6: &cmip6_models + # - {dataset: AWI-ESM-1-1-LR, ensemble: r1i1p1f1, grid: gn} no amip + # error time coordinate - {dataset: BCC-CSM2-MR, ensemble: r2i1p1f1, grid: gn} + # time_coord_problem_rlut - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: CanESM5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-FV2, institute: NCAR, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-WACCM, institute: NCAR, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-WACCM-FV2, institute: NCAR, ensemble: r1i1p1f1, grid: gn} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr} + # error time coordinate - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: EC-Earth3, ensemble: r1i1p1f1, grid: gr} + - {dataset: EC-Earth3-Veg, ensemble: r1i1p1f1, grid: gr} + # no_prw - {dataset: FGOALS-f3-L, ensemble: r2i1p1f1, grid: gr} + # missing data - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn} + # error time coordinate - {dataset: GFDL-AM4, ensemble: r1i1p1f1, grid: gr1} + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1} + # missing data - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1} + # error concat. cubes - {dataset: GISS-E2-1-G, ensemble: r1i1p1f1, grid: gn} + # - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn} no amip + - {dataset: GISS-E2-2-G, ensemble: r1i1p3f1, grid: gn} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn} + - {dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f3, grid: gn} + - {dataset: INM-CM4-8, ensemble: r1i1p1f1, grid: gr1} + - {dataset: INM-CM5-0, ensemble: r1i1p1f1, grid: gr1} + - {dataset: IPSL-CM6A-LR, ensemble: r1i1p1f1, grid: gr} + # missing data - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn} + # missing data - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn} + - {dataset: MPI-ESM-1-2-HAM, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-HR, institute: MPI-M, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn} + # missing data - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn} + # err. 
parent time units - {dataset: UKESM1-0-LL, ensemble: r1i1p1f4, grid: gn} + + +diagnostics: + + # ========================================================================== + # Geographical distribution of multi-year annual/seasonal means (panel plot) + # ========================================================================== + + clouds_clim_cmip5_amip: + description: climatological annual means + themes: + - clouds + realms: + - atmos + variables: + clt: + preprocessor: clim + mip: Amon + project: CMIP5 + exp: amip + start_year: 1986 + end_year: 2005 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: CLARA-AVHRR, project: OBS, type: sat, version: V002_01, + tier: 3, start_year: 1982, end_year: 2018} + - {dataset: PATMOS-x, project: OBS, type: sat, version: NOAA, + tier: 2, start_year: 1982, end_year: 2016} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, + tier: 3, start_year: 2003, end_year: 2018} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, + tier: 3} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: '1', + tier: 3} + clivi: + preprocessor: clim + mip: Amon + project: CMIP5 + exp: amip + start_year: 1986 + end_year: 2005 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: CLOUDSAT-L2, project: OBS, type: sat, + version: P1-R05-gridbox-average-noprecip, + start_year: 2006, end_year: 2017, tier: 3} + - {dataset: CLARA-AVHRR, project: OBS, type: sat, version: V002_01, + tier: 3, start_year: 1982, end_year: 2018} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, + tier: 3, start_year: 2003, end_year: 2018} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, + tier: 3} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: '1', + tier: 3} + lwp: + preprocessor: clim + mip: Amon + project: CMIP5 + exp: amip + start_year: 1986 + end_year: 2005 + derive: true + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: CLARA-AVHRR, project: OBS, type: sat, version: V002_01, + tier: 3, start_year: 1982, end_year: 2018} + - {dataset: CLOUDSAT-L2, project: OBS, type: sat, + version: P1-R05-gridbox-average-noprecip, + start_year: 2006, end_year: 2017, tier: 3} + - {dataset: MAC-LWP, project: OBS, type: sat, version: v1, tier: 3, + start_year: 1988, end_year: 2016} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, + tier: 3, start_year: 2003, end_year: 2018} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, + tier: 3} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: '1', + tier: 3} + swcre: + preprocessor: clim + mip: Amon + project: CMIP5 + exp: amip + start_year: 1986 + end_year: 2005 + derive: true + reference_dataset: CERES-EBAF + additional_datasets: + - {dataset: CERES-EBAF, project: OBS, type: sat, version: Ed4.1, + tier: 2, start_year: 2001, end_year: 2021} + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, tier: 2, + start_year: 1984, end_year: 2016} + lwcre: + preprocessor: clim + mip: Amon + 
project: CMIP5 + exp: amip + start_year: 1986 + end_year: 2005 + derive: true + reference_dataset: CERES-EBAF + additional_datasets: + - {dataset: CERES-EBAF, project: OBS, type: sat, version: Ed4.1, + tier: 2, start_year: 2001, end_year: 2021} + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, tier: 2, + start_year: 1984, end_year: 2016} + prw: + preprocessor: clim + mip: Amon + project: CMIP5 + exp: amip + start_year: 1986 + end_year: 2005 + reference_dataset: ESACCI-WATERVAPOUR + additional_datasets: + - {dataset: ESACCI-WATERVAPOUR, project: OBS6, type: sat, + version: CDR2-L3-COMBI-05deg-fv3.1, tier: 3, + start_year: 2003, end_year: 2017} + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, tier: 2, + start_year: 1984, end_year: 2016} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: '1', + tier: 3} + additional_datasets: *cmip5_models + scripts: + avgclt: &settings5 + script: clouds/clouds.ncl + projection: Robinson + timemean: annualclim + showdiff: true + multiobs_uncertainty: true + multiobs_exclude: ["ERA-Interim", "ERA5"] + filename_add: cmip5_amip + var: clt + avgclivi: + <<: *settings5 + var: clivi + avglwp: + <<: *settings5 + var: lwp + avgswcre: + <<: *settings5 + var: swcre + avglwcre: + <<: *settings5 + var: lwcre + avgprw: + <<: *settings5 + var: prw + + clouds_clim_cmip6_amip: + description: climatological annual means + themes: + - clouds + realms: + - atmos + variables: + clt: + preprocessor: clim + mip: Amon + project: CMIP6 + exp: amip + start_year: 1995 + end_year: 2014 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: CLARA-AVHRR, project: OBS, type: sat, version: V002_01, + tier: 3, start_year: 1982, end_year: 2018} + - {dataset: PATMOS-x, project: OBS, type: sat, version: NOAA, + tier: 2, start_year: 1982, end_year: 2016} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, + tier: 3, start_year: 2003, end_year: 2018} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, + tier: 3} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: '1', + tier: 3} + clivi: + preprocessor: clim + mip: Amon + project: CMIP6 + exp: amip + start_year: 1995 + end_year: 2014 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: CLOUDSAT-L2, project: OBS, type: sat, + version: P1-R05-gridbox-average-noprecip, + start_year: 2006, end_year: 2017, tier: 3} + - {dataset: CLARA-AVHRR, project: OBS, type: sat, version: V002_01, + tier: 3, start_year: 1982, end_year: 2018} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, + tier: 3, start_year: 2003, end_year: 2018} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, + tier: 3} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: '1', + tier: 3} + lwp: + preprocessor: clim + mip: Amon + project: CMIP6 + exp: amip + start_year: 1995 + end_year: 2014 + derive: true + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: CLARA-AVHRR, project: OBS, type: sat, version: V002_01, + 
tier: 3, start_year: 1982, end_year: 2018} + - {dataset: CLOUDSAT-L2, project: OBS, type: sat, + version: P1-R05-gridbox-average-noprecip, + start_year: 2006, end_year: 2017, tier: 3} + - {dataset: MAC-LWP, project: OBS, type: sat, version: v1, tier: 3, + start_year: 1988, end_year: 2016} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, + tier: 3, start_year: 2003, end_year: 2018} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, + tier: 3} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: '1', + tier: 3} + swcre: + preprocessor: clim + mip: Amon + project: CMIP6 + exp: amip + start_year: 1995 + end_year: 2014 + derive: true + reference_dataset: CERES-EBAF + additional_datasets: + - {dataset: CERES-EBAF, project: OBS, type: sat, version: Ed4.1, + tier: 2, start_year: 2001, end_year: 2021} + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, tier: 2, + start_year: 1984, end_year: 2016} + lwcre: + preprocessor: clim + mip: Amon + project: CMIP6 + exp: amip + start_year: 2001 + end_year: 2010 + derive: true + reference_dataset: CERES-EBAF + additional_datasets: + - {dataset: CERES-EBAF, project: OBS, type: sat, version: Ed4.1, + tier: 2, start_year: 2001, end_year: 2021} + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, tier: 2, + start_year: 1984, end_year: 2016} + prw: + preprocessor: clim + mip: Amon + project: CMIP6 + exp: amip + start_year: 1995 + end_year: 2014 + reference_dataset: ESACCI-WATERVAPOUR + additional_datasets: + - {dataset: ESACCI-WATERVAPOUR, project: OBS6, type: sat, + version: CDR2-L3-COMBI-05deg-fv3.1, tier: 3, + start_year: 2003, end_year: 2017} + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, tier: 2, + start_year: 1984, end_year: 2016} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: '1', + tier: 3} + additional_datasets: *cmip6_models + scripts: + avgclt: &settings6 + script: clouds/clouds.ncl + projection: Robinson + timemean: annualclim + showdiff: true + multiobs_uncertainty: true + multiobs_exclude: ["ERA-Interim", "ERA5"] + filename_add: cmip6_amip + var: clt + avgclivi: + <<: *settings6 + var: clivi + avglwp: + <<: *settings6 + var: lwp + avgswcre: + <<: *settings6 + var: swcre + avglwcre: + <<: *settings6 + var: lwcre + avgprw: + <<: *settings6 + var: prw diff --git a/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig2_taylor.yml b/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig2_taylor.yml new file mode 100644 index 0000000000..c36e633164 --- /dev/null +++ b/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig2_taylor.yml @@ -0,0 +1,315 @@ +# recipe_lauer22jclim_fig2_taylor.yml +--- +documentation: + title: Cloud diagnostics v2 (taylor) + + description: | + Taylor diagrams of cloud properties. 
+ + authors: + - lauer_axel + + maintainer: + - lauer_axel + + references: + - lauer22jclim + + projects: + - cmug + + +preprocessors: + clim: + regrid: + target_grid: 2x2 + scheme: linear + + +diagnostics: + + # ========================================================================== + # Taylor plots of multi-year mean quantities + # ========================================================================== + + clouds_tay: + description: climatological annual means + themes: + - clouds + realms: + - atmos + variables: + clt: + preprocessor: clim + mip: Amon + exp: historical + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: CLARA-AVHRR, project: OBS, type: sat, + version: V002_01, tier: 3, + start_year: 1982, end_year: 2018} + - {dataset: PATMOS-x, project: OBS, type: sat, version: NOAA, + tier: 2, start_year: 1982, end_year: 2016} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, + tier: 3, start_year: 2003, end_year: 2018} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, + tier: 3, start_year: 1986, end_year: 2014} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: '1', + tier: 3, start_year: 1986, end_year: 2014} + clivi: + preprocessor: clim + mip: Amon + exp: historical + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: CLOUDSAT-L2, project: OBS, type: sat, + version: P1-R05-gridbox-average-noprecip, + start_year: 2006, end_year: 2017, tier: 3} + - {dataset: CLARA-AVHRR, project: OBS, type: sat, version: V002_01, + tier: 3, start_year: 1982, end_year: 2018} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, + tier: 3, start_year: 2003, end_year: 2018} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, + tier: 3, start_year: 1986, end_year: 2014} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: '1', + tier: 3, start_year: 1986, end_year: 2014} + lwp: + preprocessor: clim + mip: Amon + exp: historical + derive: true + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: CLARA-AVHRR, project: OBS, type: sat, version: V002_01, + tier: 3, start_year: 1982, end_year: 2018} + - {dataset: CLOUDSAT-L2, project: OBS, type: sat, + version: P1-R05-gridbox-average-noprecip, + start_year: 2006, end_year: 2017, tier: 3} + - {dataset: MAC-LWP, project: OBS, type: sat, version: v1, tier: 3, + start_year: 1988, end_year: 2016} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, + tier: 3, start_year: 2003, end_year: 2018} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, + tier: 3, start_year: 1986, end_year: 2014} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: '1', + tier: 3, start_year: 1986, end_year: 2014} + swcre: + preprocessor: clim + mip: Amon + exp: historical + derive: true + reference_dataset: CERES-EBAF + additional_datasets: + - {dataset: CERES-EBAF, project: OBS, type: sat, version: Ed4.1, + tier: 2, start_year: 2001, end_year: 2021} + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: ISCCP-FH, project: OBS, type: sat, version: 
v0, tier: 2, + start_year: 1984, end_year: 2016} + lwcre: + preprocessor: clim + mip: Amon + exp: historical + derive: true + reference_dataset: CERES-EBAF + additional_datasets: + - {dataset: CERES-EBAF, project: OBS, type: sat, version: Ed4.1, + tier: 2, start_year: 2001, end_year: 2021} + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, tier: 2, + start_year: 1984, end_year: 2016} + prw: + preprocessor: clim + mip: Amon + exp: historical + reference_dataset: ESACCI-WATERVAPOUR + additional_datasets: + - {dataset: ESACCI-WATERVAPOUR, project: OBS6, type: sat, + version: CDR2-L3-COMBI-05deg-fv3.1, tier: 3, + start_year: 2003, end_year: 2017} + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, tier: 2, + start_year: 1984, end_year: 2016} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: '1', + tier: 3, start_year: 1986, end_year: 2014} + additional_datasets: + - {dataset: ACCESS1-0, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: ACCESS1-3, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: bcc-csm1-1, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: bcc-csm1-1-m, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: BNU-ESM, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: CanESM2, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: CCSM4, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: CESM1-BGC, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: CESM1-CAM5, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: CESM1-FASTCHEM, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: CESM1-WACCM, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: CMCC-CESM, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: CMCC-CM, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + # missing_data - {dataset: CMCC-CMS, ensemble: r1i1p1, project: CMIP5, + # start_year: 1986, end_year: 2005} + - {dataset: CSIRO-Mk3-6-0, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: FGOALS-g2, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: FIO-ESM, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: GFDL-CM3, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2G, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2M, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-H-CC, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-H, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-R-CC, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-R, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: HadGEM2-CC, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: HadGEM2-ES, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: 
inmcm4, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5A-LR, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5A-MR, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5B-LR, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: MIROC4h, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: MIROC5, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: MIROC-ESM-CHEM, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: MIROC-ESM, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-LR, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-MR, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-P, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: MRI-CGCM3, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: MRI-ESM1, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: NorESM1-ME, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: NorESM1-M, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: AWI-ESM-1-1-LR, ensemble: r1i1p1f1, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + # time_coord_problem_rlut - {dataset: BCC-ESM1, ensemble: r1i1p1f1, + # grid: gn, project: CMIP6, + # start_year: 1995, end_year: 2014} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: CESM2-FV2, institute: NCAR, ensemble: r1i1p1f1, grid: gn, + project: CMIP6, start_year: 1995, end_year: 2014} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: CESM2-WACCM, institute: NCAR, ensemble: r1i1p1f1, grid: gn, + project: CMIP6, start_year: 1995, end_year: 2014} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr, project: CMIP6, + start_year: 1995, end_year: 2014} + # no_prw - {dataset: FGOALS-f3-L, ensemble: r2i1p1f1, grid: gr, + # project: CMIP6, start_year: 1995, end_year: 2014} + - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: GISS-E2-1-G, ensemble: r1i1p1f1, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: IPSL-CM6A-LR, ensemble: r3i1p1f1, grid: gr, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr, project: CMIP6, + start_year: 1995, end_year: 
2014} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: MPI-ESM-1-2-HAM, ensemble: r1i1p1f1, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: MPI-ESM1-2-HR, institute: MPI-M, ensemble: r1i1p1f1, grid: gn, + project: CMIP6, start_year: 1995, end_year: 2014} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + scripts: + tayclt: &settings + script: clouds/clouds_taylor_double.ncl + multiobs_uncertainty: true + multiobs_exclude: ["ERA-Interim", "ERA5"] + styleset: lauer21 + timemean: annualclim + valid_fraction: 0.5 + projectcolors: ["(/0.6, 0.8, 1.0/)", "(/1.0, 0.6, 0.8/)"] + projectgroups: ["CMIP5", "CMIP6"] + var: clt + tayclivi: + <<: *settings + var: clivi + taylwp: + <<: *settings + var: lwp + tayswcre: + <<: *settings + var: swcre + taylwcre: + <<: *settings + var: lwcre + tayprw: + <<: *settings + var: prw diff --git a/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig2_taylor_amip.yml b/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig2_taylor_amip.yml new file mode 100644 index 0000000000..426746571b --- /dev/null +++ b/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig2_taylor_amip.yml @@ -0,0 +1,348 @@ +# recipe_lauer22jclim_fig2_taylor_amip.yml +--- +documentation: + title: Cloud diagnostics v2 (taylor) + + description: | + Taylor diagrams of cloud properties (AMIP models). 
+ + authors: + - lauer_axel + + maintainer: + - lauer_axel + + references: + - lauer22jclim + + projects: + - cmug + + +preprocessors: + clim: + regrid: + target_grid: 2x2 + scheme: linear + + +diagnostics: + + # ========================================================================== + # Taylor plots of multi-year mean quantities + # ========================================================================== + + clouds_tay_amip: + description: climatological annual means + themes: + - clouds + realms: + - atmos + variables: + clt: + preprocessor: clim + mip: Amon + exp: amip + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: CLARA-AVHRR, project: OBS, type: sat, version: V002_01, + tier: 3, start_year: 1982, end_year: 2018} + - {dataset: PATMOS-x, project: OBS, type: sat, version: NOAA, + tier: 2, start_year: 1982, end_year: 2016} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, + tier: 3, start_year: 2003, end_year: 2018} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, + tier: 3, start_year: 1986, end_year: 2014} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: '1', + tier: 3, start_year: 1986, end_year: 2014} + clivi: + preprocessor: clim + mip: Amon + exp: amip + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: CLOUDSAT-L2, project: OBS, type: sat, + version: P1-R05-gridbox-average-noprecip, + start_year: 2006, end_year: 2017, tier: 3} + - {dataset: CLARA-AVHRR, project: OBS, type: sat, version: V002_01, + tier: 3, start_year: 1982, end_year: 2018} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, + tier: 3, start_year: 2003, end_year: 2018} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, + tier: 3, start_year: 1986, end_year: 2014} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: '1', + tier: 3, start_year: 1986, end_year: 2014} + lwp: + preprocessor: clim + mip: Amon + exp: amip + derive: true + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: CLARA-AVHRR, project: OBS, type: sat, version: V002_01, + tier: 3, start_year: 1982, end_year: 2018} + - {dataset: CLOUDSAT-L2, project: OBS, type: sat, + version: P1-R05-gridbox-average-noprecip, + start_year: 2006, end_year: 2017, tier: 3} + - {dataset: MAC-LWP, project: OBS, type: sat, version: v1, tier: 3, + start_year: 1988, end_year: 2016} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, + tier: 3, start_year: 2003, end_year: 2018} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, + tier: 3, start_year: 1986, end_year: 2014} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: '1', + tier: 3, start_year: 1986, end_year: 2014} + swcre: + preprocessor: clim + mip: Amon + exp: amip + derive: true + reference_dataset: CERES-EBAF + additional_datasets: + - {dataset: CERES-EBAF, project: OBS, type: sat, version: Ed4.1, + tier: 2, start_year: 2001, end_year: 2021} + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, tier: 2, + 
start_year: 1984, end_year: 2016} + lwcre: + preprocessor: clim + mip: Amon + exp: amip + derive: true + reference_dataset: CERES-EBAF + additional_datasets: + - {dataset: CERES-EBAF, project: OBS, type: sat, version: Ed4.1, + tier: 2, start_year: 2001, end_year: 2021} + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, tier: 2, + start_year: 1984, end_year: 2016} + prw: + preprocessor: clim + mip: Amon + exp: amip + reference_dataset: ESACCI-WATERVAPOUR + additional_datasets: + - {dataset: ESACCI-WATERVAPOUR, project: OBS6, type: sat, + version: CDR2-L3-COMBI-05deg-fv3.1, tier: 3, + start_year: 2003, end_year: 2017} + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, tier: 2, + start_year: 1984, end_year: 2016} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: '1', + tier: 3, start_year: 1986, end_year: 2014} + additional_datasets: + - {dataset: ACCESS1-0, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: ACCESS1-3, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: bcc-csm1-1, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: bcc-csm1-1-m, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: BNU-ESM, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + # no data - {dataset: CanESM2, ensemble: r1i1p1, project: CMIP5, + # start_year: 1986, end_year: 2005} + - {dataset: CanAM4, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: CCSM4, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + # no data - {dataset: CESM1-BGC, ensemble: r1i1p1, project: CMIP5, + # start_year: 1986, end_year: 2005} + - {dataset: CESM1-CAM5, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + # no data - {dataset: CESM1-FASTCHEM, ensemble: r1i1p1, project: CMIP5, + # start_year: 1986, end_year: 2005} + # no data - {dataset: CESM1-WACCM, ensemble: r1i1p1, project: CMIP5, + # start_year: 1986, end_year: 2005} + # no data - {dataset: CMCC-CESM, ensemble: r1i1p1, project: CMIP5, + # start_year: 1986, end_year: 2005} no amip + - {dataset: CMCC-CM, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + # missing data - {dataset: CMCC-CMS, ensemble: r1i1p1, project: CMIP5, + # start_year: 1986, end_year: 2005} no amip + - {dataset: CNRM-CM5, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: CSIRO-Mk3-6-0, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: FGOALS-g2, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: FGOALS-s2, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + # no data - {dataset: FIO-ESM, ensemble: r1i1p1, project: CMIP5, + # start_year: 1986, end_year: 2005} + - {dataset: GFDL-CM3, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + # no data - {dataset: GFDL-ESM2G, ensemble: r1i1p1, project: CMIP5, + # start_year: 1986, end_year: 2005} + # no data - {dataset: GFDL-ESM2M, ensemble: r1i1p1, project: CMIP5, + # start_year: 1986, end_year: 2005} + - {dataset: GFDL-HIRAM-C180, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: GFDL-HIRAM-C360, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + # no data - {dataset: 
GISS-E2-H-CC, ensemble: r1i1p1, project: CMIP5, + # start_year: 1986, end_year: 2005} + # no data - {dataset: GISS-E2-H, ensemble: r1i1p1, project: CMIP5, + # start_year: 1986, end_year: 2005} + # no data - {dataset: GISS-E2-R-CC, ensemble: r1i1p1, project: CMIP5, + # start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-R, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: HadGEM2-A, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + # no data - {dataset: HadGEM2-CC, ensemble: r1i1p1, project: CMIP5, + # start_year: 1986, end_year: 2005} + # no data - {dataset: HadGEM2-ES, ensemble: r1i1p1, project: CMIP5, + # start_year: 1986, end_year: 2005} + - {dataset: inmcm4, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5A-LR, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5A-MR, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5B-LR, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + # no data - {dataset: MIROC4h, ensemble: r1i1p1, project: CMIP5, + # start_year: 1986, end_year: 2005} + - {dataset: MIROC5, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + # no data - {dataset: MIROC-ESM-CHEM, ensemble: r1i1p1, project: CMIP5, + # start_year: 1986, end_year: 2005} + - {dataset: MIROC-ESM, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-LR, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-MR, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + # no data - {dataset: MPI-ESM-P, ensemble: r1i1p1, project: CMIP5, + # start_year: 1986, end_year: 2005} + - {dataset: MRI-CGCM3, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + # no data - {dataset: MRI-ESM1, ensemble: r1i1p1, project: CMIP5, + # start_year: 1986, end_year: 2005} + # no data - {dataset: NorESM1-ME, ensemble: r1i1p1, project: CMIP5, + # start_year: 1986, end_year: 2005} no amip + - {dataset: NorESM1-M, ensemble: r1i1p1, project: CMIP5, + start_year: 1986, end_year: 2005} + # no data - {dataset: AWI-ESM-1-1-LR, ensemble: r1i1p1f1, grid: gn, + # project: CMIP6, start_year: 1995, end_year: 2014} + # error time coord. - {dataset: BCC-CSM2-MR, ensemble: r2i1p1f1, grid: gn, + # project: CMIP6, start_year: 1995, end_year: 2014} + # time_coord_problem_rlut - {dataset: BCC-ESM1, ensemble: r1i1p1f1, + # grid: gn, project: CMIP6, start_year: 1995, + # end_year: 2014} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: CanESM5, ensemble: r1i1p1f1, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: CESM2-FV2, institute: NCAR, ensemble: r1i1p1f1, grid: gn, + project: CMIP6, start_year: 1995, end_year: 2014} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: CESM2-WACCM, institute: NCAR, ensemble: r1i1p1f1, grid: gn, + project: CMIP6, start_year: 1995, end_year: 2014} + - {dataset: CESM2-WACCM-FV2, institute: NCAR, ensemble: r1i1p1f1, + grid: gn, project: CMIP6, start_year: 1995, end_year: 2014} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr, project: CMIP6, + start_year: 1995, end_year: 2014} + # error time coord. 
- {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr, + # project: CMIP6, start_year: 1995, end_year: 2014} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: EC-Earth3, ensemble: r1i1p1f1, grid: gr, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: EC-Earth3-Veg, ensemble: r1i1p1f1, grid: gr, project: CMIP6, + start_year: 1995, end_year: 2014} + # no prw - {dataset: FGOALS-f3-L, ensemble: r2i1p1f1, grid: gr, + # project: CMIP6, start_year: 1995, end_year: 2014} + # missing data - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn, + # project: CMIP6, start_year: 1995, end_year: 2014} + # error time coord. - {dataset: GFDL-AM4, ensemble: r1i1p1f1, grid: gr1, + # project: CMIP6, start_year: 1995, end_year: 2014} + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1, project: CMIP6, + start_year: 1995, end_year: 2014} + # missing data - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1, + # project: CMIP6, start_year: 1995, end_year: 2014} + # error concat. cubes - {dataset: GISS-E2-1-G, ensemble: r1i1p1f1, + # grid: gn, project: CMIP6, start_year: 1995, + # end_year: 2014} + # no data - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn, + # project: CMIP6, start_year: 1995, end_year: 2014} + - {dataset: GISS-E2-2-G, ensemble: r1i1p3f1, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f3, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: INM-CM4-8, ensemble: r1i1p1f1, grid: gr1, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: INM-CM5-0, ensemble: r1i1p1f1, grid: gr1, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: IPSL-CM6A-LR, ensemble: r1i1p1f1, grid: gr, project: CMIP6, + start_year: 1995, end_year: 2014} + # missing data - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr, + # project: CMIP6, start_year: 1995, end_year: 2014} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + # missing data - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn, + # project: CMIP6, start_year: 1995, end_year: 2014} + - {dataset: MPI-ESM-1-2-HAM, ensemble: r1i1p1f1, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: MPI-ESM1-2-HR, institute: MPI-M, ensemble: r1i1p1f1, grid: gn, + project: CMIP6, start_year: 1995, end_year: 2014} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + # missing data - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn, + # project: CMIP6, start_year: 1995, end_year: 2014} + # error parent_time_units - {dataset: UKESM1-0-LL, ensemble: r1i1p1f4, + # grid: gn, project: CMIP6, start_year: 1995, + # end_year: 2014} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn, project: CMIP6, + start_year: 1995, end_year: 2014} + scripts: + tayclt: &settings + script: clouds/clouds_taylor_double.ncl + multiobs_uncertainty: true + multiobs_exclude: ["ERA-Interim", "ERA5"] + styleset: lauer21 + timemean: annualclim + 
valid_fraction: 0.5 + projectcolors: ["(/0.6, 0.8, 1.0/)", "(/1.0, 0.6, 0.8/)"] + projectgroups: ["CMIP5", "CMIP6"] + var: clt + tayclivi: + <<: *settings + var: clivi + taylwp: + <<: *settings + var: lwp + tayswcre: + <<: *settings + var: swcre + taylwcre: + <<: *settings + var: lwcre + tayprw: + <<: *settings + var: prw diff --git a/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig3-4_zonal.yml b/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig3-4_zonal.yml new file mode 100644 index 0000000000..cedef4767f --- /dev/null +++ b/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig3-4_zonal.yml @@ -0,0 +1,586 @@ +# ESMValTool +# recipe_lauer22jclim_fig3-4_zonal.yml +--- +documentation: + title: Cloud diagnostics v2 (zonal means) + + description: | + Diagnostics of 3-d cloud parameters. + + authors: + - lauer_axel + + maintainer: + - lauer_axel + + references: + - lauer22jclim + + projects: + - cmug + + +preprocessors: + ppa40: + extract_levels: + levels: {cmor_table: CMIP6, coordinate: alt40} + coordinate: altitude + scheme: linear + regrid: + target_grid: 2x2 + scheme: linear + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [reference_dataset] + zonal_statistics: + operator: mean + + pp27: + extract_levels: + levels: {cmor_table: CMIP6, coordinate: plev27} + coordinate: air_pressure + scheme: linear + regrid: + target_grid: 2x2 + scheme: linear + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [reference_dataset] + zonal_statistics: + operator: mean + + +diagnostics: + + clw_cmip6: + description: cloud liquid water zonal mean + themes: + - phys + - clouds + realms: + - atmos + variables: + clw: + preprocessor: pp27 + reference_dataset: CLOUDSAT-L2 + mip: Amon + project: CMIP6 + exp: historical + start_year: 1995 + end_year: 2014 + additional_datasets: + - {dataset: CLOUDSAT-L2, project: OBS, type: sat, + version: P1-R05-gridbox-average-noprecip, + start_year: 2006, end_year: 2017, mip: Amon, tier: 3} + # concatenation error - {dataset: ACCESS-CM2, institute: CSIRO-ARCCSS, + # ensemble: r1i1p1f1, grid: gn} + # concatenation error - {dataset: ACCESS-ESM1-5, ensemble: r1i1p1f1, + # grid: gn} + - {dataset: AWI-ESM-1-1-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-FV2, institute: NCAR, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-WACCM, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-WACCM-FV2, ensemble: r1i1p1f1, grid: gn} + # error - {dataset: CMCC-CM2-HR4, ensemble: r1i1p1f1, grid: gn} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr} + # error - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr} + - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn} + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1} + # ERROR_ps_missing - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1} + - {dataset: GISS-E2-1-G, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn} + # ERROR_coordinates_screwed - {dataset: IPSL-CM6A-LR, ensemble: r4i1p1f1, + # grid: gr} + - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: 
gn} + - {dataset: MPI-ESM-1-2-HAM, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-HR, institute: MPI-M, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + scripts: + zonal: + script: clouds/clouds_zonal.ncl + timemean: annualclim + embracesetup: false + extralegend: false + panel_labels: true + explicit_cn_levels: [0., 0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, + 0.09, 0.1, 0.11, 0.12, 0.13, 0.14] + explicit_cn_dlevels: [-0.05, -0.04, -0.03, -0.02, -0.01, 0., 0.01, 0.02, + 0.03, 0.04, 0.05] + units_to: g/kg + showdiff: true + rel_diff: false + t_test: true + filename_add: cmip6 + + clw_cmip5: + description: cloud liquid water zonal mean + themes: + - phys + - clouds + realms: + - atmos + variables: + clw: + preprocessor: pp27 + reference_dataset: CLOUDSAT-L2 + mip: Amon + project: CMIP5 + exp: historical + start_year: 1986 + end_year: 2005 + additional_datasets: + - {dataset: CLOUDSAT-L2, project: OBS, type: sat, + version: P1-R05-gridbox-average-noprecip, + start_year: 2006, end_year: 2017, tier: 3} + # concatenation error - {dataset: ACCESS1-0, ensemble: r1i1p1} + # concatenation error - {dataset: ACCESS1-3, ensemble: r1i1p1} + - {dataset: bcc-csm1-1, ensemble: r1i1p1} + - {dataset: bcc-csm1-1-m, ensemble: r1i1p1} + - {dataset: BNU-ESM, ensemble: r1i1p1} + - {dataset: CanESM2, ensemble: r1i1p1} + - {dataset: CCSM4, ensemble: r1i1p1} + - {dataset: CESM1-BGC, ensemble: r1i1p1} + - {dataset: CESM1-CAM5, ensemble: r1i1p1} + - {dataset: CESM1-FASTCHEM, ensemble: r1i1p1} + - {dataset: CESM1-WACCM, ensemble: r1i1p1} + # data look wrong - {dataset: CMCC-CESM, ensemble: r1i1p1} + # data look wrong - {dataset: CMCC-CM, ensemble: r1i1p1} + # data look wrong - {dataset: CMCC-CMS, ensemble: r1i1p1} + # - {dataset: CSIRO-Mk3-6-0, ensemble: r1i1p1} + # - {dataset: FGOALS-g2, ensemble: r1i1p1} + - {dataset: FIO-ESM, ensemble: r1i1p1} + - {dataset: GFDL-CM3, ensemble: r1i1p1} + - {dataset: GFDL-ESM2G, ensemble: r1i1p1} + - {dataset: GFDL-ESM2M, ensemble: r1i1p1} + - {dataset: GISS-E2-H-CC, ensemble: r1i1p1} + - {dataset: GISS-E2-H, ensemble: r1i1p1} + - {dataset: GISS-E2-R-CC, ensemble: r1i1p1} + - {dataset: GISS-E2-R, ensemble: r1i1p1} + - {dataset: HadGEM2-CC, ensemble: r1i1p1} + - {dataset: HadGEM2-ES, ensemble: r1i1p1} + - {dataset: inmcm4, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-LR, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-MR, ensemble: r1i1p1} + - {dataset: IPSL-CM5B-LR, ensemble: r1i1p1} + # - {dataset: MIROC4h, ensemble: r1i1p1} + # concatenation error - {dataset: MIROC5, ensemble: r1i1p1} + - {dataset: MIROC-ESM-CHEM, ensemble: r1i1p1} + - {dataset: MIROC-ESM, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, ensemble: r1i1p1} + - {dataset: MPI-ESM-MR, ensemble: r1i1p1} + - {dataset: MPI-ESM-P, ensemble: r1i1p1} + - {dataset: MRI-CGCM3, ensemble: r1i1p1} + - {dataset: MRI-ESM1, ensemble: r1i1p1} + - {dataset: NorESM1-ME, ensemble: r1i1p1} + - {dataset: NorESM1-M, ensemble: r1i1p1} + scripts: + zonal: + script: clouds/clouds_zonal.ncl + timemean: annualclim + embracesetup: false + extralegend: false + panel_labels: true + explicit_cn_levels: [0., 0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, + 0.09, 0.1, 0.11, 0.12, 0.13, 
0.14] + explicit_cn_dlevels: [-0.05, -0.04, -0.03, -0.02, -0.01, 0., 0.01, 0.02, + 0.03, 0.04, 0.05] + units_to: g/kg + showdiff: true + rel_diff: false + t_test: true + filename_add: cmip5 + + clcalipso_cmip6: + description: cloud fraction zonal mean + themes: + - phys + - clouds + realms: + - atmos + variables: + clcalipso: + preprocessor: ppa40 + reference_dataset: CALIPSO-GOCCP + mip: CFmon + exp: historical + project: CMIP6 + start_year: 1995 + end_year: 2014 + additional_datasets: + - {dataset: CALIPSO-GOCCP, project: OBS, type: sat, version: 3.1.2, + mip: cfMon, start_year: 2007, end_year: 2015, tier: 2} + - {dataset: CESM2, ensemble: r10i1p1f1, grid: gn} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: E3SM-1-0, ensemble: r4i1p1f1, grid: gr} + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1} + - {dataset: IPSL-CM6A-LR, ensemble: r4i1p1f1, grid: gr} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn} + scripts: + zonal: + script: clouds/clouds_zonal.ncl + timemean: annualclim + explicit_cn_levels: [5., 10., 15., 20., 25., 30., 35., 40., 45., 50.] + explicit_cn_dlevels: [-20., -15., -10., -5., 0, 5., 10., 15., 20.] + embracesetup: true + extralegend: false + panel_labels: true + showdiff: true + rel_diff: false + t_test: true + filename_add: cmip6 + + clcalipso_cmip5: + description: cloud fraction zonal mean + themes: + - phys + - clouds + realms: + - atmos + variables: + clcalipso: + preprocessor: ppa40 + reference_dataset: CALIPSO-GOCCP + mip: cfMon + exp: historical + project: CMIP5 + start_year: 1986 + end_year: 2005 + additional_datasets: + - {dataset: CALIPSO-GOCCP, project: OBS, type: sat, version: 3.1.2, + mip: cfMon, tier: 2, start_year: 2007, end_year: 2015} + - {dataset: CanESM2, ensemble: r1i1p1} + - {dataset: HadGEM2-ES, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-LR, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-MR, ensemble: r1i1p1} + - {dataset: MIROC5, ensemble: r1i1p1} + - {dataset: MIROC-ESM-CHEM, ensemble: r1i1p1} + - {dataset: MIROC-ESM, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, ensemble: r1i1p1} + - {dataset: MRI-CGCM3, ensemble: r1i1p1} + scripts: + zonal: + script: clouds/clouds_zonal.ncl + timemean: annualclim + explicit_cn_levels: [5., 10., 15., 20., 25., 30., 35., 40., 45., 50.] + explicit_cn_dlevels: [-20., -15., -10., -5., 0, 5., 10., 15., 20.] 
+ embracesetup: true + extralegend: false + panel_labels: true + showdiff: true + rel_diff: false + t_test: true + filename_add: cmip5 + + cli_cmip6: + description: cloud ice content zonal mean + themes: + - phys + realms: + - atmos + variables: + cli: + preprocessor: pp27 + reference_dataset: CALIPSO-ICECLOUD + mip: Amon + project: CMIP6 + exp: historical + start_year: 1995 + end_year: 2014 + additional_datasets: + - {dataset: CALIPSO-ICECLOUD, project: OBS, type: sat, version: 1-00, + mip: Amon, start_year: 2007, end_year: 2015, tier: 3} + # concatenation error - {dataset: ACCESS-CM2, institute: CSIRO-ARCCSS, + # ensemble: r1i1p1f1, grid: gn} + # concatenation error - {dataset: ACCESS-ESM1-5, ensemble: r1i1p1f1, + # grid: gn} + - {dataset: AWI-ESM-1-1-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-FV2, institute: NCAR, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-WACCM, institute: NCAR, ensemble: r1i1p1f1, grid: gn} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr} + # error - {dataset: FGOALS-f3-L, ensemble: r2i1p1f1, grid: gr} + - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn} + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1} + # ERROR_ps_missing - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1} + - {dataset: GISS-E2-1-G, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn} + # ERROR_coordinates_screwed - {dataset: IPSL-CM6A-LR, ensemble: r3i1p1f1, + # grid: gr} + - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn} + - {dataset: MPI-ESM-1-2-HAM, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-HR, institute: MPI-M, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + scripts: + zonal: + script: clouds/clouds_zonal.ncl + timemean: annualclim + explicit_cn_levels: [0., 0.002, 0.004, 0.006, 0.008, 0.01, 0.012, 0.014, + 0.016, 0.018, 0.02, 0.022, 0.024, 0.026, 0.028] + explicit_cn_dlevels: [-0.01, -0.008, -0.006, -0.004, -0.002, 0., 0.002, + 0.004, 0.006, 0.008, 0.01] + extralegend: false + panel_labels: true + showdiff: true + t_test: true + rel_diff: false + units_to: g/kg + filename_add: cmip6 + + cli_cmip5: + description: cloud ice content zonal mean + themes: + - phys + realms: + - atmos + variables: + cli: + preprocessor: pp27 + reference_dataset: CALIPSO-ICECLOUD + mip: Amon + exp: historical + project: CMIP5 + start_year: 1986 + end_year: 2005 + additional_datasets: + - {dataset: CALIPSO-ICECLOUD, project: OBS, type: sat, version: 1-00, + mip: Amon, start_year: 2007, end_year: 2015, tier: 3} + # concatenation error - {dataset: ACCESS1-0, ensemble: r1i1p1} + # concatenation error - {dataset: ACCESS1-3, ensemble: r1i1p1} + - {dataset: bcc-csm1-1, ensemble: 
r1i1p1} + - {dataset: bcc-csm1-1-m, ensemble: r1i1p1} + - {dataset: BNU-ESM, ensemble: r1i1p1} + - {dataset: CanESM2, ensemble: r1i1p1} + - {dataset: CCSM4, ensemble: r1i1p1} + - {dataset: CESM1-BGC, ensemble: r1i1p1} + - {dataset: CESM1-CAM5, ensemble: r1i1p1} + - {dataset: CESM1-FASTCHEM, ensemble: r1i1p1} + - {dataset: CESM1-WACCM, ensemble: r1i1p1} + # fix needed - {dataset: CMCC-CESM, ensemble: r1i1p1} + # fix needed - {dataset: CMCC-CM, ensemble: r1i1p1} + # fix needed - {dataset: CMCC-CMS, ensemble: r1i1p1} + # - {dataset: CSIRO-Mk3-6-0, ensemble: r1i1p1} + # - {dataset: FGOALS-g2, ensemble: r1i1p1} + - {dataset: FIO-ESM, ensemble: r1i1p1} + - {dataset: GFDL-CM3, ensemble: r1i1p1} + - {dataset: GFDL-ESM2G, ensemble: r1i1p1} + - {dataset: GFDL-ESM2M, ensemble: r1i1p1} + - {dataset: GISS-E2-H-CC, ensemble: r1i1p1} + - {dataset: GISS-E2-H, ensemble: r1i1p1} + - {dataset: GISS-E2-R-CC, ensemble: r1i1p1} + - {dataset: GISS-E2-R, ensemble: r1i1p1} + - {dataset: HadGEM2-CC, ensemble: r1i1p1} + - {dataset: HadGEM2-ES, ensemble: r1i1p1} + - {dataset: inmcm4, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-LR, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-MR, ensemble: r1i1p1} + - {dataset: IPSL-CM5B-LR, ensemble: r1i1p1} + # concatenation error - {dataset: MIROC5, ensemble: r1i1p1} + - {dataset: MIROC-ESM-CHEM, ensemble: r1i1p1} + - {dataset: MIROC-ESM, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, ensemble: r1i1p1} + - {dataset: MPI-ESM-MR, ensemble: r1i1p1} + - {dataset: MPI-ESM-P, ensemble: r1i1p1} + - {dataset: MRI-CGCM3, ensemble: r1i1p1} + - {dataset: MRI-ESM1, ensemble: r1i1p1} + - {dataset: NorESM1-ME, ensemble: r1i1p1} + - {dataset: NorESM1-M, ensemble: r1i1p1} + scripts: + zonal: + script: clouds/clouds_zonal.ncl + timemean: annualclim + t_test: true + explicit_cn_levels: [0., 0.002, 0.004, 0.006, 0.008, 0.01, 0.012, + 0.014, 0.016, 0.018, 0.02, 0.022, 0.024, 0.026, + 0.028] + explicit_cn_dlevels: [-0.01, -0.008, -0.006, -0.004, -0.002, 0., 0.002, + 0.004, 0.006, 0.008, 0.01] + extralegend: false + panel_labels: true + showdiff: true + rel_diff: false + units_to: g/kg + filename_add: cmip5 + + cl_cmip6: + description: 3-dim cloud cover, zonal mean + themes: + - phys + realms: + - atmos + variables: + cl: + preprocessor: pp27 + reference_dataset: ERA5 + mip: Amon + project: CMIP6 + start_year: 1995 + end_year: 2014 + exp: historical + additional_datasets: + - {dataset: ERA5, project: native6, type: reanaly, version: v1, + tier: 3, start_year: 1986, end_year: 2014} + # concatenation error - {dataset: ACCESS-CM2, institute: CSIRO-ARCCSS, + # ensemble: r1i1p1f1, grid: gn} + # concatenation error - {dataset: ACCESS-ESM1-5, ensemble: r1i1p1f1, + # grid: gn} + - {dataset: AWI-ESM-1-1-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-FV2, institute: NCAR, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-WACCM-FV2, institute: NCAR, ensemble: r1i1p1f1, + grid: gn} + - {dataset: CESM2-WACCM, institute: NCAR, ensemble: r1i1p1f1, grid: gn} + - {dataset: CIESM, ensemble: r1i1p1f1, grid: gr} + # error - {dataset: CMCC-CM2-HR4, ensemble: r1i1p1f1, grid: gn} + - {dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr} + - 
{dataset: E3SM-1-0, ensemble: r1i1p1f1, grid: gr} + # error - {dataset: E3SM-1-1-ECA, ensemble: r1i1p1f1, grid: gr} + - {dataset: E3SM-1-1, ensemble: r1i1p1f1, grid: gr} + # error - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr} + - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn} + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1} + # error - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1} + - {dataset: GISS-E2-1-G-CC, institute: NASA-GISS, ensemble: r1i1p1f1, + grid: gn} + - {dataset: GISS-E2-1-G, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn} + - {dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f3, grid: gn} + # ERROR_coordinates_screwed - {dataset: IPSL-CM6A-LR, ensemble: r3i1p1f1, + # grid: gr} + - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr} + # error - {dataset: MCM-UA-1-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn} + - {dataset: MPI-ESM-1-2-HAM, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-HR, institute: MPI-M, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorESM2-MM, ensemble: r1i1p1f1, grid: gn} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + scripts: + zonal: + script: clouds/clouds_zonal.ncl + timemean: annualclim + explicit_cn_dlevels: [-25., -20., -15., -10., -5., 0., 5., 10., 15., + 20., 25.] + explicit_cn_levels: [5., 10., 15., 20., 25., 30., 35., 40., 45., 50.] 
+ extralegend: false + panel_labels: true + showdiff: true + t_test: true + rel_diff: false + filename_add: cmip6 + + cl_cmip5: + description: 3-dim cloud cover, zonal mean + themes: + - phys + realms: + - atmos + variables: + cl: + preprocessor: pp27 + reference_dataset: ERA5 + mip: Amon + project: CMIP5 + start_year: 1986 + end_year: 2005 + exp: historical + additional_datasets: + - {dataset: ERA5, project: native6, type: reanaly, version: v1, + tier: 3, start_year: 1986, end_year: 2014} + - {dataset: ACCESS1-0, ensemble: r1i1p1} + - {dataset: ACCESS1-3, ensemble: r1i1p1} + - {dataset: bcc-csm1-1, ensemble: r1i1p1} + - {dataset: bcc-csm1-1-m, ensemble: r1i1p1} + - {dataset: BNU-ESM, ensemble: r1i1p1} + - {dataset: CanESM2, ensemble: r1i1p1} + - {dataset: CCSM4, ensemble: r1i1p1} + - {dataset: CESM1-BGC, ensemble: r1i1p1} + - {dataset: CESM1-CAM5, ensemble: r1i1p1} + - {dataset: CESM1-FASTCHEM, ensemble: r1i1p1} + - {dataset: CESM1-WACCM, ensemble: r1i1p1} + # fix needed - {dataset: CMCC-CESM, ensemble: r1i1p1} + # fix needed - {dataset: CMCC-CM, ensemble: r1i1p1} + # fix needed - {dataset: CMCC-CMS, ensemble: r1i1p1} + - {dataset: CSIRO-Mk3-6-0, ensemble: r1i1p1} + - {dataset: FGOALS-g2, ensemble: r1i1p1} + - {dataset: FIO-ESM, ensemble: r1i1p1} + - {dataset: GFDL-CM2p1, ensemble: r1i1p1} + - {dataset: GFDL-CM3, ensemble: r1i1p1} + - {dataset: GFDL-ESM2G, ensemble: r1i1p1} + - {dataset: GFDL-ESM2M, ensemble: r1i1p1} + - {dataset: GISS-E2-H-CC, ensemble: r1i1p1} + - {dataset: GISS-E2-H, ensemble: r1i1p1} + - {dataset: GISS-E2-R-CC, ensemble: r1i1p1} + - {dataset: GISS-E2-R, ensemble: r1i1p1} + - {dataset: HadGEM2-CC, ensemble: r1i1p1} + - {dataset: HadGEM2-ES, ensemble: r1i1p1} + - {dataset: inmcm4, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-LR, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-MR, ensemble: r1i1p1} + - {dataset: IPSL-CM5B-LR, ensemble: r1i1p1} + - {dataset: MIROC5, ensemble: r1i1p1} + - {dataset: MIROC-ESM-CHEM, ensemble: r1i1p1} + - {dataset: MIROC-ESM, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, ensemble: r1i1p1} + - {dataset: MPI-ESM-MR, ensemble: r1i1p1} + - {dataset: MPI-ESM-P, ensemble: r1i1p1} + - {dataset: MRI-CGCM3, ensemble: r1i1p1} + - {dataset: MRI-ESM1, ensemble: r1i1p1} + - {dataset: NorESM1-ME, ensemble: r1i1p1} + - {dataset: NorESM1-M, ensemble: r1i1p1} + scripts: + zonal: + script: clouds/clouds_zonal.ncl + timemean: annualclim + explicit_cn_dlevels: [-25., -20., -15., -10., -5., 0., 5., 10., 15., + 20., 25.] + explicit_cn_levels: [5., 10., 15., 20., 25., 30., 35., 40., 45., 50.] + extralegend: false + panel_labels: true + showdiff: true + t_test: true + rel_diff: false + filename_add: cmip5 diff --git a/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig5_lifrac.yml b/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig5_lifrac.yml new file mode 100644 index 0000000000..970bd469ab --- /dev/null +++ b/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig5_lifrac.yml @@ -0,0 +1,237 @@ +# ESMValTool +# recipe_lauer22jclim_fig5_lifrac.yml +--- +documentation: + title: Cloud diagnostics v2 (liquid/ice fraction) + + description: | + Diagnostics of 3-d cloud parameters. 
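+ # Note: a minimal sketch of what this recipe supplies to the diagnostic,
+ # inferred from the variable set and the script name (the recipe itself
+ # does not spell this out). clw, cli and ta are all interpolated to the
+ # same plev27 levels and 2x2 grid so that clouds_lifrac_scatter.ncl can
+ # form a per-cell liquid fraction, conceptually
+ #   lifrac = clw / (clw + cli)   # only where clw + cli >= min_mass
+ # and relate it to the co-located air temperature ta; min_mass (1.0e-6,
+ # assumed kg/kg) screens out essentially cloud-free cells.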
+ + authors: + - lauer_axel + + maintainer: + - lauer_axel + + references: + - lauer22jclim + + projects: + - cmug + + +preprocessors: + + pp27_all_lev: + extract_levels: + levels: {cmor_table: CMIP6, coordinate: plev27} + coordinate: air_pressure + scheme: linear + regrid: + target_grid: 2x2 + scheme: linear + multi_model_statistics: + span: overlap + statistics: [mean, median] + exclude: [reference_dataset] + + +diagnostics: + + lifrac_scatter_cmip6: + description: ice / liquid fraction vs. temperature + themes: + - phys + - clouds + realms: + - atmos + variables: + clw: + preprocessor: pp27_all_lev + mip: Amon + project: CMIP6 + start_year: 1995 + end_year: 2014 + exp: historical + reference_dataset: CLOUDSAT-L2 + additional_datasets: + - {dataset: CLOUDSAT-L2, project: OBS, type: sat, + version: P1-R05-gridbox-average-noprecip, tier: 3, + start_year: 2007, end_year: 2015} + cli: + preprocessor: pp27_all_lev + mip: Amon + project: CMIP6 + start_year: 1995 + end_year: 2014 + exp: historical + reference_dataset: CALIPSO-ICECLOUD + additional_datasets: + - {dataset: CALIPSO-ICECLOUD, project: OBS, type: sat, + version: 1-00, mip: Amon, tier: 3, + start_year: 2007, end_year: 2015} + ta: + preprocessor: pp27_all_lev + mip: Amon + project: CMIP6 + start_year: 1995 + end_year: 2014 + exp: historical + reference_dataset: ERA5 + additional_datasets: + - {dataset: ERA5, project: native6, type: reanaly, version: v1, + tier: 3, start_year: 2007, end_year: 2015} + additional_datasets: + # concatenation error - {dataset: ACCESS-CM2, institute: CSIRO-ARCCSS, + # ensemble: r1i1p1f1, grid: gn} + # concatenation error - {dataset: ACCESS-ESM1-5, ensemble: r1i1p1f1, + # grid: gn} + - {dataset: AWI-ESM-1-1-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-FV2, institute: NCAR, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-WACCM, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-WACCM-FV2, ensemble: r1i1p1f1, grid: gn} + # error - {dataset: CMCC-CM2-HR4, ensemble: r1i1p1f1, grid: gn} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr} + # error - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr} + - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn} + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1} + # ERROR_ps_missing - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1} + - {dataset: GISS-E2-1-G, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn} + # ERROR_coordinates_screwed - {dataset: IPSL-CM6A-LR, ensemble: r4i1p1f1, + # grid: gr} + - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn} + - {dataset: MPI-ESM-1-2-HAM, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-HR, institute: MPI-M, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn} + - 
{dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + scripts: + lifrac_scatter: + script: clouds/clouds_lifrac_scatter.ncl + filename_add: cmip6 + min_mass: 1.0e-6 + panel_labels: true + + lifrac_scatter_cmip5: + description: ice / liquid fraction vs. temperature + themes: + - phys + - clouds + realms: + - atmos + variables: + clw: + preprocessor: pp27_all_lev + mip: Amon + project: CMIP5 + start_year: 1986 + end_year: 2005 + exp: historical + reference_dataset: CLOUDSAT-L2 + additional_datasets: + - {dataset: CLOUDSAT-L2, project: OBS, type: sat, + version: P1-R05-gridbox-average-noprecip, tier: 3, + start_year: 2007, end_year: 2015} + cli: + preprocessor: pp27_all_lev + mip: Amon + project: CMIP5 + start_year: 1986 + end_year: 2005 + exp: historical + reference_dataset: CALIPSO-ICECLOUD + additional_datasets: + - {dataset: CALIPSO-ICECLOUD, project: OBS, type: sat, version: 1-00, + mip: Amon, tier: 3, + start_year: 2007, end_year: 2015} + ta: + preprocessor: pp27_all_lev + mip: Amon + project: CMIP5 + start_year: 1986 + end_year: 2005 + exp: historical + reference_dataset: ERA5 + additional_datasets: + - {dataset: ERA5, project: native6, type: reanaly, version: v1, + tier: 3, start_year: 2007, end_year: 2015} + additional_datasets: + # concatenation error - {dataset: ACCESS1-0, ensemble: r1i1p1} + # concatenation error - {dataset: ACCESS1-3, ensemble: r1i1p1} + - {dataset: bcc-csm1-1, ensemble: r1i1p1} + - {dataset: bcc-csm1-1-m, ensemble: r1i1p1} + - {dataset: BNU-ESM, ensemble: r1i1p1} + - {dataset: CanESM2, ensemble: r1i1p1} + - {dataset: CCSM4, ensemble: r1i1p1} + - {dataset: CESM1-BGC, ensemble: r1i1p1} + - {dataset: CESM1-CAM5, ensemble: r1i1p1} + - {dataset: CESM1-FASTCHEM, ensemble: r1i1p1} + - {dataset: CESM1-WACCM, ensemble: r1i1p1} + # data look wrong - {dataset: CMCC-CESM, ensemble: r1i1p1} + # data look wrong - {dataset: CMCC-CM, ensemble: r1i1p1} + # - {dataset: CMCC-CMS, ensemble: r1i1p1} #data look wrong + # - {dataset: CSIRO-Mk3-6-0, ensemble: r1i1p1} + # - {dataset: FGOALS-g2, ensemble: r1i1p1} + - {dataset: FIO-ESM, ensemble: r1i1p1} + - {dataset: GFDL-CM3, ensemble: r1i1p1} + - {dataset: GFDL-ESM2G, ensemble: r1i1p1} + - {dataset: GFDL-ESM2M, ensemble: r1i1p1} + - {dataset: GISS-E2-H-CC, ensemble: r1i1p1} + - {dataset: GISS-E2-H, ensemble: r1i1p1} + - {dataset: GISS-E2-R-CC, ensemble: r1i1p1} + - {dataset: GISS-E2-R, ensemble: r1i1p1} + - {dataset: HadGEM2-CC, ensemble: r1i1p1} + - {dataset: HadGEM2-ES, ensemble: r1i1p1} + - {dataset: inmcm4, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-LR, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-MR, ensemble: r1i1p1} + - {dataset: IPSL-CM5B-LR, ensemble: r1i1p1} + # - {dataset: MIROC4h, ensemble: r1i1p1} + # concatenation error - {dataset: MIROC5, ensemble: r1i1p1} + - {dataset: MIROC-ESM-CHEM, ensemble: r1i1p1} + - {dataset: MIROC-ESM, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, ensemble: r1i1p1} + - {dataset: MPI-ESM-MR, ensemble: r1i1p1} + - {dataset: MPI-ESM-P, ensemble: r1i1p1} + - {dataset: MRI-CGCM3, ensemble: r1i1p1} + - {dataset: MRI-ESM1, ensemble: r1i1p1} + - {dataset: NorESM1-ME, ensemble: r1i1p1} + - {dataset: NorESM1-M, ensemble: r1i1p1} + scripts: + lifrac_scatter: + script: clouds/clouds_lifrac_scatter.ncl + filename_add: cmip5 + min_mass: 1.0e-6 + panel_labels: true + + postproc: + description: | + Post-processing: CMIP5/CMIP6 comparison and additional statistics + scripts: + cmip5cmip6: + script: clouds/clouds_lifrac_scatter_postproc.ncl + ancestors: ['lifrac_scatter_cmip*/lifrac_scatter'] + models: [["bcc-csm1-1", 
"BCC-CSM2-MR"], + ["CESM1-WACCM", "CESM2-WACCM"], + ["GFDL-CM3", "GFDL-CM4"], + ["GISS-E2-H", "GISS-E2-1-H"], + ["HadGEM2-CC", "HadGEM3-GC31-LL"], + ["MIROC-ESM", "MIROC-ES2L"], + ["MPI-ESM-LR", "MPI-ESM1-2-LR"], + ["MRI-ESM1", "MRI-ESM2-0"], + ["NorESM1-M", "NorESM2-LM"]] + refname: "CLOUDSAT-L2-CALIPSO-ICECLOUD-ERA5" diff --git a/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig6_interannual.yml b/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig6_interannual.yml new file mode 100644 index 0000000000..cb8912eee9 --- /dev/null +++ b/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig6_interannual.yml @@ -0,0 +1,409 @@ +# recipe_lauer22jclim_fig6_interannual.yml +--- +documentation: + title: Cloud diagnostics v2 (interannual variability) + + description: | + Diagnostics for interannual variability of 2-d cloud parameters. + + authors: + - lauer_axel + + maintainer: + - lauer_axel + + references: + - lauer22jclim + + projects: + - cmug + + +preprocessors: + clim: + regrid: + target_grid: 2x2 + scheme: linear + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [CERES-EBAF, CLARA-AVHRR, CLOUDSAT-L2, ERA5, ERA-Interim, + ESACCI-CLOUD, ESACCI-WATERVAPOUR, HadISST, ISCCP-FH, MAC-LWP, + MODIS, PATMOS-x, reference_dataset, SSMI-MERIS] + + +CMIP5: &cmip5_models + - {dataset: ACCESS1-0, ensemble: r1i1p1} + - {dataset: ACCESS1-3, ensemble: r1i1p1} + - {dataset: bcc-csm1-1, ensemble: r1i1p1} + - {dataset: bcc-csm1-1-m, ensemble: r1i1p1} + - {dataset: BNU-ESM, ensemble: r1i1p1} + - {dataset: CanESM2, ensemble: r1i1p1} + - {dataset: CCSM4, ensemble: r1i1p1} + - {dataset: CESM1-BGC, ensemble: r1i1p1} + - {dataset: CESM1-CAM5, ensemble: r1i1p1} + - {dataset: CESM1-FASTCHEM, ensemble: r1i1p1} + - {dataset: CESM1-WACCM, ensemble: r1i1p1} + - {dataset: CMCC-CESM, ensemble: r1i1p1} + - {dataset: CMCC-CM, ensemble: r1i1p1} + # missing data - {dataset: CMCC-CMS, ensemble: r1i1p1} + - {dataset: CSIRO-Mk3-6-0, ensemble: r1i1p1} + - {dataset: FGOALS-g2, ensemble: r1i1p1} + - {dataset: FIO-ESM, ensemble: r1i1p1} + - {dataset: GFDL-CM3, ensemble: r1i1p1} + - {dataset: GFDL-ESM2G, ensemble: r1i1p1} + - {dataset: GFDL-ESM2M, ensemble: r1i1p1} + - {dataset: GISS-E2-H-CC, ensemble: r1i1p1} + - {dataset: GISS-E2-H, ensemble: r1i1p1} + - {dataset: GISS-E2-R-CC, ensemble: r1i1p1} + - {dataset: GISS-E2-R, ensemble: r1i1p1} + - {dataset: HadGEM2-CC, ensemble: r1i1p1} + - {dataset: HadGEM2-ES, ensemble: r1i1p1} + - {dataset: inmcm4, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-LR, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-MR, ensemble: r1i1p1} + - {dataset: IPSL-CM5B-LR, ensemble: r1i1p1} + - {dataset: MIROC4h, ensemble: r1i1p1} + - {dataset: MIROC5, ensemble: r1i1p1} + - {dataset: MIROC-ESM-CHEM, ensemble: r1i1p1} + - {dataset: MIROC-ESM, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, ensemble: r1i1p1} + - {dataset: MPI-ESM-MR, ensemble: r1i1p1} + - {dataset: MPI-ESM-P, ensemble: r1i1p1} + - {dataset: MRI-CGCM3, ensemble: r1i1p1} + - {dataset: MRI-ESM1, ensemble: r1i1p1} + - {dataset: NorESM1-ME, ensemble: r1i1p1} + - {dataset: NorESM1-M, ensemble: r1i1p1} + +CMIP6: &cmip6_models + - {dataset: AWI-ESM-1-1-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn} + # time_coord_problem_rlut - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-FV2, institute: NCAR, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-WACCM, institute: NCAR, ensemble: 
r1i1p1f1, grid: gn} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr} + # no_prw - {dataset: FGOALS-f3-L, ensemble: r2i1p1f1, grid: gr} + - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn} + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1} + - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1} + - {dataset: GISS-E2-1-G, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn} + - {dataset: IPSL-CM6A-LR, ensemble: r3i1p1f1, grid: gr} + - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn} + - {dataset: MPI-ESM-1-2-HAM, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-HR, institute: MPI-M, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + + +diagnostics: + + # ========================================================================== + # Lauer and Hamilton (2013) - Equation 2, part of Figure 8 + # Interannual variability: relative temporal standard deviation calculated + # from monthly mean anomalies after subtracting the climatological mean + # seasonal cycle + # ========================================================================== + + clouds_intera_cmip5: + description: interannual variability + themes: + - clouds + realms: + - atmos + variables: + clt: + preprocessor: clim + mip: Amon + project: CMIP5 + exp: historical + start_year: 1986 + end_year: 2005 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: CLARA-AVHRR, project: OBS, type: sat, version: V002_01, + tier: 3, start_year: 1982, end_year: 2018} + - {dataset: PATMOS-x, project: OBS, type: sat, version: NOAA, + tier: 2, start_year: 1982, end_year: 2016} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, + tier: 3, start_year: 2003, end_year: 2018} + clivi: + preprocessor: clim + mip: Amon + project: CMIP5 + exp: historical + start_year: 1986 + end_year: 2005 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: CLOUDSAT-L2, project: OBS, type: sat, + version: P1-R05-gridbox-average-noprecip, + start_year: 2006, end_year: 2017, tier: 3} + - {dataset: CLARA-AVHRR, project: OBS, type: sat, version: V002_01, + tier: 3, start_year: 1982, end_year: 2018} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, + tier: 3, start_year: 2003, end_year: 2018} + lwp: + preprocessor: clim + mip: Amon + project: CMIP5 + exp: historical + start_year: 1986 + end_year: 2005 + derive: true + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: CLARA-AVHRR, project: OBS, type: sat, version: V002_01, + tier: 
3, start_year: 1982, end_year: 2018} + - {dataset: CLOUDSAT-L2, project: OBS, type: sat, + version: P1-R05-gridbox-average-noprecip, + start_year: 2006, end_year: 2017, tier: 3} + - {dataset: MAC-LWP, project: OBS, type: sat, version: v1, tier: 3, + start_year: 1988, end_year: 2016} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, + tier: 3, start_year: 2003, end_year: 2018} + swcre: + preprocessor: clim + mip: Amon + project: CMIP5 + exp: historical + start_year: 1986 + end_year: 2005 + derive: true + reference_dataset: CERES-EBAF + additional_datasets: + - {dataset: CERES-EBAF, project: OBS, type: sat, version: Ed4.1, + tier: 2, start_year: 2001, end_year: 2021} + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, tier: 2, + start_year: 1984, end_year: 2016} + lwcre: + preprocessor: clim + mip: Amon + project: CMIP5 + exp: historical + start_year: 1986 + end_year: 2005 + derive: true + reference_dataset: CERES-EBAF + additional_datasets: + - {dataset: CERES-EBAF, project: OBS, type: sat, version: Ed4.1, + tier: 2, start_year: 2001, end_year: 2021} + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, tier: 2, + start_year: 1984, end_year: 2016} + prw: + preprocessor: clim + mip: Amon + project: CMIP5 + exp: historical + start_year: 1986 + end_year: 2005 + reference_dataset: ESACCI-WATERVAPOUR + additional_datasets: + - {dataset: ESACCI-WATERVAPOUR, project: OBS6, type: sat, + version: CDR2-L3-COMBI-05deg-fv3.1, tier: 3, + start_year: 2003, end_year: 2017} + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, tier: 2, + start_year: 1984, end_year: 2016} + additional_datasets: *cmip5_models + scripts: + varclt: &settings5 + script: clouds/clouds_interannual.ncl + colormap: WhiteBlueGreenYellowRed + projection: Robinson + timemean: annualclim + filename_add: cmip5 + var: clt + epsilon: 1.0 + varclivi: + <<: *settings5 + var: clivi + epsilon: 0.001 + varlwp: + <<: *settings5 + var: lwp + epsilon: 0.001 + varlwcre: + <<: *settings5 + var: lwcre + epsilon: 1.0 + varswcre: + <<: *settings5 + var: swcre + epsilon: 1.0 + varprw: + <<: *settings5 + var: prw + epsilon: 1.0 + + + clouds_intera_cmip6: + description: interannual variability + themes: + - clouds + realms: + - atmos + variables: + clt: + preprocessor: clim + mip: Amon + project: CMIP6 + exp: historical + start_year: 1995 + end_year: 2014 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: CLARA-AVHRR, project: OBS, type: sat, version: V002_01, + tier: 3, start_year: 1982, end_year: 2018} + - {dataset: PATMOS-x, project: OBS, type: sat, version: NOAA, + tier: 2, start_year: 1982, end_year: 2016} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, + tier: 3, start_year: 2003, end_year: 2018} + clivi: + preprocessor: clim + mip: Amon + project: CMIP6 + exp: historical + start_year: 1995 + end_year: 2014 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, + tier: 2, start_year: 1992, end_year: 2016} + - {dataset: CLOUDSAT-L2, project: OBS, type: sat, + version: P1-R05-gridbox-average-noprecip, + start_year: 
2006, end_year: 2017, tier: 3} + - {dataset: CLARA-AVHRR, project: OBS, type: sat, version: V002_01, + tier: 3, start_year: 1982, end_year: 2018} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, + tier: 3, start_year: 2003, end_year: 2018} + lwp: + preprocessor: clim + mip: Amon + project: CMIP6 + exp: historical + start_year: 1995 + end_year: 2014 + derive: true + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: CLARA-AVHRR, project: OBS, type: sat, version: V002_01, + tier: 3, start_year: 1982, end_year: 2018} + - {dataset: CLOUDSAT-L2, project: OBS, type: sat, + version: P1-R05-gridbox-average-noprecip, + start_year: 2006, end_year: 2017, tier: 3} + - {dataset: MAC-LWP, project: OBS, type: sat, version: v1, tier: 3, + start_year: 1988, end_year: 2016} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, + tier: 3, start_year: 2003, end_year: 2018} + swcre: + preprocessor: clim + mip: Amon + project: CMIP6 + exp: historical + start_year: 1995 + end_year: 2014 + derive: true + reference_dataset: CERES-EBAF + additional_datasets: + - {dataset: CERES-EBAF, project: OBS, type: sat, version: Ed4.1, + tier: 2, start_year: 2001, end_year: 2021} + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, tier: 2, + start_year: 1984, end_year: 2016} + lwcre: + preprocessor: clim + mip: Amon + project: CMIP6 + exp: historical + start_year: 1995 + end_year: 2014 + derive: true + reference_dataset: CERES-EBAF + additional_datasets: + - {dataset: CERES-EBAF, project: OBS, type: sat, version: Ed4.1, + tier: 2, start_year: 2001, end_year: 2021} + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, tier: 2, + start_year: 1984, end_year: 2016} + prw: + preprocessor: clim + mip: Amon + project: CMIP6 + exp: historical + start_year: 1995 + end_year: 2014 + reference_dataset: ESACCI-WATERVAPOUR + additional_datasets: + - {dataset: ESACCI-WATERVAPOUR, project: OBS6, type: sat, + version: CDR2-L3-COMBI-05deg-fv3.1, tier: 3, + start_year: 2003, end_year: 2017} + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, tier: 2, + start_year: 1984, end_year: 2016} + additional_datasets: *cmip6_models + scripts: + varclt: &settings6 + script: clouds/clouds_interannual.ncl + colormap: WhiteBlueGreenYellowRed + projection: Robinson + timemean: annualclim + var: clt + epsilon: 1.0 + filename_add: cmip6 + varclivi: + <<: *settings6 + var: clivi + epsilon: 0.001 + varlwp: + <<: *settings6 + var: lwp + epsilon: 0.001 + varlwcre: + <<: *settings6 + var: lwcre + epsilon: 1.0 + varswcre: + <<: *settings6 + var: swcre + epsilon: 1.0 + varprw: + <<: *settings6 + var: prw + epsilon: 1.0 diff --git a/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig7_seas.yml b/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig7_seas.yml new file mode 100644 index 0000000000..998f38b4cb --- /dev/null +++ b/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig7_seas.yml @@ -0,0 +1,421 @@ +# recipe_lauer22jclim_fig7_seas.yml +--- +documentation: + title: Cloud diagnostics v2 (seasonal cycle amplitude) + + description: | + Diagnostics of 2-d cloud parameters. 
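+ # Note: a hedged sketch of the quantity mapped by clouds_seasonal_cycle.ncl,
+ # inferred from the script name and settings rather than stated in the
+ # recipe. For each grid cell a multi-year monthly climatology clim(m),
+ # m = 1..12, is formed and the seasonal cycle amplitude is, conceptually,
+ #   amp = max_m clim(m) - min_m clim(m)
+ # with the per-variable "epsilon" values below presumably guarding against
+ # small denominators wherever a relative amplitude is derived. The seas*
+ # script entries later in this recipe reuse one settings mapping via YAML
+ # anchors and merge keys (&settings5 / <<: *settings5), overriding only
+ # "var" and "epsilon" per variable.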
+ + authors: + - lauer_axel + + maintainer: + - lauer_axel + + references: + - lauer22jclim + + projects: + - cmug + + +preprocessors: + clim: + regrid: + target_grid: 2x2 + scheme: linear + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [CERES-EBAF, CLARA-AVHRR, CLOUDSAT-L2, ERA5, ERA-Interim, + ESACCI-CLOUD, ESACCI-WATERVAPOUR, HadISST, ISCCP-FH, MAC-LWP, + MODIS, PATMOS-x, reference_dataset, SSMI-MERIS] + + +CMIP5: &cmip5_models + - {dataset: ACCESS1-0, ensemble: r1i1p1} + - {dataset: ACCESS1-3, ensemble: r1i1p1} + - {dataset: bcc-csm1-1, ensemble: r1i1p1} + - {dataset: bcc-csm1-1-m, ensemble: r1i1p1} + - {dataset: BNU-ESM, ensemble: r1i1p1} + - {dataset: CanESM2, ensemble: r1i1p1} + - {dataset: CCSM4, ensemble: r1i1p1} + - {dataset: CESM1-BGC, ensemble: r1i1p1} + - {dataset: CESM1-CAM5, ensemble: r1i1p1} + - {dataset: CESM1-FASTCHEM, ensemble: r1i1p1} + - {dataset: CESM1-WACCM, ensemble: r1i1p1} + - {dataset: CMCC-CESM, ensemble: r1i1p1} + - {dataset: CMCC-CM, ensemble: r1i1p1} + # missing data - {dataset: CMCC-CMS, ensemble: r1i1p1} + - {dataset: CSIRO-Mk3-6-0, ensemble: r1i1p1} + - {dataset: FGOALS-g2, ensemble: r1i1p1} + - {dataset: FIO-ESM, ensemble: r1i1p1} + - {dataset: GFDL-CM3, ensemble: r1i1p1} + - {dataset: GFDL-ESM2G, ensemble: r1i1p1} + - {dataset: GFDL-ESM2M, ensemble: r1i1p1} + - {dataset: GISS-E2-H-CC, ensemble: r1i1p1} + - {dataset: GISS-E2-H, ensemble: r1i1p1} + - {dataset: GISS-E2-R-CC, ensemble: r1i1p1} + - {dataset: GISS-E2-R, ensemble: r1i1p1} + - {dataset: HadGEM2-CC, ensemble: r1i1p1} + - {dataset: HadGEM2-ES, ensemble: r1i1p1} + - {dataset: inmcm4, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-LR, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-MR, ensemble: r1i1p1} + - {dataset: IPSL-CM5B-LR, ensemble: r1i1p1} + - {dataset: MIROC4h, ensemble: r1i1p1} + - {dataset: MIROC5, ensemble: r1i1p1} + - {dataset: MIROC-ESM-CHEM, ensemble: r1i1p1} + - {dataset: MIROC-ESM, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, ensemble: r1i1p1} + - {dataset: MPI-ESM-MR, ensemble: r1i1p1} + - {dataset: MPI-ESM-P, ensemble: r1i1p1} + - {dataset: MRI-CGCM3, ensemble: r1i1p1} + - {dataset: MRI-ESM1, ensemble: r1i1p1} + - {dataset: NorESM1-ME, ensemble: r1i1p1} + - {dataset: NorESM1-M, ensemble: r1i1p1} + +CMIP6: &cmip6_models + - {dataset: AWI-ESM-1-1-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn} + # time_coord_problem_rlut - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-FV2, institute: NCAR, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-WACCM, institute: NCAR, ensemble: r1i1p1f1, grid: gn} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr} + # no_prw - {dataset: FGOALS-f3-L, ensemble: r2i1p1f1, grid: gr} + - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn} + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1} + - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1} + - {dataset: GISS-E2-1-G, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn} + - {dataset: IPSL-CM6A-LR, ensemble: r3i1p1f1, grid: gr} + - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn} + - {dataset: 
MPI-ESM-1-2-HAM, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-HR, institute: MPI-M, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + + +diagnostics: + + # ============================================================================ + # Analyses climatological mean seasonal cycle amplitudes + # ============================================================================ + + clouds_seas_cmip5: + description: seasonal cycle amplitudes + themes: + - clouds + realms: + - atmos + variables: + clt: + preprocessor: clim + mip: Amon + project: CMIP5 + exp: historical + start_year: 1986 + end_year: 2005 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: CLARA-AVHRR, project: OBS, type: sat, version: V002_01, + tier: 3, start_year: 1982, end_year: 2018} + - {dataset: PATMOS-x, project: OBS, type: sat, version: NOAA, tier: 2, + start_year: 1982, end_year: 2016} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, + tier: 3, start_year: 2003, end_year: 2018} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, + tier: 3} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: '1', + tier: 3} + clivi: + preprocessor: clim + mip: Amon + project: CMIP5 + exp: historical + start_year: 1986 + end_year: 2005 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: CLOUDSAT-L2, project: OBS, type: sat, + version: P1-R05-gridbox-average-noprecip, + start_year: 2006, end_year: 2017, tier: 3} + - {dataset: CLARA-AVHRR, project: OBS, type: sat, version: V002_01, + tier: 3, start_year: 1982, end_year: 2018} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, + tier: 3, start_year: 2003, end_year: 2018} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, + tier: 3} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: '1', + tier: 3} + lwp: + preprocessor: clim + mip: Amon + project: CMIP5 + exp: historical + start_year: 1986 + end_year: 2005 + derive: true + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: CLARA-AVHRR, project: OBS, type: sat, version: V002_01, + tier: 3, start_year: 1982, end_year: 2018} + - {dataset: CLOUDSAT-L2, project: OBS, type: sat, + version: P1-R05-gridbox-average-noprecip, + start_year: 2006, end_year: 2017, tier: 3} + - {dataset: MAC-LWP, project: OBS, type: sat, version: v1, tier: 3, + start_year: 1988, end_year: 2016} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, + tier: 3, start_year: 2003, end_year: 2018} + swcre: + preprocessor: clim + mip: Amon + project: CMIP5 + exp: historical + start_year: 1986 + end_year: 2005 + derive: true + reference_dataset: CERES-EBAF + additional_datasets: + - {dataset: CERES-EBAF, project: OBS, type: sat, version: Ed4.1, + tier: 2, start_year: 2001, end_year: 
2021} + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, tier: 2, + start_year: 1984, end_year: 2016} + lwcre: + preprocessor: clim + mip: Amon + project: CMIP5 + exp: historical + start_year: 1986 + end_year: 2005 + derive: true + reference_dataset: CERES-EBAF + additional_datasets: + - {dataset: CERES-EBAF, project: OBS, type: sat, version: Ed4.1, + tier: 2, start_year: 2001, end_year: 2021} + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, tier: 2, + start_year: 1984, end_year: 2016} + prw: + preprocessor: clim + mip: Amon + project: CMIP5 + exp: historical + start_year: 1986 + end_year: 2005 + reference_dataset: ESACCI-WATERVAPOUR + additional_datasets: + - {dataset: ESACCI-WATERVAPOUR, project: OBS6, type: sat, + version: CDR2-L3-COMBI-05deg-fv3.1, tier: 3, + start_year: 2003, end_year: 2017} + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, tier: 2, + start_year: 1984, end_year: 2016} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: '1', + tier: 3} + additional_datasets: *cmip5_models + scripts: + seasclt: &settings5 + script: clouds/clouds_seasonal_cycle.ncl + projection: Robinson + filename_add: cmip5 + var: clt + epsilon: 1.0 + seasclivi: + <<: *settings5 + var: clivi + epsilon: 0.001 + seaslwp: + <<: *settings5 + var: lwp + epsilon: 0.001 + seaslwcre: + <<: *settings5 + var: lwcre + epsilon: 1.0 + seasswcre: + <<: *settings5 + var: swcre + epsilon: 1.0 + seasprw: + <<: *settings5 + var: prw + epsilon: 1.0 + + clouds_seas_cmip6: + description: seasonal cycle amplitudes + themes: + - clouds + realms: + - atmos + variables: + clt: + preprocessor: clim + mip: Amon + project: CMIP6 + exp: historical + start_year: 1995 + end_year: 2014 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: CLARA-AVHRR, project: OBS, type: sat, version: V002_01, + tier: 3, start_year: 1982, end_year: 2018} + - {dataset: PATMOS-x, project: OBS, type: sat, version: NOAA, tier: 2, + start_year: 1982, end_year: 2016} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, + tier: 3, start_year: 2003, end_year: 2018} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, + tier: 3} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: '1', + tier: 3} + clivi: + preprocessor: clim + mip: Amon + project: CMIP6 + exp: historical + start_year: 1995 + end_year: 2014 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: CLOUDSAT-L2, project: OBS, type: sat, + version: P1-R05-gridbox-average-noprecip, + start_year: 2006, end_year: 2017, tier: 3} + - {dataset: CLARA-AVHRR, project: OBS, type: sat, version: V002_01, + tier: 3, start_year: 1982, end_year: 2018} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, + tier: 3, start_year: 2003, end_year: 2018} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, + tier: 3} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: '1', + tier: 3} + lwp: + preprocessor: clim + mip: Amon + project: CMIP6 + exp: historical + 
start_year: 1995 + end_year: 2014 + derive: true + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: CLARA-AVHRR, project: OBS, type: sat, version: V002_01, + tier: 3, start_year: 1982, end_year: 2018} + - {dataset: CLOUDSAT-L2, project: OBS, type: sat, + version: P1-R05-gridbox-average-noprecip, + start_year: 2006, end_year: 2017, tier: 3} + - {dataset: MAC-LWP, project: OBS, type: sat, version: v1, tier: 3, + start_year: 1988, end_year: 2016} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, + tier: 3, start_year: 2003, end_year: 2018} + swcre: + preprocessor: clim + mip: Amon + project: CMIP6 + exp: historical + start_year: 1995 + end_year: 2014 + derive: true + reference_dataset: CERES-EBAF + additional_datasets: + - {dataset: CERES-EBAF, project: OBS, type: sat, version: Ed4.1, + tier: 2, start_year: 2001, end_year: 2021} + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, tier: 2, + start_year: 1984, end_year: 2016} + lwcre: + preprocessor: clim + mip: Amon + project: CMIP6 + exp: historical + start_year: 1995 + end_year: 2014 + derive: true + reference_dataset: CERES-EBAF + additional_datasets: + - {dataset: CERES-EBAF, project: OBS, type: sat, version: Ed4.1, + tier: 2, start_year: 2001, end_year: 2021} + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, tier: 2, + start_year: 1984, end_year: 2016} + prw: + preprocessor: clim + mip: Amon + project: CMIP6 + exp: historical + start_year: 1995 + end_year: 2014 + reference_dataset: ESACCI-WATERVAPOUR + additional_datasets: + - {dataset: ESACCI-WATERVAPOUR, project: OBS6, type: sat, + version: CDR2-L3-COMBI-05deg-fv3.1, tier: 3, + start_year: 2003, end_year: 2017} + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, tier: 2, + start_year: 1984, end_year: 2016} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: '1', + tier: 3} + additional_datasets: *cmip6_models + scripts: + seasclt: &settings6 + script: clouds/clouds_seasonal_cycle.ncl + projection: Robinson + filename_add: cmip6 + var: clt + epsilon: 1.0 + seasclivi: + <<: *settings6 + var: clivi + epsilon: 0.001 + seaslwp: + <<: *settings6 + var: lwp + epsilon: 0.001 + seaslwcre: + <<: *settings6 + var: lwcre + epsilon: 1.0 + seasswcre: + <<: *settings6 + var: swcre + epsilon: 1.0 + seasprw: + <<: *settings6 + var: prw + epsilon: 1.0 diff --git a/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig8_dyn.yml b/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig8_dyn.yml new file mode 100644 index 0000000000..43faf1f366 --- /dev/null +++ b/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig8_dyn.yml @@ -0,0 +1,353 @@ +# recipe_lauer22jclim_fig8_dyn.yml +--- +documentation: + title: Cloud diagnostics v2 (dyn) + + description: | + Diagnostics of cloud parameters sorted by dynamical regime.
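+ # Note: a hedged reading of the setup below, inferred from the
+ # preprocessors and script settings. clouds_dyn_matrix.ncl sorts a cloud
+ # property var_z (clt, or totalcwp, presumably the total cloud water path
+ # combining lwp and clivi) into a 2-d matrix of dynamical regimes spanned
+ # by var_x = ts (SST, 275-305 K) and var_y = wap at 500 hPa
+ # (-10..10 Pa min-1). The clim500_ocean preprocessor extracts the 500 hPa
+ # level, converts wap to Pa min-1 and masks out land, so only oceanic
+ # regimes enter the matrices.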
+ + authors: + - lauer_axel + + maintainer: + - lauer_axel + + references: + - lauer22jclim + + projects: + - cmug + + +preprocessors: + clim500_ocean: + # extract_region: + # start_longitude: 0 + # end_longitude: 360 + # start_latitude: -30 + # end_latitude: 30 + convert_units: + units: "Pa min-1" + mask_landsea: + mask_out: land + extract_levels: + levels: 50000 + scheme: linear + regrid: + target_grid: 2x2 + scheme: linear + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [reference_dataset] + + clim_ocean: + # extract_region: + # start_longitude: 0 + # end_longitude: 360 + # start_latitude: -30 + # end_latitude: 30 + mask_landsea: + mask_out: land + regrid: + target_grid: 2x2 + scheme: linear + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [reference_dataset] + + +CMIP5: &cmip5_models + # landmaskdimensionerror - {dataset: ACCESS1-0, ensemble: r1i1p1} + # landmaskdimensionerror - {dataset: ACCESS1-3, ensemble: r1i1p1} + - {dataset: bcc-csm1-1, ensemble: r1i1p1} + - {dataset: bcc-csm1-1-m, ensemble: r1i1p1} + - {dataset: BNU-ESM, ensemble: r1i1p1} + - {dataset: CanESM2, ensemble: r1i1p1} + - {dataset: CCSM4, ensemble: r1i1p1} + - {dataset: CESM1-BGC, ensemble: r1i1p1} + - {dataset: CESM1-CAM5, ensemble: r1i1p1} + - {dataset: CESM1-FASTCHEM, ensemble: r1i1p1} + - {dataset: CESM1-WACCM, ensemble: r1i1p1} + - {dataset: CMCC-CESM, ensemble: r1i1p1} + - {dataset: CMCC-CM, ensemble: r1i1p1} + # missing data - {dataset: CMCC-CMS, ensemble: r1i1p1} + - {dataset: CSIRO-Mk3-6-0, ensemble: r1i1p1} + - {dataset: FGOALS-g2, ensemble: r1i1p1} + - {dataset: FIO-ESM, ensemble: r1i1p1} + - {dataset: GFDL-CM3, ensemble: r1i1p1} + - {dataset: GFDL-ESM2G, ensemble: r1i1p1} + - {dataset: GFDL-ESM2M, ensemble: r1i1p1} + - {dataset: GISS-E2-H-CC, ensemble: r1i1p1} + - {dataset: GISS-E2-H, ensemble: r1i1p1} + - {dataset: GISS-E2-R-CC, ensemble: r1i1p1} + - {dataset: GISS-E2-R, ensemble: r1i1p1} + # no wap - {dataset: HadGEM2-CC, ensemble: r1i1p1} + # time_series_incomplete - {dataset: HadGEM2-ES, ensemble: r1i1p1} + - {dataset: inmcm4, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-LR, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-MR, ensemble: r1i1p1} + - {dataset: IPSL-CM5B-LR, ensemble: r1i1p1} + - {dataset: MIROC4h, ensemble: r1i1p1} + - {dataset: MIROC5, ensemble: r1i1p1} + - {dataset: MIROC-ESM-CHEM, ensemble: r1i1p1} + - {dataset: MIROC-ESM, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, ensemble: r1i1p1} + - {dataset: MPI-ESM-MR, ensemble: r1i1p1} + - {dataset: MPI-ESM-P, ensemble: r1i1p1} + - {dataset: MRI-CGCM3, ensemble: r1i1p1} + - {dataset: MRI-ESM1, ensemble: r1i1p1} + - {dataset: NorESM1-ME, ensemble: r1i1p1} + - {dataset: NorESM1-M, ensemble: r1i1p1} + +CMIP6: &cmip6_models + - {dataset: AWI-ESM-1-1-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn} + # time_coord_problem_rlut - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-FV2, institute: NCAR, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-WACCM, institute: NCAR, ensemble: r1i1p1f1, grid: gn} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr} + # no prw - {dataset: FGOALS-f3-L, ensemble: r2i1p1f1, grid: gr} + - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn} + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1} 
+ - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1} + - {dataset: GISS-E2-1-G, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn} + - {dataset: IPSL-CM6A-LR, ensemble: r3i1p1f1, grid: gr} + - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn} + - {dataset: MPI-ESM-1-2-HAM, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-HR, institute: MPI-M, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + + +diagnostics: + + # ========================================================================== + # Matrices: x = sst, y = omega500, z = clt, tcwp + # ========================================================================== + + clouds_dynmatrices_cmip5_ocean: + description: cloud matrices - dynamical regimes vs. cloud parameter + themes: + - clouds + realms: + - atmos + variables: + clt: + preprocessor: clim_ocean + mip: Amon + project: CMIP5 + exp: historical + start_year: 1986 + end_year: 2005 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + ts: + preprocessor: clim_ocean + mip: Amon + project: CMIP5 + exp: historical + start_year: 1986 + end_year: 2005 + reference_dataset: ERA5 + additional_datasets: + - {dataset: ERA5, project: native6, type: reanaly, version: v1, + tier: 3, start_year: 1992, end_year: 2016} + wap: + preprocessor: clim500_ocean + mip: Amon + project: CMIP5 + exp: historical + start_year: 1986 + end_year: 2005 + reference_dataset: ERA5 + additional_datasets: + - {dataset: ERA5, project: native6, type: reanaly, version: v1, + tier: 3, start_year: 1992, end_year: 2016} + clivi: + preprocessor: clim_ocean + mip: Amon + project: CMIP5 + exp: historical + start_year: 1986 + end_year: 2005 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + lwp: + preprocessor: clim_ocean + mip: Amon + project: CMIP5 + exp: historical + start_year: 1986 + end_year: 2005 + derive: true + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + additional_datasets: *cmip5_models + scripts: + dynclt: + script: clouds/clouds_dyn_matrix.ncl + var_x: ts + var_y: wap + var_z: clt + xmin: 275 + xmax: 305 + ymin: -10 + ymax: 10 + zmin: 5 + zmax: 95 + zdmin: -45 + zdmax: 45 + xlabel: SST + ylabel: omega500 + clevels: [1, 2, 3, 4, 5, 10, 15, 20, 25, 30, 40, 50, 60, 70, 100, + 130, 160, 190] + filename_add: cmip5_ocean + dyntcwp: + script: clouds/clouds_dyn_matrix.ncl + var_x: ts + var_y: wap + var_z: totalcwp + xmin: 275 + xmax: 305 + ymin: -10 + ymax: 10 + zmin: 0.05 + zmax: 0.5 + zdmin: -0.09 + zdmax: 0.09 + xlabel: SST + ylabel: omega500 + clevels: [1, 2, 3, 4, 5, 10, 15, 20, 25, 30, 40, 50, 60, 70, 100, 130, + 160, 190] + filename_add: 
cmip5_ocean + + # ############################################################################ + + clouds_dynmatrices_cmip6_ocean: + description: cloud matrices - dynamical regimes vs. cloud parameter + themes: + - clouds + realms: + - atmos + variables: + clt: + preprocessor: clim_ocean + mip: Amon + project: CMIP6 + exp: historical + start_year: 1995 + end_year: 2014 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + ts: + preprocessor: clim_ocean + mip: Amon + project: CMIP6 + exp: historical + start_year: 1995 + end_year: 2014 + reference_dataset: ERA5 + additional_datasets: + - {dataset: ERA5, project: native6, type: reanaly, version: v1, + tier: 3, start_year: 1992, end_year: 2016} + wap: + preprocessor: clim500_ocean + mip: Amon + project: CMIP6 + exp: historical + start_year: 1995 + end_year: 2014 + reference_dataset: ERA5 + additional_datasets: + - {dataset: ERA5, project: native6, type: reanaly, version: v1, + tier: 3, start_year: 1992, end_year: 2016} + clivi: + preprocessor: clim_ocean + mip: Amon + project: CMIP6 + exp: historical + start_year: 1995 + end_year: 2014 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + lwp: + preprocessor: clim_ocean + mip: Amon + project: CMIP6 + exp: historical + start_year: 1995 + end_year: 2014 + derive: true + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + additional_datasets: *cmip6_models + scripts: + dynclt: + script: clouds/clouds_dyn_matrix.ncl + var_x: ts + var_y: wap + var_z: clt + xmin: 275 + xmax: 305 + ymin: -10 + ymax: 10 + zmin: 5 + zmax: 95 + zdmin: -45 + zdmax: 45 + xlabel: SST + ylabel: omega500 + clevels: [1, 2, 3, 4, 5, 10, 15, 20, 25, 30, 40, 50, 60, 70, 100, + 130, 160, 190] + filename_add: cmip6_ocean + dyntcwp: + script: clouds/clouds_dyn_matrix.ncl + var_x: ts + var_y: wap + var_z: totalcwp + xmin: 275 + xmax: 305 + ymin: -10 + ymax: 10 + zmin: 0.05 + zmax: 0.5 + zdmin: -0.09 + zdmax: 0.09 + xlabel: SST + ylabel: omega500 + clevels: [1, 2, 3, 4, 5, 10, 15, 20, 25, 30, 40, 50, 60, 70, 100, + 130, 160, 190] + filename_add: cmip6_ocean diff --git a/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig9-11ab_scatter.yml b/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig9-11ab_scatter.yml new file mode 100644 index 0000000000..61f0dcaee3 --- /dev/null +++ b/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig9-11ab_scatter.yml @@ -0,0 +1,625 @@ +# ESMValTool +# recipe_lauer22jclim_fig9-11ab_scatter.yml +--- +documentation: + title: Cloud diagnostics v2 (scatter) + + description: | + Cloud properties sorted by total cloud cover. 
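+ # Note: context for the three region preprocessors below, read from their
+ # settings (the recipe gives no prose description). Cloud properties are
+ # sorted by total cloud cover separately for the tropical ITCZ Pacific
+ # (135-275 deg E, 0-12 deg N), the southeast Pacific stratocumulus deck
+ # (265-275 deg E, 5-25 deg S) and the Southern Ocean (30-65 deg S), where
+ # extract_season limits the data to austral summer (DJF) before land
+ # masking and regridding are applied.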
+ + authors: + - lauer_axel + + maintainer: + - lauer_axel + + references: + - lauer22jclim + + projects: + - cmug + + +preprocessors: + ppITCZPacific: + extract_region: + start_longitude: 135 + end_longitude: 275 + start_latitude: 0 + end_latitude: 12 + regrid: + target_grid: 1x1 + scheme: linear + mask_landsea: + mask_out: land + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [reference_dataset] + + ppSEPacific: + extract_region: + start_longitude: 265 + end_longitude: 275 + start_latitude: -25 + end_latitude: -5 + mask_landsea: + mask_out: land + regrid: + target_grid: 1x1 + scheme: linear + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [reference_dataset] + + ppSouthernOcean: + extract_region: + start_longitude: 0 + end_longitude: 360 + start_latitude: -65 + end_latitude: -30 + extract_season: + season: DJF + mask_landsea: + mask_out: land + regrid: + target_grid: 2x2 + scheme: linear + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [reference_dataset] + + +CMIP5: &cmip5_models + - {dataset: ACCESS1-0, ensemble: r1i1p1} + - {dataset: ACCESS1-3, ensemble: r1i1p1} + - {dataset: bcc-csm1-1, ensemble: r1i1p1} + - {dataset: bcc-csm1-1-m, ensemble: r1i1p1} + - {dataset: BNU-ESM, ensemble: r1i1p1} + - {dataset: CanESM2, ensemble: r1i1p1} + - {dataset: CCSM4, ensemble: r1i1p1} + - {dataset: CESM1-BGC, ensemble: r1i1p1} + - {dataset: CESM1-CAM5, ensemble: r1i1p1} + - {dataset: CESM1-FASTCHEM, ensemble: r1i1p1} + - {dataset: CESM1-WACCM, ensemble: r1i1p1} + - {dataset: CMCC-CESM, ensemble: r1i1p1} + - {dataset: CMCC-CM, ensemble: r1i1p1} + # missing data - {dataset: CMCC-CMS, ensemble: r1i1p1} + - {dataset: CSIRO-Mk3-6-0, ensemble: r1i1p1} + - {dataset: FGOALS-g2, ensemble: r1i1p1} + - {dataset: FIO-ESM, ensemble: r1i1p1} + - {dataset: GFDL-CM3, ensemble: r1i1p1} + - {dataset: GFDL-ESM2G, ensemble: r1i1p1} + - {dataset: GFDL-ESM2M, ensemble: r1i1p1} + - {dataset: GISS-E2-H-CC, ensemble: r1i1p1} + - {dataset: GISS-E2-H, ensemble: r1i1p1} + - {dataset: GISS-E2-R-CC, ensemble: r1i1p1} + - {dataset: GISS-E2-R, ensemble: r1i1p1} + # error_extract_season - {dataset: HadGEM2-CC, ensemble: r1i1p1} + # error_extract_season - {dataset: HadGEM2-ES, ensemble: r1i1p1} + - {dataset: inmcm4, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-LR, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-MR, ensemble: r1i1p1} + - {dataset: IPSL-CM5B-LR, ensemble: r1i1p1} + - {dataset: MIROC4h, ensemble: r1i1p1} + - {dataset: MIROC5, ensemble: r1i1p1} + - {dataset: MIROC-ESM-CHEM, ensemble: r1i1p1} + - {dataset: MIROC-ESM, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, ensemble: r1i1p1} + - {dataset: MPI-ESM-MR, ensemble: r1i1p1} + - {dataset: MPI-ESM-P, ensemble: r1i1p1} + - {dataset: MRI-CGCM3, ensemble: r1i1p1} + - {dataset: MRI-ESM1, ensemble: r1i1p1} + - {dataset: NorESM1-ME, ensemble: r1i1p1} + - {dataset: NorESM1-M, ensemble: r1i1p1} + +CMIP6: &cmip6_models + - {dataset: AWI-ESM-1-1-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn} + # time_coord_problem_rlut - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-FV2, institute: NCAR, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-WACCM, institute: NCAR, ensemble: r1i1p1f1, grid: gn} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, 
grid: gr} + # no_prw - {dataset: FGOALS-f3-L, ensemble: r2i1p1f1, grid: gr} + - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn} + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1} + - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1} + - {dataset: GISS-E2-1-G, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn} + - {dataset: IPSL-CM6A-LR, ensemble: r3i1p1f1, grid: gr} + - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn} + - {dataset: MPI-ESM-1-2-HAM, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-HR, institute: MPI-M, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + + +diagnostics: + + scatter_southernocean_cmip5: + description: scatter plot cloud fraction vs shortwave cloud forcing + themes: + - phys + - clouds + realms: + - atmos + variables: + clt: + preprocessor: ppSouthernOcean + mip: Amon + project: CMIP5 + exp: historical + start_year: 1986 + end_year: 2005 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + clivi: + preprocessor: ppSouthernOcean + mip: Amon + project: CMIP5 + exp: historical + start_year: 1986 + end_year: 2005 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + lwp: + preprocessor: ppSouthernOcean + mip: Amon + project: CMIP5 + exp: historical + start_year: 1986 + end_year: 2005 + derive: true + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + swcre: + preprocessor: ppSouthernOcean + mip: Amon + project: CMIP5 + exp: historical + start_year: 1986 + end_year: 2005 + derive: true + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + additional_datasets: *cmip5_models + scripts: + scatter_swcre: + script: clouds/clouds_scatter.ncl + var_x: clt + var_y: swcre + xmin: 0 + xmax: 100 + ymin_mm: -150.0 + ymax_mm: 0.0 + filename_add: so_cmip5 + scatter_totalcwp: + script: clouds/clouds_scatter.ncl + var_x: clt + var_y: totalcwp + xmin: 0 + xmax: 100 + ymin_mm: 0.0 + ymax_mm: 0.2 + filename_add: so_cmip5 + + scatter_southernocean_cmip6: + description: scatter plot cloud fraction vs shortwave cloud forcing + themes: + - phys + - clouds + realms: + - atmos + variables: + clt: + preprocessor: ppSouthernOcean + mip: Amon + project: CMIP6 + exp: historical + start_year: 1995 + end_year: 2014 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + clivi: + preprocessor: ppSouthernOcean + mip: Amon + project: CMIP6 + exp: historical + start_year: 
1995 + end_year: 2014 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + lwp: + preprocessor: ppSouthernOcean + mip: Amon + project: CMIP6 + exp: historical + start_year: 1995 + end_year: 2014 + derive: true + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + swcre: + preprocessor: ppSouthernOcean + mip: Amon + project: CMIP6 + exp: historical + start_year: 1995 + end_year: 2014 + derive: true + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + additional_datasets: *cmip6_models + scripts: + scatter_swcre: + script: clouds/clouds_scatter.ncl + var_x: clt + var_y: swcre + xmin: 0 + xmax: 100 + ymin_mm: -150.0 + ymax_mm: 0.0 + filename_add: so_cmip6 + scatter_totalcwp: + script: clouds/clouds_scatter.ncl + var_x: clt + var_y: totalcwp + xmin: 0 + xmax: 100 + ymin_mm: 0.0 + ymax_mm: 0.2 + filename_add: so_cmip6 + + scatter_sepacific_cmip5: + description: scatter plot cloud fraction vs shortwave cloud forcing + themes: + - phys + - clouds + realms: + - atmos + variables: + clt: + preprocessor: ppSEPacific + mip: Amon + project: CMIP5 + exp: historical + start_year: 1996 + end_year: 2005 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + clivi: + preprocessor: ppSEPacific + mip: Amon + project: CMIP5 + exp: historical + start_year: 1996 + end_year: 2005 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + lwp: + preprocessor: ppSEPacific + mip: Amon + project: CMIP5 + exp: historical + start_year: 1996 + end_year: 2005 + derive: true + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + swcre: + preprocessor: ppSEPacific + mip: Amon + project: CMIP5 + exp: historical + start_year: 1996 + end_year: 2005 + derive: true + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + additional_datasets: *cmip5_models + scripts: + scatter_swcre: + script: clouds/clouds_scatter.ncl + var_x: clt + var_y: swcre + xmin: 0 + xmax: 100 + ymin_mm: -150.0 + ymax_mm: 0.0 + filename_add: sep_cmip5 + scatter_totalcwp: + script: clouds/clouds_scatter.ncl + var_x: clt + var_y: totalcwp + xmin: 0 + xmax: 100 + ymin_mm: 0.0 + ymax_mm: 0.2 + filename_add: sep_cmip5 + + scatter_sepacific_cmip6: + description: scatter plot cloud fraction vs shortwave cloud forcing + themes: + - phys + - clouds + realms: + - atmos + variables: + clt: + preprocessor: ppSEPacific + mip: Amon + project: CMIP6 + exp: historical + start_year: 1996 + end_year: 2014 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + clivi: + preprocessor: ppSEPacific + mip: Amon + project: CMIP6 + exp: historical + 
start_year: 1996 + end_year: 2014 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + lwp: + preprocessor: ppSEPacific + mip: Amon + project: CMIP6 + exp: historical + start_year: 1996 + end_year: 2014 + derive: true + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + swcre: + preprocessor: ppSEPacific + mip: Amon + project: CMIP6 + exp: historical + start_year: 1996 + end_year: 2014 + derive: true + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + additional_datasets: *cmip6_models + scripts: + scatter_swcre: + script: clouds/clouds_scatter.ncl + var_x: clt + var_y: swcre + xmin: 0 + xmax: 100 + ymin_mm: -150.0 + ymax_mm: 0.0 + filename_add: sep_cmip6 + scatter_totalcwp: + script: clouds/clouds_scatter.ncl + var_x: clt + var_y: totalcwp + xmin: 0 + xmax: 100 + ymin_mm: 0.0 + ymax_mm: 0.2 + filename_add: sep_cmip6 + + scatter_itcz_cmip5: + description: scatter plot cloud fraction vs shortwave cloud forcing + themes: + - phys + - clouds + realms: + - atmos + variables: + clt: + preprocessor: ppITCZPacific + mip: Amon + project: CMIP5 + exp: historical + start_year: 1996 + end_year: 2005 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + clivi: + preprocessor: ppITCZPacific + mip: Amon + project: CMIP5 + exp: historical + start_year: 1996 + end_year: 2005 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + lwp: + preprocessor: ppITCZPacific + mip: Amon + project: CMIP5 + exp: historical + start_year: 1996 + end_year: 2005 + derive: true + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + swcre: + preprocessor: ppITCZPacific + mip: Amon + project: CMIP5 + exp: historical + start_year: 1996 + end_year: 2005 + derive: true + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + additional_datasets: *cmip5_models + scripts: + scatter_swcre: + script: clouds/clouds_scatter.ncl + var_x: clt + var_y: swcre + xmin: 0 + xmax: 100 + ymin_mm: -150.0 + ymax_mm: 0.0 + filename_add: itcz_cmip5 + scatter_totalcwp: + script: clouds/clouds_scatter.ncl + var_x: clt + var_y: totalcwp + xmin: 0 + xmax: 100 + ymin_mm: 0.0 + ymax_mm: 0.3 + filename_add: itcz_cmip5 + + scatter_itcz_cmip6: + description: scatter plot cloud fraction vs shortwave cloud forcing + themes: + - phys + - clouds + realms: + - atmos + variables: + clt: + preprocessor: ppITCZPacific + mip: Amon + project: CMIP6 + exp: historical + start_year: 1996 + end_year: 2014 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + clivi: + preprocessor: ppITCZPacific + mip: Amon + project: CMIP6 + exp: 
historical + start_year: 1996 + end_year: 2014 + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + lwp: + preprocessor: ppITCZPacific + mip: Amon + project: CMIP6 + exp: historical + start_year: 1996 + end_year: 2014 + derive: true + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + swcre: + preprocessor: ppITCZPacific + mip: Amon + project: CMIP6 + exp: historical + start_year: 1996 + end_year: 2014 + derive: true + reference_dataset: ESACCI-CLOUD + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2, + start_year: 1992, end_year: 2016} + additional_datasets: *cmip6_models + scripts: + scatter_swcre: + script: clouds/clouds_scatter.ncl + var_x: clt + var_y: swcre + xmin: 0 + xmax: 100 + ymin_mm: -150.0 + ymax_mm: 0.0 + filename_add: itcz_cmip6 + scatter_totalcwp: + script: clouds/clouds_scatter.ncl + var_x: clt + var_y: totalcwp + xmin: 0 + xmax: 100 + ymin_mm: 0.0 + ymax_mm: 0.3 + filename_add: itcz_cmip6 diff --git a/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig9-11c_pdf.yml b/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig9-11c_pdf.yml new file mode 100644 index 0000000000..223aaa2583 --- /dev/null +++ b/esmvaltool/recipes/clouds/recipe_lauer22jclim_fig9-11c_pdf.yml @@ -0,0 +1,276 @@ +# ESMValTool +# recipe_lauer22jclim_fig9-11c_pdf.yml +--- +documentation: + title: Cloud diagnostics v2 (pdf) + + description: | + Frequency distributions of cloud parameters. + + authors: + - lauer_axel + + maintainer: + - lauer_axel + + references: + - lauer22jclim + + projects: + - cmug + + +preprocessors: + ppOcean: + regrid: + target_grid: 2x2 + scheme: linear + # multi_model_statistics: + # span: overlap + # statistics: [mean] + # exclude: [reference_dataset] + mask_landsea: + mask_out: land + + ppSouthernOcean: + extract_region: + start_longitude: 0 + end_longitude: 360 + start_latitude: -65 + end_latitude: -30 + extract_season: + season: DJF + mask_landsea: + mask_out: land + # multi_model_statistics: + # span: overlap + # statistics: [mean] + # exclude: [reference_dataset] + regrid: + target_grid: 2x2 + scheme: linear + + +CMIP5: &cmip5_models + - {dataset: ACCESS1-0, ensemble: r1i1p1} + - {dataset: ACCESS1-3, ensemble: r1i1p1} + - {dataset: bcc-csm1-1, ensemble: r1i1p1} + - {dataset: bcc-csm1-1-m, ensemble: r1i1p1} + - {dataset: BNU-ESM, ensemble: r1i1p1} + - {dataset: CanESM2, ensemble: r1i1p1} + - {dataset: CCSM4, ensemble: r1i1p1} + - {dataset: CESM1-BGC, ensemble: r1i1p1} + - {dataset: CESM1-CAM5, ensemble: r1i1p1} + - {dataset: CESM1-FASTCHEM, ensemble: r1i1p1} + - {dataset: CESM1-WACCM, ensemble: r1i1p1} + - {dataset: CMCC-CESM, ensemble: r1i1p1} + - {dataset: CMCC-CM, ensemble: r1i1p1} + # missing data - {dataset: CMCC-CMS, ensemble: r1i1p1} + - {dataset: CSIRO-Mk3-6-0, ensemble: r1i1p1} + - {dataset: FGOALS-g2, ensemble: r1i1p1} + - {dataset: FIO-ESM, ensemble: r1i1p1} + - {dataset: GFDL-CM3, ensemble: r1i1p1} + - {dataset: GFDL-ESM2G, ensemble: r1i1p1} + - {dataset: GFDL-ESM2M, ensemble: r1i1p1} + - {dataset: GISS-E2-H-CC, ensemble: r1i1p1} + - {dataset: GISS-E2-H, ensemble: r1i1p1} + - {dataset: GISS-E2-R-CC, ensemble: r1i1p1} + - {dataset: GISS-E2-R, ensemble: r1i1p1} + - {dataset: HadGEM2-CC, ensemble: r1i1p1} + - {dataset: HadGEM2-ES, ensemble: r1i1p1} + - 
{dataset: inmcm4, ensemble: r1i1p1}
+ - {dataset: IPSL-CM5A-LR, ensemble: r1i1p1}
+ - {dataset: IPSL-CM5A-MR, ensemble: r1i1p1}
+ - {dataset: IPSL-CM5B-LR, ensemble: r1i1p1}
+ - {dataset: MIROC4h, ensemble: r1i1p1}
+ - {dataset: MIROC5, ensemble: r1i1p1}
+ - {dataset: MIROC-ESM-CHEM, ensemble: r1i1p1}
+ - {dataset: MIROC-ESM, ensemble: r1i1p1}
+ - {dataset: MPI-ESM-LR, ensemble: r1i1p1}
+ - {dataset: MPI-ESM-MR, ensemble: r1i1p1}
+ - {dataset: MPI-ESM-P, ensemble: r1i1p1}
+ - {dataset: MRI-CGCM3, ensemble: r1i1p1}
+ - {dataset: MRI-ESM1, ensemble: r1i1p1}
+ - {dataset: NorESM1-ME, ensemble: r1i1p1}
+ - {dataset: NorESM1-M, ensemble: r1i1p1}
+
+CMIP6: &cmip6_models
+ - {dataset: AWI-ESM-1-1-LR, ensemble: r1i1p1f1, grid: gn}
+ - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn}
+ # time_coord_problem_rlut - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn}
+ - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn}
+ - {dataset: CESM2-FV2, institute: NCAR, ensemble: r1i1p1f1, grid: gn}
+ - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn}
+ - {dataset: CESM2-WACCM, institute: NCAR, ensemble: r1i1p1f1, grid: gn}
+ - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr}
+ - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr}
+ - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr}
+ # no_prw - {dataset: FGOALS-f3-L, ensemble: r2i1p1f1, grid: gr}
+ - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn}
+ - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1}
+ - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1}
+ - {dataset: GISS-E2-1-G, ensemble: r1i1p1f1, grid: gn}
+ - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn}
+ - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn}
+ - {dataset: IPSL-CM6A-LR, ensemble: r3i1p1f1, grid: gr}
+ - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr}
+ - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn}
+ - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn}
+ - {dataset: MPI-ESM-1-2-HAM, ensemble: r1i1p1f1, grid: gn}
+ - {dataset: MPI-ESM1-2-HR, institute: MPI-M, ensemble: r1i1p1f1, grid: gn}
+ - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn}
+ - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn}
+ - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn}
+ - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn}
+ - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn}
+ - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn}
+ - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn}
+
+
+diagnostics:
+
+ pdf_clt_djf_cmip6:
+ description: frequency distribution of total cloud cover (Southern Ocean, DJF)
+ themes:
+ - clouds
+ realms:
+ - atmos
+ variables:
+ clt:
+ preprocessor: ppSouthernOcean
+ reference_dataset: ESACCI-CLOUD
+ project: CMIP6
+ mip: Amon
+ exp: historical
+ start_year: 1995
+ end_year: 2014
+ additional_datasets:
+ - {dataset: ESACCI-CLOUD, project: OBS, type: sat,
+ version: AVHRR-AMPM-fv3.0, tier: 2,
+ start_year: 1992, end_year: 2016}
+ additional_datasets: *cmip6_models
+ scripts:
+ southernocean:
+ script: clouds/clouds_pdf.ncl
+ styleset: lauer21
+ xmin: 0
+ xmax: 100
+ ymin: 0
+ ymax: 30
+ plot_average: true
+ filename_add: so_cmip6
+
+ pdf_clt_djf_cmip5:
+ description: frequency distribution of total cloud cover (Southern Ocean, DJF)
+ themes:
+ - clouds
+ realms:
+ - atmos
+ variables:
+ clt:
+ preprocessor: ppSouthernOcean
+ reference_dataset: ESACCI-CLOUD
+ project: CMIP5
+ mip: Amon
+ exp: historical
+ start_year: 1986
+ end_year: 2005
+ additional_datasets:
+ - {dataset: ESACCI-CLOUD, project: OBS, type: sat,
+ version: AVHRR-AMPM-fv3.0, tier: 2,
+ start_year: 1992, end_year: 2016}
+ additional_datasets: 
*cmip5_models
+ scripts:
+ southernocean:
+ script: clouds/clouds_pdf.ncl
+ styleset: lauer21
+ xmin: 0
+ xmax: 100
+ ymin: 0
+ ymax: 30
+ plot_average: true
+ filename_add: so_cmip5
+
+ pdf_clt_ann_cmip6:
+ description: frequency distributions of total cloud cover (SE Pacific and Pacific ITCZ)
+ themes:
+ - clouds
+ realms:
+ - atmos
+ variables:
+ clt:
+ preprocessor: ppOcean
+ reference_dataset: ESACCI-CLOUD
+ project: CMIP6
+ mip: Amon
+ exp: historical
+ start_year: 1995
+ end_year: 2014
+ additional_datasets:
+ - {dataset: ESACCI-CLOUD, project: OBS, type: sat,
+ version: AVHRR-AMPM-fv3.0, tier: 2,
+ start_year: 1992, end_year: 2016}
+ additional_datasets: *cmip6_models
+ scripts:
+ sep:
+ script: clouds/clouds_pdf.ncl
+ region: [-25, -5, 265, 275]
+ styleset: lauer21
+ xmin: 0
+ xmax: 100
+ ymin: 0
+ ymax: 24
+ plot_average: true
+ filename_add: sep_cmip6
+ itcz:
+ script: clouds/clouds_pdf.ncl
+ styleset: lauer21
+ region: [0, 12, 135, 275]
+ xmin: 0
+ xmax: 100
+ ymin: 0
+ ymax: 27
+ plot_average: true
+ filename_add: itcz_cmip6
+
+ pdf_clt_ann_cmip5:
+ description: frequency distributions of total cloud cover (SE Pacific and Pacific ITCZ)
+ themes:
+ - clouds
+ realms:
+ - atmos
+ variables:
+ clt:
+ preprocessor: ppOcean
+ reference_dataset: ESACCI-CLOUD
+ project: CMIP5
+ mip: Amon
+ exp: historical
+ start_year: 1986
+ end_year: 2005
+ additional_datasets:
+ - {dataset: ESACCI-CLOUD, project: OBS, type: sat,
+ version: AVHRR-AMPM-fv3.0, tier: 2,
+ start_year: 1992, end_year: 2016}
+ additional_datasets: *cmip5_models
+ scripts:
+ sep:
+ script: clouds/clouds_pdf.ncl
+ styleset: lauer21
+ region: [-25, -5, 265, 275]
+ xmin: 0
+ xmax: 100
+ ymin: 0
+ ymax: 24
+ plot_average: true
+ filename_add: sep_cmip5
+ itcz:
+ script: clouds/clouds_pdf.ncl
+ styleset: lauer21
+ region: [0, 12, 135, 275]
+ xmin: 0
+ xmax: 100
+ ymin: 0
+ ymax: 27
+ plot_average: true
+ filename_add: itcz_cmip5
diff --git a/esmvaltool/recipes/cmorizers/recipe_daily_era5.yml b/esmvaltool/recipes/cmorizers/recipe_daily_era5.yml
new file mode 100644
index 0000000000..986b2583bc
--- /dev/null
+++ b/esmvaltool/recipes/cmorizers/recipe_daily_era5.yml
@@ -0,0 +1,162 @@
+# ESMValTool
+# recipe_daily_era5.yml
+---
+documentation:
+ description: Create daily ERA5 data
+
+ title: Daily ERA5 data CMORization and creation
+
+ authors:
+ - andela_bouwe
+ - kalverla_peter
+ - alidoost_sarah
+
+ maintainer:
+ - unmaintained
+
+ references:
+ - acknow_project
+ - hersbach20rmets
+
+ projects:
+ - ewatercycle
+
+datasets:
+ # For the daily diagnostic, always add the next year; otherwise the last day is not CMOR-compatible.
+ - {dataset: ERA5, project: native6, type: reanaly, version: v1, tier: 3, start_year: 1990, end_year: 1991}
+
+preprocessors:
+ add_one_day: &add_one_day
+ extract_time:
+ start_year: 1990
+ start_month: 1
+ start_day: 1
+ end_year: 1991
+ end_month: 1
+ end_day: 1
+
+ daily_mean:
+ <<: *add_one_day
+ daily_statistics:
+ operator: mean
+ daily_min:
+ <<: *add_one_day
+ daily_statistics:
+ operator: min
+ daily_max:
+ <<: *add_one_day
+ daily_statistics:
+ operator: max
+
+diagnostics:
+ daily:
+ description: Create daily ERA5 data
+ scripts:
+ rename:
+ script: cmorizers/era5.py
+ variables:
+ clt:
+ mip: E1hr
+ era5_name: total_cloud_cover
+ era5_freq: hourly
+ preprocessor: daily_mean
+ evspsbl:
+ mip: E1hr
+ era5_name: evaporation
+ era5_freq: hourly
+ preprocessor: daily_mean
+ evspsblpot:
+ mip: E1hr
+ era5_name: potential_evaporation
+ era5_freq: hourly
+ preprocessor: daily_mean
+ mrro:
+ mip: E1hr
+ era5_name: runoff
+ era5_freq: hourly
+ preprocessor: daily_mean
+ pr:
+ mip: E1hr
+ 
era5_name: total_precipitation
+ era5_freq: hourly
+ preprocessor: daily_mean
+ prsn:
+ mip: E1hr
+ era5_name: snowfall
+ era5_freq: hourly
+ preprocessor: daily_mean
+ ps:
+ mip: E1hr
+ era5_name: surface_pressure
+ era5_freq: hourly
+ preprocessor: daily_mean
+ psl:
+ mip: E1hr
+ era5_name: mean_sea_level_pressure
+ era5_freq: hourly
+ preprocessor: daily_mean
+ rlds:
+ mip: E1hr
+ era5_name: surface_thermal_radiation_downwards
+ era5_freq: hourly
+ preprocessor: daily_mean
+ rls:
+ mip: E1hr
+ era5_name: mean_surface_net_long_wave_radiation_flux
+ era5_freq: hourly
+ preprocessor: daily_mean
+ rsds:
+ mip: E1hr
+ era5_name: surface_solar_radiation_downwards
+ era5_freq: hourly
+ preprocessor: daily_mean
+ rsdt:
+ mip: E1hr
+ era5_name: toa_incident_solar_radiation
+ era5_freq: hourly
+ preprocessor: daily_mean
+ rss:
+ mip: E1hr
+ era5_name: surface_net_solar_radiation
+ era5_freq: hourly
+ preprocessor: daily_mean
+ tas:
+ mip: E1hr
+ era5_name: 2m_temperature
+ era5_freq: hourly
+ preprocessor: daily_mean
+ tasmax:
+ mip: E1hr
+ era5_name: maximum_2m_temperature_since_previous_post_processing
+ era5_freq: hourly
+ preprocessor: daily_max
+ tasmin:
+ mip: E1hr
+ era5_name: minimum_2m_temperature_since_previous_post_processing
+ era5_freq: hourly
+ preprocessor: daily_min
+ tdps:
+ mip: E1hr
+ era5_name: 2m_dewpoint_temperature
+ era5_freq: hourly
+ preprocessor: daily_mean
+ ts:
+ mip: E1hr
+ era5_name: skin_temperature
+ era5_freq: hourly
+ preprocessor: daily_mean
+ tsn:
+ mip: E1hr
+ era5_name: temperature_of_snow_layer
+ era5_freq: hourly
+ preprocessor: daily_mean
+ uas:
+ mip: E1hr
+ era5_name: 10m_u_component_of_wind
+ era5_freq: hourly
+ preprocessor: daily_mean
+ vas:
+ mip: E1hr
+ era5_name: 10m_v_component_of_wind
+ era5_freq: hourly
+ preprocessor: daily_mean
diff --git a/esmvaltool/recipes/cmorizers/recipe_era5-land.yml b/esmvaltool/recipes/cmorizers/recipe_era5-land.yml
new file mode 100644
index 0000000000..3a8782ad67
--- /dev/null
+++ b/esmvaltool/recipes/cmorizers/recipe_era5-land.yml
@@ -0,0 +1,35 @@
+# ESMValTool
+# recipe_era5-land.yml
+---
+documentation:
+ title: CMORizer for ERA5-Land
+ description: CMORize ERA5-Land data
+
+ authors:
+ - weigel_katja
+
+ maintainer:
+ - unmaintained
+
+ references:
+ - era5-land
+
+ projects:
+ - eval4cmip
+
+datasets:
+ - {dataset: ERA5-Land, project: native6, type: reanaly, version: v1,
+ tier: 3, start_year: 1981, end_year: 1982}
+
+diagnostics:
+ monthly:
+ description: CMORize monthly ERA5-Land data, so far only for
+ pr (ERA5 name total_precipitation).
+ scripts:
+ rename:
+ script: cmorizers/era5.py
+ variables:
+ pr:
+ mip: Amon
+ # era5_name: total_precipitation
+ # era5_freq: monthly
diff --git a/esmvaltool/recipes/examples/recipe_check_obs.yml b/esmvaltool/recipes/examples/recipe_check_obs.yml
index 1bf9bc1d42..880aef831a 100644
--- a/esmvaltool/recipes/examples/recipe_check_obs.yml
+++ b/esmvaltool/recipes/examples/recipe_check_obs.yml
@@ -6,565 +6,2030 @@ documentation:
 Test recipe for OBS, no preprocessor or diagnostics are applied, just
 to check correct reading of the CMORized data.
+ title: Recipe to test running all obs cmorizers.
+
+ authors:
- - righ_ma
+ - righi_mattia

-preprocessors:
- nopp:
- extract_levels: false
- regrid: false
- mask_fillvalues: false
- multi_model_statistics: false
+ maintainer:
+ - predoi_valeriu

 diagnostics:

 ### TIER 2 ##################################################################
+ AGCD:
+ description: AGCD check
+ variables:
+ pr:
+ additional_datasets:
+ - {project: OBS6, dataset: AGCD, mip: Amon, tier: 2,
+ type: ground, version: v2-0-1}
+ scripts: null
+
+ BerkeleyEarth:
+ description: BerkeleyEarth check
+ variables:
+ tas:
+ tasa:
+ additional_datasets:
+ - {dataset: BerkeleyEarth, project: OBS, mip: Amon, tier: 2,
+ type: reanaly, version: 2020, start_year: 1850, end_year: 2019}
+ scripts: null
+
+
+ CALIPSO-GOCCP:
+ description: CALIPSO-GOCCP check
+ variables:
+ clcalipso:
+ additional_datasets:
+ - {dataset: CALIPSO-GOCCP, project: OBS, type: sat, version: 3.1.2,
+ mip: cfMon, tier: 2, start_year: 2007, end_year: 2015}
+ scripts: null
+
+
+ CERES-EBAF:
+ description: CERES-EBAF check
+ variables:
+ rlut:
+ rlutcs:
+ rsut:
+ rsutcs:
+ additional_datasets:
+ - {dataset: CERES-EBAF, project: OBS, mip: Amon, tier: 2,
+ type: sat, version: Ed4.1, start_year: 2001, end_year: 2021}
+ scripts: null
+
+
+ CMAP:
+ description: CMAP check
+ variables:
+ pr:
+ additional_datasets:
+ - {project: OBS6, dataset: CMAP, mip: Amon, tier: 2,
+ type: reanaly, version: v1}
+ scripts: null
+
+
+ CRU:
+ description: CRU check
+ variables:
+ tas: # check older versions
+ mip: Amon
+ additional_datasets:
+ - {dataset: CRU, project: OBS, tier: 2, type: reanaly,
+ version: TS4.02, start_year: 1901, end_year: 2017}
+ - {dataset: CRU, project: OBS6, tier: 2, type: reanaly,
+ version: TS4.07, start_year: 1901, end_year: 2021}
+ pr: # check older versions
+ mip: Amon
+ additional_datasets:
+ - {dataset: CRU, project: OBS, tier: 2, type: reanaly,
+ version: TS4.02, start_year: 1901, end_year: 2017}
+ - {dataset: CRU, project: OBS6, tier: 2, type: reanaly,
+ version: TS4.07, start_year: 1901, end_year: 2021}
+ tasmin:
+ mip: Amon
+ tasmax:
+ mip: Amon
+ clt:
+ mip: Amon
+ evspsblpot:
+ mip: Emon
+ additional_datasets: # newest version for all variables
+ - {dataset: CRU, project: OBS6, tier: 2,
+ type: reanaly, version: TS4.07, start_year: 1901, end_year: 2022}
+
+
+ scripts: null
+
+
+ CowtanWay:
+ description: CowtanWay check
+ variables:
+ tasa:
+ additional_datasets:
+ - {dataset: CowtanWay, project: OBS, mip: Amon, tier: 2,
+ type: reanaly, version: had4_krig_v1, start_year: 1979,
+ end_year: 2013}
+ - {dataset: CowtanWay, project: OBS, mip: Amon, tier: 2,
+ type: reanaly, version: had4_uah_v1, start_year: 1979, end_year: 2013}
+ - {dataset: CowtanWay, project: OBS, mip: Amon, tier: 2,
+ type: reanaly, version: had4_short_krig_v2, start_year: 1979,
+ end_year: 2019, end_month: 11}
+ - {dataset: CowtanWay, project: OBS, mip: Amon, tier: 2,
+ type: reanaly, version: had4_short_uah_v2, start_year: 1979,
+ end_year: 2017, end_month: 7}
+ - {dataset: CowtanWay, project: OBS, mip: Amon, tier: 2,
+ type: reanaly, version: ghcn_short_krig_v2, start_year: 1979,
+ end_year: 2019, end_month: 8}
+ - {dataset: CowtanWay, project: OBS, mip: Amon, tier: 2,
+ type: reanaly, version: ghcn_short_uah_v2, start_year: 1979,
+ end_year: 2017, end_month: 7}
+ - {dataset: CowtanWay, project: OBS, mip: Amon, tier: 2,
+ type: reanaly, version: had4sst4_krig_v2, start_year: 1850,
+ end_year: 2018}
+ - {dataset: CowtanWay, project: OBS, mip: Amon, tier: 2,
+ type: reanaly, version: had4_krig_v2, start_year: 1850,
+ end_year: 2019, end_month: 
11} + scripts: null + + + CT2019: + description: CT2019 check + variables: + co2s: + additional_datasets: + - {dataset: CT2019, project: OBS6, mip: Amon, tier: 2, + type: reanaly, version: 2019, start_year: 2000, end_year: 2018} + scripts: null + + + Duveiller2018: + description: Duveiller2018 check + variables: + albDiffiTr13: + additional_datasets: + - {dataset: Duveiller2018, project: OBS, mip: Amon, tier: 2, + type: clim, version: v2018, start_year: 2010, end_year: 2010} + scripts: null + + + E-OBS: + description: E-OBS check + variables: + tas: + tasmax: + tasmin: + pr: + psl: + additional_datasets: + # yamllint disable rule:octal-values + - {dataset: E-OBS, project: OBS, mip: day, tier: 2, type: ground, + version: v20.0e-0.1, start_year: 1950, end_year: 2019, end_month: 07} + - {dataset: E-OBS, project: OBS, mip: day, tier: 2, type: ground, + version: v20.0e-0.25, start_year: 1950, end_year: 2019, end_month: 07} + - {dataset: E-OBS, project: OBS, mip: Amon, tier: 2, type: ground, + version: v20.0e-0.1, start_year: 1950, end_year: 2019, end_month: 07} + - {dataset: E-OBS, project: OBS, mip: Amon, tier: 2, type: ground, + version: v20.0e-0.25, start_year: 1950, end_year: 2019, end_month: 07} + # yamllint enable rule:octal-values + scripts: null + + + Eppley-VGPM-MODIS: + description: Eppley-VGPM-MODIS check + variables: + intpp: + additional_datasets: + - {dataset: Eppley-VGPM-MODIS, project: OBS, mip: Omon, tier: 2, + type: sat, version: R2018, start_year: 2003, end_year: 2018} + scripts: null + + ESACCI-AEROSOL: - description: ESACCI-AEROSOL + description: ESACCI-AEROSOL check variables: abs550aer: - preproc: nopp - mip: aero od550aer: - preproc: nopp - mip: aero od550aerStderr: - preproc: nopp - mip: aero od550lt1aer: - preproc: nopp - mip: aero od870aer: - preproc: nopp - mip: aero od870aerStderr: - preproc: nopp - mip: aero additional_datasets: - - {dataset: ESACCI-AEROSOL, project: OBS, tier: 2, type: sat, version: SU-v4.21, start_year: 1997, end_year: 2011} + - {dataset: ESACCI-AEROSOL, project: OBS, mip: aero, tier: 2, + type: sat, version: SU-v4.21, start_year: 1997, end_year: 2011} scripts: null ESACCI-CLOUD: - description: ESACCI-CLOUD + description: ESACCI-CLOUD check variables: clivi: - preproc: nopp - mip: Amon clt: - preproc: nopp - mip: Amon cltStderr: - preproc: nopp - mip: Amon clwvi: - preproc: nopp - mip: Amon + lwp: + rlut: + rlutcs: + rsut: + rsutcs: + rsdt: + rlus: + rsus: + rsuscs: additional_datasets: - - {dataset: ESACCI-CLOUD, project: OBS, tier: 2, type: sat, version: AVHRR-fv3.0, start_year: 1982, end_year: 2016} + - {dataset: ESACCI-CLOUD, project: OBS, mip: Amon, tier: 2, + type: sat, version: AVHRR-AMPM-fv3.0, start_year: 1982, end_year: 2016} scripts: null ESACCI-FIRE: - description: ESACCI-FIRE + description: ESACCI-FIRE check variables: burntArea: - preproc: nopp - mip: Lmon additional_datasets: - - {dataset: ESACCI-FIRE, project: OBS, tier: 2, type: sat, version: L4-BA-MERIS-fv4.1, start_year: 2005, end_year: 2011} + - {dataset: ESACCI-FIRE, project: OBS, mip: Lmon, tier: 2, + type: sat, version: L4-BA-MERIS-fv4.1, + start_year: 2005, end_year: 2011} scripts: null ESACCI-LANDCOVER: - description: ESACCI-LANDCOVER + description: ESACCI-LANDCOVER check variables: baresoilFrac: - preproc: nopp - mip: Lmon cropFrac: - preproc: nopp - mip: Lmon grassFrac: - preproc: nopp - mip: Lmon shrubFrac: - preproc: nopp - mip: Lmon treeFrac: - preproc: nopp - mip: Lmon additional_datasets: - - {dataset: ESACCI-LANDCOVER, project: OBS, tier: 2, type: sat, version: 
L4-LCCS-Map-300m-P5Y-aggregated-0.500000Deg, start_year: 1998, end_year: 2012} + - {dataset: ESACCI-LANDCOVER, project: OBS, mip: Lmon, tier: 2, + type: sat, version: v2.0.8, frequency: yr, + start_year: 1992, end_year: 2020} + scripts: null + + ESACCI-LST: + # TODO change years to full data coverage + description: ESACCI-LST check + variables: + ts: + additional_datasets: + - {dataset: ESACCI-LST, project: OBS, mip: Amon, tier: 2, + type: sat, version: "1.00", + start_year: 2003, end_year: 2018} + scripts: null + + + ESACCI-OC: + description: ESACCI-OC check + variables: + chl: + additional_datasets: + - {dataset: ESACCI-OC, project: OBS6, mip: Omon, tier: 2, + type: sat, version: fv5.0, start_year: 1998, end_year: 2020} scripts: null ESACCI-OZONE: - description: ESACCI-OZONE + description: ESACCI-OZONE check variables: toz: - preproc: nopp - mip: Amon additional_datasets: - - {dataset: ESACCI-OZONE, project: OBS, tier: 2, type: sat, version: L3, start_year: 1997, end_year: 2010} + - {dataset: ESACCI-OZONE, project: OBS, mip: Amon, tier: 2, + type: sat, version: L3, start_year: 1997, end_year: 2010} tozStderr: - preproc: nopp - mip: Amon additional_datasets: - - {dataset: ESACCI-OZONE, project: OBS, tier: 2, type: sat, version: L3, start_year: 1997, end_year: 2010} + - {dataset: ESACCI-OZONE, project: OBS, mip: Amon, tier: 2, + type: sat, version: L3, start_year: 1997, end_year: 2010} tro3prof: - preproc: nopp - mip: Amon additional_datasets: - - {dataset: ESACCI-OZONE, project: OBS, tier: 2, type: sat, version: L3, start_year: 2007, end_year: 2008} + - {dataset: ESACCI-OZONE, project: OBS, mip: Amon, tier: 2, + type: sat, version: L3, start_year: 2007, end_year: 2008} tro3profStderr: - preproc: nopp - mip: Amon additional_datasets: - - {dataset: ESACCI-OZONE, project: OBS, tier: 2, type: sat, version: L3, start_year: 2007, end_year: 2008} + - {dataset: ESACCI-OZONE, project: OBS, mip: Amon, tier: 2, + type: sat, version: L3, start_year: 2007, end_year: 2008} + scripts: null + + ESACCI-SEA-SURFACE-SALINITY: + description: ESACCI-SEA-SURFACE-SALINITY check + variables: + sos: + additional_datasets: + - {dataset: ESACCI-SEA-SURFACE-SALINITY, project: OBS6, mip: Omon, tier: 2, + type: reanaly, version: fv1.8, start_year: 2010, end_year: 2018} + - {dataset: ESACCI-SEA-SURFACE-SALINITY, project: OBS6, mip: Omon, tier: 2, + type: reanaly, version: fv2.31, start_year: 2010, end_year: 2018} scripts: null ESACCI-SOILMOISTURE: - description: ESACCI-SOILMOISTURE + description: ESACCI-SOILMOISTURE check variables: - dos: - preproc: nopp - mip: Lmon - dosStderr: - preproc: nopp - mip: Lmon - sm: - preproc: nopp + sm_daily: + short_name: sm + mip: Eday + frequency: day + sm_monthly: + short_name: sm mip: Lmon + frequency: mon smStderr: - preproc: nopp - mip: Lmon + mip: Eday + frequency: day additional_datasets: - - {dataset: ESACCI-SOILMOISTURE, project: OBS, tier: 2, type: sat, version: L3S-SSMV-COMBINED-v4.2, start_year: 2005, end_year: 2011} + - {dataset: ESACCI-SOILMOISTURE, project: OBS, tier: 2, + type: sat, version: L3S-SSMV-COMBINED-v08.1, start_year: 1978, end_year: 2022} scripts: null ESACCI-SST: - description: ESACCI-SST + description: ESACCI-SST check variables: ts: - preproc: nopp - mip: Amon tsStderr: - preproc: nopp - mip: Amon additional_datasets: - - {dataset: ESACCI-SST, project: OBS, tier: 2, type: sat, version: L4-GHRSST-SSTdepth-OSTIA-GLOB, start_year: 1992, end_year: 2010} + - {dataset: ESACCI-SST, project: OBS, mip: Amon, tier: 2, + type: sat, version: 2.2, + start_year: 1982, 
end_year: 2019} scripts: null - GHCN: - description: GHCN + + ESDC: + description: ESDC check variables: - pr: - preproc: nopp - mip: Amon + tas: + tasmin: + tasmax: additional_datasets: - - {dataset: GHCN, project: OBS, tier: 2, type: ground, version: 1, start_year: 1900, end_year: 2014} + - {dataset: ESDC, project: OBS6, mip: Amon, tier: 2, + type: reanaly, version: 3.0.1, + start_year: 1979, end_year: 2021} scripts: null - HadCRUT3: - description: HadCRUT3 + ESRL: + description: ESRL check variables: - tasa: - preproc: nopp + co2s: mip: Amon additional_datasets: - - {dataset: HadCRUT3, project: OBS, tier: 2, type: ground, version: 1, start_year: 1850, end_year: 2013} + - {dataset: ESRL, project: OBS, type: ground, version: ASK, tier: 2, + start_year: 2000, end_year: 2002} + - {dataset: ESRL, project: OBS, type: ground, version: GLOBAL, tier: 2, + start_year: 2000, end_year: 2002} + - {dataset: ESRL, project: OBS, type: ground, version: MLO, tier: 2, + start_year: 2000, end_year: 2002} scripts: null - HadCRUT4: - description: HadCRUT4 + GCP2018: + description: GCP2018 check variables: - tas: - preproc: nopp - mip: Amon - tasa: - preproc: nopp - mip: Amon + nbp: + mip: Lmon + fgco2: + mip: Omon additional_datasets: - - {dataset: HadCRUT4, project: OBS, tier: 2, type: ground, version: 1, start_year: 1850, end_year: 2018} + - {dataset: GCP2018, project: OBS, frequency: yr, tier: 2, type: reanaly, + version: '1.0', start_year: 1959, end_year: 2017} scripts: null - HadISST: - description: HadISST + GCP2020: + description: GCP2020 check variables: - ts: - preproc: nopp - mip: Amon - tos: - preproc: nopp + nbp: + mip: Lmon + additional_datasets: + - {dataset: GCP2020, project: OBS, frequency: yr, tier: 2, + type: reanaly, version: '1.0', start_year: 1959, end_year: 2019} + nbp_residual: + short_name: nbp + mip: Lmon + additional_datasets: + - {dataset: GCP2020, project: OBS, frequency: yr, tier: 2, + type: reanaly, version: '1.0-residual', start_year: 1959, + end_year: 2019} + fgco2: mip: Omon - sic: - preproc: nopp - mip: OImon + additional_datasets: + - {dataset: GCP2020, project: OBS, frequency: yr, tier: 2, + type: reanaly, version: '1.0', start_year: 1959, end_year: 2019} + scripts: null + + + GHCN: + description: GHCN check + variables: + pr: additional_datasets: - - {dataset: HadISST, project: OBS, tier: 2, type: reanaly, version: 1, start_year: 1870, end_year: 2017} + - {dataset: GHCN, project: OBS, mip: Amon, tier: 2, + type: ground, version: 1, start_year: 1900, end_year: 2014} scripts: null - NCEP: - description: NCEP monthly data + GHCN-CAMS: + description: GHCN-CAMS check variables: - hur: - preproc: nopp - mip: Amon - hus: - preproc: nopp - mip: Amon - pr_month: - short_name: pr - preproc: nopp - mip: Amon - pr_day: - short_name: pr - preproc: nopp - mip: day - rlut: - preproc: nopp - mip: day - ta: - preproc: nopp - mip: Amon tas: - preproc: nopp - mip: Amon - ua_month: - short_name: ua - preproc: nopp - mip: Amon - ua_day: - short_name: ua - preproc: nopp - mip: day - va_month: - short_name: va - preproc: nopp - mip: Amon - va_day: - short_name: va - preproc: nopp - mip: day - wap: - preproc: nopp - mip: Amon - zg: - preproc: nopp - mip: Amon additional_datasets: - - {dataset: NCEP, project: OBS, tier: 2, type: reanaly, version: 1, start_year: 1948, end_year: 2018} + - {dataset: GHCN-CAMS, project: OBS, mip: Amon, tier: 2, + type: ground, version: 1, start_year: 1948, end_year: 2020} scripts: null - PATMOS-x: - description: PATMOS-x + GISTEMP: + description: GISTEMP check 
variables: - clt: - preproc: nopp - mip: Amon + tasa: additional_datasets: - - {dataset: PATMOS-x, project: OBS, tier: 2, type: sat, version: NOAA, start_year: 1982, end_year: 1985}#2018} + - {dataset: GISTEMP, project: OBS, mip: Amon, tier: 2, + type: ground, version: v4, start_year: 1880, end_year: 2020} scripts: null - WOA: - description: WOA + GLODAP: + description: GLODAP check variables: - so: - preproc: nopp - mip: Omon - thetao: - preproc: nopp - mip: Omon - no3: - preproc: nopp - mip: Oyr - o2: - preproc: nopp - mip: Oyr - po4: - preproc: nopp - mip: Oyr - si: - preproc: nopp - mip: Oyr + dissic: + ph: + talk: additional_datasets: - - {dataset: WOA, project: OBS, tier: 2, type: clim, version: 2013v2, start_year: 2000, end_year: 2000} + - {dataset: GLODAP, project: OBS6, mip: Oyr, tier: 2, + type: clim, version: v2.2016b, start_year: 2000, end_year: 2000} scripts: null - ### TIER 3 ################################################################## + GPCC: + description: GPCC check + variables: + pr: + additional_datasets: + - {dataset: GPCC, project: OBS, mip: Amon, tier: 2, + type: reanaly, version: v2018_025, start_year: 1891, end_year: 2016} + - {dataset: GPCC, project: OBS, mip: Amon, tier: 2, + type: reanaly, version: v2018_05, start_year: 1891, end_year: 2016} + - {dataset: GPCC, project: OBS, mip: Amon, tier: 2, + type: reanaly, version: v2018_10, start_year: 1891, end_year: 2016} + - {dataset: GPCC, project: OBS, mip: Amon, tier: 2, + type: reanaly, version: v2018_25, start_year: 1891, end_year: 2016} + - {dataset: GPCC, project: OBS, mip: Amon, tier: 2, type: reanaly, + version: v2018_025-numgauge1, start_year: 1891, end_year: 2016} + - {dataset: GPCC, project: OBS, mip: Amon, tier: 2, type: reanaly, + version: v2018_05-numgauge1, start_year: 1891, end_year: 2016} + - {dataset: GPCC, project: OBS, mip: Amon, tier: 2, type: reanaly, + version: v2018_10-numgauge1, start_year: 1891, end_year: 2016} + - {dataset: GPCC, project: OBS, mip: Amon, tier: 2, type: reanaly, + version: v2018_25-numgauge1, start_year: 1891, end_year: 2016} + scripts: null - AURA-TES: - description: AURA-TES + GPCP-SG: + description: GPCP-SG check variables: - tro3: - preproc: nopp - mip: Amon + pr: + additional_datasets: + - {dataset: GPCP-SG, project: OBS, mip: Amon, tier: 2, type: atmos, + version: 2.3, start_year: 1979, end_year: 2022} + scripts: null + + HadCRUT3: + description: HadCRUT3 check + variables: + tasa: additional_datasets: - - {dataset: AURA-TES, project: OBS, tier: 3, type: sat, version: 1, start_year: 2005, end_year: 2011} + - {dataset: HadCRUT3, project: OBS, mip: Amon, tier: 2, + type: ground, version: 1, start_year: 1850, end_year: 2013} scripts: null - CDS-XCH4: - description: CDS-XCH4 + HadCRUT4: + description: HadCRUT4 check variables: - xch4: - preproc: nopp - mip: Amon + tas: + tasa: + tasConf5: + tasConf95: additional_datasets: - - {dataset: CDS-XCH4, project: OBS, tier: 3, type: sat, version: L3, start_year: 2003, end_year: 2016} + - {dataset: HadCRUT4, project: OBS, mip: Amon, tier: 2, + type: ground, version: 1, start_year: 1850, end_year: 2018} scripts: null - CDS-XCO2: - description: CDS-XCO2 + HadCRUT5: + description: HadCRUT5 check variables: - xco2: - preproc: nopp - mip: Amon - field: T2Ms + tas: + tasa: additional_datasets: - - {dataset: CDS-XCO2, project: OBS, tier: 3, type: sat, version: L3, start_year: 2003, end_year: 2016} + - {dataset: HadCRUT5, project: OBS, mip: Amon, tier: 2, + type: ground, version: 5.0.1.0-analysis, start_year: 1850, end_year: 2021} + - 
{dataset: HadCRUT5, project: OBS, mip: Amon, tier: 2, + type: ground, version: 5.0.1.0-noninfilled, start_year: 1850, end_year: 2021} scripts: null - CERES_mon: - description: CERES-SYN1deg monthly data + HadISST: + description: HadISST check variables: - rlds_month: - short_name: rlds - preproc: nopp - mip: Amon - rlds_3hr: - short_name: rlds - preproc: nopp - mip: 3hr - rldscs_month: - short_name: rldscs - preproc: nopp - mip: Amon - rldscs_3hr: - short_name: rldscs - preproc: nopp - mip: 3hr - rlus_month: - short_name: rlus - preproc: nopp - mip: Amon - rlus_3hr: - short_name: rlus - preproc: nopp - mip: 3hr - rluscs: - preproc: nopp - mip: 3hr - rlut_month: - short_name: rlut - preproc: nopp + ts: mip: Amon - rlut_3hr: - short_name: rlut - preproc: nopp - mip: 3hr - rlutcs_month: - short_name: rlutcs - preproc: nopp + tos: + mip: Omon + sic: + mip: OImon + additional_datasets: + - {dataset: HadISST, project: OBS, tier: 2, + type: reanaly, version: 1, start_year: 1870, end_year: 2021} + scripts: null + + + HALOE: + description: HALOE check + variables: + hus: + tro3: + additional_datasets: + - {dataset: HALOE, project: OBS, mip: Amon, tier: 2, + type: sat, version: 1, start_year: 1991, end_year: 2002} + scripts: null + + ISCCP-FH: + description: ISCCP-FH check + variables: + alb: + additional_datasets: + - {dataset: ISCCP-FH, project: OBS, mip: Amon, tier: 2, + type: sat, version: v0, start_year: 1984, end_year: 2016} + scripts: null + + JRA-25: + description: JRA-25 check + variables: + clt: + hus: + prw: + rlut: + rlutcs: + rsut: + rsutcs: + additional_datasets: + - {dataset: JRA-25, project: OBS6, mip: Amon, tier: 2, + type: reanaly, version: 1, start_year: 1979, end_year: 2007} + scripts: null + + JRA-55: + description: JRA-55 check + variables: + cli: + clivi: + clw: + clwvi: + clt: + prw: + rlus: + rlut: + rlutcs: + rsus: + rsuscs: + rsut: + rsutcs: + ta: + tas: + wap: + additional_datasets: + - {dataset: JRA-55, project: OBS6, mip: Amon, tier: 2, + type: reanaly, version: 1, start_year: 1958, end_year: 2022} + scripts: null + + Kadow2020: + description: Kadow2020 check + variables: + tasa: + additional_datasets: + - {dataset: Kadow2020, project: OBS, mip: Amon, tier: 2, + type: ground, version: 5.0.1.0, start_year: 1850, end_year: 2020} + scripts: null + + + Landschuetzer2016: + description: Landschuetzer2016 check + variables: + dpco2: + fgco2: + spco2: + additional_datasets: + - {dataset: Landschuetzer2016, project: OBS, mip: Omon, tier: 2, + type: clim, version: v2016, start_year: 1982, end_year: 2015} + scripts: null + + + Landschuetzer2020: + description: Landschuetzer2020 check + variables: + spco2: + additional_datasets: + - {dataset: Landschuetzer2020, project: OBS6, mip: Omon, tier: 2, + type: clim, version: '1.1', start_year: 2004, end_year: 2004} + scripts: null + + + MOBO-DIC_MPIM: + description: MOBO-DIC_MPIM check + variables: + dissic: + additional_datasets: + - {dataset: MOBO-DIC_MPIM, project: OBS6, mip: Omon, tier: 2, + type: clim, version: '1.1', start_year: 2011, end_year: 2011} + scripts: null + + + MOBO-DIC2004-2019: + description: MOBO-DIC2004-2019 check + variables: + dissic: + additional_datasets: + - {dataset: MOBO-DIC2004-2019, project: OBS6, mip: Omon, tier: 2, + type: reanaly, version: '2.3', start_year: 2004, end_year: 2019} + scripts: null + + + NCEP-NCAR-R1: + description: NCEP-NCAR-R1 check + variables: + clt: + mip: Amon + hur: + mip: Amon + hurs: + mip: Amon + hus: + mip: Amon + pr_month: + short_name: pr + mip: Amon + pr_day: + short_name: pr + 
mip: day + prw: + mip: Amon + psl: + mip: Amon + rlut_day: + short_name: rlut + mip: day + rlut_month: + short_name: rlut + mip: Amon + rlutcs: + mip: Amon + rsut: + mip: Amon + rsutcs: + mip: Amon + sfcWind: + mip: Amon + ta: + mip: Amon + tas: + mip: Amon + tasmax: + mip: Amon + tasmin: + mip: Amon + ts: + mip: Amon + ua_month: + short_name: ua + mip: Amon + ua_day: + short_name: ua + mip: day + va_month: + short_name: va + mip: Amon + va_day: + short_name: va + mip: day + wap: + mip: Amon + zg: + mip: Amon + additional_datasets: + - {dataset: NCEP-NCAR-R1, project: OBS6, tier: 2, + type: reanaly, version: 1, start_year: 1948, end_year: 2021} + scripts: null + + + NCEP-DOE-R2: + description: NCEP-DOE-R2 check + variables: + clt: + hur: + prw: + ta: + wap: + pr: + tauu: + tauv: + tos: + additional_datasets: + - {dataset: NCEP-DOE-R2, project: OBS6, mip: Amon, tier: 2, + type: reanaly, version: 2, start_year: 1979, end_year: 2022} + scripts: null + + + NOAA-CIRES-20CR-V2: + description: NOAA-CIRES-20CR-V2 check + variables: + clt: + clwvi: + hus: + prw: + rlut: + rsut: + pr: + tauu: + tauv: + additional_datasets: + - {dataset: NOAA-CIRES-20CR-V2, project: OBS6, mip: Amon, tier: 2, + type: reanaly, version: v2, start_year: 1871, end_year: 2012} + scripts: null + + NOAA-ERSSTv5: + description: NOAA-ERSSTv5 check + variables: + tos: + additional_datasets: + - {dataset: NOAA-ERSSTv5, project: OBS6, mip: Omon, tier: 2, + type: reanaly, version: v5, start_year: 1854, end_year: 2000} + scripts: null + + NOAA-ERSSTv3b: + description: NOAA-ERSSTv3b check + variables: + tos: + additional_datasets: + - {dataset: NOAA-ERSSTv3b, project: OBS6, mip: Omon, tier: 2, + type: reanaly, version: v3b, start_year: 1854, end_year: 2019} + scripts: null + + NOAA-MBL-CH4: + description: NOAA marine boundary layer CH4 check + variables: + ch4s: + additional_datasets: + - {dataset: NOAA-MBL-CH4, project: OBS6, mip: Amon, type: atmos, version: 1.0, tier: 2, + start_year: 1983, end_year: 2023} + scripts: null + + NOAA-CIRES-20CR-V3: + description: NOAA-CIRES-20CR-V3 check + variables: + clt: + clwvi: + hus: + prw: + rlut: + rlutcs: + rsut: + rsutcs: + additional_datasets: + - {dataset: NOAA-CIRES-20CR-V3, project: OBS6, mip: Amon, tier: 2, + type: reanaly, version: v3, start_year: 1836, end_year: 2015} + scripts: null + + + NOAAGlobalTemp: + description: NOAAGlobalTemp check + variables: + tasa: + additional_datasets: + - {dataset: NOAAGlobalTemp, project: OBS, mip: Amon, tier: 2, + type: ground, version: v5.0.0, start_year: 1880, end_year: 2021} + scripts: null + + + OceanSODA-ETHZ: + description: OceanSODA-ETHZ check + variables: + areacello: + mip: Ofx + co3os: + mip: Omon + dissicos: + mip: Omon + fgco2: + mip: Omon + phos: + mip: Omon + spco2: + mip: Omon + talkos: + mip: Omon + additional_datasets: + - {dataset: OceanSODA-ETHZ, project: OBS6, tier: 2, type: reanaly, + version: v2023, start_year: 1982, end_year: 2022} + scripts: null + + + OSI-450: + description: OSI-450 check + variables: + sic_day: + short_name: sic + mip: day + sic_month: + short_name: sic + mip: OImon + areacello: + mip: fx + additional_datasets: + - {dataset: OSI-450-nh, project: OBS, tier: 2, + type: reanaly, version: v2, start_year: 1979, end_year: 2015} + - {dataset: OSI-450-sh, project: OBS, tier: 2, + type: reanaly, version: v2, start_year: 1979, end_year: 2015} + scripts: null + + + PATMOS-x: + description: PATMOS-x check + variables: + clt: + additional_datasets: + - {dataset: PATMOS-x, project: OBS, mip: Amon, tier: 2, + type: sat, 
version: NOAA, start_year: 1982, end_year: 2016}
+ scripts: null
+
+
+ PERSIANN-CDR:
+ description: PERSIANN-CDR check
+ variables:
+ pr_month:
+ short_name: pr
+ mip: Amon
+ pr_day:
+ short_name: pr
+ mip: day
+ additional_datasets:
+ - {dataset: PERSIANN-CDR, project: OBS, tier: 2,
+ type: reanaly, version: v01r01, start_year: 1983, end_year: 2018}
+ scripts: null
+
+
+ PHC:
+ description: PHC check
+ variables:
+ so:
+ thetao:
+ additional_datasets:
+ - {dataset: PHC, project: OBS6, frequency: yr, mip: Omon, tier: 2,
+ type: clim, version: 3, start_year: 1950, end_year: 1950}
+ scripts: null
+
+
+ PIOMAS:
+ description: PIOMAS check
+ variables:
+ sithick:
+ mip: day
+ areacello:
+ mip: fx
+ additional_datasets:
+ - {dataset: PIOMAS, project: OBS, tier: 2,
+ type: reanaly, version: 2.1, start_year: 1979, end_year: 2018}
+ scripts: null
+
+
+ REGEN:
+ description: REGEN check
+ variables:
+ pr:
+ additional_datasets:
+ - {dataset: REGEN, project: OBS, mip: Amon, tier: 2,
+ type: reanaly, version: V1-2019, start_year: 1950, end_year: 2016}
+ - {dataset: REGEN, project: OBS, mip: day, tier: 2,
+ type: reanaly, version: V1-2019, start_year: 1950, end_year: 2016}
+ scripts: null
+
+
+ Scripps-CO2-KUM:
+ description: Scripps-CO2-KUM check
+ variables:
+ co2s:
+ additional_datasets:
+ - {dataset: Scripps-CO2-KUM, project: OBS6, mip: Amon, tier: 2,
+ type: ground, version: 14-Oct-2021, start_year: 1979, end_year: 2021}
+ scripts: null
+
+
+ WFDE5:
+ description: WFDE5 check
+ variables:
+ tas:
+ additional_datasets:
+ - {dataset: WFDE5, project: OBS, mip: Amon, tier: 2,
+ type: ground, version: v1.1-CRU, start_year: 1979, end_year: 2018}
+ - {dataset: WFDE5, project: OBS, mip: day, tier: 2,
+ type: ground, version: v1.1-CRU, start_year: 1979, end_year: 2018}
+ pr:
+ additional_datasets:
+ - {dataset: WFDE5, project: OBS, mip: Amon, tier: 2,
+ type: ground, version: v1.1-CRU, start_year: 1980, end_year: 2018}
+ - {dataset: WFDE5, project: OBS, mip: day, tier: 2,
+ type: ground, version: v1.1-CRU, start_year: 1980, end_year: 2018}
+ - {dataset: WFDE5, project: OBS, mip: Amon, tier: 2,
+ type: ground, version: v1.1-CRU+GPCC, start_year: 1980, end_year: 2016}
+ - {dataset: WFDE5, project: OBS, mip: day, tier: 2,
+ type: ground, version: v1.1-CRU+GPCC, start_year: 1980, end_year: 2016}
+ scripts: null
+
+
+ TCOM-CH4:
+ description: TCOM-CH4 check
+ variables:
+ ch4:
+ additional_datasets:
+ - {dataset: TCOM-CH4, project: OBS6, mip: Amon, frequency: day, tier: 2,
+ type: reanaly, version: '1.0', start_year: 1991, end_year: 2021}
+ scripts: null
+
+
+ TCOM-N2O:
+ description: TCOM-N2O check
+ variables:
+ n2o:
+ additional_datasets:
+ - {dataset: TCOM-N2O, project: OBS6, mip: Amon, frequency: day, tier: 2,
+ type: reanaly, version: '1.0', start_year: 1991, end_year: 2021}
+ scripts: null
+
+
+ WOA:
+ description: WOA check
+ variables:
+ so:
+ mip: Omon
+ sos:
+ mip: Omon
+ thetao:
+ mip: Omon
+ tos:
+ mip: Omon
+ no3:
+ mip: Oyr
+ o2:
+ mip: Oyr
+ po4:
+ mip: Oyr
+ si:
+ mip: Oyr
+ additional_datasets:
+ - {dataset: WOA, project: OBS6, tier: 2,
+ type: clim, version: 2018, start_year: 2000, end_year: 2000}
+ - {dataset: WOA, project: OBS, tier: 2,
+ type: clim, version: 2013v2, start_year: 2000, end_year: 2000}
+ scripts: null
+
+
+ ### TIER 3 ##################################################################
+
+ AERONET:
+ description: AERONET check
+ variables:
+ od440aer:
+ additional_datasets:
+ - {dataset: AERONET, project: OBS6, mip: AERmon, tier: 3, type: atmos, version: 20240406}
+ scripts: null
+
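+ # Every check diagnostic in this recipe follows the same minimal pattern:
+ # list the variables to read, point additional_datasets at the CMORized
+ # OBS/OBS6 dataset, and set "scripts: null" so that only the data reading
+ # is tested and no diagnostic script is run. A new entry could look like
+ # the sketch below (dataset name, project, mip, version, and years are
+ # placeholders, not an existing dataset):
+ #
+ # MY-DATASET:
+ #   description: MY-DATASET check
+ #   variables:
+ #     tas:
+ #   additional_datasets:
+ #     - {dataset: MY-DATASET, project: OBS6, mip: Amon, tier: 3,
+ #        type: reanaly, version: '1.0', start_year: 2000, end_year: 2001}
+ #   scripts: null
+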
+ ANUClimate: + description: ANUClimate check + variables: + pr: + tasmax: + tasmin: + tas: + additional_datasets: + - {dataset: ANUClimate, project: OBS6, mip: Amon, tier: 3, type: reanaly} + scripts: null + + APHRO-MA: + description: APHRO-MA check + variables: + pr: + additional_datasets: + - {dataset: APHRO-MA, project: OBS, mip: day, tier: 3, type: ground, + version: 025deg-V1101, start_year: 1951, end_year: 2007} + - {dataset: APHRO-MA, project: OBS, mip: day, tier: 3, type: ground, + version: 050deg-V1101, start_year: 1951, end_year: 2007} + - {dataset: APHRO-MA, project: OBS, mip: Amon, tier: 3, type: ground, + version: 025deg-V1101, start_year: 1951, end_year: 2007} + - {dataset: APHRO-MA, project: OBS, mip: Amon, tier: 3, type: ground, + version: 050deg-V1101, start_year: 1951, end_year: 2007} + - {dataset: APHRO-MA, project: OBS, mip: day, tier: 3, type: ground, + version: 025deg-V1101-EXR1, start_year: 2007, end_year: 2015} + - {dataset: APHRO-MA, project: OBS, mip: day, tier: 3, type: ground, + version: 050deg-V1101-EXR1, start_year: 2007, end_year: 2015} + - {dataset: APHRO-MA, project: OBS, mip: Amon, tier: 3, type: ground, + version: 025deg-V1101-EXR1, start_year: 2007, end_year: 2015} + - {dataset: APHRO-MA, project: OBS, mip: Amon, tier: 3, type: ground, + version: 050deg-V1101-EXR1, start_year: 2007, end_year: 2015} + tas: + additional_datasets: + - {dataset: APHRO-MA, project: OBS, mip: day, tier: 3, type: ground, + version: 025deg-V1808, start_year: 1961, end_year: 2007} + - {dataset: APHRO-MA, project: OBS, mip: day, tier: 3, type: ground, + version: 050deg-V1808, start_year: 1961, end_year: 2007} + - {dataset: APHRO-MA, project: OBS, mip: Amon, tier: 3, type: ground, + version: 025deg-V1808, start_year: 1961, end_year: 2007} + - {dataset: APHRO-MA, project: OBS, mip: Amon, tier: 3, type: ground, + version: 050deg-V1808, start_year: 1961, end_year: 2007} + scripts: null + + + AURA-TES: + description: AURA-TES check + variables: + tro3: + additional_datasets: + - {dataset: AURA-TES, project: OBS, mip: Amon, tier: 3, + type: sat, version: 1, start_year: 2005, end_year: 2011} + scripts: null + + + CALIPSO-ICECLOUD: + description: CALIPSO-ICECLOUD check + variables: + cli: + additional_datasets: + - {dataset: CALIPSO-ICECLOUD, project: OBS, type: sat, version: 1-00, + mip: Amon, tier: 3, start_year: 2007, end_year: 2015} + scripts: null + + + CDS-SATELLITE-ALBEDO: + description: CDS-SATELLITE-ALBEDO check + variables: + bdalb: + bhalb: + additional_datasets: + - {dataset: CDS-SATELLITE-ALBEDO, project: OBS6, mip: Lmon, tier: 3, + type: sat, version: V1, start_year: 1999, end_year: 2013} + scripts: null + + + CDS-SATELLITE-LAI-FAPAR: + description: CDS-SATELLITE-LAI-FAPAR check + variables: + fapar: + lai: + additional_datasets: + - {dataset: CDS-SATELLITE-LAI-FAPAR, project: OBS, mip: Lmon, tier: 3, + type: sat, version: V1, start_year: 1999, end_year: 2013} + scripts: null + + + CDS-SATELLITE-SOIL-MOISTURE: + description: CDS-SATELLITE-SOIL-MOISTURE check + variables: + sm_month: + short_name: sm + mip: Lmon + sm_day: + short_name: sm + mip: day + additional_datasets: + - {dataset: CDS-SATELLITE-SOIL-MOISTURE, project: OBS, tier: 3, + type: sat, version: ACTIVE, + start_year: 1992, end_year: 2020} + - {dataset: CDS-SATELLITE-SOIL-MOISTURE, project: OBS, tier: 3, + type: sat, version: COMBINED, + start_year: 1979, end_year: 2020} + - {dataset: CDS-SATELLITE-SOIL-MOISTURE, project: OBS, tier: 3, + type: sat, version: PASSIVE, + start_year: 1979, end_year: 2020} + scripts: 
null + + + CDS-UERRA: + description: CDS-UERRA check + variables: + sm: + additional_datasets: + - {dataset: CDS-UERRA, project: OBS6, mip: E6hr, frequency: 6hr, tier: 3, + type: reanaly, version: UERRA-HARMONIE, + start_year: 1979, end_year: 2018} + scripts: null + + + CDS-XCH4: + description: CDS-XCH4 check + variables: + xch4: + additional_datasets: + - {dataset: CDS-XCH4, project: OBS, mip: Amon, tier: 3, + type: sat, version: L3, start_year: 2003, end_year: 2018} + scripts: null + + + CDS-XCO2: + description: CDS-XCO2 check + variables: + xco2: + additional_datasets: + - {dataset: CDS-XCO2, project: OBS, mip: Amon, tier: 3, + type: sat, version: L3, start_year: 2003, end_year: 2016} + scripts: null + + + CERES_mon: + description: CERES-SYN1deg check + variables: + rlds_month: + short_name: rlds + mip: Amon + rlds_3hr: + short_name: rlds + mip: 3hr + rldscs_month: + short_name: rldscs + mip: Amon + rldscs_3hr: + short_name: rldscs + mip: 3hr + rlus_month: + short_name: rlus + mip: Amon + rlus_3hr: + short_name: rlus + mip: 3hr + rluscs: + mip: 3hr + rlut_month: + short_name: rlut + mip: Amon + rlut_3hr: + short_name: rlut + mip: 3hr + rlutcs_month: + short_name: rlutcs mip: Amon rlutcs_3hr: short_name: rlutcs - preproc: nopp mip: 3hr rsds_month: short_name: rsds - preproc: nopp mip: Amon - rsds_3hr: + rsds_3hr: + short_name: rsds + mip: 3hr + rsdscs: + mip: 3hr + rsdt: + mip: Amon + rsus_month: + short_name: rsus + mip: Amon + rsus_3hr: + short_name: rsus + mip: 3hr + rsuscs: + mip: 3hr + rsut_month: + short_name: rsut + mip: Amon + rsut_3hr: + short_name: rsut + mip: 3hr + rsutcs_month: + short_name: rsutcs + mip: Amon + rsutcs_3hr: + short_name: rsutcs + mip: 3hr + additional_datasets: + - {dataset: CERES-SYN1deg, project: OBS, tier: 3, + type: sat, version: Ed3A, start_year: 2001, end_year: 2016} + scripts: null + + + CLARA-AVHRR: + description: CLARA-AVHRR check + variables: + clt: + mip: Amon + clivi: + mip: Amon + clwvi: + mip: Amon + lwp: + mip: Amon + additional_datasets: + - {dataset: CLARA-AVHRR, project: OBS, tier: 3, + type: sat, version: V002_01, start_year: 1982, end_year: 2018} + scripts: null + + + CLOUDSAT-L2: + description: CloudSat-level2 check + variables: + clivi: + clw: + clwvi: + lwp: + additional_datasets: + - {dataset: CLOUDSAT-L2, project: OBS, tier: 3, type: sat, + version: P1-R05-gridbox-average-all, mip: Amon, start_year: 2006, + end_year: 2017} + - {dataset: CLOUDSAT-L2, project: OBS, tier: 3, type: sat, + version: P1-R05-gridbox-average-noprecip, mip: Amon, start_year: 2006, + end_year: 2017} + - {dataset: CLOUDSAT-L2, project: OBS, tier: 3, type: sat, + version: P1-R05-in-cloud-all, mip: Amon, start_year: 2006, + end_year: 2017} + - {dataset: CLOUDSAT-L2, project: OBS, tier: 3, type: sat, + version: P1-R05-in-cloud-noprecip, mip: Amon, start_year: 2006, + end_year: 2017} + scripts: null + + + ERA-Interim: + description: ERA-Interim check + variables: + cl: + mip: Amon + cli: + mip: Amon + clivi: + mip: Amon + clt_day: + short_name: clt + mip: day + clt_month: + short_name: clt + mip: Amon + clw: + mip: Amon + clwvi: + mip: Amon + evspsbl_day: + short_name: evspsbl + mip: Eday + evspsbl_month: + short_name: evspsbl + mip: Amon + hfds: + mip: Omon + hur: + mip: Amon + hus: + mip: Amon + lwp: + mip: Amon + orog: + mip: fx + pr_day: + short_name: pr + mip: day + pr_month: + short_name: pr + mip: Amon + prsn_day: + short_name: prsn + mip: day + prsn_month: + short_name: prsn + mip: Amon + prw: + mip: Amon + ps_day: + short_name: ps + mip: CFday + ps_month: + 
short_name: ps + mip: Amon + psl_day: + short_name: psl + mip: day + psl_month: + short_name: psl + mip: Amon + rlds_day: + short_name: rlds + mip: day + rlut_month: + short_name: rlut + mip: Amon + rlutcs_month: + short_name: rlutcs + mip: Amon + rsds_day: + short_name: rsds + mip: day + rsds_month: + short_name: rsds + mip: Amon + rsdt_day: + short_name: rsdt + mip: CFday + rsdt_month: + short_name: rsdt + mip: Amon + rsut_month: + short_name: rsut + mip: Amon + rsutcs_month: + short_name: rsutcs + mip: Amon + rss_day: + short_name: rss + mip: day + rss_month: + short_name: rss + mip: Emon + sftlf: + mip: fx + ta_day: + short_name: ta + mip: day + ta_month: + short_name: ta + mip: Amon + tas_day: + short_name: tas + mip: day + tas_month: + short_name: tas + mip: Amon + tasmax: + mip: day + tasmin: + mip: day + tauu: + mip: Amon + tauv: + mip: Amon + tdps_day: + short_name: tdps + mip: Eday + tdps_month: + short_name: tdps + mip: Emon + tos: + mip: Omon + ts_day: + short_name: ts + mip: Eday + ts_month: + short_name: ts + mip: Amon + tsn_month: + short_name: tsn + mip: LImon + tsn_day: + short_name: tsn + mip: Eday + ua: + mip: Amon + uas_day: + short_name: uas + mip: day + uas_month: + short_name: uas + mip: Amon + va_day: + short_name: va + mip: day + va_month: + short_name: va + mip: Amon + vas_day: + short_name: vas + mip: day + vas_month: + short_name: vas + mip: Amon + wap: + mip: Amon + zg_day: + short_name: zg + mip: day + zg_month: + short_name: zg + mip: Amon + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, tier: 3, + type: reanaly, version: 1, start_year: 1979, end_year: 2018} + scripts: null + + + ERA-Interim-Land: + description: ERA-Interim-Land check + variables: + sm: + mip: Lmon + additional_datasets: + - {dataset: ERA-Interim-Land, project: OBS6, tier: 3, + type: reanaly, version: 1, start_year: 1979, end_year: 2010} + scripts: null + + + ERA5_OBS6: + description: ERA5 OBS6 (daily) check + variables: + evspsbl_Eday: + short_name: evspsbl + mip: Eday + evspsblpot_Eday: + short_name: evspsblpot + mip: Eday + tdps_Eday: + short_name: tdps + mip: Eday + ts_Eday: + short_name: ts + mip: Eday + tsn_Eday: + short_name: tsn + mip: Eday + clt_day: + short_name: clt + mip: day + mrro_day: + short_name: mrro + mip: day + pr_day: + short_name: pr + mip: day + prsn_day: + short_name: prsn + mip: day + psl_day: + short_name: psl + mip: day + rlds_day: + short_name: rlds + mip: day + rls_day: + short_name: rls + mip: day + rsds_day: + short_name: rsds + mip: day + rss_day: + short_name: rss + mip: day + tas_day: + short_name: tas + mip: day + tasmax_day: + short_name: tasmax + mip: day + tasmin_day: + short_name: tasmin + mip: day + uas_day: + short_name: uas + mip: day + vas_day: + short_name: vas + mip: day + ps_CFday: + short_name: ps + mip: CFday + rsdt_CFday: + short_name: rsdt + mip: CFday + orog: + mip: fx + additional_datasets: + - {dataset: ERA5, project: OBS6, tier: 3, type: reanaly, + version: 1, start_year: 1990, end_year: 1990} + scripts: null + + + ERA5_native6: + description: ERA5 native6 (hourly and monthly) check + variables: + clt_E1hr: + short_name: clt + mip: E1hr + evspsbl_E1hr: + short_name: evspsbl + mip: E1hr + evspsblpot_E1hr: + short_name: evspsblpot + mip: E1hr + mrro_E1hr: + short_name: mrro + mip: E1hr + pr_E1hr: + short_name: pr + mip: E1hr + prsn_E1hr: + short_name: prsn + mip: E1hr + ps_E1hr: + short_name: ps + mip: E1hr + psl_E1hr: + short_name: psl + mip: E1hr + ptype_E1hr: + short_name: ptype + mip: E1hr + rls_E1hr: + short_name: rls + 
mip: E1hr + rlds_E1hr: + short_name: rlds + mip: E1hr + rlns_E1hr: + short_name: rlns + mip: E1hr + frequency: 1hr + #rlus_E1hr: + # short_name: rlus + # mip: E1hr + # frequency: 1hr + rsds_E1hr: + short_name: rsds + mip: E1hr + rsdt_E1hr: + short_name: rsdt + mip: E1hr + rsns_E1hr: + short_name: rsns + mip: E1hr + frequency: 1hr + #rsus_E1hr: + # short_name: rsus + # mip: E1hr + # frequency: 1hr + rss_E1hr: + short_name: rss + mip: E1hr + uas_E1hr: + short_name: uas + mip: E1hr + vas_E1hr: + short_name: vas + mip: E1hr + tas_E1hr: + short_name: tas + mip: E1hr + tasmax_E1hr: + short_name: tasmax + mip: E1hr + tasmin_E1hr: + short_name: tasmin + mip: E1hr + tdps_E1hr: + short_name: tdps + mip: E1hr + ts_E1hr: + short_name: ts + mip: E1hr + tsn_E1hr: + short_name: tsn + mip: E1hr + clt_Amon: + short_name: clt + mip: Amon + cl_Amon: + short_name: cl + mip: Amon + evspsbl_Amon: + short_name: evspsbl + mip: Amon + evspsblpot_Amon: + short_name: evspsblpot + mip: Amon + mrro_Amon: + short_name: mrro + mip: Amon + pr_Amon: + short_name: pr + mip: Amon + prsn_Amon: + short_name: prsn + mip: Amon + ps_Amon: + short_name: ps + mip: Amon + psl_Amon: + short_name: psl + mip: Amon + ptype_Amon: + short_name: ptype + mip: Amon + rlds_Amon: + short_name: rlds + mip: Amon + rls_Amon: + short_name: rls + mip: Amon + rlns_Amon: + short_name: rlns + mip: Amon + #rlus_Amon: + # short_name: rlus + # mip: Amon + rsds_Amon: short_name: rsds - preproc: nopp - mip: 3hr - rsdscs: - preproc: nopp - mip: 3hr - rsdt: - preproc: nopp mip: Amon - rsus_month: - short_name: rsus - preproc: nopp + rsdt_Amon: + short_name: rsdt mip: Amon - rsus_3hr: - short_name: rsus - preproc: nopp - mip: 3hr - rsuscs: - preproc: nopp - mip: 3hr - rsut_month: - short_name: rsut - preproc: nopp + rsns_Amon: + short_name: rsns mip: Amon - rsut_3hr: - short_name: rsut - preproc: nopp - mip: 3hr - rsutcs_month: - short_name: rsutcs - preproc: nopp + #rsus_Amon: + # short_name: rsus + # mip: Amon + rss_Amon: + short_name: rss mip: Amon - rsutcs_3hr: - short_name: rsutcs - preproc: nopp - mip: 3hr + ta_Amon: + short_name: ta + mip: Amon + tas_Amon: + short_name: tas + mip: Amon + tdps_Amon: + short_name: tdps + mip: Amon + ts_Amon: + short_name: ts + mip: Amon + tsn_Amon: + short_name: tsn + mip: Amon + uas_Amon: + short_name: uas + mip: Amon + vas_Amon: + short_name: vas + mip: Amon + orog: + mip: fx additional_datasets: - - {dataset: CERES-SYN1deg, project: OBS, tier: 3, type: sat, version: Ed3A, start_year: 2001, end_year: 2016} + - {dataset: ERA5, project: native6, tier: 3, type: reanaly, + version: v1, start_year: 1990, end_year: 1990} scripts: null - ERA-Interim: - description: ERA-Interim + ESACCI-WATERVAPOUR: + description: ESACCI-WV check + variables: + prw: + additional_datasets: + - {dataset: ESACCI-WATERVAPOUR, project: OBS6, mip: Amon, tier: 3, + type: sat, version: CDR2-L3-COMBI-05deg-fv3.1, + start_year: 2003, end_year: 2017} + - {dataset: ESACCI-WATERVAPOUR, project: OBS6, mip: Eday, tier: 3, + type: sat, version: CDR2-L3-COMBI-05deg-fv3.1, + start_year: 2003, end_year: 2017} + scripts: null + + + GRACE: + description: GRACE check + variables: + lweGrace: + mip: Lmon + additional_datasets: + - {dataset: GRACE, project: OBS6, tier: 3, type: sat, + version: 1, start_year: 2002, end_year: 2020} + scripts: null + + + HWSD: + description: HWSD check + variables: + areacella: + mip: fx + cSoil: + mip: Lmon + sftlf: + mip: fx + additional_datasets: + - {dataset: HWSD, project: OBS, tier: 3, type: reanaly, version: 1.2, + start_year: 
2000, end_year: 2000} + scripts: null + + + JMA-TRANSCOM: + description: JMA-TRANSCOM check + variables: + nbp: + mip: Lmon + fgco2: + mip: Omon + additional_datasets: + - {dataset: JMA-TRANSCOM, project: OBS, tier: 3, type: reanaly, + version: 2018, start_year: 1985, end_year: 2016} + scripts: null + + + FLUXCOM: + description: FLUXCOM check + variables: + gpp: + additional_datasets: + - {dataset: FLUXCOM, project: OBS, mip: Lmon, tier: 3, type: reanaly, + version: ANN-v1, start_year: 1980, end_year: 2013} + scripts: null + + + LAI3g: + description: LAI3g check + variables: + lai: + additional_datasets: + - {dataset: LAI3g, project: OBS, mip: Lmon, tier: 3, + type: reanaly, version: 1_regridded, start_year: 1981, end_year: 2018} + scripts: null + + + LandFlux-EVAL: + description: LandFlux-EVAL check + variables: + et: + etStderr: + additional_datasets: + - {dataset: LandFlux-EVAL, project: OBS, mip: Lmon, tier: 3, + type: reanaly, version: Oct13, start_year: 1989, end_year: 2005} + scripts: null + + + MAC-LWP: + description: MAC-LWP check + variables: + lwp: + lwpStderr: + additional_datasets: + - {dataset: MAC-LWP, project: OBS, mip: Amon, tier: 3, + type: sat, version: v1, start_year: 1988, end_year: 2016} + scripts: null + + MERRA: + description: MERRA check variables: + cli: clivi: - preproc: nopp - mip: Amon - fx_files: [sftlf] clt: - preproc: nopp - mip: Amon + clw: clwvi: - preproc: nopp - mip: Amon - hfds: - preproc: nopp - mip: Omon + hur: hus: - preproc: nopp + lwp: + pr: + prw: + ps: + psl: + rlut: + rlutcs: + rsdt: + rsut: + rsutcs: + ta: + tas: + ts: + ua: + va: + wap: + zg: + additional_datasets: + - {dataset: MERRA, project: OBS, tier: 3, mip: Amon, + type: reanaly, version: 5.2.0, start_year: 1979, end_year: 2015} + scripts: null + + + MERRA2: + description: MERRA2 check + variables: + sm_monthly: + short_name: sm + mip: Lmon + clt_monthly: + short_name: clt mip: Amon - pr_month: + pr_monthly: short_name: pr - preproc: nopp mip: Amon - pr_day: - short_name: pr - preproc: nopp - mip: day - prw: - preproc: nopp + evspsbl_monthly: + short_name: evspsbl mip: Amon - ps: - preproc: nopp + hfss_monthly: + short_name: hfss mip: Amon - psl_month: - short_name: psl - preproc: nopp + hfls_monthly: + short_name: hfls mip: Amon - psl_day: + huss_monthly: + short_name: huss + mip: Amon + prc_monthly: + short_name: prc + mip: Amon + prsn_monthly: + short_name: prsn + mip: Amon + prw_monthly: + short_name: prw + mip: Amon + ps_monthly: + short_name: ps + mip: Amon + psl_monthly: short_name: psl - preproc: nopp - mip: day - ta: - preproc: nopp mip: Amon - tas_month: - short_name: tas - preproc: nopp + rlds_monthly: + short_name: rlds mip: Amon - tas_day: + rldscs_monthly: + short_name: rldscs + mip: Amon + rlus_monthly: + short_name: rlus + mip: Amon + rlut_monthly: + short_name: rlut + mip: Amon + rsds_monthly: + short_name: rsds + mip: Amon + rsdscs_monthly: + short_name: rsdscs + mip: Amon + rsdt_monthly: + short_name: rsdt + mip: Amon + tas_monthly: short_name: tas - preproc: nopp - mip: day - tasmin: - preproc: nopp - mip: day - tasmax: - preproc: nopp - mip: day - tauu: - preproc: nopp mip: Amon - tauv: - preproc: nopp + tasmax_monthly: + short_name: tasmax mip: Amon - ts: - preproc: nopp + tasmin_monthly: + short_name: tasmin mip: Amon - ua: - preproc: nopp + tauu_monthly: + short_name: tauu mip: Amon - va: - preproc: nopp + tauv_monthly: + short_name: tauv mip: Amon - wap: - preproc: nopp + rlutcs_monthly: + short_name: rlutcs mip: Amon - zg: - preproc: nopp + ts_monthly: + 
short_name: ts + mip: Amon + uas_monthly: + short_name: uas + mip: Amon + vas_monthly: + short_name: vas + mip: Amon + rsus_monthly: + short_name: rsus + mip: Amon + rsuscs_monthly: + short_name: rsuscs + mip: Amon + rsut_monthly: + short_name: rsut + mip: Amon + rsutcs_monthly: + short_name: rsutcs + mip: Amon + ta_monthly: + short_name: ta + mip: Amon + ua_monthly: + short_name: ua + mip: Amon + va_monthly: + short_name: va + mip: Amon + tro3_monthly: + short_name: tro3 + mip: Amon + zg_monthly: + short_name: zg + mip: Amon + hus_monthly: + short_name: hus + mip: Amon + wap_monthly: + short_name: wap + mip: Amon + hur_monthly: + short_name: hur + mip: Amon + cl_monthly: + short_name: cl + mip: Amon + clw_monthly: + short_name: clw + mip: Amon + cli_monthly: + short_name: cli + mip: Amon + clwvi_monthly: + short_name: clwvi + mip: Amon + clivi_monthly: + short_name: clivi mip: Amon - tos: - preproc: nopp - mip: Omon additional_datasets: - - {dataset: ERA-Interim, project: OBS, tier: 3, type: reanaly, version: 1, start_year: 1979, end_year: 2018} + - {dataset: MERRA2, project: OBS6, tier: 3, + type: reanaly, version: 5.12.4, start_year: 1980, end_year: 2022} + scripts: null + + + MLS-AURA: + description: MLS-AURA check + variables: + hur: + mip: Amon + hurStderr: + mip: Amon + additional_datasets: + - {dataset: MLS-AURA, project: OBS6, tier: 3, type: sat, version: '004', + start_year: 2005, end_year: 2019} scripts: null MODIS: - description: MODIS + description: MODIS check variables: od550aer: - preproc: nopp mip: aero clivi: - preproc: nopp mip: Amon clwvi: - preproc: nopp mip: Amon clt: - preproc: nopp mip: Amon lwpStderr: - preproc: nopp mip: Amon iwpStderr: - preproc: nopp mip: Amon additional_datasets: - - {dataset: MODIS, project: OBS, tier: 3, type: sat, version: MYD08-M3, start_year: 2003, end_year: 2018} + - {dataset: MODIS, project: OBS, tier: 3, + type: sat, version: MYD08-M3, start_year: 2003, end_year: 2018} + scripts: null + + + MTE: + description: MTE check + variables: + gpp: + gppStderr: + additional_datasets: + - {dataset: MTE, project: OBS, tier: 3, mip: Lmon, + type: reanaly, version: May12, start_year: 1982, end_year: 2011} + scripts: null + + + NDP: + description: NDP check + variables: + cVeg: + additional_datasets: + - {dataset: NDP, project: OBS, tier: 3, mip: Lmon, + type: reanaly, version: 017b, start_year: 2000, end_year: 2000} scripts: null NIWA-BS: - description: NIWA-BS + description: NIWA-BS check variables: toz: - preproc: nopp - mip: Amon tozStderr: - preproc: nopp - mip: Amon additional_datasets: - - {dataset: NIWA-BS, project: OBS, tier: 3, type: sat, version: v3.3, start_year: 1979, end_year: 2016} + - {dataset: NIWA-BS, project: OBS, mip: Amon, tier: 3, + type: sat, version: v3.3, start_year: 1979, end_year: 2016} + scripts: null + + + NSIDC-0116: + description: NSIDC-0116 check + variables: + areacello: + mip: fx + usi: + mip: day + vsi: + mip: day + additional_datasets: + - {dataset: NSIDC-0116-nh, project: OBS, tier: 3, + type: reanaly, version: "4.1", start_year: 1978, end_year: 2018} + - {dataset: NSIDC-0116-sh, project: OBS, tier: 3, + type: reanaly, version: "4.1", start_year: 1978, end_year: 2018} scripts: null + NSIDC-G02202: + description: NSIDC-G02202 check + variables: + areacello: + mip: Ofx + siconc: + mip: SImon + additional_datasets: + - {dataset: NSIDC-G02202-sh, project: OBS6, tier: 3, + type: reanaly, version: 4, start_year: 1979, end_year: 2022} + scripts: null UWisc: - description: UWisc + description: UWisc check variables: lwp: 
- preproc: nopp
- mip: Amon
lwpStderr:
- preproc: nopp
- mip: Amon
additional_datasets:
- - {dataset: UWisc, project: OBS, tier: 3, type: sat, version: v2, start_year: 1988, end_year: 2007}
+ - {dataset: UWisc, project: OBS, mip: Amon, tier: 3,
+ type: sat, version: v2, start_year: 1988, end_year: 2007}
scripts: null
diff --git a/esmvaltool/recipes/examples/recipe_concatenate_exps.yml b/esmvaltool/recipes/examples/recipe_concatenate_exps.yml
index 39f8f89dd1..8c72362fa7 100644
--- a/esmvaltool/recipes/examples/recipe_concatenate_exps.yml
+++ b/esmvaltool/recipes/examples/recipe_concatenate_exps.yml
@@ -5,11 +5,13 @@ documentation:
description: |
Concatenate time series of different experiments at the preprocessor level.

+ title: Recipe to test time concatenation in preprocessor.
+
authors:
- - schl_ma
+ - schlund_manuel

maintainer:
- - schl_ma
+ - schlund_manuel

datasets:
- {dataset: CanESM2}
@@ -30,7 +32,8 @@ diagnostics:
start_year: 1950
end_year: 2000
additional_datasets:
- - {dataset: ERA-Interim, project: OBS, tier: 3, type: reanaly, version: 1, start_year: 1980, end_year: 2000}
+ - {dataset: ERA-Interim, project: OBS6, tier: 3, type: reanaly,
+ version: 1, start_year: 1980, end_year: 2000}
scripts: null

diag_concatenate_exps:
diff --git a/esmvaltool/recipes/examples/recipe_correlation.yml b/esmvaltool/recipes/examples/recipe_correlation.yml
index ec443f909f..f61ec3874b 100644
--- a/esmvaltool/recipes/examples/recipe_correlation.yml
+++ b/esmvaltool/recipes/examples/recipe_correlation.yml
@@ -1,16 +1,18 @@
# ESMValTool
# recipe_correlation.yml
---
-documentation:
+documentation:
description: |
Calculate the Pearson's r correlation coefficient over specified dimensions.
-
+
+ title: Recipe that computes Pearson's r correlation coefficients.
+
authors:
- - ande_bo
+ - andela_bouwe

maintainer:
- - ande_bo
-
+ - andela_bouwe
+
projects:
- c3s-magic

@@ -50,7 +52,7 @@ diagnostics:
# One or more datasets can be added here
- {dataset: bcc-csm1-1}
# The reference dataset is required
- - {dataset: ERA-Interim, project: OBS, tier: 3, type: reanaly, version: 1}
+ - {dataset: ERA-Interim, project: OBS6, tier: 3, type: reanaly, version: 1}
scripts:
correlation_pressure:
script: examples/correlate.py
diff --git a/esmvaltool/recipes/examples/recipe_decadal.yml b/esmvaltool/recipes/examples/recipe_decadal.yml
new file mode 100644
index 0000000000..4963347cf4
--- /dev/null
+++ b/esmvaltool/recipes/examples/recipe_decadal.yml
@@ -0,0 +1,60 @@
+# ESMValTool
+---
+documentation:
+ title: Example recipe that loads DCPP data.
+ description: |
+ This is an example recipe to deal with DCPP data. Computes the global
+ mean of tas and compares it against ERA-Interim for a set of timeranges.
+ Reproduces the examples given in deliverable D9.4 of ISENES-3.
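For readers new to the DCPP conventions used in the datasets below: ESMValCore expands an '(N:M)' range inside a dataset key into one entry per value, and timerange accepts wildcards. A condensed sketch of the pattern, with values copied from the recipe below:

    additional_datasets:
      # Expands to sub_experiments s1980, s1981, ..., s2018; timerange '*'
      # selects the full period available for each hindcast.
      - {dataset: EC-Earth3, project: CMIP6, exp: dcppA-hindcast,
         ensemble: r1i1p1f1, sub_experiment: 's(1980:2018)', timerange: '*'}
      # A fixed timerange pins the reference dataset to November 1980
      # through December 2018.
      - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1,
         tier: 3, timerange: '198011/201812'}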
+ authors: + - loosveldt-tomas_saskia + maintainer: + - loosveldt-tomas_saskia + projects: + - isenes3 + + +preprocessors: + pptas: + area_statistics: + operator: 'mean' +diagnostics: + first_example: + additional_datasets: + - &dcpp {dataset: EC-Earth3, project: CMIP6, exp: dcppA-hindcast, ensemble: r1i1p1f1, + sub_experiment: 's(1980:2018)', timerange: '*'} + - &obs {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3, + timerange: '198011/201812'} + variables: + tas: + grid: gr + mip: Amon + preprocessor: pptas + scripts: + first_example: + script: examples/decadal_example.py + second_example: + additional_datasets: + - {<<: *dcpp, sub_experiment: 's(1980:2007)'} + - {<<: *dcpp, sub_experiment: 's(2008:2018)', timerange: '*/201812'} + - {<<: *obs} + variables: + tas: + grid: gr + mip: Amon + preprocessor: pptas + scripts: + second_example: + script: examples/decadal_example.py + third_example: + additional_datasets: + - {<<: *dcpp, timerange: '*/P1Y'} + - {<<: *obs, timerange: '1980/2018'} + variables: + tas: + grid: gr + mip: Amon + preprocessor: pptas + scripts: + third_example: + script: examples/decadal_example.py diff --git a/esmvaltool/recipes/examples/recipe_easy_ipcc.yml b/esmvaltool/recipes/examples/recipe_easy_ipcc.yml new file mode 100644 index 0000000000..af5d711fa8 --- /dev/null +++ b/esmvaltool/recipes/examples/recipe_easy_ipcc.yml @@ -0,0 +1,128 @@ +documentation: + title: Easy IPCC + description: Reproduce part of IPCC AR6 figure 9.3a. + references: + - fox-kemper21ipcc + authors: + - kalverla_peter + - andela_bouwe + maintainer: + - andela_bouwe + +preprocessors: + easy_ipcc: + custom_order: true + anomalies: + period: month + reference: + start_year: 1950 + start_month: 1 + start_day: 1 + end_year: 1979 + end_month: 12 + end_day: 31 + area_statistics: + operator: mean + annual_statistics: + operator: mean + convert_units: + units: 'degrees_C' + ensemble_statistics: + statistics: + - operator: mean + multi_model_statistics: + statistics: + - operator: mean + - operator: percentile + percent: 17 + - operator: percentile + percent: 83 + span: full + keep_input_datasets: false + ignore_scalar_coords: true + +diagnostics: + AR6_Figure_9.3: + variables: + tos_ssp585: + short_name: tos + exp: ['historical', 'ssp585'] + project: CMIP6 + mip: Omon + preprocessor: easy_ipcc + timerange: '1850/2100' + tos_ssp126: + short_name: tos + exp: ['historical', 'ssp126'] + project: CMIP6 + mip: Omon + timerange: '1850/2100' + preprocessor: easy_ipcc + scripts: + Figure_9.3a: + script: examples/make_plot.py + +datasets: + - {dataset: ACCESS-CM2, ensemble: 'r(1:10)i1p1f1', grid: gn} + - {dataset: ACCESS-ESM1-5, ensemble: 'r(1:40)i1p1f1', grid: gn} + - {dataset: AWI-CM-1-1-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn} + # - {dataset: CAMS-CSM1-0, ensemble: 'r(1:2)i1p1f1', grid: gn} # available data does not fully cover timerange + - {dataset: CAS-ESM2-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAS-ESM2-0, ensemble: r3i1p1f1, grid: gn} + - {dataset: CESM2, ensemble: r4i1p1f1, grid: gn} + - {dataset: CESM2, ensemble: 'r(10:11)i1p1f1', grid: gn} + - {dataset: CESM2-WACCM, ensemble: r1i1p1f1, grid: gn} + - {dataset: CIESM, ensemble: r1i1p1f1, grid: gn} + - {dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CMCC-ESM2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CNRM-CM6-1, ensemble: 'r(1:6)i1p1f2', grid: gn} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gn} + - {dataset: CNRM-ESM2-1, ensemble: 
'r(1:5)i1p1f2', grid: gn} + - {dataset: CanESM5, ensemble: 'r(1:25)i1p(1:2)f1', grid: gn} + - {dataset: CanESM5-1, ensemble: 'r1i1p(1:2)f1', grid: gn, institute: CCCma} + - {dataset: CanESM5-CanOE, ensemble: 'r(1:3)i1p2f1', grid: gn} + - {dataset: EC-Earth3, ensemble: r1i1p1f1, grid: gn} + - {dataset: EC-Earth3, ensemble: r4i1p1f1, grid: gn} + - {dataset: EC-Earth3, ensemble: r6i1p1f1, grid: gn} + # - {dataset: EC-Earth3, ensemble: r9i1p1f1, grid: gn} # download failure of ssp585 + - {dataset: EC-Earth3, ensemble: r11i1p1f1, grid: gn} + - {dataset: EC-Earth3, ensemble: r15i1p1f1, grid: gn} + # - {dataset: EC-Earth3, ensemble: 'r(101:150)i1p1f1', grid: gn} # available data does not fully cover timerange + - {dataset: EC-Earth3-Veg, ensemble: 'r(1:4)i1p1f1', grid: gn} + - {dataset: EC-Earth3-Veg, ensemble: r6i1p1f1, grid: gn} + # - {dataset: EC-Earth3-Veg-LR, ensemble: 'r(1:3)i1p1f1', grid: gn} # mismatch between i and j coordinate names between historical and ssp experiment + - {dataset: FGOALS-f3-L, ensemble: 'r(1:3)i1p1f1', grid: gn} + - {dataset: FGOALS-g3, ensemble: 'r(1:4)i1p1f1', grid: gn} + - {dataset: FIO-ESM-2-0, ensemble: 'r(1:3)i1p1f1', grid: gn} + - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-G, ensemble: 'r(1:4)i1p5f1', grid: gn, supplementary_variables: [{short_name: areacella, mip: fx, exp: piControl, ensemble: r1i1p5f1}, {short_name: areacello, skip: true}]} + - {dataset: GISS-E2-1-G, ensemble: 'r(1:5)i1p1f2', grid: gn, supplementary_variables: [{short_name: areacella, mip: fx, exp: piControl, ensemble: r1i1p1f1}, {short_name: areacello, skip: true}]} + - {dataset: GISS-E2-1-G, ensemble: 'r(1:5)i1p3f1', grid: gn, supplementary_variables: [{short_name: areacella, mip: fx, exp: piControl, ensemble: r1i1p3f1}, {short_name: areacello, skip: true}]} + - {dataset: GISS-E2-1-H, ensemble: 'r(1:5)i1p1f2', grid: gn, supplementary_variables: [{short_name: areacella, mip: fx, exp: piControl, ensemble: r1i1p1f1}, {short_name: areacello, skip: true}]} + - {dataset: GISS-E2-1-H, ensemble: 'r(1:5)i1p3f1', grid: gn, supplementary_variables: [{short_name: areacella, mip: fx, exp: piControl, ensemble: r1i1p3f1}, {short_name: areacello, skip: true}]} + - {dataset: GISS-E2-2-G, ensemble: 'r(1:5)i1p3f1', grid: gn, supplementary_variables: [{short_name: areacella, mip: fx, exp: piControl, ensemble: r1i1p1f1}, {short_name: areacello, skip: true}]} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn} + - {dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f3, grid: gn} + # - {dataset: IITM-ESM, ensemble: r1i1p1f1, grid: gn} # available data does not fully cover timerange + - {dataset: INM-CM4-8, ensemble: r1i1p1f1, grid: gr1} + - {dataset: INM-CM5-0, ensemble: r1i1p1f1, grid: gr1} + - {dataset: IPSL-CM6A-LR, ensemble: 'r(1:4)i1p1f1', grid: gn} + - {dataset: IPSL-CM6A-LR, ensemble: r6i1p1f1, grid: gn} + - {dataset: IPSL-CM6A-LR, ensemble: r14i1p1f1, grid: gn} + # - {dataset: KACE-1-0-G, ensemble: 'r(1:3)i1p1f1', grid: gr} # unstructured grid but no cell area information available + # - {dataset: KIOST-ESM, ensemble: r1i1p1f1, grid: gr1} # historical and ssp126 experiment are on different grids + - {dataset: MCM-UA-1-0, ensemble: r1i1p1f2, grid: gn} + - {dataset: MIROC-ES2H, ensemble: r1i1p4f2, grid: gn} + - {dataset: MIROC-ES2L, ensemble: 'r(1:10)i1p1f2', grid: gn} + - {dataset: MIROC6, ensemble: 'r(1:50)i1p1f1', grid: gn} + - {dataset: MPI-ESM1-2-HR, ensemble: 'r1i1p1f1', grid: gn} + # - {dataset: MPI-ESM1-2-HR, ensemble: 'r(1:2)i1p1f1', grid: gn} # second ensemble 
member causes warnings about large graphs in `concatenate` preprocessor step
+ - {dataset: MPI-ESM1-2-LR, ensemble: 'r(1:50)i1p1f1', grid: gn}
+ - {dataset: MRI-ESM2-0, ensemble: 'r(1:5)i1p1f1', grid: gn}
+ # - {dataset: NESM3, ensemble: 'r(1:2)i1p1f1', grid: gn} # cannot be used due to https://github.com/ESMValGroup/ESMValCore/issues/2101
+ # - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} # duplicated areacello file with wrong name
+ - {dataset: NorESM2-MM, ensemble: r1i1p1f1, grid: gn}
+ # - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn} # download failure of ssp585
+ - {dataset: UKESM1-0-LL, ensemble: 'r(1:4)i1p1f2', grid: gn}
+ - {dataset: UKESM1-0-LL, ensemble: r8i1p1f2, grid: gn}
diff --git a/esmvaltool/recipes/examples/recipe_extract_shape.yml b/esmvaltool/recipes/examples/recipe_extract_shape.yml
new file mode 100644
index 0000000000..08d1bab490
--- /dev/null
+++ b/esmvaltool/recipes/examples/recipe_extract_shape.yml
@@ -0,0 +1,58 @@
+# ESMValTool
+# recipe_extract_shape.yml
+---
+documentation:
+ description: |
+ Example recipe extracting precipitation in the Elbe catchment.
+
+ The example shapefile(s) can be copied from
+ esmvaltool/diag_scripts/shapeselect/testdata/Elbe.* and
+ placed in the auxiliary_data_dir defined in the configuration.
+
+ title: Example recipe extracting precipitation in the Elbe catchment.
+
+ authors:
+ - andela_bouwe
+
+ maintainer:
+ - andela_bouwe
+
+ references:
+ - acknow_project
+
+ projects:
+ - ewatercycle
+
+datasets:
+ - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1}
+
+preprocessors:
+
+ preproc:
+ regrid:
+ scheme: linear
+ target_grid: 0.25x0.25
+ extract_shape:
+ shapefile: Elbe.shp
+ climate_statistics:
+ operator: mean
+
+diagnostics:
+
+ diagnostic:
+ description: Extract data about the Elbe catchment.
+ themes:
+ - phys
+ realms:
+ - atmos
+ variables:
+ tas:
+ mip: Amon
+ start_year: 2000
+ end_year: 2002
+ preprocessor: preproc
+ scripts:
+ script1:
+ script: examples/diagnostic.py
+ quickplot:
+ plot_type: pcolormesh
diff --git a/esmvaltool/recipes/examples/recipe_julia.yml b/esmvaltool/recipes/examples/recipe_julia.yml
new file mode 100644
index 0000000000..982b5d8e1a
--- /dev/null
+++ b/esmvaltool/recipes/examples/recipe_julia.yml
@@ -0,0 +1,39 @@
+# ESMValTool
+# recipe_julia.yml
+---
+documentation:
+ description: |
+ Recipe for example diagnostic written in Julia.
+
+ title: Recipe running an example diagnostic written in Julia.
+
+ authors:
+ - vonhardenberg_jost
+
+ maintainer:
+ - vonhardenberg_jost
+
+datasets:
+ - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1}
+
+preprocessors:
+ preproc:
+ regrid:
+ scheme: linear
+ target_grid: 1x1
+
+diagnostics:
+ example:
+ description: Example diagnostic written in Julia
+ variables:
+ tas:
+ mip: Amon
+ start_year: 1997
+ end_year: 1997
+ preprocessor: preproc
+ reference_dataset: "CanESM2"
+
+ scripts:
+ main:
+ script: examples/diagnostic.jl
+ parameter1: 1 # example parameter
diff --git a/esmvaltool/recipes/examples/recipe_my_personal_diagnostic.yml b/esmvaltool/recipes/examples/recipe_my_personal_diagnostic.yml
index 264a12a72a..657dd0c32b 100644
--- a/esmvaltool/recipes/examples/recipe_my_personal_diagnostic.yml
+++ b/esmvaltool/recipes/examples/recipe_my_personal_diagnostic.yml
@@ -1,24 +1,27 @@
# ESMValTool
+# recipe_my_personal_diagnostic.yml
---
documentation:
description: |
This is an example recipe for a personal diagnostic.
You can run any Python diagnostic of your choice without installing ESMValTool as developer or git pushing. - Simply include the full path to your script of choice in script, - see example /path/to/your/my_little_diagnostic.py An example personal diagnostic can be found in esmvaltool/diag_scripts/examples/my_little_diagnostic.py authors: - - pred_va + - predoi_valeriu + + title: | + Example recipe for running a user-written diagnostic, without installing + ESMValTool as developer. maintainer: - - pred_va + - predoi_valeriu datasets: - - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2005} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1} preprocessors: pp: @@ -33,8 +36,15 @@ diagnostics: ta: preprocessor: pp mip: Amon - fx_files: [sftlf, sftof, areacello] + start_year: 2000 + end_year: 2002 + sftlf: + mip: fx + sftof: + mip: fx + areacello: + mip: fx scripts: my_diagnostic: - script: /path/to/your/my_little_diagnostic.py - + # Replace this path with the absolute path to your diagnostic (eg. /path/to/your/my_little_diagnostic.py) + script: examples/my_little_diagnostic.py diff --git a/esmvaltool/recipes/examples/recipe_ncl.yml b/esmvaltool/recipes/examples/recipe_ncl.yml index 8087539b0c..f48dc7b174 100644 --- a/esmvaltool/recipes/examples/recipe_ncl.yml +++ b/esmvaltool/recipes/examples/recipe_ncl.yml @@ -5,12 +5,14 @@ documentation: description: | Example recipe that plots air temperature. + title: Example recipe running a diagnostic written in NCL. + authors: - - righ_ma + - righi_mattia maintainer: - - righ_ma - + - righi_mattia + references: - acknow_project @@ -18,19 +20,18 @@ documentation: - esmval datasets: - - {dataset: bcc-csm1-1, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-ESM2G, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MPI-ESM-LR, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: ERA-Interim, project: OBS, tier: 3, type: reanaly, version: 1, start_year: 2000, end_year: 2002} + - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 2000, end_year: 2002} + - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 2000, end_year: 2002} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 2000, end_year: 2002} preprocessors: preprocessor_1: extract_levels: levels: 85000 scheme: nearest - regrid: false - mask_landsea: false - multi_model_statistics: false diagnostics: example: @@ -42,10 +43,11 @@ diagnostics: variables: ta: preprocessor: preprocessor_1 - reference_dataset: ERA-Interim - fx_files: [sftlf] - additional_datasets: - - {dataset: NCEP, project: OBS, tier: 2, type: reanaly, version: 1, start_year: 2000, end_year: 2002} + reference_dataset: MPI-ESM-LR + mip: Amon + additional_datasets: + - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 2000, end_year: 2002} scripts: test_ta: &settings script: examples/diagnostic.ncl diff --git a/esmvaltool/recipes/examples/recipe_preprocessor_derive_test.yml b/esmvaltool/recipes/examples/recipe_preprocessor_derive_test.yml index 
938540dc9d..6e1c54c980 100644 --- a/esmvaltool/recipes/examples/recipe_preprocessor_derive_test.yml +++ b/esmvaltool/recipes/examples/recipe_preprocessor_derive_test.yml @@ -5,120 +5,245 @@ documentation: description: | Recipe that demonstrates various uses of the preprocessor derive functionality. + title: Example recipe running various aspects of the derive preprocessor. + authors: - - ande_bo - - schl_ma + - andela_bouwe + - schlund_manuel + - vegas-regidor_javier maintainer: - - righ_ma + - righi_mattia projects: - c3s-magic -preprocessors: - - regrid: - regrid: - target_grid: CanESM2 - scheme: linear - diagnostics: - diag1: - description: Test variable derivation + cmip5: + description: Test variable derivation for CMIP5 models. variables: - toz: &toz + alb: &alb_cmip5 project: CMIP5 mip: Amon exp: historical - start_year: 2000 + start_year: 2004 end_year: 2005 derive: true force_derivation: false additional_datasets: - - {dataset: GFDL-CM3, ensemble: r1i1p1} - - {dataset: GISS-E2-H, ensemble: r1i1p2} - - {dataset: GISS-E2-R, ensemble: r1i1p2} - swcre: - <<: *toz - lwcre: - <<: *toz - clhmtisccp: &cloud + - {dataset: GFDL-CM3, ensemble: r1i1p1} + amoc: + <<: *alb_cmip5 + mip: Omon + additional_datasets: + - {dataset: CanESM2, ensemble: r1i1p1} + asr: + <<: *alb_cmip5 + clhmtisccp: &cloud_cmip5 project: CMIP5 mip: cfMon exp: amip - start_year: 1985 + start_year: 1987 end_year: 1988 ensemble: r1i1p1 derive: true force_derivation: false additional_datasets: - - {dataset: HadGEM2-A} - nbp_grid: - preprocessor: regrid - project: CMIP5 - mip: Lmon - exp: historical - start_year: 2000 - end_year: 2005 - derive: true - additional_datasets: - - {dataset: CanESM2, ensemble: r1i1p1} - - {dataset: MPI-ESM-LR, ensemble: r1i1p1} - - {dataset: IPSL-CM5A-LR, ensemble: r1i1p1} - scripts: null - - diag2: - description: Test variable derivation - variables: - rtnt: - <<: *toz + - {dataset: HadGEM2-A, ensemble: r1i1p1} clhtkisccp: - <<: *cloud - scripts: null - - diag3: - description: Test variable derivation - variables: - rsnt: - <<: *toz + <<: *cloud_cmip5 + cllmtisccp: + <<: *cloud_cmip5 + clltkisccp: + <<: *cloud_cmip5 clmmtisccp: - <<: *cloud - scripts: null - - diag4: - description: Test variable derivation - variables: - rsns: - <<: *toz + <<: *cloud_cmip5 clmtkisccp: - <<: *cloud - scripts: null - - diag5: - description: Test variable derivation - variables: + <<: *cloud_cmip5 + co2s: + <<: *alb_cmip5 + additional_datasets: + - {dataset: CanESM2, ensemble: r1i1p1} + ctotal: + <<: *alb_cmip5 + mip: Lmon + additional_datasets: + - {dataset: CanESM2, ensemble: r1i1p1} + et: + <<: *alb_cmip5 + mip: Lmon + lvp: + <<: *alb_cmip5 + lwcre: + <<: *alb_cmip5 + lwp: + <<: *alb_cmip5 + netcre: + <<: *alb_cmip5 + ohc: + <<: *alb_cmip5 + mip: Omon + additional_datasets: + - {dataset: CCSM4, ensemble: r1i1p1} rlns: - <<: *toz - clmtkisccp: - <<: *cloud + <<: *alb_cmip5 + rlnstcs: + <<: *alb_cmip5 + rlnst: + <<: *alb_cmip5 + rlntcs: + <<: *alb_cmip5 + rsns: + <<: *alb_cmip5 + rsnstcsnorm: + <<: *alb_cmip5 + rsnstcs: + <<: *alb_cmip5 + rsnst: + <<: *alb_cmip5 + rsntcs: + <<: *alb_cmip5 + rsnt: + <<: *alb_cmip5 + rtnt: + <<: *alb_cmip5 + sispeed: + <<: *alb_cmip5 + mip: day + start_year: 1979 + end_year: 1979 + sithick: + <<: *alb_cmip5 + mip: day + start_year: 1979 + end_year: 1979 + sm: + <<: *alb_cmip5 + mip: Lmon + swcre: + <<: *alb_cmip5 + toz: + <<: *alb_cmip5 + uajet: + <<: *alb_cmip5 + vegfrac: + <<: *alb_cmip5 + mip: Lmon + additional_datasets: + - {dataset: HadGEM2-ES, ensemble: r1i1p1} scripts: null - 
diag6: - description: Test variable derivation + cmip6: + description: Test variable derivation for CMIP6 models. variables: - lwp: - <<: *toz + alb: &alb_cmip6 + project: CMIP6 + mip: Amon + exp: historical + start_year: 2004 + end_year: 2005 + derive: true + force_derivation: false + additional_datasets: + - {dataset: IPSL-CM6A-LR, ensemble: r1i1p1f1, grid: gr} + # Issues with required variables [#359] + # amoc: + # <<: *alb_cmip6 + # mip: Omon + asr: + <<: *alb_cmip6 + clhmtisccp: &cloud_cmip6 + project: CMIP6 + mip: CFmon + exp: hist-nat + start_year: 1987 + end_year: 1988 + derive: true + force_derivation: false + additional_datasets: + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn} + clhtkisccp: + <<: *cloud_cmip6 cllmtisccp: - <<: *cloud - scripts: null - - diag7: - description: Test variable derivation - variables: + <<: *cloud_cmip6 clltkisccp: - <<: *cloud + <<: *cloud_cmip6 + clmmtisccp: + <<: *cloud_cmip6 + clmtkisccp: + <<: *cloud_cmip6 + co2s: + <<: *alb_cmip6 + additional_datasets: + - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1} + ctotal: + <<: *alb_cmip6 + mip: Lmon + et: + <<: *alb_cmip6 + mip: Lmon + lvp: + <<: *alb_cmip6 + lwcre: + <<: *alb_cmip6 + lwp: + <<: *alb_cmip6 netcre: - <<: *toz + <<: *alb_cmip6 + ohc: + <<: *alb_cmip6 + mip: Omon + additional_datasets: + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn} + rlns: + <<: *alb_cmip6 + rlnstcs: + <<: *alb_cmip6 + rlnst: + <<: *alb_cmip6 + rlntcs: + <<: *alb_cmip6 + rsns: + <<: *alb_cmip6 + rsnstcsnorm: + <<: *alb_cmip6 + rsnstcs: + <<: *alb_cmip6 + rsnst: + <<: *alb_cmip6 + rsntcs: + <<: *alb_cmip6 + rsnt: + <<: *alb_cmip6 + rtnt: + <<: *alb_cmip6 + sispeed: + <<: *alb_cmip6 + mip: SImon + start_year: 1979 + end_year: 1979 + additional_datasets: + - {dataset: MPI-ESM1-2-HR, ensemble: r1i1p1f1, grid: gn} + sithick: + <<: *alb_cmip6 + mip: SImon + additional_datasets: + - {dataset: MPI-ESM1-2-HR, ensemble: r1i1p1f1, grid: gn} + sm: + <<: *alb_cmip6 + mip: Lmon + swcre: + <<: *alb_cmip6 + toz: + <<: *alb_cmip6 + exp: G1 + start_year: 1948 + end_year: 1949 + uajet: + <<: *alb_cmip6 + vegfrac: + <<: *alb_cmip6 + mip: Lmon scripts: null diff --git a/esmvaltool/recipes/examples/recipe_preprocessor_test.yml b/esmvaltool/recipes/examples/recipe_preprocessor_test.yml index e62e3d94a9..cce4e89df6 100644 --- a/esmvaltool/recipes/examples/recipe_preprocessor_test.yml +++ b/esmvaltool/recipes/examples/recipe_preprocessor_test.yml @@ -4,43 +4,44 @@ documentation: description: | Various example preprocessors. - + authors: - - righ_ma - - ande_bo - - schl_ma + - righi_mattia + - andela_bouwe + - schlund_manuel + + title: Test recipe for ESMValCore preprocessors. 
maintainer: - - righ_ma - + - righi_mattia + projects: - c3s-magic datasets: - - {dataset: MPI-ESM-LR, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: bcc-csm1-1, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-ESM2G, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: ERA-Interim, project: OBS, tier: 3, type: reanaly, version: 1, start_year: 2000, end_year: 2002} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, tag: group1} + - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1} preprocessors: preprocessor_1: extract_levels: - levels: ERA-Interim + levels: bcc-csm1-1 # This also works, but allows specifying the pressure coordinate name - # levels: {dataset: ERA-Interim, coordinate: air_pressure} + # levels: {dataset: bcc-csm1-1, coordinate: air_pressure} scheme: linear mask_landsea: - mask_out: land #if land, will mask land out; if sea, will mask seas and oceans out + mask_out: land # if land (sea), will mask land (sea and oceans) out regrid: - target_grid: ERA-Interim + target_grid: bcc-csm1-1 scheme: linear mask_fillvalues: threshold_fraction: 0.90 multi_model_statistics: span: overlap statistics: [mean, median] - exclude: [NCEP] + exclude: [GFDL-ESM2G] preprocessor_2: custom_order: true @@ -48,7 +49,7 @@ preprocessors: target_grid: 1x1 scheme: nearest extract_levels: - levels: {cmor_table: CMIP6, coordinate: plev10} + levels: {cmor_table: CMIP6, coordinate: plev8} scheme: nearest mask_fillvalues: false multi_model_statistics: @@ -73,30 +74,155 @@ preprocessors: mask_fillvalues: false multi_model_statistics: false + + # Simple ensemble mean, median, and percentiles for multiple models + preprocessor_5: + ensemble_statistics: + statistics: + - mean + - median + - operator: percentile + percent: 5 + - operator: percentile + percent: 95 + exclude: [GFDL-ESM2G] + + # Calculate ensemble means, then multi-model mean + preprocessor_6: + regrid: + target_grid: 3x3 + scheme: linear + ensemble_statistics: + statistics: [mean] + exclude: [GFDL-ESM2G] + multi_model_statistics: + span: overlap + statistics: [mean] + keep_input_datasets: false + exclude: [GFDL-ESM2G] + + # Multi-model mean with more complex groupby + preprocessor_7: + regrid: + target_grid: 3x3 + scheme: linear + multi_model_statistics: + span: overlap + statistics: [mean] + groupby: [dataset, tag] + exclude: [bcc-csm1-1, MPI-ESM-LR, GFDL-ESM2G] + + # First calculate ensemble mean and median, then multi-model min and max + + preprocessor_8: + regrid: + target_grid: 3x3 + scheme: linear + ensemble_statistics: + statistics: [mean, median] + exclude: [GFDL-ESM2G] + multi_model_statistics: + span: overlap + statistics: [max, min] + groupby: [ensemble_statistics] + keep_input_datasets: false + exclude: [GFDL-ESM2G] + diagnostics: - diagnostic_1: &diag - description: Preprocessor test diagnostic. + diagnostic_1: + description: Test extract_levels, mask_landsea, regrid, mask_fillvalues, multi_model_statistics. 
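The two-stage pattern used by preprocessor_6 and preprocessor_8 above, reduced to its core: ensemble members of each model are collapsed first, and the resulting per-model means are then combined across models. A minimal sketch (the preprocessor name is hypothetical):

    preprocessors:
      two_stage_mean:
        ensemble_statistics:         # r1, r2, ... of one model -> ensemble mean
          statistics: [mean]
        multi_model_statistics:      # combine the per-model ensemble means
          span: overlap
          statistics: [mean]
          keep_input_datasets: false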
variables: ta: preprocessor: preprocessor_1 - fx_files: [orog, sftlf] - additional_datasets: - - {dataset: NCEP, project: OBS, tier: 2, type: reanaly, version: 1, start_year: 2000, end_year: 2002} + mip: Amon + start_year: 2000 + end_year: 2002 + orog: + mip: fx + sftlf: + mip: fx scripts: null diagnostic_2: - <<: *diag + description: Test custom_order, regrid, levels, multi_model_statistics. variables: ta: preprocessor: preprocessor_2 + mip: Amon + start_year: 2000 + end_year: 2002 scripts: null diagnostic_3_and_4: - description: Preprocessor test diagnostic. + description: Test extract_levels, regrid. variables: ta: preprocessor: preprocessor_3 + mip: Amon + start_year: 2000 + end_year: 2002 pr: preprocessor: preprocessor_4 + mip: Amon + start_year: 2000 + end_year: 2002 + scripts: null + + diagnostic_5: + description: Test ensemble_statistics. + variables: + tas: + preprocessor: preprocessor_5 + mip: Amon + start_year: 2000 + end_year: 2000 + additional_datasets: + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: 'r(2:3)i1p1'} + - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r2i1p1} + scripts: null + + diagnostic_6: + description: Test regrid, ensemble_statistics, multi_model_statistics. + variables: + tas: + preprocessor: preprocessor_6 + mip: Amon + start_year: 2000 + end_year: 2000 + additional_datasets: + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: 'r(2:3)i1p1'} + - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r2i1p1} + scripts: null + + diagnostic_7: + description: Test regrid, multi_model_statistics. + variables: + tas: + preprocessor: preprocessor_7 + mip: Amon + start_year: 2000 + end_year: 2000 + additional_datasets: + - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, tag: abc} + - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r2i1p1, tag: abc} + - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r3i1p1} + - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r4i1p1} + - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, tag: group1} + - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r2i1p1, tag: group1} + - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r3i1p1, tag: group2} + - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r4i1p1, tag: group2} + scripts: null + + diagnostic_8: + description: Test regrid, ensemble_statistics, multi_model_statistics. + variables: + tas: + preprocessor: preprocessor_8 + mip: Amon + start_year: 2000 + end_year: 2000 + additional_datasets: + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: 'r(2:3)i1p1'} + - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r2i1p1} scripts: null diff --git a/esmvaltool/recipes/examples/recipe_python.yml b/esmvaltool/recipes/examples/recipe_python.yml index 7bfe97cb7d..d85e1ae437 100644 --- a/esmvaltool/recipes/examples/recipe_python.yml +++ b/esmvaltool/recipes/examples/recipe_python.yml @@ -1,16 +1,24 @@ # ESMValTool # recipe_python.yml +# +# See https://docs.esmvaltool.org/en/latest/recipes/recipe_examples.html +# for a description of this recipe. +# +# See https://docs.esmvaltool.org/projects/esmvalcore/en/latest/recipe/overview.html +# for a description of the recipe format. --- documentation: description: | - Example recipe that plots the mean precipitation and temperature. + Example recipe that plots a map and timeseries of temperature. 
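The timerange values used below follow ESMValCore's ISO 8601 style: a start/end pair such as 1850/2000 spans whole years, while a start/period pair such as 2000/P1M requests one month of data starting in January 2000. A condensed sketch (short names taken from this recipe):

    variables:
      tas:
        mip: Amon
        timerange: 2000/P1M   # January 2000 only (one-month period)
      tas_global:
        short_name: tas
        mip: Amon
        timerange: 1850/2000  # years 1850 through 2000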
+ + title: Recipe that runs an example diagnostic written in Python. authors: - - ande_bo - - righ_ma + - andela_bouwe + - righi_mattia maintainer: - - schl_ma + - schlund_manuel references: - acknow_project @@ -20,39 +28,81 @@ documentation: - c3s-magic datasets: - - {dataset: CanESM2, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-ESM2G, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MPI-ESM-LR, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} + - {dataset: BCC-ESM1, project: CMIP6, exp: historical, ensemble: r1i1p1f1, grid: gn} + - {dataset: bcc-csm1-1, version: v1, project: CMIP5, exp: historical, ensemble: r1i1p1} preprocessors: + # See https://docs.esmvaltool.org/projects/esmvalcore/en/latest/recipe/preprocessor.html + # for a description of the preprocessor functions. + + to_degrees_c: + convert_units: + units: degrees_C - preprocessor1: - extract_levels: - levels: 85000 - scheme: nearest - regrid: - target_grid: reference_dataset + annual_mean_amsterdam: + extract_location: + location: Amsterdam scheme: linear + annual_statistics: + operator: mean multi_model_statistics: + statistics: + - mean span: overlap - statistics: [mean, median] + convert_units: + units: degrees_C + + annual_mean_global: + area_statistics: + operator: mean + annual_statistics: + operator: mean + convert_units: + units: degrees_C diagnostics: - diagnostic1: - description: Air temperature and precipitation Python tutorial diagnostic. + map: + description: Global map of temperature in January 2000. themes: - phys realms: - atmos variables: - ta: - preprocessor: preprocessor1 - reference_dataset: CanESM2 - pr: - reference_dataset: MPI-ESM-LR + tas: + mip: Amon + preprocessor: to_degrees_c + timerange: 2000/P1M + caption: | + Global map of {long_name} in January 2000 according to {dataset}. scripts: script1: script: examples/diagnostic.py quickplot: plot_type: pcolormesh + cmap: Reds + + timeseries: + description: Annual mean temperature in Amsterdam and global mean since 1850. + themes: + - phys + realms: + - atmos + variables: + tas_amsterdam: + short_name: tas + mip: Amon + preprocessor: annual_mean_amsterdam + timerange: 1850/2000 + caption: Annual mean {long_name} in Amsterdam according to {dataset}. + tas_global: + short_name: tas + mip: Amon + preprocessor: annual_mean_global + timerange: 1850/2000 + caption: Annual global mean {long_name} according to {dataset}. + scripts: + script1: + script: examples/diagnostic.py + quickplot: + plot_type: plot diff --git a/esmvaltool/recipes/examples/recipe_python_object_oriented.yml b/esmvaltool/recipes/examples/recipe_python_object_oriented.yml deleted file mode 100644 index 90e1a5b224..0000000000 --- a/esmvaltool/recipes/examples/recipe_python_object_oriented.yml +++ /dev/null @@ -1,74 +0,0 @@ -# ESMValTool -# recipe_python_object_oriented.yml ---- -documentation: - description: | - Example recipe that runs a Python example diagnostic with a more object oriented interface. 
- - authors: - - schl_ma - - maintainer: - - schl_ma - -datasets: - - {dataset: GFDL-ESM2G, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MPI-ESM-LR, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: ERA-Interim, project: OBS, tier: 3, type: reanaly, version: 1, start_year: 2000, end_year: 2002} - -preprocessors: - - preprocessor1: - extract_levels: - levels: 85000 - scheme: nearest - regrid: - target_grid: reference_dataset - scheme: linear - multi_model_statistics: - span: overlap - statistics: [mean, median] - - preprocessor2: - regrid: - target_grid: reference_dataset - scheme: linear - multi_model_statistics: - span: overlap - statistics: [mean, median] - -diagnostics: - - diagnostic1: - description: Air temperature and precipitation Python tutorial diagnostic. - variables: - ta: - preprocessor: preprocessor1 - reference_dataset: ERA-Interim - pr: - preprocessor: preprocessor2 - reference_dataset: ERA-Interim - scripts: - script1a: - script: examples/diagnostic_object_oriented.py - quickplot: - plot_type: pcolormesh - script1b: - script: examples/diagnostic_object_oriented.py - ancestors: ['script1a'] - quickplot: - plot_type: contour - - diagnostic2: - description: Another Python tutorial diagnostic. - variables: - tas: - preprocessor: preprocessor2 - additional_datasets: - - {dataset: bcc-csm1-1, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - reference_dataset: MPI-ESM-LR - scripts: - script2: - script: examples/diagnostic_object_oriented.py - quickplot: - plot_type: contourf diff --git a/esmvaltool/recipes/examples/recipe_r.yml b/esmvaltool/recipes/examples/recipe_r.yml index b8536c72e1..532b6306da 100644 --- a/esmvaltool/recipes/examples/recipe_r.yml +++ b/esmvaltool/recipes/examples/recipe_r.yml @@ -2,13 +2,16 @@ # recipe_example_r.yml --- documentation: - description: Recipe for example diagnostic written in R. - - authors: - - arno_en + description: | + Recipe for example diagnostic written in R. + + title: Recipe that runs an example diagnostic written in R. + + authors: + - arnone_enrico maintainer: - - ande_bo + - andela_bouwe datasets: - {dataset: EC-EARTH, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1997, end_year: 1997} @@ -17,7 +20,7 @@ preprocessors: extract_levels: false diagnostics: example: - description: Example diagnostic written in R + description: Example diagnostic written in R variables: pr: preprocessor: preproc @@ -25,5 +28,5 @@ diagnostics: mip: day scripts: main: - script: examples/diagnostic.r - parameter1: 1 # example parameter + script: examples/diagnostic.R + parameter1: 1 # example parameter diff --git a/esmvaltool/recipes/examples/recipe_variable_groups.yml b/esmvaltool/recipes/examples/recipe_variable_groups.yml index 64a4a873a7..4510e33d28 100644 --- a/esmvaltool/recipes/examples/recipe_variable_groups.yml +++ b/esmvaltool/recipes/examples/recipe_variable_groups.yml @@ -1,16 +1,18 @@ +# ESMValTool # recipe_variable_groups.yml --- documentation: - description: | Example recipe to demonstrate grouping of variables in the diagnostic section. + title: Recipe running various examples of grouping of variables in the diagnostic. 
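The grouping demonstrated in this recipe is plain YAML reuse: one variable block is anchored, each further group merges it and overrides individual keys, and a tag distinguishes the groups for the diagnostic script. A minimal sketch of the idea (group and tag names hypothetical):

    variables:
      tas_1: &variable_settings
        short_name: tas
        mip: Amon
        tag: TAS1
      tas_2:
        <<: *variable_settings
        exp: rcp45   # override one key for this group
        tag: TAS2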
+ authors: - - schl_ma + - schlund_manuel maintainer: - - schl_ma + - schlund_manuel projects: - crescendo @@ -51,7 +53,7 @@ diagnostics: preprocessor: mask tag: TAS2 additional_datasets: - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} tas_3: <<: *variable_settings exp: rcp45 @@ -59,5 +61,4 @@ diagnostics: end_year: 2025 tag: TAS3 additional_datasets: *datasets - scripts: - null + scripts: null diff --git a/esmvaltool/recipes/hydrology/recipe_globwat.yml b/esmvaltool/recipes/hydrology/recipe_globwat.yml new file mode 100644 index 0000000000..31aeae82fa --- /dev/null +++ b/esmvaltool/recipes/hydrology/recipe_globwat.yml @@ -0,0 +1,101 @@ +# ESMValTool +# globwat.yml +--- +documentation: + title: Generate forcing for the GlobWat hydrological model + description: | + Recipe pre-processes ERA-Interim and ERA-5 data + for use in the GlobWat hydrological model. + GlobWat is a Global Hydrological Model provided by FAO + http://www.fao.org/nr/water/aquamaps/ + + By default, PET is calculated by the De Bruin method (De Bruin et al. 2016). + However, users can choose the Langbein method by setting the langbein_pet + (Langbein et al. 1949) in the recipe to True. + + More information about the De Bruin method can be found at: + https://doi.org/10.1175/JHM-D-15-0006.1 page 1376, equation 6. + More information about the Langbein method can be found at: + https://doi.org/10.3133/cir52 page 8, figure 1. + An example of using Langbein method can be found at: + https://doi.org/10.1080/02626667.2017.1332416 page 1472, equation 7. + + A target_grid_file has been generated from one of the models sample files, + see the recipe documentation. + + authors: + - abdollahi_banafsheh + - alidoost_sarah + + maintainer: + - abdollahi_banafsheh + + projects: + - ewatercycle + + references: + - acknow_project + - debruin16ams + - hoogeveen15hess + - langbein1949usgs + +preprocessors: + area_selection: + extract_region: + start_longitude: 40 + end_longitude: 65 + start_latitude: 25 + end_latitude: 40 + +diagnostics: + diagnostic_daily_GlobWat: + description: daily precipitation of ERA5 & ERA-Interim + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, tier: 3, type: reanaly, version: 1} + - {dataset: ERA5, project: OBS6, type: reanaly, version: '1', tier: 3} + variables: + pr: &daily_var + mip: day + # for 30 years, this calculation takes a very long time + # timerange: 1986/2016 # uncomment this line for the original settings + timerange: 19860101/19860110 # comment this line for the original settings + # comment preprocessor to process data on the global scale + preprocessor: area_selection + tas: *daily_var + # comment psl, rsds and rsdt if langbein_pet is True + psl: *daily_var + rsds: *daily_var + rsdt: + <<: *daily_var + mip: CFday + scripts: + script: + script: hydrology/globwat.py + target_grid_file: 'globwat/globwat_target_grid.nc' + evaporation_method: debruin # options: debruin or langbein + regrid_scheme: area_weighted + + diagnostic_monthly_GlobWat: + description: monthly precipitation and potential evaporation of ERA5 & ERA-Interim + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, tier: 3, type: reanaly, version: 1} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, tier: 3} + variables: + pr: &var_monthly + mip: Amon + # for 30 years, this calculation takes a very long time + # timerange: 1986/2016 # uncomment this line for the original settings + timerange: 
198601/198612 # comment this line for the original settings + # comment preprocessor to process data on the global scale + preprocessor: area_selection + tas: *var_monthly + # comment psl, rsds and rsdt if langbein_pet is True + psl: *var_monthly + rsds: *var_monthly + rsdt: *var_monthly + scripts: + script: + script: hydrology/globwat.py + target_grid_file: 'globwat/globwat_target_grid.nc' + evaporation_method: debruin # options: debruin or langbein + regrid_scheme: area_weighted diff --git a/esmvaltool/recipes/hydrology/recipe_hydro_forcing.yml b/esmvaltool/recipes/hydrology/recipe_hydro_forcing.yml new file mode 100644 index 0000000000..925d9bd420 --- /dev/null +++ b/esmvaltool/recipes/hydrology/recipe_hydro_forcing.yml @@ -0,0 +1,107 @@ +# ESMValTool +# recipe_hydro_forcing.yml +--- +documentation: + title: Compare hydrological forcing between climate datasets + description: | + This recipe can be used to assess the agreement between forcing datasets + (i.e. MSWEP, ERA5, ERA-Interim) for a certain catchment. The recipe can be + used to: + + 1. Plot a timeseries of the raw daily data + 2. Plot monthly aggregated data over a certain period + 3. Plot the monthly climate statistics over a certain period + + authors: + - smeets_stef + - aerts_jerom + + maintainer: + - unmaintained + + projects: + - ewatercycle + + references: + - acknow_project + +datasets: + - {dataset: ERA-Interim, project: OBS6, tier: 3, type: reanaly, version: 1} + - {dataset: ERA5, project: OBS6, tier: 3, type: reanaly, version: 1} + - {dataset: MSWEP, project: native6, exp: historical, ensemble: r1i1p1, type: reanaly, version: v220, tier: 3} + +preprocessors: + daily: + extract_shape: &extract_shape + # Relative to auxiliary_data_dir defined in configuration + shapefile: Lorentz_Basin_Shapefiles/Meuse/Meuse.shp + method: contains + crop: true + area_statistics: + operator: mean + monthly: + extract_shape: *extract_shape + area_statistics: + operator: mean + monthly_statistics: + operator: sum + climatology_month: + extract_shape: *extract_shape + area_statistics: + operator: mean + climate_statistics: + period: month + operator: mean + climatology_day: + extract_shape: *extract_shape + area_statistics: + operator: mean + climate_statistics: + period: day + operator: mean + +diagnostics: + sample_year: + variables: + pr: + mip: day + start_year: 2015 + end_year: 2016 + preprocessor: daily + scripts: + plot: + script: hydrology/hydro_forcing.py + plot_type: timeseries + time_period: day + + total_precipitation: + variables: + pr: + mip: day + start_year: 2015 + end_year: 2016 + preprocessor: monthly + scripts: + plot: + script: hydrology/hydro_forcing.py + plot_type: timeseries + time_period: month + + climatology: + variables: + pr_month: + mip: day + short_name: pr + start_year: 2015 + end_year: 2016 + preprocessor: climatology_month + pr_day: + mip: day + short_name: pr + start_year: 2015 + end_year: 2016 + preprocessor: climatology_day + scripts: + plot: + script: hydrology/hydro_forcing.py + plot_type: climatology diff --git a/esmvaltool/recipes/hydrology/recipe_hype.yml b/esmvaltool/recipes/hydrology/recipe_hype.yml new file mode 100644 index 0000000000..27e2fadc68 --- /dev/null +++ b/esmvaltool/recipes/hydrology/recipe_hype.yml @@ -0,0 +1,60 @@ +# ESMValTool +# recipe_hype.yml +--- +documentation: + title: Generate forcing for the Hype hydrological model + description: | + Recipe pre-processes ERA5 and ERA-Interim reanalyses files + for use in the Hype hydrological model. 
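A note on the De Bruin PET option referenced in the GlobWat description above (and relied on again by the marrmot and wflow recipes below, which request psl, rsds and rsdt for exactly this purpose). The cited eq. 6 of De Bruin et al. (2016) reduces to a few lines of array code. The sketch below is illustrative only: the constants and the Tetens slope are written out from the paper, and the diagnostic scripts themselves (e.g. hydrology/globwat.py) remain the reference implementation.

```python
import numpy as np

def debruin_pet(tas, psl, rsds, rsdt):
    """Potential evapotranspiration after De Bruin et al. (2016), eq. 6.

    tas  : near-surface air temperature [K]
    psl  : air pressure [Pa]
    rsds : surface downwelling shortwave radiation [W m-2]
    rsdt : TOA incident shortwave radiation [W m-2]
    Returns PET as a water flux [kg m-2 s-1].
    """
    albedo = 0.23     # reference-surface albedo
    cs_const = 110.0  # empirical constant [W m-2]
    beta = 20.0       # empirical offset [W m-2]
    cp_const = 1004.0  # specific heat of dry air [J kg-1 K-1]
    rv_const, rd_const = 461.51, 287.0  # gas constants, vapour / dry air
    lambda_ = 2.5e6   # latent heat of vaporization [J kg-1]

    # Slope of the Tetens saturation vapour pressure curve [Pa K-1]
    esat = 610.8 * np.exp(17.27 * (tas - 273.15) / (tas - 35.85))
    delta_svp = esat * 17.27 * 237.3 / (tas - 35.85) ** 2

    # Psychrometric constant gamma = cp * p / (eps * lambda), eps = rd / rv
    gamma = (rv_const / rd_const) * cp_const * psl / lambda_

    # Eq. 6: radiation term weighted by delta/(delta+gamma), plus offset beta
    rad_term = (1.0 - albedo) * rsds - cs_const * rsds / rsdt
    return (delta_svp / (delta_svp + gamma) * rad_term + beta) / lambda_

# Rough single-point check, mid-latitude summer-ish conditions:
pet = debruin_pet(tas=293.15, psl=101325.0, rsds=250.0, rsdt=400.0)
print(f"{pet * 86400:.2f} mm/day")  # ~3.6; 1 kg m-2 corresponds to 1 mm of water
```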
+ + authors: + - pelupessy_inti + - kalverla_peter + + maintainer: + - unmaintained + + projects: + - ewatercycle + + references: + - acknow_project + +preprocessors: + preprocessor: &general + extract_shape: + shapefile: Meuse_HYPE.shp + method: contains + decomposed: true + area_statistics: + operator: mean + temperature: + <<: *general + convert_units: + units: degC + water: + <<: *general + convert_units: + units: kg m-2 d-1 + +datasets: + - {dataset: ERA-Interim, project: OBS6, tier: 3, type: reanaly, version: 1} + - {dataset: ERA5, project: OBS6, tier: 3, type: reanaly, version: 1} + +diagnostics: + hype: + description: HYPE input preprocessor for daily data + variables: + tas: &var + mip: day + start_year: 1990 + end_year: 2001 + preprocessor: temperature + tasmin: *var + tasmax: *var + pr: + <<: *var + preprocessor: water + scripts: + script: + script: hydrology/hype.py diff --git a/esmvaltool/recipes/hydrology/recipe_lisflood.yml b/esmvaltool/recipes/hydrology/recipe_lisflood.yml new file mode 100644 index 0000000000..3acb4be481 --- /dev/null +++ b/esmvaltool/recipes/hydrology/recipe_lisflood.yml @@ -0,0 +1,97 @@ +# ESMValTool +# recipe_lisflood.yml +--- +documentation: + title: Generate forcing for the Lisflood hydrological model + description: | + Recipe pre-processes files for use in the LISFLOOD hydrological model. + + authors: + - verhoeven_stefan + - kalverla_peter + - andela_bouwe + + maintainer: + - unmaintained + + projects: + - ewatercycle + + references: + - acknow_project + +datasets: + - {dataset: ERA-Interim, project: OBS6, tier: 3, type: reanaly, version: 1} + - {dataset: ERA5, project: OBS6, tier: 3, type: reanaly, version: 1} + +preprocessors: + general: &general + regrid: + target_grid: + step_longitude: 0.1 + start_longitude: 0.05 + end_longitude: 9.05 + step_latitude: 0.1 + start_latitude: 45.05 + end_latitude: 54.05 + scheme: linear + extract_shape: + # Perhaps a single shapefile needs to be created covering multiple basins + # Relative to auxiliary_data_dir defined in configuration + shapefile: Lorentz_Basin_Shapefiles/Meuse/Meuse.shp + method: contains + crop: true # set to false to keep the entire globe (memory intensive!) + daily_water: + <<: *general + convert_units: + units: kg m-2 d-1 + daily_temperature: + <<: *general + convert_units: + units: degC + daily_radiation: + <<: *general + convert_units: + units: J m-2 day-1 + daily_windspeed: + <<: *general + +diagnostics: + diagnostic_daily: + description: LISFLOOD input preprocessor for ERA-Interim and ERA5 data + variables: + pr: &var_daily + mip: day + # if the time length increases, the regrid preprocessor might fail due to a memory error. 
+ # two possible fixes are: + # running the recipe on a machine with enough memory + # providing a cube as a target_grid in the processor + start_year: 1990 + end_year: 1990 + preprocessor: daily_water + tas: + <<: *var_daily + preprocessor: daily_temperature + tasmax: + <<: *var_daily + preprocessor: daily_temperature + tasmin: + <<: *var_daily + preprocessor: daily_temperature + tdps: + <<: *var_daily + mip: Eday + preprocessor: daily_temperature + uas: + <<: *var_daily + preprocessor: daily_windspeed + vas: + <<: *var_daily + preprocessor: daily_windspeed + rsds: + <<: *var_daily + preprocessor: daily_radiation + scripts: + script: + script: hydrology/lisflood.py + catchment: Meuse diff --git a/esmvaltool/recipes/hydrology/recipe_marrmot.yml b/esmvaltool/recipes/hydrology/recipe_marrmot.yml new file mode 100644 index 0000000000..e85a66d9b9 --- /dev/null +++ b/esmvaltool/recipes/hydrology/recipe_marrmot.yml @@ -0,0 +1,58 @@ +# ESMValTool +# recipe_marrmot.yml +--- +documentation: + title: Generate forcing for the Marrmot hydrological model + description: | + Recipe pre-processes ERA-Interim and ERA5 data + for use in the MARRMoT hydrological model. + + MARRMoT is the Modular Assessment of Rainfall-Runoff Models Toolbox ... + https://www.geosci-model-dev.net/12/2463/2019/ + + authors: + - kalverla_peter + - camphuijsen_jaro + - alidoost_sarah + + maintainer: + - unmaintained + + projects: + - ewatercycle + + references: + - acknow_project + +preprocessors: + daily: &daily + extract_shape: + # Lumped model: needs catchment-aggregated input data + # Relative to auxiliary_data_dir defined in configuration + shapefile: Meuse/Meuse.shp + method: contains + crop: true + +diagnostics: + diagnostic_daily: + description: marrmot input preprocessor for daily data + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, tier: 3, type: reanaly, version: 1} + - {dataset: ERA5, project: OBS6, tier: 3, type: reanaly, version: 1} + variables: + tas: &var_daily + mip: day + start_year: 1990 + end_year: 2018 + preprocessor: daily + pr: *var_daily + # MARRMoT requires PET. Thus, we need psl, rsds, rsdt + psl: *var_daily + rsds: *var_daily + rsdt: + <<: *var_daily + mip: CFday + scripts: + script: + script: hydrology/marrmot.py + basin: Meuse diff --git a/esmvaltool/recipes/hydrology/recipe_pcrglobwb.yml b/esmvaltool/recipes/hydrology/recipe_pcrglobwb.yml new file mode 100644 index 0000000000..fbdbce7271 --- /dev/null +++ b/esmvaltool/recipes/hydrology/recipe_pcrglobwb.yml @@ -0,0 +1,83 @@ +# ESMValTool +# recipe_pcrglobwb.yml +--- +documentation: + title: Generate forcing for the PCR-GLOBWB hydrological model + description: | + Pre-processes climate data for use in the PCR-GLOBWB hydrological model. 
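The hydrology recipes above repeatedly pair extract_shape with an area_statistics mean (explicitly in the hydro_forcing and Hype recipes; in marrmot the aggregation happens downstream) to turn a gridded field into one time series per catchment, which is what a lumped model ingests. A minimal sketch of that pair, assuming esmvalcore's public preprocessor API and an illustrative input file name:

```python
import iris
from esmvalcore.preprocessor import area_statistics, extract_shape

# Load a daily precipitation cube (any CMOR-ised pr file; name is illustrative)
cube = iris.load_cube("pr_day_ERA5_19900101-20181231.nc")

# Keep only grid cells whose centres fall inside the catchment polygon;
# crop=True also trims the cube to the shape's bounding box
cube = extract_shape(
    cube,
    shapefile="Meuse/Meuse.shp",  # relative to auxiliary_data_dir
    method="contains",
    crop=True,
)

# Collapse lat/lon to an area-weighted catchment mean, leaving one
# value per time step for the lumped model
series = area_statistics(cube, operator="mean")
print(series.shape)  # (n_days,)
```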
+ + authors: + - aerts_jerom + - andela_bouwe + - drost_niels + - camphuijsen_jaro + - alidoost_sarah + - kalverla_peter + + maintainer: + - unmaintained + + projects: + - ewatercycle + + references: + - sutanudjaja2018gmd + + +preprocessors: + crop_basin: &crop_basin + extract_region: # bounding box values depend on catchment + start_longitude: 3 + end_longitude: 13.5 + start_latitude: 45 + end_latitude: 54 + climatology: &climatology + climate_statistics: + operator: mean + period: day + preproc_pr: &preproc_pr + <<: *crop_basin + convert_units: + units: kg m-2 d-1 + preproc_tas: &preproc_tas + <<: *crop_basin + preproc_pr_clim: + <<: *preproc_pr + <<: *climatology + preproc_tas_clim: + <<: *preproc_tas + <<: *climatology + +diagnostics: + diagnostic_daily: + description: PCR-GLOBWB input preprocessor for ERA-Interim and ERA5 data + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, tier: 3, type: reanaly, version: 1} + - {dataset: ERA5, project: OBS6, tier: 3, type: reanaly, version: 1} + variables: + pr: + mip: day + start_year: 2002 + end_year: 2016 + preprocessor: preproc_pr + tas: + mip: day + start_year: 2002 + end_year: 2016 + preprocessor: preproc_tas + pr_climatology: + short_name: pr + mip: day + start_year: 1990 + end_year: 2002 + preprocessor: preproc_pr_clim + tas_climatology: + short_name: tas + mip: day + start_year: 1990 + end_year: 2002 + preprocessor: preproc_tas_clim + scripts: + script: + script: hydrology/pcrglobwb.py + basin: rhine diff --git a/esmvaltool/recipes/hydrology/recipe_wflow.yml b/esmvaltool/recipes/hydrology/recipe_wflow.yml new file mode 100644 index 0000000000..213428a9ac --- /dev/null +++ b/esmvaltool/recipes/hydrology/recipe_wflow.yml @@ -0,0 +1,61 @@ +# ESMValTool +# recipe_wflow.yml +--- +documentation: + title: Generate forcing for the WFlow hydrological model + description: | + Pre-processes climate data for the WFlow hydrological model. + + authors: + - kalverla_peter + - camphuijsen_jaro + - alidoost_sarah + - aerts_jerom + - andela_bouwe + + maintainer: + - unmaintained + + projects: + - ewatercycle + + references: + - acknow_project + +preprocessors: + rough_cutout: + extract_region: + start_longitude: 5.0 + end_longitude: 12.0 + start_latitude: 47.25 + end_latitude: 52.5 + +diagnostics: + wflow_daily: + description: WFlow input preprocessor for daily data + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, tier: 3, type: reanaly, version: 1} + - {dataset: ERA5, project: OBS6, tier: 3, type: reanaly, version: 1} + variables: + orog: + mip: fx + preprocessor: rough_cutout + tas: &daily_var + mip: day + preprocessor: rough_cutout + start_year: 1990 + end_year: 2001 + pr: *daily_var + # evspsblpot: # doesn't exist for ERA-Interim. + # Reconstruct evspsblpot using: + psl: *daily_var + rsds: *daily_var + rsdt: + <<: *daily_var + mip: CFday + scripts: + script: + script: hydrology/wflow.py + basin: Rhine + dem_file: 'wflow_parameterset/wflow_rhine_sbm/staticmaps/wflow_dem.map' + regrid: area_weighted diff --git a/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figure_914.yml b/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figure_914.yml new file mode 100644 index 0000000000..e0f0baab64 --- /dev/null +++ b/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figure_914.yml @@ -0,0 +1,134 @@ +# ESMValTool +# recipe_flato13ipcc_figure_914.yml +--- +documentation: + title: IPCC AR5, chap. 9 diagnostics for 9.14 + + description: | + Reproducing Fig. 9.14 from IPCC AR5, chap. 9 (Flato et al., 2013). 
+ Data sets taken out were part of the original publication but are + currently not available on ESGF. + + authors: + - bock_lisa + - gier_bettina + - lauer_axel + - schlund_manuel + - senftleben_daniel + - zimmermann_klaus + + maintainer: + - lauer_axel + + references: + - flato13ipcc + + projects: + - embrace + - esmval + - crescendo + + +preprocessors: + + zonal_mean: + custom_order: true + climate_statistics: + regrid: + target_grid: 1x1 + scheme: linear + zonal_statistics: + operator: mean + convert_units: + units: degC + + equatorial: + custom_order: true + climate_statistics: + regrid: + target_grid: 1x1 + scheme: linear + extract_region: + start_longitude: 0. + end_longitude: 360. + start_latitude: -5. + end_latitude: 5. + meridional_statistics: + operator: mean + convert_units: + units: degC + +diagnostics: + + # ********************************************************************** + # Flato et al. (2013) - IPCC AR5, chap. 9 + # similar to fig. 9.14 + # ********************************************************************** + # SST zonal mean and equatorial, multi model mean + # ********************************************************************** + + fig09-14: + title: Fig. 9.14 (SST) + description: | + IPCC AR5 Ch. 9, Fig. 9.14 (sst error, zonal mean and equatorial) + themes: + - phys + realms: + - ocean + variables: + tos_zm: &fig09_14_settings + project: CMIP5 + exp: historical + ensemble: r1i1p1 + mip: Omon + short_name: tos + preprocessor: zonal_mean + reference_dataset: HadISST + start_year: 1979 + end_year: 1999 + tos_eq: + <<: *fig09_14_settings + preprocessor: equatorial + additional_datasets: + - {dataset: HadISST, project: OBS, type: reanaly, version: 1, tier: 2} + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + # - {dataset: bcc-csm1-1-m} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: EC-EARTH} + - {dataset: FGOALS-g2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H} + - {dataset: GISS-E2-R} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + scripts: + fig09-14: + script: ipcc_ar5/ch09_fig09_14.py diff --git a/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figure_924.yml b/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figure_924.yml new file mode 100644 index 0000000000..27fcff1af3 --- /dev/null +++ b/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figure_924.yml @@ -0,0 +1,136 @@ +# ESMValTool +# recipe_flato13ipcc_figure_924.yml +--- +documentation: + title: IPCC AR5, chap. 9 diagnostics for 9.24 + + description: | + Reproducing Fig. 9.24 from IPCC AR5, chap. 9 (Flato et al., 2013). + Data sets taken out were part of the original publication but are + currently not available on ESGF. 
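For context on what the seaice_tsline.ncl and seaice_trends.ncl scripts below compute from sic and areacello: sea-ice extent is conventionally the total area of all grid cells whose ice fraction exceeds 15%. A rough numpy sketch of that reduction (hemisphere and month selection omitted; the NCL scripts remain the reference implementation):

```python
import numpy as np

def sea_ice_extent(sic, areacello, threshold=15.0):
    """Sea-ice extent [million km2] per time step.

    sic       : sea-ice area fraction [%], shape (time, j, i)
    areacello : ocean grid-cell area [m2], shape (j, i)
    """
    # Extent counts the full area of every cell above the threshold
    # (unlike sea-ice *area*, which would weight each cell by sic/100).
    mask = sic >= threshold
    extent = (mask * areacello).sum(axis=(-2, -1))
    return extent / 1e12  # m2 -> million km2
```

For fig. 9.24a, for instance, one would select September time steps and northern-hemisphere cells before calling this.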
+ + authors: + - bock_lisa + - gier_bettina + - lauer_axel + - schlund_manuel + - senftleben_daniel + - zimmermann_klaus + + maintainer: + - lauer_axel + + references: + - flato13ipcc + + projects: + - embrace + - esmval + - crescendo + + +diagnostics: + + # *********************************************************************** + # Flato et al. (2013) - IPCC AR5, chap. 9 + # similar to fig. 9.24 + # *********************************************************************** + # 9.24a/b: time series of Arctic and Antarctic sea ice extent + # 9.24c/d: trend distribution of September/February Arctic/Antarctic + # sea ice extent + # *********************************************************************** + + fig09-24: + title: Fig. 9.24 (sea ice) + description: timeseries and trend distributions of sea ice extent + themes: + - seaIce + realms: + - seaIce + variables: + sic: + mip: OImon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1960 + end_year: 2005 + additional_datasets: + - {dataset: HadISST, project: OBS, type: reanaly, version: 1, tier: 2} + areacello: + mip: fx + project: CMIP5 + exp: historical + ensemble: r0i0p0 + start_year: 1960 + end_year: 2005 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + # - {dataset: bcc-csm1-1-m} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-CAM5} + - {dataset: CNRM-CM5} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + # - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + # - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + scripts: + fig09-24a: + script: seaice/seaice_tsline.ncl + # "Arctic" or "Antarctic"; entire hemisphere will be evaluated + region: "Arctic" + # A = annual mean, 3 = March, 9 = September + month: "9" + # "CMIP5", "DEFAULT" + styleset: "CMIP5" + # Plot multi-model mean & std dev + multi_model_mean: true + # Create legend label for each individual ensemble member + EMs_in_lg: false + # Fill polar hole in data with sic = 1. + fill_pole_hole: true + fig09-24b: + script: seaice/seaice_tsline.ncl + # "Arctic" or "Antarctic"; entire hemisphere will be evaluated + region: "Antarctic" + # A = annual mean, 3 = March, 9 = September + month: "2" + # "CMIP5", "DEFAULT" + styleset: "CMIP5" + # Plot multi-model mean & std dev + multi_model_mean: true + # Create legend label for each individual ensemble member + EMs_in_lg: false + # Fill polar hole in data with sic = 1. + fill_pole_hole: false + fig09-24c: + script: seaice/seaice_trends.ncl + # "Arctic" or "Antarctic"; entire hemisphere will be evaluated + region: "Arctic" + # A = annual mean, 3 = March, 9 = September + month: "9" + # Fill polar hole in data with sic = 1. 
+ fill_pole_hole: true + fig09-24d: + script: seaice/seaice_trends.ncl + # "Arctic" or "Antarctic"; entire hemisphere will be evaluated + region: "Antarctic" + # A = annual mean, 3 = March, 9 = September + month: "2" diff --git a/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figure_942.yml b/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figure_942.yml new file mode 100644 index 0000000000..20e0d7bd82 --- /dev/null +++ b/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figure_942.yml @@ -0,0 +1,550 @@ +# ESMValTool +# recipe_flato13ipcc_figure_942.yml +--- +documentation: + title: IPCC AR5, chap. 9 diagnostics for 9.42 + + description: | + Reproducing Fig. 9.42 from IPCC AR5, chap. 9 (Flato et al., 2013). + Data sets taken out were part of the original publication but are + currently not available on ESGF. + + authors: + - bock_lisa + - gier_bettina + - lauer_axel + - schlund_manuel + - senftleben_daniel + - zimmermann_klaus + + maintainer: + - lauer_axel + + references: + - flato13ipcc + + projects: + - embrace + - esmval + - crescendo + + +preprocessors: + + spatial_mean: + area_statistics: + operator: mean + + +diagnostics: + + # ********************************************************************** + # Flato et al. (2013) - IPCC AR5, chap. 9 + # similar to fig. 9.42 + # ********************************************************************** + # + # ********************************************************************** + + ecs_cmip5: + description: Calculate ECS for CMIP5 models. + themes: + - phys + realms: + - atmos + variables: + tas_rtnt: &ecs_settings + short_name: tas + preprocessor: spatial_mean + project: CMIP5 + ensemble: r1i1p1 + mip: Amon + additional_datasets: &datasets_cmip5_rtnt + - {dataset: ACCESS1-0, exp: piControl, start_year: 300, end_year: 449} + - {dataset: ACCESS1-0, exp: abrupt4xCO2, start_year: 300, end_year: 449} + - {dataset: bcc-csm1-1, exp: piControl, start_year: 160, end_year: 309} + - {dataset: bcc-csm1-1, exp: abrupt4xCO2, start_year: 160, end_year: 309} + # - {dataset: bcc-csm1-1-m, exp: piControl, start_year: 240, end_year: 389} + # - {dataset: bcc-csm1-1-m, exp: abrupt4xCO2, start_year: 240, end_year: 389} + - {dataset: CanESM2, exp: piControl, start_year: 2321, end_year: 2470} + - {dataset: CanESM2, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + # Wrong start year for piControl? (branch_time = 2.) 
+ - {dataset: CCSM4, exp: piControl, start_year: 250, end_year: 399} + - {dataset: CCSM4, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM5, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM5, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: CSIRO-Mk3-6-0, exp: piControl, start_year: 104, end_year: 253} + - {dataset: CSIRO-Mk3-6-0, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: GFDL-CM3, exp: piControl, start_year: 1, end_year: 150} + - {dataset: GFDL-CM3, exp: abrupt4xCO2, start_year: 1, end_year: 150} + # - {dataset: GFDL-ESM2G, exp: piControl, start_year: 1, end_year: 150} + # - {dataset: GFDL-ESM2G, exp: abrupt4xCO2, start_year: 1, end_year: 150} + # - {dataset: GFDL-ESM2M, exp: piControl, start_year: 1, end_year: 150} + # - {dataset: GFDL-ESM2M, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: GISS-E2-H, exp: piControl, start_year: 2660, end_year: 2809} + - {dataset: GISS-E2-H, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: GISS-E2-R, exp: piControl, start_year: 4200, end_year: 4349} + - {dataset: GISS-E2-R, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + # Experiments start at 1859-12-01 + # - {dataset: HadGEM2-ES, exp: piControl, start_year: 1860, end_year: 2009} + # - {dataset: HadGEM2-ES, exp: abrupt4xCO2, start_year: 1860, end_year: 2009} + # - {dataset: inmcm4, exp: piControl, start_year: 2090, end_year: 2239} + # - {dataset: inmcm4, exp: abrupt4xCO2, start_year: 2090, end_year: 2239} + - {dataset: IPSL-CM5B-LR, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: IPSL-CM5B-LR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: MIROC5, exp: piControl, start_year: 2100, end_year: 2249} + - {dataset: MIROC5, exp: abrupt4xCO2, start_year: 2100, end_year: 2249} + - {dataset: MIROC-ESM, exp: piControl, start_year: 1880, end_year: 2029} + - {dataset: MIROC-ESM, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: MPI-ESM-LR, exp: piControl, start_year: 1880, end_year: 2029} + - {dataset: MPI-ESM-LR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM-P, exp: piControl, start_year: 1866, end_year: 2015} + - {dataset: MPI-ESM-P, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + # - {dataset: MRI-CGCM3, exp: piControl, start_year: 1891, end_year: 2040} + # - {dataset: MRI-CGCM3, exp: abrupt4xCO2, start_year: 1851, end_year: 2000} + - {dataset: NorESM1-M, exp: piControl, start_year: 700, end_year: 849} + - {dataset: NorESM1-M, exp: abrupt4xCO2, start_year: 1, end_year: 150} + tas_rtmt: + <<: *ecs_settings + # additional_datasets: &datasets_cmip5_rtmt + # - {dataset: IPSL-CM5A-LR, exp: piControl, start_year: 1850, end_year: 1999} + # - {dataset: IPSL-CM5A-LR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + rtnt: + <<: *ecs_settings + short_name: rtnt + derive: true + additional_datasets: *datasets_cmip5_rtnt + rtmt: + <<: *ecs_settings + short_name: rtmt + # additional_datasets: *datasets_cmip5_rtmt + scripts: + ecs: &ecs_script + script: climate_metrics/ecs.py + + tcr_cmip5: + description: Calculate TCR for CMIP5 models. 
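A short orientation on what climate_metrics/tcr.py computes from the 1pctCO2/piControl pairs listed below: the transient climate response is the global-mean tas anomaly averaged over years 61-80, centred on the CO2 doubling at year 70 (1.01**70 is approximately 2), which is why every dataset entry spans 140 years. A minimal sketch of that definition, assuming (as I read the script) that control drift is removed with a linear fit:

```python
import numpy as np

def tcr(tas_1pct, tas_picontrol):
    """Transient climate response from annual global-mean tas [K].

    Both inputs are length-140 arrays covering the same model years.
    """
    years = np.arange(140)
    # Remove control drift: subtract a linear fit of the piControl run
    slope, intercept = np.polyfit(years, tas_picontrol, 1)
    anomaly = tas_1pct - (slope * years + intercept)
    # TCR is the anomaly averaged over years 61-80 (indices 60:80)
    return anomaly[60:80].mean()
```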
+ themes: + - phys + realms: + - atmos + variables: + 1pctCO2: &tcr_settings + short_name: tas + preprocessor: spatial_mean + project: CMIP5 + ensemble: r1i1p1 + exp: 1pctCO2 + mip: Amon + additional_datasets: + - {dataset: ACCESS1-0, start_year: 300, end_year: 439} + - {dataset: bcc-csm1-1, start_year: 160, end_year: 299} + # - {dataset: bcc-csm1-1-m, start_year: 240, end_year: 379} + - {dataset: CanESM2, start_year: 1850, end_year: 1989} + - {dataset: CCSM4, start_year: 1850, end_year: 1989} + - {dataset: CNRM-CM5, start_year: 1850, end_year: 1989} + - {dataset: CSIRO-Mk3-6-0, start_year: 1, end_year: 140} + - {dataset: GFDL-CM3, start_year: 1, end_year: 140} + - {dataset: GFDL-ESM2G, start_year: 1, end_year: 140} + - {dataset: GFDL-ESM2M, start_year: 1, end_year: 140} + - {dataset: GISS-E2-H, start_year: 1850, end_year: 1989} + - {dataset: GISS-E2-R, start_year: 1850, end_year: 1989} + # Experiments start at 1859-12-01 + - {dataset: HadGEM2-ES, start_year: 1860, end_year: 1999} + # - {dataset: inmcm4, start_year: 2090, end_year: 2229} + - {dataset: IPSL-CM5A-LR, start_year: 1850, end_year: 1989} + - {dataset: IPSL-CM5B-LR, start_year: 1850, end_year: 1989} + - {dataset: MIROC5, start_year: 2200, end_year: 2339} + - {dataset: MIROC-ESM, start_year: 1, end_year: 140} + - {dataset: MPI-ESM-LR, start_year: 1850, end_year: 1989} + - {dataset: MPI-ESM-P, start_year: 1850, end_year: 1989} + - {dataset: MRI-CGCM3, start_year: 1851, end_year: 1990} + - {dataset: NorESM1-M, start_year: 1, end_year: 140} + piControl: + <<: *tcr_settings + exp: piControl + additional_datasets: + - {dataset: ACCESS1-0, start_year: 300, end_year: 439} + - {dataset: bcc-csm1-1, start_year: 160, end_year: 299} + # - {dataset: bcc-csm1-1-m, start_year: 240, end_year: 379} + - {dataset: CanESM2, start_year: 2321, end_year: 2460} + - {dataset: CCSM4, start_year: 251, end_year: 390} + - {dataset: CNRM-CM5, start_year: 1850, end_year: 1989} + - {dataset: CSIRO-Mk3-6-0, start_year: 104, end_year: 243} + - {dataset: GFDL-CM3, start_year: 1, end_year: 140} + - {dataset: GFDL-ESM2G, start_year: 1, end_year: 140} + - {dataset: GFDL-ESM2M, start_year: 1, end_year: 140} + - {dataset: GISS-E2-H, start_year: 2410, end_year: 2549} + - {dataset: GISS-E2-R, start_year: 3981, end_year: 4120} + # Experiments start at 1859-12-01 + - {dataset: HadGEM2-ES, start_year: 1860, end_year: 1999} + # - {dataset: inmcm4, start_year: 2090, end_year: 2229} + - {dataset: IPSL-CM5A-LR, start_year: 1850, end_year: 1989} + - {dataset: IPSL-CM5B-LR, start_year: 1850, end_year: 1989} + - {dataset: MIROC5, start_year: 2200, end_year: 2339} + - {dataset: MIROC-ESM, start_year: 1880, end_year: 2019} + - {dataset: MPI-ESM-LR, start_year: 1880, end_year: 2019} + - {dataset: MPI-ESM-P, start_year: 1866, end_year: 2005} + - {dataset: MRI-CGCM3, start_year: 1891, end_year: 2030} + - {dataset: NorESM1-M, start_year: 700, end_year: 839} + scripts: + tcr: &tcr_script + script: climate_metrics/tcr.py + + fig09-42a_cmip5: + title: Fig. 9.42a CMIP5 (ECS vs. GMSAT) + description: Plot ECS vs. GMSAT for CMIP5 models. 
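The ECS values feeding this plot come from climate_metrics/ecs.py via the ecs_cmip5 diagnostic above, which (as I understand it) regresses the net TOA flux anomaly (rtnt, or rtmt for models where rtnt cannot be derived) against the tas anomaly of abrupt4xCO2 minus piControl, in the manner of Gregory et al. (2004). A hedged sketch of that regression:

```python
import numpy as np

def gregory_ecs(dtas, drtnt):
    """ECS via the Gregory et al. (2004) regression.

    dtas  : annual global-mean tas anomaly, abrupt4xCO2 - piControl [K]
    drtnt : matching net TOA flux anomaly [W m-2]
    """
    # Regress N = F + lambda * dT; the slope lambda is negative and the
    # intercept F estimates the 4xCO2 forcing
    lam, forcing = np.polyfit(dtas, drtnt, 1)
    # Equilibrium (N = 0) gives dT_4x = -F/lambda; halve it for 2xCO2
    return -forcing / (2.0 * lam)
```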
+ themes: + - phys + realms: + - atmos + variables: + tas: + <<: *ecs_settings + additional_datasets: + - {dataset: ACCESS1-0, exp: piControl, start_year: 300, end_year: 449} + - {dataset: ACCESS1-0, exp: historical, start_year: 1961, end_year: 1990} + # Wrong start year for piControl (must be 407) + - {dataset: bcc-csm1-1, exp: piControl, start_year: 160, end_year: 299} + - {dataset: bcc-csm1-1, exp: historical, start_year: 1961, end_year: 1990} + # Wrong start year for piControl (must be 344) + # - {dataset: bcc-csm1-1-m, exp: piControl, start_year: 240, end_year: 379} + # - {dataset: bcc-csm1-1-m, exp: historical, start_year: 1961, end_year: 1990} + - {dataset: CanESM2, exp: piControl, start_year: 2321, end_year: 2470} + - {dataset: CanESM2, exp: historical, start_year: 1961, end_year: 1990} + - {dataset: CCSM4, exp: piControl, start_year: 937, end_year: 1086} + - {dataset: CCSM4, exp: historical, start_year: 1961, end_year: 1990} + - {dataset: CNRM-CM5, exp: piControl, start_year: 2250, end_year: 2399} + - {dataset: CNRM-CM5, exp: historical, start_year: 1961, end_year: 1990} + - {dataset: CSIRO-Mk3-6-0, exp: piControl, start_year: 81, end_year: 230} + - {dataset: CSIRO-Mk3-6-0, exp: historical, start_year: 1961, end_year: 1990} + - {dataset: GFDL-CM3, exp: piControl, start_year: 1, end_year: 150} + - {dataset: GFDL-CM3, exp: historical, start_year: 1961, end_year: 1990} + # - {dataset: GFDL-ESM2G, exp: piControl, start_year: 162, end_year: 311} + # - {dataset: GFDL-ESM2G, exp: historical, start_year: 1961, end_year: 1990} + # - {dataset: GFDL-ESM2M, exp: piControl, start_year: 162, end_year: 311} + # - {dataset: GFDL-ESM2M, exp: historical, start_year: 1961, end_year: 1990} + - {dataset: GISS-E2-H, exp: piControl, start_year: 2410, end_year: 2559} + - {dataset: GISS-E2-H, exp: historical, start_year: 1961, end_year: 1990} + - {dataset: GISS-E2-R, exp: piControl, start_year: 3981, end_year: 4130} + - {dataset: GISS-E2-R, exp: historical, start_year: 1961, end_year: 1990} + # - {dataset: HadGEM2-ES, exp: piControl, start_year: 1860, end_year: 2009} + # - {dataset: HadGEM2-ES, exp: historical, start_year: 1961, end_year: 1990} + # - {dataset: inmcm4, exp: piControl, start_year: 1850, end_year: 1999} + # - {dataset: inmcm4, exp: historical, start_year: 1961, end_year: 1990} + # - {dataset: IPSL-CM5A-LR, exp: piControl, start_year: 1850, end_year: 1999} + # - {dataset: IPSL-CM5A-LR, exp: historical, start_year: 1961, end_year: 1990} + - {dataset: IPSL-CM5B-LR, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: IPSL-CM5B-LR, exp: historical, start_year: 1961, end_year: 1990} + - {dataset: MIROC5, exp: piControl, start_year: 2411, end_year: 2560} + - {dataset: MIROC5, exp: historical, start_year: 1961, end_year: 1990} + - {dataset: MIROC-ESM, exp: piControl, start_year: 1880, end_year: 2029} + - {dataset: MIROC-ESM, exp: historical, start_year: 1961, end_year: 1990} + - {dataset: MPI-ESM-LR, exp: piControl, start_year: 1880, end_year: 2029} + - {dataset: MPI-ESM-LR, exp: historical, start_year: 1961, end_year: 1990} + # Parent experiment of historical is NOT piControl, but past1000 + - {dataset: MPI-ESM-P, exp: piControl, start_year: 2850, end_year: 2999} + - {dataset: MPI-ESM-P, exp: historical, start_year: 1961, end_year: 1990} + # - {dataset: MRI-CGCM3, exp: piControl, start_year: 1950, end_year: 2099} + # - {dataset: MRI-CGCM3, exp: historical, start_year: 1961, end_year: 1990} + - {dataset: NorESM1-M, exp: piControl, start_year: 700, end_year: 849} + - {dataset: NorESM1-M, 
exp: historical, start_year: 1961, end_year: 1990} + scripts: + fig09-42a: &fig09_42a_script + script: ipcc_ar5/ch09_fig09_42a.py + ancestors: ['tas', 'ecs_cmip5/ecs'] + tas_units: celsius + save: + bbox_inches: tight + orientation: landscape + axes_functions: &axes_functions + set_title: GMSAT vs. ECS for CMIP5 models + set_xlabel: ECS / °C + set_ylabel: GMSAT / °C + set_xlim: [1.5, 6.0] + legend: + kwargs: + loc: center left + bbox_to_anchor: [1.05, 0.5] + borderaxespad: 0.0 + ncol: 2 + + fig09-42b_cmip5: + title: Fig. 9.42b CMIP5 (TCR vs. ECS) + description: Plot TCR vs. ECS for CMIP5 models. + themes: + - phys + realms: + - atmos + scripts: + fig09-42b: &fig09_42b_script + script: ipcc_ar5/ch09_fig09_42b.py + ancestors: ['ecs_cmip5/ecs', 'tcr_cmip5/tcr'] + + ecs_cmip6: + description: Calculate ECS for CMIP6 models. + themes: + - phys + realms: + - atmos + variables: + tas_rtnt: + <<: *ecs_settings + project: CMIP6 + additional_datasets: &datasets_cmip6_rtnt + # No monthly data (parent_time_units not correct) + # - {dataset: AWI-CM-1-1-MR, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 2650, end_year: 2799, mip: day} + # - {dataset: AWI-CM-1-1-MR, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999, mip: day} + - {dataset: BCC-CSM2-MR, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: BCC-CSM2-MR, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: BCC-ESM1, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: BCC-ESM1, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: CAMS-CSM1-0, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 3030, end_year: 3179} + - {dataset: CAMS-CSM1-0, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 3030, end_year: 3179} + - {dataset: CESM2, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150} + - {dataset: CESM2, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150} + - {dataset: CESM2-WACCM, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150, institute: NCAR} + - {dataset: CESM2-WACCM, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150, institute: NCAR} + - {dataset: CNRM-CM6-1, exp: piControl, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM6-1, exp: abrupt-4xCO2, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: CNRM-ESM2-1, exp: piControl, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: CNRM-ESM2-1, exp: abrupt-4xCO2, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: E3SM-1-0, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 101, end_year: 250} + - {dataset: E3SM-1-0, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1, end_year: 150} + # No abrupt-4xCO2 + # - {dataset: EC-Earth3, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} + # - {dataset: EC-Earth3, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: EC-Earth3-Veg, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: EC-Earth3-Veg, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} + # Only mixed ensemble members available, parent_time_units messed up + # - {dataset: FGOALS-f3-L, 
exp: piControl, ensemble: r1i1p1f1, grid: gr1, start_year: 151, end_year: 300} + # - {dataset: FGOALS-f3-L, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr1, start_year: 1, end_year: 150} + # Wrong start year for piControl (must be 101) + - {dataset: GFDL-CM4, exp: piControl, ensemble: r1i1p1f1, grid: gr1, start_year: 151, end_year: 300} + - {dataset: GFDL-CM4, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr1, start_year: 1, end_year: 150} + - {dataset: GFDL-ESM4, exp: piControl, ensemble: r1i1p1f1, grid: gr1, start_year: 101, end_year: 250} + - {dataset: GFDL-ESM4, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr1, start_year: 1, end_year: 150} + - {dataset: GISS-E2-1-G, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 4150, end_year: 4299} + - {dataset: GISS-E2-1-G, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: GISS-E2-1-H, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 3180, end_year: 3329} + - {dataset: GISS-E2-1-H, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: HadGEM3-GC31-LL, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: HadGEM3-GC31-LL, exp: abrupt-4xCO2, ensemble: r1i1p1f3, grid: gn, start_year: 1850, end_year: 1999} + # Wrong start year for piControl (must be 2099), data withdrawn + # - {dataset: INM-CM5-0, exp: piControl, ensemble: r1i1p1f1, grid: gr1, start_year: 2499, end_year: 2648} + # - {dataset: INM-CM5-0, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr1, start_year: 1850, end_year: 1999} + # Not enough years for piControl run, data withdrawn + # - {dataset: INM-CM4-8, exp: piControl, ensemble: r1i1p1f1, grid: gr1, start_year: 1947, end_year: 2096} + # - {dataset: INM-CM4-8, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr1, start_year: 1850, end_year: 1999} + - {dataset: IPSL-CM6A-LR, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 1870, end_year: 2019} + - {dataset: IPSL-CM6A-LR, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: MIROC6, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 3200, end_year: 3349} + - {dataset: MIROC6, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 3200, end_year: 3349} + - {dataset: MIROC-ES2L, exp: piControl, ensemble: r1i1p1f2, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MIROC-ES2L, exp: abrupt-4xCO2, ensemble: r1i1p1f2, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM1-2-HR, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM1-2-HR, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MRI-ESM2-0, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MRI-ESM2-0, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + # parent_time_units not correct + - {dataset: NESM3, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 550, end_year: 699} + - {dataset: NESM3, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + # Issue #286 (manual copying was necessary) + - {dataset: SAM0-UNICON, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 274, end_year: 423} + - {dataset: SAM0-UNICON, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + tas_rtmt: + <<: *ecs_settings + project: CMIP6 + additional_datasets: &datasets_cmip6_rtmt + - {dataset: 
CanESM5, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 5201, end_year: 5350} + - {dataset: CanESM5, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + # No 'piControl' data available for 'rtnt' of 'UKESM1-0-LL' + # - {dataset: UKESM1-0-LL, exp: piControl, ensemble: r1i1p1f2, grid: gn, start_year: 1960, end_year: 2109} + # - {dataset: UKESM1-0-LL, exp: abrupt-4xCO2, ensemble: r1i1p1f2, grid: gn, start_year: 1850, end_year: 1999} + rtnt: + <<: *ecs_settings + short_name: rtnt + project: CMIP6 + derive: true + additional_datasets: *datasets_cmip6_rtnt + rtmt: + <<: *ecs_settings + short_name: rtmt + project: CMIP6 + additional_datasets: *datasets_cmip6_rtmt + scripts: + ecs: + <<: *ecs_script + + tcr_cmip6: + description: Calculate TCR for all CMIP6 models. + variables: + 1pctCO2: + <<: *tcr_settings + project: CMIP6 + additional_datasets: + # No monthly data (parent_time_units not correct) + # - {dataset: AWI-CM-1-1-MR, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989, mip: day} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + # Model fix would be needed for BCC-ESM1 1pctCO2: + # esmvalcore.cmor.check.CMORCheckError: There were errors in variable areacello: + # There are multiple coordinates with standard_name "latitude": ['lat', 'latitude'] + # There are multiple coordinates with standard_name "longitude": ['lon', 'longitude'] + # areacello: does not match coordinate rank + # - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn, start_year: 3030, end_year: 3169} + - {dataset: CanESM5, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 140} + - {dataset: CESM2-WACCM, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 140, institute: NCAR} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1989} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1989} + - {dataset: E3SM-1-0, ensemble: r1i1p1f1, grid: gr, start_year: 1, end_year: 140} + # No 1pctCO2 available + # - {dataset: EC-Earth3, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1989} + - {dataset: EC-Earth3-Veg, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1989} + # Only mixed ensemble members available, parent_time_units messed up + # - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr1, start_year: 1, end_year: 140} + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1, start_year: 1, end_year: 140} + - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1, start_year: 1, end_year: 140} + - {dataset: GISS-E2-1-G, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn, start_year: 1850, end_year: 1989} + # Data withdrawn + # - {dataset: INM-CM4-8, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr1, start_year: 1850, end_year: 1989} + - {dataset: IPSL-CM6A-LR, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1989} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn, start_year: 3200, end_year: 3339} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: MPI-ESM1-2-HR, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: MRI-ESM2-0, 
ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + # parent_time_units not correct, incorrect start year for piControl + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn, start_year: 1850, end_year: 1989} + piControl: + <<: *tcr_settings + project: CMIP6 + exp: piControl + additional_datasets: + # No monthly data (parent_time_units not correct) + # - {dataset: AWI-CM-1-1-MR, ensemble: r1i1p1f1, grid: gn, start_year: 2650, end_year: 2789, mip: day} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + # - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn, start_year: 3030, end_year: 3169} + - {dataset: CanESM5, ensemble: r1i1p1f1, grid: gn, start_year: 5201, end_year: 5340} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn, start_year: 501, end_year: 640} + - {dataset: CESM2-WACCM, ensemble: r1i1p1f1, grid: gn, start_year: 70, end_year: 209, institute: NCAR} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1989} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1989} + - {dataset: E3SM-1-0, ensemble: r1i1p1f1, grid: gr, start_year: 101, end_year: 240} + # No 1pctCO2 available + # - {dataset: EC-Earth3, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1989} + - {dataset: EC-Earth3-Veg, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1989} + # Only mixed ensemble members available, parent_time_units messed up + # - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr1, start_year: 1, end_year: 140} + # Wrong start year (must be 101) + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1, start_year: 151, end_year: 290} + - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1, start_year: 101, end_year: 240} + - {dataset: GISS-E2-1-G, ensemble: r1i1p1f1, grid: gn, start_year: 4150, end_year: 4289} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn, start_year: 3180, end_year: 3319} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + # Data withdrawn + # - {dataset: INM-CM4-8, exp: piControl, ensemble: r1i1p1f1, grid: gr1, start_year: 1947, end_year: 2086} + - {dataset: IPSL-CM6A-LR, ensemble: r1i1p1f1, grid: gr, start_year: 1870, end_year: 2009} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn, start_year: 3200, end_year: 3339} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: MPI-ESM1-2-HR, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + # parent_time_units not correct, incorrect start year for piControl + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn, start_year: 550, end_year: 689} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn, start_year: 274, end_year: 413} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn, start_year: 1960, end_year: 2099} + scripts: + tcr: + <<: *tcr_script + + fig09-42a_cmip6: + title: Fig. 9.42a CMIP6 (ECS vs. GMSAT) + description: Plot ECS vs. GMSAT for CMIP6 models. 
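One note on the derived variable used throughout these ECS diagnostics: rtnt is not reported by the models directly but is built by the derive: true entries from the TOA radiation components, whereas rtmt is the reported net downward flux at the top of the model and is used where rtnt cannot be derived (e.g. CanESM5 above). To the best of my knowledge the derivation is simply:

```latex
% Net TOA radiative flux as derived for the rtnt entries above
\mathrm{rtnt} = \mathrm{rsdt} - \mathrm{rsut} - \mathrm{rlut}
```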
+ themes: + - phys + realms: + - atmos + variables: + tas: + <<: *ecs_settings + project: CMIP6 + additional_datasets: + - {dataset: BCC-CSM2-MR, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: BCC-CSM2-MR, exp: historical, ensemble: r1i1p1f1, grid: gn, start_year: 1961, end_year: 1990} + - {dataset: BCC-ESM1, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 2110, end_year: 2259} + - {dataset: BCC-ESM1, exp: historical, ensemble: r1i1p1f1, grid: gn, start_year: 1961, end_year: 1990} + - {dataset: CAMS-CSM1-0, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 3025, end_year: 3174} + - {dataset: CAMS-CSM1-0, exp: historical, ensemble: r1i1p1f1, grid: gn, start_year: 1961, end_year: 1990} + - {dataset: CanESM5, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 5201, end_year: 5350} + - {dataset: CanESM5, exp: historical, ensemble: r1i1p1f1, grid: gn, start_year: 1961, end_year: 1990} + - {dataset: CESM2, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150} + - {dataset: CESM2, exp: historical, ensemble: r1i1p1f1, grid: gn, start_year: 1961, end_year: 1990} + - {dataset: CESM2-WACCM, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150, institute: NCAR} + - {dataset: CESM2-WACCM, exp: historical, ensemble: r1i1p1f1, grid: gn, start_year: 1961, end_year: 1990, institute: NCAR} + - {dataset: CNRM-CM6-1, exp: piControl, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM6-1, exp: historical, ensemble: r1i1p1f2, grid: gr, start_year: 1961, end_year: 1990} + - {dataset: CNRM-ESM2-1, exp: piControl, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: CNRM-ESM2-1, exp: historical, ensemble: r1i1p1f2, grid: gr, start_year: 1961, end_year: 1990} + - {dataset: E3SM-1-0, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 101, end_year: 250} + - {dataset: E3SM-1-0, exp: historical, ensemble: r1i1p1f1, grid: gr, start_year: 1961, end_year: 1990} + - {dataset: EC-Earth3-Veg, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 2030, end_year: 2179} + - {dataset: EC-Earth3-Veg, exp: historical, ensemble: r1i1p1f1, grid: gr, start_year: 1961, end_year: 1990} + # Wrong start year (must be 101) + - {dataset: GFDL-CM4, exp: piControl, ensemble: r1i1p1f1, grid: gr1, start_year: 151, end_year: 300} + - {dataset: GFDL-CM4, exp: historical, ensemble: r1i1p1f1, grid: gr1, start_year: 1961, end_year: 1990} + - {dataset: GFDL-ESM4, exp: piControl, ensemble: r1i1p1f1, grid: gr1, start_year: 101, end_year: 250} + - {dataset: GFDL-ESM4, exp: historical, ensemble: r1i1p1f1, grid: gr1, start_year: 1961, end_year: 1990} + - {dataset: GISS-E2-1-G, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 4150, end_year: 4299} + - {dataset: GISS-E2-1-G, exp: historical, ensemble: r1i1p1f1, grid: gn, start_year: 1961, end_year: 1990} + - {dataset: GISS-E2-1-H, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 3180, end_year: 3329} + - {dataset: GISS-E2-1-H, exp: historical, ensemble: r1i1p1f1, grid: gn, start_year: 1961, end_year: 1990} + - {dataset: HadGEM3-GC31-LL, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: HadGEM3-GC31-LL, exp: historical, ensemble: r1i1p1f3, grid: gn, start_year: 1961, end_year: 1990} + # Wrong start year for piControl (must be 2099), data withdrawn + # - {dataset: INM-CM5-0, exp: piControl, ensemble: r1i1p1f1, grid: gr1, start_year: 2499, end_year: 2648} + # - 
{dataset: INM-CM5-0, exp: historical, ensemble: r1i1p1f1, grid: gr1, start_year: 1961, end_year: 1990} + # Not enough years for piControl run + # - {dataset: INM-CM4-8, exp: piControl, ensemble: r1i1p1f1, grid: gr1, start_year: 1947, end_year: 2096} + # - {dataset: INM-CM4-8, exp: historical, ensemble: r1i1p1f1, grid: gr1, start_year: 1961, end_year: 1990} + - {dataset: IPSL-CM6A-LR, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: IPSL-CM6A-LR, exp: historical, ensemble: r1i1p1f1, grid: gr, start_year: 1961, end_year: 1990} + - {dataset: MIROC6, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 3200, end_year: 3349} + - {dataset: MIROC6, exp: historical, ensemble: r1i1p1f1, grid: gn, start_year: 1961, end_year: 1990} + - {dataset: MIROC-ES2L, exp: piControl, ensemble: r1i1p1f2, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MIROC-ES2L, exp: historical, ensemble: r1i1p1f2, grid: gn, start_year: 1961, end_year: 1990} + - {dataset: MPI-ESM1-2-HR, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM1-2-HR, exp: historical, ensemble: r1i1p1f1, grid: gn, start_year: 1961, end_year: 1990} + - {dataset: MRI-ESM2-0, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MRI-ESM2-0, exp: historical, ensemble: r1i1p1f1, grid: gn, start_year: 1961, end_year: 1990} + # parent_time_units not correct (must start in 1300) + - {dataset: NESM3, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 550, end_year: 699} + - {dataset: NESM3, exp: historical, ensemble: r1i1p1f1, grid: gn, start_year: 1961, end_year: 1990} + - {dataset: SAM0-UNICON, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 274, end_year: 423} + - {dataset: SAM0-UNICON, exp: historical, ensemble: r1i1p1f1, grid: gn, start_year: 1961, end_year: 1990} + # - {dataset: UKESM1-0-LL, exp: piControl, ensemble: r1i1p1f2, grid: gn, start_year: 2250, end_year: 2399} + # - {dataset: UKESM1-0-LL, exp: historical, ensemble: r1i1p1f2, grid: gn, start_year: 1961, end_year: 1990} + scripts: + fig09-42a: + <<: *fig09_42a_script + ancestors: ['tas', 'ecs_cmip6/ecs'] + axes_functions: + <<: *axes_functions + set_title: GMSAT vs. ECS for CMIP6 models + dataset_style: cmip6 + + fig09-42b_cmip6: + title: Fig. 9.42b CMIP6 (TCR vs. ECS) + description: Plot TCR vs. ECS for CMIP6 models. + themes: + - phys + realms: + - atmos + scripts: + fig09-42b: + <<: *fig09_42b_script + ancestors: ['ecs_cmip6/ecs', 'tcr_cmip6/tcr'] + dataset_style: cmip6 diff --git a/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figure_945a.yml b/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figure_945a.yml new file mode 100644 index 0000000000..98a1b3365d --- /dev/null +++ b/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figure_945a.yml @@ -0,0 +1,108 @@ +# ESMValTool +# recipe_flato13ipcc_figure945a.yml +--- +documentation: + title: IPCC AR5, chap. 9 diagnostics for 9.45a + + description: | + Reproducing selected Figure 9.45a from IPCC AR5, chap. 9 (Flato et al., 2013). + Data sets taken out were part of the original publication but are + currently not available on ESGF. + + authors: + - bock_lisa + - gier_bettina + - lauer_axel + - schlund_manuel + - senftleben_daniel + - zimmermann_klaus + + maintainer: + - lauer_axel + + references: + - flato13ipcc + + projects: + - embrace + - esmval + - crescendo + + +diagnostics: + + # *********************************************************************** + # Flato et al. 
(2013) - IPCC AR5, chap. 9 + # similar to fig. 9.45a + # *********************************************************************** + # Scatterplot of springtime snow-albedo effect values in climate + # change vs. springtime d(alpha_s)/d(T_s) values in the seasonal + # cycle in transient climate change experiments. + # *********************************************************************** + + fig09-45a: + title: Fig. 9.45a (snow-albedo feedback) + description: springtime snow-albedo feedback values vs. seasonal cycle + themes: + - EC + realms: + - atmos + variables: + alb: + mip: Amon + project: CMIP5 + ensemble: r1i1p1 + reference_dataset: ISCCP-FH + derive: true + additional_datasets: + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, start_year: 1984, end_year: 2000, tier: 2} + tas: + mip: Amon + project: CMIP5 + ensemble: r1i1p1 + reference_dataset: ERA-Interim + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, start_year: 1984, end_year: 2000, tier: 3} + rsdt: + mip: Amon + project: CMIP5 + ensemble: r1i1p1 + reference_dataset: ISCCP-FH + additional_datasets: + - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, start_year: 1984, end_year: 2000, tier: 2} + additional_datasets: + - {dataset: bcc-csm1-1, exp: historical, start_year: 1901, end_year: 2000} + - {dataset: bcc-csm1-1, exp: rcp45, start_year: 2101, end_year: 2200} + - {dataset: CanESM2, exp: historical, start_year: 1901, end_year: 2000} + - {dataset: CanESM2, exp: rcp45, start_year: 2101, end_year: 2200} + # - {dataset: CCSM4, exp: historical, start_year: 1901, end_year: 2000} + # - {dataset: CCSM4, exp: rcp45, start_year: 2101, end_year: 2200} + - {dataset: CNRM-CM5, exp: historical, start_year: 1901, end_year: 2000} + - {dataset: CNRM-CM5, exp: rcp45, start_year: 2101, end_year: 2200} + - {dataset: CSIRO-Mk3-6-0, exp: historical, start_year: 1901, end_year: 2000} + - {dataset: CSIRO-Mk3-6-0, exp: rcp45, start_year: 2101, end_year: 2200} + # - {dataset: GFDL-CM3, exp: historical, start_year: 1901, end_year: 2000} + # - {dataset: GFDL-CM3, exp: rcp45, start_year: 2101, end_year: 2200} + - {dataset: GISS-E2-H, exp: historical, start_year: 1901, end_year: 2000} + - {dataset: GISS-E2-H, exp: rcp45, start_year: 2101, end_year: 2200} + - {dataset: GISS-E2-R, exp: historical, start_year: 1901, end_year: 2000} + - {dataset: GISS-E2-R, exp: rcp45, start_year: 2101, end_year: 2200} + - {dataset: IPSL-CM5A-LR, exp: historical, start_year: 1901, end_year: 2000} + - {dataset: IPSL-CM5A-LR, exp: rcp45, start_year: 2101, end_year: 2200} + - {dataset: MIROC-ESM, exp: historical, start_year: 1901, end_year: 2000} + - {dataset: MIROC-ESM, exp: rcp45, start_year: 2101, end_year: 2200} + - {dataset: MPI-ESM-LR, exp: historical, start_year: 1901, end_year: 2000} + - {dataset: MPI-ESM-LR, exp: rcp45, start_year: 2101, end_year: 2200} + - {dataset: NorESM1-M, exp: historical, start_year: 1901, end_year: 2000} + - {dataset: NorESM1-M, exp: rcp45, start_year: 2101, end_year: 2200} + scripts: + fig09_45a: + script: emergent_constraints/snowalbedo.ncl + exp_presentday: historical + exp_future: rcp45 + legend_outside: false + xmin: -1.7 + xmax: -0.3 + ymin: -1.7 + ymax: -0.3 + styleset: "CMIP5" diff --git a/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figure_96.yml b/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figure_96.yml new file mode 100644 index 0000000000..2c473b17cb --- /dev/null +++ b/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figure_96.yml @@ -0,0 +1,397 @@ +# 
ESMValTool +# recipe_flato13ipcc_figure_96.yml +--- +documentation: + title: IPCC AR5, chap. 9 diagnostics for 9.6 + + description: | + Reproducing Fig. 9.6 from IPCC AR5, chap. 9 (Flato et al., 2013). + Processing of CMIP3 models currently works only in serial mode, due to + an issue in the input data still under investigation. CMIP3 models + are used in diagnostics for Fig. 9.6 in this recipe. + Data sets taken out were part of the original publication but are + currently not available on ESGF. + + authors: + - bock_lisa + - gier_bettina + - lauer_axel + - schlund_manuel + - senftleben_daniel + - zimmermann_klaus + + maintainer: + - lauer_axel + + references: + - flato13ipcc + + projects: + - embrace + - esmval + - crescendo + +preprocessors: + + regrid_4_5: + regrid: + target_grid: 4x5 + scheme: linear + +diagnostics: + + # *************************************************************************** + # Flato et al. (2013) - IPCC AR5, chap. 9 + # similar to fig. 9.6 + # *************************************************************************** + # Centred pattern correlations between models and observations for + # the annual mean climatology over the period 1980-1999. + # *************************************************************************** + + fig09-6_tas: + title: Fig. 9.6 (near-surface temperature) + description: Calculate pattern correlation for tas + variables: + tas: + preprocessor: regrid_4_5 + reference_dataset: ERA-Interim + alternative_dataset: NCEP-NCAR-R1 + project: CMIP5 + exp: historical + ensemble: r1i1p1 + mip: Amon + field: T2Ms + start_year: 1980 + end_year: 1999 + additional_datasets: + - &cmip3 {dataset: bccr_bcm2_0, institute: BCCR, project: CMIP3, mip: A1, modeling_realm: atm, exp: 20c3m, frequency: mo, ensemble: run1} + - {<<: *cmip3, dataset: cccma_cgcm3_1, institute: CCCMA} + - {<<: *cmip3, dataset: cccma_cgcm3_1_t63, institute: CCCMA} + - {<<: *cmip3, dataset: csiro_mk3_0, institute: CSIRO} + - {<<: *cmip3, dataset: giss_aom, institute: NASA} + - {<<: *cmip3, dataset: giss_model_e_h, institute: NASA} + - {<<: *cmip3, dataset: giss_model_e_r, institute: NASA} + - {<<: *cmip3, dataset: iap_fgoals1_0_g, institute: LASG} + - {<<: *cmip3, dataset: ingv_echam4, institute: INGV} + - {<<: *cmip3, dataset: inmcm3_0, institute: INM} + - {<<: *cmip3, dataset: ipsl_cm4, institute: IPSL} + - {<<: *cmip3, dataset: miroc3_2_hires, institute: NIES} + - {<<: *cmip3, dataset: miroc3_2_medres, institute: NIES} + - {<<: *cmip3, dataset: mpi_echam5, institute: MPIM} + - {<<: *cmip3, dataset: ncar_ccsm3_0, institute: NCAR} + - {<<: *cmip3, dataset: ncar_pcm1, institute: NCAR} + - {<<: *cmip3, dataset: ukmo_hadcm3, institute: UKMO} + - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO} + + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + # - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + # - {dataset: inmcm4} + - {dataset: 
IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, version: 1, tier: 2} + scripts: + fig09-6_pattern_cor: &fig09_6_pattern_cor + script: ipcc_ar5/ch09_fig09_6.ncl + + fig09-6_rlut: + title: Fig. 9.6 (TOA longwave radiation) + description: Calculate pattern correlation for all-sky longwave radiation + variables: + rlut: + preprocessor: regrid_4_5 + reference_dataset: CERES-EBAF + project: CMIP5 + exp: historical + ensemble: r1i1p1 + mip: Amon + field: T2Ms + start_year: 1980 + end_year: 1999 + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {<<: *cmip3, dataset: cccma_cgcm3_1, institute: CCCMA} + - {<<: *cmip3, dataset: cccma_cgcm3_1_t63, institute: CCCMA} + - {<<: *cmip3, dataset: csiro_mk3_0, institute: CSIRO} + - {<<: *cmip3, dataset: giss_aom, institute: NASA} + - {<<: *cmip3, dataset: giss_model_e_h, institute: NASA} + - {<<: *cmip3, dataset: giss_model_e_r, institute: NASA} + - {<<: *cmip3, dataset: iap_fgoals1_0_g, institute: LASG} + - {<<: *cmip3, dataset: ingv_echam4, institute: INGV} + - {<<: *cmip3, dataset: inmcm3_0, institute: INM} + - {<<: *cmip3, dataset: ipsl_cm4, institute: IPSL} + - {<<: *cmip3, dataset: miroc3_2_hires, institute: NIES} + - {<<: *cmip3, dataset: miroc3_2_medres, institute: NIES} + - {<<: *cmip3, dataset: mpi_echam5, institute: MPIM} + - {<<: *cmip3, dataset: ncar_ccsm3_0, institute: NCAR} + - {<<: *cmip3, dataset: ncar_pcm1, institute: NCAR} + - {<<: *cmip3, dataset: ukmo_hadcm3, institute: UKMO} + - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO} + + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + # - {dataset: bcc-csm1-1} + # - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + # - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, tier: 1, start_year: 2003, end_year: 2011} + scripts: + fig09-6_pattern_cor: + <<: *fig09_6_pattern_cor + + fig09-6_pr: + title: Fig. 
9.6 (precipitation) + description: Calculate pattern correlation for precipitation + variables: + pr: + preprocessor: regrid_4_5 + reference_dataset: GPCP-SG + alternative_dataset: GHCN + project: CMIP5 + exp: historical + ensemble: r1i1p1 + mip: Amon + field: T2Ms + start_year: 1980 + end_year: 1999 + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {<<: *cmip3, dataset: cccma_cgcm3_1, institute: CCCMA} + - {<<: *cmip3, dataset: cccma_cgcm3_1_t63, institute: CCCMA} + - {<<: *cmip3, dataset: csiro_mk3_0, institute: CSIRO} + - {<<: *cmip3, dataset: giss_aom, institute: NASA} + - {<<: *cmip3, dataset: giss_model_e_h, institute: NASA} + - {<<: *cmip3, dataset: giss_model_e_r, institute: NASA} + - {<<: *cmip3, dataset: iap_fgoals1_0_g, institute: LASG} + - {<<: *cmip3, dataset: ingv_echam4, institute: INGV} + - {<<: *cmip3, dataset: inmcm3_0, institute: INM} + - {<<: *cmip3, dataset: ipsl_cm4, institute: IPSL} + - {<<: *cmip3, dataset: miroc3_2_hires, institute: NIES} + - {<<: *cmip3, dataset: miroc3_2_medres, institute: NIES} + - {<<: *cmip3, dataset: mpi_echam5, institute: MPIM} + - {<<: *cmip3, dataset: ncar_ccsm3_0, institute: NCAR} + - {<<: *cmip3, dataset: ncar_pcm1, institute: NCAR} + - {<<: *cmip3, dataset: ukmo_hadcm3, institute: UKMO} + - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO} + + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + # - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + # - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + + - {dataset: GPCP-SG, project: obs4MIPs, level: L3, version: v2.2, tier: 1} + - {dataset: GHCN, project: OBS, type: ground, version: 1, tier: 2} + scripts: + fig09-6_pattern_cor: + <<: *fig09_6_pattern_cor + + fig09-6_swcre: + title: Fig. 
9.6 (shortwave CRE)
+ description: Calculate pattern correlation for shortwave cloud radiative effect
+ variables:
+ swcre:
+ preprocessor: regrid_4_5
+ reference_dataset: CERES-EBAF
+ project: CMIP5
+ exp: historical
+ ensemble: r1i1p1
+ mip: Amon
+ field: T2Ms
+ start_year: 1980
+ end_year: 1999
+ derive: true
+ force_derivation: false
+ additional_datasets:
+ - {<<: *cmip3, dataset: cccma_cgcm3_1, institute: CCCMA}
+ - {<<: *cmip3, dataset: csiro_mk3_0, institute: CSIRO}
+ - {<<: *cmip3, dataset: giss_model_e_h, institute: NASA}
+ - {<<: *cmip3, dataset: giss_model_e_r, institute: NASA}
+ - {<<: *cmip3, dataset: iap_fgoals1_0_g, institute: LASG}
+ - {<<: *cmip3, dataset: inmcm3_0, institute: INM}
+ - {<<: *cmip3, dataset: ipsl_cm4, institute: IPSL}
+ - {<<: *cmip3, dataset: miroc3_2_hires, institute: NIES}
+ - {<<: *cmip3, dataset: miroc3_2_medres, institute: NIES}
+ - {<<: *cmip3, dataset: mpi_echam5, institute: MPIM}
+ - {<<: *cmip3, dataset: ncar_ccsm3_0, institute: NCAR}
+ - {<<: *cmip3, dataset: ncar_pcm1, institute: NCAR}
+ - {<<: *cmip3, dataset: ukmo_hadcm3, institute: UKMO}
+ - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO}
+
+ - {dataset: ACCESS1-0}
+ - {dataset: ACCESS1-3}
+ - {dataset: bcc-csm1-1}
+ # - {dataset: bcc-csm1-1-m}
+ - {dataset: BNU-ESM}
+ - {dataset: CanESM2}
+ - {dataset: CCSM4}
+ - {dataset: CESM1-BGC}
+ - {dataset: CESM1-CAM5}
+ - {dataset: CESM1-FASTCHEM}
+ - {dataset: CESM1-WACCM}
+ - {dataset: CMCC-CESM}
+ - {dataset: CMCC-CM}
+ - {dataset: CNRM-CM5}
+ - {dataset: CNRM-CM5-2}
+ - {dataset: FGOALS-g2}
+ - {dataset: FIO-ESM}
+ - {dataset: GFDL-CM3}
+ - {dataset: GFDL-ESM2G}
+ - {dataset: GFDL-ESM2M}
+ - {dataset: GISS-E2-H}
+ - {dataset: GISS-E2-H-CC}
+ - {dataset: GISS-E2-R}
+ - {dataset: GISS-E2-R-CC}
+ - {dataset: HadCM3}
+ - {dataset: HadGEM2-AO}
+ - {dataset: HadGEM2-CC}
+ - {dataset: HadGEM2-ES}
+ # - {dataset: inmcm4}
+ - {dataset: IPSL-CM5A-LR}
+ - {dataset: IPSL-CM5A-MR}
+ - {dataset: IPSL-CM5B-LR}
+ - {dataset: MIROC-ESM}
+ - {dataset: MIROC-ESM-CHEM}
+ - {dataset: MIROC4h}
+ - {dataset: MIROC5}
+ - {dataset: MPI-ESM-LR}
+ - {dataset: MPI-ESM-MR}
+ - {dataset: MPI-ESM-P}
+ - {dataset: MRI-CGCM3}
+ - {dataset: MRI-ESM1}
+ - {dataset: NorESM1-M}
+ - {dataset: NorESM1-ME}
+
+ - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, tier: 1, start_year: 2003, end_year: 2011}
+ scripts:
+ fig09-6_pattern_cor:
+ <<: *fig09_6_pattern_cor
+
+ ### COLLECT CORRELATIONS AND PLOT ###########################################
+ fig09-6_cor_collect:
+ description: Wrapper to collect and plot previously calculated correlations
+ scripts:
+ fig09-6_cor_collect:
+ script: ipcc_ar5/ch09_fig09_6_collect.ncl
+ ancestors: ['*/fig09-6_pattern_cor']
+ diag_order: ['fig09-6_tas', 'fig09-6_rlut', 'fig09-6_pr']
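These recipes lean heavily on YAML anchors and merge keys: `&name` stores a node where it is first written, and `<<: *name` merges it into a later mapping, with explicitly written keys overriding the merged ones. The same mechanism shares the `&fig09_6_pattern_cor` script settings between diagnostics, whose outputs the `fig09-6_cor_collect` wrapper above then gathers via the `ancestors` wildcard. A minimal sketch of the expansion, reusing dataset names that already appear above:

```yaml
additional_datasets:
  # The anchor records the keys shared by all CMIP3 entries ...
  - &cmip3 {dataset: bccr_bcm2_0, institute: BCCR, project: CMIP3, mip: A1,
            modeling_realm: atm, exp: 20c3m, frequency: mo, ensemble: run1}
  # ... and the merge key copies them in; the explicit dataset/institute
  # keys win, so only those two values differ from the entry above.
  - {<<: *cmip3, dataset: cccma_cgcm3_1, institute: CCCMA}
```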
diff --git a/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figure_98.yml b/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figure_98.yml new file mode 100644 index 0000000000..7390d12d36 --- /dev/null +++ b/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figure_98.yml @@ -0,0 +1,151 @@
+# ESMValTool
+# recipe_flato13ipcc_figure_98.yml
+---
+documentation:
+ title: IPCC AR5, chap. 9 diagnostics for 9.8
+
+ description: |
+ Reproducing Fig. 9.8 from IPCC AR5, chap. 9 (Flato et al., 2013).
+ Data sets taken out were part of the original publication but are
+ currently not available on ESGF.
+
+ authors:
+ - bock_lisa
+ - gier_bettina
+ - lauer_axel
+ - schlund_manuel
+ - senftleben_daniel
+ - zimmermann_klaus
+
+ maintainer:
+ - lauer_axel
+
+ references:
+ - flato13ipcc
+
+ projects:
+ - embrace
+ - esmval
+ - crescendo
+
+
+preprocessors:
+
+ clim_ref:
+ regrid:
+ target_grid: reference_dataset
+ scheme: linear
+ multi_model_statistics:
+ span: overlap
+ statistics: [mean]
+ exclude: [reference_dataset]
+
+
+diagnostics:
+
+ # **********************************************************************
+ # Flato et al. (2013) - IPCC AR5, chap. 9
+ # similar to fig. 9.8
+ # **********************************************************************
+ # Time series of anomalies of annual and global surface temperature
+ # **********************************************************************
+
+ fig09-8:
+ title: Fig. 9.8 (near-surface temperature)
+ description: IPCC AR5 Ch. 9, Fig. 9.8 (near-surface temperature)
+ themes:
+ - phys
+ realms:
+ - atmos
+ variables:
+ tas:
+ preprocessor: clim_ref
+ reference_dataset: HadCRUT4
+ mip: Amon
+ project: CMIP5
+ exp: [historical, rcp45]
+ ensemble: r1i1p1
+ start_year: 1870
+ end_year: 2017
+ additional_datasets:
+ - {dataset: ACCESS1-0}
+ - {dataset: ACCESS1-3}
+ # Historical data end year is 2012, not 2005 (overlaps with RCP4.5)
+ # - {dataset: bcc-csm1-1}
+ # Historical data end year is 2012, not 2005 (overlaps with RCP4.5)
+ # - {dataset: bcc-csm1-1-m}
+ - {dataset: BNU-ESM}
+ # - {dataset: CanCM4}
+ # - {dataset: CanESM2}
+ # rcp starts with year 2005 - not 2006
+ # - {dataset: CCSM4}
+ - {dataset: CESM1-BGC}
+ - {dataset: CESM1-CAM5}
+ # - {dataset: CESM1-CAM5-1-FV2}
+ # no rcp45 at DKRZ
+ # - {dataset: CESM1-FASTCHEM}
+ # no historical and rcp45 from the same experiment
+ # - {dataset: CESM1-WACCM}
+ # no rcp45 at DKRZ
+ # - {dataset: CMCC-CESM}
+ - {dataset: CMCC-CM}
+ - {dataset: CMCC-CMS}
+ # no rcp45 at DKRZ
+ # - {dataset: CNRM-CM5}
+ # no rcp45 at DKRZ
+ # - {dataset: CNRM-CM5-2}
+ - {dataset: CSIRO-Mk3-6-0}
+ # - {dataset: EC-EARTH, ensemble: r6i1p1}
+ # - {dataset: FGOALS-g2}
+ # - {dataset: FGOALS-s2}
+ - {dataset: FIO-ESM}
+ # Historical data end year is 2040, not 2005 (overlaps with RCP4.5)
+ # - {dataset: GFDL-CM2p1}
+ - {dataset: GFDL-CM3}
+ - {dataset: GFDL-ESM2G}
+ - {dataset: GFDL-ESM2M}
+ - {dataset: GISS-E2-H, ensemble: r1i1p2}
+ # Historical data end year is 2010, not 2005 (overlaps with RCP4.5)
+ # - {dataset: GISS-E2-H-CC}
+ - {dataset: GISS-E2-R, ensemble: r1i1p2}
+ # Historical data end year is 2010, not 2005 (overlaps with RCP4.5)
+ # - {dataset: GISS-E2-R-CC}
+ - {dataset: HadCM3}
+ - {dataset: HadGEM2-AO}
+ # dataset ends November 2005
+ # - {dataset: HadGEM2-CC}
+ # dataset ends November 2005
+ # - {dataset: HadGEM2-ES}
+ # - {dataset: inmcm4}
+ - {dataset: IPSL-CM5A-LR}
+ - {dataset: IPSL-CM5A-MR}
+ - {dataset: IPSL-CM5B-LR}
+ # - {dataset: MIROC4h}
+ # Historical data end year is 2012, not 2005 (overlaps with RCP4.5)
+ # - {dataset: MIROC5}
+ - {dataset: MIROC-ESM}
+ - {dataset: MIROC-ESM-CHEM}
+ - {dataset: MPI-ESM-LR}
+ - {dataset: MPI-ESM-MR}
+ # no rcp45 at DKRZ
+ # - {dataset: MPI-ESM-P}
+ - {dataset: MRI-CGCM3}
+ # no rcp45 at DKRZ
+ # - {dataset: MRI-ESM1}
+ - {dataset: NorESM1-M}
+ - {dataset: NorESM1-ME}
+ - {dataset: HadCRUT4, project: OBS, type: ground, version: 1,
+ start_year: 1870, end_year: 2017, tier: 2}
+ scripts:
+ fig09-8:
+ script: ipcc_ar5/tsline.ncl
+ time_avg: "yearly"
+ ts_anomaly: "anom"
+ ref_start: 1961
+ ref_end: 1990
+ ref_mask: True
+ plot_units: "degC"
+ y_min: -1.2
+ y_max: 1.5
+
volcanoes: True + styleset: CMIP5 # Plot style diff --git a/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figures_926_927.yml b/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figures_926_927.yml new file mode 100644 index 0000000000..a44bca6644 --- /dev/null +++ b/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figures_926_927.yml @@ -0,0 +1,186 @@ +# ESMValTool +# recipe_flato13ipcc_figures_926_927.yml +--- +documentation: + title: IPCC AR5, chap. 9 diagnostics for 9.26 and 9.27 + + description: | + Reproducing selected figures from IPCC AR5, chap. 9 (Flato et al., 2013) + 9.26, 9.27. + Data sets taken out were part of the original publication but are + currently not available on ESGF. + + authors: + - bock_lisa + - gier_bettina + - lauer_axel + - schlund_manuel + - senftleben_daniel + - zimmermann_klaus + + maintainer: + - lauer_axel + + references: + - flato13ipcc + + projects: + - embrace + - esmval + - crescendo + + +preprocessors: + + land_fraction_weighting: + weighting_landsea_fraction: &weighting_options + area_type: land + exclude: [ + # 'bcc-csm1-1-m', + 'GCP2018', + # 'inmcm4', + 'JMA-TRANSCOM', + ] + + sea_fraction_weighting: + weighting_landsea_fraction: + <<: *weighting_options + area_type: sea + regrid: + target_grid: 2x2 + scheme: linear + + +diagnostics: + + # ********************************************************************** + # Flato et al. (2013) - IPCC AR5, chap. 9 + # similar to fig. 9.26 + # ********************************************************************** + # Ensemble-mean global ocean carbon uptake (top) and global land + # carbon uptake (bottom) in the CMIP5 ESMs for the historical period + # 1901-2005. + # ********************************************************************** + + fig09-26top: + title: Fig. 9.26a (carbon uptake) + variables: + fgco2: &fgco2_settings + preprocessor: sea_fraction_weighting + project: CMIP5 + mip: Omon + exp: historical + ensemble: r1i1p1 + start_year: 1901 + end_year: 2005 + reference_dataset: JMA-TRANSCOM + plot_units: PgC y-1 + additional_datasets: + - {dataset: JMA-TRANSCOM, project: OBS, type: reanaly, version: 2018, tier: 3, start_year: 1986, end_year: 2005} + # - {dataset: CanESM2} + # - {dataset: CESM1-BGC} + # - {dataset: GFDL-ESM2G} + # - {dataset: GFDL-ESM2M} + # - {dataset: HadGEM2-CC} + # - {dataset: HadGEM2-ES} + # - {dataset: inmcm4} + # - {dataset: IPSL-CM5A-LR} + # - {dataset: IPSL-CM5A-MR} + # - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC-ESM, supplementary_variables: [{short_name: sftlf, mip: fx, ensemble: r0i0p0}, {short_name: sftof, skip: true}]} + - {dataset: MIROC-ESM-CHEM, supplementary_variables: [{short_name: sftlf, mip: fx, ensemble: r0i0p0}, {short_name: sftof, skip: true}]} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: NorESM1-ME} + fgco2_GCP2018: + <<: *fgco2_settings + preprocessor: default + short_name: fgco2 + additional_datasets: + - {dataset: GCP2018, project: OBS, type: reanaly, version: '1.0', tier: 2, start_year: 1959, end_year: 2005, frequency: yr} + scripts: + main_global: &fig09_26_settings + script: carbon_cycle/main.ncl + styleset: CMIP5 + region: global + legend_year_outside: false + legend_outside: false + sort: true + anav_month: true + seasonal_cycle_plot: false + errorbar_plot: false + mean_IAV_plot: false + evolution_plot: true + evolution_plot_volcanoes: false + evolution_plot_anomaly: false + evolution_plot_ref_dataset: GCP2018 + + fig09-26bottom: + title: Fig. 
9.26b (carbon uptake) + variables: + nbp: + <<: *fgco2_settings + preprocessor: land_fraction_weighting + mip: Lmon + additional_datasets: &nbp_datasets + - {dataset: GCP2018, project: OBS, type: reanaly, version: '1.0', tier: 2, start_year: 1959, end_year: 2005, frequency: yr} + - {dataset: JMA-TRANSCOM, project: OBS, type: reanaly, version: 2018, tier: 3, start_year: 1986, end_year: 2005} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CESM1-BGC} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + # - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: NorESM1-ME} + scripts: + main_global: + <<: *fig09_26_settings + + + # ********************************************************************** + # Flato et al. (2013) - IPCC AR5, chap. 9 + # similar to fig. 9.27 + # ********************************************************************** + # Simulation of global mean (a) atmosphere-ocean CO2 fluxes (fgCO2) + # and (b) net atmosphere-land CO2 fluxes (NBP), by ESMs for the + # period 1986-2005. + # ********************************************************************** + + fig09-27top: + title: Fig. 9.27a (CO2 fluxes) + variables: + fgco2: + <<: *fgco2_settings + start_year: 1986 + fgco2_GCP2018: + <<: *fgco2_settings + preprocessor: default + short_name: fgco2 + additional_datasets: + - {dataset: GCP2018, project: OBS, type: reanaly, version: '1.0', tier: 2, start_year: 1986, end_year: 2005, frequency: yr} + scripts: + main_global: &fig09_27_settings + <<: *fig09_26_settings + errorbar_plot: true + evolution_plot: false + + fig09-27bottom: + variables: + nbp: + <<: *fgco2_settings + preprocessor: default + mip: Lmon + start_year: 1986 + additional_datasets: *nbp_datasets + scripts: + main_global: + <<: *fig09_27_settings diff --git a/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figures_92_95.yml b/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figures_92_95.yml new file mode 100644 index 0000000000..80286e716a --- /dev/null +++ b/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figures_92_95.yml @@ -0,0 +1,688 @@ +# ESMValTool +# recipe_flato13ipcc_figures_92_95.yml +--- +documentation: + title: IPCC AR5, chap. 9 diagnostics 9.2-9.5 + + description: | + Reproducing selected figures from IPCC AR5, chap. 9 (Flato et al., 2013) + 9.2, 9.3, 9.4, 9.5. + Data sets taken out were part of the original publication but are + currently not available on ESGF. + + authors: + - bock_lisa + - gier_bettina + - lauer_axel + - schlund_manuel + - senftleben_daniel + - zimmermann_klaus + + maintainer: + - lauer_axel + + references: + - flato13ipcc + + projects: + - embrace + - esmval + - crescendo + + +preprocessors: + + clim: + regrid: + target_grid: 2x2 + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [reference_dataset] + + clim_ref_9-3: + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [reference_dataset] + convert_units: + units: degC + +diagnostics: + + # ********************************************************************** + # Flato et al. (2013) - IPCC AR5, chap. 9 + # similar to fig. 
9.2 a/b/c + # ********************************************************************** + # Multi model mean, multi model mean bias, and mean absolute error + # (geographical distributions) + # ********************************************************************** + + fig09-2: + title: Fig. 9.2 (near-surface temperature) + description: IPCC AR5 Ch. 9, Fig. 9.2 (near-surface temperature) + themes: + - phys + realms: + - atmos + variables: + tas: + preprocessor: clim + reference_dataset: ERA-Interim + mip: Amon + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, + start_year: 1986, end_year: 2005, tier: 3} + - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: CESM1-CAM5-1-FV, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + # - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + # - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # - {dataset: FGOALS-g2, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # - {dataset: GISS-E2-H, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-R, project: CMIP5, exp: historical, 
ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + scripts: + fig09-2: &fig-9-2_and_9-4_settings + script: clouds/clouds_bias.ncl + projection: Robinson + timemean: annualclim + plot_abs_diff: true + plot_rel_diff: false + + # *************************************************************************** + # Flato et al. (2013) - IPCC AR5, chap. 9 + # similar to fig. 9.3 + # *************************************************************************** + # Multi model mean seasonality, mean absolute seasonality, mean bias + # in seasonality, and mean bias in absolute seasonality + # *************************************************************************** + + fig09-3: + title: Fig. 
9.3 (near-surface temperature)
+ description: Calculate seasonality of multi-model mean for tas
+ themes:
+ - phys
+ realms:
+ - atmos
+ variables:
+ tas:
+ preprocessor: clim_ref_9-3
+ reference_dataset: ERA-Interim
+ project: CMIP5
+ exp: historical
+ ensemble: r1i1p1
+ mip: Amon
+ start_year: 1980
+ end_year: 2005
+ additional_datasets:
+ - {dataset: ACCESS1-0}
+ - {dataset: ACCESS1-3}
+ - {dataset: bcc-csm1-1}
+ # - {dataset: bcc-csm1-1-m}
+ - {dataset: BNU-ESM}
+ - {dataset: CanCM4}
+ - {dataset: CanESM2}
+ - {dataset: CCSM4}
+ - {dataset: CESM1-BGC}
+ - {dataset: CESM1-CAM5}
+ - {dataset: CESM1-FASTCHEM}
+ - {dataset: CESM1-WACCM}
+ - {dataset: CMCC-CESM}
+ - {dataset: CMCC-CM}
+ - {dataset: CMCC-CMS}
+ - {dataset: CNRM-CM5}
+ - {dataset: CNRM-CM5-2}
+ - {dataset: FIO-ESM}
+ - {dataset: GFDL-CM2p1}
+ - {dataset: GFDL-CM3}
+ - {dataset: GFDL-ESM2G}
+ - {dataset: GFDL-ESM2M}
+ - {dataset: GISS-E2-H}
+ - {dataset: GISS-E2-H-CC}
+ - {dataset: GISS-E2-R}
+ - {dataset: GISS-E2-R-CC}
+ - {dataset: HadCM3}
+ - {dataset: HadGEM2-AO}
+ - {dataset: HadGEM2-CC}
+ - {dataset: HadGEM2-ES}
+ # - {dataset: inmcm4}
+ - {dataset: IPSL-CM5A-LR}
+ - {dataset: IPSL-CM5A-MR}
+ - {dataset: IPSL-CM5B-LR}
+ - {dataset: MIROC-ESM}
+ - {dataset: MIROC-ESM-CHEM}
+ - {dataset: MIROC4h}
+ - {dataset: MIROC5}
+ - {dataset: MPI-ESM-LR}
+ - {dataset: MPI-ESM-MR}
+ - {dataset: MPI-ESM-P}
+ - {dataset: MRI-CGCM3}
+ - {dataset: MRI-ESM1}
+ - {dataset: NorESM1-M}
+ - {dataset: NorESM1-ME}
+ - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3}
+ scripts:
+ fig09-3:
+ script: ipcc_ar5/ch09_fig09_3.ncl
+
+ # **********************************************************************
+ # Flato et al. (2013) - IPCC AR5, chap. 9
+ # similar to fig. 9.4
+ # **********************************************************************
+ # Multi model mean, multi model mean bias, mean absolute error, and
+ # mean relative error (geographical distributions)
+ # **********************************************************************
+
+ fig09-4:
+ title: Fig. 9.4 (precipitation)
+ description: IPCC AR5 Ch. 9, Fig.
9.4 (precipitation) + themes: + - clouds + realms: + - atmos + variables: + pr: + preprocessor: clim + reference_dataset: GPCP-V2.2 + mip: Amon + additional_datasets: + - {dataset: GPCP-V2.2, project: obs4MIPs, level: L3, + start_year: 1986, end_year: 2005, tier: 1} + - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: CESM1-CAM5-1-FV, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + # - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + # - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # - {dataset: FGOALS-g2, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # - {dataset: GISS-E2-H, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 
1986, end_year: 2005} + - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + scripts: + fig09-4: + <<: *fig-9-2_and_9-4_settings + plot_abs_diff: true + plot_rel_diff: true + + # ********************************************************************** + # Flato et al. (2013) - IPCC AR5, chap. 9 + # similar to fig. 9.5 + # ********************************************************************** + # Difference of multi-model mean and reference data set (geographical + # distribution, annual mean) + zonal averages of individual models + # and multi-model mean (annual means). + # ********************************************************************** + + fig09-5a: + title: Fig. 
9.5a (shortwave CRE) + description: differences of multi-model mean and reference dataset + themes: + - clouds + realms: + - atmos + variables: + swcre: + preprocessor: clim + reference_dataset: CERES-EBAF + mip: Amon + derive: true + additional_datasets: + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, + start_year: 2001, end_year: 2010, tier: 1} + - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: CESM1-CAM5-1-FV, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + # - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + # - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # - {dataset: FGOALS-g2, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # - {dataset: GISS-E2-H, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # 
- {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + scripts: + fig09-5a: &fig-9-5_settings + script: clouds/clouds_ipcc.ncl + projection: Robinson + colormap: WhiteBlueGreenYellowRed + timemean: annualclim + + fig09-5b: + title: Fig. 9.5b (longwave CRE) + description: differences of multi-model mean and reference dataset + themes: + - clouds + realms: + - atmos + variables: + lwcre: + preprocessor: clim + reference_dataset: CERES-EBAF + mip: Amon + derive: true + additional_datasets: + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, + start_year: 2001, end_year: 2010, tier: 1} + - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: CESM1-CAM5-1-FV, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CMCC-CESM, project: CMIP5, exp: historical, 
ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + # - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + # - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # - {dataset: FGOALS-g2, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # - {dataset: GISS-E2-H, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + scripts: + fig09-5b: + <<: *fig-9-5_settings + + fig09-5c: + title: Fig. 
9.5c (net CRE) + description: differences of multi-model mean and reference dataset + themes: + - clouds + realms: + - atmos + variables: + netcre: + preprocessor: clim + reference_dataset: CERES-EBAF + mip: Amon + derive: true + additional_datasets: + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, + start_year: 2001, end_year: 2010, tier: 1} + - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: CESM1-CAM5-1-FV, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + # - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + # - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # - {dataset: FGOALS-g2, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # - {dataset: GISS-E2-H, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 
2005}
+ # - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1,
+ # start_year: 1986, end_year: 2005}
+ - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical,
+ ensemble: r1i1p1, start_year: 1986, end_year: 2005}
+ - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical,
+ ensemble: r1i1p1, start_year: 1986, end_year: 2005}
+ - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical,
+ ensemble: r1i1p1, start_year: 1986, end_year: 2005}
+ - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1,
+ start_year: 1986, end_year: 2005}
+ - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1,
+ start_year: 1986, end_year: 2005}
+ - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1,
+ start_year: 1986, end_year: 2005}
+ - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical,
+ ensemble: r1i1p1, start_year: 1986, end_year: 2005}
+ - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1,
+ start_year: 1986, end_year: 2005}
+ - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1,
+ start_year: 1986, end_year: 2005}
+ - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1,
+ start_year: 1986, end_year: 2005}
+ - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1,
+ start_year: 1986, end_year: 2005}
+ - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1,
+ start_year: 1986, end_year: 2005}
+ - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1,
+ start_year: 1986, end_year: 2005}
+ scripts:
+ fig09-5c:
+ <<: *fig-9-5_settings
+ diff --git a/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figures_938_941_cmip3.yml b/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figures_938_941_cmip3.yml new file mode 100644 index 0000000000..67c709a318 --- /dev/null +++ b/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figures_938_941_cmip3.yml @@ -0,0 +1,365 @@
+# ESMValTool
+# recipe_flato13ipcc_figures_938_941_cmip3.yml
+---
+documentation:
+ title: IPCC AR5, chap. 9 diagnostics to reproduce regional figures 9.38-9.41
+ description: |
+ Reproducing selected figures from IPCC AR5, chap. 9 (Flato et al., 2013)
+ 9.38, 9.39, 9.40, 9.41.
+ Differences to the IPCC version:
+ - No CMIP3 A1B runs included, therefore the time series stops in 1999
+ - CRU data version 4.02 instead of 3.10
+ - Not exactly the same CMIP3 and CMIP5 models and versions.
+ Processing of CMIP3 models currently works only in serial mode, due to
+ an issue in the input data still under investigation.
+ Data sets commented out are currently not available on DKRZ.
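The serial-mode caveat in this description is operational, not cosmetic: with the default task parallelism the CMIP3 runs are expected to fail. A hedged sketch of the corresponding switch, assuming the standard `max_parallel_tasks` option in the user configuration file (`config-user.yml`):

```yaml
# config-user.yml (excerpt): run preprocessor tasks one at a time, as the
# CMIP3 input data used by this recipe currently requires.
max_parallel_tasks: 1
```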
+ + authors: + - cionni_irene + + maintainer: + - weigel_katja + + references: + - flato13ipcc + - seneviratne12ipcc + + projects: + - crescendo + +datasets: + - &cmip3 {dataset: bccr_bcm2_0, institute: BCCR, project: CMIP3, mip: A1, modeling_realm: atm, exp: 20c3m, frequency: mo, ensemble: run1} + - {<<: *cmip3, dataset: cccma_cgcm3_1, institute: CCCMA} + - {<<: *cmip3, dataset: cccma_cgcm3_1_t63, institute: CCCMA} + - {<<: *cmip3, dataset: csiro_mk3_0, institute: CSIRO} + - {<<: *cmip3, dataset: giss_aom, institute: NASA} + - {<<: *cmip3, dataset: giss_model_e_h, institute: NASA} + - {<<: *cmip3, dataset: giss_model_e_r, institute: NASA} + - {<<: *cmip3, dataset: iap_fgoals1_0_g, institute: LASG} + - {<<: *cmip3, dataset: ingv_echam4, institute: INGV} + - {<<: *cmip3, dataset: inmcm3_0, institute: INM} + - {<<: *cmip3, dataset: ipsl_cm4, institute: IPSL} + - {<<: *cmip3, dataset: miroc3_2_hires, institute: NIES} + - {<<: *cmip3, dataset: miroc3_2_medres, institute: NIES} + - {<<: *cmip3, dataset: mpi_echam5, institute: MPIM} + - {<<: *cmip3, dataset: ncar_ccsm3_0, institute: NCAR} + - {<<: *cmip3, dataset: ncar_pcm1, institute: NCAR} + - {<<: *cmip3, dataset: ukmo_hadcm3, institute: UKMO} + - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO} + - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1} + # - {dataset: CESM1-CAM5-1-FV, project: CMIP5, exp: historical} + - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical} + - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: GISS-E2-H, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1} + # - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1} + # - {dataset: 
IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1}
+ # - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1}
+ - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1}
+ - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1}
+ - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1}
+ - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical}
+ - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1}
+ - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1}
+ - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1}
+ - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1}
+ - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1}
+ - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1}
+
+
+preprocessors:
+ annual_cycle:
+ regrid:
+ target_grid: reference_dataset
+ scheme: linear
+ climate_statistics:
+ operator: mean
+ period: month
+ mask_landsea:
+ mask_out: sea
+ mask_fillvalues:
+ threshold_fraction: 0.95
+ annual_cycle_pr:
+ regrid:
+ target_grid: reference_dataset
+ scheme: linear
+ climate_statistics:
+ operator: mean
+ period: month
+ mask_landsea:
+ mask_out: sea
+ mask_fillvalues:
+ threshold_fraction: 0.66
+
+ clima_nomask:
+ regrid:
+ target_grid: reference_dataset
+ scheme: linear
+ climate_statistics:
+ operator: mean
+ period: month
+ mask_fillvalues:
+ threshold_fraction: 0.95
+ clima_nomask_pr:
+ regrid:
+ target_grid: reference_dataset
+ scheme: linear
+ climate_statistics:
+ operator: mean
+ period: month
+ mask_fillvalues:
+ threshold_fraction: 0.66
+ regr:
+ regrid:
+ target_grid: reference_dataset
+ scheme: linear
+
+diagnostics:
+
+ # ==========================================================================
+ # Chapter 9 - Figure938
+ # ==========================================================================
+ regional_downscaling_Fig938tas:
+ description: Creates annual cycle regional plots for individual models
+ and the multi-model mean over the selected project, experiment, and mip
+ themes:
+ - chem
+ realms:
+ - atmos
+ variables:
+ tas:
+ preprocessor: annual_cycle
+ reference_dataset: ERA-Interim
+ mip: Amon
+ ensemble: r1i1p1
+ start_year: 1980
+ end_year: 1999
+ additional_datasets:
+ - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3}
+ - {dataset: CRU, project: OBS, type: reanaly, version: TS4.02, tier: 2}
+ scripts:
+ clim: &Fig938_settings
+ script: regional_downscaling/Figure9.38.ncl
+ styleset: CMIP356
+ fig938_region_label: ["WNA", "ENA", "CAM", "TSA", "SSA", "EUM", "NAF",
+ "CAF", "SAF", "NAS", "CAS", "EAS", "SAS", "SEA",
+ "AUS"]
+ fig938_project_MMM: ["CMIP5", "CMIP3"]
+ fig938_experiment_MMM: ["historical", "20c3m"]
+ fig938_mip_MMM: ["Amon", "A1"]
+ fig938_colors_MMM: ["red", "blue"]
+ fig938_refModel: ERA-Interim
+ fig938_MMM: true
+ fig938_diff: false
+ regional_downscaling_Fig938pr:
+ description: Creates annual cycle regional plots for individual models
+ and the multi-model mean over the selected project, experiment, and mip
+ themes:
+ - chem
+ realms:
+ - atmos
+ variables:
+ pr:
+ preprocessor: annual_cycle_pr
+ reference_dataset: ERA-Interim
+ mip: Amon
+ ensemble: r1i1p1
+ start_year: 1980
+ end_year: 1999
+ additional_datasets:
+ - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3}
+ - {dataset: CRU, project: OBS, type: reanaly, version: TS4.02, tier: 2}
+ scripts:
+ clim:
+ <<: *Fig938_settings
+
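The `fig938_*_MMM` options above are parallel lists: the i-th entry of each list jointly defines one multi-model mean, so CMIP5/historical/Amon is drawn in red and CMIP3/20c3m/A1 in blue. A sketch of how one further group would be declared; the CMIP6 column is hypothetical and not part of this change:

```yaml
# Each column across the *_MMM lists describes one multi-model mean,
# so all of these lists must have the same length.
fig938_project_MMM: ["CMIP5", "CMIP3", "CMIP6"]  # third group is hypothetical
fig938_experiment_MMM: ["historical", "20c3m", "historical"]
fig938_mip_MMM: ["Amon", "A1", "Amon"]
fig938_colors_MMM: ["red", "blue", "green"]
```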
========================================================================== + # Chapter 9 - Figure939 + # ========================================================================== + regional_downscaling_Fig939tas: + description: Creates seasonal or annual bias box plots at selected + regions over the selected project&experiemnt&mip over + the selected project&experiemnt&mip + themes: + - chem + realms: + - atmos + variables: + tas: + preprocessor: annual_cycle + reference_dataset: CRU + mip: Amon + ensemble: r1i1p1 + start_year: 1986 + end_year: 1999 + additional_datasets: + - {dataset: CRU, project: OBS, type: reanaly, version: TS4.02, tier: 2} + + scripts: + clim: &Fig939_settings + script: regional_downscaling/Figure9.39.ncl + styleset: CMIP356 + fig939_season: ["DJF", "JJA", "ANN"] + fig939_region_label: ["ALAs", "CGIs", "WNAs", "CNAs", "ENAs", "CAMs", + "AMZs", "NEBs", "WSAs", "SSAs", "NEUs", "CEUs", + "MEDs", "SAHs", "WAFs", "EAFs", "SAFs", "NASs", + "WASs", "CASs", "TIBs", "EASs", "SASs", "SEAs", + "NAUs", "SAUs"] + fig939_project_MMM: ["CMIP5", "CMIP3"] + fig939_experiment_MMM: ["historical", "20c3m"] + fig939_mip_MMM: ["Amon", "A1"] + fig939_MMM: true + fig939_YMin: -11 + fig939_YMax: 8 + + regional_downscaling_Fig939pr: + description: Creates seasonal or annual bias box plots at selected + regions over the selected project&experiemnt&mip over + the selected project&experiemnt&mip + themes: + - chem + realms: + - atmos + variables: + pr: + preprocessor: annual_cycle_pr + reference_dataset: CRU + mip: Amon + ensemble: r1i1p1 + start_year: 1986 + end_year: 1999 + additional_datasets: + - {dataset: CRU, project: OBS, type: reanaly, version: TS4.02, tier: 2} + + scripts: + clim: + script: regional_downscaling/Figure9.39.ncl + styleset: CMIP356 + fig939_season: ["ONDJFM", "AMJJAS", "ANN"] + fig939_region_label: ["ALAs", "CGIs", "WNAs", "CNAs", "ENAs", "CAMs", + "AMZs", "NEBs", "WSAs", "SSAs", "NEUs", "CEUs", + "MEDs", "SAHs", "WAFs", "EAFs", "SAFs", "NASs", + "WASs", "CASs", "TIBs", "EASs", "SASs", "SEAs", + "NAUs", "SAUs"] + fig939_project_MMM: ["CMIP5", "CMIP3"] + fig939_experiment_MMM: ["historical", "20c3m"] + fig939_mip_MMM: ["Amon", "A1"] + fig939_MMM: true + fig939_mode: true + fig939_YMin: -100 + fig939_YMax: 300 + + + # ========================================================================== + # Chapter 9 - Figure940 + # ========================================================================== + regional_downscaling_Fig940tas: + description: Creates annual cycle regional plots for individual models + and mumltimodel over the selected project&experiemnt&mip. 
+ themes: + - chem + realms: + - atmos + variables: + tas: + preprocessor: clima_nomask + reference_dataset: CRU + mip: Amon + ensemble: r1i1p1 + start_year: 1986 + end_year: 1999 + additional_datasets: + - {dataset: CRU, project: OBS, type: reanaly, version: TS4.02, tier: 2} + scripts: + clim: &Fig940_settings + script: regional_downscaling/Figure9.40.ncl + styleset: CMIP356 + fig940_season: ["DJF", "JJA", "ANN"] + fig940_region_label: ["Arctic_land", "Arctic_sea", "Antarctic_land", + "Antarctic_sea", "Caribbean", + "WesternIndianOcean", + "NorthernIndianOcean", "NorthernTropicalPacific", + "EquatorialTropicalPacific", + "SouthernTropicalPacific", "World_land", + "World_sea", "World"] + fig940_project_MMM: ["CMIP5", "CMIP3"] + fig940_experiment_MMM: ["historical", "20c3m"] + fig940_mip_MMM: ["Amon", "A1"] + fig940_MMM: true + fig940_YMin: -12 + fig940_YMax: 6 + + regional_downscaling_Fig940pr: + description: Creates annual cycle regional plots for individual models + and mumltimodel over the selected project&experiemnt&mip. + themes: + - chem + realms: + - atmos + variables: + pr: + preprocessor: clima_nomask_pr + reference_dataset: CRU + mip: Amon + ensemble: r1i1p1 + start_year: 1986 + end_year: 1999 + additional_datasets: + - {dataset: CRU, project: OBS, type: reanaly, version: TS4.02, tier: 2} + scripts: + clim: + <<: *Fig940_settings + fig940_mode: true + fig940_season: ["ONDJFM", "AMJJAS", "ANN"] + fig940_YMin: -100 + fig940_YMax: 150 + + + + # ========================================================================== + # Chapter 9 - Figure941 + # ========================================================================== + regional_downscaling_Fig941: + description: + themes: + - chem + realms: + - atmos + variables: + tas: + preprocessor: regr + reference_dataset: CRU + mip: Amon + ensemble: r1i1p1 + start_year: 1961 + end_year: 1999 + additional_datasets: + - {dataset: CRU, project: OBS, type: reanaly, version: TS4.02, tier: 2} + scripts: + clim: &Fig941_settings + script: regional_downscaling/Figure9.41.ncl + fig941_region_label: ["MEDs"] + styleset: CMIP356 diff --git a/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figures_938_941_cmip6.yml b/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figures_938_941_cmip6.yml new file mode 100644 index 0000000000..e67625b257 --- /dev/null +++ b/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figures_938_941_cmip6.yml @@ -0,0 +1,365 @@ +# ESMValTool +# recipe_flato13ipcc_figures_938_941_cmip6.yml +--- +documentation: + title: IPCC AR5, chap. 9 diagnostics to reproduce regional figures 9.38-9.41 + unsing CMIP6 data + description: | + Reproducing selected figures from IPCC AR5, chap. 9 (Flato et al., 2013) + 9.38, 9.39, 9.40, 9.41. + Differences to the IPCC version: + - CMIP6 instead of CMIP3 data + - CRU data version 4.02 instead of 3.10 + - Not exactly the same CMIP5 models and version. 
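+  # Note: in the fig93*_project_MMM, fig93*_experiment_MMM and fig93*_mip_MMM
+  # script settings below, the lists are index-aligned, so each position
+  # selects one multi-model mean (here CMIP5/historical/Amon and
+  # CMIP6/historical/Amon).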
+ + authors: + - cionni_irene + + maintainer: + - weigel_katja + + references: + - flato13ipcc + - seneviratne12ipcc + + projects: + - crescendo + +datasets: + - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical} + - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: GISS-E2-H, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: BCC-CSM2-MR, project: CMIP6, exp: historical, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-ESM1, project: CMIP6, exp: historical, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAMS-CSM1-0, project: CMIP6, exp: historical, ensemble: r1i1p1f1, grid: gn} + - {dataset: CanESM5, project: CMIP6, exp: historical, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2, project: CMIP6, exp: historical, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-WACCM, project: CMIP6, exp: historical, ensemble: r1i1p1f1, grid: gn, 
institute: NCAR}
+  - {dataset: CNRM-CM6-1, project: CMIP6, exp: historical, ensemble: r1i1p1f2, grid: gr}
+  - {dataset: CNRM-ESM2-1, project: CMIP6, exp: historical, ensemble: r1i1p1f2, grid: gr}
+  - {dataset: E3SM-1-0, project: CMIP6, exp: historical, ensemble: r1i1p1f1, grid: gr}
+  - {dataset: EC-Earth3-Veg, project: CMIP6, exp: historical, ensemble: r1i1p1f1, grid: gr}
+  - {dataset: GFDL-CM4, project: CMIP6, exp: historical, ensemble: r1i1p1f1, grid: gr1}
+  - {dataset: GFDL-ESM4, project: CMIP6, exp: historical, ensemble: r1i1p1f1, grid: gr1}
+  - {dataset: GISS-E2-1-G, project: CMIP6, exp: historical, ensemble: r1i1p1f1, grid: gn}
+  - {dataset: GISS-E2-1-H, project: CMIP6, exp: historical, ensemble: r1i1p1f1, grid: gn}
+  - {dataset: HadGEM3-GC31-LL, project: CMIP6, exp: historical, ensemble: r1i1p1f3, grid: gn}
+  - {dataset: IPSL-CM6A-LR, project: CMIP6, exp: historical, ensemble: r1i1p1f1, grid: gr}
+  - {dataset: MIROC6, project: CMIP6, exp: historical, ensemble: r1i1p1f1, grid: gn}
+  - {dataset: MIROC-ES2L, project: CMIP6, exp: historical, ensemble: r1i1p1f2, grid: gn}
+  - {dataset: MPI-ESM1-2-HR, project: CMIP6, exp: historical, ensemble: r1i1p1f1, grid: gn}
+  - {dataset: MRI-ESM2-0, project: CMIP6, exp: historical, ensemble: r1i1p1f1, grid: gn}
+  - {dataset: NESM3, project: CMIP6, exp: historical, ensemble: r1i1p1f1, grid: gn}
+  - {dataset: SAM0-UNICON, project: CMIP6, exp: historical, ensemble: r1i1p1f1, grid: gn}
+
+
+preprocessors:
+  annual_cycle:
+    regrid:
+      target_grid: reference_dataset
+      scheme: linear
+    climate_statistics:
+      operator: mean
+      period: month
+    mask_landsea:
+      mask_out: sea
+    mask_fillvalues:
+      threshold_fraction: 0.95
+  annual_cycle_pr:
+    regrid:
+      target_grid: reference_dataset
+      scheme: linear
+    climate_statistics:
+      operator: mean
+      period: month
+    mask_landsea:
+      mask_out: sea
+    mask_fillvalues:
+      threshold_fraction: 0.66
+
+  clima_nomask:
+    regrid:
+      target_grid: reference_dataset
+      scheme: linear
+    climate_statistics:
+      operator: mean
+      period: month
+    mask_fillvalues:
+      threshold_fraction: 0.95
+  clima_nomask_pr:
+    regrid:
+      target_grid: reference_dataset
+      scheme: linear
+    climate_statistics:
+      operator: mean
+      period: month
+    mask_fillvalues:
+      threshold_fraction: 0.66
+  regr:
+    regrid:
+      target_grid: reference_dataset
+      scheme: linear
+
+diagnostics:
+
+  # ==========================================================================
+  # Chapter 9 - Figure938
+  # ==========================================================================
+  regional_downscaling_Fig938tas:
+    description: Creates annual cycle regional plots for individual models
+                 and multimodel over the selected project&experiment&mip
+    themes:
+      - chem
+    realms:
+      - atmos
+    variables:
+      tas:
+        preprocessor: annual_cycle
+        reference_dataset: ERA-Interim
+        mip: Amon
+        ensemble: r1i1p1
+        start_year: 1980
+        end_year: 1999
+        additional_datasets:
+          - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3}
+          - {dataset: CRU, project: OBS, type: reanaly, version: TS4.02, tier: 2}
+    scripts:
+      clim: &Fig938_settings
+        script: regional_downscaling/Figure9.38.ncl
+        styleset: CMIP356
+        fig938_region_label: ["WNA", "ENA", "CAM", "TSA", "SSA", "EUM", "NAF",
+                              "CAF", "SAF", "NAS", "CAS", "EAS", "SAS", "SEA",
+                              "AUS"]
+        fig938_project_MMM: ["CMIP5", "CMIP6"]
+        fig938_experiment_MMM: ["historical", "historical"]
+        fig938_mip_MMM: ["Amon", "Amon"]
+        fig938_colors_MMM: ["red", "blue"]
+        fig938_refModel: ERA-Interim
+        fig938_MMM: true
+        fig938_diff: false
+
+
+  regional_downscaling_Fig938pr:
+    description: Creates annual cycle regional plots for individual models
+                 and multimodel over the selected project&experiment&mip
+    themes:
+      - chem
+    realms:
+      - atmos
+    variables:
+      pr:
+        preprocessor: annual_cycle_pr
+        reference_dataset: ERA-Interim
+        mip: Amon
+        ensemble: r1i1p1
+        start_year: 1980
+        end_year: 1999
+        additional_datasets:
+          - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3}
+          - {dataset: CRU, project: OBS, type: reanaly, version: TS4.02, tier: 2}
+    scripts:
+      clim:
+        <<: *Fig938_settings
+
+  # ==========================================================================
+  # Chapter 9 - Figure939
+  # ==========================================================================
+  regional_downscaling_Fig939tas:
+    description: Creates seasonal or annual bias box plots at selected
+                 regions over the selected project&experiment&mip
+    themes:
+      - chem
+    realms:
+      - atmos
+    variables:
+      tas:
+        preprocessor: annual_cycle
+        reference_dataset: CRU
+        mip: Amon
+        ensemble: r1i1p1
+        start_year: 1980
+        end_year: 2005
+        additional_datasets:
+          - {dataset: CRU, project: OBS, type: reanaly, version: TS4.02, tier: 2}
+
+    scripts:
+      clim: &Fig939_settings
+        script: regional_downscaling/Figure9.39.ncl
+        styleset: CMIP356
+        fig939_season: ["DJF", "JJA", "ANN"]
+        fig939_region_label: ["ALAs", "CGIs", "WNAs", "CNAs", "ENAs", "CAMs",
+                              "AMZs", "NEBs", "WSAs", "SSAs", "NEUs", "CEUs",
+                              "MEDs", "SAHs", "WAFs", "EAFs", "SAFs", "NASs",
+                              "WASs", "CASs", "TIBs", "EASs", "SASs", "SEAs",
+                              "NAUs", "SAUs"]
+        fig939_project_MMM: ["CMIP5", "CMIP6"]
+        fig939_experiment_MMM: ["historical", "historical"]
+        fig939_mip_MMM: ["Amon", "Amon"]
+        fig939_MMM: true
+        fig939_YMin: -11
+        fig939_YMax: 8
+
+  regional_downscaling_Fig939pr:
+    description: Creates seasonal or annual bias box plots at selected
+                 regions over the selected project&experiment&mip
+    themes:
+      - chem
+    realms:
+      - atmos
+    variables:
+      pr:
+        preprocessor: annual_cycle_pr
+        reference_dataset: CRU
+        mip: Amon
+        ensemble: r1i1p1
+        start_year: 1980
+        end_year: 2005
+        additional_datasets:
+          - {dataset: CRU, project: OBS, type: reanaly, version: TS4.02, tier: 2}
+
+    scripts:
+      clim:
+        script: regional_downscaling/Figure9.39.ncl
+        styleset: CMIP356
+        fig939_season: ["ONDJFM", "AMJJAS", "ANN"]
+        fig939_region_label: ["ALAs", "CGIs", "WNAs", "CNAs", "ENAs", "CAMs",
+                              "AMZs", "NEBs", "WSAs", "SSAs", "NEUs", "CEUs",
+                              "MEDs", "SAHs", "WAFs", "EAFs", "SAFs", "NASs",
+                              "WASs", "CASs", "TIBs", "EASs", "SASs", "SEAs",
+                              "NAUs", "SAUs"]
+        fig939_project_MMM: ["CMIP5", "CMIP6"]
+        fig939_experiment_MMM: ["historical", "historical"]
+        fig939_mip_MMM: ["Amon", "Amon"]
+        fig939_MMM: true
+        fig939_mode: true
+        fig939_YMin: -100
+        fig939_YMax: 300
+
+  # ==========================================================================
+  # Chapter 9 - Figure940
+  # ==========================================================================
+  regional_downscaling_Fig940tas:
+    description: Creates annual cycle regional plots for individual models
+                 and multimodel over the selected project&experiment&mip.
+    themes:
+      - chem
+    realms:
+      - atmos
+    variables:
+      tas:
+        preprocessor: clima_nomask
+        reference_dataset: ERA-Interim
+        mip: Amon
+        ensemble: r1i1p1
+        start_year: 1980
+        end_year: 2005
+        additional_datasets:
+          # - {dataset: CRU, project: OBS, type: reanaly, version: TS4.02, tier: 2}
+          - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3}
+    scripts:
+      clim: &Fig940_settings
+        script: regional_downscaling/Figure9.40.ncl
+        styleset: CMIP356
+        fig940_season: ["DJF", "JJA", "ANN"]
+        fig940_region_label: ["Arctic_land", "Arctic_sea", "Antarctic_land",
+                              "Antarctic_sea", "Caribbean",
+                              "WesternIndianOcean",
+                              "NorthernIndianOcean", "NorthernTropicalPacific",
+                              "EquatorialTropicalPacific",
+                              "SouthernTropicalPacific", "World_land",
+                              "World_sea", "World"]
+        fig940_project_MMM: ["CMIP5", "CMIP6"]
+        fig940_experiment_MMM: ["historical", "historical"]
+        fig940_mip_MMM: ["Amon", "Amon"]
+        fig940_MMM: true
+        fig940_YMin: -12
+        fig940_YMax: 6
+
+  regional_downscaling_Fig940pr:
+    description: Creates annual cycle regional plots for individual models
+                 and multimodel over the selected project&experiment&mip.
+    themes:
+      - chem
+    realms:
+      - atmos
+    variables:
+      pr:
+        preprocessor: clima_nomask_pr
+        reference_dataset: GPCP-V2.2
+        mip: Amon
+        ensemble: r1i1p1
+        start_year: 1980
+        end_year: 2005
+        additional_datasets:
+          - {dataset: GPCP-V2.2, project: obs4MIPs, tier: 1}
+          # - {dataset: CRU, project: OBS, type: reanaly, version: TS4.02, tier: 2}
+    scripts:
+      clim:
+        <<: *Fig940_settings
+        fig940_mode: true
+        fig940_season: ["ONDJFM", "AMJJAS", "ANN"]
+        fig940_YMin: -100
+        fig940_YMax: 150
+
+
+  # ==========================================================================
+  # Chapter 9 - Figure941
+  # ==========================================================================
+  regional_downscaling_Fig941:
+    description: Creates ranked modelled versus reference data set
+                 regional mean temperature plots.
+    themes:
+      - chem
+    realms:
+      - atmos
+    variables:
+      tas:
+        preprocessor: regr
+        reference_dataset: CRU
+        mip: Amon
+        ensemble: r1i1p1
+        start_year: 1961
+        end_year: 2000
+        additional_datasets:
+          - {dataset: CRU, project: OBS, type: reanaly, version: TS4.02, tier: 2}
+    scripts:
+      clim: &Fig941_settings
+        script: regional_downscaling/Figure9.41.ncl
+        fig941_region_label: ["MEDs"]
+        styleset: CMIP356
diff --git a/esmvaltool/recipes/ipccwg1ar5ch9/recipe_weigel21gmd_figures_13_16.yml b/esmvaltool/recipes/ipccwg1ar5ch9/recipe_weigel21gmd_figures_13_16.yml
new file mode 100644
index 0000000000..af6a5d38bb
--- /dev/null
+++ b/esmvaltool/recipes/ipccwg1ar5ch9/recipe_weigel21gmd_figures_13_16.yml
@@ -0,0 +1,239 @@
+# ESMValTool
+# recipe_weigel21gmd_figures_13_16.yml
+---
+documentation:
+  title: Figures from Weigel et al. (2021),
+         a similar but reduced version of the IPCC AR5, chap. 9
+         regional figures 9.38-9.41, containing only CMIP5 data.
+
+  description: |
+    Reproducing figures from Weigel et al. (2021) based on
+    the analysis shown in IPCC AR5, chap. 9 (Flato et al., 2013)
+    9.38, 9.39, 9.40, 9.41.
+    Data sets commented out were part of the original publication but are
+    currently not available on ESGF.
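+  # The commented-out datasets below (e.g. the IPSL-CM5A/B models) were part
+  # of the original publication; remove the leading '#' to re-enable them
+  # once they are available on ESGF again.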
+ + authors: + - cionni_irene + + maintainer: + - weigel_katja + + references: + - flato13ipcc + - seneviratne12ipcc + - weigel2021gmd + + projects: + - crescendo + +datasets: + - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1} + # - {dataset: CESM1-CAM5-1-FV, project: CMIP5, exp: historical} + - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical} + - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: GISS-E2-H, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1} + # - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1} + # - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1} + # - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1} + + +preprocessors: + annual_cycle: + regrid: + target_grid: reference_dataset + scheme: linear + climate_statistics: + operator: mean + period: month + mask_landsea: + mask_out: sea + mask_fillvalues: + 
threshold_fraction: 0.95
+
+  clima_nomask:
+    regrid:
+      target_grid: reference_dataset
+      scheme: linear
+    climate_statistics:
+      operator: mean
+      period: month
+    mask_fillvalues:
+      threshold_fraction: 0.95
+  regr:
+    regrid:
+      target_grid: reference_dataset
+      scheme: linear
+
+diagnostics:
+
+  # ==========================================================================
+  # Chapter 9 - Figure938
+  # ==========================================================================
+  regional_downscaling_Fig938:
+    description: Creates annual cycle regional plots for individual models
+                 and multimodel over the selected project&experiment&mip.
+    themes:
+      - chem
+    realms:
+      - atmos
+    variables:
+      tas:
+        preprocessor: annual_cycle
+        reference_dataset: ERA-Interim
+        mip: Amon
+        ensemble: r1i1p1
+        start_year: 1980
+        end_year: 1999
+        additional_datasets:
+          - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1,
+             tier: 3}
+    scripts:
+      clim: &Fig938_settings
+        script: regional_downscaling/Figure9.38.ncl
+        styleset: CMIP5
+        fig938_region_label: ["WNA", "ENA", "CAM", "TSA", "SSA", "EUM", "NAF",
+                              "CAF", "SAF", "NAS", "CAS", "EAS", "SAS", "SEA",
+                              "AUS"]
+        fig938_project_MMM: ["CMIP5"]
+        fig938_experiment_MMM: ["historical"]
+        fig938_mip_MMM: ["Amon"]
+        fig938_colors_MMM: ["red"]
+        fig938_refModel: ERA-Interim
+        fig938_MMM: true
+        fig938_diff: true
+
+  # ==========================================================================
+  # Chapter 9 - Figure939
+  # ==========================================================================
+  regional_downscaling_Fig939:
+    description: Creates seasonal or annual bias box plots at selected
+                 regions over the selected project&experiment&mip.
+    themes:
+      - chem
+    realms:
+      - atmos
+    variables:
+      tas:
+        preprocessor: annual_cycle
+        reference_dataset: ERA-Interim
+        mip: Amon
+        ensemble: r1i1p1
+        start_year: 1980
+        end_year: 1999
+        additional_datasets:
+          - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1,
+             tier: 3}
+
+    scripts:
+      clim: &Fig939_settings
+        script: regional_downscaling/Figure9.39.ncl
+        styleset: CMIP5
+        fig939_season: ["DJF", "JJA", "ANN"]
+        fig939_region_label: ["ALAs", "CGIs", "WNAs", "CNAs", "ENAs", "CAMs",
+                              "AMZs", "NEBs", "WSAs", "SSAs", "NEUs", "CEUs",
+                              "MEDs", "SAHs", "WAFs", "EAFs", "SAFs", "NASs",
+                              "WASs", "CASs", "TIBs", "EASs", "SASs", "SEAs",
+                              "NAUs", "SAUs"]
+        fig939_project_MMM: ["CMIP5"]
+        fig939_experiment_MMM: ["historical"]
+        fig939_mip_MMM: ["Amon"]
+        fig939_MMM: true
+
+  # ==========================================================================
+  # Chapter 9 - Figure940
+  # ==========================================================================
+  regional_downscaling_Fig940:
+    description: Creates annual cycle regional plots for individual models
+                 and multimodel over the selected project&experiment&mip.
+ themes: + - chem + realms: + - atmos + variables: + pr: + preprocessor: clima_nomask + reference_dataset: CRU + mip: Amon + ensemble: r1i1p1 + start_year: 1980 + end_year: 1999 + additional_datasets: + - {dataset: CRU, project: OBS, type: reanaly, version: TS4.02, tier: 2} + scripts: + clim: &Fig940_settings + script: regional_downscaling/Figure9.40.ncl + styleset: CMIP5 + fig940_season: ["DJF", "JJA", "ANN"] + fig940_region_label: ["Arctic_land", "Arctic_sea", "Antarctic_land", + "Antarctic_sea", "Caribbean", + "WesternIndianOcean", + "NorthernIndianOcean", "NorthernTropicalPacific", + "EquatorialTropicalPacific", + "SouthernTropicalPacific", "World_land", + "World_sea", "World"] + fig940_project_MMM: ["CMIP5"] + fig940_experiment_MMM: ["historical"] + fig940_mip_MMM: ["Amon"] + fig940_MMM: true + + # ========================================================================== + # Chapter 9 - Figure941 + # ========================================================================== + regional_downscaling_Fig941: + description: Creates ranked modelled versus reference data set + regional mean temperature plots. + themes: + - chem + realms: + - atmos + variables: + tas: + preprocessor: regr + reference_dataset: ERA-Interim + mip: Amon + ensemble: r1i1p1 + start_year: 1979 + end_year: 2000 + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, + tier: 3} + scripts: + clim: &Fig941_settings + script: regional_downscaling/Figure9.41.ncl + fig941_region_label: ["MEDs"] + styleset: CMIP5 diff --git a/esmvaltool/recipes/ipccwg1ar6ch3/recipe_ipccwg1ar6ch3_atmosphere.yml b/esmvaltool/recipes/ipccwg1ar6ch3/recipe_ipccwg1ar6ch3_atmosphere.yml new file mode 100644 index 0000000000..316f24bcb5 --- /dev/null +++ b/esmvaltool/recipes/ipccwg1ar6ch3/recipe_ipccwg1ar6ch3_atmosphere.yml @@ -0,0 +1,1052 @@ +# ESMValTool +# recipe_ipccwg1ar6ch3_atmosphere.yml +--- +documentation: + + title: IPCC AR6 WG1 Chapter 3 figures + + description: | + Producing Fig. 3.3, 3.4, 3.5, 3.13, 3.15 in the IPCC Working Group I + Contribution to the Sixth Assessment Report: Chapter 3 + + authors: + - bock_lisa + - eyring_veronika + + maintainer: + - bock_lisa + + projects: + - ipcc_ar6 + + references: + - eyring21ipcc + + +preprocessors: + + preproc_map: + regrid: + target_grid: 1x1 + scheme: linear + + clim: + regrid: + target_grid: 1x1 + scheme: linear + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [reference_dataset] + + regrid_5_5: + regrid: + target_grid: 5x5 + scheme: linear + + regrid_5_5_mmm: + regrid: + target_grid: 5x5 + scheme: linear + multi_model_statistics: + span: full + statistics: [mean] + + clim_5x5_land: + mask_landsea: + mask_out: sea + regrid: + target_grid: 5x5 + scheme: linear + multi_model_statistics: + span: full + statistics: [mean] + exclude: [reference_dataset] + + clim_5x5_land_60N-90N: + mask_landsea: + mask_out: sea + regrid: + target_grid: 5x5 + scheme: linear + extract_region: + start_longitude: -180. + end_longitude: 180. + start_latitude: 60. + end_latitude: 90. + multi_model_statistics: + span: full + statistics: [mean] + exclude: [reference_dataset] + + clim_5x5_land_30N-60N: + mask_landsea: + mask_out: sea + regrid: + target_grid: 5x5 + scheme: linear + extract_region: + start_longitude: -180. + end_longitude: 180. + start_latitude: 30. + end_latitude: 60. 
+ multi_model_statistics: + span: full + statistics: [mean] + exclude: [reference_dataset] + + clim_5x5_land_30S-30N: + mask_landsea: + mask_out: sea + regrid: + target_grid: 5x5 + scheme: linear + extract_region: + start_longitude: -180. + end_longitude: 180. + start_latitude: -30. + end_latitude: 30. + multi_model_statistics: + span: full + statistics: [mean] + exclude: [reference_dataset] + + clim_5x5_land_30S-60S: + mask_landsea: + mask_out: sea + regrid: + target_grid: 5x5 + scheme: linear + extract_region: + start_longitude: -180. + end_longitude: 180. + start_latitude: -60. + end_latitude: -30. + multi_model_statistics: + span: full + statistics: [mean] + exclude: [reference_dataset] + + +models_cmip6_historical: &models_cmip6_historical # historical, Amon, tas+pr + - {dataset: ACCESS-CM2, ensemble: r1i1p1f1, grid: gn} + - {dataset: ACCESS-ESM1-5, ensemble: r1i1p1f1, grid: gn} + - {dataset: AWI-CM-1-1-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: AWI-ESM-1-1-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn} + - {dataset: CanESM5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-FV2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-WACCM, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-WACCM-FV2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CIESM, ensemble: r1i1p1f1, grid: gr} + - {dataset: CMCC-CM2-HR4, ensemble: r1i1p1f1, grid: gn} + - {dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CMCC-ESM2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: EC-Earth3, ensemble: r1i1p1f1, grid: gr} + - {dataset: EC-Earth3-AerChem, ensemble: r1i1p1f1, grid: gr} + - {dataset: EC-Earth3-CC, ensemble: r1i1p1f1, grid: gr} + - {dataset: EC-Earth3-Veg, ensemble: r1i1p1f1, grid: gr} + - {dataset: EC-Earth3-Veg-LR, ensemble: r(1:3)i1p1f1, grid: gr} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr} + - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn} + - {dataset: FIO-ESM-2-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1} + - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1} + - {dataset: GISS-E2-1-G, ensemble: r1i1p1f2, grid: gn} + - {dataset: GISS-E2-1-G-CC, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn} + - {dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f3, grid: gn} + - {dataset: IITM-ESM, ensemble: r1i1p1f1, grid: gn} + - {dataset: INM-CM4-8, ensemble: r1i1p1f1, grid: gr1} + - {dataset: INM-CM5-0, ensemble: r1i1p1f1, grid: gr1} + - {dataset: IPSL-CM5A2-INCA, ensemble: r1i1p1f1, grid: gr} + - {dataset: IPSL-CM6A-LR, ensemble: r1i1p1f1, grid: gr} + - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr} + - {dataset: MCM-UA-1-0, ensemble: r1i1p1f2, grid: gn} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn} + - {dataset: MPI-ESM-1-2-HAM, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-HR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: NESM3, 
ensemble: r1i1p1f1, grid: gn} + - {dataset: NorCPM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorESM2-MM, ensemble: r1i1p1f1, grid: gn} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + + +datasets_cmip6: &cmip6_models # historical+ssp245, all ens members, Amon, tas+pr + - {dataset: ACCESS-CM2, ensemble: r(1:3)i1p1f1, grid: gn} + - {dataset: ACCESS-ESM1-5, ensemble: r(1:11)i1p1f1, grid: gn} + - {dataset: ACCESS-ESM1-5, ensemble: r19i1p1f1, grid: gn} + - {dataset: AWI-CM-1-1-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: AWI-ESM-1-1-LR, ensemble: r1i1p1f1, grid: gn, end_year: 2014} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-ESM1, ensemble: r(1:3)i1p1f1, grid: gn, end_year: 2014} + - {dataset: CAMS-CSM1-0, ensemble: r(1:2)i1p1f1, grid: gn} + - {dataset: CanESM5-CanOE, ensemble: r(1:3)i1p2f1, grid: gn} + - {dataset: CanESM5, ensemble: r(1:25)i1p1f1, grid: gn} + - {dataset: CanESM5, ensemble: r(1:25)i1p2f1, grid: gn} + - {dataset: CESM2, ensemble: r4i1p1f1, grid: gn} + - {dataset: CESM2, ensemble: r(10:11)i1p1f1, grid: gn} + - {dataset: CESM2-FV2, ensemble: r(1:3)i1p1f1, grid: gn, end_year: 2014} + - {dataset: CESM2-WACCM, ensemble: r(1:3)i1p1f1, grid: gn} + - {dataset: CESM2-WACCM-FV2, ensemble: r(1:3)i1p1f1, grid: gn, end_year: 2014} + - {dataset: CIESM, ensemble: r1i1p1f1, grid: gr} + - {dataset: CMCC-CM2-HR4, ensemble: r1i1p1f1, grid: gn, end_year: 2014} + - {dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CMCC-ESM2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CNRM-CM6-1, ensemble: r(1:10)i1p1f2, grid: gr} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-ESM2-1, ensemble: r(1:5)i1p1f2, grid: gr} + - {dataset: CNRM-ESM2-1, ensemble: r(7:10)i1p1f2, grid: gr} + - {dataset: EC-Earth3, ensemble: r(1:2)i1p1f1, grid: gr} + - {dataset: EC-Earth3, ensemble: r4i1p1f1, grid: gr} + - {dataset: EC-Earth3, ensemble: r(6:7)i1p1f1, grid: gr} + - {dataset: EC-Earth3, ensemble: r(9:10)i1p1f1, grid: gr} + - {dataset: EC-Earth3-AerChem, ensemble: r1i1p1f1, grid: gr, end_year: 2014} + - {dataset: EC-Earth3-AerChem, ensemble: r4i1p1f1, grid: gr, end_year: 2014} + - {dataset: EC-Earth3-CC, ensemble: r1i1p1f1, grid: gr} + - {dataset: EC-Earth3-Veg, ensemble: r(1:6)i1p1f1, grid: gr} + - {dataset: EC-Earth3-Veg, ensemble: r12i1p1f1, grid: gr} + - {dataset: EC-Earth3-Veg, ensemble: r14i1p1f1, grid: gr} + - {dataset: EC-Earth3-Veg-LR, ensemble: r(1:3)i1p1f1, grid: gr} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr} + - {dataset: FGOALS-g3, ensemble: r(1:4)i1p1f1, grid: gn} + - {dataset: FIO-ESM-2-0, ensemble: r(1:3)i1p1f1, grid: gn} + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1} + - {dataset: GFDL-ESM4, ensemble: r(1:3)i1p1f1, grid: gr1} + - {dataset: GISS-E2-1-G, ensemble: r(1:10)i1p1f2, grid: gn} + - {dataset: GISS-E2-1-G, ensemble: r(1:5)i1p3f1, grid: gn} + - {dataset: GISS-E2-1-G, ensemble: r(1:4)i1p5f1, grid: gn} + - {dataset: GISS-E2-1-G-CC, ensemble: r1i1p1f1, grid: gn, end_year: 2014} + - {dataset: GISS-E2-1-H, ensemble: r(1:10)i1p1f1, grid: gn, end_year: 2014} + - {dataset: GISS-E2-1-H, ensemble: r(1:5)i1p1f2, grid: gn, end_year: 2014} + - {dataset: GISS-E2-1-H, ensemble: r(1:5)i1p3f1, grid: gn, end_year: 2014} + - {dataset: GISS-E2-1-H, ensemble: r(1:5)i1p5f1, grid: gn, end_year: 2014} + - {dataset: HadGEM3-GC31-LL, ensemble: 
r(1:4)i1p1f3, grid: gn} + - {dataset: HadGEM3-GC31-MM, ensemble: r(1:4)i1p1f3, grid: gn, end_year: 2014} + - {dataset: IITM-ESM, ensemble: r1i1p1f1, grid: gn} + - {dataset: INM-CM4-8, ensemble: r1i1p1f1, grid: gr1} + - {dataset: INM-CM5-0, ensemble: r1i1p1f1, grid: gr1} + - {dataset: IPSL-CM5A2-INCA, ensemble: r1i1p1f1, grid: gr, end_year: 2014} + - {dataset: IPSL-CM6A-LR, ensemble: r(1:6)i1p1f1, grid: gr} + - {dataset: IPSL-CM6A-LR, ensemble: r(10:11)i1p1f1, grid: gr} + - {dataset: IPSL-CM6A-LR, ensemble: r14i1p1f1, grid: gr} + - {dataset: IPSL-CM6A-LR, ensemble: r22i1p1f1, grid: gr} + - {dataset: IPSL-CM6A-LR, ensemble: r25i1p1f1, grid: gr} + - {dataset: KACE-1-0-G, ensemble: r(1:3)i1p1f1, grid: gr} + - {dataset: KIOST-ESM, ensemble: r1i1p1f1, grid: gr1} + - {dataset: MCM-UA-1-0, ensemble: r1i1p1f2, grid: gn} + - {dataset: MIROC6, ensemble: r(1:20)i1p1f1, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r(1:2)i1p1f2, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r4i1p1f2, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r(6:9)i1p1f2, grid: gn} + - {dataset: MPI-ESM-1-2-HAM, ensemble: r(1:3)i1p1f1, grid: gn, end_year: 2014} + - {dataset: MPI-ESM1-2-HR, ensemble: r(1:10)i1p1f1, grid: gn, end_year: 2014} + - {dataset: MPI-ESM1-2-LR, ensemble: r(1:10)i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, ensemble: r(1:5)i1p1f1, grid: gn, end_year: 2014} + - {dataset: NESM3, ensemble: r(1:2)i1p1f1, grid: gn} + - {dataset: NorCPM1, ensemble: r(1:30)i1p1f1, grid: gn, end_year: 2014} + - {dataset: NorESM2-LM, ensemble: r(1:3)i1p1f1, grid: gn} + - {dataset: NorESM2-MM, ensemble: r(1:2)i1p1f1, grid: gn} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn, end_year: 2014} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r(1:2)i1p1f2, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r4i1p1f2, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r8i1p1f2, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r13i1p1f2, grid: gn} + + +datasets_cmip6_pr: &cmip6_models_pr # historical+ssp245, all ens members, Amon, pr + - {dataset: ACCESS-CM2, ensemble: r(1:3)i1p1f1, grid: gn} + - {dataset: ACCESS-ESM1-5, ensemble: r(1:11)i1p1f1, grid: gn} + - {dataset: ACCESS-ESM1-5, ensemble: r19i1p1f1, grid: gn} + - {dataset: AWI-CM-1-1-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: AWI-ESM-1-1-LR, ensemble: r1i1p1f1, grid: gn, end_year: 2014} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-ESM1, ensemble: r(1:3)i1p1f1, grid: gn, end_year: 2014} + - {dataset: CAMS-CSM1-0, ensemble: r(1:2)i1p1f1, grid: gn} + - {dataset: CanESM5-CanOE, ensemble: r(1:3)i1p2f1, grid: gn} + - {dataset: CanESM5, ensemble: r(1:25)i1p1f1, grid: gn} + - {dataset: CanESM5, ensemble: r(1:25)i1p2f1, grid: gn} + - {dataset: CESM2, ensemble: r4i1p1f1, grid: gn} + - {dataset: CESM2, ensemble: r(10:11)i1p1f1, grid: gn} + - {dataset: CESM2-FV2, ensemble: r(1:3)i1p1f1, grid: gn, end_year: 2014} + - {dataset: CESM2-WACCM, ensemble: r(1:3)i1p1f1, grid: gn} + - {dataset: CESM2-WACCM-FV2, ensemble: r(1:3)i1p1f1, grid: gn, end_year: 2014} + - {dataset: CIESM, ensemble: r1i1p1f1, grid: gr} + - {dataset: CMCC-CM2-HR4, ensemble: r1i1p1f1, grid: gn, end_year: 2014} + - {dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CMCC-ESM2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CNRM-CM6-1, ensemble: r(1:10)i1p1f2, grid: gr} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-ESM2-1, ensemble: r(1:5)i1p1f2, grid: gr} + - {dataset: CNRM-ESM2-1, ensemble: r(7:10)i1p1f2, grid: gr} + - 
{dataset: E3SM-1-0, ensemble: r(1:5)i1p1f1, grid: gr, end_year: 2014} + - {dataset: E3SM-1-1-ECA, ensemble: r1i1p1f1, grid: gr, end_year: 2014} + - {dataset: E3SM-1-1, ensemble: r1i1p1f1, grid: gr, end_year: 2014} + - {dataset: EC-Earth3, ensemble: r(1:4)i1p1f1, grid: gr} + - {dataset: EC-Earth3, ensemble: r(6:7)i1p1f1, grid: gr} + - {dataset: EC-Earth3, ensemble: r(9:10)i1p1f1, grid: gr} + - {dataset: EC-Earth3-AerChem, ensemble: r1i1p1f1, grid: gr, end_year: 2014} + - {dataset: EC-Earth3-AerChem, ensemble: r4i1p1f1, grid: gr, end_year: 2014} + - {dataset: EC-Earth3-CC, ensemble: r1i1p1f1, grid: gr} + - {dataset: EC-Earth3-Veg, ensemble: r(1:6)i1p1f1, grid: gr} + - {dataset: EC-Earth3-Veg, ensemble: r12i1p1f1, grid: gr} + - {dataset: EC-Earth3-Veg, ensemble: r14i1p1f1, grid: gr} + - {dataset: EC-Earth3-Veg-LR, ensemble: r(1:3)i1p1f1, grid: gr} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr} + - {dataset: FGOALS-g3, ensemble: r(1:4)i1p1f1, grid: gn} + - {dataset: FIO-ESM-2-0, ensemble: r(1:3)i1p1f1, grid: gn} + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1} + - {dataset: GFDL-ESM4, ensemble: r(1:3)i1p1f1, grid: gr1} + - {dataset: GISS-E2-1-G, ensemble: r(1:10)i1p1f2, grid: gn} + - {dataset: GISS-E2-1-G, ensemble: r(1:5)i1p3f1, grid: gn} + - {dataset: GISS-E2-1-G, ensemble: r(1:4)i1p5f1, grid: gn} + - {dataset: GISS-E2-1-G-CC, ensemble: r1i1p1f1, grid: gn, end_year: 2014} + - {dataset: GISS-E2-1-H, ensemble: r(1:10)i1p1f1, grid: gn, end_year: 2014} + - {dataset: GISS-E2-1-H, ensemble: r(1:5)i1p1f2, grid: gn, end_year: 2014} + - {dataset: GISS-E2-1-H, ensemble: r(1:5)i1p3f1, grid: gn, end_year: 2014} + - {dataset: GISS-E2-1-H, ensemble: r(1:5)i1p5f1, grid: gn, end_year: 2014} + - {dataset: HadGEM3-GC31-LL, ensemble: r(1:4)i1p1f3, grid: gn} + - {dataset: HadGEM3-GC31-MM, ensemble: r(1:4)i1p1f3, grid: gn, end_year: 2014} + - {dataset: IITM-ESM, ensemble: r1i1p1f1, grid: gn} + - {dataset: INM-CM4-8, ensemble: r1i1p1f1, grid: gr1} + - {dataset: INM-CM5-0, ensemble: r1i1p1f1, grid: gr1} + - {dataset: IPSL-CM5A2-INCA, ensemble: r1i1p1f1, grid: gr, end_year: 2014} + - {dataset: IPSL-CM6A-LR, ensemble: r(1:6)i1p1f1, grid: gr} + - {dataset: IPSL-CM6A-LR, ensemble: r(10:11)i1p1f1, grid: gr} + - {dataset: IPSL-CM6A-LR, ensemble: r14i1p1f1, grid: gr} + - {dataset: IPSL-CM6A-LR, ensemble: r22i1p1f1, grid: gr} + - {dataset: IPSL-CM6A-LR, ensemble: r25i1p1f1, grid: gr} + - {dataset: KACE-1-0-G, ensemble: r(1:3)i1p1f1, grid: gr} + - {dataset: MCM-UA-1-0, ensemble: r1i1p1f2, grid: gn} + - {dataset: MIROC6, ensemble: r(1:50)i1p1f1, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r(1:2)i1p1f2, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r4i1p1f2, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r(6:9)i1p1f2, grid: gn} + - {dataset: MPI-ESM-1-2-HAM, ensemble: r(1:3)i1p1f1, grid: gn, end_year: 2014} + - {dataset: MPI-ESM1-2-HR, ensemble: r(1:10)i1p1f1, grid: gn, end_year: 2014} + - {dataset: MPI-ESM1-2-LR, ensemble: r(1:10)i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, ensemble: r(1:5)i1p1f1, grid: gn} + - {dataset: NESM3, ensemble: r(1:2)i1p1f1, grid: gn} + - {dataset: NorCPM1, ensemble: r(1:30)i1p1f1, grid: gn, end_year: 2014} + - {dataset: NorESM2-LM, ensemble: r(1:3)i1p1f1, grid: gn} + - {dataset: NorESM2-MM, ensemble: r(1:2)i1p1f1, grid: gn} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn, end_year: 2014} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r(1:2)i1p1f2, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r4i1p1f2, grid: gn} + - {dataset: 
UKESM1-0-LL, ensemble: r8i1p1f2, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r13i1p1f2, grid: gn} + + +models_cmip6_piControl: &models_cmip6_piControl # piControl, Amon, tas+pr + - {dataset: ACCESS-CM2, grid: gn, institute: CSIRO-ARCCSS, start_year: 950, end_year: 1149} + - {dataset: AWI-CM-1-1-MR, grid: gn, start_year: 2401, end_year: 2900} + - {dataset: AWI-ESM-1-1-LR, grid: gn, start_year: 1855, end_year: 1954} + - {dataset: BCC-CSM2-MR, grid: gn, start_year: 1850, end_year: 2449} + - {dataset: BCC-ESM1, grid: gn, start_year: 1850, end_year: 2300} + - {dataset: CAMS-CSM1-0, grid: gn, start_year: 2900, end_year: 3399} + - {dataset: CanESM5, grid: gn, start_year: 5201, end_year: 6200} + - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn, start_year: 5550, end_year: 6050} + - {dataset: CESM2, grid: gn, start_year: 1, end_year: 1200} + - {dataset: CESM2-FV2, institute: NCAR, grid: gn, start_year: 1, end_year: 500} + - {dataset: CESM2-WACCM, institute: NCAR, grid: gn, start_year: 1, end_year: 499} + - {dataset: CESM2-WACCM-FV2, institute: NCAR, grid: gn, start_year: 1, end_year: 500} + - {dataset: CIESM, start_year: 1, end_year: 500} + - {dataset: CMCC-CM2-SR5, grid: gn, start_year: 1850, end_year: 2099} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, start_year: 1850, end_year: 2349} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, start_year: 1850, end_year: 2149} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, start_year: 1850, end_year: 2349} + - {dataset: E3SM-1-0, start_year: 1, end_year: 500} + - {dataset: EC-Earth3, start_year: 2259, end_year: 2759} + - {dataset: EC-Earth3-AerChem, start_year: 1850, end_year: 2160} + - {dataset: EC-Earth3-LR, start_year: 2219, end_year: 2419} + - {dataset: EC-Earth3-Veg, start_year: 1850, end_year: 2349} + - {dataset: EC-Earth3-Veg-LR, start_year: 2300, end_year: 2800} + - {dataset: FGOALS-f3-L, start_year: 600, end_year: 1160} + - {dataset: FGOALS-g3, grid: gn, start_year: 200, end_year: 899} + - {dataset: GFDL-CM4, grid: gr1, start_year: 151, end_year: 650} + - {dataset: GFDL-ESM4, grid: gr1, start_year: 151, end_year: 500} + - {dataset: GISS-E2-1-G, grid: gn, start_year: 4150, end_year: 5000} + - {dataset: GISS-E2-1-H, grid: gn, start_year: 3180, end_year: 3980} + - {dataset: HadGEM3-GC31-LL, grid: gn, start_year: 1850, end_year: 2349} + - {dataset: HadGEM3-GC31-MM, grid: gn, start_year: 1850, end_year: 2349} + - {dataset: IITM-ESM, grid: gn, start_year: 1926, end_year: 2125} + - {dataset: INM-CM4-8, grid: gr1, start_year: 1850, end_year: 2380} + - {dataset: INM-CM5-0, grid: gr1, start_year: 1996, end_year: 3196} + - {dataset: IPSL-CM6A-LR, start_year: 1850, end_year: 2849} + - {dataset: KACE-1-0-G, start_year: 2000, end_year: 2449} + - {dataset: MCM-UA-1-0, grid: gn, start_year: 1, end_year: 500} + - {dataset: MIROC6, grid: gn, start_year: 3200, end_year: 3999} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn, start_year: 1850, end_year: 2349} + - {dataset: MPI-ESM-1-2-HAM, grid: gn, start_year: 1850, end_year: 2629} + - {dataset: MPI-ESM1-2-HR, grid: gn, start_year: 1850, end_year: 2349} + - {dataset: MPI-ESM1-2-LR, grid: gn, start_year: 1850, end_year: 2849} + - {dataset: MRI-ESM2-0, grid: gn, start_year: 1850, end_year: 2550} + - {dataset: NESM3, grid: gn, start_year: 500, end_year: 999} + - {dataset: NorCPM1, institute: NCC, grid: gn, start_year: 1, end_year: 500} + - {dataset: NorESM1-F, institute: NCC, grid: gn, start_year: 1501, end_year: 1700} + - {dataset: NorESM2-LM, institute: NCC, grid: gn, start_year: 1600, end_year: 2100} 
+ - {dataset: NorESM2-MM, institute: NCC, grid: gn, start_year: 1200, end_year: 1699} + - {dataset: SAM0-UNICON, grid: gn, start_year: 1, end_year: 700} + - {dataset: TaiESM1, grid: gn, start_year: 201, end_year: 700} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn, start_year: 1960, end_year: 3839} + + +models_cmip6_piControl_short: &models_cmip6_piControl_short # piControl, Amon, tas+pr + - {dataset: ACCESS-CM2, grid: gn, institute: CSIRO-ARCCSS, start_year: 950, end_year: 1014} + - {dataset: ACCESS-ESM1-5, grid: gn, institute: CSIRO, start_year: 101, end_year: 165} + - {dataset: AWI-CM-1-1-MR, grid: gn, start_year: 2401, end_year: 2465} + - {dataset: AWI-ESM-1-1-LR, grid: gn, start_year: 1855, end_year: 1919} + - {dataset: BCC-CSM2-MR, grid: gn, start_year: 1850, end_year: 1914} + - {dataset: BCC-ESM1, grid: gn, start_year: 1850, end_year: 1914} + - {dataset: CAMS-CSM1-0, grid: gn, start_year: 2900, end_year: 2964} + - {dataset: CanESM5, grid: gn, start_year: 5201, end_year: 5265} + - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn, start_year: 5550, end_year: 5614} + - {dataset: CESM2, grid: gn, start_year: 1, end_year: 65} + - {dataset: CESM2-FV2, institute: NCAR, grid: gn, start_year: 1, end_year: 65} + - {dataset: CESM2-WACCM, institute: NCAR, grid: gn, start_year: 1, end_year: 65} + - {dataset: CESM2-WACCM-FV2, institute: NCAR, grid: gn, start_year: 1, end_year: 65} + - {dataset: CIESM, start_year: 1, end_year: 65} + - {dataset: CMCC-CM2-SR5, grid: gn, start_year: 1850, end_year: 1914} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, start_year: 1850, end_year: 1914} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, start_year: 1850, end_year: 1914} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, start_year: 1850, end_year: 1914} + - {dataset: E3SM-1-0, start_year: 1, end_year: 65} + - {dataset: E3SM-1-1, institute: E3SM-Project, start_year: 1850, end_year: 1914} + - {dataset: E3SM-1-1-ECA, institute: E3SM-Project, start_year: 1850, end_year: 1914} + - {dataset: EC-Earth3, start_year: 2259, end_year: 2323} + - {dataset: EC-Earth3-AerChem, start_year: 1850, end_year: 1914} + - {dataset: EC-Earth3-LR, start_year: 2219, end_year: 2283} + - {dataset: EC-Earth3-Veg, start_year: 1850, end_year: 1914} + - {dataset: EC-Earth3-Veg-LR, start_year: 2300, end_year: 2364} + - {dataset: FGOALS-f3-L, start_year: 600, end_year: 664} + - {dataset: FGOALS-g3, grid: gn, start_year: 200, end_year: 264} + - {dataset: FIO-ESM-2-0, grid: gn, start_year: 301, end_year: 365} + - {dataset: GFDL-CM4, grid: gr1, start_year: 151, end_year: 215} + - {dataset: GFDL-ESM4, grid: gr1, start_year: 151, end_year: 215} + - {dataset: GISS-E2-1-G, grid: gn, start_year: 4150, end_year: 4214} + - {dataset: GISS-E2-1-G-CC, grid: gn, start_year: 1850, end_year: 1914} + - {dataset: GISS-E2-1-H, grid: gn, start_year: 3180, end_year: 3244} + - {dataset: GISS-E2-2-G, institute: NASA-GISS, grid: gn, start_year: 2000, end_year: 2064} + - {dataset: HadGEM3-GC31-LL, grid: gn, start_year: 1850, end_year: 1914} + - {dataset: HadGEM3-GC31-MM, grid: gn, start_year: 1850, end_year: 1914} + - {dataset: IITM-ESM, grid: gn, start_year: 1926, end_year: 1990} + - {dataset: INM-CM4-8, grid: gr1, start_year: 1850, end_year: 1914} + - {dataset: INM-CM5-0, grid: gr1, start_year: 1996, end_year: 2060} + - {dataset: IPSL-CM6A-LR, start_year: 1850, end_year: 1914} + - {dataset: KACE-1-0-G, start_year: 2000, end_year: 2064} + - {dataset: MCM-UA-1-0, grid: gn, start_year: 1, end_year: 65} + - {dataset: MIROC6, grid: gn, start_year: 3200, 
end_year: 3264} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn, start_year: 1850, end_year: 1914} + - {dataset: MPI-ESM-1-2-HAM, grid: gn, start_year: 1850, end_year: 1914} + - {dataset: MPI-ESM1-2-HR, grid: gn, start_year: 1850, end_year: 1914} + - {dataset: MPI-ESM1-2-LR, grid: gn, start_year: 1850, end_year: 1914} + - {dataset: MRI-ESM2-0, grid: gn, start_year: 1850, end_year: 1914} + - {dataset: NESM3, grid: gn, start_year: 500, end_year: 564} + - {dataset: NorCPM1, institute: NCC, grid: gn, start_year: 1, end_year: 64} + - {dataset: NorESM1-F, institute: NCC, grid: gn, start_year: 1501, end_year: 1565} + - {dataset: NorESM2-LM, institute: NCC, grid: gn, start_year: 1600, end_year: 1664} + - {dataset: NorESM2-MM, institute: NCC, grid: gn, start_year: 1200, end_year: 1264} + - {dataset: SAM0-UNICON, grid: gn, start_year: 1, end_year: 65} + - {dataset: TaiESM1, grid: gn, start_year: 201, end_year: 265} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn, start_year: 1960, end_year: 2024} + + +models_cmip6_hist-nat: &models_cmip6_hist-nat # hist-nat, Amon, tas+pr + - {dataset: ACCESS-ESM1-5, grid: gn, institute: CSIRO, ensemble: r(1:3)i1p1f1} + - {dataset: BCC-CSM2-MR, grid: gn, ensemble: r(1:3)i1p1f1} + - {dataset: CanESM5, ensemble: r(1:15)i1p1f1, grid: gn} + - {dataset: CanESM5, ensemble: r(20:25)i1p1f1, grid: gn} + - {dataset: CanESM5, ensemble: r(1:15)i1p2f1, grid: gn} + - {dataset: CanESM5, ensemble: r17i1p2f1, grid: gn} + - {dataset: CanESM5, ensemble: r(19:25)i1p2f1, grid: gn} + - {dataset: CESM2, grid: gn, ensemble: r(1:3)i1p1f1, end_year: 2014} + - {dataset: CNRM-CM6-1, ensemble: r(1:10)i1p1f2} + - {dataset: FGOALS-g3, grid: gn, ensemble: r(1:3)i1p1f1} + - {dataset: GFDL-CM4, grid: gr1, ensemble: r1i1p1f1, end_year: 2014} + - {dataset: GFDL-ESM4, grid: gr1, ensemble: r(1:3)i1p1f1} + - {dataset: GISS-E2-1-G, ensemble: r(1:5)i1p1f2, grid: gn, end_year: 2014} + - {dataset: GISS-E2-1-G, ensemble: r(1:9)i1p1f3, grid: gn, end_year: 2014} + - {dataset: GISS-E2-1-G, ensemble: r(1:5)i1p1f4, grid: gn, end_year: 2014} + - {dataset: HadGEM3-GC31-LL, ensemble: r(1:4)i1p1f3, grid: gn} + - {dataset: IPSL-CM6A-LR, ensemble: r(1:10)i1p1f1} + - {dataset: MIROC6, ensemble: r(1:4)i1p1f1, grid: gn} + - {dataset: MIROC6, ensemble: r37i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, grid: gn, ensemble: r(1:5)i1p1f1} + - {dataset: NorESM2-LM, grid: gn, institute: NCC, ensemble: r(1:3)i1p1f1} + + +models_cmip5_historical: &models_cmip5_historical + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R} + - {dataset: GISS-E2-R-CC} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} 
+ - {dataset: NorESM1-ME} + + +datasets_cmip5: &cmip5_models # historical+ssp245, all ens members, Amon, tas + - {dataset: ACCESS1-0, ensemble: r1i1p1} + - {dataset: ACCESS1-3, ensemble: r1i1p1} + - {dataset: bcc-csm1-1, ensemble: r1i1p1} + - {dataset: bcc-csm1-1-m, ensemble: r1i1p1} + - {dataset: BNU-ESM} + - {dataset: CanESM2, ensemble: r(1:5)i1p1} + - {dataset: CCSM4, ensemble: r(1:5)i1p1} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5, ensemble: r(1:3)i1p1} + - {dataset: CESM1-FASTCHEM, ensemble: r(1:3)i1p1, end_year: 2004} + - {dataset: CESM1-WACCM, ensemble: r1i1p1, end_year: 2004} + - {dataset: CMCC-CESM, end_year: 2004} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5, ensemble: r1i1p1} + - {dataset: CNRM-CM5-2, ensemble: r1i1p1, end_year: 2004} + - {dataset: CSIRO-Mk3-6-0, ensemble: r(1:10)i1p1} + - {dataset: EC-EARTH, ensemble: r5i1p1} + - {dataset: FGOALS-g2, ensemble: r1i1p1} + - {dataset: FGOALS-s2, ensemble: r(2:3)i1p1, end_year: 2004} + - {dataset: FIO-ESM, ensemble: r(1:3)i1p1} + - {dataset: GFDL-CM2p1, ensemble: r(1:10)i1p1, start_year: 1861} + - {dataset: GFDL-CM3, ensemble: r1i1p1, start_year: 1860} + - {dataset: GFDL-ESM2G, start_year: 1861} + - {dataset: GFDL-ESM2M, start_year: 1861} + - {dataset: GISS-E2-H, ensemble: r(1:5)i1p1} + - {dataset: GISS-E2-H, ensemble: r(1:5)i1p2} + - {dataset: GISS-E2-H, ensemble: r(1:6)i1p3} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r(1:6)i1p1} + - {dataset: GISS-E2-R, ensemble: r(1:5)i1p2} + - {dataset: GISS-E2-R, ensemble: r(1:6)i1p3} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3, ensemble: r(1:10)i1p1, start_year: 1860} + - {dataset: HadGEM2-AO, start_year: 1860} + - {dataset: HadGEM2-CC, ensemble: r1i1p1, start_year: 1860, end_year: 2004} + - {dataset: HadGEM2-ES, ensemble: r(1:4)i1p1, start_year: 1860, end_year: 2004} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR, ensemble: r(1:4)i1p1} + - {dataset: IPSL-CM5A-MR, ensemble: r1i1p1} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5, ensemble: r(1:5)i1p1} + - {dataset: MIROC-ESM, ensemble: r1i1p1} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR, ensemble: r(1:3)i1p1} + - {dataset: MPI-ESM-MR, ensemble: r(1:3)i1p1} + - {dataset: MPI-ESM-P, ensemble: r(1:2)i1p1, end_year: 2004} + - {dataset: MRI-CGCM3, ensemble: r1i1p1} + - {dataset: MRI-ESM1, start_year: 1851, end_year: 2004} + - {dataset: NorESM1-M, ensemble: r1i1p1} + - {dataset: NorESM1-ME, ensemble: r1i1p1} + + +models_cmip5_piControl: &models_cmip5_piControl + - {dataset: ACCESS1-0, start_year: 300, end_year: 799} + - {dataset: ACCESS1-3, start_year: 250, end_year: 749} + - {dataset: bcc-csm1-1, start_year: 1, end_year: 500} + - {dataset: bcc-csm1-1-m, start_year: 1, end_year: 400} + - {dataset: BNU-ESM, start_year: 1450, end_year: 2008} + - {dataset: CanESM2, start_year: 2015, end_year: 3010} + - {dataset: CCSM4, start_year: 250, end_year: 1300} + - {dataset: CESM1-BGC, start_year: 101, end_year: 600} + - {dataset: CESM1-CAM5, start_year: 1, end_year: 319} + - {dataset: CESM1-FASTCHEM, start_year: 70, end_year: 291} + - {dataset: CESM1-WACCM, start_year: 96, end_year: 295} + - {dataset: CMCC-CESM, start_year: 4324, end_year: 4600} + - {dataset: CMCC-CM, start_year: 1550, end_year: 1879} + - {dataset: CMCC-CMS, start_year: 3684, end_year: 4183} + - {dataset: CNRM-CM5, start_year: 1850, end_year: 2699} + - {dataset: CNRM-CM5-2, start_year: 1850, end_year: 2208} + - {dataset: CSIRO-Mk3-6-0, start_year: 1, end_year: 500} + - {dataset: FGOALS-g2, start_year: 201, end_year: 900} + 
+  - {dataset: FGOALS-s2, start_year: 1850, end_year: 2350}
+  - {dataset: FIO-ESM, start_year: 401, end_year: 1200}
+  - {dataset: GFDL-CM3, start_year: 1, end_year: 500}
+  - {dataset: GFDL-ESM2G, start_year: 1, end_year: 500}
+  - {dataset: GFDL-ESM2M, start_year: 1, end_year: 500}
+  - {dataset: GISS-E2-H, start_year: 1180, end_year: 1419}
+  - {dataset: GISS-E2-H-CC, start_year: 2081, end_year: 2331}
+  - {dataset: GISS-E2-R, start_year: 3331, end_year: 3630}
+  - {dataset: GISS-E2-R-CC, start_year: 2050, end_year: 2300}
+  - {dataset: HadGEM2-CC, start_year: 1859, end_year: 2099}
+  - {dataset: HadGEM2-ES, start_year: 1859, end_year: 2435}
+  - {dataset: inmcm4, start_year: 1850, end_year: 2349}
+  - {dataset: IPSL-CM5A-LR, start_year: 1800, end_year: 2799}
+  - {dataset: IPSL-CM5A-MR, start_year: 1800, end_year: 2099}
+  - {dataset: IPSL-CM5B-LR, start_year: 1830, end_year: 2129}
+  - {dataset: MIROC-ESM, start_year: 1800, end_year: 2429}
+  - {dataset: MIROC-ESM-CHEM, start_year: 1846, end_year: 2100}
+  - {dataset: MPI-ESM-LR, start_year: 1850, end_year: 2849}
+  - {dataset: MPI-ESM-MR, start_year: 1850, end_year: 2849}
+  - {dataset: MPI-ESM-P, start_year: 1850, end_year: 3005}
+  - {dataset: MRI-CGCM3, start_year: 1851, end_year: 2350}
+  - {dataset: NorESM1-M, start_year: 700, end_year: 1200}
+  - {dataset: NorESM1-ME, start_year: 901, end_year: 1152}
+
+
+diagnostics:
+
+  # **********************************************************************
+  # IPCC WGI AR6, chap. 3
+  # Sect. 3.3
+  # Fig. 3.3 a,b,c,d
+  # **********************************************************************
+  # Multi model mean, multi model mean bias and root mean square error
+  # (geographical distributions) for CMIP6 and CMIP5
+  # **********************************************************************
+
+  IAV_calc_tas_cmip6: &diag_IAV
+    description: Calculate temperature interannual variability
+                 for stippling significance
+    themes:
+      - varmodes
+    realms:
+      - atmos
+    variables:
+      tas:
+        preprocessor: preproc_map
+        project: CMIP6
+        mip: Amon
+        exp: piControl
+        ensemble: r1i1p1f1
+        grid: gr
+        additional_datasets: *models_cmip6_piControl
+    scripts:
+      calc_IAV_for_stippandhatch: &IAV_script
+        script: ipcc_ar5/ch12_calc_IAV_for_stippandhatch.ncl
+        time_avg: annualclim
+        periodlength: 20.
+        iavmode: mmm
+
+  IAV_calc_tas_cmip5:
+    <<: *diag_IAV
+    variables:
+      tas:
+        preprocessor: preproc_map
+        project: CMIP5
+        mip: Amon
+        exp: piControl
+        ensemble: r1i1p1
+        additional_datasets: *models_cmip5_piControl
+
+
+  fig_3_3_cmip6:
+    description: IPCC AR6 Ch. 3, Fig. 3.3 (near-surface temperature)
+    variables:
+      tas:
+        preprocessor: clim
+        reference_dataset: ERA5
+        mip: Amon
+        project: CMIP6
+        exp: historical
+        ensemble: r1i1p1f1
+        grid: gr
+        start_year: 1995
+        end_year: 2014
+        additional_datasets: *models_cmip6_historical
+    additional_datasets:
+      - {dataset: ERA5, project: native6, type: reanaly, version: v1, tier: 3}
+    scripts:
+      fig_3_3:
+        script: ipcc_ar6/model_bias.ncl
+        ancestors: ['tas', 'IAV_calc_tas_cmip6/calc_IAV_for_stippandhatch']
+        projection: Robinson
+        mean_caption: "CMIP6"
+        bias_caption: "CMIP6 bias"
+        plot_rms_diff: true
+        rmsd_caption: "CMIP6 root mean square error"
+        stippandhatch: true
+        panel_title: "Near-Surface Air Temperature"
+
+
+  fig_3_3_cmip5:
+    description: IPCC AR6 Ch. 3, Fig. 3.3 (near-surface temperature)
+    variables:
+      tas:
+        preprocessor: clim
+        reference_dataset: ERA5
+        mip: Amon
+        project: CMIP5
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 1985
+        end_year: 2004
+        additional_datasets: *models_cmip5_historical
+    additional_datasets:
+      - {dataset: ERA5, project: native6, type: reanaly, version: v1, tier: 3}
+    scripts:
+      fig_3_3:
+        script: ipcc_ar6/model_bias.ncl
+        ancestors: ['tas', 'IAV_calc_tas_cmip5/calc_IAV_for_stippandhatch']
+        projection: Robinson
+        mean_caption: "CMIP5"
+        bias_caption: "CMIP5 bias"
+        panel_num: ['c)', 'd)']
+        stippandhatch: true
+
+
+  # **********************************************************************
+  # IPCC WGI AR6, chap. 3
+  # Sect. 3.3
+  # Fig. 3.4
+  # **********************************************************************
+  # Timeseries of Global Near-Surface Air Temperature Anomaly
+  # **********************************************************************
+
+  fig_3_4_cmip6:
+    description: Timeseries of Global Near-Surface Air Temperature Anomaly
+    variables:
+      tas: &tas_settings_cmip6
+        preprocessor: regrid_5_5_mmm
+        mip: Amon
+        project: CMIP6
+        exp: [historical, ssp245]
+        grid: gr
+        ensemble: r1i1p1f1
+        start_year: 1850
+        end_year: 2020
+        additional_datasets: *cmip6_models
+      tasa:
+        <<: *tas_settings_cmip6
+        preprocessor: regrid_5_5
+        additional_datasets:
+          - {dataset: HadCRUT5, project: OBS, type: ground,
+             version: 5.0.1.0-analysis, tier: 2}
+    scripts:
+      fig-3-4: &tsline_settings
+        script: ipcc_ar6/tas_anom.ncl
+        ref_start: 1850
+        ref_end: 1900
+        plot_units: "degC"
+        y_min: -0.7
+        y_max: 2.1
+        volcanoes: true
+        blending: false
+        ref_mask: true
+        write_stat: true
+        styleset: CMIP6_ipcc
+
+
+  fig_3_4_cmip5:
+    description: Timeseries of Global Near-Surface Air Temperature Anomaly
+    variables:
+      tas: &tas_settings_cmip5
+        preprocessor: regrid_5_5_mmm
+        mip: Amon
+        project: CMIP5
+        exp: [historical, rcp45]
+        ensemble: r1i1p1
+        start_year: 1850
+        end_year: 2020
+        additional_datasets: *cmip5_models
+      tasa:
+        <<: *tas_settings_cmip5
+        preprocessor: regrid_5_5
+        additional_datasets:
+          - {dataset: HadCRUT5, project: OBS, type: ground,
+             version: 5.0.1.0-analysis, tier: 2}
+    scripts:
+      fig-3-4:
+        <<: *tsline_settings
+        styleset: CMIP5
+
+
+  fig_3_4_collect:
+    description: Collect all timeseries of blended surface temperature.
+    variables:
+      tasa:
+        <<: *tas_settings_cmip6
+        end_year: 2020
+        preprocessor: regrid_5_5
+        additional_datasets:
+          - {dataset: HadCRUT5, project: OBS, type: ground,
+             version: 5.0.1.0-analysis, tier: 2}
+          - {dataset: BerkeleyEarth, project: OBS, type: reanaly,
+             version: 2020, tier: 2, end_year: 2019}
+          - {dataset: NOAAGlobalTemp, project: OBS, type: ground,
+             version: v5.0.0, tier: 2, start_year: 1880}
+          - {dataset: Kadow2020, project: OBS, type: ground,
+             version: 5.0.1.0, tier: 2}
+    scripts:
+      collect:
+        <<: *tsline_settings
+        script: ipcc_ar6/tsline_collect.ncl
+        ancestors: ['tasa', 'fig_3_4_cmip5/fig-3-4', 'fig_3_4_cmip6/fig-3-4']
+        start_year: 1850
+        end_year: 2020
+        ref: ["HadCRUT5"]
+        order: ["CMIP6_historical-ssp245", "CMIP5_historical-rcp45"]
+        stat_shading: true
+        ref_shading: true
+        ref_stderr: true
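+
+  # Note on the `<<: *tsline_settings` merge keys used above: these are
+  # plain YAML anchors, so `fig-3-4` in fig_3_4_cmip5 inherits every key of
+  # &tsline_settings and overrides only `styleset`. Assuming standard YAML
+  # merge semantics, it is a shorthand sketch for writing out:
+  #   fig-3-4:
+  #     script: ipcc_ar6/tas_anom.ncl
+  #     ref_start: 1850
+  #     ref_end: 1900
+  #     plot_units: "degC"
+  #     y_min: -0.7
+  #     y_max: 2.1
+  #     volcanoes: true
+  #     blending: false
+  #     ref_mask: true
+  #     write_stat: true
+  #     styleset: CMIP5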
+
+
+  # **********************************************************************
+  # IPCC WGI AR6, chap. 3
+  # Sect. 3.3
+  # Fig. 3.5
+  # **********************************************************************
+  # Global climate variability - surface temperature
+  # **********************************************************************
+
+  fig_3_5:
+    description: IPCC AR6 Ch. 3, Fig. 3.5 (near-surface temperature)
+    variables:
+      tas:
+        preprocessor: regrid_5_5
+        mip: Amon
+        project: CMIP6
+        exp: piControl
+        ensemble: r1i1p1f1
+        grid: gr
+        additional_datasets: *models_cmip6_piControl_short
+      tasa:
+        preprocessor: regrid_5_5
+        mip: Amon
+        project: CMIP6
+        exp: piControl
+        ensemble: r1i1p1f1
+        grid: gr
+        start_year: 1950
+        end_year: 2014
+        additional_datasets:
+          - {dataset: Kadow2020, project: OBS, type: ground,
+             version: 5.0.1.0, tier: 2}
+          - {dataset: NOAAGlobalTemp, project: OBS, type: ground,
+             version: v5.0.0, tier: 2, start_year: 1880}
+          - {dataset: BerkeleyEarth, project: OBS, type: reanaly,
+             version: 2020, tier: 2}
+          - {dataset: HadCRUT5, project: OBS, type: ground,
+             version: 5.0.1.0-analysis, tier: 2}
+    scripts:
+      fig_3_5:
+        script: ipcc_ar6/zonal_st_dev.ncl
+        multi_model_mean: true
+        plot_units: "degC"
+        plot_legend: false
+        styleset: CMIP6_ipcc
+
+
+  # **********************************************************************
+  # IPCC WGI AR6, chap. 3
+  # Sect. 3.3
+  # Fig. 3.13 a,b,c,d
+  # **********************************************************************
+  # Multi model mean, multi model mean bias and root mean square error
+  # (geographical distributions) for CMIP6 and CMIP5
+  # **********************************************************************
+
+  IAV_calc_pr_cmip6:
+    <<: *diag_IAV
+    variables:
+      pr:
+        preprocessor: preproc_map
+        project: CMIP6
+        mip: Amon
+        exp: piControl
+        ensemble: r1i1p1f1
+        grid: gr
+
+  IAV_calc_pr_cmip5:
+    <<: *diag_IAV
+    variables:
+      pr:
+        preprocessor: preproc_map
+        project: CMIP5
+        mip: Amon
+        exp: piControl
+        ensemble: r1i1p1
+        additional_datasets: *models_cmip5_piControl
+
+
+  fig_3_13_cmip6:
+    description: IPCC AR6 Ch. 3, Fig. 3.13 (precipitation)
+    variables:
+      pr:
+        preprocessor: clim
+        reference_dataset: GPCP-SG
+        mip: Amon
+        project: CMIP6
+        exp: historical
+        ensemble: r1i1p1f1
+        grid: gr
+        start_year: 1995
+        end_year: 2014
+        additional_datasets: *models_cmip6_historical
+    additional_datasets:
+      - {dataset: GPCP-SG, project: obs4MIPs, level: L3, version: v2.3,
+         tier: 1}
+    scripts:
+      fig_3_13:
+        script: ipcc_ar6/model_bias.ncl
+        ancestors: ['pr', 'IAV_calc_pr_cmip6/calc_IAV_for_stippandhatch']
+        projection: Robinson
+        mean_caption: "CMIP6"
+        bias_caption: "CMIP6 bias"
+        plot_rms_diff: true
+        rmsd_caption: "CMIP6 root mean square error"
+        stippandhatch: true
+        panel_title: "Precipitation"
+
+
+  fig_3_13_cmip5:
+    description: IPCC AR6 Ch. 3, Fig. 3.13 (precipitation)
+    variables:
+      pr:
+        preprocessor: clim
+        reference_dataset: GPCP-SG
+        mip: Amon
+        project: CMIP5
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 1985
+        end_year: 2004
+        additional_datasets: *models_cmip5_historical
+    additional_datasets:
+      - {dataset: GPCP-SG, project: obs4MIPs, level: L3, version: v2.3,
+         tier: 1}
+    scripts:
+      fig_3_13:
+        script: ipcc_ar6/model_bias.ncl
+        ancestors: ['pr', 'IAV_calc_pr_cmip5/calc_IAV_for_stippandhatch']
+        projection: Robinson
+        mean_caption: "CMIP5"
+        bias_caption: "CMIP5 bias"
+        panel_num: ['c)', 'd)']
+        stippandhatch: true
+
+
+  # **********************************************************************
+  # IPCC WGI AR6, chap. 3
+  # Sect. 3.3
+  # Fig. 3.15
+  # **********************************************************************
+  # Global and zonal average anomalies in annual mean precipitation
+  # **********************************************************************
+
+  fig_3_15_precip_anom:
+    description: IPCC AR6 Ch. 3, Fig. 3.15 (precipitation)
+    variables:
+      pr_historical_global: &var_pr
+        short_name: pr
+        preprocessor: clim_5x5_land
+        reference_dataset: GHCN
+        mip: Amon
+        project: CMIP6
+        exp: historical
+        ensemble: r1i1p1f1
+        grid: gr
+        start_year: 1950
+        end_year: 2014
+        additional_datasets: *cmip6_models_pr
+      pr_hist-nat_global:
+        <<: *var_pr
+        exp: hist-nat
+        additional_datasets: *models_cmip6_hist-nat
+      pr_historical_60N-90N:
+        <<: *var_pr
+        preprocessor: clim_5x5_land_60N-90N
+      pr_hist-nat_60N-90N:
+        <<: *var_pr
+        preprocessor: clim_5x5_land_60N-90N
+        exp: hist-nat
+        additional_datasets: *models_cmip6_hist-nat
+      pr_historical_30N-60N:
+        <<: *var_pr
+        preprocessor: clim_5x5_land_30N-60N
+      pr_hist-nat_30N-60N:
+        <<: *var_pr
+        preprocessor: clim_5x5_land_30N-60N
+        exp: hist-nat
+        additional_datasets: *models_cmip6_hist-nat
+      pr_historical_30S-30N:
+        <<: *var_pr
+        preprocessor: clim_5x5_land_30S-30N
+      pr_hist-nat_30S-30N:
+        <<: *var_pr
+        preprocessor: clim_5x5_land_30S-30N
+        exp: hist-nat
+        additional_datasets: *models_cmip6_hist-nat
+      pr_historical_30S-60S:
+        <<: *var_pr
+        preprocessor: clim_5x5_land_30S-60S
+      pr_hist-nat_30S-60S:
+        <<: *var_pr
+        preprocessor: clim_5x5_land_30S-60S
+        exp: hist-nat
+        additional_datasets: *models_cmip6_hist-nat
+    additional_datasets:
+      - {dataset: GHCN, project: OBS, type: ground, version: 1, tier: 2}
+      - {dataset: GPCP-SG, project: obs4MIPs, level: L3, version: v2.3,
+         tier: 1, start_year: 1979}
+      - {dataset: CRU, mip: Amon, project: OBS, type: reanaly,
+         version: TS4.02, tier: 2}
+    scripts:
+      tsline:
+        script: ipcc_ar6/precip_anom.ncl
+        ref_start: 1961
+        ref_end: 1990
+        ref_mask: true
+        start_year: 1950
+        end_year: 2014
+        header: ["Global", "60N-90N", "30N-60N", "30S-30N", "60S-30S"]
+        panels: [["pr_historical_global", "pr_hist-nat_global"],
+                 ["pr_historical_60N-90N", "pr_hist-nat_60N-90N"],
+                 ["pr_historical_30N-60N", "pr_hist-nat_30N-60N"],
+                 ["pr_historical_30S-30N", "pr_hist-nat_30S-30N"],
+                 ["pr_historical_30S-60S", "pr_hist-nat_30S-60S"]]
+        stat: "MinMax"
+        plot_units: "mm/day"
+        y_min: -0.2
+        y_max: 0.2
diff --git a/esmvaltool/recipes/ipccwg1ar6ch3/recipe_ipccwg1ar6ch3_fig_3_19.yml b/esmvaltool/recipes/ipccwg1ar6ch3/recipe_ipccwg1ar6ch3_fig_3_19.yml
new file mode 100644
index 0000000000..41731ba420
--- /dev/null
+++ b/esmvaltool/recipes/ipccwg1ar6ch3/recipe_ipccwg1ar6ch3_fig_3_19.yml
@@ -0,0 +1,136 @@
+# ESMValTool
+# recipe_ipccwg1ar6ch3_fig_3_19.yml
+---
+documentation:
+
+  title: IPCC AR6 WG1 Chapter 3 Fig. 3.19
+
+  description: |
+    Producing Fig.
3.19 in the IPCC Working Group I Contribution to the Sixth + Assessment Report: Chapter 3 + + authors: + - bock_lisa + - eyring_veronika + + maintainer: + - bock_lisa + + projects: + - ipcc_ar6 + + references: + - eyring21ipcc + + +datasets: + # CMIP6 + - {dataset: ACCESS-CM2, grid: gn, institute: CSIRO-ARCCSS} + - {dataset: ACCESS-ESM1-5, grid: gn, institute: CSIRO} + - {dataset: AWI-CM-1-1-MR, grid: gn} + - {dataset: AWI-ESM-1-1-LR, grid: gn} + - {dataset: BCC-CSM2-MR, grid: gn} + - {dataset: BCC-ESM1, grid: gn} + - {dataset: CAMS-CSM1-0, grid: gn} + - {dataset: CanESM5, grid: gn} + - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn} + - {dataset: CAS-ESM2-0, institute: CAS, grid: gn} + - {dataset: CESM2, grid: gn} + - {dataset: CESM2-FV2, grid: gn, institute: NCAR} + - {dataset: CESM2-WACCM, grid: gn, institute: NCAR} + - {dataset: CESM2-WACCM-FV2, grid: gn, institute: NCAR} + - {dataset: CIESM} + - {dataset: CMCC-CM2-HR4, grid: gn} + - {dataset: CMCC-CM2-SR5, grid: gn} + - {dataset: CMCC-ESM2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2} + - {dataset: E3SM-1-0} + - {dataset: E3SM-1-1-ECA, institute: E3SM-Project} + - {dataset: EC-Earth3} + - {dataset: EC-Earth3-AerChem} + - {dataset: EC-Earth3-CC} + - {dataset: EC-Earth3-Veg} + - {dataset: EC-Earth3-Veg-LR} + - {dataset: FGOALS-f3-L} + - {dataset: FGOALS-g3, grid: gn} + - {dataset: GFDL-CM4, grid: gr1} + - {dataset: GFDL-ESM4, grid: gr1} + - {dataset: GISS-E2-1-G, grid: gn} + - {dataset: GISS-E2-1-G-CC, grid: gn} + - {dataset: GISS-E2-1-H, grid: gn} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn} + - {dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f3, grid: gn} + - {dataset: IITM-ESM, grid: gn} + - {dataset: INM-CM4-8, grid: gr1} + - {dataset: INM-CM5-0, grid: gr1} + - {dataset: IPSL-CM5A2-INCA} + - {dataset: IPSL-CM6A-LR} + - {dataset: KACE-1-0-G} + - {dataset: KIOST-ESM, grid: gr1} + - {dataset: MCM-UA-1-0, grid: gn} + - {dataset: MIROC6, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn} + - {dataset: MPI-ESM-1-2-HAM, grid: gn} + - {dataset: MPI-ESM1-2-HR, grid: gn} + - {dataset: MPI-ESM1-2-LR, grid: gn} + - {dataset: MRI-ESM2-0, grid: gn} + - {dataset: NESM3, grid: gn} + - {dataset: NorCPM1, grid: gn, institute: NCC} + - {dataset: NorESM2-LM, grid: gn, institute: NCC} + - {dataset: NorESM2-MM, grid: gn, institute: NCC} + - {dataset: SAM0-UNICON, grid: gn} + - {dataset: TaiESM1, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + + +preprocessors: + zonal: + regrid: + target_grid: 2x2 + scheme: linear + extract_levels: + scheme: linear + levels: reference_dataset + zonal_statistics: + operator: mean + mask_fillvalues: + threshold_fraction: 0.95 + + +diagnostics: + + # ********************************************************************** + # IPCC WGI AR6, chap. 3 + # Sect. 3.3 + # Fig. 
+  # Fig. 3.19
+  # **********************************************************************
+  # Long-term mean and trend of the zonal wind
+  # **********************************************************************
+
+  fig_3_19:
+    description: Long-term mean and trend of the zonal wind
+    themes:
+      - atmDyn
+    realms:
+      - atmos
+    variables:
+      ua:
+        preprocessor: zonal
+        reference_dataset: ERA5
+        project: CMIP6
+        mip: Amon
+        exp: historical
+        grid: gr
+        ensemble: r1i1p1f1
+        start_year: 1985
+        end_year: 2014
+        additional_datasets:
+          - {dataset: ERA5, project: native6, type: reanaly, version: v1,
+             tier: 3}
+    scripts:
+      clim:
+        script: ipcc_ar6/zonal_westerly_winds.ncl
+        e13fig12_exp_MMM: "historical"
+        e13fig12_season: "DJF"
+        e13fig12_multimean: true
diff --git a/esmvaltool/recipes/ipccwg1ar6ch3/recipe_ipccwg1ar6ch3_fig_3_42_a.yml b/esmvaltool/recipes/ipccwg1ar6ch3/recipe_ipccwg1ar6ch3_fig_3_42_a.yml
new file mode 100644
index 0000000000..55c53147ec
--- /dev/null
+++ b/esmvaltool/recipes/ipccwg1ar6ch3/recipe_ipccwg1ar6ch3_fig_3_42_a.yml
@@ -0,0 +1,886 @@
+# ESMValTool
+# recipe_ipccwg1ar6ch3_fig_3_42_a.yml
+---
+documentation:
+
+  title: IPCC AR6 WG1 Chapter 3 Fig. 3.42a
+
+  description: |
+    Producing Fig. 3.42a in the IPCC Working Group I
+    Contribution to the Sixth Assessment Report: Chapter 3.
+    Processing of CMIP3 models currently works only in serial mode, due to
+    an issue in the input data still under investigation. To run the recipe
+    set the configuration option ``max_parallel_tasks: 1``.
+
+  authors:
+    - bock_lisa
+    - eyring_veronika
+
+  maintainer:
+    - bock_lisa
+
+  references:
+    - eyring21ipcc
+
+  projects:
+    - ipcc_ar6
+
+
+preprocessors:
+
+  pp850_1x1:
+    extract_levels:
+      levels: 85000
+      scheme: linear
+    regrid:
+      target_grid: 1x1
+      scheme: linear
+    mask_fillvalues:
+      threshold_fraction: 0.95
+
+  pp500_1x1:
+    extract_levels:
+      levels: 50000
+      scheme: linear
+    regrid:
+      target_grid: 1x1
+      scheme: linear
+    mask_fillvalues:
+      threshold_fraction: 0.95
+
+  pp400_1x1:
+    extract_levels:
+      levels: 40000
+      scheme: linear
+    regrid:
+      target_grid: 1x1
+      scheme: linear
+    mask_fillvalues:
+      threshold_fraction: 0.95
+
+  pp200_1x1:
+    extract_levels:
+      levels: 19900
+      scheme: linear
+    regrid:
+      target_grid: 1x1
+      scheme: linear
+    mask_fillvalues:
+      threshold_fraction: 0.95
+
+  ppNOLEV1:
+    regrid:
+      target_grid: reference_dataset
+      scheme: linear
+    mask_fillvalues:
+      threshold_fraction: 0.95
+
+  ppNOLEV1x1:
+    regrid:
+      target_grid: 1x1
+      scheme: linear
+    mask_fillvalues:
+      threshold_fraction: 0.95
+
+
+datasets:
+  # CMIP3
+  - &cmip3 {dataset: cccma_cgcm3_1, institute: CCCMA, project: CMIP3, mip: A1,
+            modeling_realm: atm, exp: 20c3m, frequency: mo, ensemble: run1}
+  - {<<: *cmip3, dataset: cccma_cgcm3_1_t63, institute: CCCMA}
+  - {<<: *cmip3, dataset: csiro_mk3_0, institute: CSIRO}
+  - {<<: *cmip3, dataset: gfdl_cm2_0, institute: GFDL}
+  - {<<: *cmip3, dataset: gfdl_cm2_1, institute: GFDL}
+  - {<<: *cmip3, dataset: giss_model_e_h, institute: NASA}
+  - {<<: *cmip3, dataset: giss_model_e_r, institute: NASA}
+  - {<<: *cmip3, dataset: iap_fgoals1_0_g, institute: LASG}
+  - {<<: *cmip3, dataset: inmcm3_0, institute: INM}
+  - {<<: *cmip3, dataset: ipsl_cm4, institute: IPSL}
+  - {<<: *cmip3, dataset: miroc3_2_hires, institute: NIES}
+  - {<<: *cmip3, dataset: miroc3_2_medres, institute: NIES}
+  - {<<: *cmip3, dataset: mpi_echam5, institute: MPIM}
+  - {<<: *cmip3, dataset: mri_cgcm2_3_2a, institute: MRI}
+  - {<<: *cmip3, dataset: ncar_ccsm3_0, institute: NCAR}
+  - {<<: *cmip3, dataset: ncar_pcm1, institute: NCAR}
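+  # Note: `&cmip3` above is a YAML anchor and each `<<: *cmip3` entry merges
+  # it, overriding only the keys given inline. For example, the entry
+  #   - {<<: *cmip3, dataset: ukmo_hadcm3, institute: UKMO}
+  # below is shorthand for (a sketch of the fully expanded entry):
+  #   - {dataset: ukmo_hadcm3, institute: UKMO, project: CMIP3, mip: A1,
+  #      modeling_realm: atm, exp: 20c3m, frequency: mo, ensemble: run1}
+  - {<<: *cmip3,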
dataset: ukmo_hadcm3, institute: UKMO} + # CMIP5 + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CM} + - {dataset: CNRM-CM5} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + # CMIP6 + - &cmip6 {dataset: ACCESS-CM2, grid: gn, + ensemble: r1i1p1f1, project: CMIP6} + - {<<: *cmip6, dataset: ACCESS-ESM1-5, institute: CSIRO} + - {<<: *cmip6, dataset: AWI-CM-1-1-MR} + - {<<: *cmip6, dataset: AWI-ESM-1-1-LR} + - {<<: *cmip6, dataset: BCC-CSM2-MR} + - {<<: *cmip6, dataset: CAMS-CSM1-0} + - {<<: *cmip6, dataset: CanESM5} + - {<<: *cmip6, dataset: CanESM5-CanOE, ensemble: r1i1p2f1} + - {<<: *cmip6, dataset: CESM2} + - {<<: *cmip6, dataset: CESM2-FV2, institute: NCAR} + - {<<: *cmip6, dataset: CESM2-WACCM, institute: NCAR} + - {<<: *cmip6, dataset: CESM2-WACCM-FV2, institute: NCAR} + - {<<: *cmip6, dataset: CIESM, grid: gr} + - {<<: *cmip6, dataset: CMCC-CM2-HR4} + - {<<: *cmip6, dataset: CMCC-CM2-SR5} + - {<<: *cmip6, dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr} + - {<<: *cmip6, dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr} + - {<<: *cmip6, dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-Veg-LR, grid: gr} + - {<<: *cmip6, dataset: FGOALS-f3-L, grid: gr} + - {<<: *cmip6, dataset: FGOALS-g3} + - {<<: *cmip6, dataset: GFDL-CM4, grid: gr1} + - {<<: *cmip6, dataset: GISS-E2-1-G} + - {<<: *cmip6, dataset: GISS-E2-1-H} + - {<<: *cmip6, dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3} + - {<<: *cmip6, dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f3} + - {<<: *cmip6, dataset: IITM-ESM} + - {<<: *cmip6, dataset: INM-CM4-8, grid: gr1} + - {<<: *cmip6, dataset: INM-CM5-0, grid: gr1} + - {<<: *cmip6, dataset: IPSL-CM6A-LR, grid: gr} + - {<<: *cmip6, dataset: MIROC6} + - {<<: *cmip6, dataset: MIROC-ES2L, ensemble: r1i1p1f2} + - {<<: *cmip6, dataset: MPI-ESM-1-2-HAM} + - {<<: *cmip6, dataset: MPI-ESM1-2-LR} + - {<<: *cmip6, dataset: MPI-ESM1-2-HR} + - {<<: *cmip6, dataset: MRI-ESM2-0} + - {<<: *cmip6, dataset: NESM3} + - {<<: *cmip6, dataset: NorCPM1, institute: NCC, ensemble: r10i1p1f1} + - {<<: *cmip6, dataset: NorESM2-LM, institute: NCC} + - {<<: *cmip6, dataset: NorESM2-MM, institute: NCC} + - {<<: *cmip6, dataset: SAM0-UNICON} + - {<<: *cmip6, dataset: TaiESM1} + - {<<: *cmip6, dataset: UKESM1-0-LL, ensemble: r1i1p1f2} + + +diagnostics: + + # ********************************************************************** + # IPCC WGI AR6, chap. 3 + # Sect. 3.8 + # Fig. 
3.42a + # ********************************************************************** + # Perfmetrics of atmosphere vars for CMIP3, CMIP5, CMIP6 + # ********************************************************************** + + tas: &perf_diag + description: Calculate space-time root mean square error + variables: + tas: &perf_var + preprocessor: ppNOLEV1x1 + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 1999 + reference_dataset: ERA5 + alternative_dataset: NCEP-NCAR-R1 + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CMS} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-s2} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {<<: *cmip6, dataset: BCC-ESM1} + - {<<: *cmip6, dataset: CMCC-ESM2} + - {<<: *cmip6, dataset: E3SM-1-1, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-0, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-1-ECA, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-AerChem, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-CC, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-Veg, grid: gr} + - {<<: *cmip6, dataset: GFDL-ESM4, grid: gr1} + - {<<: *cmip6, dataset: GISS-E2-1-G-CC} + - {<<: *cmip6, dataset: IPSL-CM5A2-INCA, grid: gr} + - {<<: *cmip6, dataset: KACE-1-0-G, grid: gr} + - {<<: *cmip6, dataset: KIOST-ESM, grid: gr1} + - {<<: *cmip6, dataset: MCM-UA-1-0} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, version: 1, tier: 2} + scripts: + grading: &grading_settings + script: perfmetrics/main.ncl + # Plot type ('cycle', 'zonal', 'latlon', 'cycle_latlon', 'cycle_zonal') + plot_type: cycle_latlon + # Time average ('opt' argument of time_operations.ncl) + time_avg: monthlyclim + # Region ('global', 'trop', 'nhext', 'shext') + region: global + # Plot standard deviation ('all', 'none', 'ref_model' or dataset name) + plot_stddev: ref_model + # Plot legend in a separate file + legend_outside: true + # Plot style + styleset: CMIP5 + # Calculate grading + calc_grading: true + # Metric ('RMSD', 'BIAS', taylor') + metric: [RMSD] + # Normalization ('mean', 'median', 'centered_median', 'none') + normalization: [centered_median] + + + ts: + <<: *perf_diag + variables: + ts: + <<: *perf_var + reference_dataset: ESACCI-SST + alternative_dataset: HadISST + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CMS} + - {dataset: FGOALS-s2} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {<<: *cmip6, dataset: BCC-ESM1} + - {<<: *cmip6, dataset: CMCC-ESM2} + - {<<: *cmip6, dataset: E3SM-1-1, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-0, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-1-ECA, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3, grid: gr} + - {<<: *cmip6, dataset: 
EC-Earth3-AerChem, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-CC, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-Veg, grid: gr} + - {<<: *cmip6, dataset: GFDL-ESM4, grid: gr1} + - {<<: *cmip6, dataset: GISS-E2-1-G-CC} + - {<<: *cmip6, dataset: IPSL-CM5A2-INCA, grid: gr} + - {<<: *cmip6, dataset: KACE-1-0-G, grid: gr} + - {<<: *cmip6, dataset: KIOST-ESM, grid: gr1} + - {<<: *cmip6, dataset: MCM-UA-1-0} + - {dataset: ESACCI-SST, project: OBS, type: sat, + version: L4-GHRSST-SSTdepth-OSTIA-GLOB, tier: 2, start_year: 1992} + - {dataset: HadISST, project: OBS, type: reanaly, version: 1, tier: 2} + + + pr: + <<: *perf_diag + variables: + pr: + <<: *perf_var + preprocessor: ppNOLEV1 + reference_dataset: GPCP-SG + alternative_dataset: GHCN + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CMS} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {<<: *cmip6, dataset: BCC-ESM1} + - {<<: *cmip6, dataset: CMCC-ESM2} + - {<<: *cmip6, dataset: E3SM-1-1, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-0, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-1-ECA, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-AerChem, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-CC, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-Veg, grid: gr} + - {<<: *cmip6, dataset: GFDL-ESM4, grid: gr1} + - {<<: *cmip6, dataset: GISS-E2-1-G-CC} + - {<<: *cmip6, dataset: IPSL-CM5A2-INCA, grid: gr} + - {<<: *cmip6, dataset: KACE-1-0-G, grid: gr} + - {<<: *cmip6, dataset: MCM-UA-1-0} + - {dataset: GPCP-SG, project: obs4MIPs, level: L3, version: v2.3, tier: 1} + - {dataset: GHCN, project: OBS, type: ground, version: 1, tier: 2} + + + zg500: + <<: *perf_diag + variables: + zg: + <<: *perf_var + preprocessor: pp500_1x1 + reference_dataset: ERA5 + alternative_dataset: NCEP-NCAR-R1 + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CMS} + - {dataset: FGOALS-s2} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {<<: *cmip6, dataset: BCC-ESM1} + - {<<: *cmip6, dataset: CMCC-ESM2} + - {<<: *cmip6, dataset: E3SM-1-1, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-0, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-1-ECA, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-AerChem, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-CC, grid: gr} + - {<<: *cmip6, dataset: GFDL-ESM4, grid: gr1} + - {<<: *cmip6, dataset: GISS-E2-1-G-CC} + - {<<: *cmip6, dataset: KACE-1-0-G, grid: gr} + - {<<: *cmip6, dataset: MCM-UA-1-0} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, version: 1, tier: 2} + + + psl: + <<: *perf_diag + variables: + psl: + <<: *perf_var + preprocessor: ppNOLEV1 + reference_dataset: JRA-55 + alternative_dataset: ERA5 + additional_datasets: + - {<<: *cmip3, dataset: 
bccr_bcm2_0, institute: BCCR} + - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CMCC-CMS} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-s2} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: HadCM3} + - {<<: *cmip6, dataset: BCC-ESM1} + - {<<: *cmip6, dataset: CMCC-ESM2} + - {<<: *cmip6, dataset: E3SM-1-1, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-0, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-1-ECA, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-AerChem, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-CC, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-Veg, grid: gr} + - {<<: *cmip6, dataset: GFDL-ESM4, grid: gr1} + - {<<: *cmip6, dataset: GISS-E2-1-G-CC} + - {<<: *cmip6, dataset: IPSL-CM5A2-INCA, grid: gr} + - {<<: *cmip6, dataset: KACE-1-0-G, grid: gr} + - {<<: *cmip6, dataset: KIOST-ESM, grid: gr1} + - {<<: *cmip6, dataset: MCM-UA-1-0} + - {dataset: JRA-55, project: ana4mips, type: reanalysis, tier: 1} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, tier: 3} + + + lwcre: + <<: *perf_diag + variables: + lwcre: + preprocessor: ppNOLEV1 + reference_dataset: CERES-EBAF + derive: true + force_derivation: false + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 1999 + additional_datasets: + - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO} + - {dataset: CanESM2} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CMS} + - {dataset: FGOALS-s2} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {<<: *cmip6, dataset: CMCC-ESM2} + - {<<: *cmip6, dataset: E3SM-1-1, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-0, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-1-ECA, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-AerChem, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-CC, grid: gr} + - {<<: *cmip6, dataset: GFDL-ESM4, grid: gr1} + - {<<: *cmip6, dataset: GISS-E2-1-G-CC} + - {<<: *cmip6, dataset: IPSL-CM5A2-INCA, grid: gr} + - {<<: *cmip6, dataset: KACE-1-0-G, grid: gr} + - {<<: *cmip6, dataset: KIOST-ESM, grid: gr1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-8, + tier: 1, start_year: 2001, end_year: 2015} + + + swcre: + <<: *perf_diag + variables: + swcre: + reference_dataset: CERES-EBAF + derive: true + force_derivation: false + preprocessor: ppNOLEV1 + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 1999 + additional_datasets: + - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CESM} + - {dataset: FGOALS-s2} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {<<: *cmip6, dataset: BCC-ESM1} + - {<<: *cmip6, dataset: CMCC-ESM2} + - {<<: *cmip6, dataset: E3SM-1-1, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-0, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-1-ECA, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-AerChem, grid: gr} + 
- {<<: *cmip6, dataset: EC-Earth3-CC, grid: gr} + - {<<: *cmip6, dataset: GFDL-ESM4, grid: gr1} + - {<<: *cmip6, dataset: GISS-E2-1-G-CC} + - {<<: *cmip6, dataset: IPSL-CM5A2-INCA, grid: gr} + - {<<: *cmip6, dataset: KACE-1-0-G, grid: gr} + - {<<: *cmip6, dataset: KIOST-ESM, grid: gr1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-8, + tier: 1, start_year: 2001, end_year: 2015} + + + rlut: + <<: *perf_diag + variables: + rlut: + reference_dataset: CERES-EBAF + preprocessor: ppNOLEV1 + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 1999 + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CMS} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-s2} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {<<: *cmip6, dataset: CMCC-ESM2} + - {<<: *cmip6, dataset: E3SM-1-1, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-0, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-1-ECA, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-AerChem, grid: gr} + - {<<: *cmip6, dataset: GFDL-ESM4, grid: gr1} + - {<<: *cmip6, dataset: GISS-E2-1-G-CC} + - {<<: *cmip6, dataset: IPSL-CM5A2-INCA, grid: gr} + - {<<: *cmip6, dataset: KACE-1-0-G, grid: gr} + - {<<: *cmip6, dataset: KIOST-ESM, grid: gr1} + - {<<: *cmip6, dataset: MCM-UA-1-0} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-8, + tier: 1, start_year: 2001, end_year: 2015} + + + hus400: + <<: *perf_diag + variables: + hus: + <<: *perf_var + preprocessor: pp400_1x1 + reference_dataset: AIRS + alternative_dataset: ERA5 + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {dataset: CanESM2} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CMS} + - {dataset: FGOALS-s2} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R-CC} + - {<<: *cmip6, dataset: BCC-ESM1} + - {<<: *cmip6, dataset: E3SM-1-1, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-0, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-1-ECA, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: GFDL-ESM4, grid: gr1} + - {<<: *cmip6, dataset: KACE-1-0-G, grid: gr} + - {<<: *cmip6, dataset: KIOST-ESM, grid: gr1} + - {<<: *cmip6, dataset: MCM-UA-1-0} + - {dataset: AIRS, project: obs4MIPs, level: L3, + version: RetStd-v5, tier: 1, start_year: 2003, end_year: 2010} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, tier: 3} + + + rsut: + <<: *perf_diag + variables: + rsut: + reference_dataset: CERES-EBAF + preprocessor: ppNOLEV1 + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 1999 + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CMS} + - {dataset: FGOALS-s2} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {<<: *cmip6, 
dataset: BCC-ESM1} + - {<<: *cmip6, dataset: CMCC-ESM2} + - {<<: *cmip6, dataset: E3SM-1-1, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-0, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-1-ECA, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-AerChem, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-CC, grid: gr} + - {<<: *cmip6, dataset: GFDL-ESM4, grid: gr1} + - {<<: *cmip6, dataset: GISS-E2-1-G-CC} + - {<<: *cmip6, dataset: IPSL-CM5A2-INCA, grid: gr} + - {<<: *cmip6, dataset: KACE-1-0-G, grid: gr} + - {<<: *cmip6, dataset: KIOST-ESM, grid: gr1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-8, + tier: 1, start_year: 2001, end_year: 2015} + + + ua200: + <<: *perf_diag + variables: + ua: + <<: *perf_var + preprocessor: pp200_1x1 + reference_dataset: ERA5 + alternative_dataset: NCEP-NCAR-R1 + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CMS} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {<<: *cmip6, dataset: BCC-ESM1} + - {<<: *cmip6, dataset: CMCC-ESM2} + - {<<: *cmip6, dataset: E3SM-1-1, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-0, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-1-ECA, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-AerChem, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-CC, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-Veg, grid: gr} + - {<<: *cmip6, dataset: GFDL-ESM4, grid: gr1} + - {<<: *cmip6, dataset: GISS-E2-1-G-CC} + - {<<: *cmip6, dataset: IPSL-CM5A2-INCA, grid: gr} + - {<<: *cmip6, dataset: KACE-1-0-G, grid: gr} + - {<<: *cmip6, dataset: KIOST-ESM, grid: gr1} + - {<<: *cmip6, dataset: MCM-UA-1-0} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, version: 1, tier: 2} + + + va850: + <<: *perf_diag + variables: + va: + <<: *perf_var + preprocessor: pp850_1x1 + reference_dataset: ERA5 + alternative_dataset: NCEP-NCAR-R1 + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CMS} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {<<: *cmip6, dataset: BCC-ESM1} + - {<<: *cmip6, dataset: CMCC-ESM2} + - {<<: *cmip6, dataset: E3SM-1-0, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-1-ECA, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-AerChem, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-CC, grid: gr} + - {<<: *cmip6, dataset: GFDL-ESM4, grid: gr1} + - {<<: *cmip6, dataset: GISS-E2-1-G-CC} + - {<<: *cmip6, dataset: IPSL-CM5A2-INCA, grid: gr} + - {<<: *cmip6, dataset: KACE-1-0-G, grid: gr} + - {<<: *cmip6, dataset: KIOST-ESM, grid: gr1} + - {<<: *cmip6, dataset: MCM-UA-1-0} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: 
reanaly, version: 1, tier: 2} + + + ua850: + <<: *perf_diag + variables: + ua: + <<: *perf_var + preprocessor: pp850_1x1 + reference_dataset: ERA5 + alternative_dataset: NCEP-NCAR-R1 + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CMS} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {<<: *cmip6, dataset: BCC-ESM1} + - {<<: *cmip6, dataset: CMCC-ESM2} + - {<<: *cmip6, dataset: E3SM-1-0, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-1-ECA, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-AerChem, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-CC, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-Veg, grid: gr} + - {<<: *cmip6, dataset: GFDL-ESM4, grid: gr1} + - {<<: *cmip6, dataset: GISS-E2-1-G-CC} + - {<<: *cmip6, dataset: IPSL-CM5A2-INCA, grid: gr} + - {<<: *cmip6, dataset: KACE-1-0-G, grid: gr} + - {<<: *cmip6, dataset: KIOST-ESM, grid: gr1} + - {<<: *cmip6, dataset: MCM-UA-1-0} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, version: 1, tier: 2} + + + va200: + <<: *perf_diag + variables: + va: + <<: *perf_var + preprocessor: pp200_1x1 + reference_dataset: ERA5 + alternative_dataset: NCEP-NCAR-R1 + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CMS} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {<<: *cmip6, dataset: BCC-ESM1} + - {<<: *cmip6, dataset: CMCC-ESM2} + - {<<: *cmip6, dataset: E3SM-1-1, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-0, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-1-ECA, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-AerChem, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-CC, grid: gr} + - {<<: *cmip6, dataset: GFDL-ESM4, grid: gr1} + - {<<: *cmip6, dataset: GISS-E2-1-G-CC} + - {<<: *cmip6, dataset: IPSL-CM5A2-INCA, grid: gr} + - {<<: *cmip6, dataset: KACE-1-0-G, grid: gr} + - {<<: *cmip6, dataset: KIOST-ESM, grid: gr1} + - {<<: *cmip6, dataset: MCM-UA-1-0} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, version: 1, tier: 2} + + + ta850: + <<: *perf_diag + variables: + ta: + <<: *perf_var + preprocessor: pp850_1x1 + reference_dataset: ERA5 + alternative_dataset: NCEP-NCAR-R1 + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CMS} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {<<: *cmip6, dataset: BCC-ESM1} + - {<<: *cmip6, dataset: CMCC-ESM2} + - {<<: *cmip6, dataset: EC-Earth3, 
+         grid: gr}
+          - {<<: *cmip6, dataset: EC-Earth3-AerChem, grid: gr}
+          - {<<: *cmip6, dataset: EC-Earth3-CC, grid: gr}
+          - {<<: *cmip6, dataset: IPSL-CM5A2-INCA, grid: gr}
+          - {<<: *cmip6, dataset: MCM-UA-1-0}
+          - {dataset: ERA5, project: native6, type: reanaly, version: v1,
+             tier: 3}
+          - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, version: 1,
+             tier: 2}
+
+
+  ta200:
+    <<: *perf_diag
+    variables:
+      ta:
+        <<: *perf_var
+        preprocessor: pp200_1x1
+        reference_dataset: ERA5
+        alternative_dataset: NCEP-NCAR-R1
+        additional_datasets:
+          - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR}
+          - {dataset: CanCM4}
+          - {dataset: CanESM2}
+          - {dataset: CESM1-CAM5-1-FV2}
+          - {dataset: CMCC-CESM}
+          - {dataset: CMCC-CMS}
+          - {dataset: EC-EARTH, ensemble: r6i1p1}
+          - {dataset: GFDL-CM2p1}
+          - {dataset: GISS-E2-H, ensemble: r1i1p2}
+          - {dataset: GISS-E2-H-CC}
+          - {dataset: GISS-E2-R, ensemble: r1i1p2}
+          - {dataset: GISS-E2-R-CC}
+          - {dataset: HadCM3}
+          - {<<: *cmip6, dataset: BCC-ESM1}
+          - {<<: *cmip6, dataset: CMCC-ESM2}
+          - {<<: *cmip6, dataset: E3SM-1-1, institute: E3SM-Project, grid: gr}
+          - {<<: *cmip6, dataset: E3SM-1-1-ECA, institute: E3SM-Project,
+             grid: gr}
+          - {<<: *cmip6, dataset: EC-Earth3, grid: gr}
+          - {<<: *cmip6, dataset: EC-Earth3-AerChem, grid: gr}
+          - {<<: *cmip6, dataset: EC-Earth3-CC, grid: gr}
+          - {<<: *cmip6, dataset: IPSL-CM5A2-INCA, grid: gr}
+          - {<<: *cmip6, dataset: MCM-UA-1-0}
+          - {dataset: ERA5, project: native6, type: reanaly, version: v1,
+             tier: 3}
+          - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, version: 1,
+             tier: 2}
+
+
+  ### COLLECT METRICS #########################################################
+  collect:
+    description: Wrapper to collect and plot previously calculated metrics
+    scripts:
+      RMSD:
+        script: perfmetrics/collect.ncl
+        ancestors: ['*/grading*']
+        metric: RMSD
+        label_bounds: [-0.5, 0.5]
+        label_scale: 0.1
+        disp_values: false
+        cm_interval: [2, 241]
+        no_labelbar: true
+        project_order: ['CMIP3', 'CMIP5', 'CMIP6']
+        caption: "Relative model performance"
+        # Sort diagnostics in a specific order (name = 'diagnostic'-'region')
+        diag_order: ['hus400-global', 'zg500-global',
+                     'va200-global', 'va850-global',
+                     'ua200-global', 'ua850-global',
+                     'ta200-global', 'ta850-global',
+                     'ts-global', 'psl-global',
+                     'lwcre-global', 'swcre-global',
+                     'rlut-global', 'rsut-global',
+                     'pr-global', 'tas-global']
+        diag_label: ['Specific Humidity 400 hPa', 'Geopotential Height 500 hPa',
+                     'Northward Wind 200 hPa', 'Northward Wind 850 hPa',
+                     'Eastward Wind 200 hPa', 'Eastward Wind 850 hPa',
+                     'Temperature 200 hPa', 'Temperature 850 hPa',
+                     'Surface Temperature', 'Sea Level Pressure',
+                     'TOA Longwave Cloud Radiative Effect',
+                     'TOA Shortwave Cloud Radiative Effect',
+                     'TOA Outgoing Longwave Radiation',
+                     'TOA Outgoing Shortwave Radiation',
+                     'Precipitation', 'Near-Surface Temperature']
diff --git a/esmvaltool/recipes/ipccwg1ar6ch3/recipe_ipccwg1ar6ch3_fig_3_42_b.yml b/esmvaltool/recipes/ipccwg1ar6ch3/recipe_ipccwg1ar6ch3_fig_3_42_b.yml
new file mode 100644
index 0000000000..27cf66f255
--- /dev/null
+++ b/esmvaltool/recipes/ipccwg1ar6ch3/recipe_ipccwg1ar6ch3_fig_3_42_b.yml
@@ -0,0 +1,1692 @@
+# ESMValTool
+# recipe_ipccwg1ar6ch3_fig_3_42_b.yml
+---
+documentation:
+
+  title: IPCC AR6 WG1 Chapter 3 Fig. 3.42b
+
+  description: |
+    Producing Fig.
3.42b in the IPCC Working Group I + Contribution to the Sixth Assessment Report: Chapter 3 + + authors: + - bock_lisa + - gier_bettina + + maintainer: + - bock_lisa + + references: + - eyring21ipcc + + projects: + - ipcc_ar6 + + +preprocessors: + + pp_1x1: + regrid: + target_grid: 1x1 + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + + ppNOLEV1: + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + + ppNOLEV2: + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + + +diagnostics: + + # ********************************************************************** + # IPCC WGI AR6, chap. 3 + # Sect. 3.8 + # Fig. 3.42b + # ********************************************************************** + # Perfmetrics of land vars for CMIP3, CMIP5, CMIP6 + # ********************************************************************** + + sm: + description: Soil moisture + themes: + - phys + realms: + - land + variables: + sm: + short_name: sm + preprocessor: ppNOLEV1 + reference_dataset: ESACCI-SOILMOISTURE + mip: Lmon + derive: true + force_derivation: false + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 2005 + additional_datasets: + # CMIP5 + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + # CMIP6 + - {dataset: ACCESS-CM2, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: ACCESS-ESM1-5, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: AWI-ESM-1-1-LR, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CanESM5, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: CanESM5-CanOE, grid: gn, ensemble: r1i1p2f1, project: CMIP6} + - {dataset: CMCC-CM2-HR4, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr, project: CMIP6} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr, project: CMIP6} + - {dataset: CNRM-ESM2-1, grid: gr, ensemble: r7i1p1f2, project: CMIP6} + - {dataset: E3SM-1-0, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: E3SM-1-1-ECA, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: E3SM-1-1, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: EC-Earth3, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: EC-Earth3-Veg, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: GISS-E2-1-G, grid: gn, ensemble: 
+             r1i1p1f1, project: CMIP6}
+          - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn,
+             project: CMIP6}
+          - {dataset: IPSL-CM6A-LR, grid: gr, ensemble: r1i1p1f1,
+             project: CMIP6}
+          - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr,
+             project: CMIP6}
+          - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn, project: CMIP6}
+          - {dataset: MIROC-ES2L, grid: gn, ensemble: r1i1p1f2,
+             project: CMIP6}
+          - {dataset: MPI-ESM-1-2-HAM, grid: gn, ensemble: r1i1p1f1,
+             project: CMIP6}
+          - {dataset: MPI-ESM1-2-HR, grid: gn, ensemble: r1i1p1f1,
+             project: CMIP6}
+          - {dataset: MPI-ESM1-2-LR, grid: gn, ensemble: r1i1p1f1,
+             project: CMIP6}
+          - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn,
+             project: CMIP6}
+          - {dataset: NorCPM1, grid: gn, ensemble: r1i1p1f1, institute: NCC,
+             project: CMIP6}
+          - {dataset: NorESM2-LM, grid: gn, ensemble: r1i1p1f1,
+             project: CMIP6}
+          - {dataset: NorESM2-MM, grid: gn, ensemble: r1i1p1f1,
+             project: CMIP6}
+          - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn,
+             project: CMIP6}
+          - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6}
+          - {dataset: UKESM1-0-LL, grid: gn, ensemble: r1i1p1f2,
+             project: CMIP6}
+          # OBS
+          - {dataset: ESACCI-SOILMOISTURE, project: OBS, type: sat,
+             version: L3S-SSMV-COMBINED-v4.2, tier: 2, start_year: 1980}
+    scripts:
+      grading: &grading_settings
+        script: perfmetrics/main.ncl
+        # Plot type ('cycle', 'zonal', 'latlon', 'cycle_latlon', 'cycle_zonal')
+        plot_type: cycle_latlon
+        # Time average ('opt' argument of time_operations.ncl)
+        time_avg: monthlyclim
+        # Region ('global', 'trop', 'nhext', 'shext')
+        region: global
+        # Calculate grading
+        calc_grading: true
+        # Metric ('RMSD', 'BIAS', 'taylor')
+        metric: [RMSD]
+        # Normalization ('mean', 'median', 'centered_median', 'none')
+        normalization: [centered_median]
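+        # The option comments above list the accepted values; a hypothetical
+        # variant of this block (a sketch, not used for Fig. 3.42b) could
+        # grade with a Taylor metric instead:
+        #   metric: [taylor]
+        #   normalization: [none]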
+
+
+  nbp:
+    description: |
+      surface_net_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon
+      _due_to_all_land_processes
+    themes:
+      - phys
+    realms:
+      - land
+    variables:
+      nbp:
+        short_name: nbp
+        preprocessor: ppNOLEV1
+        reference_dataset: JMA-TRANSCOM
+        mip: Lmon
+        project: CMIP5
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 1986
+        end_year: 2005
+        additional_datasets:
+          # CMIP5
+          - {dataset: BNU-ESM}
+          - {dataset: CanESM2}
+          - {dataset: CCSM4}
+          - {dataset: CESM1-BGC}
+          - {dataset: CESM1-FASTCHEM}
+          - {dataset: CESM1-WACCM, ensemble: r2i1p1}
+          - {dataset: GFDL-ESM2G}
+          - {dataset: GFDL-ESM2M}
+          - {dataset: HadGEM2-CC}
+          - {dataset: HadGEM2-ES}
+          - {dataset: inmcm4}
+          - {dataset: IPSL-CM5A-LR}
+          - {dataset: IPSL-CM5A-MR}
+          - {dataset: IPSL-CM5B-LR}
+          - {dataset: MIROC-ESM}
+          - {dataset: MIROC-ESM-CHEM}
+          - {dataset: MPI-ESM-LR}
+          - {dataset: MPI-ESM-MR}
+          - {dataset: MPI-ESM-P}
+          - {dataset: MRI-ESM1}
+          - {dataset: NorESM1-M}
+          - {dataset: NorESM1-ME}
+          # CMIP6
+          - {dataset: ACCESS-ESM1-5, grid: gn, ensemble: r1i1p1f1,
+             project: CMIP6}
+          - {dataset: CanESM5, grid: gn, ensemble: r1i1p1f1, project: CMIP6}
+          - {dataset: CanESM5-CanOE, grid: gn, ensemble: r1i1p2f1,
+             project: CMIP6}
+          - {dataset: CESM2, grid: gn, ensemble: r1i1p1f1, project: CMIP6}
+          - {dataset: CESM2-FV2, grid: gn, ensemble: r1i1p1f1, institute: NCAR,
+             project: CMIP6}
+          - {dataset: CESM2-WACCM, grid: gn, ensemble: r1i1p1f1,
+             institute: NCAR, project: CMIP6}
+          - {dataset: CESM2-WACCM-FV2, grid: gn, ensemble: r1i1p1f1,
+             institute: NCAR, project: CMIP6}
+          - {dataset: CMCC-ESM2, grid: gn, ensemble: r1i1p1f1, project: CMIP6}
+          - {dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn,
+             project: CMIP6}
+          - {dataset: CNRM-ESM2-1, grid: gr, ensemble: r7i1p1f2,
+             project: CMIP6}
+          - {dataset: E3SM-1-1-ECA, ensemble: r1i1p1f1, grid: gr,
+             project: CMIP6}
+          - {dataset: E3SM-1-1, ensemble: r1i1p1f1, grid: gr, project: CMIP6}
+          - {dataset: EC-Earth3-CC, grid: gr, ensemble: r1i1p1f1,
+             project: CMIP6}
+          - {dataset: EC-Earth3-Veg, ensemble: r1i1p1f1, grid: gr,
+             project: CMIP6}
+          - {dataset: EC-Earth3-Veg-LR, ensemble: r1i1p1f1, grid: gr,
+             project: CMIP6}
+          - {dataset: GFDL-ESM4, grid: gr1, ensemble: r1i1p1f1,
+             project: CMIP6}
+          - {dataset: GISS-E2-1-G, ensemble: r9i1p5f1, grid: gn,
+             project: CMIP6}
+          - {dataset: GISS-E2-1-H, ensemble: r4i1p5f1, grid: gn,
+             project: CMIP6}
+          - {dataset: INM-CM4-8, grid: gr1, ensemble: r1i1p1f1,
+             project: CMIP6}
+          - {dataset: INM-CM5-0, grid: gr1, ensemble: r1i1p1f1,
+             project: CMIP6}
+          - {dataset: IPSL-CM6A-LR, grid: gr, ensemble: r1i1p1f1,
+             project: CMIP6}
+          - {dataset: MIROC-ES2L, grid: gn, ensemble: r1i1p1f2,
+             project: CMIP6}
+          - {dataset: MPI-ESM-1-2-HAM, grid: gn, ensemble: r1i1p1f1,
+             project: CMIP6}
+          - {dataset: MPI-ESM1-2-LR, grid: gn, ensemble: r1i1p1f1,
+             project: CMIP6}
+          - {dataset: MRI-ESM2-0, grid: gn, ensemble: r1i2p1f1,
+             project: CMIP6}
+          - {dataset: NorCPM1, grid: gn, ensemble: r1i1p1f1, institute: NCC,
+             project: CMIP6}
+          - {dataset: NorESM2-LM, grid: gn, ensemble: r1i1p1f1,
+             project: CMIP6}
+          - {dataset: NorESM2-MM, grid: gn, ensemble: r1i1p1f1,
+             project: CMIP6}
+          - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn,
+             project: CMIP6}
+          - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6}
+          - {dataset: UKESM1-0-LL, grid: gn, ensemble: r1i1p1f2,
+             project: CMIP6}
+          # OBS
+          - {dataset: JMA-TRANSCOM, project: OBS, type: reanaly,
+             version: 2018, tier: 3, start_year: 1986}
+    scripts:
+      grading:
+        <<: *grading_settings
+
+
+  gpp:
+    description: Carbon Mass Flux out of Atmosphere due to Gross Primary
+                 Production
+    themes:
+      - phys
+    realms:
+      - land
+    variables:
+      gpp:
+        short_name: gpp
+        preprocessor: ppNOLEV2
+        reference_dataset: MTE
+        alternative_dataset: FLUXCOM
+        mip: Lmon
+        project: CMIP5
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 1982
+        end_year: 2005
+        additional_datasets:
+          # CMIP5
+          - {dataset: bcc-csm1-1}
+          - {dataset: bcc-csm1-1-m}
+          - {dataset: BNU-ESM}
+          - {dataset: CanESM2}
+          - {dataset: CCSM4}
+          - {dataset: CESM1-BGC}
+          - {dataset: CESM1-CAM5}
+          - {dataset: CESM1-FASTCHEM}
+          - {dataset: CESM1-WACCM, ensemble: r2i1p1}
+          - {dataset: CMCC-CESM}
+          - {dataset: GFDL-ESM2G}
+          - {dataset: GFDL-ESM2M}
+          - {dataset: GISS-E2-H, ensemble: r1i1p2}
+          - {dataset: GISS-E2-H-CC}
+          - {dataset: GISS-E2-R, ensemble: r1i1p2}
+          - {dataset: GISS-E2-R-CC}
+          - {dataset: HadCM3}
+          - {dataset: HadGEM2-CC}
+          - {dataset: HadGEM2-ES}
+          - {dataset: inmcm4}
+          - {dataset: IPSL-CM5A-LR}
+          - {dataset: IPSL-CM5A-MR}
+          - {dataset: IPSL-CM5B-LR}
+          - {dataset: MIROC-ESM}
+          - {dataset: MIROC-ESM-CHEM}
+          - {dataset: MPI-ESM-LR}
+          - {dataset: MPI-ESM-MR}
+          - {dataset: MPI-ESM-P}
+          - {dataset: MRI-ESM1}
+          - {dataset: NorESM1-M}
+          - {dataset: NorESM1-ME}
+          # CMIP6
+          - {dataset: ACCESS-ESM1-5, grid: gn, ensemble: r1i1p1f1,
+             project: CMIP6}
+          - {dataset: AWI-ESM-1-1-LR, ensemble: r1i1p1f1, grid: gn,
+             project: CMIP6}
+          - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn,
+             project: CMIP6}
+          - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6}
+          - {dataset: CanESM5, grid: gn, ensemble: r1i1p1f1, project: CMIP6}
+          - {dataset: CanESM5-CanOE, grid: gn, ensemble: r1i1p2f1,
+             project: CMIP6}
+          - {dataset: CESM2, grid: gn, ensemble: r1i1p1f1, project: CMIP6}
+          - {dataset: CESM2-WACCM, grid: gn, ensemble: r1i1p1f1,
institute: NCAR, + project: CMIP6} + - {dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CMCC-ESM2, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr, project: CMIP6} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr, project: CMIP6} + - {dataset: CNRM-ESM2-1, grid: gr, ensemble: r7i1p1f2, project: CMIP6} + - {dataset: E3SM-1-1-ECA, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: E3SM-1-1, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: EC-Earth3-CC, grid: gr, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: EC-Earth3-Veg, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: EC-Earth3-Veg-LR, ensemble: r1i1p1f1, grid: gr, + project: CMIP6} + - {dataset: GISS-E2-1-G, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: GISS-E2-1-G-CC, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: INM-CM4-8, grid: gr1, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: INM-CM5-0, grid: gr1, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: IPSL-CM6A-LR, grid: gr, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: KIOST-ESM, ensemble: r1i1p1f1, grid: gr1, project: CMIP6} + - {dataset: MIROC-ES2L, grid: gn, ensemble: r1i1p1f2, project: CMIP6} + - {dataset: MPI-ESM-1-2-HAM, grid: gn, ensemble: r1i1p1f1, + project: CMIP6} + - {dataset: MPI-ESM1-2-HR, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: MPI-ESM1-2-LR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: MRI-ESM2-0, grid: gn, ensemble: r1i2p1f1, project: CMIP6} + - {dataset: NorCPM1, grid: gn, ensemble: r1i1p1f1, institute: NCC, + project: CMIP6} + - {dataset: NorESM2-LM, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: NorESM2-MM, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: UKESM1-0-LL, grid: gn, ensemble: r2i1p1f2, project: CMIP6} + # OBS + - {dataset: MTE, project: OBS, type: reanaly, + version: May12, tier: 3, start_year: 1982} + - {dataset: FLUXCOM, project: OBS, type: reanaly, + version: ANN-v1, tier: 3} + scripts: + grading: + <<: *grading_settings + + + lai: + description: Leaf Area Index + themes: + - phys + realms: + - land + variables: + lai: + short_name: lai + preprocessor: ppNOLEV1 + reference_dataset: LAI3g + mip: Lmon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1981 + end_year: 2005 + additional_datasets: + # CMIP5 + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM, ensemble: r2i1p1} + - {dataset: CMCC-CESM} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + # CMIP6 + - {dataset: ACCESS-ESM1-5, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + 
- {dataset: AWI-ESM-1-1-LR, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f2, grid: gn, project: CMIP6} + - {dataset: CanESM5, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: CanESM5-CanOE, grid: gn, ensemble: r1i1p2f1, project: CMIP6} + - {dataset: CESM2, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: CESM2-WACCM, grid: gn, ensemble: r1i1p1f1, institute: NCAR, + project: CMIP6} + - {dataset: CIESM, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: CMCC-CM2-HR4, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CMCC-ESM2, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr, project: CMIP6} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr, project: CMIP6} + - {dataset: CNRM-ESM2-1, grid: gr, ensemble: r7i1p1f2, project: CMIP6} + - {dataset: E3SM-1-0, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: E3SM-1-1-ECA, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: E3SM-1-1, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: EC-Earth3-CC, grid: gr, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: EC-Earth3-Veg, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: EC-Earth3-Veg-LR, ensemble: r1i1p1f1, grid: gr, + project: CMIP6} + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1, project: CMIP6} + - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1, project: CMIP6} + - {dataset: GISS-E2-1-G, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: GISS-E2-1-G-CC, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn, + project: CMIP6} + - {dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f3, grid: gn, + project: CMIP6} + - {dataset: INM-CM4-8, grid: gr1, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: INM-CM5-0, grid: gr1, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: IPSL-CM6A-LR, grid: gr, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: KIOST-ESM, ensemble: r1i1p1f1, grid: gr1, project: CMIP6} + - {dataset: MIROC-ES2L, grid: gn, ensemble: r1i1p1f2, project: CMIP6} + - {dataset: MPI-ESM-1-2-HAM, grid: gn, ensemble: r1i1p1f1, + project: CMIP6} + - {dataset: MPI-ESM1-2-HR, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: MPI-ESM1-2-LR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: MRI-ESM2-0, grid: gn, ensemble: r1i2p1f1, project: CMIP6} + - {dataset: NorESM2-LM, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: NorESM2-MM, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: UKESM1-0-LL, grid: gn, ensemble: r1i1p1f2, project: CMIP6} + # OBS + - {dataset: LAI3g, project: OBS, type: reanaly, + version: 1_regridded, tier: 3, start_year: 1981} + scripts: + grading: + <<: *grading_settings + + + fgco2: + description: | + surface_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon + themes: + - phys + realms: + - ocnBgchem + variables: + fgco2: + short_name: fgco2 + preprocessor: ppNOLEV2 + reference_dataset: Landschuetzer2016 + alternative_dataset: JMA-TRANSCOM + mip: Omon + project: CMIP5 
+ exp: historical + ensemble: r1i1p1 + start_year: 1986 + end_year: 2005 + additional_datasets: + # CMIP5 + - {dataset: CanESM2} + - {dataset: CESM1-BGC} + - {dataset: CMCC-CESM} + - {dataset: CNRM-CM5} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R-CC} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-ME} + # CMIP6 + - {dataset: ACCESS-ESM1-5, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: CanESM5, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: CanESM5-CanOE, grid: gn, ensemble: r1i1p2f1, project: CMIP6} + - {dataset: CESM2, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: CESM2-FV2, grid: gn, ensemble: r1i1p1f1, institute: NCAR, + project: CMIP6} + - {dataset: CESM2-WACCM, grid: gn, ensemble: r1i1p1f1, institute: NCAR, + project: CMIP6} + - {dataset: CESM2-WACCM-FV2, grid: gn, ensemble: r1i1p1f1, + institute: NCAR, project: CMIP6} + - {dataset: CMCC-ESM2, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: CNRM-ESM2-1, grid: gn, ensemble: r7i1p1f2, project: CMIP6} + - {dataset: EC-Earth3-CC, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: INM-CM4-8, grid: gr1, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: INM-CM5-0, grid: gr1, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: IPSL-CM5A2-INCA, ensemble: r1i1p1f1, grid: gn, + project: CMIP6} + - {dataset: IPSL-CM6A-LR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: MIROC-ES2L, grid: gn, ensemble: r1i1p1f2, project: CMIP6} + - {dataset: MPI-ESM-1-2-HAM, grid: gn, ensemble: r1i1p1f1, + project: CMIP6} + - {dataset: MPI-ESM1-2-HR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: MPI-ESM1-2-LR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: MRI-ESM2-0, grid: gn, ensemble: r1i2p1f1, project: CMIP6} + - {dataset: NorCPM1, grid: gn, ensemble: r1i1p1f1, institute: NCC, + project: CMIP6} + - {dataset: NorESM2-LM, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: NorESM2-MM, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: UKESM1-0-LL, grid: gn, ensemble: r2i1p1f2, project: CMIP6} + # OBS + - {dataset: Landschuetzer2016, project: OBS, type: clim, + version: v2016, tier: 2} + - {dataset: JMA-TRANSCOM, project: OBS, type: reanaly, version: 2018, + tier: 3, start_year: 1986} + scripts: + grading: + <<: *grading_settings + + + et: + description: total_evapotranspiration + themes: + - phys + realms: + - land + variables: + et: + short_name: et + preprocessor: ppNOLEV1 + reference_dataset: LandFlux-EVAL + mip: Lmon + derive: true + force_derivation: false + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1990 + end_year: 2005 + additional_datasets: + # CMIP5 + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: FIO-ESM} + - {dataset: 
GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + # CMIP6 + - {dataset: ACCESS-CM2, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: ACCESS-ESM1-5, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: AWI-CM-1-1-MR, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: AWI-ESM-1-1-LR, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CanESM5, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: CanESM5-CanOE, grid: gn, ensemble: r1i1p2f1, project: CMIP6} + - {dataset: CESM2, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: CESM2-FV2, grid: gn, ensemble: r1i1p1f1, institute: NCAR, + project: CMIP6} + - {dataset: CESM2-WACCM, grid: gn, ensemble: r1i1p1f1, institute: NCAR, + project: CMIP6} + - {dataset: CESM2-WACCM-FV2, grid: gn, ensemble: r1i1p1f1, + institute: NCAR, project: CMIP6} + - {dataset: CIESM, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: CMCC-CM2-HR4, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr, project: CMIP6} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr, project: CMIP6} + - {dataset: CNRM-ESM2-1, grid: gr, ensemble: r7i1p1f2, project: CMIP6} + - {dataset: E3SM-1-0, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: E3SM-1-1-ECA, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: E3SM-1-1, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: EC-Earth3-Veg-LR, ensemble: r1i1p1f1, grid: gr, + project: CMIP6} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: FIO-ESM-2-0, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1, project: CMIP6} + - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1, project: CMIP6} + - {dataset: GISS-E2-1-G, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn, + project: CMIP6} + - {dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f3, grid: gn, + project: CMIP6} + - {dataset: IITM-ESM, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: INM-CM4-8, grid: gr1, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: INM-CM5-0, grid: gr1, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: IPSL-CM6A-LR, grid: gr, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: MCM-UA-1-0, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: MIROC-ES2L, grid: 
gn, ensemble: r1i1p1f2, project: CMIP6} + - {dataset: MPI-ESM-1-2-HAM, grid: gn, ensemble: r1i1p1f1, + project: CMIP6} + - {dataset: MPI-ESM1-2-HR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: MPI-ESM1-2-LR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: NorCPM1, grid: gn, ensemble: r1i1p1f1, institute: NCC, + project: CMIP6} + - {dataset: NorESM2-LM, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: NorESM2-MM, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: UKESM1-0-LL, grid: gn, ensemble: r1i1p1f2, project: CMIP6} + # OBS + - {dataset: LandFlux-EVAL, project: OBS, type: reanaly, + version: Oct13, tier: 3, start_year: 1990} + scripts: + grading: + <<: *grading_settings + + + rlus: + description: surface upwelling longwave flux in air + themes: + - phys + realms: + - atmos + variables: + rlus: + short_name: rlus + preprocessor: ppNOLEV1 + reference_dataset: CERES-EBAF + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2001 + end_year: 2005 + additional_datasets: + # CMIP5 + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + # CMIP6 + - {dataset: ACCESS-CM2, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: ACCESS-ESM1-5, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: AWI-CM-1-1-MR, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: AWI-ESM-1-1-LR, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CanESM5, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: CanESM5-CanOE, grid: gn, ensemble: r1i1p2f1, project: CMIP6} + - {dataset: CESM2, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: CESM2-FV2, grid: gn, ensemble: r1i1p1f1, institute: NCAR, + project: CMIP6} + - {dataset: CESM2-WACCM, grid: gn, ensemble: r1i1p1f1, institute: NCAR, + project: CMIP6} + - {dataset: CESM2-WACCM-FV2, grid: gn, ensemble: r1i1p1f1, + institute: NCAR, project: CMIP6} + - {dataset: 
CIESM, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: CMCC-CM2-HR4, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CMCC-ESM2, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr, project: CMIP6} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr, project: CMIP6} + - {dataset: CNRM-ESM2-1, grid: gr, ensemble: r7i1p1f2, project: CMIP6} + - {dataset: E3SM-1-0, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: E3SM-1-1-ECA, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: E3SM-1-1, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: EC-Earth3, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: EC-Earth3-AerChem, ensemble: r1i1p1f1, grid: gr, + project: CMIP6} + - {dataset: EC-Earth3-CC, grid: gr, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: EC-Earth3-Veg-LR, ensemble: r1i1p1f1, grid: gr, + project: CMIP6} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: FIO-ESM-2-0, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1, project: CMIP6} + - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1, project: CMIP6} + - {dataset: GISS-E2-1-G, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: GISS-E2-1-G-CC, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn, + project: CMIP6} + - {dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f3, grid: gn, + project: CMIP6} + - {dataset: IITM-ESM, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: INM-CM4-8, grid: gr1, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: INM-CM5-0, grid: gr1, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: IPSL-CM5A2-INCA, ensemble: r1i1p1f1, grid: gr, + project: CMIP6} + - {dataset: IPSL-CM6A-LR, grid: gr, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: KIOST-ESM, ensemble: r1i1p1f1, grid: gr1, project: CMIP6} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: MIROC-ES2L, grid: gn, ensemble: r1i1p1f2, project: CMIP6} + - {dataset: MPI-ESM-1-2-HAM, grid: gn, ensemble: r1i1p1f1, + project: CMIP6} + - {dataset: MPI-ESM1-2-HR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: MPI-ESM1-2-LR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: NorCPM1, grid: gn, ensemble: r1i1p1f1, institute: NCC, + project: CMIP6} + - {dataset: NorESM2-LM, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: NorESM2-MM, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: UKESM1-0-LL, grid: gn, ensemble: r1i1p1f2, project: CMIP6} + # OBS + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, + version: Ed2-8, tier: 1, start_year: 2001, end_year: 2005} + scripts: + grading: + <<: *grading_settings + + + rlds: + description: surface_downwelling_longwave_flux_in_air + themes: + - phys + realms: + - atmos + variables: + rlds: + short_name: rlds + preprocessor: 
ppNOLEV1 + reference_dataset: CERES-EBAF + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2001 + end_year: 2005 + additional_datasets: + # CMIP5 + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + # CMIP6 + - {dataset: ACCESS-CM2, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: ACCESS-ESM1-5, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: AWI-CM-1-1-MR, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: AWI-ESM-1-1-LR, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CanESM5, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: CanESM5-CanOE, grid: gn, ensemble: r1i1p2f1, project: CMIP6} + - {dataset: CESM2, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: CESM2-FV2, grid: gn, ensemble: r1i1p1f1, institute: NCAR, + project: CMIP6} + - {dataset: CESM2-WACCM, grid: gn, ensemble: r1i1p1f1, institute: NCAR, + project: CMIP6} + - {dataset: CESM2-WACCM-FV2, grid: gn, ensemble: r1i1p1f1, + institute: NCAR, project: CMIP6} + - {dataset: CIESM, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: CMCC-CM2-HR4, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CMCC-ESM2, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr, project: CMIP6} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr, project: CMIP6} + - {dataset: CNRM-ESM2-1, grid: gr, ensemble: r7i1p1f2, project: CMIP6} + - {dataset: E3SM-1-0, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: E3SM-1-1-ECA, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: E3SM-1-1, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: EC-Earth3, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: EC-Earth3-AerChem, ensemble: r1i1p1f1, grid: gr, + project: CMIP6} + - {dataset: EC-Earth3-CC, grid: gr, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: EC-Earth3-Veg-LR, ensemble: r1i1p1f1, grid: gr, + project: CMIP6} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn, 
project: CMIP6} + - {dataset: FIO-ESM-2-0, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1, project: CMIP6} + - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1, project: CMIP6} + - {dataset: GISS-E2-1-G, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: GISS-E2-1-G-CC, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn, + project: CMIP6} + - {dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f3, grid: gn, + project: CMIP6} + - {dataset: IITM-ESM, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: INM-CM4-8, grid: gr1, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: INM-CM5-0, grid: gr1, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: IPSL-CM5A2-INCA, ensemble: r1i1p1f1, grid: gr, + project: CMIP6} + - {dataset: IPSL-CM6A-LR, grid: gr, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: KIOST-ESM, ensemble: r1i1p1f1, grid: gr1, project: CMIP6} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: MIROC-ES2L, grid: gn, ensemble: r1i1p1f2, project: CMIP6} + - {dataset: MPI-ESM-1-2-HAM, grid: gn, ensemble: r1i1p1f1, + project: CMIP6} + - {dataset: MPI-ESM1-2-HR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: MPI-ESM1-2-LR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: NorCPM1, grid: gn, ensemble: r1i1p1f1, institute: NCC, + project: CMIP6} + - {dataset: NorESM2-LM, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: NorESM2-MM, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: UKESM1-0-LL, grid: gn, ensemble: r1i1p1f2, project: CMIP6} + # OBS + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, + version: Ed2-8, tier: 1, start_year: 2001, end_year: 2005} + scripts: + grading: + <<: *grading_settings + + + rsus: + description: surface_upwelling_shortwave_flux_in_air + themes: + - phys + realms: + - atmos + variables: + rsus: + short_name: rsus + preprocessor: ppNOLEV1 + reference_dataset: CERES-EBAF + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2001 + end_year: 2005 + additional_datasets: + # CMIP5 + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: 
MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + # CMIP6 + - {dataset: ACCESS-CM2, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: ACCESS-ESM1-5, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: AWI-CM-1-1-MR, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: AWI-ESM-1-1-LR, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CanESM5, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: CanESM5-CanOE, grid: gn, ensemble: r1i1p2f1, project: CMIP6} + - {dataset: CESM2, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: CESM2-FV2, grid: gn, ensemble: r1i1p1f1, institute: NCAR, + project: CMIP6} + - {dataset: CESM2-WACCM, grid: gn, ensemble: r1i1p1f1, institute: NCAR, + project: CMIP6} + - {dataset: CESM2-WACCM-FV2, grid: gn, ensemble: r1i1p1f1, + institute: NCAR, project: CMIP6} + - {dataset: CIESM, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: CMCC-CM2-HR4, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CMCC-ESM2, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr, project: CMIP6} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr, project: CMIP6} + - {dataset: CNRM-ESM2-1, grid: gr, ensemble: r7i1p1f2, project: CMIP6} + - {dataset: E3SM-1-0, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: E3SM-1-1-ECA, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: E3SM-1-1, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: EC-Earth3, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: EC-Earth3-AerChem, ensemble: r1i1p1f1, grid: gr, + project: CMIP6} + - {dataset: EC-Earth3-CC, grid: gr, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: EC-Earth3-Veg-LR, ensemble: r1i1p1f1, grid: gr, + project: CMIP6} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: FIO-ESM-2-0, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1, project: CMIP6} + - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1, project: CMIP6} + - {dataset: GISS-E2-1-G, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: GISS-E2-1-G-CC, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn, + project: CMIP6} + - {dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f3, grid: gn, + project: CMIP6} + - {dataset: IITM-ESM, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: INM-CM4-8, grid: gr1, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: INM-CM5-0, grid: gr1, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: IPSL-CM5A2-INCA, ensemble: r1i1p1f1, grid: gr, + project: CMIP6} + - {dataset: IPSL-CM6A-LR, grid: gr, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: KIOST-ESM, ensemble: r1i1p1f1, grid: gr1, project: CMIP6} + - {dataset: MIROC6, ensemble: r1i1p1f1, 
grid: gn, project: CMIP6} + - {dataset: MIROC-ES2L, grid: gn, ensemble: r1i1p1f2, project: CMIP6} + - {dataset: MPI-ESM-1-2-HAM, grid: gn, ensemble: r1i1p1f1, + project: CMIP6} + - {dataset: MPI-ESM1-2-HR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: MPI-ESM1-2-LR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: NorCPM1, grid: gn, ensemble: r1i1p1f1, institute: NCC, + project: CMIP6} + - {dataset: NorESM2-LM, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: NorESM2-MM, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: UKESM1-0-LL, grid: gn, ensemble: r1i1p1f2, project: CMIP6} + # OBS + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, + version: Ed2-8, tier: 1, start_year: 2001, end_year: 2005} + scripts: + grading: + <<: *grading_settings + + + rsds: + description: surface_downwelling_shortwave_flux_in_air + themes: + - phys + realms: + - atmos + variables: + rsds: + short_name: rsds + preprocessor: ppNOLEV1 + reference_dataset: CERES-EBAF + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2001 + end_year: 2005 + additional_datasets: + # CMIP5 + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + # CMIP6 + - {dataset: ACCESS-CM2, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: ACCESS-ESM1-5, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: AWI-CM-1-1-MR, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: AWI-ESM-1-1-LR, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CanESM5, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: CanESM5-CanOE, grid: gn, ensemble: r1i1p2f1, project: CMIP6} + - {dataset: CESM2, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: CESM2-FV2, grid: gn, ensemble: r1i1p1f1, institute: NCAR, + project: CMIP6} + - {dataset: CESM2-WACCM, grid: gn, ensemble: r1i1p1f1, institute: NCAR, + project: CMIP6} + - 
{dataset: CESM2-WACCM-FV2, grid: gn, ensemble: r1i1p1f1, + institute: NCAR, project: CMIP6} + - {dataset: CIESM, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: CMCC-CM2-HR4, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CMCC-ESM2, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr, project: CMIP6} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr, project: CMIP6} + - {dataset: CNRM-ESM2-1, grid: gr, ensemble: r7i1p1f2, project: CMIP6} + - {dataset: E3SM-1-0, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: E3SM-1-1-ECA, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: E3SM-1-1, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: EC-Earth3, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: EC-Earth3-AerChem, ensemble: r1i1p1f1, grid: gr, + project: CMIP6} + - {dataset: EC-Earth3-CC, grid: gr, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: EC-Earth3-Veg-LR, ensemble: r1i1p1f1, grid: gr, + project: CMIP6} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: FIO-ESM-2-0, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1, project: CMIP6} + - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1, project: CMIP6} + - {dataset: GISS-E2-1-G, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: GISS-E2-1-G-CC, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn, + project: CMIP6} + - {dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f3, grid: gn, + project: CMIP6} + - {dataset: IITM-ESM, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: INM-CM4-8, grid: gr1, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: INM-CM5-0, grid: gr1, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: IPSL-CM5A2-INCA, ensemble: r1i1p1f1, grid: gr, + project: CMIP6} + - {dataset: IPSL-CM6A-LR, grid: gr, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: KIOST-ESM, ensemble: r1i1p1f1, grid: gr1, project: CMIP6} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: MIROC-ES2L, grid: gn, ensemble: r1i1p1f2, project: CMIP6} + - {dataset: MPI-ESM-1-2-HAM, grid: gn, ensemble: r1i1p1f1, + project: CMIP6} + - {dataset: MPI-ESM1-2-HR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: MPI-ESM1-2-LR, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: NorCPM1, grid: gn, ensemble: r1i1p1f1, institute: NCC, + project: CMIP6} + - {dataset: NorESM2-LM, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: NorESM2-MM, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: UKESM1-0-LL, grid: gn, ensemble: r1i1p1f2, project: CMIP6} + # OBS + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, + version: Ed2-8, tier: 1, start_year: 2001, end_year: 2005} + scripts: + grading: + <<: *grading_settings + + + hfds: + description: 
surface_downward_heat_flux_in_sea_water + themes: + - phys + realms: + - ocean + variables: + hfds: + short_name: hfds + preprocessor: ppNOLEV1 + reference_dataset: ERA-Interim + mip: Omon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 1999 + grid: gr + additional_datasets: + # CMIP5 + - {dataset: CMCC-CM} + - {dataset: CNRM-CM5} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: EC-EARTH, ensemble: r1i1p1} + - {dataset: FGOALS-g2} + - {dataset: FIO-ESM} + - {dataset: GFDL-ESM2G} + - {dataset: inmcm4} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + # CMIP6 + - {dataset: ACCESS-CM2, institute: CSIRO-ARCCSS, grid: gn, + ensemble: r1i1p1f1, project: CMIP6} + - {dataset: ACCESS-ESM1-5, institute: CSIRO, grid: gn, + ensemble: r1i1p1f1, project: CMIP6} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CanESM5, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn, project: CMIP6} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CESM2-FV2, institute: NCAR, ensemble: r1i1p1f1, grid: gn, + project: CMIP6} + - {dataset: CESM2-WACCM, institute: NCAR, ensemble: r1i1p1f1, grid: gn, + project: CMIP6} + - {dataset: CESM2-WACCM-FV2, institute: NCAR, ensemble: r1i1p1f1, + grid: gn, project: CMIP6} + - {dataset: CIESM, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: CMCC-CM2-HR4, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CMCC-ESM2, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gn, project: CMIP6} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gn, project: CMIP6} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gn, project: CMIP6} + - {dataset: E3SM-1-0, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: E3SM-1-1, institute: E3SM-Project, ensemble: r1i1p1f1, + grid: gr, project: CMIP6} + - {dataset: E3SM-1-1-ECA, institute: E3SM-Project, ensemble: r1i1p1f1, + grid: gr, project: CMIP6} + - {dataset: EC-Earth3, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: EC-Earth3-AerChem, ensemble: r1i1p1f1, grid: gn, + project: CMIP6} + - {dataset: EC-Earth3-CC, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: EC-Earth3-Veg, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: EC-Earth3-Veg-LR, ensemble: r1i1p1f1, grid: gn, + project: CMIP6} + - {dataset: FIO-ESM-2-0, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: GISS-E2-1-G, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn, + project: CMIP6} + - {dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f3, grid: gn, + project: CMIP6} + - {dataset: IPSL-CM6A-LR, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: MPI-ESM-1-2-HAM, ensemble: r1i1p1f1, grid: gn, + project: CMIP6} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: MPI-ESM1-2-HR, 
ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: NorCPM1, grid: gn, institute: NCC, ensemble: r10i1p1f1, + project: CMIP6} + - {dataset: NorESM2-LM, grid: gn, institute: NCC, ensemble: r1i1p1f1, + project: CMIP6} + - {dataset: NorESM2-MM, grid: gn, institute: NCC, ensemble: r1i1p1f1, + project: CMIP6} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: TaiESM1, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn, project: CMIP6} + # OBS + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + scripts: + grading: + <<: *grading_settings + + + siconc: + description: Sea ice concentration + themes: + - phys + realms: + - ocean + variables: + sic: + short_name: sic + preprocessor: pp_1x1 + reference_dataset: HadISST + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 1999 + additional_datasets: + # CMIP5 + - &cmip5 {dataset: ACCESS1-0, mip: OImon, project: CMIP5} + - {<<: *cmip5, dataset: ACCESS1-3} + - {<<: *cmip5, dataset: BNU-ESM} + - {<<: *cmip5, dataset: CanCM4} + - {<<: *cmip5, dataset: CanESM2} + - {<<: *cmip5, dataset: CCSM4} + - {<<: *cmip5, dataset: CESM1-BGC} + - {<<: *cmip5, dataset: CESM1-CAM5} + - {<<: *cmip5, dataset: CESM1-CAM5-1-FV2} + - {<<: *cmip5, dataset: CESM1-FASTCHEM} + - {<<: *cmip5, dataset: CESM1-WACCM} + - {<<: *cmip5, dataset: CMCC-CM} + - {<<: *cmip5, dataset: CMCC-CMS} + - {<<: *cmip5, dataset: CNRM-CM5} + - {<<: *cmip5, dataset: CNRM-CM5-2} + - {<<: *cmip5, dataset: CSIRO-Mk3-6-0} + - {<<: *cmip5, dataset: EC-EARTH, ensemble: r6i1p1} + - {<<: *cmip5, dataset: FGOALS-g2} + - {<<: *cmip5, dataset: FIO-ESM} + - {<<: *cmip5, dataset: GFDL-CM2p1} + - {<<: *cmip5, dataset: GFDL-CM3} + - {<<: *cmip5, dataset: GFDL-ESM2G} + - {<<: *cmip5, dataset: GFDL-ESM2M} + - {<<: *cmip5, dataset: GISS-E2-H, ensemble: r1i1p2} + - {<<: *cmip5, dataset: GISS-E2-H-CC} + - {<<: *cmip5, dataset: GISS-E2-R, ensemble: r1i1p2} + - {<<: *cmip5, dataset: GISS-E2-R-CC} + - {<<: *cmip5, dataset: HadCM3} + - {<<: *cmip5, dataset: HadGEM2-AO} + - {<<: *cmip5, dataset: HadGEM2-CC} + - {<<: *cmip5, dataset: HadGEM2-ES} + - {<<: *cmip5, dataset: inmcm4} + - {<<: *cmip5, dataset: IPSL-CM5A-LR} + - {<<: *cmip5, dataset: IPSL-CM5A-MR} + - {<<: *cmip5, dataset: IPSL-CM5B-LR} + - {<<: *cmip5, dataset: MIROC4h} + - {<<: *cmip5, dataset: MIROC5} + - {<<: *cmip5, dataset: MIROC-ESM} + - {<<: *cmip5, dataset: MIROC-ESM-CHEM} + - {<<: *cmip5, dataset: MPI-ESM-LR} + - {<<: *cmip5, dataset: MPI-ESM-MR} + - {<<: *cmip5, dataset: MPI-ESM-P} + - {<<: *cmip5, dataset: MRI-CGCM3} + - {<<: *cmip5, dataset: MRI-ESM1} + - {<<: *cmip5, dataset: NorESM1-M} + - {<<: *cmip5, dataset: NorESM1-ME} + - {<<: *cmip5, dataset: HadISST, project: OBS, type: reanaly, + version: 1, tier: 2} + siconc: + short_name: siconc + preprocessor: pp_1x1 + reference_dataset: HadISST + mip: SImon + exp: historical + ensemble: r1i1p1 + grid: gr + start_year: 1980 + end_year: 1999 + additional_datasets: + # CMIP6 + - {dataset: ACCESS-CM2, institute: CSIRO-ARCCSS, grid: gn, + ensemble: r1i1p1f1, project: CMIP6} + - {dataset: ACCESS-ESM1-5, institute: CSIRO, grid: gn, + ensemble: r1i1p1f1, project: CMIP6} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn, + project: CMIP6} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn, + project: CMIP6} + - {dataset: CanESM5, 
ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn, + project: CMIP6} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CESM2-FV2, institute: NCAR, ensemble: r1i1p1f1, grid: gn, + project: CMIP6} + - {dataset: CESM2-WACCM, institute: NCAR, ensemble: r1i1p1f1, + grid: gn, project: CMIP6} + - {dataset: CESM2-WACCM-FV2, institute: NCAR, ensemble: r1i1p1f1, + grid: gn, project: CMIP6} + - {dataset: CMCC-CM2-HR4, ensemble: r1i1p1f1, grid: gn, + project: CMIP6} + - {dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn, + project: CMIP6} + - {dataset: CMCC-ESM2, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gn, project: CMIP6} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gn, + project: CMIP6} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gn, + project: CMIP6} + - {dataset: E3SM-1-0, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: E3SM-1-1, institute: E3SM-Project, ensemble: r1i1p1f1, + grid: gr, project: CMIP6} + - {dataset: E3SM-1-1-ECA, institute: E3SM-Project, + ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: EC-Earth3-AerChem, ensemble: r1i1p1f1, grid: gn, + project: CMIP6} + - {dataset: EC-Earth3-CC, ensemble: r1i1p1f1, grid: gn, + project: CMIP6} + - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: FIO-ESM-2-0, ensemble: r1i1p1f1, grid: gn, + project: CMIP6} + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gr, + project: CMIP6} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn, + project: CMIP6} + - {dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f3, grid: gn, + project: CMIP6} + - {dataset: INM-CM4-8, grid: gr1, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: INM-CM5-0, grid: gr1, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: IPSL-CM5A2-INCA, ensemble: r1i1p1f1, grid: gn, + project: CMIP6} + - {dataset: IPSL-CM6A-LR, ensemble: r1i1p1f1, grid: gn, + project: CMIP6} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn, project: CMIP6} + - {dataset: MPI-ESM-1-2-HAM, ensemble: r1i1p1f1, grid: gn, + project: CMIP6} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn, + project: CMIP6} + - {dataset: MPI-ESM1-2-HR, ensemble: r1i1p1f1, grid: gn, + project: CMIP6} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: NorCPM1, grid: gn, institute: NCC, ensemble: r10i1p1f1, + project: CMIP6} + - {dataset: NorESM2-MM, grid: gn, institute: NCC, ensemble: r1i1p1f1, + project: CMIP6} + - {dataset: TaiESM1, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn, + project: CMIP6} + scripts: + grading_shpolar: + <<: *grading_settings + region: shpolar + grading_nhpolar: + <<: *grading_settings + region: nhpolar + + + tos: + description: Sea Surface Temperature + themes: + - phys + realms: + - ocean + variables: + tos: + short_name: tos + preprocessor: ppNOLEV2 + reference_dataset: HadISST + alternative_dataset: ATSR + mip: Omon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + grid: gr + start_year: 1980 + end_year: 1999 + additional_datasets: + # CMIP5 + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: 
bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: FGOALS-s2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + # CMIP6 + - {dataset: ACCESS-CM2, institute: CSIRO-ARCCSS, grid: gn, + ensemble: r1i1p1f1, project: CMIP6} + - {dataset: ACCESS-ESM1-5, institute: CSIRO, grid: gn, + ensemble: r1i1p1f1, project: CMIP6} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CanESM5, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn, project: CMIP6} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CESM2-FV2, institute: NCAR, ensemble: r1i1p1f1, grid: gn, + project: CMIP6} + - {dataset: CESM2-WACCM, institute: NCAR, ensemble: r1i1p1f1, grid: gn, + project: CMIP6} + - {dataset: CESM2-WACCM-FV2, institute: NCAR, ensemble: r1i1p1f1, + grid: gn, project: CMIP6} + - {dataset: CIESM, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: CMCC-CM2-HR4, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CMCC-ESM2, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gn, project: CMIP6} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gn, project: CMIP6} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gn, project: CMIP6} + - {dataset: E3SM-1-0, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: E3SM-1-1, institute: E3SM-Project, ensemble: r1i1p1f1, + grid: gr, project: CMIP6} + - {dataset: E3SM-1-1-ECA, institute: E3SM-Project, ensemble: r1i1p1f1, + grid: gr, project: CMIP6} + - {dataset: EC-Earth3, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: EC-Earth3-AerChem, ensemble: r1i1p1f1, grid: gn, + project: CMIP6} + - {dataset: EC-Earth3-CC, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: EC-Earth3-Veg, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: EC-Earth3-Veg-LR, ensemble: r1i1p1f1, grid: gn, + project: CMIP6} + - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: FIO-ESM-2-0, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr, project: CMIP6} + - {dataset: GISS-E2-1-G, ensemble: r1i1p1f1, 
grid: gn, project: CMIP6} + - {dataset: GISS-E2-1-G-CC, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn, + project: CMIP6} + - {dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f3, grid: gn, + project: CMIP6} + - {dataset: INM-CM4-8, grid: gr1, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: INM-CM5-0, grid: gr1, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: IPSL-CM5A2-INCA, ensemble: r1i1p1f1, grid: gn, + project: CMIP6} + - {dataset: IPSL-CM6A-LR, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: KIOST-ESM, ensemble: r1i1p1f1, grid: gr1, project: CMIP6} + - {dataset: MCM-UA-1-0, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn, project: CMIP6} + - {dataset: MPI-ESM-1-2-HAM, ensemble: r1i1p1f1, grid: gn, + project: CMIP6} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: MPI-ESM1-2-HR, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: NorCPM1, grid: gn, institute: NCC, ensemble: r10i1p1f1, + project: CMIP6} + - {dataset: NorESM2-LM, grid: gn, institute: NCC, ensemble: r1i1p1f1, + project: CMIP6} + - {dataset: NorESM2-MM, grid: gn, institute: NCC, ensemble: r1i1p1f1, + project: CMIP6} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn, project: CMIP6} + - {dataset: TaiESM1, grid: gn, ensemble: r1i1p1f1, project: CMIP6} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn, project: CMIP6} + # OBS + - {dataset: HadISST, project: OBS, type: reanaly, version: 1, tier: 2} + - {dataset: ATSR, project: obs4MIPs, level: L3, + version: ARC-v1.1.1, tier: 1, start_year: 1997, end_year: 2011} + scripts: + grading: + <<: *grading_settings + + + ### COLLECT METRICS ######################################################### + collect: + description: Wrapper to collect and plot previously calculated metrics + scripts: + RMSD: + script: perfmetrics/collect.ncl + ancestors: ['*/grading*'] + metric: RMSD + label_bounds: [-0.5, 0.5] + label_scale: 0.1 + no_labelbar: true + disp_values: false + cm_interval: [2, 241] + project_order: ['CMIP5', 'CMIP6'] + diag_order: ['hfds-global', 'siconc-shpolar', 'siconc-nhpolar', + 'tos-global', + 'sm-global', 'nbp-global', 'lai-global', 'gpp-global', + 'fgco2-global', 'et-global', + 'rsds-global', 'rsus-global', 'rlds-global', + 'rlus-global'] + diag_label: ['Downward Heat Flux at Sea Water Surface', + 'Sea Ice Concentration - SH', + 'Sea Ice Concentration - NH', 'Sea Surface Temperature', + 'Soil Moisture', + 'Surface Net Downward Mass Flux of Carbon', + 'Leaf Area Index', 'Gross Primary Productivity', + 'Surface Downward Mass Flux of Carbon', + 'Evapotranspiration', + 'Surface Downwelling Shortwave Radiation', + 'Surface Upwelling Shortwave Radiation', + 'Surface Downwelling Longwave Radiation', + 'Surface Upwelling Longwave Radiation'] diff --git a/esmvaltool/recipes/ipccwg1ar6ch3/recipe_ipccwg1ar6ch3_fig_3_43.yml b/esmvaltool/recipes/ipccwg1ar6ch3/recipe_ipccwg1ar6ch3_fig_3_43.yml new file mode 100644 index 0000000000..fd7a60efe8 --- /dev/null +++ b/esmvaltool/recipes/ipccwg1ar6ch3/recipe_ipccwg1ar6ch3_fig_3_43.yml @@ -0,0 +1,783 @@ +# ESMValTool +# recipe_ipccwg1ar6ch3_fig_3_43.yml +--- +documentation: + + title: IPCC AR6 
WG1 Chapter 3 Fig. 3.43 + + description: | + Producing Fig. 3.43 in the IPCC Working Group I + Contribution to the Sixth Assessment Report: Chapter 3 + + authors: + - bock_lisa + - eyring_veronika + + maintainer: + - bock_lisa + + references: + - eyring21ipcc + + projects: + - ipcc_ar6 + + +preprocessors: + + regrid_4_5: + regrid: + target_grid: 4x5 + scheme: linear + + regrid_4_5_pp200: + extract_levels: + levels: 20000 + scheme: linear + regrid: + target_grid: 4x5 + scheme: linear + + regrid_4_5_pp400: + extract_levels: + levels: 40000 + scheme: linear + regrid: + target_grid: 4x5 + scheme: linear + + regrid_4_5_pp500: + extract_levels: + levels: 50000 + scheme: linear + regrid: + target_grid: 4x5 + scheme: linear + + regrid_4_5_pp850: + extract_levels: + levels: 85000 + scheme: linear + regrid: + target_grid: 4x5 + scheme: linear + +datasets: + # CMIP3 + - &cmip3 {dataset: cccma_cgcm3_1, institute: CCCMA, project: CMIP3, + mip: A1, modeling_realm: atm, exp: 20c3m, frequency: mo, + ensemble: run1} + - {<<: *cmip3, dataset: cccma_cgcm3_1_t63, institute: CCCMA} + - {<<: *cmip3, dataset: csiro_mk3_0, institute: CSIRO} + - {<<: *cmip3, dataset: gfdl_cm2_0, institute: GFDL} + - {<<: *cmip3, dataset: gfdl_cm2_1, institute: GFDL} + - {<<: *cmip3, dataset: giss_model_e_h, institute: NASA} + - {<<: *cmip3, dataset: giss_model_e_r, institute: NASA} + - {<<: *cmip3, dataset: iap_fgoals1_0_g, institute: LASG} + - {<<: *cmip3, dataset: inmcm3_0, institute: INM} + - {<<: *cmip3, dataset: ipsl_cm4, institute: IPSL} + - {<<: *cmip3, dataset: miroc3_2_hires, institute: NIES} + - {<<: *cmip3, dataset: miroc3_2_medres, institute: NIES} + - {<<: *cmip3, dataset: mpi_echam5, institute: MPIM} + - {<<: *cmip3, dataset: mri_cgcm2_3_2a, institute: MRI} + - {<<: *cmip3, dataset: ncar_ccsm3_0, institute: NCAR} + - {<<: *cmip3, dataset: ncar_pcm1, institute: NCAR} + - {<<: *cmip3, dataset: ukmo_hadcm3, institute: UKMO} + # CMIP5 + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CM} + - {dataset: CNRM-CM5} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + # CMIP6 + - &cmip6 {dataset: ACCESS-CM2, grid: gn, ensemble: r1i1p1f1, + project: CMIP6} + - {<<: *cmip6, dataset: ACCESS-ESM1-5, institute: CSIRO} + - {<<: *cmip6, dataset: AWI-CM-1-1-MR} + - {<<: *cmip6, dataset: AWI-ESM-1-1-LR} + - {<<: *cmip6, dataset: BCC-CSM2-MR} + - {<<: *cmip6, dataset: CAMS-CSM1-0} + - {<<: *cmip6, dataset: CanESM5} + - {<<: *cmip6, dataset: CanESM5-CanOE, ensemble: r1i1p2f1} + - {<<: *cmip6, dataset: CESM2} + - {<<: *cmip6, dataset: CESM2-FV2, institute: NCAR} + - {<<: *cmip6, dataset: CESM2-WACCM, institute: NCAR} + - {<<: *cmip6, dataset: CESM2-WACCM-FV2, institute: NCAR} + - {<<: *cmip6, dataset: CIESM, grid: gr} + - {<<: *cmip6, dataset: CMCC-CM2-HR4} + - 
{<<: *cmip6, dataset: CMCC-CM2-SR5} + - {<<: *cmip6, dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr} + - {<<: *cmip6, dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-Veg-LR, grid: gr} + - {<<: *cmip6, dataset: FGOALS-f3-L, grid: gr} + - {<<: *cmip6, dataset: FGOALS-g3} + - {<<: *cmip6, dataset: GFDL-CM4, grid: gr1} + - {<<: *cmip6, dataset: GFDL-ESM4, grid: gr1} + - {<<: *cmip6, dataset: GISS-E2-1-G} + - {<<: *cmip6, dataset: GISS-E2-1-H} + - {<<: *cmip6, dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3} + - {<<: *cmip6, dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f3} + - {<<: *cmip6, dataset: IITM-ESM} + - {<<: *cmip6, dataset: INM-CM4-8, grid: gr1} + - {<<: *cmip6, dataset: INM-CM5-0, grid: gr1} + - {<<: *cmip6, dataset: IPSL-CM6A-LR, grid: gr} + - {<<: *cmip6, dataset: KACE-1-0-G, grid: gr} + - {<<: *cmip6, dataset: MIROC6} + - {<<: *cmip6, dataset: MIROC-ES2L, ensemble: r1i1p1f2} + - {<<: *cmip6, dataset: MPI-ESM-1-2-HAM} + - {<<: *cmip6, dataset: MPI-ESM1-2-LR} + - {<<: *cmip6, dataset: MPI-ESM1-2-HR} + - {<<: *cmip6, dataset: MRI-ESM2-0} + - {<<: *cmip6, dataset: NESM3} + - {<<: *cmip6, dataset: NorCPM1, institute: NCC, ensemble: r10i1p1f1} + - {<<: *cmip6, dataset: NorESM2-LM, institute: NCC} + - {<<: *cmip6, dataset: NorESM2-MM, institute: NCC} + - {<<: *cmip6, dataset: SAM0-UNICON} + - {<<: *cmip6, dataset: TaiESM1} + - {<<: *cmip6, dataset: UKESM1-0-LL, ensemble: r1i1p1f2} + + +diagnostics: + + # ********************************************************************** + # IPCC WGI AR6, chap. 3 + # Sect. 3.3 + # Fig. 3.43 + # ********************************************************************** + # Centered pattern correlation + # ********************************************************************** + + tas: &corr_diag + description: Calculate pattern correlation value + variables: + tas: &var_settings + preprocessor: regrid_4_5 + reference_dataset: ERA5 + alternative_dataset: NCEP + project: CMIP5 + exp: historical + ensemble: r1i1p1 + mip: Amon + start_year: 1980 + end_year: 1999 + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CMS} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-s2} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {<<: *cmip6, dataset: BCC-ESM1} + - {<<: *cmip6, dataset: CMCC-ESM2} + - {<<: *cmip6, dataset: E3SM-1-1, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-0, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-1-ECA, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-AerChem, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-CC, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-Veg, grid: gr} + - {<<: *cmip6, dataset: GISS-E2-1-G-CC} + - {<<: *cmip6, dataset: IPSL-CM5A2-INCA, grid: gr} + - {<<: *cmip6, dataset: KIOST-ESM, grid: gr1} + - {<<: *cmip6, dataset: MCM-UA-1-0} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, tier: 3} + - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + scripts: + pattern_cor: &fig_pattern_cor + script: ipcc_ar6/corr_pattern.ncl + + + pr: + <<: *corr_diag + variables: + pr: + <<: *var_settings + reference_dataset: GPCP-SG + alternative_dataset: GHCN + 
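+      # NOTE: `<<: *var_settings` is a YAML merge key: it copies the
+      # defaults anchored under the `tas` diagnostic above (preprocessor,
+      # CMIP5 project/ensemble, 1980-1999 period), and any key written out
+      # explicitly here, such as `reference_dataset`, overrides the merged
+      # value. Schematically (hypothetical anchor, not part of the recipe):
+      #   defaults: &d {a: 1, b: 2}
+      #   override: {<<: *d, b: 3}   # resolves to {a: 1, b: 3}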
additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CMS} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {<<: *cmip6, dataset: BCC-ESM1} + - {<<: *cmip6, dataset: CMCC-ESM2} + - {<<: *cmip6, dataset: E3SM-1-1, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-0, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-1-ECA, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-AerChem, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-CC, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-Veg, grid: gr} + - {<<: *cmip6, dataset: GISS-E2-1-G-CC} + - {<<: *cmip6, dataset: IPSL-CM5A2-INCA, grid: gr} + - {<<: *cmip6, dataset: MCM-UA-1-0} + - {dataset: GPCP-SG, project: obs4mips, level: L3, version: v2.2, tier: 1} + - {dataset: GHCN, project: OBS, type: ground, version: 1, tier: 2} + + + psl: + <<: *corr_diag + variables: + psl: + <<: *var_settings + reference_dataset: JRA-55 + alternative_dataset: ERA5 + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CMCC-CMS} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-s2} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: HadCM3} + - {<<: *cmip6, dataset: BCC-ESM1} + - {<<: *cmip6, dataset: CMCC-ESM2} + - {<<: *cmip6, dataset: E3SM-1-1, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-0, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-1-ECA, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-AerChem, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-CC, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-Veg, grid: gr} + - {<<: *cmip6, dataset: GISS-E2-1-G-CC} + - {<<: *cmip6, dataset: IPSL-CM5A2-INCA, grid: gr} + - {<<: *cmip6, dataset: KIOST-ESM, grid: gr1} + - {<<: *cmip6, dataset: MCM-UA-1-0} + - {dataset: JRA-55, project: ana4mips, type: reanalysis, tier: 1} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, tier: 3} + + + rlut: + <<: *corr_diag + variables: + rlut: + <<: *var_settings + reference_dataset: CERES-EBAF + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CMS} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-s2} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {<<: *cmip6, dataset: CMCC-ESM2} + - {<<: *cmip6, dataset: E3SM-1-1, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-0, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-1-ECA, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-AerChem, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-CC, 
grid: gr} + - {<<: *cmip6, dataset: GISS-E2-1-G-CC} + - {<<: *cmip6, dataset: IPSL-CM5A2-INCA, grid: gr} + - {<<: *cmip6, dataset: KIOST-ESM, grid: gr1} + - {<<: *cmip6, dataset: MCM-UA-1-0} + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-8, + tier: 1, start_year: 2001, end_year: 2015} + + + rsut: + <<: *corr_diag + variables: + rsut: + <<: *var_settings + reference_dataset: CERES-EBAF + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CMS} + - {dataset: FGOALS-s2} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {<<: *cmip6, dataset: BCC-ESM1} + - {<<: *cmip6, dataset: CMCC-ESM2} + - {<<: *cmip6, dataset: E3SM-1-1, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-0, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-1-ECA, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-AerChem, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-CC, grid: gr} + - {<<: *cmip6, dataset: GISS-E2-1-G-CC} + - {<<: *cmip6, dataset: IPSL-CM5A2-INCA, grid: gr} + - {<<: *cmip6, dataset: KIOST-ESM, grid: gr1} + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-8, + tier: 1, start_year: 2001, end_year: 2015} + + + lwcre: + <<: *corr_diag + variables: + lwcre: + <<: *var_settings + reference_dataset: CERES-EBAF + derive: true + force_derivation: false + additional_datasets: + - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO} + - {dataset: CanESM2} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CMS} + - {dataset: FGOALS-s2} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {<<: *cmip6, dataset: CMCC-ESM2} + - {<<: *cmip6, dataset: E3SM-1-1, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-0, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-1-ECA, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-AerChem, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-CC, grid: gr} + - {<<: *cmip6, dataset: GISS-E2-1-G-CC} + - {<<: *cmip6, dataset: IPSL-CM5A2-INCA, grid: gr} + - {<<: *cmip6, dataset: KIOST-ESM, grid: gr1} + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-8, + tier: 1, start_year: 2001, end_year: 2015} + + + swcre: + <<: *corr_diag + variables: + swcre: + <<: *var_settings + reference_dataset: CERES-EBAF + derive: true + force_derivation: false + additional_datasets: + - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CESM} + - {dataset: FGOALS-s2} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {<<: *cmip6, dataset: BCC-ESM1} + - {<<: *cmip6, dataset: CMCC-ESM2} + - {<<: *cmip6, dataset: E3SM-1-1, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-0, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-1-ECA, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-AerChem, grid: gr} + - {<<: *cmip6, dataset: 
EC-Earth3-CC, grid: gr} + - {<<: *cmip6, dataset: GISS-E2-1-G-CC} + - {<<: *cmip6, dataset: IPSL-CM5A2-INCA, grid: gr} + - {<<: *cmip6, dataset: KIOST-ESM, grid: gr1} + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-8, + tier: 1, start_year: 2001, end_year: 2015} + + + ts: + <<: *corr_diag + variables: + ts: + <<: *var_settings + reference_dataset: ESACCI-SST + alternative_dataset: HadISST + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {<<: *cmip3, dataset: ukmo_hadgem1, institute: UKMO} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CMS} + - {dataset: FGOALS-s2} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {<<: *cmip6, dataset: BCC-ESM1} + - {<<: *cmip6, dataset: CMCC-ESM2} + - {<<: *cmip6, dataset: E3SM-1-1, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-0, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-1-ECA, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-AerChem, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-CC, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-Veg, grid: gr} + - {<<: *cmip6, dataset: GISS-E2-1-G-CC} + - {<<: *cmip6, dataset: IPSL-CM5A2-INCA, grid: gr} + - {<<: *cmip6, dataset: KIOST-ESM, grid: gr1} + - {<<: *cmip6, dataset: MCM-UA-1-0} + - {dataset: ESACCI-SST, project: OBS, type: sat, + version: L4-GHRSST-SSTdepth-OSTIA-GLOB, tier: 2, start_year: 1992} + - {dataset: HadISST, project: OBS, type: reanaly, version: 1, tier: 2} + + + hus400: + <<: *corr_diag + variables: + hus: + <<: *var_settings + preprocessor: regrid_4_5_pp400 + reference_dataset: AIRS + alternative_dataset: ERA5 + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {dataset: CanESM2} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CMS} + - {dataset: FGOALS-s2} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R-CC} + - {<<: *cmip6, dataset: BCC-ESM1} + - {<<: *cmip6, dataset: E3SM-1-1, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-0, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-1-ECA, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: KIOST-ESM, grid: gr1} + - {<<: *cmip6, dataset: MCM-UA-1-0} + - {dataset: AIRS, project: obs4mips, level: L3, + version: RetStd-v5, tier: 1, start_year: 2003, end_year: 2010} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, tier: 3} + + + zg500: + <<: *corr_diag + variables: + zg: + <<: *var_settings + preprocessor: regrid_4_5_pp500 + reference_dataset: ERA5 + alternative_dataset: NCEP + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CMS} + - {dataset: FGOALS-s2} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {<<: *cmip6, dataset: BCC-ESM1} + - {<<: *cmip6, dataset: CMCC-ESM2} + - {<<: *cmip6, dataset: E3SM-1-1, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-0, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-1-ECA, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3, grid: gr} + - {<<: *cmip6, 
dataset: EC-Earth3-AerChem, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-CC, grid: gr} + - {<<: *cmip6, dataset: GISS-E2-1-G-CC} + - {<<: *cmip6, dataset: MCM-UA-1-0} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, tier: 3} + - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + + + ta200: + <<: *corr_diag + variables: + ta: + <<: *var_settings + preprocessor: regrid_4_5_pp200 + reference_dataset: ERA5 + alternative_dataset: NCEP + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CMS} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-s2} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {<<: *cmip6, dataset: BCC-ESM1} + - {<<: *cmip6, dataset: CMCC-ESM2} + - {<<: *cmip6, dataset: EC-Earth3, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-AerChem, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-CC, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-Veg, grid: gr} + - {<<: *cmip6, dataset: GISS-E2-1-G-CC} + - {<<: *cmip6, dataset: IPSL-CM5A2-INCA, grid: gr} + - {<<: *cmip6, dataset: KIOST-ESM, grid: gr1} + - {<<: *cmip6, dataset: MCM-UA-1-0} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, tier: 3} + - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + + + ta850: + <<: *corr_diag + variables: + ta: + <<: *var_settings + preprocessor: regrid_4_5_pp850 + reference_dataset: ERA5 + alternative_dataset: NCEP + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CMS} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-s2} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {<<: *cmip6, dataset: BCC-ESM1} + - {<<: *cmip6, dataset: CMCC-ESM2} + - {<<: *cmip6, dataset: EC-Earth3, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-AerChem, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-CC, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-Veg, grid: gr} + - {<<: *cmip6, dataset: GISS-E2-1-G-CC} + - {<<: *cmip6, dataset: IPSL-CM5A2-INCA, grid: gr} + - {<<: *cmip6, dataset: KIOST-ESM, grid: gr1} + - {<<: *cmip6, dataset: MCM-UA-1-0} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, tier: 3} + - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + + + ua200: + <<: *corr_diag + variables: + ua: + <<: *var_settings + preprocessor: regrid_4_5_pp200 + reference_dataset: ERA5 + alternative_dataset: NCEP + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CMS} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {<<: *cmip6, dataset: BCC-ESM1} + - {<<: *cmip6, dataset: CMCC-ESM2} + - {<<: *cmip6, dataset: E3SM-1-0, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-1-ECA, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-AerChem, 
grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-CC, grid: gr} + - {<<: *cmip6, dataset: GISS-E2-1-G-CC} + - {<<: *cmip6, dataset: IPSL-CM5A2-INCA, grid: gr} + - {<<: *cmip6, dataset: KIOST-ESM, grid: gr1} + - {<<: *cmip6, dataset: MCM-UA-1-0} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, tier: 3} + - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + + + ua850: + <<: *corr_diag + variables: + ua: + <<: *var_settings + preprocessor: regrid_4_5_pp850 + reference_dataset: ERA5 + alternative_dataset: NCEP + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CMS} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {<<: *cmip6, dataset: BCC-ESM1} + - {<<: *cmip6, dataset: CMCC-ESM2} + - {<<: *cmip6, dataset: E3SM-1-0, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-1-ECA, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-AerChem, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-CC, grid: gr} + - {<<: *cmip6, dataset: GISS-E2-1-G-CC} + - {<<: *cmip6, dataset: IPSL-CM5A2-INCA, grid: gr} + - {<<: *cmip6, dataset: KIOST-ESM, grid: gr1} + - {<<: *cmip6, dataset: MCM-UA-1-0} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, tier: 3} + - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + + + va200: + <<: *corr_diag + variables: + va: + <<: *var_settings + preprocessor: regrid_4_5_pp200 + reference_dataset: ERA5 + alternative_dataset: NCEP + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CMS} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {<<: *cmip6, dataset: BCC-ESM1} + - {<<: *cmip6, dataset: CMCC-ESM2} + - {<<: *cmip6, dataset: E3SM-1-0, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-1-ECA, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-AerChem, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-CC, grid: gr} + - {<<: *cmip6, dataset: GISS-E2-1-G-CC} + - {<<: *cmip6, dataset: IPSL-CM5A2-INCA, grid: gr} + - {<<: *cmip6, dataset: KIOST-ESM, grid: gr1} + - {<<: *cmip6, dataset: MCM-UA-1-0} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, tier: 3} + - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + + + va850: + <<: *corr_diag + variables: + va: + <<: *var_settings + preprocessor: regrid_4_5_pp850 + reference_dataset: ERA5 + alternative_dataset: NCEP + additional_datasets: + - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CMS} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: GFDL-CM2p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {<<: *cmip6, dataset: BCC-ESM1} + - {<<: *cmip6, dataset: CMCC-ESM2} + - {<<: *cmip6, dataset: E3SM-1-0, grid: gr} + - {<<: *cmip6, dataset: E3SM-1-1-ECA, institute: E3SM-Project, grid: gr} + - {<<: *cmip6, dataset: 
EC-Earth3-AerChem, grid: gr} + - {<<: *cmip6, dataset: EC-Earth3-CC, grid: gr} + - {<<: *cmip6, dataset: GISS-E2-1-G-CC} + - {<<: *cmip6, dataset: IPSL-CM5A2-INCA, grid: gr} + - {<<: *cmip6, dataset: KIOST-ESM, grid: gr1} + - {<<: *cmip6, dataset: MCM-UA-1-0} + - {dataset: ERA5, project: native6, type: reanaly, version: v1, tier: 3} + - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + + + ### COLLECT CORRELATIONS AND PLOT ########################################### + fig_3_43: + description: Wrapper to collect and plot previously calculated correlations + scripts: + cor_collect: + script: ipcc_ar6/corr_pattern_collect.ncl + ancestors: ['*/pattern_cor'] + project_order: ['CMIP3', 'CMIP5', 'CMIP6'] + diag_order: ['tas', 'pr', 'rsut', 'rlut', 'swcre', + 'lwcre', 'psl', 'ts', 'ta850', 'ta200', + 'ua850', 'ua200', 'va850', 'va200', 'zg500', 'hus400'] + labels: [' Near-Surface ~C~ Air Temperature', 'Precipitation', + 'TOA Outgoing ~C~ Shortwave ~C~ Radiation', + 'TOA Outgoing ~C~ Longwave ~C~ Radiation', + ' TOA Shortwave ~C~ Cloud Radiative ~C~ Effect', + ' TOA Longwave ~C~ Cloud Radiative ~C~ Effect', + 'Sea Level ~C~ Pressure', + ' Surface ~C~ Temperature', + 'Temperature ~C~ 850 hPa', 'Temperature ~C~ 200 hPa', + 'Eastward Wind ~C~ 850 hPa', + 'Eastward Wind ~C~ 200 hPa', + 'Northward Wind ~C~ 850 hPa', + 'Northward Wind ~C~ 200 hPa', + 'Geopotential ~C~ Height ~C~ 500 hPa', + ' Specific ~C~ Humidity ~C~ 400 hPa'] diff --git a/esmvaltool/recipes/ipccwg1ar6ch3/recipe_ipccwg1ar6ch3_fig_3_9.yml b/esmvaltool/recipes/ipccwg1ar6ch3/recipe_ipccwg1ar6ch3_fig_3_9.yml new file mode 100644 index 0000000000..20d7ac6a88 --- /dev/null +++ b/esmvaltool/recipes/ipccwg1ar6ch3/recipe_ipccwg1ar6ch3_fig_3_9.yml @@ -0,0 +1,836 @@ +# ESMValTool +# recipe_ipccwg1ar6ch3_fig_3_9.yml +--- +documentation: + + title: IPCC AR6 WG1 Chapter 3 Fig. 3.9 + + description: | + Producing Fig. 3.9 in the IPCC Working Group I Contribution to the Sixth + Assessment Report: Chapter 3. + To run the recipe, please provide the shapefile of the AR6 reference regions + (see detailed information in the documentation of this recipe).
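+  # NOTE: `extract_shape` resolves a relative `shapefile` path against the
+  # `auxiliary_data_dir` of the user configuration, so one way to provide
+  # the regions (paths below are only an example) is:
+  #   # in config-user.yml
+  #   auxiliary_data_dir: ~/auxiliary_data
+  #   # with the shapefile unpacked to
+  #   #   ~/auxiliary_data/IPCC-Regions/IPCC-WGI-reference-regions-v4.shp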
+ + authors: + - bock_lisa + - eyring_veronika + + maintainer: + - bock_lisa + + projects: + - ipcc_ar6 + + references: + - eyring21ipcc + - iturbide20essd + + +preprocessors: + + regrid_5_5: + regrid: + target_grid: 5x5 + scheme: linear + + regrid_5_5_mmm: + regrid: + target_grid: 5x5 + scheme: linear + multi_model_statistics: + span: full + statistics: [mean] + + regrid_5_5_mmm_land: + mask_landsea: + mask_out: sea + regrid: + target_grid: 5x5 + scheme: linear + multi_model_statistics: + span: full + statistics: [mean] + + regrid_5_5_land: + mask_landsea: + mask_out: sea + regrid: + target_grid: 5x5 + scheme: linear + + regrid_5_5_mmm_ocean: + mask_landsea: + mask_out: land + regrid: + target_grid: 5x5 + scheme: linear + multi_model_statistics: + span: full + statistics: [mean] + + regrid_5_5_ocean: + mask_landsea: + mask_out: land + regrid: + target_grid: 5x5 + scheme: linear + + regrid_5_5_mmm_north_america: + extract_shape: + shapefile: IPCC-Regions/IPCC-WGI-reference-regions-v4.shp + method: contains + crop: false + ids: + - Greenland/Iceland + - N.W.North-America + - N.E.North-America + - W.North-America + - C.North-America + - E.North-America + regrid: + target_grid: 5x5 + scheme: linear + multi_model_statistics: + span: full + statistics: [mean] + + regrid_5_5_north_america: + extract_shape: + shapefile: IPCC-Regions/IPCC-WGI-reference-regions-v4.shp + method: contains + crop: false + ids: + - Greenland/Iceland + - N.W.North-America + - N.E.North-America + - W.North-America + - C.North-America + - E.North-America + regrid: + target_grid: 5x5 + scheme: linear + + regrid_5_5_mmm_south_america: + extract_shape: + shapefile: IPCC-Regions/IPCC-WGI-reference-regions-v4.shp + method: contains + crop: false + ids: + - N.Central-America + - S.Central-America + - Caribbean + - N.W.South-America + - N.South-America + - N.E.South-America + - South-American-Monsoon + - S.W.South-America + - S.E.South-America + - S.South-America + regrid: + target_grid: 5x5 + scheme: linear + multi_model_statistics: + span: full + statistics: [mean] + + regrid_5_5_south_america: + extract_shape: + shapefile: IPCC-Regions/IPCC-WGI-reference-regions-v4.shp + method: contains + crop: false + ids: + - N.Central-America + - S.Central-America + - Caribbean + - N.W.South-America + - N.South-America + - N.E.South-America + - South-American-Monsoon + - S.W.South-America + - S.E.South-America + - S.South-America + regrid: + target_grid: 5x5 + scheme: linear + + regrid_5_5_mmm_europe: + extract_shape: + shapefile: IPCC-Regions/IPCC-WGI-reference-regions-v4.shp + method: contains + crop: false + ids: + - N.Europe + - West&Central-Europe + - E.Europe + - Mediterranean + regrid: + target_grid: 5x5 + scheme: linear + multi_model_statistics: + span: full + statistics: [mean] + + regrid_5_5_europe: + extract_shape: + shapefile: IPCC-Regions/IPCC-WGI-reference-regions-v4.shp + method: contains + crop: false + ids: + - N.Europe + - West&Central-Europe + - E.Europe + - Mediterranean + regrid: + target_grid: 5x5 + scheme: linear + + regrid_5_5_mmm_africa: + extract_shape: + shapefile: IPCC-Regions/IPCC-WGI-reference-regions-v4.shp + method: contains + crop: false + ids: + - Sahara + - Western-Africa + - Central-Africa + - N.Eastern-Africa + - S.Eastern-Africa + - W.Southern-Africa + - E.Southern-Africa + - Madagascar + regrid: + target_grid: 5x5 + scheme: linear + multi_model_statistics: + span: full + statistics: [mean] + + regrid_5_5_africa: + extract_shape: + shapefile: IPCC-Regions/IPCC-WGI-reference-regions-v4.shp + method: 
contains + crop: false + ids: + - Sahara + - Western-Africa + - Central-Africa + - N.Eastern-Africa + - S.Eastern-Africa + - W.Southern-Africa + - E.Southern-Africa + - Madagascar + regrid: + target_grid: 5x5 + scheme: linear + + regrid_5_5_mmm_asia: + extract_shape: + shapefile: IPCC-Regions/IPCC-WGI-reference-regions-v4.shp + method: contains + crop: false + ids: + - Russian-Arctic + - W.Siberia + - E.Siberia + - Russian-Far-East + - W.C.Asia + - E.C.Asia + - Tibetan-Plateau + - E.Asia + - Arabian-Peninsula + - S.Asia + - S.E.Asia + regrid: + target_grid: 5x5 + scheme: linear + multi_model_statistics: + span: full + statistics: [mean] + + regrid_5_5_asia: + extract_shape: + shapefile: IPCC-Regions/IPCC-WGI-reference-regions-v4.shp + method: contains + crop: false + ids: + - Russian-Arctic + - W.Siberia + - E.Siberia + - Russian-Far-East + - W.C.Asia + - E.C.Asia + - Tibetan-Plateau + - E.Asia + - Arabian-Peninsula + - S.Asia + - S.E.Asia + regrid: + target_grid: 5x5 + scheme: linear + + regrid_5_5_mmm_australasia: + extract_shape: + shapefile: IPCC-Regions/IPCC-WGI-reference-regions-v4.shp + method: contains + crop: false + ids: + - N.Australia + - C.Australia + - E.Australia + - S.Australia + - New-Zealand + regrid: + target_grid: 5x5 + scheme: linear + multi_model_statistics: + span: full + statistics: [mean] + + regrid_5_5_australasia: + extract_shape: + shapefile: IPCC-Regions/IPCC-WGI-reference-regions-v4.shp + method: contains + crop: false + ids: + - N.Australia + - C.Australia + - E.Australia + - S.Australia + - New-Zealand + regrid: + target_grid: 5x5 + scheme: linear + + regrid_5_5_mmm_antarctica: + extract_shape: + shapefile: IPCC-Regions/IPCC-WGI-reference-regions-v4.shp + method: contains + crop: false + ids: + - E.Antarctica + - W.Antarctica + regrid: + target_grid: 5x5 + scheme: linear + multi_model_statistics: + span: full + statistics: [mean] + + regrid_5_5_antarctica: + extract_shape: + shapefile: IPCC-Regions/IPCC-WGI-reference-regions-v4.shp + method: contains + crop: false + ids: + - E.Antarctica + - W.Antarctica + regrid: + target_grid: 5x5 + scheme: linear + + +datasets_cmip6: &cmip6_models # historical+ssp245, all ensemble members, Amon + - {dataset: ACCESS-CM2, ensemble: r(1:3)i1p1f1, grid: gn} + - {dataset: ACCESS-ESM1-5, ensemble: r(1:11)i1p1f1, grid: gn} + - {dataset: ACCESS-ESM1-5, ensemble: r19i1p1f1, grid: gn} + - {dataset: AWI-CM-1-1-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: AWI-ESM-1-1-LR, ensemble: r1i1p1f1, grid: gn, end_year: 2014} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-ESM1, ensemble: r(1:3)i1p1f1, grid: gn, end_year: 2014} + - {dataset: CAMS-CSM1-0, ensemble: r(1:2)i1p1f1, grid: gn} + - {dataset: CanESM5-CanOE, ensemble: r(1:3)i1p2f1, grid: gn} + - {dataset: CanESM5, ensemble: r(1:25)i1p1f1, grid: gn} + - {dataset: CanESM5, ensemble: r(1:25)i1p2f1, grid: gn} + - {dataset: CESM2, ensemble: r4i1p1f1, grid: gn} + - {dataset: CESM2, ensemble: r(10:11)i1p1f1, grid: gn} + - {dataset: CESM2-FV2, ensemble: r(1:3)i1p1f1, grid: gn, end_year: 2014} + - {dataset: CESM2-WACCM, ensemble: r(1:3)i1p1f1, grid: gn} + - {dataset: CESM2-WACCM-FV2, ensemble: r(1:3)i1p1f1, grid: gn, + end_year: 2014} + - {dataset: CIESM, ensemble: r1i1p1f1, grid: gr} + - {dataset: CMCC-CM2-HR4, ensemble: r1i1p1f1, grid: gn, end_year: 2014} + - {dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CMCC-ESM2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CNRM-CM6-1, ensemble: r(1:10)i1p1f2, grid: gr} + - {dataset: CNRM-CM6-1-HR, 
ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-ESM2-1, ensemble: r(1:5)i1p1f2, grid: gr} + - {dataset: CNRM-ESM2-1, ensemble: r(7:10)i1p1f2, grid: gr} + - {dataset: E3SM-1-0, ensemble: r(1:5)i1p1f1, grid: gr, end_year: 2014} + - {dataset: E3SM-1-1-ECA, ensemble: r1i1p1f1, grid: gr, end_year: 2014} + - {dataset: E3SM-1-1, ensemble: r1i1p1f1, grid: gr, end_year: 2014} + - {dataset: EC-Earth3, ensemble: r(1:2)i1p1f1, grid: gr} + - {dataset: EC-Earth3, ensemble: r4i1p1f1, grid: gr} + - {dataset: EC-Earth3, ensemble: r(6:7)i1p1f1, grid: gr} + - {dataset: EC-Earth3, ensemble: r(9:10)i1p1f1, grid: gr} + - {dataset: EC-Earth3-AerChem, ensemble: r1i1p1f1, grid: gr, end_year: 2014} + - {dataset: EC-Earth3-AerChem, ensemble: r4i1p1f1, grid: gr, end_year: 2014} + - {dataset: EC-Earth3-CC, ensemble: r1i1p1f1, grid: gr} + - {dataset: EC-Earth3-Veg, ensemble: r(1:6)i1p1f1, grid: gr} + - {dataset: EC-Earth3-Veg, ensemble: r12i1p1f1, grid: gr} + - {dataset: EC-Earth3-Veg, ensemble: r14i1p1f1, grid: gr} + - {dataset: EC-Earth3-Veg-LR, ensemble: r(1:3)i1p1f1, grid: gr} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr} + - {dataset: FGOALS-g3, ensemble: r(1:4)i1p1f1, grid: gn} + - {dataset: FIO-ESM-2-0, ensemble: r(1:3)i1p1f1, grid: gn} + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1} + - {dataset: GFDL-ESM4, ensemble: r(1:3)i1p1f1, grid: gr1} + - {dataset: GISS-E2-1-G, ensemble: r(1:10)i1p1f2, grid: gn} + - {dataset: GISS-E2-1-G, ensemble: r(1:5)i1p3f1, grid: gn} + - {dataset: GISS-E2-1-G, ensemble: r(1:4)i1p5f1, grid: gn} + - {dataset: GISS-E2-1-G-CC, ensemble: r1i1p1f1, grid: gn, end_year: 2014} + - {dataset: GISS-E2-1-H, ensemble: r(1:10)i1p1f1, grid: gn, end_year: 2014} + - {dataset: GISS-E2-1-H, ensemble: r(1:5)i1p1f2, grid: gn, end_year: 2014} + - {dataset: GISS-E2-1-H, ensemble: r(1:5)i1p3f1, grid: gn, end_year: 2014} + - {dataset: GISS-E2-1-H, ensemble: r(1:5)i1p5f1, grid: gn, end_year: 2014} + - {dataset: HadGEM3-GC31-LL, ensemble: r(1:4)i1p1f3, grid: gn} + - {dataset: HadGEM3-GC31-MM, ensemble: r(1:4)i1p1f3, grid: gn, + end_year: 2014} + - {dataset: IITM-ESM, ensemble: r1i1p1f1, grid: gn} + - {dataset: INM-CM4-8, ensemble: r1i1p1f1, grid: gr1} + - {dataset: INM-CM5-0, ensemble: r1i1p1f1, grid: gr1} + - {dataset: IPSL-CM5A2-INCA, ensemble: r1i1p1f1, grid: gr, end_year: 2014} + - {dataset: IPSL-CM6A-LR, ensemble: r(1:6)i1p1f1, grid: gr} + - {dataset: IPSL-CM6A-LR, ensemble: r(10:11)i1p1f1, grid: gr} + - {dataset: IPSL-CM6A-LR, ensemble: r14i1p1f1, grid: gr} + - {dataset: IPSL-CM6A-LR, ensemble: r22i1p1f1, grid: gr} + - {dataset: IPSL-CM6A-LR, ensemble: r25i1p1f1, grid: gr} + - {dataset: KACE-1-0-G, ensemble: r(1:3)i1p1f1, grid: gr} + - {dataset: KIOST-ESM, ensemble: r1i1p1f1, grid: gr1} + - {dataset: MCM-UA-1-0, ensemble: r1i1p1f2, grid: gn} + - {dataset: MIROC6, ensemble: r(1:50)i1p1f1, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r(1:2)i1p1f2, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r4i1p1f2, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r(6:9)i1p1f2, grid: gn} + - {dataset: MPI-ESM-1-2-HAM, ensemble: r(1:3)i1p1f1, grid: gn, + end_year: 2014} + - {dataset: MPI-ESM1-2-HR, ensemble: r(1:10)i1p1f1, grid: gn, end_year: 2014} + - {dataset: MPI-ESM1-2-LR, ensemble: r(1:10)i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, ensemble: r(1:5)i1p1f1, grid: gn} + - {dataset: NESM3, ensemble: r(1:2)i1p1f1, grid: gn} + - {dataset: NorCPM1, ensemble: r(1:30)i1p1f1, grid: gn, end_year: 2014} + - {dataset: NorESM2-LM, ensemble: r(1:3)i1p1f1, grid: gn} + - {dataset: NorESM2-MM, ensemble: r(1:2)i1p1f1, 
grid: gn} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn, end_year: 2014} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r(1:2)i1p1f2, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r4i1p1f2, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r8i1p1f2, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r13i1p1f2, grid: gn} + + +models_cmip6_hist-nat: &models_cmip6_hist-nat # hist-nat, Amon, tas + - {dataset: ACCESS-ESM1-5, grid: gn, institute: CSIRO, + ensemble: r(1:3)i1p1f1} + - {dataset: BCC-CSM2-MR, grid: gn, ensemble: r(1:3)i1p1f1} + - {dataset: CanESM5, ensemble: r(1:15)i1p1f1, grid: gn} + - {dataset: CanESM5, ensemble: r(20:25)i1p1f1, grid: gn} + - {dataset: CanESM5, ensemble: r(1:15)i1p2f1, grid: gn} + - {dataset: CanESM5, ensemble: r17i1p2f1, grid: gn} + - {dataset: CanESM5, ensemble: r(19:25)i1p2f1, grid: gn} + - {dataset: CESM2, grid: gn, ensemble: r(1:3)i1p1f1, end_year: 2014} + - {dataset: CNRM-CM6-1, ensemble: r(1:10)i1p1f2} + - {dataset: FGOALS-g3, grid: gn, ensemble: r(1:3)i1p1f1} + - {dataset: GFDL-CM4, grid: gr1, ensemble: r1i1p1f1, end_year: 2014} + - {dataset: GFDL-ESM4, grid: gr1, ensemble: r(1:3)i1p1f1} + - {dataset: GISS-E2-1-G, ensemble: r(1:5)i1p1f2, grid: gn, end_year: 2014} + - {dataset: GISS-E2-1-G, ensemble: r(1:9)i1p1f3, grid: gn, end_year: 2014} + - {dataset: GISS-E2-1-G, ensemble: r(1:5)i1p1f4, grid: gn, end_year: 2014} + - {dataset: HadGEM3-GC31-LL, ensemble: r(1:4)i1p1f3, grid: gn} + - {dataset: IPSL-CM6A-LR, ensemble: r(1:10)i1p1f1} + - {dataset: MIROC6, ensemble: r(1:4)i1p1f1, grid: gn} + - {dataset: MIROC6, ensemble: r37i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, grid: gn, ensemble: r(1:5)i1p1f1} + - {dataset: NorESM2-LM, grid: gn, institute: NCC, ensemble: r(1:3)i1p1f1} + + +models_cmip6_hist-GHG: &models_cmip6_hist-GHG # hist-GHG, Amon, tas + - {dataset: ACCESS-ESM1-5, grid: gn, institute: CSIRO, + ensemble: r(1:3)i1p1f1} + - {dataset: BCC-CSM2-MR, grid: gn, ensemble: r(1:3)i1p1f1} + - {dataset: CanESM5, grid: gn, ensemble: r(1:25)i1p1f1} + - {dataset: CanESM5, grid: gn, ensemble: r(1:25)i1p2f1} + - {dataset: CESM2, grid: gn, ensemble: r(1:3)i1p1f1, end_year: 2014} + - {dataset: CNRM-CM6-1, ensemble: r(1:10)i1p1f2} + - {dataset: FGOALS-g3, grid: gn, ensemble: r(1:3)i1p1f1} + - {dataset: GFDL-ESM4, grid: gr1, ensemble: r1i1p1f1} + - {dataset: GISS-E2-1-G, grid: gn, ensemble: r(1:5)i1p1f1, end_year: 2014} + - {dataset: GISS-E2-1-G, grid: gn, ensemble: r(1:5)i1p1f2, end_year: 2014} + - {dataset: HadGEM3-GC31-LL, ensemble: r(1:4)i1p1f3, grid: gn} + - {dataset: IPSL-CM6A-LR, ensemble: r(1:10)i1p1f1} + - {dataset: MIROC6, grid: gn, ensemble: r(1:3)i1p1f1} + - {dataset: MRI-ESM2-0, grid: gn, ensemble: r(1:5)i1p1f1} + - {dataset: NorESM2-LM, grid: gn, institute: NCC, ensemble: r(2:3)i1p1f1} + +models_cmip6_hist-aer: &models_cmip6_hist-aer # hist-aer, Amon, tas + - {dataset: ACCESS-ESM1-5, grid: gn, institute: CSIRO, + ensemble: r(1:3)i1p1f1} + - {dataset: BCC-CSM2-MR, grid: gn, ensemble: r(1:2)i1p1f1} + - {dataset: CanESM5, grid: gn, ensemble: r(1:15)i1p1f1} + - {dataset: CanESM5, grid: gn, ensemble: r(1:15)i1p2f1} + - {dataset: CESM2, grid: gn, ensemble: r1i1p1f1, end_year: 2014} + - {dataset: CESM2, grid: gn, ensemble: r3i1p1f1, end_year: 2014} + - {dataset: CNRM-CM6-1, ensemble: r(1:10)i1p1f2} + - {dataset: FGOALS-g3, grid: gn, ensemble: r(1:3)i1p1f1} + - {dataset: GFDL-ESM4, grid: gr1, ensemble: r1i1p1f1} + - {dataset: GISS-E2-1-G, grid: gn, ensemble: r(1:5)i1p1f1, end_year: 2014} + - {dataset: GISS-E2-1-G, grid: 
gn, ensemble: r(1:5)i1p3f1, end_year: 2014} + - {dataset: HadGEM3-GC31-LL, ensemble: r(1:4)i1p1f3, grid: gn} + - {dataset: IPSL-CM6A-LR, ensemble: r(1:10)i1p1f1} + - {dataset: MIROC6, grid: gn, ensemble: r(1:4)i1p1f1} + - {dataset: MIROC6, grid: gn, ensemble: r8i1p1f1} + - {dataset: MRI-ESM2-0, grid: gn, ensemble: r(1:5)i1p1f1} + - {dataset: NorESM2-LM, grid: gn, institute: NCC, ensemble: r(1:3)i1p1f1} + +hadcrut5: &hadcrut5 + - {dataset: HadCRUT5, project: OBS, type: ground, version: 5.0.1.0-analysis, + tier: 2} + + +diagnostics: + + # ********************************************************************** + # IPCC WGI AR6, chap. 3 + # Sect. 3.3 + # Fig 3.9 + # ********************************************************************** + # Global and continental anomalies in annual mean near-surface temperature + # ********************************************************************** + + fig_3_9_tas_anom_damip_1: &diag_tas_anom + description: Anomaly of surface temperature. + variables: + tas_historical_global: &var_tas + short_name: tas + preprocessor: regrid_5_5_mmm + mip: Amon + project: CMIP6 + exp: [historical, ssp245] + grid: gr + ensemble: r1i1p1f1 + start_year: 1850 + end_year: 2020 + additional_datasets: *cmip6_models + tas_hist-nat_global: + <<: *var_tas + exp: hist-nat + additional_datasets: *models_cmip6_hist-nat + tas_hist-GHG_global: + <<: *var_tas + exp: hist-GHG + additional_datasets: *models_cmip6_hist-GHG + tas_hist-aer_global: + <<: *var_tas + exp: hist-aer + additional_datasets: *models_cmip6_hist-aer + tasa_global: + <<: *var_tas + short_name: tasa + preprocessor: regrid_5_5 + additional_datasets: *hadcrut5 + + tas_historical_land: + <<: *var_tas + preprocessor: regrid_5_5_mmm_land + additional_datasets: *cmip6_models + tas_hist-nat_land: + <<: *var_tas + exp: hist-nat + preprocessor: regrid_5_5_mmm_land + additional_datasets: *models_cmip6_hist-nat + tas_hist-GHG_land: + <<: *var_tas + exp: hist-GHG + preprocessor: regrid_5_5_mmm_land + additional_datasets: *models_cmip6_hist-GHG + tas_hist-aer_land: + <<: *var_tas + exp: hist-aer + preprocessor: regrid_5_5_mmm_land + additional_datasets: *models_cmip6_hist-aer + tasa_land: + <<: *var_tas + short_name: tasa + preprocessor: regrid_5_5_land + additional_datasets: *hadcrut5 + + tas_historical_ocean: + <<: *var_tas + preprocessor: regrid_5_5_mmm_ocean + additional_datasets: *cmip6_models + tas_hist-nat_ocean: + <<: *var_tas + exp: hist-nat + preprocessor: regrid_5_5_mmm_ocean + additional_datasets: *models_cmip6_hist-nat + tas_hist-GHG_ocean: + <<: *var_tas + exp: hist-GHG + preprocessor: regrid_5_5_mmm_ocean + additional_datasets: *models_cmip6_hist-GHG + tas_hist-aer_ocean: + <<: *var_tas + exp: hist-aer + preprocessor: regrid_5_5_mmm_ocean + additional_datasets: *models_cmip6_hist-aer + tasa_ocean: + <<: *var_tas + short_name: tasa + preprocessor: regrid_5_5_ocean + additional_datasets: *hadcrut5 + scripts: + tsline: &tsline_amon + script: ipcc_ar6/tas_anom_damip.ncl + ref_start: 1850 + ref_end: 1900 + start_year: 1850 + end_year: 2020 + plot_units: "degC" + y_min: -3.5 + y_max: 4.0 + ref_mask: true + header: ["Global Ocean", "Global", "Global Land"] + title: "global" + panels: [["tas_historical_ocean", "tas_hist-nat_ocean", + "tas_hist-GHG_ocean", "tas_hist-aer_ocean"], + ["tas_historical_global", "tas_hist-nat_global", + "tas_hist-GHG_global", "tas_hist-aer_global"], + ["tas_historical_land", "tas_hist-nat_land", + "tas_hist-GHG_land", "tas_hist-aer_land"]] + reference: ["tasa_ocean", "tasa_global", "tasa_land"] + panel_figure_strings: ["a)", "b)", "c)"] +
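+        # NOTE: each inner list of `panels` names the variable groups
+        # drawn together in one subpanel (all-forcing, natural-only,
+        # GHG-only and aerosol-only ensembles), `reference` gives the
+        # observational group overlaid per subpanel, and `header` and
+        # `panel_figure_strings` follow the same ordering. A minimal
+        # single-panel wiring would look like (hypothetical group names):
+        #   panels: [["tas_model", "tas_nat"]]
+        #   reference: ["tas_obs"]
+        #   header: ["Example panel"]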
legend: ["Anthropogenic + Natural", "Natural", + "Greenhouse Gases", "Aerosols"] + + + fig_3_9_tas_anom_damip_2: + <<: *diag_tas_anom + variables: + tas_historical_north_america: + <<: *var_tas + preprocessor: regrid_5_5_mmm_north_america + additional_datasets: *cmip6_models + tas_hist-nat_north_america: + <<: *var_tas + exp: hist-nat + preprocessor: regrid_5_5_mmm_north_america + additional_datasets: *models_cmip6_hist-nat + tas_hist-GHG_north_america: + <<: *var_tas + exp: hist-GHG + preprocessor: regrid_5_5_mmm_north_america + additional_datasets: *models_cmip6_hist-GHG + tas_hist-aer_north_america: + <<: *var_tas + exp: hist-aer + preprocessor: regrid_5_5_mmm_north_america + additional_datasets: *models_cmip6_hist-aer + tasa_north_america: + <<: *var_tas + short_name: tasa + preprocessor: regrid_5_5_north_america + additional_datasets: *hadcrut5 + + tas_historical_south_america: + <<: *var_tas + preprocessor: regrid_5_5_mmm_south_america + additional_datasets: *cmip6_models + tas_hist-nat_south_america: + <<: *var_tas + exp: hist-nat + preprocessor: regrid_5_5_mmm_south_america + additional_datasets: *models_cmip6_hist-nat + tas_hist-GHG_south_america: + <<: *var_tas + exp: hist-GHG + preprocessor: regrid_5_5_mmm_south_america + additional_datasets: *models_cmip6_hist-GHG + tas_hist-aer_south_america: + <<: *var_tas + exp: hist-aer + preprocessor: regrid_5_5_mmm_south_america + additional_datasets: *models_cmip6_hist-aer + tasa_south_america: + <<: *var_tas + short_name: tasa + preprocessor: regrid_5_5_south_america + additional_datasets: *hadcrut5 + + tas_historical_europe: + <<: *var_tas + preprocessor: regrid_5_5_mmm_europe + additional_datasets: *cmip6_models + tas_hist-nat_europe: + <<: *var_tas + exp: hist-nat + preprocessor: regrid_5_5_mmm_europe + additional_datasets: *models_cmip6_hist-nat + tas_hist-GHG_europe: + <<: *var_tas + exp: hist-GHG + preprocessor: regrid_5_5_mmm_europe + additional_datasets: *models_cmip6_hist-GHG + tas_hist-aer_europe: + <<: *var_tas + exp: hist-aer + preprocessor: regrid_5_5_mmm_europe + additional_datasets: *models_cmip6_hist-aer + tasa_europe: + <<: *var_tas + short_name: tasa + preprocessor: regrid_5_5_europe + additional_datasets: *hadcrut5 + scripts: + tsline: + <<: *tsline_amon + header: ["North America", "Central and South America", + "Europe and North Africa"] + title: "america_europe" + panels: [["tas_historical_north_america", "tas_hist-nat_north_america", + "tas_hist-GHG_north_america", "tas_hist-aer_north_america"], + ["tas_historical_south_america", "tas_hist-nat_south_america", + "tas_hist-GHG_south_america", "tas_hist-aer_south_america"], + ["tas_historical_europe", "tas_hist-nat_europe", + "tas_hist-GHG_europe", "tas_hist-aer_europe"]] + reference: ["tasa_north_america", "tasa_south_america", "tasa_europe"] + panel_figure_strings: ["d)", "e)", "f)"] + legend: ["Anthropogenic + Natural", "Natural", + "Greenhouse Gases", "Aerosols"] + + + fig_3_9_tas_anom_damip_3: + <<: *diag_tas_anom + variables: + tas_historical_africa: + <<: *var_tas + preprocessor: regrid_5_5_mmm_africa + additional_datasets: *cmip6_models + tas_hist-nat_africa: + <<: *var_tas + exp: hist-nat + preprocessor: regrid_5_5_mmm_africa + additional_datasets: *models_cmip6_hist-nat + tas_hist-GHG_africa: + <<: *var_tas + exp: hist-GHG + preprocessor: regrid_5_5_mmm_africa + additional_datasets: *models_cmip6_hist-GHG + tas_hist-aer_africa: + <<: *var_tas + exp: hist-aer + preprocessor: regrid_5_5_mmm_africa + additional_datasets: *models_cmip6_hist-aer + tasa_africa: + 
<<: *var_tas + short_name: tasa + preprocessor: regrid_5_5_africa + additional_datasets: *hadcrut5 + + tas_historical_asia: + <<: *var_tas + preprocessor: regrid_5_5_mmm_asia + additional_datasets: *cmip6_models + tas_hist-nat_asia: + <<: *var_tas + exp: hist-nat + preprocessor: regrid_5_5_mmm_asia + additional_datasets: *models_cmip6_hist-nat + tas_hist-GHG_asia: + <<: *var_tas + exp: hist-GHG + preprocessor: regrid_5_5_mmm_asia + additional_datasets: *models_cmip6_hist-GHG + tas_hist-aer_asia: + <<: *var_tas + exp: hist-aer + preprocessor: regrid_5_5_mmm_asia + additional_datasets: *models_cmip6_hist-aer + tasa_asia: + <<: *var_tas + short_name: tasa + preprocessor: regrid_5_5_asia + additional_datasets: *hadcrut5 + + tas_historical_australasia: + <<: *var_tas + preprocessor: regrid_5_5_mmm_australasia + additional_datasets: *cmip6_models + tas_hist-nat_australasia: + <<: *var_tas + exp: hist-nat + preprocessor: regrid_5_5_mmm_australasia + additional_datasets: *models_cmip6_hist-nat + tas_hist-GHG_australasia: + <<: *var_tas + exp: hist-GHG + preprocessor: regrid_5_5_mmm_australasia + additional_datasets: *models_cmip6_hist-GHG + tas_hist-aer_australasia: + <<: *var_tas + exp: hist-aer + preprocessor: regrid_5_5_mmm_australasia + additional_datasets: *models_cmip6_hist-aer + tasa_australasia: + <<: *var_tas + short_name: tasa + preprocessor: regrid_5_5_australasia + additional_datasets: *hadcrut5 + scripts: + tsline: + <<: *tsline_amon + header: ["Africa", "Asia", "Australasia"] + title: "africa_asia" + panels: [["tas_historical_africa", "tas_hist-nat_africa", + "tas_hist-GHG_africa", "tas_hist-aer_africa"], + ["tas_historical_asia", "tas_hist-nat_asia", + "tas_hist-GHG_asia", "tas_hist-aer_asia"], + ["tas_historical_australasia", "tas_hist-nat_australasia", + "tas_hist-GHG_australasia", "tas_hist-aer_australasia"]] + reference: ["tasa_africa", "tasa_asia", "tasa_australasia"] + panel_figure_strings: ["g)", "h)", "i)"] + legend: ["Anthropogenic + Natural", "Natural", + "Greenhouse Gases", "Aerosols"] + + + fig_3_9_tas_anom_damip_4: + variables: + tas_historical_antarctica: + <<: *var_tas + start_year: 1950 + preprocessor: regrid_5_5_mmm_antarctica + additional_datasets: *cmip6_models + tas_hist-nat_antarctica: + <<: *var_tas + start_year: 1950 + exp: hist-nat + preprocessor: regrid_5_5_mmm_antarctica + additional_datasets: *models_cmip6_hist-nat + tas_hist-GHG_antarctica: + <<: *var_tas + start_year: 1950 + exp: hist-GHG + preprocessor: regrid_5_5_mmm_antarctica + additional_datasets: *models_cmip6_hist-GHG + tas_hist-aer_antarctica: + <<: *var_tas + start_year: 1950 + exp: hist-aer + preprocessor: regrid_5_5_mmm_antarctica + additional_datasets: *models_cmip6_hist-aer + tasa_antarctica: + <<: *var_tas + start_year: 1950 + short_name: tasa + preprocessor: regrid_5_5_antarctica + additional_datasets: *hadcrut5 + scripts: + tsline: + <<: *tsline_amon + ref_start: 1950 + ref_end: 2010 + header: ["Antarctica"] + title: "antarctica" + panels: [["tas_historical_antarctica", "tas_hist-nat_antarctica", + "tas_hist-GHG_antarctica", "tas_hist-aer_antarctica"]] + reference: ["tasa_antarctica"] + panel_figure_strings: ["j)"] + legend: ["Anthropogenic + Natural", "Natural", + "Greenhouse Gases", "Aerosols"] diff --git a/esmvaltool/recipes/model_evaluation/recipe_model_evaluation_basics.yml b/esmvaltool/recipes/model_evaluation/recipe_model_evaluation_basics.yml new file mode 100644 index 0000000000..9b7c187140 --- /dev/null +++ b/esmvaltool/recipes/model_evaluation/recipe_model_evaluation_basics.yml 
@@ -0,0 +1,260 @@ +# ESMValTool +--- +documentation: + title: Basic Model Evaluation. + description: > + Show plots of several variables that can be used for basic model + evaluations ("sanity checks"). + authors: + - hassler_birgit + - lauer_axel + - bonnet_pauline + - schlund_manuel + maintainer: + - hassler_birgit + + +# Note: The following models are just examples +datasets: + - {project: CMIP6, dataset: MPI-ESM1-2-HR, exp: historical, ensemble: r1i1p1f1, grid: gn} + - {project: CMIP6, dataset: MPI-ESM1-2-LR, exp: historical, ensemble: r1i1p1f1, grid: gn} + +# Note: for some observational datasets, we use preset time ranges due to +# their limited temporal availability +timerange_for_models: &time_period + timerange: '2003/2007' # can be specified, this is just an example + + +preprocessors: + + timeseries_regular: &pp_timeseries_regular + area_statistics: + operator: mean + + timeseries_regular_ann: + <<: *pp_timeseries_regular + annual_statistics: + operator: mean + + timeseries_regular_pr: + <<: *pp_timeseries_regular + convert_units: + units: mm day-1 + + full_climatology: &pp_full_climatology + climate_statistics: + period: full + regrid: + target_grid: 2x2 + scheme: + reference: esmf_regrid.schemes:ESMFAreaWeighted + + full_climatology_pr: + <<: *pp_full_climatology + convert_units: + units: mm day-1 + + zonal_mean: + custom_order: true # makes preprocessor much faster since input for extract_levels is smaller + climate_statistics: + period: full + extract_levels: + levels: {cmor_table: CMIP6, coordinate: plev19} + scheme: linear + coordinate: air_pressure + regrid: + scheme: + reference: esmf_regrid.schemes:ESMFAreaWeighted + target_grid: 2x2 + zonal_statistics: + operator: mean + + +diagnostics: + + # Climatologies - maps (full climatology) + + plot_maps_with_references_tas: + description: Plot climatology maps including reference datasets for tas. + variables: + tas: + <<: *time_period + mip: Amon + preprocessor: full_climatology + additional_datasets: + - {project: native6, dataset: ERA5, type: reanaly, version: v1, tier: 3, reference_for_monitor_diags: true} + scripts: + plot: &plot_multi_dataset_default + plot_folder: '{plot_dir}' + plot_filename: '{plot_type}_{real_name}_{dataset}_{mip}' + script: monitor/multi_datasets.py + plots: + map: + common_cbar: true + + plot_maps_with_references_pr: + description: Plot climatology maps including reference datasets for pr. + variables: + pr: + <<: *time_period + mip: Amon + preprocessor: full_climatology_pr + additional_datasets: + - {project: OBS, dataset: GPCP-SG, type: atmos, version: 2.3, tier: 2, reference_for_monitor_diags: true} + scripts: + plot: + <<: *plot_multi_dataset_default + script: monitor/multi_datasets.py + plots: + map: + common_cbar: true + plot_kwargs: + default: + cmap: Blues + + # Climatologies (zonal means) + + plot_zonal_mean_profiles_with_references_ta: + description: Plot 2D zonal mean profiles including reference datasets. + variables: + ta: + <<: *time_period + mip: Amon + preprocessor: zonal_mean + additional_datasets: + - {project: native6, dataset: ERA5, type: reanaly, version: v1, tier: 3, reference_for_monitor_diags: true} + scripts: + plot: + <<: *plot_multi_dataset_default + script: monitor/multi_datasets.py + plots: + zonal_mean_profile: + common_cbar: true + + plot_zonal_mean_profiles_with_references_ua: + description: Plot 2D zonal mean profiles including reference datasets. 
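+  # NOTE: without `custom_order: true`, ESMValCore applies preprocessor
+  # functions in its fixed default order; setting it in `zonal_mean` above
+  # runs the steps exactly as listed, so the time axis is collapsed by
+  # `climate_statistics` before the comparatively expensive vertical
+  # interpolation, e.g. for monthly data 2003-2007:
+  #   (time=60, plev, lat, lon) -> climate_statistics -> (plev, lat, lon)
+  #   -> extract_levels on the small climatology instead of 60 time steps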
+ variables: + ua: + <<: *time_period + mip: Amon + preprocessor: zonal_mean + additional_datasets: + - {project: native6, dataset: ERA5, type: reanaly, version: v1, tier: 3, reference_for_monitor_diags: true} + scripts: + plot: + <<: *plot_multi_dataset_default + script: monitor/multi_datasets.py + plots: + zonal_mean_profile: + common_cbar: true + plot_kwargs: + default: + cmap: Blues + + plot_zonal_mean_profiles_with_references_hus: + description: Plot 2D zonal mean profiles including reference datasets. + variables: + hus: + <<: *time_period + mip: Amon + preprocessor: zonal_mean + additional_datasets: + - {project: native6, dataset: ERA5, type: reanaly, version: v1, tier: 3, reference_for_monitor_diags: true} + scripts: + plot: + <<: *plot_multi_dataset_default + script: monitor/multi_datasets.py + plots: + zonal_mean_profile: + common_cbar: true + plot_kwargs: + default: + cmap: Blues + + # Time series of global averages (monthly) + + plot_multiple_timeseries: + description: Plot time series including reference datasets. + variables: + tas: + <<: *time_period + mip: Amon + preprocessor: timeseries_regular + additional_datasets: + - {project: native6, dataset: ERA5, type: reanaly, version: v1, tier: 3, reference_for_monitor_diags: true} + clt: + <<: *time_period + mip: Amon + preprocessor: timeseries_regular + additional_datasets: + - {project: OBS, dataset: ESACCI-CLOUD, type: sat, version: AVHRR-AMPM-fv3.0, tier: 2, reference_for_monitor_diags: true} + rsut: + <<: *time_period + mip: Amon + preprocessor: timeseries_regular + additional_datasets: + - {project: OBS, dataset: CERES-EBAF, type: sat, version: Ed4.1, tier: 2, reference_for_monitor_diags: true} + rlut: + <<: *time_period + mip: Amon + preprocessor: timeseries_regular + additional_datasets: + - {project: OBS, dataset: CERES-EBAF, type: sat, version: Ed4.1, tier: 2, reference_for_monitor_diags: true} + rtnt: + derive: true + force_derivation: true + mip: Amon + preprocessor: timeseries_regular_ann + timerange: '1995/2014' + prw: + <<: *time_period + mip: Amon + preprocessor: timeseries_regular + # timerange MUST NOT start before 2003 since the observations are not available before 2003 + additional_datasets: + - {project: OBS6, dataset: ESACCI-WATERVAPOUR, type: sat, version: CDR2-L3-COMBI-05deg-fv3.1, tier: 3, timerange: '2003/2007', reference_for_monitor_diags: true} + scripts: + plot: + <<: *plot_multi_dataset_default + group_variables_by: variable_group + script: monitor/multi_datasets.py + plots: + timeseries: + annual_mean_kwargs: false + plot_kwargs: + MPI-ESM1-2-HR: + color: C0 + MPI-ESM1-2-LR: + color: C1 + ERA5: + color: black + ESACCI-CLOUD: + color: black + CERES-EBAF: + color: black + ESACCI-WATERVAPOUR: + color: black + + plot_multiple_timeseries_pr: + description: Plot time series including reference datasets. 
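+  # NOTE: `timeseries_regular_pr` adds `convert_units` to turn the CMOR
+  # precipitation unit kg m-2 s-1 into mm day-1; since 1 kg of water over
+  # 1 m2 forms a 1 mm layer (water density 1000 kg m-3), the conversion is
+  # a plain factor:
+  #   1 kg m-2 s-1 = 60 * 60 * 24 mm day-1 = 86400 mm day-1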
+ variables: + pr: + <<: *time_period + mip: Amon + preprocessor: timeseries_regular_pr + additional_datasets: + - {project: OBS, dataset: GPCP-SG, type: atmos, version: 2.3, tier: 2, reference_for_monitor_diags: true} + scripts: + plot: + <<: *plot_multi_dataset_default + script: monitor/multi_datasets.py + plots: + timeseries: + annual_mean_kwargs: false + plot_kwargs: + MPI-ESM1-2-HR: + color: C0 + MPI-ESM1-2-LR: + color: C1 + GPCP-SG: + color: black diff --git a/esmvaltool/recipes/model_evaluation/recipe_model_evaluation_clouds_clim.yml b/esmvaltool/recipes/model_evaluation/recipe_model_evaluation_clouds_clim.yml new file mode 100644 index 0000000000..aaf964dc24 --- /dev/null +++ b/esmvaltool/recipes/model_evaluation/recipe_model_evaluation_clouds_clim.yml @@ -0,0 +1,226 @@ +# ESMValTool +--- +documentation: + title: Model evaluation with focus on clouds. + description: > + Plot climatologies and zonal mean profiles of several cloud-related + variables of multi-year simulations. + authors: + - bonnet_pauline + - lauer_axel + - hassler_birgit + - schlund_manuel + maintainer: + - lauer_axel + + +# Note: the following models are just examples +datasets: + - {project: CMIP6, dataset: MPI-ESM1-2-HR, exp: historical, ensemble: r1i1p1f1, grid: gn} + - {project: CMIP6, dataset: MPI-ESM1-2-LR, exp: historical, ensemble: r1i1p1f1, grid: gn} + +# Note: for some observational datasets, we use preset time ranges due to +# their limited temporal availability +timerange_for_models: &time_period + timerange: '2005/2014' # can be specified, this is just an example + + +preprocessors: + + full_climatology: &full_climatology_diag + climate_statistics: + period: full + regrid: + target_grid: 2x2 + scheme: + reference: esmf_regrid.schemes:ESMFAreaWeighted + + full_climatology_pr: + <<: *full_climatology_diag + convert_units: + units: mm day-1 + + zonal_mean: + custom_order: true # makes preprocessor much faster since input for extract_levels is smaller + climate_statistics: + period: full + extract_levels: + levels: {cmor_table: CMIP6, coordinate: plev19} + scheme: linear + coordinate: air_pressure + regrid: + scheme: + reference: esmf_regrid.schemes:ESMFAreaWeighted + target_grid: 2x2 + zonal_statistics: + operator: mean + + +diagnostics: + + plot_clt_maps: + description: Plot clt climatology maps including reference datasets. + variables: + clt: + <<: *time_period + mip: Amon + preprocessor: full_climatology + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, version: AVHRR-AMPM-fv3.0, tier: 2, reference_for_monitor_diags: true} + scripts: + plot: &plot_multi_dataset_default + script: monitor/multi_datasets.py + plot_folder: '{plot_dir}' + plot_filename: '{plot_type}_{real_name}_{dataset}_{mip}' + plots: + map: + common_cbar: true + + plot_lwcre_maps: + description: Plot lwcre climatology maps including reference datasets. + variables: + lwcre: + <<: *time_period + mip: Amon + preprocessor: full_climatology + derive: true + additional_datasets: + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, timerange: '2001/2010', tier: 1, reference_for_monitor_diags: true} + scripts: + plot: + <<: *plot_multi_dataset_default + plots: + map: + common_cbar: true + + plot_swcre_maps: + description: Plot swcre climatology maps including reference datasets. 
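+  # NOTE: `derive: true` (here and for lwcre above) makes ESMValCore
+  # compute the cloud radiative effects from the TOA radiation fluxes
+  # rather than reading them from file; with rlutcs/rsutcs the clear-sky
+  # fluxes, the derivations are:
+  #   lwcre = rlutcs - rlut
+  #   swcre = rsutcs - rsut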
+    variables:
+      swcre:
+        <<: *time_period
+        mip: Amon
+        preprocessor: full_climatology
+        derive: true
+        additional_datasets:
+          - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1, timerange: '2001/2010', reference_for_monitor_diags: true}
+    scripts:
+      plot:
+        <<: *plot_multi_dataset_default
+        plots:
+          map:
+            common_cbar: true
+
+  plot_lwp_maps:
+    description: Plot lwp climatology maps including reference datasets.
+    variables:
+      lwp:
+        <<: *time_period
+        mip: Amon
+        preprocessor: full_climatology
+        derive: true
+        additional_datasets:
+          - {project: native6, dataset: ERA5, type: reanaly, version: v1, tier: 3, reference_for_monitor_diags: true}
+    scripts:
+      plot:
+        <<: *plot_multi_dataset_default
+        plots:
+          map:
+            common_cbar: true
+            fontsize: 6
+
+  plot_clivi_maps:
+    description: Plot clivi climatology maps including reference datasets.
+    variables:
+      clivi:
+        <<: *time_period
+        mip: Amon
+        preprocessor: full_climatology
+        additional_datasets:
+          - {project: native6, dataset: ERA5, type: reanaly, version: v1, tier: 3, reference_for_monitor_diags: true}
+    scripts:
+      plot:
+        <<: *plot_multi_dataset_default
+        plots:
+          map:
+            common_cbar: true
+
+  plot_prw_maps:
+    description: Plot prw climatology maps including reference datasets.
+    variables:
+      prw:
+        <<: *time_period
+        mip: Amon
+        preprocessor: full_climatology
+        additional_datasets:
+          - {dataset: ESACCI-WATERVAPOUR, project: OBS6, type: sat, version: CDR2-L3-COMBI-05deg-fv3.1, tier: 3, timerange: '2003/2017', reference_for_monitor_diags: true}
+    scripts:
+      plot:
+        <<: *plot_multi_dataset_default
+        plots:
+          map:
+            common_cbar: true
+
+  plot_pr_maps:
+    description: Plot pr climatology maps including reference datasets.
+    variables:
+      pr:
+        <<: *time_period
+        mip: Amon
+        preprocessor: full_climatology_pr
+        additional_datasets:
+          - {dataset: GPCP-SG, project: OBS, type: atmos, version: 2.3, tier: 2,
+             timerange: '2003/2017', reference_for_monitor_diags: true}
+    scripts:
+      plot:
+        <<: *plot_multi_dataset_default
+        plots:
+          map:
+            common_cbar: true
+
+  plot_clw_profiles:
+    description: Plot clw vertical profiles including reference datasets.
+    variables:
+      clw:
+        <<: *time_period
+        mip: Amon
+        preprocessor: zonal_mean
+        additional_datasets:
+          - {dataset: CLOUDSAT-L2, project: OBS, type: sat, version: P1-R05-gridbox-average-noprecip, timerange: '2006/2017', tier: 3, reference_for_monitor_diags: true}
+    scripts:
+      plot:
+        <<: *plot_multi_dataset_default
+        plots:
+          profile:
+            common_cbar: true
+
+  plot_cli_profiles:
+    description: Plot cli vertical profiles including reference datasets.
+    variables:
+      cli:
+        <<: *time_period
+        mip: Amon
+        preprocessor: zonal_mean
+        additional_datasets:
+          - {dataset: CALIPSO-ICECLOUD, project: OBS, type: sat, version: 1-00, timerange: '2007/2015', tier: 3, reference_for_monitor_diags: true}
+    scripts:
+      plot:
+        <<: *plot_multi_dataset_default
+        plots:
+          profile:
+            common_cbar: true
+
+  plot_cl_profiles:
+    description: Plot cl vertical profiles including reference datasets.
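+    # The 'zonal_mean' preprocessor used below sets 'custom_order: true', so
+    # its steps run exactly in the order written: collapsing time first with
+    # climate_statistics means extract_levels and regrid only operate on a
+    # single climatological field instead of the full monthly time series.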
+ variables: + cl: + <<: *time_period + mip: Amon + preprocessor: zonal_mean + additional_datasets: + - {project: native6, dataset: ERA5, type: reanaly, version: v1, tier: 3, reference_for_monitor_diags: true} + scripts: + plot: + <<: *plot_multi_dataset_default + plots: + profile: + common_cbar: true diff --git a/esmvaltool/recipes/model_evaluation/recipe_model_evaluation_clouds_cycles.yml b/esmvaltool/recipes/model_evaluation/recipe_model_evaluation_clouds_cycles.yml new file mode 100644 index 0000000000..8139a04dfc --- /dev/null +++ b/esmvaltool/recipes/model_evaluation/recipe_model_evaluation_clouds_cycles.yml @@ -0,0 +1,179 @@ +# ESMValTool +--- +documentation: + title: Model evaluation with focus on clouds. + description: > + Plot annual cycles of several cloud-related variables of multi-year + simulations. + authors: + - lauer_axel + - schlund_manuel + maintainer: + - lauer_axel + + +# Note: the following models are just examples +datasets: + - {project: CMIP6, dataset: MPI-ESM1-2-HR, exp: historical, ensemble: r1i1p1f1, grid: gn} + - {project: CMIP6, dataset: MPI-ESM1-2-LR, exp: historical, ensemble: r1i1p1f1, grid: gn} + +# Note: for some observational datasets, we use preset time ranges due to +# their limited temporal availability +timerange_for_models: &time_period + timerange: '2000/2014' # can be specified, this is just an example + + +preprocessors: + + pp_global: &global_settings + area_statistics: + operator: mean + climate_statistics: + period: month + + pp_SEPacific: + <<: *global_settings + extract_region: + start_longitude: 265 + end_longitude: 275 + start_latitude: -25 + end_latitude: -5 + mask_landsea: + mask_out: land + + pp_SouthernOcean: + <<: *global_settings + extract_region: + start_longitude: 0 + end_longitude: 360 + start_latitude: -65 + end_latitude: -30 + mask_landsea: + mask_out: land + + pp_StormTracks: + <<: *global_settings + extract_region: + start_longitude: 0 + end_longitude: 360 + start_latitude: 45 + end_latitude: 60 + + +diagnostics: + + anncyc: + description: Plot annual cycles including reference datasets. 
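+    # Every key under 'variables' below is a variable *group*: the CMOR
+    # variable actually loaded is set via 'short_name', so the same variable
+    # can be requested repeatedly with different regional preprocessors, and
+    # 'group_variables_by: variable_group' in the script settings turns each
+    # group into its own annual-cycle figure. Hypothetical sketch:
+    #
+    #   clt_myregion:               # free-form group name
+    #     short_name: clt           # CMOR variable to load
+    #     preprocessor: pp_MyRegion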
+ variables: + clt_global: &clt_settings + <<: *time_period + preprocessor: pp_global + short_name: clt + mip: Amon + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, version: AVHRR-AMPM-fv3.0, tier: 2} + clt_tropics: + <<: *clt_settings + clt_sepacific: + <<: *clt_settings + preprocessor: pp_SEPacific + clt_southerocean: + <<: *clt_settings + preprocessor: pp_SouthernOcean + clt_stormtracks: + <<: *clt_settings + preprocessor: pp_StormTracks + clivi_global: &clivi_settings + <<: *time_period + preprocessor: pp_global + short_name: clivi + mip: Amon + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, version: AVHRR-AMPM-fv3.0, tier: 2} + clivi_tropics: + <<: *clivi_settings + clivi_sepacific: + <<: *clivi_settings + preprocessor: pp_SEPacific + clivi_southerocean: + <<: *clivi_settings + preprocessor: pp_SouthernOcean + clivi_stormtracks: + <<: *clivi_settings + preprocessor: pp_StormTracks + lwp_global: &lwp_settings + <<: *time_period + preprocessor: pp_global + short_name: lwp + derive: true + mip: Amon + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, version: AVHRR-AMPM-fv3.0, tier: 2} + lwp_tropics: + <<: *lwp_settings + lwp_sepacific: + <<: *lwp_settings + preprocessor: pp_SEPacific + lwp_southerocean: + <<: *lwp_settings + preprocessor: pp_SouthernOcean + lwp_stormtracks: + <<: *lwp_settings + preprocessor: pp_StormTracks + swcre_global: &swcre_settings + <<: *time_period + preprocessor: pp_global + short_name: swcre + derive: true + mip: Amon + additional_datasets: + - {dataset: CERES-EBAF, project: OBS, type: sat, version: Ed4.1, tier: 2} + swcre_tropics: + <<: *swcre_settings + swcre_sepacific: + <<: *swcre_settings + preprocessor: pp_SEPacific + swcre_southerocean: + <<: *swcre_settings + preprocessor: pp_SouthernOcean + swcre_stormtracks: + <<: *swcre_settings + preprocessor: pp_StormTracks + lwcre_global: &lwcre_settings + <<: *time_period + preprocessor: pp_global + short_name: lwcre + derive: true + mip: Amon + additional_datasets: + - {dataset: CERES-EBAF, project: OBS, type: sat, version: Ed4.1, tier: 2} + lwcre_tropics: + <<: *lwcre_settings + lwcre_sepacific: + <<: *lwcre_settings + preprocessor: pp_SEPacific + lwcre_southerocean: + <<: *lwcre_settings + preprocessor: pp_SouthernOcean + lwcre_stormtracks: + <<: *lwcre_settings + preprocessor: pp_StormTracks + scripts: + allplots: + script: monitor/multi_datasets.py + plot_folder: '{plot_dir}' + plot_filename: '{plot_type}_{real_name}_{mip}' + group_variables_by: variable_group + plots: + annual_cycle: + legend_kwargs: + loc: upper right + plot_kwargs: + MPI-ESM1-2-HR: + color: C0 + MPI-ESM1-2-LR: + color: C1 + ESACCI-CLOUD: + color: black + pyplot_kwargs: + title: '{short_name}' diff --git a/esmvaltool/recipes/model_evaluation/recipe_model_evaluation_precip_zonal.yml b/esmvaltool/recipes/model_evaluation/recipe_model_evaluation_precip_zonal.yml new file mode 100644 index 0000000000..97802deb47 --- /dev/null +++ b/esmvaltool/recipes/model_evaluation/recipe_model_evaluation_precip_zonal.yml @@ -0,0 +1,72 @@ +# ESMValTool +--- +documentation: + title: Model evaluation with focus on precipitation. + description: > + Plot zonal mean precipitation. 
+  authors:
+    - lauer_axel
+    - schlund_manuel
+  maintainer:
+    - lauer_axel
+
+
+# Note: the following models are just examples
+datasets:
+  - {project: CMIP6, dataset: MPI-ESM1-2-HR, exp: historical, ensemble: r1i1p1f1, grid: gn}
+  - {project: CMIP6, dataset: MPI-ESM1-2-LR, exp: historical, ensemble: r1i1p1f1, grid: gn}
+
+# Note: for some observational datasets, we use preset time ranges due to
+# their limited temporal availability
+timerange_for_models: &time_period
+  timerange: '2000/2014'  # can be specified, this is just an example
+
+
+preprocessors:
+
+  pp_zonal:
+    regrid:
+      target_grid: 2x2
+      scheme:
+        reference: esmf_regrid.schemes:ESMFAreaWeighted
+    zonal_statistics:
+      operator: mean
+    climate_statistics:
+      operator: mean
+      period: full
+    convert_units:
+      units: mm day-1
+
+
+diagnostics:
+
+  zonal:
+    description: Plot zonal mean precipitation including reference datasets.
+    variables:
+      pr:
+        <<: *time_period
+        preprocessor: pp_zonal
+        mip: Amon
+        additional_datasets:
+          - {dataset: ERA5, project: native6, type: reanaly, version: 'v1', tier: 3}
+          - {dataset: GPCP-SG, project: obs4MIPs, level: L3, version: v2.3, tier: 1}
+    scripts:
+      allplots:
+        script: monitor/multi_datasets.py
+        plot_folder: '{plot_dir}'
+        plot_filename: '{plot_type}_{real_name}_{mip}'
+        group_variables_by: variable_group
+        plots:
+          variable_vs_lat:
+            legend_kwargs:
+              loc: upper right
+            plot_kwargs:
+              MPI-ESM1-2-HR:
+                color: C0
+              MPI-ESM1-2-LR:
+                color: C1
+              ERA5:
+                color: black
+                linestyle: dotted
+              GPCP-SG:
+                color: black
diff --git a/esmvaltool/recipes/monitor/recipe_monitor.yml b/esmvaltool/recipes/monitor/recipe_monitor.yml
new file mode 100644
index 0000000000..a37f186583
--- /dev/null
+++ b/esmvaltool/recipes/monitor/recipe_monitor.yml
@@ -0,0 +1,356 @@
+# ESMValTool
+---
+documentation:
+  title: Model Monitoring
+  description: |
+    This is a recipe to monitor experiments by automating the creation of
+    plots from raw preprocessor output.
+  authors:
+    - vegas-regidor_javier
+  maintainer:
+    - loosveldt-tomas_saskia
+
+
+datasets:
+  - {project: CMIP6, dataset: EC-Earth3, exp: historical, ensemble: r1i1p1f1, start_year: 1850, end_year: 2014}
+
+preprocessors:
+  timeseries_regular:
+    area_statistics:
+      operator: mean
+
+  climatology:
+    climate_statistics:
+      period: month
+
+  climatology_pr:
+    climate_statistics:
+      period: month
+    convert_units:
+      units: kg m-2 day-1
+
+  climatology_500hPa:
+    extract_levels:
+      levels: 50000
+      scheme: linear
+      coordinate: air_pressure
+    climate_statistics:
+      period: month
+
+  climatology_200hPa:
+    extract_levels:
+      levels: 20000
+      scheme: linear
+      coordinate: air_pressure
+    climate_statistics:
+      period: month
+
+  nao_djf:
+    extract_region:
+      start_longitude: -90.
+      end_longitude: 60.
+      start_latitude: 20.
+      end_latitude: 87.5
+    extract_season:
+      season: djf
+    seasonal_statistics:
+      operator: mean
+
+  sam_jja:
+    extract_region:
+      start_longitude: 0.
+      end_longitude: 360.
+      start_latitude: -90.
+      end_latitude: -50
+    extract_season:
+      season: jja
+    seasonal_statistics:
+      operator: mean
+
+  nino34: &index
+    extract_region:
+      start_longitude: -170.
+      end_longitude: -120
+      start_latitude: -5.
+      end_latitude: 5.
+    mask_landsea:
+      mask_out: land
+    area_statistics:
+      operator: mean
+
+  nino3:
+    <<: *index
+    extract_region:
+      start_longitude: -150.
+      end_longitude: -90
+      start_latitude: -5.
+      end_latitude: 5.
+
+  mlotstnorth:
+    <<: *index
+    extract_region:
+      start_longitude: 0.
+      end_longitude: 360
+      start_latitude: 50.
+      end_latitude: 90.
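+  # The '&index' anchor set on nino34 above is reused by the preprocessors
+  # around it: '<<: *index' copies mask_landsea and area_statistics, while a
+  # locally given extract_region replaces the anchored one wholesale (YAML
+  # merge keys are shallow; nested mappings are not merged recursively).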
+
+  mlotstsouth:
+    <<: *index
+    extract_region:
+      start_longitude: 0.
+      end_longitude: 360
+      start_latitude: -90.
+      end_latitude: -40.
+
+  clim_aso:
+    extract_season:
+      season: son
+    climate_statistics:
+      period: month
+
+  clim_fma:
+    extract_season:
+      season: mam
+    climate_statistics:
+      period: month
+
+diagnostics:
+  plot_timeseries_annual_cycle:
+    description: "Plot time series and annual cycles"
+    variables:
+      tas:
+        mip: Amon
+        preprocessor: timeseries_regular
+        grid: gr
+    scripts:
+      plot: &plot_default
+        script: monitor/monitor.py
+        # plot_folder: ~/plots/{expid}/{modeling_realm}/{real_name}
+        # cartopy_data_dir: /esarchive/scratch/Earth/jvegas/cartopy
+        plots:
+          timeseries: {}
+          annual_cycle: {}
+
+  plot_timeseries:
+    description: "Plot time series"
+    variables:
+      nino3:
+        plot_name: 'Niño 3 index'
+        short_name: tos
+        mip: Omon
+        preprocessor: nino3
+        grid: gn
+      nino34:
+        plot_name: 'Niño 3.4 index'
+        short_name: tos
+        mip: Omon
+        preprocessor: nino34
+        grid: gn
+    scripts:
+      plot:
+        <<: *plot_default
+        plots:
+          timeseries: {}
+
+  plot_annual_cycle:
+    description: "Plot annual cycles"
+    variables:
+      mlotst-north:
+        short_name: mlotst
+        mip: Omon
+        preprocessor: mlotstnorth
+        grid: gn
+        plot_name: Mixed layer depth average above 50ºN
+      mlotst-south:
+        short_name: mlotst
+        mip: Omon
+        preprocessor: mlotstsouth
+        grid: gn
+        plot_name: Mixed layer depth average below 40ºS
+    scripts:
+      plot:
+        <<: *plot_default
+        plots:
+          annual_cycle: {}
+
+  global_climatologies:
+    description: "Plot map data"
+    variables:
+      tas:
+        mip: Amon
+        preprocessor: climatology
+        grid: gr
+      ps:
+        mip: Amon
+        preprocessor: climatology
+        grid: gr
+      rsns:
+        derive: true
+        mip: Amon
+        preprocessor: climatology
+        grid: gr
+      rlns:
+        derive: true
+        mip: Amon
+        preprocessor: climatology
+        grid: gr
+      hfss:
+        mip: Amon
+        preprocessor: climatology
+        grid: gr
+      hfls:
+        mip: Amon
+        preprocessor: climatology
+        grid: gr
+      vas:
+        mip: Amon
+        preprocessor: climatology
+        grid: gr
+      pr:
+        mip: Amon
+        preprocessor: climatology_pr
+        grid: gr
+      evspsbl:
+        mip: Amon
+        preprocessor: climatology
+        grid: gr
+      ua200:
+        short_name: ua
+        mip: Amon
+        preprocessor: climatology_200hPa
+        grid: gr
+        plot_name: Eastward Wind at 200 hPa
+      ua500:
+        short_name: ua
+        mip: Amon
+        preprocessor: climatology_500hPa
+        grid: gr
+        plot_name: Eastward Wind at 500 hPa
+      zg200:
+        short_name: zg
+        mip: Amon
+        preprocessor: climatology_200hPa
+        grid: gr
+        plot_name: Geopotential height at 200 hPa
+      zg500:
+        short_name: zg
+        mip: Amon
+        preprocessor: climatology_500hPa
+        grid: gr
+        plot_name: Geopotential height at 500 hPa
+      tos:
+        mip: Omon
+        preprocessor: climatology
+        grid: gn
+      zos:
+        mip: Omon
+        preprocessor: climatology
+        grid: gn
+      sos:
+        mip: Omon
+        preprocessor: climatology
+        grid: gn
+    scripts:
+      plot:
+        <<: *plot_default
+        plots:
+          clim: {}
+          seasonclim: {}
+          monclim: {}
+
+  polar_climatology:
+    description: "Plot climatologies on both poles"
+    variables:
+      tas:
+        mip: Amon
+        preprocessor: climatology
+        grid: gr
+      pr:
+        mip: Amon
+        preprocessor: climatology_pr
+        grid: gr
+      tos:
+        mip: Omon
+        preprocessor: climatology
+        grid: gn
+      sos:
+        mip: Omon
+        grid: gn
+        preprocessor: climatology
+    scripts:
+      plot:
+        <<: *plot_default
+        plots:
+          clim:
+            maps: [arctic, antarctic]
+
+  eofs:
+    description: Compute EOFs
+    variables:
+      nao:
+        short_name: psl
+        mip: Amon
+        grid: gr
+        preprocessor: nao_djf
+        eof_name: NAO as first EOF in DJF
+        pc_name: NAO index as first PC in DJF
+      sam:
+        short_name: psl
+        mip: Amon
+        grid: gr
+        preprocessor: sam_jja
+        eof_name: SAM as
first EOF in JJA + pc_name: SAM index as first PC in JJA + scripts: + eof: + <<: *plot_default + script: monitor/compute_eofs.py + + polar_monthly_climatology: + description: "Plot monthly climatologies on both poles" + variables: + siconc: + mip: SImon + preprocessor: climatology + grid: gn + + scripts: + plot: + <<: *plot_default + plots: + monclim: + maps: [arctic, antarctic] + months: [3, 9] + rows: 1 + columns: 2 + plot_size: [5., 4.] + + nh_clims: + description: "Plot climatologies" + variables: + mltost-fma: + short_name: mlotst + mip: Omon + preprocessor: clim_fma + grid: gn + scripts: + plot: + <<: *plot_default + plots: + clim: + maps: [north,] + + sh_clims: + description: "Plot climatologies" + variables: + mltost-aso: + short_name: mlotst + mip: Omon + preprocessor: clim_aso + grid: gn + scripts: + plot: + <<: *plot_default + plots: + clim: + maps: [south,] diff --git a/esmvaltool/recipes/monitor/recipe_monitor_with_refs.yml b/esmvaltool/recipes/monitor/recipe_monitor_with_refs.yml new file mode 100644 index 0000000000..4277313428 --- /dev/null +++ b/esmvaltool/recipes/monitor/recipe_monitor_with_refs.yml @@ -0,0 +1,259 @@ +# ESMValTool +--- +documentation: + title: Example recipe for model monitoring with reference datasets. + description: | + Show plots that include multiple datasets that can be used to monitor + (ongoing) model simulations. + authors: + - schlund_manuel + - heuer_helge + - kraft_jeremy + - kuehbacher_birgit + - lindenlaub_lukas + - sarauer_ellen + - winterstein_franziska + maintainer: + - schlund_manuel + + +datasets: + # Note: plot_label currently only used by diagnostic plot_multiple_annual_cycles + - {project: CMIP6, dataset: MPI-ESM1-2-HR, exp: historical, ensemble: r1i1p1f1, grid: gn, plot_label: 'MPI-ESM1-2-HR historical'} + - {project: CMIP6, dataset: MPI-ESM1-2-LR, exp: historical, ensemble: r1i1p1f1, grid: gn, plot_label: 'Reference (MPI-ESM1-2-LR historical)', reference_for_monitor_diags: true} + + +preprocessors: + + timeseries_regular: + area_statistics: + operator: mean + + annual_cycle_nh: + extract_region: + start_latitude: 0 + end_latitude: 90 + start_longitude: 0 + end_longitude: 360 + area_statistics: + operator: mean + climate_statistics: + period: monthly + + full_climatology: + climate_statistics: + period: full + regrid: + scheme: linear + target_grid: 2x2 + + zonal_mean: + custom_order: true + climate_statistics: + period: full + regrid: + scheme: linear + target_grid: 2x2 + extract_levels: + levels: {cmor_table: CMIP6, coordinate: plev39} + scheme: linear + coordinate: air_pressure + zonal_statistics: + operator: mean + + extract_1d_profile: + custom_order: true + climate_statistics: + period: full + area_statistics: + operator: mean + extract_levels: + levels: {cmor_table: CMIP6, coordinate: plev39} + scheme: linear + coordinate: air_pressure + + var_vs_lat: + climate_statistics: + operator: mean + regrid: + target_grid: 2x2 + scheme: linear + zonal_statistics: + operator: mean + convert_units: + units: mm day-1 + + global_mean_extract_levels: + custom_order: true + extract_levels: + levels: {cmor_table: CMIP6, coordinate: alt16} + scheme: linear + coordinate: altitude + regrid: + target_grid: 2x2 + scheme: linear + area_statistics: + operator: mean + + zonal_mean_2d: + regrid: + target_grid: 2x2 + scheme: linear + zonal_statistics: + operator: mean + + +diagnostics: + + plot_multiple_timeseries: + description: Plot time series including reference datasets. 
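+    # In this recipe one dataset per variable is tagged with
+    # 'reference_for_monitor_diags: true' (see the 'datasets' section above);
+    # for plot types that support references, monitor/multi_datasets.py then
+    # draws each model next to that reference together with a bias panel,
+    # which is what the 'plot_kwargs_bias' options further down configure.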
+ variables: + tas: + mip: Amon + preprocessor: timeseries_regular + timerange: '2000/2014' + scripts: + plot: &plot_multi_dataset_default + script: monitor/multi_datasets.py + plot_folder: '{plot_dir}' + plot_filename: '{plot_type}_{real_name}_{dataset}_{mip}' + plots: + timeseries: + annual_mean_kwargs: + linestyle: '--' + plot_kwargs: + MPI-ESM1-2-HR: # = dataset since 'facet_used_for_labels' is 'dataset' by default + color: C0 + MPI-ESM1-2-LR: + color: black + + plot_multiple_annual_cycles: + description: Plot annual cycles including reference datasets. + variables: + tas: + mip: Amon + preprocessor: annual_cycle_nh + timerange: '2000/2014' + scripts: + plot: + <<: *plot_multi_dataset_default + facet_used_for_labels: plot_label + plots: + annual_cycle: + legend_kwargs: + loc: upper right + plot_kwargs: + 'MPI-ESM1-2-HR historical': # = plot_label since 'facet_used_for_labels: plot_label' + color: C0 + 'Reference (MPI-ESM1-2-LR historical)': + color: black + pyplot_kwargs: + title: Near-Surface Air Temperature on Northern Hemisphere + ylim: [280, 297] + + plot_maps_with_references: + description: Plot climatology maps including reference datasets. + variables: + tas: + mip: Amon + preprocessor: full_climatology + timerange: '2000/2014' + scripts: + plot: + <<: *plot_multi_dataset_default + script: monitor/multi_datasets.py + plots: + map: + common_cbar: true + plot_kwargs_bias: + levels: [-10.0, -7.5, -5.0, -2.5, 0.0, 2.5, 5.0, 7.5, 10.0] + + plot_zonal_mean_profiles_with_references: + description: Plot 2D zonal mean profiles including reference datasets. + variables: + ta: + mip: Amon + preprocessor: zonal_mean + timerange: '2000/2014' + scripts: + plot: + <<: *plot_multi_dataset_default + script: monitor/multi_datasets.py + plots: + zonal_mean_profile: + common_cbar: true + plot_kwargs_bias: + levels: [-10.0, -7.5, -5.0, -2.5, 0.0, 2.5, 5.0, 7.5, 10.0] + + + plot_1D_profiles_with_references: + description: Plot 1D profiles including reference datasets. + variables: + ta: + mip: Amon + preprocessor: extract_1d_profile + timerange: '2000/2014' + scripts: + plot: + <<: *plot_multi_dataset_default + script: monitor/multi_datasets.py + plots: + 1d_profile: + plot_kwargs: + MPI-ESM1-2-HR: # = dataset since 'facet_used_for_labels' is 'dataset' by default + color: C0 + MPI-ESM1-2-LR: + color: black + + plot_variable_vs_latitude: + description: Creates a single-panel variable plot over latitude. + variables: + pr: + preprocessor: var_vs_lat + mip: Amon + timerange: '20000101/20030101' + scripts: + plot: + <<: *plot_multi_dataset_default + script: monitor/multi_datasets.py + plots: + variable_vs_lat: + + plot_hovmoeller_z_vs_time: + description: Plot Hovmoeller Z vs. time including reference datasets. + variables: + ta: + preprocessor: global_mean_extract_levels + mip: Amon + timerange: '2000/2005' + scripts: + plot: + <<: *plot_multi_dataset_default + script: monitor/multi_datasets.py + plots: + hovmoeller_z_vs_time: + plot_func: contourf + common_cbar: true + time_format: '%Y' + log_y: false + pyplot_kwargs: + ylim: [0, 20000] + xticks: + rotation: 25 + + plot_time_vs_lat_with_references: + description: Plot Hovmoeller time vs. latitude including reference datasets. 
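+    # The plot options above are forwarded largely unchanged to matplotlib:
+    # 'plot_kwargs' entries are keyed by the label facet (the dataset name by
+    # default) and go to the plot call itself, 'pyplot_kwargs' map to pyplot
+    # functions (e.g. 'ylim: [0, 20000]' becomes plt.ylim(...)), and
+    # 'time_format: %Y' is a strftime pattern for the time-axis tick labels.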
+ variables: + tas: + mip: Amon + preprocessor: zonal_mean_2d + timerange: '2000/2005' + scripts: + plot: + <<: *plot_multi_dataset_default + script: monitor/multi_datasets.py + plots: + hovmoeller_time_vs_lat_or_lon: + common_cbar: true + show_x_minor_ticks: false + time_format: '%Y' diff --git a/esmvaltool/recipes/mpqb/recipe_mpqb_xch4.yml b/esmvaltool/recipes/mpqb/recipe_mpqb_xch4.yml new file mode 100644 index 0000000000..cdd61087b4 --- /dev/null +++ b/esmvaltool/recipes/mpqb/recipe_mpqb_xch4.yml @@ -0,0 +1,303 @@ +# ESMValTool +# recipe_mpqb_xch4.yml +--- +documentation: + title: Methane evaluation with ESA CMUG data + description: | + Recipe for the comparison of ESA XCH4 data with CMIP6 models + + authors: + - hassler_birgit + - mueller_benjamin + + maintainer: + - unmaintained + + +################################################ +# Define some default parameters using anchors # +################################################ + +nh_region: &nh_region + extract_region: + start_longitude: 0 + end_longitude: 360 + start_latitude: 30 + end_latitude: 90 + +sh_region: &sh_region + extract_region: + start_longitude: 0 + end_longitude: 360 + start_latitude: -90 + end_latitude: -30 + +mpqbregridder: &mpqbregridder + regrid: + target_grid: 5x5 + scheme: area_weighted + +global_mon: &global_mon + custom_order: true + <<: *mpqbregridder + mask_multimodel: + area_statistics: + operator: mean + monthly_statistics: + operator: mean + +global_ann: &global_ann + custom_order: true + <<: *mpqbregridder + mask_multimodel: + area_statistics: + operator: mean + annual_statistics: + operator: mean + +sh_mon: &sh_mon + custom_order: true + <<: *mpqbregridder + <<: *sh_region + mask_multimodel: + area_statistics: + operator: mean + monthly_statistics: + operator: mean + +sh_ann: &sh_ann + custom_order: true + <<: *mpqbregridder + <<: *sh_region + mask_multimodel: + area_statistics: + operator: mean + annual_statistics: + operator: mean + +nh_mon: &nh_mon + custom_order: true + <<: *mpqbregridder + <<: *nh_region + mask_multimodel: + area_statistics: + operator: mean + monthly_statistics: + operator: mean + +nh_ann: &nh_ann + custom_order: true + <<: *mpqbregridder + <<: *nh_region + mask_multimodel: + area_statistics: + operator: mean + annual_statistics: + operator: mean + + +# Datasets +################################################ + +datasets: +### observations + - {dataset: CDS-XCH4, project: OBS, type: sat, version: L3, tier: 3, + derive: false, alias: CDS-XCH4} +### CMIP6 (historical) + - {dataset: BCC-CSM2-MR, project: CMIP6, ensemble: r1i1p1f1, grid: gn, + exp: historical, derive: true, alias: BCC-CSM2-MR} + - {dataset: CNRM-ESM2-1, project: CMIP6, ensemble: r1i1p1f2, grid: gr, + exp: historical, derive: true, alias: CNRM-ESM2-1} + - {dataset: UKESM1-0-LL, project: CMIP6, ensemble: r1i1p1f2, grid: gn, + exp: historical, derive: true, alias: UKESM1-0-LL} + - {dataset: BCC-ESM1, project: CMIP6, ensemble: r1i1p1f1, grid: gn, + exp: historical, derive: true, alias: BCC-ESM1} + - {dataset: CESM2-WACCM, project: CMIP6, ensemble: r1i1p1f1, grid: gn, + exp: historical, derive: true, alias: CESM2-WACCM} + - {dataset: CESM2-WACCM-FV2, project: CMIP6, ensemble: r1i1p1f1, grid: gn, + exp: historical, derive: true, alias: CESM2-WACCM-FV2} + - {dataset: GFDL-ESM4, project: CMIP6, ensemble: r1i1p1f1, grid: gr1, + exp: historical, derive: true, alias: GFDL-ESM4} +### CMIP6 (ssp585) + # - {dataset: BCC-CSM2-MR, project: CMIP6, ensemble: r1i1p1f1, grid: gn, + # exp: ssp585, derive: true, alias: BCC-CSM2-MR} + 
# - {dataset: CNRM-ESM2-1, project: CMIP6, ensemble: r1i1p1f2, grid: gr, + # exp: ssp585, derive: true, alias: CNRM-ESM2-1} + # - {dataset: UKESM1-0-LL, project: CMIP6, ensemble: r1i1p1f2, grid: gn, + # exp: ssp585, derive: true, alias: UKESM1-0-LL} + # - {dataset: CESM2-WACCM, project: CMIP6, ensemble: r1i1p1f1, grid: gn, + # exp: ssp585, derive: true, alias: CESM2-WACCM} + # - {dataset: GFDL-ESM4, project: CMIP6, ensemble: r1i1p1f1, grid: gr1, + # exp: ssp585, derive: true, alias: GFDL-ESM4} +### CMIP6 (ssp245) + # - {dataset: BCC-CSM2-MR, project: CMIP6, ensemble: r1i1p1f1, grid: gn, + # exp: ssp245, derive: true, alias: BCC-CSM2-MR} + # - {dataset: CNRM-ESM2-1, project: CMIP6, ensemble: r1i1p1f2, grid: gr, + # exp: ssp245, derive: true, alias: CNRM-ESM2-1} + # - {dataset: UKESM1-0-LL, project: CMIP6, ensemble: r1i1p1f2, grid: gn, + # exp: ssp245, derive: true, alias: UKESM1-0-LL} + # - {dataset: CESM2-WACCM, project: CMIP6, ensemble: r1i1p1f1, grid: gn, + # exp: ssp245, derive: true, alias: CESM2-WACCM} + # - {dataset: GFDL-ESM4, project: CMIP6, ensemble: r1i1p1f1, grid: gr1, + # exp: ssp245, derive: true, alias: GFDL-ESM4} + +preprocessors: + pp_lineplots_gl_mon: &pp_lineplots_gl_mon + custom_order: true + convert_units: + units: ppbv + <<: *global_mon + + pp_lineplots_gl_ann: + custom_order: true + convert_units: + units: ppbv + <<: *global_ann + + pp_lineplots_nh_mon: &pp_lineplots_nh_mon + custom_order: true + convert_units: + units: ppbv + <<: *nh_mon + + pp_lineplots_nh_ann: + custom_order: true + convert_units: + units: ppbv + <<: *nh_ann + + pp_lineplots_sh_mon: &pp_lineplots_sh_mon + custom_order: true + convert_units: + units: ppbv + <<: *sh_mon + + pp_lineplots_sh_ann: + custom_order: true + convert_units: + units: ppbv + <<: *sh_ann + + pp_lineplots_anncyc_gl: + custom_order: true + <<: *pp_lineplots_gl_mon + climate_statistics: + operator: mean + period: month + + pp_lineplots_anncyc_nh: + custom_order: true + <<: *pp_lineplots_nh_mon + climate_statistics: + operator: mean + period: month + + pp_lineplots_anncyc_sh: + custom_order: true + <<: *pp_lineplots_sh_mon + climate_statistics: + operator: mean + period: month + + +xch4_def_cmip6: &xch4def_cmip6 + mip: Amon + start_year: 2003 + end_year: 2014 + +xch4_def_future: &xch4def_fut + mip: Amon + start_year: 2016 + end_year: 2099 + + +diagnostics: + ### global analyses + lineplots_gl_monmean: + description: lineplot monthly means + variables: + xch4: + preprocessor: pp_lineplots_gl_mon + <<: *xch4def_cmip6 + scripts: + lineplot_gl_monmean: + script: mpqb/mpqb_lineplot.py + lineplots_gl_gr: + description: lineplot growth rate global + variables: + xch4: + preprocessor: pp_lineplots_gl_ann + <<: *xch4def_cmip6 + scripts: + lineplot_gl_growthrate: + script: mpqb/mpqb_lineplot_growthrate.py + lineplots_anncyc_gl: + description: lineplot annual cycle + variables: + xch4: + preprocessor: pp_lineplots_anncyc_gl + derive: true + <<: *xch4def_cmip6 + scripts: + lineplot_gl_anncyc: + script: mpqb/mpqb_lineplot_anncyc.py + + ### NH analyses + lineplots_nh_monmean: + description: lineplot monthly means for the NH + variables: + xch4: + preprocessor: pp_lineplots_nh_mon + <<: *xch4def_cmip6 + scripts: + lineplot_nh_monmean: + script: mpqb/mpqb_lineplot.py + lineplots_nh_gr: + description: lineplot growth rate for the NH + variables: + xch4: + preprocessor: pp_lineplots_nh_ann + <<: *xch4def_cmip6 + scripts: + lineplot_nh_growthrate: + script: mpqb/mpqb_lineplot_growthrate.py + lineplots_anncyc_nh: + description: lineplot annual cycle 
for the NH + variables: + xch4: + preprocessor: pp_lineplots_anncyc_nh + derive: true + <<: *xch4def_cmip6 + scripts: + lineplot_nh_anncyc: + script: mpqb/mpqb_lineplot_anncyc.py + + ### SH analyses + lineplots_sh_monmean: + description: lineplot monthly means for the SH + variables: + xch4: + preprocessor: pp_lineplots_sh_mon + <<: *xch4def_cmip6 + scripts: + lineplot_sh_monmean: + script: mpqb/mpqb_lineplot.py + lineplots_sh_gr: + description: lineplot growth rate for the SH + variables: + xch4: + preprocessor: pp_lineplots_sh_ann + <<: *xch4def_cmip6 + scripts: + lineplot_sh_growthrate: + script: mpqb/mpqb_lineplot_growthrate.py + lineplots_anncyc_sh: + description: lineplot annual cycle for the SH + variables: + xch4: + preprocessor: pp_lineplots_anncyc_sh + derive: true + <<: *xch4def_cmip6 + scripts: + lineplot_sh_anncyc: + script: mpqb/mpqb_lineplot_anncyc.py diff --git a/esmvaltool/recipes/recipe_albedolandcover.yml b/esmvaltool/recipes/recipe_albedolandcover.yml new file mode 100644 index 0000000000..afd9bf2031 --- /dev/null +++ b/esmvaltool/recipes/recipe_albedolandcover.yml @@ -0,0 +1,154 @@ +# ESMValTool +# recipe_albedolandcover.yml +--- +documentation: + title: Landcover Albedo Relationship + description: | + This recipe analyzes the relationship between landcover and albedo + in CMIP models + + authors: + - crezee_bas + - lejeune_quentin + + maintainer: + - unmaintained + + +albedolandcover_parameters: &albedolandcover_parameters + params: + # + # size of the big box in the latitudinal direction. + # Recommended value: 5 + latsize_BB: 5 + # + # size of the big box in the longitudinal direction. + # Recommended value: 5 + lonsize_BB: 5 + # + # minimum grid cell area fraction that must be + # covered by the sum of the area fractions of all + # land cover classes included in the local + # regression for one grid cell, for this grid cell + # to be included in the regression. + # Recommended value: 90 + threshold_sumpred: 90 + # + # minimum number of grid cells with non-zero area fraction + # covered by one land cover class within a big box, for this + # land cover class to be included as a predictor in the local + # regression. + # Recommended value: 2 + mingc: 2 + # + # minimum number of grid cells with relevant + # information required within a big box to perform + # the local regression. 
+ # Recommended value: 15 + minnum_gc_bb: 15 + snowfree: true + lc1_class: ['treeFrac'] + lc2_class: ['shrubFrac'] + lc3_class: ['grassFrac', 'cropFrac', 'pastureFrac'] + + +CMIP6_landcover: &CMIP6_landcover + additional_datasets: + - {dataset: HadGEM3-GC31-LL, project: CMIP6, grid: gn, exp: historical, + ensemble: r1i1p1f3} + +CMIP5_landcover: &CMIP5_landcover + additional_datasets: + - {dataset: MPI-ESM-LR, project: CMIP5, ensemble: r1i1p1} + +preprocessors: + pp_cmip: + custom_order: true + regrid: + target_grid: 2x2 + scheme: linear + extract_month: + month: 7 + climate_statistics: + operator: mean + mask_landsea: + mask_out: sea + pp_obs: + mask_landsea: + mask_out: sea + extract_month: + month: 7 + + +diagnostics: + albedolandcover_cmip5: + description: "Multiple linear regression between albedo and xxFrac" + variables: + treeFrac: &variable_settings_cmip5 + preprocessor: pp_cmip + mip: Lmon + exp: historical + start_year: 2000 + end_year: 2004 + <<: *CMIP5_landcover + snc: + <<: *variable_settings_cmip5 + mip: LImon + alb: + <<: *variable_settings_cmip5 + mip: Amon + derive: true + force_derivation: false + cropFrac: + <<: *variable_settings_cmip5 + grassFrac: + <<: *variable_settings_cmip5 + shrubFrac: + <<: *variable_settings_cmip5 + pastureFrac: + <<: *variable_settings_cmip5 + scripts: + albedolandcover: + script: landcover/albedolandcover.py + <<: *albedolandcover_parameters + + albedolandcover_cmip6: + description: "Multiple linear regression between albedo and xxFrac" + variables: + treeFrac: &variable_settings_cmip6 + preprocessor: pp_cmip + mip: Lmon + exp: historical + start_year: 2000 + end_year: 2004 + <<: *CMIP6_landcover + snc: + <<: *variable_settings_cmip6 + mip: LImon + alb: + <<: *variable_settings_cmip6 + mip: Amon + derive: true + force_derivation: false + grassFrac: + <<: *variable_settings_cmip6 + shrubFrac: + <<: *variable_settings_cmip6 + scripts: + albedolandcover: + script: landcover/albedolandcover.py + <<: *albedolandcover_parameters + + albedolandcover_obs: + description: "Multiple linear regression between albedo and xxFrac" + variables: + albDiffiTr13: + preprocessor: pp_obs + mip: Amon + additional_datasets: + - {dataset: Duveiller2018, project: OBS, tier: 2, version: v2018, + start_year: 2010, end_year: 2010, frequency: mon, type: clim} + scripts: + albedolandcover: + script: landcover/albedolandcover.py + <<: *albedolandcover_parameters diff --git a/esmvaltool/recipes/recipe_anav13jclim.yml b/esmvaltool/recipes/recipe_anav13jclim.yml new file mode 100644 index 0000000000..08559c2691 --- /dev/null +++ b/esmvaltool/recipes/recipe_anav13jclim.yml @@ -0,0 +1,895 @@ +# ESMValTool +# recipe_anav13jclim.xml +--- +documentation: + title: Land and ocean components of the global carbon cycle in CMIP5 + + description: | + This recipe reproduces most of the figures of Anav et al. (2013). 
+
+  authors:
+    - anav_alessandro
+    - wenzel_sabrina
+    - schlund_manuel
+    - righi_mattia
+
+  maintainer:
+    - gier_bettina
+
+  references:
+    - anav13jclim
+
+  projects:
+    - embrace
+    - crescendo
+
+
+preprocessors:
+
+  regridding: &regridding
+    regrid:
+      target_grid: 2x2
+      scheme: linear
+
+  landmask: &landmask
+    mask_landsea:
+      mask_out: sea
+
+  landmask_and_regrid:
+    <<: *regridding
+    <<: *landmask
+
+  land_fraction_weighting: &land_fraction_weighting
+    weighting_landsea_fraction: &weighting_options
+      area_type: land
+      exclude: [
+        'bcc-csm1-1-m',
+        'GCP2018',
+        'inmcm4',
+        'JMA-TRANSCOM',
+        'LAI3g',
+        'MTE',
+        'NDP',
+      ]
+
+  sea_fraction_weighting: &sea_fraction_weighting
+    weighting_landsea_fraction:
+      <<: *weighting_options
+      area_type: sea
+
+  land_fraction_weighting_and_regrid:
+    <<: *regridding
+    <<: *land_fraction_weighting
+
+  sea_fraction_weighting_and_regrid:
+    <<: *regridding
+    <<: *sea_fraction_weighting
+
+  grading:
+    regrid:
+      target_grid: ref_dataset
+      scheme: linear
+    mask_fillvalues: true
+
+  regrid_to_ref: &regrid_to_ref
+    regrid:
+      target_grid: reference_dataset
+      scheme: linear
+    mask_fillvalues:
+      threshold_fraction: 0.95
+
+  land_fraction_weighting_and_regrid_to_ref:
+    <<: *land_fraction_weighting
+    <<: *regrid_to_ref
+
+
+diagnostics:
+
+  diag_mvi_tas:
+    description: MVI scatter plot for tas (Figure 1).
+    themes:
+      - phys
+    realms:
+      - atmos
+    variables:
+      tas: &var_tas
+        preprocessor: landmask_and_regrid
+        project: CMIP5
+        mip: Amon
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 1901
+        end_year: 2005
+        reference_dataset: CRU
+        plot_units: degC
+        additional_datasets: &tas_datasets
+          - {dataset: CRU, project: OBS, type: reanaly, version: TS4.02, tier: 2}
+          - {dataset: bcc-csm1-1}
+          - {dataset: bcc-csm1-1-m}
+          - {dataset: BNU-ESM}
+          - {dataset: CanESM2}
+          - {dataset: CESM1-BGC}
+          - {dataset: GFDL-ESM2G}
+          - {dataset: GFDL-ESM2M}
+          - {dataset: HadGEM2-CC}
+          - {dataset: HadGEM2-ES}
+          - {dataset: inmcm4}
+          - {dataset: IPSL-CM5A-LR}
+          - {dataset: IPSL-CM5A-MR}
+          - {dataset: IPSL-CM5B-LR}
+          - {dataset: MIROC-ESM}
+          - {dataset: MIROC-ESM-CHEM}
+          - {dataset: MPI-ESM-LR}
+          - {dataset: MPI-ESM-MR}
+          - {dataset: NorESM1-ME}
+    scripts:
+      mvi_global: &mvi_global
+        script: carbon_cycle/mvi.ncl
+        sort: true
+        styleset: CMIP5
+        region: global
+        mean_time_range: [1986, 2005]
+        mvi_time_range: [1986, 2005]
+      mvi_nh: &mvi_nh
+        <<: *mvi_global
+        region: nh
+      mvi_sh: &mvi_sh
+        <<: *mvi_global
+        region: sh
+      mvi_trop: &mvi_trop
+        <<: *mvi_global
+        region: trop
+
+  diag_main_tas:
+    description: Error bar, seasonal cycle and evolution plots for tas (Figure 1).
+    themes:
+      - phys
+    realms:
+      - atmos
+    variables:
+      tas:
+        <<: *var_tas
+        preprocessor: landmask
+        additional_datasets: *tas_datasets
+    scripts:
+      main_global: &main_global
+        script: carbon_cycle/main.ncl
+        legend_outside: false
+        sort: true
+        seasonal_cycle_plot: true
+        errorbar_plot: true
+        mean_IAV_plot: true
+        styleset: CMIP5
+        region: global
+        evolution_plot: true
+        evolution_plot_volcanoes: true
+        evolution_plot_ref_dataset: CRU
+        evolution_plot_anomaly: true
+        ensemble_name: CMIP5
+      main_nh: &main_nh
+        <<: *main_global
+        region: nh
+      main_sh: &main_sh
+        <<: *main_global
+        region: sh
+      main_trop: &main_trop
+        <<: *main_global
+        region: trop
+
+  diag_mvi_pr:
+    description: MVI scatter plot for pr (Figure 2).
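+    # 'additional_datasets: *tas_datasets' below re-uses the dataset list
+    # anchored in diag_mvi_tas, so the model ensemble (and the CRU reference)
+    # only has to be maintained in one place; the same pattern is used for
+    # the tos, nbp, lai and gpp dataset lists further down.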
+    themes:
+      - phys
+    realms:
+      - atmos
+    variables:
+      pr: &var_pr
+        preprocessor: landmask_and_regrid
+        project: CMIP5
+        mip: Amon
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 1901
+        end_year: 2005
+        reference_dataset: CRU
+        plot_units: mm yr-1
+        additional_datasets: *tas_datasets
+    scripts:
+      mvi_global:
+        <<: *mvi_global
+      mvi_nh:
+        <<: *mvi_nh
+      mvi_sh:
+        <<: *mvi_sh
+      mvi_trop:
+        <<: *mvi_trop
+
+  diag_main_pr:
+    description: Error bar, seasonal cycle and evolution plots for pr (Figure 2).
+    themes:
+      - phys
+    realms:
+      - atmos
+    variables:
+      pr:
+        <<: *var_pr
+        preprocessor: landmask
+        additional_datasets: *tas_datasets
+    scripts:
+      main_global:
+        <<: *main_global
+      main_nh:
+        <<: *main_nh
+      main_sh:
+        <<: *main_sh
+      main_trop:
+        <<: *main_trop
+
+  diag_mvi_tos:
+    description: MVI scatter plot for tos (Figure 3).
+    themes:
+      - phys
+    realms:
+      - ocean
+    variables:
+      tos: &var_tos
+        preprocessor: regridding
+        project: CMIP5
+        mip: Omon
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 1901
+        end_year: 2005
+        reference_dataset: HadISST
+        plot_units: degC
+        additional_datasets: &tos_datasets
+          - {dataset: HadISST, project: OBS, type: reanaly, version: 1, tier: 2}
+          - {dataset: bcc-csm1-1}
+          - {dataset: bcc-csm1-1-m}
+          - {dataset: CanESM2}
+          - {dataset: CESM1-BGC}
+          - {dataset: GFDL-ESM2G}
+          - {dataset: GFDL-ESM2M}
+          - {dataset: HadGEM2-CC}
+          - {dataset: HadGEM2-ES}
+          - {dataset: inmcm4}
+          - {dataset: IPSL-CM5A-LR}
+          - {dataset: IPSL-CM5A-MR}
+          - {dataset: IPSL-CM5B-LR}
+          - {dataset: MIROC-ESM}
+          - {dataset: MIROC-ESM-CHEM}
+          - {dataset: MPI-ESM-LR}
+          - {dataset: MPI-ESM-MR}
+          - {dataset: NorESM1-ME}
+    scripts:
+      mvi_global:
+        <<: *mvi_global
+      mvi_nh:
+        <<: *mvi_nh
+      mvi_sh:
+        <<: *mvi_sh
+      mvi_trop:
+        <<: *mvi_trop
+
+  diag_main_tos:
+    description: Error bar, seasonal cycle and evolution plots for tos (Figure 3).
+    themes:
+      - phys
+    realms:
+      - ocean
+    variables:
+      tos:
+        <<: *var_tos
+        additional_datasets: *tos_datasets
+    scripts:
+      main_global:
+        <<: *main_global
+        evolution_plot_ref_dataset: HadISST
+      main_nh:
+        <<: *main_nh
+        evolution_plot_ref_dataset: HadISST
+      main_sh:
+        <<: *main_sh
+        evolution_plot_ref_dataset: HadISST
+      main_trop:
+        <<: *main_trop
+        evolution_plot_ref_dataset: HadISST
+
+  diag_main_nbp_global:
+    description: Error bar, seasonal cycle and evolution plots for nbp (Figures 5, 6, 7).
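+    # 'plot_units' (e.g. 'PgC y-1' below) only affects how the carbon-cycle
+    # NCL scripts label and convert the data for plotting; the units used
+    # during preprocessing remain the CMOR units of the variable.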
+    themes:
+      - phys
+    realms:
+      - land
+    variables:
+      nbp: &var_nbp
+        preprocessor: land_fraction_weighting
+        project: CMIP5
+        mip: Lmon
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 1901
+        end_year: 2005
+        reference_dataset: JMA-TRANSCOM
+        plot_units: PgC y-1
+        additional_datasets:
+          - {dataset: GCP2018, project: OBS, type: reanaly, version: '1.0', tier: 2, start_year: 1959, end_year: 2017, frequency: yr}
+        additional_datasets: &nbp_datasets
+          - {dataset: JMA-TRANSCOM, project: OBS, type: reanaly, version: 2018, tier: 3, start_year: 1986, end_year: 2005}
+          - {dataset: BNU-ESM}
+          - {dataset: CanESM2}
+          - {dataset: CESM1-BGC}
+          - {dataset: GFDL-ESM2G}
+          - {dataset: GFDL-ESM2M}
+          - {dataset: HadGEM2-CC}
+          - {dataset: HadGEM2-ES}
+          - {dataset: inmcm4}
+          - {dataset: IPSL-CM5A-LR}
+          - {dataset: IPSL-CM5A-MR}
+          - {dataset: IPSL-CM5B-LR}
+          - {dataset: MIROC-ESM}
+          - {dataset: MIROC-ESM-CHEM}
+          - {dataset: MPI-ESM-LR}
+          - {dataset: MPI-ESM-MR}
+          - {dataset: NorESM1-ME}
+    scripts:
+      main_global:
+        <<: *main_global
+        evolution_plot_ref_dataset: GCP2018
+        evolution_plot_anomaly: false
+        anav_month: true
+
+  diag_main_nbp_not_global:
+    description: Error bar, seasonal cycle and evolution plots for nbp (Figures 5, 6, 7).
+    themes:
+      - phys
+    realms:
+      - land
+    variables:
+      nbp:
+        <<: *var_nbp
+        additional_datasets: *nbp_datasets
+    scripts:
+      main_nh:
+        <<: *main_nh
+        evolution_plot: false
+        anav_month: true
+      main_sh:
+        <<: *main_sh
+        evolution_plot: false
+        anav_month: true
+      main_trop:
+        <<: *main_trop
+        evolution_plot: false
+        anav_month: true
+
+  diag_mvi_lai:
+    description: MVI scatter plot for lai (Figure 10).
+    themes:
+      - phys
+    realms:
+      - land
+    variables:
+      lai: &var_lai
+        preprocessor: land_fraction_weighting_and_regrid
+        project: CMIP5
+        mip: Lmon
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 1986
+        end_year: 2005
+        reference_dataset: LAI3g
+        additional_datasets: &lai_datasets
+          - {dataset: LAI3g, project: OBS, type: reanaly, version: 1_regridded, tier: 3}
+          - {dataset: bcc-csm1-1}
+          - {dataset: bcc-csm1-1-m}
+          - {dataset: BNU-ESM}
+          - {dataset: CanESM2}
+          - {dataset: CESM1-BGC}
+          - {dataset: GFDL-ESM2G}
+          - {dataset: GFDL-ESM2M}
+          - {dataset: HadGEM2-CC}
+          - {dataset: HadGEM2-ES}
+          - {dataset: inmcm4}
+          - {dataset: IPSL-CM5A-LR}
+          - {dataset: IPSL-CM5A-MR}
+          - {dataset: IPSL-CM5B-LR}
+          - {dataset: MIROC-ESM}
+          - {dataset: MIROC-ESM-CHEM}
+          - {dataset: MPI-ESM-LR}
+          - {dataset: MPI-ESM-MR}
+          - {dataset: NorESM1-ME}
+    scripts:
+      mvi_global:
+        <<: *mvi_global
+      mvi_nh:
+        <<: *mvi_nh
+      mvi_sh:
+        <<: *mvi_sh
+      mvi_trop:
+        <<: *mvi_trop
+
+  diag_main_lai:
+    description: Error bar, seasonal cycle and evolution plots for lai (Figure 11).
+    themes:
+      - phys
+    realms:
+      - land
+    variables:
+      lai:
+        <<: *var_lai
+        preprocessor: land_fraction_weighting
+        additional_datasets: *lai_datasets
+    scripts:
+      main_global:
+        <<: *main_global
+        evolution_plot_ref_dataset: LAI3g
+        evolution_plot_anomaly: false
+      main_nh:
+        <<: *main_nh
+        evolution_plot_ref_dataset: LAI3g
+        evolution_plot_anomaly: false
+      main_sh:
+        <<: *main_sh
+        evolution_plot_ref_dataset: LAI3g
+        evolution_plot_anomaly: false
+      main_trop:
+        <<: *main_trop
+        evolution_plot_ref_dataset: LAI3g
+        evolution_plot_anomaly: false
+
+  diag_mvi_gpp:
+    description: MVI scatter plot for gpp (Figure 8).
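+    # The gpp MVI scripts below add 'mask_below: 1e-2', which masks grid
+    # cells whose values fall below that threshold before the variability
+    # index is computed, keeping near-zero productivity regions from
+    # distorting the statistic.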
+    themes:
+      - phys
+    realms:
+      - land
+    variables:
+      gpp: &var_gpp
+        preprocessor: land_fraction_weighting_and_regrid
+        project: CMIP5
+        mip: Lmon
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 1986
+        end_year: 2005
+        reference_dataset: MTE
+        plot_units: PgC y-1
+        additional_datasets: &gpp_datasets
+          - {dataset: MTE, project: OBS, type: reanaly, version: May12, tier: 3}
+          - {dataset: bcc-csm1-1}
+          - {dataset: bcc-csm1-1-m}
+          - {dataset: BNU-ESM}
+          - {dataset: CanESM2}
+          - {dataset: CESM1-BGC}
+          - {dataset: GFDL-ESM2G}
+          - {dataset: GFDL-ESM2M}
+          - {dataset: HadGEM2-CC}
+          - {dataset: HadGEM2-ES}
+          - {dataset: inmcm4}
+          - {dataset: IPSL-CM5A-LR}
+          - {dataset: IPSL-CM5A-MR}
+          - {dataset: IPSL-CM5B-LR}
+          - {dataset: MIROC-ESM}
+          - {dataset: MIROC-ESM-CHEM}
+          - {dataset: MPI-ESM-LR}
+          - {dataset: MPI-ESM-MR}
+          - {dataset: NorESM1-ME}
+    scripts:
+      mvi_global:
+        <<: *mvi_global
+        mask_below: 1e-2
+      mvi_nh:
+        <<: *mvi_nh
+        mask_below: 1e-2
+      mvi_sh:
+        <<: *mvi_sh
+        mask_below: 1e-2
+      mvi_trop:
+        <<: *mvi_trop
+        mask_below: 1e-2
+
+  diag_main_gpp:
+    description: Error bar, seasonal cycle and evolution plots for gpp (Figure 9).
+    themes:
+      - phys
+    realms:
+      - land
+    variables:
+      gpp:
+        <<: *var_gpp
+        preprocessor: land_fraction_weighting
+        additional_datasets: *gpp_datasets
+    scripts:
+      main_global:
+        <<: *main_global
+        evolution_plot_ref_dataset: MTE
+        evolution_plot_anomaly: false
+      main_nh:
+        <<: *main_nh
+        evolution_plot_ref_dataset: MTE
+        evolution_plot_anomaly: false
+      main_sh:
+        <<: *main_sh
+        evolution_plot_ref_dataset: MTE
+        evolution_plot_anomaly: false
+      main_trop:
+        <<: *main_trop
+        evolution_plot_ref_dataset: MTE
+        evolution_plot_anomaly: false
+
+  diag_two_vars_scatter:
+    description: Two-variable scatter plot for cSoil and cVeg (Figure 12).
+    themes:
+      - phys
+    realms:
+      - land
+    variables:
+      cVeg:
+        preprocessor: land_fraction_weighting
+        project: CMIP5
+        mip: Lmon
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 1986
+        end_year: 2005
+        reference_dataset: NDP
+        plot_units: PgC
+        additional_datasets:
+          - {dataset: NDP, project: OBS, type: reanaly, version: 017b, tier: 3, start_year: 2000, end_year: 2000}
+      cSoil:
+        preprocessor: land_fraction_weighting
+        project: CMIP5
+        mip: Lmon
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 1986
+        end_year: 2005
+        reference_dataset: HWSD
+        plot_units: PgC
+        additional_datasets:
+          - {dataset: HWSD, project: OBS, type: reanaly, version: 1.2, tier: 3, start_year: 2000, end_year: 2000,
+             supplementary_variables: [{short_name: sftlf, mip: fx}]}
+    additional_datasets:
+      - {dataset: BNU-ESM}
+      - {dataset: CanESM2}
+      - {dataset: CESM1-BGC}
+      - {dataset: GFDL-ESM2G}
+      - {dataset: GFDL-ESM2M}
+      - {dataset: HadGEM2-CC}
+      - {dataset: HadGEM2-ES}
+      - {dataset: inmcm4}
+      - {dataset: IPSL-CM5A-LR}
+      - {dataset: IPSL-CM5A-MR}
+      - {dataset: IPSL-CM5B-LR}
+      - {dataset: MIROC-ESM}
+      - {dataset: MIROC-ESM-CHEM}
+      - {dataset: MPI-ESM-LR}
+      - {dataset: MPI-ESM-MR}
+      - {dataset: NorESM1-ME}
+    scripts:
+      scat_global: &scat_glob
+        script: carbon_cycle/two_variables.ncl
+        legend_outside: false
+        styleset: CMIP5
+        region: global
+      scat_nh:
+        <<: *scat_glob
+        region: nh
+      scat_sh:
+        <<: *scat_glob
+        region: sh
+      scat_trop:
+        <<: *scat_glob
+        region: trop
+
+  diag_main_fgco2_global:
+    description: Error bar, seasonal cycle and evolution plots for fgco2 (Figures 13, 14, 15).
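+    # Sea-fraction weighting of fgco2 needs an ocean-fraction file; for
+    # MIROC-ESM and MIROC-ESM-CHEM below, sftof is marked 'skip: true' and
+    # the land fraction sftlf (from the fx table) is attached instead, from
+    # which the sea fraction can be obtained as its complement.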
+    themes:
+      - phys
+    realms:
+      - ocean
+    variables:
+      fgco2: &var_fgco2
+        preprocessor: sea_fraction_weighting_and_regrid
+        project: CMIP5
+        mip: Omon
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 1901
+        end_year: 2005
+        reference_dataset: JMA-TRANSCOM
+        plot_units: PgC y-1
+        additional_datasets:
+          - {dataset: JMA-TRANSCOM, project: OBS, type: reanaly, version: 2018, tier: 3, start_year: 1986, end_year: 2005}
+          - {dataset: CanESM2}
+          - {dataset: CESM1-BGC}
+          - {dataset: GFDL-ESM2G}
+          - {dataset: GFDL-ESM2M}
+          - {dataset: HadGEM2-CC}
+          - {dataset: HadGEM2-ES}
+          - {dataset: inmcm4}
+          - {dataset: IPSL-CM5A-LR}
+          - {dataset: IPSL-CM5A-MR}
+          - {dataset: IPSL-CM5B-LR}
+          - {dataset: MIROC-ESM, supplementary_variables: [{short_name: sftlf, mip: fx, ensemble: r0i0p0}, {short_name: sftof, skip: true}]}
+          - {dataset: MIROC-ESM-CHEM, supplementary_variables: [{short_name: sftlf, mip: fx, ensemble: r0i0p0}, {short_name: sftof, skip: true}]}
+          - {dataset: MPI-ESM-LR}
+          - {dataset: MPI-ESM-MR}
+          - {dataset: NorESM1-ME}
+      fgco2_GCP2018:
+        <<: *var_fgco2
+        short_name: fgco2
+        preprocessor: default
+        additional_datasets:
+          - {dataset: GCP2018, project: OBS, type: reanaly, version: '1.0', tier: 2, start_year: 1959, end_year: 2017, frequency: yr}
+    scripts:
+      main_global:
+        <<: *main_global
+        evolution_plot_ref_dataset: GCP2018
+        evolution_plot_anomaly: false
+
+  diag_main_fgco2_not_global:
+    description: Error bar, seasonal cycle and evolution plots for fgco2 (Figures 13, 14, 15).
+    themes:
+      - phys
+    realms:
+      - ocean
+    variables:
+      fgco2:
+        <<: *var_fgco2
+    scripts:
+      main_nh:
+        <<: *main_nh
+        evolution_plot: false
+      main_sh:
+        <<: *main_sh
+        evolution_plot: false
+      main_trop:
+        <<: *main_trop
+        evolution_plot: false
+
+  diag_grading_tas: &diag_grading
+    description: Grading precalculations for tas.
+    themes:
+      - phys
+    realms:
+      - atmos
+    variables:
+      tas:
+        preprocessor: regrid_to_ref
+        project: CMIP5
+        mip: Amon
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 1986
+        end_year: 2005
+        reference_dataset: CRU
+        plot_units: degC
+        additional_datasets: *tas_datasets
+    scripts:
+      cycle_global: &cycle_global
+        script: perfmetrics/main.ncl
+        plot_type: cycle_latlon
+        time_avg: monthlyclim
+        region: global
+        plot_stddev: all
+        legend_outside: false
+        styleset: CMIP5
+      cycle_nh: &cycle_nh
+        <<: *cycle_global
+        region: nh
+      cycle_sh: &cycle_sh
+        <<: *cycle_global
+        region: sh
+      cycle_trop: &cycle_trop
+        <<: *cycle_global
+        region: trop
+      grading_global: &grading_global
+        <<: *cycle_global
+        draw_plots: true
+        calc_grading: true
+        metric: [RMSD]
+        normalization: [maximum]
+      grading_nh: &grading_nh
+        <<: *grading_global
+        region: nh
+      grading_sh: &grading_sh
+        <<: *grading_global
+        region: sh
+      grading_trop: &grading_trop
+        <<: *grading_global
+        region: trop
+
+  diag_grading_pr:
+    <<: *diag_grading
+    description: Grading precalculations for pr.
+    variables:
+      pr:
+        preprocessor: regrid_to_ref
+        project: CMIP5
+        mip: Amon
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 1986
+        end_year: 2005
+        reference_dataset: CRU
+        plot_units: mm yr-1
+        additional_datasets: *tas_datasets
+    scripts:
+      cycle_global:
+        <<: *cycle_global
+      cycle_nh:
+        <<: *cycle_nh
+      cycle_sh:
+        <<: *cycle_sh
+      cycle_trop:
+        <<: *cycle_trop
+      grading_global:
+        <<: *grading_global
+      grading_nh:
+        <<: *grading_nh
+      grading_sh:
+        <<: *grading_sh
+      grading_trop:
+        <<: *grading_trop
+
+  diag_grading_nbp:
+    <<: *diag_grading
+    description: Grading precalculations for nbp.
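+    # '<<: *diag_grading' copies the whole diag_grading_tas diagnostic
+    # (description, themes, realms, variables, scripts); the keys repeated
+    # below override it. Because YAML merge keys are shallow, 'variables' and
+    # 'scripts' are replaced as complete blocks rather than merged key by
+    # key, which is why every grading diagnostic spells out its scripts again.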
+ realms: + - land + variables: + nbp: + preprocessor: land_fraction_weighting_and_regrid_to_ref + project: CMIP5 + mip: Lmon + exp: historical + ensemble: r1i1p1 + start_year: 1986 + end_year: 2005 + reference_dataset: JMA-TRANSCOM + additional_datasets: *nbp_datasets + scripts: + cycle_global: + <<: *cycle_global + cycle_nh: + <<: *cycle_nh + cycle_sh: + <<: *cycle_sh + cycle_trop: + <<: *cycle_trop + grading_global: + <<: *grading_global + grading_nh: + <<: *grading_nh + grading_sh: + <<: *grading_sh + grading_trop: + <<: *grading_trop + + diag_grading_lai: + <<: *diag_grading + description: Grading precalculations for lai. + realms: + - land + variables: + lai: + preprocessor: land_fraction_weighting_and_regrid_to_ref + project: CMIP5 + mip: Lmon + exp: historical + ensemble: r1i1p1 + start_year: 1986 + end_year: 2005 + reference_dataset: LAI3g + additional_datasets: *lai_datasets + scripts: + cycle_global: + <<: *cycle_global + cycle_nh: + <<: *cycle_nh + cycle_sh: + <<: *cycle_sh + cycle_trop: + <<: *cycle_trop + grading_global: + <<: *grading_global + grading_nh: + <<: *grading_nh + grading_sh: + <<: *grading_sh + grading_trop: + <<: *grading_trop + + diag_grading_gpp: + <<: *diag_grading + description: Grading precalculations for gpp. + realms: + - land + variables: + gpp: + preprocessor: land_fraction_weighting_and_regrid_to_ref + project: CMIP5 + mip: Lmon + exp: historical + ensemble: r1i1p1 + start_year: 1986 + end_year: 2005 + reference_dataset: MTE + additional_datasets: *gpp_datasets + scripts: + cycle_global: + <<: *cycle_global + cycle_nh: + <<: *cycle_nh + cycle_sh: + <<: *cycle_sh + cycle_trop: + <<: *cycle_trop + grading_global: + <<: *grading_global + grading_nh: + <<: *grading_nh + grading_sh: + <<: *grading_sh + grading_trop: + <<: *grading_trop + + diag_collect: + <<: *diag_grading + description: Collect and plot previously calculated performance metrics. + realms: + - atmos + - land + scripts: + collect_global: &collect_global + script: perfmetrics/collect.ncl + ancestors: ['diag_grading_*/grading_global'] + metric: RMSD + label_bounds: [0.0, 1.0] + label_scale: 0.1 + disp_value: false + disp_rankings: true + rank_order: -1 + colormap: matlab_jet + cm_reverse: true + cm_interval: [2, 63] + sort: true + diag_order: [ + 'diag_grading_gpp-global', + 'diag_grading_nbp-global', + 'diag_grading_lai-global', + 'diag_grading_tas-global', + 'diag_grading_pr-global', + ] + collect_nh: + <<: *collect_global + ancestors: ['diag_grading_*/grading_nh'] + diag_order: [ + 'diag_grading_gpp-nh', + 'diag_grading_nbp-nh', + 'diag_grading_lai-nh', + 'diag_grading_tas-nh', + 'diag_grading_pr-nh', + ] + collect_sh: + <<: *collect_global + ancestors: ['diag_grading_*/grading_sh'] + diag_order: [ + 'diag_grading_gpp-sh', + 'diag_grading_nbp-sh', + 'diag_grading_lai-sh', + 'diag_grading_tas-sh', + 'diag_grading_pr-sh', + ] + collect_trop: + <<: *collect_global + ancestors: ['diag_grading_*/grading_trop'] + diag_order: [ + 'diag_grading_gpp-trop', + 'diag_grading_nbp-trop', + 'diag_grading_lai-trop', + 'diag_grading_tas-trop', + 'diag_grading_pr-trop', + ] diff --git a/esmvaltool/recipes/recipe_aod_aeronet_assess.yml b/esmvaltool/recipes/recipe_aod_aeronet_assess.yml new file mode 100644 index 0000000000..51cb14759b --- /dev/null +++ b/esmvaltool/recipes/recipe_aod_aeronet_assess.yml @@ -0,0 +1,65 @@ +# ESMValTool +# recipe_aod_aeronet_assess.yml +--- +documentation: + description: | + Recipe to plot seasonal maps of global aerosol optical depth (AOD) at 440nm. 
+ + title: Recipe that runs an AOD diagnostic + + authors: + - hogan_emma + - lillis_jon + - hardacre_catherine + + maintainer: + - hogan_emma + - lillis_jon + - hardacre_catherine + + projects: + - esmval + +preprocessors: + ma_season_mean: + regrid: + target_grid: 2.5x2.5 + scheme: nearest + climate_statistics: + operator: mean + period: season + seasons: ['DJF', 'MAM', 'JJA', 'SON'] + multi_model_statistics: + span: overlap + statistics: [mean] + +diagnostics: + od440aer_climatologies: + description: Visualise spatial multi-annual seasonal means AOD at 440nm. + variables: + + od440aer: &var_od440aer + mip: AERmon + short_name: od440aer + start_year: 1994 + end_year: 2014 + additional_datasets: + - {dataset: AERONET, project: OBS6, mip: AERmon, tier: 3, type: atmos, version: 20240406} + + od440aer_season: + <<: *var_od440aer + preprocessor: ma_season_mean + additional_datasets: + - {dataset: UKESM1-0-LL, project: CMIP6, mip: AERmon, exp: historical, ensemble: r1i1p1f2, grid: gn} + - {dataset: HadGEM3-GC31-LL, project: CMIP6, mip: AERmon, exp: historical, ensemble: r1i1p1f3, grid: gn} + - {dataset: EC-Earth3-AerChem, project: CMIP6, mip: AERmon, exp: historical, ensemble: r1i1p1f1, grid: gn} +# - {dataset: NorESM2-LM, project: CMIP6, mip: AERmon, exp: historical, ensemble: r1i1p1f1, grid: gn} + - {dataset: GFDL-ESM4, project: CMIP6, mip: AERmon, exp: historical, ensemble: r1i1p1f1, grid: gr1} + - {dataset: MPI-ESM-1-2-HAM, project: CMIP6, mip: AERmon, exp: historical, ensemble: r1i1p1f1, grid: gn} + + scripts: + aeronet: + script: aerosols/aod_aeronet_assess.py + observational_dataset: AERONET + quickplot: + plot_type: plot diff --git a/esmvaltool/recipes/recipe_arctic_ocean.yml b/esmvaltool/recipes/recipe_arctic_ocean.yml new file mode 100644 index 0000000000..5426cfd43e --- /dev/null +++ b/esmvaltool/recipes/recipe_arctic_ocean.yml @@ -0,0 +1,157 @@ +# ESMValTool +# recipe_arctic_ocean.yml +--- +documentation: + title: Arctic Ocean Diagnostics + description: | + Arctic Ocean diagnostics. + + authors: + - koldunov_nikolay + + maintainer: + - koldunov_nikolay + + references: + - contact_authors + + projects: + - trr181 + - applicate + +# Model problems +# bcc-csm1-1 - lon and lat in rotated grid coordinates, not usable +# GFDL-ESM2G - lon and lat in rotated grid coordinates, not usable +datasets: + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: CCSM4} + - {dataset: CanESM2} + - {dataset: CMCC-CM} + - {dataset: ACCESS1-3} + - {dataset: ACCESS1-0} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: HadCM3} + - {dataset: HadGEM2-ES} + +diagnostics: + + arctic_ocean: + description: Derive ocean variables + variables: + areacello: + project: CMIP5 + mip: fx + exp: historical + ensemble: r0i0p0 + additional_datasets: + - {dataset: PHC, project: OBS6, tier: 2, type: clim, version: 3} + thetao: &variable + mip: Omon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 2005 + additional_datasets: + - {dataset: PHC, project: OBS6, mip: Omon, tier: 2, type: clim, version: 3, start_year: 1950, end_year: 1950} + so: *variable + scripts: + arctic_ocean_main: + script: arctic_ocean/arctic_ocean.py + # Define if we calculate data for Hovmoeller diagrams. + # This data are also needed for mean vertical profiles. + hofm_data: True + # Define regions, as a list. 
+ # 'EB' - Eurasian Basin of the Arctic Ocean + # 'AB' - Amerasian Basin of the Arctic Ocean + # 'Barents_sea' - Barents Sea + # 'North_sea' - North Sea + hofm_regions: ["AB", "EB"] + # Define variables to use; these should also appear in the "variables" entry of your diagnostic + hofm_vars: ['thetao', 'so'] + # Maximum depth of Hovmoeller and vertical profiles + hofm_depth: 1500 + # Define if Hovmoeller diagrams will be plotted. + hofm_plot: True + # Define colormap (as a list, same size as list with variables) + # Only cmaps from matplotlib and cmocean are supported. + # Additional cmap - 'custom_salinity1'. + hofm_cmap: ['Spectral_r', 'custom_salinity1'] + # Data limits for plots, + # List of the same size as the list of the variables + # each entry is [vmin, vmax, number of levels, rounding limit] + hofm_limits: [[-2, 2.3, 41, 1], [30.5, 35.1, 47, 2]] + # Number of columns in the plot + hofm_ncol: 3 + # Calculate the time mean for every model + mean: True + # Plot vertical profiles + profiles: True + # Plot spatial distribution maps + plot2d: True + # Depths for spatial distribution maps + plot2d_depths: [10, 100] + # Variables to plot spatial distribution maps + plot2d_vars: ['thetao', 'so'] + # Define colormap (as a list, same size as list with variables) + # Only cmaps from matplotlib and cmocean are supported. + # Additional cmap - 'custom_salinity1'. + plot2d_cmap: ['Spectral_r', 'custom_salinity1'] + # Data limits for plots, + # List of the same size as the list of the variables + # each entry is [vmin, vmax, number of levels, rounding limit] + plot2d_limits: [[-2, 4, 20, 1], [30.5, 35.1, 47, 2]] + # Number of columns for plots + plot2d_ncol: 4 + # Plot spatial distribution maps of the bias + plot2d_bias: True + # Depths for spatial distribution of the bias + plot2d_bias_depths: [10, 100] + # Variables to plot spatial distribution of the bias for + plot2d_bias_vars: ['thetao', 'so'] + # Colormap names for every variable + plot2d_bias_cmap: ['balance', 'balance'] + # Data limits for plots, + # List of the same size as the list of the variables + # each entry is [vmin, vmax, number of levels, rounding limit] + plot2d_bias_limits: [[-3, 3, 20, 1], [-2, 2, 47, 2]] + # Number of columns in the bias plots + plot2d_bias_ncol: 4 + # Plot transects + transects: True + # Select regions (transects) to plot + # Available options are: + # AWpath - transect along the path of the Atlantic Water + # Fram - Fram strait + transects_regions: ["AWpath", "Fram"] + # Variables to plot on transects + transects_vars: ['thetao', 'so'] + # Color maps for every variable + transects_cmap: ['Spectral_r', 'custom_salinity1'] + # Data limits for plots, + # List of the same size as the list of the variables + # each entry is [vmin, vmax, number of levels, rounding limit] + transects_limits: [[-2, 4, 20, 1], [30.5, 35.1, 47, 2]] + # Maximum depth to plot the data + transects_depth: 1500 + # Number of columns + transects_ncol: 3 + # Calculate Atlantic Water core depth and temperature; + # this requires `hofm_data`: True and "EB" as one of the `hofm_regions` + AW_core: True + # Plot maps of the temperature spatial distribution at the depth + # of the Atlantic Water core + AW_core_2d: True + # Plot TS diagrams + tsdiag: True + # Regions to plot TS diagrams for + tsdiag_regions: ["AB", "EB"] + # Maximum depth to consider data for TS diagrams + tsdiag_depth: 1500 + # Number of columns + tsdiag_ncol: 3 + # Observational dataset + observational_dataset: {dataset: PHC, project: OBS} diff --git
a/esmvaltool/recipes/recipe_autoassess_landsurface_permafrost.yml b/esmvaltool/recipes/recipe_autoassess_landsurface_permafrost.yml index 6fc07d7e1e..897dcd5f19 100644 --- a/esmvaltool/recipes/recipe_autoassess_landsurface_permafrost.yml +++ b/esmvaltool/recipes/recipe_autoassess_landsurface_permafrost.yml @@ -7,15 +7,22 @@ documentation: Land surface area and ice masks are time independent. authors: - - pred_va + - predoi_valeriu + - sellar_alistair + + title: Land-surface diagnostic that computes permafrost indices (from Autoassess). + + references: + - brown02nsidc + - legates90tac + - koven13jclim maintainer: - - pred_va + - predoi_valeriu datasets: - - {dataset: MPI-ESM-LR, project: CMIP5, exp: amip, ensemble: r1i1p1, start_year: 1992, end_year: 2002} - - {dataset: MPI-ESM-MR, project: CMIP5, exp: amip, ensemble: r1i1p1, start_year: 1992, end_year: 2002} - - {dataset: inmcm4, project: CMIP5, exp: amip, ensemble: r1i1p1, start_year: 1992, end_year: 2002} + - {dataset: ACCESS-CM2, project: CMIP6, exp: historical, grid: gn, ensemble: r1i1p1f1, start_year: 1992, end_year: 2002} + - {dataset: E3SM-1-0, project: CMIP6, exp: historical, grid: gr, ensemble: r1i1p1f1, start_year: 1992, end_year: 2002} preprocessors: pp_aa_area: @@ -29,30 +36,30 @@ diagnostics: variables: tas: mip: Amon - fx_files: [sftlf, mrsofc] tsl: mip: Lmon - fx_files: [sftlf, mrsofc] + sftlf: + mip: fx + mrsos: + mip: Lmon scripts: autoassess_landsurf_permafrost: &autoassess_landsurf_permafrost_settings script: autoassess/autoassess_area_base.py title: "Autoassess Land-Surface Permafrost Diagnostic" area: land_surface_permafrost - control_model: MPI-ESM-LR - exp_model: MPI-ESM-MR + control_model: E3SM-1-0 + exp_model: ACCESS-CM2 obs_models: [] - fx: [sftlf, mrsofc] - start: 1997/12/01 + start: 1993/12/01 end: 2002/12/01 - climfiles_root: '/group_workspaces/jasmin4/esmeval/for_vp/obs_etc' plot_standard: description: Wrapper to collect and plot previously calculated metrics scripts: plot_landsurf_permafrost: &plot_landsurf_permafrost_settings <<: *autoassess_landsurf_permafrost_settings - control_model: MPI-ESM-LR - exp_model: MPI-ESM-MR + control_model: E3SM-1-0 + exp_model: ACCESS-CM2 script: autoassess/plot_autoassess_metrics.py ancestors: ['*/autoassess_landsurf_permafrost'] title: "Plot Land-Surface Permafrost Metrics" diff --git a/esmvaltool/recipes/recipe_autoassess_landsurface_snow.yml b/esmvaltool/recipes/recipe_autoassess_landsurface_snow.yml deleted file mode 100644 index 3067984560..0000000000 --- a/esmvaltool/recipes/recipe_autoassess_landsurface_snow.yml +++ /dev/null @@ -1,60 +0,0 @@ -# ESMValTool -# recipe_autoassess_landsurface_snow.yml ---- -documentation: - description: | - Recipe that runs the Autoassess Land-surface assessment area diagnostic. - Climatological files are stored externally to avoid overloading the - ESMValTool source. See /group_workspaces/jasmin4/esmeval/autoassess_specific_files - (on JASMIN). - - authors: - - pred_va - - maintainer: - - pred_va - -datasets: - - {dataset: MPI-ESM-LR, project: CMIP5, exp: amip, ensemble: r1i1p1, start_year: 1992, end_year: 2002} - - {dataset: MPI-ESM-MR, project: CMIP5, exp: amip, ensemble: r1i1p1, start_year: 1992, end_year: 2002} - - {dataset: inmcm4, project: CMIP5, exp: amip, ensemble: r1i1p1, start_year: 1992, end_year: 2002} - -preprocessors: - pp_aa_area: - regrid: # NOT USED - target_grid: 0.15x0.15 - scheme: linear - -diagnostics: - aa_landsurf_snow: - description: Autoassess test diag for Land-Surface Snow. 
- variables: - snw: # surface_snow_amount (CMOR name=snw) - mip: LImon - fx_files: [sftlf] - scripts: - autoassess_landsurf_snow: &autoassess_landsurf_snow_settings - script: autoassess/autoassess_area_base.py - title: "Autoassess Land-Surface Snow Diagnostic" - area: land_surface_snow - control_model: MPI-ESM-LR - exp_model: MPI-ESM-MR - obs_models: [] - fx: [sftlf] - start: 1997/12/01 - end: 2002/12/01 - climfiles_root: '/group_workspaces/jasmin4/esmeval/autoassess_specific_files/files' # on Jasmin - - plot_standard: - description: Wrapper to collect and plot previously calculated metrics - scripts: - plot_landsurf_snow: &plot_landsurf_snow_settings - <<: *autoassess_landsurf_snow_settings - control_model: MPI-ESM-LR - exp_model: MPI-ESM-MR - script: autoassess/plot_autoassess_metrics.py - ancestors: ['*/autoassess_landsurf_snow'] - title: "Plot Land-Surface Snow Metrics" - plot_name: "Snow_Metrics" - diag_tag: aa_landsurf_snow - diag_name: autoassess_landsurf_snow diff --git a/esmvaltool/recipes/recipe_autoassess_landsurface_soilmoisture.yml b/esmvaltool/recipes/recipe_autoassess_landsurface_soilmoisture.yml index 78a6b11069..061a6a6dfa 100644 --- a/esmvaltool/recipes/recipe_autoassess_landsurface_soilmoisture.yml +++ b/esmvaltool/recipes/recipe_autoassess_landsurface_soilmoisture.yml @@ -4,27 +4,31 @@ documentation: description: | Recipe that runs the Autoassess Land-surface assessment area diagnostic. - Climatological files are stored externally to avoid overloading the - ESMValTool source. See /group_workspaces/jasmin4/esmeval/autoassess_specific_files - (on JASMIN). - Missing variable mass_fraction_of_unfrozen_water_in_soil_moisture and - mass_fraction_of_frozen_water_in_soil_moisture. authors: - - pred_va + - predoi_valeriu + - sellar_alistair + + title: Land-surface diagnostic that computes soilmoisture indices (from Autoassess). 
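+
+  # Editor's note - a hedged sketch, not part of the upstream recipe: the
+  # derived variable `sm` used below is volumetric soil moisture obtained
+  # from mrsos (kg m-2 of water in the top 10 cm of soil). Assuming a water
+  # density of about 1000 kg m-3, the derivation amounts to roughly:
+  #   sm [m3 m-3] = mrsos / (1000 kg m-3 * 0.1 m) = mrsos / 100
+  # so, e.g., mrsos = 25 kg m-2 corresponds to sm = 0.25 m3 m-3.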
+ + references: + - esacci-soilmoisture + - dorigo17rse + - gruber19essd maintainer: - - pred_va + - predoi_valeriu datasets: - - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1992, end_year: 2002} - - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1992, end_year: 2002} + - {dataset: ACCESS-CM2, project: CMIP6, exp: historical, grid: gn, ensemble: r1i1p1f1, start_year: 1992, end_year: 2002} + - {dataset: E3SM-1-0, project: CMIP6, exp: historical, grid: gr, ensemble: r1i1p1f1, start_year: 1992, end_year: 2002} preprocessors: - pp_aa_area: - regrid: # NOT USED - target_grid: 0.15x0.15 - scheme: linear + seasonal: + climate_statistics: + operator: mean + period: seasonal + seasons: ['DJF', 'MAM', 'JJA', 'SON'] diagnostics: aa_landsurf_soilmoisture: @@ -32,27 +36,28 @@ diagnostics: variables: mrsos: # moisture_content_of_soil_layer mip: Lmon - # TODO: mass_fraction_of_unfrozen_water_in_soil_moisture - # TODO: mass_fraction_of_frozen_water_in_soil_moisture + preprocessor: seasonal + sm: # Volumetric Moisture in Upper Portion of Soil Column + mip: Lmon + project: CMIP5 + derive: true + preprocessor: seasonal + additional_datasets: + - {dataset: ESACCI-SOILMOISTURE, project: OBS, type: sat, version: L3S-SSMV-COMBINED-v4.2, tier: 2, start_year: 1999, end_year: 2008} scripts: autoassess_landsurf_soilmoisture: &autoassess_landsurf_soilmoisture_settings - script: autoassess/autoassess_area_base.py - title: "Autoassess Land-Surface Soilmoisture Diagnostic" + script: autoassess/land_surface_soilmoisture/soilmoisture.py area: land_surface_soilmoisture - control_model: IPSL-CM5A-LR - exp_model: inmcm4 - obs_models: [] - start: 1997/12/01 - end: 2002/12/01 - climfiles_root: '/group_workspaces/jasmin4/esmeval/autoassess_specific_files/files' + control_model: ACCESS-CM2 + exp_model: E3SM-1-0 plot_standard: description: Wrapper to collect and plot previously calculated metrics scripts: plot_landsurf_soilmoisture: &plot_landsurf_soilmoisture_settings <<: *autoassess_landsurf_soilmoisture_settings - control_model: IPSL-CM5A-LR - exp_model: inmcm4 + control_model: ACCESS-CM2 + exp_model: E3SM-1-0 script: autoassess/plot_autoassess_metrics.py ancestors: ['*/autoassess_landsurf_soilmoisture'] title: "Plot Land-Surface Soilmoisture Metrics" diff --git a/esmvaltool/recipes/recipe_autoassess_landsurface_surfrad.yml b/esmvaltool/recipes/recipe_autoassess_landsurface_surfrad.yml index 1bf4943672..3d77ed02c1 100644 --- a/esmvaltool/recipes/recipe_autoassess_landsurface_surfrad.yml +++ b/esmvaltool/recipes/recipe_autoassess_landsurface_surfrad.yml @@ -11,15 +11,21 @@ documentation: checked. authors: - - pred_va + - predoi_valeriu + - sellar_alistair + + title: Land-surface diagnostic that computes surface radiation indices (from Autoassess). 
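+
+  # Editor's note - a hedged sketch, not part of the upstream recipe: the
+  # rsns and rlns variables below are derived net surface fluxes. With the
+  # standard CMOR radiation variables, the derivations should amount to:
+  #   rsns = rsds - rsus   # surface net shortwave, all sky
+  #   rlns = rlds - rlus   # surface net longwave, all sky
+  # e.g. rsds = 200 W m-2 and rsus = 30 W m-2 give rsns = 170 W m-2.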
+ + references: + - loeb19jclim + - kato18ebaf maintainer: - - pred_va + - predoi_valeriu datasets: - - {dataset: MPI-ESM-LR, project: CMIP5, mip: Amon, exp: amip, ensemble: r1i1p1, start_year: 1992, end_year: 2002} - - {dataset: MPI-ESM-MR, project: CMIP5, mip: Amon, exp: amip, ensemble: r1i1p1, start_year: 1992, end_year: 2002} - - {dataset: inmcm4, project: CMIP5, mip: Amon, exp: amip, ensemble: r1i1p1, start_year: 1992, end_year: 2002} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: amip, ensemble: r1i1p1, start_year: 1992, end_year: 2002} + - {dataset: MPI-ESM-MR, project: CMIP5, exp: amip, ensemble: r1i1p1, start_year: 1992, end_year: 2002} preprocessors: pp_aa_area: @@ -34,13 +40,19 @@ diagnostics: rsns: # Surf SW net all sky derive: true force_derivation: false - fx_files: [sftlf] + mip: Amon + additional_datasets: + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, start_year: 2001, end_year: 2012, tier: 1} + sftlf: + mip: fx rlns: # Surf LW net all sky derive: true force_derivation: false - fx_files: [sftlf] - additional_datasets: - - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, start_year: 2001, end_year: 2012, tier: 1} + mip: Amon + additional_datasets: + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, start_year: 2001, end_year: 2012, tier: 1} + sftlf: + mip: fx scripts: autoassess_landsurf_surfrad: &autoassess_landsurf_surfrad_settings script: autoassess/autoassess_area_base.py @@ -49,9 +61,8 @@ diagnostics: control_model: MPI-ESM-LR exp_model: MPI-ESM-MR obs_models: [CERES-EBAF] - obs_type: obs4mips - fx: [sftlf] - start: 1997/12/01 + obs_type: obs4MIPs + start: 1993/12/01 end: 2002/12/01 plot_standard: diff --git a/esmvaltool/recipes/recipe_autoassess_radiation_rms_Amon_all.yml b/esmvaltool/recipes/recipe_autoassess_radiation_rms_Amon_all.yml deleted file mode 100644 index da7fac67b5..0000000000 --- a/esmvaltool/recipes/recipe_autoassess_radiation_rms_Amon_all.yml +++ /dev/null @@ -1,132 +0,0 @@ -# ESMValTool -# recipe_autoassess_radiation_rms_Amon_all.yml ---- -documentation: - description: | - Diagnostics of spatial RMS errors of radiation and clouds. 
- - authors: - - read_si - - pred_va - - sell_al - - maintainer: - - pred_va - - projects: - - cmug - -datasets: - - {dataset: MPI-ESM-LR, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2005} - - {dataset: NorESM1-M, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2005} - -preprocessors: - pp_rad: - regrid: - target_grid: 1x1 - scheme: linear - pp_rad_derive_var: - # use this not to overwrite variables that are used both - # for derivation and as standalone diag vars - regrid: - target_grid: 1x1 - scheme: linear - -diagnostics: - radiation_Amon_all_CERES-EBAF: - description: "CMIP5 vs CERES-EBAF" - variables: - rtnt: # TOA TOT net - preprocessor: pp_rad_derive_var - derive: true - force_derivation: false - rsnt: # TOA SW net - preprocessor: pp_rad_derive_var - derive: true - force_derivation: false - swcre: # TOA SW Cloud Radiative Effect - preprocessor: pp_rad_derive_var - derive: true - force_derivation: false - lwcre: # TOA LW Cloud Radiative Effect - preprocessor: pp_rad_derive_var - derive: true - force_derivation: false - rsns: # obs: ceres; name: Surf SW net all sky - preprocessor: pp_rad_derive_var - derive: true - force_derivation: false - rlns: # Surf LW net all sky - preprocessor: pp_rad_derive_var - derive: true - force_derivation: false - rsut: # TOA SW up all sky - preprocessor: pp_rad - rlut: # TOA LW up all sky - preprocessor: pp_rad - rsutcs: # TOA SW up clear sky - preprocessor: pp_rad - additional_datasets: - - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, start_year: 2001, end_year: 2012, tier: 1} - scripts: - autoassess_Radiation_rms_Amon_all: - script: autoassess/autoassess_radiation_rms.py - title: "Autoassess Radiation RMS Amon All (vs. CERES-EBAF)" - control_model: MPI-ESM-LR - exper_model: NorESM1-M - observational_datasets: ['CERES-EBAF'] # list of at least one element; if no OBS wanted comment out - analysis_type: lat_lon # use any of: lat_lon, zonal_mean, vertical_mean - landsea_mask: landsea.nc - - radiation_Amon_all_JRA-55: - description: "CMIP5 vs JRA-55" - variables: - rlutcs: # TOA LW up clear sky - preprocessor: pp_rad - rldscs: # Surf LW down clear sky - preprocessor: pp_rad - additional_datasets: - - {dataset: JRA-55, project: ana4mips, type: reanalysis, start_year: 2001, end_year: 2005, tier: 1} - scripts: - autoassess_Radiation_rms_Amon_all: - script: autoassess/autoassess_radiation_rms.py - title: "Autoassess Radiation RMS Amon All (vs. JRA-55)" - control_model: MPI-ESM-LR - exper_model: NorESM1-M - observational_datasets: ['JRA-55'] # list of at least one element; if no OBS wanted comment out - analysis_type: lat_lon # use any of: lat_lon, zonal_mean, vertical_mean - landsea_mask: landsea.nc - - radiation_Amon_all_SSMI-MERIS: - description: "CMIP5 vs SSMI-MERIS" - variables: - prw: # Column Water vapour - preprocessor: pp_rad - additional_datasets: - - {dataset: SSMI-MERIS, project: obs4mips, level: L3, version: v1-00, start_year: 2003, end_year: 2008, tier: 1} - scripts: - autoassess_Radiation_rms_Amon_all: - script: autoassess/autoassess_radiation_rms.py - title: "Autoassess Radiation RMS Amon All (vs. 
SSMI-MERIS)" - control_model: MPI-ESM-LR - exper_model: NorESM1-M - observational_datasets: ['SSMI-MERIS'] # list of at least one element; if no OBS wanted comment out - analysis_type: lat_lon # use any of: lat_lon, zonal_mean, vertical_mean - landsea_mask: landsea.nc - - radiation_Amon_all_GPCP-SG: - description: "CMIP5 vs GPCP-SG" - variables: - pr: # Precipitation - preprocessor: pp_rad - additional_datasets: - - {dataset: GPCP-SG, project: obs4mips, level: L3, version: v2.2, start_year: 2001, end_year: 2009, tier: 1} - scripts: - autoassess_Radiation_rms_Amon_all: - script: autoassess/autoassess_radiation_rms.py - title: "Autoassess Radiation RMS Amon All (vs. GPCP-SG)" - control_model: MPI-ESM-LR - exper_model: NorESM1-M - observational_datasets: ['GPCP-SG'] # list of at least one element; if no OBS wanted comment out - analysis_type: lat_lon # use any of: lat_lon, zonal_mean, vertical_mean - landsea_mask: landsea.nc diff --git a/esmvaltool/recipes/recipe_autoassess_radiation_rms_Amon_obs.yml b/esmvaltool/recipes/recipe_autoassess_radiation_rms_Amon_obs.yml deleted file mode 100644 index 5ea1f52ed1..0000000000 --- a/esmvaltool/recipes/recipe_autoassess_radiation_rms_Amon_obs.yml +++ /dev/null @@ -1,137 +0,0 @@ -# ESMValTool -# recipe_autoassess_radiation_rms_Amon_obs.yml ---- -documentation: - description: | - Diagnostics of spatial RMS errors of radiation and clouds. - - authors: - - read_si - - pred_va - - sell_al - - maintainer: - - pred_va - - projects: - - cmug - -datasets: - # ESMValTool v1 has no specific models here; it says use whatever you want but no more than what Amon_all uses - - {dataset: MPI-ESM-LR, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2005} - - {dataset: NorESM1-M, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2005} - -preprocessors: - pp_rad: - regrid: - target_grid: 1x1 - scheme: linear - pp_rad_derive_var: - # use this not to overwrite variables that are used both - # for derivation and as standalone diag vars - regrid: - target_grid: 1x1 - scheme: linear - -diagnostics: - radiation_Amon_obs_CERES-EBAF: - description: "CMIP5 vs CERES-EBAF" - variables: - rtnt: # TOA TOT net - preprocessor: pp_rad_derive_var - derive: true - force_derivation: false - rsnt: # TOA SW net - preprocessor: pp_rad_derive_var - derive: true - force_derivation: false - swcre: # TOA SW Cloud Radiative Effect - preprocessor: pp_rad_derive_var - derive: true - force_derivation: false - lwcre: # TOA LW Cloud Radiative Effect - preprocessor: pp_rad_derive_var - derive: true - force_derivation: false - rsns: # Surf SW net all sky - preprocessor: pp_rad_derive_var - derive: true - force_derivation: false - rlns: # Surf LW net all sky - preprocessor: pp_rad_derive_var - derive: true - force_derivation: false - rsut: # TOA SW up all sky - preprocessor: pp_rad - rlut: # TOA LW up all sky - preprocessor: pp_rad - rsutcs: # TOA SW up clear sky - preprocessor: pp_rad - additional_datasets: - - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, start_year: 2001, end_year: 2012, tier: 1} - - {dataset: CERES-SYN1deg, project: OBS, type: sat, version: Ed3A, start_year: 2001, end_year: 2015, tier: 3} - scripts: - autoassess_Radiation_rms_Amon_obs: - script: autoassess/autoassess_radiation_rms.py - title: "Autoassess Radiation RMS Amon All (vs. 
CERES-EBAF)" - control_model: MPI-ESM-LR - exper_model: NorESM1-M - observational_datasets: ['CERES-EBAF', 'CERES-SYN1deg'] # list of at least one element; if no OBS wanted comment out - analysis_type: lat_lon # use any of: lat_lon, zonal_mean, vertical_mean - landsea_mask: landsea.nc - - radiation_Amon_obs_JRA-55: - description: "CMIP5 vs JRA-55" - variables: - rlutcs: # TOA LW up clear sky - preprocessor: pp_rad - rldscs: # Surf LW down clear sky - preprocessor: pp_rad - additional_datasets: - - {dataset: JRA-55, project: ana4mips, type: reanalysis, start_year: 2001, end_year: 2005, tier: 1} - - {dataset: CERES-SYN1deg, project: OBS, type: sat, version: Ed3A, start_year: 2001, end_year: 2015, tier: 3} - scripts: - autoassess_Radiation_rms_Amon_obs: - script: autoassess/autoassess_radiation_rms.py - title: "Autoassess Radiation RMS Amon All (vs. JRA-55)" - control_model: MPI-ESM-LR - exper_model: NorESM1-M - observational_datasets: ['JRA-55', 'CERES-SYN1deg'] # list of at least one element; if no OBS wanted comment out - analysis_type: lat_lon # use any of: lat_lon, zonal_mean, vertical_mean - landsea_mask: landsea.nc - - radiation_Amon_obs_SSMI-MERIS: - description: "CMIP5 vs SSMI-MERIS" - variables: - prw: # Column Water vapour - preprocessor: pp_rad - additional_datasets: - - {dataset: SSMI-MERIS, project: obs4mips, level: L3, version: v1-00, start_year: 2003, end_year: 2008, tier: 1} - - {dataset: SSMI, project: obs4mips, level: L3, version: RSSv07r00, start_year: 1988, end_year: 2006, tier: 1} - scripts: - autoassess_Radiation_rms_Amon_obs: - script: autoassess/autoassess_radiation_rms.py - title: "Autoassess Radiation RMS Amon All (vs. SSMI-MERIS)" - control_model: MPI-ESM-LR - exper_model: NorESM1-M - observational_datasets: ['SSMI-MERIS', 'SSMI'] # list of at least one element; if no OBS wanted comment out - analysis_type: lat_lon # use any of: lat_lon, zonal_mean, vertical_mean - landsea_mask: landsea.nc - - radiation_Amon_obs_GPCP-SG: - description: "CMIP5 vs GPCP-SG" - variables: - pr: # Precipitation - preprocessor: pp_rad - additional_datasets: - - {dataset: GPCP-SG, project: obs4mips, level: L3, version: v2.2, start_year: 2001, end_year: 2009, tier: 1} - - {dataset: TRMM-L3, project: obs4mips, level: v7, version: 7A, start_year: 1998, end_year: 2013, tier: 1} - scripts: - autoassess_Radiation_rms_Amon_obs: - script: autoassess/autoassess_radiation_rms.py - title: "Autoassess Radiation RMS Amon All (vs. GPCP-SG)" - control_model: MPI-ESM-LR - exper_model: NorESM1-M - observational_datasets: ['GPCP-SG', 'TRMM-L3'] # list of at least one element; if no OBS wanted comment out - analysis_type: lat_lon # use any of: lat_lon, zonal_mean, vertical_mean - landsea_mask: landsea.nc diff --git a/esmvaltool/recipes/recipe_autoassess_radiation_rms_cfMon_all.yml b/esmvaltool/recipes/recipe_autoassess_radiation_rms_cfMon_all.yml deleted file mode 100644 index 95d825f11a..0000000000 --- a/esmvaltool/recipes/recipe_autoassess_radiation_rms_cfMon_all.yml +++ /dev/null @@ -1,68 +0,0 @@ -# ESMValTool -# recipe_autoassess_radiation_rms_cfMon_all.yml ---- -documentation: - description: | - Diagnostics of spatial RMS errors of radiation and clouds. 
- - authors: - - tsus_yo - - read_si - - pred_va - - maintainer: - - pred_va - - projects: - - cmug - -datasets: - - {dataset: MRI-CGCM3, project: CMIP5, mip: cfMon, exp: amip, ensemble: r1i1p1, start_year: 1985, end_year: 1988} - - {dataset: HadGEM2-A, project: CMIP5, mip: cfMon, exp: amip, ensemble: r1i1p1, start_year: 1985, end_year: 1988} - -preprocessors: - pp_rad_derive_var: - regrid: - target_grid: 1x1 - scheme: linear - -diagnostics: - radiation_cfMon_all_ISCCP: - description: "CMIP5 vs ISCCP Clouds" - variables: - cllmtisccp: # Low-intermediate Cloud - preprocessor: pp_rad_derive_var - derive: true - force_derivation: false - clltkisccp: # Low-thick Cloud - preprocessor: pp_rad_derive_var - derive: true - force_derivation: false - clmmtisccp: # Middle-intermediate Cloud - preprocessor: pp_rad_derive_var - derive: true - force_derivation: false - clmtkisccp: # Middle-thick Cloud - preprocessor: pp_rad_derive_var - derive: true - force_derivation: false - clhmtisccp: # High-intermediate Cloud - preprocessor: pp_rad_derive_var - derive: true - force_derivation: false - clhtkisccp: # High-thick Cloud - preprocessor: pp_rad_derive_var - derive: true - force_derivation: false - additional_datasets: - - {dataset: ISCCP, project: obs4mips, level: L3, version: V1.0, start_year: 1985, end_year: 1988, tier: 1} - scripts: - autoassess_Radiation_rms_cfMon_all: - script: autoassess/autoassess_radiation_rms.py - title: "Autoassess Radiation RMS Amon All (vs. CERES-EBAF)" - control_model: MRI-CGCM3 - exper_model: HadGEM2-A - observational_datasets: ['ISCCP'] # list of at least one element; if no OBS wanted comment out - analysis_type: lat_lon # use any of: lat_lon, zonal_mean, vertical_mean - landsea_mask: landsea.nc - diff --git a/esmvaltool/recipes/recipe_autoassess_stratosphere.yml b/esmvaltool/recipes/recipe_autoassess_stratosphere.yml index e829a22afc..12c23ca86a 100644 --- a/esmvaltool/recipes/recipe_autoassess_stratosphere.yml +++ b/esmvaltool/recipes/recipe_autoassess_stratosphere.yml @@ -18,10 +18,13 @@ documentation: file request in this namelist when running. authors: - - pred_va + - predoi_valeriu + - sellar_alistair + + title: Diagnostic that computes stratospheric indices (from Autoassess). 
maintainer: - - pred_va + - predoi_valeriu datasets: - {dataset: MPI-ESM-LR, project: CMIP5, mip: Amon, exp: amip, ensemble: r1i1p1, start_year: 1992, end_year: 2002} @@ -51,7 +54,7 @@ diagnostics: preprocessor: pp_aa_area mip: Amon additional_datasets: - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, start_year: 2000, end_year: 2002, tier: 3} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, start_year: 2000, end_year: 2002, tier: 3} scripts: autoassess_strato_test_1: &autoassess_strato_test_1_settings script: autoassess/autoassess_area_base.py diff --git a/esmvaltool/recipes/recipe_capacity_factor.yml b/esmvaltool/recipes/recipe_capacity_factor.yml new file mode 100644 index 0000000000..2d34b0f1da --- /dev/null +++ b/esmvaltool/recipes/recipe_capacity_factor.yml @@ -0,0 +1,60 @@ +# ESMValTool +# recipe_capacity_factor.yml +--- +documentation: + title: | + Diurnal Temperature Variation (DTR) Indicator and Wind Capacity Factor + + description: | + Diurnal Temperature Variation (DTR) Indicator and Wind Capacity Factor + + authors: + - hunter_alasdair + - manubens_nicolau + - lledo_llorenc + - caron_louis-philippe + - bojovic_dragana + - gonzalez-reviriego_nube + - perez-zanon_nuria + + maintainer: + - unmaintained + + projects: + - c3s-magic + +datasets: + - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1980, end_year: 2005} + - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1980, end_year: 2005} + +preprocessors: + preproc: + regrid: + target_grid: IPSL-CM5A-MR + scheme: linear + extract_region: + start_longitude: -20 + end_longitude: 60 + start_latitude: 30 + end_latitude: 80 + extract_season: + season: djf + mask_landsea: + mask_out: sea + +diagnostics: + capacity_factor: + description: Calculate the wind power capacity factor. + variables: + sfcWind: + preprocessor: preproc + mip: day + scripts: + main: + script: magic_bsc/capacity_factor.R + power_curves: + - PowerCurves/Enercon_E70_2.3MW.txt + - PowerCurves/Gamesa_G80_2.0MW.txt + - PowerCurves/Gamesa_G87_2.0MW.txt + - PowerCurves/Vestas_V100_2.0MW.txt + - PowerCurves/Vestas_V110_2.0MW.txt diff --git a/esmvaltool/recipes/recipe_capacity_factor_wp7.yml b/esmvaltool/recipes/recipe_capacity_factor_wp7.yml deleted file mode 100644 index 99e8eff71b..0000000000 --- a/esmvaltool/recipes/recipe_capacity_factor_wp7.yml +++ /dev/null @@ -1,55 +0,0 @@ -# ESMValTool -# recipe_capacity_factor_wp7.yml ---- -documentation: - description: | - Diurnal Temperature Variation (DTR) Indicator and Wind Capacity Factor - - authors: - - hunt_al - - manu_ni - - lled_ll - - caro_lo - - bojo_dr - - gonz_nu - - pere_nu - - projects: - - c3s-magic - -datasets: - - {dataset: IPSL-CM5A-MR, type: exp, project: CMIP5, mip: day, exp: historical, ensemble: r1i1p1, start_year: 1980, end_year: 2005} - #- {dataset: MPI-ESM-MR, type: exp, project: CMIP5, mip: day, exp: rcp26, ensemble: r1i1p1, start_year: 2021, end_year: 2025} - #- {dataset: MPI-ESM-MR, type: exp, project: CMIP5, mip: day, exp: rcp45, ensemble: r1i1p1, start_year: 2031, end_year: 2035} - -preprocessors: - preproc: - regrid: - target_grid: IPSL-CM5A-MR - scheme: linear - mask_fillvalues: - threshold_fraction: 0.95 - extract_region: - start_longitude: 200 - end_longitude: 300 - start_latitude: 27 - end_latitude: 70 - extract_season: - season: djf - -diagnostics: - capacity_factor: - description: Calculate the wind power capacity factor. 
- variables: - sfcWind: - preprocessor: preproc - mip: day - scripts: - main: - script: magic_bsc/capacity_factor.r - power_curves: - - PowerCurves/Enercon_E70_2.3MW.txt - - PowerCurves/Gamesa_G80_2.0MW.txt - - PowerCurves/Gamesa_G87_2.0MW.txt - - PowerCurves/Vestas_V100_2.0MW.txt - - PowerCurves/Vestas_V110_2.0MW.txt diff --git a/esmvaltool/recipes/recipe_carvalhais14nat.yml b/esmvaltool/recipes/recipe_carvalhais14nat.yml new file mode 100644 index 0000000000..63bfbb1edd --- /dev/null +++ b/esmvaltool/recipes/recipe_carvalhais14nat.yml @@ -0,0 +1,140 @@ +# ESMValTool +# recipe_carvalhais14nat.yml +--- +documentation: + title: Ecosystem Turnover Time of Carbon + description: | + Recipe to evaluate the ecosystem turnover time of carbon based on + Carvalhais et al., 2014, Nature. + The data required in the obs_details section can be obtained at + http://www.bgc-jena.mpg.de/geodb/BGI/tau4ESMValTool.php + and have to be stored in the auxiliary_data_dir defined in the + configuration, in the subdirectory given by obs_data_subdir in the + obs_details section below. + + + authors: + - koirala_sujan + + maintainer: + - koirala_sujan + + references: + - carvalhais14nature + + projects: + - crescendo + + +datasets: + + - {dataset: HadGEM2-ES, project: CMIP5, ensemble: r1i1p1} + - {dataset: MIROC-ESM, project: CMIP5, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-MR, project: CMIP5, ensemble: r1i1p1} + - {dataset: NorESM1-M, project: CMIP5, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, project: CMIP5, ensemble: r1i1p1} + - {dataset: CCSM4, project: CMIP5, ensemble: r1i1p1} + - {dataset: GFDL-ESM2G, project: CMIP5, ensemble: r1i1p1} + + +obs_details: &obs_details + obs_info: + obs_data_subdir: 'carvalhais14nat' + source_label: 'Carvalhais2014' + variant_label: 'BE' + grid_label: 'gn' + grid_extent: '0.5x0.5' + # grid_label: 'gr' + # grid_extent: '2.5x1.875' + # 'gn': 0.5x0.5, native observation + # 'gr1': 2.812x2.813, bcc-csm1-1 + # 'gr2': 1.25x0.937, CCSM4 + # 'gr3': 2.812x2.813, CanESM2 + # 'gr4': 2.5x2.0, GFDL-ESM2G + # 'gr5': 1.875x1.241, HadGEM2-ES + # 'gr6': 2.0x1.5, inmcm4 + # 'gr7': 2.5x1.259, IPSL-CM5A-MR + # 'gr8': 2.812x2.813, MIROC-ESM + # 'gr9': 1.875x1.875, MPI-ESM-LR + # 'gr': 2.5x1.875, NorESM1-M + frequency: 'fx' + +# Example obs file name (here for grid_label 'gr5'): tau_ctotal_fx_Carvalhais2014_BE_gr5 + +preprocessors: + # -------------------------------------------------- + # preprocessors: time mean, spatial regrid, and mask + # -------------------------------------------------- + preproc_meanRegrid: + custom_order: true + regrid: + target_grid: 0.5x0.5 + # target_grid: 2.5x1.875 + scheme: nearest + mask_landsea: + mask_out: sea + multi_model_statistics: + span: overlap + statistics: [median] + climate_statistics: + operator: mean + period: full + +diagnostics: + # -------------------------------------------------- + # diagnostics: global and zonal turnover + # -------------------------------------------------- + diag_land_carbon_turnover: + description: Diagnose the turnover time of ecosystem carbon.
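+    # Editor's note - a hedged sketch, not part of the upstream recipe:
+    # following Carvalhais et al. (2014), the scripts below diagnose the
+    # turnover time from the time-mean fields as
+    #   tau [yr] = ctotal / gpp
+    # with ctotal = cVeg + cSoil (derived below). For example, ctotal =
+    # 30 kgC m-2 and gpp = 1.5 kgC m-2 yr-1 give tau = 20 yr.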
+ variables: + gpp: &variable_settings + # gross primary productivity + exp: historical + start_year: 1982 + end_year: 2005 + preprocessor: preproc_meanRegrid + mip: Lmon + ctotal: + # total carbon stock (cVeg + cSoil) + <<: *variable_settings + derive: true + mip: Lmon + pr: + # mean precipitation + <<: *variable_settings + mip: Amon + tas: + # mean air temperature + <<: *variable_settings + mip: Amon + scripts: + zonal_turnover_time: + # evaluate the zonal turnover time + script: land_carbon_cycle/diag_zonal_turnover.py + <<: *obs_details + obs_variable: + - tau_ctotal + fig_config: + ax_fs: 8 + global_turnover_time: + # evaluate the global distributions of turnover time + script: land_carbon_cycle/diag_global_turnover.py + <<: *obs_details + obs_variable: + - tau_ctotal + fig_config: + ax_fs: 8 + zonal_correlation_pearson: + # evaluate the zonal correlation (pearson) + # of turnover time with climate + script: land_carbon_cycle/diag_zonal_correlation.py + <<: *obs_details + correlation_method: pearson + obs_variable: + - r_tau_ctotal_tas + - r_tau_ctotal_pr + fig_config: + ax_fs: 8 diff --git a/esmvaltool/recipes/recipe_climate_change_hotspot.yml b/esmvaltool/recipes/recipe_climate_change_hotspot.yml new file mode 100644 index 0000000000..880b9c66d1 --- /dev/null +++ b/esmvaltool/recipes/recipe_climate_change_hotspot.yml @@ -0,0 +1,702 @@ +# ESMValTool +--- +documentation: + title: Mediterranean hotspot figures + description: | + Recipe to compute the CMIP5 and CMIP6 projected + Mediterranean hotspot for Near-Surface Air Temperature (tas) and Precipitation (pr) changes. + authors: + - cos_josep + maintainer: + - loosveldt-tomas_saskia + - cos_josep + references: + - cos22esd + +cmip5_85: &cmip5_85 + - &cmip5_h-rcp85 { + project: CMIP5, + exp: [historical, rcp85], + dataset: ACCESS1-0, + ensemble: r1i1p1, + start_year: 1980, + end_year: 2100, + } + + - {<<: *cmip5_h-rcp85, dataset: ACCESS1-3} + + - {<<: *cmip5_h-rcp85, dataset: bcc-csm1-1} + + - {<<: *cmip5_h-rcp85, dataset: bcc-csm1-1-m} + + - {<<: *cmip5_h-rcp85, dataset: BNU-ESM} + + - {<<: *cmip5_h-rcp85, dataset: CanESM2, ensemble: r(1:5)i1p1} + + - {<<: *cmip5_h-rcp85, dataset: CCSM4, ensemble: r(1:5)i1p1} + + - {<<: *cmip5_h-rcp85, dataset: CESM1-BGC} + + - {<<: *cmip5_h-rcp85, dataset: CESM1-CAM5, ensemble: r(1:3)i1p1} + + - {<<: *cmip5_h-rcp85, dataset: CMCC-CESM} + + - {<<: *cmip5_h-rcp85, dataset: CMCC-CM} + + - {<<: *cmip5_h-rcp85, dataset: CMCC-CMS} + + - {<<: *cmip5_h-rcp85, dataset: CNRM-CM5, ensemble: r(1:2)i1p1} + - {<<: *cmip5_h-rcp85, dataset: CNRM-CM5, ensemble: r4i1p1} + - {<<: *cmip5_h-rcp85, dataset: CNRM-CM5, ensemble: r6i1p1} + - {<<: *cmip5_h-rcp85, dataset: CNRM-CM5, ensemble: r10i1p1} + + - {<<: *cmip5_h-rcp85, dataset: CSIRO-Mk3-6-0, ensemble: r(1:10)i1p1} + + # - {<<: *cmip5_h-rcp85, dataset: FGOALS-s2, ensemble: r(1:3)i1p1} + + - {<<: *cmip5_h-rcp85, dataset: FIO-ESM, ensemble: r(1:3)i1p1} + + - {<<: *cmip5_h-rcp85, dataset: GFDL-CM3} + + - {<<: *cmip5_h-rcp85, dataset: GFDL-ESM2G} + + - {<<: *cmip5_h-rcp85, dataset: GFDL-ESM2M} + + - {<<: *cmip5_h-rcp85, dataset: GISS-E2-H, ensemble: r(1:2)i1p1} + + - {<<: *cmip5_h-rcp85, dataset: GISS-E2-H-CC} + + # - {<<: *cmip5_h-rcp85, dataset: GISS-E2-R, ensemble: r(1:2)i1p1} + + - {<<: *cmip5_h-rcp85, dataset: GISS-E2-R, ensemble: r1i1p1} + + - {<<: *cmip5_h-rcp85, dataset: GISS-E2-R-CC} + + # - {<<: *cmip5_h-rcp85, dataset: HadGEM2-AO} + + - {<<: *cmip5_h-rcp85, dataset: HadGEM2-ES, ensemble: r(1:4)i1p1} + + - {<<: *cmip5_h-rcp85, dataset: inmcm4} + + - {<<: 
*cmip5_h-rcp85, dataset: IPSL-CM5A-LR, ensemble: r(1:4)i1p1} + + - {<<: *cmip5_h-rcp85, dataset: IPSL-CM5A-MR} + + - {<<: *cmip5_h-rcp85, dataset: IPSL-CM5B-LR} + + - {<<: *cmip5_h-rcp85, dataset: MIROC-ESM} + + - {<<: *cmip5_h-rcp85, dataset: MIROC-ESM-CHEM} + + - {<<: *cmip5_h-rcp85, dataset: MIROC5, ensemble: r(2:3)i1p1} + + - {<<: *cmip5_h-rcp85, dataset: MPI-ESM-LR, ensemble: r(1:3)i1p1} + + - {<<: *cmip5_h-rcp85, dataset: MPI-ESM-MR} + + - {<<: *cmip5_h-rcp85, dataset: MRI-CGCM3} + + # - {<<: *cmip5_h-rcp85, dataset: MRI-ESM1} + + - {<<: *cmip5_h-rcp85, dataset: NorESM1-M} + +cmip5_45: &cmip5_45 + - &cmip5_h-rcp45 { + project: CMIP5, + exp: [historical, rcp45], + dataset: ACCESS1-0, + ensemble: r1i1p1, + start_year: 1980, + end_year: 2100, + } + + - {<<: *cmip5_h-rcp45, dataset: ACCESS1-3} + + - {<<: *cmip5_h-rcp45, dataset: bcc-csm1-1} + + - {<<: *cmip5_h-rcp45, dataset: bcc-csm1-1-m} + + - {<<: *cmip5_h-rcp45, dataset: BNU-ESM} + + - {<<: *cmip5_h-rcp45, dataset: CanESM2, ensemble: r(1:5)i1p1} + + - {<<: *cmip5_h-rcp45, dataset: CCSM4, ensemble: r(1:5)i1p1} + + - {<<: *cmip5_h-rcp45, dataset: CESM1-BGC} + + - {<<: *cmip5_h-rcp45, dataset: CESM1-CAM5, ensemble: r(1:3)i1p1} + + - {<<: *cmip5_h-rcp45, dataset: CMCC-CM} + + - {<<: *cmip5_h-rcp45, dataset: CMCC-CMS} + + - {<<: *cmip5_h-rcp45, dataset: CSIRO-Mk3-6-0, ensemble: r(1:10)i1p1} + + # - {<<: *cmip5_h-rcp45, dataset: FGOALS-s2, ensemble: r(1:3)i1p1} + + - {<<: *cmip5_h-rcp45, dataset: FIO-ESM, ensemble: r(1:3)i1p1} + + - {<<: *cmip5_h-rcp45, dataset: GFDL-CM3} + + - {<<: *cmip5_h-rcp45, dataset: GISS-E2-H, ensemble: r(1:3)i1p1} + - {<<: *cmip5_h-rcp45, dataset: GISS-E2-H, ensemble: r5i1p1} + + - {<<: *cmip5_h-rcp45, dataset: GISS-E2-R, ensemble: r2i1p1} + - {<<: *cmip5_h-rcp45, dataset: GISS-E2-R, ensemble: r6i1p1} + + - {<<: *cmip5_h-rcp45, dataset: HadGEM2-ES, ensemble: r(1:4)i1p1} + + - {<<: *cmip5_h-rcp45, dataset: inmcm4} + + - {<<: *cmip5_h-rcp45, dataset: IPSL-CM5A-LR, ensemble: r3i1p1} + + - {<<: *cmip5_h-rcp45, dataset: IPSL-CM5A-MR} + + - {<<: *cmip5_h-rcp45, dataset: IPSL-CM5B-LR} + + - {<<: *cmip5_h-rcp45, dataset: MIROC-ESM} + + - {<<: *cmip5_h-rcp45, dataset: MIROC-ESM-CHEM} + + - {<<: *cmip5_h-rcp45, dataset: MIROC5, ensemble: r(2:3)i1p1} + + - {<<: *cmip5_h-rcp45, dataset: MPI-ESM-LR, ensemble: r(1:3)i1p1} + + - {<<: *cmip5_h-rcp45, dataset: MPI-ESM-MR, ensemble: r(1:3)i1p1} + + - {<<: *cmip5_h-rcp45, dataset: MRI-CGCM3} + + - {<<: *cmip5_h-rcp45, dataset: NorESM1-M} + + - {<<: *cmip5_h-rcp45, dataset: NorESM1-ME} + +cmip5_26: &cmip5_26 + - &cmip5_h-rcp26 { + project: CMIP5, + exp: [historical, rcp26], + dataset: bcc-csm1-1, + ensemble: r1i1p1, + start_year: 1980, + end_year: 2100, + } + + - {<<: *cmip5_h-rcp26, dataset: bcc-csm1-1-m} + + - {<<: *cmip5_h-rcp26, dataset: BNU-ESM} + + - {<<: *cmip5_h-rcp26, dataset: CanESM2, ensemble: r(1:5)i1p1} + + - {<<: *cmip5_h-rcp26, dataset: CCSM4, ensemble: r(1:5)i1p1} + + - {<<: *cmip5_h-rcp26, dataset: CESM1-CAM5, ensemble: r(1:3)i1p1} + + - {<<: *cmip5_h-rcp26, dataset: CNRM-CM5} + + - {<<: *cmip5_h-rcp26, dataset: CSIRO-Mk3-6-0, ensemble: r(1:10)i1p1} + + - {<<: *cmip5_h-rcp26, dataset: FIO-ESM, ensemble: r(1:3)i1p1} + + - {<<: *cmip5_h-rcp26, dataset: GFDL-ESM2G} + + - {<<: *cmip5_h-rcp26, dataset: GFDL-ESM2M} + + - {<<: *cmip5_h-rcp26, dataset: GISS-E2-H} + + - {<<: *cmip5_h-rcp26, dataset: GISS-E2-R} + + # - {<<: *cmip5_h-rcp26, dataset: HadGEM2-AO} + + - {<<: *cmip5_h-rcp26, dataset: HadGEM2-ES, ensemble: r(1:4)i1p1} + + - {<<: *cmip5_h-rcp26, dataset: 
IPSL-CM5A-LR, ensemble: r1i1p1} + - {<<: *cmip5_h-rcp26, dataset: IPSL-CM5A-LR, ensemble: r(3:4)i1p1} + + - {<<: *cmip5_h-rcp26, dataset: IPSL-CM5A-MR} + + - {<<: *cmip5_h-rcp26, dataset: MIROC-ESM} + + - {<<: *cmip5_h-rcp26, dataset: MIROC-ESM-CHEM} + + - {<<: *cmip5_h-rcp26, dataset: MIROC5, ensemble: r(2:3)i1p1} + + - {<<: *cmip5_h-rcp26, dataset: MPI-ESM-LR, ensemble: r(1:3)i1p1} + + - {<<: *cmip5_h-rcp26, dataset: MPI-ESM-MR} + + - {<<: *cmip5_h-rcp26, dataset: MRI-CGCM3} + + - {<<: *cmip5_h-rcp26, dataset: NorESM1-M} + +cmip6_85: &cmip6_85 + - &cmip6_h-ssp585 { + project: CMIP6, + exp: [historical, ssp585], + dataset: ACCESS-CM2, + ensemble: r1i1p1f1, + grid: gn, + start_year: 1980, + end_year: 2100, + } + + - {<<: *cmip6_h-ssp585, dataset: ACCESS-ESM1-5, ensemble: r(1:3)i1p1f1} + + - {<<: *cmip6_h-ssp585, dataset: AWI-CM-1-1-MR} + + - {<<: *cmip6_h-ssp585, dataset: BCC-CSM2-MR} + + - { + <<: *cmip6_h-ssp585, + dataset: CanESM5, + ensemble: r(1:10)i1p1f1, + } + + - { + <<: *cmip6_h-ssp585, + dataset: CanESM5-CanOE, + ensemble: r(1:3)i1p2f1, + } + +# - {<<: *cmip6_h-ssp585, dataset: CESM2, ensemble: r(1:2)i1p1f1} + - {<<: *cmip6_h-ssp585, dataset: CESM2, ensemble: r1i1p1f1} + + - {<<: *cmip6_h-ssp585, dataset: CESM2-WACCM} + + - {<<: *cmip6_h-ssp585, dataset: CIESM, grid: gr} + + - {<<: *cmip6_h-ssp585, dataset: CMCC-CM2-SR5} + + - {<<: *cmip6_h-ssp585, dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr} + + - {<<: *cmip6_h-ssp585, dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr} + + - {<<: *cmip6_h-ssp585, dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr} + + - {<<: *cmip6_h-ssp585, dataset: EC-Earth3, ensemble: r4i1p1f1, grid: gr} + - {<<: *cmip6_h-ssp585, dataset: EC-Earth3, ensemble: r6i1p1f1, grid: gr} + - {<<: *cmip6_h-ssp585, dataset: EC-Earth3, ensemble: r9i1p1f1, grid: gr} + - {<<: *cmip6_h-ssp585, dataset: EC-Earth3, ensemble: r11i1p1f1, grid: gr} + - {<<: *cmip6_h-ssp585, dataset: EC-Earth3, ensemble: r13i1p1f1, grid: gr} + - {<<: *cmip6_h-ssp585, dataset: EC-Earth3, ensemble: r15i1p1f1, grid: gr} + + - {<<: *cmip6_h-ssp585, dataset: FGOALS-f3-L, grid: gr} + + - {<<: *cmip6_h-ssp585, dataset: FGOALS-g3, ensemble: r1i1p1f1} + + - {<<: *cmip6_h-ssp585, dataset: FIO-ESM-2-0, ensemble: r(1:3)i1p1f1} + + - {<<: *cmip6_h-ssp585, dataset: GFDL-ESM4, grid: gr1} + + - { + <<: *cmip6_h-ssp585, + dataset: GISS-E2-1-G, + ensemble: r1i1p3f1, + } + + - {<<: *cmip6_h-ssp585, dataset: HadGEM3-GC31-LL, ensemble: r(1:3)i1p1f3} + + - {<<: *cmip6_h-ssp585, dataset: IPSL-CM6A-LR, grid: gr} + + - {<<: *cmip6_h-ssp585, dataset: KACE-1-0-G, grid: gr} + + - {<<: *cmip6_h-ssp585, dataset: MCM-UA-1-0, ensemble: r1i1p1f2} # ensemble: [r1i1p1f1, r1i1p1f2] changed due to ESMValCore issue 1964 + + - {<<: *cmip6_h-ssp585, dataset: MIROC6, ensemble: r(1:3)i1p1f1} + + - {<<: *cmip6_h-ssp585, dataset: MIROC-ES2L, ensemble: r1i1p1f2} + + - {<<: *cmip6_h-ssp585, dataset: MPI-ESM1-2-HR} + + - {<<: *cmip6_h-ssp585, dataset: MPI-ESM1-2-LR, ensemble: r(1:10)i1p1f1} + + - {<<: *cmip6_h-ssp585, dataset: MRI-ESM2-0} + + - {<<: *cmip6_h-ssp585, dataset: NESM3, ensemble: r(1:2)i1p1f1} + + - {<<: *cmip6_h-ssp585, dataset: NorESM2-LM} + + - {<<: *cmip6_h-ssp585, dataset: NorESM2-MM} + + - {<<: *cmip6_h-ssp585, dataset: UKESM1-0-LL, ensemble: r(1:4)i1p1f2} + - {<<: *cmip6_h-ssp585, dataset: UKESM1-0-LL, ensemble: r8i1p1f2} + +cmip6_45: &cmip6_45 + - &cmip6_h-ssp245 { + project: CMIP6, + exp: [historical, ssp245], + dataset: ACCESS-CM2, + ensemble: r1i1p1f1, + grid: gn, + start_year: 1980, + end_year: 2100, + } + + - 
{<<: *cmip6_h-ssp245, dataset: ACCESS-CM2, ensemble: r2i1p1f1} + + - {<<: *cmip6_h-ssp245, dataset: ACCESS-ESM1-5, ensemble: r(1:10)i1p1f1} + + - {<<: *cmip6_h-ssp245, dataset: AWI-CM-1-1-MR} + + - {<<: *cmip6_h-ssp245, dataset: BCC-CSM2-MR, ensemble: r1i1p1f1} + + - {<<: *cmip6_h-ssp245, dataset: CanESM5, ensemble: r(1:10)i1p1f1} + + - { + <<: *cmip6_h-ssp245, + dataset: CanESM5-CanOE, + ensemble: r(1:3)i1p2f1, + } + + # - {<<: *cmip6_h-ssp245, dataset: CAS-ESM2-0, ensemble: r1i1p1f1} + # - {<<: *cmip6_h-ssp245, dataset: CAS-ESM2-0, ensemble: r3i1p1f1} + + - {<<: *cmip6_h-ssp245, dataset: CESM2, ensemble: r1i1p1f1} + - {<<: *cmip6_h-ssp245, dataset: CESM2, ensemble: r4i1p1f1} + - {<<: *cmip6_h-ssp245, dataset: CESM2, ensemble: r(10:11)i1p1f1} + + - {<<: *cmip6_h-ssp245, dataset: CESM2-WACCM, ensemble: r(1:3)i1p1f1} + + - {<<: *cmip6_h-ssp245, dataset: CIESM, grid: gr, ensemble: r1i1p1f1} + + - {<<: *cmip6_h-ssp245, dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1} + + - {<<: *cmip6_h-ssp245, dataset: CMCC-ESM2, ensemble: r1i1p1f1} + + - {<<: *cmip6_h-ssp245, dataset: CNRM-CM6-1, ensemble: r(1:6)i1p1f2, grid: gr} + + - {<<: *cmip6_h-ssp245, dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr} + + - { + <<: *cmip6_h-ssp245, + dataset: CNRM-ESM2-1, + ensemble: r(1:5)i1p1f2, + grid: gr, + } + + - { + <<: *cmip6_h-ssp245, + dataset: EC-Earth3, + ensemble: r(18:24)i1p1f1, + grid: gr, + } + - {<<: *cmip6_h-ssp245, dataset: EC-Earth3, ensemble: r2i1p1f1, grid: gr} + - {<<: *cmip6_h-ssp245, dataset: EC-Earth3, ensemble: r7i1p1f1, grid: gr} + + - {<<: *cmip6_h-ssp245, dataset: FGOALS-g3, ensemble: r(1:4)i1p1f1} + + - {<<: *cmip6_h-ssp245, dataset: FGOALS-f3-L, grid: gr} + + - {<<: *cmip6_h-ssp245, dataset: FIO-ESM-2-0, ensemble: r(1:3)i1p1f1} + + - {<<: *cmip6_h-ssp245, dataset: GFDL-ESM4, grid: gr1} + + - {<<: *cmip6_h-ssp245, dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3} + + - { + <<: *cmip6_h-ssp245, + dataset: IPSL-CM6A-LR, + ensemble: r(1:6)i1p1f1, + grid: gr, + } + - { + <<: *cmip6_h-ssp245, + dataset: IPSL-CM6A-LR, + ensemble: r(10:11)i1p1f1, + grid: gr, + } + - {<<: *cmip6_h-ssp245, dataset: IPSL-CM6A-LR, ensemble: r14i1p1f1, grid: gr} + - {<<: *cmip6_h-ssp245, dataset: IPSL-CM6A-LR, ensemble: r22i1p1f1, grid: gr} + - {<<: *cmip6_h-ssp245, dataset: IPSL-CM6A-LR, ensemble: r25i1p1f1, grid: gr} + + - {<<: *cmip6_h-ssp245, dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr} + - {<<: *cmip6_h-ssp245, dataset: KACE-1-0-G, ensemble: r3i1p1f1, grid: gr} + + - {<<: *cmip6_h-ssp245, dataset: KIOST-ESM, grid: gr1} + + - {<<: *cmip6_h-ssp245, dataset: MCM-UA-1-0, ensemble: r1i1p1f2} # ensemble: [r1i1p1f1, r1i1p1f2] changed due to ESMValCore issue 1964 + + - {<<: *cmip6_h-ssp245, dataset: MIROC6, ensemble: r(1:3)i1p1f1} + + - {<<: *cmip6_h-ssp245, dataset: MIROC-ES2L, ensemble: r1i1p1f2} + + - {<<: *cmip6_h-ssp245, dataset: MPI-ESM1-2-HR} + + - {<<: *cmip6_h-ssp245, dataset: MPI-ESM1-2-LR, ensemble: r(1:10)i1p1f1} + + - {<<: *cmip6_h-ssp245, dataset: MRI-ESM2-0, ensemble: r1i1p1f1} + + - {<<: *cmip6_h-ssp245, dataset: NESM3, ensemble: r(1:2)i1p1f1} + + - {<<: *cmip6_h-ssp245, dataset: NorESM2-LM, ensemble: r(1:3)i1p1f1} + + - {<<: *cmip6_h-ssp245, dataset: NorESM2-MM, ensemble: r1i1p1f1} + + - {<<: *cmip6_h-ssp245, dataset: UKESM1-0-LL, ensemble: r(1:4)i1p1f2} + - {<<: *cmip6_h-ssp245, dataset: UKESM1-0-LL, ensemble: r8i1p1f2} + +cmip6_26: &cmip6_26 + - &cmip6_h-ssp126 { + project: CMIP6, + exp: [historical, ssp126], + dataset: ACCESS-CM2, + ensemble: r1i1p1f1, + grid: gn, + start_year: 1980, + end_year: 
2100, + } + + - {<<: *cmip6_h-ssp126, dataset: ACCESS-ESM1-5, ensemble: r(1:3)i1p1f1} + + - {<<: *cmip6_h-ssp126, dataset: AWI-CM-1-1-MR} + + - {<<: *cmip6_h-ssp126, dataset: BCC-CSM2-MR, ensemble: r1i1p1f1} + + - {<<: *cmip6_h-ssp126, dataset: CanESM5, ensemble: r(1:10)i1p1f1} + + - {<<: *cmip6_h-ssp126, dataset: CanESM5-CanOE, ensemble: r(1:3)i1p2f1} + + - {<<: *cmip6_h-ssp126, dataset: CESM2, ensemble: r1i1p1f1} + + - {<<: *cmip6_h-ssp126, dataset: CESM2-WACCM} + + - {<<: *cmip6_h-ssp126, dataset: CMCC-CM2-SR5} + + - {<<: *cmip6_h-ssp126, dataset: CNRM-CM6-1, ensemble: r(1:6)i1p1f2, grid: gr} + + - {<<: *cmip6_h-ssp126, dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr} + + - { + <<: *cmip6_h-ssp126, + dataset: CNRM-ESM2-1, + ensemble: r(1:5)i1p1f2, + grid: gr, + } + + - {<<: *cmip6_h-ssp126, dataset: EC-Earth3, ensemble: r4i1p1f1, grid: gr} + - {<<: *cmip6_h-ssp126, dataset: EC-Earth3, ensemble: r6i1p1f1, grid: gr} + - {<<: *cmip6_h-ssp126, dataset: EC-Earth3, ensemble: r9i1p1f1, grid: gr} + - {<<: *cmip6_h-ssp126, dataset: EC-Earth3, ensemble: r11i1p1f1, grid: gr} + - {<<: *cmip6_h-ssp126, dataset: EC-Earth3, ensemble: r13i1p1f1, grid: gr} + - {<<: *cmip6_h-ssp126, dataset: EC-Earth3, ensemble: r15i1p1f1, grid: gr} + + - {<<: *cmip6_h-ssp126, dataset: FGOALS-g3, ensemble: r1i1p1f1} + + - {<<: *cmip6_h-ssp126, dataset: FGOALS-f3-L, grid: gr} + + - {<<: *cmip6_h-ssp126, dataset: FIO-ESM-2-0, ensemble: r(1:3)i1p1f1} + + - {<<: *cmip6_h-ssp126, dataset: GFDL-ESM4, grid: gr1} + + - {<<: *cmip6_h-ssp126, dataset: GISS-E2-1-G, ensemble: r1i1p3f1} + + - {<<: *cmip6_h-ssp126, dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3} + + - { + <<: *cmip6_h-ssp126, + dataset: IPSL-CM6A-LR, + ensemble: r(1:4)i1p1f1, + grid: gr, + } + - {<<: *cmip6_h-ssp126, dataset: IPSL-CM6A-LR, ensemble: r6i1p1f1, grid: gr} + + - {<<: *cmip6_h-ssp126, dataset: KACE-1-0-G, ensemble: r(1:2)i1p1f1, grid: gr} + + - {<<: *cmip6_h-ssp126, dataset: MCM-UA-1-0, ensemble: r1i1p1f2} # ensemble: [r1i1p1f1, r1i1p1f2] changed due to ESMValCore issue 1964 + + - {<<: *cmip6_h-ssp126, dataset: MIROC6, ensemble: r(1:3)i1p1f1} + + - {<<: *cmip6_h-ssp126, dataset: MIROC-ES2L, ensemble: r1i1p1f2} + + - {<<: *cmip6_h-ssp126, dataset: MPI-ESM1-2-HR, ensemble: r1i1p1f1} + + - {<<: *cmip6_h-ssp126, dataset: MPI-ESM1-2-LR, ensemble: r(1:10)i1p1f1} + + - {<<: *cmip6_h-ssp126, dataset: MRI-ESM2-0, ensemble: r1i1p1f1} + + - {<<: *cmip6_h-ssp126, dataset: NESM3, ensemble: r(1:2)i1p1f1} + + - {<<: *cmip6_h-ssp126, dataset: NorESM2-LM} + + - {<<: *cmip6_h-ssp126, dataset: NorESM2-MM} + + - {<<: *cmip6_h-ssp126, dataset: UKESM1-0-LL, ensemble: r(1:4)i1p1f2} + - {<<: *cmip6_h-ssp126, dataset: UKESM1-0-LL, ensemble: r8i1p1f2} + +preprocessors: + ensemble_members: &ensemble + regrid: + target_grid: 1x1 + scheme: area_weighted + multi_model_statistics: + span: full + statistics: [mean] + seasonal_statistics: + operator: mean + +diagnostics: + tas_cmip6_85: + themes: &themes + - phys + realms: &realms + - atmos + statistics: + - mean + - anomaly + - diff + variables: + tas: + mip: Amon + short_name: tas + preprocessor: ensemble_members + additional_datasets: *cmip6_85 + scripts: + tas_cmip6_85: &script_input + script: cos22esd/climate_change_hotspot.py + baseline_period: &baseline [1986, 2005] + future_periods: &future ["2041-2060", "2081-2100"] + region: ®ion [-10, 40, 30, 45] + region_name: ®_name "Mediterranean" + + pr_cmip6_85: + variables: + pr: + mip: Amon + short_name: pr + preprocessor: ensemble_members + additional_datasets: *cmip6_85 + scripts: + 
pr_cmip6_85: + <<: *script_input + + tas_cmip5_85: + variables: + tas: + mip: Amon + short_name: tas + preprocessor: ensemble_members + additional_datasets: *cmip5_85 + scripts: + tas_cmip5_85: + <<: *script_input + + pr_cmip5_85: + variables: + pr: + mip: Amon + short_name: pr + preprocessor: ensemble_members + additional_datasets: *cmip5_85 + scripts: + pr_cmip5_85: + <<: *script_input + + tas_cmip6_45: + variables: + tas: + mip: Amon + short_name: tas + preprocessor: ensemble_members + additional_datasets: *cmip6_45 + scripts: + tas_cmip6_45: + <<: *script_input + + pr_cmip6_45: + variables: + pr: + mip: Amon + short_name: pr + preprocessor: ensemble_members + additional_datasets: *cmip6_45 + scripts: + pr_cmip6_45: + <<: *script_input + + tas_cmip5_45: + variables: + tas: + mip: Amon + short_name: tas + preprocessor: ensemble_members + additional_datasets: *cmip5_45 + scripts: + tas_cmip5_45: + <<: *script_input + + pr_cmip5_45: + variables: + pr: + mip: Amon + short_name: pr + preprocessor: ensemble_members + additional_datasets: *cmip5_45 + scripts: + pr_cmip5_45: + <<: *script_input + + tas_cmip6_26: + variables: + tas: + mip: Amon + short_name: tas + preprocessor: ensemble_members + additional_datasets: *cmip6_26 + scripts: + tas_cmip6_26: + <<: *script_input + + pr_cmip6_26: + variables: + pr: + mip: Amon + short_name: pr + preprocessor: ensemble_members + additional_datasets: *cmip6_26 + scripts: + pr_cmip6_26: + <<: *script_input + + tas_cmip5_26: + variables: + tas: + mip: Amon + short_name: tas + preprocessor: ensemble_members + additional_datasets: *cmip5_26 + scripts: + tas_cmip5_26: + <<: *script_input + + pr_cmip5_26: + variables: + pr: + mip: Amon + short_name: pr + preprocessor: ensemble_members + additional_datasets: *cmip5_26 + scripts: + pr_cmip5_26: + <<: *script_input + + generate_plots: + plot_types: + - scatter + - geo + scripts: + generate_plots: + ancestors: [tas_cmip6_85/*, + pr_cmip6_85/*, + tas_cmip5_85/*, + pr_cmip5_85/*, + tas_cmip6_45/*, + pr_cmip6_45/*, + tas_cmip5_45/*, + pr_cmip5_45/*, + tas_cmip6_26/*, + pr_cmip6_26/*, + tas_cmip5_26/*, + pr_cmip5_26/*] + script: cos22esd/hotspot_plotter.py + baseline_period: *baseline + future_periods: *future + region: *region + region_name: *reg_name diff --git a/esmvaltool/recipes/recipe_climate_patterns.yml b/esmvaltool/recipes/recipe_climate_patterns.yml new file mode 100644 index 0000000000..08e0c51779 --- /dev/null +++ b/esmvaltool/recipes/recipe_climate_patterns.yml @@ -0,0 +1,249 @@ +# ESMValTool +# recipe_climate_patterns.yml +--- +documentation: + description: Generating climate patterns from CMIP6 models. 
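+  # Editor's note - a hedged sketch, not part of the upstream recipe: a
+  # climate "pattern" in the sense of Huntingford & Cox (2000) is, roughly,
+  # the regression slope of a local monthly variable against the global
+  # annual-mean tas anomaly, i.e. for each grid cell (i, j) and month m:
+  #   var(t, m, i, j) ~ a(m, i, j) + b(m, i, j) * tas_global(t)
+  # where the slopes b are the patterns written out by the script below.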
+ title: Generating Climate Patterns + + authors: + - munday_gregory + + maintainer: + - munday_gregory + + references: + - mathison2024gmd + - huntingford2000climdyn + +preprocessors: + global_mean_monthly: + monthly_statistics: + operator: mean + + regrid: + target_grid: {start_longitude: -180, end_longitude: 176.25, step_longitude: 3.75, + start_latitude: -55, end_latitude: 82.5, step_latitude: 2.5} + scheme: linear + + downscale_sftlf: + regrid: + target_grid: {start_longitude: -180, end_longitude: 176.25, step_longitude: 3.75, + start_latitude: -55, end_latitude: 82.5, step_latitude: 2.5} + scheme: linear + +monthly_global_settings: &monthly_global_settings + mip: Amon + project: CMIP6 + preprocessor: global_mean_monthly + +monthly_global_settings_day: &monthly_global_settings_day + mip: day + project: CMIP6 + preprocessor: global_mean_monthly + + +CMIP6_landfrac: &cmip6_landfrac + - {dataset: ACCESS-CM2, exp: piControl, ensemble: r1i1p1f1, grid: gn, institute: CSIRO-ARCCSS} + - {dataset: ACCESS-ESM1-5, exp: piControl, ensemble: r1i1p1f1, grid: gn} + - {dataset: AWI-CM-1-1-MR, exp: piControl, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-CSM2-MR, exp: hist-resIPO, ensemble: r1i1p1f1, grid: gn} + - {dataset: CanESM5, exp: piControl, ensemble: r1i1p1f1, grid: gn} + - {dataset: CanESM5-CanOE, exp: piControl, ensemble: r1i1p2f1, grid: gn} + - {dataset: CanESM5-1, exp: piControl, ensemble: r1i1p1f1, grid: gn, institute: CCCma} + # - {dataset: CAS-ESM2-0, exp: piControl, ensemble: r1i1p1f1, grid: gn} # Global only + - {dataset: CMCC-ESM2, exp: piControl, ensemble: r1i1p1f1, grid: gn} + # - {dataset: CMCC-CM2-SR5, exp: piControl, ensemble: r1i1p1f1, grid: gn} # No tasmin/tasmax + - {dataset: CNRM-CM6-1, exp: piControl, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-CM6-1-HR, exp: piControl, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-ESM2-1, exp: piControl, ensemble: r1i1p1f2, grid: gr} + # - {dataset: E3SM-1-0, exp: piControl, ensemble: r1i1p1f1, grid: gr} # Tasmax == tasmin + - {dataset: EC-Earth3, exp: piControl, ensemble: r1i1p1f1, grid: gr} + # - {dataset: EC-Earth3-CC, exp: piControl, ensemble: r1i1p1f1, grid: gr} # Global only + - {dataset: EC-Earth3-Veg, exp: piControl, ensemble: r1i1p1f1, grid: gr} + # - {dataset: FGOALS-f3-L, exp: historical, ensemble: r1i1p1f1, grid: gr} # No tasmin/tasmax + - {dataset: FGOALS-g3, exp: piControl, ensemble: r1i1p1f1, grid: gn} + # - {dataset: FIO-ESM-2-0, exp: piControl, ensemble: r1i1p1f1, grid: gn} # Global only + - {dataset: GFDL-CM4, exp: piControl, ensemble: r1i1p1f1, grid: gr1} + - {dataset: GFDL-ESM4, exp: ssp370, ensemble: r1i1p1f1, grid: gr1} + - {dataset: GISS-E2-1-H, exp: piControl, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-G, exp: piControl, ensemble: r1i1p5f1, grid: gn} + - {dataset: GISS-E2-2-G, exp: piControl, ensemble: r1i1p1f1, grid: gn} + - {dataset: HadGEM3-GC31-LL, exp: piControl, ensemble: r1i1p1f1, grid: gn} + - {dataset: HadGEM3-GC31-MM, exp: piControl, ensemble: r1i1p1f1, grid: gn} + - {dataset: INM-CM4-8, exp: piControl, ensemble: r1i1p1f1, grid: gr1} + - {dataset: INM-CM5-0, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr1} + - {dataset: IPSL-CM6A-LR, exp: piControl, ensemble: r1i1p1f1, grid: gr} + # - {dataset: KACE-1-0-G, exp: piControl, ensemble: r1i1p1f1, grid: gr} # Global only, weird tasmin/tasmax + # - {dataset: KIOST-ESM, exp: piControl, ensemble: r1i1p1f1, grid: gr} # Global only + - {dataset: MIROC6, exp: piControl, ensemble: r1i1p1f1, grid: gn} + - {dataset: MIROC-ES2L, exp: piControl,
ensemble: r1i1p1f2, grid: gn} + - {dataset: MIROC-ES2H, exp: piControl, ensemble: r1i1p4f2, grid: gn} + - {dataset: MPI-ESM1-2-HR, exp: piControl, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-LR, exp: piControl, ensemble: r1i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, exp: piControl, ensemble: r1i1p1f1, grid: gn} + # - {dataset: NorESM2-LM, exp: piControl, ensemble: r1i1p1f1, grid: gn} # Global only, tasmax == tasmin + - {dataset: NorESM2-MM, exp: piControl, ensemble: r1i1p1f1, grid: gn} + - {dataset: TaiESM1, exp: piControl, ensemble: r1i1p1f1, grid: gn} + - {dataset: UKESM1-0-LL, exp: piControl, ensemble: r1i1p1f2, grid: gn} + +CMIP6_no_tasmax: &cmip6_no_tasmax + # - {dataset: E3SM-1-0, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 2099} # bad tasmin/tasmax + # - {dataset: NorESM2-LM, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} # bad tasmin/tasmax + - {dataset: NorESM2-MM, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: TaiESM1, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} + +CMIP6_DAY: &cmip6_day + # - {dataset: E3SM-1-0, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 2099} # bad tasmin/tasmax + # - {dataset: NorESM2-LM, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} # bad tasmin/tasmax + - {dataset: NorESM2-MM, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: TaiESM1, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} + +CMIP6_FULL: &cmip6_full + - {dataset: ACCESS-CM2, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100, institute: CSIRO-ARCCSS} + - {dataset: ACCESS-ESM1-5, exp: [historical, ssp585], ensemble: r3i1p1f1, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: AWI-CM-1-1-MR, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: BCC-CSM2-MR, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: CanESM5, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: CanESM5-1, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100, institute: CCCma} # optional extra + - {dataset: CanESM5-CanOE, exp: [historical, ssp585], ensemble: r1i1p2f1, grid: gn, start_year: 1850, end_year: 2100} + # - {dataset: CAS-ESM2-0, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} # Global only + - {dataset: CMCC-ESM2, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} + # - {dataset: CMCC-CM2-SR5, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} # No tasmin/tasmax + - {dataset: CNRM-CM6-1, exp: [historical, ssp585], ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 2100} + - {dataset: CNRM-CM6-1-HR, exp: [historical, ssp585], ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 2100} + - {dataset: CNRM-ESM2-1, exp: [historical, ssp585], ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 2100} + - {dataset: EC-Earth3, exp: [historical, ssp585], ensemble: r11i1p1f1, grid: gr, start_year: 1850, end_year: 2100} + # - {dataset: EC-Earth3-CC, exp: [historical, ssp585], 
ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 2100} # Global only + - {dataset: EC-Earth3-Veg, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 2100} + # - {dataset: FGOALS-f3-L, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 2100} # No tasmin/tasmax + - {dataset: FGOALS-g3, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} + # - {dataset: FIO-ESM-2-0, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} # Global only + - {dataset: GFDL-CM4, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gr1, start_year: 1850, end_year: 2100} + - {dataset: GFDL-ESM4, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gr1, start_year: 1850, end_year: 2100} + - {dataset: GISS-E2-1-H, exp: [historical, ssp585], ensemble: r3i1p1f2, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: GISS-E2-1-G, exp: [historical, ssp585], ensemble: r1i1p5f1, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: GISS-E2-2-G, exp: [historical, ssp585], ensemble: r1i1p3f1, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: HadGEM3-GC31-LL, exp: [historical, ssp585], ensemble: r1i1p1f3, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: HadGEM3-GC31-MM, exp: [historical, ssp585], ensemble: r1i1p1f3, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: INM-CM4-8, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gr1, start_year: 1850, end_year: 2100} + - {dataset: INM-CM5-0, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gr1, start_year: 1850, end_year: 2100} + - {dataset: IPSL-CM6A-LR, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 2100} + # - {dataset: KACE-1-0-G, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 2100} # bad tasmin/tasmax + # - {dataset: KIOST-ESM, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 2100} # optional extra + - {dataset: MIROC6, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: MIROC-ES2L, exp: [historical, ssp585], ensemble: r1i1p1f2, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: MIROC-ES2H, exp: [historical, ssp585], ensemble: r1i1p4f2, grid: gn, start_year: 1850, end_year: 2100} # optional extra + - {dataset: MPI-ESM1-2-HR, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: MPI-ESM1-2-LR, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: MRI-ESM2-0, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: UKESM1-0-LL, exp: [historical, ssp585], ensemble: r1i1p1f2, grid: gn, start_year: 1850, end_year: 2100} + +diagnostics: + monthly_timeseries: + description: Mean monthly variables + + variables: + + # sftlf: + # short_name: sftlf + # mip: fx + # project: CMIP6 + # preprocessor: downscale_sftlf + # additional_datasets: *cmip6_landfrac + + tasmax_585: + short_name: tasmax + <<: *monthly_global_settings + additional_datasets: *cmip6_full + + tasmin_585: + short_name: tasmin + <<: *monthly_global_settings + additional_datasets: *cmip6_full + + tas_585: + short_name: tas + <<: *monthly_global_settings + additional_datasets: *cmip6_full + + huss_585: + short_name: huss + <<: *monthly_global_settings + additional_datasets: *cmip6_full + + pr_585: + short_name: pr + <<: 
*monthly_global_settings + additional_datasets: *cmip6_full + + sfcWind_585: + short_name: sfcWind + <<: *monthly_global_settings + additional_datasets: *cmip6_full + + ps_585: + short_name: ps + <<: *monthly_global_settings + additional_datasets: *cmip6_full + + rsds_585: + short_name: rsds + <<: *monthly_global_settings + additional_datasets: *cmip6_full + + rlds_585: + short_name: rlds + <<: *monthly_global_settings + additional_datasets: *cmip6_full + + tasmax_585_day: + short_name: tasmax + <<: *monthly_global_settings_day + additional_datasets: *cmip6_day + + tasmin_585_day: + short_name: tasmin + <<: *monthly_global_settings_day + additional_datasets: *cmip6_day + + tas_585_no_tasmax: + short_name: tas + <<: *monthly_global_settings + additional_datasets: *cmip6_no_tasmax + + huss_585_no_tasmax: + short_name: huss + <<: *monthly_global_settings + additional_datasets: *cmip6_no_tasmax + + pr_585_no_tasmax: + short_name: pr + <<: *monthly_global_settings + additional_datasets: *cmip6_no_tasmax + + sfcWind_585_no_tasmax: + short_name: sfcWind + <<: *monthly_global_settings + additional_datasets: *cmip6_no_tasmax + + ps_585_no_tasmax: + short_name: ps + <<: *monthly_global_settings + additional_datasets: *cmip6_no_tasmax + + rsds_585_no_tasmax: + short_name: rsds + <<: *monthly_global_settings + additional_datasets: *cmip6_no_tasmax + + rlds_585_no_tasmax: + short_name: rlds + <<: *monthly_global_settings + additional_datasets: *cmip6_no_tasmax + + scripts: + climate_patterns_script: + script: climate_patterns/climate_patterns.py + jules_mode: false # options: true, false + parallelise: false # options: true, false + area: global # options: global, land. If land, uncomment the landfrac recipe settings diff --git a/esmvaltool/recipes/recipe_climwip_brunner2019_med.yml b/esmvaltool/recipes/recipe_climwip_brunner2019_med.yml new file mode 100644 index 0000000000..651d78933c --- /dev/null +++ b/esmvaltool/recipes/recipe_climwip_brunner2019_med.yml @@ -0,0 +1,304 @@ +# ESMValTool +# recipe_climwip_brunner2019_med.yml +--- +documentation: + title: ClimWIP implementation after Brunner et al. (2019) + description: > + Calculate weights similar to Brunner et al. (2019). + Changes compared to Brunner et al. (2019): + - Use model native land-sea mask (instead of regionmask) + - Use ESMValCore extract_shape to cut regions (instead of regionmask) + - The models CCSM4 (r6) and MIROC5 (r1) had to be excluded due to errors in the ESMValCore pre-processor + - Use ERA5 instead of three older reanalyses as observational data set + This recipe is intended as a template - it calculates weights for the Mediterranean (MED) SREX + region. Small adaptations are needed to calculate the other regions (see documentation for guidance). 
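+  # A rough sketch of the ClimWIP weighting applied in this recipe (after +  # Knutti et al., 2017 and Brunner et al., 2019; see those papers for the +  # exact definitions): each model i receives a weight combining a +  # performance distance D_i to the observations with independence +  # distances S_ij to every other model j, +  #   w_i = exp(-(D_i / sigma_D)^2) / (1 + sum_{j != i} exp(-(S_ij / sigma_S)^2)) +  # where sigma_D and sigma_S are the performance_sigma and +  # independence_sigma values set under the climwip script below.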
+ authors: + - brunner_lukas + - lorenz_ruth + maintainer: + - brunner_lukas + references: + - brunner2019 + +datasets: &model_data + - {dataset: ACCESS1-0, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + - {dataset: ACCESS1-3, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + + - {dataset: bcc-csm1-1-m, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + - {dataset: bcc-csm1-1, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + + - {dataset: BNU-ESM, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + + - {dataset: CCSM4, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + - {dataset: CCSM4, project: CMIP5, exp: [historical, rcp85], ensemble: r2i1p1} + - {dataset: CCSM4, project: CMIP5, exp: [historical, rcp85], ensemble: r3i1p1} + - {dataset: CCSM4, project: CMIP5, exp: [historical, rcp85], ensemble: r4i1p1} + - {dataset: CCSM4, project: CMIP5, exp: [historical, rcp85], ensemble: r5i1p1} + # pre-processor can't merge historical and RCP85 - unclear why + # - {dataset: CCSM4, project: CMIP5, exp: [historical, rcp85], ensemble: r6i1p1} + + - {dataset: CESM1-BGC, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + + - {dataset: CESM1-CAM5, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + - {dataset: CESM1-CAM5, project: CMIP5, exp: [historical, rcp85], ensemble: r2i1p1} + - {dataset: CESM1-CAM5, project: CMIP5, exp: [historical, rcp85], ensemble: r3i1p1} + + - {dataset: CMCC-CESM, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + + - {dataset: CMCC-CMS, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + + - {dataset: CMCC-CM, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + + - {dataset: CNRM-CM5, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + - {dataset: CNRM-CM5, project: CMIP5, exp: [historical, rcp85], ensemble: r2i1p1} + - {dataset: CNRM-CM5, project: CMIP5, exp: [historical, rcp85], ensemble: r4i1p1} + - {dataset: CNRM-CM5, project: CMIP5, exp: [historical, rcp85], ensemble: r6i1p1} + - {dataset: CNRM-CM5, project: CMIP5, exp: [historical, rcp85], ensemble: r10i1p1} + + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: [historical, rcp85], ensemble: r2i1p1} + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: [historical, rcp85], ensemble: r3i1p1} + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: [historical, rcp85], ensemble: r4i1p1} + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: [historical, rcp85], ensemble: r5i1p1} + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: [historical, rcp85], ensemble: r6i1p1} + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: [historical, rcp85], ensemble: r7i1p1} + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: [historical, rcp85], ensemble: r8i1p1} + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: [historical, rcp85], ensemble: r9i1p1} + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: [historical, rcp85], ensemble: r10i1p1} + + - {dataset: CanESM2, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + - {dataset: CanESM2, project: CMIP5, exp: [historical, rcp85], ensemble: r2i1p1} + - {dataset: CanESM2, project: CMIP5, exp: [historical, rcp85], ensemble: r3i1p1} + - {dataset: CanESM2, project: CMIP5, exp: [historical, rcp85], ensemble: r4i1p1} + - {dataset: CanESM2, project: CMIP5, exp: [historical, rcp85], ensemble: r5i1p1} + + - {dataset: FGOALS-g2, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + + - 
{dataset: GFDL-CM3, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + + - {dataset: GFDL-ESM2G, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + + - {dataset: GFDL-ESM2M, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + + - {dataset: GISS-E2-H-CC, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + + - {dataset: GISS-E2-H, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + - {dataset: GISS-E2-H, project: CMIP5, exp: [historical, rcp85], ensemble: r2i1p1} + + - {dataset: GISS-E2-H, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p2} + + - {dataset: GISS-E2-H, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p3} + - {dataset: GISS-E2-H, project: CMIP5, exp: [historical, rcp85], ensemble: r2i1p3} + + - {dataset: GISS-E2-R-CC, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + + - {dataset: GISS-E2-R, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + - {dataset: GISS-E2-R, project: CMIP5, exp: [historical, rcp85], ensemble: r2i1p1} + + - {dataset: GISS-E2-R, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p2} + + - {dataset: GISS-E2-R, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p3} + - {dataset: GISS-E2-R, project: CMIP5, exp: [historical, rcp85], ensemble: r2i1p3} + + - {dataset: HadGEM2-CC, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + + - {dataset: HadGEM2-ES, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + - {dataset: HadGEM2-ES, project: CMIP5, exp: [historical, rcp85], ensemble: r2i1p1} + - {dataset: HadGEM2-ES, project: CMIP5, exp: [historical, rcp85], ensemble: r3i1p1} + - {dataset: HadGEM2-ES, project: CMIP5, exp: [historical, rcp85], ensemble: r4i1p1} + + - {dataset: inmcm4, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + + - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: [historical, rcp85], ensemble: r2i1p1} + - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: [historical, rcp85], ensemble: r3i1p1} + - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: [historical, rcp85], ensemble: r4i1p1} + + - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + + - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + + - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + + - {dataset: MIROC-ESM, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + + # MIROC5 has extended historical runs (overlapping with RCP85); the pre-processor can't merge + # the first ensemble member for rsus (it works for the other ensemble members) + # - {dataset: MIROC5, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + - {dataset: MIROC5, project: CMIP5, exp: [historical, rcp85], ensemble: r2i1p1} + - {dataset: MIROC5, project: CMIP5, exp: [historical, rcp85], ensemble: r3i1p1} + + - {dataset: MPI-ESM-LR, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: [historical, rcp85], ensemble: r2i1p1} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: [historical, rcp85], ensemble: r3i1p1} + + - {dataset: MPI-ESM-MR, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + + - {dataset: MRI-CGCM3, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + + - {dataset: MRI-ESM1, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + + - {dataset: NorESM1-ME, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + + - 
{dataset: NorESM1-M, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + + +obs_data: &obs_data # for climwip performance metrics + - {dataset: ERA5, project: native6, type: reanaly, version: v1, tier: 3} + +preprocessors: + climwip_general: &general + regrid: + target_grid: 2.5x2.5 + scheme: linear + mask_landsea: + mask_out: sea + + climatological_mean: &mean + climate_statistics: + operator: mean + + climatological_std: &std + climate_statistics: + operator: std_dev + + region: &region + <<: *general + extract_shape: + shapefile: shapefiles/srex.shp + decomposed: true + method: contains + crop: true + ids: + - 'South Europe/Mediterranean [MED:13]' + + region_mean: + <<: *region + <<: *mean + + region_std: + <<: *region + <<: *std + + temperature_anomalies: + custom_order: true + <<: *region + area_statistics: + operator: mean + annual_statistics: + operator: mean + anomalies: + period: full + reference: &ref_period + start_year: 1981 + start_month: 1 + start_day: 1 + end_year: 2010 + end_month: 12 + end_day: 31 + standardize: false + +diagnostics: + calculate_weights_climwip: + variables: + tas_CLIM: &common_settings + short_name: tas + start_year: 1995 + end_year: 2014 + mip: Amon + preprocessor: region_mean + additional_datasets: *obs_data + pr_CLIM: + <<: *common_settings + short_name: pr + rsds_CLIM: + <<: *common_settings + short_name: rsds + rlds_STD: + <<: *common_settings + short_name: rlds + preprocessor: region_std + rsus_CLIM: + <<: *common_settings + short_name: rsus + derive: true + force_derivation: false + rsus_STD: + <<: *common_settings + short_name: rsus + preprocessor: region_std + derive: true + force_derivation: false + # only needed if no performance sigma is given + # CLIM_future: + # short_name: tas + # start_year: 2081 + # end_year: 2100 + # mip: Amon + # preprocessor: region_mean + + scripts: + climwip: + script: weighting/climwip/main.py + obs_data: native6 + combine_ensemble_members: false + performance_sigma: 0.546 # <----- region-specific value, adapt for other regions + independence_sigma: 0.643 # <----- region-specific value, adapt for other regions + performance_contributions: + tas_CLIM: 1 + pr_CLIM: 1 + rsds_CLIM: 1 + rsus_CLIM: 1 + rlds_STD: 1 + rsus_STD: 1 + independence_contributions: + tas_CLIM: 1 + pr_CLIM: 1 + rsds_CLIM: 1 + rsus_CLIM: 1 + rlds_STD: 1 + rsus_STD: 1 + # only needed if no performance sigma is given + # calibrate_performance_sigma: + # target: CLIM_future + + weighted_temperature_graph: + variables: + tas: + start_year: 1960 + end_year: 2100 + mip: Amon + preprocessor: temperature_anomalies + scripts: + weighted_temperature_graph: + script: weighting/weighted_temperature_graph.py + ancestors: [calculate_weights_climwip/climwip, tas] + weights: 'weights.nc' + settings: + <<: *ref_period + central_estimate: mean + lower_bound: 16.7 + upper_bound: 83.3 + + weighted_temperature_map: + variables: + tas_CLIM_future: + short_name: tas + start_year: 2081 + end_year: 2100 + mip: Amon + preprocessor: region_mean + tas_CLIM_reference: + short_name: tas + start_year: 1995 + end_year: 2014 + mip: Amon + preprocessor: region_mean + scripts: + weighted_temperature_map: + script: weighting/weighted_temperature_map.py + ancestors: [calculate_weights_climwip/climwip, tas_CLIM_future, tas_CLIM_reference] + weights: 'weights.nc' + # optional arguments + antimeridian: pacific + model_aggregation: mean # [ mean (default) | median | integer in (0, 100) ] diff --git a/esmvaltool/recipes/recipe_climwip_brunner20esd.yml b/esmvaltool/recipes/recipe_climwip_brunner20esd.yml new file mode 100644 index 0000000000..93ef0d36d5 --- /dev/null +++ 
b/esmvaltool/recipes/recipe_climwip_brunner20esd.yml @@ -0,0 +1,247 @@ +# ESMValTool +# recipe_climwip_brunner20esd.yml +--- +documentation: + title: Reproduce ClimWIP results from Brunner et al. (2020) + description: > + Calculate weights similar to Brunner et al. (2020). Each model weight is based on the model's performance + in reproducing historical observations as well as its independence from all the other models in the + multi-model ensemble. For more information see the documentation and Brunner et al. (2020). + Differences compared to Brunner et al. (2020): + - The models CAMS-CSM1-0 and MPI-ESM1-2-HR (r2) had to be excluded due to errors in the ESMValCore pre-processor + - Use only ERA5 (instead of ERA5 and MERRA2) as observational data set + authors: + - kalverla_peter + - smeets_stef + - brunner_lukas + - camphuijsen_jaro + - lorenz_ruth + maintainer: + - kalverla_peter + - smeets_stef + - brunner_lukas + - lorenz_ruth + references: + - brunner2020 + +datasets: &model_data + - {dataset: ACCESS-CM2, project: CMIP6, grid: gn, exp: [historical, ssp585], ensemble: r1i1p1f1} + - {dataset: ACCESS-ESM1-5, project: CMIP6, grid: gn, exp: [historical, ssp585], ensemble: r(1:3)i1p1f1} + - {dataset: AWI-CM-1-1-MR, project: CMIP6, grid: gn, exp: [historical, ssp585], ensemble: r1i1p1f1} + - {dataset: BCC-CSM2-MR, project: CMIP6, grid: gn, exp: [historical, ssp585], ensemble: r1i1p1f1} + - {dataset: CanESM5-CanOE, project: CMIP6, grid: gn, exp: [historical, ssp585], ensemble: r(1:3)i1p2f1} + - {dataset: CanESM5, project: CMIP6, grid: gn, exp: [historical, ssp585], ensemble: r(1:25)i1p1f1} + - {dataset: CanESM5, project: CMIP6, grid: gn, exp: [historical, ssp585], ensemble: r(1:25)i1p2f1} + - {dataset: CESM2-WACCM, project: CMIP6, grid: gn, exp: [historical, ssp585], ensemble: r1i1p1f1} + # ssp585 data missing on ESGF for the following dataset + # see https://github.com/ESMValGroup/ESMValTool/pull/2581 for details + #- {dataset: CESM2, project: CMIP6, grid: gn, exp: [historical, ssp585], ensemble: r(1:2)i1p1f1} + - {dataset: CNRM-CM6-1-HR, project: CMIP6, grid: gr, exp: [historical, ssp585], ensemble: r1i1p1f2} + - {dataset: CNRM-CM6-1, project: CMIP6, grid: gr, exp: [historical, ssp585], ensemble: r(1:6)i1p1f2} + - {dataset: CNRM-ESM2-1, project: CMIP6, grid: gr, exp: [historical, ssp585], ensemble: r(1:5)i1p1f2} + - {dataset: EC-Earth3-Veg, project: CMIP6, grid: gr, exp: [historical, ssp585], ensemble: r(1:2)i1p1f1} + - {dataset: EC-Earth3-Veg, project: CMIP6, grid: gr, exp: [historical, ssp585], ensemble: r4i1p1f1} + - {dataset: EC-Earth3, project: CMIP6, grid: gr, exp: [historical, ssp585], ensemble: r1i1p1f1} + - {dataset: EC-Earth3, project: CMIP6, grid: gr, exp: [historical, ssp585], ensemble: r4i1p1f1} + - {dataset: EC-Earth3, project: CMIP6, grid: gr, exp: [historical, ssp585], ensemble: r6i1p1f1} + - {dataset: EC-Earth3, project: CMIP6, grid: gr, exp: [historical, ssp585], ensemble: r9i1p1f1} + - {dataset: EC-Earth3, project: CMIP6, grid: gr, exp: [historical, ssp585], ensemble: r11i1p1f1} + - {dataset: EC-Earth3, project: CMIP6, grid: gr, exp: [historical, ssp585], ensemble: r13i1p1f1} + - {dataset: EC-Earth3, project: CMIP6, grid: gr, exp: [historical, ssp585], ensemble: r15i1p1f1} + - {dataset: FGOALS-f3-L, project: CMIP6, grid: gr, exp: [historical, ssp585], ensemble: r1i1p1f1} + - {dataset: FGOALS-g3, project: CMIP6, grid: gn, exp: [historical, ssp585], ensemble: r1i1p1f1} + - {dataset: FIO-ESM-2-0, project: CMIP6, grid: gn, exp: [historical, ssp585], ensemble: r(1:3)i1p1f1} + - 
{dataset: GFDL-ESM4, project: CMIP6, grid: gr1, exp: [historical, ssp585], ensemble: r1i1p1f1} + - {dataset: GISS-E2-1-G, project: CMIP6, grid: gn, exp: [historical, ssp585], ensemble: r1i1p3f1} + - {dataset: HadGEM3-GC31-LL, project: CMIP6, grid: gn, exp: [historical, ssp585], ensemble: r1i1p1f3} + - {dataset: INM-CM4-8, project: CMIP6, grid: gr1, exp: [historical, ssp585], ensemble: r1i1p1f1} + - {dataset: INM-CM5-0, project: CMIP6, grid: gr1, exp: [historical, ssp585], ensemble: r1i1p1f1} + - {dataset: IPSL-CM6A-LR, project: CMIP6, grid: gr, exp: [historical, ssp585], ensemble: r(1:4)i1p1f1} + - {dataset: IPSL-CM6A-LR, project: CMIP6, grid: gr, exp: [historical, ssp585], ensemble: r6i1p1f1} + - {dataset: IPSL-CM6A-LR, project: CMIP6, grid: gr, exp: [historical, ssp585], ensemble: r14i1p1f1} + - {dataset: KACE-1-0-G, project: CMIP6, grid: gr, exp: [historical, ssp585], ensemble: r1i1p1f1} + - {dataset: MCM-UA-1-0, project: CMIP6, grid: gn, exp: [historical, ssp585], ensemble: r1i1p1f2} + - {dataset: MIROC6, project: CMIP6, grid: gn, exp: [historical, ssp585], ensemble: r(1:3)i1p1f1} + - {dataset: MIROC-ES2L, project: CMIP6, grid: gn, exp: [historical, ssp585], ensemble: r1i1p1f2} + - {dataset: MPI-ESM1-2-HR, project: CMIP6, grid: gn, exp: [historical, ssp585], ensemble: r1i1p1f1} + - {dataset: MPI-ESM1-2-LR, project: CMIP6, grid: gn, exp: [historical, ssp585], ensemble: r(1:10)i1p1f1} + - {dataset: MRI-ESM2-0, project: CMIP6, grid: gn, exp: [historical, ssp585], ensemble: r1i1p1f1} + - {dataset: NESM3, project: CMIP6, grid: gn, exp: [historical, ssp585], ensemble: r(1:2)i1p1f1} + - {dataset: NorESM2-MM, project: CMIP6, grid: gn, exp: [historical, ssp585], ensemble: r1i1p1f1} + - {dataset: UKESM1-0-LL, project: CMIP6, grid: gn, exp: [historical, ssp585], ensemble: r(1:4)i1p1f2} + - {dataset: UKESM1-0-LL, project: CMIP6, grid: gn, exp: [historical, ssp585], ensemble: r8i1p1f2} + + +obs_data: &obs_data # for climwip performance metrics + - {dataset: ERA5, project: native6, type: reanaly, version: v1, tier: 3} + +preprocessors: + climwip_general: &general + regrid: + target_grid: 2.5x2.5 + scheme: linear + + climatological_mean: + <<: *general + climate_statistics: + operator: mean + + detrended_std: + custom_order: true + <<: *general + annual_statistics: + operator: mean + detrend: + dimension: time + method: linear + climate_statistics: + operator: std_dev + + annual_trend: + <<: *general + annual_statistics: + operator: mean + linear_trend: + coordinate: time + convert_units: + units: "K year-1" + + global_mean: + <<: *general + climate_statistics: + operator: mean + area_statistics: + operator: mean + + temperature_anomalies: + custom_order: true + area_statistics: + operator: mean + annual_statistics: + operator: mean + anomalies: + period: full + reference: &ref_period + start_year: 1995 + start_month: 1 + start_day: 1 + end_year: 2014 + end_month: 12 + end_day: 31 + standardize: false + +diagnostics: + # if local anomalies are needed as predictors they need to be calculated first + calculate_local_anomalies: + variables: + tas_CLIM: &performance_settings + short_name: tas + start_year: 1980 + end_year: 2014 + mip: Amon + preprocessor: climatological_mean + additional_datasets: *obs_data + tas_GLOBAL: + <<: *performance_settings + preprocessor: global_mean + psl_CLIM: + <<: *performance_settings + short_name: psl + preprocessor: climatological_mean + psl_GLOBAL: + <<: *performance_settings + short_name: psl + preprocessor: global_mean + scripts: + local_anomalies: + script: 
weighting/calculate_difference_variable_group.py + obs_data: native6 + + + calculate_weights_climwip: + variables: + tas_CLIM_i: &independence_settings + short_name: tas + start_year: 1980 + end_year: 2014 + mip: Amon + preprocessor: climatological_mean + additional_datasets: *obs_data + psl_CLIM_i: + <<: *independence_settings + short_name: psl + tas_STD: + <<: *performance_settings + preprocessor: detrended_std + psl_STD: + <<: *performance_settings + short_name: psl + preprocessor: detrended_std + tas_TREND: + <<: *performance_settings + preprocessor: annual_trend + tas_CLIM_reference: &target_settings + short_name: tas + start_year: 1995 + end_year: 2014 + mip: Amon + preprocessor: climatological_mean + tas_CLIM_future: + <<: *target_settings + start_year: 2081 + end_year: 2100 + + scripts: + climwip: + script: weighting/climwip/main.py + obs_data: native6 + ancestors: [calculate_local_anomalies/local_anomalies, + tas_CLIM_i, psl_CLIM_i, tas_STD, psl_STD, tas_TREND, + tas_CLIM_reference, tas_CLIM_future] + combine_ensemble_members: true + performance_sigma: 0.43 + # if performance_sigma is not set, this needs to be uncommented + # calibrate_performance_sigma: + # target_ref: tas_CLIM_reference + # target: tas_CLIM_future + performance_contributions: + tas_ANOM: 1 + tas_STD: 1 + psl_ANOM: 1 + psl_STD: 1 + tas_TREND: 4 + independence_sigma: 0.54 + independence_contributions: + tas_CLIM_i: 1 + psl_CLIM_i: 1 + + weighted_temperature_graph: + variables: + tas: + start_year: 1960 + end_year: 2100 + mip: Amon + preprocessor: temperature_anomalies + scripts: + weighted_temperature_graph: + script: weighting/weighted_temperature_graph.py + ancestors: [calculate_weights_climwip/climwip, tas] + weights: 'weights.nc' + settings: + <<: *ref_period + central_estimate: mean + lower_bound: 16.7 + upper_bound: 83.3 + + weighted_temperature_map: + scripts: + weighted_temperature_map: + script: weighting/weighted_temperature_map.py + ancestors: [calculate_weights_climwip/climwip, + calculate_weights_climwip/tas_CLIM_future, + calculate_weights_climwip/tas_CLIM_reference] + weights: 'weights.nc' + # optional arguments + model_aggregation: mean # [ mean (default) | median | integer in (0, 100) ] + xticks: [-150, -100, -50, 0, 50, 100, 150] # if not given, ticks will be set automatically + yticks: [-90, -45, 0, 45, 90] diff --git a/esmvaltool/recipes/recipe_climwip_test_basic.yml b/esmvaltool/recipes/recipe_climwip_test_basic.yml new file mode 100644 index 0000000000..8a916d3493 --- /dev/null +++ b/esmvaltool/recipes/recipe_climwip_test_basic.yml @@ -0,0 +1,144 @@ +# ESMValTool +# recipe_climwip_test_basic.yml +--- +documentation: + title: Basic test recipe for ClimWIP weighting method + description: EUCP ClimWIP + + authors: + - kalverla_peter + - smeets_stef + - brunner_lukas + - camphuijsen_jaro + - lorenz_ruth + + maintainer: + - kalverla_peter + - smeets_stef + - brunner_lukas + - lorenz_ruth + + references: + - brunner2019 + - lorenz2018 + - knutti2017 + + projects: + - eucp + +datasets: &model_data # a minimal selection to demonstrate functionality + - {dataset: ACCESS1-0, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + - {dataset: ACCESS1-3, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + - {dataset: CCSM4, project: CMIP5, exp: [historical, rcp85], ensemble: "r(1:4)i1p1"} + - {dataset: BNU-ESM, project: CMIP5, exp: [historical, rcp85], ensemble: r1i1p1} + +obs_data: &obs_data # for climwip performance metrics + - {dataset: ERA5, project: native6, type: reanaly, version: v1, tier: 3}
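+# Note: the ensemble range notation used above, e.g. "r(1:4)i1p1", is +# shorthand that ESMValCore expands into the individual members r1i1p1, +# r2i1p1, r3i1p1 and r4i1p1.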
+ +preprocessors: + climwip_general: &general + regrid: + target_grid: 2.5x2.5 + scheme: linear + mask_landsea: + mask_out: sea + extract_region: + start_longitude: -10.0 + end_longitude: 39.0 + start_latitude: 30.0 + end_latitude: 76.25 + + climatological_mean: + <<: *general + climate_statistics: + operator: mean + + temperature_anomalies: + custom_order: true + area_statistics: + operator: mean + annual_statistics: + operator: mean + anomalies: + period: full + reference: &ref_period + start_year: 1981 + start_month: 1 + start_day: 1 + end_year: 2010 + end_month: 12 + end_day: 31 + standardize: false + +diagnostics: + calculate_weights_climwip: + variables: + tas_CLIM: &common_settings + short_name: tas + start_year: 1995 + end_year: 2014 + mip: Amon + preprocessor: climatological_mean + additional_datasets: *obs_data + pr_CLIM: + <<: *common_settings + short_name: pr + psl_CLIM: + <<: *common_settings + short_name: psl + + scripts: + climwip: + script: weighting/climwip/main.py + obs_data: native6 + combine_ensemble_members: true + performance_sigma: 0.5 + performance_contributions: + tas_CLIM: 1 + pr_CLIM: 2 + psl_CLIM: 1 + independence_sigma: 0.5 + independence_contributions: + tas_CLIM: .5 + pr_CLIM: .25 + psl_CLIM: 0 # equivalent to not setting it + + weighted_temperature_graph: + variables: + tas: + start_year: 1960 + end_year: 2100 + mip: Amon + preprocessor: temperature_anomalies + scripts: + weighted_temperature_graph: + script: weighting/weighted_temperature_graph.py + ancestors: [calculate_weights_climwip/climwip, tas] + weights: 'weights.nc' + settings: + <<: *ref_period + central_estimate: mean + lower_bound: 16.7 + upper_bound: 83.3 + + weighted_temperature_map: + variables: + tas_CLIM_future: &map_settings + short_name: tas + start_year: 2081 + end_year: 2100 + mip: Amon + preprocessor: climatological_mean + tas_CLIM_reference: + <<: *map_settings + start_year: 1995 + end_year: 2014 + scripts: + weighted_temperature_map: + script: weighting/weighted_temperature_map.py + ancestors: [calculate_weights_climwip/climwip, tas_CLIM_future, tas_CLIM_reference] + weights: 'weights.nc' + # optional arguments + model_aggregation: mean # [ mean (default) | median | integer in (0, 100) ] + xticks: [-10, 0, 10, 20, 30, 40] # if not given, ticks will be set automatically + yticks: [30, 40, 50, 60, 70, 80] diff --git a/esmvaltool/recipes/recipe_climwip_test_performance_sigma.yml b/esmvaltool/recipes/recipe_climwip_test_performance_sigma.yml new file mode 100644 index 0000000000..efdb8701ac --- /dev/null +++ b/esmvaltool/recipes/recipe_climwip_test_performance_sigma.yml @@ -0,0 +1,199 @@ +# ESMValTool +# recipe_climwip_test_performance_sigma.yml +--- +documentation: + title: Recipe to test ClimWIP performance sigma calibration + description: > + Calculate weights for CMIP-type models based on performance and independence. + Developed based on the ClimWIP method within the framework of EUCP and CRESCENDO.
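+  # Rough sketch of the sigma calibration exercised by this recipe: when +  # calibrate_performance_sigma is set instead of a fixed performance_sigma +  # (as in the climwip script below), each model is treated in turn as +  # pseudo-observations in a perfect model test, and the performance sigma +  # is chosen so that the weighted ensemble is not overconfident with +  # respect to these perfect models (see Brunner et al., 2020 for the exact +  # criterion).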
+ themes: + - phys + realms: + - atmos + authors: + - kalverla_peter + - smeets_stef + - brunner_lukas + - camphuijsen_jaro + - lorenz_ruth + maintainer: + - kalverla_peter + - smeets_stef + - brunner_lukas + - lorenz_ruth + references: + - brunner2019 + - lorenz2018 + - knutti2017 + projects: + - eucp + +datasets: &model_data # a selection of models to demonstrate the functionality of the perfect model test + - {project: CMIP6, exp: [historical, ssp585], dataset: ACCESS-CM2, ensemble: r(1:2)i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: ACCESS-ESM1-5, ensemble: r(1:2)i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: CESM2-WACCM, ensemble: r(1:2)i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: CIESM, ensemble: r1i1p1f1, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: CNRM-CM6-1, ensemble: r(1:2)i1p1f2, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: CNRM-ESM2-1, ensemble: r(1:2)i1p1f2, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: CanESM5-CanOE, ensemble: r(1:2)i1p2f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: CanESM5, ensemble: r(1:2)i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: EC-Earth3-Veg, ensemble: r(1:4)i1p1f1, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: FGOALS-g3, ensemble: r(1:2)i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: FIO-ESM-2-0, ensemble: r(1:2)i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1} + - {project: CMIP6, exp: [historical, ssp585], dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1} + - {project: CMIP6, exp: [historical, ssp585], dataset: GISS-E2-1-G, ensemble: r(1:2)i1p3f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: HadGEM3-GC31-MM, ensemble: r(1:2)i1p1f3, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: INM-CM4-8, ensemble: r1i1p1f1, grid: gr1} + - {project: CMIP6, exp: [historical, ssp585], dataset: INM-CM5-0, ensemble: r1i1p1f1, grid: gr1} + - {project: CMIP6, exp: [historical, ssp585], dataset: IPSL-CM6A-LR, ensemble: r(1:2)i1p1f1, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: KACE-1-0-G, ensemble: r(1:2)i1p1f1, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: MCM-UA-1-0, ensemble: r1i1p1f2, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: MIROC6, ensemble: r(1:2)i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: MPI-ESM1-2-LR, ensemble: r(1:2)i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: NESM3, ensemble: r(1:2)i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: NorESM2-MM, ensemble: r1i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn} + - 
{project: CMIP6, exp: [historical, ssp585], dataset: UKESM1-0-LL, ensemble: r(1:2)i1p1f2, grid: gn} + +obs_data: &obs_data # for climwip performance metrics + - {dataset: ERA5, project: native6, type: reanaly, version: v1, tier: 3} + +preprocessors: + climwip_general: &general + regrid: + target_grid: 2.5x2.5 + scheme: linear + mask_landsea: + mask_out: sea + extract_region: + start_longitude: 0.0 + end_longitude: 60.0 + start_latitude: 30.0 + end_latitude: 80.0 + + climatological_mean: + <<: *general + climate_statistics: + operator: mean + + annual_trend: + <<: *general + annual_statistics: + operator: mean + linear_trend: + coordinate: time + convert_units: + units: "K year-1" + + temperature_anomalies: + custom_order: true + area_statistics: + operator: mean + annual_statistics: + operator: mean + anomalies: + period: full + reference: &ref_period + start_year: 1981 + start_month: 1 + start_day: 1 + end_year: 2010 + end_month: 12 + end_day: 31 + standardize: false + +diagnostics: + calculate_weights_climwip: + variables: + tas_CLIM_reference: + short_name: tas + start_year: 1995 + end_year: 2014 + mip: Amon + preprocessor: climatological_mean + tas_CLIM_future: + short_name: tas + start_year: 2081 + end_year: 2100 + mip: Amon + preprocessor: climatological_mean + tas_CLIM: &common_settings + short_name: tas + start_year: 1995 + end_year: 2014 + mip: Amon + preprocessor: climatological_mean + additional_datasets: *obs_data + pr_CLIM: + <<: *common_settings + short_name: pr + tas_TREND: + <<: *common_settings + preprocessor: annual_trend + + scripts: + climwip: + script: weighting/climwip/main.py + obs_data: native6 + combine_ensemble_members: true + calibrate_performance_sigma: + target: tas_CLIM_future + target_ref: tas_CLIM_reference + performance_contributions: + tas_CLIM: 1 + pr_CLIM: 1 + tas_TREND: 1 + + weighted_temperature_graph: + description: > + Plot timeseries of all included models including the interquartile + range for the weighted and unweighted cases. + themes: + - phys + realms: + - atmos + + variables: + tas: + start_year: 1960 + end_year: 2100 + mip: Amon + preprocessor: temperature_anomalies + scripts: + weighted_temperature_graph: + script: weighting/weighted_temperature_graph.py + ancestors: [calculate_weights_climwip/climwip, tas] + weights: 'weights.nc' + settings: + <<: *ref_period + central_estimate: mean + lower_bound: 16.7 + upper_bound: 83.3 + + weighted_temperature_map: + description: > + Plot map of weighted multi-model mean and difference between weighted + and unweighted multi-model mean.
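+    # For reference: the weighted multi-model mean shown in the map is +    # effectively x_w = sum_i(w_i * x_i) / sum_i(w_i), with w_i the ClimWIP +    # weights read from weights.nc, so the difference to the unweighted mean +    # highlights where the weighting matters.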
+ themes: + - phys + realms: + - atmos + + scripts: + weighted_temperature_map: + script: weighting/weighted_temperature_map.py + ancestors: [calculate_weights_climwip/climwip, + calculate_weights_climwip/tas_CLIM_future, + calculate_weights_climwip/tas_CLIM_reference] + weights: 'weights.nc' + # optional arguments + model_aggregation: mean # [ mean (default) | median | integer in (0, 100) ] + xticks: [0, 15, 30, 45, 60] # if not given, ticks will be set automatically + yticks: [30, 40, 50, 60, 70, 80] diff --git a/esmvaltool/recipes/recipe_cmug_h2o.yml b/esmvaltool/recipes/recipe_cmug_h2o.yml new file mode 100644 index 0000000000..fbd70e7036 --- /dev/null +++ b/esmvaltool/recipes/recipe_cmug_h2o.yml @@ -0,0 +1,228 @@ +# recipe_cmug_h2o.yml +--- +documentation: + title: Diagnostics for ESACCI data, which + evaluate water vapour short wave radiance absorption schemes, + water vapour profiles, + and climatologies at the tropopause + description: | + Recipe for ESACCI water vapour data + + authors: + - weigel_katja + + maintainer: + - weigel_katja + + references: + - acknow_author + - deangelis15nat + + projects: + - cmug + +preprocessors: + pptrop: + regrid: + target_grid: reference_dataset + scheme: linear + spatial_mean: + area_statistics: + operator: mean + tropical_ocean: + mask_landsea: + mask_out: land + regrid: + target_grid: 2.5x2.5 + scheme: linear + extract_region: + start_latitude: -30 + end_latitude: 30 + start_longitude: 0 + end_longitude: 360 + + +diagnostics: + deangelisf3f4: + description: Diagnostic for reproducing figures 3 and 4 from + DeAngelis et al., 2015 Nature. + In this version HOAPS data are used as a replacement for + expected ESACCI water vapour data for ESA-CMUG, CDR-2. + Gridded monthly time series of TCWV in units of kg/m2 + (corresponds to prw) + that cover the global land and ocean areas with + a spatial resolution of 0.05° / 0.5° for + the period July 2002 to December 2017.
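+    # Note on the derive flags used below: derive: true asks ESMValCore to +    # compute a variable (here rsnstcs and rsnstcsnorm) from other CMOR +    # variables via its derivation scripts, while force_derivation: false +    # skips the derivation whenever the variable is already available in the +    # input data.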
+ variables: + rsnstcs: &spatial_mean3_cmip5_r1i1p1_amon_t2ms + preprocessor: spatial_mean + project: CMIP5 + ensemble: r1i1p1 + mip: Amon + derive: true + force_derivation: false + rsnstcsnorm: &tropical_cmip5_r1i1p1_amon_t2ms + preprocessor: tropical_ocean + project: CMIP5 + ensemble: r1i1p1 + mip: Amon + derive: true + force_derivation: false + additional_datasets: + - {dataset: CERES-EBAF, project: obs4MIPs, type: satellite, + level: L3B, version: Ed2-8, start_year: 2003, + end_year: 2014, tier: 1} + tas: + <<: *spatial_mean3_cmip5_r1i1p1_amon_t2ms + derive: false + prw: + <<: *tropical_cmip5_r1i1p1_amon_t2ms + derive: false + additional_datasets: + # - {dataset: ESACCI-WV, project: OBS, type: sat, + # version: HOAPS-v4.0, start_year: 2001, end_year: 2012, tier: 2} + - {dataset: ESACCI-WATERVAPOUR, project: OBS6, type: sat, + version: CDR2-L3-COMBI-05deg-fv3.1, start_year: 2003, end_year: 2014, tier: 3} + additional_datasets: + - {dataset: GFDL-CM3, exp: piControl, start_year: 1, end_year: 150} + - {dataset: GFDL-CM3, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: ACCESS1-0, exp: piControl, start_year: 300, end_year: 449} + - {dataset: ACCESS1-0, exp: abrupt4xCO2, start_year: 300, end_year: 449} + - {dataset: ACCESS1-3, exp: piControl, start_year: 250, end_year: 399} + - {dataset: ACCESS1-3, exp: abrupt4xCO2, start_year: 250, end_year: 399} + - {dataset: CanESM2, exp: piControl, start_year: 2015, end_year: 2164} + - {dataset: CanESM2, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM5, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM5, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: GFDL-ESM2G, exp: piControl, start_year: 1, end_year: 150} + - {dataset: GFDL-ESM2G, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: GFDL-ESM2M, exp: piControl, start_year: 1, end_year: 150} + - {dataset: GFDL-ESM2M, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: GISS-E2-R, exp: piControl, start_year: 3331, end_year: 3480} + - {dataset: GISS-E2-R, exp: abrupt4xCO2, start_year: 1850, + end_year: 1999} + - {dataset: inmcm4, exp: piControl, start_year: 2090, end_year: 2239} + - {dataset: inmcm4, exp: abrupt4xCO2, start_year: 2090, end_year: 2239} + # IPSL-CM5A-MR only 140 years available + - {dataset: IPSL-CM5A-MR, exp: piControl, start_year: 1850, + end_year: 1989} + - {dataset: IPSL-CM5A-MR, exp: abrupt4xCO2, start_year: 1850, + end_year: 1989} + - {dataset: IPSL-CM5B-LR, exp: piControl, start_year: 1850, + end_year: 1999} + - {dataset: IPSL-CM5B-LR, exp: abrupt4xCO2, start_year: 1850, + end_year: 1999} + - {dataset: MIROC-ESM, exp: piControl, start_year: 1800, end_year: 1949} + - {dataset: MIROC-ESM, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: MIROC5, exp: piControl, start_year: 2100, end_year: 2249} + - {dataset: MIROC5, exp: abrupt4xCO2, start_year: 2100, end_year: 2249} + - {dataset: MPI-ESM-LR, exp: piControl, start_year: 2015, end_year: 2164} + - {dataset: MPI-ESM-LR, exp: abrupt4xCO2, start_year: 1850, + end_year: 1999} + - {dataset: MPI-ESM-MR, exp: piControl, start_year: 2015, end_year: 2164} + - {dataset: MPI-ESM-MR, exp: abrupt4xCO2, start_year: 1850, + end_year: 1999} + - {dataset: MRI-CGCM3, exp: piControl, start_year: 1851, end_year: 2000} + - {dataset: MRI-CGCM3, exp: abrupt4xCO2, start_year: 1851, + end_year: 2000} + - {dataset: NorESM1-M, exp: piControl, start_year: 700, end_year: 849} + - {dataset: NorESM1-M, exp: abrupt4xCO2, start_year: 1, end_year: 150} + + 
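+    # Each model is listed twice above, once with its piControl and once with +    # its abrupt4xCO2 run over matching 150-year windows, so that the +    # diagnostic can difference the two experiments per model (IPSL-CM5A-MR +    # is restricted to 140 years because only 140 years are available).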
scripts: + deangelisf3f4: + script: deangelis15nat/deangelisf3f4.py + + tropo: + description: Plot a given variable at tropopause height, + here Specific Humidity (hus) is used. + This will be calculated from the + ESACCI water vapour data CDR-4, which are planned to consist of + three-dimensional vertically resolved + monthly mean water vapour data (in ppmv) with + spatial resolution of 100 km, + covering the troposphere and lower stratosphere. + The envisaged coverage is 2010-2014. The calculation of hus + from water vapour in ppmv will be part of the cmorizer. + Here, ERA-Interim data are used. + variables: + hus: + preprocessor: pptrop + reference_dataset: MIROC6 + mip: Amon + field: T3M + start_year: 2010 + end_year: 2014 + ta: + preprocessor: pptrop + reference_dataset: MIROC6 + mip: Amon + field: T3M + start_year: 2010 + end_year: 2014 + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, + tier: 3} + - {dataset: MIROC6, institute: MIROC, + project: CMIP6, exp: historical, ensemble: r1i1p1f1, grid: gn} + - {dataset: IPSL-CM6A-LR, institute: IPSL, + project: CMIP6, exp: historical, ensemble: r1i1p1f1, + grid: gr} + - {dataset: BCC-CSM2-MR, institute: BCC, project: CMIP6, + exp: historical, ensemble: r1i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, institute: MRI, project: CMIP6, + exp: historical, ensemble: r1i1p1f1, grid: gn} + + + scripts: + hus_cmug: + script: cmug_h2o/diag_tropopause.py + + + tropozonal: + description: Plot the zonal mean of a given variable for + all pressure levels between 250 and 1 hPa and + at tropopause height. + Here Specific Humidity (hus) is used. + This will be calculated from the + ESACCI water vapour data CDR-3, which are planned to contain + the vertically resolved water vapour ECV + in units of ppmv (volume mixing ratio) and will be provided as + zonal monthly means on the SPARC Data Initiative + latitude/pressure level grid + (SPARC, 2017; Hegglin et al., 2013). + It covers the vertical range between 250 hPa and 1 hPa, + and the time period 1985 to the end of 2019. + The calculation of hus from water vapour in ppmv will be + part of the cmorizer. Here, ERA-Interim data are used. + variables: + hus: + preprocessor: pptrop + reference_dataset: MIROC6 + mip: Amon + field: T3M + start_year: 1985 + end_year: 2014 + ta: + preprocessor: pptrop + reference_dataset: MIROC6 + mip: Amon + field: T3M + start_year: 1985 + end_year: 2014 + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, + tier: 3} + - {dataset: MIROC6, institute: MIROC, + project: CMIP6, exp: historical, ensemble: r1i1p1f1, grid: gn} + - {dataset: IPSL-CM6A-LR, institute: IPSL, + project: CMIP6, exp: historical, ensemble: r1i1p1f1, + grid: gr} + - {dataset: BCC-CSM2-MR, institute: BCC, project: CMIP6, + exp: historical, ensemble: r1i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, institute: MRI, project: CMIP6, + exp: historical, ensemble: r1i1p1f1, grid: gn} + + + scripts: + hus_cmug: + script: cmug_h2o/diag_tropopause_zonalmean.py diff --git a/esmvaltool/recipes/recipe_collins13ipcc.yml b/esmvaltool/recipes/recipe_collins13ipcc.yml new file mode 100644 index 0000000000..1ea491d5f8 --- /dev/null +++ b/esmvaltool/recipes/recipe_collins13ipcc.yml @@ -0,0 +1,3196 @@ +# ESMValTool +# recipe_collins13ipcc.yml +--- +documentation: + title: Long-term Climate Change, selected figures + description: | + Recipe reproducing selected figures from IPCC AR5, + chap. 
12 (Collins et al., 2013), mainly difference maps between + future and present (no observations included), timeseries, and + changes scaled by mean global temperature change. + Only a subset of models is included for most examples! + Diagnostic IAV_calc_thetao (zonal ocean temperature, variable thetao) needs + a lot of memory; make sure you have enough memory available to read + the piControl runs, or exclude it! + + authors: + - lorenz_ruth + - senftleben_daniel + + maintainer: + - lorenz_ruth + + references: + - collins13ipcc + + projects: + - crescendo + - esmval + +preprocessors: + preproc_map: + regrid: + target_grid: 1x1 + scheme: linear + + preproc_map_land: + mask_landsea: + mask_out: sea + mask_landseaice: + mask_out: ice + regrid: + target_grid: 1x1 + scheme: linear + + preproc_atm: + regrid: + target_grid: 1x1 + scheme: linear + extract_levels: + levels: reference_dataset + scheme: linear + + preproc_zonal_atm: + regrid: + target_grid: 1x1 + scheme: linear + extract_levels: + levels: reference_dataset + scheme: linear + zonal_statistics: + operator: mean + + preproc_ocean: + regrid: + target_grid: 1x1 + scheme: linear + extract_levels: + levels: [5., 15., 25., 35., 45., 56., 66., 77., 88., 100, 112., 125., + 140., 155., 170., 190., 215., 245., 275., 315., 370., 430., + 510., 610., 725., 870., 1040., 1240., 1450., 1725., 2010., + 2315., 2640., 2985., 3340., 3705., 4075., 4460., 4845., 5200.] + scheme: linear_extrapolate + + preproc_zonal_ocean: + regrid: + target_grid: 1x1 + scheme: linear + extract_levels: + levels: [5., 15., 25., 35., 45., 56., 66., 77., 88., 100, 112., 125., + 140., 155., 170., 190., 215., 245., 275., 315., 370., 430., + 510., 610., 725., 870., 1040., 1240., 1450., 1725., 2010., + 2315., 2640., 2985., 3340., 3705., 4075., 4460., 4845., 5200.] + scheme: linear_extrapolate + zonal_statistics: + operator: mean + +diagnostics: + + ### Mean variable change in one RCP for individual models ################## + tas_change_all_models: + description: Air temperature change for RCP45 for all models as maps. + Needs both the historical and the scenario run for each model to be plotted.
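+      # How the pairing works: the experiment setting in the script settings +      # below matches each model's historical run (1986-2005) with its rcp45 +      # run (2081-2100) and maps the per-model difference, averaged according +      # to time_avg (annualclim = annual climatology).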
+ themes: + - phys + realms: + - atmos + variables: + tas: + mip: Amon + project: CMIP5 + ensemble: r1i1p1 + exp: historical + additional_datasets: + - {dataset: CanESM2, start_year: 1986, end_year: 2005} + - {dataset: CCSM4, start_year: 1986, end_year: 2005} + - {dataset: CESM1-CAM5, start_year: 1986, end_year: 2005} + - {dataset: CSIRO-Mk3-6-0, start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2G, start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5A-LR, start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-LR, start_year: 1986, end_year: 2005} + + - {dataset: CanESM2, exp: rcp45, start_year: 2081, end_year: 2100} + - {dataset: CCSM4, exp: rcp45, start_year: 2081, end_year: 2100} + - {dataset: CESM1-CAM5, exp: rcp45, start_year: 2081, end_year: 2100} + - {dataset: CSIRO-Mk3-6-0, exp: rcp45, start_year: 2081, end_year: 2100} + - {dataset: GFDL-ESM2G, exp: rcp45, start_year: 2081, end_year: 2100} + - {dataset: IPSL-CM5A-LR, exp: rcp45, start_year: 2081, end_year: 2100} + - {dataset: MPI-ESM-LR, exp: rcp45, start_year: 2081, end_year: 2100} + + scripts: + ch12_map_diff_each_model_fig12-9: &settings + script: ipcc_ar5/ch12_map_diff_each_model_fig12-9.ncl + + ### Required attributes + # Time average ('opt' argument of time_operations.ncl) + time_avg: annualclim + # IPCC Scenario, used to pair historical and rcp runs from same model + experiment: rcp45 + + ### Optional attributes + # Map projection (any valid ncl projection, default Robinson) + # projection: Robinson + # Maximum number of plots in vertical + max_vert: 9 + # Maximum number of plots in horizontal + max_hori: 5 + # Plot title + title: "Annual mean surface air temperature change (RCP4.5: 2081-2100)" + # Alternative colormap (path to .rgb file or ncl name) + colormap: $diag_scripts/shared/plot/rgb/ipcc_temperature_with_grey.rgb + # Contour levels for all difference plots + diff_levs: [-2., -1.5, -1., -0.5, 0, 0.5, 1, 1.5, 2, 3, 4, 5, 7, 9, 11] + # Span whole colormap (default false) + span: false + # Unit for colorbar, only affects label + units: (~F35~J~F~C) + # Plot units, triggers conversion into this unit + # plot_units: + + ### Time series plots ####################################################### + ts_line_tas: + description: Timeseries of relative change of temperature in + multiple scenarios incl. 
spread + themes: + - phys + realms: + - atmos + variables: + tas: + project: CMIP5 + ensemble: r1i1p1 + mip: Amon + exp: historical + additional_datasets: + - {dataset: bcc-csm1-1-m, start_year: 1850, end_year: 2005} + - {dataset: BNU-ESM, start_year: 1850, end_year: 2005} + - {dataset: CESM1-BGC, start_year: 1850, end_year: 2005} + - {dataset: CESM1-CAM5, start_year: 1850, end_year: 2005} + #- {dataset: CESM1-CAM5-1-FV2, start_year: 1850, end_year: 2005} # data is missing on ESGF + - {dataset: CESM1-FASTCHEM, start_year: 1850, end_year: 2005} + - {dataset: CESM1-WACCM, start_year: 1955, end_year: 2005, + ensemble: r2i1p1} + - {dataset: CMCC-CESM, start_year: 1850, end_year: 2005} + - {dataset: CMCC-CMS, start_year: 1850, end_year: 2005} + - {dataset: CNRM-CM5, start_year: 1850, end_year: 2005} + - {dataset: CNRM-CM5-2, start_year: 1850, end_year: 2005} + - {dataset: CSIRO-Mk3-6-0, start_year: 1850, end_year: 2005} + - {dataset: CSIRO-Mk3L-1-2, start_year: 1851, end_year: 2005, + ensemble: r1i2p1} + - {dataset: EC-EARTH, start_year: 1850, end_year: 2005, ensemble: r8i1p1} + - {dataset: FGOALS-s2, start_year: 1850, end_year: 2005, ensemble: r2i1p1} + - {dataset: FGOALS-g2, start_year: 1850, end_year: 2005} + - {dataset: FIO-ESM, start_year: 1850, end_year: 2005} + - {dataset: GFDL-CM2p1, start_year: 1861, end_year: 2005, + ensemble: r2i1p1} + - {dataset: GFDL-CM3, start_year: 1860, end_year: 2005} + - {dataset: GFDL-ESM2G, start_year: 1861, end_year: 2005} + - {dataset: GFDL-ESM2M, start_year: 1861, end_year: 2005} + - {dataset: GISS-E2-H, start_year: 1850, end_year: 2005} + - {dataset: GISS-E2-H, start_year: 1850, end_year: 2005, ensemble: r1i1p2} + - {dataset: GISS-E2-H, start_year: 1850, end_year: 2005, ensemble: r1i1p3} + - {dataset: GISS-E2-H-CC, start_year: 1850, end_year: 2005} + - {dataset: GISS-E2-R, start_year: 1850, end_year: 2005} + - {dataset: GISS-E2-R, start_year: 1850, end_year: 2005, ensemble: r1i1p2} + - {dataset: GISS-E2-R, start_year: 1850, end_year: 2005, ensemble: r1i1p3} + - {dataset: GISS-E2-R-CC, start_year: 1850, end_year: 2005} + - {dataset: HadGEM2-AO, start_year: 1860, end_year: 2005} + - {dataset: IPSL-CM5A-LR, start_year: 1850, end_year: 2005} + - {dataset: IPSL-CM5A-MR, start_year: 1850, end_year: 2005} + - {dataset: MIROC4h, start_year: 1950, end_year: 2005} + - {dataset: MIROC5, start_year: 1850, end_year: 2005} + - {dataset: MIROC-ESM, start_year: 1850, end_year: 2005} + - {dataset: MIROC-ESM-CHEM, start_year: 1850, end_year: 2005} + - {dataset: MPI-ESM-LR, start_year: 1850, end_year: 2005} + - {dataset: MPI-ESM-MR, start_year: 1850, end_year: 2005} + - {dataset: MPI-ESM-P, start_year: 1850, end_year: 2005} + - {dataset: MRI-CGCM3, start_year: 1850, end_year: 2005} + - {dataset: MRI-ESM1, start_year: 1851, end_year: 2005} + - {dataset: NorESM1-M, start_year: 1850, end_year: 2005} + + - {dataset: bcc-csm1-1-m, exp: rcp26, start_year: 2006, end_year: 2100} + - {dataset: BNU-ESM, exp: rcp26, start_year: 2006, end_year: 2100} + - {dataset: CESM1-WACCM, exp: rcp26, start_year: 2006, end_year: 2099, + ensemble: r2i1p1} + - {dataset: CNRM-CM5, exp: rcp26, start_year: 2006, end_year: 2100} + - {dataset: CSIRO-Mk3-6-0, exp: rcp26, start_year: 2006, end_year: 2100} + - {dataset: EC-EARTH, exp: rcp26, start_year: 2006, end_year: 2100, + ensemble: r8i1p1} + - {dataset: FGOALS-g2, exp: rcp26, start_year: 2006, end_year: 2100} + - {dataset: FIO-ESM, exp: rcp26, start_year: 2006, end_year: 2100} + - {dataset: GFDL-CM3, exp: rcp26, start_year: 2006, end_year: 2100} + - 
{dataset: GFDL-ESM2G, exp: rcp26, start_year: 2006, end_year: 2100} + - {dataset: GFDL-ESM2M, exp: rcp26, start_year: 2006, end_year: 2100} + - {dataset: GISS-E2-H, exp: rcp26, start_year: 2006, end_year: 2300} + - {dataset: GISS-E2-H, exp: rcp26, start_year: 2006, end_year: 2300, + ensemble: r1i1p2} + - {dataset: GISS-E2-H, exp: rcp26, start_year: 2006, end_year: 2300, + ensemble: r1i1p3} + - {dataset: GISS-E2-R, exp: rcp26, start_year: 2006, end_year: 2300} + - {dataset: GISS-E2-R, exp: rcp26, start_year: 2006, end_year: 2300, + ensemble: r1i1p2} + - {dataset: GISS-E2-R, exp: rcp26, start_year: 2006, end_year: 2300, + ensemble: r1i1p3} + - {dataset: HadGEM2-AO, exp: rcp26, start_year: 2006, end_year: 2100} + - {dataset: IPSL-CM5A-LR, exp: rcp26, start_year: 2006, end_year: 2300} + - {dataset: IPSL-CM5A-MR, exp: rcp26, start_year: 2006, end_year: 2100} + - {dataset: MIROC5, exp: rcp26, start_year: 2006, end_year: 2100} + - {dataset: MIROC-ESM, exp: rcp26, start_year: 2006, end_year: 2100} + - {dataset: MIROC-ESM-CHEM, exp: rcp26, start_year: 2006, end_year: 2100} + - {dataset: MPI-ESM-LR, exp: rcp26, start_year: 2006, end_year: 2100} + - {dataset: MPI-ESM-MR, exp: rcp26, start_year: 2006, end_year: 2100} + - {dataset: MRI-CGCM3, exp: rcp26, start_year: 2006, end_year: 2100} + - {dataset: NorESM1-M, exp: rcp26, start_year: 2006, end_year: 2100} + + - {dataset: bcc-csm1-1-m, exp: rcp45, start_year: 2006, end_year: 2100} + - {dataset: BNU-ESM, exp: rcp45, start_year: 2006, end_year: 2100} + - {dataset: CESM1-BGC, exp: rcp45, start_year: 2006, end_year: 2100} + - {dataset: CESM1-WACCM, exp: rcp45, start_year: 2006, end_year: 2099, + ensemble: r2i1p1} + - {dataset: CMCC-CMS, exp: rcp45, start_year: 2006, end_year: 2100} + - {dataset: CSIRO-Mk3L-1-2, exp: rcp45, start_year: 2006, end_year: 2300, + ensemble: r1i2p1} + - {dataset: EC-EARTH, exp: rcp45, start_year: 2006, end_year: 2100, + ensemble: r8i1p1} + - {dataset: FGOALS-g2, exp: rcp45, start_year: 2006, end_year: 2100} + - {dataset: FIO-ESM, exp: rcp45, start_year: 2006, end_year: 2100} + - {dataset: GFDL-CM3, exp: rcp45, start_year: 2006, end_year: 2300} + - {dataset: GFDL-ESM2G, exp: rcp45, start_year: 2006, end_year: 2100} + - {dataset: GFDL-ESM2M, exp: rcp45, start_year: 2006, end_year: 2200} + - {dataset: GISS-E2-H, exp: rcp45, start_year: 2006, end_year: 2200} + - {dataset: GISS-E2-H, exp: rcp45, start_year: 2006, end_year: 2300, + ensemble: r1i1p2} + - {dataset: GISS-E2-H, exp: rcp45, start_year: 2006, end_year: 2300, + ensemble: r1i1p3} + - {dataset: GISS-E2-H-CC, exp: rcp45, start_year: 2006, end_year: 2100} + - {dataset: GISS-E2-R, exp: rcp45, start_year: 2006, end_year: 2300} + - {dataset: GISS-E2-R, exp: rcp45, start_year: 2006, end_year: 2300, + ensemble: r1i1p2} + - {dataset: GISS-E2-R, exp: rcp45, start_year: 2006, end_year: 2300, + ensemble: r1i1p3} + - {dataset: GISS-E2-R-CC, exp: rcp45, start_year: 2006, end_year: 2100} + - {dataset: HadGEM2-AO, exp: rcp45, start_year: 2006, end_year: 2100} + - {dataset: HadGEM2-CC, exp: rcp45, start_year: 2006, end_year: 2100} + - {dataset: IPSL-CM5A-LR, exp: rcp45, start_year: 2006, end_year: 2300} + - {dataset: IPSL-CM5A-MR, exp: rcp45, start_year: 2006, end_year: 2300} + - {dataset: MIROC5, exp: rcp45, start_year: 2006, end_year: 2100} + - {dataset: MIROC-ESM, exp: rcp45, start_year: 2006, end_year: 2300} + - {dataset: MIROC-ESM-CHEM, exp: rcp45, start_year: 2006, end_year: 2100} + - {dataset: MPI-ESM-LR, exp: rcp45, start_year: 2006, end_year: 2300} + - {dataset: MPI-ESM-MR, exp: 
rcp45, start_year: 2006, end_year: 2100} + - {dataset: MRI-CGCM3, exp: rcp45, start_year: 2006, end_year: 2100} + + - {dataset: bcc-csm1-1-m, exp: rcp60, start_year: 2006, end_year: 2100} + - {dataset: CSIRO-Mk3-6-0, exp: rcp60, start_year: 2006, end_year: 2100} + - {dataset: FIO-ESM, exp: rcp60, start_year: 2006, end_year: 2100} + - {dataset: GFDL-CM3, exp: rcp60, start_year: 2006, end_year: 2100} + - {dataset: GFDL-ESM2G, exp: rcp60, start_year: 2006, end_year: 2100} + - {dataset: GFDL-ESM2M, exp: rcp60, start_year: 2006, end_year: 2100} + - {dataset: GISS-E2-H, exp: rcp60, start_year: 2006, end_year: 2100} + - {dataset: GISS-E2-H, exp: rcp60, start_year: 2006, end_year: 2100, + ensemble: r1i1p2} + - {dataset: GISS-E2-H, exp: rcp60, start_year: 2006, end_year: 2100, + ensemble: r1i1p3} + - {dataset: GISS-E2-R, exp: rcp60, start_year: 2006, end_year: 2100} + - {dataset: GISS-E2-R, exp: rcp60, start_year: 2006, end_year: 2100, + ensemble: r1i1p2} + - {dataset: GISS-E2-R, exp: rcp60, start_year: 2006, end_year: 2100, + ensemble: r1i1p3} + - {dataset: HadGEM2-AO, exp: rcp60, start_year: 2006, end_year: 2100} + - {dataset: IPSL-CM5A-LR, exp: rcp60, start_year: 2006, end_year: 2100} + - {dataset: IPSL-CM5A-MR, exp: rcp60, start_year: 2006, end_year: 2100} + - {dataset: MIROC5, exp: rcp60, start_year: 2006, end_year: 2100} + - {dataset: MIROC-ESM, exp: rcp60, start_year: 2006, end_year: 2100} + - {dataset: MIROC-ESM-CHEM, exp: rcp60, start_year: 2006, end_year: 2100} + - {dataset: MRI-CGCM3, exp: rcp60, start_year: 2006, end_year: 2100} + - {dataset: NorESM1-M, exp: rcp60, start_year: 2006, end_year: 2100} + + - {dataset: bcc-csm1-1-m, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: BNU-ESM, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: CESM1-BGC, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: CESM1-WACCM, exp: rcp85, start_year: 2006, end_year: 2099, + ensemble: r2i1p1} + - {dataset: CMCC-CMS, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: CSIRO-Mk3-6-0, exp: rcp85, start_year: 2006, end_year: 2300} + - {dataset: EC-EARTH, exp: rcp85, start_year: 2006, end_year: 2100, + ensemble: r8i1p1} + - {dataset: FGOALS-g2, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: FIO-ESM, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: GFDL-CM3, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: GFDL-ESM2G, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: GFDL-ESM2M, exp: rcp85, start_year: 2006, end_year: 2200} + - {dataset: GISS-E2-H, exp: rcp85, start_year: 2006, end_year: 2300} + - {dataset: GISS-E2-H, exp: rcp85, start_year: 2006, end_year: 2300, + ensemble: r1i1p2} + - {dataset: GISS-E2-H, exp: rcp85, start_year: 2006, end_year: 2300, + ensemble: r1i1p3} + - {dataset: GISS-E2-H-CC, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: GISS-E2-R, exp: rcp85, start_year: 2006, end_year: 2300} + - {dataset: GISS-E2-R, exp: rcp85, start_year: 2006, end_year: 2300, + ensemble: r1i1p2} + - {dataset: GISS-E2-R, exp: rcp85, start_year: 2006, end_year: 2300, + ensemble: r1i1p3} + - {dataset: GISS-E2-R-CC, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: HadGEM2-AO, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: HadGEM2-CC, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: IPSL-CM5A-LR, exp: rcp85, start_year: 2006, end_year: 2300} + - {dataset: IPSL-CM5A-MR, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: MIROC5, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: 
MIROC-ESM, exp: rcp85, start_year: 2006, end_year: 2100}
+      - {dataset: MIROC-ESM-CHEM, exp: rcp85, start_year: 2006, end_year: 2100}
+      - {dataset: MPI-ESM-LR, exp: rcp85, start_year: 2006, end_year: 2300}
+      - {dataset: MPI-ESM-MR, exp: rcp85, start_year: 2006, end_year: 2100}
+      - {dataset: MRI-CGCM3, exp: rcp85, start_year: 2006, end_year: 2100}
+      - {dataset: MRI-ESM1, exp: rcp85, start_year: 2006, end_year: 2100}
+      - {dataset: NorESM1-M, exp: rcp85, start_year: 2006, end_year: 2100}
+
+    scripts:
+      ch12_ts_line_mean_spread_tas: &ts_settings
+        script: ipcc_ar5/ch12_ts_line_mean_spread.ncl
+
+        ### Required attributes
+        # Plot style
+        # styleset: CMIP5
+        # List of scenarios to be included
+        scenarios: [rcp26, rcp45, rcp60, rcp85]
+        # Start years in time periods (e.g. start of historical runs and rcps)
+        syears: [1850, 2006, 2101]
+        # End years in time periods (e.g. end of historical runs and rcps)
+        eyears: [2005, 2100, 2300]
+        # Start year of reference period (e.g. 1986)
+        begin_ref_year: 1986
+        # End year of reference period (e.g. 2005)
+        end_ref_year: 2005
+        # Labels to use in legend depending on scenarios
+        label: [Historical, RCP2.6, RCP4.5, RCP6.0, RCP8.5]
+
+        ### Optional attributes
+        # Plot title
+        title: "Global surface temperature change (~S~o~N~ C)"
+        # Standard deviations to calculate the spread with
+        # (default 1., ipcc tas used 1.64)
+        spread: 1.64
+        # y-axis title
+        yaxis: "(~S~o~N~ C)"
+        # Minimum value on y-axis
+        ymin: -2
+        # Maximum value on y-axis
+        ymax: 12
+        # Save number of model runs per period and scenario in netcdf
+        # to print in plot
+        model_nr: true
+        # Alternative colormap (path to .rgb file or ncl name)
+        colormap: $diag_scripts/shared/plot/rgb/ipcc_color_tseries.rgb
+        # Minimum and maximum latitudes and longitudes if not global
+        # ts_minlat: 43
+        # ts_maxlat: 54.5
+        # ts_minlon: -12
+        # ts_maxlon: 35
+
+      ch12_plot_ts_line_mean_spread_tas:
+        <<: *ts_settings
+        script: ipcc_ar5/ch12_plot_ts_line_mean_spread.ncl
+
+        ### Required attributes
+        ancestors: ['tas', 'ch12_ts_line_mean_spread_tas']
+
+
+  ts_line_rsut:
+    description: Timeseries of relative change in TOA short wave radiation in
+                 multiple scenarios incl. 
spread + themes: + - phys + realms: + - atmos + variables: + rsut: + project: CMIP5 + ensemble: r1i1p1 + mip: Amon + exp: historical + additional_datasets: + - {dataset: CanESM2, start_year: 1850, end_year: 2005} + - {dataset: CCSM4, start_year: 1850, end_year: 2005} + - {dataset: CESM1-CAM5, start_year: 1850, end_year: 2005} + - {dataset: CSIRO-Mk3-6-0, start_year: 1850, end_year: 2005} + - {dataset: GFDL-ESM2G, start_year: 1861, end_year: 2005} + - {dataset: IPSL-CM5A-LR, start_year: 1850, end_year: 2005} + - {dataset: MPI-ESM-LR, start_year: 1850, end_year: 2005} + + - {dataset: CanESM2, exp: rcp26, start_year: 2006, end_year: 2100} + - {dataset: CCSM4, exp: rcp26, start_year: 2006, end_year: 2100} + - {dataset: CSIRO-Mk3-6-0, exp: rcp26, start_year: 2006, end_year: 2100} + - {dataset: IPSL-CM5A-LR, exp: rcp26, start_year: 2006, end_year: 2100} + - {dataset: MPI-ESM-LR, exp: rcp26, start_year: 2006, end_year: 2100} + + - {dataset: CanESM2, exp: rcp45, start_year: 2006, end_year: 2100} + - {dataset: CCSM4, exp: rcp45, start_year: 2006, end_year: 2100} + - {dataset: CESM1-CAM5, exp: rcp45, start_year: 2006, end_year: 2100} + - {dataset: CSIRO-Mk3-6-0, exp: rcp45, start_year: 2006, end_year: 2100} + - {dataset: MPI-ESM-LR, exp: rcp45, start_year: 2006, end_year: 2100} + + - {dataset: CCSM4, exp: rcp60, start_year: 2006, end_year: 2100} + - {dataset: CESM1-CAM5, exp: rcp60, start_year: 2006, end_year: 2100} + - {dataset: CSIRO-Mk3-6-0, exp: rcp60, start_year: 2006, end_year: 2100} + - {dataset: IPSL-CM5A-LR, exp: rcp60, start_year: 2006, end_year: 2100} + - {dataset: GFDL-ESM2G, exp: rcp60, start_year: 2006, end_year: 2100} + + - {dataset: CanESM2, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: CCSM4, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: CESM1-CAM5, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: CSIRO-Mk3-6-0, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: MPI-ESM-LR, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: GFDL-ESM2G, exp: rcp85, start_year: 2006, end_year: 2100} + + scripts: + ch12_ts_line_mean_spread_rsut: &ts_settings_rsut + script: ipcc_ar5/ch12_ts_line_mean_spread.ncl + + ### Required attributes + # Plot style + # styleset: CMIP5 + # List of scenarios to be included + scenarios: [rcp26, rcp45, rcp60, rcp85] + # Start years in time periods (e.g. start of historical runs and rcps) + syears: [1850, 2006] + # End years in time periods (e.g. 
end of historical runs and rcps)
+        eyears: [2005, 2100]
+        # Start year of reference period
+        begin_ref_year: 1900
+        # End year of reference period
+        end_ref_year: 1950
+        # Labels to use in legend depending on scenarios
+        label: [Historical, RCP2.6, RCP4.5, RCP6.0, RCP8.5]
+
+        ### Optional attributes
+        # Plot title
+        title: "Global TOA short wave radiation change"
+        # Standard deviations to calculate the spread with
+        # (default 1., ipcc tas used 1.64)
+        spread: 1.0
+        # y-axis title
+        yaxis: "(Wm~S~-2~N~)"
+        # Minimum value on y-axis
+        ymin: -3
+        # Maximum value on y-axis
+        ymax: 6
+        # Save number of model runs per period and scenario in netcdf
+        # to print in plot
+        model_nr: true
+        # Alternative colormap (path to .rgb file or ncl name)
+        colormap: $diag_scripts/shared/plot/rgb/ipcc_color_tseries.rgb
+        # Minimum and maximum latitude and longitude if not global
+        # ts_minlat: 43
+        # ts_maxlat: 54.5
+        # ts_minlon: -12
+        # ts_maxlon: 35
+
+      ch12_plot_ts_line_mean_spread_rsut:
+        <<: *ts_settings_rsut
+        script: ipcc_ar5/ch12_plot_ts_line_mean_spread.ncl
+
+        ### Required attributes
+        ancestors: ['rsut', 'ch12_ts_line_mean_spread_rsut']
+
+
+  ts_line_rlut:
+    description: Timeseries of relative change in TOA long wave radiation in
+                 multiple scenarios incl. spread
+    themes:
+      - phys
+    realms:
+      - atmos
+    variables:
+      rlut:
+        mip: Amon
+        project: CMIP5
+        exp: historical
+        ensemble: r1i1p1
+    additional_datasets:
+      - {dataset: CanESM2, start_year: 1850, end_year: 2005}
+      - {dataset: CCSM4, start_year: 1850, end_year: 2005}
+      - {dataset: CESM1-CAM5, start_year: 1850, end_year: 2005}
+      - {dataset: GFDL-ESM2G, start_year: 1861, end_year: 2005}
+      - {dataset: IPSL-CM5A-LR, start_year: 1850, end_year: 2005}
+      - {dataset: MPI-ESM-LR, start_year: 1850, end_year: 2005}
+
+      - {dataset: CanESM2, exp: rcp26, start_year: 2006, end_year: 2100}
+      - {dataset: CCSM4, exp: rcp26, start_year: 2006, end_year: 2100}
+      - {dataset: CESM1-CAM5, exp: rcp26, start_year: 2006, end_year: 2100}
+      - {dataset: IPSL-CM5A-LR, exp: rcp26, start_year: 2006, end_year: 2100}
+      - {dataset: MPI-ESM-LR, exp: rcp26, start_year: 2006, end_year: 2100}
+
+      - {dataset: CanESM2, exp: rcp45, start_year: 2006, end_year: 2100}
+      - {dataset: CCSM4, exp: rcp45, start_year: 2006, end_year: 2100}
+      - {dataset: CESM1-CAM5, exp: rcp45, start_year: 2006, end_year: 2100}
+      - {dataset: IPSL-CM5A-LR, exp: rcp45, start_year: 2006, end_year: 2100}
+      - {dataset: MPI-ESM-LR, exp: rcp45, start_year: 2006, end_year: 2100}
+
+      - {dataset: CCSM4, exp: rcp60, start_year: 2006, end_year: 2100}
+      - {dataset: CESM1-CAM5, exp: rcp60, start_year: 2006, end_year: 2100}
+      - {dataset: CSIRO-Mk3-6-0, exp: rcp60, start_year: 2006, end_year: 2100}
+      - {dataset: IPSL-CM5A-LR, exp: rcp60, start_year: 2006, end_year: 2100}
+      - {dataset: GFDL-ESM2G, exp: rcp60, start_year: 2006, end_year: 2100}
+
+      - {dataset: CanESM2, exp: rcp85, start_year: 2006, end_year: 2100}
+      - {dataset: CCSM4, exp: rcp85, start_year: 2006, end_year: 2100}
+      - {dataset: IPSL-CM5A-LR, exp: rcp85, start_year: 2006, end_year: 2100}
+      - {dataset: GFDL-ESM2G, exp: rcp85, start_year: 2006, end_year: 2100}
+      - {dataset: MPI-ESM-LR, exp: rcp85, start_year: 2006, end_year: 2100}
+
+    scripts:
+      ch12_ts_line_mean_spread_rlut: &ts_settings_rlut
+        script: ipcc_ar5/ch12_ts_line_mean_spread.ncl
+
+        ### Required attributes
+        # Plot style
+        styleset: CMIP5
+        # List of scenarios to be included
+        scenarios: [rcp26, rcp45, rcp60, rcp85]
+        # Start years in time periods (e.g. 
start of historical runs and rcps)
+        syears: [1850, 2006]
+        # End years in time periods (e.g. end of historical runs and rcps)
+        eyears: [2005, 2100]
+        # Start year of reference period
+        begin_ref_year: 1900
+        # End year of reference period
+        end_ref_year: 1950
+        # Labels to use in legend depending on scenarios
+        label: [Historical, RCP2.6, RCP4.5, RCP6.0, RCP8.5]
+
+        ### Optional attributes
+        # Plot title
+        title: "Global TOA long wave radiation change"
+        # Standard deviations to calculate the spread with
+        # (default 1., ipcc tas used 1.64)
+        spread: 1.0
+        # y-axis title
+        yaxis: "(Wm~S~-2~N~)"
+        # Minimum value on y-axis
+        ymin: -3
+        # Maximum value on y-axis
+        ymax: 4
+        # Save number of model runs per period and scenario
+        # in netcdf to print in plot
+        model_nr: true
+        # Alternative colormap (path to .rgb file or ncl name)
+        colormap: $diag_scripts/shared/plot/rgb/ipcc_color_tseries.rgb
+        # Minimum and maximum latitude and longitude if not global
+        # ts_minlat: 43
+        # ts_maxlat: 54.5
+        # ts_minlon: -12
+        # ts_maxlon: 35
+
+      ch12_plot_ts_line_mean_spread_rlut:
+        <<: *ts_settings_rlut
+        script: ipcc_ar5/ch12_plot_ts_line_mean_spread.ncl
+
+        ### Required attributes
+        ancestors: ['rlut', 'ch12_ts_line_mean_spread_rlut']
+
+
+  ts_line_rtmt:
+    description: Timeseries of relative change in TOA net radiation in
+                 multiple scenarios incl. spread
+    themes:
+      - phys
+    realms:
+      - atmos
+    variables:
+      rtmt:
+        mip: Amon
+        project: CMIP5
+        exp: historical
+        ensemble: r1i1p1
+    additional_datasets:
+      - {dataset: CanESM2, start_year: 1850, end_year: 2005}
+      - {dataset: CCSM4, start_year: 1850, end_year: 2005}
+      - {dataset: CESM1-CAM5, start_year: 1850, end_year: 2005}
+      - {dataset: CSIRO-Mk3-6-0, start_year: 1850, end_year: 2005}
+      - {dataset: GFDL-CM3, start_year: 1860, end_year: 2005}
+      - {dataset: IPSL-CM5A-LR, start_year: 1850, end_year: 2005}
+      - {dataset: MPI-ESM-LR, start_year: 1850, end_year: 2005}
+
+      - {dataset: CanESM2, exp: rcp26, start_year: 2006, end_year: 2100}
+      - {dataset: CCSM4, exp: rcp26, start_year: 2006, end_year: 2100}
+      - {dataset: CESM1-CAM5, exp: rcp26, start_year: 2006, end_year: 2100}
+      - {dataset: IPSL-CM5A-LR, exp: rcp26, start_year: 2006, end_year: 2100}
+      - {dataset: MPI-ESM-LR, exp: rcp26, start_year: 2006, end_year: 2100}
+
+      - {dataset: CanESM2, exp: rcp45, start_year: 2006, end_year: 2100}
+      - {dataset: CCSM4, exp: rcp45, start_year: 2006, end_year: 2100}
+      - {dataset: CESM1-CAM5, exp: rcp45, start_year: 2006, end_year: 2100}
+      - {dataset: IPSL-CM5A-LR, exp: rcp45, start_year: 2006, end_year: 2100}
+      - {dataset: MPI-ESM-LR, exp: rcp45, start_year: 2006, end_year: 2100}
+
+      - {dataset: CCSM4, exp: rcp60, start_year: 2006, end_year: 2100}
+      - {dataset: CESM1-CAM5, exp: rcp60, start_year: 2006, end_year: 2100}
+      - {dataset: CSIRO-Mk3-6-0, exp: rcp60, start_year: 2006, end_year: 2100}
+      - {dataset: IPSL-CM5A-LR, exp: rcp60, start_year: 2006, end_year: 2100}
+      #- {dataset: GFDL-CM3, exp: rcp60, start_year: 2006, end_year: 2100} # data is missing on ESGF
+
+      - {dataset: CanESM2, exp: rcp85, start_year: 2006, end_year: 2100}
+      - {dataset: CCSM4, exp: rcp85, start_year: 2006, end_year: 2100}
+      - {dataset: CESM1-CAM5, exp: rcp85, start_year: 2006, end_year: 2100}
+      - {dataset: IPSL-CM5A-LR, exp: rcp85, start_year: 2006, end_year: 2100}
+      - {dataset: MPI-ESM-LR, exp: rcp85, start_year: 2006, end_year: 2100}
+
+    scripts:
+      ch12_ts_line_mean_spread_rtmt: &ts_settings_rtmt
+        script: ipcc_ar5/ch12_ts_line_mean_spread.ncl
+
+        ### Required attributes
+        # Plot style
+        styleset: 
CMIP5
+        # List of scenarios to be included
+        scenarios: [rcp26, rcp45, rcp60, rcp85]
+        # Start years in time periods (e.g. start of historical runs and rcps)
+        syears: [1850, 2006]
+        # End years in time periods (e.g. end of historical runs and rcps)
+        eyears: [2005, 2100]
+        # Start year of reference period
+        begin_ref_year: 1900
+        # End year of reference period
+        end_ref_year: 1950
+        # Labels to use in legend depending on scenarios
+        label: [Historical, RCP2.6, RCP4.5, RCP6.0, RCP8.5]
+
+        ### Optional attributes
+        # Plot title
+        title: "Global TOA net radiation change"
+        # Standard deviations to calculate the spread with
+        # (default 1., ipcc tas used 1.64)
+        spread: 1.0
+        # y-axis title
+        yaxis: "(Wm~S~-2~N~)"
+        # Minimum value on y-axis
+        ymin: -3
+        # Maximum value on y-axis
+        ymax: 6
+        # Save number of model runs per period and scenario
+        # in netcdf to print in plot
+        model_nr: true
+        # Alternative colormap (path to .rgb file or ncl name)
+        colormap: $diag_scripts/shared/plot/rgb/ipcc_color_tseries.rgb
+        # Minimum and maximum latitude and longitude if not global
+        # ts_minlat: 43
+        # ts_maxlat: 54.5
+        # ts_minlon: -12
+        # ts_maxlon: 35
+
+      ch12_plot_ts_line_mean_spread_rtmt:
+        <<: *ts_settings_rtmt
+        script: ipcc_ar5/ch12_plot_ts_line_mean_spread.ncl
+
+        ### Required attributes
+        ancestors: ['rtmt', 'ch12_ts_line_mean_spread_rtmt']
+
+
+  ### Mean temperature change in RCPs for multi model mean ###################
+  IAV_calc_tas:
+    description: Calculate temperature interannual variability
+                 for stippling significance
+    themes:
+      - varmodes
+    realms:
+      - atmos
+    variables:
+      tas:
+        preprocessor: preproc_map
+        project: CMIP5
+        mip: Amon
+        exp: piControl
+        ensemble: r1i1p1
+    additional_datasets:
+
+      - {dataset: CanESM2, start_year: 2015, end_year: 3010}
+      - {dataset: CCSM4, start_year: 250, end_year: 1300}
+      - {dataset: CESM1-CAM5, start_year: 1, end_year: 319}
+      - {dataset: CSIRO-Mk3-6-0, start_year: 1, end_year: 500}
+      - {dataset: GFDL-ESM2G, start_year: 1, end_year: 500}
+      - {dataset: IPSL-CM5A-LR, start_year: 1800, end_year: 2799}
+      - {dataset: MPI-ESM-LR, start_year: 1850, end_year: 2849}
+
+    scripts:
+      ch12_calc_IAV_for_stippandhatch_tas: &IAV_settings_tas
+        script: ipcc_ar5/ch12_calc_IAV_for_stippandhatch.ncl
+
+        ### Required attributes
+        # Plot style
+        styleset: CMIP5
+        # Time average ('opt' argument of time_operations.ncl,
+        # annualclim, seasonalclim)
+        time_avg: annualclim
+
+        ### Optional attributes
+        # Length of period in years to calculate variability over
+        # (default total time period)
+        periodlength: 20.
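+        # Hedged note (interpretation, not from the original recipe): with
+        # periodlength set, the variability used for the stippling and
+        # hatching is presumably estimated from means over consecutive
+        # periods of this length, here 20 years to match the 2081-2100
+        # windows of the *_change_mmm diagnostics below, rather than from
+        # the full piControl series.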
+        # Calculate IAV from multi-model mean (mmm, default) or
+        # save individual models (each)
+        # iavmode:
+
+
+  tas_change_mmm:
+    description: Air temperature change (annual mean or seasonal)
+                 in multi-model mean with significance
+    themes:
+      - phys
+    realms:
+      - atmos
+    variables:
+      tas:
+        preprocessor: preproc_map
+        project: CMIP5
+        ensemble: r1i1p1
+        mip: Amon
+    additional_datasets:
+      - {dataset: GFDL-ESM2G, exp: historical, start_year: 1986, end_year: 2005}
+      - {dataset: MPI-ESM-LR, exp: historical, start_year: 1986, end_year: 2005}
+
+      - {dataset: GFDL-ESM2G, exp: rcp45, start_year: 2081, end_year: 2100}
+      - {dataset: MPI-ESM-LR, exp: rcp45, start_year: 2081, end_year: 2100}
+
+      - {dataset: GFDL-ESM2G, exp: rcp85, start_year: 2081, end_year: 2100}
+      - {dataset: MPI-ESM-LR, exp: rcp85, start_year: 2081, end_year: 2100}
+
+    scripts:
+      ch12_calc_map_diff_mmm_tas: &tas_map_settings
+        script: ipcc_ar5/ch12_calc_map_diff_mmm_stippandhatch.ncl
+
+        ### Required attributes
+        ancestors: ['tas', 'IAV_calc_tas/ch12_calc_IAV_for_stippandhatch_tas']
+        # List of scenarios to be included
+        scenarios: [rcp45, rcp85]
+        # Name of reference run, default is historical
+        reference_run: historical
+        # List with start years of periods to be included
+        periods: [2081]
+        # Time average ('opt' argument of time_operations.ncl,
+        # annualclim, seasonalclim)
+        time_avg: annualclim
+        # Labels on top of each panel (loop is seasons, scenarios, periods)
+        label: ["RCP4.5 2081-2100", "RCP8.5 2081-2100"]
+
+        ### Optional attributes
+        # List with seasons index (required if time_avg "seasonalclim")
+        # (DJF:0, MAM:1, JJA:2, SON:3)
+        # seasons: [2]
+        # Calculate IAV from multi-model mean (mmm, default) or
+        # save individual models (each) (if "each", make sure to have them in
+        # IAV_calc_tas and set iavmode to "each" there too)
+        iavmode: mmm
+        # Show difference in percent (1, default 0)
+        # percent:
+
+      ch12_plot_map_diff_mmm_stipp_tas:
+        <<: *tas_map_settings
+        script: ipcc_ar5/ch12_plot_map_diff_mmm_stipp.ncl
+
+        ### Required attributes
+        ancestors: ['tas', 'ch12_calc_map_diff_mmm_tas']
+
+        ### Optional attributes
+        # Map projection (any valid ncl projection, default Robinson)
+        # projection: Robinson
+        # Plot title
+        title: "Annual mean temperature change"
+        # Contour levels for all difference plots
+        diff_levs: [-2, -1.5, -1, -0.5, 0, 0.5, 1, 1.5, 2, 3, 4, 5, 6, 7, 9]
+        # Maximum number of plots in vertical
+        max_vert: 2
+        # Maximum number of plots in horizontal
+        max_hori: 2
+        # Alternative colormap (path to .rgb file or ncl name)
+        colormap: $diag_scripts/shared/plot/rgb/ipcc_temperature_with_grey.rgb
+        # Span whole colormap (default false)
+        span: false
+        # Plot significance as stippling
+        sig: true
+        # Plot uncertainty as hatching
+        not_sig: true
+        # Alternative name for output plot
+        # (default diagnostic + varname + time_avg)
+        # pltname:
+        # Print number of models at top-right of the figure
+        model_nr: true
+
+
+  ### Mean Precipitation change in RCPs for multi model mean #################
+  IAV_calc_pr:
+    description: Calculate precipitation interannual variability
+                 for stippling significance
+    themes:
+      - varmodes
+    realms:
+      - atmos
+    variables:
+      pr:
+        preprocessor: preproc_map
+        project: CMIP5
+        mip: Amon
+        exp: piControl
+        ensemble: r1i1p1
+    additional_datasets:
+      - {dataset: IPSL-CM5A-LR, start_year: 1800, end_year: 2799}
+      - {dataset: GFDL-ESM2G, start_year: 1, end_year: 500}
+      - {dataset: MPI-ESM-LR, start_year: 1850, end_year: 2849}
+
+    scripts:
+      ch12_calc_IAV_for_stippandhatch_pr: &IAV_settings_pr
+        script: ipcc_ar5/ch12_calc_IAV_for_stippandhatch.ncl
+
+        ### Required attributes
+        # Plot style
+        styleset: CMIP5
+        # Time average ('opt' argument of time_operations.ncl,
+        # seasonalclim, annualclim)
+        time_avg: seasonalclim
+
+        ### Optional attributes
+        # Calculate IAV from multi-model mean (mmm, default) or
+        # save individual models (each)
+        iavmode: mmm
+        # Length of period in years to calculate variability over
+        # (default total time period)
+        periodlength: 20.
+
+
+  pr_change_mmm:
+    description: Precipitation change (annual mean or seasonal)
+                 in multi-model mean with significance
+    themes:
+      - phys
+    realms:
+      - atmos
+    variables:
+      pr:
+        preprocessor: preproc_map
+        project: CMIP5
+        ensemble: r1i1p1
+        mip: Amon
+        exp: historical
+    additional_datasets:
+      - {dataset: CSIRO-Mk3-6-0, start_year: 1986, end_year: 2005}
+      - {dataset: GFDL-ESM2G, start_year: 1986, end_year: 2005}
+      - {dataset: IPSL-CM5A-LR, start_year: 1986, end_year: 2005}
+      - {dataset: MPI-ESM-LR, start_year: 1986, end_year: 2005}
+
+      - {dataset: CSIRO-Mk3-6-0, exp: rcp85, start_year: 2081, end_year: 2100}
+      - {dataset: GFDL-ESM2G, exp: rcp85, start_year: 2081, end_year: 2100}
+      - {dataset: IPSL-CM5A-LR, exp: rcp85, start_year: 2081, end_year: 2100}
+      - {dataset: MPI-ESM-LR, exp: rcp85, start_year: 2081, end_year: 2100}
+
+      - {dataset: CSIRO-Mk3-6-0, exp: rcp85, start_year: 2181, end_year: 2200}
+      - {dataset: IPSL-CM5A-LR, exp: rcp85, start_year: 2181, end_year: 2200}
+      - {dataset: MPI-ESM-LR, exp: rcp85, start_year: 2181, end_year: 2200}
+
+    scripts:
+      ch12_calc_map_diff_mmm_pr: &pr_map_settings
+        script: ipcc_ar5/ch12_calc_map_diff_mmm_stippandhatch.ncl
+
+        ### Required attributes
+        ancestors: ['pr', 'IAV_calc_pr/ch12_calc_IAV_for_stippandhatch_pr']
+        # List of scenarios to be included
+        scenarios: [rcp85]
+        # Start years of periods to be included as list
+        periods: [2081, 2181]
+        # Time average ('opt' argument of time_operations.ncl,
+        # seasonalclim, annualclim)
+        time_avg: seasonalclim
+        # Labels on top of each panel (loop is seasons, scenarios, periods)
+        label: ["RCP8.5 2081-2100 DJF", "RCP8.5 2181-2200 DJF",
+                "RCP8.5 2081-2100 MAM", "RCP8.5 2181-2200 MAM",
+                "RCP8.5 2081-2100 JJA", "RCP8.5 2181-2200 JJA",
+                "RCP8.5 2081-2100 SON", "RCP8.5 2181-2200 SON"]
+
+        ### Optional attributes
+        # List with seasons index (required if time_avg "seasonalclim")
+        # (DJF:0, MAM:1, JJA:2, SON:3)
+        seasons: [0, 1, 2, 3]
+        # Calculate IAV from multi-model mean (mmm, default) or
+        # save individual models (each) (if "each", make sure to have them in
+        # IAV_calc_pr and set iavmode to "each" there too)
+        iavmode: mmm
+        # Show difference in percent (1, default 0)
+        percent: 1
+
+      ch12_plot_map_diff_mmm_stipp_pr:
+        <<: *pr_map_settings
+        script: ipcc_ar5/ch12_plot_map_diff_mmm_stipp.ncl
+
+        ### Required attributes
+        ancestors: ['pr', 'ch12_calc_map_diff_mmm_pr']
+
+        ### Optional attributes
+        # Map projection (any valid ncl projection, default Robinson)
+        # projection: Robinson
+        title: "Seasonal mean percentage precipitation change"
+        # Contour levels for all difference plots
+        diff_levs: [-50, -40, -30, -20, -10, 0, 10, 20, 30, 40, 50]
+        # Maximum number of plots in vertical
+        max_vert: 4
+        # Maximum number of plots in horizontal
+        max_hori: 2
+        # Alternative colormap (path to .rgb file or ncl name)
+        colormap: $diag_scripts/shared/plot/rgb/ipcc-precip-delta.rgb
+        # Span whole colormap (default false)
+        # span:
+        # Plot significance as stippling
+        sig: true
+        # Plot uncertainty as hatching
+        not_sig: true
+        # Alternative name for 
output plot + # (default diagnostic + varname + time_avg) + # pltname: + # Print number of models at top-right of the figure + model_nr: true + + + ### Mean Total Radiation change in RCPs for multi model mean ############### + IAV_calc_rtmt: + description: Calculate total radiation interannual variability + for stippling significance + themes: + - varmodes + realms: + - atmos + variables: + rtmt: + preprocessor: preproc_map + project: CMIP5 + mip: Amon + exp: piControl + ensemble: r1i1p1 + additional_datasets: + - {dataset: bcc-csm1-1, start_year: 1, end_year: 500} + - {dataset: bcc-csm1-1-m, start_year: 1, end_year: 400} + - {dataset: BNU-ESM, start_year: 1450, end_year: 2008} + - {dataset: CanESM2, start_year: 2015, end_year: 3010} + - {dataset: CESM1-CAM5, start_year: 1, end_year: 319} + - {dataset: CNRM-CM5, start_year: 1850, end_year: 2699} + - {dataset: CSIRO-Mk3-6-0, start_year: 1, end_year: 500} + - {dataset: inmcm4, start_year: 1850, end_year: 2349} + - {dataset: IPSL-CM5A-LR, start_year: 1800, end_year: 2799} + - {dataset: IPSL-CM5A-MR, start_year: 1800, end_year: 2099} + - {dataset: IPSL-CM5B-LR, start_year: 1830, end_year: 2129} + - {dataset: MIROC-ESM, start_year: 1800, end_year: 2429} + - {dataset: MIROC-ESM-CHEM, start_year: 1846, end_year: 2100} + - {dataset: NorESM1-M, start_year: 700, end_year: 1200} + - {dataset: NorESM1-ME, start_year: 901, end_year: 1152} + + scripts: + ch12_calc_IAV_for_stippandhatch_rtmt: &IAV_settings_rtmt + script: ipcc_ar5/ch12_calc_IAV_for_stippandhatch.ncl + + ### Required attributes + # Plot style + styleset: CMIP5 + # Time average ('opt' argument of time_operations.ncl, + # seasonalclim, annualclim) + time_avg: annualclim + + ### Optional attributes + # Calculate IAV from multi-model mean (mmm, default) or + # save individual models (each) + iavmode: mmm + # Length of period in years to calculate variability over + # (default total time period) + periodlength: 20. 
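+        # Hedged sketch (commented out, not part of the original recipe):
+        # to base the stippling and hatching on each model's own
+        # variability instead of the multi-model mean, iavmode above would
+        # be switched to "each" both here and in the paired
+        # ch12_calc_map_diff_mmm_rtmt script below:
+        # iavmode: each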
+
+
+  rtmt_change_mmm:
+    description: Total Radiation change (annual mean or seasonal)
+                 in multi-model mean with significance
+    themes:
+      - phys
+    realms:
+      - atmos
+    variables:
+      rtmt:
+        preprocessor: preproc_map
+        project: CMIP5
+        ensemble: r1i1p1
+        mip: Amon
+        exp: historical
+    additional_datasets:
+
+      - {dataset: CESM1-CAM5, start_year: 1900, end_year: 1950}
+      - {dataset: IPSL-CM5A-LR, start_year: 1900, end_year: 1950}
+      - {dataset: NorESM1-ME, start_year: 1900, end_year: 1950}
+
+      - {dataset: CESM1-CAM5, exp: rcp45, start_year: 2081, end_year: 2100}
+      - {dataset: IPSL-CM5A-LR, exp: rcp45, start_year: 2081, end_year: 2100}
+      - {dataset: NorESM1-ME, exp: rcp45, start_year: 2081, end_year: 2100}
+
+    scripts:
+      ch12_calc_map_diff_mmm_rtmt: &rtmt_map_settings
+        script: ipcc_ar5/ch12_calc_map_diff_mmm_stippandhatch.ncl
+
+        ### Required attributes
+        ancestors: ['rtmt',
+                    'IAV_calc_rtmt/ch12_calc_IAV_for_stippandhatch_rtmt']
+        # List of scenarios to be included
+        scenarios: [rcp45]
+        # Start years of periods to be included as list
+        periods: [2081]
+        # Time average ('opt' argument of time_operations.ncl,
+        # seasonalclim, annualclim)
+        time_avg: annualclim
+        # Labels on top of each panel (loop is seasons, scenarios, periods)
+        label: ["RCP4.5 2081-2100"]
+
+        ### Optional attributes
+        # List with seasons index (required if time_avg "seasonalclim")
+        # (DJF:0, MAM:1, JJA:2, SON:3)
+        # seasons: [2]
+        # Calculate IAV from multi-model mean (mmm, default) or
+        # save individual models (each) (if "each", make sure to have them in
+        # IAV_calc_rtmt and set iavmode to "each" there too)
+        iavmode: mmm
+        # Show difference in percent (1, default 0)
+        percent: 0
+
+      ch12_plot_map_diff_mmm_stipp_rtmt:
+        <<: *rtmt_map_settings
+        script: ipcc_ar5/ch12_plot_map_diff_mmm_stipp.ncl
+
+        ### Required attributes
+        ancestors: ['rtmt', 'ch12_calc_map_diff_mmm_rtmt']
+
+        ### Optional attributes
+        # Map projection (any valid ncl projection, default Robinson)
+        # projection: Robinson
+        title: "Annual mean TOA net radiation change"
+        # Contour levels for all difference plots
+        diff_levs: [-6., -5.25, -4.5, -3.75, -3., -2.25, -1.5, -0.75, 0,
+                    0.75, 1.5, 2.25, 3., 3.75, 4.5, 5.25, 6.]
+ # Maximum number of plots in vertical + max_vert: 2 + # Maximum number of plots in horizontal + max_hori: 2 + # Alternative colormap (path to .rgb file or ncl name) + colormap: $diag_scripts/shared/plot/rgb/ipcc_colors_blu2red_centered.rgb + # Span whole colormap (default false) + # span: + # Plot significance as stippling + sig: true + # Plot uncertainty as hatching + not_sig: true + # Alternative name for output plot + # (default diagnostic + varname + time_avg) + # pltname: + # Print number of models at top-right of the figure + model_nr: true + + + ### Mean Long Wave Radiation change in RCPs for multi model mean ########### + IAV_calc_rlut: + description: Calculate long wave radiation interannual variability + for stippling significance + themes: + - varmodes + realms: + - atmos + variables: + rlut: + preprocessor: preproc_map + project: CMIP5 + mip: Amon + exp: piControl + ensemble: r1i1p1 + additional_datasets: + - {dataset: bcc-csm1-1, start_year: 1, end_year: 500} + - {dataset: bcc-csm1-1-m, start_year: 1, end_year: 400} + - {dataset: BNU-ESM, start_year: 1450, end_year: 2008} + - {dataset: CanESM2, start_year: 2015, end_year: 3010} + - {dataset: CESM1-CAM5, start_year: 1, end_year: 319} + - {dataset: CNRM-CM5, start_year: 1850, end_year: 2699} + - {dataset: CSIRO-Mk3-6-0, start_year: 1, end_year: 500} + - {dataset: inmcm4, start_year: 1850, end_year: 2349} + - {dataset: IPSL-CM5A-LR, start_year: 1800, end_year: 2799} + - {dataset: IPSL-CM5A-MR, start_year: 1800, end_year: 2099} + - {dataset: IPSL-CM5B-LR, start_year: 1830, end_year: 2129} + - {dataset: MIROC-ESM, start_year: 1800, end_year: 2429} + - {dataset: MIROC-ESM-CHEM, start_year: 1846, end_year: 2100} + - {dataset: NorESM1-M, start_year: 700, end_year: 1200} + - {dataset: NorESM1-ME, start_year: 901, end_year: 1152} + + scripts: + ch12_calc_IAV_for_stippandhatch_rlut: &IAV_settings_rlut + script: ipcc_ar5/ch12_calc_IAV_for_stippandhatch.ncl + + ### Required attributes + # Plot style + styleset: CMIP5 + # Time average ('opt' argument of time_operations.ncl, + # seasonalclim, annualclim) + time_avg: annualclim + + ### Optional attributes + # Calculate IAV from multi-model mean (mmm, default) or + # save individual models (each) + iavmode: mmm + # Length of period in years to calculate variability over + # (default total time period) + periodlength: 20. 
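+        # Hedged sketch (commented out): a seasonal version of this figure
+        # would switch time_avg above to seasonal climatologies and add a
+        # matching seasons index list (DJF:0 ... SON:3) to the paired
+        # rlut_change_mmm scripts:
+        # time_avg: seasonalclim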
+
+
+  rlut_change_mmm:
+    description: Long Wave Radiation change (annual mean or seasonal)
+                 in multi-model mean with significance
+    themes:
+      - phys
+    realms:
+      - atmos
+    variables:
+      rlut:
+        preprocessor: preproc_map
+        project: CMIP5
+        ensemble: r1i1p1
+        mip: Amon
+        exp: historical
+    additional_datasets:
+
+      - {dataset: CESM1-CAM5, start_year: 1900, end_year: 1950}
+      - {dataset: IPSL-CM5A-LR, start_year: 1900, end_year: 1950}
+      - {dataset: NorESM1-ME, start_year: 1900, end_year: 1950}
+
+      - {dataset: CESM1-CAM5, exp: rcp45, start_year: 2081, end_year: 2100}
+      - {dataset: IPSL-CM5A-LR, exp: rcp45, start_year: 2081, end_year: 2100}
+      - {dataset: NorESM1-ME, exp: rcp45, start_year: 2081, end_year: 2100}
+
+    scripts:
+      ch12_calc_map_diff_mmm_rlut: &rlut_map_settings
+        script: ipcc_ar5/ch12_calc_map_diff_mmm_stippandhatch.ncl
+
+        ### Required attributes
+        ancestors: ['rlut',
+                    'IAV_calc_rlut/ch12_calc_IAV_for_stippandhatch_rlut']
+        # List of scenarios to be included
+        scenarios: [rcp45]
+        # Start years of periods to be included as list
+        periods: [2081]
+        # Time average ('opt' argument of time_operations.ncl,
+        # seasonalclim, annualclim)
+        time_avg: annualclim
+        # Labels on top of each panel (loop is seasons, scenarios, periods)
+        label: ["RCP4.5 2081-2100"]
+
+        ### Optional attributes
+        # List with seasons index (required if time_avg "seasonalclim")
+        # (DJF:0, MAM:1, JJA:2, SON:3)
+        # seasons: [2]
+        # Calculate IAV from multi-model mean (mmm, default) or
+        # save individual models (each) (if "each", make sure to have them in
+        # IAV_calc_rlut and set iavmode to "each" there too)
+        iavmode: mmm
+        # Show difference in percent (1, default 0)
+        percent: 0
+        # Plot units
+        plot_units: "-W m-2" # change sign of radiation flux as in IPCC
+
+      ch12_plot_map_diff_mmm_stipp_rlut:
+        <<: *rlut_map_settings
+        script: ipcc_ar5/ch12_plot_map_diff_mmm_stipp.ncl
+
+        ### Required attributes
+        ancestors: ['rlut', 'ch12_calc_map_diff_mmm_rlut']
+
+        ### Optional attributes
+        # Map projection (any valid ncl projection, default Robinson)
+        # projection: Robinson
+        title: "Annual mean long wave TOA radiation change"
+        # Contour levels for all difference plots
+        diff_levs: [-6., -5.25, -4.5, -3.75, -3., -2.25, -1.5, -0.75, 0,
+                    0.75, 1.5, 2.25, 3., 3.75, 4.5, 5.25, 6.]
+ # Maximum number of plots in vertical + max_vert: 2 + # Maximum number of plots in horizontal + max_hori: 2 + # Alternative colormap (path to .rgb file or ncl name) + colormap: $diag_scripts/shared/plot/rgb/ipcc_colors_blu2red_centered.rgb + # Span whole colormap (default false) + # span: + # Plot significance as stippling + sig: true + # Plot uncertainty as hatching + not_sig: true + # Alternative name for output plot + # (default diagnostic + varname + time_avg) + # pltname: + # Print number of models at top-right of the figure + model_nr: true + + + ### Mean Short Wave Radiation change in RCPs for multi model mean ########## + IAV_calc_rsut: + description: Calculate short wave radiation interannual variability + for stippling significance + themes: + - varmodes + realms: + - atmos + variables: + rsut: + preprocessor: preproc_map + project: CMIP5 + mip: Amon + exp: piControl + ensemble: r1i1p1 + additional_datasets: + - {dataset: bcc-csm1-1, start_year: 1, end_year: 500} + - {dataset: bcc-csm1-1-m, start_year: 1, end_year: 400} + - {dataset: BNU-ESM, start_year: 1450, end_year: 2008} + - {dataset: CanESM2, start_year: 2015, end_year: 3010} + - {dataset: CESM1-CAM5, start_year: 1, end_year: 319} + - {dataset: CNRM-CM5, start_year: 1850, end_year: 2699} + - {dataset: CSIRO-Mk3-6-0, start_year: 1, end_year: 500} + - {dataset: inmcm4, start_year: 1850, end_year: 2349} + - {dataset: IPSL-CM5A-LR, start_year: 1800, end_year: 2799} + - {dataset: IPSL-CM5A-MR, start_year: 1800, end_year: 2099} + - {dataset: IPSL-CM5B-LR, start_year: 1830, end_year: 2129} + - {dataset: MIROC-ESM, start_year: 1800, end_year: 2429} + - {dataset: MIROC-ESM-CHEM, start_year: 1846, end_year: 2100} + - {dataset: NorESM1-M, start_year: 700, end_year: 1200} + - {dataset: NorESM1-ME, start_year: 901, end_year: 1152} + + scripts: + ch12_calc_IAV_for_stippandhatch_rsut: &IAV_settings_rsut + script: ipcc_ar5/ch12_calc_IAV_for_stippandhatch.ncl + + ### Required attributes + # Plot style + styleset: CMIP5 + # Time average ('opt' argument of time_operations.ncl, + # seasonalclim, annualclim) + time_avg: annualclim + + ### Optional attributes + # Calculate IAV from multi-model mean (mmm, default) or + # save individual models (each) + iavmode: mmm + # Length of period in years to calculate variability over + # (default total time period) + periodlength: 20. 
+
+
+  rsut_change_mmm:
+    description: Short Wave Radiation change (annual mean or seasonal)
+                 in multi-model mean with significance
+    themes:
+      - phys
+    realms:
+      - atmos
+    variables:
+      rsut:
+        preprocessor: preproc_map
+        project: CMIP5
+        ensemble: r1i1p1
+        mip: Amon
+        exp: historical
+    additional_datasets:
+
+      - {dataset: CESM1-CAM5, start_year: 1900, end_year: 1950}
+      - {dataset: IPSL-CM5A-LR, start_year: 1900, end_year: 1950}
+      - {dataset: NorESM1-ME, start_year: 1900, end_year: 1950}
+
+      - {dataset: CESM1-CAM5, exp: rcp45, start_year: 2081, end_year: 2100}
+      - {dataset: IPSL-CM5A-LR, exp: rcp45, start_year: 2081, end_year: 2100}
+      - {dataset: NorESM1-ME, exp: rcp45, start_year: 2081, end_year: 2100}
+
+    scripts:
+      ch12_calc_map_diff_mmm_rsut: &rsut_map_settings
+        script: ipcc_ar5/ch12_calc_map_diff_mmm_stippandhatch.ncl
+
+        ### Required attributes
+        ancestors: ['rsut',
+                    'IAV_calc_rsut/ch12_calc_IAV_for_stippandhatch_rsut']
+        # List of scenarios to be included
+        scenarios: [rcp45]
+        # Start years of periods to be included as list
+        periods: [2081]
+        # Time average ('opt' argument of time_operations.ncl,
+        # seasonalclim, annualclim)
+        time_avg: annualclim
+        # Labels on top of each panel (loop is seasons, scenarios, periods)
+        label: ["RCP4.5 2081-2100"]
+
+        ### Optional attributes
+        # List with seasons index (required if time_avg "seasonalclim")
+        # (DJF:0, MAM:1, JJA:2, SON:3)
+        # seasons: [2]
+        # Calculate IAV from multi-model mean (mmm, default) or
+        # save individual models (each) (if "each", make sure to have them in
+        # IAV_calc_rsut and set iavmode to "each" there too)
+        iavmode: mmm
+        # Show difference in percent (1, default 0)
+        percent: 0
+        # Plot units
+        plot_units: "-W m-2" # change sign of radiation flux as in IPCC
+
+      ch12_plot_map_diff_mmm_stipp_rsut:
+        <<: *rsut_map_settings
+        script: ipcc_ar5/ch12_plot_map_diff_mmm_stipp.ncl
+
+        ### Required attributes
+        ancestors: ['rsut', 'ch12_calc_map_diff_mmm_rsut']
+
+        ### Optional attributes
+        # Map projection (any valid ncl projection, default Robinson)
+        # projection: Robinson
+        title: "Annual mean short wave TOA radiation change"
+        # Contour levels for all difference plots
+        diff_levs: [-6., -5.25, -4.5, -3.75, -3., -2.25, -1.5, -0.75, 0,
+                    0.75, 1.5, 2.25, 3., 3.75, 4.5, 5.25, 6.]
+        # Maximum number of plots in vertical
+        max_vert: 2
+        # Maximum number of plots in horizontal
+        max_hori: 2
+        # Alternative colormap (path to .rgb file or ncl name)
+        colormap: $diag_scripts/shared/plot/rgb/ipcc_colors_blu2red_centered.rgb
+        # Span whole colormap (default false)
+        # span:
+        # Plot significance as stippling
+        sig: true
+        # Plot uncertainty as hatching
+        not_sig: true
+        # Alternative name for output plot
+        # (default diagnostic + varname + time_avg)
+        # pltname:
+        # Print number of models at top-right of the figure
+        model_nr: true
+
+
+  ### Mean cloud cover change in RCPs for multi model mean ###################
+  IAV_calc_clt:
+    description: Calculate cloud cover interannual variability
+                 for stippling significance
+    themes:
+      - varmodes
+    realms:
+      - atmos
+    variables:
+      clt:
+        preprocessor: preproc_map
+        project: CMIP5
+        mip: Amon
+        exp: piControl
+        ensemble: r1i1p1
+    additional_datasets:
+
+      - {dataset: CanESM2, start_year: 2015, end_year: 3010}
+      - {dataset: CSIRO-Mk3-6-0, start_year: 1, end_year: 500}
+      - {dataset: GFDL-ESM2G, start_year: 1, end_year: 500}
+      - {dataset: IPSL-CM5A-LR, start_year: 1800, end_year: 2799}
+      - {dataset: MPI-ESM-LR, start_year: 1850, end_year: 2849}
+
+    scripts:
+      ch12_calc_IAV_for_stippandhatch_clt: &IAV_settings_clt
+        script: ipcc_ar5/ch12_calc_IAV_for_stippandhatch.ncl
+
+        ### Required attributes
+        # Plot style
+        styleset: CMIP5
+        # Time average ('opt' argument of time_operations.ncl,
+        # annualclim, seasonalclim)
+        time_avg: annualclim
+
+        ### Optional attributes
+        # Length of period in years to calculate variability over,
+        # default total time period
+        periodlength: 20.
+        # Calculate IAV from multi-model mean (mmm, default) or
+        # save individual models (each)
+        # iavmode:
+
+
+  clt_change_mmm:
+    description: Cloud fraction change (annual mean or seasonal)
+                 in multi-model mean with significance
+    themes:
+      - phys
+    realms:
+      - atmos
+    variables:
+      clt:
+        preprocessor: preproc_map
+        project: CMIP5
+        ensemble: r1i1p1
+        mip: Amon
+    additional_datasets:
+      - {dataset: GFDL-ESM2G, exp: historical, start_year: 1986, end_year: 2005}
+      - {dataset: MPI-ESM-LR, exp: historical, start_year: 1986, end_year: 2005}
+
+      - {dataset: GFDL-ESM2G, exp: rcp26, start_year: 2081, end_year: 2100}
+      - {dataset: MPI-ESM-LR, exp: rcp26, start_year: 2081, end_year: 2100}
+
+      - {dataset: GFDL-ESM2G, exp: rcp45, start_year: 2081, end_year: 2100}
+      - {dataset: MPI-ESM-LR, exp: rcp45, start_year: 2081, end_year: 2100}
+
+      - {dataset: GFDL-ESM2G, exp: rcp85, start_year: 2081, end_year: 2100}
+      - {dataset: MPI-ESM-LR, exp: rcp85, start_year: 2081, end_year: 2100}
+
+    scripts:
+      ch12_calc_map_diff_mmm_clt: &clt_map_settings
+        script: ipcc_ar5/ch12_calc_map_diff_mmm_stippandhatch.ncl
+
+        ### Required attributes
+        ancestors: ['clt', 'IAV_calc_clt/ch12_calc_IAV_for_stippandhatch_clt']
+        # List of scenarios to be included
+        scenarios: [rcp26, rcp45, rcp85]
+        # List with start years of periods to be included
+        periods: [2081]
+        # Time average ('opt' argument of time_operations.ncl,
+        # annualclim, seasonalclim)
+        time_avg: annualclim
+        # Labels on top of each panel (loop is seasons, scenarios, periods)
+        label: ["RCP2.6 2081-2100", "RCP4.5 2081-2100", "RCP8.5 2081-2100"]
+
+        ### Optional attributes
+        # List with seasons index (required if time_avg "seasonalclim")
+        # (DJF:0, MAM:1, JJA:2, SON:3)
+        # seasons: [2]
+        # Calculate IAV from multi-model mean (mmm, default) or
+        # save individual models (each) (if "each", make sure to have them in
+        # IAV_calc_clt and set 
iavmode to "each" there too)
+        # iavmode:
+        # Show difference in percent (1, default 0)
+        # percent:
+
+      ch12_plot_map_diff_mmm_stipp_clt:
+        <<: *clt_map_settings
+        script: ipcc_ar5/ch12_plot_map_diff_mmm_stipp.ncl
+
+        ### Required attributes
+        ancestors: ['clt', 'ch12_calc_map_diff_mmm_clt']
+
+        ### Optional attributes
+        # Map projection (any valid ncl projection, default Robinson)
+        # projection: Robinson
+        title: "Annual mean cloud fraction change"
+        # Contour levels for all difference plots
+        diff_levs: [-10, -9, -8, -7, -6, -5, -4, -3, -2, -1, 0,
+                    1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
+        # Maximum number of plots in vertical
+        max_vert: 2
+        # Maximum number of plots in horizontal
+        max_hori: 2
+        # Alternative colormap (path to .rgb file or ncl name)
+        colormap: $diag_scripts/shared/plot/rgb/ipcc_colors_red2blu_centered.rgb
+        # Span whole colormap (default false)
+        # span:
+        # Plot significance as stippling
+        sig: true
+        # Plot uncertainty as hatching
+        not_sig: true
+        # Alternative name for output plot
+        # (default diagnostic + varname + time_avg)
+        # pltname:
+        # Print number of models at top-right of the figure
+        model_nr: true
+
+  ### Mean sea level pressure change in RCPs for multi model mean ############
+  IAV_calc_psl:
+    description: Calculate sea level pressure interannual variability
+                 for stippling significance
+    themes:
+      - varmodes
+    realms:
+      - atmos
+    variables:
+      psl:
+        preprocessor: preproc_map
+        project: CMIP5
+        mip: Amon
+        exp: piControl
+        ensemble: r1i1p1
+    additional_datasets:
+
+      - {dataset: CanESM2, start_year: 2015, end_year: 3010}
+      - {dataset: CESM1-CAM5, start_year: 1, end_year: 319}
+      - {dataset: CSIRO-Mk3-6-0, start_year: 1, end_year: 500}
+      - {dataset: GFDL-ESM2G, start_year: 1, end_year: 500}
+      - {dataset: IPSL-CM5A-LR, start_year: 1800, end_year: 2799}
+      - {dataset: MPI-ESM-LR, start_year: 1850, end_year: 2849}
+
+    scripts:
+      ch12_calc_IAV_for_stippandhatch_psl: &IAV_settings_psl
+        script: ipcc_ar5/ch12_calc_IAV_for_stippandhatch.ncl
+
+        ### Required attributes
+        # Plot style
+        styleset: CMIP5
+        # Time average ('opt' argument of time_operations.ncl,
+        # annualclim, seasonalclim)
+        time_avg: seasonalclim
+
+        ### Optional attributes
+        # Length of period in years to calculate variability over,
+        # default total time period
+        periodlength: 20.
+        # Calculate IAV from multi-model mean (mmm, default) or
+        # save individual models (each)
+        # iavmode:
+
+  psl_change_mmm:
+    description: Mean sea level pressure change (annual mean or seasonal)
+                 in multi-model mean with significance
+    themes:
+      - phys
+    realms:
+      - atmos
+    variables:
+      psl:
+        preprocessor: preproc_map
+        project: CMIP5
+        ensemble: r1i1p1
+        mip: Amon
+    additional_datasets:
+      - {dataset: GFDL-ESM2G, exp: historical, start_year: 1986, end_year: 2005}
+      - {dataset: MPI-ESM-LR, exp: historical, start_year: 1986, end_year: 2005}
+
+      - {dataset: GFDL-ESM2G, exp: rcp26, start_year: 2081, end_year: 2100}
+      - {dataset: MPI-ESM-LR, exp: rcp26, start_year: 2081, end_year: 2100}
+
+      - {dataset: GFDL-ESM2G, exp: rcp45, start_year: 2081, end_year: 2100}
+      - {dataset: MPI-ESM-LR, exp: rcp45, start_year: 2081, end_year: 2100}
+
+      - {dataset: GFDL-ESM2G, exp: rcp85, start_year: 2081, end_year: 2100}
+      - {dataset: MPI-ESM-LR, exp: rcp85, start_year: 2081, end_year: 2100}
+
+    scripts:
+      ch12_calc_map_diff_mmm_psl: &psl_map_settings
+        script: ipcc_ar5/ch12_calc_map_diff_mmm_stippandhatch.ncl
+
+        ### Required attributes
+        ancestors: ['psl', 'IAV_calc_psl/ch12_calc_IAV_for_stippandhatch_psl']
+        # List of scenarios to be included
+        scenarios: [rcp26, rcp45, rcp85]
+        # List with start years of periods to be included
+        periods: [2081]
+        # Time average ('opt' argument of time_operations.ncl,
+        # annualclim, seasonalclim)
+        time_avg: seasonalclim
+        # Labels on top of each panel (loop is seasons, scenarios, periods)
+        label: ["DJF RCP2.6 2081-2100", "DJF RCP4.5 2081-2100",
+                "DJF RCP8.5 2081-2100", "JJA RCP2.6 2081-2100",
+                "JJA RCP4.5 2081-2100", "JJA RCP8.5 2081-2100"]
+
+        ### Optional attributes
+        # List with seasons index (required if time_avg "seasonalclim")
+        # (DJF:0, MAM:1, JJA:2, SON:3)
+        seasons: [0, 2]
+        # Calculate IAV from multi-model mean (mmm, default) or
+        # save individual models (each) (if "each", make sure to have them in
+        # IAV_calc_psl and set iavmode to "each" there too)
+        # iavmode:
+        # Show difference in percent (1, default 0)
+        # percent:
+        # Plot units
+        plot_units: "hPa"
+
+      ch12_plot_map_diff_mmm_stipp_psl:
+        <<: *psl_map_settings
+        script: ipcc_ar5/ch12_plot_map_diff_mmm_stipp.ncl
+
+        ### Required attributes
+        ancestors: ['psl', 'ch12_calc_map_diff_mmm_psl']
+
+        ### Optional attributes
+        # Map projection (any valid ncl projection, default Robinson)
+        # projection: Robinson
+        title: "Seasonal mean sea level pressure change"
+        # Contour levels for all difference plots
+        diff_levs: [-4, -3.5, -3, -2.5, -2, -1.5, -1, -0.5, 0,
+                    0.5, 1, 1.5, 2, 2.5, 3, 3.5, 4]
+        # Maximum number of plots in vertical
+        max_vert: 2
+        # Maximum number of plots in horizontal
+        max_hori: 3
+        # Alternative colormap (path to .rgb file or ncl name)
+        colormap: $diag_scripts/shared/plot/rgb/ipcc_colors_blu2red_centered.rgb
+        # Span whole colormap (default false)
+        span: false
+        # Plot significance as stippling
+        sig: true
+        # Plot uncertainty as hatching
+        not_sig: true
+        # Alternative name for output plot
+        # (default diagnostic + varname + time_avg)
+        # pltname:
+        # Print number of models at top-right of the figure
+        model_nr: true
+
+  ### Mean humidity change in RCPs for multi model mean ######################
+  IAV_calc_hurs:
+    description: Calculate mean relative humidity interannual variability
+                 for stippling significance
+    themes:
+      - varmodes
+    realms:
+      - atmos
+    variables:
+      hurs:
+        preprocessor: preproc_map
+        project: CMIP5
+        mip: Amon
+        exp: piControl
+        ensemble: r1i1p1
+    
additional_datasets:
+      - {dataset: bcc-csm1-1, start_year: 1, end_year: 500}
+      - {dataset: bcc-csm1-1-m, start_year: 1, end_year: 400}
+      - {dataset: BNU-ESM, start_year: 1450, end_year: 2008}
+      - {dataset: CanESM2, start_year: 2015, end_year: 3010}
+      - {dataset: CESM1-CAM5, start_year: 1, end_year: 319}
+      - {dataset: CNRM-CM5, start_year: 1850, end_year: 2699}
+      - {dataset: CSIRO-Mk3-6-0, start_year: 1, end_year: 500}
+      - {dataset: inmcm4, start_year: 1850, end_year: 2349}
+      - {dataset: IPSL-CM5A-MR, start_year: 1800, end_year: 2099}
+      - {dataset: IPSL-CM5B-LR, start_year: 1830, end_year: 2129}
+      - {dataset: MIROC-ESM, start_year: 1800, end_year: 2429}
+      - {dataset: MIROC-ESM-CHEM, start_year: 1846, end_year: 2100}
+      - {dataset: NorESM1-M, start_year: 700, end_year: 1200}
+      - {dataset: NorESM1-ME, start_year: 901, end_year: 1152}
+
+    scripts:
+      ch12_calc_IAV_for_stippandhatch_hurs: &IAV_settings_hurs
+        script: ipcc_ar5/ch12_calc_IAV_for_stippandhatch.ncl
+
+        ### Required attributes
+        # Plot style
+        styleset: CMIP5
+        # Time average ('opt' argument of time_operations.ncl,
+        # seasonalclim, annualclim)
+        time_avg: seasonalclim
+
+        ### Optional attributes
+        # Calculate IAV from multi-model mean (mmm, default) or
+        # save individual models (each)
+        iavmode: mmm
+        # Length of period in years to calculate variability over
+        # (default total time period)
+        periodlength: 20.
+
+
+  hurs_change_mmm:
+    description: Mean relative humidity change (annual mean or seasonal)
+                 in multi-model mean with significance
+    themes:
+      - phys
+    realms:
+      - atmos
+    variables:
+      hurs:
+        preprocessor: preproc_map
+        project: CMIP5
+        ensemble: r1i1p1
+        mip: Amon
+    additional_datasets:
+      - {dataset: CESM1-CAM5, exp: historical, start_year: 1986, end_year: 2005}
+      - {dataset: GFDL-ESM2G, exp: historical, start_year: 1986, end_year: 2005}
+      - {dataset: NorESM1-ME, exp: historical, start_year: 1986, end_year: 2005}
+
+      - {dataset: CESM1-CAM5, exp: rcp85, start_year: 2081, end_year: 2100}
+      - {dataset: GFDL-ESM2G, exp: rcp85, start_year: 2081, end_year: 2100}
+      - {dataset: NorESM1-ME, exp: rcp85, start_year: 2081, end_year: 2100}
+
+    scripts:
+      ch12_calc_map_diff_mmm_hurs: &hurs_map_settings
+        script: ipcc_ar5/ch12_calc_map_diff_mmm_stippandhatch.ncl
+
+        ### Required attributes
+        ancestors: ['hurs',
+                    'IAV_calc_hurs/ch12_calc_IAV_for_stippandhatch_hurs']
+        # List of scenarios to be included
+        scenarios: [rcp85]
+        # Start years of periods to be included as list
+        periods: [2081]
+        # Time average ('opt' argument of time_operations.ncl,
+        # seasonalclim, annualclim)
+        time_avg: seasonalclim
+        # Labels on top of each panel (loop is seasons, scenarios, periods)
+        label: ["RCP8.5 DJF 2081-2100", "RCP8.5 JJA 2081-2100"]
+
+        ### Optional attributes
+        # List with seasons index (required if time_avg "seasonalclim")
+        # (DJF:0, MAM:1, JJA:2, SON:3)
+        seasons: [0, 2]
+        # Calculate IAV from multi-model mean (mmm, default) or
+        # save individual models (each) (if "each", make sure to have them in
+        # IAV_calc_hurs and set iavmode to "each" there too)
+        iavmode: mmm
+        # Show difference in percent (1, default 0)
+        percent: 1
+
+      ch12_plot_map_diff_mmm_stipp_hurs:
+        <<: *hurs_map_settings
+        script: ipcc_ar5/ch12_plot_map_diff_mmm_stipp.ncl
+
+        ### Required attributes
+        ancestors: ['hurs', 'ch12_calc_map_diff_mmm_hurs']
+
+        ### Optional attributes
+        # Map projection (any valid ncl projection, default Robinson)
+        # projection: Robinson
+        # Plot title
+        title: "Mean relative humidity change"
+        # Contour levels for all difference plots
+        
diff_levs: [-10, -9, -8, -7, -6, -5, -4, -3, -2, -1, 0, + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + # Maximum number of plots in vertical + max_vert: 2 + # Maximum number of plots in horizontal + max_hori: 2 + # Alternative colormap (path to .rgb file or ncl name) + colormap: $diag_scripts/shared/plot/rgb/ipcc_colors_red2blu_centered.rgb + # Span whole colormap (default false) + # span: + # Plot significance as stippling + sig: true + # Plot uncertainty as hatching + not_sig: true + # Alternative name for output plot + # (default diagnostic + varname + time_avg) + # pltname: + # Print number of models at top-right of the figure + model_nr: true + + + ### Mean Salinity change in RCPs for multi model mean ###################### + IAV_calc_sos: + description: Calculate salinity interannual variability + for stippling significance + themes: + - varmodes + realms: + - ocean + variables: + sos: + preprocessor: preproc_map + project: CMIP5 + mip: Omon + exp: piControl + ensemble: r1i1p1 + additional_datasets: + - {dataset: CSIRO-Mk3-6-0, start_year: 1, end_year: 500} + - {dataset: HadGEM2-CC, start_year: 1860, end_year: 2099} + - {dataset: MIROC-ESM, start_year: 1800, end_year: 2429} + - {dataset: MIROC-ESM-CHEM, start_year: 1846, end_year: 2100} + + scripts: + ch12_calc_IAV_for_stippandhatch_sos: &IAV_settings_sos + script: ipcc_ar5/ch12_calc_IAV_for_stippandhatch.ncl + + ### Required attributes + # Plot style + styleset: CMIP5 + # Time average ('opt' argument of time_operations.ncl, + # seasonalclim, annualclim) + time_avg: annualclim + + ### Optional attributes + # Calculate IAV from multi-model mean (mmm, default) or + # save individual models (each) + iavmode: mmm + # Length of period in years to calculate variability over + # (default total time period) + periodlength: 20. 
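+        # Hedged sketch: as done for precipitation above, the salinity
+        # change maps could be drawn as a relative change by enabling the
+        # percent switch in the paired ch12_calc_map_diff_mmm_sos script,
+        # where it is currently commented out:
+        # percent: 1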
+
+
+  sos_change_mmm:
+    description: Mean salinity change (annual mean or seasonal)
+                 in multi-model mean with significance
+    themes:
+      - phys
+    realms:
+      - ocean
+    variables:
+      sos:
+        preprocessor: preproc_map
+        project: CMIP5
+        ensemble: r1i1p1
+        mip: Omon
+        exp: historical
+    additional_datasets:
+      - {dataset: CSIRO-Mk3-6-0, start_year: 1986, end_year: 2005}
+      - {dataset: GISS-E2-H, start_year: 1986, end_year: 2005}
+      - {dataset: GISS-E2-H, start_year: 1986, end_year: 2005,
+         ensemble: r1i1p2}
+      - {dataset: GISS-E2-H, start_year: 1986, end_year: 2005,
+         ensemble: r1i1p3}
+      - {dataset: GISS-E2-H-CC, start_year: 1986, end_year: 2005}
+      - {dataset: GISS-E2-R-CC, start_year: 1986, end_year: 2005}
+      - {dataset: GISS-E2-R, start_year: 1986, end_year: 2005}
+      - {dataset: GISS-E2-R, start_year: 1986, end_year: 2005,
+         ensemble: r1i1p2}
+      - {dataset: GISS-E2-R, start_year: 1986, end_year: 2005,
+         ensemble: r1i1p3}
+      - {dataset: HadGEM2-AO, start_year: 1986, end_year: 2005}
+      - {dataset: HadGEM2-CC, start_year: 1986, end_year: 2004}
+      - {dataset: MIROC-ESM, start_year: 1986, end_year: 2005}
+      - {dataset: MIROC-ESM-CHEM, start_year: 1986, end_year: 2005}
+
+      - {dataset: CSIRO-Mk3-6-0, exp: rcp85, start_year: 2081, end_year: 2100}
+      - {dataset: GISS-E2-H, exp: rcp85, start_year: 2081, end_year: 2100}
+      - {dataset: GISS-E2-H, exp: rcp85, start_year: 2081, end_year: 2100,
+         ensemble: r1i1p2}
+      - {dataset: GISS-E2-H, exp: rcp85, start_year: 2081, end_year: 2100,
+         ensemble: r1i1p3}
+      - {dataset: GISS-E2-H-CC, exp: rcp85, start_year: 2081, end_year: 2100}
+      - {dataset: GISS-E2-R-CC, exp: rcp85, start_year: 2081, end_year: 2100}
+      - {dataset: GISS-E2-R, exp: rcp85, start_year: 2081, end_year: 2100}
+      - {dataset: GISS-E2-R, exp: rcp85, start_year: 2081, end_year: 2100,
+         ensemble: r1i1p2}
+      - {dataset: GISS-E2-R, exp: rcp85, start_year: 2081, end_year: 2100,
+         ensemble: r1i1p3}
+      - {dataset: HadGEM2-AO, exp: rcp85, start_year: 2081, end_year: 2100}
+      - {dataset: HadGEM2-CC, exp: rcp85, start_year: 2081, end_year: 2100}
+      - {dataset: MIROC-ESM, exp: rcp85, start_year: 2081, end_year: 2100}
+      - {dataset: MIROC-ESM-CHEM, exp: rcp85, start_year: 2081, end_year: 2100}
+
+    scripts:
+      ch12_calc_map_diff_mmm_sos: &sos_map_settings
+        script: ipcc_ar5/ch12_calc_map_diff_mmm_stippandhatch.ncl
+
+        ### Required attributes
+        ancestors: ['sos', 'IAV_calc_sos/ch12_calc_IAV_for_stippandhatch_sos']
+        # List of scenarios to be included
+        scenarios: [rcp85]
+        # Start years of periods to be included as list
+        periods: [2081]
+        # Time average ('opt' argument of time_operations.ncl,
+        # seasonalclim, annualclim)
+        time_avg: annualclim
+        # Labels on top of each panel (loop is seasons, scenarios, periods)
+        label: ["RCP8.5 2081-2100"]
+
+        ### Optional attributes
+        # List with seasons index (required if time_avg "seasonalclim")
+        # (DJF:0, MAM:1, JJA:2, SON:3)
+        # seasons: [0, 2]
+        # Calculate IAV from multi-model mean (mmm, default) or
+        # save individual models (each) (if "each", make sure to have them in
+        # IAV_calc_sos and set iavmode to "each" there too)
+        iavmode: mmm
+        # Show difference in percent (1, default 0)
+        # percent: 1
+
+      ch12_plot_map_diff_mmm_stipp_sos:
+        <<: *sos_map_settings
+        script: ipcc_ar5/ch12_plot_map_diff_mmm_stipp.ncl
+
+        ### Required attributes
+        ancestors: ['sos', 'ch12_calc_map_diff_mmm_sos']
+
+        ### Optional attributes
+        # Map projection (any valid ncl projection, default Robinson)
+        # projection: Robinson
+        title: "Annual mean surface salinity change"
+        # Contour levels for all difference plots
+        
diff_levs: [-2., -1.5, -1., -0.5, 0, 0.5, 1., 1.5, 2.] + # Maximum number of plots in vertical + max_vert: 2 + # Maximum number of plots in horizontal + max_hori: 2 + # Alternative colormap (path to .rgb file or ncl name) + colormap: $diag_scripts/shared/plot/rgb/ipcc_colors_blu2red_centered.rgb + # Span whole colormap (default false) + # span: + # Plot significance as stippling + sig: true + # Plot uncertainty as hatching + not_sig: true + # Alternative name for output plot + # (default diagnostic + varname + time_avg) + # pltname: + # Print number of models at top-right of the figure + model_nr: true + + + ### Mean zonal variable change in one RCP for multi model mean ############# + IAV_calc_ta: + description: Calculate 3D temperature (ta) IAV + for stippling significance in zonal plot + themes: + - varmodes + realms: + - atmos + variables: + ta: + preprocessor: preproc_atm + reference_dataset: CanESM2 + project: CMIP5 + mip: Amon + ensemble: r1i1p1 + exp: piControl + additional_datasets: + - {dataset: CanESM2, start_year: 2015, end_year: 3010} + - {dataset: GFDL-ESM2G, start_year: 1, end_year: 500} + - {dataset: IPSL-CM5A-LR, start_year: 1800, end_year: 2799} + + scripts: + ch12_calc_IAV_for_stippandhatch_ta: + script: ipcc_ar5/ch12_calc_IAV_for_stippandhatch.ncl + + ### Required attributes + # Plot style + styleset: CMIP5 + # Time average ('opt' argument of time_operations.ncl) + time_avg: annualclim + + ### Optional attributes + # Length of period in years to calculate variability over + # (default total time period) + periodlength: 20. + + + ### Mean air temperature change in one RCP for multi model mean ############ + ta_change_mmm: + description: Air temperature change for RCPs, periods, + significance as zonal plots. + themes: + - phys + realms: + - atmos + variables: + ta: + preprocessor: preproc_zonal_atm + reference_dataset: CanESM2 + project: CMIP5 + mip: Amon + ensemble: r1i1p1 + additional_datasets: + - {dataset: CanESM2, exp: historical, start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2G, exp: historical, start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-LR, exp: historical, start_year: 1986, end_year: 2005} + + - {dataset: CanESM2, exp: rcp26, start_year: 2081, end_year: 2100} + - {dataset: GFDL-ESM2G, exp: rcp26, start_year: 2081, end_year: 2100} + - {dataset: MPI-ESM-LR, exp: rcp26, start_year: 2081, end_year: 2100} + + - {dataset: CanESM2, exp: rcp45, start_year: 2081, end_year: 2100} + - {dataset: GFDL-ESM2G, exp: rcp45, start_year: 2081, end_year: 2100} + - {dataset: MPI-ESM-LR, exp: rcp45, start_year: 2081, end_year: 2100} + + - {dataset: CanESM2, exp: rcp85, start_year: 2081, end_year: 2100} + - {dataset: GFDL-ESM2G, exp: rcp85, start_year: 2081, end_year: 2100} + - {dataset: MPI-ESM-LR, exp: rcp85, start_year: 2081, end_year: 2100} + + scripts: + ch12_calc_zonal_diff_mmm_ta: &ta_settings + script: ipcc_ar5/ch12_calc_zonal_cont_diff_mmm_stippandhatch.ncl + + ### Required attributes + ancestors: ['ta', 'IAV_calc_ta/ch12_calc_IAV_for_stippandhatch_ta'] + # List of scenarios to be included + scenarios: [rcp26, rcp45, rcp85] + # List with start years of periods to be included + periods: [2081] + # Time average ('opt' argument of time_operations.ncl), + # if seasonalclim seasons is required + time_avg: annualclim + # Labels on top of each panel (loop is seasons, scenarios, periods) + label: ["RCP2.6 2081-2100", "RCP4.5 2081-2100", "RCP8.5 2081-2100"] + + ### Optional attributes + # Units string + units: (~F35~J~F~C) + # List with seasons index (required if 
time_avg "seasonalclim") + # (DJF:0, MAM:1, JJA:2, SON:3) + # seasons: + # Calculate IAV from multi-model mean (mmm, default) or + # save individual models (each) + # iavmode: + # Show difference in percent (1, default 0) + # percent: + + ch12_plot_zonal_diff_mmm_ta: + <<: *ta_settings + script: ipcc_ar5/ch12_plot_zonal_diff_mmm_stipp.ncl + + ### Required attributes + ancestors: ['ta', 'ch12_calc_zonal_diff_mmm_ta'] + # Alternative colormap (path to .rgb file or ncl name) + colormap: $diag_scripts/shared/plot/rgb/ipcc_temperature_with_grey.rgb + # Contour levels for all difference plots + diff_levs: [-2, -1.5, -1, -0.5, 0, 0.5, 1, 1.5, 2, 3, 4, 5, 6, 7, 8] + # Plot significance as stippling + sig: true + # Plot uncertainty as hatching + not_sig: true + # Maximum number of plots in horizontal + max_hori: 3 + # Print number of models at top-right of the figure + model_nr: true + + + ### Mean zonal variable change in one RCP for multi model mean ############# + IAV_calc_ua: + description: Calculate 3D wind (ua) IAV + for stippling significance in zonal plot + themes: + - varmodes + realms: + - atmos + variables: + ua: + preprocessor: preproc_atm + reference_dataset: CanESM2 + project: CMIP5 + mip: Amon + exp: piControl + ensemble: r1i1p1 + additional_datasets: + - {dataset: CanESM2, start_year: 2015, end_year: 3010} + - {dataset: GFDL-ESM2G, start_year: 1, end_year: 500} + - {dataset: IPSL-CM5A-LR, start_year: 1800, end_year: 2799} + + scripts: + ch12_calc_IAV_for_stippandhatch_ua: + script: ipcc_ar5/ch12_calc_IAV_for_stippandhatch.ncl + + ### Required attributes + # Plot style + styleset: CMIP5 + # Time average ('opt' argument of time_operations.ncl) + time_avg: annualclim + + ### Optional attributes + # Length of period in years to calculate variability over + # (default total time period) + periodlength: 20. + + + ### Mean wind change in RCPs for multi model mean ########################## + ua_change_mmm: + description: Zonal wind change for RCPs, periods, + significance as zonal plots. 
+ themes: + - phys + realms: + - atmos + variables: + ua: + preprocessor: preproc_zonal_atm + reference_dataset: CanESM2 + project: CMIP5 + mip: Amon + ensemble: r1i1p1 + additional_datasets: + - {dataset: CanESM2, exp: historical, start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2G, exp: historical, start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-LR, exp: historical, start_year: 1986, end_year: 2005} + + - {dataset: CanESM2, exp: rcp26, start_year: 2081, end_year: 2100} + - {dataset: GFDL-ESM2G, exp: rcp26, start_year: 2081, end_year: 2100} + - {dataset: MPI-ESM-LR, exp: rcp26, start_year: 2081, end_year: 2100} + + - {dataset: CanESM2, exp: rcp45, start_year: 2081, end_year: 2100} + - {dataset: GFDL-ESM2G, exp: rcp45, start_year: 2081, end_year: 2100} + - {dataset: MPI-ESM-LR, exp: rcp45, start_year: 2081, end_year: 2100} + + - {dataset: CanESM2, exp: rcp85, start_year: 2081, end_year: 2100} + - {dataset: GFDL-ESM2G, exp: rcp85, start_year: 2081, end_year: 2100} + - {dataset: MPI-ESM-LR, exp: rcp85, start_year: 2081, end_year: 2100} + + scripts: + ch12_calc_zonal_diff_mmm_ua: &ua_settings + script: ipcc_ar5/ch12_calc_zonal_cont_diff_mmm_stippandhatch.ncl + + ### Required attributes + ancestors: ['ua', 'IAV_calc_ua/ch12_calc_IAV_for_stippandhatch_ua'] + # List of scenarios to be included + scenarios: [rcp26, rcp45, rcp85] + # List with start years of periods to be included + periods: [2081] + # Time average ('opt' argument of time_operations.ncl), + # if seasonalclim seasons is required + time_avg: annualclim + # Map projection (any valid ncl projection, default Robinson) + # projection: Robinson + # Labels on top of each panel (loop is seasons, scenarios, periods) + label: ["RCP2.6 2081-2100", "RCP4.5 2081-2100", "RCP8.5 2081-2100"] + + ### Optional attributes + # Plot contours of base period as lines (need to save base period field) + base_cn: true + # Units string + units: (m s~S~-1~N~) + # List with seasons index (required if time_avg "seasonalclim") + # (DJF:0, MAM:1, JJA:2, SON:3) + # seasons: + # Calculate IAV from multi-model mean (mmm, default) or + # save individual models (each) + # iavmode: + # Show difference in percent (1, default 0) + # percent: + + ch12_plot_zonal_diff_mmm_ua: + <<: *ua_settings + script: ipcc_ar5/ch12_plot_zonal_diff_mmm_stipp.ncl + + ### Required attributes + ancestors: ['ua', 'ch12_calc_zonal_diff_mmm_ua'] + # Alternative colormap (path to .rgb file or ncl name) + colormap: $diag_scripts/shared/plot/rgb/ipcc_colors_blu2red_centered.rgb + # Contour levels for all difference plots + diff_levs: [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5] + # Plot significance as stippling + sig: true + # Plot uncertainty as hatching + not_sig: true + # Maximum number of plots in horizontal + max_hori: 3 + # Print number of models at top-right of the figure + model_nr: true + # Further settings to control contour lines (for base_cn: true) + base_cnLevelSpacing: 10. 
+ base_cnMinLevel: -40
+ base_cnMaxLevel: 40
+
+
+ ### Mean ocean temperature change in RCPs for multi model mean #############
+ IAV_calc_thetao:
+ description: Calculate 3D ocean temperature (thetao) IAV
+ for stippling significance in zonal plot
+ themes:
+ - varmodes
+ realms:
+ - ocean
+ variables:
+ thetao:
+ preprocessor: preproc_ocean
+ project: CMIP5
+ mip: Omon
+ exp: piControl
+ ensemble: r1i1p1
+ additional_datasets:
+ - {dataset: CanESM2, start_year: 2015, end_year: 2515}
+ - {dataset: CSIRO-Mk3-6-0, start_year: 1, end_year: 500}
+ - {dataset: HadGEM2-CC, start_year: 1860, end_year: 2099}
+
+ scripts:
+ ch12_calc_IAV_for_stippandhatch_thetao:
+ script: ipcc_ar5/ch12_calc_IAV_for_stippandhatch.ncl
+
+ ### Required attributes
+ # Plot style
+ styleset: CMIP5
+ # Time average ('opt' argument of time_operations.ncl)
+ time_avg: annualclim
+
+ ### Optional attributes
+ # Length of period in years to calculate variability over
+ # (default total time period)
+ periodlength: 20.
+
+ thetao_change_mmm:
+ description: Ocean temperature change for RCPs, periods,
+ significance as zonal plots.
+ themes:
+ - phys
+ realms:
+ - ocean
+ variables:
+ thetao:
+ preprocessor: preproc_zonal_ocean
+ reference_dataset: CanESM2
+ project: CMIP5
+ mip: Omon
+ ensemble: r1i1p1
+ exp: historical
+ additional_datasets:
+ - {dataset: CanESM2, start_year: 1986, end_year: 2005}
+ - {dataset: CSIRO-Mk3-6-0, start_year: 1986, end_year: 2005}
+ - {dataset: HadGEM2-CC, start_year: 1986, end_year: 2004}
+
+ - {dataset: CanESM2, exp: rcp85, start_year: 2081, end_year: 2100}
+ - {dataset: CSIRO-Mk3-6-0, exp: rcp85, start_year: 2081, end_year: 2100}
+ - {dataset: HadGEM2-CC, exp: rcp85, start_year: 2081, end_year: 2100}
+
+ scripts:
+ ch12_calc_zonal_diff_mmm_thetao: &thetao_settings
+ script: ipcc_ar5/ch12_calc_zonal_cont_diff_mmm_stippandhatch.ncl
+
+ ### Required attributes
+ ancestors: ['thetao',
+ 'IAV_calc_thetao/ch12_calc_IAV_for_stippandhatch_thetao']
+ # List of scenarios to be included
+ scenarios: [rcp85]
+ # List with start years of periods to be included
+ periods: [2081]
+ # Time average ('opt' argument of time_operations.ncl)
+ time_avg: annualclim
+ # Labels on top of each panel (loop is seasons, scenarios, periods)
+ label: ["RCP8.5 2081-2100"]
+
+ ### Optional attributes
+ # Units string
+ units: (~F35~J~F~C)
+ # List with seasons index (required if time_avg "seasonalclim")
+ # (DJF:0, MAM:1, JJA:2, SON:3)
+ # seasons:
+ # Calculate IAV from multi-model mean (mmm, default) or
+ # save individual models (each)
+ # iavmode:
+ # Show difference in percent (1, default 0)
+ # percent:
+
+ ch12_plot_zonal_diff_mmm_thetao:
+ <<: *thetao_settings
+ script: ipcc_ar5/ch12_plot_zonal_diff_mmm_stipp.ncl
+
+ ### Required attributes
+ ancestors: ['thetao', 'ch12_calc_zonal_diff_mmm_thetao']
+ # Plot title
+ title: "Annual mean ocean temperature change (2081-2100)"
+ # Alternative colormap (path to .rgb file or ncl name)
+ colormap: $diag_scripts/shared/plot/rgb/ipcc_temperature_with_grey.rgb
+ # Contour levels for all difference plots
+ diff_levs: [-0.5, -0.3, -0.2, -0.1, 0,
+ 0.1, 0.2, 0.3, 0.4, 0.5, 0.7, 1, 1.5, 2, 2.5, 3]
+ # Plot significance as stippling
+ sig: true
+ # Plot uncertainty as hatching
+ not_sig: true
+ # Maximum number of plots in horizontal
+ max_hori: 3
+ # Print number of models at top-right of the figure
+ model_nr: true
+
+ ### Mean Soil Moisture change in RCPs for multi model mean ###
+ IAV_calc_mrsos:
+ description: Calculate soil moisture IAV for stippling significance
+ themes:
+ - varmodes
+ realms:
+ - atmos
+ variables:
+ mrsos:
+ preprocessor: preproc_map_land
+ project: CMIP5
+ mip: Lmon
+ exp: piControl
+ ensemble: r1i1p1
+ additional_datasets:
+ - {dataset: CanESM2, start_year: 2015, end_year: 3010}
+ - {dataset: CESM1-CAM5, start_year: 1, end_year: 319}
+ - {dataset: CSIRO-Mk3-6-0, start_year: 1, end_year: 500}
+ - {dataset: GFDL-ESM2G, start_year: 1, end_year: 500}
+ - {dataset: IPSL-CM5A-LR, start_year: 1800, end_year: 2799}
+
+ scripts:
+ ch12_calc_IAV_for_stippandhatch_mrsos: &IAV_settings_mrsos
+ script: ipcc_ar5/ch12_calc_IAV_for_stippandhatch.ncl
+
+ ### Required attributes
+ # Plot style
+ styleset: CMIP5
+ # Time average ('opt' argument of time_operations.ncl)
+ time_avg: annualclim
+
+ ### Optional attributes
+ # Length of period in years to calculate variability over
+ # (default total time period)
+ periodlength: 20.
+
+
+ mrsos_change_mmm:
+ description: Soil moisture change (annual mean or seasonal)
+ in multi-model mean with significance
+ themes:
+ - phys
+ realms:
+ - atmos
+ variables:
+ mrsos:
+ preprocessor: preproc_map_land
+ project: CMIP5
+ mip: Lmon
+ ensemble: r1i1p1
+ exp: historical
+ additional_datasets:
+ - {dataset: CanESM2, start_year: 1986, end_year: 2005}
+ - {dataset: CCSM4, start_year: 1986, end_year: 2005}
+ - {dataset: CESM1-CAM5, start_year: 1986, end_year: 2005}
+ - {dataset: CSIRO-Mk3-6-0, start_year: 1986, end_year: 2005}
+ - {dataset: GFDL-CM3, start_year: 1986, end_year: 2005}
+
+ - {dataset: CanESM2, exp: rcp26, start_year: 2081, end_year: 2100}
+ - {dataset: CCSM4, exp: rcp26, start_year: 2081, end_year: 2100}
+ - {dataset: CESM1-CAM5, exp: rcp26, start_year: 2081, end_year: 2100}
+ - {dataset: CSIRO-Mk3-6-0, exp: rcp26, start_year: 2081, end_year: 2100}
+ - {dataset: GFDL-CM3, exp: rcp26, start_year: 2081, end_year: 2100}
+
+ - {dataset: CanESM2, exp: rcp45, start_year: 2081, end_year: 2100}
+ - {dataset: CCSM4, exp: rcp45, start_year: 2081, end_year: 2100}
+ - {dataset: CESM1-CAM5, exp: rcp45, start_year: 2081, end_year: 2100}
+ - {dataset: CSIRO-Mk3-6-0, exp: rcp45, start_year: 2081, end_year: 2100}
+ - {dataset: GFDL-CM3, exp: rcp45, start_year: 2081, end_year: 2100}
+
+ - {dataset: CCSM4, exp: rcp60, start_year: 2081, end_year: 2100}
+ - {dataset: CESM1-CAM5, exp: rcp60, start_year: 2081, end_year: 2100}
+ - {dataset: CSIRO-Mk3-6-0, exp: rcp60, start_year: 2081, end_year: 2100}
+ - {dataset: GFDL-CM3, exp: rcp60, start_year: 2081, end_year: 2100}
+
+ - {dataset: CanESM2, exp: rcp85, start_year: 2081, end_year: 2100}
+ - {dataset: CCSM4, exp: rcp85, start_year: 2081, end_year: 2100}
+ - {dataset: CESM1-CAM5, exp: rcp85, start_year: 2081, end_year: 2100}
+ - {dataset: CSIRO-Mk3-6-0, exp: rcp85, start_year: 2081, end_year: 2100}
+ - {dataset: GFDL-CM3, exp: rcp85, start_year: 2081, end_year: 2100}
+
+ scripts:
+ ch12_calc_map_diff_mmm_mrsos: &mrsos_map_settings
+ script: ipcc_ar5/ch12_calc_map_diff_mmm_stippandhatch.ncl
+
+ ### Required attributes
+ ancestors: ['mrsos',
+ 'IAV_calc_mrsos/ch12_calc_IAV_for_stippandhatch_mrsos']
+ # List of scenarios to be included
+ scenarios: [rcp26, rcp45, rcp60, rcp85]
+ # List with start years of periods to be included
+ periods: [2081]
+ # Time average ('opt' argument of time_operations.ncl,
+ # seasonalclim, annualclim)
+ time_avg: annualclim
+ # Labels on top of each panel (loop is seasons, scenarios, periods)
+ label: ["RCP2.6 2081-2100", "RCP4.5 2081-2100",
+ "RCP6.0 2081-2100", "RCP8.5 2081-2100"]
+
+ ### Optional attributes
+ # List with seasons index (required if time_avg "seasonalclim")
+ # (DJF:0, MAM:1,
JJA:2, SON:3) + # seasons: [2] + # Calculate IAV from multi-model mean (mmm, default) or + # save individual models (each) (if "each", make sure to have them in + # IAV_calc_mrsos and set iavmode to "each" there too) + # iavmode: + # Show difference in percent (1, default 0) + # percent: + # Plot units + plot_units: "mm" + + ch12_plot_map_diff_mmm_stipp_mrsos: + <<: *mrsos_map_settings + script: ipcc_ar5/ch12_plot_map_diff_mmm_stipp.ncl + + ### Required attributes + ancestors: ['mrsos', 'ch12_calc_map_diff_mmm_mrsos'] + + ### Optional attributes + # Map projection (any valid ncl projection, default Robinson) + projection: Robinson + # Contour levels for all difference plots + diff_levs: [-2, -1.6, -1.2, -0.8, -0.4, 0, 0.4, 0.8, 1.2, 1.6, 2] + # Maximum number of plots in vertical + max_vert: 2 + # Maximum number of plots in horizontal + max_hori: 2 + # Alternative colormap (path to .rgb file or ncl name) + colormap: $diag_scripts/shared/plot/rgb/ipcc_colors_red2blu_centered.rgb + # Span whole colormap (default false) + # span: + # Plot significance as stippling + sig: true + # Plot uncertainty as hatching + not_sig: true + # Alternative name for output plot + # (default diagnostic + varname + time_avg) + # pltname: + # Print number of models at top-right of the figure + model_nr: true + + + ### Mean runoff change in RCPs for multi model mean ######################## + IAV_calc_mrro: + description: Calculate runoff interannual variability + for stippling significance + themes: + - varmodes + realms: + - land + variables: + mrro: + preprocessor: preproc_map_land + project: CMIP5 + mip: Lmon + exp: piControl + ensemble: r1i1p1 + additional_datasets: + - {dataset: bcc-csm1-1, start_year: 1, end_year: 500} + - {dataset: bcc-csm1-1-m, start_year: 1, end_year: 400} +# - {dataset: BNU-ESM, start_year: 1450, end_year: 2008} + - {dataset: CanESM2, start_year: 2015, end_year: 3010} + - {dataset: CESM1-CAM5, start_year: 1, end_year: 319} + - {dataset: CNRM-CM5, start_year: 1850, end_year: 2699} + - {dataset: CSIRO-Mk3-6-0, start_year: 1, end_year: 500} +# - {dataset: inmcm4, start_year: 1850, end_year: 2349} + - {dataset: IPSL-CM5A-LR, start_year: 1800, end_year: 2799} + - {dataset: IPSL-CM5A-MR, start_year: 1800, end_year: 2099} + - {dataset: IPSL-CM5B-LR, start_year: 1830, end_year: 2129} + - {dataset: MIROC-ESM, start_year: 1800, end_year: 2429} + - {dataset: MIROC-ESM-CHEM, start_year: 1846, end_year: 2100} + - {dataset: NorESM1-M, start_year: 700, end_year: 1200} + - {dataset: NorESM1-ME, start_year: 901, end_year: 1152} + + scripts: + ch12_calc_IAV_for_stippandhatch_mrro: &IAV_settings_mrro + script: ipcc_ar5/ch12_calc_IAV_for_stippandhatch.ncl + + ### Required attributes + # Plot style + styleset: CMIP5 + # Time average ('opt' argument of time_operations.ncl, + # seasonalclim, annualclim) + time_avg: annualclim + + ### Optional attributes + # Calculate IAV from multi-model mean (mmm, default) or + # save individual models (each) + iavmode: mmm + # Length of period in years to calculate variability over + # (default total time period) + periodlength: 20. 
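A note on the `iavmode` option used above: the comments in this recipe warn that switching from the default multi-model-mean IAV ("mmm") to per-model IAV ("each") only works when the IAV diagnostic and the change diagnostic that follows are switched together and run on matching dataset lists. A minimal, hypothetical sketch of that pairing for the runoff diagnostics (dataset lists and the remaining required attributes omitted for brevity):

```yaml
# Sketch only, not part of the recipe: both script sections must agree
# on iavmode, and both diagnostics must cover the same models.
IAV_calc_mrro:
  scripts:
    ch12_calc_IAV_for_stippandhatch_mrro:
      script: ipcc_ar5/ch12_calc_IAV_for_stippandhatch.ncl
      iavmode: each    # save IAV per model instead of the multi-model mean
mrro_change_mmm:
  scripts:
    ch12_calc_map_diff_mmm_mrro:
      script: ipcc_ar5/ch12_calc_map_diff_mmm_stippandhatch.ncl
      iavmode: each    # must match the IAV diagnostic above
```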
+
+
+ mrro_change_mmm:
+ description: Mean runoff change (annual mean or seasonal)
+ in multi-model mean with significance
+ themes:
+ - phys
+ realms:
+ - land
+ variables:
+ mrro:
+ preprocessor: preproc_map_land
+ project: CMIP5
+ ensemble: r1i1p1
+ mip: Lmon
+ exp: historical
+ additional_datasets:
+ - {dataset: CanESM2, start_year: 1986, end_year: 2005}
+ - {dataset: CCSM4, start_year: 1986, end_year: 2005}
+ - {dataset: CESM1-CAM5, start_year: 1986, end_year: 2005}
+ - {dataset: CSIRO-Mk3-6-0, start_year: 1986, end_year: 2005}
+ - {dataset: GFDL-CM3, start_year: 1986, end_year: 2005}
+
+ - {dataset: CanESM2, exp: rcp26, start_year: 2081, end_year: 2100}
+ - {dataset: CCSM4, exp: rcp26, start_year: 2081, end_year: 2100}
+ - {dataset: CESM1-CAM5, exp: rcp26, start_year: 2081, end_year: 2100}
+ - {dataset: CSIRO-Mk3-6-0, exp: rcp26, start_year: 2081, end_year: 2100}
+ - {dataset: GFDL-CM3, exp: rcp26, start_year: 2081, end_year: 2100}
+
+ - {dataset: CanESM2, exp: rcp45, start_year: 2081, end_year: 2100}
+ - {dataset: CCSM4, exp: rcp45, start_year: 2081, end_year: 2100}
+ - {dataset: CESM1-CAM5, exp: rcp45, start_year: 2081, end_year: 2100}
+ - {dataset: CSIRO-Mk3-6-0, exp: rcp45, start_year: 2081, end_year: 2100}
+ - {dataset: GFDL-CM3, exp: rcp45, start_year: 2081, end_year: 2100}
+
+ - {dataset: CCSM4, exp: rcp60, start_year: 2081, end_year: 2100}
+ - {dataset: CESM1-CAM5, exp: rcp60, start_year: 2081, end_year: 2100}
+ - {dataset: CSIRO-Mk3-6-0, exp: rcp60, start_year: 2081, end_year: 2100}
+ - {dataset: GFDL-CM3, exp: rcp60, start_year: 2081, end_year: 2100}
+
+ - {dataset: CanESM2, exp: rcp85, start_year: 2081, end_year: 2100}
+ - {dataset: CCSM4, exp: rcp85, start_year: 2081, end_year: 2100}
+ - {dataset: CESM1-CAM5, exp: rcp85, start_year: 2081, end_year: 2100}
+ - {dataset: CSIRO-Mk3-6-0, exp: rcp85, start_year: 2081, end_year: 2100}
+ - {dataset: GFDL-CM3, exp: rcp85, start_year: 2081, end_year: 2100}
+
+ scripts:
+ ch12_calc_map_diff_mmm_mrro: &mrro_map_settings
+ script: ipcc_ar5/ch12_calc_map_diff_mmm_stippandhatch.ncl
+
+ ### Required attributes
+ ancestors: ['mrro',
+ 'IAV_calc_mrro/ch12_calc_IAV_for_stippandhatch_mrro']
+ # List of scenarios to be included
+ scenarios: [rcp26, rcp45, rcp60, rcp85]
+ # List with start years of periods to be included
+ periods: [2081]
+ # Time average ('opt' argument of time_operations.ncl,
+ # seasonalclim, annualclim)
+ time_avg: annualclim
+ # Labels on top of each panel (loop is seasons, scenarios, periods)
+ label: ["RCP2.6 2081-2100", "RCP4.5 2081-2100",
+ "RCP6.0 2081-2100", "RCP8.5 2081-2100"]
+
+ ### Optional attributes
+ # List with seasons index (required if time_avg "seasonalclim")
+ # (DJF:0, MAM:1, JJA:2, SON:3)
+ # seasons: [0, 2]
+ # Calculate IAV from multi-model mean (mmm, default) or
+ # save individual models (each) (if "each", make sure to have them in
+ # IAV_calc_mrro and set iavmode to "each" there too)
+ iavmode: mmm
+ # Show difference in percent (1, default 0)
+ # percent: 1
+ # Plot units
+ plot_units: "mm d-1"
+
+ ch12_plot_map_diff_mmm_stipp_mrro:
+ <<: *mrro_map_settings
+ script: ipcc_ar5/ch12_plot_map_diff_mmm_stipp.ncl
+
+ ### Required attributes
+ ancestors: ['mrro', 'ch12_calc_map_diff_mmm_mrro']
+
+ ### Optional attributes
+ # Map projection (any valid ncl projection, default Robinson)
+ # projection: Robinson
+ # Contour levels for all difference plots
+ diff_levs: [-0.5, -0.4, -0.3, -0.2, -0.1, 0, 0.1, 0.2, 0.3, 0.4, 0.5]
+ # Maximum number of plots in vertical
+ max_vert: 2
+ # Maximum number of plots
in horizontal + max_hori: 2 + # Alternative colormap (path to .rgb file or ncl name) + colormap: $diag_scripts/shared/plot/rgb/ipcc_colors_red2blu_centered.rgb + # Span whole colormap (default false) + # span: + # Plot significance as stippling + sig: true + # Plot uncertainty as hatching + not_sig: true + # Alternative name for output plot + # (default diagnostic + varname + time_avg) + # pltname: + # Print number of models at top-right of the figure + model_nr: true + + + ### Mean Evaporation change in RCPs for multi model mean ################### + IAV_calc_evspsbl: + description: Calculate evaporation interannual variability + for stippling significance + themes: + - varmodes + realms: + - atmos + variables: + evspsbl: + preprocessor: preproc_map + project: CMIP5 + mip: Amon + exp: piControl + ensemble: r1i1p1 + additional_datasets: + - {dataset: bcc-csm1-1, start_year: 1, end_year: 500} + - {dataset: bcc-csm1-1-m, start_year: 1, end_year: 400} + - {dataset: BNU-ESM, start_year: 1450, end_year: 2008} + - {dataset: CanESM2, start_year: 2015, end_year: 3010} + - {dataset: CESM1-CAM5, start_year: 1, end_year: 319} + - {dataset: CNRM-CM5, start_year: 1850, end_year: 2699} + - {dataset: CSIRO-Mk3-6-0, start_year: 1, end_year: 500} + - {dataset: inmcm4, start_year: 1850, end_year: 2349} + - {dataset: IPSL-CM5A-LR, start_year: 1800, end_year: 2799} + - {dataset: IPSL-CM5A-MR, start_year: 1800, end_year: 2099} + - {dataset: IPSL-CM5B-LR, start_year: 1830, end_year: 2129} + - {dataset: MIROC-ESM, start_year: 1800, end_year: 2429} + - {dataset: MIROC-ESM-CHEM, start_year: 1846, end_year: 2100} + - {dataset: NorESM1-M, start_year: 700, end_year: 1200} + - {dataset: NorESM1-ME, start_year: 901, end_year: 1152} + + scripts: + ch12_calc_IAV_for_stippandhatch_evspsbl: &IAV_settings_evspsbl + script: ipcc_ar5/ch12_calc_IAV_for_stippandhatch.ncl + + ### Required attributes + # Plot style + styleset: CMIP5 + # Time average ('opt' argument of time_operations.ncl, + # seasonalclim, annualclim) + time_avg: annualclim + + ### Optional attributes + # Calculate IAV from multi-model mean (mmm, default) + # or save individual models (each) + iavmode: mmm + # Length of period in years to calculate variability over + # (default total time period) + periodlength: 20. 
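All of the map diagnostics in this group accept the same seasonal switch: replacing `time_avg: annualclim` with `seasonalclim` makes the `seasons` list mandatory (indices DJF:0, MAM:1, JJA:2, SON:3), and `label` then needs one entry per panel in the loop order seasons, scenarios, periods noted in the comments. A hypothetical, abridged sketch for DJF and JJA evaporation changes under a single scenario, applied to the map-difference script that follows:

```yaml
# Sketch only: seasonal climatology instead of the annual mean.
ch12_calc_map_diff_mmm_evspsbl:
  script: ipcc_ar5/ch12_calc_map_diff_mmm_stippandhatch.ncl
  scenarios: [rcp85]
  periods: [2081]
  time_avg: seasonalclim
  seasons: [0, 2]    # DJF and JJA
  label: ["DJF RCP8.5 2081-2100", "JJA RCP8.5 2081-2100"]
```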
+
+
+ evspsbl_change_mmm:
+ description: Mean evaporation change (annual mean or seasonal)
+ in multi-model mean with significance
+ themes:
+ - phys
+ realms:
+ - atmos
+ variables:
+ evspsbl:
+ preprocessor: preproc_map
+ project: CMIP5
+ ensemble: r1i1p1
+ mip: Amon
+ exp: historical
+ additional_datasets:
+ - {dataset: CanESM2, start_year: 1986, end_year: 2005}
+ - {dataset: CCSM4, start_year: 1986, end_year: 2005}
+ - {dataset: CESM1-CAM5, start_year: 1986, end_year: 2005}
+ - {dataset: CSIRO-Mk3-6-0, start_year: 1986, end_year: 2005}
+ - {dataset: GFDL-CM3, start_year: 1986, end_year: 2005}
+
+ - {dataset: CanESM2, exp: rcp26, start_year: 2081, end_year: 2100}
+ - {dataset: CCSM4, exp: rcp26, start_year: 2081, end_year: 2100}
+ - {dataset: CESM1-CAM5, exp: rcp26, start_year: 2081, end_year: 2100}
+ - {dataset: CSIRO-Mk3-6-0, exp: rcp26, start_year: 2081, end_year: 2100}
+ - {dataset: GFDL-CM3, exp: rcp26, start_year: 2081, end_year: 2100}
+
+ - {dataset: CanESM2, exp: rcp45, start_year: 2081, end_year: 2100}
+ - {dataset: CCSM4, exp: rcp45, start_year: 2081, end_year: 2100}
+ - {dataset: CESM1-CAM5, exp: rcp45, start_year: 2081, end_year: 2100}
+ - {dataset: CSIRO-Mk3-6-0, exp: rcp45, start_year: 2081, end_year: 2100}
+ - {dataset: GFDL-CM3, exp: rcp45, start_year: 2081, end_year: 2100}
+
+ - {dataset: CCSM4, exp: rcp60, start_year: 2081, end_year: 2100}
+ - {dataset: CESM1-CAM5, exp: rcp60, start_year: 2081, end_year: 2100}
+ - {dataset: CSIRO-Mk3-6-0, exp: rcp60, start_year: 2081, end_year: 2100}
+ - {dataset: GFDL-CM3, exp: rcp60, start_year: 2081, end_year: 2100}
+
+ - {dataset: CanESM2, exp: rcp85, start_year: 2081, end_year: 2100}
+ - {dataset: CCSM4, exp: rcp85, start_year: 2081, end_year: 2100}
+ - {dataset: CESM1-CAM5, exp: rcp85, start_year: 2081, end_year: 2100}
+ - {dataset: CSIRO-Mk3-6-0, exp: rcp85, start_year: 2081, end_year: 2100}
+ - {dataset: GFDL-CM3, exp: rcp85, start_year: 2081, end_year: 2100}
+
+ scripts:
+ ch12_calc_map_diff_mmm_evspsbl: &evspsbl_map_settings
+ script: ipcc_ar5/ch12_calc_map_diff_mmm_stippandhatch.ncl
+
+ ### Required attributes
+ ancestors: ['evspsbl',
+ 'IAV_calc_evspsbl/ch12_calc_IAV_for_stippandhatch_evspsbl']
+ # List of scenarios to be included
+ scenarios: [rcp26, rcp45, rcp60, rcp85]
+ # List with start years of periods to be included
+ periods: [2081]
+ # Time average ('opt' argument of time_operations.ncl,
+ # seasonalclim, annualclim)
+ time_avg: annualclim
+ # Labels on top of each panel (loop is seasons, scenarios, periods)
+ label: ["RCP2.6 2081-2100", "RCP4.5 2081-2100",
+ "RCP6.0 2081-2100", "RCP8.5 2081-2100"]
+
+ ### Optional attributes
+ # List with seasons index (required if time_avg "seasonalclim")
+ # (DJF:0, MAM:1, JJA:2, SON:3)
+ # seasons: [0, 2]
+ # Calculate IAV from multi-model mean (mmm, default) or
+ # save individual models (each) (if "each", make sure to have them
+ # in IAV_calc_evspsbl and set iavmode to "each" there too)
+ iavmode: mmm
+ # Show difference in percent (1, default 0)
+ # percent: 1
+ # Plot units
+ plot_units: "mm d-1"
+
+ ch12_plot_map_diff_mmm_stipp_evspsbl:
+ <<: *evspsbl_map_settings
+ script: ipcc_ar5/ch12_plot_map_diff_mmm_stipp.ncl
+
+ ### Required attributes
+ ancestors: ['evspsbl', 'ch12_calc_map_diff_mmm_evspsbl']
+
+ ### Optional attributes
+ # Map projection (any valid ncl projection, default Robinson)
+ # projection: Robinson
+ # Contour levels for all difference plots
+ diff_levs: [-1., -0.8, -0.6, -0.4, -0.2, 0, 0.2, 0.4, 0.6, 0.8, 1.]
+ # Maximum number of plots in vertical
+ max_vert: 2
+ # Maximum number of plots in horizontal
+ max_hori: 2
+ # Alternative colormap (path to .rgb file or ncl name)
+ colormap: $diag_scripts/shared/plot/rgb/ipcc_colors_red2blu_centered.rgb
+ # Span whole colormap (default false)
+ # span:
+ # Plot significance as stippling
+ sig: true
+ # Plot uncertainty as hatching
+ not_sig: true
+ # Alternative name for output plot
+ # (default is diagnostic + varname + time_avg)
+ # pltname:
+ # Print number of models at top-right of the figure
+ model_nr: true
+
+
+ ### Change scaled by global average temperature change #####################
+ tas_change_scaleT:
+ description: Multi-model mean temperature change
+ scaled by global T change, map with significance.
+ # Caveat: the method requires that the pattern be estimated at a time
+ # when the temperature change signal from the CO2 increase has emerged;
+ # hence, in IPCC AR5 the FIO-ESM model was excluded from rcp2.6 because
+ # it did not show any warming by the end of the 21st century.
+ themes:
+ - phys
+ realms:
+ - atmos
+ variables:
+ tas:
+ preprocessor: preproc_map
+ project: CMIP5
+ ensemble: r1i1p1
+ mip: Amon
+ exp: historical
+ additional_datasets:
+ - {dataset: CCSM4, start_year: 1986, end_year: 2005}
+ - {dataset: CESM1-CAM5, start_year: 1986, end_year: 2005}
+ - {dataset: GFDL-ESM2G, start_year: 1986, end_year: 2005}
+ - {dataset: IPSL-CM5A-LR, start_year: 1986, end_year: 2005}
+ - {dataset: IPSL-CM5A-MR, start_year: 1986, end_year: 2005}
+ - {dataset: MPI-ESM-LR, start_year: 1986, end_year: 2005}
+ - {dataset: MPI-ESM-MR, start_year: 1986, end_year: 2005}
+
+ - {dataset: GFDL-ESM2G, exp: rcp26, start_year: 2081, end_year: 2100}
+ - {dataset: MPI-ESM-LR, exp: rcp26, start_year: 2081, end_year: 2100}
+
+ - {dataset: IPSL-CM5A-LR, exp: rcp26, start_year: 2181, end_year: 2200}
+ - {dataset: MPI-ESM-LR, exp: rcp26, start_year: 2181, end_year: 2200}
+
+ - {dataset: GFDL-ESM2G, exp: rcp45, start_year: 2081, end_year: 2100}
+ - {dataset: IPSL-CM5A-LR, exp: rcp45, start_year: 2081, end_year: 2100}
+ - {dataset: MPI-ESM-LR, exp: rcp45, start_year: 2081, end_year: 2100}
+
+ - {dataset: IPSL-CM5A-MR, exp: rcp45, start_year: 2181, end_year: 2200}
+ - {dataset: MPI-ESM-LR, exp: rcp45, start_year: 2181, end_year: 2200}
+
+ - {dataset: GFDL-ESM2G, exp: rcp60, start_year: 2081, end_year: 2100}
+ - {dataset: IPSL-CM5A-LR, exp: rcp60, start_year: 2081, end_year: 2100}
+ - {dataset: IPSL-CM5A-MR, exp: rcp60, start_year: 2081, end_year: 2100}
+
+ - {dataset: CCSM4, exp: rcp60, start_year: 2181, end_year: 2200}
+ - {dataset: CESM1-CAM5, exp: rcp60, start_year: 2181, end_year: 2200}
+
+ - {dataset: GFDL-ESM2G, exp: rcp85, start_year: 2081, end_year: 2100}
+ - {dataset: IPSL-CM5A-LR, exp: rcp85, start_year: 2081, end_year: 2100}
+ - {dataset: IPSL-CM5A-MR, exp: rcp85, start_year: 2081, end_year: 2100}
+ - {dataset: MPI-ESM-LR, exp: rcp85, start_year: 2081, end_year: 2100}
+ - {dataset: MPI-ESM-MR, exp: rcp85, start_year: 2081, end_year: 2100}
+
+ - {dataset: CCSM4, exp: rcp85, start_year: 2181, end_year: 2200}
+ - {dataset: IPSL-CM5A-LR, exp: rcp85, start_year: 2181, end_year: 2200}
+ - {dataset: MPI-ESM-LR, exp: rcp85, start_year: 2181, end_year: 2200}
+
+ scripts:
+ ch12_calc_map_diff_scaleT_tas: &tas_scale_settings
+ script: ipcc_ar5/ch12_calc_map_diff_scaleT_mmm_stipp.ncl
+
+ ### Required attributes
+ # List of scenarios to be included
+ scenarios: [rcp26, rcp45, rcp60, rcp85]
+ # List with start years of periods to be included
+ periods: [2081, 2181]
+ # Time
average ('opt' argument of time_operations.ncl, + # seasonalclim, annualclim) + time_avg: annualclim + # Labels on top of each panel (loop is seasons, scenarios, periods) + label: ["2081-2100", "2181-2200"] + + ### Optional attributes + # List with seasons index (required if time_avg "seasonalclim") + # (DJF:0, MAM:1, JJA:2, SON:3) + # seasons: [0, 2] + # Show difference in percent (1, default 0) + # percent: + + ch12_plot_map_diff_scaleT_tas: + <<: *tas_scale_settings + script: ipcc_ar5/ch12_plot_map_diff_mmm_stipp.ncl + + ### Required attributes + ancestors: ['tas', 'ch12_calc_map_diff_scaleT_tas'] + diagscript: 'ch12_calc_map_diff_scaleT_mmm_stipp' + + ### Optional attributes + # Map projection (any valid ncl projection, default Robinson) + projection: Robinson + # Plot significance as stippling + sig: true + # Contour levels for all difference plots + diff_levs: [0, 0.25, 0.5, 0.75, 1, 1.25, 1.5, 1.75, 2] + # Alternative colormap (path to .rgb file or ncl name) + colormap: $diag_scripts/shared/plot/rgb/ipcc_temp_scaling.rgb + # Alternative name for output plot + # (default is diagnostic + varname + time_avg) + # pltname: + # Maximum number of plots in vertical + # max_vert: 2 + # Maximum number of plots in horizontal + # max_hori: 2 + + pr_change_scaleT: + description: Multi-model mean precipitation change scaled by + global tas change, map with significance + themes: + - phys + realms: + - atmos + variables: + tas: + preprocessor: preproc_map + project: CMIP5 + mip: Amon + ensemble: r1i1p1 + exp: historical + pr: + preprocessor: preproc_map + project: CMIP5 + mip: Amon + ensemble: r1i1p1 + exp: historical + additional_datasets: + - {dataset: CCSM4, start_year: 1986, end_year: 2005} + - {dataset: CESM1-CAM5, start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2G, start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5A-LR, start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5A-MR, start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-LR, start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-MR, start_year: 1986, end_year: 2005} + + - {dataset: GFDL-ESM2G, exp: rcp26, start_year: 2081, end_year: 2100} + - {dataset: MPI-ESM-LR, exp: rcp26, start_year: 2081, end_year: 2100} + + - {dataset: IPSL-CM5A-LR, exp: rcp26, start_year: 2181, end_year: 2200} + - {dataset: MPI-ESM-LR, exp: rcp26, start_year: 2181, end_year: 2200} + + - {dataset: GFDL-ESM2G, exp: rcp45, start_year: 2081, end_year: 2100} + - {dataset: IPSL-CM5A-LR, exp: rcp45, start_year: 2081, end_year: 2100} + - {dataset: MPI-ESM-LR, exp: rcp45, start_year: 2081, end_year: 2100} + + - {dataset: IPSL-CM5A-MR, exp: rcp45, start_year: 2181, end_year: 2200} + - {dataset: MPI-ESM-LR, exp: rcp45, start_year: 2181, end_year: 2200} + + - {dataset: GFDL-ESM2G, exp: rcp60, start_year: 2081, end_year: 2100} + - {dataset: IPSL-CM5A-LR, exp: rcp60, start_year: 2081, end_year: 2100} + - {dataset: IPSL-CM5A-MR, exp: rcp60, start_year: 2081, end_year: 2100} + + - {dataset: CCSM4, exp: rcp60, start_year: 2181, end_year: 2200} + - {dataset: CESM1-CAM5, exp: rcp60, start_year: 2181, end_year: 2200} + + - {dataset: GFDL-ESM2G, exp: rcp85, start_year: 2081, end_year: 2100} + - {dataset: IPSL-CM5A-LR, exp: rcp85, start_year: 2081, end_year: 2100} + - {dataset: IPSL-CM5A-MR, exp: rcp85, start_year: 2081, end_year: 2100} + - {dataset: MPI-ESM-LR, exp: rcp85, start_year: 2081, end_year: 2100} + - {dataset: MPI-ESM-MR, exp: rcp85, start_year: 2081, end_year: 2100} + + - {dataset: CCSM4, exp: rcp85, start_year: 2181, end_year: 2200} + - {dataset: 
IPSL-CM5A-LR, exp: rcp85, start_year: 2181, end_year: 2200} + - {dataset: MPI-ESM-LR, exp: rcp85, start_year: 2181, end_year: 2200} + + scripts: + ch12_calc_map_diff_scaleT_pr: &pr_scale_settings + script: ipcc_ar5/ch12_calc_map_diff_scaleT_mmm_stipp.ncl + + # Required attributes: + # list with scenarios to be included + scenarios: [rcp26, rcp45, rcp60, rcp85] + # list with start years of periods to be included + periods: [2081, 2181] + # seasonal or annual means + time_avg: annualclim + # labels on top of each panel, loop to plot is seasons, periods + label: ["2081-2100", "2181-2200"] + + # Optional attributes: + # list with seasons index to be included if averagetime is + # "seasonalclim" (required), DJF:0, MAM:1, JJA:2, SON:3 + # seasons: [0, 2] + # difference in percent? percent = 1, default = 0 + percent: 1 + + ch12_plot_map_diff_scaleT_pr: + <<: *pr_scale_settings + script: ipcc_ar5/ch12_plot_map_diff_mmm_stipp.ncl + ancestors: ['pr', 'pr_change_scaleT/ch12_calc_map_diff_scaleT_pr'] + diagscript: 'ch12_calc_map_diff_scaleT_mmm_stipp' + + # Optional attributes: + # map projection, any valid ncl projection + projection: Robinson + # plot significance as stippling? + sig: true + # use explicit levels for contour plot + diff_levs: [-12, -9, -6, -3, 0, 3, 6, 9, 12] + # use alternative colormap + colormap: $diag_scripts/shared/plot/rgb/ipcc-precip-delta.rgb + # an alternative name for the output plot, + # default is diagnostic + varname + time_avg + # pltname: + # maximum number of plots in vertical + max_vert: 2 + # maximum number of plots in horizontal + max_hori: 2 + + ### Northern Hemisphere relative snow cover change timeseries ############## + snw_line: + description: Prepare snow area data for timeseries of multiple scenarios + incl. spread and then plot + themes: + - phys + realms: + - land + variables: + snw: + project: CMIP5 + mip: LImon + ensemble: r1i1p1 + exp: historical + sftlf: + project: CMIP5 + mip: fx + exp: historical + ensemble: r0i0p0 + sftgif: + project: CMIP5 + mip: fx + exp: historical + ensemble: r0i0p0 + + additional_datasets: + - {dataset: CanESM2, start_year: 1980, end_year: 2005} + - {dataset: CCSM4, start_year: 1980, end_year: 2005} + - {dataset: CESM1-CAM5, start_year: 1980, end_year: 2005} + - {dataset: CSIRO-Mk3-6-0, start_year: 1980, end_year: 2005} + - {dataset: GFDL-CM3, start_year: 1980, end_year: 2005} + - {dataset: MPI-ESM-LR, start_year: 1980, end_year: 2005} + - {dataset: MPI-ESM-MR, start_year: 1980, end_year: 2005} + - {dataset: NorESM1-M, start_year: 1980, end_year: 2005} + + - {dataset: CanESM2, exp: rcp26, start_year: 2006, end_year: 2100} + - {dataset: CCSM4, exp: rcp26, start_year: 2006, end_year: 2100} + - {dataset: CESM1-CAM5, exp: rcp26, start_year: 2006, end_year: 2100} + - {dataset: CSIRO-Mk3-6-0, exp: rcp26, start_year: 2006, end_year: 2100} + - {dataset: GFDL-CM3, exp: rcp26, start_year: 2006, end_year: 2100} + - {dataset: MPI-ESM-LR, exp: rcp26, start_year: 2006, end_year: 2100} + - {dataset: MPI-ESM-MR, exp: rcp26, start_year: 2006, end_year: 2100} + + - {dataset: CanESM2, exp: rcp45, start_year: 2006, end_year: 2100} + - {dataset: CCSM4, exp: rcp45, start_year: 2006, end_year: 2100} + - {dataset: CESM1-CAM5, exp: rcp45, start_year: 2006, end_year: 2100} + - {dataset: CSIRO-Mk3-6-0, exp: rcp45, start_year: 2006, end_year: 2100} + - {dataset: GFDL-CM3, exp: rcp45, start_year: 2006, end_year: 2100} + - {dataset: MPI-ESM-LR, exp: rcp45, start_year: 2006, end_year: 2100} + - {dataset: MPI-ESM-MR, exp: rcp45, start_year: 2006, end_year: 
2100}
+
+ - {dataset: CCSM4, exp: rcp60, start_year: 2006, end_year: 2100}
+ - {dataset: CESM1-CAM5, exp: rcp60, start_year: 2006, end_year: 2100}
+ - {dataset: CSIRO-Mk3-6-0, exp: rcp60, start_year: 2006, end_year: 2100}
+ - {dataset: GFDL-CM3, exp: rcp60, start_year: 2006, end_year: 2100}
+ - {dataset: NorESM1-M, exp: rcp60, start_year: 2006, end_year: 2100}
+
+ - {dataset: CanESM2, exp: rcp85, start_year: 2006, end_year: 2100}
+ - {dataset: CCSM4, exp: rcp85, start_year: 2006, end_year: 2100}
+ - {dataset: CESM1-CAM5, exp: rcp85, start_year: 2006, end_year: 2100}
+ - {dataset: CSIRO-Mk3-6-0, exp: rcp85, start_year: 2006, end_year: 2100}
+ - {dataset: GFDL-CM3, exp: rcp85, start_year: 2006, end_year: 2100}
+ - {dataset: MPI-ESM-LR, exp: rcp85, start_year: 2006, end_year: 2100}
+ - {dataset: MPI-ESM-MR, exp: rcp85, start_year: 2006, end_year: 2100}
+
+ scripts:
+ ch12_calc_ts_snw_area_change: &ts_snw_settings
+ script: ipcc_ar5/ch12_snw_area_change_fig12-32.ncl
+
+ ### Required attributes
+ # Plot style
+ styleset: CMIP5
+ # List of scenarios to be included
+ scenarios: [rcp26, rcp45, rcp60, rcp85]
+ # Start years in time periods (e.g. start of historical runs and rcps)
+ syears: [1980, 2006]
+ # End years in time periods (e.g. end of historical runs and rcps)
+ eyears: [2005, 2100]
+ # Start year of reference period (e.g. 1986)
+ begin_ref_year: 1986
+ # End year of reference period (e.g. 2005)
+ end_ref_year: 2005
+ # Months to be included in the analysis (e.g. for NH March + April)
+ months: MA
+ # Labels to use in legend depending on scenarios
+ label: [Historical, RCP2.6, RCP4.5, RCP6.0, RCP8.5]
+
+ ### Optional attributes
+ # Title in plot
+ title: "Relative snow cover extent change"
+ # y-axis title
+ yaxis: "(-)"
+ # Minimum value on y-axis
+ ymin: -0.4
+ # Maximum value on y-axis
+ ymax: 0.2
+ # Print number of models at top-right of the figure
+ model_nr: true
+ # Alternative colormap (path to .rgb file or ncl name)
+ colormap: $diag_scripts/shared/plot/rgb/ipcc_color_tseries.rgb
+ # Minimum and maximum latitudes and longitudes if not global
+ ts_minlat: 0
+ ts_maxlat: 90
+ ts_minlon: 0
+ ts_maxlon: 360
+
+ ch12_plot_ts_line_snw_area_change:
+ <<: *ts_snw_settings
+ script: ipcc_ar5/ch12_plot_ts_line_mean_spread.ncl
+
+ ### Required attributes
+ ancestors: ['snw', 'ch12_calc_ts_snw_area_change']
+
+ # **********************************************************************
+ # Collins et al. (2013) - IPCC AR5, chap. 12
+ # similar to fig.
12.31a/d + # ********************************************************************** + # Emergent constraints for September Arctic sea ice extent + # ********************************************************************** + + fig12-31ad: + description: Emergent constraints for September Arctic sea ice extent + themes: + - seaIce + realms: + - seaIce + variables: + sic: + mip: OImon + project: CMIP5 + ensemble: r1i1p1 + reference_dataset: HadISST + additional_datasets: + - {dataset: HadISST, project: OBS, type: reanaly, version: 1, tier: 2, start_year: 1960, end_year: 2005} + areacello: + mip: fx + project: CMIP5 + ensemble: r0i0p0 + additional_datasets: + - {dataset: ACCESS1-0, exp: historical, start_year: 1960, end_year: 2005} + - {dataset: ACCESS1-0, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: ACCESS1-3, exp: historical, start_year: 1960, end_year: 2005} + - {dataset: ACCESS1-3, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: bcc-csm1-1, exp: historical, start_year: 1960, end_year: 2005} + - {dataset: bcc-csm1-1, exp: rcp85, start_year: 2006, end_year: 2100} + #- {dataset: bcc-csm1-1-m, exp: historical, start_year: 1960, end_year: 2005} # areacello data missing on ESGF + #- {dataset: bcc-csm1-1-m, exp: rcp85, start_year: 2006, end_year: 2100} # areacello data missing on ESGF + - {dataset: CanESM2, exp: historical, start_year: 1960, end_year: 2005} + - {dataset: CanESM2, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: CCSM4, exp: historical, start_year: 1960, end_year: 2005} + - {dataset: CCSM4, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: CESM1-CAM5, exp: historical, start_year: 1960, end_year: 2005} + - {dataset: CESM1-CAM5, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: CNRM-CM5, exp: historical, start_year: 1960, end_year: 2005} + - {dataset: CNRM-CM5, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: CSIRO-Mk3-6-0, exp: historical, start_year: 1960, end_year: 2005} + - {dataset: CSIRO-Mk3-6-0, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: FGOALS-g2, exp: historical, start_year: 1960, end_year: 2005} + - {dataset: FGOALS-g2, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: GFDL-CM3, exp: historical, start_year: 1960, end_year: 2005} + - {dataset: GFDL-CM3, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: GFDL-ESM2G, exp: historical, start_year: 1960, end_year: 2005} + - {dataset: GFDL-ESM2G, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: GFDL-ESM2M, exp: historical, start_year: 1960, end_year: 2005} + - {dataset: GFDL-ESM2M, exp: rcp85, start_year: 2006, end_year: 2100} + #- {dataset: inmcm4, exp: historical, start_year: 1960, end_year: 2005} # areacello data missing on ESGF + #- {dataset: inmcm4, exp: rcp85, start_year: 2006, end_year: 2100} # areacello data missing on ESGF + - {dataset: IPSL-CM5A-LR, exp: historical, start_year: 1960, end_year: 2005} + - {dataset: IPSL-CM5A-LR, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: IPSL-CM5A-MR, exp: historical, start_year: 1960, end_year: 2005} + - {dataset: IPSL-CM5A-MR, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: IPSL-CM5B-LR, exp: historical, start_year: 1960, end_year: 2005} + - {dataset: IPSL-CM5B-LR, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: MIROC5, exp: historical, start_year: 1960, end_year: 2005} + - {dataset: MIROC5, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: MIROC-ESM, exp: historical, start_year: 1960, end_year: 2005} + - {dataset: MIROC-ESM, 
exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: MIROC-ESM-CHEM, exp: historical, start_year: 1960, end_year: 2005} + - {dataset: MIROC-ESM-CHEM, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: MPI-ESM-LR, exp: historical, start_year: 1960, end_year: 2005} + - {dataset: MPI-ESM-LR, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: MPI-ESM-MR, exp: historical, start_year: 1960, end_year: 2005} + - {dataset: MPI-ESM-MR, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: MRI-CGCM3, exp: historical, start_year: 1960, end_year: 2005} + - {dataset: MRI-CGCM3, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: MRI-ESM1, exp: historical, start_year: 1960, end_year: 2005} + - {dataset: MRI-ESM1, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: NorESM1-M, exp: historical, start_year: 1960, end_year: 2005} + - {dataset: NorESM1-M, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: NorESM1-ME, exp: historical, start_year: 1960, end_year: 2005} + - {dataset: NorESM1-ME, exp: rcp85, start_year: 2006, end_year: 2100} + scripts: + fig12-31ad: + script: seaice/seaice_ecs.ncl + # "Arctic" or "Antarctic"; entire hemisphere will be evaluated + region: "Arctic" + # A = annual mean, 3 = March, 9 = September + month: "9" + # Fill polar hole in data with sic = 1. + fill_pole_hole: true + hist_exp: "historical" + rcp_exp: "rcp85" + styleset: "CMIP5" + + # ********************************************************************** + # Collins et al. (2013) - IPCC AR5, chap. 12 + # similar to fig. 12.31e + # ********************************************************************** + # year of near disappearance of September Arctic sea ice + # ********************************************************************** + + fig12-31e: + description: year of near disappearance of September Arctic sea ice + themes: + - seaIce + realms: + - seaIce + variables: + sic: + mip: OImon + project: CMIP5 + exp: rcp85 + ensemble: r1i1p1 + start_year: 2006 + end_year: 2100 + areacello: + mip: fx + project: CMIP5 + exp: historical + ensemble: r0i0p0 + start_year: 2006 + end_year: 2100 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + #- {dataset: bcc-csm1-1-m} # areacello data missing on ESGF + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-CAM5} + - {dataset: CNRM-CM5} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + #- {dataset: inmcm4} # areacello data missing on ESGF + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + scripts: + fig12-31e: + script: seaice/seaice_yod.ncl + # "Arctic" or "Antarctic"; entire hemisphere will be evaluated + region: "Arctic" + # A = annual mean, 3 = March, 9 = September + month: "9" + # Fill polar hole in data with sic = 1. + fill_pole_hole: true + diff --git a/esmvaltool/recipes/recipe_combined_indices.yml b/esmvaltool/recipes/recipe_combined_indices.yml new file mode 100644 index 0000000000..a30fbcbe30 --- /dev/null +++ b/esmvaltool/recipes/recipe_combined_indices.yml @@ -0,0 +1,55 @@ +# ESMValTool +# recipe_combined_indices.yml +--- +documentation: + title: | + Indices and area weighted averages. 
+
+ description: |
+ Recipe for computing seasonal means or running averages,
+ combining indices from multiple models and computing area averages
+
+ authors:
+ - hunter_alasdair
+ - perez-zanon_nuria
+ - manubens_nicolau
+
+ maintainer:
+ - unmaintained
+
+ projects:
+ - c3s-magic
+
+datasets:
+ - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1950, end_year: 2005}
+ - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1950, end_year: 2005}
+
+
+preprocessors:
+ preprocessor1:
+ regrid:
+ target_grid: 1x1
+ scheme: linear
+
+diagnostics:
+ combine_indices:
+ description: Combine indices and/or compute area weighted averages.
+ variables:
+ tos:
+ preprocessor: preprocessor1
+ mip: Omon
+ scripts:
+ main:
+ script: magic_bsc/combined_indices.R
+ # Weights for combining indices - if not "equal" or "NULL", must be the same length as the number of indices
+ region: Nino3.4 # Nino3, Nino3.4, Nino4, NAO or SOI
+
+ # Compute running mean?
+ running_mean: null
+
+ # Compute seasons?
+ moninf: 12 # First month
+ monsup: 2 # Last month
+
+ # Compute standardization?
+ standardized: true
diff --git a/esmvaltool/recipes/recipe_combined_indices_wp6.yml b/esmvaltool/recipes/recipe_combined_indices_wp6.yml
deleted file mode 100644
index 9ad6675602..0000000000
--- a/esmvaltool/recipes/recipe_combined_indices_wp6.yml
+++ /dev/null
@@ -1,55 +0,0 @@
-# ESMValTool
-# recipe_combined_indices_wp6.yml
----
-documentation:
- description: |
- Recipe for for computing seasonal means or running avergaes,
- combining indices from multiple models and computing area averages
-
- authors:
- - hunt_al
- - manu_ni
-
- projects:
- - c3s-magic
-
-datasets:
- - {dataset: MPI-ESM-MR, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1950, end_year: 2005}
- #- {dataset: bcc-csm1-1, type: exp, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- #- {dataset: IPSL-CM5A-MR, type: exp, project: CMIP5, mip: day, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000}
-
-preprocessors:
- preproc:
- regrid:
- target_grid: MPI-ESM-MR
- scheme: linear
- mask_fillvalues:
- threshold_fraction: 0.95
- extract_region:
- end_latitude: 70
- start_latitude: 20
- end_longitude: 300
- start_longitude: 200
-
-diagnostics:
- combine_indices:
- description: Combine indices and/or compute area weighted averages.
- variables:
- pr:
- preprocessor: preproc
- mip: Amon
- scripts:
- main:
- script: magic_bsc/combined_indices_wp6.r
- ## Weights for combining indices - if not "equal" or "NULL", must be the same length as the number of indices
- weights: "equal"
-
- # Compute running mean?
- running_mean: 5 #3
-
- #Or compute seasons? (NOTE: cant compute both running means and seasons):
- moninf: 1 # First month
- monsup: 3 # Last month
-
- # Multi-year average?
- multi_year_average: false diff --git a/esmvaltool/recipes/recipe_consecdrydays.yml b/esmvaltool/recipes/recipe_consecdrydays.yml index 6025788173..0de3357be5 100644 --- a/esmvaltool/recipes/recipe_consecdrydays.yml +++ b/esmvaltool/recipes/recipe_consecdrydays.yml @@ -1,9 +1,23 @@ +# ESMValTool +# recipe_consecdrydays --- documentation: - description: 'Consecutive dry days' - authors: ['berg_pe'] - projects: ['c3s-magic'] - references: ['acknow_project'] + title: Consecutive dry days + + description: | + Recipe to calculate consecutive dry days + + authors: + - berg_peter + + projects: + - c3s-magic + + maintainer: + - unmaintained + + references: + - acknow_project datasets: - {dataset: bcc-csm1-1-m, project: CMIP5, mip: day, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2002} @@ -20,5 +34,5 @@ diagnostics: dryindex: 'cdd' plim: 1 frlim: 5 - quickplot: - plot_type: pcolormesh + plot: + cmap: rain diff --git a/esmvaltool/recipes/recipe_cox18nature.yml b/esmvaltool/recipes/recipe_cox18nature.yml index e9fe3734a2..9ec668425b 100644 --- a/esmvaltool/recipes/recipe_cox18nature.yml +++ b/esmvaltool/recipes/recipe_cox18nature.yml @@ -1,16 +1,21 @@ +# ESMValTool # recipe_cox18nature.yml --- documentation: + title: > + Emergent constraint on equilibrium climate sensitivity from global + temperature variability - description: | - Reproducing the emergent constraint for ECS based on global temperature - variability (Cox et al., 2018). + description: > + This recipe reproduces the emergent constraint for the equilibrium climate + sensitivity (ECS) based on global temperature variability by Cox et al. + (2018). authors: - - schl_ma + - schlund_manuel maintainer: - - schl_ma + - schlund_manuel references: - cox18nature @@ -22,9 +27,8 @@ documentation: preprocessors: spatial_mean: - average_region: - coord1: latitude - coord2: longitude + area_statistics: + operator: mean diagnostics: @@ -72,10 +76,9 @@ diagnostics: scripts: psi: script: climate_metrics/psi.py - plot_ecs_regression: true diag_cox18nature: - description: Plot emergent relationship between ECS ans psi. + description: Plot emergent relationship between ECS and psi. scripts: cox18nature: script: emergent_constraints/cox18nature.py diff --git a/esmvaltool/recipes/recipe_cvdp.yml b/esmvaltool/recipes/recipe_cvdp.yml index 82d1fdf950..02f4ef4e61 100644 --- a/esmvaltool/recipes/recipe_cvdp.yml +++ b/esmvaltool/recipes/recipe_cvdp.yml @@ -2,15 +2,17 @@ # recipe_cvdp.yml --- documentation: + title: Climate Variability Diagnostics Package + description: | Recipe for executing the NCAR CVDP package in the ESMValTool framework. authors: - - phil_ad - - broe_bj + - phillips_adam + - broetz_bjoern maintainer: - - broe_bj + - broetz_bjoern references: - phillips14eos @@ -20,18 +22,23 @@ documentation: - c3s-magic datasets: - - {dataset: MPI-ESM-LR, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} + - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1850, end_year: 1855} + - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r2i1p1, start_year: 1850, end_year: 1855} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1850, end_year: 1855} diagnostics: diagnostic1: description: Run the NCAR CVDPackage. 
variables:
+ ts:
+ mip: Amon
tas:
- #reference_dataset: bcc-csm1-1
+ mip: Amon
pr:
- #reference_dataset: MPI-ESM-LR
+ mip: Amon
psl:
+ mip: Amon
scripts:
cvdp:
script: cvdp/cvdp_wrapper.py
diff --git a/esmvaltool/recipes/recipe_deangelis15nat.yml b/esmvaltool/recipes/recipe_deangelis15nat.yml
new file mode 100644
index 0000000000..e1d0373c66
--- /dev/null
+++ b/esmvaltool/recipes/recipe_deangelis15nat.yml
@@ -0,0 +1,329 @@
+# ESMValTool
+# recipe_deangelis15nat.yml
+---
+documentation:
+ title: "Evaluate water vapor shortwave radiation absorption schemes of ESMs against observations"
+ description: |
+ Recipe for reproducing the plots in DeAngelis et al. (2015, Nature)
+
+ authors:
+ - weigel_katja
+
+ maintainer:
+ - weigel_katja
+
+ references:
+ - deangelis15nat
+
+ projects:
+ - eval4cmip
+
+preprocessors:
+ spatial_mean:
+ area_statistics:
+ operator: mean
+
+ tropical_ocean:
+ mask_landsea:
+ mask_out: land
+ regrid:
+ target_grid: 2.5x2.5
+ scheme: linear
+ extract_region:
+ start_latitude: -30
+ end_latitude: 30
+ start_longitude: 0
+ end_longitude: 360
+
+ tropical:
+ regrid:
+ target_grid: 2.5x2.5
+ scheme: linear
+ extract_region:
+ start_latitude: -30
+ end_latitude: 30
+ start_longitude: 0
+ end_longitude: 360
+
+
+diagnostics:
+
+ deangelisf1b:
+ description: Bar plot similar to DeAngelis Fig. 1b.
+ # Global multi-model experiment mean for flux variables, e.g. rlnst, rsnst, lvp, hfss
+ variables:
+ rlnst: &spatial_mean_cmip5_r1i1p1_amon_t2ms
+ preprocessor: spatial_mean
+ project: CMIP5
+ ensemble: r1i1p1
+ mip: Amon
+ field: T2Ms
+ derive: true
+ force_derivation: false
+ rsnst:
+ <<: *spatial_mean_cmip5_r1i1p1_amon_t2ms
+ lvp:
+ <<: *spatial_mean_cmip5_r1i1p1_amon_t2ms
+ hfss:
+ <<: *spatial_mean_cmip5_r1i1p1_amon_t2ms
+ derive: false
+ additional_datasets:
+ - {dataset: GFDL-CM3, exp: piControl, start_year: 1,
+ end_year: 150}
+ - {dataset: GFDL-CM3, exp: rcp85, start_year: 2091,
+ end_year: 2100}
+ - {dataset: GFDL-CM3, exp: abrupt4xCO2, start_year: 141, end_year: 150}
+ - {dataset: ACCESS1-0, exp: piControl, start_year: 300, end_year: 449}
+ - {dataset: ACCESS1-0, exp: rcp85, start_year: 2091, end_year: 2100}
+ - {dataset: ACCESS1-0, exp: abrupt4xCO2, start_year: 300, end_year: 449}
+ - {dataset: ACCESS1-3, exp: piControl, start_year: 250, end_year: 399}
+ - {dataset: ACCESS1-3, exp: rcp85, start_year: 2091, end_year: 2100}
+ - {dataset: ACCESS1-3, exp: abrupt4xCO2, start_year: 250, end_year: 399}
+ # Strange values in lvp
+ # - {dataset: bcc-csm1-1, exp: piControl, start_year: 160, end_year: 309}
+ # - {dataset: bcc-csm1-1, exp: rcp85, start_year: 2091, end_year: 2100}
+ # - {dataset: bcc-csm1-1, exp: abrupt4xCO2, start_year: 300, end_year: 309}
+ # - {dataset: bcc-csm1-1-m, exp: piControl, start_year: 240, end_year: 389}
+ # - {dataset: bcc-csm1-1-m, exp: rcp85, start_year: 2091, end_year: 2100}
+ # - {dataset: bcc-csm1-1-m, exp: abrupt4xCO2, start_year: 380,
+ # end_year: 389}
+ - {dataset: CanESM2, exp: piControl, start_year: 2015, end_year: 2164}
+ - {dataset: CanESM2, exp: rcp85, start_year: 2091, end_year: 2100}
+ - {dataset: CanESM2, exp: abrupt4xCO2, start_year: 1990, end_year: 1999}
+ # data not available completely on DKRZ
+ # - {dataset: CCSM4, exp: piControl, start_year: 800, end_year: 949}
+ # - {dataset: CCSM4, exp: rcp85, start_year: 2091, end_year: 2100}
+ # - {dataset: CCSM4, exp: abrupt4xCO2, start_year: 1990, end_year: 1999}
+ - {dataset: CNRM-CM5, exp: piControl, start_year: 1850, end_year: 1999}
+ - {dataset: CNRM-CM5, exp: rcp85, start_year: 2091,
end_year: 2100} + - {dataset: CNRM-CM5, exp: abrupt4xCO2, start_year: 1990, end_year: 1999} + - {dataset: GFDL-ESM2G, exp: piControl, start_year: 1, end_year: 150} + - {dataset: GFDL-ESM2G, exp: rcp85, start_year: 2091, end_year: 2100} + - {dataset: GFDL-ESM2G, exp: abrupt4xCO2, start_year: 141, end_year: 150} + - {dataset: GFDL-ESM2M, exp: piControl, start_year: 1, end_year: 150} + - {dataset: GFDL-ESM2M, exp: rcp85, start_year: 2091, end_year: 2100} + - {dataset: GFDL-ESM2M, exp: abrupt4xCO2, start_year: 141, end_year: 150} + - {dataset: GISS-E2-R, exp: piControl, start_year: 3331, end_year: 3480} + - {dataset: GISS-E2-R, exp: rcp85, start_year: 2091, end_year: 2100} + - {dataset: GISS-E2-R, exp: abrupt4xCO2, start_year: 1990, + end_year: 1999} + - {dataset: inmcm4, exp: piControl, start_year: 2090, end_year: 2239} + - {dataset: inmcm4, exp: rcp85, start_year: 2091, end_year: 2100} + - {dataset: inmcm4, exp: abrupt4xCO2, start_year: 2230, end_year: 2239} + # IPSL-CM5A-MR only 140 years available + - {dataset: IPSL-CM5A-MR, exp: piControl, start_year: 1850, + end_year: 1989} + - {dataset: IPSL-CM5A-MR, exp: rcp85, start_year: 2091, end_year: 2100} + - {dataset: IPSL-CM5A-MR, exp: abrupt4xCO2, start_year: 1980, + end_year: 1989} + - {dataset: IPSL-CM5B-LR, exp: piControl, start_year: 1850, + end_year: 1999} + - {dataset: IPSL-CM5B-LR, exp: rcp85, start_year: 2091, end_year: 2100} + - {dataset: IPSL-CM5B-LR, exp: abrupt4xCO2, start_year: 1990, + end_year: 1999} + - {dataset: MIROC-ESM, exp: piControl, start_year: 1800, end_year: 1949} + - {dataset: MIROC-ESM, exp: rcp85, start_year: 2091, end_year: 2100} + - {dataset: MIROC-ESM, exp: abrupt4xCO2, start_year: 141, end_year: 150} + - {dataset: MIROC5, exp: piControl, start_year: 2100, end_year: 2249} + - {dataset: MIROC5, exp: rcp85, start_year: 2091, end_year: 2100} + - {dataset: MIROC5, exp: abrupt4xCO2, start_year: 2240, end_year: 2249} + - {dataset: MPI-ESM-LR, exp: piControl, start_year: 2015, end_year: 2164} + - {dataset: MPI-ESM-LR, exp: rcp85, start_year: 2091, end_year: 2100} + - {dataset: MPI-ESM-LR, exp: abrupt4xCO2, start_year: 1990, + end_year: 1999} + - {dataset: MPI-ESM-MR, exp: piControl, start_year: 2015, end_year: 2164} + - {dataset: MPI-ESM-MR, exp: rcp85, start_year: 2091, end_year: 2100} + - {dataset: MPI-ESM-MR, exp: abrupt4xCO2, start_year: 1990, + end_year: 1999} + - {dataset: MRI-CGCM3, exp: piControl, start_year: 1851, end_year: 2000} + - {dataset: MRI-CGCM3, exp: rcp85, start_year: 2091, end_year: 2100} + - {dataset: MRI-CGCM3, exp: abrupt4xCO2, start_year: 1991, + end_year: 2000} + - {dataset: NorESM1-M, exp: piControl, start_year: 700, end_year: 849} + - {dataset: NorESM1-M, exp: rcp85, start_year: 2091, end_year: 2100} + - {dataset: NorESM1-M, exp: abrupt4xCO2, start_year: 141, end_year: 150} + + scripts: + deangelisf1b: + script: deangelis15nat/deangelisf1b.py + + + deangelisf2ext: + description: Plots scatter plot and linear regression for LvP/dtas. + # dLWC/dtas, dSWA/dtas, and dSH/dtas (DeAngelis extended data Fig. 1, + # one plot for each model) and temperature-mediated responses of + # LvP (i.e. LvdP/dT) and SWA (i.e., dSWA/dT) for all-sky and clear-sky + # conditions (DeAngelis Fig. 2a) and Scatterplot of the model-produced + # LvdP/dT versus clear-sky dSWA/dT conditions (DeAngelis Fig. 2a). 
+ + variables: + rlnst: &spatial_mean2_cmip5_r1i1p1_amon_t2ms + preprocessor: spatial_mean + project: CMIP5 + ensemble: r1i1p1 + mip: Amon + derive: true + force_derivation: false + rsnst: + <<: *spatial_mean2_cmip5_r1i1p1_amon_t2ms + rlnstcs: + <<: *spatial_mean2_cmip5_r1i1p1_amon_t2ms + rsnstcs: + <<: *spatial_mean2_cmip5_r1i1p1_amon_t2ms + lvp: + <<: *spatial_mean2_cmip5_r1i1p1_amon_t2ms + hfss: + <<: *spatial_mean2_cmip5_r1i1p1_amon_t2ms + derive: false + tas: + <<: *spatial_mean2_cmip5_r1i1p1_amon_t2ms + derive: false + additional_datasets: + - {dataset: GFDL-CM3, exp: piControl, start_year: 1, end_year: 150} + - {dataset: GFDL-CM3, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: ACCESS1-0, exp: piControl, start_year: 300, end_year: 449} + - {dataset: ACCESS1-0, exp: abrupt4xCO2, start_year: 300, end_year: 449} + - {dataset: ACCESS1-3, exp: piControl, start_year: 250, end_year: 399} + - {dataset: ACCESS1-3, exp: abrupt4xCO2, start_year: 250, end_year: 399} + # strange values in lvp + # - {dataset: bcc-csm1-1, exp: piControl, start_year: 160, end_year: 309} + # - {dataset: bcc-csm1-1, exp: abrupt4xCO2, start_year: 160, end_year: 309} + # - {dataset: bcc-csm1-1-m, exp: piControl, start_year: 240, end_year: 389} + # - {dataset: bcc-csm1-1-m, exp: abrupt4xCO2, start_year: 240, + # end_year: 389} + - {dataset: CanESM2, exp: piControl, start_year: 2015, end_year: 2164} + - {dataset: CanESM2, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + # data not available completely on DKRZ + # - {dataset: CCSM4, exp: piControl, start_year: 800, end_year: 949} + # - {dataset: CCSM4, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM5, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM5, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: GFDL-ESM2G, exp: piControl, start_year: 1, end_year: 150} + - {dataset: GFDL-ESM2G, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: GFDL-ESM2M, exp: piControl, start_year: 1, end_year: 150} + - {dataset: GFDL-ESM2M, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: GISS-E2-R, exp: piControl, start_year: 3331, end_year: 3480} + - {dataset: GISS-E2-R, exp: abrupt4xCO2, start_year: 1850, + end_year: 1999} + - {dataset: inmcm4, exp: piControl, start_year: 2090, end_year: 2239} + - {dataset: inmcm4, exp: abrupt4xCO2, start_year: 2090, end_year: 2239} + # IPSL-CM5A-MR only 140 years available + - {dataset: IPSL-CM5A-MR, exp: piControl, start_year: 1850, + end_year: 1989} + - {dataset: IPSL-CM5A-MR, exp: abrupt4xCO2, start_year: 1850, + end_year: 1989} + - {dataset: IPSL-CM5B-LR, exp: piControl, start_year: 1850, + end_year: 1999} + - {dataset: IPSL-CM5B-LR, exp: abrupt4xCO2, start_year: 1850, + end_year: 1999} + - {dataset: MIROC-ESM, exp: piControl, start_year: 1800, end_year: 1949} + - {dataset: MIROC-ESM, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: MIROC5, exp: piControl, start_year: 2100, end_year: 2249} + - {dataset: MIROC5, exp: abrupt4xCO2, start_year: 2100, end_year: 2249} + - {dataset: MPI-ESM-LR, exp: piControl, start_year: 2015, end_year: 2164} + - {dataset: MPI-ESM-LR, exp: abrupt4xCO2, start_year: 1850, + end_year: 1999} + - {dataset: MPI-ESM-MR, exp: piControl, start_year: 2015, end_year: 2164} + - {dataset: MPI-ESM-MR, exp: abrupt4xCO2, start_year: 1850, + end_year: 1999} + - {dataset: MRI-CGCM3, exp: piControl, start_year: 1851, + end_year: 2000} + - {dataset: MRI-CGCM3, exp: abrupt4xCO2, start_year: 1851, + end_year: 2000} + - {dataset: 
NorESM1-M, exp: piControl, start_year: 700, end_year: 849}
+      - {dataset: NorESM1-M, exp: abrupt4xCO2, start_year: 1, end_year: 150}
+
+    scripts:
+      deangelisf2ext:
+        script: deangelis15nat/deangelisf2ext.py
+
+
+  deangelisf3f4:
+    description: Plot Fig. 3 and Fig. 4 of DeAngelis et al. (2015).
+    variables:
+      rsnstcs: &spatial_mean3_cmip5_r1i1p1_amon_t2ms
+        preprocessor: spatial_mean
+        project: CMIP5
+        ensemble: r1i1p1
+        mip: Amon
+        derive: true
+        force_derivation: false
+      rsnstcsnorm: &tropical_cmip5_r1i1p1_amon_t2ms
+        preprocessor: tropical_ocean
+        project: CMIP5
+        ensemble: r1i1p1
+        mip: Amon
+        derive: true
+        force_derivation: false
+        additional_datasets:
+          - {dataset: CERES-EBAF, project: obs4MIPs, type: satellite,
+             level: L3B, version: Ed2-8, start_year: 2001, end_year: 2009,
+             tier: 1}
+      tas:
+        <<: *spatial_mean3_cmip5_r1i1p1_amon_t2ms
+        derive: false
+      prw:
+        <<: *tropical_cmip5_r1i1p1_amon_t2ms
+        derive: false
+        additional_datasets:
+          - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1,
+             start_year: 2001, end_year: 2009, tier: 3}
+          - {dataset: SSMI, project: obs4MIPs, type: satellite, level: L3,
+             version: RSSv07r00, start_year: 2001, end_year: 2009, tier: 1}
+    additional_datasets:
+      - {dataset: GFDL-CM3, exp: piControl, start_year: 1, end_year: 150}
+      - {dataset: GFDL-CM3, exp: abrupt4xCO2, start_year: 1, end_year: 150}
+      - {dataset: ACCESS1-0, exp: piControl, start_year: 300, end_year: 449}
+      - {dataset: ACCESS1-0, exp: abrupt4xCO2, start_year: 300, end_year: 449}
+      - {dataset: ACCESS1-3, exp: piControl, start_year: 250, end_year: 399}
+      - {dataset: ACCESS1-3, exp: abrupt4xCO2, start_year: 250, end_year: 399}
+      # strange values in lvp
+      # - {dataset: bcc-csm1-1, exp: piControl, start_year: 160, end_year: 309}
+      # - {dataset: bcc-csm1-1, exp: abrupt4xCO2, start_year: 160, end_year: 309}
+      # - {dataset: bcc-csm1-1-m, exp: piControl, start_year: 240, end_year: 389}
+      # - {dataset: bcc-csm1-1-m, exp: abrupt4xCO2, start_year: 240, end_year: 389}
+      - {dataset: CanESM2, exp: piControl, start_year: 2015, end_year: 2164}
+      - {dataset: CanESM2, exp: abrupt4xCO2, start_year: 1850, end_year: 1999}
+      # data not available completely on DKRZ
+      # - {dataset: CCSM4, exp: piControl, start_year: 800, end_year: 949}
+      # - {dataset: CCSM4, exp: abrupt4xCO2, start_year: 1850, end_year: 1999}
+      - {dataset: CNRM-CM5, exp: piControl, start_year: 1850, end_year: 1999}
+      - {dataset: CNRM-CM5, exp: abrupt4xCO2, start_year: 1850, end_year: 1999}
+      - {dataset: GFDL-ESM2G, exp: piControl, start_year: 1, end_year: 150}
+      - {dataset: GFDL-ESM2G, exp: abrupt4xCO2, start_year: 1, end_year: 150}
+      - {dataset: GFDL-ESM2M, exp: piControl, start_year: 1, end_year: 150}
+      - {dataset: GFDL-ESM2M, exp: abrupt4xCO2, start_year: 1, end_year: 150}
+      - {dataset: GISS-E2-R, exp: piControl, start_year: 3331, end_year: 3480}
+      - {dataset: GISS-E2-R, exp: abrupt4xCO2, start_year: 1850, end_year: 1999}
+      - {dataset: inmcm4, exp: piControl, start_year: 2090, end_year: 2239}
+      - {dataset: inmcm4, exp: abrupt4xCO2, start_year: 2090, end_year: 2239}
+      # IPSL-CM5A-MR only 140 years available
+      - {dataset: IPSL-CM5A-MR, exp: piControl, start_year: 1850, end_year: 1989}
+      - {dataset: IPSL-CM5A-MR, exp: abrupt4xCO2, start_year: 1850, end_year: 1989}
+      - {dataset: IPSL-CM5B-LR, exp: piControl, start_year: 1850, end_year: 1999}
+      - {dataset: IPSL-CM5B-LR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999}
+      - {dataset: MIROC-ESM, exp: piControl, start_year: 1800, end_year: 1949}
+      - {dataset: MIROC-ESM, exp:
abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: MIROC5, exp: piControl, start_year: 2100, end_year: 2249} + - {dataset: MIROC5, exp: abrupt4xCO2, start_year: 2100, end_year: 2249} + - {dataset: MPI-ESM-LR, exp: piControl, start_year: 2015, end_year: 2164} + - {dataset: MPI-ESM-LR, exp: abrupt4xCO2, start_year: 1850, + end_year: 1999} + - {dataset: MPI-ESM-MR, exp: piControl, start_year: 2015, end_year: 2164} + - {dataset: MPI-ESM-MR, exp: abrupt4xCO2, start_year: 1850, + end_year: 1999} + - {dataset: MRI-CGCM3, exp: piControl, start_year: 1851, end_year: 2000} + - {dataset: MRI-CGCM3, exp: abrupt4xCO2, start_year: 1851, + end_year: 2000} + - {dataset: NorESM1-M, exp: piControl, start_year: 700, end_year: 849} + - {dataset: NorESM1-M, exp: abrupt4xCO2, start_year: 1, end_year: 150} + + scripts: + deangelisf3f4: + script: deangelis15nat/deangelisf3f4.py diff --git a/esmvaltool/recipes/recipe_diurnal_temperature_index.yml b/esmvaltool/recipes/recipe_diurnal_temperature_index.yml new file mode 100644 index 0000000000..038b71cad5 --- /dev/null +++ b/esmvaltool/recipes/recipe_diurnal_temperature_index.yml @@ -0,0 +1,60 @@ +# ESMValTool +# recipe_diurnal_temperature_index.yml +--- +documentation: + title: | + Diurnal temperature indicator (DTR). + + description: | + Tool to compute the diurnal temperature indicator. + + authors: + - hunter_alasdair + - perez-zanon_nuria + - manubens_nicolau + - caron_louis-philippe + + maintainer: + - unmaintained + + projects: + - c3s-magic + + +datasets: + # - {dataset: IPSL-CM5A-MR, type: exp, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1961, end_year: 1990} + # - {dataset: IPSL-CM5A-MR, type: exp, project: CMIP5, exp: rcp85, + # ensemble: r1i1p1, start_year: 2020, end_year: 2040} + # - {dataset: bcc-csm1-1, type: exp, project: CMIP5, + # exp: historical, ensemble: r1i1p1, start_year: 1971, end_year: 2000} + # - {dataset: bcc-csm1-1, type: exp, project: CMIP5, + # exp: rcp45, ensemble: r1i1p1, start_year: 2040, end_year: 2060} + - {dataset: MPI-ESM-MR, type: exp, project: CMIP5, mip: day, exp: historical, + ensemble: r1i1p1, start_year: 1961, end_year: 1990} + - {dataset: MPI-ESM-MR, type: exp, project: CMIP5, mip: day, exp: rcp26, + ensemble: r1i1p1, start_year: 2030, end_year: 2080} + + +preprocessors: + preproc: + extract_region: + start_longitude: -10 + end_longitude: 40 + start_latitude: 27 + end_latitude: 70 + mask_landsea: + mask_out: sea +diagnostics: + diurnal_temperature_indicator: + description: Calculate the diurnal temperature indicator. + variables: + tasmax: + preprocessor: preproc + mip: day + tasmin: + preprocessor: preproc + mip: day + scripts: + main: + script: magic_bsc/diurnal_temp_index.R diff --git a/esmvaltool/recipes/recipe_diurnal_temperature_index_wp7.yml b/esmvaltool/recipes/recipe_diurnal_temperature_index_wp7.yml deleted file mode 100644 index 031def56c5..0000000000 --- a/esmvaltool/recipes/recipe_diurnal_temperature_index_wp7.yml +++ /dev/null @@ -1,52 +0,0 @@ -# ESMValTool -# recipe_diurnal_temperature_index_wp7.yml ---- -documentation: - description: | - Tool to compute the diurnal temperature indicator. 
-
-  authors:
-    - hunt_al
-    - pere_nu
-    - manu_ni
-    - caro_lo
-
-  projects:
-    - c3s-magic
-
-
-datasets:
-  # - {dataset: IPSL-CM5A-MR, type: exp, project: CMIP5, mip: day, exp: historical, ensemble: r1i1p1, start_year: 1961, end_year: 1990}
-  # - {dataset: IPSL-CM5A-MR, type: exp, project: CMIP5, mip: day, exp: rcp85, ensemble: r1i1p1, start_year: 2020, end_year: 2040}
-  # - {dataset: bcc-csm1-1, type: exp, project: CMIP5, mip: day, exp: historical, ensemble: r1i1p1, start_year: 1971, end_year: 2000}
-  # - {dataset: bcc-csm1-1, type: exp, project: CMIP5, mip: day, exp: rcp45, ensemble: r1i1p1, start_year: 2040, end_year: 2060}
-  - {dataset: MPI-ESM-MR, type: exp, project: CMIP5, mip: day, exp: historical, ensemble: r1i1p1, start_year: 1961, end_year: 1990}
-  - {dataset: MPI-ESM-MR, type: exp, project: CMIP5, mip: day, exp: rcp26, ensemble: r1i1p1, start_year: 2030, end_year: 2080}
-
-
-preprocessors:
-  preproc:
-    regrid:
-      target_grid: MPI-ESM-MR
-      scheme: linear
-    mask_fillvalues:
-      threshold_fraction: 0.95
-    extract_region:
-      start_longitude: -10
-      end_longitude: 40
-      start_latitude: 27
-      end_latitude: 70
-
-diagnostics:
-  diurnal_temperature_indicator:
-    description: Calculate the diurnal temperature indicator.
-    variables:
-      tasmax:
-        preprocessor: preproc
-        mip: day
-      tasmin:
-        preprocessor: preproc
-        mip: day
-    scripts:
-      main:
-        script: magic_bsc/diurnal_temp_index.r
diff --git a/esmvaltool/recipes/recipe_eady_growth_rate.yml b/esmvaltool/recipes/recipe_eady_growth_rate.yml
new file mode 100644
index 0000000000..b0eea7f708
--- /dev/null
+++ b/esmvaltool/recipes/recipe_eady_growth_rate.yml
@@ -0,0 +1,80 @@
+# ESMValTool
+---
+documentation:
+  title: |
+    Annual and seasonal means of the maximum Eady Growth Rate (EGR).
+
+  description: |
+    Recipe to compute the annual or seasonal mean of the maximum Eady
+    Growth Rate (EGR; Hoskins & Valdes, 1990: On the existence of
+    storm-tracks, J. Atmos. Sci., 47(15), 1854–1864). The output
+    comprises NetCDF files for each model; for the seasonal means, a
+    plot is also produced for each specified level showing the EGR
+    values over the North Atlantic region.
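
The quantity behind this recipe is the maximum Eady growth rate, sigma_max = 0.31 |f| |dU/dz| / N (Hoskins & Valdes, 1990), which is why the diagnostics below request ta, zg and ua: they supply the static stability, the vertical shear and the height coordinate. A single-column sketch; the potential-temperature input and the finite differencing are illustrative assumptions, not the diagnostic's implementation:

    import numpy as np

    OMEGA = 7.292e-5  # Earth's rotation rate [s-1]
    G = 9.81          # gravitational acceleration [m s-2]

    def max_eady_growth_rate(lat_deg, u, theta, z):
        """sigma_max = 0.31 |f| |du/dz| / N for one column.

        u: zonal wind [m s-1], theta: potential temperature [K],
        z: geopotential height [m], all on the same levels.
        """
        f = 2.0 * OMEGA * np.sin(np.deg2rad(lat_deg))  # Coriolis parameter
        dudz = np.gradient(u, z)                       # vertical wind shear
        n2 = G / theta * np.gradient(theta, z)         # Brunt-Vaisala freq.^2
        return 0.31 * np.abs(f) * np.abs(dudz) / np.sqrt(n2)

    # Example column at 45N (roughly tropospheric values):
    z = np.array([1000.0, 3000.0, 5500.0, 9000.0])
    u = np.array([5.0, 15.0, 25.0, 35.0])
    theta = np.array([290.0, 300.0, 310.0, 325.0])
    print(max_eady_growth_rate(45.0, u, theta, z))  # growth rate [s-1]
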
+ + + authors: + - sanchez-gomez_emilia + - moreno-chamarro_eduardo + + maintainer: + - loosveldt-tomas_saskia + + references: + - morenochamarro2021 + + projects: + - primavera + +datasets: + - {dataset: HadGEM3-GC31-LM, project: CMIP6, exp: highresSST-present, + ensemble: r1i1p1f1, mip: Amon, grid: gn, start_year: 1950, end_year: 2014} + - {dataset: HadGEM3-GC31-MM, project: CMIP6, exp: highresSST-present, + ensemble: r1i1p1f1, mip: Amon, grid: gn, start_year: 1950, end_year: 2014} + + +preprocessors: + summer: + extract_season: + season: 'JJA' + winter: + extract_season: + season: 'DJF' + +diagnostics: + annual_egr: + variables: + ta: + zg: + ua: + scripts: + annual_eady_growth_rate: + script: primavera/eady_growth_rate/eady_growth_rate.py + time_statistic: 'annual_mean' + + + summer_egr: + variables: + ta: + preprocessor: summer + zg: + preprocessor: summer + ua: + preprocessor: summer + scripts: + summer_eady_growth_rate: + script: primavera/eady_growth_rate/eady_growth_rate.py + time_statistic: 'seasonal_mean' + + winter_egr: + variables: + ta: + preprocessor: winter + zg: + preprocessor: winter + ua: + preprocessor: winter + scripts: + winter_eady_growth_rate: + script: primavera/eady_growth_rate/eady_growth_rate.py + time_statistic: 'seasonal_mean' + plot_levels: [70000] diff --git a/esmvaltool/recipes/recipe_ecs.yml b/esmvaltool/recipes/recipe_ecs.yml index fec0f704e7..02f284d295 100644 --- a/esmvaltool/recipes/recipe_ecs.yml +++ b/esmvaltool/recipes/recipe_ecs.yml @@ -1,15 +1,22 @@ +# ESMValTool # recipe_ecs.yml --- documentation: + title: > + Equilibrium Climate Sensitivity (ECS) - description: | - Calculate ECS using linear regression following Andrews et al. (2012). + description: > + Calculate Equilibrium Climate Sensitivity (ECS) using linear regression + following Gregory et al. (2004). authors: - - schl_ma + - schlund_manuel + + maintainer: + - schlund_manuel references: - - andrews12grl + - gregory04grl projects: - crescendo @@ -18,9 +25,168 @@ documentation: preprocessors: spatial_mean: - average_region: - coord1: latitude - coord2: longitude + area_statistics: + operator: mean + + +CMIP5_RTNT: &cmip5_rtnt + # Models with missing data (on all ESGF nodes) + # EC-EARTH (no rsut) + - {dataset: ACCESS1-0, exp: piControl, start_year: 300, end_year: 449} + - {dataset: ACCESS1-0, exp: abrupt4xCO2, start_year: 300, end_year: 449} + - {dataset: ACCESS1-3, exp: piControl, start_year: 250, end_year: 399} + - {dataset: ACCESS1-3, exp: abrupt4xCO2, start_year: 250, end_year: 399} + - {dataset: bcc-csm1-1, exp: piControl, start_year: 160, end_year: 309} + - {dataset: bcc-csm1-1, exp: abrupt4xCO2, start_year: 160, end_year: 309} + - {dataset: bcc-csm1-1-m, exp: piControl, start_year: 240, end_year: 389} + - {dataset: bcc-csm1-1-m, exp: abrupt4xCO2, start_year: 240, end_year: 389} + - {dataset: BNU-ESM, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: BNU-ESM, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: CanESM2, exp: piControl, start_year: 2321, end_year: 2470} + - {dataset: CanESM2, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + # Wrong start year for piControl? (branch_time = 2.) 
+ - {dataset: CCSM4, exp: piControl, start_year: 250, end_year: 399} + - {dataset: CCSM4, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM5, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM5, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + # Only 140 years available + - {dataset: CNRM-CM5-2, exp: piControl, start_year: 1850, end_year: 1989} + - {dataset: CNRM-CM5-2, exp: abrupt4xCO2, start_year: 1850, end_year: 1989} + - {dataset: CSIRO-Mk3-6-0, exp: piControl, start_year: 104, end_year: 253} + - {dataset: CSIRO-Mk3-6-0, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: FGOALS-g2, exp: piControl, start_year: 490, end_year: 639} + - {dataset: FGOALS-g2, exp: abrupt4xCO2, start_year: 490, end_year: 639} + - {dataset: GFDL-CM3, exp: piControl, start_year: 1, end_year: 150} + - {dataset: GFDL-CM3, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: GFDL-ESM2G, exp: piControl, start_year: 1, end_year: 150} + - {dataset: GFDL-ESM2G, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: GFDL-ESM2M, exp: piControl, start_year: 1, end_year: 150} + - {dataset: GFDL-ESM2M, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: GISS-E2-H, exp: piControl, start_year: 2660, end_year: 2809} + - {dataset: GISS-E2-H, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: GISS-E2-R, exp: piControl, start_year: 4200, end_year: 4349} + - {dataset: GISS-E2-R, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + # Experiments start at 1859-12-01 + - {dataset: HadGEM2-ES, exp: piControl, start_year: 1860, end_year: 2009} + - {dataset: HadGEM2-ES, exp: abrupt4xCO2, start_year: 1860, end_year: 2009} + - {dataset: inmcm4, exp: piControl, start_year: 2090, end_year: 2239} + - {dataset: inmcm4, exp: abrupt4xCO2, start_year: 2090, end_year: 2239} + # Only 140 years available + - {dataset: IPSL-CM5A-MR, exp: piControl, start_year: 1850, end_year: 1989} + - {dataset: IPSL-CM5A-MR, exp: abrupt4xCO2, start_year: 1850, end_year: 1989} + - {dataset: IPSL-CM5B-LR, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: IPSL-CM5B-LR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: MIROC5, exp: piControl, start_year: 2100, end_year: 2249} + - {dataset: MIROC5, exp: abrupt4xCO2, start_year: 2100, end_year: 2249} + - {dataset: MIROC-ESM, exp: piControl, start_year: 1880, end_year: 2029} + - {dataset: MIROC-ESM, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: MPI-ESM-LR, exp: piControl, start_year: 1880, end_year: 2029} + - {dataset: MPI-ESM-LR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM-MR, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM-MR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM-P, exp: piControl, start_year: 1866, end_year: 2015} + - {dataset: MPI-ESM-P, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: MRI-CGCM3, exp: piControl, start_year: 1891, end_year: 2040} + - {dataset: MRI-CGCM3, exp: abrupt4xCO2, start_year: 1851, end_year: 2000} + - {dataset: NorESM1-M, exp: piControl, start_year: 700, end_year: 849} + - {dataset: NorESM1-M, exp: abrupt4xCO2, start_year: 1, end_year: 150} + +CMIP6_RTNT: &cmip6_rtnt + - {dataset: ACCESS-CM2, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 950, end_year: 1099, institute: CSIRO-ARCCSS} + - {dataset: ACCESS-CM2, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 950, end_year: 1099, institute: CSIRO-ARCCSS} + - {dataset: 
ACCESS-ESM1-5, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 101, end_year: 250} + - {dataset: ACCESS-ESM1-5, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 101, end_year: 250} + - {dataset: AWI-CM-1-1-MR, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 2650, end_year: 2799} + - {dataset: AWI-CM-1-1-MR, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: BCC-CSM2-MR, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: BCC-CSM2-MR, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: BCC-ESM1, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: BCC-ESM1, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: CAMS-CSM1-0, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 3030, end_year: 3179} + - {dataset: CAMS-CSM1-0, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 3030, end_year: 3179} + - {dataset: CanESM5, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 5201, end_year: 5350} + - {dataset: CanESM5, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: CESM2, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150} + - {dataset: CESM2, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150} + - {dataset: CESM2-FV2, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 321, end_year: 470, institute: NCAR} + - {dataset: CESM2-FV2, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150, institute: NCAR} + - {dataset: CESM2-WACCM, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150, institute: NCAR} + - {dataset: CESM2-WACCM, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150, institute: NCAR} + - {dataset: CESM2-WACCM-FV2, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 301, end_year: 450, institute: NCAR} + - {dataset: CESM2-WACCM-FV2, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150, institute: NCAR} + - {dataset: CMCC-CM2-SR5, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: CMCC-CM2-SR5, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM6-1, exp: piControl, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM6-1, exp: abrupt-4xCO2, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM6-1-HR, exp: piControl, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM6-1-HR, exp: abrupt-4xCO2, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: CNRM-ESM2-1, exp: piControl, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: CNRM-ESM2-1, exp: abrupt-4xCO2, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: E3SM-1-0, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 101, end_year: 250} + - {dataset: E3SM-1-0, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1, end_year: 150} + - {dataset: EC-Earth3-Veg, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: EC-Earth3-Veg, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} + # parent_time_units messed up + - 
{dataset: FGOALS-f3-L, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 600, end_year: 749} + - {dataset: FGOALS-f3-L, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: FGOALS-g3, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 463, end_year: 612} + - {dataset: FGOALS-g3, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 463, end_year: 612} + - {dataset: GISS-E2-1-G, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 4150, end_year: 4299} + - {dataset: GISS-E2-1-G, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: GISS-E2-1-H, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 3180, end_year: 3329} + - {dataset: GISS-E2-1-H, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: HadGEM3-GC31-LL, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: HadGEM3-GC31-LL, exp: abrupt-4xCO2, ensemble: r1i1p1f3, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: HadGEM3-GC31-MM, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: HadGEM3-GC31-MM, exp: abrupt-4xCO2, ensemble: r1i1p1f3, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: INM-CM5-0, exp: piControl, ensemble: r1i1p1f1, grid: gr1, start_year: 2099, end_year: 2248} + - {dataset: INM-CM5-0, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr1, start_year: 1850, end_year: 1999} + - {dataset: INM-CM4-8, exp: piControl, ensemble: r1i1p1f1, grid: gr1, start_year: 1947, end_year: 2096} + - {dataset: INM-CM4-8, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr1, start_year: 1850, end_year: 1999} + - {dataset: IPSL-CM6A-LR, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 1870, end_year: 2019} + - {dataset: IPSL-CM6A-LR, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: KACE-1-0-G, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 2150, end_year: 2299} + - {dataset: KACE-1-0-G, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: MIROC6, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 3200, end_year: 3349} + - {dataset: MIROC6, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 3200, end_year: 3349} + - {dataset: MIROC-ES2L, exp: piControl, ensemble: r1i1p1f2, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MIROC-ES2L, exp: abrupt-4xCO2, ensemble: r1i1p1f2, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM-1-2-HAM, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1950, end_year: 2099} + - {dataset: MPI-ESM-1-2-HAM, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM1-2-HR, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM1-2-HR, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM1-2-LR, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM1-2-LR, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MRI-ESM2-0, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MRI-ESM2-0, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + # parent_time_units not correct + - {dataset: NESM3, exp: piControl, ensemble: 
r1i1p1f1, grid: gn, start_year: 550, end_year: 699} + - {dataset: NESM3, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: NorCPM1, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 251, end_year: 400, institute: NCC} + - {dataset: NorCPM1, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150, institute: NCC} + - {dataset: NorESM2-MM, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1201, end_year: 1350} + - {dataset: NorESM2-MM, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150} + # Issue #286 (manual copying was necessary) + - {dataset: SAM0-UNICON, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 274, end_year: 423} + - {dataset: SAM0-UNICON, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + # Wrong start year for piControl (must be 1201) + - {dataset: TaiESM1, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 201, end_year: 350} + - {dataset: TaiESM1, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150} + +CMIP6_RTMT: &cmip6_rtmt + # branch_time_in_child weird + - {dataset: MCM-UA-1-0, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150} + - {dataset: MCM-UA-1-0, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150} + +ECS_SCRIPT: &ecs_script + script: climate_metrics/ecs.py + calculate_mmm: false +SCATTERPLOT: &scatterplot_script + script: climate_metrics/create_scatterplot.py + pattern: 'ecs.nc' + seaborn_settings: + style: ticks + y_range: [0.0, 6.0] diagnostics: @@ -28,68 +194,65 @@ diagnostics: cmip5: description: Calculate ECS for all available CMIP5 models. variables: - tas: &variable_settings + tas_rtnt: &variable_settings_cmip5 + short_name: tas preprocessor: spatial_mean project: CMIP5 ensemble: r1i1p1 mip: Amon - rtmt: - <<: *variable_settings - additional_datasets: - - {dataset: bcc-csm1-1, exp: piControl, start_year: 160, end_year: 309} - - {dataset: bcc-csm1-1, exp: abrupt4xCO2, start_year: 160, end_year: 309} - - {dataset: bcc-csm1-1-m, exp: piControl, start_year: 240, end_year: 389} - - {dataset: bcc-csm1-1-m, exp: abrupt4xCO2, start_year: 240, end_year: 389} - - {dataset: CanESM2, exp: piControl, start_year: 2015, end_year: 2164} - - {dataset: CanESM2, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} - - {dataset: CCSM4, exp: piControl, start_year: 800, end_year: 949} - - {dataset: CCSM4, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} - - {dataset: CNRM-CM5, exp: piControl, start_year: 1850, end_year: 1999} - - {dataset: CNRM-CM5, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} - - {dataset: CSIRO-Mk3-6-0, exp: piControl, start_year: 1, end_year: 150} - - {dataset: CSIRO-Mk3-6-0, exp: abrupt4xCO2, start_year: 1, end_year: 150} - - {dataset: GFDL-CM3, exp: piControl, start_year: 1, end_year: 150} - - {dataset: GFDL-CM3, exp: abrupt4xCO2, start_year: 1, end_year: 150} - - {dataset: GISS-E2-H, exp: piControl, start_year: 1200, end_year: 1349} - - {dataset: GISS-E2-H, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} - - {dataset: GISS-E2-R, exp: piControl, start_year: 3331, end_year: 3480} - - {dataset: GISS-E2-R, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} - - {dataset: inmcm4, exp: piControl, start_year: 2090, end_year: 2239} - - {dataset: inmcm4, exp: abrupt4xCO2, start_year: 2090, end_year: 2239} - - {dataset: IPSL-CM5A-LR, exp: piControl, start_year: 1850, end_year: 1999} - - {dataset: IPSL-CM5A-LR, exp: 
abrupt4xCO2, start_year: 1850, end_year: 1999} - - {dataset: IPSL-CM5B-LR, exp: piControl, start_year: 1850, end_year: 1999} - - {dataset: IPSL-CM5B-LR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} - - {dataset: MIROC5, exp: piControl, start_year: 2100, end_year: 2249} - - {dataset: MIROC5, exp: abrupt4xCO2, start_year: 2100, end_year: 2249} - - {dataset: MIROC-ESM, exp: piControl, start_year: 1800, end_year: 1949} - - {dataset: MIROC-ESM, exp: abrupt4xCO2, start_year: 1, end_year: 150} - - {dataset: MPI-ESM-LR, exp: piControl, start_year: 2015, end_year: 2164} - - {dataset: MPI-ESM-LR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} - # Day is out of range for month (wait for iris > 2.0) - # - {dataset: ACCESS1-0, exp: piControl, start_year: 300, end_year: 449} - # - {dataset: ACCESS1-0, exp: abrupt4xCO2, start_year: 300, end_year: 449} + additional_datasets: *cmip5_rtnt + rtnt: + <<: *variable_settings_cmip5 + short_name: rtnt + derive: true + additional_datasets: *cmip5_rtnt scripts: - ecs: &ecs_script - script: climate_metrics/ecs.py - plot_ecs_regression: true + ecs: + <<: *ecs_script + scatterplot: + <<: *scatterplot_script + ancestors: ['cmip5/ecs'] + dataset_style: cmip5 cmip6: description: Calculate ECS for all available CMIP6 models. variables: - tas: - <<: *variable_settings + tas_rtnt: &variable_settings_cmip6 + short_name: tas + preprocessor: spatial_mean project: CMIP6 + mip: Amon + additional_datasets: *cmip6_rtnt + tas_rtmt: + <<: *variable_settings_cmip6 + additional_datasets: *cmip6_rtmt + rtnt: + <<: *variable_settings_cmip6 + short_name: rtnt + derive: true + additional_datasets: *cmip6_rtnt rtmt: - <<: *variable_settings - project: CMIP6 - additional_datasets: - - {dataset: GISS-E2-1-G, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 4550, end_year: 4699} - - {dataset: GISS-E2-1-G, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} - - {dataset: IPSL-CM6A-LR, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} - - {dataset: IPSL-CM6A-LR, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} - - {dataset: MIROC6, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 3200, end_year: 3349} - - {dataset: MIROC6, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 3200, end_year: 3349} + <<: *variable_settings_cmip6 + short_name: rtmt + additional_datasets: *cmip6_rtmt scripts: ecs: <<: *ecs_script + scatterplot: + <<: *scatterplot_script + ancestors: ['cmip6/ecs'] + dataset_style: cmip6 + + barplot: + description: Create barplot of ECS. + scripts: + barplot: + script: climate_metrics/create_barplot.py + ancestors: ['*/ecs'] + add_mean: true + label_attribute: project + order: ['CMIP5', 'CMIP6'] + patterns: ['ecs.nc'] + sort_descending: true + value_labels: true + y_range: [0.0, 6.0] diff --git a/esmvaltool/recipes/recipe_ecs_constraints.yml b/esmvaltool/recipes/recipe_ecs_constraints.yml new file mode 100644 index 0000000000..0abc4f20e6 --- /dev/null +++ b/esmvaltool/recipes/recipe_ecs_constraints.yml @@ -0,0 +1,1233 @@ +# ESMValTool +# recipe_ecs_constraints.yml +--- +documentation: + title: > + Emergent constraints for ECS + + description: > + Multiple emergent constraints for ECS. 
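
Each pairing of an ecs_predictor with an emergent_constraint script below follows the same pattern: regress the models' ECS on an observable predictor, then evaluate the fit at the observed predictor value, whose uncertainty enters through the prediction_input_error entries further down. A deliberately simplified Monte Carlo sketch that ignores the regression's residual spread; the names are illustrative, not single_constraint.py's API:

    import numpy as np

    def constrained_ecs(predictor, ecs, x_obs, x_obs_err, n_samples=100000):
        """Constrain ECS with one predictor across models."""
        slope, intercept = np.polyfit(predictor, ecs, 1)  # across-model fit
        rng = np.random.default_rng(42)
        x = rng.normal(x_obs, x_obs_err, n_samples)       # obs uncertainty
        samples = slope * x + intercept
        return samples.mean(), samples.std()

    # Hypothetical numbers: five models, observed predictor 0.7 +/- 0.08.
    print(constrained_ecs(np.array([0.5, 0.6, 0.7, 0.8, 0.9]),
                          np.array([2.5, 3.0, 3.4, 4.1, 4.4]),
                          0.7, 0.08))
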
+ + authors: + - schlund_manuel + + maintainer: + - schlund_manuel + + references: + - gregory04grl + - sherwood14nat + - tian15grl + - lipat17grl + - brient16jclim + - cox18nature + - volodin08izvestiya + - zhai15grl + - brient16climdyn + - su14jgr + + projects: + - crescendo + + +VARIABLE_ANCHOR: &var_settings_cmip5 + project: CMIP5 + exp: historical + ensemble: r1i1p1 + +DIAG_NCL: &diag_ncl + script: emergent_constraints/ecs_scatter.ncl + output_diag_only: true + +DIAG_EMERGENT_CONSTRAINT: &diag_emergent_constraint + script: emergent_constraints/single_constraint.py + ancestors: [ + 'ecs_predictor', + 'diag_y_ecs_cmip5/ecs', + ] + group_by: project + ignore_patterns: + - 'ecs_regression_*.nc' + - 'lambda.nc' + - 'psi_*.nc' + - 'su_*.nc' + - 'zhai_*.nc' + + +preprocessors: + + pp500: &pp500 + extract_levels: + levels: 50000 + scheme: linear + + pp_ltmi_0: + extract_levels: + levels: [85000, 70000, 60000, 50000, 40000] + scheme: linear + + pp_ltmi_1: + extract_levels: + levels: [85000, 70000] + scheme: linear + + spatial_mean: + area_statistics: + operator: mean + + tropical_mean: + extract_region: + start_latitude: -28 + end_latitude: 28 + start_longitude: 0 + end_longitude: 360 + area_statistics: + operator: mean + climate_statistics: + operator: mean + + southern_midlatitudes_mean: + extract_region: + start_latitude: -56 + end_latitude: -36 + start_longitude: 0 + end_longitude: 360 + area_statistics: + operator: mean + climate_statistics: + operator: mean + + tropical_seamask_30: &tropical_seamask_30_pp + extract_region: + start_latitude: -30 + end_latitude: 30 + start_longitude: 0 + end_longitude: 360 + mask_landsea: + mask_out: land + + tropical_seamask_30_500hPa: + <<: *tropical_seamask_30_pp + <<: *pp500 + + tropical_mask_40: &tropical_mask_40_pp + regrid: + target_grid: 2x2 + scheme: linear + extract_region: + start_latitude: -40 + end_latitude: 40 + start_longitude: 0 + end_longitude: 360 + + tropical_mask_40_500hPa: + <<: *tropical_mask_40_pp + <<: *pp500 + + zonal_mean: + regrid: + target_grid: 2x2 + scheme: nearest + extract_levels: + levels: [ + 100000, + 92500, + 85000, + 70000, + 60000, + 50000, + 40000, + 30000, + 25000, + 20000, + 15000, + 10000, + ] + scheme: linear + extract_region: + start_latitude: -45 + end_latitude: 40 + start_longitude: 0 + end_longitude: 360 + zonal_statistics: + operator: mean + climate_statistics: + operator: mean + + +diagnostics: + + diag_y_ecs_cmip5: + description: Equilibrium Climate Sensitivity for CMIP5. 
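
This diagnostic provides the label (y-axis) data via the Gregory et al. (2004) method cited above: annual-mean anomalies of the net TOA flux (rtnt, a variable derived in ESMValTool from rsdt, rsut and rlut, or rtmt where rtnt is unavailable) are regressed on tas anomalies between abrupt4xCO2 and piControl; the intercept estimates the 4xCO2 forcing, the slope the feedback parameter, and ECS is the x-intercept halved to convert to a CO2 doubling. A minimal sketch, not ecs.py's interface:

    import numpy as np

    def gregory_ecs(dtas, drtnt):
        """ECS [K] from anomalies of tas [K] and net TOA flux [W m-2]."""
        feedback, forcing = np.polyfit(dtas, drtnt, 1)  # drtnt = F + lam*dtas
        return -forcing / (2.0 * feedback)              # halve: 4xCO2 -> 2xCO2

    # Synthetic abrupt4xCO2 response: F = 7 W m-2, lambda = -1 W m-2 K-1.
    rng = np.random.default_rng(1)
    dtas = 7.0 * (1.0 - np.exp(-np.arange(150) / 30.0)) \
        + 0.2 * rng.standard_normal(150)
    drtnt = 7.0 - dtas + 0.5 * rng.standard_normal(150)
    print(gregory_ecs(dtas, drtnt))  # ~3.5 K
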
+ variables: + tas_rtnt: &tas_settings_cmip5 + short_name: tas + preprocessor: spatial_mean + project: CMIP5 + mip: Amon + ensemble: r1i1p1 + additional_datasets: &rtnt_datasets_cmip5 + # Models with missing data (on all ESGF nodes) + # EC-EARTH (no rsut) + - {dataset: ACCESS1-0, exp: piControl, start_year: 300, end_year: 449} + - {dataset: ACCESS1-0, exp: abrupt4xCO2, start_year: 300, end_year: 449} + - {dataset: ACCESS1-3, exp: piControl, start_year: 250, end_year: 399} + - {dataset: ACCESS1-3, exp: abrupt4xCO2, start_year: 250, end_year: 399} + - {dataset: bcc-csm1-1, exp: piControl, start_year: 160, end_year: 309} + - {dataset: bcc-csm1-1, exp: abrupt4xCO2, start_year: 160, end_year: 309} + - {dataset: bcc-csm1-1-m, exp: piControl, start_year: 240, end_year: 389} + - {dataset: bcc-csm1-1-m, exp: abrupt4xCO2, start_year: 240, end_year: 389} + - {dataset: BNU-ESM, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: BNU-ESM, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: CanESM2, exp: piControl, start_year: 2321, end_year: 2470} + - {dataset: CanESM2, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + # Wrong start year for piControl? (branch_time = 2.) + - {dataset: CCSM4, exp: piControl, start_year: 250, end_year: 399} + - {dataset: CCSM4, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM5, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM5, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + # Only 140 years available + - {dataset: CNRM-CM5-2, exp: piControl, start_year: 1850, end_year: 1989} + - {dataset: CNRM-CM5-2, exp: abrupt4xCO2, start_year: 1850, end_year: 1989} + - {dataset: CSIRO-Mk3-6-0, exp: piControl, start_year: 104, end_year: 253} + - {dataset: CSIRO-Mk3-6-0, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: FGOALS-g2, exp: piControl, start_year: 490, end_year: 639} + - {dataset: FGOALS-g2, exp: abrupt4xCO2, start_year: 490, end_year: 639} + # branch_time_in_child weird + - {dataset: FGOALS-s2, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: FGOALS-s2, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: GFDL-CM3, exp: piControl, start_year: 1, end_year: 150} + - {dataset: GFDL-CM3, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: GFDL-ESM2G, exp: piControl, start_year: 1, end_year: 150} + - {dataset: GFDL-ESM2G, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: GFDL-ESM2M, exp: piControl, start_year: 1, end_year: 150} + - {dataset: GFDL-ESM2M, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: GISS-E2-H, exp: piControl, start_year: 2660, end_year: 2809} + - {dataset: GISS-E2-H, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: GISS-E2-R, exp: piControl, start_year: 4200, end_year: 4349} + - {dataset: GISS-E2-R, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + # Experiments start at 1859-12-01 + - {dataset: HadGEM2-ES, exp: piControl, start_year: 1860, end_year: 2009} + - {dataset: HadGEM2-ES, exp: abrupt4xCO2, start_year: 1860, end_year: 2009} + - {dataset: inmcm4, exp: piControl, start_year: 2090, end_year: 2239} + - {dataset: inmcm4, exp: abrupt4xCO2, start_year: 2090, end_year: 2239} + # Only 140 years available + - {dataset: IPSL-CM5A-MR, exp: piControl, start_year: 1850, end_year: 1989} + - {dataset: IPSL-CM5A-MR, exp: abrupt4xCO2, start_year: 1850, end_year: 1989} + - {dataset: IPSL-CM5B-LR, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: IPSL-CM5B-LR, exp: abrupt4xCO2, 
start_year: 1850, end_year: 1999} + - {dataset: MIROC5, exp: piControl, start_year: 2100, end_year: 2249} + - {dataset: MIROC5, exp: abrupt4xCO2, start_year: 2100, end_year: 2249} + - {dataset: MIROC-ESM, exp: piControl, start_year: 1880, end_year: 2029} + - {dataset: MIROC-ESM, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: MPI-ESM-LR, exp: piControl, start_year: 1880, end_year: 2029} + - {dataset: MPI-ESM-LR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM-MR, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM-MR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM-P, exp: piControl, start_year: 1866, end_year: 2015} + - {dataset: MPI-ESM-P, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: MRI-CGCM3, exp: piControl, start_year: 1891, end_year: 2040} + - {dataset: MRI-CGCM3, exp: abrupt4xCO2, start_year: 1851, end_year: 2000} + - {dataset: NorESM1-M, exp: piControl, start_year: 700, end_year: 849} + - {dataset: NorESM1-M, exp: abrupt4xCO2, start_year: 1, end_year: 150} + tas_rtmt: + <<: *tas_settings_cmip5 + additional_datasets: &rtmt_datasets_cmip5 + - {dataset: IPSL-CM5A-LR, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: IPSL-CM5A-LR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + rtnt: + <<: *tas_settings_cmip5 + short_name: rtnt + derive: true + additional_datasets: *rtnt_datasets_cmip5 + rtmt: + <<: *tas_settings_cmip5 + short_name: rtmt + additional_datasets: *rtmt_datasets_cmip5 + scripts: + ecs: + script: climate_metrics/ecs.py + calculate_mmm: false + output_attributes: + var_type: label + tag: ECS + plot_ylim: [1.5, 6.0] + project: CMIP5 + provenance_authors: ['schlund_manuel'] + provenance_domains: ['global'] + provenance_realms: ['atmos'] + provenance_references: ['gregory04grl'] + provenance_statistics: ['mean', 'anomaly'] + provenance_themes: ['phys'] + + diag_x_sherwood_ltmi_cmip5: + description: Lower tropospheric mixing index (Sherwood et al., 2014) for CMIP5. 
+ themes: + - EC + realms: + - atmos + variables: + hur: + <<: *var_settings_cmip5 + preprocessor: pp_ltmi_1 + mip: Amon + start_year: 1989 + end_year: 1998 + reference_dataset: ERA-Interim + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + ta: + <<: *var_settings_cmip5 + preprocessor: pp_ltmi_1 + mip: Amon + start_year: 1989 + end_year: 1998 + reference_dataset: ERA-Interim + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + wap: + <<: *var_settings_cmip5 + preprocessor: pp_ltmi_0 + mip: Amon + start_year: 1989 + end_year: 1998 + reference_dataset: ERA-Interim + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + # No hur + # - {dataset: FGOALS-s2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H} + - {dataset: GISS-E2-R} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + scripts: + ecs_predictor: + <<: *diag_ncl + diag: ltmi + output_attributes: + var_type: feature + tag: SHL + plot_xlabel: 'LTMI [1]' + plot_title: 'Sherwood et al. (2014) constraint' + plot_xlim: [0.45, 1.0] + project: CMIP5 + provenance_authors: ['lauer_axel'] + provenance_references: ['sherwood14nat'] + provenance_realms: ['atmos'] + provenance_themes: ['EC'] + emergent_constraint: + <<: *diag_emergent_constraint + additional_data: + - dataset: 0 + project: OBS + var_type: prediction_input_error + tag: SHL + data: 0.08 + + diag_x_sherwood_d_cmip5: + description: Sherwood D index (Sherwood et al., 2014) for CMIP5. + themes: + - EC + realms: + - atmos + variables: + wap: + <<: *var_settings_cmip5 + preprocessor: pp_ltmi_0 + mip: Amon + start_year: 1989 + end_year: 1998 + reference_dataset: ERA-Interim + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + # No wap + # - {dataset: FGOALS-s2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H} + - {dataset: GISS-E2-R} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + scripts: + ecs_predictor: + <<: *diag_ncl + diag: sherwood_d + output_attributes: + var_type: feature + tag: SHD + plot_xlabel: 'Sherwood D index [1]' + plot_title: 'Sherwood et al. 
(2014) constraint' + plot_xlim: [0.15, 0.65] + project: CMIP5 + provenance_authors: ['lauer_axel'] + provenance_references: ['sherwood14nat'] + provenance_realms: ['atmos'] + provenance_themes: ['EC'] + emergent_constraint: + <<: *diag_emergent_constraint + additional_data: + - dataset: 0 + project: OBS + var_type: prediction_input_error + tag: SHD + data: 0.031 + + diag_x_sherwood_s_cmip5: + description: Sherwood S index (Sherwood et al., 2014) for CMIP5. + themes: + - EC + realms: + - atmos + variables: + hur: + <<: *var_settings_cmip5 + preprocessor: pp_ltmi_1 + mip: Amon + start_year: 1989 + end_year: 1998 + reference_dataset: ERA-Interim + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + ta: + <<: *var_settings_cmip5 + preprocessor: pp_ltmi_1 + mip: Amon + start_year: 1989 + end_year: 1998 + reference_dataset: ERA-Interim + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + wap: + <<: *var_settings_cmip5 + preprocessor: pp_ltmi_0 + mip: Amon + start_year: 1989 + end_year: 1998 + reference_dataset: ERA-Interim + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + # No hur + # - {dataset: FGOALS-s2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H} + - {dataset: GISS-E2-R} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + scripts: + ecs_predictor: + <<: *diag_ncl + diag: sherwood_s + output_attributes: + var_type: feature + tag: SHS + plot_xlabel: 'Sherwood S index [1]' + plot_title: 'Sherwood et al. (2014) constraint' + plot_xlim: [0.2, 0.6] + project: CMIP5 + provenance_authors: ['lauer_axel'] + provenance_references: ['sherwood14nat'] + provenance_realms: ['atmos'] + provenance_themes: ['EC'] + emergent_constraint: + <<: *diag_emergent_constraint + additional_data: + - dataset: 0 + project: OBS + var_type: prediction_input_error + tag: SHS + data: 0.05 + + diag_x_tian_itcz_cmip5: + description: Southern ITCZ index (Tian, 2015) for CMIP5. 
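
The southern ITCZ index is, in essence, the area-weighted annual-mean precipitation bias of a model against GPCP over a southeast Pacific box; the box used here (20S-0, 150W-100W) is an assumption for illustration, as is the function itself:

    import numpy as np

    def southern_itcz_index(pr_model, pr_obs, lat, lon):
        """Area-weighted annual-mean pr bias [mm day-1] over the box."""
        sel_lat = (lat >= -20.0) & (lat <= 0.0)
        sel_lon = (lon >= 210.0) & (lon <= 260.0)  # 150W-100W on a 0-360 grid
        bias = (pr_model - pr_obs)[np.ix_(sel_lat, sel_lon)]
        weights = np.cos(np.deg2rad(lat[sel_lat]))[:, None]
        return float((bias * weights).sum() / (weights.sum() * bias.shape[1]))

    # Toy 1-degree grid with a uniform +1 mm/day wet bias:
    lat = np.arange(-89.5, 90.0, 1.0)
    lon = np.arange(0.5, 360.0, 1.0)
    pr_model = np.full((lat.size, lon.size), 3.0)
    pr_obs = np.full((lat.size, lon.size), 2.0)
    print(southern_itcz_index(pr_model, pr_obs, lat, lon))  # 1.0
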
+ themes: + - EC + realms: + - atmos + variables: + pr: + <<: *var_settings_cmip5 + preprocessor: default + mip: Amon + start_year: 1986 + end_year: 2005 + reference_dataset: GPCP-V2.2 + additional_datasets: + - {dataset: GPCP-V2.2, project: obs4MIPs, level: L3, tier: 1} + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + # No pr + # - {dataset: FGOALS-s2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H} + - {dataset: GISS-E2-R} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + scripts: + ecs_predictor: + <<: *diag_ncl + diag: itczidx + output_attributes: + var_type: feature + tag: TII + plot_xlabel: 'Southern ITCZ index [mm day$^{-1}$]' + plot_title: 'Tian (2015) constraint' + plot_xlim: [-1.0, 3.0] + project: CMIP5 + provenance_authors: ['lauer_axel'] + provenance_references: ['tian15grl'] + provenance_realms: ['atmos'] + provenance_themes: ['EC'] + emergent_constraint: + <<: *diag_emergent_constraint + additional_data: + - dataset: 0 + project: obs4MIPs + var_type: prediction_input_error + tag: TII + data: 0.5 + + diag_x_tian_hum_cmip5: + description: Humidity index (Tian, 2015) for CMIP5. + themes: + - EC + realms: + - atmos + variables: + hus: + <<: *var_settings_cmip5 + preprocessor: pp500 + mip: Amon + start_year: 2003 + end_year: 2005 + reference_dataset: AIRS-2-1 + additional_datasets: + - {dataset: AIRS-2-1, project: obs4MIPs, level: L3, tier: 1} + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + # No hus + # - {dataset: FGOALS-s2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H} + - {dataset: GISS-E2-R} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + scripts: + ecs_predictor: + <<: *diag_ncl + diag: humidx + output_attributes: + var_type: feature + tag: TIH + plot_xlabel: 'Tropical Mid-tropospheric humidity index [%]' + plot_title: 'Tian (2015) constraint' + plot_xlim: [-10.0, 50.0] + project: CMIP5 + provenance_authors: ['lauer_axel'] + provenance_references: ['tian15grl'] + provenance_realms: ['atmos'] + provenance_themes: ['EC'] + emergent_constraint: + <<: *diag_emergent_constraint + additional_data: + - dataset: 0 + project: obs4MIPs + var_type: prediction_input_error + tag: TIH + data: 10.0 + + diag_x_lipat_cmip5: + description: Climatological Hadley cell extent (Lipat et al., 2017) for CMIP5. 
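
The predictor here is the latitude of the southern edge of the SH Hadley cell: the zero crossing of the 500 hPa mean meridional mass streamfunction poleward of the cell core. Assuming the streamfunction has already been computed from va, a sketch of the edge search (illustrative, not the NCL diagnostic's code):

    import numpy as np

    def sh_hadley_edge(lat, psi500):
        """SH Hadley cell edge [deg]: psi500 zero crossing poleward of core."""
        sh = lat < 0.0
        lat_sh, psi_sh = lat[sh], psi500[sh]   # lat ascending from -90
        core = int(np.argmax(np.abs(psi_sh)))  # index of the SH cell core
        for i in range(core, 0, -1):           # march poleward (toward -90)
            if psi_sh[i] * psi_sh[i - 1] <= 0.0:
                frac = psi_sh[i] / (psi_sh[i] - psi_sh[i - 1])
                return lat_sh[i] + frac * (lat_sh[i - 1] - lat_sh[i])
        return np.nan

    # Synthetic profile with a subtropical core and a zero crossing near 32S:
    lat = np.arange(-89.0, 0.0, 2.0)
    psi500 = (lat + 32.0) * np.exp(-(((lat + 15.0) / 20.0) ** 2))
    print(sh_hadley_edge(lat, psi500))  # ~ -32
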
+ themes: + - EC + realms: + - atmos + variables: + va: + <<: *var_settings_cmip5 + preprocessor: default + mip: Amon + start_year: 1980 + end_year: 2005 + reference_dataset: ERA-Interim + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + # No va + # - {dataset: FGOALS-s2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H} + - {dataset: GISS-E2-R} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + scripts: + ecs_predictor: + <<: *diag_ncl + diag: shhc + output_attributes: + var_type: feature + tag: LIP + plot_xlabel: 'Southern hemisphere Hadley cell extent [°]' + plot_title: 'Lipat et al. (2017) constraint' + plot_xlim: [-39.0, -30.0] + project: CMIP5 + provenance_authors: ['lauer_axel'] + provenance_references: ['lipat17grl'] + provenance_realms: ['atmos'] + provenance_themes: ['EC'] + emergent_constraint: + <<: *diag_emergent_constraint + additional_data: + - dataset: 0 + project: OBS + var_type: prediction_input_error + tag: LIP + data: 1.75 + + diag_x_brient_alb_cmip5: + description: Covariance of shortwave cloud reflection (Brient and Schneider, 2016) for CMIP5. + themes: + - EC + realms: + - atmos + variables: + ts: + <<: *var_settings_cmip5 + mip: Amon + preprocessor: default + start_year: 2001 + end_year: 2005 + reference_dataset: HadISST + additional_datasets: + - {dataset: HadISST, project: OBS, type: reanaly, version: 1, tier: 2} + hur: + <<: *var_settings_cmip5 + preprocessor: pp500 + mip: Amon + start_year: 2001 + end_year: 2005 + reference_dataset: ERA-Interim + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + rsut: + <<: *var_settings_cmip5 + preprocessor: default + mip: Amon + start_year: 2001 + end_year: 2005 + reference_dataset: CERES-EBAF + additional_datasets: + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} + rsutcs: + <<: *var_settings_cmip5 + preprocessor: default + mip: Amon + start_year: 2001 + end_year: 2005 + reference_dataset: CERES-EBAF + additional_datasets: + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} + rsdt: + <<: *var_settings_cmip5 + preprocessor: default + mip: Amon + start_year: 2001 + end_year: 2005 + reference_dataset: CERES-EBAF + additional_datasets: + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + # No ts + # - {dataset: FGOALS-s2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H} + - {dataset: GISS-E2-R} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5} 
+ - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + scripts: + ecs_predictor: + <<: *diag_ncl + diag: covrefl + output_attributes: + var_type: feature + tag: BRA + plot_xlabel: 'Response of SW cloud reflectivity to SST changes [% K$^{-1}$]' + plot_title: 'Brient and Schneider (2016) constraint' + plot_xlim: [-3.0, 1.5] + project: CMIP5 + provenance_authors: ['lauer_axel'] + provenance_references: ['brient16jclim'] + provenance_realms: ['atmos'] + provenance_themes: ['EC'] + emergent_constraint: + <<: *diag_emergent_constraint + additional_data: + - dataset: 0 + project: OBS + var_type: prediction_input_error + tag: BRA + data: 0.13 + + diag_x_cox_cmip5: + description: Temperature variability metric psi (Cox et al., 2018) for CMIP5. + themes: + - EC + realms: + - atmos + variables: + tas: + <<: *var_settings_cmip5 + preprocessor: spatial_mean + mip: Amon + exp: [historical, rcp85] + start_year: 1880 + end_year: 2014 + reference_dataset: HadCRUT4 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CNRM-CM5} + # No tas for rcp85 + # - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + # No tas + # - {dataset: FGOALS-s2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H} + - {dataset: GISS-E2-R} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + # No tas for rcp85 + # - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + tasa: + <<: *var_settings_cmip5 + preprocessor: spatial_mean + mip: Amon + start_year: 1880 + end_year: 2014 + reference_dataset: HadCRUT4 + additional_datasets: + - {dataset: HadCRUT4, project: OBS, type: ground, version: 1, tier: 2} + scripts: + ecs_predictor: + script: climate_metrics/psi.py + output_attributes: + var_type: feature + tag: COX + plot_xlabel: 'Temperature variability metric $\psi$ [K]' + plot_title: 'Cox et al. (2018) constraint' + plot_xlim: [0.05, 0.35] + project: CMIP5 + provenance_authors: ['schlund_manuel'] + provenance_domains: ['global'] + provenance_realms: ['atmos'] + provenance_references: ['cox18nature'] + provenance_statistics: ['var', 'diff', 'corr', 'detrend'] + provenance_themes: ['EC'] + emergent_constraint: + <<: *diag_emergent_constraint + additional_data: + - dataset: 0 + project: OBS + var_type: prediction_input_error + tag: COX + data: 0.05 + + diag_x_volodin_cmip5: + description: Difference in total cloud fraction between tropics and Southern midlatitudes (Volodin, 2008) for CMIP5. 
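
The feature is simply the difference of the two preprocessed clt climatologies declared below (tropical mean minus southern-midlatitude mean, in % of cloud fraction); the observed value fed in further down as prediction_input is -25 with an error of 5.5. A trivial sketch with hypothetical model values:

    def volodin_index(clt_tropics, clt_southern_midlat):
        """Volodin (2008) predictor: total cloud fraction difference [%]."""
        return clt_tropics - clt_southern_midlat

    print(volodin_index(52.0, 77.0))  # hypothetical means -> -25.0
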
+ themes: + - EC + realms: + - atmos + variables: + trop_clt: + <<: *var_settings_cmip5 + short_name: clt + mip: Amon + preprocessor: tropical_mean + start_year: 1980 + end_year: 2000 + reference_dataset: '' + southern_midlat_clt: + <<: *var_settings_cmip5 + short_name: clt + mip: Amon + preprocessor: southern_midlatitudes_mean + start_year: 1980 + end_year: 2000 + reference_dataset: '' + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + # No clt + # - {dataset: FGOALS-s2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H} + - {dataset: GISS-E2-R} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + scripts: + ecs_predictor: + script: emergent_constraints/ecs_scatter.py + diag: volodin + output_attributes: + var_type: feature + tag: VOL + plot_xlim: [-40.0, 10.0] + project: CMIP5 + emergent_constraint: + <<: *diag_emergent_constraint + additional_data: + - dataset: 0 + project: OBS + var_type: prediction_input + tag: VOL + data: -25 + - dataset: 0 + project: OBS + var_type: prediction_input_error + tag: VOL + data: 5.5 + + diag_x_zhai_cmip5: + description: Seasonal MBLC fraction variation (Zhai et al., 2015) for CMIP5. + themes: + - EC + realms: + - atmos + variables: + cl: + <<: *var_settings_cmip5 + preprocessor: tropical_mask_40 + mip: Amon + start_year: 1980 + end_year: 2004 + reference_dataset: '' + wap: + <<: *var_settings_cmip5 + preprocessor: tropical_mask_40_500hPa + mip: Amon + start_year: 1980 + end_year: 2004 + reference_dataset: '' + tos: + <<: *var_settings_cmip5 + preprocessor: tropical_mask_40 + mip: Omon + start_year: 1980 + end_year: 2004 + reference_dataset: '' + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + # No tos + # - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + # No cl + # - {dataset: CNRM-CM5} + # - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + # No cl + # - {dataset: FGOALS-s2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H} + - {dataset: GISS-E2-R} + - {dataset: HadGEM2-ES} + # latitude and longitude cannot be read for tos + # - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + # Irregular regridding of tos fails + # - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + scripts: + ecs_predictor: + script: emergent_constraints/ecs_scatter.py + diag: zhai + output_attributes: + var_type: feature + tag: ZHA + plot_xlim: [-3.0, 1.0] + project: CMIP5 + emergent_constraint: + <<: *diag_emergent_constraint + additional_data: + - dataset: 0 + project: OBS + var_type: prediction_input + tag: ZHA + data: -1.28 + - dataset: 0 + project: OBS + var_type: prediction_input_error + tag: ZHA + data: 0.187 + + diag_x_brient_shal_cmip5: + description: Shallowness of low clouds (Brient 
et al., 2015) for CMIP5. + themes: + - EC + realms: + - atmos + variables: + cl: + <<: *var_settings_cmip5 + preprocessor: tropical_seamask_30 + mip: Amon + start_year: 1980 + end_year: 2004 + reference_dataset: '' + wap: + <<: *var_settings_cmip5 + preprocessor: tropical_seamask_30_500hPa + mip: Amon + start_year: 1980 + end_year: 2004 + reference_dataset: '' + zg: + <<: *var_settings_cmip5 + preprocessor: tropical_seamask_30 + mip: Amon + start_year: 1980 + end_year: 2004 + reference_dataset: '' + additional_datasets: + # Latitude differs for cl and wap + # - {dataset: ACCESS1-0} + # - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + # No cl + # - {dataset: CNRM-CM5} + # - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + # No cl + # - {dataset: FGOALS-s2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + # Latitude differs for cl and zg + # - {dataset: GISS-E2-H} + # - {dataset: GISS-E2-R} + # Latitude differs for cl and wap + # - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + scripts: + ecs_predictor: + script: emergent_constraints/ecs_scatter.py + diag: brient_shal + n_jobs: 2 + output_attributes: + var_type: feature + tag: BRS + plot_xlim: [30.0, 90.0] + project: CMIP5 + emergent_constraint: + <<: *diag_emergent_constraint + additional_data: + - dataset: 0 + project: OBS + var_type: prediction_input + tag: BRS + data: 44.5 + - dataset: 0 + project: OBS + var_type: prediction_input_error + tag: BRS + data: 3.5 + + diag_x_su_hur_cmip5: + description: Error in relative humidity (Su et al., 2014) for CMIP5. 
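+    # The predictor is the error in zonally averaged relative humidity (hur)
+    # relative to observations; the reference combines the two products
+    # listed under additional_datasets, joined by '|' in reference_dataset.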
+    themes:
+      - EC
+    realms:
+      - atmos
+    variables:
+      hur:
+        <<: *var_settings_cmip5
+        preprocessor: zonal_mean
+        mip: Amon
+        exp: [historical, rcp85]
+        start_year: 2005
+        end_year: 2010
+        reference_dataset: AIRS-2-0|MLS-AURA
+        additional_datasets:
+          - {dataset: AIRS-2-0, project: obs4MIPs, level: L3, tier: 1}
+          - {dataset: MLS-AURA, project: OBS6, type: sat, version: '004', tier: 3}
+    additional_datasets:
+      - {dataset: ACCESS1-0}
+      - {dataset: ACCESS1-3}
+      - {dataset: bcc-csm1-1}
+      - {dataset: bcc-csm1-1-m}
+      - {dataset: BNU-ESM}
+      - {dataset: CanESM2}
+      # No hur for rcp85
+      # - {dataset: CCSM4}
+      - {dataset: CNRM-CM5}
+      # No hur for rcp85
+      # - {dataset: CNRM-CM5-2}
+      - {dataset: CSIRO-Mk3-6-0}
+      - {dataset: FGOALS-g2}
+      # No hur
+      # - {dataset: FGOALS-s2}
+      - {dataset: GFDL-CM3}
+      - {dataset: GFDL-ESM2G}
+      - {dataset: GFDL-ESM2M}
+      - {dataset: GISS-E2-H}
+      - {dataset: GISS-E2-R}
+      - {dataset: HadGEM2-ES}
+      - {dataset: inmcm4}
+      - {dataset: IPSL-CM5A-LR}
+      - {dataset: IPSL-CM5A-MR}
+      - {dataset: IPSL-CM5B-LR}
+      - {dataset: MIROC5}
+      - {dataset: MIROC-ESM}
+      - {dataset: MPI-ESM-LR}
+      - {dataset: MPI-ESM-MR}
+      # No hur for rcp85
+      # - {dataset: MPI-ESM-P}
+      - {dataset: MRI-CGCM3}
+      - {dataset: NorESM1-M}
+    scripts:
+      ecs_predictor:
+        script: emergent_constraints/ecs_scatter.py
+        diag: su
+        metric: regression_slope
+        output_attributes:
+          var_type: feature
+          tag: SU
+          plot_xlim: [0.65, 1.3]
+          project: CMIP5
+      emergent_constraint:
+        <<: *diag_emergent_constraint
+        additional_data:
+          - dataset: 0
+            project: OBS
+            var_type: prediction_input_error
+            tag: SU
+            data: 0.25
diff --git a/esmvaltool/recipes/recipe_ecs_scatter.yml b/esmvaltool/recipes/recipe_ecs_scatter.yml
new file mode 100644
index 0000000000..eae1034e42
--- /dev/null
+++ b/esmvaltool/recipes/recipe_ecs_scatter.yml
@@ -0,0 +1,756 @@
+# ESMValTool
+# recipe_ecs_scatter.yml
+---
+documentation:
+  title: Emergent constraints for ECS
+
+  description: |
+    Calculates equilibrium climate sensitivity (ECS) versus
+    1) southern ITCZ index, similar to fig. 2 from Tian (2015)
+    2) lower tropospheric mixing index (LTMI), similar to fig. 5 from
+       Sherwood et al. (2014)
+    3) tropical mid-tropospheric humidity asymmetry index, similar to fig. 4
+       from Tian (2015)
+    4) covariance of shortwave cloud reflection (Brient and Schneider, 2016)
+    5) climatological Hadley cell extent (Lipat et al., 2017)
+
+    ***************************************************************************
+    Note: this recipe requires pre-calculation of the equilibrium climate
+    sensitivities (ECS) for all models. The ECS values are calculated
+    with recipe_ecs.yml. The netCDF file containing the ECS values
+    (path and filename) is specified by diag_script_info@ecs_file.
+    Alternatively, the netCDF file containing the ECS values can be
+    generated with the CDL script
+    $diag_scripts/emergent_constraints/ecs_cmip.cdl (recommended method):
+    1) save the script given at the end of this recipe as ecs_cmip.cdl
+    2) run the command: ncgen -o ecs_cmip.nc ecs_cmip.cdl
+    3) copy ecs_cmip.nc to the directory given by diag_script_info@ecs_file
+       (e.g.
$diag_scripts/emergent_constraints/ecs_cmip.nc) + *************************************************************************** + + authors: + - lauer_axel + + maintainer: + - lauer_axel + + references: + - brient16jclim + - lipat17grl + - sherwood14nat + - tian15grl + + projects: + - crescendo + + +preprocessors: + pp500: + extract_levels: + levels: 50000 + scheme: linear + + pp_ltmi_0: + extract_levels: + levels: [85000, 70000, 60000, 50000, 40000] + scheme: linear + + pp_ltmi_1: + extract_levels: + levels: [85000, 70000] + scheme: linear + + +diagnostics: + + cmip5_shhc: + title: CMIP5 Hadley cell extent (Lipat et al. 2017) + description: climatological Hadley cell extent + themes: + - EC + realms: + - atmos + variables: + va: + reference_dataset: ERA-Interim + mip: Amon + exp: historical + start_year: 1986 + end_year: 2005 + project: CMIP5 + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: ACCESS1-0, ensemble: r1i1p1} + # - {dataset: ACCESS1-3, ensemble: r1i1p1} + - {dataset: bcc-csm1-1, ensemble: r1i1p1} + - {dataset: bcc-csm1-1-m, ensemble: r1i1p1} + - {dataset: BNU-ESM, ensemble: r1i1p1} + - {dataset: CanESM2, ensemble: r1i1p1} + - {dataset: CCSM4, ensemble: r1i1p1} + - {dataset: CNRM-CM5, ensemble: r1i1p1} + - {dataset: CSIRO-Mk3-6-0, ensemble: r1i1p1} + # - {dataset: FGOALS-g2, ensemble: r1i1p1} + - {dataset: GFDL-CM3, ensemble: r1i1p1} + - {dataset: GFDL-ESM2G, ensemble: r1i1p1} + - {dataset: GFDL-ESM2M, ensemble: r1i1p1} + - {dataset: GISS-E2-H, ensemble: r1i1p1} + - {dataset: GISS-E2-R, ensemble: r1i1p1} + - {dataset: HadGEM2-ES, ensemble: r1i1p1} + - {dataset: inmcm4, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-LR, ensemble: r1i1p1} + - {dataset: IPSL-CM5B-LR, ensemble: r1i1p1} + - {dataset: MIROC5, ensemble: r1i1p1} + - {dataset: MIROC-ESM, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, ensemble: r1i1p1} + # - {dataset: MPI-ESM-MR, ensemble: r1i1p1} + # - {dataset: MPI-ESM-P, ensemble: r1i1p1} + - {dataset: MRI-CGCM3, ensemble: r1i1p1} + - {dataset: NorESM1-M, ensemble: r1i1p1} + scripts: + ecs_scatter: + diag: shhc + calcmm: false + ecs_file: $diag_scripts/emergent_constraints/ecs_cmip.nc + legend_outside: false + predef_minmax: true + styleset: CMIP5 + script: emergent_constraints/ecs_scatter.ncl + + cmip5_itczidx: + title: CMIP5 southern ITCZ index (Tian 2015) + description: southern ITCZ index + themes: + - EC + realms: + - atmos + variables: + pr: + reference_dataset: TRMM + mip: Amon + exp: historical + start_year: 1986 + end_year: 2005 + project: CMIP5 + additional_datasets: + - {dataset: TRMM, project: obs4MIPs, level: v7, + start_year: 1998, end_year: 2013, tier: 1} + - {dataset: ACCESS1-0, ensemble: r1i1p1} + # - {dataset: ACCESS1-3, ensemble: r1i1p1} + - {dataset: bcc-csm1-1, ensemble: r1i1p1} + - {dataset: bcc-csm1-1-m, ensemble: r1i1p1} + - {dataset: BNU-ESM, ensemble: r1i1p1} + - {dataset: CanESM2, ensemble: r1i1p1} + - {dataset: CCSM4, ensemble: r1i1p1} + - {dataset: CNRM-CM5, ensemble: r1i1p1} + - {dataset: CSIRO-Mk3-6-0, ensemble: r1i1p1} + # - {dataset: FGOALS-g2, ensemble: r1i1p1} + - {dataset: GFDL-CM3, ensemble: r1i1p1} + - {dataset: GFDL-ESM2G, ensemble: r1i1p1} + - {dataset: GFDL-ESM2M, ensemble: r1i1p1} + - {dataset: GISS-E2-H, ensemble: r1i1p1} + - {dataset: GISS-E2-R, ensemble: r1i1p1} + - {dataset: HadGEM2-ES, ensemble: r1i1p1} + - {dataset: inmcm4, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-LR, ensemble: r1i1p1} + - {dataset: IPSL-CM5B-LR, ensemble: r1i1p1} + - {dataset: MIROC5, ensemble: 
r1i1p1} + - {dataset: MIROC-ESM, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, ensemble: r1i1p1} + # - {dataset: MPI-ESM-MR, ensemble: r1i1p1} + # - {dataset: MPI-ESM-P, ensemble: r1i1p1} + - {dataset: MRI-CGCM3, ensemble: r1i1p1} + - {dataset: NorESM1-M, ensemble: r1i1p1} + prStderr: + reference_dataset: TRMM + mip: Amon + frequency: mon + additional_datasets: + - {dataset: TRMM, project: obs4MIPs, level: v7, + start_year: 1998, end_year: 2013, tier: 1} + scripts: + ecs_scatter: + diag: itczidx + calcmm: false + ecs_file: $diag_scripts/emergent_constraints/ecs_cmip.nc + legend_outside: false + predef_minmax: true + styleset: CMIP5 + script: emergent_constraints/ecs_scatter.ncl + + cmip5_humidx: + title: CMIP5 humidity index (Tian 2015) + description: tropical mid-tropospheric humidity asymmetry index + themes: + - EC + realms: + - atmos + variables: + hus: + preprocessor: pp500 + reference_dataset: AIRS-2-1 + mip: Amon + exp: historical + start_year: 1986 + end_year: 2005 + project: CMIP5 + additional_datasets: + - {dataset: AIRS-2-1, project: obs4MIPs, level: L3, + start_year: 2003, end_year: 2010, tier: 1} + - {dataset: ACCESS1-0, ensemble: r1i1p1} + # - {dataset: ACCESS1-3, ensemble: r1i1p1} + - {dataset: bcc-csm1-1, ensemble: r1i1p1} + - {dataset: bcc-csm1-1-m, ensemble: r1i1p1} + - {dataset: BNU-ESM, ensemble: r1i1p1} + - {dataset: CanESM2, ensemble: r1i1p1} + - {dataset: CCSM4, ensemble: r1i1p1} + - {dataset: CNRM-CM5, ensemble: r1i1p1} + - {dataset: CSIRO-Mk3-6-0, ensemble: r1i1p1} + # - {dataset: FGOALS-g2, ensemble: r1i1p1} + - {dataset: GFDL-CM3, ensemble: r1i1p1} + - {dataset: GFDL-ESM2G, ensemble: r1i1p1} + - {dataset: GFDL-ESM2M, ensemble: r1i1p1} + - {dataset: GISS-E2-H, ensemble: r1i1p1} + - {dataset: GISS-E2-R, ensemble: r1i1p1} + - {dataset: HadGEM2-ES, ensemble: r1i1p1} + - {dataset: inmcm4, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-LR, ensemble: r1i1p1} + - {dataset: IPSL-CM5B-LR, ensemble: r1i1p1} + - {dataset: MIROC5, ensemble: r1i1p1} + - {dataset: MIROC-ESM, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, ensemble: r1i1p1} + # - {dataset: MPI-ESM-MR, ensemble: r1i1p1} + # - {dataset: MPI-ESM-P, ensemble: r1i1p1} + - {dataset: MRI-CGCM3, ensemble: r1i1p1} + - {dataset: NorESM1-M, ensemble: r1i1p1} + husStderr: + preprocessor: pp500 + reference_dataset: AIRS-2-1 + mip: Amon + frequency: mon + additional_datasets: + - {dataset: AIRS-2-1, project: obs4MIPs, level: L3, + start_year: 2003, end_year: 2010, tier: 1} + scripts: + ecs_scatter: + diag: humidx + calcmm: false + ecs_file: $diag_scripts/emergent_constraints/ecs_cmip.nc + legend_outside: false + predef_minmax: true + styleset: CMIP5 + script: emergent_constraints/ecs_scatter.ncl + + cmip5_ltmi: + title: CMIP5 LTMI (Sherwood et al., 2014) + description: lower tropospheric mixing index + themes: + - EC + realms: + - atmos + variables: + hur: + preprocessor: pp_ltmi_1 + reference_dataset: ERA-Interim + mip: Amon + exp: historical + start_year: 1986 + end_year: 2005 + project: CMIP5 + ta: + preprocessor: pp_ltmi_1 + reference_dataset: ERA-Interim + mip: Amon + exp: historical + start_year: 1986 + end_year: 2005 + project: CMIP5 + wap: + preprocessor: pp_ltmi_0 + reference_dataset: ERA-Interim + mip: Amon + exp: historical + start_year: 1986 + end_year: 2005 + project: CMIP5 + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: ACCESS1-0, ensemble: r1i1p1} + # - {dataset: ACCESS1-3, ensemble: r1i1p1} + - {dataset: bcc-csm1-1, ensemble: r1i1p1} + - {dataset: 
bcc-csm1-1-m, ensemble: r1i1p1} + - {dataset: BNU-ESM, ensemble: r1i1p1} + - {dataset: CanESM2, ensemble: r1i1p1} + - {dataset: CCSM4, ensemble: r1i1p1} + - {dataset: CNRM-CM5, ensemble: r1i1p1} + - {dataset: CSIRO-Mk3-6-0, ensemble: r1i1p1} + # - {dataset: FGOALS-g2, ensemble: r1i1p1} + - {dataset: GFDL-CM3, ensemble: r1i1p1} + - {dataset: GFDL-ESM2G, ensemble: r1i1p1} + - {dataset: GFDL-ESM2M, ensemble: r1i1p1} + - {dataset: GISS-E2-H, ensemble: r1i1p1} + - {dataset: GISS-E2-R, ensemble: r1i1p1} + - {dataset: HadGEM2-ES, ensemble: r1i1p1} + - {dataset: inmcm4, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-LR, ensemble: r1i1p1} + - {dataset: IPSL-CM5B-LR, ensemble: r1i1p1} + - {dataset: MIROC5, ensemble: r1i1p1} + - {dataset: MIROC-ESM, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, ensemble: r1i1p1} + # - {dataset: MPI-ESM-MR, ensemble: r1i1p1} + # - {dataset: MPI-ESM-P, ensemble: r1i1p1} + - {dataset: MRI-CGCM3, ensemble: r1i1p1} + - {dataset: NorESM1-M, ensemble: r1i1p1} + scripts: + ecs_scatter: + diag: ltmi + calcmm: false + ecs_file: $diag_scripts/emergent_constraints/ecs_cmip.nc + legend_outside: false + predef_minmax: true + styleset: CMIP5 + script: emergent_constraints/ecs_scatter.ncl + + cmip5_covrefl: + title: CMIP5 covariance of shortwave cloud reflection (Brient and Schneider, 2016) + description: covariance of shortwave cloud reflection + themes: + - EC + realms: + - atmos + additional_datasets: + - {dataset: ACCESS1-0, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: ACCESS1-3, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + - {dataset: bcc-csm1-1, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: bcc-csm1-1-m, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: BNU-ESM, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CanESM2, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CCSM4, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CNRM-CM5, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CSIRO-Mk3-6-0, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: FGOALS-g2, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + - {dataset: GFDL-CM3, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2G, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2M, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-H, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-R, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: HadGEM2-ES, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: inmcm4, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5A-LR, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5B-LR, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC5, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC-ESM, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-LR, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: MPI-ESM-MR, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + # - {dataset: MPI-ESM-P, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + - {dataset: MRI-CGCM3, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: NorESM1-M, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + variables: + ts: + reference_dataset: HadISST + 
mip: Amon + exp: historical + project: CMIP5 + additional_datasets: + - {dataset: HadISST, project: OBS, type: reanaly, version: 1, + start_year: 2001, end_year: 2005, tier: 2} + hur: + preprocessor: pp500 + reference_dataset: ERA-Interim + mip: Amon + exp: historical + project: CMIP5 + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, + start_year: 2001, end_year: 2005, tier: 3} + rsut: + reference_dataset: CERES-EBAF + mip: Amon + exp: historical + project: CMIP5 + additional_datasets: + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, start_year: 2001, end_year: 2005, + tier: 1} + rsutcs: + reference_dataset: CERES-EBAF + mip: Amon + exp: historical + project: CMIP5 + additional_datasets: + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, start_year: 2001, end_year: 2005, + tier: 1} + rsdt: + reference_dataset: CERES-EBAF + mip: Amon + exp: historical + project: CMIP5 + additional_datasets: + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, start_year: 2001, end_year: 2005, + tier: 1} + scripts: + ecs_scatter: + diag: covrefl + calcmm: false + ecs_file: $diag_scripts/emergent_constraints/ecs_cmip.nc + legend_outside: false + predef_minmax: true + styleset: CMIP5 + script: emergent_constraints/ecs_scatter.ncl + + cmip6_itczidx: + title: CMIP6 southern ITCZ index (Tian 2015) + description: southern ITCZ index + themes: + - EC + realms: + - atmos + variables: + pr: + reference_dataset: TRMM + start_year: 1998 + end_year: 2013 + exp: historical + mip: Amon + project: CMIP6 + additional_datasets: + - {dataset: TRMM, project: obs4MIPs, level: v7, tier: 1} + - {dataset: BCC-CSM2-MR, grid: gn, ensemble: r1i1p1f1} + - {dataset: BCC-ESM1, grid: gn, ensemble: r1i1p1f1} + - {dataset: CAMS-CSM1-0, grid: gn, ensemble: r1i1p1f1} + - {dataset: CESM2, grid: gn, ensemble: r1i1p1f1} + - {dataset: CESM2-WACCM, institute: NCAR, grid: gn, + ensemble: r1i1p1f1} + - {dataset: CNRM-CM6-1, grid: gr, ensemble: r1i1p1f2} + - {dataset: CNRM-ESM2-1, grid: gr, ensemble: r1i1p1f2} + - {dataset: GFDL-CM4, grid: gr1, ensemble: r1i1p1f1} + - {dataset: GISS-E2-1-G, grid: gn, ensemble: r1i1p1f1} + - {dataset: GISS-E2-1-H, grid: gn, ensemble: r1i1p1f1} + - {dataset: IPSL-CM6A-LR, grid: gr, ensemble: r15i1p1f1} + - {dataset: MIROC6, grid: gn, ensemble: r1i1p1f1} + - {dataset: MRI-ESM2-0, grid: gn, ensemble: r1i1p1f1} + prStderr: + reference_dataset: TRMM + mip: Amon + frequency: mon + start_year: 1998 + end_year: 2013 + additional_datasets: + - {dataset: TRMM, project: obs4MIPs, level: v7, tier: 1} + scripts: + ecs_scatter: + diag: itczidx + calcmm: false + ecs_file: $diag_scripts/emergent_constraints/ecs_cmip.nc + legend_outside: false + predef_minmax: true + script: emergent_constraints/ecs_scatter.ncl + + cmip6_humidx: + title: CMIP6 humidity index (Tian 2015) + description: tropical mid-tropospheric humidity asymmetry index + themes: + - EC + realms: + - atmos + variables: + hus: + preprocessor: pp500 + reference_dataset: AIRS-2-1 + start_year: 2003 + end_year: 2010 + exp: historical + mip: Amon + project: CMIP6 + additional_datasets: + - {dataset: AIRS-2-1, project: obs4MIPs, level: L3, tier: 1} + - {dataset: BCC-CSM2-MR, grid: gn, ensemble: r1i1p1f1} + - {dataset: BCC-ESM1, grid: gn, ensemble: r1i1p1f1} + - {dataset: CAMS-CSM1-0, grid: gn, ensemble: r1i1p1f1} + - {dataset: CESM2, grid: gn, ensemble: r1i1p1f1} + - {dataset: CESM2-WACCM, institute: NCAR, grid: gn, + ensemble: r1i1p1f1} + - {dataset: CNRM-CM6-1, grid: gr, ensemble: r1i1p1f2} + - {dataset: 
CNRM-ESM2-1, grid: gr, ensemble: r1i1p1f2} + - {dataset: GFDL-CM4, grid: gr1, ensemble: r1i1p1f1} + - {dataset: GISS-E2-1-G, grid: gn, ensemble: r1i1p1f1} + - {dataset: GISS-E2-1-H, grid: gn, ensemble: r1i1p1f1} + - {dataset: IPSL-CM6A-LR, grid: gr, ensemble: r15i1p1f1} + - {dataset: MIROC6, grid: gn, ensemble: r1i1p1f1} + - {dataset: MRI-ESM2-0, grid: gn, ensemble: r1i1p1f1} + husStderr: + preprocessor: pp500 + reference_dataset: AIRS-2-1 + mip: Amon + frequency: mon + start_year: 2003 + end_year: 2010 + additional_datasets: + - {dataset: AIRS-2-1, project: obs4MIPs, level: L3, tier: 1} + scripts: + ecs_scatter: + diag: humidx + calcmm: false + ecs_file: $diag_scripts/emergent_constraints/ecs_cmip.nc + legend_outside: false + predef_minmax: true + script: emergent_constraints/ecs_scatter.ncl + + cmip6_ltmi: + title: CMIP6 LTMI (Sherwood et al., 2014) + description: lower tropospheric mixing index + themes: + - EC + realms: + - atmos + variables: + hur: + preprocessor: pp_ltmi_1 + reference_dataset: ERA-Interim + start_year: 1980 + end_year: 2005 + exp: historical + mip: Amon + project: CMIP6 + ta: + preprocessor: pp_ltmi_1 + reference_dataset: ERA-Interim + start_year: 1980 + end_year: 2005 + exp: historical + mip: Amon + project: CMIP6 + wap: + preprocessor: pp_ltmi_0 + reference_dataset: ERA-Interim + start_year: 1980 + end_year: 2005 + exp: historical + mip: Amon + project: CMIP6 + additional_datasets: + - {dataset: BCC-CSM2-MR, grid: gn, ensemble: r1i1p1f1} + - {dataset: BCC-ESM1, grid: gn, ensemble: r1i1p1f1} + - {dataset: CAMS-CSM1-0, grid: gn, ensemble: r1i1p1f1} + - {dataset: CESM2, grid: gn, ensemble: r1i1p1f1} + - {dataset: CESM2-WACCM, institute: NCAR, grid: gn, + ensemble: r1i1p1f1} + - {dataset: CNRM-CM6-1, grid: gr, ensemble: r1i1p1f2} + - {dataset: CNRM-ESM2-1, grid: gr, ensemble: r1i1p1f2} + - {dataset: GFDL-CM4, grid: gr1, ensemble: r1i1p1f1} + - {dataset: GISS-E2-1-G, grid: gn, ensemble: r1i1p1f1} + - {dataset: GISS-E2-1-H, grid: gn, ensemble: r1i1p1f1} + - {dataset: IPSL-CM6A-LR, grid: gr, ensemble: r15i1p1f1} + - {dataset: MIROC6, grid: gn, ensemble: r1i1p1f1} + - {dataset: MRI-ESM2-0, grid: gn, ensemble: r1i1p1f1} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + scripts: + ecs_scatter: + diag: ltmi + calcmm: false + ecs_file: $diag_scripts/emergent_constraints/ecs_cmip.nc + legend_outside: false + predef_minmax: true + script: emergent_constraints/ecs_scatter.ncl + + cmip6_shhc: + title: CMIP6 Hadley cell extent (Lipat et al. 
2017) + description: climatological Hadley cell extent + themes: + - EC + realms: + - atmos + variables: + va: + reference_dataset: ERA-Interim + start_year: 1980 + end_year: 2005 + exp: historical + mip: Amon + project: CMIP6 + additional_datasets: + - {dataset: BCC-CSM2-MR, grid: gn, ensemble: r1i1p1f1} + - {dataset: BCC-ESM1, grid: gn, ensemble: r1i1p1f1} + - {dataset: CAMS-CSM1-0, grid: gn, ensemble: r1i1p1f1} + - {dataset: CESM2, grid: gn, ensemble: r1i1p1f1} + - {dataset: CESM2-WACCM, institute: NCAR, grid: gn, + ensemble: r1i1p1f1} + - {dataset: CNRM-CM6-1, grid: gr, ensemble: r1i1p1f2} + - {dataset: CNRM-ESM2-1, grid: gr, ensemble: r1i1p1f2} + - {dataset: GFDL-CM4, grid: gr1, ensemble: r1i1p1f1} + - {dataset: GISS-E2-1-G, grid: gn, ensemble: r1i1p1f1} + - {dataset: GISS-E2-1-H, grid: gn, ensemble: r1i1p1f1} + - {dataset: IPSL-CM6A-LR, grid: gr, ensemble: r15i1p1f1} + - {dataset: MIROC6, grid: gn, ensemble: r1i1p1f1} + - {dataset: MRI-ESM2-0, grid: gn, ensemble: r1i1p1f1} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + scripts: + ecs_scatter: + diag: shhc + calcmm: false + ecs_file: $diag_scripts/emergent_constraints/ecs_cmip.nc + legend_outside: false + predef_minmax: true + script: emergent_constraints/ecs_scatter.ncl + + cmip6_covrefl: + title: CMIP6 covariance of shortwave cloud reflection (Brient and Schneider, 2016) + description: covariance of shortwave cloud reflection + themes: + - EC + realms: + - atmos + variables: + ts: + reference_dataset: HadISST + start_year: 2001 + end_year: 2012 + exp: historical + mip: Amon + project: CMIP6 + additional_datasets: + - {dataset: HadISST, project: OBS, type: reanaly, version: 1, tier: 2} + - {dataset: BCC-CSM2-MR, grid: gn, ensemble: r1i1p1f1} + - {dataset: BCC-ESM1, grid: gn, ensemble: r2i1p1f1} + # - {dataset: CAMS-CSM1-0, grid: gn, ensemble: r1i1p1f1} + - {dataset: CESM2, grid: gn, ensemble: r1i1p1f1} + - {dataset: CESM2-WACCM, institute: NCAR, grid: gn, + ensemble: r1i1p1f1} + - {dataset: CNRM-CM6-1, grid: gr, ensemble: r1i1p1f2} + - {dataset: CNRM-ESM2-1, grid: gr, ensemble: r1i1p1f2} + - {dataset: GFDL-CM4, grid: gr1, ensemble: r1i1p1f1} + - {dataset: GISS-E2-1-G, grid: gn, ensemble: r1i1p1f1} + - {dataset: GISS-E2-1-H, grid: gn, ensemble: r1i1p1f1} + - {dataset: IPSL-CM6A-LR, grid: gr, ensemble: r1i1p1f1} + - {dataset: MIROC6, grid: gn, ensemble: r1i1p1f1} + - {dataset: MRI-ESM2-0, grid: gn, ensemble: r1i1p1f1} + hur: + preprocessor: pp500 + reference_dataset: ERA-Interim + start_year: 2001 + end_year: 2012 + exp: historical + mip: Amon + project: CMIP6 + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: BCC-CSM2-MR, grid: gn, ensemble: r1i1p1f1} + - {dataset: BCC-ESM1, grid: gn, ensemble: r2i1p1f1} + # - {dataset: CAMS-CSM1-0, grid: gn, ensemble: r1i1p1f1} + - {dataset: CESM2, grid: gn, ensemble: r1i1p1f1} + - {dataset: CESM2-WACCM, institute: NCAR, grid: gn, + ensemble: r1i1p1f1} + - {dataset: CNRM-CM6-1, grid: gr, ensemble: r1i1p1f2} + - {dataset: CNRM-ESM2-1, grid: gr, ensemble: r1i1p1f2} + - {dataset: GFDL-CM4, grid: gr1, ensemble: r1i1p1f1} + - {dataset: GISS-E2-1-G, grid: gn, ensemble: r1i1p1f1} + - {dataset: GISS-E2-1-H, grid: gn, ensemble: r1i1p1f1} + - {dataset: IPSL-CM6A-LR, grid: gr, ensemble: r15i1p1f1} + - {dataset: MIROC6, grid: gn, ensemble: r1i1p1f1} + - {dataset: MRI-ESM2-0, grid: gn, ensemble: r1i1p1f1} + rsut: + reference_dataset: CERES-EBAF + start_year: 2001 + end_year: 2012 + exp: historical + mip: Amon + 
project: CMIP6 + additional_datasets: + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} + - {dataset: BCC-CSM2-MR, grid: gn, ensemble: r1i1p1f1} + - {dataset: BCC-ESM1, grid: gn, ensemble: r2i1p1f1} + # - {dataset: CAMS-CSM1-0, grid: gn, ensemble: r1i1p1f1} + - {dataset: CESM2, grid: gn, ensemble: r1i1p1f1} + - {dataset: CESM2-WACCM, institute: NCAR, grid: gn, + ensemble: r1i1p1f1} + - {dataset: CNRM-CM6-1, grid: gr, ensemble: r1i1p1f2} + - {dataset: CNRM-ESM2-1, grid: gr, ensemble: r1i1p1f2} + - {dataset: GFDL-CM4, grid: gr1, ensemble: r1i1p1f1} + - {dataset: GISS-E2-1-G, grid: gn, ensemble: r1i1p1f1} + - {dataset: GISS-E2-1-H, grid: gn, ensemble: r1i1p1f1} + - {dataset: IPSL-CM6A-LR, grid: gr, ensemble: r1i1p1f1} + - {dataset: MIROC6, grid: gn, ensemble: r1i1p1f1} + - {dataset: MRI-ESM2-0, grid: gn, ensemble: r1i1p1f1} + rsutcs: + reference_dataset: CERES-EBAF + start_year: 2001 + end_year: 2012 + exp: historical + mip: Amon + project: CMIP6 + additional_datasets: + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} + - {dataset: BCC-CSM2-MR, grid: gn, ensemble: r1i1p1f1} + - {dataset: BCC-ESM1, grid: gn, ensemble: r2i1p1f1} + # - {dataset: CAMS-CSM1-0, grid: gn, ensemble: r1i1p1f1} + - {dataset: CESM2, grid: gn, ensemble: r1i1p1f1} + - {dataset: CESM2-WACCM, institute: NCAR, grid: gn, + ensemble: r1i1p1f1} + - {dataset: CNRM-CM6-1, grid: gr, ensemble: r1i1p1f2} + - {dataset: CNRM-ESM2-1, grid: gr, ensemble: r1i1p1f2} + - {dataset: GFDL-CM4, grid: gr1, ensemble: r1i1p1f1} + - {dataset: GISS-E2-1-G, grid: gn, ensemble: r1i1p1f1} + - {dataset: GISS-E2-1-H, grid: gn, ensemble: r1i1p1f1} + - {dataset: IPSL-CM6A-LR, grid: gr, ensemble: r1i1p1f1} + - {dataset: MIROC6, grid: gn, ensemble: r1i1p1f1} + - {dataset: MRI-ESM2-0, grid: gn, ensemble: r1i1p1f1} + rsdt: + reference_dataset: CERES-EBAF + start_year: 2001 + end_year: 2012 + exp: historical + mip: Amon + project: CMIP6 + additional_datasets: + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} + - {dataset: BCC-CSM2-MR, grid: gn, ensemble: r1i1p1f1} + - {dataset: BCC-ESM1, grid: gn, ensemble: r2i1p1f1} + # - {dataset: CAMS-CSM1-0, grid: gn, ensemble: r1i1p1f1} + - {dataset: CESM2, grid: gn, ensemble: r1i1p1f1} + - {dataset: CESM2-WACCM, institute: NCAR, grid: gn, + ensemble: r1i1p1f1} + - {dataset: CNRM-CM6-1, grid: gr, ensemble: r1i1p1f2} + - {dataset: CNRM-ESM2-1, grid: gr, ensemble: r1i1p1f2} + - {dataset: GFDL-CM4, grid: gr1, ensemble: r1i1p1f1} + - {dataset: GISS-E2-1-G, grid: gn, ensemble: r1i1p1f1} + - {dataset: GISS-E2-1-H, grid: gn, ensemble: r1i1p1f1} + - {dataset: IPSL-CM6A-LR, grid: gr, ensemble: r1i1p1f1} + - {dataset: MIROC6, grid: gn, ensemble: r1i1p1f1} + - {dataset: MRI-ESM2-0, grid: gn, ensemble: r1i1p1f1} + scripts: + ecs_scatter: + diag: covrefl + calcmm: true + ecs_file: $diag_scripts/emergent_constraints/ecs_cmip.nc + legend_outside: false + predef_minmax: true + script: emergent_constraints/ecs_scatter.ncl diff --git a/esmvaltool/recipes/recipe_ensclus.yml b/esmvaltool/recipes/recipe_ensclus.yml index 4c7bc35d53..e50011576a 100644 --- a/esmvaltool/recipes/recipe_ensclus.yml +++ b/esmvaltool/recipes/recipe_ensclus.yml @@ -1,20 +1,22 @@ +# ESMValTool # recipe_EnsClus.yml --- documentation: + title: Sub-ensemble Selection by Clustering description: | Recipe for sub-ensemble selection. 
-      The diagnostics groups ensemble members according to similar
-      characteristics and selects the most representative member
+      The diagnostic groups ensemble members according to similar
+      characteristics and selects the most representative member
       for each cluster based on a k-means algorithm

   authors:
-    - mavi_ir
-    - hard_jo
-    - arno_en
-    - cort_su
+    - mavilia_irene
+    - vonhardenberg_jost
+    - arnone_enrico
+    - corti_susanna

   maintainer:
-    - hard_jo
+    - vonhardenberg_jost

   references:
     - straus07jcli
@@ -87,7 +89,6 @@ diagnostics:
       ## Information required:
       ##-------------------------------about data-------------------------------------------
       ## Write only letters or numbers, no punctuation marks!
-      ## If you want to leave the field empty write 'no'
      season: 'JJA'  #seasonal average
      area: 'EU'  #regional average (examples:'EAT':Euro-Atlantic
                  #                            'PNA': Pacific North American
@@ -99,4 +100,5 @@ diagnostics:
      #Either set perc or numpcs:
      perc: 80  #cluster analysis is applied on a number of PCs such as they explain
                #'perc' of total variance
-     numpcs: 'no'  #number of PCs
+     numpcs: 0  #number of PCs to retain. Has priority over perc unless set to 0
+     max_plot_panels: 72  #threshold in number of panels in a plot to create multiple figures
diff --git a/esmvaltool/recipes/recipe_esacci_lst.yml b/esmvaltool/recipes/recipe_esacci_lst.yml
new file mode 100644
index 0000000000..5f4aa5ca9f
--- /dev/null
+++ b/esmvaltool/recipes/recipe_esacci_lst.yml
@@ -0,0 +1,67 @@
+# Recipe to call ESA CCI LST diagnostic.
+---
+documentation:
+  title: ESA CCI LST diagnostic
+  description: |
+    Time series of the difference between ESA CCI land surface temperature
+    and the mean of CMIP6 historical model ensembles over a selected region.
+  authors:
+    - king_robert
+
+  maintainer:
+    - king_robert
+
+  references:
+    - esacci_lst
+
+  projects:
+    - cmug
+
+datasets:
+  - {dataset: CESM2, project: CMIP6, exp: historical, ensemble: r(2:3)i1p1f1,
+     start_year: 2004, end_year: 2005, grid: gn}
+  - {dataset: UKESM1-0-LL, project: CMIP6, exp: historical,
+     ensemble: r(1:2)i1p1f2, start_year: 2004, end_year: 2005, grid: gn}
+  - {dataset: ESACCI-LST, project: OBS, type: sat, tier: 2,
+     start_year: 2004, end_year: 2005, version: '1.00'}
+
+preprocessors:
+
+  lst_preprocessor:
+    regrid:
+      target_grid: UKESM1-0-LL
+      scheme: linear
+
+    extract_region:
+      start_longitude: 35
+      end_longitude: 175
+      start_latitude: 55
+      end_latitude: 70
+
+    mask_landsea:
+      mask_out: sea
+
+    area_statistics:
+      operator: mean
+
+    multi_model_statistics:
+      span: overlap
+      statistics: [mean, std_dev]
+      exclude: [ESACCI-LST]
+
+
+diagnostics:
+
+  timeseries:
+    description: ESACCI LST difference to model historical ensemble average
+    themes:
+      - phys
+    realms:
+      - land
+    variables:
+      ts:
+        mip: Amon
+        preprocessor: lst_preprocessor
+
+    scripts:
+      script1:
+        script: lst/lst.py
diff --git a/esmvaltool/recipes/recipe_esacci_oc.yml b/esmvaltool/recipes/recipe_esacci_oc.yml
new file mode 100644
index 0000000000..cf180b72a7
--- /dev/null
+++ b/esmvaltool/recipes/recipe_esacci_oc.yml
@@ -0,0 +1,90 @@
+# ESMValTool
+# recipe_esacci_oc.yml
+---
+documentation:
+  title: ESACCI Ocean Color Evaluation
+  description: |
+    Recipe for Ocean Color diagnostics.
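+    Compares modelled surface chlorophyll (chl) from CMIP6 historical
+    simulations against the ESACCI-OC satellite product.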
+
+  authors:
+    - willen_ulrika
+    - zimmermann_klaus
+
+  maintainer:
+    - willen_ulrika
+    - zimmermann_klaus
+
+  projects:
+    - cmug
+
+
+datasets:
+  # - {dataset: EC-Earth3-Veg, project: CMIP6, ensemble: r1i1p1f1, grid: gn}
+  # - {dataset: IPSL-CM6A-LR, project: CMIP6, ensemble: r1i1p1f1, grid: gn}
+  - {dataset: NorESM2-LM, project: CMIP6, ensemble: r1i1p1f1, grid: gr}
+  # - {dataset: CNRM-ESM2-1, project: CMIP6, ensemble: r1i1p1f2, grid: gn}
+  # - {dataset: MIROC6, project: CMIP6, ensemble: r1i1p1f1, grid: gn}
+  - {dataset: MPI-ESM1-2-LR, project: CMIP6, ensemble: r1i1p1f1, grid: gn}
+
+
+preprocessors:
+
+  prep_chl:
+    custom_order: true
+    extract_levels:
+      levels: 0.
+      scheme: nearest_extrapolate
+    climate_statistics:
+      operator: mean
+    regrid:
+      target_grid: 2x2
+      scheme: linear
+    convert_units:
+      units: mg m-3
+    mask_above_threshold:
+      threshold: 1.5
+
+
+diagnostics:
+
+  oc_chl:
+    description: climatological annual means
+    themes:
+      - phys
+    realms:
+      - ocean
+    variables:
+      chl:
+        preprocessor: prep_chl
+        # reference_dataset: ESACCI-OC
+        mip: Omon
+        exp: historical
+        start_year: 1998
+        end_year: 2014
+#        thresholds: [0.0, 0.2, 0.4, 0.6, 0.8, 1.0, 1.2, 1.4]
+#        preprocessor: prep_surface_map_2D
+#        mip: Omon
+#        maps_range: [-0.12, 0.12]
+#        diff_range: [-0.09, 0.09]
+#        layout_rowcol: [4, 4]
+        additional_datasets:
+          - {dataset: ESACCI-OC, project: OBS6, type: sat, version: fv5.0, tier: 2}
+    scripts:
+#      Global_Ocean_Surface_mean_map: &Global_Ocean_Surface_mean_map
+#        script: ocean/diagnostic_maps.py
+      Global_Ocean_model_vs_obs:
+        script: ocean/diagnostic_model_vs_obs.py
+        observational_dataset: {dataset: ESACCI-OC}
+
+# scripts:
+#   Global_Ocean_multi_vs_obs:
+#     script: ocean/diagnostic_maps_multimodel.py
+#     observational_dataset: {dataset: ESACCI-OC, project: OBS, type: sat, version: fv3.1, tier: 2}
+
+# Global_Ocean_map: &Global_Ocean_map
+#   script: ocean/diagnostic_maps_quad.py
+#   control_model: {dataset: NorESM2-LM, project: CMIP6, mip: Omon, exp: historical, ensemble: r1i1p1f1}
+#   exper_model: {dataset: NorESM2-LM, project: CMIP6, mip: Omon, exp: historical, ensemble: r1i1p1f1}
+#   observational_dataset: {dataset: ESACCI-OC, project: OBS}
+#   control_model: {dataset: ESACCI-OC, project: OBS}
+#   control_model: {dataset: ESACCI-OC, project: OBS, mip: Omon, exp: historical, ensemble: r1i1p1}
diff --git a/esmvaltool/recipes/recipe_extreme_events.yml b/esmvaltool/recipes/recipe_extreme_events.yml
new file mode 100644
index 0000000000..f00f1e247a
--- /dev/null
+++ b/esmvaltool/recipes/recipe_extreme_events.yml
@@ -0,0 +1,88 @@
+# ESMValTool
+# recipe_extreme_events.yml
+---
+documentation:
+  title: Extreme Events Indices
+
+  description: |
+    Calculate indices for monitoring changes in extremes based on daily
+    temperature and precipitation data. Produces Gleckler and timeline plots as
Producing Glecker and timeline plots of this as + shown in the IPCC_AR4 report + + authors: + - broetz_bjoern + - sandstad_marit + - mohr_christianwilhelm + - arnone_enrico + - vonhardenberg_jost + + maintainer: + - sandstad_marit + + references: + - zhang11wcc + + projects: + - crescendo + - c3s-magic + +#preprocessor: +# prep0: + +datasets: + - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: HadCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + +# - {dataset: EOBS, project: OBS, type: reanaly, version: 1, start_year: 1981, end_year: 2005, tier: 3} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, start_year: 1981, end_year: 2000, tier: 3} + +diagnostics: 
+ extreme_events: + description: calculate extreme events + variables: + pr: + mip: day + tas: + mip: day + tasmax: + mip: day + tasmin: + mip: day + + scripts: + main: + script: extreme_events/extreme_events.R + reference_datasets: ["ERA-Interim", "BNU-ESM", "ACCESS1-0", "ACCESS1-3"] + regrid_dataset: ERA-Interim + mip_name: CMIP + timeseries_idx: ["sdiiETCCDI_yr", "r95pETCCDI_yr", "rx5dayETCCDI_yr", "rx1dayETCCDI_yr", "cddETCCDI_yr", "fdETCCDI_yr", "trETCCDI_yr", "txnETCCDI_yr", "txxETCCDI_yr", "tnnETCCDI_yr", "tnxETCCDI_yr"] + gleckler_idx: ["sdiiETCCDI_yr", "r95pETCCDI_yr", "rx5dayETCCDI_yr", "rx1dayETCCDI_yr", "cddETCCDI_yr", "fdETCCDI_yr", "trETCCDI_yr", "txnETCCDI_yr", "txxETCCDI_yr", "tnnETCCDI_yr", "tnxETCCDI_yr"] + ts_plt: true + glc_plt: true + base_range: [1981, 2000] + analysis_range: [1981, 2000] diff --git a/esmvaltool/recipes/recipe_extreme_index.yml b/esmvaltool/recipes/recipe_extreme_index.yml new file mode 100644 index 0000000000..31566e0fab --- /dev/null +++ b/esmvaltool/recipes/recipe_extreme_index.yml @@ -0,0 +1,69 @@ +# ESMValTool +# recipe_extreme_index.yml +--- +documentation: + title: | + Time series for extreme events. + + description: | + Tool to compute time series of a number of extreme events: heatwave, + coldwave, heavy precipitation, drought and high wind. + + authors: + - hunter_alasdair + - perez-zanon_nuria + - manubens_nicolau + - caron_louis-philippe + + maintainer: + - unmaintained + + projects: + - c3s-magic + + references: + - alexander06jgr + +datasets: + - {dataset: MPI-ESM-MR, type: exp, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1971, end_year: 2000} + - {dataset: MPI-ESM-MR, type: exp, project: CMIP5, exp: rcp85, ensemble: r1i1p1, start_year: 2020, end_year: 2040} + +preprocessors: + preproc: + extract_region: + start_longitude: -20 + end_longitude: 40 + start_latitude: 30 + end_latitude: 70 + mask_landsea: + mask_out: sea + +diagnostics: + extreme_index: + description: Calculate insurance extreme indices and their combination. + variables: + tasmax: + preprocessor: preproc + mip: day + tasmin: + preprocessor: preproc + mip: day + sfcWind: + preprocessor: preproc + mip: day + pr: + preprocessor: preproc + mip: day + + scripts: + metric: + script: magic_bsc/extreme_index.R + + weight_t90p: 0.2 + weight_t10p: 0.2 + weight_Wx: 0.2 + weight_rx5day: 0.2 + weight_cdd: 0.2 + + # Compute running mean? + running_mean: 5 #3 diff --git a/esmvaltool/recipes/recipe_extreme_index_wp7.yml b/esmvaltool/recipes/recipe_extreme_index_wp7.yml deleted file mode 100644 index cac80e9808..0000000000 --- a/esmvaltool/recipes/recipe_extreme_index_wp7.yml +++ /dev/null @@ -1,50 +0,0 @@ -# ESMValTool -# recipe_extreme_index_wp7.yml ---- -documentation: - description: | - Tool to compute time series of a number of extreme events: heatwave, - coldwave, heavy precipitation, drought and high wind. 
- - authors: - - hunt_al - - pere_nu - - manu_ni - - caro_lo - - projects: - - c3s-magic - - references: - - alexander - -datasets: - - {dataset: IPSL-CM5A-MR, type: exp, project: CMIP5, mip: day, exp: historical, ensemble: r1i1p1, start_year: 1971, end_year: 2000} - - {dataset: IPSL-CM5A-MR, type: exp, project: CMIP5, mip: day, exp: rcp85, ensemble: r1i1p1, start_year: 2020, end_year: 2040} - -preprocessors: - preproc: - regrid: - target_grid: IPSL-CM5A-MR - scheme: linear - mask_fillvalues: - threshold_fraction: 0.95 - extract_region: - start_longitude: -60 - end_longitude: 40 - start_latitude: 30 - end_latitude: 70 - -diagnostics: - extreme_index: - description: Calculate insurance extreme indices. - variables: - sfcWind: - preprocessor: preproc - mip: day - - scripts: - main: - script: magic_bsc/extreme_index.r - - metric: Wx # t10p, t90p, cdd, rx5day or Wx diff --git a/esmvaltool/recipes/recipe_eyring06jgr.yml b/esmvaltool/recipes/recipe_eyring06jgr.yml new file mode 100644 index 0000000000..9f070706ac --- /dev/null +++ b/esmvaltool/recipes/recipe_eyring06jgr.yml @@ -0,0 +1,326 @@ +# ESMValTool +# recipe_eyring06jgr.yml +--- +documentation: + title: Stratospheric chemistry and dynamics analyses + + description: | + Diagnostics of stratospheric dynamics and chemistry reproducing selected + figures from Eyring et al. JGR (2006). + + authors: + - cionni_irene + - hassler_birgit + + maintainer: + - hassler_birgit + + references: + - eyring06jgr + + projects: + - crescendo + +datasets: + - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1980, end_year: 2005} + - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1960, end_year: 2005} + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - 
{dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1960, end_year: 2005} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: MRI-ESM1, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + +preprocessors: + + # Figure 1 + regrid_interp_lev_zonal: ®rid_and_zonal_preproc + extract_levels: + scheme: linear + levels: reference_dataset + regrid: + target_grid: reference_dataset + scheme: linear + zonal_statistics: + operator: mean + + regrid_interp_lev_zonal_o3: + <<: *regrid_and_zonal_preproc + convert_units: + units: ppmv + + # zonal mean + zonal: &zonal_preproc + regrid: + target_grid: reference_dataset + scheme: linear + zonal_statistics: + operator: mean + mask_fillvalues: + threshold_fraction: 0.95 + + zonal_o3_du: + <<: *zonal_preproc + convert_units: + units: DU + + +diagnostics: + + # ========================================================================== + # Eyring et al. (2006) - Figure 1 + # Vertical profile climatological mean bias, + # climatological mean for selected seasons and latitudinal region + # ========================================================================== + + eyring06jgr_fig01: + description: vertical profile climatological seasonal means bias + themes: + - chem + realms: + - atmos + variables: + ta: + preprocessor: regrid_interp_lev_zonal + reference_dataset: ERA-Interim + mip: Amon + additional_datasets: + - {dataset: ACCESS1-0, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1980, end_year: 2005} + - {dataset: ACCESS1-3, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1980, end_year: 2005} + - {dataset: CESM1-BGC, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1980, end_year: 2005} + - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1980, end_year: 2005} + - {dataset: CMCC-CESM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1980, end_year: 2005} + - {dataset: CMCC-CM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1980, end_year: 2005} + - {dataset: CMCC-CMS, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1980, end_year: 2005} + - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1980, end_year: 2005} + - {dataset: GISS-E2-H, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1980, end_year: 2005} + - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1980, end_year: 2005} + - {dataset: GISS-E2-R, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1980, end_year: 2005} + - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1980, end_year: 2005} + - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1980, end_year: 2005} + - {dataset: inmcm4, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1980, end_year: 2005} + - {dataset: MRI-CGCM3, 
project: CMIP5, exp: historical,
+         ensemble: r1i1p1, start_year: 1980, end_year: 2005}
+      - {dataset: NorESM1-M, project: CMIP5, exp: historical,
+         ensemble: r1i1p1, start_year: 1980, end_year: 2005}
+      - {dataset: NorESM1-ME, project: CMIP5, exp: historical,
+         ensemble: r1i1p1, start_year: 1980, end_year: 2005}
+      - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1,
+         start_year: 1980, end_year: 1999, tier: 3}
+    scripts:
+      fig01:
+        script: eyring06jgr/eyring06jgr_fig01.ncl
+        # Time average ('DJF', 'MAM', 'JJA', 'SON', or 'ANN')
+        season: ['DJF', 'MAM', 'JJA', 'SON']
+        # Minimum latitude for region selection
+        latmin: [60., 60., -90., -90.]
+        # Maximum latitude for region selection
+        latmax: [90., 90., -60., -60.]
+        # Lower and upper limits for the X axis
+        Xmin: [-30., -30., -30., -30.]
+        Xmax: [20., 20., 20., 20.]
+        # Lower and upper limits for the Y axis
+        Ymin: [1., 1., 1., 1.]
+        Ymax: [350., 350., 350., 350.]
+        # Climatology time period
+        start_year: 1980
+        end_year: 1999
+        # Calculate multimodel mean?
+        multimean: True
+        # line style definition
+        styleset: CMIP5
+
+
+  # ===========================================================================
+  # Eyring et al. (2006) - Figure 5: two sets of figures, a (vertical
+  # profiles) and b (latitudinal profiles).
+  # Vertical profile climatological mean bias and climatological mean for
+  # selected seasons and latitudinal regions. This figure and its settings
+  # are also valid for figure 5 (CH4), figure 6 (H2O), figure 11 (HCl) and
+  # figure 13 (tro3).
+  # ===========================================================================
+
+  eyring06jgr_fig05a:
+    description: vertical profile climatological seasonal means bias
+    themes:
+      - chem
+    realms:
+      - atmos
+    variables:
+      tro3:
+        preprocessor: regrid_interp_lev_zonal_o3
+        reference_dataset: HALOE
+        mip: Amon
+        additional_datasets:
+          - {dataset: HALOE, project: OBS, type: sat, version: 1,
+             start_year: 1991, end_year: 2002, tier: 2}
+    scripts:
+      fig05a:
+        script: eyring06jgr/eyring06jgr_fig05a.ncl
+        # Selected projection
+        projection: CylindricalEquidistant
+        # Temporal aggregation
+        timemean: annualclim
+        # Should the difference be shown?
+        showdiff: false
+        # Minimum latitude for region selection
+        latmin: [80., -10., -82.5]
+        # Maximum latitude for region selection
+        latmax: [82.5, 10., -80.]
+        # Selected months
+        month: ['3', '3', '9']
+        # Lower and upper limits for the X axis
+        Xmin: [0., 0., 0.]
+        Xmax: [7., 12., 7.]
+        # Lower and upper limits for the Y axis
+        levmin: [1., 1., 1.]
+        levmax: [200., 200., 200.]
+        # Climatology time period
+        start_year: 1991
+        end_year: 2002
+        # Calculate multimodel mean?
+        multimean: True
+        # line style definition
+        styleset: CMIP5
+
+  eyring06jgr_fig05b:
+    description: latitudinal profile climatological seasonal mean
+    themes:
+      - chem
+    realms:
+      - atmos
+    variables:
+      tro3:
+        preprocessor: regrid_interp_lev_zonal_o3
+        reference_dataset: HALOE
+        mip: Amon
+        additional_datasets:
+          - {dataset: HALOE, project: OBS, type: sat, version: 1,
+             start_year: 1991, end_year: 2002, tier: 2}
+    scripts:
+      fig5b:
+        script: eyring06jgr/eyring06jgr_fig05b.ncl
+        projection: CylindricalEquidistant
+        # Temporal aggregation
+        timemean: annualclim
+        # Should the difference be shown?
+        showdiff: false
+        # line style definition
+        styleset: CMIP5
+        # Selected pressure level for each panel, given in Pa
+        lev: ['5000', '5000']
+        # Selected months
+        month: ['3', '9']
+        # Lower and upper limits for the Y axis
+        YMin: [0., 0.]
+        YMax: [5., 5.]
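+        # The two entries correspond to the two panels: March and
+        # September, each at 50 hPa (5000 Pa).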
+        # Climatology time period
+        start_year: 1991
+        end_year: 2002
+        # Calculate multimodel mean?
+        multimean: True
+
+
+  # ==========================================================================
+  # Eyring et al. (2006) - Figure 15
+  # Detrended time series
+  # ==========================================================================
+
+  eyring06jgr_fig15:
+    description: Total ozone anomalies at different latitudinal bands and seasons
+    themes:
+      - chem
+    realms:
+      - atmos
+    variables:
+      toz:
+        derive: true
+        force_derivation: false
+        preprocessor: zonal_o3_du
+        reference_dataset: NIWA-BS
+        mip: Amon
+        additional_datasets:
+          - {dataset: NIWA-BS, project: OBS, type: sat, version: v3.3,
+             start_year: 1980, end_year: 2005, tier: 3}
+    scripts:
+      fig15:
+        script: eyring06jgr/eyring06jgr_fig15.ncl
+        projection: CylindricalEquidistant
+        showdiff: false
+        # line style definition
+        styleset: CMIP5
+        # Minimum latitude for region selection
+        latmin: [60., -90., -90.]
+        # Maximum latitude for region selection
+        latmax: [90., -60., 90.]
+        # Time average
+        season: ['FMA', 'SON', 'ANN']
+        # Lower and upper limits for the Y axis - left plot
+        Ymin: [-25., -40., -10.]
+        Ymax: [15., 25., 10.]
+        # Lower and upper limits for the X axis - left plot
+        Xmin: 1960
+        Xmax: 2005
+        # Lower and upper limits for the X axis - right plot
+        cycle_yearmin: 1980
+        cycle_yearmax: 1989
+        # Lower and upper limits for the Y axis - right plot
+        cycle_Ymin: [250., 450., 260.]
+        cycle_Ymax: [550., 200., 380.]
+        # Calculate multimodel mean?
+        multimean: True
diff --git a/esmvaltool/recipes/recipe_eyring13jgr_12.yml b/esmvaltool/recipes/recipe_eyring13jgr_12.yml
new file mode 100644
index 0000000000..1b888765cd
--- /dev/null
+++ b/esmvaltool/recipes/recipe_eyring13jgr_12.yml
@@ -0,0 +1,137 @@
+# ESMValTool
+# recipe_eyring13jgr_12.yml
+---
+documentation:
+  title: Diagnostics of stratospheric dynamics and chemistry
+
+  description: |
+    Diagnostics of stratospheric dynamics and chemistry reproducing selected
+    figures from Eyring et al. JGR (2013).
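+    This recipe covers Figure 12, the long-term mean and trend of the
+    zonal-mean zonal wind.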
+ + authors: + - cionni_irene + + maintainer: + - bock_lisa + + references: + - eyring13jgr + + projects: + - crescendo + + +datasets: + # CMIP6 + - {dataset: ACCESS-CM2, grid: gn, institute: CSIRO-ARCCSS} + - {dataset: ACCESS-ESM1-5, grid: gn, institute: CSIRO} + - {dataset: AWI-CM-1-1-MR, grid: gn} + - {dataset: AWI-ESM-1-1-LR, grid: gn} + - {dataset: BCC-CSM2-MR, grid: gn} + - {dataset: BCC-ESM1, grid: gn} + - {dataset: CAMS-CSM1-0, grid: gn} + - {dataset: CanESM5, grid: gn} + - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn} + - {dataset: CAS-ESM2-0, institute: CAS, grid: gn} + - {dataset: CESM2, grid: gn} + - {dataset: CESM2-FV2, grid: gn, institute: NCAR} + - {dataset: CESM2-WACCM, grid: gn, institute: NCAR} + - {dataset: CESM2-WACCM-FV2, grid: gn, institute: NCAR} + - {dataset: CIESM} + - {dataset: CMCC-CM2-HR4, grid: gn} + - {dataset: CMCC-CM2-SR5, grid: gn} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2} + - {dataset: E3SM-1-0} + # - {dataset: E3SM-1-1, institute: E3SM-Project} # strange outliers + - {dataset: E3SM-1-1-ECA, institute: E3SM-Project} + # - {dataset: EC-Earth3} # empty folder + - {dataset: EC-Earth3-Veg} + - {dataset: EC-Earth3-Veg-LR} + - {dataset: FGOALS-f3-L} + - {dataset: FGOALS-g3, grid: gn} + # - {dataset: FIO-ESM-2-0, grid: gn} # variable ua: lat: is not monotonic + - {dataset: GFDL-CM4, grid: gr1} + - {dataset: GFDL-ESM4, grid: gr1} + - {dataset: GISS-E2-1-G, grid: gn} + - {dataset: GISS-E2-1-G-CC, grid: gn} + - {dataset: GISS-E2-1-H, grid: gn} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn} + - {dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f3, grid: gn} + - {dataset: IITM-ESM, grid: gn} + - {dataset: INM-CM4-8, grid: gr1} + - {dataset: INM-CM5-0, grid: gr1} + - {dataset: IPSL-CM6A-LR} + - {dataset: KACE-1-0-G} + - {dataset: KIOST-ESM, grid: gr1} + - {dataset: MCM-UA-1-0, grid: gn} + - {dataset: MIROC6, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn} + - {dataset: MPI-ESM-1-2-HAM, grid: gn} + - {dataset: MPI-ESM1-2-HR, grid: gn} + - {dataset: MPI-ESM1-2-LR, grid: gn} + - {dataset: MRI-ESM2-0, grid: gn} + - {dataset: NESM3, grid: gn} + # - {dataset: NorCPM1, grid: gn, institute: NCC} # empty directory + - {dataset: NorESM2-LM, grid: gn, institute: NCC} + - {dataset: NorESM2-MM, grid: gn, institute: NCC} + - {dataset: SAM0-UNICON, grid: gn} + - {dataset: TaiESM1, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + # CMIP5 + # - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1979, end_year: 2005} + # - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1979, end_year: 2005} + # - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1979, end_year: 2005} + + +preprocessors: + zonal: + regrid: + target_grid: 1x1 + scheme: linear + extract_levels: + scheme: linear + levels: reference_dataset + zonal_statistics: + operator: mean + mask_fillvalues: + threshold_fraction: 0.95 + + +diagnostics: + + # ========================================================================== + # Eyring et al. 
(2013) - Figure 12 + # Long-term mean and trend of the zonal wind + # ========================================================================== + + eyring13jgr_fig12: + description: Long-term mean and trend of the zonal wind + themes: + - chem + realms: + - atmos + variables: + ua: + preprocessor: zonal + reference_dataset: ERA5 + project: CMIP6 + mip: Amon + exp: historical + grid: gr + ensemble: r1i1p1f1 + start_year: 1995 + end_year: 2014 + additional_datasets: + - {dataset: ERA5, project: OBS6, type: reanaly, version: 1, + start_year: 1995, end_year: 2014, tier: 3} + scripts: + clim: &clim_settings + script: eyring13jgr/eyring13jgr_fig12.ncl + e13fig12_exp_MMM: "historical" + e13fig12_season: "DJF" + e13fig12_multimean: true diff --git a/esmvaltool/recipes/recipe_flato13ipcc.yml b/esmvaltool/recipes/recipe_flato13ipcc.yml deleted file mode 100644 index c9d63a4185..0000000000 --- a/esmvaltool/recipes/recipe_flato13ipcc.yml +++ /dev/null @@ -1,872 +0,0 @@ -# ESMValTool -# recipe_flato13ipcc.yml ---- -documentation: - - description: | - Reproducing selected figures from IPCC AR5, chap. 9 (Flato et al., 2013) - 9.2, 9.4, 9.5, 9.8, 9.42a. - - authors: - - bock_ls - - laue_ax - - schl_ma - - maintainer: - - righ_ma - - references: - - flato13ipcc - - projects: - - embrace - - esmval - - crescendo - - -preprocessors: - - clim: - regrid: - target_grid: 2x2 - scheme: linear - mask_fillvalues: - threshold_fraction: 0.95 - multi_model_statistics: - span: overlap - statistics: [mean] - exclude: [reference_dataset] - - spatial_mean: - average_region: - coord1: latitude - coord2: longitude - - clim_ref: - regrid: - target_grid: reference_dataset - scheme: linear - multi_model_statistics: - span: overlap - statistics: [mean] - exclude: [reference_dataset, alternative_dataset] - -diagnostics: - - # ********************************************************************** - # Flato et al. (2013) - IPCC AR5, chap. 9 - # similar to fig. 9.2 a/b/c - # ********************************************************************** - # Multi model mean, multi model mean bias, and mean absolute error - # (geographical distributions) - # ********************************************************************** - - fig09-2: - description: IPCC AR5 Ch. 9, Fig.
9.2 (near-surface temperature) - themes: - - phys - realms: - - atmos - variables: - tas: - preprocessor: clim - reference_dataset: ERA-Interim - mip: Amon - additional_datasets: - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, - start_year: 1986, end_year: 2005, tier: 3} - - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - # - {dataset: CESM1-CAM5-1-FV, project: CMIP5, exp: historical, - # ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - # - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, - # start_year: 1986, end_year: 2005} - # - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, - # start_year: 1986, end_year: 2005} - # - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, - # ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - # - {dataset: FGOALS-g2, project: CMIP5, exp: historical, - # ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - # - {dataset: GISS-E2-H, project: CMIP5, exp: historical, - # ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - # - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, - # ensemble: r1i1p1, start_year: 1986, end_year: 2005} - # - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, - # ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, - 
start_year: 1986, end_year: 2005} - - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - scripts: - fig09-2: &fig-9-2_and_9-4_settings - script: clouds/clouds_bias.ncl - projection: Robinson - timemean: annualclim - plot_abs_diff: true - plot_rel_diff: false - - # ********************************************************************** - # Flato et al. (2013) - IPCC AR5, chap. 9 - # similar to fig. 9.4 - # ********************************************************************** - # Multi model mean, multi model mean bias, mean absolute error, and - # mean relative error (geographical ditributions) - # ********************************************************************** - - fig09-4: - description: IPCC AR5 Ch. 9, Fig. 
9.4 (precipitation) - themes: - - clouds - realms: - - atmos - variables: - pr: - preprocessor: clim - reference_dataset: GPCP-SG - mip: Amon - additional_datasets: - - {dataset: GPCP-SG, project: obs4mips, level: L3, version: v2.2, - start_year: 1986, end_year: 2005, tier: 1} - - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - # - {dataset: CESM1-CAM5-1-FV, project: CMIP5, exp: historical, - # ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - # - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, - # start_year: 1986, end_year: 2005} - # - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, - # start_year: 1986, end_year: 2005} - # - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, - # ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - # - {dataset: FGOALS-g2, project: CMIP5, exp: historical, - # ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - # - {dataset: GISS-E2-H, project: CMIP5, exp: historical, - # ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - # - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, - # ensemble: r1i1p1, start_year: 1986, end_year: 2005} - # - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, - # ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 
1986, end_year: 2005} - - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - scripts: - fig09-4: - <<: *fig-9-2_and_9-4_settings - plot_abs_diff: true - plot_rel_diff: true - - # ********************************************************************** - # Flato et al. (2013) - IPCC AR5, chap. 9 - # similar to fig. 9.5 - # ********************************************************************** - # Difference of multi-model mean and reference data set (geographical - # distribution, annual mean) + zonal averages of individual models - # and multi-model mean (annual means). 
- # ********************************************************************** - - fig09-5a: - description: differences of multi-model mean and reference dataset - themes: - - clouds - realms: - - atmos - variables: - swcre: - preprocessor: clim - reference_dataset: CERES-EBAF - mip: Amon - derive: true - additional_datasets: - - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, - start_year: 2001, end_year: 2010, tier: 1} - - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - # - {dataset: CESM1-CAM5-1-FV, project: CMIP5, exp: historical, - # ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - # - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, - # start_year: 1986, end_year: 2005} - # - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, - # start_year: 1986, end_year: 2005} - # - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, - # ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - # - {dataset: FGOALS-g2, project: CMIP5, exp: historical, - # ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - # - {dataset: GISS-E2-H, project: CMIP5, exp: historical, - # ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - # - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, - # ensemble: r1i1p1, start_year: 1986, end_year: 2005} - # - {dataset: HadGEM2-ES, project: 
CMIP5, exp: historical, - # ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - scripts: - fig09-5a: &fig-9-5_settings - script: clouds/clouds_ipcc.ncl - projection: Robinson - colormap: WhiteBlueGreenYellowRed - timemean: annualclim - - fig09-5b: - description: differences of multi-model mean and reference dataset - themes: - - clouds - realms: - - atmos - variables: - lwcre: - preprocessor: clim - reference_dataset: CERES-EBAF - mip: Amon - derive: true - additional_datasets: - - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, - start_year: 2001, end_year: 2010, tier: 1} - - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - # - {dataset: CESM1-CAM5-1-FV, project: CMIP5, exp: historical, - # ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: 
CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - # - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, - # start_year: 1986, end_year: 2005} - # - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, - # start_year: 1986, end_year: 2005} - # - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, - # ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - # - {dataset: FGOALS-g2, project: CMIP5, exp: historical, - # ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - # - {dataset: GISS-E2-H, project: CMIP5, exp: historical, - # ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - # - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, - # ensemble: r1i1p1, start_year: 1986, end_year: 2005} - # - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, - # ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - scripts: - fig09-5b: - <<: *fig-9-5_settings - - fig09-5c: - description: differences of multi-model mean and reference dataset - themes: - - clouds - realms: 
- - atmos - variables: - netcre: - preprocessor: clim - reference_dataset: CERES-EBAF - mip: Amon - derive: true - additional_datasets: - - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, - start_year: 2001, end_year: 2010, tier: 1} - - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - # - {dataset: CESM1-CAM5-1-FV, project: CMIP5, exp: historical, - # ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - # - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, - # start_year: 1986, end_year: 2005} - # - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, - # start_year: 1986, end_year: 2005} - # - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, - # ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - # - {dataset: FGOALS-g2, project: CMIP5, exp: historical, - # ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - # - {dataset: GISS-E2-H, project: CMIP5, exp: historical, - # ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - # - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, - # ensemble: r1i1p1, start_year: 1986, end_year: 2005} - # - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, - # ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - 
{dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1986, end_year: 2005} - - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, - start_year: 1986, end_year: 2005} - scripts: - fig09-5c: - <<: *fig-9-5_settings - - - # ********************************************************************** - # Flato et al. (2013) - IPCC AR5, chap. 9 - # similar to fig. 9.8 - # ********************************************************************** - # Time series of anomalies of annual and global surface temperature - # ********************************************************************** - - fig09-8: - description: IPCC AR5 Ch. 9, Fig. 
9.8 (near-surface temperature) - themes: - - phys - realms: - - atmos - variables: - tas: - preprocessor: clim_ref - reference_dataset: HadCRUT4 - alternative_dataset: HadCRUT4-clim - mip: Amon - field: T2Ms - project: CMIP5 - exp: historical - ensemble: r1i1p1 - start_year: 1870 - end_year: 2004 - additional_datasets: - - {dataset: ACCESS1-0} - - {dataset: ACCESS1-3} - - {dataset: bcc-csm1-1} - - {dataset: bcc-csm1-1-m} - - {dataset: BNU-ESM} - #- {dataset: CanCM4} - - {dataset: CanESM2} - - {dataset: CCSM4} - - {dataset: CESM1-BGC} - - {dataset: CESM1-CAM5} - #- {dataset: CESM1-CAM5-1-FV2} - - {dataset: CESM1-FASTCHEM} - - {dataset: CESM1-WACCM} - - {dataset: CMCC-CESM} - - {dataset: CMCC-CM} - - {dataset: CMCC-CMS} - - {dataset: CNRM-CM5} - - {dataset: CNRM-CM5-2} - - {dataset: CSIRO-Mk3-6-0} - #- {dataset: EC-EARTH, ensemble: r6i1p1} - #- {dataset: FGOALS-g2} - #- {dataset: FGOALS-s2} - - {dataset: FIO-ESM} - - {dataset: GFDL-CM2p1} - - {dataset: GFDL-CM3} - - {dataset: GFDL-ESM2G} - - {dataset: GFDL-ESM2M} - - {dataset: GISS-E2-H, ensemble: r1i1p2} - - {dataset: GISS-E2-H-CC} - - {dataset: GISS-E2-R, ensemble: r1i1p2} - - {dataset: GISS-E2-R-CC} - - {dataset: HadCM3} - - {dataset: HadGEM2-AO} - - {dataset: HadGEM2-CC} - - {dataset: HadGEM2-ES} - - {dataset: inmcm4} - - {dataset: IPSL-CM5A-LR} - - {dataset: IPSL-CM5A-MR} - - {dataset: IPSL-CM5B-LR} - #- {dataset: MIROC4h} - - {dataset: MIROC5} - - {dataset: MIROC-ESM} - - {dataset: MIROC-ESM-CHEM} - - {dataset: MPI-ESM-LR} - - {dataset: MPI-ESM-MR} - - {dataset: MPI-ESM-P} - - {dataset: MRI-CGCM3} - - {dataset: MRI-ESM1} - - {dataset: NorESM1-M} - - {dataset: NorESM1-ME} - - {dataset: HadCRUT4, project: OBS, type: ground, version: 1, - start_year: 1870, end_year: 2017, tier: 2} - - {dataset: HadCRUT4-clim, project: OBS, type: ground, version: 1, - start_year: 1976, end_year: 1976, tier: 2} - scripts: - fig09-8: - script: ipcc_ar5/tsline.ncl - time_avg: "yearly" - ts_anomaly: "anom" #"anom" for anomaly-plot else "noanom" - ref_start: 1961 #start year of reference period for anomalies - ref_end: 1990 #end year of reference period for anomalies - range_option: 0 - plot_units: "degC" - # Plot style - styleset: CMIP5 - - - # ********************************************************************** - # Flato et al. (2013) - IPCC AR5, chap. 9 - # similar to fig. 9.42a - # ********************************************************************** - # - # ********************************************************************** - - ecs_cmip5: - description: Calculate ECS for CMIP5 models. 
- themes: - - EC - realms: - - atmos - variables: - tas: &ecs_settings - preprocessor: spatial_mean - project: CMIP5 - ensemble: r1i1p1 - mip: Amon - rtmt: - <<: *ecs_settings - additional_datasets: - - {dataset: bcc-csm1-1, exp: piControl, start_year: 160, end_year: 309} - - {dataset: bcc-csm1-1, exp: abrupt4xCO2, start_year: 160, end_year: 309} - - {dataset: bcc-csm1-1-m, exp: piControl, start_year: 240, end_year: 389} - - {dataset: bcc-csm1-1-m, exp: abrupt4xCO2, start_year: 240, end_year: 389} - - {dataset: CanESM2, exp: piControl, start_year: 2015, end_year: 2164} - - {dataset: CanESM2, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} - - {dataset: CCSM4, exp: piControl, start_year: 800, end_year: 949} - - {dataset: CCSM4, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} - - {dataset: CNRM-CM5, exp: piControl, start_year: 1850, end_year: 1999} - - {dataset: CNRM-CM5, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} - - {dataset: CSIRO-Mk3-6-0, exp: piControl, start_year: 1, end_year: 150} - - {dataset: CSIRO-Mk3-6-0, exp: abrupt4xCO2, start_year: 1, end_year: 150} - - {dataset: GFDL-CM3, exp: piControl, start_year: 1, end_year: 150} - - {dataset: GFDL-CM3, exp: abrupt4xCO2, start_year: 1, end_year: 150} - - {dataset: GISS-E2-H, exp: piControl, start_year: 1200, end_year: 1349} - - {dataset: GISS-E2-H, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} - - {dataset: GISS-E2-R, exp: piControl, start_year: 3331, end_year: 3480} - - {dataset: GISS-E2-R, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} - - {dataset: inmcm4, exp: piControl, start_year: 2090, end_year: 2239} - - {dataset: inmcm4, exp: abrupt4xCO2, start_year: 2090, end_year: 2239} - - {dataset: IPSL-CM5A-LR, exp: piControl, start_year: 1850, end_year: 1999} - - {dataset: IPSL-CM5A-LR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} - - {dataset: IPSL-CM5B-LR, exp: piControl, start_year: 1850, end_year: 1999} - - {dataset: IPSL-CM5B-LR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} - - {dataset: MIROC5, exp: piControl, start_year: 2100, end_year: 2249} - - {dataset: MIROC5, exp: abrupt4xCO2, start_year: 2100, end_year: 2249} - - {dataset: MIROC-ESM, exp: piControl, start_year: 1800, end_year: 1949} - - {dataset: MIROC-ESM, exp: abrupt4xCO2, start_year: 1, end_year: 150} - - {dataset: MPI-ESM-LR, exp: piControl, start_year: 2015, end_year: 2164} - - {dataset: MPI-ESM-LR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} - # Day is out of range for month (wait for iris > 2.0) - # - {dataset: ACCESS1-0, exp: piControl, start_year: 300, end_year: 449} - # - {dataset: ACCESS1-0, exp: abrupt4xCO2, start_year: 300, end_year: 449} - scripts: - ecs: &ecs_script - script: climate_metrics/ecs.py - plot_ecs_regression: true - - fig09-42a_cmip5: - description: Plot ECS vs. GMSAT for CMIP5 models. 
- themes: - - EC - - phys - realms: - - atmos - variables: - tas: - <<: *ecs_settings - additional_datasets: - - {dataset: bcc-csm1-1, exp: piControl, start_year: 160, end_year: 309} - - {dataset: bcc-csm1-1, exp: historical, start_year: 1961, end_year: 1990} - - {dataset: bcc-csm1-1-m, exp: piControl, start_year: 240, end_year: 389} - - {dataset: bcc-csm1-1-m, exp: historical, start_year: 1961, end_year: 1990} - - {dataset: CanESM2, exp: piControl, start_year: 2015, end_year: 2164} - - {dataset: CanESM2, exp: historical, start_year: 1961, end_year: 1990} - - {dataset: CCSM4, exp: piControl, start_year: 800, end_year: 949} - - {dataset: CCSM4, exp: historical, start_year: 1961, end_year: 1990} - - {dataset: CNRM-CM5, exp: piControl, start_year: 1850, end_year: 1999} - - {dataset: CNRM-CM5, exp: historical, start_year: 1961, end_year: 1990} - - {dataset: CSIRO-Mk3-6-0, exp: piControl, start_year: 1, end_year: 150} - - {dataset: CSIRO-Mk3-6-0, exp: historical, start_year: 1961, end_year: 1990} - - {dataset: GFDL-CM3, exp: piControl, start_year: 1, end_year: 150} - - {dataset: GFDL-CM3, exp: historical, start_year: 1961, end_year: 1990} - - {dataset: GISS-E2-H, exp: piControl, start_year: 1200, end_year: 1349} - - {dataset: GISS-E2-H, exp: historical, start_year: 1961, end_year: 1990} - - {dataset: GISS-E2-R, exp: piControl, start_year: 3331, end_year: 3480} - - {dataset: GISS-E2-R, exp: historical, start_year: 1961, end_year: 1990} - - {dataset: inmcm4, exp: piControl, start_year: 2090, end_year: 2239} - - {dataset: inmcm4, exp: historical, start_year: 1961, end_year: 1990} - - {dataset: IPSL-CM5A-LR, exp: piControl, start_year: 1850, end_year: 1999} - - {dataset: IPSL-CM5A-LR, exp: historical, start_year: 1961, end_year: 1990} - - {dataset: IPSL-CM5B-LR, exp: piControl, start_year: 1850, end_year: 1999} - - {dataset: IPSL-CM5B-LR, exp: historical, start_year: 1961, end_year: 1990} - - {dataset: MIROC5, exp: piControl, start_year: 2100, end_year: 2249} - - {dataset: MIROC5, exp: historical, start_year: 1961, end_year: 1990} - - {dataset: MIROC-ESM, exp: piControl, start_year: 1800, end_year: 1949} - - {dataset: MIROC-ESM, exp: historical, start_year: 1961, end_year: 1990} - - {dataset: MPI-ESM-LR, exp: piControl, start_year: 2015, end_year: 2164} - - {dataset: MPI-ESM-LR, exp: historical, start_year: 1961, end_year: 1990} - # Day is out of range for month (wait for iris > 2.0) - # - {dataset: ACCESS1-0, exp: piControl, start_year: 300, end_year: 449} - # - {dataset: ACCESS1-0, exp: historical, start_year: 1961, - # end_year: 1990} - scripts: - fig09-42a: &fig09_42a_script - script: ipcc_ar5/ch09_fig09_42a.py - ancestors: ['tas', 'ecs_cmip5/ecs'] - tas_units: celsius - save: - bbox_inches: tight - orientation: landscape - axes_functions: &axes_functions - set_title: GMSAT vs. ECS for CMIP5 models - set_xlabel: ECS / °C - set_ylabel: GMSAT / °C - set_xlim: [1.5, 5.0] - legend: - kwargs: - loc: center left - bbox_to_anchor: [1.05, 0.5] - borderaxespad: 0.0 - ncol: 2 - - ecs_cmip6: - description: Calculate ECS for CMIP6 models. 
- themes: - - EC - realms: - - atmos - variables: - tas: - <<: *ecs_settings - project: CMIP6 - rtmt: - <<: *ecs_settings - project: CMIP6 - additional_datasets: - # - {dataset: GISS-E2-1-G, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 4550, end_year: 4699} - # - {dataset: GISS-E2-1-G, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} - - {dataset: IPSL-CM6A-LR, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} - - {dataset: IPSL-CM6A-LR, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} - - {dataset: MIROC6, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 3200, end_year: 3349} - - {dataset: MIROC6, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 3200, end_year: 3349} - scripts: - ecs: - <<: *ecs_script - - fig09-42a_cmip6: - description: Plot ECS vs. GMSAT for CMIP6 models. - themes: - - EC - - phys - realms: - - atmos - variables: - tas: - <<: *ecs_settings - project: CMIP6 - additional_datasets: - # - {dataset: GISS-E2-1-G, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 4550, end_year: 4699} - # - {dataset: GISS-E2-1-G, exp: historical, ensemble: r1i1p1f1, grid: gn, start_year: 1961, end_year: 1990} - - {dataset: IPSL-CM6A-LR, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} - # TODO - - {dataset: IPSL-CM6A-LR, exp: historical, ensemble: r29i1p1f1, grid: gr, start_year: 1961, end_year: 1990} - - {dataset: MIROC6, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 3200, end_year: 3349} - - {dataset: MIROC6, exp: historical, ensemble: r1i1p1f1, grid: gn, start_year: 1961, end_year: 1990} - scripts: - fig09-42a: - <<: *fig09_42a_script - ancestors: ['tas', 'ecs_cmip6/ecs'] - axes_functions: - <<: *axes_functions - set_title: GMSAT vs. ECS for CMIP6 models - dataset_style: cmip6 diff --git a/esmvaltool/recipes/recipe_galytska23jgr.yml b/esmvaltool/recipes/recipe_galytska23jgr.yml new file mode 100644 index 0000000000..744a2a004f --- /dev/null +++ b/esmvaltool/recipes/recipe_galytska23jgr.yml @@ -0,0 +1,384 @@ +# ESMValTool +# recipe_galytska23jgr.yml +--- +documentation: + title: Arctic-Midlatitude Teleconnections recipe + + description: > + Recipe to select variables that reproduce Arctic-midlatitude + teleconnections for further application of + Causal Model Evaluation (CME) described in Galytska et al. (2023). + This recipe also applies the CVDP package, which requires the + following changes in cvdp/cvdp/driver.ncl before running: + modular = "True" and modular_list = "psl.nam_nao" + + + authors: + - galytska_evgenia + + maintainer: + - galytska_evgenia + + references: + - galytska23jgr + + projects: + - usmile + +preprocessors: + + bk_ice: + anomalies: + period: monthly + regrid: + target_grid: 1x1 + scheme: linear + extract_region: + start_latitude: 70. + end_latitude: 80. + start_longitude: 30. + end_longitude: 105. + area_statistics: + operator: mean + + ok_ice: + anomalies: + period: monthly + regrid: + target_grid: 1x1 + scheme: linear + extract_region: + start_latitude: 50. + end_latitude: 60. + start_longitude: 140. + end_longitude: 160. + area_statistics: + operator: mean + + pv: + anomalies: + period: monthly + regrid: + target_grid: 1x1 + scheme: linear + extract_levels: + levels: [10000., 7000., 5000., 3000., 2000., 1000.] + scheme: linear + extract_region: + start_latitude: 65. + end_latitude: 90. + start_longitude: 0. + end_longitude: 360.
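+ # Note: the 'pv' preprocessor above appears to build a polar-cap
+ # stratospheric mean, presumably as a polar vortex proxy (the 'pv'
+ # variable group below uses short_name zg): monthly anomalies are
+ # regridded to 1x1, interpolated to 100-10 hPa (10000.-1000. Pa),
+ # cut to 65-90N over all longitudes, and finally area-averaged by
+ # the area_statistics step that follows.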
+ area_statistics: + operator: mean + + pressure_sib: + anomalies: + period: monthly + regrid: + target_grid: 1x1 + scheme: linear + extract_region: + start_latitude: 40. + end_latitude: 65. + start_longitude: 85. + end_longitude: 120. + area_statistics: + operator: mean + + pressure_ural: + anomalies: + period: monthly + regrid: + target_grid: 1x1 + scheme: linear + extract_region: + start_latitude: 45. + end_latitude: 70. + start_longitude: 40. + end_longitude: 85. + area_statistics: + operator: mean + + pressure_aleut: + anomalies: + period: monthly + regrid: + target_grid: 1x1 + scheme: linear + extract_region: + start_latitude: 45. + end_latitude: 80. + start_longitude: 160. + end_longitude: 260. + area_statistics: + operator: mean + + pre_tas: + anomalies: + period: monthly + regrid: + target_grid: 1x1 + scheme: linear + extract_region: + start_latitude: 65. + end_latitude: 90. + start_longitude: 0. + end_longitude: 360. + area_statistics: + operator: mean + + pre_tas_baffin: + anomalies: + period: monthly + regrid: + target_grid: 1x1 + scheme: linear + extract_region: + start_latitude: 55. + end_latitude: 75. + start_longitude: 290. + end_longitude: 310. + area_statistics: + operator: mean + + pre_tas_sib: + anomalies: + period: monthly + regrid: + target_grid: 1x1 + scheme: linear + extract_region: + start_latitude: 60. + end_latitude: 70. + start_longitude: 20. + end_longitude: 100. + area_statistics: + operator: mean + + heat_flux: + regrid: + target_grid: 1x1 + scheme: linear + extract_levels: + levels: 10000 + scheme: linear + extract_region: + start_latitude: 45. + end_latitude: 75. + start_longitude: 0. + end_longitude: 360. + + zonal_wind: + anomalies: + period: monthly + regrid: + target_grid: 1x1 + scheme: linear + extract_region: + start_latitude: 50. + end_latitude: 70. + start_longitude: 0. + end_longitude: 360. 
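+ # Note: the box preprocessors in this recipe (bk_ice, ok_ice,
+ # pressure_sib, pressure_ural, pressure_aleut, pre_tas*, zonal_wind)
+ # all follow one pattern and differ only in the selected box; a
+ # minimal sketch of that pattern, with a hypothetical name:
+ # my_box_mean:
+ #   anomalies: {period: monthly}
+ #   regrid: {target_grid: 1x1, scheme: linear}
+ #   extract_region: {start_latitude: ..., end_latitude: ...,
+ #                    start_longitude: ..., end_longitude: ...}
+ #   area_statistics: {operator: mean}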
+ area_statistics: + operator: mean + +diagnostics: + + diagnostic_arctic: + description: Get the variables for Arctic-midlatitude processes + themes: + - phys + - seaIce + realms: + - atmos + - seaIce + variables: + pv: &my_settings + short_name: zg + preprocessor: pv + start_year: 1980 + end_year: 2021 + mip: Amon + project: CMIP6 + exp: + - historical + - ssp585 + ensemble: r1i1p1f1 + additional_datasets: + - {dataset: ERA5, project: native6, type: reanaly, version: v1, tier: 3} + - {dataset: ACCESS-CM2, grid: gn} + - {dataset: ACCESS-ESM1-5, grid: gn, supplementary_variables: [{short_name: areacella, skip: true}]} + - {dataset: BCC-CSM2-MR, grid: gn} + - {dataset: CAMS-CSM1-0, grid: gn} + - {dataset: CanESM5, grid: gn} + - {dataset: CMCC-CM2-SR5, grid: gn} + - {dataset: CMCC-ESM2, grid: gn} + - {dataset: EC-Earth3, grid: gr} + - {dataset: EC-Earth3-Veg, grid: gr} + - {dataset: EC-Earth3-Veg-LR, grid: gr} + - {dataset: GFDL-CM4, grid: gr1} + - {dataset: GFDL-ESM4, grid: gr1} + - {dataset: INM-CM4-8, grid: gr1} + - {dataset: INM-CM5-0, grid: gr1} + - {dataset: IPSL-CM6A-LR, grid: gr} + - {dataset: MPI-ESM1-2-HR, grid: gn} + - {dataset: MPI-ESM1-2-LR, grid: gn} + - {dataset: MIROC6, grid: gn} + - {dataset: MRI-ESM2-0, grid: gn} + tas: + <<: *my_settings + short_name: tas + preprocessor: pre_tas + + psl_Ural: + <<: *my_settings + short_name: psl + preprocessor: pressure_ural + + psl_Sib: + <<: *my_settings + short_name: psl + preprocessor: pressure_sib + + psl_Aleut: + <<: *my_settings + short_name: psl + preprocessor: pressure_aleut + + va: + <<: *my_settings + short_name: va + preprocessor: heat_flux + + ta: + <<: *my_settings + short_name: ta + preprocessor: heat_flux + + sic_BK_siconc: + <<: *my_settings + short_name: siconc + preprocessor: bk_ice + mip: SImon + additional_datasets: + - {dataset: ACCESS-CM2, grid: gn} + - {dataset: ACCESS-ESM1-5, grid: gn} + - {dataset: BCC-CSM2-MR, grid: gn} + - {dataset: CAMS-CSM1-0, grid: gn} + - {dataset: CanESM5, grid: gn} + - {dataset: CMCC-CM2-SR5, grid: gn, supplementary_variables: [{short_name: areacello, skip: true}]} + - {dataset: CMCC-ESM2, grid: gn, supplementary_variables: [{short_name: areacello, skip: true}]} + - {dataset: EC-Earth3, grid: gn} + - {dataset: EC-Earth3-Veg, grid: gn} + - {dataset: EC-Earth3-Veg-LR, grid: gn} + - {dataset: GFDL-CM4, grid: gr} + - {dataset: GFDL-ESM4, grid: gr} + - {dataset: INM-CM4-8, grid: gr1} + - {dataset: INM-CM5-0, grid: gr1} + - {dataset: IPSL-CM6A-LR, grid: gn} + - {dataset: MPI-ESM1-2-HR, grid: gn} + - {dataset: MPI-ESM1-2-LR, grid: gn} + - {dataset: MIROC6, grid: gn} + - {dataset: MRI-ESM2-0, grid: gn} + + sic_Ok_siconc: + <<: *my_settings + short_name: siconc + preprocessor: ok_ice + mip: SImon + additional_datasets: + - {dataset: ACCESS-CM2, grid: gn} + - {dataset: ACCESS-ESM1-5, grid: gn} + - {dataset: BCC-CSM2-MR, grid: gn} + - {dataset: CAMS-CSM1-0, grid: gn} + - {dataset: CanESM5, grid: gn} + - {dataset: CMCC-CM2-SR5, grid: gn, supplementary_variables: [{short_name: areacello, skip: true}]} + - {dataset: CMCC-ESM2, grid: gn, supplementary_variables: [{short_name: areacello, skip: true}]} + - {dataset: EC-Earth3, grid: gn} + - {dataset: EC-Earth3-Veg, grid: gn} + - {dataset: EC-Earth3-Veg-LR, grid: gn} + - {dataset: GFDL-CM4, grid: gr} + - {dataset: GFDL-ESM4, grid: gr} + - {dataset: INM-CM4-8, grid: gr1} + - {dataset: INM-CM5-0, grid: gr1} + - {dataset: IPSL-CM6A-LR, grid: gn} + - {dataset: MPI-ESM1-2-HR, grid: gn} + - {dataset: MPI-ESM1-2-LR, grid: gn} + - {dataset: MIROC6, grid: gn} + - 
{dataset: MRI-ESM2-0, grid: gn} + + sic_BK: + <<: *my_settings + short_name: sic + preprocessor: bk_ice + additional_datasets: + - {dataset: HadISST, project: OBS, mip: OImon, type: reanaly, version: 1, tier: 2} + + sic_Ok: + <<: *my_settings + short_name: sic + preprocessor: ok_ice + additional_datasets: + - {dataset: HadISST, project: OBS, mip: OImon, type: reanaly, version: 1, tier: 2} + + scripts: + output: + script: galytska23/select_variables_for_tigramite.py + plot_timeseries: True + variable_to_plot: Arctic_temperature + + diagnostic_cvdp: + description: Run the NCAR CVDP package. + variables: + ts: &cvdp_settings + short_name: ts + mip: Amon + start_year: 1980 + end_year: 2021 + project: CMIP6 + exp: + - historical + - ssp585 + ensemble: r1i1p1f1 + additional_datasets: + - {dataset: ERA5, project: native6, type: reanaly, version: v1, tier: 3} + - {dataset: ACCESS-ESM1-5, grid: gn} + - {dataset: ACCESS-CM2, grid: gn} + - {dataset: BCC-CSM2-MR, grid: gn} + - {dataset: CAMS-CSM1-0, grid: gn} + - {dataset: CanESM5, grid: gn} + - {dataset: CMCC-CM2-SR5, grid: gn} + - {dataset: CMCC-ESM2, grid: gn} + - {dataset: EC-Earth3, grid: gr} + - {dataset: EC-Earth3-CC, grid: gr} + - {dataset: EC-Earth3-Veg, grid: gr} + - {dataset: EC-Earth3-Veg-LR, grid: gr} + - {dataset: GFDL-CM4, grid: gr1} + - {dataset: GFDL-ESM4, grid: gr1} + - {dataset: INM-CM4-8, grid: gr1} + - {dataset: INM-CM5-0, grid: gr1} + - {dataset: IPSL-CM6A-LR, grid: gr} + - {dataset: MPI-ESM1-2-HR, grid: gn} + - {dataset: MPI-ESM1-2-LR, grid: gn} + - {dataset: MIROC6, grid: gn} + - {dataset: MRI-ESM2-0, grid: gn} + + tas: + <<: *cvdp_settings + short_name: tas + mip: Amon + + pr: + <<: *cvdp_settings + short_name: pr + mip: Amon + + psl: + <<: *cvdp_settings + short_name: psl + mip: Amon + + scripts: + cvdp: + script: cvdp/cvdp_wrapper.py diff --git a/esmvaltool/recipes/recipe_gier2020bg.yml b/esmvaltool/recipes/recipe_gier2020bg.yml new file mode 100644 index 0000000000..8bd84937e0 --- /dev/null +++ b/esmvaltool/recipes/recipe_gier2020bg.yml @@ -0,0 +1,536 @@ +# ESMValTool +# recipe_gier2020bg.yml +# The growing-season-temperature diagnostic requires the MODIS Land Cover 1 degree map in the +# auxiliary data directory; download it from: +# https://daac.ornl.gov/cgi-bin/dsviewer.pl?ds_id=968 or +# https://thredds.daac.ornl.gov/thredds/catalog/ornldaac/968/catalog.html +# NOTE: +# Commented-out datasets and options reflect the exact settings used to produce the paper. +# The commented-out datasets require additional preprocessing (e.g. converting from hybrid +# coordinates to pressure coordinates) or are otherwise currently not available on the DKRZ +# ESGF node, but may be available on other ESGF nodes if the user turns on automatic download +# in the run configuration. +--- +documentation: + + description: | + Analysis of column-averaged CO2 (XCO2) emission-driven model data in + comparison to satellite data (Gier et al., 2020).
+ + title: Column-averaged CO2 emission (model vs satellite data) + + authors: + - gier_bettina + + + maintainer: + - gier_bettina + + references: + - gier20bg + + projects: + - eval4cmip + - 4c + +# YAML anchors + +PROVENANCE_ANCHOR: &provenance + realms: + - atmos + themes: + - phys + - ghg + - carbon + +preprocessors: + + conv_units: &convert_units + convert_units: + units: ppmv + + mmm_ref: + <<: *convert_units + regrid: + target_grid: reference_dataset + scheme: linear + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [reference_dataset] + + mmm_2x2: + regrid: + target_grid: 2x2 + scheme: linear + multi_model_statistics: + span: overlap + statistics: [mean] + + mmm: + <<: *convert_units + regrid: + target_grid: 5x5 + scheme: linear + extract_levels: + levels: [100000., 92500., 85000., 70000., 60000., 50000., 40000., 30000., + 25000., 20000., 15000., 10000., 7000., 5000., 3000., 2000., + 1000.] + scheme: nearest + multi_model_statistics: + span: overlap + statistics: [mean] + +diagnostics: + + # CMIP6 + + cmip6_ensemble_analysis: + <<: *provenance + description: figure 3 + variables: + xco2: + preprocessor: conv_units + project: CMIP6 + mip: Amon + exp: esm-hist + start_year: 2003 + end_year: 2014 + reference_dataset: CDS-XCO2 + derive: true + additional_datasets: + - {dataset: CDS-XCO2, project: OBS, type: sat, version: L3, + tier: 3, derive: false} + # - {dataset: ACCESS-ESM1-5, ensemble: r(1:3)i1p1f1, grid: gn} + - {dataset: CanESM5, ensemble: r(1:7)i1p1f1, grid: gn} + - {dataset: CanESM5, ensemble: r(9:10)i1p1f1, grid: gn} + # - {dataset: CanESM5-CanOE, ensemble: r(1:3)i1p2f1, grid: gn} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f3, grid: gr} + - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1} + # - {dataset: MIROC-ES2L, ensemble: r(1:3)i1p1f2, grid: gn} + - {dataset: MPI-ESM1-2-LR, ensemble: r(1:10)i1p1f1, grid: gn} + # - {dataset: MRI-ESM2-0, ensemble: r1i2p1f1, grid: gn} + # - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} + # - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + scripts: + main_ensemble: + script: xco2_analysis/main.ncl + styleset: CMIP6 + output_file_type: "png" + region: [-90, 90] + # Possible masking options: obs + masking: ["obs"] + ensemble_mean: true + ensemble_refs: [["ACCESS-ESM1-5", "r1i1p1f1"], + ["CanESM5", "r1i1p1f1"], + ["CanESM5-CanOE", "r1i1p2f1"], + ["MIROC-ES2L", "r1i1p1f2"], + ["MPI-ESM1-2-LR", "r1i1p1f1"]] + + + cmip6_analysis: + description: figures (3), 4, 5, 6, 7, 9 + <<: *provenance + variables: + xco2: + preprocessor: conv_units + project: CMIP6 + mip: Amon + exp: esm-hist + start_year: 2003 + end_year: 2014 + reference_dataset: CDS-XCO2 + derive: true + additional_datasets: + - {dataset: CDS-XCO2, project: OBS, type: sat, version: L3, + tier: 3, derive: false} + # - {dataset: ACCESS-ESM1-5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CanESM5, ensemble: r1i1p1f1, grid: gn} + # - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f3, grid: gr} + - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1} + # - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn} + # - {dataset: MRI-ESM2-0, ensemble: r1i2p1f1, grid: gn} + # - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} + # - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + scripts: + maps: + script: xco2_analysis/global_maps.ncl + output_file_type: "png" + contour_max_level: 18 + main: + script: xco2_analysis/main.ncl + styleset: CMIP6 + 
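+ # Note: 'region' appears to give [lat_min, lat_max]; [-90, 90]
+ # selects all latitudes, while the panel plots below use narrower
+ # bands such as [30, 60].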
region: [-90, 90] + # Possible masking options: obs + masking: ["obs"] + ensemble_mean: false + output_file_type: "png" + var_plotname: "XCO~B~2~N~" + panels_unsampled: &panel_cfg + script: xco2_analysis/panel_plots.ncl + styleset: CMIP6 + output_file_type: "png" + region: [30, 60] + # Possible masking options: obs, land, sciamachy, gosat, "none" + masking: ["none"] + # Possible area averaging options: full-area, lat-first + area_avg: "full-area" + obs_in_panel: true + plot_var2_mean: true + var_plotname: "XCO~B~2~N~" + panels_obs: + <<: *panel_cfg + masking: ["obs"] + panels_scia: + <<: *panel_cfg + masking: ["sciamachy"] + obs_in_panel: false + panels_gosat: + <<: *panel_cfg + masking: ["gosat"] + area_avg: "full-area" + obs_in_panel: false + + sat_masks: + description: Figures 1, 8 + <<: *provenance + variables: + xco2: + project: CMIP6 + mip: Amon + exp: esm-hist + start_year: 2003 + end_year: 2014 + reference_dataset: CDS-XCO2 + derive: true + additional_datasets: + - {dataset: CDS-XCO2, project: OBS, type: sat, version: L3, + tier: 3, derive: false} + scripts: + sat_masks: + script: xco2_analysis/sat_masks.ncl + output_file_type: "png" + var_plotname: "XCO~B~2~N~" + c3s_plots: true + + cmip6_delta_T: + description: Figure C1 + <<: *provenance + variables: + xco2: + preprocessor: mmm_ref + project: CMIP6 + mip: Amon + exp: esm-hist + start_year: 2003 + end_year: 2014 + reference_dataset: CDS-XCO2 + derive: true + additional_datasets: + - {dataset: CDS-XCO2, project: OBS, type: sat, version: L3, + tier: 3, derive: false} + # - {dataset: ACCESS-ESM1-5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CanESM5, ensemble: r1i1p1f1, grid: gn} + # - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f3, grid: gr} + - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1} + # - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn} + # - {dataset: MRI-ESM2-0, ensemble: r1i2p1f1, grid: gn} + # - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} + # - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + tas: + preprocessor: mmm_2x2 + project: CMIP6 + mip: Amon + exp: esm-hist + start_year: 2003 + end_year: 2014 + additional_datasets: + # - {dataset: ACCESS-ESM1-5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CanESM5, ensemble: r1i1p1f1, grid: gn} + # - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f3, grid: gr} + - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1} + # - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn} + # - {dataset: MRI-ESM2-0, ensemble: r1i2p1f1, grid: gn} + # - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} + # - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + tasa: + mip: Amon + start_year: 2003 + end_year: 2014 + additional_datasets: + - {dataset: GISTEMP, project: OBS, type: ground, version: v4, + tier: 2} + scripts: + delta_T: + script: xco2_analysis/delta_T.ncl + styleset: CMIP6 + output_file_type: "png" + masking: obs + var_order: ["xco2", "tas", "tasa"] + region: [-30, 30] + + cmip6_station_comparison: + description: Figure 2 + <<: *provenance + variables: + xco2: + preprocessor: mmm_ref + project: CMIP6 + mip: Amon + exp: esm-hist + start_year: 2003 + end_year: 2014 + reference_dataset: CDS-XCO2 + derive: true + additional_datasets: + - {dataset: CDS-XCO2, project: OBS, type: sat, version: L3, + tier: 3, derive: false} + # - {dataset: 
ACCESS-ESM1-5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CanESM5, ensemble: r1i1p1f1, grid: gn} + # - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f3, grid: gr} + - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1} + # - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn} + # - {dataset: MRI-ESM2-0, ensemble: r1i2p1f1, grid: gn} + # - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} + # - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + co2: + preprocessor: mmm + project: CMIP6 + mip: Amon + exp: esm-hist + start_year: 2003 + end_year: 2014 + additional_datasets: + # - {dataset: ACCESS-ESM1-5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CanESM5, ensemble: r1i1p1f1, grid: gn} + # - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f3, grid: gr} + - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1} + # - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn} + # - {dataset: MRI-ESM2-0, ensemble: r1i2p1f1, grid: gn} + # - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} + # - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + co2s_obs: + preprocessor: conv_units + short_name: co2s + project: CMIP5 + mip: Amon + start_year: 2003 + end_year: 2014 + additional_datasets: + - {dataset: ESRL, project: OBS, type: ground, version: ASK, tier: 2} + - {dataset: ESRL, project: OBS, type: ground, version: CGO, tier: 2} + - {dataset: ESRL, project: OBS, type: ground, version: HUN, tier: 2} + # - {dataset: ESRL, project: OBS, type: ground, version: LEF, tier: 2} + - {dataset: ESRL, project: OBS, type: ground, version: WIS, tier: 2} + scripts: + station_comparison: + script: xco2_analysis/station_comparison.ncl + var_order: ["xco2", "co2", "co2s"] + output_file_type: "png" + var_plotnames: ["XCO~B~2~N~", "CO~B~2~N~"] + # Overwrite station altitudes in case timeseries considered uses a + # different altitude than the current one + overwrite_altitudes: [2710, 94, 248, 482] # LEF altitude: 868 to be inserted before 482 + output_map: false # true + + # CMIP5 + cmip5_analysis: + description: figures 3, 4, 8, 10-12 + <<: *provenance + variables: + xco2: + preprocessor: conv_units + project: CMIP5 + mip: Amon + exp: [esmHistorical, esmrcp85] + ensemble: r1i1p1 + start_year: 2003 + end_year: 2014 + reference_dataset: CDS-XCO2 + derive: true + additional_datasets: + - {dataset: CDS-XCO2, project: OBS, type: sat, version: L3, + tier: 3, derive: false} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CESM1-BGC} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + # - {dataset: FIO-ESM} + - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-ME} + scripts: + maps: + script: xco2_analysis/global_maps.ncl + contour_max_level: 18 + output_file_type: "png" + main: + script: xco2_analysis/main.ncl + styleset: CMIP5 + region: [-90, 90] + # Possible masking options: obs + masking: ["obs"] + ensemble_mean: false + output_file_type: "png" + var_plotname: "XCO~B~2~N~" + panels_unsampled: + <<: *panel_cfg + styleset: CMIP5 + panels_obs: + <<: *panel_cfg + styleset: CMIP5 + masking: ["obs"] + panels_scia: + <<: *panel_cfg + styleset: CMIP5 + masking: ["sciamachy"] + obs_in_panel: false + panels_gosat: + <<: *panel_cfg + styleset: CMIP5 + masking: ["gosat"] + area_avg: "full-area" + obs_in_panel: false + + cmip5_delta_T: + description: Plots XCO2 
IAV against growing season temperature anomaly + <<: *provenance + variables: + xco2: + preprocessor: mmm_ref + project: CMIP5 + mip: Amon + exp: [esmHistorical, esmrcp85] + ensemble: r1i1p1 + start_year: 2003 + end_year: 2014 + reference_dataset: CDS-XCO2 + derive: true + additional_datasets: + - {dataset: CDS-XCO2, project: OBS, type: sat, version: L3, + tier: 3, derive: false} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CESM1-BGC} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + # - {dataset: FIO-ESM} + - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-ME} + tas: + preprocessor: mmm_2x2 + project: CMIP5 + mip: Amon + exp: [esmHistorical, esmrcp85] + ensemble: r1i1p1 + start_year: 2003 + end_year: 2014 + additional_datasets: + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CESM1-BGC} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + # - {dataset: FIO-ESM} + - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-ME} + tasa: + mip: Amon + start_year: 2003 + end_year: 2014 + additional_datasets: + - {dataset: GISTEMP, project: OBS, type: ground, version: v4, + tier: 2} + scripts: + delta_T: + script: xco2_analysis/delta_T.ncl + styleset: CMIP5 + output_file_type: "png" + masking: obs + var_order: ["xco2", "tas", "tasa"] + region: [-30, 30] + + cmip5_station_comparison: + description: Comparison of CO2 and XCO2 at select NOAA-ESRL stations + <<: *provenance + variables: + xco2: + preprocessor: mmm_ref + project: CMIP5 + mip: Amon + exp: [esmHistorical, esmrcp85] + ensemble: r1i1p1 + start_year: 2003 + end_year: 2014 + reference_dataset: CDS-XCO2 + derive: true + additional_datasets: + - {dataset: CDS-XCO2, project: OBS, type: sat, version: L3, + tier: 3, derive: false} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CESM1-BGC} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + # - {dataset: FIO-ESM} + - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-ME} + co2: + preprocessor: mmm + project: CMIP5 + mip: Amon + exp: [esmHistorical, esmrcp85] + ensemble: r1i1p1 + start_year: 2003 + end_year: 2014 + additional_datasets: + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CESM1-BGC} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + # - {dataset: FIO-ESM} + - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-ME} + co2s_obs: + preprocessor: conv_units + short_name: co2s + project: CMIP5 + mip: Amon + start_year: 2003 + end_year: 2014 + additional_datasets: + - {dataset: ESRL, project: OBS, type: ground, version: ASK, tier: 2} + - {dataset: ESRL, project: OBS, type: ground, version: CGO, tier: 2} + - {dataset: ESRL, project: OBS, type: ground, version: HUN, tier: 2} + # - {dataset: ESRL, project: OBS, type: ground, version: LEF, tier: 2} + - {dataset: ESRL, project: OBS, type: ground, version: WIS, tier: 2} + scripts: + station_comparison: + script: xco2_analysis/station_comparison.ncl + var_order: ["xco2", "co2", "co2s"] + output_file_type: "png" + var_plotnames: ["XCO~B~2~N~", "CO~B~2~N~"] + # Overwrite station altitudes in case timeseries considered uses a + # different altitude than the current one + overwrite_altitudes: [2710, 94, 248, 482] # LEF altitude: 868 to be inserted before 482 + output_map: false # true # map only when all OBS data available diff --git a/esmvaltool/recipes/recipe_heatwaves_coldwaves.yml 
b/esmvaltool/recipes/recipe_heatwaves_coldwaves.yml new file mode 100644 index 0000000000..aae6491103 --- /dev/null +++ b/esmvaltool/recipes/recipe_heatwaves_coldwaves.yml @@ -0,0 +1,56 @@ +# ESMValTool +# recipe_heatwaves_coldwaves.yml +--- +documentation: + title: | + Heatwaves and coldwaves. + + description: | + Tool to compute the number of days exceeding a quantile + for a minimum number of consecutive days. + + authors: + - hunter_alasdair + - perez-zanon_nuria + - manubens_nicolau + - caron_louis-philippe + + maintainer: + - unmaintained + + projects: + - c3s-magic + +datasets: + # - {dataset: IPSL-CM5A-MR, type: exp, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1961, end_year: 1990} + # - {dataset: IPSL-CM5A-MR, type: exp, project: CMIP5, exp: rcp85, + # ensemble: r1i1p1, start_year: 2020, end_year: 2040} + - {dataset: bcc-csm1-1, type: exp, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1971, end_year: 2000, version: v1} + - {dataset: bcc-csm1-1, type: exp, project: CMIP5, exp: rcp85, + ensemble: r1i1p1, start_year: 2060, end_year: 2080} + +preprocessors: + preproc: + extract_region: + start_longitude: -20 + end_longitude: 60 + start_latitude: 30 + end_latitude: 80 + mask_landsea: + mask_out: sea +diagnostics: + heatwaves_coldwaves: + description: Calculate heatwaves and coldwaves. + variables: + tasmax: + preprocessor: preproc + mip: day + scripts: + main: + script: magic_bsc/extreme_spells.R + quantile: 0.80 # quantile defining the exceedance/non-exceedance threshold + min_duration: 5 # Min duration of a heatwave/coldwave event in days + operator: '>' # or '<' + season: summer # or winter diff --git a/esmvaltool/recipes/recipe_heatwaves_coldwaves_wp7.yml b/esmvaltool/recipes/recipe_heatwaves_coldwaves_wp7.yml deleted file mode 100644 index 8f2eaac348..0000000000 --- a/esmvaltool/recipes/recipe_heatwaves_coldwaves_wp7.yml +++ /dev/null @@ -1,50 +0,0 @@ -# ESMValTool -# recipe_heatwaves_coldwaves_wp7.yml ---- -documentation: - description: | - Tool to compute the number of days excedding a quantile - for a minimum number of consecutive days. - - authors: - - hunt_al - - pere_nu - - manu_ni - - caro_lo - - projects: - - c3s-magic - -datasets: -# - {dataset: IPSL-CM5A-MR, type: exp, project: CMIP5, mip: day, exp: historical, ensemble: r1i1p1, start_year: 1961, end_year: 1990} -# - {dataset: IPSL-CM5A-MR, type: exp, project: CMIP5, mip: day, exp: rcp85, ensemble: r1i1p1, start_year: 2020, end_year: 2040} - {dataset: bcc-csm1-1, type: exp, project: CMIP5, mip: day, exp: historical, ensemble: r1i1p1, start_year: 1971, end_year: 2000} - {dataset: bcc-csm1-1, type: exp, project: CMIP5, mip: day, exp: rcp85, ensemble: r1i1p1, start_year: 2060, end_year: 2080} -preprocessors: - preproc: - regrid: - target_grid: bcc-csm1-1 - scheme: linear - mask_fillvalues: - threshold_fraction: 0.95 - extract_region: - start_longitude: 220 - end_longitude: 320 - start_latitude: 30 - end_latitude: 80 - -diagnostics: - heatwaves_coldwaves: - description: Calculate heatwaves and coldwaves. 
- variables: - tasmin: - preprocessor: preproc - mip: day - scripts: - main: - script: magic_bsc/extreme_spells.r - quantile: 0.80 # quantile defining the exceedance/non-exceedance threshold - min_duration: 5 # Min duration of a heatwave/coldwave event in days - operator: '<' # or '>' - season: winter # or summer diff --git a/esmvaltool/recipes/recipe_hyint.yml b/esmvaltool/recipes/recipe_hyint.yml new file mode 100644 index 0000000000..bfa82b5eb7 --- /dev/null +++ b/esmvaltool/recipes/recipe_hyint.yml @@ -0,0 +1,113 @@ +# ESMValTool +# recipe_hyint.yml +--- +documentation: + title: Hydroclimatic Indices by HyInt + description: | + Recipe for the HyInt package by E. Arnone and J. von Hardenberg (ISAC-CNR) + + authors: + - arnone_enrico + - vonhardenberg_jost + + maintainer: + - arnone_enrico + + references: + - giorgi11jc + - giorgi14jgr + + projects: + - c3s-magic + +preprocessors: + preproc: + mask_landsea: + mask_out: sea + +diagnostics: + hyint: + description: HyInt hydroclimatic indices calculation and plotting + variables: + pr: + preprocessor: preproc + project: CMIP5 + exp: [historical, rcp85] + ensemble: r1i1p1 + start_year: 1980 + end_year: 2020 + reference_dataset: "ACCESS1-0" + mip: day + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: MPI-ESM-MR} + - {dataset: IPSL-CM5A-MR} + - {dataset: CCSM4} + scripts: + main: + script: hyint/hyint.R + + # Reference normalization period to be used for normalized indices + norm_years: [1980, 1999] + + # Select one or more indices for timeseries and maps from the following + # list (order-sensitive): + # "pa_norm", "hyint", "int_norm", "r95_norm", "wsl_norm", "dsl_norm", "int", "dsl", "wsl" + select_indices: ["pa_norm", "hyint", "int_norm", "r95_norm", "wsl_norm", "dsl_norm"] + + # Select regions for timeseries and maps from the following list + # GL=Globe, GL60=Globe 60S/60N, TR=Tropics (30S/30N), SA=South America, + # AF=Africa, NA=North America, IN=India, EU=Europe, EA=East-Asia, + # AU=Australia + select_regions: ["GL", "SA", "AF", "EU", "EA"] + + # Select type of plot: + # 1) single panel lon/lat map per individual index, multi-year mean + # 2) 3-panel lon/lat maps per individual index with comparison to reference dataset, multi-year mean + # 3) multipanel of indices of lon/lat maps with comparison to reference dataset, multi-year mean + # 11) single panel with timeseries over required individual region + # 12) multipanel of indices with timeseries over multiple regions + # 13) multipanel of indices with timeseries for multiple models + # 14) multipanel of indices with summary of trend coefficients over multiple regions + # 15) multipanel of indices with summary of trend coefficients for multiple models + plot_type: [1, 2, 3, 12, 13, 14, 15] + + ## ------- Optional settings ---------- + # Select number of columns and rows in multi-panel figure + npancol: 2 # number of columns for trend/tseries multi-panel figures + npanrow: 3 # number of rows for trend/tseries multi-panel figures + + # Define whether model data should be regridded + # a) false to keep original resolution + # b) set desired regridding resolution in cdo format e.g., "r320x160" + # c) "REF" to use resolution of reference model + rgrid: "REF" + + # Select automated or pre-set range of values in plots + autolevels: true + + # Scale autolevels (factor multiplying automated range) + autolevels_scale: 1 # for maps and timeseries + autolevels_scale_t: 1 # for trend coefficients + + # data and maps + removedesert: false # T to remove (flag as NA) grid points with mean 
+ # annual pr < 0.5 mm/day (deserts, Giorgi2014) + oplot_grid: false # plot grid points over maps + boxregion: false # !=0 plot region boxes over global maps with + # thickness = abs(boxregion); white (>0) or grey (<0). + + # timeseries and trends + weight_tseries: true # adopt area weights in timeseries + trend_years: false # (a) F=all; (b) c(year1,year2) to apply trend + # calculation and plotting only to a limited + # time interval + add_trend: true # add linear trend to plot + add_trend_sd: true # add stdev range to timeseries + add_trend_sd_shade: true # add shade of stdev range to timeseries + add_tseries_lines: true # plot lines of timeseries over points + add_zeroline: true # plot a dashed line at y=0 + trend_years_only: false # limit timeseries plotting to trend_years + # time interval + scale100years: true # plot trends as 1/100 years + scalepercent: false # plot trends as % change (excluding hyint) diff --git a/esmvaltool/recipes/recipe_hyint_extreme_events.yml b/esmvaltool/recipes/recipe_hyint_extreme_events.yml new file mode 100644 index 0000000000..1438301fb4 --- /dev/null +++ b/esmvaltool/recipes/recipe_hyint_extreme_events.yml @@ -0,0 +1,152 @@ +# ESMValTool +# recipe_hyint_extreme_events.yml +--- +documentation: + title: Extreme events and HyInt diagnostics + + description: | + Recipe calling the extreme_events diagnostic followed by the hyint diagnostic + + authors: + - arnone_enrico + - vonhardenberg_jost + - broetz_bjoern + - sandstad_marit + - mohr_christianwilhelm + + maintainer: + - arnone_enrico + + references: + - giorgi11jc + - giorgi14jgr + - zhang11wcc + + projects: + - c3s-magic + +datasets: + - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1980, end_year: 1985} + - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1980, end_year: 1985} + +preprocessors: + preproc: + mask_landsea: + mask_out: sea + +diagnostics: + hyint_etccdi: + description: Extreme events followed by HyInt hydroclimatic indices calculation and plotting + variables: + pr: + preprocessor: preproc + reference_dataset: "ACCESS1-0" + mip: day + tasmax: + preprocessor: preproc + reference_dataset: "ACCESS1-0" + mip: day + tasmin: + preprocessor: preproc + reference_dataset: "ACCESS1-0" + mip: day + + scripts: + extreme_events: + script: extreme_events/extreme_events.R + reference_datasets: ["ACCESS1-0", "ACCESS1-0", "ACCESS1-0"] + regrid_dataset: ACCESS1-0 + mip_name: CMIP + timeseries_idx: ["tn10pETCCDI_yr", "tn90pETCCDI_yr", "tx10pETCCDI_yr", "tx90pETCCDI_yr"] + gleckler_idx: ["tn10pETCCDI_yr", "tn90pETCCDI_yr", "tx10pETCCDI_yr", "tx90pETCCDI_yr"] + ts_plt: false + glc_plt: false + base_range: [1980, 1983] + analysis_range: [1980, 1985] + + hyint: + script: hyint/hyint.R + ancestors: [pr, extreme_events] + + # Reference normalization period to be used for normalized indices + norm_years: [1980, 1983] + + # Call preprocessing for ETCCDI indices calculated with climdex + etccdi_preproc: true + + # Specify external path to repository of ETCCDI indices + # (unless these are calculated in the same directory) + #etccdi_dir: "./work/hyint/main" + + # Select indices to be imported from the extreme_events metrics: + etccdi_list_import: ["tn10pETCCDI", "tn90pETCCDI", "tx10pETCCDI", "tx90pETCCDI"] + + # Select one or more indices for timeseries and maps from the following + # list (order-sensitive): + # "pa_norm", "hyint", "int_norm", "r95_norm", "wsl_norm", "dsl_norm", "int", "dsl", "wsl" + # "altcddETCCDI", "altcsdiETCCDI", 
"altcwdETCCDI", + # "altwsdiETCCDI", "cddETCCDI", "csdiETCCDI", "cwdETCCDI", + # "dtrETCCDI", "fdETCCDI", "gslETCCDI", "idETCCDI", + # "prcptotETCCDI", "r10mmETCCDI", "r1mmETCCDI", "r20mmETCCDI", + # "r95pETCCDI", "r99pETCCDI", "rx1dayETCCDI", "rx5dayETCCDI", + # "sdiiETCCDI", "suETCCDI", "tn10pETCCDI", "tn90pETCCDI", + # "tnnETCCDI", "tnxETCCDI", "trETCCDI", "tx10pETCCDI", + # "tx90pETCCDI", "txnETCCDI", "txxETCCDI", "wsdiETCCDI" + select_indices: ["pa_norm", "hyint", "tn10pETCCDI", "tn90pETCCDI", "tx10pETCCDI", "tx90pETCCDI"] + + # Select regions for timeseries and maps from the following list + # GL=Globe, GL60=Globe 60S/60N, TR=Tropics (30S/30N), SA=South America, + # AF =Africa,6NA=North America, IN=India, EU=Europe, EA=East-Asia, + # AU=Australia + select_regions: ["GL", "GL60", "TR"] + + # Select type of plot: + # 1) single panel lon/lat map per individual index, multi-year mean + # 2) 3-panel lon/lat maps per individual index with comparison to reference dataset, multi-year mean + # 3) multipanel of indices of lon/lat maps with comparison to reference dataset, multi-year mean + # 11) single panel with timeseries over required individual region + # 12) multipanel of indices with timeseries over multiple regions + # 13) multipanel of indices with timeseries for multiple models + # 14) multipanel of indices with summary of trend coefficients over multiple regions + # 15) multipanel of indices with summary of trend coefficients for multiple models + plot_type: [3, 12, 13, 15] + + ## ------- Optional settings ---------- + # Select number of columns and rows in multi-panel figure + npancol: 2 # number of columns for trend/tseries multi-panel figures + npanrow: 3 # number of rows for trend/tseries multi-panel figures + + # Define whether model data should be regridded + # a) false to keep original resolution + # b) set desired regridding resolution in cdo format e.g., "r320x160" + # c) "REF" to use resolution of reference model + rgrid: REF + + # Select automated or pre-set range of values in plots + autolevels: true + + # Scale autolevels (factor multiplying automated range) + autolevels_scale: 1 # for maps and timeseries + autolevels_scale_t: 1 # for trend coefficients + + # data and maps + removedesert: false # T to remove (flag as NA) grid points with mean + # annual pr < 0.5 mm/day (deserts, Giorgi2014) + oplot_grid: false # plot grid points over maps + boxregion: false # !=0 plot region boxes over global maps with + # thickness = abs(boxregion); white (>0) or grey (<0). 
+ + # timeseries and trends + weight_tseries: true # adopt area weights in timeseries + trend_years: false # (a) F=all; (b) c(year1,year2) to apply trend + # calculation and plotting only to a limited + # time interval + add_trend: true # add linear trend to plot + add_trend_sd: true # add stdev range to timeseries + add_trend_sd_shade: true # add shade of stdev range to timeseries + add_tseries_lines: true # plot lines of timeseries over points + add_zeroline: true # plot a dashed line at y=0 + trend_years_only: false # limit timeseries plotting to trend_years + # time interval + scale100years: true # plot trends as 1/100 years + scalepercent: false # plot trends as % change (excluding hyint) diff --git a/esmvaltool/recipes/recipe_iht_toa.yml b/esmvaltool/recipes/recipe_iht_toa.yml new file mode 100644 index 0000000000..19e05f5ac2 --- /dev/null +++ b/esmvaltool/recipes/recipe_iht_toa.yml @@ -0,0 +1,119 @@ +# ESMValTool +# recipe_iht_toa.yml +--- +documentation: + title: Implied heat transport + + description: | + Horizontal implied heat transport of the entire column derived from + TOA radiative fluxes. + +# The 'single_model' script produces Figures 1 to 6 in Pearce and +# Bodas-Salcedo (2023) for each of the datasets chosen. + + authors: + - pearce_francesca + - bodas-salcedo_alejandro + + maintainer: + - bodas-salcedo_alejandro + + references: + - pearce23jclim + +datasets: + # The CERES-EBAF observations reproduce the results of Pearce and + # Bodas-Salcedo (2023), but the recipe can also be applied to model data. + - {dataset: CERES-EBAF, project: obs4MIPs, tier: 1, start_year: 2001, end_year: 2010} + # - {dataset: HadGEM3-GC31-LL, project: CMIP6, exp: amip, ensemble: r1i1p1f3, grid: gn, mip: Amon, start_year: 1990, end_year: 2005} + +preprocessors: + # It is recommended to use a low-resolution grid, as the + # calculations are computationally expensive. + common_grid: + regrid: + target_grid: 5.0x5.0 + scheme: area_weighted + + climate_mean_global: + regrid: + target_grid: 5.0x5.0 + scheme: area_weighted + climate_statistics: + operator: mean + +diagnostics: + + implied_heat_transport: + title: TOA implied heat transports + description: Energy flux potential and implied heat transports for TOA radiative fluxes. 
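(Editor's note, not recipe content: the actual computation lives in iht_toa/single_model_diagnostics.py. As a rough, hedged sketch of the underlying idea only — the function and variable names below are illustrative and assume a zonal-mean net downward TOA flux on a latitude grid — the implied northward heat transport is that flux integrated from the pole:)

import numpy as np

def implied_heat_transport(lat_deg, net_toa):
    # Sketch: meridional heat transport (PW) implied by a zonal-mean net
    # downward TOA flux (W m-2); lat_deg in degrees, ordered south to north.
    earth_radius = 6.371e6  # m
    phi = np.deg2rad(np.asarray(lat_deg, dtype=float))
    flux = np.asarray(net_toa, dtype=float)
    # Remove the area-weighted global mean so the integral closes at the pole.
    flux = flux - np.average(flux, weights=np.cos(phi))
    integrand = 2.0 * np.pi * earth_radius**2 * flux * np.cos(phi)
    # Trapezoidal cumulative integral from the south pole northwards.
    steps = 0.5 * (integrand[1:] + integrand[:-1]) * np.diff(phi)
    return np.concatenate(([0.0], np.cumsum(steps))) / 1e15  # W -> PW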
+ + themes: + - phys + realms: + - atmos + variables: + rtnt: + derive: true + short_name: rtnt + mip: Amon + preprocessor: climate_mean_global + rtnt_monthly: + derive: true + short_name: rtnt + mip: Amon + preprocessor: common_grid + rsnt: + derive: true + short_name: rsnt + mip: Amon + preprocessor: climate_mean_global + rlut: + short_name: rlut + mip: Amon + preprocessor: climate_mean_global + rlutcs: + short_name: rlutcs + mip: Amon + preprocessor: climate_mean_global + rlutcs_monthly: + short_name: rlutcs + mip: Amon + preprocessor: common_grid + rsutcs: + short_name: rsutcs + mip: Amon + preprocessor: climate_mean_global + rsutcs_monthly: + short_name: rsutcs + mip: Amon + preprocessor: common_grid + rsut: + short_name: rsut + mip: Amon + preprocessor: climate_mean_global + rsdt: + short_name: rsdt + mip: Amon + preprocessor: climate_mean_global + rsdt_monthly: + short_name: rsdt + mip: Amon + preprocessor: common_grid + netcre: + derive: true + short_name: netcre + mip: Amon + preprocessor: climate_mean_global + swcre: + derive: true + short_name: swcre + mip: Amon + preprocessor: climate_mean_global + lwcre: + derive: true + short_name: lwcre + mip: Amon + preprocessor: climate_mean_global + scripts: + single_model: + script: iht_toa/single_model_diagnostics.py diff --git a/esmvaltool/recipes/recipe_impact.yml b/esmvaltool/recipes/recipe_impact.yml new file mode 100644 index 0000000000..8db9a34196 --- /dev/null +++ b/esmvaltool/recipes/recipe_impact.yml @@ -0,0 +1,249 @@ +# ESMValTool +# recipe_impact.yml +--- +documentation: + title: Quick insight for climate impact researchers + description: > + This recipe provides quick insight into the spread and performance of multiple models. + + Many impact researchers do not have the time and finances to use a large + ensemble of climate model runs for their impact analysis. To get an idea of + the range of impacts of climate change it also suffices to use a small + number of climate model runs. In case a system is only sensitive to annual + temperature, one can select a run with a high change and one with a low + change of annual temperature, preferably both with a low bias. + + Results of this recipe can be viewed interactively at + https://esmvaltool.dkrz.de/shared/esmvaltool/climate4impact/. + + Notes: + + - Bias is calculated with respect to the ERA5 reanalysis dataset over the period 1981-2015. + - Future change is calculated for 2036-2065 as compared to 1986-2015. 
+ - Area is set to Europe (lon 0-39; lat 30-76.25) + - All data are taken from the RCP/SSP 8.5 scenario + + authors: + - kalverla_peter + - andela_bouwe + maintainer: + - kalverla_peter + project: + - isenes3 + +datasets: + # CMIP5 + - {project: CMIP5, exp: [historical, rcp85], dataset: ACCESS1-0, ensemble: r1i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: ACCESS1-3, ensemble: r1i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: bcc-csm1-1, ensemble: r1i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: bcc-csm1-1-m, ensemble: r1i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: BNU-ESM, ensemble: r1i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: CanESM2, ensemble: r(1:5)i1p1, + supplementary_variables: [{short_name: areacella, mip: fx, ensemble: r0i0p0, exp: historical}]} + - {project: CMIP5, exp: [historical, rcp85], dataset: CCSM4, ensemble: r(1:6)i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: CESM1-BGC, ensemble: r1i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: CESM1-CAM5, ensemble: r(1:3)i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: CESM1-WACCM, ensemble: r(2:4)i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: CMCC-CESM, ensemble: r1i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: CMCC-CM, ensemble: r1i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: CMCC-CMS, ensemble: r1i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: CNRM-CM5, ensemble: r(1:2)i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: CNRM-CM5, ensemble: r4i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: CNRM-CM5, ensemble: r6i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: CNRM-CM5, ensemble: r10i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: CSIRO-Mk3-6-0, ensemble: r(1:10)i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: EC-EARTH, ensemble: r(1:2)i1p1} + # The EC-EARTH r6i1p1, r7i1p1, r14i1p1 have issues in the time coordinate, + # see https://github.com/ESMValGroup/ESMValTool/pull/2563 for details + - {project: CMIP5, exp: [historical, rcp85], dataset: EC-EARTH, ensemble: r(8:9)i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: EC-EARTH, ensemble: r(12:13)i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: FGOALS-g2, ensemble: r1i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: FGOALS-s2, ensemble: r(1:3)i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: FIO-ESM, ensemble: r(1:3)i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: GFDL-CM3, ensemble: r1i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: GFDL-ESM2G, ensemble: r1i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: GFDL-ESM2M, ensemble: r1i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: GISS-E2-H, ensemble: r1i1p2} + - {project: CMIP5, exp: [historical, rcp85], dataset: GISS-E2-H, ensemble: r(1:2)i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: GISS-E2-H, ensemble: r(1:2)i1p3} + - {project: CMIP5, exp: [historical, rcp85], dataset: GISS-E2-H-CC, ensemble: r1i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: GISS-E2-R, ensemble: r1i1p2} + - {project: CMIP5, exp: [historical, rcp85], dataset: GISS-E2-R, ensemble: r(1:2)i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: GISS-E2-R, ensemble: r(1:2)i1p3} + - {project: CMIP5, exp: [historical, rcp85], dataset: GISS-E2-R-CC, ensemble: r1i1p1} + - {project: CMIP5, exp: [historical, 
rcp85], dataset: HadGEM2-AO, ensemble: r1i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: HadGEM2-CC, ensemble: r(1:3)i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: HadGEM2-ES, ensemble: r(1:4)i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: inmcm4, ensemble: r1i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: IPSL-CM5A-LR, ensemble: r(1:4)i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: IPSL-CM5A-MR, ensemble: r1i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: IPSL-CM5B-LR, ensemble: r1i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: MIROC-ESM, ensemble: r1i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: MIROC-ESM-CHEM, ensemble: r1i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: MIROC5, ensemble: r(1:3)i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: MPI-ESM-LR, ensemble: r(1:3)i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: MPI-ESM-MR, ensemble: r1i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: MRI-CGCM3, ensemble: r1i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: MRI-ESM1, ensemble: r1i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: NorESM1-M, ensemble: r1i1p1} + - {project: CMIP5, exp: [historical, rcp85], dataset: NorESM1-ME, ensemble: r1i1p1} + # CMIP6 + - {project: CMIP6, exp: [historical, ssp585], dataset: ACCESS-CM2, ensemble: r(1:5)i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: ACCESS-ESM1-5, ensemble: r(1:40)i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: AWI-CM-1-1-MR, ensemble: r1i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: CAMS-CSM1-0, ensemble: r(1:2)i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: CanESM5, ensemble: r(1:25)i1p(1:2)f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: CanESM5-1, institute: CCCma, ensemble: r(1:10)i1p(1:2)f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: CanESM5-CanOE, ensemble: r(1:3)i1p2f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: CAS-ESM2-0, ensemble: r1i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: CAS-ESM2-0, ensemble: r3i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: CESM2, ensemble: r4i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: CESM2, ensemble: r(10:11)i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: CESM2-WACCM, ensemble: r(1:3)i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: CIESM, ensemble: r1i1p1f1, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: CMCC-ESM2, ensemble: r1i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: CNRM-CM6-1, ensemble: r(1:6)i1p1f2, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: CNRM-ESM2-1, ensemble: r(1:5)i1p1f2, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: E3SM-1-0, ensemble: r(1:5)i1p1f1, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: E3SM-1-1, ensemble: r1i1p1f1, grid: gr} + - {project: CMIP6, exp: [historical, 
ssp585], dataset: E3SM-1-1-ECA, ensemble: r1i1p1f1, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: EC-Earth3, ensemble: r1i1p1f1, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: EC-Earth3, ensemble: r(3:4)i1p1f1, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: EC-Earth3, ensemble: r6i1p1f1, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: EC-Earth3, ensemble: r9i1p1f1, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: EC-Earth3, ensemble: r11i1p1f1, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: EC-Earth3, ensemble: r13i1p1f1, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: EC-Earth3, ensemble: r15i1p1f1, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: EC-Earth3, ensemble: r(101:150)i1p1f1, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: EC-Earth3-CC, ensemble: r1i1p1f1, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: EC-Earth3-Veg, ensemble: r(1:4)i1p1f1, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: EC-Earth3-Veg, ensemble: r6i1p1f1, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: EC-Earth3-Veg, ensemble: r10i1p1f1, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: EC-Earth3-Veg, ensemble: r12i1p1f1, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: EC-Earth3-Veg, ensemble: r14i1p1f1, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: EC-Earth3-Veg-LR, ensemble: r(1:3)i1p1f1, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: FGOALS-g3, ensemble: r(1:4)i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: FIO-ESM-2-0, ensemble: r(1:3)i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1} + - {project: CMIP6, exp: [historical, ssp585], dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1} + - {project: CMIP6, exp: [historical, ssp585], dataset: GISS-E2-1-G, ensemble: r(1:4)i1p5f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: GISS-E2-1-G, ensemble: r(1:5)i1p1f2, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: GISS-E2-1-G, ensemble: r(1:5)i1p3f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: GISS-E2-1-H, ensemble: r(1:5)i1p1f2, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: GISS-E2-1-H, ensemble: r(1:5)i1p3f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: GISS-E2-2-G, ensemble: r(1:5)i1p3f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: HadGEM3-GC31-LL, ensemble: r(1:4)i1p1f3, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: HadGEM3-GC31-MM, ensemble: r(1:4)i1p1f3, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: IITM-ESM, ensemble: r1i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: INM-CM4-8, ensemble: r1i1p1f1, grid: gr1} + - {project: CMIP6, exp: [historical, ssp585], dataset: INM-CM5-0, ensemble: r1i1p1f1, grid: gr1} + - {project: CMIP6, exp: [historical, ssp585], dataset: IPSL-CM6A-LR, ensemble: r(1:4)i1p1f1, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: IPSL-CM6A-LR, ensemble: r6i1p1f1, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: IPSL-CM6A-LR, ensemble: r14i1p1f1, grid: gr} + - {project: CMIP6, 
exp: [historical, ssp585], dataset: IPSL-CM6A-LR, ensemble: r33i1p1f1, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: KACE-1-0-G, ensemble: r(1:3)i1p1f1, grid: gr} + - {project: CMIP6, exp: [historical, ssp585], dataset: KIOST-ESM, ensemble: r1i1p1f1, grid: gr1} + - {project: CMIP6, exp: [historical, ssp585], dataset: MCM-UA-1-0, ensemble: r1i1p1f2, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: MIROC-ES2L, ensemble: r(1:10)i1p1f2, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: MIROC6, ensemble: r(1:50)i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: MPI-ESM1-2-HR, ensemble: r(1:2)i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: MPI-ESM1-2-LR, ensemble: r(1:30)i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: MRI-ESM2-0, ensemble: r1i2p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: MRI-ESM2-0, ensemble: r(1:5)i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: NESM3, ensemble: r(1:2)i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: NorESM2-MM, ensemble: r1i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: UKESM1-0-LL, ensemble: r(1:4)i1p1f2, grid: gn} + - {project: CMIP6, exp: [historical, ssp585], dataset: UKESM1-0-LL, ensemble: r8i1p1f2, grid: gn} + +observations: &observations + - {mip: Amon, dataset: ERA5, project: native6, type: reanaly, version: v1, tier: 3, tag: observations} + +preprocessors: + + calculate_anomalies: + custom_order: true + extract_region: ®ion + start_longitude: 0.0 + end_longitude: 39.0 + start_latitude: 30.0 + end_latitude: 76.25 + area_statistics: + operator: mean + anomalies: + period: full + reference: &reference_period + start_year: 1986 + start_month: 1 + start_day: 1 + end_year: 2015 + end_month: 12 + end_day: 31 + standardize: false + extract_time: # Future period + start_year: 2035 + start_month: 1 + start_day: 1 + end_year: 2065 + end_month: 12 + end_day: 31 + climate_statistics: + operator: mean + + prepare_for_bias_calculation: + regrid: + scheme: linear + target_grid: 2.5x2.5 + extract_region: + <<: *region + extract_time: + <<: *reference_period + +diagnostics: + bias_and_change: + description: > + Provide quick insights into the behaviour of each model by plotting the + bias with respect to observations and change with respect to the reference + period. 
+ + themes: + - phys + realms: + - atmos + + variables: + tas_change: + short_name: tas + mip: Amon + tag: model + timerange: '1981/2099' + preprocessor: calculate_anomalies + pr_change: + short_name: pr + mip: Amon + tag: model + timerange: '1981/2099' + preprocessor: calculate_anomalies + tas_bias: + short_name: tas + mip: Amon + tag: model + timerange: '1981/2010' + preprocessor: prepare_for_bias_calculation + additional_datasets: *observations + pr_bias: + short_name: pr + mip: Amon + tag: model + timerange: '1981/2010' + preprocessor: prepare_for_bias_calculation + additional_datasets: *observations + + scripts: + visualize: + script: impact/bias_and_change.py diff --git a/esmvaltool/recipes/recipe_kcs.yml b/esmvaltool/recipes/recipe_kcs.yml new file mode 100644 index 0000000000..6eebb2c0e6 --- /dev/null +++ b/esmvaltool/recipes/recipe_kcs.yml @@ -0,0 +1,166 @@ +# ESMValTool +# recipe_kcs.yml +--- +documentation: + title: Reproduce KNMI '14 Climate Scenarios + description: > + This recipe reproduces the basic steps described in Lenderink 2014, + one scenario at a time. + + references: + - lenderink14erl + + authors: + - rol_evert + - kalverla_peter + - alidoost_sarah + + maintainer: + - unmaintained + + projects: + - eucp + + +cmip5: &cmip5 + - {dataset: ACCESS1-0, project: CMIP5, mip: Amon, exp: [historical, rcp45], ensemble: r1i1p1, start_year: 1961, end_year: 2099} + - {dataset: ACCESS1-0, project: CMIP5, mip: Amon, exp: [historical, rcp85], ensemble: r1i1p1, start_year: 1961, end_year: 2099} + + - {dataset: ACCESS1-3, project: CMIP5, mip: Amon, exp: [historical, rcp45], ensemble: r1i1p1, start_year: 1961, end_year: 2099} + - {dataset: ACCESS1-3, project: CMIP5, mip: Amon, exp: [historical, rcp85], ensemble: r1i1p1, start_year: 1961, end_year: 2099} + + - {dataset: CanESM2, project: CMIP5, mip: Amon, exp: [historical, rcp45], ensemble: "r(1:5)i1p1", start_year: 1961, end_year: 2099} + + - {dataset: CCSM4, project: CMIP5, mip: Amon, exp: [historical, rcp45], ensemble: "r(1:4)i1p1", start_year: 1961, end_year: 2099} + - {dataset: CCSM4, project: CMIP5, mip: Amon, exp: [historical, rcp60], ensemble: "r(1:4)i1p1", start_year: 1961, end_year: 2099} + - {dataset: CCSM4, project: CMIP5, mip: Amon, exp: [historical, rcp85], ensemble: "r(1:4)i1p1", start_year: 1961, end_year: 2099} + + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, mip: Amon, exp: [historical, rcp26], ensemble: "r(1:10)i1p1", start_year: 1961, end_year: 2099} + + - {dataset: BNU-ESM, project: CMIP5, mip: Amon, exp: [historical, rcp26], ensemble: r1i1p1, start_year: 1961, end_year: 2099} + - {dataset: BNU-ESM, project: CMIP5, mip: Amon, exp: [historical, rcp45], ensemble: r1i1p1, start_year: 1961, end_year: 2099} + - {dataset: BNU-ESM, project: CMIP5, mip: Amon, exp: [historical, rcp85], ensemble: r1i1p1, start_year: 1961, end_year: 2099} + +target: &target + - {dataset: CCSM4, project: CMIP5, mip: Amon, exp: [historical, rcp85], ensemble: "r(1:4)i1p1", start_year: 1961, end_year: 2099} + + +preprocessors: + preprocessor_global: + custom_order: true + area_statistics: + operator: mean + annual_statistics: + operator: mean + anomalies: + period: full + reference: + start_year: 1981 + start_month: 1 + start_day: 1 + end_year: 2010 + end_month: 12 + end_day: 31 + standardize: false + multi_model_statistics: + span: full + statistics: + - operator: percentile + percent: 10 + - operator: percentile + percent: 90 + preprocessor_local: &extract_NL + extract_point: + longitude: 6.25 + latitude: 51.21 + scheme: linear + + +diagnostics: + 
global_matching: + description: > + - Make a plot of the global mean temperature change according to all datasets (defined above) + - Get the global mean temperature change for specified years and specified percentiles (Delta T). These define our scenarios. + - Select the 30-year period from the target model (all ensemble members) that matches the Delta T for each scenario. + variables: + tas_cmip: + short_name: tas + preprocessor: preprocessor_global + additional_datasets: *cmip5 + tas_target: + short_name: tas + preprocessor: preprocessor_global + additional_datasets: *target + scripts: + global_matching: + script: kcs/global_matching.py + scenario_years: [2050, 2085] + scenario_percentiles: [Percentile10, Percentile90] + + local_resampling: + description: > + - Divide the 30-year dataset into 5-year blocks + - Create all possible combinations out of these 30/5 = 6 periods and x ensemble members (may contain the same block multiple times, but fewer repeats are better, with a maximum of three (see third item below)) + - Determine the 1000 best ... + - Determine the final best + variables: + pr_target: + short_name: pr + preprocessor: preprocessor_local + additional_datasets: *target + tas_target: + short_name: tas + preprocessor: preprocessor_local + additional_datasets: *target + pr_cmip: + short_name: pr + preprocessor: preprocessor_local + additional_datasets: *cmip5 + tas_cmip: + short_name: tas + preprocessor: preprocessor_local + additional_datasets: *cmip5 + scripts: + resample: + script: kcs/local_resampling.py + control_period: [1981, 2010] + n_samples: 8 + scenarios: + ML_MOC: + description: "Moderate warming / low changes in seasonal temperature & precipitation, mid-century" + global_dT: 1.0 + scenario_year: 2050 + resampling_period: [2021, 2050] + dpr_winter: 4 + pr_summer_control: [25, 55] + pr_summer_future: [45, 75] + tas_winter_control: [50, 80] + tas_winter_future: [20, 50] + tas_summer_control: [0, 100] + tas_summer_future: [0, 50] + + ML_EOC: + description: "Moderate warming / low changes in seasonal temperature & precipitation, end of century" + global_dT: 1.5 + scenario_year: 2085 + resampling_period: [2031, 2060] + dpr_winter: 6 + pr_summer_control: [10, 40] + pr_summer_future: [60, 90] + tas_winter_control: [50, 80] + tas_winter_future: [20, 50] + tas_summer_control: [0, 100] + tas_summer_future: [0, 50] + + WH_EOC: + description: "High warming / high changes in seasonal temperature & precipitation, end of century" + global_dT: 3.0 + scenario_year: 2085 + resampling_period: [2066, 2095] + dpr_winter: 24 + pr_summer_control: [60, 100] + pr_summer_future: [0, 40] + tas_winter_control: [20, 50] + tas_winter_future: [50, 80] + tas_summer_control: [10, 50] + tas_summer_future: [60, 100] diff --git a/esmvaltool/recipes/recipe_landcover.yml b/esmvaltool/recipes/recipe_landcover.yml index 169974da29..75c892b37c 100644 --- a/esmvaltool/recipes/recipe_landcover.yml +++ b/esmvaltool/recipes/recipe_landcover.yml @@ -1,20 +1,22 @@ # ESMValTool -# namelist_landcover.yml +# recipe_landcover.yml --- documentation: + title: Landcover diagnostics + description: | Recipe for plotting the accumulated area, average fraction and bias of landcover classes in comparison to ESA_CCI_LC data for the full globe and large scale regions. 
authors: - - hage_st - - loew_al - - muel_bn - - stac_to + - hagemann_stefan + - loew_alexander + - mueller_benjamin + - stacke_tobias maintainer: - righ_ma + - righi_mattia references: - georgievski18tac @@ -27,7 +29,7 @@ documentation: datasets: - {dataset: MPI-ESM-LR, project: CMIP5, exp: rcp85, ensemble: r1i1p1, start_year: 2008, end_year: 2012} - - {dataset: inmcm4, project: CMIP5, exp: rcp85, ensemble: r1i1p1, start_year: 2008, end_year: 2012} + # - {dataset: inmcm4, project: CMIP5, exp: rcp85, ensemble: r1i1p1, start_year: 2008, end_year: 2012} # data is missing on ESGF - {dataset: ESACCI-LANDCOVER, project: OBS, type: sat, version: L4-LCCS-Map-300m-P5Y-aggregated-0.500000Deg, tier: 2, start_year: 2008, end_year: 2012} preprocessors: @@ -65,5 +67,5 @@ diagnostics: landcover: script: landcover/landcover.py comparison: variable - colorscheme: seaborn-darkgrid + colorscheme: seaborn-v0_8-darkgrid diff --git a/esmvaltool/recipes/recipe_li17natcc.yml b/esmvaltool/recipes/recipe_li17natcc.yml new file mode 100644 index 0000000000..fc66b016c4 --- /dev/null +++ b/esmvaltool/recipes/recipe_li17natcc.yml @@ -0,0 +1,188 @@ +# ESMValTool +# recipe_li17natcc.yml +--- +documentation: + title: "Constraining future Indian Summer Monsoon projections" + description: | + Recipe for reproducing the plots in Li et al., 2017 Nature climate change + + authors: + - weigel_katja + + maintainer: + - weigel_katja + + references: + - li17natcc + + projects: + - eval4cmip + +preprocessors: + pp850: + extract_levels: + levels: 85000 + scheme: linear + regrid: + target_grid: 2x2 + scheme: linear + extract_region: + start_latitude: -40 + end_latitude: 40 + start_longitude: 0 + end_longitude: 360 + tropical: + regrid: + target_grid: 2x2 + scheme: linear + extract_region: + start_latitude: -40 + end_latitude: 40 + start_longitude: 0 + end_longitude: 360 + glob: + mask_landsea: + mask_out: land + area_statistics: + operator: mean + +diagnostics: + + cmip5: + description: Diagnostic for figure 1 and 2 from Li et al., 2017. Data sets from CMIP5. 
+ themes: + - EC + realms: + - atmos + variables: + pr: &gridded_cmip5_r1i1p1_amon_t2ms + preprocessor: tropical + project: CMIP5 + ensemble: r1i1p1 + mip: Amon + ua: &gridded_cmip5_r1i1p1_amon_t3m_pp850 + preprocessor: pp850 + project: CMIP5 + ensemble: r1i1p1 + mip: Amon + va: + <<: *gridded_cmip5_r1i1p1_amon_t3m_pp850 + ts: &global_mean_cmip5_r1i1p1_amon_t2ms + preprocessor: glob + project: CMIP5 + ensemble: r1i1p1 + mip: Amon + additional_datasets: + - {dataset: ACCESS1-0, exp: historical, start_year: 1980, end_year: 2005} + - {dataset: ACCESS1-0, exp: rcp85, start_year: 2070, end_year: 2099} + + - {dataset: bcc-csm1-1-m, exp: historical, start_year: 1980, end_year: 2005} + - {dataset: bcc-csm1-1-m, exp: rcp85, start_year: 2070, end_year: 2099} + + - {dataset: CanESM2, exp: historical, start_year: 1980, end_year: 2005} + - {dataset: CanESM2, exp: rcp85, start_year: 2070, end_year: 2099} + + - {dataset: CCSM4, exp: historical, start_year: 1980, end_year: 2005} + - {dataset: CCSM4, exp: rcp85, start_year: 2070, end_year: 2099} + + - {dataset: CESM1-CAM5, exp: historical, start_year: 1980, end_year: 2005} + - {dataset: CESM1-CAM5, exp: rcp85, start_year: 2070, end_year: 2099} + + - {dataset: CNRM-CM5, exp: historical, start_year: 1980, end_year: 2005} + - {dataset: CNRM-CM5, exp: rcp85, start_year: 2070, end_year: 2099} + + - {dataset: CSIRO-Mk3-6-0, exp: historical, start_year: 1980, end_year: 2005} + - {dataset: CSIRO-Mk3-6-0, exp: rcp85, start_year: 2070, end_year: 2099} + + - {dataset: GFDL-CM3, exp: historical, start_year: 1980, end_year: 2005} + - {dataset: GFDL-CM3, exp: rcp85, start_year: 2070, end_year: 2099} + + - {dataset: GFDL-ESM2G, exp: historical, start_year: 1980, end_year: 2005} + - {dataset: GFDL-ESM2G, exp: rcp85, start_year: 2070, end_year: 2099} + + - {dataset: GFDL-ESM2M, exp: historical, start_year: 1980, end_year: 2005} + - {dataset: GFDL-ESM2M, exp: rcp85, start_year: 2070, end_year: 2099} + + - {dataset: GISS-E2-R, exp: historical, start_year: 1980, end_year: 2005} + - {dataset: GISS-E2-R, exp: rcp85, start_year: 2070, end_year: 2099} + + - {dataset: HadGEM2-AO, exp: historical, start_year: 1980, end_year: 2005} + - {dataset: HadGEM2-AO, exp: rcp85, start_year: 2070, end_year: 2099} + + - {dataset: HadGEM2-CC, exp: historical, start_year: 1980, end_year: 2005} + - {dataset: HadGEM2-CC, exp: rcp85, start_year: 2070, end_year: 2099} + + - {dataset: inmcm4, exp: historical, start_year: 1980, end_year: 2005} + - {dataset: inmcm4, exp: rcp85, start_year: 2070, end_year: 2099} + + - {dataset: IPSL-CM5A-LR, exp: historical, start_year: 1980, end_year: 2005} + - {dataset: IPSL-CM5A-LR, exp: rcp85, start_year: 2070, end_year: 2099} + + - {dataset: IPSL-CM5A-MR, exp: historical, start_year: 1980, end_year: 2005} + - {dataset: IPSL-CM5A-MR, exp: rcp85, start_year: 2070, end_year: 2099} + + - {dataset: IPSL-CM5B-LR, exp: historical, start_year: 1980, end_year: 2005} + - {dataset: IPSL-CM5B-LR, exp: rcp85, start_year: 2070, end_year: 2099} + + - {dataset: MPI-ESM-LR, exp: historical, start_year: 1980, end_year: 2005} + - {dataset: MPI-ESM-LR, exp: rcp85, start_year: 2070, end_year: 2099} + + - {dataset: MPI-ESM-MR, exp: historical, start_year: 1980, end_year: 2005} + - {dataset: MPI-ESM-MR, exp: rcp85, start_year: 2070, end_year: 2099} + + - {dataset: MRI-CGCM3, exp: historical, start_year: 1980, end_year: 2005} + - {dataset: MRI-CGCM3, exp: rcp85, start_year: 2070, end_year: 2099} + + - {dataset: NorESM1-M, exp: historical, start_year: 1980, end_year: 2005} + - {dataset: 
NorESM1-M, exp: rcp85, start_year: 2070, end_year: 2099} + + - {dataset: NorESM1-ME, exp: historical, start_year: 1980, end_year: 2005} + - {dataset: NorESM1-ME, exp: rcp85, start_year: 2070, end_year: 2099} + + scripts: + lif1f2: + script: emergent_constraints/lif1f2.py + + cmip6: + description: Diagnostic for figure 1 and 2 from Li et al., 2017. Data sets from CMIP6. + themes: + - EC + realms: + - atmos + variables: + pr: &gridded_cmip6_r1i1p1_amon_t2ms + preprocessor: tropical + project: CMIP6 + ensemble: r1i1p1f1 + mip: Amon + ua: &gridded_cmip6_r1i1p1_amon_t3m_pp850 + preprocessor: pp850 + project: CMIP6 + ensemble: r1i1p1f1 + mip: Amon + va: + <<: *gridded_cmip6_r1i1p1_amon_t3m_pp850 + ts: &global_mean_cmip6_r1i1p1_amon_t2ms + preprocessor: glob + project: CMIP6 + ensemble: r1i1p1f1 + mip: Amon + additional_datasets: + - {dataset: ACCESS-ESM1-5, institute: CSIRO, exp: historical, grid: gn, start_year: 1980, end_year: 2009} + - {dataset: ACCESS-ESM1-5, institute: CSIRO, exp: ssp585, grid: gn, start_year: 2070, end_year: 2099} + + - {dataset: BCC-CSM2-MR, institute: BCC, exp: historical, grid: gn, start_year: 1980, end_year: 2009} + - {dataset: BCC-CSM2-MR, institute: BCC, exp: ssp585, grid: gn, start_year: 2070, end_year: 2099} + + - {dataset: CanESM5, institute: CCCma, exp: historical, grid: gn, start_year: 1980, end_year: 2009} + - {dataset: CanESM5, institute: CCCma, exp: ssp585, grid: gn, start_year: 2070, end_year: 2099} + + - {dataset: IPSL-CM6A-LR, institute: IPSL, exp: historical, grid: gr, start_year: 1980, end_year: 2009} + - {dataset: IPSL-CM6A-LR, institute: IPSL, exp: ssp585, grid: gr, start_year: 2070, end_year: 2099} + + - {dataset: MIROC6, institute: MIROC, exp: historical, grid: gn, start_year: 1980, end_year: 2009} + - {dataset: MIROC6, institute: MIROC, exp: ssp585, grid: gn, start_year: 2070, end_year: 2099} + + scripts: + lif1f2: + script: emergent_constraints/lif1f2.py diff --git a/esmvaltool/recipes/recipe_martin18grl.yml b/esmvaltool/recipes/recipe_martin18grl.yml new file mode 100644 index 0000000000..ead526f164 --- /dev/null +++ b/esmvaltool/recipes/recipe_martin18grl.yml @@ -0,0 +1,163 @@ +# ESMValTool +# recipe_martin18grl.yml +--- +documentation: + title: "Drought characteristics following Martin (2018)" + description: | + Calculate the SPI and count drought events following Martin (2018). 
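(Editor's note, not recipe content: "count drought events" here means counting runs of consecutive months with the index below a threshold; the real logic sits in the droughtindex diagnostics called below. A hedged toy version, with illustrative names and a made-up min_duration, is:)

def count_events(index_series, threshold=-2.0, min_duration=2):
    # Count runs of at least min_duration consecutive values below threshold.
    events = run = 0
    for value in index_series:
        run = run + 1 if value < threshold else 0
        if run == min_duration:  # count each qualifying run exactly once
            events += 1
    return events

print(count_events([-2.5, -2.1, 0.3, -2.2, -2.4, -2.6]))  # -> 2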
+ authors: + - weigel_katja + - adeniyi_kemisola + + references: + - martin18grl + + maintainer: + - weigel_katja + + projects: + - eval4cmip + +preprocessors: + preprocessor1: + regrid: + target_grid: 2.0x2.0 + scheme: linear + preprocessor2: + regrid: + target_grid: 2.0x2.0 + scheme: linear + +diagnostics: + diagnostic1: + variables: + pr: + reference_dataset: MIROC-ESM + preprocessor: preprocessor1 + field: T2Ms + start_year: 1901 + end_year: 2000 + additional_datasets: + # - {dataset: ERA-Interim, project: OBS6, mip: Amon, type: reanaly, + # version: 1, start_year: 1979, end_year: 2005, tier: 3} + # - {dataset: MIROC-ESM, project: CMIP5, mip: Amon, exp: historical, + # ensemble: r1i1p1, start_year: 1979, end_year: 2005} + # - {dataset: GFDL-CM3, project: CMIP5, mip: Amon, + # exp: historical, ensemble: r1i1p1, + # start_year: 1979, end_year: 2005} + - {dataset: CRU, mip: Amon, project: OBS, type: reanaly, + version: TS4.02, tier: 2} + - {dataset: ACCESS1-0, project: CMIP5, mip: Amon, exp: historical, + ensemble: r1i1p1} + - {dataset: ACCESS1-3, project: CMIP5, mip: Amon, exp: historical, + ensemble: r1i1p1} + - {dataset: CNRM-CM5, project: CMIP5, mip: Amon, exp: historical, + ensemble: r1i1p1} + - {dataset: BNU-ESM, project: CMIP5, mip: Amon, exp: historical, + ensemble: r1i1p1} + - {dataset: GFDL-CM3, project: CMIP5, mip: Amon, exp: historical, + ensemble: r1i1p1} + - {dataset: GFDL-ESM2G, project: CMIP5, mip: Amon, exp: historical, + ensemble: r1i1p1} + - {dataset: GISS-E2-H, project: CMIP5, mip: Amon, exp: historical, + ensemble: r1i1p1} + - {dataset: HadGEM2-CC, project: CMIP5, mip: Amon, exp: historical, + ensemble: r1i1p1} + - {dataset: IPSL-CM5A-LR, project: CMIP5, mip: Amon, exp: historical, + ensemble: r1i1p1} + - {dataset: IPSL-CM5A-MR, project: CMIP5, mip: Amon, exp: historical, + ensemble: r1i1p1} + - {dataset: IPSL-CM5B-LR, project: CMIP5, mip: Amon, exp: historical, + ensemble: r1i1p1} + - {dataset: MIROC-ESM, project: CMIP5, mip: Amon, exp: historical, + ensemble: r1i1p1} + - {dataset: MPI-ESM-MR, project: CMIP5, mip: Amon, exp: historical, + ensemble: r1i1p1} + - {dataset: MRI-ESM1, project: CMIP5, mip: Amon, exp: historical, + ensemble: r1i1p1} + - {dataset: NorESM1-M, project: CMIP5, mip: Amon, exp: historical, + ensemble: r1i1p1} + + scripts: + script1: + script: droughtindex/diag_save_spi.R + smooth_month: 6 + # distribution: "Gamma" usually for SPI. + # distribution: "log-Logistic" usually for SPEI. + # Also available distribution: "PearsonIII" + distribution: "Gamma" + + spi_collect: + description: Wrapper to collect and plot previously calculated SPI index + scripts: + spi_collect: + script: droughtindex/collect_drought_obs_multi.py + indexname: "SPI" + # Threshold under which an event is defined as drought. + # Usually -2.0 for SPI and SPEI. 
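(Editor's note, not recipe content: since the SPI is standardized to a unit normal, the -2.0 threshold used below flags roughly the driest 2.3% of the fitted distribution; a one-line check of that tail probability:)

from math import erf, sqrt
print(0.5 * (1 + erf(-2.0 / sqrt(2))))  # ~0.0228, i.e. about 2.3% of months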
+ threshold: -2.0 + ancestors: ['diagnostic1/script1'] + + + diagnostic2: + variables: + pr: + reference_dataset: MIROC-ESM + preprocessor: preprocessor2 + field: T2Ms + mip: Amon + project: CMIP5 + exp: [historical, rcp85] + start_year: 1950 + end_year: 2100 + additional_datasets: + - {dataset: ACCESS1-0, ensemble: r1i1p1} + - {dataset: ACCESS1-3, ensemble: r1i1p1} + - {dataset: CNRM-CM5, ensemble: r1i1p1} + - {dataset: BNU-ESM, ensemble: r1i1p1} + - {dataset: GFDL-CM3, ensemble: r1i1p1} + - {dataset: GFDL-ESM2G, ensemble: r1i1p1} + - {dataset: GISS-E2-H, ensemble: r1i1p1} + - {dataset: HadGEM2-CC, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-LR, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-MR, ensemble: r1i1p1} + - {dataset: IPSL-CM5B-LR, ensemble: r1i1p1} + - {dataset: MIROC-ESM, ensemble: r1i1p1} + - {dataset: MPI-ESM-MR, ensemble: r1i1p1} + - {dataset: MRI-ESM1, exp: [esmHistorical, esmrcp85], ensemble: r1i1p1} + - {dataset: NorESM1-M, ensemble: r1i1p1} + # - {dataset: MIROC-ESM, project: CMIP5, mip: Amon, + # exp: [historical, rcp85], ensemble: r1i1p1, + # start_year: 1950, end_year: 2100} + # - {dataset: GFDL-CM3, project: CMIP5, mip: Amon, + # exp: [historical, rcp85], ensemble: r1i1p1, + # start_year: 1950, end_year: 2100} + # - {dataset: IPSL-CM5A-LR, project: CMIP5, mip: Amon, + # exp: [historical, rcp85], ensemble: r1i1p1, + # start_year: 1950, end_year: 2100} + # - {dataset: MRI-ESM1, project: CMIP5, mip: Amon, + # exp: [esmHistorical, esmrcp85], ensemble: r1i1p1, + # start_year: 1950, end_year: 2100} + scripts: + script2: + script: droughtindex/diag_save_spi.R + smooth_month: 6 + # distribution: "Gamma" usually for SPI. + # distribution: "log-Logistic" usually for SPEI. + # Also available distribution: "PearsonIII". + distribution: "Gamma" + + spi_collect2: + description: Wrapper to collect and plot previously calculated SPI index + scripts: + spi_collect2: + script: droughtindex/collect_drought_model.py + start_year: 1950 + end_year: 2100 + # comparison_period should be < (end_year - start_year)/2 + comparison_period: 50 + indexname: "SPI" + # Threshold under which an event is defined as drought. + # Usually -2.0 for SPI and SPEI. + threshold: -2.0 + ancestors: ['diagnostic2/script2'] diff --git a/esmvaltool/recipes/recipe_meehl20sciadv.yml b/esmvaltool/recipes/recipe_meehl20sciadv.yml new file mode 100644 index 0000000000..cd3c28b4e8 --- /dev/null +++ b/esmvaltool/recipes/recipe_meehl20sciadv.yml @@ -0,0 +1,499 @@ +# ESMValTool +# recipe_meehl20sciadv.yml +--- +documentation: + title: > + Context for interpreting equilibrium climate sensitivity and transient + climate response from the CMIP6 Earth system models + + description: > + This recipe evaluates the equilibrium climate sensitivity (ECS) and the + transient climate response (TCR) for various CMIP generations. 
+ + authors: + - schlund_manuel + + maintainer: + - schlund_manuel + + references: + - meehl20sciadv + + +preprocessors: + + + spatial_mean: + area_statistics: + operator: mean + + +ECS_CMIP5_RTNT: &ecs_cmip5_rtnt + - {dataset: ACCESS1-0, exp: piControl, start_year: 300, end_year: 449} + - {dataset: ACCESS1-0, exp: abrupt4xCO2, start_year: 300, end_year: 449} + - {dataset: ACCESS1-3, exp: piControl, start_year: 250, end_year: 399} + - {dataset: ACCESS1-3, exp: abrupt4xCO2, start_year: 250, end_year: 399} + - {dataset: bcc-csm1-1, exp: piControl, start_year: 160, end_year: 309} + - {dataset: bcc-csm1-1, exp: abrupt4xCO2, start_year: 160, end_year: 309} + - {dataset: bcc-csm1-1-m, exp: piControl, start_year: 240, end_year: 389} + - {dataset: bcc-csm1-1-m, exp: abrupt4xCO2, start_year: 240, end_year: 389} + - {dataset: BNU-ESM, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: BNU-ESM, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: CanESM2, exp: piControl, start_year: 2321, end_year: 2470} + - {dataset: CanESM2, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + # Wrong start year for piControl? (branch_time = 2.) + - {dataset: CCSM4, exp: piControl, start_year: 250, end_year: 399} + - {dataset: CCSM4, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM5, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM5, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: CSIRO-Mk3-6-0, exp: piControl, start_year: 104, end_year: 253} + - {dataset: CSIRO-Mk3-6-0, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: FGOALS-g2, exp: piControl, start_year: 490, end_year: 639} + - {dataset: FGOALS-g2, exp: abrupt4xCO2, start_year: 490, end_year: 639} + # branch_time_in_child weird + - {dataset: FGOALS-s2, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: FGOALS-s2, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: GFDL-CM3, exp: piControl, start_year: 1, end_year: 150} + - {dataset: GFDL-CM3, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: GFDL-ESM2G, exp: piControl, start_year: 1, end_year: 150} + - {dataset: GFDL-ESM2G, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: GFDL-ESM2M, exp: piControl, start_year: 1, end_year: 150} + - {dataset: GFDL-ESM2M, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: GISS-E2-H, exp: piControl, start_year: 2660, end_year: 2809} + - {dataset: GISS-E2-H, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: GISS-E2-R, exp: piControl, start_year: 4200, end_year: 4349} + - {dataset: GISS-E2-R, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + # Experiments start at 1859-12-01 + - {dataset: HadGEM2-ES, exp: piControl, start_year: 1860, end_year: 2009} + - {dataset: HadGEM2-ES, exp: abrupt4xCO2, start_year: 1860, end_year: 2009} + - {dataset: inmcm4, exp: piControl, start_year: 2090, end_year: 2239} + - {dataset: inmcm4, exp: abrupt4xCO2, start_year: 2090, end_year: 2239} + - {dataset: IPSL-CM5B-LR, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: IPSL-CM5B-LR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: MIROC5, exp: piControl, start_year: 2100, end_year: 2249} + - {dataset: MIROC5, exp: abrupt4xCO2, start_year: 2100, end_year: 2249} + - {dataset: MIROC-ESM, exp: piControl, start_year: 1880, end_year: 2029} + - {dataset: MIROC-ESM, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: MPI-ESM-LR, exp: piControl, start_year: 1880, end_year: 2029} + - 
{dataset: MPI-ESM-LR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM-MR, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM-MR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM-P, exp: piControl, start_year: 1866, end_year: 2015} + - {dataset: MPI-ESM-P, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: MRI-CGCM3, exp: piControl, start_year: 1891, end_year: 2040} + - {dataset: MRI-CGCM3, exp: abrupt4xCO2, start_year: 1851, end_year: 2000} + - {dataset: NorESM1-M, exp: piControl, start_year: 700, end_year: 849} + - {dataset: NorESM1-M, exp: abrupt4xCO2, start_year: 1, end_year: 150} + +ECS_CMIP5_RTMT: &ecs_cmip5_rtmt + - {dataset: IPSL-CM5A-LR, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: IPSL-CM5A-LR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + +ECS_CMIP6_RTNT: &ecs_cmip6_rtnt + - {dataset: ACCESS-CM2, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 950, end_year: 1099, institute: CSIRO-ARCCSS} + - {dataset: ACCESS-CM2, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 950, end_year: 1099, institute: CSIRO-ARCCSS} + - {dataset: ACCESS-ESM1-5, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 101, end_year: 250} + - {dataset: ACCESS-ESM1-5, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 101, end_year: 250} + - {dataset: AWI-CM-1-1-MR, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 2650, end_year: 2799} + - {dataset: AWI-CM-1-1-MR, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: BCC-CSM2-MR, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: BCC-CSM2-MR, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: BCC-ESM1, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: BCC-ESM1, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: CAMS-CSM1-0, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 3030, end_year: 3179} + - {dataset: CAMS-CSM1-0, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 3030, end_year: 3179} + - {dataset: CanESM5, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 5201, end_year: 5350} + - {dataset: CanESM5, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: CESM2, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150} + - {dataset: CESM2, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150} + - {dataset: CESM2-WACCM, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150, institute: NCAR} + - {dataset: CESM2-WACCM, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150, institute: NCAR} + - {dataset: CNRM-CM6-1, exp: piControl, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM6-1, exp: abrupt-4xCO2, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM6-1-HR, exp: piControl, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM6-1-HR, exp: abrupt-4xCO2, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: CNRM-ESM2-1, exp: piControl, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: CNRM-ESM2-1, exp: abrupt-4xCO2, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} 
+ - {dataset: E3SM-1-0, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 101, end_year: 250} + - {dataset: E3SM-1-0, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1, end_year: 150} + # Mixed ensemble members and wrong start year for piControl (must be 1850) + - {dataset: EC-Earth3, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 2259, end_year: 2408} + - {dataset: EC-Earth3, exp: abrupt-4xCO2, ensemble: r3i1p1f1, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: EC-Earth3-Veg, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: EC-Earth3-Veg, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} + # parent_time_units messed up + - {dataset: FGOALS-f3-L, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 600, end_year: 749} + - {dataset: FGOALS-f3-L, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} + # Wrong start year for piControl (must be 101) + - {dataset: GFDL-CM4, exp: piControl, ensemble: r1i1p1f1, grid: gr1, start_year: 151, end_year: 300} + - {dataset: GFDL-CM4, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr1, start_year: 1, end_year: 150} + - {dataset: GFDL-ESM4, exp: piControl, ensemble: r1i1p1f1, grid: gr1, start_year: 101, end_year: 250} + - {dataset: GFDL-ESM4, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr1, start_year: 1, end_year: 150} + - {dataset: GISS-E2-1-G, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 4150, end_year: 4299} + - {dataset: GISS-E2-1-G, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: GISS-E2-1-H, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 3180, end_year: 3329} + - {dataset: GISS-E2-1-H, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: GISS-E2-2-G, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 2000, end_year: 2149, institute: NASA-GISS} + - {dataset: GISS-E2-2-G, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999, institute: NASA-GISS} + # Mixed ensemble members! + - {dataset: HadGEM3-GC31-LL, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: HadGEM3-GC31-LL, exp: abrupt-4xCO2, ensemble: r1i1p1f3, grid: gn, start_year: 1850, end_year: 1999} + # Mixed ensemble members! 
+ - {dataset: HadGEM3-GC31-MM, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: HadGEM3-GC31-MM, exp: abrupt-4xCO2, ensemble: r1i1p1f3, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: INM-CM5-0, exp: piControl, ensemble: r1i1p1f1, grid: gr1, start_year: 2099, end_year: 2248} + - {dataset: INM-CM5-0, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr1, start_year: 1850, end_year: 1999} + - {dataset: INM-CM4-8, exp: piControl, ensemble: r1i1p1f1, grid: gr1, start_year: 1947, end_year: 2096} + - {dataset: INM-CM4-8, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr1, start_year: 1850, end_year: 1999} + - {dataset: IPSL-CM6A-LR, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 1870, end_year: 2019} + - {dataset: IPSL-CM6A-LR, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: KACE-1-0-G, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 2150, end_year: 2299} + - {dataset: KACE-1-0-G, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: MIROC6, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 3200, end_year: 3349} + - {dataset: MIROC6, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 3200, end_year: 3349} + - {dataset: MIROC-ES2L, exp: piControl, ensemble: r1i1p1f2, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MIROC-ES2L, exp: abrupt-4xCO2, ensemble: r1i1p1f2, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM1-2-HR, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM1-2-HR, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM1-2-LR, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM1-2-LR, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MRI-ESM2-0, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MRI-ESM2-0, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + # parent_time_units not correct + - {dataset: NESM3, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 550, end_year: 699} + - {dataset: NESM3, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: NorESM2-LM, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1600, end_year: 1749} + - {dataset: NorESM2-LM, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150} + # Issue #286 (manual copying was necessary) + - {dataset: SAM0-UNICON, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 274, end_year: 423} + - {dataset: SAM0-UNICON, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + +ECS_CMIP6_RTMT: &ecs_cmip6_rtmt + # branch_time_in_child weird + - {dataset: MCM-UA-1-0, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150} + - {dataset: MCM-UA-1-0, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150} + - {dataset: UKESM1-0-LL, exp: piControl, ensemble: r1i1p1f2, grid: gn, start_year: 1960, end_year: 2109} + - {dataset: UKESM1-0-LL, exp: abrupt-4xCO2, ensemble: r1i1p1f2, grid: gn, start_year: 1850, end_year: 1999} + +TCR_CMIP5: &tcr_cmip5 + - {dataset: ACCESS1-0, exp: piControl, start_year: 300, end_year: 439} + - {dataset: ACCESS1-0, exp: 1pctCO2, start_year: 300, end_year: 439} + - {dataset: 
ACCESS1-3, exp: piControl, start_year: 250, end_year: 389} + - {dataset: ACCESS1-3, exp: 1pctCO2, start_year: 250, end_year: 389} + - {dataset: bcc-csm1-1, exp: piControl, start_year: 160, end_year: 299} + - {dataset: bcc-csm1-1, exp: 1pctCO2, start_year: 160, end_year: 299} + - {dataset: bcc-csm1-1-m, exp: piControl, start_year: 240, end_year: 379} + - {dataset: bcc-csm1-1-m, exp: 1pctCO2, start_year: 240, end_year: 379} + - {dataset: BNU-ESM, exp: piControl, start_year: 1850, end_year: 1989} + - {dataset: BNU-ESM, exp: 1pctCO2, start_year: 1850, end_year: 1989} + - {dataset: CanESM2, exp: piControl, start_year: 2321, end_year: 2460} + - {dataset: CanESM2, exp: 1pctCO2, start_year: 1850, end_year: 1989} + - {dataset: CCSM4, exp: piControl, start_year: 251, end_year: 390} + - {dataset: CCSM4, exp: 1pctCO2, start_year: 1850, end_year: 1989} + - {dataset: CNRM-CM5, exp: piControl, start_year: 1850, end_year: 1989} + - {dataset: CNRM-CM5, exp: 1pctCO2, start_year: 1850, end_year: 1989} + - {dataset: CNRM-CM5-2, exp: piControl, start_year: 1850, end_year: 1989} + - {dataset: CNRM-CM5-2, exp: 1pctCO2, start_year: 1850, end_year: 1989} + - {dataset: CSIRO-Mk3-6-0, exp: piControl, start_year: 104, end_year: 243} + - {dataset: CSIRO-Mk3-6-0, exp: 1pctCO2, start_year: 1, end_year: 140} + - {dataset: FGOALS-g2, exp: piControl, start_year: 440, end_year: 579} + - {dataset: FGOALS-g2, exp: 1pctCO2, start_year: 440, end_year: 579} + # branch_time_in_child weird + - {dataset: FGOALS-s2, exp: piControl, start_year: 1850, end_year: 1989} + - {dataset: FGOALS-s2, exp: 1pctCO2, start_year: 1850, end_year: 1989} + - {dataset: GFDL-CM3, exp: piControl, start_year: 1, end_year: 140} + - {dataset: GFDL-CM3, exp: 1pctCO2, start_year: 1, end_year: 140} + - {dataset: GFDL-ESM2G, exp: piControl, start_year: 1, end_year: 140} + - {dataset: GFDL-ESM2G, exp: 1pctCO2, start_year: 1, end_year: 140} + - {dataset: GFDL-ESM2M, exp: piControl, start_year: 1, end_year: 140} + - {dataset: GFDL-ESM2M, exp: 1pctCO2, start_year: 1, end_year: 140} + - {dataset: GISS-E2-H, exp: piControl, start_year: 2410, end_year: 2549} + - {dataset: GISS-E2-H, exp: 1pctCO2, start_year: 1850, end_year: 1989} + - {dataset: GISS-E2-R, exp: piControl, start_year: 3981, end_year: 4120} + - {dataset: GISS-E2-R, exp: 1pctCO2, start_year: 1850, end_year: 1989} + # Experiments start at 1859-12-01 + - {dataset: HadGEM2-ES, exp: piControl, start_year: 1860, end_year: 1999} + - {dataset: HadGEM2-ES, exp: 1pctCO2, start_year: 1860, end_year: 1999} + - {dataset: inmcm4, exp: piControl, start_year: 2090, end_year: 2229} + - {dataset: inmcm4, exp: 1pctCO2, start_year: 2090, end_year: 2229} + - {dataset: IPSL-CM5A-LR, exp: piControl, start_year: 1850, end_year: 1989} + - {dataset: IPSL-CM5A-LR, exp: 1pctCO2, start_year: 1850, end_year: 1989} + - {dataset: IPSL-CM5A-MR, exp: piControl, start_year: 1850, end_year: 1989} + - {dataset: IPSL-CM5A-MR, exp: 1pctCO2, start_year: 1850, end_year: 1989} + - {dataset: IPSL-CM5B-LR, exp: piControl, start_year: 1850, end_year: 1989} + - {dataset: IPSL-CM5B-LR, exp: 1pctCO2, start_year: 1850, end_year: 1989} + - {dataset: MIROC5, exp: piControl, start_year: 2400, end_year: 2539} + - {dataset: MIROC5, exp: 1pctCO2, start_year: 2200, end_year: 2339} + - {dataset: MIROC-ESM, exp: piControl, start_year: 1880, end_year: 2019} + - {dataset: MIROC-ESM, exp: 1pctCO2, start_year: 1, end_year: 140} + - {dataset: MPI-ESM-LR, exp: piControl, start_year: 1880, end_year: 2019} + - {dataset: MPI-ESM-LR, exp: 1pctCO2, start_year: 1850, 
end_year: 1989} + - {dataset: MPI-ESM-MR, exp: piControl, start_year: 1850, end_year: 1989} + - {dataset: MPI-ESM-MR, exp: 1pctCO2, start_year: 1850, end_year: 1989} + - {dataset: MPI-ESM-P, exp: piControl, start_year: 1866, end_year: 2005} + - {dataset: MPI-ESM-P, exp: 1pctCO2, start_year: 1850, end_year: 1989} + - {dataset: MRI-CGCM3, exp: piControl, start_year: 1891, end_year: 2030} + - {dataset: MRI-CGCM3, exp: 1pctCO2, start_year: 1851, end_year: 1990} + - {dataset: NorESM1-M, exp: piControl, start_year: 700, end_year: 839} + - {dataset: NorESM1-M, exp: 1pctCO2, start_year: 1, end_year: 140} + +TCR_CMIP6: &tcr_cmip6 + - {dataset: ACCESS-CM2, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 950, end_year: 1089, institute: CSIRO-ARCCSS} + - {dataset: ACCESS-CM2, exp: 1pctCO2, ensemble: r1i1p1f1, grid: gn, start_year: 950, end_year: 1089, institute: CSIRO-ARCCSS} + - {dataset: ACCESS-ESM1-5, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 101, end_year: 240} + - {dataset: ACCESS-ESM1-5, exp: 1pctCO2, ensemble: r1i1p1f1, grid: gn, start_year: 101, end_year: 240} + - {dataset: AWI-CM-1-1-MR, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 2650, end_year: 2789} + - {dataset: AWI-CM-1-1-MR, exp: 1pctCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: BCC-CSM2-MR, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: BCC-CSM2-MR, exp: 1pctCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: BCC-ESM1, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: BCC-ESM1, exp: 1pctCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: CAMS-CSM1-0, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 3030, end_year: 3169} + - {dataset: CAMS-CSM1-0, exp: 1pctCO2, ensemble: r1i1p1f1, grid: gn, start_year: 3030, end_year: 3169} + - {dataset: CanESM5, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 5201, end_year: 5340} + - {dataset: CanESM5, exp: 1pctCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: CESM2, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 501, end_year: 640} + - {dataset: CESM2, exp: 1pctCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 140} + - {dataset: CESM2-WACCM, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 70, end_year: 209, institute: NCAR} + - {dataset: CESM2-WACCM, exp: 1pctCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 140, institute: NCAR} + - {dataset: CNRM-CM6-1, exp: piControl, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1989} + - {dataset: CNRM-CM6-1, exp: 1pctCO2, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1989} + - {dataset: CNRM-CM6-1-HR, exp: piControl, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1989} + - {dataset: CNRM-CM6-1-HR, exp: 1pctCO2, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1989} + - {dataset: CNRM-ESM2-1, exp: piControl, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1989} + - {dataset: CNRM-ESM2-1, exp: 1pctCO2, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1989} + - {dataset: E3SM-1-0, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 101, end_year: 240} + - {dataset: E3SM-1-0, exp: 1pctCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1, end_year: 140} + - {dataset: EC-Earth3-Veg, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1989} + - {dataset: 
EC-Earth3-Veg, exp: 1pctCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1989} + # parent_time_units messed up + - {dataset: FGOALS-f3-L, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 600, end_year: 739} + - {dataset: FGOALS-f3-L, exp: 1pctCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1989} + # Wrong start year for piControl (must be 101) + - {dataset: GFDL-CM4, exp: piControl, ensemble: r1i1p1f1, grid: gr1, start_year: 151, end_year: 290} + - {dataset: GFDL-CM4, exp: 1pctCO2, ensemble: r1i1p1f1, grid: gr1, start_year: 1, end_year: 140} + - {dataset: GFDL-ESM4, exp: piControl, ensemble: r1i1p1f1, grid: gr1, start_year: 101, end_year: 240} + - {dataset: GFDL-ESM4, exp: 1pctCO2, ensemble: r1i1p1f1, grid: gr1, start_year: 1, end_year: 140} + - {dataset: GISS-E2-1-G, exp: piControl, ensemble: r101i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: GISS-E2-1-G, exp: 1pctCO2, ensemble: r101i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: GISS-E2-1-H, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 3180, end_year: 3319} + - {dataset: GISS-E2-1-H, exp: 1pctCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: GISS-E2-2-G, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 2000, end_year: 2139, institute: NASA-GISS} + - {dataset: GISS-E2-2-G, exp: 1pctCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989, institute: NASA-GISS} + # Mixed ensemble members! + - {dataset: HadGEM3-GC31-LL, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: HadGEM3-GC31-LL, exp: 1pctCO2, ensemble: r1i1p1f3, grid: gn, start_year: 1850, end_year: 1989} + # Mixed ensemble members! + - {dataset: HadGEM3-GC31-MM, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: HadGEM3-GC31-MM, exp: 1pctCO2, ensemble: r1i1p1f3, grid: gn, start_year: 1850, end_year: 1989} + # Wrong start year for piControl (must be 1850) + - {dataset: IITM-ESM, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1950, end_year: 2089} + - {dataset: IITM-ESM, exp: 1pctCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: INM-CM4-8, exp: piControl, ensemble: r1i1p1f1, grid: gr1, start_year: 1947, end_year: 2086} + - {dataset: INM-CM4-8, exp: 1pctCO2, ensemble: r1i1p1f1, grid: gr1, start_year: 1850, end_year: 1989} + - {dataset: IPSL-CM6A-LR, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 1870, end_year: 2009} + - {dataset: IPSL-CM6A-LR, exp: 1pctCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1989} + - {dataset: KACE-1-0-G, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 2150, end_year: 2289} + - {dataset: KACE-1-0-G, exp: 1pctCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1989} + # branch_time_in_child weird + - {dataset: MCM-UA-1-0, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 140} + - {dataset: MCM-UA-1-0, exp: 1pctCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 140} + - {dataset: MIROC6, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 3200, end_year: 3339} + - {dataset: MIROC6, exp: 1pctCO2, ensemble: r1i1p1f1, grid: gn, start_year: 3200, end_year: 3339} + - {dataset: MIROC-ES2L, exp: piControl, ensemble: r1i1p1f2, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: MIROC-ES2L, exp: 1pctCO2, ensemble: r1i1p1f2, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: MPI-ESM1-2-HR, exp: piControl, 
ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: MPI-ESM1-2-HR, exp: 1pctCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: MPI-ESM1-2-LR, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: MPI-ESM1-2-LR, exp: 1pctCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: MRI-ESM2-0, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: MRI-ESM2-0, exp: 1pctCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + # parent_time_units not correct, incorrect start year for piControl + - {dataset: NESM3, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 550, end_year: 689} + - {dataset: NESM3, exp: 1pctCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + # weird branch_time_in_child/branch_time_in_parent + - {dataset: NorCPM1, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 140, institute: NCC} + - {dataset: NorCPM1, exp: 1pctCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 140, institute: NCC} + - {dataset: NorESM2-LM, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1600, end_year: 1739} + - {dataset: NorESM2-LM, exp: 1pctCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 140} + - {dataset: SAM0-UNICON, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 274, end_year: 413} + - {dataset: SAM0-UNICON, exp: 1pctCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: UKESM1-0-LL, exp: piControl, ensemble: r1i1p1f2, grid: gn, start_year: 1960, end_year: 2099} + - {dataset: UKESM1-0-LL, exp: 1pctCO2, ensemble: r1i1p1f2, grid: gn, start_year: 1850, end_year: 1989} + + +diagnostics: + + ecs_cmip5: + description: Calculate ECS for all available CMIP5 models. + variables: + tas_rtnt: &variable_settings_cmip5 + short_name: tas + preprocessor: spatial_mean + project: CMIP5 + ensemble: r1i1p1 + mip: Amon + additional_datasets: *ecs_cmip5_rtnt + tas_rtmt: + <<: *variable_settings_cmip5 + additional_datasets: *ecs_cmip5_rtmt + rtnt: + <<: *variable_settings_cmip5 + short_name: rtnt + derive: true + additional_datasets: *ecs_cmip5_rtnt + rtmt: + <<: *variable_settings_cmip5 + short_name: rtmt + additional_datasets: *ecs_cmip5_rtmt + scripts: + ecs: &ecs_script + script: climate_metrics/ecs.py + calculate_mmm: true + complex_gregory_plot: true + seaborn_settings: + style: ticks + rc: + axes.titlepad: 15.0 + xtick.top: true + ytick.right: true + xtick.minor.visible: true + ytick.minor.visible: true + + ecs_cmip6: + description: Calculate ECS for all available CMIP6 models. + variables: + tas_rtnt: &variable_settings_cmip6 + short_name: tas + preprocessor: spatial_mean + project: CMIP6 + mip: Amon + additional_datasets: *ecs_cmip6_rtnt + tas_rtmt: + <<: *variable_settings_cmip6 + additional_datasets: *ecs_cmip6_rtmt + rtnt: + <<: *variable_settings_cmip6 + short_name: rtnt + derive: true + additional_datasets: *ecs_cmip6_rtnt + rtmt: + <<: *variable_settings_cmip6 + short_name: rtmt + additional_datasets: *ecs_cmip6_rtmt + scripts: + ecs: + <<: *ecs_script + + tcr_cmip5: + description: Calculate TCR for all available CMIP5 models. 
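+    # A note on the YAML plumbing (editorial): "&variable_settings_tcr"
+    # below is a plain YAML anchor; tcr_cmip6 re-uses these keys via
+    # "<<: *variable_settings_tcr" and only overrides project and the
+    # dataset list, keeping the CMIP5 and CMIP6 diagnostics in sync.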
+ variables: + tas: &variable_settings_tcr + preprocessor: spatial_mean + project: CMIP5 + ensemble: r1i1p1 + mip: Amon + additional_datasets: *tcr_cmip5 + scripts: + tcr: &tcr_script + script: climate_metrics/tcr.py + calculate_mmm: true + seaborn_settings: + style: ticks + rc: + axes.titlepad: 15.0 + xtick.top: true + ytick.right: true + xtick.minor.visible: true + ytick.minor.visible: true + + tcr_cmip6: + description: Calculate TCR for all available CMIP6 models. + variables: + tas: + <<: *variable_settings_tcr + project: CMIP6 + additional_datasets: *tcr_cmip6 + scripts: + tcr: + <<: *tcr_script + + table: + description: Create table with all TCR and ECS values. + themes: + - phys + realms: + - atmos + scripts: + table: + script: climate_metrics/create_table.py + ancestors: [ + 'ecs_cmip5/ecs', + 'ecs_cmip6/ecs', + 'tcr_cmip5/tcr', + 'tcr_cmip6/tcr', + ] + calculate_mean: false + patterns: ['ecs.nc', 'tcr.nc'] + round_output: 2 + + tcr_vs_ecs_cmip5_linear: + description: Plot linear TCR vs. ECS for CMIP5 models. + themes: + - phys + realms: + - atmos + scripts: + tcr_vs_ecs: &tcr_vs_ecs_script + script: ipcc_ar5/ch09_fig09_42b.py + ancestors: ['ecs_cmip5/ecs', 'tcr_cmip5/tcr', 'table/table'] + marker_file: '*.csv' + marker_column: idx + dataset_style: cmip5 + seaborn_settings: + style: ticks + rc: + axes.titlepad: 15.0 + xtick.top: true + ytick.right: true + xtick.minor.visible: true + ytick.minor.visible: true + + tcr_vs_ecs_cmip5_nonlinear: + description: Plot non-linear TCR vs. ECS for CMIP5 models. + themes: + - phys + realms: + - atmos + scripts: + tcr_vs_ecs: + <<: *tcr_vs_ecs_script + ancestors: ['ecs_cmip5/ecs', 'tcr_cmip5/tcr', 'table/table'] + log_x: true + log_y: true + + tcr_vs_ecs_cmip6_linear: + description: Plot linear TCR vs. ECS for CMIP6 models. + themes: + - phys + realms: + - atmos + scripts: + tcr_vs_ecs: + <<: *tcr_vs_ecs_script + ancestors: ['ecs_cmip6/ecs', 'tcr_cmip6/tcr', 'table/table'] + dataset_style: cmip6 + + tcr_vs_ecs_cmip6_nonlinear: + description: Plot non-linear TCR vs. ECS for CMIP6 models. + themes: + - phys + realms: + - atmos + scripts: + tcr_vs_ecs: + <<: *tcr_vs_ecs_script + ancestors: ['ecs_cmip6/ecs', 'tcr_cmip6/tcr', 'table/table'] + log_x: true + log_y: true + dataset_style: cmip6 diff --git a/esmvaltool/recipes/recipe_miles_block.yml b/esmvaltool/recipes/recipe_miles_block.yml index ce1cc3d920..645c2a4aa7 100644 --- a/esmvaltool/recipes/recipe_miles_block.yml +++ b/esmvaltool/recipes/recipe_miles_block.yml @@ -1,38 +1,19 @@ -############################################################################### -## namelist_miles.xml -## -## Description -## Namelist for computing blocking using the MiLES package by P. Davini (ISAC-CNR) -## MiLES (Mid-Latitude Evaluation System) v0.31 -## -## Authors -## Paolo Davini (ISAC-CNR, Italy - p.davini@isac.cnr.it) -## J. von Hardenberg (ISAC-CNR, Italy - j.vonhardenberg@isac.cnr.it) -## E. Arnone (ISAC-CNR, Italy - e.arnone@isac.cnr.it) -## -## Project -## Copernicus C3S-MAGIC (C3S 34a Lot2) -## -## References: -## https://github.com/oloapinivad/MiLES -## Davini, P., C. Cagnazzo, S. Gualdi, and A. Navarra, 2012: Bidimensional Diagnostics, Variability, and Trends of Northern Hemisphere Blocking. J. Climate, 25, 6496–6509, doi: 10.1175/JCLI-D-12-00032.1 -## Tibaldi S, Molteni F. 1990. On the operational predictability of blocking. 
Tellus A 42(3): 343–365, doi:10.1034/j.1600- 0870.1990.t01- 2- 00003.x -## -## This namelist is part of the ESMValTool -################################################################################ -# -# mask_landocean: false -# multi_model_statistics: false +# ESMValTool +# recipe_miles_block.yml --- documentation: + title: Blocking Statistics by MiLES description: | - Recipe for computing and plotting blocking statistics using + Recipe for computing and plotting blocking statistics using the MiLES (Mid-Latitude Evaluation System) package authors: - - hard_jo - - davi_pa - - arno_en + - vonhardenberg_jost + - davini_paolo + - arnone_enrico + + maintainer: + - unmaintained references: - davini18 @@ -44,8 +25,7 @@ documentation: datasets: - {dataset: EC-EARTH, project: CMIP5, exp: historical, ensemble: r2i1p1, start_year: 1980, end_year: 1989} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3, start_year: 1980, end_year: 1989 } - #- {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3, start_year: 1980, end_year: 1989, path: "/work/users/jost/esmvaltool2/input/OBS" } + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3, start_year: 1980, end_year: 1989 } preprocessors: preproc1: @@ -59,22 +39,20 @@ preprocessors: extract_region: start_longitude: 0. end_longitude: 360. - start_latitude: 1.25 + start_latitude: -1.25 end_latitude: 90. diagnostics: miles_diagnostics: - description: MiLES Blocking Diagnostics + description: MiLES Blocking Diagnostics variables: zg: preprocessor: preproc1 mip: day reference_dataset: "ERA-Interim" +# reference_exp: historical scripts: - miles_block: + miles_block: script: miles/miles_block.R seasons: DJF # Select season ('DJF','MAM','JJA','SON') - - - diff --git a/esmvaltool/recipes/recipe_miles_eof.yml b/esmvaltool/recipes/recipe_miles_eof.yml index 0f7df0d54c..2993855c3b 100644 --- a/esmvaltool/recipes/recipe_miles_eof.yml +++ b/esmvaltool/recipes/recipe_miles_eof.yml @@ -1,38 +1,19 @@ -############################################################################### -## namelist_miles.xml -## -## Description -## Namelist to compute EOFs using the MiLES package by P. Davini (ISAC-CNR) -## MiLES (Mid-Latitude Evaluation System) v0.31 -## -## Authors -## Paolo Davini (ISAC-CNR, Italy - p.davini@isac.cnr.it) -## J. von Hardenberg (ISAC-CNR, Italy - j.vonhardenberg@isac.cnr.it) -## E. Arnone (ISAC-CNR, Italy - e.arnone@isac.cnr.it) -## -## Project -## Copernicus C3S-MAGIC (C3S 34a Lot2) -## -## References: -## https://github.com/oloapinivad/MiLES -## Davini, P., C. Cagnazzo, S. Gualdi, and A. Navarra, 2012: Bidimensional Diagnostics, Variability, and Trends of Northern Hemisphere Blocking. J. Climate, 25, 6496–6509, doi: 10.1175/JCLI-D-12-00032.1 -## Tibaldi S, Molteni F. 1990. On the operational predictability of blocking. 
Tellus A 42(3): 343–365, doi:10.1034/j.1600- 0870.1990.t01- 2- 00003.x -## -## This namelist is part of the ESMValTool -################################################################################ -# -# mask_landocean: false -# multi_model_statistics: false +# ESMValTool +# recipe_miles_eof.yml --- documentation: + title: EOFs by MiLES description: | - Recipe for computing and plotting EOFs using + Recipe for computing and plotting EOFs using the MiLES (Mid-Latitude Evaluation System) package authors: - - hard_jo - - davi_pa - - arno_en + - vonhardenberg_jost + - davini_paolo + - arnone_enrico + + maintainer: + - unmaintained references: - davini18 @@ -44,8 +25,7 @@ documentation: datasets: - {dataset: EC-EARTH, project: CMIP5, exp: historical, ensemble: r2i1p1, start_year: 1980, end_year: 1989} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3, start_year: 1980, end_year: 1989 } - #- {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3, start_year: 1980, end_year: 1989, path: "/work/users/jost/esmvaltool2/input/OBS" } + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3, start_year: 1980, end_year: 1989 } preprocessors: preproc1: @@ -59,23 +39,21 @@ preprocessors: extract_region: start_longitude: 0. end_longitude: 360. - start_latitude: 1.25 + start_latitude: -1.25 end_latitude: 90. diagnostics: miles_diagnostics: - description: MiLES EOF Diagnostics + description: MiLES EOF Diagnostics variables: zg: preprocessor: preproc1 mip: day reference_dataset: "ERA-Interim" +# reference_exp: historical scripts: - miles_eof: + miles_eof: script: miles/miles_eof.R seasons: DJF # Select season ('DJF','MAM','JJA','SON','ALL') or your period as e.g. 'Jan_Feb_Mar' teles: NAO # Select EOFs ('NAO','AO','PNA') or specify custom area as "lon1_lon2_lat1_lat2" - - - diff --git a/esmvaltool/recipes/recipe_miles_regimes.yml b/esmvaltool/recipes/recipe_miles_regimes.yml index 03ae025317..bc253dd89b 100644 --- a/esmvaltool/recipes/recipe_miles_regimes.yml +++ b/esmvaltool/recipes/recipe_miles_regimes.yml @@ -1,39 +1,19 @@ -############################################################################### -## namelist_miles.xml -## -## Description -## Namelist to compute Weather Regimes using the MiLES package by P. Davini (ISAC-CNR) -## MiLES (Mid-Latitude Evaluation System) v0.31 -## -## Authors -## Paolo Davini (ISAC-CNR, Italy - p.davini@isac.cnr.it) -## J. von Hardenberg (ISAC-CNR, Italy - j.vonhardenberg@isac.cnr.it) -## E. Arnone (ISAC-CNR, Italy - e.arnone@isac.cnr.it) -## -## Project -## Copernicus C3S-MAGIC (C3S 34a Lot2) -## -## References: -## https://github.com/oloapinivad/MiLES -## S. Corti, F. Molteni and T. N. Palmer, 1999 -## Signature of recent climate change in frequencies of natural -## atmospheric circulation regimes". 
Nature 398, 799-802
-##
-## This namelist is part of the ESMValTool
-################################################################################
-#
-# mask_landocean: false
-# multi_model_statistics: false
+# ESMValTool
+# recipe_miles_regimes.yml
 ---
 documentation:
+  title: Weather Regimes by MiLES
   description: |
-    Recipe for computing and plotting weather regimes using 
+    Recipe for computing and plotting weather regimes using
     the MiLES (Mid-Latitude Evaluation System) package
 
   authors:
-    - hard_jo
-    - davi_pa
-    - arno_en
+    - vonhardenberg_jost
+    - davini_paolo
+    - arnone_enrico
+
+  maintainer:
+    - unmaintained
 
   references:
     - davini18
@@ -44,8 +24,7 @@ documentation:
 
 datasets:
   - {dataset: EC-EARTH, project: CMIP5, exp: historical, ensemble: r2i1p1, start_year: 1980, end_year: 1989}
-  - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3, start_year: 1980, end_year: 1989 }
-  #- {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3, start_year: 1980, end_year: 1989, path: "/work/users/jost/esmvaltool2/input/OBS" }
+  - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3, start_year: 1980, end_year: 1989 }
 
 preprocessors:
   preproc1:
@@ -59,20 +38,21 @@ preprocessors:
     extract_region:
       start_longitude: 0.
       end_longitude: 360.
-      start_latitude: 1.25
+      start_latitude: -1.25
       end_latitude: 90.
 
 diagnostics:
   miles_diagnostics:
-    description: MiLES Weather Regimes Diagnostics 
+    description: MiLES Weather Regimes Diagnostics
     variables:
       zg:
         preprocessor: preproc1
         mip: day
         reference_dataset: "ERA-Interim"
+# reference_exp: historical
     scripts:
-      miles_regimes: 
+      miles_regimes:
         script: miles/miles_regimes.R
         seasons: DJF # Select season (beta: 'DJF' only possible option for now)
         nclusters: 4 # beta: only 4 possible for now
diff --git a/esmvaltool/recipes/recipe_modes_of_variability.yml b/esmvaltool/recipes/recipe_modes_of_variability.yml
new file mode 100644
index 0000000000..494e05b417
--- /dev/null
+++ b/esmvaltool/recipes/recipe_modes_of_variability.yml
@@ -0,0 +1,63 @@
+# ESMValTool
+# recipe_modes_of_variability.yml
+---
+documentation:
+  title: |
+    Root Mean Square Error (RMSE) between observed and modelled patterns of
+    variability.
+
+  description: |
+    Tool to compute the RMSE between the observed and modelled patterns of
+    variability obtained through classification and their relative
+    bias (percentage) in the frequency of occurrence and the persistence of
+    each mode.
+
+  authors:
+    - torralba_veronica
+    - fuckar_neven
+    - cortesi_nicola
+    - guemas_virginie
+    - hunter_alasdair
+    - perez-zanon_nuria
+    - manubens_nicolau
+
+  maintainer:
+    - unmaintained
+
+  projects:
+    - c3s-magic
+
+  references:
+    - fuckar15cd
+
+datasets:
+  - {dataset: CNRM-CM5, project: CMIP5, start_year: 1971, end_year: 2000, ensemble: r1i1p1, exp: historical}
+  - {dataset: CNRM-CM5, project: CMIP5, start_year: 2020, end_year: 2075, ensemble: r1i1p1, exp: rcp85}
+
+preprocessors:
+  preproc:
+    extract_region:
+      start_longitude: 0
+      end_longitude: 360
+      start_latitude: 50
+      end_latitude: 90
+    extract_levels:
+      levels: 50000
+      scheme: nearest
+diagnostics:
+  weather_regime:
+    description: Compute modes of variability.
+    variables:
+      zg:
+        preprocessor: preproc
+        mip: Amon
+    scripts:
+      main:
+        script: magic_bsc/weather_regime.R
+        plot_type: polar # rectangular or polar
+
+        ncenters: 3
+        detrend_order: 2 # 0, 1 or 2 for daily data
+        cluster_method: "kmeans" # select hclust or kmeans
+        EOFS: false
+        frequency: 'SON' # Select a month (format: JAN, FEB, ...)
or season (JJA, SON, MAM(only monthly), DJF) diff --git a/esmvaltool/recipes/recipe_modes_of_variability_wp4.yml b/esmvaltool/recipes/recipe_modes_of_variability_wp4.yml deleted file mode 100644 index 311df72608..0000000000 --- a/esmvaltool/recipes/recipe_modes_of_variability_wp4.yml +++ /dev/null @@ -1,66 +0,0 @@ -# ESMValTool -# recipe_modes_of_variability_wp4.yml ---- -documentation: - description: | - Tool to compute the RMSE between the observed and modelled patterns of - variability obtained through classification and their relative relative - bias (percentage) in the frequency of occurrence and the persistence of - each mode. - - authors: - - torr_ve - - fuck_ne - - cort_ni - - guem_vi - - hunt_al - - pere_nu - - manu_ni - - projects: - - c3s-magic - - references: - - fuckar - -datasets: - #- {dataset: IPSL-CM5A-MR, project: CMIP5, start_year: 1961, end_year: 1990, ensemble: r1i1p1, exp: historical, type: exp} - #- {dataset: IPSL-CM5A-MR, project: CMIP5, start_year: 2006, end_year: 2026, ensemble: r1i1p1, exp: rcp85, type: exp} - - {dataset: bcc-csm1-1, project: CMIP5, start_year: 1971, end_year: 2000, ensemble: r1i1p1, exp: historical, type: exp} - - {dataset: bcc-csm1-1, project: CMIP5, start_year: 2020, end_year: 2050, ensemble: r1i1p1, exp: rcp85, type: exp} - -preprocessors: - preproc: - regrid: - target_grid: bcc-csm1-1 - scheme: linear - mask_fillvalues: - threshold_fraction: 0.95 - extract_region: - start_longitude: 0 - end_longitude: 360 - start_latitude: 30 - end_latitude: 90 - -diagnostics: - weather_regime: - description: Compute modes of variability. - variables: - psl: - preprocessor: preproc - mip: Amon - scripts: - main: - script: magic_bsc/weather_regime.r - region: North-Atlantic # North-Atlantic or Polar - - start_historical: "1971-01-01" - end_historical: "2000-12-31" - start_projection: "2020-01-01" - end_projection: "2050-12-31" - - ncenters: 3 - detrend_order: 2 - cluster_method: "kmeans" # select hclust or kmeans - EOFS: true - frequency: 'JAN' # Select a month (format: JAN, FEB, ...) or should work with season also but not jet format: JJA, SON, MAM, DJF diff --git a/esmvaltool/recipes/recipe_multimodel_products.yml b/esmvaltool/recipes/recipe_multimodel_products.yml new file mode 100644 index 0000000000..1339d967d7 --- /dev/null +++ b/esmvaltool/recipes/recipe_multimodel_products.yml @@ -0,0 +1,71 @@ +# ESMValTool +# recipe_multimodel_products.yml +--- +documentation: + title: | + Ensemble mean anomalies and agreements. + + description: | + Tool to compute the ensemble mean anomaly and the ensemble + variance and agreement and plot the results as maps and time series. 
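+
+  # Editorial gloss, inferred from the parameters further down (for example
+  # agreement_threshold: 80) rather than stated in the recipe text:
+  # "agreement" is the share of ensemble members agreeing on the sign of the
+  # anomaly at each grid point; see magic_bsc/multimodel_products.R for the
+  # exact rule.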
+
+  authors:
+    - manubens_nicolau
+    - hunter_alasdair
+    - perez-zanon_nuria
+
+  maintainer:
+    - unmaintained
+
+  projects:
+    - c3s-magic
+
+  references:
+    - manubens18ems
+
+datasets:
+  - {dataset: bcc-csm1-1, type: exp, project: CMIP5, exp: historical,
+     ensemble: r1i1p1, start_year: 1961, end_year: 1990}
+  - {dataset: MPI-ESM-MR, type: exp, project: CMIP5, exp: historical,
+     ensemble: r1i1p1, start_year: 1961, end_year: 1990}
+  - {dataset: IPSL-CM5A-LR, type: exp, project: CMIP5, exp: historical,
+     ensemble: r1i1p1, start_year: 1961, end_year: 1990}
+  - {dataset: MPI-ESM-MR, type: exp, project: CMIP5, exp: rcp26, ensemble: r1i1p1, start_year: 2006, end_year: 2099}
+  - {dataset: bcc-csm1-1, type: exp, project: CMIP5, exp: rcp26, ensemble: r1i1p1, start_year: 2006, end_year: 2099}
+  - {dataset: IPSL-CM5A-LR, type: exp, project: CMIP5, exp: rcp26, ensemble: r1i1p1, start_year: 2006, end_year: 2099}
+
+preprocessors:
+  preproc:
+    regrid:
+      target_grid: bcc-csm1-1
+      scheme: linear
+    mask_fillvalues:
+      threshold_fraction: 0.95
+    extract_region:
+      start_longitude: 0
+      end_longitude: 360
+      start_latitude: -90
+      end_latitude: 90
+
+diagnostics:
+  anomaly_agreement:
+    description: Calculate multi-member anomalies and their spread/agreement.
+    variables:
+      tas:
+        preprocessor: preproc
+        mip: Amon
+    scripts:
+      main:
+        script: magic_bsc/multimodel_products.R
+
+        # Parameters for Season() function
+        moninf: 6 # If this is null, then the monthly anomalies will be computed
+        monsup: 6
+        colorbar_lim: 3
+
+        agreement_threshold: 80
+        # Time series plot options
+        running_mean: 5 # Length of running mean to use for the time series plot
+
+        # Timeseries plot
+        time_series_plot: single # Either single or maxmin (plot the mean with/without shading between the max and min).
diff --git a/esmvaltool/recipes/recipe_multimodel_products_wp5.yml b/esmvaltool/recipes/recipe_multimodel_products_wp5.yml
deleted file mode 100644
index d51c196af8..0000000000
--- a/esmvaltool/recipes/recipe_multimodel_products_wp5.yml
+++ /dev/null
@@ -1,71 +0,0 @@
-# ESMValTool
-# recipe_multimodel_products_wp5.yml
----
-documentation:
-  description: |
-    Tool to compute the ensemble mean anomaly and the ensemble
-    variance and agreement and plot the results as maps and time series.
-
-  authors:
-    - manu_ni
-    - hunt_al
-    - pere_nu
-
-
-  projects:
-    - c3s-magic
-
-  references:
-    - manubens
-
-datasets:
-  - {dataset: bcc-csm1-1, type: exp, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1961, end_year: 1990}
-  - {dataset: MPI-ESM-MR, type: exp, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1961, end_year: 1990}
-  - {dataset: MIROC5, type: exp, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1961, end_year: 1990}
-  - {dataset: MPI-ESM-MR, type: exp, project: CMIP5, mip: Amon, exp: rcp26, ensemble: r1i1p1, start_year: 2006, end_year: 2099}
-  - {dataset: bcc-csm1-1, type: exp, project: CMIP5, mip: Amon, exp: rcp26, ensemble: r1i1p1, start_year: 2006, end_year: 2099}
-  - {dataset: MIROC5, type: exp, project: CMIP5, mip: Amon, exp: rcp26, ensemble: r1i1p1, start_year: 2006, end_year: 2099}
-
-preprocessors:
-  preproc:
-    regrid:
-      target_grid: bcc-csm1-1
-      scheme: linear
-    mask_fillvalues:
-      threshold_fraction: 0.95
-    extract_region:
-      start_longitude: 0
-      end_longitude: 360
-      start_latitude: -90
-      end_latitude: 90
-
-diagnostics:
-  anomaly_agreement:
-    description: Calculate multi-member anomalies and their spread/agreement.
-    variables:
-      tas:
-        preprocessor: preproc
-        mip: Amon
-    scripts:
-      main:
-        script: magic_bsc/multimodel_products.r
-
-        #Specify datasets to be used for computing climatology and anomaly
-        climatology_class: historical
-        anomaly_class: rcp26
-
-        climatology_start_year: 1961
-        climatology_end_year: 1990
-
-        anomaly_start_year: 2006
-        anomaly_end_year: 2099
-        #Parameters for Season() function
-        moninf: 6 #If this is null, then the monthly anomalies will be computed
-        monsup: 6
-
-        agreement_threshold: 80
-        #Time series plot options
-        running_mean: 5 #Length of running mean to use for the time series plot
-
-        # Timeseries plot
-        time_series_plot: single # Either single or maxmin (plot the mean with/without shading between the max and min.
diff --git a/esmvaltool/recipes/recipe_ocean_Landschuetzer2016.yml b/esmvaltool/recipes/recipe_ocean_Landschuetzer2016.yml
new file mode 100644
index 0000000000..5d8be0c516
--- /dev/null
+++ b/esmvaltool/recipes/recipe_ocean_Landschuetzer2016.yml
@@ -0,0 +1,134 @@
+# ESMValTool
+# recipe_ocean_Landschuetzer2016.yml
+---
+documentation:
+
+  title: Evaluate simulated sea surface CO2 against the Landschuetzer et al. (2016) dataset
+
+  description: |
+    Recipe to evaluate CO2 fluxes of CMIP5 marine biogeochemistry models
+    using the Landschuetzer et al. (2016) dataset.
+
+  authors:
+    - lovato_tomas
+
+  maintainer:
+    - lovato_tomas
+
+  references:
+    - acknow_project
+
+  projects:
+    - crescendo
+
+
+# Write options for scripts
+# --------------------------------------------------
+WRITE_OPTS: &write_opts
+  - { write_plots: True, write_netcdf: True }
+
+
+# --------------------------------------------------
+# DATASETS
+# --------------------------------------------------
+datasets:
+# working datasets
+  - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004}
+
+
+# --------------------------------------------------
+# Preprocessors
+# --------------------------------------------------
+preprocessors:
+  # --------------------------------------------------
+  # map preprocessors
+  # --------------------------------------------------
+  # For a 2D global surface map
+  prep_surface_map_2D:
+    climate_statistics:
+      operator: mean
+    regrid:
+      target_grid: 2x2
+      scheme: linear
+
+  # Global area-weighted Average from 2D field
+  prep_global_Surface_average_timeseries_2D:
+    custom_order: true
+    area_statistics:
+      operator: mean
+    multi_model_statistics:
+      span: overlap
+      statistics: [mean]
+
+# --------------------------------------------------
+# Diagnostics
+# --------------------------------------------------
+diagnostics:
+
+  # --------------------------------------------------
+  # Surface time series vs OBS
+  # --------------------------------------------------
+  diag_timeseries_surface_average_vs_OBS:
+    description: Global surface time series (Landschuetzer2016 observations)
+    variables:
+      dpco2:
+        preprocessor: prep_global_Surface_average_timeseries_2D
+        mip: Omon
+        additional_datasets:
+          - {dataset: Landschuetzer2016, project: OBS, type: clim, version: v2016, start_year: 2000, end_year: 2010, tier: 2}
+      areacello:
+        mip: fx
+      spco2:
+        preprocessor: prep_global_Surface_average_timeseries_2D
+        mip: Omon
+        additional_datasets:
+          - {dataset: Landschuetzer2016, project: OBS, type: clim, version: v2016, start_year: 2000, end_year: 2010, tier: 2}
+      fgco2:
+        preprocessor: prep_global_Surface_average_timeseries_2D
+        mip: Omon
+        additional_datasets:
+          - {dataset: Landschuetzer2016, project: OBS, type: clim, version: v2016, start_year: 2000,
end_year: 2010, tier: 2} + scripts: + Global_surface_timeseries: + script: ocean/diagnostic_timeseries.py + observational_dataset: {dataset: Landschuetzer2016, project: OBS} + <<: *write_opts + + + # -------------------------------------------------- + # Map diagnostics vs OBS + # -------------------------------------------------- + diag_surface_maps_vs_OBS: + description: Global Ocean Surface maps vs OBS + variables: + dpco2: + preprocessor: prep_surface_map_2D + mip: Omon + maps_range: [-90., 90.] + diff_range: [-50., 50.] + additional_datasets: + - {dataset: Landschuetzer2016, project: OBS, type: clim, version: v2016, start_year: 2000, end_year: 2010, tier: 2} + areacello: + mip: fx + spco2: + preprocessor: prep_surface_map_2D + mip: Omon + maps_range: [300., 400.] + diff_range: [-50., 50.] + additional_datasets: + - {dataset: Landschuetzer2016, project: OBS, type: clim, version: v2016, start_year: 2000, end_year: 2010, tier: 2} + fgco2: + preprocessor: prep_surface_map_2D + mip: Omon + maps_range: [-0.2, 0.2] + diff_range: [-0.1, 0.1] + additional_datasets: + - {dataset: Landschuetzer2016, project: OBS, type: clim, version: v2016, start_year: 2000, end_year: 2010, tier: 2} + scripts: + Global_Ocean_surface_map: + script: ocean/diagnostic_maps.py + <<: *write_opts + Global_Ocean_model_vs_obs: + script: ocean/diagnostic_model_vs_obs.py + observational_dataset: {dataset: Landschuetzer2016, project: OBS} + <<: *write_opts diff --git a/esmvaltool/recipes/recipe_ocean_Landschutzer2016.yml b/esmvaltool/recipes/recipe_ocean_Landschutzer2016.yml deleted file mode 100644 index 4472b5acb8..0000000000 --- a/esmvaltool/recipes/recipe_ocean_Landschutzer2016.yml +++ /dev/null @@ -1,118 +0,0 @@ -# ESMValTool -# Landschutzer2014.yml ---- -documentation: - description: | - Recipe to evaluate CO2 Fluxes of marine biogeochemistry models of CMIP5 - using Landschutzer et al (2.014) dataset. 
- Written by Tomas Lovato, CMCC, tomas.lovato@cmcc.it - - authors: - - lova_to - - maintainer: - - lova_to - - references: - - acknow_project - - projects: - - crescendo - - -datasets: -# working datasets - - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} - - -# -------------------------------------------------- -# Preprocessors -# -------------------------------------------------- -preprocessors: - # -------------------------------------------------- - # map preprocessors - # -------------------------------------------------- - # For a 2D global surface map - prep_surface_map_2D: - time_average: - regrid: - target_grid: 2x2 - scheme: linear - - # Global area-weighted Average from 2D field - prep_global_Surface_average_timeseries_2D: - custom_order: true - average_region: - coord1: longitude - coord2: latitude - multi_model_statistics: - span: overlap - statistics: [mean ] - -# -------------------------------------------------- -# Diagnostics -# -------------------------------------------------- -diagnostics: - - # -------------------------------------------------- - # Surface time series vs OBS - # -------------------------------------------------- - diag_timeseries_surface_average_vs_OBS: - description: Global surface time series (Landschutzer2014 observations) - variables: - dpco2: - preprocessor: prep_global_Surface_average_timeseries_2D - mip: Omon - fx_files: [areacello, ] - spco2: - preprocessor: prep_global_Surface_average_timeseries_2D - mip: Omon - fx_files: [areacello, ] - fgco2: - preprocessor: prep_global_Surface_average_timeseries_2D - mip: Omon - fx_files: [areacello, ] - additional_datasets: - - {dataset: Landschutzer2016, project: OBS, type: clim, version: v2016, start_year: 2000, end_year: 2010, tier: 2} - scripts: - Global_surface_timeseries: - script: ocean/diagnostic_timeseries.py - observational_dataset: {dataset: Landschutzer2016, project: OBS} - - - # -------------------------------------------------- - # Map diagnostics vs OBS - # -------------------------------------------------- - diag_surface_maps_vs_OBS: - description: Global Ocean Surface maps vs OBS - variables: - dpco2: - preprocessor: prep_surface_map_2D - mip: Omon - fx_files: [areacello, ] - maps_range: [-90., 90.] - diff_range: [-50., 50.] - spco2: - preprocessor: prep_surface_map_2D - mip: Omon - fx_files: [areacello, ] - maps_range: [300., 400.] - diff_range: [-50., 50.] - fgco2: - preprocessor: prep_surface_map_2D - mip: Omon - fx_files: [areacello, ] - maps_range: [-0.2, 0.2] - diff_range: [-0.1, 0.1] - additional_datasets: - - {dataset: Landschutzer2016, project: OBS, type: clim, version: v2016, start_year: 2000, end_year: 2010, tier: 2} - scripts: - Global_Ocean_surface_map: - script: ocean/diagnostic_maps.py - Global_Ocean_model_vs_obs: - script: ocean/diagnostic_model_vs_obs.py - observational_dataset: {dataset: Landschutzer2016, project: OBS} - - - - diff --git a/esmvaltool/recipes/recipe_ocean_amoc.yml b/esmvaltool/recipes/recipe_ocean_amoc.yml index 819aed3452..7b83631ddc 100644 --- a/esmvaltool/recipes/recipe_ocean_amoc.yml +++ b/esmvaltool/recipes/recipe_ocean_amoc.yml @@ -1,7 +1,9 @@ # ESMValTool # recipe_ocean_amoc.yml - +--- documentation: + title: Atlantic Meridional Overturning Circulation Recipe + description: | Recipe to produce time series figures of the derived variable, the Atlantic meriodinal overturning circulation (AMOC). @@ -9,10 +11,10 @@ documentation: the years 2001-2004. 
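+
+  # Editorial note (an assumption about the ESMValTool derivation machinery,
+  # not recipe text): for CMIP5 models the derived "amoc" variable is
+  # computed from the meridional overturning streamfunction (msftmyz)
+  # rather than read directly from a CMOR variable.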
authors: - - demo_le + - demora_lee maintainer: - - demo_le + - demora_lee references: - demora2018gmd @@ -20,26 +22,6 @@ documentation: projects: - ukesm -# datasets: -# - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004} -# - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004} -# - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004} -# - {dataset: CESM1-CAM5-1-FV2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004} -# - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004} -# - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004} -# - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004} -# - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004} -# - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004} -# - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004} -# - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004} -# - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004} -# - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004} -# - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004} -# - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004} -# - {dataset: MRI-ESM1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004} -# - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004} -# - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004} - preprocessors: prep_timeseries_drake: @@ -57,7 +39,8 @@ preprocessors: end_year: 2003 end_month: 12 end_day: 31 - time_average: + climate_statistics: + operator: mean diagnostics: # -------------------------------------------------- @@ -80,7 +63,7 @@ diagnostics: - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004} - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004} - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004} - - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004} + # - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004} - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004} - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004} - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004} @@ -139,7 +122,7 @@ diagnostics: - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2004} - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, 
end_year: 2004} - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2004} - - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2004} + # - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2004} - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2004} - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2004} - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2004} diff --git a/esmvaltool/recipes/recipe_ocean_bgc.yml b/esmvaltool/recipes/recipe_ocean_bgc.yml index 5d826fce2f..45459f3ff5 100644 --- a/esmvaltool/recipes/recipe_ocean_bgc.yml +++ b/esmvaltool/recipes/recipe_ocean_bgc.yml @@ -2,6 +2,8 @@ # recipe_ocean_bgc.yml --- documentation: + title: Marine Biogeochemistry analysis suite + description: | Recipe to evaluate the marine biogeochemistry models of CMIP5. There are also some physical evaluation metrics here too. @@ -10,10 +12,10 @@ documentation: Written by Lee de Mora, Plymouth Marine Laboratory, ledm@pml.ac.uk authors: - - demo_le + - demora_lee maintainer: - - demo_le + - demora_lee references: - demora2018gmd @@ -34,9 +36,6 @@ datasets: # - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} # - {dataset: HadCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} - - - ### # Problem with times # - {dataset: MIROC-ESM, project: CMIP5, mip: Oyr, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} @@ -56,35 +55,35 @@ preprocessors: # Global 3D Volume-weighted Average prep_timeseries_global_volume_average: custom_order: true - average_volume: - coord1: longitude - coord2: latitude - multi_model_statistics: - span: overlap - statistics: [mean ] + volume_statistics: + operator: mean + #multi_model_statistics: + # span: overlap + # statistics: [mean] + # exclude: ['WOA'] # Global area-weighted Average from 2D field prep_global_Surface_average_timeseries_2D: custom_order: true - average_region: - coord1: longitude - coord2: latitude - multi_model_statistics: - span: overlap - statistics: [mean ] + area_statistics: + operator: mean + #multi_model_statistics: + # span: overlap + # statistics: [mean] + # exclude: ['WOA'] # Global area-weighted surface Average from 3D field prep_global_Surface_average_timeseries_3D: custom_order: true extract_levels: levels: [0., ] - scheme: linear_horizontal_extrapolate_vertical - average_region: - coord1: longitude - coord2: latitude - multi_model_statistics: - span: overlap - statistics: [mean ] + scheme: linear_extrapolate + area_statistics: + operator: mean + #multi_model_statistics: + # span: overlap + # statistics: [mean] + # exclude: ['WOA'] prep_timeseries_scalar: # Load file as is.
custom_order: true @@ -92,42 +91,45 @@ preprocessors: # For a 2D global surface map prep_surface_map_2D: - time_average: + climate_statistics: + operator: mean # For a 3D global surface map prep_surface_map_3D: extract_levels: levels: [0., ] - scheme: linear_horizontal_extrapolate_vertical - time_average: + scheme: linear_extrapolate + climate_statistics: + operator: mean prep_surface_map_regrid_3D: custom_order: true extract_levels: levels: [0., ] - scheme: linear_horizontal_extrapolate_vertical - time_average: + scheme: linear_extrapolate + climate_statistics: + operator: mean regrid: target_grid: 1x1 scheme: linear prep_global_profile: - annual_mean: - average_region: - coord1: longitude - coord2: latitude + annual_statistics: + operator: mean + area_statistics: + operator: mean prep_global_profile_decadal: - annual_mean: - decadal: true - average_region: - coord1: longitude - coord2: latitude + decadal_statistics: + operator: mean + area_statistics: + operator: mean prep_transect_AMT: # Atlantic Meridional Transect (28W) custom_order: true - time_average: + climate_statistics: + operator: mean extract_region: start_longitude: 320. end_longitude: 345. @@ -143,16 +145,15 @@ preprocessors: # prep_depth_integration: # depth_integration: # # new_units: kg m-2 # need to specify in advance, as cf_units has strange behaviour. -# time_average: +# climate_statistics: +# operator: mean # # # 2D map global depth integration time series maps # prep_depth_integration_timeseries: # custom_order: true # depth_integration: -# average_region: -# coord1: longitude -# coord2: latitude - +# area_statistics: +# operator: mean # -------------------------------------------------- @@ -183,29 +184,33 @@ diagnostics: thetao: # Temperature 3D preprocessor: prep_timeseries_global_volume_average mip: Omon - fx_files: [volcello,] + additional_datasets: + - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2} so: # Salinity 3D preprocessor: prep_timeseries_global_volume_average mip: Omon - fx_files: [volcello,] + additional_datasets: + - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2} no3: # nitrate preprocessor: prep_timeseries_global_volume_average mip: Oyr - fx_files: [volcello,] + additional_datasets: + - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2} o2: # oxygen preprocessor: prep_timeseries_global_volume_average mip: Oyr - fx_files: [volcello,] + additional_datasets: + - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2} si: # Silicate preprocessor: prep_timeseries_global_volume_average mip: Oyr - fx_files: [volcello,] + additional_datasets: + - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2} # po4: # phosphate # No HadGEM2-ES phosphate. 
# preprocessor: prep_timeseries_global_volume_average # mip: Oyr - # fx_files: [volcello,] - additional_datasets: - - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2} + # additional_datasets: + # - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2} scripts: Global_Volume_Average_timeseries: script: ocean/diagnostic_timeseries.py @@ -222,23 +227,18 @@ diagnostics: # mip: Omon # derive: true # force_derivation: false -# fx_files: [areacello, ] chl: # chlorophyll preprocessor: prep_timeseries_global_volume_average mip: Oyr - fx_files: [volcello,] dfe: # iron preprocessor: prep_timeseries_global_volume_average mip: Oyr - fx_files: [volcello,] talk: # alkalinity preprocessor: prep_timeseries_global_volume_average mip: Oyr - fx_files: [volcello,] # dic: # Dissolved inorganic carbon # preprocessor: prep_timeseries_global_volume_average # mip: Oyr - # fx_files: [volcello,] scripts: Global_Volume_verage_timeseries: script: ocean/diagnostic_timeseries.py @@ -253,24 +253,21 @@ diagnostics: talk: # alkalinity preprocessor: prep_global_Surface_average_timeseries_3D mip: Oyr - fx_files: [areacello, ] intpp: preprocessor: prep_global_Surface_average_timeseries_2D mip: Omon - fx_files: [areacello, ] chl: preprocessor: prep_global_Surface_average_timeseries_3D mip: Oyr thresholds: [0.1, 0.2, 0.5] - fx_files: [areacello, ] + areacello: + mip: fx # dfe: # iron # preprocessor: prep_global_Surface_average_timeseries_3D # mip: Oyr - # fx_files: [areacello, ] # dic: # Dissolved inorganic carbon # preprocessor: prep_global_Surface_average_timeseries_3D # mip: Oyr - # fx_files: [areacello, ] scripts: Global_Volume_Average_timeseries: script: ocean/diagnostic_timeseries.py @@ -284,29 +281,35 @@ diagnostics: thetao: # Temperature ocean surface preprocessor: prep_global_Surface_average_timeseries_3D mip: Omon - fx_files: [areacello, ] + additional_datasets: + - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2} so: # Salinity ocean surface preprocessor: prep_global_Surface_average_timeseries_3D mip: Omon - fx_files: [areacello, ] + additional_datasets: + - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2} no3: # Nitrate ocean surface preprocessor: prep_global_Surface_average_timeseries_3D mip: Oyr - fx_files: [areacello, ] + additional_datasets: + - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2} o2: # oxygen preprocessor: prep_global_Surface_average_timeseries_3D mip: Oyr - fx_files: [areacello, ] + additional_datasets: + - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2} si: # Silicate preprocessor: prep_global_Surface_average_timeseries_3D mip: Oyr - fx_files: [areacello, ] + additional_datasets: + - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2} + areacello: + mip: fx # po4: # Phosphate # preprocessor: prep_global_Surface_average_timeseries_3D # mip: Oyr - # fx_files: [areacello, ] - additional_datasets: - - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2} + # additional_datasets: + # - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2} scripts: Global_Volume_Average_timeseries: script: ocean/diagnostic_timeseries.py @@ -336,29 +339,33 @@ diagnostics: 
thetao: # Temperature ocean surface preprocessor: prep_global_profile mip: Omon - fx_files: [volcello,] + additional_datasets: + - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2} so: # Salinity ocean surface preprocessor: prep_global_profile mip: Omon - fx_files: [volcello,] + additional_datasets: + - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2} no3: # Nitrate ocean surface preprocessor: prep_global_profile mip: Oyr - fx_files: [volcello,] + additional_datasets: + - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2} o2: # oxygen preprocessor: prep_global_profile mip: Oyr - fx_files: [volcello,] + additional_datasets: + - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2} si: # Silicate preprocessor: prep_global_profile mip: Oyr - fx_files: [volcello,] + additional_datasets: + - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2} # po4: # Phosphate # preprocessor: prep_global_profile # mip: Oyr - # fx_files: [volcello,] - additional_datasets: - - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2} + # additional_datasets: + # - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2} scripts: Global_profile_vs_WOA: script: ocean/diagnostic_profiles.py @@ -373,19 +380,15 @@ diagnostics: chl: # chlorophyll preprocessor: prep_global_profile mip: Oyr - fx_files: [volcello,] dfe: # iron preprocessor: prep_global_profile mip: Oyr - fx_files: [volcello,] talk: # alkalinity preprocessor: prep_global_profile mip: Oyr - fx_files: [volcello,] # dic: # Dissolved inorganic carbon # preprocessor: prep_global_profile # mip: Oyr - # fx_files: [volcello,] scripts: Global_profile_no_obs: script: ocean/diagnostic_profiles.py @@ -400,36 +403,44 @@ diagnostics: thetao: preprocessor: prep_surface_map_regrid_3D mip: Omon - fx_files: [areacello, ] + additional_datasets: + - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2} so: preprocessor: prep_surface_map_regrid_3D mip: Omon - fx_files: [areacello, ] + additional_datasets: + - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2} no3: preprocessor: prep_surface_map_regrid_3D mip: Oyr - fx_files: [areacello, ] + additional_datasets: + - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2} si: # Silicate preprocessor: prep_surface_map_regrid_3D mip: Oyr - fx_files: [areacello, ] + additional_datasets: + - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2} o2: # Oxygen preprocessor: prep_surface_map_regrid_3D mip: Oyr - fx_files: [areacello, ] + additional_datasets: + - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2} + areacello: + mip: fx # po4: # preprocessor: prep_surface_map_3D # mip: Oyr - # fx_files: [areacello, ] - additional_datasets: - - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2} + # additional_datasets: + # - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2} scripts: Global_Ocean_surface_map: script: ocean/diagnostic_maps.py + Global_Ocean_model_vs_obs: 
script: ocean/diagnostic_model_vs_obs.py observational_dataset: {dataset: WOA, project: OBS} + # -------------------------------------------------- # Map diagnostics - no data # -------------------------------------------------- @@ -439,32 +450,27 @@ diagnostics: intpp: preprocessor: prep_surface_map_2D mip: Omon - fx_files: [areacello, ] fgco2: preprocessor: prep_surface_map_2D mip: Omon - fx_files: [areacello, ] chl: preprocessor: prep_surface_map_3D mip: Oyr # thresholds: [0.1, 0.2, 0.5] - fx_files: [areacello, ] dfe: preprocessor: prep_surface_map_3D mip: Oyr - fx_files: [areacello, ] - + areacello: + mip: fx # dic: # preprocessor: prep_surface_map_3D # mip: Oyr - # fx_files: [areacello, ] scripts: Global_Ocean_surface_map: script: ocean/diagnostic_maps.py - # -------------------------------------------------- # Transects diagnostics - vs WOA # -------------------------------------------------- @@ -519,9 +525,6 @@ diagnostics: script: ocean/diagnostic_transects.py - - - # # # Depth integrated maps # # diag_depth_int_maps: # # description: Global Ocean Depth Integrated maps diff --git a/esmvaltool/recipes/recipe_ocean_example.yml b/esmvaltool/recipes/recipe_ocean_example.yml index 767f1ac448..7921ce41d7 100644 --- a/esmvaltool/recipes/recipe_ocean_example.yml +++ b/esmvaltool/recipes/recipe_ocean_example.yml @@ -2,16 +2,18 @@ # recipe_ocean_example.yml --- documentation: + title: Ocean physics analysis suite + description: | Recipe to demonstrate several simple plots based on the monthly ocean temperature. Please use this file as a template for adding additional fields into the ocean. This work is based on the BGC-val toolkit GMD-2018-103. authors: - - demo_le + - demora_lee maintainer: - - demo_le + - demora_lee references: - demora2018gmd @@ -53,9 +55,8 @@ preprocessors: # -------------------------------------------------- prep_timeseries_1: # For 2D fields custom_order: true - average_region: - coord1: longitude - coord2: latitude + area_statistics: + operator: mean multi_model_statistics: span: overlap statistics: [mean ] @@ -64,10 +65,9 @@ preprocessors: custom_order: true extract_levels: levels: [0., 10., 100., 1000.] - scheme: linear_horizontal_extrapolate_vertical - average_region: - coord1: longitude - coord2: latitude + scheme: linear_extrapolate + area_statistics: + operator: mean multi_model_statistics: span: overlap statistics: [mean ] @@ -79,9 +79,8 @@ preprocessors: end_longitude: 30. start_latitude: -80. end_latitude: 80. - average_region: - coord1: longitude - coord2: latitude + area_statistics: + operator: mean multi_model_statistics: span: overlap statistics: [mean ] @@ -90,24 +89,22 @@ preprocessors: custom_order: true extract_levels: levels: [0., 10., 100., 1000.] - scheme: linear_horizontal_extrapolate_vertical + scheme: linear_extrapolate extract_region: start_longitude: -80. end_longitude: 30. start_latitude: -80. end_latitude: 80. - average_region: - coord1: longitude - coord2: latitude + area_statistics: + operator: mean multi_model_statistics: span: overlap statistics: [mean ] prep_timeseries_5: # For Global Volume Averaged custom_order: true - average_volume: - coord1: longitude - coord2: latitude + volume_statistics: + operator: mean multi_model_statistics: span: overlap statistics: [mean ] @@ -119,9 +116,8 @@ preprocessors: end_longitude: 30. start_latitude: -80. end_latitude: 80.
- average_volume: - coord1: longitude - coord2: latitude + volume_statistics: + operator: mean multi_model_statistics: span: overlap statistics: [mean ] @@ -136,9 +132,8 @@ preprocessors: extract_volume: z_min: 0. z_max: 100. - average_volume: - coord1: longitude - coord2: latitude + volume_statistics: + operator: mean multi_model_statistics: span: overlap statistics: [mean ] @@ -148,31 +143,33 @@ preprocessors: # Map preprocessors - 2D fields # -------------------------------------------------- prep_map_1: # For Global 2D fields - time_average: + climate_statistics: + operator: mean prep_map_2: # For Global 2D fields with regridding custom_order: true regrid: target_grid: 1x1 scheme: linear - time_average: - multi_model_statistics: - span: overlap - statistics: [mean ] - + climate_statistics: + operator: mean + # multi_model_statistics: + # span: overlap + # statistics: [mean ] prep_map_3: # For specific levels of 3D fields with regrid custom_order: true extract_levels: levels: [0., 10., 100., 1000.,] - scheme: linear_horizontal_extrapolate_vertical - time_average: + scheme: linear_extrapolate + climate_statistics: + operator: mean regrid: target_grid: 1x1 scheme: linear - multi_model_statistics: - span: overlap - statistics: [mean ] + # multi_model_statistics: + # span: overlap + # statistics: [mean ] prep_map_4: # For a specific region with regrid custom_order: true @@ -181,20 +178,22 @@ preprocessors: end_longitude: 30. start_latitude: -80. end_latitude: 80. - time_average: + climate_statistics: + operator: mean regrid: target_grid: 1x1 scheme: linear - multi_model_statistics: - span: overlap - statistics: [mean ] + # multi_model_statistics: + # span: overlap + # statistics: [mean ] prep_map_5: # For a specific region at depth levels, custom_order: true extract_levels: levels: [0., 10., 100., 1000.,] - scheme: linear_horizontal_extrapolate_vertical - time_average: + scheme: linear_extrapolate + climate_statistics: + operator: mean extract_region: start_longitude: -80. end_longitude: 30. @@ -203,28 +202,31 @@ preprocessors: regrid: target_grid: 1x1 scheme: linear - multi_model_statistics: - span: overlap - statistics: [mean ] + # multi_model_statistics: + # span: overlap + # statistics: [mean ] # -------------------------------------------------- # Transects preprocessors # -------------------------------------------------- prep_transect_1: # For extracting along a trajectory - time_average: + climate_statistics: + operator: mean extract_trajectory: latitudes: [-5., 5.,] longitudes: [332.,332.] number_points: 4 # VERY SLOW with high numbers! prep_transect_2: # For extracting a transect - time_average: + climate_statistics: + operator: mean extract_transect: # Pacific Equator latitude: 0. longitude: [120., 300.] prep_transect_3: # For extracting a transect - time_average: + climate_statistics: + operator: mean extract_transect: # Atlantic Meridional Transect latitude: [-50.,50.] longitude: 332. @@ -233,9 +235,8 @@ preprocessors: # Depth-profile preprocessors # -------------------------------------------------- prep_profile_1: # For a global profile - average_region: - coord1: longitude - coord2: latitude + area_statistics: + operator: mean prep_profile_2: # For a regional profile extract_region: @@ -246,16 +247,17 @@ preprocessors: extract_volume: z_min: 0. z_max: 100. 
- average_region: - coord1: longitude - coord2: latitude + area_statistics: + operator: mean + # -------------------------------------------------- # Depth-profile preprocessors # -------------------------------------------------- prep_depth_integration_1: # For a 2D map global profile depth_integration: - time_average: + climate_statistics: + operator: mean diagnostics: diff --git a/esmvaltool/recipes/recipe_ocean_ice_extent.yml b/esmvaltool/recipes/recipe_ocean_ice_extent.yml index 71b9edb57f..779b8b4dc8 100644 --- a/esmvaltool/recipes/recipe_ocean_ice_extent.yml +++ b/esmvaltool/recipes/recipe_ocean_ice_extent.yml @@ -2,14 +2,16 @@ # recipe_ocean_ice_extent.yml --- documentation: + title: Ice Extent analysis + description: | Recipe to demonstrate several sea ice plots. authors: - - demo_le + - demora_lee maintainer: - - demo_le + - demora_lee references: - demora2018gmd diff --git a/esmvaltool/recipes/recipe_ocean_multimap.yml b/esmvaltool/recipes/recipe_ocean_multimap.yml new file mode 100644 index 0000000000..2828b35871 --- /dev/null +++ b/esmvaltool/recipes/recipe_ocean_multimap.yml @@ -0,0 +1,94 @@ +# ESMValTool +# recipe_ocean_multimap.yml +--- +documentation: + + title: Maps of ESM biases against observations + + description: | + Recipe to evaluate multiple model bias against a reference dataset. + Written by Tomas Lovato, CMCC, tomas.lovato@cmcc.it + + authors: + - lovato_tomas + + maintainer: + - lovato_tomas + + references: + - acknow_project + + projects: + - crescendo + + +# Write options for scripts +# -------------------------------------------------- +WRITE_OPTS: &write_opts + - { write_plots: True, write_netcdf: True } + + +# -------------------------------------------------- +# DATASETS +# -------------------------------------------------- +datasets: +# working datasets +# CMIP5 + - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1995, end_year: 2004} + - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1995, end_year: 2004} + - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1995, end_year: 2004} + - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r3i1p1, start_year: 1995, end_year: 2004} + - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1995, end_year: 2004} + - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1995, end_year: 2004} + - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1995, end_year: 2004} + - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1995, end_year: 2004} + - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1995, end_year: 2004} + - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1995, end_year: 2004} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1995, end_year: 2004} + - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r3i1p1, start_year: 1995, end_year: 2004} + - {dataset: MRI-ESM1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1995, end_year: 2004} + - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1995, end_year: 2004} + - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1995, end_year: 2004} + - {dataset: MIROC-ESM, project: CMIP5, exp: 
historical, ensemble: r1i1p1, start_year: 1995, end_year: 2004} + + +# -------------------------------------------------- +# Preprocessors +# -------------------------------------------------- +preprocessors: + # -------------------------------------------------- + # map preprocessors + # -------------------------------------------------- + # For a 2D global surface map + prep_surface_map_2D: + climate_statistics: + operator: mean + regrid: + target_grid: 1x1 + scheme: linear + +# -------------------------------------------------- +# Diagnostics +# -------------------------------------------------- +diagnostics: + + # -------------------------------------------------- + # Map diagnostics vs OBS + # -------------------------------------------------- + diag_surface_multimap: + description: Global Ocean Surface maps vs OBS + variables: + fgco2: + preprocessor: prep_surface_map_2D + mip: Omon + maps_range: [-0.12, 0.12] + diff_range: [-0.09, 0.09] + layout_rowcol: [4, 4] + additional_datasets: + - {dataset: Landschuetzer2016, project: OBS, type: clim, version: v2016, start_year: 1995, end_year: 2014, tier: 2} + scripts: + Global_Ocean_multi_vs_obs: + script: ocean/diagnostic_maps_multimodel.py + observational_dataset: {dataset: Landschuetzer2016, project: OBS} + <<: *write_opts + diff --git a/esmvaltool/recipes/recipe_ocean_quadmap.yml b/esmvaltool/recipes/recipe_ocean_quadmap.yml index 6c70ca7d6b..2f20a50015 100644 --- a/esmvaltool/recipes/recipe_ocean_quadmap.yml +++ b/esmvaltool/recipes/recipe_ocean_quadmap.yml @@ -2,15 +2,17 @@ # recipe_ocean_quadmap.yml --- documentation: + title: Tool to produce a four-pane map figure + description: | Recipe to demonstrate the Diagnostic Maps quad. Based on the ocean assess/Marine Assess toolkit plots. authors: - - demo_le + - demora_lee maintainer: - - demo_le + - demora_lee references: - demora2018gmd @@ -34,7 +36,8 @@ preprocessors: end_year: 2003 end_month: 12 end_day: 31 - time_average: + climate_statistics: + operator: mean regrid: target_grid: 1x1 scheme: linear @@ -51,12 +54,10 @@ diagnostics: preprocessor: prep_quad_map mip: Omon additional_datasets: -# filename: tos_ATSR_L3_ARC-v1.1.1_199701-201112.nc -# download from: https://datashare.is.ed.ac.uk/handle/10283/536 - - {dataset: ATSR, project: obs4mips, level: L3, version: ARC-v1.1.1, start_year: 2001, end_year: 2003, tier: 3} + - {dataset: ARC-SST-1-1, project: obs4MIPs, level: L3, start_year: 2001, end_year: 2003, tier: 1} scripts: Global_Ocean_map: &Global_Ocean_map script: ocean/diagnostic_maps_quad.py control_model: {dataset: HadGEM2-CC, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1} exper_model: {dataset: HadGEM2-ES, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1} - observational_dataset: {dataset: ATSR, project: obs4mips,} + observational_dataset: {dataset: ARC-SST-1-1, project: obs4MIPs,} diff --git a/esmvaltool/recipes/recipe_ocean_scalar_fields.yml b/esmvaltool/recipes/recipe_ocean_scalar_fields.yml index 3a96541669..ee2ff7a321 100644 --- a/esmvaltool/recipes/recipe_ocean_scalar_fields.yml +++ b/esmvaltool/recipes/recipe_ocean_scalar_fields.yml @@ -2,6 +2,8 @@ # recipe_ocean_scalar_fields.yml --- documentation: + title: Ocean Scalar fields analysis + description: | Recipe to demonstrate several simple plots based on the scalar fields. These fluxes are saved as MFO files in CMIP5. @@ -9,10 +11,10 @@ documentation: fields into the ocean. This work is based on the BGC-val toolkit GMD-2018-103.
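# Note on the `fx_files` entries that disappear in the biogeochemistry hunks
# above: instead of listing `fx_files: [areacello, ]` or
# `fx_files: [volcello, ]` under every variable, a cell measure is now
# requested once per diagnostic as an ordinary variable entry under the `fx`
# MIP table. A minimal sketch of the assumed pattern (the variable and
# preprocessor names are illustrative):
#
#   variables:
#     fgco2:
#       preprocessor: prep_surface_map_2D
#       mip: Omon
#     areacello:   # grid-cell area, resolved from the fx table
#       mip: fx
#
# The statistics preprocessors that need the measure (area_statistics,
# volume_statistics) are then expected to pick it up automatically.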
authors: - - demo_le + - demora_lee maintainer: - - demo_le + - demora_lee references: - demora2018gmd diff --git a/esmvaltool/recipes/recipe_perfmetrics_CMIP5.yml b/esmvaltool/recipes/recipe_perfmetrics_CMIP5.yml index ba41f72e9a..f1ff96eb64 100644 --- a/esmvaltool/recipes/recipe_perfmetrics_CMIP5.yml +++ b/esmvaltool/recipes/recipe_perfmetrics_CMIP5.yml @@ -2,18 +2,20 @@ # recipe_perfmetrics_CMIP5.yml --- documentation: + title: Performance metrics for essential climate variables in CMIP5 + description: | Recipe for plotting the performance metrics for the CMIP5 datasets, including the standard ECVs as in Gleckler et al., and some additional variables (like ozone, sea-ice, aerosol...) authors: - - fran_fr - - righ_ma - - eyri_ve + - winterstein_franziska + - righi_mattia + - eyring_veronika maintainer: - - righ_ma + - righi_mattia references: - gleckler08jgr @@ -182,58 +184,45 @@ preprocessors: diagnostics: - ### ta: AIR TEMPERATURE ##################################################### - ta850: - description: Air temperature at 850 hPa global. + ### The diagnostics are ordered by runtime, to optimize tasks execution ##### + + ### sm: SOIL MOISTURE ####################################################### + sm: + description: Soil moisture themes: - phys realms: - - atmos + - land variables: - ta: - preprocessor: pp850 - reference_dataset: ERA-Interim - alternative_dataset: NCEP - mip: Amon + sm: + preprocessor: ppNOLEV1thr10 + reference_dataset: ESACCI-SOILMOISTURE + mip: Lmon + derive: true + force_derivation: false project: CMIP5 exp: historical ensemble: r1i1p1 - start_year: 2000 - end_year: 2002 + start_year: 2002 + end_year: 2004 additional_datasets: - {dataset: ACCESS1-0} - {dataset: ACCESS1-3} - {dataset: bcc-csm1-1} - - {dataset: bcc-csm1-1-m} - - {dataset: BNU-ESM} - {dataset: CanCM4} - {dataset: CanESM2} - {dataset: CCSM4} - {dataset: CESM1-BGC} - {dataset: CESM1-CAM5} - - {dataset: CESM1-CAM5-1-FV2} - {dataset: CESM1-FASTCHEM} - {dataset: CESM1-WACCM} - - {dataset: CMCC-CESM} - - {dataset: CMCC-CM} - - {dataset: CMCC-CMS} - {dataset: CNRM-CM5} - - {dataset: CNRM-CM5-2} - {dataset: CSIRO-Mk3-6-0} - - {dataset: EC-EARTH, ensemble: r6i1p1} - {dataset: FGOALS-g2} - {dataset: FGOALS-s2} - - {dataset: FIO-ESM} - - {dataset: GFDL-CM2p1} - - {dataset: GFDL-CM3} - {dataset: GFDL-ESM2G} - {dataset: GFDL-ESM2M} - - {dataset: GISS-E2-H, ensemble: r1i1p2} - - {dataset: GISS-E2-H-CC} - - {dataset: GISS-E2-R, ensemble: r1i1p2} - - {dataset: GISS-E2-R-CC} - {dataset: HadCM3} - - {dataset: HadGEM2-AO} - {dataset: HadGEM2-CC} - {dataset: HadGEM2-ES} - {dataset: inmcm4} @@ -244,55 +233,46 @@ diagnostics: - {dataset: MIROC5} - {dataset: MIROC-ESM} - {dataset: MIROC-ESM-CHEM} - - {dataset: MPI-ESM-LR} - - {dataset: MPI-ESM-MR} - - {dataset: MPI-ESM-P} - {dataset: MRI-CGCM3} - - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} - - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + - {dataset: ESACCI-SOILMOISTURE, project: OBS, type: sat, + version: L3S-SSMV-COMBINED-v4.2, tier: 2} scripts: - cycle: &cycle_settings + grading: &grading_settings script: perfmetrics/main.ncl - # Plot type ('cycle', 'zonal', 'latlon', 'cycle_latlon') - plot_type: cycle + # Plot type ('cycle', 'zonal', 'latlon', 'cycle_latlon', 'cycle_zonal') + plot_type: cycle_latlon # Time average ('opt' argument of time_operations.ncl) time_avg: monthlyclim - # Region ('Global', 'Tropics', 'NH extratropics', 'SH extratropics') 
- region: Global + # Region ('global', 'trop', 'nhext', 'shext') + region: global # Plot standard deviation ('all', 'none', 'ref_model' or dataset name) plot_stddev: ref_model # Plot legend in a separate file legend_outside: true # Plot style styleset: CMIP5 - grading: &grading_settings - <<: *cycle_settings - # Plot type ('cycle', 'zonal', 'latlon', 'cycle_latlon') - plot_type: cycle_latlon - # Draw plots - draw_plots: false # Calculate grading calc_grading: true # Metric ('RMSD', 'BIAS', taylor') - metric: [RMSD, taylor] + metric: [RMSD] # Normalization ('mean', 'median', 'centered_median', 'none') - normalization: [centered_median, none] + normalization: [centered_median] - ta200: - description: Air temperature at 200 hPa global. + ### ta: AIR TEMPERATURE (zonal) ############################################# + taZONAL: + description: Air temperature zonal mean themes: - phys realms: - atmos variables: ta: - preprocessor: pp200 + preprocessor: ppALL reference_dataset: ERA-Interim - alternative_dataset: NCEP + alternative_dataset: NCEP-NCAR-R1 mip: Amon project: CMIP5 exp: historical @@ -350,30 +330,45 @@ diagnostics: - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} - - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, + version: 1, tier: 2} scripts: - cycle: - <<: *cycle_settings - grading: - <<: *grading_settings - # Metric ('RMSD', 'BIAS', taylor') - metric: [RMSD] - # Normalization ('mean', 'median', 'centered_median', 'none') - normalization: [centered_median] + zonal: &zonal_settings + script: perfmetrics/main.ncl + # Plot type ('cycle', 'zonal', 'latlon', 'cycle_latlon', 'cycle_zonal') + plot_type: zonal + # Time average ('opt' argument of time_operations.ncl) + time_avg: annualclim + # Region ('global', 'trop', 'nhext', 'shext') + region: global + # Draw difference plots + plot_diff: true + # Calculate t-test in difference plots + t_test: true + # Confidence level for the t-test + conf_level: 0.95 + # Mask non-significant values with stippling + stippling: true + # Contour levels for absolute plot + abs_levs: [200, 210, 220, 230, 240, 250, 260, 270, 280, 290, 300] + # Contour levels for difference plot + diff_levs: [-10, -8, -6, -4, -2, 0, 2, 4, 6, 8, 10] - ta30: - description: Air temperature at 30 hPa global. 
+ ### clt: TOTAL CLOUD COVER ################################################## + clt: + description: Total cloud cover themes: - - phys + - clouds realms: - atmos variables: - ta: - preprocessor: pp30 - reference_dataset: ERA-Interim - alternative_dataset: NCEP + clt: + preprocessor: ppNOLEV2 + reference_dataset: ESACCI-CLOUD + alternative_dataset: PATMOS-x mip: Amon project: CMIP5 exp: historical @@ -386,25 +381,18 @@ diagnostics: - {dataset: bcc-csm1-1} - {dataset: bcc-csm1-1-m} - {dataset: BNU-ESM} - - {dataset: CanCM4} - {dataset: CanESM2} - {dataset: CCSM4} - {dataset: CESM1-BGC} - {dataset: CESM1-CAM5} - {dataset: CESM1-CAM5-1-FV2} - - {dataset: CESM1-FASTCHEM} - - {dataset: CESM1-WACCM} - - {dataset: CMCC-CESM} - {dataset: CMCC-CM} - {dataset: CMCC-CMS} - {dataset: CNRM-CM5} - - {dataset: CNRM-CM5-2} - {dataset: CSIRO-Mk3-6-0} - {dataset: EC-EARTH, ensemble: r6i1p1} - {dataset: FGOALS-g2} - - {dataset: FGOALS-s2} - {dataset: FIO-ESM} - - {dataset: GFDL-CM2p1} - {dataset: GFDL-CM3} - {dataset: GFDL-ESM2G} - {dataset: GFDL-ESM2M} @@ -426,29 +414,49 @@ diagnostics: - {dataset: MIROC-ESM-CHEM} - {dataset: MPI-ESM-LR} - {dataset: MPI-ESM-MR} - - {dataset: MPI-ESM-P} - {dataset: MRI-CGCM3} - - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} - - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, + version: AVHRR-AMPM-fv3.0, tier: 2} + - {dataset: PATMOS-x, project: OBS, type: sat, version: NOAA, tier: 2} scripts: - cycle: - <<: *cycle_settings + latlon: &latlon_settings + script: perfmetrics/main.ncl + # Plot type ('cycle', 'zonal', 'latlon', 'cycle_latlon', 'cycle_zonal') + plot_type: latlon + # Time average ('opt' argument of time_operations.ncl) + time_avg: annualclim + # Region ('global', 'trop', 'nhext', 'shext') + region: global + # Draw difference plots + plot_diff: true + # Calculate t-test in difference plots + t_test: true + # Confidence level for the t-test + conf_level: 0.95 + # Add global average to the plot + show_global_avg: true + # Contour levels for absolute plot + abs_levs: [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100] + # Contour levels for difference plot + diff_levs: [-30, -25, -20, -15, -10, -5, 0, 5, 10, 15, 20, 25, 30] + grading: + <<: *grading_settings - ta5: - description: Air temperature at 5 hPa global. 
+ ### tas: NEAR-SURFACE TEMPERATURE ########################################### + tas: + description: Near-surface air temperature themes: - phys realms: - atmos variables: - ta: - preprocessor: pp5 + tas: + preprocessor: ppNOLEV2 reference_dataset: ERA-Interim - alternative_dataset: NCEP + alternative_dataset: NCEP-NCAR-R1 mip: Amon project: CMIP5 exp: historical @@ -466,7 +474,7 @@ diagnostics: - {dataset: CCSM4} - {dataset: CESM1-BGC} - {dataset: CESM1-CAM5} - - {dataset: CESM1-CAM5-1-FV2} + # - {dataset: CESM1-CAM5-1-FV2} # data is missing on ESGF - {dataset: CESM1-FASTCHEM} - {dataset: CESM1-WACCM} - {dataset: CMCC-CESM} @@ -506,24 +514,38 @@ diagnostics: - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} - - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, + version: 1, tier: 2} scripts: - cycle: - <<: *cycle_settings + latlon: + <<: *latlon_settings + # Add global average to the plot + show_global_avg: false + # Contour levels for absolute plot + abs_levs: [240, 243, 246, 249, 252, 255, 258, + 261, 264, 267, 270, 273, 276, 279, + 282, 285, 288, 291, 294, 297, 300] + # Contour levels for difference plot + diff_levs: [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5] + grading: + <<: *grading_settings - taZONAL: - description: Air temperature zonal mean + ### ts: SEA-SURFACE (SKIN) TEMPERATURE ###################################### + ts: + description: Sea-surface (skin) temperature themes: - phys realms: - atmos + - ocean variables: - ta: - preprocessor: ppALL - reference_dataset: ERA-Interim - alternative_dataset: NCEP + ts: + preprocessor: ppNOLEV1x1 + reference_dataset: ESACCI-SST + alternative_dataset: HadISST mip: Amon project: CMIP5 exp: historical @@ -542,17 +564,11 @@ diagnostics: - {dataset: CESM1-BGC} - {dataset: CESM1-CAM5} - {dataset: CESM1-CAM5-1-FV2} - - {dataset: CESM1-FASTCHEM} - - {dataset: CESM1-WACCM} - - {dataset: CMCC-CESM} - {dataset: CMCC-CM} - {dataset: CMCC-CMS} - {dataset: CNRM-CM5} - - {dataset: CNRM-CM5-2} - {dataset: CSIRO-Mk3-6-0} - - {dataset: EC-EARTH, ensemble: r6i1p1} - {dataset: FGOALS-g2} - - {dataset: FGOALS-s2} - {dataset: FIO-ESM} - {dataset: GFDL-CM2p1} - {dataset: GFDL-CM3} @@ -576,48 +592,28 @@ diagnostics: - {dataset: MIROC-ESM-CHEM} - {dataset: MPI-ESM-LR} - {dataset: MPI-ESM-MR} - - {dataset: MPI-ESM-P} - {dataset: MRI-CGCM3} - - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} - - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + - {dataset: ESACCI-SST, project: OBS, type: sat, + version: 2.2, tier: 2} + - {dataset: HadISST, project: OBS, type: reanaly, version: 1, tier: 2} scripts: - zonal: &zonal_settings - script: perfmetrics/main.ncl - # Plot type ('cycle', 'zonal', 'latlon', 'cycle_latlon') - plot_type: zonal - # Time average ('opt' argument of time_operations.ncl) - time_avg: annualclim - # Region ('Global', 'Tropics', 'NH extratropics', 'SH extratropics') - region: Global - # Draw difference plots - plot_diff: true - # Calculate t-test in difference plots - t_test: true - # Confidence level for the t-test - conf_level: 0.95 - # Mask non-significant values with stippling - stippling: true - # Contour levels for absolute plot - abs_levs: [200, 210, 220, 230, 240, 250, 260, 270, 280, 290, 
300] - # Contour levels for difference plot - diff_levs: [-10, -8, -6, -4, -2, 0, 2, 4, 6, 8, 10] + grading: + <<: *grading_settings - ### ua: EASTWARD WIND ####################################################### - ua850: - description: Eastward wind at 850 hPa global. + ### pr: PRECIPITATION ####################################################### + pr: + description: Precipitation themes: - - atmDyn + - phys realms: - atmos variables: - ua: - preprocessor: pp850 - reference_dataset: ERA-Interim - alternative_dataset: NCEP + pr: + preprocessor: ppNOLEV1 + reference_dataset: GPCP-V2.2 mip: Amon project: CMIP5 exp: historical @@ -674,28 +670,24 @@ diagnostics: - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} - - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + - {dataset: GPCP-V2.2, project: obs4MIPs, level: L3, tier: 1} scripts: grading: <<: *grading_settings - # Metric ('RMSD', 'BIAS', taylor') - metric: [RMSD] - # Normalization ('mean', 'median', 'centered_median', 'none') - normalization: [centered_median] - ua200: - description: Eastward wind at 200 hPa global. + ### zg: GEOPOTENTIAL HEIGHT (500 hPa) ####################################### + zg500: + description: Geopotential height 500 hPa global themes: - - atmDyn + - phys realms: - atmos variables: - ua: - preprocessor: pp200 + zg: + preprocessor: pp500 reference_dataset: ERA-Interim - alternative_dataset: NCEP + alternative_dataset: NCEP-NCAR-R1 mip: Amon project: CMIP5 exp: historical @@ -722,8 +714,8 @@ diagnostics: - {dataset: CNRM-CM5} - {dataset: CNRM-CM5-2} - {dataset: CSIRO-Mk3-6-0} - - {dataset: EC-EARTH, ensemble: r6i1p1} - {dataset: FGOALS-g2} + - {dataset: FGOALS-s2} - {dataset: FIO-ESM} - {dataset: GFDL-CM2p1} - {dataset: GFDL-CM3} @@ -731,7 +723,6 @@ diagnostics: - {dataset: GFDL-ESM2M} - {dataset: GISS-E2-H, ensemble: r1i1p2} - {dataset: GISS-E2-H-CC} - - {dataset: GISS-E2-R, ensemble: r1i1p2} - {dataset: GISS-E2-R-CC} - {dataset: HadCM3} - {dataset: HadGEM2-AO} @@ -749,68 +740,59 @@ diagnostics: - {dataset: MPI-ESM-MR} - {dataset: MPI-ESM-P} - {dataset: MRI-CGCM3} - - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} - - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, + version: 1, tier: 2} scripts: grading: <<: *grading_settings - # Metric ('RMSD', 'BIAS', taylor') - metric: [RMSD] - # Normalization ('mean', 'median', 'centered_median', 'none') - normalization: [centered_median] - ### va: NORTHWARD WIND ###################################################### - va850: - description: Northward wind at 850 hPa global. 
+ ### lwcre: LONGWAVE CLOUD FORCING ########################################### + lwcre: + description: Longwave cloud radiative effect themes: - - atmDyn + - clouds realms: - atmos variables: - va: - preprocessor: pp850 - reference_dataset: ERA-Interim - alternative_dataset: NCEP + lwcre: + preprocessor: ppNOLEV1 + reference_dataset: CERES-EBAF mip: Amon + derive: true + force_derivation: false project: CMIP5 exp: historical ensemble: r1i1p1 - start_year: 2000 - end_year: 2002 + start_year: 2001 + end_year: 2003 additional_datasets: - {dataset: ACCESS1-0} - {dataset: ACCESS1-3} - {dataset: bcc-csm1-1} - {dataset: bcc-csm1-1-m} - {dataset: BNU-ESM} - - {dataset: CanCM4} - {dataset: CanESM2} - {dataset: CCSM4} - {dataset: CESM1-BGC} - {dataset: CESM1-CAM5} - - {dataset: CESM1-FASTCHEM} - - {dataset: CESM1-WACCM} - - {dataset: CMCC-CESM} + - {dataset: CESM1-CAM5-1-FV2} - {dataset: CMCC-CM} - {dataset: CMCC-CMS} - {dataset: CNRM-CM5} - - {dataset: CNRM-CM5-2} - {dataset: CSIRO-Mk3-6-0} - - {dataset: EC-EARTH, ensemble: r6i1p1} - {dataset: FGOALS-g2} + - {dataset: FGOALS-s2} - {dataset: FIO-ESM} - - {dataset: GFDL-CM2p1} - {dataset: GFDL-CM3} - {dataset: GFDL-ESM2G} - {dataset: GFDL-ESM2M} - {dataset: GISS-E2-H, ensemble: r1i1p2} - - {dataset: GISS-E2-H-CC} - {dataset: GISS-E2-R, ensemble: r1i1p2} - - {dataset: GISS-E2-R-CC} - {dataset: HadCM3} - {dataset: HadGEM2-AO} - {dataset: HadGEM2-CC} @@ -825,69 +807,63 @@ diagnostics: - {dataset: MIROC-ESM-CHEM} - {dataset: MPI-ESM-LR} - {dataset: MPI-ESM-MR} - - {dataset: MPI-ESM-P} - {dataset: MRI-CGCM3} - - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} - - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} scripts: + latlon: + <<: *latlon_settings + # Add global average to the plot + show_global_avg: false + # Contour levels for absolute plot + abs_levs: [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100] + # Contour levels for difference plot + diff_levs: [-30, -25, -20, -15, -10, -5, 0, 5, 10, 15, 20, 25, 30] grading: <<: *grading_settings - # Metric ('RMSD', 'BIAS', taylor') - metric: [RMSD] - # Normalization ('mean', 'median', 'centered_median', 'none') - normalization: [centered_median] - va200: - description: Northward wind at 200 hPa global. 
+ ### swcre: SHORTWAVE CLOUD FORCING ########################################## + swcre: + description: Shortwave cloud radiative effect themes: - - atmDyn + - clouds realms: - atmos variables: - va: - preprocessor: pp200 - reference_dataset: ERA-Interim - alternative_dataset: NCEP + swcre: + preprocessor: ppNOLEV1 + reference_dataset: CERES-EBAF mip: Amon + derive: true + force_derivation: false project: CMIP5 exp: historical ensemble: r1i1p1 - start_year: 2000 - end_year: 2002 + start_year: 2001 + end_year: 2003 additional_datasets: - {dataset: ACCESS1-0} - {dataset: ACCESS1-3} - {dataset: bcc-csm1-1} - {dataset: bcc-csm1-1-m} - {dataset: BNU-ESM} - - {dataset: CanCM4} - {dataset: CanESM2} - {dataset: CCSM4} - {dataset: CESM1-BGC} - {dataset: CESM1-CAM5} - - {dataset: CESM1-FASTCHEM} - - {dataset: CESM1-WACCM} - - {dataset: CMCC-CESM} + - {dataset: CESM1-CAM5-1-FV2} - {dataset: CMCC-CM} - - {dataset: CMCC-CMS} - {dataset: CNRM-CM5} - - {dataset: CNRM-CM5-2} - {dataset: CSIRO-Mk3-6-0} - - {dataset: EC-EARTH, ensemble: r6i1p1} - - {dataset: FGOALS-g2} + - {dataset: FGOALS-s2} - {dataset: FIO-ESM} - - {dataset: GFDL-CM2p1} - {dataset: GFDL-CM3} - {dataset: GFDL-ESM2G} - {dataset: GFDL-ESM2M} - {dataset: GISS-E2-H, ensemble: r1i1p2} - - {dataset: GISS-E2-H-CC} - {dataset: GISS-E2-R, ensemble: r1i1p2} - - {dataset: GISS-E2-R-CC} - {dataset: HadCM3} - {dataset: HadGEM2-AO} - {dataset: HadGEM2-CC} @@ -902,40 +878,40 @@ diagnostics: - {dataset: MIROC-ESM-CHEM} - {dataset: MPI-ESM-LR} - {dataset: MPI-ESM-MR} - - {dataset: MPI-ESM-P} - {dataset: MRI-CGCM3} - - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} - - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} scripts: + latlon: + <<: *latlon_settings + # Add global average to the plot + show_global_avg: false + # Contour levels for absolute plot + abs_levs: [-100, -90, -80, -70, -60, -50, -40, -30, -20, -10, 0] + # Contour levels for difference plot + diff_levs: [-30, -25, -20, -15, -10, -5, 0, 5, 10, 15, 20, 25, 30] grading: <<: *grading_settings - # Metric ('RMSD', 'BIAS', taylor') - metric: [RMSD] - # Normalization ('mean', 'median', 'centered_median', 'none') - normalization: [centered_median] - ### zg: GEOPOTENTIAL HEIGHT ################################################# - zg500: - description: Geopotential height 500 hPa global. 
+ ### rlut: ALL-SKY LONGWAVE RADIATION ######################################## + rlut: + description: All-sky longwave radiation themes: - phys realms: - atmos variables: - zg: - preprocessor: pp500 - reference_dataset: ERA-Interim - alternative_dataset: NCEP + rlut: + preprocessor: ppNOLEV1 + reference_dataset: CERES-EBAF mip: Amon project: CMIP5 exp: historical ensemble: r1i1p1 - start_year: 2000 - end_year: 2002 + start_year: 2001 + end_year: 2003 additional_datasets: - {dataset: ACCESS1-0} - {dataset: ACCESS1-3} @@ -948,14 +924,11 @@ diagnostics: - {dataset: CESM1-BGC} - {dataset: CESM1-CAM5} - {dataset: CESM1-CAM5-1-FV2} - - {dataset: CESM1-FASTCHEM} - - {dataset: CESM1-WACCM} - - {dataset: CMCC-CESM} - {dataset: CMCC-CM} - {dataset: CMCC-CMS} - {dataset: CNRM-CM5} - - {dataset: CNRM-CM5-2} - {dataset: CSIRO-Mk3-6-0} + - {dataset: EC-EARTH, ensemble: r6i1p1} - {dataset: FGOALS-g2} - {dataset: FGOALS-s2} - {dataset: FIO-ESM} @@ -964,8 +937,7 @@ diagnostics: - {dataset: GFDL-ESM2G} - {dataset: GFDL-ESM2M} - {dataset: GISS-E2-H, ensemble: r1i1p2} - - {dataset: GISS-E2-H-CC} - - {dataset: GISS-E2-R-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} - {dataset: HadCM3} - {dataset: HadGEM2-AO} - {dataset: HadGEM2-CC} @@ -980,22 +952,16 @@ diagnostics: - {dataset: MIROC-ESM-CHEM} - {dataset: MPI-ESM-LR} - {dataset: MPI-ESM-MR} - - {dataset: MPI-ESM-P} - {dataset: MRI-CGCM3} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} - - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} scripts: grading: <<: *grading_settings - # Metric ('RMSD', 'BIAS', taylor') - metric: [RMSD] - # Normalization ('mean', 'median', 'centered_median', 'none') - normalization: [centered_median] - ### hus: SPECIFIC HUMIDITY ################################################## + ### hus: SPECIFIC HUMIDITY (400 hPa) ######################################## hus400: description: Specific humidity at 400 hPa global. 
themes: @@ -1005,7 +971,7 @@ diagnostics: variables: hus: preprocessor: pp400 - reference_dataset: AIRS + reference_dataset: AIRS-2-1 alternative_dataset: ERA-Interim mip: Amon project: CMIP5 @@ -1054,29 +1020,89 @@ diagnostics: - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: AIRS, project: obs4mips, level: L3, version: RetStd-v5, tier: 1} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + - {dataset: AIRS-2-1, project: obs4MIPs, level: L3, tier: 1} + - {dataset: ERA-Interim, project: OBS6, + type: reanaly, version: 1, tier: 3} scripts: grading: <<: *grading_settings - # Metric ('RMSD', 'BIAS', taylor') - metric: [RMSD] - # Normalization ('mean', 'median', 'centered_median', 'none') - normalization: [centered_median] - ### tas: NEAR-SURFACE TEMPERATURE ########################################### - tas: - description: Near-surface air temperature + ### rsut: ALL-SKY SHORTWAVE RADIATION ####################################### + rsut: + description: All-sky shortwave radiation themes: - phys realms: - atmos variables: - tas: - preprocessor: ppNOLEV2 + rsut: + preprocessor: ppNOLEV1 + reference_dataset: CERES-EBAF + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2001 + end_year: 2003 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-s2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} + scripts: + grading: + <<: *grading_settings + + + ### ua: EASTWARD WIND (200 hPa) ############################################# + ua200: + description: Eastward wind at 200 hPa global + themes: + - atmDyn + realms: + - atmos + variables: + ua: + preprocessor: pp200 reference_dataset: ERA-Interim - alternative_dataset: NCEP + alternative_dataset: NCEP-NCAR-R1 mip: Amon project: CMIP5 exp: historical @@ -1105,7 +1131,6 @@ diagnostics: - {dataset: CSIRO-Mk3-6-0} - {dataset: EC-EARTH, ensemble: r6i1p1} - {dataset: FGOALS-g2} - - {dataset: FGOALS-s2} - {dataset: FIO-ESM} - {dataset: GFDL-CM2p1} - {dataset: GFDL-CM3} @@ -1134,50 +1159,27 @@ diagnostics: - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} - - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, + version: 1, tier: 2} scripts: - latlon: &latlon_settings - script: perfmetrics/main.ncl - # Plot type ('cycle', 'zonal', 'latlon', 
'cycle_latlon') - plot_type: latlon - # Time average ('opt' argument of time_operations.ncl) - time_avg: annualclim - # Region ('Global', 'Tropics', 'NH extratropics', 'SH extratropics') - region: Global - # Draw difference plots - plot_diff: true - # Calculate t-test in difference plots - t_test: true - # Confidence level for the t-test - conf_level: 0.95 - # Contour levels for absolute plot - abs_levs: [240, 243, 246, 249, 252, 255, 258, - 261, 264, 267, 270, 273, 276, 279, - 282, 285, 288, 291, 294, 297, 300] - # Contour levels for difference plot - diff_levs: [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5] grading: <<: *grading_settings - # Metric ('RMSD', 'BIAS', taylor') - metric: [RMSD] - # Normalization ('mean', 'median', 'centered_median', 'none') - normalization: [centered_median] - ### ts: SEA-SURFACE (SKIN) TEMPERATURE ###################################### - ts: - description: Sea-surface (skin) temperature + ### va: NORTHWARD WIND (850 hPa) ############################################ + va850: + description: Northward wind at 850 hPa global. themes: - - phys + - atmDyn realms: - atmos - - ocean variables: - ts: - preprocessor: ppNOLEV1x1 - reference_dataset: ESACCI-SST - alternative_dataset: HadISST + va: + preprocessor: pp850 + reference_dataset: ERA-Interim + alternative_dataset: NCEP-NCAR-R1 mip: Amon project: CMIP5 exp: historical @@ -1195,11 +1197,15 @@ diagnostics: - {dataset: CCSM4} - {dataset: CESM1-BGC} - {dataset: CESM1-CAM5} - - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} - {dataset: CMCC-CM} - {dataset: CMCC-CMS} - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} - {dataset: CSIRO-Mk3-6-0} + - {dataset: EC-EARTH, ensemble: r6i1p1} - {dataset: FGOALS-g2} - {dataset: FIO-ESM} - {dataset: GFDL-CM2p1} @@ -1224,31 +1230,32 @@ diagnostics: - {dataset: MIROC-ESM-CHEM} - {dataset: MPI-ESM-LR} - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: ESACCI-SST, project: OBS, type: sat, version: L4-GHRSST-SSTdepth-OSTIA-GLOB, tier: 2} - - {dataset: HadISST, project: OBS, type: reanaly, version: 1, tier: 2} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, + version: 1, tier: 2} scripts: grading: <<: *grading_settings - # Metric ('RMSD', 'BIAS', taylor') - metric: [RMSD] - # Normalization ('mean', 'median', 'centered_median', 'none') - normalization: [centered_median] - ### pr: PRECIPITATION ####################################################### - pr: - description: Precipitation + ### ua: EASTWARD WIND (850 hPa) ############################################# + ua850: + description: Eastward wind at 850 hPa global. 
themes: - - phys + - atmDyn realms: - atmos variables: - pr: - preprocessor: ppNOLEV1 - reference_dataset: GPCP-SG + ua: + preprocessor: pp850 + reference_dataset: ERA-Interim + alternative_dataset: NCEP-NCAR-R1 mip: Amon project: CMIP5 exp: historical @@ -1305,28 +1312,27 @@ diagnostics: - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: GPCP-SG, project: obs4mips, level: L3, version: v2.2, tier: 1} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, + version: 1, tier: 2} scripts: grading: <<: *grading_settings - # Metric ('RMSD', 'BIAS', taylor') - metric: [RMSD] - # Normalization ('mean', 'median', 'centered_median', 'none') - normalization: [centered_median] - ### clt: TOTAL CLOUD COVER ################################################## - clt: - description: Total cloud cover + ### va: NORTHWARD WIND (200 hPa) ############################################ + va200: + description: Northward wind at 200 hPa global. themes: - - clouds + - atmDyn realms: - atmos variables: - clt: - preprocessor: ppNOLEV2 - reference_dataset: ESACCI-CLOUD - alternative_dataset: PATMOS-x + va: + preprocessor: pp200 + reference_dataset: ERA-Interim + alternative_dataset: NCEP-NCAR-R1 mip: Amon project: CMIP5 exp: historical @@ -1339,18 +1345,23 @@ diagnostics: - {dataset: bcc-csm1-1} - {dataset: bcc-csm1-1-m} - {dataset: BNU-ESM} + - {dataset: CanCM4} - {dataset: CanESM2} - {dataset: CCSM4} - {dataset: CESM1-BGC} - {dataset: CESM1-CAM5} - - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} - {dataset: CMCC-CM} - {dataset: CMCC-CMS} - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} - {dataset: CSIRO-Mk3-6-0} - {dataset: EC-EARTH, ensemble: r6i1p1} - {dataset: FGOALS-g2} - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} - {dataset: GFDL-CM3} - {dataset: GFDL-ESM2G} - {dataset: GFDL-ESM2M} @@ -1372,45 +1383,38 @@ diagnostics: - {dataset: MIROC-ESM-CHEM} - {dataset: MPI-ESM-LR} - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: ESACCI-CLOUD, project: OBS, type: sat, version: AVHRR-fv3.0, tier: 2} - - {dataset: PATMOS-x, project: OBS, type: sat, version: NOAA, tier: 2} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, + version: 1, tier: 2} scripts: - latlon: - <<: *latlon_settings - # Add global average to the plot - show_global_avg: true - # Contour levels for absolute plot - abs_levs: [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100] - # Contour levels for difference plot - diff_levs: [-30, -25, -20, -15, -10, -5, 0, 5, 10, 15, 20, 25, 30] grading: <<: *grading_settings - # Metric ('RMSD', 'BIAS', taylor') - metric: [RMSD] - # Normalization ('mean', 'median', 'centered_median', 'none') - normalization: [centered_median] - ### rlut: ALL-SKY LONGWAVE RADIATION ######################################## - rlut: - description: All-sky longwave radiation + ### ta: AIR TEMPERATURE (5 hPa) ############################################# + ta5: + description: Air temperature at 5 hPa global. 
themes: - phys realms: - atmos variables: - rlut: - preprocessor: ppNOLEV1 - reference_dataset: CERES-EBAF + ta: + preprocessor: pp5 + reference_dataset: ERA-Interim + alternative_dataset: NCEP-NCAR-R1 mip: Amon project: CMIP5 exp: historical ensemble: r1i1p1 - start_year: 2001 - end_year: 2003 + start_year: 2000 + end_year: 2002 additional_datasets: - {dataset: ACCESS1-0} - {dataset: ACCESS1-3} @@ -1423,9 +1427,13 @@ diagnostics: - {dataset: CESM1-BGC} - {dataset: CESM1-CAM5} - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} - {dataset: CMCC-CM} - {dataset: CMCC-CMS} - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} - {dataset: CSIRO-Mk3-6-0} - {dataset: EC-EARTH, ensemble: r6i1p1} - {dataset: FGOALS-g2} @@ -1436,7 +1444,9 @@ diagnostics: - {dataset: GFDL-ESM2G} - {dataset: GFDL-ESM2M} - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} - {dataset: HadCM3} - {dataset: HadGEM2-AO} - {dataset: HadGEM2-CC} @@ -1451,51 +1461,72 @@ diagnostics: - {dataset: MIROC-ESM-CHEM} - {dataset: MPI-ESM-LR} - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, tier: 1} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, + version: 1, tier: 2} scripts: - grading: - <<: *grading_settings - # Metric ('RMSD', 'BIAS', taylor') - metric: [RMSD] - # Normalization ('mean', 'median', 'centered_median', 'none') - normalization: [centered_median] + cycle: &cycle_settings + script: perfmetrics/main.ncl + # Plot type ('cycle', 'zonal', 'latlon', 'cycle_latlon', 'cycle_zonal') + plot_type: cycle + # Time average ('opt' argument of time_operations.ncl) + time_avg: monthlyclim + # Region ('global', 'trop', 'nhext', 'shext') + region: global + # Plot standard deviation ('all', 'none', 'ref_model' or dataset name) + plot_stddev: ref_model + # Plot legend in a separate file + legend_outside: true + # Plot style + styleset: CMIP5 - ### rsut: ALL-SKY SHORTWAVE RADIATION ####################################### - rsut: - description: All-sky shortwave radiation + ### ta: AIR TEMPERATURE (850 hPa) ########################################### + ta850: + description: Air temperature at 850 hPa global. 
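# The pp850/pp200/pp30/pp5 preprocessors referenced by these diagnostics
# are defined earlier in this recipe (outside this hunk); a minimal
# sketch, assuming pp850 simply extracts the 850 hPa level and regrids to
# the reference dataset, would be:
#   pp850:
#     extract_levels:
#       levels: 85000
#       scheme: linear
#     regrid:
#       target_grid: reference_dataset
#       scheme: linear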
themes: - phys realms: - atmos variables: - rsut: - preprocessor: ppNOLEV1 - reference_dataset: CERES-EBAF + ta: + preprocessor: pp850 + reference_dataset: ERA-Interim + alternative_dataset: NCEP-NCAR-R1 mip: Amon project: CMIP5 exp: historical ensemble: r1i1p1 - start_year: 2001 - end_year: 2003 + start_year: 2000 + end_year: 2002 additional_datasets: - {dataset: ACCESS1-0} - {dataset: ACCESS1-3} - {dataset: bcc-csm1-1} - {dataset: bcc-csm1-1-m} - {dataset: BNU-ESM} + - {dataset: CanCM4} - {dataset: CanESM2} - {dataset: CCSM4} - {dataset: CESM1-BGC} - {dataset: CESM1-CAM5} - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} - {dataset: CMCC-CM} - {dataset: CMCC-CMS} - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} - {dataset: CSIRO-Mk3-6-0} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} - {dataset: FGOALS-s2} - {dataset: FIO-ESM} - {dataset: GFDL-CM2p1} @@ -1503,7 +1534,9 @@ diagnostics: - {dataset: GFDL-ESM2G} - {dataset: GFDL-ESM2M} - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} - {dataset: HadCM3} - {dataset: HadGEM2-AO} - {dataset: HadGEM2-CC} @@ -1518,61 +1551,76 @@ diagnostics: - {dataset: MIROC-ESM-CHEM} - {dataset: MPI-ESM-LR} - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, tier: 1} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, + version: 1, tier: 2} scripts: + cycle: + <<: *cycle_settings grading: <<: *grading_settings # Metric ('RMSD', 'BIAS', taylor') - metric: [RMSD] + metric: [RMSD, taylor] # Normalization ('mean', 'median', 'centered_median', 'none') - normalization: [centered_median] + normalization: [centered_median, none] - ### lwcre: LONGWAVE CLOUD FORCING ########################################### - lwcre: - description: Longwave cloud radiative effect + ### ta: AIR TEMPERATURE (30 hPa) ############################################ + ta30: + description: Air temperature at 30 hPa global. 
themes: - - clouds + - phys realms: - atmos variables: - lwcre: - preprocessor: ppNOLEV1 - reference_dataset: CERES-EBAF + ta: + preprocessor: pp30 + reference_dataset: ERA-Interim + alternative_dataset: NCEP-NCAR-R1 mip: Amon - derive: true - force_derivation: false project: CMIP5 exp: historical ensemble: r1i1p1 - start_year: 2001 - end_year: 2003 + start_year: 2000 + end_year: 2002 additional_datasets: - {dataset: ACCESS1-0} - {dataset: ACCESS1-3} - {dataset: bcc-csm1-1} - {dataset: bcc-csm1-1-m} - {dataset: BNU-ESM} + - {dataset: CanCM4} - {dataset: CanESM2} - {dataset: CCSM4} - {dataset: CESM1-BGC} - {dataset: CESM1-CAM5} - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} - {dataset: CMCC-CM} - {dataset: CMCC-CMS} - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} - {dataset: CSIRO-Mk3-6-0} + - {dataset: EC-EARTH, ensemble: r6i1p1} - {dataset: FGOALS-g2} - {dataset: FGOALS-s2} - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} - {dataset: GFDL-CM3} - {dataset: GFDL-ESM2G} - {dataset: GFDL-ESM2M} - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} - {dataset: HadCM3} - {dataset: HadGEM2-AO} - {dataset: HadGEM2-CC} @@ -1587,61 +1635,70 @@ diagnostics: - {dataset: MIROC-ESM-CHEM} - {dataset: MPI-ESM-LR} - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, tier: 1} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, + version: 1, tier: 2} scripts: - latlon: - <<: *latlon_settings - # Contour levels for absolute plot - abs_levs: [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100] - # Contour levels for difference plot - diff_levs: [-30, -25, -20, -15, -10, -5, 0, 5, 10, 15, 20, 25, 30] - grading: - <<: *grading_settings + cycle: + <<: *cycle_settings - ### swcre: SHORTWAVE CLOUD FORCING ########################################## - swcre: - description: Shortwave cloud radiative effect + ### ta: AIR TEMPERATURE (200 hPa) ########################################### + ta200: + description: Air temperature at 200 hPa global. 
themes: - - clouds + - phys realms: - atmos variables: - swcre: - preprocessor: ppNOLEV1 - reference_dataset: CERES-EBAF + ta: + preprocessor: pp200 + reference_dataset: ERA-Interim + alternative_dataset: NCEP-NCAR-R1 mip: Amon - derive: true - force_derivation: false project: CMIP5 exp: historical ensemble: r1i1p1 - start_year: 2001 - end_year: 2003 + start_year: 2000 + end_year: 2002 additional_datasets: - {dataset: ACCESS1-0} - {dataset: ACCESS1-3} - {dataset: bcc-csm1-1} - {dataset: bcc-csm1-1-m} - {dataset: BNU-ESM} + - {dataset: CanCM4} - {dataset: CanESM2} - {dataset: CCSM4} - {dataset: CESM1-BGC} - {dataset: CESM1-CAM5} - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} - {dataset: CSIRO-Mk3-6-0} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} - {dataset: FGOALS-s2} - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} - {dataset: GFDL-CM3} - {dataset: GFDL-ESM2G} - {dataset: GFDL-ESM2M} - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} - {dataset: HadCM3} - {dataset: HadGEM2-AO} - {dataset: HadGEM2-CC} @@ -1656,28 +1713,29 @@ diagnostics: - {dataset: MIROC-ESM-CHEM} - {dataset: MPI-ESM-LR} - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, tier: 1} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, + version: 1, tier: 2} scripts: - latlon: - <<: *latlon_settings - # Contour levels for absolute plot - abs_levs: [-100, -90, -80, -70, -60, -50, -40, -30, -20, -10, 0] - # Contour levels for difference plot - diff_levs: [-30, -25, -20, -15, -10, -5, 0, 5, 10, 15, 20, 25, 30] + cycle: + <<: *cycle_settings grading: <<: *grading_settings - # Metric ('RMSD', 'BIAS', taylor') - metric: [RMSD] - # Normalization ('mean', 'median', 'centered_median', 'none') - normalization: [centered_median] ### od550aer: AEROSOL OPTICAL DEPTH AT 550 nm ############################### od550aer: description: Aerosol optical depth at 550 nm + themes: + - aerosols + realms: + - atmos variables: od550aer: preprocessor: ppNOLEV2thr10 @@ -1710,20 +1768,21 @@ diagnostics: - {dataset: MRI-CGCM3} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: ESACCI-AEROSOL, project: OBS, type: sat, version: SU-v4.21, tier: 2} + - {dataset: ESACCI-AEROSOL, project: OBS, type: sat, + version: SU-v4.21, tier: 2} - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, tier: 3} scripts: grading: <<: *grading_settings - # Metric ('RMSD', 'BIAS', taylor') - metric: [RMSD] - # Normalization ('mean', 'median', 'centered_median', 'none') - normalization: [centered_median] ### od870aer: AEROSOL OPTICAL DEPTH AT 870 nm ############################### od870aer: description: Aerosol optical depth at 870 nm + themes: + - aerosols + realms: + - atmos variables: od870aer: preprocessor: ppNOLEV1thr10 @@ -1747,19 +1806,20 @@ diagnostics: - {dataset: MIROC-ESM} - {dataset: MIROC-ESM-CHEM} - {dataset: MRI-CGCM3} - - {dataset: ESACCI-AEROSOL, project: OBS, type: sat, version: SU-v4.21, tier: 2} + - {dataset: ESACCI-AEROSOL, project: OBS, type: sat, + version: SU-v4.21, tier: 2} scripts: grading: <<: *grading_settings - # Metric ('RMSD', 
'BIAS', taylor') - metric: [RMSD] - # Normalization ('mean', 'median', 'centered_median', 'none') - normalization: [centered_median] ### abs550aer: ABSORPTION OPTICAL DEPTH AT 550 nm ########################### abs550aer: description: Absorption optical depth at 550 nm + themes: + - aerosols + realms: + - atmos variables: abs550aer: preprocessor: ppNOLEV1thr10 @@ -1783,19 +1843,20 @@ diagnostics: - {dataset: MIROC-ESM-CHEM} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: ESACCI-AEROSOL, project: OBS, type: sat, version: SU-v4.21, tier: 2} + - {dataset: ESACCI-AEROSOL, project: OBS, type: sat, + version: SU-v4.21, tier: 2} scripts: grading: <<: *grading_settings - # Metric ('RMSD', 'BIAS', taylor') - metric: [RMSD] - # Normalization ('mean', 'median', 'centered_median', 'none') - normalization: [centered_median] ### od550lt1aer: FINE MODE AEROSOL OPTICAL DEPTH AT 550 nm ################## od550lt1aer: description: Fine mode optical depth at 550 nm + themes: + - aerosols + realms: + - atmos variables: od550lt1aer: preprocessor: ppNOLEV1thr10 @@ -1818,19 +1879,21 @@ diagnostics: - {dataset: MIROC-ESM} - {dataset: MIROC-ESM-CHEM} - {dataset: MRI-CGCM3} - - {dataset: ESACCI-AEROSOL, project: OBS, type: sat, version: SU-v4.21, tier: 2} + - {dataset: ESACCI-AEROSOL, project: OBS, type: sat, + version: SU-v4.21, tier: 2} scripts: grading: <<: *grading_settings - # Metric ('RMSD', 'BIAS', taylor') - metric: [RMSD] - # Normalization ('mean', 'median', 'centered_median', 'none') - normalization: [centered_median] ### toz: TOTAL COLUMN OZONE ################################################# toz: description: Total column ozone + themes: + - chem + - ghg + realms: + - atmosChem variables: toz: preprocessor: ppNOLEV2thr10 @@ -1856,77 +1919,10 @@ diagnostics: scripts: grading_global: <<: *grading_settings - # Metric ('RMSD', 'BIAS', taylor') - metric: [RMSD] - # Normalization ('mean', 'median', 'centered_median', 'none') - normalization: [centered_median] - grading_antarctic: + grading_shpolar: <<: *grading_settings # Region - region: Antarctic - # Metric ('RMSD', 'BIAS', taylor') - metric: [RMSD] - # Normalization ('mean', 'median', 'centered_median', 'none') - normalization: [centered_median] - - ### sic: SEA-ICE CONCENTRATION (NH) ######################################### - - ### sic: SEA-ICE CONCENTRATION (SH) ######################################### - - ### sm: SOIL MOISTURE ####################################################### - sm: - description: Soil moisture - variables: - sm: - preprocessor: ppNOLEV1thr10 - reference_dataset: ESACCI-SOILMOISTURE - mip: Lmon - derive: true - force_derivation: false - project: CMIP5 - exp: historical - ensemble: r1i1p1 - start_year: 2002 - end_year: 2004 - additional_datasets: - - {dataset: ACCESS1-0} - - {dataset: ACCESS1-3} - - {dataset: bcc-csm1-1} - - {dataset: CanCM4} - - {dataset: CanESM2} - - {dataset: CCSM4} - - {dataset: CESM1-BGC} - - {dataset: CESM1-CAM5} - - {dataset: CESM1-FASTCHEM} - - {dataset: CESM1-WACCM} - - {dataset: CNRM-CM5 } - - {dataset: CSIRO-Mk3-6-0} - - {dataset: FGOALS-g2} - - {dataset: FGOALS-s2} - - {dataset: GFDL-ESM2G} - - {dataset: GFDL-ESM2M} - - {dataset: HadCM3} - - {dataset: HadGEM2-CC} - - {dataset: HadGEM2-ES} - - {dataset: inmcm4} - - {dataset: IPSL-CM5A-LR} - - {dataset: IPSL-CM5A-MR} - - {dataset: IPSL-CM5B-LR} - - {dataset: MIROC4h} - - {dataset: MIROC5} - - {dataset: MIROC-ESM} - - {dataset: MIROC-ESM-CHEM} - - {dataset: MRI-CGCM3} - - {dataset: NorESM1-M} - - {dataset: NorESM1-ME} - - {dataset: 
ESACCI-SOILMOISTURE, project: OBS, type: sat, version: L3S-SSMV-COMBINED-v4.2, tier: 2} - scripts: - grading: - <<: *grading_settings - # Metric ('RMSD', 'BIAS', taylor') - metric: [RMSD] - # Normalization ('mean', 'median', 'centered_median', 'none') - normalization: [centered_median] + region: shpolar ### COLLECT METRICS ######################################################### @@ -1943,6 +1939,16 @@ diagnostics: cm_interval: [2, 241] # Sort dataset in alphabetic order (excluding MMM) sort: true + # Sort diagnostics in a specific order (name = 'diagnostic'-'region') + diag_order: ['ta850-global', 'ta200-global', 'ua850-global', + 'ua200-global', 'va850-global', 'va200-global', + 'zg500-global', 'hus400-global', 'tas-global', + 'ts-global', 'pr-global', 'clt-global', 'rlut-global', + 'rsut-global', 'lwcre-global', 'swcre-global', + 'od550aer-global', 'od870aer-global', 'abs550aer-global', +# 'od550lt1aer-global', 'toz-global', 'toz-shpolar', + 'toz-global', 'toz-shpolar', + 'sm-global'] taylor: script: perfmetrics/collect.ncl ancestors: ['*/grading'] diff --git a/esmvaltool/recipes/recipe_perfmetrics_CMIP5_4cds.yml b/esmvaltool/recipes/recipe_perfmetrics_CMIP5_4cds.yml index 947958e583..6e80cef108 100644 --- a/esmvaltool/recipes/recipe_perfmetrics_CMIP5_4cds.yml +++ b/esmvaltool/recipes/recipe_perfmetrics_CMIP5_4cds.yml @@ -1,19 +1,21 @@ # ESMValTool -# recipe_perfmetrics_CMIP5.yml +# recipe_perfmetrics_CMIP5_4cds.yml --- documentation: + title: Performance metrics for essential climate variables in CMIP5 + description: | Recipe for plotting the performance metrics for the CMIP5 datasets, including the standard ECVs as in Gleckler et al., and some additional variables (like ozone, sea-ice, aerosol...) authors: - - fran_fr - - righ_ma - - eyri_ve + - winterstein_franziska + - righi_mattia + - eyring_veronika maintainer: - - righ_ma + - righi_mattia references: - gleckler08jgr @@ -193,9 +195,8 @@ diagnostics: ta: preprocessor: pp850 reference_dataset: ERA-Interim - alternative_dataset: NCEP + alternative_dataset: NCEP-NCAR-R1 mip: Amon - field: T3M project: CMIP5 exp: historical ensemble: r1i1p1 @@ -252,17 +253,19 @@ diagnostics: - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} - - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, + version: 1, tier: 2} scripts: cycle: &cycle_settings script: perfmetrics/main.ncl - # Plot type ('cycle', 'zonal', 'latlon', 'cycle_latlon') + # Plot type ('cycle', 'zonal', 'latlon', 'cycle_latlon', 'cycle_zonal') plot_type: cycle # Time average ('opt' argument of time_operations.ncl) time_avg: monthlyclim - # Region ('Global', 'Tropics', 'NH extratropics', 'SH extratropics') - region: Global + # Region ('global', 'trop', 'nhext', 'shext') + region: global # Plot standard deviation ('all', 'none', 'ref_model' or dataset name) plot_stddev: ref_model # Plot legend in a separate file @@ -271,10 +274,8 @@ diagnostics: styleset: CMIP5 grading: &grading_settings <<: *cycle_settings - # Plot type ('cycle', 'zonal', 'latlon', 'cycle_latlon') + # Plot type ('cycle', 'zonal', 'latlon', 'cycle_latlon', 'cycle_zonal') plot_type: cycle_latlon - # Draw plots - draw_plots: false # Calculate grading calc_grading: true # Metric ('RMSD', 'BIAS', taylor') @@ -293,9 +294,8 @@ diagnostics: ta: preprocessor: pp200 reference_dataset: 
ERA-Interim - alternative_dataset: NCEP + alternative_dataset: NCEP-NCAR-R1 mip: Amon - field: T3M project: CMIP5 exp: historical ensemble: r1i1p1 @@ -352,8 +352,10 @@ diagnostics: - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} - - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, + version: 1, tier: 2} scripts: cycle: <<: *cycle_settings @@ -375,9 +377,8 @@ diagnostics: ta: preprocessor: pp30 reference_dataset: ERA-Interim - alternative_dataset: NCEP + alternative_dataset: NCEP-NCAR-R1 mip: Amon - field: T3M project: CMIP5 exp: historical ensemble: r1i1p1 @@ -434,8 +435,10 @@ diagnostics: - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} - - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, + version: 1, tier: 2} scripts: cycle: <<: *cycle_settings @@ -451,9 +454,8 @@ diagnostics: ta: preprocessor: pp5 reference_dataset: ERA-Interim - alternative_dataset: NCEP + alternative_dataset: NCEP-NCAR-R1 mip: Amon - field: T3M project: CMIP5 exp: historical ensemble: r1i1p1 @@ -510,8 +512,10 @@ diagnostics: - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} - - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, + version: 1, tier: 2} scripts: cycle: <<: *cycle_settings @@ -527,9 +531,8 @@ diagnostics: ta: preprocessor: ppALL reference_dataset: ERA-Interim - alternative_dataset: NCEP + alternative_dataset: NCEP-NCAR-R1 mip: Amon - field: T3M project: CMIP5 exp: historical ensemble: r1i1p1 @@ -586,17 +589,19 @@ diagnostics: - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} - - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, + version: 1, tier: 2} scripts: zonal: &zonal_settings script: perfmetrics/main.ncl - # Plot type ('cycle', 'zonal', 'latlon', 'cycle_latlon') + # Plot type ('cycle', 'zonal', 'latlon', 'cycle_latlon', 'cycle_zonal') plot_type: zonal # Time average ('opt' argument of time_operations.ncl) time_avg: annualclim - # Region ('Global', 'Tropics', 'NH extratropics', 'SH extratropics') - region: Global + # Region ('global', 'trop', 'nhext', 'shext') + region: global # Draw difference plots plot_diff: true # Calculate t-test in difference plots @@ -622,9 +627,8 @@ diagnostics: ua: preprocessor: pp850 reference_dataset: ERA-Interim - alternative_dataset: NCEP + alternative_dataset: NCEP-NCAR-R1 mip: Amon - field: T3M project: CMIP5 exp: historical ensemble: r1i1p1 @@ -680,8 +684,10 @@ diagnostics: - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} - - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 
2} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, + version: 1, tier: 2} scripts: grading: <<: *grading_settings @@ -701,9 +707,8 @@ diagnostics: ua: preprocessor: pp200 reference_dataset: ERA-Interim - alternative_dataset: NCEP + alternative_dataset: NCEP-NCAR-R1 mip: Amon - field: T3M project: CMIP5 exp: historical ensemble: r1i1p1 @@ -759,8 +764,10 @@ diagnostics: - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} - - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, + version: 1, tier: 2} scripts: grading: <<: *grading_settings @@ -781,9 +788,8 @@ diagnostics: va: preprocessor: pp850 reference_dataset: ERA-Interim - alternative_dataset: NCEP + alternative_dataset: NCEP-NCAR-R1 mip: Amon - field: T3M project: CMIP5 exp: historical ensemble: r1i1p1 @@ -838,8 +844,10 @@ diagnostics: - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} - - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, + version: 1, tier: 2} scripts: grading: <<: *grading_settings @@ -859,9 +867,8 @@ diagnostics: va: preprocessor: pp200 reference_dataset: ERA-Interim - alternative_dataset: NCEP + alternative_dataset: NCEP-NCAR-R1 mip: Amon - field: T3M project: CMIP5 exp: historical ensemble: r1i1p1 @@ -916,8 +923,10 @@ diagnostics: - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} - - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, + version: 1, tier: 2} scripts: grading: <<: *grading_settings @@ -938,9 +947,8 @@ diagnostics: zg: preprocessor: pp500 reference_dataset: ERA-Interim - alternative_dataset: NCEP + alternative_dataset: NCEP-NCAR-R1 mip: Amon - field: T3M project: CMIP5 exp: historical ensemble: r1i1p1 @@ -994,8 +1002,10 @@ diagnostics: - {dataset: MRI-CGCM3} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} - - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, + version: 1, tier: 2} scripts: grading: <<: *grading_settings @@ -1015,10 +1025,9 @@ diagnostics: variables: hus: preprocessor: pp400 - reference_dataset: AIRS + reference_dataset: AIRS-2-1 alternative_dataset: ERA-Interim mip: Amon - field: T3M project: CMIP5 exp: historical ensemble: r1i1p1 @@ -1065,8 +1074,9 @@ diagnostics: - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: AIRS, project: obs4mips, level: L3, version: RetStd-v5, tier: 1} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + - {dataset: AIRS-2-1, project: obs4MIPs, level: L3, tier: 1} + - {dataset: ERA-Interim, project: OBS6, + type: reanaly, version: 1, tier: 3} scripts: grading: 
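# The &cycle_settings/&grading_settings anchors and the `<<:` merge keys
# used throughout these recipes are standard YAML: the settings are
# defined once under an anchor and merged into each later script, with
# local keys overriding merged ones. A minimal sketch of the pattern
# (hypothetical script names, for illustration only):
#   scripts:
#     grading: &grading_settings
#       script: perfmetrics/main.ncl
#       metric: [RMSD]
#     grading_taylor:
#       <<: *grading_settings    # reuse every key from the anchor
#       metric: [RMSD, taylor]   # override only this key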
<<: *grading_settings @@ -1087,9 +1097,8 @@ diagnostics: tas: preprocessor: ppNOLEV2 reference_dataset: ERA-Interim - alternative_dataset: NCEP + alternative_dataset: NCEP-NCAR-R1 mip: Amon - field: T2Ms project: CMIP5 exp: historical ensemble: r1i1p1 @@ -1106,7 +1115,7 @@ diagnostics: - {dataset: CCSM4} - {dataset: CESM1-BGC} - {dataset: CESM1-CAM5} - - {dataset: CESM1-CAM5-1-FV2} + # - {dataset: CESM1-CAM5-1-FV2} # data is missing on ESGF - {dataset: CESM1-FASTCHEM} - {dataset: CESM1-WACCM} - {dataset: CMCC-CESM} @@ -1146,17 +1155,19 @@ diagnostics: - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} - - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, + version: 1, tier: 3} + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, + version: 1, tier: 2} scripts: latlon: &latlon_settings script: perfmetrics/main.ncl - # Plot type ('cycle', 'zonal', 'latlon', 'cycle_latlon') + # Plot type ('cycle', 'zonal', 'latlon', 'cycle_latlon', 'cycle_zonal') plot_type: latlon # Time average ('opt' argument of time_operations.ncl) time_avg: annualclim - # Region ('Global', 'Tropics', 'NH extratropics', 'SH extratropics') - region: Global + # Region ('global', 'trop', 'nhext', 'shext') + region: global # Draw difference plots plot_diff: true # Calculate t-test in difference plots @@ -1191,7 +1202,6 @@ diagnostics: reference_dataset: ESACCI-SST alternative_dataset: HadISST mip: Amon - field: T2Ms project: CMIP5 exp: historical ensemble: r1i1p1 @@ -1240,7 +1250,7 @@ diagnostics: - {dataset: MRI-CGCM3} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: ESACCI-SST, project: OBS, type: sat, version: L4-GHRSST-SSTdepth-OSTIA-GLOB, tier: 2} + - {dataset: ESACCI-SST, project: OBS, type: sat, version: 2.2, tier: 2} - {dataset: HadISST, project: OBS, type: reanaly, version: 1, tier: 2} scripts: grading: @@ -1261,9 +1271,8 @@ diagnostics: variables: pr: preprocessor: ppNOLEV1 - reference_dataset: GPCP-SG + reference_dataset: GPCP-V2.2 mip: Amon - field: T2Ms project: CMIP5 exp: historical ensemble: r1i1p1 @@ -1319,7 +1328,7 @@ diagnostics: - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: GPCP-SG, project: obs4mips, level: L3, version: v2.2, tier: 1} + - {dataset: GPCP-V2.2, project: obs4MIPs, level: L3, tier: 1} scripts: grading: <<: *grading_settings @@ -1342,7 +1351,6 @@ diagnostics: reference_dataset: ESACCI-CLOUD alternative_dataset: PATMOS-x mip: Amon - field: T2Ms project: CMIP5 exp: historical ensemble: r1i1p1 @@ -1390,7 +1398,7 @@ diagnostics: - {dataset: MRI-CGCM3} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: ESACCI-CLOUD, project: OBS, type: sat, version: AVHRR-fv3.0, tier: 2} + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, version: AVHRR-AMPM-fv3.0, tier: 2} - {dataset: PATMOS-x, project: OBS, type: sat, version: NOAA, tier: 2} scripts: latlon: @@ -1421,7 +1429,6 @@ diagnostics: preprocessor: ppNOLEV1 reference_dataset: CERES-EBAF mip: Amon - field: T2Ms project: CMIP5 exp: historical ensemble: r1i1p1 @@ -1470,7 +1477,7 @@ diagnostics: - {dataset: MRI-CGCM3} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} scripts: grading: <<: *grading_settings @@ -1492,7 +1499,6 @@ diagnostics: preprocessor: ppNOLEV1 reference_dataset: 
CERES-EBAF mip: Amon - field: T2Ms project: CMIP5 exp: historical ensemble: r1i1p1 @@ -1538,7 +1544,7 @@ diagnostics: - {dataset: MRI-CGCM3} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} scripts: grading: <<: *grading_settings @@ -1560,7 +1566,6 @@ diagnostics: # preprocessor: ppNOLEV1 # reference_dataset: CERES-EBAF # mip: Amon - # field: T2Ms # derive: true # force_derivation: false # project: CMIP5 @@ -1608,7 +1613,7 @@ diagnostics: # - {dataset: MRI-CGCM3} # - {dataset: NorESM1-M} # - {dataset: NorESM1-ME} - # - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, tier: 1} + # - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, tier: 1} # scripts: # latlon: # <<: *latlon_settings @@ -1632,7 +1637,6 @@ diagnostics: # preprocessor: ppNOLEV1 # reference_dataset: CERES-EBAF # mip: Amon - # field: T2Ms # derive: true # force_derivation: false # project: CMIP5 @@ -1678,7 +1682,7 @@ diagnostics: # - {dataset: MRI-CGCM3} # - {dataset: NorESM1-M} # - {dataset: NorESM1-ME} - # - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, tier: 1} + # - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, tier: 1} # scripts: # latlon: # <<: *latlon_settings @@ -1697,13 +1701,16 @@ diagnostics: #### od550aer: AEROSOL OPTICAL DEPTH AT 550 nm ############################### #od550aer: # description: Aerosol optical depth at 550 nm + # themes: + # - aerosols + # realms: + # - atmos # variables: # od550aer: # preprocessor: ppNOLEV2thr10 # reference_dataset: ESACCI-AEROSOL # alternative_dataset: MODIS # mip: aero - # field: T2Ms # project: CMIP5 # exp: historical # ensemble: r1i1p1 @@ -1744,12 +1751,15 @@ diagnostics: #### od870aer: AEROSOL OPTICAL DEPTH AT 870 nm ############################### #od870aer: # description: Aerosol optical depth at 870 nm + # themes: + # - aerosols + # realms: + # - atmos # variables: # od870aer: # preprocessor: ppNOLEV1thr10 # reference_dataset: ESACCI-AEROSOL # mip: aero - # field: T2Ms # project: CMIP5 # exp: historical # ensemble: r1i1p1 @@ -1781,12 +1791,15 @@ diagnostics: #### abs550aer: ABSORPTION OPTICAL DEPTH AT 550 nm ########################### #abs550aer: # description: Absorption optical depth at 550 nm + # themes: + # - aerosols + # realms: + # - atmos # variables: # abs550aer: # preprocessor: ppNOLEV1thr10 # reference_dataset: ESACCI-AEROSOL # mip: aero - # field: T2Ms # project: CMIP5 # exp: historical # ensemble: r1i1p1 @@ -1817,13 +1830,16 @@ diagnostics: #### od550lt1aer: FINE MODE AEROSOL OPTICAL DEPTH AT 550 nm ################## #od550lt1aer: + # themes: + # - aerosols + # realms: + # - atmos # description: Fine mode optical depth at 550 nm # variables: # od550lt1aer: # preprocessor: ppNOLEV1thr10 # reference_dataset: ESACCI-AEROSOL # mip: aero - # field: T2Ms # project: CMIP5 # exp: historical # ensemble: r1i1p1 @@ -1854,13 +1870,17 @@ diagnostics: #### toz: TOTAL COLUMN OZONE ################################################# #toz: # description: Total column ozone + # themes: + # - chem + # - ghg + # realms: + # - atmosChem # variables: # toz: # preprocessor: ppNOLEV2thr10 # reference_dataset: ESACCI-OZONE # alternative_dataset: NIWA-BS # mip: Amon - # field: T2Ms # derive: true # force_derivation: false # project: CMIP5 @@ -1897,7 +1917,64 @@ diagnostics: ### sic: SEA-ICE CONCENTRATION (SH) ######################################### - ### dos: 
SOIL MOISTURE ####################################################### + ### sm: SOIL MOISTURE ####################################################### + #sm: + # description: Soil moisture + # themes: + # - phys + # realms: + # - land + # variables: + # sm: + # preprocessor: ppNOLEV1thr10 + # reference_dataset: ESACCI-SOILMOISTURE + # mip: Lmon + # derive: true + # force_derivation: false + # project: CMIP5 + # exp: historical + # ensemble: r1i1p1 + # start_year: 2002 + # end_year: 2004 + # additional_datasets: + # - {dataset: ACCESS1-0} + # - {dataset: ACCESS1-3} + # - {dataset: bcc-csm1-1} + # - {dataset: CanCM4} + # - {dataset: CanESM2} + # - {dataset: CCSM4} + # - {dataset: CESM1-BGC} + # - {dataset: CESM1-CAM5} + # - {dataset: CESM1-FASTCHEM} + # - {dataset: CESM1-WACCM} + # - {dataset: CNRM-CM5} + # - {dataset: CSIRO-Mk3-6-0} + # - {dataset: FGOALS-g2} + # - {dataset: FGOALS-s2} + # - {dataset: GFDL-ESM2G} + # - {dataset: GFDL-ESM2M} + # - {dataset: HadCM3} + # - {dataset: HadGEM2-CC} + # - {dataset: HadGEM2-ES} + # - {dataset: inmcm4} + # - {dataset: IPSL-CM5A-LR} + # - {dataset: IPSL-CM5A-MR} + # - {dataset: IPSL-CM5B-LR} + # - {dataset: MIROC4h} + # - {dataset: MIROC5} + # - {dataset: MIROC-ESM} + # - {dataset: MIROC-ESM-CHEM} + # - {dataset: MRI-CGCM3} + # - {dataset: NorESM1-M} + # - {dataset: NorESM1-ME} + # - {dataset: ESACCI-SOILMOISTURE, project: OBS, type: sat, version: L3S-SSMV-COMBINED-v4.2, tier: 2} + # scripts: + # grading: + # <<: *grading_settings + # # Metric ('RMSD', 'BIAS', 'taylor') + # metric: [RMSD] + # # Normalization ('mean', 'median', 'centered_median', 'none') + # normalization: [centered_median] ### COLLECT METRICS ######################################################### @@ -1914,6 +1991,12 @@ diagnostics: cm_interval: [2, 241] # Sort dataset in alphabetic order (excluding MMM) sort: true + # Sort diagnostics in a specific order (name = 'diagnostic'-'region') + diag_order: ['ta850-global', 'ta200-global', 'ua850-global', + 'ua200-global', 'va850-global', 'va200-global', + 'zg500-global', 'hus400-global', 'tas-global', + 'ts-global', 'pr-global', 'clt-global', 'rlut-global', + 'rsut-global'] taylor: script: perfmetrics/collect.ncl ancestors: ['*/grading'] diff --git a/esmvaltool/recipes/recipe_perfmetrics_land_CMIP5.yml b/esmvaltool/recipes/recipe_perfmetrics_land_CMIP5.yml new file mode 100644 index 0000000000..11ed80bbaa --- /dev/null +++ b/esmvaltool/recipes/recipe_perfmetrics_land_CMIP5.yml @@ -0,0 +1,656 @@ +# ESMValTool +# recipe_perfmetrics_land_CMIP5.yml +--- +documentation: + title: Performance metrics for land variables in CMIP5 + + description: | + Recipe for plotting the performance metrics for the CMIP5 datasets, + including only land variables + + authors: + - gier_bettina + - winterstein_franziska + - righi_mattia + - eyring_veronika + + maintainer: + - righi_mattia + + references: + - gleckler08jgr + + projects: + - esmval + - embrace + - crescendo + - c3s-magic + - cmug + +preprocessors: + + ppNOLEV1: + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + multi_model_statistics: + span: overlap + statistics: [mean, median] + exclude: [reference_dataset] + + ppNOLEV2: + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + multi_model_statistics: + span: overlap + statistics: [mean, median] + exclude: [reference_dataset, alternative_dataset] + + +diagnostics: + + sm: + description: Soil moisture + themes: + - phys + realms: + - land + 
variables: + sm: + short_name: sm + preprocessor: ppNOLEV1 + reference_dataset: ESACCI-SOILMOISTURE + mip: Lmon + derive: true + force_derivation: false + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 1999 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CNRM-CM5} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: FGOALS-s2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: HadCM3} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + + - {dataset: ESACCI-SOILMOISTURE, project: OBS, type: sat, + version: L3S-SSMV-COMBINED-v4.2, tier: 2, start_year: 1980} + scripts: + grading: &grading_settings + script: perfmetrics/main.ncl + # Plot type ('cycle', 'zonal', 'latlon', 'cycle_latlon', 'cycle_zonal') + plot_type: cycle_latlon + # Time average ('opt' argument of time_operations.ncl) + time_avg: monthlyclim + # Region ('global', 'trop', 'nhext', 'shext') + region: global + # Plot standard deviation ('all', 'none', 'ref_model' or dataset name) + plot_stddev: ref_model + # Plot legend in a separate file + legend_outside: true + # Plot style + styleset: CMIP5 + # Calculate grading + calc_grading: true + # Metric ('RMSD', 'BIAS', 'taylor') + metric: [RMSD] + # Normalization ('mean', 'median', 'centered_median', 'none') + normalization: [centered_median] + + + nbp: + description: surface_net_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_all_land_processes + themes: + - phys + realms: + - land + variables: + nbp: + short_name: nbp + preprocessor: ppNOLEV1 + reference_dataset: JMA-TRANSCOM + mip: Lmon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 1999 + additional_datasets: + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM, ensemble: r2i1p1} + - {dataset: CMCC-CESM} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: HadGEM2-CC} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + + - {dataset: JMA-TRANSCOM, project: OBS, type: reanaly, + version: 2018, tier: 3, start_year: 1985} + scripts: + grading: + <<: *grading_settings + + + gpp: + description: Carbon Mass Flux out of Atmosphere due to Gross Primary Production + themes: + - phys + realms: + - land + variables: + gpp: + short_name: gpp + preprocessor: ppNOLEV2 + reference_dataset: MTE + alternative_dataset: FLUXCOM + mip: Lmon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 1999 + additional_datasets: + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: 
CESM1-FASTCHEM} + - {dataset: CESM1-WACCM, ensemble: r2i1p1} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: HadCM3} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + + - {dataset: FLUXCOM, project: OBS, type: reanaly, + version: ANN-v1, tier: 3} + - {dataset: MTE, project: OBS, type: reanaly, + version: May12, tier: 3, start_year: 1982} + scripts: + grading: + <<: *grading_settings + + + lai: + description: Leaf Area Index + themes: + - phys + realms: + - land + variables: + lai: + short_name: lai + preprocessor: ppNOLEV1 + reference_dataset: LAI3g + mip: Lmon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 1999 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM, ensemble: r2i1p1} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + + - {dataset: LAI3g, project: OBS, type: reanaly, + version: 1_regridded, tier: 3, start_year: 1981} + scripts: + grading: + <<: *grading_settings + + + fgco2: + description: surface_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon + themes: + - phys + realms: + - ocnBgchem + variables: + fgco2: + short_name: fgco2 + preprocessor: ppNOLEV2 + reference_dataset: JMA-TRANSCOM + alternative_dataset: Landschuetzer2016 + mip: Omon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 1999 + additional_datasets: + #- {dataset: BNU-ESM} # data is missing on ESGF + - {dataset: CanESM2} + - {dataset: CESM1-BGC} + - {dataset: CMCC-CESM} + - {dataset: CNRM-CM5} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R-CC} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-ME} + + - {dataset: JMA-TRANSCOM, project: OBS, type: reanaly, + version: 2018, tier: 3, start_year: 1985} + - {dataset: Landschuetzer2016, project: OBS, type: clim, + version: v2016, tier: 2, start_year: 1982} + scripts: + grading: + <<: *grading_settings + + + et: + description: total_evapotranspiration + themes: + - phys + realms: + - land + variables: + et: + short_name: et + preprocessor: ppNOLEV1 + reference_dataset: LandFlux-EVAL + mip: Lmon + derive: true + force_derivation: false + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 1999 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - 
{dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: EC-EARTH, ensemble: r2i1p1} + - {dataset: FGOALS-g2} + - {dataset: FGOALS-s2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + + - {dataset: LandFlux-EVAL, project: OBS, type: reanaly, + version: Oct13, tier: 3, start_year: 1990} + scripts: + grading: + <<: *grading_settings + + + rlus: + description: surface upwelling longwave flux in air + themes: + - phys + realms: + - atmos + variables: + rlus: + short_name: rlus + preprocessor: ppNOLEV1 + reference_dataset: CERES-EBAF_Surface + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 1999 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: bcc-csm1-1} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CNRM-CM5} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: FGOALS-s2} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: HadCM3} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: CERES-EBAF_Surface, project: obs4MIPs, level: L3B, tier: 1, start_year: 2001, end_year: 2013} + scripts: + grading: + <<: *grading_settings + + + rlds: + description: surface_downwelling_longwave_flux_in_air + themes: + - phys + realms: + - atmos + variables: + rlds: + short_name: rlds + preprocessor: ppNOLEV1 + reference_dataset: CERES-EBAF_Surface + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 1999 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CNRM-CM5} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: FGOALS-s2} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: HadCM3} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - 
{dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: CERES-EBAF_Surface, project: obs4MIPs, level: L3B, tier: 1, start_year: 2001, end_year: 2013} + scripts: + grading: + <<: *grading_settings + + + rsus: + description: surface_upwelling_shortwave_flux_in_air + themes: + - phys + realms: + - atmos + variables: + rsus: + short_name: rsus + preprocessor: ppNOLEV1 + reference_dataset: CERES-EBAF_Surface + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 1999 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: bcc-csm1-1} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CNRM-CM5} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: FGOALS-s2} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: HadCM3} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: CERES-EBAF_Surface, project: obs4MIPs, level: L3B, tier: 1, start_year: 2001, end_year: 2013} + scripts: + grading: + <<: *grading_settings + + + rsds: + description: surface_downwelling_shortwave_flux_in_air + themes: + - phys + realms: + - atmos + variables: + rsds: + short_name: rsds + preprocessor: ppNOLEV1 + reference_dataset: CERES-EBAF_Surface + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1980 + end_year: 1999 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CNRM-CM5} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: FGOALS-s2} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: HadCM3} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + + - {dataset: CERES-EBAF_Surface, project: obs4MIPs, level: L3B, tier: 1, start_year: 2001, end_year: 2013} + scripts: + grading: + <<: *grading_settings + + + ### COLLECT METRICS ######################################################### + collect: + description: Wrapper to collect and plot previously calculated metrics + scripts: + RMSD: + script: perfmetrics/collect.ncl + ancestors: ['*/grading*'] + metric: RMSD + label_bounds: [-0.5, 0.5] + label_scale: 0.1 + disp_values: false + cm_interval: [2, 241] + # Sort dataset in alphabetic order (excluding MMM) + sort: true + diag_order: ['sm-global', 'nbp-global', 'lai-global', 
'gpp-global', + 'fgco2-global', 'et-global', + 'rsds-global', 'rsus-global', 'rlds-global', 'rlus-global'] diff --git a/esmvaltool/recipes/recipe_psyplot.yml b/esmvaltool/recipes/recipe_psyplot.yml new file mode 100644 index 0000000000..c34ee203f0 --- /dev/null +++ b/esmvaltool/recipes/recipe_psyplot.yml @@ -0,0 +1,63 @@ +# ESMValTool +# recipe_psyplot.yml +--- +documentation: + title: Example recipe for the Psyplot diagnostic. + + description: > + This recipe showcases the use of the Psyplot diagnostic that provides a + high-level interface to Psyplot for ESMValTool recipes. For each input + dataset, an individual plot is created. With the Psyplot diagnostic, + arbitrary Psyplot plots can be created. + + authors: + - schlund_manuel + + maintainer: + - schlund_manuel + + references: + - sommer17joss + + projects: + - 4c + - esm2025 + - isenes3 + - usmile + + +datasets: + - {project: CMIP6, dataset: CanESM5, exp: historical, ensemble: r1i1p1f1, grid: gn, start_year: 1995, end_year: 2014} + - {project: CMIP6, dataset: ICON-ESM-LR, exp: historical, ensemble: r1i1p1f1, grid: gn, start_year: 1995, end_year: 2014} + + +preprocessors: + climatology: + climate_statistics: + operator: mean + + +diagnostics: + plot_map: + description: Plot climatology maps. + variables: + tas: + mip: Amon + preprocessor: climatology + scripts: + plot: + script: psyplot_diag.py + psyplot_func: mapplot + psyplot_kwargs: + bounds: + method: rounded + vmin: 240 + vmax: 300 + clabel: '{long_name} [{units}]' + cmap: viridis + datagrid: + color: black + linewidth: 0.3 + map_extent: Europe + projection: robin + title: '{long_name} Climatology of {dataset} ({start_year}-{end_year})' diff --git a/esmvaltool/recipes/recipe_pv_capacity_factor.yml b/esmvaltool/recipes/recipe_pv_capacity_factor.yml new file mode 100644 index 0000000000..ee300b6835 --- /dev/null +++ b/esmvaltool/recipes/recipe_pv_capacity_factor.yml @@ -0,0 +1,79 @@ +# ESMValTool +# recipe_pv_capacity_factor.yml +--- +documentation: + title: "Capacity factor for solar photovoltaic (PV) systems" + description: | + Solar Capacity Factor + + authors: + - cionni_irene + + maintainer: + - weigel_katja + + references: + - bett2016renene + - weigel2021gmd + + projects: + - crescendo + +datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3, + start_year: 1980, end_year: 2005} + - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: 
r1i1p1, + start_year: 1980, end_year: 2005} + - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1980, end_year: 2005} +preprocessors: + preproc: + regrid: + target_grid: reference_dataset + scheme: linear + extract_region: + start_longitude: -20 + end_longitude: 60 + start_latitude: 30 + end_latitude: 80 + extract_season: &season + season: djf + + +diagnostics: + capacity_factor: + description: Calculate the photovoltaic capacity factor. + variables: + tas: + reference_dataset: ERA-Interim + preprocessor: preproc + mip: day + rsds: + reference_dataset: ERA-Interim + preprocessor: preproc + mip: day + scripts: + main: + <<: *season + script: pv_capacityfactor/pv_capacity_factor.R + maxval_colorbar: 0.15 diff --git a/esmvaltool/recipes/recipe_quantilebias.yml b/esmvaltool/recipes/recipe_quantilebias.yml index 0a63ff4484..2da81b5960 100644 --- a/esmvaltool/recipes/recipe_quantilebias.yml +++ b/esmvaltool/recipes/recipe_quantilebias.yml @@ -1,16 +1,17 @@ +# ESMValTool # recipe_quantilebias.yml --- - documentation: + title: Precipitation Quantile Bias description: | Tool for calculation of precipitation quantile bias authors: - - arno_en - - hard_jo + - arnone_enrico + - vonhardenberg_jost maintainer: - - hard_jo + - vonhardenberg_jost references: - mehran14jgr @@ -19,66 +20,25 @@ documentation: - c3s-magic datasets: - - {dataset: EC-EARTH, project: CMIP5, exp: historical, ensemble: r2i1p1, start_year: 1997, end_year: 1997} - - {dataset: GPCP-SG, project: obs4mips, level: L3, version: v2.2, tier: 1, start_year: 1997, end_year: 1997} + - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1979, end_year: 2005} + - {dataset: GPCP-V2.3, project: obs4MIPs, level: L3, tier: 1, start_year: 1979, end_year: 2005} preprocessors: - masking_preprocessor: + mask_regrid_preproc: + regrid: + target_grid: 2x2 + scheme: area_weighted mask_landsea: mask_out: sea diagnostics: quantilebias: - description: Tool for calculation of precipitation quantile bias + description: Tool for calculation of precipitation quantile bias variables: pr: - preprocessor: masking_preprocessor - reference_dataset: "GPCP-SG" + preprocessor: mask_regrid_preproc + reference_dataset: "GPCP-V2.3" mip: Amon - project: CMIP5 - exp: historical - ensemble: r1i1p1 - start_year: 1997 - end_year: 1997 - additional_datasets: - - {dataset: ACCESS1-0} -# - {dataset: ACCESS1-3} - #- {dataset: bcc-csm1-1} - #- {dataset: bcc-csm1-1-m} - #- {dataset: CanESM2} - #- {dataset: CCSM4} - #- {dataset: CESM1-BGC} - #- {dataset: CESM1-CAM5} - #- {dataset: CESM1-FASTCHEM} - #- {dataset: CESM1-WACCM} - #- {dataset: CMCC-CESM} - #- {dataset: CMCC-CM} - #- {dataset: CMCC-CMS} - #- {dataset: CNRM-CM5} - #- {dataset: CSIRO-Mk3-6-0} - #- {dataset: EC-EARTH31, ensemble: r2i1p1} - #- {dataset: FGOALS-g2} - #- {dataset: FGOALS-s2} - #- {dataset: FIO-ESM} - #- {dataset: GFDL-CM2p1} - #- {dataset: GFDL-CM3} - #- {dataset: GFDL-ESM2G} - #- {dataset: GFDL-ESM2M} - #- {dataset: HadGEM2-AO} - #- {dataset: HadGEM2-CC} - #- {dataset: HadGEM2-ES} - #- {dataset: inmcm4} - #- {dataset: IPSL-CM5A-LR} - #- {dataset: IPSL-CM5A-MR} - #- {dataset: IPSL-CM5B-LR} - #- {dataset: MIROC5} - #- {dataset: MIROC-ESM} - #- {dataset: MIROC-ESM-CHEM} - #- {dataset: MPI-ESM-LR} - #- {dataset: MPI-ESM-P} - #- {dataset: MRI-CGCM3} - #- {dataset: NorESM1-M} - #- {dataset: NorESM1-ME} scripts: main: diff --git a/esmvaltool/recipes/recipe_radiation_budget.yml b/esmvaltool/recipes/recipe_radiation_budget.yml new file mode 100644 index 
0000000000..4a81307283 --- /dev/null +++ b/esmvaltool/recipes/recipe_radiation_budget.yml @@ -0,0 +1,132 @@ +# ESMValTool +# recipe_radiation_budget.yml +--- +documentation: + title: Radiation Budget + description: + This diagnostic analyses the radiation budget by separating top-of-atmosphere + fluxes into clear-sky and cloud forcing components, and surface fluxes into + downwelling and upwelling components. Model predictions are compared against + three observational estimates, one of which (Stephens et al. 2012) includes + uncertainty estimates. When the black error bars overlap the zero line, the + model is consistent with observations according to Stephens et al. (2012). + authors: + - lillis_jon + - hogan_emma + maintainer: + - lillis_jon + - hogan_emma + +datasets: + - {dataset: HadGEM3-GC31-LL, project: CMIP6, exp: historical, + ensemble: r1i1p1f3, grid: gn, start_year: 1993, end_year: 2002} + - {dataset: UKESM1-0-LL, project: CMIP6, exp: historical, + ensemble: r5i1p1f3, grid: gn, start_year: 1993, end_year: 2002} + +preprocessors: + single_value: + climate_statistics: + operator: mean + period: full + area_statistics: + operator: mean + seasonal: + climate_statistics: + operator: mean + period: seasonal + seasons: ['DJF', 'MAM', 'JJA', 'SON'] + area_statistics: + operator: mean + +diagnostics: + single_value_radiation_budget: + description: Radiation budget for HadGEM3 vs UKESM1. + variables: + rss: + mip: Emon + preprocessor: single_value + rsdt: + mip: Amon + preprocessor: single_value + rsut: + mip: Amon + preprocessor: single_value + additional_datasets: + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, + start_year: 2000, end_year: 2010, tier: 1} + rsutcs: + mip: Amon + preprocessor: single_value + additional_datasets: + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, + start_year: 2000, end_year: 2010, tier: 1} + rsds: + mip: Amon + preprocessor: single_value + rls: + mip: Emon + preprocessor: single_value + rlut: + mip: Amon + preprocessor: single_value + additional_datasets: + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, + start_year: 2000, end_year: 2010, tier: 1} + rlutcs: + mip: Amon + preprocessor: single_value + additional_datasets: + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, + start_year: 2000, end_year: 2010, tier: 1} + rlds: + mip: Amon + preprocessor: single_value + hfss: + mip: Amon + preprocessor: single_value + hfls: + mip: Amon + preprocessor: single_value + scripts: + radiation_budget: + script: radiation_budget/radiation_budget.py + + seasonal_radiation_budget: + description: Seasonal radiation budget for HadGEM3 vs UKESM1. 
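# Note: the clear-sky/cloud-forcing split described in the documentation
# above is conventionally derived from the all-sky and clear-sky fluxes
# requested below; assuming the diagnostic scripts follow the usual
# definitions at the top of the atmosphere:
#   SW cloud radiative effect = rsutcs - rsut  (usually negative: clouds reflect)
#   LW cloud radiative effect = rlutcs - rlut  (usually positive: clouds trap LW)
#   net TOA budget = rsdt - rsut - rlut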
+ variables: + rss: + mip: Emon + preprocessor: seasonal + rsdt: + mip: Amon + preprocessor: seasonal + rsut: + mip: Amon + preprocessor: seasonal + rsutcs: + mip: Amon + preprocessor: seasonal + rsds: + mip: Amon + preprocessor: seasonal + rls: + mip: Emon + preprocessor: seasonal + rlut: + mip: Amon + preprocessor: seasonal + rlutcs: + mip: Amon + preprocessor: seasonal + rlds: + mip: Amon + preprocessor: seasonal + hfss: + mip: Amon + preprocessor: seasonal + hfls: + mip: Amon + preprocessor: seasonal + scripts: + radiation_budget: + script: radiation_budget/seasonal_radiation_budget.py diff --git a/esmvaltool/recipes/recipe_rainfarm.yml b/esmvaltool/recipes/recipe_rainfarm.yml new file mode 100644 index 0000000000..b302120302 --- /dev/null +++ b/esmvaltool/recipes/recipe_rainfarm.yml @@ -0,0 +1,56 @@ +# ESMValTool +# recipe_rainfarm.yml +--- +documentation: + title: Stochastic Downscaling of Precipitation by RainFARM + description: | + Recipe for performing stochastic downscaling of precipitation fields + calling the RainFARM package by J. von Hardenberg (ISAC-CNR) + + authors: + - arnone_enrico + - vonhardenberg_jost + + maintainer: + - unmaintained + + references: + - donofrio14jh + - rebora06jhm + - terzago18nhess + + projects: + - c3s-magic + +datasets: + - {dataset: EC-EARTH, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1997, end_year: 1997} + +preprocessors: + preproc: + extract_region: + # The selected region needs to have an equal and even number of longitude + # and latitude grid points (e.g., 2x2, 4x4, ...); it is cut to size in any case. + # Warning: downscaling can reach very high resolution, so select a limited area. + start_longitude: 5 + end_longitude: 15 + start_latitude: 40 + end_latitude: 50 + regrid: + target_grid: 1x1 # in degrees, can also be the name of one of the datasets to use the grid from that dataset + scheme: area_weighted # can be linear, nearest, area_weighted, unstructured_nearest +diagnostics: + rainfarm: + description: RainFARM stochastic downscaling of precipitation fields + variables: + pr: + preprocessor: preproc + mip: day + scripts: + rainfarm: + script: rainfarm/rainfarm.jl + slope: 1.7 # spatial spectral slope (set to 0 to compute from large scales) + nens: 2 # number of ensemble members to be calculated + nf: 8 # subdivisions for downscaling + conserv_glob: false # conserve precipitation over full domain (choose either glob or smooth, glob has priority) + conserv_smooth: true # conserve precipitation using convolution (if neither is chosen box conservation is used) + weights_climo: false # orographic weights: set to false (or omit) or path to a fine-scale precipitation climatology file diff --git a/esmvaltool/recipes/recipe_runoff_et.yml b/esmvaltool/recipes/recipe_runoff_et.yml index fa1951517c..0a83213caa 100644 --- a/esmvaltool/recipes/recipe_runoff_et.yml +++ b/esmvaltool/recipes/recipe_runoff_et.yml @@ -2,19 +2,21 @@ # recipe_runoff_et.yml --- documentation: + title: Runoff and Evapotranspiration Diagnostics description: | - Recipe for plotting runoff and ET diagnostics. The diagnostics calculates water - balance components for different catchments and compares the results against - observations. Currently, the required catchment mask needs to be downloaded manually - at https://doi.org/10.5281/zenodo.2025776 + Recipe for plotting runoff and ET diagnostics. The diagnostic calculates + water balance components for different catchments and compares the results + against observations.
Currently, the required catchment mask needs to be + downloaded manually at https://doi.org/10.5281/zenodo.2025776 and saved in + the auxiliary_data_dir defined in configuration. authors: - - hage_st - - loew_al - - stac_to + - hagemann_stefan + - loew_alexander + - stacke_tobias maintainer: - - righ_ma + - righi_mattia references: - duemenil00mpimr @@ -26,8 +28,10 @@ documentation: - crescendo datasets: - - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1970, end_year: 2000} - - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1970, end_year: 2000} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1970, end_year: 2000} + - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1970, end_year: 2000} diagnostics: catchment_analysis: @@ -42,5 +46,4 @@ diagnostics: scripts: catchment_analysis: script: runoff_et/catchment_analysis.py - catchmentmask: ~/catchmentmask_v1.nc - + catchmentmask: catchmentmask_v1.nc diff --git a/esmvaltool/recipes/recipe_russell18jgr.yml b/esmvaltool/recipes/recipe_russell18jgr.yml new file mode 100644 index 0000000000..2e070d74be --- /dev/null +++ b/esmvaltool/recipes/recipe_russell18jgr.yml @@ -0,0 +1,987 @@ +# ESMValTool +# recipe_russell18jgr.yml +--- +documentation: + title: Southern Ocean Metrics + description: | + Recipe for Russell et al. figures 1, 2, 3a, 3b, 4, 5, 5g, 6a, + 6b, 7, 7h, 7i, 8, 9a, 9b, 9c. + Russell, J. L., et al., 2018, J. Geophysical Research - Oceans, + 123, 3120-3143, https://doi.org/10.1002/2017JC013461 + + Please read the individual descriptions in the diagnostics section. + + authors: + - russell_joellen + - pandde_amarjiit + + maintainer: + - russell_joellen + - pandde_amarjiit + + references: + - russell18jgr + + projects: + - russell_project + +preprocessors: + preprocessor_time_land: + climate_statistics: + operator: mean + period: full + mask_landsea: + mask_out: "land" + + preprocessor_time: + climate_statistics: + operator: mean + period: full + + +diagnostics: + + Figure_1: + description: | + Diagnostic for Russell et al. figure 1. Plots the annual-mean zonal wind + stress as a polar contour map. Here, we are using tauuo. The figures in + the russell18jgr paper were made using tauuo, but tauu can also be used + if the tauuo file is not available. To use the tauu variable in this + recipe, just uncomment the variable 'tauu' and add dataset names to the + additional_datasets of the respective variable. If a variable has no + datasets, the entire variable section needs to be commented out. + themes: + - phys + realms: + - ocean + variables: + tauu: + preprocessor: preprocessor_time_land + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1986 + end_year: 2005 + additional_datasets: + - {dataset: CanESM2} + - {dataset: GFDL-ESM2M} + # tauuo: + # preprocessor: preprocessor_time + # mip: Omon + # project: CMIP5 + # exp: historical + # ensemble: r1i1p1 + # start_year: 1986 + # end_year: 2005 + # additional_datasets: + # - {dataset: CanESM2} + # - {dataset: GFDL-ESM2M} + scripts: + polar-tauu: + script: russell18jgr/russell18jgr-polar.ncl + # Plot style + styleset: CMIP5 + ncdf: default + grid_min: -0.4 + grid_max: 0.4 + grid_step: 0.1 + colors: [[237.6, 237.6, 0.], [255, 255, 66.4], [255, 255, 119.6], + [255, 255, 191.8], [223.8, 191.8, 223.8], + [192.8, 127.5, 190.8], + [161.6, 65.3, 158.6], [129.5, 1.0, 126.5]] + labelBar_end_type: ExcludeOuterBoxes + max_lat: -30.
+ max_vert: 2 + max_hori: 1 + + + Figure_2: + description: | + Diagnostic for Russell et al. Figure 2. Plots the zonal and annual means + of the zonal wind stress (N/m^2). Here, we are using tauuo. The figures + in the russell18jgr paper were made using tauuo, but tauu can also be + used if the tauuo file is not available. To use the tauu variable + in this recipe, just uncomment the variable 'tauu' and add dataset names + to the additional_datasets of the respective variable. If a variable has + no datasets, the entire variable section needs to be commented out. + themes: + - phys + realms: + - ocean + variables: + # tauu: + # preprocessor: preprocessor_time_land + # mip: Amon + # project: CMIP5 + # exp: historical + # ensemble: r1i1p1 + # start_year: 1986 + # end_year: 2005 + # additional_datasets: + # - {dataset: CanESM2} + # - {dataset: GFDL-ESM2M} + tauuo: + preprocessor: preprocessor_time + mip: Omon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1986 + end_year: 2005 + additional_datasets: + - {dataset: CanESM2} + - {dataset: GFDL-ESM2M} + scripts: + tauu-lineplot: + script: russell18jgr/russell18jgr-fig2.ncl + # Plot style + styleset: CMIP5 + ncdf: default + + + Figure_3b: + description: | + Diagnostic for Russell et al. figure 3b. Plots the latitudinal position + of the Subantarctic Front. Using definitions from Orsi et al. (1995), the + Subantarctic Front is defined here as the poleward location of + the 4C (277.15K) isotherm at (closest to and less than) 400 m. + themes: + - phys + realms: + - ocean + variables: + thetao: + mip: Omon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1986 + end_year: 2005 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-CAM5} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + # - {dataset: FGOALS-s2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-ES} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + # - {dataset: MIROC-ESM} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + scripts: + Subantarctic-Front: + script: russell18jgr/russell18jgr-fig3b.ncl + # Plot style + styleset: CMIP5 + ncdf: CMIP5 + + + Figure_3b-2: + description: | + Diagnostic for Russell et al. figure 3b-2 (Polar fronts). Plots the + latitudinal position of the Polar Front. Using definitions from Orsi et + al. (1995), the Polar Front is defined here as the poleward location + of the 2C (275.15K) isotherm of the temperature minimum between 0 and + 200 m (closest to and less than 200 m).
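Both front diagnostics reduce to an isotherm search on thetao. Below is a minimal numpy sketch of the Figure_3b-2 criterion under simplifying assumptions: a regular lat-lon grid, thetao in K, depth in m; find_polar_front is a hypothetical helper, and taking the equatorward edge of the sub-2C tongue is one plausible reading of "poleward location of the isotherm" (the actual implementation, which also handles curvilinear ocean grids, is russell18jgr-fig3b-2.ncl):

    import numpy as np

    def find_polar_front(thetao, depth, lat):
        """Latitude of the Polar Front per longitude: the location of the
        2C (275.15 K) isotherm of the 0-200 m temperature minimum.

        thetao: array (ndepth, nlat, nlon) in K; depth: (ndepth,) in m;
        lat: (nlat,) ascending from south to north.
        """
        tmin = np.nanmin(thetao[depth <= 200.0], axis=0)  # (nlat, nlon)
        front = np.full(tmin.shape[1], np.nan)
        for ilon in range(tmin.shape[1]):
            cold = np.where(tmin[:, ilon] <= 275.15)[0]
            if cold.size:
                front[ilon] = lat[cold.max()]  # equatorward edge of cold water
        return front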
+ themes: + - phys + realms: + - ocean + variables: + thetao: + mip: Omon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1986 + end_year: 2005 + additional_datasets: + # - {dataset: ACCESS1-0} + # - {dataset: ACCESS1-3} + # - {dataset: bcc-csm1-1} + # - {dataset: bcc-csm1-1-m} + # - {dataset: BNU-ESM} + - {dataset: CanESM2} + # - {dataset: CCSM4} + # - {dataset: CESM1-CAM5} + # - {dataset: CNRM-CM5} + # - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + # - {dataset: FGOALS-g2} + # - {dataset: FGOALS-s2} + # - {dataset: GFDL-CM3} + # - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + # - {dataset: GISS-E2-H-CC} + # - {dataset: GISS-E2-R-CC} + # - {dataset: HadCM3} + - {dataset: HadGEM2-ES} + # - {dataset: IPSL-CM5A-LR} + # - {dataset: IPSL-CM5A-MR} + # - {dataset: IPSL-CM5B-LR} + # - {dataset: MIROC-ESM} + # - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + # - {dataset: NorESM1-M} + # - {dataset: NorESM1-ME} + scripts: + Polar-front: + script: russell18jgr/russell18jgr-fig3b-2.ncl + # Plot style + styleset: CMIP5 + ncdf: default + + + Figure_4: + description: | + Recipe for Russell et al. figure 4. Plots the zonal velocity through + Drake Passage (at 69W) and the total transport through the passage if + the volcello file is available. + themes: + - phys + realms: + - ocean + variables: + uo: + mip: Omon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1986 + end_year: 2005 + volcello: + mip: fx + project: CMIP5 + exp: historical + ensemble: r0i0p0 + start_year: 1986 + end_year: 2005 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + # - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-CAM5} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + # - {dataset: FGOALS-s2} + # - {dataset: GFDL-CM3} + # - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + # - {dataset: GISS-E2-H-CC} + # - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-ES} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + # - {dataset: MIROC-ESM} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + scripts: + Drake-passage: + script: russell18jgr/russell18jgr-fig4.ncl + styleset: CMIP5 + ncdf: default + max_vert: 2 + max_hori: 1 + # unit conversion factor from (m/s) to (cm/s) + unitCorrectionalFactor: 100 + new_units: "cm/s" + + + Figure_5: + description: | + Diagnostic for Russell et al. figure 5 (polar). Plots the mean extent of + sea ice for September (max) in blue and the mean extent of sea ice for + February (min) in red. The edge of full coverage is defined by the 15% + areal coverage.
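Both sea-ice figures come down to weighted sums of sic (in %) against the ocean cell areas: Figure_5 thresholds the concentration at 15% to find the ice edge, while Figure_5g further below integrates concentration times area. A minimal sketch of the two quantities (hypothetical helper names; the NCL scripts are the actual implementation):

    import numpy as np

    def sea_ice_extent(sic, areacello):
        """Total area (m^2) of cells with at least 15% sea-ice concentration."""
        return np.where(sic >= 15.0, areacello, 0.0).sum()

    def sea_ice_area(sic, areacello):
        """Area integral of the concentration itself (m^2)."""
        return (sic / 100.0 * areacello).sum()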
+ themes: + - seaIce + realms: + - seaIce + variables: + sic: + mip: OImon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1986 + end_year: 2005 + additional_datasets: + # - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-CAM5} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + # - {dataset: FGOALS-s2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-ES} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + scripts: + sic-polar: + script: russell18jgr/russell18jgr-fig5.ncl + # Plot style + styleset: CMIP5 + ncdf: default + max_lat: -45.0 + max_vert: 2 + max_hori: 2 + + + Figure_5g: + description: | + Recipe for Russell et al. figure 5g. Plots the annual cycle of sea ice + area in the Southern Ocean. The diag_script manually calculates the + areacello for lat-lon models, as some models use different grids + for areacello and sic files. + themes: + - seaIce + realms: + - seaIce + variables: + sic: + mip: OImon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1986 + end_year: 2005 + areacello: + mip: fx + project: CMIP5 + exp: historical + ensemble: r0i0p0 + start_year: 1986 + end_year: 2005 + additional_datasets: + # - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-CAM5} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + # - {dataset: FGOALS-s2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + # - {dataset: GISS-E2-H-CC} + # - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-ES} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC-ESM} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + scripts: + sic-line: + script: russell18jgr/russell18jgr-fig5g.ncl + # Plot style + styleset: CMIP5 + + + Figure_6a: + description: | + Diagnostic for Russell et al. Figure 6 (volume transport). Plots the + density-layer-based volume transport (in Sv) across 30S based on the + layer definitions in Talley (2008). The dark blue bars are the + integrated totals for each layer and can be compared to the magenta + lines, which are the observed values from Talley (2008).
The narrower + red bars are equal subdivisions of each blue layer. + themes: + - phys + realms: + - ocean + variables: + thetao: + mip: Omon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1986 + end_year: 2005 + so: + mip: Omon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1986 + end_year: 2005 + vo: + mip: Omon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1986 + end_year: 2005 + volcello: + mip: fx + project: CMIP5 + exp: historical + ensemble: r0i0p0 + start_year: 1986 + end_year: 2005 + additional_datasets: + # - {dataset: ACCESS1-0} + # - {dataset: ACCESS1-3} + # - {dataset: bcc-csm1-1} + # - {dataset: bcc-csm1-1-m} + # - {dataset: BNU-ESM} + - {dataset: CanESM2} + # - {dataset: CCSM4} + # - {dataset: CESM1-CAM5} + # - {dataset: CNRM-CM5} + # - {dataset: CNRM-CM5-2} + # - {dataset: CSIRO-Mk3-6-0} + # - {dataset: FGOALS-g2} + # - {dataset: FGOALS-s2} + # - {dataset: GFDL-CM3} + # - {dataset: GFDL-ESM2G} + # - {dataset: GFDL-ESM2M} + # - {dataset: GISS-E2-H-CC} + # - {dataset: GISS-E2-R-CC} + # - {dataset: HadCM3} + # - {dataset: HadGEM2-ES} + # - {dataset: IPSL-CM5A-LR} + # - {dataset: IPSL-CM5A-MR} + # - {dataset: IPSL-CM5B-LR} + # - {dataset: MIROC-ESM} + # - {dataset: MRI-CGCM3} + # - {dataset: MRI-ESM1} + # - {dataset: NorESM1-M} + # - {dataset: NorESM1-ME} + scripts: + Figure6a: + script: russell18jgr/russell18jgr-fig6a.ncl + # Plot style + styleset: CMIP5 + ncdf: default + + + Figure_6b: + description: | + Diagnostic for Russell et al. Figure 6b (heat transport). Plots the + density-layer-based heat transport (in PW) across 30S based on the layer + definitions in Talley (2008). The dark blue bars are the integrated + totals for each layer and can be compared to the magenta lines, which + are the observed values from Talley (2008). The narrower red bars are + equal subdivisions of each blue layer. + themes: + - phys + realms: + - ocean + variables: + thetao: + mip: Omon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1986 + end_year: 2005 + so: + mip: Omon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1986 + end_year: 2005 + vo: + mip: Omon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1986 + end_year: 2005 + volcello: + mip: fx + project: CMIP5 + exp: historical + ensemble: r0i0p0 + start_year: 1986 + end_year: 2005 + additional_datasets: + # - {dataset: ACCESS1-0} + # - {dataset: ACCESS1-3} + # - {dataset: bcc-csm1-1} + # - {dataset: bcc-csm1-1-m} + # - {dataset: BNU-ESM} + - {dataset: CanESM2} + # - {dataset: CCSM4} + # - {dataset: CESM1-CAM5} + # - {dataset: CNRM-CM5} + # - {dataset: CNRM-CM5-2} + # - {dataset: CSIRO-Mk3-6-0} + # - {dataset: FGOALS-g2} + # - {dataset: FGOALS-s2} + # - {dataset: GFDL-CM3} + # - {dataset: GFDL-ESM2G} + # - {dataset: GFDL-ESM2M} + # - {dataset: GISS-E2-H-CC} + # - {dataset: GISS-E2-R-CC} + # - {dataset: HadCM3} + # - {dataset: HadGEM2-ES} + # - {dataset: IPSL-CM5A-LR} + # - {dataset: IPSL-CM5A-MR} + # - {dataset: IPSL-CM5B-LR} + # - {dataset: MIROC-ESM} + # - {dataset: MRI-CGCM3} + # - {dataset: MRI-ESM1} + # - {dataset: NorESM1-M} + # - {dataset: NorESM1-ME} + scripts: + Figure6b: + script: russell18jgr/russell18jgr-fig6b.ncl + styleset: CMIP5 + ncdf: default + + + Figure_7: + description: | + Diagnostic for Russell et al. figure 7 (polar). Plots the annual-mean + CO2 flux (sea to air, gC/(yr * m^2), positive (red) is out of the + ocean).
+ themes: + - carbon + realms: + - ocean + variables: + fgco2: + preprocessor: preprocessor_time_land + mip: Omon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1986 + end_year: 2005 + # areacello: + # mip: fx + # project: CMIP5 + # exp: historical + # ensemble: r0i0p0 + # start_year: 1986 + # end_year: 2005 + additional_datasets: + - {dataset: bcc-csm1-1} + # - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R-CC} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC-ESM, supplementary_variables: [{short_name: sftlf, mip: fx, ensemble: r0i0p0}, {short_name: sftof, skip: true}]} # , ensemble: r3i1p1} + - {dataset: MRI-ESM1} + scripts: + fgco2-polar: + script: russell18jgr/russell18jgr-polar.ncl + # Plot style + styleset: CMIP5 + ncdf: default + grid_min: -30 + grid_max: 30 + grid_step: 2.5 + colormap: BlWhRe + labelBar_end_type: both_triangle + max_lat: -30. + max_vert: 2 + max_hori: 1 + unitCorrectionalFactor: -3.154e+10 + # unit conversion factor from kg /(m^2 * sec) to g / (m^2 * yr) + new_units: "gC/ (m~S~2~N~ * yr)" + + + Figure_7h: + description: | + Diagnostic for Russell et al. figure 7h. Plots the zonal mean flux of + fgco2 in gC/(yr * m^2). + themes: + - carbon + realms: + - ocean + variables: + fgco2: + preprocessor: preprocessor_time_land + mip: Omon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1986 + end_year: 2005 + # areacello: + # mip: fx + # project: CMIP5 + # exp: historical + # ensemble: r0i0p0 + # start_year: 1986 + # end_year: 2005 + additional_datasets: + - {dataset: bcc-csm1-1} + # - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R-CC} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC-ESM, supplementary_variables: [{short_name: sftlf, mip: fx, ensemble: r0i0p0}, {short_name: sftof, skip: true}]} # , ensemble: r3i1p1} + - {dataset: MRI-ESM1} + scripts: + fgco2_line: + script: russell18jgr/russell18jgr-fig7h.ncl + # Plot style + styleset: CMIP5 + ncdf: default + + + Figure_7i: + description: | + Diagnostic for Russell et al. figure 7i. Plots the cumulative integral + of the net CO2 flux from 90S to 30S (in PgC/yr). The diag_script manually + calculates the areacello for lat-lon models, as some models use different + grids for areacello and fgco2 files.
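The unitCorrectionalFactor of -3.154e+10 used for the fgco2 plots above is easy to verify: converting kg/(m^2 * sec) to g/(m^2 * yr) multiplies by 1000 g/kg and by roughly 3.154e7 s/yr (about 365 days), and the sign flip makes positive values denote the sea-to-air flux, matching the plot convention that positive (red) is out of the ocean. A quick check:

    # Verify the factor used above: kg m-2 s-1 -> gC m-2 yr-1, with a sign
    # flip so that positive means sea-to-air (out of the ocean).
    GRAMS_PER_KG = 1000.0
    SECONDS_PER_YEAR = 3.154e7  # the recipe's rounding of one year in seconds

    factor = -GRAMS_PER_KG * SECONDS_PER_YEAR
    assert factor == -3.154e10

    def fgco2_to_plot_units(fgco2_kg_m2_s):
        """Apply the same conversion as unitCorrectionalFactor."""
        return factor * fgco2_kg_m2_s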
+ themes: + - carbon + realms: + - ocean + variables: + fgco2: + preprocessor: preprocessor_time_land + mip: Omon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1986 + end_year: 2005 + additional_datasets: + - {dataset: MIROC-ESM, supplementary_variables: [{short_name: sftlf, mip: fx, ensemble: r0i0p0}, {short_name: sftof, skip: true}]} # , ensemble: r3i1p1} + areacello: + mip: fx + project: CMIP5 + exp: historical + ensemble: r0i0p0 + start_year: 1986 + end_year: 2005 + additional_datasets: + - {dataset: MIROC-ESM} # , ensemble: r3i1p1} + additional_datasets: + - {dataset: bcc-csm1-1} + # - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CNRM-CM5} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + # - {dataset: GISS-E2-H-CC} + # - {dataset: GISS-E2-R-CC} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MRI-ESM1} + scripts: + fgco2_cumulative_line: + script: russell18jgr/russell18jgr-fig7i.ncl + # Plot style + styleset: CMIP5 + ncdf: default + + + Figure_8: + description: | + Diagnostic for Russell et al. figure 8. Plots surface pH as a polar + contour plot. + themes: + - bgchem + realms: + - ocnBgchem + variables: + ph: + preprocessor: preprocessor_time_land + mip: Omon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1986 + end_year: 2005 + additional_datasets: + # - {dataset: BNU-ESM} + - {dataset: GFDL-ESM2M} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + # - {dataset: MIROC-ESM, ensemble: r3i1p1} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-ME} + scripts: + Polar-ph: + script: russell18jgr/russell18jgr-polar.ncl + styleset: CMIP5 + ncdf: default + grid_min: 8.0 + grid_max: 8.2 + grid_step: 0.01 + labelBar_end_type: both_triangle + max_lat: -30. + max_vert: 2 + max_hori: 1 + grid_color: blue4 + colors: [[132, 12, 127], [147, 5, 153], [172, 12, 173], [195, 33, 196], + [203, 63, 209], [215, 89, 225], [229, 117, 230], + [243, 129, 238], [253, 155, 247], [255, 178, 254], + [255, 255, 255], [255, 255, 255], [126, 240, 138], + [134, 234, 138], [95, 219, 89], [57, 201, 54], [39, 182, 57], + [33, 161, 36], [16, 139, 22], [0, 123, 10], [6, 96, 6], + [12, 77, 9.0]] + + + Figure_9a: + description: | + Diagnostic for Russell et al. figure 9a. Plots the scatter plot of the + width of the Southern Hemisphere westerly wind band against the + annual-mean integrated heat uptake south of 30S (in PW; negative uptake + is heat lost from the ocean), along with the line of best fit.
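The heat uptake here is integrated from hfds; as noted in the description just below, models without hfds could in principle be covered by deriving it from the surface flux components. A minimal sketch of that identity (all fluxes in W/m^2; rsds and rlds are positive downward, while rsus, rlus, hfss and hfls are positive upward):

    def derive_hfds(rsds, rlds, rsus, rlus, hfss, hfls):
        """Surface downward heat flux from its components: absorbed shortwave
        and longwave radiation minus the reflected/emitted radiation and the
        turbulent sensible and latent heat fluxes."""
        return rsds + rlds - (rsus + rlus + hfss + hfls)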
The + diagnostic script will be updated later to include the tauu variable and + non-hfds datasets + hfds = rsds + rlds - (rsus + rlus + hfss + hfls) + themes: + - phys + realms: + - ocean + variables: + # tauu: + # preprocessor: preprocessor_time_land + # mip: Amon + # project: CMIP5 + # exp: historical + # ensemble: r1i1p1 + # start_year: 1986 + # end_year: 2005 + tauuo: + preprocessor: preprocessor_time + mip: Omon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1986 + end_year: 2005 + hfds: + preprocessor: preprocessor_time + mip: Omon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1986 + end_year: 2005 + areacello: + mip: fx + project: CMIP5 + exp: historical + ensemble: r0i0p0 + start_year: 1986 + end_year: 2005 + additional_datasets: + # - {dataset: bcc-csm1-1} + # - {dataset: CanESM2} + - {dataset: CNRM-CM5} + - {dataset: CSIRO-Mk3-6-0} + # - {dataset: BNU-ESM} + - {dataset: GFDL-ESM2G} + # - {dataset: GFDL-ESM2M} + # - {dataset: HadGEM2-ES, end_year: 2004} + # - {dataset: GISS-E2-H-CC} + # - {dataset: GISS-E2-R-CC} + # - {dataset: IPSL-CM5A-LR} + # - {dataset: IPSL-CM5A-MR} + # - {dataset: IPSL-CM5B-LR} + # - {dataset: MIROC-ESM} # , ensemble: r3i1p1} + - {dataset: MRI-ESM1} + # - {dataset: NorESM1-ME} + scripts: + Figure9a: + script: russell18jgr/russell18jgr-fig9a.ncl + styleset: CMIP5 + ncdf: default + + + Figure_9b: + description: | + Diagnostic for Russell et al. figure 9b. Plots the scatter plot of the + width of the Southern Hemisphere westerly wind band against the + annual-mean integrated carbon uptake south of 30S (in Pg C/yr), along + with the line of best fit. The diagnostic script will be updated later + to include the tauu variable. + themes: + - phys + - carbon + realms: + - ocean + variables: + # tauu: + # preprocessor: preprocessor_time_land + # mip: Amon + # project: CMIP5 + # exp: historical + # ensemble: r1i1p1 + # start_year: 1986 + # end_year: 2005 + tauuo: + preprocessor: preprocessor_time + mip: Omon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1986 + end_year: 2005 + fgco2: + preprocessor: preprocessor_time + mip: Omon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1986 + end_year: 2005 + areacello: + mip: fx + project: CMIP5 + exp: historical + ensemble: r0i0p0 + start_year: 1986 + end_year: 2005 + additional_datasets: + # - {dataset: bcc-csm1-1} + # - {dataset: CanESM2} + - {dataset: CNRM-CM5} + # - {dataset: CSIRO-Mk3-6-0} + # - {dataset: BNU-ESM} + - {dataset: GFDL-ESM2G} + # - {dataset: GFDL-ESM2M} + # - {dataset: HadGEM2-ES, end_year: 2004} + # - {dataset: GISS-E2-H-CC} + # - {dataset: GISS-E2-R-CC} + # - {dataset: IPSL-CM5A-LR} + # - {dataset: IPSL-CM5A-MR} + # - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC-ESM} # , ensemble: r3i1p1} + - {dataset: MRI-ESM1} + # - {dataset: NorESM1-ME} + scripts: + Figure9b: + script: russell18jgr/russell18jgr-fig9b.ncl + styleset: CMIP5 + ncdf: default + + + Figure_9c: + description: | + Diagnostic for Russell et al. Figure 9c. Plots the scatter plot of the + net heat uptake south of 30S (in PW) against the annual-mean integrated + carbon uptake south of 30S (in Pg C/yr), along with the line of best fit. + The diagnostic script will be updated later to include non-hfds datasets.
+ hfds = rsds + rlds - (rsus + rlus + hfss + hfls) + themes: + - phys + - carbon + realms: + - ocean + variables: + fgco2: + preprocessor: preprocessor_time + mip: Omon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1986 + end_year: 2005 + hfds: + preprocessor: preprocessor_time + mip: Omon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1986 + end_year: 2005 + areacello: + mip: fx + project: CMIP5 + exp: historical + ensemble: r0i0p0 + start_year: 1986 + end_year: 2005 + additional_datasets: + # - {dataset: bcc-csm1-1} + # - {dataset: CanESM2} + - {dataset: CNRM-CM5} + # - {dataset: CSIRO-Mk3-6-0} + # - {dataset: BNU-ESM} + - {dataset: GFDL-ESM2G} + # - {dataset: GFDL-ESM2M} + # - {dataset: HadGEM2-ES, end_year: 2004} + # - {dataset: GISS-E2-H-CC} + # - {dataset: GISS-E2-R-CC} + # - {dataset: IPSL-CM5A-LR} + # - {dataset: IPSL-CM5A-MR} + # - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC-ESM} + # - {dataset: MRI-ESM1} + # - {dataset: NorESM1-ME} + scripts: + Figure9c: + script: russell18jgr/russell18jgr-fig9c.ncl + styleset: CMIP5 + ncdf: default diff --git a/esmvaltool/recipes/recipe_schlund20esd.yml b/esmvaltool/recipes/recipe_schlund20esd.yml new file mode 100644 index 0000000000..041ae07e41 --- /dev/null +++ b/esmvaltool/recipes/recipe_schlund20esd.yml @@ -0,0 +1,2420 @@ +# ESMValTool +# recipe_schlund20esd.yml +--- +documentation: + title: > + Emergent constraints on equilibrium climate sensitivity in CMIP5: do they + hold for CMIP6? + + description: > + This recipe evaluates several emergent constraints on the equilibrium + climate sensitivity (ECS) on CMIP5 and CMIP6 models. + + authors: + - schlund_manuel + + maintainer: + - schlund_manuel + + references: + - schlund20esd + + projects: + - 4c + - crescendo + - eval4cmip + + +SEABORN_SETTINGS: &seaborn_settings + style: ticks + rc: + axes.titlepad: 15.0 + xtick.top: true + ytick.right: true + xtick.minor.visible: true + ytick.minor.visible: true + +VARIABLE_ANCHOR_CMIP5: &var_settings_cmip5 + project: CMIP5 + exp: historical + ensemble: r1i1p1 + +VARIABLE_ANCHOR_CMIP6: &var_settings_cmip6 + project: CMIP6 + exp: historical + +DIAG_NCL: &diag_ncl + script: emergent_constraints/ecs_scatter.ncl + output_diag_only: true + +DIAG_PY: &diag_py + script: emergent_constraints/ecs_scatter.py + seaborn_settings: *seaborn_settings + +DIAG_MULTIPLE_CONSTRAINTS: &diag_multiple_constraints + script: emergent_constraints/multiple_constraints.py + ignore_patterns: + - 'ecs_regression_*.nc' + - 'lambda.nc' + - 'psi_*.nc' + - 'su_*.nc' + - 'zhai_*.nc' + numbers_as_markers: true + plot_regression_line_mean: true + seaborn_settings: *seaborn_settings + + +preprocessors: + + pp500: &pp500 + extract_levels: + levels: 50000 + scheme: linear + + pp_ltmi_0: + extract_levels: + levels: [85000, 70000, 60000, 50000, 40000] + scheme: linear + + pp_ltmi_1: + extract_levels: + levels: [85000, 70000] + scheme: linear + + spatial_mean: + area_statistics: + operator: mean + + tropical_mean: + extract_region: + start_latitude: -28 + end_latitude: 28 + start_longitude: 0 + end_longitude: 360 + area_statistics: + operator: mean + climate_statistics: + operator: mean + + southern_midlatitudes_mean: + extract_region: + start_latitude: -56 + end_latitude: -36 + start_longitude: 0 + end_longitude: 360 + area_statistics: + operator: mean + climate_statistics: + operator: mean + + tropical_mask_40: &tropical_mask_40_pp + regrid: + target_grid: 2x2 + scheme: linear + extract_region: + start_latitude: -40 + end_latitude: 40 + 
start_longitude: 0 + end_longitude: 360 + + tropical_mask_40_500hPa: + <<: *tropical_mask_40_pp + <<: *pp500 + + zonal_mean: + regrid: + target_grid: 2x2 + scheme: nearest + extract_levels: + levels: [ + 100000, + 92500, + 85000, + 70000, + 60000, + 50000, + 40000, + 30000, + 25000, + 20000, + 15000, + 10000, + ] + scheme: linear + extract_region: + start_latitude: -45 + end_latitude: 40 + start_longitude: 0 + end_longitude: 360 + zonal_statistics: + operator: mean + climate_statistics: + operator: mean + + +diagnostics: + + diag_y_ecs_cmip5: + description: Equilibrium Climate Sensitivity for CMIP5. + variables: + tas_rtnt: &tas_settings_cmip5 + short_name: tas + preprocessor: spatial_mean + project: CMIP5 + mip: Amon + ensemble: r1i1p1 + additional_datasets: &rtnt_datasets_cmip5 + - {dataset: ACCESS1-0, exp: piControl, start_year: 300, end_year: 449} + - {dataset: ACCESS1-0, exp: abrupt4xCO2, start_year: 300, end_year: 449} + - {dataset: ACCESS1-3, exp: piControl, start_year: 250, end_year: 399} + - {dataset: ACCESS1-3, exp: abrupt4xCO2, start_year: 250, end_year: 399} + - {dataset: bcc-csm1-1, exp: piControl, start_year: 160, end_year: 309} + - {dataset: bcc-csm1-1, exp: abrupt4xCO2, start_year: 160, end_year: 309} + - {dataset: bcc-csm1-1-m, exp: piControl, start_year: 240, end_year: 389} + - {dataset: bcc-csm1-1-m, exp: abrupt4xCO2, start_year: 240, end_year: 389} + - {dataset: BNU-ESM, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: BNU-ESM, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: CanESM2, exp: piControl, start_year: 2321, end_year: 2470} + - {dataset: CanESM2, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + # Wrong start year for piControl? (branch_time = 2.) + - {dataset: CCSM4, exp: piControl, start_year: 250, end_year: 399} + - {dataset: CCSM4, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM5, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM5, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + # Only 140 years available + - {dataset: CNRM-CM5-2, exp: piControl, start_year: 1850, end_year: 1989} + - {dataset: CNRM-CM5-2, exp: abrupt4xCO2, start_year: 1850, end_year: 1989} + - {dataset: CSIRO-Mk3-6-0, exp: piControl, start_year: 104, end_year: 253} + - {dataset: CSIRO-Mk3-6-0, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: FGOALS-g2, exp: piControl, start_year: 490, end_year: 639} + - {dataset: FGOALS-g2, exp: abrupt4xCO2, start_year: 490, end_year: 639} + - {dataset: GFDL-CM3, exp: piControl, start_year: 1, end_year: 150} + - {dataset: GFDL-CM3, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: GFDL-ESM2G, exp: piControl, start_year: 1, end_year: 150} + - {dataset: GFDL-ESM2G, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: GFDL-ESM2M, exp: piControl, start_year: 1, end_year: 150} + - {dataset: GFDL-ESM2M, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: GISS-E2-H, exp: piControl, start_year: 2660, end_year: 2809} + - {dataset: GISS-E2-H, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: GISS-E2-R, exp: piControl, start_year: 4200, end_year: 4349} + - {dataset: GISS-E2-R, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + # Experiments start at 1859-12-01 + - {dataset: HadGEM2-ES, exp: piControl, start_year: 1860, end_year: 2009} + - {dataset: HadGEM2-ES, exp: abrupt4xCO2, start_year: 1860, end_year: 2009} + - {dataset: inmcm4, exp: piControl, start_year: 2090, end_year: 2239} + - {dataset: inmcm4, exp: 
abrupt4xCO2, start_year: 2090, end_year: 2239} + # Only 140 years available + - {dataset: IPSL-CM5A-MR, exp: piControl, start_year: 1850, end_year: 1989} + - {dataset: IPSL-CM5A-MR, exp: abrupt4xCO2, start_year: 1850, end_year: 1989} + - {dataset: IPSL-CM5B-LR, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: IPSL-CM5B-LR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: MIROC5, exp: piControl, start_year: 2100, end_year: 2249} + - {dataset: MIROC5, exp: abrupt4xCO2, start_year: 2100, end_year: 2249} + - {dataset: MIROC-ESM, exp: piControl, start_year: 1880, end_year: 2029} + - {dataset: MIROC-ESM, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: MPI-ESM-LR, exp: piControl, start_year: 1880, end_year: 2029} + - {dataset: MPI-ESM-LR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM-MR, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM-MR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM-P, exp: piControl, start_year: 1866, end_year: 2015} + - {dataset: MPI-ESM-P, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: MRI-CGCM3, exp: piControl, start_year: 1891, end_year: 2040} + - {dataset: MRI-CGCM3, exp: abrupt4xCO2, start_year: 1851, end_year: 2000} + - {dataset: NorESM1-M, exp: piControl, start_year: 700, end_year: 849} + - {dataset: NorESM1-M, exp: abrupt4xCO2, start_year: 1, end_year: 150} + tas_rtmt: + <<: *tas_settings_cmip5 + additional_datasets: &rtmt_datasets_cmip5 + - {dataset: IPSL-CM5A-LR, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: IPSL-CM5A-LR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + rtnt: + <<: *tas_settings_cmip5 + short_name: rtnt + derive: true + additional_datasets: *rtnt_datasets_cmip5 + rtmt: + <<: *tas_settings_cmip5 + short_name: rtmt + additional_datasets: *rtmt_datasets_cmip5 + scripts: + ecs: + script: climate_metrics/ecs.py + calculate_mmm: false + output_attributes: + var_type: label + tag: ECS + plot_ylim: [1.5, 6.0] + project: CMIP5 + original_paper: all + provenance_statistics: ['mean', 'anomaly'] + provenance_domains: ['global'] + provenance_authors: ['schlund_manuel'] + provenance_references: ['gregory04grl'] + provenance_realms: ['atmos'] + provenance_themes: ['phys'] + + diag_y_ecs_cmip6: + description: Equilibrium Climate Sensitivity for CMIP6. 
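climate_metrics/ecs.py implements the Gregory et al. (2004) regression cited in the provenance above: annual-mean anomalies of the net TOA radiation (rtnt, or rtmt where rtnt is unavailable) from abrupt4xCO2 relative to piControl are regressed on the corresponding tas anomalies, and ECS is half the temperature at which the fitted net flux reaches zero, half because the 4xCO2 forcing is roughly twice that of a doubling. Stripped to its core, the calculation looks like this (a sketch that assumes the anomalies are already computed; the real script handles the piControl baseline more carefully):

    import numpy as np

    def gregory_ecs(tas_anom, rtnt_anom):
        """ECS from the Gregory regression N = F + lambda * dT.

        tas_anom, rtnt_anom: 1-D arrays of annual-mean abrupt4xCO2-minus-
        piControl anomalies over the 150-year analysis period.
        """
        lam, forcing = np.polyfit(tas_anom, rtnt_anom, 1)  # slope, intercept
        dt_4x = -forcing / lam  # warming at which the net flux crosses zero
        return dt_4x / 2.0      # per CO2 doubling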
+ variables: + tas_rtnt: &tas_settings_cmip6 + short_name: tas + preprocessor: spatial_mean + project: CMIP6 + mip: Amon + additional_datasets: &rtnt_datasets_cmip6 + - {dataset: ACCESS-CM2, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 950, end_year: 1099, institute: CSIRO-ARCCSS} + - {dataset: ACCESS-CM2, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 950, end_year: 1099, institute: CSIRO-ARCCSS} + - {dataset: ACCESS-ESM1-5, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 101, end_year: 250} + - {dataset: ACCESS-ESM1-5, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 101, end_year: 250} + - {dataset: AWI-CM-1-1-MR, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 2650, end_year: 2799} + - {dataset: AWI-CM-1-1-MR, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: BCC-CSM2-MR, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: BCC-CSM2-MR, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: BCC-ESM1, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: BCC-ESM1, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: CAMS-CSM1-0, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 3030, end_year: 3179} + - {dataset: CAMS-CSM1-0, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 3030, end_year: 3179} + - {dataset: CanESM5, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 5201, end_year: 5350} + - {dataset: CanESM5, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: CAS-ESM2-0, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150, institute: CAS} + - {dataset: CAS-ESM2-0, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150, institute: CAS} + - {dataset: CESM2, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150} + - {dataset: CESM2, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150} + - {dataset: CESM2-FV2, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 321, end_year: 470, institute: NCAR} + - {dataset: CESM2-FV2, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150, institute: NCAR} + - {dataset: CESM2-WACCM, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150, institute: NCAR} + - {dataset: CESM2-WACCM, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150, institute: NCAR} + - {dataset: CESM2-WACCM-FV2, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 301, end_year: 450, institute: NCAR} + - {dataset: CESM2-WACCM-FV2, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150, institute: NCAR} + - {dataset: CMCC-CM2-SR5, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: CMCC-CM2-SR5, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM6-1, exp: piControl, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM6-1, exp: abrupt-4xCO2, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM6-1-HR, exp: piControl, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM6-1-HR, exp: abrupt-4xCO2, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - 
{dataset: CNRM-ESM2-1, exp: piControl, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: CNRM-ESM2-1, exp: abrupt-4xCO2, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: E3SM-1-0, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 101, end_year: 250} + - {dataset: E3SM-1-0, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1, end_year: 150} + - {dataset: EC-Earth3-Veg, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: EC-Earth3-Veg, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} + # parent_time_units messed up + - {dataset: FGOALS-f3-L, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 600, end_year: 749} + - {dataset: FGOALS-f3-L, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: FGOALS-g3, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 463, end_year: 612} + - {dataset: FGOALS-g3, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 463, end_year: 612} + - {dataset: GISS-E2-1-G, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 4150, end_year: 4299} + - {dataset: GISS-E2-1-G, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: GISS-E2-1-H, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 3180, end_year: 3329} + - {dataset: GISS-E2-1-H, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: HadGEM3-GC31-LL, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: HadGEM3-GC31-LL, exp: abrupt-4xCO2, ensemble: r1i1p1f3, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: HadGEM3-GC31-MM, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: HadGEM3-GC31-MM, exp: abrupt-4xCO2, ensemble: r1i1p1f3, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: INM-CM5-0, exp: piControl, ensemble: r1i1p1f1, grid: gr1, start_year: 2099, end_year: 2248} + - {dataset: INM-CM5-0, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr1, start_year: 1850, end_year: 1999} + - {dataset: INM-CM4-8, exp: piControl, ensemble: r1i1p1f1, grid: gr1, start_year: 1947, end_year: 2096} + - {dataset: INM-CM4-8, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr1, start_year: 1850, end_year: 1999} + - {dataset: IPSL-CM6A-LR, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 1870, end_year: 2019} + - {dataset: IPSL-CM6A-LR, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: KACE-1-0-G, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 2150, end_year: 2299} + - {dataset: KACE-1-0-G, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: MIROC6, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 3200, end_year: 3349} + - {dataset: MIROC6, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 3200, end_year: 3349} + - {dataset: MIROC-ES2L, exp: piControl, ensemble: r1i1p1f2, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MIROC-ES2L, exp: abrupt-4xCO2, ensemble: r1i1p1f2, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM-1-2-HAM, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1950, end_year: 2099} + - {dataset: MPI-ESM-1-2-HAM, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM1-2-HR, exp: piControl, ensemble: r1i1p1f1, 
grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM1-2-HR, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM1-2-LR, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MPI-ESM1-2-LR, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MRI-ESM2-0, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MRI-ESM2-0, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + # parent_time_units not correct + - {dataset: NESM3, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 550, end_year: 699} + - {dataset: NESM3, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: NorCPM1, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 251, end_year: 400, institute: NCC} + - {dataset: NorCPM1, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150, institute: NCC} + - {dataset: NorESM2-LM, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1600, end_year: 1749} + - {dataset: NorESM2-LM, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150} + - {dataset: NorESM2-MM, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1201, end_year: 1350} + - {dataset: NorESM2-MM, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150} + # Issue #286 (manual copying was necessary) + - {dataset: SAM0-UNICON, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 274, end_year: 423} + - {dataset: SAM0-UNICON, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + # Wrong start year for piControl (must be 1201) + - {dataset: TaiESM1, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 201, end_year: 350} + - {dataset: TaiESM1, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150} + tas_rtmt: + <<: *tas_settings_cmip6 + additional_datasets: &rtmt_datasets_cmip6 + # branch_time_in_child weird + - {dataset: MCM-UA-1-0, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150} + - {dataset: MCM-UA-1-0, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 150} + - {dataset: UKESM1-0-LL, exp: piControl, ensemble: r1i1p1f2, grid: gn, start_year: 1960, end_year: 2109} + - {dataset: UKESM1-0-LL, exp: abrupt-4xCO2, ensemble: r1i1p1f2, grid: gn, start_year: 1850, end_year: 1999} + rtnt: + <<: *tas_settings_cmip6 + short_name: rtnt + derive: true + additional_datasets: *rtnt_datasets_cmip6 + rtmt: + <<: *tas_settings_cmip6 + short_name: rtmt + additional_datasets: *rtmt_datasets_cmip6 + scripts: + ecs: + script: climate_metrics/ecs.py + calculate_mmm: false + output_attributes: + var_type: label + tag: ECS + plot_ylim: [1.5, 6.0] + project: CMIP6 + provenance_statistics: ['mean', 'anomaly'] + provenance_domains: ['global'] + provenance_authors: ['schlund_manuel'] + provenance_references: ['gregory04grl'] + provenance_realms: ['atmos'] + provenance_themes: ['phys'] + + diag_x_sherwood_ltmi_cmip5: + description: Lower tropospheric mixing index (Sherwood et al., 2014) for CMIP5. 
+ themes: + - EC + realms: + - atmos + variables: + hur: + <<: *var_settings_cmip5 + preprocessor: pp_ltmi_1 + mip: Amon + start_year: 1989 + end_year: 1998 + reference_dataset: ERA-Interim + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + ta: + <<: *var_settings_cmip5 + preprocessor: pp_ltmi_1 + mip: Amon + start_year: 1989 + end_year: 1998 + reference_dataset: ERA-Interim + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + wap: + <<: *var_settings_cmip5 + preprocessor: pp_ltmi_0 + mip: Amon + start_year: 1989 + end_year: 1998 + reference_dataset: ERA-Interim + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H} + - {dataset: GISS-E2-R} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + scripts: + ecs_predictor: + <<: *diag_ncl + diag: ltmi + output_attributes: + var_type: feature + tag: SHL + plot_xlabel: 'LTMI [1]' + plot_title: 'Sherwood et al. (2014) constraint (SHL)' + plot_xlim: [0.45, 1.0] + project: CMIP5 + provenance_authors: ['lauer_axel'] + provenance_references: ['sherwood14nat'] + provenance_realms: ['atmos'] + provenance_themes: ['EC'] + + diag_x_sherwood_ltmi_cmip6: + description: Lower tropospheric mixing index (Sherwood et al., 2014) for CMIP6. 
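Each diag_x_* block tags its output with var_type: feature, while the ECS diagnostics tag theirs with var_type: label; multiple_constraints.py then fits, for every feature, an across-model linear regression of the label on the feature and evaluates it at the observed feature value. Schematically (illustrative names only; the actual script also propagates observational and regression uncertainties into a constrained ECS distribution):

    import numpy as np

    def constrained_ecs(feature, ecs, obs_value):
        """Across-model emergent constraint: regress ECS on the feature and
        evaluate the fit at the observed value of the feature.

        feature, ecs: 1-D arrays with one entry per climate model.
        """
        slope, intercept = np.polyfit(feature, ecs, 1)
        return slope * obs_value + intercept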
+ themes: + - EC + realms: + - atmos + variables: + hur: + <<: *var_settings_cmip6 + preprocessor: pp_ltmi_1 + mip: Amon + start_year: 1989 + end_year: 1998 + reference_dataset: ERA-Interim + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + ta: + <<: *var_settings_cmip6 + preprocessor: pp_ltmi_1 + mip: Amon + start_year: 1989 + end_year: 1998 + reference_dataset: ERA-Interim + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + wap: + <<: *var_settings_cmip6 + preprocessor: pp_ltmi_0 + mip: Amon + start_year: 1989 + end_year: 1998 + reference_dataset: ERA-Interim + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + additional_datasets: + - {dataset: ACCESS-CM2, ensemble: r1i1p1f1, grid: gn, institute: CSIRO-ARCCSS} + - {dataset: ACCESS-ESM1-5, ensemble: r1i1p1f1, grid: gn} + - {dataset: AWI-CM-1-1-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: CanESM5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAS-ESM2-0, ensemble: r1i1p1f1, grid: gn, institute: CAS} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-FV2, ensemble: r1i1p1f1, grid: gn, institute: NCAR} + - {dataset: CESM2-WACCM, ensemble: r1i1p1f1, grid: gn, institute: NCAR} + - {dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: E3SM-1-0, ensemble: r1i1p1f1, grid: gr} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr} + - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-G, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn} + - {dataset: INM-CM5-0, ensemble: r1i1p1f1, grid: gr1} + - {dataset: INM-CM4-8, ensemble: r1i1p1f1, grid: gr1} + - {dataset: IPSL-CM6A-LR, ensemble: r1i1p1f1, grid: gr} + - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn} + - {dataset: MPI-ESM-1-2-HAM, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-HR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorESM2-MM, ensemble: r1i1p1f1, grid: gn} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + scripts: + ecs_predictor: + <<: *diag_ncl + diag: ltmi + output_attributes: + var_type: feature + tag: SHL + plot_xlabel: 'LTMI [1]' + plot_title: 'Sherwood et al. (2014) constraint (SHL)' + plot_xlim: [0.45, 1.0] + project: CMIP6 + provenance_authors: ['lauer_axel'] + provenance_references: ['sherwood14nat'] + provenance_realms: ['atmos'] + provenance_themes: ['EC'] + + diag_x_sherwood_d_cmip5: + description: Sherwood D index (Sherwood et al., 2014) for CMIP5. 
+ themes: + - EC + realms: + - atmos + variables: + wap: + <<: *var_settings_cmip5 + preprocessor: pp_ltmi_0 + mip: Amon + start_year: 1989 + end_year: 1998 + reference_dataset: ERA-Interim + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H} + - {dataset: GISS-E2-R} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + scripts: + ecs_predictor: + <<: *diag_ncl + diag: sherwood_d + output_attributes: + var_type: feature + tag: SHD + plot_xlabel: 'Sherwood D index [1]' + plot_title: 'Sherwood et al. (2014) constraint (SHD)' + plot_xlim: [0.15, 0.65] + project: CMIP5 + provenance_authors: ['lauer_axel'] + provenance_references: ['sherwood14nat'] + provenance_realms: ['atmos'] + provenance_themes: ['EC'] + + diag_x_sherwood_d_cmip6: + description: Sherwood D index (Sherwood et al., 2014) for CMIP6. + themes: + - EC + realms: + - atmos + variables: + wap: + <<: *var_settings_cmip6 + preprocessor: pp_ltmi_0 + mip: Amon + start_year: 1989 + end_year: 1998 + reference_dataset: ERA-Interim + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + additional_datasets: + - {dataset: ACCESS-CM2, ensemble: r1i1p1f1, grid: gn, institute: CSIRO-ARCCSS} + - {dataset: ACCESS-ESM1-5, ensemble: r1i1p1f1, grid: gn} + - {dataset: AWI-CM-1-1-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: CanESM5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAS-ESM2-0, ensemble: r1i1p1f1, grid: gn, institute: CAS} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-FV2, ensemble: r1i1p1f1, grid: gn, institute: NCAR} + - {dataset: CESM2-WACCM, ensemble: r1i1p1f1, grid: gn, institute: NCAR} + - {dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: E3SM-1-0, ensemble: r1i1p1f1, grid: gr} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr} + - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-G, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn} + - {dataset: INM-CM5-0, ensemble: r1i1p1f1, grid: gr1} + - {dataset: INM-CM4-8, ensemble: r1i1p1f1, grid: gr1} + - {dataset: IPSL-CM6A-LR, ensemble: r1i1p1f1, grid: gr} + - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn} + - {dataset: MPI-ESM-1-2-HAM, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-HR, ensemble: r1i1p1f1, grid: gn} + - {dataset: 
MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorCPM1, ensemble: r1i1p1f1, grid: gn, institute: NCC} + - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorESM2-MM, ensemble: r1i1p1f1, grid: gn} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + scripts: + ecs_predictor: + <<: *diag_ncl + diag: sherwood_d + output_attributes: + var_type: feature + tag: SHD + plot_xlabel: 'Sherwood D index [1]' + plot_title: 'Sherwood et al. (2014) constraint (SHD)' + plot_xlim: [0.15, 0.65] + project: CMIP6 + provenance_authors: ['lauer_axel'] + provenance_references: ['sherwood14nat'] + provenance_realms: ['atmos'] + provenance_themes: ['EC'] + + diag_x_sherwood_s_cmip5: + description: Sherwood S index (Sherwood et al., 2014) for CMIP5. + themes: + - EC + realms: + - atmos + variables: + hur: + <<: *var_settings_cmip5 + preprocessor: pp_ltmi_1 + mip: Amon + start_year: 1989 + end_year: 1998 + reference_dataset: ERA-Interim + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + ta: + <<: *var_settings_cmip5 + preprocessor: pp_ltmi_1 + mip: Amon + start_year: 1989 + end_year: 1998 + reference_dataset: ERA-Interim + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + wap: + <<: *var_settings_cmip5 + preprocessor: pp_ltmi_0 + mip: Amon + start_year: 1989 + end_year: 1998 + reference_dataset: ERA-Interim + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H} + - {dataset: GISS-E2-R} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + scripts: + ecs_predictor: + <<: *diag_ncl + diag: sherwood_s + output_attributes: + var_type: feature + tag: SHS + plot_xlabel: 'Sherwood S index [1]' + plot_title: 'Sherwood et al. (2014) constraint (SHS)' + plot_xlim: [0.2, 0.6] + project: CMIP5 + provenance_authors: ['lauer_axel'] + provenance_references: ['sherwood14nat'] + provenance_realms: ['atmos'] + provenance_themes: ['EC'] + + diag_x_sherwood_s_cmip6: + description: Sherwood S index (Sherwood et al., 2014) for CMIP6. 
+ themes: + - EC + realms: + - atmos + variables: + hur: + <<: *var_settings_cmip6 + preprocessor: pp_ltmi_1 + mip: Amon + start_year: 1989 + end_year: 1998 + reference_dataset: ERA-Interim + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + ta: + <<: *var_settings_cmip6 + preprocessor: pp_ltmi_1 + mip: Amon + start_year: 1989 + end_year: 1998 + reference_dataset: ERA-Interim + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + wap: + <<: *var_settings_cmip6 + preprocessor: pp_ltmi_0 + mip: Amon + start_year: 1989 + end_year: 1998 + reference_dataset: ERA-Interim + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + additional_datasets: + - {dataset: ACCESS-CM2, ensemble: r1i1p1f1, grid: gn, institute: CSIRO-ARCCSS} + - {dataset: ACCESS-ESM1-5, ensemble: r1i1p1f1, grid: gn} + - {dataset: AWI-CM-1-1-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: CanESM5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAS-ESM2-0, ensemble: r1i1p1f1, grid: gn, institute: CAS} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-FV2, ensemble: r1i1p1f1, grid: gn, institute: NCAR} + - {dataset: CESM2-WACCM, ensemble: r1i1p1f1, grid: gn, institute: NCAR} + - {dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: E3SM-1-0, ensemble: r1i1p1f1, grid: gr} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr} + - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-G, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn} + - {dataset: INM-CM5-0, ensemble: r1i1p1f1, grid: gr1} + - {dataset: INM-CM4-8, ensemble: r1i1p1f1, grid: gr1} + - {dataset: IPSL-CM6A-LR, ensemble: r1i1p1f1, grid: gr} + - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn} + - {dataset: MPI-ESM-1-2-HAM, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-HR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorESM2-MM, ensemble: r1i1p1f1, grid: gn} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + scripts: + ecs_predictor: + <<: *diag_ncl + diag: sherwood_s + output_attributes: + var_type: feature + tag: SHS + plot_xlabel: 'Sherwood S index [1]' + plot_title: 'Sherwood et al. (2014) constraint (SHS)' + plot_xlim: [0.2, 0.6] + project: CMIP6 + provenance_authors: ['lauer_axel'] + provenance_references: ['sherwood14nat'] + provenance_realms: ['atmos'] + provenance_themes: ['EC'] + + diag_x_tian_itcz_cmip5: + description: Southern ITCZ index (Tian, 2015) for CMIP5. 
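+    # Note the two levels of `additional_datasets`: the list nested inside a
+    # variable adds the observational reference to that variable only, while
+    # the diagnostic-level list (the CMIP models) applies to every variable
+    # of this diagnostic.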
+ themes: + - EC + realms: + - atmos + variables: + pr: + <<: *var_settings_cmip5 + preprocessor: default + mip: Amon + start_year: 1986 + end_year: 2005 + reference_dataset: GPCP-V2.2 + additional_datasets: + - {dataset: GPCP-V2.2, project: obs4MIPs, level: L3, tier: 1} + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H} + - {dataset: GISS-E2-R} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + scripts: + ecs_predictor: + <<: *diag_ncl + diag: itczidx + output_attributes: + var_type: feature + tag: TII + plot_xlabel: 'Southern ITCZ index [mm day$^{-1}$]' + plot_title: 'Tian (2015) constraint (TII)' + plot_xlim: [-1.0, 3.0] + project: CMIP5 + provenance_authors: ['lauer_axel'] + provenance_references: ['tian15grl'] + provenance_realms: ['atmos'] + provenance_themes: ['EC'] + + diag_x_tian_itcz_cmip6: + description: Southern ITCZ index (Tian, 2015) for CMIP6. + themes: + - EC + realms: + - atmos + variables: + pr: + <<: *var_settings_cmip6 + preprocessor: default + mip: Amon + start_year: 1986 + end_year: 2005 + reference_dataset: GPCP-V2.2 + additional_datasets: + - {dataset: GPCP-V2.2, project: obs4MIPs, level: L3, tier: 1} + additional_datasets: + - {dataset: ACCESS-CM2, ensemble: r1i1p1f1, grid: gn, institute: CSIRO-ARCCSS} + - {dataset: ACCESS-ESM1-5, ensemble: r1i1p1f1, grid: gn} + - {dataset: AWI-CM-1-1-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: CanESM5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAS-ESM2-0, ensemble: r1i1p1f1, grid: gn, institute: CAS} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-FV2, ensemble: r1i1p1f1, grid: gn, institute: NCAR} + - {dataset: CESM2-WACCM, ensemble: r1i1p1f1, grid: gn, institute: NCAR} + - {dataset: CESM2-WACCM-FV2, ensemble: r1i1p1f1, grid: gn, institute: NCAR} + - {dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: E3SM-1-0, ensemble: r1i1p1f1, grid: gr} + - {dataset: EC-Earth3-Veg, ensemble: r1i1p1f1, grid: gr} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr} + - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-G, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn} + - {dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f3, grid: gn} + - {dataset: INM-CM5-0, ensemble: r1i1p1f1, grid: gr1} + - {dataset: INM-CM4-8, ensemble: r1i1p1f1, grid: gr1} + - {dataset: IPSL-CM6A-LR, ensemble: r1i1p1f1, grid: gr} + - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr} + - {dataset: MCM-UA-1-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: MIROC6, ensemble: r1i1p1f1, 
grid: gn} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn} + - {dataset: MPI-ESM-1-2-HAM, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-HR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorCPM1, ensemble: r1i1p1f1, grid: gn, institute: NCC} + - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorESM2-MM, ensemble: r1i1p1f1, grid: gn} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + scripts: + ecs_predictor: + <<: *diag_ncl + diag: itczidx + output_attributes: + var_type: feature + tag: TII + plot_xlabel: 'Southern ITCZ index [mm day$^{-1}$]' + plot_title: 'Tian (2015) constraint (TII)' + plot_xlim: [-1.0, 3.0] + project: CMIP6 + provenance_authors: ['lauer_axel'] + provenance_references: ['tian15grl'] + provenance_realms: ['atmos'] + provenance_themes: ['EC'] + + diag_x_tian_hum_cmip5: + description: Humidity index (Tian, 2015) for CMIP5. + themes: + - EC + realms: + - atmos + variables: + hus: + <<: *var_settings_cmip5 + preprocessor: pp500 + mip: Amon + start_year: 2003 + end_year: 2005 + reference_dataset: AIRS-2-1 + additional_datasets: + - {dataset: AIRS-2-1, project: obs4MIPs, level: L3, tier: 1} + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H} + - {dataset: GISS-E2-R} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + scripts: + ecs_predictor: + <<: *diag_ncl + diag: humidx + output_attributes: + var_type: feature + tag: TIH + plot_xlabel: 'Tropical Mid-tropospheric humidity index [%]' + plot_title: 'Tian (2015) constraint (TIH)' + plot_xlim: [-15.0, 50.0] + project: CMIP5 + provenance_authors: ['lauer_axel'] + provenance_references: ['tian15grl'] + provenance_realms: ['atmos'] + provenance_themes: ['EC'] + + diag_x_tian_hum_cmip6: + description: Humidity index (Tian, 2015) for CMIP6. 
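+    # Observational data in ESMValTool are organised in tiers: tier 1 covers
+    # obs4MIPs/ana4MIPs products (such as AIRS-2-1 here), tier 2 other freely
+    # available datasets and tier 3 datasets with access restrictions.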
+ themes: + - EC + realms: + - atmos + variables: + hus: + <<: *var_settings_cmip6 + preprocessor: pp500 + mip: Amon + start_year: 2003 + end_year: 2005 + reference_dataset: AIRS-2-1 + additional_datasets: + - {dataset: AIRS-2-1, project: obs4MIPs, level: L3, tier: 1} + additional_datasets: + - {dataset: ACCESS-CM2, ensemble: r1i1p1f1, grid: gn, institute: CSIRO-ARCCSS} + - {dataset: ACCESS-ESM1-5, ensemble: r1i1p1f1, grid: gn} + - {dataset: AWI-CM-1-1-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: CanESM5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAS-ESM2-0, ensemble: r1i1p1f1, grid: gn, institute: CAS} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-FV2, ensemble: r1i1p1f1, grid: gn, institute: NCAR} + - {dataset: CESM2-WACCM, ensemble: r1i1p1f1, grid: gn, institute: NCAR} + - {dataset: CESM2-WACCM-FV2, ensemble: r1i1p1f1, grid: gn, institute: NCAR} + - {dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: E3SM-1-0, ensemble: r1i1p1f1, grid: gr} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr} + - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-G, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn} + - {dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f3, grid: gn} + - {dataset: INM-CM5-0, ensemble: r1i1p1f1, grid: gr1} + - {dataset: INM-CM4-8, ensemble: r1i1p1f1, grid: gr1} + - {dataset: IPSL-CM6A-LR, ensemble: r1i1p1f1, grid: gr} + - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr} + - {dataset: MCM-UA-1-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn} + - {dataset: MPI-ESM-1-2-HAM, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-HR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorCPM1, ensemble: r1i1p1f1, grid: gn, institute: NCC} + - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorESM2-MM, ensemble: r1i1p1f1, grid: gn} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + scripts: + ecs_predictor: + <<: *diag_ncl + diag: humidx + output_attributes: + var_type: feature + tag: TIH + plot_xlabel: 'Tropical Mid-tropospheric humidity index [%]' + plot_title: 'Tian (2015) constraint (TIH)' + plot_xlim: [-15.0, 50.0] + project: CMIP6 + provenance_authors: ['lauer_axel'] + provenance_references: ['tian15grl'] + provenance_realms: ['atmos'] + provenance_themes: ['EC'] + + diag_x_lipat_cmip5: + description: Climatological Hadley cell extent (Lipat et al., 2017) for CMIP5. 
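+    # The `shhc` diagnostic derives the Southern-Hemisphere Hadley cell edge
+    # from `va`; following Lipat et al. (2017) this is presumably the
+    # subtropical latitude at which the zonal-mean meridional mass
+    # streamfunction changes sign, hence the negative (southern) latitudes
+    # in `plot_xlim`.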
+ themes: + - EC + realms: + - atmos + variables: + va: + <<: *var_settings_cmip5 + preprocessor: default + mip: Amon + start_year: 1980 + end_year: 2005 + reference_dataset: ERA-Interim + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H} + - {dataset: GISS-E2-R} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + scripts: + ecs_predictor: + <<: *diag_ncl + diag: shhc + output_attributes: + var_type: feature + tag: LIP + plot_xlabel: 'Southern hemisphere Hadley cell extent [°]' + plot_title: 'Lipat et al. (2017) constraint (LIP)' + plot_xlim: [-39.0, -30.0] + project: CMIP5 + provenance_authors: ['lauer_axel'] + provenance_references: ['lipat17grl'] + provenance_realms: ['atmos'] + provenance_themes: ['EC'] + + diag_x_lipat_cmip6: + description: Climatological Hadley cell extent (Lipat et al., 2017) for CMIP6. + themes: + - EC + realms: + - atmos + variables: + va: + <<: *var_settings_cmip6 + preprocessor: default + mip: Amon + start_year: 1980 + end_year: 2005 + reference_dataset: ERA-Interim + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + additional_datasets: + - {dataset: ACCESS-CM2, ensemble: r1i1p1f1, grid: gn, institute: CSIRO-ARCCSS} + - {dataset: ACCESS-ESM1-5, ensemble: r1i1p1f1, grid: gn} + - {dataset: AWI-CM-1-1-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: CanESM5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAS-ESM2-0, ensemble: r1i1p1f1, grid: gn, institute: CAS} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-FV2, ensemble: r1i1p1f1, grid: gn, institute: NCAR} + - {dataset: CESM2-WACCM, ensemble: r1i1p1f1, grid: gn, institute: NCAR} + - {dataset: CESM2-WACCM-FV2, ensemble: r1i1p1f1, grid: gn, institute: NCAR} + - {dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: E3SM-1-0, ensemble: r1i1p1f1, grid: gr} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr} + - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-G, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn} + - {dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f3, grid: gn} + - {dataset: INM-CM5-0, ensemble: r1i1p1f1, grid: gr1} + - {dataset: INM-CM4-8, ensemble: r1i1p1f1, grid: gr1} + - {dataset: IPSL-CM6A-LR, ensemble: r1i1p1f1, grid: gr} + - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn} + - {dataset: MIROC-ES2L, ensemble: 
r1i1p1f2, grid: gn} + - {dataset: MPI-ESM-1-2-HAM, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-HR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorCPM1, ensemble: r1i1p1f1, grid: gn, institute: NCC} + - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorESM2-MM, ensemble: r1i1p1f1, grid: gn} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + scripts: + ecs_predictor: + <<: *diag_ncl + diag: shhc + output_attributes: + var_type: feature + tag: LIP + plot_xlabel: 'Southern hemisphere Hadley cell extent [°]' + plot_title: 'Lipat et al. (2017) constraint (LIP)' + plot_xlim: [-39.0, -30.0] + project: CMIP6 + provenance_authors: ['lauer_axel'] + provenance_references: ['lipat17grl'] + provenance_realms: ['atmos'] + provenance_themes: ['EC'] + + diag_x_brient_alb_cmip5: + description: Covariance of shortwave cloud reflection (Brient and Schneider, 2016) for CMIP5. + themes: + - EC + realms: + - atmos + variables: + ts: + <<: *var_settings_cmip5 + mip: Amon + preprocessor: default + start_year: 2001 + end_year: 2005 + reference_dataset: HadISST + additional_datasets: + - {dataset: HadISST, project: OBS, type: reanaly, version: 1, tier: 2} + hur: + <<: *var_settings_cmip5 + preprocessor: pp500 + mip: Amon + start_year: 2001 + end_year: 2005 + reference_dataset: ERA-Interim + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + rsut: + <<: *var_settings_cmip5 + preprocessor: default + mip: Amon + start_year: 2001 + end_year: 2005 + reference_dataset: CERES-EBAF + additional_datasets: + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} + rsutcs: + <<: *var_settings_cmip5 + preprocessor: default + mip: Amon + start_year: 2001 + end_year: 2005 + reference_dataset: CERES-EBAF + additional_datasets: + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} + rsdt: + <<: *var_settings_cmip5 + preprocessor: default + mip: Amon + start_year: 2001 + end_year: 2005 + reference_dataset: CERES-EBAF + additional_datasets: + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H} + - {dataset: GISS-E2-R} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + scripts: + ecs_predictor: + <<: *diag_ncl + diag: covrefl + output_attributes: + var_type: feature + tag: BRI + plot_xlabel: 'Response of SW cloud reflectivity to SST changes [% K$^{-1}$]' + plot_title: 'Brient and Schneider (2016) constraint (BRI)' + plot_xlim: [-3.0, 1.5] + project: CMIP5 + provenance_authors: ['lauer_axel'] + provenance_references: ['brient16jclim'] + provenance_realms: ['atmos'] + provenance_themes: ['EC'] + + 
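+  # The Brient and Schneider (2016) predictor combines five variables:
+  # shortwave cloud reflectivity is formed from the all-sky and clear-sky
+  # upward fluxes (rsut, rsutcs) normalised by the incoming flux (rsdt) and
+  # regressed against SST (ts); in the original paper, hur at 500 hPa is used
+  # to identify the tropical low-cloud regions.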
diag_x_brient_alb_cmip6: + description: Covariance of shortwave cloud reflection (Brient and Schneider, 2016) for CMIP6. + themes: + - EC + realms: + - atmos + variables: + ts: + <<: *var_settings_cmip6 + mip: Amon + preprocessor: default + start_year: 2001 + end_year: 2005 + reference_dataset: HadISST + additional_datasets: + - {dataset: HadISST, project: OBS, type: reanaly, version: 1, tier: 2} + hur: + <<: *var_settings_cmip6 + preprocessor: pp500 + mip: Amon + start_year: 2001 + end_year: 2005 + reference_dataset: ERA-Interim + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + rsut: + <<: *var_settings_cmip6 + preprocessor: default + mip: Amon + start_year: 2001 + end_year: 2005 + reference_dataset: CERES-EBAF + additional_datasets: + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} + rsutcs: + <<: *var_settings_cmip6 + preprocessor: default + mip: Amon + start_year: 2001 + end_year: 2005 + reference_dataset: CERES-EBAF + additional_datasets: + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} + rsdt: + <<: *var_settings_cmip6 + preprocessor: default + mip: Amon + start_year: 2001 + end_year: 2005 + reference_dataset: CERES-EBAF + additional_datasets: + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, tier: 1} + additional_datasets: + - {dataset: ACCESS-CM2, ensemble: r1i1p1f1, grid: gn, institute: CSIRO-ARCCSS} + - {dataset: ACCESS-ESM1-5, ensemble: r1i1p1f1, grid: gn} + - {dataset: AWI-CM-1-1-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: CanESM5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAS-ESM2-0, ensemble: r1i1p1f1, grid: gn, institute: CAS} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-FV2, ensemble: r1i1p1f1, grid: gn, institute: NCAR} + - {dataset: CESM2-WACCM, ensemble: r1i1p1f1, grid: gn, institute: NCAR} + - {dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: E3SM-1-0, ensemble: r1i1p1f1, grid: gr} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr} + - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-G, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn} + - {dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f3, grid: gn} + - {dataset: INM-CM5-0, ensemble: r1i1p1f1, grid: gr1} + - {dataset: INM-CM4-8, ensemble: r1i1p1f1, grid: gr1} + - {dataset: IPSL-CM6A-LR, ensemble: r1i1p1f1, grid: gr} + - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn} + - {dataset: MPI-ESM-1-2-HAM, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-HR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorESM2-MM, ensemble: r1i1p1f1, grid: gn} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + scripts: + ecs_predictor: + 
<<: *diag_ncl + diag: covrefl + output_attributes: + var_type: feature + tag: BRI + plot_xlabel: 'Response of SW cloud reflectivity to SST changes [% K$^{-1}$]' + plot_title: 'Brient and Schneider (2016) constraint (BRI)' + plot_xlim: [-3.0, 1.5] + project: CMIP6 + provenance_authors: ['lauer_axel'] + provenance_references: ['brient16jclim'] + provenance_realms: ['atmos'] + provenance_themes: ['EC'] + + diag_x_cox_cmip5: + description: Temperature variability metric psi (Cox et al., 2018) for CMIP5. + themes: + - EC + realms: + - atmos + variables: + tas: + <<: *var_settings_cmip5 + preprocessor: spatial_mean + mip: Amon + exp: [historical, rcp85] + start_year: 1880 + end_year: 2014 + reference_dataset: HadCRUT4 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CNRM-CM5} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H} + - {dataset: GISS-E2-R} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + tasa: + <<: *var_settings_cmip5 + preprocessor: spatial_mean + mip: Amon + start_year: 1880 + end_year: 2014 + reference_dataset: HadCRUT4 + additional_datasets: + - {dataset: HadCRUT4, project: OBS, type: ground, version: 1, tier: 2} + scripts: + ecs_predictor: + script: climate_metrics/psi.py + output_attributes: + var_type: feature + tag: COX + plot_xlabel: 'Temperature variability metric $\psi$ [K]' + plot_title: 'Cox et al. (2018) constraint (COX)' + plot_xlim: [0.05, 0.35] + project: CMIP5 + provenance_authors: ['schlund_manuel'] + provenance_domains: ['global'] + provenance_references: ['cox18nature'] + provenance_realms: ['atmos'] + provenance_themes: ['EC'] + + diag_x_cox_cmip6: + description: Temperature variability metric psi (Cox et al., 2018) for CMIP6. 
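+    # climate_metrics/psi.py implements the Cox et al. (2018) metric
+    #
+    #   psi = sigma_T / sqrt(-ln(alpha_1T))
+    #
+    # where sigma_T is the standard deviation and alpha_1T the lag-1
+    # autocorrelation of de-trended annual-mean global-mean temperature
+    # (model `tas`; observed `tasa` anomalies from HadCRUT4).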
+ themes: + - EC + realms: + - atmos + variables: + tas: + <<: *var_settings_cmip6 + preprocessor: spatial_mean + mip: Amon + start_year: 1880 + end_year: 2014 + reference_dataset: HadCRUT4 + additional_datasets: + - {dataset: ACCESS-CM2, ensemble: r1i1p1f1, grid: gn, institute: CSIRO-ARCCSS} + - {dataset: ACCESS-ESM1-5, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: CanESM5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAS-ESM2-0, ensemble: r1i1p1f1, grid: gn, institute: CAS} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-FV2, ensemble: r1i1p1f1, grid: gn, institute: NCAR} + - {dataset: CESM2-WACCM, ensemble: r1i1p1f1, grid: gn, institute: NCAR} + - {dataset: CESM2-WACCM-FV2, ensemble: r1i1p1f1, grid: gn, institute: NCAR} + - {dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: E3SM-1-0, ensemble: r1i1p1f1, grid: gr} + - {dataset: EC-Earth3-Veg, ensemble: r1i1p1f1, grid: gr} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr} + - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-G, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn} + - {dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f3, grid: gn} + - {dataset: INM-CM5-0, ensemble: r1i1p1f1, grid: gr1} + - {dataset: INM-CM4-8, ensemble: r1i1p1f1, grid: gr1} + - {dataset: IPSL-CM6A-LR, ensemble: r1i1p1f1, grid: gr} + - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr} + - {dataset: MCM-UA-1-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn} + - {dataset: MPI-ESM-1-2-HAM, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-HR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorCPM1, ensemble: r1i1p1f1, grid: gn, institute: NCC} + - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorESM2-MM, ensemble: r1i1p1f1, grid: gn} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + tasa: + <<: *var_settings_cmip6 + preprocessor: spatial_mean + mip: Amon + start_year: 1880 + end_year: 2014 + reference_dataset: HadCRUT4 + additional_datasets: + - {dataset: HadCRUT4, project: OBS, type: ground, version: 1, tier: 2} + scripts: + ecs_predictor: + script: climate_metrics/psi.py + output_attributes: + var_type: feature + tag: COX + plot_xlabel: 'Temperature variability metric $\psi$ [K]' + plot_title: 'Cox et al. (2018) constraint (COX)' + plot_xlim: [0.05, 0.35] + project: CMIP6 + provenance_authors: ['schlund_manuel'] + provenance_domains: ['global'] + provenance_references: ['cox18nature'] + provenance_realms: ['atmos'] + provenance_themes: ['EC'] + + diag_x_volodin_cmip5: + description: Difference in total cloud fraction between tropics and Southern midlatitudes (Volodin, 2008) for CMIP5. 
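+    # `trop_clt` and `southern_midlat_clt` below are the same CMOR variable
+    # (`short_name: clt`) routed through different area-mean preprocessors;
+    # the `volodin` diagnostic uses their difference as the predictor. The
+    # empty `reference_dataset: ''` means the observational value is instead
+    # supplied as literal `additional_data` in diag_multiple_constraints
+    # (tag VOL).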
+ themes: + - EC + realms: + - atmos + variables: + trop_clt: + <<: *var_settings_cmip5 + short_name: clt + mip: Amon + preprocessor: tropical_mean + start_year: 1980 + end_year: 2000 + reference_dataset: '' + southern_midlat_clt: + <<: *var_settings_cmip5 + short_name: clt + mip: Amon + preprocessor: southern_midlatitudes_mean + start_year: 1980 + end_year: 2000 + reference_dataset: '' + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H} + - {dataset: GISS-E2-R} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + scripts: + ecs_predictor: + <<: *diag_py + diag: volodin + output_attributes: + var_type: feature + tag: VOL + plot_title: 'Volodin (2008) constraint (VOL)' + plot_xlim: [-40.0, 10.0] + project: CMIP5 + + diag_x_volodin_cmip6: + description: Difference in total cloud fraction between tropics and Southern midlatitudes (Volodin, 2008) for CMIP6. + themes: + - EC + realms: + - atmos + variables: + trop_clt: + <<: *var_settings_cmip6 + short_name: clt + mip: Amon + preprocessor: tropical_mean + start_year: 1980 + end_year: 2000 + reference_dataset: '' + southern_midlat_clt: + <<: *var_settings_cmip6 + short_name: clt + mip: Amon + preprocessor: southern_midlatitudes_mean + start_year: 1980 + end_year: 2000 + reference_dataset: '' + additional_datasets: + - {dataset: ACCESS-CM2, ensemble: r1i1p1f1, grid: gn, institute: CSIRO-ARCCSS} + - {dataset: ACCESS-ESM1-5, ensemble: r1i1p1f1, grid: gn} + - {dataset: AWI-CM-1-1-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: CanESM5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAS-ESM2-0, ensemble: r1i1p1f1, grid: gn, institute: CAS} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-FV2, ensemble: r1i1p1f1, grid: gn, institute: NCAR} + - {dataset: CESM2-WACCM, ensemble: r1i1p1f1, grid: gn, institute: NCAR} + - {dataset: CESM2-WACCM-FV2, ensemble: r1i1p1f1, grid: gn, institute: NCAR} + - {dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: E3SM-1-0, ensemble: r1i1p1f1, grid: gr} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr} + - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-G, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn} + - {dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f3, grid: gn} + - {dataset: INM-CM5-0, ensemble: r1i1p1f1, grid: gr1} + - {dataset: INM-CM4-8, ensemble: r1i1p1f1, grid: gr1} + - {dataset: IPSL-CM6A-LR, ensemble: r1i1p1f1, grid: gr} + - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: 
gn} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn} + - {dataset: MPI-ESM-1-2-HAM, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-HR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorCPM1, ensemble: r1i1p1f1, grid: gn, institute: NCC} + - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorESM2-MM, ensemble: r1i1p1f1, grid: gn} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + scripts: + ecs_predictor: + <<: *diag_py + diag: volodin + output_attributes: + var_type: feature + tag: VOL + plot_title: 'Volodin (2008) constraint (VOL)' + plot_xlim: [-40.0, 10.0] + project: CMIP6 + + diag_x_zhai_cmip5: + description: Seasonal MBLC fraction variation (Zhai et al., 2015) for CMIP5. + themes: + - EC + realms: + - atmos + variables: + cl: + <<: *var_settings_cmip5 + preprocessor: tropical_mask_40 + mip: Amon + start_year: 1980 + end_year: 2004 + reference_dataset: '' + wap: + <<: *var_settings_cmip5 + preprocessor: tropical_mask_40_500hPa + mip: Amon + start_year: 1980 + end_year: 2004 + reference_dataset: '' + tos: + <<: *var_settings_cmip5 + preprocessor: tropical_mask_40 + mip: Omon + start_year: 1980 + end_year: 2004 + reference_dataset: '' + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H} + - {dataset: GISS-E2-R} + - {dataset: HadGEM2-ES} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: NorESM1-M} + scripts: + ecs_predictor: + <<: *diag_py + diag: zhai + output_attributes: + var_type: feature + tag: ZHA + plot_title: 'Zhai et al. (2015) constraint (ZHA)' + plot_xlim: [-3.5, 1.5] + project: CMIP5 + + diag_x_zhai_cmip6: + description: Seasonal MBLC fraction variation (Zhai et al., 2015) for CMIP6. 
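+    # MBLC = marine boundary-layer cloud. Following Zhai et al. (2015), the
+    # predictor is the response of the seasonal MBLC fraction to the local
+    # SST (in % K-1): `cl` provides the cloud fraction, `wap` at 500 hPa
+    # selects subsidence regimes and `tos` the SST.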
+ themes: + - EC + realms: + - atmos + variables: + cl: + <<: *var_settings_cmip6 + preprocessor: tropical_mask_40 + mip: Amon + start_year: 1980 + end_year: 2004 + reference_dataset: '' + wap: + <<: *var_settings_cmip6 + preprocessor: tropical_mask_40_500hPa + mip: Amon + start_year: 1980 + end_year: 2004 + reference_dataset: '' + tos: + <<: *var_settings_cmip6 + preprocessor: tropical_mask_40 + mip: Omon + start_year: 1980 + end_year: 2004 + reference_dataset: '' + additional_datasets: + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn} + # Dataset issue: lev: has values > valid_max = 1.0 + # see https://github.com/ESMValGroup/ESMValCore/issues/1527 for details + # - {dataset: CAS-ESM2-0, ensemble: r1i1p1f1, grid: gn, institute: CAS} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-FV2, ensemble: r1i1p1f1, grid: gn, institute: NCAR} + - {dataset: CESM2-WACCM, ensemble: r1i1p1f1, grid: gn, institute: NCAR} + - {dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn} + - {dataset: E3SM-1-0, ensemble: r1i1p1f1, grid: gr} + - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-G, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn} + - {dataset: MPI-ESM-1-2-HAM, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-HR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorESM2-MM, ensemble: r1i1p1f1, grid: gn} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + scripts: + ecs_predictor: + <<: *diag_py + diag: zhai + output_attributes: + var_type: feature + tag: ZHA + plot_title: 'Zhai et al. (2015) constraint (ZHA)' + plot_xlim: [-3.5, 1.5] + project: CMIP6 + + diag_x_su_hur_cmip5: + description: Error in relative humidity (Su et al., 2014) for CMIP5. 
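+    # The '|' in `reference_dataset` presumably combines the two listed
+    # observational datasets into a single reference (AIRS-2-0 and MLS-AURA
+    # cover different altitude ranges in Su et al., 2014), and
+    # `metric: regression_slope` uses the slope of the model-versus-reference
+    # regression as the predictor.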
+ themes: + - EC + realms: + - atmos + variables: + hur: + <<: *var_settings_cmip5 + preprocessor: zonal_mean + mip: Amon + exp: [historical, rcp85] + start_year: 2005 + end_year: 2010 + reference_dataset: 'AIRS-2-0|MLS-AURA' + additional_datasets: + - {dataset: AIRS-2-0, project: obs4MIPs, level: L3, tier: 1} + - {dataset: MLS-AURA, project: OBS6, type: sat, version: '004', tier: 3} + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CNRM-CM5} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H} + - {dataset: GISS-E2-R} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + scripts: + ecs_predictor: + <<: *diag_py + diag: su + metric: regression_slope + output_attributes: + var_type: feature + tag: SU + plot_title: 'Su et al. (2014) constraint (SU)' + plot_xlim: [0.65, 1.3] + project: CMIP5 + + diag_x_su_hur_cmip6: + description: Error in relative humidity (Su et al., 2014) for CMIP6. + themes: + - EC + realms: + - atmos + variables: + hur: + <<: *var_settings_cmip6 + preprocessor: zonal_mean + mip: Amon + start_year: 2005 + end_year: 2010 + reference_dataset: 'AIRS-2-0|MLS-AURA' + additional_datasets: + - {dataset: AIRS-2-0, project: obs4MIPs, level: L3, tier: 1} + - {dataset: MLS-AURA, project: OBS6, type: sat, version: '004', tier: 3} + additional_datasets: + - {dataset: ACCESS-CM2, ensemble: r1i1p1f1, grid: gn, institute: CSIRO-ARCCSS} + - {dataset: ACCESS-ESM1-5, ensemble: r1i1p1f1, grid: gn} + - {dataset: AWI-CM-1-1-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: CanESM5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAS-ESM2-0, ensemble: r1i1p1f1, grid: gn, institute: CAS} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CESM2-FV2, ensemble: r1i1p1f1, grid: gn, institute: NCAR} + - {dataset: CESM2-WACCM, ensemble: r1i1p1f1, grid: gn, institute: NCAR} + - {dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr} + - {dataset: E3SM-1-0, ensemble: r1i1p1f1, grid: gr} + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr} + - {dataset: FGOALS-g3, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-G, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn} + - {dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f3, grid: gn} + - {dataset: INM-CM5-0, ensemble: r1i1p1f1, grid: gr1} + - {dataset: INM-CM4-8, ensemble: r1i1p1f1, grid: gr1} + - {dataset: IPSL-CM6A-LR, ensemble: r1i1p1f1, grid: gr} + - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr} + - {dataset: MCM-UA-1-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn} + - {dataset: MPI-ESM-1-2-HAM, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-HR, 
ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn} + - {dataset: NorESM2-MM, ensemble: r1i1p1f1, grid: gn} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn} + scripts: + ecs_predictor: + <<: *diag_py + diag: su + metric: regression_slope + output_attributes: + var_type: feature + tag: SU + plot_title: 'Su et al. (2014) constraint (SU)' + plot_xlim: [0.65, 1.3] + project: CMIP6 + + diag_multiple_constraints: + description: Evaluate multiple emergent constraints on ECS for all available models. + themes: + - EC + realms: + - atmos + scripts: + multiple_constraints: + <<: *diag_multiple_constraints + ancestors: [ + 'diag_y_ecs_*/ecs', + 'diag_x_*/ecs_predictor', + ] + group_by: project + additional_data: + - dataset: 0 + project: OBS + var_type: prediction_input + tag: VOL + data: -25 + - dataset: 0 + project: OBS + var_type: prediction_input + tag: ZHA + data: -1.28 + - dataset: 0 + project: OBS + var_type: prediction_input_error + tag: SU + data: 0.25 + - dataset: 0 + project: OBS + var_type: prediction_input_error + tag: VOL + data: 5.5 + - dataset: 0 + project: OBS + var_type: prediction_input_error + tag: ZHA + data: 0.187 + - dataset: 0 + project: OBS + var_type: prediction_input_error + tag: COX + data: 0.05 + - dataset: 0 + project: OBS + var_type: prediction_input_error + tag: SHL + data: 0.08 + - dataset: 0 + project: OBS + var_type: prediction_input_error + tag: SHD + data: 0.031 + - dataset: 0 + project: OBS + var_type: prediction_input_error + tag: SHS + data: 0.05 + - dataset: 0 + project: OBS + var_type: prediction_input_error + tag: BRI + data: 0.13 + - dataset: 0 + project: OBS + var_type: prediction_input_error + tag: LIP + data: 1.75 + - dataset: 0 + project: obs4MIPs + var_type: prediction_input_error + tag: TII + data: 0.5 + - dataset: 0 + project: obs4MIPs + var_type: prediction_input_error + tag: TIH + data: 10.0 + - dataset: MRI-CGCM3 + project: CMIP5 + var_type: feature + tag: ZHA + data: 0.4 + units: '% K-1' + - dataset: inmcm4 + project: CMIP5 + var_type: feature + tag: ZHA + data: 0.75 + units: '% K-1' + - dataset: CNRM-CM5 + project: CMIP5 + var_type: feature + tag: ZHA + data: -0.4 + units: '% K-1' + + diag_assess_zhai_constraint_cmip5_x: + description: Seasonal MBLC fraction variation (Zhai et al., 2015) for CMIP5. 
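+    # var_type semantics used throughout this recipe: `feature` = a model's
+    # predictor value x, `label` = its target value y (here ECS),
+    # `prediction_input` = the observed x and `prediction_input_error` = the
+    # observational uncertainty of x. Literal `data` entries hard-code values
+    # that are not computed by the diagnostics above (e.g. the ZHA features
+    # of MRI-CGCM3, inmcm4 and CNRM-CM5).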
+ themes: + - EC + realms: + - atmos + variables: + cl: + <<: *var_settings_cmip5 + preprocessor: tropical_mask_40 + mip: Amon + start_year: 1980 + end_year: 2004 + reference_dataset: '' + wap: + <<: *var_settings_cmip5 + preprocessor: tropical_mask_40_500hPa + mip: Amon + start_year: 1980 + end_year: 2004 + reference_dataset: '' + tos: + <<: *var_settings_cmip5 + preprocessor: tropical_mask_40 + mip: Omon + start_year: 1980 + end_year: 2004 + reference_dataset: '' + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H} + - {dataset: GISS-E2-R} + - {dataset: HadGEM2-ES} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: NorESM1-M} + scripts: + ecs_predictor: + <<: *diag_py + diag: zhai + output_attributes: + var_type: feature + tag: ZHA + plot_title: 'Zhai et al. (2015) constraint (ZHA) for CMIP5' + plot_xlim: [-3.0, 1.0] + project: CMIP5 + original_paper: all + + diag_assess_zhai_constraint_cmip5: + description: Assess Zhai et al. (2015) constraint for different combinations of CMIP5 models. + themes: + - EC + realms: + - atmos + scripts: + multiple_constraints: + <<: *diag_multiple_constraints + ancestors: [ + 'diag_y_ecs_cmip5/ecs', + 'diag_assess_zhai_constraint_cmip5_x/ecs_predictor', + ] + group_by: original_paper + numbers_as_markers: false + plot_regression_line_mean: false + seaborn_settings: + <<: *seaborn_settings + palette: Set1 + additional_data: + - dataset: 0 + project: OBS + var_type: prediction_input + tag: ZHA + data: -1.28 + - dataset: 0 + project: OBS + var_type: prediction_input_error + tag: ZHA + data: 0.187 + - dataset: CNRM-CM5 + project: CMIP5 + var_type: feature + tag: ZHA + data: -0.4 + units: '% K-1' + original_paper: original + - dataset: CSIRO-Mk3-6-0 + project: CMIP5 + var_type: feature + tag: ZHA + data: -0.793017 + units: '% K-1' + original_paper: original + - dataset: CanESM2 + project: CMIP5 + var_type: feature + tag: ZHA + data: -1.175260 + units: '% K-1' + original_paper: original + - dataset: FGOALS-g2 + project: CMIP5 + var_type: feature + tag: ZHA + data: -0.537552 + units: '% K-1' + original_paper: original + - dataset: GFDL-CM3 + project: CMIP5 + var_type: feature + tag: ZHA + data: -2.330577 + units: '% K-1' + original_paper: original + - dataset: GISS-E2-H + project: CMIP5 + var_type: feature + tag: ZHA + data: -0.273398 + units: '% K-1' + original_paper: original + - dataset: GISS-E2-R + project: CMIP5 + var_type: feature + tag: ZHA + data: -0.205921 + units: '% K-1' + original_paper: original + - dataset: HadGEM2-ES + project: CMIP5 + var_type: feature + tag: ZHA + data: -1.544692 + units: '% K-1' + original_paper: original + - dataset: IPSL-CM5A-LR + project: CMIP5 + var_type: feature + tag: ZHA + data: -1.081265 + units: '% K-1' + original_paper: original + - dataset: MIROC-ESM + project: CMIP5 + var_type: feature + tag: ZHA + data: 0.068771 + units: '% K-1' + original_paper: original + - dataset: MIROC5 + project: CMIP5 + var_type: feature + tag: ZHA + data: 0.410226 + units: '% K-1' + original_paper: original + - dataset: MPI-ESM-LR + project: CMIP5 + var_type: feature + tag: ZHA + data: 0.067958 + units: '% 
K-1' + original_paper: original + - dataset: MRI-CGCM3 + project: CMIP5 + var_type: feature + tag: ZHA + data: 0.4 + units: '% K-1' + original_paper: original + - dataset: NorESM1-M + project: CMIP5 + var_type: feature + tag: ZHA + data: 0.505949 + units: '% K-1' + original_paper: original + - dataset: inmcm4 + project: CMIP5 + var_type: feature + tag: ZHA + data: 0.75 + units: '% K-1' + original_paper: original + - dataset: MRI-CGCM3 + project: CMIP5 + var_type: feature + tag: ZHA + data: 0.4 + units: '% K-1' + original_paper: all + - dataset: inmcm4 + project: CMIP5 + var_type: feature + tag: ZHA + data: 0.75 + units: '% K-1' + original_paper: all + - dataset: CNRM-CM5 + project: CMIP5 + var_type: feature + tag: ZHA + data: -0.4 + units: '% K-1' + original_paper: all + - dataset: CNRM-CM5 + project: CMIP5 + var_type: label + tag: ECS + data: 3.250497 + units: 'K' + original_paper: original + - dataset: CSIRO-Mk3-6-0 + project: CMIP5 + var_type: label + tag: ECS + data: 4.077746 + units: 'K' + original_paper: original + - dataset: CanESM2 + project: CMIP5 + var_type: label + tag: ECS + data: 3.691430 + units: 'K' + original_paper: original + - dataset: FGOALS-g2 + project: CMIP5 + var_type: label + tag: ECS + data: 3.380961 + units: 'K' + original_paper: original + - dataset: GFDL-CM3 + project: CMIP5 + var_type: label + tag: ECS + data: 3.969521 + units: 'K' + original_paper: original + - dataset: GISS-E2-H + project: CMIP5 + var_type: label + tag: ECS + data: 2.307163 + units: 'K' + original_paper: original + - dataset: GISS-E2-R + project: CMIP5 + var_type: label + tag: ECS + data: 2.111006 + units: 'K' + original_paper: original + - dataset: HadGEM2-ES + project: CMIP5 + var_type: label + tag: ECS + data: 4.612856 + units: 'K' + original_paper: original + - dataset: IPSL-CM5A-LR + project: CMIP5 + var_type: label + tag: ECS + data: 4.126445 + units: 'K' + original_paper: original + - dataset: MIROC-ESM + project: CMIP5 + var_type: label + tag: ECS + data: 4.672773 + units: 'K' + original_paper: original + - dataset: MIROC5 + project: CMIP5 + var_type: label + tag: ECS + data: 2.721727 + units: 'K' + original_paper: original + - dataset: MPI-ESM-LR + project: CMIP5 + var_type: label + tag: ECS + data: 3.633317 + units: 'K' + original_paper: original + - dataset: MRI-CGCM3 + project: CMIP5 + var_type: label + tag: ECS + data: 2.597889 + units: 'K' + original_paper: original + - dataset: NorESM1-M + project: CMIP5 + var_type: label + tag: ECS + data: 2.799529 + units: 'K' + original_paper: original + - dataset: inmcm4 + project: CMIP5 + var_type: label + tag: ECS + data: 2.077224 + units: 'K' + original_paper: original diff --git a/esmvaltool/recipes/recipe_sea_surface_salinity.yml b/esmvaltool/recipes/recipe_sea_surface_salinity.yml new file mode 100644 index 0000000000..43ec0e6b5e --- /dev/null +++ b/esmvaltool/recipes/recipe_sea_surface_salinity.yml @@ -0,0 +1,72 @@ +--- +documentation: + title: CMUG diagnostics for sea surface salinity + + description: | + Recipe for evaluating models against ESACCI Sea Surface Salinity dataset + + authors: + - vegas-regidor_javier + + maintainer: + - loosveldt-tomas_saskia + + references: + - contact_authors + + projects: + - cmug + +preprocessors: + timeseries: + extract_shape: + # Relative paths are relative to the configuration option 'auxiliary_data_dir'. 
+ # The example shapefile can be downloaded from + # https://marineregions.org/download_file.php?name=World_Seas_IHO_v3.zip + # but any shapefile can be used + shapefile: World_Seas_IHO_v3/World_Seas_IHO_v3.shp + decomposed: True + method: contains + crop: False + ids: + - Arctic Ocean + - Southern Ocean + - North Atlantic Ocean + - South Atlantic Ocean + - North Pacific Ocean + - South Pacific Ocean + - Indian Ocean + area_statistics: + operator: mean + +datasets: + - &cmip6 {project: CMIP6, exp: historical, dataset: ACCESS-CM2, ensemble: r1i1p1f1, + start_year: 1950, end_year: 2014, alias: ACCESS-CM2} + - {<<: *cmip6, dataset: CMCC-CM2-HR4, alias: CMCC-CM2-HR4} + - {<<: *cmip6, dataset: CanESM5, alias: CanESM5} + - {<<: *cmip6, dataset: IPSL-CM6A-LR, alias: IPSL-CM6A-LR} + - {<<: *cmip6, dataset: MIROC6, alias: MIROC6} + - {<<: *cmip6, dataset: MPI-ESM1-2-HR, alias: MPI-ESM1-2-HR} + - {<<: *cmip6, dataset: NorESM2-MM, alias: NorESM2-MM} + - {<<: *cmip6, dataset: GISS-E2-2-H, alias: GISS-E2-2-H, institute: NASA-GISS} + + +diagnostics: + compare_salinity: + description: ESACCI-SEA-SURFACE-SALINITY check + variables: + sos: + reference_dataset: ESACCI-SEA-SURFACE-SALINITY_V1 + preprocessor: timeseries + mip: Omon + grid: gn + additional_datasets: + - {dataset: ESACCI-SEA-SURFACE-SALINITY, project: OBS6, tier: 2, + type: reanaly, version: fv1.8, start_year: 2010, end_year: 2018, + alias: ESACCI-SEA-SURFACE-SALINITY_V1} + - {dataset: ESACCI-SEA-SURFACE-SALINITY, project: OBS6, tier: 2, + type: reanaly, version: fv2.31, start_year: 2010, end_year: 2019, + alias: ESACCI-SEA-SURFACE-SALINITY_V2} + scripts: + compare_salinity: + script: sea_surface_salinity/compare_salinity.py diff --git a/esmvaltool/recipes/recipe_seaborn.yml b/esmvaltool/recipes/recipe_seaborn.yml new file mode 100644 index 0000000000..983efae0be --- /dev/null +++ b/esmvaltool/recipes/recipe_seaborn.yml @@ -0,0 +1,145 @@ +# ESMValTool +# recipe_seaborn.yml +--- +documentation: + title: Example recipe for the Seaborn diagnostic. + + description: > + This recipe showcases the use of the Seaborn diagnostic that provides a + high-level interface to Seaborn for ESMValTool recipes. For this, the input + data is arranged into a single `pandas.DataFrame`, which is then used as + input for the Seaborn function defined by the option `seaborn_func`. With + the Seaborn diagnostic, arbitrary Seaborn plots can be created. + + authors: + - schlund_manuel + + maintainer: + - schlund_manuel + + references: + - waskom21joss + + projects: + - 4c + - esm2025 + - isenes3 + - usmile + + +preprocessors: + + zonal_mean: + zonal_statistics: + operator: mean + + extract_ar6_regions: + regrid: + target_grid: 5x5 + scheme: linear + extract_shape: + shapefile: ar6 + crop: true + decomposed: true + ids: + Name: ®ions_to_extract + - N.Europe + - West&Central-Europe + - Mediterranean + - Equatorial.Pacific-Ocean + - Equatorial.Atlantic-Ocean + - Equatorial.Indic-Ocean + convert_units: + units: mm day-1 + + +diagnostics: + + plot_temperature_vs_lat: + description: Plot air temperature vs. latitude (pressure levels = colors). 
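+    # seaborn_diag.py arranges all input data into a single pandas.DataFrame
+    # and passes `seaborn_kwargs` through to the function named by
+    # `seaborn_func`. A rough standalone sketch of the resulting call (the
+    # DataFrame `df` and its column names are assumed here):
+    #
+    #   import seaborn as sns
+    #   from matplotlib.colors import LogNorm
+    #   grid = sns.relplot(data=df, x='latitude', y='zonal_mean_ta',
+    #                      col='alias', col_wrap=2, hue='air_pressure',
+    #                      hue_norm=LogNorm(), palette='plasma',
+    #                      linewidth=0.0, marker='o', s=1)
+    #   grid.set(xlabel='Latitude [°]', ylabel='Temperature [K]')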
+    variables:
+      zonal_mean_ta:
+        short_name: ta
+        mip: Amon
+        preprocessor: zonal_mean
+        project: CMIP6
+        exp: historical
+        timerange: '1991/2014'
+        additional_datasets:
+          - {dataset: CESM2-WACCM, grid: gn, ensemble: r1i1p1f1}
+          - {dataset: GFDL-ESM4, grid: gr1, ensemble: r1i1p1f1}
+    scripts:
+      plot:
+        script: seaborn_diag.py
+        seaborn_func: relplot
+        seaborn_kwargs:
+          x: latitude
+          y: zonal_mean_ta
+          col: alias
+          col_wrap: 2
+          hue: air_pressure
+          hue_norm: log
+          palette: plasma
+          linewidth: 0.0
+          marker: o
+          s: 1
+        add_aux_coords: true
+        data_frame_ops:
+          eval: air_pressure = air_pressure / 100.0
+        dropna_kwargs:
+          axis: 0
+          how: any
+        legend_title: Pressure [hPa]
+        plot_object_methods:
+          set:
+            xlabel: 'Latitude [°]'
+            ylabel: 'Temperature [K]'
+          set_titles: '{col_name}'
+        seaborn_settings:
+          style: ticks
+          rc:
+            axes.titlepad: 15.0
+        suptitle: Simulated Temperature (1991-2014)
+
+  plot_precipitation_histograms_region:
+    description: Plot precipitation histograms for different regions.
+    variables:
+      pr:
+        mip: day
+        preprocessor: extract_ar6_regions
+        project: CMIP6
+        exp: historical
+        timerange: '2005/2014'
+        additional_datasets:
+          - {dataset: CESM2-WACCM, grid: gn, ensemble: r1i1p1f1}
+          - {dataset: GFDL-ESM4, grid: gr1, ensemble: r1i1p1f1}
+    scripts:
+      plot:
+        script: seaborn_diag.py
+        seaborn_func: displot
+        seaborn_kwargs:
+          kind: hist
+          stat: density
+          bins: 300
+          x: pr
+          col: shape_id
+          col_order: *regions_to_extract
+          col_wrap: 3
+          hue: alias
+          facet_kws:
+            sharey: false
+        add_aux_coords: true
+        dropna_kwargs:
+          axis: 0
+          how: any
+        legend_title: Model
+        plot_object_methods:
+          set:
+            xlabel: 'Precipitation [mm/day]'
+            xlim: [0, 30]
+          set_titles: '{col_name}'
+        seaborn_settings:
+          style: ticks
+          rc:
+            axes.titlepad: 15.0
+        suptitle: Simulated Precipitation (2005-2014)
diff --git a/esmvaltool/recipes/recipe_seaice.yml b/esmvaltool/recipes/recipe_seaice.yml
new file mode 100644
index 0000000000..6f9b2e54e6
--- /dev/null
+++ b/esmvaltool/recipes/recipe_seaice.yml
@@ -0,0 +1,247 @@
+# ESMValTool
+# recipe_seaice.yml
+---
+documentation:
+  title: Sea ice diagnostics
+
+  description: |
+    Recipe for Arctic and Antarctic sea ice diagnostics.
+
+  authors:
+    - senftleben_daniel
+
+  maintainer:
+    - lauer_axel
+
+  references:
+    - massonnet12tc
+    - stroeve07grl
+
+  projects:
+    - crescendo
+    - esmval
+
+# only datasets for which historical and rcp85 data are available
+# (and which are working with the ESMValTool)
+datasets:
+  - {dataset: ACCESS1-0}
+  - {dataset: ACCESS1-3}
+  - {dataset: bcc-csm1-1}
+  #- {dataset: bcc-csm1-1-m} # areacello data missing on ESGF
+  - {dataset: CanESM2}
+  - {dataset: CCSM4}
+  - {dataset: CESM1-CAM5}
+  - {dataset: CNRM-CM5}
+  - {dataset: CSIRO-Mk3-6-0}
+  - {dataset: FGOALS-g2}
+  - {dataset: GFDL-CM3}
+  - {dataset: GFDL-ESM2G}
+  - {dataset: GFDL-ESM2M}
+  #- {dataset: inmcm4} # areacello data missing on ESGF
+  - {dataset: IPSL-CM5A-LR}
+  - {dataset: IPSL-CM5A-MR}
+  - {dataset: IPSL-CM5B-LR}
+  - {dataset: MIROC5}
+  - {dataset: MIROC-ESM}
+  - {dataset: MIROC-ESM-CHEM}
+  - {dataset: MPI-ESM-LR}
+  - {dataset: MPI-ESM-MR}
+  - {dataset: MRI-CGCM3}
+  - {dataset: MRI-ESM1}
+  - {dataset: NorESM1-M}
+  - {dataset: NorESM1-ME}
+
+
+diagnostics:
+  seaice_tsline:
+    title: Sea ice area and extent
+    description: Timeseries of September/February Arctic/Antarctic sea ice area and extent
+    themes:
+      - seaIce
+    realms:
+      - seaIce
+    variables:
+      sic:
+        mip: OImon
+        project: CMIP5
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 1960
+        end_year: 2005
+        additional_datasets:
+          - {dataset: HadISST, project: OBS, type: reanaly, version: 1, tier: 2}
+      areacello:
+        mip: fx
+        project: CMIP5
+        exp: historical
+        ensemble: r0i0p0
+        start_year: 1960
+        end_year: 2005
+    scripts:
+      sie_nh:
+        script: seaice/seaice_tsline.ncl
+        # "Arctic" or "Antarctic"; entire hemisphere will be evaluated
+        region: "Arctic"
+        # A = annual mean, 3 = March, 9 = September
+        month: "9"
+        # "CMIP5", "DEFAULT"
+        styleset: "CMIP5"
+        # Plot multi-model mean & std dev
+        multi_model_mean: true
+        # Create legend label for each individual ensemble member
+        EMs_in_lg: false
+        # Fill polar hole in data with sic = 1.
+        fill_pole_hole: true
+      sie_sh:
+        script: seaice/seaice_tsline.ncl
+        # "Arctic" or "Antarctic"; entire hemisphere will be evaluated
+        region: "Antarctic"
+        # A = annual mean, 3 = March, 9 = September
+        month: "3"
+        # "CMIP5", "DEFAULT"
+        styleset: "CMIP5"
+        # Plot multi-model mean & std dev
+        multi_model_mean: true
+        # Create legend label for each individual ensemble member
+        EMs_in_lg: false
+        # Fill polar hole in data with sic = 1.
+        fill_pole_hole: false
+
+  seaice_trends:
+    title: Trend sea ice extent
+    description: Trend of September/February Arctic/Antarctic sea ice extent
+    themes:
+      - seaIce
+    realms:
+      - seaIce
+    variables:
+      sic:
+        mip: OImon
+        project: CMIP5
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 1960
+        end_year: 2005
+        reference_dataset: HadISST
+        additional_datasets:
+          - {dataset: HadISST, project: OBS, type: reanaly, version: 1, tier: 2}
+      areacello:
+        mip: fx
+        project: CMIP5
+        exp: historical
+        ensemble: r0i0p0
+        start_year: 1960
+        end_year: 2005
+    scripts:
+      trends_nh:
+        script: seaice/seaice_trends.ncl
+        # "Arctic" or "Antarctic"; entire hemisphere will be evaluated
+        region: "Arctic"
+        # A = annual mean, 3 = March, 9 = September
+        month: "9"
+        # Fill polar hole in data with sic = 1.
+ fill_pole_hole: true + trends_sh: + script: seaice/seaice_trends.ncl + # "Arctic" or "Antarctic"; entire hemisphere will be evaluated + region: "Antarctic" + # A = annual mean, 3 = March, 9 = September + month: "2" + + seaice_ecs: + title: Emergent constraint sea ice extent + description: Emergent constraints for September Arctic sea ice extent + themes: + - seaIce + realms: + - seaIce + variables: + sic: + mip: OImon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1960 + end_year: 2005 + reference_dataset: HadISST + additional_datasets: + - {dataset: HadISST, project: OBS, type: reanaly, version: 1, tier: 2, start_year: 1960, end_year: 2005} + areacello: + mip: fx + project: CMIP5 + exp: historical + ensemble: r0i0p0 + start_year: 1960 + end_year: 2005 + additional_datasets: + # must be exactly the same models as the ones specified in section datasets + - {dataset: ACCESS1-0, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: ACCESS1-3, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: bcc-csm1-1, exp: rcp85, start_year: 2006, end_year: 2100} + #- {dataset: bcc-csm1-1-m, exp: rcp85, start_year: 2006, end_year: 2100} # areacello data missing on ESGF + - {dataset: CanESM2, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: CCSM4, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: CESM1-CAM5, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: CNRM-CM5, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: CSIRO-Mk3-6-0, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: FGOALS-g2, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: GFDL-CM3, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: GFDL-ESM2G, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: GFDL-ESM2M, exp: rcp85, start_year: 2006, end_year: 2100} + #- {dataset: inmcm4, exp: rcp85, start_year: 2006, end_year: 2100} # areacello data missing on ESGF + - {dataset: IPSL-CM5A-LR, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: IPSL-CM5A-MR, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: IPSL-CM5B-LR, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: MIROC5, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: MIROC-ESM, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: MIROC-ESM-CHEM, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: MPI-ESM-LR, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: MPI-ESM-MR, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: MRI-CGCM3, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: MRI-ESM1, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: NorESM1-M, exp: rcp85, start_year: 2006, end_year: 2100} + - {dataset: NorESM1-ME, exp: rcp85, start_year: 2006, end_year: 2100} + scripts: + ecs_nh: + script: seaice/seaice_ecs.ncl + # "Arctic" or "Antarctic"; entire hemisphere will be evaluated + region: "Arctic" + # A = annual mean, 3 = March, 9 = September + month: "9" + # Fill polar hole in data with sic = 1. 
+        fill_pole_hole: true
+        hist_exp: "historical"
+        rcp_exp: "rcp85"
+        styleset: "CMIP5"
+
+  seaice_yod:
+    title: Sea ice year of near disappearance
+    description: Year of near disappearance of September Arctic sea ice
+    themes:
+      - seaIce
+    realms:
+      - seaIce
+    variables:
+      sic:
+        mip: OImon
+        project: CMIP5
+        exp: rcp85
+        ensemble: r1i1p1
+        start_year: 2006
+        end_year: 2100
+      areacello:
+        mip: fx
+        project: CMIP5
+        exp: historical
+        ensemble: r0i0p0
+        start_year: 2006
+        end_year: 2100
+    scripts:
+      yod_nh:
+        script: seaice/seaice_yod.ncl
+        # "Arctic" or "Antarctic"; entire hemisphere will be evaluated
+        region: "Arctic"
+        # A = annual mean, 3 = March, 9 = September
+        month: "9"
+        # Fill polar hole in data with sic = 1.
+        fill_pole_hole: true
diff --git a/esmvaltool/recipes/recipe_seaice_drift.yml b/esmvaltool/recipes/recipe_seaice_drift.yml
new file mode 100644
index 0000000000..7cbe44ebe1
--- /dev/null
+++ b/esmvaltool/recipes/recipe_seaice_drift.yml
@@ -0,0 +1,111 @@
+# ESMValTool
+# recipe_seaice_drift.yml
+---
+documentation:
+  title: |
+    Sea-ice drift.
+
+  description: |
+    Sea ice drift evaluation
+
+  authors:
+    - docquier_david
+
+  projects:
+    - primavera
+
+  references:
+    - docquier2017cryo
+
+  maintainer:
+    - vegas-regidor_javier
+
+datasets:
+  - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical,
+     ensemble: r1i1p1, start_year: 1979, end_year: 2005}
+
+preprocessors:
+  extract:
+    extract_region:
+      start_longitude: 0
+      end_longitude: 360
+      start_latitude: 40
+      end_latitude: 90
+
+  extract_and_clim:
+    extract_region:
+      start_longitude: 0
+      end_longitude: 360
+      start_latitude: 40
+      end_latitude: 90
+    climate_statistics:
+      period: mon
+      operator: mean
+
+diagnostics:
+  seaice_drift:
+    description: Sea-ice drift
+    variables:
+      sic:
+        mip: day
+        preprocessor: extract_and_clim
+        reference_dataset: OSI-450-nh
+        additional_datasets:
+          - {dataset: OSI-450-nh, project: OBS, type: reanaly, version: v2,
+             mip: OImon, tier: 2, start_year: 1979, end_year: 2005}
+
+      sithick:
+        mip: day
+        preprocessor: extract_and_clim
+        reference_dataset: PIOMAS
+        derive: true
+        force_derivation: false
+        additional_datasets:
+          - {dataset: PIOMAS, project: OBS, type: reanaly, version: 2.1,
+             mip: day, tier: 2, start_year: 1979, end_year: 2005}
+
+      sispeed:
+        mip: day
+        preprocessor: extract_and_clim
+        reference_dataset: NSIDC-0116-nh
+        derive: true
+        force_derivation: false
+        additional_datasets:
+          - {dataset: NSIDC-0116-nh, project: OBS, type: reanaly, version: 4.1,
+             mip: day, tier: 3, start_year: 1979, end_year: 2005}
+
+      areacello:
+        mip: fx
+        preprocessor: extract
+        additional_datasets:
+          - {dataset: NSIDC-0116-nh, project: OBS, type: reanaly, version: 4.1,
+             tier: 3, start_year: 1979, end_year: 2005}
+          - {dataset: OSI-450-nh, project: OBS, type: reanaly, version: v2,
+             tier: 2, start_year: 1979, end_year: 2005}
+          - {dataset: PIOMAS, project: OBS, type: reanaly, version: 2.1,
+             tier: 2, start_year: 1979, end_year: 2005}
+
+    scripts:
+      sea_ice_drift:
+        script: seaice_drift/seaice_drift.py
+        latitude_treshold: 50
+      sea_ice_drift_SCICEX:
+        script: seaice_drift/seaice_drift.py
+        # lon, lat tuples
+        polygon:
+          - [-15., 87.]
+          - [-60., 86.58]
+          - [-130., 80]
+          - [-141., 80]
+          - [-141., 70]
+          - [-155., 72]
+          - [175., 75.5]
+          - [172., 78.5]
+          - [163, 80.5]
+          - [126, 78.5]
+          - [110, 84.33]
+          - [80, 84.42]
+          - [57, 85.17]
+          - [33, 83.8]
+          - [8, 84.08]
+        polygon_name: SCICEX
diff --git a/esmvaltool/recipes/recipe_seaice_feedback.yml b/esmvaltool/recipes/recipe_seaice_feedback.yml
new file mode 100644
index 0000000000..950aa49c73
--- /dev/null
+++ b/esmvaltool/recipes/recipe_seaice_feedback.yml
@@ -0,0 +1,101 @@
+# ESMValTool
+# recipe_seaice_feedback.yml
+---
+documentation:
+  title: |
+    Ice Formation Efficiency (IFE).
+
+  description: |
+    This recipe is related to the negative sea-ice growth–thickness feedback.
+    In this recipe, one process-based diagnostic named the
+    Ice Formation Efficiency (IFE) is computed based on monthly mean
+    sea-ice volume estimated north of 80°N. The choice of this domain
+    is motivated by the desire to minimize the influence of dynamic
+    processes but also by the availability of sea-ice thickness measurements.
+    The diagnostic intends to evaluate the strength of the negative sea-ice
+    thickness/growth feedback, which causes late-summer negative anomalies
+    in sea-ice area and volume to be partially recovered during the next
+    growing season. A chief cause behind the existence of this feedback is
+    the non-linear inverse dependence between heat conduction fluxes and
+    sea-ice thickness, which implies that thin sea ice grows faster than thick
+    sea ice. To estimate the strength of that feedback, anomalies of the annual
+    minimum of sea-ice volume north of 80°N are first estimated. Then,
+    the increase in sea-ice volume until the next annual maximum is computed
+    for each year. The IFE is defined as the regression of this ice volume
+    production onto the baseline summer volume anomaly.
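+    Schematically, if V_min(y) denotes the annual minimum of sea-ice
+    volume north of 80°N in year y and dV(y) the volume increase from
+    that minimum to the following annual maximum, the IFE is the slope
+    of the least-squares regression of dV(y) on the V_min(y) anomalies;
+    a more negative slope indicates a stronger stabilizing feedback.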
+ + authors: + - massonnet_francois + + maintainer: + - vegas-regidor_javier + + references: + - massonet18natcc + + projects: + - applicate + - primavera + +datasets: + - &cmip5 {project: CMIP5, exp: historical, dataset: ACCESS1-0, ensemble: r1i1p1, start_year: 1979, end_year: 2004} + - {<<: *cmip5, dataset: ACCESS1-3} + - {<<: *cmip5, dataset: bcc-csm1-1} + - {<<: *cmip5, dataset: BNU-ESM} + - {<<: *cmip5, dataset: CCSM4} + - {<<: *cmip5, dataset: CESM1-BGC} + - {<<: *cmip5, dataset: CESM1-CAM5} + - {<<: *cmip5, dataset: CESM1-FASTCHEM} + - {<<: *cmip5, dataset: CESM1-WACCM} + - {<<: *cmip5, dataset: CMCC-CESM} + - {<<: *cmip5, dataset: CMCC-CM} + - {<<: *cmip5, dataset: CMCC-CMS} + - {<<: *cmip5, dataset: CNRM-CM5} + - {<<: *cmip5, dataset: CNRM-CM5-2} + - {<<: *cmip5, dataset: FGOALS-g2} + - {<<: *cmip5, dataset: GFDL-CM2p1} + - {<<: *cmip5, dataset: GFDL-CM3} + - {<<: *cmip5, dataset: GFDL-ESM2G} + - {<<: *cmip5, dataset: GFDL-ESM2M} + - {<<: *cmip5, dataset: HadCM3} + - {<<: *cmip5, dataset: HadGEM2-CC} + - {<<: *cmip5, dataset: HadGEM2-ES} + - {<<: *cmip5, dataset: IPSL-CM5A-LR} + - {<<: *cmip5, dataset: IPSL-CM5A-MR} + - {<<: *cmip5, dataset: IPSL-CM5B-LR} + - {<<: *cmip5, dataset: MPI-ESM-LR} + - {<<: *cmip5, dataset: MPI-ESM-MR} + - {<<: *cmip5, dataset: MPI-ESM-P} + - {<<: *cmip5, dataset: NorESM1-M} + - {<<: *cmip5, dataset: NorESM1-ME} + - {<<: *cmip5, dataset: CanCM4} + - {<<: *cmip5, dataset: CanESM2} + - {<<: *cmip5, dataset: CSIRO-Mk3-6-0} + - {<<: *cmip5, dataset: EC-EARTH} + - {<<: *cmip5, dataset: GISS-E2-H, ensemble: r1i1p2} + - {<<: *cmip5, dataset: GISS-E2-R, ensemble: r1i1p2} + + # Missing areacello + # - {<<: *cmip5, dataset: bcc-csm1-1-m} + # - {<<: *cmip5, dataset: CESM1-CAM5-1-FV2} + # - {<<: *cmip5, dataset: FIO-ESM} + # - {<<: *cmip5, dataset: GISS-E2-H-CC} + # - {<<: *cmip5, dataset: GISS-E2-R-CC} + # - {<<: *cmip5, dataset: HadGEM2-AO} + # - {<<: *cmip5, dataset: inmcm4} + +diagnostics: + NegativeSeaIceFeedback: + description: Computes the negative seaice feedback + variables: + sit: + mip: OImon + areacello: + mip: fx + scripts: + negative_seaice_feedback: + script: seaice_feedback/negative_seaice_feedback.py + plot: + point_color: black + point_size: 10 + show_values: false diff --git a/esmvaltool/recipes/recipe_shapeselect.yml b/esmvaltool/recipes/recipe_shapeselect.yml index 58db4da176..b463f09df8 100644 --- a/esmvaltool/recipes/recipe_shapeselect.yml +++ b/esmvaltool/recipes/recipe_shapeselect.yml @@ -1,9 +1,23 @@ +# ESMValTool +# recipe_shapeselect.yml --- documentation: - description: 'Selects grid points belonging to a provided shapefile.' - authors: ['berg_pe'] - projects: ['c3s-magic'] - references: ['acknow_project'] + title: Shape selection + + description: | + This recipe selects grid points belonging to a provided shapefile + + authors: + - berg_peter + + maintainer: + - lindenlaub_lukas + + projects: + - c3s-magic + + references: + - acknow_project datasets: - {dataset: EC-EARTH, project: CMIP5, mip: Amon, exp: historical, ensemble: r12i1p1, start_year: 1990, end_year: 1999} @@ -22,8 +36,7 @@ diagnostics: script: shapeselect/diag_shapeselect.py # Example shapefiles can be found in: # esmvaltool/diag_scripts/shapeselect/testdata/ - # Relative paths are relative to 'auxiliary_data_dir' as configured in - # the config-user.yml file. + # Relative paths are relative to configuration option 'auxiliary_data_dir'. 
shapefile: 'Thames.shp' weighting_method: 'mean_inside' write_xlsx: true diff --git a/esmvaltool/recipes/recipe_smpi.yml b/esmvaltool/recipes/recipe_smpi.yml new file mode 100644 index 0000000000..feef65318e --- /dev/null +++ b/esmvaltool/recipes/recipe_smpi.yml @@ -0,0 +1,373 @@ +# ESMValTool +# recipe_smpi.yml +--- +documentation: + title: Single Model Performance Index + + description: | + Recipe for computing Single Model Performance Index. Follows Reichler + and Kim 2008. Considers the following variables: + Sea level pressure, Air Temperature, Zonal Wind Stress, Meridional Wind + Stress, 2m air temperature, Zonal Wind, Meridional Wind, Net surface heat + flux, Precipitation, Specific Humidity, Snow fraction, Sea Surface + Temperature, Sea Ice Fraction and sea surface salinity. + + authors: + - hassler_birgit + - gier_bettina + - righi_mattia + - eyring_veronika + + maintainer: + - hassler_birgit + + references: + - rk2008bams + + projects: + - crescendo + - c3s-magic + +datasets: + - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1} + +preprocessors: + + ppALL: + extract_levels: + levels: reference_dataset + scheme: linear + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.10 + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [reference_dataset] + + ppNOLEV: + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.10 + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [reference_dataset] + + ppNOLEVirreg: + regrid: + target_grid: 1x1 + scheme: linear + mask_fillvalues: + threshold_fraction: 0.10 + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [reference_dataset] + +diagnostics: + + ta: + description: Air temperature zonal mean + themes: + - phys + realms: + - atmos + variables: + ta: &variable_settings + preprocessor: ppALL + reference_dataset: ERA-Interim + mip: Amon + start_year: 1980 + end_year: 2005 + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + scripts: + grading: &grading_settings + script: perfmetrics/main.ncl + plot_type: cycle_zonal + time_avg: yearly + region: global + calc_grading: true + metric: [SMPI] + normalization: CMIP5 + smpi_n_bootstrap: 100 + + va: + description: Meridional Wind + themes: + - phys + realms: + - atmos + variables: + va: + <<: *variable_settings + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + scripts: + grading: + script: perfmetrics/main.ncl + plot_type: cycle_zonal + time_avg: yearly + region: global + calc_grading: true + metric: [SMPI] + normalization: CMIP5 + smpi_n_bootstrap: 100 + + ua: + 
description: Zonal Wind
+    themes:
+      - phys
+    realms:
+      - atmos
+    variables:
+      ua:
+        <<: *variable_settings
+        additional_datasets:
+          - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3}
+    scripts:
+      grading:
+        script: perfmetrics/main.ncl
+        plot_type: cycle_zonal
+        time_avg: yearly
+        region: global
+        calc_grading: true
+        metric: [SMPI]
+        normalization: CMIP5
+        smpi_n_bootstrap: 100
+
+  hus:
+    description: Specific humidity
+    themes:
+      - phys
+    realms:
+      - atmos
+    variables:
+      hus:
+        <<: *variable_settings
+        additional_datasets:
+          - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3}
+    scripts:
+      grading:
+        script: perfmetrics/main.ncl
+        plot_type: cycle_zonal
+        time_avg: yearly
+        region: global
+        calc_grading: true
+        metric: [SMPI]
+        normalization: CMIP5
+        smpi_n_bootstrap: 100
+
+  tas:
+    description: Near-surface temperature
+    themes:
+      - phys
+    realms:
+      - atmos
+    variables:
+      tas:
+        <<: *variable_settings
+        preprocessor: ppNOLEV
+        additional_datasets:
+          - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3}
+    scripts:
+      grading:
+        script: perfmetrics/main.ncl
+        plot_type: cycle_latlon
+        time_avg: yearly
+        region: global
+        calc_grading: true
+        metric: [SMPI]
+        normalization: CMIP5
+        smpi_n_bootstrap: 100
+
+  psl:
+    description: Sea-level pressure
+    themes:
+      - phys
+    realms:
+      - atmos
+    variables:
+      psl:
+        <<: *variable_settings
+        preprocessor: ppNOLEV
+        additional_datasets:
+          - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3}
+    scripts:
+      grading:
+        script: perfmetrics/main.ncl
+        plot_type: cycle_latlon
+        time_avg: yearly
+        region: global
+        calc_grading: true
+        metric: [SMPI]
+        normalization: CMIP5
+        smpi_n_bootstrap: 100
+
+  pr:
+    description: Precipitation
+    themes:
+      - phys
+    realms:
+      - atmos
+    variables:
+      pr:
+        <<: *variable_settings
+        preprocessor: ppNOLEV
+        reference_dataset: GPCP-V2.2
+        additional_datasets:
+          - {dataset: GPCP-V2.2, project: obs4MIPs, tier: 1}
+    scripts:
+      grading:
+        script: perfmetrics/main.ncl
+        plot_type: cycle_latlon
+        time_avg: yearly
+        region: global
+        calc_grading: true
+        metric: [SMPI]
+        normalization: CMIP5
+        smpi_n_bootstrap: 100
+
+  tos:
+    description: Sea surface temperature
+    themes:
+      - phys
+    realms:
+      - ocean
+    variables:
+      tos:
+        <<: *variable_settings
+        preprocessor: ppNOLEVirreg
+        reference_dataset: HadISST
+        mip: Omon
+        additional_datasets:
+          - {dataset: HadISST, project: OBS, type: reanaly, version: 1, tier: 2}
+    scripts:
+      grading:
+        script: perfmetrics/main.ncl
+        plot_type: cycle_latlon
+        time_avg: yearly
+        region: global
+        calc_grading: true
+        metric: [SMPI]
+        normalization: CMIP5
+        smpi_n_bootstrap: 100
+
+  sic:
+    description: Sea ice fraction
+    themes:
+      - phys
+    realms:
+      - seaIce
+    variables:
+      sic:
+        <<: *variable_settings
+        preprocessor: ppNOLEVirreg
+        reference_dataset: HadISST
+        mip: OImon
+        additional_datasets:
+          - {dataset: HadISST, project: OBS, type: reanaly, version: 1, tier: 2}
+    scripts:
+      grading:
+        script: perfmetrics/main.ncl
+        plot_type: cycle_latlon
+        time_avg: yearly
+        region: global
+        calc_grading: true
+        metric: [SMPI]
+        normalization: CMIP5
+        smpi_n_bootstrap: 100
+
+  hfds:
+    description: Net Surface Heat Flux
+    themes:
+      - phys
+    realms:
+      - ocean
+    variables:
+      hfds:
+        <<: *variable_settings
+        preprocessor: ppNOLEVirreg
+        mip: Omon
+        additional_datasets:
+          - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3}
+    scripts:
+      grading:
+        script: perfmetrics/main.ncl
+        plot_type: cycle_latlon
+
time_avg: yearly + region: global + calc_grading: true + metric: [SMPI] + normalization: CMIP5 + smpi_n_bootstrap: 100 + + tauu: + description: Zonal Wind Stress + themes: + - phys + realms: + - atmos + variables: + tauu: + <<: *variable_settings + preprocessor: ppNOLEV + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + scripts: + grading: + script: perfmetrics/main.ncl + plot_type: cycle_latlon + time_avg: yearly + region: global + calc_grading: true + metric: [SMPI] + normalization: CMIP5 + smpi_n_bootstrap: 100 + + tauv: + description: Meridional Wind Stress + themes: + - phys + realms: + - atmos + variables: + tauv: + <<: *variable_settings + preprocessor: ppNOLEV + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + scripts: + grading: + script: perfmetrics/main.ncl + plot_type: cycle_latlon + time_avg: yearly + region: global + calc_grading: true + metric: [SMPI] + normalization: CMIP5 + smpi_n_bootstrap: 100 + + ### COLLECT METRICS ################### + collect: + description: Wrapper to collect and plot previously calculated metrics + scripts: + SMPI: + script: perfmetrics/collect.ncl + ancestors: ['*/grading'] + metric: SMPI diff --git a/esmvaltool/recipes/recipe_smpi_4cds.yml b/esmvaltool/recipes/recipe_smpi_4cds.yml new file mode 100644 index 0000000000..3a60419d69 --- /dev/null +++ b/esmvaltool/recipes/recipe_smpi_4cds.yml @@ -0,0 +1,380 @@ +# ESMValTool +# recipe_smpi_4cds.yml +--- +documentation: + title: Single Model Performance Index + + description: | + Recipe for computing Single Model Performance Index. Follows Reichler + and Kim 2008. Considers the following variables: + Sea level pressure, Air Temperature, Zonal Wind Stress, Meridional Wind + Stress, 2m air temperature, Zonal Wind, Meridional Wind, Net surface heat + flux, Precipitation, Specific Humidity, Snow fraction, Sea Surface + Temperature, Sea Ice Fraction and sea surface salinity. 
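+    The index aggregates, for each model, the variance-weighted squared
+    errors against observations over all of these fields and normalizes
+    them by the ensemble average, so that values below (above) one mean
+    better (worse) than average performance; see Reichler and Kim (2008)
+    for the exact definition.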
+ + authors: + - hassler_birgit + - gier_bettina + - righi_mattia + - eyring_veronika + + maintainer: + - hassler_birgit + + references: + - rk2008bams + + projects: + - crescendo + - c3s-magic + +datasets: + - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1} + + +preprocessors: + + ppALL: + extract_levels: + levels: reference_dataset + scheme: linear + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.10 + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [reference_dataset] + + ppNOLEV: + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.10 + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [reference_dataset] + + ppNOLEVirreg: + regrid: + target_grid: 1x1 + scheme: linear + mask_fillvalues: + threshold_fraction: 0.10 + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [reference_dataset] + +diagnostics: + + ta: + description: Air temperature zonal mean + themes: + - phys + realms: + - atmos + variables: + ta: &variable_settings + preprocessor: ppALL + reference_dataset: ERA-Interim + mip: Amon + start_year: 1980 + end_year: 2005 + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + scripts: + grading: &grading_settings + script: perfmetrics/main.ncl + plot_type: cycle_zonal + time_avg: yearly + region: global + calc_grading: true + metric: [SMPI] + normalization: CMIP5 + smpi_n_bootstrap: 100 + + va: + description: Meridional Wind + themes: + - phys + realms: + - atmos + variables: + va: + <<: *variable_settings + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + scripts: + grading: + script: perfmetrics/main.ncl + plot_type: cycle_zonal + time_avg: yearly + region: global + calc_grading: true + metric: [SMPI] + normalization: CMIP5 + smpi_n_bootstrap: 100 + + ua: + description: Zonal Wind + themes: + - phys + realms: + - atmos + variables: + ua: + <<: *variable_settings + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + scripts: + grading: + script: perfmetrics/main.ncl + plot_type: cycle_zonal + time_avg: yearly + region: global + 
calc_grading: true
+        metric: [SMPI]
+        normalization: CMIP5
+        smpi_n_bootstrap: 100
+
+  hus:
+    description: Specific humidity
+    themes:
+      - phys
+    realms:
+      - atmos
+    variables:
+      hus:
+        <<: *variable_settings
+        additional_datasets:
+          - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3}
+    scripts:
+      grading:
+        script: perfmetrics/main.ncl
+        plot_type: cycle_zonal
+        time_avg: yearly
+        region: global
+        calc_grading: true
+        metric: [SMPI]
+        normalization: CMIP5
+        smpi_n_bootstrap: 100
+
+  tas:
+    description: Near-surface temperature
+    themes:
+      - phys
+    realms:
+      - atmos
+    variables:
+      tas:
+        <<: *variable_settings
+        preprocessor: ppNOLEV
+        additional_datasets:
+          - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3}
+    scripts:
+      grading:
+        script: perfmetrics/main.ncl
+        plot_type: cycle_latlon
+        time_avg: yearly
+        region: global
+        calc_grading: true
+        metric: [SMPI]
+        normalization: CMIP5
+        smpi_n_bootstrap: 100
+
+  psl:
+    description: Sea-level pressure
+    themes:
+      - phys
+    realms:
+      - atmos
+    variables:
+      psl:
+        <<: *variable_settings
+        preprocessor: ppNOLEV
+        additional_datasets:
+          - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3}
+    scripts:
+      grading:
+        script: perfmetrics/main.ncl
+        plot_type: cycle_latlon
+        time_avg: yearly
+        region: global
+        calc_grading: true
+        metric: [SMPI]
+        normalization: CMIP5
+        smpi_n_bootstrap: 100
+
+  pr:
+    description: Precipitation
+    themes:
+      - phys
+    realms:
+      - atmos
+    variables:
+      pr:
+        <<: *variable_settings
+        preprocessor: ppNOLEV
+        reference_dataset: GPCP-V2.2
+        additional_datasets:
+          - {dataset: GPCP-V2.2, project: obs4MIPs, level: L3, tier: 1}
+    scripts:
+      grading:
+        script: perfmetrics/main.ncl
+        plot_type: cycle_latlon
+        time_avg: yearly
+        region: global
+        calc_grading: true
+        metric: [SMPI]
+        normalization: CMIP5
+        smpi_n_bootstrap: 100
+
+  tos:
+    description: Sea surface temperature
+    themes:
+      - phys
+    realms:
+      - ocean
+    variables:
+      tos:
+        <<: *variable_settings
+        preprocessor: ppNOLEVirreg
+        reference_dataset: HadISST
+        mip: Omon
+        additional_datasets:
+          - {dataset: HadISST, project: OBS, type: reanaly, version: 1, tier: 2}
+    scripts:
+      grading:
+        script: perfmetrics/main.ncl
+        plot_type: cycle_latlon
+        time_avg: yearly
+        region: global
+        calc_grading: true
+        metric: [SMPI]
+        normalization: CMIP5
+        smpi_n_bootstrap: 100
+
+  sic:
+    description: Sea ice fraction
+    themes:
+      - phys
+    realms:
+      - seaIce
+    variables:
+      sic:
+        <<: *variable_settings
+        preprocessor: ppNOLEVirreg
+        reference_dataset: HadISST
+        mip: OImon
+        additional_datasets:
+          - {dataset: HadISST, project: OBS, type: reanaly, version: 1, tier: 2}
+    scripts:
+      grading:
+        script: perfmetrics/main.ncl
+        plot_type: cycle_latlon
+        time_avg: yearly
+        region: global
+        calc_grading: true
+        metric: [SMPI]
+        normalization: CMIP5
+        smpi_n_bootstrap: 100
+
+# hfds:
+#   description: Net Surface Heat Flux
+#   themes:
+#     - phys
+#   realms:
+#     - ocean
+#   variables:
+#     hfds:
+#       <<: *variable_settings
+#       preprocessor: ppNOLEVirreg
+#       mip: Omon
+#       additional_datasets:
+#         - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3}
+#   scripts:
+#     grading:
+#       script: perfmetrics/main.ncl
+#       plot_type: cycle_latlon
+#       time_avg: yearly
+#       region: global
+#       calc_grading: true
+#       metric: [SMPI]
+#       normalization: CMIP5
+#       smpi_n_bootstrap: 100
+
+  tauu:
+    description: Zonal Wind Stress
+    themes:
+      - phys
+    realms:
+      - atmos
+    variables:
+      tauu:
+        <<: *variable_settings
+        preprocessor: ppNOLEV
+        additional_datasets:
+          - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3}
+    scripts:
+      grading:
+        script: perfmetrics/main.ncl
+        plot_type: cycle_latlon
+        time_avg: yearly
+        region: global
+        calc_grading: true
+        metric: [SMPI]
+        normalization: CMIP5
+        smpi_n_bootstrap: 100
+
+  tauv:
+    description: Meridional Wind Stress
+    themes:
+      - phys
+    realms:
+      - atmos
+    variables:
+      tauv:
+        <<: *variable_settings
+        preprocessor: ppNOLEV
+        additional_datasets:
+          - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3}
+    scripts:
+      grading:
+        script: perfmetrics/main.ncl
+        plot_type: cycle_latlon
+        time_avg: yearly
+        region: global
+        calc_grading: true
+        metric: [SMPI]
+        normalization: CMIP5
+        smpi_n_bootstrap: 100
+
+  ### COLLECT METRICS ###################
+  collect:
+    description: Wrapper to collect and plot previously calculated metrics
+    scripts:
+      SMPI:
+        script: perfmetrics/collect.ncl
+        ancestors: ['*/grading']
+        metric: SMPI
diff --git a/esmvaltool/recipes/recipe_snowalbedo.yml b/esmvaltool/recipes/recipe_snowalbedo.yml
new file mode 100644
index 0000000000..fb053cc7f2
--- /dev/null
+++ b/esmvaltool/recipes/recipe_snowalbedo.yml
@@ -0,0 +1,88 @@
+# ESMValTool
+# recipe_snowalbedo.yml
+---
+documentation:
+  title: Snow-albedo feedback
+
+  description: |
+    Recipe for the springtime snow-albedo feedback (IPCC AR5 Fig. 9.45a).
+
+  authors:
+    - lauer_axel
+
+  maintainer:
+    - lauer_axel
+
+  references:
+    - flato13ipcc
+
+  projects:
+    - crescendo
+
+diagnostics:
+  snowalbedo:
+    title: Springtime snow-albedo effect (IPCC AR5 Fig. 9.45a)
+    description: Springtime snow-albedo feedback values vs. seasonal cycle
+    themes:
+      - EC
+    realms:
+      - atmos
+    variables:
+      alb:
+        mip: Amon
+        project: CMIP5
+        ensemble: r1i1p1
+        reference_dataset: ISCCP-FH
+        derive: true
+        additional_datasets:
+          - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, start_year: 1984, end_year: 2000, tier: 2}
+      tas:
+        mip: Amon
+        project: CMIP5
+        ensemble: r1i1p1
+        reference_dataset: ERA-Interim
+        additional_datasets:
+          - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, start_year: 1984, end_year: 2000, tier: 3}
+      rsdt:
+        mip: Amon
+        project: CMIP5
+        ensemble: r1i1p1
+        reference_dataset: ISCCP-FH
+        additional_datasets:
+          - {dataset: ISCCP-FH, project: OBS, type: sat, version: v0, start_year: 1984, end_year: 2000, tier: 2}
+    additional_datasets:
+      - {dataset: bcc-csm1-1, exp: historical, start_year: 1901, end_year: 2000}
+      - {dataset: bcc-csm1-1, exp: rcp45, start_year: 2101, end_year: 2200}
+      - {dataset: CanESM2, exp: historical, start_year: 1901, end_year: 2000}
+      - {dataset: CanESM2, exp: rcp45, start_year: 2101, end_year: 2200}
+      - {dataset: CCSM4, exp: historical, start_year: 1901, end_year: 2000}
+      - {dataset: CCSM4, exp: rcp45, start_year: 2101, end_year: 2200}
+      - {dataset: CNRM-CM5, exp: historical, start_year: 1901, end_year: 2000}
+      - {dataset: CNRM-CM5, exp: rcp45, start_year: 2101, end_year: 2200}
+      - {dataset: CSIRO-Mk3-6-0, exp: historical, start_year: 1901, end_year: 2000}
+      - {dataset: CSIRO-Mk3-6-0, exp: rcp45, start_year: 2101, end_year: 2200}
+      - {dataset: GFDL-CM3, exp: historical, start_year: 1901, end_year: 2000}
+      - {dataset: GFDL-CM3, exp: rcp45, start_year: 2101, end_year: 2200}
+      - {dataset: GISS-E2-H, exp: historical, start_year: 1901, end_year: 2000}
+      - {dataset: GISS-E2-H, exp: rcp45, start_year: 2101, end_year: 2200}
+      - {dataset: GISS-E2-R, exp: historical, start_year: 1901, end_year: 2000}
+      - {dataset: GISS-E2-R, exp: rcp45, start_year: 2101, end_year: 2200}
+      - {dataset: IPSL-CM5A-LR, exp: historical,
start_year: 1901, end_year: 2000} + - {dataset: IPSL-CM5A-LR, exp: rcp45, start_year: 2101, end_year: 2200} + - {dataset: MIROC-ESM, exp: historical, start_year: 1901, end_year: 2000} + - {dataset: MIROC-ESM, exp: rcp45, start_year: 2101, end_year: 2200} + - {dataset: MPI-ESM-LR, exp: historical, start_year: 1901, end_year: 2000} + - {dataset: MPI-ESM-LR, exp: rcp45, start_year: 2101, end_year: 2200} + - {dataset: NorESM1-M, exp: historical, start_year: 1901, end_year: 2000} + - {dataset: NorESM1-M, exp: rcp45, start_year: 2101, end_year: 2200} + scripts: + fig09_45a: + script: emergent_constraints/snowalbedo.ncl + exp_presentday: historical + exp_future: rcp45 + legend_outside: false + xmin: -1.7 + xmax: -0.3 + ymin: -1.7 + ymax: -0.3 +# styleset: "CMIP5" diff --git a/esmvaltool/recipes/recipe_spei.yml b/esmvaltool/recipes/recipe_spei.yml index e3da38cf90..b6ecae3681 100644 --- a/esmvaltool/recipes/recipe_spei.yml +++ b/esmvaltool/recipes/recipe_spei.yml @@ -1,13 +1,27 @@ +# ESMValTool +# recipe_spei.yml --- documentation: - description: 'Calculates the SPI and SPEI drought indices.' - authors: ['berg_pe'] - projects: ['c3s-magic'] - references: ['acknow_project'] + title: Drought indices SPI and SPEI + + description: | + Calculates the SPI and SPEI drought indices + + authors: + - berg_peter + + maintainer: + - weigel_katja + + projects: + - c3s-magic + + references: + - acknow_project datasets: # - {dataset: CRU, project: OBS, type: reanaly, version: 1, tier: 3} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1} # - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1} # - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1} @@ -44,7 +58,7 @@ diagnostics: tas: *var scripts: spi: - script: droughtindex/diag_spi.r + script: droughtindex/diag_spi.R ancestors: [pr] spei: - script: droughtindex/diag_spei.r + script: droughtindex/diag_spei.R diff --git a/esmvaltool/recipes/recipe_tcr.yml b/esmvaltool/recipes/recipe_tcr.yml new file mode 100644 index 0000000000..c93fa08112 --- /dev/null +++ b/esmvaltool/recipes/recipe_tcr.yml @@ -0,0 +1,256 @@ +# ESMValTool +# recipe_tcr.yml +--- +documentation: + title: > + Transient Climate Response (TCR) + + description: > + Calculate Transient Climate Response (TCR). + + authors: + - schlund_manuel + + maintainer: + - schlund_manuel + + references: + - gregory08jgr + + projects: + - crescendo + + +preprocessors: + + spatial_mean: + area_statistics: + operator: mean + + +TCR: &tcr_script + script: climate_metrics/tcr.py + calculate_mmm: true +SCATTERPLOT: &scatterplot_script + script: climate_metrics/create_scatterplot.py + pattern: 'tcr.nc' + seaborn_settings: + style: ticks + y_range: [0.0, 3.0] +VAR_SETTING: &variable_settings + short_name: tas + preprocessor: spatial_mean + exp: 1pctCO2 + mip: Amon + + +diagnostics: + + cmip5: + description: Calculate TCR for all available CMIP5 models. 
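+    # TCR is the 20-year mean change in global-mean near-surface air
+    # temperature of the 1pctCO2 run, centered on the time of CO2 doubling
+    # (year 70); the piControl periods below are paired with the 1pctCO2
+    # periods so that control-run drift can be removed.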
+ variables: + 1pctCO2: + <<: *variable_settings + project: CMIP5 + ensemble: r1i1p1 + additional_datasets: + - {dataset: ACCESS1-0, start_year: 300, end_year: 439} + - {dataset: ACCESS1-3, start_year: 250, end_year: 389} + - {dataset: bcc-csm1-1, start_year: 160, end_year: 299} + - {dataset: bcc-csm1-1-m, start_year: 240, end_year: 379} + - {dataset: BNU-ESM, start_year: 1850, end_year: 1989} + - {dataset: CanESM2, start_year: 1850, end_year: 1989} + - {dataset: CCSM4, start_year: 1850, end_year: 1989} + - {dataset: CNRM-CM5, start_year: 1850, end_year: 1989} + - {dataset: CNRM-CM5-2, start_year: 1850, end_year: 1989} + - {dataset: CSIRO-Mk3-6-0, start_year: 1, end_year: 140} + - {dataset: FGOALS-g2, start_year: 440, end_year: 579} + # branch_time_in_child weird + - {dataset: FGOALS-s2, start_year: 1850, end_year: 1989} + - {dataset: GFDL-CM3, start_year: 1, end_year: 140} + - {dataset: GFDL-ESM2G, start_year: 1, end_year: 140} + - {dataset: GFDL-ESM2M, start_year: 1, end_year: 140} + - {dataset: GISS-E2-H, start_year: 1850, end_year: 1989} + - {dataset: GISS-E2-R, start_year: 1850, end_year: 1989} + # Experiments start at 1859-12-01 + - {dataset: HadGEM2-ES, start_year: 1860, end_year: 1999} + - {dataset: inmcm4, start_year: 2090, end_year: 2229} + - {dataset: IPSL-CM5A-LR, start_year: 1850, end_year: 1989} + - {dataset: IPSL-CM5A-MR, start_year: 1850, end_year: 1989} + - {dataset: IPSL-CM5B-LR, start_year: 1850, end_year: 1989} + - {dataset: MIROC5, start_year: 2200, end_year: 2339} + - {dataset: MIROC-ESM, start_year: 1, end_year: 140} + - {dataset: MPI-ESM-LR, start_year: 1850, end_year: 1989} + - {dataset: MPI-ESM-MR, start_year: 1850, end_year: 1989} + - {dataset: MPI-ESM-P, start_year: 1850, end_year: 1989} + - {dataset: MRI-CGCM3, start_year: 1851, end_year: 1990} + - {dataset: NorESM1-M, start_year: 1, end_year: 140} + piControl: + <<: *variable_settings + project: CMIP5 + ensemble: r1i1p1 + exp: piControl + additional_datasets: + - {dataset: ACCESS1-0, start_year: 300, end_year: 439} + - {dataset: ACCESS1-3, start_year: 250, end_year: 389} + - {dataset: bcc-csm1-1, start_year: 160, end_year: 299} + - {dataset: bcc-csm1-1-m, start_year: 240, end_year: 379} + - {dataset: BNU-ESM, start_year: 1850, end_year: 1989} + - {dataset: CanESM2, start_year: 2321, end_year: 2460} + - {dataset: CCSM4, start_year: 251, end_year: 390} + - {dataset: CNRM-CM5, start_year: 1850, end_year: 1989} + - {dataset: CNRM-CM5-2, start_year: 1850, end_year: 1989} + - {dataset: CSIRO-Mk3-6-0, start_year: 104, end_year: 243} + - {dataset: FGOALS-g2, start_year: 440, end_year: 579} + # branch_time_in_child weird + - {dataset: FGOALS-s2, start_year: 1850, end_year: 1989} + - {dataset: GFDL-CM3, start_year: 1, end_year: 140} + - {dataset: GFDL-ESM2G, start_year: 1, end_year: 140} + - {dataset: GFDL-ESM2M, start_year: 1, end_year: 140} + - {dataset: GISS-E2-H, start_year: 2410, end_year: 2549} + - {dataset: GISS-E2-R, start_year: 3981, end_year: 4120} + # Experiments start at 1859-12-01 + - {dataset: HadGEM2-ES, start_year: 1860, end_year: 1999} + - {dataset: inmcm4, start_year: 2090, end_year: 2229} + - {dataset: IPSL-CM5A-LR, start_year: 1850, end_year: 1989} + - {dataset: IPSL-CM5A-MR, start_year: 1850, end_year: 1989} + - {dataset: IPSL-CM5B-LR, start_year: 1850, end_year: 1989} + - {dataset: MIROC5, start_year: 2400, end_year: 2539} + - {dataset: MIROC-ESM, start_year: 1880, end_year: 2019} + - {dataset: MPI-ESM-LR, start_year: 1880, end_year: 2019} + - {dataset: MPI-ESM-MR, start_year: 1850, end_year: 1989} 
+ - {dataset: MPI-ESM-P, start_year: 1866, end_year: 2005} + - {dataset: MRI-CGCM3, start_year: 1891, end_year: 2030} + - {dataset: NorESM1-M, start_year: 700, end_year: 839} + scripts: + tcr: + <<: *tcr_script + scatterplot: + <<: *scatterplot_script + ancestors: ['cmip5/tcr'] + dataset_style: cmip5 + + cmip6: + description: Calculate TCR for all available CMIP6 models. + variables: + 1pctCO2: + <<: *variable_settings + project: CMIP6 + additional_datasets: + - {dataset: ACCESS-CM2, ensemble: r1i1p1f1, grid: gn, start_year: 950, end_year: 1089, institute: CSIRO-ARCCSS} + - {dataset: ACCESS-ESM1-5, ensemble: r1i1p1f1, grid: gn, start_year: 101, end_year: 240} + - {dataset: AWI-CM-1-1-MR, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn, start_year: 3030, end_year: 3169} + - {dataset: CanESM5, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 140} + - {dataset: CESM2-WACCM, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 140, institute: NCAR} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1989} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1989} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1989} + - {dataset: E3SM-1-0, ensemble: r1i1p1f1, grid: gr, start_year: 1, end_year: 140} + # No 1pctCO2 data + # - {dataset: EC-Earth3, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1989} + - {dataset: EC-Earth3-Veg, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1989} + # parent_time_units messed up + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1989} + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1, start_year: 1, end_year: 140} + - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gr1, start_year: 1, end_year: 140} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: GISS-E2-2-G, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989, institute: NASA-GISS} + # Mixed ensemble members! + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn, start_year: 1850, end_year: 1989} + # Mixed ensemble members! 
+ - {dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f3, grid: gn, start_year: 1850, end_year: 1989} + # Wrong start year for piControl (must be 1850) + - {dataset: IITM-ESM, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + # Not enough years for 1pctCO2 + # - {dataset: INM-CM5-0, ensemble: r1i1p1f1, grid: gr1, start_year: 1850, end_year: 1989} + - {dataset: INM-CM4-8, ensemble: r1i1p1f1, grid: gr1, start_year: 1850, end_year: 1989} + - {dataset: IPSL-CM6A-LR, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1989} + - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1989} + # branch_time_in_child weird + - {dataset: MCM-UA-1-0, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 140} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn, start_year: 3200, end_year: 3339} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: MPI-ESM1-2-HR, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + # parent_time_units not correct, incorrect start year for piControl + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 140} + # weird branch_time_in_child/branch_time_in_parent + - {dataset: NorCPM1, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 140, institute: NCC} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn, start_year: 1850, end_year: 1989} + piControl: + <<: *variable_settings + project: CMIP6 + exp: piControl + additional_datasets: + - {dataset: ACCESS-CM2, ensemble: r1i1p1f1, grid: gn, start_year: 950, end_year: 1089, institute: CSIRO-ARCCSS} + - {dataset: ACCESS-ESM1-5, ensemble: r1i1p1f1, grid: gn, start_year: 101, end_year: 240} + - {dataset: AWI-CM-1-1-MR, ensemble: r1i1p1f1, grid: gn, start_year: 2650, end_year: 2789} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: BCC-ESM1, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: CAMS-CSM1-0, ensemble: r1i1p1f1, grid: gn, start_year: 3030, end_year: 3169} + - {dataset: CanESM5, ensemble: r1i1p1f1, grid: gn, start_year: 5201, end_year: 5340} + - {dataset: CESM2, ensemble: r1i1p1f1, grid: gn, start_year: 501, end_year: 640} + - {dataset: CESM2-WACCM, ensemble: r1i1p1f1, grid: gn, start_year: 70, end_year: 209, institute: NCAR} + - {dataset: CNRM-CM6-1, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1989} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1989} + - {dataset: CNRM-ESM2-1, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1989} + - {dataset: E3SM-1-0, ensemble: r1i1p1f1, grid: gr, start_year: 101, end_year: 240} + # No 1pctCO2 data + # - {dataset: EC-Earth3, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1989} + - {dataset: EC-Earth3-Veg, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1989} + # parent_time_units messed up + - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, start_year: 600, end_year: 739} + # Wrong start year (must be 101) + - {dataset: GFDL-CM4, ensemble: r1i1p1f1, grid: gr1, start_year: 151, end_year: 290} + - {dataset: GFDL-ESM4, ensemble: 
r1i1p1f1, grid: gr1, start_year: 101, end_year: 240} + - {dataset: GISS-E2-1-H, ensemble: r1i1p1f1, grid: gn, start_year: 3180, end_year: 3319} + - {dataset: GISS-E2-2-G, ensemble: r1i1p1f1, grid: gn, start_year: 2000, end_year: 2139, institute: NASA-GISS} + # Mixed ensemble members! + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + # Mixed ensemble members! + - {dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + # Wrong start year for piControl (must be 1850) + - {dataset: IITM-ESM, ensemble: r1i1p1f1, grid: gn, start_year: 1950, end_year: 2089} + # Not enough years for 1pctCO2 + # - {dataset: INM-CM5-0, ensemble: r1i1p1f1, grid: gr1, start_year: 2099, end_year: 2238} + - {dataset: INM-CM4-8, ensemble: r1i1p1f1, grid: gr1, start_year: 1947, end_year: 2086} + - {dataset: IPSL-CM6A-LR, ensemble: r1i1p1f1, grid: gr, start_year: 1870, end_year: 2009} + - {dataset: KACE-1-0-G, ensemble: r1i1p1f1, grid: gr, start_year: 2150, end_year: 2289} + # branch_time_in_child weird + - {dataset: MCM-UA-1-0, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 140} + - {dataset: MIROC6, ensemble: r1i1p1f1, grid: gn, start_year: 3200, end_year: 3339} + - {dataset: MIROC-ES2L, ensemble: r1i1p1f2, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: MPI-ESM1-2-HR, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: MPI-ESM1-2-LR, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + - {dataset: MRI-ESM2-0, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1989} + # parent_time_units not correct, incorrect start year for piControl + - {dataset: NESM3, ensemble: r1i1p1f1, grid: gn, start_year: 550, end_year: 689} + - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: gn, start_year: 1600, end_year: 1739} + # weird branch_time_in_child/branch_time_in_parent + - {dataset: NorCPM1, ensemble: r1i1p1f1, grid: gn, start_year: 1, end_year: 140, institute: NCC} + - {dataset: SAM0-UNICON, ensemble: r1i1p1f1, grid: gn, start_year: 274, end_year: 413} + - {dataset: UKESM1-0-LL, ensemble: r1i1p1f2, grid: gn, start_year: 1960, end_year: 2099} + scripts: + tcr: + <<: *tcr_script + scatterplot: + <<: *scatterplot_script + ancestors: ['cmip6/tcr'] + dataset_style: cmip6 + + barplot: + description: Create barplot of TCR. 
+    scripts:
+      barplot:
+        script: climate_metrics/create_barplot.py
+        ancestors: ['*/tcr']
+        label_attribute: project
+        patterns: ['tcr.nc']
+        y_range: [0.0, 3.5]
diff --git a/esmvaltool/recipes/recipe_tebaldi21esd.yml b/esmvaltool/recipes/recipe_tebaldi21esd.yml
new file mode 100644
index 0000000000..e80da7f8e0
--- /dev/null
+++ b/esmvaltool/recipes/recipe_tebaldi21esd.yml
@@ -0,0 +1,7992 @@
+# ESMValTool
+# recipe_tebaldi21esd.yml
+---
+documentation:
+  title: >
+    Climate model projections from the ScenarioMIP of CMIP6
+
+  description: >
+    Reproduces plots and tables of tebaldi21esd: TAS and PR climate model
+    projections from the Scenario Model Intercomparison Project (ScenarioMIP)
+    of CMIP6.
+
+  authors:
+    - debeire_kevin
+
+  maintainer:
+    - debeire_kevin
+
+  references:
+    - tebaldi21esd
+
+  projects:
+    - esmval
+
+preprocessors:
+
+  preproc_annual_mean:
+    annual_statistics:
+      operator: mean
+
+  preproc_decadal_mean:
+    decadal_statistics:
+      operator: mean
+
+  preproc_land_only:
+    mask_landsea:
+      mask_out: sea
+    regrid:
+      scheme: linear
+      target_grid: 1x1
+
+  preproc_landonly_decadal_mean:
+    decadal_statistics:
+      operator: mean
+    mask_landsea:
+      mask_out: sea
+
+  preproc_map:
+    regrid:
+      scheme: linear
+      target_grid: 1x1
+
+  preproc_ocean_only:
+    mask_landsea:
+      mask_out: land
+    regrid:
+      scheme: linear
+      target_grid: 1x1
+
+
+diagnostics:
+
+  fig1b:
+    description: Fig. 1b, Time series of relative change in precipitation in multiple
+      scenarios incl. spread
+    realms: [atmos]
+    themes: [phys]
+    variables:
+      pr: {end_year: 2100, mip: Amon, project: CMIP6, short_name: pr, start_year: 2015}
+    scripts:
+      plot_ts_line_mean_spread_pr:
+        ancestors: [pr, ts_line_mean_spread_pr]
+        begin_ref_year: 1995
+        colormap: $diag_scripts/shared/plot/rgb/ipcc_color_ssp_tseries.rgb
+        end_ref_year: 2014
+        eyears: &id001 [2014, 2100]
+        label: &id002 [Historical, SSP1-1.9, SSP1-2.6, SSP2-4.5, SSP3-7.0, SSP5-8.5]
+        model_nr: true
+        scenarios: &id003 [ssp119, ssp126, ssp245, ssp370, ssp585]
+        script: tebaldi21esd/plot_timeseries_mean_spread.ncl
+        spread: 1.64
+        styleset: CMIP6
+        syears: &id004 [1850, 2015]
+        title: PR, global
+        yaxis: Relative to 1995-2014 (%)
+        ymax: 12
+        ymin: -5
+      ts_line_mean_spread_pr:
+        begin_ref_year: 1995
+        end_ref_year: 2014
+        eyears: *id001
+        label: *id002
+        model_nr: true
+        percent: 1
+        scenarios: *id003
+        script: tebaldi21esd/calc_timeseries_mean_spread_runave.ncl
+        spread: 1.64
+        styleset: CMIP6
+        syears: *id004
+    additional_datasets: &fig1_pr_datasets
+      - &id005 {dataset: GISS-E2-1-G, end_year: 2100, ensemble: r1i1p1f2, exp: ssp126,
+        grid: gn, institute: NASA-GISS, start_year: 2015}
+      - &id006 {dataset: AWI-CM-1-1-MR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126,
+        grid: gn, institute: AWI, start_year: 2015}
+      - &id007 {dataset: FGOALS-f3-L, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126,
+        grid: gr, institute: CAS, start_year: 2015}
+      - &id008 {dataset: FGOALS-g3, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126,
+        grid: gn, institute: CAS, start_year: 2015}
+      - &id009 {dataset: CMCC-CM2-SR5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126,
+        grid: gn, institute: CMCC, start_year: 2015}
+      - &id010 {dataset: ACCESS-CM2, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126,
+        grid: gn, institute: CSIRO-ARCCSS, start_year: 2015}
+      - &id011 {dataset: GFDL-ESM4, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126,
+        grid: gr1, institute: NOAA-GFDL, start_year: 2015}
+      - &id012 {dataset: CAMS-CSM1-0, end_year: 2099, ensemble: r1i1p1f1, exp: ssp126,
+        grid: gn, institute: CAMS, start_year: 2015}
+
- &id013 {dataset: CanESM5-CanOE, end_year: 2100, ensemble: r1i1p2f1, exp: ssp126, + grid: gn, institute: CCCma, start_year: 2015} + - &id014 {dataset: CanESM5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, grid: gn, + institute: CCCma, start_year: 2015} + - &id015 {dataset: UKESM1-0-LL, end_year: 2100, ensemble: r1i1p1f2, exp: ssp126, + grid: gn, institute: MOHC, start_year: 2015} + - &id016 {dataset: HadGEM3-GC31-LL, end_year: 2100, ensemble: r1i1p1f3, exp: ssp126, + grid: gn, institute: MOHC, start_year: 2015} + - &id017 {dataset: HadGEM3-GC31-MM, end_year: 2100, ensemble: r1i1p1f3, exp: ssp126, + grid: gn, institute: MOHC, start_year: 2015} + - &id018 {dataset: EC-Earth3, end_year: 2100, ensemble: r4i1p1f1, exp: ssp126, + grid: gr, institute: EC-Earth-Consortium, start_year: 2015} + - &id019 {dataset: EC-Earth3-Veg, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gr, institute: EC-Earth-Consortium, start_year: 2015} + - &id020 {dataset: MIROC6, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, grid: gn, + institute: MIROC, start_year: 2015} + - &id021 {dataset: MIROC-ES2L, end_year: 2100, ensemble: r1i1p1f2, exp: ssp126, + grid: gn, institute: MIROC, start_year: 2015} + - &id022 {dataset: FIO-ESM-2-0, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gn, institute: FIO-QLNM, start_year: 2015} + - &id023 {dataset: MCM-UA-1-0, end_year: 2100, ensemble: r1i1p1f2, exp: ssp126, + grid: gn, institute: UA, start_year: 2015} + - &id024 {dataset: BCC-CSM2-MR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gn, institute: BCC, start_year: 2015} + - &id025 {dataset: INM-CM5-0, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gr1, institute: INM, start_year: 2015} + - &id026 {dataset: INM-CM4-8, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gr1, institute: INM, start_year: 2015} + - &id027 {dataset: KACE-1-0-G, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gr, institute: NIMS-KMA, start_year: 2015} + - &id028 {dataset: IPSL-CM6A-LR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gr, institute: IPSL, start_year: 2015} + - &id029 {dataset: CESM2-WACCM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gn, institute: NCAR, start_year: 2015} +# - &id030 {dataset: CESM2, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, grid: gn, +# institute: NCAR, start_year: 2015} + - &id031 {dataset: MPI-ESM1-2-LR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gn, institute: MPI-M, start_year: 2015} + - &id032 {dataset: NESM3, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, grid: gn, + institute: NUIST, start_year: 2015} + - &id033 {dataset: ACCESS-ESM1-5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gn, institute: CSIRO, start_year: 2015} + - &id034 {dataset: MRI-ESM2-0, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gn, institute: MRI, start_year: 2015} + - &id035 {dataset: NorESM2-LM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gn, institute: NCC, start_year: 2015} + - &id036 {dataset: NorESM2-MM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gn, institute: NCC, start_year: 2015} + - &id037 {dataset: CNRM-ESM2-1, end_year: 2100, ensemble: r1i1p1f2, exp: ssp126, + grid: gr, institute: CNRM-CERFACS, start_year: 2015} + - &id038 {dataset: CNRM-CM6-1-HR, end_year: 2100, ensemble: r1i1p1f2, exp: ssp126, + grid: gr, institute: CNRM-CERFACS, start_year: 2015} + - &id039 {dataset: CNRM-CM6-1, end_year: 2100, ensemble: r1i1p1f2, exp: ssp126, + grid: gr, institute: CNRM-CERFACS, start_year: 
2015} + - &id040 {dataset: IITM-ESM, end_year: 2098, ensemble: r1i1p1f1, exp: ssp126, + grid: gn, institute: CCCR-IITM, start_year: 2015} + - &id041 {dataset: GISS-E2-1-G, end_year: 2100, ensemble: r1i1p1f2, exp: ssp245, + grid: gn, institute: NASA-GISS, start_year: 2015} + - &id042 {dataset: AWI-CM-1-1-MR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gn, institute: AWI, start_year: 2015} + - &id043 {dataset: FGOALS-f3-L, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gr, institute: CAS, start_year: 2015} + - &id044 {dataset: FGOALS-g3, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gn, institute: CAS, start_year: 2015} + - &id045 {dataset: CMCC-CM2-SR5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gn, institute: CMCC, start_year: 2015} + - &id046 {dataset: ACCESS-CM2, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gn, institute: CSIRO-ARCCSS, start_year: 2015} + - &id047 {dataset: GFDL-CM4, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gr1, institute: NOAA-GFDL, start_year: 2015} + - &id048 {dataset: GFDL-ESM4, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gr1, institute: NOAA-GFDL, start_year: 2015} + - &id049 {dataset: CAMS-CSM1-0, end_year: 2099, ensemble: r1i1p1f1, exp: ssp245, + grid: gn, institute: CAMS, start_year: 2015} + - &id050 {dataset: CanESM5-CanOE, end_year: 2100, ensemble: r1i1p2f1, exp: ssp245, + grid: gn, institute: CCCma, start_year: 2015} + - &id051 {dataset: CanESM5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, grid: gn, + institute: CCCma, start_year: 2015} + - &id052 {dataset: UKESM1-0-LL, end_year: 2100, ensemble: r1i1p1f2, exp: ssp245, + grid: gn, institute: MOHC, start_year: 2015} + - &id053 {dataset: HadGEM3-GC31-LL, end_year: 2100, ensemble: r1i1p1f3, exp: ssp245, + grid: gn, institute: MOHC, start_year: 2015} + - &id054 {dataset: EC-Earth3, end_year: 2100, ensemble: r4i1p1f1, exp: ssp245, + grid: gr, institute: EC-Earth-Consortium, start_year: 2015} + - &id055 {dataset: EC-Earth3-Veg, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gr, institute: EC-Earth-Consortium, start_year: 2015} + - &id056 {dataset: MIROC6, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, grid: gn, + institute: MIROC, start_year: 2015} + - &id057 {dataset: MIROC-ES2L, end_year: 2100, ensemble: r1i1p1f2, exp: ssp245, + grid: gn, institute: MIROC, start_year: 2015} + - &id058 {dataset: FIO-ESM-2-0, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gn, institute: FIO-QLNM, start_year: 2015} + - &id059 {dataset: MCM-UA-1-0, end_year: 2100, ensemble: r1i1p1f2, exp: ssp245, + grid: gn, institute: UA, start_year: 2015} + - &id060 {dataset: BCC-CSM2-MR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gn, institute: BCC, start_year: 2015} + - &id061 {dataset: INM-CM5-0, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gr1, institute: INM, start_year: 2015} + - &id062 {dataset: INM-CM4-8, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gr1, institute: INM, start_year: 2015} + - &id063 {dataset: KACE-1-0-G, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gr, institute: NIMS-KMA, start_year: 2015} + - &id064 {dataset: IPSL-CM6A-LR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gr, institute: IPSL, start_year: 2015} + - &id065 {dataset: CESM2-WACCM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gn, institute: NCAR, start_year: 2015} +# - &id066 {dataset: CESM2, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, grid: gn, +# institute: NCAR, start_year: 
2015} + - &id067 {dataset: MPI-ESM1-2-LR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gn, institute: MPI-M, start_year: 2015} + - &id068 {dataset: NESM3, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, grid: gn, + institute: NUIST, start_year: 2015} + - &id069 {dataset: ACCESS-ESM1-5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gn, institute: CSIRO, start_year: 2015} + - &id070 {dataset: MRI-ESM2-0, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gn, institute: MRI, start_year: 2015} + - &id071 {dataset: NorESM2-LM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gn, institute: NCC, start_year: 2015} + - &id072 {dataset: NorESM2-MM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gn, institute: NCC, start_year: 2015} + - &id073 {dataset: CNRM-ESM2-1, end_year: 2100, ensemble: r1i1p1f2, exp: ssp245, + grid: gr, institute: CNRM-CERFACS, start_year: 2015} + - &id074 {dataset: CNRM-CM6-1-HR, end_year: 2100, ensemble: r1i1p1f2, exp: ssp245, + grid: gr, institute: CNRM-CERFACS, start_year: 2015} + - &id075 {dataset: CNRM-CM6-1, end_year: 2100, ensemble: r1i1p1f2, exp: ssp245, + grid: gr, institute: CNRM-CERFACS, start_year: 2015} + - &id076 {dataset: GISS-E2-1-G, end_year: 2100, ensemble: r1i1p1f2, exp: ssp585, + grid: gn, institute: NASA-GISS, start_year: 2015} + - &id077 {dataset: AWI-CM-1-1-MR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gn, institute: AWI, start_year: 2015} + - &id078 {dataset: FGOALS-f3-L, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gr, institute: CAS, start_year: 2015} + - &id079 {dataset: FGOALS-g3, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gn, institute: CAS, start_year: 2015} + - &id080 {dataset: CMCC-CM2-SR5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gn, institute: CMCC, start_year: 2015} + - &id081 {dataset: ACCESS-CM2, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gn, institute: CSIRO-ARCCSS, start_year: 2015} + - &id082 {dataset: GFDL-CM4, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gr1, institute: NOAA-GFDL, start_year: 2015} + - &id083 {dataset: GFDL-ESM4, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gr1, institute: NOAA-GFDL, start_year: 2015} + - &id084 {dataset: CAMS-CSM1-0, end_year: 2099, ensemble: r1i1p1f1, exp: ssp585, + grid: gn, institute: CAMS, start_year: 2015} + - &id085 {dataset: CanESM5-CanOE, end_year: 2100, ensemble: r1i1p2f1, exp: ssp585, + grid: gn, institute: CCCma, start_year: 2015} + - &id086 {dataset: CanESM5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, grid: gn, + institute: CCCma, start_year: 2015} + - &id087 {dataset: UKESM1-0-LL, end_year: 2100, ensemble: r1i1p1f2, exp: ssp585, + grid: gn, institute: MOHC, start_year: 2015} + - &id088 {dataset: HadGEM3-GC31-LL, end_year: 2100, ensemble: r1i1p1f3, exp: ssp585, + grid: gn, institute: MOHC, start_year: 2015} + - &id089 {dataset: HadGEM3-GC31-MM, end_year: 2100, ensemble: r1i1p1f3, exp: ssp585, + grid: gn, institute: MOHC, start_year: 2015} + - &id090 {dataset: EC-Earth3, end_year: 2100, ensemble: r4i1p1f1, exp: ssp585, + grid: gr, institute: EC-Earth-Consortium, start_year: 2015} + - &id091 {dataset: EC-Earth3-Veg, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gr, institute: EC-Earth-Consortium, start_year: 2015} + - &id092 {dataset: MIROC6, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, grid: gn, + institute: MIROC, start_year: 2015} + - &id093 {dataset: MIROC-ES2L, end_year: 2100, ensemble: r1i1p1f2, exp: ssp585, + grid: gn, 
institute: MIROC, start_year: 2015} + - &id094 {dataset: FIO-ESM-2-0, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gn, institute: FIO-QLNM, start_year: 2015} + - &id095 {dataset: MCM-UA-1-0, end_year: 2100, ensemble: r1i1p1f2, exp: ssp585, + grid: gn, institute: UA, start_year: 2015} + - &id096 {dataset: BCC-CSM2-MR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gn, institute: BCC, start_year: 2015} + - &id097 {dataset: INM-CM5-0, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gr1, institute: INM, start_year: 2015} + - &id098 {dataset: INM-CM4-8, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gr1, institute: INM, start_year: 2015} + - &id099 {dataset: KACE-1-0-G, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gr, institute: NIMS-KMA, start_year: 2015} + - &id100 {dataset: IPSL-CM6A-LR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gr, institute: IPSL, start_year: 2015} + - &id101 {dataset: CESM2-WACCM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gn, institute: NCAR, start_year: 2015} +# - &id102 {dataset: CESM2, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, grid: gn, +# institute: NCAR, start_year: 2015} + - &id103 {dataset: MPI-ESM1-2-LR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gn, institute: MPI-M, start_year: 2015} + - &id104 {dataset: NESM3, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, grid: gn, + institute: NUIST, start_year: 2015} + - &id105 {dataset: ACCESS-ESM1-5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gn, institute: CSIRO, start_year: 2015} + - &id106 {dataset: MRI-ESM2-0, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gn, institute: MRI, start_year: 2015} + - &id107 {dataset: NorESM2-LM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gn, institute: NCC, start_year: 2015} + - &id108 {dataset: NorESM2-MM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gn, institute: NCC, start_year: 2015} + - &id109 {dataset: CNRM-ESM2-1, end_year: 2100, ensemble: r1i1p1f2, exp: ssp585, + grid: gr, institute: CNRM-CERFACS, start_year: 2015} + - &id110 {dataset: CNRM-CM6-1-HR, end_year: 2100, ensemble: r1i1p1f2, exp: ssp585, + grid: gr, institute: CNRM-CERFACS, start_year: 2015} + - &id111 {dataset: CNRM-CM6-1, end_year: 2100, ensemble: r1i1p1f2, exp: ssp585, + grid: gr, institute: CNRM-CERFACS, start_year: 2015} + - &id112 {dataset: IITM-ESM, end_year: 2098, ensemble: r1i1p1f1, exp: ssp585, + grid: gn, institute: CCCR-IITM, start_year: 2015} + - &id113 {dataset: UKESM1-0-LL, end_year: 2100, ensemble: r1i1p1f2, exp: ssp370, + grid: gn, institute: MOHC, start_year: 2015} + - &id114 {dataset: CanESM5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, grid: gn, + institute: CCCma, start_year: 2015} + - &id115 {dataset: IPSL-CM6A-LR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gr, institute: IPSL, start_year: 2015} + - &id116 {dataset: MPI-ESM1-2-LR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gn, institute: MPI-M, start_year: 2015} + - &id117 {dataset: GISS-E2-1-G, end_year: 2100, ensemble: r1i1p1f2, exp: ssp119, + grid: gn, institute: NASA-GISS, start_year: 2015} + - &id118 {dataset: FGOALS-g3, end_year: 2100, ensemble: r1i1p1f1, exp: ssp119, + grid: gn, institute: CAS, start_year: 2015} + - &id119 {dataset: GFDL-ESM4, end_year: 2100, ensemble: r1i1p1f1, exp: ssp119, + grid: gr1, institute: NOAA-GFDL, start_year: 2015} + - &id120 {dataset: CAMS-CSM1-0, end_year: 2099, ensemble: r1i1p1f1, exp: ssp119, + grid: gn, institute: CAMS, 
start_year: 2015} + - &id121 {dataset: CanESM5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp119, grid: gn, + institute: CCCma, start_year: 2015} + - &id122 {dataset: UKESM1-0-LL, end_year: 2100, ensemble: r1i1p1f2, exp: ssp119, + grid: gn, institute: MOHC, start_year: 2015} + - &id123 {dataset: EC-Earth3, end_year: 2100, ensemble: r4i1p1f1, exp: ssp119, + grid: gr, institute: EC-Earth-Consortium, start_year: 2015} + - &id124 {dataset: EC-Earth3-Veg, end_year: 2100, ensemble: r1i1p1f1, exp: ssp119, + grid: gr, institute: EC-Earth-Consortium, start_year: 2015} + - &id125 {dataset: MIROC6, end_year: 2100, ensemble: r1i1p1f1, exp: ssp119, grid: gn, + institute: MIROC, start_year: 2015} + - &id126 {dataset: MIROC-ES2L, end_year: 2100, ensemble: r1i1p1f2, exp: ssp119, + grid: gn, institute: MIROC, start_year: 2015} + - &id127 {dataset: IPSL-CM6A-LR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp119, + grid: gr, institute: IPSL, start_year: 2015} + - &id128 {dataset: MRI-ESM2-0, end_year: 2100, ensemble: r1i1p1f1, exp: ssp119, + grid: gn, institute: MRI, start_year: 2015} + - &id129 {dataset: CNRM-ESM2-1, end_year: 2100, ensemble: r1i1p1f2, exp: ssp119, + grid: gr, institute: CNRM-CERFACS, start_year: 2015} + - &id130 {dataset: GISS-E2-1-G, end_year: 2100, ensemble: r1i1p1f2, exp: ssp370, + grid: gn, institute: NASA-GISS, start_year: 2015} + - &id131 {dataset: AWI-CM-1-1-MR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gn, institute: AWI, start_year: 2015} + - &id132 {dataset: FGOALS-f3-L, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gr, institute: CAS, start_year: 2015} + - &id133 {dataset: FGOALS-g3, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gn, institute: CAS, start_year: 2015} + - &id134 {dataset: CMCC-CM2-SR5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gn, institute: CMCC, start_year: 2015} + - &id135 {dataset: ACCESS-CM2, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gn, institute: CSIRO-ARCCSS, start_year: 2015} + - &id136 {dataset: GFDL-ESM4, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gr1, institute: NOAA-GFDL, start_year: 2015} + - &id137 {dataset: CAMS-CSM1-0, end_year: 2099, ensemble: r1i1p1f1, exp: ssp370, + grid: gn, institute: CAMS, start_year: 2015} + - &id138 {dataset: CanESM5-CanOE, end_year: 2100, ensemble: r1i1p2f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id139 {dataset: EC-Earth3, end_year: 2100, ensemble: r4i1p1f1, exp: ssp370, + grid: gr, institute: EC-Earth-Consortium, start_year: 2015} + - &id140 {dataset: EC-Earth3-Veg, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gr, institute: EC-Earth-Consortium, start_year: 2015} + - &id141 {dataset: MIROC6, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, grid: gn, + institute: MIROC, start_year: 2015} + - &id142 {dataset: MIROC-ES2L, end_year: 2100, ensemble: r1i1p1f2, exp: ssp370, + grid: gn, institute: MIROC, start_year: 2015} + - &id143 {dataset: MCM-UA-1-0, end_year: 2100, ensemble: r1i1p1f2, exp: ssp370, + grid: gn, institute: UA, start_year: 2015} + - &id144 {dataset: BCC-CSM2-MR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gn, institute: BCC, start_year: 2015} + - &id145 {dataset: INM-CM5-0, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gr1, institute: INM, start_year: 2015} + - &id146 {dataset: INM-CM4-8, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gr1, institute: INM, start_year: 2015} + - &id147 {dataset: KACE-1-0-G, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: 
gr, institute: NIMS-KMA, start_year: 2015} + - &id148 {dataset: CESM2-WACCM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gn, institute: NCAR, start_year: 2015} +# - &id149 {dataset: CESM2, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, grid: gn, +# institute: NCAR, start_year: 2015} + - &id150 {dataset: ACCESS-ESM1-5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gn, institute: CSIRO, start_year: 2015} + - &id151 {dataset: MRI-ESM2-0, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gn, institute: MRI, start_year: 2015} + - &id152 {dataset: NorESM2-LM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gn, institute: NCC, start_year: 2015} + - &id153 {dataset: NorESM2-MM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gn, institute: NCC, start_year: 2015} + - &id154 {dataset: CNRM-ESM2-1, end_year: 2100, ensemble: r1i1p1f2, exp: ssp370, + grid: gr, institute: CNRM-CERFACS, start_year: 2015} + - &id155 {dataset: CNRM-CM6-1-HR, end_year: 2100, ensemble: r1i1p1f2, exp: ssp370, + grid: gr, institute: CNRM-CERFACS, start_year: 2015} + - &id156 {dataset: CNRM-CM6-1, end_year: 2100, ensemble: r1i1p1f2, exp: ssp370, + grid: gr, institute: CNRM-CERFACS, start_year: 2015} + - &id157 {dataset: GISS-E2-1-G, end_year: 2014, ensemble: r1i1p1f2, exp: historical, + grid: gn, institute: NASA-GISS, start_year: 1850} + - &id158 {dataset: FGOALS-g3, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: CAS, start_year: 1850} + - &id159 {dataset: GFDL-ESM4, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gr1, institute: NOAA-GFDL, start_year: 1850} + - &id160 {dataset: CAMS-CSM1-0, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: CAMS, start_year: 1850} + - &id161 {dataset: CanESM5, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1850} + - &id162 {dataset: UKESM1-0-LL, end_year: 2014, ensemble: r1i1p1f2, exp: historical, + grid: gn, institute: MOHC, start_year: 1850} + - &id163 {dataset: EC-Earth3, end_year: 2014, ensemble: r4i1p1f1, exp: historical, + grid: gr, institute: EC-Earth-Consortium, start_year: 1850} + - &id164 {dataset: EC-Earth3-Veg, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gr, institute: EC-Earth-Consortium, start_year: 1850} + - &id165 {dataset: MIROC6, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: MIROC, start_year: 1850} + - &id166 {dataset: MIROC-ES2L, end_year: 2014, ensemble: r1i1p1f2, exp: historical, + grid: gn, institute: MIROC, start_year: 1850} + - &id167 {dataset: IPSL-CM6A-LR, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gr, institute: IPSL, start_year: 1850} + - &id168 {dataset: MRI-ESM2-0, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: MRI, start_year: 1850} + - &id169 {dataset: CNRM-ESM2-1, end_year: 2014, ensemble: r1i1p1f2, exp: historical, + grid: gr, institute: CNRM-CERFACS, start_year: 1850} + - &id170 {dataset: AWI-CM-1-1-MR, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: AWI, start_year: 1850} + - &id171 {dataset: FGOALS-f3-L, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gr, institute: CAS, start_year: 1850} + - &id172 {dataset: CMCC-CM2-SR5, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: CMCC, start_year: 1850} + - &id173 {dataset: ACCESS-CM2, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: CSIRO-ARCCSS, start_year: 1850} + 
- &id174 {dataset: CanESM5-CanOE, end_year: 2014, ensemble: r1i1p2f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1850} + - &id175 {dataset: HadGEM3-GC31-LL, end_year: 2014, ensemble: r1i1p1f3, exp: historical, + grid: gn, institute: MOHC, start_year: 1850} + - &id176 {dataset: HadGEM3-GC31-MM, end_year: 2014, ensemble: r1i1p1f3, exp: historical, + grid: gn, institute: MOHC, start_year: 1850} + - &id177 {dataset: FIO-ESM-2-0, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: FIO-QLNM, start_year: 1850} + - &id178 {dataset: MCM-UA-1-0, end_year: 2014, ensemble: r1i1p1f2, exp: historical, + grid: gn, institute: UA, start_year: 1850} + - &id179 {dataset: BCC-CSM2-MR, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: BCC, start_year: 1850} + - &id180 {dataset: INM-CM5-0, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gr1, institute: INM, start_year: 1850} + - &id181 {dataset: INM-CM4-8, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gr1, institute: INM, start_year: 1850} + - &id182 {dataset: KACE-1-0-G, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gr, institute: NIMS-KMA, start_year: 1850} + - &id183 {dataset: CESM2-WACCM, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: NCAR, start_year: 1850} +# - &id184 {dataset: CESM2, end_year: 2014, ensemble: r1i1p1f1, exp: historical, +# grid: gn, institute: NCAR, start_year: 1850} + - &id185 {dataset: MPI-ESM1-2-LR, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: MPI-M, start_year: 1850} + - &id186 {dataset: NESM3, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: NUIST, start_year: 1850} + - &id187 {dataset: ACCESS-ESM1-5, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: CSIRO, start_year: 1850} + - &id188 {dataset: NorESM2-LM, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: NCC, start_year: 1850} + - &id189 {dataset: NorESM2-MM, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: NCC, start_year: 1850} + - &id190 {dataset: CNRM-CM6-1-HR, end_year: 2014, ensemble: r1i1p1f2, exp: historical, + grid: gr, institute: CNRM-CERFACS, start_year: 1850} + - &id191 {dataset: CNRM-CM6-1, end_year: 2014, ensemble: r1i1p1f2, exp: historical, + grid: gr, institute: CNRM-CERFACS, start_year: 1850} + - &id192 {dataset: IITM-ESM, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: CCCR-IITM, start_year: 1850} + - &id193 {dataset: GFDL-CM4, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gr1, institute: NOAA-GFDL, start_year: 1850} + - &id194 {dataset: IITM-ESM, end_year: 2098, ensemble: r1i1p1f1, exp: ssp370, + grid: gn, institute: CCCR-IITM, start_year: 2015} + + fig1a: + description: Fig1a, Time series of temperature change relative to 1995-2014 for multiple scenarios, + including
spread + realms: [atmos] + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, project: CMIP6, short_name: tas, start_year: 2015} + scripts: + plot_ts_line_mean_spread_tas: + ancestors: [tas, ts_line_mean_spread_tas] + begin_ref_year: 1995 + colormap: $diag_scripts/shared/plot/rgb/ipcc_color_ssp_tseries.rgb + end_ref_year: 2014 + eyears: &id195 [2014, 2100] + label: &id196 [Historical, SSP1-1.9, SSP1-2.6, SSP2-4.5, SSP3-7.0, SSP5-8.5] + model_nr: true + rightaxis_offset: 0.84 + scenarios: &id197 [ssp119, ssp126, ssp245, ssp370, ssp585] + script: tebaldi21esd/plot_timeseries_mean_spread_rightaxis_5scen.ncl + spread: 1.64 + syears: &id198 [1850, 2015] + title: TAS, global + yaxis: Relative to 1995-2014 (~S~o~N~ C) + ymax: 8 + ymin: -2 + ts_line_mean_spread_tas: + begin_ref_year: 1995 + end_ref_year: 2014 + eyears: *id195 + label: *id196 + model_nr: true + runave_window: 11 + scenarios: *id197 + script: tebaldi21esd/calc_timeseries_mean_spread_runave.ncl + spread: 1.64 + syears: *id198 + additional_datasets: &fig1_tas_datasets + - *id005 + - *id006 + - *id007 + - *id008 + - *id009 + - *id010 + - *id011 + - *id012 + - *id013 + - *id014 + - *id015 + - *id016 + - *id017 + - *id018 + - *id019 + - *id020 + - *id021 + - *id022 + - *id023 + - *id024 + - *id025 + - *id026 + - *id027 + - *id028 + - *id029 +# - *id030 + - *id031 + - *id032 + - *id033 + - *id034 + - *id035 + - *id036 + - *id037 + - *id038 + - *id039 + - &id874 {dataset: KIOST-ESM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gr1, institute: KIOST, start_year: 2015} + - *id040 + - *id041 + - *id042 + - *id043 + - *id044 + - *id045 + - *id046 + - *id047 + - *id048 + - *id049 + - *id050 + - *id051 + - *id052 + - *id053 + - *id054 + - *id055 + - *id056 + - *id057 + - *id058 + - *id059 + - *id060 + - *id061 + - *id062 + - *id063 + - *id064 + - *id065 +# - *id066 + - *id067 + - *id068 + - *id069 + - *id070 + - *id071 + - *id072 + - *id073 + - *id074 + - *id075 + - &id875 {dataset: KIOST-ESM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gr1, institute: KIOST, start_year: 2015} + - *id076 + - *id077 + - *id078 + - *id079 + - *id080 + - *id081 + - *id082 + - *id083 + - *id084 + - *id085 + - *id086 + - *id087 + - *id088 + - *id089 + - *id090 + - *id091 + - *id092 + - *id093 + - *id094 + - *id095 + - *id096 + - *id097 + - *id098 + - *id099 + - *id100 + - *id101 +# - *id102 + - *id103 + - *id104 + - *id105 + - *id106 + - *id107 + - *id108 + - *id109 + - *id110 + - *id111 + - &id876 {dataset: KIOST-ESM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gr1, institute: KIOST, start_year: 2015} + - *id112 + - *id113 + - *id114 + - *id115 + - *id116 + - *id117 + - *id118 + - *id119 + - *id120 + - *id121 + - *id122 + - *id123 + - *id124 + - *id125 + - *id126 + - *id127 + - *id128 + - *id129 + - *id130 + - *id131 + - *id132 + - *id133 + - *id134 + - *id135 + - *id136 + - *id137 + - *id138 + - *id139 + - *id140 + - *id141 + - *id142 + - *id143 + - *id144 + - *id145 + - *id146 + - *id147 + - *id148 +# - *id149 + - *id150 + - *id151 + - *id152 + - *id153 + - *id154 + - *id155 + - *id156 + - *id157 + - *id158 + - *id159 + - *id160 + - *id161 + - *id162 + - *id163 + - *id164 + - *id165 + - *id166 + - *id167 + - *id168 + - *id169 + - *id170 + - *id171 + - *id172 + - *id173 + - *id174 + - *id175 + - *id176 + - *id177 + - *id178 + - *id179 + - *id180 + - *id181 + - *id182 + - *id183 +# - *id184 + - *id185 + - *id186 + - *id187 + - *id188 + - *id189 + - *id190 + - *id191 + - &id877 {dataset: KIOST-ESM, end_year: 2014, 
ensemble: r1i1p1f1, exp: historical, + grid: gr1, institute: KIOST, start_year: 1850} + - *id192 + - *id193 + - *id194 + + fig2b: + description: Fig2b, Map of multi-model mean of precipitation change scaled by + global T change. + realms: [atmos] + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP6, + short_name: tas, start_year: 2081} + pr: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP6, short_name: pr, + start_year: 2081} + scripts: + calc_map_diff_scaleT_pr: + label: &id199 [2081-2100 relative to 1995-2014] + percent: 1 + periods: &id200 [2081] + scenarios: &id201 [ssp119, ssp126, ssp245, ssp370, ssp585] + script: tebaldi21esd/calc_pattern_diff_scaleT.ncl + time_avg: annualclim + plot_map_diff_scaleT_pr: + ancestors: [pr, calc_map_diff_scaleT_pr] + colormap: $diag_scripts/shared/plot/rgb/ipcc_prec_div_14.rgb + diagscript: calc_map_diff_scaleT_mmm_stipp + diff_levs: [-12, -10, -8, -6, -4, -2, 0, 2, 4, 6, 8, 10, 12] + label: *id199 + max_hori: 2 + max_vert: 2 + percent: 1 + periods: *id200 + projection: Robinson + scenarios: *id201 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: Precipitation change scaled by global T + additional_datasets: &fig2_pr_datasets + - &id202 {dataset: GISS-E2-1-G, end_year: 2014, ensemble: r1i1p1f2, exp: historical, + grid: gn, institute: NASA-GISS, start_year: 1995} + - &id203 {dataset: AWI-CM-1-1-MR, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: AWI, start_year: 1995} + - &id204 {dataset: FGOALS-f3-L, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gr, institute: CAS, start_year: 1995} + - &id205 {dataset: FGOALS-g3, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: CAS, start_year: 1995} + - &id206 {dataset: CMCC-CM2-SR5, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: CMCC, start_year: 1995} + - &id207 {dataset: ACCESS-CM2, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: CSIRO-ARCCSS, start_year: 1995} + - &id208 {dataset: GFDL-ESM4, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gr1, institute: NOAA-GFDL, start_year: 1995} + - &id209 {dataset: CAMS-CSM1-0, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: CAMS, start_year: 1995} + - &id210 {dataset: CanESM5-CanOE, end_year: 2014, ensemble: r1i1p2f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1995} + - &id211 {dataset: CanESM5, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1995} + - &id212 {dataset: UKESM1-0-LL, end_year: 2014, ensemble: r1i1p1f2, exp: historical, + grid: gn, institute: MOHC, start_year: 1995} + - &id213 {dataset: EC-Earth3, end_year: 2014, ensemble: r4i1p1f1, exp: historical, + grid: gr, institute: EC-Earth-Consortium, start_year: 1995} + - &id214 {dataset: EC-Earth3-Veg, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gr, institute: EC-Earth-Consortium, start_year: 1995} + - &id215 {dataset: MIROC6, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: MIROC, start_year: 1995} + - &id216 {dataset: MIROC-ES2L, end_year: 2014, ensemble: r1i1p1f2, exp: historical, + grid: gn, institute: MIROC, start_year: 1995} + - &id217 {dataset: MCM-UA-1-0, end_year: 2014, ensemble: r1i1p1f2, exp: historical, + grid: gn, institute: UA, start_year: 1995} + - &id218 {dataset: BCC-CSM2-MR, end_year: 2014, ensemble: r1i1p1f1, exp: 
historical, + grid: gn, institute: BCC, start_year: 1995} + - &id219 {dataset: INM-CM5-0, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gr1, institute: INM, start_year: 1995} + - &id220 {dataset: INM-CM4-8, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gr1, institute: INM, start_year: 1995} + - &id221 {dataset: KACE-1-0-G, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gr, institute: NIMS-KMA, start_year: 1995} + - &id222 {dataset: IPSL-CM6A-LR, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gr, institute: IPSL, start_year: 1995} + - &id223 {dataset: CESM2-WACCM, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: NCAR, start_year: 1995} +# - &id224 {dataset: CESM2, end_year: 2014, ensemble: r1i1p1f1, exp: historical, +# grid: gn, institute: NCAR, start_year: 1995} + - &id225 {dataset: MPI-ESM1-2-LR, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: MPI-M, start_year: 1995} + - &id226 {dataset: ACCESS-ESM1-5, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: CSIRO, start_year: 1995} + - &id227 {dataset: MRI-ESM2-0, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: MRI, start_year: 1995} + - &id228 {dataset: NorESM2-LM, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: NCC, start_year: 1995} + - &id229 {dataset: NorESM2-MM, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: NCC, start_year: 1995} + - &id230 {dataset: CNRM-ESM2-1, end_year: 2014, ensemble: r1i1p1f2, exp: historical, + grid: gr, institute: CNRM-CERFACS, start_year: 1995} + - &id231 {dataset: CNRM-CM6-1-HR, end_year: 2014, ensemble: r1i1p1f2, exp: historical, + grid: gr, institute: CNRM-CERFACS, start_year: 1995} + - &id232 {dataset: CNRM-CM6-1, end_year: 2014, ensemble: r1i1p1f2, exp: historical, + grid: gr, institute: CNRM-CERFACS, start_year: 1995} + - &id233 {dataset: HadGEM3-GC31-LL, end_year: 2014, ensemble: r1i1p1f3, exp: historical, + grid: gn, institute: MOHC, start_year: 1995} + - &id234 {dataset: HadGEM3-GC31-MM, end_year: 2014, ensemble: r1i1p1f3, exp: historical, + grid: gn, institute: MOHC, start_year: 1995} + - &id235 {dataset: FIO-ESM-2-0, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: FIO-QLNM, start_year: 1995} + - &id236 {dataset: CIESM, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gr, institute: THU, start_year: 1995} + - &id237 {dataset: NESM3, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: NUIST, start_year: 1995} + - &id238 {dataset: IITM-ESM, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: CCCR-IITM, start_year: 1995} + - &id239 {dataset: GFDL-CM4, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gr1, institute: NOAA-GFDL, start_year: 1995} + - &id240 {dataset: TaiESM1, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: AS-RCEC, start_year: 1995} + - &id241 {dataset: GISS-E2-1-G, end_year: 2100, ensemble: r1i1p1f2, exp: ssp119, + grid: gn, institute: NASA-GISS, start_year: 2081} + - &id242 {dataset: FGOALS-g3, end_year: 2100, ensemble: r1i1p1f1, exp: ssp119, + grid: gn, institute: CAS, start_year: 2081} + - &id243 {dataset: GFDL-ESM4, end_year: 2100, ensemble: r1i1p1f1, exp: ssp119, + grid: gr1, institute: NOAA-GFDL, start_year: 2081} + - &id244 {dataset: CAMS-CSM1-0, end_year: 2099, ensemble: r1i1p1f1, exp: ssp119, + grid: gn, institute: CAMS, start_year: 
2081} + - &id245 {dataset: CanESM5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp119, grid: gn, + institute: CCCma, start_year: 2081} + - &id246 {dataset: UKESM1-0-LL, end_year: 2100, ensemble: r1i1p1f2, exp: ssp119, + grid: gn, institute: MOHC, start_year: 2081} + - &id247 {dataset: EC-Earth3, end_year: 2100, ensemble: r4i1p1f1, exp: ssp119, + grid: gr, institute: EC-Earth-Consortium, start_year: 2081} + - &id248 {dataset: EC-Earth3-Veg, end_year: 2100, ensemble: r1i1p1f1, exp: ssp119, + grid: gr, institute: EC-Earth-Consortium, start_year: 2081} + - &id249 {dataset: MIROC6, end_year: 2100, ensemble: r1i1p1f1, exp: ssp119, grid: gn, + institute: MIROC, start_year: 2081} + - &id250 {dataset: MIROC-ES2L, end_year: 2100, ensemble: r1i1p1f2, exp: ssp119, + grid: gn, institute: MIROC, start_year: 2081} + - &id251 {dataset: IPSL-CM6A-LR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp119, + grid: gr, institute: IPSL, start_year: 2081} + - &id252 {dataset: MRI-ESM2-0, end_year: 2100, ensemble: r1i1p1f1, exp: ssp119, + grid: gn, institute: MRI, start_year: 2081} + - &id253 {dataset: CNRM-ESM2-1, end_year: 2100, ensemble: r1i1p1f2, exp: ssp119, + grid: gr, institute: CNRM-CERFACS, start_year: 2081} + - &id254 {dataset: GISS-E2-1-G, end_year: 2100, ensemble: r1i1p1f2, exp: ssp126, + grid: gn, institute: NASA-GISS, start_year: 2081} + - &id255 {dataset: AWI-CM-1-1-MR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gn, institute: AWI, start_year: 2081} + - &id256 {dataset: FGOALS-f3-L, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gr, institute: CAS, start_year: 2081} + - &id257 {dataset: FGOALS-g3, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gn, institute: CAS, start_year: 2081} + - &id258 {dataset: CMCC-CM2-SR5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gn, institute: CMCC, start_year: 2081} + - &id259 {dataset: ACCESS-CM2, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gn, institute: CSIRO-ARCCSS, start_year: 2081} + - &id260 {dataset: GFDL-ESM4, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gr1, institute: NOAA-GFDL, start_year: 2081} + - &id261 {dataset: CAMS-CSM1-0, end_year: 2099, ensemble: r1i1p1f1, exp: ssp126, + grid: gn, institute: CAMS, start_year: 2081} + - &id262 {dataset: CanESM5-CanOE, end_year: 2100, ensemble: r1i1p2f1, exp: ssp126, + grid: gn, institute: CCCma, start_year: 2081} + - &id263 {dataset: CanESM5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, grid: gn, + institute: CCCma, start_year: 2081} + - &id264 {dataset: UKESM1-0-LL, end_year: 2100, ensemble: r1i1p1f2, exp: ssp126, + grid: gn, institute: MOHC, start_year: 2081} + - &id265 {dataset: HadGEM3-GC31-LL, end_year: 2100, ensemble: r1i1p1f3, exp: ssp126, + grid: gn, institute: MOHC, start_year: 2081} + - &id266 {dataset: HadGEM3-GC31-MM, end_year: 2100, ensemble: r1i1p1f3, exp: ssp126, + grid: gn, institute: MOHC, start_year: 2081} + - &id267 {dataset: EC-Earth3, end_year: 2100, ensemble: r4i1p1f1, exp: ssp126, + grid: gr, institute: EC-Earth-Consortium, start_year: 2081} + - &id268 {dataset: EC-Earth3-Veg, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gr, institute: EC-Earth-Consortium, start_year: 2081} + - &id269 {dataset: MIROC6, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, grid: gn, + institute: MIROC, start_year: 2081} + - &id270 {dataset: MIROC-ES2L, end_year: 2100, ensemble: r1i1p1f2, exp: ssp126, + grid: gn, institute: MIROC, start_year: 2081} + - &id271 {dataset: FIO-ESM-2-0, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: 
gn, institute: FIO-QLNM, start_year: 2081} + - &id272 {dataset: CIESM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, grid: gr, + institute: THU, start_year: 2081} + - &id273 {dataset: MCM-UA-1-0, end_year: 2100, ensemble: r1i1p1f2, exp: ssp126, + grid: gn, institute: UA, start_year: 2081} + - &id274 {dataset: BCC-CSM2-MR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gn, institute: BCC, start_year: 2081} + - &id275 {dataset: INM-CM5-0, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gr1, institute: INM, start_year: 2081} + - &id276 {dataset: INM-CM4-8, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gr1, institute: INM, start_year: 2081} + - &id277 {dataset: KACE-1-0-G, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gr, institute: NIMS-KMA, start_year: 2081} + - &id278 {dataset: IPSL-CM6A-LR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gr, institute: IPSL, start_year: 2081} + - &id279 {dataset: CESM2-WACCM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gn, institute: NCAR, start_year: 2081} +# - &id280 {dataset: CESM2, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, grid: gn, +# institute: NCAR, start_year: 2081} + - &id281 {dataset: MPI-ESM1-2-LR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gn, institute: MPI-M, start_year: 2081} + - &id282 {dataset: NESM3, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, grid: gn, + institute: NUIST, start_year: 2081} + - &id283 {dataset: ACCESS-ESM1-5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gn, institute: CSIRO, start_year: 2081} + - &id284 {dataset: MRI-ESM2-0, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gn, institute: MRI, start_year: 2081} + - &id285 {dataset: NorESM2-LM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gn, institute: NCC, start_year: 2081} + - &id286 {dataset: NorESM2-MM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gn, institute: NCC, start_year: 2081} + - &id287 {dataset: CNRM-ESM2-1, end_year: 2100, ensemble: r1i1p1f2, exp: ssp126, + grid: gr, institute: CNRM-CERFACS, start_year: 2081} + - &id288 {dataset: CNRM-CM6-1-HR, end_year: 2100, ensemble: r1i1p1f2, exp: ssp126, + grid: gr, institute: CNRM-CERFACS, start_year: 2081} + - &id289 {dataset: CNRM-CM6-1, end_year: 2100, ensemble: r1i1p1f2, exp: ssp126, + grid: gr, institute: CNRM-CERFACS, start_year: 2081} + - &id290 {dataset: IITM-ESM, end_year: 2098, ensemble: r1i1p1f1, exp: ssp126, + grid: gn, institute: CCCR-IITM, start_year: 2081} + - &id291 {dataset: GISS-E2-1-G, end_year: 2100, ensemble: r1i1p1f2, exp: ssp245, + grid: gn, institute: NASA-GISS, start_year: 2081} + - &id292 {dataset: AWI-CM-1-1-MR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gn, institute: AWI, start_year: 2081} + - &id293 {dataset: FGOALS-f3-L, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gr, institute: CAS, start_year: 2081} + - &id294 {dataset: FGOALS-g3, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gn, institute: CAS, start_year: 2081} + - &id295 {dataset: CMCC-CM2-SR5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gn, institute: CMCC, start_year: 2081} + - &id296 {dataset: ACCESS-CM2, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gn, institute: CSIRO-ARCCSS, start_year: 2081} + - &id297 {dataset: GFDL-CM4, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gr1, institute: NOAA-GFDL, start_year: 2081} + - &id298 {dataset: GFDL-ESM4, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gr1, institute: 
NOAA-GFDL, start_year: 2081} + - &id299 {dataset: CAMS-CSM1-0, end_year: 2099, ensemble: r1i1p1f1, exp: ssp245, + grid: gn, institute: CAMS, start_year: 2081} + - &id300 {dataset: CanESM5-CanOE, end_year: 2100, ensemble: r1i1p2f1, exp: ssp245, + grid: gn, institute: CCCma, start_year: 2081} + - &id301 {dataset: CanESM5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, grid: gn, + institute: CCCma, start_year: 2081} + - &id302 {dataset: UKESM1-0-LL, end_year: 2100, ensemble: r1i1p1f2, exp: ssp245, + grid: gn, institute: MOHC, start_year: 2081} + - &id303 {dataset: HadGEM3-GC31-LL, end_year: 2100, ensemble: r1i1p1f3, exp: ssp245, + grid: gn, institute: MOHC, start_year: 2081} + - &id304 {dataset: EC-Earth3, end_year: 2100, ensemble: r4i1p1f1, exp: ssp245, + grid: gr, institute: EC-Earth-Consortium, start_year: 2081} + - &id305 {dataset: EC-Earth3-Veg, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gr, institute: EC-Earth-Consortium, start_year: 2081} + - &id306 {dataset: MIROC6, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, grid: gn, + institute: MIROC, start_year: 2081} + - &id307 {dataset: MIROC-ES2L, end_year: 2100, ensemble: r1i1p1f2, exp: ssp245, + grid: gn, institute: MIROC, start_year: 2081} + - &id308 {dataset: FIO-ESM-2-0, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gn, institute: FIO-QLNM, start_year: 2081} + - &id309 {dataset: CIESM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, grid: gr, + institute: THU, start_year: 2081} + - &id310 {dataset: MCM-UA-1-0, end_year: 2100, ensemble: r1i1p1f2, exp: ssp245, + grid: gn, institute: UA, start_year: 2081} + - &id311 {dataset: BCC-CSM2-MR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gn, institute: BCC, start_year: 2081} + - &id312 {dataset: INM-CM5-0, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gr1, institute: INM, start_year: 2081} + - &id313 {dataset: INM-CM4-8, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gr1, institute: INM, start_year: 2081} + - &id314 {dataset: KACE-1-0-G, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gr, institute: NIMS-KMA, start_year: 2081} + - &id315 {dataset: IPSL-CM6A-LR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gr, institute: IPSL, start_year: 2081} + - &id316 {dataset: CESM2-WACCM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gn, institute: NCAR, start_year: 2081} +# - &id317 {dataset: CESM2, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, grid: gn, +# institute: NCAR, start_year: 2081} + - &id318 {dataset: MPI-ESM1-2-LR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gn, institute: MPI-M, start_year: 2081} + - &id319 {dataset: NESM3, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, grid: gn, + institute: NUIST, start_year: 2081} + - &id320 {dataset: ACCESS-ESM1-5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gn, institute: CSIRO, start_year: 2081} + - &id321 {dataset: MRI-ESM2-0, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gn, institute: MRI, start_year: 2081} + - &id322 {dataset: NorESM2-LM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gn, institute: NCC, start_year: 2081} + - &id323 {dataset: NorESM2-MM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gn, institute: NCC, start_year: 2081} + - &id324 {dataset: CNRM-ESM2-1, end_year: 2100, ensemble: r1i1p1f2, exp: ssp245, + grid: gr, institute: CNRM-CERFACS, start_year: 2081} + - &id325 {dataset: CNRM-CM6-1-HR, end_year: 2100, ensemble: r1i1p1f2, exp: ssp245, + grid: gr, institute: CNRM-CERFACS, 
start_year: 2081} + - &id326 {dataset: CNRM-CM6-1, end_year: 2100, ensemble: r1i1p1f2, exp: ssp245, + grid: gr, institute: CNRM-CERFACS, start_year: 2081} + - &id327 {dataset: GISS-E2-1-G, end_year: 2100, ensemble: r1i1p1f2, exp: ssp370, + grid: gn, institute: NASA-GISS, start_year: 2081} + - &id328 {dataset: AWI-CM-1-1-MR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gn, institute: AWI, start_year: 2081} + - &id329 {dataset: FGOALS-f3-L, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gr, institute: CAS, start_year: 2081} + - &id330 {dataset: FGOALS-g3, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gn, institute: CAS, start_year: 2081} + - &id331 {dataset: CMCC-CM2-SR5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gn, institute: CMCC, start_year: 2081} + - &id332 {dataset: ACCESS-CM2, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gn, institute: CSIRO-ARCCSS, start_year: 2081} + - &id333 {dataset: GFDL-ESM4, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gr1, institute: NOAA-GFDL, start_year: 2081} + - &id334 {dataset: CAMS-CSM1-0, end_year: 2099, ensemble: r1i1p1f1, exp: ssp370, + grid: gn, institute: CAMS, start_year: 2081} + - &id335 {dataset: CanESM5-CanOE, end_year: 2100, ensemble: r1i1p2f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2081} + - &id336 {dataset: CanESM5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, grid: gn, + institute: CCCma, start_year: 2081} + - &id337 {dataset: UKESM1-0-LL, end_year: 2100, ensemble: r1i1p1f2, exp: ssp370, + grid: gn, institute: MOHC, start_year: 2081} + - &id338 {dataset: EC-Earth3, end_year: 2100, ensemble: r4i1p1f1, exp: ssp370, + grid: gr, institute: EC-Earth-Consortium, start_year: 2081} + - &id339 {dataset: EC-Earth3-Veg, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gr, institute: EC-Earth-Consortium, start_year: 2081} + - &id340 {dataset: MIROC6, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, grid: gn, + institute: MIROC, start_year: 2081} + - &id341 {dataset: MIROC-ES2L, end_year: 2100, ensemble: r1i1p1f2, exp: ssp370, + grid: gn, institute: MIROC, start_year: 2081} + - &id342 {dataset: MCM-UA-1-0, end_year: 2100, ensemble: r1i1p1f2, exp: ssp370, + grid: gn, institute: UA, start_year: 2081} + - &id343 {dataset: BCC-CSM2-MR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gn, institute: BCC, start_year: 2081} + - &id344 {dataset: TaiESM1, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, grid: gn, + institute: AS-RCEC, start_year: 2081} + - &id345 {dataset: INM-CM5-0, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gr1, institute: INM, start_year: 2081} + - &id346 {dataset: INM-CM4-8, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gr1, institute: INM, start_year: 2081} + - &id347 {dataset: KACE-1-0-G, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gr, institute: NIMS-KMA, start_year: 2081} + - &id348 {dataset: IPSL-CM6A-LR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gr, institute: IPSL, start_year: 2081} + - &id349 {dataset: CESM2-WACCM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gn, institute: NCAR, start_year: 2081} +# - &id350 {dataset: CESM2, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, grid: gn, +# institute: NCAR, start_year: 2081} + - &id351 {dataset: MPI-ESM1-2-LR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gn, institute: MPI-M, start_year: 2081} + - &id352 {dataset: ACCESS-ESM1-5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gn, institute: CSIRO, 
start_year: 2081} + - &id353 {dataset: MRI-ESM2-0, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gn, institute: MRI, start_year: 2081} + - &id354 {dataset: NorESM2-LM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gn, institute: NCC, start_year: 2081} + - &id355 {dataset: NorESM2-MM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gn, institute: NCC, start_year: 2081} + - &id356 {dataset: CNRM-ESM2-1, end_year: 2100, ensemble: r1i1p1f2, exp: ssp370, + grid: gr, institute: CNRM-CERFACS, start_year: 2081} + - &id357 {dataset: CNRM-CM6-1-HR, end_year: 2100, ensemble: r1i1p1f2, exp: ssp370, + grid: gr, institute: CNRM-CERFACS, start_year: 2081} + - &id358 {dataset: CNRM-CM6-1, end_year: 2100, ensemble: r1i1p1f2, exp: ssp370, + grid: gr, institute: CNRM-CERFACS, start_year: 2081} + - &id359 {dataset: IITM-ESM, end_year: 2098, ensemble: r1i1p1f1, exp: ssp370, + grid: gn, institute: CCCR-IITM, start_year: 2081} + - &id360 {dataset: GISS-E2-1-G, end_year: 2100, ensemble: r1i1p1f2, exp: ssp585, + grid: gn, institute: NASA-GISS, start_year: 2081} + - &id361 {dataset: AWI-CM-1-1-MR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gn, institute: AWI, start_year: 2081} + - &id362 {dataset: FGOALS-f3-L, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gr, institute: CAS, start_year: 2081} + - &id363 {dataset: FGOALS-g3, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gn, institute: CAS, start_year: 2081} + - &id364 {dataset: CMCC-CM2-SR5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gn, institute: CMCC, start_year: 2081} + - &id365 {dataset: ACCESS-CM2, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gn, institute: CSIRO-ARCCSS, start_year: 2081} + - &id366 {dataset: GFDL-CM4, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gr1, institute: NOAA-GFDL, start_year: 2081} + - &id367 {dataset: GFDL-ESM4, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gr1, institute: NOAA-GFDL, start_year: 2081} + - &id368 {dataset: CAMS-CSM1-0, end_year: 2099, ensemble: r1i1p1f1, exp: ssp585, + grid: gn, institute: CAMS, start_year: 2081} + - &id369 {dataset: CanESM5-CanOE, end_year: 2100, ensemble: r1i1p2f1, exp: ssp585, + grid: gn, institute: CCCma, start_year: 2081} + - &id370 {dataset: CanESM5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, grid: gn, + institute: CCCma, start_year: 2081} + - &id371 {dataset: UKESM1-0-LL, end_year: 2100, ensemble: r1i1p1f2, exp: ssp585, + grid: gn, institute: MOHC, start_year: 2081} + - &id372 {dataset: HadGEM3-GC31-LL, end_year: 2100, ensemble: r1i1p1f3, exp: ssp585, + grid: gn, institute: MOHC, start_year: 2081} + - &id373 {dataset: HadGEM3-GC31-MM, end_year: 2100, ensemble: r1i1p1f3, exp: ssp585, + grid: gn, institute: MOHC, start_year: 2081} + - &id374 {dataset: EC-Earth3, end_year: 2100, ensemble: r4i1p1f1, exp: ssp585, + grid: gr, institute: EC-Earth-Consortium, start_year: 2081} + - &id375 {dataset: EC-Earth3-Veg, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gr, institute: EC-Earth-Consortium, start_year: 2081} + - &id376 {dataset: MIROC6, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, grid: gn, + institute: MIROC, start_year: 2081} + - &id377 {dataset: MIROC-ES2L, end_year: 2100, ensemble: r1i1p1f2, exp: ssp585, + grid: gn, institute: MIROC, start_year: 2081} + - &id378 {dataset: FIO-ESM-2-0, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gn, institute: FIO-QLNM, start_year: 2081} + - &id379 {dataset: MCM-UA-1-0, end_year: 2100, ensemble: r1i1p1f2, exp: 
ssp585, + grid: gn, institute: UA, start_year: 2081} + - &id380 {dataset: BCC-CSM2-MR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gn, institute: BCC, start_year: 2081} + - &id381 {dataset: INM-CM5-0, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gr1, institute: INM, start_year: 2081} + - &id382 {dataset: INM-CM4-8, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gr1, institute: INM, start_year: 2081} + - &id383 {dataset: KACE-1-0-G, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gr, institute: NIMS-KMA, start_year: 2081} + - &id384 {dataset: IPSL-CM6A-LR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gr, institute: IPSL, start_year: 2081} + - &id385 {dataset: CESM2-WACCM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gn, institute: NCAR, start_year: 2081} +# - &id386 {dataset: CESM2, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, grid: gn, +# institute: NCAR, start_year: 2081} + - &id387 {dataset: MPI-ESM1-2-LR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gn, institute: MPI-M, start_year: 2081} + - &id388 {dataset: NESM3, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, grid: gn, + institute: NUIST, start_year: 2081} + - &id389 {dataset: ACCESS-ESM1-5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gn, institute: CSIRO, start_year: 2081} + - &id390 {dataset: MRI-ESM2-0, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gn, institute: MRI, start_year: 2081} + - &id391 {dataset: NorESM2-LM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gn, institute: NCC, start_year: 2081} + - &id392 {dataset: NorESM2-MM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gn, institute: NCC, start_year: 2081} + - &id393 {dataset: CNRM-ESM2-1, end_year: 2100, ensemble: r1i1p1f2, exp: ssp585, + grid: gr, institute: CNRM-CERFACS, start_year: 2081} + - &id394 {dataset: CNRM-CM6-1-HR, end_year: 2100, ensemble: r1i1p1f2, exp: ssp585, + grid: gr, institute: CNRM-CERFACS, start_year: 2081} + - &id395 {dataset: CNRM-CM6-1, end_year: 2100, ensemble: r1i1p1f2, exp: ssp585, + grid: gr, institute: CNRM-CERFACS, start_year: 2081} + - &id396 {dataset: IITM-ESM, end_year: 2098, ensemble: r1i1p1f1, exp: ssp585, + grid: gn, institute: CCCR-IITM, start_year: 2081} + + fig2a: + description: Fig2a, Map of multi-model mean temperature change scaled by global + T change. 
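+      # How shared settings are kept in sync in this recipe: values used by both
+      # a calc script and its plot script are anchored once (e.g. &id397-&id399
+      # on label, periods and scenarios in calc_map_diff_scaleT_tas below) and
+      # re-referenced via aliases (*id397-*id399), so the two scripts cannot
+      # drift apart. A minimal, illustrative-only sketch of the same YAML
+      # anchor/alias mechanism (names here are hypothetical, not recipe keys):
+      #   scenarios_all: &scen [ssp119, ssp126, ssp245, ssp370, ssp585]
+      #   calc_task: {scenarios: *scen}
+      #   plot_task: {scenarios: *scen}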
+ realms: [atmos] + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP6, + short_name: tas, start_year: 2081} + scripts: + calc_map_diff_scaleT_tas: + label: &id397 [2081-2100 relative to 1995-2014] + periods: &id398 [2081] + scenarios: &id399 [ssp119, ssp126, ssp245, ssp370, ssp585] + script: tebaldi21esd/calc_pattern_diff_scaleT.ncl + time_avg: annualclim + plot_map_diff_scaleT_tas: + ancestors: [tas, calc_map_diff_scaleT_tas] + colormap: $diag_scripts/shared/plot/rgb/ipcc_temp_div_18.rgb + diagscript: calc_map_diff_scaleT_mmm_stipp + diff_levs: [0, 0.25, 0.5, 0.75, 1, 1.25, 1.5, 1.75, 2] + label: *id397 + periods: *id398 + projection: Robinson + scenarios: *id399 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: Temperature change scaled by global T + additional_datasets: &fig2_tas_datasets + - *id202 + - *id203 + - *id204 + - *id205 + - *id206 + - *id207 + - *id208 + - *id209 + - *id210 + - *id211 + - *id212 + - *id213 + - *id214 + - *id215 + - *id216 + - *id217 + - *id218 + - *id219 + - *id220 + - *id221 + - *id222 + - *id223 +# - *id224 + - *id225 + - *id226 + - *id227 + - *id228 + - *id229 + - *id230 + - *id231 + - *id232 + - *id233 + - *id234 + - *id235 + - *id236 + - *id237 + - &id403 {dataset: KIOST-ESM, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gr1, institute: KIOST, start_year: 1995} + - *id238 + - *id239 + - *id240 + - *id241 + - *id242 + - *id243 + - *id244 + - *id245 + - *id246 + - *id247 + - *id248 + - *id249 + - *id250 + - *id251 + - *id252 + - *id253 + - *id254 + - *id255 + - *id256 + - *id257 + - *id258 + - *id259 + - *id260 + - *id261 + - *id262 + - *id263 + - *id264 + - *id265 + - *id266 + - *id267 + - *id268 + - *id269 + - *id270 + - *id271 + - *id272 + - *id273 + - *id274 + - *id275 + - *id276 + - *id277 + - *id278 + - *id279 +# - *id280 + - *id281 + - *id282 + - *id283 + - *id284 + - *id285 + - *id286 + - *id287 + - *id288 + - *id289 + - &id404 {dataset: KIOST-ESM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, + grid: gr1, institute: KIOST, start_year: 2081} + - *id290 + - *id291 + - *id292 + - *id293 + - *id294 + - *id295 + - *id296 + - *id297 + - *id298 + - *id299 + - *id300 + - *id301 + - *id302 + - *id303 + - *id304 + - *id305 + - *id306 + - *id307 + - *id308 + - *id309 + - *id310 + - *id311 + - *id312 + - *id313 + - *id314 + - *id315 + - *id316 +# - *id317 + - *id318 + - *id319 + - *id320 + - *id321 + - *id322 + - *id323 + - *id324 + - *id325 + - *id326 + - &id405 {dataset: KIOST-ESM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, + grid: gr1, institute: KIOST, start_year: 2081} + - *id327 + - *id328 + - *id329 + - *id330 + - *id331 + - *id332 + - *id333 + - *id334 + - *id335 + - *id336 + - *id337 + - *id338 + - *id339 + - *id340 + - *id341 + - *id342 + - *id343 + - *id344 + - *id345 + - *id346 + - *id347 + - *id348 + - *id349 +# - *id350 + - *id351 + - *id352 + - *id353 + - *id354 + - *id355 + - *id356 + - *id357 + - *id358 + - *id359 + - *id360 + - *id361 + - *id362 + - *id363 + - *id364 + - *id365 + - *id366 + - *id367 + - *id368 + - *id369 + - *id370 + - *id371 + - *id372 + - *id373 + - *id374 + - *id375 + - *id376 + - *id377 + - *id378 + - *id379 + - *id380 + - *id381 + - *id382 + - *id383 + - *id384 + - *id385 +# - *id386 + - *id387 + - *id388 + - *id389 + - *id390 + - *id391 + - *id392 + - *id393 + - *id394 + - *id395 + - &id406 {dataset: KIOST-ESM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, + grid: gr1, institute: KIOST, start_year: 
2081} + - *id396 + + fig2d: + description: Fig2d, Standard deviation of normalized PR patterns + realms: [atmos] + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP6, + short_name: tas, start_year: 2081} + pr: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP6, short_name: pr, + start_year: 2081} + scripts: + calc_map_stddev_scaleT_pr: + label: &id400 [2081-2100 relative to 1995-2014] + percent: 1 + periods: &id401 [2081] + scenarios: &id402 [ssp119, ssp126, ssp245, ssp370, ssp585] + script: tebaldi21esd/calc_pattern_stddev_scaleT.ncl + time_avg: annualclim + plot_map_stddev_scaleT_pr: + ancestors: [pr, calc_map_stddev_scaleT_pr] + colormap: $diag_scripts/shared/plot/rgb/ipcc_prec_seq_14.rgb + diagscript: calc_map_stddev_scaleT_mmm_stipp + diff_levs: [0, 3, 6, 9, 12, 15, 18] + label: *id400 + max_hori: 2 + max_vert: 2 + percent: 1 + periods: *id401 + projection: Robinson + scenarios: *id402 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: Standard deviation of precipitation ~C~ change scaled by global + T ~C~ + additional_datasets: *fig2_pr_datasets + + fig2c: + description: Fig2c, Standard deviation of normalized TAS patterns + realms: [atmos] + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP6, + short_name: tas, start_year: 2081} + scripts: + calc_map_stddev_scaleT_tas: + label: &id407 [2081-2100 relative to 1995-2014] + periods: &id408 [2081] + scenarios: &id409 [ssp119, ssp126, ssp245, ssp370, ssp585] + script: tebaldi21esd/calc_pattern_stddev_scaleT.ncl + time_avg: annualclim + plot_map_stddev_scaleT_tas: + ancestors: [tas, calc_map_stddev_scaleT_tas] + colormap: $diag_scripts/shared/plot/rgb/ipcc_temp_seq_14.rgb + diagscript: calc_map_stddev_scaleT_mmm_stipp + diff_levs: [0, 0.2, 0.4, 0.6, 0.8, 1, 1.2, 1.4, 1.6] + label: *id407 + periods: *id408 + projection: Robinson + scenarios: *id409 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: Standard deviation of temperature ~C~ change scaled by global + T ~C~ + additional_datasets: *fig2_tas_datasets + + fig2f: + description: Fig2f, Standard deviation of normalized PR patterns after averaging + across models, highlighting the role of inter-scenario variability + realms: [atmos] + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP6, + short_name: tas, start_year: 2081} + pr: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP6, short_name: pr, + start_year: 2081} + scripts: + calc_map_stddev_scaleT_pr: + label: &id410 [2081-2100] + percent: 1 + periods: &id411 [2081] + scenarios: &id412 [ssp119, ssp126, ssp245, ssp370, ssp585] + script: tebaldi21esd/calc_pattern_intermodel_stddev_scaleT.ncl + time_avg: annualclim + plot_map_stddev_scaleT_pr: + ancestors: [pr, calc_map_stddev_scaleT_pr] + colormap: $diag_scripts/shared/plot/rgb/ipcc_prec_seq_14.rgb + diagscript: calc_map_stddev_scaleT_mmm_stipp + diff_levs: [0, 3, 6, 9, 12, 15, 18] + label: *id410 + max_hori: 2 + max_vert: 2 + percent: 1 + periods: *id411 + projection: Robinson + scenarios: *id412 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: Intermodel standard deviation of precipitation ~C~ change + scaled by global T + additional_datasets: *fig2_pr_datasets + + fig2e: + description: Fig2e, Standard deviation of normalized TAS patterns after averaging + across models, highlighting the 
role of inter-scenario variability + realms: [atmos] + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP6, + short_name: tas, start_year: 2081} + scripts: + calc_map_stddev_scaleT_tas: + label: &id413 [2081-2100] + periods: &id414 [2081] + scenarios: &id415 [ssp119, ssp126, ssp245, ssp370, ssp585] + script: tebaldi21esd/calc_pattern_intermodel_stddev_scaleT.ncl + time_avg: annualclim + plot_map_stddev_scaleT_tas: + ancestors: [tas, calc_map_stddev_scaleT_tas] + colormap: $diag_scripts/shared/plot/rgb/ipcc_temp_seq_14.rgb + diagscript: calc_map_stddev_scaleT_mmm_stipp + diff_levs: [0, 0.2, 0.4, 0.6, 0.8, 1, 1.2, 1.4, 1.6] + label: *id413 + periods: *id414 + projection: Robinson + scenarios: *id415 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: Intermodel standard deviation of temperature ~C~ change + scaled by global T + additional_datasets: *fig2_tas_datasets + + fig2h: + description: Fig2h, Standard deviation of normalized PR patterns after averaging + across scenarios + realms: [atmos] + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP6, + short_name: tas, start_year: 2081} + pr: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP6, short_name: pr, + start_year: 2081} + scripts: + calc_map_stddev_scaleT_pr: + label: &id416 [2081-2100 relative to 1995-2014] + percent: 1 + periods: &id417 [2081] + scenarios: &id418 [ssp119, ssp126, ssp245, ssp370, ssp585] + script: tebaldi21esd/calc_pattern_interscenario_stddev_scaleT.ncl + time_avg: annualclim + plot_map_stddev_scaleT_pr: + ancestors: [pr, calc_map_stddev_scaleT_pr] + colormap: $diag_scripts/shared/plot/rgb/ipcc_prec_seq_14.rgb + diagscript: calc_map_stddev_scaleT_mmm_stipp + diff_levs: [0, 3, 6, 9, 12, 15, 18] + label: *id416 + max_hori: 2 + max_vert: 2 + percent: 1 + periods: *id417 + projection: Robinson + scenarios: *id418 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: Interscenario standard deviation of precipitation ~C~ change + scaled by global T~C~ + additional_datasets: *fig2_pr_datasets + + fig2g: + description: Fig2g, Standard deviation of normalized TAS patterns after averaging + across scenarios + realms: [atmos] + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP6, + short_name: tas, start_year: 2081} + scripts: + calc_map_stddev_scaleT_tas: + label: &id419 [2081-2100 relative to 1995-2014] + periods: &id420 [2081] + scenarios: &id421 [ssp119, ssp126, ssp245, ssp370, ssp585] + script: tebaldi21esd/calc_pattern_interscenario_stddev_scaleT.ncl + time_avg: annualclim + plot_map_stddev_scaleT_tas: + ancestors: [tas, calc_map_stddev_scaleT_tas] + colormap: $diag_scripts/shared/plot/rgb/ipcc_temp_seq_14.rgb + diagscript: calc_map_stddev_scaleT_mmm_stipp + diff_levs: [0, 0.2, 0.4, 0.6, 0.8, 1, 1.2, 1.4, 1.6] + label: *id419 + periods: *id420 + projection: Robinson + scenarios: *id421 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: Interscenario standard deviation of temperature ~C~ change + scaled by global T~C~ + additional_datasets: *fig2_tas_datasets + + fig3b: + description: Fig3b, Normalized patterns of percent precipitation computed from + CMIP5 models and scenarios + realms: [atmos] + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP5, + short_name: tas, start_year: 2081} + pr:
{end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP5, short_name: pr, + start_year: 2081} + scripts: + calc_map_diff_scaleT_pr: + label: &id422 [2081-2100 relative to 1986-2005] + percent: 1 + periods: &id423 [2081] + scenarios: &id424 [rcp26, rcp45, rcp60, rcp85] + script: tebaldi21esd/calc_pattern_diff_scaleT.ncl + time_avg: annualclim + plot_map_diff_scaleT_pr: + ancestors: [pr, calc_map_diff_scaleT_pr] + colormap: $diag_scripts/shared/plot/rgb/ipcc_prec_div_14.rgb + diagscript: calc_map_diff_scaleT_mmm_stipp + diff_levs: [-12, -10, -8, -6, -4, -2, 0, 2, 4, 6, 8, 10, 12] + label: *id422 + max_hori: 2 + max_vert: 2 + percent: 1 + periods: *id423 + projection: Robinson + scenarios: *id424 + script: tebaldi21esd/plot_pattern.ncl + sig: false + span: false + time_avg: annualclim + title: Precipitation change scaled by global T, CMIP5 + additional_datasets: &fig3_row1_datasets + - &id425 {dataset: GISS-E2-R-CC, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: NASA-GISS, start_year: 1986} + - &id426 {dataset: CCSM4, end_year: 2005, ensemble: r6i1p1, exp: historical, institute: NCAR, + start_year: 1986} + - &id427 {dataset: NorESM1-ME, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: NCC, start_year: 1986} + - &id428 {dataset: GISS-E2-H, end_year: 2005, ensemble: r1i1p3, exp: historical, + institute: NASA-GISS, start_year: 1986} + - &id429 {dataset: MRI-CGCM3, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: MRI, start_year: 1986} + - &id430 {dataset: GISS-E2-H-CC, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: NASA-GISS, start_year: 1986} + - &id431 {dataset: GISS-E2-R, end_year: 2005, ensemble: r1i1p3, exp: historical, + institute: NASA-GISS, start_year: 1986} + - &id432 {dataset: CESM1-FASTCHEM, end_year: 2005, ensemble: r3i1p1, exp: historical, + institute: NSF-DOE-NCAR, start_year: 1986} + - &id433 {dataset: CESM1-CAM5, end_year: 2005, ensemble: r3i1p1, exp: historical, + institute: NSF-DOE-NCAR, start_year: 1986} + - &id434 {dataset: CESM1-BGC, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: NSF-DOE-NCAR, start_year: 1986} + - &id435 {dataset: NorESM1-M, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: NCC, start_year: 1986} + - &id436 {dataset: MPI-ESM-MR, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: MPI-M, start_year: 1986} + - &id437 {dataset: MPI-ESM-P, end_year: 2005, ensemble: r2i1p1, exp: historical, + institute: MPI-M, start_year: 1986} + - &id438 {dataset: IPSL-CM5A-MR, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: IPSL, start_year: 1986} + - &id439 {dataset: IPSL-CM5B-LR, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: IPSL, start_year: 1986} + - &id440 {dataset: BNU-ESM, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: BNU, start_year: 1986} + - &id441 {dataset: bcc-csm1-1, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: BCC, start_year: 1986} + - &id442 {dataset: CanESM2, end_year: 2005, ensemble: r5i1p1, exp: historical, + institute: CCCma, start_year: 1986} + - &id443 {dataset: bcc-csm1-1-m, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: BCC, start_year: 1986} + - &id444 {dataset: MPI-ESM-LR, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: MPI-M, start_year: 1986} + - &id445 {dataset: CNRM-CM5-2, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: CNRM-CERFACS, start_year: 1986} + - &id446 {dataset: CNRM-CM5, end_year: 2005, ensemble: 
r1i1p1, exp: historical, + institute: CNRM-CERFACS, start_year: 1986} + - &id447 {dataset: MIROC-ESM, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: MIROC, start_year: 1986} + - &id448 {dataset: CMCC-CESM, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: CMCC, start_year: 1986} + - &id449 {dataset: CMCC-CMS, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: CMCC, start_year: 1986} + - &id450 {dataset: CMCC-CM, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: CMCC, start_year: 1986} + - &id451 {dataset: MIROC-ESM-CHEM, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: MIROC, start_year: 1986} + - &id452 {dataset: FGOALS-s2, end_year: 2005, ensemble: r3i1p1, exp: historical, + institute: LASG-IAP, start_year: 1986} + - &id453 {dataset: FGOALS-g2, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: LASG-CESS, start_year: 1986} + - &id454 {dataset: FIO-ESM, end_year: 2005, ensemble: r3i1p1, exp: historical, + institute: FIO, start_year: 1986} + - &id455 {dataset: inmcm4, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: INM, start_year: 1986} + - &id456 {dataset: EC-EARTH, end_year: 2005, ensemble: r8i1p1, exp: historical, + institute: ICHEC, start_year: 1986} + - &id457 {dataset: ACCESS1-3, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: CSIRO-BOM, start_year: 1986} + - &id458 {dataset: ACCESS1-0, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: CSIRO-BOM, start_year: 1986} + - &id459 {dataset: MIROC5, end_year: 2005, ensemble: r3i1p1, exp: historical, + institute: MIROC, start_year: 1986} + - &id460 {dataset: CSIRO-Mk3-6-0, end_year: 2005, ensemble: r9i1p1, exp: historical, + institute: CSIRO-QCCCE, start_year: 1986} + - &id461 {dataset: HadGEM2-AO, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: NIMR-KMA, start_year: 1986} + - &id462 {dataset: HadGEM2-ES, end_year: 2005, ensemble: r4i1p1, exp: historical, + institute: MOHC, start_year: 1986} + - &id463 {dataset: GFDL-ESM2M, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: NOAA-GFDL, start_year: 1986} + - &id464 {dataset: GFDL-ESM2G, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: NOAA-GFDL, start_year: 1986} + - &id465 {dataset: GFDL-CM3, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: NOAA-GFDL, start_year: 1986} + - &id466 {dataset: HadGEM2-CC, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: MOHC, start_year: 1986} + - &id467 {dataset: NorESM1-ME, ensemble: r1i1p1, exp: rcp26, institute: NCC} + - &id468 {dataset: GISS-E2-R, ensemble: r1i1p3, exp: rcp26, institute: NASA-GISS} + - &id469 {dataset: GISS-E2-H, ensemble: r1i1p3, exp: rcp26, institute: NASA-GISS} + - &id470 {dataset: MRI-CGCM3, ensemble: r1i1p1, exp: rcp26, institute: MRI} + - &id471 {dataset: HadGEM2-ES, ensemble: r4i1p1, exp: rcp26, institute: MOHC} + - &id472 {dataset: CESM1-CAM5, ensemble: r3i1p1, exp: rcp26, institute: NSF-DOE-NCAR} + - &id473 {dataset: GFDL-ESM2M, ensemble: r1i1p1, exp: rcp26, institute: NOAA-GFDL} + - &id474 {dataset: CCSM4, ensemble: r6i1p1, exp: rcp26, institute: NCAR} + - &id475 {dataset: NorESM1-M, ensemble: r1i1p1, exp: rcp26, institute: NCC} + - &id476 {dataset: MPI-ESM-MR, ensemble: r1i1p1, exp: rcp26, institute: MPI-M} + - &id477 {dataset: GFDL-ESM2G, ensemble: r1i1p1, exp: rcp26, institute: NOAA-GFDL} + - &id478 {dataset: GFDL-CM3, ensemble: r1i1p1, exp: rcp26, institute: NOAA-GFDL} + - &id479 {dataset: IPSL-CM5A-MR, ensemble: r1i1p1, 
exp: rcp26, institute: IPSL} + - &id480 {dataset: BNU-ESM, ensemble: r1i1p1, exp: rcp26, institute: BNU} + - &id481 {dataset: CanESM2, ensemble: r5i1p1, exp: rcp26, institute: CCCma} + - &id482 {dataset: bcc-csm1-1, ensemble: r1i1p1, exp: rcp26, institute: BCC} + - &id483 {dataset: bcc-csm1-1-m, ensemble: r1i1p1, exp: rcp26, institute: BCC} + - &id484 {dataset: MIROC-ESM, ensemble: r1i1p1, exp: rcp26, institute: MIROC} + - &id485 {dataset: MIROC-ESM-CHEM, ensemble: r1i1p1, exp: rcp26, institute: MIROC} + - &id486 {dataset: EC-EARTH, ensemble: r8i1p1, exp: rcp26, institute: ICHEC} + - &id487 {dataset: CNRM-CM5, ensemble: r1i1p1, exp: rcp26, institute: CNRM-CERFACS} + - &id488 {dataset: MIROC5, ensemble: r3i1p1, exp: rcp26, institute: MIROC} + - &id489 {dataset: CSIRO-Mk3-6-0, ensemble: r9i1p1, exp: rcp26, institute: CSIRO-QCCCE} + - &id490 {dataset: MPI-ESM-LR, ensemble: r1i1p1, exp: rcp26, institute: MPI-M} + - &id491 {dataset: GISS-E2-R-CC, ensemble: r1i1p1, exp: rcp45, institute: NASA-GISS} + - &id492 {dataset: NorESM1-ME, ensemble: r1i1p1, exp: rcp45, institute: NCC} + - &id493 {dataset: GISS-E2-R, ensemble: r1i1p3, exp: rcp45, institute: NASA-GISS} + - &id494 {dataset: GISS-E2-H, ensemble: r1i1p3, exp: rcp45, institute: NASA-GISS} + - &id495 {dataset: MRI-CGCM3, ensemble: r1i1p1, exp: rcp45, institute: MRI} + - &id496 {dataset: HadGEM2-ES, ensemble: r4i1p1, exp: rcp45, institute: MOHC} + - &id497 {dataset: HadGEM2-CC, ensemble: r1i1p1, exp: rcp45, institute: MOHC} + - &id498 {dataset: GISS-E2-H-CC, ensemble: r1i1p1, exp: rcp45, institute: NASA-GISS} + - &id499 {dataset: CESM1-CAM5, ensemble: r3i1p1, exp: rcp45, institute: NSF-DOE-NCAR} + - &id500 {dataset: CESM1-BGC, ensemble: r1i1p1, exp: rcp45, institute: NSF-DOE-NCAR} + - &id501 {dataset: GFDL-ESM2M, ensemble: r1i1p1, exp: rcp45, institute: NOAA-GFDL} + - &id502 {dataset: CCSM4, ensemble: r6i1p1, exp: rcp45, institute: NCAR} + - &id503 {dataset: NorESM1-M, ensemble: r1i1p1, exp: rcp45, institute: NCC} + - &id504 {dataset: MPI-ESM-MR, ensemble: r1i1p1, exp: rcp45, institute: MPI-M} + - &id505 {dataset: GFDL-ESM2G, ensemble: r1i1p1, exp: rcp45, institute: NOAA-GFDL} + - &id506 {dataset: GFDL-CM3, ensemble: r1i1p1, exp: rcp45, institute: NOAA-GFDL} + - &id507 {dataset: IPSL-CM5B-LR, ensemble: r1i1p1, exp: rcp45, institute: IPSL} + - &id508 {dataset: IPSL-CM5A-MR, ensemble: r1i1p1, exp: rcp45, institute: IPSL} + - &id509 {dataset: BNU-ESM, ensemble: r1i1p1, exp: rcp45, institute: BNU} + - &id510 {dataset: bcc-csm1-1, ensemble: r1i1p1, exp: rcp45, institute: BCC} + - &id511 {dataset: bcc-csm1-1-m, ensemble: r1i1p1, exp: rcp45, institute: BCC} + - &id512 {dataset: CanESM2, ensemble: r5i1p1, exp: rcp45, institute: CCCma} + - &id513 {dataset: CMCC-CMS, ensemble: r1i1p1, exp: rcp45, institute: CMCC} + - &id514 {dataset: CMCC-CM, ensemble: r1i1p1, exp: rcp45, institute: CMCC} + - &id515 {dataset: MIROC-ESM, ensemble: r1i1p1, exp: rcp45, institute: MIROC} + - &id516 {dataset: MIROC5, ensemble: r3i1p1, exp: rcp45, institute: MIROC} + - &id517 {dataset: MIROC-ESM-CHEM, ensemble: r1i1p1, exp: rcp45, institute: MIROC} + - &id518 {dataset: FGOALS-g2, ensemble: r1i1p1, exp: rcp45, institute: LASG-CESS} + - &id519 {dataset: FIO-ESM, ensemble: r3i1p1, exp: rcp45, institute: FIO} + - &id520 {dataset: inmcm4, ensemble: r1i1p1, exp: rcp45, institute: INM} + - &id521 {dataset: EC-EARTH, ensemble: r8i1p1, exp: rcp45, institute: ICHEC} + - &id522 {dataset: CNRM-CM5, ensemble: r1i1p1, exp: rcp45, institute: CNRM-CERFACS} + - &id523 {dataset: ACCESS1-0, 
ensemble: r1i1p1, exp: rcp45, institute: CSIRO-BOM} + - &id524 {dataset: ACCESS1-3, ensemble: r1i1p1, exp: rcp45, institute: CSIRO-BOM} + - &id525 {dataset: CSIRO-Mk3-6-0, ensemble: r9i1p1, exp: rcp45, institute: CSIRO-QCCCE} + - &id526 {dataset: MPI-ESM-LR, ensemble: r1i1p1, exp: rcp45, institute: MPI-M} + - &id527 {dataset: NorESM1-ME, ensemble: r1i1p1, exp: rcp85, institute: NCC} + - &id528 {dataset: GISS-E2-R, ensemble: r1i1p3, exp: rcp85, institute: NASA-GISS} + - &id529 {dataset: GISS-E2-H, ensemble: r1i1p3, exp: rcp85, institute: NASA-GISS} + - &id530 {dataset: MRI-CGCM3, ensemble: r1i1p1, exp: rcp85, institute: MRI} + - &id531 {dataset: HadGEM2-CC, ensemble: r1i1p1, exp: rcp85, institute: MOHC} + - &id532 {dataset: CESM1-CAM5, ensemble: r3i1p1, exp: rcp85, institute: NSF-DOE-NCAR} + - &id533 {dataset: CESM1-BGC, ensemble: r1i1p1, exp: rcp85, institute: NSF-DOE-NCAR} + - &id534 {dataset: GFDL-ESM2M, ensemble: r1i1p1, exp: rcp85, institute: NOAA-GFDL} + - &id535 {dataset: CCSM4, ensemble: r6i1p1, exp: rcp85, institute: NCAR} + - &id536 {dataset: NorESM1-M, ensemble: r1i1p1, exp: rcp85, institute: NCC} + - &id537 {dataset: MPI-ESM-LR, ensemble: r1i1p1, exp: rcp85, institute: MPI-M} + - &id538 {dataset: MPI-ESM-MR, ensemble: r1i1p1, exp: rcp85, institute: MPI-M} + - &id539 {dataset: HadGEM2-ES, ensemble: r4i1p1, exp: rcp85, institute: MOHC} + - &id540 {dataset: GFDL-ESM2G, ensemble: r1i1p1, exp: rcp85, institute: NOAA-GFDL} + - &id541 {dataset: GFDL-CM3, ensemble: r1i1p1, exp: rcp85, institute: NOAA-GFDL} + - &id542 {dataset: IPSL-CM5A-MR, ensemble: r1i1p1, exp: rcp85, institute: IPSL} + - &id543 {dataset: inmcm4, ensemble: r1i1p1, exp: rcp85, institute: INM} + - &id544 {dataset: BNU-ESM, ensemble: r1i1p1, exp: rcp85, institute: BNU} + - &id545 {dataset: bcc-csm1-1-m, ensemble: r1i1p1, exp: rcp85, institute: BCC} + - &id546 {dataset: bcc-csm1-1, ensemble: r1i1p1, exp: rcp85, institute: BCC} + - &id547 {dataset: IPSL-CM5B-LR, ensemble: r1i1p1, exp: rcp85, institute: IPSL} + - &id548 {dataset: CanESM2, ensemble: r5i1p1, exp: rcp85, institute: CCCma} + - &id549 {dataset: CMCC-CMS, ensemble: r1i1p1, exp: rcp85, institute: CMCC} + - &id550 {dataset: CMCC-CM, ensemble: r1i1p1, exp: rcp85, institute: CMCC} + - &id551 {dataset: MIROC-ESM-CHEM, ensemble: r1i1p1, exp: rcp85, institute: MIROC} + - &id552 {dataset: MIROC-ESM, ensemble: r1i1p1, exp: rcp85, institute: MIROC} + - &id553 {dataset: MIROC5, ensemble: r3i1p1, exp: rcp85, institute: MIROC} + - &id554 {dataset: FGOALS-g2, ensemble: r1i1p1, exp: rcp85, institute: LASG-CESS} + - &id555 {dataset: FIO-ESM, ensemble: r3i1p1, exp: rcp85, institute: FIO} + - &id556 {dataset: EC-EARTH, ensemble: r8i1p1, exp: rcp85, institute: ICHEC} + - &id557 {dataset: CNRM-CM5, ensemble: r1i1p1, exp: rcp85, institute: CNRM-CERFACS} + - &id558 {dataset: ACCESS1-0, ensemble: r1i1p1, exp: rcp85, institute: CSIRO-BOM} + - &id559 {dataset: ACCESS1-3, ensemble: r1i1p1, exp: rcp85, institute: CSIRO-BOM} + - &id560 {dataset: CSIRO-Mk3-6-0, ensemble: r9i1p1, exp: rcp85, institute: CSIRO-QCCCE} + - &id561 {dataset: GFDL-CM2p1, end_year: 2005, ensemble: r4i1p1, exp: historical, + institute: NOAA-GFDL, start_year: 1986} + - &id562 {dataset: MRI-ESM1, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: MRI, start_year: 1986} + - &id563 {dataset: HadCM3, end_year: 2005, ensemble: r9i1p1, exp: historical, + institute: MOHC, start_year: 1986} + - &id564 {dataset: CESM1-WACCM, end_year: 2005, ensemble: r2i1p1, exp: historical, + institute: NSF-DOE-NCAR, start_year: 
1986} + - &id565 {dataset: IPSL-CM5A-LR, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: IPSL, start_year: 1986} + - &id566 {dataset: CanCM4, end_year: 2005, ensemble: r9i1p1, exp: historical, + institute: CCCma, start_year: 1986} + - &id567 {dataset: MIROC4h, end_year: 2005, ensemble: r3i1p1, exp: historical, + institute: MIROC, start_year: 1986} + - &id568 {dataset: HadGEM2-AO, end_year: 2099, ensemble: r1i1p1, exp: rcp26, institute: NIMR-KMA, + start_year: 2081} + - &id569 {dataset: CESM1-WACCM, end_year: 2099, ensemble: r2i1p1, exp: rcp26, + institute: NSF-DOE-NCAR, start_year: 2081} + - &id570 {dataset: IPSL-CM5A-LR, ensemble: r1i1p1, exp: rcp26, institute: IPSL} + - &id571 {dataset: HadGEM2-AO, end_year: 2099, ensemble: r1i1p1, exp: rcp45, institute: NIMR-KMA, + start_year: 2081} + - &id572 {dataset: CESM1-WACCM, end_year: 2099, ensemble: r2i1p1, exp: rcp45, + institute: NSF-DOE-NCAR, start_year: 2081} + - &id573 {dataset: IPSL-CM5A-LR, ensemble: r1i1p1, exp: rcp45, institute: IPSL} + - &id574 {dataset: HadGEM2-AO, end_year: 2099, ensemble: r1i1p1, exp: rcp60, institute: NIMR-KMA, + start_year: 2081} + - &id575 {dataset: NorESM1-ME, ensemble: r1i1p1, exp: rcp60, institute: NCC} + - &id576 {dataset: GISS-E2-R, ensemble: r1i1p3, exp: rcp60, institute: NASA-GISS} + - &id577 {dataset: GISS-E2-H, ensemble: r1i1p3, exp: rcp60, institute: NASA-GISS} + - &id578 {dataset: MRI-CGCM3, ensemble: r1i1p1, exp: rcp60, institute: MRI} + - &id579 {dataset: CESM1-CAM5, ensemble: r3i1p1, exp: rcp60, institute: NSF-DOE-NCAR} + - &id580 {dataset: GFDL-ESM2M, ensemble: r1i1p1, exp: rcp60, institute: NOAA-GFDL} + - &id581 {dataset: CCSM4, ensemble: r6i1p1, exp: rcp60, institute: NCAR} + - &id582 {dataset: NorESM1-M, ensemble: r1i1p1, exp: rcp60, institute: NCC} + - &id583 {dataset: HadGEM2-ES, ensemble: r4i1p1, exp: rcp60, institute: MOHC} + - &id584 {dataset: GFDL-ESM2G, ensemble: r1i1p1, exp: rcp60, institute: NOAA-GFDL} + - &id585 {dataset: IPSL-CM5A-MR, ensemble: r1i1p1, exp: rcp60, institute: IPSL} + - &id586 {dataset: IPSL-CM5A-LR, ensemble: r1i1p1, exp: rcp60, institute: IPSL} + - &id587 {dataset: bcc-csm1-1, ensemble: r1i1p1, exp: rcp60, institute: BCC} + - &id588 {dataset: bcc-csm1-1-m, ensemble: r1i1p1, exp: rcp60, institute: BCC} + - &id589 {dataset: MIROC-ESM, ensemble: r1i1p1, exp: rcp60, institute: MIROC} + - &id590 {dataset: MIROC-ESM-CHEM, ensemble: r1i1p1, exp: rcp60, institute: MIROC} + - &id591 {dataset: FIO-ESM, ensemble: r3i1p1, exp: rcp60, institute: FIO} + - &id592 {dataset: MIROC5, ensemble: r3i1p1, exp: rcp60, institute: MIROC} + - &id593 {dataset: CSIRO-Mk3-6-0, ensemble: r9i1p1, exp: rcp60, institute: CSIRO-QCCCE} + - &id594 {dataset: HadGEM2-AO, end_year: 2099, ensemble: r1i1p1, exp: rcp85, institute: NIMR-KMA, + start_year: 2081} + - &id595 {dataset: CESM1-WACCM, end_year: 2099, ensemble: r2i1p1, exp: rcp85, + institute: NSF-DOE-NCAR, start_year: 2081} + - &id596 {dataset: IPSL-CM5A-LR, ensemble: r1i1p1, exp: rcp85, institute: IPSL} + + fig3a: + description: Fig3a, Normalized patterns of TAS computed from CMIP5 models + and scenarios + realms: [atmos] + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP5, + short_name: tas, start_year: 2081} + scripts: + calc_map_diff_scaleT_tas: + label: &id597 [2081-2100 relative to 1986-2005] + periods: &id598 [2081] + scenarios: &id599 [rcp26, rcp45, rcp60, rcp85] + script: tebaldi21esd/calc_pattern_diff_scaleT.ncl + time_avg: annualclim + plot_map_diff_scaleT_tas: + 
ancestors: [tas, calc_map_diff_scaleT_tas] + colormap: $diag_scripts/shared/plot/rgb/ipcc_temp_div_18.rgb + diagscript: calc_map_diff_scaleT_mmm_stipp + diff_levs: [0, 0.25, 0.5, 0.75, 1, 1.25, 1.5, 1.75, 2] + label: *id597 + periods: *id598 + projection: Robinson + scenarios: *id599 + script: tebaldi21esd/plot_pattern.ncl + sig: false + span: true + time_avg: annualclim + title: Temperature change scaled by global T, CMIP5 + additional_datasets: *fig3_row1_datasets + + fig3d: + description: Fig3d, Map of difference between CMIP6 and CMIP5 multi-model + mean for PR scaled by global T change. + realms: [atmos] + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP6, + short_name: tas, start_year: 2081} + pr: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP6, short_name: pr, + start_year: 2081} + scripts: + plot_map_diff_scaleT_pr: + ancestors: [pr, pr_cmip6_cmip5_normalized_pattern_diff] + colormap: $diag_scripts/shared/plot/rgb/ipcc_prec_div_14.rgb + diagscript: calc_map_diff_scaleT_mmm_stipp + diff_levs: [-12, -10, -8, -6, -4, -2, 0, 2, 4, 6, 8, 10, 12] + label: &id600 [2081-2100 relative to 1986-2005] + max_hori: 2 + max_vert: 2 + percent: 1 + periods: &id601 [2081] + projection: Robinson + scenarios: &id602 [ssp119, ssp126, ssp245, ssp370, ssp585] + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: Difference between CMIP6 and CMIP5 patterns ~C~ of precipitation + change scaled by global T + pr_cmip6_cmip5_normalized_pattern_diff: + label: *id600 + percent: 1 + periods: *id601 + scenarios_cmip5: [rcp26, rcp45, rcp60, rcp85] + scenarios_cmip6: *id602 + script: tebaldi21esd/calc_cmip6_and_cmip5_pattern_diff_scaleT.ncl + time_avg: annualclim + additional_datasets: &fig3_row2_datasets + - &id603 {dataset: GISS-E2-1-G, end_year: 2005, ensemble: r1i1p1f2, exp: historical, + grid: gn, institute: NASA-GISS, start_year: 1986} + - &id604 {dataset: FGOALS-g3, end_year: 2005, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: CAS, start_year: 1986} + - &id605 {dataset: GFDL-ESM4, end_year: 2005, ensemble: r1i1p1f1, exp: historical, + grid: gr1, institute: NOAA-GFDL, start_year: 1986} + - &id606 {dataset: CAMS-CSM1-0, end_year: 2005, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: CAMS, start_year: 1986} + - &id607 {dataset: CanESM5, end_year: 2005, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1986} + - &id608 {dataset: UKESM1-0-LL, end_year: 2005, ensemble: r1i1p1f2, exp: historical, + grid: gn, institute: MOHC, start_year: 1986} + - &id609 {dataset: EC-Earth3, end_year: 2005, ensemble: r4i1p1f1, exp: historical, + grid: gr, institute: EC-Earth-Consortium, start_year: 1986} + - &id610 {dataset: EC-Earth3-Veg, end_year: 2005, ensemble: r1i1p1f1, exp: historical, + grid: gr, institute: EC-Earth-Consortium, start_year: 1986} + - &id611 {dataset: MIROC6, end_year: 2005, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: MIROC, start_year: 1986} + - &id612 {dataset: MIROC-ES2L, end_year: 2005, ensemble: r1i1p1f2, exp: historical, + grid: gn, institute: MIROC, start_year: 1986} + - &id613 {dataset: IPSL-CM6A-LR, end_year: 2005, ensemble: r1i1p1f1, exp: historical, + grid: gr, institute: IPSL, start_year: 1986} + - &id614 {dataset: MRI-ESM2-0, end_year: 2005, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: MRI, start_year: 1986} + - &id615 {dataset: CNRM-ESM2-1, end_year: 2005, ensemble: r1i1p1f2, exp: historical, + grid: gr, 
institute: CNRM-CERFACS, start_year: 1986} + - &id616 {dataset: AWI-CM-1-1-MR, end_year: 2005, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: AWI, start_year: 1986} + - &id617 {dataset: FGOALS-f3-L, end_year: 2005, ensemble: r1i1p1f1, exp: historical, + grid: gr, institute: CAS, start_year: 1986} + - &id618 {dataset: CMCC-CM2-SR5, end_year: 2005, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: CMCC, start_year: 1986} + - &id619 {dataset: ACCESS-CM2, end_year: 2005, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: CSIRO-ARCCSS, start_year: 1986} + - &id620 {dataset: CanESM5-CanOE, end_year: 2005, ensemble: r1i1p2f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1986} + - &id621 {dataset: HadGEM3-GC31-LL, end_year: 2005, ensemble: r1i1p1f3, exp: historical, + grid: gn, institute: MOHC, start_year: 1986} + - &id622 {dataset: HadGEM3-GC31-MM, end_year: 2005, ensemble: r1i1p1f3, exp: historical, + grid: gn, institute: MOHC, start_year: 1986} + - &id623 {dataset: FIO-ESM-2-0, end_year: 2005, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: FIO-QLNM, start_year: 1986} + - &id624 {dataset: CIESM, end_year: 2005, ensemble: r1i1p1f1, exp: historical, + grid: gr, institute: THU, start_year: 1986} + - &id625 {dataset: MCM-UA-1-0, end_year: 2005, ensemble: r1i1p1f2, exp: historical, + grid: gn, institute: UA, start_year: 1986} + - &id626 {dataset: BCC-CSM2-MR, end_year: 2005, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: BCC, start_year: 1986} + - &id627 {dataset: INM-CM5-0, end_year: 2005, ensemble: r1i1p1f1, exp: historical, + grid: gr1, institute: INM, start_year: 1986} + - &id628 {dataset: INM-CM4-8, end_year: 2005, ensemble: r1i1p1f1, exp: historical, + grid: gr1, institute: INM, start_year: 1986} + - &id629 {dataset: KACE-1-0-G, end_year: 2005, ensemble: r1i1p1f1, exp: historical, + grid: gr, institute: NIMS-KMA, start_year: 1986} + - &id630 {dataset: CESM2-WACCM, end_year: 2005, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: NCAR, start_year: 1986} +# - &id631 {dataset: CESM2, end_year: 2005, ensemble: r1i1p1f1, exp: historical, +# grid: gn, institute: NCAR, start_year: 1986} + - &id632 {dataset: MPI-ESM1-2-LR, end_year: 2005, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: MPI-M, start_year: 1986} + - &id633 {dataset: NESM3, end_year: 2005, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: NUIST, start_year: 1986} + - &id634 {dataset: ACCESS-ESM1-5, end_year: 2005, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: CSIRO, start_year: 1986} + - &id635 {dataset: NorESM2-LM, end_year: 2005, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: NCC, start_year: 1986} + - &id636 {dataset: NorESM2-MM, end_year: 2005, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: NCC, start_year: 1986} + - &id637 {dataset: CNRM-CM6-1-HR, end_year: 2005, ensemble: r1i1p1f2, exp: historical, + grid: gr, institute: CNRM-CERFACS, start_year: 1986} + - &id638 {dataset: CNRM-CM6-1, end_year: 2005, ensemble: r1i1p1f2, exp: historical, + grid: gr, institute: CNRM-CERFACS, start_year: 1986} + - &id639 {dataset: IITM-ESM, end_year: 2005, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: CCCR-IITM, start_year: 1986} + - &id640 {dataset: GFDL-CM4, end_year: 2005, ensemble: r1i1p1f1, exp: historical, + grid: gr1, institute: NOAA-GFDL, start_year: 1986} + - &id641 {dataset: TaiESM1, end_year: 2005, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: AS-RCEC, start_year: 1986} + 
- *id241 + - *id242 + - *id243 + - *id244 + - *id245 + - *id246 + - *id247 + - *id248 + - *id249 + - *id250 + - *id251 + - *id252 + - *id253 + - *id254 + - *id255 + - *id256 + - *id257 + - *id258 + - *id259 + - *id260 + - *id261 + - *id262 + - *id263 + - *id264 + - *id265 + - *id266 + - *id267 + - *id268 + - *id269 + - *id270 + - *id271 + - *id272 + - *id273 + - *id274 + - *id275 + - *id276 + - *id277 + - *id278 + - *id279 +# - *id280 + - *id281 + - *id282 + - *id283 + - *id284 + - *id285 + - *id286 + - *id287 + - *id288 + - *id289 + - *id290 + - *id291 + - *id292 + - *id293 + - *id294 + - *id295 + - *id296 + - *id297 + - *id298 + - *id299 + - *id300 + - *id301 + - *id302 + - *id303 + - *id304 + - *id305 + - *id306 + - *id307 + - *id308 + - *id309 + - *id310 + - *id311 + - *id312 + - *id313 + - *id314 + - *id315 + - *id316 +# - *id317 + - *id318 + - *id319 + - *id320 + - *id321 + - *id322 + - *id323 + - *id324 + - *id325 + - *id326 + - *id327 + - *id328 + - *id329 + - *id330 + - *id331 + - *id332 + - *id333 + - *id334 + - *id335 + - *id336 + - *id337 + - *id338 + - *id339 + - *id340 + - *id341 + - *id342 + - *id343 + - *id344 + - *id345 + - *id346 + - *id347 + - *id348 + - *id349 +# - *id350 + - *id351 + - *id352 + - *id353 + - *id354 + - *id355 + - *id356 + - *id357 + - *id358 + - *id359 + - *id360 + - *id361 + - *id362 + - *id363 + - *id364 + - *id365 + - *id366 + - *id367 + - *id368 + - *id369 + - *id370 + - *id371 + - *id372 + - *id373 + - *id374 + - *id375 + - *id376 + - *id377 + - *id378 + - *id379 + - *id380 + - *id381 + - *id382 + - *id383 + - *id384 + - *id385 +# - *id386 + - *id387 + - *id388 + - *id389 + - *id390 + - *id391 + - *id392 + - *id393 + - *id394 + - *id395 + - *id396 + - &id642 {dataset: GISS-E2-R-CC, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: NASA-GISS, project: CMIP5, start_year: 1986} + - &id643 {dataset: GFDL-CM2p1, end_year: 2005, ensemble: r4i1p1, exp: historical, + institute: NOAA-GFDL, project: CMIP5, start_year: 1986} + - &id644 {dataset: CCSM4, end_year: 2005, ensemble: r6i1p1, exp: historical, institute: NCAR, + project: CMIP5, start_year: 1986} + - &id645 {dataset: HadGEM2-AO, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: NIMR-KMA, project: CMIP5, start_year: 1986} + - &id646 {dataset: NorESM1-ME, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: NCC, project: CMIP5, start_year: 1986} + - &id647 {dataset: GISS-E2-H, end_year: 2005, ensemble: r1i1p3, exp: historical, + institute: NASA-GISS, project: CMIP5, start_year: 1986} + - &id648 {dataset: MRI-ESM1, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: MRI, project: CMIP5, start_year: 1986} + - &id649 {dataset: MRI-CGCM3, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: MRI, project: CMIP5, start_year: 1986} + - &id650 {dataset: HadGEM2-ES, end_year: 2005, ensemble: r4i1p1, exp: historical, + institute: MOHC, project: CMIP5, start_year: 1986} + - &id651 {dataset: HadCM3, end_year: 2005, ensemble: r9i1p1, exp: historical, + institute: MOHC, project: CMIP5, start_year: 1986} + - &id652 {dataset: HadGEM2-CC, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: MOHC, project: CMIP5, start_year: 1986} + - &id653 {dataset: GISS-E2-H-CC, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: NASA-GISS, project: CMIP5, start_year: 1986} + - &id654 {dataset: GISS-E2-R, end_year: 2005, ensemble: r1i1p3, exp: historical, + institute: NASA-GISS, project: CMIP5, start_year: 1986} + - &id655 {dataset: CESM1-WACCM, 
end_year: 2005, ensemble: r2i1p1, exp: historical, + institute: NSF-DOE-NCAR, project: CMIP5, start_year: 1986} + - &id656 {dataset: CESM1-FASTCHEM, end_year: 2005, ensemble: r3i1p1, exp: historical, + institute: NSF-DOE-NCAR, project: CMIP5, start_year: 1986} + - &id657 {dataset: CESM1-CAM5, end_year: 2005, ensemble: r3i1p1, exp: historical, + institute: NSF-DOE-NCAR, project: CMIP5, start_year: 1986} + - &id658 {dataset: CESM1-BGC, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: NSF-DOE-NCAR, project: CMIP5, start_year: 1986} + - &id659 {dataset: NorESM1-M, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: NCC, project: CMIP5, start_year: 1986} + - &id660 {dataset: MPI-ESM-MR, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: MPI-M, project: CMIP5, start_year: 1986} + - &id661 {dataset: MPI-ESM-P, end_year: 2005, ensemble: r2i1p1, exp: historical, + institute: MPI-M, project: CMIP5, start_year: 1986} + - &id662 {dataset: GFDL-CM3, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: NOAA-GFDL, project: CMIP5, start_year: 1986} + - &id663 {dataset: GFDL-ESM2M, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: NOAA-GFDL, project: CMIP5, start_year: 1986} + - &id664 {dataset: GFDL-ESM2G, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: NOAA-GFDL, project: CMIP5, start_year: 1986} + - &id665 {dataset: IPSL-CM5A-MR, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: IPSL, project: CMIP5, start_year: 1986} + - &id666 {dataset: IPSL-CM5B-LR, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: IPSL, project: CMIP5, start_year: 1986} + - &id667 {dataset: IPSL-CM5A-LR, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: IPSL, project: CMIP5, start_year: 1986} + - &id668 {dataset: CanCM4, end_year: 2005, ensemble: r9i1p1, exp: historical, + institute: CCCma, project: CMIP5, start_year: 1986} + - &id669 {dataset: BNU-ESM, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: BNU, project: CMIP5, start_year: 1986} + - &id670 {dataset: bcc-csm1-1, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: BCC, project: CMIP5, start_year: 1986} + - &id671 {dataset: CanESM2, end_year: 2005, ensemble: r5i1p1, exp: historical, + institute: CCCma, project: CMIP5, start_year: 1986} + - &id672 {dataset: bcc-csm1-1-m, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: BCC, project: CMIP5, start_year: 1986} + - &id673 {dataset: MPI-ESM-LR, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: MPI-M, project: CMIP5, start_year: 1986} + - &id674 {dataset: CNRM-CM5-2, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: CNRM-CERFACS, project: CMIP5, start_year: 1986} + - &id675 {dataset: CNRM-CM5, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: CNRM-CERFACS, project: CMIP5, start_year: 1986} + - &id676 {dataset: MIROC-ESM, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: MIROC, project: CMIP5, start_year: 1986} + - &id677 {dataset: CMCC-CESM, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: CMCC, project: CMIP5, start_year: 1986} + - &id678 {dataset: CMCC-CMS, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: CMCC, project: CMIP5, start_year: 1986} + - &id679 {dataset: CMCC-CM, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: CMCC, project: CMIP5, start_year: 1986} + - &id680 {dataset: MIROC4h, end_year: 2005, ensemble: r3i1p1, exp: historical, + institute: MIROC, project: 
CMIP5, start_year: 1986} + - &id681 {dataset: MIROC-ESM-CHEM, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: MIROC, project: CMIP5, start_year: 1986} + - &id682 {dataset: FGOALS-s2, end_year: 2005, ensemble: r3i1p1, exp: historical, + institute: LASG-IAP, project: CMIP5, start_year: 1986} + - &id683 {dataset: FGOALS-g2, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: LASG-CESS, project: CMIP5, start_year: 1986} + - &id684 {dataset: FIO-ESM, end_year: 2005, ensemble: r3i1p1, exp: historical, + institute: FIO, project: CMIP5, start_year: 1986} + - &id685 {dataset: inmcm4, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: INM, project: CMIP5, start_year: 1986} + - &id686 {dataset: EC-EARTH, end_year: 2005, ensemble: r8i1p1, exp: historical, + institute: ICHEC, project: CMIP5, start_year: 1986} + - &id687 {dataset: ACCESS1-3, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: CSIRO-BOM, project: CMIP5, start_year: 1986} + - &id688 {dataset: ACCESS1-0, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: CSIRO-BOM, project: CMIP5, start_year: 1986} + - &id689 {dataset: MIROC5, end_year: 2005, ensemble: r3i1p1, exp: historical, + institute: MIROC, project: CMIP5, start_year: 1986} + - &id690 {dataset: CSIRO-Mk3-6-0, end_year: 2005, ensemble: r9i1p1, exp: historical, + institute: CSIRO-QCCCE, project: CMIP5, start_year: 1986} + - &id691 {dataset: HadGEM2-AO, end_year: 2099, ensemble: r1i1p1, exp: rcp26, institute: NIMR-KMA, + project: CMIP5, start_year: 2081} + - &id692 {dataset: NorESM1-ME, ensemble: r1i1p1, exp: rcp26, institute: NCC, project: CMIP5} + - &id693 {dataset: GISS-E2-H, ensemble: r1i1p3, exp: rcp26, institute: NASA-GISS, + project: CMIP5} + - &id694 {dataset: MRI-CGCM3, ensemble: r1i1p1, exp: rcp26, institute: MRI, project: CMIP5} + - &id695 {dataset: HadGEM2-ES, ensemble: r4i1p1, exp: rcp26, institute: MOHC, + project: CMIP5} + - &id696 {dataset: GISS-E2-R, ensemble: r1i1p3, exp: rcp26, institute: NASA-GISS, + project: CMIP5} + - &id697 {dataset: CESM1-WACCM, end_year: 2099, ensemble: r2i1p1, exp: rcp26, + institute: NSF-DOE-NCAR, project: CMIP5, start_year: 2081} + - &id698 {dataset: CESM1-CAM5, ensemble: r3i1p1, exp: rcp26, institute: NSF-DOE-NCAR, + project: CMIP5} + - &id699 {dataset: GFDL-ESM2M, ensemble: r1i1p1, exp: rcp26, institute: NOAA-GFDL, + project: CMIP5} + - &id700 {dataset: CCSM4, ensemble: r6i1p1, exp: rcp26, institute: NCAR, project: CMIP5} + - &id701 {dataset: NorESM1-M, ensemble: r1i1p1, exp: rcp26, institute: NCC, project: CMIP5} + - &id702 {dataset: MPI-ESM-MR, ensemble: r1i1p1, exp: rcp26, institute: MPI-M, + project: CMIP5} + - &id703 {dataset: GFDL-ESM2G, ensemble: r1i1p1, exp: rcp26, institute: NOAA-GFDL, + project: CMIP5} + - &id704 {dataset: GFDL-CM3, ensemble: r1i1p1, exp: rcp26, institute: NOAA-GFDL, + project: CMIP5} + - &id705 {dataset: IPSL-CM5A-MR, ensemble: r1i1p1, exp: rcp26, institute: IPSL, + project: CMIP5} + - &id706 {dataset: IPSL-CM5A-LR, ensemble: r1i1p1, exp: rcp26, institute: IPSL, + project: CMIP5} + - &id707 {dataset: BNU-ESM, ensemble: r1i1p1, exp: rcp26, institute: BNU, project: CMIP5} + - &id708 {dataset: CanESM2, ensemble: r5i1p1, exp: rcp26, institute: CCCma, project: CMIP5} + - &id709 {dataset: bcc-csm1-1, ensemble: r1i1p1, exp: rcp26, institute: BCC, project: CMIP5} + - &id710 {dataset: bcc-csm1-1-m, ensemble: r1i1p1, exp: rcp26, institute: BCC, + project: CMIP5} + - &id711 {dataset: MIROC-ESM, ensemble: r1i1p1, exp: rcp26, institute: MIROC, + project: CMIP5} 
+ - &id712 {dataset: MIROC-ESM-CHEM, ensemble: r1i1p1, exp: rcp26, institute: MIROC, + project: CMIP5} + - &id713 {dataset: EC-EARTH, ensemble: r8i1p1, exp: rcp26, institute: ICHEC, project: CMIP5} + - &id714 {dataset: CNRM-CM5, ensemble: r1i1p1, exp: rcp26, institute: CNRM-CERFACS, + project: CMIP5} + - &id715 {dataset: MIROC5, ensemble: r3i1p1, exp: rcp26, institute: MIROC, project: CMIP5} + - &id716 {dataset: CSIRO-Mk3-6-0, ensemble: r9i1p1, exp: rcp26, institute: CSIRO-QCCCE, + project: CMIP5} + - &id717 {dataset: MPI-ESM-LR, ensemble: r1i1p1, exp: rcp26, institute: MPI-M, + project: CMIP5} + - &id718 {dataset: GISS-E2-R-CC, ensemble: r1i1p1, exp: rcp45, institute: NASA-GISS, + project: CMIP5} + - &id719 {dataset: HadGEM2-AO, end_year: 2099, ensemble: r1i1p1, exp: rcp45, institute: NIMR-KMA, + project: CMIP5, start_year: 2081} + - &id720 {dataset: NorESM1-ME, ensemble: r1i1p1, exp: rcp45, institute: NCC, project: CMIP5} + - &id721 {dataset: GISS-E2-R, ensemble: r1i1p3, exp: rcp45, institute: NASA-GISS, + project: CMIP5} + - &id722 {dataset: GISS-E2-H, ensemble: r1i1p3, exp: rcp45, institute: NASA-GISS, + project: CMIP5} + - &id723 {dataset: MRI-CGCM3, ensemble: r1i1p1, exp: rcp45, institute: MRI, project: CMIP5} + - &id724 {dataset: HadGEM2-ES, ensemble: r4i1p1, exp: rcp45, institute: MOHC, + project: CMIP5} + - &id725 {dataset: HadGEM2-CC, ensemble: r1i1p1, exp: rcp45, institute: MOHC, + project: CMIP5} + - &id726 {dataset: GISS-E2-H-CC, ensemble: r1i1p1, exp: rcp45, institute: NASA-GISS, + project: CMIP5} + - &id727 {dataset: CESM1-WACCM, end_year: 2099, ensemble: r2i1p1, exp: rcp45, + institute: NSF-DOE-NCAR, project: CMIP5, start_year: 2081} + - &id728 {dataset: CESM1-CAM5, ensemble: r3i1p1, exp: rcp45, institute: NSF-DOE-NCAR, + project: CMIP5} + - &id729 {dataset: CESM1-BGC, ensemble: r1i1p1, exp: rcp45, institute: NSF-DOE-NCAR, + project: CMIP5} + - &id730 {dataset: GFDL-ESM2M, ensemble: r1i1p1, exp: rcp45, institute: NOAA-GFDL, + project: CMIP5} + - &id731 {dataset: CCSM4, ensemble: r6i1p1, exp: rcp45, institute: NCAR, project: CMIP5} + - &id732 {dataset: NorESM1-M, ensemble: r1i1p1, exp: rcp45, institute: NCC, project: CMIP5} + - &id733 {dataset: MPI-ESM-MR, ensemble: r1i1p1, exp: rcp45, institute: MPI-M, + project: CMIP5} + - &id734 {dataset: GFDL-ESM2G, ensemble: r1i1p1, exp: rcp45, institute: NOAA-GFDL, + project: CMIP5} + - &id735 {dataset: GFDL-CM3, ensemble: r1i1p1, exp: rcp45, institute: NOAA-GFDL, + project: CMIP5} + - &id736 {dataset: IPSL-CM5B-LR, ensemble: r1i1p1, exp: rcp45, institute: IPSL, + project: CMIP5} + - &id737 {dataset: IPSL-CM5A-MR, ensemble: r1i1p1, exp: rcp45, institute: IPSL, + project: CMIP5} + - &id738 {dataset: IPSL-CM5A-LR, ensemble: r1i1p1, exp: rcp45, institute: IPSL, + project: CMIP5} + - &id739 {dataset: BNU-ESM, ensemble: r1i1p1, exp: rcp45, institute: BNU, project: CMIP5} + - &id740 {dataset: bcc-csm1-1, ensemble: r1i1p1, exp: rcp45, institute: BCC, project: CMIP5} + - &id741 {dataset: bcc-csm1-1-m, ensemble: r1i1p1, exp: rcp45, institute: BCC, + project: CMIP5} + - &id742 {dataset: CanESM2, ensemble: r5i1p1, exp: rcp45, institute: CCCma, project: CMIP5} + - &id743 {dataset: CMCC-CMS, ensemble: r1i1p1, exp: rcp45, institute: CMCC, project: CMIP5} + - &id744 {dataset: CMCC-CM, ensemble: r1i1p1, exp: rcp45, institute: CMCC, project: CMIP5} + - &id745 {dataset: MIROC-ESM, ensemble: r1i1p1, exp: rcp45, institute: MIROC, + project: CMIP5} + - &id746 {dataset: MIROC5, ensemble: r3i1p1, exp: rcp45, institute: MIROC, project: CMIP5} + - &id747 
{dataset: MIROC-ESM-CHEM, ensemble: r1i1p1, exp: rcp45, institute: MIROC, + project: CMIP5} + - &id748 {dataset: FGOALS-g2, ensemble: r1i1p1, exp: rcp45, institute: LASG-CESS, + project: CMIP5} + - &id749 {dataset: FIO-ESM, ensemble: r3i1p1, exp: rcp45, institute: FIO, project: CMIP5} + - &id750 {dataset: inmcm4, ensemble: r1i1p1, exp: rcp45, institute: INM, project: CMIP5} + - &id751 {dataset: EC-EARTH, ensemble: r8i1p1, exp: rcp45, institute: ICHEC, project: CMIP5} + - &id752 {dataset: CNRM-CM5, ensemble: r1i1p1, exp: rcp45, institute: CNRM-CERFACS, + project: CMIP5} + - &id753 {dataset: ACCESS1-0, ensemble: r1i1p1, exp: rcp45, institute: CSIRO-BOM, + project: CMIP5} + - &id754 {dataset: ACCESS1-3, ensemble: r1i1p1, exp: rcp45, institute: CSIRO-BOM, + project: CMIP5} + - &id755 {dataset: CSIRO-Mk3-6-0, ensemble: r9i1p1, exp: rcp45, institute: CSIRO-QCCCE, + project: CMIP5} + - &id756 {dataset: MPI-ESM-LR, ensemble: r1i1p1, exp: rcp45, institute: MPI-M, + project: CMIP5} + - &id757 {dataset: HadGEM2-AO, end_year: 2099, ensemble: r1i1p1, exp: rcp60, institute: NIMR-KMA, + project: CMIP5, start_year: 2081} + - &id758 {dataset: NorESM1-ME, ensemble: r1i1p1, exp: rcp60, institute: NCC, project: CMIP5} + - &id759 {dataset: GISS-E2-R, ensemble: r1i1p3, exp: rcp60, institute: NASA-GISS, + project: CMIP5} + - &id760 {dataset: GISS-E2-H, ensemble: r1i1p3, exp: rcp60, institute: NASA-GISS, + project: CMIP5} + - &id761 {dataset: MRI-CGCM3, ensemble: r1i1p1, exp: rcp60, institute: MRI, project: CMIP5} + - &id762 {dataset: CESM1-CAM5, ensemble: r3i1p1, exp: rcp60, institute: NSF-DOE-NCAR, + project: CMIP5} + - &id763 {dataset: GFDL-ESM2M, ensemble: r1i1p1, exp: rcp60, institute: NOAA-GFDL, + project: CMIP5} + - &id764 {dataset: CCSM4, ensemble: r6i1p1, exp: rcp60, institute: NCAR, project: CMIP5} + - &id765 {dataset: NorESM1-M, ensemble: r1i1p1, exp: rcp60, institute: NCC, project: CMIP5} + - &id766 {dataset: HadGEM2-ES, ensemble: r4i1p1, exp: rcp60, institute: MOHC, + project: CMIP5} + - &id767 {dataset: GFDL-ESM2G, ensemble: r1i1p1, exp: rcp60, institute: NOAA-GFDL, + project: CMIP5} + - &id768 {dataset: IPSL-CM5A-MR, ensemble: r1i1p1, exp: rcp60, institute: IPSL, + project: CMIP5} + - &id769 {dataset: IPSL-CM5A-LR, ensemble: r1i1p1, exp: rcp60, institute: IPSL, + project: CMIP5} + - &id770 {dataset: bcc-csm1-1, ensemble: r1i1p1, exp: rcp60, institute: BCC, project: CMIP5} + - &id771 {dataset: bcc-csm1-1-m, ensemble: r1i1p1, exp: rcp60, institute: BCC, + project: CMIP5} + - &id772 {dataset: MIROC-ESM, ensemble: r1i1p1, exp: rcp60, institute: MIROC, + project: CMIP5} + - &id773 {dataset: MIROC-ESM-CHEM, ensemble: r1i1p1, exp: rcp60, institute: MIROC, + project: CMIP5} + - &id774 {dataset: FIO-ESM, ensemble: r3i1p1, exp: rcp60, institute: FIO, project: CMIP5} + - &id775 {dataset: MIROC5, ensemble: r3i1p1, exp: rcp60, institute: MIROC, project: CMIP5} + - &id776 {dataset: CSIRO-Mk3-6-0, ensemble: r9i1p1, exp: rcp60, institute: CSIRO-QCCCE, + project: CMIP5} + - &id777 {dataset: HadGEM2-AO, end_year: 2099, ensemble: r1i1p1, exp: rcp85, institute: NIMR-KMA, + project: CMIP5, start_year: 2081} + - &id778 {dataset: NorESM1-ME, ensemble: r1i1p1, exp: rcp85, institute: NCC, project: CMIP5} + - &id779 {dataset: GISS-E2-R, ensemble: r1i1p3, exp: rcp85, institute: NASA-GISS, + project: CMIP5} + - &id780 {dataset: GISS-E2-H, ensemble: r1i1p3, exp: rcp85, institute: NASA-GISS, + project: CMIP5} + - &id781 {dataset: MRI-CGCM3, ensemble: r1i1p1, exp: rcp85, institute: MRI, project: CMIP5} + - &id782 {dataset: 
HadGEM2-CC, ensemble: r1i1p1, exp: rcp85, institute: MOHC, + project: CMIP5} + - &id783 {dataset: CESM1-WACCM, end_year: 2099, ensemble: r2i1p1, exp: rcp85, + institute: NSF-DOE-NCAR, project: CMIP5, start_year: 2081} + - &id784 {dataset: CESM1-CAM5, ensemble: r3i1p1, exp: rcp85, institute: NSF-DOE-NCAR, + project: CMIP5} + - &id785 {dataset: CESM1-BGC, ensemble: r1i1p1, exp: rcp85, institute: NSF-DOE-NCAR, + project: CMIP5} + - &id786 {dataset: GFDL-ESM2M, ensemble: r1i1p1, exp: rcp85, institute: NOAA-GFDL, + project: CMIP5} + - &id787 {dataset: CCSM4, ensemble: r6i1p1, exp: rcp85, institute: NCAR, project: CMIP5} + - &id788 {dataset: NorESM1-M, ensemble: r1i1p1, exp: rcp85, institute: NCC, project: CMIP5} + - &id789 {dataset: MPI-ESM-LR, ensemble: r1i1p1, exp: rcp85, institute: MPI-M, + project: CMIP5} + - &id790 {dataset: MPI-ESM-MR, ensemble: r1i1p1, exp: rcp85, institute: MPI-M, + project: CMIP5} + - &id791 {dataset: HadGEM2-ES, ensemble: r4i1p1, exp: rcp85, institute: MOHC, + project: CMIP5} + - &id792 {dataset: GFDL-ESM2G, ensemble: r1i1p1, exp: rcp85, institute: NOAA-GFDL, + project: CMIP5} + - &id793 {dataset: GFDL-CM3, ensemble: r1i1p1, exp: rcp85, institute: NOAA-GFDL, + project: CMIP5} + - &id794 {dataset: IPSL-CM5A-MR, ensemble: r1i1p1, exp: rcp85, institute: IPSL, + project: CMIP5} + - &id795 {dataset: inmcm4, ensemble: r1i1p1, exp: rcp85, institute: INM, project: CMIP5} + - &id796 {dataset: IPSL-CM5A-LR, ensemble: r1i1p1, exp: rcp85, institute: IPSL, + project: CMIP5} + - &id797 {dataset: BNU-ESM, ensemble: r1i1p1, exp: rcp85, institute: BNU, project: CMIP5} + - &id798 {dataset: bcc-csm1-1-m, ensemble: r1i1p1, exp: rcp85, institute: BCC, + project: CMIP5} + - &id799 {dataset: bcc-csm1-1, ensemble: r1i1p1, exp: rcp85, institute: BCC, project: CMIP5} + - &id800 {dataset: IPSL-CM5B-LR, ensemble: r1i1p1, exp: rcp85, institute: IPSL, + project: CMIP5} + - &id801 {dataset: CanESM2, ensemble: r5i1p1, exp: rcp85, institute: CCCma, project: CMIP5} + - &id802 {dataset: CMCC-CMS, ensemble: r1i1p1, exp: rcp85, institute: CMCC, project: CMIP5} + - &id803 {dataset: CMCC-CM, ensemble: r1i1p1, exp: rcp85, institute: CMCC, project: CMIP5} + - &id804 {dataset: MIROC-ESM-CHEM, ensemble: r1i1p1, exp: rcp85, institute: MIROC, + project: CMIP5} + - &id805 {dataset: MIROC-ESM, ensemble: r1i1p1, exp: rcp85, institute: MIROC, + project: CMIP5} + - &id806 {dataset: MIROC5, ensemble: r3i1p1, exp: rcp85, institute: MIROC, project: CMIP5} + - &id807 {dataset: FGOALS-g2, ensemble: r1i1p1, exp: rcp85, institute: LASG-CESS, + project: CMIP5} + - &id808 {dataset: FIO-ESM, ensemble: r3i1p1, exp: rcp85, institute: FIO, project: CMIP5} + - &id809 {dataset: EC-EARTH, ensemble: r8i1p1, exp: rcp85, institute: ICHEC, project: CMIP5} + - &id810 {dataset: CNRM-CM5, ensemble: r1i1p1, exp: rcp85, institute: CNRM-CERFACS, + project: CMIP5} + - &id811 {dataset: ACCESS1-0, ensemble: r1i1p1, exp: rcp85, institute: CSIRO-BOM, + project: CMIP5} + - &id812 {dataset: ACCESS1-3, ensemble: r1i1p1, exp: rcp85, institute: CSIRO-BOM, + project: CMIP5} + - &id813 {dataset: CSIRO-Mk3-6-0, ensemble: r9i1p1, exp: rcp85, institute: CSIRO-QCCCE, + project: CMIP5} + + fig3c: + description: Fig3c, Map of difference between CMIP6 and CMIP5 multi-model + mean for TAS scaled by global T change. 
+ realms: [atmos] + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP6, + short_name: tas, start_year: 2081} + scripts: + plot_map_diff_scaleT_tas: + ancestors: [tas, tas_cmip6_cmip5_normalized_pattern_diff] + colormap: $diag_scripts/shared/plot/rgb/ipcc_temp_div_10.rgb + diagscript: calc_map_diff_scaleT_mmm_stipp + diff_levs: [-0.8, -0.6, -0.4, -0.2, 0, 0.2, 0.4, 0.6, 0.8] + label: &id814 [2081-2100 relative to 1986-2005] + periods: &id815 [2081] + projection: Robinson + scenarios: &id816 [ssp119, ssp126, ssp245, ssp370, ssp585] + script: tebaldi21esd/plot_pattern.ncl + time_avg: annualclim + sig: false + title: Difference between CMIP6 and CMIP5 patterns ~C~ of temperature + change scaled by global T + tas_cmip6_cmip5_normalized_pattern_diff: + label: *id814 + periods: *id815 + scenarios_cmip5: [rcp26, rcp45, rcp60, rcp85] + scenarios_cmip6: *id816 + script: tebaldi21esd/calc_cmip6_and_cmip5_pattern_diff_scaleT.ncl + time_avg: annualclim + additional_datasets: *fig3_row2_datasets + + fig4d: + description: Fig4d, Time series of RCP2.6, RCP4.5 and RCP8.5 for global average + precipitation change incl. spread + realms: [atmos] + themes: [phys] + variables: + pr: {end_year: 2100, mip: Amon, project: CMIP5, short_name: pr, start_year: 2006} + scripts: + plot_ts_line_mean_spread_pr: + ancestors: [pr, ts_line_mean_spread_pr] + begin_ref_year: 1986 + colormap: $diag_scripts/shared/plot/rgb/ipcc_color_ssp_tseries.rgb + end_ref_year: 2005 + eyears: &id817 [2005, 2100] + label: &id818 [Historical, RCP2.6, RCP4.5, RCP8.5] + model_nr: true + scenarios: &id819 [rcp26, rcp45, rcp85] + script: tebaldi21esd/plot_timeseries_mean_spread_3scenarios.ncl + spread: 1.64 + styleset: CMIP6 + syears: &id820 [1850, 2006] + title: PR, global, CMIP5. 
+ yaxis: Relative to 1986-2005 (%) + ymax: 12 + ymin: -5 + ts_line_mean_spread_pr: + begin_ref_year: 1986 + end_ref_year: 2005 + eyears: *id817 + label: *id818 + model_nr: true + percent: 1 + scenarios: *id819 + script: tebaldi21esd/calc_timeseries_mean_spread_runave.ncl + spread: 1.64 + styleset: CMIP6 + syears: *id820 + additional_datasets: &fig4_cmip5_datasets + - &id825 {dataset: HadGEM2-AO, end_year: 2099, ensemble: r1i1p1, exp: rcp26, institute: NIMR-KMA, + start_year: 2006} + - *id467 + - *id468 + - *id469 + - *id470 + - *id471 + - *id472 + - *id473 + - *id474 + - *id475 + - *id476 + - *id477 + - *id478 + - *id479 + - &id826 {dataset: IPSL-CM5A-LR, ensemble: r4i1p1, exp: rcp26, institute: IPSL} + - *id480 + - *id481 + - *id482 + - *id483 + - *id484 + - *id485 + - &id827 {dataset: FIO-ESM, ensemble: r3i1p1, exp: rcp26, institute: FIO} + - *id486 + - *id487 + - *id488 + - *id489 + - *id490 + - *id491 + - &id828 {dataset: HadGEM2-AO, end_year: 2099, ensemble: r1i1p1, exp: rcp45, institute: NIMR-KMA, + start_year: 2006} + - *id492 + - *id493 + - *id494 + - *id495 + - *id496 + - *id497 + - *id498 + - *id499 + - *id500 + - *id501 + - *id502 + - *id503 + - *id504 + - *id505 + - *id506 + - *id507 + - *id508 + - &id829 {dataset: IPSL-CM5A-LR, ensemble: r4i1p1, exp: rcp45, institute: IPSL} + - *id509 + - *id510 + - *id511 + - *id512 + - *id513 + - *id514 + - *id515 + - *id516 + - *id517 + - *id518 + - *id519 + - *id520 + - *id521 + - *id522 + - *id523 + - *id524 + - *id525 + - *id526 + - &id830 {dataset: HadGEM2-AO, end_year: 2099, ensemble: r1i1p1, exp: rcp85, institute: NIMR-KMA, + start_year: 2006} + - *id527 + - *id528 + - *id529 + - *id530 + - *id531 + - *id532 + - *id533 + - *id534 + - *id535 + - *id536 + - *id537 + - *id538 + - *id539 + - *id540 + - *id541 + - *id542 + - *id543 + - &id831 {dataset: IPSL-CM5A-LR, ensemble: r4i1p1, exp: rcp85, institute: IPSL} + - *id544 + - *id545 + - *id546 + - *id547 + - *id548 + - *id549 + - *id550 + - *id551 + - *id552 + - *id553 + - *id554 + - *id555 + - *id556 + - *id557 + - *id558 + - *id559 + - *id560 + - &id832 {dataset: GISS-E2-R-CC, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: NASA-GISS, start_year: 1850} + - &id833 {dataset: CCSM4, end_year: 2005, ensemble: r6i1p1, exp: historical, institute: NCAR, + start_year: 1850} + - &id834 {dataset: NorESM1-ME, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: NCC, start_year: 1850} + - &id835 {dataset: GISS-E2-H, end_year: 2005, ensemble: r1i1p3, exp: historical, + institute: NASA-GISS, start_year: 1850} + - &id836 {dataset: MRI-CGCM3, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: MRI, start_year: 1850} + - &id837 {dataset: GISS-E2-H-CC, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: NASA-GISS, start_year: 1850} + - &id838 {dataset: GISS-E2-R, end_year: 2005, ensemble: r1i1p3, exp: historical, + institute: NASA-GISS, start_year: 1850} + - &id839 {dataset: CESM1-WACCM, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: NSF-DOE-NCAR, start_year: 1850} + - &id840 {dataset: CESM1-FASTCHEM, end_year: 2005, ensemble: r3i1p1, exp: historical, + institute: NSF-DOE-NCAR, start_year: 1850} + - &id841 {dataset: CESM1-CAM5, end_year: 2005, ensemble: r3i1p1, exp: historical, + institute: NSF-DOE-NCAR, start_year: 1850} + - &id842 {dataset: CESM1-BGC, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: NSF-DOE-NCAR, start_year: 1850} + - &id843 {dataset: NorESM1-M, end_year: 2005, ensemble: r1i1p1, exp: historical, + 
institute: NCC, start_year: 1850} + - &id844 {dataset: MPI-ESM-MR, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: MPI-M, start_year: 1850} + - &id845 {dataset: MPI-ESM-P, end_year: 2005, ensemble: r2i1p1, exp: historical, + institute: MPI-M, start_year: 1850} + - &id846 {dataset: IPSL-CM5A-MR, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: IPSL, start_year: 1850} + - &id847 {dataset: IPSL-CM5B-LR, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: IPSL, start_year: 1850} + - &id848 {dataset: IPSL-CM5A-LR, end_year: 2005, ensemble: r4i1p1, exp: historical, + institute: IPSL, start_year: 1850} + - &id849 {dataset: BNU-ESM, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: BNU, start_year: 1850} + - &id850 {dataset: bcc-csm1-1, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: BCC, start_year: 1850} + - &id851 {dataset: CanESM2, end_year: 2005, ensemble: r5i1p1, exp: historical, + institute: CCCma, start_year: 1850} + - &id852 {dataset: bcc-csm1-1-m, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: BCC, start_year: 1850} + - &id853 {dataset: MPI-ESM-LR, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: MPI-M, start_year: 1850} + - &id854 {dataset: CNRM-CM5-2, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: CNRM-CERFACS, start_year: 1850} + - &id855 {dataset: CNRM-CM5, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: CNRM-CERFACS, start_year: 1850} + - &id856 {dataset: MIROC-ESM, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: MIROC, start_year: 1850} + - &id857 {dataset: CMCC-CESM, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: CMCC, start_year: 1850} + - &id858 {dataset: CMCC-CMS, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: CMCC, start_year: 1850} + - &id859 {dataset: CMCC-CM, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: CMCC, start_year: 1850} + - &id860 {dataset: MIROC-ESM-CHEM, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: MIROC, start_year: 1850} + - &id861 {dataset: FGOALS-s2, end_year: 2005, ensemble: r3i1p1, exp: historical, + institute: LASG-IAP, start_year: 1850} + - &id862 {dataset: FGOALS-g2, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: LASG-CESS, start_year: 1850} + - &id863 {dataset: FIO-ESM, end_year: 2005, ensemble: r3i1p1, exp: historical, + institute: FIO, start_year: 1850} + - &id864 {dataset: inmcm4, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: INM, start_year: 1850} + - &id865 {dataset: EC-EARTH, end_year: 2005, ensemble: r8i1p1, exp: historical, + institute: ICHEC, start_year: 1850} + - &id866 {dataset: ACCESS1-3, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: CSIRO-BOM, start_year: 1850} + - &id867 {dataset: ACCESS1-0, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: CSIRO-BOM, start_year: 1850} + - &id868 {dataset: MIROC5, end_year: 2005, ensemble: r3i1p1, exp: historical, + institute: MIROC, start_year: 1850} + - &id869 {dataset: CSIRO-Mk3-6-0, end_year: 2005, ensemble: r9i1p1, exp: historical, + institute: CSIRO-QCCCE, start_year: 1850} + + fig4c: + description: Fig4c, Time series of SSP1-2.6, SSP2-4.5 and SSP5-8.5 for global average + precipitation change incl. 
spread + realms: [atmos] + themes: [phys] + variables: + pr: {end_year: 2100, mip: Amon, project: CMIP6, short_name: pr, start_year: 2015} + scripts: + plot_ts_line_mean_spread_pr: + ancestors: [pr, ts_line_mean_spread_pr] + begin_ref_year: 1986 + colormap: $diag_scripts/shared/plot/rgb/ipcc_color_ssp_tseries.rgb + end_ref_year: 2005 + eyears: &id821 [2014, 2100] + label: &id822 [Historical, SSP1-2.6, SSP2-4.5, SSP5-8.5] + model_nr: true + scenarios: &id823 [ssp126, ssp245, ssp585] + script: tebaldi21esd/plot_timeseries_mean_spread_3scenarios.ncl + spread: 1.64 + styleset: CMIP6 + syears: &id824 [1850, 2015] + title: PR, global, CMIP6. + yaxis: Relative to 1986-2005 (%) + ymax: 12 + ymin: -5 + ts_line_mean_spread_pr: + begin_ref_year: 1986 + end_ref_year: 2005 + eyears: *id821 + label: *id822 + model_nr: true + percent: 1 + scenarios: *id823 + script: tebaldi21esd/calc_timeseries_mean_spread_runave.ncl + spread: 1.64 + styleset: CMIP6 + syears: *id824 + additional_datasets: + - *id005 + - *id006 + - *id007 + - *id008 + - *id009 + - *id010 + - *id011 + - *id012 + - *id013 + - *id014 + - *id015 + - *id016 + - *id017 + - *id018 + - *id019 + - *id020 + - *id021 + - *id022 + - *id023 + - *id024 + - *id025 + - *id026 + - *id027 + - *id028 + - *id029 +# - *id030 + - *id031 + - *id032 + - *id033 + - *id034 + - *id035 + - *id036 + - *id037 + - *id038 + - *id039 + - *id040 + - *id041 + - *id042 + - *id043 + - *id044 + - *id045 + - *id046 + - *id047 + - *id048 + - *id049 + - *id050 + - *id051 + - *id052 + - *id053 + - *id054 + - *id055 + - *id056 + - *id057 + - *id058 + - *id059 + - *id060 + - *id061 + - *id062 + - *id063 + - *id064 + - *id065 +# - *id066 + - *id067 + - *id068 + - *id069 + - *id070 + - *id071 + - *id072 + - *id073 + - *id074 + - *id075 + - *id076 + - *id077 + - *id078 + - *id079 + - *id080 + - *id081 + - *id082 + - *id083 + - *id084 + - *id085 + - *id086 + - *id087 + - *id088 + - *id089 + - *id090 + - *id091 + - *id092 + - *id093 + - *id094 + - *id095 + - *id096 + - *id097 + - *id098 + - *id099 + - *id100 + - *id101 +# - *id102 + - *id103 + - *id104 + - *id105 + - *id106 + - *id107 + - *id108 + - *id109 + - *id110 + - *id111 + - *id112 + - *id157 + - *id158 + - *id159 + - *id160 + - *id161 + - *id162 + - *id163 + - *id164 + - *id165 + - *id166 + - *id167 + - *id168 + - *id169 + - *id170 + - *id171 + - *id172 + - *id173 + - *id174 + - *id175 + - *id176 + - *id177 + - *id178 + - *id179 + - *id180 + - *id181 + - *id182 + - *id183 +# - *id184 + - *id185 + - *id186 + - *id187 + - *id188 + - *id189 + - *id190 + - *id191 + - *id192 + - *id193 + + fig4b: + description: Fig4b, Time series of RCP2.6, RCP4.5 and RCP8.5 for global average temperature + change incl. spread + realms: [atmos] + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, project: CMIP5, short_name: tas, start_year: 2006} + scripts: + plot_ts_line_mean_spread_tas: + ancestors: [tas, ts_line_mean_spread_tas] + baseline_offset: 0.65 + begin_ref_year: 1986 + colormap: $diag_scripts/shared/plot/rgb/ipcc_color_ssp_tseries.rgb + end_ref_year: 2005 + eyears: &id870 [2005, 2100] + label: &id871 [Historical, RCP2.6, RCP4.5, RCP8.5] + lower_constrained_projections: [0.15, 0.87, 2.43] + mean_constrained_projections: [0.83, 1.71, 3.45] + model_nr: true + scenarios: &id872 [rcp26, rcp45, rcp85] + script: tebaldi21esd/plot_timeseries_mean_spread_constrained_projections.ncl + spread: 1.64 + syears: &id873 [1850, 2006] + title: TAS, global, CMIP5. 
+ upper_constrained_projections: [1.5, 2.56, 4.46] + yaxis: Relative to 1986-2005 (~S~o~N~ C) + ymax: 8 + ymin: -2 + ts_line_mean_spread_tas: + begin_ref_year: 1986 + end_ref_year: 2005 + eyears: *id870 + label: *id871 + model_nr: true + runave_window: 11 + scenarios: *id872 + script: tebaldi21esd/calc_timeseries_mean_spread_runave.ncl + spread: 1.64 + syears: *id873 + additional_datasets: *fig4_cmip5_datasets + + fig4a: + description: Fig4a, Time series of SSP1-2.6, SSP2-4.5 and SSP5-8.5 for global average + temperature change incl. spread and constrained projections bar in 2100 + realms: [atmos] + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, project: CMIP6, short_name: tas, start_year: 2015} + scripts: + plot_ts_line_mean_spread_tas: + ancestors: [tas, ts_line_mean_spread_tas] + baseline_offset: 0.65 + begin_ref_year: 1986 + colormap: $diag_scripts/shared/plot/rgb/ipcc_color_ssp_tseries.rgb + end_ref_year: 2005 + eyears: &id878 [2014, 2100] + label: &id879 [Historical, SSP1-2.6, SSP2-4.5, SSP5-8.5] + lower_constrained_projections: [0.38, 1.2, 2.13] + mean_constrained_projections: [1.12, 2.0, 3.6] + model_nr: true + scenarios: &id880 [ssp126, ssp245, ssp585] + script: tebaldi21esd/plot_timeseries_mean_spread_constrained_projections.ncl + spread: 1.64 + syears: &id881 [1850, 2015] + title: TAS, global, CMIP6. + upper_constrained_projections: [1.85, 2.8, 5.05] + yaxis: Relative to 1986-2005 (~S~o~N~ C) + ymax: 8 + ymin: -2 + ts_line_mean_spread_tas: + begin_ref_year: 1986 + end_ref_year: 2005 + eyears: *id878 + label: *id879 + model_nr: true + runave_window: 11 + scenarios: *id880 + script: tebaldi21esd/calc_timeseries_mean_spread_runave.ncl + spread: 1.64 + syears: *id881 + additional_datasets: + - *id005 + - *id006 + - *id007 + - *id008 + - *id009 + - *id010 + - *id011 + - *id012 + - *id013 + - *id014 + - *id015 + - *id016 + - *id017 + - *id018 + - *id019 + - *id020 + - *id021 + - *id022 + - *id023 + - *id024 + - *id025 + - *id026 + - *id027 + - *id028 + - *id029 +# - *id030 + - *id031 + - *id032 + - *id033 + - *id034 + - *id035 + - *id036 + - *id037 + - *id038 + - *id039 + - *id874 + - *id040 + - *id041 + - *id042 + - *id043 + - *id044 + - *id045 + - *id046 + - *id047 + - *id048 + - *id049 + - *id050 + - *id051 + - *id052 + - *id053 + - *id054 + - *id055 + - *id056 + - *id057 + - *id058 + - *id059 + - *id060 + - *id061 + - *id062 + - *id063 + - *id064 + - *id065 +# - *id066 + - *id067 + - *id068 + - *id069 + - *id070 + - *id071 + - *id072 + - *id073 + - *id074 + - *id075 + - *id875 + - *id076 + - *id077 + - *id078 + - *id079 + - *id080 + - *id081 + - *id082 + - *id083 + - *id084 + - *id085 + - *id086 + - *id087 + - *id088 + - *id089 + - *id090 + - *id091 + - *id092 + - *id093 + - *id094 + - *id095 + - *id096 + - *id097 + - *id098 + - *id099 + - *id100 + - *id101 +# - *id102 + - *id103 + - *id104 + - *id105 + - *id106 + - *id107 + - *id108 + - *id109 + - *id110 + - *id111 + - *id876 + - *id112 + - *id157 + - *id158 + - *id159 + - *id160 + - *id161 + - *id162 + - *id163 + - *id164 + - *id165 + - *id166 + - *id167 + - *id168 + - *id169 + - *id170 + - *id171 + - *id172 + - *id173 + - *id174 + - *id175 + - *id176 + - *id177 + - *id178 + - *id179 + - *id180 + - *id181 + - *id182 + - *id183 +# - *id184 + - *id185 + - *id186 + - *id187 + - *id188 + - *id189 + - *id190 + - *id191 + - *id877 + - *id192 + - *id193 + + fig5b: + description: Fig5b, Timeseries of annual PR Stddev across realization + realms: [atmos] + themes: [phys] + variables: + pr: {mip: Amon, project: CMIP6, 
short_name: pr} + scripts: + plot_ts_line_mean_spread_pr: + ancestors: [pr, ts_line_mean_spread_pr] + colormap: $diag_scripts/shared/plot/rgb/categorical_fig5.rgb + eyears: &id882 [2014, 2100] + n_samples: 100 + runave_window: 1 + sampled_model: CanESM5 + scenarios: &id883 [ssp370] + script: tebaldi21esd/plot_timeseries_across_realization_stddev_runave.ncl + syears: &id884 [1980, 2015] + title: St. Dev. of annual mean PR (mm/day) + trend: true + yaxis: St. Dev. (mm/day) + ymax: 0.03 + ymin: 0.0 + ts_line_mean_spread_pr: + begin_ref_year: 1995 + end_ref_year: 2014 + eyears: *id882 + n_samples: 100 + runave_window: 1 + sampled_model: CanESM5 + scenarios: *id883 + script: tebaldi21esd/calc_timeseries_across_realization_stddev_runave.ncl + syears: *id884 + additional_datasets: &fig5_datasets + - &id885 {dataset: MPI-ESM1-2-HR, end_year: 2014, ensemble: r4i1p1f1, exp: historical, + grid: gn, institute: MPI-M, start_year: 1980} + - &id886 {dataset: MPI-ESM1-2-HR, end_year: 2014, ensemble: r9i1p1f1, exp: historical, + grid: gn, institute: MPI-M, start_year: 1980} + - &id887 {dataset: MPI-ESM1-2-HR, end_year: 2014, ensemble: r7i1p1f1, exp: historical, + grid: gn, institute: MPI-M, start_year: 1980} + - &id888 {dataset: MPI-ESM1-2-HR, end_year: 2014, ensemble: r10i1p1f1, exp: historical, + grid: gn, institute: MPI-M, start_year: 1980} + - &id889 {dataset: MPI-ESM1-2-HR, end_year: 2014, ensemble: r6i1p1f1, exp: historical, + grid: gn, institute: MPI-M, start_year: 1980} + - &id890 {dataset: MPI-ESM1-2-HR, end_year: 2014, ensemble: r3i1p1f1, exp: historical, + grid: gn, institute: MPI-M, start_year: 1980} + - &id891 {dataset: MPI-ESM1-2-HR, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: MPI-M, start_year: 1980} + - &id892 {dataset: MPI-ESM1-2-HR, end_year: 2014, ensemble: r2i1p1f1, exp: historical, + grid: gn, institute: MPI-M, start_year: 1980} + - &id893 {dataset: MPI-ESM1-2-HR, end_year: 2014, ensemble: r8i1p1f1, exp: historical, + grid: gn, institute: MPI-M, start_year: 1980} + - &id894 {dataset: MPI-ESM1-2-HR, end_year: 2014, ensemble: r5i1p1f1, exp: historical, + grid: gn, institute: MPI-M, start_year: 1980} + - &id895 {dataset: MPI-ESM1-2-HR, end_year: 2100, ensemble: r4i1p1f1, exp: ssp370, + grid: gn, institute: DKRZ, start_year: 2015} + - &id896 {dataset: MPI-ESM1-2-HR, end_year: 2100, ensemble: r9i1p1f1, exp: ssp370, + grid: gn, institute: DKRZ, start_year: 2015} + - &id897 {dataset: MPI-ESM1-2-HR, end_year: 2100, ensemble: r7i1p1f1, exp: ssp370, + grid: gn, institute: DKRZ, start_year: 2015} + - &id898 {dataset: MPI-ESM1-2-HR, end_year: 2100, ensemble: r10i1p1f1, exp: ssp370, + grid: gn, institute: DKRZ, start_year: 2015} + - &id899 {dataset: MPI-ESM1-2-HR, end_year: 2100, ensemble: r6i1p1f1, exp: ssp370, + grid: gn, institute: DKRZ, start_year: 2015} + - &id900 {dataset: MPI-ESM1-2-HR, end_year: 2100, ensemble: r3i1p1f1, exp: ssp370, + grid: gn, institute: DKRZ, start_year: 2015} + - &id901 {dataset: MPI-ESM1-2-HR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gn, institute: DKRZ, start_year: 2015} + - &id902 {dataset: MPI-ESM1-2-HR, end_year: 2100, ensemble: r2i1p1f1, exp: ssp370, + grid: gn, institute: DKRZ, start_year: 2015} + - &id903 {dataset: MPI-ESM1-2-HR, end_year: 2100, ensemble: r8i1p1f1, exp: ssp370, + grid: gn, institute: DKRZ, start_year: 2015} + - &id904 {dataset: MPI-ESM1-2-HR, end_year: 2100, ensemble: r5i1p1f1, exp: ssp370, + grid: gn, institute: DKRZ, start_year: 2015} + - &id905 {dataset: CanESM5, end_year: 2014, ensemble: 
r15i1p2f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id906 {dataset: CanESM5, end_year: 2014, ensemble: r22i1p1f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id907 {dataset: CanESM5, end_year: 2014, ensemble: r4i1p1f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id908 {dataset: CanESM5, end_year: 2014, ensemble: r9i1p1f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id909 {dataset: CanESM5, end_year: 2014, ensemble: r23i1p1f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id910 {dataset: CanESM5, end_year: 2014, ensemble: r7i1p1f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id911 {dataset: CanESM5, end_year: 2014, ensemble: r20i1p1f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id912 {dataset: CanESM5, end_year: 2014, ensemble: r16i1p1f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id913 {dataset: CanESM5, end_year: 2014, ensemble: r17i1p1f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id914 {dataset: CanESM5, end_year: 2014, ensemble: r12i1p2f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id915 {dataset: CanESM5, end_year: 2014, ensemble: r6i1p2f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id916 {dataset: CanESM5, end_year: 2014, ensemble: r10i1p1f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id917 {dataset: CanESM5, end_year: 2014, ensemble: r6i1p1f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id918 {dataset: CanESM5, end_year: 2014, ensemble: r3i1p1f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id919 {dataset: CanESM5, end_year: 2014, ensemble: r13i1p1f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id920 {dataset: CanESM5, end_year: 2014, ensemble: r16i1p2f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id921 {dataset: CanESM5, end_year: 2014, ensemble: r20i1p2f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id922 {dataset: CanESM5, end_year: 2014, ensemble: r18i1p2f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id923 {dataset: CanESM5, end_year: 2014, ensemble: r18i1p1f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id924 {dataset: CanESM5, end_year: 2014, ensemble: r19i1p1f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id925 {dataset: CanESM5, end_year: 2014, ensemble: r19i1p2f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id926 {dataset: CanESM5, end_year: 2014, ensemble: r14i1p1f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id927 {dataset: CanESM5, end_year: 2014, ensemble: r21i1p1f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id928 {dataset: CanESM5, end_year: 2014, ensemble: r8i1p2f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id929 {dataset: CanESM5, end_year: 2014, ensemble: r23i1p2f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id930 {dataset: CanESM5, end_year: 2014, ensemble: r1i1p2f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id931 {dataset: CanESM5, end_year: 2014, ensemble: r3i1p2f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id932 {dataset: CanESM5, end_year: 2014, ensemble: 
r21i1p2f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id933 {dataset: CanESM5, end_year: 2014, ensemble: r10i1p2f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id934 {dataset: CanESM5, end_year: 2014, ensemble: r7i1p2f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id935 {dataset: CanESM5, end_year: 2014, ensemble: r2i1p2f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id936 {dataset: CanESM5, end_year: 2014, ensemble: r15i1p1f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id937 {dataset: CanESM5, end_year: 2014, ensemble: r5i1p2f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id938 {dataset: CanESM5, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id939 {dataset: CanESM5, end_year: 2014, ensemble: r11i1p2f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id940 {dataset: CanESM5, end_year: 2014, ensemble: r11i1p1f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id941 {dataset: CanESM5, end_year: 2014, ensemble: r4i1p2f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id942 {dataset: CanESM5, end_year: 2014, ensemble: r9i1p2f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id943 {dataset: CanESM5, end_year: 2014, ensemble: r25i1p2f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id944 {dataset: CanESM5, end_year: 2014, ensemble: r24i1p2f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id945 {dataset: CanESM5, end_year: 2014, ensemble: r25i1p1f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id946 {dataset: CanESM5, end_year: 2014, ensemble: r14i1p2f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id947 {dataset: CanESM5, end_year: 2014, ensemble: r17i1p2f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id948 {dataset: CanESM5, end_year: 2014, ensemble: r2i1p1f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id949 {dataset: CanESM5, end_year: 2014, ensemble: r12i1p1f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id950 {dataset: CanESM5, end_year: 2014, ensemble: r8i1p1f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id951 {dataset: CanESM5, end_year: 2014, ensemble: r24i1p1f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id952 {dataset: CanESM5, end_year: 2014, ensemble: r13i1p2f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id953 {dataset: CanESM5, end_year: 2014, ensemble: r22i1p2f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id954 {dataset: CanESM5, end_year: 2014, ensemble: r5i1p1f1, exp: historical, + grid: gn, institute: CCCma, start_year: 1980} + - &id955 {dataset: UKESM1-0-LL, end_year: 2014, ensemble: r16i1p1f2, exp: historical, + grid: gn, institute: MOHC, start_year: 1980} + - &id956 {dataset: UKESM1-0-LL, end_year: 2014, ensemble: r2i1p1f2, exp: historical, + grid: gn, institute: MOHC, start_year: 1980} + - &id957 {dataset: UKESM1-0-LL, end_year: 2014, ensemble: r9i1p1f2, exp: historical, + grid: gn, institute: MOHC, start_year: 1980} + - &id958 {dataset: UKESM1-0-LL, end_year: 2014, ensemble: r19i1p1f2, exp: historical, + grid: gn, institute: MOHC, start_year: 1980} + - &id959 {dataset: UKESM1-0-LL, end_year: 
2014, ensemble: r1i1p1f2, exp: historical, + grid: gn, institute: MOHC, start_year: 1980} + - &id960 {dataset: UKESM1-0-LL, end_year: 2014, ensemble: r17i1p1f2, exp: historical, + grid: gn, institute: MOHC, start_year: 1980} + - &id961 {dataset: UKESM1-0-LL, end_year: 2014, ensemble: r11i1p1f2, exp: historical, + grid: gn, institute: MOHC, start_year: 1980} + - &id962 {dataset: UKESM1-0-LL, end_year: 2014, ensemble: r3i1p1f2, exp: historical, + grid: gn, institute: MOHC, start_year: 1980} + - &id963 {dataset: UKESM1-0-LL, end_year: 2014, ensemble: r12i1p1f2, exp: historical, + grid: gn, institute: MOHC, start_year: 1980} + - &id964 {dataset: UKESM1-0-LL, end_year: 2014, ensemble: r10i1p1f2, exp: historical, + grid: gn, institute: MOHC, start_year: 1980} + - &id965 {dataset: UKESM1-0-LL, end_year: 2014, ensemble: r18i1p1f2, exp: historical, + grid: gn, institute: MOHC, start_year: 1980} + - &id966 {dataset: UKESM1-0-LL, end_year: 2014, ensemble: r4i1p1f2, exp: historical, + grid: gn, institute: MOHC, start_year: 1980} + - &id967 {dataset: UKESM1-0-LL, end_year: 2014, ensemble: r8i1p1f2, exp: historical, + grid: gn, institute: MOHC, start_year: 1980} + - &id968 {dataset: IPSL-CM6A-LR, end_year: 2014, ensemble: r4i1p1f1, exp: historical, + grid: gr, institute: IPSL, start_year: 1980} + - &id969 {dataset: IPSL-CM6A-LR, end_year: 2014, ensemble: r9i1p1f1, exp: historical, + grid: gr, institute: IPSL, start_year: 1980} + - &id970 {dataset: IPSL-CM6A-LR, end_year: 2014, ensemble: r7i1p1f1, exp: historical, + grid: gr, institute: IPSL, start_year: 1980} + - &id971 {dataset: IPSL-CM6A-LR, end_year: 2014, ensemble: r10i1p1f1, exp: historical, + grid: gr, institute: IPSL, start_year: 1980} + - &id972 {dataset: IPSL-CM6A-LR, end_year: 2014, ensemble: r6i1p1f1, exp: historical, + grid: gr, institute: IPSL, start_year: 1980} + - &id973 {dataset: IPSL-CM6A-LR, end_year: 2014, ensemble: r3i1p1f1, exp: historical, + grid: gr, institute: IPSL, start_year: 1980} + - &id974 {dataset: IPSL-CM6A-LR, end_year: 2014, ensemble: r14i1p1f1, exp: historical, + grid: gr, institute: IPSL, start_year: 1980} + - &id975 {dataset: IPSL-CM6A-LR, end_year: 2014, ensemble: r1i1p1f1, exp: historical, + grid: gr, institute: IPSL, start_year: 1980} + - &id976 {dataset: IPSL-CM6A-LR, end_year: 2014, ensemble: r2i1p1f1, exp: historical, + grid: gr, institute: IPSL, start_year: 1980} + - &id977 {dataset: IPSL-CM6A-LR, end_year: 2014, ensemble: r8i1p1f1, exp: historical, + grid: gr, institute: IPSL, start_year: 1980} + - &id978 {dataset: IPSL-CM6A-LR, end_year: 2014, ensemble: r5i1p1f1, exp: historical, + grid: gr, institute: IPSL, start_year: 1980} + - &id979 {dataset: MPI-ESM1-2-LR, end_year: 2014, ensemble: r4i1p1f1, exp: historical, + grid: gn, institute: MPI-M, start_year: 1980} + - &id980 {dataset: MPI-ESM1-2-LR, end_year: 2014, ensemble: r9i1p1f1, exp: historical, + grid: gn, institute: MPI-M, start_year: 1980} + - &id981 {dataset: MPI-ESM1-2-LR, end_year: 2014, ensemble: r7i1p1f1, exp: historical, + grid: gn, institute: MPI-M, start_year: 1980} + - &id982 {dataset: MPI-ESM1-2-LR, end_year: 2014, ensemble: r10i1p1f1, exp: historical, + grid: gn, institute: MPI-M, start_year: 1980} + - &id983 {dataset: MPI-ESM1-2-LR, end_year: 2014, ensemble: r6i1p1f1, exp: historical, + grid: gn, institute: MPI-M, start_year: 1980} + - &id984 {dataset: MPI-ESM1-2-LR, end_year: 2014, ensemble: r3i1p1f1, exp: historical, + grid: gn, institute: MPI-M, start_year: 1980} + - &id985 {dataset: MPI-ESM1-2-LR, end_year: 2014, ensemble: r1i1p1f1, exp: 
historical, + grid: gn, institute: MPI-M, start_year: 1980} + - &id986 {dataset: MPI-ESM1-2-LR, end_year: 2014, ensemble: r2i1p1f1, exp: historical, + grid: gn, institute: MPI-M, start_year: 1980} + - &id987 {dataset: MPI-ESM1-2-LR, end_year: 2014, ensemble: r8i1p1f1, exp: historical, + grid: gn, institute: MPI-M, start_year: 1980} + - &id988 {dataset: MPI-ESM1-2-LR, end_year: 2014, ensemble: r5i1p1f1, exp: historical, + grid: gn, institute: MPI-M, start_year: 1980} + - &id989 {dataset: UKESM1-0-LL, end_year: 2100, ensemble: r16i1p1f2, exp: ssp370, + grid: gn, institute: MOHC, start_year: 2015} + - &id990 {dataset: UKESM1-0-LL, end_year: 2100, ensemble: r2i1p1f2, exp: ssp370, + grid: gn, institute: MOHC, start_year: 2015} + - &id991 {dataset: UKESM1-0-LL, end_year: 2100, ensemble: r9i1p1f2, exp: ssp370, + grid: gn, institute: MOHC, start_year: 2015} + - *id113 + - &id992 {dataset: UKESM1-0-LL, end_year: 2100, ensemble: r11i1p1f2, exp: ssp370, + grid: gn, institute: MOHC, start_year: 2015} + - &id993 {dataset: UKESM1-0-LL, end_year: 2100, ensemble: r3i1p1f2, exp: ssp370, + grid: gn, institute: MOHC, start_year: 2015} + - &id994 {dataset: UKESM1-0-LL, end_year: 2100, ensemble: r12i1p1f2, exp: ssp370, + grid: gn, institute: MOHC, start_year: 2015} + - &id995 {dataset: UKESM1-0-LL, end_year: 2100, ensemble: r10i1p1f2, exp: ssp370, + grid: gn, institute: MOHC, start_year: 2015} + - &id996 {dataset: UKESM1-0-LL, end_year: 2100, ensemble: r4i1p1f2, exp: ssp370, + grid: gn, institute: MOHC, start_year: 2015} + - &id997 {dataset: UKESM1-0-LL, end_year: 2100, ensemble: r8i1p1f2, exp: ssp370, + grid: gn, institute: MOHC, start_year: 2015} + - *id114 + - &id998 {dataset: CanESM5, end_year: 2100, ensemble: r2i1p1f1, exp: ssp370, grid: gn, + institute: CCCma, start_year: 2015} + - &id999 {dataset: CanESM5, end_year: 2100, ensemble: r3i1p1f1, exp: ssp370, grid: gn, + institute: CCCma, start_year: 2015} + - &id1000 {dataset: CanESM5, end_year: 2100, ensemble: r4i1p1f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1001 {dataset: CanESM5, end_year: 2100, ensemble: r5i1p1f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1002 {dataset: CanESM5, end_year: 2100, ensemble: r1i1p2f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1003 {dataset: CanESM5, end_year: 2100, ensemble: r2i1p2f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1004 {dataset: CanESM5, end_year: 2100, ensemble: r3i1p2f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1005 {dataset: CanESM5, end_year: 2100, ensemble: r4i1p2f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1006 {dataset: CanESM5, end_year: 2100, ensemble: r5i1p2f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1007 {dataset: CanESM5, end_year: 2100, ensemble: r9i1p1f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1008 {dataset: CanESM5, end_year: 2100, ensemble: r7i1p1f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1009 {dataset: CanESM5, end_year: 2100, ensemble: r10i1p1f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1010 {dataset: CanESM5, end_year: 2100, ensemble: r6i1p1f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1011 {dataset: CanESM5, end_year: 2100, ensemble: r8i1p1f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1012 {dataset: CanESM5, end_year: 2100, ensemble: r15i1p2f1, exp: ssp370, + grid: gn, 
institute: CCCma, start_year: 2015} + - &id1013 {dataset: CanESM5, end_year: 2100, ensemble: r22i1p1f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1014 {dataset: CanESM5, end_year: 2100, ensemble: r23i1p1f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1015 {dataset: CanESM5, end_year: 2100, ensemble: r20i1p1f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1016 {dataset: CanESM5, end_year: 2100, ensemble: r16i1p1f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1017 {dataset: CanESM5, end_year: 2100, ensemble: r17i1p1f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1018 {dataset: CanESM5, end_year: 2100, ensemble: r12i1p2f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1019 {dataset: CanESM5, end_year: 2100, ensemble: r6i1p2f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1020 {dataset: CanESM5, end_year: 2100, ensemble: r13i1p1f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1021 {dataset: CanESM5, end_year: 2100, ensemble: r16i1p2f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1022 {dataset: CanESM5, end_year: 2100, ensemble: r20i1p2f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1023 {dataset: CanESM5, end_year: 2100, ensemble: r18i1p2f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1024 {dataset: CanESM5, end_year: 2100, ensemble: r18i1p1f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1025 {dataset: CanESM5, end_year: 2100, ensemble: r19i1p1f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1026 {dataset: CanESM5, end_year: 2100, ensemble: r19i1p2f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1027 {dataset: CanESM5, end_year: 2100, ensemble: r14i1p1f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1028 {dataset: CanESM5, end_year: 2100, ensemble: r21i1p1f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1029 {dataset: CanESM5, end_year: 2100, ensemble: r8i1p2f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1030 {dataset: CanESM5, end_year: 2100, ensemble: r23i1p2f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1031 {dataset: CanESM5, end_year: 2100, ensemble: r21i1p2f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1032 {dataset: CanESM5, end_year: 2100, ensemble: r10i1p2f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1033 {dataset: CanESM5, end_year: 2100, ensemble: r7i1p2f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1034 {dataset: CanESM5, end_year: 2100, ensemble: r15i1p1f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1035 {dataset: CanESM5, end_year: 2100, ensemble: r11i1p2f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1036 {dataset: CanESM5, end_year: 2100, ensemble: r11i1p1f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1037 {dataset: CanESM5, end_year: 2100, ensemble: r9i1p2f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1038 {dataset: CanESM5, end_year: 2100, ensemble: r25i1p2f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1039 {dataset: CanESM5, end_year: 2100, ensemble: r24i1p2f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1040 {dataset: CanESM5, 
end_year: 2100, ensemble: r25i1p1f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1041 {dataset: CanESM5, end_year: 2100, ensemble: r14i1p2f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1042 {dataset: CanESM5, end_year: 2100, ensemble: r17i1p2f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1043 {dataset: CanESM5, end_year: 2100, ensemble: r12i1p1f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1044 {dataset: CanESM5, end_year: 2100, ensemble: r24i1p1f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1045 {dataset: CanESM5, end_year: 2100, ensemble: r13i1p2f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1046 {dataset: CanESM5, end_year: 2100, ensemble: r22i1p2f1, exp: ssp370, + grid: gn, institute: CCCma, start_year: 2015} + - &id1047 {dataset: IPSL-CM6A-LR, end_year: 2100, ensemble: r4i1p1f1, exp: ssp370, + grid: gr, institute: IPSL, start_year: 2015} + - &id1048 {dataset: IPSL-CM6A-LR, end_year: 2100, ensemble: r9i1p1f1, exp: ssp370, + grid: gr, institute: IPSL, start_year: 2015} + - &id1049 {dataset: IPSL-CM6A-LR, end_year: 2100, ensemble: r7i1p1f1, exp: ssp370, + grid: gr, institute: IPSL, start_year: 2015} + - &id1050 {dataset: IPSL-CM6A-LR, end_year: 2100, ensemble: r10i1p1f1, exp: ssp370, + grid: gr, institute: IPSL, start_year: 2015} + - &id1051 {dataset: IPSL-CM6A-LR, end_year: 2100, ensemble: r6i1p1f1, exp: ssp370, + grid: gr, institute: IPSL, start_year: 2015} + - &id1052 {dataset: IPSL-CM6A-LR, end_year: 2100, ensemble: r3i1p1f1, exp: ssp370, + grid: gr, institute: IPSL, start_year: 2015} + - &id1053 {dataset: IPSL-CM6A-LR, end_year: 2100, ensemble: r14i1p1f1, exp: ssp370, + grid: gr, institute: IPSL, start_year: 2015} + - *id115 + - &id1054 {dataset: IPSL-CM6A-LR, end_year: 2100, ensemble: r2i1p1f1, exp: ssp370, + grid: gr, institute: IPSL, start_year: 2015} + - &id1055 {dataset: IPSL-CM6A-LR, end_year: 2100, ensemble: r8i1p1f1, exp: ssp370, + grid: gr, institute: IPSL, start_year: 2015} + - &id1056 {dataset: IPSL-CM6A-LR, end_year: 2100, ensemble: r5i1p1f1, exp: ssp370, + grid: gr, institute: IPSL, start_year: 2015} + - &id1057 {dataset: MPI-ESM1-2-LR, end_year: 2100, ensemble: r4i1p1f1, exp: ssp370, + grid: gn, institute: MPI-M, start_year: 2015} + - &id1058 {dataset: MPI-ESM1-2-LR, end_year: 2100, ensemble: r9i1p1f1, exp: ssp370, + grid: gn, institute: MPI-M, start_year: 2015} + - &id1059 {dataset: MPI-ESM1-2-LR, end_year: 2100, ensemble: r7i1p1f1, exp: ssp370, + grid: gn, institute: MPI-M, start_year: 2015} + - &id1060 {dataset: MPI-ESM1-2-LR, end_year: 2100, ensemble: r10i1p1f1, exp: ssp370, + grid: gn, institute: MPI-M, start_year: 2015} + - &id1061 {dataset: MPI-ESM1-2-LR, end_year: 2100, ensemble: r6i1p1f1, exp: ssp370, + grid: gn, institute: MPI-M, start_year: 2015} + - &id1062 {dataset: MPI-ESM1-2-LR, end_year: 2100, ensemble: r3i1p1f1, exp: ssp370, + grid: gn, institute: MPI-M, start_year: 2015} + - *id116 + - &id1063 {dataset: MPI-ESM1-2-LR, end_year: 2100, ensemble: r2i1p1f1, exp: ssp370, + grid: gn, institute: MPI-M, start_year: 2015} + - &id1064 {dataset: MPI-ESM1-2-LR, end_year: 2100, ensemble: r8i1p1f1, exp: ssp370, + grid: gn, institute: MPI-M, start_year: 2015} + - &id1065 {dataset: MPI-ESM1-2-LR, end_year: 2100, ensemble: r5i1p1f1, exp: ssp370, + grid: gn, institute: MPI-M, start_year: 2015} + + fig5d: + description: Fig5d, Timeseries of decadal PR Stddev across realization + realms: [atmos] + themes: [phys] + variables: 
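+      # As fig5b, but runave_window: 11 below first applies an 11-year running
+      # mean, so the across-realization spread refers to decadal rather than
+      # annual means (cf. the 'decadal mean PR' title).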
+ pr: {mip: Amon, project: CMIP6, short_name: pr} + scripts: + plot_ts_line_mean_spread_pr: + ancestors: [pr, ts_line_mean_spread_pr] + colormap: $diag_scripts/shared/plot/rgb/categorical_fig5.rgb + eyears: &id1066 [2014, 2100] + n_samples: 100 + runave_window: 11 + sampled_model: CanESM5 + scenarios: &id1067 [ssp370] + script: tebaldi21esd/plot_timeseries_across_realization_stddev_runave.ncl + syears: &id1068 [1980, 2015] + title: St. Dev. of decadal mean PR (mm/day) + trend: true + yaxis: St. Dev. (mm/day) + ymax: 0.015 + ymin: 0.0 + ts_line_mean_spread_pr: + begin_ref_year: 1995 + end_ref_year: 2014 + eyears: *id1066 + n_samples: 100 + runave_window: 11 + sampled_model: CanESM5 + scenarios: *id1067 + script: tebaldi21esd/calc_timeseries_across_realization_stddev_runave.ncl + syears: *id1068 + additional_datasets: *fig5_datasets + + fig5a: + description: Fig5a, Timeseries of annual TAS Stddev across realization + realms: [atmos] + themes: [phys] + variables: + tas: {mip: Amon, project: CMIP6, short_name: tas} + scripts: + plot_ts_line_mean_spread_tas: + ancestors: [tas, ts_line_mean_spread_tas] + colormap: $diag_scripts/shared/plot/rgb/categorical_fig5.rgb + eyears: &id1069 [2014, 2100] + n_samples: 100 + runave_window: 1 + sampled_model: CanESM5 + scenarios: &id1070 [ssp370] + script: tebaldi21esd/plot_timeseries_across_realization_stddev_runave.ncl + syears: &id1071 [1980, 2015] + title: St. Dev. of annual mean TAS (~S~o~N~ C) + trend: true + yaxis: St. Dev (~S~o~N~ C) + ymax: 0.3 + ymin: 0.0 + ts_line_mean_spread_tas: + begin_ref_year: 1995 + end_ref_year: 2014 + eyears: *id1069 + n_samples: 100 + runave_window: 1 + sampled_model: CanESM5 + scenarios: *id1070 + script: tebaldi21esd/calc_timeseries_across_realization_stddev_runave.ncl + syears: *id1071 + additional_datasets: *fig5_datasets + + fig5c: + description: Fig5c, Timeseries of decadal TAS Stddev across realization + realms: [atmos] + themes: [phys] + variables: + tas: {mip: Amon, project: CMIP6, short_name: tas} + scripts: + plot_ts_line_mean_spread_tas: + ancestors: [tas, ts_line_mean_spread_tas] + colormap: $diag_scripts/shared/plot/rgb/categorical_fig5.rgb + eyears: &id1072 [2014, 2100] + n_samples: 100 + runave_window: 11 + sampled_model: CanESM5 + scenarios: &id1073 [ssp370] + script: tebaldi21esd/plot_timeseries_across_realization_stddev_runave.ncl + syears: &id1074 [1980, 2015] + title: St. Dev. of decadal mean TAS (~S~o~N~ C) + trend: true + yaxis: St. 
Dev (~S~o~N~ C)
+        ymax: 0.2
+        ymin: 0.0
+      ts_line_mean_spread_tas:
+        begin_ref_year: 1995
+        end_ref_year: 2014
+        eyears: *id1072
+        n_samples: 100
+        runave_window: 11
+        sampled_model: CanESM5
+        scenarios: *id1073
+        script: tebaldi21esd/calc_timeseries_across_realization_stddev_runave.ncl
+        syears: *id1074
+    additional_datasets: *fig5_datasets
+
+  fig6d_IAV:
+    description: Fig6d, Calculate precipitation interannual variability for hatching
+    realms: [atmos]
+    themes: [varmodes]
+    variables:
+      tas: {exp: piControl, mip: Amon, preprocessor: preproc_map, project: CMIP6,
+        short_name: pr}
+    scripts:
+      calc_IAV_for_stippandhatch_pr: {iavmode: mmm, periodlength: 20.0, script: tebaldi21esd/calc_IAV_hatching.ncl,
+        styleset: CMIP6, time_avg: annualclim}
+    additional_datasets: &fig6_IAV_datasets
+      - &id1075 {dataset: FGOALS-g3, end_year: 500, ensemble: r1i1p1f1, exp: piControl, grid: gn, institute: CAS, start_year: 200}
+      - &id1076 {dataset: CanESM5, end_year: 5501, ensemble: r1i1p1f1, exp: piControl, grid: gn, institute: CCCma, start_year: 5201}
+      - &id1077 {dataset: UKESM1-0-LL, end_year: 2260, ensemble: r1i1p1f2, exp: piControl, grid: gn, institute: MOHC, start_year: 1960}
+      - &id1078 {dataset: MIROC6, end_year: 3500, ensemble: r1i1p1f1, exp: piControl, grid: gn, institute: MIROC, start_year: 3200}
+      - &id1079 {dataset: MIROC-ES2L, end_year: 2150, ensemble: r1i1p1f2, exp: piControl, grid: gn, institute: MIROC, start_year: 1850}
+      - &id1080 {dataset: IPSL-CM6A-LR, end_year: 2150, ensemble: r1i1p1f1, exp: piControl, grid: gr, institute: IPSL, start_year: 1850}
+      - &id1081 {dataset: CESM2-WACCM, end_year: 301, ensemble: r1i1p1f1, exp: piControl, grid: gn, institute: NCAR, start_year: 1}
+      - &id1082 {dataset: MRI-ESM2-0, end_year: 2150, ensemble: r1i1p1f1, exp: piControl, grid: gn, institute: MRI, start_year: 1850}
+      - &id1083 {dataset: CNRM-ESM2-1, end_year: 2150, ensemble: r1i1p1f2, exp: piControl, grid: gr, institute: CNRM-CERFACS, start_year: 1850}
+
+  fig6c_IAV:
+    description: Fig6c, Calculate temperature interannual variability for hatching
+    realms: [atmos]
+    themes: [varmodes]
+    variables:
+      tas: {exp: piControl, mip: Amon, preprocessor: preproc_map, project: CMIP6}
+    scripts:
+      calc_IAV_for_stippandhatch_tas: {iavmode: mmm, periodlength: 20.0, script: tebaldi21esd/calc_IAV_hatching.ncl,
+        styleset: CMIP6, time_avg: annualclim}
+    additional_datasets: *fig6_IAV_datasets
+
+  fig6d:
+    description: Fig6d, Patterns of global PR for SSP5-3.4 with hatching. Due to a mistake in the paper, this corresponds to fig7d in the paper.
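+    # Hatching is derived from the piControl interannual variability computed
+    # by fig6d_IAV (see the ancestors of calc_map_diff_scaleT_pr below).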
+ realms: [atmos] + themes: [phys] + variables: + pr: {mip: Amon, preprocessor: preproc_map, project: CMIP6, short_name: pr} + scripts: + calc_map_diff_scaleT_pr: + ancestors: [pr, fig6d_IAV/calc_IAV_for_stippandhatch_pr] + iavmode: mmm + label: &id1084 [2081-2100 relative to 1995-2014] + not_sig: true + percent: 1 + periods: &id1085 [2081] + scenarios: &id1086 [ssp534-over] + script: tebaldi21esd/calc_pattern_stippling_hatching.ncl + time_avg: annualclim + plot_map_diff_scaleT_pr: + ancestors: [pr, calc_map_diff_scaleT_pr] + colormap: $diag_scripts/shared/plot/rgb/ipcc_prec_div_14.rgb + diagscript: calc_map_diff_scaleT_mmm_stipp + diff_levs: [-30, -25, -20, -15, -10, -5, 0, 5, 10, 15, 20, 25, 30] + label: *id1084 + not_sig: true + percent: 1 + periods: *id1085 + projection: Robinson + scenarios: *id1086 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: SSP5-3.4-OS PR change + additional_datasets: &fig6_ssp534_datasets + - *id205 + - *id211 + - *id212 + - *id215 + - *id216 + - *id222 + - *id223 + - *id227 + - *id363 + - *id370 + - *id371 + - *id376 + - *id377 + - *id384 + - *id385 + - *id390 + - &id1087 {dataset: CNRM-ESM2-1, end_year: 2014, ensemble: r2i1p1f2, exp: historical, + grid: gr, institute: CNRM-CERFACS, start_year: 1995} + - &id1098 {dataset: FGOALS-g3, end_year: 2100, ensemble: r1i1p1f1, exp: ssp534-over, + grid: gn, institute: CAS, start_year: 2081} + - &id1088 {dataset: CanESM5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp534-over, + grid: gn, institute: CCCma, start_year: 2081} + - &id1099 {dataset: UKESM1-0-LL, end_year: 2100, ensemble: r1i1p1f2, exp: ssp534-over, + grid: gn, institute: MOHC, start_year: 2081} + - &id1089 {dataset: MIROC6, end_year: 2100, ensemble: r1i1p1f1, exp: ssp534-over, + grid: gn, institute: MIROC, start_year: 2081} + - &id1100 {dataset: MIROC-ES2L, end_year: 2100, ensemble: r1i1p1f2, exp: ssp534-over, + grid: gn, institute: MIROC, start_year: 2081} + - &id1090 {dataset: IPSL-CM6A-LR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp534-over, + grid: gr, institute: IPSL, start_year: 2081} + - &id1091 {dataset: CESM2-WACCM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp534-over, + grid: gn, institute: NCAR, start_year: 2081} + - &id1092 {dataset: MRI-ESM2-0, end_year: 2100, ensemble: r1i1p1f1, exp: ssp534-over, + grid: gn, institute: MRI, start_year: 2081} + - &id1093 {dataset: CNRM-ESM2-1, end_year: 2100, ensemble: r2i1p1f2, exp: ssp534-over, + grid: gr, institute: CNRM-CERFACS, start_year: 2081} + - &id1094 {dataset: CNRM-ESM2-1, end_year: 2100, ensemble: r2i1p1f2, exp: ssp585, + grid: gr, institute: CNRM-CERFACS, start_year: 2081} + + fig6f: + description: fig6f, Pattern of the difference between global PR for SSP5-8.5 and SSP5-3.4-OS + realms: [atmos] + themes: [phys] + variables: + tas: {mip: Amon, preprocessor: preproc_map, project: CMIP6, short_name: tas} + pr: {mip: Amon, preprocessor: preproc_map, project: CMIP6, short_name: pr} + scripts: + calc_map_diff_mmm_stipp_pattern_comparison: + label: &id1095 [2081-2100] + percent: 1 + periods: &id1096 [2081] + scenarios: &id1097 [ssp534-over, ssp585] + script: tebaldi21esd/calc_pattern_comparison.ncl + time_avg: annualclim + plot_map_diff_mmm_stipp: + ancestors: [pr, fig6f/calc_map_diff_mmm_stipp_pattern_comparison] + colormap: $diag_scripts/shared/plot/rgb/ipcc_prec_div_14.rgb + diagscript: calc_map_diff_mmm_stipp_pattern_comparison + diff_levs: [-30, -25, -20, -15, -10, -5, 0, 5, 10, 15, 20, 25, 30] + label: *id1095 + max_hori: 2 + max_vert: 2 + percent: 1 + periods: 
*id1096 + projection: Robinson + scenarios: *id1097 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: Difference between SSP5-8.5 and SSP5-3.4 precipitation pattern + additional_datasets: &fig6_ssp585vssp534_datasets + - *id211 + - *id215 + - *id222 + - *id223 + - *id227 + - *id370 + - *id376 + - *id384 + - *id385 + - *id390 + - *id1087 + - *id1088 + - *id1089 + - *id1090 + - *id1091 + - *id1092 + - *id1093 + - *id1094 + - &id1104 {dataset: UKESM1-0-LL, end_year: 2014, ensemble: r2i1p1f2, exp: historical, + grid: gn, institute: MOHC, start_year: 1995} + - &id1105 {dataset: UKESM1-0-LL, end_year: 2100, ensemble: r2i1p1f2, exp: ssp585, + grid: gn, institute: MOHC, start_year: 2081} + - &id1106 {dataset: UKESM1-0-LL, end_year: 2100, ensemble: r2i1p1f2, exp: ssp534-over, + grid: gn, institute: MOHC, start_year: 2081} + + fig6c: + description: Fig6c, Patterns of global TAS for SSP5-3.4 with hatching. Due to a mistake in the paper, this corresponds to fig7c in the paper. + realms: [atmos] + themes: [phys] + variables: + tas: {mip: Amon, preprocessor: preproc_map, project: CMIP6, short_name: tas} + scripts: + calc_map_diff_scaleT_tas: + ancestors: [tas, fig6c_IAV/calc_IAV_for_stippandhatch_tas] + iavmode: mmm + label: &id1101 [2081-2100 relative to 1995-2014] + not_sig: true + periods: &id1102 [2081] + scenarios: &id1103 [ssp534-over] + script: tebaldi21esd/calc_pattern_stippling_hatching.ncl + time_avg: annualclim + plot_map_diff_scaleT_tas: + ancestors: [tas, calc_map_diff_scaleT_tas] + colormap: $diag_scripts/shared/plot/rgb/ipcc_temp_div_18.rgb + diagscript: calc_map_diff_scaleT_mmm_stipp + diff_levs: [0, 1, 2, 3, 4, 5, 6, 7, 8] + label: *id1101 + not_sig: true + periods: *id1102 + projection: Robinson + scenarios: *id1103 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: SSP5-3.4-OS TAS change + additional_datasets: *fig6_ssp534_datasets + + fig6e: + description: Fig6e, Pattern of the difference between global TAS for SSP5-8.5 and SSP5-3.4-OS + realms: [atmos] + themes: [phys] + variables: + tas: {mip: Amon, preprocessor: preproc_map, project: CMIP6, short_name: tas} + scripts: + calc_map_diff_mmm_stipp_pattern_comparison: + label: &id1107 [2081-2100] + periods: &id1108 [2081] + scenarios: &id1109 [ssp534-over, ssp585] + script: tebaldi21esd/calc_pattern_comparison.ncl + time_avg: annualclim + plot_map_diff_mmm_stipp: + ancestors: [tas, calc_map_diff_mmm_stipp_pattern_comparison] + colormap: $diag_scripts/shared/plot/rgb/ipcc_temp_div_18.rgb + diagscript: calc_map_diff_mmm_stipp_pattern_comparison + diff_levs: [0, 1, 2, 3, 4, 5, 6, 7, 8] + label: *id1107 + periods: *id1108 + projection: Robinson + scenarios: *id1109 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: Difference between SSP5-8.5 and SSP5-3.4 temperature pattern + additional_datasets: *fig6_ssp585vssp534_datasets + + fig6b: + description: fig6b, Time series of PR percentage change comparing SSP5-8.5 and SSP5-3.4-OS + incl. 
spread + realms: [atmos] + themes: [phys] + variables: + pr: {mip: Amon, project: CMIP6, short_name: pr} + scripts: + plot_ts_line_mean_spread_pr: + ancestors: [pr, ts_line_mean_spread_pr] + begin_ref_year: 1995 + colormap: $diag_scripts/shared/plot/rgb/ipcc_color_ssp_tseries.rgb + end_ref_year: 2014 + eyears: &id1110 [2014, 2100, 2100] + label: &id1111 [Historical, SSP5-3.4-OS, SSP5-8.5] + model_nr: true + scenarios: &id1112 [ssp534-over, ssp585] + script: tebaldi21esd/plot_timeseries_mean_spread_ssp5.ncl + spread: 1.64 + styleset: CMIP6 + syears: &id1113 [1995, 2040, 2015] + title: PR, global + yaxis: Relative to 1995-2014 (%) + ymax: 14 + ymin: -3 + ts_line_mean_spread_pr: + begin_ref_year: 1995 + end_ref_year: 2014 + eyears: *id1110 + label: *id1111 + model_nr: true + percent: 1 + runave_window: 11 + scenarios: *id1112 + script: tebaldi21esd/calc_timeseries_mean_spread_ssp5.ncl + spread: 1.64 + styleset: CMIP6 + syears: *id1113 + additional_datasets: &fig6_ts_datasets + - *id079 + - *id086 + - *id087 + - *id092 + - *id093 + - *id100 + - *id101 + - *id106 + - *id205 + - *id211 + - *id212 + - *id215 + - *id216 + - *id222 + - *id223 + - *id227 + - *id1087 + - &id1114 {dataset: FGOALS-g3, end_year: 2100, ensemble: r1i1p1f1, exp: ssp534-over, + grid: gn, institute: CAS, start_year: 2040} + - &id1115 {dataset: CanESM5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp534-over, + grid: gn, institute: CCCma, start_year: 2040} + - &id1116 {dataset: UKESM1-0-LL, end_year: 2100, ensemble: r1i1p1f2, exp: ssp534-over, + grid: gn, institute: MOHC, start_year: 2040} + - &id1117 {dataset: MIROC6, end_year: 2100, ensemble: r1i1p1f1, exp: ssp534-over, + grid: gn, institute: MIROC, start_year: 2040} + - &id1118 {dataset: MIROC-ES2L, end_year: 2100, ensemble: r1i1p1f2, exp: ssp534-over, + grid: gn, institute: MIROC, start_year: 2040} + - &id1119 {dataset: IPSL-CM6A-LR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp534-over, + grid: gr, institute: IPSL, start_year: 2040} + - &id1120 {dataset: CESM2-WACCM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp534-over, + grid: gn, institute: NCAR, start_year: 2040} + - &id1121 {dataset: MRI-ESM2-0, end_year: 2100, ensemble: r1i1p1f1, exp: ssp534-over, + grid: gn, institute: MRI, start_year: 2040} + - &id1122 {dataset: CNRM-ESM2-1, end_year: 2100, ensemble: r2i1p1f2, exp: ssp534-over, + grid: gr, institute: CNRM-CERFACS, start_year: 2040} + - &id1123 {dataset: CNRM-ESM2-1, end_year: 2100, ensemble: r2i1p1f2, exp: ssp585, + grid: gr, institute: CNRM-CERFACS, start_year: 2015} + + fig6a: + description: Fig6a, Time series of global TAS change comparing SSP5-8.5 and SSP5-3.4-OS + including spread + realms: [atmos] + themes: [phys] + variables: + tas: {mip: Amon, project: CMIP6, short_name: tas} + scripts: + plot_ts_line_mean_spread_tas: + ancestors: [tas, ts_line_mean_spread_tas] + begin_ref_year: 1995 + colormap: $diag_scripts/shared/plot/rgb/ipcc_color_ssp_tseries.rgb + end_ref_year: 2014 + eyears: &id1124 [2014, 2100, 2100] + label: &id1125 [Historical, SSP5-3.4-OS, SSP5-8.5] + model_nr: true + scenarios: &id1126 [ssp534-over, ssp585] + script: tebaldi21esd/plot_timeseries_mean_spread_ssp5.ncl + spread: 1.64 + syears: &id1127 [1995, 2040, 2015] + title: TAS, global + yaxis: Relative to 1995-2014 (~S~o~N~ C) + ymax: 8 + ymin: -2 + ts_line_mean_spread_tas: + begin_ref_year: 1995 + end_ref_year: 2014 + eyears: *id1124 + label: *id1125 + model_nr: true + runave_window: 11 + scenarios: *id1126 + script: tebaldi21esd/calc_timeseries_mean_spread_ssp5.ncl + spread: 1.64 + syears: 
*id1127
+    additional_datasets: *fig6_ts_datasets
+
+  fig7d_IAV:
+    description: Fig7d, Calculate precipitation interannual variability for hatching
+    realms: [atmos]
+    themes: [varmodes]
+    variables:
+      tas: {exp: piControl, mip: Amon, preprocessor: preproc_map, project: CMIP6,
+        short_name: pr}
+    scripts:
+      calc_IAV_for_stippandhatch_pr: {iavmode: mmm, periodlength: 20.0, script: tebaldi21esd/calc_IAV_hatching.ncl,
+        styleset: CMIP6, time_avg: annualclim}
+    additional_datasets:
+      - *id1075
+      - *id1076
+      - *id1078
+      - *id1080
+      - *id1082
+      - *id1083
+      - &id1128 {dataset: GISS-E2-1-G, end_year: 7850, ensemble: r1i1p1f2, exp: piControl, grid: gn, institute: NASA-GISS, start_year: 7550}
+
+  fig7c_IAV:
+    description: Fig7c, Calculate temperature interannual variability for hatching
+    realms: [atmos]
+    themes: [varmodes]
+    variables:
+      tas: {exp: piControl, mip: Amon, preprocessor: preproc_map, project: CMIP6,
+        short_name: tas}
+    scripts:
+      calc_IAV_for_stippandhatch_tas: {iavmode: mmm, periodlength: 20.0, script: tebaldi21esd/calc_IAV_hatching.ncl,
+        styleset: CMIP6, time_avg: annualclim}
+    additional_datasets:
+      - *id1075
+      - *id1076
+      - *id1078
+      - *id1080
+      - *id1082
+      - *id1083
+      - *id1128
+
+  fig7d:
+    description: Fig7d, Pattern of PR change for SSP4-3.4 with hatching. Due to a mistake in the paper, this corresponds to fig6d in the paper.
+    realms: [atmos]
+    themes: [phys]
+    variables:
+      pr: {mip: Amon, preprocessor: preproc_map, project: CMIP6, short_name: pr}
+    scripts:
+      calc_map_diff_pr:
+        ancestors: [pr, fig7d_IAV/calc_IAV_for_stippandhatch_pr]
+        iavmode: mmm
+        label: &id1129 [2081-2100 relative to 1995-2014]
+        not_sig: true
+        percent: 1
+        periods: &id1130 [2081]
+        scenarios: &id1131 [ssp434]
+        script: tebaldi21esd/calc_pattern_stippling_hatching.ncl
+        sig: false
+        time_avg: annualclim
+      plot_map_diff_pr:
+        ancestors: [pr, fig7d/calc_map_diff_pr]
+        colormap: $diag_scripts/shared/plot/rgb/ipcc_prec_div_14.rgb
+        diagscript: calc_map_diff_mmm_stipp
+        diff_levs: [-30, -25, -20, -15, -10, -5, 0, 5, 10, 15, 20, 25, 30]
+        label: *id1129
+        max_hori: 2
+        max_vert: 2
+        not_sig: true
+        percent: 1
+        periods: *id1130
+        projection: Robinson
+        scenarios: *id1131
+        script: tebaldi21esd/plot_pattern.ncl
+        sig: false
+        time_avg: annualclim
+        title: SSP4-3.4 PR change
+    additional_datasets: &fig7_ssp434_datasets
+      - *id202
+      - *id205
+      - *id211
+      - *id215
+      - *id222
+      - *id227
+      - *id230
+      - &id1132 {dataset: GISS-E2-1-G, end_year: 2100, ensemble: r1i1p1f2, exp: ssp434, grid: gn, institute: NASA-GISS, start_year: 2081}
+      - &id1133 {dataset: FGOALS-g3, end_year: 2100, ensemble: r1i1p1f1, exp: ssp434, grid: gn, institute: CAS, start_year: 2081}
+      - &id1134 {dataset: CanESM5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp434, grid: gn, institute: CCCma, start_year: 2081}
+      - &id1135 {dataset: MIROC6, end_year: 2100, ensemble: r1i1p1f1, exp: ssp434, grid: gn, institute: MIROC, start_year: 2081}
+      - &id1136 {dataset: IPSL-CM6A-LR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp434, grid: gr, institute: IPSL, start_year: 2081}
+      - &id1137 {dataset: MRI-ESM2-0, end_year: 2100, ensemble: r1i1p1f1, exp: ssp434, grid: gn, institute: MRI, start_year: 2081}
+      - &id1138 {dataset: CNRM-ESM2-1, end_year: 2100, ensemble: r1i1p1f2, exp: ssp434, grid: gr, institute: CNRM-CERFACS, start_year: 2081}
+
+  fig7f:
+    description: Fig7f, Pattern difference of PR between SSP4-6.0 and SSP4-3.4
+    realms: [atmos]
+    themes: [phys]
+    variables:
+      tas: {mip: Amon, preprocessor: preproc_map, project: CMIP6, short_name: tas}
+
pr: {mip: Amon, preprocessor: preproc_map, project: CMIP6, short_name: pr} + scripts: + calc_map_diff_mmm_stipp_pattern_comparison: + label: &id1139 [2081-2100] + percent: 1 + periods: &id1140 [2081] + scenarios: &id1141 [ssp434, ssp460] + script: tebaldi21esd/calc_pattern_comparison.ncl + time_avg: annualclim + plot_map_diff_mmm_stipp: + ancestors: [pr, fig7f/calc_map_diff_mmm_stipp_pattern_comparison] + colormap: $diag_scripts/shared/plot/rgb/ipcc_prec_div_14.rgb + diagscript: calc_map_diff_mmm_stipp_pattern_comparison + diff_levs: [-30, -25, -20, -15, -10, -5, 0, 5, 10, 15, 20, 25, 30] + label: *id1139 + max_hori: 2 + max_vert: 2 + percent: 1 + periods: *id1140 + projection: Robinson + scenarios: *id1141 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: Difference between SSP4-6.0 and SSP4-3.4 precipitation pattern + additional_datasets: &fig7_ssp460vssp434_datasets + - *id202 + - *id205 + - *id211 + - *id215 + - *id222 + - *id227 + - *id230 + - *id1132 + - *id1133 + - *id1134 + - *id1135 + - *id1136 + - *id1137 + - *id1138 + - &id1145 {dataset: CanESM5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp460, + grid: gn, institute: CCCma, start_year: 2081} + - &id1146 {dataset: MIROC6, end_year: 2100, ensemble: r1i1p1f1, exp: ssp460, grid: gn, + institute: MIROC, start_year: 2081} + - &id1147 {dataset: IPSL-CM6A-LR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp460, + grid: gr, institute: IPSL, start_year: 2081} + - &id1148 {dataset: MRI-ESM2-0, end_year: 2100, ensemble: r1i1p1f1, exp: ssp460, + grid: gn, institute: MRI, start_year: 2081} + - &id1149 {dataset: CNRM-ESM2-1, end_year: 2100, ensemble: r1i1p1f2, exp: ssp460, + grid: gr, institute: CNRM-CERFACS, start_year: 2081} + - &id1150 {dataset: GISS-E2-1-G, end_year: 2100, ensemble: r1i1p1f2, exp: ssp460, + grid: gn, institute: NASA-GISS, start_year: 2081} + - &id1151 {dataset: FGOALS-g3, end_year: 2100, ensemble: r1i1p1f1, exp: ssp460, + grid: gn, institute: CAS, start_year: 2081} + + fig7c: + description: Fig7c, Pattern of TAS change for SSP4-3.4 with hatching. Due to a mistake in the paper, this corresponds to fig6c of the paper. 
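+    # As fig6c, but for SSP4-3.4; the hatching uses the temperature IAV
+    # computed by fig7c_IAV (ancestor calc_IAV_for_stippandhatch_tas).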
+ realms: [atmos] + themes: [phys] + variables: + tas: {mip: Amon, preprocessor: preproc_map, project: CMIP6, short_name: tas} + scripts: + calc_map_diff_tas: + ancestors: [tas, fig7c_IAV/calc_IAV_for_stippandhatch_tas] + iavmode: mmm + label: &id1142 [2081-2100 relative to 1995-2014] + not_sig: true + periods: &id1143 [2081] + scenarios: &id1144 [ssp434] + script: tebaldi21esd/calc_pattern_stippling_hatching.ncl + sig: false + time_avg: annualclim + plot_map_diff_tas: + ancestors: [tas, calc_map_diff_tas] + colormap: $diag_scripts/shared/plot/rgb/ipcc_temp_div_18.rgb + diagscript: calc_map_diff_mmm_stipp + diff_levs: [0, 1, 2, 3, 4, 5, 6, 7, 8] + label: *id1142 + not_sig: true + periods: *id1143 + projection: Robinson + scenarios: *id1144 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: SSP4-3.4 TAS change + additional_datasets: *fig7_ssp434_datasets + + fig7e: + description: Fig7e, Pattern difference of TAS between SSP4-6.0 and SSP4-3.4 + realms: [atmos] + themes: [phys] + variables: + tas: {mip: Amon, preprocessor: preproc_map, project: CMIP6, short_name: tas} + scripts: + calc_map_diff_mmm_stipp_pattern_comparison: + label: &id1152 [2081-2100] + periods: &id1153 [2081] + scenarios: &id1154 [ssp434, ssp460] + script: tebaldi21esd/calc_pattern_comparison.ncl + time_avg: annualclim + plot_map_diff_mmm_stipp: + ancestors: [tas, calc_map_diff_mmm_stipp_pattern_comparison] + colormap: $diag_scripts/shared/plot/rgb/ipcc_temp_div_18.rgb + diagscript: calc_map_diff_mmm_stipp_pattern_comparison + diff_levs: [0, 1, 2, 3, 4, 5, 6, 7, 8] + label: *id1152 + periods: *id1153 + projection: Robinson + scenarios: *id1154 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: Difference between SSP4-6.0 and SSP4-3.4 temperature pattern + additional_datasets: *fig7_ssp460vssp434_datasets + + fig7b: + description: Fig7b, Time series of PR change for SSP4-6.0 and SSP4-3.4 + realms: [atmos] + themes: [phys] + variables: + pr: {mip: Amon, project: CMIP6, short_name: pr} + scripts: + plot_ts_line_mean_spread_pr: + ancestors: [pr, ts_line_mean_spread_pr] + begin_ref_year: 1995 + colormap: $diag_scripts/shared/plot/rgb/ipcc_color_ssp_tseries.rgb + end_ref_year: 2014 + eyears: &id1155 [2014, 2100] + label: &id1156 [Historical, SSP4-3.4, SSP4-6.0] + model_nr: true + scenarios: &id1157 [ssp434, ssp460] + script: tebaldi21esd/plot_timeseries_mean_spread_ssp4.ncl + spread: 1.64 + styleset: CMIP6 + syears: &id1158 [1995, 2015] + title: PR, global + yaxis: Relative to 1995-2014 (%) + ymax: 12 + ymin: -5 + ts_line_mean_spread_pr: + begin_ref_year: 1995 + end_ref_year: 2014 + eyears: *id1155 + label: *id1156 + model_nr: true + percent: 1 + scenarios: *id1157 + script: tebaldi21esd/calc_timeseries_mean_spread_ssp4.ncl + spread: 1.64 + styleset: CMIP6 + syears: *id1158 + additional_datasets: &fig7_ts_datasets + - *id202 + - *id205 + - *id211 + - *id215 + - *id222 + - *id227 + - *id230 + - &id1159 {dataset: GISS-E2-1-G, end_year: 2100, ensemble: r1i1p1f2, exp: ssp434, + grid: gn, institute: NASA-GISS, start_year: 2015} + - &id1160 {dataset: FGOALS-g3, end_year: 2100, ensemble: r1i1p1f1, exp: ssp434, + grid: gn, institute: CAS, start_year: 2015} + - &id1161 {dataset: CanESM5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp434, + grid: gn, institute: CCCma, start_year: 2015} + - &id1162 {dataset: MIROC6, end_year: 2100, ensemble: r1i1p1f1, exp: ssp434, grid: gn, + institute: MIROC, start_year: 2015} + - &id1163 {dataset: IPSL-CM6A-LR, end_year: 2100, ensemble: 
r1i1p1f1, exp: ssp434, + grid: gr, institute: IPSL, start_year: 2015} + - &id1164 {dataset: MRI-ESM2-0, end_year: 2100, ensemble: r1i1p1f1, exp: ssp434, + grid: gn, institute: MRI, start_year: 2015} + - &id1165 {dataset: CNRM-ESM2-1, end_year: 2100, ensemble: r1i1p1f2, exp: ssp434, + grid: gr, institute: CNRM-CERFACS, start_year: 2015} + - &id1166 {dataset: GISS-E2-1-G, end_year: 2100, ensemble: r1i1p1f2, exp: ssp460, + grid: gn, institute: NASA-GISS, start_year: 2015} + - &id1167 {dataset: FGOALS-g3, end_year: 2100, ensemble: r1i1p1f1, exp: ssp460, + grid: gn, institute: CAS, start_year: 2015} + - &id1168 {dataset: CanESM5, end_year: 2100, ensemble: r1i1p1f1, exp: ssp460, + grid: gn, institute: CCCma, start_year: 2015} + - &id1169 {dataset: MIROC6, end_year: 2100, ensemble: r1i1p1f1, exp: ssp460, grid: gn, + institute: MIROC, start_year: 2015} + - &id1170 {dataset: IPSL-CM6A-LR, end_year: 2100, ensemble: r1i1p1f1, exp: ssp460, + grid: gr, institute: IPSL, start_year: 2015} + - &id1171 {dataset: MRI-ESM2-0, end_year: 2100, ensemble: r1i1p1f1, exp: ssp460, + grid: gn, institute: MRI, start_year: 2015} + - &id1172 {dataset: CNRM-ESM2-1, end_year: 2100, ensemble: r1i1p1f2, exp: ssp460, + grid: gr, institute: CNRM-CERFACS, start_year: 2015} + + fig7a: + description: Fig7a, Time series of TAS change for SSP4-6.0 and SSP4-3.4 + realms: [atmos] + themes: [phys] + variables: + tas: {mip: Amon, project: CMIP6, short_name: tas} + scripts: + plot_ts_line_mean_spread_tas: + ancestors: [tas, ts_line_mean_spread_tas] + begin_ref_year: 1995 + colormap: $diag_scripts/shared/plot/rgb/ipcc_color_ssp_tseries.rgb + end_ref_year: 2014 + eyears: &id1173 [2014, 2100] + label: &id1174 [Historical, SSP4-3.4, SSP4-6.0] + model_nr: true + scenarios: &id1175 [ssp434, ssp460] + script: tebaldi21esd/plot_timeseries_mean_spread_ssp4.ncl + spread: 1.64 + syears: &id1176 [1995, 2015] + title: TAS, global + yaxis: Relative to 1995-2014 (~S~o~N~ C) + ymax: 8 + ymin: -2 + ts_line_mean_spread_tas: + begin_ref_year: 1995 + colormap: $diag_scripts/shared/plot/rgb/ipcc_color_ssp_tseries.rgb + end_ref_year: 2014 + eyears: *id1173 + label: *id1174 + model_nr: true + scenarios: *id1175 + script: tebaldi21esd/calc_timeseries_mean_spread_ssp4.ncl + spread: 1.64 + syears: *id1176 + title: TAS, global + yaxis: Relative to 1995-2014 (~S~o~N~ C) + ymax: 8 + ymin: -2 + additional_datasets: *fig7_ts_datasets + + figA2c: + description: Fig A2c, Time series of relative change in precipitation in multiple + scenarios incl. 
spread over land-only + realms: [atmos] + themes: [phys] + variables: + pr: {end_year: 2100, mip: Amon, preprocessor: preproc_land_only, project: CMIP6, + short_name: pr, start_year: 2015} + scripts: + plot_ts_line_mean_spread_pr: + ancestors: [pr, ts_line_mean_spread_pr] + begin_ref_year: 1995 + colormap: $diag_scripts/shared/plot/rgb/ipcc_color_ssp_tseries.rgb + end_ref_year: 2014 + eyears: &id1177 [2014, 2100] + label: &id1178 [Historical, SSP1-1.9, SSP1-2.6, SSP2-4.5, SSP3-7.0, SSP5-8.5] + model_nr: true + scenarios: &id1179 [ssp119, ssp126, ssp245, ssp370, ssp585] + script: tebaldi21esd/plot_timeseries_mean_spread.ncl + spread: 1.64 + styleset: CMIP6 + syears: &id1180 [1850, 2015] + title: PR, land-only + yaxis: Relative to 1995-2014 (%) + ymax: 12 + ymin: -5 + ts_line_mean_spread_pr: + begin_ref_year: 1995 + end_ref_year: 2014 + eyears: *id1177 + label: *id1178 + model_nr: true + percent: 1 + scenarios: *id1179 + script: tebaldi21esd/calc_timeseries_mean_spread_runave.ncl + spread: 1.64 + styleset: CMIP6 + syears: *id1180 + additional_datasets: *fig1_pr_datasets + + figA2d: + description: FigA2d, Time series of relative change in precipitation in multiple + scenarios incl. spread over ocean-only + realms: [atmos] + themes: [phys] + variables: + pr: {end_year: 2100, mip: Amon, preprocessor: preproc_ocean_only, project: CMIP6, + short_name: pr, start_year: 2015} + scripts: + plot_ts_line_mean_spread_pr: + ancestors: [pr, ts_line_mean_spread_pr] + begin_ref_year: 1995 + colormap: $diag_scripts/shared/plot/rgb/ipcc_color_ssp_tseries.rgb + end_ref_year: 2014 + eyears: &id1181 [2014, 2100] + label: &id1182 [Historical, SSP1-1.9, SSP1-2.6, SSP2-4.5, SSP3-7.0, SSP5-8.5] + model_nr: true + scenarios: &id1183 [ssp119, ssp126, ssp245, ssp370, ssp585] + script: tebaldi21esd/plot_timeseries_mean_spread.ncl + spread: 1.64 + styleset: CMIP6 + syears: &id1184 [1850, 2015] + title: PR, ocean-only + yaxis: Relative to 1995-2014 (%) + ymax: 12 + ymin: -5 + ts_line_mean_spread_pr: + begin_ref_year: 1995 + end_ref_year: 2014 + eyears: *id1181 + label: *id1182 + model_nr: true + percent: 1 + scenarios: *id1183 + script: tebaldi21esd/calc_timeseries_mean_spread_runave.ncl + spread: 1.64 + styleset: CMIP6 + syears: *id1184 + additional_datasets: *fig1_pr_datasets + + figA2a: + description: FigA2a, Time series of relative change of temperature in multiple + scenarios incl. 
spread over land-only + realms: [atmos] + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, preprocessor: preproc_land_only, project: CMIP6, + short_name: tas, start_year: 2015} + scripts: + plot_ts_line_mean_spread_tas: + ancestors: [tas, ts_line_mean_spread_tas] + begin_ref_year: 1995 + colormap: $diag_scripts/shared/plot/rgb/ipcc_color_ssp_tseries.rgb + end_ref_year: 2014 + eyears: &id1185 [2014, 2100] + label: &id1186 [Historical, SSP1-1.9, SSP1-2.6, SSP2-4.5, SSP3-7.0, SSP5-8.5] + model_nr: true + scenarios: &id1187 [ssp119, ssp126, ssp245, ssp370, ssp585] + script: tebaldi21esd/plot_timeseries_mean_spread.ncl + spread: 1.64 + syears: &id1188 [1850, 2015] + title: TAS, land-only + yaxis: Relative to 1995-2014 (~S~o~N~ C) + ymax: 8 + ymin: -2 + ts_line_mean_spread_tas: + begin_ref_year: 1995 + end_ref_year: 2014 + eyears: *id1185 + label: *id1186 + model_nr: true + runave_window: 11 + scenarios: *id1187 + script: tebaldi21esd/calc_timeseries_mean_spread_runave.ncl + spread: 1.64 + syears: *id1188 + additional_datasets: *fig1_tas_datasets + + figA2b: + description: FigA2b, Time series of relative change of temperature in multiple + scenarios incl. spread over ocean-only + realms: [atmos] + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, preprocessor: preproc_ocean_only, project: CMIP6, + short_name: tas, start_year: 2015} + scripts: + plot_ts_line_mean_spread_tas: + ancestors: [tas, ts_line_mean_spread_tas] + begin_ref_year: 1995 + colormap: $diag_scripts/shared/plot/rgb/ipcc_color_ssp_tseries.rgb + end_ref_year: 2014 + eyears: &id1189 [2014, 2100] + label: &id1190 [Historical, SSP1-1.9, SSP1-2.6, SSP2-4.5, SSP3-7.0, SSP5-8.5] + model_nr: true + scenarios: &id1191 [ssp119, ssp126, ssp245, ssp370, ssp585] + script: tebaldi21esd/plot_timeseries_mean_spread.ncl + spread: 1.64 + syears: &id1192 [1850, 2015] + title: TAS, ocean-only + yaxis: Relative to 1995-2014 (~S~o~N~ C) + ymax: 8 + ymin: -2 + ts_line_mean_spread_tas: + begin_ref_year: 1995 + end_ref_year: 2014 + eyears: *id1189 + label: *id1190 + model_nr: true + runave_window: 11 + scenarios: *id1191 + script: tebaldi21esd/calc_timeseries_mean_spread_runave.ncl + spread: 1.64 + syears: *id1192 + additional_datasets: *fig1_tas_datasets + + figA4_IAV_calc_pr: + description: FigA4, Calculate precipitation interannual variability for hatching + realms: [atmos] + themes: [varmodes] + variables: + pr: {exp: piControl, mip: Amon, preprocessor: preproc_map, project: CMIP6, short_name: pr} + scripts: + calc_IAV_for_stippandhatch_pr: {iavmode: mmm, periodlength: 20.0, script: tebaldi21esd/calc_IAV_hatching.ncl, + styleset: CMIP6, time_avg: annualclim} + additional_datasets: + - &id1193 {dataset: MPI-ESM-1-2-HAM, end_year: 2150, ensemble: r1i1p1f1, grid: gn, + institute: HAMMOZ-Consortium, start_year: 1850} + - &id1194 {dataset: GISS-E2-1-G, end_year: 7850, ensemble: r1i1p1f2, grid: gn, + institute: NASA-GISS, start_year: 7550} + - &id1195 {dataset: AWI-CM-1-1-MR, end_year: 2701, ensemble: r1i1p1f1, grid: gn, + institute: AWI, start_year: 2401} + - &id1196 {dataset: FGOALS-f3-L, end_year: 900, ensemble: r1i1p1f1, grid: gr, + institute: CAS, start_year: 600} + - &id1197 {dataset: CAS-ESM2-0, end_year: 301, ensemble: r1i1p1f1, grid: gn, institute: CAS, + start_year: 1} + - &id1198 {dataset: FGOALS-g3, end_year: 500, ensemble: r1i1p1f1, grid: gn, institute: CAS, + start_year: 200} + - &id1199 {dataset: CMCC-CM2-SR5, end_year: 2150, ensemble: r1i1p1f1, grid: gn, + institute: CMCC, start_year: 1850} + - &id1200 {dataset: 
ACCESS-CM2, end_year: 1250, ensemble: r1i1p1f1, grid: gn, + institute: CSIRO-ARCCSS, start_year: 950} + - &id1201 {dataset: GFDL-CM4, end_year: 451, ensemble: r1i1p1f1, grid: gr1, institute: NOAA-GFDL, + start_year: 151} + - &id1202 {dataset: GFDL-ESM4, end_year: 301, ensemble: r1i1p1f1, grid: gr1, institute: NOAA-GFDL, + start_year: 1} + - &id1203 {dataset: CAMS-CSM1-0, end_year: 3200, ensemble: r1i1p1f1, grid: gn, + institute: CAMS, start_year: 2900} + - &id1204 {dataset: CanESM5-CanOE, end_year: 5850, ensemble: r1i1p2f1, grid: gn, + institute: CCCma, start_year: 5550} + - &id1205 {dataset: CanESM5, end_year: 5850, ensemble: r1i1p2f1, grid: gn, institute: CCCma, + start_year: 5550} + - &id1206 {dataset: CanESM5, end_year: 5501, ensemble: r1i1p1f1, grid: gn, institute: CCCma, + start_year: 5201} + - &id1207 {dataset: UKESM1-0-LL, end_year: 2260, ensemble: r1i1p1f2, grid: gn, + institute: MOHC, start_year: 1960} + - &id1208 {dataset: HadGEM3-GC31-LL, end_year: 2150, ensemble: r1i1p1f1, grid: gn, + institute: MOHC, start_year: 1850} + - &id1209 {dataset: HadGEM3-GC31-MM, end_year: 2150, ensemble: r1i1p1f1, grid: gn, + institute: MOHC, start_year: 1850} + - &id1210 {dataset: EC-Earth3-Veg-LR, end_year: 2600, ensemble: r1i1p1f1, grid: gr, + institute: EC-Earth-Consortium, start_year: 2300} + - &id1211 {dataset: EC-Earth3, end_year: 2559, ensemble: r1i1p1f1, grid: gr, institute: EC-Earth-Consortium, + start_year: 2259} + - &id1212 {dataset: EC-Earth3-Veg, end_year: 2150, ensemble: r1i1p1f1, grid: gr, + institute: EC-Earth-Consortium, start_year: 1850} + - &id1213 {dataset: MIROC6, end_year: 3500, ensemble: r1i1p1f1, grid: gn, institute: MIROC, + start_year: 3200} + - &id1214 {dataset: MIROC-ES2L, end_year: 2150, ensemble: r1i1p1f2, grid: gn, + institute: MIROC, start_year: 1850} + - &id1215 {dataset: CIESM, end_year: 301, ensemble: r1i1p1f1, grid: gr, institute: THU, + start_year: 1} + - &id1216 {dataset: MCM-UA-1-0, end_year: 301, ensemble: r1i1p1f1, grid: gn, institute: UA, + start_year: 1} + - &id1217 {dataset: BCC-CSM2-MR, end_year: 2150, ensemble: r1i1p1f1, grid: gn, + institute: BCC, start_year: 1850} + - &id1218 {dataset: BCC-ESM1, end_year: 2150, ensemble: r1i1p1f1, grid: gn, institute: BCC, + start_year: 1850} + - &id1219 {dataset: TaiESM1, end_year: 501, ensemble: r1i1p1f1, grid: gn, institute: AS-RCEC, + start_year: 201} + - &id1220 {dataset: INM-CM5-0, end_year: 2296, ensemble: r1i1p1f1, grid: gr1, + institute: INM, start_year: 1996} + - &id1221 {dataset: INM-CM4-8, end_year: 2150, ensemble: r1i1p1f1, grid: gr1, + institute: INM, start_year: 1850} + - &id1222 {dataset: IPSL-CM6A-LR, end_year: 2150, ensemble: r1i1p1f1, grid: gr, + institute: IPSL, start_year: 1850} + - &id1223 {dataset: E3SM-1-0, end_year: 301, ensemble: r1i1p1f1, grid: gr, institute: E3SM-Project, + start_year: 1} + - &id1224 {dataset: CESM2-FV2, end_year: 301, ensemble: r1i1p1f1, grid: gn, institute: NCAR, + start_year: 1} + - &id1225 {dataset: CESM2-WACCM, end_year: 301, ensemble: r1i1p1f1, grid: gn, + institute: NCAR, start_year: 1} +# - &id1226 {dataset: CESM2, end_year: 301, ensemble: r1i1p1f1, grid: gn, institute: NCAR, +# start_year: 1} + - &id1227 {dataset: CESM2-WACCM-FV2, end_year: 301, ensemble: r1i1p1f1, grid: gn, + institute: NCAR, start_year: 1} + - &id1228 {dataset: MPI-ESM1-2-LR, end_year: 2150, ensemble: r1i1p1f1, grid: gn, + institute: MPI-M, start_year: 1850} + - &id1229 {dataset: MPI-ESM1-2-HR, end_year: 2150, ensemble: r1i1p1f1, grid: gn, + institute: MPI-M, start_year: 1850} + - &id1230 
{dataset: NESM3, end_year: 800, ensemble: r1i1p1f1, grid: gn, institute: NUIST, + start_year: 500} + - &id1231 {dataset: ACCESS-ESM1-5, end_year: 401, ensemble: r1i1p1f1, grid: gn, + institute: CSIRO, start_year: 101} + - &id1232 {dataset: MRI-ESM2-0, end_year: 2150, ensemble: r1i1p1f1, grid: gn, + institute: MRI, start_year: 1850} + - &id1233 {dataset: NorESM2-MM, end_year: 1500, ensemble: r1i1p1f1, grid: gn, + institute: NCC, start_year: 1200} + - &id1234 {dataset: NorCPM1, end_year: 301, ensemble: r1i1p1f1, grid: gn, institute: NCC, + start_year: 1} + - &id1235 {dataset: CNRM-ESM2-1, end_year: 2150, ensemble: r1i1p1f2, grid: gr, + institute: CNRM-CERFACS, start_year: 1850} + - &id1236 {dataset: CNRM-CM6-1, end_year: 2150, ensemble: r1i1p1f2, grid: gr, + institute: CNRM-CERFACS, start_year: 1850} + - &id1237 {dataset: SAM0-UNICON, end_year: 301, ensemble: r1i1p1f1, grid: gn, + institute: SNU, start_year: 1} + + figA4_IAV_calc_tas: + description: FigA4, Calculate TAS interannual variability for hatching + realms: [atmos] + themes: [varmodes] + variables: + tas: {exp: piControl, mip: Amon, preprocessor: preproc_map, project: CMIP6, + short_name: tas} + scripts: + calc_IAV_for_stippandhatch_tas: {iavmode: mmm, periodlength: 20.0, script: tebaldi21esd/calc_IAV_hatching.ncl, + styleset: CMIP6, time_avg: annualclim} + additional_datasets: + - *id1193 + - *id1194 + - *id1195 + - *id1196 + - *id1197 + - *id1198 + - *id1199 + - *id1200 + - *id1201 + - *id1202 + - *id1203 + - *id1204 + - *id1205 + - *id1206 + - *id1207 + - *id1208 + - *id1209 + - *id1210 + - *id1211 + - *id1212 + - *id1213 + - *id1214 + - {dataset: FIO-ESM-2-0, end_year: 601, ensemble: r1i1p1f1, grid: gn, institute: FIO-QLNM, + start_year: 301} + - *id1215 + - *id1216 + - *id1217 + - *id1218 + - *id1219 + - *id1220 + - *id1221 + - *id1222 + - *id1223 + - *id1224 + - *id1225 +# - *id1226 + - *id1227 + - *id1228 + - *id1229 + - *id1230 + - *id1231 + - *id1232 + - {dataset: NorESM2-LM, end_year: 1900, ensemble: r1i1p1f1, grid: gn, institute: NCC, + start_year: 1600} + - *id1233 + - *id1234 + - *id1235 + - *id1236 + - *id1237 + - {dataset: KIOST-ESM, end_year: 2989, ensemble: r1i1p1f1, grid: gr1, institute: KIOST, + start_year: 2689} + + figA4b: + description: FigA4b, Pattern precipitation changes (not scaled by global T) for + SSP1-1.9 + realms: [atmos] + themes: [phys] + variables: + pr: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP6, short_name: pr, + start_year: 2081} + scripts: + calc_map_diff_pr: + ancestors: [pr, figA4_IAV_calc_pr/calc_IAV_for_stippandhatch_pr] + iavmode: mmm + label: &id1238 [2081-2100 relative to 1995-2014] + not_sig: true + percent: 1 + periods: &id1239 [2081] + scenarios: &id1240 [ssp119] + script: tebaldi21esd/calc_pattern_stippling_hatching.ncl + sig: false + time_avg: annualclim + plot_map_diff_pr: + ancestors: [pr, calc_map_diff_pr] + colormap: $diag_scripts/shared/plot/rgb/ipcc_prec_div_14.rgb + diagscript: calc_map_diff_mmm_stipp + diff_levs: [-30, -25, -20, -15, -10, -5, 0, 5, 10, 15, 20, 25, 30] + label: *id1238 + max_hori: 2 + max_vert: 2 + not_sig: true + percent: 1 + periods: *id1239 + projection: Robinson + scenarios: *id1240 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: Precipitation change for SSP1-1.9 + additional_datasets: + - *id202 + - *id203 + - *id204 + - *id205 + - *id206 + - *id207 + - *id208 + - *id209 + - *id210 + - *id211 + - *id212 + - *id213 + - *id214 + - *id215 + - *id216 + - *id217 + - *id218 + - *id219 + - *id220 + - 
*id221 + - *id222 + - *id223 +# - *id224 + - *id225 + - *id226 + - *id227 + - *id228 + - *id229 + - *id230 + - *id231 + - *id232 + - *id233 + - *id234 + - *id235 + - *id236 + - *id237 + - *id238 + - *id239 + - *id240 + - *id241 + - *id242 + - *id243 + - *id244 + - *id245 + - *id246 + - *id247 + - *id248 + - *id249 + - *id250 + - *id251 + - *id252 + - *id253 + + figA4d: + description: FigA4d, Pattern precipitation changes (not scaled by global T) for + SSP1-2.6 + realms: [atmos] + scripts: + calc_map_diff_pr: + ancestors: [pr, figA4_IAV_calc_pr/calc_IAV_for_stippandhatch_pr] + iavmode: mmm + label: &id1241 [2081-2100 relative to 1995-2014] + not_sig: true + percent: 1 + periods: &id1242 [2081] + scenarios: &id1243 [ssp126] + script: tebaldi21esd/calc_pattern_stippling_hatching.ncl + sig: false + time_avg: annualclim + plot_map_diff_pr: + ancestors: [pr, calc_map_diff_pr] + colormap: $diag_scripts/shared/plot/rgb/ipcc_prec_div_14.rgb + diagscript: calc_map_diff_mmm_stipp + diff_levs: [-30, -25, -20, -15, -10, -5, 0, 5, 10, 15, 20, 25, 30] + label: *id1241 + max_hori: 2 + max_vert: 2 + not_sig: true + percent: 1 + periods: *id1242 + projection: Robinson + scenarios: *id1243 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: Precipitation change for SSP1-2.6 + themes: [phys] + variables: + pr: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP6, short_name: pr, + start_year: 2081} + additional_datasets: + - *id202 + - *id203 + - *id204 + - *id205 + - *id206 + - *id207 + - *id208 + - *id209 + - *id210 + - *id211 + - *id212 + - *id213 + - *id214 + - *id215 + - *id216 + - *id217 + - *id218 + - *id219 + - *id220 + - *id221 + - *id222 + - *id223 +# - *id224 + - *id225 + - *id226 + - *id227 + - *id228 + - *id229 + - *id230 + - *id231 + - *id232 + - *id233 + - *id234 + - *id235 + - *id236 + - *id237 + - *id238 + - *id239 + - *id240 + - *id254 + - *id255 + - *id256 + - *id257 + - *id258 + - *id259 + - *id260 + - *id261 + - *id262 + - *id263 + - *id264 + - *id265 + - *id266 + - *id267 + - *id268 + - *id269 + - *id270 + - *id271 + - *id272 + - *id273 + - *id274 + - *id275 + - *id276 + - *id277 + - *id278 + - *id279 +# - *id280 + - *id281 + - *id282 + - *id283 + - *id284 + - *id285 + - *id286 + - *id287 + - *id288 + - *id289 + - *id290 + + figA4f: + description: FigA4f, Pattern precipitation changes (not scaled by global T) for + SSP2-4.5 + realms: [atmos] + scripts: + calc_map_diff_pr: + ancestors: [pr, figA4_IAV_calc_pr/calc_IAV_for_stippandhatch_pr] + iavmode: mmm + label: &id1244 [2081-2100 relative to 1995-2014] + not_sig: true + percent: 1 + periods: &id1245 [2081] + scenarios: &id1246 [ssp245] + script: tebaldi21esd/calc_pattern_stippling_hatching.ncl + sig: false + time_avg: annualclim + plot_map_diff_pr: + ancestors: [pr, calc_map_diff_pr] + colormap: $diag_scripts/shared/plot/rgb/ipcc_prec_div_14.rgb + diagscript: calc_map_diff_mmm_stipp + diff_levs: [-30, -25, -20, -15, -10, -5, 0, 5, 10, 15, 20, 25, 30] + label: *id1244 + max_hori: 2 + max_vert: 2 + not_sig: true + percent: 1 + periods: *id1245 + projection: Robinson + scenarios: *id1246 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: Precipitation change for SSP2-4.5 + themes: [phys] + variables: + pr: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP6, short_name: pr, + start_year: 2081} + additional_datasets: + - *id202 + - *id203 + - *id204 + - *id205 + - *id206 + - *id207 + - *id208 + - *id209 + - *id210 + - *id211 + - *id212 + - 
*id213 + - *id214 + - *id215 + - *id216 + - *id217 + - *id218 + - *id219 + - *id220 + - *id221 + - *id222 + - *id223 +# - *id224 + - *id225 + - *id226 + - *id227 + - *id228 + - *id229 + - *id230 + - *id231 + - *id232 + - *id233 + - *id234 + - *id235 + - *id236 + - *id237 + - *id238 + - *id239 + - *id240 + - *id291 + - *id292 + - *id293 + - *id294 + - *id295 + - *id296 + - *id297 + - *id298 + - *id299 + - *id300 + - *id301 + - *id302 + - *id303 + - *id304 + - *id305 + - *id306 + - *id307 + - *id308 + - *id309 + - *id310 + - *id311 + - *id312 + - *id313 + - *id314 + - *id315 + - *id316 +# - *id317 + - *id318 + - *id319 + - *id320 + - *id321 + - *id322 + - *id323 + - *id324 + - *id325 + - *id326 + + figA4h: + description: FigA4h, Pattern precipitation changes (not scaled by global T) for + SSP3-7.0 + realms: [atmos] + scripts: + calc_map_diff_pr: + ancestors: [pr, figA4_IAV_calc_pr/calc_IAV_for_stippandhatch_pr] + iavmode: mmm + label: &id1247 [2081-2100 relative to 1995-2014] + not_sig: true + percent: 1 + periods: &id1248 [2081] + scenarios: &id1249 [ssp370] + script: tebaldi21esd/calc_pattern_stippling_hatching.ncl + sig: false + time_avg: annualclim + plot_map_diff_pr: + ancestors: [pr, calc_map_diff_pr] + colormap: $diag_scripts/shared/plot/rgb/ipcc_prec_div_14.rgb + diagscript: calc_map_diff_mmm_stipp + diff_levs: [-30, -25, -20, -15, -10, -5, 0, 5, 10, 15, 20, 25, 30] + label: *id1247 + max_hori: 2 + max_vert: 2 + not_sig: true + percent: 1 + periods: *id1248 + projection: Robinson + scenarios: *id1249 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: Precipitation change for SSP3-7.0 + themes: [phys] + variables: + pr: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP6, short_name: pr, + start_year: 2081} + additional_datasets: + - *id202 + - *id203 + - *id204 + - *id205 + - *id206 + - *id207 + - *id208 + - *id209 + - *id210 + - *id211 + - *id212 + - *id213 + - *id214 + - *id215 + - *id216 + - *id217 + - *id218 + - *id219 + - *id220 + - *id221 + - *id222 + - *id223 +# - *id224 + - *id225 + - *id226 + - *id227 + - *id228 + - *id229 + - *id230 + - *id231 + - *id232 + - *id233 + - *id234 + - *id236 + - *id237 + - *id238 + - *id239 + - *id240 + - *id327 + - *id328 + - *id329 + - *id330 + - *id331 + - *id332 + - *id333 + - *id334 + - *id335 + - *id336 + - *id337 + - *id338 + - *id339 + - *id340 + - *id341 + - *id342 + - *id343 + - *id344 + - *id345 + - *id346 + - *id347 + - *id348 + - *id349 +# - *id350 + - *id351 + - *id352 + - *id353 + - *id354 + - *id355 + - *id356 + - *id357 + - *id358 + - *id359 + + figA4j: + description: FigA4j, Pattern precipitation changes (not scaled by global T) for + SSP5-8.5 + realms: [atmos] + scripts: + calc_map_diff_pr: + ancestors: [pr, figA4_IAV_calc_pr/calc_IAV_for_stippandhatch_pr] + iavmode: mmm + label: &id1250 [2081-2100 relative to 1995-2014] + not_sig: true + percent: 1 + periods: &id1251 [2081] + scenarios: &id1252 [ssp585] + script: tebaldi21esd/calc_pattern_stippling_hatching.ncl + sig: false + time_avg: annualclim + plot_map_diff_pr: + ancestors: [pr, calc_map_diff_pr] + colormap: $diag_scripts/shared/plot/rgb/ipcc_prec_div_14.rgb + diagscript: calc_map_diff_mmm_stipp + diff_levs: [-30, -25, -20, -15, -10, -5, 0, 5, 10, 15, 20, 25, 30] + label: *id1250 + max_hori: 2 + max_vert: 2 + not_sig: true + percent: 1 + periods: *id1251 + projection: Robinson + scenarios: *id1252 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: Precipitation change for SSP5-8.5 + 
themes: [phys] + variables: + pr: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP6, short_name: pr, + start_year: 2081} + additional_datasets: + - *id202 + - *id203 + - *id204 + - *id205 + - *id206 + - *id207 + - *id208 + - *id209 + - *id210 + - *id211 + - *id212 + - *id213 + - *id214 + - *id215 + - *id216 + - *id217 + - *id218 + - *id219 + - *id220 + - *id221 + - *id222 + - *id223 +# - *id224 + - *id225 + - *id226 + - *id227 + - *id228 + - *id229 + - *id230 + - *id231 + - *id232 + - *id233 + - *id234 + - *id235 + - *id236 + - *id237 + - *id238 + - *id239 + - *id240 + - *id360 + - *id361 + - *id362 + - *id363 + - *id364 + - *id365 + - *id366 + - *id367 + - *id368 + - *id369 + - *id370 + - *id371 + - *id372 + - *id373 + - *id374 + - *id375 + - *id376 + - *id377 + - *id378 + - *id379 + - *id380 + - *id381 + - *id382 + - *id383 + - *id384 + - *id385 +# - *id386 + - *id387 + - *id388 + - *id389 + - *id390 + - *id391 + - *id392 + - *id393 + - *id394 + - *id395 + - *id396 + + figA4a: + description: FigA4a, Pattern temperature changes (not scaled by global T) for SSP1-1.9 + realms: [atmos] + scripts: + calc_map_diff_tas: + ancestors: [tas, figA4_IAV_calc_tas/calc_IAV_for_stippandhatch_tas] + iavmode: mmm + label: &id1253 [2081-2100 relative to 1995-2014] + not_sig: true + periods: &id1254 [2081] + scenarios: &id1255 [ssp119] + script: tebaldi21esd/calc_pattern_stippling_hatching.ncl + sig: false + time_avg: annualclim + plot_map_diff_tas: + ancestors: [tas, calc_map_diff_tas] + colormap: $diag_scripts/shared/plot/rgb/ipcc_temp_div_18.rgb + diagscript: calc_map_diff_mmm_stipp + diff_levs: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] + label: *id1253 + not_sig: true + periods: *id1254 + projection: Robinson + scenarios: *id1255 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: Temperature change for SSP1-1.9 + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP6, + short_name: tas, start_year: 2081} + additional_datasets: + - *id202 + - *id203 + - *id204 + - *id205 + - *id206 + - *id207 + - *id208 + - *id209 + - *id210 + - *id211 + - *id212 + - *id213 + - *id214 + - *id215 + - *id216 + - *id217 + - *id218 + - *id219 + - *id220 + - *id221 + - *id222 + - *id223 +# - *id224 + - *id225 + - *id226 + - *id227 + - *id228 + - *id229 + - *id230 + - *id231 + - *id232 + - *id233 + - *id234 + - *id235 + - *id236 + - *id237 + - *id403 + - *id238 + - *id239 + - *id240 + - *id241 + - *id242 + - *id243 + - *id244 + - *id245 + - *id246 + - *id247 + - *id248 + - *id249 + - *id250 + - *id251 + - *id252 + - *id253 + + figA4c: + description: FigA4c, Pattern temperature changes (not scaled by global T) for SSP1-2.6 + realms: [atmos] + scripts: + calc_map_diff_tas: + ancestors: [tas, figA4_IAV_calc_tas/calc_IAV_for_stippandhatch_tas] + iavmode: mmm + label: &id1256 [2081-2100 relative to 1995-2014] + not_sig: true + periods: &id1257 [2081] + scenarios: &id1258 [ssp126] + script: tebaldi21esd/calc_pattern_stippling_hatching.ncl + sig: false + time_avg: annualclim + plot_map_diff_tas: + ancestors: [tas, calc_map_diff_tas] + colormap: $diag_scripts/shared/plot/rgb/ipcc_temp_div_18.rgb + diagscript: calc_map_diff_mmm_stipp + diff_levs: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] + label: *id1256 + not_sig: true + periods: *id1257 + projection: Robinson + scenarios: *id1258 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: Temperature change for SSP1-2.6 + themes: [phys] + variables: + tas: {end_year: 2100, mip: 
Amon, preprocessor: preproc_map, project: CMIP6, + short_name: tas, start_year: 2081} + additional_datasets: + - *id202 + - *id203 + - *id204 + - *id205 + - *id206 + - *id207 + - *id208 + - *id209 + - *id210 + - *id211 + - *id212 + - *id213 + - *id214 + - *id215 + - *id216 + - *id217 + - *id218 + - *id219 + - *id220 + - *id221 + - *id222 + - *id223 +# - *id224 + - *id225 + - *id226 + - *id227 + - *id228 + - *id229 + - *id230 + - *id231 + - *id232 + - *id233 + - *id234 + - *id235 + - *id236 + - *id237 + - *id403 + - *id238 + - *id239 + - *id240 + - *id254 + - *id255 + - *id256 + - *id257 + - *id258 + - *id259 + - *id260 + - *id261 + - *id262 + - *id263 + - *id264 + - *id265 + - *id266 + - *id267 + - *id268 + - *id269 + - *id270 + - *id271 + - *id272 + - *id273 + - *id274 + - *id275 + - *id276 + - *id277 + - *id278 + - *id279 +# - *id280 + - *id281 + - *id282 + - *id283 + - *id284 + - *id285 + - *id286 + - *id287 + - *id288 + - *id289 + - *id404 + - *id290 + + figA4e: + description: FigA4e, Pattern temperature changes (not scaled by global T) for SSP2-4.5 + realms: [atmos] + scripts: + calc_map_diff_tas: + ancestors: [tas, figA4_IAV_calc_tas/calc_IAV_for_stippandhatch_tas] + iavmode: mmm + label: &id1259 [2081-2100 relative to 1995-2014] + not_sig: true + periods: &id1260 [2081] + scenarios: &id1261 [ssp245] + script: tebaldi21esd/calc_pattern_stippling_hatching.ncl + sig: false + time_avg: annualclim + plot_map_diff_tas: + ancestors: [tas, calc_map_diff_tas] + colormap: $diag_scripts/shared/plot/rgb/ipcc_temp_div_18.rgb + diagscript: calc_map_diff_mmm_stipp + diff_levs: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] + label: *id1259 + not_sig: true + periods: *id1260 + projection: Robinson + scenarios: *id1261 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: Temperature change for SSP2-4.5 + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP6, + short_name: tas, start_year: 2081} + additional_datasets: + - *id202 + - *id203 + - *id204 + - *id205 + - *id206 + - *id207 + - *id208 + - *id209 + - *id210 + - *id211 + - *id212 + - *id213 + - *id214 + - *id215 + - *id216 + - *id217 + - *id218 + - *id219 + - *id220 + - *id221 + - *id222 + - *id223 +# - *id224 + - *id225 + - *id226 + - *id227 + - *id228 + - *id229 + - *id230 + - *id231 + - *id232 + - *id233 + - *id234 + - *id235 + - *id236 + - *id237 + - *id403 + - *id238 + - *id239 + - *id240 + - *id291 + - *id292 + - *id293 + - *id294 + - *id295 + - *id296 + - *id297 + - *id298 + - *id299 + - *id300 + - *id301 + - *id302 + - *id303 + - *id304 + - *id305 + - *id306 + - *id307 + - *id308 + - *id309 + - *id310 + - *id311 + - *id312 + - *id313 + - *id314 + - *id315 + - *id316 +# - *id317 + - *id318 + - *id319 + - *id320 + - *id321 + - *id322 + - *id323 + - *id324 + - *id325 + - *id326 + - *id405 + + figA4g: + description: FigA4g, Pattern temperature changes (not scaled by global T) for SSP3-7.0 + realms: [atmos] + scripts: + calc_map_diff_tas: + ancestors: [tas, figA4_IAV_calc_tas/calc_IAV_for_stippandhatch_tas] + iavmode: mmm + label: &id1262 [2081-2100 relative to 1995-2014] + not_sig: true + periods: &id1263 [2081] + scenarios: &id1264 [ssp370] + script: tebaldi21esd/calc_pattern_stippling_hatching.ncl + sig: false + time_avg: annualclim + plot_map_diff_tas: + ancestors: [tas, calc_map_diff_tas] + colormap: $diag_scripts/shared/plot/rgb/ipcc_temp_div_18.rgb + diagscript: calc_map_diff_mmm_stipp + diff_levs: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] + label: *id1262 + not_sig: true + 
periods: *id1263 + projection: Robinson + scenarios: *id1264 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: Temperature change for SSP3-7.0 + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP6, + short_name: tas, start_year: 2081} + additional_datasets: + - *id202 + - *id203 + - *id204 + - *id205 + - *id206 + - *id207 + - *id208 + - *id209 + - *id210 + - *id211 + - *id212 + - *id213 + - *id214 + - *id215 + - *id216 + - *id217 + - *id218 + - *id219 + - *id220 + - *id221 + - *id222 + - *id223 +# - *id224 + - *id225 + - *id226 + - *id227 + - *id228 + - *id229 + - *id230 + - *id231 + - *id232 + - *id233 + - *id234 + - *id235 + - *id236 + - *id237 + - *id403 + - *id238 + - *id239 + - *id240 + - *id327 + - *id328 + - *id329 + - *id330 + - *id331 + - *id332 + - *id333 + - *id334 + - *id335 + - *id336 + - *id337 + - *id338 + - *id339 + - *id340 + - *id341 + - *id342 + - *id343 + - *id344 + - *id345 + - *id346 + - *id347 + - *id348 + - *id349 +# - *id350 + - *id351 + - *id352 + - *id353 + - *id354 + - *id355 + - *id356 + - *id357 + - *id358 + - *id359 + + figA4i: + description: FigA4i, Pattern temperature changes (not scaled by global T) for SSP5-8.5 + realms: [atmos] + scripts: + calc_map_diff_tas: + ancestors: [tas, figA4_IAV_calc_tas/calc_IAV_for_stippandhatch_tas] + iavmode: mmm + label: &id1265 [2081-2100 relative to 1995-2014] + not_sig: true + periods: &id1266 [2081] + scenarios: &id1267 [ssp585] + script: tebaldi21esd/calc_pattern_stippling_hatching.ncl + sig: false + time_avg: annualclim + plot_map_diff_tas: + ancestors: [tas, calc_map_diff_tas] + colormap: $diag_scripts/shared/plot/rgb/ipcc_temp_div_18.rgb + diagscript: calc_map_diff_mmm_stipp + diff_levs: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] + label: *id1265 + not_sig: true + periods: *id1266 + projection: Robinson + scenarios: *id1267 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: Temperature change for SSP5-8.5 + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP6, + short_name: tas, start_year: 2081} + additional_datasets: + - *id202 + - *id203 + - *id204 + - *id205 + - *id206 + - *id207 + - *id208 + - *id209 + - *id210 + - *id211 + - *id212 + - *id213 + - *id214 + - *id215 + - *id216 + - *id217 + - *id218 + - *id219 + - *id220 + - *id221 + - *id222 + - *id223 +# - *id224 + - *id225 + - *id226 + - *id227 + - *id228 + - *id229 + - *id230 + - *id231 + - *id232 + - *id233 + - *id234 + - *id235 + - *id236 + - *id237 + - *id403 + - *id238 + - *id239 + - *id240 + - *id360 + - *id361 + - *id362 + - *id363 + - *id364 + - *id365 + - *id366 + - *id367 + - *id368 + - *id369 + - *id370 + - *id371 + - *id372 + - *id373 + - *id374 + - *id375 + - *id376 + - *id377 + - *id378 + - *id379 + - *id380 + - *id381 + - *id382 + - *id383 + - *id384 + - *id385 +# - *id386 + - *id387 + - *id388 + - *id389 + - *id390 + - *id391 + - *id392 + - *id393 + - *id394 + - *id395 + - *id406 + - *id396 + + figA5b: + description: FigA5b, Pattern of PR standard deviation scaled by global T change + realms: [atmos] + scripts: + calc_map_stddev_scaleT_pr: + label: &id1268 [2081-2100 relative to 1986-2005] + percent: 1 + periods: &id1269 [2081] + scenarios: &id1270 [rcp26, rcp45, rcp60, rcp85] + script: tebaldi21esd/calc_pattern_stddev_scaleT.ncl + time_avg: annualclim + plot_map_stddev_scaleT_pr: + ancestors: [pr, calc_map_stddev_scaleT_pr] + colormap: 
$diag_scripts/shared/plot/rgb/ipcc_prec_seq_14.rgb + diagscript: calc_map_stddev_scaleT_mmm_stipp + diff_levs: [0, 3, 6, 9, 12, 15, 18] + label: *id1268 + max_hori: 2 + max_vert: 2 + percent: 1 + periods: *id1269 + projection: Robinson + scenarios: *id1270 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: Standard deviation of PR change ~C~ scaled by global T, CMIP5 + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP5, + short_name: tas, start_year: 2081} + pr: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP5, short_name: pr, + start_year: 2081} + additional_datasets: &figA5_pr_datasets + - *id425 + - *id426 + - *id427 + - *id428 + - *id429 + - *id430 + - *id431 + - *id432 + - *id433 + - *id434 + - *id435 + - *id436 + - *id437 + - *id438 + - *id439 + - *id440 + - *id441 + - *id442 + - *id443 + - *id444 + - *id445 + - *id446 + - *id447 + - *id448 + - *id449 + - *id450 + - *id451 + - *id452 + - *id453 + - *id454 + - *id455 + - *id456 + - *id457 + - *id458 + - *id459 + - *id460 + - *id461 + - *id462 + - *id463 + - *id464 + - *id465 + - *id466 + - *id467 + - *id468 + - *id469 + - *id470 + - *id471 + - *id472 + - *id473 + - *id474 + - *id475 + - *id476 + - *id477 + - *id478 + - *id479 + - *id480 + - *id481 + - *id482 + - *id483 + - *id484 + - *id485 + - *id486 + - *id487 + - *id488 + - *id489 + - *id490 + - *id491 + - *id492 + - *id493 + - *id494 + - *id495 + - *id496 + - *id497 + - *id498 + - *id499 + - *id500 + - *id501 + - *id502 + - *id503 + - *id504 + - *id505 + - *id506 + - *id507 + - *id508 + - *id509 + - *id510 + - *id511 + - *id512 + - *id513 + - *id514 + - *id515 + - *id516 + - *id517 + - *id518 + - *id519 + - *id520 + - *id521 + - *id522 + - *id523 + - *id524 + - *id525 + - *id526 + - *id527 + - *id528 + - *id529 + - *id530 + - *id531 + - *id532 + - *id533 + - *id534 + - *id535 + - *id536 + - *id537 + - *id538 + - *id539 + - *id540 + - *id541 + - *id542 + - *id543 + - *id544 + - *id545 + - *id546 + - *id547 + - *id548 + - *id549 + - *id550 + - *id551 + - *id552 + - *id553 + - *id554 + - *id555 + - *id556 + - *id557 + - *id558 + - *id559 + - *id560 + - *id561 + - *id562 + - *id563 + - *id564 + - *id565 + - *id566 + - *id567 + - *id568 + - *id569 + - *id570 + - *id571 + - *id572 + - *id573 + - *id574 + - *id575 + - *id576 + - *id577 + - *id578 + - *id579 + - *id580 + - *id581 + - *id582 + - *id583 + - *id584 + - *id585 + - *id586 + - *id587 + - *id588 + - *id589 + - *id590 + - *id591 + - *id592 + - *id593 + - *id594 + - *id595 + - *id596 + + figA5a: + description: FigA5a, Pattern of TAS standard deviation scaled by global T + change + realms: [atmos] + scripts: + calc_map_stddev_scaleT_tas: + label: &id1271 [2081-2100 relative to 1986-2005] + periods: &id1272 [2081] + scenarios: &id1273 [rcp26, rcp45, rcp60, rcp85] + script: tebaldi21esd/calc_pattern_stddev_scaleT.ncl + time_avg: annualclim + plot_map_stddev_scaleT_tas: + ancestors: [tas, calc_map_stddev_scaleT_tas] + colormap: $diag_scripts/shared/plot/rgb/ipcc_temp_seq_14.rgb + diagscript: calc_map_stddev_scaleT_mmm_stipp + diff_levs: [0, 0.2, 0.4, 0.6, 0.8, 1, 1.2, 1.4, 1.6] + label: *id1271 + periods: *id1272 + projection: Robinson + scenarios: *id1273 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: Standard deviation of TAS change ~C~ scaled by global T, CMIP5 + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP5, + short_name: tas, 
start_year: 2081} + additional_datasets: &figA5_tas_datasets + - *id425 + - *id426 + - *id427 + - *id428 + - *id429 + - *id430 + - *id431 + - *id432 + - *id433 + - *id434 + - *id435 + - *id436 + - *id437 + - *id438 + - *id439 + - *id440 + - *id441 + - *id442 + - *id443 + - *id444 + - *id445 + - *id446 + - *id447 + - *id448 + - *id449 + - *id450 + - *id451 + - *id452 + - *id453 + - *id454 + - *id455 + - *id456 + - *id457 + - *id458 + - *id459 + - *id460 + - *id461 + - *id462 + - *id463 + - *id464 + - *id465 + - *id466 + - *id467 + - *id468 + - *id469 + - *id470 + - *id471 + - *id472 + - *id473 + - *id474 + - *id475 + - *id476 + - *id477 + - *id478 + - *id479 + - *id480 + - *id481 + - *id482 + - *id483 + - *id484 + - *id485 + - *id486 + - *id487 + - *id488 + - *id489 + - *id490 + - *id491 + - *id492 + - *id493 + - *id494 + - *id495 + - *id496 + - *id497 + - *id498 + - *id499 + - *id500 + - *id501 + - *id502 + - *id503 + - *id504 + - *id505 + - *id506 + - *id507 + - *id508 + - *id509 + - *id510 + - *id511 + - *id512 + - *id513 + - *id514 + - *id515 + - *id516 + - *id517 + - *id518 + - *id519 + - *id520 + - *id521 + - *id522 + - *id523 + - *id524 + - *id525 + - *id526 + - *id527 + - *id528 + - *id529 + - *id530 + - *id531 + - *id532 + - *id533 + - *id534 + - *id535 + - *id536 + - *id537 + - *id538 + - *id539 + - *id540 + - *id541 + - *id542 + - *id543 + - *id544 + - *id545 + - *id546 + - *id547 + - *id548 + - *id549 + - *id550 + - *id551 + - *id552 + - *id553 + - *id554 + - *id555 + - *id556 + - *id557 + - *id558 + - *id559 + - *id560 + - *id561 + - *id562 + - *id563 + - *id564 + - *id565 + - *id566 + - *id567 + - *id568 + - *id569 + - *id570 + - *id571 + - *id572 + - *id573 + - *id574 + - *id575 + - *id576 + - *id577 + - *id578 + - *id579 + - *id580 + - *id581 + - *id582 + - *id583 + - *id584 + - *id585 + - *id586 + - *id587 + - *id588 + - *id589 + - *id590 + - *id591 + - *id592 + - *id593 + - *id594 + - *id595 + - *id596 + + figA5f: + description: FigA5f, Pattern of PR standard deviation after averaging across models, scaled + by global T change + realms: [atmos] + scripts: + calc_map_stddev_scaleT_pr: + label: &id1274 [2081-2100 relative to 1986-2005] + percent: 1 + periods: &id1275 [2081] + scenarios: &id1276 [rcp26, rcp45, rcp60, rcp85] + script: tebaldi21esd/calc_pattern_interscenario_stddev_scaleT.ncl + time_avg: annualclim + plot_map_stddev_scaleT_pr: + ancestors: [pr, calc_map_stddev_scaleT_pr] + colormap: $diag_scripts/shared/plot/rgb/ipcc_prec_seq_14.rgb + diagscript: calc_map_stddev_scaleT_mmm_stipp + diff_levs: [0, 3, 6, 9, 12, 15, 18] + label: *id1274 + max_hori: 2 + max_vert: 2 + percent: 1 + periods: *id1275 + projection: Robinson + scenarios: *id1276 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: Interscenario std. dev. 
of PR change ~C~ scaled by global + T, CMIP5 + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP5, + short_name: tas, start_year: 2081} + pr: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP5, short_name: pr, + start_year: 2081} + additional_datasets: *figA5_pr_datasets + + figA5e: + description: Fig A5e, Pattern of TAS standard deviation after averaging across models, scaled + by global T change + realms: [atmos] + scripts: + calc_map_stddev_scaleT_tas: + label: &id1277 [2081-2100 relative to 1986-2005] + periods: &id1278 [2081] + scenarios: &id1279 [rcp26, rcp45, rcp60, rcp85] + script: tebaldi21esd/calc_pattern_interscenario_stddev_scaleT.ncl + time_avg: annualclim + plot_map_stddev_scaleT_tas: + ancestors: [tas, calc_map_stddev_scaleT_tas] + colormap: $diag_scripts/shared/plot/rgb/ipcc_temp_seq_14.rgb + diagscript: calc_map_stddev_scaleT_mmm_stipp + diff_levs: [0, 0.2, 0.4, 0.6, 0.8, 1, 1.2, 1.4, 1.6] + label: *id1277 + periods: *id1278 + projection: Robinson + scenarios: *id1279 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: Interscenario std. dev. of TAS change ~C~ scaled by global + T, CMIP5 + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP5, + short_name: tas, start_year: 2081} + additional_datasets: *figA5_tas_datasets + + figA5d: + description: FigA5d, Intermodel standard deviation of PR pattern scaled by + global T change + realms: [atmos] + scripts: + calc_map_stddev_scaleT_pr: + label: &id1280 [2081-2100 relative to 1986-2005] + percent: 1 + periods: &id1281 [2081] + scenarios: &id1282 [rcp26, rcp45, rcp60, rcp85] + script: tebaldi21esd/calc_pattern_intermodel_stddev_scaleT.ncl + time_avg: annualclim + plot_map_stddev_scaleT_pr: + ancestors: [pr, calc_map_stddev_scaleT_pr] + colormap: $diag_scripts/shared/plot/rgb/ipcc_prec_seq_7.rgb + diagscript: calc_map_stddev_scaleT_mmm_stipp + diff_levs: [0, 3, 6, 9, 12, 15, 18] + label: *id1280 + max_hori: 2 + max_vert: 2 + percent: 1 + periods: *id1281 + projection: Robinson + scenarios: *id1282 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: Intermodel std. dev. of PR change ~C~ scaled by global T, CMIP5 + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP5, + short_name: tas, start_year: 2081} + pr: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP5, short_name: pr, + start_year: 2081} + additional_datasets: *figA5_pr_datasets + + figA5c: + description: FigA5c, Intermodel standard deviation of TAS pattern scaled by + global T change + realms: [atmos] + scripts: + calc_map_stddev_scaleT_tas: + label: &id1283 [2081-2100 relative to 1986-2005] + periods: &id1284 [2081] + scenarios: &id1285 [rcp26, rcp45, rcp60, rcp85] + script: tebaldi21esd/calc_pattern_intermodel_stddev_scaleT.ncl + time_avg: annualclim + plot_map_stddev_scaleT_tas: + ancestors: [tas, calc_map_stddev_scaleT_tas] + colormap: $diag_scripts/shared/plot/rgb/ipcc_temp_seq_9.rgb + diagscript: calc_map_stddev_scaleT_mmm_stipp + diff_levs: [0, 0.2, 0.4, 0.6, 0.8, 1, 1.2, 1.4, 1.6] + label: *id1283 + periods: *id1284 + projection: Robinson + scenarios: *id1285 + script: tebaldi21esd/plot_pattern.ncl + sig: false + time_avg: annualclim + title: Intermodel std. dev. 
of TAS change ~C~ scaled by global + T, CMIP5 + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP5, + short_name: tas, start_year: 2081} + additional_datasets: *figA5_tas_datasets + + table1: + description: Table1, warming level crossing for given ssp scenarios and warming + levels. + realms: [atmos] + scripts: + calc_warming_level_tas: + begin_ref_year: 1995 + end_ref_year: 2014 + eyears: [2014, 2100] + label: [SSP1-1.9, SSP1-2.6, SSP2-4.5, SSP3-7.0, SSP5-8.5] + offset: 0.84 + scenarios: [ssp119, ssp126, ssp245, ssp370, ssp585] + script: tebaldi21esd/calc_table_warming_level.ncl + syears: [1995, 2015] + warming_levels: [1.5, 2.0, 3.0, 4.0, 5.0] + plot_warming_level_tas: + ancestors: [tas, calc_warming_level_tas] + eyears: [2014, 2100] + label: [SSP1-1.9, SSP1-2.6, SSP2-4.5, SSP3-7.0, SSP5-8.5] + scenarios: [ssp119, ssp126, ssp245, ssp370, ssp585] + script: tebaldi21esd/plot_table_warming_level.ncl + syears: [1995, 2015] + title: Warming level crossings relative to 1850-1900 + warming_levels: [1.5, 2.0, 3.0, 4.0, 5.0] + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, project: CMIP6, short_name: tas, start_year: 2015} + additional_datasets: + - *id005 + - *id006 + - *id007 + - *id008 + - *id009 + - *id010 + - *id011 + - *id012 + - *id013 + - *id014 + - *id015 + - *id018 + - *id019 + - *id020 + - *id021 + - *id023 + - *id024 + - *id025 + - *id026 + - *id027 + - *id028 + - *id029 +# - *id030 + - *id031 + - *id033 + - *id034 + - *id035 + - *id036 + - *id037 + - *id038 + - *id039 + - *id041 + - *id042 + - *id043 + - *id044 + - *id045 + - *id046 + - *id048 + - *id049 + - *id050 + - *id051 + - *id052 + - *id054 + - *id055 + - *id056 + - *id057 + - *id059 + - *id060 + - *id061 + - *id062 + - *id063 + - *id064 + - *id065 +# - *id066 + - *id067 + - *id069 + - *id070 + - *id071 + - *id072 + - *id073 + - *id074 + - *id075 + - *id076 + - *id077 + - *id078 + - *id079 + - *id080 + - *id081 + - *id083 + - *id084 + - *id085 + - *id086 + - *id087 + - *id090 + - *id091 + - *id092 + - *id093 + - *id095 + - *id096 + - *id097 + - *id098 + - *id099 + - *id100 + - *id101 +# - *id102 + - *id103 + - *id105 + - *id106 + - *id107 + - *id108 + - *id109 + - *id110 + - *id111 + - *id113 + - *id114 + - *id115 + - *id116 + - *id202 + - *id203 + - *id204 + - *id205 + - *id206 + - *id207 + - *id208 + - *id209 + - *id210 + - *id211 + - *id212 + - *id213 + - *id214 + - *id215 + - *id216 + - *id217 + - *id218 + - *id219 + - *id220 + - *id221 + - *id222 + - *id223 +# - *id224 + - *id225 + - *id226 + - *id227 + - *id228 + - *id229 + - *id230 + - *id231 + - *id232 + - *id117 + - *id118 + - *id119 + - *id120 + - *id121 + - *id122 + - *id123 + - *id124 + - *id125 + - *id126 + - *id127 + - *id128 + - *id129 + - *id130 + - *id131 + - *id132 + - *id133 + - *id134 + - *id135 + - *id136 + - *id137 + - *id138 + - *id139 + - *id140 + - *id141 + - *id142 + - *id143 + - *id144 + - *id145 + - *id146 + - *id147 + - *id148 +# - *id149 + - *id150 + - *id151 + - *id152 + - *id153 + - *id154 + - *id155 + - *id156 + + tableA3_landonly: + description: TableA3, Projected land-only CMIP6 TAS changes under the five scenarios + by 2041-2060 and 2081-2100 relative to the historical baseline of 1995-2014. 
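+    # The `spread: 1.64` factor below presumably multiplies the across-model
+    # standard deviation: +/-1.64 sigma spans the 5-95% range of a normal
+    # distribution, e.g. a stddev of 0.5 K gives a half-width of 1.64 * 0.5 = 0.82 K.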
+ realms: [atmos] + scripts: + calc_table_tas: + begin_ref_year: 1995 + end_ref_year: 2014 + eyears: &id1286 [2060, 2100] + label: &id1287 [SSP1-1.9, SSP1-2.6, SSP2-4.5, SSP3-7.0, SSP5-8.5] + scenarios: &id1288 [ssp119, ssp126, ssp245, ssp370, ssp585] + script: tebaldi21esd/calc_table_changes.ncl + spread: 1.64 + syears: &id1289 [2041, 2081] + plot_table_tas: + ancestors: [tas, calc_table_tas] + begin_ref_year: 1995 + end_ref_year: 2014 + eyears: *id1286 + label: *id1287 + scenarios: *id1288 + script: tebaldi21esd/plot_table_changes.ncl + spread: 1.64 + syears: *id1289 + title: Table of CMIP6 land-only TAS change relative to 1995-2014 (~S~o~N~ + C) + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, preprocessor: preproc_land_only, project: CMIP6, + short_name: tas, start_year: 2015} + additional_datasets: &tableA3_datasets + - *id005 + - *id006 + - *id007 + - *id008 + - *id009 + - *id010 + - *id011 + - *id012 + - *id013 + - *id014 + - *id015 + - *id016 + - *id017 + - *id018 + - *id019 + - *id020 + - *id021 + - *id022 + - &id1290 {dataset: CIESM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp126, grid: gr, + institute: THU, start_year: 2015} + - *id023 + - *id024 + - *id025 + - *id026 + - *id027 + - *id028 + - *id029 +# - *id030 + - *id031 + - *id032 + - *id033 + - *id034 + - *id035 + - *id036 + - *id037 + - *id038 + - *id039 + - *id874 + - *id040 + - *id041 + - *id042 + - *id043 + - *id044 + - *id045 + - *id046 + - *id047 + - *id048 + - *id049 + - *id050 + - *id051 + - *id052 + - *id053 + - *id054 + - *id055 + - *id056 + - *id057 + - *id058 + - &id1291 {dataset: CIESM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp245, grid: gr, + institute: THU, start_year: 2015} + - *id059 + - *id060 + - *id061 + - *id062 + - *id063 + - *id064 + - *id065 +# - *id066 + - *id067 + - *id068 + - *id069 + - *id070 + - *id071 + - *id072 + - *id073 + - *id074 + - *id075 + - *id875 + - *id076 + - *id077 + - *id078 + - *id079 + - *id080 + - *id081 + - *id082 + - *id083 + - *id084 + - *id085 + - *id086 + - *id087 + - *id088 + - *id089 + - *id090 + - *id091 + - *id092 + - *id093 + - *id094 + - *id095 + - *id096 + - *id097 + - *id098 + - *id099 + - *id100 + - *id101 +# - *id102 + - *id103 + - *id104 + - *id105 + - *id106 + - *id107 + - *id108 + - *id109 + - *id110 + - *id111 + - *id876 + - *id112 + - *id113 + - *id114 + - *id115 + - *id116 + - *id202 + - *id203 + - *id204 + - *id205 + - *id206 + - *id207 + - *id208 + - *id209 + - *id210 + - *id211 + - *id212 + - *id213 + - *id214 + - *id215 + - *id216 + - *id217 + - *id218 + - *id219 + - *id220 + - *id221 + - *id222 + - *id223 +# - *id224 + - *id225 + - *id226 + - *id227 + - *id228 + - *id229 + - *id230 + - *id231 + - *id232 + - *id117 + - *id118 + - *id119 + - *id120 + - *id121 + - *id122 + - *id123 + - *id124 + - *id125 + - *id126 + - *id127 + - *id128 + - *id129 + - *id130 + - *id131 + - *id132 + - *id133 + - *id134 + - *id135 + - *id136 + - *id137 + - *id138 + - *id139 + - *id140 + - *id141 + - *id142 + - *id143 + - *id144 + - *id145 + - *id146 + - *id147 + - *id148 +# - *id149 + - *id150 + - *id151 + - *id152 + - *id153 + - *id154 + - *id155 + - *id156 + - *id233 + - *id234 + - *id235 + - *id236 + - *id237 + - *id403 + - *id238 + - *id239 + - *id240 + - *id194 + - &id1292 {dataset: TaiESM1, end_year: 2100, ensemble: r1i1p1f1, exp: ssp370, + grid: gn, institute: AS-RCEC, start_year: 2015} + + tableA3_oceanonly: + description: TableA3, Projected ocean-only CMIP6 TAS changes under the five scenarios + by 2041-2060 and 2081-2100 relative to the historical 
baseline of 1995-2014. + realms: [atmos] + scripts: + calc_table_tas: + begin_ref_year: 1995 + end_ref_year: 2014 + eyears: &id1293 [2060, 2100] + label: &id1294 [SSP1-1.9, SSP1-2.6, SSP2-4.5, SSP3-7.0, SSP5-8.5] + scenarios: &id1295 [ssp119, ssp126, ssp245, ssp370, ssp585] + script: tebaldi21esd/calc_table_changes.ncl + spread: 1.64 + syears: &id1296 [2041, 2081] + yaxis: Relative to 1995-2014 (~S~o~N~ C) + plot_table_tas: + ancestors: [tas, calc_table_tas] + begin_ref_year: 1995 + end_ref_year: 2014 + eyears: *id1293 + label: *id1294 + scenarios: *id1295 + script: tebaldi21esd/plot_table_changes.ncl + spread: 1.64 + syears: *id1296 + title: Table of CMIP6 ocean-only TAS change relative to 1995-2014 (~S~o~N~ + C) + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, preprocessor: preproc_ocean_only, project: CMIP6, + short_name: tas, start_year: 2015} + additional_datasets: *tableA3_datasets + + tableA3_global: + description: TableA3, Projected global CMIP6 TAS changes under the five scenarios + by 2041-2060 and 2081-2100 relative to the historical baseline of 1995-2014. + realms: [atmos] + scripts: + calc_table_tas: + begin_ref_year: 1995 + end_ref_year: 2014 + eyears: &id2293 [2060, 2100] + label: &id2294 [SSP1-1.9, SSP1-2.6, SSP2-4.5, SSP3-7.0, SSP5-8.5] + scenarios: &id2295 [ssp119, ssp126, ssp245, ssp370, ssp585] + script: tebaldi21esd/calc_table_changes.ncl + spread: 1.64 + syears: &id2296 [2041, 2081] + yaxis: Relative to 1995-2014 (~S~o~N~ C) + plot_table_tas: + ancestors: [tas, calc_table_tas] + begin_ref_year: 1995 + end_ref_year: 2014 + eyears: *id2293 + label: *id2294 + scenarios: *id2295 + script: tebaldi21esd/plot_table_changes.ncl + spread: 1.64 + syears: *id2296 + title: Table of CMIP6 global TAS change relative to 1995-2014 (~S~o~N~ + C) + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP6, + short_name: tas, start_year: 2015} + additional_datasets: *tableA3_datasets + + tableA4_landonly: + description: TableA4, Projected land-only CMIP6 PR changes under the five scenarios + by 2041-2060 and 2081-2100 relative to the historical baseline of 1995-2014. 
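+    # As in the TAS tables above, each table is built in two steps linked via
+    # `ancestors`: calc_table_changes.ncl derives the change statistics and
+    # plot_table_changes.ncl renders the table.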
+ realms: [atmos] + scripts: + calc_table_pr: + begin_ref_year: 1995 + end_ref_year: 2014 + eyears: &id1297 [2060, 2100] + label: &id1298 [SSP1-1.9, SSP1-2.6, SSP2-4.5, SSP3-7.0, SSP5-8.5] + scenarios: &id1299 [ssp119, ssp126, ssp245, ssp370, ssp585] + script: tebaldi21esd/calc_table_changes.ncl + spread: 1.64 + syears: &id1300 [2041, 2081] + plot_table_pr: + ancestors: [pr, calc_table_pr] + eyears: *id1297 + label: *id1298 + scenarios: *id1299 + script: tebaldi21esd/plot_table_changes.ncl + syears: *id1300 + title: Table of CMIP6 land-only PR change relative to 1995-2014 (%) + themes: [phys] + variables: + pr: {end_year: 2100, mip: Amon, preprocessor: preproc_land_only, project: CMIP6, + short_name: pr, start_year: 2015} + additional_datasets: &tableA4_datasets + - *id005 + - *id006 + - *id007 + - *id008 + - *id009 + - *id010 + - *id011 + - *id012 + - *id013 + - *id014 + - *id015 + - *id016 + - *id017 + - *id018 + - *id019 + - *id020 + - *id021 + - *id022 + - *id1290 + - *id023 + - *id024 + - *id025 + - *id026 + - *id027 + - *id028 + - *id029 +# - *id030 + - *id031 + - *id032 + - *id033 + - *id034 + - *id035 + - *id036 + - *id037 + - *id038 + - *id039 + - *id040 + - *id041 + - *id042 + - *id043 + - *id044 + - *id045 + - *id046 + - *id047 + - *id048 + - *id049 + - *id050 + - *id051 + - *id052 + - *id053 + - *id054 + - *id055 + - *id056 + - *id057 + - *id058 + - *id1291 + - *id059 + - *id060 + - *id061 + - *id062 + - *id063 + - *id064 + - *id065 +# - *id066 + - *id067 + - *id068 + - *id069 + - *id070 + - *id071 + - *id072 + - *id073 + - *id074 + - *id075 + - *id076 + - *id077 + - *id078 + - *id079 + - *id080 + - *id081 + - *id082 + - *id083 + - *id084 + - *id085 + - *id086 + - *id087 + - *id088 + - *id089 + - *id090 + - *id091 + - *id092 + - *id093 + - *id094 + - *id095 + - *id096 + - *id097 + - *id098 + - *id099 + - *id100 + - *id101 +# - *id102 + - *id103 + - *id104 + - *id105 + - *id106 + - *id107 + - *id108 + - *id109 + - *id110 + - *id111 + - *id112 + - *id113 + - *id114 + - *id115 + - *id116 + - *id202 + - *id203 + - *id204 + - *id205 + - *id206 + - *id207 + - *id208 + - *id209 + - *id210 + - *id211 + - *id212 + - *id213 + - *id214 + - *id215 + - *id216 + - *id217 + - *id218 + - *id219 + - *id220 + - *id221 + - *id222 + - *id223 +# - *id224 + - *id225 + - *id226 + - *id227 + - *id228 + - *id229 + - *id230 + - *id231 + - *id232 + - *id117 + - *id118 + - *id119 + - *id120 + - *id121 + - *id122 + - *id123 + - *id124 + - *id125 + - *id126 + - *id127 + - *id128 + - *id129 + - *id130 + - *id131 + - *id132 + - *id133 + - *id134 + - *id135 + - *id136 + - *id137 + - *id138 + - *id139 + - *id140 + - *id141 + - *id142 + - *id143 + - *id144 + - *id145 + - *id146 + - *id147 + - *id148 +# - *id149 + - *id150 + - *id151 + - *id152 + - *id153 + - *id154 + - *id155 + - *id156 + - *id233 + - *id234 + - *id235 + - *id236 + - *id237 + - *id238 + - *id239 + - *id240 + - *id194 + - *id1292 + + tableA4_oceanonly: + description: tableA4, Projected ocean-only CMIP6 PR changes under the five scenarios + by 2041-2060 and 2081-2100 relative to the historical baseline of 1995-2014. 
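+    # `preproc_land_only` and `preproc_ocean_only` are preprocessor profiles
+    # defined earlier in the recipe; they presumably mask out ocean or land
+    # grid cells before averaging. PR changes are reported relative to the
+    # 1995-2014 baseline in percent, cf. the `(%)` in the titles.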
+ realms: [atmos] + scripts: + calc_table_pr: + begin_ref_year: 1995 + end_ref_year: 2014 + eyears: &id1301 [2060, 2100] + label: &id1302 [SSP1-1.9, SSP1-2.6, SSP2-4.5, SSP3-7.0, SSP5-8.5] + scenarios: &id1303 [ssp119, ssp126, ssp245, ssp370, ssp585] + script: tebaldi21esd/calc_table_changes.ncl + spread: 1.64 + syears: &id1304 [2041, 2081] + plot_table_pr: + ancestors: [pr, calc_table_pr] + begin_ref_year: 1995 + end_ref_year: 2014 + eyears: *id1301 + label: *id1302 + scenarios: *id1303 + script: tebaldi21esd/plot_table_changes.ncl + spread: 1.64 + syears: *id1304 + title: Table of CMIP6 ocean-only PR change relative to 1995-2014 (%) + themes: [phys] + variables: + pr: {end_year: 2100, mip: Amon, preprocessor: preproc_ocean_only, project: CMIP6, + short_name: pr, start_year: 2015} + additional_datasets: *tableA4_datasets + + tableA4_global: + description: TableA4, Projected global CMIP6 PR changes under the five scenarios + by 2041-2060 and 2081-2100 relative to the historical baseline of 1995-2014. + realms: [atmos] + scripts: + calc_table_pr: + begin_ref_year: 1995 + end_ref_year: 2014 + eyears: &id2301 [2060, 2100] + label: &id2302 [SSP1-1.9, SSP1-2.6, SSP2-4.5, SSP3-7.0, SSP5-8.5] + scenarios: &id2303 [ssp119, ssp126, ssp245, ssp370, ssp585] + script: tebaldi21esd/calc_table_changes.ncl + spread: 1.64 + syears: &id2304 [2041, 2081] + plot_table_pr: + ancestors: [pr, calc_table_pr] + begin_ref_year: 1995 + end_ref_year: 2014 + eyears: *id2301 + label: *id2302 + scenarios: *id2303 + script: tebaldi21esd/plot_table_changes.ncl + spread: 1.64 + syears: *id2304 + title: Table of CMIP6 global PR change relative to 1995-2014 (%) + themes: [phys] + variables: + pr: {end_year: 2100, mip: Amon, preprocessor: preproc_map, project: CMIP6, + short_name: pr, start_year: 2015} + additional_datasets: *tableA4_datasets + + tableA6_pr_cmip5: + description: Table A6, Projected PR change for CMIP5 relative to 1986-2005 + realms: [atmos] + scripts: + plot_ts_line_mean_spread_pr: + ancestors: [pr, ts_line_mean_spread_pr] + begin_ref_year: 1986 + end_ref_year: 2005 + eyears: &id1305 [2060, 2100] + label: &id1306 [RCP2.6, RCP4.5, RCP8.5] + scenarios: &id1307 [rcp26, rcp45, rcp85] + script: tebaldi21esd/plot_table_changes.ncl + spread: 1.64 + styleset: CMIP6 + syears: &id1308 [2041, 2081] + title: Table of CMIP5 global PR change relative to 1986-2005 (%) + ts_line_mean_spread_pr: + begin_ref_year: 1986 + end_ref_year: 2005 + eyears: *id1305 + label: *id1306 + scenarios: *id1307 + script: tebaldi21esd/calc_table_changes.ncl + spread: 1.64 + styleset: CMIP6 + syears: *id1308 + themes: [phys] + variables: + pr: {end_year: 2100, mip: Amon, project: CMIP5, short_name: pr, start_year: 2006} + additional_datasets: &tableA6_cmip5_datasets + - *id425 + - *id426 + - *id427 + - *id428 + - *id429 + - *id430 + - *id431 + - &id1313 {dataset: CESM1-WACCM, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: NSF-DOE-NCAR, start_year: 1986} + - *id432 + - *id433 + - *id434 + - *id435 + - *id436 + - *id437 + - *id438 + - *id439 + - &id1314 {dataset: IPSL-CM5A-LR, end_year: 2005, ensemble: r4i1p1, exp: historical, + institute: IPSL, start_year: 1986} + - *id440 + - *id441 + - *id442 + - *id443 + - *id444 + - *id445 + - *id446 + - *id447 + - *id448 + - *id449 + - *id450 + - *id451 + - *id452 + - *id453 + - *id454 + - *id455 + - *id456 + - *id457 + - *id458 + - *id459 + - *id460 + - *id461 + - *id462 + - *id463 + - *id464 + - *id465 + - *id466 + - *id825 + - *id467 + - *id468 + - *id469 + - *id470 + - *id471 
+ - *id472 + - *id473 + - *id474 + - *id475 + - *id476 + - *id477 + - *id478 + - *id479 + - *id826 + - *id480 + - *id481 + - *id482 + - *id483 + - *id484 + - *id485 + - *id827 + - *id486 + - *id487 + - *id488 + - *id489 + - *id490 + - *id491 + - *id828 + - *id492 + - *id493 + - *id494 + - *id495 + - *id496 + - *id497 + - *id498 + - *id499 + - *id500 + - *id501 + - *id502 + - *id503 + - *id504 + - *id505 + - *id506 + - *id507 + - *id508 + - *id829 + - *id509 + - *id510 + - *id511 + - *id512 + - *id513 + - *id514 + - *id515 + - *id516 + - *id517 + - *id518 + - *id519 + - *id520 + - *id521 + - *id522 + - *id523 + - *id524 + - *id525 + - *id526 + - *id830 + - *id527 + - *id528 + - *id529 + - *id530 + - *id531 + - *id532 + - *id533 + - *id534 + - *id535 + - *id536 + - *id537 + - *id538 + - *id539 + - *id540 + - *id541 + - *id542 + - *id543 + - *id831 + - *id544 + - *id545 + - *id546 + - *id547 + - *id548 + - *id549 + - *id550 + - *id551 + - *id552 + - *id553 + - *id554 + - *id555 + - *id556 + - *id557 + - *id558 + - *id559 + - *id560 + + tableA6_pr_cmip6: + description: Table A6, Projected PR change for CMIP6 relative to 1986-2005 + realms: [atmos] + scripts: + calc_table_pr: + begin_ref_year: 1986 + end_ref_year: 2005 + eyears: &id1309 [2060, 2100] + label: &id1310 [SSP1-2.6, SSP2-4.5, SSP5-8.5] + scenarios: &id1311 [ssp126, ssp245, ssp585] + script: tebaldi21esd/calc_table_changes.ncl + spread: 1.64 + styleset: CMIP6 + syears: &id1312 [2041, 2081] + title: Table of CMIP6 global PR change relative to 1986-2005 (%). + plot_table_pr: + ancestors: [pr, calc_table_pr] + begin_ref_year: 1986 + end_ref_year: 2005 + eyears: *id1309 + label: *id1310 + scenarios: *id1311 + script: tebaldi21esd/plot_table_changes.ncl + spread: 1.64 + styleset: CMIP6 + syears: *id1312 + title: Table of CMIP6 global PR change relative to 1986-2005 (%) + themes: [phys] + variables: + pr: {end_year: 2100, mip: Amon, project: CMIP6, short_name: pr, start_year: 2015} + additional_datasets: + - *id603 + - *id604 + - *id605 + - *id606 + - *id607 + - *id608 + - *id609 + - *id610 + - *id611 + - *id612 + - *id613 + - *id614 + - *id615 + - *id616 + - *id617 + - *id618 + - *id619 + - *id620 + - *id621 + - *id622 + - *id623 + - *id624 + - *id625 + - *id626 + - *id627 + - *id628 + - *id629 + - *id630 +# - *id631 + - *id632 + - *id633 + - *id634 + - *id635 + - *id636 + - *id637 + - *id638 + - *id639 + - *id640 + - *id641 + - *id005 + - *id006 + - *id007 + - *id008 + - *id009 + - *id010 + - *id011 + - *id012 + - *id013 + - *id014 + - *id015 + - *id016 + - *id017 + - *id018 + - *id019 + - *id020 + - *id021 + - *id022 + - *id1290 + - *id023 + - *id024 + - *id025 + - *id026 + - *id027 + - *id028 + - *id029 +# - *id030 + - *id031 + - *id032 + - *id033 + - *id034 + - *id035 + - *id036 + - *id037 + - *id038 + - *id039 + - *id040 + - *id041 + - *id042 + - *id043 + - *id044 + - *id045 + - *id046 + - *id047 + - *id048 + - *id049 + - *id050 + - *id051 + - *id052 + - *id053 + - *id054 + - *id055 + - *id056 + - *id057 + - *id058 + - *id1291 + - *id059 + - *id060 + - *id061 + - *id062 + - *id063 + - *id064 + - *id065 +# - *id066 + - *id067 + - *id068 + - *id069 + - *id070 + - *id071 + - *id072 + - *id073 + - *id074 + - *id075 + - *id076 + - *id077 + - *id078 + - *id079 + - *id080 + - *id081 + - *id082 + - *id083 + - *id084 + - *id085 + - *id086 + - *id087 + - *id088 + - *id089 + - *id090 + - *id091 + - *id092 + - *id093 + - *id094 + - *id095 + - *id096 + - *id097 + - *id098 + - *id099 + - *id100 + - *id101 +# - *id102 + - *id103 + - *id104 + - 
*id105 + - *id106 + - *id107 + - *id108 + - *id109 + - *id110 + - *id111 + - *id112 + + tableA6_tas_cmip5: + description: Table A6, Projected TAS change for CMIP5 relative to 1986-2005 + realms: [atmos] + scripts: + plot_ts_line_mean_spread_tas: + ancestors: [tas, ts_line_mean_spread_tas] + begin_ref_year: 1986 + end_ref_year: 2005 + eyears: &id1315 [2060, 2100] + label: &id1316 [RCP2.6, RCP4.5, RCP8.5] + scenarios: &id1317 [rcp26, rcp45, rcp85] + script: tebaldi21esd/plot_table_changes.ncl + spread: 1.64 + syears: &id1318 [2041, 2081] + title: Table of CMIP5 global TAS change relative to 1986-2005 (~S~o~N~ C) + ts_line_mean_spread_tas: + begin_ref_year: 1986 + end_ref_year: 2005 + eyears: *id1315 + label: *id1316 + scenarios: *id1317 + script: tebaldi21esd/calc_table_changes.ncl + spread: 1.64 + syears: *id1318 + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, project: CMIP5, short_name: tas, start_year: 2006} + additional_datasets: *tableA6_cmip5_datasets + + tableA6_tas_cmip6: + description: Table A6, Projected TAS change for CMIP6 relative to 1986-2005 + realms: [atmos] + scripts: + calc_table_tas: + begin_ref_year: 1986 + end_ref_year: 2005 + eyears: &id1320 [2060, 2100] + label: &id1321 [SSP1-2.6, SSP2-4.5, SSP5-8.5] + model_nr: true + scenarios: &id1322 [ssp126, ssp245, ssp585] + script: tebaldi21esd/calc_table_changes.ncl + spread: 1.64 + syears: &id1323 [2041, 2081] + title: Relative to 1986-2005 (~S~o~N~ C) + plot_table_tas: + ancestors: [tas, calc_table_tas] + begin_ref_year: 1986 + end_ref_year: 2005 + eyears: *id1320 + label: *id1321 + scenarios: *id1322 + script: tebaldi21esd/plot_table_changes.ncl + spread: 1.64 + syears: *id1323 + title: Table of CMIP6 global TAS change relative to 1986-2005 (~S~o~N~ C) + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, project: CMIP6, short_name: tas, start_year: 2015} + additional_datasets: + - *id603 + - *id604 + - *id605 + - *id606 + - *id607 + - *id608 + - *id609 + - *id610 + - *id611 + - *id612 + - *id613 + - *id614 + - *id615 + - *id616 + - *id617 + - *id618 + - *id619 + - *id620 + - *id621 + - *id622 + - *id623 + - *id624 + - *id625 + - *id626 + - *id627 + - *id628 + - *id629 + - *id630 +# - *id631 + - *id632 + - *id633 + - *id634 + - *id635 + - *id636 + - *id637 + - *id638 + - *id639 + - *id640 + - *id641 + - *id005 + - *id006 + - *id007 + - *id008 + - *id009 + - *id010 + - *id011 + - *id012 + - *id013 + - *id014 + - *id015 + - *id016 + - *id017 + - *id018 + - *id019 + - *id020 + - *id021 + - *id022 + - *id1290 + - *id023 + - *id024 + - *id025 + - *id026 + - *id027 + - *id028 + - *id029 +# - *id030 + - *id031 + - *id032 + - *id033 + - *id034 + - *id035 + - *id036 + - *id037 + - *id038 + - *id039 + #- *id874 + - *id040 + - *id041 + - *id042 + - *id043 + - *id044 + - *id045 + - *id046 + - *id047 + - *id048 + - *id049 + - *id050 + - *id051 + - *id052 + - *id053 + - *id054 + - *id055 + - *id056 + - *id057 + - *id058 + - *id1291 + - *id059 + - *id060 + - *id061 + - *id062 + - *id063 + - *id064 + - *id065 +# - *id066 + - *id067 + - *id068 + - *id069 + - *id070 + - *id071 + - *id072 + - *id073 + - *id074 + - *id075 + #- *id875 + - *id076 + - *id077 + - *id078 + - *id079 + - *id080 + - *id081 + - *id082 + - *id083 + - *id084 + - *id085 + - *id086 + - *id087 + - *id088 + - *id089 + - *id090 + - *id091 + - *id092 + - *id093 + - *id094 + - *id095 + - *id096 + - *id097 + - *id098 + - *id099 + - *id100 + - *id101 +# - *id102 + - *id103 + - *id104 + - *id105 + - *id106 + - *id107 + - *id108 + - *id109 + - *id110 + - 
*id111 + #- *id876 + - *id112 + + tableA8_cmip5: + description: TableA8, Table of CMIP5 warming level crossings for given RCP scenarios + and warming levels. + realms: [atmos] + scripts: + calc_warming_level_tas: + begin_ref_year: 1986 + end_ref_year: 2005 + eyears: [2005, 2100] + label: [RCP2.6, RCP4.5, RCP8.5] + offset: 0.65 # offset (K) of the 1986-2005 baseline relative to 1850-1900 + scenarios: [rcp26, rcp45, rcp85] + script: tebaldi21esd/calc_table_warming_level.ncl + syears: [1986, 2006] + warming_levels: [1.5, 2.0, 3.0, 4.0, 5.0] + plot_warming_level_tas: + ancestors: [tas, calc_warming_level_tas] + eyears: [2005, 2100] + label: [RCP2.6, RCP4.5, RCP8.5] + scenarios: [rcp26, rcp45, rcp85] + script: tebaldi21esd/plot_table_warming_level.ncl + syears: [1986, 2006] + title: CMIP5 warming level crossings relative to 1850-1900 + warming_levels: [1.5, 2.0, 3.0, 4.0, 5.0] + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, project: CMIP5, short_name: tas, start_year: 2006} + additional_datasets: + - *id427 + - *id429 + - *id435 + - *id436 + - *id438 + - *id440 + - *id441 + - *id443 + - *id444 + - *id446 + - *id447 + - *id451 + - *id453 + - *id456 + - *id461 + - *id463 + - *id464 + - *id465 + - *id825 + - *id828 + - *id830 + - *id564 + - *id565 + - {dataset: CSIRO-Mk3-6-0, end_year: 2005, ensemble: r1i1p1, exp: historical, + institute: CSIRO-QCCCE, start_year: 1986} + - {dataset: CESM1-CAM5, end_year: 2005, ensemble: r1i1p1, exp: historical, institute: NSF-DOE-NCAR, + start_year: 1986} + - {dataset: CCSM4, end_year: 2005, ensemble: r1i1p1, exp: historical, institute: NCAR, + start_year: 1986} + - {dataset: HadGEM2-ES, end_year: 2005, ensemble: r1i1p1, exp: historical, institute: MOHC, + start_year: 1986} + - {dataset: GISS-E2-R, end_year: 2005, ensemble: r1i1p1, exp: historical, institute: NASA-GISS, + start_year: 1986} + - {dataset: GISS-E2-H, end_year: 2005, ensemble: r1i1p1, exp: historical, institute: NASA-GISS, + start_year: 1986} + - {dataset: MIROC5, end_year: 2005, ensemble: r1i1p1, exp: historical, institute: MIROC, + start_year: 1986} + - {dataset: FIO-ESM, end_year: 2005, ensemble: r1i1p1, exp: historical, institute: FIO, + start_year: 1986} + - {dataset: CanESM2, end_year: 2005, ensemble: r1i1p1, exp: historical, institute: CCCma, + start_year: 1986} + - {dataset: CSIRO-Mk3-6-0, end_year: 2100, ensemble: r1i1p1, exp: rcp26, institute: CSIRO-QCCCE, + start_year: 2006} + - {dataset: CESM1-WACCM, end_year: 2099, ensemble: r2i1p1, exp: rcp26, institute: NSF-DOE-NCAR, + start_year: 2006} + - {dataset: CESM1-CAM5, end_year: 2100, ensemble: r1i1p1, exp: rcp26, institute: NSF-DOE-NCAR, + start_year: 2006} + - {dataset: bcc-csm1-1, end_year: 2100, ensemble: r1i1p1, exp: rcp26, institute: BCC, + start_year: 2006} + - {dataset: NorESM1-M, end_year: 2100, ensemble: r1i1p1, exp: rcp26, institute: NCC, + start_year: 2006} + - {dataset: CCSM4, end_year: 2100, ensemble: r1i1p1, exp: rcp26, institute: NCAR, + start_year: 2006} + - {dataset: NorESM1-ME, end_year: 2100, ensemble: r1i1p1, exp: rcp26, institute: NCC, + start_year: 2006} + - {dataset: GFDL-CM3, end_year: 2100, ensemble: r1i1p1, exp: rcp26, institute: NOAA-GFDL, + start_year: 2006} + - {dataset: GFDL-ESM2G, end_year: 2100, ensemble: r1i1p1, exp: rcp26, institute: NOAA-GFDL, + start_year: 2006} + - {dataset: GFDL-ESM2M, end_year: 2100, ensemble: r1i1p1, exp: rcp26, institute: NOAA-GFDL, + start_year: 2006} + - {dataset: HadGEM2-ES, end_year: 2100, ensemble: r1i1p1, exp: rcp26, institute: MOHC, + start_year: 2006} + - {dataset: GISS-E2-R, end_year: 2100, ensemble: r1i1p1, exp: rcp26, 
institute: NASA-GISS, + start_year: 2006} + - {dataset: GISS-E2-H, end_year: 2100, ensemble: r1i1p1, exp: rcp26, institute: NASA-GISS, + start_year: 2006} + - {dataset: MRI-CGCM3, end_year: 2100, ensemble: r1i1p1, exp: rcp26, institute: MRI, + start_year: 2006} + - {dataset: MPI-ESM-MR, end_year: 2100, ensemble: r1i1p1, exp: rcp26, institute: MPI-M, + start_year: 2006} + - {dataset: MPI-ESM-LR, end_year: 2100, ensemble: r1i1p1, exp: rcp26, institute: MPI-M, + start_year: 2006} + - {dataset: IPSL-CM5A-MR, end_year: 2100, ensemble: r1i1p1, exp: rcp26, institute: IPSL, + start_year: 2006} + - {dataset: MIROC-ESM-CHEM, end_year: 2100, ensemble: r1i1p1, exp: rcp26, institute: MIROC, + start_year: 2006} + - {dataset: IPSL-CM5A-LR, end_year: 2100, ensemble: r1i1p1, exp: rcp26, institute: IPSL, + start_year: 2006} + - {dataset: MIROC-ESM, end_year: 2100, ensemble: r1i1p1, exp: rcp26, institute: MIROC, + start_year: 2006} + - {dataset: FGOALS-g2, end_year: 2100, ensemble: r1i1p1, exp: rcp26, institute: LASG-CESS, + start_year: 2006} + - {dataset: MIROC5, end_year: 2100, ensemble: r1i1p1, exp: rcp26, institute: MIROC, + start_year: 2006} + - {dataset: EC-EARTH, end_year: 2100, ensemble: r8i1p1, exp: rcp26, institute: ICHEC, + start_year: 2006} + - {dataset: FIO-ESM, end_year: 2100, ensemble: r1i1p1, exp: rcp26, institute: FIO, + start_year: 2006} + - {dataset: bcc-csm1-1-m, end_year: 2100, ensemble: r1i1p1, exp: rcp26, institute: BCC, + start_year: 2006} + - {dataset: CanESM2, end_year: 2100, ensemble: r1i1p1, exp: rcp26, institute: CCCma, + start_year: 2006} + - {dataset: BNU-ESM, end_year: 2100, ensemble: r1i1p1, exp: rcp26, institute: BNU, + start_year: 2006} + - {dataset: CNRM-CM5, end_year: 2100, ensemble: r1i1p1, exp: rcp26, institute: CNRM-CERFACS, + start_year: 2006} + - {dataset: CSIRO-Mk3-6-0, end_year: 2100, ensemble: r1i1p1, exp: rcp45, institute: CSIRO-QCCCE, + start_year: 2006} + - {dataset: CESM1-WACCM, end_year: 2099, ensemble: r2i1p1, exp: rcp45, institute: NSF-DOE-NCAR, + start_year: 2006} + - {dataset: CESM1-CAM5, end_year: 2100, ensemble: r1i1p1, exp: rcp45, institute: NSF-DOE-NCAR, + start_year: 2006} + - {dataset: bcc-csm1-1, end_year: 2100, ensemble: r1i1p1, exp: rcp45, institute: BCC, + start_year: 2006} + - {dataset: NorESM1-M, end_year: 2100, ensemble: r1i1p1, exp: rcp45, institute: NCC, + start_year: 2006} + - {dataset: CCSM4, end_year: 2100, ensemble: r1i1p1, exp: rcp45, institute: NCAR, + start_year: 2006} + - {dataset: NorESM1-ME, end_year: 2100, ensemble: r1i1p1, exp: rcp45, institute: NCC, + start_year: 2006} + - {dataset: GFDL-CM3, end_year: 2100, ensemble: r1i1p1, exp: rcp45, institute: NOAA-GFDL, + start_year: 2006} + - {dataset: GFDL-ESM2G, end_year: 2100, ensemble: r1i1p1, exp: rcp45, institute: NOAA-GFDL, + start_year: 2006} + - {dataset: GFDL-ESM2M, end_year: 2100, ensemble: r1i1p1, exp: rcp45, institute: NOAA-GFDL, + start_year: 2006} + - {dataset: HadGEM2-ES, end_year: 2100, ensemble: r1i1p1, exp: rcp45, institute: MOHC, + start_year: 2006} + - {dataset: GISS-E2-R, end_year: 2100, ensemble: r1i1p1, exp: rcp45, institute: NASA-GISS, + start_year: 2006} + - {dataset: GISS-E2-H, end_year: 2100, ensemble: r1i1p1, exp: rcp45, institute: NASA-GISS, + start_year: 2006} + - {dataset: MRI-CGCM3, end_year: 2100, ensemble: r1i1p1, exp: rcp45, institute: MRI, + start_year: 2006} + - {dataset: MPI-ESM-MR, end_year: 2100, ensemble: r1i1p1, exp: rcp45, institute: MPI-M, + start_year: 2006} + - {dataset: MPI-ESM-LR, end_year: 2100, ensemble: r1i1p1, exp: rcp45, institute: 
MPI-M, + start_year: 2006} + - {dataset: IPSL-CM5A-MR, end_year: 2100, ensemble: r1i1p1, exp: rcp45, institute: IPSL, + start_year: 2006} + - {dataset: MIROC-ESM-CHEM, end_year: 2100, ensemble: r1i1p1, exp: rcp45, institute: MIROC, + start_year: 2006} + - {dataset: IPSL-CM5A-LR, end_year: 2100, ensemble: r1i1p1, exp: rcp45, institute: IPSL, + start_year: 2006} + - {dataset: MIROC-ESM, end_year: 2100, ensemble: r1i1p1, exp: rcp45, institute: MIROC, + start_year: 2006} + - {dataset: FGOALS-g2, end_year: 2100, ensemble: r1i1p1, exp: rcp45, institute: LASG-CESS, + start_year: 2006} + - {dataset: MIROC5, end_year: 2100, ensemble: r1i1p1, exp: rcp45, institute: MIROC, + start_year: 2006} + - {dataset: EC-EARTH, end_year: 2100, ensemble: r8i1p1, exp: rcp45, institute: ICHEC, + start_year: 2006} + - {dataset: FIO-ESM, end_year: 2100, ensemble: r1i1p1, exp: rcp45, institute: FIO, + start_year: 2006} + - {dataset: bcc-csm1-1-m, end_year: 2100, ensemble: r1i1p1, exp: rcp45, institute: BCC, + start_year: 2006} + - {dataset: CanESM2, end_year: 2100, ensemble: r1i1p1, exp: rcp45, institute: CCCma, + start_year: 2006} + - {dataset: BNU-ESM, end_year: 2100, ensemble: r1i1p1, exp: rcp45, institute: BNU, + start_year: 2006} + - {dataset: CNRM-CM5, end_year: 2100, ensemble: r1i1p1, exp: rcp45, institute: CNRM-CERFACS, + start_year: 2006} + - {dataset: CSIRO-Mk3-6-0, end_year: 2100, ensemble: r1i1p1, exp: rcp85, institute: CSIRO-QCCCE, + start_year: 2006} + - {dataset: CESM1-WACCM, end_year: 2099, ensemble: r2i1p1, exp: rcp85, institute: NSF-DOE-NCAR, + start_year: 2006} + - {dataset: CESM1-CAM5, end_year: 2100, ensemble: r1i1p1, exp: rcp85, institute: NSF-DOE-NCAR, + start_year: 2006} + - {dataset: bcc-csm1-1, end_year: 2100, ensemble: r1i1p1, exp: rcp85, institute: BCC, + start_year: 2006} + - {dataset: NorESM1-M, end_year: 2100, ensemble: r1i1p1, exp: rcp85, institute: NCC, + start_year: 2006} + - {dataset: CCSM4, end_year: 2100, ensemble: r1i1p1, exp: rcp85, institute: NCAR, + start_year: 2006} + - {dataset: NorESM1-ME, end_year: 2100, ensemble: r1i1p1, exp: rcp85, institute: NCC, + start_year: 2006} + - {dataset: GFDL-CM3, end_year: 2100, ensemble: r1i1p1, exp: rcp85, institute: NOAA-GFDL, + start_year: 2006} + - {dataset: GFDL-ESM2G, end_year: 2100, ensemble: r1i1p1, exp: rcp85, institute: NOAA-GFDL, + start_year: 2006} + - {dataset: GFDL-ESM2M, end_year: 2100, ensemble: r1i1p1, exp: rcp85, institute: NOAA-GFDL, + start_year: 2006} + - {dataset: HadGEM2-ES, end_year: 2100, ensemble: r1i1p1, exp: rcp85, institute: MOHC, + start_year: 2006} + - {dataset: GISS-E2-R, end_year: 2100, ensemble: r1i1p1, exp: rcp85, institute: NASA-GISS, + start_year: 2006} + - {dataset: GISS-E2-H, end_year: 2100, ensemble: r1i1p1, exp: rcp85, institute: NASA-GISS, + start_year: 2006} + - {dataset: MRI-CGCM3, end_year: 2100, ensemble: r1i1p1, exp: rcp85, institute: MRI, + start_year: 2006} + - {dataset: MPI-ESM-MR, end_year: 2100, ensemble: r1i1p1, exp: rcp85, institute: MPI-M, + start_year: 2006} + - {dataset: MPI-ESM-LR, end_year: 2100, ensemble: r1i1p1, exp: rcp85, institute: MPI-M, + start_year: 2006} + - {dataset: IPSL-CM5A-MR, end_year: 2100, ensemble: r1i1p1, exp: rcp85, institute: IPSL, + start_year: 2006} + - {dataset: MIROC-ESM-CHEM, end_year: 2100, ensemble: r1i1p1, exp: rcp85, institute: MIROC, + start_year: 2006} + - {dataset: IPSL-CM5A-LR, end_year: 2100, ensemble: r1i1p1, exp: rcp85, institute: IPSL, + start_year: 2006} + - {dataset: MIROC-ESM, end_year: 2100, ensemble: r1i1p1, exp: rcp85, institute: MIROC, + 
start_year: 2006} + - {dataset: FGOALS-g2, end_year: 2100, ensemble: r1i1p1, exp: rcp85, institute: LASG-CESS, + start_year: 2006} + - {dataset: MIROC5, end_year: 2100, ensemble: r1i1p1, exp: rcp85, institute: MIROC, + start_year: 2006} + - {dataset: EC-EARTH, end_year: 2100, ensemble: r8i1p1, exp: rcp85, institute: ICHEC, + start_year: 2006} + - {dataset: FIO-ESM, end_year: 2100, ensemble: r1i1p1, exp: rcp85, institute: FIO, + start_year: 2006} + - {dataset: bcc-csm1-1-m, end_year: 2100, ensemble: r1i1p1, exp: rcp85, institute: BCC, + start_year: 2006} + - {dataset: CanESM2, end_year: 2100, ensemble: r1i1p1, exp: rcp85, institute: CCCma, + start_year: 2006} + - {dataset: BNU-ESM, end_year: 2100, ensemble: r1i1p1, exp: rcp85, institute: BNU, + start_year: 2006} + - {dataset: CNRM-CM5, end_year: 2100, ensemble: r1i1p1, exp: rcp85, institute: CNRM-CERFACS, + start_year: 2006} + + tableA8_cmip6: + description: TableA8, Table of CMIP6 warming level crossing for given ssp scenarios + and warming levels. + realms: [atmos] + scripts: + calc_warming_level_tas: + begin_ref_year: 1986 + end_ref_year: 2005 + eyears: [2005, 2100] + offset: 0.65 + scenarios: [ssp126, ssp245, ssp585] + script: tebaldi21esd/calc_table_warming_level.ncl + syears: [1986, 2015] + warming_levels: [1.5, 2.0, 3.0, 4.0, 5.0] + plot_warming_level_tas: + ancestors: [tas, calc_warming_level_tas] + eyears: [2005, 2100] + label: [SSP1-2.6, SSP2-4.5, SSP5-8.5] + scenarios: [ssp126, ssp245, ssp585] + script: tebaldi21esd/plot_table_warming_level.ncl + syears: [1986, 2015] + title: CMIP6 Warming level crossings relative to 1850-1900 + warming_levels: [1.5, 2.0, 3.0, 4.0, 5.0] + themes: [phys] + variables: + tas: {end_year: 2100, mip: Amon, project: CMIP6, short_name: tas, start_year: 2015} + additional_datasets: + - *id603 + - *id604 + - *id605 + - *id606 + - *id607 + - *id608 + - *id609 + - *id610 + - *id611 + - *id612 + - *id613 + - *id614 + - *id615 + - *id616 + - *id617 + - *id618 + - *id619 + - *id620 + - *id621 + - *id623 + - *id624 + - *id625 + - *id626 + - *id627 + - *id628 + - *id629 + - *id630 +# - *id631 + - *id632 + - *id633 + - *id634 + - *id635 + - *id636 + - *id637 + - *id638 + - *id005 + - *id006 + - *id007 + - *id008 + - *id009 + - *id010 + - *id011 + - *id012 + - *id013 + - *id014 + - *id015 + - *id016 + - *id018 + - *id019 + - *id020 + - *id021 + - *id022 + - *id1290 + - *id023 + - *id024 + - *id025 + - *id026 + - *id027 + - *id028 + - *id029 +# - *id030 + - *id031 + - *id032 + - *id033 + - *id034 + - *id035 + - *id036 + - *id037 + - *id038 + - *id039 + #- *id874 + - *id041 + - *id042 + - *id043 + - *id044 + - *id045 + - *id046 + - *id048 + - *id049 + - *id050 + - *id051 + - *id052 + - *id053 + - *id054 + - *id055 + - *id056 + - *id057 + - *id058 + - *id1291 + - *id059 + - *id060 + - *id061 + - *id062 + - *id063 + - *id064 + - *id065 +# - *id066 + - *id067 + - *id068 + - *id069 + - *id070 + - *id071 + - *id072 + - *id073 + - *id074 + - *id075 + #- *id875 + - *id076 + - *id077 + - *id078 + - *id079 + - *id080 + - *id081 + - *id083 + - *id084 + - *id085 + - *id086 + - *id087 + - *id088 + - *id090 + - *id091 + - *id092 + - *id093 + - *id094 + - *id095 + - *id096 + - *id097 + - *id098 + - *id099 + - *id100 + - *id101 +# - *id102 + - *id103 + - *id104 + - *id105 + - *id106 + - *id107 + - *id108 + - *id109 + - *id110 + - *id111 + #- *id876 + - {dataset: CIESM, end_year: 2100, ensemble: r1i1p1f1, exp: ssp585, grid: gr, + institute: THU, start_year: 2015} diff --git a/esmvaltool/recipes/recipe_thermodyn_diagtool.yml 
b/esmvaltool/recipes/recipe_thermodyn_diagtool.yml new file mode 100644 index 0000000000..ab1adc3f28 --- /dev/null +++ b/esmvaltool/recipes/recipe_thermodyn_diagtool.yml @@ -0,0 +1,179 @@ +# recipe_thermodyn_diagtool.yml + +--- +documentation: + title: | + Diagnostic tool for several thermodynamic aspects of + the atmosphere and oceans. + + description: | + Recipe for the computation of various aspects associated with + the thermodynamics of the climate system, such as energy and + water mass budgets, meridional enthalpy transports, the Lorenz + Energy Cycle and the material entropy production. + + authors: + - lembo_valerio + - koldunov_nikolay + + maintainer: + - unmaintained + + references: + - lembo16climdyn + - lucarini14revgeop + + projects: + - esmval + - trr181 + +HADGEM: &hadgem_only + - {dataset: HadGEM3-GC31-LL, project: CMIP6, exp: historical, ensemble: r1i1p1f3, grid: gn, start_year: 2000, end_year: 2001} + +SFTLF_PIC: &pic_sftlf + - {dataset: HadGEM3-GC31-LL, project: CMIP6, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 2000, end_year: 2001} + - {dataset: CNRM-ESM2-1, project: CMIP6, exp: piControl, ensemble: r1i1p1f2, grid: gr, start_year: 2000, end_year: 2005} + +SFTLF_OTHER: &other_sftlf + - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2005} + - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2005} + - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2005} + - {dataset: MIROC-ESM-CHEM, exp: historical, project: CMIP5, ensemble: r1i1p1, start_year: 2000, end_year: 2005} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2005} + - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2005} + - {dataset: CNRM-CM6-1, project: CMIP6, exp: historical, ensemble: r1i1p1f2, grid: gr, start_year: 2000, end_year: 2005} + - {dataset: MRI-ESM2-0, project: CMIP6, exp: historical, ensemble: r1i1p1f1, grid: gn, start_year: 2000, end_year: 2005} + +OTHER: &other_data + - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2005} + - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2005} + - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2005} + - {dataset: MIROC-ESM-CHEM, exp: historical, project: CMIP5, ensemble: r1i1p1, start_year: 2000, end_year: 2005} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2005} + - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2005} + - {dataset: CNRM-CM6-1, project: CMIP6, exp: historical, ensemble: r1i1p1f2, grid: gr, start_year: 2000, end_year: 2005} + - {dataset: CNRM-ESM2-1, project: CMIP6, exp: historical, ensemble: r1i1p1f2, grid: gr, start_year: 2000, end_year: 2005} + - {dataset: MRI-ESM2-0, project: CMIP6, exp: historical, ensemble: r1i1p1f1, grid: gn, start_year: 2000, end_year: 2005} + +ALL: &all_data + - {dataset: HadGEM3-GC31-LL, project: CMIP6, exp: historical, ensemble: r1i1p1f3, grid: gn, start_year: 2000, end_year: 2001} + - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2005} + - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, 
start_year: 2000, end_year: 2005} + - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2005} + - {dataset: MIROC-ESM-CHEM, exp: historical, project: CMIP5, ensemble: r1i1p1, start_year: 2000, end_year: 2005} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2005} + - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2005} + - {dataset: CNRM-CM6-1, project: CMIP6, exp: historical, ensemble: r1i1p1f2, grid: gr, start_year: 2000, end_year: 2005} + - {dataset: CNRM-ESM2-1, project: CMIP6, exp: historical, ensemble: r1i1p1f2, grid: gr, start_year: 2000, end_year: 2005} + - {dataset: MRI-ESM2-0, project: CMIP6, exp: historical, ensemble: r1i1p1f1, grid: gn, start_year: 2000, end_year: 2005} + +preprocessors: + regrid_preproc: + regrid: + target_grid: 1.875x1.25 + lon_offset: true + lat_offset: true + scheme: area_weighted + +diagnostics: + Thermodyn_Diag: + description: Thermodynamics diagnostics + variables: + hfls: + mip: Amon + cmor_name: hfls + additional_datasets: *all_data + hfss: + mip: Amon + additional_datasets: *all_data + pr: + mip: Amon + additional_datasets: *all_data + ps: + mip: Amon + additional_datasets: *all_data + prsn: + mip: Amon + additional_datasets: *all_data + rlds: + mip: Amon + additional_datasets: *all_data + rlus: + mip: Amon + additional_datasets: *all_data + rlut: + mip: Amon + additional_datasets: *all_data + rsds: + mip: Amon + additional_datasets: *all_data + rsus: + mip: Amon + additional_datasets: *all_data + rsdt: + mip: Amon + additional_datasets: *all_data + rsut: + mip: Amon + additional_datasets: *all_data + ts: + mip: Amon + additional_datasets: *all_data + hus: + mip: Amon + additional_datasets: *all_data + tas: + mip: day + additional_datasets: *all_data + uas: + mip: day + additional_datasets: *all_data + vas_1: + mip: day + short_name: vas + preprocessor: regrid_preproc + additional_datasets: *hadgem_only + vas_2: &variable_settings + mip: day + short_name: vas + additional_datasets: *all_data + ta: + mip: day + additional_datasets: *all_data + ua_1: &variable_settings_ua + mip: day + short_name: ua + preprocessor: regrid_preproc + additional_datasets: *hadgem_only + ua_2: + mip: day + short_name: ua + additional_datasets: *all_data + va_1: &variable_settings_va + mip: day + short_name: va + preprocessor: regrid_preproc + additional_datasets: *hadgem_only + va_2: + mip: day + short_name: va + additional_datasets: *all_data + wap: + mip: day + additional_datasets: *all_data + sftlf_piC: + mip: fx + short_name: sftlf + additional_datasets: *pic_sftlf + sftlf_other: + mip: fx + short_name: sftlf + additional_datasets: *other_sftlf + scripts: + Thermodyn_Diag: + script: thermodyn_diagtool/thermodyn_diagnostics.py + wat: true + lec: true + entr: true + met: 3 + lsm: true diff --git a/esmvaltool/recipes/recipe_toymodel.yml b/esmvaltool/recipes/recipe_toymodel.yml new file mode 100644 index 0000000000..be41b1c2ab --- /dev/null +++ b/esmvaltool/recipes/recipe_toymodel.yml @@ -0,0 +1,57 @@ +# ESMValTool +# recipe_toymodel.yml +--- +documentation: + title: Generate artificial forecasts from observations + description: | + Tool for generating synthetic observations based on the model presented + in Weigel et al. (2008) QJRMS with an extension to consider non-stationary + distributions prescribing a linear trend. 
The toymodel allows the user to + generate an artificial forecast based on observations provided as input. + + authors: + - bellprat_omar + + maintainer: + - unmaintained + + projects: + - c3s-magic + + references: + - weigel08qjrms + + +datasets: + # - {dataset: IPSL-CM5A-LR, type: exp, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1999, end_year: 2000} + # - {dataset: MPI-ESM-LR, type: exp, project: CMIP5, exp: rcp85, ensemble: r1i1p1, start_year: 2020, end_year: 2050} + - {dataset: bcc-csm1-1, type: exp, project: CMIP5, exp: rcp45, ensemble: r1i1p1, start_year: 2051, end_year: 2060} + +preprocessors: + preproc: + regrid: + target_grid: bcc-csm1-1 + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + extract_region: + start_longitude: -40 + end_longitude: 40 + start_latitude: 30 + end_latitude: 50 + + +diagnostics: + toymodel: + description: Generate synthetic observations. + variables: + psl: + preprocessor: preproc + mip: Amon + + scripts: + main: + script: magic_bsc/toymodel.R + beta: 500 + number_of_members: 20 diff --git a/esmvaltool/recipes/recipe_toymodel_wp4.yml b/esmvaltool/recipes/recipe_toymodel_wp4.yml deleted file mode 100644 index d2a09c3594..0000000000 --- a/esmvaltool/recipes/recipe_toymodel_wp4.yml +++ /dev/null @@ -1,54 +0,0 @@ -# ESMValTool -# recipe_toymodel_wp4.yml ---- -documentation: - description: | - Tool for generating synthetic observations based on the model presented - in Weigel et al. (2008) QJRS with an extension to consider non-stationary - (2008) QJRS with an extension to consider non-stationary distributions - distributions prescribing a linear trend. The toymodel allows to - generate an aritifical forecast based on obsevations provided as input. - - - authors: - - bell_om - - projects: - - c3s-magic - - references: - - weigel - - -datasets: -# - {dataset: IPSL-CM5A-LR, type: exp, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1999, end_year: 2000} -# - {dataset: MPI-ESM-LR, type: exp, project: CMIP5, exp: rcp85, ensemble: r1i1p1, start_year: 2020, end_year: 2050} - - {dataset: bcc-csm1-1, type: exp, project: CMIP5, exp: rcp45, ensemble: r1i1p1, start_year: 2051, end_year: 2060} -preprocessors: - preproc: - regrid: - target_grid: bcc-csm1-1 - scheme: linear - mask_fillvalues: - threshold_fraction: 0.95 - extract_region: - start_longitude: -40 - end_longitude: 40 - start_latitude: 30 - end_latitude: 50 - - -diagnostics: - toymodel: - description: Generate synthetic observations. - variables: - psl: - preprocessor: preproc - mip: Amon - - scripts: - main: - script: magic_bsc/toymodel.r - beta: 0.7 - number_of_members: 2 diff --git a/esmvaltool/recipes/recipe_validation.yml b/esmvaltool/recipes/recipe_validation.yml index b01ec8bb36..c14f714e41 100644 --- a/esmvaltool/recipes/recipe_validation.yml +++ b/esmvaltool/recipes/recipe_validation.yml @@ -1,14 +1,25 @@ # ESMValTool +# recipe_validation.yml --- documentation: description: | Validation of CONTROL and EXPERIMENT datasets. + This recipe produces standard comparison plots for two models + and may include observational data. The standard comparison + metrics are 2D lat-lon differences (between control and experiment), + plain 2D lat-lon for each model, 1D (coordinate vs time) + zonal and meridional means for both control and experiment models. + Plots are produced for each season (winter DJF, spring MAM, summer JJA, + autumn SON) and for the entire year. + There are no restrictions in terms of data. 
authors: - - pred_va + - predoi_valeriu + + title: Standard comparison between two models (control and experiment, CMIP5 data). maintainer: - - pred_va + - predoi_valeriu datasets: - {dataset: MPI-ESM-LR, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2005} @@ -37,30 +48,36 @@ diagnostics: rsut: # TOA SW up all sky preprocessor: pp_rad additional_datasets: - - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, start_year: 2001, end_year: 2012, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, mip: Amon, level: L3B, version: v20160610, start_year: 2001, end_year: 2012, tier: 1} scripts: lat_lon: script: validation.py title: "" + cmip_era: CMIP5 control_model: MPI-ESM-LR exper_model: NorESM1-M observational_datasets: ['CERES-EBAF'] # list of at least one element; if no OBS wanted comment out analysis_type: lat_lon # use any of: lat_lon, meridional_mean, zonal_mean, vertical_mean[not implemented] - seasonal_analysis: False + seasonal_analysis: false # turn on to output per season + save_cubes: true # save each plotted cube in work zonal_mean: script: validation.py title: "" + cmip_era: CMIP5 control_model: MPI-ESM-LR exper_model: NorESM1-M observational_datasets: ['CERES-EBAF'] # list of at least one element; if no OBS wanted comment out analysis_type: zonal_mean # use any of: lat_lon, meridional_mean, zonal_mean, vertical_mean[not implemented] - seasonal_analysis: True + seasonal_analysis: true # turn on to output per season + save_cubes: true # save each plotted cube in work meridional_mean: script: validation.py title: "" + cmip_era: CMIP5 control_model: MPI-ESM-LR exper_model: NorESM1-M observational_datasets: ['CERES-EBAF'] # list of at least one element; if no OBS wanted comment out analysis_type: meridional_mean # use any of: lat_lon, meridional_mean, zonal_mean, vertical_mean[not implemented] - seasonal_analysis: True + seasonal_analysis: true # turn on to output per season + save_cubes: true # save each plotted cube in work diff --git a/esmvaltool/recipes/recipe_validation_CMIP6.yml b/esmvaltool/recipes/recipe_validation_CMIP6.yml new file mode 100644 index 0000000000..2eb03799ef --- /dev/null +++ b/esmvaltool/recipes/recipe_validation_CMIP6.yml @@ -0,0 +1,72 @@ +# ESMValTool +# recipe_validation_CMIP6.yml +--- +documentation: + description: | + Validation of CONTROL and EXPERIMENT datasets. + This recipe produces standard comparison plots for two models + and may include observational data. The standard comparison + metrics are 2D lat-lon differences (between control and experiment), + plain 2D lat-lon for each model, 1D (coordinate vs time) + zonal and meridional means for both control and experiment models. + Plots are produced for each season (winter DJF, spring MAM, summer JJA, + autumn SON) and for the entire year. + There are no restrictions in terms of data. + + authors: + - predoi_valeriu + + title: Standard comparison between two models (control and experiment, CMIP6 data). 
+ + maintainer: + - predoi_valeriu + +datasets: + - {dataset: IPSL-CM6A-LR, project: CMIP6, mip: Amon, exp: historical, ensemble: r1i1p1f1, grid: gr, start_year: 1900, end_year: 2000} + - {dataset: UKESM1-0-LL, project: CMIP6, mip: Amon, exp: historical, ensemble: r1i1p1f2, grid: gn, start_year: 1900, end_year: 2000} + +preprocessors: + pp_rad: + regrid: + target_grid: 1x1 + scheme: linear + +diagnostics: + validation_with_ERA-Interim: + description: "CMIP6 with ERA-Interim" + variables: + tas: + preprocessor: pp_rad + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, mip: Amon, type: reanaly, version: 1, start_year: 1980, end_year: 2000, tier: 3} + scripts: + lat_lon: + script: validation.py + title: "" + cmip_era: CMIP6 + control_model: UKESM1-0-LL + exper_model: IPSL-CM6A-LR + observational_datasets: ['ERA-Interim'] # list of at least one element; if no OBS wanted comment out + analysis_type: lat_lon # use any of: lat_lon, meridional_mean, zonal_mean, vertical_mean[not implemented] + seasonal_analysis: false # turn on to output per season + save_cubes: true # save each plotted cube in work + zonal_mean: + script: validation.py + title: "" + cmip_era: CMIP6 + control_model: UKESM1-0-LL + exper_model: IPSL-CM6A-LR + observational_datasets: ['ERA-Interim'] # list of at least one element; if no OBS wanted comment out + analysis_type: zonal_mean # use any of: lat_lon, meridional_mean, zonal_mean, vertical_mean[not implemented] + seasonal_analysis: true # turn on to output per season + save_cubes: true # save each plotted cube in work + meridional_mean: + script: validation.py + title: "" + cmip_era: CMIP6 + control_model: UKESM1-0-LL + exper_model: IPSL-CM6A-LR + observational_datasets: ['ERA-Interim'] # list of at least one element; if no OBS wanted comment out + analysis_type: meridional_mean # use any of: lat_lon, meridional_mean, zonal_mean, vertical_mean[not implemented] + seasonal_analysis: true # turn on to output per season + save_cubes: true # save each plotted cube in work diff --git a/esmvaltool/recipes/recipe_wenzel14jgr.yml b/esmvaltool/recipes/recipe_wenzel14jgr.yml new file mode 100644 index 0000000000..814da02a18 --- /dev/null +++ b/esmvaltool/recipes/recipe_wenzel14jgr.yml @@ -0,0 +1,661 @@ +# ESMValTool +# recipe_wenzel14jgr.yml +--- +documentation: + title: Emergent constraints on climate-carbon cycle feedbacks + + description: | + This recipe reproduces most of the figures of Wenzel et al. (2014). + + authors: + - wenzel_sabrina + + maintainer: + - unmaintained + + references: + - wenzel14jgr + + projects: + - embrace + +preprocessors: + + land_fraction_weighting: + weighting_landsea_fraction: + area_type: land + exclude: [ + 'GCP2018', + ] + + sea_fraction_weighting: + weighting_landsea_fraction: + area_type: sea + exclude: [ + 'GCP2018', 'HadGEM2-ES', 'CanESM2', 'IPSL-CM5A-LR', 'GFDL-ESM2M', 'MPI-ESM-LR', 'CESM1-BGC', 'NorESM1-ME', + ] + regrid: + target_grid: CanESM2 + scheme: area_weighted + + global_land: + weighting_landsea_fraction: + area_type: land + exclude: [ + 'GCP2018', 'HadGEM2-ES', 'CanESM2', 'IPSL-CM5A-LR', 'GFDL-ESM2M', 'MPI-ESM-LR', 'CESM1-BGC', 'NorESM1-ME', + ] + area_statistics: + operator: sum + + tropical_land: + weighting_landsea_fraction: + area_type: land + exclude: [ + 'GCP2018', 'HadGEM2-ES', 'CanESM2', 'IPSL-CM5A-LR', 'GFDL-ESM2M', 'MPI-ESM-LR', 'CESM1-BGC', 'NorESM1-ME', + ] + extract_region: + start_longitude: 0. + end_longitude: 360. + start_latitude: -30. + end_latitude: 30. 
+ area_statistics: + operator: sum + + global_ocean: + weighting_landsea_fraction: + area_type: sea + exclude: [ + 'GCP2018', 'HadGEM2-ES', 'CanESM2', 'IPSL-CM5A-LR', 'GFDL-ESM2M', 'MPI-ESM-LR', 'CESM1-BGC', 'NorESM1-ME', + ] + area_statistics: + operator: sum + + tropical: + extract_region: + start_longitude: 0. + end_longitude: 360. + start_latitude: -30. + end_latitude: 30. + area_statistics: + operator: mean + + +diagnostics: + + ### Wenzel et al. 2014, Fig. 01: Quantities used to diagnose GAMMA_LT ######################### + + diag_tsline_Fig1a: &Carbon_tsline + description: Fig 1a integrated NBP in the idealized coupled simulation + themes: + - phys + realms: + - land + variables: + nbp: &var_nbp + preprocessor: default #land_fraction_weighting + project: CMIP5 + mip: Lmon + exp: 1pctCO2 + ensemble: r1i1p1 + start_year: 1850 + end_year: 1989 + cumulate: True + additional_datasets: &2xCO2_datastet + - {dataset: CanESM2} + - {dataset: CESM1-BGC, start_year: 1 ,end_year: 140} + - {dataset: GFDL-ESM2M, start_year: 1 ,end_year: 140} + - {dataset: HadGEM2-ES, start_year: 1860 ,end_year: 1999} + - {dataset: IPSL-CM5A-LR} + #- {dataset: MIROC-ESM, start_year: 1 ,end_year: 140} + #- {dataset: MPI-ESM-LR} + - {dataset: NorESM1-ME, start_year: 1 ,end_year: 140} + scripts: + carbon_tsline_1a: &idealized_carbon + script: carbon_ec/carbon_tsline.ncl + ts_minlat: -30 + ts_maxlat: 30 + ts_minlon: 0 + ts_maxlon: 360 + ridx_start: 0 + ridx_end: 120 + ref_start: 1850 + ref_end: 1859 + multi_model_mean: False + plot_units: GtC + ts_maxyear: "1989" + ts_minyear: "1850" + time_avg: "yearly" + ts_anomaly: "anom" + area_opper: "sum" + align: True + styleset: CMIP5 # Plot style + xy_line_legend: True + + diag_tsline_Fig1b: + description: Fig 1b integrated NBP in the idealized uncoupled simulation + themes: + - phys + realms: + - land + variables: + nbp: + preprocessor: default #land_fraction_weighting + project: CMIP5 + mip: Lmon + exp: esmFixClim1 + ensemble: r1i1p1 + start_year: 1850 + end_year: 1989 + cumulate: True + additional_datasets: + - {dataset: CanESM2} + - {dataset: CESM1-BGC, start_year: 1 ,end_year: 140} + - {dataset: GFDL-ESM2M, start_year: 1 ,end_year: 140} + - {dataset: HadGEM2-ES, start_year: 1860 ,end_year: 1999} + - {dataset: IPSL-CM5A-LR} + #- {dataset: MIROC-ESM} + #- {dataset: MPI-ESM-LR} + - {dataset: NorESM1-ME, start_year: 1 ,end_year: 140} + scripts: + carbon_tsline_1b: + script: carbon_ec/carbon_tsline.ncl + ts_minlat: -30 + ts_maxlat: 30 + ts_minlon: 0 + ts_maxlon: 360 + ridx_start: 0 + ridx_end: 120 + ref_start: 1850 + ref_end: 1859 + plot_units: GtC + multi_model_mean: False + ts_maxyear: "1989" + ts_minyear: "1850" + time_avg: "yearly" + ts_anomaly: "anom" + area_opper: "sum" + align: True + styleset: CMIP5 # Plot style + xy_line_legend: True + + diag_tsline_Fig1c: + description: Fig 1c near surface temperature idealized coupled simulation + themes: + - phys + realms: + - atmos + variables: + tas: &var_tas + project: CMIP5 + mip: Amon + exp: 1pctCO2 + ensemble: r1i1p1 + start_year: 1850 + end_year: 1989 + additional_datasets: + - {dataset: CanESM2} + - {dataset: CESM1-BGC, start_year: 1 ,end_year: 140} + - {dataset: GFDL-ESM2M, start_year: 1 ,end_year: 140} + - {dataset: HadGEM2-ES, start_year: 1860 ,end_year: 1999} + - {dataset: IPSL-CM5A-LR} + #- {dataset: MIROC-ESM, start_year: 1 ,end_year: 140} + #- {dataset: MPI-ESM-LR} + - {dataset: NorESM1-ME, start_year: 1 ,end_year: 140} + scripts: + tas_tsline_1c: + script: carbon_ec/carbon_tsline.ncl + ts_minlat: -30 + ts_maxlat: 30 
+ ts_minlon: 0 + ts_maxlon: 360 + ridx_start: 0 + ridx_end: 120 + ref_start: 1850 + ref_end: 1859 + plot_units: K + multi_model_mean: False + ts_maxyear: "1989" + ts_minyear: "1850" + time_avg: "yearly" + ts_anomaly: "anom" + align: True + styleset: CMIP5 # Plot style + xy_line_legend: True + + ### Wenzel et al. 2014, Fig. 02: Quantities used to diagnose GAMMA_IAV ######################### + + diag_tsline_Fig2a: + description: Fig 2a integrated NBP in the historical coupled simulation + themes: + - phys + realms: + - land + variables: + nbp: + preprocessor: default #land_fraction_weighting + project: CMIP5 + mip: Lmon + exp: esmHistorical + ensemble: r1i1p1 + start_year: 1960 + end_year: 2005 + additional_datasets: + - {dataset: CanESM2, start_year: 1960 ,end_year: 2005} + - {dataset: CESM1-BGC, start_year: 1960 ,end_year: 2005} + - {dataset: GFDL-ESM2M, start_year: 1960 ,end_year: 2005} + #- {dataset: HadGEM2-ES, start_year: 1960 ,end_year: 2005} + - {dataset: IPSL-CM5A-LR, start_year: 1960 ,end_year: 2005} + #- {dataset: MIROC-ESM, start_year: 1960 ,end_year: 2005} + #- {dataset: MPI-ESM-LR, start_year: 1960 ,end_year: 2005} + - {dataset: NorESM1-ME, start_year: 1960 ,end_year: 2005} + scripts: + tsline_2a: + script: carbon_ec/carbon_tsline.ncl + ts_minlat: -30 + ts_maxlat: 30 + ts_minlon: 0 + ts_maxlon: 360 + ref_start: 1960 + ref_end: 1969 + plot_units: GtC y-1 + multi_model_mean: False + ts_maxyear: "2005" + ts_minyear: "1960" + time_avg: "yearly" + ts_anomaly: "anom" + area_opper: "sum" + volcanoes: True + styleset: CMIP5 # Plot style + run_ave: 2 + xy_line_legend: True + + diag_tsline_Fig2b: + description: Fig 2b integrated NBP in the idealized coupled simulation + themes: + - phys + realms: + - land + variables: + nbp: + preprocessor: default #land_fraction_weighting + project: CMIP5 + mip: Lmon + exp: 1pctCO2 + ensemble: r1i1p1 + start_year: 1850 + end_year: 1989 + additional_datasets: + - {dataset: CanESM2, start_year: 1890 ,end_year: 1989} + - {dataset: CESM1-BGC, start_year: 40 ,end_year: 90} + - {dataset: GFDL-ESM2M, start_year: 40 ,end_year: 90} + - {dataset: HadGEM2-ES, start_year: 1890 ,end_year: 1989} + - {dataset: IPSL-CM5A-LR, start_year: 1890 ,end_year: 1989} + #- {dataset: MIROC-ESM, start_year: 40 ,end_year: 90} + #- {dataset: MPI-ESM-LR} + - {dataset: NorESM1-ME, start_year: 40 ,end_year: 90} + scripts: + tsline_2b: + script: carbon_ec/carbon_tsline.ncl + ts_minlat: -30 + ts_maxlat: 30 + ts_minlon: 0 + ts_maxlon: 360 + ridx_start: 0 + ridx_end: 120 + ref_start: 1890 + ref_end: 1899 + plot_units: GtC y-1 + multi_model_mean: False + ts_maxyear: "2005" + ts_minyear: "1960" + time_avg: "yearly" + ts_anomaly: "anom" + area_opper: "sum" + align: True + styleset: CMIP5 # Plot style + run_ave: 2 + xy_line_legend: True + + diag_tsline_Fig2c: + description: Fig 2c integrated FGCO2 in the historical coupled simulation + variables: + fgco2: + preprocessor: sea_fraction_weighting + project: CMIP5 + mip: Omon + exp: esmHistorical + ensemble: r1i1p1 + start_year: 1960 + end_year: 2005 + additional_datasets: + - {dataset: CanESM2, start_year: 1960 ,end_year: 2005} + - {dataset: CESM1-BGC, start_year: 1960 ,end_year: 2005} + - {dataset: GFDL-ESM2M, start_year: 1960 ,end_year: 2005} + - {dataset: HadGEM2-ES, start_year: 1960 ,end_year: 2005} + - {dataset: IPSL-CM5A-LR, start_year: 1960 ,end_year: 2005} + #- {dataset: MIROC-ESM, start_year: 1960 ,end_year: 2005} + #- {dataset: MPI-ESM-LR, start_year: 1960 ,end_year: 2005} + - {dataset: NorESM1-ME, start_year: 1960 ,end_year: 2005} + 
scripts: + tsline_2c: + script: carbon_ec/carbon_tsline.ncl + ts_minlat: -30 + ts_maxlat: 30 + ts_minlon: 0 + ts_maxlon: 360 + ref_start: 1960 + ref_end: 1969 + plot_units: GtC y-1 + multi_model_mean: False + ts_maxyear: "2005" + ts_minyear: "1960" + time_avg: "yearly" + ts_anomaly: "anom" + area_opper: "sum" + volcanoes: True + styleset: CMIP5 # Plot style + run_ave: 2 + xy_line_legend: True + + diag_tsline_Fig2d: &tsline_fgcos + description: Fig 2d integrated FGCO2 in the idealized coupled simulation + themes: + - phys + realms: + - land + variables: + fgco2: &var_fgco2 + preprocessor: sea_fraction_weighting + project: CMIP5 + mip: Omon + exp: 1pctCO2 + ensemble: r1i1p1 + start_year: 1850 + end_year: 1989 + additional_datasets: + - {dataset: CanESM2, start_year: 1890 ,end_year: 1989} + - {dataset: CESM1-BGC, start_year: 40 ,end_year: 90} + - {dataset: GFDL-ESM2M, start_year: 40 ,end_year: 90} + - {dataset: HadGEM2-ES, start_year: 1890 ,end_year: 1989} + - {dataset: IPSL-CM5A-LR, start_year: 1890 ,end_year: 1989} + #- {dataset: MIROC-ESM, start_year: 40 ,end_year: 90} + #- {dataset: MPI-ESM-LR} + - {dataset: NorESM1-ME, start_year: 40 ,end_year: 90} + scripts: + tsline_2d: + script: carbon_ec/carbon_tsline.ncl + ts_minlat: -30 + ts_maxlat: 30 + ts_minlon: 0 + ts_maxlon: 360 + ridx_start: 0 + ridx_end: 120 + ref_start: 1890 + ref_end: 1899 + plot_units: GtC y-1 + multi_model_mean: False + ts_maxyear: "2005" + ts_minyear: "1960" + time_avg: "yearly" + ts_anomaly: "anom" + area_opper: "sum" + align: True + styleset: CMIP5 # Plot style + run_ave: 2 + xy_line_legend: True + + diag_tsline_Fig2e: + description: Fig 2e near surface temperature historical coupled simulation + variables: + tas: + project: CMIP5 + mip: Amon + exp: esmHistorical + ensemble: r1i1p1 + start_year: 1960 + end_year: 2005 + additional_datasets: + - {dataset: CanESM2, start_year: 1960 ,end_year: 2005} + - {dataset: CESM1-BGC, start_year: 1960 ,end_year: 2005} + - {dataset: GFDL-ESM2M, start_year: 1960 ,end_year: 2005} + - {dataset: HadGEM2-ES, start_year: 1960 ,end_year: 2005} + - {dataset: IPSL-CM5A-LR, start_year: 1960 ,end_year: 2005} + #- {dataset: MIROC-ESM, start_year: 1960 ,end_year: 2005} + #- {dataset: MPI-ESM-LR, start_year: 1960 ,end_year: 2005} + - {dataset: NorESM1-ME, start_year: 1960 ,end_year: 2005} + scripts: + tsline_2e: + script: carbon_ec/carbon_tsline.ncl + ts_minlat: -30 + ts_maxlat: 30 + ts_minlon: 0 + ts_maxlon: 360 + ref_start: 1960 + ref_end: 1969 + plot_units: K + multi_model_mean: False + ts_maxyear: "2005" + ts_minyear: "1960" + time_avg: "yearly" + ts_anomaly: "anom" + area_opper: "average" + volcanoes: True + styleset: CMIP5 # Plot style + run_ave: 2 + xy_line_legend: True + + diag_tsline_Fig2f: + description: Fig 2f near surface temperature idealized coupled simulation + themes: + - phys + realms: + - atmos + variables: + tas: + project: CMIP5 + mip: Amon + exp: 1pctCO2 + ensemble: r1i1p1 + start_year: 1890 + end_year: 1989 + additional_datasets: + - {dataset: CanESM2, start_year: 1890 ,end_year: 1989} + - {dataset: CESM1-BGC, start_year: 40 ,end_year: 90} + - {dataset: GFDL-ESM2M, start_year: 40 ,end_year: 90} + - {dataset: HadGEM2-ES, start_year: 1890 ,end_year: 1989} + - {dataset: IPSL-CM5A-LR, start_year: 1890 ,end_year: 1989} + #- {dataset: MIROC-ESM, start_year: 40 ,end_year: 90} + #- {dataset: MPI-ESM-LR} + - {dataset: NorESM1-ME, start_year: 40 ,end_year: 90} + scripts: + tsline_2f: + script: carbon_ec/carbon_tsline.ncl + ts_minlat: -90 + ts_maxlat: 90 + ts_minlon: 0 + ts_maxlon: 360 + 
ridx_start: 0 + ridx_end: 120 + ref_start: 1890 + ref_end: 1899 + plot_units: K + multi_model_mean: False + ts_maxyear: "2005" + ts_minyear: "1960" + time_avg: "yearly" + ts_anomaly: "anom" + run_ave: 2 + area_opper: "average" + align: True + styleset: CMIP5 # Plot style + xy_line_legend: True + + ### Wenzel et al. 2014, Fig. 03/04: Diagnose GAMMA_IAV from each model ######################### + + diag_gammaHist_Fig3and4: + description: Fig 3 and 4 correlating near surface temperature and land+ocean carbon fluxes + variables: + fgco2: + short_name: fgco2 + preprocessor: global_ocean + project: CMIP5 + mip: Omon + exp: esmHistorical + ensemble: r1i1p1 + start_year: 1960 + end_year: 2005 + additional_datasets: + - {dataset: CanESM2, start_year: 1960 ,end_year: 2005} + - {dataset: CESM1-BGC, start_year: 1960 ,end_year: 2005} + - {dataset: GFDL-ESM2M, start_year: 1960 ,end_year: 2005} + #- {dataset: HadGEM2-ES, start_year: 1960 ,end_year: 2005} + - {dataset: IPSL-CM5A-LR, start_year: 1960 ,end_year: 2005} + #- {dataset: MIROC-ESM, start_year: 1960 ,end_year: 2005} + #- {dataset: MPI-ESM-LR, start_year: 1960 ,end_year: 2005} + - {dataset: NorESM1-ME, start_year: 1960 ,end_year: 2005} + + fgco2_obs: + short_name: fgco2 + preprocessor: default + project: CMIP5 + mip: Omon + exp: esmHistorical + ensemble: r1i1p1 + start_year: 1960 + end_year: 2005 + reference_dataset: GCP2018 + additional_datasets: + - {dataset: GCP2018, project: OBS, type: reanaly, version: '1.0', tier: 2, start_year: 1959, end_year: 2005, frequency: yr} + nbp: + preprocessor: global_land + project: CMIP5 + mip: Lmon + exp: esmHistorical + ensemble: r1i1p1 + start_year: 1960 + end_year: 2005 + plot_units: GtC y-1 + reference_dataset: GCP2018 + additional_datasets: + - {dataset: CanESM2, start_year: 1960 ,end_year: 2005} + - {dataset: CESM1-BGC, start_year: 1960 ,end_year: 2005} + - {dataset: GFDL-ESM2M, start_year: 1960 ,end_year: 2005} + #- {dataset: HadGEM2-ES, start_year: 1960 ,end_year: 2005} + - {dataset: IPSL-CM5A-LR, start_year: 1960 ,end_year: 2005} + - {dataset: MIROC-ESM, start_year: 1960 ,end_year: 2005} + #- {dataset: MPI-ESM-LR, start_year: 1960 ,end_year: 2005} + - {dataset: NorESM1-ME, start_year: 1960 ,end_year: 2005} + nbp_obs: + short_name: nbp + preprocessor: default + project: CMIP5 + mip: Lmon + exp: esmHistorical + ensemble: r1i1p1 + start_year: 1960 + end_year: 2005 + additional_datasets: + - {dataset: GCP2018, project: OBS, type: reanaly, version: '1.0', tier: 2, start_year: 1959, end_year: 2005, frequency: yr} + tas: + preprocessor: tropical + project: CMIP5 + mip: Amon + exp: esmHistorical + ensemble: r1i1p1 + start_year: 1960 + end_year: 2005 + plot_units: K + reference_dataset: NCEP-NCAR-R1 + additional_datasets: + - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, version: 1, tier: 2, start_year: 1959, end_year: 2005} # frequency: yr + - {dataset: CanESM2, start_year: 1960 ,end_year: 2005} + - {dataset: CESM1-BGC, start_year: 1960 ,end_year: 2005} + - {dataset: GFDL-ESM2M, start_year: 1960 ,end_year: 2005} + #- {dataset: HadGEM2-ES, start_year: 1960 ,end_year: 2005} + - {dataset: IPSL-CM5A-LR, start_year: 1960 ,end_year: 2005} + #- {dataset: MIROC-ESM, start_year: 1960 ,end_year: 2005} + #- {dataset: MPI-ESM-LR, start_year: 1960 ,end_year: 2005} + - {dataset: NorESM1-ME, start_year: 1960 ,end_year: 2005} + scripts: + gammaHist_3and4: + script: carbon_ec/carbon_gammaHist.ncl + start_year: 1960 + end_year: 2005 + plot_units_x: K + plot_units_y: GtC y-1 + ec_anom: True + ec_volc: True # (optional) 
+ scatter_log: False # set logarithmic axes in scatterplot.ncl + styleset: CMIP5 # Plot style + + ### Wenzel et al. 2014, Fig. 05: Diagnose GAMMA_LT and create Emergent Constraint ############# + + diag_gammaLT_5: + description: Fig 5 correlates the historical gamma (GAMMA_IAV) with GAMMA_LT and calculates the PDF + variables: + nbp_1pct: + short_name: nbp + preprocessor: tropical_land + project: CMIP5 + mip: Lmon + exp: 1pctCO2 + ensemble: r1i1p1 + start_year: 1880 + end_year: 1960 + plot_units: GtC y-1 + additional_datasets: + - {dataset: CanESM2, start_year: 1880 ,end_year: 1960} + - {dataset: CESM1-BGC, start_year: 30 ,end_year: 110} + - {dataset: GFDL-ESM2M, start_year: 30 ,end_year: 110} + #- {dataset: HadGEM2-ES, start_year: 1880 ,end_year: 1960} + - {dataset: IPSL-CM5A-LR, start_year: 1880 ,end_year: 1960} + #- {dataset: MIROC-ESM, start_year: 30 ,end_year: 110} + #- {dataset: MPI-ESM-LR, start_year: 1880 ,end_year: 1960} + - {dataset: NorESM1-ME, start_year: 30 ,end_year: 110} + nbp_esmFix: + short_name: nbp + preprocessor: tropical_land + project: CMIP5 + mip: Lmon + exp: esmFixClim1 + ensemble: r1i1p1 + start_year: 1880 + end_year: 1960 + plot_units: GtC y-1 + additional_datasets: + - {dataset: CanESM2, start_year: 1880 ,end_year: 1960} + - {dataset: CESM1-BGC, start_year: 30 ,end_year: 110} + - {dataset: GFDL-ESM2M, start_year: 30 ,end_year: 110} + #- {dataset: HadGEM2-ES, start_year: 1880 ,end_year: 1960} + - {dataset: IPSL-CM5A-LR, start_year: 1880 ,end_year: 1960} + #- {dataset: MIROC-ESM, start_year: 1880 ,end_year: 1960} + #- {dataset: MPI-ESM-LR, start_year: 1880 ,end_year: 1960} + - {dataset: NorESM1-ME, start_year: 30 ,end_year: 110} + tas: + preprocessor: tropical + project: CMIP5 + mip: Amon + exp: 1pctCO2 + ensemble: r1i1p1 + start_year: 1880 + end_year: 1960 + additional_datasets: + - {dataset: CanESM2, start_year: 1880 ,end_year: 1960} + - {dataset: CESM1-BGC, start_year: 30 ,end_year: 110} + - {dataset: GFDL-ESM2M, start_year: 30 ,end_year: 110} + #- {dataset: HadGEM2-ES, start_year: 1880 ,end_year: 1960} + - {dataset: IPSL-CM5A-LR, start_year: 1880 ,end_year: 1960} + #- {dataset: MIROC-ESM, start_year: 30 ,end_year: 110} + #- {dataset: MPI-ESM-LR, start_year: 1880 ,end_year: 1960} + - {dataset: NorESM1-ME, start_year: 30 ,end_year: 110} + scripts: + gammaLT_5: + script: carbon_ec/carbon_constraint.ncl + gIAV_diagscript: "gammaHist_Fig3and4" + gIAV_start: 1960 + gIAV_end: 2005 + ec_anom: True + #reg_models: "MPI-ESM-LR" + con_units: "GtC/K" + nc_infile: ../../diag_gammaHist_Fig3and4/gammaHist_3and4/ + styleset: CMIP5 # Plot style diff --git a/esmvaltool/recipes/recipe_wenzel16jclim.yml b/esmvaltool/recipes/recipe_wenzel16jclim.yml new file mode 100644 index 0000000000..6caed04a93 --- /dev/null +++ b/esmvaltool/recipes/recipe_wenzel16jclim.yml @@ -0,0 +1,397 @@ +# ESMValTool +# recipe_wenzel16jclim.yml +--- +documentation: + title: Constraining Future Summer Austral Jet Stream Positions in CMIP5 + + description: | + Calculates process-oriented diagnostics from all model ensemble members and + compares results with future projections of zonal mean zonal wind and + performs a Multiple Diagnostic Ensemble Regression (MDER) analysis. 
+ + authors: + - wenzel_sabrina + - schlund_manuel + + maintainer: + - unmaintained + + projects: + - esmval + - crescendo + + +DATASET_ANCHOR: &datasets + - {dataset: ACCESS1-0, ensemble: r1i1p1, supplementary_variables: [{short_name: areacella, skip: true}]} + - {dataset: ACCESS1-3, ensemble: r1i1p1, supplementary_variables: [{short_name: areacella, skip: true}]} + - {dataset: bcc-csm1-1, ensemble: r1i1p1} + - {dataset: bcc-csm1-1-m, ensemble: r1i1p1} + - {dataset: BNU-ESM, ensemble: r1i1p1} + - {dataset: CanESM2, ensemble: r1i1p1} + - {dataset: CanESM2, ensemble: r2i1p1} + - {dataset: CanESM2, ensemble: r3i1p1} + - {dataset: CanESM2, ensemble: r4i1p1} + - {dataset: CanESM2, ensemble: r5i1p1} + - {dataset: CESM1-BGC, ensemble: r1i1p1} + - {dataset: CESM1-CAM5, ensemble: r1i1p1} + - {dataset: CESM1-CAM5, ensemble: r2i1p1} + - {dataset: CESM1-CAM5, ensemble: r3i1p1} + - {dataset: CMCC-CMS, ensemble: r1i1p1} + - {dataset: CNRM-CM5, ensemble: r1i1p1} + - {dataset: CSIRO-Mk3-6-0, ensemble: r1i1p1} + - {dataset: CSIRO-Mk3-6-0, ensemble: r2i1p1} + - {dataset: CSIRO-Mk3-6-0, ensemble: r3i1p1} + - {dataset: CSIRO-Mk3-6-0, ensemble: r4i1p1} + - {dataset: CSIRO-Mk3-6-0, ensemble: r5i1p1} + - {dataset: CSIRO-Mk3-6-0, ensemble: r6i1p1} + - {dataset: CSIRO-Mk3-6-0, ensemble: r7i1p1} + - {dataset: CSIRO-Mk3-6-0, ensemble: r8i1p1} + - {dataset: CSIRO-Mk3-6-0, ensemble: r9i1p1} + - {dataset: CSIRO-Mk3-6-0, ensemble: r10i1p1} + - {dataset: GFDL-CM3, ensemble: r1i1p1} + - {dataset: GFDL-ESM2G, ensemble: r1i1p1} + - {dataset: GFDL-ESM2M, ensemble: r1i1p1} + - {dataset: HadGEM2-AO, ensemble: r1i1p1} + - {dataset: inmcm4, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-LR, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-LR, ensemble: r2i1p1} + - {dataset: IPSL-CM5A-LR, ensemble: r3i1p1} + - {dataset: IPSL-CM5A-LR, ensemble: r4i1p1} + - {dataset: IPSL-CM5A-MR, ensemble: r1i1p1} + - {dataset: IPSL-CM5B-LR, ensemble: r1i1p1} + - {dataset: MIROC5, ensemble: r1i1p1} + - {dataset: MIROC5, ensemble: r2i1p1} + - {dataset: MIROC-ESM, ensemble: r1i1p1} + - {dataset: MIROC-ESM-CHEM, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, ensemble: r2i1p1} + - {dataset: MPI-ESM-LR, ensemble: r3i1p1} + - {dataset: MPI-ESM-MR, ensemble: r1i1p1} + - {dataset: MPI-ESM-MR, ensemble: r2i1p1} + - {dataset: MPI-ESM-MR, ensemble: r3i1p1} + - {dataset: MRI-CGCM3, ensemble: r1i1p1} + - {dataset: NorESM1-M, ensemble: r1i1p1} + + +preprocessors: + + sh: + extract_region: + start_latitude: -90 + end_latitude: -20 + start_longitude: 0 + end_longitude: 360 + + mean_sp50: + extract_levels: + levels: 5000 + scheme: linear + extract_region: + start_latitude: -90 + end_latitude: -60 + start_longitude: 0 + end_longitude: 360 + area_statistics: + operator: mean + + mean_sp100: + extract_levels: + levels: 10000 + scheme: linear + extract_region: + start_latitude: -90 + end_latitude: -60 + start_longitude: 0 + end_longitude: 360 + area_statistics: + operator: mean + + mean_ng50: + extract_levels: + levels: 5000 + scheme: linear + extract_region: + start_latitude: -82.5 + end_latitude: 82.5 + start_longitude: 0 + end_longitude: 360 + area_statistics: + operator: mean + + mean_ng100: + extract_levels: + levels: 10000 + scheme: linear + extract_region: + start_latitude: -90 + end_latitude: 90 + start_longitude: 0 + end_longitude: 360 + area_statistics: + operator: mean + + mean_trop250: + extract_levels: + levels: 25000 + scheme: linear + extract_region: + start_latitude: -30 + end_latitude: 30 + start_longitude: 0 + 
end_longitude: 360 + area_statistics: + operator: mean + + +diagnostics: + + diag_ta_sp100: + description: Calculate temperature trends and climatological means of ta. + variables: + ta: &mean_sp100_cmip5_amon_t3m_historical_1979_2005 + preprocessor: mean_sp100 + project: CMIP5 + mip: Amon + exp: historical + start_year: 1979 + end_year: 2005 + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + additional_datasets: *datasets + scripts: + ta_sp100: &ta_sp100 + script: austral_jet/main.ncl + domain: sh + average_ens: true + styleset: CMIP5 + season: ['ONDJ'] + e13fig10_yr_min: 1979 + e13fig10_yr_max: 2005 + wdiag: ['T-SP_t', 'T-SP_c'] + wdiag_title: ['T-SP_t', 'T-SP_c'] + ref_dataset: ['ERA-Interim'] + select_for_mder: + <<: *ta_sp100 + script: mder/select_for_mder.ncl + ancestors: ['ta', 'ta_sp100'] + + diag_ta_ng100: + description: Calculate temperature trends and climatological means of ta. + variables: + ta: + <<: *mean_sp100_cmip5_amon_t3m_historical_1979_2005 + preprocessor: mean_ng100 + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + additional_datasets: *datasets + scripts: + ta_ng100: &ta_ng100 + <<: *ta_sp100 + season: ['yearly'] + wdiag: ['T-NGlob_t', 'T-NGlob_c'] + wdiag_title: ['T-NGlob_t', 'T-NGlob_c'] + select_for_mder: + <<: *ta_ng100 + script: mder/select_for_mder.ncl + ancestors: ['ta', 'ta_ng100'] + + diag_ta_trop250: + description: Calculate temperature trends and climatological means of ta. + variables: + ta: + <<: *mean_sp100_cmip5_amon_t3m_historical_1979_2005 + preprocessor: mean_trop250 + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + additional_datasets: *datasets + scripts: + ta_trop250: &ta_trop250 + <<: *ta_sp100 + season: ['DJF'] + wdiag: ['T-Trop_t', 'T-Trop_c'] + wdiag_title: ['T-Trop_t', 'T-Trop_c'] + select_for_mder: + <<: *ta_trop250 + script: mder/select_for_mder.ncl + ancestors: ['ta', 'ta_trop250'] + + diag_uajet_sh850: + description: Calculate trends and climatological means of uajet. + variables: + uajet: &cmip5_amon_t0m_historical_1979_2005 + project: CMIP5 + mip: Amon + exp: historical + start_year: 1979 + end_year: 2005 + derive: true + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + additional_datasets: *datasets + scripts: + uajet_sh850: &uajet_sh850 + <<: *ta_sp100 + season: ['DJF'] + wdiag: ['U-Jet_t', 'U-Jet_c'] + wdiag_title: ['U-Jet_t', 'U-Jet_c'] + select_for_mder: + <<: *uajet_sh850 + script: mder/select_for_mder.ncl + ancestors: ['uajet', 'uajet_sh850'] + + diag_tpp_sh50: + description: Calculate trends and climatological means of tpp. + variables: + ta: &cmip5_amon_t3m_historical_1979_2005 + project: CMIP5 + mip: Amon + exp: historical + start_year: 1979 + end_year: 2005 + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + additional_datasets: *datasets + scripts: + tpp_sh50: &tpp_sh50 + <<: *ta_sp100 + season: ['DJF'] + wdiag: ['P-SH_t', 'P-SH_c'] + wdiag_title: ['P-SH_t', 'P-SH_c'] + derive_var: tpp + derive_lev: 5000 + derive_latrange: [-90, -50] + select_for_mder: + <<: *tpp_sh50 + script: mder/select_for_mder.ncl + ancestors: ['ta', 'tpp_sh50'] + + diag_mmstf_sh500: + description: Calculate trends and climatological means of the southern hemisphere Hadley cell boundary. 
+    variables:
+      va:
+        <<: *cmip5_amon_t3m_historical_1979_2005
+        additional_datasets:
+          - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3}
+      ps:
+        <<: *cmip5_amon_t3m_historical_1979_2005
+        additional_datasets:
+          - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3}
+    additional_datasets: *datasets
+    scripts:
+      mmstf_sh500: &mmstf_sh500
+        <<: *ta_sp100
+        season: ['DJF']
+        wdiag: ['H-SH_t', 'H-SH_c']
+        wdiag_title: ['H-SH_t', 'H-SH_c']
+        derive_var: mmstf
+        derive_lev: 50000
+        derive_latrange: [-80, -20]
+      select_for_mder:
+        <<: *mmstf_sh500
+        script: mder/select_for_mder.ncl
+        ancestors: ['va', 'ps', 'mmstf_sh500']
+
+  diag_asr_sh:
+    description: Calculate trends and climatological means of asr.
+    variables:
+      asr:
+        preprocessor: sh
+        project: CMIP5
+        mip: Amon
+        exp: historical
+        start_year: 2000
+        end_year: 2005
+        derive: true
+        additional_datasets:
+          - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, tier: 1, start_year: 2001}
+    additional_datasets: *datasets
+    scripts:
+      asr_sh: &asr_sh
+        <<: *ta_sp100
+        script: austral_jet/asr.ncl
+        season: monthlyclim
+        wdiag: ['ASR-SH']
+        wdiag_title: ['ASR-SH']
+        ref_dataset: ['CERES-EBAF']
+      select_for_mder:
+        <<: *asr_sh
+        script: mder/select_for_mder.ncl
+        ancestors: ['asr', 'asr_sh']
+
+  diag_mder:
+    description: Perform Multiple Diagnostic Ensemble Regression (MDER).
+    variables:
+      uajet:
+        <<: *cmip5_amon_t0m_historical_1979_2005
+        additional_datasets:
+          - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3, end_year: 2012}
+          - {dataset: ACCESS1-0, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r1i1p1}
+          - {dataset: ACCESS1-3, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r1i1p1}
+          - {dataset: bcc-csm1-1, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r1i1p1}
+          - {dataset: bcc-csm1-1-m, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r1i1p1}
+          - {dataset: BNU-ESM, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r1i1p1}
+          - {dataset: CanESM2, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r1i1p1}
+          - {dataset: CanESM2, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r2i1p1}
+          - {dataset: CanESM2, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r3i1p1}
+          - {dataset: CanESM2, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r4i1p1}
+          - {dataset: CanESM2, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r5i1p1}
+          - {dataset: CESM1-BGC, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r1i1p1}
+          - {dataset: CESM1-CAM5, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r1i1p1}
+          - {dataset: CESM1-CAM5, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r2i1p1}
+          - {dataset: CESM1-CAM5, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r3i1p1}
+          - {dataset: CMCC-CMS, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r1i1p1}
+          - {dataset: CNRM-CM5, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r1i1p1}
+          - {dataset: CSIRO-Mk3-6-0, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r1i1p1}
+          - {dataset: CSIRO-Mk3-6-0, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r2i1p1}
+          - {dataset: CSIRO-Mk3-6-0, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r3i1p1}
+          - {dataset: CSIRO-Mk3-6-0, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r4i1p1}
+          - {dataset: CSIRO-Mk3-6-0, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r5i1p1}
+          - {dataset: CSIRO-Mk3-6-0, exp: rcp45, start_year: 2006, end_year: 2100,
ensemble: r6i1p1} + - {dataset: CSIRO-Mk3-6-0, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r7i1p1} + - {dataset: CSIRO-Mk3-6-0, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r8i1p1} + - {dataset: CSIRO-Mk3-6-0, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r9i1p1} + - {dataset: CSIRO-Mk3-6-0, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r10i1p1} + - {dataset: GFDL-CM3, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r1i1p1} + - {dataset: GFDL-ESM2G, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r1i1p1} + - {dataset: GFDL-ESM2M, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r1i1p1} + - {dataset: HadGEM2-AO, exp: rcp45, start_year: 2006, end_year: 2099, ensemble: r1i1p1} + - {dataset: inmcm4, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-LR, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-LR, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r2i1p1} + - {dataset: IPSL-CM5A-LR, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r3i1p1} + - {dataset: IPSL-CM5A-LR, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r4i1p1} + - {dataset: IPSL-CM5A-MR, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r1i1p1} + - {dataset: IPSL-CM5B-LR, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r1i1p1} + - {dataset: MIROC5, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r1i1p1} + - {dataset: MIROC5, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r2i1p1} + - {dataset: MIROC-ESM, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r1i1p1} + - {dataset: MIROC-ESM-CHEM, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r2i1p1} + - {dataset: MPI-ESM-LR, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r3i1p1} + - {dataset: MPI-ESM-MR, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r1i1p1} + - {dataset: MPI-ESM-MR, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r2i1p1} + - {dataset: MPI-ESM-MR, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r3i1p1} + - {dataset: MRI-CGCM3, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r1i1p1} + - {dataset: NorESM1-M, exp: rcp45, start_year: 2006, end_year: 2100, ensemble: r1i1p1} + additional_datasets: *datasets + scripts: + absolute_correlation: &absolute_correlation + <<: *ta_sp100 + script: mder/absolute_correlation.ncl + ancestors: ['uajet', '*/select_for_mder'] + time_opt: DJF + time_oper: extract + calc_type: pos + p_time: [2015, 2040] + p_step: 19 + scal_time: [1979, 1998] + regression_stepwise: + <<: *absolute_correlation + script: mder/regression_stepwise.ncl + p_value: 0.05 + wregr: true + wexit: true + smooth: true + iter: 29 + cross_validation_mode: false diff --git a/esmvaltool/recipes/recipe_wenzel16nat.yml b/esmvaltool/recipes/recipe_wenzel16nat.yml new file mode 100644 index 0000000000..4454f7f95d --- /dev/null +++ b/esmvaltool/recipes/recipe_wenzel16nat.yml @@ -0,0 +1,185 @@ +# ESMValTool +# recipe_wenzel16nat.yml +--- +documentation: + title: > + Projected land photosynthesis constrained by changes in the + seasonal cycle of atmospheric CO2 + + description: | + This recipe reproduces selected figures of Wenzel et al. (2016). 
+
+  authors:
+    - wenzel_sabrina
+
+  maintainer:
+    - unmaintained
+
+  references:
+    - wenzel16nat
+
+  projects:
+    - crescendo
+
+preprocessors:
+
+  highlat_gpp:
+    custom_order: true
+    weighting_landsea_fraction:
+      area_type: land
+    extract_region: &extract_region
+      start_longitude: 0.
+      end_longitude: 360.
+      start_latitude: 60.
+      end_latitude: 90.
+    area_statistics:
+      operator: sum
+    annual_statistics:
+      operator: mean
+
+  highlat_co2:
+    custom_order: true
+    extract_point: &extract_point
+      latitude: 71.323
+      longitude: 203.389
+      scheme: nearest
+    annual_statistics:
+      operator: mean
+
+  highlat_amp:
+    custom_order: true
+    extract_point: *extract_point
+    amplitude:
+      coords: year
+
+  BRW_co2:
+    custom_order: true
+    extract_point: *extract_point
+    annual_statistics:
+      operator: mean
+
+  BRW_amp:
+    custom_order: true
+    extract_point: *extract_point
+    amplitude:
+      coords: year
+
+diagnostics:
+
+  ### Wenzel et al. 2016: Analysis for Pt. Barrow, Alaska ###
+
+  diag_beta_Fig2:
+    description: |
+      Fig. 2: Comparison of simulated annual mean GPP at 2xCO2 in the 1%BGC
+      simulations.
+    themes:
+      - phys
+    realms:
+      - land
+    variables:
+      gpp:
+        preprocessor: highlat_gpp
+        project: CMIP5
+        mip: Lmon
+        exp: esmFixClim1
+        ensemble: r1i1p1
+        start_year: 1860
+        end_year: 1989
+        additional_datasets:
+          - {dataset: CanESM2}
+          - {dataset: CESM1-BGC, start_year: 11, end_year: 140}
+          - {dataset: GFDL-ESM2M, start_year: 11, end_year: 140}
+          # - {dataset: HadGEM2-ES}
+### data not found on ESGF - {dataset: MIROC-ESM, start_year: 11, end_year: 140}
+          - {dataset: MPI-ESM-LR}
+          - {dataset: NorESM1-ME, start_year: 11, end_year: 140}
+    scripts:
+      carbon_beta:
+        script: carbon_ec/carbon_beta.ncl
+        cl_mean: false
+        cl_output: true
+        bc_xmax_year: 1920
+        bc_xmin_year: 1860
+        styleset: CMIP5
+
+  diag_co2cycle_Fig1and3:
+    description: |
+      Fig. 1: Comparison of the simulated and observed amplitude of the
+      seasonal CO2 cycle at Pt. Barrow and Fig. 3: EC on the relative
+      increase of large-scale GPP for 2xCO2.
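+    # Sketch of the emergent-constraint logic (an assumption about
+    # carbon_co2_cycle.ncl, not taken from the recipe itself): fit a linear
+    # relation y = a * x + b across the CMIP5 models, with x the sensitivity
+    # of the CO2 seasonal-cycle amplitude to annual mean CO2 and y the
+    # relative GPP increase from carbon_beta, then read off the constrained
+    # y at the observed x from the ESRL Pt. Barrow record.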
+ themes: + - phys + realms: + - land + variables: + co2s: + short_name: co2s + preprocessor: highlat_co2 + derive: true + project: CMIP5 + mip: Amon + exp: esmHistorical + ensemble: r1i1p1 + start_year: 1850 + end_year: 2005 + additional_datasets: + - {dataset: CanESM2} + - {dataset: CESM1-BGC} + - {dataset: GFDL-ESM2M, start_year: 1861, end_year: 2005} + # - {dataset: HadGEM2-ES, start_year: 1860, end_year: 2005} +### - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: NorESM1-ME} + co2s_obs: + short_name: co2s + preprocessor: BRW_co2 + project: CMIP5 + mip: Amon + exp: esmHistorical + ensemble: r1i1p1 + start_year: 1974 + end_year: 2014 + reference_dataset: ESRL + additional_datasets: + - {dataset: ESRL, project: OBS, type: ground, version: BRW, tier: 2, + start_year: 1973, end_year: 2014} + co2s_amp: + short_name: co2s + preprocessor: highlat_amp + derive: true + project: CMIP5 + mip: Amon + exp: esmHistorical + ensemble: r1i1p1 + start_year: 1850 + end_year: 2005 + additional_datasets: + - {dataset: CanESM2} + - {dataset: CESM1-BGC} + - {dataset: GFDL-ESM2M, start_year: 1861, end_year: 2005} + # - {dataset: HadGEM2-ES, start_year: 1860, end_year: 2005} +### - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: NorESM1-ME} + co2s_amp_obs: + short_name: co2s + preprocessor: BRW_amp + project: CMIP5 + mip: Amon + exp: esmHistorical + ensemble: r1i1p1 + start_year: 1974 + end_year: 2014 + reference_dataset: ESRL + additional_datasets: + - {dataset: ESRL, project: OBS, type: ground, version: BRW, tier: 2, + start_year: 1973, end_year: 2014} + scripts: + carbon_cycle: + script: carbon_ec/carbon_co2_cycle.ncl + ancestors: [diag_beta_Fig2/carbon_beta, co2s, co2s_obs, co2s_amp, + co2s_amp_obs] + bc_xmax_year: 1920 + bc_xmin_year: 1860 + styleset: CMIP5 + nc_infile: ../../diag_beta_Fig2/carbon_beta/ diff --git a/esmvaltool/recipes/recipe_williams09climdyn_CREM.yml b/esmvaltool/recipes/recipe_williams09climdyn_CREM.yml index eaafb97531..6d579053b9 100644 --- a/esmvaltool/recipes/recipe_williams09climdyn_CREM.yml +++ b/esmvaltool/recipes/recipe_williams09climdyn_CREM.yml @@ -2,15 +2,17 @@ # recipe_williams09climdyn_CREM.yml --- documentation: + title: Cloud Regime Error Metric (CREM) + description: | Cloud Regime Error Metric (CREM) by Williams and Webb (2009). 
   authors:
-    - will_ke
-    - laue_ax
+    - williams_keith
+    - lauer_axel
 
   maintainer:
-    - laue_ax
+    - lauer_axel
 
   references:
     - williams09climdyn
@@ -19,14 +21,6 @@ documentation:
     - cmug
 
 
-#datasets:
-#  - {dataset: CanAM4, project: CMIP5, ensemble: r1i1p1}
-#  - {dataset: CNRM-CM5, project: CMIP5, ensemble: r1i1p1}
-#  - {dataset: MIROC5, project: CMIP5, ensemble: r1i1p1}
-#  - {dataset: MPI-ESM-LR, project: CMIP5, ensemble: r1i1p1}
-#  - {dataset: MRI-CGCM3, project: CMIP5, ensemble: r1i1p1}
-
-
 preprocessors:
   preproc25x25:
     regrid:
@@ -41,6 +35,7 @@ diagnostics:
 # **********************************************************************
 
   crem:
+    title: CREM
     description: Cloud Regime Error Metric (CREM)
     themes:
       - phys
@@ -53,107 +48,37 @@ diagnostics:
       albisccp: &var_settings
         preprocessor: preproc25x25
         start_year: 1985
-        end_year: 1987
-        grid: gr
-        mip: cfDay
+        end_year: 1989
         exp: amip
+        project: CMIP5
+        mip: cfDay
         additional_datasets:
-          - {dataset: CanAM4, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: CNRM-CM5, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: MIROC5, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: MPI-ESM-LR, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: MRI-CGCM3, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: CNRM-CM6-1, project: CMIP6, exp: historical, mip: CFday, ensemble: r1i1p1f2}
+          - {dataset: CanAM4, ensemble: r1i1p1}
       pctisccp:
         <<: *var_settings
-        additional_datasets:
-          - {dataset: CanAM4, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: CNRM-CM5, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: MIROC5, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: MPI-ESM-LR, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: MRI-CGCM3, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: CNRM-CM6-1, project: CMIP6, exp: historical, mip: CFday, ensemble: r1i1p1f2}
       cltisccp:
         <<: *var_settings
-        additional_datasets:
-          - {dataset: CanAM4, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: CNRM-CM5, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: MIROC5, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: MPI-ESM-LR, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: MRI-CGCM3, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: CNRM-CM6-1, project: CMIP6, exp: historical, mip: CFday, ensemble: r1i1p1f2}
       rsut:
         <<: *var_settings
-        additional_datasets:
-          - {dataset: CanAM4, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: CNRM-CM5, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: MIROC5, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: MPI-ESM-LR, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: MRI-CGCM3, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: CNRM-CM6-1, project: CMIP6, exp: historical, mip: CFday, ensemble: r1i1p1f2}
       rlut:
         <<: *var_settings
-        additional_datasets:
-          - {dataset: CanAM4, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: CNRM-CM5, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: MIROC5, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: MPI-ESM-LR, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: MRI-CGCM3, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: CNRM-CM6-1, project: CMIP6, exp: historical, mip: day, ensemble: r1i1p1f2}
+        mip: day
       rsutcs:
         <<: *var_settings
-        additional_datasets:
-          - {dataset: CanAM4, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: CNRM-CM5, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: MIROC5, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: MPI-ESM-LR, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: MRI-CGCM3, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: CNRM-CM6-1, project: CMIP6, exp: historical, mip: CFday, ensemble: r1i1p1f2}
       rlutcs:
         <<: *var_settings
-        additional_datasets:
-          - {dataset: CanAM4, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: CNRM-CM5, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: MIROC5, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: MPI-ESM-LR, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: MRI-CGCM3, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: CNRM-CM6-1, project: CMIP6, exp: historical, mip: CFday, ensemble: r1i1p1f2}
       snc:
         <<: *var_settings
         mip: day
-        additional_datasets:
-          - {dataset: CanAM4, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: CNRM-CM5, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: MIROC5, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: MPI-ESM-LR, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: MRI-CGCM3, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: CNRM-CM6-1, project: CMIP6, exp: historical, ensemble: r1i1p1f2}
-      # snw:
-      #   <<: *var_settings
-      #   mip: day
-      #   additional_datasets:
-      #     - {dataset: CanAM4, project: CMIP5, ensemble: r1i1p1}
-      #     - {dataset: CNRM-CM5, project: CMIP5, ensemble: r1i1p1}
-      #     - {dataset: MIROC5, project: CMIP5, ensemble: r1i1p1}
-      #     - {dataset: MPI-ESM-LR, project: CMIP5, ensemble: r1i1p1}
-      #     - {dataset: MRI-CGCM3, project: CMIP5, ensemble: r1i1p1}
-      #     - {dataset: CNRM-CM6-1, project: CMIP6, exp: historical, ensemble: r1i1p1f2}
+#      snw:
+#        <<: *var_settings
+#        mip: day
       sic:
         <<: *var_settings
         mip: day
-        additional_datasets:
-          - {dataset: CanAM4, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: CNRM-CM5, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: MIROC5, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: MPI-ESM-LR, project: CMIP5, ensemble: r1i1p1}
-          - {dataset: MRI-CGCM3, project: CMIP5, ensemble: r1i1p1}
-      siconc:
-        <<: *var_settings
-        grid: gn
-        mip: SIday
-        exp: historical
-        additional_datasets:
-          - {dataset: CNRM-CM6-1, project: CMIP6, exp: historical, ensemble: r1i1p1f2}
+#      siconc:
+#        <<: *var_settings
+#        mip: SIday
     scripts:
       clim:
         script: crem/ww09_esmvaltool.py
diff --git a/esmvaltool/recipes/recipe_zmnam.yml b/esmvaltool/recipes/recipe_zmnam.yml
index 1ca0bda913..4466fe1dfa 100644
--- a/esmvaltool/recipes/recipe_zmnam.yml
+++ b/esmvaltool/recipes/recipe_zmnam.yml
@@ -1,20 +1,22 @@
-# recipe zmnam.yml
+# ESMValTool
+# recipe_zmnam.yml
 ---
 documentation:
+  title: Zonal mean annular modes
   description: |
-    Recipe for zonal mean Northern Annular Mode.
-    The diagnostics compute the index and the spatial
-    pattern to assess the simulation of the
-    strat-trop coupling in the boreal hemisphere
+    Recipe for zonal mean annular modes.
+    The diagnostics compute the index and the spatial
+    pattern to assess the simulation of the
+    stratosphere-troposphere coupling in the extratropical regions.
 
   authors:
-    - serv_fe
-    - hard_jo
-    - arno_en
-    - cagn_ch
+    - serva_federico
+    - vonhardenberg_jost
+    - arnone_enrico
+    - cagnazzo_chiara
 
   maintainer:
-    - serv_fe
+    - serva_federico
 
   references:
     - baldwin09qjrms
@@ -24,30 +26,24 @@ documentation:
 
 datasets:
   - {dataset: MPI-ESM-MR, project: CMIP5, mip: day, exp: amip, ensemble: r1i1p1, start_year: 1979, end_year: 2008}
-  #- {model: MPI-ESM-MR, project: CMIP5, mip: day, exp: historical, ensemble: r1i1p1, start_year: 1979, end_year: 2008}
 
 preprocessors:
   preproc:
     extract_levels:
-      levels: [100000., 50000., 25000., 5000.]
+      levels: [85000., 50000., 25000., 5000.]
       scheme: nearest
-    regrid:
-      target_grid: 3x3
+    regrid:
+      target_grid: 3x3
       scheme: area_weighted
-    extract_region:
-      start_longitude: 0.
-      end_longitude: 360.
-      start_latitude: 20.
-      end_latitude: 90.
-
 diagnostics:
   zmnam:
-    description: Zonal mean Northern Annular Mode
-    variables:
+    description: Zonal mean annular mode
+    variables:
       zg:
         preprocessor: preproc
-    scripts:
-      main:
-        script: zmnam/zmnam.py
-        title: "Zonal mean NAM diagnostics"
+    scripts:
+      main:
+        script: zmnam/zmnam.py
+        title: "Zonal mean AM diagnostics"
+        hemisphere: ["NH", "SH"]
diff --git a/esmvaltool/recipes/schlund20jgr/recipe_schlund20jgr_gpp_abs_rcp85.yml b/esmvaltool/recipes/schlund20jgr/recipe_schlund20jgr_gpp_abs_rcp85.yml
new file mode 100644
index 0000000000..69745951a5
--- /dev/null
+++ b/esmvaltool/recipes/schlund20jgr/recipe_schlund20jgr_gpp_abs_rcp85.yml
@@ -0,0 +1,1059 @@
+# ESMValTool
+# recipe_schlund20jgr_gpp_abs_rcp85.yml
+---
+documentation:
+  title: >
+    Constraining uncertainty in projected gross primary production (GPP) with
+    machine learning
+
+  description: >
+    Use MLR (Machine Learning Regression) models to constrain absolute
+    projected gross primary production (GPP) in the emission-driven RCP 8.5
+    scenario.
+
+  authors:
+    - schlund_manuel
+
+  maintainer:
+    - schlund_manuel
+
+  references:
+    - schlund20jgr
+
+  projects:
+    - 4c
+    - crescendo
+    - usmile
+
+
+# YAML anchors
+
+PROVENANCE_ANCHOR: &provenance
+  realms:
+    - land
+  themes:
+    - ML
+    - EC
+    - carbon
+
+DATASET_ANCHOR: &datasets
+  - {dataset: CanESM2}
+  - {dataset: CESM1-BGC}
+  - {dataset: GFDL-ESM2M}
+  - {dataset: HadGEM2-ES}
+  - {dataset: MIROC-ESM}
+  - {dataset: MPI-ESM-LR}
+  - {dataset: NorESM1-ME}
+
+DATASET_ANCHOR_CO2: &datasets_co2
+  - {dataset: CanESM2}
+  - {dataset: CESM1-BGC}
+  - {dataset: GFDL-ESM2M}
+  - {dataset: HadGEM2-ES, end_year: 2004}  # missing on ESGF for co2 of esmHistorical
+  - {dataset: MIROC-ESM}
+  - {dataset: MPI-ESM-LR}
+  - {dataset: NorESM1-ME}
+
+PSEUDO_REALITY: &pseudo_reality
+  grid_search_cv_param_grid: false
+  only_predict: true
+  pseudo_reality: ['dataset']
+  save_lime_importance: false
+  save_mlr_model_error: false
+  save_propagated_errors: false
+
+SEABORN_SETTINGS: &seaborn_settings
+  style: ticks
+  rc:
+    axes.titlepad: 15.0
+    xtick.top: true
+    ytick.right: true
+    xtick.minor.visible: true
+    ytick.minor.visible: true
+
+WEIGHTED_SAMPLES: &weighted_samples
+  area_weighted: true
+  time_weighted: false
+
+VARIABLE_SETTINGS_FEATURES: &variable_settings_features
+  project: CMIP5
+  exp: esmHistorical
+  ensemble: r1i1p1
+  var_type: feature
+  additional_datasets: *datasets
+
+VARIABLE_SETTINGS_PREDICTION_INPUT: &variable_settings_prediction_input
+  prediction_name: OBS
+  var_type: prediction_input
+
+VARIABLE_SETTINGS_GPP_2100: &variable_settings_gpp_2100
+  project: CMIP5
+  short_name: gpp
+  mip: Lmon
+  exp: esmrcp85
+  ensemble: r1i1p1
+  start_year: 2091
+  end_year: 2100
+  var_type: label
+  additional_datasets: *datasets
+
+PREPROCESS_MEAN_SCRIPT: &preprocess_mean_script
+  script: mlr/preprocess.py
+
+PREPROCESS_ERROR_SCRIPT: &preprocess_error_script
+  script: mlr/preprocess.py
+  ancestors: ['diag_mlr_label_gpp_rescaled/preprocess']
+  ignore:
+    - {var_type: label}
+
+GBRT_SCRIPT: &gbrt_script
+  script: mlr/main.py
+  group_datasets_by_attributes: ['dataset']
+  imputation_strategy: constant
+  mlr_model_type: gbr_sklearn
+  n_jobs: -1
+  parameters_final_regressor:
+    learning_rate: 0.01
+    max_depth: 6
+    n_estimators: 400
+    subsample: 0.5
+  random_state: 31415
+  save_mlr_model_error: test
+  save_propagated_errors: true
+  seaborn_settings: *seaborn_settings
+  test_size: 0.25
+  weighted_samples: *weighted_samples
+
+GBRT_PSEUDO_REALITY_SCRIPT: &gbrt_pseudo_reality_script
+  <<: *gbrt_script
+  <<: *pseudo_reality
+
parameters_final_regressor: + learning_rate: 0.1 + max_depth: 6 + n_estimators: 30 + subsample: 0.5 + +LASSO_SCRIPT: &lasso_script + script: mlr/main.py + group_datasets_by_attributes: ['dataset'] + imputation_strategy: constant + mlr_model_type: lasso_cv + n_jobs: -1 + random_state: 31415 + save_mlr_model_error: test + save_propagated_errors: true + seaborn_settings: *seaborn_settings + test_size: 0.25 + weighted_samples: *weighted_samples + +LASSO_PSEUDO_REALITY_SCRIPT: &lasso_pseudo_reality_script + <<: *lasso_script + <<: *pseudo_reality + +MMM_SCRIPT: &mmm_script + script: mlr/main.py + mlr_model_type: mmm + prediction_name: OBS + mmm_error_type: loo + seaborn_settings: *seaborn_settings + weighted_samples: *weighted_samples + +POSTPROCESS_SCRIPT: &postprocess_script + script: mlr/postprocess.py + convert_units_to: Gt yr-1 + ignore: + - {prediction_name: null} + - {var_type: null} + mean: ['month_number'] + sum: ['latitude', 'longitude'] + +PLOT_SCRIPT: &plot_script + script: mlr/plot.py + additional_plot_kwargs_xy_plots: &additional_plot_kwargs + CanESM2: {color: C0, alpha: 0.5} + CESM1-BGC: {color: C1, alpha: 0.5} + GFDL-ESM2M: {color: C2, alpha: 0.5} + HadGEM2-ES: {color: C3, alpha: 0.5} + MIROC-ESM: {color: C4, alpha: 0.5} + MPI-ESM-LR: {color: C5, alpha: 0.5} + NorESM1-ME: {color: C6, alpha: 0.5} + CRU: {color: k, alpha: 1.0, linewidth: 2} + ERA-Interim: {color: k, alpha: 1.0, linewidth: 2} + LAI3g: {color: k, alpha: 1.0, linewidth: 2} + MTE: {color: k, alpha: 1.0, linewidth: 2} + Scripps-CO2-KUM: {color: k, alpha: 1.0, linewidth: 2} + group_by_attribute: dataset + ignore: + - {var_type: prediction_input_error} + legend_kwargs: &legend_kwargs + bbox_to_anchor: [1.05, 0.5] + borderaxespad: 0.0 + loc: center left + seaborn_settings: *seaborn_settings + years_in_title: true + + +preprocessors: + + preproc_extract_KUM: + custom_order: true + extract_point: &extract_point + latitude: 19.5 + longitude: 205.2 + scheme: nearest + + preproc_extract_KUM_annual_amplitude: + custom_order: true + extract_point: *extract_point + amplitude: + coords: year + + preproc_extract_KUM_annual_mean: + custom_order: true + extract_point: *extract_point + annual_statistics: + operator: mean + + preproc_total_mean_flux_var: + custom_order: true + weighting_landsea_fraction: &weighting_landsea_fraction + area_type: land + exclude: ['BNU-ESM', 'LAI3g', 'MTE'] + extract_region: &extract_region_mean + start_latitude: -60.0 + end_latitude: 90.0 + start_longitude: 0.0 + end_longitude: 360 + area_statistics: &area_statistics + operator: mean + climate_statistics: + period: full + operator: mean + + preproc_map: + custom_order: true + regrid: ®rid + scheme: nearest + target_grid: 2x2 + mask_landsea: &mask_sea_NE_mask + mask_out: sea + extract_region: &extract_region + start_latitude: -60.0 + end_latitude: 90.0 + start_longitude: 0.0 + end_longitude: 360 + climate_statistics: &climate_statistics + period: month + operator: mean + + preproc_map_flux_var: + custom_order: true + weighting_landsea_fraction: *weighting_landsea_fraction + regrid: *regrid + mask_landsea: *mask_sea_NE_mask + extract_region: *extract_region + climate_statistics: *climate_statistics + +diagnostics: + + # Auxiliary dataset to estimate covariance + + diag_mte_for_cov_estimation: + <<: *provenance + description: Process MTE dataset which is used to estimate covariance structure. 
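+    # Note (assumption): the `cov_ref` field below appears to let
+    # mlr/postprocess.py estimate the spatial covariance of the prediction
+    # errors from this observational GPP field before the errors are summed
+    # over latitude/longitude; the preprocessed output enters the
+    # postprocess steps through their ancestors lists, e.g.
+    #   ancestors: ['diag_mmm/mlr', 'diag_mte_for_cov_estimation/preprocess']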
+ variables: + cov_ref: + <<: *variable_settings_prediction_input + short_name: gpp + preprocessor: preproc_map_flux_var + mip: Lmon + start_year: 1982 + end_year: 2011 + additional_datasets: + - {dataset: MTE, project: OBS, type: reanaly, version: May12, tier: 3} + scripts: + preprocess: + <<: *preprocess_mean_script + ancestors: ['diag_mte_for_cov_estimation/cov_ref'] + output_attributes: + tag: GPP + convert_units_to: g m-2 yr-1 + + # Use emergent constraint to rescale target variable (Step 1 in paper) + + diag_co2_cycle: + <<: *provenance + variables: + features: + <<: *variable_settings_features + short_name: co2s + preprocessor: preproc_extract_KUM + mip: Amon + exp: [esmHistorical, esmrcp85] + start_year: 1979 + end_year: 2019 + derive: true + tag: CO2 + additional_datasets: *datasets_co2 + prediction_input: + <<: *variable_settings_prediction_input + short_name: co2s + preprocessor: preproc_extract_KUM + mip: Amon + start_year: 1979 + end_year: 2019 + tag: CO2 + additional_datasets: + - {dataset: Scripps-CO2-KUM, project: OBS6, type: ground, version: 15-Nov-2019, tier: 2} + scripts: + plot_cycle: + <<: *plot_script + plot_xy: + pyplot_kwargs: + xlabel: 'Year' + ylabel: 'Atmospheric CO$_2$ concentration [ppm]' + ylim: [320, 460] + title: 'KUM (19.5°N, 154.8°W)' + + diag_co2_amplitude_sensitivity: + <<: *provenance + variables: + features: + <<: *variable_settings_features + short_name: co2s + preprocessor: preproc_extract_KUM_annual_amplitude + mip: Amon + exp: [esmHistorical, esmrcp85] + start_year: 1979 + end_year: 2019 + derive: true + additional_datasets: *datasets_co2 + features_ref: + <<: *variable_settings_features + short_name: co2s + preprocessor: preproc_extract_KUM_annual_mean + mip: Amon + exp: [esmHistorical, esmrcp85] + start_year: 1979 + end_year: 2019 + derive: true + ref: true + additional_datasets: *datasets_co2 + prediction_input: + <<: *variable_settings_prediction_input + short_name: co2s + preprocessor: preproc_extract_KUM_annual_amplitude + mip: Amon + start_year: 1979 + end_year: 2019 + additional_datasets: + - {dataset: Scripps-CO2-KUM, project: OBS6, type: ground, version: 15-Nov-2019, tier: 2} + prediction_input_ref: + <<: *variable_settings_prediction_input + short_name: co2s + preprocessor: preproc_extract_KUM_annual_mean + mip: Amon + start_year: 1979 + end_year: 2019 + ref: true + additional_datasets: + - {dataset: Scripps-CO2-KUM, project: OBS6, type: ground, version: 15-Nov-2019, tier: 2} + scripts: + preprocess: + script: mlr/preprocess.py + ref_calculation: trend + ref_kwargs: + matched_by: ['dataset'] + collapse_over: 'time' + output_attributes: + tag: CO2_AMP_SENS + convert_units_to: 1 + preprocess_for_plot: + script: mlr/preprocess.py + output_attributes: + tag: CO2_AMP_SENS + ref_calculation: merge + ref_kwargs: + matched_by: ['dataset'] + plot_co2_amplitudes: + <<: *plot_script + ancestors: ['diag_co2_amplitude_sensitivity/preprocess_for_plot'] + plot_xy: + reg_line: true + pyplot_kwargs: + xlabel: 'Atmospheric CO$_2$ concentration [ppm]' + ylabel: 'Amplitude of seasonal CO$_2$ cycle [ppm]' + ylim: [0, 20] + title: 'KUM (19.5°N, 154.8°W)' + x_coord: mole_fraction_of_carbon_dioxide_in_air + + diag_gpp_fraction_mean: + <<: *provenance + variables: + ref: + project: CMIP5 + short_name: gpp + preprocessor: preproc_total_mean_flux_var + mip: Lmon + exp: esmHistorical + ensemble: r1i1p1 + start_year: 1991 + end_year: 2000 + ref: true + additional_datasets: *datasets + label: + project: CMIP5 + short_name: gpp + preprocessor: 
preproc_total_mean_flux_var + mip: Lmon + exp: esmrcp85 + ensemble: r1i1p1 + start_year: 2091 + end_year: 2100 + var_type: label + additional_datasets: *datasets + scripts: + preprocess_fraction: + script: mlr/preprocess.py + convert_units_to: '1' + output_attributes: + tag: GPP_change + ref_calculation: divide + ref_kwargs: + matched_by: ['dataset'] + preprocess: + script: mlr/preprocess.py + ancestors: ['diag_gpp_fraction_mean/preprocess_fraction'] + convert_units_to: '%' + scalar_operations: + subtract: 1.0 + + diag_gpp_2100: + <<: *provenance + variables: + label: + <<: *variable_settings_gpp_2100 + preprocessor: preproc_map_flux_var + scripts: + preprocess: + script: mlr/preprocess.py + ancestors: ['diag_gpp_2100/label'] + output_attributes: + tag: GPP + var_type: label + convert_units_to: g m-2 yr-1 + preprocess_for_rescaling: + script: mlr/preprocess.py + ancestors: ['diag_gpp_2100/preprocess'] + output_attributes: + var_type: label_to_rescale + + diag_rescale_gpp_fraction_with_emergent_constraint: + <<: *provenance + scripts: + rescale: + script: mlr/rescale_with_emergent_constraint.py + ancestors: [ + 'diag_co2_amplitude_sensitivity/preprocess', + 'diag_gpp_fraction_mean/preprocess', + 'diag_gpp_2100/preprocess_for_rescaling', + ] + legend_kwargs: *legend_kwargs + plot_emergent_relationship: + pyplot_kwargs: + xlim: [-0.01, 0.065] + ylim: [5, 75] + xlabel: 'Sensitivity of CO$_2$ amplitude at KUM [ppm ppm$^{-1}$]' + ylabel: 'GPP change (2100 vs. 2000) [%]' + title: 'RCP 8.5 scenario' + plot_kwargs_for_groups: *additional_plot_kwargs + seaborn_settings: *seaborn_settings + + # Process historical GPP + + diag_process_mte: + <<: *provenance + description: Process MTE dataset. + variables: + ref: + <<: *variable_settings_prediction_input + short_name: gpp + preprocessor: preproc_map_flux_var + mip: Lmon + start_year: 1991 + end_year: 2000 + var_type: prediction_input + additional_datasets: + - {dataset: MTE, project: OBS, type: reanaly, version: May12, tier: 3} + mean: + <<: *variable_settings_prediction_input + short_name: gpp + preprocessor: preproc_map_flux_var + mip: Lmon + start_year: 1991 + end_year: 2000 + var_type: prediction_output + additional_datasets: + - {dataset: MTE, project: OBS, type: reanaly, version: May12, tier: 3} + error: + <<: *variable_settings_prediction_input + short_name: gppStderr + preprocessor: preproc_map_flux_var + mip: Lmon + start_year: 1991 + end_year: 2000 + var_type: prediction_output_error + additional_datasets: + - {dataset: MTE, project: OBS, type: reanaly, version: May12, tier: 3} + scripts: + preprocess: + <<: *preprocess_mean_script + output_attributes: + tag: GPP + convert_units_to: g m-2 yr-1 + postprocess: + <<: *postprocess_script + ancestors: ['diag_process_mte/preprocess'] + + diag_process_historical_gpp: + <<: *provenance + description: Process historical GPP predicted by CMIP5 models. 
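+    # Sketch of the var_type roles as they are used in this recipe (inferred
+    # from usage here, not an authoritative list):
+    #   feature            -> CMIP5 predictor fields (training input)
+    #   label              -> target variable the MLR models are trained on
+    #   prediction_input   -> observational predictors for the final prediction
+    #   prediction_input_error / prediction_output_error -> their uncertainties
+    #   label_to_rescale   -> labels rescaled via the emergent constraint (Step 1)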
+    variables:
+      ref:
+        <<: *variable_settings_prediction_input
+        short_name: gpp
+        preprocessor: preproc_map_flux_var
+        mip: Lmon
+        start_year: 1991
+        end_year: 2000
+        var_type: prediction_input
+        additional_datasets:
+          - {dataset: MTE, project: OBS, type: reanaly, version: May12, tier: 3}
+      mean:
+        <<: *variable_settings_features
+        short_name: gpp
+        preprocessor: preproc_map_flux_var
+        mip: Lmon
+        start_year: 1991
+        end_year: 2000
+        var_type: label
+    scripts:
+      preprocess:
+        <<: *preprocess_mean_script
+        output_attributes:
+          tag: GPP
+        convert_units_to: g m-2 yr-1
+      mmm:
+        <<: *mmm_script
+        ancestors: ['diag_process_historical_gpp/preprocess']
+        mlr_model_name: MMM_hist
+      postprocess:
+        <<: *postprocess_script
+        ancestors: [
+          'diag_process_historical_gpp/preprocess',
+          'diag_process_historical_gpp/mmm',
+        ]
+
+  # Label (= target variable)
+
+  diag_mlr_label_gpp_rescaled:
+    <<: *provenance
+    scripts:
+      preprocess:
+        script: mlr/preprocess.py
+        ancestors: ['diag_rescale_gpp_fraction_with_emergent_constraint/rescale']
+        convert_units_to: g m-2 yr-1
+        ignore:
+          - {var_type: null}
+        output_attributes:
+          tag: GPP
+      preprocess_error_mmm_rescaled:
+        <<: *preprocess_error_script
+        output_attributes:
+          mlr_model_name: rMMM
+      preprocess_error_gbrt:
+        <<: *preprocess_error_script
+        output_attributes:
+          mlr_model_name: GBRT
+      preprocess_error_gbrt_1d:
+        <<: *preprocess_error_script
+        output_attributes:
+          mlr_model_name: 'GBRT-1D'
+      preprocess_error_lasso:
+        <<: *preprocess_error_script
+        output_attributes:
+          mlr_model_name: LASSO
+      preprocess_error_lasso_1d:
+        <<: *preprocess_error_script
+        output_attributes:
+          mlr_model_name: 'LASSO-1D'
+
+  # Features (= predictors)
+
+  diag_mlr_feature_gpp_mean:
+    <<: *provenance
+    variables:
+      features:
+        <<: *variable_settings_features
+        short_name: gpp
+        preprocessor: preproc_map_flux_var
+        mip: Lmon
+        start_year: 1991
+        end_year: 2000
+      prediction_input:
+        <<: *variable_settings_prediction_input
+        short_name: gpp
+        preprocessor: preproc_map_flux_var
+        mip: Lmon
+        start_year: 1991
+        end_year: 2000
+        additional_datasets:
+          - {dataset: MTE, project: OBS, type: reanaly, version: May12, tier: 3}
+      prediction_input_error:
+        <<: *variable_settings_prediction_input
+        short_name: gppStderr
+        preprocessor: preproc_map_flux_var
+        mip: Lmon
+        start_year: 1991
+        end_year: 2000
+        var_type: prediction_input_error
+        stderr: true
+        additional_datasets:
+          - {dataset: MTE, project: OBS, type: reanaly, version: May12, tier: 3}
+    scripts:
+      preprocess_mean:
+        <<: *preprocess_mean_script
+        output_attributes:
+          tag: GPP
+        convert_units_to: g m-2 yr-1
+
+  diag_mlr_feature_lai_mean:
+    <<: *provenance
+    variables:
+      features:
+        <<: *variable_settings_features
+        short_name: lai
+        preprocessor: preproc_map_flux_var
+        mip: Lmon
+        start_year: 1982
+        end_year: 2005
+      prediction_input:
+        <<: *variable_settings_prediction_input
+        short_name: lai
+        preprocessor: preproc_map_flux_var
+        mip: Lmon
+        start_year: 1982
+        end_year: 2005
+        additional_datasets:
+          - {dataset: LAI3g, project: OBS, type: reanaly, version: 1_regridded, tier: 3}
+    scripts:
+      preprocess_mean:
+        <<: *preprocess_mean_script
+        output_attributes:
+          tag: LAI
+
+  diag_mlr_feature_pr_mean:
+    <<: *provenance
+    variables:
+      features:
+        <<: *variable_settings_features
+        short_name: pr
+        preprocessor: preproc_map
+        mip: Amon
+        start_year: 1901
+        end_year: 2005
+      prediction_input:
+        <<: *variable_settings_prediction_input
+        short_name: pr
+        preprocessor: preproc_map
+        mip: Amon
+        start_year: 1901
+        end_year: 2005
+        additional_datasets:
+
- {dataset: CRU, project: OBS, type: reanaly, version: TS4.02, tier: 2} + scripts: + preprocess_mean: + <<: *preprocess_mean_script + output_attributes: + tag: PR + convert_units_to: kg m-2 day-1 + + diag_mlr_feature_rsds_mean: + <<: *provenance + variables: + features: + <<: *variable_settings_features + short_name: rsds + preprocessor: preproc_map + mip: Amon + start_year: 1979 + end_year: 2005 + prediction_input: + <<: *variable_settings_prediction_input + short_name: rsds + preprocessor: preproc_map + mip: Amon + start_year: 1979 + end_year: 2005 + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + scripts: + preprocess_mean: + <<: *preprocess_mean_script + output_attributes: + tag: RSDS + + diag_mlr_feature_tas_mean: + <<: *provenance + variables: + features: + <<: *variable_settings_features + short_name: tas + preprocessor: preproc_map + mip: Amon + start_year: 1901 + end_year: 2005 + prediction_input: + <<: *variable_settings_prediction_input + short_name: tas + preprocessor: preproc_map + mip: Amon + start_year: 1901 + end_year: 2005 + additional_datasets: + - {dataset: CRU, project: OBS, type: reanaly, version: TS4.02, tier: 2} + scripts: + preprocess_mean: + <<: *preprocess_mean_script + output_attributes: + tag: T + convert_units_to: celsius + + # MLR models (Step 2a in paper) + + diag_mmm: + <<: *provenance + description: Use simple multi-model mean to predict target variable. + scripts: + mlr: + <<: *mmm_script + ancestors: ['diag_gpp_2100/preprocess'] + mlr_model_name: MMM + postprocess: + <<: *postprocess_script + ancestors: [ + 'diag_mmm/mlr', + 'diag_mte_for_cov_estimation/preprocess', + ] + + diag_mmm_pseudo_reality: + <<: *provenance + description: Validate MMM model using pseudo-reality approach. + scripts: + mlr_pseudo_reality: + <<: *mmm_script + ancestors: ['diag_gpp_2100/preprocess'] + mlr_model_name: MMM + pseudo_reality: ['dataset'] + + diag_mmm_rescaled: + <<: *provenance + description: Use simple multi-model mean to predict target variable from rescaled data. + scripts: + mlr: + <<: *mmm_script + ancestors: ['diag_mlr_label_gpp_rescaled/preprocess'] + mlr_model_name: rMMM + postprocess: + <<: *postprocess_script + ancestors: [ + 'diag_mmm_rescaled/mlr', + 'diag_mte_for_cov_estimation/preprocess', + 'diag_mlr_label_gpp_rescaled/preprocess_error_mmm_rescaled', + ] + + diag_mmm_rescaled_pseudo_reality: + <<: *provenance + description: Validate rescaled MMM model using pseudo-reality approach. + scripts: + mlr_pseudo_reality: + <<: *mmm_script + ancestors: ['diag_mlr_label_gpp_rescaled/preprocess'] + mlr_model_name: rMMM + pseudo_reality: ['dataset'] + + diag_gbrt: + <<: *provenance + description: GBRT model. + scripts: + mlr: + <<: *gbrt_script + ancestors: [ + 'diag_mlr_label_gpp_rescaled/preprocess', + 'diag_mlr_feature_*/preprocess*', + ] + mlr_model_name: GBRT + postprocess: + <<: *postprocess_script + ancestors: [ + 'diag_gbrt/mlr', + 'diag_mte_for_cov_estimation/preprocess', + 'diag_mlr_label_gpp_rescaled/preprocess_error_gbrt', + ] + + diag_gbrt_pseudo_reality: + <<: *provenance + description: Validate GBRT model using pseudo-reality approach. + scripts: + mlr_pseudo_reality: + <<: *gbrt_pseudo_reality_script + ancestors: [ + 'diag_mlr_label_gpp_rescaled/preprocess', + 'diag_mlr_feature_*/preprocess*', + ] + mlr_model_name: GBRT + + diag_gbrt_1d: + <<: *provenance + description: GBRT model with single predictor. 
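+    # Note (assumption): `mlr_model_type: gbr_sklearn` suggests a
+    # scikit-learn gradient boosting regressor, so the GBRT_SCRIPT settings
+    # anchored above would act as its hyperparameters, roughly:
+    #   learning_rate: 0.01  # shrinkage applied to each boosting step
+    #   n_estimators: 400    # number of boosted regression trees
+    #   max_depth: 6         # maximum depth of each individual tree
+    #   subsample: 0.5       # fraction of samples drawn per tree (stochastic boosting)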
+ scripts: + mlr: + <<: *gbrt_script + ancestors: [ + 'diag_mlr_label_gpp_rescaled/preprocess', + 'diag_mlr_feature_gpp_mean/preprocess*', + ] + mlr_model_name: 'GBRT-1D' + postprocess: + <<: *postprocess_script + ancestors: [ + 'diag_gbrt_1d/mlr', + 'diag_mte_for_cov_estimation/preprocess', + 'diag_mlr_label_gpp_rescaled/preprocess_error_gbrt_1d', + ] + + diag_gbrt_1d_pseudo_reality: + <<: *provenance + description: Validate GBRT model with single predictor using pseudo-reality approach. + scripts: + mlr_pseudo_reality: + <<: *gbrt_pseudo_reality_script + ancestors: [ + 'diag_mlr_label_gpp_rescaled/preprocess', + 'diag_mlr_feature_gpp_mean/preprocess*', + ] + mlr_model_name: 'GBRT-1D' + + diag_lasso: + <<: *provenance + description: Use LASSO model to predict target variable. + scripts: + mlr: + <<: *lasso_script + ancestors: [ + 'diag_mlr_label_gpp_rescaled/preprocess', + 'diag_mlr_feature_*/preprocess*', + ] + mlr_model_name: LASSO + postprocess: + <<: *postprocess_script + ancestors: [ + 'diag_lasso/mlr', + 'diag_mte_for_cov_estimation/preprocess', + 'diag_mlr_label_gpp_rescaled/preprocess_error_lasso', + ] + + diag_lasso_pseudo_reality: + <<: *provenance + description: Validate LASSO model using pseudo-reality approach. + scripts: + mlr_pseudo_reality: + <<: *lasso_pseudo_reality_script + ancestors: [ + 'diag_mlr_label_gpp_rescaled/preprocess', + 'diag_mlr_feature_*/preprocess*', + ] + mlr_model_name: LASSO + + diag_lasso_1d: + <<: *provenance + description: LASSO model with single predictor. + scripts: + mlr: + <<: *lasso_script + ancestors: [ + 'diag_mlr_label_gpp_rescaled/preprocess', + 'diag_mlr_feature_gpp_mean/preprocess*', + ] + mlr_model_name: 'LASSO-1D' + postprocess: + <<: *postprocess_script + ancestors: [ + 'diag_lasso_1d/mlr', + 'diag_mte_for_cov_estimation/preprocess', + 'diag_mlr_label_gpp_rescaled/preprocess_error_lasso_1d', + ] + + diag_lasso_1d_pseudo_reality: + <<: *provenance + description: Validate LASSO model with single predictor using pseudo-reality approach. + scripts: + mlr_pseudo_reality: + <<: *lasso_pseudo_reality_script + ancestors: [ + 'diag_mlr_label_gpp_rescaled/preprocess', + 'diag_mlr_feature_gpp_mean/preprocess*', + ] + mlr_model_name: 'LASSO-1D' + + # Plots + + diag_plot_cmip5_gbrt: + <<: *provenance + description: Create CMIP5 vs. GBRT plots. 
+ scripts: + preprocess_plot: + script: mlr/preprocess.py + ancestors: [ + 'diag_gbrt/mlr', + 'diag_mmm/mlr', + 'diag_mmm_rescaled/mlr', + 'diag_process_historical_gpp/mmm', + 'diag_process_mte/preprocess', + ] + collapse: + mean: ['month_number'] + ignore: + - {var_type: null} + plot: &plot_script_map_plots + <<: *plot_script + ancestors: ['diag_plot_cmip5_gbrt/preprocess_plot'] + aliases: + prediction_output: MTE + prediction_output___GBRT: GBRT + prediction_output___GBRT-1D: GBRT-1D + prediction_output___LASSO: LASSO + prediction_output___LASSO-1D: LASSO-1D + prediction_output___MMM: MMM + prediction_output___MMM_hist: MMM(hist) + apply_common_mask: true + group_by_attribute: mlr_model_name + ignore: + - {var_type: prediction_input} + - {var_type: prediction_input_error} + - {var_type: prediction_output_error} + - {var_type: prediction_output_misc} + - {var_type: null} + plot_map: + plot_kwargs: + cbar_label: 'GPP [gC m$^{-2}$ yr$^{-1}$]' + cbar_ticks: [0, 1000, 2000, 3000, 4000, 5000, 6000] + vmin: 0 + vmax: 6000 + plot_map_abs_biases: + plot_kwargs: + cbar_label: 'ΔGPP [gC m$^{-2}$ yr$^{-1}$]' + cbar_ticks: [-2500, -2000, -1500, -1000, -500, 0, 500, 1000, 1500, 2000, 2500] + vmin: -2500 + vmax: 2500 + plot_map_rel_biases: + plot_kwargs: + cbar_label: 'Relative change in GPP [1]' + cbar_ticks: [-1.0, -0.5, 0.0, 0.5, 1.0] + vmin: -1.0 + vmax: 1.0 + print_corr: true + + diag_plot_mlr_models: + <<: *provenance + description: Create plots for the different MLR models. + scripts: + preprocess_plot: + script: mlr/preprocess.py + ancestors: [ + 'diag_mmm/mlr', + 'diag_mmm_rescaled/mlr', + 'diag_mlr_label_gpp_rescaled/preprocess_error_mmm_rescaled', + 'diag_lasso_1d/mlr', + 'diag_mlr_label_gpp_rescaled/preprocess_error_lasso_1d', + 'diag_lasso/mlr', + 'diag_mlr_label_gpp_rescaled/preprocess_error_lasso', + 'diag_gbrt_1d/mlr', + 'diag_mlr_label_gpp_rescaled/preprocess_error_gbrt_1d', + 'diag_gbrt/mlr', + 'diag_mlr_label_gpp_rescaled/preprocess_error_gbrt', + ] + collapse: + mean: ['month_number'] + ignore: + - {var_type: null} + plot: + <<: *plot_script_map_plots + ancestors: ['diag_plot_mlr_models/preprocess_plot'] + plot_map_abs_biases: + plot_kwargs: + cbar_label: 'ΔGPP [gC m$^{-2}$ yr$^{-1}$]' + cbar_ticks: [-800, -600, -400, -200, 0, 200, 400, 600, 800] + vmin: -800 + vmax: 800 + plot_map_ratios: + plot_kwargs: + cbar_label: 'GPP ratio [1]' + cbar_ticks: [0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3] + cmap: Reds + vmin: 0.5 + vmax: 1.3 + plot_errors: + <<: *plot_script_map_plots + ancestors: ['diag_plot_mlr_models/preprocess_plot'] + ignore: + - {var_type: prediction_input} + - {var_type: prediction_input_error} + - {var_type: prediction_output} + - {var_type: prediction_output_misc} + - {var_type: null} + plot_map: + plot_kwargs: + cbar_label: 'GPP [gC m$^{-2}$ yr$^{-1}$]' + cbar_ticks: [500, 600, 700, 800, 900, 1000, 1100, 1200] + vmin: 500 + vmax: 1200 + plot_map_abs_biases: + plot_kwargs: + cbar_label: 'ΔGPP [gC m$^{-2}$ yr$^{-1}$]' + cbar_ticks: [-500, -400, -300, -200, -100, 0, 100, 200, 300, 400, 500] + vmin: -500 + vmax: 500 + + diag_evaluate_residuals: + <<: *provenance + description: Evaluate residuals of different statistical models. 
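+    # Sketch of the validation logic configured above: with
+    # `pseudo_reality: ['dataset']`, each CMIP5 model is in turn treated as
+    # the observed truth while the statistical models are trained on the
+    # remaining models; the residuals collected this way are summarized
+    # below as MSEP and RMSEP (RMSEP = sqrt(MSEP)).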
+ scripts: + evaluate_residuals: + script: mlr/evaluate_residuals.py + ancestors: [ + 'diag_mmm_pseudo_reality/mlr_pseudo_reality', + 'diag_mmm_rescaled_pseudo_reality/mlr_pseudo_reality', + 'diag_lasso_1d_pseudo_reality/mlr_pseudo_reality', + 'diag_lasso_pseudo_reality/mlr_pseudo_reality', + 'diag_gbrt_1d_pseudo_reality/mlr_pseudo_reality', + 'diag_gbrt_pseudo_reality/mlr_pseudo_reality', + ] + ignore: + - {var_type: null} + mse_plot: + pyplot_kwargs: + ylabel: 'MSEP [gC$^2$ m$^{-4}$ yr$^{-2}$]' + title: MSEP for different statistical models + rmse_plot: + pyplot_kwargs: + ylabel: 'RMSEP [gC m$^{-2}$ yr$^{-1}$]' + title: RMSEP for different statistical models + seaborn_settings: + <<: *seaborn_settings + rc: + xtick.top: false + ytick.right: true + xtick.minor.visible: false + ytick.minor.visible: true + weighted_samples: *weighted_samples diff --git a/esmvaltool/recipes/schlund20jgr/recipe_schlund20jgr_gpp_change_1pct.yml b/esmvaltool/recipes/schlund20jgr/recipe_schlund20jgr_gpp_change_1pct.yml new file mode 100644 index 0000000000..8eda376e80 --- /dev/null +++ b/esmvaltool/recipes/schlund20jgr/recipe_schlund20jgr_gpp_change_1pct.yml @@ -0,0 +1,383 @@ +# ESMValTool +# recipe_schlund20jgr_gpp_change_1pct.yml +--- +documentation: + title: > + Constraining uncertainty in projected gross primary production (GPP) with + machine learning + + description: > + Use the emergent constraint of Wenzel et al. (2016) to constrain global + mean gross primary production (GPP) in the CMIP5 biogeochemically coupled + 1%CO2 run. + + authors: + - schlund_manuel + + maintainer: + - schlund_manuel + + references: + - schlund20jgr + - wenzel16nat + + projects: + - 4c + - crescendo + - usmile + + +# YAML anchors + +PROVENANCE_ANCHOR: &provenance + realms: + - land + themes: + - EC + - carbon + +DATASET_ANCHOR_CO2: &datasets_co2 + - {dataset: CanESM2} + - {dataset: CESM1-BGC} + - {dataset: GFDL-ESM2M, start_year: 1861} + - {dataset: HadGEM2-ES, end_year: 2004} # missing on ESGF for co2 of esmHistorical + - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: NorESM1-ME} + +DATASET_ANCHOR_ESMCONTROL: &datasets_esmControl + - {dataset: CanESM2, start_year: 1860, end_year: 1869} + - {dataset: CESM1-BGC, start_year: 11, end_year: 20} + - {dataset: GFDL-ESM2M, start_year: 11, end_year: 20} + - {dataset: HadGEM2-ES, start_year: 1860, end_year: 1869} + - {dataset: MIROC-ESM, start_year: 11, end_year: 20} + - {dataset: MPI-ESM-LR, start_year: 1860, end_year: 1869} + - {dataset: NorESM1-ME, start_year: 11, end_year: 20} + +DATASET_ANCHOR_1PCTBGC: &datasets_1pctBGC + - {dataset: CanESM2, start_year: 1916, end_year: 1925} + - {dataset: CESM1-BGC, start_year: 67, end_year: 76} + - {dataset: GFDL-ESM2M, start_year: 67, end_year: 76} + - {dataset: HadGEM2-ES, start_year: 1916, end_year: 1925} + - {dataset: MIROC-ESM, start_year: 67, end_year: 76} # missing on ESGF for gpp of esmFixClim1 + - {dataset: MPI-ESM-LR, start_year: 1916, end_year: 1925} + - {dataset: NorESM1-ME, start_year: 67, end_year: 76} + +ADDITIONAL_PLOT_KWARGS: &additional_plot_kwargs + CanESM2: {color: C0, alpha: 0.5} + CESM1-BGC: {color: C1, alpha: 0.5} + GFDL-ESM2M: {color: C2, alpha: 0.5} + HadGEM2-ES: {color: C3, alpha: 0.5} + MIROC-ESM: {color: C4, alpha: 0.5} + MPI-ESM-LR: {color: C5, alpha: 0.5} + NorESM1-ME: {color: C6, alpha: 0.5} + CRU: {color: k, alpha: 1.0, linewidth: 2} + CT2019: {color: k, alpha: 1.0, linewidth: 2} + ERA-Interim: {color: k, alpha: 1.0, linewidth: 2} + LAI3g: {color: k, alpha: 1.0, linewidth: 2} + MTE: {color: k, alpha: 
1.0, linewidth: 2} + Scripps-CO2-KUM: {color: k, alpha: 1.0, linewidth: 2} + +LEGEND_KWARGS: &legend_kwargs + bbox_to_anchor: [1.05, 0.5] + borderaxespad: 0.0 + loc: center left + +SEABORN_SETTINGS: &seaborn_settings + style: ticks + rc: + axes.titlepad: 15.0 + xtick.top: true + ytick.right: true + xtick.minor.visible: true + ytick.minor.visible: true + +WEIGHTED_SAMPLES: &weighted_samples + area_weighted: true + time_weighted: false + +VARIABLE_SETTINGS_FEATURE_CO2: &variable_settings_feature_co2 + project: CMIP5 + short_name: co2s + mip: Amon + exp: [esmHistorical, esmrcp85] + start_year: 1860 + end_year: 2005 + ensemble: r1i1p1 + derive: true + var_type: feature + additional_datasets: *datasets_co2 + +VARIABLE_SETTINGS_PREDICTION_INPUT_CO2: &variable_settings_prediction_input_co2 + short_name: co2s + mip: Amon + start_year: 1979 + end_year: 2019 + prediction_name: OBS + var_type: prediction_input + additional_datasets: + - {dataset: Scripps-CO2-KUM, project: OBS6, type: ground, version: 15-Nov-2019, tier: 2} + +VARIABLE_SETTINGS_GPP_FRACTION: &variable_settings_gpp_fraction + project: CMIP5 + short_name: gpp + mip: Lmon + exp: esmFixClim1 + ensemble: r1i1p1 + var_type: label + additional_datasets: *datasets_1pctBGC + +VARIABLE_SETTINGS_GPP_FRACTION_REF: &variable_settings_gpp_fraction_ref + project: CMIP5 + short_name: gpp + mip: Lmon + exp: esmFixClim1 + ensemble: r1i1p1 + ref: true + additional_datasets: *datasets_esmControl + +PREPROCESS_FRACTION_SCRIPT: &preprocess_fraction_script + script: mlr/preprocess.py + convert_units_to: '1' + output_attributes: + tag: GPP_change + ref_calculation: divide + ref_kwargs: + matched_by: ['dataset'] + +PREPROCESS_FRACTION_TO_PERCENTAGE_SCRIPT: &preprocess_fraction_to_percentage_script + script: mlr/preprocess.py + convert_units_to: '%' + scalar_operations: + divide: 0.9 + subtract: 1.0 + +PLOT_SCRIPT: &plot_script + script: mlr/plot.py + additional_plot_kwargs_xy_plots: *additional_plot_kwargs + group_by_attribute: dataset + ignore: + - {var_type: prediction_input_error} + legend_kwargs: *legend_kwargs + seaborn_settings: *seaborn_settings + + +preprocessors: + + preproc_extract_KUM: + custom_order: true + extract_point: &extract_point + latitude: 19.5 + longitude: 205.2 + scheme: nearest + + preproc_extract_KUM_annual_amplitude: + custom_order: true + extract_point: *extract_point + amplitude: + coords: year + + preproc_extract_KUM_annual_mean: + custom_order: true + extract_point: *extract_point + annual_statistics: + operator: mean + + preproc_total_mean_flux_var: + custom_order: true + weighting_landsea_fraction: &weighting_landsea_fraction + area_type: land + exclude: ['BNU-ESM', 'LAI3g', 'MTE'] + mask_landsea: &mask_sea + mask_out: sea + extract_region: + start_latitude: -60.0 + end_latitude: 90.0 + start_longitude: 0.0 + end_longitude: 360 + area_statistics: + operator: mean + climate_statistics: &climate_statistics + period: full + operator: mean + + preproc_map_flux_var: + custom_order: true + weighting_landsea_fraction: *weighting_landsea_fraction + regrid: + scheme: nearest + target_grid: 2x2 + mask_landsea: *mask_sea + extract_region: + start_latitude: -60.0 + end_latitude: 75.0 + start_longitude: 0.0 + end_longitude: 360 + climate_statistics: *climate_statistics + + +diagnostics: + + diag_co2_cycle: + <<: *provenance + variables: + features: + <<: *variable_settings_feature_co2 + preprocessor: preproc_extract_KUM + tag: CO2 + prediction_input: + <<: *variable_settings_prediction_input_co2 + preprocessor: preproc_extract_KUM + tag: CO2 
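+    # Note (assumption): the experiment list `exp: [esmHistorical, esmrcp85]`
+    # appears to concatenate the two experiments into one time series, and
+    # `derive: true` marks co2s as a derived variable, presumably the
+    # surface CO2 mole fraction extracted from the 3-d co2 field.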
+ scripts: + plot_cycle: + <<: *plot_script + plot_xy: + pyplot_kwargs: + xlabel: 'Year' + ylabel: 'Atmospheric CO$_2$ concentration [ppm]' + ylim: [270, 430] + title: 'KUM (19.5°N, 154.8°W)' + additional_plot_kwargs_xy_plots: + CanESM2: {color: C0, alpha: 0.5, linewidth: 0.5} + CESM1-BGC: {color: C1, alpha: 0.5, linewidth: 0.5} + GFDL-ESM2M: {color: C2, alpha: 0.5, linewidth: 0.5} + HadGEM2-ES: {color: C3, alpha: 0.5, linewidth: 0.5} + MIROC-ESM: {color: C4, alpha: 0.5, linewidth: 0.5} + MPI-ESM-LR: {color: C5, alpha: 0.5, linewidth: 0.5} + NorESM1-ME: {color: C6, alpha: 0.5, linewidth: 0.5} + Scripps-CO2-KUM: {color: k, alpha: 1.0, linewidth: 1} + + diag_co2_amplitude_sensitivity: + <<: *provenance + variables: + features: + <<: *variable_settings_feature_co2 + preprocessor: preproc_extract_KUM_annual_amplitude + features_ref: + <<: *variable_settings_feature_co2 + preprocessor: preproc_extract_KUM_annual_mean + ref: true + prediction_input: + <<: *variable_settings_prediction_input_co2 + preprocessor: preproc_extract_KUM_annual_amplitude + prediction_input_ref: + <<: *variable_settings_prediction_input_co2 + preprocessor: preproc_extract_KUM_annual_mean + ref: true + scripts: + preprocess: + script: mlr/preprocess.py + ref_calculation: trend + ref_kwargs: + matched_by: ['dataset'] + collapse_over: 'time' + output_attributes: + tag: CO2_AMP_SENS + convert_units_to: 1 + preprocess_for_plot: + script: mlr/preprocess.py + output_attributes: + tag: CO2_AMP_SENS + ref_calculation: merge + ref_kwargs: + matched_by: ['dataset'] + plot_co2_amplitudes: + <<: *plot_script + ancestors: ['diag_co2_amplitude_sensitivity/preprocess_for_plot'] + plot_xy: + reg_line: true + pyplot_kwargs: + xlabel: 'Atmospheric CO$_2$ concentration [ppm]' + ylabel: 'Amplitude of seasonal CO$_2$ cycle [ppm]' + ylim: [0, 20] + title: 'KUM (19.5°N, 154.8°W)' + x_coord: mole_fraction_of_carbon_dioxide_in_air + + diag_gpp_fraction_mean: + <<: *provenance + variables: + ref: + <<: *variable_settings_gpp_fraction_ref + preprocessor: preproc_total_mean_flux_var + label: + <<: *variable_settings_gpp_fraction + preprocessor: preproc_total_mean_flux_var + scripts: + preprocess_fraction: + <<: *preprocess_fraction_script + preprocess: + <<: *preprocess_fraction_to_percentage_script + ancestors: ['diag_gpp_fraction_mean/preprocess_fraction'] + + diag_gpp_fraction_map: + <<: *provenance + variables: + ref: + <<: *variable_settings_gpp_fraction_ref + preprocessor: preproc_map_flux_var + label: + <<: *variable_settings_gpp_fraction + preprocessor: preproc_map_flux_var + scripts: + preprocess_fraction: + <<: *preprocess_fraction_script + mask: + masked_less: + value: 6.342e-9 + preprocess: + <<: *preprocess_fraction_to_percentage_script + ancestors: ['diag_gpp_fraction_map/preprocess_fraction'] + output_attributes: + var_type: label + preprocess_for_rescaling: + <<: *preprocess_fraction_to_percentage_script + ancestors: ['diag_gpp_fraction_map/preprocess_fraction'] + output_attributes: + var_type: label_to_rescale + plot: + <<: *plot_script + ancestors: ['diag_gpp_fraction_map/preprocess'] + plot_map: + plot_kwargs: + cbar_center: 0 + cbar_label: 'GPP change (2xCO2 vs. 
1xCO2) [%]'
+          cbar_ticks: [-50, 0, 50, 100, 150, 200, 250, 300]
+          cmap: bwr
+          vmin: -50
+          vmax: 300
+
+  diag_rescale_gpp_fraction_with_emergent_constraint:
+    <<: *provenance
+    scripts:
+      rescale:
+        script: mlr/rescale_with_emergent_constraint.py
+        ancestors: [
+          'diag_co2_amplitude_sensitivity/preprocess',
+          'diag_gpp_fraction_mean/preprocess',
+          'diag_gpp_fraction_map/preprocess_for_rescaling',
+        ]
+        legend_kwargs: *legend_kwargs
+        plot_emergent_relationship:
+          pyplot_kwargs:
+            xlim: [-0.01, 0.065]
+            ylim: [5, 75]
+            xlabel: 'Sensitivity of CO$_2$ amplitude at KUM [ppm ppm$^{-1}$]'
+            ylabel: 'GPP change (doubling of CO$_2$) [%]'
+            title: 'Biogeochemically-coupled simulation with CO$_2$ increase of 1% per year'
+        plot_kwargs_for_groups: *additional_plot_kwargs
+        seaborn_settings: *seaborn_settings
+
+  diag_mmm_mean:
+    <<: *provenance
+    description: Use simple multi-model mean to predict scalar target variable.
+    scripts:
+      mlr:
+        script: mlr/main.py
+        ancestors: ['diag_gpp_fraction_mean/preprocess']
+        mlr_model_name: MMM
+        mlr_model_type: mmm
+        prediction_name: OBS
+        mmm_error_type: loo
+        seaborn_settings: *seaborn_settings
+        weighted_samples: *weighted_samples
diff --git a/esmvaltool/recipes/schlund20jgr/recipe_schlund20jgr_gpp_change_rcp85.yml b/esmvaltool/recipes/schlund20jgr/recipe_schlund20jgr_gpp_change_rcp85.yml
new file mode 100644
index 0000000000..f54b1ab8eb
--- /dev/null
+++ b/esmvaltool/recipes/schlund20jgr/recipe_schlund20jgr_gpp_change_rcp85.yml
@@ -0,0 +1,923 @@
+# ESMValTool
+# recipe_schlund20jgr_gpp_change_rcp85.yml
+---
+documentation:
+  title: >
+    Constraining uncertainty in projected gross primary production (GPP) with
+    machine learning
+
+  description: >
+    Use MLR (Machine Learning Regression) models to constrain fractional
+    change in projected gross primary production (GPP) in the emission-driven
+    RCP 8.5 scenario.
+ + authors: + - schlund_manuel + + maintainer: + - schlund_manuel + + references: + - schlund20jgr + + projects: + - 4c + - crescendo + - usmile + +# YAML anchors + +PROVENANCE_ANCHOR: &provenance + realms: + - land + themes: + - ML + - EC + - carbon + +DATASET_ANCHOR: &datasets + - {dataset: CanESM2} + - {dataset: CESM1-BGC} + - {dataset: GFDL-ESM2M} + - {dataset: HadGEM2-ES} + - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: NorESM1-ME} + +DATASET_ANCHOR_CO2: &datasets_co2 + - {dataset: CanESM2} + - {dataset: CESM1-BGC} + - {dataset: GFDL-ESM2M} + - {dataset: HadGEM2-ES, end_year: 2004} # missing on ESGF for co2 of esmHistorical + - {dataset: MIROC-ESM} + - {dataset: MPI-ESM-LR} + - {dataset: NorESM1-ME} + +PSEUDO_REALITY: &pseudo_reality + grid_search_cv_param_grid: false + only_predict: true + pseudo_reality: ['dataset'] + save_lime_importance: false + save_mlr_model_error: false + save_propagated_errors: false + +SEABORN_SETTINGS: &seaborn_settings + style: ticks + rc: + axes.titlepad: 15.0 + xtick.top: true + ytick.right: true + xtick.minor.visible: true + ytick.minor.visible: true + +WEIGHTED_SAMPLES: &weighted_samples + area_weighted: true + time_weighted: false + +VARIABLE_SETTINGS_FEATURES: &variable_settings_features + project: CMIP5 + exp: esmHistorical + ensemble: r1i1p1 + var_type: feature + additional_datasets: *datasets + +VARIABLE_SETTINGS_PREDICTION_INPUT: &variable_settings_prediction_input + prediction_name: OBS + var_type: prediction_input + +VARIABLE_SETTINGS_GPP_FRACTION: &variable_settings_gpp_fraction + project: CMIP5 + short_name: gpp + mip: Lmon + exp: esmrcp85 + ensemble: r1i1p1 + start_year: 2091 + end_year: 2100 + var_type: label + additional_datasets: *datasets + +VARIABLE_SETTINGS_GPP_FRACTION_REF: &variable_settings_gpp_fraction_ref + project: CMIP5 + short_name: gpp + mip: Lmon + exp: esmHistorical + ensemble: r1i1p1 + start_year: 1991 + end_year: 2000 + ref: true + additional_datasets: *datasets + +PREPROCESS_MEAN_SCRIPT: &preprocess_mean_script + script: mlr/preprocess.py + +PREPROCESS_FRACTION_MEAN_SCRIPT: &preprocess_fraction_script + script: mlr/preprocess.py + convert_units_to: '1' + ref_calculation: divide + ref_kwargs: + matched_by: ['dataset'] + +PREPROCESS_FRACTION_TO_PERCENTAGE_SCRIPT: &preprocess_fraction_to_percentage_script + script: mlr/preprocess.py + convert_units_to: '%' + mask: # is executed prior to unit conversion and subtraction of 1 (4.0 == 300%) + masked_greater: + value: 4.0 + scalar_operations: + subtract: 1.0 + +PREPROCESS_ERROR_SCRIPT: &preprocess_error_script + script: mlr/preprocess.py + ancestors: ['diag_rescale_gpp_fraction_with_emergent_constraint/rescale'] + ignore: + - {var_type: label} + - {var_type: null} + +GBRT_SCRIPT: &gbrt_script + script: mlr/main.py + group_datasets_by_attributes: ['dataset'] + ignore: + - {var_type: null} + imputation_strategy: mean + mlr_model_type: gbr_sklearn + n_jobs: -1 + parameters_final_regressor: &gbrt_parameters + learning_rate: 0.1 + max_depth: 4 + n_estimators: 50 + subsample: 0.5 + random_state: 31415 + save_lime_importance: true + save_mlr_model_error: test + save_propagated_errors: true + seaborn_settings: *seaborn_settings + test_size: 0.25 + weighted_samples: *weighted_samples + +LASSO_SCRIPT: &lasso_script + script: mlr/main.py + group_datasets_by_attributes: ['dataset'] + ignore: + - {var_type: null} + imputation_strategy: mean + mlr_model_type: lasso_cv + n_jobs: -1 + random_state: 31415 + save_lime_importance: true + save_mlr_model_error: test + 
save_propagated_errors: true + seaborn_settings: *seaborn_settings + test_size: 0.25 + weighted_samples: *weighted_samples + +MMM_SCRIPT: &mmm_script + script: mlr/main.py + ignore: + - {var_type: null} + mlr_model_type: mmm + prediction_name: OBS + mmm_error_type: loo + seaborn_settings: *seaborn_settings + weighted_samples: *weighted_samples + +POSTPROCESS_SCRIPT: &postprocess_script + script: mlr/postprocess.py + convert_units_to: '1' + cov_estimate_dim_map: [1, 2] # latitude, longitude + ignore: + - {prediction_name: null} + - {var_type: null} + mean: ['latitude', 'longitude'] + +PLOT_SCRIPT: &plot_script + script: mlr/plot.py + additional_plot_kwargs_xy_plots: &additional_plot_kwargs + CanESM2: {color: C0, alpha: 0.5} + CESM1-BGC: {color: C1, alpha: 0.5} + GFDL-ESM2M: {color: C2, alpha: 0.5} + HadGEM2-ES: {color: C3, alpha: 0.5} + MIROC-ESM: {color: C4, alpha: 0.5} + MPI-ESM-LR: {color: C5, alpha: 0.5} + NorESM1-ME: {color: C6, alpha: 0.5} + CRU: {color: k, alpha: 1.0, linewidth: 2} + CT2019: {color: k, alpha: 1.0, linewidth: 2} + ERA-Interim: {color: k, alpha: 1.0, linewidth: 2} + LAI3g: {color: k, alpha: 1.0, linewidth: 2} + MTE: {color: k, alpha: 1.0, linewidth: 2} + Scripps-CO2-KUM: {color: k, alpha: 1.0, linewidth: 2} + group_by_attribute: dataset + ignore: + - {var_type: prediction_input_error} + - {var_type: null} + legend_kwargs: &legend_kwargs + bbox_to_anchor: [1.05, 0.5] + borderaxespad: 0.0 + loc: center left + seaborn_settings: *seaborn_settings + + +preprocessors: + + preproc_extract_KUM: + custom_order: true + extract_point: &extract_point + latitude: 19.5 + longitude: 205.2 + scheme: nearest + + preproc_extract_KUM_annual_amplitude: + custom_order: true + extract_point: *extract_point + amplitude: + coords: year + + preproc_extract_KUM_annual_mean: + custom_order: true + extract_point: *extract_point + annual_statistics: + operator: mean + + preproc_total_mean_flux_var: + custom_order: true + weighting_landsea_fraction: &weighting_landsea_fraction + area_type: land + exclude: ['BNU-ESM', 'LAI3g', 'MTE'] + extract_region: &extract_region_mean + start_latitude: -60.0 + end_latitude: 90.0 + start_longitude: 0.0 + end_longitude: 360 + area_statistics: &area_statistics + operator: mean + climate_statistics: &climate_statistics + period: full + operator: mean + + preproc_map: + custom_order: true + regrid: ®rid + scheme: nearest + target_grid: 2x2 + mask_landsea: &mask_sea_NE_mask + mask_out: sea + extract_region: &extract_region + start_latitude: -60.0 + end_latitude: 75.0 + start_longitude: 0.0 + end_longitude: 360 + climate_statistics: *climate_statistics + + preproc_map_flux_var: + custom_order: true + weighting_landsea_fraction: *weighting_landsea_fraction + regrid: *regrid + mask_landsea: *mask_sea_NE_mask + extract_region: *extract_region + climate_statistics: *climate_statistics + + preproc_no_mean_flux_var: + custom_order: true + weighting_landsea_fraction: *weighting_landsea_fraction + regrid: *regrid + mask_landsea: *mask_sea_NE_mask + extract_region: *extract_region + + +diagnostics: + + # Auxiliary dataset to estimate covariance + + diag_mte_for_cov_estimation: + <<: *provenance + description: Process MTE dataset which is used to estimate covariance structure. 
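A note on the PREPROCESS_FRACTION_* anchors defined above (divide by the matched reference, subtract 1, convert to '%'): the label they produce is the fractional GPP change

    dGPP [%] = (GPP(2091-2100) / GPP(1991-2000) - 1) * 100%

so the masked_greater threshold of 4.0, applied to the raw ratio before the subtraction, caps the change at +300%, exactly as the inline comment (4.0 == 300%) states.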
+ variables: + cov_ref: + <<: *variable_settings_prediction_input + short_name: gpp + preprocessor: preproc_no_mean_flux_var + mip: Lmon + start_year: 1982 + end_year: 2011 + additional_datasets: + - {dataset: MTE, project: OBS, type: reanaly, version: May12, tier: 3} + scripts: + preprocess: + <<: *preprocess_mean_script + ancestors: ['diag_mte_for_cov_estimation/cov_ref'] + output_attributes: + tag: GPP_change + convert_units_to: g m-2 yr-1 + + # Use emergent constraint to rescale target variable (Step 1 in paper) + + diag_co2_cycle: + <<: *provenance + variables: + features: + <<: *variable_settings_features + short_name: co2s + preprocessor: preproc_extract_KUM + mip: Amon + exp: [esmHistorical, esmrcp85] + start_year: 1979 + end_year: 2019 + derive: true + tag: CO2 + additional_datasets: *datasets_co2 + prediction_input: + <<: *variable_settings_prediction_input + short_name: co2s + preprocessor: preproc_extract_KUM + mip: Amon + start_year: 1979 + end_year: 2019 + tag: CO2 + additional_datasets: + - {dataset: Scripps-CO2-KUM, project: OBS6, type: ground, version: 15-Nov-2019, tier: 2} + scripts: + plot_cycle: + <<: *plot_script + plot_xy: + pyplot_kwargs: + xlabel: 'Year' + ylabel: 'Atmospheric CO$_2$ concentration [ppm]' + ylim: [320, 460] + title: 'KUM (19.5°N, 154.8°W)' + + diag_co2_amplitude_sensitivity: + <<: *provenance + variables: + features: + <<: *variable_settings_features + short_name: co2s + preprocessor: preproc_extract_KUM_annual_amplitude + mip: Amon + exp: [esmHistorical, esmrcp85] + start_year: 1979 + end_year: 2019 + derive: true + additional_datasets: *datasets_co2 + features_ref: + <<: *variable_settings_features + short_name: co2s + preprocessor: preproc_extract_KUM_annual_mean + mip: Amon + exp: [esmHistorical, esmrcp85] + start_year: 1979 + end_year: 2019 + derive: true + ref: true + additional_datasets: *datasets_co2 + prediction_input: + <<: *variable_settings_prediction_input + short_name: co2s + preprocessor: preproc_extract_KUM_annual_amplitude + mip: Amon + start_year: 1979 + end_year: 2019 + additional_datasets: + - {dataset: Scripps-CO2-KUM, project: OBS6, type: ground, version: 15-Nov-2019, tier: 2} + prediction_input_ref: + <<: *variable_settings_prediction_input + short_name: co2s + preprocessor: preproc_extract_KUM_annual_mean + mip: Amon + start_year: 1979 + end_year: 2019 + ref: true + additional_datasets: + - {dataset: Scripps-CO2-KUM, project: OBS6, type: ground, version: 15-Nov-2019, tier: 2} + scripts: + preprocess: + script: mlr/preprocess.py + ref_calculation: trend + ref_kwargs: + matched_by: ['dataset'] + collapse_over: 'time' + output_attributes: + tag: CO2_AMP_SENS + convert_units_to: 1 + preprocess_for_plot: + script: mlr/preprocess.py + output_attributes: + tag: CO2_AMP_SENS + ref_calculation: merge + ref_kwargs: + matched_by: ['dataset'] + plot_co2_amplitudes: + <<: *plot_script + ancestors: ['diag_co2_amplitude_sensitivity/preprocess_for_plot'] + plot_xy: + reg_line: true + pyplot_kwargs: + xlabel: 'Atmospheric CO$_2$ concentration [ppm]' + ylabel: 'Amplitude of seasonal CO$_2$ cycle [ppm]' + ylim: [0, 20] + title: 'KUM (19.5°N, 154.8°W)' + x_coord: mole_fraction_of_carbon_dioxide_in_air + + diag_gpp_fraction_mean: + <<: *provenance + variables: + ref: + <<: *variable_settings_gpp_fraction_ref + preprocessor: preproc_total_mean_flux_var + label: + <<: *variable_settings_gpp_fraction + preprocessor: preproc_total_mean_flux_var + scripts: + preprocess_fraction: + <<: *preprocess_fraction_script + output_attributes: + tag: GPP_change + 
preprocess: + <<: *preprocess_fraction_to_percentage_script + ancestors: ['diag_gpp_fraction_mean/preprocess_fraction'] + + diag_gpp_fraction_map: + <<: *provenance + variables: + ref: + <<: *variable_settings_gpp_fraction_ref + preprocessor: preproc_map_flux_var + label: + <<: *variable_settings_gpp_fraction + preprocessor: preproc_map_flux_var + scripts: + preprocess_fraction: + <<: *preprocess_fraction_script + mask: + masked_less: + value: 1.0e-15 + output_attributes: + tag: GPP_change + preprocess: + <<: *preprocess_fraction_to_percentage_script + ancestors: ['diag_gpp_fraction_map/preprocess_fraction'] + output_attributes: + var_type: label + preprocess_for_rescaling: + <<: *preprocess_fraction_to_percentage_script + ancestors: ['diag_gpp_fraction_map/preprocess_fraction'] + output_attributes: + var_type: label_to_rescale + plot: + <<: *plot_script + ancestors: ['diag_gpp_fraction_map/preprocess'] + plot_map: + plot_kwargs: + cbar_center: 0 + cbar_label: 'GPP change (2100 vs. 2000) [%]' + cbar_ticks: [-50, 0, 50, 100, 150, 200, 250, 300] + cmap: bwr + vmin: -50 + vmax: 300 + + diag_rescale_gpp_fraction_with_emergent_constraint: + <<: *provenance + scripts: + rescale: + script: mlr/rescale_with_emergent_constraint.py + ancestors: [ + 'diag_co2_amplitude_sensitivity/preprocess', + 'diag_gpp_fraction_mean/preprocess', + 'diag_gpp_fraction_map/preprocess_for_rescaling', + ] + legend_kwargs: *legend_kwargs + plot_emergent_relationship: + pyplot_kwargs: + xlim: [-0.01, 0.065] + ylim: [5, 75] + xlabel: 'Sensitivity of CO$_2$ amplitude at KUM [ppm ppm$^{-1}$]' + ylabel: 'GPP change (2100 vs. 2000) [%]' + title: 'RCP 8.5 scenario' + plot_kwargs_for_groups: *additional_plot_kwargs + seaborn_settings: *seaborn_settings + plot: + <<: *plot_script + ancestors: ['diag_rescale_gpp_fraction_with_emergent_constraint/rescale'] + ignore: + - {var_type: prediction_output_error} + - {var_type: null} + plot_map: + plot_kwargs: + cbar_center: 0 + cbar_label: 'GPP change (2100 vs. 
2000) [%]' + cbar_ticks: [-50, 0, 50, 100, 150, 200, 250, 300] + cmap: bwr + vmin: -50 + vmax: 300 + preprocess_error_mmm_rescaled: + <<: *preprocess_error_script + output_attributes: + mlr_model_name: rMMM + prediction_name: OBS + preprocess_error_lasso: + <<: *preprocess_error_script + output_attributes: + mlr_model_name: LASSO + prediction_name: OBS + preprocess_error_gbrt: + <<: *preprocess_error_script + output_attributes: + mlr_model_name: GBRT + prediction_name: OBS + + # Features (= predictors) + + diag_mlr_feature_gpp_mean: + <<: *provenance + variables: + features: + <<: *variable_settings_features + short_name: gpp + preprocessor: preproc_map_flux_var + mip: Lmon + start_year: 1982 + end_year: 2005 + prediction_input: + <<: *variable_settings_prediction_input + short_name: gpp + preprocessor: preproc_map_flux_var + mip: Lmon + start_year: 1982 + end_year: 2005 + additional_datasets: + - {dataset: MTE, project: OBS, type: reanaly, version: May12, tier: 3} + prediction_input_error: + <<: *variable_settings_prediction_input + short_name: gppStderr + preprocessor: preproc_map_flux_var + mip: Lmon + start_year: 1982 + end_year: 2005 + var_type: prediction_input_error + stderr: true + additional_datasets: + - {dataset: MTE, project: OBS, type: reanaly, version: May12, tier: 3} + scripts: + preprocess_mean: + <<: *preprocess_mean_script + output_attributes: + tag: GPP + convert_units_to: g m-2 yr-1 + plot: + <<: *plot_script + ancestors: ['diag_mlr_feature_gpp_mean/preprocess_mean'] + plot_map: + plot_kwargs: + cbar_label: 'GPP [gC m$^{-2}$ yr$^{-1}$]' + + diag_mlr_feature_lai_mean: + <<: *provenance + variables: + features: + <<: *variable_settings_features + short_name: lai + preprocessor: preproc_map_flux_var + mip: Lmon + start_year: 1982 + end_year: 2005 + prediction_input: + <<: *variable_settings_prediction_input + short_name: lai + preprocessor: preproc_map_flux_var + mip: Lmon + start_year: 1982 + end_year: 2005 + additional_datasets: + - {dataset: LAI3g, project: OBS, type: reanaly, version: 1_regridded, tier: 3} + scripts: + preprocess_mean: + <<: *preprocess_mean_script + output_attributes: + tag: LAI + plot: + <<: *plot_script + ancestors: ['diag_mlr_feature_lai_mean/preprocess_mean'] + plot_map: + plot_kwargs: + cbar_label: 'LAI [1]' + + diag_mlr_feature_pr_mean: + <<: *provenance + variables: + features: + <<: *variable_settings_features + short_name: pr + preprocessor: preproc_map + mip: Amon + start_year: 1901 + end_year: 2005 + prediction_input: + <<: *variable_settings_prediction_input + short_name: pr + preprocessor: preproc_map + mip: Amon + start_year: 1901 + end_year: 2005 + additional_datasets: + - {dataset: CRU, project: OBS, type: reanaly, version: TS4.02, tier: 2} + scripts: + preprocess_mean: + <<: *preprocess_mean_script + output_attributes: + tag: PR + convert_units_to: kg m-2 day-1 + plot: + <<: *plot_script + ancestors: ['diag_mlr_feature_pr_mean/preprocess_mean'] + plot_map: + plot_kwargs: + cbar_label: 'PR [kg m$^{-2}$ day$^{-1}$]' + + diag_mlr_feature_rsds_mean: + <<: *provenance + variables: + features: + <<: *variable_settings_features + short_name: rsds + preprocessor: preproc_map + mip: Amon + start_year: 1979 + end_year: 2005 + prediction_input: + <<: *variable_settings_prediction_input + short_name: rsds + preprocessor: preproc_map + mip: Amon + start_year: 1979 + end_year: 2005 + additional_datasets: + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} + scripts: + preprocess_mean: + <<: *preprocess_mean_script + 
output_attributes: + tag: RSDS + plot: + <<: *plot_script + ancestors: ['diag_mlr_feature_rsds_mean/preprocess_mean'] + plot_map: + plot_kwargs: + cbar_label: 'RSDS [W m$^{-2}$]' + + diag_mlr_feature_tas_mean: + <<: *provenance + variables: + features: + <<: *variable_settings_features + short_name: tas + preprocessor: preproc_map + mip: Amon + start_year: 1901 + end_year: 2005 + prediction_input: + <<: *variable_settings_prediction_input + short_name: tas + preprocessor: preproc_map + mip: Amon + start_year: 1901 + end_year: 2005 + additional_datasets: + - {dataset: CRU, project: OBS, type: reanaly, version: TS4.02, tier: 2} + scripts: + preprocess_mean: + <<: *preprocess_mean_script + output_attributes: + tag: T + convert_units_to: celsius + plot: + <<: *plot_script + ancestors: ['diag_mlr_feature_tas_mean/preprocess_mean'] + plot_map: + plot_kwargs: + cbar_label: 'T [°C]' + + # MLR models (Step 2b in paper) + + diag_mmm_mean: + <<: *provenance + description: Use simple multi-model mean to predict scalar target variable. + scripts: + mlr: + <<: *mmm_script + ancestors: ['diag_gpp_fraction_mean/preprocess'] + mlr_model_name: MMM + + diag_mmm: + <<: *provenance + description: Use simple multi-model mean to predict target variable. + scripts: + mlr: + <<: *mmm_script + ancestors: ['diag_gpp_fraction_map/preprocess'] + mlr_model_name: MMM + postprocess: + <<: *postprocess_script + ancestors: [ + 'diag_mmm/mlr', + 'diag_mte_for_cov_estimation/preprocess', + ] + + diag_mmm_pseudo_reality: + <<: *provenance + description: Validate MMM model using pseudo-reality approach. + scripts: + mlr_pseudo_reality: + <<: *mmm_script + ancestors: ['diag_gpp_fraction_map/preprocess'] + mlr_model_name: MMM + pseudo_reality: ['dataset'] + + diag_mmm_rescaled: + <<: *provenance + description: Use simple multi-model mean to predict target variable from rescaled data. + scripts: + mlr: + <<: *mmm_script + ancestors: ['diag_rescale_gpp_fraction_with_emergent_constraint/rescale'] + mlr_model_name: rMMM + postprocess: + <<: *postprocess_script + ancestors: [ + 'diag_mmm_rescaled/mlr', + 'diag_mte_for_cov_estimation/preprocess', + 'diag_rescale_gpp_fraction_with_emergent_constraint/preprocess_error_mmm_rescaled', + ] + + diag_mmm_rescaled_pseudo_reality: + <<: *provenance + description: Validate rescaled MMM model using pseudo-reality approach. + scripts: + mlr_pseudo_reality: + <<: *mmm_script + ancestors: ['diag_rescale_gpp_fraction_with_emergent_constraint/rescale'] + mlr_model_name: rMMM + pseudo_reality: ['dataset'] + + diag_gbrt: + <<: *provenance + description: GBRT model. + scripts: + mlr: + <<: *gbrt_script + ancestors: [ + 'diag_rescale_gpp_fraction_with_emergent_constraint/rescale', + 'diag_mlr_feature_*/preprocess*', + ] + mlr_model_name: GBRT + postprocess: + <<: *postprocess_script + ancestors: [ + 'diag_gbrt/mlr', + 'diag_mte_for_cov_estimation/preprocess', + 'diag_rescale_gpp_fraction_with_emergent_constraint/preprocess_error_gbrt', + ] + + diag_gbrt_pseudo_reality: + <<: *provenance + description: Validate GBRT model using pseudo-reality approach. + scripts: + mlr_pseudo_reality: + <<: *gbrt_script + <<: *pseudo_reality + ancestors: [ + 'diag_rescale_gpp_fraction_with_emergent_constraint/rescale', + 'diag_mlr_feature_*/preprocess*', + ] + mlr_model_name: GBRT + parameters_final_regressor: *gbrt_parameters + + diag_lasso: + <<: *provenance + description: Use LASSO regression model to predict target variable. 
+ scripts: + mlr: + <<: *lasso_script + ancestors: [ + 'diag_rescale_gpp_fraction_with_emergent_constraint/rescale', + 'diag_mlr_feature_*/preprocess*', + ] + mlr_model_name: LASSO + postprocess: + <<: *postprocess_script + ancestors: [ + 'diag_lasso/mlr', + 'diag_mte_for_cov_estimation/preprocess', + 'diag_rescale_gpp_fraction_with_emergent_constraint/preprocess_error_lasso', + ] + + diag_lasso_pseudo_reality: + <<: *provenance + description: Validate LASSO regression model using pseudo-reality approach. + scripts: + mlr_pseudo_reality: + <<: *lasso_script + <<: *pseudo_reality + ancestors: [ + 'diag_rescale_gpp_fraction_with_emergent_constraint/rescale', + 'diag_mlr_feature_*/preprocess*', + ] + mlr_model_name: LASSO + + # Plots + + diag_plot_mlr_models: + <<: *provenance + description: Create plots for the different MLR models. + scripts: + plot: + <<: *plot_script + ancestors: &ancestors_plot_mlr_models + [ + 'diag_mmm/mlr', + 'diag_mmm_rescaled/mlr', + 'diag_rescale_gpp_fraction_with_emergent_constraint/preprocess_error_mmm_rescaled', + 'diag_lasso/mlr', + 'diag_rescale_gpp_fraction_with_emergent_constraint/preprocess_error_lasso', + 'diag_gbrt/mlr', + 'diag_rescale_gpp_fraction_with_emergent_constraint/preprocess_error_gbrt', + ] + apply_common_mask: true + group_by_attribute: mlr_model_name + ignore: + - {var_type: prediction_output_error} + - {var_type: prediction_output_misc} + - {var_type: null} + plot_map: + plot_kwargs: + cbar_center: 0 + cbar_label: 'GPP change (2100 vs. 2000) [%]' + cbar_ticks: [-50, 0, 50, 100, 150, 200, 250, 300] + cmap: bwr + vmin: -50 + vmax: 300 + plot_map_abs_biases: + plot_kwargs: + cbar_label: 'Δ(GPP change) [%]' + cbar_ticks: [-300, -200, -100, 0, 100, 200, 300] + cmap: bwr + vmin: -300 + vmax: 300 + plot_map_rel_biases: + plot_kwargs: + cbar_label: 'Relative change in GPP change [1]' + cbar_ticks: [-3.0, -2.0, -1.0, 0.0, 1.0, 2.0, 3.0] + vmin: -3.0 + vmax: 3.0 + plot_map_ratios: + plot_kwargs: + cbar_label: 'GPP change ratio [1]' + cbar_ticks: [0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3] + cmap: Reds + vmin: 0.5 + vmax: 1.3 + print_corr: true + plot_errors: + <<: *plot_script + ancestors: *ancestors_plot_mlr_models + apply_common_mask: true + group_by_attribute: mlr_model_name + ignore: + - {var_type: prediction_output} + - {var_type: prediction_output_misc} + - {var_type: null} + plot_map: + plot_kwargs: + cbar_label: 'GPP change (2100 vs. 2000) [%]' + cbar_ticks: [40, 45, 50, 55, 60, 65, 70, 75] + cmap: Reds + vmin: 40.0 + vmax: 75.0 + + diag_evaluate_residuals: + <<: *provenance + description: Evaluate residuals of different statistical models.
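For orientation, the MSEP and RMSEP panels configured below presumably follow the standard definitions of the (here area-weighted, cf. weighted_samples) mean squared error of prediction and its square root, evaluated against the pseudo-observations:

    MSEP = sum_i w_i * (y_pred_i - y_i)^2 / sum_i w_i
    RMSEP = sqrt(MSEP)

with area weights w_i, predictions y_pred_i, and pseudo-observations y_i. This is consistent with the axis units configured here, [%^2] for MSEP and [%] for RMSEP, since the target variable is a GPP change in percent.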
+ scripts: + evaluate_residuals: + script: mlr/evaluate_residuals.py + ancestors: [ + 'diag_mmm_pseudo_reality/mlr_pseudo_reality', + 'diag_mmm_rescaled_pseudo_reality/mlr_pseudo_reality', + 'diag_lasso_pseudo_reality/mlr_pseudo_reality', + 'diag_gbrt_pseudo_reality/mlr_pseudo_reality', + ] + ignore: + - {var_type: null} + mse_plot: + pyplot_kwargs: + ylabel: 'MSEP [%$^2$]' + title: MSEP for different statistical models + rmse_plot: + pyplot_kwargs: + ylabel: 'RMSEP [%]' + title: RMSEP for different statistical models + seaborn_settings: + <<: *seaborn_settings + rc: + xtick.top: false + ytick.right: true + xtick.minor.visible: false + ytick.minor.visible: true + weighted_samples: *weighted_samples diff --git a/esmvaltool/recipes/testing/recipe_deangelis15nat_fig1_fast.yml b/esmvaltool/recipes/testing/recipe_deangelis15nat_fig1_fast.yml new file mode 100644 index 0000000000..55e241bde4 --- /dev/null +++ b/esmvaltool/recipes/testing/recipe_deangelis15nat_fig1_fast.yml @@ -0,0 +1,55 @@ +# ESMValTool +# recipe_deangelis15nat_fig1_fast.yml +--- +documentation: + title: "Evaluate water vapor short-wave radiation absorption schemes of ESMs against observations" + description: | + Recipe for reproducing the plots in DeAngelis et al., 2015 Nature. + Short version of Fig. 1b for test cases. + + authors: + - weigel_katja + + maintainer: + - weigel_katja + + references: + - deangelis15nat + + projects: + - eval4cmip + +preprocessors: + spatial_mean: + area_statistics: + operator: mean + + +diagnostics: + + deangelisf1b: + description: Bar plot similar to DeAngelis et al. (2015), Fig. 1b. + # Global multi-model experiment mean for flux variables, e.g. rlnst, rsnst, lvp, hfss + variables: + rlnst: &spatial_mean_cmip5_r1i1p1_amon_t2ms + preprocessor: spatial_mean + project: CMIP5 + ensemble: r1i1p1 + mip: Amon + derive: true + force_derivation: false + rsnst: + <<: *spatial_mean_cmip5_r1i1p1_amon_t2ms + lvp: + <<: *spatial_mean_cmip5_r1i1p1_amon_t2ms + hfss: + <<: *spatial_mean_cmip5_r1i1p1_amon_t2ms + derive: false + additional_datasets: + - {dataset: GFDL-CM3, exp: piControl, start_year: 1, end_year: 1} + - {dataset: GFDL-CM3, exp: rcp85, start_year: 2091, end_year: 2091} + - {dataset: GFDL-CM3, exp: abrupt4xCO2, start_year: 141, end_year: 141} + + scripts: + deangelisf1b: + script: deangelis15nat/deangelisf1b.py diff --git a/esmvaltool/recipes/testing/recipe_python_for_CI.yml b/esmvaltool/recipes/testing/recipe_python_for_CI.yml new file mode 100644 index 0000000000..34b1c09460 --- /dev/null +++ b/esmvaltool/recipes/testing/recipe_python_for_CI.yml @@ -0,0 +1,115 @@ +# ESMValTool +# recipe_python_for_CI.yml +# +# This recipe is identical to recipe_python.yml bar the extract_location +# preprocessor, which is replaced here with extract_point. The reason for this +# is that this recipe is used solely for CI tests, and the geolocator/Nominatim +# engine for location extraction should not be used in CI runs as per their +# usage policy. +# +# See https://docs.esmvaltool.org/en/latest/recipes/recipe_examples.html +# for a description of this recipe. +# +# See https://docs.esmvaltool.org/projects/esmvalcore/en/latest/recipe/overview.html +# for a description of the recipe format. +--- +documentation: + description: | + Example recipe that plots a map and timeseries of temperature. + + title: Recipe that runs an example diagnostic written in Python.
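To make the difference described in the header comment concrete, the only functional change relative to recipe_python.yml is in the annual_mean_amsterdam preprocessor. A side-by-side sketch (the extract_location variant quotes recipe_python.yml from memory and is an assumption, not part of this diff):

    # recipe_python.yml: resolves the place name through the Nominatim geocoder
    extract_location:
      location: Amsterdam
      scheme: linear

    # this recipe: the same point hard-coded, so no network lookup is needed in CI
    extract_point:
      latitude: 52.3730796
      longitude: 4.8924534
      scheme: linear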
+ + authors: + - andela_bouwe + - righi_mattia + + maintainer: + - predoi_valeriu + + references: + - acknow_project + + projects: + - esmval + - c3s-magic + +datasets: + - {dataset: BCC-ESM1, project: CMIP6, exp: historical, ensemble: r1i1p1f1, grid: gn} + - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1} + +preprocessors: + # See https://docs.esmvaltool.org/projects/esmvalcore/en/latest/recipe/preprocessor.html + # for a description of the preprocessor functions. + + to_degrees_c: + convert_units: + units: degrees_C + + annual_mean_amsterdam: + extract_point: + latitude: 52.3730796 + longitude: 4.8924534 + scheme: linear + annual_statistics: + operator: mean + multi_model_statistics: + statistics: + - mean + span: overlap + convert_units: + units: degrees_C + + annual_mean_global: + area_statistics: + operator: mean + annual_statistics: + operator: mean + convert_units: + units: degrees_C + +diagnostics: + + map: + description: Global map of temperature in January 2000. + themes: + - phys + realms: + - atmos + variables: + tas: + mip: Amon + preprocessor: to_degrees_c + timerange: 2000/P1M + caption: | + Global map of {long_name} in January 2000 according to {dataset}. + scripts: + script1: + script: examples/diagnostic.py + quickplot: + plot_type: pcolormesh + cmap: Reds + + timeseries: + description: Annual mean temperature in Amsterdam and global mean since 1850. + themes: + - phys + realms: + - atmos + variables: + tas_amsterdam: + short_name: tas + mip: Amon + preprocessor: annual_mean_amsterdam + timerange: 1850/2000 + caption: Annual mean {long_name} in Amsterdam according to {dataset}. + tas_global: + short_name: tas + mip: Amon + preprocessor: annual_mean_global + timerange: 1850/2000 + caption: Annual global mean {long_name} according to {dataset}. + scripts: + script1: + script: examples/diagnostic.py + quickplot: + plot_type: plot diff --git a/esmvaltool/references/acknow_author.bibtex b/esmvaltool/references/acknow_author.bibtex new file mode 100644 index 0000000000..8c04c438ad --- /dev/null +++ b/esmvaltool/references/acknow_author.bibtex @@ -0,0 +1,3 @@ +@misc{acknow_author, + title = "Please acknowledge the author(s)." +} diff --git a/esmvaltool/references/acknow_project.bibtex b/esmvaltool/references/acknow_project.bibtex new file mode 100644 index 0000000000..24c8bdc710 --- /dev/null +++ b/esmvaltool/references/acknow_project.bibtex @@ -0,0 +1,3 @@ +@misc{acknow_project, + title = "Please acknowledge the project(s)."
+} diff --git a/esmvaltool/references/aeronetv3.bibtex b/esmvaltool/references/aeronetv3.bibtex new file mode 100644 index 0000000000..ac05ed0f31 --- /dev/null +++ b/esmvaltool/references/aeronetv3.bibtex @@ -0,0 +1,6 @@ +@misc{aeronetv3, + author = {}, + title = {}, + url = {https://aeronet.gsfc.nasa.gov/new_web/download_all_v3_aod.html}, + year = 2023 +} diff --git a/esmvaltool/references/agcd-v201.bibtex b/esmvaltool/references/agcd-v201.bibtex new file mode 100644 index 0000000000..931f0c423a --- /dev/null +++ b/esmvaltool/references/agcd-v201.bibtex @@ -0,0 +1,9 @@ +@misc{agcd-v201, + doi = {10.25914/RSES-ZH67}, + url = {https://pid.nci.org.au/doi/f5999_1125_5714_7440}, + author = {{Bureau Of Meteorology}}, + language = {en}, + title = {Australian Gridded Climate Data (AGCD) v2.0.1}, + publisher = {NCI Australia}, + year = {2023} +} diff --git a/esmvaltool/references/alexander06jgr.bibtex b/esmvaltool/references/alexander06jgr.bibtex new file mode 100644 index 0000000000..e54a8ad8db --- /dev/null +++ b/esmvaltool/references/alexander06jgr.bibtex @@ -0,0 +1,11 @@ +@article{alexander06jgr, + doi = {10.1029/2005jd006290}, + url = {https://doi.org/10.1029%2F2005jd006290}, + year = 2006, + publisher = {American Geophysical Union ({AGU})}, + volume = {111}, + number = {D5}, + author = {L. V. Alexander and X. Zhang and T. C. Peterson and J. Caesar and B. Gleason and A. M. G. Klein Tank and M. Haylock and D. Collins and B. Trewin and F. Rahimzadeh and A. Tagipour and K. Rupa Kumar and J. Revadekar and G. Griffiths and L. Vincent and D. B. Stephenson and J. Burn and E. Aguilar and M. Brunet and M. Taylor and M. New and P. Zhai and M. Rusticucci and J. L. Vazquez-Aguirre}, + title = {Global observed changes in daily climate extremes of temperature and precipitation}, + journal = {Journal of Geophysical Research} +} diff --git a/esmvaltool/references/anav13jclim.bibtex b/esmvaltool/references/anav13jclim.bibtex new file mode 100644 index 0000000000..f3423814ac --- /dev/null +++ b/esmvaltool/references/anav13jclim.bibtex @@ -0,0 +1,13 @@ +@article{anav13jclim, + doi = {10.1175/jcli-d-12-00417.1}, + url = {https://doi.org/10.1175%2Fjcli-d-12-00417.1}, + year = 2013, + month = {sep}, + publisher = {American Meteorological Society}, + volume = {26}, + number = {18}, + pages = {6801--6843}, + author = {A. Anav and P. Friedlingstein and M. Kidston and L. Bopp and P. Ciais and P. Cox and C. Jones and M. Jung and R. Myneni and Z. Zhu}, + title = {Evaluating the Land and Ocean Components of the Global Carbon Cycle in the {CMIP}5 Earth System Models}, + journal = {Journal of Climate} +} diff --git a/esmvaltool/references/andrews12grl.bibtex b/esmvaltool/references/andrews12grl.bibtex new file mode 100644 index 0000000000..2bb961aa73 --- /dev/null +++ b/esmvaltool/references/andrews12grl.bibtex @@ -0,0 +1,12 @@ +@article{andrews12grl, + doi = {10.1029/2012gl051607}, + url = {https://doi.org/10.1029/2012gl051607}, + year = {2012}, + month = may, + publisher = {American Geophysical Union ({AGU})}, + volume = {39}, + number = {9}, + author = {Timothy Andrews and Jonathan M. Gregory and Mark J. Webb and Karl E. 
Taylor}, + title = {Forcing, feedbacks and climate sensitivity in {CMIP}5 coupled atmosphere-ocean climate models}, + journal = {Geophysical Research Letters}, +} diff --git a/esmvaltool/references/antonov10usgov.bibtex b/esmvaltool/references/antonov10usgov.bibtex new file mode 100644 index 0000000000..c160f7c56a --- /dev/null +++ b/esmvaltool/references/antonov10usgov.bibtex @@ -0,0 +1,6 @@ +@book{antonov10usgov, + author = {Antonov, J. I. and Seidov, D. and Boyer, T. P. and Locarnini, R. A. and Mishonov, A. V. and Garcia, H. E. and Baranova, O. K. and Zweng, M. M. and Johnson, D. R.}, + title = {World Ocean Atlas 2009, Volume 2: Salinity}, + publisher = {U.S. Department of Commerce, National Oceanic and Atmospheric Administration, National Environmental Satellite, Data and Information Service}, + year = {2010} +} diff --git a/esmvaltool/references/anuclimate2.bibtex b/esmvaltool/references/anuclimate2.bibtex new file mode 100644 index 0000000000..0c183cd181 --- /dev/null +++ b/esmvaltool/references/anuclimate2.bibtex @@ -0,0 +1,11 @@ +@misc{https://doi.org/10.25914/60a10aa56dd1b, + doi = {10.25914/60A10AA56DD1B}, + url = {https://pid.nci.org.au/doi/f2576_7854_4065_1457}, + author = {Hutchinson, Michael and Xu, Tingbao and Kesteven, Jennifer and Marang, Ian and Evans, Bradley}, + keywords = {Climatology (excl. Climate Change Processes)}, + language = {en}, + title = {ANUClimate 2.0}, + publisher = {NCI Australia}, + year = {2021}, + copyright = {Creative Commons Attribution Share Alike 4.0 International} +} diff --git a/esmvaltool/references/aphro-ma-v1101.bibtex b/esmvaltool/references/aphro-ma-v1101.bibtex new file mode 100644 index 0000000000..5105a9b16f --- /dev/null +++ b/esmvaltool/references/aphro-ma-v1101.bibtex @@ -0,0 +1,11 @@ +@article{aphro-ma-v1101, + author = {Yatagai, Akiyo and Kamiguchi, Kenji and Arakawa, Osamu and Hamada, Atsushi and Yasutomi, Natsuko and Kitoh, Akio}, + title = {APHRODITE: Constructing a Long-Term Daily Gridded Precipitation Dataset for Asia Based on a Dense Network of Rain Gauges}, + journal = {Bulletin of the American Meteorological Society}, + volume = {93}, + number = {9}, + pages = {1401-1415}, + year = {2012}, + doi = {10.1175/BAMS-D-11-00122.1}, + URL = {https://doi.org/10.1175/BAMS-D-11-00122.1} + } diff --git a/esmvaltool/references/aphro-ma-v1808.bibtex b/esmvaltool/references/aphro-ma-v1808.bibtex new file mode 100644 index 0000000000..a8f57e2b02 --- /dev/null +++ b/esmvaltool/references/aphro-ma-v1808.bibtex @@ -0,0 +1,12 @@ +@article{aphro-ma-v1808, + doi = {}, + issn = {1343-8808}, + url = {http://www.chikyu.ac.jp/precip/data/Yasutomi2011GER.pdf}, + year = 2011, + volume = {15}, + number = {2}, + pages = {165--172}, + author = {N. Yasutomi and A. Hamada and A. Yatagai}, + title = {Development of a long-term daily gridded temperature dataset and its application to rain/snow discrimination of daily precipitation}, + journal = {Global Environmental Research} +} diff --git a/esmvaltool/references/aquila11gmd.bibtex b/esmvaltool/references/aquila11gmd.bibtex new file mode 100644 index 0000000000..ebbd26a1fc --- /dev/null +++ b/esmvaltool/references/aquila11gmd.bibtex @@ -0,0 +1,13 @@ +@article{aquila11gmd, + doi = {10.5194/gmd-4-325-2011}, + url = {https://doi.org/10.5194%2Fgmd-4-325-2011}, + year = 2011, + month = {apr}, + publisher = {Copernicus {GmbH}}, + volume = {4}, + number = {2}, + pages = {325--355}, + author = {V. Aquila and J. Hendricks and A. Lauer and N. Riemer and H. Vogel and D. Baumgardner and A. Minikin and A. Petzold and J. 
P. Schwarz and J. R. Spackman and B. Weinzierl and M. Righi and M. Dall'Amico}, + title = {{MADE}-in: a new aerosol microphysics submodel for global simulation of insoluble particles and their mixing state}, + journal = {Geoscientific Model Development} +} diff --git a/esmvaltool/references/aura-tes.bibtex b/esmvaltool/references/aura-tes.bibtex new file mode 100644 index 0000000000..f5a5106270 --- /dev/null +++ b/esmvaltool/references/aura-tes.bibtex @@ -0,0 +1,13 @@ +@article{aura-tes, + doi = {10.1109/tgrs.2005.863716}, + url = {https://doi.org/10.1109%2Ftgrs.2005.863716}, + year = 2006, + month = {may}, + publisher = {Institute of Electrical and Electronics Engineers ({IEEE})}, + volume = {44}, + number = {5}, + pages = {1102--1105}, + author = {R. Beer}, + title = {{TES} on the aura mission: scientific objectives, measurements, and analysis overview}, + journal = {{IEEE} Transactions on Geoscience and Remote Sensing} +} diff --git a/esmvaltool/references/bakker14essd.bibtex b/esmvaltool/references/bakker14essd.bibtex new file mode 100644 index 0000000000..2e187d4ee4 --- /dev/null +++ b/esmvaltool/references/bakker14essd.bibtex @@ -0,0 +1,13 @@ +@article{bakker14essd, + doi = {10.5194/essd-6-69-2014}, + url = {https://doi.org/10.5194%2Fessd-6-69-2014}, + year = 2014, + month = {mar}, + publisher = {Copernicus {GmbH}}, + volume = {6}, + number = {1}, + pages = {69--90}, + author = {D. C. E. Bakker and B. Pfeil and K. Smith and S. Hankin and A. Olsen and S. R. Alin and C. Cosca and S. Harasawa and A. Kozyr and Y. Nojiri and K. M. O'Brien and U. Schuster and M. Telszewski and B. Tilbrook and C. Wada and J. Akl and L. Barbero and N. R. Bates and J. Boutin and Y. Bozec and W.-J. Cai and R. D. Castle and F. P. Chavez and L. Chen and M. Chierici and K. Currie and H. J. W. de Baar and W. Evans and R. A. Feely and A. Fransson and Z. Gao and B. Hales and N. J. Hardman-Mountford and M. Hoppema and W.-J. Huang and C. W. Hunt and B. Huss and T. Ichikawa and T. Johannessen and E. M. Jones and S. D. Jones and S. Jutterström and V. Kitidis and A. Körtzinger and P. Landschützer and S. K. Lauvset and N. Lef{\`{e}}vre and A. B. Manke and J. T. Mathis and L. Merlivat and N. Metzl and A. Murata and T. Newberger and A. M. Omar and T. Ono and G.-H. Park and K. Paterson and D. Pierrot and A. F. R{\'{\i}}os and C. L. Sabine and S. Saito and J. Salisbury and V. V. S. S. Sarma and R. Schlitzer and R. Sieger and I. Skjelvan and T. Steinhoff and K. F. Sullivan and H. Sun and A. J. Sutton and T. Suzuki and C. Sweeney and T. Takahashi and J. Tjiputra and N. Tsurushima and S. M. A. C. van Heuven and D. Vandemark and P. Vlahos and D. W. R. Wallace and R. Wanninkhof and A. J. Watson}, + title = {An update to the Surface Ocean {CO}2 Atlas ({SOCAT} version 2)}, + journal = {Earth System Science Data} +} diff --git a/esmvaltool/references/baldwin09qjrms.bibtex b/esmvaltool/references/baldwin09qjrms.bibtex new file mode 100644 index 0000000000..1f29636e9d --- /dev/null +++ b/esmvaltool/references/baldwin09qjrms.bibtex @@ -0,0 +1,13 @@ +@article{baldwin09qjrms, + doi = {10.1002/qj.479}, + url = {https://doi.org/10.1002%2Fqj.479}, + year = 2009, + month = {oct}, + publisher = {Wiley}, + volume = {135}, + number = {644}, + pages = {1661--1672}, + author = {Mark P. Baldwin and David W.J. 
Thompson}, + title = {A critical comparison of stratosphere-troposphere coupling indices}, + journal = {Quarterly Journal of the Royal Meteorological Society} +} diff --git a/esmvaltool/references/berkeleyearth.bibtex b/esmvaltool/references/berkeleyearth.bibtex new file mode 100644 index 0000000000..79a3ea096f --- /dev/null +++ b/esmvaltool/references/berkeleyearth.bibtex @@ -0,0 +1,11 @@ +@article{berkeleyearth, + author = {Rohde, Robert and Muller, Richard and Jacobsen, Robert and Muller, Elizabeth and Perlmutter, Saul and Rosenfeld, Arthur and Wurtele, Jonathan and Groom, Donald and Wickham, Charlotte}, + doi = {10.4172/2327-4581.1000101}, + issn = {23274581}, + journal = {Geoinformatics {\&} Geostatistics: An Overview}, + number = {1}, + title = {{A New Estimate of the Average Earth Surface Land Temperature Spanning 1753 to 2011}}, + url = {http://www.scitechnol.com/2327-4581/2327-4581-1-101.php}, + volume = {1}, + year = {2013} +} diff --git a/esmvaltool/references/bett2016renene.bibtex b/esmvaltool/references/bett2016renene.bibtex new file mode 100644 index 0000000000..db894491b4 --- /dev/null +++ b/esmvaltool/references/bett2016renene.bibtex @@ -0,0 +1,12 @@ +@article{bett2016renene, +title = {The climatological relationships between wind and solar energy supply in Britain}, +journal = {Renewable Energy}, +volume = {87}, +pages = {96-110}, +year = {2016}, +issn = {0960-1481}, +doi = {https://doi.org/10.1016/j.renene.2015.10.006}, +url = {https://www.sciencedirect.com/science/article/pii/S0960148115303591}, +author = {Philip E. Bett and Hazel E. Thornton}, +keywords = {Wind power generation, Solar PV power generation, Climatology, Energy balancing, Reanalysis, Seasonal variability}, +} diff --git a/esmvaltool/references/bianchi12gbc.bibtex b/esmvaltool/references/bianchi12gbc.bibtex new file mode 100644 index 0000000000..a882419f67 --- /dev/null +++ b/esmvaltool/references/bianchi12gbc.bibtex @@ -0,0 +1,13 @@ +@article{bianchi12gbc, + doi = {10.1029/2011gb004209}, + url = {https://doi.org/10.1029%2F2011gb004209}, + year = 2012, + month = {may}, + publisher = {American Geophysical Union ({AGU})}, + volume = {26}, + number = {2}, + pages = {n/a--n/a}, + author = {Daniele Bianchi and John P. Dunne and Jorge L. Sarmiento and Eric D. Galbraith}, + title = {Data-based estimates of suboxia, denitrification, and N2O production in the ocean and their sensitivities to dissolved O2}, + journal = {Global Biogeochemical Cycles} +} diff --git a/esmvaltool/references/bock20jgr.bibtex b/esmvaltool/references/bock20jgr.bibtex new file mode 100644 index 0000000000..55dce064eb --- /dev/null +++ b/esmvaltool/references/bock20jgr.bibtex @@ -0,0 +1,15 @@ +@article{https://doi.org/10.1029/2019JD032321, +author = {Bock, L. and Lauer, A. and Schlund, M. and Barreiro, M. and Bellouin, N. and Jones, C. and Meehl, G. A. and Predoi, V. and Roberts, M. J. 
and Eyring, V.}, +title = {Quantifying Progress Across Different CMIP Phases With the ESMValTool}, +journal = {Journal of Geophysical Research: Atmospheres}, +volume = {125}, +number = {21}, +pages = {e2019JD032321}, +keywords = {climate model, evaluation, CMIP}, +doi = {https://doi.org/10.1029/2019JD032321}, +url = {https://agupubs.onlinelibrary.wiley.com/doi/abs/10.1029/2019JD032321}, +eprint = {https://agupubs.onlinelibrary.wiley.com/doi/pdf/10.1029/2019JD032321}, +note = {e2019JD032321 2019JD032321}, +year = {2020} +} + diff --git a/esmvaltool/references/brient16climdyn.bibtex b/esmvaltool/references/brient16climdyn.bibtex new file mode 100644 index 0000000000..c0e54ec432 --- /dev/null +++ b/esmvaltool/references/brient16climdyn.bibtex @@ -0,0 +1,13 @@ +@article{brient16climdyn, + doi = {10.1007/s00382-015-2846-0}, + url = {https://doi.org/10.1007/s00382-015-2846-0}, + year = {2015}, + month = oct, + publisher = {Springer Science and Business Media {LLC}}, + volume = {47}, + number = {1-2}, + pages = {433--449}, + author = {Florent Brient and Tapio Schneider and Zhihong Tan and Sandrine Bony and Xin Qu and Alex Hall}, + title = {Shallowness of tropical low clouds as a predictor of climate models' response to warming}, + journal = {Climate Dynamics} +} diff --git a/esmvaltool/references/brient16jclim.bibtex b/esmvaltool/references/brient16jclim.bibtex new file mode 100644 index 0000000000..ba92c0e5ae --- /dev/null +++ b/esmvaltool/references/brient16jclim.bibtex @@ -0,0 +1,13 @@ +@article{brient16jclim, + doi = {10.1175/jcli-d-15-0897.1}, + url = {https://doi.org/10.1175%2Fjcli-d-15-0897.1}, + year = 2016, + month = {aug}, + publisher = {American Meteorological Society}, + volume = {29}, + number = {16}, + pages = {5821--5835}, + author = {Florent Brient and Tapio Schneider}, + title = {Constraints on Climate Sensitivity from Space-Based Measurements of Low-Cloud Reflection}, + journal = {Journal of Climate} +} diff --git a/esmvaltool/references/brown02nsidc.bibtex b/esmvaltool/references/brown02nsidc.bibtex new file mode 100644 index 0000000000..47d303a120 --- /dev/null +++ b/esmvaltool/references/brown02nsidc.bibtex @@ -0,0 +1,8 @@ +@misc{brown02nsidc, +author = {Brown, J. and Ferrians, O. and Heginbottom, J. A. and Melnikov, E.}, +booktitle = {National Snow and Ice Data Center}, +title = {{Circum-Arctic Map of Permafrost and Ground-Ice Conditions, Version 2}}, +url = {http://nsidc.org/data/ggd318.html}, +urldate = {2015-01-01}, +year = {2002} +} diff --git a/esmvaltool/references/brunner2019.bibtex b/esmvaltool/references/brunner2019.bibtex new file mode 100644 index 0000000000..9a3834a17f --- /dev/null +++ b/esmvaltool/references/brunner2019.bibtex @@ -0,0 +1,14 @@ +@article{Brunner2019, + doi = {10.1088/1748-9326/ab492f}, + url = {https://doi.org/10.1088%2F1748-9326%2Fab492f}, + year = 2019, + month = {nov}, + publisher = {{IOP} Publishing}, + volume = {14}, + number = {12}, + pages = {124010}, + author = {Lukas Brunner and Ruth Lorenz and Marius Zumwald and Reto Knutti}, + title = {Quantifying uncertainty in European climate projections using combined performance-independence weighting}, + journal = {Environmental Research Letters}, + abstract = {Uncertainty in model projections of future climate change arises due to internal variability, multiple possible emission scenarios, and different model responses to anthropogenic forcing. 
To robustly quantify uncertainty in multi-model ensembles, inter-dependencies between models as well as a model's ability to reproduce observations should be considered. Here, a model weighting approach, which accounts for both independence and performance, is applied to European temperature and precipitation projections from the CMIP5 archive. Two future periods representing mid- and end-of-century conditions driven by the high-emission scenario RCP8.5 are investigated. To inform the weighting, six diagnostics based on three observational estimates are used to also account for uncertainty in the observational record. Our findings show that weighting the ensemble can reduce the interquartile spread by more than 20% in some regions, increasing the reliability of projected changes. The mean temperature change is most notably impacted by the weighting in the Mediterranean, where it is found to be 0.35 °C higher than the unweighted mean in the end-of-century period. For precipitation the largest differences are found for Northern Europe, with a relative decrease in precipitation of 2.4% and 3.4% for the two future periods compared to the unweighted case. Based on a perfect model test, it is found that weighting the ensemble leads to an increase in the investigated skill score for temperature and precipitation while minimizing the probability of overfitting.} +} \ No newline at end of file diff --git a/esmvaltool/references/brunner2020.bibtex b/esmvaltool/references/brunner2020.bibtex new file mode 100644 index 0000000000..c5f088ff9a --- /dev/null +++ b/esmvaltool/references/brunner2020.bibtex @@ -0,0 +1,16 @@ +@article{Brunner2020f, +abstract = {The sixth Coupled Model Intercomparison Project (CMIP6) constitutes the latest update on expected future climate change based on a new generation of climate models. To extract reliable estimates of future warming and related uncertainties from these models, the spread in their projections is often translated into probabilistic estimates such as the mean and likely range. Here, we use a model weighting approach, which accounts for the models' historical performance based on several diagnostics as well as model interdependence within the CMIP6 ensemble, to calculate constrained distributions of global mean temperature change. We investigate the skill of our approach in a perfect model test, where we use previous-generation CMIP5 models as pseudo-observations in the historical period. The performance of the distribution weighted in the abovementioned manner with respect to matching the pseudo-observations in the future is then evaluated, and we find a mean increase in skill of about 17 {\%} compared with the unweighted distribution. In addition, we show that our independence metric correctly clusters models known to be similar based on a CMIP6 “family tree”, which enables the application of a weighting based on the degree of inter-model dependence. We then apply the weighting approach, based on two observational estimates (the fifth generation of the European Centre for Medium-Range Weather Forecasts Retrospective Analysis – ERA5, and the Modern-Era Retrospective analysis for Research and Applications, version 2 – MERRA-2), to constrain CMIP6 projections under weak (SSP1-2.6) and strong (SSP5-8.5) climate change scenarios (SSP refers to the Shared Socioeconomic Pathways). Our results show a reduction in the projected mean warming for both scenarios because some CMIP6 models with high future warming receive systematically lower performance weights. 
The mean of end-of-century warming (2081–2100 relative to 1995–2014) for SSP5-8.5 with weighting is 3.7 °C, compared with 4.1 °C without weighting; the likely (66{\%}) uncertainty range is 3.1 to 4.6 °C, which equates to a 13 {\%} decrease in spread. For SSP1-2.6, the weighted end-of-century warming is 1 °C (0.7 to 1.4 °C), which results in a reduction of −0.1 °C in the mean and −24 {\%} in the likely range compared with the unweighted case.}, +author = {Brunner, Lukas and Pendergrass, Angeline G. and Lehner, Flavio and Merrifield, Anna L. and Lorenz, Ruth and Knutti, Reto}, +doi = {10.5194/esd-11-995-2020}, +issn = {2190-4987}, +journal = {Earth System Dynamics}, +keywords = {A-weighting,Climate change,Climate model,Coupled model intercomparison project,Global warming,Mathematics,Mean radiant temperature,Probabilistic logic,Statistics,Weighting}, +month = {nov}, +number = {4}, +pages = {995--1012}, +title = {{Reduced global warming from CMIP6 projections when weighting models by performance and independence}}, +url = {https://esd.copernicus.org/articles/11/995/2020/}, +volume = {11}, +year = {2020} +} diff --git a/esmvaltool/references/calipso-icecloud.bibtex b/esmvaltool/references/calipso-icecloud.bibtex new file mode 100644 index 0000000000..da7be60e88 --- /dev/null +++ b/esmvaltool/references/calipso-icecloud.bibtex @@ -0,0 +1,11 @@ +@misc{calipso-icecloud, + publisher={NASA Langley Atmospheric Science Data Center DAAC}, + doi = {10.5067/CALIOP/CALIPSO/L3_ICE_CLOUD-STANDARD-V1-00}, + url = {https://doi.org/10.5067/CALIOP/CALIPSO/L3_ICE_CLOUD-STANDARD-V1-00}, + year = 2018, + month = {jun}, + day = {1}, + date={2018-06-01}, + author = {NASA/LARC/SD/ASDC}, + title = {CALIPSO Lidar Level 3 Ice Cloud Data, Standard V1-00}, +} diff --git a/esmvaltool/references/carvalhais14nature.bibtex b/esmvaltool/references/carvalhais14nature.bibtex new file mode 100644 index 0000000000..6228622aec --- /dev/null +++ b/esmvaltool/references/carvalhais14nature.bibtex @@ -0,0 +1,18 @@ +@article{carvalhais14nature, + doi = {10.1038/nature13731}, + url = {https://doi.org/10.1038%2Fnature13731}, + year = 2014, + month = {oct}, + publisher = {Springer Science and Business Media {LLC}}, + volume = {514}, + number = {7521}, + pages = {213--217}, + author = {Nuno Carvalhais and Matthias Forkel and Myroslava Khomik and + Jessica Bellarby and Martin Jung and Mirco Migliavacca and + Mingquan Mu and Sassan Saatchi and Maurizio Santoro and + Martin Thurner and Ulrich Weber and Bernhard Ahrens and + Christian Beer and Alessandro Cescatti and James Randerson and + Markus Reichstein}, + title = {Global covariation of carbon turnover times with climate in terrestrial ecosystems}, + journal = {Nature} +} diff --git a/esmvaltool/references/cds-satellite-lai-fapar.bibtex b/esmvaltool/references/cds-satellite-lai-fapar.bibtex new file mode 100644 index 0000000000..2ef8be06fa --- /dev/null +++ b/esmvaltool/references/cds-satellite-lai-fapar.bibtex @@ -0,0 +1,13 @@ +@article{cds-satellite-lai-fapar, + doi = {10.1016/j.rse.2007.02.018}, + url = {https://doi.org/10.1016%2Fj.rse.2007.02.018}, + year = 2007, + month = {oct}, + publisher = {Elsevier {BV}}, + volume = {110}, + number = {3}, + pages = {275--286}, + author = {Fr{\'{e}}d{\'{e}}ric Baret and Olivier Hagolle and Bernhard Geiger and Patrice Bicheron and Bastien Miras and Mireille Huc and B{\'{e}}atrice Berthelot and Fernando Ni{\~{n}}o and Marie Weiss 
and Olivier Samain and Jean Louis Roujean and Marc Leroy}, + title = {{LAI}, {fAPAR} and {fCover} {CYCLOPES} global products derived from {VEGETATION}}, + journal = {Remote Sensing of Environment} +} diff --git a/esmvaltool/references/cds-satellite-soil-moisture.bibtex b/esmvaltool/references/cds-satellite-soil-moisture.bibtex new file mode 100644 index 0000000000..cff0f66f12 --- /dev/null +++ b/esmvaltool/references/cds-satellite-soil-moisture.bibtex @@ -0,0 +1,13 @@ +@article{cds-satellite-soil-moisture, + doi = {10.5194/essd-11-717-2019}, + url = {https://doi.org/10.5194%2Fessd-11-717-2019}, + year = 2019, + month = {may}, + publisher = {Copernicus {GmbH}}, + volume = {11}, + number = {2}, + pages = {717--739}, + author = {Alexander Gruber and Tracy Scanlon and Robin van der Schalie and Wolfgang Wagner and Wouter Dorigo}, + title = {Evolution of the {ESA} {CCI} Soil Moisture climate data records and their underlying merging methodology}, + journal = {Earth System Science Data} +} diff --git a/esmvaltool/references/cds-uerra.bibtex b/esmvaltool/references/cds-uerra.bibtex new file mode 100644 index 0000000000..ce438e08c0 --- /dev/null +++ b/esmvaltool/references/cds-uerra.bibtex @@ -0,0 +1,6 @@ +@misc{cds-uerra, + author = {M. Ridal and E. Olsson and P. Unden and K. Zimmermann and A. Ohlsson}, + title = {HARMONIE reanalysis report of results and dataset, UERRA (EU FP7 Collaborative Project, Grant agreement 607193)}, + url = {http://uerra.eu/component/dpattachments/?task=attachment.download\&id=296}, + year = 2017 +} diff --git a/esmvaltool/references/cds-xch4.bibtex b/esmvaltool/references/cds-xch4.bibtex new file mode 100644 index 0000000000..4e3602713e --- /dev/null +++ b/esmvaltool/references/cds-xch4.bibtex @@ -0,0 +1,13 @@ +@article{cds-xch4, + doi = {10.1007/s42423-018-0004-6}, + url = {https://doi.org/10.1007%2Fs42423-018-0004-6}, + year = 2018, + month = {aug}, + publisher = {Springer Science and Business Media {LLC}}, + volume = {1}, + number = {1}, + pages = {57--60}, + author = {Michael Buchwitz and Maximilian Reuter and Oliver Schneising and Heinrich Bovensmann and John P. Burrows and Hartmut Boesch and Jasdeep Anand and Robert Parker and Rob G. Detmers and Ilse Aben and Otto P. Hasekamp and Cyril Crevoisier and Raymond Armante and Claus Zehner and Dinand Schepers}, + title = {Copernicus Climate Change Service (C3S) Global Satellite Observations of Atmospheric Carbon Dioxide and Methane}, + journal = {Advances in Astronautics Science and Technology} +} diff --git a/esmvaltool/references/cds-xco2.bibtex b/esmvaltool/references/cds-xco2.bibtex new file mode 100644 index 0000000000..17045de017 --- /dev/null +++ b/esmvaltool/references/cds-xco2.bibtex @@ -0,0 +1,13 @@ +@article{cds-xco2, + doi = {10.1007/s42423-018-0004-6}, + url = {https://doi.org/10.1007%2Fs42423-018-0004-6}, + year = 2018, + month = {aug}, + publisher = {Springer Science and Business Media {LLC}}, + volume = {1}, + number = {1}, + pages = {57--60}, + author = {Michael Buchwitz and Maximilian Reuter and Oliver Schneising and Heinrich Bovensmann and John P. Burrows and Hartmut Boesch and Jasdeep Anand and Robert Parker and Rob G. Detmers and Ilse Aben and Otto P. 
Hasekamp and Cyril Crevoisier and Raymond Armante and Claus Zehner and Dinand Schepers}, + title = {Copernicus Climate Change Service (C3S) Global Satellite Observations of Atmospheric Carbon Dioxide and Methane}, + journal = {Advances in Astronautics Science and Technology} +} diff --git a/esmvaltool/references/ceres-ebaf-ed41.bibtex b/esmvaltool/references/ceres-ebaf-ed41.bibtex new file mode 100644 index 0000000000..a9be154442 --- /dev/null +++ b/esmvaltool/references/ceres-ebaf-ed41.bibtex @@ -0,0 +1,10 @@ +@misc{ceres-ebaf_ed41, + publisher={NASA Langley Atmospheric Science Data Center DAAC}, + title={CERES Energy Balanced and Filled (EBAF) TOA and Surface Monthly means data in netCDF Edition 4.1}, + url={https://doi.org/10.5067/TERRA-AQUA/CERES/EBAF_L3B.004.1}, + author={NASA/LARC/SD/ASDC}, + date={2019-06-12}, + year=2019, + month=6, + day=12, +} diff --git a/esmvaltool/references/ceres-ebaf.bibtex b/esmvaltool/references/ceres-ebaf.bibtex new file mode 100644 index 0000000000..ff496ee1af --- /dev/null +++ b/esmvaltool/references/ceres-ebaf.bibtex @@ -0,0 +1,13 @@ +@article{ceres-ebaf, + doi = {10.1175/jcli-d-17-0208.1}, + url = {https://doi.org/10.1175%2Fjcli-d-17-0208.1}, + year = 2018, + month = {jan}, + publisher = {American Meteorological Society}, + volume = {31}, + number = {2}, + pages = {895--918}, + author = {Norman G. Loeb and David R. Doelling and Hailan Wang and Wenying Su and Cathy Nguyen and Joseph G. Corbett and Lusheng Liang and Cristian Mitrescu and Fred G. Rose and Seiji Kato}, + title = {Clouds and the Earth's Radiant Energy System ({CERES}) Energy Balanced and Filled ({EBAF}) Top-of-Atmosphere ({TOA}) Edition-4.0 Data Product}, + journal = {Journal of Climate} +} diff --git a/esmvaltool/references/ceres-syn1deg.bibtex b/esmvaltool/references/ceres-syn1deg.bibtex new file mode 100644 index 0000000000..e26f7e49f6 --- /dev/null +++ b/esmvaltool/references/ceres-syn1deg.bibtex @@ -0,0 +1,13 @@ +@article{ceres-syn1deg, + doi = {10.1175/1520-0477(1996)077<0853:catere>2.0.co;2}, + url = {https://doi.org/10.1175%2F1520-0477%281996%29077%3C0853%3Acatere%3E2.0.co%3B2}, + year = 1996, + month = {may}, + publisher = {American Meteorological Society}, + volume = {77}, + number = {5}, + pages = {853--868}, + author = {Bruce A. Wielicki and Bruce R. Barkstrom and Edwin F. Harrison and Robert B. Lee and G. Louis Smith and John E. Cooper}, + title = {Clouds and the Earth{\textquotesingle}s Radiant Energy System ({CERES}): An Earth Observing System Experiment}, + journal = {Bulletin of the American Meteorological Society} +} diff --git a/esmvaltool/references/cionni11acp.bibtex b/esmvaltool/references/cionni11acp.bibtex new file mode 100644 index 0000000000..2a14a4c29c --- /dev/null +++ b/esmvaltool/references/cionni11acp.bibtex @@ -0,0 +1,13 @@ +@article{cionni11acp, + doi = {10.5194/acp-11-11267-2011}, + url = {https://doi.org/10.5194%2Facp-11-11267-2011}, + year = 2011, + month = {nov}, + publisher = {Copernicus {GmbH}}, + volume = {11}, + number = {21}, + pages = {11267--11292}, + author = {I. Cionni and V. Eyring and J. F. Lamarque and W. J. Randel and D. S. Stevenson and F. Wu and G. E. Bodeker and T. G. Shepherd and D. T. Shindell and D. W. 
Waugh}, + title = {Ozone database in support of {CMIP}5 simulations: results and corresponding radiative forcing}, + journal = {Atmospheric Chemistry and Physics} +} diff --git a/esmvaltool/references/clara-avhrr.bibtex b/esmvaltool/references/clara-avhrr.bibtex new file mode 100644 index 0000000000..b7216dc5a2 --- /dev/null +++ b/esmvaltool/references/clara-avhrr.bibtex @@ -0,0 +1,9 @@ +@article{clara-avhrr, + doi = {10.5676/EUM_SAF_CM/CLARA_AVHRR/V002_01}, + url = {https://doi.org/10.5676/EUM_SAF_CM/CLARA_AVHRR/V002_01}, + year = 2020, + publisher = {Satellite Application Facility on Climate Monitoring (CM SAF)}, + author = {Karlsson, Karl-Göran and Anttila, Kati and Trentmann, Jörg and Stengel, Martin and Solodovnik, Irina and Meirink, Jan Fokke and Devasthale, Abhay and Kothe, Steffen and Jääskeläinen, Emmihenna and Sedlar, Joseph and Benas, Nikos and van Zadelhoff, Gerd-Jan and Stein, Diana and Finkensieper, Stephan and Håkansson, Nina and Hollmann, Rainer and Kaiser, Johannes and Werscheck, Martin}, + title = {CLARA-A2.1: CM SAF cLoud, Albedo and surface RAdiation dataset from AVHRR data - Edition 2.1}, + journal = {Satellite Application Facility on Climate Monitoring} +} diff --git a/esmvaltool/references/clivar09jclim.bibtex b/esmvaltool/references/clivar09jclim.bibtex new file mode 100644 index 0000000000..0a9850e70c --- /dev/null +++ b/esmvaltool/references/clivar09jclim.bibtex @@ -0,0 +1,13 @@ +@article{clivar09jclim, + author = {D. Waliser and K. Sperber and H. Hendon and D. Kim and E. Maloney and M. Wheeler and K. Weickmann and L. Donner and J. Gottschalck and I.-S. Kang and D. Legler and M. Moncrieff and S. Schubert and W. Stern and F. Vitart and B. Wang and W. Wang and S. Woolnough}, + doi = {10.1175/2008jcli2731.1}, + url = {https://doi.org/10.1175%2F2008jcli2731.1}, + year = 2009, + month = {jun}, + publisher = {American Meteorological Society}, + volume = {22}, + number = {11}, + pages = {3006--3030}, + title = {{MJO} Simulation Diagnostics}, + journal = {Journal of Climate} +} diff --git a/esmvaltool/references/collins13ipcc.bibtex b/esmvaltool/references/collins13ipcc.bibtex new file mode 100644 index 0000000000..7c28c9b6c4 --- /dev/null +++ b/esmvaltool/references/collins13ipcc.bibtex @@ -0,0 +1,11 @@ +@inbook{collins13ipcc, + author={Collins, M. and Knutti, R. and Arblaster, J. and Dufresne, J.-L. and Fichefet, T. and Friedlingstein, P. and Gao, X. and Gutowski, W. J. and Johns, T. and Krinner, G. and Shongwe, M. and Tebaldi, C. and Weaver, A. J. and Wehner, M.}, + editor={Stocker, T. F. and Qin, D. and Plattner, G.-K. and Tignor, M. and Allen, S. K. and Doschung, J. and Nauels, A. and Xia, Y. and Bex, V. and Midgley, P. M.}, + title={Long-term climate change: Projections, commitments and irreversibility}, + booktitle={Climate Change 2013: The Physical Science Basis. Contribution of Working Group I to the Fifth Assessment Report of the Intergovernmental Panel on Climate Change}, + year={2013}, + pages={1029--1136}, + publisher={Cambridge University Press}, + address={Cambridge, UK}, + doi={10.1017/CBO9781107415324.024}, +} diff --git a/esmvaltool/references/contact_authors.bibtex b/esmvaltool/references/contact_authors.bibtex new file mode 100644 index 0000000000..13b308c23a --- /dev/null +++ b/esmvaltool/references/contact_authors.bibtex @@ -0,0 +1,3 @@ +@misc{contact_authors, + title = "Please contact the author(s) to discuss acknowledgment or co-authorship." 
+} diff --git a/esmvaltool/references/corti99nat.bibtex b/esmvaltool/references/corti99nat.bibtex new file mode 100644 index 0000000000..133dd17c93 --- /dev/null +++ b/esmvaltool/references/corti99nat.bibtex @@ -0,0 +1,13 @@ +@article{corti99nat, + doi = {10.1038/19745}, + url = {https://doi.org/10.1038%2F19745}, + year = 1999, + month = {apr}, + publisher = {Springer Science and Business Media {LLC}}, + volume = {398}, + number = {6730}, + pages = {799--802}, + author = {S. Corti and F. Molteni and T. N. Palmer}, + title = {Signature of recent climate change in frequencies of natural atmospheric circulation regimes}, + journal = {Nature} +} diff --git a/esmvaltool/references/cos22esd.bibtex b/esmvaltool/references/cos22esd.bibtex new file mode 100644 index 0000000000..1a3318667c --- /dev/null +++ b/esmvaltool/references/cos22esd.bibtex @@ -0,0 +1,13 @@ +@article{Cos2022, + doi = {10.5194/esd-13-321-2022}, + url = {https://doi.org/10.5194/esd-13-321-2022}, + year = {2022}, + month = feb, + publisher = {Copernicus {GmbH}}, + volume = {13}, + number = {1}, + pages = {321--340}, + author = {Josep Cos and Francisco Doblas-Reyes and Martin Jury and Ra\"{u}l Marcos and Pierre-Antoine Bretonni{\`{e}}re and Margarida Sams{\'{o}}}, + title = {The Mediterranean climate change hotspot in the {CMIP}5 and {CMIP}6 projections}, + journal = {Earth System Dynamics} +} diff --git a/esmvaltool/references/cowtanway.bibtex b/esmvaltool/references/cowtanway.bibtex new file mode 100644 index 0000000000..22b8baa6dc --- /dev/null +++ b/esmvaltool/references/cowtanway.bibtex @@ -0,0 +1,13 @@ +@article{cowtanway, + author = {Cowtan, Kevin and Way, Robert G.}, + title = {Coverage bias in the HadCRUT4 temperature series and its impact on recent temperature trends}, + journal = {Quarterly Journal of the Royal Meteorological Society}, + volume = {140}, + number = {683}, + pages = {1935-1944}, + doi = {10.1002/qj.2297}, + url = {https://rmets.onlinelibrary.wiley.com/doi/abs/10.1002/qj.2297}, + eprint = {https://rmets.onlinelibrary.wiley.com/doi/pdf/10.1002/qj.2297}, + year = {2014} +} + diff --git a/esmvaltool/references/cox18nature.bibtex b/esmvaltool/references/cox18nature.bibtex new file mode 100644 index 0000000000..f3895b38c5 --- /dev/null +++ b/esmvaltool/references/cox18nature.bibtex @@ -0,0 +1,13 @@ +@article{cox18nature, + doi = {10.1038/nature25450}, + url = {https://doi.org/10.1038%2Fnature25450}, + year = 2018, + month = {jan}, + publisher = {Springer Science and Business Media {LLC}}, + volume = {553}, + number = {7688}, + pages = {319--322}, + author = {Peter M. Cox and Chris Huntingford and Mark S. Williamson}, + title = {Emergent constraint on equilibrium climate sensitivity from global temperature variability}, + journal = {Nature} +} diff --git a/esmvaltool/references/cru.bibtex b/esmvaltool/references/cru.bibtex new file mode 100644 index 0000000000..7de9c36f5e --- /dev/null +++ b/esmvaltool/references/cru.bibtex @@ -0,0 +1,16 @@ +@article{cru, + title = {Version 4 of the {{CRU TS}} Monthly High-Resolution Gridded Multivariate Climate Dataset}, + author = {Harris, Ian and Osborn, Timothy J. 
and Jones, Phil and Lister, David}, + date = {2020-04-03}, + year = 2020, + month = {april}, + journaltitle = {Sci Data}, + volume = {7}, + number = {1}, + pages = {109}, + publisher = {{Nature Publishing Group}}, + issn = {2052-4463}, + doi = {10.1038/s41597-020-0453-3}, + url = {https://www.nature.com/articles/s41597-020-0453-3}, + urldate = {2023-10-12}, +} \ No newline at end of file diff --git a/esmvaltool/references/ct2019.bibtex b/esmvaltool/references/ct2019.bibtex new file mode 100644 index 0000000000..88eb87f779 --- /dev/null +++ b/esmvaltool/references/ct2019.bibtex @@ -0,0 +1,9 @@ +@misc{ct2019, + doi = {10.25925/39M3-6069}, + url = {https://www.esrl.noaa.gov/gmd/ccgg/carbontracker/CT2019/}, + author = {Jacobson, Andrew R. and Schuldt, Kenneth N. and Miller, John B. and Oda, Tomohiro and Tans, Pieter and {Arlyn Andrews} and Mund, John and Ott, Lesley and Collatz, George J. and Aalto, Tuula and Afshar, Sara and Aikin, Ken and Aoki, Shuji and Apadula, Francesco and Baier, Bianca and Bergamaschi, Peter and Beyersdorf, Andreas and Biraud, Sebastien C. and Bollenbacher, Alane and Bowling, David and Brailsford, Gordon and Abshire, James Brice and Chen, Gao and {Huilin Chen} and {Lukasz Chmura} and {Sites Climadat} and Colomb, Aurelie and Conil, Sébastien and Cox, Adam and Cristofanelli, Paolo and Cuevas, Emilio and Curcoll, Roger and Sloop, Christopher D. and Davis, Ken and Wekker, Stephan De and Delmotte, Marc and DiGangi, Joshua P. and Dlugokencky, Ed and Ehleringer, Jim and Elkins, James W. and Emmenegger, Lukas and Fischer, Marc L. and Forster, Grant and Frumau, Arnoud and Galkowski, Michal and Gatti, Luciana V. and Gloor, Emanuel and Griffis, Tim and Hammer, Samuel and Haszpra, László and Hatakka, Juha and Heliasz, Michal and Hensen, Arjan and Hermanssen, Ove and Hintsa, Eric and Holst, Jutta and Jaffe, Dan and Karion, Anna and Kawa, Stephan Randolph and Keeling, Ralph and Keronen, Petri and Kolari, Pasi and Kominkova, Katerina and Kort, Eric and Krummel, Paul and Kubistin, Dagmar and Labuschagne, Casper and Langenfelds, Ray and Laurent, Olivier and Laurila, Tuomas and Lauvaux, Thomas and Law, Bev and Lee, John and Lehner, Irene and Leuenberger, Markus and Levin, Ingeborg and Levula, Janne and Lin, John and Lindauer, Matthias and Loh, Zoe and Lopez, Morgan and Myhre, Cathrine Lund and Machida, Toshinobu and Mammarella, Ivan and Manca, Giovanni and Manning, Alistair and Manning, Andrew and Marek, Michal V. and Marklund, Per and Martin, Melissa Yang and Matsueda, Hidekazu and McKain, Kathryn and Meijer, Harro and Meinhardt, Frank and Miles, Natasha and Miller, Charles E. and M\"{o}lder, Meelis and Montzka, Stephen and Moore, Fred and {Josep-Anton Morgui} and Morimoto, Shinji and Munger, Bill and {Jaroslaw Necki} and Newman, Sally and Nichol, Sylvia and Niwa, Yosuke and O'Doherty, Simon and {Mikaell Ottosson-L\"{o}fvenius} and Paplawsky, Bill and Peischl, Jeff and Peltola, Olli and {Jean-Marc Pichon} and Piper, Steve and Plass-D\"{o}lmer, Christian and Ramonet, Michel and Reyes-Sanchez, Enrique and Richardson, Scott and Riris, Haris and Ryerson, Thomas and Saito, Kazuyuki and Sargent, Maryann and Sawa, Yousuke and Say, Daniel and Scheeren, Bert and Schmidt, Martina and Schmidt, Andres and Schumacher, Marcus and Shepson, Paul and Shook, Michael and Stanley, Kieran and Steinbacher, Martin and Stephens, Britton and Sweeney, Colm and Thoning, Kirk and Torn, Margaret and Turnbull, Jocelyn and Tørseth, Kjetil and Bulk, Pim Van Den and Laan-Luijkx, Ingrid T. 
Van Der and Dinther, Danielle Van and Vermeulen, Alex and Viner, Brian and Vitkova, Gabriela and Walker, Stephen and Weyrauch, Dietmar and Wofsy, Steve and Worthy, Doug and {Dickon Young} and {Miroslaw Zimnoch}}, + title = {CarbonTracker CT2019}, + publisher = {NOAA Earth System Research Laboratory, Global Monitoring Division}, + year = {2020} +} + diff --git a/esmvaltool/references/davini12jclim.bibtex b/esmvaltool/references/davini12jclim.bibtex new file mode 100644 index 0000000000..6aeb72614a --- /dev/null +++ b/esmvaltool/references/davini12jclim.bibtex @@ -0,0 +1,13 @@ +@article{davini12jclim, + doi = {10.1175/jcli-d-12-00032.1}, + url = {https://doi.org/10.1175%2Fjcli-d-12-00032.1}, + year = 2012, + month = {oct}, + publisher = {American Meteorological Society}, + volume = {25}, + number = {19}, + pages = {6496--6509}, + author = {Paolo Davini and Chiara Cagnazzo and Silvio Gualdi and Antonio Navarra}, + title = {Bidimensional Diagnostics, Variability, and Trends of Northern Hemisphere Blocking}, + journal = {Journal of Climate} +} diff --git a/esmvaltool/references/davini18.bibtex b/esmvaltool/references/davini18.bibtex new file mode 100644 index 0000000000..b7db1281ae --- /dev/null +++ b/esmvaltool/references/davini18.bibtex @@ -0,0 +1,8 @@ +@misc{davini18, + doi = {10.5281/ZENODO.1237837}, + url = {https://zenodo.org/record/1237837}, + author = {Davini, Paolo}, + title = {MiLES - Mid Latitude Evaluation System}, + publisher = {Zenodo}, + year = {2018} +} diff --git a/esmvaltool/references/deangelis15nat.bibtex b/esmvaltool/references/deangelis15nat.bibtex new file mode 100644 index 0000000000..b33bc3d6bb --- /dev/null +++ b/esmvaltool/references/deangelis15nat.bibtex @@ -0,0 +1,13 @@ +@article{deangelis15nat, + doi = {10.1038/nature15770}, + url = {https://doi.org/10.1038%2Fnature15770}, + year = 2015, + month = {dec}, + publisher = {Springer Science and Business Media {LLC}}, + volume = {528}, + number = {7581}, + pages = {249--253}, + author = {Anthony M. DeAngelis and Xin Qu and Mark D. Zelinka and Alex Hall}, + title = {An observational radiative constraint on hydrologic cycle intensification}, + journal = {Nature} +} diff --git a/esmvaltool/references/debruin16ams.bibtex b/esmvaltool/references/debruin16ams.bibtex new file mode 100644 index 0000000000..63cfa6341a --- /dev/null +++ b/esmvaltool/references/debruin16ams.bibtex @@ -0,0 +1,13 @@ +@article{deBruin2016, + doi = {10.1175/jhm-d-15-0006.1}, + url = {https://doi.org/10.1175/jhm-d-15-0006.1}, + year = {2016}, + month = apr, + publisher = {American Meteorological Society}, + volume = {17}, + number = {5}, + pages = {1373--1382}, + author = {H. A. R. de Bruin and I. F. Trigo and F. C. Bosveld and J. F. 
Meirink}, + title = {A Thermodynamically Based Model for Actual Evapotranspiration of an Extensive Grass Field Close to {FAO} Reference, Suitable for Remote Sensing Application}, + journal = {Journal of Hydrometeorology} +} \ No newline at end of file diff --git a/esmvaltool/references/demora2018gmd.bibtex b/esmvaltool/references/demora2018gmd.bibtex new file mode 100644 index 0000000000..5cf53ebdd7 --- /dev/null +++ b/esmvaltool/references/demora2018gmd.bibtex @@ -0,0 +1,13 @@ +@article{demora2018gmd, + doi = {10.5194/gmd-11-4215-2018}, + url = {https://doi.org/10.5194%2Fgmd-11-4215-2018}, + year = 2018, + month = {oct}, + publisher = {Copernicus {GmbH}}, + volume = {11}, + number = {10}, + pages = {4215--4240}, + author = {Lee de Mora and Andrew Yool and Julien Palmieri and Alistair Sellar and Till Kuhlbrodt and Ekaterina Popova and Colin Jones and J. Icarus Allen}, + title = {{BGC}-val: a model- and grid-independent Python toolkit to evaluate marine biogeochemical models}, + journal = {Geoscientific Model Development} +} diff --git a/esmvaltool/references/docquier2017cryo.bibtex b/esmvaltool/references/docquier2017cryo.bibtex new file mode 100644 index 0000000000..ba34706249 --- /dev/null +++ b/esmvaltool/references/docquier2017cryo.bibtex @@ -0,0 +1,11 @@ +@Article{docquier2017cryo, +AUTHOR = {Docquier, D. and Massonnet, F. and Barth\'elemy, A. and Tandon, N. F. and Lecomte, O. and Fichefet, T.}, +TITLE = {Relationships between Arctic sea ice drift and strength modelled by NEMO-LIM3.6}, +JOURNAL = {The Cryosphere}, +VOLUME = {11}, +YEAR = {2017}, +NUMBER = {6}, +PAGES = {2829--2846}, +URL = {https://www.the-cryosphere.net/11/2829/2017/}, +DOI = {10.5194/tc-11-2829-2017} +} diff --git a/esmvaltool/references/dong08grl.bibtex b/esmvaltool/references/dong08grl.bibtex new file mode 100644 index 0000000000..44a5def804 --- /dev/null +++ b/esmvaltool/references/dong08grl.bibtex @@ -0,0 +1,12 @@ +@article{dong08grl, + doi = {10.1029/2006jc004051}, + url = {https://doi.org/10.1029%2F2006jc004051}, + year = 2008, + month = {jun}, + publisher = {American Geophysical Union ({AGU})}, + volume = {113}, + number = {C6}, + author = {Shenfu Dong and Janet Sprintall and Sarah T. Gille and Lynne Talley}, + title = {Southern Ocean mixed-layer depth from Argo float profiles}, + journal = {Journal of Geophysical Research} +} diff --git a/esmvaltool/references/donofrio14jh.bibtex b/esmvaltool/references/donofrio14jh.bibtex new file mode 100644 index 0000000000..60aecf1ceb --- /dev/null +++ b/esmvaltool/references/donofrio14jh.bibtex @@ -0,0 +1,13 @@ +@article{donofrio14jh, + doi = {10.1175/jhm-d-13-096.1}, + url = {https://doi.org/10.1175/jhm-d-13-096.1}, + year = {2014}, + month = apr, + publisher = {American Meteorological Society}, + volume = {15}, + number = {2}, + pages = {830--843}, + author = {D'Onofrio, D. and Palazzi, E. and von Hardenberg, J. and Provenzale, A. and Calmanti, S.}, + title = {Stochastic Rainfall Downscaling of Climate Models}, + journal = {Journal of Hydrometeorology} +} diff --git a/esmvaltool/references/dorigo17rse.bibtex b/esmvaltool/references/dorigo17rse.bibtex new file mode 100644 index 0000000000..008758aac1 --- /dev/null +++ b/esmvaltool/references/dorigo17rse.bibtex @@ -0,0 +1,11 @@ +@article{dorigo17rse, +author = {Dorigo, Wouter and Wagner, Wolfgang and Albergel, Clement and Albrecht, Franziska and Balsamo, Gianpaolo and Brocca, Luca and Chung, Daniel and Ertl, Martin and Forkel, Matthias and Gruber, Alexander and Haas, Eva and Hamer, Paul D.
and Hirschi, Martin and Ikonen, Jaakko and de Jeu, Richard and Kidd, Richard and Lahoz, William and Liu, Yi Y. and Miralles, Diego and Mistelbauer, Thomas and Nicolai-Shaw, Nadine and Parinussa, Robert and Pratola, Chiara and Reimer, Christoph and van der Schalie, Robin and Seneviratne, Sonia I. and Smolander, Tuomo and Lecomte, Pascal}, +doi = {10.1016/j.rse.2017.07.001}, +issn = {0034-4257}, +journal = {Remote Sensing of Environment}, +month = {dec}, +pages = {185--215}, +title = {{ESA CCI Soil Moisture for improved Earth system understanding: State-of-the art and future directions}}, +volume = {203}, +year = {2017} +} diff --git a/esmvaltool/references/duemenil00mpimr.bibtex b/esmvaltool/references/duemenil00mpimr.bibtex new file mode 100644 index 0000000000..67744af3b9 --- /dev/null +++ b/esmvaltool/references/duemenil00mpimr.bibtex @@ -0,0 +1,8 @@ +@misc{duemenil00mpimr, + doi = {10.17617/2.1560282}, + url = {http://pubman.mpdl.mpg.de/pubman/item/escidoc:1560282}, + author = {D\"{u}menil Gates, Lydia and Hagemann, Stefan and Golz, Claudia}, + language = {eng}, + title = {Observed historical discharge data from major rivers for climate model validation}, + year = {2000} +} diff --git a/esmvaltool/references/duveiller2018.bibtex b/esmvaltool/references/duveiller2018.bibtex new file mode 100644 index 0000000000..57562ab02b --- /dev/null +++ b/esmvaltool/references/duveiller2018.bibtex @@ -0,0 +1,12 @@ +@article{duveiller2018, + doi = {10.1038/sdata.2018.14}, + url = {https://doi.org/10.1038%2Fsdata.2018.14}, + year = 2018, + month = {feb}, + publisher = {Springer Science and Business Media {LLC}}, + volume = {5}, + number = {1}, + author = {Gregory Duveiller and Josh Hooker and Alessandro Cescatti}, + title = {A dataset mapping the potential biophysical effects of vegetation cover change}, + journal = {Scientific Data} +} diff --git a/esmvaltool/references/e-obs.bibtex b/esmvaltool/references/e-obs.bibtex new file mode 100644 index 0000000000..cab082ae68 --- /dev/null +++ b/esmvaltool/references/e-obs.bibtex @@ -0,0 +1,12 @@ +@article{e-obs, + author = {Cornes, Richard C. and van der Schrier, Gerard and van den Besselaar, Else J. M. and Jones, Philip D.}, + title = {An Ensemble Version of the E-OBS Temperature and Precipitation Data Sets}, + journal = {Journal of Geophysical Research: Atmospheres}, + volume = {123}, + number = {17}, + pages = {9391--9409}, + doi = {10.1029/2017JD028200}, + url = {https://agupubs.onlinelibrary.wiley.com/doi/abs/10.1029/2017JD028200}, + eprint = {https://agupubs.onlinelibrary.wiley.com/doi/pdf/10.1029/2017JD028200}, + year = {2018} +} diff --git a/esmvaltool/references/emmons00jgr.bibtex b/esmvaltool/references/emmons00jgr.bibtex new file mode 100644 index 0000000000..900c8ad753 --- /dev/null +++ b/esmvaltool/references/emmons00jgr.bibtex @@ -0,0 +1,13 @@ +@article{emmons00jgr, + doi = {10.1029/2000jd900232}, + url = {https://doi.org/10.1029/2000jd900232}, + year = {2000}, + month = aug, + publisher = {American Geophysical Union ({AGU})}, + volume = {105}, + number = {D16}, + pages = {20497--20538}, + author = {Louisa K. Emmons and Didier A. Hauglustaine and Jean-Fran{\c{c}}ois M\"{u}ller and Mary Anne Carroll and Guy P.
Brasseur and Dominik Brunner and Johannes Staehelin and Valerie Thouret and Alain Marenco}, + title = {Data composites of airborne observations of tropospheric ozone and its precursors}, + journal = {Journal of Geophysical Research: Atmospheres} +} diff --git a/esmvaltool/references/eppley-vgpm-modis.bibtex b/esmvaltool/references/eppley-vgpm-modis.bibtex new file mode 100644 index 0000000000..c2e3eda8ba --- /dev/null +++ b/esmvaltool/references/eppley-vgpm-modis.bibtex @@ -0,0 +1,13 @@ +@article{eppley-vgpm-modis, + doi = {10.4319/lo.1997.42.1.0001}, + url = {https://doi.org/10.4319%2Flo.1997.42.1.0001}, + year = 1997, + month = {jan}, + publisher = {Wiley}, + volume = {42}, + number = {1}, + pages = {1--20}, + author = {Michael J. Behrenfeld and Paul G. Falkowski}, + title = {Photosynthetic rates derived from satellite-based chlorophyll concentration}, + journal = {Limnology and Oceanography} +} diff --git a/esmvaltool/references/era-interim-land.bibtex b/esmvaltool/references/era-interim-land.bibtex new file mode 100644 index 0000000000..3f0d79983e --- /dev/null +++ b/esmvaltool/references/era-interim-land.bibtex @@ -0,0 +1,13 @@ +@article{era-interim-land, + doi = {10.5194/hess-19-389-2015}, + url = {https://doi.org/10.5194%2Fhess-19-389-2015}, + year = 2015, + month = {jan}, + publisher = {Copernicus {GmbH}}, + volume = {19}, + number = {1}, + pages = {389--407}, + author = {G. Balsamo and C. Albergel and A. Beljaars and S. Boussetta and E. Brun and H. Cloke and D. Dee and E. Dutra and J. Mu{\~{n}}oz-Sabater and F. Pappenberger and P. de Rosnay and T. Stockdale and F. Vitart}, + title = {{ERA}-Interim/Land: a global land surface reanalysis data set}, + journal = {Hydrology and Earth System Sciences} +} diff --git a/esmvaltool/references/era-interim.bibtex b/esmvaltool/references/era-interim.bibtex new file mode 100644 index 0000000000..a4d41e0197 --- /dev/null +++ b/esmvaltool/references/era-interim.bibtex @@ -0,0 +1,13 @@ +@article{era-interim, + doi = {10.1002/qj.828}, + url = {https://doi.org/10.1002%2Fqj.828}, + year = 2011, + month = {apr}, + publisher = {Wiley}, + volume = {137}, + number = {656}, + pages = {553--597}, + author = {D. P. Dee and S. M. Uppala and A. J. Simmons and P. Berrisford and P. Poli and S. Kobayashi and U. Andrae and M. A. Balmaseda and G. Balsamo and P. Bauer and P. Bechtold and A. C. M. Beljaars and L. van de Berg and J. Bidlot and N. Bormann and C. Delsol and R. Dragani and M. Fuentes and A. J. Geer and L. Haimberger and S. B. Healy and H. Hersbach and E. V. H{\'{o}}lm and L. Isaksen and P. K{\aa}llberg and M. Köhler and M. Matricardi and A. P. McNally and B. M. Monge-Sanz and J.-J. Morcrette and B.-K. Park and C. Peubey and P. de Rosnay and C. Tavolato and J.-N. Th{\'{e}}paut and F. 
Vitart}, + title = {The {ERA}-Interim reanalysis: configuration and performance of the data assimilation system}, + journal = {Quarterly Journal of the Royal Meteorological Society} +} diff --git a/esmvaltool/references/era5-land.bibtex b/esmvaltool/references/era5-land.bibtex new file mode 100644 index 0000000000..129ad8a9be --- /dev/null +++ b/esmvaltool/references/era5-land.bibtex @@ -0,0 +1,8 @@ +@misc{era5-land, + doi = {10.24381/CDS.68D2BB30}, + url = {https://cds.climate.copernicus.eu/doi/10.24381/cds.68d2bb30}, + author = {{Copernicus Climate Change Service}}, + title = {ERA5-Land monthly averaged data from 2001 to present}, + publisher = {ECMWF}, + year = {2019} +} diff --git a/esmvaltool/references/era5.bibtex b/esmvaltool/references/era5.bibtex new file mode 100644 index 0000000000..ca44f8a5d7 --- /dev/null +++ b/esmvaltool/references/era5.bibtex @@ -0,0 +1,6 @@ +@misc{era5, + author = {{Copernicus Climate Change Service (C3S)}}, + title = {{ERA5}: Fifth generation of {ECMWF} atmospheric reanalyses of the global climate}, + publisher = {Copernicus Climate Change Service Climate Data Store (CDS)}, + year = 2017 +} diff --git a/esmvaltool/references/ersstv3b.bibtex b/esmvaltool/references/ersstv3b.bibtex new file mode 100644 index 0000000000..425d527711 --- /dev/null +++ b/esmvaltool/references/ersstv3b.bibtex @@ -0,0 +1,15 @@ +@article{ersstv3b, + author = {Smith, Thomas M. and Reynolds, Richard W.}, + title = "{Extended Reconstruction of Global Sea Surface Temperatures Based on COADS Data (1854–1997)}", + journal = {Journal of Climate}, + volume = {16}, + number = {10}, + pages = {1495--1510}, + year = {2003}, + month = {05}, + abstract = "{A monthly extended reconstruction of global SST (ERSST) is produced based on Comprehensive Ocean–Atmosphere Data Set (COADS) release 2 observations from the 1854–1997 period. Improvements come from the use of updated COADS observations with new quality control procedures and from improved reconstruction methods. In addition error estimates are computed, which include uncertainty from both sampling and analysis errors. Using this method, little global variance can be reconstructed before the 1880s because data are too sparse to resolve enough modes for that period. Error estimates indicate that except in the North Atlantic ERSST is of limited value before 1880, when the uncertainty of the near-global average is almost as large as the signal. In most regions, the uncertainty decreases through most of the period and is smallest after 1950. The large-scale variations of ERSST are broadly consistent with those associated with the Hadley Centre Global Sea Ice and Sea Surface Temperature (HadISST) reconstruction produced by the Met Office. There are differences due to both the use of different historical bias corrections as well as different data and analysis procedures, but these differences do not change the overall character of the SST variations. Procedures used here produce a smoother analysis compared to HadISST. The smoother ERSST has the advantage of filtering out more noise at the possible cost of filtering out some real variations when sampling is sparse. A rotated EOF analysis of the ERSST anomalies shows that the dominant modes of variation include ENSO and modes associated with trends.
Projection of the HadISST data onto the rotated eigenvectors produces time series similar to those for ERSST, indicating that the dominant modes of variation are consistent in both.}", + issn = {0894-8755}, + doi = {10.1175/1520-0442(2003)016<1495:erogss>2.0.co;2}, + url = {http://dx.doi.org/10.1175/1520-0442(2003)016<1495:EROGSS>2.0.CO;2}, + eprint = {https://journals.ametsoc.org/jcli/article-pdf/16/10/1495/3779054/1520-0442-16\_10\_1495.pdf}, +} diff --git a/esmvaltool/references/ersstv5.bibtex b/esmvaltool/references/ersstv5.bibtex new file mode 100644 index 0000000000..8b588d3690 --- /dev/null +++ b/esmvaltool/references/ersstv5.bibtex @@ -0,0 +1,8 @@ +@misc{ersstv5, + doi = {10.7289/V5T72FNM}, + url = {https://data.nodc.noaa.gov/cgi-bin/iso?id=gov.noaa.ncdc:C00927}, + author = {Huang, Boyin and Thorne, Peter W. and Banzon, Viva F. and Boyer, Tim and Chepurin, Gennady and Lawrimore, Jay H. and Menne, Matthew J. and Smith, Thomas M. and Vose, Russell S. and Zhang, Huai-Min}, + title = {NOAA Extended Reconstructed Sea Surface Temperature (ERSST), Version 5}, + publisher = {NOAA National Centers for Environmental Information}, + year = {2017} +} diff --git a/esmvaltool/references/esacci-aerosol.bibtex b/esmvaltool/references/esacci-aerosol.bibtex new file mode 100644 index 0000000000..002be7b19d --- /dev/null +++ b/esmvaltool/references/esacci-aerosol.bibtex @@ -0,0 +1,13 @@ +@article{esacci-aerosol, + doi = {10.3390/rs8050421}, + url = {https://doi.org/10.3390%2Frs8050421}, + year = 2016, + month = {may}, + publisher = {{MDPI} {AG}}, + volume = {8}, + number = {5}, + pages = {421}, + author = {Thomas Popp and Gerrit de Leeuw and Christine Bingen and Christoph Brühl and Virginie Capelle and Alain Chedin and Lieven Clarisse and Oleg Dubovik and Roy Grainger and Jan Griesfeller and Andreas Heckel and Stefan Kinne and Lars Klüser and Miriam Kosmale and Pekka Kolmonen and Luca Lelli and Pavel Litvinov and Linlu Mei and Peter North and Simon Pinnock and Adam Povey and Charles Robert and Michael Schulz and Larisa Sogacheva and Kerstin Stebel and Deborah Stein Zweers and Gareth Thomas and Lieuwe Tilstra and Sophie Vandenbussche and Pepijn Veefkind and Marco Vountas and Yong Xue}, + title = {Development, Production and Evaluation of Aerosol Climate Data Records from European Satellite Observations (Aerosol{\_}cci)}, + journal = {Remote Sensing} +} diff --git a/esmvaltool/references/esacci-cloud.bibtex b/esmvaltool/references/esacci-cloud.bibtex new file mode 100644 index 0000000000..60d3b3b40e --- /dev/null +++ b/esmvaltool/references/esacci-cloud.bibtex @@ -0,0 +1,13 @@ +@article{esacci-cloud, + doi = {10.5194/essd-9-881-2017}, + url = {https://doi.org/10.5194%2Fessd-9-881-2017}, + year = 2017, + month = {nov}, + publisher = {Copernicus {GmbH}}, + volume = {9}, + number = {2}, + pages = {881--904}, + author = {Martin Stengel and Stefan Stapelberg and Oliver Sus and Cornelia Schlundt and Caroline Poulsen and Gareth Thomas and Matthew Christensen and Cintia Carbajal Henken and Rene Preusker and Jürgen Fischer and Abhay Devasthale and Ulrika Will{\'{e}}n and Karl-Göran Karlsson and Gregory R. McGarragh and Simon Proud and Adam C. Povey and Roy G. Grainger and Jan Fokke Meirink and Artem Feofilov and Ralf Bennartz and Jedrzej S. 
Bojanowski and Rainer Hollmann}, + title = {Cloud property datasets retrieved from {AVHRR}, {MODIS}, {AATSR} and {MERIS} in the framework of the Cloud{\_}cci project}, + journal = {Earth System Science Data} +} diff --git a/esmvaltool/references/esacci-fire.bibtex b/esmvaltool/references/esacci-fire.bibtex new file mode 100644 index 0000000000..16b442d876 --- /dev/null +++ b/esmvaltool/references/esacci-fire.bibtex @@ -0,0 +1,8 @@ +@misc{esacci-fire, + doi = {10.5285/D80636D4-7DAF-407E-912D-F5BB61C142FA}, + url = {http://catalogue.ceda.ac.uk/uuid/fa493d62c2af4c5cb8e6e3c340cdbf0d}, + author = {Chuvieco, Emilio and Pettinari, M. Lucrecia and Alonso-Canas, Itziar and Bastarrika, Aitor and Roteta, Ekhi and Tansey, Kevin and Padilla Parellada, Marc and Lewis, Philip and Gomez-Dans, Jose and Pereira, Jose Miguel and Oom, Duarte and Campagnolo, Manuel and Storm, Thomas and B\"{o}ttcher, Martin and Kaiser, Johannes and Heil, Angelika and Mouillot, Florent and Ciais, Philippe and Cadule, Patricia and Yue, Chao and Van Der Werf, Guido}, + title = {ESA Fire Climate Change Initiative (Fire\_cci): Burned Area Grid Product Version 4.1}, + publisher = {NERC Centre for Environmental Data Analysis}, + year = {2016} +} diff --git a/esmvaltool/references/esacci-landcover.bibtex b/esmvaltool/references/esacci-landcover.bibtex new file mode 100644 index 0000000000..44757b1d04 --- /dev/null +++ b/esmvaltool/references/esacci-landcover.bibtex @@ -0,0 +1,8 @@ +@misc{esacci-landcover, + doi = {10.5194/essd-15-1465-2023}, + url = {https://catalogue.ceda.ac.uk/uuid/26a0f46c95ee4c29b5c650b129aab788/}, + title = {A 29-year time series of annual 300 m resolution plant-functional-type maps for climate models}, + publisher = {Earth System Science Data}, + year = {2023}, + author = {Kandice L. Harper and Céline Lamarche and Andrew Hartley and Philippe Peylin and Catherine Ottlé and Vladislav Bastrikov and Rodrigo San Martín and Sylvia I. Bohnenstengel and Grit Kirches and Martin Boettcher and Roman Shevchuk and Carsten Brockmann and Pierre Defourny} +} diff --git a/esmvaltool/references/esacci-oc.bibtex b/esmvaltool/references/esacci-oc.bibtex new file mode 100644 index 0000000000..e1367f3b80 --- /dev/null +++ b/esmvaltool/references/esacci-oc.bibtex @@ -0,0 +1,8 @@ +@misc{esacci-oc, + doi = {10.5285/9C334FBE6D424A708CF3C4CF0C6A53F5}, + url = {https://catalogue.ceda.ac.uk/uuid/9c334fbe6d424a708cf3c4cf0c6a53f5}, + author = {Sathyendranath, S. and Grant, M. and Brewin, R.J.W. and Brockmann, C. and Brotas, V. and Chuprin, A. and Doerffer, R. and Dowell, M. and Farman, A. and Groom, S. and Jackson, T. and Krasemann, H. and Lavender, S. and Martinez Vicente, V. and Mazeran, C. and Mélin, F. and Moore, T.S. and M\"{u}ller, D. and Platt, T. and Regner, P. and Roy, S. and Steinmetz, F. and Swinton, J. and Valente, A. and Z\"{u}hlke, M. and Antoine, D. and Arnone, R. and Balch, W.M. and Barker, K. and Barlow, R. and Bélanger, S. and Berthon, J.-F. and Beşiktepe, Ş. and Brando, V.E. and Canuti, E. and Chavez, F. and Claustre, H. and Crout, R. and Feldman, G. and Franz, B. and Frouin, R. and García-Soto, C. and Gibb, S.W. and Gould, R. and Hooker, S. and Kahru, M. and Klein, H. and Kratzer, S. and Loisel, H. and McKee, D. and Mitchell, B.G. and Moisan, T. and Muller-Karger, F. and O'Dowd, L. and Ondrusek, M. and Poulton, A.J. and Repecaud, M. and Smyth, T. and Sosik, H.M. and Taberner, M. and Twardowski, M. and Voss, K. and Werdell, J. and Wernand, M.
and Zibordi, G.}, + title = {ESA Ocean Colour Climate Change Initiative (Ocean\_Colour\_cci): Version 3.1 Data}, + publisher = {Centre for Environmental Data Analysis (CEDA)}, + year = {2018} +} diff --git a/esmvaltool/references/esacci-ozone.bibtex b/esmvaltool/references/esacci-ozone.bibtex new file mode 100644 index 0000000000..819b3234df --- /dev/null +++ b/esmvaltool/references/esacci-ozone.bibtex @@ -0,0 +1,13 @@ +@article{esacci-ozone, + doi = {10.1080/01431160902825016}, + url = {https://doi.org/10.1080%2F01431160902825016}, + year = 2009, + month = {jul}, + publisher = {Informa {UK} Limited}, + volume = {30}, + number = {15-16}, + pages = {4295--4318}, + author = {D. G. Loyola and R. M. Coldewey-Egbers and M. Dameris and H. Garny and A. Stenke and M. Van Roozendael and C. Lerot and D. Balis and M. Koukouli}, + title = {Global long-term monitoring of the ozone layer {\textendash} a prerequisite for predictions}, + journal = {International Journal of Remote Sensing} +} diff --git a/esmvaltool/references/esacci-sea-surface-salinity.bibtex b/esmvaltool/references/esacci-sea-surface-salinity.bibtex new file mode 100644 index 0000000000..6037b1fa03 --- /dev/null +++ b/esmvaltool/references/esacci-sea-surface-salinity.bibtex @@ -0,0 +1,9 @@ +@misc{esacci-sea-surface-salinity, + doi = {10.5285/9ef0ebf847564c2eabe62cac4899ec41}, + url = {https://doi.org/10.5285/9ef0ebf847564c2eabe62cac4899ec41}, + year = 2019, + month = {nov}, + publisher = {Centre for Environmental Data Analysis}, + author = {Boutin, J. and Vergely, J.-L. and Koehler, J. and Rouffi, F. and Reul, N.}, + title = {ESA Sea Surface Salinity Climate Change Initiative (Sea\_Surface\_Salinity\_cci): Version 1.8 data collection}, +} diff --git a/esmvaltool/references/esacci-soilmoisture.bibtex b/esmvaltool/references/esacci-soilmoisture.bibtex new file mode 100644 index 0000000000..7e4404a8f4 --- /dev/null +++ b/esmvaltool/references/esacci-soilmoisture.bibtex @@ -0,0 +1,115 @@ +@article{esacci-soilmoisture, + doi = {10.5194/essd-11-717-2019}, + title = {Evolution of the {ESA} {CCI} Soil Moisture climate data records + and their underlying merging methodology}, + author = {Gruber, Alexander and Scanlon, Tracy and van der Schalie, Robin + and Wagner, Wolfgang and Dorigo, Wouter}, + abstract = {The European Space Agency's Climate Change Initiative + for Soil Moisture (ESA CCI SM) merging algorithm generates + consistent quality-controlled long-term (1978--2018) climate + data records for soil moisture, which serves thousands of + scientists and data users worldwide. It harmonises and merges + soil moisture retrievals from multiple satellites into (i) an + active-microwave-based-only product, (ii) a + passive-microwave-based-only product and (iii) a combined + active--passive product, which are sampled to daily global + images on a 0.25° regular grid. Since its first release in 2012 + the algorithm has undergone substantial improvements which have + so far not been thoroughly reported in the scientific + literature. This paper fills this gap by reviewing and + discussing the science behind the three major ESA CCI SM merging + algorithms, versions 2 + (https://doi.org/10.5285/3729b3fbbb434930bf65d82f9b00111c; + Wagner et al., 2018), 3 + (https://doi.org/10.5285/b810601740bd4848b0d7965e6d83d26c; + Dorigo et al., 2018) and 4 + (https://doi.org/10.5285/dce27a397eaf47e797050c220972ca0e; + Dorigo et al., 2019), and provides an outlook on the expected + improvements planned for the next algorithm, version 5.}, + journal = {Earth Syst. Sci.
Data}, + publisher = {Copernicus GmbH}, + volume = {11}, + number = {2}, + pages = {717--739}, + month = {may}, + year = 2019 +} + +@article{esacci-soilmoisture-dorigo2017, + doi = {10.1016/j.rse.2017.07.001}, + title = {{ESA} {CCI} Soil Moisture for improved Earth system + understanding: State-of-the art and future directions}, + author = {Dorigo, Wouter and Wagner, Wolfgang and Albergel, Clement and + Albrecht, Franziska and Balsamo, Gianpaolo and Brocca, Luca and + Chung, Daniel and Ertl, Martin and Forkel, Matthias and Gruber, + Alexander and Haas, Eva and Hamer, Paul D and Hirschi, Martin + and Ikonen, Jaakko and de Jeu, Richard and Kidd, Richard and + Lahoz, William and Liu, Yi Y and Miralles, Diego and + Mistelbauer, Thomas and Nicolai-Shaw, Nadine and Parinussa, + Robert and Pratola, Chiara and Reimer, Christoph and van der + Schalie, Robin and Seneviratne, Sonia I and Smolander, Tuomo and + Lecomte, Pascal}, + abstract = {Climate Data Records of soil moisture are fundamental for + improving our understanding of long-term dynamics in the coupled + water, energy, and carbon cycles over land. To respond to this + need, in 2012 the European Space Agency (ESA) released the first + multi-decadal, global satellite-observed soil moisture (SM) + dataset as part of its Climate Change Initiative (CCI) program. + This product, named ESA CCI SM, combines various single-sensor + active and passive microwave soil moisture products into three + harmonised products: a merged ACTIVE, a merged PASSIVE, and a + COMBINED active + passive microwave product. Compared to the + first product release, the latest version of ESA CCI SM includes + a large number of enhancements, incorporates various new + satellite sensors, and extends its temporal coverage to the + period 1978--2015. In this study, we first provide a + comprehensive overview of the characteristics, evolution, and + performance of the ESA CCI SM products. Based on original + research and a review of existing literature we show that the + product quality has steadily increased with each successive + release and that the merged products generally outperform the + single-sensor input products. Although ESA CCI SM generally + agrees well with the spatial and temporal patterns estimated by + land surface models and observed in-situ, we identify surface + conditions (e.g., dense vegetation, organic soils) for which it + still has large uncertainties. Second, capitalising on the + results of > 100 research studies that made use of the ESA CCI + SM data we provide a synopsis of how it has contributed to + improved process understanding in the following Earth system + domains: climate variability and change, land-atmosphere + interactions, global biogeochemical cycles and ecology, + hydrological and land surface modelling, drought applications, + and meteorology. While in some disciplines the use of ESA CCI SM + is already widespread (e.g. in the evaluation of model soil + moisture states) in others (e.g. in numerical weather prediction + or flood forecasting) it is still in its infancy. The latter is + partly related to current shortcomings of the product, e.g., the + lack of near-real-time availability and data gaps in time and + space.
This study discloses the discrepancies between current + ESA CCI SM product characteristics and the preferred + characteristics of long-term satellite soil moisture products as + outlined by the Global Climate Observing System (GCOS), and + provides important directions for future ESA CCI SM product + improvements to bridge these gaps.}, + journal = {Remote Sens. Environ.}, + publisher = {Elsevier BV}, + volume = {203}, + pages = {185--215}, + month = {dec}, + year = 2017 +} + +@article{esacci-soilmoisture-preimesberger2021, + doi = {10.1109/TGRS.2020.3012896}, + title = {Homogenization of structural breaks in the global {ESA} {CCI} + soil moisture multisatellite climate data record}, + author = {Preimesberger, Wolfgang and Scanlon, Tracy and Su, Chun-Hsu and + Gruber, Alexander and Dorigo, Wouter}, + journal = {IEEE Trans. Geosci. Remote Sens.}, + publisher = {Institute of Electrical and Electronics Engineers (IEEE)}, + volume = {59}, + number = {4}, + pages = {2845--2862}, + month = {apr}, + year = 2021 +} \ No newline at end of file diff --git a/esmvaltool/references/esacci-sst-bias-correction.bibtex b/esmvaltool/references/esacci-sst-bias-correction.bibtex new file mode 100644 index 0000000000..cf120a231a --- /dev/null +++ b/esmvaltool/references/esacci-sst-bias-correction.bibtex @@ -0,0 +1,13 @@ +@article{esacci-sst-bias-correction, + doi = {10.3390/rs12162554}, + url = {https://doi.org/10.3390/rs12162554}, + year = 2020, + month = {aug}, + publisher = {MDPI}, + volume = {12}, + number = {16}, + pages = {2554}, + author = {Christopher J. Merchant and Owen Embury}, + title = {Adjusting for Desert-Dust-Related Biases in a Climate Data Record of Sea Surface Temperature ({SST} {CCI})}, + journal = {Remote Sensing} +} diff --git a/esmvaltool/references/esacci-sst.bibtex b/esmvaltool/references/esacci-sst.bibtex new file mode 100644 index 0000000000..30eafc7756 --- /dev/null +++ b/esmvaltool/references/esacci-sst.bibtex @@ -0,0 +1,13 @@ +@article{esacci-sst, + doi = {10.1038/s41597-019-0236-x}, + url = {https://doi.org/10.1038/s41597-019-0236-x}, + year = 2019, + month = {oct}, + publisher = {Springer Nature}, + volume = {6}, + number = {1}, + pages = {223}, + author = {Christopher J. Merchant and Owen Embury and Claire E. Bulgin and Thomas Block and Gary K. Corlett and Emma Fiedler and Simon A. Good and Jonathan Mittaz and Nick A.
Rayner and David Berry and Steinar Eastwood and Michael Taylor and Yoko Tsushima and Alison Waterfall and Ruth Wilson and Craig Donlon}, + title = {Satellite-based time-series of sea-surface temperature since 1981 for climate applications ({SST} {CCI})}, + journal = {Scientific Data} +} diff --git a/esmvaltool/references/esacci-watervapour.bibtex b/esmvaltool/references/esacci-watervapour.bibtex new file mode 100644 index 0000000000..bdccb12a5e --- /dev/null +++ b/esmvaltool/references/esacci-watervapour.bibtex @@ -0,0 +1,10 @@ +@misc{esacci-watervapour, + number = {CCIWV.REP.015}, + url = {https://climate.esa.int/documents/357/Water_Vapour_cci_D4.2_CRDP_v2.1.pdf}, + year = 2020, + month = {oct}, + publisher = {ESA / ECSAT}, + issue = {2.1}, + author = {Michaela Hegglin and Olaf Danne and Marc Schröder and Hao Ye}, + title = {Climate Research Data Package (CRDP) Water Vapour Climate Change Initiative (WV{\_}cci) - CCI+ Phase 1} +} diff --git a/esmvaltool/references/esacci_lst.bibtex b/esmvaltool/references/esacci_lst.bibtex new file mode 100644 index 0000000000..f81ab82f46 --- /dev/null +++ b/esmvaltool/references/esacci_lst.bibtex @@ -0,0 +1,12 @@ +@misc{esacci-lst, + url = {https://climate.esa.int/en/projects/land-surface-temperature/}, + title = {{ESA CCI} {L}and {S}urface {T}emperature}, + year = {2020}, + note = {The {ESA} {LST}\_cci ({E}uropean {S}pace {A}gency {L}and {S}urface {T}emperature {C}limate {C}hange {I}nitiative) project + is led by the {U}niversity of {L}eicester in the {UK}. + The {LST}\_cci project aims to deliver a significant improvement in the capability of current satellite {LST} data records + to meet the {G}lobal {C}limate {O}bserving {S}ystem requirements for climate applications + and realise the full potential of long-term {LST} data for climate science. + The {LST}\_cci product used here is the {AQUA\_MODIS\_L3C} v1.00 0.05 degree global lon-lat grid monthly product.} +} + diff --git a/esmvaltool/references/esdc.bibtex b/esmvaltool/references/esdc.bibtex new file mode 100644 index 0000000000..887cb1f2bf --- /dev/null +++ b/esmvaltool/references/esdc.bibtex @@ -0,0 +1,11 @@ +@article{esdc, + doi = {10.5194/esd-11-201-2020}, + url = {https://esd.copernicus.org/articles/11/201/2020/}, + year = {2020}, + volume = {11}, + number = {1}, + pages = {201--234}, + author = {Mahecha, M. D. and Gans, F. and Brandt, G. and Christiansen, R. and Cornell, S. E. and Fomferra, N. and Kraemer, G. and Peters, J. and Bodesheim, P. and Camps-Valls, G. and Donges, J. F. and Dorigo, W. and Estupinan-Suarez, L. M. and Gutierrez-Velez, V. H. and Gutwin, M. and Jung, M. and Londo\~no, M. C. and Miralles, D. G. and Papastefanou, P.
and Reichstein, M.}, + title = {Earth system data cubes unravel global multivariate dynamics}, + journal = {Earth System Dynamics} +} diff --git a/esmvaltool/references/esrl.bibtex b/esmvaltool/references/esrl.bibtex new file mode 100644 index 0000000000..98884fc863 --- /dev/null +++ b/esmvaltool/references/esrl.bibtex @@ -0,0 +1,8 @@ +@misc{esrl, + doi = {10.15138/WKGJ-F215}, + url = {https://www.esrl.noaa.gov/gmd/ccgg/arc/?id=132}, + author = {{NOAA ESRL GML CCGG Group}}, + title = {Earth System Research Laboratory Carbon Cycle and Greenhouse Gases Group Flask-Air Sample Measurements of CO2 at Global and Regional Background Sites, 1967-Present}, + publisher = {NOAA ESRL GML CCGG Group}, + year = {2019} +} diff --git a/esmvaltool/references/eyring06jgr.bibtex b/esmvaltool/references/eyring06jgr.bibtex new file mode 100644 index 0000000000..1500d28e98 --- /dev/null +++ b/esmvaltool/references/eyring06jgr.bibtex @@ -0,0 +1,12 @@ +@article{eyring06jgr, + doi = {10.1029/2006jd007327}, + url = {https://doi.org/10.1029%2F2006jd007327}, + year = 2006, + month = {nov}, + publisher = {American Geophysical Union ({AGU})}, + volume = {111}, + number = {D22}, + author = {V. Eyring and N. Butchart and D. W. Waugh and H. Akiyoshi and J. Austin and S. Bekki and G. E. Bodeker and B. A. Boville and C. Brühl and M. P. Chipperfield and E. Cordero and M. Dameris and M. Deushi and V. E. Fioletov and S. M. Frith and R. R. Garcia and A. Gettelman and M. A. Giorgetta and V. Grewe and L. Jourdain and D. E. Kinnison and E. Mancini and E. Manzini and M. Marchand and D. R. Marsh and T. Nagashima and P. A. Newman and J. E. Nielsen and S. Pawson and G. Pitari and D. A. Plummer and E. Rozanov and M. Schraner and T. G. Shepherd and K. Shibata and R. S. Stolarski and H. Struthers and W. Tian and M. Yoshiki}, + title = {Assessment of temperature, trace species, and ozone in chemistry-climate model simulations of the recent past}, + journal = {Journal of Geophysical Research} +} diff --git a/esmvaltool/references/eyring13jgr.bibtex b/esmvaltool/references/eyring13jgr.bibtex new file mode 100644 index 0000000000..93ff423ee0 --- /dev/null +++ b/esmvaltool/references/eyring13jgr.bibtex @@ -0,0 +1,13 @@ +@article{eyring13jgr, + doi = {10.1002/jgrd.50316}, + url = {https://doi.org/10.1002%2Fjgrd.50316}, + year = 2013, + month = {may}, + publisher = {American Geophysical Union ({AGU})}, + volume = {118}, + number = {10}, + pages = {5029--5060}, + author = {V. Eyring and J. M. Arblaster and I. Cionni and J. Sedl{\'{a}}{\v{c}}ek and J. Perlwitz and P. J. Young and S. Bekki and D. Bergmann and P. Cameron-Smith and W. J. Collins and G. Faluvegi and K.-D. Gottschaldt and L. W. Horowitz and D. E. Kinnison and J.-F. Lamarque and D. R. Marsh and D. Saint-Martin and D. T. Shindell and K. Sudo and S. Szopa and S.
Watanabe}, + title = {Long-term ozone changes and associated climate impacts in {CMIP}5 simulations}, + journal = {Journal of Geophysical Research: Atmospheres} +} diff --git a/esmvaltool/references/eyring19gmdd.bibtex b/esmvaltool/references/eyring19gmdd.bibtex new file mode 100644 index 0000000000..159a17903a --- /dev/null +++ b/esmvaltool/references/eyring19gmdd.bibtex @@ -0,0 +1,9 @@ +@misc{eyring19gmdd, + doi = {10.5194/gmd-2019-291}, + url = {https://doi.org/10.5194%2Fgmd-2019-291}, + year = 2019, + month = {nov}, + publisher = {Copernicus {GmbH}}, + author = {Veronika Eyring and Lisa Bock and Axel Lauer and Mattia Righi and Manuel Schlund and Bouwe Andela and Enrico Arnone and Omar Bellprat and Björn Brötz and Louis-Phillippe Caron and Nuno Carvalhais and Irene Cionni and Nicola Cortesi and Bas Crezee and Edouard Davin and Paolo Davini and Kevin Debeire and Lee de Mora and Clara Deser and David Docquier and Paul Earnshaw and Carsten Ehbrecht and Bettina K. Gier and Nube Gonzalez-Reviriego and Paul Goodman and Stefan Hagemann and Steven Hardiman and Birgit Hassler and Alasdair Hunter and Christopher Kadow and Stephan Kindermann and Sujan Koirala and Nikolay V. Koldunov and Quentin Lejeune and Valerio Lembo and Tomas Lovato and Valerio Lucarini and Francois Massonnet and Benjamin Müller and Amarjiit Pandde and Nuria P{\'{e}}rez-Zan{\'{o}}n and Adam Phillips and Valeriu Predoi and Joellen Russell and Alistair Sellar and Federico Serva and Tobias Stacke and Ranjini Swaminathan and Ver{\'{o}}nica Torralba and Javier Vegas-Regidor and Jost von Hardenberg and Katja Weigel and Klaus Zimmermann}, + title = {{ESMValTool} v2.0 -- Extended set of large-scale diagnostics for quasi-operational and comprehensive evaluation of Earth system models in {CMIP}} +} diff --git a/esmvaltool/references/eyring21ipcc.bibtex b/esmvaltool/references/eyring21ipcc.bibtex new file mode 100644 index 0000000000..fd73260004 --- /dev/null +++ b/esmvaltool/references/eyring21ipcc.bibtex @@ -0,0 +1,11 @@ +@inbook{eyring21ipcc, + author={V. Eyring and N.P. Gillett and K.M. Achuta Rao and R. Barimalala and M. Barreiro Parrillo and N. Bellouin and C. Cassou and P.J. Durack and Y. Kosaka and S. McGregor and S. Min and O. Morgenstern and Y. Sun}, + editor={V. Masson-Delmotte and P. Zhai and A. Pirani and S.L. Connors and C. Pean and S. Berger and N. Caud and Y. Chen and L. Goldfarb and M.I. Gomis and M. Huang and K. Leitzell and E. Lonnoy and J.B.R. Matthews and T.K. Maycock and T. Waterfield and O. Yelekci and R. Yu and B. Zhou}, + title={Human Influence on the Climate System}, + booktitle={Climate Change 2021: The Physical Science Basis. Contribution of Working Group I to the Sixth Assessment Report of the Intergovernmental Panel on Climate Change}, + year={2021}, + publisher={Cambridge University Press}, + address={Cambridge, UK and New York, NY, USA}, + pages = {423--552}, + doi={10.1017/9781009157896.005} +} diff --git a/esmvaltool/references/flato13ipcc.bibtex b/esmvaltool/references/flato13ipcc.bibtex new file mode 100644 index 0000000000..3119447c88 --- /dev/null +++ b/esmvaltool/references/flato13ipcc.bibtex @@ -0,0 +1,11 @@ +@inbook{flato13ipcc, + author={Flato, G. and Marotzke, J. and Abiodun, B. and Braconnot, P. and Chou, S. C. and Collins, W. and Cox, P. and Driouech, F. and Emori, S. and Eyring, V. and Forest, C. and Gleckler, P. and Guilyardi, E. and Jakob, C. and Kattsov, V. and Reason, C. and Rummukainen, M.}, + editor={Stocker, T. F. and Qin, D. and Plattner, G.-K. and Tignor, M. and Allen, S. K. 
and Doschung, J. and Nauels, A. and Xia, Y. and Bex, V. and Midgley, P. M.}, + title={Evaluation of climate models}, + booktitle={Climate Change 2013: The Physical Science Basis. Contribution of Working Group I to the Fifth Assessment Report of the Intergovernmental Panel on Climate Change}, + year={2013}, + pages={741--882}, + publisher={Cambridge University Press}, + address={Cambridge, UK}, + doi={10.1017/CBO9781107415324.020}, +} diff --git a/esmvaltool/references/fluxcom.bibtex b/esmvaltool/references/fluxcom.bibtex new file mode 100644 index 0000000000..ec8f7af749 --- /dev/null +++ b/esmvaltool/references/fluxcom.bibtex @@ -0,0 +1,12 @@ +@article{fluxcom, + doi = {10.1038/s41597-019-0076-8}, + url = {https://doi.org/10.1038%2Fs41597-019-0076-8}, + year = 2019, + month = {may}, + publisher = {Springer Science and Business Media {LLC}}, + volume = {6}, + number = {1}, + author = {Martin Jung and Sujan Koirala and Ulrich Weber and Kazuhito Ichii and Fabian Gans and Gustau Camps-Valls and Dario Papale and Christopher Schwalm and Gianluca Tramontana and Markus Reichstein}, + title = {The {FLUXCOM} ensemble of global land-atmosphere energy fluxes}, + journal = {Scientific Data} +} diff --git a/esmvaltool/references/fox-kemper21ipcc.bibtex b/esmvaltool/references/fox-kemper21ipcc.bibtex new file mode 100644 index 0000000000..e26a480f46 --- /dev/null +++ b/esmvaltool/references/fox-kemper21ipcc.bibtex @@ -0,0 +1,11 @@ +@inbook{fox-kemper21ipcc, + author={B. Fox-Kemper and H.T. Hewitt and C. Xiao and G. Aðalgeirsdóttir and S.S. Drijfhout and T.L. Edwards and N.R. Golledge and M. Hemer and R.E. Kopp and G. Krinner and A. Mix and D. Notz and S. Nowicki and I.S. Nurhati and L. Ruiz and J.-B. Sallée and A.B.A. Slangen and Y. Yu}, + editor={V. Masson-Delmotte and P. Zhai and A. Pirani and S.L. Connors and C. Pean and S. Berger and N. Caud and Y. Chen and L. Goldfarb and M.I. Gomis and M. Huang and K. Leitzell and E. Lonnoy and J.B.R. Matthews and T.K. Maycock and T. Waterfield and O. Yelekci and R. Yu and B. Zhou}, + title={Ocean, Cryosphere and Sea Level Change}, + booktitle={Climate Change 2021: The Physical Science Basis. Contribution of Working Group I to the Sixth Assessment Report of the Intergovernmental Panel on Climate Change}, + year={2021}, + publisher={Cambridge University Press}, + address={Cambridge, UK and New York, NY, USA}, + pages = {1211--1362}, + doi={10.1017/9781009157896.011} +} diff --git a/esmvaltool/references/fuckar15cd.bibtex b/esmvaltool/references/fuckar15cd.bibtex new file mode 100644 index 0000000000..fe7ba59e23 --- /dev/null +++ b/esmvaltool/references/fuckar15cd.bibtex @@ -0,0 +1,13 @@ +@article{fuckar15cd, + doi = {10.1007/s00382-015-2917-2}, + url = {https://doi.org/10.1007%2Fs00382-015-2917-2}, + year = 2015, + month = {nov}, + publisher = {Springer Science and Business Media {LLC}}, + volume = {47}, + number = {5-6}, + pages = {1527--1543}, + author = {Neven S. Fu{\v{c}}kar and Virginie Guemas and Nathaniel C. Johnson and Fran{\c{c}}ois Massonnet and Francisco J. 
Doblas-Reyes}, + title = {Clusters of interannual sea ice variability in the northern hemisphere}, + journal = {Climate Dynamics} +} diff --git a/esmvaltool/references/galytska23jgr.bibtex b/esmvaltool/references/galytska23jgr.bibtex new file mode 100644 index 0000000000..9926c32a88 --- /dev/null +++ b/esmvaltool/references/galytska23jgr.bibtex @@ -0,0 +1,8 @@ +@article{galytska2022, + title={Causal model evaluation of Arctic-midlatitude teleconnections in CMIP6}, + author={Galytska, Evgenia and Weigel, Katja and Handorf, D{\"o}rthe and Jaiser, Ralf and K{\"o}hler, Raphael Harry and Runge, Jakob and Eyring, Veronika}, + journal={Authorea Preprints}, + year={2022}, + doi = {10.1002/essoar.10512569.1}, + publisher={Authorea} +} diff --git a/esmvaltool/references/gcp2018.bibtex b/esmvaltool/references/gcp2018.bibtex new file mode 100644 index 0000000000..0227bbe9e5 --- /dev/null +++ b/esmvaltool/references/gcp2018.bibtex @@ -0,0 +1,13 @@ +@article{gcp2018, + doi = {10.5194/essd-10-2141-2018}, + url = {https://doi.org/10.5194%2Fessd-10-2141-2018}, + year = 2018, + month = {dec}, + publisher = {Copernicus {GmbH}}, + volume = {10}, + number = {4}, + pages = {2141--2194}, + author = {Corinne Le Qu{\'{e}}r{\'{e}} and Robbie M. Andrew and Pierre Friedlingstein and Stephen Sitch and Judith Hauck and Julia Pongratz and Penelope A. Pickers and Jan Ivar Korsbakken and Glen P. Peters and Josep G. Canadell and Almut Arneth and Vivek K. Arora and Leticia Barbero and Ana Bastos and Laurent Bopp and Fr{\'{e}}d{\'{e}}ric Chevallier and Louise P. Chini and Philippe Ciais and Scott C. Doney and Thanos Gkritzalis and Daniel S. Goll and Ian Harris and Vanessa Haverd and Forrest M. Hoffman and Mario Hoppema and Richard A. Houghton and George Hurtt and Tatiana Ilyina and Atul K. Jain and Truls Johannessen and Chris D. Jones and Etsushi Kato and Ralph F. Keeling and Kees Klein Goldewijk and Peter Landschützer and Nathalie Lef{\`{e}}vre and Sebastian Lienert and Zhu Liu and Danica Lombardozzi and Nicolas Metzl and David R. Munro and Julia E. M. S. Nabel and Shin-ichiro Nakaoka and Craig Neill and Are Olsen and Tsueno Ono and Prabir Patra and Anna Peregon and Wouter Peters and Philippe Peylin and Benjamin Pfeil and Denis Pierrot and Benjamin Poulter and Gregor Rehder and Laure Resplandy and Eddy Robertson and Matthias Rocher and Christian Rödenbeck and Ute Schuster and Jörg Schwinger and Roland S{\'{e}}f{\'{e}}rian and Ingunn Skjelvan and Tobias Steinhoff and Adrienne Sutton and Pieter P. Tans and Hanqin Tian and Bronte Tilbrook and Francesco N. Tubiello and Ingrid T. van der Laan-Luijkx and Guido R. van der Werf and Nicolas Viovy and Anthony P. Walker and Andrew J. Wiltshire and Rebecca Wright and Sönke Zaehle and Bo Zheng}, + title = {Global Carbon Budget 2018}, + journal = {Earth System Science Data} +} diff --git a/esmvaltool/references/gcp2020.bibtex b/esmvaltool/references/gcp2020.bibtex new file mode 100644 index 0000000000..379931205f --- /dev/null +++ b/esmvaltool/references/gcp2020.bibtex @@ -0,0 +1,13 @@ +@article{gcp2020, + doi = {10.5194/essd-12-3269-2020}, + url = {https://doi.org/10.5194/essd-12-3269-2020}, + year = {2020}, + month = dec, + publisher = {Copernicus {GmbH}}, + volume = {12}, + number = {4}, + pages = {3269--3340}, + author = {Pierre Friedlingstein and Michael O{\textquotesingle}Sullivan and Matthew W. Jones and Robbie M. Andrew and Judith Hauck and Are Olsen and Glen P. Peters and Wouter Peters and Julia Pongratz and Stephen Sitch and Corinne Le Qu{\'{e}}r{\'{e}} and Josep G. 
Canadell and Philippe Ciais and Robert B. Jackson and Simone Alin and Luiz E. O. C. Arag{\~{a}}o and Almut Arneth and Vivek Arora and Nicholas R. Bates and Meike Becker and Alice Benoit-Cattin and Henry C. Bittig and Laurent Bopp and Selma Bultan and Naveen Chandra and Fr{\'{e}}d{\'{e}}ric Chevallier and Louise P. Chini and Wiley Evans and Liesbeth Florentie and Piers M. Forster and Thomas Gasser and Marion Gehlen and Dennis Gilfillan and Thanos Gkritzalis and Luke Gregor and Nicolas Gruber and Ian Harris and Kerstin Hartung and Vanessa Haverd and Richard A. Houghton and Tatiana Ilyina and Atul K. Jain and Emilie Joetzjer and Koji Kadono and Etsushi Kato and Vassilis Kitidis and Jan Ivar Korsbakken and Peter Landsch\"{u}tzer and Nathalie Lef{\`{e}}vre and Andrew Lenton and Sebastian Lienert and Zhu Liu and Danica Lombardozzi and Gregg Marland and Nicolas Metzl and David R. Munro and Julia E. M. S. Nabel and Shin-Ichiro Nakaoka and Yosuke Niwa and Kevin O{\textquotesingle}Brien and Tsuneo Ono and Paul I. Palmer and Denis Pierrot and Benjamin Poulter and Laure Resplandy and Eddy Robertson and Christian R\"{o}denbeck and J\"{o}rg Schwinger and Roland S{\'{e}}f{\'{e}}rian and Ingunn Skjelvan and Adam J. P. Smith and Adrienne J. Sutton and Toste Tanhua and Pieter P. Tans and Hanqin Tian and Bronte Tilbrook and Guido van der Werf and Nicolas Vuichard and Anthony P. Walker and Rik Wanninkhof and Andrew J. Watson and David Willis and Andrew J. Wiltshire and Wenping Yuan and Xu Yue and S\"{o}nke Zaehle}, + title = {Global Carbon Budget 2020}, + journal = {Earth System Science Data}, +} diff --git a/esmvaltool/references/gen14jclim.bibtex b/esmvaltool/references/gen14jclim.bibtex new file mode 100644 index 0000000000..7ebe9b5c6d --- /dev/null +++ b/esmvaltool/references/gen14jclim.bibtex @@ -0,0 +1,13 @@ +@article{gen14jclim, + doi = {10.1175/jcli-d-13-00337.1}, + url = {https://doi.org/10.1175%2Fjcli-d-13-00337.1}, + year = 2014, + month = {feb}, + publisher = {American Meteorological Society}, + volume = {27}, + number = {4}, + pages = {1765--1780}, + author = {Gen Li and Shang-Ping Xie}, + title = {Tropical Biases in {CMIP}5 Multimodel Ensemble: The Excessive Equatorial Pacific Cold Tongue and Double {ITCZ} Problems}, + journal = {Journal of Climate} +} diff --git a/esmvaltool/references/georgievski18tac.bibtex b/esmvaltool/references/georgievski18tac.bibtex new file mode 100644 index 0000000000..f2f3de209f --- /dev/null +++ b/esmvaltool/references/georgievski18tac.bibtex @@ -0,0 +1,13 @@ +@article{georgievski18tac, + doi = {10.1007/s00704-018-2675-2}, + url = {https://doi.org/10.1007%2Fs00704-018-2675-2}, + year = 2018, + month = {nov}, + publisher = {Springer Science and Business Media {LLC}}, + volume = {137}, + number = {1-2}, + pages = {1587--1603}, + author = {Goran Georgievski and Stefan Hagemann}, + title = {Characterizing uncertainties in the {ESA}-{CCI} land cover map of the epoch 2010 and their impacts on {MPI}-{ESM} climate simulations}, + journal = {Theoretical and Applied Climatology} +} diff --git a/esmvaltool/references/ghcn-cams.bibtex b/esmvaltool/references/ghcn-cams.bibtex new file mode 100644 index 0000000000..c2ad920a77 --- /dev/null +++ b/esmvaltool/references/ghcn-cams.bibtex @@ -0,0 +1,13 @@ +@article{ghcn-cams, +author = {Fan, Yun and van den Dool, Huug}, +title = {A global monthly land surface air temperature analysis for 1948–present}, +journal = {Journal of Geophysical Research: Atmospheres}, +volume = {113}, +number = {D1}, +pages = {}, +keywords = {land surface, air temperature,
analysis}, +doi = {10.1029/2007JD008470}, +url = {https://agupubs.onlinelibrary.wiley.com/doi/abs/10.1029/2007JD008470}, +eprint = {https://agupubs.onlinelibrary.wiley.com/doi/pdf/10.1029/2007JD008470}, +year = {2008} +} diff --git a/esmvaltool/references/ghcn.bibtex b/esmvaltool/references/ghcn.bibtex new file mode 100644 index 0000000000..8eb927d8a5 --- /dev/null +++ b/esmvaltool/references/ghcn.bibtex @@ -0,0 +1,13 @@ +@article{ghcn, + doi = {10.1175/1520-0442(2003)016<0206:halssa>2.0.co;2}, + url = {https://doi.org/10.1175%2F1520-0442%282003%29016%3C0206%3Ahalssa%3E2.0.co%3B2}, + year = 2003, + month = {jan}, + publisher = {American Meteorological Society}, + volume = {16}, + number = {2}, + pages = {206--223}, + author = {P. D. Jones and A. Moberg}, + title = {Hemispheric and Large-Scale Surface Air Temperature Variations: An Extensive Revision and an Update to 2001}, + journal = {Journal of Climate} +} diff --git a/esmvaltool/references/gier20bg.bibtex b/esmvaltool/references/gier20bg.bibtex new file mode 100644 index 0000000000..e54c0c7aec --- /dev/null +++ b/esmvaltool/references/gier20bg.bibtex @@ -0,0 +1,13 @@ +@article{Gier2020, + doi = {10.5194/bg-17-6115-2020}, + url = {https://doi.org/10.5194/bg-17-6115-2020}, + year = {2020}, + month = dec, + publisher = {Copernicus {GmbH}}, + volume = {17}, + number = {23}, + pages = {6115--6144}, + author = {Bettina K. Gier and Michael Buchwitz and Maximilian Reuter and Peter M. Cox and Pierre Friedlingstein and Veronika Eyring}, + title = {Spatially resolved evaluation of Earth system models with satellite column-averaged {CO}$_2$}, + journal = {Biogeosciences} +} diff --git a/esmvaltool/references/giorgi11jc.bibtex b/esmvaltool/references/giorgi11jc.bibtex new file mode 100644 index 0000000000..49d2f6d296 --- /dev/null +++ b/esmvaltool/references/giorgi11jc.bibtex @@ -0,0 +1,13 @@ +@article{giorgi11jc, + doi = {10.1175/2011jcli3979.1}, + url = {https://doi.org/10.1175%2F2011jcli3979.1}, + year = 2011, + month = {oct}, + publisher = {American Meteorological Society}, + volume = {24}, + number = {20}, + pages = {5309--5324}, + author = {F. Giorgi and E.-S. Im and E. Coppola and N. S. Diffenbaugh and X. J. Gao and L. Mariotti and Y. Shi}, + title = {Higher Hydroclimatic Intensity with Global Warming}, + journal = {Journal of Climate} +} diff --git a/esmvaltool/references/giorgi14jgr.bibtex b/esmvaltool/references/giorgi14jgr.bibtex new file mode 100644 index 0000000000..6bd21af2bb --- /dev/null +++ b/esmvaltool/references/giorgi14jgr.bibtex @@ -0,0 +1,14 @@ +@article{giorgi14jgr, +author = {Giorgi, F. and Coppola, E.
and Raffaele, F.}, +title = {A consistent picture of the hydroclimatic response to global warming from multiple indices: Models and observations}, +journal = {Journal of Geophysical Research: Atmospheres}, +volume = {119}, +number = {20}, +pages = {11,695-11,708}, +keywords = {HY-INT, CMIP5, extremes}, +doi = {10.1002/2014JD022238}, +url = {https://agupubs.onlinelibrary.wiley.com/doi/abs/10.1002/2014JD022238}, +eprint = {https://agupubs.onlinelibrary.wiley.com/doi/pdf/10.1002/2014JD022238}, +abstract = {Abstract We analyze trends of six daily precipitation-based and physically interconnected hydroclimatic indices in an ensemble of historical and 21st century climate projections under forcing from increasing greenhouse gas (GHG) concentrations (Representative Concentration Pathways (RCP)8.5), along with gridded (land only) observations for the late decades of the twentieth century. The indices include metrics of intensity (SDII) and extremes (R95) of precipitation, dry (DSL), and wet spell length, the hydroclimatic intensity index (HY-INT), and a newly introduced index of precipitation area (PA). All the indices in both the 21st century and historical simulations provide a consistent picture of a predominant shift toward a hydroclimatic regime of more intense, shorter, less frequent, and less widespread precipitation events in response to GHG-induced global warming. The trends are larger and more spatially consistent over tropical than extratropical regions, pointing to the importance of tropical convection in regulating this response, and show substantial regional spatial variability. Observed trends in the indices analyzed are qualitatively and consistently in line with the simulated ones, at least at the global and full tropical scale, further supporting the robustness of the identified prevailing hydroclimatic responses. The HY-INT, PA, and R95 indices show the most consistent response to global warming, and thus offer the most promising tools for formal hydroclimatic model validation and detection/attribution studies. The physical mechanism underlying this response and some of the applications of our results are also discussed.}, +year = {2014} +} diff --git a/esmvaltool/references/gistemp.bibtex b/esmvaltool/references/gistemp.bibtex new file mode 100644 index 0000000000..f8e1d26e3b --- /dev/null +++ b/esmvaltool/references/gistemp.bibtex @@ -0,0 +1,12 @@ +@article{gistemp, +author = {Lenssen, Nathan J. L. and Schmidt, Gavin A. and Hansen, James E. and Menne, Matthew J. and Persin, Avraham and Ruedy, Reto and Zyss, Daniel}, +title = {Improvements in the GISTEMP Uncertainty Model}, +journal = {Journal of Geophysical Research: Atmospheres}, +volume = {124}, +number = {12}, +pages = {6307-6326}, +doi = {10.1029/2018JD029522}, +url = {https://agupubs.onlinelibrary.wiley.com/doi/abs/10.1029/2018JD029522}, +year = {2019} +} + diff --git a/esmvaltool/references/gleckler08jgr.bibtex b/esmvaltool/references/gleckler08jgr.bibtex new file mode 100644 index 0000000000..547b23f396 --- /dev/null +++ b/esmvaltool/references/gleckler08jgr.bibtex @@ -0,0 +1,12 @@ +@article{gleckler08jgr, + doi = {10.1029/2007jd008972}, + url = {https://doi.org/10.1029%2F2007jd008972}, + year = 2008, + month = {mar}, + publisher = {American Geophysical Union ({AGU})}, + volume = {113}, + number = {D6}, + author = {P. J. Gleckler and K. E. Taylor and C. 
Doutriaux}, + title = {Performance metrics for climate models}, + journal = {Journal of Geophysical Research} +} \ No newline at end of file diff --git a/esmvaltool/references/glodap.bibtex b/esmvaltool/references/glodap.bibtex new file mode 100644 index 0000000000..fc554bf39a --- /dev/null +++ b/esmvaltool/references/glodap.bibtex @@ -0,0 +1,11 @@ +@article{glodap, + doi = {10.5194/essd-8-325-2016}, + title = {A new global interior ocean mapped climatology: The 1$^\circ \times 1^\circ$ GLODAP version 2}, + author = {Lauvset, Siv K and Key, Robert M and Olsen, Are and Heuven, Steven van and Velo, Anton and Lin, Xiaohua and Schirnick, Carsten and Kozyr, Alex and Tanhua, Toste and Hoppema, Mario and others}, + journal = {Earth System Science Data}, + volume = {8}, + number = {2}, + pages = {325--340}, + year = {2016}, + publisher = {Copernicus GmbH} +} diff --git a/esmvaltool/references/goswami99qjrms.bibtex b/esmvaltool/references/goswami99qjrms.bibtex new file mode 100644 index 0000000000..b0b3c1226f --- /dev/null +++ b/esmvaltool/references/goswami99qjrms.bibtex @@ -0,0 +1,13 @@ +@article{goswami99qjrms, + doi = {10.1002/qj.49712555412}, + url = {https://doi.org/10.1002%2Fqj.49712555412}, + year = 1999, + month = {jan}, + publisher = {Wiley}, + volume = {125}, + number = {554}, + pages = {611--633}, + author = {B. N. Goswami and V. Krishnamurthy and H. Annamalai}, + title = {A broad-scale circulation index for the interannual variability of the Indian summer monsoon}, + journal = {Quarterly Journal of the Royal Meteorological Society} +} diff --git a/esmvaltool/references/gpcc.bibtex b/esmvaltool/references/gpcc.bibtex new file mode 100644 index 0000000000..d0a4f5accd --- /dev/null +++ b/esmvaltool/references/gpcc.bibtex @@ -0,0 +1,7 @@ +@misc{gpcc, + doi = {10.5676/DWD_GPCC/FD_M_V2018_025, 10.5676/DWD_GPCC/FD_M_V2018_050, 10.5676/DWD_GPCC/FD_M_V2018_100, 10.5676/DWD_GPCC/FD_M_V2018_250}, + url = {https://opendata.dwd.de/climate_environment/GPCC/html/fulldata-monthly_v2018_doi_download.html}, + year = 2018, + author = {U. Schneider and A. Becker and P. Finger and A. Meyer-Christoffer and M. Ziese}, + title = {GPCC Full Data Monthly Product Version 2018 at [0.25°, 0.5°, 1.0°, 2.5°]: Monthly Land-Surface Precipitation from Rain-Gauges built on GTS-based and Historical Data} +} diff --git a/esmvaltool/references/gpcp-sg.bibtex b/esmvaltool/references/gpcp-sg.bibtex new file mode 100644 index 0000000000..973dd15990 --- /dev/null +++ b/esmvaltool/references/gpcp-sg.bibtex @@ -0,0 +1,7 @@ +@misc{gpcp-sg, + url = {https://psl.noaa.gov/data/gridded/data.gpcp.html}, + year = 2003, + author = {Adler, R.F. and G.J. Huffman and A. Chang and R. Ferraro and P. Xie and J. Janowiak and B. Rudolf and U. Schneider and S. Curtis and D. Bolvin and A. Gruber and J. Susskind and P.
Arkin}, + title = {The Version 2 Global Precipitation Climatology Project (GPCP) Monthly Precipitation Analysis (1979-Present)}, + howpublished = {via website https://psl.noaa.gov, provided by the NOAA PSL, Boulder, Colorado, USA} +} diff --git a/esmvaltool/references/grace.bibtex b/esmvaltool/references/grace.bibtex new file mode 100644 index 0000000000..2826dac73c --- /dev/null +++ b/esmvaltool/references/grace.bibtex @@ -0,0 +1,11 @@ +@article{watkins2015improved, + title={Improved methods for observing Earth's time variable mass distribution with GRACE using spherical cap mascons}, + author={Watkins, Michael M and Wiese, David N and Yuan, Dah-Ning and Boening, Carmen and Landerer, Felix W}, + journal={Journal of Geophysical Research: Solid Earth}, + doi = {10.1002/2014JB011547}, + volume={120}, + number={4}, + pages={2648--2671}, + year={2015}, + publisher={Wiley Online Library} +} diff --git a/esmvaltool/references/gregory04grl.bibtex b/esmvaltool/references/gregory04grl.bibtex new file mode 100644 index 0000000000..8d5bf1ea76 --- /dev/null +++ b/esmvaltool/references/gregory04grl.bibtex @@ -0,0 +1,11 @@ +@article{gregory04grl, + doi = {10.1029/2003gl018747}, + url = {https://doi.org/10.1029%2F2003gl018747}, + year = 2004, + publisher = {American Geophysical Union ({AGU})}, + volume = {31}, + number = {3}, + author = {J. M. Gregory}, + title = {A new method for diagnosing radiative forcing and climate sensitivity}, + journal = {Geophysical Research Letters} +} diff --git a/esmvaltool/references/gregory08jgr.bibtex b/esmvaltool/references/gregory08jgr.bibtex new file mode 100644 index 0000000000..8b631a0c58 --- /dev/null +++ b/esmvaltool/references/gregory08jgr.bibtex @@ -0,0 +1,12 @@ +@article{gregory08jgr, + doi = {10.1029/2008jd010405}, + url = {https://doi.org/10.1029%2F2008jd010405}, + year = 2008, + month = {dec}, + publisher = {American Geophysical Union ({AGU})}, + volume = {113}, + number = {D23}, + author = {J. M. Gregory and P. M. Forster}, + title = {Transient climate response estimated from radiative forcing and observed temperature change}, + journal = {Journal of Geophysical Research} +} diff --git a/esmvaltool/references/gruber19essd.bibtex b/esmvaltool/references/gruber19essd.bibtex new file mode 100644 index 0000000000..7d92863b27 --- /dev/null +++ b/esmvaltool/references/gruber19essd.bibtex @@ -0,0 +1,12 @@ +@article{gruber19essd, +author = {Gruber, Alexander and Scanlon, Tracy and van der Schalie, Robin and Wagner, Wolfgang and Dorigo, Wouter}, +doi = {10.5194/essd-11-717-2019}, +issn = {1866-3516}, +journal = {Earth System Science Data}, +month = {may}, +number = {2}, +pages = {717--739}, +title = {{Evolution of the ESA CCI Soil Moisture climate data records and their underlying merging methodology}}, +volume = {11}, +year = {2019} +} diff --git a/esmvaltool/references/hadcrut3.bibtex b/esmvaltool/references/hadcrut3.bibtex new file mode 100644 index 0000000000..3fab4484f1 --- /dev/null +++ b/esmvaltool/references/hadcrut3.bibtex @@ -0,0 +1,11 @@ +@article{hadcrut3, + doi = {10.1029/2005jd006548}, + url = {https://doi.org/10.1029%2F2005jd006548}, + year = 2006, + publisher = {American Geophysical Union ({AGU})}, + volume = {111}, + number = {D12}, + author = {P. Brohan and J. J. Kennedy and I. Harris and S. F. B. Tett and P. D.
Jones}, + title = {Uncertainty estimates in regional and global observed temperature changes: A new data set from 1850}, + journal = {Journal of Geophysical Research} +} diff --git a/esmvaltool/references/hadcrut4.bibtex b/esmvaltool/references/hadcrut4.bibtex new file mode 100644 index 0000000000..dabe3eb43a --- /dev/null +++ b/esmvaltool/references/hadcrut4.bibtex @@ -0,0 +1,13 @@ +@article{hadcrut4, + doi = {10.1029/2011jd017187}, + url = {https://doi.org/10.1029%2F2011jd017187}, + year = 2012, + month = {apr}, + publisher = {American Geophysical Union ({AGU})}, + volume = {117}, + number = {D8}, + pages = {n/a--n/a}, + author = {Colin P. Morice and John J. Kennedy and Nick A. Rayner and Phil D. Jones}, + title = {Quantifying uncertainties in global and regional temperature change using an ensemble of observational estimates: The {HadCRUT}4 data set}, + journal = {Journal of Geophysical Research: Atmospheres} +} diff --git a/esmvaltool/references/hadcrut5.bibtex b/esmvaltool/references/hadcrut5.bibtex new file mode 100644 index 0000000000..cb4cf71fb2 --- /dev/null +++ b/esmvaltool/references/hadcrut5.bibtex @@ -0,0 +1,14 @@ +@article{hadcrut5, + author = {Morice, C. P. and Kennedy, J. J. and Rayner, N. A. and Winn, J. P. and Hogan, E. and Killick, R. E. and Dunn, R. J. H. and Osborn, T. J. and Jones, P. D. and Simpson, I. R.}, + title = {An updated assessment of near-surface temperature change from 1850: the HadCRUT5 dataset}, + journal = {Journal of Geophysical Research: Atmospheres}, + volume = {n/a}, + number = {n/a}, + pages = {e2019JD032361}, + keywords = {Global temperature, Surface temperature, Instrumental record, Ensemble, Climate change, Global warming}, + doi = {https://doi.org/10.1029/2019JD032361}, + url = {https://agupubs.onlinelibrary.wiley.com/doi/abs/10.1029/2019JD032361}, + eprint = {https://agupubs.onlinelibrary.wiley.com/doi/pdf/10.1029/2019JD032361}, + note = {e2019JD032361 2019JD032361}, + abstract = {Abstract We present a new version of the Met Office Hadley Centre/Climatic Research Unit global surface temperature dataset, HadCRUT5. HadCRUT5 presents monthly average near-surface temperature anomalies, relative to the 1961-1990 period, on a regular 5° latitude by 5° longitude grid from 1850 to 2018. HadCRUT5 is a combination of sea-surface temperature measurements over the ocean from ships and buoys and near-surface air temperature measurements from weather stations over the land surface. These data have been sourced from updated compilations and the adjustments applied to mitigate the impact of changes in sea-surface temperature measurement methods have been revised. Two variants of HadCRUT5 have been produced for use in different applications. The first represents temperature anomaly data on a grid for locations where measurement data are available. The second, more spatially complete, variant uses a Gaussian process based statistical method to make better use of the available observations, extending temperature anomaly estimates into regions for which the underlying measurements are informative. Each is provided as a 200-member ensemble accompanied by additional uncertainty information. The combination of revised input datasets and statistical analysis results in greater warming of the global average over the course of the whole record. In recent years, increased warming results from an improved representation of Arctic warming and a better understanding of evolving biases in sea-surface temperature measurements from ships. 
These updates result in greater consistency with other independent global surface temperature datasets, despite their different approaches to dataset construction, and further increase confidence in our understanding of changes seen.} +} \ No newline at end of file diff --git a/esmvaltool/references/hadisst.bibtex b/esmvaltool/references/hadisst.bibtex new file mode 100644 index 0000000000..7d55f2983b --- /dev/null +++ b/esmvaltool/references/hadisst.bibtex @@ -0,0 +1,11 @@ +@article{hadisst, + doi = {10.1029/2002jd002670}, + url = {https://doi.org/10.1029%2F2002jd002670}, + year = 2003, + publisher = {American Geophysical Union ({AGU})}, + volume = {108}, + number = {D14}, + author = {N. A. Rayner}, + title = {Global analyses of sea surface temperature, sea ice, and night marine air temperature since the late nineteenth century}, + journal = {Journal of Geophysical Research} +} diff --git a/esmvaltool/references/hagemann13james.bibtex b/esmvaltool/references/hagemann13james.bibtex new file mode 100644 index 0000000000..06955bd516 --- /dev/null +++ b/esmvaltool/references/hagemann13james.bibtex @@ -0,0 +1,11 @@ +@article{hagemann13james, + doi = {10.1029/2012ms000173}, + url = {https://doi.org/10.1029%2F2012ms000173}, + year = 2013, + month = {may}, + publisher = {American Geophysical Union ({AGU})}, + pages = {n/a--n/a}, + author = {Stefan Hagemann and Alexander Loew and A. Andersson}, + title = {Combined evaluation of {MPI}-{ESM} land surface water and energy fluxes}, + journal = {Journal of Advances in Modeling Earth Systems} +} diff --git a/esmvaltool/references/hall06grl.bibtex b/esmvaltool/references/hall06grl.bibtex new file mode 100644 index 0000000000..dfe7a3c358 --- /dev/null +++ b/esmvaltool/references/hall06grl.bibtex @@ -0,0 +1,11 @@ +@article{hall06grl, + doi = {10.1029/2005gl025127}, + url = {https://doi.org/10.1029%2F2005gl025127}, + year = 2006, + publisher = {American Geophysical Union ({AGU})}, + volume = {33}, + number = {3}, + author = {Alex Hall and Xin Qu}, + title = {Using the current seasonal cycle to constrain snow albedo feedback in future climate change}, + journal = {Geophysical Research Letters} +} diff --git a/esmvaltool/references/haloe.bibtex b/esmvaltool/references/haloe.bibtex new file mode 100644 index 0000000000..5ba634e55f --- /dev/null +++ b/esmvaltool/references/haloe.bibtex @@ -0,0 +1,11 @@ +@Article{acp-5-2797-2005, +AUTHOR = {Groo{\ss}, J.-U. and Russell III, J. 
M.}, +TITLE = {Technical note: A stratospheric climatology for O$_{3}$, H$_{2}$O, CH$_{4}$, NO$_{x}$, HCl and HF derived from HALOE measurements}, +JOURNAL = {Atmospheric Chemistry and Physics}, +VOLUME = {5}, +YEAR = {2005}, +NUMBER = {10}, +PAGES = {2797--2807}, +URL = {https://www.atmos-chem-phys.net/5/2797/2005/}, +DOI = {10.5194/acp-5-2797-2005} +} \ No newline at end of file diff --git a/esmvaltool/references/hersbach20rmets.bibtex b/esmvaltool/references/hersbach20rmets.bibtex new file mode 100644 index 0000000000..27dbf434ef --- /dev/null +++ b/esmvaltool/references/hersbach20rmets.bibtex @@ -0,0 +1,13 @@ +@article{hersbach20rmets, + doi = {10.1002/qj.3803}, + url = {https://doi.org/10.1002/qj.3803}, + year = {2020}, + month = jun, + publisher = {Wiley}, + volume = {146}, + number = {730}, + pages = {1999--2049}, + author = {Hans Hersbach and Bill Bell and Paul Berrisford and Shoji Hirahara and Andr{\'{a}}s Hor{\'{a}}nyi and Joaqu{\'{\i}}n Mu{\~{n}}oz-Sabater and Julien Nicolas and Carole Peubey and Raluca Radu and Dinand Schepers and Adrian Simmons and Cornel Soci and Saleh Abdalla and Xavier Abellan and Gianpaolo Balsamo and Peter Bechtold and Gionata Biavati and Jean Bidlot and Massimo Bonavita and Giovanna Chiara and Per Dahlgren and Dick Dee and Michail Diamantakis and Rossana Dragani and Johannes Flemming and Richard Forbes and Manuel Fuentes and Alan Geer and Leo Haimberger and Sean Healy and Robin J. Hogan and El{\'{\i}}as H{\'{o}}lm and Marta Janiskov{\'{a}} and Sarah Keeley and Patrick Laloyaux and Philippe Lopez and Cristina Lupu and Gabor Radnoti and Patricia Rosnay and Iryna Rozum and Freja Vamborg and Sebastien Villaume and Jean-Noël Th{\'{e}}paut}, + title = {The {ERA}5 global reanalysis}, + journal = {Quarterly Journal of the Royal Meteorological Society} +} diff --git a/esmvaltool/references/hoogeveen15hess.bibtex b/esmvaltool/references/hoogeveen15hess.bibtex new file mode 100644 index 0000000000..d4c3020563 --- /dev/null +++ b/esmvaltool/references/hoogeveen15hess.bibtex @@ -0,0 +1,13 @@ +@article{Hoogeveen2015, + doi = {10.5194/hess-19-3829-2015}, + url = {https://doi.org/10.5194/hess-19-3829-2015}, + year = {2015}, + month = sep, + publisher = {Copernicus {GmbH}}, + volume = {19}, + number = {9}, + pages = {3829--3844}, + author = {J. Hoogeveen and J.-M. Faur{\`{e}}s and L. Peiser and J. Burke and N. van de Giesen}, + title = {{GlobWat} {\textendash} a global water balance model to assess water use in irrigated agriculture}, + journal = {Hydrology and Earth System Sciences} +} \ No newline at end of file diff --git a/esmvaltool/references/huntingford2000climdyn.bibtex b/esmvaltool/references/huntingford2000climdyn.bibtex new file mode 100644 index 0000000000..69bc072d49 --- /dev/null +++ b/esmvaltool/references/huntingford2000climdyn.bibtex @@ -0,0 +1,14 @@ +@article{huntingford2000, + title = {An analogue model to derive additional climate change scenarios from existing {GCM} simulations}, + volume = {16}, + issn = {1432-0894}, + url = {https://doi.org/10.1007/s003820000067}, + doi = {10.1007/s003820000067}, + abstract = {Changes in land surface driving variables, predicted by GCM transient climate change experiments, are confirmed to exhibit linearity in the global mean land temperature anomaly, $\Delta T_{l}$. The associated constants of proportionality retain spatial and seasonal characteristics of the GCM output, whilst $\Delta T_{l}$ is related to radiative forcing anomalies.
The resultant analogue model is shown to be robust between GCM runs and as such provides a computationally efficient technique of extending existing GCM experiments to a large range of climate change scenarios. As an example impacts study, the analogue model is used to drive a terrestrial ecosystem model, and predicted changes in terrestrial carbon are found to be similar to those when using GCM anomalies directly.}, + number = {8}, + journal = {Climate Dynamics}, + author = {Huntingford, C. and Cox, P. M.}, + month = aug, + year = {2000}, + pages = {575--586}, +} diff --git a/esmvaltool/references/hwsd.bibtex b/esmvaltool/references/hwsd.bibtex new file mode 100644 index 0000000000..a39fa5912f --- /dev/null +++ b/esmvaltool/references/hwsd.bibtex @@ -0,0 +1,9 @@ +@misc{hwsd, + doi = {10.3334/ORNLDAAC/1247}, + url = {http://daac.ornl.gov/cgi-bin/dsviewer.pl?ds_id=1247}, + author = {Wieder, W.}, + language = {eng}, + title = {Regridded Harmonized World Soil Database v1.2}, + publisher = {ORNL Distributed Active Archive Center}, + year = {2014} +} diff --git a/esmvaltool/references/isccp-fh.bibtex b/esmvaltool/references/isccp-fh.bibtex new file mode 100644 index 0000000000..f1adadefa2 --- /dev/null +++ b/esmvaltool/references/isccp-fh.bibtex @@ -0,0 +1,6 @@ +@misc{isccp-fh, + url = {https://isccp.giss.nasa.gov/pub/flux-fh/docs/Zhang\_etal\_flux-cal\_at-isccp\_v3\_2019.pdf}, + title = {Calculation, Evaluation and Application of Long-term, Global Radiative Flux Datasets at ISCCP: Past and Present}, + year = {2019}, + author = {Zhang et al.} +} diff --git a/esmvaltool/references/iturbide20essd.bibtex b/esmvaltool/references/iturbide20essd.bibtex new file mode 100644 index 0000000000..afec4f7c90 --- /dev/null +++ b/esmvaltool/references/iturbide20essd.bibtex @@ -0,0 +1,12 @@ +@article{iturbide20essd, +author = {Iturbide, M. and Guti\'errez, J. M. and Alves, L. M. and Bedia, J. and Cerezo-Mota, R. and Cimadevilla, E. and Cofi\~no, A. S. and Di Luca, A. and Faria, S. H. and Gorodetskaya, I. V. and Hauser, M. and Herrera, S. and Hennessy, K. and Hewitt, H. T. and Jones, R. G. and Krakovska, S. and Manzanas, R. and Mart\'{\i}nez-Castro, D. and Narisma, G. T. and Nurhati, I. S. and Pinto, I. and Seneviratne, S. I. and van den Hurk, B. and Vera, C. S.}, +title = {An update of IPCC climate reference regions for subcontinental analysis of +climate model data: definition and aggregated datasets}, +journal = {Earth System Science Data}, +volume = {12}, +year = {2020}, +number = {4}, +pages = {2959--2970}, +url = {https://essd.copernicus.org/articles/12/2959/2020/}, +doi = {10.5194/essd-12-2959-2020} +} diff --git a/esmvaltool/references/jma-transcom.bibtex b/esmvaltool/references/jma-transcom.bibtex new file mode 100644 index 0000000000..aa1b290396 --- /dev/null +++ b/esmvaltool/references/jma-transcom.bibtex @@ -0,0 +1,13 @@ +@article{jma-transcom, + doi = {10.1111/j.1600-0889.2010.00488.x}, + url = {https://doi.org/10.1111%2Fj.1600-0889.2010.00488.x}, + year = 2010, + month = {jan}, + publisher = {Informa {UK} Limited}, + volume = {62}, + number = {5}, + pages = {797--809}, + author = {T. Maki and M. Ikegami and T. Fujita and T. Hirahara and K. Yamada and K. Mori and A. Takeuchi and Y. Tsutsumi and K. Suda and T. J.
Conway}, + title = {New technique to analyse global distributions of {CO}2 concentrations and fluxes from non-processed observational data}, + journal = {Tellus B: Chemical and Physical Meteorology} +} diff --git a/esmvaltool/references/jones13jgr.bibtex b/esmvaltool/references/jones13jgr.bibtex new file mode 100644 index 0000000000..c945d30ba7 --- /dev/null +++ b/esmvaltool/references/jones13jgr.bibtex @@ -0,0 +1,13 @@ +@article{jones13jgr, + doi = {10.1002/jgrd.50239}, + url = {https://doi.org/10.1002%2Fjgrd.50239}, + year = 2013, + month = {may}, + publisher = {American Geophysical Union ({AGU})}, + volume = {118}, + number = {10}, + pages = {4001--4024}, + author = {Gareth S. Jones and Peter A. Stott and Nikolaos Christidis}, + title = {Attribution of observed historical near-surface temperature variations to anthropogenic and natural causes using {CMIP}5 simulations}, + journal = {Journal of Geophysical Research: Atmospheres} +} diff --git a/esmvaltool/references/jones15james.bibtex b/esmvaltool/references/jones15james.bibtex new file mode 100644 index 0000000000..9dab205be2 --- /dev/null +++ b/esmvaltool/references/jones15james.bibtex @@ -0,0 +1,13 @@ +@article{jones15james, + doi = {10.1002/2014ms000416}, + url = {https://doi.org/10.1002%2F2014ms000416}, + year = 2015, + month = {oct}, + publisher = {American Geophysical Union ({AGU})}, + volume = {7}, + number = {4}, + pages = {1554--1575}, + author = {Steve D. Jones and Corinne Le Qu{\'{e}}r{\'{e}} and Christian Rödenbeck and Andrew C. Manning and Are Olsen}, + title = {A statistical gap-filling method to interpolate global monthly surface ocean carbon dioxide data}, + journal = {Journal of Advances in Modeling Earth Systems} +} diff --git a/esmvaltool/references/jra_25.bibtex b/esmvaltool/references/jra_25.bibtex new file mode 100644 index 0000000000..cb20aa2cb7 --- /dev/null +++ b/esmvaltool/references/jra_25.bibtex @@ -0,0 +1,10 @@ +@article{jra_25, + doi = {https://doi.org/10.2151/jmsj.85.369}, + title={The JRA-25 Reanalysis}, + author={Kazutoshi ONOGI and Junichi TSUTSUI and Hiroshi KOIDE and Masami SAKAMOTO and Shinya KOBAYASHI and Hiroaki HATSUSHIKA and Takanori MATSUMOTO and Nobuo YAMAZAKI and Hirotaka KAMAHORI and Kiyotoshi TAKAHASHI and Shinji KADOKURA and Koji WADA and Koji KATO and Ryo OYAMA and Tomoaki OSE and Nobutaka MANNOJI and Ryusuke TAIRA}, + journal={Journal of the Meteorological Society of Japan. Ser. II}, + volume={85}, + number={3}, + pages={369-432}, + year={2007} +} diff --git a/esmvaltool/references/jra_55.bibtex b/esmvaltool/references/jra_55.bibtex new file mode 100644 index 0000000000..d979a6c9cc --- /dev/null +++ b/esmvaltool/references/jra_55.bibtex @@ -0,0 +1,10 @@ +@article{jra_55, + doi = {https://doi.org/10.5065/D60G3H5B}, + title={The JRA-55 Reanalysis: General Specifications and Basic Characteristics}, + author={Kobayashi, S. and Y. Ota and Y. Harada and A. Ebita and M. Moriya and H. Onoda and K. Onogi and H. Kamahori and C. Kobayashi and H. Endo and K. Miyaoka and K. Takahashi}, + journal={J. Met. Soc. 
Jap.}, + volume={93}, + number={1}, + pages={5-48}, + year={2015} +} diff --git a/esmvaltool/references/kadow20natgeosc.bibtex b/esmvaltool/references/kadow20natgeosc.bibtex new file mode 100644 index 0000000000..a4110e10de --- /dev/null +++ b/esmvaltool/references/kadow20natgeosc.bibtex @@ -0,0 +1,12 @@ +@article{kadow2020natgeosc, + doi = {https://doi.org/10.1038/s41561-020-0582-5}, + url= {https://www.nature.com/articles/s41561-020-0582-5}, + title={Artificial intelligence reconstructs missing climate information}, + author={Kadow, Christopher and Hall, David Matthew and Ulbrich, Uwe}, + journal={Nature Geoscience}, + volume={13}, + number={6}, + pages={408--413}, + year={2020}, + publisher={Nature Publishing Group} +} diff --git a/esmvaltool/references/kato18ebaf.bibtex b/esmvaltool/references/kato18ebaf.bibtex new file mode 100644 index 0000000000..f86af5419a --- /dev/null +++ b/esmvaltool/references/kato18ebaf.bibtex @@ -0,0 +1,13 @@ +@article{kato18jclim, +author = {Kato, Seiji and Rose, Fred G. and Rutan, David A. and Thorsen, Tyler J. and Loeb, Norman G. and Doelling, David R. and Huang, Xianglei and Smith, William L. and Su, Wenying and Ham, Seung-Hee}, +doi = {10.1175/JCLI-D-17-0523.1}, +issn = {0894-8755}, +journal = {Journal of Climate}, +month = {jun}, +number = {11}, +pages = {4501--4527}, +title = {{Surface Irradiances of Edition 4.0 Clouds and the Earth's Radiant Energy System (CERES) Energy Balanced and Filled (EBAF) Data Product}}, +url = {http://journals.ametsoc.org/doi/10.1175/JCLI-D-17-0523.1}, +volume = {31}, +year = {2018} +} diff --git a/esmvaltool/references/kerry06jclim.bibtex b/esmvaltool/references/kerry06jclim.bibtex new file mode 100644 index 0000000000..193fb688aa --- /dev/null +++ b/esmvaltool/references/kerry06jclim.bibtex @@ -0,0 +1,13 @@ +@article{kerry06jclim, + doi = {10.1175/jcli3814.1}, + url = {https://doi.org/10.1175%2Fjcli3814.1}, + year = 2006, + month = {aug}, + publisher = {American Meteorological Society}, + volume = {19}, + number = {15}, + pages = {3681--3703}, + author = {Kerry H. Cook and Edward K. Vizy}, + title = {Coupled Model Simulations of the West African Monsoon System: Twentieth- and Twenty-First-Century Simulations}, + journal = {Journal of Climate} +} diff --git a/esmvaltool/references/key04gbc.bibtex b/esmvaltool/references/key04gbc.bibtex new file mode 100644 index 0000000000..0fbe9663c4 --- /dev/null +++ b/esmvaltool/references/key04gbc.bibtex @@ -0,0 +1,13 @@ +@article{key04gbc, + doi = {10.1029/2004gb002247}, + url = {https://doi.org/10.1029/2004gb002247}, + year = {2004}, + month = dec, + publisher = {American Geophysical Union ({AGU})}, + volume = {18}, + number = {4}, + pages = {n/a--n/a}, + author = {R. M. Key and A. Kozyr and C. L. Sabine and K. Lee and R. Wanninkhof and J. L. Bullister and R. A. Feely and F. J. Millero and C. Mordy and T.-H. Peng}, + title = {A global ocean carbon climatology: Results from Global Data Analysis Project ({GLODAP})}, + journal = {Global Biogeochemical Cycles} +} diff --git a/esmvaltool/references/kim09jclim.bibtex b/esmvaltool/references/kim09jclim.bibtex new file mode 100644 index 0000000000..f8638e2358 --- /dev/null +++ b/esmvaltool/references/kim09jclim.bibtex @@ -0,0 +1,13 @@ +@article{kim09jclim, + doi = {10.1175/2009jcli3063.1}, + url = {https://doi.org/10.1175%2F2009jcli3063.1}, + year = 2009, + month = {dec}, + publisher = {American Meteorological Society}, + volume = {22}, + number = {23}, + pages = {6413--6436}, + author = {D. Kim and K. Sperber and W. Stern and D. 
Waliser and I.-S. Kang and E. Maloney and W. Wang and K. Weickmann and J. Benedict and M. Khairoutdinov and M.-I. Lee and R. Neale and M. Suarez and K. Thayer-Calder and G. Zhang}, + title = {Application of {MJO} Simulation Diagnostics to Climate Models}, + journal = {Journal of Climate} +} diff --git a/esmvaltool/references/kim12grl.bibtex b/esmvaltool/references/kim12grl.bibtex new file mode 100644 index 0000000000..dc086a7461 --- /dev/null +++ b/esmvaltool/references/kim12grl.bibtex @@ -0,0 +1,13 @@ +@article{kim12grl, + doi = {10.1029/2012gl052006}, + url = {https://doi.org/10.1029%2F2012gl052006}, + year = 2012, + month = {jun}, + publisher = {American Geophysical Union ({AGU})}, + volume = {39}, + number = {11}, + pages = {n/a--n/a}, + author = {Seon Tae Kim and Jin-Yi Yu}, + title = {The two types of {ENSO} in {CMIP}5 models}, + journal = {Geophysical Research Letters} +} diff --git a/esmvaltool/references/knutti2017.bibtex b/esmvaltool/references/knutti2017.bibtex new file mode 100644 index 0000000000..8212988f24 --- /dev/null +++ b/esmvaltool/references/knutti2017.bibtex @@ -0,0 +1,9 @@ +@article{knutti2017, + doi = {10.1002/2016gl072012}, + url = {https://doi.org/10.1002/2016gl072012}, + year = {2017}, + publisher = {American Geophysical Union ({AGU})}, + author = {Reto Knutti and Jan Sedl{\'{a}}{\v{c}}ek and Benjamin M. Sanderson and Ruth Lorenz and Erich M. Fischer and Veronika Eyring}, + title = {A climate model projection weighting scheme accounting for performance and interdependence}, + journal = {Geophysical Research Letters} +} \ No newline at end of file diff --git a/esmvaltool/references/koven13jclim.bibtex b/esmvaltool/references/koven13jclim.bibtex new file mode 100644 index 0000000000..47e3f566f8 --- /dev/null +++ b/esmvaltool/references/koven13jclim.bibtex @@ -0,0 +1,12 @@ +@article{koven13jclim, +author = {Koven, Charles D. and Riley, William J. 
and Stern, Alex}, +doi = {10.1175/JCLI-D-12-00228.1}, +issn = {0894-8755}, +journal = {Journal of Climate}, +month = {mar}, +number = {6}, +pages = {1877--1900}, +title = {{Analysis of Permafrost Thermal Dynamics and Response to Climate Change in the CMIP5 Earth System Models}}, +volume = {26}, +year = {2013} +} diff --git a/esmvaltool/references/lai3g.bibtex b/esmvaltool/references/lai3g.bibtex new file mode 100644 index 0000000000..aaeeeedf35 --- /dev/null +++ b/esmvaltool/references/lai3g.bibtex @@ -0,0 +1,13 @@ +@article{lai3g, + doi = {10.3390/rs5020927}, + url = {https://doi.org/10.3390%2Frs5020927}, + year = 2013, + month = {feb}, + publisher = {{MDPI} {AG}}, + volume = {5}, + number = {2}, + pages = {927--948}, + author = {Zaichun Zhu and Jian Bi and Yaozhong Pan and Sangram Ganguly and Alessandro Anav and Liang Xu and Arindam Samanta and Shilong Piao and Ramakrishna Nemani and Ranga Myneni}, + title = {Global Data Sets of Vegetation Leaf Area Index ({LAI})3g and Fraction of Photosynthetically Active Radiation ({FPAR})3g Derived from Global Inventory Modeling and Mapping Studies ({GIMMS}) Normalized Difference Vegetation Index ({NDVI}3g) for the Period 1981 to 2011}, + journal = {Remote Sensing} +} diff --git a/esmvaltool/references/landflux-eval.bibtex b/esmvaltool/references/landflux-eval.bibtex new file mode 100644 index 0000000000..c8e2c4a3ab --- /dev/null +++ b/esmvaltool/references/landflux-eval.bibtex @@ -0,0 +1,13 @@ +@article{landflux-eval, + doi = {10.5194/hess-17-3707-2013}, + url = {https://doi.org/10.5194%2Fhess-17-3707-2013}, + year = 2013, + month = {oct}, + publisher = {Copernicus {GmbH}}, + volume = {17}, + number = {10}, + pages = {3707--3720}, + author = {B. Mueller and M. Hirschi and C. Jimenez and P. Ciais and P. A. Dirmeyer and A. J. Dolman and J. B. Fisher and M. Jung and F. Ludwig and F. Maignan and D. G. Miralles and M. F. McCabe and M. Reichstein and J. Sheffield and K. Wang and E. F. Wood and Y. Zhang and S. I. Seneviratne}, + title = {Benchmark products for land evapotranspiration: {LandFlux}-{EVAL} multi-data set synthesis}, + journal = {Hydrology and Earth System Sciences} +} diff --git a/esmvaltool/references/landschuetzer2016.bibtex b/esmvaltool/references/landschuetzer2016.bibtex new file mode 100644 index 0000000000..d45c7a7d20 --- /dev/null +++ b/esmvaltool/references/landschuetzer2016.bibtex @@ -0,0 +1,13 @@ +@article{landschuetzer2016, + doi = {10.1002/2015gb005359}, + url = {https://doi.org/10.1002%2F2015gb005359}, + year = 2016, + month = {oct}, + publisher = {American Geophysical Union ({AGU})}, + volume = {30}, + number = {10}, + pages = {1396--1417}, + author = {Peter Landschützer and Nicolas Gruber and Dorothee C. E. Bakker}, + title = {Decadal variations and trends of the global ocean carbon sink}, + journal = {Global Biogeochemical Cycles} +} diff --git a/esmvaltool/references/landschuetzer2020.bibtex b/esmvaltool/references/landschuetzer2020.bibtex new file mode 100644 index 0000000000..8e01e1e7ab --- /dev/null +++ b/esmvaltool/references/landschuetzer2020.bibtex @@ -0,0 +1,11 @@ +@article{landschuetzer2020, + author = {Landschützer, P. and Laruelle, G. G. and Roobaert, A. 
and Regnier, P.}, + doi = {10.5194/essd-12-2537-2020}, + journal = {Earth System Science Data}, + number = {4}, + pages = {2537--2553}, + title = {A uniform pCO2 climatology combining open and coastal oceans}, + url = {https://essd.copernicus.org/articles/12/2537/2020/}, + volume = {12}, + year = {2020}, +} diff --git a/esmvaltool/references/langbein1949usgs.bibtex b/esmvaltool/references/langbein1949usgs.bibtex new file mode 100644 index 0000000000..9671e7985b --- /dev/null +++ b/esmvaltool/references/langbein1949usgs.bibtex @@ -0,0 +1,8 @@ +@misc{Langbein1949, + doi = {10.3133/cir52}, + url = {https://doi.org/10.3133/cir52}, + year = {1949}, + publisher = {{US} Geological Survey}, + author = {Walter Basil Langbein}, + title = {Annual runoff in the United States} +} \ No newline at end of file diff --git a/esmvaltool/references/lauer05acp.bibtex b/esmvaltool/references/lauer05acp.bibtex new file mode 100644 index 0000000000..03f2d63e07 --- /dev/null +++ b/esmvaltool/references/lauer05acp.bibtex @@ -0,0 +1,13 @@ +@article{lauer05acp, + doi = {10.5194/acp-5-3251-2005}, + url = {https://doi.org/10.5194%2Facp-5-3251-2005}, + year = 2005, + month = {dec}, + publisher = {Copernicus {GmbH}}, + volume = {5}, + number = {12}, + pages = {3251--3276}, + author = {A. Lauer and J. Hendricks and I. Ackermann and B. Schell and H. Hass and S. Metzger}, + title = {Simulating aerosol microphysics with the {ECHAM}/{MADE} {GCM} -- Part I: Model description and comparison with observations}, + journal = {Atmospheric Chemistry and Physics} +} diff --git a/esmvaltool/references/lauer13jclim.bibtex b/esmvaltool/references/lauer13jclim.bibtex new file mode 100644 index 0000000000..885fc8cb3c --- /dev/null +++ b/esmvaltool/references/lauer13jclim.bibtex @@ -0,0 +1,13 @@ +@article{lauer13jclim, + doi = {10.1175/jcli-d-12-00451.1}, + url = {https://doi.org/10.1175%2Fjcli-d-12-00451.1}, + year = 2013, + month = {jun}, + publisher = {American Meteorological Society}, + volume = {26}, + number = {11}, + pages = {3823--3845}, + author = {Axel Lauer and Kevin Hamilton}, + title = {Simulating Clouds with Global Climate Models: A Comparison of {CMIP}5 Results with {CMIP}3 and Satellite Data}, + journal = {Journal of Climate} +} diff --git a/esmvaltool/references/lauer17rse.bibtex b/esmvaltool/references/lauer17rse.bibtex new file mode 100644 index 0000000000..7b75621480 --- /dev/null +++ b/esmvaltool/references/lauer17rse.bibtex @@ -0,0 +1,12 @@ +@article{lauer17rse, + doi = {10.1016/j.rse.2017.01.007}, + url = {https://doi.org/10.1016%2Fj.rse.2017.01.007}, + year = 2017, + month = {dec}, + publisher = {Elsevier {BV}}, + volume = {203}, + pages = {9--39}, + author = {Axel Lauer and Veronika Eyring and Mattia Righi and Michael Buchwitz and Pierre Defourny and Martin Evaldsson and Pierre Friedlingstein and Richard de Jeu and Gerrit de Leeuw and Alexander Loew and Christopher J. 
Merchant and Benjamin Müller and Thomas Popp and Maximilian Reuter and Stein Sandven and Daniel Senftleben and Martin Stengel and Michel Van Roozendael and Sabrina Wenzel and Ulrika Will{\'{e}}n}, + title = {Benchmarking {CMIP}5 models with a subset of {ESA} {CCI} Phase 2 data using the {ESMValTool}}, + journal = {Remote Sensing of Environment} +} diff --git a/esmvaltool/references/lauer22jclim.bibtex b/esmvaltool/references/lauer22jclim.bibtex new file mode 100644 index 0000000000..df981c7488 --- /dev/null +++ b/esmvaltool/references/lauer22jclim.bibtex @@ -0,0 +1,13 @@ +@article{lauer23jclim, + doi = {10.1175/JCLI-D-22-0181.1}, + url = {https://journals.ametsoc.org/view/journals/clim/36/2/JCLI-D-22-0181.1.xml}, + year = 2023, + publisher = {American Meteorological Society}, + address = {Boston MA, USA}, + volume = {36}, + number = {2}, + pages = {281-311}, + author = {Axel Lauer and Lisa Bock and Birgit Hassler and Marc Schröder and Martin Stengel}, + title = {Cloud climatologies from global climate models - a comparison of CMIP5 and CMIP6 models with satellite data}, + journal = {Journal of Climate} +} diff --git a/esmvaltool/references/legates90tac.bibtex b/esmvaltool/references/legates90tac.bibtex new file mode 100644 index 0000000000..9c0d5f60ca --- /dev/null +++ b/esmvaltool/references/legates90tac.bibtex @@ -0,0 +1,8 @@ +@article{legates90tac, +author = {Legates, D. R. and Willmott, C. J.}, +journal = {Theor. Appl. Climatol.}, +pages = {11--21}, +title = {{Mean seasonal and spatial variability in global surface air temperature}}, +volume = {41}, +year = {1990} +} diff --git a/esmvaltool/references/lembo16climdyn.bibtex b/esmvaltool/references/lembo16climdyn.bibtex new file mode 100644 index 0000000000..079fd87a3a --- /dev/null +++ b/esmvaltool/references/lembo16climdyn.bibtex @@ -0,0 +1,13 @@ +@article{lembo16climdyn, + doi = {10.1007/s00382-016-3173-9}, + url = {https://doi.org/10.1007%2Fs00382-016-3173-9}, + year = 2016, + month = {may}, + publisher = {Springer Science and Business Media {LLC}}, + volume = {48}, + number = {5-6}, + pages = {1793--1812}, + author = {Valerio Lembo and Doris Folini and Martin Wild and Piero Lionello}, + title = {Energy budgets and transports: global evolution and spatial patterns during the twentieth century as estimated in two {AMIP}-like experiments}, + journal = {Climate Dynamics} +} diff --git a/esmvaltool/references/lembo19gmdd.bibtex b/esmvaltool/references/lembo19gmdd.bibtex new file mode 100644 index 0000000000..230e8739e9 --- /dev/null +++ b/esmvaltool/references/lembo19gmdd.bibtex @@ -0,0 +1,10 @@ +@misc{lembo19gmdd, + doi = {10.5194/gmd-2019-37}, + url = {https://doi.org/10.5194%2Fgmd-2019-37}, + year = 2019, + month = {feb}, + publisher = {Copernicus {GmbH}}, + author = {Valerio Lembo and Frank Lunkeit and Valerio Lucarini}, + title = {{TheDiaTo} (v1.0) -- A new diagnostic tool for water, energy and + entropy budgets in climate models} +} diff --git a/esmvaltool/references/lenderink14erl.bibtex b/esmvaltool/references/lenderink14erl.bibtex new file mode 100644 index 0000000000..08fa85057b --- /dev/null +++ b/esmvaltool/references/lenderink14erl.bibtex @@ -0,0 +1,13 @@ +@article{Lenderink2014, + doi = {10.1088/1748-9326/9/11/115008}, + url = {https://doi.org/10.1088/1748-9326/9/11/115008}, + year = {2014}, + month = nov, + publisher = {{IOP} Publishing}, + volume = {9}, + number = {11}, + pages = {115008}, + author = {G Lenderink and B J J M van den Hurk and A M G Klein Tank and G J van Oldenborgh and E van Meijgaard and H de Vries and J 
J Beersma}, + title = {Preparing local climate change scenarios for the Netherlands using resampling of climate model output}, + journal = {Environmental Research Letters} +} diff --git a/esmvaltool/references/li14jclim.bibtex b/esmvaltool/references/li14jclim.bibtex new file mode 100644 index 0000000000..6c8f2f05d8 --- /dev/null +++ b/esmvaltool/references/li14jclim.bibtex @@ -0,0 +1,13 @@ +@article{li14jclim, + doi = {10.1175/jcli-d-13-00337.1}, + url = {https://doi.org/10.1175%2Fjcli-d-13-00337.1}, + year = 2014, + month = {feb}, + publisher = {American Meteorological Society}, + volume = {27}, + number = {4}, + pages = {1765--1780}, + author = {Gen Li and Shang-Ping Xie}, + title = {Tropical Biases in {CMIP}5 Multimodel Ensemble: The Excessive Equatorial Pacific Cold Tongue and Double {ITCZ} Problems}, + journal = {Journal of Climate} +} diff --git a/esmvaltool/references/li17natcc.bibtex b/esmvaltool/references/li17natcc.bibtex new file mode 100644 index 0000000000..65b42be321 --- /dev/null +++ b/esmvaltool/references/li17natcc.bibtex @@ -0,0 +1,13 @@ +@article{li17natcc, + doi = {10.1038/nclimate3387}, + url = {https://doi.org/10.1038%2Fnclimate3387}, + year = 2017, + month = {sep}, + publisher = {Springer Science and Business Media {LLC}}, + volume = {7}, + number = {10}, + pages = {708--712}, + author = {Gen Li and Shang-Ping Xie and Chao He and Zesheng Chen}, + title = {Western Pacific emergent constraint lowers projected increase in Indian summer monsoon rainfall}, + journal = {Nature Climate Change} +} diff --git a/esmvaltool/references/lin08jclim.bibtex b/esmvaltool/references/lin08jclim.bibtex new file mode 100644 index 0000000000..7b86941d14 --- /dev/null +++ b/esmvaltool/references/lin08jclim.bibtex @@ -0,0 +1,13 @@ +@article{lin08jclim, + doi = {10.1175/2008jcli1816.1}, + url = {https://doi.org/10.1175%2F2008jcli1816.1}, + year = 2008, + month = {sep}, + publisher = {American Meteorological Society}, + volume = {21}, + number = {18}, + pages = {4541--4567}, + author = {Jia-Lin Lin and Klaus M. Weickmann and George N. Kiladis and Brian E. Mapes and Siegfried D. Schubert and Max J. Suarez and Julio T. Bacmeister and Myong-In Lee}, + title = {Subseasonal Variability Associated with Asian Summer Monsoon Simulated by 14 {IPCC} {AR}4 Coupled {GCMs}}, + journal = {Journal of Climate} +} diff --git a/esmvaltool/references/lipat17grl.bibtex b/esmvaltool/references/lipat17grl.bibtex new file mode 100644 index 0000000000..5b10e6ce53 --- /dev/null +++ b/esmvaltool/references/lipat17grl.bibtex @@ -0,0 +1,13 @@ +@article{lipat17grl, + doi = {10.1002/2017gl073151}, + url = {https://doi.org/10.1002/2017gl073151}, + year = {2017}, + month = jun, + publisher = {American Geophysical Union ({AGU})}, + volume = {44}, + number = {11}, + pages = {5739--5748}, + author = {Bernard R. Lipat and George Tselioudis and Kevin M. Grise and Lorenzo M.
Polvani}, + title = {{CMIP}5 models{\textquotesingle} shortwave cloud radiative response and climate sensitivity linked to the climatological Hadley cell extent}, + journal = {Geophysical Research Letters} +} diff --git a/esmvaltool/references/lloyd-hughes02jclim.bibtex b/esmvaltool/references/lloyd-hughes02jclim.bibtex new file mode 100644 index 0000000000..773f65e1da --- /dev/null +++ b/esmvaltool/references/lloyd-hughes02jclim.bibtex @@ -0,0 +1,12 @@ +@article{lloyd-hughes02jclim, + doi = {10.1002/joc.846}, + url = {https://doi.org/10.1002%2Fjoc.846}, + year = 2002, + publisher = {Wiley}, + volume = {22}, + number = {13}, + pages = {1571--1592}, + author = {Benjamin Lloyd-Hughes and Mark A. Saunders}, + title = {A drought climatology for Europe}, + journal = {International Journal of Climatology} +} diff --git a/esmvaltool/references/locarini10usgov.bibtex b/esmvaltool/references/locarini10usgov.bibtex new file mode 100644 index 0000000000..d0a05285f0 --- /dev/null +++ b/esmvaltool/references/locarini10usgov.bibtex @@ -0,0 +1,7 @@ +@book{locarini10usgov, + author = {Locarnini, Ricardo A. and Mishonov, Alexey V. and Antonov, John I. and Boyer, Timothy P. and Garcia, Hernan E. and Baranova, Olga K. and Zweng, Melissa M. and Paver, Christopher R. and Reagan, James R. and Johnson, Daphne R. and Hamilton, Melanie and Seidov, Dan}, + keywords = {Oceanography--Atlases, Ocean temperature--Atlases.}, + title = {World ocean atlas 2009. Volume 1, Temperature}, + publisher = {U.S. Department of Commerce, National Oceanic and Atmospheric Administration, National Environmental Satellite, Data and Information Service}, + year = {2010} +} diff --git a/esmvaltool/references/loeb19jclim.bibtex b/esmvaltool/references/loeb19jclim.bibtex new file mode 100644 index 0000000000..2bdc32ae83 --- /dev/null +++ b/esmvaltool/references/loeb19jclim.bibtex @@ -0,0 +1,12 @@ +@article{loeb19jclim, +author = {Loeb, Norman G. and Doelling, David R. and Wang, Hailan and Su, Wenying and Nguyen, Cathy and Corbett, Joseph G. and Liang, Lusheng and Mitrescu, Cristian and Rose, Fred G. and Kato, Seiji}, +doi = {10.1175/JCLI-D-17-0208.1}, +issn = {0894-8755}, +journal = {Journal of Climate}, +month = {jan}, +number = {2}, +pages = {895--918}, +title = {{Clouds and the Earth's Radiant Energy System (CERES) Energy Balanced and Filled (EBAF) Top-of-Atmosphere (TOA) Edition-4.0 Data Product}}, +volume = {31}, +year = {2018} +} diff --git a/esmvaltool/references/lorenz2018.bibtex b/esmvaltool/references/lorenz2018.bibtex new file mode 100644 index 0000000000..4863722ebd --- /dev/null +++ b/esmvaltool/references/lorenz2018.bibtex @@ -0,0 +1,13 @@ +@article{lorenz2018, + doi = {10.1029/2017jd027992}, + url = {https://doi.org/10.1029/2017jd027992}, + year = {2018}, + month = may, + publisher = {American Geophysical Union ({AGU})}, + volume = {123}, + number = {9}, + pages = {4509--4526}, + author = {Ruth Lorenz and Nadja Herger and Jan Sedl{\'{a}}{\v{c}}ek and Veronika Eyring and Erich M.
Fischer and Reto Knutti}, + title = {Prospects and Caveats of Weighting Climate Models for Summer Maximum Temperature Projections Over North America}, + journal = {Journal of Geophysical Research: Atmospheres} +} \ No newline at end of file diff --git a/esmvaltool/references/lucarini14revgeop.bibtex b/esmvaltool/references/lucarini14revgeop.bibtex new file mode 100644 index 0000000000..ccb734a100 --- /dev/null +++ b/esmvaltool/references/lucarini14revgeop.bibtex @@ -0,0 +1,13 @@ +@article{lucarini14revgeop, + doi = {10.1002/2013rg000446}, + url = {https://doi.org/10.1002%2F2013rg000446}, + year = 2014, + month = {dec}, + publisher = {American Geophysical Union ({AGU})}, + volume = {52}, + number = {4}, + pages = {809--859}, + author = {Valerio Lucarini and Richard Blender and Corentin Herbert and Francesco Ragone and Salvatore Pascale and Jeroen Wouters}, + title = {Mathematical and physical ideas for climate science}, + journal = {Reviews of Geophysics} +} diff --git a/esmvaltool/references/mac-lwp.bibtex b/esmvaltool/references/mac-lwp.bibtex new file mode 100644 index 0000000000..eeae421e2d --- /dev/null +++ b/esmvaltool/references/mac-lwp.bibtex @@ -0,0 +1,13 @@ +@article{mac-lwp, + doi = {10.1175/jcli-d-16-0902.1}, + url = {https://doi.org/10.1175/jcli-d-16-0902.1}, + year = {2017}, + month = dec, + publisher = {American Meteorological Society}, + volume = {30}, + number = {24}, + pages = {10193--10210}, + author = {Gregory S. Elsaesser and Christopher W. O'Dell and Matthew D. Lebsock and Ralf Bennartz and Thomas J. Greenwald and Frank J. Wentz}, + title = {The Multisensor Advanced Climatology of Liquid Water Path ({MAC}-{LWP})}, + journal = {Journal of Climate} +} diff --git a/esmvaltool/references/manubens18ems.bibtex b/esmvaltool/references/manubens18ems.bibtex new file mode 100644 index 0000000000..6ab0b25c2e --- /dev/null +++ b/esmvaltool/references/manubens18ems.bibtex @@ -0,0 +1,12 @@ +@article{manubens18ems, + doi = {10.1016/j.envsoft.2018.01.018}, + url = {https://doi.org/10.1016%2Fj.envsoft.2018.01.018}, + year = 2018, + month = {may}, + publisher = {Elsevier {BV}}, + volume = {103}, + pages = {29--42}, + author = {Nicolau Manubens and Louis-Philippe Caron and Alasdair Hunter and Omar Bellprat and Eleftheria Exarchou and Neven S. Fu{\v{c}}kar and Javier Garcia-Serrano and Fran{\c{c}}ois Massonnet and Martin M{\'{e}}n{\'{e}}goz and Valentina Sicardi and Lauriane Batt{\'{e}} and Chlo{\'{e}} Prodhomme and Ver{\'{o}}nica Torralba and Nicola Cortesi and Oriol Mula-Valls and Kim Serradell and Virginie Guemas and Francisco J. Doblas-Reyes}, + title = {An R package for climate forecast verification}, + journal = {Environmental Modelling {\&} Software} +} diff --git a/esmvaltool/references/martin18grl.bibtex b/esmvaltool/references/martin18grl.bibtex new file mode 100644 index 0000000000..26d70580d0 --- /dev/null +++ b/esmvaltool/references/martin18grl.bibtex @@ -0,0 +1,14 @@ +@article{martin18grl, +author = {Martin, E. 
R.}, +title = {Future Projections of Global Pluvial and Drought Event Characteristics}, +journal = {Geophysical Research Letters}, +volume = {45}, +number = {21}, +pages = {11,913-11,920}, +keywords = {pluvial, drought, precipitation, climate change, intensity, number}, +doi = {10.1029/2018GL079807}, +url = {https://agupubs.onlinelibrary.wiley.com/doi/abs/10.1029/2018GL079807}, +eprint = {https://agupubs.onlinelibrary.wiley.com/doi/pdf/10.1029/2018GL079807}, +abstract = {Abstract This study assesses projections from 24 CMIP5 models of number, duration, and severity of pluvial and drought events utilizing 6-month standardized precipitation index. Increased variability of standardized precipitation index is projected globally. More frequent, longer lasting, and stronger pluvials are projected in wet regions, and the same for droughts in dry regions. Worsening pluvials and droughts are most apparent in the Northern Hemisphere midlatitudes and the Americas, respectively. Uniquely, this study investigates pluvials and droughts in locations where the precipitation trend is of the opposite sign. In drying regions, 40\% of grid points project an increase in number and 65\% project an increase in duration of severe pluvials. Projections for severe drought events in wetting regions show similar projections. As precipitation trends alone do not provide information about pluvial and drought characteristics this study has important implications for planning and resilience.}, +year = {2018} +} diff --git a/esmvaltool/references/massonet18natcc.bibtex b/esmvaltool/references/massonet18natcc.bibtex new file mode 100644 index 0000000000..ff46c22961 --- /dev/null +++ b/esmvaltool/references/massonet18natcc.bibtex @@ -0,0 +1,17 @@ +@Article{massonet18natcc, +author={Massonnet, Fran{\c{c}}ois +and Vancoppenolle, Martin +and Goosse, Hugues +and Docquier, David +and Fichefet, Thierry +and Blanchard-Wrigglesworth, Edward}, +title={Arctic sea-ice change tied to its mean state through thermodynamic processes}, +journal={Nature Climate Change}, +year={2018}, +volume={8}, +number={7}, +pages={599-603}, +issn={1758-6798}, +doi={10.1038/s41558-018-0204-z}, +url={https://doi.org/10.1038/s41558-018-0204-z} +} diff --git a/esmvaltool/references/massonnet12tc.bibtex b/esmvaltool/references/massonnet12tc.bibtex new file mode 100644 index 0000000000..9528171269 --- /dev/null +++ b/esmvaltool/references/massonnet12tc.bibtex @@ -0,0 +1,13 @@ +@article{massonnet12tc, + doi = {10.5194/tc-6-1383-2012}, + url = {https://doi.org/10.5194%2Ftc-6-1383-2012}, + year = 2012, + month = {nov}, + publisher = {Copernicus {GmbH}}, + volume = {6}, + number = {6}, + pages = {1383--1394}, + author = {F. Massonnet and T. Fichefet and H. Goosse and C. M. Bitz and G. Philippon-Berthier and M. M. Holland and P.-Y. Barriat}, + title = {Constraining projections of summer Arctic sea ice}, + journal = {The Cryosphere} +} diff --git a/esmvaltool/references/mathison2024gmd.bibtex b/esmvaltool/references/mathison2024gmd.bibtex new file mode 100644 index 0000000000..a6090db6c7 --- /dev/null +++ b/esmvaltool/references/mathison2024gmd.bibtex @@ -0,0 +1,10 @@ +@Article{mathison2024, + AUTHOR = {Mathison, C. T. and Burke, E. and Kovacs, E. and Munday, G. and Huntingford, C. and Jones, C. and Smith, C. and Steinert, N. and Wiltshire, A. and Gohar, L. 
and Varney, R.}, + TITLE = {A rapid application emissions-to-impacts tool for scenario assessment: Probabilistic Regional Impacts from Model patterns and Emissions (PRIME)}, + JOURNAL = {EGUsphere}, + VOLUME = {2024}, + YEAR = {2024}, + PAGES = {1--28}, + URL = {https://egusphere.copernicus.org/preprints/2024/egusphere-2023-2932/}, + DOI = {10.5194/egusphere-2023-2932} +} diff --git a/esmvaltool/references/mckee93proc.bibtex b/esmvaltool/references/mckee93proc.bibtex new file mode 100644 index 0000000000..d0c9cf2c4e --- /dev/null +++ b/esmvaltool/references/mckee93proc.bibtex @@ -0,0 +1,9 @@ +@article{mckee93proc, + author = {McKee, T.B. and Doesken, N.J. and Kleist, J.}, + title = {The Relationship of Drought Frequency and Duration to Time Scales}, + journal = {Proceedings of 8th Conference on Applied Climatology}, + year = 1993, + number = 22, + pages = {179-184}, + volume = 17 +} diff --git a/esmvaltool/references/meehl20sciadv.bibtex b/esmvaltool/references/meehl20sciadv.bibtex new file mode 100644 index 0000000000..b90921ed67 --- /dev/null +++ b/esmvaltool/references/meehl20sciadv.bibtex @@ -0,0 +1,13 @@ +@article{meehl20sciadv, + author = {Meehl, Gerald A. and Senior, Catherine A. and Eyring, Veronika and Flato, Gregory and Lamarque, Jean-Francois and Stouffer, Ronald J. and Taylor, Karl E. and Schlund, Manuel}, + doi = {10.1126/sciadv.aba1981}, + eprint = {https://advances.sciencemag.org/content/6/26/eaba1981.full.pdf}, + journal = {Science Advances}, + number = {26}, + pages = {eaba1981}, + publisher = {American Association for the Advancement of Science}, + title = {Context for interpreting equilibrium climate sensitivity and transient climate response from the CMIP6 Earth system models}, + URL = {https://advances.sciencemag.org/content/6/26/eaba1981}, + volume = {6}, + year = {2020}, +} diff --git a/esmvaltool/references/mehran14jgr.bibtex b/esmvaltool/references/mehran14jgr.bibtex new file mode 100644 index 0000000000..973db90ab7 --- /dev/null +++ b/esmvaltool/references/mehran14jgr.bibtex @@ -0,0 +1,13 @@ +@article{mehran14jgr, + doi = {10.1002/2013jd021152}, + url = {https://doi.org/10.1002%2F2013jd021152}, + year = 2014, + month = {feb}, + publisher = {American Geophysical Union ({AGU})}, + volume = {119}, + number = {4}, + pages = {1695--1707}, + author = {A. Mehran and A. AghaKouchak and T. J. Phillips}, + title = {Evaluation of {CMIP}5 continental precipitation simulations relative to satellite-based gauge-adjusted observations}, + journal = {Journal of Geophysical Research: Atmospheres} +} diff --git a/esmvaltool/references/merra.bibtex b/esmvaltool/references/merra.bibtex new file mode 100644 index 0000000000..aa1df2c53d --- /dev/null +++ b/esmvaltool/references/merra.bibtex @@ -0,0 +1,13 @@ +@article{merra, + doi = {10.1175/JCLI-D-11-00015.1}, + url = {https://journals.ametsoc.org/view/journals/clim/24/14/jcli-d-11-00015.1.xml}, + year = 2011, + month = {jul}, + publisher = {American Meteorological Society}, + volume = {24}, + number = {14}, + pages = {3624--3648}, + author = {Michele M. Rienecker and Max J. Suarez and Ronald Gelaro and Ricardo Todling and Julio Bacmeister and Emily Liu and Michael G. Bosilovich and Siegfried D. Schubert and Lawrence Takacs and Gi-Kong Kim and Stephen Bloom and Junye Chen and Douglas Collins and Austin Conaty and Arlindo da Silva and Wei Gu and Joanna Joiner and Randal D. Koster and Robert Lucchesi and Andrea Molod and Tommy Owens and Steven Pawson and Philip Pegion and Christopher R. Redder and Rolf Reichle and Franklin R. 
Robertson and Albert G. Ruddick and Meta Sienkiewicz and Jack Woollen}, + title = {{MERRA}: {NASA}’s Modern-Era Retrospective Analysis for Research and Applications}, + journal = {Journal of Climate} +} diff --git a/esmvaltool/references/merra2.bibtex b/esmvaltool/references/merra2.bibtex new file mode 100644 index 0000000000..059b8b05a2 --- /dev/null +++ b/esmvaltool/references/merra2.bibtex @@ -0,0 +1,13 @@ +@article{merra2, + doi = {10.1175/jcli-d-16-0758.1}, + url = {https://doi.org/10.1175%2Fjcli-d-16-0758.1}, + year = 2017, + month = {jul}, + publisher = {American Meteorological Society}, + volume = {30}, + number = {14}, + pages = {5419--5454}, + author = {Ronald Gelaro and Will McCarty and Max J. Su{\'{a}}rez and Ricardo Todling and Andrea Molod and Lawrence Takacs and Cynthia A. Randles and Anton Darmenov and Michael G. Bosilovich and Rolf Reichle and Krzysztof Wargan and Lawrence Coy and Richard Cullather and Clara Draper and Santha Akella and Virginie Buchard and Austin Conaty and Arlindo M. da Silva and Wei Gu and Gi-Kong Kim and Randal Koster and Robert Lucchesi and Dagmar Merkova and Jon Eric Nielsen and Gary Partyka and Steven Pawson and William Putman and Michele Rienecker and Siegfried D. Schubert and Meta Sienkiewicz and Bin Zhao}, + title = {The Modern-Era Retrospective Analysis for Research and Applications, Version 2 ({MERRA}-2)}, + journal = {Journal of Climate} +} diff --git a/esmvaltool/references/mls-aura.bibtex b/esmvaltool/references/mls-aura.bibtex new file mode 100644 index 0000000000..13ba3d9762 --- /dev/null +++ b/esmvaltool/references/mls-aura.bibtex @@ -0,0 +1,9 @@ +@misc{mls-aura, + doi = {10.5067/Aura/MLS/DATA2019}, + url = {https://disc.gsfc.nasa.gov/datasets/ML2RHI_004/summary}, + author = {Read, W. and Livesey, N.}, + language = {en}, + title = {MLS/Aura Level 2 Relative Humidity With Respect To Ice V004}, + publisher = {Goddard Earth Sciences Data and Information Services Center (GES DISC)}, + year = {2015} +} diff --git a/esmvaltool/references/mobo_dic2004_2019.bibtex b/esmvaltool/references/mobo_dic2004_2019.bibtex new file mode 100644 index 0000000000..928fc33afd --- /dev/null +++ b/esmvaltool/references/mobo_dic2004_2019.bibtex @@ -0,0 +1,12 @@ +@article{mobo_dic2004_2019, + doi = {10.1029/2022gb007677}, + url = {https://doi.org/10.1029/2022gb007677}, + year = {2023}, + month = may, + publisher = {American Geophysical Union ({AGU})}, + volume = {37}, + number = {5}, + author = {L. Keppler and P. Landschützer and S. K. Lauvset and N. Gruber}, + title = {Recent Trends and Variability in the Oceanic Storage of Dissolved Inorganic Carbon}, + journal = {Global Biogeochemical Cycles} +} diff --git a/esmvaltool/references/mobo_dic_mpim.bibtex b/esmvaltool/references/mobo_dic_mpim.bibtex new file mode 100644 index 0000000000..126fc14ab8 --- /dev/null +++ b/esmvaltool/references/mobo_dic_mpim.bibtex @@ -0,0 +1,13 @@ +@article{mobo_dic_mpim, + author = {Keppler, L. and Landschützer, P. and Gruber, N. and Lauvset, S. K. 
and Stemmler, I.}, + doi = {https://doi.org/10.1029/2020GB006571}, + eprint = {https://agupubs.onlinelibrary.wiley.com/doi/pdf/10.1029/2020GB006571}, + journal = {Global Biogeochemical Cycles}, + keywords = {DIC, seasonal variability, neural networks, SOM-FFN, monthly climatology, NCP}, + number = {12}, + pages = {e2020GB006571}, + title = {Seasonal Carbon Dynamics in the Near-Global Ocean}, + url = {https://agupubs.onlinelibrary.wiley.com/doi/abs/10.1029/2020GB006571}, + volume = {34}, + year = {2020}, +} diff --git a/esmvaltool/references/modis1.bibtex b/esmvaltool/references/modis1.bibtex new file mode 100644 index 0000000000..83064e1272 --- /dev/null +++ b/esmvaltool/references/modis1.bibtex @@ -0,0 +1,13 @@ +@article{modis1, + doi = {10.1109/tgrs.2002.808301}, + url = {https://doi.org/10.1109%2Ftgrs.2002.808301}, + year = 2003, + month = {feb}, + publisher = {Institute of Electrical and Electronics Engineers ({IEEE})}, + volume = {41}, + number = {2}, + pages = {459--473}, + author = {S. Platnick and M.D. King and S.A. Ackerman and W.P. Menzel and B.A. Baum and J.C. Riedi and R.A. Frey}, + title = {The {MODIS} cloud products: algorithms and examples from terra}, + journal = {{IEEE} Transactions on Geoscience and Remote Sensing} +} diff --git a/esmvaltool/references/modis2.bibtex b/esmvaltool/references/modis2.bibtex new file mode 100644 index 0000000000..38b2f61025 --- /dev/null +++ b/esmvaltool/references/modis2.bibtex @@ -0,0 +1,13 @@ +@article{modis2, + doi = {10.5194/amt-6-2989-2013}, + url = {https://doi.org/10.5194%2Famt-6-2989-2013}, + year = 2013, + month = {nov}, + publisher = {Copernicus {GmbH}}, + volume = {6}, + number = {11}, + pages = {2989--3034}, + author = {R. C. Levy and S. Mattoo and L. A. Munchak and L. A. Remer and A. M. Sayer and F. Patadia and N. C. Hsu}, + title = {The Collection 6 {MODIS} aerosol products over land and ocean}, + journal = {Atmospheric Measurement Techniques} +} diff --git a/esmvaltool/references/morenochamarro2021.bibtex b/esmvaltool/references/morenochamarro2021.bibtex new file mode 100644 index 0000000000..36da8c758e --- /dev/null +++ b/esmvaltool/references/morenochamarro2021.bibtex @@ -0,0 +1,13 @@ +@article{morenochamarro2021, + doi = {10.1088/1748-9326/abf28a}, + url = {https://doi.org/10.1088/1748-9326/abf28a}, + year = {2021}, + month = may, + publisher = {{IOP} Publishing}, + volume = {16}, + number = {5}, + pages = {054063}, + author = {E Moreno-Chamarro and L-P Caron and P Ortega and S Loosveldt Tomas and M J Roberts}, + title = {Can we trust {CMIP}5/6 future projections of European winter precipitation?}, + journal = {Environmental Research Letters} +} diff --git a/esmvaltool/references/mte.bibtex b/esmvaltool/references/mte.bibtex new file mode 100644 index 0000000000..dc00289aff --- /dev/null +++ b/esmvaltool/references/mte.bibtex @@ -0,0 +1,11 @@ +@article{mte, + doi = {10.1029/2010jg001566}, + url = {https://doi.org/10.1029%2F2010jg001566}, + year = 2011, + month = {sep}, + publisher = {American Geophysical Union ({AGU})}, + volume = {116}, + author = {Martin Jung and Markus Reichstein and Hank A. Margolis and Alessandro Cescatti and Andrew D. Richardson and M. Altaf Arain and Almut Arneth and Christian Bernhofer and Damien Bonal and Jiquan Chen and Damiano Gianelle and Nadine Gobron and Gerald Kiely and Werner Kutsch and Gitta Lasslop and Beverly E. Law and Anders Lindroth and Lutz Merbold and Leonardo Montagnani and Eddy J. 
Moors and Dario Papale and Matteo Sottocornola and Francesco Vaccari and Christopher Williams},
+  title = {Global patterns of land-atmosphere fluxes of carbon dioxide, latent heat, and sensible heat derived from eddy covariance, satellite, and meteorological observations},
+  journal = {Journal of Geophysical Research}
+}
diff --git a/esmvaltool/references/mueller13hess.bibtex b/esmvaltool/references/mueller13hess.bibtex
new file mode 100644
index 0000000000..4815df9185
--- /dev/null
+++ b/esmvaltool/references/mueller13hess.bibtex
@@ -0,0 +1,13 @@
+@article{mueller13hess,
+  doi = {10.5194/hess-17-3707-2013},
+  url = {https://doi.org/10.5194%2Fhess-17-3707-2013},
+  year = 2013,
+  month = {oct},
+  publisher = {Copernicus {GmbH}},
+  volume = {17},
+  number = {10},
+  pages = {3707--3720},
+  author = {B. Mueller and M. Hirschi and C. Jimenez and P. Ciais and P. A. Dirmeyer and A. J. Dolman and J. B. Fisher and M. Jung and F. Ludwig and F. Maignan and D. G. Miralles and M. F. McCabe and M. Reichstein and J. Sheffield and K. Wang and E. F. Wood and Y. Zhang and S. I. Seneviratne},
+  title = {Benchmark products for land evapotranspiration: {LandFlux}-{EVAL} multi-data set synthesis},
+  journal = {Hydrology and Earth System Sciences}
+}
diff --git a/esmvaltool/references/mueller14grl.bibtex b/esmvaltool/references/mueller14grl.bibtex
new file mode 100644
index 0000000000..6019f9eb0f
--- /dev/null
+++ b/esmvaltool/references/mueller14grl.bibtex
@@ -0,0 +1,13 @@
+@article{mueller14grl,
+  doi = {10.1002/2013gl058055},
+  url = {https://doi.org/10.1002%2F2013gl058055},
+  year = 2014,
+  month = {jan},
+  publisher = {American Geophysical Union ({AGU})},
+  volume = {41},
+  number = {1},
+  pages = {128--134},
+  author = {B. Mueller and S. I. Seneviratne},
+  title = {Systematic land climate and evapotranspiration biases in {CMIP}5 simulations},
+  journal = {Geophysical Research Letters}
+}
diff --git a/esmvaltool/references/ncep-doe-r2.bibtex b/esmvaltool/references/ncep-doe-r2.bibtex
new file mode 100644
index 0000000000..74d93b4569
--- /dev/null
+++ b/esmvaltool/references/ncep-doe-r2.bibtex
@@ -0,0 +1,11 @@
+@article{ncep-doe-r2,
+  doi = {10.1175/BAMS-83-11-1631},
+  url = {https://doi.org/10.1175/BAMS-83-11-1631},
+  year = {2002},
+  volume = {83},
+  number = {11},
+  publisher = {American Meteorological Society},
+  author = {Kanamitsu, M., W. Ebisuzaki, J. Woollen, S.-K. Yang, J.J. Hnilo, M. Fiorino, and G.L. Potter},
+  title = {NCEP–DOE AMIP-II Reanalysis (R-2)},
+  journal = {Bulletin of the American Meteorological Society}
+}
diff --git a/esmvaltool/references/ncep-ncar-r1.bibtex b/esmvaltool/references/ncep-ncar-r1.bibtex
new file mode 100644
index 0000000000..f0504e6a0b
--- /dev/null
+++ b/esmvaltool/references/ncep-ncar-r1.bibtex
@@ -0,0 +1,13 @@
+@article{ncep-ncar-r1,
+  doi = {10.1175/1520-0477(1996)077<0437:TNYRP>2.0.CO;2},
+  url = {https://doi.org/10.1175/1520-0477(1996)077<0437:TNYRP>2.0.CO;2},
+  year = 1996,
+  month = {mar},
+  publisher = {American Meteorological Society},
+  volume = {77},
+  number = {3},
+  pages = {437--471},
+  author = {Kalnay, E., M. Kanamitsu, R. Kistler, W. Collins, D. Deaven, L. Gandin, M. Iredell, S. Saha, G. White, J. Woollen, Y. Zhu, M. Chelliah, W. Ebisuzaki, W. Higgins, J. Janowiak, K.C. Mo, C. Ropelewski, J. Wang, A. Leetmaa, R. Reynolds, R. Jenne, and D. Joseph},
+  title = {The NCEP/NCAR 40-Year Reanalysis Project},
+  journal = {Bulletin of the American Meteorological Society}
+}
diff --git a/esmvaltool/references/ncep.bibtex b/esmvaltool/references/ncep.bibtex
new file mode 100644
index 0000000000..748882c855
--- /dev/null
+++ b/esmvaltool/references/ncep.bibtex
@@ -0,0 +1,13 @@
+@article{ncep,
+  doi = {10.1175/1520-0477(1996)077<0437:tnyrp>2.0.co;2},
+  url = {https://doi.org/10.1175%2F1520-0477%281996%29077%3C0437%3Atnyrp%3E2.0.co%3B2},
+  year = 1996,
+  month = {mar},
+  publisher = {American Meteorological Society},
+  volume = {77},
+  number = {3},
+  pages = {437--471},
+  author = {E. Kalnay and M. Kanamitsu and R. Kistler and W. Collins and D. Deaven and L. Gandin and M. Iredell and S. Saha and G. White and J. Woollen and Y. Zhu and A. Leetmaa and R. Reynolds and M. Chelliah and W. Ebisuzaki and W. Higgins and J. Janowiak and K. C. Mo and C. Ropelewski and J. Wang and Roy Jenne and Dennis Joseph},
+  title = {The {NCEP}/{NCAR} 40-Year Reanalysis Project},
+  journal = {Bulletin of the American Meteorological Society}
+}
diff --git a/esmvaltool/references/ndp.bibtex b/esmvaltool/references/ndp.bibtex
new file mode 100644
index 0000000000..a97125f537
--- /dev/null
+++ b/esmvaltool/references/ndp.bibtex
@@ -0,0 +1,9 @@
+@misc{ndp,
+  doi = {10.3334/cdiac/lue.ndp017.2006},
+  url = {https://doi.org/10.3334%2Fcdiac%2Flue.ndp017.2006},
+  year = 2006,
+  month = {aug},
+  publisher = {{ORNL} Environmental Sciences Division},
+  author = {H.K. Gibbs},
+  title = {Olson{\textquotesingle}s Major World Ecosystem Complexes Ranked by Carbon in Live Vegetation: An Updated Database Using the {GLC}2000 Land Cover Product}
+}
diff --git a/esmvaltool/references/niwa-bs.bibtex b/esmvaltool/references/niwa-bs.bibtex
new file mode 100644
index 0000000000..98ac1be2ec
--- /dev/null
+++ b/esmvaltool/references/niwa-bs.bibtex
@@ -0,0 +1,13 @@
+@article{niwa-bs,
+  doi = {10.5194/acp-5-2603-2005},
+  url = {https://doi.org/10.5194%2Facp-5-2603-2005},
+  year = 2005,
+  month = {sep},
+  publisher = {Copernicus {GmbH}},
+  volume = {5},
+  number = {10},
+  pages = {2603--2615},
+  author = {G. E. Bodeker and H. Shiona and H. Eskes},
+  title = {Indicators of Antarctic ozone depletion},
+  journal = {Atmospheric Chemistry and Physics}
+}
diff --git a/esmvaltool/references/noaa-cires-20cr-v2.bibtex b/esmvaltool/references/noaa-cires-20cr-v2.bibtex
new file mode 100644
index 0000000000..79314b1d4b
--- /dev/null
+++ b/esmvaltool/references/noaa-cires-20cr-v2.bibtex
@@ -0,0 +1,11 @@
+@article{noaa-cires-20cr-v2,
+  doi = {10.1002/qj.776},
+  url = {https://doi.org/10.1002/qj.776},
+  author = {Compo, G.P., Whitaker, J.S., Sardeshmukh, P.D., Matsui, N., Allan, R.J., Yin, X., Gleason, B.E., Vose, R.S., Rutledge, G., Bessemoulin, P., Brönnimann, S., Brunet, M., Crouthamel, R.I., Grant, A.N., Groisman, P.Y., Jones, P.D., Kruk, M.C., Kruger, A.C., Marshall, G.J., Maugeri, M., Mok, H.Y., Nordli, O., Ross, T.F., Trigo, R.M., Wang, X.L., Woodruff, S.D. and Worley, S.J.},
+  title = {The Twentieth Century Reanalysis Project},
+  year = {2011},
+  journal = {Quarterly J. Roy. Meteorol. Soc.},
+  volume = {137},
+  number = {654},
+  pages = {1--28},
+}
diff --git a/esmvaltool/references/noaa-cires-20cr-v3.bibtex b/esmvaltool/references/noaa-cires-20cr-v3.bibtex
new file mode 100644
index 0000000000..d1bf482e9d
--- /dev/null
+++ b/esmvaltool/references/noaa-cires-20cr-v3.bibtex
@@ -0,0 +1,11 @@
+@article{noaa-cires-20cr-v3,
+  journal = {Quarterly J. Roy. Meteorol. Soc.},
+  author = {Slivinski, Laura C. and Compo, Gilbert P. and Whitaker, Jeffrey S. and Sardeshmukh, Prashant D. and Giese, Benjamin S. and McColl, Chesley and Allan, Rob and Yin, Xungang and Vose, Russell and Titchner, Holly and Kennedy, John and Spencer, Lawrence J. and Ashcroft, Linden and Brönnimann, Stefan and Brunet, Manola and Camuffo, Dario and Cornes, Richard and Cram, Thomas A. and Crouthamel, Richard and Domínguez-Castro, Fernando and Freeman, J. Eric and Gergis, Joëlle and Hawkins, Ed and Jones, Philip D. and Jourdain, Sylvie and Kaplan, Alexey and Kubota, Hisayuki and Blancq, Frank Le and Lee, Tsz-Cheung and Lorrey, Andrew and Luterbacher, Jürg and Maugeri, Maurizio and Mock, Cary J. and Moore, G.W. Kent and Przybylak, Rajmund and Pudmenzky, Christa and Reason, Chris and Slonosky, Victoria C. and Smith, Catherine A. and Tinz, Birger and Trewin, Blair and Valente, Maria Antónia and Wang, Xiaolan L. and Wilkinson, Clive and Wood, Kevin and Wyszynski, Przemys{\l}aw},
+  title = {Towards a more reliable historical reanalysis: Improvements for version 3 of the Twentieth Century Reanalysis system},
+  volume = {145},
+  number = {724},
+  pages = {2876--2908},
+  doi = {10.1002/qj.3598},
+  url = {https://rmets.onlinelibrary.wiley.com/doi/abs/10.1002/qj.3598},
+  year = {2019}
+}
diff --git a/esmvaltool/references/noaa-mbl-ch4.bibtex b/esmvaltool/references/noaa-mbl-ch4.bibtex
new file mode 100644
index 0000000000..e4c6a17cbd
--- /dev/null
+++ b/esmvaltool/references/noaa-mbl-ch4.bibtex
@@ -0,0 +1,9 @@
+@misc{noaa-mbl-ch4,
+  doi = {10.15138/P8XG-AA10},
+  url = {https://gml.noaa.gov/ccgg/trends_ch4/},
+  year = 2023,
+  month = {jul},
+  author = {Lan, X., K.W. Thoning, and E.J. Dlugokencky},
+  title = {Trends in globally-averaged CH4, N2O, and SF6 determined from NOAA Global Monitoring Laboratory measurements. Version 2023-09},
+  howpublished = {via website https://gml.noaa.gov/ccgg/trends_ch4/, provided by the NOAA/GML, Boulder, Colorado, USA}
+}
diff --git a/esmvaltool/references/noaaglobaltemp.bibtex b/esmvaltool/references/noaaglobaltemp.bibtex
new file mode 100644
index 0000000000..37c496cddc
--- /dev/null
+++ b/esmvaltool/references/noaaglobaltemp.bibtex
@@ -0,0 +1,7 @@
+@article{noaaglobaltemp,
+  doi = {10.25921/9qth-2p70},
+  url = {https://doi.org/10.25921/9qth-2p70},
+  publisher = {NOAA National Centers for Environmental Information},
+  author = {Zhang, H.-M., B. Huang, J. Lawrimore, M. Menne, Thomas M. Smith},
+  title = {NOAA Global Surface Temperature Dataset (NOAAGlobalTemp), Version 5},
+}
diff --git a/esmvaltool/references/nsidc-0116.bibtex b/esmvaltool/references/nsidc-0116.bibtex
new file mode 100644
index 0000000000..2bae39f05c
--- /dev/null
+++ b/esmvaltool/references/nsidc-0116.bibtex
@@ -0,0 +1,8 @@
+@misc{nsidc-0116,
+  doi = {10.5067/INAWUWO7QH7B},
+  url = {http://nsidc.org/data/nsidc-0116/versions/4},
+  author = {{M. Tschudi/Univ Of CO}},
+  title = {Polar Pathfinder Daily 25 km EASE-Grid Sea Ice Motion Vectors},
+  publisher = {NASA National Snow and Ice Data Center DAAC},
+  year = {2019}
+}
diff --git a/esmvaltool/references/nsidc-g02202.bibtex b/esmvaltool/references/nsidc-g02202.bibtex
new file mode 100644
index 0000000000..e0710714ea
--- /dev/null
+++ b/esmvaltool/references/nsidc-g02202.bibtex
@@ -0,0 +1,8 @@
+@misc{nsidc-g02202,
+  doi = {10.7265/efmz-2t65},
+  title = {NOAA/NSIDC Climate Data Record of Passive Microwave Sea Ice Concentration, Version 4},
+  url = {https://nsidc.org/data/G02202/versions/4},
+  author = {Meier, W. N., F. Fetterer, A. K. Windnagel, and J. S.
Stewart.}, + publisher = {National Snow and Ice Data Center}, + year = {2021} +} \ No newline at end of file diff --git a/esmvaltool/references/oceansoda_ethz.bibtex b/esmvaltool/references/oceansoda_ethz.bibtex new file mode 100644 index 0000000000..754ecc5638 --- /dev/null +++ b/esmvaltool/references/oceansoda_ethz.bibtex @@ -0,0 +1,11 @@ +@article{oceansoda_ethz, + author = {Gregor, L. and Gruber, N.}, + doi = {10.5194/essd-13-777-2021}, + journal = {Earth System Science Data}, + number = {2}, + pages = {777--808}, + title = {OceanSODA-ETHZ: a global gridded data set of the surface ocean carbonate system for seasonal to decadal studies of ocean acidification}, + url = {https://essd.copernicus.org/articles/13/777/2021/}, + volume = {13}, + year = {2021}, +} diff --git a/esmvaltool/references/osi-450.bibtex b/esmvaltool/references/osi-450.bibtex new file mode 100644 index 0000000000..79fcde3384 --- /dev/null +++ b/esmvaltool/references/osi-450.bibtex @@ -0,0 +1,10 @@ +@misc{osi-450, + doi = {10.15770/EUM_SAF_OSI_0008}, + url = {https://navigator.eumetsat.int/product/EO:EUM:DAT:MULT:OSI-450}, + author = {{OSI SAF}}, + keywords = {Climate, Thematic Climate Data Record, Ocean, Sea Ice}, + language = {en}, + title = {Global Sea Ice Concentration Climate Data Record v2.0 - Multimission}, + publisher = {OSI SAF}, + year = {2017} +} diff --git a/esmvaltool/references/patmos-x.bibtex b/esmvaltool/references/patmos-x.bibtex new file mode 100644 index 0000000000..9f2d0f6e63 --- /dev/null +++ b/esmvaltool/references/patmos-x.bibtex @@ -0,0 +1,13 @@ +@article{patmos-x, + doi = {10.1175/bams-d-12-00246.1}, + url = {https://doi.org/10.1175%2Fbams-d-12-00246.1}, + year = 2014, + month = {jun}, + publisher = {American Meteorological Society}, + volume = {95}, + number = {6}, + pages = {909--922}, + author = {Andrew K. Heidinger and Michael J. Foster and Andi Walther and Xuepeng (Tom) Zhao}, + title = {The Pathfinder Atmospheres{\textendash}Extended {AVHRR} Climate Dataset}, + journal = {Bulletin of the American Meteorological Society} +} diff --git a/esmvaltool/references/pearce23jclim.bibtex b/esmvaltool/references/pearce23jclim.bibtex new file mode 100644 index 0000000000..ebfac532ad --- /dev/null +++ b/esmvaltool/references/pearce23jclim.bibtex @@ -0,0 +1,13 @@ +@article{pearce23jclim, + doi = {10.1175/JCLI-D-22-0149.1}, + url = {https://doi.org/10.1175/JCLI-D-22-0149.1}, + year = 2023, + month = {jun}, + publisher = {American Meteorological Society}, + volume = {36}, + number = {9}, + pages = {1--30}, + author = {Francesca Pearce and Alejandro Bodas-Salcedo}, + title = {Implied heat transport from {CERES} data: direct radiative effect of clouds on regional patterns and hemispheric symmetry}, + journal = {Journal of Climate} +} diff --git a/esmvaltool/references/persiann-cdr.bibtex b/esmvaltool/references/persiann-cdr.bibtex new file mode 100644 index 0000000000..770949b023 --- /dev/null +++ b/esmvaltool/references/persiann-cdr.bibtex @@ -0,0 +1,9 @@ +@misc{persiann-cdr, + doi = {10.7289/V51V5BWQ}, + url = {https://data.nodc.noaa.gov/cgi-bin/iso?id=gov.noaa.ncdc:C00854}, + author = {Sorooshian, Soroosh and Hsu, Kuolin and Braithwaite, Dan and Ashouri, Hamed and NOAA CDR Program}, + keywords = {Precipitation climate data record}, + title = {NOAA Climate Data Record (CDR) of Precipitation Estimation from Remotely Sensed Information using Artificial Neural Networks (PERSIANN-CDR), Version 1 Revision 1}, + publisher = {U.S. 
Department of Commerce, National Oceanic and Atmospheric Administration, NESDIS, National Centers for Environmental Information}, + year = {2014} +} diff --git a/esmvaltool/references/phc.bibtex b/esmvaltool/references/phc.bibtex new file mode 100644 index 0000000000..b469301fe6 --- /dev/null +++ b/esmvaltool/references/phc.bibtex @@ -0,0 +1,13 @@ +@article{phc, + doi = {10.1175/1520-0442(2001)014<2079:pagohw>2.0.co;2}, + url = {https://doi.org/10.1175%2F1520-0442%282001%29014%3C2079%3Apagohw%3E2.0.co%3B2}, + year = 2001, + month = {may}, + publisher = {American Meteorological Society}, + volume = {14}, + number = {9}, + pages = {2079--2087}, + author = {Michael Steele and Rebecca Morley and Wendy Ermold}, + title = {{PHC}: A Global Ocean Hydrography with a High-Quality Arctic Ocean}, + journal = {Journal of Climate} +} diff --git a/esmvaltool/references/phillips14eos.bibtex b/esmvaltool/references/phillips14eos.bibtex new file mode 100644 index 0000000000..582688766e --- /dev/null +++ b/esmvaltool/references/phillips14eos.bibtex @@ -0,0 +1,13 @@ +@article{phillips14eos, + doi = {10.1002/2014eo490002}, + url = {https://doi.org/10.1002/2014eo490002}, + year = {2014}, + month = dec, + publisher = {American Geophysical Union ({AGU})}, + volume = {95}, + number = {49}, + pages = {453--455}, + author = {Adam S. Phillips and Clara Deser and John Fasullo}, + title = {Evaluating Modes of Variability in Climate Models}, + journal = {Eos, Transactions American Geophysical Union} +} diff --git a/esmvaltool/references/piomas.bibtex b/esmvaltool/references/piomas.bibtex new file mode 100644 index 0000000000..be0f432680 --- /dev/null +++ b/esmvaltool/references/piomas.bibtex @@ -0,0 +1,13 @@ +@article{piomas, + doi = {10.1175/1520-0493(2003)131<0845:mgsiwa>2.0.co;2}, + url = {https://doi.org/10.1175%2F1520-0493%282003%29131%3C0845%3Amgsiwa%3E2.0.co%3B2}, + year = 2003, + month = {may}, + publisher = {American Meteorological Society}, + volume = {131}, + number = {5}, + pages = {845--861}, + author = {Jinlun Zhang and D. A. Rothrock}, + title = {Modeling Global Sea Ice with a Thickness and Enthalpy Distribution Model in Generalized Curvilinear Coordinates}, + journal = {Monthly Weather Review} +} diff --git a/esmvaltool/references/rebora06jhm.bibtex b/esmvaltool/references/rebora06jhm.bibtex new file mode 100644 index 0000000000..ac6ce04c99 --- /dev/null +++ b/esmvaltool/references/rebora06jhm.bibtex @@ -0,0 +1,13 @@ +@article{rebora06jhm, + doi = {10.1175/jhm517.1}, + url = {https://doi.org/10.1175/jhm517.1}, + year = {2006}, + month = aug, + publisher = {American Meteorological Society}, + volume = {7}, + number = {4}, + pages = {724--738}, + author = {Nicola Rebora and Luca Ferraris and Jost von Hardenberg and Antonello Provenzale}, + title = {{RainFARM}: Rainfall Downscaling by a Filtered Autoregressive Model}, + journal = {Journal of Hydrometeorology} +} diff --git a/esmvaltool/references/regen.bibtex b/esmvaltool/references/regen.bibtex new file mode 100644 index 0000000000..037b7b405e --- /dev/null +++ b/esmvaltool/references/regen.bibtex @@ -0,0 +1,11 @@ +@Article{regen, +AUTHOR = {Contractor, S. and Donat, M. G. and Alexander, L. V. and Ziese, M. and Meyer-Christoffer, A. and Schneider, U. and Rustemeier, E. and Becker, A. and Durre, I. and Vose, R. 
S.}, +TITLE = {Rainfall Estimates on a Gridded Network (REGEN) -- a global land-based gridded dataset of daily precipitation from 1950 to 2016}, +JOURNAL = {Hydrology and Earth System Sciences}, +VOLUME = {24}, +YEAR = {2020}, +NUMBER = {2}, +PAGES = {919--943}, +URL = {https://www.hydrol-earth-syst-sci.net/24/919/2020/}, +DOI = {10.5194/hess-24-919-2020} +} \ No newline at end of file diff --git a/esmvaltool/references/righi13acp.bibtex b/esmvaltool/references/righi13acp.bibtex new file mode 100644 index 0000000000..5c80f88a85 --- /dev/null +++ b/esmvaltool/references/righi13acp.bibtex @@ -0,0 +1,13 @@ +@article{righi13acp, + doi = {10.5194/acp-13-9939-2013}, + url = {https://doi.org/10.5194%2Facp-13-9939-2013}, + year = 2013, + month = {oct}, + publisher = {Copernicus {GmbH}}, + volume = {13}, + number = {19}, + pages = {9939--9970}, + author = {M. Righi and J. Hendricks and R. Sausen}, + title = {The global impact of the transport sectors on atmospheric aerosol: simulations for year 2000 emissions}, + journal = {Atmospheric Chemistry and Physics} +} diff --git a/esmvaltool/references/righi15gmd.bibtex b/esmvaltool/references/righi15gmd.bibtex new file mode 100644 index 0000000000..308d1bb433 --- /dev/null +++ b/esmvaltool/references/righi15gmd.bibtex @@ -0,0 +1,13 @@ +@article{righi15gmd, + doi = {10.5194/gmd-8-733-2015}, + url = {https://doi.org/10.5194%2Fgmd-8-733-2015}, + year = 2015, + month = {mar}, + publisher = {Copernicus {GmbH}}, + volume = {8}, + number = {3}, + pages = {733--768}, + author = {M. Righi and V. Eyring and K.-D. Gottschaldt and C. Klinger and F. Frank and P. Jöckel and I. Cionni}, + title = {Quantitative evaluation of ozone and selected climate parameters in a set of {EMAC} simulations}, + journal = {Geoscientific Model Development} +} diff --git a/esmvaltool/references/rk2008bams.bibtex b/esmvaltool/references/rk2008bams.bibtex new file mode 100644 index 0000000000..1d15a6ef4f --- /dev/null +++ b/esmvaltool/references/rk2008bams.bibtex @@ -0,0 +1,13 @@ +@article{rk2008bams, + doi = {10.1175/bams-89-3-303}, + url = {https://doi.org/10.1175%2Fbams-89-3-303}, + year = 2008, + month = {mar}, + publisher = {American Meteorological Society}, + volume = {89}, + number = {3}, + pages = {303--312}, + author = {Thomas Reichler and Junsu Kim}, + title = {How Well Do Coupled Models Simulate Today{\textquotesingle}s Climate?}, + journal = {Bulletin of the American Meteorological Society} +} diff --git a/esmvaltool/references/roedenbeck13os.bibtex b/esmvaltool/references/roedenbeck13os.bibtex new file mode 100644 index 0000000000..0fe51fbf72 --- /dev/null +++ b/esmvaltool/references/roedenbeck13os.bibtex @@ -0,0 +1,13 @@ +@article{roedenbeck13os, + doi = {10.5194/os-9-193-2013}, + url = {https://doi.org/10.5194%2Fos-9-193-2013}, + year = 2013, + month = {mar}, + publisher = {Copernicus {GmbH}}, + volume = {9}, + number = {2}, + pages = {193--216}, + author = {C. Rödenbeck and R. F. Keeling and D. C. E. Bakker and N. Metzl and A. Olsen and C. Sabine and M. 
Heimann}, + title = {Global surface-ocean {pCO}2 and sea{\textendash}air {CO}2 flux variability from an observation-driven ocean mixed-layer scheme}, + journal = {Ocean Science} +} diff --git a/esmvaltool/references/roehrig13jclim.bibtex b/esmvaltool/references/roehrig13jclim.bibtex new file mode 100644 index 0000000000..ac3720bdcc --- /dev/null +++ b/esmvaltool/references/roehrig13jclim.bibtex @@ -0,0 +1,13 @@ +@article{roehrig13jclim, + doi = {10.1175/jcli-d-12-00505.1}, + url = {https://doi.org/10.1175%2Fjcli-d-12-00505.1}, + year = 2013, + month = {sep}, + publisher = {American Meteorological Society}, + volume = {26}, + number = {17}, + pages = {6471--6505}, + author = {Romain Roehrig and Dominique Bouniol and Francoise Guichard and Fr{\'{e}}d{\'{e}}ric Hourdin and Jean-Luc Redelsperger}, + title = {The Present and Future of the West African Monsoon: A Process-Oriented Assessment of {CMIP}5 Simulations along the {AMMA} Transect}, + journal = {Journal of Climate} +} diff --git a/esmvaltool/references/russell18jgr.bibtex b/esmvaltool/references/russell18jgr.bibtex new file mode 100644 index 0000000000..192dca5935 --- /dev/null +++ b/esmvaltool/references/russell18jgr.bibtex @@ -0,0 +1,13 @@ +@article{russell18jgr, + doi = {10.1002/2017jc013461}, + url = {https://doi.org/10.1002/2017jc013461}, + year = {2018}, + month = may, + publisher = {American Geophysical Union ({AGU})}, + volume = {123}, + number = {5}, + pages = {3120--3143}, + author = {Joellen L. Russell and Igor Kamenkovich and Cecilia Bitz and Raffaele Ferrari and Sarah T. Gille and Paul J. Goodman and Robert Hallberg and Kenneth Johnson and Karina Khazmutdinova and Irina Marinov and Matthew Mazloff and Stephen Riser and Jorge L. Sarmiento and Kevin Speer and Lynne D. Talley and Rik Wanninkhof}, + title = {Metrics for the Evaluation of the Southern Ocean in Coupled Climate Models and Earth System Models}, + journal = {Journal of Geophysical Research: Oceans} +} diff --git a/esmvaltool/references/schlund20esd.bibtex b/esmvaltool/references/schlund20esd.bibtex new file mode 100644 index 0000000000..2cae5c282b --- /dev/null +++ b/esmvaltool/references/schlund20esd.bibtex @@ -0,0 +1,11 @@ +@article{schlund20esd, + author = {Schlund, M. and Lauer, A. and Gentine, P. and Sherwood, S. C. 
and Eyring, V.}, + doi = {10.5194/esd-11-1233-2020}, + journal = {Earth System Dynamics}, + number = {4}, + pages = {1233--1258}, + title = {Emergent constraints on equilibrium climate sensitivity in CMIP5: do they hold for CMIP6?}, + url = {https://esd.copernicus.org/articles/11/1233/2020/}, + volume = {11}, + year = {2020}, +} diff --git a/esmvaltool/references/schlund20jgr.bibtex b/esmvaltool/references/schlund20jgr.bibtex new file mode 100644 index 0000000000..df905a4dde --- /dev/null +++ b/esmvaltool/references/schlund20jgr.bibtex @@ -0,0 +1,11 @@ +@article{schlund20jgr, + author = {Schlund, Manuel and Eyring, Veronika and Camps-Valls, Gustau and Friedlingstein, Pierre and Gentine, Pierre and Reichstein, Markus}, + title = {Constraining Uncertainty in Projected Gross Primary Production With Machine Learning}, + journal = {Journal of Geophysical Research: Biogeosciences}, + volume = {125}, + number = {11}, + pages = {e2019JG005619}, + doi = {10.1029/2019JG005619}, + url = {https://agupubs.onlinelibrary.wiley.com/doi/abs/10.1029/2019JG005619}, + year = {2020}, +} diff --git a/esmvaltool/references/scripps_co2.bibtex b/esmvaltool/references/scripps_co2.bibtex new file mode 100644 index 0000000000..5114890469 --- /dev/null +++ b/esmvaltool/references/scripps_co2.bibtex @@ -0,0 +1,9 @@ +@incollection{scripps_co2, + doi = {10.1007/0-387-27048-5_5}, + title = {Atmospheric CO 2 and 13 CO 2 exchange with the terrestrial biosphere and oceans from 1978 to 2000: Observations and carbon cycle implications}, + author = {Keeling, Charles D and Piper, Stephen C and Bacastow, Robert B and Wahlen, Martin and Whorf, Timothy P and Heimann, Martin and Meijer, Harro A}, + booktitle = {A history of atmospheric CO2 and its effects on plants, animals, and ecosystems}, + pages = {83--113}, + year = {2005}, + publisher = {Springer} +} diff --git a/esmvaltool/references/seneviratne12ipcc.bibtex b/esmvaltool/references/seneviratne12ipcc.bibtex new file mode 100644 index 0000000000..66be640fa5 --- /dev/null +++ b/esmvaltool/references/seneviratne12ipcc.bibtex @@ -0,0 +1,53 @@ +@incollection{ WOS:000519741300006, +Author = {Seneviratne, Sonia I. and Nicholls, Neville and Easterling, David and + Goodess, Clare M. and Kanae, Shinjiro and Kossin, James and Luo, Yali + and Marengo, Jose and McInnes, Kathleen and Rahimi, Mohammad and + Reichstein, Markus and Sorteberg, Asgeir and Vera, Carolina and Zhang, + Xuebin and Rusticucci, Matilde and Semenov, Vladimir and Alexander, Lisa + V. and Allen, Simon and Benito, Gerardo and Cavazos, Tereza and Clague, + John and Conway, Declan and Della-Marta, Paul M. and Gerber, Markus and + Gong, Sunling and Goswami, B. N. and Hemer, Mark and Huggel, Christian + and van den Hurk, Bart and Kharin, Viatcheslav V. and Kitoh, Akio and + Tank, Albert M. G. 
Klein and Li, Guilong and Mason, Simon and McGuire, + William and van Oldenborgh, Geert Jan and Orlowsky, Boris and Smith, + Sharon and Thiaw, Wassila and Velegrakis, Adonis and Yiou, Pascal and + Zhang, Tingjun and Zhou, Tianjun and Zwiers, Francis W.}, +Editor = {{Field, CB and Barros, V and Stocker, TF and Dahe, Q and Dokken, DJ and Ebi, KL and Mastrandrea, MD and Mach, KJ and Plattner, GK and Allen, SK and Tignor, M and Midgley, PM}}, +Book-Group-Author = {{Intergov Panel Clim Chg}}, +Title = {Changes in Climate Extremes and their Impacts on the Natural Physical + Environment}, +Booktitle = {MANAGING THE RISKS OF EXTREME EVENTS AND DISASTERS TO ADVANCE CLIMATE + CHANGE ADAPTATION}, +Year = {{2012}}, +Pages = {{109-230}}, +ISBN = {{978-1-107-02506-6; 978-1-107-60780-4}}, +ResearcherID-Numbers = {{Sorteberg, Asgeir/N-8576-2015 + McInnes, Kathleen L/A-7787-2012 + Cavazos, Tereza/AAF-2253-2020 + Alexander, Lisa V/A-8477-2011 + Zhang, Xuebin/ABD-7511-2021 + van den Hurk, Bart/ABI-1654-2020 + Kanae, Shinjiro/E-5606-2010 + ZHANG, TINGJUN/AAX-3662-2020 + Benito, Gerardo/E-5456-2013 + Rusticucci, Matilde/K-5249-2017 + Hemer, Mark/M-1905-2013 + Seneviratne, Sonia/G-8761-2011 + }}, +ORCID-Numbers = {{Sorteberg, Asgeir/0000-0001-6003-9618 + McInnes, Kathleen L/0000-0002-1810-7215 + Cavazos, Tereza/0000-0003-3097-9021 + Alexander, Lisa V/0000-0002-5635-2457 + van den Hurk, Bart/0000-0003-3726-7086 + Kanae, Shinjiro/0000-0002-3176-4957 + Benito, Gerardo/0000-0003-0724-1790 + Rusticucci, Matilde/0000-0003-2588-6234 + Hemer, Mark/0000-0002-7725-3474 + Vera, Carolina/0000-0003-4032-5232 + Nicholls, Neville/0000-0002-1298-4356 + Conway, Declan/0000-0002-4590-6733 + Allen, Simon/0000-0002-4809-649X + Seneviratne, Sonia/0000-0001-9528-2917 + Goodess, Clare/0000-0002-7462-4479}}, +Unique-ID = {{WOS:000519741300006}}, +} diff --git a/esmvaltool/references/sherwood14nat.bibtex b/esmvaltool/references/sherwood14nat.bibtex new file mode 100644 index 0000000000..6ccff68ac7 --- /dev/null +++ b/esmvaltool/references/sherwood14nat.bibtex @@ -0,0 +1,13 @@ +@article{sherwood14nat, + doi = {10.1038/nature12829}, + url = {https://doi.org/10.1038%2Fnature12829}, + year = 2014, + month = {jan}, + publisher = {Springer Science and Business Media {LLC}}, + volume = {505}, + number = {7481}, + pages = {37--42}, + author = {Steven C. Sherwood and Sandrine Bony and Jean-Louis Dufresne}, + title = {Spread in model climate sensitivity traced to atmospheric convective mixing}, + journal = {Nature} +} diff --git a/esmvaltool/references/sillman13jgr.bibtex b/esmvaltool/references/sillman13jgr.bibtex new file mode 100644 index 0000000000..6926c342b6 --- /dev/null +++ b/esmvaltool/references/sillman13jgr.bibtex @@ -0,0 +1,13 @@ +@article{Sillmann2013, + doi = {10.1002/jgrd.50203}, + url = {https://doi.org/10.1002/jgrd.50203}, + year = {2013}, + month = feb, + publisher = {American Geophysical Union ({AGU})}, + volume = {118}, + number = {4}, + pages = {1716--1733}, + author = {J. Sillmann and V. V. Kharin and X. Zhang and F. W. Zwiers and D. Bronaugh}, + title = {Climate extremes indices in the {CMIP}5 multimodel ensemble: Part 1. Model evaluation in the present climate}, + journal = {Journal of Geophysical Research: Atmospheres} +} diff --git a/esmvaltool/references/sommer17joss.bibtex b/esmvaltool/references/sommer17joss.bibtex new file mode 100644 index 0000000000..1eb4574039 --- /dev/null +++ b/esmvaltool/references/sommer17joss.bibtex @@ -0,0 +1,12 @@ +@article{sommer17joss, + author = {Philipp S. 
Sommer}, + doi = {10.21105/joss.00363}, + journal = {Journal of Open Source Software}, + number = {16}, + pages = {363}, + publisher = {The Open Journal}, + title = {The psyplot interactive visualization framework}, + url = {https://doi.org/10.21105/joss.00363}, + volume = {2}, + year = {2017}, +} diff --git a/esmvaltool/references/sperber12asl.bibtex b/esmvaltool/references/sperber12asl.bibtex new file mode 100644 index 0000000000..7f356d1c5b --- /dev/null +++ b/esmvaltool/references/sperber12asl.bibtex @@ -0,0 +1,13 @@ +@article{sperber12asl, + doi = {10.1002/asl.378}, + url = {https://doi.org/10.1002%2Fasl.378}, + year = 2012, + month = {apr}, + publisher = {Wiley}, + volume = {13}, + number = {3}, + pages = {187--193}, + author = {Kenneth R. Sperber and Daehyun Kim}, + title = {Simplified metrics for the identification of the Madden-Julian oscillation in models}, + journal = {Atmospheric Science Letters} +} diff --git a/esmvaltool/references/stephens17bams.bibtex b/esmvaltool/references/stephens17bams.bibtex new file mode 100644 index 0000000000..8792b6286b --- /dev/null +++ b/esmvaltool/references/stephens17bams.bibtex @@ -0,0 +1,10 @@ +@article{stephens17bams, +author = {Stephens, Graeme and Winker, David and Pelon, Jacques and Trepte, Charles and Vane, Deborah and Yuhas, Cheryl and L'Ecuyer, Tristan and Lebsock, Mathew}, +year = {2017}, +month = {08}, +pages = {}, +title = {CloudSat and CALIPSO within the A-Train: Ten years of actively observing the Earth system}, +volume = {99}, +journal = {Bulletin of the American Meteorological Society}, +doi = {10.1175/BAMS-D-16-0324.1} +} diff --git a/esmvaltool/references/straus07jcli.bibtex b/esmvaltool/references/straus07jcli.bibtex new file mode 100644 index 0000000000..cf8ca13110 --- /dev/null +++ b/esmvaltool/references/straus07jcli.bibtex @@ -0,0 +1,13 @@ +@article{straus07jcli, + doi = {10.1175/jcli4070.1}, + url = {https://doi.org/10.1175%2Fjcli4070.1}, + year = 2007, + month = {may}, + publisher = {American Meteorological Society}, + volume = {20}, + number = {10}, + pages = {2251--2272}, + author = {David M. Straus and Susanna Corti and Franco Molteni}, + title = {Circulation Regimes: Chaotic Variability versus {SST}-Forced Predictability}, + journal = {Journal of Climate} +} diff --git a/esmvaltool/references/stroeve07grl.bibtex b/esmvaltool/references/stroeve07grl.bibtex new file mode 100644 index 0000000000..46a00600c4 --- /dev/null +++ b/esmvaltool/references/stroeve07grl.bibtex @@ -0,0 +1,12 @@ +@article{stroeve07grl, + doi = {10.1029/2007gl029703}, + url = {https://doi.org/10.1029%2F2007gl029703}, + year = 2007, + month = {may}, + publisher = {American Geophysical Union ({AGU})}, + volume = {34}, + number = {9}, + author = {Julienne Stroeve and Marika M. Holland and Walt Meier and Ted Scambos and Mark Serreze}, + title = {Arctic sea ice decline: Faster than forecast}, + journal = {Geophysical Research Letters} +} diff --git a/esmvaltool/references/su14jgr.bibtex b/esmvaltool/references/su14jgr.bibtex new file mode 100644 index 0000000000..3ba1acc621 --- /dev/null +++ b/esmvaltool/references/su14jgr.bibtex @@ -0,0 +1,13 @@ +@article{su14jgr, + doi = {10.1002/2014jd021642}, + url = {https://doi.org/10.1002/2014jd021642}, + year = {2014}, + month = may, + publisher = {American Geophysical Union ({AGU})}, + volume = {119}, + number = {10}, + pages = {5787--5805}, + author = {Hui Su and Jonathan H. Jiang and Chengxing Zhai and Tsaepyng J. Shen and J. David Neelin and Graeme L. Stephens and Yuk L. 
Yung}, + title = {Weakening and strengthening structures in the Hadley Circulation change under global warming and implications for cloud response and climate sensitivity}, + journal = {Journal of Geophysical Research: Atmospheres} +} diff --git a/esmvaltool/references/sutanudjaja2018gmd.bibtex b/esmvaltool/references/sutanudjaja2018gmd.bibtex new file mode 100644 index 0000000000..ed23c49b24 --- /dev/null +++ b/esmvaltool/references/sutanudjaja2018gmd.bibtex @@ -0,0 +1,13 @@ +@article{sutanudjaja2018gmd, + doi = {10.5194/gmd-11-2429-2018}, + url = {https://doi.org/10.5194%2Fgmd-11-2429-2018}, + year = 2018, + month = {jun}, + publisher = {Copernicus {GmbH}}, + volume = {11}, + number = {6}, + pages = {2429--2453}, + author = {Edwin H. Sutanudjaja and Rens van Beek and Niko Wanders and Yoshihide Wada and Joyce H. C. Bosmans and Niels Drost and Ruud J. van der Ent and Inge E. M. de Graaf and Jannis M. Hoch and Kor de Jong and Derek Karssenberg and Patricia L{\'{o}}pez L{\'{o}}pez and Stefanie Pe{\ss}enteiner and Oliver Schmitz and Menno W. Straatsma and Ekkamol Vannametee and Dominik Wisser and Marc F. P. Bierkens}, + title = {{PCR}-{GLOBWB}~2: a 5 arcmin global hydrological and water resources model}, + journal = {Geoscientific Model Development} +} diff --git a/esmvaltool/references/takahashi14marchem.bibtex b/esmvaltool/references/takahashi14marchem.bibtex new file mode 100644 index 0000000000..0b8746d51c --- /dev/null +++ b/esmvaltool/references/takahashi14marchem.bibtex @@ -0,0 +1,12 @@ +@article{takahashi14marchem, + doi = {10.1016/j.marchem.2014.06.004}, + url = {https://doi.org/10.1016%2Fj.marchem.2014.06.004}, + year = 2014, + month = {aug}, + publisher = {Elsevier {BV}}, + volume = {164}, + pages = {95--125}, + author = {Taro Takahashi and S.C. Sutherland and D.W. Chipman and J.G. Goddard and Cheng Ho and Timothy Newberger and Colm Sweeney and D.R. Munro}, + title = {Climatological distributions of {pH}, {pCO}2, total {CO}2, alkalinity, and {CaCO}3 saturation in the global surface ocean, and temporal changes at selected locations}, + journal = {Marine Chemistry} +} diff --git a/esmvaltool/references/taylor12nature.bibtex b/esmvaltool/references/taylor12nature.bibtex new file mode 100644 index 0000000000..1778da2eae --- /dev/null +++ b/esmvaltool/references/taylor12nature.bibtex @@ -0,0 +1,13 @@ +@article{taylor12nature, + doi = {10.1038/nature11377}, + url = {https://doi.org/10.1038%2Fnature11377}, + year = 2012, + month = {sep}, + publisher = {Springer Science and Business Media {LLC}}, + volume = {489}, + number = {7416}, + pages = {423--426}, + author = {Christopher M. Taylor and Richard A. M. de Jeu and Fran{\c{c}}oise Guichard and Phil P. Harris and Wouter A. 
Dorigo},
+  title = {Afternoon rain more likely over drier soils},
+  journal = {Nature}
+}
diff --git a/esmvaltool/references/tcom_ch4.bibtex b/esmvaltool/references/tcom_ch4.bibtex
new file mode 100644
index 0000000000..a014685d73
--- /dev/null
+++ b/esmvaltool/references/tcom_ch4.bibtex
@@ -0,0 +1,10 @@
+@article{tcom_ch4,
+  author = {Dhomse, Sandip S.},
+  title = {{TCOM-CH4: TOMCAT CTM and Occultation Measurements based daily zonal stratospheric methane profile dataset [1991-2021] constructed using machine-learning}},
+  month = nov,
+  year = 2022,
+  publisher = {Zenodo},
+  version = {1.0},
+  doi = {10.5281/zenodo.7293740},
+  url = {https://doi.org/10.5281/zenodo.7293740},
+}
diff --git a/esmvaltool/references/tcom_n2o.bibtex b/esmvaltool/references/tcom_n2o.bibtex
new file mode 100644
index 0000000000..6d0048dd6e
--- /dev/null
+++ b/esmvaltool/references/tcom_n2o.bibtex
@@ -0,0 +1,10 @@
+@article{tcom_n2o,
+  author = {Dhomse, Sandip},
+  title = {{TCOM-N2O: TOMCAT CTM and Occultation Measurements based daily zonal stratospheric nitrous oxide profile dataset [1991-2021] constructed using machine-learning}},
+  month = dec,
+  year = 2022,
+  publisher = {Zenodo},
+  version = {1.0},
+  doi = {10.5281/zenodo.7386001},
+  url = {https://doi.org/10.5281/zenodo.7386001},
+}
diff --git a/esmvaltool/references/tebaldi21esd.bibtex b/esmvaltool/references/tebaldi21esd.bibtex
new file mode 100644
index 0000000000..295e6f8a4b
--- /dev/null
+++ b/esmvaltool/references/tebaldi21esd.bibtex
@@ -0,0 +1,13 @@
+@article{tebaldi21esd,
+  doi = {10.5194/esd-12-253-2021},
+  url = {https://doi.org/10.5194/esd-12-253-2021},
+  year = {2021},
+  month = mar,
+  publisher = {Copernicus {GmbH}},
+  volume = {12},
+  number = {1},
+  pages = {253--293},
+  author = {Claudia Tebaldi and Kevin Debeire and Veronika Eyring and Erich Fischer and John Fyfe and Pierre Friedlingstein and Reto Knutti and Jason Lowe and Brian O{\textquotesingle}Neill and Benjamin Sanderson and Detlef van Vuuren and Keywan Riahi and Malte Meinshausen and Zebedee Nicholls and Katarzyna B. Tokarska and George Hurtt and Elmar Kriegler and Jean-Francois Lamarque and Gerald Meehl and Richard Moss and Susanne E. Bauer and Olivier Boucher and Victor Brovkin and Young-Hwa Byun and Martin Dix and Silvio Gualdi and Huan Guo and Jasmin G.
John and Slava Kharin and YoungHo Kim and Tsuyoshi Koshiro and Libin Ma and Dirk Olivi{\'{e}} and Swapna Panickal and Fangli Qiao and Xinyao Rong and Nan Rosenbloom and Martin Schupfner and Roland S{\'{e}}f{\'{e}}rian and Alistair Sellar and Tido Semmler and Xiaoying Shi and Zhenya Song and Christian Steger and Ronald Stouffer and Neil Swart and Kaoru Tachiiri and Qi Tang and Hiroaki Tatebe and Aurore Voldoire and Evgeny Volodin and Klaus Wyser and Xiaoge Xin and Shuting Yang and Yongqiang Yu and Tilo Ziehn}, + title = {Climate model projections from the Scenario Model Intercomparison Project~({ScenarioMIP}) of {CMIP}6}, + journal = {Earth System Dynamics} +} diff --git a/esmvaltool/references/terzago18nhess.bibtex b/esmvaltool/references/terzago18nhess.bibtex new file mode 100644 index 0000000000..91e9bb52ec --- /dev/null +++ b/esmvaltool/references/terzago18nhess.bibtex @@ -0,0 +1,13 @@ +@article{terzago18nhess, + doi = {10.5194/nhess-18-2825-2018}, + url = {https://doi.org/10.5194%2Fnhess-18-2825-2018}, + year = 2018, + month = {nov}, + publisher = {Copernicus {GmbH}}, + volume = {18}, + number = {11}, + pages = {2825--2840}, + author = {Silvia Terzago and Elisa Palazzi and Jost von Hardenberg}, + title = {Stochastic downscaling of precipitation in complex orography: a simple method to reproduce a realistic fine-scale climatology}, + journal = {Natural Hazards and Earth System Sciences} +} diff --git a/esmvaltool/references/tian15grl.bibtex b/esmvaltool/references/tian15grl.bibtex new file mode 100644 index 0000000000..ee84d95d53 --- /dev/null +++ b/esmvaltool/references/tian15grl.bibtex @@ -0,0 +1,13 @@ +@article{tian15grl, + doi = {10.1002/2015gl064119}, + url = {https://doi.org/10.1002%2F2015gl064119}, + year = 2015, + month = {may}, + publisher = {American Geophysical Union ({AGU})}, + volume = {42}, + number = {10}, + pages = {4133--4141}, + author = {Baijun Tian}, + title = {Spread of model climate sensitivity linked to double-Intertropical Convergence Zone bias}, + journal = {Geophysical Research Letters} +} diff --git a/esmvaltool/references/tibaldi90tel.bibtex b/esmvaltool/references/tibaldi90tel.bibtex new file mode 100644 index 0000000000..ffe2dfbcdc --- /dev/null +++ b/esmvaltool/references/tibaldi90tel.bibtex @@ -0,0 +1,13 @@ +@article{tibaldi90tel, + doi = {10.1034/j.1600-0870.1990.t01-2-00003.x}, + url = {https://doi.org/10.1034%2Fj.1600-0870.1990.t01-2-00003.x}, + year = 1990, + month = {may}, + publisher = {Informa {UK} Limited}, + volume = {42}, + number = {3}, + pages = {343--365}, + author = {STEFANO TIBALDI and FRANCO MOLTENI}, + title = {On the operational predictability of blocking}, + journal = {Tellus A} +} diff --git a/esmvaltool/references/uwisc.bibtex b/esmvaltool/references/uwisc.bibtex new file mode 100644 index 0000000000..7b551db48b --- /dev/null +++ b/esmvaltool/references/uwisc.bibtex @@ -0,0 +1,13 @@ +@article{uwisc, + doi = {10.1175/2007jcli1958.1}, + url = {https://doi.org/10.1175%2F2007jcli1958.1}, + year = 2008, + month = {apr}, + publisher = {American Meteorological Society}, + volume = {21}, + number = {8}, + pages = {1721--1739}, + author = {Christopher W. O'Dell and Frank J. 
Wentz and Ralf Bennartz}, + title = {Cloud Liquid Water Path from Satellite-Based Passive Microwave Observations: A New Climatology over the Global Oceans}, + journal = {Journal of Climate} +} diff --git a/esmvaltool/references/vicente10jclim.bibtex b/esmvaltool/references/vicente10jclim.bibtex new file mode 100644 index 0000000000..ea16f7e735 --- /dev/null +++ b/esmvaltool/references/vicente10jclim.bibtex @@ -0,0 +1,13 @@ +@article{vicente10jclim, + doi = {10.1175/2009jcli2909.1}, + url = {https://doi.org/10.1175/2009jcli2909.1}, + year = {2010}, + month = apr, + publisher = {American Meteorological Society}, + volume = {23}, + number = {7}, + pages = {1696--1718}, + author = {Sergio M. Vicente-Serrano and Santiago Beguer{\'{\i}}a and Juan I. L{\'{o}}pez-Moreno}, + title = {A Multiscalar Drought Index Sensitive to Global Warming: The Standardized Precipitation Evapotranspiration Index}, + journal = {Journal of Climate} +} diff --git a/esmvaltool/references/volodin08izvestiya.bibtex b/esmvaltool/references/volodin08izvestiya.bibtex new file mode 100644 index 0000000000..6767b8a1af --- /dev/null +++ b/esmvaltool/references/volodin08izvestiya.bibtex @@ -0,0 +1,13 @@ +@article{volodin08izvestiya, + doi = {10.1134/s0001433808030043}, + url = {https://doi.org/10.1134/s0001433808030043}, + year = {2008}, + month = jun, + publisher = {Pleiades Publishing Ltd}, + volume = {44}, + number = {3}, + pages = {288--299}, + author = {E. M. Volodin}, + title = {Relation between temperature sensitivity to doubled carbon dioxide and the distribution of clouds in current climate models}, + journal = {Izvestiya, Atmospheric and Oceanic Physics} +} diff --git a/esmvaltool/references/wang11climdyn.bibtex b/esmvaltool/references/wang11climdyn.bibtex new file mode 100644 index 0000000000..dd03cf6601 --- /dev/null +++ b/esmvaltool/references/wang11climdyn.bibtex @@ -0,0 +1,13 @@ +@article{wang11climdyn, + doi = {10.1007/s00382-011-1266-z}, + url = {https://doi.org/10.1007%2Fs00382-011-1266-z}, + year = 2011, + month = {dec}, + publisher = {Springer Science and Business Media {LLC}}, + volume = {39}, + number = {5}, + pages = {1123--1135}, + author = {Bin Wang and Jian Liu and Hyung-Jin Kim and Peter J. Webster and So-Young Yim}, + title = {Recent change of the global monsoon precipitation (1979{\textendash}2008)}, + journal = {Climate Dynamics} +} diff --git a/esmvaltool/references/wang99bams.bibtex b/esmvaltool/references/wang99bams.bibtex new file mode 100644 index 0000000000..e9387286f3 --- /dev/null +++ b/esmvaltool/references/wang99bams.bibtex @@ -0,0 +1,13 @@ +@article{wang99bams, + doi = {10.1175/1520-0477(1999)080<0629:cosasm>2.0.co;2}, + url = {https://doi.org/10.1175/1520-0477(1999)080<0629:cosasm>2.0.co;2}, + year = {1999}, + month = apr, + publisher = {American Meteorological Society}, + volume = {80}, + number = {4}, + pages = {629--638}, + author = {Bin Wang and Zhen Fan}, + title = {Choice of South Asian Summer Monsoon Indices}, + journal = {Bulletin of the American Meteorological Society} +} diff --git a/esmvaltool/references/waskom21joss.bibtex b/esmvaltool/references/waskom21joss.bibtex new file mode 100644 index 0000000000..386d270d8b --- /dev/null +++ b/esmvaltool/references/waskom21joss.bibtex @@ -0,0 +1,12 @@ +@article{waskom21joss, + doi = {10.21105/joss.03021}, + url = {https://doi.org/10.21105/joss.03021}, + year = {2021}, + publisher = {The Open Journal}, + volume = {6}, + number = {60}, + pages = {3021}, + author = {Michael L. 
Waskom}, + title = {seaborn: statistical data visualization}, + journal = {Journal of Open Source Software} +} diff --git a/esmvaltool/references/webster92qjrms.bibtex b/esmvaltool/references/webster92qjrms.bibtex new file mode 100644 index 0000000000..26cc2e941c --- /dev/null +++ b/esmvaltool/references/webster92qjrms.bibtex @@ -0,0 +1,13 @@ +@article{webster92qjrms, + doi = {10.1002/qj.49711850705}, + url = {https://doi.org/10.1002%2Fqj.49711850705}, + year = 1992, + month = {jul}, + publisher = {Wiley}, + volume = {118}, + number = {507}, + pages = {877--926}, + author = {Peter J. Webster and Song Yang}, + title = {Monsoon and Enso: Selectively Interactive Systems}, + journal = {Quarterly Journal of the Royal Meteorological Society} +} diff --git a/esmvaltool/references/weedon14wrr.bibtex b/esmvaltool/references/weedon14wrr.bibtex new file mode 100644 index 0000000000..b39d2e468b --- /dev/null +++ b/esmvaltool/references/weedon14wrr.bibtex @@ -0,0 +1,13 @@ +@article{weedon14wrr, + doi = {10.1002/2014wr015638}, + url = {https://doi.org/10.1002%2F2014wr015638}, + year = 2014, + month = {sep}, + publisher = {American Geophysical Union ({AGU})}, + volume = {50}, + number = {9}, + pages = {7505--7514}, + author = {Graham P. Weedon and Gianpaolo Balsamo and Nicolas Bellouin and Sandra Gomes and Martin J. Best and Pedro Viterbo}, + title = {The {WFDEI} meteorological forcing data set: {WATCH} Forcing Data methodology applied to {ERA}-Interim reanalysis data}, + journal = {Water Resources Research} +} diff --git a/esmvaltool/references/weigel08qjrms.bibtex b/esmvaltool/references/weigel08qjrms.bibtex new file mode 100644 index 0000000000..9df234f5d8 --- /dev/null +++ b/esmvaltool/references/weigel08qjrms.bibtex @@ -0,0 +1,13 @@ +@article{weigel08qjrms, + doi = {10.1002/qj.210}, + url = {https://doi.org/10.1002%2Fqj.210}, + year = 2008, + month = {jan}, + publisher = {Wiley}, + volume = {134}, + number = {630}, + pages = {241--260}, + author = {A. P. Weigel and M. A. Liniger and C. Appenzeller}, + title = {Can multi-model combination really enhance the prediction skill of probabilistic ensemble forecasts?}, + journal = {Quarterly Journal of the Royal Meteorological Society} +} diff --git a/esmvaltool/references/weigel2021gmd.bibtex b/esmvaltool/references/weigel2021gmd.bibtex new file mode 100644 index 0000000000..d6323d6471 --- /dev/null +++ b/esmvaltool/references/weigel2021gmd.bibtex @@ -0,0 +1,13 @@ +@Article{weigel2021gmd, +AUTHOR = {Weigel, K. and Bock, L. and Gier, B. K. and Lauer, A. and Righi, M. and Schlund, M. and Adeniyi, K. and Andela, B. and Arnone, E. and Berg, P. and Caron, L.-P. and Cionni, I. and Corti, S. and Drost, N. and Hunter, A. and Lled\'o, L. and Mohr, C. W. and Pa\c{c}al, A. and P\'erez-Zan\'on, N. and Predoi, V. and Sandstad, M. and Sillmann, J. and Sterl, A. and Vegas-Regidor, J. and von Hardenberg, J. 
and Eyring, V.}, +TITLE = {Earth System Model Evaluation Tool (ESMValTool) v2.0 -- diagnostics for +extreme events, regional and impact evaluation, and analysis of Earth system +models in CMIP}, +JOURNAL = {Geoscientific Model Development}, +VOLUME = {14}, +YEAR = {2021}, +NUMBER = {6}, +PAGES = {3159--3184}, +URL = {https://gmd.copernicus.org/articles/14/3159/2021/}, +DOI = {10.5194/gmd-14-3159-2021} +} diff --git a/esmvaltool/references/wenzel14jgr.bibtex b/esmvaltool/references/wenzel14jgr.bibtex new file mode 100644 index 0000000000..0cf0935b26 --- /dev/null +++ b/esmvaltool/references/wenzel14jgr.bibtex @@ -0,0 +1,13 @@ +@article{wenzel14jgr, + doi = {10.1002/2013jg002591}, + url = {https://doi.org/10.1002/2013jg002591}, + year = {2014}, + month = may, + publisher = {American Geophysical Union ({AGU})}, + volume = {119}, + number = {5}, + pages = {794--807}, + author = {Sabrina Wenzel and Peter M. Cox and Veronika Eyring and Pierre Friedlingstein}, + title = {Emergent constraints on climate-carbon cycle feedbacks in the {CMIP}5 Earth system models}, + journal = {Journal of Geophysical Research: Biogeosciences} +} diff --git a/esmvaltool/references/wenzel16jclim.bibtex b/esmvaltool/references/wenzel16jclim.bibtex new file mode 100644 index 0000000000..d7a476400c --- /dev/null +++ b/esmvaltool/references/wenzel16jclim.bibtex @@ -0,0 +1,13 @@ +@article{wenzel16jclim, + doi = {10.1175/jcli-d-15-0412.1}, + url = {https://doi.org/10.1175%2Fjcli-d-15-0412.1}, + year = 2016, + month = {jan}, + publisher = {American Meteorological Society}, + volume = {29}, + number = {2}, + pages = {673--687}, + author = {Sabrina Wenzel and Veronika Eyring and Edwin P. Gerber and Alexey Yu. Karpechko}, + title = {Constraining Future Summer Austral Jet Stream Positions in the {CMIP}5 Ensemble by Process-Oriented Multiple Diagnostic Regression}, + journal = {Journal of Climate} +} diff --git a/esmvaltool/references/wenzel16nat.bibtex b/esmvaltool/references/wenzel16nat.bibtex new file mode 100644 index 0000000000..9541fde57c --- /dev/null +++ b/esmvaltool/references/wenzel16nat.bibtex @@ -0,0 +1,11 @@ +@article{wenzel16nat, + doi = {10.1038/nature19772}, + url = {https://doi.org/10.1038/nature19772}, + year = {2016}, + month = jan, + volume = {538}, + pages = {1476-4687}, + author = {Sabrina Wenzel and Peter M. Cox and Veronika Eyring and Pierre Friedlingstein}, + title = {Projected land photosynthesis constrained by changes in the seasonal cycle of atmospheric CO2}, + journal = {Nature} +} diff --git a/esmvaltool/references/wfde5.bibtex b/esmvaltool/references/wfde5.bibtex new file mode 100644 index 0000000000..55dab08b4f --- /dev/null +++ b/esmvaltool/references/wfde5.bibtex @@ -0,0 +1,11 @@ +@article{wfde5, + doi = {10.5194/essd-12-2097-2020}, + author = {Cucchi, M. and Weedon, G. P. and Amici, A. and Bellouin, N. and Lange, S. and M\"uller Schmied, H. and Hersbach, H. 
and Buontempo, C.}, + title = {WFDE5: bias-adjusted ERA5 reanalysis data for impact studies}, + journal = {Earth System Science Data}, + volume = {12}, + year = {2020}, + number = {3}, + pages = {2097--2120}, + url = {https://essd.copernicus.org/articles/12/2097/2020/} +} \ No newline at end of file diff --git a/esmvaltool/references/williams09climdyn.bibtex b/esmvaltool/references/williams09climdyn.bibtex new file mode 100644 index 0000000000..2a21d1338c --- /dev/null +++ b/esmvaltool/references/williams09climdyn.bibtex @@ -0,0 +1,13 @@ +@article{williams09climdyn, + doi = {10.1007/s00382-008-0443-1}, + url = {https://doi.org/10.1007%2Fs00382-008-0443-1}, + year = 2008, + month = {aug}, + publisher = {Springer Science and Business Media {LLC}}, + volume = {33}, + number = {1}, + pages = {141--157}, + author = {K. D. Williams and M. J. Webb}, + title = {A quantitative performance assessment of cloud regimes in climate models}, + journal = {Climate Dynamics} +} diff --git a/esmvaltool/references/woa2013v2.bibtex b/esmvaltool/references/woa2013v2.bibtex new file mode 100644 index 0000000000..198e2a9022 --- /dev/null +++ b/esmvaltool/references/woa2013v2.bibtex @@ -0,0 +1,9 @@ +@book{woa2013v2, + doi = {10.7289/V55X26VD}, + url = {https://repository.library.noaa.gov/view/noaa/14847}, + author = {Locarnini, Ricardo A. and Mishonov, Alexey V. and Antonov, John I. and Boyer, Timothy P. and Garcia, Hernan E. and Baranova, Olga K. and Zweng, Melissa M. and Paver, Christoper R. and Reagan, James R. and Johnson, Daphne R. and Hamilton, Melanie and Seidov, Dan}, + keywords = {Oceanography--Atlases, Ocean temperature--Atlases.}, + title = {World ocean atlas 2013. Volume 1, Temperature}, + publisher = {U.S. Department of Commerce, National Oceanic and Atmospheric Administration, National Environmental Satellite, Data and Information Service}, + year = {2013} +} diff --git a/esmvaltool/references/woa2018.bibtex b/esmvaltool/references/woa2018.bibtex new file mode 100644 index 0000000000..136ba0d9f1 --- /dev/null +++ b/esmvaltool/references/woa2018.bibtex @@ -0,0 +1,8 @@ +@book{woa2018, + title={World Ocean Atlas 2018. [temperature, salinity, oxygen, nutrients]}, + author={Boyer, Tim P. and Garcia, Hernan E. and Locarnini, Ricardo A. and Zweng, Melissa M. and Mishonov, Alexey V. and Reagan, James R. and Weathers, Katharine A. and Baranova, Olga K. and Paver, Christopher R. and Seidov, Dan and Smolyar, Igor V.}, + publisher = {NOAA National Centers for Environmental Information}, + url = {https://www.ncei.noaa.gov/archive/accession/NCEI-WOA18}, + note = {Accessed: 2021-03-01}, + year={2018} +} diff --git a/esmvaltool/references/zhai15grl.bibtex b/esmvaltool/references/zhai15grl.bibtex new file mode 100644 index 0000000000..d3132bf3bf --- /dev/null +++ b/esmvaltool/references/zhai15grl.bibtex @@ -0,0 +1,13 @@ +@article{zhai15grl, + doi = {10.1002/2015gl065911}, + url = {https://doi.org/10.1002/2015gl065911}, + year = {2015}, + month = oct, + publisher = {American Geophysical Union ({AGU})}, + volume = {42}, + number = {20}, + pages = {8729--8737}, + author = {Chengxing Zhai and Jonathan H. 
+  title = {Long-term cloud change imprinted in seasonal cloud variation: More evidence of high climate sensitivity},
+  journal = {Geophysical Research Letters}
+}
diff --git a/esmvaltool/references/zhang11wcc.bibtex b/esmvaltool/references/zhang11wcc.bibtex
new file mode 100644
index 0000000000..76e5547b36
--- /dev/null
+++ b/esmvaltool/references/zhang11wcc.bibtex
@@ -0,0 +1,13 @@
+@article{zhang11wcc,
+  doi = {10.1002/wcc.147},
+  url = {https://doi.org/10.1002%2Fwcc.147},
+  year = 2011,
+  month = {oct},
+  publisher = {Wiley},
+  volume = {2},
+  number = {6},
+  pages = {851--870},
+  author = {Xuebin Zhang and Lisa Alexander and Gabriele C. Hegerl and Philip Jones and Albert Klein Tank and Thomas C. Peterson and Blair Trewin and Francis W. Zwiers},
+  title = {Indices for monitoring changes in extremes based on daily temperature and precipitation data},
+  journal = {Wiley Interdisciplinary Reviews: Climate Change}
+}
diff --git a/esmvaltool/utils/batch-jobs/generate.py b/esmvaltool/utils/batch-jobs/generate.py
new file mode 100644
index 0000000000..428229b6eb
--- /dev/null
+++ b/esmvaltool/utils/batch-jobs/generate.py
@@ -0,0 +1,334 @@
+"""Generate SLURM run scripts to run every recipe.
+
+To use this script, follow these steps:
+1) Edit the following parameters:
+- env
+- mail
+- submit. Try the script with `submit = False` before submitting any jobs.
+- account
+- conda_path
+2) If needed, edit optional parameters:
+- outputs
+- config_dir
+3) SLURM settings: this script is configured to optimize the computing
+footprint of the recipe testing, so it should not be necessary to edit
+the SLURM-related parameters.
+4) If new memory-intensive recipes have been merged since the last
+release (e.g. IPCC recipes), you may need to add them to
+`SPECIAL_RECIPES` and/or `MAX_PARALLEL_TASKS`.
+5) Check the generation of the batch scripts with `submit = False`.
+Once the batch scripts are correct, change to `submit = True` and rerun
+the script to submit all jobs to the SLURM scheduler.
+"""
+import os
+import subprocess
+import textwrap
+from pathlib import Path
+
+import esmvaltool
+
+# Name of the conda environment in which esmvaltool is installed
+env = 'coretool26rc4'
+# Whether to send mail notifications when a submitted job fails or finishes
+mail = False
+# Whether to submit the generated scripts to SLURM via sbatch
+submit = False
+# Name of the DKRZ account to which the job will be billed
+account = ''  # Select a compute project to be billed
+# Name of the directory in which the job output files are saved.
+# The output files will be written to this directory under your home
+# directory.
+outputs = 'output_rc4'
+# Default Levante computing partition used
+partition = 'interactive'
+# Default amount of memory used
+memory = '64G'
+# Default walltime
+time = '04:00:00'
+# Full path to the conda initialization script, i.e.
+# miniforge3/etc/profile.d/conda.sh, which is sourced by each job
+conda_path = 'PATH_TO/miniforge3/etc/profile.d/conda.sh'
+# Full path to the configuration directory
+# If empty, ~/.config/esmvaltool/ is used
+config_dir = ''
+# Default max_parallel_tasks
+# If None, the value is read from the configuration
+default_max_parallel_tasks = 8

+# List of recipes that require non-default SLURM options set above
+SPECIAL_RECIPES = {
+    'recipe_anav13jclim': {
+        'partition': '#SBATCH --partition=compute \n',
+        'time': '#SBATCH --time=08:00:00 \n',
+    },
+    'recipe_bock20jgr_fig_6-7': {
+        'partition': '#SBATCH --partition=shared \n',
+        'time': '#SBATCH --time=48:00:00 \n',
+        'memory': '#SBATCH --mem=50G \n',
+    },
+    'recipe_bock20jgr_fig_8-10': {
+        'partition': '#SBATCH --partition=shared \n',
+        'time': '#SBATCH --time=48:00:00 \n',
+        'memory': '#SBATCH --mem=50G \n',
+    },
+    'recipe_check_obs': {
+        'partition': '#SBATCH --partition=compute \n',
+        'memory': '#SBATCH --constraint=512G \n',
+    },
+    'recipe_climate_change_hotspot': {
+        'partition': '#SBATCH --partition=compute \n',
+        'memory': '#SBATCH --constraint=512G \n',
+    },
+    'recipe_collins13ipcc': {
+        'partition': '#SBATCH --partition=compute \n',
+        'time': '#SBATCH --time=08:00:00 \n',
+        'memory': '#SBATCH --constraint=512G \n',
+    },
+    'recipe_eady_growth_rate': {
+        'partition': '#SBATCH --partition=compute \n',
+    },
+    'recipe_ecs': {
+        'partition': '#SBATCH --partition=compute \n',
+    },
+    'recipe_ecs_constraints': {
+        'partition': '#SBATCH --partition=compute \n',
+    },
+    'recipe_extreme_index': {
+        'partition': '#SBATCH --partition=compute \n',
+    },
+    'recipe_eyring06jgr': {
+        'partition': '#SBATCH --partition=compute \n',
+    },
+    'recipe_eyring13jgr_12': {
+        'partition': '#SBATCH --partition=compute \n',
+    },
+    'recipe_flato13ipcc_figures_938_941_cmip6': {
+        'partition': '#SBATCH --partition=compute \n',
+    },
+    'recipe_galytska23jgr': {
+        'partition': '#SBATCH --partition=compute \n',
+    },
+    'recipe_ipccwg1ar6ch3_atmosphere': {
+        'partition': '#SBATCH --partition=compute \n',
+    },
+    'recipe_ipccwg1ar6ch3_fig_3_9': {
+        'partition': '#SBATCH --partition=shared \n',
+        'time': '#SBATCH --time=15:00:00 \n',
+        'memory': '#SBATCH --mem=150G \n',
+    },
+    'recipe_ipccwg1ar6ch3_fig_3_19': {
+        'partition': '#SBATCH --partition=compute \n',
+    },
+    'recipe_ipccwg1ar6ch3_fig_3_42_a': {
+        'partition': '#SBATCH --partition=compute \n',
+        'time': '#SBATCH --time=08:00:00 \n',
+        'memory': '#SBATCH --constraint=512G \n',
+    },
+    'recipe_ipccwg1ar6ch3_fig_3_42_b': {
+        'partition': '#SBATCH --partition=compute \n',
+    },
+    'recipe_ipccwg1ar6ch3_fig_3_43': {
+        'partition': '#SBATCH --partition=compute \n',
+        'time': '#SBATCH --time=08:00:00 \n',
+    },
+    'recipe_lauer22jclim_fig3-4_zonal': {
+        'partition': '#SBATCH --partition=compute \n',
+    },
+    'recipe_lauer22jclim_fig5_lifrac': {
+        'partition': '#SBATCH --partition=compute \n',
+    },
+    'recipe_meehl20sciadv': {
+        'partition': '#SBATCH --partition=compute \n',
+    },
+    'recipe_mpqb_xch4': {
+        'partition': '#SBATCH --partition=compute \n',
+        'memory': '#SBATCH --constraint=512G \n',
+    },
+    'recipe_perfmetrics_CMIP5': {
+        'partition': '#SBATCH --partition=compute \n',
+    },
+    'recipe_perfmetrics_CMIP5_4cds': {
+        'partition': '#SBATCH --partition=compute \n',
+    },
+    'recipe_perfmetrics_land_CMIP5': {
+        'partition': '#SBATCH --partition=compute \n',
+    },
+    'recipe_russell18jgr': {
+        'partition': '#SBATCH --partition=compute \n',
+    },
+    'recipe_schlund20esd': {
+        'partition': '#SBATCH --partition=compute \n',
+        'time': '#SBATCH --time=08:00:00 \n',
+        'memory': '#SBATCH --constraint=512G \n',
+    },
+    'recipe_schlund20jgr_gpp_abs_rcp85': {
+        'partition': '#SBATCH --partition=compute \n',
+    },
+    'recipe_schlund20jgr_gpp_change_1pct': {
+        'partition': '#SBATCH --partition=compute \n',
+    },
+    'recipe_schlund20jgr_gpp_change_rcp85': {
+        'partition': '#SBATCH --partition=compute \n',
+    },
+    'recipe_sea_surface_salinity': {
+        'partition': '#SBATCH --partition=compute \n',
+    },
+    'recipe_smpi': {
+        'partition': '#SBATCH --partition=compute \n',
+    },
+    'recipe_smpi_4cds': {
+        'partition': '#SBATCH --partition=compute \n',
+    },
+    'recipe_tebaldi21esd': {
+        'partition': '#SBATCH --partition=compute \n',
+        'time': '#SBATCH --time=08:00:00 \n',
+    },
+    'recipe_thermodyn_diagtool': {
+        'partition': '#SBATCH --partition=compute \n',
+    },
+    'recipe_wflow': {
+        'partition': '#SBATCH --partition=compute \n',
+    },
+    'recipe_wenzel16jclim': {
+        'partition': '#SBATCH --partition=compute \n',
+    },
+    'recipe_wenzel16nat': {
+        'partition': '#SBATCH --partition=compute \n',
+    },
+}

+# These recipes cannot be run with the default number of parallel tasks
+# (max_parallel_tasks=8): they either use CMIP3 input data
+# (see https://github.com/ESMValGroup/ESMValCore/issues/430)
+# or require a large fraction of the compute node memory.
+MAX_PARALLEL_TASKS = {
+    'recipe_bock20jgr_fig_1-4': 1,
+    'recipe_bock20jgr_fig_6-7': 1,
+    'recipe_bock20jgr_fig_8-10': 1,
+    'recipe_daily_era5': 1,
+    'recipe_easy_ipcc': 1,
+    'recipe_climate_change_hotspot': 1,
+    'recipe_flato13ipcc_figure_96': 1,
+    'recipe_flato13ipcc_figures_938_941_cmip3': 1,
+    'recipe_ipccwg1ar6ch3_fig_3_9': 1,
+    'recipe_ipccwg1ar6ch3_fig_3_42_a': 1,
+    'recipe_ipccwg1ar6ch3_fig_3_43': 1,
+    'recipe_check_obs': 1,
+    'recipe_collins13ipcc': 1,
+    'recipe_lauer22jclim_fig3-4_zonal': 1,
+    'recipe_lauer22jclim_fig5_lifrac': 1,
+    'recipe_smpi': 1,
+    'recipe_smpi_4cds': 1,
+    'recipe_wenzel14jgr': 1,
+}

+DISTRIBUTED_RECIPES = [
+    'recipe_easy_ipcc',
+    'recipe_daily_era5',
+    'recipe_marrmot',
+    'recipe_pcrglobwb',
+]


+def generate_submit():
+    """Generate and submit scripts."""
+    print("It is recommended to run the following recipes with the Dask "
+          "configuration written to dask.yml, e.g. after copying it to "
+          "~/.esmvaltool/dask.yml:")
+    default_dask_config_file = textwrap.dedent(f"""
+        cluster:
+          type: dask_jobqueue.SLURMCluster
+          queue: compute
+          account: {account}
+          cores: 128
+          memory: 256GiB
+          processes: 32
+          interface: ib0
+          local_directory: /scratch/{os.getlogin()[0]}/{os.getlogin()}/dask-tmp
+          n_workers: 32
+          walltime: '8:00:00'
+        """)
+    for recipe in DISTRIBUTED_RECIPES:
+        print(f"- {recipe}.yml")
+    Path('dask.yml').write_text(default_dask_config_file, encoding='utf-8')

+    home = os.path.expanduser('~')
+    # List of recipes to exclude from the run, e.g. because they rely on
+    # datasets that are currently missing
+    exclude = [
+        'recipe_schlund20jgr_gpp_abs_rcp85',
+        'recipe_schlund20jgr_gpp_change_1pct',
+        'recipe_schlund20jgr_gpp_change_rcp85'
+    ]
+    dir_recipes = Path(esmvaltool.__path__[0], 'recipes')
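+
+    # For illustration only: the loop below writes one launch script per
+    # recipe. With the default settings above, a recipe without special
+    # requirements (say, a hypothetical recipe_example.yml; account, home
+    # directory, and conda path depend on your configuration) would get a
+    # launch_recipe_example.sh roughly like:
+    #
+    #   #!/bin/bash -l
+    #
+    #   #SBATCH --job-name=recipe_example.%J
+    #   #SBATCH --output=/home/user/output_rc4/recipe_example.%J.out
+    #   #SBATCH --error=/home/user/output_rc4/recipe_example.%J.err
+    #   #SBATCH --account=
+    #   #SBATCH --partition=interactive
+    #   #SBATCH --time=04:00:00
+    #   #SBATCH --mem=64G
+    #
+    #   set -eo pipefail
+    #   unset PYTHONPATH
+    #
+    #   . PATH_TO/miniforge3/etc/profile.d/conda.sh
+    #   conda activate coretool26rc4
+    #
+    #   esmvaltool run recipe_example.yml --max_parallel_tasks=8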
+
+    for recipe in dir_recipes.rglob('*.yml'):
+        recipe = recipe.relative_to(dir_recipes)
+        filename = f'launch_{recipe.stem}.sh'
+        if recipe.stem in exclude:
+            continue
+        with open(filename, 'w', encoding='utf-8') as file:
+            file.write('#!/bin/bash -l \n')
+            file.write('\n')
+            file.write(f'#SBATCH --job-name={recipe.stem}.%J\n')
+            file.write(
+                f'#SBATCH --output={home}/{outputs}/{recipe.stem}.%J.out\n')
+            file.write(
+                f'#SBATCH --error={home}/{outputs}/{recipe.stem}.%J.err\n')
+            file.write(f'#SBATCH --account={account}\n')
+            if recipe.stem not in SPECIAL_RECIPES:
+                file.write(f'#SBATCH --partition={partition}\n')
+                file.write(f'#SBATCH --time={time}\n')
+                file.write(f'#SBATCH --mem={memory}\n')
+            else:
+                file.write(SPECIAL_RECIPES[recipe.stem]['partition'])
+                # Time requirements
+                # Special time requirements
+                if 'time' in SPECIAL_RECIPES[recipe.stem]:
+                    file.write(SPECIAL_RECIPES[recipe.stem]['time'])
+                # Default
+                else:
+                    file.write(f'#SBATCH --time={time}\n')
+                # Memory requirements
+                # Full node memory (compute partition)
+                if 'compute' in SPECIAL_RECIPES[recipe.stem]['partition']:
+                    mem_req_levante = '#SBATCH --mem=0\n'
+                    file.write(mem_req_levante)
+                    if 'memory' in SPECIAL_RECIPES[recipe.stem]:
+                        file.write(SPECIAL_RECIPES[recipe.stem]['memory'])
+                # Shared nodes (other partitions)
+                else:
+                    # Special memory requirements
+                    if 'memory' in SPECIAL_RECIPES[recipe.stem]:
+                        file.write(SPECIAL_RECIPES[recipe.stem]['memory'])
+                    # Default
+                    else:
+                        file.write(f'#SBATCH --mem={memory}\n')
+            if mail:
+                file.write('#SBATCH --mail-type=FAIL,END \n')
+            file.write('\n')
+            file.write('set -eo pipefail \n')
+            file.write('unset PYTHONPATH \n')
+            file.write('\n')
+            file.write(f'. {conda_path}\n')
+            file.write(f'conda activate {env}\n')
+            file.write('\n')
+            if not config_dir:
+                file.write(f'esmvaltool run {recipe}')
+            else:
+                file.write(f'esmvaltool run --config_dir '
+                           f'{config_dir} {recipe}')
+            # Set max_parallel_tasks
+            max_parallel_tasks = MAX_PARALLEL_TASKS.get(
+                recipe.stem,
+                default_max_parallel_tasks,
+            )
+            file.write(f' --max_parallel_tasks={max_parallel_tasks}\n')
+        if submit:
+            subprocess.check_call(['sbatch', filename])


+if __name__ == '__main__':
+    generate_submit()
diff --git a/esmvaltool/utils/batch-jobs/job_DKRZ-MISTRAL.sh b/esmvaltool/utils/batch-jobs/job_DKRZ-MISTRAL.sh
deleted file mode 100755
index 125bd54fc7..0000000000
--- a/esmvaltool/utils/batch-jobs/job_DKRZ-MISTRAL.sh
+++ /dev/null
@@ -1,30 +0,0 @@
-
-#!/bin/bash -e
-###############################################################################
-### BATCH SCRIPT TO RUN THE ESMVALTOOL AT DKRZ MISTRAL
-### Author: Mattia Righi (DLR)
-###############################################################################
-#SBATCH --partition=compute
-#SBATCH --ntasks=8
-#SBATCH --time=08:00:00
-#SBATCH --mail-type=FAIL,END
-#SBATCH --account=bd0854
-#SBATCH --output=job_%j.out.log
-#SBATCH --error=job_%j.err.log
-###############################################################################
-
-# Submit job with: sbatch job_DKRZ-MISTRAL.sh
-
-# Input arguments
-RECIPE=recipe_perfmetrics_CMIP5.yml
-CONFIG=config-user.yml
-
-# Set environment
-CONDAPATH= # e.g. /home/soft/miniconda3/
-CONDAENV= # e.g. $CONDAPATH/envs/esmvaltool/bin
-ESMVALPATH= # e.g. /home/ESMValTool/esmvaltool
-
-# Changes below this line should not be required
-export PATH=$PATH:$CONDAPATH/bin/
-conda info --envs
-$CONDAENV/esmvaltool $ESMVALPATH/recipes/$RECIPE -c $ESMVALPATH/$CONFIG
diff --git a/esmvaltool/utils/batch-jobs/job_PA2-CLUSTER.sh b/esmvaltool/utils/batch-jobs/job_PA2-CLUSTER.sh
deleted file mode 100644
index 42c2937910..0000000000
--- a/esmvaltool/utils/batch-jobs/job_PA2-CLUSTER.sh
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/bin/bash -e
-###############################################################################
-### BATCH SCRIPT TO RUN THE ESMVALTOOL AT THE DLR PA2-CLUSTER
-### Author: Mattia Righi (DLR)
-###############################################################################
-################# shell to use
-#PBS -S /bin/sh
-################# export all environment variables to job-script
-#PBS -V
-################# name of the log file
-#PBS -o ./$PBS_JOBNAME.$PBS_JOBID.log
-################# join standard and error stream (oe, eo) ?
-#PBS -j oe
-################# do not rerun job if system failure occurs
-#PBS -r n
-################# send e-mail when [(a)borting|(b)eginning|(e)nding] job
-#PBS -m ae
-#PBS -M your.email@here
-################# ressources (nodes, optional: number of cores; max. runtime)
-#PBS -l nodes=1:ppn=1
-#PBS -l walltime=24:00:00
-###############################################################################
-
-# Submit job with: qsub -q job_PA2-CLUSTER.sh
-
-# Input arguments
-RECIPE=recipe_perfmetrics_CMIP5.yml
-CONFIG=config-user.yml
-
-# Set environment
-CONDAPATH= # e.g. /home/soft/miniconda3/
-CONDAENV= # e.g. $CONDAPATH/envs/esmvaltool/bin
-ESMVALPATH= # e.g. /home/ESMValTool/esmvaltool
-
-# Changes below this line should not be required
-export PATH=$PATH:$CONDAPATH/bin/
-conda info --envs
-module load ncl
-$CONDAENV/esmvaltool $ESMVALPATH/recipes/$RECIPE -c $ESMVALPATH/$CONFIG
diff --git a/esmvaltool/utils/batch-jobs/parse_recipes_output.py b/esmvaltool/utils/batch-jobs/parse_recipes_output.py
new file mode 100644
index 0000000000..7daf202924
--- /dev/null
+++ b/esmvaltool/utils/batch-jobs/parse_recipes_output.py
@@ -0,0 +1,126 @@
+"""Parse recipe run output.
+
+Parse typical batch job output files like .out and .err to identify
+recipes that have succeeded or failed; display the results in a
+convenient Markdown format, to be added to a GitHub issue or any other
+such documentation. Example usage (the command-line interface is
+provided by ``fire``):
+
+    python parse_recipes_output.py --slurm_out_dir=~/output_rc4 > report.md
+"""
+import datetime
+import os
+import re
+from pathlib import Path
+from typing import Iterator

+import fire


+def parse_slurm_output(dirname: str, pattern: str) -> Iterator[Path]:
+    """Find SLURM output files.
+
+    Glob dirname/pattern, where dirname is the directory where the
+    SLURM output is stored and pattern is the output file pattern,
+    e.g. '*.out'. Returns all files in dirname that match pattern.
+    """
+    return Path(dirname).expanduser().glob(pattern)


+def parse_output_file(slurm_out_dir: str) -> dict[str, list[str]]:
+    """Parse .out and .err files in a given dir.
+
+    Returns a dict that maps each result category ('success',
+    'diagnostic error', 'missing data', 'out of memory', 'out of time',
+    'unknown') to a sorted list of the recipes that fall into it.
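+
+    For example (recipe names purely illustrative):
+
+        {
+            'success': ['recipe_albedolandcover.yml'],
+            'diagnostic error': ['recipe_eady_growth_rate.yml'],
+            'missing data': [],
+            'out of memory': ['recipe_smpi.yml'],
+            'out of time': [],
+            'unknown': [],
+        }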
+ """ + categories = [ + 'success', + 'diagnostic error', + 'missing data', + 'out of memory', + 'out of time', + 'unknown', + ] + results: dict[str, list[str]] = {k: [] for k in categories} + + files = parse_slurm_output(slurm_out_dir, '*.out') + for file in files: + recipe = str(Path(file.stem).with_suffix('.yml')) + with open(file, "r", encoding='utf-8') as outfile: + lines = outfile.readlines() + for line in lines: + if "Run was successful\n" in line: + results['success'].append(recipe) + break + elif "esmvalcore._task.DiagnosticError" in line: + results['diagnostic error'].append(recipe) + break + elif "ERROR Missing data for preprocessor" in line: + results['missing data'].append(recipe) + break + else: + if not file.with_suffix('.err').exists(): + results['unknown'].append(recipe) + else: + err = file.with_suffix('.err').read_text(encoding='utf-8') + if "killed by the cgroup out-of-memory" in err: + results['out of memory'].append(recipe) + elif "step tasks have been OOM Killed" in err: + results['out of memory'].append(recipe) + elif re.match(".* CANCELLED AT .* DUE TO TIME LIMIT", err): + results['out of time'].append(recipe) + else: + results['unknown'].append(recipe) + + results = {k: sorted(v) for k, v in results.items()} + + return results + + +def display_in_md( + slurm_out_dir: str = '.', + all_recipes_file: str = 'all_recipes.txt', +) -> None: + """Print out recipes in Markdown list. + + Parameters + ---------- + slurm_out_dir: + Directory where SLURM output files (.out and .err) are written to. + + all_recipes_file: + Text file containing a list of all recipes. + """ + todaynow = datetime.datetime.now() + print(f"## Recipe running session {todaynow}\n") + with open(all_recipes_file, "r", encoding='utf-8') as file: + all_recipes = [ + os.path.basename(line.strip()) for line in file.readlines() + ] + n_recipes = len(all_recipes) + + results = parse_output_file(slurm_out_dir) + results["no run"] = sorted( + set(all_recipes) - set(recipe for v in results.values() + for recipe in v)) + prefix = "Recipes that" + err_prefix = f"{prefix} failed because" + messages = { + "success": f"{prefix} ran successfully", + "diagnostic error": f"{err_prefix} the diagnostic script failed", + "missing data": f"{err_prefix} of missing data", + "out of time": f"{err_prefix} the run took too long", + "out of memory": f"{err_prefix} they used too much memory", + "unknown": f"{prefix} failed of other reasons or are still running", + "no run": f"{prefix} never ran", + } + for type_, msg in messages.items(): + result = results[type_] + if result: + print(f"### {msg} ({len(result)} out of {n_recipes})") + for recipe in result: + print(f"- {recipe}") + print() + + +if __name__ == '__main__': + fire.Fire(display_in_md) diff --git a/esmvaltool/utils/cmorizers/mip_convert/config-mipconv-user.yml b/esmvaltool/utils/cmorizers/mip_convert/config-mipconv-user.yml deleted file mode 100644 index 93362f92d7..0000000000 --- a/esmvaltool/utils/cmorizers/mip_convert/config-mipconv-user.yml +++ /dev/null @@ -1,22 +0,0 @@ -############################################################################### -# User's configuration file for the ESMValTool with mip_convert -# For further details see the README document; current sections are -# mandatory and should be populated with valid entries. -# Author: V. 
Predoi / UREAD / November 2018 -############################################################################### ---- -# root to directory where mip_convert rose suites will be run -# make this different than your usual /roses/ dir -ROSES_ROOT: "/home/users/$USER/roses_mipconv" -# root to directory where mip_convert rose suites will write output -ROSES_OUTPUT: "/home/users/$USER/roses_mipconv_output" -# map dataset name to relevant UM suite -DATASET_TO_SUITE: {"UKESM1-0-LL": "u-ar766a"} -# map variable standard name to stream definition -STREAM_MAP: {"ps": "ap4", "ta": "ap4", "va": "ap4", "ua": "ap5", "mrsos": "ap5", "toz":"apm"} -# root directory where PP data lives -# this directory is in Jasmin/Archer structure; this one here -# is an actual directory with data -INPUT_DIR: "/group_workspaces/jasmin4/ncas_cms/valeriu/MASS_DATA" -# map streams to realm components -STREAM_COMPONENTS: {"ap4": ["atmos-physics", "land"], "apm": ["atmos-physics"], "ap5": ["land"]} diff --git a/esmvaltool/utils/cmorizers/mip_convert/esmvt_mipconv_setup.py b/esmvaltool/utils/cmorizers/mip_convert/esmvt_mipconv_setup.py deleted file mode 100644 index bf699f23ee..0000000000 --- a/esmvaltool/utils/cmorizers/mip_convert/esmvt_mipconv_setup.py +++ /dev/null @@ -1,532 +0,0 @@ -""" -Run the first communication between esmvaltool's recipe and mip_convert. - -Description: ------------- - -This script sets up the correct rose suite directories to run mip_convert -on different UM suite data. You can run this tool in three different ways: - - (with -m --mode option) setup-only: will set up the mip convert rose - directories only; it will use the -c configuration file for user options; - - (with -m --mode option) setup-run-suites: will set up the mip convert rose - suites and will go ahead and submit them to cylc via rose suite-run; - - (with -m --mode option) postproc: will symlink newly created netCDF data - into a directory per esmvaltool recipe; note that for now, there is no - DRS-like path set up in that directory; - -Usage: ------- --c --config-file: [REQUIRED] user specific configuration file; --r --recipe-file: [REQUIRED] single or multiple (space-sep) recipe files; --m --mode: [OPTIONAL] running mode (setup-only, setup-run-suites, - postproc), default=setup-only --l --log-level: [OPTIONAL] log level, default=info - -Environment ------------ -current JASMIN rose/cyclc need python2.7; esmvaltool needs python3.x -So it is impossible at the moment to run this script as executable from an -esmvaltool environment. Instead, you can run it as a stand-alone tool in a -python 2.7 environment, intwo stages: - -[set up mip_convert suites and run them] -python esmvt_mipconv_setup.py -c config.yml -r recipe.yml -m setup-run-suites -[check succesful completion of mip_convert suites] -[run the symlinking] -python esmvt_mipconv_setup.py -c config.yml -r recipe.yml -m postproc - -A practical example of running the tool can be found on JASMIN: -/home/users/valeriu/esmvaltool_mip_convert -There you will find the two component shells: run_conversion -and run_symlink, as well as an example how to set the configuration file. - -The suite used is now on MOSRS (as of 3 December 2018): u-bd681 -You can use the default location on Jasmin: -DEFAULT_SUITE_LOCATION = "/home/users/valeriu/roses/u-bd681" -alternatively this can be turned off, should you want to check out the suite -off MOSRS and use it locally. 
- -Contact: --------- -author: Valeriu Predoi (UREAD, valeriu.predoi@ncas.ac.uk) -""" -import argparse -import datetime -import logging -import os -import sys -import shutil -import subprocess -import socket -from distutils.version import LooseVersion -# configparser has changed names in python 3.x -if LooseVersion(sys.version) < LooseVersion("3.0"): - import ConfigParser -else: - import configparser as ConfigParser -import yaml # noqa - -#################### -# global variables # -#################### - -# the tool uses a specially tailored mip_convert Rose suite -# locations of the suite depends on the host -host_name = socket.gethostname().split('.') -if len(host_name) > 1: - if host_name[1] == 'ceda': - # default location for mip_convert suite on JASMIN: - # previous suite: u-ak283_esmvt; new one u-bd681 - # DEFAULT_SUITE_LOCATION = "/home/users/valeriu/roses/u-ak283_esmvt" - DEFAULT_SUITE_LOCATION = "/home/users/valeriu/roses/u-bd681" - # note that you can fcm checkout it straight from the MOSRS - -# stream mapping; taken from hadsdk.streams -# these are used to set defaults if not overrides -STREAM_MAP = { - 'CMIP5': { - '3hr': 'apk', - '6hrPlev': 'apc', - '6hrlev': 'apg', - 'Amon': 'apm', - 'Lmon': 'apm', - 'LImon': 'apm', - 'Oday': 'opa', - 'Omon': 'opm', - 'Oyr': 'opy', - 'CF3hr': 'apk', - 'CFday': 'apa', - 'CFmon': 'apm', - 'CFsubhr': 'ape', - 'day': 'apa' - }, - 'CMIP6': { - '3hr': 'ap8', - '6hrLev': 'ap7', - '6hrPlev': 'ap7', - '6hrPlevPt': 'ap7', - 'AERday': 'ap6', - 'AERhr': 'ap9', - 'AERmon': 'ap4', - 'AERmonZ': 'ap4', - 'Amon': 'ap5', - 'CF3hr': 'ap8', - 'CFday': 'ap6', - 'CFmon': 'ap5', - 'E1hr': 'ap9', - 'E1hrClimMon': 'ap9', - 'E3hr': 'ap8', - 'E3hrPt': 'ap8', - 'E6hrZ': 'ap7', - 'Eday': 'ap6', - 'EdayZ': 'ap6', - 'Efx': 'ancil', - 'Emon': 'ap5', - 'EmonZ': 'ap5', - 'Esubhr': 'ap8', - 'Eyr': 'ap5', - 'LImon': 'ap5', - 'Lmon': 'ap5', - 'Oday': 'ond', - 'Ofx': 'ancil', - 'Omon': 'onm', - 'SIday': 'ind', - 'SImon': 'inm', - 'day': 'ap6', - 'fx': 'ancil', - 'prim1hrpt': 'ap9', - 'prim3hr': 'ap8', - 'prim3hrpt': 'ap8', - 'prim6hr': 'ap7', - 'prim6hrpt': 'ap7', - 'primDay': 'ap6', - 'primMon': 'ap5', - 'primSIday': 'ap6' - } -} - -# set up logging -logger = logging.getLogger(__name__) - -# print the header -HEADER = r""" -______________________________________________________________________ - - ESMValTool + mip_convert: linking mip_convert to ESMValTool -______________________________________________________________________ - -""" + __doc__ - - -def get_args(): - """Define the `esmvaltool` command line.""" - # parse command line args - parser = argparse.ArgumentParser( - description=HEADER, - formatter_class=argparse.RawDescriptionHelpFormatter) - parser.add_argument( - '-c', - '--config-file', - default=os.path.join(os.path.dirname(__file__), 'config-user.yml'), - help='Configuration file') - parser.add_argument( - '-r', - '--recipe-files', - type=str, - nargs='+', - help='Recipe files (list or single file)') - parser.add_argument( - '-m', - '--mode', - default='setup-only', - choices=['setup-only', 'setup-run-suites', 'postproc'], - help='How to run: setup: sets up mipconvert suites only;\n' + - 'or setup-run-suites: sets up suites and runs them as well;\n' + - 'or postproc: grab the output from mip_convert and use it.') - parser.add_argument( - '-l', - '--log-level', - default='info', - choices=['debug', 'info', 'warning', 'error']) - args = parser.parse_args() - return args - - -def _set_logger(logging, out_dir, log_file, log_level): - # set logging for screen and file 
output - root_logger = logging.getLogger() - out_fmt = "%(asctime)s %(levelname)-8s %(name)s,%(lineno)s\t%(message)s" - logging.basicConfig( - filename=os.path.join(out_dir, log_file), - filemode='a', - format=out_fmt, - datefmt='%H:%M:%S', - level=logging.DEBUG) - root_logger.setLevel(log_level.upper()) - logfmt = logging.Formatter(out_fmt) - console_handler = logging.StreamHandler() - console_handler.setFormatter(logfmt) - root_logger.addHandler(console_handler) - - -def read_yaml_file(yaml_file): - """Read recipe into a dictionary.""" - with open(yaml_file, 'r') as yfile: - loaded_file = yaml.safe_load(yfile) - return loaded_file - - -def map_var_to_stream(diagnostics, stream_map): - """Map variable standard name to stream string.""" - stream_list = [] - for _, diag in diagnostics.items(): - for var in diag['variables']: - stream = stream_map[var] - stream_list.append(stream) - stream_list = list(set(stream_list)) - return stream_list - - -def write_rose_conf(rose_config_template, recipe_file, config_file, log_level): - """Write the new rose conf file per suite.""" - # Build the ConfigParser object - Config = ConfigParser.ConfigParser() - Config.optionxform = str - Config.read(rose_config_template) - recipe_object = read_yaml_file(recipe_file) - conf_file = read_yaml_file(config_file) - datasets = recipe_object['datasets'] - - # check if dataset needs analysis - datasets_to_analyze = [] - for dataset in datasets: - if dataset['dataset'] not in conf_file['DATASET_TO_SUITE']: - logger.warning("Dataset %s has no mapping to suite", - dataset['dataset']) - logger.warning("Assuming data retrival from elsewhere.") - else: - datasets_to_analyze.append(dataset) - diagnostics = recipe_object['diagnostics'] - active_streams = map_var_to_stream(diagnostics, conf_file['STREAM_MAP']) - - # set stream overrides to None and set components - # also set CYCLING_FREQUENCIES to P1Y overall - stream_overrides = {} - stream_components = {} - cycling_frequencies = {} - for stream in active_streams: - stream_overrides[stream] = 'None' - stream_components[stream] = conf_file['STREAM_COMPONENTS'][stream] - cycling_frequencies[stream] = 'P1Y' - - # set the logger to start outputting - if not os.path.exists(conf_file['ROSES_OUTPUT']): - os.makedirs(conf_file['ROSES_OUTPUT']) - _set_logger(logging, conf_file['ROSES_OUTPUT'], 'rose_suites_setup.log', - log_level) - logger.info(HEADER) - - # store the rose suite locations - rose_suite_locations = [] - - # loop through datasets (different suites for different datasets) - for dataset in datasets_to_analyze: - - # set correct paths - rose_suite = os.path.join( - conf_file['ROSES_ROOT'], - conf_file['DATASET_TO_SUITE'][dataset['dataset']]) - rose_suite_locations.append(rose_suite) - rose_output = os.path.join( - conf_file['ROSES_OUTPUT'], - conf_file['DATASET_TO_SUITE'][dataset['dataset']]) - if os.path.exists(rose_suite): - shutil.rmtree(rose_suite) - if os.path.exists(DEFAULT_SUITE_LOCATION): - shutil.copytree(DEFAULT_SUITE_LOCATION, rose_suite) - else: - logger.error("Default Suite Location not found: %s", - DEFAULT_SUITE_LOCATION) - break - if not os.path.exists(rose_output): - os.makedirs(rose_output) - new_mipconv_config = os.path.join(rose_suite, 'mip_convert_config') - - # start logging - logger.info("Working on dataset: %s", dataset) - logger.info("Mapping dataset to suite: %s", rose_suite) - logger.info("Output and logs written to: %s", rose_output) - logger.info("Creating rose suite directories...") - logger.info("Use rose-suite.conf template %s", 
rose_config_template) - logger.info("Use user config file %s", config_file) - - # write the file - Config.set('jinja2:suite.rc', 'INPUT_DIR', - '"' + conf_file['INPUT_DIR'] + '"') - Config.set('jinja2:suite.rc', 'OUTPUT_DIR', '"' + rose_output + '"') - Config.set('jinja2:suite.rc', 'CDDS_DIR', - '"' + DEFAULT_SUITE_LOCATION + '"') - Config.set('jinja2:suite.rc', 'MIP_CONVERT_CONFIG_DIR', - '"' + new_mipconv_config + '"') - Config.set('jinja2:suite.rc', 'ACTIVE_STREAMS', str(active_streams)) - Config.set('jinja2:suite.rc', 'STREAM_TIME_OVERRIDES', - str(stream_overrides)) - Config.set('jinja2:suite.rc', 'FIRST_YEAR', str(dataset['start_year'])) - Config.set('jinja2:suite.rc', 'REF_YEAR', str(dataset['start_year'])) - Config.set('jinja2:suite.rc', 'FINAL_YEAR', str(dataset['end_year'])) - Config.set('jinja2:suite.rc', 'STREAM_COMPONENTS', - str(stream_components)) - Config.set('jinja2:suite.rc', 'CYCLING_FREQUENCIES', - str(cycling_frequencies)) - Config.set( - 'jinja2:suite.rc', 'TARGET_SUITE_NAME', - '"' + conf_file['DATASET_TO_SUITE'][dataset['dataset']] + '"') - with open(os.path.join(rose_suite, 'rose-suite.conf'), 'w') as r_c: - logger.info("Writing rose-suite.conf file %s", - os.path.join(rose_suite, 'rose-suite.conf')) - Config.write(r_c) - - # now that we have to conf file set up we need to - # edit the mip_convert configuration file with the correct data - for key, values in conf_file['STREAM_COMPONENTS'].items(): - for comp in values: - mipconv_config = os.path.join(new_mipconv_config, - 'mip_convert.cfg.' + comp) - _edit_mip_convert_config(mipconv_config, conf_file, dataset, - key) - - return rose_suite_locations - - -def _edit_mip_convert_config(mipconv_config, conf_file, dataset, stream): - """Edit the mip_convert file for correct runs.""" - # set the correct variables - base_date = str(dataset['start_year']) + '-01-01-00-00-00' - suite_id = conf_file['DATASET_TO_SUITE'][dataset['dataset']] - cdds_dir = os.path.join(DEFAULT_SUITE_LOCATION, 'mip_convert_aux') - - # Build the ConfigParser object - Config = ConfigParser.ConfigParser() - Config.optionxform = str - Config.read(mipconv_config) - - # set the correct fields - Config.set('COMMON', 'cdds_dir', cdds_dir) - Config.set('request', 'base_date', base_date) - Config.set('request', 'suite_id', suite_id) - stream_section = '_'.join(['stream', stream]) - # add the section if not there already - if not Config.has_section(stream_section): - Config.add_section(stream_section) - if 'mip' not in dataset: - # can work without any mip in dataset - # will not take it from diagnostic (will assemble - # all possible mappings instead) - logger.warning("No mip in the recipe dataset section.") - logger.warning("Assigning mapping from default dictionary.") - stream_map_default = STREAM_MAP[dataset['project']] - variables = [] - cmip_types = [] - for key, val in conf_file['STREAM_MAP'].items(): - for key_def, val_def in stream_map_default.items(): - if val == val_def: - cmip_types.append('_'.join([dataset['project'], key_def])) - variables.append(key) - str_variables = ' '.join(list(set([v for v in variables]))) - if variables: - for cmip_type in cmip_types: - Config.set(stream_section, cmip_type, str_variables) - else: - cmip_type = '_'.join([dataset['project'], dataset['mip']]) - all_vars = conf_file['STREAM_MAP'].keys() - str_variables = ' '.join( - [v for v in all_vars if conf_file['STREAM_MAP'][v] == stream]) - Config.set(stream_section, cmip_type, str_variables) - - # write to file - with open(mipconv_config, 'w') as r_c: - 
logger.info("Writing mip_convert config file %s", mipconv_config) - Config.write(r_c) - - -def _put_in_env(env_script): - """Put new system vars in environment.""" - logger.info("Setting environment for suite submission...") - - # First make it executable. - chmod_command = ["chmod", "+x", env_script] - proc = subprocess.Popen(chmod_command, stdout=subprocess.PIPE) - proc.communicate() - logger.info("Script %s is now executable.", env_script) - - # set the environment - for line in open(env_script, 'r'): - if line.split("=")[0] == 'export PATH': - logger.info("Appending %s to path...", - line.split("=")[1].strip("\n")) - add_path = line.split("=")[1].strip("\n").strip(":$PATH") - os.environ["PATH"] += os.pathsep + add_path - elif line.split("=")[0] == 'export PYTHONPATH': - logger.info("Exporting %s as PYTHONPATH...", - line.split("=")[1].strip("\n")) - os.environ["PYTHONPATH"] = line.split("=")[1].strip("\n") - - # print and check - logger.info("New path: %s", str(os.environ["PATH"])) - logger.info("mip_convert PYTHONPATH: %s", str(os.environ["PYTHONPATH"])) - proc = subprocess.Popen(["which", "rose"], stdout=subprocess.PIPE) - out, err = proc.communicate() - logger.info("rose: %s %s", out, err) - proc = subprocess.Popen(["which", "mip_convert"], stdout=subprocess.PIPE) - out, err = proc.communicate() - logger.info("mip_convert: %s %s", out, err) - - -def _source_envs(suite): - """Source relevant environments.""" - # source the Met Office rose/cylc environment - # and the suite specific environment - suite_env = os.path.join(suite, 'env_setup_command_line.sh') # suite env - env_file_mo = os.path.join(suite, 'sourcepaths.sh') # metomi env - _put_in_env(suite_env) - _put_in_env(env_file_mo) - - -def _run_suite(suite): - """Run the mip_convert suite.""" - os.chdir(suite) - logger.info("Submitting suite from %s", suite) - proc = subprocess.Popen(["rose", "suite-run"], stdout=subprocess.PIPE) - out, err = proc.communicate() - logger.info("Rose communications: %s %s", str(out), str(err)) - - -def symlink_data(recipe_file, config_file, log_level): - """Grab the mip_converted output and manage it for ESMValTool.""" - # get configuration and recipe - recipe_object = read_yaml_file(recipe_file) - conf_file = read_yaml_file(config_file) - datasets = recipe_object['datasets'] - - # create directory that stores all the output netCDF files - now = datetime.datetime.utcnow().strftime("%Y%m%d_%H%M%S") - new_subdir = '_'.join((recipe_file.strip('.yml'), now)) - sym_output_dir = os.path.join(conf_file['ROSES_OUTPUT'], - 'mip_convert_symlinks', new_subdir) - if not os.path.exists(sym_output_dir): - os.makedirs(sym_output_dir) - - # set the logger to start outputting - _set_logger(logging, conf_file['ROSES_OUTPUT'], 'file_simlink.log', - log_level) - logger.info(HEADER) - - # loop through all datasets to symlink output - for dataset in datasets: - rose_output = os.path.join( - conf_file['ROSES_OUTPUT'], - conf_file['DATASET_TO_SUITE'][dataset['dataset']]) - logger.info("Working on dataset: %s", dataset) - logger.info("Output and logs written to: %s", rose_output) - - # create the dataset dir - dataset_output = os.path.join(sym_output_dir, dataset['dataset']) - if os.path.exists(dataset_output): - shutil.rmtree(dataset_output) - os.makedirs(dataset_output) - - # loop through files - for root, _, files in os.walk(rose_output): - for xfile in files: - real_file = os.path.join(root, xfile) - imag_file = os.path.join(dataset_output, xfile) - - # symlink it if nc file - if real_file.endswith('.nc') and \ - 
xfile.split('_')[2] == dataset['dataset']: - if not os.path.islink(imag_file): - logger.info("File to symlink: %s", real_file) - logger.info("Symlinked file: %s", imag_file) - os.symlink(real_file, imag_file) - else: - logger.info("Symlinked file exists...") - logger.info("Original file: %s", real_file) - logger.info("Symlinked file: %s", imag_file) - - -def main(): - """Run the the meat of the code.""" - logger.info("Running main function...") - args = get_args() - rose_config_template = os.path.join( - os.path.dirname(__file__), "rose-suite-template.conf") - - # make sure the file is retrieved nonetheless - if not os.path.isfile(rose_config_template): - logger.info("Fetching rose template config from suite %s", - DEFAULT_SUITE_LOCATION) - rose_config_template = os.path.join(DEFAULT_SUITE_LOCATION, - "rose-suite-template.conf") - - recipe_files = args.recipe_files - config_file = args.config_file - log_level = args.log_level - for recipe_file in recipe_files: - if args.mode == 'setup-only': - # set up the rose suites - write_rose_conf(rose_config_template, recipe_file, config_file, - log_level) - elif args.mode == 'setup-run-suites': - # setup roses - roses = write_rose_conf(rose_config_template, recipe_file, - config_file, log_level) - # set up the environment and submit - for rose in roses: - _source_envs(rose) - _run_suite(rose) - elif args.mode == 'postproc': - symlink_data(recipe_file, config_file, log_level) - - -if __name__ == '__main__': - main() diff --git a/esmvaltool/utils/cmorizers/mip_convert/recipe_mip_convert.yml b/esmvaltool/utils/cmorizers/mip_convert/recipe_mip_convert.yml deleted file mode 100644 index 8d5168a975..0000000000 --- a/esmvaltool/utils/cmorizers/mip_convert/recipe_mip_convert.yml +++ /dev/null @@ -1,51 +0,0 @@ -#### summary -# Example of ESMValTool recipe that can be used with the mip_convert capability -# Data for this recipe exists in pp format on JASMIN, ready for mip_convert-ion -# The recipe is no different than any typical ESMValTool recipes, but can be used -# for a test run of mip_convert capability; see the README document and the included -# config-mipconv-user.yml configuration file. -# Author: V. 
Predoi (Uni Reading, valeriu.predoi@ncas.ac.uk) -# Date: first draft/November 2018 -########################################################################################################### ---- - -datasets: - - {dataset: UKESM1-0-LL, project: CMIP6, mip: Amon, exp: piControl-spinup, ensemble: r1i1p1f1_gn, start_year: 1850, end_year: 1860} - -preprocessors: - pp_rad: - regrid: - target_grid: 1x1 - scheme: linear - -diagnostics: - validation_mip_convert: - description: "Test with mip convert" - variables: - # mapping of standard_name to stream for CMIP6 - # see the associated config file for input - # "ps": "ap4", "ta": "ap4", "va": "ap4", "ua": "ap5", "mrsos": "ap5", "toz":"apm" - ps: - preprocessor: pp_rad - field: T2Ms - ta: - preprocessor: pp_rad - field: T2Ms - va: - preprocessor: pp_rad - field: T2Ms - ua: - preprocessor: pp_rad - field: T2Ms - toz: - preprocessor: pp_rad - field: T2Ms - scripts: - meridional_mean: - script: validation.py - title: "" - control_model: UKESM1-0-LL - exper_model: UKESM1-0-LL - analysis_type: meridional_mean - seasonal_analysis: True - diff --git a/esmvaltool/utils/cmorizers/mip_convert/rose-suite-template.conf b/esmvaltool/utils/cmorizers/mip_convert/rose-suite-template.conf deleted file mode 100644 index 5562333fed..0000000000 --- a/esmvaltool/utils/cmorizers/mip_convert/rose-suite-template.conf +++ /dev/null @@ -1,20 +0,0 @@ -[jinja2:suite.rc] -ACTIVE_STREAMS = -CONCATENATE = "FALSE" -CYCLING_FREQUENCIES = -DUMMY_RUN = "FALSE" -FINAL_YEAR = -FIRST_YEAR = -REF_YEAR = -INPUT_DIR = -LOCATION = "LOTUS" -MEMORY = "70000" -MIP_CONVERT_CONFIG_DIR = -OUTPUT_DIR = -PARALLEL_TASKS = "20" -NTHREADS_CONCATENATE = "6" -CDDS_DIR = -STREAM_COMPONENTS = -STREAM_TIME_OVERRIDES = -TARGET_SUITE_NAME = -WALL_TIME = "6:00:00" diff --git a/esmvaltool/utils/cmorizers/obs/cmor_config/WOA.yml b/esmvaltool/utils/cmorizers/obs/cmor_config/WOA.yml deleted file mode 100644 index 3baba8bdaa..0000000000 --- a/esmvaltool/utils/cmorizers/obs/cmor_config/WOA.yml +++ /dev/null @@ -1,56 +0,0 @@ ---- -# project at hand -proj: - dataset: 'WOA' - version: '2013v2' - realm: 'clim' - frequency: - 'thetao': 'Omon' - 'so': 'Omon' - 'no3': 'Oyr' - 'po4': 'Oyr' - 'si': 'Oyr' - 'o2': 'Oyr' - metadata_attributes: - tier: '2' - source: 'https://data.nodc.noaa.gov/woa/WOA13/DATAv2/' - comment: 'cmorized for ESMValTool v2' - CMORconventions: 'CF/CMOR3' - CMORcreated: '' - - -# specific raw file names -VAR_TO_FILENAME: - thetao: 'woa13_decav81B0_t' - so: 'woa13_decav81B0_s' - o2: 'woa13_all_o' - no3: 'woa13_all_n' - po4: 'woa13_all_p' - si: 'woa13_all_i' - -# specific fields names from raw obs files -FIELDS: - si: 'Objectively analyzed mean fields for moles_concentration_of_silicate_in_sea_water at standard depth levels.' - thetao: 'Objectively analyzed mean fields for sea_water_temperature at standard depth levels.' - so: 'Objectively analyzed mean fields for salinity at standard depth levels.' - po4: 'Objectively analyzed mean fields for moles_concentration_of_phosphate_in_sea_water at standard depth levels.' - no3: 'Objectively analyzed mean fields for moles_concentration_of_nitrate_in_sea_water at standard depth levels.' - o2: 'Objectively analyzed mean fields for volume_fraction_of_oxygen_in_sea_water at standard depth levels.' 
- -# cmor standard names -STANDARD_NAMES: - si: 'mole_concentration_of_silicate_in_sea_water' - thetao: 'sea_water_potential_temperature' - so: 'sea_water_salinity' - po4: 'mole_concentration_of_phosphate_in_sea_water' - no3: 'mole_concentration_of_nitrate_in_sea_water' - o2: 'mole_concentration_of_dissolved_molecular_oxygen_in_sea_water' - -# cmor long names -LONG_NAMES: - si: 'Dissolved Silicate Concentration' - thetao: 'Sea Water Potential Temperature' - so: 'Sea Water Salinity' - po4: 'Dissolved Phosphate Concentration' - no3: 'Dissolved Nitrate Concentration' - o2: 'Dissolved Oxygen Concentration' diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs.py b/esmvaltool/utils/cmorizers/obs/cmorize_obs.py deleted file mode 100755 index a5807c2684..0000000000 --- a/esmvaltool/utils/cmorizers/obs/cmorize_obs.py +++ /dev/null @@ -1,266 +0,0 @@ -""" -Run the CMORization module as a utility executable. - -This utility allows the user to call and execute CMOR reformatting -scripts (support for NCL and Python at the moment), that will use -two I/O variables passed by this utility: an input directory as -specified in config-user.yml by the RAWOBS key, and an output dir -created in the form of output_dir/CMOR_DATE_TIME/TierTIER/DATASET. -The user can specify a list of DATASETS that the CMOR reformatting -can by run on by using -o (--obs-list-cmorize) command line argument. -The CMOR reformatting scripts are to be found in: -esmvaltool/cmor/cmorizers/obs -""" -import argparse -import logging -import importlib -import os -import datetime -import subprocess - -from esmvaltool._task import write_ncl_settings -from esmvaltool._config import read_config_user_file - -logger = logging.getLogger(__name__) - -HEADER = r""" -______________________________________________________________________ - _____ ____ __ ____ __ _ _____ _ - | ____/ ___|| \/ \ \ / /_ _| |_ _|__ ___ | | - | _| \___ \| |\/| |\ \ / / _` | | | |/ _ \ / _ \| | - | |___ ___) | | | | \ V / (_| | | | | (_) | (_) | | - |_____|____/|_| |_| \_/ \__,_|_| |_|\___/ \___/|_| -______________________________________________________________________ - -""" + __doc__ - - -def _assemble_datasets(raw_obs, obs_list): - """Get my datasets as dictionary keyed on Tier.""" - # check for desired datasets only (if any) - # if not, walk all over rawobs dir - # assume a RAWOBS/TierX/DATASET input structure - datasets = {} - - # get all available tiers in source dir - tiers = ['Tier{}'.format(i) for i in range(2, 4)] - tiers = [tier for tier in tiers if os.path.exists(os.path.join(raw_obs, - tier))] - - # if user specified obs list - if obs_list: - for tier in tiers: - datasets[tier] = [] - for dataset_name in obs_list.split(','): - if os.path.isdir(os.path.join(raw_obs, tier, dataset_name)): - datasets[tier].append(dataset_name) - - # otherwise go through the whole raw_obs dir - else: - for tier in tiers: - datasets[tier] = [] - for dats in os.listdir(os.path.join(raw_obs, tier)): - datasets[tier].append(dats) - - return datasets - - -def _write_ncl_settings(project_info, dataset, run_dir, - reformat_script, log_level): - """Write the information needed by the ncl reformat script.""" - settings = { - 'cmorization_script': reformat_script, - 'input_dir_path': project_info[dataset]['indir'], - 'output_dir_path': project_info[dataset]['outdir'], - 'config_user_info': {'log_level': log_level}, - } - settings_filename = os.path.join(run_dir, dataset, 'settings.ncl') - if not os.path.isdir(os.path.join(run_dir, dataset)): - os.makedirs(os.path.join(run_dir, dataset)) - # 
write the settings file - write_ncl_settings(settings, settings_filename) - return settings_filename - - -def _run_ncl_script(in_dir, - out_dir, - run_dir, - dataset, - reformat_script, - log_level): - """Run the NCL cmorization mechanism.""" - project = {} - project[dataset] = {} - project[dataset]['indir'] = in_dir - project[dataset]['outdir'] = out_dir - settings_file = _write_ncl_settings(project, dataset, run_dir, - reformat_script, log_level) - esmvaltool_root = os.path.dirname( - os.path.dirname(os.path.dirname(os.path.dirname(reformat_script))) - ) - - # put settings in environment - env = dict(os.environ) - env['settings'] = settings_file - env['esmvaltool_root'] = esmvaltool_root - - # call NCL - ncl_call = ['ncl', reformat_script] - logger.info("Executing cmd: %s", ' '.join(ncl_call)) - process = subprocess.Popen(ncl_call, stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, env=env) - output, err = process.communicate() - for oline in str(output.decode('utf-8')).split('\n'): - logger.info('[NCL] %s', oline) - if err: - logger.info('[NCL][subprocess.Popen ERROR] %s', err) - - -def _run_pyt_script(in_dir, out_dir, reformat_module): - """Run the Python cmorization mechanism.""" - py_cmor = importlib.import_module(reformat_module) - py_cmor.cmorization(in_dir, out_dir) - - -def execute_cmorize(): - """Run it as executable.""" - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - '-o', - '--obs-list-cmorize', - type=str, - help='List of obs datasets to cmorize. \ - If no list provided: CMORization of \ - all datasets in RAWOBS; \ - -o DATASET1,DATASET2... : \ - for CMORization of select datasets.') - parser.add_argument( - '-c', - '--config-file', - default=os.path.join(os.path.dirname(__file__), 'config-user.yml'), - help='Config file') - args = parser.parse_args() - - # get and read config file - config_file = os.path.abspath( - os.path.expandvars(os.path.expanduser(args.config_file))) - - # Read user config file - if not os.path.exists(config_file): - logger.error("Config file %s does not exist", config_file) - - # read the file in - config_user = read_config_user_file(config_file, 'cmorize_obs') - - # set the run dir to hold the settings and log files - run_dir = os.path.join(config_user['output_dir'], 'run') - if not os.path.isdir(run_dir): - os.makedirs(run_dir) - - # set logging for screen and file output - root_logger = logging.getLogger() - out_fmt = "%(asctime)s %(levelname)-8s %(name)s,%(lineno)s\t%(message)s" - logging.basicConfig( - filename=os.path.join(run_dir, 'main_log.txt'), - filemode='a', - format=out_fmt, - datefmt='%H:%M:%S', - level=config_user['log_level'].upper()) - root_logger.setLevel(config_user['log_level'].upper()) - logfmt = logging.Formatter(out_fmt) - console_handler = logging.StreamHandler() - console_handler.setFormatter(logfmt) - root_logger.addHandler(console_handler) - - # print header - logger.info(HEADER) - - # run - timestamp1 = datetime.datetime.utcnow() - timestamp_format = "%Y-%m-%d %H:%M:%S" - - logger.info( - "Starting the CMORization Tool at time: %s UTC", - timestamp1.strftime(timestamp_format)) - - logger.info(70 * "-") - logger.info("input_dir = %s", config_user["rootpath"]["RAWOBS"][0]) - # check if the inputdir actually exists - if not os.path.isdir(config_user["rootpath"]["RAWOBS"][0]): - logger.error("Directory %s does not exist", - config_user["rootpath"]["RAWOBS"][0]) - raise ValueError - logger.info("output_dir = %s", config_user["output_dir"]) - logger.info(70 * "-") - - # call the reformat function 
- if args.obs_list_cmorize: - obs_list = args.obs_list_cmorize - else: - obs_list = [] - _cmor_reformat(config_user, obs_list) - - # End time timing - timestamp2 = datetime.datetime.utcnow() - logger.info( - "Ending the CMORization Tool at time: %s UTC", - timestamp2.strftime(timestamp_format)) - logger.info( - "Time for running the CMORization scripts was: %s", - timestamp2 - timestamp1) - - -def _cmor_reformat(config, obs_list): - """Run the cmorization routine.""" - logger.info("Running the CMORization scripts.") - - # master directory - raw_obs = config["rootpath"]["RAWOBS"][0] - - # set the reformat scripts dir - reformat_scripts = os.path.dirname(__file__) - run_dir = os.path.join(config['output_dir'], 'run') - # datsets dictionary of Tier keys - datasets = _assemble_datasets(raw_obs, obs_list) - logger.info("Processing datasets %s", datasets) - - # loop through tier/datasets to be cmorized - for tier in datasets: - for dataset in datasets[tier]: - reformat_script_root = os.path.join(reformat_scripts, - 'cmorize_obs_' + dataset) - # in-data dir; build out-dir tree - in_data_dir = os.path.join(raw_obs, tier, dataset) - out_data_dir = os.path.join(config['output_dir'], tier, dataset) - if not os.path.isdir(out_data_dir): - os.makedirs(out_data_dir) - - # all operations are done in the working dir now - os.chdir(out_data_dir) - - # figure out what language the script is in - if os.path.isfile(reformat_script_root + '.ncl'): - reformat_script = reformat_script_root + '.ncl' - logger.info("CMORizing dataset %s using NCL script %s", - dataset, reformat_script) - - # call the ncl script - _run_ncl_script(in_data_dir, - out_data_dir, - run_dir, - dataset, - reformat_script, - config['log_level']) - elif os.path.isfile(reformat_script_root + '.py'): - py_reformat_script = reformat_script_root + '.py' - logger.info("CMORizing dataset %s using Python script %s", - dataset, py_reformat_script) - module_root = 'esmvaltool.utils.cmorizers.obs.cmorize_obs_' - _run_pyt_script(in_data_dir, out_data_dir, - module_root + dataset) - else: - logger.info('Could not find cmorizer for %s', datasets) - - -if __name__ == '__main__': - execute_cmorize() diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_AURA-TES.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_AURA-TES.ncl deleted file mode 100644 index 552e48cf40..0000000000 --- a/esmvaltool/utils/cmorizers/obs/cmorize_obs_AURA-TES.ncl +++ /dev/null @@ -1,178 +0,0 @@ -; ############################################################################# -; ESMValTool CMORizer for AURA-TES data -; ############################################################################# -; -; Tier -; Tier 3: restricted dataset. -; -; Source -; https://search.earthdata.nasa.gov/search?q=TL3O3M -; -; Last access -; 20181208 -; -; Download and processing instructions -; Select the V004 and V005 projects. -; Download the script file for both projects. -; To download the data use: wget -i . -; From the downloaded data, pick only the *.he5 files and put them in -; input_dir_path. -; Data is freely available, but a registration is required. -; -; Modification history -; 20190108-A_righ_ma: adapted to v2. -; 20140129-A_righ_ma: written. 
-; -; ############################################################################# -loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") - -begin - - ; Script name (for logger) - DIAG_SCRIPT = "cmorize_obs_AURA-TES.ncl" - - ; Source name - OBSNAME = "AURA-TES" - - ; Tier - TIER = 3 - - ; Gridtype - GRIDTYPE = "Nadir" - - ; Period - YEAR1 = 2005 - YEAR2 = 2011 - - ; Selected variable - VAR = "tro3" - - ; MIP - MIP = "Amon" - - ; Frequency - FREQ = "mon" - - ; CMOR table - CMOR_TABLE = getenv("esmvaltool_root") + \ - "/cmor/tables/cmip5/Tables/CMIP5_" + MIP - - ; Type - TYPE = "sat" - - ; Version - VERSION = "1" - - ; Global attributes - SOURCE = "https://search.earthdata.nasa.gov/search?q=TL3O3M" - REF = "Beer, R., IEEE Trans. Geosci. Rem. Sens., " + \ - "doi:10.1109/TGRS.2005.863716, 2006" - COMMENT = "" - -end - -begin - - ; Create time coordinate - timec = create_timec(YEAR1, YEAR2) - datec = ut_calendar(timec, 0) - - ; Loop over time - do tt = 0, dimsizes(timec) - 1 - - yy = toint(datec(tt, 0)) - mm = toint(datec(tt, 1)) - - log_info("Processing date " + yy + sprinti("%0.2i", mm)) - - ; Find files - fname = input_dir_path + "TES-Aura_L3-O3-M" + \ - yy + "m" + sprinti("%0.2i", mm) + "_F01_10.he5" - - if (.not.isfilepresent(fname)) then ; alternative name - fname = input_dir_path + "TES-Aura_L3-O3-M" + \ - yy + "m" + sprinti("%0.2i", mm) + "_C01_F01_10.he5" - end if - - if (.not.isfilepresent(fname)) then - log_info("File missing, skipping this date") - continue - end if - - ; Read variable - f = addfile(fname, "r") - str = "O3_" + GRIDTYPE + "Grid" - var = f->$str$ - - ; Reorder - d1 = "XDim_" + GRIDTYPE + "Grid" - d2 = "YDim_" + GRIDTYPE + "Grid" - d3 = "nLevels_" + GRIDTYPE + "Grid" - tmp = var($d1$|:, $d2$|:, $d3$|:) - delete(var) - var = tmp - delete(tmp) - - ; Read vertical coordinate - str = "Pressure_" + GRIDTYPE + "Grid" - press = f->$str$ - var!2 = "plev" - var&plev = todouble(press) - - ; Create array - if (.not.isdefined("outvar")) then - outdim = array_append_record(dimsizes(timec), dimsizes(var), 0) - outvar = new(outdim, float) - outvar@_FillValue = var@_FillValue - end if - - outvar(tt, :, :, :) = var - delete(var) - - end do - - ; Reorder - outvar!0 = "time" - outvar!1 = "lon" - outvar!2 = "lat" - outvar!3 = "plev" - output = outvar(time|:, plev|:, lat|:, lon|:) - output@_FillValue = FILL - delete(outvar) - - ; Assign coordinates - output&time = timec - output&plev = output&plev * 100. 
; [hPa] --> [Pa] - output&plev@units = "Pa" - - ; Convert units [mol/mol] --> [1e9] - output = output * 1.e9 - output@units = "1e-9" - - ; Format coordinates - format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ) - - ; Set variable attributes - tmp = format_variable(output, VAR, CMOR_TABLE) - delete(output) - output = tmp - delete(tmp) - - ; Calculate coordinate bounds - bounds = guess_coord_bounds(output, FREQ) - - ; Set global attributes - gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) - - ; Output file - DATESTR = YEAR1 + "01-" + YEAR2 + "12" - fout = output_dir_path + \ - str_join((/"OBS", OBSNAME, TYPE, VERSION, \ - MIP, VAR, DATESTR/), "_") + ".nc" - - ; Write variable - write_nc(fout, VAR, output, bounds, gAtt) - delete(gAtt) - delete(output) - -end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_CDS-XCH4.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_CDS-XCH4.ncl deleted file mode 100644 index ba3fd6a450..0000000000 --- a/esmvaltool/utils/cmorizers/obs/cmorize_obs_CDS-XCH4.ncl +++ /dev/null @@ -1,115 +0,0 @@ -; ############################################################################# -; ESMValTool CMORizer for CDS-XCH4 data -; ############################################################################# -; -; Tier -; Tier 3 other freely-available dataset. -; -; Source -; https://cds.climate.copernicus.eu/cdsapp#!/dataset/ -; satellite-methane?tab=form -; -; Last access -; 20190311 -; -; Download and processing instructions -; Select Processing level "Level 3", variable "Column-average dry-air mole -; fraction of atmospheric methane (XCH4) and related variables", Sensor and -; algorithm "MERGED and OBS4MIPS". -; A registration is required to download the data. -; -; Modification history -; 20190311-A_hass_bg: written. 
-; -; ############################################################################ -loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") - -begin - - ; Script name (for logger) - DIAG_SCRIPT = "cmorize_obs_CDS-XCH4.ncl" - - ; Source name - OBSNAME = "CDS-XCH4" - - ; Tier - TIER = 3 - - ; Period - YEAR1 = 2003 - YEAR2 = 2016 - - ; Selected variable (standard name) - VAR = (/"xch4"/) - - ; Name in the raw data - NAME = (/"xch4"/) - - ; MIP - MIP = (/"Amon"/) - - ; Frequency - FREQ = (/"mon"/) - - ; CMOR table - CMOR_TABLE = getenv("esmvaltool_root") + \ - "/cmor/tables/custom/CMOR_xch4.dat" - - ; Type - TYPE = "sat" - - ; Version - VERSION = "L3" - - ; Global attributes - SOURCE = "https://cds.climate.copernicus.eu/cdsapp#!/dataset/" + \ - "satellite-methane?tab=form" - REF = "" - COMMENT = "" - -end - -begin - - ; Loop over variables - do vv = 0, dimsizes(VAR) - 1 - - log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") - - ; Read variables - fname = input_dir_path + "OBS_" + OBSNAME + "_sat_L3_" + NAME(vv) + \ - "_200301-201612.nc" - setfileoption("nc", "MissingToFillValue", False) - f = addfile(fname, "r") - output = f->xch4 - - ; Format coordinates - format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ(vv)) - - ; Set variable attributes - tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) - delete(output) - output = tmp - delete(tmp) - - ; Calculate coordinate bounds - bounds = guess_coord_bounds(output, FREQ(vv)) - - ; Set global attributes - gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) - - ; Output file - DATESTR = YEAR1 + "01-" + YEAR2 + "12" - fout = output_dir_path + \ - str_join((/"OBS", OBSNAME, TYPE, VERSION, \ - MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" - - ; Write variable - write_nc(fout, VAR(vv), output, bounds, gAtt) - delete(gAtt) - delete(output) - delete(bounds) - - end do - -end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_CDS-XCO2.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_CDS-XCO2.ncl deleted file mode 100644 index 9ddb684364..0000000000 --- a/esmvaltool/utils/cmorizers/obs/cmorize_obs_CDS-XCO2.ncl +++ /dev/null @@ -1,115 +0,0 @@ -; ############################################################################# -; ESMValTool CMORizer for CDS-XCO2 data -; ############################################################################# -; -; Tier -; Tier 3 other freely-available dataset. -; -; Source -; https://cds.climate.copernicus.eu/cdsapp#!/dataset/ -; satellite-carbon-dioxide?tab=form -; -; Last access -; 20190319 -; -; Download and processing instructions -; Select Processing level "Level 3", variable "Column-average dry-air mole -; fraction of atmospheric carbon dioxide (XCO2) and related variables", -; Sensor and algorithm "MERGED and OBS4MIPS". -; A registration is required to download the data. -; -; Modification history -; 20190319-A_hass_bg: written. 
-; -; ############################################################################ -loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") - -begin - - ; Script name (for logger) - DIAG_SCRIPT = "cmorize_obs_CDS-XCO2.ncl" - - ; Source name - OBSNAME = "CDS-XCO2" - - ; Tier - TIER = 3 - - ; Period - YEAR1 = 2003 - YEAR2 = 2016 - - ; Selected variable (standard name) - VAR = (/"xco2"/) - - ; Name in the raw data - NAME = (/"xco2"/) - - ; MIP - MIP = (/"Amon"/) - - ; Frequency - FREQ = (/"mon"/) - - ; CMOR table - CMOR_TABLE = getenv("esmvaltool_root") + \ - "/cmor/tables/custom/CMOR_xco2.dat" - - ; Type - TYPE = "sat" - - ; Version - VERSION = "L3" - - ; Global attributes - SOURCE = "https://cds.climate.copernicus.eu/cdsapp#!/dataset/" + \ - "satellite-carbon-dioxide?tab=form" - REF = "" - COMMENT = "" - -end - -begin - - ; Loop over variables - do vv = 0, dimsizes(VAR) - 1 - - log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") - - ; Read variables - fname = input_dir_path + "OBS_" + OBSNAME + "_sat_L3_" + NAME(vv) + \ - "_200301-201612.nc" - setfileoption("nc", "MissingToFillValue", False) - f = addfile(fname, "r") - output = f->xco2 - - ; Format coordinates - format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ(vv)) - - ; Set variable attributes - tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) - delete(output) - output = tmp - delete(tmp) - - ; Calculate coordinate bounds - bounds = guess_coord_bounds(output, FREQ(vv)) - - ; Set global attributes - gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) - - ; Output file - DATESTR = YEAR1 + "01-" + YEAR2 + "12" - fout = output_dir_path + \ - str_join((/"OBS", OBSNAME, TYPE, VERSION, \ - MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" - - ; Write variable - write_nc(fout, VAR(vv), output, bounds, gAtt) - delete(gAtt) - delete(output) - delete(bounds) - - end do - -end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_CERES-SYN1deg.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_CERES-SYN1deg.ncl deleted file mode 100644 index f10e8e1df1..0000000000 --- a/esmvaltool/utils/cmorizers/obs/cmorize_obs_CERES-SYN1deg.ncl +++ /dev/null @@ -1,195 +0,0 @@ -; ############################################################################# -; ESMValTool CMORizer for CERES-SYN1deg data -; ############################################################################# -; -; Tier -; Tier 3: restricted dataset. -; -; Source -; https://ceres-tool.larc.nasa.gov/ord-tool/jsp/SYN1degSelection.jsp -; -; Last access -; 20190207 -; -; Download and processing instructions -; Monthly data: -; Expand "Compute TOA Fluxes" and select: -; Shortwave Flux, Allsky and Clearsky -; Longwave Flux, Allsky and Clearsky -; Shortwave Flux Down, Allsky -; Expand "Computed Surface Fluxes" and select: -; Shortwave Flux Up, Allsky and Clearsky -; Shortwave Flux Down, Allsky and Clearsky -; Longwave Flux Up, Allsky and Clearsky -; Longwave Flux Down, Allsky and Clearsky -; then click on "Monthly", "Regional" and "Get data". All fields are saved -; in CERES_SYN1deg-Month_Terra-Aqua-MODIS_Ed3A_Subset_200003-201702.nc -; 3hr data: -; Select the same fields as above, then click on "Daily 3-Hourly" and -; "Get data". All fields are saved in -; CERES_SYN1deg-3H_Terra-Aqua-MODIS_Ed3A_Subset_YYYYMMDD-YYYYMMDD.nc -; Put all files in input_dir_path (no subdirectories with years). -; For orders larger than 2 GB a registration is required. 
-; -; Modification history -; 20190124-A_righ_ma: written based on v1 different bash scripts by laue_ax -; and eval_ma. -; -; ############################################################################# -loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") - -begin - - ; Script name (for logger) - DIAG_SCRIPT = "cmorize_obs_CERES-SYN1deg.ncl" - - ; Source name - OBSNAME = "CERES-SYN1deg" - - ; Tier - TIER = 3 - - ; Period - YEAR1 = 2001 - YEAR2 = 2016 - - ; CMOR name - VAR = (/"rsus", "rsds", \ - "rlus", "rlds", \ - "rldscs", "rsut", \ - "rlut", "rsutcs", \ - "rlutcs", "rsdt", \ - "rsutcs", "rsut", \ - "rlutcs", "rlut", \ - "rsuscs", "rsus", \ - "rsdscs", "rsds", \ - "rluscs", "rlus", \ - "rldscs", "rlds"/) - - ; Name in the raw data - NAME = (/"sfc_comp_sw_up_all_mon", "sfc_comp_sw_down_all_mon", \ - "sfc_comp_lw_up_all_mon", "sfc_comp_lw_down_all_mon", \ - "sfc_comp_lw_down_clr_mon", "toa_comp_sw_up_all_mon", \ - "toa_comp_lw_up_all_mon", "toa_comp_sw_up_clr_mon", \ - "toa_comp_lw_up_clr_mon", "toa_comp_sw_down_all_mon", \ - "toa_comp_sw_up_clr_3h", "toa_comp_sw_up_all_3h", \ - "toa_comp_lw_up_clr_3h", "toa_comp_lw_up_all_3h", \ - "sfc_comp_sw_up_clr_3h", "sfc_comp_sw_up_all_3h", \ - "sfc_comp_sw_down_clr_3h", "sfc_comp_sw_down_all_3h", \ - "sfc_comp_lw_up_clr_3h", "sfc_comp_lw_up_all_3h", \ - "sfc_comp_lw_down_clr_3h", "sfc_comp_lw_down_all_3h"/) - - ; MIP - MIP = (/"Amon", "Amon", \ - "Amon", "Amon", \ - "Amon", "Amon", \ - "Amon", "Amon", \ - "Amon", "Amon", \ - "3hr", "3hr", \ - "3hr", "3hr", \ - "3hr", "3hr", \ - "3hr", "3hr", \ - "3hr", "3hr", \ - "3hr", "3hr"/) - - ; Frequency - FREQ = (/"mon", "mon", \ - "mon", "mon", \ - "mon", "mon", \ - "mon", "mon", \ - "mon", "mon", \ - "3hr", "3hr", \ - "3hr", "3hr", \ - "3hr", "3hr", \ - "3hr", "3hr", \ - "3hr", "3hr", \ - "3hr", "3hr"/) - - ; CMOR table: Amon version is used also for 3hr, since not all variables are - ; available in 3hr (the tables are identical anyway) - CMOR_TABLE = new(dimsizes(MIP), string) - CMOR_TABLE = getenv("esmvaltool_root") + \ - "/cmor/tables/cmip5/Tables/CMIP5_Amon" - CMOR_TABLE(ind(VAR.eq."rluscs")) = getenv("esmvaltool_root") + \ - "/cmor/tables/custom/CMOR_rluscs.dat" - - ; Type - TYPE = "sat" - - ; Version - VERSION = "Ed3A" - - ; Global attributes - SOURCE = "https://ceres-tool.larc.nasa.gov/ord-tool/jsp/SYN1degSelection.jsp" - REF = "Wielicki et al., Bull. Amer. Meteor. 
Soc., " + \ - "doi: 10.1175/1520-0477(1996)077<0853:CATERE>2.0.CO;2, 1996" - COMMENT = "" - -end - -begin - - do vv = 0, dimsizes(VAR) - 1 - - log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") - - do yy = YEAR1, YEAR2 - - ; Read file - if (MIP(vv).eq."Amon") then - f = addfile(input_dir_path + "CERES_SYN1deg-Month_Terra-Aqua-" + \ - "MODIS_" + VERSION + "_Subset_200003-201702.nc", "r") - xx = f->$NAME(vv)$ - delete(f) - end if - if (MIP(vv).eq."3hr") then - files = systemfunc("ls " + input_dir_path + "CERES_SYN1deg-3H_" + \ - "Terra-Aqua-MODIS_" + VERSION + "_Subset_*" + \ - yy + "*.nc") - f = addfiles(files, "r") - xx = f[:]->$NAME(vv)$ - delete(f) - delete(files) - end if - - ; Time selection - date = cd_calendar(xx&time, 0) - output = xx(ind(date(:, 0).eq.yy), :, :) - delete(date) - delete(xx) - - ; Format coordinates - output!0 = "time" - output!1 = "lat" - output!2 = "lon" - format_coords(output, yy + "0101", yy + "1231", FREQ(vv)) - - ; Set variable attributes - tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) - delete(output) - output = tmp - delete(tmp) - - ; Calculate coordinate bounds - bounds = guess_coord_bounds(output, FREQ(vv)) - - ; Set global attributes - gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) - - ; Output file - DATESTR = yy + "01-" + yy + "12" - fout = output_dir_path + \ - str_join((/"OBS", OBSNAME, TYPE, VERSION, \ - MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" - - ; Write variable - write_nc(fout, VAR(vv), output, bounds, gAtt) - delete(gAtt) - delete(output) - delete(bounds) - - end do - - end do - -end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_ERA-Interim.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_ERA-Interim.ncl deleted file mode 100644 index 446e70c8a2..0000000000 --- a/esmvaltool/utils/cmorizers/obs/cmorize_obs_ERA-Interim.ncl +++ /dev/null @@ -1,373 +0,0 @@ -; ############################################################################# -; ESMValTool CMORizer for ERA-Interim data -; ############################################################################# -; -; Tier -; Tier 3: restricted dataset. -; -; Source -; http://apps.ecmwf.int/datasets/data/interim-full-moda/ -; -; Last access -; 20190205 -; -; Download and processing instructions -; Select "Era Interim Fields": -; Daily: for daily values -; Invariant: for time invariant variables (like land-sea mask) -; Monthly Means of Daily Means: for monthly values -; Monthly Means of Daily Forecast Accumulation: for accumulated variables -; like precipitation or radiation fluxes -; Select "Type of level" (Surface or Pressure levels) -; Download the data on a single variable and single year basis, and save -; them as ERA-Interim___YYYY.nc, where is the ERA-Interim -; variable name and is either monthly or daily. Further download -; "land-sea mask" from the "Invariant" data and save it in -; ERA-Interim_lsm.nc. -; It is also possible to download data in an automated way, see: -; https://confluence.ecmwf.int/display/WEBAPI/Access+ECMWF+Public+Datasets -; https://confluence.ecmwf.int/display/WEBAPI/Python+ERA-interim+examples -; A registration is required for downloading the data. -; -; Caveats -; Make sure to select the right steps for accumulated fluxes, see: -; https://confluence.ecmwf.int/pages/viewpage.action?pageId=56658233 -; https://confluence.ecmwf.int/display/CKB/ERA-Interim%3A+monthly+means -; for a detailed explanation. -; The data are updated regularly: recent years are added, but also the past -; years are sometimes corrected. 
To have a consistent timeseries, it is -; therefore recommended to download the full timeseries and not just add -; new years to a previous version of the data. -; -; Modification history -; 20190311-A_righ_ma: added surface fluxes. -; 20190204-A_righ_ma: adapted to v2. -; 20171023-A_laue_ax: added variables prw, ts -; 20160913-A_laue_ax: added variable tcc -; 20150820-A_laue_ax: added variables lwp, iwp, sftlf -; 20150327-A_righ_ma: merged with ERA-Interim-surf.ncl and -; ERA-Interim-surf_daily.ncl. -; 20140128-A_righ_ma: written. -; -; ############################################################################# - -begin - - ; Script name (for logger) - DIAG_SCRIPT = "cmorize_obs_ERA-Interim.ncl" - - ; Source name - OBSNAME = "ERA-Interim" - - ; Tier - TIER = 3 - - ; Period - YEAR1 = 1979 - YEAR2 = 2018 - - ; Acceleration of gravity [m s-2] - G = 9.80665 - - ; Variable settings as list [VAR, NAME, CONVERSION, MIP, FREQ] - ; VAR: selected variable (standard name) - ; NAME: name in the raw data - ; RAWUNITS: expected units in the input data - ; CONVERSION: corresponding conversion factor to CMOR units - ; MIP: mip - ; FREQ: frequency - VLIST = \ - [/[/"sftlf", "lsm", 100., "fx", "fx"/], \ - [/"pr", "tp", 1.e3, "day", "day"/], \ - [/"psl", "msl", 1., "day", "day"/], \ - [/"tas", "t2m", 1., "day", "day"/], \ - [/"tasmax", "mx2t", 1., "day", "day"/], \ - [/"tasmin", "mn2t", 1., "day", "day"/], \ - [/"zg", "z", 1. / G, "day", "day"/], \ - [/"clivi", "p57.162", 1., "Amon", "mon"/], \ - [/"clt", "tcc", 100., "Amon", "mon"/], \ - [/"clwvi", (/"p56.162", "p57.162"/), 1., "Amon", "mon"/], \ - [/"hfds", (/"ssr", "str", "slhf", "sshf"/), 1., "Omon", "mon"/], \ - [/"hur", "r", 1., "Amon", "mon"/], \ - [/"hus", "q", 1., "Amon", "mon"/], \ - [/"pr", "tp", 1.e3, "Amon", "mon"/], \ - [/"prw", "tcwv", 1., "Amon", "mon"/], \ - [/"ps", "sp", 1., "Amon", "mon"/], \ - [/"psl", "msl", 1., "Amon", "mon"/], \ - [/"ta", "t", 1., "Amon", "mon"/], \ - [/"tas", "t2m", 1., "Amon", "mon"/], \ - [/"tauu", "iews", 1., "Amon", "mon"/], \ - [/"tauv", "inss", 1., "Amon", "mon"/], \ - [/"tos", "sst", 1., "Omon", "mon"/], \ - [/"ts", "skt", 1., "Amon", "mon"/], \ - [/"ua", "u", 1., "Amon", "mon"/], \ - [/"va", "v", 1., "Amon", "mon"/], \ - [/"wap", "w", 1., "Amon", "mon"/], \ - [/"zg", "z", 1. / G, "Amon", "mon"/], \ - [""]/] - - ; Type - TYPE = "reanaly" - - ; Version - VERSION = "1" - - ; Global attributes - SOURCE = "http://apps.ecmwf.int/datasets/data/interim_full_moda/" - REF = "Dee, D. P. et al., Q. J. Roy. Meteor. 
Soc., doi:10.1002/qj.828, 2011" - COMMENT = "" - -end -; ############################################################################# -loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") - -begin - - ; Loop over variables - do vv = 0, ListCount(VLIST) - 2 - - ; Setting for the current variable - clist = VLIST[vv] - VAR = clist[0] - NAME = clist[1] - CONVERSION = clist[2] - MIP = clist[3] - FREQ = clist[4] - CMOR_TABLE = \ - getenv("esmvaltool_root") + "/cmor/tables/cmip5/Tables/CMIP5_" + MIP - delete(clist) - - log_info("Processing " + VAR + " (" + MIP + ")") - - l_day = False - l_fx = False - if (isStrSubset(MIP, "mon")) then - freqkey = "monthly" - end if - if (isStrSubset(MIP, "day")) then - freqkey = "daily" - l_day = True - end if - if (isStrSubset(MIP, "fx")) then - l_fx = True - end if - - ; Create timeseries - do yy = YEAR1, YEAR2 - - ; fx variables are time invariant - if (l_fx) then - fname = input_dir_path + "ERA-Interim_" + NAME(0) + ".nc" - f = addfile(fname, "r") - else - fname = input_dir_path + \ - "ERA-Interim_" + NAME(0) + "_" + freqkey + "_" + yy + ".nc" - f = addfile(fname, "r") - end if - - ; Rank of the input data - rank = dimsizes(getfilevardims(f, NAME(0))) - - ; For daily data, split in monthly files, otherwise yearly - if (l_day) then - mend = 12 - else - mend = 1 ; dummy loop, the whole year will be processed - end if - - do mm = 1, mend - - ; Read variable - if (l_day) then - - ; Check time-index - time = f->time - if (any(VAR.eq.(/"tasmin", "tasmax", "pr"/))) then - time = time - 1 ; Shift back 1h to include 12:00 + step=12h - end if - date = cd_calendar(time, 0) - idxt = ind(date(:, 1).eq.mm) - - ; Extract current month - if (rank.eq.4) then - xx = f->$NAME(0)$(idxt, :, :, :) - elseif (rank.eq.3) then - xx = f->$NAME(0)$(idxt, :, :) - else - error_msg("f", DIAG_SCRIPT, "", "invalid rank") - end if - start_date = yy + sprinti("%0.2i", mm) + "01" - end_date = yy + sprinti("%0.2i", mm) + \ - sprinti("%0.2i", days_in_month(yy, mm)) - - ; Shift back 1h to include 12:00 + step=12h - if (any(VAR.eq.(/"tasmin", "tasmax", "pr"/))) then - xx&time = xx&time - 1 - end if - delete(date) - delete(time) - delete(idxt) - - else - - xx = f->$NAME(0)$ - start_date = yy + "0101" - end_date = yy + "1231" - - end if - - ; Unpack variable according to metadata information - output = short2flt(xx) - delete(xx) - - ; Multiple input case - if (dimsizes(NAME).gt.1) then - - if (l_day) then - error_msg("f", DIAG_SCRIPT, "", "multiple input for daily " + \ - "values not implemented") - end if - - do iv = 1, dimsizes(NAME) - 1 - fname2 = input_dir_path + \ - "ERA-Interim_" + NAME(iv) + "_" + freqkey + "_" + yy + ".nc" - f2 = addfile(fname2, "r") - xx2 = f2->$NAME(iv)$ - output = output + short2flt(xx2) - delete(xx2) - delete(f2) - delete(fname2) - end do - - end if - - ; Calculate daily mean - if (l_day) then - - if (VAR.eq."tasmin") then - dmean = calculate_daily_values(output, "min", 0, False) - elseif (VAR.eq."tasmax") then - dmean = calculate_daily_values(output, "max", 0, False) - elseif (VAR.eq."pr") then - dmean = calculate_daily_values(output, "sum", 0, False) - else - dmean = calculate_daily_values(output, "avg", 0, False) - end if - delete(output) - output = dmean - delete(dmean) - - end if - - ; Time-invariant variables - if (l_fx) then - output := output(0, :, :) - rank = 2 - end if - - ; Convert units - output = output * CONVERSION - - ; Special case: accumulated fluxes - if (any(VAR.eq.(/"pr", "hfds"/))) then ; [X] --> [X s-1] - if (l_day) then - 
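; The raw values are accumulated over the averaging period, - ; so divide by its length in seconds to obtain [X s-1]. -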
output = output / 24. / 3600. - else - locy = new(12, integer) - locy = yy - dm = conform(output, days_in_month(locy, ispan(1, 12, 1)), 0) - output = output / 24. / 3600. / dm - delete(locy) - delete(dm) - end if - end if - - ; Format coordinates - if (rank.eq.4) then - output!0 = "time" - output!1 = "plev" - output!2 = "lat" - output!3 = "lon" - output&plev = output&plev * 100 ; [mb] --> [Pa] - elseif (rank.eq.3) then - output!0 = "time" - output!1 = "lat" - output!2 = "lon" - elseif (rank.eq.2) then - output!0 = "lat" - output!1 = "lon" - else - error_msg("f", DIAG_SCRIPT, "", "invalid rank") - end if - format_coords(output, start_date, end_date, FREQ) - - ; Set variable attributes - tmp = format_variable(output, VAR, CMOR_TABLE) - delete(output) - output = tmp - delete(tmp) - - ; Calculate coordinate bounds - bounds = guess_coord_bounds(output, FREQ) - - ; Set global attributes - gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) - - ; Output file - if (l_fx) then - DATESTR = "" - elseif (l_day) then - DATESTR = \ - yy + sprinti("%0.2i", mm) + "-" + yy + sprinti("%0.2i", mm) - else - DATESTR = yy + "01-" + yy + "12" - end if - if (DATESTR.eq."") then - fout = output_dir_path + \ - str_join((/"OBS", OBSNAME, TYPE, VERSION, MIP, VAR/), "_") + ".nc" - - else - fout = output_dir_path + \ - str_join((/"OBS", OBSNAME, TYPE, VERSION, \ - MIP, VAR, DATESTR/), "_") + ".nc" - end if - - ; Add height coordinate to tas variable (required by the new backend) - if (any(VAR.eq.(/"tas", "tasmin", "tasmax"/))) then - output@coordinates = "height" - end if - - ; Write variable - write_nc(fout, VAR, output, bounds, gAtt) - delete(gAtt) - delete(output) - delete(bounds) - - ; Add height coordinate to tas variable (required by the new backend) - if (any(VAR.eq.(/"tas", "tasmin", "tasmax"/))) then - height = 2.d - height!0 = "ncl_scalar" - height@units = "m" - height@axis = "Z" - height@positive = "up" - height@long_name = "height" - height@standard_name = "height" - w = addfile(fout, "w") - w->height = height - delete(w) - end if - - end do ; month (for daily data, otherwise dummy loop) - - ; No year loop need for fx (time invariant) - if (l_fx) then - break - end if - - end do ; year - - delete(NAME) - - end do ; variable - -end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-AEROSOL.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-AEROSOL.ncl deleted file mode 100644 index 3ddbe27123..0000000000 --- a/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-AEROSOL.ncl +++ /dev/null @@ -1,168 +0,0 @@ -; ############################################################################# -; ESMValTool CMORizer for ESACCI-AEROSOL data -; ############################################################################# -; -; Tier -; Tier 2: other freely-available dataset. -; -; Source -; ftp://anon-ftp.ceda.ac.uk/neodc/esacci/aerosol/data/ -; -; Last access -; 20190124 -; -; Download and processing instructions -; Download the data from: -; ATSR2_SU/L3/v4.21/MONTHLY/ (1997-2002) -; AATSR_SU/L3/v4.21/MONTHLY/ (2003-2011) -; Other years are not considered since they are not complete. -; Put all files in input_dir_path (no subdirectories with years). -; -; Modification history -; 20190124-A_righ_ma: adapted to v2. -; 20160718-A_laue_ax: added AOD550 + AOD870 uncertainties. -; 20160525-A_righ_ma: updated to v4.21 and adding more variables. -; 20150126-A_righ_ma: adding AOD at other wavelengths. -; 20151124-A_righ_ma: switched to monthly raw data (now available). -; 20150424-A_righ_ma: written. 
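This cmorizer (and several that follow, e.g. ESACCI-CLOUD, ESACCI-FIRE and ESACCI-OZONE) assembles its time series by building the complete monthly axis up front and scattering each month's field into the slot whose YYYYMM stamp matches; months without an input file simply remain missing. A minimal numpy sketch of that bookkeeping, with illustrative years and grid shape:

    import numpy as np

    year1, year2 = 1997, 2011  # illustrative, as in the script below
    yyyymm = np.array([100 * y + m
                       for y in range(year1, year2 + 1)
                       for m in range(1, 13)])
    output = np.full((yyyymm.size, 180, 360), np.nan, dtype=np.float32)

    def place_month(field, ldate):
        # ldate is an integer YYYYMM stamp; unmatched months stay NaN,
        # mirroring the NCL 'continue' when no file is found.
        output[yyyymm == ldate] = field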
-; -; ############################################################################# -loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") - -begin - - ; Script name (for logger) - DIAG_SCRIPT = "cmorize_obs_ESACCI-AEROSOL.ncl" - - ; Source name - OBSNAME = "ESACCI-AEROSOL" - - ; Tier - TIER = 2 - - ; Period - YEAR1 = 1997 - YEAR2 = 2011 - - ; Selected variable (standard name) - VAR = (/"od550aer", "od870aer", "od550lt1aer", "abs550aer", \ - "od550aerStderr", "od870aerStderr"/) - - ; Name in the raw data - NAME = (/"AOD550_mean", "AOD870_mean", "FM_AOD550_mean", "AAOD550_mean", \ - "AOD550_uncertainty", "AOD870_uncertainty"/) - - ; MIP - MIP = (/"aero", "aero", "aero", "aero", \ - "aero", "aero"/) - - ; Frequency - FREQ = (/"mon", "mon", "mon", "mon", \ - "mon", "mon"/) - - ; CMOR table - CMOR_TABLE = getenv("esmvaltool_root") + "/cmor/tables/" + \ - (/"cmip5/Tables/CMIP5_aero", \ - "cmip5/Tables/CMIP5_aero", \ - "cmip5/Tables/CMIP5_aero", \ - "cmip5/Tables/CMIP5_aero", \ - "custom/CMOR_od550aerStderr.dat", \ - "custom/CMOR_od870aerStderr.dat"/) - - ; Type - TYPE = "sat" - - ; Version - VERSION = "SU-v4.21" - - ; Global attributes - SOURCE = "ftp://anon-ftp.ceda.ac.uk/neodc/esacci/aerosol/data/" - REF = "Popp et al., ESA Aerosol Climate Change Initiative " + \ - "(ESA Aerosol_cci) data: AOD v4.21 via Centre for Environmental " + \ - "Data Analysis, 2016" - COMMENT = "Combined dataset ERS2-ATSR2 (1997-2002) and ENVISAT-AATSR " + \ - "(2003-2011), based on the University of Swansea algorithm " + \ - "(monthly mean L3 data)" - -end - -begin - - do vv = 0, dimsizes(VAR) - 1 - - log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") - - time = create_timec(YEAR1, YEAR2) - date = cd_calendar(time, 1) - - ; Create timeseries - do yy = YEAR1, YEAR2 - do mm = 1, 12 - - ldate = yy + sprinti("%0.2i", mm) - - ; Read file - fname = systemfunc("ls " + input_dir_path + ldate + "*.nc") - - ; No files found - if (all(ismissing(fname))) then - continue - end if - - ; Extract data - f = addfile(fname, "r") - xx = f->$NAME(vv)$ - - ; Assign to global array - if (.not.isdefined("output")) then - dims = array_append_record(dimsizes(time), dimsizes(xx), 0) - output = new(dims, float) - output!0 = "time" - output&time = time - output!1 = "lat" - output&lat = f->latitude - output!2 = "lon" - output&lon = f->longitude - end if - output(ind(toint(ldate).eq.date), :, :) = (/xx/) - delete(fname) - - end do - end do - - ; Set fill value - output = where(output.eq.-999, output@_FillValue, output) - - ; Format coordinates - output!0 = "time" - output!1 = "lat" - output!2 = "lon" - format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ(vv)) - - ; Set variable attributes - tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) - delete(output) - output = tmp - delete(tmp) - - ; Calculate coordinate bounds - bounds = guess_coord_bounds(output, FREQ(vv)) - - ; Set global attributes - gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) - - ; Output file - DATESTR = YEAR1 + "01-" + YEAR2 + "12" - fout = output_dir_path + \ - str_join((/"OBS", OBSNAME, TYPE, VERSION, \ - MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" - - ; Write variable - write_nc(fout, VAR(vv), output, bounds, gAtt) - delete(gAtt) - delete(output) - delete(bounds) - - end do - -end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-CLOUD.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-CLOUD.ncl deleted file mode 100644 index b65f05dde5..0000000000 --- 
a/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-CLOUD.ncl +++ /dev/null @@ -1,196 +0,0 @@ -; ############################################################################# -; ESMValTool CMORizer for ESACCI-CLOUD data -; ############################################################################# -; -; Tier -; Tier 2: other freely-available dataset. -; -; Source -; https://public.satproj.klima.dwd.de/data/ESA_Cloud_CCI/CLD_PRODUCTS/v3.0/ -; -; Last access -; 20190201 -; -; Download and processing instructions -; Download the data from: -; L3C/AVHRR-PM/ -; To fill the gap 199409-199501, also download: -; L3C/AVHRR-AM/AVHRR_NOAA-12/1994/ \ -; 199409-ESACCI-L3C_CLOUD-CLD_PRODUCTS-AVHRR_NOAA-12-fv3.0.nc -; L3C/AVHRR-AM/AVHRR_NOAA-12/1994/ \ -; 199410-ESACCI-L3C_CLOUD-CLD_PRODUCTS-AVHRR_NOAA-12-fv3.0.nc -; L3C/AVHRR-AM/AVHRR_NOAA-12/1994/ \ -; 199411-ESACCI-L3C_CLOUD-CLD_PRODUCTS-AVHRR_NOAA-12-fv3.0.nc -; L3C/AVHRR-AM/AVHRR_NOAA-12/1994/ \ -; 199412-ESACCI-L3C_CLOUD-CLD_PRODUCTS-AVHRR_NOAA-12-fv3.0.nc -; L3C/AVHRR-AM/AVHRR_NOAA-12/1995/ \ -; 199501-ESACCI-L3C_CLOUD-CLD_PRODUCTS-AVHRR_NOAA-12-fv3.0.nc -; Put all files under a single directory (no subdirectories with years). -; -; Modification history -; 20190201-A_righ_ma: adapted to v2. -; 20181116-A_laue_ax: changed to use CLOUD-CCI v3.0 data (AVHRR-PM), gaps -; (1994/09 - 1995/01) are filled with AVHRR-AM data -; 20180522-A_righ_ma: changed to use AVHRR-PM data. -; 20160519-A_laue_ax: written (reformat_obs_ESACCI-AEROSOL.ncl). -; -; ############################################################################# -loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") - -begin - - ; Script name (for logger) - DIAG_SCRIPT = "cmorize_obs_ESACCI-CLOUD.ncl" - - ; Source name - OBSNAME = "ESACCI-CLOUD" - - ; Tier - TIER = 2 - - ; Period - YEAR1 = 1982 - YEAR2 = 2016 - - ; Selected variable (standard name) - VAR = (/"clt", "cltStderr", "clivi", "clwvi"/) - - ; Name in the raw data - NAME = (/"cfc", "cfc_unc", "iwp_allsky", "lwp_allsky"/) - - ; Conversion factor - CONV = (/100., 1., 0.001, 0.001/) - - ; MIP - MIP = (/"Amon", "Amon", "Amon", "Amon"/) - - ; Frequency - FREQ = (/"mon", "mon", "mon", "mon"/) - - ; CMOR table - CMOR_TABLE = getenv("esmvaltool_root") + "/cmor/tables/" + \ - (/"cmip5/Tables/CMIP5_Amon", \ - "custom/CMOR_cltStderr.dat", \ - "cmip5/Tables/CMIP5_Amon", \ - "cmip5/Tables/CMIP5_Amon"/) - - ; Type - TYPE = "sat" - - ; Version - VERSION = "AVHRR-fv3.0" - - ; Global attributes - SOURCE = "https://public.satproj.klima.dwd.de/data/ESA_Cloud_CCI/" + \ - "CLD_PRODUCTS/v3.0/" - REF = \ - "Stengel et al., Earth Syst. Sci. 
Data, doi:10.5194/essd-9-881-2017, 2017" - COMMENT = "" - -end - -begin - - do vv = 0, dimsizes(VAR) - 1 - - log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") - - time = create_timec(YEAR1, YEAR2) - date = cd_calendar(time, 1) - - ; Create timeseries - do yy = YEAR1, YEAR2 - - syear = sprinti("%i", yy) - do mm = 1, 12 - - smonth = sprinti("%0.2i", mm) - - ; Read file - fname = systemfunc("ls " + input_dir_path + syear + smonth + \ - "-ESACCI-L3C_CLOUD-CLD_PRODUCTS-AVHRR_NOAA-*-" + \ - str_sub_str(VERSION, "AVHRR-", "") + ".nc") - - ; No files found - if (all(ismissing(fname))) then - continue - end if - - ; Extract data - f = addfile(fname, "r") - xx = f->$NAME(vv)$ - - ; Convert units - xx = xx * CONV(vv) - - ; lwp is not a CMOR variable, derive as clwvi = lwp + iwp - if (VAR(vv).eq."clwvi") then - - ; Read 2nd variable containing iwp (variable "iwp_allsky") - iwpname = "iwp_allsky" - xx2 = f->$iwpname$ - - ; Convert units - xx2 = xx2 * 0.001 - - ; add iwp to lwp to calculate cmor variable "clwvi" - xx = xx + xx2 - delete(xx2) - end if - - ; Assign to global array - if (.not.isdefined("output")) then - dims = dimsizes(xx) - dims(0) = dimsizes(time) - output = new(dims, float) - output!0 = "time" - output&time = time - output!1 = "lat" - output&lat = f->lat - output!2 = "lon" - output&lon = f->lon - end if - output(ind(toint(yy * 100 + mm).eq.date), :, :) = (/xx/) - - delete(fname) - delete(f) - - end do - end do - - ; Set fill value - output = where(output.eq.-999, output@_FillValue, output) - - ; Format coordinates - output!0 = "time" - output!1 = "lat" - output!2 = "lon" - format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ(vv)) - - ; Set variable attributes - tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) - delete(output) - output = tmp - delete(tmp) - - ; Calculate coordinate bounds - bounds = guess_coord_bounds(output, FREQ(vv)) - - ; Set global attributes - gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) - - ; Output file - DATESTR = YEAR1 + "01-" + YEAR2 + "12" - fout = output_dir_path + \ - str_join((/"OBS", OBSNAME, TYPE, VERSION, \ - MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" - - ; Write variable - write_nc(fout, VAR(vv), output, bounds, gAtt) - delete(gAtt) - delete(output) - delete(bounds) - - end do - -end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-FIRE.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-FIRE.ncl deleted file mode 100644 index 4f3155b00e..0000000000 --- a/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-FIRE.ncl +++ /dev/null @@ -1,162 +0,0 @@ -; ############################################################################# -; ESMValTool CMORizer for ESACCI-FIRE data -; ############################################################################# -; -; Tier -; Tier 2: other freely-available dataset. -; -; Source -; ftp://anon-ftp.ceda.ac.uk/neodc/esacci/fire/data/ -; -; Last access -; 20190124 -; -; Download and processing instructions -; Download the data from: -; burned_area/MERIS/grid/v4.1/ -; Put all files in input_dir_path (no subdirectories with years). -; -; Modification history -; 20190124-A_righ_ma: written based on a python script by muel_bn. 
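The body below converts absolute burned area [m2] into a grid-cell fraction, which requires the area of each regular lon-lat cell, A = R^2 * dlon * (sin(lat_north) - sin(lat_south)) with angles in radians. A standalone sketch of the same formula (R as in the script; latitude edges and longitude spacing are the caller's):

    import numpy as np

    R = 6371000.0  # Earth radius [m], matching the script below

    def cell_areas(lat_edges_deg, dlon_deg):
        """Area per latitude row of a regular lon-lat grid:
        A = R^2 * dlon * (sin(lat_north) - sin(lat_south))."""
        edges = np.deg2rad(np.asarray(lat_edges_deg, dtype=float))
        return np.abs(R ** 2 * np.deg2rad(dlon_deg) * np.diff(np.sin(edges)))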
-; -; ############################################################################# -loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") - -begin - - ; Script name (for logger) - DIAG_SCRIPT = "cmorize_obs_ESACCI-FIRE.ncl" - - ; Source name - OBSNAME = "ESACCI-FIRE" - - ; Tier - TIER = 2 - - ; Period - YEAR1 = 2005 - YEAR2 = 2011 - - ; Selected variable (standard name) - VAR = "burntArea" - - ; Name in the raw data - NAME = "burned_area" - - ; MIP - MIP = "Lmon" - - ; Frequency - FREQ = "mon" - - ; CMOR table - CMOR_TABLE = getenv("esmvaltool_root") + \ - "/cmor/tables/cmip5/Tables/CMIP5_Lmon" - - ; Type - TYPE = "sat" - - ; Version - VERSION = "L4-BA-MERIS-fv4.1" - - ; Global attributes - SOURCE = "ftp://anon-ftp.ceda.ac.uk/neodc/esacci/fire/data/" - REF = "" - COMMENT = "" - -end - -begin - - time = create_timec(YEAR1, YEAR2) - date = cd_calendar(time, 1) - - do yy = YEAR1, YEAR2 - do mm = 1, 12 - - ldate = yy + sprinti("%0.2i", mm) - - files = systemfunc("ls " + input_dir_path + ldate + \ - "??-ESACCI-L4_FIRE-BA-MERIS-fv4.1.nc") - f = addfiles(files, "r") - - xx = f[:]->$NAME$ - - ; Calculate area - if (.not.isdefined("area")) then - deg2rad = acos(-1.0) / 180. - lat = f[0]->lat - lon = f[0]->lon - nlat = dimsizes(lat) - deltax = abs(lon(1) - lon(0)) - lati = new(dimsizes(lat) + 1, float) - lati(0) = max((/(3 * lat(0) - lat(1)) / 2., -90./)) - do ii = 1, dimsizes(lati) - 2 - lati(ii) = 0.5 * (lat(ii - 1) + lat(ii)) - end do - lati(dimsizes(lati) - 1) = \ - min((/(3 * lat(nlat - 1) - lat(nlat - 2)) / 2., 90./)) - area = new((/dimsizes(lat), dimsizes(lon)/), float) - do ii = 0, dimsizes(lat) - 1 - deltay = sin(lati(ii + 1) * deg2rad) - sin(lati(ii) * deg2rad) - area(ii, :) = abs(6371000. ^ 2 * deltay * deltax * deg2rad) - end do - delete([/lat, lon, nlat, deltax, lati, deltay/]) - end if - - ; Calculate fraction - xx = xx / (/conform(xx, area, (/1, 2/))/) - - ; Assign to global array - if (.not.isdefined("output")) then - dims = array_append_record(dimsizes(time), dimsizes(xx(0, :, :)), 0) - output = new(dims, float) - output!0 = "time" - output&time = time - output!1 = "lat" - output&lat = f[0]->lat - output!2 = "lon" - output&lon = f[0]->lon - end if - output(ind(toint(ldate).eq.date), :, :) = dim_sum_n(xx, 0) - - delete(files) - delete(f) - - end do - end do - - ; Convert units [1] --> [%] - output = output * 100. 
- - ; Format coordinates - output!0 = "time" - output!1 = "lat" - output!2 = "lon" - format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ) - - ; Set variable attributes - tmp = format_variable(output, VAR, CMOR_TABLE) - delete(output) - output = tmp - delete(tmp) - - ; Calculate coordinate bounds - bounds = guess_coord_bounds(output, FREQ) - - ; Set global attributes - gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) - - ; Output file - DATESTR = YEAR1 + "01-" + YEAR2 + "12" - fout = output_dir_path + \ - str_join((/"OBS", OBSNAME, TYPE, VERSION, \ - MIP, VAR, DATESTR/), "_") + ".nc" - - ; Write variable - write_nc(fout, VAR, output, bounds, gAtt) - delete(gAtt) - delete(output) - delete(bounds) - -end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-LANDCOVER.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-LANDCOVER.ncl deleted file mode 100644 index 62a051ed72..0000000000 --- a/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-LANDCOVER.ncl +++ /dev/null @@ -1,215 +0,0 @@ -; ############################################################################# -; ESMValTool CMORizer for ESACCI-LANDCOVER data -; ############################################################################# -; -; Tier -; Tier 2: other freely-available dataset. -; -; Source -; ftp://anon-ftp.ceda.ac.uk/neodc/esacci/land_cover/data/land_cover_maps/ -; -; Last access -; 20190110 -; -; Download and processing instructions -; Download the 3 NetCDF files for 2000, 2005 and 2010. -; Download the CCI-LC Tools from: -; http://maps.elie.ucl.ac.be/CCI/viewer/download/lc-user-tools-3.14.zip -; Unpack and run the CCI-LC Tools on each of the NetCDF files as follows: -; bash lc-user-tools-3.14/bin/aggregate-map.sh \ -; -PgridName=GEOGRAPHIC_LAT_LON -PnumMajorityClasses=1 \ -; -PoutputAccuracy=false -PoutputPFTClasses=true \ -; -PoutputLCCSClasses=false -PnumRows=360 -; Put the resulting processed data in input_dir_path. -; -; Caveat -; The CCI-LC Tools must be applied before running this script. -; The CCI-LC Tools require Java Version 7 or higher. -; The input data are available for a single year and are copied over to -; generate a time series over their time range of validity. -; -; Modification history -; 20190110-A_righ_ma: rewritten in NCL for v2. -; 20160714-A_muel_bn: written. 
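Several of the CMOR fractions below are sums over more than one CCI-LC class (e.g. shrubFrac, treeFrac). The script keeps a point missing only where every contributing class is missing, otherwise summing with missing values treated as zero; a hedged numpy equivalent of that masking logic:

    import numpy as np

    def aggregate_classes(class_maps):
        """Sum per-class fraction maps into one CMOR fraction; a point is
        missing only if it is missing in *all* classes (cf. lmask below)."""
        stack = np.stack([np.asarray(m, dtype=float) for m in class_maps])
        total = np.nansum(stack, axis=0)
        total[np.all(np.isnan(stack), axis=0)] = np.nan
        return total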
-; -; ############################################################################# -loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") - -begin - - ; Script name (for logger) - DIAG_SCRIPT = "cmorize_obs_ESACCI-LANDCOVER.ncl" - - ; Source name - OBSNAME = "ESACCI-LANDCOVER" - - ; Tier - TIER = 2 - - ; Years - YEARS = (/2000, 2005, 2010/) - - ; Variable names - VAR = \ - (/"baresoilFrac", "cropFrac", "grassFrac", "shrubFrac", "treeFrac"/) - - ; Corresponding aggregation classes in the raw data - CLASSES = [/"Bare_Soil", \ - "Managed_Grass", \ - "Natural_Grass", \ - (/"Shrub_Broadleaf_Deciduous", \ - "Shrub_Broadleaf_Evergreen", \ - "Shrub_Needleleaf_Evergreen"/), \ - (/"Tree_Broadleaf_Deciduous", \ - "Tree_Broadleaf_Evergreen", \ - "Tree_Needleleaf_Deciduous", \ - "Tree_Needleleaf_Evergreen"/)/] - - ; MIPs - MIP = (/"Lmon", "Lmon", "Lmon", "Lmon", "Lmon"/) - - ; Frequency - FREQ = (/"mon", "mon", "mon", "mon", "mon"/) - - ; CMOR table - CMOR_TABLE = \ - getenv("esmvaltool_root") + "/cmor/tables/cmip5/Tables/CMIP5_Lmon" - - ; Type - TYPE = "sat" - - ; Version - VERSION = "L4-LCCS-Map-300m-P5Y-aggregated-0.500000Deg" - - ; Global attributes - SOURCE = "ftp://anon-ftp.ceda.ac.uk/neodc/esacci/land_cover/data/" - REF = "Defourny et al.. ESA Land Cover Climate Change Initiative " + \ - "(ESA LC_cci) data, 2015" - COMMENT = "" - -end - -begin - - do yy = 0, dimsizes(YEARS) - 1 - - fname = \ - input_dir_path + "ESACCI-LC-" + VERSION + "-" + YEARS(yy) + "-v1.6.1.nc" - - f = addfile(fname, "r") - - ; Create time coordinate - YEAR1 = YEARS(yy) - 2 - YEAR2 = YEARS(yy) + 2 - time = create_timec(YEAR1, YEAR2) - - do vv = 0, dimsizes(VAR) - 1 - - log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") - - ; Set classes to be added up - class = CLASSES[vv] - - ; Save mask before adding up classes - do cc = 0, dimsizes(class) - 1 - qq = f->$class(cc)$ - replace_ieeenan(qq, FILL, 0) - qq@_FillValue = FILL - tmp = ismissing(qq) - delete(qq) - if (cc.eq.0) then - lmask = tmp - else - lmask := lmask .and. 
tmp - end if - delete(tmp) - end do - - ; Add up classes - do cc = 0, dimsizes(class) - 1 - log_info(" adding class " + class(cc)) - tmp = f->$class(cc)$ - replace_ieeenan(tmp, FILL, 0) - tmp@_FillValue = FILL - tmp = where(ismissing(tmp), 0., tmp) - if (cc.eq.0) then - xx = tmp - else - xx = xx + tmp - end if - delete(tmp) - end do - delete(class) - - ; Reapply mask of missing values - xx = where(lmask, xx@_FillValue, xx) - - ; Define output array - output = \ - new((/dimsizes(time), dimsizes(xx&lat), dimsizes(xx&lon)/), float) - output!0 = "time" - output&time = time - output!1 = "lat" - output&lat = xx&lat - output!2 = "lon" - output&lon = xx&lon - output = conform(output, xx, (/1, 2/)) - delete(xx) - - ; Set standard fill value - output@_FillValue = FILL - - ; Convert units - output = output * 100 - output@units = "%" - - ; Format coordinates - output!0 = "time" - output!1 = "lat" - output!2 = "lon" - format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ(vv)) - - ; Set variable attributes - tmp = format_variable(output, VAR(vv), CMOR_TABLE) - delete(output) - output = tmp - delete(tmp) - - ; Calculate coordinate bounds - bounds = guess_coord_bounds(output, FREQ(vv)) - - ; Set global attributes - gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) - - ; Output file - DATESTR = YEAR1 + "01-" + YEAR2 + "12" - fout = output_dir_path + \ - str_join((/"OBS", OBSNAME, TYPE, VERSION, \ - MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" - - ; Special case for baresoilFrac: add auxiliary coordinate - if (VAR(vv).eq."baresoilFrac") then - output@coordinates = "type" - end if - - ; Write variable - write_nc(fout, VAR(vv), output, bounds, gAtt) - delete(gAtt) - delete(output) - delete(bounds) - - ; Special case for baresoilFrac: add auxiliary coordinate - if (VAR(vv).eq."baresoilFrac") then - type = tochar("bare_ground") - type!0 = "strlen" - type@long_name = "surface type" - type@standard_name = "area_type" - w = addfile(fout, "w") - w->type = type - delete(w) - end if - - end do - end do - -end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-OZONE.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-OZONE.ncl deleted file mode 100644 index 6b2818e447..0000000000 --- a/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-OZONE.ncl +++ /dev/null @@ -1,171 +0,0 @@ -; ############################################################################# -; ESMValTool CMORizer for ESACCI-OZONE data -; ############################################################################# -; -; Tier -; Tier 2: other freely-available dataset. -; -; Source -; ftp://anon-ftp.ceda.ac.uk/neodc/esacci/ozone/data/ -; -; Last access -; 20190201 -; -; Download and processing instructions -; Download the data from: -; total_columns/l3/merged/v0100/ -; Put all files under a single directory (no subdirectories with years). -; -; Modification history -; 20190201-A_righ_ma: adapted to v2 and replace NaN/inf with FillValue. -; 20160224-A_wenz_sa: written based on reformat_obs_ESACCI-AEROSOL.ncl. 
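As noted in the modification history, the raw files contain NaN/inf as well as unphysical values; the body below maps negative values, values above 1e35 and non-finite values to the fill value before assembling the time series. The same cleanup in a few lines of numpy:

    import numpy as np

    def sanitize(x):
        """Treat negative, absurdly large (> 1e35) and non-finite values
        as missing, mirroring the where/replace_ieeenan calls below."""
        x = np.asarray(x, dtype=float).copy()
        x[~np.isfinite(x) | (x < 0.0) | (x > 1e35)] = np.nan
        return x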
-; -; ############################################################################# -loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") - -begin - - ; Script name (for logger) - DIAG_SCRIPT = "cmorize_obs_ESACCI-OZONE.ncl" - - ; Source name - OBSNAME = "ESACCI-OZONE" - - ; Tier - TIER = 2 - - ; Period - YEAR1 = (/1997, 1997, 2007, 2007/) - YEAR2 = (/2010, 2010, 2008, 2008/) - - ; Selected variable (standard name) - VAR = (/"toz", "tozStderr", "tro3prof", "tro3profStderr"/) - - ; Name in the raw data - NAME = (/"atmosphere_mole_content_of_ozone", \ - "atmosphere_mole_content_of_ozone_standard_error", \ - "merged_ozone_vmr", \ - "uncertainty_of_merged_ozone"/) - - ; MIP - MIP = (/"Amon", "Amon", "Amon", "Amon"/) - - ; Frequency - FREQ = (/"mon", "mon", "mon", "mon"/) - - ; CMOR table - CMOR_TABLE = getenv("esmvaltool_root") + \ - "/cmor/tables/custom/CMOR_" + VAR + ".dat" - - ; File name - FNAME = (/"ESACCI-OZONE-L3S-TC-MERGED-DLR_1M-_DATE_??-fv0100.nc", \ - "ESACCI-OZONE-L3S-TC-MERGED-DLR_1M-_DATE_??-fv0100.nc", \ - "ESACCI-OZONE-L3-LP-MERGED-MZM-_DATE_-fv0002.nc", \ - "ESACCI-OZONE-L3-LP-MERGED-MZM-_DATE_-fv0002.nc"/) - - ; Type - TYPE = "sat" - - ; Version - VERSION = "L3" - - ; Global attributes - SOURCE = "ftp://anon-ftp.ceda.ac.uk/neodc/esacci/ozone/data/" - REF = "Loyola et al., Int. J. Remote Sens. doi:10.1080/" + \ - "01431160902825016, 2009." - COMMENT = "" - -end - -begin - - do vv = 0, dimsizes(VAR) - 1 - - log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") - - time = create_timec(YEAR1(vv), YEAR2(vv)) - date = cd_calendar(time, 1) - - ; Create timeseries - do yy = YEAR1(vv), YEAR2(vv) - do mm = 1, 12 - - ldate = yy + sprinti("%0.2i", mm) - - ; File name - fname = systemfunc("ls " + input_dir_path + \ - str_sub_str(FNAME(vv), "_DATE_", ldate)) - - ; Check - if (all(ismissing(fname))) then - error_msg("f", DIAG_SCRIPT, "", "no file found for date " + ldate) - end if - - ; Extract data - f = addfile(fname(0), "r") - xx = f->$NAME(vv)$ - xx@_FillValue = FILL - xx@missing_value = xx@_FillValue - xx = where(xx.lt.0., xx@_FillValue, xx) - xx = where(xx.gt.1e35, xx@_FillValue, xx) ; get rid of infinity values - replace_ieeenan(xx, xx@_FillValue, 0) - - ; Assign to global array - dimnames = getvardimnames(xx) - if (.not.isdefined("output")) then - dims = array_append_record(dimsizes(time), dimsizes(xx), 0) - output = new(dims, typeof(xx)) - output!0 = "time" - output&time = time - do ii = 0, dimsizes(dimnames) - 1 - if (dimnames(ii).eq."air_pressure") then - output!(ii+1) = "plev" - output&plev = f->$dimnames(ii)$ - elseif (isStrSubset(dimnames(ii), "latitude")) then - output!(ii+1) = "lat" - output&lat = f->$dimnames(ii)$ - elseif (dimnames(ii).eq."longitude") - output!(ii+1) = "lon" - output&lon = f->$dimnames(ii)$ - end if - end do - end if - output(ind(toint(ldate).eq.date), :, :) = (/xx/) - delete(fname) - delete(xx) - - end do - end do - - ; Format coordinates - format_coords(output, YEAR1(vv) + "0101", YEAR2(vv) + "1231", FREQ(vv)) - - ; Set variable attributes - tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) - delete(output) - output = tmp - delete(tmp) - - ; Calculate coordinate bounds - bounds = guess_coord_bounds(output, FREQ(vv)) - - ; Set global attributes - gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) - - ; Output file - DATESTR = YEAR1(vv) + "01-" + YEAR2(vv) + "12" - fout = output_dir_path + \ - str_join((/"OBS", OBSNAME, TYPE, VERSION, \ - MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" - - ; Write variable - 
write_nc(fout, VAR(vv), output, bounds, gAtt) - delete(gAtt) - delete(output) - delete(bounds) - delete(time) - delete(date) - - end do - -end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-SOILMOISTURE.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-SOILMOISTURE.ncl deleted file mode 100644 index e5655b39e7..0000000000 --- a/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-SOILMOISTURE.ncl +++ /dev/null @@ -1,173 +0,0 @@ -; ############################################################################# -; ESMValTool CMORizer for ESACCI-SOILMOISTURE data -; ############################################################################# -; -; Tier -; Tier 2: other freely-available dataset. -; -; Source -; ftp://anon-ftp.ceda.ac.uk/neodc/esacci/soil_moisture/data/ -; -; Last access -; 20190201 -; -; Download and processing instructions -; Download the data from: -; daily_files/COMBINED/v04.2/ -; ancillary/v04.2/ -; Put all files under a single directory (no subdirectories with years). -; -; Modification history -; 20190201-A_righ_ma: adapted to v2, use new input data version 4.2. -; 20160824-A_laue_ax: added processing of volumetric soil moisture -; content (sm, smStderr). -; 20160721-A_laue_ax: use daily files, added processing of uncertainty. -; 20150523-A_righ_ma: written. -; -; ############################################################################# -loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") - -begin - - ; Script name (for logger) - DIAG_SCRIPT = "cmorize_obs_ESACCI-SOILMOISTURE.ncl" - - ; Source name - OBSNAME = "ESACCI-SOILMOISTURE" - - ; Tier - TIER = 2 - - ; Period - YEAR1 = 1979 - YEAR2 = 2016 - - ; Selected variable (standard name) - VAR = (/"sm", "smStderr", "dos", "dosStderr"/) - - ; Name in the raw data - NAME = (/"sm", "sm_uncertainty", "sm", "sm_uncertainty"/) - - ; MIP - MIP = (/"Lmon", "Lmon", "Lmon", "Lmon"/) - - ; Frequency - FREQ = (/"mon", "mon", "mon", "mon"/) - - ; CMOR table - CMOR_TABLE = getenv("esmvaltool_root") + \ - "/cmor/tables/custom/CMOR_" + VAR + ".dat" - - ; Type - TYPE = "sat" - - ; Version - VERSION = "L3S-SSMV-COMBINED-v4.2" - - ; Global attributes - SOURCE = "ftp://anon-ftp.ceda.ac.uk/neodc/esacci/soil_moisture/data/" - REF = "" - COMMENT = "" - -end - -begin - - do vv = 0, dimsizes(VAR) - 1 - - log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") - - do yy = YEAR1, YEAR2 - - ; Set list of files - files = systemfunc("ls " + input_dir_path + \ - "ESACCI-SOILMOISTURE-L3S-SSMV-" + \ - "COMBINED-" + yy + "????000000-fv04.2.nc") - f = addfiles(files, "r") - delete(files) - - ; Read data - xx = f[:]->$NAME(vv)$ - if (isatt(xx, "scale_factor")) then - tmp = tofloat(xx * xx@scale_factor) - copy_VarAtts(xx, tmp) - copy_VarCoords(xx, tmp) - delete(xx) - xx = tmp - delete(tmp) - end if - delete(f) - - ; Derive dos using porosity - if (any(VAR(vv).eq.(/"dos", "dosStderr"/))) then - g = addfile(input_dir_path + \ - "/ESACCI-SOILMOISTURE-POROSITY_V01.1.nc", "r") - zz = g->porosity - xx = xx * 100. 
/ conform(xx, zz, (/1, 2/)) - delete(zz) - end if - - ; Add a minor time shift for correct extraction of monthly mean below - xx&time = xx&time + 0.1 - - ; Calculate monthly means - if (isStrSubset(VAR(vv), "Stderr")) then - xx2 = xx - xx2 = xx ^ 2 ; save metadata - tmp = calculate_monthly_values(xx2, "avg", 0, False) - delete(xx) - delete(xx2) - xx = sqrt(tmp) - copy_VarAtts(tmp, xx) - copy_VarCoords(tmp, xx) - delete(tmp) - else - tmp = calculate_monthly_values(xx, "avg", 0, False) - delete(xx) - xx = tmp - delete(tmp) - end if - - ; Append to time-series - if (.not.isdefined("output")) then - output = xx - else - output := array_append_record(output, xx, 0) - end if - delete(xx) - - end do - - ; Format coordinates - output!0 = "time" - output!1 = "lat" - output!2 = "lon" - format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ(vv)) - - ; Set variable attributes - tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) - delete(output) - output = tmp - delete(tmp) - - ; Calculate coordinate bounds - bounds = guess_coord_bounds(output, FREQ(vv)) - - ; Set global attributes - gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) - - ; Output file - DATESTR = YEAR1 + "01-" + YEAR2 + "12" - fout = output_dir_path + \ - str_join((/"OBS", OBSNAME, TYPE, VERSION, \ - MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" - - ; Write variable - write_nc(fout, VAR(vv), output, bounds, gAtt) - delete(gAtt) - delete(output) - delete(bounds) - - end do - -end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-SST.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-SST.ncl deleted file mode 100644 index 306211a8e4..0000000000 --- a/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-SST.ncl +++ /dev/null @@ -1,176 +0,0 @@ -; ############################################################################# -; ESMValTool CMORizer for ESACCI-SST data -; ############################################################################# -; -; Tier -; Tier 2: other freely-available dataset. -; -; Source -; ftp://anon-ftp.ceda.ac.uk/neodc/esacci/sst/data/ -; -; Last access -; 20190201 -; -; Download and processing instructions -; Download the data from: -; lt/Analysis/L4/v01.1/ -; Put all files under a single directory (no subdirectories with years). -; -; Modification history -; 20190201-A_righ_ma: adapted to v2. -; 20180529-A_righ_ma: modified to have consistent metadata across multiple -; files and to speed up processing time -; 20160818-A_laue_ax: written -; -; ############################################################################# -loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") - -begin - - ; Script name (for logger) - DIAG_SCRIPT = "cmorize_obs_ESACCI-SST.ncl" - - ; Source name - OBSNAME = "ESACCI-SST" - - ; Tier - TIER = 2 - - ; Period - YEAR1 = 1992 - YEAR2 = 2010 - - ; MIP - MIP = "Amon" - - ; Frequency - FREQ = "mon" - - ; CMOR table - CMOR_TABLE = getenv("esmvaltool_root") + "/cmor/tables/" - - ; Type - TYPE = "sat" - - ; Version - VERSION = "L4-GHRSST-SSTdepth-OSTIA-GLOB" - - ; Global attributes - SOURCE = "ftp://anon-ftp.ceda.ac.uk/neodc/esacci/sst/data/" - REF = "Merchant et al., Geosci. 
Data J., doi:10.1002/gdj3.20, 2014" - COMMENT = "" - -end - -begin - - ; Save date for consistent history attribute - today = systemfunc("date") - - ; Loop over time period - do yy = YEAR1, YEAR2 - do mm = 1, 12 - - mo = sprinti("%0.2i", mm) - - dm = days_in_month(yy, mm) - - ; Select files for this year/month - files = \ - systemfunc("ls " + input_dir_path + yy + mo + "??120000-" + \ - "ESACCI-L4_GHRSST-SSTdepth-OSTIA-GLOB_LT-v02.0-fv01.1.nc") - nfiles = dimsizes(files) - - if (nfiles.ne.dm) then - error_msg("f", DIAG_SCRIPT, "", "incomplete data in " + yy + mo + \ - " (" + dm + " != " + nfiles + " files found)") - end if - - ; Create time-series - f = addfiles(files, "r") - ListSetType(f, "cat") - - ; Read mask - lsmask = f[:]->mask - - ; Read SST - tmp = f[:]->analysed_sst - sst = tmp * tmp@scale_factor + tmp@add_offset - copy_VarCoords(tmp, sst) - delete(tmp) - - ; Read error - tmp = f[:]->analysis_error - err = tmp * tmp@scale_factor + tmp@add_offset - copy_VarCoords(tmp, err) - delete(tmp) - - ; Mask anything that is not open sea water (i.e. land, ice, lakes) - sst = where(lsmask.eq.1, sst, sst@_FillValue) - err = where(lsmask.eq.1, err, err@_FillValue) - delete(lsmask) - - ; Calculate time averages - sstavg = sst(0:0, :, :) - sstavg(0, :, :) = dim_avg_n_Wrap(sst, 0) - erravg = err(0:0, :, :) - tmp = err ^ 2 - erravg(0, :, :) = sqrt(dim_avg_n_Wrap(tmp, 0)) - delete(tmp) - delete(sst) - delete(err) - - ; Format time coordinate - sstavg!0 = "time" - sstavg!1 = "lat" - sstavg!2 = "lon" - format_coords(sstavg, yy + mo + "01", yy + mo + dm, FREQ) - erravg!0 = "time" - erravg!1 = "lat" - erravg!2 = "lon" - format_coords(erravg, yy + mo + "01", yy + mo + dm, FREQ) - - ; Format variable metadata - table = CMOR_TABLE + "cmip5/Tables/CMIP5_Amon" - tmp = format_variable(sstavg, "ts", table) - delete(sstavg) - sstavg = tmp - delete(tmp) - - table = CMOR_TABLE + "custom/CMOR_tsStderr.dat" - tmp = format_variable(erravg, "tsStderr", table) - delete(erravg) - erravg = tmp - delete(tmp) - - ; Calculate coordinate bounds - bounds_sst = guess_coord_bounds(sstavg, FREQ) - bounds_err = guess_coord_bounds(erravg, FREQ) - - ; Set global attributes - gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) - - ; Write output - DATESTR = yy + mo + "-" + yy + mo - fout = output_dir_path + \ - str_join((/"OBS", OBSNAME, TYPE, VERSION, \ - MIP, "ts", DATESTR/), "_") + ".nc" - write_nc(fout, "ts", sstavg, bounds_sst, gAtt) - delete(sstavg) - delete(bounds_sst) - - fout = output_dir_path + \ - str_join((/"OBS", OBSNAME, TYPE, VERSION, \ - MIP, "tsStderr", DATESTR/), "_") + ".nc" - write_nc(fout, "tsStderr", erravg, bounds_err, gAtt) - delete(erravg) - delete(bounds_err) - - delete(gAtt) - delete(files) - delete(f) - - end do - end do - -end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_GHCN.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_GHCN.ncl deleted file mode 100644 index aed9b03e76..0000000000 --- a/esmvaltool/utils/cmorizers/obs/cmorize_obs_GHCN.ncl +++ /dev/null @@ -1,123 +0,0 @@ -; ############################################################################# -; ESMValTool CMORizer for GHCN data -; ############################################################################# -; -; Tier -; Tier 2: other freely-available dataset. -; -; Source -; https://www.esrl.noaa.gov/psd/data/gridded/data.ghcngridded.html -; -; Last access -; 20190308 -; -; Download and processing instructions -; Download the dataset "precip.mon.total.nc" (precipitation, total, surface, -; 1900-2015 on a 5x5 grid). 
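The unit conversion in the body below rests on the identity that 1 mm of water over 1 m^2 weighs 1 kg, so a monthly total in [mm/month] becomes a flux in [kg m-2 s-1] after dividing by the number of seconds in that month:

    def mm_per_month_to_flux(pr_mm_month, days_in_month):
        """pr [mm/month] -> pr [kg m-2 s-1]: 1 mm over 1 m^2 == 1 kg."""
        return pr_mm_month / (86400.0 * days_in_month)

    # e.g. 31 mm falling over January: 31 / (86400 * 31) ~= 1.157e-05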
-; -; Modification history -; 20190308-A_righ_ma: minor changes to include coordinate boundaries. -; 20190227-A_bock_ls: written. -; -; ############################################################################# -loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") - -begin - - ; Script name (for logger) - DIAG_SCRIPT = "cmorize_obs_GHCN.ncl" - - ; Source name - OBSNAME = "GHCN" - - ; Tier - TIER = 2 - - ; Period - YEAR1 = 1900 - YEAR2 = 2014 - - ; Selected variable (standard name) - VAR = "pr" - - ; Name in the raw data - NAME = "precip" - - ; MIP - MIP = "Amon" - - ; Frequency - FREQ = "mon" - - ; CMOR table - CMOR_TABLE = getenv("esmvaltool_root") + \ - "/cmor/tables/cmip5/Tables/CMIP5_Amon" - - ; Type - TYPE = "ground" - - ; Version - VERSION = "1" - - ; Global attributes - SOURCE = "https://www.esrl.noaa.gov/psd/data/gridded/data.ghcngridded.html" - REF = "Jones, P. D., and A. Moberg, J. Climate, " + \ - "doi:10.1175/1520-0442(2003)016<0206:HALSSA>2.0.CO;2, 2003" - COMMENT = "" - -end - -begin - - ; Read file - fname = input_dir_path + "precip.mon.total.nc" - f = addfile(fname, "r") - setfileoption("nc", "MissingToFillValue", False) - - ; Read absolute precipitation without last incomplete year - output = f->$NAME$(time|0:1379, lat|:, lon|:) - - ; Calculate days per month - date = cd_calendar(output&time, 0) - dpm = days_in_month(toint(date(:, 0)), toint(date(:, 1))) - dpmc = conform(output, dpm, 0) - - ; Check time range - if (dimsizes(date(:, 0)).ne.12 * (YEAR2 - YEAR1 + 1)) then - error_msg("f", DIAG_SCRIPT, "", "incorrect number of timesteps") - end if - - ; Convert units [mm/month] --> [kg/m2/s] - output = output / (24 * 3600 * dpmc) - - log_info(" Climatology range: " + min(output) + \ - " kg/m2/s to " + max(output) + " kg/m2/s") - - ; Format coordinates - format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ) - - ; Set variable attributes - tmp = format_variable(output, VAR, CMOR_TABLE) - delete(output) - output = tmp - delete(tmp) - - ; Calculate coordinate bounds - bounds = guess_coord_bounds(output, FREQ) - - ; Set global attributes - gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) - - ; Output file - DATESTR = YEAR1 + "01-" + YEAR2 + "12" - fout = output_dir_path + \ - str_join((/"OBS", OBSNAME, TYPE, VERSION, \ - MIP, VAR, DATESTR/), "_") + ".nc" - - ; Write temperature time-series - write_nc(fout, VAR, output, bounds, gAtt) - delete(gAtt) - delete(output) - delete(bounds) - -end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_HadCRUT3.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_HadCRUT3.ncl deleted file mode 100644 index abd1a6b3e9..0000000000 --- a/esmvaltool/utils/cmorizers/obs/cmorize_obs_HadCRUT3.ncl +++ /dev/null @@ -1,125 +0,0 @@ -; ############################################################################# -; ESMValTool CMORizer for HadCRUT3 data -; ############################################################################# -; -; Tier -; Tier 2: other freely-available dataset. -; -; Source -; http://www.metoffice.gov.uk/hadobs/hadcrut3/data/download.html -; -; Last access -; 20190221 -; -; Download and processing instructions -; Download the HadCRUT3v.nc file (variance adjusted dataset). -; -; Caveats -; The HadCRUT3v variance-adjusted dataset for tas is actually the anomaly -; with respect to the period 1958-2001. -; -; Modification history -; 20190221-A_righ_ma: adapted to v2 and renamed to HadCRUT3. -; 20150330-A_righ_ma: updated paths and global attributes. -; 20140311-A_senf_da: written. 
-; -; ############################################################################# -loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") - -begin - - ; Script name (for logger) - DIAG_SCRIPT = "cmorize_obs_HadCRUT3.ncl" - - ; Source name - OBSNAME = "HadCRUT3" - - ; Tier - TIER = 2 - - ; Period - YEAR1 = 1850 - YEAR2 = 2013 - - ; Selected variable (standard name) - VAR = "tasa" - - ; Name in the raw data - NAME = "temp" - - ; MIP - MIP = "Amon" - - ; Frequency - FREQ = "mon" - - ; CMOR table - CMOR_TABLE = getenv("esmvaltool_root") + "/cmor/tables/custom/CMOR_tasa.dat" - - ; Type - TYPE = "ground" - - ; Version - VERSION = "1" - - ; Global attributes - SOURCE = "http://www.metoffice.gov.uk/hadobs/hadcrut3/data/download.html" - REF = "Brohan et al., J. Geophys. Res., doi:10.1029/2005JD006548, 2006" - COMMENT = "Temperature anomaly with respect to the period 1958-2001" - -end - -begin - - ; Read file - fname = input_dir_path + "HadCRUT3v.nc" - f = addfile(fname, "r") - - ; Read variable - output = f->temp - - ; Delete level coordinate (dimension 1) - tmp = rm_single_dims(output) - delete(output) - output = tmp - delete(tmp) - - ; Extract time period - output!0 = "time" - date = cd_calendar(output&time, 0) - idx = ind(date(:, 0).ge.YEAR1 .and. date(:, 0).le.YEAR2) - output := output(idx, :, :) - - ; Format coordinates - output!0 = "time" - output!1 = "lat" - output!2 = "lon" - format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ) - - ; Set variable attributes - tmp = format_variable(output, VAR, CMOR_TABLE) - delete(output) - output = tmp - delete(tmp) - - ; Calculate coordinate bounds - bounds = guess_coord_bounds(output, FREQ) - - ; Set global attributes - gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) - - ; Output file - DATESTR = YEAR1 + "01-" + YEAR2 + "12" - fout = output_dir_path + \ - str_join((/"OBS", OBSNAME, TYPE, VERSION, \ - MIP, VAR, DATESTR/), "_") + ".nc" - - ; Write variable - write_nc(fout, VAR, output, bounds, gAtt) - w = addfile(fout, "w") - delete(w) - delete(gAtt) - delete(output) - delete(bounds) - -end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_HadCRUT4.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_HadCRUT4.ncl deleted file mode 100644 index 07cf11a936..0000000000 --- a/esmvaltool/utils/cmorizers/obs/cmorize_obs_HadCRUT4.ncl +++ /dev/null @@ -1,223 +0,0 @@ -; ############################################################################# -; ESMValTool CMORizer for HadCRUT data -; ############################################################################# -; -; Tier -; Tier 2: other freely-available dataset. -; -; Source -; https://crudata.uea.ac.uk/cru/data/temperature/ -; -; Last access -; 20190208 -; -; Download and processing instructions -; Download the dataset "HadCRUT4" (median temperature anomalies) and -; the dataset "Absolute" (absolute temperatures for the base period -; 1961-90 on a 5x5 grid). -; -; Caveats -; In contrast to the HadCRUT3 reformat script which produces temperature -; anomalies (relative to the 1961-90 climatology), this script calculates -; absolute tas by adding the climatology ("absolute.nc") to the anomalies -; ("HadCRUT.4.6.0.0.median.nc"). It creates 3 output, one with the -; temperature time-series, one with the anomaly time-series, and one with -; the temperature climatology (1961-1990). -; -; Modification history -; 20190229-A_righ_ma: added output for anomaly (tasa). -; 20190208-A_righ_ma: added output for climatology and adapted to v2. 
-; 20180222-A_laue_ax: bug fix (added swapping of latitudes if needed). -; 20160203-A_laue_ax: written. -; -; ############################################################################# -loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") - -begin - - ; Script name (for logger) - DIAG_SCRIPT = "cmorize_obs_HadCRUT4.ncl" - - ; Source name - OBSNAME = "HadCRUT4" - - ; Tier - TIER = 2 - - ; Period - YEAR1 = 1850 - YEAR2 = 2018 - - ; Selected variable (standard name) - VAR = "tas" - - ; MIP - MIP = "Amon" - - ; Frequency - FREQ = "mon" - - ; CMOR table - CMOR_TABLE1 = getenv("esmvaltool_root") + \ - "/cmor/tables/cmip5/Tables/CMIP5_Amon" - CMOR_TABLE2 = getenv("esmvaltool_root") + \ - "/cmor/tables/custom/CMOR_tasa.dat" - CMOR_TABLE3 = getenv("esmvaltool_root") + \ - "/cmor/tables/cmip5/Tables/CMIP5_Amon" - - ; Version - VERSION = "1" - - ; Type - TYPE1 = "ground" - TYPE2 = "ground" - TYPE3 = "clim" - - ; Global attributes - SOURCE = "https://crudata.uea.ac.uk/cru/data/temperature/" - REF1 = "Morice et al., J. Geophys. Res., doi:10.1029/2011JD017187, 2012" - REF2 = "Morice et al., J. Geophys. Res., doi:10.1029/2011JD017187, 2012" - REF3 = "Jones et al., Rev. Geophys., doi:10.1029/1999RG900002, 1999" - COMMENT1 = "Temperature time-series calculated from the anomaly " + \ - "time-series by adding the temperature climatology for 1961-1990" - COMMENT2 = "Temperature anomaly with respect to the period 1961-1990" - COMMENT3 = "Climatology 1961-1990" - -end - -begin - - ; Read file - fname1 = input_dir_path + "HadCRUT.4.6.0.0.median.nc" - fname2 = input_dir_path + "absolute.nc" - f1 = addfile(fname1, "r") - setfileoption("nc", "MissingToFillValue", False) - f2 = addfile(fname2, "r") - - ; Read anomaly - anomaly = f1->temperature_anomaly - - ; Read absolute temperature - tmp = f2->tem - clim = tofloat(tmp * tmp@scale_factor) + 273.15 - copy_VarCoords(tmp, clim) - delete(tmp) - - ; Swap latitudes - if (isMonotonic(anomaly&latitude).eq.-1) then - anomaly = anomaly(:, ::-1, :) - end if - - if (isMonotonic(clim&lat).eq.-1) then - clim = clim(:, ::-1, :) - end if - - log_info(" Climatology range: " + min(clim) + \ - " K to " + max(clim) + " K") - log_info(" Anomaly range: " + min(anomaly) + \ - " K to " + max(anomaly) + " K") - - output1 = anomaly - output2 = anomaly - output3 = clim - delete(output3&time) - output3&time = create_timec(1976, 1976) - dims = dimsizes(output1) - - ; Add absolute temperature to anomaly - do yr = 0, dims(0) / 12 - 1 - m1 = yr * 12 - m2 = m1 + 11 - output1(m1:m2, :, :) = where(.not.ismissing(clim), \ - anomaly(m1:m2, :, :) + clim, \ - tofloat(anomaly@_FillValue)) - end do - - ; Format coordinates - output1!0 = "time" - output1!1 = "lat" - output1!2 = "lon" - format_coords(output1, YEAR1 + "0101", YEAR2 + "1231", FREQ) - output2!0 = "time" - output2!1 = "lat" - output2!2 = "lon" - format_coords(output2, YEAR1 + "0101", YEAR2 + "1231", FREQ) - output3!0 = "time" - output3!1 = "lat" - output3!2 = "lon" - format_coords(output3, "19760101", "19761231", FREQ) - - ; Calculate coordinate bounds - bounds1 = guess_coord_bounds(output1, FREQ) - bounds2 = guess_coord_bounds(output2, FREQ) - bounds3 = guess_coord_bounds(output3, FREQ) - - ; Set variable attributes - tmp = format_variable(output1, VAR, CMOR_TABLE1) - delete(output1) - output1 = tmp - delete(tmp) - tmp = format_variable(output2, "tasa", CMOR_TABLE2) - delete(output2) - output2 = tmp - delete(tmp) - tmp = format_variable(output3, VAR, CMOR_TABLE3) - delete(output3) - output3 = tmp - delete(tmp) - - ; 
Add height coordinate - output1@coordinates = "height" - output3@coordinates = "height" - height = 2.d - height!0 = "ncl_scalar" - height@units = "m" - height@axis = "Z" - height@positive = "up" - height@long_name = "height" - height@standard_name = "height" - - ; Set global attributes - gAtt1 = set_global_atts(OBSNAME, TIER, SOURCE, REF1, COMMENT1) - gAtt2 = set_global_atts(OBSNAME, TIER, SOURCE, REF2, COMMENT2) - gatt3 = set_global_atts(OBSNAME, TIER, SOURCE, REF3, COMMENT3) - - ; Write temperature time-series - DATESTR = YEAR1 + "01-" + YEAR2 + "12" - fout = output_dir_path + \ - str_join((/"OBS", OBSNAME, TYPE1, VERSION, \ - MIP, VAR, DATESTR/), "_") + ".nc" - write_nc(fout, VAR, output1, bounds1, gAtt1) - w = addfile(fout, "w") - w->height = height - delete(w) - delete(gAtt1) - delete(bounds1) - delete(output1) - - ; Write temperature anomaly time-series - DATESTR = YEAR1 + "01-" + YEAR2 + "12" - fout = output_dir_path + \ - str_join((/"OBS", OBSNAME, TYPE2, VERSION, \ - MIP, "tasa", DATESTR/), "_") + ".nc" - write_nc(fout, "tasa", output2, bounds2, gAtt2) - w = addfile(fout, "w") - delete(w) - delete(gAtt2) - delete(bounds2) - delete(output2) - - ; Write temperature climatology - DATESTR = "197601-197612" - fout = output_dir_path + \ - str_join((/"OBS", OBSNAME, TYPE3, VERSION, \ - MIP, VAR, DATESTR/), "_") + ".nc" - write_nc(fout, VAR, output3, bounds3, gatt3) - w = addfile(fout, "w") - w->height = height - delete(w) - delete(gatt3) - delete(bounds3) - delete(output3) - -end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_HadISST.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_HadISST.ncl deleted file mode 100644 index ed9a4a0bc7..0000000000 --- a/esmvaltool/utils/cmorizers/obs/cmorize_obs_HadISST.ncl +++ /dev/null @@ -1,135 +0,0 @@ -; ############################################################################# -; ESMValTool CMORizer for HadISST data -; ############################################################################# -; -; Tier -; Tier 2: other freely-available dataset. -; -; Source -; http://www.metoffice.gov.uk/hadobs/hadisst/data/download.html -; -; Last access -; 20190208 -; -; Download and processing instructions -; Download and unzip "HadISST_ice.nc.gz" and "HadISST_sst.nc.gz". -; -; Modification history -; 20190208-A_hass_bg: adapted to v2. -; 20180530-A_righ_ma: fixed coordinates and metadata. -; 20170217-A_senf_da: modified so that SST fields are also written as 'tos'. -; 20150422-A_laue_ax: written. -; -; ############################################################################ -loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") - -begin - - ; Script name (for logger) - DIAG_SCRIPT = "cmorize_obs_HadISST.ncl" - - ; Source name - OBSNAME = "HadISST" - - ; Tier - TIER = 2 - - ; Period - YEAR1 = 1870 - YEAR2 = 2017 - - ; Selected variable (standard name) - VAR = (/"ts", "tos", "sic"/) - - ; Name in the raw data - NAME = (/"sst", "sst", "ice"/) - - ; MIP - MIP = (/"Amon", "Omon", "OImon"/) - - ; Frequency - FREQ = (/"mon", "mon", "mon"/) - - ; CMOR table - CMOR_TABLE = getenv("esmvaltool_root") + \ - "/cmor/tables/cmip5/Tables/CMIP5_" + MIP - - ; Type - TYPE = "reanaly" - - ; Version - VERSION = "1" - - ; Global attributes - SOURCE = "http://www.metoffice.gov.uk/hadobs/hadisst/data/download.html" - REF = "Rayner et al., J. Geophys. 
Res., doi:10.1029/2002JD002670, 2013" - COMMENT = "" - -end - -begin - - ; Loop over variables - do vv = 0, dimsizes(VAR) - 1 - - log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") - - ; Read variables - fname = input_dir_path + "HadISST_" + NAME(vv) + ".nc" - f = addfile(fname, "r") - if (NAME(vv).eq."sst") then - output = f->sst - output@_FillValue = -1000. - end if - if (NAME(vv).eq."ice") then - output = f->sic - end if - - ; Convert units - if (isStrSubset(NAME(vv), "sst")) then - output = output + 273.15 ; [degC] --> [K] - output@units = "K" - end if - if (isStrSubset(NAME(vv), "ice")) then - output = output * 100. ; [1] --> [%] - output@units = "%" - end if - - ; Extract time period - date = cd_calendar(output&time, 0) - idx = ind(date(:, 0).ge.YEAR1 .and. date(:, 0).le.YEAR2) - output := output(idx, :, :) - - ; Format coordinates - output!0 = "time" - output!1 = "lat" - output!2 = "lon" - format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ(vv)) - - ; Set variable attributes - tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) - delete(output) - output = tmp - delete(tmp) - - ; Calculate coordinate bounds - bounds = guess_coord_bounds(output, FREQ(vv)) - - ; Set global attributes - gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) - - ; Output file - DATESTR = YEAR1 + "01-" + YEAR2 + "12" - fout = output_dir_path + \ - str_join((/"OBS", OBSNAME, TYPE, VERSION, \ - MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" - - ; Write variable - write_nc(fout, VAR(vv), output, bounds, gAtt) - delete(gAtt) - delete(output) - delete(bounds) - - end do - -end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_MODIS.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_MODIS.ncl deleted file mode 100644 index 5bfcfe1bc6..0000000000 --- a/esmvaltool/utils/cmorizers/obs/cmorize_obs_MODIS.ncl +++ /dev/null @@ -1,247 +0,0 @@ -; ############################################################################# -; ESMValTool CMORizer for MODIS data -; ############################################################################# -; -; Tier -; Tier 3: restricted dataset. -; -; Source -; https://ladsweb.modaps.eosdis.nasa.gov/search/order -; -; Last access -; 20190209 -; -; Download and processing instructions -; In Products: select "MODIS Aqua", "Collection 6.1" and -; "L3 Atmosphere Product", click on MYD08_M3. -; In Time: select from 2000-01-01 to today. -; In Location: skip, the global domain will be applied. -; In Files: select all. -; Submit the order. -; A registration is required to download the data. -; -; Caveats -; clwvi and clivi data are in-cloud values whereas CMIP5 models provide -; grid-box averages --> multiply MODIS clwvi and clivi values with cloud -; fraction as a first guess -; -; Modification history -; 20180209-A_righ_ma: fixed bug in lwpStderr. -; 20180209-A_hass_bg: adapted to v2. -; 20180810-A_righ_ma: fix minor calendar issue. -; 20180806-A_righ_ma: code cleaning. -; 20170116-A-laue_ax: using cirrus fraction to convert lwp and iwp -; from in-cloud values to gridbox averages -; (test). -; 20160408-A-laue_ax: added processing of uncertainties -; removed suffixes for variables names. -; 20151118-A-laue_ax: bugfix: added unit conversion -; clivi, clwvi: g/m2 --> kg/m2 -; clt: frac --> % -; 20150430-eval_ma: written. 
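The in-cloud-to-grid-box caveat above is implemented further down by estimating the liquid cloud fraction from the total and cirrus fractions under a random-overlap assumption, ctot = 1 - (1 - cif) * (1 - lif), and then scaling the in-cloud water paths. A minimal numpy sketch of that estimate (all input values are invented for illustration):

```python
import numpy as np

# Invented total (ctot) and cirrus/ice (cif) cloud fractions [0-1]
ctot = np.array([0.80, 0.50, 0.95])
cif = np.array([0.30, 0.10, 0.90])

# Random overlap, ctot = 1 - (1 - cif) * (1 - lif),
# solved for the liquid fraction lif
cif = np.where(cif > 0.999, np.nan, cif)  # guard against division by ~0
lif = 1.0 - (1.0 - ctot) / (1.0 - cif)
lif = np.clip(lif, 0.0, None)             # clamp negative estimates to 0

# Scale an invented in-cloud liquid water path [g/m2] to a grid-box
# average and convert to [kg/m2], as the script does for clwvi
lwp_incloud = np.array([120.0, 80.0, 200.0])
lwp_gridbox = 0.001 * lwp_incloud * lif
print(lif, lwp_gridbox)
```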
-; -; ############################################################################# -loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") - -begin - - ; Script name (for logger) - DIAG_SCRIPT = "cmorize_obs_MODIS.ncl" - - ; Source name - OBSNAME = "MODIS" - - ; Tier - TIER = 3 - - ; Selected variable (standard name) - VAR = (/"clwvi", \ - "clivi", \ - "clt", \ - "lwpStderr", \ - "iwpStderr", \ - "od550aer"/) - - ; Name in the raw data - NAME = (/"Cloud_Water_Path_Liquid_Mean_Mean", \ - "Cloud_Water_Path_Ice_Mean_Mean", \ - "Cloud_Fraction_Mean_Mean", \ - "Cloud_Water_Path_Liquid_Mean_Uncertainty", \ - "Cloud_Water_Path_Ice_Mean_Uncertainty", \ - "AOD_550_Dark_Target_Deep_Blue_Combined_Mean_Mean"/) - - ; MIP - MIP = (/"Amon", "Amon", "Amon", "Amon", "Amon", "aero"/) - - ; Frequency - FREQ = (/"mon", "mon", "mon", "mon", "mon", "mon"/) - - ; Version - VERSION = "MYD08_M3" - - ; CMOR table - CMOR_TABLE = getenv("esmvaltool_root") + "/cmor/tables/" + \ - (/"cmip5/Tables/CMIP5_Amon", \ - "cmip5/Tables/CMIP5_Amon", \ - "cmip5/Tables/CMIP5_Amon", \ - "custom/CMOR_lwpStderr.dat", \ - "custom/CMOR_iwpStderr.dat", \ - "cmip5/Tables/CMIP5_aero"/) - - ; Type - TYPE = "sat" - - ; Global attributes - SOURCE = "https://ladsweb.modaps.eosdis.nasa.gov/search/order" - REF = "" - COMMENT = "" - -end - -begin - - ; List of files - FILES = systemfunc("ls -1 " + input_dir_path + VERSION + ".A*.hdf") - - do ff = 0, dimsizes(FILES) - 1 - - fin = addfile(FILES(ff), "r") - - ; Get time - infile = systemfunc("basename " + FILES(ff)) - date = yyyyddd_to_yyyymmdd(toint(str_get_cols(infile, 10, 16))) - year = toint(str_get_cols(tostring(date), 0, 3)) - month = toint(str_get_cols(tostring(date), 4, 5)) - dm = days_in_month(year, month) - - ; Loop over variables to fetch from input file - do vv = 0, dimsizes(VAR) - 1 - - invar = fin->$NAME(vv)$ - invar_fv = invar@_FillValue - invar_coords = invar - invar := tofloat(invar) - invar := where(invar.eq.tofloat(invar_fv), \ - default_fillvalue("float"), invar) - - ; Special case clwvi as the sum lwp + iwp - if (VAR(vv).eq."clwvi") then - if (NAME(vv).ne."Cloud_Water_Path_Liquid_Mean_Mean") then - error_msg("f", DIAG_SCRIPT, "", "cannot calculate clwvi") - end if - - ; Read cirrus fraction - ; cfin = fin->Cirrus_Fraction_SWIR_FMean - cfin = fin->Cirrus_Fraction_Infrared_FMean - cif = tofloat(cfin * cfin@scale_factor + cfin@add_offset) - ; liquid fraction is estimated assuming random overlap, i.e. - ; ctot = 1 - (1 - cif) * (1 - lif) - ; --> lif = 1 - (1 - ctot) / (1 - cif) - delete(cfin) - cfin = fin->Cloud_Fraction_Mean_Mean - ctot = tofloat(cfin * cfin@scale_factor + cfin@add_offset) - delete(cfin) - cif = where(cif.gt.0.999, cif@_FillValue, cif) - lif = 1.0 - (1.0 - ctot) / (1.0 - cif) - lif = where(lif.lt.0, 0, lif) - tmpvar = fin->Cloud_Water_Path_Ice_Mean_Mean ; read ice water path - tmpvar_fv = tmpvar@_FillValue - tmpvar := tofloat(tmpvar) - tmpvar := where(tmpvar.eq.tofloat(tmpvar_fv), \ - default_fillvalue("float"), \ - tmpvar) - tmpvar = tmpvar * cif ; convert iwp in-cloud value to gridbox avg - invar = invar * lif ; convert lwp in-cloud value to grid-box avg - invar = invar + tmpvar ; clwvi = lwp + iwp - delete(tmpvar) - delete(lif) - delete(cif) - invar = 0.001 * invar ; [g/m2] --> [kg/m2] - end if - - ; lwp and iwp are in-cloud values - ; convert lwp/iwp to grid-box averages by multiplying with - ; average cloud fraction (not optimum but best we can do at the moment) - if (any((/"clivi", "iwpStderr", "lwpStderr"/) .eq. 
VAR(vv))) then - - ; Read cirrus fraction (0-1) - ; cfin = fin->Cirrus_Fraction_SWIR_FMean - cfin = fin->Cirrus_Fraction_Infrared_FMean - cf = tofloat(cfin * cfin@scale_factor + cfin@add_offset) - delete(cfin) - if (VAR(vv).eq."lwpStderr") then - cfin = fin->Cloud_Fraction_Mean_Mean - ctot = tofloat(cfin * cfin@scale_factor + cfin@add_offset) - delete(cfin) - cif = where(cf.gt.0.999, cf@_FillValue, cf) - cf = 1.0 - (1.0 - ctot) / (1.0 - cif) - cf = where(cf.lt.0, 0, cf) - delete(cif) - delete(ctot) - end if - invar = invar * cf ; "grid-box average" lwp/iwp - delete(cf) - invar = 0.001 * invar ; [g/m2] --> [kg/m2] - end if - - invar@_FillValue = default_fillvalue("float") - copy_VarCoords(invar_coords, invar) - if (isatt(invar_coords, "scale_factor")) then - invar = invar * tofloat(invar_coords@scale_factor) - end if - if (isatt(invar_coords, "add_offset")) then - invar = invar + tofloat(invar_coords@add_offset) - end if - - if (VAR(vv).eq."clt") then - invar = 100.0 * invar ; [1] --> [%] - end if - - ; Create output variable - lat = fin->YDim - lon = fin->XDim - output = new((/1, dimsizes(lat), dimsizes(lon)/), float) - output!0 = "time" - output!1 = "lat" - output!2 = "lon" - output&time = cd_inv_calendar(year, month, 15, 0, 0, 0, TUNITS, 0) - output&lat = lat - output&lon = lon - output(0, :, :) = (/invar/) - delete(invar) - delete(invar_coords) - - ; Format coordinates - format_coords(output, year + sprinti("%0.2i", month) + "01", \ - year + sprinti("%0.2i", month) + dm, FREQ(vv)) - - ; Set variable attributes - tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) - delete(output) - output = tmp - delete(tmp) - - ; Calculate coordinate bounds - bounds = guess_coord_bounds(output, FREQ(vv)) - - ; Set global attributes - gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) - - ; Output file - DATESTR = \ - year + sprinti("%0.2i", month) + "-" + year + sprinti("%0.2i", month) - fout = output_dir_path + \ - str_join((/"OBS", OBSNAME, TYPE, str_sub_str(VERSION, "_", "-"), \ - MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" - - ; Write variable - write_nc(fout, VAR(vv), output, bounds, gAtt) - delete(gAtt) - delete(output) - delete(bounds) - - end do - - end do - -end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_NCEP.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_NCEP.ncl deleted file mode 100644 index 39926d7df7..0000000000 --- a/esmvaltool/utils/cmorizers/obs/cmorize_obs_NCEP.ncl +++ /dev/null @@ -1,253 +0,0 @@ -; ############################################################################# -; ESMValTool CMORizer for NCEP data -; ############################################################################# -; -; Tier -; Tier 2: other freely-available dataset. -; -; Source -; http://www.esrl.noaa.gov/psd/data/gridded/data.ncep.reanalysis.html -; -; Last access -; 20190204 -; -; Download and processing instructions -; To facilitate the download, the links to the ftp server are provided. -; Since the filenames are sometimes identical across different datasets, -; save the data in two subdirectories in input_dir_path.
-; Subdirectory pressure/: -; ftp://ftp.cdc.noaa.gov/Datasets/ncep.reanalysis.derived/pressure/ -; air.mon.mean.nc -; hgt.mon.mean.nc -; rhum.mon.mean.nc -; shum.mon.mean.nc -; uwnd.mon.mean.nc -; vwnd.mon.mean.nc -; omega.mon.mean.nc -; ftp://ftp.cdc.noaa.gov/Datasets/ncep.reanalysis.dailyavgs/pressure/ -; uwnd.????.nc -; vwnd.????.nc -; Subdirectory surface/: -; ftp://ftp.cdc.noaa.gov/Datasets/ncep.reanalysis.derived/surface/ -; air.mon.mean.nc -; ftp://ftp.cdc.noaa.gov/Datasets/ncep.reanalysis.derived/surface_gauss/ -; prate.mon.mean.nc -; ftp://ftp.cdc.noaa.gov/Datasets/ncep.reanalysis.dailyavgs/surface_gauss/ -; prate.sft.gauss.????.nc -; ftp://ftp.cdc.noaa.gov/Datasets/ncep.reanalysis.dailyavgs/other_gauss/ -; ulwrf.ntat.gauss.????.nc -; -; Select the sections "Pressure" and "Surface" and download the variables -; listed below. Since raw data on pressure levels and for surface have the -; same file and variable name, save the data in two different subdirectories -; "pressure" and "surface" in input_dir_path. -; Specify the time range of the data as YEAR1-YEAR2 below, considering only -; complete years (Jan to Dec). -; -; Modification history -; 20190204-A_righ_ma: merged with NCEP-daily and adapted to v2. -; 20140128-A_righ_ma: written. -; -; ############################################################################# -loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") - -begin - - ; Script name (for logger) - DIAG_SCRIPT = "cmorize_obs_NCEP.ncl" - - ; Source name - OBSNAME = "NCEP" - - ; Tier - TIER = 2 - - ; Period - YEAR1 = 1948 - YEAR2 = 2018 - - ; Selected variable (standard name) - VAR = (/"ta", "zg", "hur", "hus", "ua", \ - "va", "wap", "tas", "pr", \ - "ua", "va", "pr", "rlut"/) - - ; Name in the raw data - NAME = (/"air", "hgt", "rhum", "shum", "uwnd", \ - "vwnd", "omega", "air", "prate", \ - "uwnd", "vwnd", "prate", "ulwrf"/) - - ; Subdirectory - SUBDIR = (/"pressure/", "pressure/", "pressure/", "pressure/", "pressure/", \ - "pressure/", "pressure/", "surface/", "surface/", \ - "pressure/", "pressure/", "surface/", "surface/"/) - - ; Expected units (according to CMOR standard) - EXPUNITS = (/"K", "m", "%", "1", "m/s", \ - "m/s", "Pascal/s", "K", "Kg/m^2/s", \ - "m/s", "m/s", "Kg/m^2/s", "W/m^2"/) - - ; MIP - MIP = (/"Amon", "Amon", "Amon", "Amon", "Amon", \ - "Amon", "Amon", "Amon", "Amon", \ - "day", "day", "day", "day"/) - - ; Frequency - FREQ = (/"mon", "mon", "mon", "mon", "mon", \ - "mon", "mon", "mon", "mon", \ - "day", "day", "day", "day"/) - - ; CMOR tables - CMOR_TABLE = getenv("esmvaltool_root") + \ - "/cmor/tables/cmip5/Tables/CMIP5_" + MIP - - ; Type - TYPE = "reanaly" - - ; Version - VERSION = "1" - - ; Global attributes - SOURCE = "http://www.esrl.noaa.gov/psd/data/gridded/" + \ - "data.ncep.reanalysis.html" - REF = "Kalnay et al., B. Am. Meteorol. 
Soc., " + \ - "doi:10.1175/1520-0477(1996)077<0437:TNYRP>2.0.CO;2, 1996" - COMMENT = "" - -end - -begin - - ; Loop over variables - do vv = 0, dimsizes(VAR) - 1 - - log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") - - do yy = YEAR1, YEAR2 - - if (isStrSubset(MIP(vv), "mon")) then - fname = input_dir_path + SUBDIR(vv) + NAME(vv) + ".mon.mean.nc" - end if - - if (isStrSubset(MIP(vv), "day")) then - flist = systemfunc("ls " + input_dir_path + SUBDIR(vv) + NAME(vv) + \ - ".*" + yy + ".nc") - fname = flist(0) - delete(flist) - end if - - f = addfile(fname, "r") - tmp = f->$NAME(vv)$ - delete(fname) - delete(f) - fill_start = num(ismissing(tmp)) - - ; Extract time range - tmp&time@calendar = "standard" - date = cd_calendar(tmp&time, 0) - idx = ind(date(:, 0).eq.yy) - if (dimsizes(dimsizes(tmp)).eq.4) then - output = tmp(idx, :, :, :) - else - output = tmp(idx, :, :) - end if - delete(idx) - delete(tmp) - delete(date) - - ; Unpack variable according to metadata information - if (isatt(output, "scale_factor") .or. \ - isatt(output, "add_offset")) then - tmp = output * output@scale_factor + output@add_offset - copy_VarMeta(output, tmp) - delete(output) - output = tmp - delete(tmp) - end if - - ; Convert units - if (any(VAR(vv).eq.(/"ta", "tas"/)) .and. \ - output@units.eq."degC") then - output = output + 273.15 - output@units = "K" - end if - if (VAR(vv).eq."hus" .and. output@units.eq."grams/kg") then - output = output / 1000. - output@units = "1" - end if - if (output@units.ne.EXPUNITS(vv)) then - error_msg("f", DIAG_SCRIPT, "", \ - "possibly wrong input units for " + VAR(vv)) - end if - - rank = dimsizes(dimsizes(output)) - output!0 = "time" - if (rank.eq.4) then - output!1 = "plev" - output!2 = "lat" - output!3 = "lon" - output&plev = output&plev * 100. 
; [mb] --> [Pa] - elseif (rank.eq.3) - output!1 = "lat" - output!2 = "lon" - end if - - ; Format coordinates - format_coords(output, yy + "0101", yy + "1231", FREQ(vv)) - - ; Set variable attributes - tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) - delete(output) - output = tmp - delete(tmp) - - ; Calculate coordinate bounds - bounds = guess_coord_bounds(output, FREQ(vv)) - - ; Set global attributes - gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) - - ; Check fill values - fill_end = num(ismissing(output)) - if (fill_start.ne.fill_end) then - error_msg("f", DIAG_SCRIPT, "", \ - "missing values lost during conversion") - end if - delete(fill_start) - delete(fill_end) - - ; Output file - DATESTR = yy + "01-" + yy + "12" - fout = output_dir_path + \ - str_join((/"OBS", OBSNAME, TYPE, VERSION, \ - MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" - - ; Add height coordinate to tas variable (required by the new backend) - if (VAR(vv).eq."tas") then - output@coordinates = "height" - end if - - ; Write variable - write_nc(fout, VAR(vv), output, bounds, gAtt) - delete(gAtt) - delete(output) - delete(bounds) - - ; Add height coordinate to tas variable (required by the new backend) - if (VAR(vv).eq."tas") then - height = 2.d - height!0 = "ncl_scalar" - height@units = "m" - height@axis = "Z" - height@positive = "up" - height@long_name = "height" - height@standard_name = "height" - w = addfile(fout, "w") - w->height = height - delete(w) - end if - - end do - - end do - -end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_NIWA-BS.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_NIWA-BS.ncl deleted file mode 100644 index 78cbb15b40..0000000000 --- a/esmvaltool/utils/cmorizers/obs/cmorize_obs_NIWA-BS.ncl +++ /dev/null @@ -1,117 +0,0 @@ -; ############################################################################# -; ESMValTool CMORizer for NIWA-BS data -; ############################################################################# -; -; Tier -; Tier 3: restricted dataset. -; -; Source -; http://www.bodekerscientific.com/data/total-column-ozone -; -; Last access -; 20190207 -; -; Download and processing instructions -; To get the access data send an email to datasets@bodekerscientific.com -; Download all files from -; ftp://ftp.bodekerscientific.com/CombinedTCOV3.3/Monthly/Patched/NetCDF/ -; Newer versions may become available over time, but make sure to download -; the patched one. Only complete years should be downloaded. -; -; Modification history -; 20190207-A_righ_ma: renamed to NIWA-BS and adapted to v2. -; 20140528-A_gott_kl: written. -; -; ############################################################################# -loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") - -begin - - ; Script name (for logger) - DIAG_SCRIPT = "cmorize_obs_NIWA-BS.ncl" - - ; Source name - OBSNAME = "NIWA-BS" - - ; Tier - TIER = 3 - - ; Period - YEAR1 = 1979 - YEAR2 = 2016 - - ; Selected variable (standard name) - VAR = (/"toz", "tozStderr"/) - - ; Name in the raw data - NAME = (/"tco", "tco_uncert"/) - - ; MIP - MIP = (/"Amon", "Amon"/) - - ; Frequency - FREQ = (/"mon", "mon"/) - - ; CMOR table - CMOR_TABLE = getenv("esmvaltool_root") + "/cmor/tables/custom/CMOR_" + \ - VAR + ".dat" - - ; Type - TYPE = "sat" - - ; Version - VERSION = "V3.3" - - ; Global attributes - SOURCE = "http://www.bodekerscientific.com/data/total-column-ozone" - REF = "Bodeker et al., Atmos. Chem. 
Phys., doi:10.5194/acp-5-2603-2005, 2005" - COMMENT = "" - -end - -begin - - files = systemfunc("ls " + input_dir_path + \ - "NIWA-BS_CombinedTCO_" + VERSION + \ - "_????_Monthly_Patched.nc") - - do vv = 0, dimsizes(VAR) - 1 - - log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") - - f = addfiles(files, "r") - output = f[:]->$NAME(vv)$ - - ; Format coordinates - output!0 = "time" - output!1 = "lat" - output!2 = "lon" - format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ(vv)) - - ; Set variable attributes - tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) - delete(output) - output = tmp - delete(tmp) - - ; Calculate coordinate bounds - bounds = guess_coord_bounds(output, FREQ(vv)) - - ; Set global attributes - gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) - - ; Output file - DATESTR = YEAR1 + "01-" + YEAR2 + "12" - fout = output_dir_path + \ - str_join((/"OBS", OBSNAME, TYPE, str_sub_str(VERSION, "V", "v"), \ - MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" - - ; Write variable - write_nc(fout, VAR(vv), output, bounds, gAtt) - delete(gAtt) - delete(output) - delete(bounds) - - end do - -end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_PATMOS-x.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_PATMOS-x.ncl deleted file mode 100644 index 0cb2a861b5..0000000000 --- a/esmvaltool/utils/cmorizers/obs/cmorize_obs_PATMOS-x.ncl +++ /dev/null @@ -1,223 +0,0 @@ -; ############################################################################# -; ESMValTool CMORizer for PATMOS-x data -; ############################################################################# -; -; Tier -; Tier 2: other freely-available dataset. -; -; Source -; https://www.ncdc.noaa.gov/cdr/atmospheric/avhrr-cloud-properties-patmos-x -; -; Last access -; 20190210 -; -; Download and processing instructions -; Click on Download and download all the NOAA data, excluding the -; preliminary, e.g. with: -; wget -r --accept '*NOAA*.nc' --reject '*preliminary*' -; Put all files in input_dir_path (no subdirectories with years). -; Select only complete years for both ascending and descending orbit. -; -; Caveats -; The data are processed by calculating the average of the ascending and the -; descending orbit on each day. Multiple files are available for some days, -; in this case the most recent version (NOAA-vv) is chosen. - -; Modification history -; 20190208-A_righ_ma: written. 
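The orbit averaging described in the caveats above first fills gaps in each orbit with valid values from the other, so that a missing overpass does not drag the daily mean towards the fill value. A small numpy sketch of that cross-filling logic (the two orbit fields are fabricated for illustration):

```python
import numpy as np

nan = np.nan
# Fabricated ascending/descending retrievals with gaps
xasc = np.array([[0.2, nan], [0.6, nan]])
xdes = np.array([[0.4, 0.8], [nan, nan]])

# Fill gaps in each orbit from the other, then average; a cell stays
# missing only where both orbits are missing
xasc_f = np.where(np.isnan(xasc), xdes, xasc)
xdes_f = np.where(np.isnan(xdes), xasc, xdes)
daily = 0.5 * (xasc_f + xdes_f)
print(daily)  # [[0.3 0.8] [0.6 nan]]
```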
-; -; ############################################################################# -loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") - -begin - - ; Script name (for logger) - DIAG_SCRIPT = "cmorize_obs_PATMOS-x.ncl" - - ; Source name - OBSNAME = "PATMOS-x" - - ; Tier - TIER = 2 - - ; Period - YEAR1 = 1982 - YEAR2 = 2016 - - ; Selected variable (standard name) - VAR = (/"clt"/) - - ; Name in the raw data - NAME = (/"cloud_fraction"/) - - ; MIP - MIP = (/"Amon"/) - - ; Frequency - FREQ = (/"mon"/) - - ; CMOR table - CMOR_TABLE = getenv("esmvaltool_root") + \ - "/cmor/tables/cmip5/Tables/CMIP5_" + MIP - - ; Type - TYPE = "sat" - - ; Version - VERSION = "NOAA" - - ; Global attributes - SOURCE = "https://www.ncdc.noaa.gov/cdr/atmospheric/avhrr-cloud-" + \ - "properties-patmos-x" - REF = "Heidinger et al., NOAA National Centers for Environmental " + \ - "Information, doi:10.7289/V5348HCK, last access: 10 February 2019" - COMMENT = "" - -end - -begin - - ; Read coordinates - files = systemfunc("ls " + input_dir_path + "patmosx_*" + YEAR1 + "*.nc") - f = addfile(files(0), "r") - tmp = f->latitude - lat = tmp * tmp@scale_factor + tmp@add_offset - nlat = dimsizes(lat) - delete(tmp) - tmp = f->longitude - lon = tmp * tmp@scale_factor + tmp@add_offset - nlon = dimsizes(lon) - delete(tmp) - delete(files) - delete(f) - - do vv = 0, dimsizes(VAR) - 1 - - log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") - - do yy = YEAR1, YEAR2 - - ; Define output monthly-mean array - output = new((/12, nlat, nlon/), float) - output!0 = "time" - output!1 = "lat" - output!2 = "lon" - output&time = create_timec(yy, yy) - output&lat = lat - output&lon = lon - - do mm = 1, 12 - - ; Number of days - nd = days_in_month(yy, mm) - - ; Define local array - output_temp = new((/nd, nlat, nlon/), float) - - ; Date string for this month - yyyymm = yy + sprinti("%0.2i", mm) - - do dd = 1, nd - - ; Date string for this day - yyyymmdd = yy + sprinti("%0.2i", mm) + sprinti("%0.2i", dd) - - ; Ascending orbit - files_asc = systemfunc("ls " + input_dir_path + \ - "patmosx_v??r??_NOAA-??_asc_d" + \ - yyyymm + "??_c*.nc | grep asc_d" + yyyymmdd) - if (.not.all(ismissing(files_asc))) then - ; Read most recent file - f = addfile(files_asc(dimsizes(files_asc) - 1), "r") - tmp = f->$NAME(vv)$ - xasc = tmp * tmp@scale_factor + tmp@add_offset - delete(tmp) - end if - delete(files_asc) - - ; Descending orbit - files_des = systemfunc("ls " + input_dir_path + \ - "patmosx_v??r??_NOAA-??_des_d" + \ - yyyymm + "??_c*.nc | grep des_d" + yyyymmdd) - if (.not.all(ismissing(files_des))) then - ; Read most recent file - f = addfile(files_des(dimsizes(files_des) - 1), "r") - tmp = f->$NAME(vv)$ - xdes = tmp * tmp@scale_factor + tmp@add_offset - delete(tmp) - end if - delete(files_des) - - ; Skip if no data defined (output_temp will stay missing) - if (.not.isdefined("xasc") .and. 
.not.isdefined("xdes")) then - continue - end if - - if (.not.isdefined("xasc")) then - output_temp(dd - 1, :, :) = (/xdes/) - delete(xdes) - continue - end if - - if (.not.isdefined("xdes")) then - output_temp(dd - 1, :, :) = (/xasc/) - delete(xasc) - continue - end if - - ; Replace missing values in one orbit with valid values from the - ; other orbit, to avoid propagating missing values while averaging - xasc = where(ismissing(xasc), xdes, xasc) - xdes = where(ismissing(xdes), xasc, xdes) - - output_temp(dd - 1, :, :) = 0.5 * (xasc + xdes) - delete(xasc) - delete(xdes) - - end do ; day - - ; Monthly mean - output(mm - 1, :, :) = dim_avg_n(output_temp, 0) - delete(output_temp) - - end do ; month - - if (VAR(vv).eq."clt") then - output = 100. * output ; [1] --> [%] - end if - - ; Format coordinates - output!0 = "time" - output!1 = "lat" - output!2 = "lon" - format_coords(output, yy + "0101", yy + "1231", FREQ(vv)) - - ; Set variable attributes - tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) - delete(output) - output = tmp - delete(tmp) - - ; Calculate coordinate bounds - bounds = guess_coord_bounds(output, FREQ(vv)) - - ; Set global attributes - gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) - - ; Output file - DATESTR = yy + "01-" + yy + "12" - fout = output_dir_path + \ - str_join((/"OBS", OBSNAME, TYPE, VERSION, \ - MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" - - ; Write variable - write_nc(fout, VAR(vv), output, bounds, gAtt) - delete(gAtt) - delete(output) - delete(bounds) - - end do ; year - - end do ; variable - -end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_PYDUMMY.py b/esmvaltool/utils/cmorizers/obs/cmorize_obs_PYDUMMY.py deleted file mode 100644 index b19d90e79f..0000000000 --- a/esmvaltool/utils/cmorizers/obs/cmorize_obs_PYDUMMY.py +++ /dev/null @@ -1,10 +0,0 @@ -"""Make the cmorization using python.""" -import logging - -logger = logging.getLogger(__name__) - - -def cmorization(input_file_path, output_file_path): - """Mock example of python cmorization script.""" - logger.info("Input %s", input_file_path) - logger.info("Output %s", output_file_path) diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_UWisc.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_UWisc.ncl deleted file mode 100644 index f392273fcd..0000000000 --- a/esmvaltool/utils/cmorizers/obs/cmorize_obs_UWisc.ncl +++ /dev/null @@ -1,126 +0,0 @@ -; ############################################################################# -; ESMValTool CMORizer for UWisc data -; ############################################################################# -; -; Tier -; Tier 3: restricted dataset. -; -; Source -; Data provided by Ralf Bennartz. -; -; Last access -; 20150415 -; -; Download and processing instructions -; Contact Ralf Bennartz (Earth and Environmental Sciences, Vanderbilt -; University, USA). -; -; Modification history -; 20190208-A_righ_ma: adapted to v2. 
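All of these CMORizers write their output with the same str_join-based naming scheme, OBS_<dataset>_<type>_<version>_<mip>_<variable>_<period>.nc, which the tool later uses to locate the files. A one-line Python equivalent, with made-up field values:

```python
# Made-up fields mirroring the NCL str_join((/.../), "_") calls
fields = ["OBS", "UWisc", "sat", "v2", "Amon", "lwp", "198801-198812"]
print("_".join(fields) + ".nc")  # OBS_UWisc_sat_v2_Amon_lwp_198801-198812.nc
```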
-; -; ############################################################################# -loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") -begin - - ; Script name (for logger) - DIAG_SCRIPT = "cmorize_obs_UWisc.ncl" - - ; Source name - OBSNAME = "UWisc" - - ; Tier - TIER = 3 - - ; Period - YEAR1 = 1988 - YEAR2 = 2007 - - ; Selected variable (standard name) - VAR = (/"lwp", "lwpStderr"/) - - ; Name in the raw data - NAME = (/"LWP", "LWP_ERROR"/) - - ; Conversion factor - CONVERSION = (/1.e-3, 1.e-3/) - - ; MIP - MIP = (/"Amon", "Amon"/) - - ; Frequency - FREQ = (/"mon", "mon"/) - - ; CMOR table - CMOR_TABLE = getenv("esmvaltool_root") + \ - "/cmor/tables/custom/CMOR_" + VAR + ".dat" - - ; Type - TYPE = "sat" - - ; Version - VERSION = "v2" - - ; Global attributes - SOURCE = "Data provided by Ralf Bennartz (Vanderbilt University, USA)" - REF = "O'Dell et al., J. Clim., doi:10.1175/2007JCLI1958.1, 2008" - COMMENT = "" - -end - -begin - - do vv = 0, dimsizes(VAR) - 1 - - log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") - - do yr = YEAR1, YEAR2 - - fname = input_dir_path + "UWisc_LWPMEAN_" + yr + "_v2.nc" - - f = addfile(fname, "r") - output = (/f->$NAME(vv)$/) - - lat = (/f->lat/) - lon = (/f->lon/) - - ; Convert units - output = output * CONVERSION(vv) ; [g/m2] --> [kg/m2] - - ; Format coordinates - output!0 = "time" - output!1 = "lat" - output!2 = "lon" - output&time = create_timec(yr, yr) - output&lat = lat - output&lon = lon - format_coords(output, yr + "0101", yr + "1231", FREQ(vv)) - - ; Set variable attributes - tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) - delete(output) - output = tmp - delete(tmp) - - ; Calculate coordinate bounds - bounds = guess_coord_bounds(output, FREQ(vv)) - - ; Set global attributes - gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) - - ; Output file - DATESTR = yr + "01-" + yr + "12" - fout = output_dir_path + \ - str_join((/"OBS", OBSNAME, TYPE, VERSION, \ - MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" - - ; Write variable - write_nc(fout, VAR(vv), output, bounds, gAtt) - delete(gAtt) - delete(output) - delete(bounds) - - end do - - end do - -end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_WOA.py b/esmvaltool/utils/cmorizers/obs/cmorize_obs_WOA.py deleted file mode 100644 index b3ff379f81..0000000000 --- a/esmvaltool/utils/cmorizers/obs/cmorize_obs_WOA.py +++ /dev/null @@ -1,122 +0,0 @@ -""" -# ############################################################################# -# ESMValTool CMORizer for WOA data -# ############################################################################# -# -# Tier -# Tier 2: other freely-available dataset. -# -# Source -# https://data.nodc.noaa.gov/woa/WOA13/DATAv2/ -# -# Last access -# 20190131 -# -# Download and processing instructions -# Download the following files: -# temperature/netcdf/decav81B0/1.00/woa13_decav81B0_t00_01.nc -# salinity/netcdf/decav81B0/1.00/woa13_decav81B0_s00_01.nc -# oxygen/netcdf/all/1.00/woa13_all_o00_01.nc -# nitrate/netcdf/all/1.00/woa13_all_n00_01.nc -# phosphate/netcdf/all/1.00/woa13_all_p00_01.nc -# silicate/netcdf/all/1.00/woa13_all_i00_01.nc -# -# Modification history -# 20190131-A_pred_va: adapted to v2. -# 20190131-A_demo_le: written. 
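In the WOA script that follows, _fix_data rescales the nutrient and oxygen fields to mol m-3: the division by 1000 corresponds to micromol/l -> mol/m3, and the factor 44.661 first converts ml/l of dissolved oxygen to micromol/l. A minimal sketch of those conversions with invented values:

```python
import numpy as np

# Invented raw values for the conversions performed in _fix_data below
no3_raw = np.array([10.0, 20.0])  # nutrients [micromol/l]
o2_raw = np.array([5.0, 6.5])     # dissolved oxygen [ml/l]

# 1 micromol/l = 1e-6 mol/l = 1e-3 mol/m3, hence the division by 1000
no3 = no3_raw / 1000.0            # -> [mol/m3]

# 1 ml/l of O2 ~ 44.661 micromol/l, then micromol/l -> mol/m3
o2 = o2_raw * 44.661 / 1000.0     # -> [mol/m3]
print(no3, o2)
```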
-# -# ############################################################################# -""" - -import datetime -import logging -import os - -import iris -from cf_units import Unit - -from .utilities import (_add_metadata, - _convert_timeunits, - _fix_coords, - _read_cmor_config, - _roll_cube_data, - _save_variable) - -logger = logging.getLogger(__name__) - -# used vars -ALL_VARS = ['thetao', 'so', 'no3', 'po4', 'si', 'o2'] - -# all years to be analyzed -ALL_YEARS = [ - 2000, -] - -# read in CMOR configuration -cfg = _read_cmor_config('WOA.yml') -proj = cfg['proj'] -timestamp = datetime.datetime.utcnow() -timestamp_format = "%Y-%m-%d %H:%M:%S" -now_time = timestamp.strftime(timestamp_format) -proj['metadata_attributes']['CMORcreated'] = now_time -VAR_TO_FILENAME = cfg['VAR_TO_FILENAME'] -FIELDS = cfg['FIELDS'] -STANDARD_NAMES = cfg['STANDARD_NAMES'] -LONG_NAMES = cfg['LONG_NAMES'] - - -def _fix_metadata(cube, var): - """Fix all aspects of metadata for different vars.""" - mol_m3 = ['si', 'po4', 'no3', 'o2'] - if var in mol_m3: - cube.units = Unit('mol m-3') - if var == 'thetao': - cube.convert_units(Unit('kelvin')) - if var == 'so': - cube.units = Unit('Unknown') - return cube - - -def _fix_data(cube, var): - """Specific data fixes for different variables.""" - mll_to_mol = ['po4', 'si', 'no3'] - if var in mll_to_mol: - cube.data = cube.data / 1000. # Convert from umol/l to mol/m^3 - if var == 'o2': - cube.data = cube.data * 44.661 / 1000. # Convert from ml/l to mol/m^3 - return cube - - -def extract_variable(var, raw_file, out_dir, yr): - """Extract to all vars.""" - cubes = iris.load(raw_file) - field = FIELDS[var] - for cube in cubes: - if cube.long_name == field: - cube.standard_name = STANDARD_NAMES[var] - cube.long_name = LONG_NAMES[var] - cube.var_name = var - _convert_timeunits(cube, yr) - _fix_coords(cube) - _roll_cube_data(cube, 180, -1) - _fix_data(cube, var) - _fix_metadata(cube, var) - _add_metadata(cube, proj) - _save_variable(cube, var, out_dir, yr, proj) - - -def cmorization(in_dir, out_dir): - """Cmorization func call.""" - logger.info("Starting cmorization for WOA OBS files: Tier2") - logger.info("Input data from: %s", in_dir) - logger.info("Output will be written to: %s", out_dir) - - # run the cmorization - for var in ALL_VARS: - if not os.path.exists(out_dir): - os.makedirs(out_dir) - for yr in ALL_YEARS: - file_suffix = str(yr)[-2:] + '_' + str(yr + 1)[-2:] + '.nc' - raw_file = os.path.join(in_dir, VAR_TO_FILENAME[var] + file_suffix) - logger.info("CMORizing var %s in file %s", var, raw_file) - extract_variable(var, raw_file, out_dir, yr) diff --git a/esmvaltool/utils/cmorizers/obs/utilities.py b/esmvaltool/utils/cmorizers/obs/utilities.py deleted file mode 100644 index 1b4d5ad79d..0000000000 --- a/esmvaltool/utils/cmorizers/obs/utilities.py +++ /dev/null @@ -1,170 +0,0 @@ -"""Utils module for Python cmorizers.""" -import logging -import os - -import iris -import numpy as np -from cf_units import Unit - -import yaml - -logger = logging.getLogger(__name__) - - -# read the associated dataset-specific config file -def _read_cmor_config(cmor_config): - """Read cmor configuration in a dict.""" - reg_path = os.path.join( - os.path.dirname(__file__), 'cmor_config', cmor_config) - with open(reg_path, 'r') as file: - cfg = yaml.safe_load(file) - return cfg - - -def _convert_timeunits(cube, start_year): - """Convert time axis from malformed Year 0.""" - # TODO any more weird cases? 
- if cube.coord('time').units == 'months since 0000-01-01 00:00:00': - real_unit = 'months since {}-01-01 00:00:00'.format(str(start_year)) - elif cube.coord('time').units == 'days since 0000-01-01 00:00:00': - real_unit = 'days since {}-01-01 00:00:00'.format(str(start_year)) - elif cube.coord('time').units == 'days since 1950-1-1': - real_unit = 'days since 1950-1-1 00:00:00' - else: - real_unit = cube.coord('time').units - cube.coord('time').units = real_unit - return cube - - -def _fix_dim_coordnames(cube): - """Perform a check on dim coordinate names.""" - # first check for CMOR standard coord; - for coord in cube.coords(): - # guess the CMOR-standard x, y, z and t axes if not there - coord_type = iris.util.guess_coord_axis(coord) - - if coord_type == 'T': - cube.coord(axis=coord_type).var_name = 'time' - cube.coord(axis=coord_type).attributes = {} - - if coord_type == 'X': - cube.coord(axis=coord_type).var_name = 'lon' - cube.coord(axis=coord_type).standard_name = 'longitude' - cube.coord(axis=coord_type).long_name = 'longitude coordinate' - cube.coord(axis=coord_type).units = Unit('degrees') - cube.coord(axis=coord_type).attributes = {} - - if coord_type == 'Y': - cube.coord(axis=coord_type).var_name = 'lat' - cube.coord(axis=coord_type).standard_name = 'latitude' - cube.coord(axis=coord_type).long_name = 'latitude coordinate' - cube.coord(axis=coord_type).units = Unit('degrees') - cube.coord(axis=coord_type).attributes = {} - - if coord_type == 'Z': - if cube.coord(axis=coord_type).var_name == 'depth': - cube.coord(axis=coord_type).standard_name = 'depth' - cube.coord(axis=coord_type).long_name = \ - 'ocean depth coordinate' - cube.coord(axis=coord_type).var_name = 'lev' - cube.coord(axis=coord_type).attributes['positive'] = 'down' - if cube.coord(axis=coord_type).var_name == 'pressure': - cube.coord(axis=coord_type).standard_name = 'air_pressure' - cube.coord(axis=coord_type).long_name = 'pressure' - cube.coord(axis=coord_type).var_name = 'air_pressure' - cube.coord(axis=coord_type).attributes['positive'] = 'up' - - return cube - - -def _fix_bounds(cube, dim_coord): - """Reset and fix all bounds.""" - if len(cube.coord(dim_coord).points) > 1: - if not cube.coord(dim_coord).has_bounds(): - cube.coord(dim_coord).guess_bounds() - else: - cube.coord(dim_coord).bounds = None - cube.coord(dim_coord).guess_bounds() - - if cube.coord(dim_coord).has_bounds(): - cube.coord(dim_coord).bounds = np.array( - cube.coord(dim_coord).bounds, dtype='float64') - return cube - - -def _fix_coords(cube): - """Fix the time units and values to CMOR standards.""" - # first fix any completely missing coord var names - _fix_dim_coordnames(cube) - # fix individual coords - for cube_coord in cube.coords(): - # fix time - if cube_coord.var_name == 'time': - logger.info("Fixing time...") - cube.coord('time').convert_units( - Unit('days since 1950-1-1 00:00:00', calendar='gregorian')) - _fix_bounds(cube, cube.coord('time')) - - # fix longitude - if cube_coord.var_name == 'lon': - logger.info("Fixing longitude...") - if cube.coord('longitude').points[0] < 0. and \ - cube.coord('longitude').points[-1] < 181.: - cube.coord('longitude').points = \ - cube.coord('longitude').points + 180. - _fix_bounds(cube, cube.coord('longitude')) - cube.attributes['geospatial_lon_min'] = 0. - cube.attributes['geospatial_lon_max'] = 360. 
- - # fix latitude - if cube_coord.var_name == 'lat': - logger.info("Fixing latitude...") - _fix_bounds(cube, cube.coord('latitude')) - - # fix depth - if cube_coord.var_name == 'lev': - logger.info("Fixing depth...") - _fix_bounds(cube, cube.coord('depth')) - - # fix air_pressure - if cube_coord.var_name == 'air_pressure': - logger.info("Fixing air pressure...") - _fix_bounds(cube, cube.coord('air_pressure')) - - # remove CS - cube.coord('latitude').coord_system = None - cube.coord('longitude').coord_system = None - - return cube - - -def _add_metadata(cube, proj): - """Complete the cmorized file with useful metadata.""" - logger.info("Add Global metadata...") - for att in proj['metadata_attributes']: - if att not in cube.metadata.attributes: - cube.metadata.attributes[att] = proj['metadata_attributes'][att] - - -def _roll_cube_data(cube, shift, axis): - """Roll a cube data on specified axis.""" - cube.data = np.roll(cube.data, shift, axis=axis) - return cube - - -def _save_variable(cube, var, outdir, year, proj, **kwargs): - """Saver function.""" - # CMOR standard - if not isinstance(year, list): - time_suffix = '-'.join([str(year) + '01', str(year) + '12']) - else: - yr1, yr2 = year - time_suffix = '-'.join([str(yr1) + '01', str(yr2) + '12']) - cmor_prefix = '_'.join([ - 'OBS', proj['dataset'], proj['realm'], proj['version'], - proj['frequency'][var], var - ]) - file_name = cmor_prefix + '_' + time_suffix + '.nc' - file_path = os.path.join(outdir, file_name) - logger.info('Saving: %s', file_path) - iris.save(cube, file_path, **kwargs) diff --git a/tests/integration/cmor/__init__.py b/esmvaltool/utils/color_tables/__init__.py similarity index 100% rename from tests/integration/cmor/__init__.py rename to esmvaltool/utils/color_tables/__init__.py diff --git a/esmvaltool/utils/color_tables/show_color_tables.py b/esmvaltool/utils/color_tables/show_color_tables.py new file mode 100644 index 0000000000..98209b822a --- /dev/null +++ b/esmvaltool/utils/color_tables/show_color_tables.py @@ -0,0 +1,195 @@ +"""Utility script for inspecting and converting ncl color tables.""" +# pylint: disable=import-outside-toplevel +import logging + +logger = logging.getLogger(__name__) + +NCL_SCRIPT = """ +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" + +begin + print("Autogenerated NCL script is running.") + wks = gsn_open_wks("pdf","{{ outdir }}/available_colormaps_for_ncl") + {% for n in list_of_snippets %}{{n}} {% endfor %} +end +""" + +COLOR_SNIPPET = """ + cmap := read_colormap_file("{{ path }}") + opt = True + opt@Frame = False + draw_color_palette(wks, cmap, opt) + gsn_text_ndc(wks, "{{ name }}", 0.5, 0.9, True) + frame(wks) +""" + + +def load_ncl_color_map(name, colorpath): + """Load ncl color map to a list that is returned.""" + def _format(content): + out = [] + for item in content.split("\n"): + item = item.strip() + if item and not ('ncolors' in item or item.startswith('#') + or item.startswith(';')): + out.append([int(elem) / 256 + for elem in item.split()[0:3]] + [1]) + return out + + filename = "{0}/{1}.rgb".format(colorpath, name) + import os + if not os.path.exists(filename): + raise ValueError("Path {0} does not exist.".format(filename)) + with open(filename, 'r') as ncl_color_map: + return _format(ncl_color_map.read()) + + +def get_color_map(name, colorpath): + """Convert colormap from ncl to python. 
+ + Parameters + ---------- + name: str + Name of ncl color map + Returns + ------- + matplotlib.colors.ListedColorMap object + """ + import matplotlib + import yaml + colors = load_ncl_color_map(name, colorpath) + logger.debug("RGB values for '%s':\n%s", name, yaml.dump(colors)) + return matplotlib.colors.ListedColormap(colors, name=name, N=None) + + +def list_ncl_color_maps(colorpath): + """Get list of all available ncl color maps.""" + import os + + def _format(name): + return os.path.splitext(os.path.basename(name))[0] + + out = [] + for (_, _, filenames) in os.walk(colorpath): + out.extend([ + _format(filename) for filename in filenames + if 'rgb' in filename.split('.') + ]) + return out + + +def plot_example_for_colormap(name, colorpath, outdir='./'): + """Create plots of given color map using python.""" + logger.info("Plotting example for '%s'", name) + import os + import matplotlib + matplotlib.use("Agg") # noqa + import matplotlib.pyplot as plt + import numpy as np + fig = plt.figure(1) + axis = fig.add_axes([0.1, 0.3, 0.5, 0.5]) + np.random.seed(12345678) + data = np.random.randn(30, 30) + psm = axis.pcolormesh(data, + cmap=get_color_map(name, colorpath), + rasterized=True, + vmin=-4, + vmax=4) + fig.colorbar(psm, ax=axis) + plt.savefig(os.path.join(outdir, "{0}.png".format(name))) + plt.close() + + +def main_plot_python_cm(colorpath, outpath): + """Execute functions for python plots.""" + for name in list_ncl_color_maps(colorpath): + plot_example_for_colormap(name, colorpath, outdir=outpath) + + +def main_plot_ncl_cm(colorpath, outpath): + """Execute functions for ncl plots.""" + from jinja2 import Template + t_color_snippet = Template(COLOR_SNIPPET) + template = Template(NCL_SCRIPT) + list_of_snippets = [] + import glob + import subprocess + import tempfile + import os + for path in glob.glob(colorpath + "/*rgb"): + _, tail = os.path.split(path) + list_of_snippets.append(t_color_snippet.render(path=path, name=tail)) + with tempfile.NamedTemporaryFile(mode='w', suffix='ncl') as fname: + fname.write( + template.render(list_of_snippets=sorted(list_of_snippets), + outdir=outpath)) + subprocess.check_call(["ncl", fname.name]) + + +class ColorTables(): + """Generate colormap samples for ESMValTool's default colormaps.""" + + def __init__(self): + logger.setLevel(logging.DEBUG) + console_handler = logging.StreamHandler() + console_handler.setLevel(logging.INFO) + console_handler.setFormatter( + logging.Formatter( + '%(asctime)s - %(name)s - %(levelname)s - %(message)s')) + logger.addHandler(console_handler) + self._colorpath = None + self._outpath = None + + def _prepare_paths(self, colorpath, outpath): + import os + if colorpath is None: + from esmvaltool.diag_scripts.shared.plot \ + import __file__ as plot_path + colorpath = os.path.join(os.path.dirname(plot_path), "rgb") + + if not os.path.isdir(colorpath): + logger.warning("Path '%s' is invalid", colorpath) + raise OSError + self._colorpath = colorpath + + if not os.path.isdir(outpath) and not os.path.exists(outpath): + logger.info("Creating directory '%s'", outpath) + os.mkdir(outpath) + self._outpath = outpath + + def python(self, colorpath=None, outpath="./"): + """ + Generate samples for Python colormaps. + + Create a series of png images with examples of ESMValTool's available + Python colormaps + + Parameters + ---------- + + colorpath: str + Folder to search for colormaps. Default is installed colormaps + outpath: str + Out directory. 
Default is the current directory + """ + self._prepare_paths(colorpath, outpath) + logger.info("Creating report with NCL") + main_plot_ncl_cm(self._colorpath, self._outpath) diff --git a/esmvaltool/utils/draft_release_notes.py b/esmvaltool/utils/draft_release_notes.py new file mode 100644 index 0000000000..a11b03a9ae --- /dev/null +++ b/esmvaltool/utils/draft_release_notes.py @@ -0,0 +1,231 @@ +"""Draft release notes. + +To use this tool, follow these steps: +1) `pip install pygithub` +2) Create an access token and store it in the file ~/.github_api_key, see: +https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line +3) set PREVIOUS_RELEASE to the date/time of the previous release in code below +4) Call the script passing the project to create release notes on: esmvalcore +or esmvaltool +""" +import datetime +from pathlib import Path +from zoneinfo import ZoneInfo + +import dateutil.parser +import esmvalcore +import fire + +import esmvaltool + +try: + from github import Github +except ImportError: + print("Please `pip install pygithub`") + +try: + GITHUB_API_KEY = Path("~/.github_api_key").expanduser().read_text( + encoding='utf-8').strip() +except FileNotFoundError: + print("Please create an access token and store it in the file " + "~/.github_api_key, see:\nhttps://help.github.com/en/github/" + "authenticating-to-github/creating-a-personal-access-token-" + "for-the-command-line") + +VERSION = { + 'esmvalcore': f"v{esmvalcore.__version__}", + 'esmvaltool': f"v{esmvaltool.__version__}" +} +GITHUB_REPO = { + 'esmvalcore': "ESMValGroup/ESMValCore", + 'esmvaltool': "ESMValGroup/ESMValTool", +} + +TIMEZONE = ZoneInfo("CET") + +PREVIOUS_RELEASE = { + 'esmvalcore': datetime.datetime(2023, 6, 6, 0, tzinfo=TIMEZONE), + 'esmvaltool': datetime.datetime(2023, 6, 20, 0, tzinfo=TIMEZONE), +} + +LABELS = { + 'esmvalcore': ( + 'backwards incompatible change', # important, keep at the top + 'deprecated feature', # important, keep at the top + 'bug', # important, keep at the top + 'api', + 'cmor', + 'containerization', + 'community', + 'dask', + 'deployment', + 'documentation', + 'fix for dataset', + 'installation', + 'iris', + 'preprocessor', + 'release', + 'testing', + 'UX', + 'variable derivation', + 'enhancement', # uncategorized, keep at the bottom + ), + 'esmvaltool': ( + 'backwards incompatible change', # important, keep at the top + 'deprecated feature', # important, keep at the top + 'bug', # important, keep at the top + 'community', + 'documentation', + 'diagnostic', + 'preprocessor', + 'observations', + 'testing', + 'installation', + 'enhancement', # uncategorized, keep at the bottom + ) +} + +TITLES = { + 'backwards incompatible change': 'Backwards incompatible changes', + 'deprecated feature': 'Deprecations', + 'bug': 'Bug fixes', + 'cmor': 'CMOR standard', + 'dask': 'Computational performance improvements', + 'diagnostic': 'Diagnostics', + 'fix for dataset': 'Fixes for datasets', + 'observations': 'Observational and re-analysis dataset support', + 'testing': 'Automatic testing', + 'api': 'Notebook API (experimental)', + 'enhancement': 'Improvements', +} + + +def draft_notes_since(project, previous_release_date=None, labels=None): + """Draft release notes containing the merged pull requests. + + Arguments + --------- + project: str + Project to draft release notes from. Valid options are esmvaltool and + esmvalcore + previous_release_date: datetime.datetime + date of the previous release + labels: list + list of GitHub labels that deserve separate sections + """ + project = project.lower() + if previous_release_date is None: + previous_release_date = PREVIOUS_RELEASE[project] + else: + previous_release_date = dateutil.parser.parse(previous_release_date) + if labels is None: + labels = LABELS[project] + + pulls = _get_pull_requests(project) + + lines = {label: [] for label in labels} + labelless_pulls = [] + print(f"The following PRs (updated after {previous_release_date}) are " + f"considered in the changelog") + print(f"Note: Unmerged PRs or PRs that have been merged before " + f"{previous_release_date} are not shown\n") + for pull in pulls: + if pull.updated_at.astimezone(TIMEZONE) < previous_release_date: + break + if (not pull.merged or + pull.merged_at.astimezone(TIMEZONE) < previous_release_date): + continue + print( + pull.updated_at.astimezone(TIMEZONE), + pull.merged_at.astimezone(TIMEZONE), + pull.number, + pull.title, + ) + pr_labels = {label.name for label in pull.labels} + if 'automatedPR' in pr_labels: + continue + for label in labels: + if label in pr_labels: + break + else: + labelless_pulls.append(pull) + label = 'enhancement' + lines[label].append((pull.closed_at, _compose_note(pull))) + + # Warn about label-less PRs: + _list_labelless_pulls(labelless_pulls) + + # Format lines to a human readable changelog + format_notes(lines, VERSION[project]) + + +def format_notes(lines, version): + """Format release notes.""" + sections = [ + version, + '-' * len(version), + 'Highlights', + '', + 'TODO: add highlights', + '', + "This release includes", + ] + for label in lines: + try: + entries = sorted(lines[label]) # sort by merge time + except KeyError: + continue + title = TITLES.get(label, label.title()) + if entries: + sections.append('\n'.join(['', title, '~' * len(title), ''])) + if label == 'backwards incompatible change': + sections.append( + 'TODO: add examples of how to deal with these changes\n') + sections.append('\n'.join(entry for _, entry in entries)) + notes = '\n'.join(sections) + print("Copy the following lines to changelog.rst:\n") + print(notes) + + +def _get_pull_requests(project): + session = Github(GITHUB_API_KEY) + repo = session.get_repo(GITHUB_REPO[project]) + pulls = repo.get_pulls( + state='closed', + sort='updated', + direction='desc', + base='main', + ) + return pulls + + +def _list_labelless_pulls(labelless_pulls): + if labelless_pulls: + print('\nPlease add labels to the following PRs:') + for pull in labelless_pulls: + print(pull.html_url) + print('\n') + else: + print('\nNo PR has missing labels!\n') + + +def _compose_note(pull): + user = pull.user + title = pull.title + title = title[0].upper() + title[1:] + return f"- {title} (:pull:`{pull.number}`) by :user:`{user.login}`" + + +def main(): + """Entry point for the script.""" + + def display(lines, out): + text = "\n".join(lines) + "\n" + out.write(text) + + fire.core.Display = display + fire.Fire(draft_notes_since) + + +if __name__ == '__main__': + main() diff --git a/esmvaltool/utils/editor-enhancements/ncl-ESMValTool.el 
b/esmvaltool/utils/editor-enhancements/ncl-ESMValTool.el index a1f75f79c1..00a1d807c4 100644 --- a/esmvaltool/utils/editor-enhancements/ncl-ESMValTool.el +++ b/esmvaltool/utils/editor-enhancements/ncl-ESMValTool.el @@ -238,7 +238,7 @@ ("\\<\\(res_vpUseSegments\\|res_vpWidthF\\|res_vpXF\\|res_vpYF\\|res_wkAntiAlias\\|res_wkBackgroundColor\\|res_wkBackgroundOpacityF\\|res_wkColorMapLen\\|res_wkColorMap\\|res_wkColorModel\\|res_wkColorModel\\|res_wkDashTableLength\\|res_wkDefGraphicStyleId\\|res_wkDeviceLowerX\\|res_wkDeviceLowerX\\|res_wkDeviceLowerX\\|res_wkDeviceLowerY\\|res_wkDeviceLowerY\\|res_wkDeviceLowerY\\|res_wkDeviceUpperX\\|res_wkDeviceUpperX\\|res_wkDeviceUpperX\\|res_wkDeviceUpperY\\|res_wkDeviceUpperY\\|res_wkDeviceUpperY\\|res_wkFileName\\|res_wkFileName\\|res_wkFillTableLength\\|res_wkForegroundColor\\|res_wkFormat\\|res_wkFormat\\|res_wkFullBackground\\|res_wkFullBackground\\|res_wkGksWorkId\\|res_wkHeight\\|res_wkMarkerTableLength\\|res_wkMetaName\\|res_wkOrientation\\|res_wkOrientation\\|res_wkOrientation\\|res_wkPDFFileName\\|res_wkPDFFormat\\|res_wkPDFResolution\\|res_wkPSFileName\\|res_wkPSFormat\\|res_wkPSResolution\\|res_wkPaperHeightF\\|res_wkPaperHeightF\\|res_wkPaperHeightF\\|res_wkPaperSize\\|res_wkPaperSize\\|res_wkPaperSize\\|res_wkPaperWidthF\\|res_wkPaperWidthF\\|res_wkPaperWidthF\\|res_wkTopLevelViews\\|res_wkViews\\|res_wkVisualType\\|res_wkVisualType\\|res_wkWidth\\|res_wsCurrentSize\\|res_wsMaximumSize\\|res_wsThresholdSize\\|res_xyComputeXMax\\|res_xyComputeXMin\\|res_xyComputeYMax\\|res_xyComputeYMin\\|res_xyCoordData\\|res_xyCoordDataSpec\\|res_xyCurveDrawOrder\\|res_xyDashPattern\\|res_xyDashPatterns\\|res_xyExplicitLabels\\|res_xyExplicitLegendLabels\\|res_xyLabelMode\\|res_xyLineColor\\|res_xyLineColors\\|res_xyLineDashSegLenF\\|res_xyLineLabelConstantSpacingF\\|res_xyLineLabelFont\\|res_xyLineLabelFontAspectF\\|res_xyLineLabelFontColor\\|res_xyLineLabelFontColors\\|res_xyLineLabelFontHeightF\\|res_xyLineLabelFontQuality\\|res_xyLineLabelFontThicknessF\\|res_xyLineLabelFuncCode\\|res_xyLineOpacities\\|res_xyLineOpacityF\\|res_xyLineThicknessF\\|res_xyLineThicknesses\\|res_xyMarkLineMode\\|res_xyMarkLineModes\\|res_xyMarker\\|res_xyMarkerColor\\|res_xyMarkerColors\\|res_xyMarkerOpacities\\|res_xyMarkerOpacityF\\|res_xyMarkerSizeF\\|res_xyMarkerSizes\\|res_xyMarkerThicknessF\\|res_xyMarkerThicknesses\\|res_xyMarkers\\|res_xyMonoDashPattern\\|res_xyMonoLineColor\\|res_xyMonoLineLabelFontColor\\|res_xyMonoLineThickness\\|res_xyMonoMarkLineMode\\|res_xyMonoMarker\\|res_xyMonoMarkerColor\\|res_xyMonoMarkerSize\\|res_xyMonoMarkerThickness\\|res_xyXIrrTensionF\\|res_xyXIrregularPoints\\|res_xyXStyle\\|res_xyYIrrTensionF\\|res_xyYIrregularPoints\\|res_xyYStyle\\|\\)\\>" 1 font-lock-constant-face) ;; ESMValTool interface, shared and utility scripts - 
("\\<\\(read_data\\|read_fx_data\\|select_metadata_by_atts\\|select_metadata_by_name\\|metadata_att_as_array\\|copy_VarCoords_l1\\|check_min_max_datasets\\|tstep\\|get_ncdf_name\\|get_ncdf_dir\\|ncdf_read\\|ncdf_write\\|ncdf_att\\|ncdf_define\\|att2var_default\\|att2var\\|bname\\|basename\\|extract_years\\|extend_var_at\\|copy_CoordNames_n\\|empty_str\\|write_info\\|remove_index\\|set_default_att\\|filter_attrs\\|write_ignore_warnings\\|get_ref_dataset_idx\\|log_info\\|log_debug\\|enter_msg\\|leave_msg\\|error_msg\\|tool_stop\\|exit_if_missing_atts\\|taylor_plot\\|contour_map\\|contour_map_polar\\|contour_map_ce\\|add_markers_to_map\\|get_title_suffix\\|remove_attrs\\|plot_two_by_one\\|plot_three_by_one_diff\\|two_by_one\\|three_by_one_diff\\|plot_three_by_one_vector\\|three_by_one_vector\\|plot_multipanel\\|multipanel\\|plot_multipanel_vector\\|multipanel_vector\\|seasonal_plot\\|xy_plot_wrapper\\|ts_line_wrapper\\|pr_u850_mean_plot\\|mjo_xcor_lag_plot\\|mjo_pr_ua_vari_plot\\|mjo_unvari_eof_plot\\|get_title_suffix\\|remove_attrs\\|plot_two_by_one\\|plot_three_by_one_diff\\|two_by_one\\|three_by_one_diff\\|plot_three_by_one_vector\\|three_by_one_vector\\|plot_multipanel\\|multipanel\\|plot_multipanel_vector\\|multipanel_vector\\|seasonal_plot\\|xy_plot_wrapper\\|ts_line_wrapper\\|xy_line_overlap\\|plot_precip_domain\\|precip_domain\\|month_sel\\|lat_names\\|add_line\\|add_scatt\\|add_legenda\\|calcRegCoeffs\\|genZonalMeans\\|calcMeanAnnCycleMonthly\\|calcMeanAnnCycleAnnual\\|rmMeanAnnCycle\\|apfiltersmooth\\|smoothAnomalies\\|clmMon2clmDayn\\|scatterplot\\|scatterplot3D\\|scatterplot_markers\\|zonalmean_profile\\|contourplot\\|portrait_plot\\|profile_plev\\|aerosol_profile\\|aerosol_sizedist\\|xy_line\\|xy_line_anom\\|timeseries_station\\|cycle_plot\\|errorbar_plot\\|create_legend_lines\\|output_type\\|copy_VarAtt_sel\\|panelling\\|get_plot_dir\\|get_outfile_name\\|get_wks\\|add_markers\\|add_num_markers\\|add_errorbar\\|horizontal_whiskers\\|add_prediction_error\\|mjo_wave_freq_plot\\|addHorVertLinesCross_extended\\|mjo_cross_spectra_plot\\|mjo_ceof_plot\\|mjo_life_cycle_plot\\|vector_scalar_map_polar\\|project_style\\|place_debuginfo\\|place_description\\|gsnColorRange\\|format_units\\|set_log_ticks\\|sort_alphabetically\\|legend_lines\\|legend_markers\\|roi\\|extract_area\\|gridcell_area\\|map_area\\|area_operations\\|select_region\\|make_latlon2D\\|cdo_remapdis\\|guestimate_average_grid_area\\|get_lower_limits\\|get_upper_limits\\|is_regional\\|esmf_conserve_wrapper\\|rect2rect_interp\\|plev_lat_interp\\|get_dataset_minus_ref\\|esmf_conserve_wrapper_time\\|regrid_3D_to_rectilinear_grid\\|ESMValMD\\|get_start_year\\|get_end_year\\|convert_units\\|UNIQ\\|union\\|set_inclusive_OR\\|intersection\\|is_array_subset\\|relative_complement\\|set_symmetric_difference\\|dim_stddev_wgt_Wrap\\|time_operations\\|calc_season_index\\|extract_season\\|month_to_season_extended\\|coswgt_areaave\\|coswgt_arearmse\\|coswgt_pattern_cor\\|interannual_variability\\|calculate_metric\\|normalize_metric\\|distrib_stats\\|lognormal_dist\\|add_labelbar\\|create_empty_array\\|data_read_in\\|data_read_in_ocean_MOC\\|data_read_in_ice\\|y_axis_check\\|check_custom_climo\\|isfilepresent2\\|table_link_setup\\|set_varAtts\\|create_timec\\|format_time\\|format_plev\\|format_lev\\|format_lat\\|format_lon\\|format_coords\\|read_cmor\\|format_variable\\|guess_bounds_time\\|guess_bounds_lev\\|guess_bounds_lat\\|guess_bounds_lon\\|guess_coord_bounds\\|set_global_atts\\|write_nc\\|write_nc_profile\\|set_size_array\\|process_EBAS
_data\\|\\)\\>" 1 font-lock-type-face) + ("\\<\\(read_data\\|select_metadata_by_atts\\|select_metadata_by_name\\|metadata_att_as_array\\|bname\\|basename\\|att2var\\|att2var_default\\|get_ncdf_name\\|get_ncdf_dir\\|ncdf_read\\|ncdf_define\\|ncdf_write\\|ncdf_att\\|copy_CoordNames_n\\|extend_var_at\\|remove_index\\|set_default_att\\|empty_str\\|log_info\\|log_debug\\|enter_msg\\|leave_msg\\|error_msg\\|tool_stop\\|exit_if_missing_atts\\|log_provenance\\|taylor_plot\\|contour_map\\|contour_map_polar\\|contour_map_ce\\|add_markers_to_map\\|get_title_suffix\\|remove_attrs\\|plot_two_by_one\\|plot_three_by_one_diff\\|two_by_one\\|three_by_one_diff\\|plot_three_by_one_vector\\|three_by_one_vector\\|plot_multipanel\\|multipanel\\|plot_multipanel_vector\\|multipanel_vector\\|seasonal_plot\\|xy_plot_wrapper\\|ts_line_wrapper\\|pr_u850_mean_plot\\|mjo_xcor_lag_plot\\|mjo_pr_ua_vari_plot\\|mjo_unvari_eof_plot\\|get_title_suffix\\|remove_attrs\\|plot_two_by_one\\|plot_three_by_one_diff\\|two_by_one\\|three_by_one_diff\\|plot_three_by_one_vector\\|three_by_one_vector\\|plot_multipanel\\|multipanel\\|plot_multipanel_vector\\|multipanel_vector\\|seasonal_plot\\|xy_plot_wrapper\\|ts_line_wrapper\\|xy_line_overlap\\|plot_precip_domain\\|precip_domain\\|month_sel\\|lat_names\\|add_line\\|add_scatt\\|add_legend\\|calcRegCoeffs\\|genZonalMeans\\|calcMeanAnnCycleMonthly\\|calcMeanAnnCycleAnnual\\|rmMeanAnnCycle\\|apfiltersmooth\\|smoothAnomalies\\|clmMon2clmDayn\\|scatterplot\\|scatterplot3D\\|scatterplot_markers\\|zonalmean_profile\\|contourplot\\|portrait_plot\\|circle_plot\\|profile_plev\\|aerosol_profile\\|aerosol_sizedist\\|xy_line\\|xy_line_anom\\|timeseries_station\\|cycle_plot\\|errorbar_plot\\|create_legend_lines\\|output_type\\|copy_VarAtt_sel\\|panelling\\|get_plot_dir\\|get_outfile_name\\|get_wks\\|add_markers\\|add_num_markers\\|add_errorbar\\|horizontal_whiskers\\|add_prediction_error\\|mjo_wave_freq_plot\\|addHorVertLinesCross_extended\\|mjo_cross_spectra_plot\\|mjo_ceof_plot\\|mjo_life_cycle_plot\\|vector_scalar_map_polar\\|project_style\\|place_debuginfo\\|place_description\\|gsnColorRange\\|format_units\\|set_log_ticks\\|sort_alphabetically\\|legend_lines\\|legend_markers\\|roi\\|extract_area\\|gridcell_area\\|map_area\\|area_operations\\|select_region\\|make_latlon2D\\|cdo_remapdis\\|guestimate_average_grid_area\\|get_lower_limits\\|get_upper_limits\\|is_regional\\|esmf_conserve_wrapper\\|rect2rect_interp\\|plev_lat_interp\\|get_dataset_minus_ref\\|esmf_conserve_wrapper_time\\|regrid_3D_to_rectilinear_grid\\|get_start_year\\|get_end_year\\|convert_units\\|UNIQ\\|union\\|set_inclusive_OR\\|intersection\\|is_array_subset\\|relative_complement\\|set_symmetric_difference\\|dim_stddev_wgt_Wrap\\|time_operations\\|calc_season_index\\|extract_season\\|month_to_season_extended\\|coswgt_areaave\\|coswgt_arearmse\\|coswgt_pattern_cor\\|interannual_variability\\|calculate_metric\\|normalize_metric\\|distrib_stats\\|lognormal_dist\\|add_labelbar\\|create_empty_array\\|data_read_in\\|data_read_in_ocean_MOC\\|data_read_in_ice\\|y_axis_check\\|check_custom_climo\\|isfilepresent2\\|table_link_setup\\|set_varAtts\\|create_timec\\|format_time\\|format_plev\\|format_lev\\|format_lat\\|format_lon\\|format_coords\\|read_cmor\\|format_variable\\|guess_bounds_time\\|guess_bounds_lev\\|guess_bounds_lat\\|guess_bounds_lon\\|guess_coord_bounds\\|set_global_atts\\|write_nc\\|write_nc_profile\\|set_size_array\\|process_EBAS_data\\|\\)\\>" 1 font-lock-type-face) ) "words used in ncl-mode highlighting" diff --git 
a/esmvaltool/utils/nclcodestyle/nclcodestyle.py b/esmvaltool/utils/nclcodestyle/nclcodestyle.py
index 455ddd7e33..003fe177fa 100644
--- a/esmvaltool/utils/nclcodestyle/nclcodestyle.py
+++ b/esmvaltool/utils/nclcodestyle/nclcodestyle.py
@@ -487,7 +487,7 @@ def missing_whitespace(logical_line):
     for index in range(len(line) - 1):
         char = line[index]
         if char in ',;:' and line[index + 1] not in WHITESPACE:
-            before = line[:index]
+            # before = line[:index]
             if char == ':':
                 continue  # Slice syntax, no space required
             if char == ',' and line[index + 1] == ')':
diff --git a/esmvaltool/utils/prov2files.py b/esmvaltool/utils/prov2files.py
new file mode 100644
index 0000000000..15873893d5
--- /dev/null
+++ b/esmvaltool/utils/prov2files.py
@@ -0,0 +1,69 @@
+"""Print out the input files used to generate a result."""
+import argparse
+
+from prov.model import ProvDerivation, ProvDocument
+
+
+def prov2files(filename):
+    """Figure out which file was generated from which source files.
+
+    Parameters
+    ----------
+    filename: str
+        Name of the file containing the provenance.
+
+    Returns
+    -------
+    (str, list[str])
+        A tuple; the first entry is the name of the result
+        and the second entry is a list of files used to compute
+        that result.
+    """
+    provenance = ProvDocument.deserialize(filename, format='xml')
+
+    source_files = set()
+    generated_files = set()
+    for rec in provenance.get_records(ProvDerivation):
+        # Find all derivation relations
+        generated, used = rec.args[:2]
+        source_files.add(used.localpart)
+        generated_files.add(generated.localpart)
+
+    # Filter out intermediate files
+    intermediate_files = source_files & generated_files
+    source_files = source_files - intermediate_files
+    result_files = generated_files - intermediate_files
+
+    if len(result_files) != 1:
+        # If this changes, need to rewrite this function so it
+        # builds a provenance graph.
+        raise ValueError("Invalid provenance file encountered:"
+                         " ESMValTool provenance describes one result only.")
+    return result_files.pop(), sorted(source_files)
+
+
+def main():
+    """Print out a list of files."""
+    parser = argparse.ArgumentParser(
+        description=__doc__,
+        formatter_class=argparse.RawDescriptionHelpFormatter)
+    parser.add_argument(
+        'provenance_files',
+        nargs='+',
+        type=str,
+        help='Path to one or more files containing provenance.')
+    args = parser.parse_args()
+
+    for filename in args.provenance_files:
+        if not filename.endswith('_provenance.xml'):
+            print("Skipping", filename,
+                  "- does it contain ESMValTool provenance?")
+            continue
+        result, files = prov2files(filename)
+        print(f"{result} was derived from:")
+        print('\n'.join(files))
+        print('')
+
+
+if __name__ == '__main__':
+    main()
diff --git a/esmvaltool/utils/recipe_filler.py b/esmvaltool/utils/recipe_filler.py
new file mode 100755
index 0000000000..40f637c6d5
--- /dev/null
+++ b/esmvaltool/utils/recipe_filler.py
@@ -0,0 +1,914 @@
+"""
+Fill in a blank recipe with additional datasets.
+
+Tool to obtain a set of additional datasets when given a blank recipe.
+The blank recipe should contain, at the very least, a list of diagnostics,
+each with their variable(s). Example of minimum settings:
+
+diagnostics:
+  diagnostic:
+    variables:
+      ta:
+        mip: Amon
+        start_year: 1850
+        end_year: 1900
+
+Note that the tool will exit if any of these minimum settings are missing!
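As an aside, the "minimum settings" contract above is easy to check for yourself; the sketch below (not part of the patch; the recipe text simply mirrors the example above) parses such a blank recipe and asserts the keys the tool insists on:

import yaml

minimal_recipe = """
diagnostics:
  diagnostic:
    variables:
      ta:
        mip: Amon
        start_year: 1850
        end_year: 1900
"""

recipe = yaml.safe_load(minimal_recipe)
variable = recipe["diagnostics"]["diagnostic"]["variables"]["ta"]
# The tool exits unless every variable carries these three keys.
assert all(key in variable for key in ("mip", "start_year", "end_year"))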
+
+Key features:
+
+- you can add as many variable parameters as are needed; if not added, the
+  tool will use the "*" wildcard and find all available combinations;
+- you can restrict the number of datasets to be looked for with the `dataset:`
+  key for each variable; pass a list of datasets as value, e.g.
+  `dataset: [MPI-ESM1-2-LR, MPI-ESM-LR]`;
+- you can specify a pair of experiments, e.g. `exp: [rcp26, rcp85]`,
+  for each variable; this will look for each available dataset per experiment
+  and assemble an aggregated data stretch from each experiment; equivalent to
+  esmvaltool's syntax of multiple experiments; this option needs an ensemble
+  to be declared explicitly; it will return no entry if there are gaps in
+  the data;
+- `start_year` and `end_year` are mandatory and are used to filter out the
+  datasets that don't have data in the interval; if you want all possible
+  years, hence no filtering on years, just use "*" for start and end years;
+- `config-user: rootpath: CMIPX` may be a list; rootpath lists are supported.
+
+Caveats:
+
+- the tool doesn't yet work for derived variables;
+- operation is restricted to CMIP data.
+
+Have fun!
+"""
+import argparse
+import datetime
+import itertools
+import logging
+import logging.config
+import os
+import shutil
+import time
+from glob import glob
+from pathlib import Path
+
+import esmvalcore
+import yaml
+
+from esmvalcore import __version__ as core_ver
+from esmvalcore.cmor.table import CMOR_TABLES, read_cmor_tables
+from packaging import version as pkg_version
+from ruamel.yaml import YAML
+
+logger = logging.getLogger(__name__)
+
+CFG = {}
+
+
+def _purge_file_handlers(cfg: dict) -> None:
+    """Remove handlers with filename set.
+
+    This is used to remove file handlers which require an output
+    directory to be set.
+    """
+    cfg['handlers'] = {
+        name: handler
+        for name, handler in cfg['handlers'].items()
+        if 'filename' not in handler
+    }
+    prev_root = cfg['root']['handlers']
+    cfg['root']['handlers'] = [
+        name for name in prev_root if name in cfg['handlers']
+    ]
+
+
+def _update_stream_level(cfg: dict, level=None):
+    """Update the log level for the stream handlers."""
+    handlers = cfg['handlers']
+
+    for handler in handlers.values():
+        if level is not None and 'stream' in handler:
+            if handler['stream'] in ('ext://sys.stdout', 'ext://sys.stderr'):
+                handler['level'] = level.upper()
+
+
+def _get_log_files(cfg: dict, output_dir: str = None) -> list:
+    """Initialize log files for the file handlers."""
+    log_files = []
+
+    handlers = cfg['handlers']
+
+    for handler in handlers.values():
+        filename = handler.get('filename', None)
+
+        if filename:
+            if not os.path.isabs(filename):
+                handler['filename'] = os.path.join(output_dir, filename)
+            log_files.append(handler['filename'])
+
+    return log_files
+
+
+def configure_logging(cfg_file: str = None,
+                      output_dir: str = None,
+                      console_log_level: str = None) -> list:
+    """Configure logging.
+
+    Parameters
+    ----------
+    cfg_file : str, optional
+        Logging config file. If `None`, defaults to `config-logging.yml`.
+    output_dir : str, optional
+        Output directory for the log files. If `None`, log only to the
+        console.
+    console_log_level : str, optional
+        If `None`, use the default (INFO).
+
+    Returns
+    -------
+    log_files : list
+        Filenames that will be logged to.
+ """ + if cfg_file is None: + cfg_loc = Path(esmvalcore.__file__ + "esmvalcore") + if pkg_version.parse(core_ver) < pkg_version.parse('2.8.0'): + cfg_file = cfg_loc.parents[0] / '_config' / 'config-logging.yml' + else: + cfg_file = cfg_loc.parents[0] / 'config' / 'config-logging.yml' + + cfg_file = Path(cfg_file).absolute() + + with open(cfg_file) as file_handler: + cfg = yaml.safe_load(file_handler) + + if output_dir is None: + _purge_file_handlers(cfg) + + log_files = _get_log_files(cfg, output_dir=output_dir) + _update_stream_level(cfg, level=console_log_level) + + logging.config.dictConfig(cfg) + logging.Formatter.converter = time.gmtime + logging.captureWarnings(True) + + return log_files + + +def read_config_developer_file(cfg_file=None): + """Read the developer's configuration file.""" + if cfg_file is None: + cfg_loc = Path(esmvalcore.__file__ + "esmvalcore") + cfg_file = cfg_loc.parents[0] / 'config-developer.yml' + + with open(cfg_file, 'r') as file: + cfg = yaml.safe_load(file) + + return cfg + + +def _normalize_path(path): + """Normalize paths. + + Expand ~ character and environment variables and convert path to absolute. + + Parameters + ---------- + path: str + Original path + + Returns + ------- + str: + Normalized path + """ + if path is None: + return None + return os.path.abspath(os.path.expanduser(os.path.expandvars(path))) + + +def read_config_user_file(config_file, folder_name, options=None): + """Read config user file and store settings in a dictionary.""" + if not config_file: + config_file = '~/.esmvaltool/config-user.yml' + config_file = os.path.abspath( + os.path.expandvars(os.path.expanduser(config_file))) + # Read user config file + if not os.path.exists(config_file): + print(f"ERROR: Config file {config_file} does not exist") + + with open(config_file, 'r') as file: + cfg = yaml.safe_load(file) + + if options is None: + options = dict() + for key, value in options.items(): + cfg[key] = value + + # set defaults + defaults = { + 'compress_netcdf': False, + 'exit_on_warning': False, + 'output_file_type': 'png', + 'output_dir': 'esmvaltool_output', + 'auxiliary_data_dir': 'auxiliary_data', + 'save_intermediary_cubes': False, + 'remove_preproc_dir': True, + 'max_parallel_tasks': None, + 'run_diagnostic': True, + 'profile_diagnostic': False, + 'config_developer_file': None, + 'drs': {}, + } + + for key in defaults: + if key not in cfg: + logger.info( + "No %s specification in config file, " + "defaulting to %s", key, defaults[key]) + cfg[key] = defaults[key] + + cfg['output_dir'] = _normalize_path(cfg['output_dir']) + cfg['auxiliary_data_dir'] = _normalize_path(cfg['auxiliary_data_dir']) + + cfg['config_developer_file'] = _normalize_path( + cfg['config_developer_file']) + + for key in cfg['rootpath']: + root = cfg['rootpath'][key] + if isinstance(root, str): + cfg['rootpath'][key] = [_normalize_path(root)] + else: + cfg['rootpath'][key] = [_normalize_path(path) for path in root] + + # insert a directory date_time_recipe_usertag in the output paths + now = datetime.datetime.utcnow().strftime("%Y%m%d_%H%M%S") + new_subdir = '_'.join((folder_name, now)) + cfg['output_dir'] = os.path.join(cfg['output_dir'], new_subdir) + + # create subdirectories + cfg['preproc_dir'] = os.path.join(cfg['output_dir'], 'preproc') + cfg['work_dir'] = os.path.join(cfg['output_dir'], 'work') + cfg['plot_dir'] = os.path.join(cfg['output_dir'], 'plots') + cfg['run_dir'] = os.path.join(cfg['output_dir'], 'run') + + # Read developer configuration file + 
read_cmor_tables(cfg['config_developer_file'])
+
+    return cfg
+
+
+HEADER = r"""
+______________________________________________________________________
+ _____ ____  __  ____     __    _ _____           _
+| ____/ ___||  \/  \ \   / /_ _| | |_ _|__   ___ | |
+|  _| \___ \| |\/| |\ \ / / _` | |  | |/ _ \ / _ \| |
+| |___ ___) | |  | | \ V / (_| | |  | | (_) | (_) | |
+|_____|____/|_|  |_|  \_/ \__,_|_|  |_|\___/ \___/|_|
+______________________________________________________________________
+
+""" + __doc__
+
+dataset_order = [
+    'dataset', 'project', 'exp', 'mip', 'ensemble', 'grid', 'start_year',
+    'end_year'
+]
+
+# cmip eras
+cmip_eras = ["CMIP5", "CMIP6"]
+
+# The base dictionary (all wildcards):
+base_dict = {
+    'institute': '*',
+    'dataset': '*',
+    'project': '*',
+    'exp': '*',
+    'frequency': '*',
+    'ensemble': '*',
+    'mip': '*',
+    'modeling_realm': '*',
+    'short_name': '*',
+    'grid': '*',
+    'start_year': '*',
+    'end_year': '*',
+    'activity': '*',
+}
+
+
+def _get_download_dir(yamlconf, cmip_era):
+    """Get the download directory from the user config file."""
+    if 'download_dir' in yamlconf:
+        return os.path.join(yamlconf['download_dir'], cmip_era)
+    return False
+
+
+def _get_site_rootpath(cmip_era):
+    """Get site (drs) from config-user.yml."""
+    config_yml = get_args().config_file
+    with open(config_yml, 'r') as yamf:
+        yamlconf = yaml.safe_load(yamf)
+    drs = yamlconf['drs'][cmip_era]
+
+    download_dir = _get_download_dir(yamlconf, cmip_era)
+    rootdir = [yamlconf['rootpath'][cmip_era], ]
+
+    if download_dir:
+        rootdir.append(download_dir)
+    logger.debug("%s root directory %s", cmip_era, rootdir)
+    if drs == 'default' and 'default' in yamlconf['rootpath']:
+        rootdir = [yamlconf['rootpath']['default'], ]
+        if download_dir:
+            rootdir.append(download_dir)
+        logger.debug("Using drs default and "
+                     "default: %s data directory", rootdir)
+
+    return drs, rootdir
+
+
+def _get_input_dir(cmip_era):
+    """Get input_dir from config-developer.yml."""
+    site = _get_site_rootpath(cmip_era)[0]
+    yamlconf = read_config_developer_file()
+
+    return yamlconf[cmip_era]['input_dir'][site]
+
+
+def _get_input_file(cmip_era):
+    """Get input_file from config-developer.yml."""
+    yamlconf = read_config_developer_file()
+    return yamlconf[cmip_era]['input_file']
+
+
+def _determine_basepath(cmip_era):
+    """Determine a basepath."""
+    if isinstance(_get_site_rootpath(cmip_era)[1], list):
+        rootpaths = _get_site_rootpath(cmip_era)[1]
+    else:
+        rootpaths = [_get_site_rootpath(cmip_era)[1]]
+
+    basepaths = []
+    for rootpath in rootpaths:
+        if _get_input_dir(cmip_era) != os.path.sep:
+            basepath = os.path.join(rootpath, _get_input_dir(cmip_era),
+                                    _get_input_file(cmip_era))
+        else:
+            basepath = os.path.join(rootpath, _get_input_file(cmip_era))
+        basepath = basepath.replace('//', '/')
+        basepaths.append(basepath)
+    logger.debug("We will look for files of patterns %s", basepaths)
+
+    return basepaths
+
+
+def _overlapping_datasets(files, all_years, start_year, end_year):
+    """Process overlapping datasets; check for available data in range."""
+    valid_files = []
+    ay_sorted = sorted(all_years)
+    if ay_sorted[0] <= start_year and ay_sorted[-1] >= end_year:
+        yr_pairs = sorted(
+            [all_years[i:i + 2] for i in range(0, len(all_years), 2)])
+        yr_pairs = list(k for k, _ in itertools.groupby(yr_pairs))
+        d_y = [
+            yr_pairs[j][1] - yr_pairs[j + 1][0]
+            for j in range(len(yr_pairs) - 1)
+        ]
+        gaps = [c for c in d_y if c < -1]
+        if not gaps:
+            valid_files = files
+            logger.info("Contiguous data from multiple experiments.")
+        else:
+            logger.warning("Data from
multiple exps has >1 year gaps! ") + logger.debug("Start %s/end %s requested - " + "files covering %s found.", + start_year, end_year, yr_pairs) + + return valid_files + + +def filter_years(files, start_year, end_year, overlap=False): + """ + Filter out files that are outside requested time range. + + Nifty function that takes a list of files and two years + as arguments; it will build a series of filter dictionaries + and check if data is available for the entire interval; + it will return a single file per dataset, the first file + in the list of files that cover the specified interval; + optional argument `overlap` used if multiple experiments are + used and overlap between datasets is present. + + Parameters + ---------- + files: list + A list of files that need filtering by requested time range. + + start_year: int + Integer start year of requested range. + + end_year: int + Integer end year of requested range. + + overlap: bool + Flag if datasets overlap; defaults to False. + + Returns + ------- + list + List of files which have been identified as falling in + the requested time range; if multiple files within time range + per dataset, the first file will be returned. + + """ + valid_files = [] + available_years = {} + + if start_year == "*" and end_year == "*": + return files + + if not files: + return valid_files + + all_files_roots = [("").join(fil.split("_")[0:-1]) for fil in files] + for fil in files: + available_years[("").join(fil.split("_")[0:-1])] = [] + for fil in files: + available_years[("").join(fil.split("_")[0:-1])].append( + fil.split("_")[-1].strip(".nc").split("-")) + + all_years = [] + for root, yr_list in available_years.items(): + actual_years = [] + yr_list = list(itertools.chain.from_iterable(yr_list)) + for year in yr_list: + if len(year) == 4: + actual_years.append(int(year)) + else: + actual_years.append(int(year[0:4])) + actual_years = sorted(actual_years) + all_years.extend(actual_years) + if not overlap: + actual_years = sorted(list(set(actual_years))) + if actual_years[0] <= start_year and actual_years[-1] >= end_year: + idx = all_files_roots.index(root) + valid_files.append(files[idx]) + + # multiple experiments to complete each other + if overlap: + valid_files = _overlapping_datasets(files, all_years, start_year, + end_year) + + if not valid_files: + logger.warning("No data found to fully cover start " + "%s / end %s as requested!", start_year, end_year) + + return valid_files + + +def _resolve_latestversion(dirname_template): + """Resolve the 'latestversion' tag.""" + for version_separator in ['{latestversion}', '{version}']: + if version_separator in dirname_template: + break + else: + return dirname_template + + # Find latest version + part1, part2 = dirname_template.split(version_separator) + part2 = part2.lstrip(os.sep) + part1_contents = glob(part1) + if part1_contents: + versions = os.listdir(part1_contents[0]) + versions.sort(reverse=True) + for version in ['latest'] + versions: + dirname = os.path.join(part1, version, part2) + if glob(dirname): + return dirname + + return dirname_template + + +def list_all_files(file_dict, cmip_era): + """ + List all files that match the dataset dictionary. + + Function that returns all files that are determined by a + file_dict dictionary; file_dict is keyed on usual parameters + like `dataset`, `project`, `mip` etc; glob.glob is used + to find files; speedup is achieved by replacing wildcards + with values from CMOR tables. 
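For readers skimming the patch, the substitute-then-glob idea described in this docstring can be illustrated in a few lines; the template string and key values below are invented for the example (the real templates come from config-developer.yml):

from glob import glob

template = "/data/{project}/{dataset}/{mip}/{short_name}/*.nc"
file_dict = {"project": "CMIP6", "dataset": "*", "mip": "Amon",
             "short_name": "tas"}
path = template
for key, value in file_dict.items():
    path = path.replace("{" + key + "}", str(value))
# Keys whose value is "*" simply remain glob wildcards.
files = glob(path)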
+
+    Parameters
+    ----------
+    file_dict: dict
+        Dictionary to hold dataset specifications.
+
+    cmip_era: str
+        Either CMIP5 or CMIP6.
+
+    Returns
+    -------
+    list:
+        List of found files.
+
+    """
+    mip = file_dict['mip']
+    short_name = file_dict['short_name']
+    try:
+        frequency = CMOR_TABLES[cmip_era].get_variable(mip,
+                                                       short_name).frequency
+        realms = CMOR_TABLES[cmip_era].get_variable(mip,
+                                                    short_name).modeling_realm
+    except AttributeError:
+        logger.warning("Could not find %s CMOR table "
+                       "for variable %s with mip %s",
+                       cmip_era, short_name, mip)
+        return []
+    file_dict['frequency'] = frequency
+
+    basepaths = _determine_basepath(cmip_era)
+    all_files = []
+
+    for basepath in basepaths:
+        new_path = basepath[:]
+
+        # could have multiple realms
+        for realm in realms:
+            file_dict['modeling_realm'] = realm
+
+            # substitute the values from the dataset dict into the template
+            for key, value in file_dict.items():
+                new_path = new_path.replace('{' + key + '}', str(value))
+            new_path = _resolve_latestversion(new_path)
+            if new_path.startswith("~"):
+                new_path = os.path.expanduser(new_path)
+                if not new_path.startswith(os.sep):
+                    raise ValueError(
+                        "Could not expand ~ to the user home dir; "
+                        "please expand it in the config user file!")
+            logger.info("Expanding path to %s", new_path)
+
+            # glob the remaining wildcards into a list of files
+            files = glob(new_path)
+            all_files.extend(files)
+    if not all_files:
+        logger.warning("Could not find any file for data specifications.")
+
+    return all_files
+
+
+def _file_to_recipe_dataset(fn_path, cmip_era, file_dict):
+    """Convert a filename to a recipe-ready dataset."""
+    # Add the obvious ones - i.e. the ones you requested!
+    output_dataset = {}
+    output_dataset['project'] = cmip_era
+    for key, value in file_dict.items():
+        if value == '*':
+            continue
+        if key in dataset_order:
+            output_dataset[key] = value
+
+    # Split file name and base path into directory structure and filenames.
+    basefiles = _determine_basepath(cmip_era)
+    _, fnfile = os.path.split(fn_path)
+
+    for basefile in basefiles:
+        _, basefile = os.path.split(basefile)
+        # Some of the key words include the splitting character '_' !
+        basefile = basefile.replace('short_name', 'shortname')
+        basefile = basefile.replace('start_year', 'startyear')
+        basefile = basefile.replace('end_year', 'endyear')
+
+        # Assume filename is separated by '_'
+        basefile_split = [key.replace("{", "") for key in basefile.split('_')]
+        basefile_split = [key.replace("}", "") for key in basefile_split]
+        fnfile_split = fnfile.split('_')
+
+        # Iterate through the directory structure looking for useful bits.
+        for base_key, fn_key in zip(basefile_split, fnfile_split):
+            if base_key == '*.nc':
+                fn_key = fn_key.replace('.nc', '')
+                start_year, end_year = fn_key.split('-')
+                output_dataset['start_year'] = start_year
+                output_dataset['end_year'] = end_year
+            elif base_key == "ensemble*.nc":
+                output_dataset['ensemble'] = fn_key
+            elif base_key == "grid*.nc":
+                output_dataset['grid'] = fn_key
+            elif base_key == "shortname":
+                pass
+            else:
+                output_dataset[base_key] = fn_key
+    if "exp" in file_dict:
+        if isinstance(file_dict["exp"], list):
+            output_dataset["exp"] = file_dict["exp"]
+
+    return output_dataset
+
+
+def _remove_duplicates(add_datasets):
+    """
+    Remove accidental duplicates.
+
+    Chances are close to 0% that this will ever be needed. It may be used
+    when there are actual duplicates in data storage; we have seen these
+    before, but seldom.
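The deduplication below hinges on turning each dataset dictionary into a hashable fingerprint; here is a stand-alone sketch of the same pattern (example data invented; the real function additionally stringifies the possibly list-valued 'exp' entry before hashing):

datasets = [
    {"dataset": "MPI-ESM-LR", "exp": "historical"},
    {"dataset": "MPI-ESM-LR", "exp": "historical"},  # accidental duplicate
]

seen = set()
unique = []
for dataset in datasets:
    fingerprint = tuple(sorted(dataset.items()))  # hashable view of the dict
    if fingerprint not in seen:
        seen.add(fingerprint)
        unique.append(dataset)

assert len(unique) == 1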
+ """ + datasets = [] + seen = set() + + for dataset in add_datasets: + orig_exp = dataset["exp"] + dataset["exp"] = str(dataset["exp"]) + tup_dat = tuple(dataset.items()) + if tup_dat not in seen: + seen.add(tup_dat) + dataset["exp"] = orig_exp + datasets.append(dataset) + + return datasets + + +def _check_recipe(recipe_dict): + """Perform a quick recipe check for mandatory fields.""" + do_exit = False + if "diagnostics" not in recipe_dict: + logger.error("Recipe missing diagnostics section.") + do_exit = True + for diag_name, diag in recipe_dict["diagnostics"].items(): + if "variables" not in diag: + logger.error("Diagnostic %s missing variables.", diag_name) + do_exit = True + for var_name, var_pars in diag["variables"].items(): + if "mip" not in var_pars: + logger.error("Variable %s missing mip.", var_name) + do_exit = True + if "start_year" not in var_pars: + logger.error("Variable %s missing start_year.", var_name) + do_exit = True + if "end_year" not in var_pars: + logger.error("Variable %s missing end_year.", var_name) + do_exit = True + if "exp" in var_pars: + if isinstance(var_pars["exp"], + list) and "ensemble" not in var_pars: + logger.error("Asking for experiments list for ") + logger.error("variable %s - you need to ", var_name) + logger.error("define an ensemble for this case.") + do_exit = True + if do_exit: + raise ValueError("Please fix the issues in recipe and rerun") + + +def _check_config_file(user_config_file): + """Perform a quick recipe check for mandatory fields.""" + do_exit = False + if "rootpath" not in user_config_file: + logger.error("Config file missing rootpath section.") + do_exit = True + if "drs" not in user_config_file: + logger.error("Config file missing drs section.") + do_exit = True + for proj in cmip_eras: + if proj not in user_config_file["rootpath"].keys(): + logger.error("Config file missing rootpath for %s", proj) + do_exit = True + if proj not in user_config_file["drs"].keys(): + logger.error("Config file missing drs for %s", proj) + do_exit = True + if do_exit: + raise ValueError("Please fix issues in config file and rerun") + + +def _parse_recipe_to_dicts(yamlrecipe): + """Parse a recipe's variables into a dictionary of dictionairies.""" + output_dicts = {} + for diag in yamlrecipe['diagnostics']: + for variable, var_dict in yamlrecipe['diagnostics'][diag][ + 'variables'].items(): + new_dict = base_dict.copy() + for var_key, var_value in var_dict.items(): + if var_key in new_dict: + new_dict[var_key] = var_value + output_dicts[(diag, variable)] = new_dict + + return output_dicts + + +def _add_datasets_into_recipe(additional_datasets, output_recipe): + """Add the datasets into a new recipe.""" + yaml = YAML() + yaml.default_flow_style = False + with open(output_recipe, 'r') as yamlfile: + cur_yaml = yaml.load(yamlfile) + for diag_var, add_dat in additional_datasets.items(): + if add_dat: + if 'additional_datasets' in cur_yaml['diagnostics']: + cur_yaml['diagnostics'][diag_var[0]]['variables'][ + diag_var[1]]['additional_datasets'].extend(add_dat) + else: + cur_yaml['diagnostics'][diag_var[0]]['variables'][ + diag_var[1]]['additional_datasets'] = add_dat + if cur_yaml: + with open(output_recipe, 'w') as yamlfile: + yaml.dump(cur_yaml, yamlfile) + + +def _find_all_datasets(recipe_dict, cmip_eras): + """Find all datasets explicitly.""" + datasets = [] + for cmip_era in cmip_eras: + if cmip_era == "CMIP6": + activity = "CMIP" + else: + activity = "" + drs, site_path = _get_site_rootpath(cmip_era) + if drs in ["default", "SMHI"]: + logger.info("DRS 
is %s; filter on dataset disabled.", drs) + datasets = ["*"] + else: + if not isinstance(site_path, list): + site_path = [site_path] + for site_pth in site_path: + if drs in ["BADC", "DKRZ", "CP4CDS"]: + institutes_path = os.path.join(site_pth, activity) + elif drs in ["ETHZ", "RCAST"]: + exp = recipe_dict["exp"][0] + if exp == "*": + exp = "piControl" # all institutes have piControl + mip = recipe_dict["mip"] + var = recipe_dict["short_name"] + institutes_path = os.path.join(site_pth, exp, mip, var) + + if not os.path.isdir(institutes_path): + logger.warning("Path to data %s " + "does not exist; will look everywhere.", + institutes_path) + datasets = ["*"] + return datasets + + institutes = os.listdir(institutes_path) + if drs in ["BADC", "DKRZ", "CP4CDS"]: + for institute in institutes: + datasets.extend( + os.listdir(os.path.join(institutes_path, + institute))) + else: + datasets.extend(institutes) + + return datasets + + +def _get_exp(recipe_dict): + """Get the correct exp as list of single or multiple exps.""" + if isinstance(recipe_dict["exp"], list): + exps_list = recipe_dict["exp"] + logger.info("Multiple %s experiments requested", exps_list) + else: + exps_list = [recipe_dict["exp"]] + logger.info("Single %s experiment requested", exps_list) + + return exps_list + + +def _get_datasets(recipe_dict, cmip_eras): + """Get the correct datasets as list if needed.""" + if recipe_dict["dataset"] == "*": + datasets = _find_all_datasets(recipe_dict, cmip_eras) + return datasets + if isinstance(recipe_dict['dataset'], list): + datasets = recipe_dict['dataset'] + logger.info("Multiple %s datasets requested", datasets) + else: + datasets = [recipe_dict['dataset']] + logger.info("Single %s dataset requested", datasets) + + return datasets + + +def get_args(): + """Parse command line arguments.""" + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter) + parser.add_argument('recipe', help='Path/name of yaml pilot recipe file') + parser.add_argument('-c', + '--config-file', + default=os.path.join(os.environ["HOME"], '.esmvaltool', + 'config-user.yml'), + help='User configuration file') + + parser.add_argument('-o', + '--output', + default=os.path.join(os.getcwd(), + 'recipe_autofilled.yml'), + help='Output recipe, default recipe_autofilled.yml') + + args = parser.parse_args() + return args + + +def _get_timefiltered_files(recipe_dict, exps_list, cmip_era): + """Obtain all files that correspond to requested time range.""" + # multiple experiments allowed, complement data from each exp + if len(exps_list) > 1: + files = [] + for exp in exps_list: + recipe_dict["exp"] = exp + files.extend(list_all_files(recipe_dict, cmip_era)) + files = filter_years(files, + recipe_dict["start_year"], + recipe_dict["end_year"], + overlap=True) + recipe_dict["exp"] = exps_list + + else: + files = list_all_files(recipe_dict, cmip_era) + files = filter_years(files, recipe_dict["start_year"], + recipe_dict["end_year"]) + + return files + + +def run(): + """Run the `recipe_filler` tool. 
Help in __doc__ and via --help.""" + # Get arguments + args = get_args() + input_recipe = args.recipe + output_recipe = args.output + cmip_eras = ["CMIP5", "CMIP6"] + + # read the config file + config_user = read_config_user_file(args.config_file, + 'recipe_filler', + options={}) + + # configure logger + run_dir = os.path.join(config_user['output_dir'], 'recipe_filler') + if not os.path.isdir(run_dir): + os.makedirs(run_dir) + log_files = configure_logging(output_dir=run_dir, + console_log_level=config_user['log_level']) + logger.info(HEADER) + logger.info("Using user configuration file: %s", args.config_file) + logger.info("Using pilot recipe file: %s", input_recipe) + logger.info("Writing filled out recipe to: %s", output_recipe) + log_files = "\n".join(log_files) + logger.info("Writing program log files to:\n%s", log_files) + + # check config user file + _check_config_file(config_user) + + # parse recipe + with open(input_recipe, 'r') as yamlfile: + yamlrecipe = yaml.safe_load(yamlfile) + _check_recipe(yamlrecipe) + recipe_dicts = _parse_recipe_to_dicts(yamlrecipe) + + # Create a list of additional_datasets for each diagnostic/variable. + additional_datasets = {} + for (diag, variable), recipe_dict in recipe_dicts.items(): + logger.info("Looking for data for " + "variable %s in diagnostic %s", variable, diag) + new_datasets = [] + if "short_name" not in recipe_dict: + recipe_dict['short_name'] = variable + elif recipe_dict['short_name'] == "*": + recipe_dict['short_name'] = variable + + # adjust cmip era if needed + if recipe_dict['project'] != "*": + cmip_eras = [recipe_dict['project']] + + # get datasets depending on user request; always a list + datasets = _get_datasets(recipe_dict, cmip_eras) + + # get experiments depending on user request; always a list + exps_list = _get_exp(recipe_dict) + + # loop through datasets + for dataset in datasets: + recipe_dict['dataset'] = dataset + logger.info("Seeking data for dataset: %s", dataset) + for cmip_era in cmip_eras: + files = _get_timefiltered_files(recipe_dict, exps_list, + cmip_era) + + # assemble in new recipe + add_datasets = [] + for fn in sorted(files): + fn_dir = os.path.dirname(fn) + logger.info("Data directory: %s", fn_dir) + out = _file_to_recipe_dataset(fn, cmip_era, recipe_dict) + logger.info("New recipe entry: %s", out) + if out is None: + continue + add_datasets.append(out) + new_datasets.extend(add_datasets) + additional_datasets[(diag, variable, cmip_era)] = \ + _remove_duplicates(new_datasets) + + # add datasets to recipe as additional_datasets + shutil.copyfile(input_recipe, output_recipe, follow_symlinks=True) + _add_datasets_into_recipe(additional_datasets, output_recipe) + logger.info("Finished recipe filler. Go get some science done now!") + + +if __name__ == "__main__": + run() diff --git a/esmvaltool/utils/rose-cylc/esmvt_rose_wrapper.py b/esmvaltool/utils/rose-cylc/esmvt_rose_wrapper.py deleted file mode 100644 index ba929fea36..0000000000 --- a/esmvaltool/utils/rose-cylc/esmvt_rose_wrapper.py +++ /dev/null @@ -1,265 +0,0 @@ -r""" -Install and run u-bd684 - the esmvaltool rose-cylc suite. 
- -Usage: ------- --c --config-file: [REQUIRED] user specific configuration file; --r --recipe-file: [REQUIRED] single or multiple (space-sep) recipe files; --d --main-dir: [OPTIONAL] main run dir name (full path); - defaults to $HOME/ESMVALTOOL_ROSE; --s --suite-dir [OPTIONAL] u-bd684 dir full path; can be set by user; - defaults to $HOME/u-bd684; --n --no-submit [OPTIONAL] if specified, will not submit suite to cylc; --l --log-level: [OPTIONAL] log level, default=info - -Example: --------- -python esmvt_rose_wrapper.py -c /home/users/valeriu/input/config-user.yml \ - -r /home/users/valeriu/recipes/recipe1.yml \ - /home/users/valeriu/recipes/recipe2.yml \ - -d /home/users/valeriu/esmvat_WRAPPER \ - -s /home/users/valeriu/u-bd684/ \ - -n - -Base suite: ------------ -The base suite to run esmvaltool via rose-cylc is u-bd684; for now (Nov 2018) -the base suite comes with esmvaltool package by default; this suite will be, -in the near future, included in the Rose repository. The location inside -esmvaltool is standardized to: - -$ESMVALTOOL/esmvaltool/utils/rose-cylc/ - -When rose (exec.) will be working with python3.x, this location will become -default and the pipeline will aceess it independently of user, unless, of -course the user will specify -s $SUITE_LOCATION; until then the user needs -to grab a copy of it in $HOME or specify the default location via -s option. - -Environment: ------------- -We will move to a unified and centrally-installed esmvaltool environment; -until then, the user will have to alter the env_setup script: - -u-bd684/app/esmvaltool/env_setup - -with the correct pointers to esmvaltool installation, if desired; -NOTE that the defaults are working pointers for an install on CEDA-Jasmin. - -To be able to submit to cylc, you need to have the /metomi/ suite in path -AND use a python2.7 environment. Use the Jasmin-example below for guidance. - -Jasmin-example: ---------------- -This shows how to interact with rose-cylc and run esmvaltool under cylc -using this script: - -export PATH=/apps/contrib/metomi/bin:$PATH -export PATH=/home/users/valeriu/miniconda2/bin:$PATH -mkdir esmvaltool_rose -cd esmvaltool_rose -cp $esmvaltool/utils/rose-cylc/esmvt_rose_wrapper.py . -[get u-abd684 in $HOME, get your recipes and the config] -python esmvt_rose_wrapper.py -c config-user.yml \ --r recipe_autoassess_stratosphere.yml recipe_OceanPhysics.yml \ --d $HOME/esmvaltool_rose - -Note that you need to pass FULL PATHS to cylc, no . or .. because all -operations are done remotely on different nodes. - -A practical actual example of running the tool can be found on JASMIN: -/home/users/valeriu/esmvaltool_rose -There you will find the run shell: run_example, as well as an example -how to set the configuration file. A copy of u-bd684 is always located -in /home/users/valeriu/roses/u-bd684. 
- -Contact: --------- -author: Valeriu Predoi (UREAD, valeriu.predoi@ncas.ac.uk) -""" -import argparse -import logging -import os -import sys -import subprocess -import shutil -from distutils.version import LooseVersion -# configparser has changed names in python 3.x -if LooseVersion(sys.version) < LooseVersion("3.0"): - import ConfigParser -else: - import configparser as ConfigParser -import yaml # noqa - - -# set up logging -logger = logging.getLogger(__name__) - -# print the header -HEADER = r""" -______________________________________________________________________ - - ESMValTool Rose-Cylc Wrapper -______________________________________________________________________ - -""" + __doc__ - - -def get_args(): - """Define the `esmvaltool` command line.""" - # parse command line args - parser = argparse.ArgumentParser( - description=HEADER, - formatter_class=argparse.RawDescriptionHelpFormatter) - parser.add_argument( - '-c', - '--config-file', - default=os.path.join(os.path.dirname(__file__), 'config-user.yml'), - help='Configuration file') - parser.add_argument( - '-r', - '--recipe-files', - type=str, - nargs='+', - help='Recipe files (list or single file)') - parser.add_argument( - '-d', - '--main-dir', - default=os.path.join(os.environ['HOME'], 'ESMVALTOOL_ROSE'), - help='Main analysis directory; default to $HOME/ESMVALTOOL_ROSE') - parser.add_argument( - '-s', - '--suite-dir', - default=os.path.join(os.environ['HOME'], 'u-bd684'), - help='u-bd684 suite directory; default to $HOME/u-bd684') - parser.add_argument( - '-n', - '--no-submit', - action='store_true', - help="Flag to NOT submit the Rose suite.") - parser.add_argument( - '-l', - '--log-level', - default='info', - choices=['debug', 'info', 'warning', 'error']) - args = parser.parse_args() - return args - - -def _set_logger(logging, out_dir, log_file, log_level): - # set logging for screen and file output - root_logger = logging.getLogger() - out_fmt = "%(asctime)s %(levelname)-8s %(name)s,%(lineno)s\t%(message)s" - logging.basicConfig( - filename=os.path.join(out_dir, log_file), - filemode='a', - format=out_fmt, - datefmt='%H:%M:%S', - level=logging.DEBUG) - root_logger.setLevel(log_level.upper()) - logfmt = logging.Formatter(out_fmt) - console_handler = logging.StreamHandler() - console_handler.setFormatter(logfmt) - root_logger.addHandler(console_handler) - - -def read_yaml_file(yaml_file): - """Read recipe into a dictionary.""" - with open(yaml_file, 'r') as yfile: - loaded_file = yaml.safe_load(yfile) - return loaded_file - - -def _setup_work(rose_config_template, recipe_files, - config_file, main_dir, default_suite, log_level): - """Write the new rose conf file per suite.""" - # Build the ConfigParser object - Config = ConfigParser.ConfigParser() - Config.optionxform = str - Config.read(rose_config_template) - - # set the main work dir - if not os.path.exists(main_dir): - os.makedirs(main_dir) - - # assemble work tree - if not os.path.isfile(os.path.join(main_dir, config_file)): - shutil.copy2(config_file, main_dir) - if not os.path.exists(os.path.join(main_dir, 'recipes')): - os.makedirs(os.path.join(main_dir, 'recipes')) - if not os.path.exists(os.path.join(main_dir, - os.path.basename(config_file))): - shutil.copy2(config_file, main_dir) - recipes_field = [] - for recipe in recipe_files: - if not os.path.exists(os.path.join(main_dir, 'recipes', - os.path.basename(recipe))): - shutil.copy2(recipe, os.path.join(main_dir, 'recipes')) - recipes_field.append(os.path.basename(recipe).strip('.yml')) - rose_suite = 
os.path.join(main_dir, 'u-bd684') - if os.path.exists(rose_suite): - shutil.rmtree(rose_suite) - shutil.copytree(default_suite, rose_suite) - out_dir = os.path.join(main_dir, 'output') - if not os.path.exists(out_dir): - os.makedirs(out_dir) - - # set logging - _set_logger(logging, out_dir, 'setup.log', log_level) - logger.info(HEADER) - - # start logging - logger.info("Main working directory: %s", main_dir) - logger.info("Using Rose-Cylc suite base: %s", default_suite) - logger.info("Output and logs written to: %s", out_dir) - logger.info("Creating rose suite directories...") - logger.info("Use rose-suite.conf template %s", rose_config_template) - logger.info("Use user config file %s", config_file) - - # write the file - Config.set('jinja2:suite.rc', 'INPUT_DIR', - '"' + main_dir + '"') - Config.set('jinja2:suite.rc', 'OUTPUT_DIR', '"' + out_dir + '"') - Config.set('jinja2:suite.rc', 'RECIPES', str(recipes_field)) - with open(os.path.join(rose_suite, 'rose-suite.conf'), 'w') as r_c: - logger.info("Writing rose-suite.conf file %s", - os.path.join(rose_suite, 'rose-suite.conf')) - Config.write(r_c) - - return rose_suite - - -def _run_suite(suite): - """Run the mip_convert suite.""" - os.chdir(suite) - logger.info("Submitting suite from %s", suite) - proc = subprocess.Popen(["rose", "suite-run"], stdout=subprocess.PIPE) - out, err = proc.communicate() - logger.info("Rose communications: %s %s", str(out), str(err)) - - -def main(): - """Run the the meat of the code.""" - logger.info("Running main function...") - args = get_args() - # rose suite default location - if args.suite_dir: - default_suite = args.suite_dir - rose_config_template = os.path.join(default_suite, "rose-suite.conf") - - # get command line arguments - recipe_files = args.recipe_files - config_file = args.config_file - main_dir = args.main_dir - log_level = args.log_level - - # setup rose suite - run_rose = _setup_work(rose_config_template, recipe_files, - config_file, main_dir, default_suite, log_level) - - # submit to cylc - if not args.no_submit: - _run_suite(run_rose) - - -if __name__ == '__main__': - main() diff --git a/esmvaltool/utils/testing/recipe_settings/install.sh.template b/esmvaltool/utils/testing/recipe_settings/install.sh.template new file mode 100644 index 0000000000..66db6026e2 --- /dev/null +++ b/esmvaltool/utils/testing/recipe_settings/install.sh.template @@ -0,0 +1,23 @@ +#!/bin/bash +#BSUB -q par-single +#BSUB -o %J.stdout.txt +#BSUB -e %J.stderr.txt +#BSUB -W 1:00 +#BSUB -R "rusage[mem=1000]" +#BSUB -M 1500 +#BSUB -n 16 + +set -eo pipefail +wget -nv https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh +bash Miniconda3-latest-Linux-x86_64.sh -b -p conda +. conda/etc/profile.d/conda.sh +conda activate base +conda update -y conda +git clone --depth 1 --branch {branch} https://github.com/ESMValGroup/ESMValTool +cd ESMValTool +# TODO: add option to merge with development branch +conda env create --name esmvaltool +conda activate esmvaltool +pip install -e . 
+Rscript esmvaltool/install/R/setup.R +julia esmvaltool/install/Julia/setup.jl diff --git a/esmvaltool/utils/testing/recipe_settings/install_expand_run.py b/esmvaltool/utils/testing/recipe_settings/install_expand_run.py new file mode 100644 index 0000000000..373e62fd52 --- /dev/null +++ b/esmvaltool/utils/testing/recipe_settings/install_expand_run.py @@ -0,0 +1,180 @@ +"""Tool for testing ESMValTool.""" +import argparse +import copy +import os +import subprocess +from itertools import product +from pathlib import Path + +import yaml + + +def absolute(path): + """Make path into an absolute Path object.""" + return Path(os.path.abspath(path)) + + +def linear_expand(filename, cwd): + """Create recipes from filename using the recipe options provided. + + Uses one option at a time. + """ + filename = Path(filename) + yield filename + + options_file = Path(__file__).parent / 'options.yml' + options = yaml.safe_load(options_file.read_text()).get(filename.name) + + recipe = yaml.safe_load(filename.read_text()) + + for key in options or {}: + for value in options[key]: + outrecipe = copy.deepcopy(recipe) + write_recipe = False + for diag_name, diagnostic in recipe['diagnostics'].items(): + for script_name, script in diagnostic['scripts'].items(): + if key in script and script[key] != value: + write_recipe = True + outrecipe['diagnostics'][diag_name]['scripts'][ + script_name][key] = value + if write_recipe: + outfile = cwd / Path('{}_{}_{}.yml'.format( + filename.stem, key, + str(value).replace(os.sep, '-'))) + print("Creating", outfile) + outfile.write_text(yaml.safe_dump(outrecipe)) + yield outfile + + +def matrix_expand(filename, cwd, max_recipes=100): + """Create recipes from filename using the recipe options provided. + + Tries all possible combinations of options, but stops at max_recipes. 
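The exhaustive expansion that matrix_expand performs is driven by itertools.product; a minimal stand-alone example of the same pattern, with invented option names:

from itertools import product

options = {"season": ["DJF", "JJA"], "numclus": [2, 3]}
keys = list(options)
for values in product(*[options[key] for key in keys]):
    settings = dict(zip(keys, values))
    print(settings)
# Prints all four combinations, e.g. {'season': 'DJF', 'numclus': 2}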
+ """ + filename = Path(filename) + options_file = Path(__file__).parent / 'options.yml' + options = yaml.safe_load(options_file.read_text())[filename.name] + + recipe = yaml.safe_load(filename.read_text()) + + n_recipes = 0 + for diag_name, diagnostic in recipe['diagnostics'].items(): + for script_name, script in diagnostic['scripts'].items(): + outrecipe = copy.deepcopy(recipe) + keys = list(options) + for values in product(*[options[k] for k in keys]): + # Avoid creating a huge number of recipes + n_recipes += 1 + if n_recipes > max_recipes: + print("Warning: stopping early at", max_recipes, "recipes") + return + + outfile = filename.stem + for i, key in enumerate(keys): + value = values[i] + if key in script: + outrecipe['diagnostics'][diag_name]['scripts'][ + script_name][key] = value + outfile += '_' + str(key) + '_' + str(value).replace( + os.sep, '-') + outfile = cwd / Path(outfile + '.yml') + print("Creating", outfile) + outfile.write_text(yaml.safe_dump(outrecipe)) + yield outfile + + +def create_script(recipe, config_file, cwd): + """Submit a job for recipe.""" + job_template = Path(__file__).parent / 'job.sh.template' + job = job_template.read_text().format( + recipe=recipe, + config=config_file, + ) + + jobfile = cwd / Path(recipe.stem + '_' + job_template.stem) + jobfile.write_text(job) + return jobfile + + +def run(script, cwd, method=''): + """Run script in cwd using method.""" + if method == 'bsub': + print("Submitting", script, 'in', cwd) + with open(script) as stdin: + subprocess.run('bsub', stdin=stdin, cwd=cwd, check=True) + elif method == 'dry-run': + print("Would run", script, 'in', cwd) + else: + print("Running", script, 'in', cwd) + subprocess.run(['bash', str(script)], cwd=cwd, check=True) + + +def install(args): + """Install ESMValTool from GitHub.""" + cwd = absolute(args.directory) + cwd.mkdir(parents=True, exist_ok=True) + script_template = Path(__file__).parent / 'install.sh.template' + script = script_template.read_text().format(branch=args.branch) + + script_file = cwd / Path(args.branch + '_' + script_template.stem) + script_file.write_text(script) + run(script_file, cwd=cwd, method=args.run_method) + + +def schedule(args): + """Create recipes with the options provided and schedule.""" + cwd = absolute(args.directory) + expand = matrix_expand if args.matrix else linear_expand + for input_recipe in args.recipes: + input_recipe = absolute(input_recipe) + for recipe in expand(input_recipe, cwd=cwd): + script_file = create_script( + recipe, + config_file=absolute(args.esmvaltool_config_file), + cwd=cwd, + ) + run(script_file, cwd=cwd, method=args.run_method) + + +def main(): + """Run the program.""" + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument('-d', + '--directory', + default='.', + help='Use as a working directory.') + parser.add_argument('-r', + '--run-method', + default='immediate', + choices=['immediate', 'bsub', 'dry-run'], + help='Choose an execution method.') + subparsers = parser.add_subparsers() + parser.set_defaults(function=lambda _: parser.print_help()) + + install_parser = subparsers.add_parser('install') + install_parser.add_argument('branch', + help='Name of the GitHub branch to install.') + install_parser.set_defaults(function=install) + + schedule_parser = subparsers.add_parser('schedule') + schedule_parser.add_argument('recipes', + nargs='+', + help='Path to the recipe files to run.') + schedule_parser.add_argument( + '-c', + '--esmvaltool-config-file', + help='Path to the ESMValTool configuration file.') + 
schedule_parser.add_argument( + '-m', + '--matrix', + action='store_true', + help=('Use all possible combinations of options instead of a single ' + 'option at a time.')) + schedule_parser.set_defaults(function=schedule) + + args = parser.parse_args() + args.function(args) + + +if __name__ == '__main__': + main() diff --git a/esmvaltool/utils/testing/recipe_settings/job.sh.template b/esmvaltool/utils/testing/recipe_settings/job.sh.template new file mode 100644 index 0000000000..90fcba4259 --- /dev/null +++ b/esmvaltool/utils/testing/recipe_settings/job.sh.template @@ -0,0 +1,15 @@ +#!/bin/bash +#BSUB -q par-single +#BSUB -o %J.stdout.txt +#BSUB -e %J.stderr.txt +#BSUB -W 2:00 +#BSUB -R "rusage[mem=10000]" +#BSUB -M 15000 +#BSUB -n 16 + +set -eo pipefail + +. conda/etc/profile.d/conda.sh +conda activate esmvaltool + +esmvaltool -c "{config}" "{recipe}" --skip-nonexistent diff --git a/esmvaltool/utils/testing/recipe_settings/options.yml b/esmvaltool/utils/testing/recipe_settings/options.yml new file mode 100644 index 0000000000..3f3bff9d4c --- /dev/null +++ b/esmvaltool/utils/testing/recipe_settings/options.yml @@ -0,0 +1,357 @@ +--- +recipe_consecdrydays.yml: + frlim: # days? + - 2.5 + - 5 + - 10 + plim: # mm + - 0.5 + - 1 + - 2 + +recipe_ensclus.yml: + season: + - DJF + - DJFM + - NDJFM + - JJA + area: + - EAT # Euro-Atlantic + - PNA # Pacific North American + - NH # Northern Hemisphere + - EU # Europe + extreme: + - 60th_percentile + - 75th_percentile + - 90th_percentile + - mean + - maximum + - std + - trend + numclus: # number of clusters + - 2 + - 3 + - 4 + # Cluster analysis is applied on a number of PCs such as they explain + # Either set perc or numpcs: + perc: # 'perc' of total variance + - 70 + - 80 + - 90 + numpcs: # number of PCs + - 0 + - 3 + max_plot_panels: + - 72 + - 2 + +recipe_shapeselect.yml: + shapefile: + - MotalaStrom.shp + - Elbe.shp + - multicatchment.shp + - testfile.shp + - Thames.shp + weighting_method: + - mean_inside + - representative + +recipe_capacity_factor.yml: + +recipe_combined_indices.yml: + region: + - Nino3 + - Nino3.4 + - Nino4 + - NAO + - SOI + running_mean: + - 5 + - 3 + moninf: + - 1 + - 12 + - null + monsup: + - 3 + - 5 + standardized: + - false + - true + +recipe_extreme_index.yml: + +recipe_diurnal_temperature_index.yml: + +recipe_heatwaves_coldwaves.yml: + # test result: works with bcc-csm1-1 from badc + quantile: + - 0.8 + - 0.9 # quantile defining the exceedance/non-exceedance threshold + - 0.7 + min_duration: + - 5 # Min duration of a heatwave/coldwave event + - 30 + operator: + - '>' # or lessthan + - '<' + season: + - summer + - winter + +recipe_insurance_risk_index.yml: + metric: + - t90p + - t10p + - cdd + - rx5day + - Wx + +recipe_modes_of_variability.yml: + plot_type: + - rectangular + - polar + ncenters: + - 2 + - 3 + - 4 + detrend_order: + - 0 + - 1 + - 2 + cluster_method: + - kmeans + - hierarchical + EOFS: + - true + - false + frequency: + # Select a month (format: JAN, FEB, ...) 
or should work with season also, but not yet (format: JJA, SON, MAM, DJF)
+    - JAN
+    - FEB
+    - MAR
+    - APR
+    - MAY
+    - JUN
+    - JUL
+    - AUG
+    - SEP
+    - OCT
+    - NOV
+    - DEC
+    - JJA
+    - SON
+    - MAM
+    - DJF
+
+recipe_multimodel_products.yml:
+  # Parameters for Season() function
+  moninf:
+    - 6
+    - 2
+  monsup:
+    - 6
+    - 9
+    - null
+  agreement_threshold:
+    - 80
+    - 50
+  # Time series plot options
+  running_mean:
+    - 5  # Length of running mean to use for the time series plot
+    - 1
+  # Timeseries plot
+  time_series_plot:
+    # Either single or maxmin (plot the mean with/without shading between
+    # the max and min)
+    - single
+    - maxmin
+
+recipe_toymodel.yml:
+  beta:
+    - 0.3
+    - 0.5
+    - 2
+  number_of_members:
+    - 2
+    - 5
+    - 3
+
+recipe_miles_block.yml:
+  seasons:
+    - DJF
+    - MAM
+    - JJA
+    - SON
+    - ALL
+    - Jan_Feb_Mar
+
+recipe_miles_eof.yml:
+  seasons:
+    - DJF
+    - MAM
+    - JJA
+    - SON
+    - ALL
+    - Jan_Feb_Mar
+  teles:
+    - NAO
+    - AO
+    - PNA
+    - "0_10_80_100"  # custom area as "lon1_lon2_lat1_lat2"
+    - "150_180_0_20"
+    - "-10_10_40_50"
+
+recipe_miles_regimes.yml:
+
+recipe_rainfarm.yml:
+  slope:  # spatial spectral slope (set to 0 to compute from large scales)
+    - 1.7
+    - 0
+  nens:  # number of ensemble members to be calculated
+    - 2
+    - 3
+    - 5
+  nf:  # subdivisions for downscaling
+    - 8
+    - 3
+  conserv_glob:  # conserve precipitation over full domain (choose either glob or smooth, glob has priority)
+    - false
+    - true
+  conserv_smooth:  # conserve precipitation using convolution (if neither is chosen box conservation is used)
+    - false
+    - true
+  weights_climo:  # orographic weights: set to false or full path to a fine-scale precipitation climatology file
+    - false
+    - wc2.0_30s_prec.nc
+
+recipe_zmnam.yml:
+
+recipe_quantilebias.yml:
+  perc_lev:
+    - 50
+    - 75
+
+recipe_hyint.yml:
+  norm_years:  # first and last year of reference normalization period to be used for normalized indices
+    - [2004, 2006]
+  selfields:  # indices to be plotted. Select one or more fields from the following list (order-sensitive) as a numerical index:
+    - ["pa_norm"]
+    - ["hyint"]
+    - ["int_norm"]
+    - ["r95_norm"]
+    - ["wsl_norm"]
+    - ["dsl_norm"]
+    - ["int"]
+    - ["dsl"]
+    - ["wsl"]
+    - ["pa_norm", "dsl", "wsl", "r95_norm", "wsl_norm", "int"]
+    - ["pa_norm", "hyint", "int_norm", "r95_norm", "wsl_norm", "dsl_norm", "int", "dsl", "wsl"]
+  selregions:  # Select regions for timeseries and maps from the following list as a numerical index:
+    - [GL]    # World
+    - [GL60]  # World60 (60S/60N)
+    - [TR]    # Tropics (30S/30N)
+    - [SA]    # South America
+    - [AF]    # Africa
+    - [NA]    # North America
+    - [IN]    # India
+    - [EU]    # Europe
+    - [EA]    # East-Asia
+    - [AU]    # Australia
+    - [GL60, AU, EU, AF]
+  plot_type:  # type of figures to be plotted. Select one or more from:
+    - [1]   # lon/lat maps per individual field/exp/multi-year mean
+    - [2]   # lon/lat maps per individual field exp-ref-diff/multi-year mean
+    - [3]   # lon/lat maps multi-field/exp-ref-diff/multi-year mean
+    - [11]  # timeseries over required individual region/exp
+    - [12]  # timeseries over multiple regions/exp
+    - [13]  # timeseries with multiple models
+    - [14]  # summary trend coefficients multiple regions
+    - [15]  # summary trend coefficients multiple models
+    - [2, 3, 12, 13, 14, 15]
+  rgrid:
+    - false
+    - r100x50
+    - REF
+  npancol:
+    - 2
+    - 4
+  npanrow:
+    - 3
+    - 5
+  autolevels:
+    - true
+    - false
+  autolevels_scale:
+    - 1
+    - 1.7
+  autolevels_scale_t:
+    - 1.5
+    - 1
+  oplot_grid:
+    - false
+    - true
+  boxregion:
+    - false
+    - -10
+    - 3
+  removedesert:
+    - false
+    - true
+  weight_tseries:
+    - true
+    - false
+  trend_years:
+    - false
+    - [2002, 2005]
+  add_trend:
+    - true
+    - false
+  add_trend_sd:
+    - true
+    - false
+  add_trend_sd_shade:
+    - true
+    - false
+  add_tseries_lines:
+    - true
+    - false
+  add_zeroline:
+    - true
+    - false
+  trend_years_only:
+    - true
+    - false
+  scale100years:
+    - true
+    - false
+  scalepercent:
+    - true
+    - false
+
+recipe_smpi.yml:
+  plot_type:
+    - cycle
+    - zonal
+    - latlon
+    - cycle_latlon
+  time_avg:
+    - annualclim
+    - seasonalclim
+    - monthlyclim
+  region:
+    - global
+    - trop
+    - nhext
+    - shext
+    - nhtrop
+    - shtrop
+    - nh
+    - sh
+    - nhmidlat
+    - shmidlat
+    - nhpolar
+    - shpolar
+    - eq
+  smpi_n_bootstrap:
+    - 80
+    - 120
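A note on scale, since this options.yml feeds the expansion script earlier in this diff: the default mode presumably generates one test recipe per single option value, while `--matrix` (per its help text) generates one per combination of values, which is why `matrix_expand` caps output at `max_recipes`. A minimal sketch of the difference, not part of the diff, using a made-up subset of the `recipe_ensclus.yml` options:

```python
# Editor's illustration: rough recipe counts for one diagnostic script.
# The option values below are hypothetical.
from math import prod

options = {
    'season': ['DJF', 'DJFM', 'NDJFM', 'JJA'],
    'extreme': ['75th_percentile', 'mean', 'trend'],
    'numclus': [2, 3, 4],
}

# One recipe per single option value (default, one change at a time):
linear = sum(len(values) for values in options.values())   # 4 + 3 + 3 = 10

# One recipe per combination of values (--matrix, itertools.product):
matrix = prod(len(values) for values in options.values())  # 4 * 3 * 3 = 36

print(linear, matrix)
```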
diff --git a/esmvaltool/utils/testing/regression/bin/run-esmvaltool.sh b/esmvaltool/utils/testing/regression/bin/run-esmvaltool.sh
new file mode 100755
index 0000000000..878fc3b8f7
--- /dev/null
+++ b/esmvaltool/utils/testing/regression/bin/run-esmvaltool.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+set -eo pipefail
+unset PYTHONPATH
+
+. ~/conda/etc/profile.d/conda.sh
+conda activate esmvaltool_v2.3
+
+esmvaltool run "$1"
diff --git a/esmvaltool/utils/testing/regression/compare.py b/esmvaltool/utils/testing/regression/compare.py
new file mode 100644
index 0000000000..a4ee33c5d3
--- /dev/null
+++ b/esmvaltool/utils/testing/regression/compare.py
@@ -0,0 +1,502 @@
+"""Compare recipe runs to previous runs."""
+from __future__ import annotations
+
+import argparse
+import difflib
+import filecmp
+import fnmatch
+import os
+import re
+import sys
+from pathlib import Path
+from textwrap import indent
+from typing import Iterator, Optional
+
+import numpy as np
+import xarray as xr
+from PIL import Image
+
+try:
+    import imagehash
+except ImportError:
+    print("Please `pip install imagehash`")
+
+IGNORE_FILES: tuple[str, ...] = (
+    '*_citation.bibtex',
+    '*_data_citation_info.txt',
+    '*_info.ncl',
+    '*_provenance.xml',
+    'metadata.yml',
+)
+"""Files to ignore when comparing results."""
+
+IGNORE_GLOBAL_ATTRIBUTES: tuple[str, ...] = (
+    # see https://github.com/ESMValGroup/ESMValCore/issues/1657
+    'auxiliary_data_dir',
+    'creation_date',
+    'history',
+    'provenance',
+    'software',
+    # see https://github.com/ESMValGroup/ESMValCore/issues/1657
+    'version',
+)
+"""Global NetCDF attributes to ignore when comparing."""
+
+IGNORE_VARIABLE_ATTRIBUTES: tuple[str, ...] = IGNORE_GLOBAL_ATTRIBUTES
+"""Variable NetCDF attributes to ignore when comparing."""
+
+IGNORE_VARIABLES: tuple[str, ...] = (
+    # see https://github.com/ESMValGroup/ESMValTool/issues/2714
+    'temp_list',  # used by perfmetrics diagnostics to save absolute paths
+)
+"""Variables in NetCDF files to ignore when comparing."""
+
+COMPARE_SUBDIRS: tuple[str, ...] = (
+    'plots',
+    'preproc',
+    'work',
+)
+"""Subdirectories of a recipe run to compare."""
+
+RECIPE_DIR_DATETIME_PATTERN: str = r'[0-9]{8}_[0-9]{6}'
+"""Regex pattern for datetime in recipe output directory."""
+
+RECIPE_DIR_PATTERN: str = (
+    r'recipe_(?P<recipe_name>[^\s]*?)_' + RECIPE_DIR_DATETIME_PATTERN
+)
+"""Regex pattern for recipe output directories."""
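A quick illustration (an editor's sketch, not part of the diff) of how `RECIPE_DIR_PATTERN` is consumed further down, where `get_recipe_name_from_dir` reads `match['recipe_name']`; the directory name here is invented:

```python
import re

RECIPE_DIR_DATETIME_PATTERN = r'[0-9]{8}_[0-9]{6}'
RECIPE_DIR_PATTERN = (
    r'recipe_(?P<recipe_name>[^\s]*?)_' + RECIPE_DIR_DATETIME_PATTERN
)

# A typical recipe output directory name (hypothetical):
match = re.search(RECIPE_DIR_PATTERN, 'recipe_python_20220202_222222')
print(match['recipe_name'])  # -> python
```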
+
+
+def as_txt(msg: list[str]) -> str:
+    """Convert lines of text to indented text."""
+    return indent('\n'.join(msg), " ")
+
+
+def diff_attrs(ref: dict, cur: dict) -> str:
+    """Compare two dicts and describe the difference."""
+    msg = []
+    for key in ref:
+        if key not in cur:
+            msg.append(f"missing attribute '{key}'")
+        elif not np.array_equal(ref[key], cur[key]):
+            msg.append(f"value of attribute '{key}' is different: "
+                       f"expected '{ref[key]}' but found '{cur[key]}'")
+    for key in cur:
+        if key not in ref:
+            msg.append(f"extra attribute '{key}' with value '{cur[key]}'")
+    msg.sort()
+    txt = as_txt(msg)
+    if txt:
+        txt = "attributes are different:\n" + txt
+    return txt
+
+
+def diff_array(ref: np.ndarray, cur: np.ndarray) -> str:
+    """Compare two arrays and describe the difference."""
+    msg = []
+    if cur.shape != ref.shape:
+        msg.append("data has different shape")
+    elif np.issubdtype(ref.dtype, np.inexact) and np.issubdtype(
+            cur.dtype, np.inexact):
+        if not np.array_equal(ref, cur, equal_nan=True):
+            if np.allclose(ref, cur, equal_nan=True):
+                msg.append("data is almost but not quite the same")
+            else:
+                msg.append("data is different")
+    elif not np.array_equal(ref, cur):
+        msg.append("data is different")
+    return as_txt(msg)
+
+
+def diff_dataarray(ref: xr.DataArray, cur: xr.DataArray, type_: str) -> str:
+    """Compare two xarray DataArrays and describe the difference."""
+    msg = []
+    if not cur.identical(ref):
+        msg.append(f"{type_} '{cur.name}' is not identical to reference")
+    if diff := diff_array(ref.values, cur.values):
+        msg.append(diff)
+    if diff := diff_attrs(ref.attrs, cur.attrs):
+        msg.append(indent(diff, " "))
+    return as_txt(msg)
+
+
+def diff_dataset(ref: xr.Dataset, cur: xr.Dataset) -> str:
+    """Compare two xarray Datasets and describe the difference."""
+    msg = []
+    if diff := diff_attrs(ref.attrs, cur.attrs):
+        msg.append(diff)
+
+    for var in ref:
+        if var not in cur:
+            msg.append(f"missing variable '{var}'")
+        else:
+            if diff := diff_dataarray(ref[var], cur[var], "variable"):
+                msg.append(diff)
+
+            for coord in ref[var].coords:
+                if coord not in cur.coords:
+                    msg.append(f"missing coordinate '{coord}'")
+                elif diff := diff_dataarray(ref[var].coords[coord],
+                                            cur[var].coords[coord],
+                                            'coordinate'):
+                    msg.append(diff)
+            for coord in cur[var].coords:
+                if coord not in ref.coords:
+                    msg.append(f"extra coordinate '{coord}'")
+
+    for var in cur:
+        if var not in ref:
+            msg.append(f"extra variable '{var}'")
+
+    return '\n'.join(msg)
+
+
+def adapt_attributes(attributes: dict, ignore_attributes: tuple[str, ...],
+                     recipe_name: str) -> dict:
+    """Remove ignored attributes and make absolute paths relative."""
+    new_attrs = {}
+
+    for (attr, attr_val) in attributes.items():
+
+        # Ignore attributes
+        if attr in ignore_attributes:
+            continue
+
+        # Convert absolute paths to relative paths using the recipe name
+        if isinstance(attr_val, str):
+            recipe_dir = get_recipe_dir_from_str(attr_val, recipe_name)
+
+            # If recipe_dir is present in attribute value, assume this
+            # attribute value is a path and convert it to a relative path
+            if recipe_dir is not None:
+                attr_val = Path(attr_val).relative_to(recipe_dir)
+
+        new_attrs[attr] = attr_val
+
+    return new_attrs
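For clarity (an editor's sketch, not part of the diff): the path handling in `adapt_attributes` boils down to trimming the run-specific prefix found by `get_recipe_dir_from_str` (defined further down), so that attribute values from two different runs become comparable. The paths here are invented:

```python
from pathlib import Path

# A hypothetical attribute value and the recipe run directory found in it:
attr_val = '/scratch/recipe_test_20220202_222222/work/diag/script/cube.nc'
recipe_dir = Path('/scratch/recipe_test_20220202_222222')

# What the attribute is replaced with:
print(Path(attr_val).relative_to(recipe_dir))  # -> work/diag/script/cube.nc
```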
+
+
+def load_nc(filename: Path) -> xr.Dataset:
+    """Load a NetCDF file."""
+    dataset = xr.open_dataset(filename, chunks={}, decode_times=False)
+    recipe_name = get_recipe_name_from_file(filename)
+
+    # Remove ignored variables
+    dataset = dataset.drop_vars(IGNORE_VARIABLES, errors='ignore')
+
+    # Remove ignored attributes and modify attributes that contain paths
+    dataset.attrs = adapt_attributes(dataset.attrs, IGNORE_GLOBAL_ATTRIBUTES,
+                                     recipe_name)
+    for var in dataset:
+        dataset[var].attrs = adapt_attributes(dataset[var].attrs,
+                                              IGNORE_VARIABLE_ATTRIBUTES,
+                                              recipe_name)
+
+    return dataset
+
+
+def debug_nc(reference_file: Path, current_file: Path) -> str:
+    """Find out the differences between two NetCDF files."""
+    ref = load_nc(reference_file)
+    cur = load_nc(current_file)
+
+    if diff := diff_dataset(ref, cur):
+        msg = diff
+    else:
+        msg = "Unknown difference"
+
+    return msg
+
+
+def debug_txt(reference_file: Path, current_file: Path) -> str:
+    """Find out the differences between two text files."""
+    with reference_file.open('rt') as file:
+        ref = file.readlines()
+    with current_file.open('rt') as file:
+        cur = file.readlines()
+
+    msg = difflib.unified_diff(
+        cur,
+        ref,
+        fromfile=str(current_file),
+        tofile=str(reference_file),
+    )
+    return "".join(msg)
+
+
+debug_csv = debug_txt
+debug_html = debug_txt
+debug_grid = debug_txt
+
+
+def compare_nc(reference_file: Path, current_file: Path) -> bool:
+    """Compare two NetCDF files."""
+    ref = load_nc(reference_file)
+    cur = load_nc(current_file)
+
+    return cur.identical(ref)
+
+
+def compare_png(reference_file: Path, current_file: Path) -> bool:
+    """Compare two PNG files."""
+    # Based on:
+    # https://scitools-iris.readthedocs.io/en/latest/developers_guide/contributing_graphics_tests.html
+
+    # Perceptual hash size.
+    hash_size = 16
+    # Maximum perceptual hash hamming distance.
+    max_distance = 2
+
+    with Image.open(reference_file) as img:
+        ref = imagehash.phash(img, hash_size=hash_size)
+    with Image.open(current_file) as img:
+        cur = imagehash.phash(img, hash_size=hash_size)
+
+    distance = ref - cur
+    return distance < max_distance
+
+
+def debug(reference_file: Path, current_file: Path) -> str:
+    """Try to find out why two files are different."""
+    suffix = reference_file.suffix
+    fn_name = f"debug_{suffix[1:]}"
+    if fn_name in globals():
+        debug_fn = globals()[fn_name]
+        msg = debug_fn(reference_file, current_file)
+    else:
+        msg = ""
+    return indent(msg, " ")
+
+
+def files_equal(reference_file: Path, current_file: Path) -> bool:
+    """Compare two files."""
+    suffix = reference_file.suffix[1:].lower()
+    fn_name = f"compare_{suffix}"
+    if fn_name in globals():
+        compare_fn = globals()[fn_name]
+        same = compare_fn(reference_file, current_file)
+    else:
+        same = filecmp.cmp(reference_file, current_file, shallow=False)
+    return same
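The `debug_*`/`compare_*` naming convention above is what makes `debug()` and `files_equal()` work: the file suffix selects a function from the module globals, with a byte-for-byte `filecmp.cmp` fallback. A self-contained sketch of that dispatch pattern (not part of the diff; `compare_txt` is a stand-in, not the real comparison logic):

```python
import filecmp
from pathlib import Path


def compare_txt(reference_file, current_file):
    # Stand-in comparison function, found by name at dispatch time.
    return reference_file.read_text() == current_file.read_text()


def files_equal(reference_file: Path, current_file: Path) -> bool:
    """Pick compare_<suffix> from the module globals, like the code above."""
    fn_name = f"compare_{reference_file.suffix[1:].lower()}"
    if fn_name in globals():
        return globals()[fn_name](reference_file, current_file)
    # No specialised function: fall back to a byte-for-byte comparison.
    return filecmp.cmp(reference_file, current_file, shallow=False)

# files_equal(Path('a.txt'), Path('b.txt')) would dispatch to compare_txt;
# files_equal(Path('a.bin'), Path('b.bin')) would fall back to filecmp.cmp.
```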
+
+
+def compare_files(reference_dir: Path, current_dir: Path, files: list[Path],
+                  verbose: bool) -> list[str]:
+    """Compare files from the reference dir to the current dir."""
+    different = []
+    for file in files:
+        ref_file = reference_dir / file
+        cur_file = current_dir / file
+        if not files_equal(ref_file, cur_file):
+            msg = str(file)
+            if verbose:
+                if info := debug(ref_file, cur_file):
+                    msg += ":\n" + info
+            different.append(msg)
+    return different
+
+
+def compare(reference_dir: Optional[Path], current_dir: Path,
+            verbose: bool) -> bool:
+    """Compare a recipe run to a reference run.
+
+    Returns True if the runs were identical, False otherwise.
+    """
+    recipe_name = get_recipe_name_from_dir(current_dir)
+    print("")
+    print(f"recipe_{recipe_name}.yml: ", end='')
+    if reference_dir is None:
+        print("no reference run found, unable to check")
+        return False
+
+    result = []
+
+    reference_files = find_files(reference_dir)
+    current_files = find_files(current_dir)
+
+    if missing_files := sorted(reference_files - current_files):
+        result.append("Missing files:")
+        result.extend(f" - {f}" for f in missing_files)
+
+    if extra_files := sorted(current_files - reference_files):
+        result.append("Extra files:")
+        result.extend(f" - {f}" for f in extra_files)
+
+    if differing_files := compare_files(
+            reference_dir,
+            current_dir,
+            sorted(reference_files & current_files),
+            verbose,
+    ):
+        result.append("Differing files:")
+        result.extend(indent(f"- {f}", " ") for f in differing_files)
+
+    if not result:
+        print("OK")
+        return True
+
+    result.insert(0, "results differ from reference run")
+    result.insert(1, f"Reference run: {reference_dir}")
+    result.insert(2, f"Current run: {current_dir}")
+    print("\n".join(result))
+    return False
+
+
+def get_recipe_name_from_dir(recipe_dir: Path) -> str:
+    """Extract recipe name from output dir."""
+    recipe_match = re.search(RECIPE_DIR_PATTERN, recipe_dir.stem)
+    return recipe_match['recipe_name']
+
+
+def get_recipe_name_from_file(filename: Path) -> str:
+    """Extract recipe name from arbitrary recipe output file."""
+    # Iterate starting from the root dir to avoid false matches
+    for parent in list(filename.parents)[::-1]:
+        recipe_match = re.search(RECIPE_DIR_PATTERN, str(parent))
+        if recipe_match is not None:
+            return recipe_match['recipe_name']
+    raise ValueError(f"Failed to extract recipe name from file {filename}")
+
+
+def get_recipe_dir_from_str(str_in: str, recipe_name: str) -> Optional[Path]:
+    """Try to extract recipe directory from arbitrary string."""
+    recipe_dir_pattern = (
+        rf'recipe_{recipe_name}_' + RECIPE_DIR_DATETIME_PATTERN
+    )
+    recipe_dir_match = re.search(recipe_dir_pattern, str_in)
+
+    # If recipe directory is not found in string, return None
+    if recipe_dir_match is None:
+        return None
+
+    # If recipe directory is found, return entire parent directory
+    # E.g., for str_in = /root/path/recipe_test_20220202_222222/work/file.nc
+    # return /root/path/recipe_test_20220202_222222
+    # For this, iterate from the right (::-1) through the parents
+    for parent in Path(str_in).parents[::-1]:
+        if recipe_dir_match[0] in str(parent):
+            return parent
+
+    # If no valid parent is found, return str_in
+    # E.g., for str_in = /root/path/recipe_test_20220202_222222 no valid
+    # parents are found; thus, return str_in itself
+    return Path(str_in)
+
+
+def find_files(recipe_dir: Path) -> set[Path]:
+    """Find all files in a recipe run directory."""
+    result: set[Path] = set()
+    for subdir in COMPARE_SUBDIRS:
+        for root, _, found in os.walk(recipe_dir / subdir):
+            files = set(found)
+            for ignore_pattern in IGNORE_FILES:
+                files -= set(fnmatch.filter(files, ignore_pattern))
+            parent = Path(root).relative_to(recipe_dir)
+            for file in files:
+                result.add(parent / file)
+
+    return result
+
+
+def find_successful_runs(dirname: Path, recipe_name: str = '*') -> list[Path]:
+    """Find recipe runs in `dirname`.
+
+    `dirname` can either be a recipe run or a directory containing
+    recipe runs.
+ """ + runs = [] + for recipe_file in sorted( + list(dirname.glob(f"run/recipe_{recipe_name}.yml")) + + list(dirname.glob(f"*/run/recipe_{recipe_name}.yml"))): + recipe_dir = recipe_file.parent.parent + log = recipe_dir / 'run' / 'main_log.txt' + success = log.read_text().endswith('Run was successful\n') + if success: + runs.append(recipe_dir) + return sorted(set(runs)) + + +def find_recipes(reference: Path, + current: list[Path]) -> Iterator[tuple[Path, Optional[Path]]]: + """Yield tuples of current and reference directories.""" + for current_dir in current: + for recipe_dir in find_successful_runs(current_dir): + recipe_name = get_recipe_name_from_dir(recipe_dir) + reference_dirs = find_successful_runs(reference, recipe_name) + if reference_dirs: + reference_dir = reference_dirs[-1] + else: + reference_dir = None + yield (recipe_dir, reference_dir) + + +def main() -> int: + """Compare recipe runs. + + Returns 0 if all recipe runs were successfully compared and + identical to the reference, 1 otherwise. + """ + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument( + 'reference', + default='.', + type=Path, + help='Directory containing results obtained with reference version.', + ) + parser.add_argument( + 'current', + default='.', + nargs='+', + type=Path, + help=("List of recipe results or directories containing such " + "results, obtained with current version."), + ) + parser.add_argument( + '-v', + '--verbose', + action="store_true", + help="Display more information on differences.", + ) + + args = parser.parse_args() + + print("Comparing recipe run(s) in:\n{}".format('\n'.join( + str(f) for f in args.current))) + print(f"to reference in {args.reference}") + fail = [] + success = [] + for current_dir, reference_dir in find_recipes(args.reference, + args.current): + same = compare(reference_dir, current_dir, verbose=args.verbose) + recipe = f"recipe_{get_recipe_name_from_dir(current_dir)}.yml" + if same: + success.append(f"{recipe}:\t{current_dir}") + else: + fail.append(f"{recipe}:\t{current_dir}") + + # Print summary of results to screen + summary = ["", "Summary", "======="] + if success: + summary.extend([ + "", "The following recipe runs were identical to previous runs:", + *success + ]) + if fail: + summary.extend([ + "", "The following recipe runs need to be inspected by a human:", + *fail + ]) + print("\n".join(summary)) + print("") + + if fail: + print(f"Action required: {len(fail)} out of {len(success) + len(fail)}" + " recipe runs need to be inspected by a human.") + else: + print(f"All {len(success)} recipe runs were identical.") + + return int(bool(fail)) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/esmvaltool/utils/testing/regression/example-recipes.rc b/esmvaltool/utils/testing/regression/example-recipes.rc new file mode 100644 index 0000000000..e6661dede4 --- /dev/null +++ b/esmvaltool/utils/testing/regression/example-recipes.rc @@ -0,0 +1,8 @@ +#!jinja2 + +{% set recipes = [ + "examples/recipe_julia.yml", + "examples/recipe_ncl.yml", + "examples/recipe_python.yml", + "examples/recipe_r.yml" +] %} diff --git a/esmvaltool/utils/testing/regression/recipes.rc b/esmvaltool/utils/testing/regression/recipes.rc new file mode 100644 index 0000000000..5eff6a8d73 --- /dev/null +++ b/esmvaltool/utils/testing/regression/recipes.rc @@ -0,0 +1,114 @@ +#!jinja2 + +{% set recipes = [ + "recipe_albedolandcover.yml", + "recipe_anav13jclim.yml", + "recipe_arctic_ocean.yml", + "recipe_autoassess_landsurface_permafrost.yml", + 
"recipe_autoassess_landsurface_soilmoisture.yml", + "recipe_autoassess_landsurface_surfrad.yml", + "recipe_autoassess_radiation_rms_Amon_all.yml", + "recipe_autoassess_radiation_rms_Amon_obs.yml", + "recipe_autoassess_radiation_rms_cfMon_all.yml", + "recipe_autoassess_stratosphere.yml", + "recipe_bock20jgr.yml", + "recipe_capacity_factor.yml", + "recipe_carvalhais14nat.yml", + "recipe_climwip_brunner2019_med.yml", + "recipe_climwip_brunner20esd.yml", + "recipe_climwip_test_basic.yml", + "recipe_climwip_test_performance_sigma.yml", + "recipe_clouds_bias.yml", + "recipe_clouds_ipcc.yml", + "recipe_collins13ipcc.yml", + "recipe_combined_indices.yml", + "recipe_consecdrydays.yml", + "recipe_cox18nature.yml", + "recipe_cvdp.yml", + "recipe_deangelis15nat.yml", + "recipe_diurnal_temperature_index.yml", + "recipe_eady_growth_rate.yml", + "recipe_ecs.yml", + "recipe_ecs_constraints.yml", + "recipe_ecs_scatter.yml", + "recipe_ensclus.yml", + "recipe_extreme_events.yml", + "recipe_extreme_index.yml", + "recipe_eyring06jgr.yml", + "recipe_eyring13jgr_12.yml", + "recipe_flato13ipcc.yml", + "recipe_gier2020bg.yml", + "recipe_heatwaves_coldwaves.yml", + "recipe_hyint.yml", + "recipe_hyint_extreme_events.yml", + "recipe_impact.yml", + "recipe_kcs.yml", + "recipe_landcover.yml", + "recipe_lauer13jclim.yml", + "recipe_li17natcc.yml", + "recipe_martin18grl.yml", + "recipe_meehl20sciadv.yml", + "recipe_miles_block.yml", + "recipe_miles_eof.yml", + "recipe_miles_regimes.yml", + "recipe_modes_of_variability.yml", + "recipe_multimodel_products.yml", + "recipe_ocean_Landschuetzer2016.yml", + "recipe_ocean_amoc.yml", + "recipe_ocean_bgc.yml", + "recipe_ocean_example.yml", + "recipe_ocean_ice_extent.yml", + "recipe_ocean_multimap.yml", + "recipe_ocean_quadmap.yml", + "recipe_ocean_scalar_fields.yml", + "recipe_perfmetrics_CMIP5.yml", + "recipe_perfmetrics_CMIP5_4cds.yml", + "recipe_perfmetrics_land_CMIP5.yml", + "recipe_quantilebias.yml", + "recipe_rainfarm.yml", + "recipe_runoff_et.yml", + "recipe_russell18jgr.yml", + "recipe_schlund20esd.yml", + "recipe_seaice.yml", + "recipe_seaice_drift.yml", + "recipe_seaice_feedback.yml", + "recipe_shapeselect.yml", + "recipe_smpi.yml", + "recipe_smpi_4cds.yml", + "recipe_snowalbedo.yml", + "recipe_spei.yml", + "recipe_tcr.yml", + "recipe_thermodyn_diagtool.yml", + "recipe_toymodel.yml", + "recipe_validation.yml", + "recipe_validation_CMIP6.yml", + "recipe_wenzel14jgr.yml", + "recipe_wenzel16jclim.yml", + "recipe_wenzel16nat.yml", + "recipe_williams09climdyn_CREM.yml", + "recipe_zmnam.yml", + "cmorizers/recipe_era5-land.yml", + "cmorizers/recipe_era5.yml", + "examples/recipe_check_obs.yml", + "examples/recipe_concatenate_exps.yml", + "examples/recipe_correlation.yml", + "examples/recipe_extract_shape.yml", + "examples/recipe_julia.yml", + "examples/recipe_my_personal_diagnostic.yml", + "examples/recipe_ncl.yml", + "examples/recipe_preprocessor_derive_test.yml", + "examples/recipe_preprocessor_test.yml", + "examples/recipe_python.yml", + "examples/recipe_r.yml", + "examples/recipe_variable_groups.yml", + "hydrology/recipe_globwat.yml", + "hydrology/recipe_hydro_forcing.yml", + "hydrology/recipe_hype.yml", + "hydrology/recipe_lisflood.yml", + "hydrology/recipe_marrmot.yml", + "hydrology/recipe_pcrglobwb.yml", + "hydrology/recipe_wflow.yml", + "schlund20jgr/recipe_schlund20jgr_gpp_abs_rcp85.yml", + "schlund20jgr/recipe_schlund20jgr_gpp_change_1pct.yml", + "schlund20jgr/recipe_schlund20jgr_gpp_change_rcp85.yml" +] %} diff --git a/esmvaltool/utils/testing/regression/suite.rc 
b/esmvaltool/utils/testing/regression/suite.rc
new file mode 100644
index 0000000000..bd82e7d98c
--- /dev/null
+++ b/esmvaltool/utils/testing/regression/suite.rc
@@ -0,0 +1,24 @@
+#!jinja2
+
+# Include the list of ESMValTool recipes to run
+%include example-recipes.rc
+
+#[meta]
+#    title = "Run all ESMValTool recipes"
+[scheduling]
+    [[dependencies]]
+        graph = RECIPES
+[runtime]
+    [[RECIPES]]
+        [[[directives]]]
+            --account = bd0854
+            --partition = compute
+        [[[job]]]
+            batch system = slurm
+            execution time limit = PT2H
+
+{% for i in recipes %}
+    [[{{ i|replace(".", "_")|replace("/", "-")}}]]
+        inherit = RECIPES
+        script = run-esmvaltool.sh {{i}}
+{% endfor %}
diff --git a/esmvaltool/utils/testing/regression/summarize.py b/esmvaltool/utils/testing/regression/summarize.py
new file mode 100644
index 0000000000..da0ec2a68e
--- /dev/null
+++ b/esmvaltool/utils/testing/regression/summarize.py
@@ -0,0 +1,343 @@
+"""Write an index.html file in a directory containing recipe runs."""
+import argparse
+import datetime
+import textwrap
+from pathlib import Path
+
+import yaml
+
+
+def read_resource_usage_file(recipe_dir):
+    """Read resource usage from the log."""
+    resource_file = recipe_dir / 'run' / 'resource_usage.txt'
+    usage = {}
+
+    if not resource_file.exists():
+        return usage
+
+    text = resource_file.read_text().strip()
+    if not text:
+        return usage
+
+    lines = text.split('\n')
+    for name in lines[0].split('\t'):
+        usage[name] = []
+
+    for line in lines[1:]:
+        for key, value in zip(usage, line.split('\t')):
+            if key != 'Date and time (UTC)':
+                value = float(value)
+            usage[key].append(value)
+
+    return usage
+
+
+def get_runtime_from_debug(recipe_dir):
+    """Try to read the runtime from the debug log."""
+    debug_file = recipe_dir / 'run' / 'main_log_debug.txt'
+    if not debug_file.exists():
+        return None
+
+    text = debug_file.read_text().strip()
+    if not text:
+        return None
+
+    lines = text.split('\n')
+    fmt = "%Y-%m-%d %H:%M:%S"
+    end_date = None
+    for line in lines[::-1]:
+        try:
+            end_date = datetime.datetime.strptime(line[:19], fmt)
+        except ValueError:
+            pass
+        else:
+            break
+    if end_date is None:
+        return None
+
+    start_date = datetime.datetime.strptime(lines[0][:19], fmt)
+    runtime = end_date - start_date
+    runtime = datetime.timedelta(seconds=round(runtime.total_seconds()))
+    return runtime
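For readers unfamiliar with the `resource_usage.txt` log: `read_resource_usage_file` above expects a tab-separated table whose first row holds the column names, and `get_resource_usage` below reads the `Real time (s)`, `CPU time (s)` and `Memory (GB)` columns. A sketch of that layout (not part of the diff; the values are invented):

```python
# Editor's illustration of the expected tab-separated log format.
text = (
    "Date and time (UTC)\tReal time (s)\tCPU time (s)\tMemory (GB)\n"
    "2022-02-02 22:22:22\t60.0\t55.0\t1.5\n"
    "2022-02-02 22:23:22\t120.0\t110.0\t2.0\n"
)

lines = text.strip().split('\n')
usage = {name: [] for name in lines[0].split('\t')}
for line in lines[1:]:
    for key, value in zip(usage, line.split('\t')):
        # Dates stay strings; everything else is parsed as a float.
        usage[key].append(
            value if key == 'Date and time (UTC)' else float(value))

print(usage['Real time (s)'][-1])  # -> 120.0, the latest runtime in seconds
```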
+
+
+def get_resource_usage(recipe_dir):
+    """Get recipe runtime, max memory (GB) and average CPU usage (%)."""
+    resource_usage = read_resource_usage_file(recipe_dir)
+
+    if not resource_usage or not resource_usage['Real time (s)']:
+        runtime = get_runtime_from_debug(recipe_dir)
+        runtime = "" if runtime is None else f"{runtime}"
+        return [runtime, '', '']
+
+    runtime = resource_usage['Real time (s)'][-1]
+    avg_cpu = resource_usage['CPU time (s)'][-1] / runtime * 100.
+    runtime = datetime.timedelta(seconds=round(runtime))
+    memory = max(resource_usage['Memory (GB)'])
+
+    return [f"{runtime}", f"{memory:.1f}", f"{avg_cpu:.1f}"]
+
+
+def get_first_figure(recipe_dir):
+    """Get the first figure."""
+    plot_dir = recipe_dir / 'plots'
+    figures = plot_dir.glob("**/*.png")
+    try:
+        return next(figures)
+    except StopIteration:
+        return None
+
+
+def get_recipe_name(recipe_dir):
+    """Extract recipe name from output dir."""
+    return recipe_dir.stem[7:-16]
+
+
+def get_title_and_description(recipe_dir):
+    """Get recipe title and description."""
+    name = get_recipe_name(recipe_dir)
+    recipe_file = recipe_dir / 'run' / f'recipe_{name}.yml'
+
+    with open(recipe_file, 'rb') as file:
+        recipe = yaml.safe_load(file)
+
+    docs = recipe['documentation']
+    title = docs.get('title', name.replace('_', ' ').title())
+
+    return title, docs['description']
+
+
+def link(url, text):
+    """Format text as html link."""
+    return '<a href="' + url + '">' + text + '</a>'
+
+
+def tr(entries):
+    """Format text entries as html table row."""
+    return "<tr>" + " ".join(entries) + "</tr>"
+
+
+def th(txt):
+    """Format text as html table header."""
+    return "<th>" + txt + "</th>"
+
+
+def td(txt):
+    """Format text as html table data."""
+    return "<td>" + txt + "</td>"
+
+
+def div(txt, class_):
+    """Format text as html div."""
+    return f'<div class="{class_}">\n{txt}\n</div>\n'
+
+
+def generate_summary(output_dir):
+    """Generate the lines of text for the debug summary view."""
+    lines = []
+
+    column_titles = [
+        "status",
+        "recipe output",
+        "run date",
+        "estimated run duration",
+        "estimated max memory (GB)",
+        "average cpu",
+    ]
+    lines.append(tr(th(txt) for txt in column_titles))
+
+    for recipe_dir in sorted(Path(output_dir).glob('recipe_*')):
+        log = recipe_dir / 'run' / 'main_log.txt'
+        success = log.read_text().endswith('Run was successful\n')
+        if success:
+            status = 'success'
+        else:
+            debug_log = f"{recipe_dir.name}/run/main_log_debug.txt"
+            status = "failed (" + link(debug_log, 'debug') + ")"
+        name = recipe_dir.name[:-16]
+        date = datetime.datetime.strptime(recipe_dir.name[-15:],
+                                          "%Y%m%d_%H%M%S")
+        resource_usage = get_resource_usage(recipe_dir)
+
+        entry = []
+        entry.append(status)
+        entry.append(link(recipe_dir.name, name))
+        entry.append(str(date))
+        entry.extend(resource_usage)
+
+        entry_txt = tr(td(txt) for txt in entry)
+        lines.append(entry_txt)
+
+    return lines
+
+
+def generate_overview(output_dir):
+    """Generate the lines of text for the overview page."""
+    recipes = {}
+
+    def get_date(recipe_dir):
+        return datetime.datetime.strptime(recipe_dir.stem[-15:],
+                                          "%Y%m%d_%H%M%S")
+
+    for recipe_dir in sorted(Path(output_dir).glob('recipe_*')):
+        log = recipe_dir / 'run' / 'main_log.txt'
+        success = log.read_text().endswith('Run was successful\n')
+        if not success:
+            continue
+        name = get_recipe_name(recipe_dir)
+        if name not in recipes:
+            recipes[name] = []
+        recipes[name].append(recipe_dir)
+
+    for name, recipe_dirs in recipes.items():
+        recipes[name] = sorted(recipe_dirs, key=get_date)[-1]
+
+    print(f"Found {len(recipes)} recipes")
+    lines = []
+    for name, recipe_dir in recipes.items():
+        title, description = get_title_and_description(recipe_dir)
+        figure = get_first_figure(recipe_dir)
+        recipe_url = recipe_dir.relative_to(output_dir)
+        entry_txt = div(
+            div(
+                "\n".join([
+                    f'<img src="{figure.relative_to(output_dir)}" '
+                    'class="card-img-top">' if figure else "",
+                    div(
+                        "\n".join([
+                            f'<h5 class="card-title">{title}</h5>',
+                            f'<p class="card-text">{description} '
+                            f'<a href="{recipe_url}" class="stretched-link">'
+                            '</a>'
+                            '</p>',
+                        ]),
+                        "card-body",
+                    ),
+                ]),
+                "card",
+            ),
+            "col",
+        )
+        lines.append(entry_txt)
+
+    return lines
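Composing the small helpers above is all `generate_summary()` and `generate_overview()` do: each row or card is built by nesting the string-returning functions. A minimal, self-contained illustration (not part of the diff; the row values are invented):

```python
def td(txt):
    return "<td>" + txt + "</td>"


def tr(entries):
    return "<tr>" + " ".join(entries) + "</tr>"


entry = ["success", "recipe_python", "2022-02-02 22:22:22",
         "0:02:00", "2.0", "91.7"]
print(tr(td(txt) for txt in entry))
# -> <tr><td>success</td> <td>recipe_python</td> ... </tr>
```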
+
+
+def write_debug_html(lines, output_dir):
+    """Write lines to debug.html."""
+    header = textwrap.dedent("""
+        <!doctype html>
+        <html>
+        <head>
+            <title>ESMValTool recipes</title>
+        </head>
+        <body>
+        <table>
+        """)
+    footer = textwrap.dedent("""
+        </table>
+        </body>
+        </html>
+        """)
+    lines = ["    " + line for line in lines]
+    text = header + "\n".join(lines) + footer
+
+    index_file = output_dir / 'debug.html'
+    index_file.write_text(text)
+    print(f"Wrote file://{index_file.absolute()}")
+
+
+def write_index_html(lines, output_dir):
+    """Write lines to index.html."""
+    header = textwrap.dedent("""
+        <!doctype html>
+        <html>
+        <head>
+            <title>ESMValTool results</title>
+        </head>
+        <body>
+        <div class="container">
+        <p>
+            See <a href="https://docs.esmvaltool.org/en/latest/recipes/index.html">Available recipes</a>
+            for a description of these recipes.
+            Missing something? Have a look at the <a href="debug.html">debug page</a>.
+        </p>
+        <div class="row">
+        """)  # noqa: E501
+    footer = textwrap.dedent("""
+        </div>
+        </div>
+        </body>
+        </html>
+        """)  # noqa: E501
+
+    lines = ["    " + line for line in lines]
+    text = header + "\n".join(lines) + footer
+
+    index_file = output_dir / 'index.html'
+    index_file.write_text(text)
+    print(f"Wrote file://{index_file.absolute()}")
+
+
+def main():
+    """Run the program."""
+    parser = argparse.ArgumentParser(description=__doc__)
+    parser.add_argument('output_dir',
+                        default='.',
+                        type=Path,
+                        help='ESMValTool output directory.')
+    args = parser.parse_args()
+
+    write_debug_html(generate_summary(args.output_dir), args.output_dir)
+    write_index_html(generate_overview(args.output_dir), args.output_dir)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/esmvaltool/utils/xml2yml/README.md b/esmvaltool/utils/xml2yml/README.md
deleted file mode 100644
index 04b1e512c9..0000000000
--- a/esmvaltool/utils/xml2yml/README.md
+++ /dev/null
@@ -1,32 +0,0 @@
-# xml2yml Converting Version 1 Namelists to Version 2 Namelists
-
-This converter can turn the old xml namelists into new-style yml namelists.
-It is implemented as a xslt stylesheet that needs a processor that is xslt 2.0 capable.
-With this, you simply process your old namelist with the stylesheet xml2yml.xsl to produce
-a new yml namelist.
-
-After the conversion you need to manually check the mip information in the variables!
-Also, check the caveats below!
-
-## Howto
-One freely available processor is the Java based [saxon](http://saxon.sourceforge.net/).
-You can download the free he edition [here](https://sourceforge.net/projects/saxon/files/latest/download?source=files).
-Unpack the zip file into a new directory. Then, provided you have Java installed, you can convert your namelist
-simply with:
-```
-java -jar $SAXONDIR/saxon9he.jar -xsl:xml2yml.xsl -s:namelist.xml -o:namelist.yml
-```
-
-## Caveats/Known Limitations
-* At the moment, not all model schemes (OBS, CMIP5, CMIP5_ETHZ...) are supported.
-  They are, however, relatively easy to add, so if you need help adding a new one,
-  please let me know!
-* The documentation section (namelist_summary in the old file) is not
-  automatically converted.
-* In version 1, one could name an exclude, similar to the reference model. This
-  is no longer possible and the way to do it is to include the models with
-  another `additional_models` tag in the variable section. That conversion is
-  not performed by this tool.
-
-## Author
-* **Klaus Zimmermann**, direct questions and comments to klaus.zimmermann@smhi.se
diff --git a/esmvaltool/utils/xml2yml/xml2yml.xsl b/esmvaltool/utils/xml2yml/xml2yml.xsl
index e19d641547..22218bd44b 100644
--- a/esmvaltool/utils/xml2yml/xml2yml.xsl
+++ b/esmvaltool/utils/xml2yml/xml2yml.xsl
@@ -146,7 +146,7 @@
-
+
diff --git a/language_support.sh b/language_support.sh
deleted file mode 100644
index 17b9a04735..0000000000
--- a/language_support.sh
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/bash
-
-tools=(R Rscript julia)
-
-for tool in "${tools[@]}"; do
-    if ! command -v "$tool" > /dev/null 2>&1; then
-        echo "Executable $tool not found! Exiting..." >> $PREFIX/.messages.txt
-        exit 1
-    fi
-done
-
-Rscript $PREFIX/lib/python*/site-packages/esmvaltool/install/R/setup.R >> $PREFIX/.messages.txt
-julia $PREFIX/lib/python*/site-packages/esmvaltool/install/Julia/setup.jl >> $PREFIX/.messages.txt
diff --git a/meta.yaml b/meta.yaml
deleted file mode 100644
index 82205546ec..0000000000
--- a/meta.yaml
+++ /dev/null
@@ -1,86 +0,0 @@
-# Conda build recipe
----
-
-# Build commmand:
-# conda build .
-c conda-forge -c birdhouse - -# Package version number -{% set version = "2.0a1" %} - -package: - name: esmvaltool - version: {{ version }} - -source: - # Use these two lines to build a release: - # git_rev: v{{ version }} - # git_url: https://github.com/ESMValGroup/ESMValTool.git - # Use this line instead of the above to test building without a release: - path: . - -build: - # Increment the build number when building a new conda package of the same - # esmvaltool version, reset to 0 when building a new version. - number: 1 - script: | - python setup.py install --single-version-externally-managed --record=/tmp/record.txt - POST_LINK="${PREFIX}/bin/.esmvaltool-post-link.sh" - cp -v ${RECIPE_DIR}/language_support.sh ${POST_LINK}; - chmod +x ${POST_LINK}; - -requirements: - build: - - git - - python {{ python }} - # Normally installed via pip: - - setuptools_scm - run: - # esmvaltool - - python - - libunwind # specifically for Python3.7+ - - graphviz - - iris>=2.2 - - python-stratify - # Normally installed via pip: - - cartopy - - cf_units - - cython - - eofs - - esmpy - - matplotlib<3 - - nc-time-axis - - netCDF4 - - numba - - numpy - - pandas - - pillow - - prov - - psutil - - pydot - - python-cdo - - pyyaml - - scikit-learn - - shapely - - six - - vmprof - - xarray - - yamale # in birdhouse channel - - fiona - - xlsxwriter - # Multi language support: - - ncl>=6.5.0 - - jasper!=1.900.31 # pinned NCL dependency - - r-base - - cdo - -test: - # TODO: add unit tests? This seems to require installing the tests - imports: - - esmvaltool - commands: - - esmvaltool -h - -about: - home: https://www.esmvaltool.org - license: Apache License, Version 2.0 - license_file: LICENSE diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000000..7b4aa557f8 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,6 @@ +[build-system] +requires = ["setuptools >= 40.6.0", "wheel", "setuptools_scm>=6.2"] +build-backend = "setuptools.build_meta" + +[tool.setuptools_scm] +version_scheme = "release-branch-semver" diff --git a/setup.cfg b/setup.cfg index b03861006a..e28f8079a0 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,13 +1,33 @@ -[build_sphinx] -source-dir = doc/sphinx/source -build-dir = doc/sphinx/build -all_files = 1 -builder = html - [tool:pytest] -log_level = DEBUG +addopts = + --import-mode=importlib + --doctest-modules + --ignore=doc/sphinx/source/conf.py + --cov=esmvaltool + --cov-report=xml:test-reports/coverage.xml + --cov-report=html:test-reports/coverage_html + --html=test-reports/report.html + --numprocesses auto env = MPLBACKEND = Agg +log_level = WARNING +markers = + installation: test requires installation of dependencies + +[coverage:run] +parallel = true + +[flake8] +exclude = + doc/sphinx/source/conf.py ALL + setup.py ALL [pydocstyle] convention = numpy + +[isort] +multi_line_output = 3 +include_trailing_comma = true + +[codespell] +ignore-words-list = vas diff --git a/setup.py b/setup.py index 580e643452..6b4636d1f7 100755 --- a/setup.py +++ b/setup.py @@ -1,20 +1,13 @@ #!/usr/bin/env python """ESMValTool installation script.""" -# This script only installs dependencies available on PyPI -# -# Dependencies that need to be installed some other way (e.g. conda): -# - ncl -# - iris -# - python-stratify - +import json import os import re import sys +from pathlib import Path from setuptools import Command, setup -from esmvaltool._version import __version__ - PACKAGES = [ 'esmvaltool', ] @@ -27,56 +20,92 @@ # Installation dependencies # Use with pip install . 
to install from source 'install': [ - 'cartopy', + 'aiohttp', + 'cartopy<0.24', # github.com/ESMValGroup/ESMValTool/issues/3767 'cdo', - 'cf_units', - 'cython', + 'cdsapi', + 'cf-units', + 'cfgrib', + 'cftime', + 'cmocean', + 'dask!=2024.8.0', # https://github.com/dask/dask/issues/11296 + 'distributed', + 'ecmwf-api-client', 'eofs', + 'ESMPy', # not on PyPI + 'esmvalcore', + 'esmf-regrid>=0.10.0', # iris-esmf-regrid #342 'fiona', - 'matplotlib<3', - 'nc-time-axis', # needed by iris.plot + 'fire', + 'fsspec', + 'GDAL', + 'jinja2', + 'joblib', + 'lime', + 'mapgenerator>=1.0.5', + 'matplotlib', + 'natsort', + 'nc-time-axis', 'netCDF4', 'numba', - 'numpy', - 'pandas', - 'pillow', - 'prov[dot]', - 'psutil', + 'numpy!=1.24.3', # severe masking bug + 'openpyxl', + 'packaging', + 'pandas==2.1.4', # see note in environment.yml + 'progressbar2', + 'psyplot>=1.5.0', # psy*<1.5.0 are not py312 compat + 'psy-maps>=1.5.0', + 'psy-reg>=1.5.0', + 'psy-simple>=1.5.0', + 'pyproj>=2.1', + 'pys2index', + 'python-dateutil', 'pyyaml', - 'scitools-iris>=2.2', - 'scikit-learn', - 'shapely', - 'six', - 'stratify', - 'vmprof', - 'xarray', + 'rasterio>=1.3.10', + 'requests', + 'ruamel.yaml', + 'scikit-image', + 'scikit-learn>=1.4.0', # github.com/ESMValGroup/ESMValTool/issues/3504 + 'scipy', + 'scitools-iris>=3.6.1', + 'seaborn', + 'seawater', + 'shapely>=2', + 'xarray>=0.12.0', + 'xesmf>=0.7.1', + 'xgboost>1.6.1', # github.com/ESMValGroup/ESMValTool/issues/2779 'xlsxwriter', - 'yamale', + 'zarr', ], - # Test dependencies - # Execute 'python setup.py test' to run tests + # Test dependencies (unit tests) + # Execute `pip install .[test]` once and then use `pytest` to run tests 'test': [ - 'easytest', - # TODO: add dummydata package, see environment.yml - 'mock', - 'nose', - 'pycodestyle', - 'pytest>=3.9', - 'pytest-cov', + 'flake8', + 'pytest>=3.9,!=6.0.0rc1,!=6.0.0', + 'pytest-cov>=2.10.1', 'pytest-env', - 'pytest-html', + 'pytest-html!=2.1.0', 'pytest-metadata>=1.5.1', + 'pytest-mock', + 'pytest-xdist', + ], + # Documentation dependencies + 'doc': [ + 'autodocsumm>=0.2.2', + 'nbsphinx', + 'sphinx>=6.1.3', + 'pydata-sphinx-theme', ], # Development dependencies # Use pip install -e .[develop] to install in development mode 'develop': [ + 'codespell', + 'docformatter', + 'imagehash', 'isort', - 'prospector[with_pyroma]', - 'pycodestyle', - 'pydocstyle', - 'pylint', - 'sphinx', - 'sphinx_rtd_theme', + 'pre-commit', + 'prospector[with_pyroma]>=1.12', + 'vprof', 'yamllint', 'yapf', ], @@ -101,59 +130,7 @@ def _ignore(path): yield filename -class CustomCommand(Command): - """Custom Command class.""" - - def install_deps_temp(self): - """Try to temporarily install packages needed to run the command.""" - if self.distribution.install_requires: - self.distribution.fetch_build_eggs( - self.distribution.install_requires) - if self.distribution.tests_require: - self.distribution.fetch_build_eggs(self.distribution.tests_require) - - -class RunTests(CustomCommand): - """Class to run tests and generate reports.""" - - user_options = [('installation', None, - 'Run tests that require installation.')] - - def initialize_options(self): - """Initialize custom options.""" - self.installation = False - - def finalize_options(self): - """Do nothing.""" - - def run(self): - """Run tests and generate a coverage report.""" - self.install_deps_temp() - - import pytest - - version = sys.version_info[0] - report_dir = 'test-reports/python{}'.format(version) - args = [ - 'tests', - 'esmvaltool', # for doctests - 
'--ignore=esmvaltool/cmor/tables/', - '--doctest-modules', - '--cov=esmvaltool', - '--cov-report=term', - '--cov-report=html:{}/coverage_html'.format(report_dir), - '--cov-report=xml:{}/coverage.xml'.format(report_dir), - '--junit-xml={}/report.xml'.format(report_dir), - '--html={}/report.html'.format(report_dir), - ] - if self.installation: - args.append('--installation') - errno = pytest.main(args) - - sys.exit(errno) - - -class RunLinter(CustomCommand): +class RunLinter(Command): """Class to run a linter and generate reports.""" user_options = [] @@ -164,6 +141,14 @@ def initialize_options(self): def finalize_options(self): """Do nothing.""" + def install_deps_temp(self): + """Try to temporarily install packages needed to run the command.""" + if self.distribution.install_requires: + self.distribution.fetch_build_eggs( + self.distribution.install_requires) + if self.distribution.tests_require: + self.distribution.fetch_build_eggs(self.distribution.tests_require) + def run(self): """Run prospector and generate a report.""" check_paths = PACKAGES + [ @@ -202,45 +187,81 @@ def run(self): sys.exit(errno) -with open('README.md') as readme: - setup( - name='ESMValTool', - version=__version__, - description='Earth System Models eValuation Tool', - long_description=readme.read(), - url='https://www.esmvaltool.org', - download_url='https://github.com/ESMValGroup/ESMValTool', - license='Apache License, Version 2.0', - classifiers=[ - 'Environment :: Console', - 'License :: OSI Approved :: Apache Software License', - 'Programming Language :: Python', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', +def read_authors(filename): + """Read the list of authors from .zenodo.json file.""" + with Path(filename).open() as file: + info = json.load(file) + authors = [] + for author in info['creators']: + name = ' '.join(author['name'].split(',')[::-1]).strip() + authors.append(name) + return ', '.join(authors) + + +def read_description(filename): + """Read the description from .zenodo.json file.""" + with Path(filename).open() as file: + info = json.load(file) + return info['description'] + + +setup( + name='ESMValTool', + author=read_authors('.zenodo.json'), + description=read_description('.zenodo.json'), + long_description=Path('README.md').read_text(), + long_description_content_type='text/markdown', + url='https://www.esmvaltool.org', + download_url='https://github.com/ESMValGroup/ESMValTool', + license='Apache License, Version 2.0', + classifiers=[ + 'Development Status :: 5 - Production/Stable', + 'Environment :: Console', + 'Intended Audience :: Science/Research', + 'License :: OSI Approved :: Apache Software License', + 'Natural Language :: English', + 'Operating System :: POSIX :: Linux', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.10', + 'Programming Language :: Python :: 3.11', + 'Programming Language :: Python :: 3.12', + 'Topic :: Scientific/Engineering', + 'Topic :: Scientific/Engineering :: Atmospheric Science', + 'Topic :: Scientific/Engineering :: GIS', + 'Topic :: Scientific/Engineering :: Hydrology', + 'Topic :: Scientific/Engineering :: Physics', + ], + packages=PACKAGES, + # Include all version controlled files + include_package_data=True, + setup_requires=REQUIREMENTS['setup'], + install_requires=REQUIREMENTS['install'], + tests_require=REQUIREMENTS['test'], + extras_require={ + 'develop': + REQUIREMENTS['develop'] + REQUIREMENTS['test'] + REQUIREMENTS['doc'], + 'doc': + REQUIREMENTS['doc'], + 'test': + 
REQUIREMENTS['test'], + }, + entry_points={ + 'console_scripts': [ + 'nclcodestyle = esmvaltool.utils.nclcodestyle.nclcodestyle:_main', + 'test_recipe = ' + 'esmvaltool.utils.testing.recipe_settings.install_expand_run:main', + 'recipe_filler = ' + 'esmvaltool.utils.recipe_filler:run' ], - packages=PACKAGES, - # Include all version controlled files - include_package_data=True, - setup_requires=REQUIREMENTS['setup'], - install_requires=REQUIREMENTS['install'], - tests_require=REQUIREMENTS['test'], - extras_require={ - 'develop': REQUIREMENTS['develop'] + REQUIREMENTS['test'] - }, - entry_points={ - 'console_scripts': [ - 'esmvaltool = esmvaltool._main:run', - 'cmorize_obs = esmvaltool.' - 'utils.cmorizers.obs.cmorize_obs:execute_cmorize', - 'nclcodestyle = esmvaltool.' - 'utils.nclcodestyle.nclcodestyle:_main', - 'mip_convert_setup = esmvaltool.' - 'utils.cmorizers.mip_convert.esmvt_mipconv_setup:main' - ], - }, - cmdclass={ - 'test': RunTests, - 'lint': RunLinter, - }, - zip_safe=False, - ) + 'esmvaltool_commands': [ + 'colortables = ' + 'esmvaltool.utils.color_tables.show_color_tables:ColorTables', + 'install = esmvaltool.install:Install', + 'data = esmvaltool.cmorizers.data.cmorizer:DataCommand' + ] + }, + cmdclass={ + 'lint': RunLinter, + }, + zip_safe=False, +) diff --git a/tests/__init__.py b/tests/__init__.py index 7d9b55fc89..e69de29bb2 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,77 +0,0 @@ -""" -Provides testing capabilities for :mod:`esmvaltool` package. - -""" -import unittest -from functools import wraps - -import mock -import numpy as np - - -class Test(unittest.TestCase): - """ - Provides esmvaltool specific testing functionality. - - """ - - def _remove_testcase_patches(self): - """ - Helper method to remove per-testcase patches installed by - :meth:`patch`. - - """ - # Remove all patches made, ignoring errors. - for patch in self.testcase_patches: - patch.stop() - - # Reset per-test patch control variable. - self.testcase_patches.clear() - - def patch(self, *args, **kwargs): - """ - Install a patch to be removed automatically after the current test. - - The patch is created with :func:`mock.patch`. - - Parameters - ---------- - args : list - The parameters to be passed to :func:`mock.patch`. - kwargs : dict - The keyword parameters to be passed to :func:`mock.patch`. - - Returns - ------- - The substitute mock instance returned by :func:`patch.start`. - - """ - # Make the new patch and start it. - patch = mock.patch(*args, **kwargs) - start_result = patch.start() - - # Create the per-testcases control variable if it does not exist. - # NOTE: this mimics a setUp method, but continues to work when a - # subclass defines its own setUp. - if not hasattr(self, 'testcase_patches'): - self.testcase_patches = {} - - # When installing the first patch, schedule remove-all at cleanup. - if not self.testcase_patches: - self.addCleanup(self._remove_testcase_patches) - - # Record the new patch and start object for reference. - self.testcase_patches[patch] = start_result - - # Return patch replacement object. 
- return start_result - - @wraps(np.testing.assert_array_equal) - def assertArrayEqual(self, a, b, err_msg='', verbose=True): - np.testing.assert_array_equal(a, b, err_msg=err_msg, verbose=verbose) - - @wraps(np.testing.assert_array_almost_equal) - def assertArrayAlmostEqual(self, a, b, decimal=6, err_msg='', - verbose=True): - np.testing.assert_array_almost_equal( - a, b, decimal=decimal, err_msg=err_msg, verbose=verbose) diff --git a/tests/conftest.py b/tests/conftest.py deleted file mode 100644 index 5235a69295..0000000000 --- a/tests/conftest.py +++ /dev/null @@ -1,20 +0,0 @@ -import pytest - - -def pytest_addoption(parser): - """Add a command line option to skip tests that require installation.""" - parser.addoption( - "--installation", - action="store_true", - default=False, - help="run tests that require installation") - - -def pytest_collection_modifyitems(config, items): - """Select tests to run based on command line options.""" - if config.getoption("--installation"): - return - skip_install = pytest.mark.skip(reason="need --installation option to run") - for item in items: - if "install" in item.keywords: - item.add_marker(skip_install) diff --git a/tests/integration/cmor/_fixes/CMIP5/test_BNU_ESM.py b/tests/integration/cmor/_fixes/CMIP5/test_BNU_ESM.py deleted file mode 100644 index 5adc081f73..0000000000 --- a/tests/integration/cmor/_fixes/CMIP5/test_BNU_ESM.py +++ /dev/null @@ -1,70 +0,0 @@ -import unittest - -from cf_units import Unit -from iris.cube import Cube - -from esmvaltool.cmor._fixes.CMIP5.BNU_ESM import ch4, co2, fgco2, spco2 - - -class TestCo2(unittest.TestCase): - def setUp(self): - self.cube = Cube([1.0], var_name='co2', units='J') - self.fix = co2() - - def test_fix_metadata(self): - cube = self.fix.fix_metadata([self.cube])[0] - self.assertEqual(cube.units, Unit('1e-6')) - self.assertEqual(cube.data[0], 1.0) - - def test_fix_data(self): - cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 29.0 / 44.0 * 1.e6) - self.assertEqual(cube.units, Unit('J')) - - -class Testfgco2(unittest.TestCase): - def setUp(self): - self.cube = Cube([1.0], var_name='fgco2', units='J') - self.fix = fgco2() - - def test_fix_metadata(self): - cube = self.fix.fix_metadata([self.cube])[0] - self.assertEqual(cube.units, Unit('kg m-2 s-1')) - self.assertEqual(cube.data[0], 1) - - def test_fix_data(self): - cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 12.0 / 44.0) - self.assertEqual(cube.units, Unit('J')) - - -class TestCh4(unittest.TestCase): - def setUp(self): - self.cube = Cube([1.0], var_name='ch4', units='J') - self.fix = ch4() - - def test_fix_metadata(self): - cube = self.fix.fix_metadata([self.cube])[0] - self.assertEqual(cube.units, Unit('1e-9')) - self.assertEqual(cube.data[0], 1) - - def test_fix_data(self): - cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 29.0 / 16.0 * 1.e9) - self.assertEqual(cube.units, Unit('J')) - - -class Testspco2(unittest.TestCase): - def setUp(self): - self.cube = Cube([1.0], var_name='spco2', units='J') - self.fix = spco2() - - def test_fix_metadata(self): - cube = self.fix.fix_metadata([self.cube])[0] - self.assertEqual(cube.units, Unit('J')) - self.assertEqual(cube.data[0], 1) - - def test_fix_data(self): - cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 1.e6) - self.assertEqual(cube.units, Unit('J')) diff --git a/tests/integration/cmor/_fixes/CMIP5/test_CCSM4.py b/tests/integration/cmor/_fixes/CMIP5/test_CCSM4.py deleted file mode 100644 index 5249277e4b..0000000000 
--- a/tests/integration/cmor/_fixes/CMIP5/test_CCSM4.py +++ /dev/null @@ -1,73 +0,0 @@ -import unittest - -import numpy as np -from iris.coords import DimCoord -from iris.cube import Cube - -from esmvaltool.cmor._fixes.CMIP5.CCSM4 import rlut, rlutcs, so - - -class TestsRlut(unittest.TestCase): - """Test for rlut fixes""" - - def setUp(self): - """Prepare tests""" - self.cube = Cube([1.0, 2.0], var_name='rlut') - self.cube.add_dim_coord( - DimCoord([0.50001, 1.499999], - standard_name='latitude', - bounds=[ - [0.00001, 0.999999], - [1.00001, 1.999999], - ]), 0) - self.fix = rlut() - - def test_fix_metadata(self): - """Check that latitudes values are rounded""" - cube = self.fix.fix_metadata([self.cube])[0] - - latitude = cube.coord('latitude') - self.assertTrue(np.all(latitude.points == np.array([0.5000, 1.5000]))) - self.assertTrue( - np.all(latitude.bounds == np.array([[0.0000, 1.0000], - [1.0000, 2.0000]]))) - - -class TestsRlutcs(unittest.TestCase): - """Test for rlutcs fixes""" - - def setUp(self): - """Prepare tests""" - self.cube = Cube([1.0, 2.0], var_name='rlutcs') - self.cube.add_dim_coord( - DimCoord([0.50001, 1.499999], - standard_name='latitude', - bounds=[ - [0.00001, 0.999999], - [1.00001, 1.999999], - ]), 0) - self.fix = rlutcs() - - def test_fix_metadata(self): - """Check that latitudes values are rounded""" - cube = self.fix.fix_metadata([self.cube])[0] - - latitude = cube.coord('latitude') - self.assertTrue(np.all(latitude.points == np.array([0.5000, 1.5000]))) - self.assertTrue( - np.all(latitude.bounds == np.array([[0.0000, 1.0000], - [1.0000, 2.0000]]))) - - -class TestSo(unittest.TestCase): - """Tests for so fixes""" - - def setUp(self): - """Prepare tests""" - self.cube = Cube([1.0, 2.0], var_name='so', units='1.0') - self.fix = so() - - def test_fix_metadata(self): - """Checks that units are changed to the correct value""" - cube = self.fix.fix_metadata([self.cube])[0] - self.assertEqual('1e3', cube.units.origin) diff --git a/tests/integration/cmor/_fixes/CMIP5/test_CESM1_BGC.py b/tests/integration/cmor/_fixes/CMIP5/test_CESM1_BGC.py deleted file mode 100644 index 54521d987a..0000000000 --- a/tests/integration/cmor/_fixes/CMIP5/test_CESM1_BGC.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Tests for CESM1-BGC fixes.""" -import unittest - -from cf_units import Unit -from iris.cube import Cube - -from esmvaltool.cmor._fixes.CMIP5.CESM1_BGC import co2 - - -class TestCo2(unittest.TestCase): - """Tests for co2.""" - - def setUp(self): - """Prepare tests.""" - self.cube = Cube([1.0], var_name='co2', units='J') - self.fix = co2() - - def test_fix_data(self): - """Test fix to set units correctly.""" - cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 28.966 / 44.0) - self.assertEqual(cube.units, Unit('J')) diff --git a/tests/integration/cmor/_fixes/CMIP5/test_CNRM_CM5.py b/tests/integration/cmor/_fixes/CMIP5/test_CNRM_CM5.py deleted file mode 100644 index e829f48dac..0000000000 --- a/tests/integration/cmor/_fixes/CMIP5/test_CNRM_CM5.py +++ /dev/null @@ -1,28 +0,0 @@ -import unittest - -from cf_units import Unit -from iris.cube import Cube - -from esmvaltool.cmor._fixes.CMIP5.CNRM_CM5 import msftmyz, msftmyzba - - -class TestMsftmyz(unittest.TestCase): - def setUp(self): - self.cube = Cube([1.0], var_name='msftmyz', units='J') - self.fix = msftmyz() - - def test_fix_data(self): - cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 1.0e6) - self.assertEqual(cube.units, Unit('J')) - - -class TestMsftmyzba(unittest.TestCase): - def setUp(self): - 
self.cube = Cube([1.0], var_name='msftmyzba', units='J') - self.fix = msftmyzba() - - def test_fix_data(self): - cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 1.0e6) - self.assertEqual(cube.units, Unit('J')) diff --git a/tests/integration/cmor/_fixes/CMIP5/test_CanESM2.py b/tests/integration/cmor/_fixes/CMIP5/test_CanESM2.py deleted file mode 100644 index dd5dff6850..0000000000 --- a/tests/integration/cmor/_fixes/CMIP5/test_CanESM2.py +++ /dev/null @@ -1,17 +0,0 @@ -import unittest - -from cf_units import Unit -from iris.cube import Cube - -from esmvaltool.cmor._fixes.CMIP5.CanESM2 import fgco2 - - -class TestCanESM2Fgco2(unittest.TestCase): - def setUp(self): - self.cube = Cube([1.0], var_name='fgco2', units='J') - self.fix = fgco2() - - def test_fix_data(self): - cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 12.0 / 44.0) - self.assertEqual(cube.units, Unit('J')) diff --git a/tests/integration/cmor/_fixes/CMIP5/test_EC_EARTH.py b/tests/integration/cmor/_fixes/CMIP5/test_EC_EARTH.py deleted file mode 100644 index 5ddeb0a380..0000000000 --- a/tests/integration/cmor/_fixes/CMIP5/test_EC_EARTH.py +++ /dev/null @@ -1,28 +0,0 @@ -import unittest - -from cf_units import Unit -from iris.cube import Cube - -from esmvaltool.cmor._fixes.CMIP5.EC_EARTH import sftlf, sic - - -class TestSic(unittest.TestCase): - def setUp(self): - self.cube = Cube([1.0], var_name='sic', units='J') - self.fix = sic() - - def test_fix_data(self): - cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 100) - self.assertEqual(cube.units, Unit('J')) - - -class TestSftlf(unittest.TestCase): - def setUp(self): - self.cube = Cube([1.0], var_name='sftlf', units='J') - self.fix = sftlf() - - def test_fix_data(self): - cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 100) - self.assertEqual(cube.units, Unit('J')) diff --git a/tests/integration/cmor/_fixes/CMIP5/test_FGOALS_g2.py b/tests/integration/cmor/_fixes/CMIP5/test_FGOALS_g2.py deleted file mode 100644 index 9d4d51e00e..0000000000 --- a/tests/integration/cmor/_fixes/CMIP5/test_FGOALS_g2.py +++ /dev/null @@ -1,27 +0,0 @@ -import unittest - -from cf_units import Unit -from iris.coords import DimCoord -from iris.cube import Cube - -from esmvaltool.cmor._fixes.CMIP5.FGOALS_g2 import allvars - - -class TestAll(unittest.TestCase): - def setUp(self): - self.cube = Cube([1.0, 2.0], var_name='co2', units='J') - self.cube.add_dim_coord( - DimCoord( - [0.0, 1.0], - standard_name='time', - units=Unit('days since 0001-01', calendar='gregorian')), - 0) - self.fix = allvars() - - def test_fix_metadata(self): - cube = self.fix.fix_metadata([self.cube])[0] - - time = cube.coord('time') - self.assertEqual(time.units.origin, - 'day since 1-01-01 00:00:00.000000') - self.assertEqual(time.units.calendar, 'gregorian') diff --git a/tests/integration/cmor/_fixes/CMIP5/test_FIO_ESM.py b/tests/integration/cmor/_fixes/CMIP5/test_FIO_ESM.py deleted file mode 100644 index a4ed917ce0..0000000000 --- a/tests/integration/cmor/_fixes/CMIP5/test_FIO_ESM.py +++ /dev/null @@ -1,28 +0,0 @@ -import unittest - -from cf_units import Unit -from iris.cube import Cube - -from esmvaltool.cmor._fixes.CMIP5.FIO_ESM import ch4, co2 - - -class TestCh4(unittest.TestCase): - def setUp(self): - self.cube = Cube([1.0], var_name='ch4', units='J') - self.fix = ch4() - - def test_fix_data(self): - cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 29. / 16. 
* 1.e9) - self.assertEqual(cube.units, Unit('J')) - - -class TestCo2(unittest.TestCase): - def setUp(self): - self.cube = Cube([1.0], var_name='co2', units='J') - self.fix = co2() - - def test_fix_data(self): - cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 29. / 44. * 1.e6) - self.assertEqual(cube.units, Unit('J')) diff --git a/tests/integration/cmor/_fixes/CMIP5/test_GFDL_CM2p1.py b/tests/integration/cmor/_fixes/CMIP5/test_GFDL_CM2p1.py deleted file mode 100644 index 498752fd08..0000000000 --- a/tests/integration/cmor/_fixes/CMIP5/test_GFDL_CM2p1.py +++ /dev/null @@ -1,17 +0,0 @@ -import unittest - -from cf_units import Unit -from iris.cube import Cube - -from esmvaltool.cmor._fixes.CMIP5.GFDL_CM2p1 import sftof - - -class TestSftof(unittest.TestCase): - def setUp(self): - self.cube = Cube([1.0], var_name='sftof', units='J') - self.fix = sftof() - - def test_fix_data(self): - cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 100) - self.assertEqual(cube.units, Unit('J')) diff --git a/tests/integration/cmor/_fixes/CMIP5/test_GFDL_CM3.py b/tests/integration/cmor/_fixes/CMIP5/test_GFDL_CM3.py deleted file mode 100644 index c8d42b08d8..0000000000 --- a/tests/integration/cmor/_fixes/CMIP5/test_GFDL_CM3.py +++ /dev/null @@ -1,17 +0,0 @@ -import unittest - -from cf_units import Unit -from iris.cube import Cube - -from esmvaltool.cmor._fixes.CMIP5.GFDL_CM3 import sftof - - -class TestSftof(unittest.TestCase): - def setUp(self): - self.cube = Cube([1.0], var_name='sftof', units='J') - self.fix = sftof() - - def test_fix_data(self): - cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 100) - self.assertEqual(cube.units, Unit('J')) diff --git a/tests/integration/cmor/_fixes/CMIP5/test_GFDL_ESM2G.py b/tests/integration/cmor/_fixes/CMIP5/test_GFDL_ESM2G.py deleted file mode 100644 index f6f180f288..0000000000 --- a/tests/integration/cmor/_fixes/CMIP5/test_GFDL_ESM2G.py +++ /dev/null @@ -1,17 +0,0 @@ -import unittest - -from cf_units import Unit -from iris.cube import Cube - -from esmvaltool.cmor._fixes.CMIP5.GFDL_ESM2G import co2 - - -class TestCo2(unittest.TestCase): - def setUp(self): - self.cube = Cube([1.0], var_name='co2', units='J') - self.fix = co2() - - def test_fix_data(self): - cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 1e6) - self.assertEqual(cube.units, Unit('J')) diff --git a/tests/integration/cmor/_fixes/CMIP5/test_GFDL_ESM2M.py b/tests/integration/cmor/_fixes/CMIP5/test_GFDL_ESM2M.py deleted file mode 100644 index 5e4c924f9c..0000000000 --- a/tests/integration/cmor/_fixes/CMIP5/test_GFDL_ESM2M.py +++ /dev/null @@ -1,29 +0,0 @@ -import unittest - -from cf_units import Unit -from iris.coords import DimCoord -from iris.cube import Cube - -from esmvaltool.cmor._fixes.CMIP5.GFDL_ESM2M import co2, sftof - - -class TestSftof(unittest.TestCase): - def setUp(self): - self.cube = Cube([1.0], var_name='sftof', units='J') - self.fix = sftof() - - def test_fix_data(self): - cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 100) - self.assertEqual(cube.units, Unit('J')) - - -class TestCo2(unittest.TestCase): - def setUp(self): - self.cube = Cube([1.0], var_name='co2', units='J') - self.fix = co2() - - def test_fix_data(self): - cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 1e6) - self.assertEqual(cube.units, Unit('J')) diff --git a/tests/integration/cmor/_fixes/CMIP5/test_MIROC5.py b/tests/integration/cmor/_fixes/CMIP5/test_MIROC5.py deleted file mode 100644 index 
bf95a03c55..0000000000 --- a/tests/integration/cmor/_fixes/CMIP5/test_MIROC5.py +++ /dev/null @@ -1,47 +0,0 @@ -import unittest - -from cf_units import Unit -from iris.cube import Cube - -from esmvaltool.cmor._fixes.CMIP5.MIROC5 import sftof - - -class TestGpp(unittest.TestCase): - def setUp(self): - self.cube = Cube([1.0], var_name='sftof', units='J') - self.fix = sftof() - - def test_fix_data(self): - cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 100) - self.assertEqual(cube.units, Unit('J')) - - # dayspermonth = (/31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31/) - # - # if ((name.eq."snc".or.name.eq."snw").and.FIELD.eq."T2Ds".and. \ - # ENSEMBLE.eq."r1i1p1") then - # opt = 0 - # opt@calendar = var&time@calendar - # ; get start date from time attribute "days_since_xxxx" - # t = 0.0 - # t@calendar = var&time@calendar - # t@units = var&time@units - # res = cd_calendar(t, -5) - # yy = res(0, 0) - # mm = res(0, 1) - # dd = res(0, 2) - # do ii = 0, dimsizes(var&time) - 1 - # var&time(ii) = tofloat(cd_inv_calendar(yy, mm, dd, 12, 0, 0, \ - # var&time@units, opt)) - # dd = dd + 1 - # if (dd.gt.dayspermonth(mm-1)) then - # mm = mm + 1 - # dd = 1 - # end if - # if (mm.gt.12) then - # mm = 1 - # yy = yy + 1 - # end if - # end do - # ret = 0 - # end if diff --git a/tests/integration/cmor/_fixes/CMIP5/test_MIROC_ESM.py b/tests/integration/cmor/_fixes/CMIP5/test_MIROC_ESM.py deleted file mode 100644 index 75d42e68b9..0000000000 --- a/tests/integration/cmor/_fixes/CMIP5/test_MIROC_ESM.py +++ /dev/null @@ -1,87 +0,0 @@ -import unittest - -from cf_units import Unit -from iris.coords import DimCoord -from iris.cube import Cube -from iris.exceptions import CoordinateNotFoundError - -from esmvaltool.cmor._fixes.CMIP5.MIROC_ESM import allvars, co2, gpp, tro3 - - -class TestCo2(unittest.TestCase): - def setUp(self): - self.cube = Cube([1.0], var_name='co2', units='J') - self.fix = co2() - - def test_fix_metadata(self): - cube = self.fix.fix_metadata([self.cube])[0] - self.assertEqual(cube.data[0], 1) - self.assertEqual(cube.units, Unit('1e-6')) - - -class TestTro3(unittest.TestCase): - def setUp(self): - self.cube = Cube([1.0], var_name='tro3', units='J') - self.fix = tro3() - - def test_fix_data(self): - cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 1000) - self.assertEqual(cube.units, Unit('J')) - - -class TestGpp(unittest.TestCase): - def setUp(self): - self.cube = Cube([1.0], var_name='gpp', units='J') - self.fix = gpp() - - def test_fix_metadata(self): - cube = self.fix.fix_metadata([self.cube])[0] - self.assertEqual(cube.data[0], 1) - self.assertEqual(cube.units, Unit('g m-2 day-1')) - - -class TestAll(unittest.TestCase): - def setUp(self): - self.cube = Cube([[1.0, 2.0], [3.0, 4.0]], var_name='co2', units='J') - self.cube.add_dim_coord( - DimCoord( - [0, 1], - standard_name='time', - units=Unit( - 'days since 0000-01-01 00:00:00', calendar='gregorian')), - 0) - self.cube.add_dim_coord(DimCoord([0, 1], long_name='AR5PL35'), 1) - - self.fix = allvars() - - def test_fix_metadata_plev(self): - time = self.cube.coord('time') - time.units = Unit("days since 1-1-1", time.units.calendar) - cube = self.fix.fix_metadata([self.cube])[0] - cube.coord('air_pressure') - - def test_fix_metadata_no_plev(self): - self.cube.remove_coord('AR5PL35') - cube = self.fix.fix_metadata([self.cube])[0] - with self.assertRaises(CoordinateNotFoundError): - cube.coord('air_pressure') - - -# if (iscoord(var, "time")) then -# if (isatt(var&time,"units"))then -# if 
(var&time@units.eq."days since 0000-01-01 00:00:00") then -# var&time@units ="days since 1849-01-01 00:00:00" -# ret = 0 -# end if -# if (var&time@units.eq."days since 1-1-1")then -# var&time@units ="days since 1850-01-01 00:00:00" -# ret = 0 -# end if -# end if -# end if -# -# if (iscoord(var, "AR5PL35")) then -# var!1 = "plev" -# ret = 0 -# end if diff --git a/tests/integration/cmor/_fixes/CMIP5/test_MIROC_ESM_CHEM.py b/tests/integration/cmor/_fixes/CMIP5/test_MIROC_ESM_CHEM.py deleted file mode 100644 index 62149925da..0000000000 --- a/tests/integration/cmor/_fixes/CMIP5/test_MIROC_ESM_CHEM.py +++ /dev/null @@ -1,39 +0,0 @@ -import unittest - -from cf_units import Unit -from iris.cube import Cube - -from esmvaltool.cmor._fixes.CMIP5.MIROC_ESM_CHEM import tro3 - - -class TestTro3(unittest.TestCase): - def setUp(self): - self.cube = Cube([1.0], var_name='tro3', units='J') - self.fix = tro3() - - def test_fix_data(self): - cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 1000) - self.assertEqual(cube.units, Unit('J')) - - -# if (name .eq. "tro3") then -# ; files say unit of ozone is "1e-9" ut unit is actually "1e-6" -# var = var * 1.0e3 -# if (iscoord(var, "time")) then -# do it = 1, dimsizes(var&time) - 1 -# if (var&time(it).eq.0) then -# tt = tointeger(cd_calendar(var&time(it-1), 0)) -# tt(0, 1) = tt(0, 1) + 1 ; month -# if (tt(0, 1).gt.12) then -# tt(0, 1) = 1 -# tt(0, 0) = tt(0, 0) + 1 ; year -# end if -# var&time(it) = cd_inv_calendar(\ -# tt(0, 0), tt(0, 1), tt(0, 2), tt(0, 3), \ -# tt(0, 4), tt(0, 5), var&time@units, 0) -# end if -# end do -# ret = 0 -# end if -# end if diff --git a/tests/integration/cmor/_fixes/CMIP5/test_MPI_ESM_LR.py b/tests/integration/cmor/_fixes/CMIP5/test_MPI_ESM_LR.py deleted file mode 100755 index 1809ebb9ac..0000000000 --- a/tests/integration/cmor/_fixes/CMIP5/test_MPI_ESM_LR.py +++ /dev/null @@ -1,17 +0,0 @@ -import unittest - -from cf_units import Unit -from iris.cube import Cube - -from esmvaltool.cmor._fixes.CMIP5.MPI_ESM_LR import pctisccp - - -class TestPctisccp2(unittest.TestCase): - def setUp(self): - self.cube = Cube([1.0], var_name='pctisccp', units='J') - self.fix = pctisccp() - - def test_fix_data(self): - cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 100) - self.assertEqual(cube.units, Unit('J')) diff --git a/tests/integration/cmor/_fixes/CMIP5/test_inmcm4.py b/tests/integration/cmor/_fixes/CMIP5/test_inmcm4.py deleted file mode 100644 index bdd9712568..0000000000 --- a/tests/integration/cmor/_fixes/CMIP5/test_inmcm4.py +++ /dev/null @@ -1,61 +0,0 @@ -"""Tests for inmcm4 fixes.""" -import os -import shutil -import tempfile -import unittest - -import iris -from cf_units import Unit -from iris.cube import Cube - -from esmvaltool.cmor._fixes.CMIP5.inmcm4 import gpp, lai, nbp - - -class TestGpp(unittest.TestCase): - def setUp(self): - self.cube = Cube([1.0], var_name='gpp', units='J') - self.fix = gpp() - - def test_fix_data(self): - cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], -1) - self.assertEqual(cube.units, Unit('J')) - - -class TestLai(unittest.TestCase): - def setUp(self): - self.cube = Cube([1.0], var_name='lai', units='J') - self.fix = lai() - - def test_fix_data(self): - cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 1.0 / 100.0) - self.assertEqual(cube.units, Unit('J')) - - -class TestNbp(unittest.TestCase): - """Tests for nbp.""" - - def setUp(self): - """Prepare temp folder for test.""" - self.cube = Cube([1.0], var_name='nbp') - self.fix = nbp() 
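The nbp test below exercises the third fix pathway: fix_file runs on the file path before the regular load and returns a (possibly new) path, which lets a fix repair metadata such as a missing standard_name up front. A sketch of that round trip using plain iris and the standard library; hypothetical_fix_file is illustrative, not the esmvaltool base-class method:

    import os
    import iris

    def hypothetical_fix_file(filepath, output_dir):
        """Copy a NetCDF file to output_dir, repairing metadata on the way."""
        cube = iris.load_cube(filepath)
        cube.standard_name = 'surface_temperature'  # illustrative repair
        os.makedirs(output_dir, exist_ok=True)
        fixed_path = os.path.join(output_dir, os.path.basename(filepath))
        iris.save(cube, fixed_path)
        return fixed_path
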
- self.temp_folder = tempfile.mkdtemp() - - def tearDown(self): - """Delete temp folder.""" - shutil.rmtree(self.temp_folder) - - def test_fix_file(self): - """Test fix on nbp files to set standard_name.""" - temp_handler, temp_path = tempfile.mkstemp('.nc', dir=self.temp_folder) - os.close(temp_handler) - output_dir = os.path.join(self.temp_folder, 'fixed') - - iris.save(self.cube, temp_path) - new_path = self.fix.fix_file(temp_path, output_dir) - new_cube = iris.load_cube(new_path) - self.assertEqual( - new_cube.standard_name, - 'surface_net_downward_mass_flux_of_carbon_dioxide_' - 'expressed_as_carbon_due_to_all_land_processes') diff --git a/tests/integration/cmor/_fixes/PRIMAVERA/test_EC_Earth3_HR.py b/tests/integration/cmor/_fixes/PRIMAVERA/test_EC_Earth3_HR.py deleted file mode 100644 index d0c4a18b68..0000000000 --- a/tests/integration/cmor/_fixes/PRIMAVERA/test_EC_Earth3_HR.py +++ /dev/null @@ -1,32 +0,0 @@ -"""Test for the fixes for EC-Earth3-HR model from PRIMAVERA project""" -import unittest - -from iris.coords import DimCoord -from iris.cube import Cube - -from esmvaltool.cmor._fixes.PRIMAVERA.EC_Earth3_HR import allvars - - -class TestAllVars(unittest.TestCase): - """Test for the common fixes for all the variables""" - def setUp(self): - """Prepare to test""" - self.cube = Cube([[1.0, 2.0], [3.0, 4.0]], var_name='var') - self.cube.add_dim_coord( - DimCoord([1.0, 2.0], standard_name='latitude', - var_name='latitude'), 0) - self.cube.add_dim_coord( - DimCoord([1.0, 2.0], standard_name='longitude', - var_name='longitude'), 1) - self.fix = allvars() - - def test_fix_lat_lon_names(self): - """ - Test latitude and longitude var names - - They should be lat and lon instead of the original latitude and - longitude - """ - cube = self.fix.fix_metadata([self.cube])[0] - self.assertEqual(cube.coord('latitude').var_name, 'lat') - self.assertEqual(cube.coord('longitude').var_name, 'lon') diff --git a/tests/integration/cmor/_fixes/test_fix.py b/tests/integration/cmor/_fixes/test_fix.py deleted file mode 100644 index bf26d5c652..0000000000 --- a/tests/integration/cmor/_fixes/test_fix.py +++ /dev/null @@ -1,65 +0,0 @@ -import os -import shutil -import tempfile -import unittest - -from iris.cube import Cube - -from esmvaltool.cmor.fix import Fix - - -class TestFix(unittest.TestCase): - def setUp(self): - """Set up temp folder""" - self.temp_folder = tempfile.mkdtemp() - - def tearDown(self): - """Remove temp folder""" - shutil.rmtree(self.temp_folder) - - def test_get_fix(self): - from esmvaltool.cmor._fixes.CMIP5.CanESM2 import fgco2 - self.assertListEqual( - Fix.get_fixes('CMIP5', 'CanESM2', 'fgco2'), [fgco2()]) - - def test_get_fixes_with_replace(self): - from esmvaltool.cmor._fixes.CMIP5.BNU_ESM import ch4 - self.assertListEqual(Fix.get_fixes('CMIP5', 'BNU-ESM', 'ch4'), [ch4()]) - - def test_get_fixes_with_generic(self): - from esmvaltool.cmor._fixes.CMIP5.CESM1_BGC import co2 - self.assertListEqual( - Fix.get_fixes('CMIP5', 'CESM1-BGC', 'co2'), [co2()]) - - def test_get_fix_no_project(self): - self.assertListEqual( - Fix.get_fixes('BAD_PROJECT', 'BNU-ESM', 'ch4'), []) - - def test_get_fix_no_model(self): - self.assertListEqual(Fix.get_fixes('CMIP5', 'BAD_MODEL', 'ch4'), []) - - def test_get_fix_no_var(self): - self.assertListEqual(Fix.get_fixes('CMIP5', 'BNU-ESM', 'BAD_VAR'), []) - - def test_fix_metadata(self): - cube = Cube([0]) - reference = Cube([0]) - - self.assertEqual(Fix().fix_metadata(cube), reference) - - def test_fix_data(self): - cube = Cube([0]) - reference = Cube([0]) - - 
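The lookup tests above and the identity checks around this point pin down the Fix contract: fixes are resolved per (project, model, variable), unknown keys yield an empty list, and the base-class methods return their input unchanged, so applying fixes to already-correct data is harmless. A dictionary-based sketch of that dispatch (illustrative, not the esmvaltool implementation):

    _FIXES = {
        ('CMIP5', 'CanESM2', 'fgco2'): ['fgco2_fix'],  # placeholder entry
    }

    def get_fixes(project, model, variable):
        """Return registered fixes, or an empty list for unknown keys."""
        return _FIXES.get((project, model, variable), [])

    assert get_fixes('CMIP5', 'CanESM2', 'fgco2') == ['fgco2_fix']
    assert get_fixes('BAD_PROJECT', 'BNU-ESM', 'ch4') == []
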
self.assertEqual(Fix().fix_data(cube), reference) - - def test_fix_file(self): - filepath = 'sample_filepath' - self.assertEqual(Fix().fix_file(filepath, 'preproc'), filepath) - - def test_fixed_filenam(self): - filepath = os.path.join(self.temp_folder, 'file.nc') - output_dir = os.path.join(self.temp_folder, 'fixed') - os.makedirs(output_dir) - fixed_filepath = Fix().get_fixed_filepath(output_dir, filepath) - self.assertTrue(fixed_filepath, os.path.join(output_dir, 'file.nc')) diff --git a/tests/integration/cmor/test_table.py b/tests/integration/cmor/test_table.py deleted file mode 100644 index c9e33300a0..0000000000 --- a/tests/integration/cmor/test_table.py +++ /dev/null @@ -1,135 +0,0 @@ -"""Integration tests for the variable_info module""" - -import os -import unittest - -from esmvaltool.cmor.table import CMIP5Info, CMIP6Info, CustomInfo - - -class TestCMIP6Info(unittest.TestCase): - """Test for the CMIP6 info class.""" - - @classmethod - def setUpClass(cls): - """ - Set up tests. - - We read CMIP6Info once to keep tests times manageable - """ - cls.variables_info = CMIP6Info('cmip6', default=CustomInfo()) - - def test_custom_tables_location(self): - """Test constructor with custom tables location.""" - cwd = os.path.dirname(os.path.realpath(__file__)) - cmor_tables_path = os.path.join(cwd, '..', '..', '..', 'esmvaltool', - 'cmor', 'tables', 'cmip6') - cmor_tables_path = os.path.abspath(cmor_tables_path) - CMIP6Info(cmor_tables_path) - - def test_get_variable_tas(self): - """Get tas variable.""" - var = self.variables_info.get_variable('Amon', 'tas') - self.assertEqual(var.short_name, 'tas') - - def test_get_variable_from_alias(self): - """Get a variable from a known alias.""" - var = self.variables_info.get_variable('SImon', 'sic') - self.assertEqual(var.short_name, 'siconc') - - def test_get_bad_variable(self): - """Get none if a variable is not in the given table.""" - self.assertIsNone(self.variables_info.get_variable('Omon', 'tas')) - - -class Testobs4mipsInfo(unittest.TestCase): - """Test for the obs$mips info class.""" - - @classmethod - def setUpClass(cls): - """ - Set up tests. - - We read CMIP6Info once to keep tests times manageable - """ - cls.variables_info = CMIP6Info( - cmor_tables_path='obs4mips', - default=CustomInfo() - ) - - def test_custom_tables_location(self): - """Test constructor with custom tables location.""" - cwd = os.path.dirname(os.path.realpath(__file__)) - cmor_tables_path = os.path.join(cwd, '..', '..', '..', 'esmvaltool', - 'cmor', 'tables', 'cmip6') - cmor_tables_path = os.path.abspath(cmor_tables_path) - CMIP6Info(cmor_tables_path) - - def test_get_variable_tas(self): - """Get tas variable.""" - var = self.variables_info.get_variable('monStderr', 'ndviStderr') - self.assertEqual(var.short_name, 'ndviStderr') - - def test_get_bad_variable(self): - """Get none if a variable is not in the given table.""" - self.assertIsNone(self.variables_info.get_variable('Omon', 'tas')) - - -class TestCMIP5Info(unittest.TestCase): - """Test for the CMIP5 info class.""" - - @classmethod - def setUpClass(cls): - """ - Set up tests. 
- - We read CMIP5Info once to keep testing times manageable - """ - cls.variables_info = CMIP5Info('cmip5', default=CustomInfo()) - - def test_custom_tables_location(self): - """Test constructor with custom tables location.""" - cwd = os.path.dirname(os.path.realpath(__file__)) - cmor_tables_path = os.path.join(cwd, '..', '..', '..', 'esmvaltool', - 'cmor', 'tables', 'cmip5') - cmor_tables_path = os.path.abspath(cmor_tables_path) - CMIP5Info(cmor_tables_path) - - def test_get_variable_tas(self): - """Get tas variable.""" - var = self.variables_info.get_variable('Amon', 'tas') - self.assertEqual(var.short_name, 'tas') - - def test_get_bad_variable(self): - """Get none if a variable is not in the given table.""" - self.assertIsNone(self.variables_info.get_variable('Omon', 'tas')) - - -class TestCustomInfo(unittest.TestCase): - """Test for the custom info class.""" - - @classmethod - def setUpClass(cls): - """ - Set up tests. - - We read CMIP5Info once to keep testing times manageable - """ - cls.variables_info = CustomInfo() - - def test_custom_tables_location(self): - """Test constructor with custom tables location.""" - cwd = os.path.dirname(os.path.realpath(__file__)) - cmor_tables_path = os.path.join(cwd, '..', '..', '..', 'esmvaltool', - 'cmor', 'tables', 'cmip5') - cmor_tables_path = os.path.abspath(cmor_tables_path) - CustomInfo(cmor_tables_path) - - def test_get_variable_tas(self): - """Get tas variable.""" - CustomInfo() - var = self.variables_info.get_variable('Amon', 'netcre') - self.assertEqual(var.short_name, 'netcre') - - def test_get_bad_variable(self): - """Get none if a variable is not in the given table.""" - self.assertIsNone(self.variables_info.get_variable('Omon', 'badvar')) diff --git a/tests/integration/data_finder.yml b/tests/integration/data_finder.yml deleted file mode 100644 index cd718fe2b6..0000000000 --- a/tests/integration/data_finder.yml +++ /dev/null @@ -1,238 +0,0 @@ ---- - -get_output_file: - - variable: &variable - variable_group: test - short_name: ta - dataset: HadGEM2-ES - project: CMIP5 - cmor_table: CMIP5 - institute: [INPE, MOHC] - frequency: mon - modeling_realm: [atmos] - mip: Amon - exp: historical - ensemble: r1i1p1 - start_year: 1960 - end_year: 1980 - diagnostic: test_diag - preprocessor: test_preproc - preproc_dir: this/is/a/path - output_file: this/is/a/path/test_diag/test/CMIP5_HadGEM2-ES_Amon_historical_r1i1p1_ta_1960-1980.nc - - - variable: - <<: *variable - exp: [historical, rcp85] - preproc_dir: /test - output_file: /test/test_diag/test/CMIP5_HadGEM2-ES_Amon_historical-rcp85_r1i1p1_ta_1960-1980.nc - - -get_input_filelist: - - drs: default - variable: - <<: *variable - available_files: - - ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc - - ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc - - ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc - found_files: - - ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc - - - drs: default - variable: - <<: *variable - end_year: 2060 - exp: [historical, rcp85] - available_files: - - ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc - - ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc - - ta_Amon_HadGEM2-ES_historical_r1i1p1_198413-200512.nc - - ta_Amon_HadGEM2-ES_rcp85_r1i1p1_200601-210012.nc - found_files: - - ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc - - ta_Amon_HadGEM2-ES_historical_r1i1p1_198413-200512.nc - - ta_Amon_HadGEM2-ES_rcp85_r1i1p1_200601-210012.nc - - - drs: default - variable: - <<: *variable - start_year: 2010 - end_year: 2100 - available_files: 
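Each case in this fixture fakes a directory tree containing available_files and asserts that the data finder returns exactly found_files; the first cases isolate the time-range filter on CMIP5 filenames (1960-1980 keeps only files whose YYYYMM-YYYYMM stamp overlaps the range). A self-contained sketch of that overlap test, a simplification of the real finder logic:

    import re

    def overlaps(filename, start_year, end_year):
        """True if the YYYYMM-YYYYMM stamp in filename overlaps the range."""
        match = re.search(r'_(\d{4})\d{2}-(\d{4})\d{2}\.nc$', filename)
        if match is None:
            return False
        first, last = int(match.group(1)), int(match.group(2))
        return first <= end_year and last >= start_year

    assert overlaps('ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc',
                    1960, 1980)
    assert not overlaps('ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc',
                        1960, 1980)
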
- - ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc - - ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc - - ta_Amon_HadGEM2-ES_historical_r1i1p1_198413-200512.nc - - ta_Amon_HadGEM2-ES_rcp85_r1i1p1_200601-210012.nc - found_files: [] - - - drs: default - variable: *variable - found_files: [] - - - drs: BADC - variable: - <<: *variable - start_year: 1980 - end_year: 2002 - available_files: - - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110329/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc - - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110329/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc - - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110329/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc - - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20120928/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc - - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20120928/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc - - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20120928/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc - available_symlinks: - - link_name: MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/latest - target: v20120928 - found_files: - - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/latest/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc - - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/latest/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc - - - drs: DKRZ - variable: - <<: *variable - start_year: 1980 - end_year: 2002 - available_files: - - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_185912-188411.nc - - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_188412-190911.nc - - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_190912-193411.nc - - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc - - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc - - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc - found_files: - - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc - - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc - - - drs: DKRZ - variable: - <<: *variable - exp: [historical, rcp45, rcp85] - start_year: 1980 - end_year: 2100 - available_files: - - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_185912-188411.nc - - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_188412-190911.nc - - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_190912-193411.nc - - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc - - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc - - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc - - MOHC/HadGEM2-ES/rcp45/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_rcp45_r1i1p1_200601-210012.nc - - 
MOHC/HadGEM2-ES/rcp85/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_rcp85_r1i1p1_200601-210012.nc - found_files: - - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc - - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc - - MOHC/HadGEM2-ES/rcp45/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_rcp45_r1i1p1_200601-210012.nc - - MOHC/HadGEM2-ES/rcp85/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_rcp85_r1i1p1_200601-210012.nc - - - drs: ETHZ - variable: - <<: *variable - start_year: 1980 - end_year: 2002 - available_files: - - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_185912-188411.nc - - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_188412-190911.nc - - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_190912-193411.nc - - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc - - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc - - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc - found_files: - - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc - - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc - - - drs: ETHZ - variable: - <<: *variable - start_year: 2000 - end_year: 2100 - available_files: - - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_185912-188411.nc - - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_188412-190911.nc - - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_190912-193411.nc - - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc - - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc - - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc - - rcp85/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_rcp85_r1i1p1_200601-210012.nc - found_files: - - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc - - -get_input_fx_filelist: - - drs: default - variable: - <<: *variable - fx_files: - - areacella - - areacello - - basin - - deptho - - orog - - sftlf - - sftof - - thkcello - - volcello - available_files: - - sftof_fx_HadGEM2-ES_historical_r0i0p0.nc - - areacella_fx_HadGEM2-ES_historical_r0i0p0.nc - - areacello_fx_HadGEM2-ES_historical_r0i0p0.nc - - basin_fx_HadGEM2-ES_historical_r0i0p0.nc - - deptho_fx_HadGEM2-ES_historical_r0i0p0.nc - - orog_fx_HadGEM2-ES_historical_r0i0p0.nc - - sftlf_fx_HadGEM2-ES_historical_r0i0p0.nc - - sftof_fx_HadGEM2-ES_historical_r0i0p0.nc - - thkcello_fx_HadGEM2-ES_historical_r0i0p0.nc - - volcello_fx_HadGEM2-ES_historical_r0i0p0.nc - found_files: - areacella: areacella_fx_HadGEM2-ES_historical_r0i0p0.nc - areacello: areacello_fx_HadGEM2-ES_historical_r0i0p0.nc - basin: basin_fx_HadGEM2-ES_historical_r0i0p0.nc - deptho: deptho_fx_HadGEM2-ES_historical_r0i0p0.nc - orog: orog_fx_HadGEM2-ES_historical_r0i0p0.nc - sftlf: sftlf_fx_HadGEM2-ES_historical_r0i0p0.nc - sftof: sftof_fx_HadGEM2-ES_historical_r0i0p0.nc - thkcello: thkcello_fx_HadGEM2-ES_historical_r0i0p0.nc - volcello: volcello_fx_HadGEM2-ES_historical_r0i0p0.nc - - - drs: default - variable: - <<: *variable - fx_files: - - sftof - 
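The BADC and DKRZ fx cases that follow pin down version resolution: when the same file exists under several vYYYYMMDD directories, the finder must return the newest one, or follow the latest symlink where the DRS provides it. A sketch of that choice, relying on the fact that vYYYYMMDD strings sort lexically by date:

    def newest_version(paths):
        """Pick the path whose vYYYYMMDD component is highest."""
        def version(path):
            return next(p for p in path.split('/') if p.startswith('v2'))
        return max(paths, key=version)

    paths = ['ocean/fx/r0i0p0/v20120215/sftof/sftof_fx.nc',
             'ocean/fx/r0i0p0/v20130612/sftof/sftof_fx.nc']
    assert 'v20130612' in newest_version(paths)
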
found_files: - sftof: null - - - drs: BADC - variable: - <<: *variable - fx_files: - - sftof - available_files: - - MOHC/HadGEM2-ES/historical/fx/ocean/fx/r0i0p0/v20120215/sftof/sftof_fx_HadGEM2-ES_historical_r0i0p0.nc - - MOHC/HadGEM2-ES/historical/fx/ocean/fx/r0i0p0/v20130612/sftof/sftof_fx_HadGEM2-ES_historical_r0i0p0.nc - available_symlinks: - - link_name: MOHC/HadGEM2-ES/historical/fx/ocean/fx/r0i0p0/latest - target: v20130612 - found_files: - sftof: MOHC/HadGEM2-ES/historical/fx/ocean/fx/r0i0p0/latest/sftof/sftof_fx_HadGEM2-ES_historical_r0i0p0.nc - - - drs: DKRZ - variable: - <<: *variable - fx_files: - - sftof - available_files: - - MOHC/HadGEM2-ES/historical/fx/ocean/fx/r0i0p0/v20120215/sftof/sftof_fx_HadGEM2-ES_historical_r0i0p0.nc - - MOHC/HadGEM2-ES/historical/fx/ocean/fx/r0i0p0/v20130612/sftof/sftof_fx_HadGEM2-ES_historical_r0i0p0.nc - found_files: - sftof: MOHC/HadGEM2-ES/historical/fx/ocean/fx/r0i0p0/v20130612/sftof/sftof_fx_HadGEM2-ES_historical_r0i0p0.nc - - - drs: ETHZ - variable: - <<: *variable - fx_files: - - sftof - available_files: - - historical/fx/sftof/HadGEM2-ES/r0i0p0/sftof_fx_HadGEM2-ES_historical_r0i0p0.nc - found_files: - sftof: historical/fx/sftof/HadGEM2-ES/r0i0p0/sftof_fx_HadGEM2-ES_historical_r0i0p0.nc diff --git a/tests/integration/diag_scripts/__init__.py b/tests/integration/diag_scripts/__init__.py new file mode 100644 index 0000000000..6346d25c5d --- /dev/null +++ b/tests/integration/diag_scripts/__init__.py @@ -0,0 +1 @@ +"""Integration tests for diagnostic scripts.""" diff --git a/tests/integration/diag_scripts/mlr/__init__.py b/tests/integration/diag_scripts/mlr/__init__.py new file mode 100644 index 0000000000..c9cad75399 --- /dev/null +++ b/tests/integration/diag_scripts/mlr/__init__.py @@ -0,0 +1 @@ +"""Integration tests for Machine Learning Regression (MLR) diagnostics.""" diff --git a/tests/integration/diag_scripts/mlr/_sklearn_utils.py b/tests/integration/diag_scripts/mlr/_sklearn_utils.py new file mode 100644 index 0000000000..46f6f2e9b3 --- /dev/null +++ b/tests/integration/diag_scripts/mlr/_sklearn_utils.py @@ -0,0 +1,295 @@ +"""Testing utilities for custom :mod:`sklearn` functionalities. + +Parts of this code have been copied from :mod:`sklearn`. + +License: BSD 3-Clause License + +Copyright (c) 2007-2020 The scikit-learn developers. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from this + software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +""" + +import warnings + +import numpy as np +import pytest +import scipy as sp +from numpy.testing import assert_allclose, assert_array_equal +from sklearn.base import BaseEstimator + + +class FailingClassifier(BaseEstimator): + """Classifier that deliberately fails when using fit().""" + + FAILING_PARAMETER = 42 + + def __init__(self, parameter=None): + """Initialize class instance.""" + self.parameter = parameter + + def fit(self, *_, **__): + """Fit.""" + if self.parameter == FailingClassifier.FAILING_PARAMETER: + raise ValueError("Failing classifier failed as required") + + @staticmethod + def predict(x_data): + """Predict.""" + return np.zeros(x_data.shape[0]) + + @staticmethod + def score(*_, **__): + """Score.""" + return 0.0 + + +def assert_warns(warning_class, func, *args, **kw): + """Test that a certain warning occurs. + + Parameters + ---------- + warning_class : the warning class + The class to test for, e.g. UserWarning. + + func : callable + Callable object to trigger warnings. + + *args : the positional arguments to `func`. + + **kw : the keyword arguments to `func` + + Returns + ------- + result : the return value of `func` + + """ + with warnings.catch_warnings(record=True) as warn: + # Cause all warnings to always be triggered. + warnings.simplefilter("always") + # Trigger a warning. + result = func(*args, **kw) + if hasattr(np, 'FutureWarning'): + # Filter out numpy-specific warnings in numpy >= 1.9 + warn = [e for e in warn + if e.category is not np.VisibleDeprecationWarning] + + # Verify some things + if not len(warn) > 0: + raise AssertionError("No warning raised when calling %s" + % func.__name__) + + found = any(warning.category is warning_class for warning in warn) + if not found: + raise AssertionError("%s did not give warning: %s( is %s)" + % (func.__name__, warning_class, warn)) + return result + + +def assert_warns_message(warning_class, message, func, *args, **kw): + # very important to avoid uncontrolled state propagation + """Test that a certain warning occurs and with a certain message. + + Parameters + ---------- + warning_class : the warning class + The class to test for, e.g. UserWarning. + + message : str or callable + The message or a substring of the message to test for. If callable, + it takes a string as the argument and will trigger an AssertionError + if the callable returns `False`. + + func : callable + Callable object to trigger warnings. + + *args : the positional arguments to `func`. + + **kw : the keyword arguments to `func`. + + Returns + ------- + result : the return value of `func` + + """ + with warnings.catch_warnings(record=True) as warn: + # Cause all warnings to always be triggered. + warnings.simplefilter("always") + if hasattr(np, 'FutureWarning'): + # Let's not catch the numpy internal DeprecationWarnings + warnings.simplefilter('ignore', np.VisibleDeprecationWarning) + # Trigger a warning. 
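These helpers are copied from sklearn's private test utilities and appear to predate pytest.warns; keeping them lets the copied tests run unmodified. For new tests the plain pytest equivalent is shorter (a usage sketch; legacy_func is a hypothetical callable expected to warn):

    import warnings
    import pytest

    def legacy_func():
        warnings.warn("feature is deprecated", UserWarning)

    def test_legacy_func_warns():
        with pytest.warns(UserWarning, match="deprecated"):
            legacy_func()
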
+ result = func(*args, **kw) + # Verify some things + if not len(warn) > 0: + raise AssertionError("No warning raised when calling %s" + % func.__name__) + + found = [issubclass(warning.category, warning_class) for warning in + warn] + if not any(found): + raise AssertionError("No warning raised for %s with class " + "%s" + % (func.__name__, warning_class)) + + message_found = False + # Checks the message of all warnings belong to warning_class + for index in [i for i, x in enumerate(found) if x]: + # substring will match, the entire message with typo won't + msg = warn[index].message # For Python 3 compatibility + msg = str(msg.args[0] if hasattr(msg, 'args') else msg) + if callable(message): # add support for certain tests + check_in_message = message + else: + def check_in_message(msg): + return message in msg + + if check_in_message(msg): + message_found = True + break + + if not message_found: + raise AssertionError("Did not receive the message you expected " + "('%s') for <%s>, got: '%s'" + % (message, func.__name__, msg)) + + return result + + +def assert_raise_message(exceptions, message, function, *args, **kwargs): + """Test whether the message is in an exception. + + Given an exception, a callable to raise the exception, and + a message string, tests that the correct exception is raised and + that the message is a substring of the error thrown. Used to test + that the specific message thrown during an exception is correct. + + Parameters + ---------- + exceptions : exception or tuple of exception + An Exception object. + + message : str + The error message or a substring of the error message. + + function : callable + Callable object to raise error. + + *args : the positional arguments to `function`. + + **kwargs : the keyword arguments to `function`. + + """ + try: + function(*args, **kwargs) + except exceptions as exc: + error_message = str(exc) + if message not in error_message: + raise AssertionError("Error message does not include the expected " + "string: %r. Observed error message: %r" % + (message, error_message)) + else: + # concatenate exception names + if isinstance(exceptions, tuple): + names = " or ".join(e.__name__ for e in exceptions) + else: + names = exceptions.__name__ + + raise AssertionError("%s not raised by %s" % + (names, function.__name__)) + + +def assert_allclose_dense_sparse(x_arr, y_arr, rtol=1e-07, atol=1e-9, + err_msg=''): + """Assert allclose for sparse and dense data. + + Both x_arr and y_arr need to be either sparse or dense, they + can't be mixed. + + Parameters + ---------- + x_arr : {array-like, sparse matrix} + First array to compare. + + y_arr : {array-like, sparse matrix} + Second array to compare. + + rtol : float, default=1e-07 + relative tolerance; see numpy.allclose. + + atol : float, default=1e-9 + absolute tolerance; see numpy.allclose. Note that the default here is + more tolerant than the default for numpy.testing.assert_allclose, where + atol=0. + + err_msg : str, default='' + Error message to raise. 
+ + """ + if sp.sparse.issparse(x_arr) and sp.sparse.issparse(y_arr): + x_arr = x_arr.tocsr() + y_arr = y_arr.tocsr() + x_arr.sum_duplicates() + y_arr.sum_duplicates() + assert_array_equal(x_arr.indices, y_arr.indices, err_msg=err_msg) + assert_array_equal(x_arr.indptr, y_arr.indptr, err_msg=err_msg) + assert_allclose(x_arr.data, y_arr.data, rtol=rtol, atol=atol, + err_msg=err_msg) + elif not sp.sparse.issparse(x_arr) and not sp.sparse.issparse(y_arr): + # both dense + assert_allclose(x_arr, y_arr, rtol=rtol, atol=atol, err_msg=err_msg) + else: + raise ValueError("Can only compare two sparse matrices, not a sparse " + "matrix and an array.") + + +def _convert_container(container, constructor_name, columns_name=None): + """Convert container.""" + if constructor_name == 'list': + return list(container) + if constructor_name == 'tuple': + return tuple(container) + if constructor_name == 'array': + return np.asarray(container) + if constructor_name == 'sparse': + return sp.sparse.csr_matrix(container) + if constructor_name == 'dataframe': + pandas = pytest.importorskip('pandas') + return pandas.DataFrame(container, columns=columns_name) + if constructor_name == 'series': + pandas = pytest.importorskip('pandas') + return pandas.Series(container) + if constructor_name == 'index': + pandas = pytest.importorskip('pandas') + return pandas.Index(container) + if constructor_name == 'slice': + return slice(container[0], container[1]) + if constructor_name == 'sparse_csr': + return sp.sparse.csr_matrix(container) + if constructor_name == 'sparse_csc': + return sp.sparse.csc_matrix(container) + raise TypeError(f"Constructor name '{constructor_name}' not supported") diff --git a/tests/integration/cmor/_fixes/CMIP5/__init__.py b/tests/integration/diag_scripts/mlr/configs/__init__.py similarity index 100% rename from tests/integration/cmor/_fixes/CMIP5/__init__.py rename to tests/integration/diag_scripts/mlr/configs/__init__.py diff --git a/tests/integration/diag_scripts/mlr/configs/test_general.yml b/tests/integration/diag_scripts/mlr/configs/test_general.yml new file mode 100644 index 0000000000..2e4bd09e5d --- /dev/null +++ b/tests/integration/diag_scripts/mlr/configs/test_general.yml @@ -0,0 +1,16 @@ +# Configuration file for general tests. 
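The YAML files added below drive data-driven tests: each case supplies an input cfg plus the expected output, where an EXCEPTION entry names the expected exception type and a fragment of its message. A sketch of how such a file can feed pytest, assuming only pyyaml; run_diagnostic_under_test and the file handling are illustrative, not the repository's actual test harness:

    import builtins

    import pytest
    import yaml

    with open('configs/test_load_input_datasets.yml') as stream:
        CASES = yaml.safe_load(stream)

    @pytest.mark.parametrize('case', CASES)
    def test_load_input_datasets(case):
        expected = case['output']
        if 'EXCEPTION' in expected:
            exc_type = getattr(builtins, expected['EXCEPTION']['type'])
            with pytest.raises(exc_type, match=expected['EXCEPTION']['value']):
                run_diagnostic_under_test(case['cfg'])  # hypothetical target
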
+--- +args: + - [] + +kwargs: &kwargs + input_data: [] + input_files: [] + output_file_type: pdf + plot_dir: /luke/i/am/your/father + work_dir: /no + parameters: + very: 3.0 + cool: 1 + params: [4, 1, 5] + sub_dir: /sub/dir diff --git a/tests/integration/diag_scripts/mlr/configs/test_load_input_datasets.yml b/tests/integration/diag_scripts/mlr/configs/test_load_input_datasets.yml new file mode 100644 index 0000000000..b9db4fb343 --- /dev/null +++ b/tests/integration/diag_scripts/mlr/configs/test_load_input_datasets.yml @@ -0,0 +1,1697 @@ +--- +- cfg: + imputation_strategy: mean + input_data: {} + input_files: + - /ancestor/1 + - /ancestor/2 + output_file_type: png + plot_dir: /plot/dir/1 + work_dir: /work/dir_2 + logger: [] + output: + EXCEPTION: + type: ValueError + value: No 'feature' data found +- cfg: + accept_only_scalar_data: false + allow_missing_features: true + imputation_strategy: mean + input_data: + dataset_0: + broadcast_from: + - 0 + - 1 + dataset: dataset_1 + filename: path/3 + project: project_2 + short_name: short_name_3 + standard_name: std_name_2 + tag: tag_3 + units: units_1 + var_type: wrong_var_type + dataset_1: + broadcast_from: 0 + dataset: dataset_3 + filename: /path/1 + long_name: long_name_1 + project: project_3 + short_name: short_name_3 + standard_name: std_name_2 + tag: tag_3 + units: units_3 + var_type: feature + dataset_2: + dataset: dataset_1 + filename: /path/2 + long_name: long_name_2 + prediction_name: pred_name_1 + project: project_1 + short_name: short_name_2 + standard_name: std_name_2 + tag: tag_2 + units: units_3 + var_type: label + dataset_3: + broadcast_from: + - 0 + - 1 + dataset: dataset_3 + filename: /path/1 + long_name: long_name_3 + prediction_name: pred_name_1 + project: project_3 + short_name: short_name_1 + standard_name: std_name_3 + tag: tag_1 + units: units_3 + var_type: label + dataset_4: + broadcast_from: + - 0 + - 1 + dataset: dataset_3 + filename: /path/1 + long_name: long_name_3 + prediction_name: pred_name_1 + project: project_3 + short_name: short_name_1 + standard_name: std_name_3 + tag: tag_1 + units: units_2 + var_type: label + dataset_5: + dataset: dataset_1 + filename: /path/2 + long_name: long_name_1 + prediction_name: pred_name_1 + project: project_3 + short_name: short_name_2 + standard_name: std_name_3 + tag: tag_3 + units: units_3 + var_type: wrong_var_type + input_files: + - /ancestor/1 + - /ancestor/2 + mlr_model_type: gbr + output_file_type: png + parameters: + a: 1 + plot_dir: /plot/dir/2 + work_dir: /work/dir/1 + logger: + - error + - error + output: + EXCEPTION: + type: ValueError + value: Data with invalid 'var_type' given +- cfg: + accept_only_scalar_data: true + coords_as_features: + - latitude + group_datasets_by_attributes: + - dataset + - project + input_data: + dataset_0: + broadcast_from: 0 + dataset: dataset_2 + filename: /path/1 + long_name: long_name_2 + prediction_name: pred_name_2 + project: project_3 + short_name: short_name_3 + standard_name: std_name_2 + tag: tag_3 + units: units_3 + var_type: feature + dataset_1: + dataset: dataset_2 + filename: /path/1 + long_name: long_name_1 + project: project_1 + short_name: short_name_2 + standard_name: std_name_2 + tag: tag_3 + units: units_2 + var_type: prediction_input + dataset_2: + dataset: dataset_2 + filename: /path/2 + long_name: long_name_1 + prediction_name: pred_name_2 + project: project_3 + short_name: short_name_3 + standard_name: std_name_3 + tag: tag_3 + units: units_1 + var_type: label + dataset_3: + dataset: dataset_2 + filename: /path/1 + 
long_name: long_name_2 + project: project_1 + short_name: short_name_2 + standard_name: std_name_1 + tag: tag_3 + units: units_1 + var_type: label + dataset_4: + broadcast_from: + - 0 + - 1 + dataset: dataset_1 + filename: path/3 + long_name: long_name_3 + prediction_name: pred_name_2 + project: project_3 + short_name: short_name_1 + standard_name: std_name_1 + tag: tag_2 + units: units_2 + var_type: label + dataset_5: + broadcast_from: + - 4 + - 5 + dataset: dataset_3 + filename: /path/2 + long_name: long_name_3 + prediction_name: pred_name_2 + project: project_3 + short_name: short_name_1 + standard_name: std_name_3 + tag: tag_2 + units: units_2 + var_type: prediction_input + input_files: [] + output_file_type: ps + plot_dir: /plot/dir/2 + work_dir: /work/dir_2 + logger: [] + output: + feature: + - broadcast_from: 0 + dataset: dataset_2 + filename: /path/1 + group_attribute: dataset_2-project_3 + long_name: long_name_2 + prediction_name: pred_name_2 + project: project_3 + short_name: short_name_3 + standard_name: std_name_2 + tag: tag_3 + units: units_3 + var_type: feature + label: + - dataset: dataset_2 + filename: /path/2 + group_attribute: dataset_2-project_3 + long_name: long_name_1 + prediction_name: pred_name_2 + project: project_3 + short_name: short_name_3 + standard_name: std_name_3 + tag: tag_3 + units: units_1 + var_type: label + - dataset: dataset_2 + filename: /path/1 + group_attribute: dataset_2-project_1 + long_name: long_name_2 + project: project_1 + short_name: short_name_2 + standard_name: std_name_1 + tag: tag_3 + units: units_1 + var_type: label + - broadcast_from: + - 0 + - 1 + dataset: dataset_1 + filename: path/3 + group_attribute: dataset_1-project_3 + long_name: long_name_3 + prediction_name: pred_name_2 + project: project_3 + short_name: short_name_1 + standard_name: std_name_1 + tag: tag_2 + units: units_2 + var_type: label + prediction_input: + null: + - dataset: dataset_2 + filename: /path/1 + group_attribute: null + long_name: long_name_1 + project: project_1 + short_name: short_name_2 + standard_name: std_name_2 + tag: tag_3 + units: units_2 + var_type: prediction_input + pred_name_2: + - broadcast_from: + - 4 + - 5 + dataset: dataset_3 + filename: /path/2 + group_attribute: null + long_name: long_name_3 + prediction_name: pred_name_2 + project: project_3 + short_name: short_name_1 + standard_name: std_name_3 + tag: tag_2 + units: units_2 + var_type: prediction_input + prediction_input_error: {} + prediction_reference: {} +- cfg: + accept_only_scalar_data: false + input_data: + dataset_0: + broadcast_from: + - 0 + - 1 + dataset: dataset_3 + filename: path/3 + long_name: long_name_1 + prediction_name: pred_name_1 + project: project_2 + short_name: short_name_3 + standard_name: std_name_1 + tag: tag_1 + units: units_3 + var_type: wrong_var_type + dataset_1: + dataset: dataset_1 + filename: /path/2 + long_name: long_name_2 + prediction_name: pred_name_2 + project: project_3 + short_name: short_name_2 + standard_name: std_name_2 + tag: tag_3 + var_type: feature + dataset_2: + broadcast_from: + - 4 + - 5 + dataset: dataset_1 + filename: /path/1 + long_name: long_name_3 + prediction_name: pred_name_1 + project: project_1 + short_name: short_name_1 + standard_name: std_name_1 + tag: tag_1 + units: units_2 + var_type: wrong_var_type + dataset_3: + dataset: dataset_1 + filename: path/3 + long_name: long_name_1 + project: project_2 + short_name: short_name_1 + standard_name: std_name_3 + tag: tag_1 + units: units_3 + var_type: feature + input_files: [] + 
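Most of the failing cases in this file probe a single rule: every dataset's var_type must be one of the recognised kinds, and anything else aborts loading with the message asserted below. A sketch of that validation (the set mirrors the var_types and expected output sections appearing in this file; the function is illustrative, not the mlr module's code):

    VALID_VAR_TYPES = {'feature', 'label', 'prediction_input',
                       'prediction_input_error', 'prediction_reference'}

    def check_var_types(input_data):
        """Reject datasets whose var_type is not a recognised kind."""
        for dataset in input_data.values():
            if dataset.get('var_type') not in VALID_VAR_TYPES:
                raise ValueError("Data with invalid 'var_type' given")

    check_var_types({'dataset_0': {'var_type': 'feature'}})  # passes silently
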
output_file_type: png + plot_dir: /plot/dir/2 + test_size: 0.25 + work_dir: /work/dir/1 + logger: + - error + - error + output: + EXCEPTION: + type: ValueError + value: Data with invalid 'var_type' given +- cfg: + accept_only_scalar_data: true + coords_as_features: + - latitude + grid_search_cv_param_grid: + - a: + - 1 + - 2 + b: + - 3.14 + - 2.71 + - b: + - 6.28 + - -1 + group_datasets_by_attributes: + - dataset + imputation_strategy: mean + input_data: + dataset_0: + broadcast_from: + - 4 + - 5 + dataset: dataset_1 + filename: path/3 + long_name: long_name_3 + project: project_1 + short_name: short_name_3 + standard_name: std_name_3 + tag: tag_1 + units: units_1 + var_type: prediction_input + dataset_1: + broadcast_from: + - 0 + - 1 + dataset: dataset_3 + filename: /path/1 + long_name: long_name_1 + project: project_1 + short_name: short_name_3 + standard_name: std_name_1 + tag: tag_3 + units: units_1 + var_type: feature + dataset_2: + broadcast_from: + - 0 + dataset: dataset_3 + filename: /path/1 + long_name: long_name_1 + prediction_name: pred_name_2 + project: project_1 + short_name: short_name_3 + standard_name: std_name_3 + tag: tag_3 + units: units_3 + var_type: wrong_var_type + dataset_3: + broadcast_from: 0 + dataset: dataset_1 + filename: /path/2 + long_name: long_name_1 + prediction_name: pred_name_2 + project: project_3 + short_name: short_name_2 + standard_name: std_name_3 + tag: tag_3 + units: units_2 + var_type: label + input_files: + - /ancestor/1 + mlr_model_type: gbr + output_file_type: png + parameters: + a: 1 + b: 3.1415 + plot_dir: /plot/dir/2 + predict_kwargs: + return_var: true + test_size: 2.5 + work_dir: /work/dir/1 + logger: + - error + output: + EXCEPTION: + type: ValueError + value: Data with invalid 'var_type' given +- cfg: + accept_only_scalar_data: false + grid_search_cv_param_grid: + a: + - 1 + - 2 + - 3 + imputation_strategy: remove + input_data: + dataset_0: + dataset: dataset_3 + filename: path/3 + long_name: long_name_3 + project: project_2 + short_name: short_name_3 + standard_name: std_name_3 + tag: tag_3 + units: units_1 + var_type: prediction_input + dataset_1: + dataset: dataset_1 + filename: path/3 + long_name: long_name_2 + prediction_name: pred_name_1 + project: project_3 + short_name: short_name_1 + standard_name: std_name_3 + tag: tag_3 + units: units_2 + var_type: label + dataset_2: + dataset: dataset_2 + filename: path/3 + long_name: long_name_1 + project: project_3 + short_name: short_name_2 + standard_name: std_name_2 + tag: tag_2 + units: units_1 + var_type: wrong_var_type + dataset_3: + dataset: dataset_1 + filename: path/3 + long_name: long_name_3 + project: project_3 + short_name: short_name_1 + tag: tag_3 + units: units_2 + var_type: wrong_var_type + dataset_4: + broadcast_from: + - 0 + dataset: dataset_3 + filename: /path/2 + long_name: long_name_3 + prediction_name: pred_name_1 + project: project_1 + short_name: short_name_3 + standard_name: std_name_2 + tag: tag_1 + units: units_1 + var_type: label + dataset_5: + dataset: dataset_1 + filename: /path/1 + long_name: long_name_1 + prediction_name: pred_name_1 + project: project_2 + short_name: short_name_3 + standard_name: std_name_1 + tag: tag_3 + units: units_3 + var_type: prediction_input + dataset_6: + dataset: dataset_1 + filename: path/3 + long_name: long_name_1 + prediction_name: pred_name_2 + project: project_1 + short_name: short_name_2 + standard_name: std_name_1 + tag: tag_2 + units: units_1 + var_type: prediction_input + dataset_7: + dataset: dataset_3 + filename: /path/1 
+ long_name: long_name_2 + prediction_name: pred_name_1 + project: project_1 + short_name: short_name_1 + standard_name: std_name_2 + tag: tag_2 + units: units_1 + var_type: wrong_var_type + input_files: [] + output_file_type: png + plot_dir: /plot/dir/2 + test_size: 0.25 + work_dir: /work/dir/1 + logger: + - error + - error + - error + output: + EXCEPTION: + type: ValueError + value: Data with invalid 'var_type' given +- cfg: + allow_missing_features: true + coords_as_features: + - air_pressure + - latitude + grid_search_cv_param_grid: + a: + - 1 + - 2 + - 3 + group_datasets_by_attributes: + - dataset + - project + imputation_strategy: remove + input_data: + dataset_0: + dataset: dataset_1 + filename: /path/2 + long_name: long_name_2 + prediction_name: pred_name_2 + project: project_1 + short_name: short_name_2 + standard_name: std_name_3 + tag: tag_3 + units: units_3 + var_type: wrong_var_type + dataset_1: + dataset: dataset_3 + filename: /path/1 + long_name: long_name_3 + prediction_name: pred_name_1 + project: project_1 + short_name: short_name_1 + standard_name: std_name_2 + tag: tag_2 + units: units_2 + var_type: label + dataset_2: + broadcast_from: + - 0 + dataset: dataset_2 + filename: /path/2 + long_name: long_name_1 + prediction_name: pred_name_1 + project: project_3 + short_name: short_name_3 + standard_name: std_name_2 + tag: tag_3 + units: units_3 + var_type: prediction_input + dataset_3: + broadcast_from: 0 + dataset: dataset_2 + filename: /path/2 + long_name: long_name_2 + prediction_name: pred_name_2 + project: project_3 + short_name: short_name_1 + standard_name: std_name_1 + tag: tag_3 + units: units_3 + var_type: label + input_files: + - /ancestor/1 + - /ancestor/2 + output_file_type: png + plot_dir: /plot/dir/2 + work_dir: /work/dir_2 + logger: + - error + output: + EXCEPTION: + type: ValueError + value: Data with invalid 'var_type' given +- cfg: + accept_only_scalar_data: false + allow_missing_features: false + grid_search_cv_param_grid: + a: + - 1 + - 2 + - 3 + group_datasets_by_attributes: + - dataset + - project + imputation_strategy: mean + input_data: + dataset_0: + dataset: dataset_3 + filename: /path/1 + long_name: long_name_2 + project: project_3 + standard_name: std_name_3 + tag: tag_1 + units: units_1 + var_type: prediction_input + dataset_1: + broadcast_from: + - 0 + - 1 + dataset: dataset_2 + filename: path/3 + long_name: long_name_2 + prediction_name: pred_name_2 + project: project_3 + short_name: short_name_2 + standard_name: std_name_1 + tag: tag_3 + units: units_3 + var_type: label + dataset_2: + broadcast_from: + - 4 + - 5 + dataset: dataset_1 + filename: /path/2 + long_name: long_name_1 + prediction_name: pred_name_2 + project: project_1 + standard_name: std_name_3 + tag: tag_1 + units: units_1 + var_type: label + dataset_3: + broadcast_from: + - 0 + - 1 + dataset: dataset_1 + filename: path/3 + long_name: long_name_3 + prediction_name: pred_name_2 + project: project_2 + short_name: short_name_2 + standard_name: std_name_1 + tag: tag_2 + units: units_3 + var_type: prediction_input + input_files: [] + mlr_model_type: gpr + output_file_type: pdf + plot_dir: /plot/dir/2 + predict_kwargs: + return_var: true + test_size: -1.0 + work_dir: /work/dir/1 + logger: + - error + output: + EXCEPTION: + type: ValueError + value: At least one 'label' dataset does not have necessary MLR attributes +- cfg: + accept_only_scalar_data: true + grid_search_cv_param_grid: + a: + - 1 + - 2 + - 3 + group_datasets_by_attributes: + - dataset + - project + input_data: + 
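This case also exercises group_datasets_by_attributes: the listed attribute values are joined into the group_attribute strings visible in the expected outputs above (for example dataset_2-project_3). A one-function sketch of that derivation (illustrative, not the mlr code):

    def group_attribute(dataset, attributes):
        """Join the selected attribute values with '-'; None if ungrouped."""
        if not attributes:
            return None
        return '-'.join(str(dataset[name]) for name in attributes)

    assert group_attribute({'dataset': 'dataset_2', 'project': 'project_3'},
                           ['dataset', 'project']) == 'dataset_2-project_3'
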
dataset_0: + dataset: dataset_3 + filename: path/3 + long_name: long_name_1 + project: project_1 + short_name: short_name_1 + standard_name: std_name_1 + tag: tag_3 + units: units_2 + var_type: label + dataset_1: + dataset: dataset_2 + filename: /path/2 + long_name: long_name_1 + prediction_name: pred_name_2 + project: project_3 + short_name: short_name_2 + standard_name: std_name_2 + tag: tag_3 + units: units_1 + var_type: wrong_var_type + dataset_2: + dataset: dataset_3 + filename: /path/2 + long_name: long_name_2 + prediction_name: pred_name_2 + project: project_2 + short_name: short_name_1 + standard_name: std_name_2 + tag: tag_3 + units: units_2 + var_type: wrong_var_type + dataset_3: + broadcast_from: + - 4 + - 5 + dataset: dataset_2 + filename: path/3 + long_name: long_name_2 + project: project_2 + short_name: short_name_1 + standard_name: std_name_2 + tag: tag_3 + units: units_2 + var_type: wrong_var_type + dataset_4: + broadcast_from: + - 0 + - 1 + dataset: dataset_3 + filename: path/3 + long_name: long_name_3 + prediction_name: pred_name_2 + project: project_3 + short_name: short_name_2 + standard_name: std_name_1 + tag: tag_1 + units: units_3 + var_type: feature + input_files: + - /ancestor/1 + - /ancestor/2 + mlr_model_type: gbr + output_file_type: pdf + parameters: + a: 1 + b: 3.1415 + plot_dir: /plot/dir/1 + test_size: 0.25 + work_dir: /work/dir_2 + logger: + - error + - error + - error + output: + EXCEPTION: + type: ValueError + value: Data with invalid 'var_type' given +- cfg: + accept_only_scalar_data: true + grid_search_cv_param_grid: + - a: + - 1 + - 2 + b: + - 3.14 + - 2.71 + - b: + - 6.28 + - -1 + group_datasets_by_attributes: + - dataset + imputation_strategy: remove + input_data: + dataset_0: + dataset: dataset_1 + filename: /path/1 + long_name: long_name_1 + project: project_2 + short_name: short_name_3 + standard_name: std_name_1 + tag: tag_1 + units: units_3 + var_type: feature + dataset_1: + broadcast_from: + - 0 + dataset: dataset_1 + filename: /path/1 + long_name: long_name_1 + prediction_name: pred_name_1 + project: project_1 + short_name: short_name_3 + standard_name: std_name_2 + tag: tag_3 + units: units_3 + var_type: wrong_var_type + dataset_2: + broadcast_from: + - 0 + - 1 + dataset: dataset_3 + filename: /path/1 + long_name: long_name_1 + prediction_name: pred_name_2 + project: project_3 + short_name: short_name_1 + standard_name: std_name_3 + tag: tag_2 + units: units_2 + var_type: feature + dataset_3: + dataset: dataset_3 + filename: /path/1 + long_name: long_name_3 + prediction_name: pred_name_1 + project: project_1 + short_name: short_name_3 + standard_name: std_name_1 + tag: tag_3 + units: units_3 + var_type: feature + dataset_4: + broadcast_from: + - 0 + dataset: dataset_3 + long_name: long_name_2 + prediction_name: pred_name_2 + project: project_1 + short_name: short_name_3 + standard_name: std_name_2 + tag: tag_3 + units: units_3 + var_type: feature + dataset_5: + broadcast_from: 0 + dataset: dataset_2 + filename: path/3 + long_name: long_name_3 + project: project_1 + short_name: short_name_2 + standard_name: std_name_2 + tag: tag_2 + units: units_2 + var_type: label + dataset_6: + broadcast_from: 0 + dataset: dataset_3 + filename: /path/2 + long_name: long_name_3 + project: project_2 + short_name: short_name_2 + standard_name: std_name_2 + tag: tag_1 + units: units_3 + var_type: prediction_input + dataset_7: + dataset: dataset_1 + filename: /path/2 + long_name: long_name_3 + prediction_name: pred_name_1 + project: project_2 + short_name: 
short_name_3 + standard_name: std_name_3 + tag: tag_2 + units: units_2 + var_type: feature + input_files: + - /ancestor/1 + mlr_model_type: gpr + output_file_type: png + parameters: + a: 1 + plot_dir: /plot/dir/1 + test_size: 0.25 + work_dir: /work/dir_2 + logger: + - error + output: + EXCEPTION: + type: ValueError + value: Data with invalid 'var_type' given +- cfg: + accept_only_scalar_data: false + coords_as_features: + - latitude + input_data: + dataset_0: + broadcast_from: + - 0 + dataset: dataset_2 + filename: path/3 + long_name: long_name_1 + prediction_name: pred_name_2 + project: project_1 + short_name: short_name_2 + standard_name: std_name_1 + tag: tag_2 + units: units_2 + var_type: label + dataset_1: + broadcast_from: 0 + dataset: dataset_3 + filename: /path/2 + long_name: long_name_3 + prediction_name: pred_name_1 + project: project_2 + short_name: short_name_3 + standard_name: std_name_2 + tag: tag_2 + units: units_2 + var_type: wrong_var_type + dataset_2: + broadcast_from: + - 4 + - 5 + dataset: dataset_3 + filename: path/3 + long_name: long_name_3 + project: project_1 + short_name: short_name_1 + standard_name: std_name_2 + tag: tag_1 + units: units_3 + var_type: label + dataset_3: + broadcast_from: 0 + dataset: dataset_3 + filename: /path/2 + long_name: long_name_1 + project: project_3 + short_name: short_name_2 + standard_name: std_name_3 + units: units_1 + var_type: label + dataset_4: + dataset: dataset_1 + filename: path/3 + long_name: long_name_3 + prediction_name: pred_name_1 + project: project_2 + short_name: short_name_1 + standard_name: std_name_1 + tag: tag_3 + units: units_2 + var_type: label + dataset_5: + dataset: dataset_3 + filename: /path/1 + long_name: long_name_2 + prediction_name: pred_name_1 + project: project_2 + short_name: short_name_2 + standard_name: std_name_2 + tag: tag_2 + units: units_2 + var_type: feature + dataset_6: + broadcast_from: 0 + dataset: dataset_2 + filename: /path/1 + long_name: long_name_3 + prediction_name: pred_name_2 + project: project_2 + standard_name: std_name_1 + tag: tag_1 + units: units_3 + var_type: label + input_files: + - /ancestor/1 + mlr_model_type: gpr + output_file_type: pdf + parameters: + a: 1 + b: 3.1415 + plot_dir: /plot/dir/2 + predict_kwargs: + return_var: true + work_dir: /work/dir_2 + logger: + - error + output: + EXCEPTION: + type: ValueError + value: Data with invalid 'var_type' given +- cfg: + accept_only_scalar_data: true + allow_missing_features: true + grid_search_cv_param_grid: + a: + - 1 + - 2 + - 3 + group_datasets_by_attributes: + - dataset + - project + imputation_strategy: remove + input_data: + dataset_0: + broadcast_from: + - 4 + - 5 + dataset: dataset_1 + filename: /path/2 + long_name: long_name_1 + prediction_name: pred_name_2 + project: project_1 + short_name: short_name_1 + standard_name: std_name_3 + tag: tag_3 + units: units_1 + var_type: wrong_var_type + dataset_1: + dataset: dataset_1 + filename: /path/1 + long_name: long_name_3 + prediction_name: pred_name_2 + project: project_2 + short_name: short_name_2 + standard_name: std_name_3 + tag: tag_2 + units: units_2 + var_type: label + input_files: [] + output_file_type: ps + plot_dir: /plot/dir/2 + work_dir: /work/dir/1 + logger: + - error + output: + EXCEPTION: + type: ValueError + value: Data with invalid 'var_type' given +- cfg: + coords_as_features: + - latitude + group_datasets_by_attributes: + - dataset + imputation_strategy: remove + input_data: + dataset_0: + dataset: dataset_3 + filename: /path/2 + long_name: long_name_3 + 
project: project_2 + short_name: short_name_1 + standard_name: std_name_2 + tag: tag_3 + units: units_1 + var_type: prediction_input + dataset_1: + broadcast_from: + - 4 + - 5 + dataset: dataset_3 + filename: /path/1 + long_name: long_name_3 + prediction_name: pred_name_2 + project: project_1 + short_name: short_name_2 + standard_name: std_name_3 + tag: tag_3 + units: units_1 + var_type: wrong_var_type + dataset_2: + broadcast_from: + - 0 + dataset: dataset_1 + filename: /path/2 + long_name: long_name_2 + prediction_name: pred_name_1 + project: project_3 + short_name: short_name_3 + standard_name: std_name_2 + tag: tag_2 + units: units_1 + var_type: prediction_input + dataset_3: + broadcast_from: + - 4 + - 5 + dataset: dataset_1 + filename: path/3 + project: project_2 + short_name: short_name_3 + standard_name: std_name_2 + tag: tag_3 + units: units_2 + var_type: wrong_var_type + dataset_4: + dataset: dataset_2 + filename: path/3 + long_name: long_name_1 + project: project_1 + short_name: short_name_3 + standard_name: std_name_2 + tag: tag_2 + units: units_3 + var_type: wrong_var_type + dataset_5: + broadcast_from: + - 4 + - 5 + dataset: dataset_3 + filename: /path/2 + long_name: long_name_1 + prediction_name: pred_name_1 + project: project_2 + short_name: short_name_2 + standard_name: std_name_3 + tag: tag_2 + units: units_1 + var_type: label + dataset_6: + dataset: dataset_3 + filename: path/3 + long_name: long_name_1 + prediction_name: pred_name_1 + project: project_3 + short_name: short_name_3 + standard_name: std_name_1 + tag: tag_3 + units: units_2 + var_type: feature + dataset_7: + broadcast_from: 0 + dataset: dataset_1 + filename: path/3 + long_name: long_name_1 + prediction_name: pred_name_2 + project: project_2 + short_name: short_name_3 + standard_name: std_name_1 + tag: tag_3 + units: units_2 + var_type: prediction_input + input_files: + - /ancestor/1 + - /ancestor/2 + mlr_model_type: gpr + output_file_type: png + plot_dir: /plot/dir/1 + test_size: 0.25 + work_dir: /work/dir_2 + logger: + - error + - error + - error + output: + EXCEPTION: + type: ValueError + value: Data with invalid 'var_type' given +- cfg: + accept_only_scalar_data: true + imputation_strategy: remove + input_data: + dataset_0: + broadcast_from: + - 0 + - 1 + dataset: dataset_3 + filename: path/3 + long_name: long_name_1 + prediction_name: pred_name_1 + project: project_3 + short_name: short_name_2 + standard_name: std_name_3 + tag: tag_2 + units: units_2 + var_type: wrong_var_type + dataset_1: + broadcast_from: + - 4 + - 5 + dataset: dataset_2 + filename: /path/1 + long_name: long_name_3 + prediction_name: pred_name_1 + project: project_1 + short_name: short_name_1 + standard_name: std_name_3 + tag: tag_1 + units: units_2 + var_type: wrong_var_type + dataset_2: + broadcast_from: + - 0 + dataset: dataset_2 + filename: /path/1 + long_name: long_name_1 + prediction_name: pred_name_2 + project: project_2 + short_name: short_name_3 + standard_name: std_name_3 + tag: tag_3 + units: units_2 + var_type: label + dataset_3: + dataset: dataset_2 + filename: /path/1 + long_name: long_name_1 + project: project_1 + short_name: short_name_2 + standard_name: std_name_3 + tag: tag_3 + units: units_3 + var_type: wrong_var_type + input_files: + - /ancestor/1 + mlr_model_type: gpr + output_file_type: pdf + parameters: + a: 1 + plot_dir: /plot/dir/1 + predict_kwargs: + return_var: true + test_size: 0.25 + work_dir: /work/dir_2 + logger: + - error + - error + - error + output: + EXCEPTION: + type: ValueError + value: Data with 
invalid 'var_type' given +- cfg: + accept_only_scalar_data: false + allow_missing_features: false + grid_search_cv_param_grid: + - a: + - 1 + - 2 + b: + - 3.14 + - 2.71 + - b: + - 6.28 + - -1 + group_datasets_by_attributes: + - dataset + imputation_strategy: mean + input_data: + dataset_0: + broadcast_from: + - 0 + dataset: dataset_1 + filename: path/3 + long_name: long_name_2 + project: project_3 + short_name: short_name_1 + standard_name: std_name_3 + tag: tag_1 + units: units_1 + var_type: prediction_input + dataset_1: + dataset: dataset_2 + filename: path/3 + long_name: long_name_1 + prediction_name: pred_name_2 + project: project_2 + short_name: short_name_2 + standard_name: std_name_2 + tag: tag_3 + units: units_1 + var_type: wrong_var_type + dataset_2: + broadcast_from: 0 + dataset: dataset_1 + filename: /path/2 + long_name: long_name_3 + prediction_name: pred_name_1 + project: project_3 + short_name: short_name_3 + standard_name: std_name_2 + tag: tag_1 + units: units_1 + var_type: label + dataset_3: + broadcast_from: 0 + dataset: dataset_3 + filename: /path/1 + long_name: long_name_3 + prediction_name: pred_name_1 + project: project_1 + short_name: short_name_2 + standard_name: std_name_2 + tag: tag_2 + units: units_3 + var_type: feature + dataset_4: + broadcast_from: + - 0 + - 1 + dataset: dataset_2 + filename: /path/2 + long_name: long_name_2 + project: project_3 + short_name: short_name_2 + standard_name: std_name_2 + tag: tag_1 + units: units_1 + var_type: wrong_var_type + dataset_5: + dataset: dataset_3 + filename: /path/2 + long_name: long_name_1 + prediction_name: pred_name_2 + project: project_2 + short_name: short_name_2 + standard_name: std_name_3 + tag: tag_1 + units: units_3 + var_type: prediction_input + input_files: + - /ancestor/1 + output_file_type: pdf + plot_dir: /plot/dir/2 + test_size: 0.25 + work_dir: /work/dir/1 + logger: + - error + - error + output: + EXCEPTION: + type: ValueError + value: Data with invalid 'var_type' given +- cfg: + allow_missing_features: false + group_datasets_by_attributes: + - dataset + input_data: + dataset_0: + broadcast_from: 0 + dataset: dataset_1 + filename: /path/1 + long_name: long_name_2 + prediction_name: pred_name_1 + project: project_2 + short_name: short_name_3 + standard_name: std_name_2 + tag: tag_3 + units: units_1 + var_type: label + dataset_1: + broadcast_from: + - 0 + - 1 + dataset: dataset_2 + filename: /path/2 + long_name: long_name_2 + project: project_2 + short_name: short_name_3 + standard_name: std_name_2 + tag: tag_1 + units: units_1 + var_type: feature + input_files: + - /ancestor/1 + - /ancestor/2 + output_file_type: pdf + plot_dir: /plot/dir/2 + test_size: -1.0 + work_dir: /work/dir/1 + logger: [] + output: + EXCEPTION: + type: ValueError + value: No 'prediction_input' data found +- cfg: + accept_only_scalar_data: true + allow_missing_features: false + group_datasets_by_attributes: + - dataset + - project + imputation_strategy: remove + input_data: + dataset_0: + broadcast_from: 0 + dataset: dataset_1 + filename: path/3 + long_name: long_name_3 + prediction_name: pred_name_1 + project: project_3 + short_name: short_name_2 + standard_name: std_name_3 + tag: tag_3 + units: units_1 + var_type: feature + dataset_1: + dataset: dataset_1 + filename: /path/1 + long_name: long_name_2 + prediction_name: pred_name_2 + project: project_1 + short_name: short_name_1 + standard_name: std_name_1 + tag: tag_1 + units: units_2 + var_type: feature + dataset_2: + dataset: dataset_2 + filename: path/3 + long_name: long_name_3 + 
prediction_name: pred_name_2 + project: project_3 + short_name: short_name_3 + standard_name: std_name_3 + tag: tag_2 + units: units_2 + var_type: label + dataset_3: + broadcast_from: + - 0 + dataset: dataset_3 + filename: /path/1 + long_name: long_name_3 + project: project_2 + short_name: short_name_1 + standard_name: std_name_2 + tag: tag_2 + var_type: wrong_var_type + dataset_4: + dataset: dataset_3 + filename: /path/1 + long_name: long_name_2 + project: project_3 + short_name: short_name_1 + standard_name: std_name_2 + tag: tag_2 + units: units_2 + var_type: feature + dataset_5: + dataset: dataset_3 + filename: path/3 + long_name: long_name_1 + prediction_name: pred_name_2 + project: project_1 + short_name: short_name_2 + standard_name: std_name_1 + tag: tag_1 + units: units_1 + var_type: label + dataset_6: + dataset: dataset_1 + filename: /path/1 + long_name: long_name_1 + prediction_name: pred_name_1 + project: project_2 + short_name: short_name_3 + standard_name: std_name_1 + tag: tag_3 + units: units_2 + var_type: label + input_files: + - /ancestor/1 + - /ancestor/2 + mlr_model_type: gbr + output_file_type: pdf + parameters: + a: 1 + b: 3.1415 + plot_dir: /plot/dir/1 + predict_kwargs: + return_var: true + test_size: 0.25 + work_dir: /work/dir_2 + logger: + - error + output: + EXCEPTION: + type: ValueError + value: Data with invalid 'var_type' given +- cfg: + accept_only_scalar_data: false + allow_missing_features: true + coords_as_features: + - air_pressure + - latitude + imputation_strategy: remove + input_data: + dataset_0: + broadcast_from: 0 + dataset: dataset_2 + filename: /path/2 + long_name: long_name_3 + project: project_2 + short_name: short_name_3 + standard_name: std_name_2 + tag: tag_3 + units: units_1 + var_type: label + input_files: [] + output_file_type: png + plot_dir: /plot/dir/1 + predict_kwargs: + return_var: true + test_size: -1.0 + work_dir: /work/dir_2 + logger: [] + output: + EXCEPTION: + type: ValueError + value: No 'feature' data found +- cfg: + accept_only_scalar_data: false + grid_search_cv_param_grid: + - a: + - 1 + - 2 + b: + - 3.14 + - 2.71 + - b: + - 6.28 + - -1 + group_datasets_by_attributes: + - dataset + - project + input_data: + dataset_0: + broadcast_from: + - 0 + - 1 + dataset: dataset_3 + filename: path/3 + long_name: long_name_2 + prediction_name: pred_name_2 + short_name: short_name_3 + standard_name: std_name_3 + tag: tag_2 + units: units_2 + var_type: prediction_input + dataset_1: + broadcast_from: + - 4 + - 5 + dataset: dataset_3 + filename: /path/2 + long_name: long_name_1 + prediction_name: pred_name_1 + project: project_3 + short_name: short_name_3 + standard_name: std_name_3 + tag: tag_2 + units: units_2 + var_type: label + dataset_2: + broadcast_from: + - 4 + - 5 + dataset: dataset_3 + filename: /path/2 + long_name: long_name_2 + prediction_name: pred_name_2 + short_name: short_name_2 + standard_name: std_name_2 + tag: tag_1 + units: units_3 + var_type: prediction_input + dataset_3: + broadcast_from: + - 0 + dataset: dataset_1 + filename: /path/1 + long_name: long_name_3 + prediction_name: pred_name_2 + project: project_2 + short_name: short_name_3 + standard_name: std_name_1 + tag: tag_3 + units: units_2 + var_type: feature + dataset_4: + dataset: dataset_2 + filename: /path/2 + long_name: long_name_3 + project: project_2 + short_name: short_name_2 + standard_name: std_name_3 + tag: tag_2 + units: units_1 + var_type: prediction_input + dataset_5: + broadcast_from: + - 0 + - 1 + dataset: dataset_2 + filename: /path/1 + long_name: 
long_name_3 + project: project_1 + short_name: short_name_2 + standard_name: std_name_1 + tag: tag_3 + units: units_2 + var_type: prediction_input + dataset_6: + broadcast_from: 0 + dataset: dataset_3 + filename: path/3 + long_name: long_name_3 + project: project_3 + short_name: short_name_3 + standard_name: std_name_3 + tag: tag_1 + units: units_1 + var_type: prediction_input + dataset_7: + dataset: dataset_2 + filename: path/3 + long_name: long_name_1 + project: project_2 + short_name: short_name_1 + standard_name: std_name_2 + units: units_1 + var_type: wrong_var_type + dataset_8: + dataset: dataset_1 + filename: /path/2 + long_name: long_name_2 + prediction_name: pred_name_1 + project: project_2 + short_name: short_name_2 + standard_name: std_name_2 + tag: tag_3 + units: units_1 + var_type: prediction_input + input_files: [] + output_file_type: png + parameters: + a: 1 + plot_dir: /plot/dir/2 + predict_kwargs: + return_var: true + work_dir: /work/dir_2 + logger: + - error + output: + EXCEPTION: + type: ValueError + value: Data with invalid 'var_type' given +- cfg: + accept_only_scalar_data: true + allow_missing_features: false + grid_search_cv_param_grid: + - a: + - 1 + - 2 + b: + - 3.14 + - 2.71 + - b: + - 6.28 + - -1 + group_datasets_by_attributes: + - dataset + - project + imputation_strategy: mean + input_data: + dataset_0: + broadcast_from: + - 0 + - 1 + dataset: dataset_2 + filename: path/3 + long_name: long_name_2 + project: project_3 + short_name: short_name_3 + standard_name: std_name_3 + tag: tag_1 + units: units_3 + var_type: prediction_input + input_files: [] + mlr_model_type: gpr + output_file_type: png + parameters: + a: 1 + plot_dir: /plot/dir/1 + test_size: 2.5 + work_dir: /work/dir/1 + logger: [] + output: + EXCEPTION: + type: ValueError + value: No 'feature' data found diff --git a/tests/integration/diag_scripts/mlr/generate_config.py b/tests/integration/diag_scripts/mlr/generate_config.py new file mode 100644 index 0000000000..ef5ca809f2 --- /dev/null +++ b/tests/integration/diag_scripts/mlr/generate_config.py @@ -0,0 +1,147 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +"""Generate config files which can be used as input for tests.""" + +import os +import random +from unittest import mock + +import yaml + +from tests.integration.diag_scripts.mlr.test_read_input import ( + SimplifiedMLRModel, + get_logger_msg, +) + +FUNCTION = '_load_input_datasets' +N_CFG = 20 +N_MAX_DATA = 10 +OUTFILE = os.path.expanduser(os.path.join('~', 'outfile.yml')) +DATASET = { + 'dataset': ['dataset_1', 'dataset_2', 'dataset_3'], + 'project': ['project_1', 'project_2', 'project_3'], + 'standard_name': ['std_name_1', 'std_name_2', 'std_name_3'], + 'short_name': ['short_name_1', 'short_name_2', 'short_name_3'], + 'long_name': ['long_name_1', 'long_name_2', 'long_name_3'], + 'units': ['units_1', 'units_2', 'units_3'], + 'filename': ['/path/1', '/path/2', 'path/3'], + 'var_type': ['feature', 'label', 'prediction_input', 'wrong_var_type'], + 'tag': ['tag_1', 'tag_2', 'tag_3'], + 'prediction_name': [None, 'pred_name_1', 'pred_name_2'], + 'broadcast_from': [None, None, None, None, 0, [0], [0, 1], [4, 5]], +} +CFG = { + 'input_files': [[], ['/ancestor/1'], ['/ancestor/1', '/ancestor/2']], + 'output_file_type': ['png', 'pdf', 'ps'], + 'plot_dir': ['/plot/dir/1', '/plot/dir/2'], + 'work_dir': ['/work/dir/1', '/work/dir_2'], + 'accept_only_scalar_data': [None, True, False], + 'allow_missing_features': [None, True, False], + 'grid_search_cv_param_grid': [ + None, + None, + { + 'a': [1, 2, 3] + }, 
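+        # Entries mirror the two ``param_grid`` formats accepted by
+        # sklearn's GridSearchCV: a single dict or a list of dicts.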
+ [{ + 'a': [1, 2], + 'b': [3.14, 2.71] + }, { + 'b': [6.28, -1] + }], + ], + 'group_datasets_by_attributes': [ + None, + None, + ['dataset'], + ['dataset', 'project'], + ], + 'imputation_strategy': [None, None, 'remove', 'mean'], + 'mlr_model_type': [None, None, 'gbr', 'gpr'], + 'parameters': [ + None, + None, + { + 'a': 1 + }, + { + 'a': 1, + 'b': 3.1415 + }, + ], + 'predict_kwargs': [None, None, { + 'return_var': True + }], + 'test_size': [None, None, 0.25, -1.0, 2.5], + 'coords_as_features': [ + None, + None, + None, + ['latitude'], + ['air_pressure', 'latitude'], + ], +} + +random.seed(42) + + +def generate_random_dict(source, remove_prob=0.0): + """Generate random dict_ using `source`.""" + dict_ = {} + for (attr, values) in source.items(): + value = values[random.randrange(len(values))] + if value is not None: + dict_[attr] = value + if random.random() < remove_prob: + rand_attr = list(dict_.keys())[random.randrange(len(dict_))] + dict_.pop(rand_attr) + return dict_ + + +if __name__ == '__main__': + CFGS = [] + + # Generate data + for idx_cfg in range(N_CFG): + key_cfg = 'cfg_{:d}'.format(idx_cfg) + cfg = generate_random_dict(CFG) + datasets = {} + for idx_data in range(random.randrange(N_MAX_DATA)): + key_data = 'dataset_{:d}'.format(idx_data) + dataset = generate_random_dict(DATASET, remove_prob=0.1) + datasets[key_data] = dataset + cfg['input_data'] = datasets + input_datasets = list(datasets.values()) + + # Output + mlr_model = SimplifiedMLRModel(cfg) + logger_calls = [] + with mock.patch('esmvaltool.diag_scripts.mlr.logger', + autospec=True) as mlr_logger: + with mock.patch('esmvaltool.diag_scripts.mlr.models.logger', + autospec=True) as models_logger: + try: + getattr(mlr_model, FUNCTION)(input_datasets) + except Exception as exc: + output = {'EXCEPTION': {}} + output['EXCEPTION']['type'] = type(exc).__name__ + output['EXCEPTION']['value'] = exc.args[0] + else: + output = mlr_model._datasets + logger_calls.extend(models_logger.method_calls) + logger_calls.extend(mlr_logger.method_calls) + CFGS.append({ + 'cfg': cfg, + 'output': output, + 'logger': get_logger_msg(logger_calls), + }) + + # Write data + NoAnchorDumper = yaml.dumper.SafeDumper + NoAnchorDumper.ignore_aliases = lambda self, data: True + with open(OUTFILE, 'w') as outfile: + yaml.dump(CFGS, + outfile, + default_flow_style=False, + Dumper=NoAnchorDumper) + print("Wrote '{}'".format(OUTFILE)) diff --git a/tests/integration/diag_scripts/mlr/test_custom_sklearn_classes.py b/tests/integration/diag_scripts/mlr/test_custom_sklearn_classes.py new file mode 100644 index 0000000000..3bdf4cca06 --- /dev/null +++ b/tests/integration/diag_scripts/mlr/test_custom_sklearn_classes.py @@ -0,0 +1,1079 @@ +"""Integration tests for classes of custom :mod:`sklearn` functionalities. + +Parts of this code have been copied from :mod:`sklearn`. + +License: BSD 3-Clause License + +Copyright (c) 2007-2020 The scikit-learn developers. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. 
+ +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from this + software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +""" + +# pylint: disable=arguments-differ +# pylint: disable=attribute-defined-outside-init +# pylint: disable=invalid-name +# pylint: disable=no-self-use +# pylint: disable=protected-access +# pylint: disable=too-few-public-methods +# pylint: disable=too-many-arguments + +import warnings +from copy import deepcopy + +import numpy as np +import pytest +from sklearn.base import BaseEstimator, clone +from sklearn.compose import ColumnTransformer, TransformedTargetRegressor +from sklearn.decomposition import PCA +from sklearn.exceptions import NotFittedError +from sklearn.linear_model import LinearRegression +from sklearn.metrics import make_scorer, mean_absolute_error +from sklearn.preprocessing import FunctionTransformer, StandardScaler + +from esmvaltool.diag_scripts.mlr.custom_sklearn import ( + _DEFAULT_TAGS, + AdvancedPipeline, + AdvancedRFE, + AdvancedRFECV, + AdvancedTransformedTargetRegressor, + FeatureSelectionTransformer, + _score_weighted, +) + +# AdvancedPipeline + + +X_TRAIN = np.array([[3.0], [6.0], [10.0]]) +Y_TRAIN = np.array([10.0, 20.0, 30.0]) + + +class FeatureImportanceRegressor(BaseEstimator): + """Estimator that has ``feature_importances_`` attribute.""" + + def __init__(self): + """Initialize instance.""" + super().__init__() + self.feature_importances_ = 42 + + def fit(self, *_): + """Fit method.""" + return self + + +class StdLinearRegression(LinearRegression): + """Expand :class:`sklearn.linear_model.LinearRegression`.""" + + def predict(self, x, return_std=False): + """Expand :meth:`predict`.""" + pred = super().predict(x) + if return_std: + err = np.ones(x.shape[0], dtype=x.dtype) + return (pred, err) + return pred + + +class VarLinearRegression(LinearRegression): + """Expand :class:`sklearn.linear_model.LinearRegression`.""" + + def predict(self, x, return_var=False, return_cov=False, err_2d=False): + """Expand :meth:`predict`.""" + pred = super().predict(x) + if return_var: + err = np.ones(x.shape[0], dtype=x.dtype) + if err_2d: + err = err.reshape(-1, 1) + return (pred, err) + if return_cov: + err = np.ones((x.shape[0], x.shape[0]), dtype=x.dtype) + return (pred, err) + return pred + + +class NonStandardScaler(StandardScaler): + """Expand :class:`sklearn.preprocessing.StandardScaler`.""" + + def fit(self, x, y=None, f=0.0): + """Expand :meth:`fit`.""" + return_value = super().fit(x, y) + if self.mean_ is not None: + self.mean_ += f + return return_value + + +class TestAdvancedPipeline(): + """Tests for ``AdvancedPipeline``.""" + + def test_coef_(self): + """Test ``coef_`` property.""" + pipeline = 
AdvancedPipeline( + [('t', StandardScaler(with_std=False)), ('r', LinearRegression())], + ) + pipeline.fit(np.arange(3).reshape(3, 1), np.arange(3)) + np.testing.assert_allclose(pipeline.coef_, [1.0]) + + def test_feature_importances_(self): + """Test ``feature_importances_`` property.""" + pipeline = AdvancedPipeline( + [('t', StandardScaler()), ('r', FeatureImportanceRegressor())], + ) + assert pipeline.feature_importances_ == 42 + + AREG = AdvancedTransformedTargetRegressor( + transformer=NonStandardScaler(), + regressor=LinearRegression(), + ) + REG = TransformedTargetRegressor( + transformer=NonStandardScaler(), + regressor=LinearRegression(), + ) + STEPS = [ + [('t', NonStandardScaler())], + [('t', NonStandardScaler()), ('r', LinearRegression())], + [('t', NonStandardScaler()), ('r', REG)], + [('t', NonStandardScaler()), ('r', AREG)], + [('t', NonStandardScaler()), ('r', AREG)], + [('t', NonStandardScaler()), ('r', AREG)], + [('t', NonStandardScaler()), ('r', AREG)], + ] + PIPELINES = [AdvancedPipeline(step) for step in STEPS] + KW_X0 = {'a': 1, 't__f': 2.0} + KW_X1 = {'b__a': 1, 't__f': 2.0} + KW_X2 = {'t__wrongparam': 1, 't__f': 2.0} + KW_X3 = {'r__wrongparam': 1, 't__f': 2.0} + KW_X4 = {'r__wrongstep__f': 1, 't__f': 2.0} + KW_X5 = {'r__regressor__wrongparam': 1, 't__f': 2.0} + KW_0 = {'t__f': 2.0} + KW_1 = {'t__f': 2.0, 'r__sample_weight': np.arange(3.0)} + KW_2 = {'t__f': 2.0, 'r__transformer__f': 3.0} + + TEST_CHECK_FINAL_STEP = zip( + PIPELINES, + [TypeError, TypeError, TypeError, True, True, True, True, True], + ) + + @pytest.mark.parametrize('pipeline,output', TEST_CHECK_FINAL_STEP) + def test_check_final_step(self, pipeline, output): + """Test checking if final step.""" + pipeline = clone(pipeline) + if isinstance(output, type): + with pytest.raises(output): + pipeline._check_final_step() + return + assert pipeline._check_final_step() is None + + TEST_FIT_TARGET_TRANSFORMER_ONLY = zip( + PIPELINES, + [{}, {}, {}, KW_X3, KW_X4, KW_0, KW_2], + [TypeError, + TypeError, + TypeError, + ValueError, + ValueError, + (np.array([20.0]), np.array([200.0 / 3.0])), + NotImplementedError], + ) + + @pytest.mark.parametrize('pipeline,kwargs,output', + TEST_FIT_TARGET_TRANSFORMER_ONLY) + def test_fit_target_transformer_only(self, pipeline, kwargs, output): + """Test fitting of target transformer only.""" + pipeline = clone(pipeline) + if isinstance(output, type): + with pytest.raises(output): + pipeline.fit_target_transformer_only(Y_TRAIN, **kwargs) + return + pipeline.fit_target_transformer_only(Y_TRAIN, **kwargs) + transformer = pipeline.steps[-1][1].transformer_ + np.testing.assert_allclose(transformer.mean_, output[0]) + np.testing.assert_allclose(transformer.var_, output[1]) + assert not hasattr(pipeline.steps[-1][1], 'regressor_') + with pytest.raises(NotFittedError): + pipeline.predict(X_TRAIN) + with pytest.raises(NotFittedError): + pipeline.steps[-1][1].predict(X_TRAIN) + + TEST_FIT_TRANSFORMERS_ONLY = zip( + PIPELINES, + [KW_0, KW_0, KW_1, {}, KW_X0, KW_X1, KW_2], + [None, + (np.array([8.333333]), np.array([8.222222])), + (np.array([8.333333]), np.array([8.222222])), + (np.array([6.333333]), np.array([8.222222])), + ValueError, + KeyError, + (np.array([8.333333]), np.array([8.222222]))], + ) + + @pytest.mark.parametrize('pipeline,kwargs,output', + TEST_FIT_TRANSFORMERS_ONLY) + def test_fit_transformers_only(self, pipeline, kwargs, output): + """Test fitting transformers only.""" + pipeline = clone(pipeline) + if isinstance(output, type): + with pytest.raises(output): + 
pipeline.fit_transformers_only(X_TRAIN, Y_TRAIN, **kwargs) + return + pipeline.fit_transformers_only(X_TRAIN, Y_TRAIN, **kwargs) + transformer = pipeline.steps[0][1] + if output is None: + assert not hasattr(transformer, 'mean_') + assert not hasattr(transformer, 'var_') + return + np.testing.assert_allclose(transformer.mean_, output[0]) + np.testing.assert_allclose(transformer.var_, output[1]) + assert pipeline.steps[-1][0] == 'r' + assert pipeline.steps[-1][1] != 'passthrough' + with pytest.raises(NotFittedError): + pipeline.predict(X_TRAIN) + with pytest.raises(NotFittedError): + pipeline.steps[-1][1].predict(X_TRAIN) + + TEST_TRANSFORM_ONLY = [ + (KW_X0, ValueError), + (KW_X1, KeyError), + ({}, np.array([[-1.1624763874], [-0.1162476387], [1.2787240262]])), + (KW_0, np.array([[-3.1624763874], [-2.1162476387], [-0.7212759738]])), + ] + + @pytest.mark.parametrize('kwargs,output', TEST_TRANSFORM_ONLY) + def test_transform_only(self, kwargs, output): + """Test transforming only.""" + pipeline = AdvancedPipeline([ + ('s', StandardScaler()), + ('t', NonStandardScaler()), + ('r', LinearRegression()), + ]) + with pytest.raises(NotFittedError): + pipeline.transform_only(X_TRAIN) + if isinstance(output, type): + with pytest.raises(output): + pipeline.fit(X_TRAIN, Y_TRAIN, **kwargs) + return + pipeline.fit(X_TRAIN, Y_TRAIN, **kwargs) + x_trans = pipeline.transform_only(X_TRAIN) + np.testing.assert_allclose(x_trans, output) + + TEST_TRANSFORM_TARGET_ONLY = zip( + PIPELINES, + [{}, {}, {}, {}, KW_X2, KW_0, KW_X5], + [TypeError, + TypeError, + TypeError, + np.array([-1.22474487, 0.0, 1.22474487]), + np.array([-1.22474487, 0.0, 1.22474487]), + np.array([-1.22474487, 0.0, 1.22474487]), + np.array([-1.22474487, 0.0, 1.22474487])], + ) + + @pytest.mark.parametrize('pipeline,kwargs,output', + TEST_TRANSFORM_TARGET_ONLY) + def test_transform_target_only(self, pipeline, kwargs, output): + """Test transforming of target only.""" + pipeline = clone(pipeline) + if isinstance(output, type): + with pytest.raises(output): + pipeline.fit_target_transformer_only(Y_TRAIN, **kwargs) + return + with pytest.raises(NotFittedError): + pipeline.transform_target_only(Y_TRAIN) + pipeline.fit_target_transformer_only(Y_TRAIN, **kwargs) + y_trans = pipeline.transform_target_only(Y_TRAIN) + np.testing.assert_allclose(y_trans, output) + assert not hasattr(pipeline.steps[-1][1], 'regressor_') + with pytest.raises(NotFittedError): + pipeline.predict(X_TRAIN) + with pytest.raises(NotFittedError): + pipeline.steps[-1][1].predict(X_TRAIN) + + +# AdvancedRFE + + +class NewLinearRegression(LinearRegression): + """Expand ``LinearRegression``.""" + + def predict(self, x_data, always_one=False): + """Add dummy predict_kwargs to function.""" + if always_one: + return 'one' + return super().predict(x_data) + + +class TestAdvancedRFE(): + """Tests for ``AdvancedRFE``.""" + + X_TRAIN = np.array( + [[0.0, 0.0, 0.0], + [2.0, 0.0, 1.0], + [3.0, 0.0, -2.0]], + ) + X_PRED = np.array( + [[1000.0, 100.0, 10.0], + [2000.0, 200.0, 20.0]], + ) + + Y_TRAIN = np.array([0.0, 1.0, -2.0]) + SAMPLE_WEIGHTS = np.array([1.0, 1.0, 0.0]) + + def get_rfe(self, drop=False): + """``AdvancedRFE`` object.""" + if drop: + column_transformer_args = [[ + ('drop', 'drop', [2]), + ('passthrough', 'passthrough', [0, 1]), + ]] + else: + column_transformer_args = [[ + ('passthrough', 'passthrough', [0, 1, 2]), + ]] + pipeline_args = [[ + ('trans', ColumnTransformer(*column_transformer_args)), + ('lin', NewLinearRegression()), + ]] + rfe_kwargs = { + 'estimator': 
AdvancedPipeline(*pipeline_args), + 'n_features_to_select': 1, + 'step': 1, + 'verbose': 1000, + } + return AdvancedRFE(**rfe_kwargs) + + @pytest.fixture + def rfe(self): + """``AdvancedRFE`` object.""" + return self.get_rfe(drop=False) + + @pytest.fixture + def rfe_drop(self): + """``AdvancedRFE`` object where features are dropped.""" + return self.get_rfe(drop=True) + + class NoCoefReg(BaseEstimator): + """Estimator without ``coef_`` and ``feature_importances_``.""" + + def fit(self, *_): + """Fit method.""" + return self + + def test_advanced_rfe_fail(self, rfe_drop): + """Test ``AdvancedRFE`` expected fail.""" + # Transformer that drops features + with pytest.raises(NotImplementedError): + rfe_drop.fit(self.X_TRAIN, self.Y_TRAIN) + + # Regressor without coef_ or feature_importances_ + msg = ("The classifier does not expose 'coef_' or " + "'feature_importances_' attributes") + fail_rfe = AdvancedRFE(self.NoCoefReg()) + with pytest.raises(RuntimeError, match=msg): + fail_rfe.fit(np.arange(6).reshape(3, 2), np.arange(3)) + + # Invalid step + msg = "Step must be >0" + fail_rfe = AdvancedRFE(LinearRegression(), step=-1) + with pytest.raises(ValueError, match=msg): + fail_rfe.fit(np.arange(6).reshape(3, 2), np.arange(3)) + + class FIReg(BaseEstimator): + """Estimator with working ``feature_importances_``.""" + + def fit(self, x_data, *_): + """Fit method.""" + self.feature_importances_ = np.full((4, x_data.shape[1]), 0.0) + for idx in range(self.feature_importances_.shape[1]): + self.feature_importances_[:, idx] = float(idx) + print(self.feature_importances_) + return self + + def test_feature_importances(self): + """Test with ``feature_importances_``.""" + firfe = AdvancedRFE(self.FIReg()) + x_data = np.arange(3 * 6).reshape(3, 6) + y_data = np.arange(3) + firfe.fit(x_data, y_data) + assert firfe.n_features_ == 3 + np.testing.assert_array_equal(firfe.ranking_, [4, 3, 2, 1, 1, 1]) + np.testing.assert_array_equal(firfe.support_, + [False, False, False, True, True, True]) + + def test_advanced_rfe_no_fit_kwargs(self, rfe): + """Test ``AdvancedRFE`` without fit_kwargs.""" + rfe.fit(self.X_TRAIN, self.Y_TRAIN) + assert rfe.n_features_ == 1 + np.testing.assert_array_equal(rfe.ranking_, [2, 3, 1]) + np.testing.assert_array_equal(rfe.support_, [False, False, True]) + est = rfe.estimator_ + assert isinstance(est, AdvancedPipeline) + assert len(est.steps[0][1].transformers_) == 1 + transformer = est.steps[0][1].transformers_[0] + assert transformer[0] == 'passthrough' + assert isinstance(transformer[1], FunctionTransformer) + assert transformer[2] == [0] + np.testing.assert_allclose(est.steps[1][1].coef_, [1.0]) + np.testing.assert_allclose(est.steps[1][1].intercept_, 0.0, atol=1e-10) + pred = rfe.predict(self.X_PRED) + np.testing.assert_allclose(pred, [10.0, 20.0]) + pred_one = rfe.predict(self.X_PRED, always_one=True) + assert pred_one == 'one' + + def test_advanced_rfe_fit_kwargs(self, rfe): + """Test ``AdvancedRFE`` with fit_kwargs.""" + rfe.fit(self.X_TRAIN, self.Y_TRAIN, + lin__sample_weight=self.SAMPLE_WEIGHTS) + assert rfe.n_features_ == 1 + np.testing.assert_array_equal(rfe.ranking_, [1, 3, 2]) + np.testing.assert_array_equal(rfe.support_, [True, False, False]) + est = rfe.estimator_ + assert isinstance(est, AdvancedPipeline) + assert len(est.steps[0][1].transformers_) == 1 + transformer = est.steps[0][1].transformers_[0] + assert transformer[0] == 'passthrough' + assert isinstance(transformer[1], FunctionTransformer) + assert transformer[2] == [0] + 
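# with the third sample weighted to zero, the regression is fit through (0, 0) and (2, 1) only, hence slope 0.5 + 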
np.testing.assert_allclose(est.steps[1][1].coef_, [0.5]) + np.testing.assert_allclose(est.steps[1][1].intercept_, 0.0, atol=1e-10) + pred = rfe.predict(self.X_PRED) + np.testing.assert_allclose(pred, [500.0, 1000.0]) + pred_one = rfe.predict(self.X_PRED, always_one=True) + assert pred_one == 'one' + + def step_score(self, estimator, features): + """Score for a single step rfe.""" + x_test = np.arange(20).reshape(1, 20) + y_test = np.arange(1) + scorer = make_scorer(mean_absolute_error) + return _score_weighted(estimator, x_test[:, features], y_test, scorer) + + def test_alternative_kwargs(self): + """Test alternative kwargs.""" + rfe_kwargs = { + 'estimator': LinearRegression(), + 'n_features_to_select': None, + 'step': 0.1, + } + rfe = AdvancedRFE(**rfe_kwargs) + zero_idx = np.array([1, 3, 5, 7, 9, 11, 13, 15, 17, 19]) + x_data = np.arange(3 * 20).reshape(3, 20) + x_data[:, zero_idx] = 0.0 + y_data = np.arange(3) + + rfe._fit(x_data, y_data, step_score=self.step_score) + + assert rfe.n_features_ == 10 + assert len(rfe.ranking_) == 20 + assert len(rfe.support_) == 20 + expected_support = np.full(20, True) + expected_support[zero_idx] = False + np.testing.assert_array_equal(rfe.support_, expected_support) + assert len(rfe.scores_) == 6 + + +# AdvancedRFECV + + +class TestAdvancedRFECV(): + """Tests for ``AdvancedRFECV``.""" + + @pytest.fixture + def lin(self): + """Return ``LinearRegression`` instance.""" + return LinearRegression() + + def test_init(self, lin): + """Test ``__init__``.""" + rfecv = AdvancedRFECV(estimator=lin, step=2, min_features_to_select=3, + cv=5, scoring='neg_mean_absolute_error', + verbose=42, n_jobs=32) + assert rfecv.estimator is lin + assert rfecv.step == 2 + assert rfecv.min_features_to_select == 3 + assert rfecv.cv == 5 + assert rfecv.scoring == 'neg_mean_absolute_error' + assert rfecv.verbose == 42 + assert rfecv.n_jobs == 32 + + X_DATA = np.array([ + [0, 0, 0], + [1, 1, 0], + [2, 0, 0], + [0, 3, 0], + [0, 3, 0], + [4, 4, 0], + [4, 4, 0], + [1000.0, 2000.0, 0.0], + ]) + Y_DATA = np.array([1, 0, 3, -5, -5, -3, -3, -4]) + SAMPLE_WEIGHTS = np.array([1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0]) + + def test_fail(self, lin): + """Test ``AdvancedRFECV`` expected fail.""" + msg = 'Step must be >0' + rfecv = AdvancedRFECV(estimator=lin, step=-1) + with pytest.raises(ValueError, match=msg): + rfecv.fit(self.X_DATA, self.Y_DATA) + + def test_fit(self, lin): + """Test ``fit``.""" + rfecv = AdvancedRFECV(estimator=lin, step=1, min_features_to_select=1, + cv=2, verbose=1000, n_jobs=2) + rfecv.fit(self.X_DATA, self.Y_DATA, sample_weight=self.SAMPLE_WEIGHTS) + assert rfecv.n_features_ == 2 + np.testing.assert_array_equal(rfecv.support_, [True, True, False]) + np.testing.assert_array_equal(rfecv.ranking_, [1, 1, 2]) + np.testing.assert_allclose(rfecv.grid_scores_, + [-7.28912807, -0.69779194, -0.69779194]) + + est = rfecv.estimator_ + assert isinstance(est, LinearRegression) + np.testing.assert_allclose(est.coef_, [1.0, -2.0]) + np.testing.assert_allclose(est.intercept_, [1.0]) + + def test_step_float(self, lin): + """Test float for ``step``.""" + rfecv = AdvancedRFECV(estimator=lin, step=0.1, cv=2) + rfecv.fit(self.X_DATA, self.Y_DATA) + + assert rfecv.n_features_ == 2 + np.testing.assert_array_equal(rfecv.support_, [True, True, False]) + np.testing.assert_array_equal(rfecv.ranking_, [1, 1, 2]) + np.testing.assert_allclose( + rfecv.grid_scores_, + [-3949286.19763361, -1630913.74908173, -1630913.74908173]) + + est = rfecv.estimator_ + assert isinstance(est, LinearRegression) + 
np.testing.assert_allclose(est.coef_, [0.99952835, -0.5006662]) + np.testing.assert_allclose(est.intercept_, -2.21009561525743) + + +# AdvancedTransformedTargetRegressor + + +class TestAdvancedTransformedTargetRegressor(): + """Tests for ``AdvancedTransformedTargetRegressor``.""" + + def test_regressor_none(self): + """Test ``regressor=None``.""" + areg = AdvancedTransformedTargetRegressor() + areg.fit(np.arange(3).reshape(3, 1), np.arange(3)) + assert isinstance(areg.regressor_, LinearRegression) + + def test_coef_(self): + """Test ``coef_`` property.""" + areg = AdvancedTransformedTargetRegressor() + areg.fit(np.arange(3).reshape(3, 1), np.arange(3)) + np.testing.assert_allclose(areg.coef_, [1.0]) + + def test_feature_importances_(self): + """Test ``feature_importances_`` property.""" + areg = AdvancedTransformedTargetRegressor( + regressor=FeatureImportanceRegressor()) + areg.fit(np.arange(3).reshape(3, 1), np.arange(3)) + assert areg.feature_importances_ == 42 + + AREG = AdvancedTransformedTargetRegressor( + transformer=NonStandardScaler(), + regressor=LinearRegression(), + ) + FIT_KWARGS = [ + {'a': 1}, + {'b__a': 1, 't__f': 2.0}, + {'regressor__wrongparam': 1}, + {'transformer__fails': 1, 'regressor__a': 1, 'regressor__b': 1}, + {}, + {'regressor__sample_weight': np.arange(3.0)}, + ] + + TEST_FIT = zip( + FIT_KWARGS, + [ValueError, + ValueError, + TypeError, + NotImplementedError, + (np.array([20.0]), np.array([200.0 / 3.0]), np.array([0.34756273]), + -2.2012306472308283, + np.array([10.54054054, 19.05405405, 30.40540541])), + (np.array([20.0]), np.array([200.0 / 3.0]), np.array([0.30618622]), + -1.8371173070873827, np.array([12.5, 20.0, 30.0]))], + ) + + @pytest.mark.parametrize('kwargs,output', TEST_FIT) + def test_fit(self, kwargs, output): + """Test fitting with kwargs.""" + reg = clone(self.AREG) + if isinstance(output, type): + with pytest.raises(output): + reg.fit(X_TRAIN, Y_TRAIN, **kwargs) + return + reg.fit(X_TRAIN, Y_TRAIN, **kwargs) + transformer = reg.transformer_ + regressor = reg.regressor_ + np.testing.assert_allclose(transformer.mean_, output[0]) + np.testing.assert_allclose(transformer.var_, output[1]) + np.testing.assert_allclose(regressor.coef_, output[2]) + np.testing.assert_allclose(regressor.intercept_, output[3]) + np.testing.assert_allclose(reg.predict(X_TRAIN), output[4]) + + Y_2D = np.array([[10.0], [20.0], [30.0]]) + TEST_FIT_TRANSFORMER_ONLY = zip( + FIT_KWARGS, + [ValueError, + ValueError, + (Y_2D, {'wrongparam': 1}, np.array([20.0]), np.array([200.0 / 3.0])), + NotImplementedError, + (Y_2D, {}, np.array([20.0]), np.array([200.0 / 3.0])), + (Y_2D, + {'sample_weight': np.arange(3.0)}, + np.array([20.0]), np.array([200.0 / 3.0]))], + ) + + @pytest.mark.parametrize('kwargs,output', TEST_FIT_TRANSFORMER_ONLY) + def test_fit_transformer_only(self, kwargs, output): + """Test fitting of transformer only.""" + reg = clone(self.AREG) + if isinstance(output, type): + with pytest.raises(output): + reg.fit_transformer_only(Y_TRAIN, **kwargs) + return + (y_2d, reg_kwargs) = reg.fit_transformer_only(Y_TRAIN, **kwargs) + np.testing.assert_allclose(y_2d, output[0]) + assert isinstance(reg_kwargs, dict) + assert reg_kwargs.keys() == output[1].keys() + for (key, val) in reg_kwargs.items(): + np.testing.assert_allclose(val, output[1][key]) + transformer = reg.transformer_ + np.testing.assert_allclose(transformer.mean_, output[2]) + np.testing.assert_allclose(transformer.var_, output[3]) + assert not hasattr(reg, 'regressor_') + with pytest.raises(NotFittedError): + 
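reg.predict(X_TRAIN)
+
+    # ``identity`` and ``square`` are deliberately not inverses of each
+    # other; the ``check_inverse`` tests below use this pair to trigger
+    # the "not strictly inverse of each other" warning.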
+    def identity(self, y_data):
+        """Identity function."""
+        return y_data
+
+    def square(self, y_data):
+        """Square function."""
+        return y_data**2
+
+    def test_fit_transformer_fail(self):
+        """Test ``_fit_transformer`` expected fail."""
+        # Give transformer and func/inverse_func
+        msg = ("'transformer' and functions 'func'/'inverse_func' cannot both "
+               "be set.")
+        areg = AdvancedTransformedTargetRegressor(
+            transformer=StandardScaler(),
+            func=self.identity,
+            inverse_func=self.identity,
+        )
+        with pytest.raises(ValueError, match=msg):
+            areg._fit_transformer(self.Y_2D)
+
+        # Give func without inverse_func
+        msg = "When 'func' is provided, 'inverse_func' must also be provided"
+        areg = AdvancedTransformedTargetRegressor(
+            func=self.identity,
+            inverse_func=None,
+        )
+        with pytest.raises(ValueError, match=msg):
+            areg._fit_transformer(self.Y_2D)
+
+        # Warn if inverse_func is not true inverse of func
+        msg = ("The provided functions or transformer are not strictly "
+               "inverse of each other. If you are sure you want to proceed "
+               "regardless, set 'check_inverse=False'")
+        areg = AdvancedTransformedTargetRegressor(
+            func=self.identity,
+            inverse_func=self.square,
+            check_inverse=True,
+        )
+        with pytest.warns(UserWarning, match=msg):
+            areg._fit_transformer(self.Y_2D)
+
+        # Do not warn if not specified
+        areg = AdvancedTransformedTargetRegressor(
+            func=self.identity,
+            inverse_func=self.square,
+            check_inverse=False,
+        )
+        with warnings.catch_warnings():
+            warnings.simplefilter("error")  # make sure no warning is raised
+            areg._fit_transformer(self.Y_2D)
+
+    def test_fit_transformer_transformer(self):
+        """Test ``_fit_transformer`` with transformer."""
+        areg = AdvancedTransformedTargetRegressor(
+            transformer=StandardScaler(),
+        )
+        areg._fit_transformer(self.Y_2D)
+        assert isinstance(areg.transformer_, StandardScaler)
+        np.testing.assert_allclose(areg.transformer_.scale_, [8.16496581])
+        np.testing.assert_allclose(areg.transformer_.mean_, [20.0])
+
+    def test_fit_transformer_func(self):
+        """Test ``_fit_transformer`` with func."""
+        areg = AdvancedTransformedTargetRegressor(
+            func=self.identity,
+            inverse_func=self.square,
+            check_inverse=False,
+        )
+        areg._fit_transformer(self.Y_2D)
+        assert isinstance(areg.transformer_, FunctionTransformer)
+        np.testing.assert_allclose(
+            areg.transformer_.transform([[42.0]]), [[42.0]])
+        np.testing.assert_allclose(
+            areg.transformer_.inverse_transform([[42.0]]), [[1764.0]])
+
+    VAR_AREG = AdvancedTransformedTargetRegressor(
+        transformer=NonStandardScaler(),
+        regressor=VarLinearRegression(),
+    )
+    STD_AREG = AdvancedTransformedTargetRegressor(
+        transformer=NonStandardScaler(),
+        regressor=StdLinearRegression(),
+    )
+    REGS = [
+        AREG,
+        VAR_AREG,
+        STD_AREG,
+        AREG,
+        VAR_AREG,
+        STD_AREG,
+        AREG,
+        VAR_AREG,
+        STD_AREG,
+        AREG,
+        VAR_AREG,
+        STD_AREG,
+        AREG,
+        VAR_AREG,
+        STD_AREG,
+        AREG,
+        VAR_AREG,
+        STD_AREG,
+        AREG,
+        VAR_AREG,
+        STD_AREG,
+        AREG,
+        VAR_AREG,
+        STD_AREG,
+    ]
+    PREDICT_KWARGS = [
+        {},
+        {},
+        {},
+        {'wrong_kwarg': 1},
+        {'wrong_kwarg': 1},
+        {'wrong_kwarg': 1},
+        {'always_return_1d': False},
+        {'always_return_1d': False},
+        {'always_return_1d': False},
+        {'always_return_1d': False, 'return_std': True},
+        {'always_return_1d': False, 'return_std': True},
+        {'always_return_1d': False, 'return_std': True},
+        {'always_return_1d': False, 'return_var': True},
+        {'always_return_1d': False, 'return_var': True},
+        {'always_return_1d': False, 'return_var': True},
+        {'always_return_1d': False, 'return_var': 
True, 'err_2d': True}, + {'always_return_1d': False, 'return_var': True, 'err_2d': True}, + {'always_return_1d': False, 'return_var': True, 'err_2d': True}, + {'always_return_1d': False, 'return_cov': True}, + {'always_return_1d': False, 'return_cov': True}, + {'always_return_1d': False, 'return_cov': True}, + {'return_var': True, 'err_2d': True}, + {'return_var': True, 'err_2d': True}, + {'return_var': True, 'err_2d': True}, + {'return_var': True, 'return_cov': True}, + {'return_var': True, 'return_cov': True}, + {'return_var': True, 'return_cov': True}, + ] + PREDS_1D = [ + np.array([10.5405405405, 19.0540540541, 30.4054054054]), + np.array([12.5, 20.0, 30.0]), + ] + ERR = np.full(3, 200.0 / 3.0) + COV = np.full((3, 3), 200.0 / 3.0) + PRED_OUTPUT_1D = [ + (PREDS_1D, None), + (PREDS_1D, None), + (PREDS_1D, None), + TypeError, + TypeError, + TypeError, + (PREDS_1D, None), + (PREDS_1D, None), + (PREDS_1D, None), + TypeError, + TypeError, + NotImplementedError, + TypeError, + (PREDS_1D, ERR), + TypeError, + TypeError, + (PREDS_1D, ERR), + TypeError, + TypeError, + (PREDS_1D, COV), + TypeError, + TypeError, + (PREDS_1D, ERR), + TypeError, + RuntimeError, + RuntimeError, + RuntimeError, + ] + + TEST_PREDICT_1D = zip(REGS, PREDICT_KWARGS, PRED_OUTPUT_1D) + + @pytest.mark.parametrize('reg,kwargs,output', TEST_PREDICT_1D) + def test_predict_1d(self, reg, kwargs, output): + """Test prediction.""" + for (idx, fit_kwargs) in enumerate( + ({}, {'regressor__sample_weight': [0.0, 1.0, 1.0]})): + new_reg = clone(reg) + with pytest.raises(NotFittedError): + new_reg.predict(X_TRAIN) + new_reg.fit(X_TRAIN, Y_TRAIN, **fit_kwargs) + if isinstance(output, type): + with pytest.raises(output): + new_reg.predict(X_TRAIN, **kwargs) + return + y_pred = new_reg.predict(X_TRAIN, **kwargs) + if output[1] is None: + assert y_pred.shape == output[0][idx].shape + np.testing.assert_allclose(y_pred, output[0][idx]) + else: + assert y_pred[0].shape == output[0][idx].shape + assert y_pred[1].shape == output[1].shape + np.testing.assert_allclose(y_pred[0], output[0][idx]) + np.testing.assert_allclose(y_pred[1], output[1]) + + VAR_AREG_1 = AdvancedTransformedTargetRegressor( + transformer=NonStandardScaler(with_std=False), + regressor=VarLinearRegression(), + ) + PCA_AREG = AdvancedTransformedTargetRegressor( + transformer=PCA(), + regressor=VarLinearRegression(), + ) + REGS = [ + AREG, + VAR_AREG_1, + PCA_AREG, + AREG, + VAR_AREG_1, + PCA_AREG, + AREG, + VAR_AREG_1, + PCA_AREG, + AREG, + VAR_AREG_1, + PCA_AREG, + AREG, + VAR_AREG_1, + PCA_AREG, + AREG, + VAR_AREG_1, + PCA_AREG, + AREG, + VAR_AREG_1, + PCA_AREG, + AREG, + VAR_AREG_1, + PCA_AREG, + ] + PREDS_2D = [ + np.array([[10.5405405405], [19.0540540541], [30.4054054054]]), + np.array([[12.5], [20.0], [30.0]]), + ] + ERR_1D = np.ones(3) + ERR_2D = np.ones((3, 1)) + COV_1 = np.ones((3, 3)) + PRED_OUTPUT_2D = [ + (PREDS_1D, None), + (PREDS_1D, None), + (PREDS_1D, None), + TypeError, + TypeError, + TypeError, + (PREDS_2D, None), + (PREDS_2D, None), + (PREDS_2D, None), + TypeError, + TypeError, + TypeError, + TypeError, + (PREDS_2D, ERR_1D), + NotImplementedError, + TypeError, + (PREDS_2D, ERR_2D), + NotImplementedError, + TypeError, + (PREDS_2D, COV_1), + NotImplementedError, + TypeError, + (PREDS_1D, ERR_1D), + NotImplementedError, + RuntimeError, + RuntimeError, + RuntimeError, + ] + + TEST_PREDICT_2D = zip(REGS, PREDICT_KWARGS, PRED_OUTPUT_2D) + + @pytest.mark.parametrize('reg,kwargs,output', TEST_PREDICT_2D) + def test_predict_2d(self, reg, kwargs, output): + """Test 
prediction.""" + y_train = Y_TRAIN.reshape(-1, 1) + for (idx, fit_kwargs) in enumerate( + ({}, {'regressor__sample_weight': [0.0, 1.0, 1.0]})): + new_reg = clone(reg) + with pytest.raises(NotFittedError): + new_reg.predict(X_TRAIN) + new_reg.fit(X_TRAIN, y_train, **fit_kwargs) + if isinstance(output, type): + with pytest.raises(output): + new_reg.predict(X_TRAIN, **kwargs) + return + y_pred = new_reg.predict(X_TRAIN, **kwargs) + if output[1] is None: + assert y_pred.shape == output[0][idx].shape + np.testing.assert_allclose(y_pred, output[0][idx]) + else: + assert y_pred[0].shape == output[0][idx].shape + assert y_pred[1].shape == output[1].shape + np.testing.assert_allclose(y_pred[0], output[0][idx]) + np.testing.assert_allclose(y_pred[1], output[1]) + + class Reg2DPrediction(BaseEstimator): + """Estimator with 2D prediction output.""" + + def fit(self, *_): + """Fit method.""" + return self + + def predict(self, *_): + """Predict method that returns 2D array.""" + return np.array([[42.0]]) + + def test_predict_output_2d(self): + """Test prediction.""" + areg = AdvancedTransformedTargetRegressor( + transformer=StandardScaler(), + regressor=self.Reg2DPrediction(), + ) + areg.fit(np.arange(3).reshape(3, 1), np.arange(3)) + pred = areg.predict([[1]]) + np.testing.assert_allclose(areg.transformer_.scale_, [0.8164965809]) + np.testing.assert_allclose(areg.transformer_.mean_, [1.0]) + np.testing.assert_allclose(pred, [42.0 * 0.8164965809 + 1.0]) + + TEST_GET_FIT_PARAMS = zip( + FIT_KWARGS[:-1] + [{'regressor__a': 1, 'regressor__b': 2}], + [ValueError, + ValueError, + ({}, {'wrongparam': 1}), + NotImplementedError, + ({}, {}), + ({}, {'a': 1, 'b': 2})], + ) + + @pytest.mark.parametrize('kwargs,output', TEST_GET_FIT_PARAMS) + def test_get_fit_params(self, kwargs, output): + """Test retrieving of fit kwargs.""" + if isinstance(output, type): + with pytest.raises(output): + self.AREG._get_fit_params(kwargs) + return + fit_params = self.AREG._get_fit_params(kwargs) + assert fit_params == output + + TEST_TO_BE_SQUEEZED = [ + (np.array([0]), True, 1, False), + (np.array([0]), True, 2, False), + (np.array([0]), False, 1, False), + (np.array([0]), False, 2, False), + (np.array([[0]]), True, 1, True), + (np.array([[0]]), True, 2, True), + (np.array([[0]]), False, 1, True), + (np.array([[0]]), False, 2, False), + (np.array([[0, 0], [0, 0]]), True, 1, False), + (np.array([[0, 0], [0, 0]]), True, 2, False), + (np.array([[0, 0], [0, 0]]), False, 1, False), + (np.array([[0, 0], [0, 0]]), False, 2, False), + ] + + @pytest.mark.parametrize('array,always_1d,training_dim,output', + TEST_TO_BE_SQUEEZED) + def test_to_be_squeezed(self, array, always_1d, training_dim, output): + """Test check if array should be squeezed.""" + reg = clone(self.AREG) + reg._training_dim = training_dim + squeezed = reg._to_be_squeezed(array, always_return_1d=always_1d) + assert squeezed == output + + +# FeatureSelectionTransformer + + +class TestFeatureSelectionTransformer(): + """Tests for ``FeatureSelectionTransformer``.""" + + @pytest.fixture + def fst(self): + """Return ``FeatureSelectionTransformer`` instance.""" + return FeatureSelectionTransformer(grid_scores=1, n_features=2, + ranking=3, support=4) + + def test_init(self, fst): + """Test ``__init__``.""" + assert fst.grid_scores == 1 + assert fst.n_features == 2 + assert fst.ranking == 3 + assert fst.support == 4 + + def test_fit(self, fst): + """Test ``fit``.""" + output = fst.fit() + assert output is fst + output = fst.fit(1, 'a', valid_kwarg=2) + assert output is fst + + def 
test_get_support_mask(self, fst): + """Test ``_get_support_mask``.""" + mask = fst._get_support_mask() + assert mask == 4 + + def test_more_tags(self, fst): + """Test ``_more_tags``.""" + tags = fst._more_tags() + assert tags['allow_nan'] is True + assert tags is not _DEFAULT_TAGS + new_tags = deepcopy(_DEFAULT_TAGS) + new_tags['allow_nan'] = True + assert tags == new_tags diff --git a/tests/integration/diag_scripts/mlr/test_custom_sklearn_functions.py b/tests/integration/diag_scripts/mlr/test_custom_sklearn_functions.py new file mode 100644 index 0000000000..c37069f9ec --- /dev/null +++ b/tests/integration/diag_scripts/mlr/test_custom_sklearn_functions.py @@ -0,0 +1,1043 @@ +"""Integration tests for functions of custom :mod:`sklearn` functionalities. + +Parts of this code have been copied from :mod:`sklearn`. + +License: BSD 3-Clause License + +Copyright (c) 2007-2020 The scikit-learn developers. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from this + software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +""" + +# pylint: disable=arguments-differ +# pylint: disable=invalid-name +# pylint: disable=no-self-use +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-few-public-methods +# pylint: disable=too-many-arguments + +import warnings +from copy import copy, deepcopy + +import numpy as np +import pytest +import scipy.sparse as sp +from sklearn import datasets +from sklearn.base import BaseEstimator +from sklearn.compose import ColumnTransformer +from sklearn.decomposition import KernelPCA +from sklearn.exceptions import FitFailedWarning, NotFittedError +from sklearn.linear_model import LinearRegression +from sklearn.metrics import ( + explained_variance_score, + make_scorer, + mean_absolute_error, + mean_squared_error, +) +from sklearn.model_selection import LeaveOneGroupOut, ShuffleSplit +from sklearn.svm import SVC + +from esmvaltool.diag_scripts.mlr.custom_sklearn import ( + _DEFAULT_TAGS, + AdvancedPipeline, + AdvancedRFE, + AdvancedRFECV, + FeatureSelectionTransformer, + _check_fit_params, + _determine_key_type, + _fit_and_score_weighted, + _get_fit_parameters, + _is_pairwise, + _map_features, + _num_samples, + _rfe_single_fit, + _safe_indexing, + _safe_split, + _safe_tags, + _score_weighted, + _split_fit_kwargs, + _update_transformers_param, + cross_val_score_weighted, + get_rfecv_transformer, + perform_efecv, +) + +from ._sklearn_utils import ( + FailingClassifier, + _convert_container, + assert_allclose_dense_sparse, + assert_raise_message, + assert_warns, + assert_warns_message, +) + +# _determine_key_type + + +TEST_DETERMINE_KEY_TYPE = [ + (0, 'int'), + ('0', 'str'), + (True, 'bool'), + (np.bool_(True), 'bool'), + ([0, 1, 2], 'int'), + (['0', '1', '2'], 'str'), + ((0, 1, 2), 'int'), + (('0', '1', '2'), 'str'), + (slice(None, None), None), + (slice(0, 2), 'int'), + (np.array([0, 1, 2], dtype=np.int32), 'int'), + (np.array([0, 1, 2], dtype=np.int64), 'int'), + (np.array([0, 1, 2], dtype=np.uint8), 'int'), + ([True, False], 'bool'), + ((True, False), 'bool'), + (np.array([True, False]), 'bool'), + ('col_0', 'str'), + (['col_0', 'col_1', 'col_2'], 'str'), + (('col_0', 'col_1', 'col_2'), 'str'), + (slice('begin', 'end'), 'str'), + (np.array(['col_0', 'col_1', 'col_2']), 'str'), + (np.array(['col_0', 'col_1', 'col_2'], dtype=object), 'str'), +] + + +@pytest.mark.parametrize('key,dtype', TEST_DETERMINE_KEY_TYPE) +def test_determine_key_type(key, dtype): + """Test working ``_determine_key_type``.""" + assert _determine_key_type(key) == dtype + + +def test_determine_key_type_error(): + """Test failing ``_determine_key_type``.""" + with pytest.raises(ValueError, match="No valid specification of the"): + _determine_key_type(1.0) + + +def test_determine_key_type_slice_error(): + """Test failing ``_determine_key_type``.""" + with pytest.raises(TypeError, match="Only array-like or scalar are"): + _determine_key_type(slice(0, 2, 1), accept_slice=False) + + +# _safe_indexing + + +@pytest.mark.parametrize( + 'array_type', ['list', 'array', 'sparse', 'dataframe'], +) +@pytest.mark.parametrize( + 'indices_type', ['list', 'tuple', 'array', 'series', 'slice'], +) +def test_safe_indexing_2d_container_axis_0(array_type, indices_type): + """Test ``_safe_indexing`` with 2D container.""" + indices = [1, 2] + if indices_type == 'slice' and isinstance(indices[1], int): + indices[1] += 1 + array = _convert_container([[1, 2, 3], [4, 5, 6], [7, 8, 9]], array_type) + indices = _convert_container(indices, indices_type) + subset = _safe_indexing(array, indices, 
+
+
+# _safe_indexing
+
+
+@pytest.mark.parametrize(
+    'array_type', ['list', 'array', 'sparse', 'dataframe'],
+)
+@pytest.mark.parametrize(
+    'indices_type', ['list', 'tuple', 'array', 'series', 'slice'],
+)
+def test_safe_indexing_2d_container_axis_0(array_type, indices_type):
+    """Test ``_safe_indexing`` with 2D container."""
+    indices = [1, 2]
+    if indices_type == 'slice' and isinstance(indices[1], int):
+        indices[1] += 1
+    array = _convert_container([[1, 2, 3], [4, 5, 6], [7, 8, 9]], array_type)
+    indices = _convert_container(indices, indices_type)
+    subset = _safe_indexing(array, indices, axis=0)
+    assert_allclose_dense_sparse(
+        subset, _convert_container([[4, 5, 6], [7, 8, 9]], array_type)
+    )
+
+
+X_DATA_TOY = np.arange(9).reshape((3, 3))
+
+
+@pytest.mark.parametrize('array_type', ['list', 'array', 'series'])
+@pytest.mark.parametrize(
+    'indices_type', ['list', 'tuple', 'array', 'series', 'slice'],
+)
+def test_safe_indexing_1d_container(array_type, indices_type):
+    """Test ``_safe_indexing`` with 1D container."""
+    indices = [1, 2]
+    if indices_type == 'slice' and isinstance(indices[1], int):
+        indices[1] += 1
+    array = _convert_container([1, 2, 3, 4, 5, 6, 7, 8, 9], array_type)
+    indices = _convert_container(indices, indices_type)
+    subset = _safe_indexing(array, indices, axis=0)
+    assert_allclose_dense_sparse(
+        subset, _convert_container([2, 3], array_type)
+    )
+
+
+@pytest.mark.parametrize('array_type', ['array', 'sparse', 'dataframe'])
+@pytest.mark.parametrize(
+    'indices_type', ['list', 'tuple', 'array', 'series', 'slice'],
+)
+@pytest.mark.parametrize('indices', [[1, 2], ['col_1', 'col_2']])
+def test_safe_indexing_2d_container_axis_1(array_type, indices_type, indices):
+    """Test ``_safe_indexing`` with 2D container."""
+    # validation of the indices
+    # we make a copy because indices is mutable and shared between tests
+    indices_converted = copy(indices)
+    if indices_type == 'slice' and isinstance(indices[1], int):
+        indices_converted[1] += 1
+
+    columns_name = ['col_0', 'col_1', 'col_2']
+    array = _convert_container(
+        [[1, 2, 3], [4, 5, 6], [7, 8, 9]], array_type, columns_name
+    )
+    indices_converted = _convert_container(indices_converted, indices_type)
+
+    if isinstance(indices[0], str) and array_type != 'dataframe':
+        err_msg = ("Specifying the columns using strings is only supported "
+                   "for pandas DataFrames")
+        with pytest.raises(ValueError, match=err_msg):
+            _safe_indexing(array, indices_converted, axis=1)
+    else:
+        subset = _safe_indexing(array, indices_converted, axis=1)
+        assert_allclose_dense_sparse(
+            subset, _convert_container([[2, 3], [5, 6], [8, 9]], array_type)
+        )
+
+
+@pytest.mark.parametrize('array_read_only', [True, False])
+@pytest.mark.parametrize('indices_read_only', [True, False])
+@pytest.mark.parametrize('array_type', ['array', 'sparse', 'dataframe'])
+@pytest.mark.parametrize('indices_type', ['array', 'series'])
+@pytest.mark.parametrize(
+    'axis, expected_array',
+    [(0, [[4, 5, 6], [7, 8, 9]]), (1, [[2, 3], [5, 6], [8, 9]])],
+)
+def test_safe_indexing_2d_read_only_axis_1(array_read_only, indices_read_only,
+                                           array_type, indices_type, axis,
+                                           expected_array):
+    """Test ``_safe_indexing`` with 2D container."""
+    array = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
+    if array_read_only:
+        array.setflags(write=False)
+    array = _convert_container(array, array_type)
+    indices = np.array([1, 2])
+    if indices_read_only:
+        indices.setflags(write=False)
+    indices = _convert_container(indices, indices_type)
+    subset = _safe_indexing(array, indices, axis=axis)
+    assert_allclose_dense_sparse(
+        subset, _convert_container(expected_array, array_type)
+    )
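+
+
+# Illustrative note (not part of the code copied from sklearn): in the tests
+# above and below, ``_safe_indexing(array, indices, axis=0)`` selects rows
+# and ``axis=1`` selects columns, independent of the container type, e.g.
+#
+#     data = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
+#     _safe_indexing(data, [1, 2], axis=0)  # -> [[4, 5, 6], [7, 8, 9]]
+#
+# String keys are resolved against column names and are therefore only
+# supported for pandas DataFrames.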
+
+
+@pytest.mark.parametrize('array_type', ['list', 'array', 'series'])
+@pytest.mark.parametrize('indices_type', ['list', 'tuple', 'array', 'series'])
+def test_safe_indexing_1d_container_mask(array_type, indices_type):
+    """Test ``_safe_indexing`` with 1D container."""
+    indices = [False] + [True] * 2 + [False] * 6
+    array = _convert_container([1, 2, 3, 4, 5, 6, 7, 8, 9], array_type)
+    indices = _convert_container(indices, indices_type)
+    subset = _safe_indexing(array, indices, axis=0)
+    assert_allclose_dense_sparse(
+        subset, _convert_container([2, 3], array_type)
+    )
+
+
+@pytest.mark.parametrize('array_type', ['array', 'sparse', 'dataframe'])
+@pytest.mark.parametrize('indices_type', ['list', 'tuple', 'array', 'series'])
+@pytest.mark.parametrize(
+    'axis, expected_subset',
+    [(0, [[4, 5, 6], [7, 8, 9]]),
+     (1, [[2, 3], [5, 6], [8, 9]])],
+)
+def test_safe_indexing_2d_mask(array_type, indices_type, axis,
+                               expected_subset):
+    """Test ``_safe_indexing`` with 2D container."""
+    columns_name = ['col_0', 'col_1', 'col_2']
+    array = _convert_container(
+        [[1, 2, 3], [4, 5, 6], [7, 8, 9]], array_type, columns_name
+    )
+    indices = [False, True, True]
+    indices = _convert_container(indices, indices_type)
+
+    subset = _safe_indexing(array, indices, axis=axis)
+    assert_allclose_dense_sparse(
+        subset, _convert_container(expected_subset, array_type)
+    )
+
+
+@pytest.mark.parametrize(
+    'array_type, expected_output_type',
+    [('list', 'list'), ('array', 'array'),
+     ('sparse', 'sparse'), ('dataframe', 'series')],
+)
+def test_safe_indexing_2d_scalar_axis_0(array_type, expected_output_type):
+    """Test ``_safe_indexing`` with 2D container."""
+    array = _convert_container([[1, 2, 3], [4, 5, 6], [7, 8, 9]], array_type)
+    indices = 2
+    subset = _safe_indexing(array, indices, axis=0)
+    expected_array = _convert_container([7, 8, 9], expected_output_type)
+    assert_allclose_dense_sparse(subset, expected_array)
+
+
+@pytest.mark.parametrize('array_type', ['list', 'array', 'series'])
+def test_safe_indexing_1d_scalar(array_type):
+    """Test ``_safe_indexing`` with 1D container."""
+    array = _convert_container([1, 2, 3, 4, 5, 6, 7, 8, 9], array_type)
+    indices = 2
+    subset = _safe_indexing(array, indices, axis=0)
+    assert subset == 3
+
+
+@pytest.mark.parametrize(
+    'array_type, expected_output_type',
+    [('array', 'array'), ('sparse', 'sparse'), ('dataframe', 'series')],
+)
+@pytest.mark.parametrize('indices', [2, 'col_2'])
+def test_safe_indexing_2d_scalar_axis_1(array_type, expected_output_type,
+                                        indices):
+    """Test ``_safe_indexing`` with 2D container."""
+    columns_name = ['col_0', 'col_1', 'col_2']
+    array = _convert_container(
+        [[1, 2, 3], [4, 5, 6], [7, 8, 9]], array_type, columns_name
+    )
+
+    if isinstance(indices, str) and array_type != 'dataframe':
+        err_msg = ("Specifying the columns using strings is only supported "
+                   "for pandas DataFrames")
+        with pytest.raises(ValueError, match=err_msg):
+            _safe_indexing(array, indices, axis=1)
+    else:
+        subset = _safe_indexing(array, indices, axis=1)
+        expected_output = [3, 6, 9]
+        if expected_output_type == 'sparse':
+            # sparse matrices keep the 2D shape
+            expected_output = [[3], [6], [9]]
+        expected_array = _convert_container(
+            expected_output, expected_output_type
+        )
+        assert_allclose_dense_sparse(subset, expected_array)
+
+
+@pytest.mark.parametrize('array_type', ['list', 'array', 'sparse'])
+def test_safe_indexing_none_axis_0(array_type):
+    """Test ``_safe_indexing`` with None."""
+    x_data = _convert_container([[1, 2, 3], [4, 5, 6], [7, 8, 9]], array_type)
+    x_data_subset = _safe_indexing(x_data, None, axis=0)
+    assert_allclose_dense_sparse(x_data_subset, x_data)
+
+
+def test_safe_indexing_pandas_no_matching_cols_error():
+    """Test ``_safe_indexing`` with pandas."""
+    pd = pytest.importorskip('pandas')
+    err_msg = "No valid specification of the columns."
+    x_data = pd.DataFrame(X_DATA_TOY)
+    with pytest.raises(ValueError, match=err_msg):
+        _safe_indexing(x_data, [1.0], axis=1)
+
+
+@pytest.mark.parametrize('axis', [None, 3])
+def test_safe_indexing_error_axis(axis):
+    """Test ``_safe_indexing`` error."""
+    with pytest.raises(ValueError, match="'axis' should be either 0"):
+        _safe_indexing(X_DATA_TOY, [0, 1], axis=axis)
+
+
+@pytest.mark.parametrize('x_constructor', ['array', 'series'])
+def test_safe_indexing_1d_array_error(x_constructor):
+    """Test ``_safe_indexing`` error."""
+    # check that we are raising an error if the array-like passed is 1D and
+    # we try to index on the 2nd dimension
+    x_data = list(range(5))
+    if x_constructor == 'array':
+        x_constructor = np.asarray(x_data)
+    elif x_constructor == 'series':
+        pd = pytest.importorskip("pandas")
+        x_constructor = pd.Series(x_data)
+
+    err_msg = ("'x_data' should be a 2D NumPy array, 2D sparse matrix or "
+               "pandas")
+    with pytest.raises(ValueError, match=err_msg):
+        _safe_indexing(x_constructor, [0, 1], axis=1)
+
+
+def test_safe_indexing_container_axis_0_unsupported_type():
+    """Test ``_safe_indexing`` error."""
+    indices = ["col_1", "col_2"]
+    array = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
+    err_msg = "String indexing is not supported with 'axis=0'"
+    with pytest.raises(ValueError, match=err_msg):
+        _safe_indexing(array, indices, axis=0)
+
+
+# _num_samples
+
+
+def test_retrieve_samples_from_non_standard_shape():
+    """Test ``_num_samples``."""
+    class TestNonNumericShape:
+        """Non-numeric shape."""
+
+        def __init__(self):
+            """Init."""
+            self.shape = ("not numeric",)
+
+        def __len__(self):
+            """Length."""
+            return len([1, 2, 3])
+
+    x_data = TestNonNumericShape()
+    assert _num_samples(x_data) == len(x_data)
+
+    # Check that it gives a good error if there's no __len__
+    class TestNoLenWeirdShape:
+        """Weird shape with no length."""
+
+        def __init__(self):
+            """Init."""
+            self.shape = ("not numeric",)
+
+    with pytest.raises(TypeError, match="Expected sequence or array-like"):
+        _num_samples(TestNoLenWeirdShape())
+
+
+# _check_fit_params
+
+
+@pytest.mark.parametrize('indices', [None, [1, 3]])
+def test_check_fit_params(indices):
+    """Test ``_check_fit_params``."""
+    x_data = np.random.randn(4, 2)
+    fit_params = {
+        'list': [1, 2, 3, 4],
+        'array': np.array([1, 2, 3, 4]),
+        'sparse-col': sp.csc_matrix([1, 2, 3, 4]).T,
+        'sparse-row': sp.csc_matrix([1, 2, 3, 4]),
+        'scalar-int': 1,
+        'scalar-str': 'xxx',
+        'None': None,
+    }
+    result = _check_fit_params(x_data, fit_params, indices)
+    indices_ = (indices if indices is not None
+                else list(range(x_data.shape[0])))
+
+    for key in ['sparse-row', 'scalar-int', 'scalar-str', 'None']:
+        assert result[key] is fit_params[key]
+
+    assert result['list'] == _safe_indexing(fit_params['list'], indices_)
+    np.testing.assert_array_equal(
+        result['array'], _safe_indexing(fit_params['array'], indices_)
+    )
+    assert_allclose_dense_sparse(
+        result['sparse-col'],
+        _safe_indexing(fit_params['sparse-col'], indices_)
+    )
+
+
+# _safe_tags
+
+
+class NoTagsEstimator:
+    """Estimator with no tags."""
+
+
+class MoreTagsEstimator:
+    """Estimator with ``_more_tags``."""
+
+    def _more_tags(self):
+        """Return more tags."""
+        return {"allow_nan": True}
+
+
+class MockBaseEstimator:
+    """Estimator with ``_get_tags``."""
+
+    def _get_tags(self):
+        """Return tags."""
+        return _DEFAULT_TAGS
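+
+
+# Illustrative note (not part of the code copied from sklearn): the three
+# helper classes above cover the fallback logic of ``_safe_tags``, e.g.
+#
+#     _safe_tags(NoTagsEstimator(), key='allow_nan')
+#     # -> _DEFAULT_TAGS['allow_nan'] (no tag interface, use the defaults)
+#     _safe_tags(MoreTagsEstimator(), key='allow_nan')
+#     # -> True (the value from ``_more_tags`` overrides the default)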
+
+
+@pytest.mark.parametrize(
+    'estimator,err_msg',
+    [
+        (MockBaseEstimator(), 'The key xxx is not defined in _get_tags'),
+        (NoTagsEstimator(), 'The key xxx is not defined in _DEFAULT_TAGS'),
+    ],
+)
+def test_safe_tags_error(estimator, err_msg):
+    """Test ``_safe_tags`` with error."""
+    # Check that safe_tags raises error in ambiguous case.
+    with pytest.raises(ValueError, match=err_msg):
+        _safe_tags(estimator, key="xxx")
+
+
+@pytest.mark.parametrize(
+    'estimator,key,expected_results',
+    [
+        (NoTagsEstimator(), None, _DEFAULT_TAGS),
+        (NoTagsEstimator(), 'allow_nan', _DEFAULT_TAGS['allow_nan']),
+        (MoreTagsEstimator(), None, {**_DEFAULT_TAGS, **{'allow_nan': True}}),
+        (MoreTagsEstimator(), 'allow_nan', True),
+        (MockBaseEstimator(), None, _DEFAULT_TAGS),
+        (MockBaseEstimator(), 'allow_nan', _DEFAULT_TAGS['allow_nan']),
+    ],
+)
+def test_safe_tags_no_get_tags(estimator, key, expected_results):
+    """Test ``_safe_tags`` without ``_get_tags``."""
+    assert _safe_tags(estimator, key=key) == expected_results
+
+
+# _is_pairwise
+
+
+def test_is_pairwise():
+    """Test ``_is_pairwise``."""
+    # Simple checks for _is_pairwise
+    pca = KernelPCA(kernel='precomputed')
+    with warnings.catch_warnings():
+        warnings.simplefilter("error")  # make sure that no warning is raised
+        assert _is_pairwise(pca)
+
+    # Pairwise attribute that is not consistent with the pairwise tag
+    class IncorrectTagPCA(KernelPCA):
+        """Class with incorrect _pairwise attribute."""
+
+        _pairwise = False
+
+    pca = IncorrectTagPCA(kernel='precomputed')
+    msg = "_pairwise attribute is inconsistent with tags."
+    with pytest.warns(FutureWarning, match=msg):
+        assert not _is_pairwise(pca)
+
+    # The _pairwise attribute is present and set to True while pairwise tag
+    # is not present
+    class TruePairwise(BaseEstimator):
+        """Class without pairwise tag."""
+
+        _pairwise = True
+
+    true_pairwise = TruePairwise()
+    with pytest.warns(FutureWarning, match=msg):
+        assert _is_pairwise(true_pairwise)
+
+    # Pairwise attribute is not defined thus tag is used
+    est = BaseEstimator()
+    with warnings.catch_warnings():
+        warnings.simplefilter("error")  # make sure that no warning is raised
+        assert not _is_pairwise(est)
+
+
+# _safe_split
+
+
+def test_safe_split():
+    """Test ``_safe_split``."""
+    clf = SVC()
+    clfp = SVC(kernel="precomputed")
+
+    iris = datasets.load_iris()
+    (x_data, y_data) = (iris.data, iris.target)
+    kernel = np.dot(x_data, x_data.T)
+
+    cv = ShuffleSplit(test_size=0.25, random_state=0)
+    train, test = list(cv.split(x_data))[0]
+
+    x_train, y_train = _safe_split(clf, x_data, y_data, train)
+    kernel_train, y_train2 = _safe_split(clfp, kernel, y_data, train)
+    np.testing.assert_array_almost_equal(kernel_train,
+                                         np.dot(x_train, x_train.T))
+    np.testing.assert_array_almost_equal(y_train, y_train2)
+
+    x_test, y_test = _safe_split(clf, x_data, y_data, test, train)
+    kernel_test, y_test2 = _safe_split(clfp, kernel, y_data, test, train)
+    np.testing.assert_array_almost_equal(kernel_test,
+                                         np.dot(x_test, x_train.T))
+    np.testing.assert_array_almost_equal(y_test, y_test2)
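+
+
+# Illustrative note (not part of the code copied from sklearn): for pairwise
+# estimators such as ``SVC(kernel='precomputed')``, ``_safe_split`` slices
+# the square kernel matrix on both axes (rows by the requested indices,
+# columns by the training indices), which is why the test above compares
+# against ``np.dot(x_test, x_train.T)`` rather than plain row indexing.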
+
+
+# _fit_and_score_weighted
+
+
+def test_fit_and_score_weighted_failing():
+    """Test if ``_fit_and_score_weighted`` fails as expected."""
+    # Create a failing classifier to deliberately fail
+    failing_clf = FailingClassifier(FailingClassifier.FAILING_PARAMETER)
+
+    # Dummy X data
+    x_data = np.arange(1, 10)
+    scorer = make_scorer(mean_squared_error)
+    fit_and_score_args = [failing_clf, x_data, None, scorer, None, None, None,
+                          None]
+
+    # Passing error score to trigger the warning message
+    fit_and_score_kwargs = {'error_score': 42}
+
+    # Check if the warning message type is as expected
+    assert_warns(FitFailedWarning, _fit_and_score_weighted,
+                 *fit_and_score_args, **fit_and_score_kwargs)
+
+    # Since we're using FailingClassifier, our error will be the following
+    error_message = "ValueError: Failing classifier failed as required"
+
+    # The warning message we're expecting to see
+    warning_message = ("Estimator fit failed. The score on this train-test "
+                       "partition for these parameters will be set to %f. "
+                       "Details: \n%s" % (fit_and_score_kwargs['error_score'],
+                                          error_message))
+
+    def test_warn_trace(msg):
+        """Traceback in warning message."""
+        assert 'Traceback (most recent call last):\n' in msg
+        split = msg.splitlines()  # note: handles more than '\n'
+        mtb = split[0] + '\n' + split[-1]
+        return warning_message in mtb
+
+    # Check traceback is included
+    assert_warns_message(FitFailedWarning, test_warn_trace,
+                         _fit_and_score_weighted, *fit_and_score_args,
+                         **fit_and_score_kwargs)
+
+    # Check return of error_score in case of failed fit
+    result = _fit_and_score_weighted(*fit_and_score_args,
+                                     **fit_and_score_kwargs)
+    assert isinstance(result, int)
+    assert result == fit_and_score_kwargs['error_score']
+
+    # Check if exception was raised, with default error_score='raise'
+    fit_and_score_kwargs = {'error_score': 'raise'}
+    assert_raise_message(ValueError, "Failing classifier failed as required",
+                         _fit_and_score_weighted, *fit_and_score_args,
+                         **fit_and_score_kwargs)
+
+    # Wrong parameter type for error_score
+    fit_and_score_kwargs = {'error_score': 'wrong_type'}
+    with pytest.raises(ValueError):
+        _fit_and_score_weighted(*fit_and_score_args, **fit_and_score_kwargs)
+
+    assert failing_clf.score() == 0.0
+
+
+X_DATA = np.arange(5).reshape(5, 1)
+Y_DATA = np.array([0, 1, 2, 3, -1])
+TRAIN = np.array([1, 2, 3, 4])
+TEST = np.array([0])
+
+
+TEST_FIT_AND_SCORE_WEIGHTED_NO_WEIGHTS = [
+    (make_scorer(mean_absolute_error), 2.5),
+    (make_scorer(mean_squared_error), 6.25),
+    (make_scorer(explained_variance_score), 1.0),
+]
+
+
+@pytest.mark.parametrize('scorer,output',
+                         TEST_FIT_AND_SCORE_WEIGHTED_NO_WEIGHTS)
+def test_fit_and_score_weighted_no_weights(scorer, output):
+    """Test ``_fit_and_score_weighted`` without weights."""
+    clf = LinearRegression()
+    fit_and_score_weighted_args = [clf, X_DATA, Y_DATA, scorer, TRAIN, TEST]
+    fit_and_score_weighted_kwargs = {
+        'parameters': {'copy_X': True},
+        'fit_params': None,
+    }
+
+    result = _fit_and_score_weighted(*fit_and_score_weighted_args,
+                                     **fit_and_score_weighted_kwargs)
+    np.testing.assert_allclose(clf.coef_, [-0.5])
+    np.testing.assert_allclose(clf.intercept_, 2.5)
+    assert isinstance(result, float)
+    np.testing.assert_allclose(result, output)
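+
+
+# Illustrative check of the numbers above (ordinary least squares on the
+# four training points): TRAIN = [1, 2, 3, 4] with targets [1, 2, 3, -1]
+# yields the fit y = -0.5 * x + 2.5, so the prediction for the held-out
+# point x = 0 (target 0) is 2.5, i.e. MAE = 2.5 and MSE = 6.25.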
+
+
+SAMPLE_WEIGHTS = np.array([1.0, 1.0, 1.0, 1.0, 0.0])
+TEST_FIT_AND_SCORE_WEIGHTED_WEIGHTS = [
+    (make_scorer(mean_absolute_error), 0.0),
+    (make_scorer(mean_squared_error), 0.0),
+    (make_scorer(explained_variance_score), 1.0),
+]
+
+
+@pytest.mark.parametrize('scorer,output', TEST_FIT_AND_SCORE_WEIGHTED_WEIGHTS)
+def test_fit_and_score_weighted_weights(scorer, output):
+    """Test ``_fit_and_score_weighted`` with weights."""
+    clf = LinearRegression()
+    fit_and_score_weighted_args = [clf, X_DATA, Y_DATA, scorer, TRAIN, TEST]
+    fit_and_score_weighted_kwargs = {
+        'parameters': {'copy_X': True},
+        'fit_params': {'sample_weight': SAMPLE_WEIGHTS},
+        'sample_weights': SAMPLE_WEIGHTS,
+    }
+
+    result = _fit_and_score_weighted(*fit_and_score_weighted_args,
+                                     **fit_and_score_weighted_kwargs)
+    np.testing.assert_allclose(clf.coef_, [1.0])
+    np.testing.assert_allclose(clf.intercept_, 0.0, atol=1e-10)
+    assert isinstance(result, float)
+    np.testing.assert_allclose(result, output, atol=1e-10)
+
+
+# _get_fit_parameters
+
+
+STEPS_1 = [('a', 1)]
+STEPS_2 = [('a', 1), ('b', 0)]
+TEST_GET_FIT_PARAMETERS = [
+    ({'a': 1}, STEPS_1, ValueError),
+    ({'a': 1, 'a__b': 1}, STEPS_1, ValueError),
+    ({'a__x': 1}, [], ValueError),
+    ({'a__x': 1}, STEPS_1, {'a': {'x': 1}}),
+    ({'a__x': 1, 'a__y': 2}, STEPS_1, {'a': {'x': 1, 'y': 2}}),
+    ({'a__x': 1, 'a__y__z': 2}, STEPS_1, {'a': {'x': 1, 'y__z': 2}}),
+    ({'a__x': 1, 'b__y': 2}, STEPS_1, ValueError),
+    ({'a__x': 1, 'b__y': 2}, STEPS_2, {'a': {'x': 1}, 'b': {'y': 2}}),
+]
+
+
+@pytest.mark.parametrize('kwargs,steps,output', TEST_GET_FIT_PARAMETERS)
+def test_get_fit_parameters(kwargs, steps, output):
+    """Test retrieving of fit parameters."""
+    if isinstance(output, type):
+        with pytest.raises(output):
+            _get_fit_parameters(kwargs, steps, 'x')
+        return
+    params = _get_fit_parameters(kwargs, steps, 'x')
+    assert params == output
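+
+
+# Illustrative note (inferred from the expected outputs above):
+# ``_get_fit_parameters`` splits each keyword on the first '__' and groups
+# the remainder by pipeline step name, e.g. for steps [('a', 1), ('b', 0)]:
+#
+#     {'a__x': 1, 'b__y': 2}  ->  {'a': {'x': 1}, 'b': {'y': 2}}
+#
+# Keywords without '__' or with unknown step names raise a ValueError.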
+
+
+# _score_weighted
+
+
+def test_score_weighted_failing():
+    """Test if ``_score_weighted`` fails as expected."""
+    error_message = "Scoring must return a number, got None"
+
+    def two_params_scorer(*_, **__):
+        """Scorer function."""
+        return None
+
+    score_args = [None, None, None, two_params_scorer]
+    assert_raise_message(ValueError, error_message, _score_weighted,
+                         *score_args)
+
+
+TEST_SCORE_WEIGHTED_NO_WEIGHTS = [
+    (make_scorer(mean_absolute_error), 1.2),
+    (make_scorer(mean_squared_error), 2.0),
+    (make_scorer(explained_variance_score), 0.0),
+]
+
+
+@pytest.mark.parametrize('scorer,output', TEST_SCORE_WEIGHTED_NO_WEIGHTS)
+def test_score_weighted_no_weights(scorer, output):
+    """Test ``_score_weighted`` without weights."""
+    clf = LinearRegression()
+    clf.fit(X_DATA, Y_DATA)
+    np.testing.assert_allclose(clf.coef_, [0.0], atol=1e-10)
+    np.testing.assert_allclose(clf.intercept_, 1.0)
+    result = _score_weighted(clf, X_DATA, Y_DATA, scorer)
+    assert isinstance(result, float)
+    np.testing.assert_allclose(result, output, atol=1e-10)
+
+
+TEST_SCORE_WEIGHTED_WEIGHTS = [
+    (make_scorer(mean_absolute_error), 0.0),
+    (make_scorer(mean_squared_error), 0.0),
+    (make_scorer(explained_variance_score), 1.0),
+]
+
+
+@pytest.mark.parametrize('scorer,output', TEST_SCORE_WEIGHTED_WEIGHTS)
+def test_score_weighted_weights(scorer, output):
+    """Test ``_score_weighted`` with weights."""
+    clf = LinearRegression()
+    clf.fit(X_DATA, Y_DATA, sample_weight=SAMPLE_WEIGHTS)
+    np.testing.assert_allclose(clf.coef_, [1.0])
+    np.testing.assert_allclose(clf.intercept_, 0.0, atol=1e-10)
+    result = _score_weighted(clf, X_DATA, Y_DATA, scorer,
+                             sample_weights=SAMPLE_WEIGHTS)
+    assert isinstance(result, float)
+    np.testing.assert_allclose(result, output, atol=1e-10)
+
+
+# _split_fit_kwargs
+
+
+def test_split_fit_kwargs():
+    """Test ``_split_fit_kwargs``."""
+    fit_kwargs = {
+        'a': [1, 2],
+        'b': [1, 2],
+        'sample_weight': [1, 2],
+        'a__sample_weight__b': [1, 2],
+        'sample_weight_eval_set': [1, 2],
+        'a__sample_weight_eval_set__b': [1, 2],
+    }
+    train_idx = 0
+    test_idx = 1
+
+    (train_kwargs, test_kwargs) = _split_fit_kwargs(fit_kwargs, train_idx,
+                                                    test_idx)
+
+    assert train_kwargs is not fit_kwargs
+    assert test_kwargs is not fit_kwargs
+    assert len(train_kwargs) == len(fit_kwargs)
+    assert len(test_kwargs) == len(fit_kwargs)
+    for key in train_kwargs:
+        assert train_kwargs[key] is not fit_kwargs[key]
+    for key in test_kwargs:
+        assert test_kwargs[key] is not fit_kwargs[key]
+
+    expected_train_kwargs = {
+        'a': [1, 2],
+        'b': [1, 2],
+        'sample_weight': 1,
+        'a__sample_weight__b': 1,
+        'sample_weight_eval_set': [1, 2],
+        'a__sample_weight_eval_set__b': [1, 2],
+    }
+    assert train_kwargs == expected_train_kwargs
+
+    expected_test_kwargs = {
+        'a': [1, 2],
+        'b': [1, 2],
+        'sample_weight': 2,
+        'a__sample_weight__b': 2,
+        'sample_weight_eval_set': [1, 2],
+        'a__sample_weight_eval_set__b': [1, 2],
+    }
+    assert test_kwargs == expected_test_kwargs
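+
+
+# Illustrative note (inferred from the expected dicts above): only keys that
+# contain 'sample_weight' (but not 'sample_weight_eval_set') are indexed
+# with the train/test indices; everything else is passed through unchanged
+# to both splits, e.g.
+#
+#     _split_fit_kwargs({'sample_weight': [1, 2]}, 0, 1)
+#     # -> ({'sample_weight': 1}, {'sample_weight': 2})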
+
+
+# _rfe_single_fit
+
+
+@pytest.fixture
+def advanced_rfe():
+    """``AdvancedRFE`` object."""
+    rfe_kwargs = {
+        'estimator': LinearRegression(),
+        'n_features_to_select': 1,
+    }
+    return AdvancedRFE(**rfe_kwargs)
+
+
+def test_rfe_single_fit(advanced_rfe):
+    """Test ``_rfe_single_fit``."""
+    x_data = np.array(
+        [[0.0, 0.0, 0.0],
+         [2.0, 0.0, 1.0],
+         [3.0, 0.0, -2.0],
+         [4.0, 0.0, -4.0]],
+    )
+    y_data = np.array([0.0, 1.0, -2.0, -4.0])
+    sample_weights = np.array([1.0, 1.0, 0.0, 1.0])
+    train = np.array([0, 1, 2])
+    test = np.array([3])
+    scorer = make_scorer(mean_absolute_error)
+
+    # No weights
+    scores = _rfe_single_fit(advanced_rfe, advanced_rfe.estimator, x_data,
+                             y_data, train, test, scorer)
+    assert advanced_rfe.n_features_ == 1
+    np.testing.assert_array_equal(advanced_rfe.ranking_, [2, 3, 1])
+    np.testing.assert_array_equal(advanced_rfe.support_,
+                                  [False, False, True])
+    est = advanced_rfe.estimator_
+    assert isinstance(est, LinearRegression)
+    np.testing.assert_allclose(est.coef_, [1.0])
+    np.testing.assert_allclose(est.intercept_, 0.0, atol=1e-10)
+    np.testing.assert_allclose(scores, [0.0, 0.0, 0.0], atol=1e-10)
+
+    # With weights
+    scores = _rfe_single_fit(advanced_rfe, advanced_rfe.estimator, x_data,
+                             y_data, train, test, scorer,
+                             sample_weight=sample_weights)
+    assert advanced_rfe.n_features_ == 1
+    np.testing.assert_array_equal(advanced_rfe.ranking_, [1, 3, 2])
+    np.testing.assert_array_equal(advanced_rfe.support_,
+                                  [True, False, False])
+    est = advanced_rfe.estimator_
+    assert isinstance(est, LinearRegression)
+    np.testing.assert_allclose(est.coef_, [0.5])
+    np.testing.assert_allclose(est.intercept_, 0.0, atol=1e-10)
+    np.testing.assert_allclose(scores, [4.8, 4.8, 6.0])
+
+
+# _map_features
+
+
+TEST_MAP_FEATURES = [
+    ([True, True, True], [0, 1, 2]),
+    ([1, 1, 1], [0, 1, 2]),
+    ([True, True, False], [0, 1]),
+    ([1, 1, 0], [0, 1]),
+    ([True, False, True], [0, 1]),
+    ([1, 0, 1], [0, 1]),
+    ([False, True, True], [0, 1]),
+    ([0, 1, 1], [0, 1]),
+    ([True, False, False], [0]),
+    ([1, 0, 0], [0]),
+    ([False, True, False], [0]),
+    ([0, 1, 0], [0]),
+    ([False, False, True], [0]),
+    ([0, 0, 1], [0]),
+    ([False, False, False], []),
+    ([0, 0, 0], []),
+]
+
+FEATURES = [0, 1, 2]
+
+
+@pytest.mark.parametrize('support,output', TEST_MAP_FEATURES)
+def test_map_features(support, output):
+    """Test ``_map_features``."""
+    new_features = _map_features(FEATURES, support)
+    np.testing.assert_array_equal(new_features, output)
+
+
+# _update_transformers_param
+
+
+class NoPipeline(BaseEstimator):
+    """No pipeline."""
+
+    def __init__(self, test_transformers=1):
+        """Initialize instance."""
+        self.test_transformers = test_transformers
+
+    def fit(self, *_):
+        """Fit method."""
+        return self
+
+
+def test_update_transformers_fail_no_pipeline():
+    """Test ``_update_transformers_param`` expected fail."""
+    msg = 'estimator is not a Pipeline or AdvancedPipeline'
+    est = NoPipeline()
+    with pytest.raises(TypeError, match=msg):
+        _update_transformers_param(est, [])
+
+
+def test_update_transformers_fail_no_column_transformer():
+    """Test ``_update_transformers_param`` expected fail."""
+    msg = 'pipeline step is not a ColumnTransformer'
+    est = AdvancedPipeline([('no_pipeline', NoPipeline())])
+    with pytest.raises(TypeError, match=msg):
+        _update_transformers_param(est, [])
+
+
+def test_update_transformers_param_lin():
+    """Test ``_update_transformers_param``."""
+    est = LinearRegression()
+    params = deepcopy(est.get_params())
+    _update_transformers_param(est, [])
+    assert est.get_params() == params
+
+
+def test_update_transformers_column_transformer():
+    """Test ``_update_transformers_param``."""
+    trans1 = [('1', 'drop', [0, 1, 2])]
+    trans2 = [('2a', 'drop', [0]), ('2b', 'passthrough', [1, 2])]
+    est = AdvancedPipeline([
+        ('trans1', ColumnTransformer(trans1)),
+        ('trans2', ColumnTransformer(trans2)),
+        ('passthrough', 'passthrough'),
+    ])
+    params = deepcopy(est.get_params())
+    _update_transformers_param(est, np.array([True, False, True]))
+    new_params = est.get_params()
+    assert new_params != params
+    assert new_params['trans1__transformers'] == [('1', 'drop', [0, 1])]
+    assert new_params['trans2__transformers'] == [
+        ('2a', 'drop', [0]),
+        ('2b', 'passthrough', [1]),
+    ]
+
+
+# cross_val_score_weighted
+
+
+def test_cross_val_score_weighted():
+    """Test ``cross_val_score_weighted``."""
+    sample_weights = np.array([1.0, 1.0, 0.0, 1.0, 1.0, 0.0])
+    cv_score_kwargs = {
+        'estimator': LinearRegression(),
+        'x_data': np.arange(6).reshape(6, 1),
+        'y_data': np.array([0, 1, 1000, 0, -1, -1000]),
+        'groups': ['A', 'A', 'A', 'B', 'B', 'B'],
+        'scoring': 'neg_mean_absolute_error',
+        'cv': LeaveOneGroupOut(),
+        'fit_params': {'sample_weight': sample_weights},
+        'sample_weights': sample_weights,
+    }
+    scores = cross_val_score_weighted(**cv_score_kwargs)
+    np.testing.assert_allclose(scores, [-2.0, -4.0])
+
+
+# get_rfecv_transformer
+
+
+def test_get_rfecv_transformer_not_fitted():
+    """Test ``get_rfecv_transformer`` expected fail."""
+    rfecv = AdvancedRFECV(LinearRegression())
+    msg = ('RFECV instance used to initialize FeatureSelectionTransformer '
+           'must be fitted')
+    with pytest.raises(NotFittedError, match=msg):
+        get_rfecv_transformer(rfecv)
+
+
+def test_get_rfecv_transformer():
+    """Test ``get_rfecv_transformer``."""
+    rfecv = AdvancedRFECV(LinearRegression(), cv=2)
+    x_data = np.arange(30).reshape(10, 3)
+    y_data = np.arange(10)
+    rfecv.fit(x_data, y_data)
+    transformer = get_rfecv_transformer(rfecv)
+    assert isinstance(transformer, FeatureSelectionTransformer)
+    assert rfecv.n_features_ == transformer.n_features
+    np.testing.assert_allclose(rfecv.grid_scores_, transformer.grid_scores)
+    np.testing.assert_allclose(rfecv.ranking_, transformer.ranking)
+    np.testing.assert_allclose(rfecv.support_, transformer.support)
+    assert len(transformer.grid_scores) == 3
+    assert len(transformer.ranking) == 3
+    assert len(transformer.support) == 3
+
+
+# perform_efecv
+
+
+def test_perform_efecv():
+    """Test ``perform_efecv``."""
+    x_data = np.array([
+        [0, 0, 0],
+        [1, 1, 0],
+        [2, 0, 2],
+        [0, 3, 3],
+        [4, 4, 4],
+        [4, 4, 0],
+    ])
+    y_data = np.array([1, 0, 3, -5, -3, -3])
+
+    (best_est, transformer) = perform_efecv(LinearRegression(), x_data,
+                                            y_data, cv=2)
+
+    assert isinstance(best_est, LinearRegression)
+    np.testing.assert_allclose(best_est.coef_, [1.0, -2.0])
+    np.testing.assert_allclose(best_est.intercept_, 1.0)
+
+    assert isinstance(transformer, FeatureSelectionTransformer)
+    assert transformer.n_features == 2
+    assert len(transformer.grid_scores) == 7
+    np.testing.assert_array_equal(transformer.ranking, [1, 1, 2])
+    np.testing.assert_array_equal(transformer.support, [True, True, False])
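+
+
+# Illustrative note on the numbers above (assumption: the search is
+# exhaustive): with three candidate features there are 2**3 - 1 = 7
+# non-empty feature subsets, which matches
+# ``len(transformer.grid_scores) == 7``, and the selected subset
+# [True, True, False] reproduces y = x0 - 2 * x1 + 1 exactly, matching the
+# fitted coefficients [1.0, -2.0] and intercept 1.0.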
diff --git a/tests/integration/diag_scripts/mlr/test_general.py b/tests/integration/diag_scripts/mlr/test_general.py
new file mode 100644
index 0000000000..d3135006ed
--- /dev/null
+++ b/tests/integration/diag_scripts/mlr/test_general.py
@@ -0,0 +1,89 @@
+"""General tests for the module :mod:`esmvaltool.diag_scripts.mlr.models`."""
+
+import os
+from unittest import mock
+
+import pytest
+import yaml
+
+from esmvaltool.diag_scripts.mlr.models import MLRModel
+
+# Load test configuration
+with open(
+        os.path.join(os.path.dirname(__file__), 'configs',
+                     'test_general.yml')) as file_:
+    CONFIG = yaml.safe_load(file_)
+
+
+@mock.patch('esmvaltool.diag_scripts.mlr.models.logger', autospec=True)
+class TestMLRModel():
+    """Tests for the base class."""
+
+    args = CONFIG['args']
+    kwargs = CONFIG['kwargs']
+
+    def test_direct_initialization(self, mock_logger):
+        """Test direct initialization without factory function."""
+        with pytest.raises(NotImplementedError):
+            MLRModel(*self.args, **self.kwargs)
+        assert mock_logger.mock_calls == []
+
+    def test_register_mlr_model(self, mock_logger):
+        """Test registering subclass."""
+        MLRModel._MODELS = {}
+        assert MLRModel._MODELS == {}
+
+        @MLRModel.register_mlr_model('test_model')
+        class MyMLRModel(MLRModel):
+            """Subclass of `MLRModel`."""
+
+        assert MLRModel._MODELS == {'test_model': MyMLRModel}
+        assert MyMLRModel._MLR_MODEL_TYPE == 'test_model'
+        mock_logger.debug.assert_called_once()
+        MLRModel._MODELS = {}
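+
+    # Illustrative sketch (hypothetical subclass, not part of the test
+    # suite): the registry exercised here lets diagnostics request a model
+    # by name, e.g.
+    #
+    #     @MLRModel.register_mlr_model('gbr')
+    #     class GBRModel(MLRModel):
+    #         """Hypothetical gradient boosting regression model."""
+    #
+    #     model = MLRModel.create('gbr', *args, **kwargs)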
`mock.call` function.""" + call_str = str(call) + call_str = call_str[call_str.find('call.') + len('call.'):] + call_str = call_str[:call_str.find('(')] + return call_str + + +def get_logger_msg(method_calls): + """Get all important logger calls.""" + all_calls = [get_call_name(call) for call in method_calls] + return [call for call in all_calls if call in ('warning', 'error')] + + +class SimplifiedMLRModel(MLRModel): + """Test class to avoid calling the base class `__init__` method.""" + + def __init__(self, cfg): + """Very simplified constructor of the base class.""" + self._cfg = cfg + self._data = {} + self._datasets = {} + self._classes = {} + + +with open( + os.path.join(os.path.dirname(__file__), 'configs', + 'test_load_input_datasets.yml')) as file_: + CONFIG = yaml.safe_load(file_) + + +# TODO: Add tests for ancestors +@pytest.mark.parametrize('data', CONFIG) +@mock.patch('esmvaltool.diag_scripts.mlr.logger', autospec=True) +@mock.patch('esmvaltool.diag_scripts.mlr.models.logger', autospec=True) +def test_load_input_datasets(mock_models_logger, mock_mlr_logger, data): + """Test loading of input datasets.""" + cfg = data['cfg'] + input_datasets = list(cfg['input_data'].values()) + output = data['output'] + mlr_model = SimplifiedMLRModel(cfg) + + # Load input datasets + if 'EXCEPTION' in output: + exc = output['EXCEPTION'] + with pytest.raises(EXCEPTIONS[exc['type']]) as exc_info: + mlr_model._load_input_datasets(input_datasets) + assert exc.get('value', '') in str(exc_info.value) + else: + mlr_model._load_input_datasets(input_datasets) + assert mlr_model._datasets == output + + # Logger calls + logger_calls = mock_models_logger.method_calls + logger_calls.extend(mock_mlr_logger.method_calls) + assert get_logger_msg(logger_calls) == data['logger'] diff --git a/tests/integration/diagnostic.R b/tests/integration/diagnostic.R new file mode 100644 index 0000000000..31cec6474b --- /dev/null +++ b/tests/integration/diagnostic.R @@ -0,0 +1,7 @@ +library(yaml) +args <- commandArgs(trailingOnly = TRUE) +print(paste0("INFO Loading settings from ", args[1])) +settings <- yaml::read_yaml(args[1]) + +print(paste0("INFO Writing settings to ", settings$setting_name)) +yaml::write_yaml(settings, settings$setting_name) diff --git a/tests/integration/diagnostic.jl b/tests/integration/diagnostic.jl new file mode 100644 index 0000000000..d244e94376 --- /dev/null +++ b/tests/integration/diagnostic.jl @@ -0,0 +1,8 @@ +import YAML +@info "Starting diagnostic script with" ARGS +config_file = ARGS[1] +cfg = YAML.load_file(config_file) +out_file = cfg["setting_name"] +@info "Copying file to" out_file +Base.Filesystem.cp(config_file, out_file) +@info "Done" diff --git a/tests/integration/diagnostic.ncl b/tests/integration/diagnostic.ncl new file mode 100644 index 0000000000..c0bfdca1ca --- /dev/null +++ b/tests/integration/diagnostic.ncl @@ -0,0 +1,13 @@ +begin + print("INFO Loading settings from " + getenv("settings")) + loadscript("$settings") +end +print("INFO Writing " + diag_script_info@setting_name) +n = str_get_nl() +result = "run_dir: " + config_user_info@run_dir + n +\ + "work_dir: " + config_user_info@work_dir + n +\ + "plot_dir: " + config_user_info@plot_dir + n +\ + "log_level: " + config_user_info@log_level + n +\ + "input_files: []" + n + +system("echo '" + result + "' > " + diag_script_info@setting_name) diff --git a/tests/integration/diagnostic.py b/tests/integration/diagnostic.py new file mode 100644 index 0000000000..f4ced9d2cf --- /dev/null +++ b/tests/integration/diagnostic.py @@ -0,0 
diff --git a/tests/integration/diagnostic.py b/tests/integration/diagnostic.py
new file mode 100644
index 0000000000..f4ced9d2cf
--- /dev/null
+++ b/tests/integration/diagnostic.py
@@ -0,0 +1,13 @@
+import yaml
+
+from esmvaltool.diag_scripts.shared import run_diagnostic
+
+
+def main(cfg):
+    with open(cfg['setting_name'], 'w') as file:
+        yaml.safe_dump(cfg, file)
+
+
+if __name__ == '__main__':
+    with run_diagnostic() as config:
+        main(config)
diff --git a/tests/integration/preprocessor/_derive/test_interface.py b/tests/integration/preprocessor/_derive/test_interface.py
deleted file mode 100644
index cef8ea93a4..0000000000
--- a/tests/integration/preprocessor/_derive/test_interface.py
+++ /dev/null
@@ -1,69 +0,0 @@
-from iris.cube import Cube, CubeList
-
-from esmvaltool.preprocessor import derive
-from esmvaltool.preprocessor._derive import get_required
-
-
-def test_get_required():
-
-    variables = get_required('alb')
-
-    reference = [
-        {
-            'short_name': 'rsds',
-        },
-        {
-            'short_name': 'rsus',
-        },
-    ]
-
-    assert variables == reference
-
-
-def test_get_required_with_fx():
-
-    variables = get_required('nbp_grid')
-
-    reference = [{
-        'short_name': 'nbp',
-        'fx_files': ['sftlf'],
-    }]
-
-    assert variables == reference
-
-
-def test_derive_nonstandard_nofx():
-
-    short_name = 'alb'
-    long_name = 'albedo at the surface'
-    units = 1
-    standard_name = ''
-
-    rsds = Cube([2.])
-    rsds.standard_name = 'surface_downwelling_shortwave_flux_in_air'
-
-    rsus = Cube([1.])
-    rsus.standard_name = 'surface_upwelling_shortwave_flux_in_air'
-
-    cubes = CubeList([rsds, rsus])
-
-    alb = derive(cubes, short_name, long_name, units, standard_name)
-
-    print(alb)
-    assert alb.var_name == short_name
-    assert alb.long_name == long_name
-    assert alb.units == units
-    assert alb.data == [0.5]
-
-
-def test_derive_noop():
-
-    alb = Cube([1.])
-    alb.var_name = 'alb'
-    alb.long_name = 'albedo at the surface'
-    alb.units = 1
-
-    cube = derive([alb], alb.var_name, alb.long_name, alb.units)
-
-    print(cube)
-    assert cube is alb
diff --git a/tests/integration/preprocessor/_io/test_cleanup.py b/tests/integration/preprocessor/_io/test_cleanup.py
deleted file mode 100644
index c984865c44..0000000000
--- a/tests/integration/preprocessor/_io/test_cleanup.py
+++ /dev/null
@@ -1,38 +0,0 @@
-"""Integration tests for :func:`esmvaltool.preprocessor._io.cleanup`"""
-
-import os
-import tempfile
-import unittest
-
-from esmvaltool.preprocessor import _io
-
-
-class TestCleanup(unittest.TestCase):
-    """Tests for :func:`esmvaltool.preprocessor._io.cleanup`"""
-
-    def setUp(self):
-        self.temp_paths = []
-        descriptor, temp_file = tempfile.mkstemp('.nc')
-        os.close(descriptor)
-        self.temp_paths.append(temp_file)
-        self.temp_paths.append(tempfile.mkdtemp())
-
-    def tearDown(self):
-        for path in self.temp_paths:
-            if os.path.isfile(path):
-                os.remove(path)
-            elif os.path.isdir(path):
-                os.rmdir(path)
-
-    def test_cleanup(self):
-        """Test cleanup"""
-        _io.cleanup([], self.temp_paths)
-        for path in self.temp_paths:
-            self.assertFalse(os.path.exists(path))
-
-    def test_cleanup_when_files_removed(self):
-        """Test cleanup works even with missing files or folders"""
-        self.tearDown()
-        _io.cleanup([], self.temp_paths)
-        for path in self.temp_paths:
-            self.assertFalse(os.path.exists(path))
diff --git a/tests/integration/preprocessor/_io/test_concatenate.py b/tests/integration/preprocessor/_io/test_concatenate.py
deleted file mode 100644
index ab0f0d0752..0000000000
--- a/tests/integration/preprocessor/_io/test_concatenate.py
+++ /dev/null
@@ -1,115 +0,0 @@
-"""Integration tests for :func:`esmvaltool.preprocessor._io.concatenate`."""
-
-import unittest
-
-import numpy as np
-from iris.coords import DimCoord
-from iris.cube import Cube
-from iris.exceptions import ConcatenateError
-
-from esmvaltool.preprocessor import _io
-
-
-class TestConcatenate(unittest.TestCase):
-    """Tests for :func:`esmvaltool.preprocessor._io.concatenate`."""
-
-    def setUp(self):
-        """Start tests."""
-        coord = DimCoord([1, 2], var_name='coord')
-        second_coord = coord.copy([3, 4])
-        third_coord = coord.copy([5, 6])
-        self.raw_cubes = []
-        self.raw_cubes.append(
-            Cube([1, 2], var_name='sample', dim_coords_and_dims=((coord,
-                                                                  0), )))
-        self.raw_cubes.append(
-            Cube([3, 4],
-                 var_name='sample',
-                 dim_coords_and_dims=((second_coord, 0), )))
-        self.raw_cubes.append(
-            Cube([5, 6],
-                 var_name='sample',
-                 dim_coords_and_dims=((third_coord, 0), )))
-
-    def test_concatenate(self):
-        """Test concatenation of two cubes."""
-        concatenated = _io.concatenate(self.raw_cubes)
-        self.assertTrue((concatenated.coord('coord').points == np.array(
-            [1, 2, 3, 4, 5, 6])).all())
-
-    def test_fail_with_duplicates(self):
-        """Test exception raised if two cubes are overlapping."""
-        self.raw_cubes.append(self.raw_cubes[0].copy())
-        with self.assertRaises(ConcatenateError):
-            _io.concatenate(self.raw_cubes)
-
-    def test_fail_metadata_differs(self):
-        """Test exception raised if two cubes have different metadata."""
-        self.raw_cubes[0].units = 'm'
-        with self.assertRaises(ConcatenateError):
-            _io.concatenate(self.raw_cubes)
-
-    def test_fix_attributes(self):
-        """Test fixing attributes for concatenation."""
-        identical_attrs = {
-            'int': 42,
-            'float': 3.1415,
-            'bool': True,
-            'str': 'Hello, world',
-            'list': [1, 1, 2, 3, 5, 8, 13],
-            'tuple': (1, 2, 3, 4, 5),
-            'dict': {
-                1: 'one',
-                2: 'two',
-                3: 'three'
-            },
-            'nparray': np.arange(42),
-        }
-        differing_attrs = [
-            {
-                'new_int': 0,
-                'new_str': 'hello',
-                'new_nparray': np.arange(3),
-                'mix': np.arange(2),
-            },
-            {
-                'new_int': 1,
-                'new_str': 'world',
-                'new_list': [1, 1, 2],
-                'new_tuple': (0, 1),
-                'new_dict': {
-                    0: 'zero',
-                },
-                'mix': {
-                    1: 'one',
-                },
-            },
-            {
-                'new_str': '!',
-                'new_list': [1, 1, 2, 3],
-                'new_tuple': (1, 2, 3),
-                'new_dict': {
-                    0: 'zeroo',
-                    1: 'one',
-                },
-                'new_nparray': np.arange(2),
-                'mix': False,
-            },
-        ]
-        resulting_attrs = {
-            'new_int': '0;1',
-            'new_str': 'hello;world;!',
-            'new_nparray': '[0 1 2];[0 1]',
-            'new_list': '[1, 1, 2];[1, 1, 2, 3]',
-            'new_tuple': '(0, 1);(1, 2, 3)',
-            'new_dict': "{0: 'zero'};{0: 'zeroo', 1: 'one'}",
-            'mix': "[0 1];{1: 'one'};False",
-        }
-        resulting_attrs.update(identical_attrs)
-
-        for idx in range(3):
-            self.raw_cubes[idx].attributes = identical_attrs
-            self.raw_cubes[idx].attributes.update(differing_attrs[idx])
-        _io._fix_cube_attributes(self.raw_cubes)  # noqa
-        for cube in self.raw_cubes:
-            self.assertTrue(cube.attributes == resulting_attrs)
diff --git a/tests/integration/preprocessor/_io/test_load.py b/tests/integration/preprocessor/_io/test_load.py
deleted file mode 100644
index 68b7beab8a..0000000000
--- a/tests/integration/preprocessor/_io/test_load.py
+++ /dev/null
@@ -1,82 +0,0 @@
-"""Integration tests for :func:`esmvaltool.preprocessor._io.load`."""
-
-import os
-import tempfile
-import unittest
-
-import iris
-import numpy as np
-from iris.coords import DimCoord
-from iris.cube import Cube
-
-from esmvaltool.preprocessor._io import concatenate_callback, load
-
-
-def _create_sample_cube():
-    coord = DimCoord([1, 2], standard_name='latitude', units='degrees_north')
-    cube = Cube([1, 2], var_name='sample', dim_coords_and_dims=((coord, 0), ))
-    return cube
-
-
-class TestLoad(unittest.TestCase):
-    """Tests for :func:`esmvaltool.preprocessor.load`."""
-
-    def setUp(self):
-        """Start tests."""
-        self.temp_files = []
-
-    def tearDown(self):
-        """Finish tests."""
-        for temp_file in self.temp_files:
-            os.remove(temp_file)
-
-    def _save_cube(self, cube):
-        descriptor, temp_file = tempfile.mkstemp('.nc')
-        os.close(descriptor)
-        iris.save(cube, temp_file)
-        self.temp_files.append(temp_file)
-        return temp_file
-
-    def test_load(self):
-        """Test loading multiple files."""
-        cube = _create_sample_cube()
-        temp_file = self._save_cube(cube)
-
-        cubes = load(temp_file)
-        cube = cubes[0]
-        self.assertEqual(1, len(cubes))
-        self.assertEqual(temp_file, cube.attributes['source_file'])
-        self.assertTrue((cube.data == np.array([1, 2])).all())
-        self.assertTrue(
-            (cube.coord('latitude').points == np.array([1, 2])).all())
-
-    def test_callback_remove_attributes(self):
-        """Test callback remove unwanted attributes."""
-        attributes = ('history', 'creation_date', 'tracking_id')
-        for _ in range(2):
-            cube = _create_sample_cube()
-            for attr in attributes:
-                cube.attributes[attr] = attr
-            self._save_cube(cube)
-        for temp_file in self.temp_files:
-            cubes = load(temp_file, callback=concatenate_callback)
-            cube = cubes[0]
-            self.assertEqual(1, len(cubes))
-            self.assertTrue((cube.data == np.array([1, 2])).all())
-            self.assertTrue(
-                (cube.coord('latitude').points == np.array([1, 2])).all())
-            for attr in attributes:
-                self.assertTrue(attr not in cube.attributes)
-
-    def test_callback_fix_lat_units(self):
-        """Test callback for fixing units."""
-        cube = _create_sample_cube()
-        temp_file = self._save_cube(cube)
-
-        cubes = load(temp_file, callback=concatenate_callback)
-        cube = cubes[0]
-        self.assertEqual(1, len(cubes))
-        self.assertTrue((cube.data == np.array([1, 2])).all())
-        self.assertTrue(
-            (cube.coord('latitude').points == np.array([1, 2])).all())
-        self.assertEquals(cube.coord('latitude').units, 'degrees_north')
diff --git a/tests/integration/preprocessor/_io/test_save.py b/tests/integration/preprocessor/_io/test_save.py
deleted file mode 100644
index b6f5f1495f..0000000000
--- a/tests/integration/preprocessor/_io/test_save.py
+++ /dev/null
@@ -1,121 +0,0 @@
-"""Integration tests for :func:`esmvaltool.preprocessor.save`"""
-
-import os
-import tempfile
-import unittest
-
-import iris
-import netCDF4
-import numpy as np
-from iris.coords import DimCoord
-from iris.cube import Cube
-
-from esmvaltool.preprocessor import save
-
-
-class TestSave(unittest.TestCase):
-    """Tests for :func:`esmvaltool.preprocessor.save`"""
-
-    def setUp(self):
-        self.temp_files = []
-
-    def tearDown(self):
-        for temp_file in self.temp_files:
-            if os.path.isfile(temp_file):
-                os.remove(temp_file)
-
-    def _create_sample_cube(self):
-        lat = DimCoord(
-            np.asarray([1, 2], np.single),
-            standard_name='latitude',
-            units='degrees_north')
-        lon = DimCoord(
-            np.asarray([1, 2], np.single),
-            standard_name='longitude',
-            units='degrees_east')
-        time = DimCoord(
-            np.asarray([1, 2], np.single),
-            standard_name='time',
-            units='days since 2000-1-1')
-
-        cube = Cube(
-            np.random.random_sample([2, 2, 2]),
-            var_name='sample',
-            units='1',
-            dim_coords_and_dims=((lat, 0), (lon, 1), (time, 2)))
-
-        descriptor, filename = tempfile.mkstemp('.nc')
-        os.close(descriptor)
-        self.temp_files.append(filename)
-        return cube, filename
-
-    def test_save(self):
-        """Test save"""
-        cube, filename = self._create_sample_cube()
-        path = save([cube], filename)
-        loaded_cube = iris.load_cube(path)
-        self._compare_cubes(cube, loaded_cube)
-
-    def test_save_zlib(self):
"""Test save""" - cube, filename = self._create_sample_cube() - path = save([cube], filename, compress=True) - loaded_cube = iris.load_cube(path) - self._compare_cubes(cube, loaded_cube) - handler = netCDF4.Dataset(path, 'r') - sample_filters = handler.variables['sample'].filters() - self.assertTrue(sample_filters['zlib']) - self.assertTrue(sample_filters['shuffle']) - self.assertEqual(sample_filters['complevel'], 4) - handler.close() - - def test_fail_without_filename(self): - """Test save fails if filename is not provided.""" - cube, _ = self._create_sample_cube() - with self.assertRaises(TypeError): - save([cube]) - - def test_save_optimized_map(self): - """Test save""" - cube, filename = self._create_sample_cube() - path = save([cube], filename, optimize_access='map') - loaded_cube = iris.load_cube(path) - self._compare_cubes(cube, loaded_cube) - self._check_chunks(path, [2, 2, 1]) - - def test_save_optimized_timeseries(self): - """Test save""" - cube, filename = self._create_sample_cube() - path = save([cube], filename, optimize_access='timeseries') - loaded_cube = iris.load_cube(path) - self._compare_cubes(cube, loaded_cube) - self._check_chunks(path, [1, 1, 2]) - - def test_save_optimized_lat(self): - """Test save""" - cube, filename = self._create_sample_cube() - path = save([cube], filename, optimize_access='latitude') - loaded_cube = iris.load_cube(path) - self._compare_cubes(cube, loaded_cube) - expected_chunks = [2, 1, 1] - self._check_chunks(path, expected_chunks) - - def _check_chunks(self, path, expected_chunks): - handler = netCDF4.Dataset(path, 'r') - chunking = handler.variables['sample'].chunking() - handler.close() - self.assertListEqual(expected_chunks, chunking) - - def test_save_optimized_lon_time(self): - """Test save""" - cube, filename = self._create_sample_cube() - path = save([cube], filename, optimize_access='longitude time') - loaded_cube = iris.load_cube(path) - self._compare_cubes(cube, loaded_cube) - self._check_chunks(path, [1, 2, 2]) - - def _compare_cubes(self, cube, loaded_cube): - self.assertTrue((cube.data == loaded_cube.data).all()) - for coord in cube.coords(): - self.assertTrue( - (coord.points == loaded_cube.coord(coord.name()).points).all()) diff --git a/tests/integration/preprocessor/_mask/__init__.py b/tests/integration/preprocessor/_mask/__init__.py deleted file mode 100644 index 147b3fce6e..0000000000 --- a/tests/integration/preprocessor/_mask/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -""" -Test _mask.py - -Integration tests for the esmvaltool.preprocessor._mask module -""" diff --git a/tests/integration/preprocessor/_mask/test_mask.py b/tests/integration/preprocessor/_mask/test_mask.py deleted file mode 100644 index 47d64e30c7..0000000000 --- a/tests/integration/preprocessor/_mask/test_mask.py +++ /dev/null @@ -1,143 +0,0 @@ -""" -Test mask - -Integration tests for the :func:`esmvaltool.preprocessor._mask` -module. - -""" - -import os -import tempfile -import unittest - -import iris -import numpy as np - -import tests -from esmvaltool.preprocessor import (PreprocessorFile, mask_fillvalues, - mask_landsea, mask_landseaice) - - -class Test(tests.Test): - """Test class""" - - def setUp(self): - """Assemble a stock cube""" - fx_data = np.empty((3, 3)) - fx_data[:] = 60. - self.new_cube_data = np.empty((3, 3)) - self.new_cube_data[:] = 200. 
-        crd_sys = iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS)
-        lons = iris.coords.DimCoord([0, 1.5, 3],
-                                    standard_name='longitude',
-                                    bounds=[[0, 1], [1, 2], [2, 3]],
-                                    units='degrees_east',
-                                    coord_system=crd_sys)
-        lats = iris.coords.DimCoord([0, 1.5, 3],
-                                    standard_name='latitude',
-                                    bounds=[[0, 1], [1, 2], [2, 3]],
-                                    units='degrees_north',
-                                    coord_system=crd_sys)
-        self.coords_spec = [(lats, 0), (lons, 1)]
-        self.fx_mask = iris.cube.Cube(
-            fx_data, dim_coords_and_dims=self.coords_spec)
-
-    def test_mask_landsea(self):
-        """Test mask_landsea func"""
-        iris.save(self.fx_mask, 'sftlf_test.nc')
-        new_cube_land = iris.cube.Cube(
-            self.new_cube_data, dim_coords_and_dims=self.coords_spec)
-        new_cube_sea = iris.cube.Cube(
-            self.new_cube_data, dim_coords_and_dims=self.coords_spec)
-
-        # mask with fx files
-        result_land = mask_landsea(new_cube_land, ['sftlf_test.nc'], 'land')
-        result_sea = mask_landsea(new_cube_sea, ['sftlf_test.nc'], 'sea')
-        expected = np.ma.empty((3, 3))
-        expected.data[:] = 200.
-        expected.mask = np.ones((3, 3), bool)
-        # set fillvalues so we are sure they are equal
-        np.ma.set_fill_value(result_land.data, 1e+20)
-        np.ma.set_fill_value(result_sea.data, 1e+20)
-        np.ma.set_fill_value(expected, 1e+20)
-        self.assertArrayEqual(result_land.data.mask, expected.mask)
-        expected.mask = np.zeros((3, 3), bool)
-        self.assertArrayEqual(result_sea.data, expected)
-        # remove the fx.nc temporary file
-        os.remove('sftlf_test.nc')
-
-        # mask with shp files
-        new_cube_land = iris.cube.Cube(
-            self.new_cube_data, dim_coords_and_dims=self.coords_spec)
-        new_cube_sea = iris.cube.Cube(
-            self.new_cube_data, dim_coords_and_dims=self.coords_spec)
-
-        # bear in mind all points are in the ocean
-        result_land = mask_landsea(new_cube_land, None, 'land')
-        np.ma.set_fill_value(result_land.data, 1e+20)
-        expected.mask = np.zeros((3, 3), bool)
-        self.assertArrayEqual(result_land.data, expected)
-
-    def test_mask_landseaice(self):
-        """Test mask_landseaice func"""
-        iris.save(self.fx_mask, 'sftgif_test.nc')
-        new_cube_ice = iris.cube.Cube(
-            self.new_cube_data, dim_coords_and_dims=self.coords_spec)
-        result_ice = mask_landseaice(new_cube_ice, ['sftgif_test.nc'], 'ice')
-        expected = np.ma.empty((3, 3))
-        expected.data[:] = 200.
-        expected.mask = np.ones((3, 3), bool)
-        np.ma.set_fill_value(result_ice.data, 1e+20)
-        np.ma.set_fill_value(expected, 1e+20)
-        self.assertArrayEqual(result_ice.data.mask, expected.mask)
-        os.remove('sftgif_test.nc')
-
-    def test_mask_fillvalues(self):
-        """Test the fillvalues mask: func mask_fillvalues"""
-        data_1 = np.ma.empty((4, 3, 3))
-        data_1[:] = 10.
-        data_2 = np.ma.empty((4, 3, 3))
-        data_2[:] = 10.
-        data_2.mask = np.ones((4, 3, 3), bool)
-        crd_sys = iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS)
-        lons = iris.coords.DimCoord([0, 1.5, 3],
-                                    standard_name='longitude',
-                                    bounds=[[0, 1], [1, 2], [2, 3]],
-                                    units='degrees_east',
-                                    coord_system=crd_sys)
-        lats = iris.coords.DimCoord([0, 1.5, 3],
-                                    standard_name='latitude',
-                                    bounds=[[0, 1], [1, 2], [2, 3]],
-                                    units='degrees_north',
-                                    coord_system=crd_sys)
-        times = iris.coords.DimCoord([0, 1.5, 2.5, 3.5],
-                                     standard_name='time',
-                                     bounds=[[0, 1], [1, 2], [2, 3], [3, 4]],
-                                     units='hours')
-        coords_spec = [(times, 0), (lats, 1), (lons, 2)]
-        cube_1 = iris.cube.Cube(data_1, dim_coords_and_dims=coords_spec)
-        cube_2 = iris.cube.Cube(data_2, dim_coords_and_dims=coords_spec)
-        filename_1 = tempfile.NamedTemporaryFile().name + '.nc'
-        filename_2 = tempfile.NamedTemporaryFile().name + '.nc'
-        product_1 = PreprocessorFile(
-            attributes={'filename': filename_1}, settings={})
-        product_1.cubes = [cube_1]
-        product_2 = PreprocessorFile(
-            attributes={'filename': filename_2}, settings={})
-        product_2.cubes = [cube_2]
-        results = mask_fillvalues({product_1, product_2},
-                                  0.95,
-                                  min_value=-1.e10,
-                                  time_window=1)
-        result_1, result_2 = None, None
-        for product in results:
-            if product.filename == filename_1:
-                result_1 = product.cubes[0]
-            if product.filename == filename_2:
-                result_2 = product.cubes[0]
-        self.assertArrayEqual(result_2.data.mask, data_2.mask)
-        self.assertArrayEqual(result_1.data, data_1)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/tests/integration/preprocessor/_regrid/__init__.py b/tests/integration/preprocessor/_regrid/__init__.py
deleted file mode 100644
index 650c03c7cd..0000000000
--- a/tests/integration/preprocessor/_regrid/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-"""
-Integration tests for the :mod:`esmvaltool.preprocessor._regrid` module.
-
-"""
diff --git a/tests/integration/preprocessor/_regrid/test_extract_levels.py b/tests/integration/preprocessor/_regrid/test_extract_levels.py
deleted file mode 100644
index 15c5d3150b..0000000000
--- a/tests/integration/preprocessor/_regrid/test_extract_levels.py
+++ /dev/null
@@ -1,93 +0,0 @@
-"""
-Integration tests for the :func:`esmvaltool.preprocessor.regrid.extract_levels`
-function.
-
-"""
-
-import unittest
-
-import iris
-import numpy as np
-
-import tests
-from esmvaltool.preprocessor._regrid import _MDI, extract_levels
-from tests.unit.preprocessor._regrid import _make_cube, _make_vcoord
-
-
-class Test(tests.Test):
-    def setUp(self):
-        shape = (3, 2, 2)
-        self.z = shape[0]
-        data = np.arange(np.prod(shape)).reshape(shape)
-        cubes = iris.cube.CubeList()
-        # Create first realization cube.
-        cube = _make_cube(data)
-        coord = iris.coords.DimCoord(0, standard_name='realization')
-        cube.add_aux_coord(coord)
-        cubes.append(cube)
-        # Create second realization cube.
-        cube = _make_cube(data + np.prod(shape))
-        coord = iris.coords.DimCoord(1, standard_name='realization')
-        cube.add_aux_coord(coord)
-        cubes.append(cube)
-        # Create a 4d synthetic test cube.
-        self.cube = cubes.merge_cube()
-        coord = self.cube.coord(axis='z', dim_coords=True)
-        self.shape = list(self.cube.shape)
-        [self.z_dim] = self.cube.coord_dims(coord)
-
-    def test_nop__levels_match(self):
-        vcoord = _make_vcoord(self.z)
-        self.assertEqual(self.cube.coord(axis='z', dim_coords=True), vcoord)
-        levels = vcoord.points
-        result = extract_levels(self.cube, levels, 'linear')
-        self.assertEqual(result, self.cube)
-        self.assertEqual(id(result), id(self.cube))
-
-    def test_interpolation__linear(self):
-        levels = [0.5, 1.5]
-        scheme = 'linear'
-        result = extract_levels(self.cube, levels, scheme)
-        expected = np.array([[[[2., 3.], [4., 5.]], [[6., 7.], [8., 9.]]],
-                             [[[14., 15.], [16., 17.]], [[18., 19.],
-                                                         [20., 21.]]]])
-        self.assertArrayEqual(result.data, expected)
-        self.shape[self.z_dim] = len(levels)
-        self.assertEqual(result.shape, tuple(self.shape))
-
-    def test_interpolation__nearest(self):
-        levels = [0.49, 1.51]
-        scheme = 'nearest'
-        result = extract_levels(self.cube, levels, scheme)
-        expected = np.array([[[[0., 1.], [2., 3.]], [[8., 9.], [10., 11.]]],
-                             [[[12., 13.], [14., 15.]], [[20., 21.],
-                                                         [22., 23.]]]])
-        self.assertArrayEqual(result.data, expected)
-        self.shape[self.z_dim] = len(levels)
-        self.assertEqual(result.shape, tuple(self.shape))
-
-    def test_interpolation__extrapolated_NaN_filling(self):
-        levels = [-10, 1, 2, 10]
-        scheme = 'nearest'
-        result = extract_levels(self.cube, levels, scheme)
-        expected = np.array(
-            [[[[_MDI, _MDI], [_MDI, _MDI]], [[4., 5.], [6., 7.]],
-              [[8., 9.], [10., 11.]], [[_MDI, _MDI], [_MDI, _MDI]]],
-             [[[_MDI, _MDI], [_MDI, _MDI]], [[16., 17.], [18., 19.]],
-              [[20., 21.], [22., 23.]], [[_MDI, _MDI], [_MDI, _MDI]]]])
-        self.assertArrayEqual(result.data, expected)
-        self.shape[self.z_dim] = len(levels)
-        self.assertEqual(result.shape, tuple(self.shape))
-
-    def test_interpolation__scalar_collapse(self):
-        level = 1
-        scheme = 'nearest'
-        result = extract_levels(self.cube, level, scheme)
-        expected = np.array([[[4., 5.], [6., 7.]], [[16., 17.], [18., 19.]]])
-        self.assertArrayEqual(result.data, expected)
-        del self.shape[self.z_dim]
-        self.assertEqual(result.shape, tuple(self.shape))
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/tests/integration/preprocessor/_regrid/test_get_cmor_levels.py b/tests/integration/preprocessor/_regrid/test_get_cmor_levels.py
deleted file mode 100644
index 56662b204a..0000000000
--- a/tests/integration/preprocessor/_regrid/test_get_cmor_levels.py
+++ /dev/null
@@ -1,57 +0,0 @@
-"""
-Integration tests for the :func:
-`esmvaltool.preprocessor.regrid.get_cmor_levels`
-function.
- -""" - -import unittest - -from esmvaltool._config import read_config_developer_file -from esmvaltool.cmor.table import read_cmor_tables -from esmvaltool.preprocessor import _regrid - - -class TestGetCmorLevels(unittest.TestCase): - @staticmethod - def setUpClass(): - """Read cmor tables before testing""" - read_cmor_tables(read_config_developer_file()) - - def test_cmip6_alt40(self): - self.assertListEqual( - _regrid.get_cmor_levels('CMIP6', 'alt40'), [ - 240.0, 720.0, 1200.0, 1680.0, 2160.0, 2640.0, 3120.0, 3600.0, - 4080.0, 4560.0, 5040.0, 5520.0, 6000.0, 6480.0, 6960.0, 7440.0, - 7920.0, 8400.0, 8880.0, 9360.0, 9840.0, 10320.0, 10800.0, - 11280.0, 11760.0, 12240.0, 12720.0, 13200.0, 13680.0, 14160.0, - 14640.0, 15120.0, 15600.0, 16080.0, 16560.0, 17040.0, 17520.0, - 18000.0, 18480.0, 18960.0 - ]) - - def test_cmip6_p200(self): - self.assertListEqual( - _regrid.get_cmor_levels('CMIP6', 'p200'), [20000.]) - - def test_cmip5_alt40(self): - self.assertListEqual( - _regrid.get_cmor_levels('CMIP5', 'plevs'), [ - 100000., 92500., 85000., 70000., 60000., 50000., 40000., - 30000., 25000., 20000., 15000., 10000., 7000., 5000., 3000., - 2000., 1000. - ]) - - def test_cmip5_p500(self): - self.assertListEqual(_regrid.get_cmor_levels('CMIP5', 'p500'), [50000]) - - def test_not_values_in_coordinate(self): - with self.assertRaises(ValueError): - _regrid.get_cmor_levels('CMIP6', 'time') - - def test_bad_table(self): - with self.assertRaises(ValueError): - _regrid.get_cmor_levels('CMOCK', 'p500') - - def test_bad_coordinate(self): - with self.assertRaises(ValueError): - _regrid.get_cmor_levels('CMIP5', 'uglycoord') diff --git a/tests/integration/preprocessor/_regrid/test_get_file_levels.py b/tests/integration/preprocessor/_regrid/test_get_file_levels.py deleted file mode 100644 index a394715595..0000000000 --- a/tests/integration/preprocessor/_regrid/test_get_file_levels.py +++ /dev/null @@ -1,43 +0,0 @@ -""" -Integration tests for the :func: -`esmvaltool.preprocessor.regrid.get_cmor_levels` -function. - -""" - -import os -import tempfile -import unittest - -import iris -import iris.coords -import iris.cube -import numpy as np - -from esmvaltool.preprocessor import _regrid - - -class TestGetFileLevels(unittest.TestCase): - def setUp(self): - """Prepare the sample file for the test""" - self.cube = iris.cube.Cube(np.ones([2, 2, 2]), var_name='var') - self.cube.add_dim_coord( - iris.coords.DimCoord(np.arange(0, 2), var_name='coord'), 0) - - self.cube.coord('coord').attributes['positive'] = 'up' - iris.util.guess_coord_axis(self.cube.coord('coord')) - descriptor, self.path = tempfile.mkstemp('.nc') - os.close(descriptor) - print(self.cube) - iris.save(self.cube, self.path) - - def tearDown(self): - """Remove the sample file for the test""" - os.remove(self.path) - - def test_get_coord(self): - self.assertListEqual( - _regrid.get_reference_levels( - self.path, 'project', 'dataset', 'short_name', 'output_dir'), - [0., 1] - ) diff --git a/tests/integration/preprocessor/_regrid/test_regrid.py b/tests/integration/preprocessor/_regrid/test_regrid.py deleted file mode 100644 index 7ab337710f..0000000000 --- a/tests/integration/preprocessor/_regrid/test_regrid.py +++ /dev/null @@ -1,171 +0,0 @@ -""" -Integration tests for the :func:`esmvaltool.preprocessor.regrid.regrid` -function. 
- -""" - -import unittest - -import iris -import numpy as np -from numpy import ma - -import tests -from esmvaltool.preprocessor import regrid -from tests.unit.preprocessor._regrid import _make_cube - - -class Test(tests.Test): - def setUp(self): - shape = (3, 2, 2) - data = np.arange(np.prod(shape)).reshape(shape) - self.cube = _make_cube(data) - self.cs = iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS) - - def test_regrid__linear(self): - data = np.empty((1, 1)) - lons = iris.coords.DimCoord([1.5], - standard_name='longitude', - bounds=[[1, 2]], - units='degrees_east', - coord_system=self.cs) - lats = iris.coords.DimCoord([1.5], - standard_name='latitude', - bounds=[[1, 2]], - units='degrees_north', - coord_system=self.cs) - coords_spec = [(lats, 0), (lons, 1)] - grid = iris.cube.Cube(data, dim_coords_and_dims=coords_spec) - result = regrid(self.cube, grid, 'linear') - expected = np.array([[[1.5]], [[5.5]], [[9.5]]]) - self.assertArrayEqual(result.data, expected) - - def test_regrid__linear_extrapolate(self): - data = np.empty((3, 3)) - lons = iris.coords.DimCoord([0, 1.5, 3], - standard_name='longitude', - bounds=[[0, 1], [1, 2], [2, 3]], - units='degrees_east', - coord_system=self.cs) - lats = iris.coords.DimCoord([0, 1.5, 3], - standard_name='latitude', - bounds=[[0, 1], [1, 2], [2, 3]], - units='degrees_north', - coord_system=self.cs) - coords_spec = [(lats, 0), (lons, 1)] - grid = iris.cube.Cube(data, dim_coords_and_dims=coords_spec) - result = regrid(self.cube, grid, 'linear_extrapolate') - expected = [[[-3., -1.5, 0.], [0., 1.5, 3.], [3., 4.5, 6.]], - [[1., 2.5, 4.], [4., 5.5, 7.], [7., 8.5, 10.]], - [[5., 6.5, 8.], [8., 9.5, 11.], [11., 12.5, 14.]]] - self.assertArrayEqual(result.data, expected) - - def test_regrid__linear_extrapolate_with_mask(self): - data = np.empty((3, 3)) - grid = iris.cube.Cube(data) - lons = iris.coords.DimCoord([0, 1.5, 3], - standard_name='longitude', - bounds=[[0, 1], [1, 2], [2, 3]], - units='degrees_east', - coord_system=self.cs) - lats = iris.coords.DimCoord([0, 1.5, 3], - standard_name='latitude', - bounds=[[0, 1], [1, 2], [2, 3]], - units='degrees_north', - coord_system=self.cs) - coords_spec = [(lats, 0), (lons, 1)] - grid = iris.cube.Cube(data, dim_coords_and_dims=coords_spec) - result = regrid(self.cube, grid, 'linear') - expected = ma.empty((3, 3, 3)) - expected.mask = ma.masked - expected[:, 1, 1] = np.array([1.5, 5.5, 9.5]) - self.assertArrayEqual(result.data, expected) - - def test_regrid__nearest(self): - data = np.empty((1, 1)) - lons = iris.coords.DimCoord([1.6], - standard_name='longitude', - bounds=[[1, 2]], - units='degrees_east', - coord_system=self.cs) - lats = iris.coords.DimCoord([1.6], - standard_name='latitude', - bounds=[[1, 2]], - units='degrees_north', - coord_system=self.cs) - coords_spec = [(lats, 0), (lons, 1)] - grid = iris.cube.Cube(data, dim_coords_and_dims=coords_spec) - result = regrid(self.cube, grid, 'nearest') - expected = np.array([[[3]], [[7]], [[11]]]) - self.assertArrayEqual(result.data, expected) - - def test_regrid__nearest_extrapolate_with_mask(self): - data = np.empty((3, 3)) - lons = iris.coords.DimCoord([0, 1.6, 3], - standard_name='longitude', - bounds=[[0, 1], [1, 2], [2, 3]], - units='degrees_east', - coord_system=self.cs) - lats = iris.coords.DimCoord([0, 1.6, 3], - standard_name='latitude', - bounds=[[0, 1], [1, 2], [2, 3]], - units='degrees_north', - coord_system=self.cs) - coords_spec = [(lats, 0), (lons, 1)] - grid = iris.cube.Cube(data, dim_coords_and_dims=coords_spec) - result = 
regrid(self.cube, grid, 'nearest') - expected = ma.empty((3, 3, 3)) - expected.mask = ma.masked - expected[:, 1, 1] = np.array([3, 7, 11]) - self.assertArrayEqual(result.data, expected) - - def test_regrid__area_weighted(self): - data = np.empty((1, 1)) - lons = iris.coords.DimCoord([1.6], - standard_name='longitude', - bounds=[[1, 2]], - units='degrees_east', - coord_system=self.cs) - lats = iris.coords.DimCoord([1.6], - standard_name='latitude', - bounds=[[1, 2]], - units='degrees_north', - coord_system=self.cs) - coords_spec = [(lats, 0), (lons, 1)] - grid = iris.cube.Cube(data, dim_coords_and_dims=coords_spec) - result = regrid(self.cube, grid, 'area_weighted') - expected = np.array([1.499886, 5.499886, 9.499886]) - self.assertArrayAlmostEqual(result.data, expected) - - def test_regrid__unstructured_nearest(self): - data = np.empty((1, 1)) - lons = iris.coords.DimCoord([1.6], - standard_name='longitude', - bounds=[[1, 2]], - units='degrees_east', - coord_system=self.cs) - lats = iris.coords.DimCoord([1.6], - standard_name='latitude', - bounds=[[1, 2]], - units='degrees_north', - coord_system=self.cs) - coords_spec = [(lats, 0), (lons, 1)] - grid = iris.cube.Cube(data, dim_coords_and_dims=coords_spec) - # Replace 1d spatial coords with 2d spatial coords. - lons = self.cube.coord('longitude') - lats = self.cube.coord('latitude') - x, y = np.meshgrid(lons.points, lats.points) - lats = iris.coords.AuxCoord(x, **lats._as_defn()._asdict()) - lons = iris.coords.AuxCoord(y, **lons._as_defn()._asdict()) - self.cube.remove_coord('longitude') - self.cube.remove_coord('latitude') - self.cube.remove_coord('Pressure Slice') - self.cube.add_aux_coord(lons, (1, 2)) - self.cube.add_aux_coord(lats, (1, 2)) - result = regrid(self.cube, grid, 'unstructured_nearest') - expected = np.array([[[3]], [[7]], [[11]]]) - self.assertArrayAlmostEqual(result.data, expected) - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/integration/recipe_filler.yml b/tests/integration/recipe_filler.yml new file mode 100644 index 0000000000..c977dc4f5c --- /dev/null +++ b/tests/integration/recipe_filler.yml @@ -0,0 +1,221 @@ +--- +recipe: + - diagnostics: + variables: + variable: &variable + short_name: ta + project: CMIP5 + mip: Amon + start_year: 1960 + end_year: 1980 + preproc_dir: this/is/a/path + output_file: this/is/a/path/test_diag/test/CMIP5_HadGEM2-ES_Amon_historical_r1i1p1_ta_1960-1980.nc + + - variable: + <<: *variable + exp: [historical, rcp85] + preproc_dir: /test + output_file: /test/test_diag/test/CMIP5_HadGEM2-ES_Amon_historical-rcp85_r1i1p1_ta_1960-1980.nc + +has_additional_datasets: + - drs: default + variable: + <<: *variable + available_files: + - ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc + - ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc + - ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc + - drs: default + variable: + short_name: o3 + dataset: HadGEM2-ES + project: CMIP6 + mip: Amon + start_year: 1960 + end_year: 1980 + diagnostic: test_diag + available_files: + - o3_Amon_HadGEM2-ES_historical_r1i1p1_gn_193412-195911.nc + - o3_Amon_HadGEM2-ES_historical_r1i1p1_gn_195912-198411.nc + - o3_Amon_HadGEM2-ES_historical_r1i1p1_gn_198412-200511.nc + - drs: BADC + variable: + short_name: ta + dataset: [UKESM1-0-LL, HadGEM2-ES] + project: "*" + mip: Amon + start_year: 1960 + end_year: 1980 + diagnostic: test_diag + available_files: + - CMIP/MOHC/UKESM1-0-LL/historical/r1i1p1f2/Amon/ta/gn/latest/ta_Amon_UKESM1-0-LL_historical_r1i1p1f2_gn_193412-195911.nc + - 
CMIP/MOHC/UKESM1-0-LL/historical/r1i1p1f2/Amon/ta/gn/latest/ta_Amon_UKESM1-0-LL_historical_r1i1p1f2_gn_195912-198411.nc + - CMIP/MOHC/UKESM1-0-LL/historical/r1i1p1f2/Amon/ta/gn/latest/ta_Amon_UKESM1-0-LL_historical_r1i1p1f2_gn_198412-200511.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110329/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110329/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110329/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc + - drs: BADC + variable: + short_name: ta + project: CMIP5 + mip: Amon + start_year: 1960 + end_year: 2080 + ensemble: r1i1p1 + exp: [historical, rcp85] + available_files: + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110329/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110329/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110329/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_198413-200512.nc + - MOHC/HadGEM2-ES/rcp85/mon/atmos/Amon/r1i1p1/v20110329/ta/ta_Amon_HadGEM2-ES_rcp85_r1i1p1_200601-210012.nc + - drs: BADC + variable: + short_name: ta + dataset: "*" + project: CMIP5 + mip: Amon + start_year: 1960 + end_year: 2080 + ensemble: r1i1p1 + exp: [historical, rcp85] + available_files: + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110329/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110329/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110329/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_198413-200512.nc + - MOHC/HadGEM2-ES/rcp85/mon/atmos/Amon/r1i1p1/v20110329/ta/ta_Amon_HadGEM2-ES_rcp85_r1i1p1_200601-210012.nc + - MOHC/RUTH/historical/mon/atmos/Amon/r1i1p1/v20110329/ta/ta_Amon_RUTH_historical_r1i1p1_192212-200512.nc + - MOHC/RUTH/rcp85/mon/atmos/Amon/r1i1p1/v20110329/ta/ta_Amon_RUTH_rcp85_r1i1p1_200601-210012.nc + - drs: default + variable: + <<: *variable + start_year: 2010 + end_year: 2100 + available_files: + - ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc + - ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc + - ta_Amon_HadGEM2-ES_historical_r1i1p1_198413-200512.nc + - ta_Amon_HadGEM2-ES_rcp85_r1i1p1_200601-210012.nc + - drs: DKRZ + variable: + <<: *variable + start_year: 1980 + end_year: 2002 + available_files: + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_185912-188411.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_188412-190911.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_190912-193411.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc + - drs: DKRZ + variable: + <<: *variable + exp: [historical, rcp45, rcp85] + ensemble: r1i1p1 + start_year: 1980 + end_year: 2100 + available_files: + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_185912-188411.nc + - 
MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_188412-190911.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_190912-193411.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc + - MOHC/HadGEM2-ES/rcp45/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_rcp45_r1i1p1_200601-210012.nc + - MOHC/HadGEM2-ES/rcp85/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_rcp85_r1i1p1_200601-210012.nc + - drs: ETHZ + variable: + <<: *variable + start_year: 1980 + end_year: 2002 + available_files: + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_185912-188411.nc + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_188412-190911.nc + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_190912-193411.nc + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc + - drs: ETHZ + variable: + <<: *variable + dataset: "*" + start_year: 2000 + end_year: 2100 + ensemble: r1i1p1 + exp: [historical, rcp85] + available_files: + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_185912-188411.nc + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_188412-190911.nc + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_190912-193411.nc + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc + - rcp85/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_rcp85_r1i1p1_200601-210012.nc + - drs: ETHZ + variable: + <<: *variable + dataset: "*" + start_year: 1950 + end_year: 2100 + ensemble: r1i1p1 + exp: [historical, rcp85] + available_files: + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_185912-188411.nc + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_188412-190911.nc + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_190912-193411.nc + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc + - rcp85/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_rcp85_r1i1p1_200601-210012.nc + - historical/Amon/ta/RUTH/r1i1p1/ta_Amon_RUTH_historical_r1i1p1_185912-200512.nc + - rcp85/Amon/ta/RUTH/r1i1p1/ta_Amon_RUTH_rcp85_r1i1p1_200601-210012.nc +no_additional_datasets: + - drs: ETHZ + variable: + <<: *variable + start_year: 1950 + end_year: 2100 + ensemble: r1i1p1 + exp: [historical, rcp85] + available_files: + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_185912-188411.nc + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_188412-190911.nc + - 
historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_190912-193411.nc + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc + - rcp85/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_rcp85_r1i1p1_200601-210012.nc + - drs: BADC + variable: + short_name: ta + project: CMIP5 + mip: Amon + start_year: 1960 + end_year: 2080 + ensemble: r1i1p1 + exp: historical + available_files: + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110329/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110329/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110329/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_198413-200512.nc +bad_variable: + - drs: BADC + variable: + short_name: cow + project: CMIP5 + mip: Amon + start_year: 1930 + end_year: 1940 + ensemble: r1i1p1 + exp: historical + available_files: + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110329/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc +no_short_name: + - drs: BADC + variable: + project: CMIP5 + mip: Amon + start_year: 1930 + end_year: 1940 + ensemble: r1i1p1 + exp: historical + available_files: + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110329/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc diff --git a/tests/integration/test_cmorizer.py b/tests/integration/test_cmorizer.py new file mode 100644 index 0000000000..48f75b951a --- /dev/null +++ b/tests/integration/test_cmorizer.py @@ -0,0 +1,227 @@ +"""Tests for the module :mod:`esmvaltool.cmorizers.data.cmorize_obs`.""" + +import contextlib +import os +import sys + +import esmvalcore +import iris +import iris.coord_systems +import iris.coords +import iris.cube +import iris.fileformats +import numpy as np +import pytest +import yaml +from cf_units import Unit +from packaging import version + +from esmvaltool import ESMValToolDeprecationWarning +from esmvaltool.cmorizers.data.cmorizer import DataCommand + + +@contextlib.contextmanager +def keep_cwd(): + """Use a context manager because the cmorizer enters and stays in the + cmorization dir, which risks writing test outputs outside test-reports.""" + curr_path = os.getcwd() + try: + yield + finally: + os.chdir(curr_path) + + +def write_config_file(dirname): + """Replace configuration values for testing.""" + config_file = dirname / 'config-user.yml' + cfg = { + 'output_dir': str(dirname / 'output_dir'), + 'rootpath': { + 'RAWOBS': str(dirname / 'raw_stuff'), + }, + 'log_level': 'debug', + } + config_file.write_text(yaml.safe_dump(cfg, encoding=None)) + return str(config_file) + + +def _create_sample_cube(time_step): + """Create a quick CMOR-compliant sample cube.""" + coord_sys = iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS) + cube_data = np.ones((1, 3, 2, 2)) + cube_data[0, 1, 1, 1] = 22.
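The helper continues below by attaching one time point per monthly input file, with half-day bounds on a `days since 0000-01-01` calendar; the deliberately non-CMOR latitude/longitude units (`'K'`) presumably give the cmorizer something to fix, since `check_conversion` later asserts the output latitude is in `'degrees'`. A standalone sketch of the time-coordinate pattern (assuming only `iris` and `cf_units` are installed; the function name is illustrative):

```python
import iris.coords
from cf_units import Unit

def make_time_coord(step):
    # One mid-interval point with half-day bounds, mirroring the
    # per-file time axis built by _create_sample_cube below.
    return iris.coords.DimCoord(
        [step],
        standard_name='time',
        bounds=[[step - 0.5, step + 0.5]],
        units=Unit('days since 0000-01-01', calendar='gregorian'),
    )

coord = make_time_coord(0.5)
print(coord.points, coord.bounds)  # [0.5] [[0. 1.]]
```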
+ time = iris.coords.DimCoord([ + time_step, + ], + standard_name='time', + bounds=[[time_step - 0.5, time_step + 0.5]], + units=Unit('days since 0000-01-01', + calendar='gregorian')) + zcoord = iris.coords.DimCoord([0.5, 5., 50.], + var_name='depth', + standard_name='depth', + bounds=[[0., 2.5], [2.5, 25.], [25., 250.]], + units='m', + attributes={'positive': 'down'}) + lons = iris.coords.DimCoord([1.5, 2.5], + standard_name='longitude', + bounds=[[1., 2.], [2., 3.]], + units='K', + coord_system=coord_sys) + lats = iris.coords.DimCoord([1.5, 2.5], + standard_name='latitude', + bounds=[[1., 2.], [2., 3.]], + units='K', + coord_system=coord_sys) + coords_spec = [(time, 0), (zcoord, 1), (lats, 2), (lons, 3)] + cube = iris.cube.Cube(cube_data, dim_coords_and_dims=coords_spec) + return cube + + +def put_dummy_data(data_path): + """Create a small dummy netCDF file to be cmorized.""" + data_info = [ + # dir_name, file_name_prefix, var_name + ("temperature", "woa18_decav81B0_t", "t_an"), + ("salinity", "woa18_decav81B0_s", "s_an"), + ("oxygen", "woa18_all_o", "o_an"), + ("nitrate", "woa18_all_n", "n_an"), + ("phosphate", "woa18_all_p", "p_an"), + ("silicate", "woa18_all_i", "i_an"), + ] + + for (dir_name, file_name_prefix, var_name) in data_info: + file_dir = os.path.join(data_path, dir_name) + os.makedirs(file_dir) + for month, step in enumerate(np.arange(0.5, 12.5)): + gen_cube = _create_sample_cube(step) + file_name = f"{file_name_prefix}{month:02d}_01.nc" + file_path = os.path.join(file_dir, file_name) + gen_cube.var_name = var_name + iris.save(gen_cube, file_path) + + +def check_log_file(log_file, no_data=False): + """Check the cmorization log file.""" + with open(log_file, 'r') as log: + if no_data: + msg = "Data for WOA not found" + else: + msg = "Fixing data" + assert any(msg in line for line in log) + + +def check_output_exists(output_path): + """Check that the cmorizer produced the expected output files.""" + # e.g. Tier2/WOA/OBS6_WOA_clim_2018_Omon_thetao_200001-200012.nc + output_files = os.listdir(output_path) + assert len(output_files) == 8 + assert 'OBS6_WOA_clim' in output_files[0] + out_files = [s.split("_")[5] for s in output_files] + assert 'thetao' in out_files + assert 'so' in out_files + assert 'no3' in out_files + assert 'po4' in out_files + assert 'o2' in out_files + assert 'si' in out_files + assert 'sos' in out_files + assert 'tos' in out_files + + +def check_conversion(output_path): + """Check basic cmorization.""" + cube = iris.load_cube(os.path.join(output_path, + os.listdir(output_path)[0])) + assert cube.coord("time").units == Unit('days since 1950-1-1 00:00:00', + calendar='gregorian') + assert cube.coord("latitude").units == 'degrees' + + +@contextlib.contextmanager +def arguments(*args): + """Temporarily replace sys.argv with the given arguments.""" + backup = sys.argv + sys.argv = list(args) + yield + sys.argv = backup + + +@pytest.mark.skipif( + version.parse(esmvalcore.__version__) >= version.parse("2.14.0"), + reason='ESMValCore >= v2.14.0', +) +def test_cmorize_obs_woa_no_data_config_file(tmp_path): + """Test for example run of cmorize_obs command.""" + config_file = write_config_file(tmp_path) + os.makedirs(os.path.join(tmp_path, 'raw_stuff', 'Tier2')) + os.makedirs(os.path.join(tmp_path, 'output_dir')) + with keep_cwd(): + with pytest.raises(RuntimeError): + with pytest.warns(ESMValToolDeprecationWarning): + DataCommand().format('WOA', config_file=config_file) + + log_dir = os.path.join(tmp_path, 'output_dir') + log_file = os.path.join(log_dir, + os.listdir(log_dir)[0], 'run', 'main_log.txt') + check_log_file(log_file,
no_data=True) + + +@pytest.mark.skipif( + version.parse(esmvalcore.__version__) >= version.parse("2.14.0"), + reason='ESMValCore >= v2.14.0', +) +def test_cmorize_obs_woa_data_config_file(tmp_path): + """Test for example run of cmorize_obs command.""" + config_file = write_config_file(tmp_path) + data_path = os.path.join(tmp_path, 'raw_stuff', 'Tier2', 'WOA') + put_dummy_data(data_path) + with keep_cwd(): + with pytest.warns(ESMValToolDeprecationWarning): + DataCommand().format('WOA', config_file=config_file) + + log_dir = os.path.join(tmp_path, 'output_dir') + log_file = os.path.join(log_dir, + os.listdir(log_dir)[0], 'run', 'main_log.txt') + check_log_file(log_file, no_data=False) + output_path = os.path.join(log_dir, os.listdir(log_dir)[0], 'Tier2', 'WOA') + check_output_exists(output_path) + check_conversion(output_path) + + +@pytest.mark.skipif( + version.parse(esmvalcore.__version__) < version.parse("2.12.0"), + reason='ESMValCore < v2.12.0', +) +def test_cmorize_obs_woa_no_data(tmp_path): + """Test for example run of cmorize_obs command.""" + write_config_file(tmp_path) + os.makedirs(os.path.join(tmp_path, 'raw_stuff', 'Tier2')) + with keep_cwd(): + with pytest.raises(RuntimeError): + DataCommand().format('WOA', config_dir=str(tmp_path)) + + log_dir = os.path.join(tmp_path, 'output_dir') + log_file = os.path.join(log_dir, + os.listdir(log_dir)[0], 'run', 'main_log.txt') + check_log_file(log_file, no_data=True) + + +@pytest.mark.skipif( + version.parse(esmvalcore.__version__) < version.parse("2.12.0"), + reason='ESMValCore < v2.12.0', +) +def test_cmorize_obs_woa_data(tmp_path): + """Test for example run of cmorize_obs command.""" + write_config_file(tmp_path) + data_path = os.path.join(tmp_path, 'raw_stuff', 'Tier2', 'WOA') + put_dummy_data(data_path) + with keep_cwd(): + DataCommand().format('WOA', config_dir=str(tmp_path)) + + log_dir = os.path.join(tmp_path, 'output_dir') + log_file = os.path.join(log_dir, + os.listdir(log_dir)[0], 'run', 'main_log.txt') + check_log_file(log_file, no_data=False) + output_path = os.path.join(log_dir, os.listdir(log_dir)[0], 'Tier2', 'WOA') + check_output_exists(output_path) + check_conversion(output_path) diff --git a/tests/integration/test_data_finder.py b/tests/integration/test_data_finder.py deleted file mode 100644 index 484fdc99b7..0000000000 --- a/tests/integration/test_data_finder.py +++ /dev/null @@ -1,113 +0,0 @@ -"""Tests for _data_finder.py.""" -import os -import shutil -import tempfile - -import pytest -import yaml - -import esmvaltool._config -from esmvaltool._data_finder import (get_input_filelist, get_input_fx_filelist, - get_output_file) -from esmvaltool.cmor.table import read_cmor_tables - -# Initialize with standard config developer file -esmvaltool._config.CFG = esmvaltool._config.read_config_developer_file() -# Initialize CMOR tables -read_cmor_tables(esmvaltool._config.CFG) - -# Load test configuration -with open(os.path.join(os.path.dirname(__file__), 'data_finder.yml')) as file: - CONFIG = yaml.safe_load(file) - - -def print_path(path): - """Print path.""" - txt = path - if os.path.isdir(path): - txt += '/' - if os.path.islink(path): - txt += ' -> ' + os.readlink(path) - print(txt) - - -def tree(path): - """Print path, similar to the the `tree` command.""" - print_path(path) - for dirpath, dirnames, filenames in os.walk(path): - for dirname in dirnames: - print_path(os.path.join(dirpath, dirname)) - for filename in filenames: - print_path(os.path.join(dirpath, filename)) - - -def create_file(filename): - """Create an empty 
file.""" - dirname = os.path.dirname(filename) - if not os.path.exists(dirname): - os.makedirs(dirname) - - with open(filename, 'a'): - pass - - -def create_tree(path, filenames=None, symlinks=None): - """Create directory structure and files.""" - for filename in filenames or []: - create_file(os.path.join(path, filename)) - - for symlink in symlinks or []: - link_name = os.path.join(path, symlink['link_name']) - os.symlink(symlink['target'], link_name) - - -@pytest.mark.parametrize('cfg', CONFIG['get_output_file']) -def test_get_output_file(cfg): - """Test getting output name for preprocessed files.""" - output_file = get_output_file(cfg['variable'], cfg['preproc_dir']) - assert output_file == cfg['output_file'] - - -@pytest.fixture -def root(): - """Root function for tests.""" - dirname = tempfile.mkdtemp() - yield os.path.join(dirname, 'output1') - print("Directory structure was:") - tree(dirname) - shutil.rmtree(dirname) - - -@pytest.mark.parametrize('cfg', CONFIG['get_input_filelist']) -def test_get_input_filelist(root, cfg): - """Test retrieving input filelist.""" - create_tree(root, cfg.get('available_files'), - cfg.get('available_symlinks')) - - # Find files - rootpath = {cfg['variable']['project']: [root]} - drs = {cfg['variable']['project']: cfg['drs']} - input_filelist = get_input_filelist(cfg['variable'], rootpath, drs) - - # Test result - reference = [os.path.join(root, file) for file in cfg['found_files']] - assert sorted(input_filelist) == sorted(reference) - - -@pytest.mark.parametrize('cfg', CONFIG['get_input_fx_filelist']) -def test_get_input_fx_filelist(root, cfg): - """Test retrieving fx filelist.""" - create_tree(root, cfg.get('available_files'), - cfg.get('available_symlinks')) - - # Find files - rootpath = {cfg['variable']['project']: [root]} - drs = {cfg['variable']['project']: cfg['drs']} - fx_files = get_input_fx_filelist(cfg['variable'], rootpath, drs) - - # Test result - reference = { - fx_var: os.path.join(root, filename) if filename else None - for fx_var, filename in cfg['found_files'].items() - } - assert fx_files == reference diff --git a/tests/integration/test_diagnostic_run.py b/tests/integration/test_diagnostic_run.py index d5043284da..670f7088dd 100644 --- a/tests/integration/test_diagnostic_run.py +++ b/tests/integration/test_diagnostic_run.py @@ -1,16 +1,18 @@ """Test diagnostic script runs.""" import contextlib +import shutil import sys +from pathlib import Path from textwrap import dedent +import esmvalcore import pytest import yaml -from six import text_type +from esmvalcore._main import run +from packaging import version -from esmvaltool._main import run - -def write_config_user_file(dirname): +def write_config_file(dirname): config_file = dirname / 'config-user.yml' cfg = { 'output_dir': str(dirname / 'output_dir'), @@ -51,77 +53,90 @@ def check(result_file): assert not missing -SCRIPTS = { - 'diagnostic.py': - dedent(""" - import yaml - from esmvaltool.diag_scripts.shared import run_diagnostic - - def main(cfg): - with open(cfg['setting_name'], 'w') as file: - yaml.safe_dump(cfg, file) - - if __name__ == '__main__': - with run_diagnostic() as config: - main(config) - """), - 'diagnostic.ncl': - dedent(""" - begin - print("INFO Loading settings from " + getenv("settings")) - loadscript("$settings") - end - print("INFO Writing " + diag_script_info@setting_name) - n = str_get_nl() - result = "run_dir: " + config_user_info@run_dir + n +\ - "work_dir: " + config_user_info@work_dir + n +\ - "plot_dir: " + config_user_info@plot_dir + n +\ - "log_level: 
" + config_user_info@log_level + n +\ - "input_files: []" + n - - system("echo '" + result + "' > " + diag_script_info@setting_name) - """), - 'diagnostic.R': - dedent(""" - library(yaml) - - args <- commandArgs(trailingOnly = TRUE) - print(paste0("INFO Loading settings from ", args[1])) - settings <- yaml::read_yaml(args[1]) - - print(paste0("INFO Writing settings to ", settings$setting_name)) - yaml::write_yaml(settings, settings$setting_name) - """), - 'diagnostic.jl': - dedent(""" - import YAML - @info "Starting diagnostic script with" ARGS - config_file = ARGS[1] - cfg = YAML.load_file(config_file) - out_file = cfg["setting_name"] - @info "Copying file to" out_file - Base.Filesystem.cp(config_file, out_file) - @info "Done" - """), -} - - -@pytest.mark.install -@pytest.mark.parametrize('script_file, script', SCRIPTS.items()) -def test_diagnostic_run(tmp_path, script_file, script): +SCRIPTS = [ + 'diagnostic.py', + pytest.param('diagnostic.ncl', + marks=pytest.mark.skipif( + sys.platform == 'darwin', + reason="ESMValTool ncl not supported on OSX")), + pytest.param('diagnostic.R', + marks=pytest.mark.skipif( + sys.platform == 'darwin', + reason="ESMValTool R not supported on OSX")), + pytest.param('diagnostic.jl', + marks=pytest.mark.skipif( + sys.platform == 'darwin', + reason="ESMValTool Julia not supported on OSX")) +] + + +@pytest.mark.skipif( + version.parse(esmvalcore.__version__) >= version.parse("2.14.0"), + reason='ESMValCore >= v2.14.0', +) +@pytest.mark.installation +@pytest.mark.parametrize('script_file', SCRIPTS) +def test_diagnostic_run_config_file(tmp_path, script_file): + local_script_file = Path(__file__).parent / script_file + + recipe_file = tmp_path / 'recipe_test.yml' + script_file = tmp_path / script_file + result_file = tmp_path / 'result.yml' + + shutil.copy(local_script_file, script_file) + + # Create recipe + recipe = dedent(""" + documentation: + title: Test recipe + description: Recipe with no data. + authors: [andela_bouwe] + + diagnostics: + diagnostic_name: + scripts: + script_name: + script: {} + setting_name: {} + """.format(script_file, result_file)) + recipe_file.write_text(str(recipe)) + + config_file = write_config_file(tmp_path) + with arguments( + 'esmvaltool', + 'run', + '--config_file', + config_file, + str(recipe_file), + ): + run() + + check(result_file) + + +@pytest.mark.skipif( + version.parse(esmvalcore.__version__) < version.parse("2.12.0"), + reason='ESMValCore < v2.12.0', +) +@pytest.mark.installation +@pytest.mark.parametrize('script_file', SCRIPTS) +def test_diagnostic_run(tmp_path, script_file): + local_script_file = Path(__file__).parent / script_file recipe_file = tmp_path / 'recipe_test.yml' script_file = tmp_path / script_file result_file = tmp_path / 'result.yml' + config_dir = tmp_path / 'config' + config_dir.mkdir(exist_ok=True, parents=True) - # Write script to file - script_file.write_text(text_type(script)) + shutil.copy(local_script_file, script_file) # Create recipe recipe = dedent(""" documentation: + title: Test recipe description: Recipe with no data. 
- authors: [ande_bo] + authors: [andela_bouwe] diagnostics: diagnostic_name: @@ -130,13 +145,14 @@ def test_diagnostic_run(tmp_path, script_file, script): script: {} setting_name: {} """.format(script_file, result_file)) - recipe_file.write_text(text_type(recipe)) + recipe_file.write_text(str(recipe)) - config_user_file = write_config_user_file(tmp_path) + write_config_file(config_dir) with arguments( 'esmvaltool', - '-c', - config_user_file, + 'run', + '--config_dir', + str(config_dir), str(recipe_file), ): run() diff --git a/tests/integration/test_provenance.py b/tests/integration/test_provenance.py deleted file mode 100644 index 630a7c0517..0000000000 --- a/tests/integration/test_provenance.py +++ /dev/null @@ -1,47 +0,0 @@ -from prov.constants import PROV_ATTR_GENERATED_ENTITY, PROV_ATTR_USED_ENTITY -from prov.model import ProvDerivation - - -def get_file_record(prov, filename): - records = prov.get_record('file:' + filename) - assert records - return records[0] - - -def check_provenance(product): - prov = product.provenance - - entity = get_file_record(prov, product.filename) - assert entity == product.entity - - check_product_wasderivedfrom(product) - - -def check_product_wasderivedfrom(product): - """Check that product.filename was derived from product._ancestors.""" - print('checking provenance of file', product.filename) - prov = product.provenance - - def get_identifier(filename): - record = get_file_record(prov, filename) - return {record.identifier} - - # Check that the input and output file records exist - identifier = get_identifier(product.filename) - - relations = {r for r in prov.records if isinstance(r, ProvDerivation)} - for ancestor in product._ancestors: - input_identifier = get_identifier(ancestor.filename) - for record in relations: - if input_identifier == record.get_attribute(PROV_ATTR_USED_ENTITY): - assert identifier == record.get_attribute( - PROV_ATTR_GENERATED_ENTITY) - break - else: - assert False - - if not product._ancestors: - assert 'tracking_id' in product.attributes - else: - for ancestor in product._ancestors: - check_product_wasderivedfrom(ancestor) diff --git a/tests/integration/test_recipe.py b/tests/integration/test_recipe.py deleted file mode 100644 index d54f1969ab..0000000000 --- a/tests/integration/test_recipe.py +++ /dev/null @@ -1,696 +0,0 @@ -import os -from pprint import pformat -from textwrap import dedent - -import iris -import pytest -import yaml -from mock import create_autospec -from six import text_type - -import esmvaltool -from esmvaltool._recipe import TASKSEP, read_recipe_file -from esmvaltool._task import DiagnosticTask -from esmvaltool.diag_scripts.shared import ( - ProvenanceLogger, get_diagnostic_filename, get_plot_filename) -from esmvaltool.preprocessor import DEFAULT_ORDER, PreprocessingTask -from esmvaltool.preprocessor._io import concatenate_callback - -from .test_diagnostic_run import write_config_user_file -from .test_provenance import check_provenance - -MANDATORY_DATASET_KEYS = ( - 'cmor_table', - 'dataset', - 'diagnostic', - 'end_year', - 'filename', - 'frequency', - 'institute', - 'long_name', - 'mip', - 'modeling_realm', - 'preprocessor', - 'project', - 'short_name', - 'standard_name', - 'start_year', - 'units', -) - -MANDATORY_SCRIPT_SETTINGS_KEYS = ( - 'log_level', - 'script', - 'plot_dir', - 'run_dir', - 'work_dir', -) - -DEFAULT_PREPROCESSOR_STEPS = ( - 'cleanup', - 'cmor_check_data', - 'cmor_check_metadata', - 'concatenate', - 'extract_time', - 'fix_data', - 'fix_file', - 'fix_metadata', - 'load', - 'save', -) 
- - -@pytest.fixture -def config_user(tmp_path): - filename = write_config_user_file(tmp_path) - cfg = esmvaltool._config.read_config_user_file(filename, 'recipe_test') - cfg['synda_download'] = False - return cfg - - -def create_test_file(filename, tracking_id=None): - dirname = os.path.dirname(filename) - if not os.path.exists(dirname): - os.makedirs(dirname) - - attributes = {} - if tracking_id is not None: - attributes['tracking_id'] = tracking_id - cube = iris.cube.Cube([], attributes=attributes) - - iris.save(cube, filename) - - -@pytest.fixture -def patched_datafinder(tmp_path, monkeypatch): - def tracking_ids(i=0): - while True: - yield i - i += 1 - - tracking_id = tracking_ids() - - def find_files(_, filenames): - # Any occurrence of [something] in filename should have - # been replaced before this function is called. - for filename in filenames: - assert '[' not in filename - - filename = filenames[0] - filename = str(tmp_path / 'input' / filename) - filenames = [] - if filename.endswith('*.nc'): - filename = filename[:-len('*.nc')] - intervals = [ - '1990_1999', - '2000_2009', - '2010_2019', - ] - for interval in intervals: - filenames.append(filename + interval + '.nc') - else: - filenames.append(filename) - - for file in filenames: - create_test_file(file, next(tracking_id)) - return filenames - - monkeypatch.setattr(esmvaltool._data_finder, 'find_files', find_files) - - -DEFAULT_DOCUMENTATION = dedent(""" - documentation: - description: This is a test recipe. - authors: - - ande_bo - references: - - contact_authors - - acknow_project - projects: - - c3s-magic - """) - - -def get_recipe(tempdir, content, cfg): - """Save and load recipe content.""" - recipe_file = tempdir / 'recipe_test.yml' - # Add mandatory documentation section - content = text_type(DEFAULT_DOCUMENTATION + content) - recipe_file.write_text(content) - - recipe = read_recipe_file(str(recipe_file), cfg) - - return recipe - - -def test_simple_recipe(tmp_path, patched_datafinder, config_user): - - content = dedent(""" - datasets: - - dataset: bcc-csm1-1 - - preprocessors: - preprocessor_name: - extract_levels: - levels: 85000 - scheme: nearest - - diagnostics: - diagnostic_name: - additional_datasets: - - dataset: GFDL-ESM2G - variables: - ta: - preprocessor: preprocessor_name - project: CMIP5 - mip: Amon - exp: historical - ensemble: r1i1p1 - start_year: 1999 - end_year: 2002 - additional_datasets: - - dataset: MPI-ESM-LR - scripts: - script_name: - script: examples/diagnostic.py - custom_setting: 1 - """) - - recipe = get_recipe(tmp_path, content, config_user) - raw = yaml.safe_load(content) - # Perform some sanity checks on recipe expansion/normalization - print("Expanded recipe:") - assert len(recipe.diagnostics) == len(raw['diagnostics']) - for diagnostic_name, diagnostic in recipe.diagnostics.items(): - print(pformat(diagnostic)) - source = raw['diagnostics'][diagnostic_name] - - # Check that 'variables' have been read and updated - assert len(diagnostic['preprocessor_output']) == len( - source['variables']) - for variable_name, variables in diagnostic[ - 'preprocessor_output'].items(): - assert len(variables) == 3 - for variable in variables: - for key in MANDATORY_DATASET_KEYS: - assert key in variable and variable[key] - assert variable_name == variable['short_name'] - - # Check that the correct tasks have been created - variables = recipe.diagnostics['diagnostic_name']['preprocessor_output'][ - 'ta'] - tasks = {t for task in recipe.tasks for t in task.flatten()} - preproc_tasks = {t for t in tasks if 
isinstance(t, PreprocessingTask)} - diagnostic_tasks = {t for t in tasks if isinstance(t, DiagnosticTask)} - - assert len(preproc_tasks) == 1 - for task in preproc_tasks: - print("Task", task.name) - assert task.order == list(DEFAULT_ORDER) - for product in task.products: - variable = [ - v for v in variables if v['filename'] == product.filename - ][0] - assert product.attributes == variable - for step in DEFAULT_PREPROCESSOR_STEPS: - assert step in product.settings - assert len(product.files) == 2 - - assert len(diagnostic_tasks) == 1 - for task in diagnostic_tasks: - print("Task", task.name) - assert task.ancestors == list(preproc_tasks) - assert task.script == 'examples/diagnostic.py' - for key in MANDATORY_SCRIPT_SETTINGS_KEYS: - assert key in task.settings and task.settings[key] - assert task.settings['custom_setting'] == 1 - - -def test_default_preprocessor(tmp_path, patched_datafinder, config_user): - - content = dedent(""" - diagnostics: - diagnostic_name: - variables: - chl: - project: CMIP5 - mip: Oyr - exp: historical - start_year: 2000 - end_year: 2005 - ensemble: r1i1p1 - additional_datasets: - - {dataset: CanESM2} - scripts: null - """) - - recipe = get_recipe(tmp_path, content, config_user) - - assert len(recipe.tasks) == 1 - task = recipe.tasks.pop() - assert len(task.products) == 1 - product = task.products.pop() - preproc_dir = os.path.dirname(product.filename) - assert preproc_dir.startswith(str(tmp_path)) - - fix_dir = os.path.join( - preproc_dir, 'CMIP5_CanESM2_Oyr_historical_r1i1p1_chl_2000-2005_fixed') - defaults = { - 'load': { - 'callback': concatenate_callback, - }, - 'concatenate': {}, - 'fix_file': { - 'project': 'CMIP5', - 'dataset': 'CanESM2', - 'short_name': 'chl', - 'output_dir': fix_dir, - }, - 'fix_data': { - 'project': 'CMIP5', - 'dataset': 'CanESM2', - 'short_name': 'chl', - 'cmor_table': 'CMIP5', - 'mip': 'Oyr', - 'frequency': 'yr', - }, - 'fix_metadata': { - 'project': 'CMIP5', - 'dataset': 'CanESM2', - 'short_name': 'chl', - 'cmor_table': 'CMIP5', - 'mip': 'Oyr', - 'frequency': 'yr', - }, - 'extract_time': { - 'start_year': 2000, - 'end_year': 2006, - 'start_month': 1, - 'end_month': 1, - 'start_day': 1, - 'end_day': 1, - }, - 'cmor_check_metadata': { - 'cmor_table': 'CMIP5', - 'mip': 'Oyr', - 'short_name': 'chl', - 'frequency': 'yr', - }, - 'cmor_check_data': { - 'cmor_table': 'CMIP5', - 'mip': 'Oyr', - 'short_name': 'chl', - 'frequency': 'yr', - }, - 'cleanup': { - 'remove': [fix_dir] - }, - 'save': { - 'compress': False, - 'filename': product.filename, - } - } - assert product.settings == defaults - - -def test_reference_dataset(tmp_path, patched_datafinder, config_user, - monkeypatch): - - levels = [100] - get_reference_levels = create_autospec( - esmvaltool._recipe.get_reference_levels, return_value=levels) - monkeypatch.setattr(esmvaltool._recipe, 'get_reference_levels', - get_reference_levels) - - content = dedent(""" - preprocessors: - test_from_reference: - regrid: - target_grid: reference_dataset - scheme: linear - extract_levels: - levels: reference_dataset - scheme: linear - test_from_cmor_table: - extract_levels: - levels: - cmor_table: CMIP6 - coordinate: alt16 - scheme: nearest - - diagnostics: - diagnostic_name: - variables: - ta: &var - preprocessor: test_from_reference - project: CMIP5 - mip: Amon - exp: historical - start_year: 2000 - end_year: 2005 - ensemble: r1i1p1 - additional_datasets: - - {dataset: GFDL-CM3} - - {dataset: MPI-ESM-LR} - reference_dataset: MPI-ESM-LR - ch4: - <<: *var - preprocessor: test_from_cmor_table - 
additional_datasets: - - {dataset: GFDL-CM3} - - scripts: null - """) - - recipe = get_recipe(tmp_path, content, config_user) - - assert len(recipe.tasks) == 2 - - # Check that the reference dataset has been used - task = next(t for t in recipe.tasks - if t.name == 'diagnostic_name' + TASKSEP + 'ta') - assert len(task.products) == 2 - product = next( - p for p in task.products if p.attributes['dataset'] == 'GFDL-CM3') - reference = next( - p for p in task.products if p.attributes['dataset'] == 'MPI-ESM-LR') - - assert product.settings['regrid']['target_grid'] == reference.files[0] - assert product.settings['extract_levels']['levels'] == levels - - fix_dir = os.path.splitext(reference.filename)[0] + '_fixed' - get_reference_levels.assert_called_once_with( - reference.files[0], - 'CMIP5', - 'MPI-ESM-LR', - 'ta', - fix_dir - ) - - assert 'regrid' not in reference.settings - assert 'extract_levels' not in reference.settings - - # Check that levels have been read from CMOR table - task = next(t for t in recipe.tasks - if t.name == 'diagnostic_name' + TASKSEP + 'ch4') - assert len(task.products) == 1 - product = next(iter(task.products)) - assert product.settings['extract_levels']['levels'] == [ - 0, - 250, - 750, - 1250, - 1750, - 2250, - 2750, - 3500, - 4500, - 6000, - 8000, - 10000, - 12000, - 14500, - 16000, - 18000, - ] - - -def test_custom_preproc_order(tmp_path, patched_datafinder, config_user): - - content = dedent(""" - preprocessors: - default: &default - average_region: - coord1: longitude - coord2: latitude - multi_model_statistics: - span: overlap - statistics: [mean ] - custom: - custom_order: true - <<: *default - - diagnostics: - diagnostic_name: - variables: - chl_default: &chl - short_name: chl - preprocessor: default - project: CMIP5 - mip: Oyr - exp: historical - start_year: 2000 - end_year: 2005 - ensemble: r1i1p1 - additional_datasets: - - {dataset: CanESM2} - chl_custom: - <<: *chl - preprocessor: custom - scripts: null - """) - - recipe = get_recipe(tmp_path, content, config_user) - - assert len(recipe.tasks) == 2 - - default = next(t for t in recipe.tasks if tuple(t.order) == DEFAULT_ORDER) - custom = next(t for t in recipe.tasks if tuple(t.order) != DEFAULT_ORDER) - - assert custom.order.index('average_region') < custom.order.index( - 'multi_model_statistics') - assert default.order.index('average_region') > default.order.index( - 'multi_model_statistics') - - -def test_derive(tmp_path, patched_datafinder, config_user): - - content = dedent(""" - diagnostics: - diagnostic_name: - variables: - toz: - project: CMIP5 - mip: Amon - exp: historical - start_year: 2000 - end_year: 2005 - derive: true - force_derivation: true - additional_datasets: - - {dataset: GFDL-CM3, ensemble: r1i1p1} - scripts: null - """) - - recipe = get_recipe(tmp_path, content, config_user) - - # Check generated tasks - assert len(recipe.tasks) == 1 - task = recipe.tasks.pop() - - assert task.name == 'diagnostic_name' + TASKSEP + 'toz' - assert len(task.ancestors) == 2 - assert 'diagnostic_name' + TASKSEP + 'toz_derive_input_ps' in [ - t.name for t in task.ancestors - ] - assert 'diagnostic_name' + TASKSEP + 'toz_derive_input_tro3' in [ - t.name for t in task.ancestors - ] - - # Check product content of tasks - assert len(task.products) == 1 - product = task.products.pop() - assert 'derive' in product.settings - assert product.attributes['short_name'] == 'toz' - assert product.files - - ps_product = next(p for a in task.ancestors for p in a.products - if p.attributes['short_name'] == 'ps') - 
tro3_product = next(p for a in task.ancestors for p in a.products - if p.attributes['short_name'] == 'tro3') - assert ps_product.filename in product.files - assert tro3_product.filename in product.files - - -def test_derive_not_needed(tmp_path, patched_datafinder, config_user): - - content = dedent(""" - diagnostics: - diagnostic_name: - variables: - toz: - project: CMIP5 - mip: Amon - exp: historical - start_year: 2000 - end_year: 2005 - derive: true - force_derivation: false - additional_datasets: - - {dataset: GFDL-CM3, ensemble: r1i1p1} - scripts: null - """) - - recipe = get_recipe(tmp_path, content, config_user) - - # Check generated tasks - assert len(recipe.tasks) == 1 - task = recipe.tasks.pop() - - assert task.name == 'diagnostic_name/toz' - assert len(task.ancestors) == 1 - ancestor = [t for t in task.ancestors][0] - assert ancestor.name == 'diagnostic_name/toz_derive_input_toz' - - # Check product content of tasks - assert len(task.products) == 1 - product = task.products.pop() - assert 'derive' in product.settings - assert product.attributes['short_name'] == 'toz' - - assert len(ancestor.products) == 1 - ancestor_product = ancestor.products.pop() - assert ancestor_product.filename in product.files - assert ancestor_product.attributes['short_name'] == 'toz' - - -def test_derive_with_fx(tmp_path, patched_datafinder, config_user): - - content = dedent(""" - diagnostics: - diagnostic_name: - variables: - nbp_grid: - project: CMIP5 - mip: Lmon - exp: historical - start_year: 2000 - end_year: 2005 - derive: true - force_derivation: true - additional_datasets: - - {dataset: GFDL-CM3, ensemble: r1i1p1} - scripts: null - """) - - recipe = get_recipe(tmp_path, content, config_user) - - # Check generated tasks - assert len(recipe.tasks) == 1 - task = recipe.tasks.pop() - - assert task.name == 'diagnostic_name' + TASKSEP + 'nbp_grid' - assert len(task.ancestors) == 1 - ancestor = [t for t in task.ancestors][0] - assert ancestor.name == 'diagnostic_name/nbp_grid_derive_input_nbp' - - # Check product content of tasks - assert len(task.products) == 1 - product = task.products.pop() - assert 'derive' in product.settings - assert product.attributes['short_name'] == 'nbp_grid' - assert 'fx_files' in product.settings['derive'] - assert 'sftlf' in product.settings['derive']['fx_files'] - assert product.settings['derive']['fx_files']['sftlf'] is not None - - assert len(ancestor.products) == 1 - ancestor_product = ancestor.products.pop() - assert ancestor_product.filename in product.files - assert ancestor_product.attributes['short_name'] == 'nbp' - - -def test_diagnostic_task_provenance(tmp_path, patched_datafinder, config_user): - - script = tmp_path / 'diagnostic.py' - with script.open('w'): - pass - - content = dedent(""" - diagnostics: - diagnostic_name: - themes: - - phys - realms: - - atmos - variables: - chl: - project: CMIP5 - mip: Oyr - exp: historical - start_year: 2000 - end_year: 2005 - ensemble: r1i1p1 - additional_datasets: - - dataset: CanESM2 - scripts: - script_name: - script: {script} - """.format(script=script)) - - recipe = get_recipe(tmp_path, content, config_user) - diagnostic_task = recipe.tasks.pop() - - # Simulate Python diagnostic run - cfg = diagnostic_task.settings - input_files = [ - p.filename for a in diagnostic_task.ancestors for p in a.products - ] - record = { - 'caption': 'Test plot', - 'plot_file': get_plot_filename('test', cfg), - 'statistics': ['mean', 'var'], - 'domains': ['trop', 'et'], - 'plot_type': 'zonal', - 'authors': ['ande_bo'], - 'references': 
['acknow_project'], - 'ancestors': input_files, - } - - diagnostic_file = get_diagnostic_filename('test', cfg) - create_test_file(diagnostic_file) - with ProvenanceLogger(cfg) as provenance_logger: - provenance_logger.log(diagnostic_file, record) - - diagnostic_task._collect_provenance() - # Done simulating diagnostic run - - # Check resulting product - product = diagnostic_task.products.pop() - check_provenance(product) - for key in ('caption', 'plot_file'): - assert product.attributes[key] == record[key] - assert product.entity.get_attribute('attribute:' + - key).pop() == record[key] - - # Check that diagnostic script tags have been added - with open( - os.path.join( - os.path.dirname(esmvaltool.__file__), - 'config-references.yml')) as file: - tags = yaml.safe_load(file) - for key in ('statistics', 'domains', 'authors', 'references'): - assert product.attributes[key] == tuple( - tags[key][k] for k in record[key]) - - # Check that recipe diagnostic tags have been added - src = yaml.safe_load(DEFAULT_DOCUMENTATION + content) - for key in ('realms', 'themes'): - value = src['diagnostics']['diagnostic_name'][key] - assert product.attributes[key] == tuple(tags[key][k] for k in value) - - # Check that recipe tags have been added - recipe_record = product.provenance.get_record('recipe:recipe_test.yml') - assert len(recipe_record) == 1 - for key in ('description', 'references'): - value = src['documentation'][key] - if key == 'references': - value = ', '.join(tags[key][k] for k in value) - assert recipe_record[0].get_attribute('attribute:' + - key).pop() == value - - # Test that provenance was saved to netcdf, xml and svg plot - cube = iris.load(product.filename)[0] - assert 'provenance' in cube.attributes - prefix = os.path.splitext(product.filename)[0] + '_provenance' - assert os.path.exists(prefix + '.xml') - assert os.path.exists(prefix + '.svg') diff --git a/tests/integration/test_recipe_filler.py b/tests/integration/test_recipe_filler.py new file mode 100644 index 0000000000..b78ac8c5f8 --- /dev/null +++ b/tests/integration/test_recipe_filler.py @@ -0,0 +1,211 @@ +"""Tests for the recipe_filler utility.""" +import contextlib +import os +import shutil +import sys +import tempfile + +import pytest +import yaml + +from esmvaltool.utils.recipe_filler import run + + +# Load test configuration +with open(os.path.join(os.path.dirname(__file__), + 'recipe_filler.yml')) as file: + CONFIG = yaml.safe_load(file) + + +@contextlib.contextmanager +def arguments(*args): + """Temporarily replace sys.argv with the given arguments.""" + backup = sys.argv + sys.argv = list(args) + yield + sys.argv = backup + + +def print_path(path): + """Print path.""" + txt = path + if os.path.isdir(path): + txt += '/' + if os.path.islink(path): + txt += ' -> ' + os.readlink(path) + print(txt) + + +def tree(path): + """Print path, similar to the `tree` command.""" + print_path(path) + for dirpath, dirnames, filenames in os.walk(path): + for dirname in dirnames: + print_path(os.path.join(dirpath, dirname)) + for filename in filenames: + print_path(os.path.join(dirpath, filename)) + + +def create_file(filename): + """Create an empty file.""" + dirname = os.path.dirname(filename) + if not os.path.exists(dirname): + os.makedirs(dirname) + + with open(filename, 'a'): + pass + + +def create_tree(path, filenames=None, symlinks=None): + """Create directory structure and files.""" + for filename in filenames or []: + create_file(os.path.join(path, filename)) + + for symlink in symlinks or []: + link_name = os.path.join(path, symlink['link_name']) + os.symlink(symlink['target'], link_name) + + +def
write_config_user_file(dirname, file_path, drs): + """Write a user configuration file and return its path.""" + config_file = dirname / 'config-user.yml' + cfg = { + 'log_level': 'info', + 'output_dir': str(dirname / 'recipe_filler_output'), + 'rootpath': { + 'CMIP5': str(dirname / file_path), + 'CMIP6': str(dirname / file_path), + }, + 'drs': { + 'CMIP5': drs, + 'CMIP6': drs, + }, + } + config_file.write_text(yaml.safe_dump(cfg, encoding=None)) + return str(config_file) + + +def write_recipe(dirname, recipe_dict): + """Write a recipe with the given diagnostics and return its path.""" + recipe_file = dirname / 'recipe.yml' + diags = {'diagnostics': recipe_dict} + recipe_file.write_text(yaml.safe_dump(diags, encoding=None)) + return str(recipe_file) + + +@pytest.fixture +def root(): + """Provide a temporary root directory and print its tree on teardown.""" + dirname = tempfile.mkdtemp() + yield os.path.join(dirname, 'output1') + print("Directory structure was:") + tree(dirname) + shutil.rmtree(dirname) + + +def setup_files(tmp_path, root, cfg): + """Create the user config, input recipe, and output recipe path.""" + user_config_file = write_config_user_file(tmp_path, root, cfg['drs']) + diagnostics = {} + diagnostics["test_diagnostic"] = {} + diagnostics["test_diagnostic"]["variables"] = {} + diagnostics["test_diagnostic"]["variables"]["test_var"] = cfg["variable"] + recipe = write_recipe(tmp_path, diagnostics) + output_recipe = str(tmp_path / "recipe_auto.yml") + + return user_config_file, recipe, output_recipe + + +@pytest.mark.parametrize('cfg', CONFIG['has_additional_datasets']) +def test_adding_datasets(tmp_path, root, cfg): + """Test retrieving additional datasets.""" + create_tree(root, cfg.get('available_files'), + cfg.get('available_symlinks')) + + user_config_file, recipe, output_recipe = setup_files(tmp_path, root, cfg) + + with arguments( + 'recipe_filler', + recipe, + '-c', + user_config_file, + '-o', + output_recipe, + ): + run() + + with open(output_recipe, 'r') as file: + autofilled_recipe = yaml.safe_load(file) + diag = autofilled_recipe["diagnostics"]["test_diagnostic"] + var = diag["variables"]["test_var"] + assert "additional_datasets" in var + + +@pytest.mark.parametrize('cfg', CONFIG['no_additional_datasets']) +def test_not_adding_datasets(tmp_path, root, cfg): + """Test retrieving no additional datasets.""" + create_tree(root, cfg.get('available_files'), + cfg.get('available_symlinks')) + + user_config_file, recipe, output_recipe = setup_files(tmp_path, root, cfg) + + with arguments( + 'recipe_filler', + recipe, + '-c', + user_config_file, + '-o', + output_recipe, + ): + run() + + with open(output_recipe, 'r') as file: + autofilled_recipe = yaml.safe_load(file) + diag = autofilled_recipe["diagnostics"]["test_diagnostic"] + var = diag["variables"]["test_var"] + assert "additional_datasets" not in var + + +def test_bad_var(tmp_path, root): + """Test that an invalid variable short_name adds no datasets.""" + cfg = CONFIG['bad_variable'][0] + user_config_file, recipe, output_recipe = setup_files(tmp_path, root, cfg) + + # This doesn't fail, and it shouldn't: recipe_filler can go on + # and look for data for other valid variables + with arguments( + 'recipe_filler', + recipe, + '-c', + user_config_file, + '-o', + output_recipe, + ): + run() + + with open(output_recipe, 'r') as file: + autofilled_recipe = yaml.safe_load(file) + diag = autofilled_recipe["diagnostics"]["test_diagnostic"] + var = diag["variables"]["test_var"] + assert "additional_datasets" not in var + + +def test_no_short_name(tmp_path, root): + """Test that a variable without a short_name adds no datasets.""" + cfg = CONFIG['no_short_name'][0] + user_config_file, recipe, output_recipe = setup_files(tmp_path, root, cfg) + + # This doesn't fail, and it
shouldn't since it can go on + # and look for data for other valid variables + with arguments( + 'recipe_filler', + recipe, + '-c', + user_config_file, + '-o', + output_recipe, + ): + run() + + with open(output_recipe, 'r') as file: + autofilled_recipe = yaml.safe_load(file) + diag = autofilled_recipe["diagnostics"]["test_diagnostic"] + var = diag["variables"]["test_var"] + assert "additional_datasets" not in var diff --git a/tests/integration/test_recipes_loading.py b/tests/integration/test_recipes_loading.py new file mode 100644 index 0000000000..38a0217d6e --- /dev/null +++ b/tests/integration/test_recipes_loading.py @@ -0,0 +1,166 @@ +"""Test recipes are well formed.""" +from pathlib import Path + +import esmvalcore +import esmvalcore.cmor.check +import pytest +import yaml +from esmvalcore import __version__ as core_ver +from packaging import version + +import esmvaltool + +if version.parse(core_ver) < version.parse('2.8.0'): + from esmvalcore._config import _config + from esmvalcore.experimental.config import CFG + + # Work around + # https://github.com/ESMValGroup/ESMValCore/issues/1579 + def clear(self): + self._mapping.clear() + + esmvalcore.experimental.config.Config.clear = clear +else: + from esmvalcore.config import CFG, _config + + +@pytest.fixture +def session(mocker, tmp_path): + mocker.patch.dict( + CFG, + auxiliary_data_dir=str(tmp_path / 'auxiliary_data_dir'), + check_level=esmvalcore.cmor.check.CheckLevels['DEFAULT'], + drs={}, + search_esgf='never', + rootpath={'default': str(tmp_path)}, + ) + session = CFG.start_session('test') + + # The patched_datafinder fixture does not return the correct input + # directory structure, so make sure it is set to flat for every project + for project in _config.CFG: + mocker.patch.dict(_config.CFG[project]['input_dir'], default='/') + + return session + + +def _get_recipes(): + recipes_path = Path(esmvaltool.__file__).absolute().parent / 'recipes' + recipes = sorted(recipes_path.glob("**/recipe*.yml")) + ids = tuple(str(p.relative_to(recipes_path)) for p in recipes) + return recipes, ids + + +RECIPES, IDS = _get_recipes() + + +@pytest.mark.parametrize('recipe_file', RECIPES, ids=IDS) +def test_recipe_valid(recipe_file, session, mocker): + """Check that recipe files are valid ESMValTool recipes.""" + # Mock input files + try: + # Since ESValCore v2.8.0 + import esmvalcore.local + module = esmvalcore.local + method = 'glob' + except ImportError: + # Prior to ESMValCore v2.8.0 + import esmvalcore._data_finder + module = esmvalcore._data_finder + method = 'find_files' + + mocker.patch.object( + module, + method, + autospec=True, + side_effect=lambda *_, **__: [ + 'test_0001-1849.nc', + 'test_1850-9999.nc', + ], + ) + + # Do not remove unexpanded supplementaries. These cannot be expanded + # because the mocked file finding above does not produce facets. 
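+ # (The esmvalcore.dataset module, and with it the patched + # _remove_unexpanded_supplementaries method, appears only in newer + # ESMValCore releases, hence the guarded import below.)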
+ try: + import esmvalcore.dataset + except ImportError: + pass + else: + mocker.patch.object( + esmvalcore.dataset.Dataset, + '_remove_unexpanded_supplementaries', + autospec=True, + spec_set=True, + ) + + # Mock vertical levels + # Account for module change after esmvalcore=2.7 + if version.parse(core_ver) <= version.parse('2.7.1'): + import esmvalcore._recipe + mocker.patch.object( + esmvalcore._recipe, + 'get_reference_levels', + autospec=True, + spec_set=True, + side_effect=lambda *_, **__: [1, 2], + ) + else: + import esmvalcore._recipe.recipe + mocker.patch.object( + esmvalcore._recipe.recipe, + 'get_reference_levels', + autospec=True, + spec_set=True, + side_effect=lambda *_, **__: [1, 2], + ) + + # Mock valid NCL version + # Account for module change after esmvalcore=2.7 + if version.parse(core_ver) <= version.parse('2.7.1'): + import esmvalcore._recipe_checks + mocker.patch.object( + esmvalcore._recipe_checks, + 'ncl_version', + autospec=True, + spec_set=True, + ) + else: + import esmvalcore._recipe.check + mocker.patch.object( + esmvalcore._recipe.check, + 'ncl_version', + autospec=True, + spec_set=True, + ) + + # Mock interpreters installed + def which(executable): + if executable in ('julia', 'ncl', 'python', 'Rscript'): + path = '/path/to/' + executable + else: + path = None + return path + + mocker.patch.object( + esmvalcore._task, + 'which', + autospec=True, + side_effect=which, + ) + + # Create a shapefile for extract_shape preprocessor if needed + recipe = yaml.safe_load(recipe_file.read_text()) + for preproc in recipe.get('preprocessors', {}).values(): + extract_shape = preproc.get('extract_shape') + if extract_shape and 'shapefile' in extract_shape: + filename = Path( + session['auxiliary_data_dir']) / extract_shape['shapefile'] + filename.parent.mkdir(parents=True, exist_ok=True) + filename.touch() + + # Account for module change after esmvalcore=2.7 + if version.parse(core_ver) <= version.parse('2.7.1'): + config_user = session.to_config_user() + esmvalcore._recipe.read_recipe_file(recipe_file, config_user) + else: + esmvalcore._recipe.recipe.read_recipe_file(recipe_file, session) diff --git a/tests/parse_pymon.py b/tests/parse_pymon.py new file mode 100644 index 0000000000..e63da518cc --- /dev/null +++ b/tests/parse_pymon.py @@ -0,0 +1,77 @@ +""" +Parse and display test memory. + +Uses pytest-monitor plugin from https://github.com/CFMTech/pytest-monitor +Lots of other metrics can be read from the file via sqlite parsing; +currently just MEM_USAGE (RES memory, in MB) and TOTAL_TIME are used.
+""" +import sqlite3 +import sys +from operator import itemgetter + + +def _get_big_mem_tests(cur): + """Find out which tests are heavy on memory.""" + big_mem_tests = [] + for row in cur.execute('select ITEM, MEM_USAGE from TEST_METRICS;'): + test_name, memory_used = row[0], row[1] + if memory_used > 1000.: # test result in RES mem in MB + print("Test name / memory (MB)") + print(test_name, memory_used) + elif memory_used > 4000.: + big_mem_tests.append((test_name, memory_used)) + + return big_mem_tests + + +def _get_slow_tests(cur): + """Find out which tests are slow.""" + timed_tests = [] + sq_command = \ + 'select ITEM, ITEM_VARIANT, ITEM_PATH, TOTAL_TIME from TEST_METRICS;' + for row in cur.execute(sq_command): + test_name, test_var, test_path, time_used = \ + row[0], row[1], row[2], row[3] + timed_tests.append((test_name, test_var, test_path, time_used)) + + timed_tests = sorted(timed_tests, reverse=True, key=itemgetter(3)) + hundred_slowest_tests = timed_tests[0:100] + print("List of 100 slowest tests (duration, path, name") + if hundred_slowest_tests: + for _, test_var, pth, test_duration in hundred_slowest_tests: + pth = pth.replace(".", "/") + ".py" + executable_test = pth + "::" + test_var + if ", " in executable_test: + executable_test = executable_test.replace(", ", "-") + mssg = "%.2f" % test_duration + "s " + executable_test + print(mssg) + else: + print("Could not retrieve test timing data.") + + +def _parse_pymon_database(): + # Create a SQL connection to our SQLite database + con = sqlite3.connect("../.pymon") + cur = con.cursor() + + # The result of a "cursor.execute" can be iterated over by row + # first look at memory + print("Looking for tests that exceed 1GB resident memory.") + big_mem_tests = _get_big_mem_tests(cur) + + # then look at total time (in seconds) + # (user time is availbale too via USER_TIME, kernel time via KERNEL_TIME) + _get_slow_tests(cur) + + # Be sure to close the connection + con.close() + + # Throw a sys exit so test fails if we have >4GB tests + if big_mem_tests: + print("Some tests exceed 4GB of RES memory, look into them!") + print(big_mem_tests) + sys.exit(1) + + +if __name__ == '__main__': + _parse_pymon_database() diff --git a/tests/system/__init__.py b/tests/system/__init__.py deleted file mode 100644 index 5f7877c08d..0000000000 --- a/tests/system/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Test running esmvaltool""" diff --git a/tests/system/config-test.yml b/tests/system/config-test.yml deleted file mode 100644 index 514ac2ce28..0000000000 --- a/tests/system/config-test.yml +++ /dev/null @@ -1,46 +0,0 @@ -############################################################################### -# Diagnostic test configuration file for the ESMValTool -# -# './setup.py test' will look for this file in the following locations -# and use the first config-test.yml file found: -# - current working directory -# - ~/.esmvaltool/ -# - ESMValTool/tests/test_diagnostics/ -# -############################################################################### ---- - -test: - # Execute system/diagnostic tests [false]/true - run: false - # Simulate input data using the dummydata module [true]/false - simulate_input: true - # Limit testing/generating reference data to the following recipes - # An empty list means any recipe in esmvaltool/nml - recipes: [ - recipe_MyVar.yml, - ] - -# Reference data configuration -reference: - # Directory containing reference output - output: ~/esmvaltool_reference_output - # Generate reference data instead of checking [false]/true - 
generate: false - -# Template for the user configuration file -user: - write_plots: true - write_netcdf: true - log_level: warning - exit_on_warning: false - output_file_type: pdf - save_intermediary_cubes: true - - rootpath: - CMIP5: ~/esmvaltool_simulated_input - OBS: ~/esmvaltool_simulated_input - default: ~/esmvaltool_simulated_input - - drs: - CMIP5: default diff --git a/tests/system/data_simulator.py b/tests/system/data_simulator.py deleted file mode 100644 index 3efb5a79a0..0000000000 --- a/tests/system/data_simulator.py +++ /dev/null @@ -1,102 +0,0 @@ -"""Simulate test data for `esmvaltool`.""" -import os -import sys -import tempfile -import time - -import numpy as np - -from esmvaltool._config import read_config_user_file -from esmvaltool._recipe import read_recipe_file - - -def get_input_filename(variable, rootpath, drs): - """Get a valid input filename.""" - # TODO: implement this according to esmvaltool._data_finder.py - # or patch get_input_filelist there. - return tempfile.NamedTemporaryFile().name + '.nc' - - -def write_data_file(short_name, filename, field, start_year, end_year): - """Write a file containing simulated data.""" - from dummydata.model2 import Model2 - from dummydata.model3 import Model3 - - if 'T2M' in field: - writer = Model2 - elif 'T3M' in field: - writer = Model3 - else: - raise NotImplementedError( - "Cannot create a model from field {}".format(field)) - - # TODO: Maybe this should be made configurable per diagnostic or model - cfg = { - 'ta': { - 'method': 'gaussian_blobs', - 'low': 223, - 'high': 303, - }, - 'pr': { - 'method': 'gaussian_blobs', - 'low': 1e-7, - 'high': 2e-4, - } - } - - kwargs = cfg[short_name] if short_name in cfg else {} - - writer( - var=short_name, - oname=filename, - start_year=start_year, - stop_year=end_year, - **kwargs) - - -def simulate_input_data(recipe_file, config_user_file=None): - """Simulate data for variables defined in recipe""" - if config_user_file: - user_config = read_config_user_file( - config_file=config_user_file, recipe_name='') - else: - user_config = { - 'rootpath': { - 'default': '.', - }, - 'drs': {}, - } - - recipe = read_recipe_file(recipe_file, user_config, initialize_tasks=False) - - start_time = time.time() - - for diagnostic in recipe.diagnostics.values(): - np.random.seed(0) - for variables in diagnostic['variables'].values(): - for variable in variables: - filename = get_input_filename( - variable=variable, - rootpath=user_config['rootpath'], - drs=user_config['drs']) - dirname = os.path.dirname(filename) - if not os.path.exists(dirname): - print("Creating {}".format(dirname)) - os.makedirs(dirname) - - print("Writing {}".format(filename)) - write_data_file( - short_name=variable['short_name'], - filename=filename, - field=variable['field'], - start_year=variable['start_year'], - end_year=variable['end_year'], - ) - - print( - "Simulating data took {:.0f} seconds".format(time.time() - start_time)) - - -if __name__ == '__main__': - for path in sys.argv[1:]: - simulate_input_data(recipe_file=path, config_user_file=None) diff --git a/tests/system/esmvaltool_testlib.py b/tests/system/esmvaltool_testlib.py deleted file mode 100644 index 61482fa985..0000000000 --- a/tests/system/esmvaltool_testlib.py +++ /dev/null @@ -1,227 +0,0 @@ -"""Provide a class for testing esmvaltool.""" - -import glob -import os -import shutil -import sys -from unittest import SkipTest - -import numpy as np -import yaml -from easytest import EasyTest - -import esmvaltool - - -def _load_config(filename=None): - """Load test 
configuration""" - if filename is None: - # look in default locations for config-test.yml - config_file = 'config-test.yml' - default_locations = [ - '.', - '~/.esmvaltool', - os.path.dirname(__file__), - ] - for path in default_locations: - filepath = os.path.join(os.path.expanduser(path), config_file) - if os.path.exists(filepath): - filename = os.path.abspath(filepath) - break - - with open(filename, 'r') as file: - cfg = yaml.safe_load(file) - - cfg['configfile'] = filename - cfg['reference']['output'] = os.path.abspath( - os.path.expanduser(cfg['reference']['output'])) - - if cfg['test'].get('recipes', []) == []: - script_root = esmvaltool.get_script_root() - recipe_glob = os.path.join(script_root, 'nml', 'recipe_*.yml') - cfg['test']['recipes'] = glob.glob(recipe_glob) - - return cfg - - -_CFG = _load_config() - -RECIPES = _CFG['test']['recipes'] - - -def _create_config_user_file(output_directory): - """Write a config-user.yml file. - - Write a configuration file for running ESMValTool - such that it writes all output to `output_directory`. - """ - cfg = _CFG['user'] - - cfg['output_dir'] = output_directory - - # write to file - filename = os.path.join(output_directory, 'config-user.yml') - with open(filename, 'w') as file: - yaml.safe_dump(cfg, file) - - return filename - - -class ESMValToolTest(EasyTest): - """Main class for ESMValTool test runs.""" - - def __init__(self, recipe, output_directory, ignore='', **kwargs): - """ - Create ESMValToolTest instance - - recipe: str - The filename of the recipe that should be tested. - output_directory : str - The name of a directory where results can be stored. - ignore: str or iterable of str - Glob patterns of files to be ignored when testing. - """ - if not _CFG['test']['run']: - raise SkipTest("System tests disabled in {}".format( - _CFG['configfile'])) - - self.ignore = (ignore, ) if isinstance(ignore, str) else ignore - - script_root = esmvaltool.get_script_root() - - # Set recipe path - if not os.path.exists(recipe): - recipe = os.path.join( - os.path.dirname(script_root), 'recipes', recipe) - self.recipe_file = os.path.abspath(recipe) - - # Simulate input data? - self.simulate_input = _CFG['test']['simulate_input'] - - # Create reference output? 
- self.create_reference_output = _CFG['reference']['generate'] - - # Define reference output path - reference_dir = os.path.join( - _CFG['reference']['output'], - os.path.splitext(os.path.basename(self.recipe_file))[0]) - - # If reference data is neither available nor should be generated, skip - if not (os.path.exists(reference_dir) or self.create_reference_output): - raise SkipTest( - "No reference data available for recipe {} in {}".format( - recipe, _CFG['reference']['output'])) - - # Write ESMValTool configuration file - self.config_user_file = _create_config_user_file(output_directory) - - super(ESMValToolTest, self).__init__( - exe='esmvaltool', - args=['-n', self.recipe_file, '-c', self.config_user_file], - output_directory=output_directory, - refdirectory=reference_dir, - **kwargs) - - def run(self, **kwargs): - """Run tests or generate reference data.""" - if self.simulate_input: - from .data_simulator import simulate_input_data - simulate_input_data( - recipe_file=self.recipe_file, - config_user_file=self.config_user_file) - - if self.create_reference_output: - self.generate_reference_output() - raise SkipTest("Generated reference data instead of running test") - else: - super(ESMValToolTest, self).run_tests(**kwargs) - - def generate_reference_output(self): - """Generate reference output. - - Generate reference data by executing the recipe and then moving - results to the output directory. - """ - if not os.path.exists(self.refdirectory): - self._execute() - shutil.move(self.output_directory, - os.path.dirname(self.refdirectory)) - else: - print("Warning: not generating reference data, reference " - "directory {} already exists.".format(self.refdirectory)) - - def _execute(self): - """Execute ESMValTool - - Override the _execute method because we want to run in our own - Python instance to get coverage reporting and we want to update - the location of `self.output_directory` afterwards. - """ - # run ESMValTool - sys.argv[1:] = self.args - esmvaltool.main.run() - - # Update the output directory to point to the output of the run - output_directory = self.output_directory # noqa - - output = [] - for path in os.listdir(output_directory): - path = os.path.join(output_directory, path) - if os.path.isdir(path): - output.append(path) - - if not output: - raise OSError( - "Output directory not found in location {}. " - "Probably ESMValTool failed to create any output.".format( - output_directory)) - - if len(output) > 1: - print("Warning: found multiple output directories:\n{}\nin output " - "location {}\nusing the first one.".format( - output, output_directory)) - - self.output_directory = output[0] + os.sep # noqa - - def _get_files_from_refdir(self): - """Get a list of files from reference directory. - - Ignore files that match patterns in self.ignore. - - Override this method of easytest.EasyTest to be able to ignore certain - files. - """ - from fnmatch import fnmatchcase - - matches = [] - for root, _, filenames in os.walk(self.refdirectory): - for filename in filenames: - path = os.path.join(root, filename) - relpath = os.path.relpath(path, start=self.refdirectory) - for pattern in self.ignore: - if fnmatchcase(relpath, pattern): - break - else: - matches.append(path) - - return matches - - def _compare_netcdf_values(self, F1, F2, allow_subset=False): - """Compare two netCDF4 Dataset instances. - - Check if dataset2 contains the same variable values as dataset1. 
- - Override this method of easytest.EasyTest because it is broken - for the case where value1 and value2 have no length. - """ - if allow_subset: # allow that only a subset of data is compared - raise NotImplementedError - - for key in F1.variables: - values1 = F1.variables[key][:] - values2 = F2.variables[key][:] - - if not np.array_equal(values1, values2): - return False - - return True diff --git a/tests/system/test_recipes.py b/tests/system/test_recipes.py deleted file mode 100644 index 0825707bd4..0000000000 --- a/tests/system/test_recipes.py +++ /dev/null @@ -1,35 +0,0 @@ -"""Test script to compare the output of ESMValTool against previous runs.""" - -import shutil -import tempfile - -import pytest - -from .esmvaltool_testlib import RECIPES, ESMValToolTest - - -@pytest.fixture -def output_directory(): - """Create a directory for storing ESMValTool output.""" - tmp = tempfile.mkdtemp() - yield tmp - shutil.rmtree(tmp, ignore_errors=True) - - -@pytest.mark.parametrize("recipe", RECIPES) -def test_recipe(output_directory, recipe): # noqa - """Create a test for each recipe in RECIPES and run those.""" - test = ESMValToolTest( - recipe=recipe, - output_directory=output_directory, - ignore=['tmp/*/*', '*log*.txt', '*.log'], - checksum_exclude=['pdf', 'ps', 'png', 'eps', 'epsi', 'nc']) - - test.run( - graphics=None, - files='all', - check_size_gt_zero=True, - checksum_files='all', - check_file_content=['nc']) - - assert test.sucess diff --git a/tests/unit/check_r_code.R b/tests/unit/check_r_code.R index bf30b2d045..3f8b58255e 100644 --- a/tests/unit/check_r_code.R +++ b/tests/unit/check_r_code.R @@ -5,32 +5,30 @@ check_paths <- list("esmvaltool", "tests") root_folder <- args[1] has_errors <- FALSE -linters <- with_defaults( - line_length_linter(79), - # disabled because broken: https://github.com/jimhester/lintr/issues/253 - commas_linter = NULL, - # disabled because broken: https://github.com/jimhester/lintr/issues/27 - object_usage_linter = NULL -) +linters <- with_defaults(line_length_linter(79)) -for (path in check_paths){ - check_path <- file.path(root_folder, path) - for (file in list.files(check_path, recursive = TRUE, include.dirs = FALSE, - ignore.case = TRUE, pattern = ".*\\.R$")){ - errors <- lint(file.path(check_path, file), linters = linters, - parse_settings = FALSE) - if (!is.null(errors)){ - for (error in errors){ - print(error) - if (error["type"] != "warning"){ - has_errors <- TRUE - } - } - } +for (path in check_paths) { # nolint + check_path <- file.path(root_folder, path) + for (file in list.files( + check_path, + recursive = TRUE, + include.dirs = FALSE, + ignore.case = TRUE, + pattern = ".*\\.R$" + )) { + errors <- lint(file.path(check_path, file), + linters = linters, + parse_settings = FALSE + ) + if (!is.null(errors)) { + for (error in errors) { + print(error) + } + has_errors <- TRUE } } + } } -if (has_errors){ - quit(status = 1) +if (has_errors) { + quit(status = 1) } quit(status = 0) diff --git a/tests/unit/cmor/__init__.py b/tests/unit/cmor/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/unit/cmor/test_cmor_check.py b/tests/unit/cmor/test_cmor_check.py deleted file mode 100644 index 840d483d43..0000000000 --- a/tests/unit/cmor/test_cmor_check.py +++ /dev/null @@ -1,605 +0,0 @@ -"""Unit tests for the CMORCheck class.""" - -import sys -import unittest -from io import StringIO - -import iris -import iris.coord_categorisation -import iris.coords -import iris.util -import numpy -from cf_units import Unit - -from esmvaltool.cmor.check import CMORCheck,
CMORCheckError - - -class VariableInfoMock: - """Mock for the variables defintion""" - - def __init__(self): - self.table_type = 'CMIP5' - self.short_name = 'short_name' - self.standard_name = 'age_of_sea_ice' # Iris don't accept fakes ... - self.long_name = 'Long Name' - self.units = 'years' # ... nor in the units - self.valid_min = '0' - self.valid_max = '100' - self.frequency = 'day' - self.positive = '' - - generic_level = CoordinateInfoMock('depth') - generic_level.generic_level = True - generic_level.axis = 'Z' - - requested = CoordinateInfoMock('air_pressure') - requested.requested = [str(number) for number in range(20)] - - self.coordinates = { - 'time': CoordinateInfoMock('time'), - 'lat': CoordinateInfoMock('lat'), - 'lon': CoordinateInfoMock('lon'), - 'air_pressure': requested, - 'depth': generic_level - } - - -class CoordinateInfoMock: - """Mock for the coordinates info""" - - def __init__(self, name): - self.name = name - self.generic_level = False - - self.axis = "" - self.value = "" - standard_names = {'lat': 'latitude', 'lon': 'longitude'} - if name in standard_names: - self.standard_name = standard_names[name] - else: - self.standard_name = name - self.long_name = "Long name" - self.out_name = self.name - self.var_name = self.name - - units = { - 'lat': 'degrees_north', - 'lon': 'degrees_east', - 'time': 'days since 1950-01-01 00:00:00' - } - if name in units: - self.units = units[name] - else: - self.units = "units" - - self.stored_direction = "increasing" - self.requested = [] - - valid_limits = {'lat': ('-90', '90'), 'lon': ('0', '360')} - if name in valid_limits: - self.valid_min = valid_limits[name][0] - self.valid_max = valid_limits[name][1] - else: - self.valid_min = "" - self.valid_max = "" - - -class TestCMORCheck(unittest.TestCase): - """Test CMORCheck class""" - - def setUp(self): - self.var_info = VariableInfoMock() - self.cube = self.get_cube(self.var_info) - - def test_report_error(self): - """Test report error function""" - checker = CMORCheck(self.cube, self.var_info) - self.assertFalse(checker.has_errors()) - checker.report_error('New error: {}', 'something failed') - self.assertTrue(checker.has_errors()) - - def test_fail_on_error(self): - """Test exception is raised if fail_on_error is activated""" - checker = CMORCheck(self.cube, self.var_info, fail_on_error=True) - with self.assertRaises(CMORCheckError): - checker.report_error('New error: {}', 'something failed') - - def test_report_warning(self): - """Test report warning function""" - checker = CMORCheck(self.cube, self.var_info) - self.assertFalse(checker.has_errors()) - checker.report_warning('New error: {}', 'something failed') - self.assertTrue(checker.has_warnings()) - - def test_warning_fail_on_error(self): - """Test report warning function with fail_on_error""" - checker = CMORCheck(self.cube, self.var_info, fail_on_error=True) - stdout = sys.stdout - sys.stdout = StringIO() - checker.report_warning('New error: {}', 'something failed') - output = sys.stdout.getvalue().strip() - sys.stdout = stdout - self.assertEqual(output, 'WARNING: New error: something failed') - - def test_check(self): - """Test checks succeeds for a good cube""" - self._check_cube() - - def _check_cube(self, automatic_fixes=False, frequency=None): - """Apply checks and optionally automatic fixes to self.cube.""" - - def checker(cube): - return CMORCheck( - cube, - self.var_info, - automatic_fixes=automatic_fixes, - frequency=frequency) - - self.cube = checker(self.cube).check_metadata() - self.cube = 
checker(self.cube).check_data() - - def test_check_with_month_number(self): - """Test checks succeeds for a good cube with month number""" - iris.coord_categorisation.add_month_number(self.cube, 'time') - self._check_cube() - - def test_check_with_day_of_month(self): - """Test checks succeeds for a good cube with day of month""" - iris.coord_categorisation.add_day_of_month(self.cube, 'time') - self._check_cube() - - def test_check_with_day_of_year(self): - """Test checks succeeds for a good cube with day of year""" - iris.coord_categorisation.add_day_of_year(self.cube, 'time') - self._check_cube() - - def test_check_with_year(self): - """Test checks succeeds for a good cube with year""" - iris.coord_categorisation.add_year(self.cube, 'time') - self._check_cube() - - def test_check_with_unit_conversion(self): - """Test check succeds for a good cube requiring unit converision""" - self.cube.units = 'days' - self._check_cube() - - def test_check_with_psu_units(self): - """Test check succeds for a good cube with psu units""" - self.var_info.units = 'psu' - self.cube = self.get_cube(self.var_info) - self._check_cube() - - def test_check_with_positive(self): - """Check variable with positive attribute""" - self.var_info.positive = 'up' - self.cube = self.get_cube(self.var_info) - self._check_cube() - - def test_check_with_no_positive_CMIP5(self): - """Check CMIP5 variable with no positive attribute report warning""" - self.cube = self.get_cube(self.var_info) - self.var_info.positive = 'up' - self._check_warnings_on_metadata() - - def test_check_with_no_positive_CMIP6(self): - """Check CMIP6 variable with no positive attribute report warning.""" - self.cube = self.get_cube(self.var_info) - self.var_info.positive = 'up' - self.var_info.table_type = 'CMIP6' - self._check_warnings_on_metadata() - - def test_invalid_rank(self): - """Test check fails in metadata step when rank is not correct""" - lat = iris.coords.AuxCoord.from_coord(self.cube.coord('latitude')) - self.cube.remove_coord('latitude') - self.cube.add_aux_coord(lat, self.cube.coord_dims('longitude')) - self._check_fails_in_metadata() - - def test_rank_with_aux_coords(self): - """Check succeeds even if a required coordinate is an aux coord""" - iris.util.demote_dim_coord_to_aux_coord(self.cube, 'latitude') - self._check_cube() - - def test_rank_with_scalar_coords(self): - """Check succeeds even if a required coordinate is a scalar coord""" - self.cube = self.cube.extract( - iris.Constraint(time=self.cube.coord('time').cell(0))) - self._check_cube() - - def test_rank_unestructured_grid(self): - """Check succeeds even if two required coordinates share a dimension""" - self.cube = self.cube.extract( - iris.Constraint(latitude=self.cube.coord('latitude').points[0])) - self.cube.remove_coord('latitude') - iris.util.demote_dim_coord_to_aux_coord(self.cube, 'longitude') - new_lat = self.cube.coord('longitude').copy() - new_lat.var_name = 'lat' - new_lat.standard_name = 'latitude' - new_lat.long_name = 'Latitude' - self.cube.add_aux_coord(new_lat, 1) - self._check_cube() - - def _check_fails_in_metadata(self, automatic_fixes=False, frequency=None): - checker = CMORCheck( - self.cube, - self.var_info, - automatic_fixes=automatic_fixes, - frequency=frequency) - with self.assertRaises(CMORCheckError): - checker.check_metadata() - - def _check_warnings_on_metadata(self): - checker = CMORCheck(self.cube, self.var_info) - checker.check_metadata() - self.assertTrue(checker.has_warnings()) - - def test_non_requested(self): - """ - Warning if requested 
values are not present - - Check issue a warning if a values requested - for a coordinate are not correct in the metadata step - """ - coord = self.cube.coord('air_pressure') - values = numpy.linspace(0, 40, len(coord.points)) - self._update_coordinate_values(self.cube, coord, values) - checker = CMORCheck(self.cube, self.var_info) - checker.check_metadata() - self.assertTrue(checker.has_warnings()) - - def test_non_increasing(self): - """Fail in metadata if increasing coordinate is decreasing""" - coord = self.cube.coord('latitude') - values = numpy.linspace(coord.points[-1], coord.points[0], - len(coord.points)) - self._update_coordinate_values(self.cube, coord, values) - self._check_fails_in_metadata() - - def test_non_decreasing(self): - """Fail in metadata if decreasing coordinate is increasing""" - self.var_info.coordinates['lat'].stored_direction = 'decreasing' - self._check_fails_in_metadata() - - def test_non_decreasing_fix(self): - """Check automatic fix for non decreasing coordinate""" - self.cube.data[0, 0, 0, 0, 0] = 70 - self.var_info.coordinates['lat'].stored_direction = 'decreasing' - self._check_cube(automatic_fixes=True) - self._check_cube() - index = [0, 0, 0, 0, 0] - index[self.cube.coord_dims('latitude')[0]] = -1 - self.assertEqual(self.cube.data.item(tuple(index)), 70) - self.assertEqual(self.cube.data[0, 0, 0, 0, 0], 50) - cube_points = self.cube.coord('latitude').points - reference = numpy.linspace(90, -90, 20, endpoint=True) - for index in range(20): - self.assertTrue( - iris.util.approx_equal(cube_points[index], reference[index])) - - def test_not_correct_lons(self): - """Fail if longitudes are not correct in metadata step""" - self.cube = self.cube.intersection(longitude=(-180., 180.)) - self._check_fails_in_metadata() - - def test_lons_automatic_fix(self): - """Test automatic fixes for bad longitudes""" - self.cube = self.cube.intersection(longitude=(-180., 180.)) - self._check_cube(automatic_fixes=True) - - def test_high_lons_automatic_fix(self): - """Test automatic fixes for high longitudes""" - self.cube = self.cube.intersection(longitude=(180., 520.)) - self._check_cube(automatic_fixes=True) - - def test_not_valid_min(self): - """Fail if coordinate values below valid_min""" - coord = self.cube.coord('latitude') - values = numpy.linspace(coord.points[0] - 1, coord.points[-1], - len(coord.points)) - self._update_coordinate_values(self.cube, coord, values) - self._check_fails_in_metadata() - - def test_not_valid_max(self): - """Fail if coordinate values above valid_max""" - coord = self.cube.coord('latitude') - values = numpy.linspace(coord.points[0], coord.points[-1] + 1, - len(coord.points)) - self._update_coordinate_values(self.cube, coord, values) - self._check_fails_in_metadata() - - @staticmethod - def _update_coordinate_values(cube, coord, values): - [dimension] = cube.coord_dims(coord) - cube.remove_coord(coord) - new_coord = iris.coords.DimCoord( - values, - standard_name=coord.standard_name, - long_name=coord.long_name, - var_name=coord.var_name, - units=coord.units) - cube.add_dim_coord(new_coord, dimension) - - def test_bad_units(self): - """Fail if coordinates have bad units""" - self.cube.coord('latitude').units = 'degrees_n' - self._check_fails_in_metadata() - - def test_units_automatic_fix(self): - """Test automatic fix for bad coordinate units""" - self.cube.coord('latitude').units = 'degrees_n' - self._check_cube(automatic_fixes=True) - - def test_units_automatic_fix_failed(self): - """Test automatic fix fail for incompatible coordinate 
units""" - self.cube.coord('latitude').units = 'degC' - self._check_fails_in_metadata(automatic_fixes=True) - - def test_bad_time(self): - """Fail if time have bad units""" - self.cube.coord('time').units = 'days' - self._check_fails_in_metadata() - - def test_time_automatic_fix(self): - """Test automatic fix for time units""" - self.cube.coord('time').units = 'days since 1860-1-1 00:00:00' - self._check_cube() - assert (self.cube.coord('time').units.origin == - 'days since 1950-1-1 00:00:00') - - def test_time_automatic_fix_failed(self): - """Test automatic fix fail for incompatible time units""" - self.cube.coord('time').units = 'K' - self._check_fails_in_metadata(automatic_fixes=True) - - def test_bad_standard_name(self): - """Fail if coordinates have bad standard names at metadata step""" - self.cube.coord('time').standard_name = 'region' - self._check_fails_in_metadata() - - def test_bad_out_name(self): - """Fail if coordinates have bad short names at metadata step""" - self.cube.coord('latitude').var_name = 'region' - self._check_fails_in_metadata() - - def test_bad_data_units(self): - """Fail if data has bad units at metadata step""" - self.cube.units = 'hPa' - self._check_fails_in_metadata() - - def test_bad_data_standard_name(self): - """Fail if data have bad standard_name at metadata step""" - self.cube.standard_name = 'wind_speed' - self._check_fails_in_metadata() - - def test_bad_positive(self): - """Fail if positive value is incorrect at metadata step""" - self.cube.attributes['positive'] = 'up' - self.var_info.positive = 'down' - self._check_fails_in_metadata() - - def test_bad_standard_name_genlevel(self): - """Check if generic level has a different""" - self.cube.coord('depth').standard_name = None - self._check_cube() - - def test_frequency_month_not_same_day(self): - """Fail at metadata if frequency (day) not matches data frequency""" - self.cube = self.get_cube(self.var_info, frequency='mon') - time = self.cube.coord('time') - points = numpy.array(time.points) - points[1] = points[1] + 12 - dims = self.cube.coord_dims(time) - self.cube.remove_coord(time) - self.cube.add_dim_coord(time.copy(points), dims) - self._check_cube(frequency='mon') - - def test_bad_frequency_day(self): - """Fail at metadata if frequency (day) not matches data frequency""" - self.cube = self.get_cube(self.var_info, frequency='mon') - self._check_fails_in_metadata(frequency='day') - - def test_bad_frequency_subhr(self): - """Fail at metadata if frequency (subhr) not matches data frequency""" - self._check_fails_in_metadata(frequency='subhr') - - def test_bad_frequency_dec(self): - """Fail at metadata if frequency (dec) not matches data frequency""" - self._check_fails_in_metadata(frequency='d') - - def test_bad_frequency_yr(self): - """Fail at metadata if frequency (yr) not matches data frequency""" - self._check_fails_in_metadata(frequency='yr') - - def test_bad_frequency_mon(self): - """Fail at metadata if frequency (mon) not matches data frequency""" - self._check_fails_in_metadata(frequency='mon') - - def test_bad_frequency_hourly(self): - """Fail at metadata if frequency (3hr) not matches data frequency""" - self._check_fails_in_metadata(frequency='3hr') - - def test_frequency_not_supported(self): - """Fail at metadata if frequency is not supported""" - self._check_fails_in_metadata(frequency='wrong_freq') - - def _check_fails_on_data(self): - checker = CMORCheck(self.cube, self.var_info) - checker.check_metadata() - with self.assertRaises(CMORCheckError): - checker.check_data() - - def 
_check_warnings_on_data(self): - checker = CMORCheck(self.cube, self.var_info) - checker.check_metadata() - checker.check_data() - self.assertTrue(checker.has_warnings()) - - def get_cube(self, - var_info, - set_time_units="days since 1850-1-1 00:00:00", - frequency=None): - """ - Create a cube based on a specification - - Parameters - ---------- - var_info: - variable specification - set_time_units: str - units for the time coordinate - frequency: None or str - frequency of the generated data - - Returns - ------- - iris.cube.Cube - - """ - coords = [] - scalar_coords = [] - index = 0 - if not frequency: - frequency = var_info.frequency - for dim_spec in var_info.coordinates.values(): - coord = self._create_coord_from_spec(dim_spec, set_time_units, - frequency) - if isinstance(coord, iris.coords.DimCoord): - coords.append((coord, index)) - index += 1 - elif isinstance(coord, iris.coords.AuxCoord): - scalar_coords.append(coord) - - if var_info.valid_min: - valid_min = float(var_info.valid_min) - else: - valid_min = 0 - - if var_info.valid_max: - valid_max = float(var_info.valid_max) - else: - valid_max = valid_min + 100 - - var_data = (numpy.ones(len(coords) * [20], 'f') * - (valid_min + (valid_max - valid_min) / 2)) - - if var_info.units == 'psu': - units = None - attributes = {'invalid_units': 'psu'} - else: - units = var_info.units - attributes = None - - cube = iris.cube.Cube( - var_data, - standard_name=var_info.standard_name, - long_name=var_info.long_name, - var_name=var_info.short_name, - units=units, - attributes=attributes, - ) - if var_info.positive: - cube.attributes['positive'] = var_info.positive - - for coord, i in coords: - cube.add_dim_coord(coord, i) - - for coord in scalar_coords: - cube.add_aux_coord(coord) - - return cube - - @staticmethod - def _construct_scalar_coord(coord_spec): - return iris.coords.AuxCoord( - coord_spec.value, - standard_name=coord_spec.standard_name, - long_name=coord_spec.long_name, - var_name=coord_spec.out_name, - units=coord_spec.units, - attributes=None) - - def _create_coord_from_spec(self, coord_spec, set_time_units, frequency): - if coord_spec.units.startswith("days since "): - coord_spec.units = set_time_units - coord_spec.frequency = frequency - - if coord_spec.value: - return self._construct_scalar_coord(coord_spec) - - return self._construct_array_coord(coord_spec) - - def _construct_array_coord(self, dim_spec): - if dim_spec.units.startswith("days since "): - values = self._get_time_values(dim_spec) - unit = Unit(dim_spec.units, calendar='360_day') - else: - values = self._get_values(dim_spec) - unit = Unit(dim_spec.units) - # Set up attributes dictionary - coord_atts = {'stored_direction': dim_spec.stored_direction} - coord = iris.coords.DimCoord( - values, - standard_name=dim_spec.standard_name, - long_name=dim_spec.long_name, - var_name=dim_spec.out_name, - attributes=coord_atts, - units=unit, - ) - return coord - - @staticmethod - def _get_values(dim_spec): - valid_min = dim_spec.valid_min - if valid_min: - valid_min = float(valid_min) - else: - valid_min = 0.0 - valid_max = dim_spec.valid_max - if valid_max: - valid_max = float(valid_max) - else: - valid_max = 100.0 - decreasing = dim_spec.stored_direction == 'decreasing' - endpoint = not dim_spec.standard_name == 'longitude' - if decreasing: - values = numpy.linspace( - valid_max, valid_min, 20, endpoint=endpoint) - else: - values = numpy.linspace( - valid_min, valid_max, 20, endpoint=endpoint) - values = numpy.array(values) - if dim_spec.requested: - requested = [float(val) 
for val in dim_spec.requested] - requested.sort(reverse=decreasing) - for j, request in enumerate(requested): - values[j] = request - if decreasing: - extra_values = numpy.linspace( - len(requested), valid_min, 20 - len(requested)) - else: - extra_values = numpy.linspace( - len(requested), valid_max, 20 - len(requested)) - - for j in range(len(requested), 20): - values[j] = extra_values[j - len(requested)] - - return values - - @staticmethod - def _get_time_values(dim_spec): - frequency = dim_spec.frequency - if frequency == 'mon': - delta = 30 - elif frequency == 'day': - delta = 1 - elif frequency.ends_with('hr'): - delta = float(frequency[:-2]) / 24 - else: - raise Exception('Frequency {} not supported'.format(frequency)) - start = 0 - end = start + delta * 20 - return numpy.arange(start, end, step=delta) - - -if __name__ == "__main__": - unittest.main() diff --git a/tests/unit/cmor/test_fix.py b/tests/unit/cmor/test_fix.py deleted file mode 100644 index 1e9832ea50..0000000000 --- a/tests/unit/cmor/test_fix.py +++ /dev/null @@ -1,172 +0,0 @@ -"""Unit tests for the variable_info module.""" - -import unittest - -import mock - -from esmvaltool.cmor.fix import Fix, fix_data, fix_file, fix_metadata - - -class TestFixFile(unittest.TestCase): - """Fix file tests""" - - def setUp(self): - """Prepare for testing""" - self.filename = 'filename' - self.mock_fix = mock.Mock() - self.mock_fix.fix_file.return_value = 'new_filename' - - def test_fix(self): - """Check that the returned fix is applied""" - with mock.patch( - 'esmvaltool.cmor._fixes.fix.Fix.get_fixes', - return_value=[self.mock_fix]): - file_returned = fix_file('filename', 'short_name', 'project', - 'model', 'output_dir') - self.assertNotEqual(file_returned, self.filename) - self.assertEqual(file_returned, 'new_filename') - - def test_nofix(self): - """Check that the same file is returned if no fix is available""" - with mock.patch( - 'esmvaltool.cmor._fixes.fix.Fix.get_fixes', return_value=[]): - file_returned = fix_file('filename', 'short_name', 'project', - 'model', 'output_dir') - self.assertEqual(file_returned, self.filename) - - -class TestGetCube(unittest.TestCase): - """Test get cube by var_name method""" - - def setUp(self): - """Prepare for testing""" - self.cube_1 = mock.Mock() - self.cube_1.var_name = 'cube1' - self.cube_2 = mock.Mock() - self.cube_2.var_name = 'cube2' - self.cubes = [self.cube_1, self.cube_2] - self.fix = Fix() - - def test_get_first_cube(self): - """Test selecting first cube""" - self.assertIs(self.cube_1, - self.fix.get_cube_from_list(self.cubes, "cube1")) - - def test_get_second_cube(self): - """Test selecting second cube.""" - self.assertIs(self.cube_2, - self.fix.get_cube_from_list(self.cubes, "cube2")) - - def test_get_default_raises(self): - """Check that the default raises (Fix is not a cube).""" - with self.assertRaises(Exception): - self.fix.get_cube_from_list(self.cubes) - - def test_get_default(self): - """Check that the default raises (Fix is a cube).""" - self.cube_1.var_name = 'Fix' - self.assertIs(self.cube_1, self.fix.get_cube_from_list(self.cubes)) - - -class TestFixMetadata(unittest.TestCase): - """Fix metadata tests.""" - - def setUp(self): - """Prepare for testing.""" - self.cube = mock.Mock() - self.cube.attributes = {'source_file': 'source_file'} - self.fixed_cube = mock.Mock() - self.fixed_cube.attributes = {'source_file': 'source_file'} - self.mock_fix = mock.Mock() - self.mock_fix.fix_metadata.return_value = [self.fixed_cube] - - def test_fix(self): - """Check that the returned fix 
is applied.""" - with mock.patch( - 'esmvaltool.cmor._fixes.fix.Fix.get_fixes', - return_value=[self.mock_fix]): - cube_returned = fix_metadata([self.cube], 'short_name', 'project', - 'model')[0] - self.assertTrue(cube_returned is not self.cube) - self.assertTrue(cube_returned is self.fixed_cube) - - def test_nofix(self): - """Check that the same cube is returned if no fix is available.""" - with mock.patch( - 'esmvaltool.cmor._fixes.fix.Fix.get_fixes', return_value=[]): - cube_returned = fix_metadata([self.cube], 'short_name', 'project', - 'model')[0] - self.assertTrue(cube_returned is self.cube) - self.assertTrue(cube_returned is not self.fixed_cube) - - def test_cmor_checker_called(self): - """Check that the cmor check is done.""" - checker = mock.Mock() - checker.return_value = mock.Mock() - with mock.patch( - 'esmvaltool.cmor._fixes.fix.Fix.get_fixes', return_value=[]): - with mock.patch( - 'esmvaltool.cmor.fix._get_cmor_checker', - return_value=checker) as get_mock: - fix_metadata([self.cube], 'short_name', 'project', 'model', - 'cmor_table', 'mip', 'frequency') - get_mock.assert_called_once_with( - automatic_fixes=True, - fail_on_error=False, - frequency='frequency', - mip='mip', - short_name='short_name', - table='cmor_table') - checker.assert_called_once_with(self.cube) - checker.return_value.check_metadata.assert_called_once_with() - - -class TestFixData(unittest.TestCase): - """Fix data tests.""" - - def setUp(self): - """Prepare for testing.""" - self.cube = mock.Mock() - self.fixed_cube = mock.Mock() - self.mock_fix = mock.Mock() - self.mock_fix.fix_data.return_value = self.fixed_cube - - def test_fix(self): - """Check that the returned fix is applied.""" - with mock.patch( - 'esmvaltool.cmor._fixes.fix.Fix.get_fixes', - return_value=[self.mock_fix]): - cube_returned = fix_data(self.cube, 'short_name', 'project', - 'model') - self.assertTrue(cube_returned is not self.cube) - self.assertTrue(cube_returned is self.fixed_cube) - - def test_nofix(self): - """Check that the same cube is returned if no fix is available.""" - with mock.patch( - 'esmvaltool.cmor._fixes.fix.Fix.get_fixes', return_value=[]): - cube_returned = fix_data(self.cube, 'short_name', 'project', - 'model') - self.assertTrue(cube_returned is self.cube) - self.assertTrue(cube_returned is not self.fixed_cube) - - def test_cmor_checker_called(self): - """Check that the cmor check is done""" - checker = mock.Mock() - checker.return_value = mock.Mock() - with mock.patch( - 'esmvaltool.cmor._fixes.fix.Fix.get_fixes', return_value=[]): - with mock.patch( - 'esmvaltool.cmor.fix._get_cmor_checker', - return_value=checker) as get_mock: - fix_data(self.cube, 'short_name', 'project', 'model', - 'cmor_table', 'mip', 'frequency') - get_mock.assert_called_once_with( - automatic_fixes=True, - fail_on_error=False, - frequency='frequency', - mip='mip', - short_name='short_name', - table='cmor_table') - checker.assert_called_once_with(self.cube) - checker.return_value.check_data.assert_called_once_with() diff --git a/tests/unit/cmor/test_table.py b/tests/unit/cmor/test_table.py deleted file mode 100644 index db9b57e5bf..0000000000 --- a/tests/unit/cmor/test_table.py +++ /dev/null @@ -1,129 +0,0 @@ -"""Unit tests for the variable_info module.""" - -import unittest - -from esmvaltool.cmor.table import CoordinateInfo, VariableInfo - - -class TestVariableInfo(unittest.TestCase): - """Variable info tests.""" - - def setUp(self): - """Prepare for testing.""" - self.value = 'value' - - def test_constructor(self): - """Test basic 
constructor.""" - info = VariableInfo('table_type', 'var') - self.assertEqual('table_type', info.table_type) - self.assertEqual('var', info.short_name) - - def test_read_empty_dictionary(self): - """Test read empty dict.""" - info = VariableInfo('table_type', 'var') - info.read_json({}) - self.assertEqual('', info.standard_name) - - def test_read_standard_name(self): - """Test standard_name.""" - info = VariableInfo('table_type', 'var') - info.read_json({'standard_name': self.value}) - self.assertEqual(info.standard_name, self.value) - - def test_read_long_name(self): - """Test long_name.""" - info = VariableInfo('table_type', 'var') - info.read_json({'long_name': self.value}) - self.assertEqual(info.long_name, self.value) - - def test_read_units(self): - """Test units.""" - info = VariableInfo('table_type', 'var') - info.read_json({'units': self.value}) - self.assertEqual(info.units, self.value) - - def test_read_valid_min(self): - """Test valid_min.""" - info = VariableInfo('table_type', 'var') - info.read_json({'valid_min': self.value}) - self.assertEqual(info.valid_min, self.value) - - def test_read_valid_max(self): - """Test valid_max.""" - info = VariableInfo('table_type', 'var') - info.read_json({'valid_max': self.value}) - self.assertEqual(info.valid_max, self.value) - - def test_read_positive(self): - """Test positive.""" - info = VariableInfo('table_type', 'var') - info.read_json({'positive': self.value}) - self.assertEqual(info.positive, self.value) - - -class TestCoordinateInfo(unittest.TestCase): - """Tests for CoordinataInfo.""" - - def setUp(self): - """Prepare for testing.""" - self.value = 'value' - - def test_constructor(self): - """Test constructor.""" - info = CoordinateInfo('var') - self.assertEqual('var', info.name) - - def test_read_empty_dictionary(self): - """Test empty dict.""" - info = CoordinateInfo('var') - info.read_json({}) - self.assertEqual('', info.standard_name) - - def test_read_standard_name(self): - """Test standard_name.""" - info = CoordinateInfo('var') - info.read_json({'standard_name': self.value}) - self.assertEqual(info.standard_name, self.value) - - def test_read_var_name(self): - """Test var_name.""" - info = CoordinateInfo('var') - info.read_json({'var_name': self.value}) - self.assertEqual(info.var_name, self.value) - - def test_read_out_name(self): - """Test out_name.""" - info = CoordinateInfo('var') - info.read_json({'out_name': self.value}) - self.assertEqual(info.out_name, self.value) - - def test_read_units(self): - """Test units.""" - info = CoordinateInfo('var') - info.read_json({'units': self.value}) - self.assertEqual(info.units, self.value) - - def test_read_valid_min(self): - """Test valid_min.""" - info = CoordinateInfo('var') - info.read_json({'valid_min': self.value}) - self.assertEqual(info.valid_min, self.value) - - def test_read_valid_max(self): - """Test valid_max.""" - info = CoordinateInfo('var') - info.read_json({'valid_max': self.value}) - self.assertEqual(info.valid_max, self.value) - - def test_read_value(self): - """Test value.""" - info = CoordinateInfo('var') - info.read_json({'value': self.value}) - self.assertEqual(info.value, self.value) - - def test_read_requested(self): - """Test requested.""" - value = ['value1', 'value2'] - info = CoordinateInfo('var') - info.read_json({'requested': value}) - self.assertEqual(info.requested, value) diff --git a/tests/unit/cmorizers/__init__.py b/tests/unit/cmorizers/__init__.py new file mode 100644 index 0000000000..da9e184552 --- /dev/null +++ 
b/tests/unit/cmorizers/__init__.py @@ -0,0 +1 @@ +"""Tests for cmorizers.""" diff --git a/tests/unit/cmorizers/obs/__init__.py b/tests/unit/cmorizers/obs/__init__.py new file mode 100644 index 0000000000..1ae55ece87 --- /dev/null +++ b/tests/unit/cmorizers/obs/__init__.py @@ -0,0 +1 @@ +"""Tests for obs cmorizers.""" diff --git a/tests/unit/cmorizers/obs/test_merra2.py b/tests/unit/cmorizers/obs/test_merra2.py new file mode 100644 index 0000000000..cb65c25713 --- /dev/null +++ b/tests/unit/cmorizers/obs/test_merra2.py @@ -0,0 +1,354 @@ +import iris +import iris.coord_systems +import iris.fileformats +import netCDF4 +import numpy as np +import pytest +from cf_units import Unit + +from esmvaltool.cmorizers.data.formatters.datasets.merra2 import ( + _load_cube, + _extract_variable +) +from esmvaltool.cmorizers.data.utilities import read_cmor_config + + +def _create_sample_cube(): + """Create a quick CMOR-compliant sample cube.""" + coord_sys = iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS) + cube_data = np.ones((1, 3, 2, 2)) + cube_data[0, 1, 1, 1] = 22. + time = iris.coords.DimCoord([ + 100, + ], + standard_name='time', + units=Unit('minutes since 1982-01-01 00:30:00', + calendar='gregorian')) + zcoord = iris.coords.DimCoord([0.5, 5., 50.], + long_name='vertical level', + var_name='lev', + units='hPa', + attributes={'positive': 'down'}) + lons = iris.coords.DimCoord([1.5, 2.5], + standard_name='longitude', + units='K', + coord_system=coord_sys) + lats = iris.coords.DimCoord([1.5, 2.5], + standard_name='latitude', + units='K', + coord_system=coord_sys) + coords_spec = [(time, 0), (zcoord, 1), (lats, 2), (lons, 3)] + cube = iris.cube.Cube(cube_data, dim_coords_and_dims=coords_spec) + drop_attrs = [ + 'History', 'Filename', 'Comment', 'RangeBeginningDate', + 'RangeEndingDate', 'GranuleID', 'ProductionDateTime', 'Source' + ] + for attr in drop_attrs: + cube.attributes[attr] = "cow" + drop_time_attrs = [ + 'begin_date', 'begin_time', 'time_increment', 'valid_range', 'vmax', + 'vmin' + ] + for attr in drop_time_attrs: + cube.coord('time').attributes[attr] = "1982" + cube.coord('time').attributes["valid_range"] = [50, 150] + + extra_special_attrs = [ + "Institution", + "VersionID", + "experiment_id", + "Source", + "ModelID", + "Contact", + ] + for attr in extra_special_attrs: + cube.attributes[attr] = "moose" + + return cube + + +def test_load_cube_single_var(tmp_path): + """Test loading MERRA2 cubes.""" + path_cubes = tmp_path / "cubes.nc" + cube_1 = _create_sample_cube() + cube_1.var_name = "SWTDN" + cubes = iris.cube.CubeList([cube_1]) + iris.save(cubes, str(path_cubes)) + var = { + 'short_name': 'rsut', + 'mip': 'Amon', 'raw': 'SWTDN', + 'file': 'MERRA2_???.tavgM_2d_rad_Nx.{year}??.nc4' + } + in_files = str(tmp_path / "cubes.nc") + selection = _load_cube(in_files, var) + assert np.mean(selection.data) == 2.75 + + +def test_load_cube_pairwise_vars(tmp_path): + """Test loading MERRA2 cubes.""" + path_cubes = tmp_path / "cubes.nc" + cube_1 = _create_sample_cube() + cube_1.var_name = "SWTDN" + cube_2 = _create_sample_cube() + cube_2.var_name = "SWTNT" + cubes = iris.cube.CubeList([cube_1, cube_2]) + iris.save(cubes, str(path_cubes)) + var = { + 'short_name': 'rsut', + 'mip': 'Amon', 'raw': 'SWTDN-SWTNT', + 'file': 'MERRA2_???.tavgM_2d_rad_Nx.{year}??.nc4' + } + in_files = str(tmp_path / "cubes.nc") + selection = _load_cube(in_files, var) + assert np.mean(selection.data) == 0. 
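+ # A hyphenated 'raw' entry such as 'SWTDN-SWTNT' is resolved by _load_cube + # as the difference of the two component variables, so two identical sample + # cubes yield a mean of exactly 0; more than two components is not + # supported, as the next test checks.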
+ + +def test_load_cube_threewise_vars(tmp_path): + """Test loading MERRA2 cubes.""" + path_cubes = tmp_path / "cubes.nc" + cube_1 = _create_sample_cube() + cube_1.var_name = "SWTDN" + cube_2 = _create_sample_cube() + cube_2.var_name = "SWTNT" + cube_3 = _create_sample_cube() + cube_3.var_name = "COW" + cubes = iris.cube.CubeList([cube_1, cube_2, cube_3]) + iris.save(cubes, str(path_cubes)) + var = { + 'short_name': 'rsut', + 'mip': 'Amon', 'raw': 'SWTDN-SWTNT-COW', + 'file': 'MERRA2_???.tavgM_2d_rad_Nx.{year}??.nc4' + } + in_files = str(tmp_path / "cubes.nc") + with pytest.raises(NotImplementedError) as exc: + _load_cube(in_files, var) + print(exc) + + +def test_load_cube_pairwise_vars_var_not_found(tmp_path): + """Test loading MERRA2 cubes.""" + path_cubes = tmp_path / "cubes.nc" + cube_1 = _create_sample_cube() + cube_1.var_name = "SWTDN" + cube_2 = _create_sample_cube() + cube_2.var_name = "SWTNT" + cubes = iris.cube.CubeList([cube_1, cube_2]) + iris.save(cubes, str(path_cubes)) + var = { + 'short_name': 'rsut', + 'mip': 'Amon', 'raw': 'COWABUNGA-CORVETTE', + 'file': 'MERRA2_???.tavgM_2d_rad_Nx.{year}??.nc4' + } + in_files = str(tmp_path / "cubes.nc") + with pytest.raises(ValueError) as exc: + _load_cube(in_files, var) + print(exc) + + +def test_load_cube_pairwise_vars_var_not_found_2(tmp_path): + """Test loading MERRA2 cubes.""" + path_cubes = tmp_path / "cubes.nc" + cube_1 = _create_sample_cube() + cube_1.var_name = "SWTDN" + cube_2 = _create_sample_cube() + cube_2.var_name = "SWTNT" + cubes = iris.cube.CubeList([cube_1, cube_2]) + iris.save(cubes, str(path_cubes)) + var = { + 'short_name': 'rsut', + 'mip': 'Amon', 'raw': 'SWTDN-CORVETTE', + 'file': 'MERRA2_???.tavgM_2d_rad_Nx.{year}??.nc4' + } + in_files = str(tmp_path / "cubes.nc") + with pytest.raises(ValueError) as exc: + _load_cube(in_files, var) + print(exc) + + +def test_load_cube_pairwise_vars_wrong_oper(tmp_path): + """Test loading MERRA2 cubes.""" + path_cubes = tmp_path / "cubes.nc" + cube_1 = _create_sample_cube() + cube_1.var_name = "SWTDN" + cube_2 = _create_sample_cube() + cube_2.var_name = "SWTNT" + cubes = iris.cube.CubeList([cube_1, cube_2]) + iris.save(cubes, str(path_cubes)) + var = { + 'short_name': 'rsut', + 'mip': 'Amon', 'raw': 'SWTDN:SWTNT', + 'file': 'MERRA2_???.tavgM_2d_rad_Nx.{year}??.nc4' + } + in_files = str(tmp_path / "cubes.nc") + with pytest.raises(NotImplementedError) as exc: + _load_cube(in_files, var) + print(exc) + + +def test_extract_variable(tmp_path): + """Test variable extraction.""" + # call is _extract_variable(in_files, var, cfg, out_dir) + path_cubes = tmp_path / "cubes.nc" + cube_1 = _create_sample_cube() + cube_1.var_name = "SWTDN" + cube_1.units = Unit('W m-2') + cubes = iris.cube.CubeList([cube_1]) + iris.save(cubes, str(path_cubes)) + var = { + 'short_name': 'rsut', + 'mip': 'Amon', 'raw': 'SWTDN', + 'file': 'MERRA2_???.tavgM_2d_rad_Nx.{year}??.nc4' + } + in_files = str(tmp_path / "cubes.nc") + cfg = read_cmor_config("MERRA2") + _extract_variable(in_files, var, cfg, tmp_path) + cmorized_data = \ + tmp_path / "OBS6_MERRA2_reanaly_5.12.4_Amon_rsut_198201-198201.nc" + cmorized_cube = iris.load_cube(str(cmorized_data)) + print(cmorized_cube, + cmorized_cube.coord("time"), + cmorized_cube.coord("latitude")) + assert cmorized_cube.coord('time').core_points()[0] == 48226. 
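+ # 48226 days after the assumed CMOR time origin of 1850-01-01 falls on + # 1982-01-15, i.e. the mid-month time point of this monthly-mean sample.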
+ assert cmorized_cube.attributes["raw"] == 'SWTDN' + + +def test_extract_variable_pairs(tmp_path): + """Test variable extraction.""" + path_cubes = tmp_path / "cubes.nc" + cube_1 = _create_sample_cube() + cube_1.var_name = "SWTDN" + cube_1.units = Unit('W m-2') + cube_2 = _create_sample_cube() + cube_2.var_name = "SWTNT" + cube_2.units = Unit('W m-2') + cubes = iris.cube.CubeList([cube_1, cube_2]) + iris.save(cubes, str(path_cubes)) + var = { + 'short_name': 'rsut', + 'mip': 'Amon', 'raw': 'SWTDN-SWTNT', + 'file': 'MERRA2_???.tavgM_2d_rad_Nx.{year}??.nc4' + } + in_files = str(tmp_path / "cubes.nc") + cfg = read_cmor_config("MERRA2") + _extract_variable(in_files, var, cfg, tmp_path) + cmorized_data = \ + tmp_path / "OBS6_MERRA2_reanaly_5.12.4_Amon_rsut_198201-198201.nc" + cmorized_cube = iris.load_cube(str(cmorized_data)) + print(cmorized_cube, + cmorized_cube.coord("time"), + cmorized_cube.coord("latitude")) + assert cmorized_cube.coord('time').core_points()[0] == 48226. + assert cmorized_cube.attributes["raw"] == 'SWTDN-SWTNT' + assert cmorized_cube.attributes["component_raw_1"] == "SWTDN" + assert cmorized_cube.attributes["component_raw_2"] == "SWTNT" + + # Test the existence of extra attributes + extra_special_attrs = [ + "Institution", + "VersionID", + "experiment_id", + "ModelID", + "Contact", + ] + for attr in extra_special_attrs: + assert attr in cmorized_cube.attributes + + +def test_vertical_levels(tmp_path): + """Test cases for cmorization with vertical levels.""" + path_cubes = tmp_path / "cubes.nc" + cube_1 = _create_sample_cube() + cube_1.var_name = "V" + cube_1.units = Unit('m s-1') + cube_2 = _create_sample_cube() + cube_2.var_name = "U10M" + cube_2.units = Unit('m s-1') + cube_3 = _create_sample_cube() + cube_3.var_name = "T2M" + cube_3.units = Unit('K') + cube_4 = _create_sample_cube() + cube_4.var_name = "H" + cube_4.units = Unit('m') + cube_4.coord("vertical level").units = "m" + cube_5 = _create_sample_cube() + cube_5.var_name = "QI" + cube_5.units = Unit('1') + cube_5.coord("vertical level").units = "hPa" + cubes = iris.cube.CubeList([cube_1, cube_2, cube_3, cube_4, cube_5]) + iris.save(cubes, str(path_cubes)) + var_1 = { + 'short_name': 'va', + 'mip': 'Amon', 'raw': 'V', + 'file': 'MERRA2_???.instM_3d_ana_Np.{year}??.nc4' + } + var_2 = { + 'short_name': 'uas', + 'mip': 'Amon', 'raw': 'U10M', + 'file': 'MERRA2_???.tavgM_2d_slv_Nx.{year}??.nc4' + } + var_3 = { + 'short_name': 'tas', + 'mip': 'Amon', 'raw': 'T2M', + 'file': 'MERRA2_???.tavgM_2d_slv_Nx.{year}??.nc4' + } + var_4 = { + 'short_name': 'zg', + 'mip': 'Amon', 'raw': 'H', + 'file': 'MERRA2_???.instM_3d_ana_Np.{year}??.nc4' + } + var_5 = { + 'short_name': 'cli', + 'mip': 'Amon', 'raw': 'QI', + 'file': 'MERRA2_???.tavgM_3d_cld_Np.{year}??.nc4' + } + in_files = str(tmp_path / "cubes.nc") + cfg = read_cmor_config("MERRA2") + + # extract va + _extract_variable(in_files, var_1, cfg, tmp_path) + cmorized_data = \ + tmp_path / "OBS6_MERRA2_reanaly_5.12.4_Amon_va_198201-198201.nc" + cmorized_cube = iris.load_cube(str(cmorized_data)) + print(cmorized_cube, + cmorized_cube.coord("air_pressure")) + assert cmorized_cube.coord("air_pressure").has_bounds() + assert cmorized_cube.coord("air_pressure").units == "Pa" + np.testing.assert_array_equal(cmorized_cube.coord("air_pressure").points, + [50., 500., 5000.]) + # test unlimited time dim + with netCDF4.Dataset(str(cmorized_data), 'r') as handler: + assert handler["va"].get_dims()[0].isunlimited() + + # extract uas + _extract_variable(in_files, var_2, cfg, tmp_path) + 
cmorized_data = \ + tmp_path / "OBS6_MERRA2_reanaly_5.12.4_Amon_uas_198201-198201.nc" + cmorized_cube = iris.load_cube(str(cmorized_data)) + print(cmorized_cube) + np.testing.assert_array_equal(cmorized_cube.coord("height").points, + [10.]) + # test unlimited time dim + with netCDF4.Dataset(str(cmorized_data), 'r') as handler: + assert handler["uas"].get_dims()[0].isunlimited() + + # extract tas + _extract_variable(in_files, var_3, cfg, tmp_path) + cmorized_data = \ + tmp_path / "OBS6_MERRA2_reanaly_5.12.4_Amon_tas_198201-198201.nc" + cmorized_cube = iris.load_cube(str(cmorized_data)) + print(cmorized_cube) + np.testing.assert_array_equal(cmorized_cube.coord("height").points, + [2.]) + + # extract zg failed + with pytest.raises(ValueError) as exc: + _extract_variable(in_files, var_4, cfg, tmp_path) + expected_exc = "Unable to convert from 'Unit('m')' to 'Unit('Pa')'" + assert expected_exc in str(exc) + + # test unit of vertical coordinate of 3-dim cloud variable + _extract_variable(in_files, var_5, cfg, tmp_path) + cmorized_data = \ + tmp_path / "OBS6_MERRA2_reanaly_5.12.4_Amon_cli_198201-198201.nc" + cmorized_cube = iris.load_cube(str(cmorized_data)) + np.testing.assert_equal(cmorized_cube.coord(axis='Z').units, Unit('Pa')) diff --git a/tests/unit/cmorizers/test_cmorization_interface.py b/tests/unit/cmorizers/test_cmorization_interface.py new file mode 100644 index 0000000000..2d9073a7f7 --- /dev/null +++ b/tests/unit/cmorizers/test_cmorization_interface.py @@ -0,0 +1,63 @@ +import importlib +import inspect +import os + +import esmvaltool.cmorizers.data.downloaders.datasets as ddt +import esmvaltool.cmorizers.data.formatters.datasets as fdt + + +def test_formatters_have_required_interface(): + formatters_folder = os.path.dirname(fdt.__file__) + arg_names = ('in_dir', 'out_dir', 'cfg', 'cfg_user', 'start_date', + 'end_date') + unused_arg_names = ('_', '__', '___') + + error = False + + for formatter in os.listdir(formatters_folder): + if not formatter.endswith('.py') or formatter == '__init__.py': + continue + module = formatter[:-3] + member = importlib.import_module( + f".{module}", + package="esmvaltool.cmorizers.data.formatters.datasets") + spec = inspect.getfullargspec(member.__getattribute__('cmorization')) + try: + assert len(spec.args) == len(arg_names) + for x, arg in enumerate(spec.args): + assert arg == arg_names[x] or arg in unused_arg_names + except AssertionError: + print(f'Bad args in {os.path.join(formatters_folder, formatter)}: ' + f'{spec.args}') + print(f"Expected {arg_names}.") + error = True + assert not error + + +def test_downloaders_have_required_interface(): + formatters_folder = os.path.dirname(ddt.__file__) + arg_names = ('config', 'dataset', 'dataset_info', 'start_date', 'end_date', + 'overwrite') + unused_arg_names = ('_', '__', '___') + + error = False + + for formatter in os.listdir(formatters_folder): + if not formatter.endswith('.py') or formatter == '__init__.py': + continue + module = formatter[:-3] + member = importlib.import_module( + f".{module}", + package="esmvaltool.cmorizers.data.downloaders.datasets") + spec = inspect.getfullargspec( + member.__getattribute__('download_dataset')) + try: + assert len(spec.args) == len(arg_names) + for x, arg in enumerate(spec.args): + assert arg == arg_names[x] or arg in unused_arg_names + except AssertionError: + print(f'Bad args in {os.path.join(formatters_folder, formatter)}: ' + f'{spec.args}') + error = True + + assert not error diff --git a/tests/unit/cmorizers/test_datasets_info.py 
b/tests/unit/cmorizers/test_datasets_info.py new file mode 100644 index 0000000000..cc5853c579 --- /dev/null +++ b/tests/unit/cmorizers/test_datasets_info.py @@ -0,0 +1,55 @@ +import datetime +import os + +import yamale +import yaml + +import esmvaltool +from esmvaltool.cmorizers.data.cmorizer import datasets_file + +yaml_folder = os.path.abspath(os.path.dirname(datasets_file)) +recipes_folder = os.path.abspath( + os.path.join(os.path.dirname(esmvaltool.__file__), 'recipes')) + + +def test_only_datasets_are_present(): + recipe = yamale.make_data(datasets_file) + schema = yamale.make_schema( + os.path.join(yaml_folder, 'datasets_schema.yml')) + yamale.validate(schema, recipe) + + +def test_latest_version_format(): + with open(datasets_file, 'r') as file: + cfg = yaml.safe_load(file) + for dataset_info in cfg['datasets'].values(): + datetime.datetime.strptime(str(dataset_info['last_access']), + "%Y-%m-%d") + + +def test_datasets_are_added_to_test_recipe(): + with open(datasets_file, 'r') as file: + cfg = yaml.safe_load(file) + + recipe_path = os.path.join(recipes_folder, 'examples/recipe_check_obs.yml') + with open(recipe_path, 'r') as file: + recipe = yaml.safe_load(file) + + tested_datasets = set() + for diagnostic in recipe.get('diagnostics', {}).values(): + for dataset in diagnostic.get('additional_datasets', {}): + tested_datasets.add(dataset['dataset']) + for variable in diagnostic.get('variables', {}).values(): + if variable is None: + continue + for dataset in variable.get('additional_datasets', {}): + tested_datasets.add(dataset['dataset']) + + info_datasets = set(cfg['datasets'].keys()) + + if tested_datasets.symmetric_difference(info_datasets): + for dataset in tested_datasets - info_datasets: + print(f'Dataset {dataset} missing from datasets.yml') + for dataset in info_datasets - tested_datasets: + print(f'Dataset {dataset} missing from recipe_check_obs.yml') + assert False diff --git a/tests/unit/cmorizers/test_utilities.py b/tests/unit/cmorizers/test_utilities.py new file mode 100644 index 0000000000..f5823aa734 --- /dev/null +++ b/tests/unit/cmorizers/test_utilities.py @@ -0,0 +1,340 @@ +"""Tests for the module :mod:`esmvaltool.cmorizers.data.utilities`.""" + +from unittest.mock import Mock + +import dask.array as da +import iris +import iris.coord_systems +import iris.coords +import iris.cube +import iris.fileformats +import iris.util +import numpy as np +import pytest +from cf_units import Unit + +import esmvaltool.cmorizers.data.utilities as utils + + +def np_to_da(array, lazy): + """Convert numpy array to dask array.""" + if not lazy: + return array + if array is None: + return array + return da.from_array(array) + + +def is_lazy(cube): + """Check if data is lazy.""" + if not cube.has_lazy_data(): + return False + for coord in cube.coords(dim_coords=False): + if not coord.has_lazy_points(): + return False + if coord.has_bounds(): + if not coord.has_lazy_bounds(): + return False + return True + + +def cubes_generator(lazy=True): + """Generate a list of cubes via test parametrization.""" + cube_datas = [ + np.array([[0, 1], [-1, 0]], dtype=np.int32), + np.array([[0.0, 1.0], [-1.0, 0.0]], dtype=np.float32), + np.array([[0.0, 1.0], [-1.0, 0.0]], dtype=np.float64), + np.ma.masked_equal([[0, 1], [2, 3]], 3).astype(np.int32), + np.ma.masked_values([[0.0, 1.0], [2.0, 3.0]], 3.0).astype(np.float32), + np.ma.masked_values([[0.0, 1.0], [2.0, 3.0]], 3.0).astype(np.float64), + ] + x_coords = [ + (np.array([1, 3], dtype=np.int32), None), + (np.array([1, 3], + dtype=np.int32), 
np.array([[0, 2], [2, 4]], dtype=np.int32)), + (np.array([1.0, 3.0], dtype=np.float32), + np.array([[0.0, 2.0], [2.0, 4.0]], dtype=np.float32)), + (np.array([1.0, 3.0], dtype=np.float64), None), + (np.array([1.0, 3.0], dtype=np.float64), + np.array([[0.0, 2.0], [2.0, 4.0]], dtype=np.float64)), + ] + y_coords = [ + (np.array([1, 3], dtype=np.int32), + np.array([[0.0, 2.0], [2.0, 4.0]], dtype=np.float32)), + (np.array([1.0, 3.0], dtype=np.float32), + np.array([[0.0, 2.0], [2.0, 4.0]], dtype=np.float64)), + (np.array([1.0, 3.0], + dtype=np.float64), np.array([[0, 2], [2, 4]], + dtype=np.int32)), + ] + for cube_data in cube_datas: + cube_data = np_to_da(cube_data, lazy) + for x_val in x_coords: + x_val = (np_to_da(x_val[0], lazy), np_to_da(x_val[1], lazy)) + x_coord = iris.coords.DimCoord(x_val[0], + bounds=x_val[1], + var_name='x') + for y_val in y_coords: + y_val = (np_to_da(y_val[0], lazy), np_to_da(y_val[1], lazy)) + y_coord = iris.coords.DimCoord(y_val[0], + bounds=y_val[1], + var_name='y') + aux_coord = iris.coords.AuxCoord(y_val[0], + bounds=y_val[1], + var_name='aux') + cube = iris.cube.Cube( + cube_data, + var_name='test_var', + dim_coords_and_dims=[(x_coord, 0), (y_coord, 1)], + aux_coords_and_dims=[(aux_coord, 0)], + ) + yield cube + + +@pytest.mark.parametrize('cube', cubes_generator(lazy=True)) +def test_fix_dtype_lazy(cube): + """Test fix for lazy data.""" + assert is_lazy(cube) + utils.fix_dtype(cube) + assert cube.dtype == np.float32 + for coord in cube.coords(): + assert coord.dtype == np.float64 + if coord.has_bounds(): + assert coord.bounds_dtype == np.float64 + assert is_lazy(cube) + + +@pytest.mark.parametrize('cube', cubes_generator(lazy=False)) +def test_fix_dtype_not_lazy(cube): + """Test fix for realized data.""" + assert not is_lazy(cube) + utils.fix_dtype(cube) + assert cube.dtype == np.float32 + for coord in cube.coords(): + assert coord.dtype == np.float64 + if coord.has_bounds(): + assert coord.bounds_dtype == np.float64 + assert not is_lazy(cube) + + +def mock_var_info(var_dict): + mock_dict = Mock() + mock_dict.__dict__ = var_dict + return mock_dict + + +def _create_sample_cube(): + """Create a quick CMOR-compliant sample cube.""" + coord_sys = iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS) + cube_data = np.ones((2, 3, 2, 2)) + cube_data[1, 1, 1, 1] = 22. + time = iris.coords.DimCoord([15, 45], + standard_name='time', + bounds=[[1., 30.], [30., 60.]], + units=Unit('days since 1950-01-01', + calendar='gregorian')) + zcoord = iris.coords.DimCoord([0.5, 5., 50.], + var_name='depth', + standard_name='depth', + bounds=[[0., 2.5], [2.5, 25.], [25., 250.]], + units='m', + attributes={'positive': 'down'}) + lons = iris.coords.DimCoord([1.5, 2.5], + standard_name='longitude', + bounds=[[1., 2.], [2., 3.]], + units='degrees_east', + coord_system=coord_sys) + lats = iris.coords.DimCoord([1.5, 2.5], + standard_name='latitude', + bounds=[[1., 2.], [2., 3.]], + units='degrees_north', + coord_system=coord_sys) + coords_spec = [(time, 0), (zcoord, 1), (lats, 2), (lons, 3)] + cube = iris.cube.Cube(cube_data, dim_coords_and_dims=coords_spec) + return cube + + +def test_add_scalar_height_coord(): + """Test add height aux coord.""" + cube = _create_sample_cube() + utils.add_scalar_height_coord(cube, height=10.) + assert cube.coord("height").points[0] == 10. 
+    assert "positive" in cube.coord("height").attributes
+    assert cube.coord("height").attributes["positive"] == "up"
+
+
+@pytest.mark.parametrize('time_units', [
+    'months since 1950-01-01 00:00:00', 'days since 0000-01-01 00:00:00',
+    'days since 1950-1-1', 'days since 1950-1-1 00:00:00'
+])
+def test_convert_time_units(time_units):
+    """Test convert_timeunits functionality."""
+    cube = _create_sample_cube()
+    cube.coord("time").units = time_units
+    utils.convert_timeunits(cube, "1950")
+    converted_units = cube.coord("time").units
+    if time_units == 'months since 1950-01-01 00:00:00':
+        assert converted_units == 'months since 1950-01-01 00:00:00'
+    else:
+        assert converted_units == 'days since 1950-01-01 00:00:00'
+
+
+def test_fix_coords():
+    """Test fix coordinates."""
+    cube = _create_sample_cube()
+    cube.coord("time").bounds = None
+    cube.coord('time').convert_units(
+        Unit('days since 1850-1-1 00:00:00', calendar='gregorian'))
+    cube.coord("longitude").bounds = None
+    cube.coord("latitude").bounds = None
+    cube.coord("depth").bounds = None
+    cube.coord("longitude").points = cube.coord("longitude").points - 3.
+    cube.coord("time").var_name = "cows"
+    cube.coord("longitude").var_name = "cows"
+    cube.coord("latitude").var_name = "cows"
+    cube.coord("longitude").units = "m"
+    cube.coord("latitude").units = "K"
+    cube_2 = cube.copy()
+
+    cube = utils.fix_coords(cube)
+
+    assert cube.coord("time").var_name == "time"
+    assert cube.coord("longitude").var_name == "lon"
+    assert cube.coord("latitude").var_name == "lat"
+    assert cube.coord("longitude").standard_name == "longitude"
+    assert cube.coord("latitude").standard_name == "latitude"
+    assert cube.coord("longitude").long_name == "longitude coordinate"
+    assert cube.coord("latitude").long_name == "latitude coordinate"
+    assert cube.coord("longitude").units == "degrees"
+    assert cube.coord("latitude").units == "degrees"
+    assert cube.coord("depth").var_name == "lev"
+    assert cube.coord("depth").attributes['positive'] == "down"
+    assert cube.coord("time").has_bounds()
+    assert cube.coord("time").bounds[0][1] == 30.
+    assert cube.coord("time").units == 'days since 1950-1-1 00:00:00'
+    # Up to but not including CF Conventions version 1.9, `gregorian` and
+    # `standard` were synonyms. Since then, `gregorian` has been deprecated
+    # in favor of `standard`. This led to `cf-units` using `standard`, even
+    # when the calendar passed to the `Unit` constructor is `gregorian`. To
+    # support both cf-units <= 3.1.0 and later versions, we list both
+    # variants in the following assertion (see the illustrative snippet
+    # after test_flip_dim_coord below).
+    assert cube.coord("time").units.calendar in ("standard", "gregorian")
+    assert cube.coord("longitude").points[0] == 358.5
+    assert cube.coord("longitude").points[1] == 359.5
+    assert cube.coord("longitude").has_bounds()
+    assert cube.coord("longitude").bounds[1][1] == 360.0
+    assert cube.data[1, 1, 1, 1] == 22.
+    assert cube.coord("latitude").has_bounds()
+    assert cube.coord("depth").has_bounds()
+    assert cube.coord('latitude').coord_system is None
+    assert cube.coord('longitude').coord_system is None
+
+    cube_2.coord("depth").bounds = [[0., 2.5], [2.5, 25.], [25., 250.]]
+    cube_2 = iris.util.reverse(cube_2, "latitude")
+    np.testing.assert_allclose(cube_2.coord('latitude').points, [2.5, 1.5])
+    cube_2 = utils.fix_coords(
+        cube_2,
+        overwrite_time_bounds=False,
+        overwrite_lon_bounds=False,
+        overwrite_lat_bounds=False,
+        overwrite_lev_bounds=False,
+    )
+    assert cube_2.coord("time").bounds[0][1] == 30.
+    assert cube_2.coord("longitude").bounds[1][1] == 360.0
+    assert cube_2.coord("latitude").bounds[1][1] == 3.
+    assert cube_2.coord("depth").bounds[1][1] == 25.
+    np.testing.assert_allclose(cube_2.coord('latitude').points, [1.5, 2.5])
+
+
+def test_fix_var_metadata():
+    """Test fixing the variable metadata."""
+    cube = _create_sample_cube()
+    cube.var_name = "cows"
+    cube.long_name = "flying cows"
+    cube.units = "m"
+    var_info = {
+        "short_name": "tas",
+        "frequency": "mon",
+        "modeling_realm": "atmos",
+        "standard_name": "air_temperature",
+        "units": "K",
+        "cell_methods": "area: time: mean",
+        "cell_measures": "area: areacella",
+        "long_name": "Near-Surface Air Temperature",
+        "comment": "near-surface (usually, 2 meter) air temperature",
+        "dimensions": "longitude latitude time height2m",
+        "out_name": "tas",
+        "type": "real",
+        "positive": "",
+        "valid_min": "",
+        "valid_max": "",
+        "ok_min_mean_abs": "",
+        "ok_max_mean_abs": ""
+    }
+    var_info = mock_var_info(var_info)
+    utils.fix_var_metadata(cube, var_info)
+    assert cube.var_name == "tas"
+    assert cube.long_name == "Near-Surface Air Temperature"
+    assert cube.units == "K"
+    assert cube.standard_name == "air_temperature"
+
+
+def test_set_global_atts_correct():
+    """Test setting global attributes with a complete configuration."""
+    cube = _create_sample_cube()
+    global_attrs = {
+        'dataset_id': '1',
+        'version': '2',
+        'tier': '3',
+        'source': '4',
+        'reference': 'acknow_author',
+        'comment': '6',
+        'project_id': '7',
+    }
+    utils.set_global_atts(cube, global_attrs)
+    attrs = cube.attributes
+    assert '1 ' in attrs['title']
+    assert attrs['version'] == '2'
+    assert attrs['tier'] == '3'
+    assert attrs['source'] == '4'
+    assert attrs['reference'] == 'doi not found'
+    assert attrs['comment'] == '6'
+    assert attrs['project_id'] == '7'
+
+
+def test_set_global_atts_incorrect():
+    """Test that a missing 'dataset_id' attribute raises KeyError."""
+    cube = _create_sample_cube()
+    global_attrs = {
+        'version': '2',
+        'tier': '3',
+        'source': '4',
+        'reference': 'acknow_author',
+        'comment': '6',
+        'project_id': '7',
+    }
+    msg = (
+        "All CMORized datasets need the global attributes 'dataset_id', "
+        "'version', 'tier', 'source', 'reference', 'comment' and "
+        "'project_id' specified in the configuration file")
+    with pytest.raises(KeyError) as key_err:
+        utils.set_global_atts(cube, global_attrs)
+    assert msg in str(key_err.value)
+
+
+def test_flip_dim_coord():
+    """Test flip dimensional coordinate."""
+    cube = _create_sample_cube()
+    assert cube.data[1, 1, 1, 1] == 22.
+    utils.flip_dim_coord(cube, "latitude")
+    assert cube.data[1, 1, 0, 1] == 22.
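# --- Editorial aside, not part of this changeset ---
# A minimal, standalone sketch of the cf-units calendar behaviour noted in
# the comment inside test_fix_coords above: depending on the installed
# cf-units version, a unit created with calendar='gregorian' may report its
# calendar as 'standard', which is why the test accepts both spellings.
from cf_units import Unit

unit = Unit('days since 1950-01-01', calendar='gregorian')
# cf-units <= 3.1.0 reports 'gregorian'; later versions report 'standard'.
assert unit.calendar in ('standard', 'gregorian')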
+ + +def test_read_cmor_config(): + """Test the cmor table reading.""" + cfg = utils.read_cmor_config("WOA") + assert cfg['attributes']['dataset_id'] == 'WOA' + assert 'thetao' in cfg['variables'] + assert 'Omon' in cfg['cmor_table'].tables + assert 'thetao' in cfg['cmor_table'].tables['Omon'] diff --git a/tests/unit/data_finder/test_get_start_end_year.py b/tests/unit/data_finder/test_get_start_end_year.py deleted file mode 100644 index 348018c4e5..0000000000 --- a/tests/unit/data_finder/test_get_start_end_year.py +++ /dev/null @@ -1,76 +0,0 @@ -"""Unit tests for :func:`esmvaltool._data_finder.regrid._stock_cube`""" - -import unittest - -from esmvaltool._data_finder import get_start_end_year - - -class TestGetStartEndYear(unittest.TestCase): - """Tests for get_start_end_year function""" - - def test_years_at_the_end(self): - """Test parse files with two years at the end""" - start, end = get_start_end_year('var_whatever_1980-1981') - self.assertEqual(1980, start) - self.assertEqual(1981, end) - - def test_one_year_at_the_end(self): - """Test parse files with one year at the end""" - start, end = get_start_end_year('var_whatever_1980.nc') - self.assertEqual(1980, start) - self.assertEqual(1980, end) - - def test_full_dates_at_the_end(self): - """Test parse files with two dates at the end""" - start, end = get_start_end_year('var_whatever_19800101-19811231.nc') - self.assertEqual(1980, start) - self.assertEqual(1981, end) - - def test_one_fulldate_at_the_end(self): - """Test parse files with one date at the end""" - start, end = get_start_end_year('var_whatever_19800101.nc') - self.assertEqual(1980, start) - self.assertEqual(1980, end) - - def test_years_at_the_start(self): - """Test parse files with two years at the start""" - start, end = get_start_end_year('1980-1981_var_whatever.nc') - self.assertEqual(1980, start) - self.assertEqual(1981, end) - - def test_one_year_at_the_start(self): - """Test parse files with one year at the start""" - start, end = get_start_end_year('1980_var_whatever.nc') - self.assertEqual(1980, start) - self.assertEqual(1980, end) - - def test_full_dates_at_the_start(self): - """Test parse files with two dates at the start""" - start, end = get_start_end_year('19800101-19811231_var_whatever.nc') - self.assertEqual(1980, start) - self.assertEqual(1981, end) - - def test_one_fulldate_at_the_start(self): - """Test parse files with one date at the start""" - start, end = get_start_end_year('19800101_var_whatever.nc') - self.assertEqual(1980, start) - self.assertEqual(1980, end) - - def test_start_and_date_in_name(self): - """Test parse one date at the start and one in experiment's name""" - start, end = get_start_end_year( - '19800101_var_control-1950_whatever.nc') - self.assertEqual(1980, start) - self.assertEqual(1980, end) - - def test_end_and_date_in_name(self): - """Test parse one date at the end and one in experiment's name""" - start, end = get_start_end_year( - 'var_control-1950_whatever_19800101.nc') - self.assertEqual(1980, start) - self.assertEqual(1980, end) - - def test_fails_if_no_date_present(self): - """Test raises if no date is present""" - with self.assertRaises(ValueError): - get_start_end_year('var_whatever') diff --git a/tests/integration/cmor/_fixes/PRIMAVERA/__init__.py b/tests/unit/diag_scripts/__init__.py similarity index 100% rename from tests/integration/cmor/_fixes/PRIMAVERA/__init__.py rename to tests/unit/diag_scripts/__init__.py diff --git a/tests/integration/cmor/_fixes/__init__.py b/tests/unit/diag_scripts/mlr/__init__.py similarity 
index 100% rename from tests/integration/cmor/_fixes/__init__.py rename to tests/unit/diag_scripts/mlr/__init__.py diff --git a/tests/unit/diag_scripts/mlr/test_helpers.py b/tests/unit/diag_scripts/mlr/test_helpers.py new file mode 100644 index 0000000000..818a9daf74 --- /dev/null +++ b/tests/unit/diag_scripts/mlr/test_helpers.py @@ -0,0 +1,548 @@ +"""Unit tests for the module :mod:`esmvaltool.diag_scripts.mlr`.""" + +import os +from unittest import mock + +import iris +import iris.coords +import iris.cube +import numpy as np +import pandas as pd +import pytest +from cf_units import Unit + +from esmvaltool.diag_scripts import mlr +from esmvaltool.diag_scripts.mlr.models import MLRModel + + +@mock.patch('esmvaltool.diag_scripts.mlr.models.importlib', autospec=True) +@mock.patch('esmvaltool.diag_scripts.mlr.os.walk', autospec=True) +@mock.patch('esmvaltool.diag_scripts.mlr.os.path.dirname', autospec=True) +def test_load_mlr_models(mock_dirname, mock_walk, mock_importlib): + """Test for loading mlr models.""" + root_dir = '/root/to/something' + models = [ + (root_dir, ['dir', '__pycache__'], ['test.py', '__init__.py']), + (os.path.join(root_dir, 'root2'), ['d'], ['__init__.py', '42.py']), + (os.path.join(root_dir, 'root3'), [], []), + (os.path.join(root_dir, 'root4'), ['d2'], ['egg.py']), + ] + mock_dirname.return_value = root_dir + mock_walk.return_value = models + MLRModel._load_mlr_models() + modules = [ + 'esmvaltool.diag_scripts.mlr.models.{}'.format(mod) for mod in + ['test', '__init__', 'root2.__init__', 'root2.42', 'root4.egg'] + ] + calls = [mock.call(module) for module in modules] + mock_importlib.import_module.assert_has_calls(calls) + + +DF_1 = pd.DataFrame({'a': np.arange(5.0) - 2.0}) +DF_1_OUT = pd.DataFrame({'a': [-2.0, 0.0, 2.0]}, index=[0, 2, 4]) +DF_2 = pd.DataFrame({'b': [1.0, np.nan, 42.0, np.nan, 3.14]}) +DF_2_OUT = pd.DataFrame({'b': [1.0, 42.0, 3.14]}, index=[0, 2, 4]) +DF_3 = pd.DataFrame({'c': np.arange(5.0) + 1.0, 'd': np.arange(5.0) - 2.0}) +DF_3_OUT = pd.DataFrame({'c': [1.0, 3.0, 5.0], 'd': [-2.0, 0.0, 2.0]}, + index=[0, 2, 4]) +TEST_REMOVE_MISSING_LABELS = [ + ([DF_1, DF_1, None], [DF_1, DF_1, None], 0), + ([DF_1, DF_2, None], [DF_1_OUT, DF_2_OUT, None], 2), + ([DF_2, DF_1, None], [DF_2, DF_1, None], 0), + ([DF_2, DF_2, None], [DF_2_OUT, DF_2_OUT, None], 2), + ([DF_3, DF_1, None], [DF_3, DF_1, None], 0), + ([DF_3, DF_2, None], [DF_3_OUT, DF_2_OUT, None], 2), + ([DF_1, DF_1, DF_1], [DF_1, DF_1, DF_1], 0), + ([DF_1, DF_2, DF_1], [DF_1_OUT, DF_2_OUT, DF_1_OUT], 2), + ([DF_2, DF_1, DF_2], [DF_2, DF_1, DF_2], 0), + ([DF_2, DF_2, DF_2], [DF_2_OUT, DF_2_OUT, DF_2_OUT], 2), + ([DF_3, DF_1, DF_1], [DF_3, DF_1, DF_1], 0), + ([DF_3, DF_2, DF_1], [DF_3_OUT, DF_2_OUT, DF_1_OUT], 2), +] + + +@pytest.mark.parametrize('df_in,df_out,logger', TEST_REMOVE_MISSING_LABELS) +@mock.patch('esmvaltool.diag_scripts.mlr.models.logger', autospec=True) +def test_remove_missing_labels(mock_logger, df_in, df_out, logger): + """Test removing of missing label data.""" + out = MLRModel._remove_missing_labels(*df_in) + assert out is not df_in + for (idx, df) in enumerate(df_out): + if df is None: + assert out[idx] is None + else: + assert df.equals(out[idx]) + if logger: + assert logger in mock_logger.info.call_args[0] + else: + mock_logger.info.assert_not_called() + + +TEST_CHECK_PREDICT_KWARGS = [ + ({'a': 1}, True), + ({'return_var': False}, True), + ({'return_var': False, 'a': 1}, True), + ({'return_var': True, 'a': 1}, True), + ({'return_cov': False}, True), + ({'return_cov': False, 'a': 1}, 
True), + ({'return_cov': True, 'a': 1}, True), + ({'return_var': True, 'return_cov': False}, True), + ({'return_var': True, 'return_cov': False, 'a': 1}, True), + ({'return_var': False, 'return_cov': True}, True), + ({'return_var': False, 'return_cov': True, 'a': 1}, True), + ({'return_var': True, 'return_cov': True}, RuntimeError), + ({'return_var': True, 'return_cov': True, 'a': 1}, RuntimeError), +] + + +@pytest.mark.parametrize('kwargs,output', TEST_CHECK_PREDICT_KWARGS) +def test_check_predict_kwargs(kwargs, output): + """Test for check of predict kwargs.""" + if isinstance(output, type): + with pytest.raises(output): + mlr.check_predict_kwargs(kwargs) + return + assert mlr.check_predict_kwargs(kwargs) is None + + +TEST_UNITS_POWER = [ + (Unit('m'), 2.5, TypeError, False), + (Unit(''), 1, ValueError, True), + (Unit('no unit'), 1, ValueError, True), + (Unit('2.0 m s-1'), 3, Unit('2.0 m s-1')**3, True), + (Unit('m')**2, 2, Unit('m')**4, True), + (Unit('m')**2, 0, Unit('m')**0, True), + (Unit('m')**2, -3, Unit('m')**-6, True), + (Unit('m'), 2, Unit('m2'), False), + (Unit('m'), 0, Unit('m0'), False), + (Unit('m'), -3, Unit('m-3'), False), + (Unit('kg m'), 2, Unit('kg2 m2'), False), + (Unit('kg m'), 0, Unit('kg0 m0'), False), + (Unit('kg m'), -3, Unit('kg-3 m-3'), False), + (Unit('kg.m'), 2, Unit('kg2 m2'), False), + (Unit('kg.m'), 0, Unit('kg0 m0'), False), + (Unit('kg.m'), -3, Unit('kg-3 m-3'), False), + (Unit('kg m2'), 2, Unit('kg2 m4'), False), + (Unit('kg m2'), 0, Unit('kg0 m0'), False), + (Unit('kg m2'), -3, Unit('kg-3 m-6'), False), + (Unit('kg.m2'), 2, Unit('kg2 m4'), False), + (Unit('kg.m2'), 0, Unit('kg0 m0'), False), + (Unit('kg.m2'), -3, Unit('kg-3 m-6'), False), + (Unit('kg80 m-10'), 2, Unit('kg160 m-20'), False), + (Unit('kg80 m-10'), 0, Unit('kg0 m0'), False), + (Unit('kg80 m-10'), -3, Unit('kg-240 m30'), False), + (Unit('kg80.m-10'), 2, Unit('kg160 m-20'), False), + (Unit('kg80.m-10'), 0, Unit('kg0 m0'), False), + (Unit('kg80.m-10'), -3, Unit('kg-240 m30'), False), + (Unit('W m-2 K-1'), 2, Unit('W2 m-4 K-2'), False), + (Unit('W m-2 K-1'), 0, Unit('W0 m0 K0'), False), + (Unit('W m-2 K-1'), -3, Unit('W-3 m6 K3'), False), + (Unit('W m-2.K-1'), 2, Unit('W2 m-4 K-2'), False), + (Unit('W m-2.K-1'), 0, Unit('W0 m0 K0'), False), + (Unit('W m-2.K-1'), -3, Unit('W-3 m6 K3'), False), + (Unit('kg yr-1'), 2, Unit('kg2 yr-2'), False), + (Unit('kg yr-1'), 0, Unit('kg0 yr0'), False), + (Unit('kg yr-1'), -3, Unit('kg-3 yr3'), False), + (Unit('kg.yr-1'), 2, Unit('kg2 yr-2'), False), + (Unit('kg.yr-1'), 0, Unit('kg0 yr0'), False), + (Unit('kg.yr-1'), -3, Unit('kg-3 yr3'), False), +] + + +@pytest.mark.parametrize('units_in,power,output,logger', TEST_UNITS_POWER) +@mock.patch.object(mlr, 'logger', autospec=True) +def test_units_power(mock_logger, units_in, power, output, logger): + """Test exponentiation of :mod:`cf_units.Unit`.""" + if isinstance(output, type): + with pytest.raises(output): + mlr.units_power(units_in, power) + return + new_units = mlr.units_power(units_in, power) + assert new_units == output + assert new_units.origin == output.origin + if logger: + mock_logger.warning.assert_called_once() + else: + mock_logger.warning.assert_not_called() + + +DATASET = { + 'dataset': 'TEST', + 'exp': 'iceage', + 'filename': 'path/to/file', + 'project': 'CMIP4', +} +TEST_CREATE_ALIAS = [ + ([], None, ValueError), + ([], 'x', ValueError), + (['no'], None, AttributeError), + (['no'], 'x', AttributeError), + (['dataset'], None, 'TEST'), + (['dataset'], 'x', 'TEST'), + (['dataset', 'project'], 
None, 'TEST-CMIP4'), + (['dataset', 'project'], 'x', 'TESTxCMIP4'), +] + + +@pytest.mark.parametrize('attrs,delim,output', TEST_CREATE_ALIAS) +def test_create_alias(attrs, delim, output): + """Test alias creation.""" + kwargs = {} + if delim is not None: + kwargs['delimiter'] = delim + if isinstance(output, type): + with pytest.raises(output): + mlr.create_alias(DATASET, attrs, **kwargs) + return + alias = mlr.create_alias(DATASET, attrs, **kwargs) + assert alias == output + + +METADATA_IN = [iris.cube.CubeMetadata(*x) for x in [ + ('air_temperature', 'Long', 'var', 'kg2', {}, None), + ('air_temperature', 'squared Long', 'var', 'kg2', {'squared': 1}, None), + ('air_temperature', 'Squared Long', 'var', 'kg2', {}, None), + ('air_temperature', 'squaredLong', 'var', 'kg2', {'squared': 1}, None), + ('air_temperature', 'SquaredLong', 'var', 'kg2', {}, None), + ('air_temperature', 'Long squared', 'var', 'kg2', {}, None), + ('air_temperature', 'Long Squared', 'var', 'kg2', {'squared': 1}, None), + ('air_temperature', 'Long (squared)', 'var', 'kg2', {}, None), + ('air_temperature', 'Long (Squared)', 'var', 'kg2', {}, None), + ('air_temperature', 'Long (squared test)', 'var', 'kg2', {}, None), + ('air_temperature', 'Long (Squared test)', 'var', 'kg2', {}, None), + ('air_temperature', 'squared Long (squared)', 'var', 'kg2', {}, None), + ('air_temperature', 'Squared Long (Squared)', 'var', 'kg2', {}, None), + ('air_temperature', 'Long squared', 'squared_var', 'kg2', {}, None), + ('air_temperature', 'Long Squared', 'var_squared', 'kg2', {}, None), + ('air_temperature', 'Long', 'squared_var_squared', 'kg2', {}, None), +]] +METADATA_OUT = [iris.cube.CubeMetadata(*x) for x in [ + ('air_temperature', 'Root Long', 'root_var', 'kg', {}, None), + ('air_temperature', 'Long', 'root_var', 'kg', {}, None), + ('air_temperature', 'Long', 'root_var', 'kg', {}, None), + ('air_temperature', 'Root squaredLong', 'root_var', 'kg', {}, None), + ('air_temperature', 'Root SquaredLong', 'root_var', 'kg', {}, None), + ('air_temperature', 'Long', 'root_var', 'kg', {}, None), + ('air_temperature', 'Long', 'root_var', 'kg', {}, None), + ('air_temperature', 'Long', 'root_var', 'kg', {}, None), + ('air_temperature', 'Long', 'root_var', 'kg', {}, None), + ('air_temperature', 'Long (test)', 'root_var', 'kg', {}, None), + ('air_temperature', 'Long (test)', 'root_var', 'kg', {}, None), + ('air_temperature', 'Long (squared)', 'root_var', 'kg', {}, None), + ('air_temperature', 'Long (Squared)', 'root_var', 'kg', {}, None), + ('air_temperature', 'Long', 'var', 'kg', {}, None), + ('air_temperature', 'Long', 'var', 'kg', {}, None), + ('air_temperature', 'Root Long', 'var_squared', 'kg', {}, None), +]] +TEST_SQUARE_ROOT_METADATA = [ + (iris.cube.Cube(0, **METADATA_IN[idx]._asdict()), + iris.cube.Cube(0, **METADATA_OUT[idx]._asdict())) for idx in + range(len(METADATA_IN)) +] + + +@pytest.mark.parametrize('cube_in,cube_out', TEST_SQUARE_ROOT_METADATA) +def test_square_root_metadata(cube_in, cube_out): + """Test taking square root of cube metadata.""" + assert cube_in != cube_out + assert cube_in is not cube_out + mlr.square_root_metadata(cube_in) + assert cube_in == cube_out + + +D_1 = { + 'dataset': 'c', + 'filename': 'b', + 'long_name': 'e', + 'project': 'a', + 'short_name': 'd', + 'tag': 'g', + 'var_name': 'f', + 'var_type': 'label', + 'units': 'kg', +} +D_2 = D_1.copy() +D_2.pop('project') +D_2['short_name'] = 'xx' +D_3 = D_1.copy() +D_3['var_type'] = 'wrong var_type' +D_4 = D_3.copy() +D_4.pop('project') +TEST_MLR_ATTRS = [ + ([], 
'wrong_mode', ValueError), + ([], 'full', 0), + ([], 'only_missing', 0), + ([], 'only_var_type', 0), + ([D_1, D_1], 'wrong_mode', ValueError), + ([D_1, D_1], 'full', 0), + ([D_1, D_1], 'only_missing', 0), + ([D_1, D_1], 'only_var_type', 0), + ([D_1, D_2], 'wrong_mode', ValueError), + ([D_1, D_2], 'full', 1), + ([D_1, D_2], 'only_missing', 1), + ([D_1, D_2], 'only_var_type', 0), + ([D_1, D_3], 'wrong_mode', ValueError), + ([D_1, D_3], 'full', 1), + ([D_1, D_3], 'only_missing', 0), + ([D_1, D_3], 'only_var_type', 1), + ([D_1, D_4], 'wrong_mode', ValueError), + ([D_1, D_4], 'full', 2), + ([D_1, D_4], 'only_missing', 1), + ([D_1, D_4], 'only_var_type', 1), + ([D_1, D_2, D_3, D_4], 'wrong_mode', ValueError), + ([D_1, D_2, D_3, D_4], 'full', 4), + ([D_1, D_2, D_3, D_4], 'only_missing', 2), + ([D_1, D_2, D_3, D_4], 'only_var_type', 2), +] + + +@pytest.mark.parametrize('datasets,mode,output', TEST_MLR_ATTRS) +@mock.patch('esmvaltool.diag_scripts.mlr.logger', autospec=True) +def test_datasets_have_mlr_attributes(mock_logger, datasets, mode, output): + """Test checker of dataset attributes.""" + for log_level in ('debug', 'info', 'warning', 'error'): + if isinstance(output, type): + with pytest.raises(output): + mlr.datasets_have_mlr_attributes(datasets, log_level=log_level, + mode=mode) + return + out = mlr.datasets_have_mlr_attributes(datasets, log_level=log_level, + mode=mode) + if output == 0: + assert out is True + else: + assert out is False + assert getattr(mock_logger, log_level).call_count == output + + +KWARGS_1 = {'dataset': 'c'} +KWARGS_2 = {'dataset': 'c', 'short_name': 'xx'} +KWARGS_3 = {'project': None} +KWARGS_4 = {'project': None, 'var_type': 'wrong var_type'} +TEST_GET_DATASETS = [ + ([D_1, D_1], {}, [D_1, D_1]), + ([D_1, D_1], KWARGS_1, [D_1, D_1]), + ([D_1, D_1], KWARGS_2, []), + ([D_1, D_1], KWARGS_3, []), + ([D_1, D_1], KWARGS_4, []), + ([D_1, D_2], {}, [D_1, D_2]), + ([D_1, D_2], KWARGS_1, [D_1, D_2]), + ([D_1, D_2], KWARGS_2, [D_2]), + ([D_1, D_2], KWARGS_3, [D_2]), + ([D_1, D_2], KWARGS_4, []), + ([D_3, D_4], {}, [D_3, D_4]), + ([D_3, D_4], KWARGS_1, [D_3, D_4]), + ([D_3, D_4], KWARGS_2, []), + ([D_3, D_4], KWARGS_3, [D_4]), + ([D_3, D_4], KWARGS_4, [D_4]), + ([D_1, D_2, D_3, D_4], {}, [D_1, D_2, D_3, D_4]), + ([D_1, D_2, D_3, D_4], KWARGS_1, [D_1, D_2, D_3, D_4]), + ([D_1, D_2, D_3, D_4], KWARGS_2, [D_2]), + ([D_1, D_2, D_3, D_4], KWARGS_3, [D_2, D_4]), + ([D_1, D_2, D_3, D_4], KWARGS_4, [D_4]), +] + + +@pytest.mark.parametrize('input_data,kwargs,output', TEST_GET_DATASETS) +def test_get_datasets(input_data, kwargs, output): + """Test dataset retrieving according to ``**kwargs``.""" + datasets = mlr._get_datasets(input_data, **kwargs) + assert datasets == output + + +CFG_0 = {'input_data': {}} +CFG_1 = {'input_data': {'1': D_1, '2': D_1}} +CFG_2 = {'input_data': {'1': D_1, '2': D_2}} +CFG_3 = {'input_data': {'1': D_1, '3': D_3}} +CFG_4 = {'input_data': {'1': D_1, '2': D_2, '3': D_3}} +IGNORE = [ + {'dataset': 'c', 'short_name': 'd', 'var_type': 'label'}, + {'project': None}, +] +TEST_GET_INPUT_DATA = [ + (CFG_0, [], True, None, ValueError, 0), + (CFG_0, [], True, IGNORE, ValueError, 0), + (CFG_0, [], False, None, ValueError, 0), + (CFG_0, [], False, IGNORE, ValueError, 0), + (CFG_0, [D_1], True, None, [D_1], 0), + (CFG_0, [D_1], True, IGNORE, ValueError, 0), + (CFG_0, [D_1], False, None, [D_1], 0), + (CFG_0, [D_1], False, IGNORE, ValueError, 0), + (CFG_1, [], True, None, [D_1, D_1], 0), + (CFG_1, [], True, IGNORE, ValueError, 0), + (CFG_1, [], False, None, [D_1, D_1], 0), + 
(CFG_1, [], False, IGNORE, ValueError, 0), + (CFG_1, [D_1], True, None, [D_1, D_1, D_1], 0), + (CFG_1, [D_1], True, IGNORE, ValueError, 0), + (CFG_1, [D_1], False, None, [D_1, D_1, D_1], 0), + (CFG_1, [D_1], False, IGNORE, ValueError, 0), + (CFG_2, [], True, None, ValueError, 1), + (CFG_2, [], True, IGNORE, ValueError, 0), + (CFG_2, [], False, None, [D_1, D_2], 0), + (CFG_2, [], False, IGNORE, ValueError, 0), + (CFG_2, [D_1], True, None, ValueError, 1), + (CFG_2, [D_1], True, IGNORE, ValueError, 0), + (CFG_2, [D_1], False, None, [D_1, D_2, D_1], 0), + (CFG_2, [D_1], False, IGNORE, ValueError, 0), + (CFG_3, [], True, None, ValueError, 1), + (CFG_3, [], True, IGNORE, ValueError, 1), + (CFG_3, [], False, None, [D_1, D_3], 0), + (CFG_3, [], False, IGNORE, [D_3], 0), + (CFG_3, [D_1], True, None, ValueError, 1), + (CFG_3, [D_1], True, IGNORE, ValueError, 1), + (CFG_3, [D_1], False, None, [D_1, D_1, D_3], 0), + (CFG_3, [D_1], False, IGNORE, [D_3], 0), + (CFG_4, [], True, None, ValueError, 2), + (CFG_4, [], True, IGNORE, ValueError, 1), + (CFG_4, [], False, None, [D_1, D_2, D_3], 0), + (CFG_4, [], False, IGNORE, [D_3], 0), + (CFG_4, [D_1], True, None, ValueError, 2), + (CFG_4, [D_1], True, IGNORE, ValueError, 1), + (CFG_4, [D_1], False, None, [D_1, D_2, D_1, D_3], 0), + (CFG_4, [D_1], False, IGNORE, [D_3], 0), +] + + +@pytest.mark.parametrize( + 'cfg,ancestors,check_mlr_attrs,ignore,output,n_logger', + TEST_GET_INPUT_DATA) +@mock.patch('esmvaltool.diag_scripts.mlr.io.netcdf_to_metadata', autospec=True) +@mock.patch('esmvaltool.diag_scripts.mlr.logger', autospec=True) +def test_get_input_data(mock_logger, mock_netcdf_to_metadata, cfg, ancestors, + check_mlr_attrs, ignore, output, n_logger): + """Test retrieving of input data.""" + mock_netcdf_to_metadata.return_value = ancestors + if isinstance(output, type): + with pytest.raises(output): + mlr.get_input_data(cfg, + check_mlr_attributes=check_mlr_attrs, + ignore=ignore) + assert mock_logger.error.call_count == n_logger + return + input_data = mlr.get_input_data(cfg, + check_mlr_attributes=check_mlr_attrs, + ignore=ignore) + assert input_data == output + if ignore is not None: + mock_logger.info.assert_called_once() + else: + mock_logger.info.assert_not_called() + + +LAT_COORD_0D = iris.coords.AuxCoord(0, bounds=[-10, 10], + standard_name='latitude', + units='degrees') +LON_COORD_0D = iris.coords.AuxCoord(0, bounds=[-10, 10], + standard_name='longitude', + units='degrees') +LAT_COORD_1D = iris.coords.DimCoord([-10, 50, 60], + bounds=[[-20, 0], [45, 55], [55, 65]], + standard_name='latitude', + units='degrees') +LAT_COORD_1D_2 = iris.coords.DimCoord([70, 220], + bounds=[[60, 70], [215, 225]], + standard_name='latitude', + units='degrees') +LON_COORD_1D = iris.coords.DimCoord([70, 220], + bounds=[[60, 70], [215, 225]], + standard_name='longitude', + units='degrees') +LAT_COORD_2D = iris.coords.AuxCoord([[0, 2]], bounds=[[[-1, 1], [1, 3]]], + standard_name='latitude', + units='degrees') +TIME_COORD = iris.coords.DimCoord([1, 3], bounds=[[0, 2], [2, 4]], + standard_name='time', + units='days since 1850-01-01 00:00:00') +CUBE_0 = iris.cube.Cube(0.0) +CUBE_1 = iris.cube.Cube( + [0.0, 1.0], aux_coords_and_dims=[(LAT_COORD_1D_2, 0), (LON_COORD_1D, 0)]) +CUBE_0_0 = iris.cube.Cube( + 0.0, aux_coords_and_dims=[(LAT_COORD_0D, ()), (LON_COORD_0D, ())]) +CUBE_0_1 = iris.cube.Cube( + np.arange(2), dim_coords_and_dims=[(LON_COORD_1D, 0)], + aux_coords_and_dims=[(LAT_COORD_0D, ())]) +CUBE_1_0 = iris.cube.Cube( + np.arange(3), dim_coords_and_dims=[(LAT_COORD_1D, 0)], 
+ aux_coords_and_dims=[(LON_COORD_0D, ())]) +CUBE_1_1 = iris.cube.Cube( + np.arange(3 * 2).reshape(3, 2), + dim_coords_and_dims=[(LAT_COORD_1D, 0), (LON_COORD_1D, 1)]) +CUBE_1_1_1 = iris.cube.Cube( + np.arange(2 * 3 * 2).reshape(2, 3, 2), + dim_coords_and_dims=[(TIME_COORD, 0), (LAT_COORD_1D, 1), + (LON_COORD_1D, 2)]) +CUBE_2_1 = iris.cube.Cube( + np.arange(1 * 2).reshape(1, 2), dim_coords_and_dims=[(LON_COORD_1D, 1)], + aux_coords_and_dims=[(LAT_COORD_2D, (0, 1))]) + + +TEST_LANDSEA_FRACTION_WEIGHTING = [ + (CUBE_0, 'land', False, iris.exceptions.CoordinateNotFoundError), + (CUBE_1, 'land', False, ValueError), + (CUBE_2_1, 'land', False, iris.exceptions.CoordinateMultiDimError), + (CUBE_1_1, 'wrong_type', False, ValueError), + (CUBE_0_0, 'land', False, 0.25350765306122447), + (CUBE_0_1, 'land', False, [0.0, 0.000487013]), + (CUBE_1_0, 'land', False, [0.0051480051, 0.575, 0.1851084184]), + (CUBE_1_1, 'land', False, [[0.0, 6.55200655e-04], + [1.0, 0.0], + [1.0, 5.66883117e-01]]), + (CUBE_1_1_1, 'land', False, [[[0.0, 6.55200655e-04], + [1.0, 0.0], + [1.0, 5.66883117e-01]], + [[0.0, 6.55200655e-04], + [1.0, 0.0], + [1.0, 5.66883117e-01]]]), + (CUBE_0_0, 'sea', False, 0.7464923469387755), + (CUBE_0_1, 'sea', False, [1.0, 0.99951299]), + (CUBE_1_0, 'sea', False, [0.99485199, 0.425, 0.81489158]), + (CUBE_1_1, 'sea', False, [[1.0, 0.9993448], + [0.0, 1.0], + [0.0, 0.43311688]]), + (CUBE_1_1_1, 'sea', False, [[[1.0, 0.9993448], + [0.0, 1.0], + [0.0, 0.43311688]], + [[1.0, 0.9993448], + [0.0, 1.0], + [0.0, 0.43311688]]]), + (CUBE_0_0, 'land', True, 1.0), + (CUBE_0_1, 'land', True, [0.0, 1.0]), + (CUBE_1_0, 'land', True, [0.0067271636, 0.751382128, 0.2418907084]), + (CUBE_1_1, 'land', True, [[0.0, 2.5518632019e-04], + [3.8947812119e-01, 0.0], + [3.8947812119e-01, 2.2078857130e-01]]), + (CUBE_1_1_1, 'land', True, [[[0.0, 2.5518632019e-04], + [3.8947812119e-01, 0.0], + [3.8947812119e-01, 2.2078857130e-01]], + [[0.0, 2.5518632019e-04], + [3.8947812119e-01, 0.0], + [3.8947812119e-01, 2.2078857130e-01]]]), + (CUBE_0_0, 'sea', True, 1.0), + (CUBE_0_1, 'sea', True, [0.5001217829, 0.4998782171]), + (CUBE_1_0, 'sea', True, [0.4451750104, 0.1901784189, 0.3646465707]), + (CUBE_1_1, 'sea', True, [[0.2913361, 0.2911452164], + [0.0, 0.2913361], + [0.0, 0.1261825836]]), + (CUBE_1_1_1, 'sea', True, [[[0.2913361, 0.2911452164], + [0.0, 0.2913361], + [0.0, 0.1261825836]], + [[0.2913361, 0.2911452164], + [0.0, 0.2913361], + [0.0, 0.1261825836]]]), +] + + +@pytest.mark.parametrize('cube,area_type,normalize,output', + TEST_LANDSEA_FRACTION_WEIGHTING) +def test_landsea_fraction_weighting(cube, area_type, normalize, output): + """Test landsea fraction weighting.""" + cube = cube.copy() + if isinstance(output, type): + with pytest.raises(output): + mlr.get_landsea_fraction_weights(cube, area_type, + normalize=normalize) + return + weights = mlr.get_landsea_fraction_weights(cube, area_type, + normalize=normalize) + assert weights.shape == cube.shape + np.testing.assert_allclose(weights, output) diff --git a/tests/unit/diag_scripts/mlr/test_preprocess.py b/tests/unit/diag_scripts/mlr/test_preprocess.py new file mode 100644 index 0000000000..ed8bfd0647 --- /dev/null +++ b/tests/unit/diag_scripts/mlr/test_preprocess.py @@ -0,0 +1,43 @@ +"""Unit tests for the module :mod:`esmvaltool.diag_scripts.mlr.preprocess`.""" + +import numpy as np +import pytest + +import esmvaltool.diag_scripts.mlr.preprocess as preprocess + +X_ARR = np.arange(5) +TEST_GET_SLOPE = [ + (X_ARR, 3.14 * X_ARR, 3.14), + (np.arange(1.0), np.arange(1.0), 
np.nan),
+    (X_ARR, np.ma.masked_invalid([np.nan, 1.0, np.nan, 1.5, np.nan]), 0.25),
+]
+
+
+@pytest.mark.parametrize('x_arr,y_arr,output', TEST_GET_SLOPE)
+def test_get_slope(x_arr, y_arr, output):
+    """Test calculation of slope."""
+    out = preprocess._get_slope(x_arr, y_arr)
+    assert ((out == output) | (np.isnan(out) & np.isnan(output))).all()
+
+
+Y_ARR_1 = np.ma.masked_invalid([np.nan, 1.0, 0.0, np.nan, -0.5])
+Y_ARR_2 = np.ma.masked_invalid([np.nan, np.nan, np.nan, np.nan, -0.5])
+Y_ARR_2x2 = np.ma.masked_invalid(
+    [[2.1 * X_ARR, -3.14 * X_ARR, 0.8 * X_ARR],
+     [Y_ARR_1.filled(np.nan),
+      Y_ARR_1.filled(2.0),
+      Y_ARR_2.filled(np.nan)]])
+TEST_GET_SLOPE_VECTORIZED = [
+    (X_ARR, Y_ARR_2x2,
+     np.array([[2.1, -3.14, 0.8], [-0.46428571428571436, -0.4, np.nan]])),
+    (X_ARR, np.ma.array([X_ARR, Y_ARR_2]), np.array([1.0, np.nan])),
+]
+
+
+@pytest.mark.parametrize('x_arr,y_arr,output', TEST_GET_SLOPE_VECTORIZED)
+def test_get_slope_vectorized(x_arr, y_arr, output):
+    """Test vectorized calculation of slope."""
+    get_slope = np.vectorize(preprocess._get_slope, excluded=['x_arr'],
+                             signature='(n),(n)->()')
+    out = get_slope(x_arr, y_arr)
+    assert (np.isclose(out, output) | (np.isnan(out) & np.isnan(output))).all()
diff --git a/tests/integration/preprocessor/__init__.py b/tests/unit/diag_scripts/shared/__init__.py
similarity index 100%
rename from tests/integration/preprocessor/__init__.py
rename to tests/unit/diag_scripts/shared/__init__.py
diff --git a/tests/integration/preprocessor/_derive/__init__.py b/tests/unit/diag_scripts/shared/configs/__init__.py
similarity index 100%
rename from tests/integration/preprocessor/_derive/__init__.py
rename to tests/unit/diag_scripts/shared/configs/__init__.py
diff --git a/tests/unit/diag_scripts/shared/configs/test_io.yml b/tests/unit/diag_scripts/shared/configs/test_io.yml
new file mode 100644
index 0000000000..0487b39a81
--- /dev/null
+++ b/tests/unit/diag_scripts/shared/configs/test_io.yml
@@ -0,0 +1,207 @@
+_has_necessary_attributes:
+  - input:
+      - dataset: model
+        filename: path/to/model
+        long_name: 'Loooong name'
+        project: CMIP42
+        short_name: var
+        units: KiB
+      - dataset: model
+        filename: path/to/model
+        long_name: 'Loooong name'
+        project: CMIP42
+        short_name: var
+        units: KiB
+    output: true
+    n_logger: 0
+  - input:
+      - dataset: model
+        filename: path/to/model
+        long_name: 'Loooong name'
+        project: CMIP42
+        short_name: var
+        units: KiB
+    output: true
+    n_logger: 0
+  - input:
+      - filename: path/to/model
+        long_name: 'Loooong name'
+        project: CMIP42
+        short_name: var
+        units: KiB
+    output: false
+    n_logger: 1
+  - input:
+      - dataset: model
+        long_name: 'Loooong name'
+        project: CMIP42
+        short_name: var
+        units: KiB
+    output: false
+    n_logger: 1
+  - input:
+      - dataset: model
+        filename: path/to/model
+        project: CMIP42
+        short_name: var
+        units: KiB
+    output: false
+    n_logger: 1
+  - input:
+      - dataset: model
+        filename: path/to/model
+        long_name: 'Loooong name'
+        short_name: var
+        units: KiB
+    output: false
+    n_logger: 1
+  - input:
+      - dataset: model
+        filename: path/to/model
+        long_name: 'Loooong name'
+        project: CMIP42
+        units: KiB
+    output: false
+    n_logger: 1
+  - input:
+      - dataset: model
+        filename: path/to/model
+        long_name: 'Loooong name'
+        project: CMIP42
+        short_name: var
+    output: false
+    n_logger: 1
+  - input:
+      - dataset: model
+        long_name: 'Loooong name'
+        project: CMIP42
+        short_name: var
+        units: KiB
+      - dataset: model
+        filename: path/to/model
+        long_name: 'Loooong name'
+        project: CMIP42
+        short_name: var
+        units: KiB
+    output:
false + n_logger: 1 + - input: + - dataset: model + filename: path/to/model + long_name: 'Loooong name' + project: CMIP42 + short_name: var + units: KiB + - dataset: model + filename: path/to/model + long_name: 'Loooong name' + short_name: var + units: KiB + output: false + n_logger: 1 + - input: + - dataset: model + long_name: 'Loooong name' + project: CMIP42 + short_name: var + units: KiB + - dataset: model + filename: path/to/model + long_name: 'Loooong name' + project: CMIP42 + short_name: var + output: false + n_logger: 2 + - input: + - dataset: model + long_name: 'Loooong name' + project: CMIP42 + short_name: var + units: KiB + - dataset: model + filename: path/to/model + short_name: var + units: KiB + output: false + n_logger: 3 + - kwargs: + only_var_attrs: true + input: + - dataset: model + filename: path/to/model + long_name: 'Loooong name' + project: CMIP42 + short_name: var + units: KiB + - dataset: model + filename: path/to/model + long_name: 'Loooong name' + project: CMIP42 + short_name: var + units: KiB + output: true + n_logger: 0 + - kwargs: + only_var_attrs: true + input: + - dataset: model + filename: path/to/model + long_name: 'Loooong name' + project: CMIP42 + short_name: var + units: KiB + output: true + n_logger: 0 + - kwargs: + only_var_attrs: true + input: + - long_name: 'Loooong name' + short_name: var + units: KiB + - long_name: 'Loooong name' + units: KiB + output: false + n_logger: 1 + - kwargs: + only_var_attrs: true + input: + - long_name: 'Loooong name' + short_name: var + units: KiB + - long_name: 'Loooong name' + short_name: var + units: KiB + output: true + n_logger: 0 + - kwargs: + only_var_attrs: true + input: + - dataset: model + filename: path/to/model + project: CMIP42 + short_name: var + units: KiB + output: false + n_logger: 1 + - kwargs: + only_var_attrs: true + input: + - dataset: model + filename: path/to/model + long_name: 'Loooong name' + project: CMIP42 + short_name: var + units: KiB + - dataset: model + filename: path/to/model + long_name: 'Loooong name' + project: CMIP42 + short_name: var + output: false + n_logger: 1 + - kwargs: + only_var_attrs: true + input: + - {} + output: false + n_logger: 3 diff --git a/tests/unit/diag_scripts/shared/test_base.py b/tests/unit/diag_scripts/shared/test_base.py new file mode 100644 index 0000000000..ac03435a8c --- /dev/null +++ b/tests/unit/diag_scripts/shared/test_base.py @@ -0,0 +1,429 @@ +import logging +import sys +from pathlib import Path + +import pytest +import yaml + +from esmvaltool.diag_scripts import shared + + +def test_get_plot_filename(): + + cfg = { + 'plot_dir': '/some/path', + 'output_file_type': 'png', + } + filename = shared.get_plot_filename('test', cfg) + assert filename == '/some/path/test.png' + + +def test_get_diagnostic_filename(): + + cfg = { + 'work_dir': '/some/path', + } + filename = shared.get_diagnostic_filename('test', cfg) + assert filename == '/some/path/test.nc' + + +def test_get_diagnostic_filename_ext(): + + cfg = { + 'work_dir': '/some/path', + } + filename = shared.get_diagnostic_filename('test', cfg, extension='csv') + assert filename == '/some/path/test.csv' + + +def test_provenance_logger(tmp_path): + + record = {'attribute1': 'xyz'} + with shared.ProvenanceLogger({'run_dir': str(tmp_path)}) as prov: + prov.log('output.nc', record) + + provenance = yaml.safe_load( + (tmp_path / 'diagnostic_provenance.yml').read_bytes()) + + assert provenance == {'output.nc': record} + + +def test_provenance_logger_twice(tmp_path): + + record1 = {'attribute1': 'xyz'} + with 
shared.ProvenanceLogger({'run_dir': str(tmp_path)}) as prov: + prov.log('output1.nc', record1) + + record2 = {'attribute2': 'xyz'} + with shared.ProvenanceLogger({'run_dir': str(tmp_path)}) as prov: + prov.log('output2.nc', record2) + + provenance = yaml.safe_load( + (tmp_path / 'diagnostic_provenance.yml').read_bytes()) + + assert provenance == {'output1.nc': record1, 'output2.nc': record2} + + +def test_provenance_logger_duplicate_raises(tmp_path): + + record = {'attribute1': 'xyz'} + with shared.ProvenanceLogger({'run_dir': str(tmp_path)}) as prov: + prov.log('output.nc', record) + with pytest.raises(KeyError): + prov.log('output.nc', record) + + +def test_select_metadata(): + + metadata = [ + { + 'short_name': 'pr', + 'filename': 'test_pr.nc', + }, + { + 'short_name': 'ta', + 'filename': 'test_ta.nc', + }, + ] + + result = shared.select_metadata(metadata, short_name='ta') + + assert result == [{'short_name': 'ta', 'filename': 'test_ta.nc'}] + + +def test_group_metadata(): + + metadata = [ + { + 'short_name': 'pr', + 'filename': 'test_pr.nc', + }, + { + 'short_name': 'ta', + 'filename': 'test_ta.nc', + }, + ] + + result = shared.group_metadata(metadata, 'short_name') + + assert result == { + 'ta': [ + { + 'short_name': 'ta', + 'filename': 'test_ta.nc' + }, + ], + 'pr': [ + { + 'short_name': 'pr', + 'filename': 'test_pr.nc' + }, + ], + } + + +def test_group_metadata_sorted(): + + metadata = [ + { + 'short_name': 'ta', + 'dataset': 'dataset2', + }, + { + 'short_name': 'ta', + 'dataset': 'dataset1', + }, + ] + + result = shared.group_metadata(metadata, 'short_name', sort='dataset') + + assert result == { + 'ta': [ + { + 'short_name': 'ta', + 'dataset': 'dataset1' + }, + { + 'short_name': 'ta', + 'dataset': 'dataset2' + }, + ], + } + + +def test_group_metadata_sorted_true(): + + metadata = [ + { + 'short_name': 'ta', + }, + { + 'short_name': 'pr', + }, + ] + + result = shared.group_metadata(metadata, 'short_name', sort=True) + + assert result == { + 'pr': [ + { + 'short_name': 'pr', + }, + ], + 'ta': [ + { + 'short_name': 'ta', + }, + ], + } + + +def test_sorted_metadata(): + + metadata = [ + { + 'short_name': 'ta', + 'dataset': 'dataset2', + }, + { + 'short_name': 'pr', + 'dataset': 'dataset2', + 'random_attribute': 1, + }, + { + 'short_name': 'ta', + 'dataset': 'dataset1', + }, + ] + + result = shared.sorted_metadata(metadata, sort=['short_name', 'dataset']) + + assert result == [ + { + 'short_name': 'pr', + 'dataset': 'dataset2', + 'random_attribute': 1, + }, + { + 'short_name': 'ta', + 'dataset': 'dataset1' + }, + { + 'short_name': 'ta', + 'dataset': 'dataset2' + }, + ] + + +@pytest.mark.parametrize('as_iris', [True, False]) +def test_extract_variables(as_iris): + + cfg = { + 'input_data': { + 'file1.nc': { + 'short_name': 'ta', + 'standard_name': 'air_temperature', + 'long_name': 'Air Temperature', + 'units': 'K', + }, + 'file2.nc': { + 'short_name': 'ta', + 'standard_name': 'air_temperature', + 'long_name': 'Air Temperature', + }, + 'file3.nc': { + 'short_name': 'pr', + 'standard_name': 'precipitation_flux', + 'long_name': 'Precipitation', + 'extra_attribute': 1, + }, + 'file4.nc': { + 'short_name': 'toz', + 'standard_name': '', + 'long_name': 'Total Ozone Column', + }, + } + } + + if as_iris: + expected = { + 'ta': { + 'var_name': 'ta', + 'standard_name': 'air_temperature', + 'long_name': 'Air Temperature', + 'units': 'K', + }, + 'pr': { + 'var_name': 'pr', + 'standard_name': 'precipitation_flux', + 'long_name': 'Precipitation', + }, + 'toz': { + 'var_name': 'toz', + 'standard_name': 
None, + 'long_name': 'Total Ozone Column', + }, + } + else: + expected = { + 'ta': { + 'short_name': 'ta', + 'standard_name': 'air_temperature', + 'long_name': 'Air Temperature', + 'units': 'K', + }, + 'pr': { + 'short_name': 'pr', + 'standard_name': 'precipitation_flux', + 'long_name': 'Precipitation', + }, + 'toz': { + 'short_name': 'toz', + 'standard_name': '', + 'long_name': 'Total Ozone Column', + }, + } + + result = shared.extract_variables(cfg, as_iris) + + assert result == expected + + +def test_variables_available(): + + cfg = { + 'input_data': { + 'file1.nc': { + 'short_name': 'ta' + }, + } + } + assert shared.variables_available(cfg, ['ta']) is True + assert shared.variables_available(cfg, ['pr']) is False + + +def test_get_input_data_files(tmp_path): + + metadata1 = {'file1.nc': {'short_name': 'ta', 'dataset': 'dataset1'}} + metadata_dir1 = tmp_path / 'preproc' / 'ta' + metadata_dir1.mkdir(parents=True) + metadata_file1 = metadata_dir1 / 'metadata.yml' + metadata_file1.write_text(yaml.safe_dump(metadata1)) + + metadata2 = {'file2.nc': {'short_name': 'tas', 'dataset': 'dataset1'}} + metadata_dir2 = tmp_path / 'work_dir' + metadata_dir2.mkdir() + metadata_file2 = metadata_dir2 / 'tas_metadata.yml' + metadata_file2.write_text(yaml.safe_dump(metadata2)) + + cfg = {'input_files': [str(metadata_file1), str(metadata_dir2)]} + input_data = shared._base._get_input_data_files(cfg) + + assert input_data == { + 'file1.nc': metadata1['file1.nc'], + 'file2.nc': metadata2['file2.nc'], + } + + +def create_settings(path): + + settings = { + 'log_level': 'debug', + 'work_dir': str(path / 'work_dir'), + 'plot_dir': str(path / 'plot_dir'), + 'run_dir': str(path / 'run_dir'), + 'script': 'diagnostic.py', + 'input_files': [], + 'example_setting': 1, + } + + return settings + + +def write_settings(settings): + + run_dir = Path(settings['run_dir']) + run_dir.mkdir() + + settings_file = run_dir / 'settings.yml' + settings_file.write_text(yaml.safe_dump(settings)) + + return str(settings_file) + + +def test_run_diagnostic(tmp_path, monkeypatch): + + settings = create_settings(tmp_path) + settings_file = write_settings(settings) + + monkeypatch.setattr(sys, 'argv', ['', settings_file]) + + # Create files created by ESMValCore + for filename in ('log.txt', 'profile.bin', 'resource_usage.txt'): + file = Path(settings['run_dir']) / filename + file.touch() + + with shared.run_diagnostic() as cfg: + assert 'example_setting' in cfg + + +@pytest.mark.parametrize('flag', ['-l', '--log-level']) +def test_run_diagnostic_log_level(tmp_path, monkeypatch, flag): + """Test if setting the log level from the command line works.""" + settings = create_settings(tmp_path) + settings_file = write_settings(settings) + + monkeypatch.setattr(sys, 'argv', ['', flag, 'error', settings_file]) + + with shared.run_diagnostic(): + assert shared._base.logger.getEffectiveLevel() == logging.ERROR + + +def create_run_content(settings): + """Create some files to make it look like the diagnostic ran.""" + for dir_name in 'work_dir', 'plot_dir': + dir_path = Path(settings[dir_name]) + dir_path.mkdir() + (dir_path / 'example_output.txt').touch() + + for filename in ('log.txt', 'profile.bin', 'diagnostic_provenance.yml', + 'resource_usage.txt', 'tmp.nc'): + file = Path(settings['run_dir']) / filename + file.touch() + + +def test_rerun_diagnostic_raises(tmp_path, monkeypatch): + """Test if re-running the diagnostic script fails when output exists.""" + settings = create_settings(tmp_path) + settings_file = write_settings(settings) + + 
create_run_content(settings) + + monkeypatch.setattr(sys, 'argv', ['', settings_file]) + + with pytest.raises(FileExistsError): + with shared.run_diagnostic(): + pass + + +@pytest.mark.parametrize('flag', ['-i', '--ignore', '-f', '--force']) +def test_rerun_diagnostic_flag(tmp_path, monkeypatch, flag): + """Test if re-running the diagnostic script works.""" + exist = flag in {'-i', '--ignore'} + + settings = create_settings(tmp_path) + settings_file = write_settings(settings) + + create_run_content(settings) + + monkeypatch.setattr(sys, 'argv', ['', flag, settings_file]) + + with shared.run_diagnostic(): + assert not (Path(settings['run_dir']) / + 'diagnostic_provenance.yml').exists() + for file in ( + Path(settings['run_dir']) / 'tmp.nc', + Path(settings['work_dir']) / 'example_output.txt', + Path(settings['plot_dir']) / 'example_output.txt', + ): + assert file.exists() == exist diff --git a/tests/unit/diag_scripts/shared/test_io.py b/tests/unit/diag_scripts/shared/test_io.py new file mode 100644 index 0000000000..1bac515373 --- /dev/null +++ b/tests/unit/diag_scripts/shared/test_io.py @@ -0,0 +1,552 @@ +"""Tests for the module :mod:`esmvaltool.diag_scripts.shared.io`.""" +import os +from collections import OrderedDict +from copy import deepcopy +from unittest import mock + +import iris +import numpy as np +import pytest +import yaml + +from esmvaltool.diag_scripts.shared import io + +with open(os.path.join(os.path.dirname(__file__), 'configs', + 'test_io.yml')) as file_: + CONFIG = yaml.safe_load(file_) + + +@pytest.mark.parametrize('data', CONFIG['_has_necessary_attributes']) +@mock.patch.object(io, 'logger', autospec=True) +def test_has_necessary_attributes(mock_logger, data): + """Test attribute checks.""" + for log_level in ('debug', 'info', 'warning', 'error', 'exception'): + metadata = data['input'] + kwargs = data.get('kwargs', {}) + has_atts = io._has_necessary_attributes(metadata, + log_level=log_level, + **kwargs) + assert has_atts == data['output'] + logger_func = getattr(mock_logger, log_level) + if has_atts: + logger_func.assert_not_called() + else: + logger_func.assert_called() + assert logger_func.call_count == data['n_logger'] + mock_logger.reset_mock() + + +CFG = { + 'input_files': [ + 'metadata.yml', + 'test_metadata.yml', + 'valid/dir/1', + 'valid/dir/2', + ], + 'other_attr': + 'I am not used!', +} +ROOT_DIR = '/root/to/something' +TEST_GET_ALL_ANCESTOR_FILES = [ + (None, [ + os.path.join(ROOT_DIR, 'egg.yml'), + os.path.join(ROOT_DIR, 'root2', 'x.nc'), + os.path.join(ROOT_DIR, 'root2', 'y.png'), + os.path.join(ROOT_DIR, 'root3', 'egg.nc'), + os.path.join(ROOT_DIR, 'root4', 'egg.nc'), + os.path.join(ROOT_DIR, 'test.nc'), + os.path.join(ROOT_DIR, 'test_1.nc'), + os.path.join(ROOT_DIR, 'test_2.yml'), + ]), + ('*', [ + os.path.join(ROOT_DIR, 'egg.yml'), + os.path.join(ROOT_DIR, 'root2', 'x.nc'), + os.path.join(ROOT_DIR, 'root2', 'y.png'), + os.path.join(ROOT_DIR, 'root3', 'egg.nc'), + os.path.join(ROOT_DIR, 'root4', 'egg.nc'), + os.path.join(ROOT_DIR, 'test.nc'), + os.path.join(ROOT_DIR, 'test_1.nc'), + os.path.join(ROOT_DIR, 'test_2.yml'), + ]), + ('*.nc', [ + os.path.join(ROOT_DIR, 'root2', 'x.nc'), + os.path.join(ROOT_DIR, 'root3', 'egg.nc'), + os.path.join(ROOT_DIR, 'root4', 'egg.nc'), + os.path.join(ROOT_DIR, 'test.nc'), + os.path.join(ROOT_DIR, 'test_1.nc'), + ]), + ('test*', [ + os.path.join(ROOT_DIR, 'test.nc'), + os.path.join(ROOT_DIR, 'test_1.nc'), + os.path.join(ROOT_DIR, 'test_2.yml'), + ]), + ('*.yml', [ + os.path.join(ROOT_DIR, 'egg.yml'), + 
os.path.join(ROOT_DIR, 'test_2.yml'), + ]), + ('egg.nc*', [ + os.path.join(ROOT_DIR, 'root3', 'egg.nc'), + os.path.join(ROOT_DIR, 'root4', 'egg.nc'), + ]), +] + + +@pytest.mark.parametrize('pattern,output', TEST_GET_ALL_ANCESTOR_FILES) +@mock.patch('esmvaltool.diag_scripts.shared.io.os.walk', autospec=True) +def test_get_all_ancestor_files(mock_walk, pattern, output): + """Test retrieving of ancestor files.""" + input_dirs = [ + [ + (ROOT_DIR, ['dir', '__pycache__'], ['test.nc', 'egg.yml']), + (os.path.join(ROOT_DIR, 'root2'), ['d'], ['x.nc', 'y.png']), + (os.path.join(ROOT_DIR, 'root3'), [], ['egg.nc']), + ], + [ + (ROOT_DIR, ['dir', '__pycache__'], ['test_1.nc', 'test_2.yml']), + (os.path.join(ROOT_DIR, 'root4'), ['d2'], ['egg.nc']), + ], + ] + mock_walk.side_effect = input_dirs + files = io.get_all_ancestor_files(CFG, pattern=pattern) + assert files == output + + +TEST_GET_ANCESTOR_FILE = [ + ([], ValueError), + (['I/am/a/cool/file.nc'], 'I/am/a/cool/file.nc'), + (['I/am/a/cool/file.nc', 'oh/no/file_2.nc'], ValueError), +] + + +@pytest.mark.parametrize('files,output', TEST_GET_ANCESTOR_FILE) +@mock.patch.object(io, 'get_all_ancestor_files', autospec=True) +def test_get_ancestor_file(mock_get_all_ancestors, files, output): + """Test retrieving of single ancestor file.""" + mock_get_all_ancestors.return_value = files + if isinstance(output, type): + with pytest.raises(output): + io.get_ancestor_file(CFG, pattern='*') + return + returned_file = io.get_ancestor_file(CFG, pattern='*') + assert returned_file == output + + +INVALID_STANDARD_NAME = 'I_am_an_invalid_standard_name' +LONG_NAME = 'Loooong name' +SHORT_NAME = 'var' +STANDARD_NAME = 'air_temperature' +UNITS = 'K' + +A_1 = { + 'dataset': 'model', + 'filename': 'r/a.nc', + 'project': 'CMIP42', +} +V_1 = { + 'long_name': LONG_NAME, + 'var_name': SHORT_NAME, + 'units': UNITS, +} +C_1 = iris.cube.Cube(0, **V_1, attributes=A_1) +A_2 = { + 'dataset': 'model', + 'filename': 'r1/b.ps', +} +V_2 = { + 'long_name': LONG_NAME, + 'var_name': SHORT_NAME, + 'units': UNITS, +} +C_2 = iris.cube.Cube(0, **V_2, attributes=A_2) +A_3 = { + 'filename': 'r/a.nc', +} +V_3 = { + 'long_name': LONG_NAME, + 'var_name': SHORT_NAME, + 'units': UNITS, +} +C_3 = iris.cube.Cube(0, **V_3, attributes=A_3) +A_4 = { + 'dataset': 'model', + 'filename': 'r1/b.nc', + 'project': 'CMIP42', +} +V_4 = { + 'long_name': LONG_NAME, + 'var_name': SHORT_NAME, + 'standard_name': STANDARD_NAME, + 'units': UNITS, +} +C_4 = iris.cube.Cube(0, **V_4, attributes=A_4) +A_5 = { + 'dataset': 'model', + 'filename': 'r/a.nc', + 'project': 'CMIP42', +} +V_5 = { + 'long_name': LONG_NAME, + 'var_name': SHORT_NAME, + 'standard_name': None, + 'units': UNITS, +} +C_5 = iris.cube.Cube(0, **V_5, attributes=A_5) + + +W_1 = [('r', [], ['a.nc'])] +W_2 = [('r', [], ['a.nc']), ('r1', ['d1'], ['b.nc'])] +W_2_X = [('r1', [], ['b.nc', 'b.ps'])] + +TEST_NETCDF_TO_METADATA = [ + ([C_1], W_1, None, [{**A_1, **V_1}], 0), + ([C_1], W_1, '*', [{**A_1, **V_1}], 0), + ([C_1, C_4], W_2, None, [{**A_1, **V_1}, {**A_4, **V_4}], 0), + ([C_1, C_4], W_2, '*', [{**A_1, **V_1}, {**A_4, **V_4}], 0), + ([C_5, C_4], W_2, None, [{**A_5, **V_5}, {**A_4, **V_4}], 0), + ([C_5, C_4], W_2, '*', [{**A_5, **V_5}, {**A_4, **V_4}], 0), + ([C_4], W_2_X, None, [{**A_4, **V_4}], 0), + ([C_4], W_2_X, '*', [{**A_4, **V_4}], 0), + ([C_2], W_1, None, ValueError, 1), + ([C_2], W_1, '*', ValueError, 1), + ([C_3], W_1, None, ValueError, 2), + ([C_3], W_1, '*', ValueError, 2), + ([C_2, C_3], W_2, None, ValueError, 3), + ([C_2, C_3], W_2, '*', 
ValueError, 3), + ([C_1, C_3], W_2, None, ValueError, 2), + ([C_1, C_3], W_2, '*', ValueError, 2), +] + + +@pytest.mark.parametrize('cubes,walk_out,root,output,n_logger', + TEST_NETCDF_TO_METADATA) +@mock.patch.object(io, 'get_all_ancestor_files', autospec=True) +@mock.patch.object(io, 'logger', autospec=True) +@mock.patch('esmvaltool.diag_scripts.shared.io.iris.load_cube', autospec=True) +@mock.patch('esmvaltool.diag_scripts.shared.io.os.walk', autospec=True) +def test_netcdf_to_metadata(mock_walk, mock_load_cube, mock_logger, + mock_get_all_ancestors, cubes, walk_out, root, + output, n_logger): + """Test cube to metadata.""" + ancestors = [] + for (files_root, _, files) in walk_out: + new_files = [os.path.join(files_root, f) for f in files] + ancestors.extend(new_files) + mock_get_all_ancestors.return_value = ancestors + mock_walk.return_value = walk_out + mock_load_cube.side_effect = cubes + if isinstance(output, type): + with pytest.raises(output): + io.netcdf_to_metadata({}, pattern=root, root=root) + else: + for dataset in output: + dataset['short_name'] = dataset.pop('var_name') + dataset.setdefault('standard_name', None) + metadata = io.netcdf_to_metadata({}, pattern=root, root=root) + assert metadata == output + assert mock_logger.error.call_count == n_logger + + +ATTRS_IN = [ + { + 'dataset': 'a', + 'filename': 'path/to/model1.nc', + 'project': 'CMIP42', + 'bool': True, + }, + { + 'dataset': 'b', + 'filename': 'path/to/model2.nc', + 'project': 'CMIP42', + }, + { + 'dataset': 'c', + 'filename': 'path/to/model3.nc', + }, + { + 'dataset': 'd', + 'filename': 'path/to/model4.nc', + 'project': 'CMIP42', + }, +] +ATTRS_OUT = [ + { + 'dataset': 'a', + 'filename': 'path/to/model1.nc', + 'project': 'CMIP42', + 'bool': 'True', + 'invalid_standard_name': INVALID_STANDARD_NAME, + 'attr': 'test', + }, + { + 'dataset': 'b', + 'filename': 'path/to/model2.nc', + 'project': 'CMIP42', + 'attr': 'test', + }, + {}, + { + 'dataset': 'd', + 'filename': 'path/to/model4.nc', + 'project': 'CMIP42', + 'attr': 'test', + }, +] +VAR_ATTRS_IN = [ + { + 'long_name': LONG_NAME, + 'var_name': SHORT_NAME, + 'units': UNITS, + }, + { + 'long_name': LONG_NAME, + 'var_name': SHORT_NAME, + 'units': UNITS, + }, + { + 'long_name': LONG_NAME, + 'var_name': SHORT_NAME, + }, + { + 'long_name': LONG_NAME, + 'var_name': SHORT_NAME, + 'standard_name': STANDARD_NAME, + 'units': UNITS, + }, +] +VAR_ATTRS_OUT = [ + { + 'long_name': LONG_NAME, + 'var_name': SHORT_NAME, + 'standard_name': None, + 'units': UNITS, + }, + { + 'long_name': LONG_NAME, + 'var_name': SHORT_NAME, + 'standard_name': None, + 'units': UNITS, + }, + {}, + { + 'long_name': LONG_NAME, + 'var_name': SHORT_NAME, + 'standard_name': STANDARD_NAME, + 'units': UNITS, + }, +] +ADD_ATTRS = {'project': 'PROJECT', 'attr': 'test'} +ADD_VAR_ATTRS = {'standard_name': STANDARD_NAME, 'var_name': 'test'} +CUBES_IN = [ + iris.cube.Cube(0, attributes=ADD_ATTRS, **ADD_VAR_ATTRS) for _ in range(4) +] +OUTPUT = [ + iris.cube.Cube(0, attributes=ATTRS_OUT[idx], **VAR_ATTRS_OUT[idx]) for idx + in range(4) +] +OUTPUT[2] = ValueError +for var_attr in VAR_ATTRS_IN: + var_attr['short_name'] = var_attr.pop('var_name') +ATTRS_IN[0]['standard_name'] = INVALID_STANDARD_NAME +METADATA = [{**a, **VAR_ATTRS_IN[idx]} for (idx, a) in enumerate(ATTRS_IN)] +TEST_METADATA_TO_NETDCF = zip(METADATA, CUBES_IN, OUTPUT) + + +@pytest.mark.parametrize('metadata,cube,output', TEST_METADATA_TO_NETDCF) +@mock.patch.object(io, 'iris_save', autospec=True) +@mock.patch.object(io, 'logger', autospec=True) +def 
test_metadata_to_netcdf(mock_logger, mock_save, metadata, cube, output): + """Test metadata to cube.""" + if isinstance(output, type): + with pytest.raises(output): + io.metadata_to_netcdf(cube, metadata) + assert not mock_save.called + return + io.metadata_to_netcdf(cube, metadata) + if metadata.get('standard_name') == INVALID_STANDARD_NAME: + mock_logger.warning.assert_called() + assert 'invalid_standard_name' in output.attributes + else: + mock_logger.warning.assert_not_called() + assert 'invalid_standard_name' not in output.attributes + save_args = (output, metadata['filename']) + assert mock_save.call_args_list == [mock.call(*save_args)] + + +PATH = 'path/to/super/cube' +VAR_ATTRS_NEW = [ + { + 'long_name': 'I do not have units :(', + 'short_name': 'sad', + }, + { + 'long_name': 'Long name', + 'short_name': 'var', + 'units': '1', + }, + { + 'short_name': SHORT_NAME, + 'long_name': LONG_NAME, + 'standard_name': STANDARD_NAME, + 'units': UNITS, + }, +] +ATTRS_NEW = [ + {}, + {}, + { + 'test': '123', + 'answer': 42, + }, +] +TEST_SAVE_1D_DATA = zip(VAR_ATTRS_NEW, ATTRS_NEW) + + +@pytest.mark.parametrize('var_attrs,attrs', TEST_SAVE_1D_DATA) +@mock.patch.object(io, 'iris_save', autospec=True) +@mock.patch.object(io, 'logger', autospec=True) +def test_save_1d_data(mock_logger, mock_save, var_attrs, attrs): + """Test saving of 1 dimensional data.""" + coord_name = 'inclination' + data = [ + np.ma.masked_invalid([1.0, np.nan, -1.0]), + np.arange(2.0) + 100.0, + np.ma.masked_invalid([33.0, 22.0, np.nan, np.nan, -77.0]), + ] + coords = [ + iris.coords.DimCoord(np.arange(3.0) - 3.0, long_name=coord_name), + iris.coords.DimCoord(np.arange(2.0) + 2.0, long_name=coord_name), + iris.coords.DimCoord(np.array([-7.0, -3.0, -2.71, 3.0, 314.15]), + long_name=coord_name), + ] + cubes = OrderedDict([ + ('model1', + iris.cube.Cube(data[0], + var_name='xy', + units='kg', + attributes={'hi': '!'}, + dim_coords_and_dims=[(coords[0], 0)])), + ('model2', + iris.cube.Cube(data[1], + var_name='zr', + units='1', + attributes={}, + dim_coords_and_dims=[(coords[1], 0)])), + ('model3', + iris.cube.Cube(data[2], + var_name='wa', + units='unknown', + attributes={'very': 'long cube'}, + dim_coords_and_dims=[(coords[2], 0)])), + ]) + dataset_dim = iris.coords.AuxCoord(list(cubes.keys()), long_name='dataset') + dim_1 = coords[0].copy([-7.0, -3.0, -2.71, -2.0, -1.0, 2.0, 3.0, 314.15]) + output_data = np.ma.masked_invalid( + [[np.nan, 1.0, np.nan, np.nan, -1.0, np.nan, np.nan, np.nan], + [np.nan, np.nan, np.nan, np.nan, np.nan, 100.0, 101.0, np.nan], + [33.0, 22.0, np.nan, np.nan, np.nan, np.nan, np.nan, -77.0]]) + output_dims = [(dataset_dim, 0), (dim_1, 1)] + + # Without cubes + with pytest.raises(ValueError): + io.save_1d_data({}, PATH, coord_name, var_attrs, attrs) + mock_logger.error.assert_not_called() + assert not mock_save.called + mock_logger.reset_mock() + mock_save.reset_mock() + + # With cubes + if 'units' not in var_attrs: + with pytest.raises(ValueError): + io.save_1d_data(cubes, PATH, coord_name, var_attrs, attrs) + mock_logger.error.assert_called_once() + assert not mock_save.called + return + io.save_1d_data(cubes, PATH, coord_name, var_attrs, attrs) + iris_var_attrs = deepcopy(var_attrs) + iris_var_attrs['var_name'] = iris_var_attrs.pop('short_name') + new_cube = iris.cube.Cube(output_data, + aux_coords_and_dims=output_dims, + attributes=attrs, + **iris_var_attrs) + mock_logger.error.assert_not_called() + assert mock_save.call_args_list == [mock.call(new_cube, PATH)] + + +CUBELIST = [ + iris.cube.Cube(1), 
+ iris.cube.Cube(2, attributes={ + 'filename': 'a', + 'x': 'y', + }), +] +CUBELIST_OUT = [ + iris.cube.Cube(1, attributes={'filename': PATH}), + iris.cube.Cube(2, attributes={ + 'filename': PATH, + 'x': 'y', + }), +] +CUBES_TO_SAVE = [ + (iris.cube.Cube(0), iris.cube.Cube(0, attributes={'filename': PATH})), + (CUBELIST, CUBELIST_OUT), + (iris.cube.CubeList(CUBELIST), iris.cube.CubeList(CUBELIST_OUT)), +] + + +@pytest.mark.parametrize('source,output', CUBES_TO_SAVE) +@mock.patch('esmvaltool.diag_scripts.shared.io.iris.save', autospec=True) +@mock.patch.object(io, 'logger', autospec=True) +def test_iris_save(mock_logger, mock_save, source, output): + """Test iris save function.""" + io.iris_save(source, PATH) + assert mock_save.call_args_list == [mock.call(output, PATH)] + mock_logger.info.assert_called_once() + + +AUX_COORDS = [ + None, + None, + iris.coords.AuxCoord([2, 3, 5], long_name='Primes!'), +] +TEST_SAVE_SCALAR_DATA = zip(VAR_ATTRS_NEW, ATTRS_NEW, AUX_COORDS) + + +@pytest.mark.parametrize('var_attrs,attrs,aux_coord', TEST_SAVE_SCALAR_DATA) +@mock.patch.object(io, 'iris_save', autospec=True) +@mock.patch.object(io, 'logger', autospec=True) +def test_save_scalar_data(mock_logger, mock_save, var_attrs, attrs, aux_coord): + """Test saving of scalar data.""" + data = OrderedDict([ + ('model1', np.nan), + ('model2', 1.0), + ('model3', 3.14), + ]) + dataset_dim = iris.coords.AuxCoord(list(data.keys()), long_name='dataset') + output_data = np.ma.masked_invalid([np.nan, 1.0, 3.14]) + + # Without data + with pytest.raises(ValueError): + io.save_scalar_data({}, PATH, var_attrs) + mock_logger.error.assert_not_called() + assert not mock_save.called + mock_logger.reset_mock() + mock_save.reset_mock() + + # With data + if 'units' not in var_attrs: + with pytest.raises(ValueError): + io.save_scalar_data(data, PATH, var_attrs, aux_coord, attrs) + mock_logger.error.assert_called_once() + assert not mock_save.called + return + io.save_scalar_data(data, PATH, var_attrs, aux_coord, attrs) + iris_var_attrs = deepcopy(var_attrs) + iris_var_attrs['var_name'] = iris_var_attrs.pop('short_name') + new_cube = iris.cube.Cube(output_data, + aux_coords_and_dims=[(dataset_dim, 0)], + attributes=attrs, + **iris_var_attrs) + if aux_coord is not None: + new_cube.add_aux_coord(aux_coord, 0) + mock_logger.error.assert_not_called() + assert mock_save.call_args_list == [mock.call(new_cube, PATH)] diff --git a/tests/unit/diag_scripts/shared/test_iris_helpers.py b/tests/unit/diag_scripts/shared/test_iris_helpers.py new file mode 100644 index 0000000000..6663a06e81 --- /dev/null +++ b/tests/unit/diag_scripts/shared/test_iris_helpers.py @@ -0,0 +1,502 @@ +"""Tests for the module :mod:`esmvaltool.diag_scripts.shared.iris_helpers`.""" +from unittest import mock + +import iris +import iris.coords +import iris.cube +import iris.exceptions +import numpy as np +import pytest +from cf_units import Unit + +from esmvaltool.diag_scripts.shared import iris_helpers as ih + +LONG_NAME = 'x' +DIM_COORD_1 = iris.coords.DimCoord(np.arange(3.0) - 1.0, long_name=LONG_NAME) +AUX_COORD_1 = iris.coords.AuxCoord(np.arange(3.0) - 1.0, long_name=LONG_NAME) +AUX_COORD_2 = iris.coords.AuxCoord([10.0, 20.0, 30.0], long_name='longer') +SMALL_COORD = iris.coords.DimCoord([0.0], long_name=LONG_NAME) +LONG_COORD_1 = iris.coords.AuxCoord([-1.0, 0.0, 1.0, 1.], long_name=LONG_NAME) +LONG_COORD_2 = iris.coords.DimCoord([-1.0, -0.5, 0.0, 1.0], + long_name=LONG_NAME) +WRONG_COORD = iris.coords.DimCoord([-200.0, +200.0], long_name=LONG_NAME) +SCALAR_COORD = 
iris.coords.AuxCoord(2.71, long_name='e') +DUP_COORD = iris.coords.AuxCoord([-1.0, 0.0, 1.0, 1.0], long_name=LONG_NAME) +CUBE_1 = iris.cube.Cube( + np.ma.masked_invalid([-1.0, np.nan, 2.0]), + var_name='a', + attributes={'1': '2'}, + dim_coords_and_dims=[(DIM_COORD_1, 0)], + aux_coords_and_dims=[(SCALAR_COORD, []), (AUX_COORD_2, 0)]) +CUBE_2 = iris.cube.Cube( + np.ma.masked_invalid([-1.0, np.nan, 2.0]), + var_name='a', + attributes={'1': '2'}, + dim_coords_and_dims=[(DIM_COORD_1, 0)], + aux_coords_and_dims=[(SCALAR_COORD, [])]) +CUBE_3 = iris.cube.Cube( + np.ma.masked_invalid([np.nan, 3.14, np.nan]), + var_name='a', + attributes={'1': '2'}, + dim_coords_and_dims=[(DIM_COORD_1, 0)]) +CUBE_4 = iris.cube.Cube( + np.ma.masked_invalid([1.0, 2.0, 3.0, 3.0]), + var_name='a', + attributes={'1': '2'}, + aux_coords_and_dims=[(SCALAR_COORD, []), (LONG_COORD_1, 0)]) +CUBE_5 = iris.cube.Cube( + np.ma.masked_invalid([np.nan, 3.14, np.nan, np.nan]), + var_name='a', + attributes={'1': '2'}, + aux_coords_and_dims=[(LONG_COORD_1, 0)]) +CUBE_SMALL = iris.cube.Cube([3.14], + var_name='a', + attributes={'1': '2'}, + dim_coords_and_dims=[(SMALL_COORD, 0)]) +CUBE_LONG = iris.cube.Cube( + np.ma.masked_invalid([-1.0, np.nan, np.nan, 2.0]), + var_name='a', + attributes={'1': '2'}, + dim_coords_and_dims=[(LONG_COORD_2, 0)], + aux_coords_and_dims=[(SCALAR_COORD, [])]) +CUBE_SMALL_LONG = iris.cube.Cube( + np.ma.masked_invalid([np.nan, np.nan, 3.14, np.nan]), + var_name='a', + attributes={'1': '2'}, + dim_coords_and_dims=[(LONG_COORD_2, 0)]) +CUBE_WRONG = iris.cube.Cube( + np.arange(2.0), + var_name='a', + attributes={'1': '2'}, + dim_coords_and_dims=[(WRONG_COORD, 0)]) +CUBE_DUP = iris.cube.Cube( + np.ma.masked_invalid([np.nan, 3.14, 2.71, 6.28]), + var_name='a', + attributes={'1': '2'}, + aux_coords_and_dims=[(DUP_COORD, 0)]) +TEST_TRANSFORM_COORD_TO_REF = [ + (DIM_COORD_1, [CUBE_1, CUBE_1], [CUBE_2, CUBE_2]), + (DIM_COORD_1, [CUBE_SMALL, CUBE_1], [CUBE_3, CUBE_2]), + (DIM_COORD_1, [CUBE_WRONG, CUBE_1], ValueError), + (DIM_COORD_1, [CUBE_DUP, CUBE_1], ValueError), + (AUX_COORD_1, [CUBE_1, CUBE_1], [CUBE_2, CUBE_2]), + (AUX_COORD_1, [CUBE_SMALL, CUBE_1], [CUBE_3, CUBE_2]), + (AUX_COORD_1, [CUBE_WRONG, CUBE_1], ValueError), + (AUX_COORD_1, [CUBE_DUP, CUBE_1], ValueError), + (LONG_COORD_1, [CUBE_1, CUBE_1], ValueError), + (LONG_COORD_1, [CUBE_SMALL, CUBE_1], ValueError), + (LONG_COORD_1, [CUBE_WRONG, CUBE_1], ValueError), + (LONG_COORD_1, [CUBE_DUP, CUBE_1], ValueError), + (LONG_COORD_2, [CUBE_1, CUBE_1], [CUBE_LONG, CUBE_LONG]), + (LONG_COORD_2, [CUBE_SMALL, CUBE_1], [CUBE_SMALL_LONG, CUBE_LONG]), + (LONG_COORD_2, [CUBE_WRONG, CUBE_1], ValueError), + (LONG_COORD_2, [CUBE_DUP, CUBE_1], ValueError), + (DIM_COORD_1, [CUBE_1], [CUBE_2]), + (DIM_COORD_1, [CUBE_SMALL], [CUBE_3]), + (DIM_COORD_1, [CUBE_WRONG], ValueError), + (DIM_COORD_1, [CUBE_DUP], ValueError), + (AUX_COORD_1, [CUBE_1], [CUBE_2]), + (AUX_COORD_1, [CUBE_SMALL], [CUBE_3]), + (AUX_COORD_1, [CUBE_WRONG], ValueError), + (AUX_COORD_1, [CUBE_DUP], ValueError), + (LONG_COORD_1, [CUBE_1], ValueError), + (LONG_COORD_1, [CUBE_SMALL], ValueError), + (LONG_COORD_1, [CUBE_WRONG], ValueError), + (LONG_COORD_1, [CUBE_DUP], ValueError), + (LONG_COORD_2, [CUBE_1], [CUBE_LONG]), + (LONG_COORD_2, [CUBE_SMALL], [CUBE_SMALL_LONG]), + (LONG_COORD_2, [CUBE_WRONG], ValueError), + (LONG_COORD_2, [CUBE_DUP], ValueError), +] + + +@pytest.mark.parametrize('ref_coord,cubes,output', TEST_TRANSFORM_COORD_TO_REF) +def test_transform_coord_to_ref(ref_coord, cubes, output): + """Test 
transforming coordinate to reference.""" + # ValueErrors + if isinstance(output, type): + with pytest.raises(output): + new_cubes = ih._transform_coord_to_ref(cubes, ref_coord) + return + + # Working examples + cubes = iris.cube.CubeList(cubes) + output = iris.cube.CubeList(output) + new_cubes = ih._transform_coord_to_ref(cubes, ref_coord) + assert new_cubes == output + + +DIM_COORD_2 = iris.coords.DimCoord(np.arange(3.0) - 1.0, long_name='aaa') +DIM_COORD_3 = iris.coords.DimCoord(np.arange(3.0) + 1.0, long_name=LONG_NAME) +CUBE_6 = iris.cube.Cube( + np.ma.arange(3.0) + 100.0, + var_name='a', + dim_coords_and_dims=[(DIM_COORD_2, 0)]) +CUBE_7 = iris.cube.Cube( + np.ma.arange(3.0) - 100.0, + var_name='a', + dim_coords_and_dims=[(DIM_COORD_3, 0)]) +TEST_CHECK_COORDINATE = [ + ([CUBE_1, CUBE_1, CUBE_1], DIM_COORD_1.points), + ([CUBE_1], DIM_COORD_1.points), + ([CUBE_1, CUBE_6], iris.exceptions.CoordinateNotFoundError), + ([CUBE_1, CUBE_7], ValueError), +] + + +@pytest.mark.parametrize('cubes,output', TEST_CHECK_COORDINATE) +def test_check_coordinate(cubes, output): + """Test checking of coordinates.""" + if isinstance(output, type): + with pytest.raises(output): + out = ih.check_coordinate(cubes, LONG_NAME) + else: + out = ih.check_coordinate(cubes, LONG_NAME) + assert np.array_equal(out, output) + + +DICT_1 = {'a': 'b', 'c': 'd'} +DICT_2 = {'short_name': 'x'} +DICT_3 = {'var_name': 'x'} +TEST_CONVERT_TO_IRIS = [ + (DICT_1, DICT_1), + (DICT_2, DICT_3), + (DICT_3, DICT_3), + ({ + **DICT_1, + **DICT_2, + }, { + **DICT_1, + **DICT_3, + }), + ({ + **DICT_1, + **DICT_3, + }, { + **DICT_1, + **DICT_3, + }), + ({ + **DICT_1, + **DICT_2, + 'var_name': ':(', + }, { + **DICT_1, + **DICT_3, + }), +] + + +@pytest.mark.parametrize('dict_in,dict_out', TEST_CONVERT_TO_IRIS) +def test_convert_to_iris(dict_in, dict_out): + """Test converting metadata dictionary checking of coordinates.""" + if 'short_name' in dict_in and 'var_name' in dict_in: + with pytest.raises(KeyError): + ih.convert_to_iris(dict_in) + return + new_dict = ih.convert_to_iris(dict_in) + assert new_dict == dict_out + assert new_dict is not dict_in + + +@mock.patch('esmvaltool.diag_scripts.shared.iris_helpers.iris.load_cube', + autospec=True) +def test_get_mean_cube(mock_load_cube): + """Test calculation of mean cubes.""" + datasets = [ + {'test': 'x', 'filename': 'a/b.nc'}, + {'test': 'y', 'filename': 'a/b/c.nc'}, + {'test': 'z', 'filename': 'c/d.nc'}, + ] + cube = CUBE_1.copy([-4.0, 2.0, -4.0]) + cube.coord(DIM_COORD_1).attributes = {'test': 1} + cube.cell_methods = [iris.coords.CellMethod('mean', coords=LONG_NAME)] + cubes = [CUBE_1, CUBE_2, cube] + mock_load_cube.side_effect = cubes + cube_out = iris.cube.Cube( + [-2.0, 2.0, 0.0], + var_name='a', + dim_coords_and_dims=[(DIM_COORD_1, 0)], + cell_methods=[iris.coords.CellMethod('mean', coords='cube_label')], + ) + result = ih.get_mean_cube(datasets) + assert result == cube_out + + +TEST_IRIS_PROJECT_CONSTRAINT = [ + (['ONE'], False, [2.0, 6.0], ['a', 'e']), + (['ONE'], True, [3.0, 4.0, 5.0], ['b', 'c', 'd']), + (['ONE', 'THREE'], False, [2.0, 4.0, 6.0], ['a', 'c', 'e']), + (['ONE', 'THREE'], True, [3.0, 5.0], ['b', 'd']), +] + + +@pytest.mark.parametrize('constr,negate,data,points', + TEST_IRIS_PROJECT_CONSTRAINT) +def test_iris_project_constraint(constr, negate, data, points): + """Test iris constraint for projects.""" + input_data = [{ + 'project': 'ONE', + 'dataset': 'a', + }, { + 'project': 'TWO', + 'dataset': 'b', + }, { + 'project': 'THREE', + 'dataset': 'c', + }, { + 'project': 'ONE', 
+ 'dataset': 'e', + }] + dataset_coord = iris.coords.AuxCoord(['a', 'b', 'c', 'd', 'e'], + long_name='dataset') + cube = iris.cube.Cube( + np.arange(5.0) + 2.0, aux_coords_and_dims=[(dataset_coord, 0)]) + new_cube = iris.cube.Cube( + data, + aux_coords_and_dims=[(iris.coords.AuxCoord( + points, long_name='dataset'), 0)]) + constraint = ih.iris_project_constraint(constr, input_data, negate=negate) + assert cube.extract(constraint) == new_cube + + +ATTRS = [ + { + 'test': 1, + 'oh': 'yeah', + }, + { + 'a2': 'c2', + }, +] +VAR_ATTRS = [ + { + 'var_name': 'var', + 'long_name': 'LOOONG NAME', + }, + { + 'standard_name': 'air_temperature', + 'units': 'K', + }, +] +DATSET_COORD_1 = iris.coords.AuxCoord(['x', 'b', 'c', 'a', 'y', 'z'], + long_name='dataset') +DATSET_COORD_1_SORTED = iris.coords.AuxCoord(['a', 'b', 'c', 'x', 'y', 'z'], + long_name='dataset') +DATSET_COORD_2 = iris.coords.AuxCoord(['t', 'w', 'z', 'b', 'x'], + long_name='dataset') +DATSET_COORD_3 = iris.coords.AuxCoord(['r', 's'], long_name='dataset') +DATSET_COORD_4 = iris.coords.AuxCoord(['c', 'c', 'b', 'a'], + long_name='dataset') +DATSET_COORD_5 = iris.coords.AuxCoord(['b', 'x', 'z'], long_name='dataset') +CUBE_DAT_1 = iris.cube.Cube( + np.arange(6.0) - 2.0, + aux_coords_and_dims=[(DATSET_COORD_1, 0)], + attributes=ATTRS[0], + **VAR_ATTRS[0]) +CUBE_DAT_1_SORTED = iris.cube.Cube([1.0, -1.0, 0.0, -2.0, 2.0, 3.0], + aux_coords_and_dims=[(DATSET_COORD_1_SORTED, + 0)], + attributes=ATTRS[0], + **VAR_ATTRS[0]) +CUBE_DAT_1_OUT = iris.cube.Cube([-1.0, -2.0, 3.0], + aux_coords_and_dims=[(DATSET_COORD_5, 0)], + attributes=ATTRS[0], + **VAR_ATTRS[0]) +CUBE_DAT_2 = iris.cube.Cube( + np.ma.masked_invalid([np.nan, 0.0, np.nan, 3.14, 2.71]), + aux_coords_and_dims=[(DATSET_COORD_2, 0)], + attributes=ATTRS[1], + **VAR_ATTRS[1]) +CUBE_DAT_2_OUT = iris.cube.Cube( + np.ma.masked_invalid([3.14, 2.71, np.nan]), + aux_coords_and_dims=[(DATSET_COORD_5, 0)], + attributes=ATTRS[1], + **VAR_ATTRS[1]) +CUBE_DAT_3 = iris.cube.Cube( + np.arange(2.0), + aux_coords_and_dims=[(DATSET_COORD_3, 0)], + attributes=ATTRS[0], + **VAR_ATTRS[0]) +CUBE_DAT_4 = iris.cube.Cube( + np.ma.masked_invalid([np.nan, 2.0, 3.0, 42.0]), + aux_coords_and_dims=[(DATSET_COORD_4, 0)], + attributes=ATTRS[1], + **VAR_ATTRS[1]) +TEST_INTERSECT_DATASET_COORDS = [ + ([CUBE_DAT_1, CUBE_1], iris.exceptions.CoordinateNotFoundError), + ([CUBE_DAT_1, CUBE_DAT_4], ValueError), + ([CUBE_DAT_1, CUBE_DAT_3], ValueError), + ([CUBE_DAT_1], [CUBE_DAT_1_SORTED]), + ([CUBE_DAT_1, CUBE_DAT_1], [CUBE_DAT_1_SORTED, CUBE_DAT_1_SORTED]), + ([CUBE_DAT_1, CUBE_DAT_2], [CUBE_DAT_1_OUT, CUBE_DAT_2_OUT]), + ([CUBE_DAT_2, CUBE_DAT_1], [CUBE_DAT_2_OUT, CUBE_DAT_1_OUT]), +] + + +@pytest.mark.parametrize('cubes,output', TEST_INTERSECT_DATASET_COORDS) +def test_intersect_dataset_coords(cubes, output): + """Test intersecting dataset coordinates.""" + # ValueErrors + if isinstance(output, type): + with pytest.raises(output): + new_cubes = ih.intersect_dataset_coordinates(cubes) + return + + # Working examples + cubes = iris.cube.CubeList(cubes) + output = iris.cube.CubeList(output) + new_cubes = ih.intersect_dataset_coordinates(cubes) + assert new_cubes == output + + +def test_prepare_cube_for_merging(): + """Test preprocessing cubes before merging.""" + label = 'abcde' + aux_coord = iris.coords.AuxCoord(label, + var_name='cube_label', + long_name='cube_label') + cube_in = CUBE_1.copy() + cube_in.coord(DIM_COORD_1).attributes = {'test_attr': 1} + cube_in.cell_methods = [iris.coords.CellMethod('mean', coords=LONG_NAME)] + 
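# The expected cube below encodes what prepare_cube_for_merging should
+ # do: drop cube attributes, coordinate attributes, cell methods, and
+ # extra auxiliary coordinates, and attach a scalar 'cube_label'
+ # coordinate so the prepared cubes can be merged later.
+ 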
cube_out = iris.cube.Cube( + np.ma.masked_invalid([-1.0, np.nan, 2.0]), + var_name='a', + dim_coords_and_dims=[(DIM_COORD_1, 0)], + aux_coords_and_dims=[(aux_coord, [])], + ) + assert cube_in != cube_out + ih.prepare_cube_for_merging(cube_in, label) + assert cube_in == cube_out + + +DIM_COORD_4 = DIM_COORD_1.copy([100.0, 150.0, 160.0]) +DIM_COORD_4.rename('time') +DIM_COORD_LONGEST = DIM_COORD_1.copy([-200.0, -1.0, 0.0, 1.0, 2.0, 3.0, 200.0]) +CUBE_8 = CUBE_1.copy() +CUBE_8.coord(LONG_NAME).points = np.array([100.0, 150.0, 160.0]) +CUBE_8.coord(LONG_NAME).rename('time') +CUBE_WRONG_COORD = CUBE_WRONG.copy() +CUBE_WRONG_COORD.coord(LONG_NAME).rename('wrooong') +TEST_UNIFY_1D_CUBES = [ + ([CUBE_1, iris.cube.Cube([[1.0]])], LONG_NAME, ValueError), + ([CUBE_1, iris.cube.Cube(0.0)], LONG_NAME, ValueError), + ( + [iris.cube.Cube([0.0])], + LONG_NAME, + iris.exceptions.CoordinateNotFoundError, + ), + ( + [CUBE_1, CUBE_WRONG_COORD, CUBE_3], + LONG_NAME, + iris.exceptions.CoordinateNotFoundError, + ), + ([CUBE_1, CUBE_4, CUBE_3], LONG_NAME, ValueError), + ([CUBE_7, CUBE_1, CUBE_WRONG], LONG_NAME, DIM_COORD_LONGEST), + ([CUBE_8], 'time', DIM_COORD_4), +] + + +@pytest.mark.parametrize('cubes,coord_name,output', TEST_UNIFY_1D_CUBES) +@mock.patch.object(ih, '_transform_coord_to_ref', autospec=True) +@mock.patch( + 'esmvaltool.diag_scripts.shared.io.iris.util.unify_time_units', + autospec=True) +def test_unify_1d_cubes(mock_unify_time, mock_transform, cubes, coord_name, + output): + """Test unifying 1D cubes.""" + # ValueErrors + if isinstance(output, type): + with pytest.raises(output): + ih.unify_1d_cubes(cubes, coord_name) + return + + # Working examples + cubes = iris.cube.CubeList(cubes) + ih.unify_1d_cubes(cubes, coord_name) + assert mock_transform.call_args_list == [mock.call(cubes, output)] + mock_transform.reset_mock() + if coord_name == 'time': + assert mock_unify_time.call_count == 1 + else: + assert not mock_unify_time.called + + +@pytest.fixture +def cube_with_time(): + """Cube that includes time coordinate.""" + time_coord = iris.coords.DimCoord( + [1, 3], + bounds=[[0, 2], [2, 4]], + standard_name='time', + units='days since 1850-01-03', + ) + cube = iris.cube.Cube( + [1, 2], + var_name='x', + dim_coords_and_dims=[(time_coord, 0)], + ) + return cube + + +def test_unify_time_coord_str(cube_with_time): + """Test ``unify_time_coord``.""" + ih.unify_time_coord(cube_with_time) + + expected_units = Unit('days since 1850-01-01 00:00:00', + calendar='standard') + time_coord = cube_with_time.coord('time') + + assert time_coord.var_name == 'time' + assert time_coord.standard_name == 'time' + assert time_coord.long_name == 'time' + assert time_coord.units == expected_units + assert time_coord.attributes == {} + + np.testing.assert_array_equal(time_coord.points, [3, 5]) + np.testing.assert_array_equal(time_coord.bounds, [[2, 4], [4, 6]]) + + +def test_unify_time_coord_unit(cube_with_time): + """Test ``unify_time_coord``.""" + target_units = Unit('days since 1850-01-02 00:00:00', calendar='gregorian') + ih.unify_time_coord(cube_with_time, target_units=target_units) + + expected_units = Unit('days since 1850-01-02 00:00:00', + calendar='gregorian') + time_coord = cube_with_time.coord('time') + + assert time_coord.var_name == 'time' + assert time_coord.standard_name == 'time' + assert time_coord.long_name == 'time' + assert time_coord.units == expected_units + assert time_coord.attributes == {} + assert time_coord.units == expected_units + + np.testing.assert_array_equal(time_coord.points, [2, 4]) + 
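# Rebasing from 'days since 1850-01-03' to 'days since 1850-01-02'
+ # shifts every value by +1 day: points [1, 3] become [2, 4] and
+ # bounds [[0, 2], [2, 4]] become [[1, 3], [3, 5]].
+ 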
np.testing.assert_array_equal(time_coord.bounds, [[1, 3], [3, 5]]) + + +def test_unify_time_coord_no_bounds(cube_with_time): + """Test ``unify_time_coord``.""" + cube_with_time.coord('time').bounds = None + ih.unify_time_coord(cube_with_time, 'days since 1850-01-04') + + expected_units = Unit('days since 1850-01-04 00:00:00') + time_coord = cube_with_time.coord('time') + + assert time_coord.var_name == 'time' + assert time_coord.standard_name == 'time' + assert time_coord.long_name == 'time' + assert time_coord.units == expected_units + assert time_coord.attributes == {} + + np.testing.assert_array_equal(time_coord.points, [0, 2]) + assert time_coord.bounds is None + + +def test_unify_time_coord_no_time(cube_with_time): + """Test ``unify_time_coord``.""" + cube_with_time.remove_coord('time') + with pytest.raises(iris.exceptions.CoordinateNotFoundError): + ih.unify_time_coord(cube_with_time) diff --git a/tests/integration/preprocessor/_io/__init__.py b/tests/unit/documentation/__init__.py similarity index 100% rename from tests/integration/preprocessor/_io/__init__.py rename to tests/unit/documentation/__init__.py diff --git a/tests/unit/documentation/test_changelog.py b/tests/unit/documentation/test_changelog.py new file mode 100644 index 0000000000..d43d8bc4ff --- /dev/null +++ b/tests/unit/documentation/test_changelog.py @@ -0,0 +1,24 @@ +"""Tests for the changelog.""" +import collections +import os +import re + + +def test_duplications_in_changelog(): + changelog_path = os.path.join(os.path.dirname(__file__), '../../..', + 'doc/sphinx/source/changelog.rst') + with open(changelog_path) as changelog: + changelog = changelog.read() + + # Find all pull request links + pr_links = re.compile( + "<https://github.com/ESMValGroup/ESMValTool/pull/\\d+>") + links = pr_links.findall(changelog) + + # Check for duplicates + if len(links) != len(set(links)): + print('The following PRs are duplicated in the changelog:') + print('\n'.join((link + for link, count in collections.Counter(links).items() + if count > 1))) + assert False diff --git a/tests/unit/preprocessor/__init__.py b/tests/unit/preprocessor/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/unit/preprocessor/_area/__init__.py b/tests/unit/preprocessor/_area/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/unit/preprocessor/_area/test_area.py b/tests/unit/preprocessor/_area/test_area.py deleted file mode 100644 index 3f85825f42..0000000000 --- a/tests/unit/preprocessor/_area/test_area.py +++ /dev/null @@ -1,155 +0,0 @@ -"""Unit tests for the :func:`esmvaltool.preprocessor._area` module.""" - -import unittest - -import iris -import numpy as np -from cf_units import Unit - -import tests -from esmvaltool.preprocessor._area import ( - average_region, extract_named_regions, extract_region) - - -class Test(tests.Test): - """Test class for the :func:`esmvaltool.preprocessor._area_pp` module.""" - - def setUp(self): - """Prepare tests.""" - self.coord_sys = iris.coord_systems.GeogCS( - iris.fileformats.pp.EARTH_RADIUS) - data = np.ones((5, 5)) - lons = iris.coords.DimCoord( - [i + .5 for i in range(5)], - standard_name='longitude', - bounds=[[i, i + 1.] for i in range(5)], # [0,1] to [4,5] - units='degrees_east', - coord_system=self.coord_sys) - lats = iris.coords.DimCoord([i + .5 for i in range(5)], - standard_name='latitude', - bounds=[[i, i + 1.] 
for i in range(5)], - units='degrees_north', - coord_system=self.coord_sys) - coords_spec = [(lats, 0), (lons, 1)] - self.grid = iris.cube.Cube(data, dim_coords_and_dims=coords_spec) - - ndata = np.ones((6, 6)) - nlons = iris.coords.DimCoord( - [i - 2.5 for i in range(6)], - standard_name='longitude', - bounds=[[i - 3., i - 2.] for i in range(6)], # [3,2] to [4,5] - units='degrees_east', - coord_system=self.coord_sys) - nlats = iris.coords.DimCoord( - [i - 2.5 for i in range(6)], - standard_name='latitude', - bounds=[[i - 3., i - 2.] for i in range(6)], - units='degrees_north', - coord_system=self.coord_sys) - coords_spec = [(nlats, 0), (nlons, 1)] - self.negative_grid = iris.cube.Cube( - ndata, dim_coords_and_dims=coords_spec) - - def test_average_region_mean(self): - """Test for area average of a 2D field.""" - result = average_region(self.grid, 'latitude', 'longitude') - expected = np.array([1.]) - self.assertArrayEqual(result.data, expected) - - def test_average_region_min(self): - """Test for area average of a 2D field.""" - result = average_region(self.grid, 'latitude', 'longitude', - operator='min') - expected = np.array([1.]) - self.assertArrayEqual(result.data, expected) - - def test_average_region_max(self): - """Test for area average of a 2D field.""" - result = average_region(self.grid, 'latitude', 'longitude', - operator='max') - expected = np.array([1.]) - self.assertArrayEqual(result.data, expected) - - def test_average_region_median(self): - """Test for area average of a 2D field.""" - result = average_region(self.grid, 'latitude', 'longitude', - operator='median') - expected = np.array([1.]) - self.assertArrayEqual(result.data, expected) - - def test_average_region_std_dev(self): - """Test for area average of a 2D field.""" - result = average_region(self.grid, 'latitude', 'longitude', - operator='std_dev') - expected = np.array([0.]) - self.assertArrayEqual(result.data, expected) - - def test_average_region_variance(self): - """Test for area average of a 2D field.""" - result = average_region(self.grid, 'latitude', 'longitude', - operator='variance') - expected = np.array([0.]) - self.assertArrayEqual(result.data, expected) - - def test_average_region_neg_lon(self): - """Test for area average of a 2D field.""" - result = average_region(self.negative_grid, 'latitude', 'longitude') - expected = np.array([1.]) - self.assertArrayEqual(result.data, expected) - - def test_extract_region(self): - """Test for extracting a region from a 2D field.""" - result = extract_region(self.grid, 1.5, 2.5, 1.5, 2.5) - # expected outcome - expected = np.ones((2, 2)) - self.assertArrayEqual(result.data, expected) - - def test_extract_region_neg_lon(self): - """Test for extracting a region with a negative longitude field.""" - result = extract_region(self.negative_grid, -0.5, 0.5, -0.5, 0.5) - expected = np.ones((2, 2)) - self.assertArrayEqual(result.data, expected) - - def test_extract_named_region(self): - """Test for extracting a named region.""" - # tests: - # Create a cube with regions - times = np.array([15., 45., 75.]) - bounds = np.array([[0., 30.], [30., 60.], [60., 90.]]) - time = iris.coords.DimCoord( - times, - bounds=bounds, - standard_name='time', - units=Unit('days since 1950-01-01', calendar='gregorian')) - - regions = ['region1', 'region2', 'region3'] - region = iris.coords.AuxCoord( - regions, - standard_name='region', - units='1', - ) - - data = np.ones((3, 3)) - region_cube = iris.cube.Cube( - data, - dim_coords_and_dims=[(time, 0)], - aux_coords_and_dims=[(region, 1)]) - - # 
test string region - result1 = extract_named_regions(region_cube, 'region1') - expected = np.ones((3, )) - self.assertArrayEqual(result1.data, expected) - - # test list of regions - result2 = extract_named_regions(region_cube, ['region1', 'region2']) - expected = np.ones((3, 2)) - self.assertArrayEqual(result2.data, expected) - - # test for expected failures: - with self.assertRaises(ValueError): - extract_named_regions(region_cube, 'reg_A') - extract_named_regions(region_cube, ['region1', 'reg_A']) - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/unit/preprocessor/_derive/__init__.py b/tests/unit/preprocessor/_derive/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/unit/preprocessor/_derive/test_p_level_widths.py b/tests/unit/preprocessor/_derive/test_p_level_widths.py deleted file mode 100644 index 35ba177a92..0000000000 --- a/tests/unit/preprocessor/_derive/test_p_level_widths.py +++ /dev/null @@ -1,88 +0,0 @@ -"""Tests for toz variable derivation functions.""" - -import numpy as np -import pytest - -from esmvaltool.preprocessor._derive.toz import _p_level_widths - - -def test_col_is_not_monotonic(): - """Test for non-monotonic column.""" - plev = 1000 - top_limit = 5 - col = np.array([1, 2, 3, 2, 1]) - col = np.insert(col, 0, plev) - col = np.append(col, top_limit) - with pytest.raises(ValueError): - _p_level_widths(col) - - -def test_keeping_column_length(): - """Test for level widths keeping column lenght.""" - plev = 1000 - top_limit = 5 - col = np.array([1000, 900, 800]) - col = np.insert(col, 0, plev) - col = np.append(col, top_limit) - assert len(_p_level_widths(col)) == len(col) - 2 - - -def test_low_lev_surf_press(): - """Test for lowest level equal to surface pressure.""" - plev = 1000 - top_limit = 5 - col = np.array([1000, 900, 800]) - col = np.insert(col, 0, plev) - col = np.append(col, top_limit) - result = np.array([50, 100, 845]) - assert all(_p_level_widths(col) == result) - - -def test_low_lev_above_surf_press(): - """Test for lowest level above surface pressure.""" - plev = 1020 - top_limit = 5 - col = np.array([1000, 900, 800]) - col = np.insert(col, 0, plev) - col = np.append(col, top_limit) - result = np.array([70, 100, 845]) - assert all(_p_level_widths(col) == result) - - -def test_low_lev_below_surf_press(): - """Test for lowest level below surface pressure.""" - plev = 970 - top_limit = 5 - col = np.array([np.NaN, 900, 800]) - col = np.insert(col, 0, plev) - col = np.append(col, top_limit) - result = np.array([0, 120, 845]) - assert all(_p_level_widths(col) == result) - - col = np.array([np.NaN, np.NaN, 900, 800]) - col = np.insert(col, 0, plev) - col = np.append(col, top_limit) - result = np.array([0, 0, 120, 845]) - assert all(_p_level_widths(col) == result) - - -def test_high_level_top_limit(): - """Test for highest level equal to top limit.""" - plev = 1020 - top_limit = 5 - col = np.array([1000, 900, 5]) - col = np.insert(col, 0, plev) - col = np.append(col, top_limit) - result = np.array([70, 50 + 895 / 2, 895 / 2]) - assert all(_p_level_widths(col) == result) - - -def test_high_level_above_top_limit(): - """Test for highest level above top limit.""" - plev = 1020 - top_limit = 5 - col = np.array([1000, 900, 3]) - col = np.insert(col, 0, plev) - col = np.append(col, top_limit) - with pytest.raises(ValueError): - _p_level_widths(col) diff --git a/tests/unit/preprocessor/_mapping/__init__.py b/tests/unit/preprocessor/_mapping/__init__.py deleted file mode 100644 index 367e65a2b8..0000000000 --- 
a/tests/unit/preprocessor/_mapping/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Unit tests for the :mod:`esmvaltool.preprocessor._mapping` module.""" diff --git a/tests/unit/preprocessor/_mapping/test_mapping.py b/tests/unit/preprocessor/_mapping/test_mapping.py deleted file mode 100644 index ba1b9b7118..0000000000 --- a/tests/unit/preprocessor/_mapping/test_mapping.py +++ /dev/null @@ -1,255 +0,0 @@ -"""Unit tests for the esmvaltool.preprocessor._mapping module.""" -import cf_units -import iris -import mock -import numpy as np - -import tests -from esmvaltool.preprocessor._mapping import (get_empty_data, map_slices, - ref_to_dims_index) - - -class TestHelpers(tests.Test): - """Unit tests for all helper methods.""" - - def setUp(self): - """Set up basic fixtures.""" - self.coord_system = mock.Mock(return_value=None) - self.scalar_coord = mock.sentinel.scalar_coord - self.scalar_coord.name = lambda: 'scalar_coord' - self.coord = mock.sentinel.coord - self.coords = mock.Mock(return_value=[self.scalar_coord, self.coord]) - - def coord(name_or_coord): - """Return coord for mock cube.""" - if name_or_coord == 'coord': - return self.coord - elif name_or_coord == 'scalar_coord': - return self.scalar_coord - else: - raise iris.exceptions.CoordinateNotFoundError('') - - def coord_dims(coord): - """Return associated dims for coord in mock cube.""" - if coord == self.coord: - return [0] - elif coord == self.scalar_coord: - return [] - else: - raise iris.exceptions.CoordinateNotFoundError('') - - self.cube = mock.Mock( - spec=iris.cube.Cube, - dtype=np.float32, - coord_system=self.coord_system, - coords=self.coords, - coord=coord, - coord_dims=coord_dims, - ndim=4, - ) - - def test_get_empty_data(self): - """Test creation of empty data.""" - shape = (3, 3) - data = get_empty_data(shape) - self.assertIsInstance(data, np.ma.MaskedArray) - self.assertEqual(data.shape, shape) - - def test_ref_to_dims_index__int(self): - """Test ref_to_dims_index with valid integer.""" - dims = ref_to_dims_index(self.cube, 0) - self.assertEqual([0], dims) - - def test_ref_to_dims_index__invalid_int(self): - """Test ref_to_dims_index with invalid integer.""" - self.assertRaises(ValueError, ref_to_dims_index, self.cube, -1) - self.assertRaises(ValueError, ref_to_dims_index, self.cube, 100) - - def test_ref_to_dims_index__scalar_coord(self): - """Test ref_to_dims_index with scalar coordinate.""" - self.assertRaises(ValueError, ref_to_dims_index, self.cube, - 'scalar_coord') - - def test_ref_to_dims_index__valid_coordinate_name(self): - """Test ref_to_dims_index with valid coordinate name.""" - dims = ref_to_dims_index(self.cube, 'coord') - self.assertEqual([0], dims) - - def test_ref_to_dims_index__invalid_coordinate_name(self): - """Test ref_to_dims_index with invalid coordinate name.""" - self.assertRaises(iris.exceptions.CoordinateNotFoundError, - ref_to_dims_index, self.cube, 'test') - - def test_ref_to_dims_index__invalid_type(self): - """Test ref_to_dims_index with invalid argument.""" - self.assertRaises(ValueError, ref_to_dims_index, self.cube, - mock.sentinel.something) - - -class Test(tests.Test): - """Unit tests for the main mapping method.""" - - # pylint: disable=too-many-instance-attributes - - def setup_coordinates(self): - """Set up coordinates for mock cube.""" - self.time = mock.Mock( - spec=iris.coords.DimCoord, - standard_name='time', - long_name='time', - shape=(3, ), - ) - self.z = mock.Mock( - spec=iris.coords.DimCoord, - standard_name='height', - long_name='height', - shape=(4, ), - ) - 
self.src_latitude = mock.Mock( - spec=iris.coords.DimCoord, - standard_name='latitude', - long_name='latitude', - shape=(5, ), - points=np.array([1.1, 2.2, 3.3, 4.4, 5.5]), - ) - self.src_longitude = mock.Mock( - spec=iris.coords.DimCoord, - standard_name='longitude', - long_name='longitude', - shape=(6, ), - points=np.array([1.1, 2.2, 3.3, 4.4, 5.5, 6.6]), - ) - self.dst_latitude = mock.Mock( - spec=iris.coords.DimCoord, - standard_name='latitude', - long_name='latitude', - shape=(2, ), - points=np.array([1.1, 2.2]), - ) - self.dst_longitude = mock.Mock( - spec=iris.coords.DimCoord, - standard_name='longitude', - long_name='longitude', - shape=(2, ), - points=np.array([1.1, 2.2]), - ) - - def setUp(self): - """Set up fixtures for mapping test.""" - self.coord_system = mock.Mock(return_value=None) - self.scalar_coord = mock.sentinel.scalar_coord - self.scalar_coord.name = lambda: 'scalar_coord' - self.setup_coordinates() - - def src_coord(name_or_coord): - """Return coord for mock source cube.""" - if name_or_coord in ['latitude', self.src_latitude]: - return self.src_latitude - elif name_or_coord in ['longitude', self.src_longitude]: - return self.src_longitude - elif name_or_coord == 'scalar_coord': - return self.scalar_coord - else: - raise iris.exceptions.CoordinateNotFoundError('') - - def coord_dims(coord): - """Return coord dim for mock cubes.""" - if coord in [self.time, self.dst_latitude]: - return [0] - elif coord in [self.z, self.dst_longitude]: - return [1] - elif coord in [self.src_latitude]: - return [2] - elif coord in [self.src_longitude]: - return [3] - elif coord == self.scalar_coord: - return [] - else: - raise iris.exceptions.CoordinateNotFoundError('') - - def src_coords(*args, **kwargs): - """Return selected coords for source cube.""" - # pylint: disable=unused-argument - # Here, args is ignored. - dim_coords_list = [ - self.time, self.z, self.src_latitude, self.src_longitude - ] - contains_dimension = kwargs.get('contains_dimension', None) - if contains_dimension is not None: - return [dim_coords_list[contains_dimension]] - dim_coords = kwargs.get('dim_coords', None) - if dim_coords: - return dim_coords_list - return [self.scalar_coord] + dim_coords_list - - def src_repr_coords(*args, **kwargs): - """Return selected coords for source representant cube.""" - # pylint: disable=unused-argument - # Here, args is ignored. - dim_coords = [self.src_latitude, self.src_longitude] - if kwargs.get('dim_coords', False): - return dim_coords - if 'contains_dimension' in kwargs: - return dim_coords - return [self.scalar_coord] + dim_coords - - def dst_repr_coords(*args, **kwargs): - """Return selected coords for destination representant cube.""" - # pylint: disable=unused-argument - # Here, args is ignored. 
- dim_coords = [self.dst_latitude, self.dst_longitude] - if kwargs.get('dim_coords', False): - return dim_coords - return [self.scalar_coord] + dim_coords - - self.src_cube = mock.Mock( - spec=iris.cube.Cube, - dtype=np.float32, - coord_system=self.coord_system, - coords=src_coords, - coord=src_coord, - coord_dims=coord_dims, - ndim=4, - shape=(3, 4, 5, 6), - standard_name='sea_surface_temperature', - long_name='Sea surface temperature', - var_name='tos', - units=cf_units.Unit('K'), - attributes={}, - cell_methods={}, - __getitem__=lambda a, b: mock.sentinel.src_data, - ) - self.src_repr = mock.Mock( - spec=iris.cube.Cube, - dtype=np.float32, - coords=src_repr_coords, - ndim=2, - ) - self.dst_repr = mock.Mock( - spec=iris.cube.Cube, - dtype=np.float32, - coords=dst_repr_coords, - shape=(2, 2), - ) - - @mock.patch('esmvaltool.preprocessor._mapping.get_empty_data') - @mock.patch('iris.cube.Cube') - def test_map_slices(self, mock_cube, mock_get_empty_data): - """Test map_slices.""" - mock_get_empty_data.return_value = mock.sentinel.empty_data - dst = map_slices(self.src_cube, lambda s: np.ones((2, 2)), - self.src_repr, self.dst_repr) - self.assertEqual(dst, mock_cube.return_value) - dim_coords = self.src_cube.coords(dim_coords=True)[:2] \ - + self.dst_repr.coords(dim_coords=True) - dim_coords_and_dims = [(c, i) for i, c in enumerate(dim_coords)] - mock_cube.assert_called_once_with( - data=mock.sentinel.empty_data, - standard_name=self.src_cube.standard_name, - long_name=self.src_cube.long_name, - var_name=self.src_cube.var_name, - units=self.src_cube.units, - attributes=self.src_cube.attributes, - cell_methods=self.src_cube.cell_methods, - dim_coords_and_dims=dim_coords_and_dims, - ) diff --git a/tests/unit/preprocessor/_mask/__init__.py b/tests/unit/preprocessor/_mask/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/unit/preprocessor/_mask/test_mask.py b/tests/unit/preprocessor/_mask/test_mask.py deleted file mode 100644 index e2a3e88f67..0000000000 --- a/tests/unit/preprocessor/_mask/test_mask.py +++ /dev/null @@ -1,60 +0,0 @@ -"""Unit test for the :func:`esmvaltool.preprocessor._mask` function""" - -import unittest - -import iris -import numpy as np - -import tests -from esmvaltool.preprocessor._mask import ( - mask_above_threshold, mask_below_threshold, mask_inside_range, - mask_outside_range) - - -class Test(tests.Test): - """Test class for _mask""" - - def setUp(self): - """Prepare tests""" - coord_sys = iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS) - self.data2 = np.array([[0., 1.], [2., 3.]]) - lons2 = iris.coords.DimCoord([1.5, 2.5], - standard_name='longitude', - bounds=[[1., 2.], [2., 3.]], - units='degrees_east', - coord_system=coord_sys) - lats2 = iris.coords.DimCoord([1.5, 2.5], - standard_name='latitude', - bounds=[[1., 2.], [2., 3.]], - units='degrees_north', - coord_system=coord_sys) - coords_spec3 = [(lats2, 0), (lons2, 1)] - self.arr = iris.cube.Cube(self.data2, dim_coords_and_dims=coords_spec3) - - def test_mask_above_threshold(self): - """Test to mask above a threshold.""" - result = mask_above_threshold(self.arr, 1.5) - expected = np.ma.array(self.data2, mask=[[False, False], [True, True]]) - self.assertArrayEqual(result.data, expected) - - def test_mask_below_threshold(self): - """Test to mask below a threshold.""" - result = mask_below_threshold(self.arr, 1.5) - expected = np.ma.array(self.data2, mask=[[True, True], [False, False]]) - self.assertArrayEqual(result.data, expected) - - def test_mask_inside_range(self): - 
"""Test to mask inside a range.""" - result = mask_inside_range(self.arr, 0.5, 2.5) - expected = np.ma.array(self.data2, mask=[[False, True], [True, False]]) - self.assertArrayEqual(result.data, expected) - - def test_mask_outside_range(self): - """Test to mask outside a range.""" - result = mask_outside_range(self.arr, 0.5, 2.5) - expected = np.ma.array(self.data2, mask=[[True, False], [False, True]]) - self.assertArrayEqual(result.data, expected) - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/unit/preprocessor/_regrid/__init__.py b/tests/unit/preprocessor/_regrid/__init__.py deleted file mode 100644 index 0692e0b44b..0000000000 --- a/tests/unit/preprocessor/_regrid/__init__.py +++ /dev/null @@ -1,113 +0,0 @@ -""" -Unit tests for the :mod:`esmvaltool.preprocessor.regrid` module. - -""" - -import iris -import numpy as np -from iris.coords import AuxCoord, CellMethod, DimCoord - - -def _make_vcoord(data, dtype=None): - """ - Create a synthetic test vertical coordinate. - - """ - if dtype is None: - dtype = np.dtype('int8') - - if isinstance(data, int): - data = np.arange(data, dtype=dtype) - elif not isinstance(data, np.ndarray): - data = np.asarray(data, dtype=dtype) - - # Create a pressure vertical coordinate. - kwargs = dict( - standard_name='air_pressure', - long_name='Pressure', - var_name='plev', - units='hPa', - attributes=dict(positive='down'), - coord_system=None) - - try: - zcoord = DimCoord(data, **kwargs) - except ValueError: - zcoord = AuxCoord(data, **kwargs) - - return zcoord - - -def _make_cube(data, aux_coord=True, dim_coord=True, dtype=None): - """ - Create a 3d synthetic test cube. - - """ - if dtype is None: - dtype = np.dtype('int8') - - if not isinstance(data, np.ndarray): - data = np.empty(data, dtype=dtype) - - z, y, x = data.shape - - # Create the cube. - cm = CellMethod( - method='mean', coords='time', intervals='20 minutes', comments=None) - kwargs = dict( - standard_name='air_temperature', - long_name='Air Temperature', - var_name='ta', - units='K', - attributes=dict(cube='attribute'), - cell_methods=(cm, )) - cube = iris.cube.Cube(data, **kwargs) - - # Create a synthetic test vertical coordinate. - if dim_coord: - cube.add_dim_coord(_make_vcoord(z, dtype=dtype), 0) - - # Create a synthetic test latitude coordinate. - data = np.arange(y, dtype=dtype) + 1 - cs = iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS) - kwargs = dict( - standard_name='latitude', - long_name='Latitude', - var_name='lat', - units='degrees_north', - attributes=dict(latitude='attribute'), - coord_system=cs) - ycoord = DimCoord(data, **kwargs) - if data.size > 1: - ycoord.guess_bounds() - cube.add_dim_coord(ycoord, 1) - - # Create a synthetic test longitude coordinate. - data = np.arange(x, dtype=dtype) + 1 - kwargs = dict( - standard_name='longitude', - long_name='Longitude', - var_name='lon', - units='degrees_east', - attributes=dict(longitude='attribute'), - coord_system=cs) - xcoord = DimCoord(data, **kwargs) - if data.size > 1: - xcoord.guess_bounds() - cube.add_dim_coord(xcoord, 2) - - # Create a synthetic test 2d auxiliary coordinate - # that spans the vertical dimension. 
- if aux_coord: - data = np.arange(np.prod((z, y)), dtype=dtype).reshape(z, y) - kwargs = dict( - standard_name=None, - long_name='Pressure Slice', - var_name='aplev', - units='hPa', - attributes=dict(positive='down'), - coord_system=None) - zycoord = AuxCoord(data, **kwargs) - cube.add_aux_coord(zycoord, (0, 1)) - - return cube diff --git a/tests/unit/preprocessor/_regrid/test__create_cube.py b/tests/unit/preprocessor/_regrid/test__create_cube.py deleted file mode 100644 index 016926ceda..0000000000 --- a/tests/unit/preprocessor/_regrid/test__create_cube.py +++ /dev/null @@ -1,60 +0,0 @@ -""" -Unit tests for the :func:`esmvaltool.preprocessor.regrid._create_cube` -function. - -""" - -import unittest - -import numpy as np - -import tests -from esmvaltool.preprocessor._regrid import _create_cube as create_cube -from tests.unit.preprocessor._regrid import _make_cube, _make_vcoord - - -class Test(tests.Test): - def setUp(self): - shape = (3, 2, 1) - self.dtype = np.dtype('int8') - self.cube = _make_cube(shape, dtype=self.dtype) - - def test_invalid_shape__data_mismatch_with_levels(self): - levels = np.array([0, 1]) - emsg = 'Mismatch between data and levels' - with self.assertRaisesRegex(ValueError, emsg): - create_cube(self.cube, self.cube.data, levels) - - def test(self): - shape = (2, 2, 1) - data = np.empty(shape) - levels = np.array([10, 20]) - result = create_cube(self.cube, data, levels) - expected = _make_cube(data, aux_coord=False, dim_coord=False) - vcoord = _make_vcoord(levels) - expected.add_dim_coord(vcoord, 0) - self.assertEqual(result, expected) - - def test_non_monotonic(self): - shape = (2, 2, 1) - data = np.empty(shape) - levels = np.array([10, 10]) - result = create_cube(self.cube, data, levels) - expected = _make_cube(data, aux_coord=False, dim_coord=False) - vcoord = _make_vcoord(levels) - expected.add_aux_coord(vcoord, 0) - self.assertEqual(result, expected) - - def test_collapse(self): - shape = (1, 2, 1) - data = np.empty(shape) - levels = np.array([123]) - result = create_cube(self.cube, data, levels) - expected = _make_cube(data, aux_coord=False, dim_coord=False)[0] - vcoord = _make_vcoord(levels) - expected.add_aux_coord(vcoord) - self.assertEqual(result, expected) - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/unit/preprocessor/_regrid/test__stock_cube.py b/tests/unit/preprocessor/_regrid/test__stock_cube.py deleted file mode 100644 index cd1b15f47a..0000000000 --- a/tests/unit/preprocessor/_regrid/test__stock_cube.py +++ /dev/null @@ -1,115 +0,0 @@ -""" -Unit tests for the :func:`esmvaltool.preprocessor.regrid._stock_cube` -function. - -""" - -import unittest - -import iris -import mock -import numpy as np - -import tests -from esmvaltool.preprocessor._regrid import (_LAT_MAX, _LAT_MIN, _LAT_RANGE, - _LON_MAX, _LON_MIN, _LON_RANGE) -from esmvaltool.preprocessor._regrid import _stock_cube as stock_cube - - -class Test(tests.Test): - def _check(self, dx, dy, lat_off=True, lon_off=True): - # Generate the expected stock cube coordinate points. - dx, dy = float(dx), float(dy) - mid_dx, mid_dy = dx / 2, dy / 2 - if lat_off and lon_off: - expected_lat_points = np.linspace( - _LAT_MIN + mid_dy, _LAT_MAX - mid_dy, _LAT_RANGE / dy) - expected_lon_points = np.linspace( - _LON_MIN + mid_dx, _LON_MAX - mid_dx, _LON_RANGE / dx) - else: - expected_lat_points = np.linspace(_LAT_MIN, _LAT_MAX, - _LAT_RANGE / dy + 1) - expected_lon_points = np.linspace(_LON_MIN, _LON_MAX - dx, - _LON_RANGE / dx) - - # Check the stock cube coordinates. 
- self.assertEqual(self.mock_DimCoord.call_count, 2) - call_lats, call_lons = self.mock_DimCoord.call_args_list - - # Check the latitude coordinate creation. - [args], kwargs = call_lats - self.assertArrayEqual(args, expected_lat_points) - expected_lat_kwargs = dict( - standard_name='latitude', units='degrees_north', var_name='lat') - self.assertEqual(kwargs, expected_lat_kwargs) - - # Check the longitude coordinate creation. - [args], kwargs = call_lons - self.assertArrayEqual(args, expected_lon_points) - expected_lon_kwargs = dict( - standard_name='longitude', units='degrees_east', var_name='lon') - self.assertEqual(kwargs, expected_lon_kwargs) - - # Check that the coordinate guess_bounds method has been called. - expected_calls = [mock.call.guess_bounds()] * 2 - self.assertEqual(self.mock_coord.mock_calls, expected_calls) - - # Check the stock cube creation. - self.mock_Cube.assert_called_once() - _, kwargs = self.mock_Cube.call_args - spec = [(self.mock_coord, 0), (self.mock_coord, 1)] - expected_cube_kwargs = dict(dim_coords_and_dims=spec) - self.assertEqual(kwargs, expected_cube_kwargs) - - # Reset the mocks to enable multiple calls per test-case. - for mocker in self.mocks: - mocker.reset_mock() - - def setUp(self): - self.Cube = mock.sentinel.Cube - self.mock_Cube = self.patch('iris.cube.Cube', return_value=self.Cube) - self.mock_coord = mock.Mock(spec=iris.coords.DimCoord) - self.mock_DimCoord = self.patch( - 'iris.coords.DimCoord', return_value=self.mock_coord) - self.mocks = [self.mock_Cube, self.mock_coord, self.mock_DimCoord] - - def test_invalid_cell_spec__alpha(self): - emsg = 'Invalid MxN cell specification' - with self.assertRaisesRegex(ValueError, emsg): - stock_cube('Ax1') - - def test_invalid_cell_spec__separator(self): - emsg = 'Invalid MxN cell specification' - with self.assertRaisesRegex(ValueError, emsg): - stock_cube('1y1') - - def test_invalid_cell_spec__longitude(self): - emsg = 'Invalid longitude delta in MxN cell specification' - with self.assertRaisesRegex(ValueError, emsg): - stock_cube('1.3x1') - - def test_invalid_cell_spec__latitude(self): - emsg = 'Invalid latitude delta in MxN cell specification' - with self.assertRaisesRegex(ValueError, emsg): - stock_cube('1x2.3') - - def test_specs(self): - specs = ['0.5x0.5', '1x1', '2.5x2.5', '5x5', '10x10'] - for spec in specs: - result = stock_cube(spec) - self.assertEqual(result, self.Cube) - self._check(*list(map(float, spec.split('x')))) - - def test_specs_no_offset(self): - specs = ['0.5x0.5', '1x1', '2.5x2.5', '5x5', '10x10'] - for spec in specs: - result = stock_cube(spec, lat_offset=False, lon_offset=False) - self.assertEqual(result, self.Cube) - self._check( - *list(map(float, spec.split('x'))), - lat_off=False, - lon_off=False) - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/unit/preprocessor/_regrid/test_extract_levels.py b/tests/unit/preprocessor/_regrid/test_extract_levels.py deleted file mode 100644 index 5f4442357c..0000000000 --- a/tests/unit/preprocessor/_regrid/test_extract_levels.py +++ /dev/null @@ -1,177 +0,0 @@ -"""Unit tests for :func:`esmvaltool.preprocessor.regrid.extract_levels`.""" - -import unittest - -import iris -import mock -import numpy as np -from numpy import ma - -import tests -from esmvaltool.preprocessor._regrid import (_MDI, VERTICAL_SCHEMES, - extract_levels) -from tests.unit.preprocessor._regrid import _make_cube, _make_vcoord - - -class Test(tests.Test): - def setUp(self): - self.shape = (3, 2, 1) - self.z = self.shape[0] - self.dtype = np.dtype('int8') 
-        data = np.arange(
-            np.prod(self.shape), dtype=self.dtype).reshape(self.shape)
-        self.cube = _make_cube(data, dtype=self.dtype)
-        self.created_cube = mock.sentinel.created_cube
-        self.mock_create_cube = self.patch(
-            'esmvaltool.preprocessor._regrid._create_cube',
-            return_value=self.created_cube)
-        self.schemes = [
-            'linear', 'nearest', 'linear_horizontal_extrapolate_vertical',
-            'nearest_horizontal_extrapolate_vertical'
-        ]
-
-    def test_invalid_scheme__unknown(self):
-        levels = mock.sentinel.levels
-        scheme = mock.sentinel.scheme
-        emsg = 'Unknown vertical interpolation scheme'
-        with self.assertRaisesRegex(ValueError, emsg):
-            extract_levels(self.cube, levels, scheme)
-
-    def test_vertical_schemes(self):
-        self.assertEqual(set(VERTICAL_SCHEMES), set(self.schemes))
-
-    def test_nop__levels_match(self):
-        vcoord = _make_vcoord(self.z, dtype=self.dtype)
-        self.assertEqual(self.cube.coord(axis='z', dim_coords=True), vcoord)
-        levels = vcoord.points
-        result = extract_levels(self.cube, levels, 'linear')
-        self.assertEqual(id(result), id(self.cube))
-        self.assertEqual(result, self.cube)
-
-    def test_extraction(self):
-        levels = [0, 2]
-        result = extract_levels(self.cube, levels, 'linear')
-        data = np.array([0, 1, 4, 5], dtype=self.dtype).reshape(2, 2, 1)
-        expected = _make_cube(
-            data, aux_coord=False, dim_coord=False, dtype=self.dtype)
-        coord = self.cube.coord('Pressure Slice').copy()
-        expected.add_aux_coord(coord[levels], (0, 1))
-        coord = self.cube.coord('air_pressure').copy()
-        expected.add_dim_coord(coord[levels], 0)
-        self.assertEqual(result, expected)
-
-    def test_extraction__failure(self):
-        levels = [0, 2]
-        with mock.patch('iris.cube.Cube.extract', return_value=None):
-            emsg = 'Failed to extract levels'
-            with self.assertRaisesRegex(ValueError, emsg):
-                extract_levels(self.cube, levels, 'linear')
-
-    def test_interpolation(self):
-        new_data = np.array(True)
-        levels = np.array([0.5, 1.5])
-        scheme = 'linear'
-        with mock.patch(
-                'stratify.interpolate', return_value=new_data) as mocker:
-            result = extract_levels(self.cube, levels, scheme)
-        self.assertEqual(result, self.created_cube)
-        args, kwargs = mocker.call_args
-        # Check the stratify.interpolate args ...
-        self.assertEqual(len(args), 3)
-        self.assertArrayEqual(args[0], levels)
-        pts = self.cube.coord(axis='z', dim_coords=True).points
-        src_levels_broadcast = np.broadcast_to(
-            pts.reshape(self.z, 1, 1), self.cube.shape)
-        self.assertArrayEqual(args[1], src_levels_broadcast)
-        self.assertArrayEqual(args[2], self.cube.data)
-        # Check the stratify.interpolate kwargs ...
-        self.assertEqual(
-            kwargs, dict(
-                axis=0, interpolation=scheme, extrapolation='nan'))
-        args, kwargs = self.mock_create_cube.call_args
-        # Check the _create_cube args ...
-        self.assertEqual(len(args), 3)
-        self.assertEqual(args[0], self.cube)
-        self.assertArrayEqual(args[1], new_data)
-        self.assertArrayEqual(args[2], levels)
-        # Check the _create_cube kwargs ...
-        self.assertEqual(kwargs, dict())
-
-    def test_interpolation__extrapolated_NaN_filling(self):
-        new_data = np.array([0, np.nan])
-        levels = [0.5, 1.5]
-        scheme = 'nearest'
-        with mock.patch(
-                'stratify.interpolate', return_value=new_data) as mocker:
-            result = extract_levels(self.cube, levels, scheme)
-        self.assertEqual(result, self.created_cube)
-        args, kwargs = mocker.call_args
-        # Check the stratify.interpolate args ...
-        self.assertEqual(len(args), 3)
-        self.assertArrayEqual(args[0], levels)
-        pts = self.cube.coord(axis='z', dim_coords=True).points
-        src_levels_broadcast = np.broadcast_to(
-            pts.reshape(self.z, 1, 1), self.cube.shape)
-        self.assertArrayEqual(args[1], src_levels_broadcast)
-        self.assertArrayEqual(args[2], self.cube.data)
-        # Check the stratify.interpolate kwargs ...
-        self.assertEqual(
-            kwargs, dict(
-                axis=0, interpolation=scheme, extrapolation='nan'))
-        args, kwargs = self.mock_create_cube.call_args
-        # Check the _create_cube args ...
-        self.assertEqual(len(args), 3)
-        self.assertArrayEqual(args[0], self.cube)
-        new_data[np.isnan(new_data)] = _MDI
-        self.assertArrayEqual(args[1], new_data)
-        self.assertArrayEqual(args[2], levels)
-        # Check the _create_cube kwargs ...
-        self.assertEqual(kwargs, dict())
-
-    def test_interpolation__masked(self):
-        levels = np.array([0.5, 1.5])
-        new_data = np.empty([len(levels)] + list(self.shape[1:]), dtype=float)
-        new_data[:, 0, :] = np.nan
-        new_data_mask = np.isnan(new_data)
-        scheme = 'linear'
-        mask = [[[False], [True]], [[True], [False]], [[False], [False]]]
-        masked = ma.empty(self.shape)
-        masked.mask = mask
-        cube = _make_cube(masked, dtype=self.dtype)
-        with mock.patch(
-                'stratify.interpolate', return_value=new_data) as mocker:
-            result = extract_levels(cube, levels, scheme)
-        self.assertEqual(result, self.created_cube)
-        args, kwargs = mocker.call_args
-        # Check the stratify.interpolate args ...
-        self.assertEqual(len(args), 3)
-        self.assertArrayEqual(args[0], levels)
-        pts = cube.coord(axis='z', dim_coords=True).points
-        src_levels_broadcast = np.broadcast_to(
-            pts.reshape(self.z, 1, 1), cube.shape)
-        self.assertArrayEqual(args[1], src_levels_broadcast)
-        self.assertArrayEqual(args[2], cube.data)
-        # Check the stratify.interpolate kwargs ...
-        self.assertEqual(
-            kwargs, dict(
-                axis=0, interpolation=scheme, extrapolation='nan'))
-        args, kwargs = self.mock_create_cube.call_args
-        # in-place for new extract_levels with nan's
-        new_data[np.isnan(new_data)] = _MDI
-        # Check the _create_cube args ...
-        self.assertEqual(len(args), 3)
-        self.assertEqual(args[0].metadata, cube.metadata)
-        coord_comparison = iris.analysis.coord_comparison(args[0], cube)
-        self.assertFalse(coord_comparison['not_equal']
-                         or coord_comparison['non_equal_data_dimension'])
-        self.assertArrayEqual(args[0].data, cube.data)
-        self.assertArrayEqual(args[1], new_data)
-        self.assertTrue(ma.isMaskedArray(args[1]))
-        self.assertArrayEqual(args[1].mask, new_data_mask)
-        self.assertArrayEqual(args[2], levels)
-        # Check the _create_cube kwargs ...
-        self.assertEqual(kwargs, dict())
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/tests/unit/preprocessor/_regrid/test_regrid.py b/tests/unit/preprocessor/_regrid/test_regrid.py
deleted file mode 100644
index 346850709a..0000000000
--- a/tests/unit/preprocessor/_regrid/test_regrid.py
+++ /dev/null
@@ -1,112 +0,0 @@
-"""
-Unit tests for the :func:`esmvaltool.preprocessor.regrid.regrid` function.
- -""" - -import unittest - -import iris -import mock - -import tests -from esmvaltool.preprocessor import regrid -from esmvaltool.preprocessor._regrid import _CACHE, HORIZONTAL_SCHEMES - - -class Test(tests.Test): - def _check(self, tgt_grid, scheme, spec=False): - expected_scheme = HORIZONTAL_SCHEMES[scheme] - - if spec: - spec = tgt_grid - self.assertIn(spec, _CACHE) - self.assertEqual(_CACHE[spec], self.tgt_grid) - self.coord_system.asset_called_once() - expected_calls = [ - mock.call(axis='x', dim_coords=True), - mock.call(axis='y', dim_coords=True) - ] - self.assertEqual(self.tgt_grid_coord.mock_calls, expected_calls) - self.regrid.assert_called_once_with(self.tgt_grid, expected_scheme) - else: - if scheme == 'unstructured_nearest': - expected_calls = [ - mock.call(axis='x', dim_coords=True), - mock.call(axis='y', dim_coords=True) - ] - self.assertEqual(self.coords.mock_calls, expected_calls) - expected_calls = [mock.call(self.coord), mock.call(self.coord)] - self.assertEqual(self.remove_coord.mock_calls, expected_calls) - self.regrid.assert_called_once_with(tgt_grid, expected_scheme) - - # Reset the mocks to enable multiple calls per test-case. - for mocker in self.mocks: - mocker.reset_mock() - - def setUp(self): - self.coord_system = mock.Mock(return_value=None) - self.coord = mock.sentinel.coord - self.coords = mock.Mock(return_value=[self.coord]) - self.remove_coord = mock.Mock() - self.regridded_cube = mock.sentinel.regridded_cube - self.regrid = mock.Mock(return_value=self.regridded_cube) - self.src_cube = mock.Mock( - spec=iris.cube.Cube, - coord_system=self.coord_system, - coords=self.coords, - remove_coord=self.remove_coord, - regrid=self.regrid) - self.tgt_grid_coord = mock.Mock() - self.tgt_grid = mock.Mock( - spec=iris.cube.Cube, coord=self.tgt_grid_coord) - self.regrid_schemes = [ - 'linear', 'linear_extrapolate', 'nearest', 'area_weighted', - 'unstructured_nearest' - ] - - def _return_mock_stock_cube(spec, lat_offset=True, lon_offset=True): - return self.tgt_grid - - self.mock_stock = self.patch( - 'esmvaltool.preprocessor._regrid._stock_cube', - side_effect=_return_mock_stock_cube) - self.mocks = [ - self.coord_system, self.coords, self.regrid, self.src_cube, - self.tgt_grid_coord, self.tgt_grid, self.mock_stock - ] - - def test_invalid_tgt_grid__unknown(self): - dummy = mock.sentinel.dummy - scheme = 'linear' - emsg = 'Expecting a cube' - with self.assertRaisesRegex(ValueError, emsg): - regrid(self.src_cube, dummy, scheme) - - def test_invalid_scheme__unknown(self): - dummy = mock.sentinel.dummy - emsg = 'Unknown regridding scheme' - with self.assertRaisesRegex(ValueError, emsg): - regrid(dummy, dummy, 'wibble') - - def test_horizontal_schemes(self): - self.assertEqual( - set(HORIZONTAL_SCHEMES.keys()), set(self.regrid_schemes)) - - def test_regrid__horizontal_schemes(self): - for scheme in self.regrid_schemes: - result = regrid(self.src_cube, self.tgt_grid, scheme) - self.assertEqual(result, self.regridded_cube) - self._check(self.tgt_grid, scheme) - - def test_regrid__cell_specification(self): - specs = ['1x1', '2x2', '3x3', '4x4', '5x5'] - scheme = 'linear' - for spec in specs: - result = regrid(self.src_cube, spec, scheme) - self.assertEqual(result, self.regridded_cube) - self._check(spec, scheme, spec=True) - self.assertEqual(set(_CACHE.keys()), set(specs)) - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/unit/preprocessor/_regrid_esmpy/__init__.py b/tests/unit/preprocessor/_regrid_esmpy/__init__.py deleted file mode 100644 index 
index fc186bf9ed..0000000000
--- a/tests/unit/preprocessor/_regrid_esmpy/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-"""Unit tests for the :mod:`esmvaltool.preprocessor._regrid_esmpy` module."""
diff --git a/tests/unit/preprocessor/_regrid_esmpy/test_regrid_esmpy.py b/tests/unit/preprocessor/_regrid_esmpy/test_regrid_esmpy.py
deleted file mode 100644
index bde05f7894..0000000000
--- a/tests/unit/preprocessor/_regrid_esmpy/test_regrid_esmpy.py
+++ /dev/null
@@ -1,660 +0,0 @@
-"""Unit tests for the esmvaltool.preprocessor._regrid_esmpy module."""
-import cf_units
-import iris
-import mock
-import numpy as np
-from iris.exceptions import CoordinateNotFoundError
-
-import tests
-from esmvaltool.preprocessor._regrid_esmpy import (
-    build_regridder, build_regridder_2d, coords_iris_to_esmpy,
-    cube_to_empty_field, get_grid, get_grid_representant,
-    get_grid_representants, get_representant, is_lon_circular, regrid)
-
-
-def identity(*args, **kwargs):
-    """Return args, acting as identity for mocking functions."""
-    # pylint: disable=unused-argument
-    # Here, kwargs will be ignored.
-    if len(args) == 1:
-        return args[0]
-    return args
-
-
-def mock_cube_to_empty_field(cube):
-    """Return associated field for mock cube."""
-    return cube.field
-
-
-class MockGrid(mock.MagicMock):
-    """Mock ESMF grid."""
-
-    get_coords = mock.Mock(return_value=mock.MagicMock())
-    add_coords = mock.Mock()
-    add_item = mock.Mock()
-    get_item = mock.Mock(return_value=mock.MagicMock())
-
-
-class MockGridItem(mock.Mock):
-    """Mock ESMF enum for grid items."""
-
-    MASK = mock.sentinel.gi_mask
-
-
-class MockRegridMethod(mock.Mock):
-    """Mock ESMF enum for regridding methods."""
-
-    BILINEAR = mock.sentinel.rm_bilinear
-    CONSERVE = mock.sentinel.rm_conserve
-    NEAREST_STOD = mock.sentinel.rm_nearest_stod
-
-
-class MockStaggerLoc(mock.Mock):
-    """Mock ESMF enum for stagger locations."""
-
-    CENTER = mock.sentinel.sl_center
-    CORNER = mock.sentinel.sl_corner
-
-
-class MockUnmappedAction(mock.Mock):
-    """Mock ESMF enum for unmapped actions."""
-
-    IGNORE = mock.sentinel.ua_ignore
-
-
-ESMF_REGRID_METHODS = {
-    'linear': MockRegridMethod.BILINEAR,
-    'area_weighted': MockRegridMethod.CONSERVE,
-    'nearest': MockRegridMethod.NEAREST_STOD,
-}
-
-MASK_REGRIDDING_MASK_VALUE = {
-    mock.sentinel.rm_bilinear: np.array([1]),
-    mock.sentinel.rm_conserve: np.array([1]),
-    mock.sentinel.rm_nearest_stod: np.array([]),
-}
-
-
-@mock.patch('esmvaltool.preprocessor._regrid_esmpy.MASK_REGRIDDING_MASK_VALUE',
-            MASK_REGRIDDING_MASK_VALUE)
-@mock.patch('esmvaltool.preprocessor._regrid_esmpy.ESMF_REGRID_METHODS',
-            ESMF_REGRID_METHODS)
-@mock.patch('ESMF.Manager', mock.Mock)
-@mock.patch('ESMF.GridItem', MockGridItem)
-@mock.patch('ESMF.RegridMethod', MockRegridMethod)
-@mock.patch('ESMF.StaggerLoc', MockStaggerLoc)
-@mock.patch('ESMF.UnmappedAction', MockUnmappedAction)
-class TestHelpers(tests.Test):
-    """Unit tests for helper functions."""
-
-    # pylint: disable=too-many-instance-attributes, too-many-public-methods
-    def setUp(self):
-        """Set up fixtures."""
-        # pylint: disable=too-many-locals
-        lat_1d_pre_bounds = np.linspace(-90, 90, 5)
-        lat_1d_bounds = np.stack(
-            [lat_1d_pre_bounds[:-1], lat_1d_pre_bounds[1:]], axis=1)
-        lat_1d_points = lat_1d_bounds.mean(axis=1)
-        lon_1d_pre_bounds = np.linspace(0, 360, 5)
-        lon_1d_bounds = np.stack(
-            [lon_1d_pre_bounds[:-1], lon_1d_pre_bounds[1:]], axis=1)
-        lon_1d_points = lon_1d_bounds.mean(axis=1)
-        lon_2d_points, lat_2d_points = np.meshgrid(lon_1d_points,
-                                                   lat_1d_points)
-        (lon_2d_pre_bounds,
-         lat_2d_pre_bounds) = np.meshgrid(
-             lon_1d_pre_bounds, lat_1d_pre_bounds)
-        lat_2d_bounds = np.stack([
-            lat_2d_pre_bounds[:-1, :-1], lat_2d_pre_bounds[:-1, 1:],
-            lat_2d_pre_bounds[1:, 1:], lat_2d_pre_bounds[1:, :-1]
-        ],
-                                 axis=2)
-        lon_2d_bounds = np.stack([
-            lon_2d_pre_bounds[:-1, :-1], lon_2d_pre_bounds[:-1, 1:],
-            lon_2d_pre_bounds[1:, 1:], lon_2d_pre_bounds[1:, :-1]
-        ],
-                                 axis=2)
-        self.lat_1d = mock.Mock(
-            iris.coords.DimCoord,
-            standard_name='latitude',
-            long_name='latitude',
-            ndim=1,
-            points=lat_1d_points,
-            bounds=lat_1d_bounds,
-            has_bounds=mock.Mock(return_value=True))
-        self.lat_1d_no_bounds = mock.Mock(
-            iris.coords.DimCoord,
-            standard_name='latitude',
-            ndim=1,
-            points=lat_1d_points,
-            has_bounds=mock.Mock(return_value=False),
-            bounds=lat_1d_bounds,
-            guess_bounds=mock.Mock())
-        self.lon_1d = mock.Mock(
-            iris.coords.DimCoord,
-            standard_name='longitude',
-            long_name='longitude',
-            ndim=1,
-            points=lon_1d_points,
-            bounds=lon_1d_bounds,
-            has_bounds=mock.Mock(return_value=True),
-            circular=True)
-        self.lon_1d_aux = mock.Mock(
-            iris.coords.AuxCoord,
-            standard_name='longitude',
-            long_name='longitude',
-            ndim=1,
-            shape=lon_1d_points.shape,
-            points=lon_1d_points,
-            bounds=lon_1d_bounds,
-            has_bounds=mock.Mock(return_value=True))
-        self.lat_2d = mock.Mock(
-            iris.coords.AuxCoord,
-            standard_name='latitude',
-            long_name='latitude',
-            ndim=2,
-            points=lat_2d_points,
-            bounds=lat_2d_bounds,
-            has_bounds=mock.Mock(return_value=True))
-        self.lon_2d = mock.Mock(
-            iris.coords.AuxCoord,
-            standard_name='longitude',
-            long_name='longitude',
-            ndim=2,
-            points=lon_2d_points,
-            bounds=lon_2d_bounds,
-            has_bounds=mock.Mock(return_value=True))
-        self.lon_2d_non_circular = mock.Mock(
-            iris.coords.AuxCoord,
-            standard_name='longitude',
-            ndim=2,
-            points=lon_2d_points[:, 1:-1],
-            bounds=lon_2d_bounds[:, 1:-1],
-            has_bounds=mock.Mock(return_value=True))
-        self.lat_3d = mock.Mock(
-            iris.coords.AuxCoord,
-            standard_name='latitude',
-            long_name='latitude',
-            ndim=3)
-        self.lon_3d = mock.Mock(
-            iris.coords.AuxCoord,
-            standard_name='longitude',
-            long_name='longitude',
-            ndim=3)
-        depth_pre_bounds = np.linspace(0, 5000, 5)
-        depth_bounds = np.stack([depth_pre_bounds[:-1], depth_pre_bounds[1:]],
-                                axis=1)
-        depth_points = depth_bounds.mean(axis=1)
-        self.depth = mock.Mock(
-            iris.coords.DimCoord,
-            standard_name='depth',
-            long_name='depth',
-            ndim=1,
-            shape=depth_points.shape,
-            points=depth_points,
-            bounds=depth_bounds,
-            has_bounds=mock.Mock(return_value=True))
-        data_shape = lon_2d_points.shape
-        raw_data = np.arange(np.prod(data_shape)).reshape(data_shape)
-        mask = np.zeros(data_shape)
-        mask[:data_shape[0] // 2] = True
-        self.data = np.ma.masked_array(raw_data, mask)
-        self.data_3d = np.repeat(
-            self.data[..., np.newaxis], depth_points.shape[0], axis=-1)
-        self.expected_esmpy_lat = np.array([[-67.5, -22.5, 22.5, 67.5],
-                                            [-67.5, -22.5, 22.5, 67.5],
-                                            [-67.5, -22.5, 22.5, 67.5],
-                                            [-67.5, -22.5, 22.5, 67.5]])
-        self.expected_esmpy_lon = np.array([[45., 45., 45., 45.],
-                                            [135., 135., 135., 135.],
-                                            [225., 225., 225., 225.],
-                                            [315., 315., 315., 315.]])
-        self.expected_esmpy_lat_corners = np.array([[-90., -45., 0., 45., 90.],
-                                                    [-90., -45., 0., 45., 90.],
-                                                    [-90., -45., 0., 45., 90.],
-                                                    [-90., -45., 0., 45., 90.],
-                                                    [-90., -45., 0., 45.,
-                                                     90.]])
-        self.expected_esmpy_lon_corners = np.array(
-            [[0., 0., 0., 0., 0.], [90., 90., 90., 90., 90.],
-             [180., 180., 180., 180., 180.], [270., 270., 270., 270., 270.],
-             [360., 360., 360., 360., 360.]])
-        self.coords = {
-            'latitude': self.lat_2d,
-            'longitude': self.lon_2d,
-            'depth': self.depth
-        }
-        self.coord_dims = {
-            'latitude': (0, 1),
-            'longitude': (0, 1),
-            self.lat_2d: (0, 1),
-            self.lon_2d: (0, 1),
-        }
-
-        def coord(name=None, axis=None):
-            """Return selected coordinate for mock cube."""
-            if axis == 'Z':
-                raise CoordinateNotFoundError()
-            return self.coords[name]
-
-        def coords(dim_coords=None):
-            """Return coordinates for mock cube."""
-            if dim_coords:
-                return []
-            return list(self.coords.values())
-
-        self.cube = mock.Mock(
-            spec=iris.cube.Cube,
-            dtype=np.float32,
-            long_name='longname',
-            ndim=2,
-            shape=self.data.shape,
-            data=self.data,
-            coord=coord,
-            coord_dims=lambda name: self.coord_dims[name],
-            coords=coords,
-        )
-        self.cube.__getitem__ = mock.Mock(return_value=self.cube)
-        self.unmasked_cube = mock.Mock(
-            spec=iris.cube.Cube,
-            dtype=np.float32,
-            long_name='longname',
-        )
-        self.coord_dims_3d = {
-            'latitude': (1, 2),
-            'longitude': (1, 2),
-            self.lat_2d: (1, 2),
-            self.lon_2d: (1, 2),
-            'depth': (0, ),
-            self.depth: (0, ),
-        }
-
-        def coord_3d(name=None, dimensions=None, dim_coords=None, axis=None):
-            """Return coord for 3d mock cube."""
-            # pylint: disable=unused-argument
-            if axis == 'Z' or dimensions == [0]:
-                return self.coords['depth']
-            return self.coords[name]
-
-        self.cube_3d = mock.Mock(
-            spec=iris.cube.Cube,
-            dtype=np.float32,
-            standard_name=None,
-            long_name='longname',
-            var_name='ln',
-            units=cf_units.Unit('1'),
-            attributes={},
-            cell_methods=[],
-            ndim=3,
-            shape=self.data_3d.shape,
-            data=self.data_3d,
-            coord=coord_3d,
-            coord_dims=lambda name: self.coord_dims_3d[name],
-        )
-        self.cube.__getitem__ = mock.Mock(return_value=self.cube)
-
-    def test_coords_iris_to_esmpy_mismatched_dimensions(self):
-        """Test coord conversion with mismatched dimensions."""
-        self.assertRaises(ValueError, coords_iris_to_esmpy, self.lat_1d,
-                          self.lon_2d, True)
-
-    def test_coords_iris_to_esmpy_invalid_dimensions(self):
-        """Test coord conversion with invalid dimensions."""
-        self.assertRaises(NotImplementedError, coords_iris_to_esmpy,
-                          self.lat_3d, self.lon_3d, True)
-
-    def test_coords_iris_to_esmpy_call_guess_bounds(self):
-        """Test coord conversion with missing bounds."""
-        coords_iris_to_esmpy(self.lat_1d_no_bounds, self.lon_1d, True)
-        self.lat_1d_no_bounds.guess_bounds.assert_called_once()
-
-    def test_coords_iris_to_esmpy_1d_circular(self):
-        """Test coord conversion with 1d coords and circular longitudes."""
-        (esmpy_lat, esmpy_lon,
-         esmpy_lat_corners, esmpy_lon_corners) = coords_iris_to_esmpy(
-             self.lat_1d, self.lon_1d, True)
-        self.assertArrayEqual(esmpy_lat, self.expected_esmpy_lat)
-        self.assertArrayEqual(esmpy_lon, self.expected_esmpy_lon)
-        self.assertArrayEqual(esmpy_lat_corners,
-                              self.expected_esmpy_lat_corners[:-1])
-        self.assertArrayEqual(esmpy_lon_corners,
-                              self.expected_esmpy_lon_corners[:-1])
-
-    def test_coords_iris_to_esmpy_1d_non_circular(self):
-        """Test coord conversion with 1d coords and non circular longitudes."""
-        (esmpy_lat, esmpy_lon,
-         esmpy_lat_corners, esmpy_lon_corners) = coords_iris_to_esmpy(
-             self.lat_1d, self.lon_1d, False)
-        self.assertArrayEqual(esmpy_lat, self.expected_esmpy_lat)
-        self.assertArrayEqual(esmpy_lon, self.expected_esmpy_lon)
-        self.assertArrayEqual(esmpy_lat_corners,
-                              self.expected_esmpy_lat_corners)
-        self.assertArrayEqual(esmpy_lon_corners,
-                              self.expected_esmpy_lon_corners)
-
-    def test_coords_iris_to_esmpy_2d_circular(self):
-        """Test coord conversion with 2d coords and circular longitudes."""
-        (esmpy_lat, esmpy_lon,
-         esmpy_lat_corners, esmpy_lon_corners) = coords_iris_to_esmpy(
-             self.lat_2d, self.lon_2d, True)
-        self.assertArrayEqual(esmpy_lat, self.expected_esmpy_lat)
-        self.assertArrayEqual(esmpy_lon, self.expected_esmpy_lon)
-        self.assertArrayEqual(esmpy_lat_corners,
-                              self.expected_esmpy_lat_corners[:-1])
-        self.assertArrayEqual(esmpy_lon_corners,
-                              self.expected_esmpy_lon_corners[:-1])
-
-    def test_coords_iris_to_esmpy_2d_non_circular(self):
-        """Test coord conversion with 2d coords and non circular longitudes."""
-        (esmpy_lat, esmpy_lon,
-         esmpy_lat_corners, esmpy_lon_corners) = coords_iris_to_esmpy(
-             self.lat_2d, self.lon_2d, False)
-        self.assertArrayEqual(esmpy_lat, self.expected_esmpy_lat)
-        self.assertArrayEqual(esmpy_lon, self.expected_esmpy_lon)
-        self.assertArrayEqual(esmpy_lat_corners,
-                              self.expected_esmpy_lat_corners)
-        self.assertArrayEqual(esmpy_lon_corners,
-                              self.expected_esmpy_lon_corners)
-
-    def test_get_grid_circular(self):
-        """Test building of ESMF grid from iris cube circular longitude."""
-        expected_get_coords_calls = [
-            mock.call(0),
-            mock.call(1),
-            mock.call(0, staggerloc=mock.sentinel.sl_corner),
-            mock.call(1, staggerloc=mock.sentinel.sl_corner),
-        ]
-        with mock.patch('ESMF.Grid', MockGrid) as mg:
-            mg.get_coords.reset_mock()
-            mg.add_coords.reset_mock()
-            mg.add_item.reset_mock()
-            get_grid(self.expected_esmpy_lat, self.expected_esmpy_lon,
-                     self.expected_esmpy_lat_corners[:-1],
-                     self.expected_esmpy_lon_corners[:-1], True)
-        mg.get_coords.assert_has_calls(expected_get_coords_calls)
-        mg.add_coords.assert_called_once_with([mock.sentinel.sl_corner])
-        mg.add_item.assert_called_once_with(mock.sentinel.gi_mask,
-                                            mock.sentinel.sl_center)
-
-    def test_get_grid_non_circular(self):
-        """Test building of ESMF grid from iris cube non circular longitude."""
-        expected_get_coords_calls = [
-            mock.call(0),
-            mock.call(1),
-            mock.call(0, staggerloc=mock.sentinel.sl_corner),
-            mock.call(1, staggerloc=mock.sentinel.sl_corner),
-        ]
-        with mock.patch('ESMF.Grid', MockGrid) as mg:
-            mg.get_coords.reset_mock()
-            mg.add_coords.reset_mock()
-            mg.add_item.reset_mock()
-            get_grid(self.expected_esmpy_lat, self.expected_esmpy_lon,
-                     self.expected_esmpy_lat_corners,
-                     self.expected_esmpy_lon_corners, False)
-        mg.get_coords.assert_has_calls(expected_get_coords_calls)
-        mg.add_coords.assert_called_once_with([mock.sentinel.sl_corner])
-        mg.add_item.assert_called_once_with(mock.sentinel.gi_mask,
-                                            mock.sentinel.sl_center)
-
-    def test_is_lon_circular_dim_coords_true(self):
-        """Test detection of circular longitudes 1d dim coords."""
-        is_circ = is_lon_circular(self.lon_1d)
-        self.assertTrue(is_circ)
-
-    def test_is_lon_circular_dim_coords_false(self):
-        """Test detection of non circular longitudes 1d dim coords."""
-        self.lon_1d.circular = False
-        is_circ = is_lon_circular(self.lon_1d)
-        self.assertFalse(is_circ)
-
-    def test_is_lon_circular_1d_aux_coords(self):
-        """Test detection of circular longitudes 1d aux coords."""
-        is_circ = is_lon_circular(self.lon_1d_aux)
-        self.assertTrue(is_circ)
-
-    def test_is_lon_circular_invalid_dimension(self):
-        """Test detection of circular longitudes, invalid coordinates."""
-        self.assertRaises(NotImplementedError, is_lon_circular, self.lon_3d)
-
-    def test_is_lon_circular_invalid_argument(self):
-        """Test detection of circular longitudes, invalid argument."""
-        self.assertRaises(ValueError, is_lon_circular, None)
-
-    def test_is_lon_circular_2d_aux_coords(self):
-        """Test detection of circular longitudes 2d aux coords."""
-        is_circ = is_lon_circular(self.lon_2d)
-        self.assertTrue(is_circ)
-
-    def test_is_lon_circular_2d_aux_coords_non_circ(self):
-        """Test detection of non circular longitudes 2d aux coords."""
-        is_circ = is_lon_circular(self.lon_2d_non_circular)
-        self.assertFalse(is_circ)
-
-    @mock.patch('ESMF.Grid', MockGrid)
-    @mock.patch('ESMF.Field')
-    def test_cube_to_empty_field(self, mock_field):
-        """Test building of empty field from iris cube."""
-        field = cube_to_empty_field(self.cube)
-        self.assertEqual(mock_field.return_value, field)
-        mock_field.assert_called_once()
-        ckwargs = mock_field.call_args[1]
-        self.assertEqual('longname', ckwargs['name'])
-        self.assertEqual(mock.sentinel.sl_center, ckwargs['staggerloc'])
-
-    def test_get_representant(self):
-        """Test extraction of horizontal representant from iris cube."""
-        horizontal_slice = ['latitude', 'longitude']
-        get_representant(self.cube, horizontal_slice)
-        self.cube.__getitem__.assert_called_once_with((slice(None, None, None),
-                                                       slice(None, None,
-                                                             None)))
-
-    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.cube_to_empty_field',
-                mock_cube_to_empty_field)
-    @mock.patch('ESMF.Regrid')
-    def test_build_regridder_2d_unmasked_data(self, mock_regrid):
-        """Test building of 2d regridder for unmasked data."""
-        self.cube.data = self.cube.data.data
-        self.cube.field = mock.Mock()
-        mock.sentinel.dst_rep.field = mock.Mock()
-        build_regridder_2d(self.cube, mock.sentinel.dst_rep,
-                           mock.sentinel.regrid_method, .99)
-        expected_kwargs = {
-            'src_mask_values': np.array([1]),
-            'dst_mask_values': np.array([1]),
-            'regrid_method': mock.sentinel.regrid_method,
-            'srcfield': self.cube.field,
-            'dstfield': mock.sentinel.dst_rep.field,
-            'unmapped_action': mock.sentinel.ua_ignore,
-            'ignore_degenerate': True,
-        }
-        mock_regrid.assert_called_once_with(**expected_kwargs)
-
-    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.cube_to_empty_field',
-                mock_cube_to_empty_field)
-    @mock.patch('ESMF.Regrid')
-    def test_build_regridder_2d_masked_data(self, mock_regrid):
-        """Test building of 2d regridder for masked data."""
-        mock_regrid.return_value = mock.Mock(
-            return_value=mock.Mock(data=self.data.T))
-        regrid_method = mock.sentinel.rm_bilinear
-        src_rep = mock.MagicMock(data=self.data)
-        dst_rep = mock.MagicMock()
-        src_rep.field = mock.MagicMock(data=self.data.copy())
-        dst_rep.field = mock.MagicMock()
-        build_regridder_2d(src_rep, dst_rep, regrid_method, .99)
-        expected_calls = [
-            mock.call(
-                src_mask_values=np.array([]),
-                dst_mask_values=np.array([]),
-                srcfield=src_rep.field,
-                dstfield=dst_rep.field,
-                unmapped_action=mock.sentinel.ua_ignore,
-                ignore_degenerate=True,
-                regrid_method=regrid_method),
-            mock.call(
-                src_mask_values=np.array([1]),
-                dst_mask_values=np.array([1]),
-                regrid_method=regrid_method,
-                srcfield=src_rep.field,
-                dstfield=dst_rep.field,
-                unmapped_action=mock.sentinel.ua_ignore,
-                ignore_degenerate=True),
-        ]
-        kwargs = mock_regrid.call_args_list[0][-1]
-        expected_kwargs = expected_calls[0][-1]
-        self.assertEqual(expected_kwargs.keys(), kwargs.keys())
-        array_keys = set(['src_mask_values', 'dst_mask_values'])
-        for key in kwargs.keys():
-            if key in array_keys:
-                self.assertTrue((expected_kwargs[key] == kwargs[key]).all())
-            else:
-                self.assertEqual(expected_kwargs[key], kwargs[key])
-        self.assertTrue(mock_regrid.call_args_list[1] == expected_calls[1])
-
-    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.cube_to_empty_field',
-                mock_cube_to_empty_field)
-    @mock.patch('ESMF.Regrid')
-    def test_regridder_2d_unmasked_data(self, mock_regrid):
-        """Test regridder for unmasked 2d data."""
-        field_regridder = mock.Mock(return_value=mock.Mock(data=self.data.T))
-        mock_regrid.return_value = field_regridder
-        regrid_method = mock.sentinel.rm_bilinear
-        src_rep = mock.MagicMock(data=self.data, dtype=np.float32)
-        dst_rep = mock.MagicMock(shape=(4, 4))
-        regridder = build_regridder_2d(src_rep, dst_rep, regrid_method, .99)
-        field_regridder.reset_mock()
-        regridder(src_rep)
-        field_regridder.assert_called_once_with(src_rep.field, dst_rep.field)
-
-    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.cube_to_empty_field',
-                mock_cube_to_empty_field)
-    @mock.patch('ESMF.Regrid')
-    def test_regridder_2d_masked_data(self, mock_regrid):
-        """Test regridder for masked 2d data."""
-        field_regridder = mock.Mock(return_value=mock.Mock(data=self.data.T))
-        mock_regrid.return_value = field_regridder
-        regrid_method = mock.sentinel.rm_bilinear
-        src_rep = mock.MagicMock(data=self.data)
-        dst_rep = mock.MagicMock(shape=(4, 4))
-        regridder = build_regridder_2d(src_rep, dst_rep, regrid_method, .99)
-        field_regridder.reset_mock()
-        regridder(self.cube)
-        field_regridder.assert_called_once_with(src_rep.field, dst_rep.field)
-
-    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.build_regridder_3d')
-    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.build_regridder_2d')
-    def test_build_regridder_2(self, mock_regridder_2d, mock_regridder_3d):
-        """Test build regridder for 2d data."""
-        # pylint: disable=no-self-use
-        src_rep = mock.Mock(ndim=2)
-        dst_rep = mock.Mock(ndim=2)
-        build_regridder(src_rep, dst_rep, 'nearest')
-        mock_regridder_2d.assert_called_once_with(
-            src_rep, dst_rep, mock.sentinel.rm_nearest_stod, .99)
-        mock_regridder_3d.assert_not_called()
-
-    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.build_regridder_3d')
-    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.build_regridder_2d')
-    def test_build_regridder_3(self, mock_regridder_2d, mock_regridder_3d):
-        """Test build regridder for 3d data."""
-        # pylint: disable=no-self-use
-        src_rep = mock.Mock(ndim=3)
-        dst_rep = mock.Mock(ndim=3)
-        build_regridder(src_rep, dst_rep, 'nearest')
-        mock_regridder_3d.assert_called_once_with(
-            src_rep, dst_rep, mock.sentinel.rm_nearest_stod, .99)
-        mock_regridder_2d.assert_not_called()
-
-    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.get_representant')
-    def test_get_grid_representant_2d(self, mock_get_representant):
-        """Test extraction of 2d grid representant from 2 spatial d cube."""
-        mock_get_representant.return_value = mock.sentinel.ret
-        ret = get_grid_representant(self.cube)
-        self.assertEqual(mock.sentinel.ret, ret)
-        mock_get_representant.assert_called_once_with(
-            self.cube, ['latitude', 'longitude'])
-
-    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.get_representant')
-    def test_get_grid_representant_2d_horiz_only(self, mock_get_representant):
-        """Test extraction of forced 2d grid representant from 2d cube."""
-        mock_get_representant.return_value = mock.sentinel.ret
-        ret = get_grid_representant(self.cube, True)
-        self.assertEqual(mock.sentinel.ret, ret)
-        mock_get_representant.assert_called_once_with(
-            self.cube, ['latitude', 'longitude'])
-
-    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.get_representant')
-    def test_get_grid_representant_3d(self, mock_get_representant):
-        """Test extraction of 3d grid representant from 3 spatial d cube."""
-        mock_get_representant.return_value = mock.sentinel.ret
-        ret = get_grid_representant(self.cube_3d)
-        self.assertEqual(mock.sentinel.ret, ret)
-        mock_get_representant.assert_called_once_with(
-            self.cube_3d, [self.depth, 'latitude', 'longitude'])
-
-    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.get_representant')
-    def test_get_grid_representant_3d_horiz_only(self, mock_get_representant):
-        """Test extraction of 2d grid representant from 3 spatial d cube."""
-        mock_get_representant.return_value = mock.sentinel.ret
-        ret = get_grid_representant(self.cube_3d, True)
-        self.assertEqual(mock.sentinel.ret, ret)
-        mock_get_representant.assert_called_once_with(
-            self.cube_3d, ['latitude', 'longitude'])
-
-    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.get_grid_representant',
-                mock.Mock(side_effect=identity))
-    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.get_empty_data')
-    @mock.patch('iris.cube.Cube')
-    def test_get_grid_representants_3d_src(self, mock_cube,
-                                           mock_get_empty_data):
-        """Test extraction of grid representants from 3 spatial d cube."""
-        src = self.cube_3d
-        mock_get_empty_data.return_value = mock.sentinel.empty_data
-        src_rep = get_grid_representants(src, self.cube)[0]
-        self.assertEqual(src, src_rep)
-        mock_cube.assert_called_once_with(
-            data=mock.sentinel.empty_data,
-            standard_name=src.standard_name,
-            long_name=src.long_name,
-            var_name=src.var_name,
-            units=src.units,
-            attributes=src.attributes,
-            cell_methods=src.cell_methods,
-            dim_coords_and_dims=[(self.depth, 0)],
-        )
-
-    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.get_grid_representant',
-                mock.Mock(side_effect=identity))
-    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.get_empty_data')
-    @mock.patch('iris.cube.Cube')
-    def test_get_grid_representants_2d_src(self, mock_cube,
-                                           mock_get_empty_data):
-        """Test extraction of grid representants from 2 spatial d cube."""
-        src = self.cube
-        mock_get_empty_data.return_value = mock.sentinel.empty_data
-        src_rep = get_grid_representants(src, self.cube)[0]
-        self.assertEqual(src, src_rep)
-        mock_cube.assert_called_once_with(
-            data=mock.sentinel.empty_data,
-            standard_name=src.standard_name,
-            long_name=src.long_name,
-            var_name=src.var_name,
-            units=src.units,
-            attributes=src.attributes,
-            cell_methods=src.cell_methods,
-            dim_coords_and_dims=[],
-        )
-
-    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.map_slices')
-    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.build_regridder')
-    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.get_grid_representants',
-                mock.Mock(side_effect=identity))
-    def test_regrid(self, mock_build_regridder, mock_map_slices):
-        """Test full regrid method."""
-        mock_build_regridder.return_value = mock.sentinel.regridder
-        mock_map_slices.return_value = mock.sentinel.regridded
-        regrid(self.cube_3d, self.cube)
-        mock_build_regridder.assert_called_once_with(self.cube_3d, self.cube,
-                                                     'linear')
-        mock_map_slices.assert_called_once_with(
-            self.cube_3d, mock.sentinel.regridder, self.cube_3d, self.cube)
diff --git a/tests/unit/preprocessor/_time/__init__.py b/tests/unit/preprocessor/_time/__init__.py
deleted file mode 100644
index 720cc45c94..0000000000
--- a/tests/unit/preprocessor/_time/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-"""Test suite for _time module."""
diff --git a/tests/unit/preprocessor/_time/test_time.py b/tests/unit/preprocessor/_time/test_time.py
deleted file mode 100644
index 512d984632..0000000000
--- a/tests/unit/preprocessor/_time/test_time.py
+++ /dev/null
@@ -1,326 +0,0 @@
-"""Unit tests for the :func:`esmvaltool.preprocessor._time` module."""
-
-import unittest
-
-import iris
-import iris.coord_categorisation
-import iris.coords
-import numpy as np
-from cf_units import Unit
-from iris.cube import Cube
-
-import tests
-from esmvaltool.preprocessor._time import (annual_mean,
-                                           extract_month,
-                                           extract_season, extract_time,
-                                           regrid_time, time_average)
-
-
-def _create_sample_cube():
-    cube = Cube(np.arange(1, 25), var_name='co2', units='J')
-    cube.add_dim_coord(
-        iris.coords.DimCoord(
-            np.arange(15., 720., 30.),
-            standard_name='time',
-            units=Unit('days since 1950-01-01 00:00:00',
-                       calendar='gregorian')), 0)
-    iris.coord_categorisation.add_month_number(cube, 'time')
-    return cube
-
-
-def add_auxiliary_coordinate(cubeList):
-    """Add AuxCoords to cubes in cubeList."""
-    for cube in cubeList:
-        iris.coord_categorisation.add_day_of_month(cube,
-                                                   cube.coord('time'),
-                                                   name='day_of_month')
-        iris.coord_categorisation.add_day_of_year(cube,
-                                                  cube.coord('time'),
-                                                  name='day_of_year')
-
-
-class TestExtractMonth(tests.Test):
-    """Tests for extract_month."""
-
-    def setUp(self):
-        """Prepare tests"""
-        self.cube = _create_sample_cube()
-
-    def test_get_january(self):
-        """Test January extraction."""
-        sliced = extract_month(self.cube, 1)
-        print(sliced)
-        self.assertTrue(
-            (np.array([1, 1]) == sliced.coord('month_number').points).all())
-
-
-class TestTimeSlice(tests.Test):
-    """Tests for extract_time."""
-
-    def setUp(self):
-        """Prepare tests"""
-        self.cube = _create_sample_cube()
-
-    def test_extract_time(self):
-        """Test extract_time."""
-        sliced = extract_time(self.cube, 1950, 1, 1, 1950, 12, 31)
-        print(sliced)
-        self.assertTrue(
-            (np.arange(1, 13, 1) == sliced.coord('month_number').points).all())
-
-    def test_extract_time_no_slice(self):
-        """Test fail of extract_time."""
-        with self.assertRaises(ValueError):
-            extract_time(self.cube, 2200, 1, 1, 2200, 12, 31)
-
-    def test_extract_time_one_time(self):
-        """Test extract_time with one time step."""
-        cube = _create_sample_cube()
-        cube = cube.collapsed('time', iris.analysis.MEAN)
-        sliced = extract_time(cube, 1950, 1, 1, 1952, 12, 31)
-        print(sliced)
-        self.assertTrue(np.array([
-            360.,
-        ]) == sliced.coord('time').points)
-
-    def test_extract_time_no_time(self):
-        """Test extract_time with no time step."""
-        cube = _create_sample_cube()[0]
-        sliced = extract_time(cube, 1950, 1, 1, 1950, 12, 31)
-        print('sliced', sliced, sliced.shape)
-        print('cube', cube, cube.shape)
-        self.assertTrue(cube == sliced)
-
-
-class TestExtractSeason(tests.Test):
-    """Tests for extract_season."""
-
-    def setUp(self):
-        """Prepare tests"""
-        self.cube = _create_sample_cube()
-
-    def test_get_djf(self):
-        """Test function for winter."""
-        sliced = extract_season(self.cube, 'djf')
-        print(sliced)
-        self.assertTrue(
-            (np.array([1, 2, 12, 1, 2,
-                       12]) == sliced.coord('month_number').points).all())
-
-    def test_get_djf_caps(self):
-        """Test that the function works when the season is given in caps."""
-        sliced = extract_season(self.cube, 'DJF')
-        print(sliced)
-        self.assertTrue(
-            (np.array([1, 2, 12, 1, 2,
-                       12]) == sliced.coord('month_number').points).all())
-
-    def test_get_mam(self):
-        """Test function for spring."""
-        sliced = extract_season(self.cube, 'mam')
-        print(sliced)
-        self.assertTrue((np.array(
-            [3, 4, 5, 3, 4, 5]) == sliced.coord('month_number').points).all())
-
-    def test_get_jja(self):
-        """Test function for summer."""
-        sliced = extract_season(self.cube, 'jja')
-        print(sliced)
-        self.assertTrue((np.array(
-            [6, 7, 8, 6, 7, 8]) == sliced.coord('month_number').points).all())
-
-    def test_get_son(self):
-        """Test function for autumn."""
-        sliced = extract_season(self.cube, 'son')
-        print(sliced)
-        self.assertTrue(
-            (np.array([9, 10, 11, 9, 10,
-                       11]) == sliced.coord('month_number').points).all())
-
-
-class TestTimeAverage(tests.Test):
-    """Test class for the :mod:`esmvaltool.preprocessor._time` module."""
-
-    def test_time_average(self):
-        """Test for time average of a 1D field."""
-        data = np.ones((3))
-        cube = iris.cube.Cube(data)
-
-        times = np.array([15., 45., 75.])
-        bounds = np.array([[0., 30.], [30., 60.], [60., 90.]])
-        time = iris.coords.DimCoord(
-            times,
-            bounds=bounds,
-            standard_name='time',
-            units=Unit('days since 1950-01-01', calendar='gregorian'))
-        cube = iris.cube.Cube(data, dim_coords_and_dims=[(time, 0)])
-
-        result = time_average(cube)
-        expected = np.array([1.])
-        self.assertArrayEqual(result.data, expected)
-
-    def test_time_average_uneven(self):
-        """Test for time average of a 1D field with uneven time boundaries."""
-        data = np.array([1., 5.])
-        cube = iris.cube.Cube(data)
-
-        times = np.array([5., 25.])
-        bounds = np.array([[0., 1.], [1., 4.]])
-        time = iris.coords.DimCoord(
-            times,
-            bounds=bounds,
-            standard_name='time',
-            units=Unit('days since 1950-01-01', calendar='gregorian'))
-        cube = iris.cube.Cube(data, dim_coords_and_dims=[(time, 0)])
-
-        result = time_average(cube)
-        expected = np.array([4.])
-        self.assertArrayEqual(result.data, expected)
-
-    def test_time_average_365_day(self):
-        """Test time average of a realistic time axis, 365 day calendar."""
-        data = np.ones((6, ))
-        cube = iris.cube.Cube(data)
-
-        times = np.array([15, 45, 74, 105, 135, 166])
-        bounds = np.array([[0, 31], [31, 59], [59, 90], [90, 120], [120, 151],
-                           [151, 181]])
-        time = iris.coords.DimCoord(
-            times,
-            bounds=bounds,
-            standard_name='time',
-            var_name='time',
-            units=Unit('days since 1950-01-01', calendar='365_day'))
-        cube = iris.cube.Cube(data, dim_coords_and_dims=[(time, 0)])
-
-        result = time_average(cube)
-        expected = np.array([1.])
-        self.assertArrayEqual(result.data, expected)
-
-
-class TestRegridTimeMonthly(tests.Test):
-    """Tests for regrid_time with monthly frequency."""
-
-    def setUp(self):
-        """Prepare tests"""
-        self.cube_1 = _create_sample_cube()
-        self.cube_2 = _create_sample_cube()
-        self.cube_2.data = self.cube_2.data * 2.
-        self.cube_2.remove_coord('time')
-        self.cube_2.add_dim_coord(
-            iris.coords.DimCoord(
-                np.arange(14., 719., 30.),
-                standard_name='time',
-                units=Unit('days since 1950-01-01 00:00:00',
-                           calendar='360_day')
-            ), 0)
-        add_auxiliary_coordinate([self.cube_1, self.cube_2])
-
-    def test_regrid_time_mon(self):
-        """Test changes to cubes."""
-        # test monthly
-        newcube_1 = regrid_time(self.cube_1, frequency='mon')
-        newcube_2 = regrid_time(self.cube_2, frequency='mon')
-        # no changes to core data
-        self.assertArrayEqual(newcube_1.data, self.cube_1.data)
-        self.assertArrayEqual(newcube_2.data, self.cube_2.data)
-        # no changes to number of coords and aux_coords
-        self.assertTrue(len(newcube_1.coords()),
-                        len(self.cube_1.coords()))
-        self.assertTrue(len(newcube_1.aux_coords),
-                        len(self.cube_1.aux_coords))
-        # check that the difference between the cubes (2x - x) equals
-        # the original cube data
-        expected = self.cube_1.data
-        diff_cube = newcube_2 - newcube_1
-        self.assertArrayEqual(diff_cube.data, expected)
-
-
-class TestRegridTimeDaily(tests.Test):
-    """Tests for regrid_time with daily frequency."""
-
-    def setUp(self):
-        """Prepare tests"""
-        self.cube_1 = _create_sample_cube()
-        self.cube_2 = _create_sample_cube()
-        self.cube_2.data = self.cube_2.data * 2.
-        self.cube_1.remove_coord('time')
-        self.cube_2.remove_coord('time')
-        self.cube_1.add_dim_coord(
-            iris.coords.DimCoord(
-                np.arange(14. * 24. + 6., 38. * 24. + 6., 24.),
-                standard_name='time',
-                units=Unit('hours since 1950-01-01 00:00:00',
-                           calendar='360_day')
-            ), 0)
-        self.cube_2.add_dim_coord(
-            iris.coords.DimCoord(
-                np.arange(14. * 24. + 3., 38. * 24. + 3., 24.),
-                standard_name='time',
-                units=Unit('hours since 1950-01-01 00:00:00',
-                           calendar='360_day')
-            ), 0)
-        add_auxiliary_coordinate([self.cube_1, self.cube_2])
-
-    def test_regrid_time_day(self):
-        """Test changes to cubes."""
-        # test daily
-        newcube_1 = regrid_time(self.cube_1, frequency='day')
-        newcube_2 = regrid_time(self.cube_2, frequency='day')
-        # no changes to core data
-        self.assertArrayEqual(newcube_1.data, self.cube_1.data)
-        self.assertArrayEqual(newcube_2.data, self.cube_2.data)
-        # no changes to number of coords and aux_coords
-        self.assertTrue(len(newcube_1.coords()),
-                        len(self.cube_1.coords()))
-        self.assertTrue(len(newcube_1.aux_coords),
-                        len(self.cube_1.aux_coords))
-        # check that the difference between the cubes (2x - x) equals
-        # the original cube data
-        expected = self.cube_1.data
-        diff_cube = newcube_2 - newcube_1
-        self.assertArrayEqual(diff_cube.data, expected)
-
-
-class TestAnnualAverage(tests.Test):
-    """Test class for the :mod:`esmvaltool.preprocessor._time` module."""
-
-    @staticmethod
-    def make_time_series(number_years=2):
-        """Make a cube with a time-only dimension."""
-        times = np.array([i * 30 + 15 for i in range(0, 12 * number_years, 1)])
-        bounds = np.array([i * 30 for i in range(0, 12 * number_years + 1, 1)])
-        bounds = np.array([[bnd, bounds[index + 1]]
-                           for index, bnd in enumerate(bounds[:-1])])
-        data = np.ones_like(times)
-        cube = iris.cube.Cube(data)
-        time = iris.coords.DimCoord(
-            times,
-            bounds=bounds,
-            standard_name='time',
-            units=Unit('days since 1950-01-01', calendar='360_day'))
-        cube = iris.cube.Cube(data, dim_coords_and_dims=[(time, 0)])
-        iris.coord_categorisation.add_year(cube, 'time')
-        return cube
-
-    def test_annual_average(self):
-        """Test for annual average."""
-        cube = self.make_time_series(number_years=2)
-
-        result = annual_mean(cube, decadal=False)
-        expected = np.array([1., 1.])
-        self.assertArrayEqual(result.data, expected)
-        expected_time = np.array([180., 540.])
-        self.assertArrayEqual(result.coord('time').points, expected_time)
-
-    def test_decadal_average(self):
-        """Test for decadal average."""
-        cube = self.make_time_series(number_years=20)
-        result = annual_mean(cube, decadal=True)
-
-        expected = np.array([1., 1.])
-        self.assertArrayEqual(result.data, expected)
-        expected_time = np.array([1800., 5400.])
-        self.assertArrayEqual(result.coord('time').points, expected_time)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/tests/unit/preprocessor/_volume/__init__.py b/tests/unit/preprocessor/_volume/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/tests/unit/preprocessor/_volume/test_volume.py b/tests/unit/preprocessor/_volume/test_volume.py
deleted file mode 100644
index 59071b2925..0000000000
--- a/tests/unit/preprocessor/_volume/test_volume.py
+++ /dev/null
@@ -1,136 +0,0 @@
-"""Unit tests for :mod:`esmvaltool.preprocessor._volume`."""
-
-import unittest
-
-import iris
-import numpy as np
-from cf_units import Unit
-
-import tests
-from esmvaltool.preprocessor._volume import (average_volume, depth_integration,
-                                             extract_trajectory,
-                                             extract_transect, extract_volume)
-
-
-class Test(tests.Test):
-    """Test class for the _volume module."""
-
-    def setUp(self):
- """Prepare tests""" - coord_sys = iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS) - data1 = np.ones((3, 2, 2)) - data2 = np.ma.ones((2, 3, 2, 2)) - data3 = np.ma.ones((4, 3, 2, 2)) - mask3 = np.full((4, 3, 2, 2), False) - mask3[0, 0, 0, 0] = True - data3 = np.ma.array(data3, mask=mask3) - - time = iris.coords.DimCoord([15, 45], - standard_name='time', - bounds=[[1., 30.], [30., 60.]], - units=Unit( - 'days since 1950-01-01', - calendar='gregorian')) - time2 = iris.coords.DimCoord([1., 2., 3., 4.], - standard_name='time', - bounds=[ - [0.5, 1.5], - [1.5, 2.5], - [2.5, 3.5], - [3.5, 4.5], - ], - units=Unit( - 'days since 1950-01-01', - calendar='gregorian')) - - zcoord = iris.coords.DimCoord([0.5, 5., 50.], - long_name='zcoord', - bounds=[[0., 2.5], [2.5, 25.], - [25., 250.]], - units='m', - attributes={'positive': 'down'}) - lons2 = iris.coords.DimCoord([1.5, 2.5], - standard_name='longitude', - bounds=[[1., 2.], [2., 3.]], - units='degrees_east', - coord_system=coord_sys) - lats2 = iris.coords.DimCoord([1.5, 2.5], - standard_name='latitude', - bounds=[[1., 2.], [2., 3.]], - units='degrees_north', - coord_system=coord_sys) - - coords_spec3 = [(zcoord, 0), (lats2, 1), (lons2, 2)] - self.grid_3d = iris.cube.Cube(data1, dim_coords_and_dims=coords_spec3) - - coords_spec4 = [(time, 0), (zcoord, 1), (lats2, 2), (lons2, 3)] - self.grid_4d = iris.cube.Cube(data2, dim_coords_and_dims=coords_spec4) - - coords_spec5 = [(time2, 0), (zcoord, 1), (lats2, 2), (lons2, 3)] - self.grid_4d_2 = iris.cube.Cube( - data3, dim_coords_and_dims=coords_spec5) - - # allow iris to figure out the axis='z' coordinate - iris.util.guess_coord_axis(self.grid_3d.coord('zcoord')) - iris.util.guess_coord_axis(self.grid_4d.coord('zcoord')) - iris.util.guess_coord_axis(self.grid_4d_2.coord('zcoord')) - - def test_extract_volume(self): - """Test to extract the top two layers of a 3 layer depth column.""" - result = extract_volume(self.grid_3d, 0., 10.) - expected = np.ones((2, 2, 2)) - print(result.data, expected.data) - self.assertArrayEqual(result.data, expected) - - def test_average_volume(self): - """Test to take the volume weighted average of a (2,3,2,2) cube.""" - result = average_volume(self.grid_4d, 'latitude', 'longitude') - expected = np.array([1., 1.]) - self.assertArrayEqual(result.data, expected) - - def test_average_volume_long(self): - """ - Test to take the volume weighted average of a (4,3,2,2) cube. - - This extra time is needed, as the volume average calculation uses - different methods for small and large cubes. - """ - result = average_volume(self.grid_4d_2, 'latitude', 'longitude') - expected = np.array([1., 1., 1., 1.]) - self.assertArrayEqual(result.data, expected) - - def test_depth_integration_1d(self): - """Test to take the depth integration of a 3 layer cube.""" - result = depth_integration(self.grid_3d[:, 0, 0]) - expected = np.ones((1, 1)) * 250. - print(result.data, expected.data) - self.assertArrayEqual(result.data, expected) - - def test_depth_integration_3d(self): - """Test to take the depth integration of a 3 layer cube.""" - result = depth_integration(self.grid_3d) - expected = np.ones((2, 2)) * 250. 
-        print(result.data, expected.data)
-        self.assertArrayEqual(result.data, expected)
-
-    def test_extract_transect_latitude(self):
-        """Test to extract a transect from a (3, 2, 2) cube."""
-        result = extract_transect(self.grid_3d, latitude=1.5)
-        expected = np.ones((3, 2))
-        self.assertArrayEqual(result.data, expected)
-
-    def test_extract_transect_longitude(self):
-        """Test to extract a transect from a (3, 2, 2) cube."""
-        result = extract_transect(self.grid_3d, longitude=1.5)
-        expected = np.ones((3, 2))
-        self.assertArrayEqual(result.data, expected)
-
-    def test_extract_trajectory(self):
-        """Test to extract a trajectory from a (3, 2, 2) cube."""
-        result = extract_trajectory(self.grid_3d, [1.5, 2.5], [2., 2.], 2)
-        expected = np.ones((3, 2))
-        self.assertArrayEqual(result.data, expected)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/tests/unit/preprocessor/test_runner.py b/tests/unit/preprocessor/test_runner.py
deleted file mode 100644
index 8814c5fd4a..0000000000
--- a/tests/unit/preprocessor/test_runner.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from esmvaltool.preprocessor import (DEFAULT_ORDER, MULTI_MODEL_FUNCTIONS,
-                                     _get_itype)
-
-
-def test_first_argument_name():
-    """Check that the input type of all preprocessor functions is valid."""
-    valid_itypes = ('file', 'files', 'cube', 'cubes', 'products')
-    for step in DEFAULT_ORDER:
-        itype = _get_itype(step)
-        assert itype in valid_itypes, (
-            "Invalid preprocessor function definition {}, first argument "
-            "should be one of {} but is {}".format(step, valid_itypes, itype))
-
-
-def test_multi_model_exist():
-    assert MULTI_MODEL_FUNCTIONS.issubset(set(DEFAULT_ORDER))
diff --git a/tests/unit/test_lint.py b/tests/unit/test_lint.py
index b07ae55f25..5951a8f32a 100644
--- a/tests/unit/test_lint.py
+++ b/tests/unit/test_lint.py
@@ -1,79 +1,34 @@
 """Lint tests."""
 import os
 import subprocess
+import sys
 import textwrap
+from pathlib import Path
 
-import pycodestyle  # formerly known as pep8
+import pytest
 
+import esmvaltool
 from esmvaltool.utils.nclcodestyle import nclcodestyle
 
 
-def test_pep8_conformance():
-    """Test that we conform to PEP-8."""
-    check_paths = [
-        'esmvaltool',
-        'tests',
-    ]
-    exclude_paths = [
-        'esmvaltool/doc', 'esmvaltool/diag_scripts/cvdp/cvdp',
-        'esmvaltool/cmor/tables'
-    ]
-
-    print("PEP8 check of directories: {}\n".format(', '.join(check_paths)))
-
-    # Get paths wrt package root
-    package_root = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
-    for paths in (check_paths, exclude_paths):
-        for i, path in enumerate(paths):
-            paths[i] = os.path.join(package_root, path)
-
-    style = pycodestyle.StyleGuide()
-    style.options.exclude.extend(exclude_paths)
-
-    success = style.check_files(check_paths).total_errors == 0
-
-    if not success:
-        print(
-            textwrap.dedent("""
-            Your Python code does not conform to the official Python style
-            guide (PEP8), see https://www.python.org/dev/peps/pep-0008
-
-            A list of warning and error messages can be found above,
-            prefixed with filename:line number:column number.
-
-            Run `yapf -i yourfile.py` to automatically fix most errors.
-            Run `yapf -d yourfile.py` to preview what would be changed.
-            Run `pip install --upgrade yapf` to install the latest version
-            of yapf.
- """)) - - assert success, "Your code does not conform to PEP8" - - def test_nclcodestyle(): """Test that NCL code is formatted according to our standards.""" + package_root = Path(esmvaltool.__file__).absolute().parent check_paths = [ - 'esmvaltool', - 'tests', + package_root, ] + print("Formatting check of NCL code in directories: {}\n".format(', '.join( + str(p) for p in check_paths))) + exclude_paths = [ - 'esmvaltool/diag_scripts/cvdp/cvdp', + package_root / 'diag_scripts' / 'cvdp' / 'cvdp', ] - print("Formatting check of NCL code in directories: {}\n".format( - ', '.join(check_paths))) - - # Get paths wrt package root - package_root = os.path.dirname(os.path.dirname(os.path.dirname(__file__))) - for paths in (check_paths, exclude_paths): - for i, path in enumerate(paths): - paths[i] = os.path.join(package_root, path) - style = nclcodestyle.StyleGuide() - style.options.exclude.extend(exclude_paths) + style.options.exclude.extend(str(p) for p in exclude_paths) - success = style.check_files(check_paths).total_errors == 0 + success = style.check_files(str(p) for p in check_paths).total_errors == 0 if not success: print( @@ -89,6 +44,9 @@ def test_nclcodestyle(): assert success, "Your NCL code does not follow our formatting standards." +@pytest.mark.installation +@pytest.mark.skipif(sys.platform == 'darwin', + reason="ESMValTool R not supported on OSX") def test_r_lint(monkeypatch): """Test R lint.""" monkeypatch.setenv("LINTR_COMMENT_BOT", "FALSE") @@ -109,5 +67,5 @@ def test_r_lint(monkeypatch): """)) print(ex.output) - assert False,\ + assert False, \ 'Your R code does not follow our formatting standards.' diff --git a/tests/unit/test_naming.py b/tests/unit/test_naming.py index 41971a60a8..eb1255a9cc 100644 --- a/tests/unit/test_naming.py +++ b/tests/unit/test_naming.py @@ -3,10 +3,18 @@ import os import unittest +IGNORE = { + '.git', + '.github', + '.eggs', + 'ESMValTool.egg-info', + '__pycache__', + 'test-reports', +} + class TestNaming(unittest.TestCase): """Test naming of files and folders""" - def setUp(self): """Prepare tests""" folder = os.path.join(__file__, '..', '..', '..') @@ -25,6 +33,10 @@ def test_windows_reserved_names(self): } for dirpath, dirnames, filenames in os.walk(self.esmvaltool_folder): + # we need to modify in-place dirnames so that we don't walk + # over the contents of the dirs that need be ignored + dirnames[:] = [dirn for dirn in dirnames if dirn not in IGNORE] + print(dirnames) error_msg = 'Reserved windows name found at {}.' 
                        ' Please rename it ' \
                        '(Windows reserved names are: {})' \
@@ -33,8 +45,8 @@ def test_windows_reserved_names(self):
             self.assertTrue(reserved_names.isdisjoint(filenames), error_msg)
             without_extensions = (os.path.splitext(filename)[0]
                                   for filename in filenames)
-            self.assertTrue(
-                reserved_names.isdisjoint(without_extensions), error_msg)
+            self.assertTrue(reserved_names.isdisjoint(without_extensions),
+                            error_msg)
 
     def test_avoid_casing_collisions(self):
         """
@@ -43,6 +55,10 @@ def test_avoid_casing_collisions(self):
         This includes folders differing from files
         """
         for dirpath, dirnames, filenames in os.walk(self.esmvaltool_folder):
+            # we need to modify dirnames in-place so that we don't walk
+            # over the contents of the dirs that need to be ignored
+            dirnames[:] = [dirn for dirn in dirnames if dirn not in IGNORE]
+            print(dirnames)
             self.assertEqual(
                 len(filenames) + len(dirnames),
                 len({name.lower()
@@ -60,8 +76,10 @@ def test_no_namelist(self):
         exclude_paths = ['esmvaltool/diag_scripts/cvdp/cvdp']
 
         for dirpath, dirnames, filenames in os.walk(self.esmvaltool_folder):
-            if '.git' in dirpath.split(os.sep):
-                continue
+            # we need to modify dirnames in-place so that we don't walk
+            # over the contents of the dirs that need to be ignored
+            dirnames[:] = [dirn for dirn in dirnames if dirn not in IGNORE]
+            print(dirnames)
             if any([item in dirpath for item in exclude_paths]):
                 continue
             self.assertFalse(
diff --git a/tests/unit/test_recipes.py b/tests/unit/test_recipes.py
new file mode 100644
index 0000000000..b1a1518928
--- /dev/null
+++ b/tests/unit/test_recipes.py
@@ -0,0 +1,67 @@
+"""Recipe tests."""
+import textwrap
+from pathlib import Path
+
+import pytest
+import yaml
+
+import esmvaltool
+from tests.integration.test_recipes_loading import IDS, RECIPES
+
+ESMVALTOOL_ROOT = Path(esmvaltool.__file__).absolute().parent
+REFERENCES_PATH = ESMVALTOOL_ROOT / 'references'
+
+CONFIG_REFERENCES_PATH = ESMVALTOOL_ROOT / 'config-references.yml'
+AUTHORS = yaml.safe_load(CONFIG_REFERENCES_PATH.read_text())['authors']
+
+
+@pytest.mark.parametrize('recipe_file', RECIPES, ids=IDS)
+def test_reference_tags(recipe_file):
+    """Check that a bibtex file exists in REFERENCES_PATH for every tag."""
+    recipe = yaml.safe_load(recipe_file.read_text())
+    tags = recipe.get('documentation', {}).get('references', [])
+    msg = textwrap.dedent("""
+        The tag '{tag}' is mentioned in recipe '{recipe}'.
+        However, its reference file '{tag}.bibtex' is not available in {path}.
+        Please check instructions on how to add references at
+        https://docs.esmvaltool.org/en/latest/community/diagnostic.html#adding-references
+        """)
+    for tag in tags:
+        bibtex_file = REFERENCES_PATH / f'{tag}.bibtex'
+        assert bibtex_file.is_file(), msg.format(tag=tag,
+                                                 recipe=recipe_file,
+                                                 path=REFERENCES_PATH)
+
+
+@pytest.mark.parametrize('recipe_file', RECIPES, ids=IDS)
+def test_maintainers(recipe_file):
+    """Check recipe maintainers."""
+    recipe = yaml.safe_load(recipe_file.read_text())
+
+    # Make sure that 'documentation' and 'maintainer' entries are present
+    msg = "'documentation' missing in recipe"
+    assert 'documentation' in recipe, msg
+    msg = "'maintainer' missing in 'documentation' of recipe"
+    assert 'maintainer' in recipe['documentation'], msg
+    maintainers = recipe['documentation']['maintainer']
+
+    # Make sure that the maintainer entry is a non-empty list
+    msg = f"'maintainer' entry needs to be a list, got {type(maintainers)}"
+    assert isinstance(maintainers, list), msg
+    msg = "'maintainer' needs to contain at least one element"
+    assert maintainers, msg
+
+    # Make sure that 'unmaintained' is the sole entry if used
+    if 'unmaintained' in maintainers:
+        msg = (f"If 'unmaintained' is given as maintainer, this has to be "
+               f"the sole entry, got {maintainers}")
+        assert len(maintainers) == 1, msg
+
+    # Check that maintainers are valid
+    invalid_maintainers = []
+    for maintainer in maintainers:
+        if maintainer not in AUTHORS:
+            invalid_maintainers.append(maintainer)
+    msg = (f"Got invalid maintainers: {invalid_maintainers}. Valid entries "
+           f"are authors from {CONFIG_REFERENCES_PATH}.")
+    assert not invalid_maintainers, msg
diff --git a/tests/unit/utils/test_compare.py b/tests/unit/utils/test_compare.py
new file mode 100644
index 0000000000..9251229bae
--- /dev/null
+++ b/tests/unit/utils/test_compare.py
@@ -0,0 +1,16 @@
+"""Test the esmvaltool.utils.testing.regression.compare module."""
+import os
+
+from esmvaltool.utils.testing.regression.compare import (
+    get_recipe_name_from_file
+)
+
+
+def test_get_recipe_name_from_file(tmp_path):
+    """Test the string extractor for recipe name."""
+    path_to_file = tmp_path / "recipe_python_20220317_162441" / "run"
+    os.makedirs(path_to_file)
+    obtained = get_recipe_name_from_file(path_to_file)
+    print(f"Obtained: {obtained}")
+    print("Expected: name_from_file0/recipe_python")
+    assert obtained == "name_from_file0/recipe_python"
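A note on the os.walk pruning idiom that the tests/unit/test_naming.py hunks above rely on: the slice assignment `dirnames[:] = ...` mutates the very list that os.walk uses to decide which subdirectories to descend into, so ignored directories are skipped entirely, while plain rebinding (`dirnames = ...`) would leave the traversal unchanged. Below is a minimal, self-contained sketch of the idiom; the reduced IGNORE set and the walked directory ('.') are illustrative only and not part of the change set.

import os

IGNORE = {'.git', '__pycache__'}  # illustrative subset of the set added in the diff

for dirpath, dirnames, filenames in os.walk('.'):
    # Slice assignment mutates os.walk's own list, pruning the traversal;
    # rebinding with `dirnames = [...]` would prune nothing.
    dirnames[:] = [name for name in dirnames if name not in IGNORE]
    print(dirpath, filenames)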